From 68da90a11a3f12708a9492c56f3cf439732978af Mon Sep 17 00:00:00 2001
From: root
Date: Tue, 28 Oct 2025 15:10:40 +0200
Subject: [PATCH] Restructure solution layout by module

---
 .gitattributes | 2 +-
 .../_deprecated-concelier-ci.yml.disabled | 16 +-
 .gitea/workflows/build-test-deploy.yml | 44 +-
 .gitea/workflows/docs.yml | 122 +-
 .gitea/workflows/release.yml | 2 +-
 .gitignore | 68 +-
 .venv/pyvenv.cfg | 10 +-
 Directory.Build.props | 24 +-
 Mongo2Go-4.1.0/src/Mongo2Go/Mongo2Go.csproj | 186 +-
 .../src/Mongo2GoTests/Mongo2GoTests.csproj | 40 +-
 NuGet.config | 88 +-
 README.md | 6 +-
 SPRINTS.md | 1119 -
 SPRINTS_PRIOR_20251019.md | 208 -
 SPRINTS_PRIOR_20251021.md | 88 -
 SPRINTS_PRIOR_20251025.md | 34 -
 deploy/compose/docker-compose.prod.yaml | 440 +-
 .../docker-compose.telemetry-storage.yaml | 114 +-
 deploy/compose/docker-compose.telemetry.yaml | 68 +-
 deploy/compose/env/prod.env.example | 56 +-
 .../files/otel-collector-config.yaml | 128 +-
 .../stellaops/templates/otel-collector.yaml | 242 +-
 deploy/helm/stellaops/values-prod.yaml | 442 +-
 deploy/helm/stellaops/values.yaml | 72 +-
 deploy/telemetry/.gitignore | 2 +-
 deploy/telemetry/README.md | 70 +-
 deploy/telemetry/otel-collector-config.yaml | 134 +-
 deploy/telemetry/storage/README.md | 66 +-
 deploy/telemetry/storage/loki.yaml | 96 +-
 deploy/telemetry/storage/prometheus.yaml | 38 +-
 deploy/telemetry/storage/tempo.yaml | 112 +-
 .../storage/tenants/loki-overrides.yaml | 38 +-
 .../storage/tenants/tempo-overrides.yaml | 32 +-
 deploy/tools/check-channel-alignment.py | 260 +-
 deploy/tools/validate-profiles.sh | 122 +-
 docs/09_API_CLI_REFERENCE.md | 1866 +-
 docs/10_CONCELIER_CLI_QUICKSTART.md | 36 +-
 docs/11_AUTHORITY.md | 760 +-
 docs/11_DATA_SCHEMAS.md | 348 +-
 docs/12_PERFORMANCE_WORKBOOK.md | 2 +-
 docs/19_TEST_SUITE_OVERVIEW.md | 138 +-
 docs/21_INSTALL_GUIDE.md | 380 +-
 docs/ARCHITECTURE_AUTHORITY.md | 878 +-
 docs/ARCHITECTURE_CLI.md | 812 +-
 docs/ARCHITECTURE_CONCELIER.md | 1036 +-
 docs/ARCHITECTURE_DEVOPS.md | 2 +-
 docs/ARCHITECTURE_SCANNER.md | 974 +-
 docs/ARCHITECTURE_VEXER.md | 926 +-
 docs/README.md | 6 +-
 docs/TASKS.md | 762 +-
 docs/accessibility.md | 262 +-
 docs/advisories/aggregation.md | 436 +-
 docs/airgap/EPIC_16_AIRGAP_MODE.md | 858 +-
 docs/aoc/aoc-guardrails.md | 26 +-
 docs/api/EPIC_17_SDKS_OPENAPI.md | 20 +-
 docs/api/policy.md | 4 +-
 docs/architecture/console.md | 4 +-
 docs/architecture/overview.md | 336 +-
 docs/architecture/policy-engine.md | 486 +-
 docs/assets/ui/tours/README.md | 26 +-
 docs/attestor/EPIC_19_ATTESTOR_CONSOLE.md | 2 +-
 docs/backlog/2025-10-cleanup.md | 4 +-
 docs/cli-vs-ui-parity.md | 2 +-
 docs/cli/cli-reference.md | 632 +-
 docs/cli/policy.md | 2 +-
 docs/deploy/console.md | 456 +-
 docs/deploy/containers.md | 320 +-
 docs/dev/30_EXCITITOR_CONNECTOR_GUIDE.md | 440 +-
 docs/dev/30_VEXER_CONNECTOR_GUIDE.md | 2 +-
 .../31_AUTHORITY_PLUGIN_DEVELOPER_GUIDE.md | 424 +-
 docs/dev/BUILDX_PLUGIN_QUICKSTART.md | 206 +-
 docs/dev/EXCITITOR_STATEMENT_BACKFILL.md | 172 +-
 docs/dev/authority-dpop-mtls-plan.md | 284 +-
 docs/dev/authority-plugin-di-coordination.md | 92 +-
 docs/dev/fixtures.md | 18 +-
 docs/dev/kisa_connector_notes.md | 2 +-
 docs/dev/merge_semver_playbook.md | 308 +-
 docs/dev/normalized_versions_rollout.md | 14 +-
 docs/devops/policy-schema-export.md | 54 +-
 docs/events/orchestrator-scanner-events.md | 242 +-
 .../scanner.event.report.ready@1.sample.json | 186 +-
 ...scanner.event.scan.completed@1.sample.json | 198 +-
 ...cheduler.graph.job.completed@1.sample.json | 72 +-
 docs/events/scanner.event.report.ready@1.json | 328 +-
.../scanner.event.scan.completed@1.json | 348 +- .../scheduler.graph.job.completed@1.json | 392 +- docs/examples/policies/README.md | 32 +- docs/examples/policies/baseline.md | 158 +- docs/examples/policies/baseline.stella | 92 +- docs/examples/policies/baseline.yaml | 68 +- docs/examples/policies/internal-only.md | 144 +- docs/examples/policies/internal-only.stella | 78 +- docs/examples/policies/internal-only.yaml | 62 +- docs/examples/policies/serverless.md | 144 +- docs/examples/policies/serverless.stella | 78 +- docs/examples/policies/serverless.yaml | 82 +- docs/examples/ui-tours.md | 308 +- docs/export-center/api.md | 674 +- docs/export-center/architecture.md | 250 +- docs/export-center/cli.md | 462 +- docs/export-center/mirror-bundles.md | 404 +- docs/export-center/overview.md | 126 +- docs/export-center/profiles.md | 278 +- docs/export-center/provenance-and-signing.md | 300 +- docs/export-center/trivy-adapter.md | 492 +- docs/faq/policy-faq.md | 192 +- AGENTS.md => docs/implplan/AGENTS.md | 0 EPIC_1.md => docs/implplan/EPIC_1.md | 1048 +- EPIC_10.md => docs/implplan/EPIC_10.md | 4 +- EPIC_11.md => docs/implplan/EPIC_11.md | Bin 38558 -> 38590 bytes EPIC_12.md => docs/implplan/EPIC_12.md | Bin 41202 -> 40398 bytes EPIC_13.md => docs/implplan/EPIC_13.md | Bin EPIC_14.md => docs/implplan/EPIC_14.md | Bin EPIC_15.md => docs/implplan/EPIC_15.md | Bin EPIC_16.md => docs/implplan/EPIC_16.md | 2 +- EPIC_17.md => docs/implplan/EPIC_17.md | 2 +- EPIC_18.md => docs/implplan/EPIC_18.md | 2 +- EPIC_19.md => docs/implplan/EPIC_19.md | 2 +- EPIC_2.md => docs/implplan/EPIC_2.md | 1134 +- EPIC_4.md => docs/implplan/EPIC_4.md | 818 +- EPIC_5.md => docs/implplan/EPIC_5.md | 862 +- EPIC_6.md => docs/implplan/EPIC_6.md | 6 +- EPIC_7.md => docs/implplan/EPIC_7.md | 10 +- EPIC_8.md => docs/implplan/EPIC_8.md | 2 +- EPIC_9.md => docs/implplan/EPIC_9.md | 4 +- EXECPLAN.md => docs/implplan/EXECPLAN.md | 800 +- docs/implplan/SPRINTS.md | 1096 + docs/implplan/SPRINTS_PRIOR_20251019.md | 208 + docs/implplan/SPRINTS_PRIOR_20251021.md | 88 + docs/implplan/SPRINTS_PRIOR_20251025.md | 34 + .../implplan/SPRINTS_PRIOR_20251027.md | 64 +- docs/implplan/SPRINTS_PRIOR_20251028.md | 26 + docs/ingestion/aggregation-only-contract.md | 360 +- docs/install/docker.md | 414 +- docs/notifications/architecture.md | 236 +- docs/notifications/digests.md | 184 +- docs/notifications/overview.md | 152 +- .../pack-approvals-integration.md | 124 +- docs/notifications/rules.md | 294 +- docs/notifications/templates.md | 260 +- docs/observability/policy.md | 332 +- docs/observability/ui-telemetry.md | 382 +- docs/operations/cli-release-and-packaging.md | 268 +- docs/operations/export-runbook.md | 406 +- docs/ops/authority-backup-restore.md | 194 +- docs/ops/authority-key-rotation.md | 188 +- docs/ops/authority-monitoring.md | 166 +- docs/ops/concelier-apple-operations.md | 154 +- docs/ops/concelier-authority-audit-runbook.md | 318 +- docs/ops/concelier-cccs-operations.md | 144 +- docs/ops/concelier-conflict-resolution.md | 320 +- docs/ops/concelier-cve-kev-operations.md | 2 +- docs/ops/concelier-kisa-operations.md | 148 +- docs/ops/concelier-mirror-operations.md | 476 +- docs/ops/concelier-nkcki-operations.md | 96 +- docs/ops/concelier-osv-operations.md | 48 +- docs/ops/deployment-upgrade-runbook.md | 302 +- docs/ops/launch-cutover.md | 256 +- docs/ops/launch-readiness.md | 98 +- docs/ops/nuget-preview-bootstrap.md | 2 +- docs/ops/registry-token-service.md | 4 +- docs/ops/scanner-analyzers-operations.md | 4 +- 
docs/ops/telemetry-collector.md | 226 +- docs/ops/ui-auth-smoke.md | 64 +- .../zastava-runtime-grafana-dashboard.json | 410 +- docs/ops/zastava-runtime-operations.md | 348 +- .../ops/zastava-runtime-prometheus-rules.yaml | 62 +- docs/policy/dsl.md | 588 +- docs/policy/exception-effects.md | 12 +- docs/policy/gateway.md | 248 +- docs/policy/lifecycle.md | 476 +- docs/policy/overview.md | 346 +- docs/policy/runs.md | 2 +- docs/risk/EPIC_18_RISK_PROFILES.md | 12 +- docs/scanner-core-contracts.md | 294 +- docs/schemas/policy-diff-summary.schema.json | 142 +- docs/schemas/policy-explain-trace.schema.json | 516 +- docs/schemas/policy-run-request.schema.json | 260 +- docs/schemas/policy-run-status.schema.json | 434 +- docs/security/authority-scopes.md | 522 +- docs/security/authority-threat-model.md | 212 +- docs/security/console-security.md | 366 +- docs/security/pack-signing-and-rbac.md | 330 +- docs/security/policy-governance.md | 224 +- docs/task-packs/authoring-guide.md | 416 +- docs/task-packs/registry.md | 348 +- docs/task-packs/runbook.md | 324 +- docs/task-packs/spec.md | 498 +- docs/ui/admin.md | 348 +- docs/ui/advisories-and-vex.md | 398 +- docs/ui/console-overview.md | 260 +- docs/ui/console.md | 288 +- docs/ui/downloads.md | 424 +- docs/ui/findings.md | 358 +- docs/ui/navigation.md | 326 +- docs/ui/policies.md | 384 +- docs/ui/policy-editor.md | 358 +- docs/ui/runs.md | 338 +- docs/ui/sbom-explorer.md | 390 +- .../2025-10-20-authority-identity-registry.md | 28 +- docs/updates/2025-10-20-scanner-events.md | 2 +- docs/updates/2025-10-22-docs-guild.md | 26 +- .../2025-10-26-authority-graph-scopes.md | 30 +- .../2025-10-26-scheduler-graph-jobs.md | 6 +- .../2025-10-27-console-security-signoff.md | 96 +- .../updates/2025-10-27-orch-operator-scope.md | 30 +- .../2025-10-27-policy-scope-migration.md | 30 +- docs/updates/2025-10-27-task-packs-docs.md | 30 +- docs/updates/2025-10-28-docs-guild.md | 52 +- .../2025-10-29-export-center-provenance.md | 18 +- docs/updates/2025-10-29-notify-docs.md | 20 +- ...2025-10-29-scheduler-policy-doc-refresh.md | 4 +- .../2025-10-31-console-security-refresh.md | 22 +- docs/vex/aggregation.md | 458 +- etc/authority.yaml | 412 +- etc/authority.yaml.sample | 674 +- etc/concelier.yaml.sample | 238 +- etc/policy-engine.yaml.sample | 66 +- etc/policy-gateway.yaml.sample | 78 +- etc/registry-signing-sample.pem | 54 +- etc/registry-token.yaml | 60 +- etc/secrets/cartographer-service.secret | 4 +- etc/secrets/concelier-ingest.secret | 4 +- etc/secrets/console-web.secret | 4 +- etc/secrets/excitor-ingest.secret | 4 +- etc/secrets/graph-api-cli.secret | 4 +- etc/secrets/graph-api.secret | 4 +- etc/secrets/policy-cli.secret | 4 +- etc/secrets/policy-engine.secret | 4 +- etc/signals.yaml.sample | 56 +- ops/authority/Dockerfile | 2 +- ops/authority/README.md | 124 +- ops/devops/README.md | 184 +- ops/devops/TASKS.md | 344 +- ops/devops/check_cli_parity.py | 106 +- ops/devops/nuget-preview-packages.csv | 60 +- ops/devops/release/build_release.py | 2206 +- ops/devops/release/components.json | 18 +- .../release/docker/Dockerfile.angular-ui | 4 +- .../release/docker/Dockerfile.dotnet-service | 104 +- ops/devops/release/docker/nginx-default.conf | 44 +- ops/devops/release/test_verify_release.py | 464 +- ops/devops/release/verify_release.py | 668 +- .../scripts/check-advisory-raw-duplicates.js | 154 +- ops/devops/sync-preview-nuget.sh | 142 +- ops/devops/telemetry/generate_dev_tls.sh | 154 +- .../telemetry/package_offline_bundle.py | 272 +- 
ops/devops/telemetry/smoke_otel_collector.py | 394 +- ops/devops/validate_restore_sources.py | 366 +- ops/offline-kit/build_offline_kit.py | 890 +- ops/offline-kit/mirror_debug_store.py | 442 +- ops/offline-kit/run-python-analyzer-smoke.sh | 2 +- ops/offline-kit/test_build_offline_kit.py | 512 +- .../python/StellaOps.Auth.Abstractions.xml | 844 +- .../python/StellaOps.Auth.Client.xml | 466 +- ...ps.Scanner.Analyzers.Lang.Python.deps.json | 1714 +- out/linknotmerge-bench.csv | 8 +- out/linknotmerge-bench.json | 166 +- out/linknotmerge-bench.prom | 120 +- out/linknotmerge-vex-bench.csv | 8 +- out/linknotmerge-vex-bench.json | 166 +- out/linknotmerge-vex-bench.prom | 100 +- out/notify-bench.csv | 8 +- out/notify-bench.json | 166 +- out/notify-bench.prom | 78 +- out/policy-bench.csv | 4 +- out/policy-bench.json | 48 +- out/policy-bench.prom | 34 +- .../policy-simulation-summary.json | 62 +- out/tmp-cdx/Program.cs | 8 +- .../manifest.json | 42 +- plugins/notify/email/notify-plugin.json | 36 +- plugins/notify/slack/notify-plugin.json | 38 +- plugins/notify/teams/notify-plugin.json | 38 +- plugins/notify/webhook/notify-plugin.json | 36 +- .../manifest.json | 46 +- .../manifest.json | 46 +- .../manifest.json | 44 +- .../manifest.json | 46 +- .../manifest.json | 46 +- samples/api/scheduler/graph-build-job.json | 38 +- samples/api/scheduler/graph-overlay-job.json | 42 +- .../api/scheduler/policy-diff-summary.json | 62 +- .../api/scheduler/policy-explain-trace.json | 166 +- samples/api/scheduler/policy-run-request.json | 58 +- samples/api/scheduler/policy-run-status.json | 82 +- samples/api/scheduler/run-summary.json | 202 +- samples/ci/buildx-demo/README.md | 84 +- .../github-actions-buildx-demo.yml | 186 +- samples/policy/README.md | 50 +- samples/policy/baseline/diffs.json | 24 +- samples/policy/baseline/findings.json | 28 +- samples/policy/internal-only/diffs.json | 24 +- samples/policy/internal-only/findings.json | 30 +- samples/policy/serverless/diffs.json | 24 +- samples/policy/serverless/findings.json | 30 +- samples/policy/simulations/baseline/diff.json | 46 +- .../policy/simulations/baseline/scenario.json | 42 +- .../simulations/internal-only/diff.json | 46 +- .../simulations/internal-only/scenario.json | 46 +- .../policy/simulations/serverless/diff.json | 46 +- .../simulations/serverless/scenario.json | 46 +- samples/runtime/java-demo/README.md | 10 +- scripts/export-policy-schemas.sh | 22 +- scripts/rotate-policy-cli-secret.sh | 126 +- scripts/update-apple-fixtures.ps1 | 38 +- scripts/update-apple-fixtures.sh | 28 +- scripts/update-model-goldens.ps1 | 18 +- scripts/update-model-goldens.sh | 16 +- scripts/verify-notify-plugins.ps1 | 114 +- scripts/verify-notify-plugins.sh | 112 +- scripts/verify-policy-scopes.py | 172 +- .../StellaOps.AdvisoryAI/AGENTS.md | 2 +- .../StellaOps.AdvisoryAI/TASKS.md | 24 +- .../StellaOps.AirGap.Controller/AGENTS.md | 32 +- .../StellaOps.AirGap.Controller/TASKS.md | 36 +- .../StellaOps.AirGap.Importer/AGENTS.md | 32 +- .../StellaOps.AirGap.Importer/TASKS.md | 38 +- .../StellaOps.AirGap.Policy/AGENTS.md | 32 +- .../StellaOps.AirGap.Policy/TASKS.md | 38 +- .../StellaOps.AirGap.Time/AGENTS.md | 30 +- .../StellaOps.AirGap.Time/TASKS.md | 26 +- src/Aoc/StellaOps.Aoc.sln | 56 + .../StellaOps.Aoc/AocForbiddenKeys.cs | 50 +- .../StellaOps.Aoc/AocGuardException.cs | 34 +- .../StellaOps.Aoc/AocGuardExtensions.cs | 44 +- .../StellaOps.Aoc/AocGuardOptions.cs | 58 +- .../StellaOps.Aoc/AocGuardResult.cs | 28 +- .../StellaOps.Aoc/AocViolation.cs | 26 +- 
.../StellaOps.Aoc/AocViolationCode.cs | 68 +- .../StellaOps.Aoc/AocWriteGuard.cs | 254 +- .../ServiceCollectionExtensions.cs | 34 +- .../StellaOps.Aoc/StellaOps.Aoc.csproj | 24 +- .../StellaOps.Aoc.Tests/AocWriteGuardTests.cs | 226 +- .../StellaOps.Aoc.Tests.csproj | 42 + .../__Tests}/StellaOps.Aoc.Tests/UnitTest1.cs | 20 +- .../StellaOps.Aoc.Tests}/xunit.runner.json | 0 .../StellaOps.Api.Governance/AGENTS.md | 30 +- .../StellaOps.Api.Governance/TASKS.md | 2 +- src/{ => Api}/StellaOps.Api.OpenApi/AGENTS.md | 4 +- src/{ => Api}/StellaOps.Api.OpenApi/TASKS.md | 38 +- .../authority/openapi.yaml | 1378 +- .../StellaOps.Attestor.Envelope/AGENTS.md | 30 +- .../StellaOps.Attestor.Envelope/TASKS.md | 26 +- .../StellaOps.Attestor.Types/AGENTS.md | 28 +- .../StellaOps.Attestor.Types/TASKS.md | 26 +- .../StellaOps.Attestor.Verify/AGENTS.md | 28 +- .../StellaOps.Attestor.Verify/TASKS.md | 26 +- src/Attestor/StellaOps.Attestor.sln | 182 + .../StellaOps.Attestor/AGENTS.md | 40 +- .../Audit/AttestorAuditRecord.cs | 0 .../Observability/AttestorMetrics.cs | 0 .../Options/AttestorOptions.cs | 0 .../Rekor/IRekorClient.cs | 0 .../Rekor/RekorBackend.cs | 0 .../Rekor/RekorProofResponse.cs | 0 .../Rekor/RekorSubmissionResponse.cs | 0 .../StellaOps.Attestor.Core.csproj | 0 .../Storage/AttestorArchiveBundle.cs | 0 .../Storage/AttestorEntry.cs | 0 .../Storage/IAttestorArchiveStore.cs | 0 .../Storage/IAttestorAuditSink.cs | 0 .../Storage/IAttestorDedupeStore.cs | 0 .../Storage/IAttestorEntryRepository.cs | 0 .../Submission/AttestorSubmissionRequest.cs | 0 .../Submission/AttestorSubmissionResult.cs | 0 .../AttestorSubmissionValidationResult.cs | 0 .../Submission/AttestorSubmissionValidator.cs | 0 .../Submission/AttestorValidationException.cs | 0 .../Submission/IAttestorSubmissionService.cs | 0 .../Submission/IDsseCanonicalizer.cs | 0 .../Submission/SubmissionContext.cs | 0 .../AttestorVerificationException.cs | 0 .../AttestorVerificationRequest.cs | 0 .../AttestorVerificationResult.cs | 0 .../IAttestorVerificationService.cs | 0 .../Properties/AssemblyInfo.cs | 0 .../Rekor/HttpRekorClient.cs | 0 .../Rekor/StubRekorClient.cs | 0 .../ServiceCollectionExtensions.cs | 0 .../StellaOps.Attestor.Infrastructure.csproj | 42 +- .../Storage/InMemoryAttestorDedupeStore.cs | 0 .../Storage/MongoAttestorAuditSink.cs | 0 .../Storage/MongoAttestorEntryRepository.cs | 0 .../Storage/NullAttestorArchiveStore.cs | 0 .../Storage/RedisAttestorDedupeStore.cs | 0 .../Storage/S3AttestorArchiveStore.cs | 0 .../Submission/AttestorSubmissionService.cs | 0 .../Submission/DefaultDsseCanonicalizer.cs | 0 .../AttestorVerificationService.cs | 0 .../AttestorSubmissionServiceTests.cs | 0 .../AttestorVerificationServiceTests.cs | 0 .../HttpRekorClientTests.cs | 0 .../StellaOps.Attestor.Tests.csproj | 7 +- .../StellaOps.Attestor.Tests/TestDoubles.cs | 0 .../StellaOps.Attestor.WebService/Program.cs | 0 .../StellaOps.Attestor.WebService.csproj | 15 +- .../StellaOps.Attestor/StellaOps.Attestor.sln | 0 .../StellaOps.Attestor/TASKS.md | 0 src/Authority/StellaOps.Authority.sln | 303 + .../StellaOps.Authority/AGENTS.md | 8 +- .../NetworkMaskMatcherTests.cs | 0 .../StellaOps.Auth.Abstractions.Tests.csproj | 0 .../StellaOpsPrincipalBuilderTests.cs | 0 .../StellaOpsProblemResultFactoryTests.cs | 0 .../StellaOpsScopesTests.cs | 108 +- .../AuthorityTelemetry.cs | 0 .../NetworkMask.cs | 0 .../NetworkMaskMatcher.cs | 0 .../README.NuGet.md | 0 .../StellaOps.Auth.Abstractions.csproj | 0 .../StellaOpsAuthenticationDefaults.cs | 0 .../StellaOpsClaimTypes.cs | 124 +- 
.../StellaOpsPrincipalBuilder.cs | 0 .../StellaOpsProblemResultFactory.cs | 0 .../StellaOpsScopes.cs | 578 +- .../StellaOpsServiceIdentities.cs | 54 +- .../StellaOpsTenancyDefaults.cs | 24 +- .../ServiceCollectionExtensionsTests.cs | 0 .../StellaOps.Auth.Client.Tests.csproj | 30 +- .../StellaOpsAuthClientOptionsTests.cs | 0 .../StellaOpsDiscoveryCacheTests.cs | 0 .../StellaOpsTokenClientTests.cs | 0 .../TokenCacheTests.cs | 0 .../StellaOps.Auth.Client/FileTokenCache.cs | 0 .../IStellaOpsTokenCache.cs | 0 .../IStellaOpsTokenClient.cs | 84 +- .../InMemoryTokenCache.cs | 0 .../StellaOps.Auth.Client/README.NuGet.md | 0 .../ServiceCollectionExtensions.cs | 0 .../StellaOps.Auth.Client.csproj | 7 +- .../StellaOpsAuthClientOptions.cs | 0 .../StellaOpsDiscoveryCache.cs | 0 .../StellaOpsJwksCache.cs | 0 .../StellaOpsTokenCacheEntry.cs | 0 .../StellaOpsTokenClient.cs | 472 +- .../StellaOpsTokenResult.cs | 0 .../ServiceCollectionExtensionsTests.cs | 0 ...llaOps.Auth.ServerIntegration.Tests.csproj | 0 .../StellaOpsResourceServerOptionsTests.cs | 110 +- ...StellaOpsScopeAuthorizationHandlerTests.cs | 398 +- .../README.NuGet.md | 0 .../ServiceCollectionExtensions.cs | 184 +- .../StellaOps.Auth.ServerIntegration.csproj | 9 +- .../StellaOpsAuthorityConfigurationManager.cs | 232 +- ...OpsAuthorizationPolicyBuilderExtensions.cs | 0 .../StellaOpsBypassEvaluator.cs | 0 .../StellaOpsResourceServerOptions.cs | 356 +- .../StellaOpsScopeAuthorizationHandler.cs | 404 +- .../StellaOpsScopeRequirement.cs | 0 .../Security/CryptoPasswordHasherTests.cs | 0 .../StandardClientProvisioningStoreTests.cs | 370 +- .../StandardPluginOptionsTests.cs | 0 .../StandardPluginRegistrarTests.cs | 708 +- .../StandardUserCredentialStoreTests.cs | 0 ...Ops.Authority.Plugin.Standard.Tests.csproj | 0 .../AGENTS.md | 0 .../Bootstrap/StandardPluginBootstrapper.cs | 88 +- .../Properties/AssemblyInfo.cs | 0 .../Security/IPasswordHasher.cs | 0 .../StandardClaimsEnricher.cs | 0 .../StandardIdentityProviderPlugin.cs | 0 .../StandardPluginOptions.cs | 0 .../StandardPluginRegistrar.cs | 224 +- ...StellaOps.Authority.Plugin.Standard.csproj | 11 +- .../StandardClientProvisioningStore.cs | 0 .../Storage/StandardUserCredentialStore.cs | 0 .../Storage/StandardUserDocument.cs | 0 .../TASKS.md | 40 +- .../AuthorityClientRegistrationTests.cs | 64 +- ...horityCredentialVerificationResultTests.cs | 0 ...horityIdentityProviderCapabilitiesTests.cs | 0 .../AuthorityPluginHealthResultTests.cs | 0 .../AuthorityPluginOperationResultTests.cs | 0 .../AuthorityUserDescriptorTests.cs | 0 .../AuthorityUserRegistrationTests.cs | 0 ...uthority.Plugins.Abstractions.Tests.csproj | 0 .../AuthorityClientMetadataKeys.cs | 0 .../AuthorityPluginContracts.cs | 422 +- .../AuthorityPluginRegistrationContext.cs | 0 .../AuthoritySecretHasher.cs | 0 .../IdentityProviderContracts.cs | 1794 +- ...aOps.Authority.Plugins.Abstractions.csproj | 7 +- .../AuthorityMongoDefaults.cs | 0 .../Class1.cs | 0 .../AuthorityBootstrapInviteDocument.cs | 0 .../AuthorityClientCertificateBinding.cs | 0 .../Documents/AuthorityClientDocument.cs | 0 .../AuthorityLoginAttemptDocument.cs | 164 +- .../Documents/AuthorityRevocationDocument.cs | 0 .../AuthorityRevocationExportStateDocument.cs | 0 .../Documents/AuthorityScopeDocument.cs | 0 .../Documents/AuthorityTokenDocument.cs | 184 +- .../Documents/AuthorityUserDocument.cs | 0 .../Extensions/ServiceCollectionExtensions.cs | 0 ...ityBootstrapInviteCollectionInitializer.cs | 0 .../AuthorityClientCollectionInitializer.cs | 0 
...horityLoginAttemptCollectionInitializer.cs | 70 +- .../AuthorityMongoInitializer.cs | 0 ...uthorityRevocationCollectionInitializer.cs | 0 .../AuthorityScopeCollectionInitializer.cs | 0 .../AuthorityTokenCollectionInitializer.cs | 0 .../AuthorityUserCollectionInitializer.cs | 0 .../IAuthorityCollectionInitializer.cs | 0 .../AuthorityMongoMigrationRunner.cs | 0 .../EnsureAuthorityCollectionsMigration.cs | 0 .../Migrations/IAuthorityMongoMigration.cs | 0 .../Options/AuthorityMongoOptions.cs | 0 .../Sessions/AuthorityMongoSessionAccessor.cs | 0 .../StellaOps.Authority.Storage.Mongo.csproj | 7 +- .../Stores/AuthorityBootstrapInviteStore.cs | 0 .../Stores/AuthorityClientStore.cs | 0 .../Stores/AuthorityLoginAttemptStore.cs | 0 .../AuthorityRevocationExportStateStore.cs | 0 .../Stores/AuthorityRevocationStore.cs | 0 .../Stores/AuthorityScopeStore.cs | 0 .../Stores/AuthorityTokenStore.cs | 0 .../Stores/AuthorityUserStore.cs | 0 .../Stores/IAuthorityBootstrapInviteStore.cs | 0 .../Stores/IAuthorityClientStore.cs | 0 .../Stores/IAuthorityLoginAttemptStore.cs | 0 .../IAuthorityRevocationExportStateStore.cs | 0 .../Stores/IAuthorityRevocationStore.cs | 0 .../Stores/IAuthorityScopeStore.cs | 0 .../Stores/IAuthorityTokenStore.cs | 0 .../Stores/IAuthorityUserStore.cs | 0 .../BootstrapInviteCleanupServiceTests.cs | 0 .../Console/ConsoleEndpointsTests.cs | 678 +- .../AuthorityIdentityProviderRegistryTests.cs | 420 +- .../AuthorityIdentityProviderSelectorTests.cs | 250 +- .../AuthorityWebApplicationFactory.cs | 96 +- .../OpenApi/OpenApiDiscoveryEndpointTests.cs | 180 +- .../ClientCredentialsAndTokenHandlersTests.cs | 5350 +-- .../OpenIddict/PasswordGrantHandlersTests.cs | 1032 +- .../TokenPersistenceIntegrationTests.cs | 792 +- .../Permalinks/VulnPermalinkServiceTests.cs | 302 +- .../Plugins/AuthorityPluginLoaderTests.cs | 386 +- .../AuthorityRateLimiterIntegrationTests.cs | 0 ...thorityRateLimiterMetadataAccessorTests.cs | 72 +- ...orityRateLimiterMetadataMiddlewareTests.cs | 0 .../RateLimiting/AuthorityRateLimiterTests.cs | 0 .../AuthoritySigningKeyManagerTests.cs | 0 .../StellaOps.Authority.Tests.csproj | 33 +- .../TestEnvironment.cs | 26 +- .../StellaOps.Authority.sln | 0 .../Audit/AuthorityAuditSink.cs | 474 +- .../AuthorityHttpHeaders.cs | 14 +- .../AuthorityIdentityProviderRegistry.cs | 292 +- .../AuthorityPluginRegistry.cs | 0 .../AuthorityRateLimiter.cs | 0 .../AuthorityTelemetryConfiguration.cs | 0 .../Bootstrap/BootstrapApiKeyFilter.cs | 0 .../BootstrapInviteCleanupService.cs | 0 .../Bootstrap/BootstrapRequests.cs | 0 .../Console/ConsoleEndpointExtensions.cs | 1094 +- .../Console/TenantHeaderFilter.cs | 150 +- .../AuthorityOpenApiDocumentProvider.cs | 628 +- .../OpenApiDiscoveryEndpointExtensions.cs | 282 +- .../AuthorityIdentityProviderSelector.cs | 128 +- .../AuthorityOpenIddictConstants.cs | 0 .../Handlers/ClientCredentialsAuditHelper.cs | 538 +- .../Handlers/ClientCredentialsHandlers.cs | 0 .../OpenIddict/Handlers/DpopHandlers.cs | 0 .../Handlers/PasswordGrantHandlers.cs | 1752 +- .../OpenIddict/Handlers/RevocationHandlers.cs | 0 .../Handlers/TokenPersistenceHandlers.cs | 0 .../Handlers/TokenValidationHandlers.cs | 988 +- .../OpenIddict/TokenRequestTamperInspector.cs | 228 +- .../Permalinks/VulnPermalinkRequest.cs | 22 +- .../Permalinks/VulnPermalinkResponse.cs | 22 +- .../Permalinks/VulnPermalinkService.cs | 362 +- .../Plugins/AuthorityPluginLoader.cs | 684 +- .../AuthorityPluginRegistrationSummary.cs | 0 .../StellaOps.Authority/Program.Partial.cs | 6 +- .../StellaOps.Authority/Program.cs 
| 2670 +- .../Properties/AssemblyInfo.cs | 0 .../Properties/launchSettings.json | 0 .../AuthorityRateLimiterFeature.cs | 0 .../AuthorityRateLimiterMetadata.cs | 160 +- .../AuthorityRateLimiterMetadataAccessor.cs | 258 +- .../AuthorityRateLimiterMetadataMiddleware.cs | 0 ...uthorityRateLimiterPartitionKeyResolver.cs | 0 ...ateLimitingApplicationBuilderExtensions.cs | 0 .../AuthorityRevocationExportService.cs | 0 .../Revocation/RevocationBundleBuildResult.cs | 0 .../Revocation/RevocationBundleBuilder.cs | 0 .../Revocation/RevocationBundleModel.cs | 0 .../Revocation/RevocationBundleSignature.cs | 0 .../Revocation/RevocationBundleSigner.cs | 0 .../Revocation/RevocationEntryModel.cs | 0 .../Revocation/RevocationExportPackage.cs | 0 .../Revocation/RevocationExportResponse.cs | 0 ...horityClientCertificateValidationResult.cs | 0 .../AuthorityClientCertificateValidator.cs | 0 .../AuthoritySenderConstraintKinds.cs | 0 .../IAuthorityClientCertificateValidator.cs | 0 .../Signing/AuthorityJwksService.cs | 0 .../Signing/AuthoritySigningKeyManager.cs | 0 .../Signing/AuthoritySigningKeyRequest.cs | 0 .../Signing/AuthoritySigningKeyStatus.cs | 0 .../Signing/FileAuthoritySigningKeySource.cs | 0 .../Signing/IAuthoritySigningKeySource.cs | 0 .../Signing/SigningRotationRequest.cs | 0 .../StellaOps.Authority.csproj | 43 +- .../Tenants/AuthorityTenantCatalog.cs | 86 +- .../appsettings.Development.json | 0 .../StellaOps.Authority/appsettings.json | 0 .../StellaOps.Authority/TASKS.md | 4 +- src/Bench/StellaOps.Bench.sln | 412 + .../LinkNotMerge.Vex/README.md | 2 +- .../BaselineLoaderTests.cs | 74 +- .../BenchmarkScenarioReportTests.cs | 166 +- ...llaOps.Bench.LinkNotMerge.Vex.Tests.csproj | 56 +- .../VexScenarioRunnerTests.cs | 68 +- .../Baseline/BaselineEntry.cs | 36 +- .../Baseline/BaselineLoader.cs | 174 +- .../Program.cs | 752 +- .../Properties/AssemblyInfo.cs | 6 +- .../Reporting/BenchmarkJsonWriter.cs | 302 +- .../Reporting/BenchmarkScenarioReport.cs | 178 +- .../Reporting/PrometheusWriter.cs | 188 +- .../Statistics.cs | 168 +- .../StellaOps.Bench.LinkNotMerge.Vex.csproj | 32 +- .../VexLinksetAggregator.cs | 332 +- .../VexObservationGenerator.cs | 504 +- .../VexScenarioConfig.cs | 366 +- .../VexScenarioExecutionResult.cs | 28 +- .../VexScenarioResult.cs | 86 +- .../VexScenarioRunner.cs | 276 +- .../LinkNotMerge.Vex/baseline.csv | 8 +- .../LinkNotMerge.Vex/config.json | 108 +- .../StellaOps.Bench/LinkNotMerge/README.md | 2 +- .../BaselineLoaderTests.cs | 76 +- .../BenchmarkScenarioReportTests.cs | 162 +- .../LinkNotMergeScenarioRunnerTests.cs | 76 +- .../StellaOps.Bench.LinkNotMerge.Tests.csproj | 56 +- .../Baseline/BaselineEntry.cs | 36 +- .../Baseline/BaselineLoader.cs | 174 +- .../BenchmarkConfig.cs | 420 +- .../LinkNotMergeScenarioRunner.cs | 270 +- .../LinksetAggregator.cs | 280 +- .../ObservationData.cs | 540 +- .../StellaOps.Bench.LinkNotMerge/Program.cs | 750 +- .../Properties/AssemblyInfo.cs | 6 +- .../Reporting/BenchmarkJsonWriter.cs | 302 +- .../Reporting/BenchmarkScenarioReport.cs | 178 +- .../Reporting/PrometheusWriter.cs | 202 +- .../ScenarioExecutionResult.cs | 28 +- .../ScenarioResult.cs | 84 +- .../ScenarioStatistics.cs | 168 +- .../StellaOps.Bench.LinkNotMerge.csproj | 32 +- .../StellaOps.Bench/LinkNotMerge/baseline.csv | 8 +- .../StellaOps.Bench/LinkNotMerge/config.json | 114 +- .../StellaOps.Bench/Notify/README.md | 2 +- .../BaselineLoaderTests.cs | 76 +- .../BenchmarkScenarioReportTests.cs | 170 +- .../NotifyScenarioRunnerTests.cs | 66 +- .../PrometheusWriterTests.cs | 128 +- 
.../StellaOps.Bench.Notify.Tests.csproj | 54 +- .../Baseline/BaselineEntry.cs | 26 +- .../Baseline/BaselineLoader.cs | 174 +- .../StellaOps.Bench.Notify/BenchmarkConfig.cs | 440 +- .../DispatchAccumulator.cs | 52 +- .../NotifyScenarioRunner.cs | 772 +- .../Notify/StellaOps.Bench.Notify/Program.cs | 728 +- .../Properties/AssemblyInfo.cs | 6 +- .../Reporting/BenchmarkJsonWriter.cs | 294 +- .../Reporting/BenchmarkScenarioReport.cs | 168 +- .../Reporting/PrometheusWriter.cs | 172 +- .../ScenarioExecutionResult.cs | 34 +- .../StellaOps.Bench.Notify/ScenarioResult.cs | 92 +- .../ScenarioStatistics.cs | 174 +- .../StellaOps.Bench.Notify.csproj | 5 +- .../StellaOps.Bench/Notify/baseline.csv | 8 +- .../StellaOps.Bench/Notify/config.json | 94 +- .../StellaOps.Bench/PolicyEngine/README.md | 2 +- .../Baseline/BaselineEntry.cs | 24 +- .../Baseline/BaselineLoader.cs | 172 +- .../BenchmarkConfig.cs | 310 +- .../PathUtilities.cs | 30 +- .../PolicyScenarioRunner.cs | 498 +- .../StellaOps.Bench.PolicyEngine/Program.cs | 746 +- .../Reporting/BenchmarkJsonWriter.cs | 250 +- .../Reporting/BenchmarkScenarioReport.cs | 164 +- .../Reporting/PrometheusWriter.cs | 166 +- .../ScenarioResult.cs | 220 +- .../StellaOps.Bench.PolicyEngine.csproj | 5 +- .../StellaOps.Bench/PolicyEngine/baseline.csv | 4 +- .../StellaOps.Bench/PolicyEngine/config.json | 38 +- .../Scanner.Analyzers/README.md | 4 +- .../BaselineLoaderTests.cs | 74 +- .../BenchmarkJsonWriterTests.cs | 82 +- .../BenchmarkScenarioReportTests.cs | 116 +- .../PrometheusWriterTests.cs | 64 +- ...llaOps.Bench.ScannerAnalyzers.Tests.csproj | 52 +- .../Baseline/BaselineEntry.cs | 18 +- .../Baseline/BaselineLoader.cs | 176 +- .../BenchmarkConfig.cs | 208 +- .../Program.cs | 786 +- .../Reporting/BenchmarkJsonWriter.cs | 216 +- .../Reporting/BenchmarkScenarioReport.cs | 110 +- .../Reporting/PrometheusWriter.cs | 118 +- .../ScenarioResult.cs | 48 +- .../ScenarioRunners.cs | 570 +- .../StellaOps.Bench.ScannerAnalyzers.csproj | 24 + .../Scanner.Analyzers/baseline.csv | 14 +- .../Scanner.Analyzers/config.json | 6 +- .../Scanner.Analyzers/lang/README.md | 4 +- .../lang/dotnet/syft-comparison-20251023.csv | 4 +- .../lang/go/syft-comparison-20251021.csv | 4 +- .../lang/python/hash-throughput-20251023.csv | 6 +- src/{ => Bench}/StellaOps.Bench/TASKS.md | 2 +- src/Cartographer/StellaOps.Cartographer.sln | 179 + .../StellaOps.Cartographer/AGENTS.md | 36 +- .../Options/CartographerAuthorityOptions.cs | 202 +- ...artographerAuthorityOptionsConfigurator.cs | 74 +- .../StellaOps.Cartographer/Program.cs | 78 +- .../Properties/AssemblyInfo.cs | 6 +- .../StellaOps.Cartographer.csproj | 18 + .../StellaOps.Cartographer/TASKS.md | 12 +- ...rapherAuthorityOptionsConfiguratorTests.cs | 102 +- .../StellaOps.Cartographer.Tests.csproj | 5 +- src/Cli/StellaOps.Cli.sln | 169 + src/{ => Cli}/StellaOps.Cli/AGENTS.md | 50 +- .../StellaOps.Cli/Commands/CommandFactory.cs | 0 .../StellaOps.Cli/Commands/CommandHandlers.cs | 11276 +++---- .../Configuration/AuthorityTokenUtilities.cs | 116 +- .../Configuration/CliBootstrapper.cs | 836 +- .../Configuration/StellaOpsCliOptions.cs | 174 +- .../Plugins/CliCommandModuleLoader.cs | 556 +- .../Plugins/CliPluginManifest.cs | 78 +- .../Plugins/CliPluginManifestLoader.cs | 300 +- .../Plugins/ICliCommandModule.cs | 40 +- .../Plugins/RestartOnlyCliPluginGuard.cs | 82 +- src/{ => Cli}/StellaOps.Cli/Program.cs | 0 .../Prompts/TrivyDbExportPrompt.cs | 0 .../StellaOps.Cli/Properties/AssemblyInfo.cs | 0 .../Services/AuthorityDiagnosticsReporter.cs | 0 
.../Services/AuthorityRevocationClient.cs | 446 +- .../Services/BackendOperationsClient.cs | 4972 +-- .../Services/ConcelierObservationsClient.cs | 500 +- .../Services/IAuthorityRevocationClient.cs | 0 .../Services/IBackendOperationsClient.cs | 0 .../Services/IConcelierObservationsClient.cs | 24 +- .../Services/IScannerExecutor.cs | 0 .../Services/IScannerInstaller.cs | 0 .../Models/AdvisoryObservationsModels.cs | 234 +- .../Services/Models/AocIngestDryRunModels.cs | 186 +- .../Services/Models/AocVerifyModels.cs | 200 +- .../Models/AuthorityRevocationExportResult.cs | 0 .../Models/ExcititorExportDownloadResult.cs | 0 .../Models/ExcititorOperationResult.cs | 0 .../Models/ExcititorProviderSummary.cs | 0 .../Services/Models/JobTriggerResult.cs | 0 .../Services/Models/OfflineKitModels.cs | 222 +- .../Services/Models/PolicyActivationModels.cs | 60 +- .../Services/Models/PolicyFindingsModels.cs | 100 +- .../Services/Models/PolicySimulationModels.cs | 52 +- .../Models/RuntimePolicyEvaluationModels.cs | 0 .../Services/Models/ScannerArtifactResult.cs | 0 .../Models/Transport/JobRunResponse.cs | 0 .../Models/Transport/JobTriggerRequest.cs | 0 .../Models/Transport/OfflineKitTransport.cs | 206 +- .../Transport/PolicyActivationTransport.cs | 104 +- .../Transport/PolicyFindingsTransport.cs | 164 +- .../Transport/PolicySimulationTransport.cs | 114 +- .../Models/Transport/ProblemDocument.cs | 0 .../RuntimePolicyEvaluationTransport.cs | 0 .../Services/PolicyApiException.cs | 36 +- .../Services/ScannerExecutionResult.cs | 0 .../StellaOps.Cli/Services/ScannerExecutor.cs | 0 .../Services/ScannerInstaller.cs | 0 .../StellaOps.Cli/StellaOps.Cli.csproj | 13 +- src/{ => Cli}/StellaOps.Cli/TASKS.md | 2 +- .../Telemetry/CliActivitySource.cs | 0 .../StellaOps.Cli/Telemetry/CliMetrics.cs | 0 .../StellaOps.Cli/Telemetry/VerbosityState.cs | 0 src/{ => Cli}/StellaOps.Cli/appsettings.json | 0 .../NonCoreCliCommandModule.cs | 832 +- .../StellaOps.Cli.Plugins.NonCore.csproj | 44 +- .../Commands/CommandHandlersTests.cs | 4978 +-- .../Configuration/CliBootstrapperTests.cs | 0 .../Plugins/CliCommandModuleLoaderTests.cs | 86 +- .../Plugins/RestartOnlyCliPluginGuardTests.cs | 58 +- .../AuthorityDiagnosticsReporterTests.cs | 0 .../Services/BackendOperationsClientTests.cs | 2262 +- .../StellaOps.Cli.Tests.csproj | 30 + .../Testing/TestHelpers.cs | 0 .../__Tests}/StellaOps.Cli.Tests/UnitTest1.cs | 0 .../StellaOps.Cli.Tests}/xunit.runner.json | 0 .../AssemblyInfo.cs | 0 .../MongoFixtureCollection.cs | 0 .../StellaOps.Concelier.WebService/AGENTS.md | 0 .../Contracts/AdvisoryObservationContracts.cs | 32 +- .../Contracts/AdvisoryRawContracts.cs | 254 +- .../Diagnostics/HealthContracts.cs | 0 .../Diagnostics/IngestionMetrics.cs | 44 +- .../Diagnostics/JobMetrics.cs | 0 .../Diagnostics/ProblemTypes.cs | 0 .../Diagnostics/ServiceStatus.cs | 0 .../Extensions/AdvisoryRawRequestMapper.cs | 314 +- .../Extensions/ConfigurationExtensions.cs | 0 .../Extensions/JobRegistrationExtensions.cs | 0 .../Extensions/MirrorEndpointExtensions.cs | 0 .../Extensions/TelemetryExtensions.cs | 0 .../Filters/JobAuthorizationAuditFilter.cs | 0 .../Jobs/JobDefinitionResponse.cs | 0 .../Jobs/JobRunResponse.cs | 0 .../Jobs/JobTriggerRequest.cs | 0 .../Options/ConcelierOptions.cs | 0 .../Options/ConcelierOptionsPostConfigure.cs | 0 .../Options/ConcelierOptionsValidator.cs | 0 .../StellaOps.Concelier.WebService/Program.cs | 0 .../Properties/launchSettings.json | 0 .../Services/MirrorFileLocator.cs | 0 .../Services/MirrorRateLimiter.cs | 0 
.../StellaOps.Concelier.WebService.csproj | 38 + .../StellaOps.Concelier.WebService/TASKS.md | 190 +- src/Concelier/StellaOps.Concelier.sln | 1336 + .../AGENTS.md | 0 .../AcscConnector.cs | 0 .../AcscConnectorPlugin.cs | 0 .../AcscDependencyInjectionRoutine.cs | 0 .../AcscServiceCollectionExtensions.cs | 0 .../Configuration/AcscFeedOptions.cs | 0 .../Configuration/AcscOptions.cs | 0 .../Internal/AcscCursor.cs | 0 .../Internal/AcscDiagnostics.cs | 0 .../Internal/AcscDocumentMetadata.cs | 0 .../Internal/AcscDto.cs | 0 .../Internal/AcscFeedParser.cs | 0 .../Internal/AcscMapper.cs | 0 .../Jobs.cs | 0 .../Properties/AssemblyInfo.cs | 0 .../README.md | 136 +- .../StellaOps.Concelier.Connector.Acsc.csproj | 24 +- .../TASKS.md | 0 .../AGENTS.md | 0 .../CccsConnector.cs | 0 .../CccsConnectorPlugin.cs | 0 .../CccsDependencyInjectionRoutine.cs | 0 .../CccsServiceCollectionExtensions.cs | 0 .../Configuration/CccsOptions.cs | 0 .../Internal/CccsAdvisoryDto.cs | 0 .../Internal/CccsCursor.cs | 0 .../Internal/CccsDiagnostics.cs | 0 .../Internal/CccsFeedClient.cs | 0 .../Internal/CccsFeedModels.cs | 0 .../Internal/CccsHtmlParser.cs | 0 .../Internal/CccsMapper.cs | 0 .../Internal/CccsRawAdvisoryDocument.cs | 0 .../Jobs.cs | 0 .../Properties/AssemblyInfo.cs | 0 .../StellaOps.Concelier.Connector.Cccs.csproj | 21 +- .../TASKS.md | 0 .../AGENTS.md | 0 .../CertBundConnector.cs | 0 .../CertBundConnectorPlugin.cs | 0 .../CertBundDependencyInjectionRoutine.cs | 0 .../CertBundServiceCollectionExtensions.cs | 0 .../Configuration/CertBundOptions.cs | 0 .../Internal/CertBundAdvisoryDto.cs | 0 .../Internal/CertBundCursor.cs | 0 .../Internal/CertBundDetailParser.cs | 0 .../Internal/CertBundDetailResponse.cs | 0 .../Internal/CertBundDiagnostics.cs | 0 .../Internal/CertBundDocumentMetadata.cs | 0 .../Internal/CertBundFeedClient.cs | 0 .../Internal/CertBundFeedItem.cs | 0 .../Internal/CertBundMapper.cs | 0 .../Jobs.cs | 0 .../README.md | 0 ...llaOps.Concelier.Connector.CertBund.csproj | 21 +- .../TASKS.md | 0 .../AGENTS.md | 0 .../CertCcConnector.cs | 0 .../CertCcConnectorPlugin.cs | 0 .../CertCcDependencyInjectionRoutine.cs | 0 .../CertCcServiceCollectionExtensions.cs | 0 .../Configuration/CertCcOptions.cs | 0 .../FEEDCONN-CERTCC-02-009_PLAN.md | 118 +- .../FEEDCONN-CERTCC-02-012_HANDOFF.md | 40 +- .../Internal/CertCcCursor.cs | 0 .../Internal/CertCcDiagnostics.cs | 0 .../Internal/CertCcMapper.cs | 0 .../Internal/CertCcNoteDto.cs | 0 .../Internal/CertCcNoteParser.cs | 0 .../Internal/CertCcSummaryParser.cs | 0 .../Internal/CertCcSummaryPlan.cs | 0 .../Internal/CertCcSummaryPlanner.cs | 0 .../Internal/CertCcVendorStatementParser.cs | 0 .../Jobs.cs | 0 .../Properties/AssemblyInfo.cs | 0 .../README.md | 126 +- ...tellaOps.Concelier.Connector.CertCc.csproj | 17 +- .../TASKS.md | 0 .../AGENTS.md | 0 .../CertFrConnector.cs | 0 .../CertFrConnectorPlugin.cs | 0 .../CertFrDependencyInjectionRoutine.cs | 0 .../CertFrServiceCollectionExtensions.cs | 0 .../Configuration/CertFrOptions.cs | 0 .../Internal/CertFrCursor.cs | 0 .../Internal/CertFrDocumentMetadata.cs | 0 .../Internal/CertFrDto.cs | 0 .../Internal/CertFrFeedClient.cs | 0 .../Internal/CertFrFeedItem.cs | 0 .../Internal/CertFrMapper.cs | 0 .../Internal/CertFrParser.cs | 0 .../Jobs.cs | 0 ...tellaOps.Concelier.Connector.CertFr.csproj | 27 +- .../TASKS.md | 0 .../AGENTS.md | 0 .../CertInConnector.cs | 0 .../CertInConnectorPlugin.cs | 0 .../CertInDependencyInjectionRoutine.cs | 0 .../CertInServiceCollectionExtensions.cs | 0 .../Configuration/CertInOptions.cs | 0 
.../Internal/CertInAdvisoryDto.cs | 0 .../Internal/CertInClient.cs | 0 .../Internal/CertInCursor.cs | 0 .../Internal/CertInDetailParser.cs | 0 .../Internal/CertInListingItem.cs | 0 .../Jobs.cs | 0 ...tellaOps.Concelier.Connector.CertIn.csproj | 33 +- .../TASKS.md | 0 .../AGENTS.md | 0 .../Cursors/PaginationPlanner.cs | 0 .../Cursors/TimeWindowCursorOptions.cs | 0 .../Cursors/TimeWindowCursorPlanner.cs | 0 .../Cursors/TimeWindowCursorState.cs | 0 .../DocumentStatuses.cs | 0 .../Fetch/CryptoJitterSource.cs | 0 .../Fetch/IJitterSource.cs | 0 .../Fetch/RawDocumentStorage.cs | 0 .../Fetch/SourceFetchContentResult.cs | 0 .../Fetch/SourceFetchRequest.cs | 0 .../Fetch/SourceFetchResult.cs | 0 .../Fetch/SourceFetchService.cs | 0 .../Fetch/SourceRetryPolicy.cs | 0 .../Html/HtmlContentSanitizer.cs | 0 .../Http/AllowlistedHttpMessageHandler.cs | 0 .../Http/ServiceCollectionExtensions.cs | 412 +- .../SourceHttpClientConfigurationBinder.cs | 0 .../Http/SourceHttpClientOptions.cs | 0 .../Json/IJsonSchemaValidator.cs | 0 .../Json/JsonSchemaValidationError.cs | 0 .../Json/JsonSchemaValidationException.cs | 0 .../Json/JsonSchemaValidator.cs | 0 .../Packages/PackageCoordinateHelper.cs | 0 .../Pdf/PdfTextExtractor.cs | 0 .../Properties/AssemblyInfo.cs | 0 .../State/SourceStateSeedModels.cs | 0 .../State/SourceStateSeedProcessor.cs | 0 ...tellaOps.Concelier.Connector.Common.csproj | 5 +- .../TASKS.md | 0 .../Telemetry/SourceDiagnostics.cs | 0 .../Testing/CannedHttpMessageHandler.cs | 0 .../Url/UrlNormalizer.cs | 0 .../Xml/IXmlSchemaValidator.cs | 0 .../Xml/XmlSchemaValidationError.cs | 0 .../Xml/XmlSchemaValidationException.cs | 0 .../Xml/XmlSchemaValidator.cs | 0 .../AGENTS.md | 0 .../Configuration/CveOptions.cs | 0 .../CveConnector.cs | 0 .../CveConnectorPlugin.cs | 0 .../CveDependencyInjectionRoutine.cs | 0 .../CveServiceCollectionExtensions.cs | 0 .../Internal/CveCursor.cs | 0 .../Internal/CveDiagnostics.cs | 0 .../Internal/CveListParser.cs | 0 .../Internal/CveMapper.cs | 0 .../Internal/CveRecordDto.cs | 0 .../Internal/CveRecordParser.cs | 0 .../StellaOps.Concelier.Connector.Cve/Jobs.cs | 0 .../StellaOps.Concelier.Connector.Cve.csproj | 32 +- .../TASKS.md | 2 +- .../AssemblyInfo.cs | 0 .../Configuration/DebianOptions.cs | 0 .../DebianConnector.cs | 0 .../DebianConnectorPlugin.cs | 0 .../DebianDependencyInjectionRoutine.cs | 0 .../DebianServiceCollectionExtensions.cs | 0 .../Internal/DebianAdvisoryDto.cs | 0 .../Internal/DebianCursor.cs | 0 .../Internal/DebianDetailMetadata.cs | 0 .../Internal/DebianFetchCacheEntry.cs | 0 .../Internal/DebianHtmlParser.cs | 0 .../Internal/DebianListEntry.cs | 0 .../Internal/DebianListParser.cs | 0 .../Internal/DebianMapper.cs | 0 .../Jobs.cs | 0 ...s.Concelier.Connector.Distro.Debian.csproj | 35 +- .../AGENTS.md | 0 .../CONFLICT_RESOLVER_NOTES.md | 50 +- .../Configuration/RedHatOptions.cs | 0 .../Internal/Models/RedHatCsafModels.cs | 0 .../Internal/RedHatCursor.cs | 0 .../Internal/RedHatMapper.cs | 0 .../Internal/RedHatSummaryItem.cs | 0 .../Jobs.cs | 0 .../Properties/AssemblyInfo.cs | 0 .../RedHatConnector.cs | 0 .../RedHatConnectorPlugin.cs | 0 .../RedHatDependencyInjectionRoutine.cs | 0 .../RedHatServiceCollectionExtensions.cs | 0 ...s.Concelier.Connector.Distro.RedHat.csproj | 31 +- .../TASKS.md | 0 .../AssemblyInfo.cs | 0 .../Configuration/SuseOptions.cs | 0 .../Internal/SuseAdvisoryDto.cs | 0 .../Internal/SuseChangeRecord.cs | 0 .../Internal/SuseChangesParser.cs | 0 .../Internal/SuseCsafParser.cs | 0 .../Internal/SuseCursor.cs | 0 .../Internal/SuseFetchCacheEntry.cs | 
0 .../Internal/SuseMapper.cs | 0 .../Jobs.cs | 0 ...Ops.Concelier.Connector.Distro.Suse.csproj | 35 +- .../SuseConnector.cs | 0 .../SuseConnectorPlugin.cs | 0 .../SuseDependencyInjectionRoutine.cs | 0 .../SuseServiceCollectionExtensions.cs | 0 .../Configuration/UbuntuOptions.cs | 0 .../Internal/UbuntuCursor.cs | 0 .../Internal/UbuntuFetchCacheEntry.cs | 0 .../Internal/UbuntuMapper.cs | 0 .../Internal/UbuntuNoticeDto.cs | 0 .../Internal/UbuntuNoticeParser.cs | 0 .../Jobs.cs | 0 ...s.Concelier.Connector.Distro.Ubuntu.csproj | 35 +- .../TASKS.md | 0 .../UbuntuConnector.cs | 0 .../UbuntuConnectorPlugin.cs | 0 .../UbuntuDependencyInjectionRoutine.cs | 0 .../UbuntuServiceCollectionExtensions.cs | 0 .../AGENTS.md | 0 .../Configuration/GhsaOptions.cs | 0 .../GhsaConnector.cs | 0 .../GhsaConnectorPlugin.cs | 0 .../GhsaDependencyInjectionRoutine.cs | 0 .../GhsaServiceCollectionExtensions.cs | 0 .../Internal/GhsaCursor.cs | 0 .../Internal/GhsaDiagnostics.cs | 0 .../Internal/GhsaListParser.cs | 0 .../Internal/GhsaMapper.cs | 0 .../Internal/GhsaRateLimitParser.cs | 0 .../Internal/GhsaRateLimitSnapshot.cs | 0 .../Internal/GhsaRecordDto.cs | 0 .../Internal/GhsaRecordParser.cs | 0 .../Jobs.cs | 0 .../Properties/AssemblyInfo.cs | 0 .../StellaOps.Concelier.Connector.Ghsa.csproj | 26 +- .../TASKS.md | 4 +- .../AGENTS.md | 0 .../Configuration/IcsCisaOptions.cs | 0 .../HANDOVER.md | 42 +- .../IcsCisaConnector.cs | 0 .../IcsCisaConnectorPlugin.cs | 0 .../IcsCisaDependencyInjectionRoutine.cs | 0 .../IcsCisaServiceCollectionExtensions.cs | 0 .../Internal/IcsCisaAdvisoryDto.cs | 0 .../Internal/IcsCisaAttachmentDto.cs | 0 .../Internal/IcsCisaCursor.cs | 0 .../Internal/IcsCisaDiagnostics.cs | 0 .../Internal/IcsCisaFeedDto.cs | 0 .../Internal/IcsCisaFeedParser.cs | 0 .../Jobs.cs | 0 ...llaOps.Concelier.Connector.Ics.Cisa.csproj | 57 +- .../TASKS.md | 30 +- .../AGENTS.md | 0 .../Configuration/KasperskyOptions.cs | 0 .../Internal/KasperskyAdvisoryDto.cs | 0 .../Internal/KasperskyAdvisoryParser.cs | 0 .../Internal/KasperskyCursor.cs | 0 .../Internal/KasperskyFeedClient.cs | 0 .../Internal/KasperskyFeedItem.cs | 0 .../Jobs.cs | 0 .../KasperskyConnector.cs | 0 .../KasperskyConnectorPlugin.cs | 0 .../KasperskyDependencyInjectionRoutine.cs | 0 .../KasperskyServiceCollectionExtensions.cs | 0 ...s.Concelier.Connector.Ics.Kaspersky.csproj | 33 +- .../TASKS.md | 0 .../AGENTS.md | 0 .../Configuration/JvnOptions.cs | 0 .../Internal/JvnAdvisoryMapper.cs | 0 .../Internal/JvnConstants.cs | 0 .../Internal/JvnCursor.cs | 0 .../Internal/JvnDetailDto.cs | 0 .../Internal/JvnDetailParser.cs | 0 .../Internal/JvnOverviewItem.cs | 0 .../Internal/JvnOverviewPage.cs | 0 .../Internal/JvnSchemaProvider.cs | 0 .../Internal/JvnSchemaValidationException.cs | 0 .../Internal/MyJvnClient.cs | 0 .../StellaOps.Concelier.Connector.Jvn/Jobs.cs | 0 .../JvnConnector.cs | 0 .../JvnConnectorPlugin.cs | 0 .../JvnDependencyInjectionRoutine.cs | 0 .../JvnServiceCollectionExtensions.cs | 0 .../Schemas/data_marking.xsd | 0 .../Schemas/jvnrss_3.2.xsd | 0 .../Schemas/mod_sec_3.0.xsd | 0 .../Schemas/status_3.3.xsd | 0 .../Schemas/tlp_marking.xsd | 0 .../Schemas/vuldef_3.2.xsd | 0 .../Schemas/xml.xsd | 0 .../StellaOps.Concelier.Connector.Jvn.csproj | 31 +- .../TASKS.md | 0 .../AGENTS.md | 0 .../Configuration/KevOptions.cs | 0 .../Internal/KevCatalogDto.cs | 0 .../Internal/KevCursor.cs | 0 .../Internal/KevDiagnostics.cs | 0 .../Internal/KevMapper.cs | 0 .../Internal/KevSchemaProvider.cs | 0 .../StellaOps.Concelier.Connector.Kev/Jobs.cs | 0 .../KevConnector.cs | 0 
.../KevConnectorPlugin.cs | 0 .../KevDependencyInjectionRoutine.cs | 0 .../KevServiceCollectionExtensions.cs | 0 .../Schemas/kev-catalog.schema.json | 0 .../StellaOps.Concelier.Connector.Kev.csproj | 22 +- .../TASKS.md | 24 +- .../AGENTS.md | 0 .../Configuration/KisaOptions.cs | 0 .../Internal/KisaCursor.cs | 0 .../Internal/KisaDetailParser.cs | 0 .../Internal/KisaDetailResponse.cs | 0 .../Internal/KisaDiagnostics.cs | 0 .../Internal/KisaDocumentMetadata.cs | 0 .../Internal/KisaFeedClient.cs | 0 .../Internal/KisaFeedItem.cs | 0 .../Internal/KisaMapper.cs | 0 .../Jobs.cs | 0 .../KisaConnector.cs | 0 .../KisaConnectorPlugin.cs | 0 .../KisaDependencyInjectionRoutine.cs | 0 .../KisaServiceCollectionExtensions.cs | 0 .../StellaOps.Concelier.Connector.Kisa.csproj | 26 +- .../TASKS.md | 0 .../AGENTS.md | 0 .../Configuration/NvdOptions.cs | 0 .../Internal/NvdCursor.cs | 0 .../Internal/NvdDiagnostics.cs | 0 .../Internal/NvdMapper.cs | 0 .../Internal/NvdSchemaProvider.cs | 0 .../NvdConnector.cs | 0 .../NvdConnectorPlugin.cs | 0 .../NvdServiceCollectionExtensions.cs | 0 .../Properties/AssemblyInfo.cs | 0 .../Schemas/nvd-vulnerability.schema.json | 0 .../StellaOps.Concelier.Connector.Nvd.csproj | 35 +- .../TASKS.md | 18 +- .../AGENTS.md | 0 .../Configuration/OsvOptions.cs | 0 .../Internal/OsvCursor.cs | 0 .../Internal/OsvDiagnostics.cs | 0 .../Internal/OsvMapper.cs | 0 .../Internal/OsvVulnerabilityDto.cs | 0 .../StellaOps.Concelier.Connector.Osv/Jobs.cs | 0 .../OsvConnector.cs | 0 .../OsvConnectorPlugin.cs | 0 .../OsvDependencyInjectionRoutine.cs | 0 .../OsvServiceCollectionExtensions.cs | 0 .../Properties/AssemblyInfo.cs | 0 .../StellaOps.Concelier.Connector.Osv.csproj | 47 +- .../TASKS.md | 6 +- .../AGENTS.md | 0 .../Configuration/RuBduOptions.cs | 0 .../Internal/RuBduCursor.cs | 0 .../Internal/RuBduDiagnostics.cs | 0 .../Internal/RuBduMapper.cs | 0 .../Internal/RuBduVulnerabilityDto.cs | 0 .../Internal/RuBduXmlParser.cs | 0 .../Jobs.cs | 0 .../Properties/AssemblyInfo.cs | 0 .../README.md | 4 +- .../RuBduConnector.cs | 0 .../RuBduConnectorPlugin.cs | 0 .../RuBduDependencyInjectionRoutine.cs | 0 .../RuBduServiceCollectionExtensions.cs | 0 ...tellaOps.Concelier.Connector.Ru.Bdu.csproj | 37 +- .../TASKS.md | 22 +- .../AGENTS.md | 0 .../Configuration/RuNkckiOptions.cs | 0 .../Internal/RuNkckiCursor.cs | 0 .../Internal/RuNkckiDiagnostics.cs | 0 .../Internal/RuNkckiJsonParser.cs | 0 .../Internal/RuNkckiMapper.cs | 0 .../Internal/RuNkckiVulnerabilityDto.cs | 0 .../Jobs.cs | 0 .../Properties/AssemblyInfo.cs | 0 .../RuNkckiConnector.cs | 0 .../RuNkckiConnectorPlugin.cs | 0 .../RuNkckiDependencyInjectionRoutine.cs | 0 .../RuNkckiServiceCollectionExtensions.cs | 0 ...llaOps.Concelier.Connector.Ru.Nkcki.csproj | 45 +- .../TASKS.md | 0 .../Client/MirrorManifestClient.cs | 0 .../Internal/MirrorAdvisoryMapper.cs | 406 +- .../Internal/MirrorBundleDocument.cs | 28 +- .../Internal/MirrorIndexDocument.cs | 0 .../Internal/StellaOpsMirrorCursor.cs | 0 .../Jobs.cs | 0 .../Properties/AssemblyInfo.cs | 6 +- .../Security/MirrorSignatureVerifier.cs | 546 +- .../StellaOpsMirrorConnectorOptions.cs | 0 ...Concelier.Connector.StellaOpsMirror.csproj | 33 +- .../StellaOpsMirrorConnector.cs | 1146 +- .../StellaOpsMirrorConnectorPlugin.cs | 0 ...ellaOpsMirrorDependencyInjectionRoutine.cs | 0 .../TASKS.md | 10 +- .../AGENTS.md | 0 .../AdobeConnector.cs | 0 .../AdobeConnectorPlugin.cs | 0 .../AdobeDiagnostics.cs | 0 .../AdobeServiceCollectionExtensions.cs | 0 .../Configuration/AdobeOptions.cs | 0 .../Internal/AdobeBulletinDto.cs | 0 
.../Internal/AdobeCursor.cs | 0 .../Internal/AdobeDetailParser.cs | 0 .../Internal/AdobeDocumentMetadata.cs | 0 .../Internal/AdobeIndexEntry.cs | 0 .../Internal/AdobeIndexParser.cs | 0 .../Internal/AdobeSchemaProvider.cs | 0 .../Schemas/adobe-bulletin.schema.json | 0 ...aOps.Concelier.Connector.Vndr.Adobe.csproj | 50 +- .../TASKS.md | 0 .../AGENTS.md | 0 .../AppleConnector.cs | 0 .../AppleDependencyInjectionRoutine.cs | 0 .../AppleOptions.cs | 0 .../AppleServiceCollectionExtensions.cs | 0 .../Internal/AppleCursor.cs | 0 .../Internal/AppleDetailDto.cs | 0 .../Internal/AppleDetailParser.cs | 0 .../Internal/AppleDiagnostics.cs | 0 .../Internal/AppleIndexEntry.cs | 0 .../Internal/AppleMapper.cs | 0 .../Jobs.cs | 0 .../Properties/AssemblyInfo.cs | 0 .../README.md | 98 +- ...aOps.Concelier.Connector.Vndr.Apple.csproj | 26 +- .../TASKS.md | 0 .../VndrAppleConnectorPlugin.cs | 0 .../AGENTS.md | 0 .../ChromiumConnector.cs | 0 .../ChromiumConnectorPlugin.cs | 0 .../ChromiumDiagnostics.cs | 0 .../ChromiumServiceCollectionExtensions.cs | 0 .../Configuration/ChromiumOptions.cs | 0 .../Internal/ChromiumCursor.cs | 0 .../Internal/ChromiumDocumentMetadata.cs | 0 .../Internal/ChromiumDto.cs | 0 .../Internal/ChromiumFeedEntry.cs | 0 .../Internal/ChromiumFeedLoader.cs | 0 .../Internal/ChromiumMapper.cs | 0 .../Internal/ChromiumParser.cs | 0 .../Internal/ChromiumSchemaProvider.cs | 0 .../Properties/AssemblyInfo.cs | 0 .../Schemas/chromium-post.schema.json | 0 ...s.Concelier.Connector.Vndr.Chromium.csproj | 64 +- .../TASKS.md | 0 .../AGENTS.md | 0 .../CiscoConnector.cs | 0 .../CiscoDependencyInjectionRoutine.cs | 0 .../CiscoServiceCollectionExtensions.cs | 0 .../Configuration/CiscoOptions.cs | 0 .../Internal/CiscoAccessTokenProvider.cs | 0 .../Internal/CiscoAdvisoryDto.cs | 0 .../Internal/CiscoCsafClient.cs | 0 .../Internal/CiscoCsafData.cs | 0 .../Internal/CiscoCsafParser.cs | 0 .../Internal/CiscoCursor.cs | 0 .../Internal/CiscoDiagnostics.cs | 0 .../Internal/CiscoDtoFactory.cs | 0 .../Internal/CiscoMapper.cs | 0 .../Internal/CiscoOAuthMessageHandler.cs | 0 .../Internal/CiscoOpenVulnClient.cs | 0 .../Internal/CiscoRawAdvisory.cs | 0 .../Jobs.cs | 0 ...aOps.Concelier.Connector.Vndr.Cisco.csproj | 21 +- .../TASKS.md | 0 .../VndrCiscoConnectorPlugin.cs | 0 .../AGENTS.md | 0 .../Configuration/MsrcOptions.cs | 0 .../Internal/MsrcAdvisoryDto.cs | 0 .../Internal/MsrcApiClient.cs | 0 .../Internal/MsrcCursor.cs | 0 .../Internal/MsrcDetailDto.cs | 0 .../Internal/MsrcDetailParser.cs | 0 .../Internal/MsrcDiagnostics.cs | 0 .../Internal/MsrcDocumentMetadata.cs | 0 .../Internal/MsrcMapper.cs | 0 .../Internal/MsrcSummaryResponse.cs | 0 .../Internal/MsrcTokenProvider.cs | 0 .../Jobs.cs | 0 .../MsrcConnector.cs | 0 .../MsrcConnectorPlugin.cs | 0 .../MsrcDependencyInjectionRoutine.cs | 0 .../MsrcServiceCollectionExtensions.cs | 0 .../README.md | 0 ...laOps.Concelier.Connector.Vndr.Msrc.csproj | 32 +- .../TASKS.md | 0 .../AGENTS.md | 0 .../Configuration/OracleOptions.cs | 0 .../Internal/OracleAffectedEntry.cs | 0 .../Internal/OracleCalendarFetcher.cs | 0 .../Internal/OracleCursor.cs | 0 .../Internal/OracleDocumentMetadata.cs | 0 .../Internal/OracleDto.cs | 0 .../Internal/OracleDtoValidator.cs | 0 .../Internal/OracleMapper.cs | 0 .../Internal/OracleParser.cs | 0 .../Internal/OraclePatchDocument.cs | 0 .../Jobs.cs | 0 .../OracleConnector.cs | 0 .../OracleDependencyInjectionRoutine.cs | 0 .../OracleServiceCollectionExtensions.cs | 0 .../Properties/AssemblyInfo.cs | 0 ...Ops.Concelier.Connector.Vndr.Oracle.csproj | 34 +- 
.../TASKS.md | 0 .../VndrOracleConnectorPlugin.cs | 0 .../AGENTS.md | 0 .../Configuration/VmwareOptions.cs | 0 .../Internal/VmwareCursor.cs | 0 .../Internal/VmwareDetailDto.cs | 0 .../Internal/VmwareFetchCacheEntry.cs | 0 .../Internal/VmwareIndexItem.cs | 0 .../Internal/VmwareMapper.cs | 0 .../Jobs.cs | 0 .../Properties/AssemblyInfo.cs | 0 ...Ops.Concelier.Connector.Vndr.Vmware.csproj | 46 +- .../TASKS.md | 0 .../VmwareConnector.cs | 0 .../VmwareConnectorPlugin.cs | 0 .../VmwareDependencyInjectionRoutine.cs | 0 .../VmwareDiagnostics.cs | 0 .../VmwareServiceCollectionExtensions.cs | 0 .../StellaOps.Concelier.Core/AGENTS.md | 0 .../Aoc/AdvisoryRawWriteGuard.cs | 70 +- .../Aoc/AocServiceCollectionExtensions.cs | 80 +- .../Aoc/ConcelierAocGuardException.cs | 64 +- .../Aoc/IAdvisoryRawWriteGuard.cs | 32 +- .../CanonicalMergeResult.cs | 0 .../CanonicalMerger.cs | 0 .../Events/AdvisoryEventContracts.cs | 0 .../Events/AdvisoryEventLog.cs | 0 .../Events/IAdvisoryEventLog.cs | 0 .../Events/IAdvisoryEventRepository.cs | 0 .../StellaOps.Concelier.Core/Jobs/IJob.cs | 0 .../Jobs/IJobCoordinator.cs | 0 .../Jobs/IJobStore.cs | 0 .../Jobs/ILeaseStore.cs | 0 .../Jobs/JobCoordinator.cs | 0 .../Jobs/JobDefinition.cs | 0 .../Jobs/JobDiagnostics.cs | 0 .../Jobs/JobExecutionContext.cs | 0 .../StellaOps.Concelier.Core/Jobs/JobLease.cs | 0 .../Jobs/JobPluginRegistrationExtensions.cs | 0 .../Jobs/JobRunCompletion.cs | 0 .../Jobs/JobRunCreateRequest.cs | 0 .../Jobs/JobRunSnapshot.cs | 0 .../Jobs/JobRunStatus.cs | 0 .../Jobs/JobSchedulerBuilder.cs | 0 .../Jobs/JobSchedulerHostedService.cs | 0 .../Jobs/JobSchedulerOptions.cs | 0 .../Jobs/JobTriggerResult.cs | 0 .../Jobs/ServiceCollectionExtensions.cs | 0 .../Linksets/AdvisoryLinksetMapper.cs | 616 +- .../Linksets/AdvisoryObservationFactory.cs | 576 +- .../Linksets/IAdvisoryLinksetMapper.cs | 32 +- .../Linksets/IAdvisoryObservationFactory.cs | 20 +- .../Linksets/LinksetNormalization.cs | 190 +- .../LinksetServiceCollectionExtensions.cs | 38 +- .../Noise/INoisePriorRepository.cs | 52 +- .../Noise/INoisePriorService.cs | 50 +- .../Noise/NoisePriorComputationRequest.cs | 20 +- .../Noise/NoisePriorComputationResult.cs | 20 +- .../Noise/NoisePriorService.cs | 800 +- .../NoisePriorServiceCollectionExtensions.cs | 48 +- .../Noise/NoisePriorSummary.cs | 48 +- .../Observations/AdvisoryObservationCursor.cs | 16 +- .../AdvisoryObservationQueryModels.cs | 164 +- .../AdvisoryObservationQueryService.cs | 488 +- .../IAdvisoryObservationLookup.cs | 78 +- .../IAdvisoryObservationQueryService.cs | 32 +- .../Properties/AssemblyInfo.cs | 6 +- .../Raw/AdvisoryRawQueryOptions.cs | 166 +- .../Raw/AdvisoryRawRecord.cs | 38 +- .../Raw/AdvisoryRawService.cs | 878 +- .../Raw/IAdvisoryRawRepository.cs | 74 +- .../Raw/IAdvisoryRawService.cs | 112 +- .../Raw/RawServiceCollectionExtensions.cs | 32 +- .../StellaOps.Concelier.Core.csproj | 7 +- .../StellaOps.Concelier.Core/TASKS.md | 238 +- .../Unknown/IUnknownStateLedger.cs | 0 .../Unknown/IUnknownStateRepository.cs | 0 .../Unknown/UnknownStateLedger.cs | 0 .../Unknown/UnknownStateLedgerRequest.cs | 0 .../Unknown/UnknownStateLedgerResult.cs | 0 .../Unknown/UnknownStateMarkerKinds.cs | 0 .../Unknown/UnknownStateSnapshot.cs | 0 .../AGENTS.md | 0 .../ExportDigestCalculator.cs | 0 .../ExporterVersion.cs | 0 .../IJsonExportPathResolver.cs | 0 .../JsonExportFile.cs | 0 .../JsonExportJob.cs | 0 .../JsonExportManifestWriter.cs | 0 .../JsonExportOptions.cs | 0 .../JsonExportResult.cs | 0 .../JsonExportSnapshotBuilder.cs | 0 
.../JsonExporterDependencyInjectionRoutine.cs | 0 .../JsonExporterPlugin.cs | 0 .../JsonFeedExporter.cs | 0 .../JsonMirrorBundleWriter.cs | 0 .../StellaOps.Concelier.Exporter.Json.csproj | 8 +- .../TASKS.md | 22 +- .../VulnListJsonExportPathResolver.cs | 0 .../AGENTS.md | 0 .../ITrivyDbBuilder.cs | 0 .../ITrivyDbOrasPusher.cs | 0 .../OciDescriptor.cs | 0 .../OciIndex.cs | 0 .../OciManifest.cs | 0 ...tellaOps.Concelier.Exporter.TrivyDb.csproj | 6 +- .../TASKS.md | 4 +- .../TrivyConfigDocument.cs | 0 .../TrivyDbBlob.cs | 0 .../TrivyDbBoltBuilder.cs | 0 .../TrivyDbBuilderResult.cs | 0 .../TrivyDbExportJob.cs | 0 .../TrivyDbExportMode.cs | 0 .../TrivyDbExportOptions.cs | 0 .../TrivyDbExportOverrides.cs | 0 .../TrivyDbExportPlan.cs | 0 .../TrivyDbExportPlanner.cs | 0 ...ivyDbExporterDependencyInjectionRoutine.cs | 0 .../TrivyDbExporterPlugin.cs | 0 .../TrivyDbFeedExporter.cs | 0 .../TrivyDbMediaTypes.cs | 0 .../TrivyDbMirrorBundleWriter.cs | 0 .../TrivyDbOciWriteResult.cs | 0 .../TrivyDbOciWriter.cs | 0 .../TrivyDbOrasPusher.cs | 0 .../TrivyDbPackage.cs | 0 .../TrivyDbPackageBuilder.cs | 0 .../TrivyDbPackageRequest.cs | 0 .../StellaOps.Concelier.Merge/AGENTS.md | 0 .../StellaOps.Concelier.Merge/Class1.cs | 0 .../Comparers/DebianEvr.cs | 0 .../Comparers/Nevra.cs | 0 .../Comparers/SemanticVersionRangeResolver.cs | 0 .../Identity/AdvisoryIdentityCluster.cs | 0 .../Identity/AdvisoryIdentityResolver.cs | 0 .../Identity/AliasIdentity.cs | 0 .../Jobs/MergeJobKinds.cs | 0 .../Jobs/MergeReconcileJob.cs | 0 .../MergeServiceCollectionExtensions.cs | 0 .../Options/AdvisoryPrecedenceDefaults.cs | 0 .../Options/AdvisoryPrecedenceOptions.cs | 0 .../Options/AdvisoryPrecedenceTable.cs | 0 .../RANGE_PRIMITIVES_COORDINATION.md | 194 +- .../Services/AdvisoryMergeService.cs | 878 +- .../Services/AdvisoryPrecedenceMerger.cs | 0 .../AffectedPackagePrecedenceResolver.cs | 0 .../Services/AliasGraphResolver.cs | 0 .../Services/CanonicalHashCalculator.cs | 0 .../Services/ConflictDetailPayload.cs | 88 +- .../Services/MergeConflictDetail.cs | 0 .../Services/MergeConflictExplainerPayload.cs | 0 .../Services/MergeConflictSummary.cs | 0 .../Services/MergeEventWriter.cs | 0 .../Services/PrecedenceMergeResult.cs | 0 .../StellaOps.Concelier.Merge.csproj | 36 +- .../StellaOps.Concelier.Merge/TASKS.md | 66 +- .../StellaOps.Concelier.Models/AGENTS.md | 0 .../StellaOps.Concelier.Models/Advisory.cs | 0 .../AdvisoryCredit.cs | 0 .../AdvisoryProvenance.cs | 0 .../AdvisoryReference.cs | 0 .../AdvisoryWeakness.cs | 0 .../AffectedPackage.cs | 0 .../AffectedPackageStatus.cs | 0 .../AffectedPackageStatusCatalog.cs | 0 .../AffectedVersionRange.cs | 0 .../AffectedVersionRangeExtensions.cs | 0 .../AliasSchemeRegistry.cs | 0 .../AliasSchemes.cs | 0 .../BACKWARD_COMPATIBILITY.md | 0 .../CANONICAL_RECORDS.md | 0 .../CanonicalJsonSerializer.cs | 0 .../StellaOps.Concelier.Models/CvssMetric.cs | 0 .../EvrPrimitiveExtensions.cs | 0 .../NevraPrimitiveExtensions.cs | 0 .../NormalizedVersionRule.cs | 0 .../Observations/AdvisoryObservation.cs | 566 +- .../OsvGhsaParityDiagnostics.cs | 0 .../OsvGhsaParityInspector.cs | 0 .../PROVENANCE_GUIDELINES.md | 0 .../ProvenanceFieldMasks.cs | 0 .../ProvenanceInspector.cs | 0 .../RangePrimitives.cs | 0 .../SemVerPrimitiveExtensions.cs | 0 .../SeverityNormalization.cs | 0 .../SnapshotSerializer.cs | 0 .../StellaOps.Concelier.Models.csproj | 24 +- .../StellaOps.Concelier.Models/TASKS.md | 0 .../StellaOps.Concelier.Models/Validation.cs | 0 .../AssemblyInfo.cs | 0 .../Cvss/CvssMetricNormalizer.cs | 0 
 .../Distro/DebianEvr.cs | 0
 .../Distro/Nevra.cs | 0
 .../Identifiers/Cpe23.cs | 0
 .../Identifiers/IdentifierNormalizer.cs | 0
 .../Identifiers/PackageUrl.cs | 0
 .../SemVer/SemVerRangeRuleBuilder.cs | 0
 .../StellaOps.Concelier.Normalization.csproj | 0
 .../TASKS.md | 0
 .../Text/DescriptionNormalizer.cs | 0
 .../AdvisoryRawDocument.cs | 152 +-
 .../StellaOps.Concelier.RawModels/Class1.cs | 12 +-
 .../JsonElementExtensions.cs | 24 +-
 .../RawDocumentFactory.cs | 78 +-
 .../StellaOps.Concelier.RawModels.csproj | 24 +-
 .../VexRawDocument.cs | 48 +-
 .../AGENTS.md | 0
 .../Advisories/AdvisoryDocument.cs | 0
 .../Advisories/AdvisoryStore.cs | 0
 .../Advisories/IAdvisoryStore.cs | 0
 .../Advisories/NormalizedVersionDocument.cs | 0
 .../NormalizedVersionDocumentFactory.cs | 0
 .../Aliases/AliasDocument.cs | 0
 .../Aliases/AliasStore.cs | 0
 .../Aliases/AliasStoreConstants.cs | 0
 .../Aliases/AliasStoreMetrics.cs | 0
 .../Aliases/IAliasStore.cs | 0
 .../ChangeHistory/ChangeHistoryDocument.cs | 0
 .../ChangeHistoryDocumentExtensions.cs | 0
 .../ChangeHistory/ChangeHistoryFieldChange.cs | 0
 .../ChangeHistory/ChangeHistoryRecord.cs | 0
 .../ChangeHistory/IChangeHistoryStore.cs | 0
 .../ChangeHistory/MongoChangeHistoryStore.cs | 0
 .../Conflicts/AdvisoryConflictDocument.cs | 0
 .../Conflicts/AdvisoryConflictRecord.cs | 0
 .../Conflicts/AdvisoryConflictStore.cs | 0
 .../Documents/DocumentDocument.cs | 0
 .../Documents/DocumentRecord.cs | 0
 .../Documents/DocumentStore.cs | 0
 .../Documents/IDocumentStore.cs | 0
 .../Dtos/DtoDocument.cs | 0
 .../Dtos/DtoRecord.cs | 0
 .../Dtos/DtoStore.cs | 0
 .../Dtos/IDtoStore.cs | 0
 .../Events/MongoAdvisoryEventRepository.cs | 0
 .../Exporting/ExportStateDocument.cs | 0
 .../Exporting/ExportStateManager.cs | 0
 .../Exporting/ExportStateRecord.cs | 0
 .../Exporting/ExportStateStore.cs | 0
 .../Exporting/IExportStateStore.cs | 0
 .../ISourceStateRepository.cs | 0
 .../JobLeaseDocument.cs | 0
 .../JobRunDocument.cs | 0
 .../JpFlags/IJpFlagStore.cs | 0
 .../JpFlags/JpFlagDocument.cs | 0
 .../JpFlags/JpFlagRecord.cs | 0
 .../JpFlags/JpFlagStore.cs | 0
 .../MIGRATIONS.md | 0
 .../MergeEvents/IMergeEventStore.cs | 0
 .../MergeEvents/MergeEventDocument.cs | 0
 .../MergeEvents/MergeEventRecord.cs | 0
 .../MergeEvents/MergeEventStore.cs | 0
 .../MergeEvents/MergeFieldDecision.cs | 0
 ...EnsureAdvisoryEventCollectionsMigration.cs | 0
 ...ureAdvisoryRawIdempotencyIndexMigration.cs | 312 +-
 .../EnsureAdvisoryRawValidatorMigration.cs | 744 +-
 ...sureAdvisorySupersedesBackfillMigration.cs | 484 +-
 .../EnsureDocumentExpiryIndexesMigration.cs | 0
 .../EnsureGridFsExpiryIndexesMigration.cs | 0
 .../Migrations/IMongoMigration.cs | 0
 .../Migrations/MongoMigrationDocument.cs | 0
 .../Migrations/MongoMigrationRunner.cs | 0
 .../SemVerStyleBackfillMigration.cs | 0
 .../MongoBootstrapper.cs | 0
 .../MongoCollectionValidatorOptions.cs | 42 +-
 .../MongoJobStore.cs | 0
 .../MongoLeaseStore.cs | 0
 .../MongoSessionProvider.cs | 0
 .../MongoSourceStateRepository.cs | 0
 .../MongoStorageDefaults.cs | 0
 .../MongoStorageOptions.cs | 0
 .../AdvisoryObservationDocument.cs | 326 +-
 .../AdvisoryObservationDocumentFactory.cs | 184 +-
 .../Observations/AdvisoryObservationLookup.cs | 120 +-
 .../Observations/AdvisoryObservationStore.cs | 274 +-
 .../Observations/IAdvisoryObservationStore.cs | 40 +-
 .../Properties/AssemblyInfo.cs | 0
 .../PsirtFlags/IPsirtFlagStore.cs | 0
 .../PsirtFlags/PsirtFlagDocument.cs | 0
 .../PsirtFlags/PsirtFlagRecord.cs | 0
 .../PsirtFlags/PsirtFlagStore.cs | 0
 .../Raw/MongoAdvisoryRawRepository.cs | 1440 +-
 .../RawDocumentRetentionService.cs | 0
 .../ServiceCollectionExtensions.cs | 0
 .../SourceStateDocument.cs | 0
 .../SourceStateRecord.cs | 0
 .../SourceStateRepositoryExtensions.cs | 0
 .../Statements/AdvisoryStatementDocument.cs | 0
 .../Statements/AdvisoryStatementRecord.cs | 0
 .../Statements/AdvisoryStatementStore.cs | 0
 .../StellaOps.Concelier.Storage.Mongo.csproj | 36 +-
 .../TASKS.md | 60 +-
 .../ConnectorTestHarness.cs | 0
 .../MongoIntegrationFixture.cs | 0
 .../StellaOps.Concelier.Testing.csproj | 40 +-
 .../Acsc/AcscConnectorFetchTests.cs | 0
 .../Acsc/AcscConnectorParseTests.cs | 0
 .../Acsc/AcscHttpClientConfigurationTests.cs | 0
 .../acsc-advisories-multi.snapshot.json | 0
 .../Fixtures/acsc-advisories.snapshot.json | 0
 ...aOps.Concelier.Connector.Acsc.Tests.csproj | 20 +
 .../CccsConnectorTests.cs | 0
 .../Fixtures/cccs-feed-en.json | 0
 .../Fixtures/cccs-raw-advisory-fr.json | 0
 .../Fixtures/cccs-raw-advisory.json | 0
 .../Fixtures/cccs-taxonomy-en.json | 0
 .../Internal/CccsHtmlParserTests.cs | 0
 .../Internal/CccsMapperTests.cs | 0
 ...aOps.Concelier.Connector.Cccs.Tests.csproj | 39 +-
 .../CertBundConnectorTests.cs | 0
 .../Fixtures/certbund-detail.json | 0
 .../Fixtures/certbund-feed.xml | 0
 ....Concelier.Connector.CertBund.Tests.csproj | 45 +-
 .../CertCc/CertCcConnectorFetchTests.cs | 0
 .../CertCc/CertCcConnectorSnapshotTests.cs | 0
 .../CertCc/CertCcConnectorTests.cs | 0
 .../Fixtures/certcc-advisories.snapshot.json | 0
 .../Fixtures/certcc-documents.snapshot.json | 0
 .../Fixtures/certcc-requests.snapshot.json | 0
 .../Fixtures/certcc-state.snapshot.json | 0
 .../Fixtures/summary-2025-09.json | 0
 .../Fixtures/summary-2025-10.json | 0
 .../Fixtures/summary-2025-11.json | 0
 .../Fixtures/summary-2025.json | 0
 .../Fixtures/vendor-statuses-294418.json | 0
 .../Fixtures/vendors-294418.json | 0
 .../Fixtures/vu-257161.json | 0
 .../Fixtures/vu-294418-vendors.json | 0
 .../Fixtures/vu-294418-vuls.json | 0
 .../Fixtures/vu-294418.json | 0
 .../Fixtures/vulnerabilities-294418.json | 0
 .../Internal/CertCcMapperTests.cs | 0
 .../Internal/CertCcSummaryParserTests.cs | 0
 .../Internal/CertCcSummaryPlannerTests.cs | 0
 .../CertCcVendorStatementParserTests.cs | 0
 ...ps.Concelier.Connector.CertCc.Tests.csproj | 39 +-
 .../CertFr/CertFrConnectorTests.cs | 0
 .../Fixtures/certfr-advisories.snapshot.json | 0
 .../Fixtures/certfr-detail-AV-2024-001.html | 0
 .../Fixtures/certfr-detail-AV-2024-002.html | 0
 .../CertFr/Fixtures/certfr-feed.xml | 0
 ...ps.Concelier.Connector.CertFr.Tests.csproj | 17 +
 .../CertIn/CertInConnectorTests.cs | 0
 .../CertIn/Fixtures/alerts-page1.json | 0
 .../Fixtures/detail-CIAD-2024-0005.html | 0
 .../CertIn/Fixtures/expected-advisory.json | 0
 ...ps.Concelier.Connector.CertIn.Tests.csproj | 17 +
 .../Common/CannedHttpMessageHandlerTests.cs | 0
 .../Common/HtmlContentSanitizerTests.cs | 0
 .../Common/PackageCoordinateHelperTests.cs | 0
 .../Common/PdfTextExtractorTests.cs | 0
 .../Common/SourceFetchServiceGuardTests.cs | 508 +-
 .../Common/SourceFetchServiceTests.cs | 0
 .../Common/SourceHttpClientBuilderTests.cs | 654 +-
 .../Common/TimeWindowCursorPlannerTests.cs | 0
 .../Common/UrlNormalizerTests.cs | 0
 .../Json/JsonSchemaValidatorTests.cs | 0
 ...ps.Concelier.Connector.Common.Tests.csproj | 21 +-
 .../Xml/XmlSchemaValidatorTests.cs | 0
 .../Cve/CveConnectorTests.cs | 0
 .../Fixtures/cve-CVE-2024-0001.json | 0
 .../Fixtures/cve-list.json | 0
 .../Fixtures/expected-CVE-2024-0001.json | 0
 ...laOps.Concelier.Connector.Cve.Tests.csproj | 18 +
 .../DebianConnectorTests.cs | 0
 .../DebianMapperTests.cs | 0
 .../Fixtures/debian-detail-dsa-2024-123.html | 0
 .../Fixtures/debian-detail-dsa-2024-124.html | 0
 .../Distro/Debian/Fixtures/debian-list.txt | 0
 ...elier.Connector.Distro.Debian.Tests.csproj | 14 +
 .../RedHat/Fixtures/csaf-rhsa-2025-0001.json | 0
 .../RedHat/Fixtures/csaf-rhsa-2025-0002.json | 0
 .../RedHat/Fixtures/csaf-rhsa-2025-0003.json | 0
 .../Fixtures/rhsa-2025-0001.snapshot.json | 0
 .../Fixtures/rhsa-2025-0002.snapshot.json | 0
 .../Fixtures/rhsa-2025-0003.snapshot.json | 0
 .../RedHat/Fixtures/summary-page1-repeat.json | 0
 .../RedHat/Fixtures/summary-page1.json | 0
 .../RedHat/Fixtures/summary-page2.json | 0
 .../RedHat/Fixtures/summary-page3.json | 0
 .../RedHat/RedHatConnectorHarnessTests.cs | 0
 .../RedHat/RedHatConnectorTests.cs | 0
 ...elier.Connector.Distro.RedHat.Tests.csproj | 17 +
 .../Distro/Suse/Fixtures/suse-changes.csv | 0
 .../Suse/Fixtures/suse-su-2025_0001-1.json | 0
 .../Suse/Fixtures/suse-su-2025_0002-1.json | 0
 ...ncelier.Connector.Distro.Suse.Tests.csproj | 19 +
 .../SuseConnectorTests.cs | 0
 .../SuseCsafParserTests.cs | 0
 .../SuseMapperTests.cs | 0
 .../Fixtures/ubuntu-notices-page0.json | 0
 .../Fixtures/ubuntu-notices-page1.json | 0
 ...elier.Connector.Distro.Ubuntu.Tests.csproj | 19 +
 .../UbuntuConnectorTests.cs | 0
 .../Fixtures/conflict-ghsa.canonical.json | 0
 .../Fixtures/credit-parity.ghsa.json | 0
 .../Fixtures/credit-parity.nvd.json | 0
 .../Fixtures/credit-parity.osv.json | 0
 .../expected-GHSA-xxxx-yyyy-zzzz.json | 0
 .../Fixtures/ghsa-GHSA-xxxx-yyyy-zzzz.json | 0
 .../Fixtures/ghsa-list.json | 0
 .../Ghsa/GhsaConflictFixtureTests.cs | 0
 .../Ghsa/GhsaConnectorTests.cs | 0
 .../Ghsa/GhsaCreditParityRegressionTests.cs | 0
 .../GhsaDependencyInjectionRoutineTests.cs | 0
 .../Ghsa/GhsaDiagnosticsTests.cs | 0
 .../Ghsa/GhsaMapperTests.cs | 0
 .../Ghsa/GhsaRateLimitParserTests.cs | 0
 ...aOps.Concelier.Connector.Ghsa.Tests.csproj | 18 +
 .../IcsCisa/Fixtures/icsa-25-123-01.html | 0
 .../IcsCisa/Fixtures/icsma-25-045-01.html | 0
 .../IcsCisa/Fixtures/sample-feed.xml | 0
 .../IcsCisa/IcsCisaConnectorMappingTests.cs | 0
 .../IcsCisa/IcsCisaFeedParserTests.cs | 0
 .../IcsCisaConnectorTests.cs | 0
 ....Concelier.Connector.Ics.Cisa.Tests.csproj | 17 +
 .../Fixtures/detail-acme-controller-2024.html | 0
 .../Kaspersky/Fixtures/expected-advisory.json | 0
 .../Kaspersky/Fixtures/feed-page1.xml | 0
 .../Kaspersky/KasperskyConnectorTests.cs | 0
 ...elier.Connector.Ics.Kaspersky.Tests.csproj | 17 +
 .../Jvn/Fixtures/expected-advisory.json | 0
 .../Jvn/Fixtures/jvnrss-window1.xml | 0
 .../Jvn/Fixtures/vuldef-JVNDB-2024-123456.xml | 0
 .../Jvn/JvnConnectorTests.cs | 0
 ...laOps.Concelier.Connector.Jvn.Tests.csproj | 17 +
 .../Kev/Fixtures/kev-advisories.snapshot.json | 0
 .../Kev/Fixtures/kev-catalog.json | 0
 .../Kev/KevConnectorTests.cs | 0
 .../Kev/KevMapperTests.cs | 0
 ...laOps.Concelier.Connector.Kev.Tests.csproj | 20 +
 .../Fixtures/kisa-detail.json | 0
 .../Fixtures/kisa-feed.xml | 0
 .../KisaConnectorTests.cs | 0
 ...aOps.Concelier.Connector.Kisa.Tests.csproj | 49 +-
 .../Nvd/Fixtures/conflict-nvd.canonical.json | 0
 .../Nvd/Fixtures/credit-parity.ghsa.json | 0
 .../Nvd/Fixtures/credit-parity.nvd.json | 0
 .../Nvd/Fixtures/credit-parity.osv.json | 0
 .../Nvd/Fixtures/nvd-invalid-schema.json | 0
 .../Nvd/Fixtures/nvd-multipage-1.json | 0
 .../Nvd/Fixtures/nvd-multipage-2.json | 0
 .../Nvd/Fixtures/nvd-multipage-3.json | 0
 .../Nvd/Fixtures/nvd-window-1.json | 0
 .../Nvd/Fixtures/nvd-window-2.json | 0
 .../Nvd/Fixtures/nvd-window-update.json | 0
 .../Nvd/NvdConflictFixtureTests.cs | 0
 .../Nvd/NvdConnectorHarnessTests.cs | 0
 .../Nvd/NvdConnectorTests.cs | 0
 .../Nvd/NvdMergeExportParityTests.cs | 0
 ...laOps.Concelier.Connector.Nvd.Tests.csproj | 19 +
 .../Fixtures/conflict-osv.canonical.json | 0
 .../Fixtures/osv-ghsa.ghsa.json | 0
 .../Fixtures/osv-ghsa.osv.json | 0
 .../Fixtures/osv-ghsa.raw-ghsa.json | 0
 .../Fixtures/osv-ghsa.raw-osv.json | 0
 .../Fixtures/osv-npm.snapshot.json | 0
 .../Fixtures/osv-pypi.snapshot.json | 0
 .../Osv/OsvConflictFixtureTests.cs | 0
 .../Osv/OsvGhsaParityRegressionTests.cs | 0
 .../Osv/OsvMapperTests.cs | 0
 .../Osv/OsvSnapshotTests.cs | 0
 ...laOps.Concelier.Connector.Osv.Tests.csproj | 19 +
 .../Fixtures/export-sample.xml | 0
 .../Fixtures/ru-bdu-advisories.snapshot.json | 0
 .../Fixtures/ru-bdu-documents.snapshot.json | 0
 .../Fixtures/ru-bdu-dtos.snapshot.json | 0
 .../Fixtures/ru-bdu-requests.snapshot.json | 0
 .../Fixtures/ru-bdu-state.snapshot.json | 0
 .../RuBduConnectorSnapshotTests.cs | 0
 .../RuBduMapperTests.cs | 0
 .../RuBduXmlParserTests.cs | 0
 ...ps.Concelier.Connector.Ru.Bdu.Tests.csproj | 14 +
 .../Fixtures/bulletin-legacy.json.zip | Bin
 .../Fixtures/bulletin-sample.json.zip | Bin
 .../Fixtures/listing-page2.html | 0
 .../Fixtures/listing.html | 0
 .../Fixtures/nkcki-advisories.snapshot.json | 0
 .../RuNkckiConnectorTests.cs | 0
 .../RuNkckiJsonParserTests.cs | 0
 .../RuNkckiMapperTests.cs | 0
 ....Concelier.Connector.Ru.Nkcki.Tests.csproj | 14 +
 .../FixtureLoader.cs | 66 +-
 .../Fixtures/mirror-advisory.expected.json | 424 +-
 .../Fixtures/mirror-bundle.sample.json | 404 +-
 .../MirrorAdvisoryMapperTests.cs | 94 +-
 .../MirrorSignatureVerifierTests.cs | 378 +-
 .../SampleData.cs | 530 +-
 ...ier.Connector.StellaOpsMirror.Tests.csproj | 19 +-
 .../StellaOpsMirrorConnectorTests.cs | 928 +-
 .../Adobe/AdobeConnectorFetchTests.cs | 0
 .../Fixtures/adobe-advisories.snapshot.json | 0
 .../Fixtures/adobe-detail-apsb25-85.html | 0
 .../Fixtures/adobe-detail-apsb25-87.html | 0
 .../Adobe/Fixtures/adobe-index.html | 0
 ...oncelier.Connector.Vndr.Adobe.Tests.csproj | 18 +
 .../Apple/AppleConnectorTests.cs | 0
 .../Apple/AppleFixtureManager.cs | 0
 .../Apple/AppleLiveRegressionTests.cs | 0
 .../Apple/Fixtures/106355.expected.json | 0
 .../Apple/Fixtures/106355.html | 0
 .../Apple/Fixtures/125326.expected.json | 0
 .../Apple/Fixtures/125326.html | 0
 .../Apple/Fixtures/125328.expected.json | 0
 .../Apple/Fixtures/125328.html | 0
 .../Apple/Fixtures/HT214108.expected.json | 0
 .../Apple/Fixtures/HT215500.expected.json | 0
 .../Apple/Fixtures/ht214108.html | 0
 .../Apple/Fixtures/ht215500.html | 0
 .../Apple/Fixtures/index.json | 0
 ...oncelier.Connector.Vndr.Apple.Tests.csproj | 19 +
 .../Chromium/ChromiumConnectorTests.cs | 0
 .../Chromium/ChromiumMapperTests.cs | 0
 .../Fixtures/chromium-advisory.snapshot.json | 0
 .../Chromium/Fixtures/chromium-detail.html | 0
 .../Chromium/Fixtures/chromium-feed.xml | 0
 ...elier.Connector.Vndr.Chromium.Tests.csproj | 19 +
 .../CiscoDtoFactoryTests.cs | 0
 .../CiscoMapperTests.cs | 0
 ...oncelier.Connector.Vndr.Cisco.Tests.csproj | 18 +
 .../Fixtures/msrc-detail.json | 0
 .../Fixtures/msrc-summary.json | 0
 .../MsrcConnectorTests.cs | 0
 ...Concelier.Connector.Vndr.Msrc.Tests.csproj | 25 +
 .../Fixtures/oracle-advisories.snapshot.json | 0
 .../oracle-calendar-cpuapr2024-single.html | 0
 .../Fixtures/oracle-calendar-cpuapr2024.html | 0
 .../Fixtures/oracle-detail-cpuapr2024-01.html | 0
 .../Fixtures/oracle-detail-cpuapr2024-02.html | 0
 .../Fixtures/oracle-detail-invalid.html | 0
 .../Oracle/OracleConnectorTests.cs | 0
 ...ncelier.Connector.Vndr.Oracle.Tests.csproj | 18 +
 ...ncelier.Connector.Vndr.Vmware.Tests.csproj | 19 +
 .../Fixtures/vmware-advisories.snapshot.json | 0
 .../vmware-detail-vmsa-2024-0001.json | 0
 .../vmware-detail-vmsa-2024-0002.json | 0
 .../vmware-detail-vmsa-2024-0003.json | 0
 .../Vmware/Fixtures/vmware-index-initial.json | 0
 .../Vmware/Fixtures/vmware-index-second.json | 0
 .../Vmware/VmwareConnectorTests.cs | 0
 .../Vmware/VmwareMapperTests.cs | 0
 .../Aoc/AdvisoryRawWriteGuardTests.cs | 166 +-
 .../CanonicalMergerTests.cs | 0
 .../Events/AdvisoryEventLogTests.cs | 0
 .../JobCoordinatorTests.cs | 0
 .../JobPluginRegistrationExtensionsTests.cs | 0
 .../JobSchedulerBuilderTests.cs | 0
 .../Linksets/AdvisoryLinksetMapperTests.cs | 250 +-
 .../AdvisoryObservationFactoryTests.cs | 302 +-
 .../Noise/NoisePriorServiceTests.cs | 640 +-
 .../AdvisoryObservationQueryServiceTests.cs | 652 +-
 .../PluginRoutineFixtures.cs | 0
 .../Raw/AdvisoryRawServiceTests.cs | 286 +-
 .../StellaOps.Concelier.Core.Tests.csproj | 13 +
 .../Unknown/UnknownStateLedgerTests.cs | 0
 .../JsonExportSnapshotBuilderTests.cs | 0
 ...ExporterDependencyInjectionRoutineTests.cs | 0
 .../JsonExporterParitySmokeTests.cs | 0
 .../JsonFeedExporterTests.cs | 0
 ...laOps.Concelier.Exporter.Json.Tests.csproj | 14 +
 .../VulnListJsonExportPathResolverTests.cs | 0
 ...ps.Concelier.Exporter.TrivyDb.Tests.csproj | 14 +
 .../TrivyDbExportPlannerTests.cs | 0
 .../TrivyDbFeedExporterTests.cs | 0
 .../TrivyDbOciWriterTests.cs | 0
 .../TrivyDbPackageBuilderTests.cs | 0
 .../AdvisoryIdentityResolverTests.cs | 0
 .../AdvisoryMergeServiceTests.cs | 0
 .../AdvisoryPrecedenceMergerTests.cs | 0
 .../AffectedPackagePrecedenceResolverTests.cs | 0
 .../AliasGraphResolverTests.cs | 0
 .../CanonicalHashCalculatorTests.cs | 0
 .../DebianEvrComparerTests.cs | 0
 .../MergeEventWriterTests.cs | 0
 .../MergePrecedenceIntegrationTests.cs | 0
 .../MetricCollector.cs | 0
 .../NevraComparerTests.cs | 0
 .../SemanticVersionRangeResolverTests.cs | 0
 .../StellaOps.Concelier.Merge.Tests.csproj | 14 +
 .../TestLogger.cs | 0
 .../AdvisoryProvenanceTests.cs | 0
 .../AdvisoryTests.cs | 0
 .../AffectedPackageStatusTests.cs | 0
 .../AffectedVersionRangeExtensionsTests.cs | 0
 .../AliasSchemeRegistryTests.cs | 0
 .../CanonicalExampleFactory.cs | 0
 .../CanonicalExamplesTests.cs | 0
 .../CanonicalJsonSerializerTests.cs | 0
 .../EvrPrimitiveExtensionsTests.cs | 0
 .../Fixtures/ghsa-semver.actual.json | 252 +-
 .../Fixtures/ghsa-semver.json | 0
 .../Fixtures/kev-flag.actual.json | 88 +-
 .../Fixtures/kev-flag.json | 0
 .../Fixtures/nvd-basic.actual.json | 242 +-
 .../Fixtures/nvd-basic.json | 0
 .../Fixtures/psirt-overlay.actual.json | 248 +-
 .../Fixtures/psirt-overlay.json | 0
 .../NevraPrimitiveExtensionsTests.cs | 0
 .../NormalizedVersionRuleTests.cs | 0
 .../Observations/AdvisoryObservationTests.cs | 122 +-
 .../OsvGhsaParityDiagnosticsTests.cs | 0
 .../OsvGhsaParityInspectorTests.cs | 0
 .../ProvenanceDiagnosticsTests.cs | 0
 .../RangePrimitivesTests.cs | 0
 .../SemVerPrimitiveTests.cs | 0
 .../SerializationDeterminismTests.cs | 0
 .../SeverityNormalizationTests.cs | 0
 .../StellaOps.Concelier.Models.Tests.csproj | 17 +-
 .../CpeNormalizerTests.cs | 0
 .../CvssMetricNormalizerTests.cs | 0
 .../DebianEvrParserTests.cs | 0
 .../DescriptionNormalizerTests.cs | 0
 .../NevraParserTests.cs | 0
 .../PackageUrlNormalizerTests.cs | 0
 .../SemVerRangeRuleBuilderTests.cs | 0
 ...laOps.Concelier.Normalization.Tests.csproj | 12 +
 ...StellaOps.Concelier.RawModels.Tests.csproj | 7 +-
 .../UnitTest1.cs | 20 +-
 .../xunit.runner.json | 6 +-
 .../AdvisoryConflictStoreTests.cs | 0
 .../AdvisoryStatementStoreTests.cs | 0
 .../AdvisoryStorePerformanceTests.cs | 0
 .../AdvisoryStoreTests.cs | 0
 .../AliasStoreTests.cs | 0
 .../DocumentStoreTests.cs | 0
 .../DtoStoreTests.cs | 0
 .../ExportStateManagerTests.cs | 0
 .../ExportStateStoreTests.cs | 0
 .../MergeEventStoreTests.cs | 0
 .../Migrations/MongoMigrationRunnerTests.cs | 0
 .../MongoAdvisoryEventRepositoryTests.cs | 0
 .../MongoBootstrapperTests.cs | 0
 .../MongoJobStoreTests.cs | 0
 .../MongoSourceStateRepositoryTests.cs | 0
 ...AdvisoryObservationDocumentFactoryTests.cs | 136 +-
 .../AdvisoryObservationStoreTests.cs | 444 +-
 .../RawDocumentRetentionServiceTests.cs | 0
 ...laOps.Concelier.Storage.Mongo.Tests.csproj | 16 +
 .../ConcelierOptionsPostConfigureTests.cs | 0
 .../PluginLoaderTests.cs | 0
 ...tellaOps.Concelier.WebService.Tests.csproj | 14 +
 .../WebServiceEndpointsTests.cs | 3580 +-
 .../StellaOps.DevPortal.Site/AGENTS.md | 30 +-
 .../StellaOps.DevPortal.Site/TASKS.md | 38 +-
 src/Directory.Build.props | 98 +-
 .../StellaOps.EvidenceLocker.sln | 99 +
 .../StellaOps.EvidenceLocker/AGENTS.md | 56 +-
 .../StellaOps.EvidenceLocker.Core/Class1.cs | 12 +-
 .../StellaOps.EvidenceLocker.Core.csproj | 36 +-
 .../Class1.cs | 12 +-
 ...laOps.EvidenceLocker.Infrastructure.csproj | 56 +-
 .../StellaOps.EvidenceLocker.Tests.csproj | 270 +-
 .../UnitTest1.cs | 20 +-
 .../xunit.runner.json | 6 +-
 .../Program.cs | 82 +-
 .../Properties/launchSettings.json | 46 +-
 ...StellaOps.EvidenceLocker.WebService.csproj | 82 +-
 .../StellaOps.EvidenceLocker.WebService.http | 12 +-
 .../appsettings.Development.json | 16 +-
 .../appsettings.json | 18 +-
 .../Program.cs | 14 +-
 .../Properties/launchSettings.json | 24 +-
 .../StellaOps.EvidenceLocker.Worker.csproj | 86 +-
 .../StellaOps.EvidenceLocker.Worker/Worker.cs | 32 +-
 .../appsettings.Development.json | 16 +-
 .../appsettings.json | 16 +-
 .../StellaOps.EvidenceLocker.sln | 180 +-
 .../StellaOps.EvidenceLocker/TASKS.md | 48 +-
 .../TASKS.md | 0
 .../StellaOps.Excititor.WebService/AGENTS.md | 0
 .../Endpoints/IngestEndpoints.cs | 0
 .../Endpoints/MirrorEndpoints.cs | 0
 .../Endpoints/ResolveEndpoint.cs | 0
 .../StellaOps.Excititor.WebService/Program.cs | 0
 .../Properties/AssemblyInfo.cs | 6 +-
 .../Services/MirrorRateLimiter.cs | 0
 .../Services/ScopeAuthorization.cs | 0
 .../Services/VexIngestOrchestrator.cs | 0
 .../StellaOps.Excititor.WebService.csproj | 23 +
 .../StellaOps.Excititor.WebService/TASKS.md | 188 +-
 .../StellaOps.Excititor.Worker/AGENTS.md | 0
 .../Options/VexWorkerOptions.cs | 0
 .../Options/VexWorkerOptionsValidator.cs | 0
 .../Options/VexWorkerPluginOptions.cs | 0
 .../Options/VexWorkerRefreshOptions.cs | 180 +-
 .../Options/VexWorkerRetryOptions.cs | 0
 .../StellaOps.Excititor.Worker/Program.cs | 0
 .../Properties/AssemblyInfo.cs | 0
 .../Scheduling/DefaultVexProviderRunner.cs | 542 +-
 .../IVexConsensusRefreshScheduler.cs | 12 +-
 .../Scheduling/IVexProviderRunner.cs | 0
 .../Scheduling/VexConsensusRefreshService.cs | 1244 +-
 .../Scheduling/VexWorkerHostedService.cs | 0
 .../Scheduling/VexWorkerSchedule.cs | 0
 .../Signature/VerifyingVexRawDocumentSink.cs | 138 +-
 .../Signature/WorkerSignatureVerifier.cs | 728 +-
 .../StellaOps.Excititor.Worker.csproj | 25 +
 .../StellaOps.Excititor.Worker/TASKS.md | 38 +-
 src/Excititor/StellaOps.Excititor.sln | 705 +
 .../Extensions/ServiceCollectionExtensions.cs | 0
 .../S3ArtifactClient.cs | 0
 ...ellaOps.Excititor.ArtifactStores.S3.csproj | 34 +-
 .../StellaOps.Excititor.Attestation/AGENTS.md | 0
 .../Dsse/DsseEnvelope.cs | 0
 .../Dsse/VexDsseBuilder.cs | 0
 .../EXCITITOR-ATTEST-01-003-plan.md | 0
 .../Extensions/ServiceCollectionExtensions.cs | 0
 .../Models/VexAttestationPredicate.cs | 0
 .../Signing/IVexSigner.cs | 0
 .../StellaOps.Excititor.Attestation.csproj | 34 +-
 .../StellaOps.Excititor.Attestation/TASKS.md | 0
 .../Transparency/ITransparencyLogClient.cs | 0
 .../Transparency/RekorHttpClient.cs | 0
 .../Transparency/RekorHttpClientOptions.cs | 0
 .../Verification/IVexAttestationVerifier.cs | 0
 .../Verification/VexAttestationMetrics.cs | 0
 .../VexAttestationVerificationOptions.cs | 0
 .../Verification/VexAttestationVerifier.cs | 942 +-
 .../VexAttestationClient.cs | 0
 .../AGENTS.md | 0
 .../IVexConnectorOptionsValidator.cs | 0
 ...s.Excititor.Connectors.Abstractions.csproj | 34 +-
 .../TASKS.md | 0
 .../VexConnectorBase.cs | 0
 .../VexConnectorDescriptor.cs | 0
 .../VexConnectorLogScope.cs | 0
 .../VexConnectorMetadataBuilder.cs | 0
 .../VexConnectorOptionsBinder.cs | 0
 .../VexConnectorOptionsBinderOptions.cs | 0
 .../VexConnectorOptionsValidationException.cs | 0
 .../AGENTS.md | 0
 .../CiscoCsafConnector.cs | 0
 .../Configuration/CiscoConnectorOptions.cs | 0
 .../CiscoConnectorOptionsValidator.cs | 0
 ...scoConnectorServiceCollectionExtensions.cs | 0
 .../Metadata/CiscoProviderMetadataLoader.cs | 0
 ...Ops.Excititor.Connectors.Cisco.CSAF.csproj | 40 +-
 .../TASKS.md | 0
 .../AGENTS.md | 0
 .../Authentication/MsrcTokenProvider.cs | 0
 .../Configuration/MsrcConnectorOptions.cs | 0
 ...srcConnectorServiceCollectionExtensions.cs | 0
 .../MsrcCsafConnector.cs | 0
 ...aOps.Excititor.Connectors.MSRC.CSAF.csproj | 38 +-
 .../TASKS.md | 0
 .../AGENTS.md | 0
 .../Authentication/OciCosignAuthority.cs | 0
 .../OciRegistryAuthorization.cs | 0
 .../OciOpenVexAttestationConnectorOptions.cs | 0
 ...VexAttestationConnectorOptionsValidator.cs | 0
 ...ionConnectorServiceCollectionExtensions.cs | 0
 .../OciAttestationDiscoveryResult.cs | 0
 .../OciAttestationDiscoveryService.cs | 0
 .../Discovery/OciAttestationTarget.cs | 0
 .../Discovery/OciImageReference.cs | 0
 .../Discovery/OciImageReferenceParser.cs | 0
 .../Discovery/OciOfflineBundleReference.cs | 0
 .../Fetch/OciArtifactDescriptor.cs | 0
 .../Fetch/OciAttestationDocument.cs | 0
 .../Fetch/OciAttestationFetcher.cs | 0
 .../Fetch/OciRegistryClient.cs | 0
 .../OciOpenVexAttestationConnector.cs | 0
 ...titor.Connectors.OCI.OpenVEX.Attest.csproj | 38 +-
 .../TASKS.md | 0
 .../AGENTS.md | 0
 .../Configuration/OracleConnectorOptions.cs | 0
 .../OracleConnectorOptionsValidator.cs | 0
 ...cleConnectorServiceCollectionExtensions.cs | 0
 .../Metadata/OracleCatalogLoader.cs | 0
 .../OracleCsafConnector.cs | 0
 ...ps.Excititor.Connectors.Oracle.CSAF.csproj | 40 +-
 .../TASKS.md | 0
 .../AGENTS.md | 0
 .../Configuration/RedHatConnectorOptions.cs | 0
 ...HatConnectorServiceCollectionExtensions.cs | 0
 .../Metadata/RedHatProviderMetadataLoader.cs | 0
 .../RedHatCsafConnector.cs | 0
 ...ps.Excititor.Connectors.RedHat.CSAF.csproj | 38 +-
 .../TASKS.md | 0
 .../AGENTS.md | 0
 .../Authentication/RancherHubTokenProvider.cs | 0
 .../RancherHubConnectorOptions.cs | 0
 .../RancherHubConnectorOptionsValidator.cs | 0
 ...HubConnectorServiceCollectionExtensions.cs | 0
 .../Design/EXCITITOR-CONN-SUSE-01-002.md | 0
 .../Events/RancherHubEventClient.cs | 0
 .../Events/RancherHubEventModels.cs | 0
 .../Metadata/RancherHubMetadataLoader.cs | 0
 .../RancherHubConnector.cs | 0
 .../State/RancherHubCheckpointManager.cs | 0
 ...titor.Connectors.SUSE.RancherVEXHub.csproj | 38 +-
 .../TASKS.md | 0
 .../AGENTS.md | 0
 .../Configuration/UbuntuConnectorOptions.cs | 0
 .../UbuntuConnectorOptionsValidator.cs | 0
 ...ntuConnectorServiceCollectionExtensions.cs | 0
 .../Metadata/UbuntuCatalogLoader.cs | 0
 ...ps.Excititor.Connectors.Ubuntu.CSAF.csproj | 40 +-
 .../TASKS.md | 0
 .../UbuntuCsafConnector.cs | 0
 .../StellaOps.Excititor.Core/AGENTS.md | 0
 .../Aoc/AocServiceCollectionExtensions.cs | 76 +-
 .../Aoc/ExcititorAocGuardException.cs | 44 +-
 .../Aoc/IVexRawWriteGuard.cs | 32 +-
 .../Aoc/VexRawWriteGuard.cs | 70 +-
 .../BaselineVexConsensusPolicy.cs | 0
 .../IVexConsensusPolicy.cs | 0
 .../MirrorDistributionOptions.cs | 0
 .../MirrorExportPlanner.cs | 0
 .../Observations/IVexObservationLookup.cs | 64 +-
 .../IVexObservationQueryService.cs | 22 +-
 .../Observations/VexObservation.cs | 874 +-
 .../Observations/VexObservationQueryModels.cs | 158 +-
 .../VexObservationQueryService.cs | 622 +-
 .../StellaOps.Excititor.Core.csproj | 7 +-
 .../StellaOps.Excititor.Core/TASKS.md | 202 +-
 .../VexAttestationAbstractions.cs | 0
 .../StellaOps.Excititor.Core/VexCacheEntry.cs | 0
 .../VexCanonicalJsonSerializer.cs | 0
 .../StellaOps.Excititor.Core/VexClaim.cs | 0
 .../VexConnectorAbstractions.cs | 0
 .../StellaOps.Excititor.Core/VexConsensus.cs | 0
 .../VexConsensusHold.cs | 94 +-
 .../VexConsensusPolicyOptions.cs | 0
 .../VexConsensusResolver.cs | 0
 .../VexExportManifest.cs | 0
 .../VexExporterAbstractions.cs | 0
 .../VexNormalizerAbstractions.cs | 0
 .../StellaOps.Excititor.Core/VexProvider.cs | 0
 .../StellaOps.Excititor.Core/VexQuery.cs | 0
 .../VexQuietProvenance.cs | 0
 .../VexScoreEnvelope.cs | 0
 .../StellaOps.Excititor.Core/VexSignals.cs | 0
 .../VexSignatureVerifiers.cs | 0
 .../StellaOps.Excititor.Export/AGENTS.md | 0
 .../ExportEngine.cs | 0
 .../FileSystemArtifactStore.cs | 0
 .../IVexArtifactStore.cs | 0
 .../OfflineBundleArtifactStore.cs | 0
 .../Properties/AssemblyInfo.cs | 0
 .../S3ArtifactStore.cs | 0
 .../StellaOps.Excititor.Export.csproj | 5 +-
 .../StellaOps.Excititor.Export/TASKS.md | 0
 .../VexExportCacheService.cs | 0
 .../VexExportEnvelopeBuilder.cs | 0
 .../VexMirrorBundlePublisher.cs | 0
 .../AGENTS.md | 0
 .../CsafNormalizer.cs | 0
 .../ServiceCollectionExtensions.cs | 0
 .../StellaOps.Excititor.Formats.CSAF.csproj | 32 +-
 .../StellaOps.Excititor.Formats.CSAF/TASKS.md | 0
 .../AGENTS.md | 0
 .../CycloneDxNormalizer.cs | 0
 .../ServiceCollectionExtensions.cs | 0
 ...ellaOps.Excititor.Formats.CycloneDX.csproj | 32 +-
 .../TASKS.md | 0
 .../AGENTS.md | 0
 .../OpenVexNormalizer.cs | 0
 .../ServiceCollectionExtensions.cs | 0
 ...StellaOps.Excititor.Formats.OpenVEX.csproj | 32 +-
 .../TASKS.md | 0
 .../StellaOps.Excititor.Policy/AGENTS.md | 0
 .../IVexPolicyProvider.cs | 0
 .../StellaOps.Excititor.Policy.csproj | 34 +-
 .../StellaOps.Excititor.Policy/TASKS.md | 22 +-
 .../VexPolicyBinder.cs | 0
 .../VexPolicyDiagnostics.cs | 0
 .../VexPolicyDigest.cs | 0
 .../VexPolicyOptions.cs | 0
 .../VexPolicyProcessing.cs | 0
 .../VexPolicyTelemetry.cs | 0
 .../AGENTS.md | 0
 .../IVexExportStore.cs | 0
 .../IVexRawStore.cs | 0
 .../IVexStorageContracts.cs | 0
 .../Migrations/IVexMongoMigration.cs | 0
 .../Migrations/VexConsensusHoldMigration.cs | 58 +-
 .../VexConsensusSignalsMigration.cs | 0
 .../Migrations/VexInitialIndexMigration.cs | 0
 .../Migrations/VexMigrationRecord.cs | 0
 .../VexMongoMigrationHostedService.cs | 0
 .../Migrations/VexMongoMigrationRunner.cs | 0
 .../MongoVexCacheIndex.cs | 0
 .../MongoVexCacheMaintenance.cs | 0
 .../MongoVexClaimStore.cs | 0
 .../MongoVexConnectorStateRepository.cs | 0
 .../MongoVexConsensusHoldStore.cs | 176 +-
 .../MongoVexConsensusStore.cs | 0
 .../MongoVexExportStore.cs | 0
 .../MongoVexProviderStore.cs | 0
 .../MongoVexRawStore.cs | 0
 .../Properties/AssemblyInfo.cs | 0
 .../ServiceCollectionExtensions.cs | 0
 .../StellaOps.Excititor.Storage.Mongo.csproj | 36 +-
 .../StorageBackedVexNormalizerRouter.cs | 0
 .../TASKS.md | 56 +-
 .../VexMongoMappingRegistry.cs | 0
 .../VexMongoModels.cs | 0
 .../VexMongoSessionProvider.cs | 0
 .../VexMongoStorageOptions.cs | 0
 .../VexStatementBackfillService.cs | 0
 .../S3ArtifactClientTests.cs | 0
 ...s.Excititor.ArtifactStores.S3.Tests.csproj | 31 +-
 ...ellaOps.Excititor.Attestation.Tests.csproj | 27 +-
 .../VexAttestationClientTests.cs | 0
 .../VexAttestationVerifierTests.cs | 0
 .../VexDsseBuilderTests.cs | 0
 .../Connectors/CiscoCsafConnectorTests.cs | 430 +-
 .../CiscoProviderMetadataLoaderTests.cs | 0
 ...cititor.Connectors.Cisco.CSAF.Tests.csproj | 33 +-
 .../Authentication/MsrcTokenProviderTests.cs | 0
 .../Connectors/MsrcCsafConnectorTests.cs | 734 +-
 ...xcititor.Connectors.MSRC.CSAF.Tests.csproj | 5 +-
 ...testationConnectorOptionsValidatorTests.cs | 0
 .../OciOpenVexAttestationConnectorTests.cs | 430 +-
 .../OciAttestationDiscoveryServiceTests.cs | 0
 ...Connectors.OCI.OpenVEX.Attest.Tests.csproj | 5 +-
 .../Connectors/OracleCsafConnectorTests.cs | 628 +-
 .../Metadata/OracleCatalogLoaderTests.cs | 0
 ...ititor.Connectors.Oracle.CSAF.Tests.csproj | 5 +-
 .../Connectors/RedHatCsafConnectorTests.cs | 0
 .../RedHatProviderMetadataLoaderTests.cs | 0
 ...ititor.Connectors.RedHat.CSAF.Tests.csproj | 35 +-
 .../RancherHubTokenProviderTests.cs | 0
 .../Metadata/RancherHubMetadataLoaderTests.cs | 0
 ...Connectors.SUSE.RancherVEXHub.Tests.csproj | 35 +-
 .../Connectors/UbuntuCsafConnectorTests.cs | 620 +-
 .../Metadata/UbuntuCatalogLoaderTests.cs | 0
 ...ititor.Connectors.Ubuntu.CSAF.Tests.csproj | 5 +-
 .../Aoc/VexRawWriteGuardTests.cs | 136 +-
 .../VexObservationQueryServiceTests.cs | 614 +-
 .../StellaOps.Excititor.Core.Tests.csproj | 16 +
 .../VexCanonicalJsonSerializerTests.cs | 0
 .../VexConsensusResolverTests.cs | 0
 .../VexPolicyBinderTests.cs | 0
 .../VexPolicyDiagnosticsTests.cs | 0
 .../VexQuerySignatureTests.cs | 0
 .../VexSignalSnapshotTests.cs | 0
 .../ExportEngineTests.cs | 0
 .../FileSystemArtifactStoreTests.cs | 0
 .../MirrorBundlePublisherTests.cs | 0
 .../OfflineBundleArtifactStoreTests.cs | 0
 .../S3ArtifactStoreTests.cs | 0
 .../StellaOps.Excititor.Export.Tests.csproj | 31 +-
 .../VexExportCacheServiceTests.cs | 0
 .../CsafNormalizerTests.cs | 0
 .../Fixtures/rhsa-sample.json | 0
 ...llaOps.Excititor.Formats.CSAF.Tests.csproj | 41 +-
 .../CycloneDxNormalizerTests.cs | 0
 ...s.Excititor.Formats.CycloneDX.Tests.csproj | 35 +-
 .../OpenVexNormalizerTests.cs | 0
 ...Ops.Excititor.Formats.OpenVEX.Tests.csproj | 35 +-
 .../StellaOps.Excititor.Policy.Tests.csproj | 25 +-
 .../VexPolicyProviderTests.cs | 0
 .../MongoVexCacheMaintenanceTests.cs | 0
 .../MongoVexRepositoryTests.cs | 0
 .../MongoVexSessionConsistencyTests.cs | 0
 .../MongoVexStatementBackfillServiceTests.cs | 0
 .../MongoVexStoreMappingTests.cs | 0
 ...laOps.Excititor.Storage.Mongo.Tests.csproj | 16 +
 .../VexMongoMigrationRunnerTests.cs | 0
 .../IngestEndpointsTests.cs | 548 +-
 .../MirrorEndpointsTests.cs | 422 +-
 .../ResolveEndpointTests.cs | 750 +-
 .../StatusEndpointTests.cs | 194 +-
 ...tellaOps.Excititor.WebService.Tests.csproj | 5 +-
 .../TestAuthentication.cs | 122 +-
 .../TestServiceOverrides.cs | 360 +-
 .../TestWebApplicationFactory.cs | 84 +-
 ...efaultVexProviderRunnerIntegrationTests.cs | 726 +-
 .../DefaultVexProviderRunnerTests.cs | 1434 +-
 .../Signature/WorkerSignatureVerifierTests.cs | 458 +-
 .../StellaOps.Excititor.Worker.Tests.csproj | 9 +-
 .../VexWorkerOptionsTests.cs | 0
 .../AGENTS.md | 28 +-
 .../TASKS.md | 26 +-
 .../AGENTS.md | 28 +-
 .../TASKS.md | 14 +-
 .../AGENTS.md | 28 +-
 .../TASKS.md | 26 +-
 src/ExportCenter/StellaOps.ExportCenter.sln | 99 +
 .../StellaOps.ExportCenter/AGENTS.md | 36 +-
 .../StellaOps.ExportCenter.Core/Class1.cs | 12 +-
 .../StellaOps.ExportCenter.Core.csproj | 36 +-
 .../Class1.cs | 12 +-
 ...ellaOps.ExportCenter.Infrastructure.csproj | 56 +-
 .../StellaOps.ExportCenter.Tests.csproj | 270 +-
 .../StellaOps.ExportCenter.Tests/UnitTest1.cs | 20 +-
 .../xunit.runner.json | 6 +-
 .../Program.cs | 82 +-
 .../Properties/launchSettings.json | 46 +-
 .../StellaOps.ExportCenter.WebService.csproj | 82 +-
 .../StellaOps.ExportCenter.WebService.http | 12 +-
 .../appsettings.Development.json | 16 +-
 .../appsettings.json | 18 +-
 .../StellaOps.ExportCenter.Worker/Program.cs | 14 +-
 .../Properties/launchSettings.json | 24 +-
 .../StellaOps.ExportCenter.Worker.csproj | 86 +-
 .../StellaOps.ExportCenter.Worker/Worker.cs | 32 +-
 .../appsettings.Development.json | 16 +-
 .../appsettings.json | 16 +-
 .../StellaOps.ExportCenter.sln | 180 +-
 .../StellaOps.ExportCenter/TASKS.md | 154 +-
 .../StellaOps.Findings.Ledger/AGENTS.md | 4 +-
 .../StellaOps.Findings.Ledger/TASKS.md | 146 +-
 src/{ => Graph}/StellaOps.Graph.Api/AGENTS.md | 4 +-
 src/{ => Graph}/StellaOps.Graph.Api/TASKS.md | 32 +-
 .../StellaOps.Graph.Indexer/AGENTS.md | 4 +-
 .../StellaOps.Graph.Indexer/TASKS.md | 26 +-
 .../StellaOps.IssuerDirectory/AGENTS.md | 2 +-
 .../StellaOps.IssuerDirectory/TASKS.md | 18 +-
 .../StellaOps.Mirror.Creator/AGENTS.md | 30 +-
 .../StellaOps.Mirror.Creator/TASKS.md | 38 +-
 src/Notifier/StellaOps.Notifier.sln | 125 +
 .../StellaOps.Notifier/AGENTS.md | 34 +-
 .../EventProcessorTests.cs | 166 +-
 .../RuleEvaluatorTests.cs | 120 +-
 .../StellaOps.Notifier.Tests.csproj | 66 +-
 .../Support/InMemoryStores.cs | 346 +-
 .../xunit.runner.json | 6 +-
 .../StellaOps.Notifier.WebService/Program.cs | 48 +-
 .../Properties/launchSettings.json | 46 +-
 .../Setup/MongoInitializationHostedService.cs | 120 +-
 .../StellaOps.Notifier.WebService.csproj | 6 +-
 .../StellaOps.Notifier.WebService.http | 12 +-
 .../appsettings.Development.json | 16 +-
 .../appsettings.json | 18 +-
 .../Options/NotifierWorkerOptions.cs | 38 +-
 .../Processing/DefaultNotifyRuleEvaluator.cs | 600 +-
 .../Processing/IdempotencyKeyBuilder.cs | 60 +-
 .../MongoInitializationHostedService.cs | 120 +-
 .../Processing/NotifierEventProcessor.cs | 388 +-
 .../Processing/NotifierEventWorker.cs | 240 +-
 .../StellaOps.Notifier.Worker/Program.cs | 76 +-
 .../Properties/AssemblyInfo.cs | 6 +-
 .../Properties/launchSettings.json | 24 +-
 .../StellaOps.Notifier.Worker.csproj | 12 +-
 .../appsettings.Development.json | 16 +-
 .../appsettings.json | 16 +-
 .../StellaOps.Notifier/StellaOps.Notifier.sln | 124 +-
 .../StellaOps.Notifier/TASKS.md | 148 +-
 .../docs/NOTIFY-SVC-38-001-FOUNDATIONS.md | 46 +-
 .../StellaOps.Notify.WebService/AGENTS.md | 0
 .../Contracts/ChannelHealthResponse.cs | 34 +-
 .../Contracts/ChannelTestSendRequest.cs | 0
 .../Contracts/ChannelTestSendResponse.cs | 0
 .../Contracts/LockRequests.cs | 0
 .../Diagnostics/ServiceStatus.cs | 0
 .../Extensions/ConfigurationExtensions.cs | 0
 .../Hosting/NotifyPluginHostFactory.cs | 0
 .../Internal/JsonHttpResult.cs | 0
 .../Options/NotifyWebServiceOptions.cs | 0
 .../NotifyWebServiceOptionsPostConfigure.cs | 0
 .../NotifyWebServiceOptionsValidator.cs | 0
 .../Plugins/NotifyPluginRegistry.cs | 0
 .../Program.Partial.cs | 0
 .../StellaOps.Notify.WebService/Program.cs | 0
 .../Security/NotifyPolicies.cs | 0
 .../Security/NotifyRateLimitPolicies.cs | 0
 .../Services/NotifyChannelHealthService.cs | 364 +-
 .../Services/NotifyChannelTestService.cs | 0
 .../Services/NotifySchemaMigrationService.cs | 0
 .../StellaOps.Notify.WebService.csproj | 28 +
 .../Storage/InMemory/InMemoryStorageModule.cs | 0
 .../StellaOps.Notify.WebService/TASKS.md | 2 +
 .../StellaOps.Notify.Worker/AGENTS.md | 0
 .../Handlers/INotifyEventHandler.cs | 20 +-
 .../Handlers/NoOpNotifyEventHandler.cs | 50 +-
 .../NotifyWorkerOptions.cs | 104 +-
 .../Processing/NotifyEventLeaseProcessor.cs | 292 +-
 .../Processing/NotifyEventLeaseWorker.cs | 126 +-
 .../StellaOps.Notify.Worker/Program.cs | 66 +-
 .../Properties/AssemblyInfo.cs | 6 +-
 .../StellaOps.Notify.Worker.csproj | 48 +-
 .../StellaOps.Notify.Worker/TASKS.md | 2 +-
 .../StellaOps.Notify.Worker/appsettings.json | 86 +-
 src/Notify/StellaOps.Notify.sln | 422 +
 .../AGENTS.md | 0
 .../EmailChannelHealthProvider.cs | 118 +-
 .../EmailChannelTestProvider.cs | 0
 .../EmailMetadataBuilder.cs | 108 +-
 .../StellaOps.Notify.Connectors.Email.csproj | 19 +-
 .../TASKS.md | 2 +
 .../notify-plugin.json | 36 +-
 .../ConnectorHashing.cs | 62 +-
 .../ConnectorMetadataBuilder.cs | 294 +-
 .../ConnectorValueRedactor.cs | 150 +-
 .../StellaOps.Notify.Connectors.Shared.csproj | 24 +-
 .../AGENTS.md | 0
 .../SlackChannelHealthProvider.cs | 112 +-
 .../SlackChannelTestProvider.cs | 0
 .../SlackMetadataBuilder.cs | 154 +-
 .../StellaOps.Notify.Connectors.Slack.csproj | 19 +-
 .../TASKS.md | 2 +
 .../notify-plugin.json | 38 +-
 .../AGENTS.md | 0
 .../StellaOps.Notify.Connectors.Teams.csproj | 19 +-
 .../TASKS.md | 2 +-
 .../TeamsChannelHealthProvider.cs | 114 +-
 .../TeamsChannelTestProvider.cs | 0
 .../TeamsMetadataBuilder.cs | 178 +-
 .../notify-plugin.json | 38 +-
 .../AGENTS.md | 0
 ...StellaOps.Notify.Connectors.Webhook.csproj | 19 +-
 .../TASKS.md | 2 +
 .../WebhookChannelTestProvider.cs | 0
 .../WebhookMetadataBuilder.cs | 106 +-
 .../notify-plugin.json | 36 +-
 .../StellaOps.Notify.Engine/AGENTS.md | 0
 .../ChannelHealthContracts.cs | 102 +-
 .../ChannelTestPreviewContracts.cs | 0
 .../INotifyRuleEvaluator.cs | 56 +-
 .../NotifyRuleEvaluationOutcome.cs | 88 +-
 .../StellaOps.Notify.Engine.csproj | 0
 .../StellaOps.Notify.Engine/TASKS.md | 2 +-
 .../StellaOps.Notify.Models/AGENTS.md | 0
 .../Iso8601DurationConverter.cs | 0
 .../NotifyCanonicalJsonSerializer.cs | 0
 .../StellaOps.Notify.Models/NotifyChannel.cs | 0
 .../StellaOps.Notify.Models/NotifyDelivery.cs | 0
 .../StellaOps.Notify.Models/NotifyEnums.cs | 0
 .../StellaOps.Notify.Models/NotifyEvent.cs | 0
 .../NotifyEventKinds.cs | 0
 .../StellaOps.Notify.Models/NotifyRule.cs | 0
 .../NotifySchemaMigration.cs | 0
 .../NotifySchemaVersions.cs | 0
 .../StellaOps.Notify.Models/NotifyTemplate.cs | 0
 .../NotifyValidation.cs | 0
 .../StellaOps.Notify.Models.csproj | 0
 .../StellaOps.Notify.Models/TASKS.md | 2 +
 .../StellaOps.Notify.Queue/AGENTS.md | 0
 .../Nats/NatsNotifyDeliveryLease.cs | 160 +-
 .../Nats/NatsNotifyDeliveryQueue.cs | 1394 +-
 .../Nats/NatsNotifyEventLease.cs | 166 +-
 .../Nats/NatsNotifyEventQueue.cs | 1396 +-
 .../NotifyDeliveryQueueHealthCheck.cs | 110 +-
 .../NotifyDeliveryQueueOptions.cs | 138 +-
 .../NotifyEventQueueOptions.cs | 354 +-
 .../NotifyQueueContracts.cs | 462 +-
 .../NotifyQueueFields.cs | 36 +-
 .../NotifyQueueHealthCheck.cs | 110 +-
 .../NotifyQueueMetrics.cs | 78 +-
 .../NotifyQueueServiceCollectionExtensions.cs | 292 +-
 .../NotifyQueueTransportKind.cs | 20 +-
 .../Properties/AssemblyInfo.cs | 6 +-
 .../Redis/RedisNotifyDeliveryLease.cs | 152 +-
 .../Redis/RedisNotifyDeliveryQueue.cs | 1576 +-
 .../Redis/RedisNotifyEventLease.cs | 152 +-
 .../Redis/RedisNotifyEventQueue.cs | 1310 +-
 .../StellaOps.Notify.Queue.csproj | 46 +-
 .../StellaOps.Notify.Queue/TASKS.md | 2 +-
 .../StellaOps.Notify.Storage.Mongo/AGENTS.md | 0
 .../Documents/NotifyAuditEntryDocument.cs | 0
 .../Documents/NotifyDigestDocument.cs | 0
 .../Documents/NotifyLockDocument.cs | 0
 .../Internal/NotifyMongoContext.cs | 0
 .../Internal/NotifyMongoInitializer.cs | 0
 .../EnsureNotifyCollectionsMigration.cs | 0
 .../EnsureNotifyIndexesMigration.cs | 0
 .../Migrations/INotifyMongoMigration.cs | 0
 .../Migrations/NotifyMongoMigrationRecord.cs | 0
 .../Migrations/NotifyMongoMigrationRunner.cs | 0
 .../Options/NotifyMongoOptions.cs | 0
 .../Properties/AssemblyInfo.cs | 0
 .../Repositories/INotifyAuditRepository.cs | 0
 .../Repositories/INotifyChannelRepository.cs | 0
 .../Repositories/INotifyDeliveryRepository.cs | 0
 .../Repositories/INotifyDigestRepository.cs | 0
 .../Repositories/INotifyLockRepository.cs | 0
 .../Repositories/INotifyRuleRepository.cs | 0
 .../Repositories/INotifyTemplateRepository.cs | 0
 .../Repositories/NotifyAuditRepository.cs | 0
 .../Repositories/NotifyChannelRepository.cs | 0
 .../Repositories/NotifyDeliveryQueryResult.cs | 0
 .../Repositories/NotifyDeliveryRepository.cs | 0
 .../Repositories/NotifyDigestRepository.cs | 0
 .../Repositories/NotifyLockRepository.cs | 0
 .../Repositories/NotifyRuleRepository.cs | 0
 .../Repositories/NotifyTemplateRepository.cs | 0
 .../BsonDocumentJsonExtensions.cs | 0
 .../NotifyChannelDocumentMapper.cs | 0
 .../NotifyDeliveryDocumentMapper.cs | 0
 .../Serialization/NotifyRuleDocumentMapper.cs | 0
 .../NotifyTemplateDocumentMapper.cs | 0
 .../ServiceCollectionExtensions.cs | 0
 .../StellaOps.Notify.Storage.Mongo.csproj | 36 +-
 .../StellaOps.Notify.Storage.Mongo/TASKS.md | 2 +-
 .../EmailChannelHealthProviderTests.cs | 200 +-
 ...laOps.Notify.Connectors.Email.Tests.csproj | 9 +-
 .../SlackChannelHealthProviderTests.cs | 192 +-
 .../SlackChannelTestProviderTests.cs | 226 +-
 ...laOps.Notify.Connectors.Slack.Tests.csproj | 9 +-
 ...laOps.Notify.Connectors.Teams.Tests.csproj | 9 +-
 .../TeamsChannelHealthProviderTests.cs | 196 +-
 .../TeamsChannelTestProviderTests.cs | 270 +-
 .../DocSampleTests.cs | 0
 .../NotifyCanonicalJsonSerializerTests.cs | 0
 .../NotifyDeliveryTests.cs | 0
 .../NotifyRuleTests.cs | 0
 .../NotifySchemaMigrationTests.cs | 0
 .../PlatformEventSamplesTests.cs | 0
 .../PlatformEventSchemaValidationTests.cs | 0
 .../StellaOps.Notify.Models.Tests.csproj | 19 +-
 .../NatsNotifyDeliveryQueueTests.cs | 446 +-
 .../NatsNotifyEventQueueTests.cs | 450 +-
 .../RedisNotifyDeliveryQueueTests.cs | 394 +-
 .../RedisNotifyEventQueueTests.cs | 440 +-
 .../StellaOps.Notify.Queue.Tests.csproj | 7 +-
 .../AssemblyInfo.cs | 0
 .../GlobalUsings.cs | 0
 .../Internal/NotifyMongoMigrationTests.cs | 0
 .../NotifyAuditRepositoryTests.cs | 0
 .../NotifyChannelRepositoryTests.cs | 0
 .../NotifyDeliveryRepositoryTests.cs | 0
 .../NotifyDigestRepositoryTests.cs | 0
 .../Repositories/NotifyLockRepositoryTests.cs | 0
 .../Repositories/NotifyRuleRepositoryTests.cs | 0
 .../NotifyTemplateRepositoryTests.cs | 0
 .../NotifyChannelDocumentMapperTests.cs | 0
 .../NotifyRuleDocumentMapperTests.cs | 0
 .../NotifyTemplateDocumentMapperTests.cs | 0
 ...tellaOps.Notify.Storage.Mongo.Tests.csproj | 7 +-
 .../CrudEndpointsTests.cs | 0
 .../NormalizeEndpointsTests.cs | 0
 .../StellaOps.Notify.WebService.Tests.csproj | 37 +-
 .../NotifyEventLeaseProcessorTests.cs | 334 +-
 .../StellaOps.Notify.Worker.Tests.csproj | 9 +-
 .../AGENTS.md | 20 +-
 .../TASKS.md | 18 +-
 .../AGENTS.md | 20 +-
 .../TASKS.md | 18 +-
 src/Orchestrator/StellaOps.Orchestrator.sln | 99 +
 .../StellaOps.Orchestrator/AGENTS.md | 36 +-
 .../StellaOps.Orchestrator.Core/Class1.cs | 12 +-
 .../StellaOps.Orchestrator.Core.csproj | 36 +-
 .../Class1.cs | 12 +-
 ...ellaOps.Orchestrator.Infrastructure.csproj | 56 +-
 .../StellaOps.Orchestrator.Tests.csproj | 270 +-
 .../StellaOps.Orchestrator.Tests/UnitTest1.cs | 20 +-
 .../xunit.runner.json | 3 +
 .../Program.cs | 82 +-
 .../Properties/launchSettings.json | 46 +-
 .../StellaOps.Orchestrator.WebService.csproj | 82 +-
 .../StellaOps.Orchestrator.WebService.http | 12 +-
 .../appsettings.Development.json | 16 +-
 .../appsettings.json | 18 +-
 .../StellaOps.Orchestrator.Worker/Program.cs | 14 +-
 .../Properties/launchSettings.json | 24 +-
 .../StellaOps.Orchestrator.Worker.csproj | 86 +-
 .../StellaOps.Orchestrator.Worker/Worker.cs | 32 +-
 .../appsettings.Development.json | 16 +-
 .../appsettings.json | 16 +-
 .../StellaOps.Orchestrator.sln | 180 +-
 .../StellaOps.Orchestrator/TASKS.md | 152 +-
 src/PacksRegistry/StellaOps.PacksRegistry.sln | 99 +
 .../StellaOps.PacksRegistry/AGENTS.md | 34 +-
 .../StellaOps.PacksRegistry.Core/Class1.cs | 12 +-
 .../StellaOps.PacksRegistry.Core.csproj | 36 +-
 .../Class1.cs | 12 +-
 ...llaOps.PacksRegistry.Infrastructure.csproj | 56 +-
 .../StellaOps.PacksRegistry.Tests.csproj | 270 +-
 .../UnitTest1.cs | 20 +-
 .../xunit.runner.json | 3 +
 .../Program.cs | 82 +-
 .../Properties/launchSettings.json | 46 +-
 .../StellaOps.PacksRegistry.WebService.csproj | 82 +-
 .../StellaOps.PacksRegistry.WebService.http | 12 +-
 .../appsettings.Development.json | 8 +
 .../appsettings.json | 9 +
 .../StellaOps.PacksRegistry.Worker/Program.cs | 14 +-
 .../Properties/launchSettings.json | 24 +-
 .../StellaOps.PacksRegistry.Worker.csproj | 86 +-
 .../StellaOps.PacksRegistry.Worker/Worker.cs | 32 +-
 .../appsettings.Development.json | 8 +
 .../appsettings.json | 8 +
 .../StellaOps.PacksRegistry.sln | 180 +-
 .../StellaOps.PacksRegistry/TASKS.md | 32 +-
 .../StellaOps.Policy.Engine/AGENTS.md | 36 +-
 .../Compilation/DslToken.cs | 320 +-
 .../Compilation/DslTokenizer.cs | 1152 +-
 .../Compilation/PolicyCompiler.cs | 338 +-
 .../Compilation/PolicyDslDiagnosticCodes.cs | 38 +-
 .../Compilation/PolicyIr.cs | 122 +-
 .../Compilation/PolicyIrSerializer.cs | 830 +-
 .../Compilation/PolicyParser.cs | 1356 +-
 .../Compilation/PolicySyntaxNodes.cs | 282 +-
 .../Domain/PolicyPackRecord.cs | 202 +-
 .../Endpoints/PolicyCompilationEndpoints.cs | 214 +-
 .../Endpoints/PolicyPackEndpoints.cs | 534 +-
 .../Evaluation/PolicyEvaluationContext.cs | 284 +-
 .../Evaluation/PolicyEvaluator.cs | 840 +-
 .../Evaluation/PolicyExpressionEvaluator.cs | 1018 +-
 .../Hosting/PolicyEngineStartupDiagnostics.cs | 24 +-
 .../Options/PolicyEngineOptions.cs | 336 +-
 .../StellaOps.Policy.Engine/Program.cs | 278 +-
 .../Properties/AssemblyInfo.cs | 6 +-
 .../StellaOps.Policy.Engine/README.md | 28 +-
 .../Services/IPolicyPackRepository.cs | 58 +-
 .../Services/InMemoryPolicyPackRepository.cs | 186 +-
 .../Services/PolicyCompilationService.cs | 240 +-
 .../Services/PolicyEvaluationService.cs | 52 +-
 .../Services/ScopeAuthorization.cs | 106 +-
 .../StellaOps.Policy.Engine.csproj | 20 +
 .../StellaOps.Policy.Engine/TASKS.md | 2 +-
 .../Workers/PolicyEngineBootstrapWorker.cs | 70 +-
 .../Clients/IPolicyEngineClient.cs | 30 +-
 .../Clients/PolicyEngineClient.cs | 398 +-
 .../Clients/PolicyEngineResponse.cs | 62 +-
 .../Clients/PolicyEngineResponseExtensions.cs | 142 +-
 .../Contracts/PolicyPackContracts.cs | 90 +-
 .../GatewayForwardingContext.cs | 118 +-
 .../Options/PolicyGatewayOptions.cs | 646 +-
 .../StellaOps.Policy.Gateway/Program.cs | 812 +-
 .../Properties/AssemblyInfo.cs | 6 +-
 .../Services/PolicyEngineTokenProvider.cs | 246 +-
 .../Services/PolicyGatewayAuthorization.cs | 48 +-
 .../Services/PolicyGatewayDpopHandler.cs | 84 +-
 .../PolicyGatewayDpopProofGenerator.cs | 470 +-
 .../Services/PolicyGatewayMetrics.cs | 102 +-
 .../StellaOps.Policy.Gateway.csproj | 23 +
 .../StellaOps.Policy.Registry/AGENTS.md | 8 +-
 .../StellaOps.Policy.Registry/TASKS.md | 34 +-
 .../StellaOps.Policy.RiskProfile/AGENTS.md | 30 +-
 .../StellaOps.Policy.RiskProfile/TASKS.md | 40 +-
 src/Policy/StellaOps.Policy.sln | 212 +
 .../__Libraries}/StellaOps.Policy/AGENTS.md | 0
 .../Audit/IPolicyAuditRepository.cs | 0
 .../Audit/InMemoryPolicyAuditRepository.cs | 0
 .../StellaOps.Policy/PolicyAuditEntry.cs | 0
 .../StellaOps.Policy/PolicyBinder.cs | 0
 .../StellaOps.Policy/PolicyDiagnostics.cs | 0
 .../StellaOps.Policy/PolicyDigest.cs | 0
 .../StellaOps.Policy/PolicyDocument.cs | 0
 .../StellaOps.Policy/PolicyEvaluation.cs | 0
 .../StellaOps.Policy/PolicyFinding.cs | 0
 .../StellaOps.Policy/PolicyIssue.cs | 0
 .../StellaOps.Policy/PolicyPreviewModels.cs | 0
 .../StellaOps.Policy/PolicyPreviewService.cs | 0
 .../StellaOps.Policy/PolicySchemaResource.cs | 0
 .../StellaOps.Policy/PolicyScoringConfig.cs | 0
 .../PolicyScoringConfigBinder.cs | 0
 .../PolicyScoringConfigDigest.cs | 0
 .../StellaOps.Policy/PolicyScoringSchema.cs | 0
 .../StellaOps.Policy/PolicySnapshot.cs | 0
 .../StellaOps.Policy/PolicySnapshotStore.cs | 0
 .../PolicyUnknownConfidenceConfig.cs | 0
 .../StellaOps.Policy/PolicyValidationCli.cs | 0
 .../StellaOps.Policy/PolicyVerdict.cs | 0
 .../Schemas/policy-schema@1.json | 0
 .../Schemas/policy-scoring-default.json | 0
 .../Schemas/policy-scoring-schema@1.json | 0
 .../StellaOps.Policy/StellaOps.Policy.csproj | 44 +-
 .../Storage/IPolicySnapshotRepository.cs | 0
 .../InMemoryPolicySnapshotRepository.cs | 0
 .../__Libraries}/StellaOps.Policy/TASKS.md | 2 +-
 .../PolicyCompilerTests.cs | 208 +-
 .../PolicyEvaluatorTests.cs | 582 +-
 .../PolicyPackRepositoryTests.cs | 88 +-
 .../StellaOps.Policy.Engine.Tests.csproj | 5 +-
 .../GatewayActivationTests.cs | 1096 +-
 .../PolicyEngineClientTests.cs | 424 +-
 .../PolicyGatewayDpopProofGeneratorTests.cs | 334 +-
 .../StellaOps.Policy.Gateway.Tests.csproj | 5 +-
 .../PolicyBinderTests.cs | 0
 .../PolicyEvaluationTests.cs | 0
 .../PolicyPreviewServiceTests.cs | 0
 .../PolicyScoringConfigTests.cs | 0
 .../PolicySnapshotStoreTests.cs | 0
 .../StellaOps.Policy.Tests.csproj | 27 +-
 .../AGENTS.md | 40 +-
 .../StellaOps.Provenance.Attestation/TASKS.md | 26 +-
 .../Observability/RegistryTokenMetrics.cs | 68 +-
 .../PlanRegistry.cs | 300 +-
 .../Program.cs | 342 +-
 .../Properties/launchSettings.json | 28 +-
 .../RegistryAccessModels.cs | 26 +-
 .../RegistryScopeParser.cs | 186 +-
 .../RegistryTokenIssuer.cs | 258 +-
 .../RegistryTokenServiceOptions.cs | 642 +-
 .../Security/SigningKeyLoader.cs | 132 +-
 .../StellaOps.Registry.TokenService.csproj | 11 +-
 .../appsettings.Development.json | 8 +
 .../appsettings.json | 9 +
 src/Registry/StellaOps.Registry.sln | 137 +
 .../PlanRegistryTests.cs | 218 +-
 .../RegistryScopeParserTests.cs | 76 +-
 .../RegistryTokenIssuerTests.cs | 220 +-
 ...ellaOps.Registry.TokenService.Tests.csproj | 29 +
 .../UnitTest1.cs | 20 +-
 .../xunit.runner.json | 3 +
 src/RiskEngine/StellaOps.RiskEngine.sln | 99 +
 .../StellaOps.RiskEngine/AGENTS.md | 46 +-
 .../StellaOps.RiskEngine.Core/Class1.cs | 12 +-
 .../StellaOps.RiskEngine.Core.csproj | 36 +-
 .../Class1.cs | 12 +-
 ...StellaOps.RiskEngine.Infrastructure.csproj | 56 +-
 .../StellaOps.RiskEngine.Tests.csproj | 270 +-
 .../StellaOps.RiskEngine.Tests/UnitTest1.cs | 20 +-
 .../xunit.runner.json | 3 +
 .../Program.cs | 41 +
 .../Properties/launchSettings.json | 46 +-
 .../StellaOps.RiskEngine.WebService.csproj | 82 +-
 .../StellaOps.RiskEngine.WebService.http | 12 +-
 .../appsettings.Development.json | 8 +
 .../appsettings.json | 9 +
 .../StellaOps.RiskEngine.Worker/Program.cs | 14 +-
 .../Properties/launchSettings.json | 24 +-
 .../StellaOps.RiskEngine.Worker.csproj | 86 +-
 .../StellaOps.RiskEngine.Worker/Worker.cs | 32 +-
 .../appsettings.Development.json | 8 +
 .../appsettings.json | 8 +
 .../StellaOps.RiskEngine.sln | 180 +-
 .../StellaOps.RiskEngine/TASKS.md | 64 +-
 src/SbomService/StellaOps.SbomService.sln | 104 +
 .../StellaOps.SbomService/AGENTS.md | 30 +-
 .../StellaOps.SbomService/Program.cs | 34 +-
 .../StellaOps.SbomService.csproj | 7 +-
 .../StellaOps.SbomService/TASKS.md | 94 +-
 .../TASKS.md | 42 +-
 .../TASKS.md | 44 +-
 .../TASKS.md | 44 +-
 .../TASKS.md | 40 +-
 .../AGENTS.md | 0
 .../Attestation/AttestorClient.cs | 0
 .../Attestation/AttestorProvenanceRequest.cs | 0
 .../BuildxPluginException.cs | 0
 .../Cas/CasWriteResult.cs | 0
 .../Cas/LocalCasClient.cs | 0
 .../Cas/LocalCasOptions.cs | 0
 .../Descriptor/DescriptorArtifact.cs | 0
 .../Descriptor/DescriptorDocument.cs | 0
 .../Descriptor/DescriptorGenerator.cs | 0
 .../Descriptor/DescriptorGeneratorMetadata.cs | 0
 .../Descriptor/DescriptorProvenance.cs | 0
 .../Descriptor/DescriptorRequest.cs | 0
 .../Descriptor/DescriptorSubject.cs | 0
 .../Manifest/BuildxPluginCas.cs | 0
 .../Manifest/BuildxPluginEntryPoint.cs | 0
 .../Manifest/BuildxPluginImage.cs | 0
 .../Manifest/BuildxPluginManifest.cs | 0
 .../Manifest/BuildxPluginManifestLoader.cs | 0
 .../Program.cs | 0
 ...ellaOps.Scanner.Sbomer.BuildXPlugin.csproj | 0
 .../TASKS.md | 0
 .../stellaops.sbom-indexer.manifest.json | 0
 .../AssemblyInfo.cs | 0
 .../Constants/ProblemTypes.cs | 0
 .../Contracts/OrchestratorEventContracts.cs | 554 +-
 .../Contracts/PolicyDiagnosticsContracts.cs | 0
 .../Contracts/PolicyPreviewContracts.cs | 0
 .../Contracts/ReportContracts.cs | 0
 .../Contracts/RuntimeEventsContracts.cs | 44 +-
 .../Contracts/RuntimePolicyContracts.cs | 182 +-
 .../Contracts/ScanStatusResponse.cs | 0
 .../Contracts/ScanSubmitRequest.cs | 0
 .../Contracts/ScanSubmitResponse.cs | 0
 .../Diagnostics/ServiceStatus.cs | 0
 .../Domain/ScanId.cs | 0
 .../Domain/ScanProgressEvent.cs | 0
 .../Domain/ScanSnapshot.cs | 0
 .../Domain/ScanStatus.cs | 0
 .../Domain/ScanSubmission.cs | 0
 .../Domain/ScanTarget.cs | 0
 .../Endpoints/HealthEndpoints.cs | 0
 .../Endpoints/PolicyEndpoints.cs | 0
 .../Endpoints/ReportEndpoints.cs | 0
 .../Endpoints/RuntimeEndpoints.cs | 506 +-
 .../Endpoints/ScanEndpoints.cs | 0
 .../Extensions/ConfigurationExtensions.cs | 0
 .../OpenApiRegistrationExtensions.cs | 0
 .../Hosting/ScannerPluginHostFactory.cs | 0
 .../Infrastructure/ProblemResultFactory.cs | 0
 .../Options/ScannerWebServiceOptions.cs | 0
 .../ScannerWebServiceOptionsPostConfigure.cs | 0
 .../ScannerWebServiceOptionsValidator.cs | 0
 .../StellaOps.Scanner.WebService/Program.cs | 0
 .../AnonymousAuthenticationHandler.cs | 0
 .../Security/ScannerAuthorityScopes.cs | 0
 .../Security/ScannerPolicies.cs | 0
 .../OrchestratorEventSerializer.cs | 396 +-
 .../Services/IPlatformEventPublisher.cs | 0
 .../Services/IRedisConnectionFactory.cs | 26 +-
 .../Services/IReportEventDispatcher.cs | 0
 .../Services/IScanCoordinator.cs | 0
 .../Services/InMemoryScanCoordinator.cs | 0
 .../Services/NullPlatformEventPublisher.cs | 0
 .../Services/PolicyDtoMapper.cs | 0
 .../Services/RedisConnectionFactory.cs | 38 +-
 .../Services/RedisPlatformEventPublisher.cs | 0
 .../Services/ReportEventDispatcher.cs | 1166 +-
 .../Services/ReportSigner.cs | 0
 .../Services/RuntimeEventIngestionService.cs | 430 +-
 .../Services/RuntimeEventRateLimiter.cs | 346 +-
 .../Services/RuntimePolicyService.cs | 1026 +-
 .../Services/ScanProgressStream.cs | 0
 .../StellaOps.Scanner.WebService.csproj | 34 +
 .../StellaOps.Scanner.WebService/TASKS.md | 26 +-
 .../Utilities/ScanIdGenerator.cs | 0
 .../StellaOps.Scanner.Worker/AGENTS.md | 0
 .../ScannerWorkerInstrumentation.cs | 0
 .../Diagnostics/ScannerWorkerMetrics.cs | 0
 .../Diagnostics/TelemetryExtensions.cs | 0
 .../Hosting/ScannerWorkerHostedService.cs | 0
 .../Options/ScannerWorkerOptions.cs | 0
 .../Options/ScannerWorkerOptionsValidator.cs | 0
 .../Processing/AnalyzerStageExecutor.cs | 0
 .../CompositeScanAnalyzerDispatcher.cs | 562 +-
 .../Processing/EntryTraceExecutionService.cs | 604 +-
 .../Processing/IDelayScheduler.cs | 0
 .../Processing/IEntryTraceExecutionService.cs | 18 +-
 .../Processing/IScanAnalyzerDispatcher.cs | 0
 .../Processing/IScanJobLease.cs | 0
 .../Processing/IScanJobSource.cs | 0
 .../Processing/IScanStageExecutor.cs | 0
 .../Processing/LeaseHeartbeatService.cs | 0
 .../Processing/NoOpStageExecutor.cs | 0
 .../Processing/NullScanJobSource.cs | 0
 .../Processing/PollDelayStrategy.cs | 0
 .../Processing/ScanJobContext.cs | 0
 .../Processing/ScanJobProcessor.cs | 0
 .../Processing/ScanProgressReporter.cs | 0
 .../Processing/ScanStageNames.cs | 0
 .../Processing/SystemDelayScheduler.cs | 0
 .../StellaOps.Scanner.Worker/Program.cs | 0
 .../Properties/AssemblyInfo.cs | 6 +-
 .../StellaOps.Scanner.Worker.csproj | 45 +-
 .../StellaOps.Scanner.Worker/TASKS.md | 0
 src/Scanner/StellaOps.Scanner.sln | 775 +
 .../AGENTS.md | 0
 .../DotNetAnalyzerPlugin.cs | 34 +-
 .../DotNetLanguageAnalyzer.cs | 74 +-
 .../GlobalUsings.cs | 0
 .../IDotNetAuthenticodeInspector.cs | 0
 .../Internal/DotNetDependencyCollector.cs | 0
 .../Internal/DotNetDepsFile.cs | 572 +-
 .../Internal/DotNetFileCaches.cs | 664 +-
 .../Internal/DotNetRuntimeConfig.cs | 316 +-
 ...laOps.Scanner.Analyzers.Lang.DotNet.csproj | 0
 .../TASKS.md | 38 +-
 .../manifest.json | 46 +-
 .../AGENTS.md | 2 +-
 .../GlobalUsings.cs | 0
 .../GoAnalyzerPlugin.cs | 34 +-
 .../GoLanguageAnalyzer.cs | 770 +-
 .../Internal/GoAnalyzerMetrics.cs | 60 +-
 .../Internal/GoBinaryScanner.cs | 528 +-
 .../Internal/GoBuildInfo.cs | 160 +-
 .../Internal/GoBuildInfoDecoder.cs | 318 +-
 .../Internal/GoBuildInfoParser.cs | 468 +-
 .../Internal/GoBuildInfoProvider.cs | 164 +-
 .../Internal/GoDwarfMetadata.cs | 66 +-
 .../Internal/GoDwarfReader.cs | 240 +-
 .../Internal/GoModule.cs | 134 +-
 .../GoStrippedBinaryClassification.cs | 0
 ...StellaOps.Scanner.Analyzers.Lang.Go.csproj | 0
 .../TASKS.md | 0
 .../manifest.json | 46 +-
 .../GlobalUsings.cs | 0
 .../Internal/ClassPath/JavaClassLocation.cs | 124 +-
 .../ClassPath/JavaClassPathAnalysis.cs | 204 +-
 .../ClassPath/JavaClassPathBuilder.cs | 1320 +-
 .../ClassPath/JavaModuleDescriptor.cs | 44 +-
 .../ClassPath/JavaModuleInfoParser.cs | 734 +-
 .../Internal/JavaArchive.cs | 528 +-
 .../Internal/JavaArchiveEntry.cs | 16 +-
 .../Internal/JavaPackagingKind.cs | 24 +-
 .../Internal/JavaReleaseFileParser.cs | 136 +-
 .../Internal/JavaRuntimeImage.cs | 14 +-
 .../Internal/JavaWorkspace.cs | 56 +-
 .../Internal/JavaWorkspaceNormalizer.cs | 202 +-
 .../Internal/JavaZipEntryUtilities.cs | 104 +-
 .../Reflection/JavaReflectionAnalysis.cs | 88 +-
 .../Reflection/JavaReflectionAnalyzer.cs | 1432 +-
 .../JavaServiceProviderScanner.cs | 320 +-
 .../ServiceProviders/JavaSpiCatalog.cs | 206 +-
 .../ServiceProviders/java-spi-catalog.json | 104 +-
 .../JavaLanguageAnalyzer.cs | 0
 .../Properties/AssemblyInfo.cs | 6 +-
 ...ellaOps.Scanner.Analyzers.Lang.Java.csproj | 0
 .../TASKS.md | 62 +-
 .../manifest.json | 0
 .../AGENTS.md | 2 +-
 .../GlobalUsings.cs | 0
 .../Internal/NodeAnalyzerMetrics.cs | 0
 .../Internal/NodeLifecycleScript.cs | 0
 .../Internal/NodeLockData.cs | 0
 .../Internal/NodeLockEntry.cs | 0
 .../Internal/NodePackage.cs | 0
 .../Internal/NodePackageCollector.cs | 0
 .../Internal/NodeWorkspaceIndex.cs | 0
 .../NodeAnalyzerPlugin.cs | 36 +-
 .../NodeLanguageAnalyzer.cs | 0
 ...ellaOps.Scanner.Analyzers.Lang.Node.csproj | 0
 .../TASKS.md | 62 +-
 .../manifest.json | 44 +-
 .../AGENTS.md | 0
 .../GlobalUsings.cs | 0
 .../Internal/PythonDistributionLoader.cs | 1856 +-
 .../PythonAnalyzerPlugin.cs | 34 +-
 .../PythonLanguageAnalyzer.cs | 144 +-
 ...laOps.Scanner.Analyzers.Lang.Python.csproj | 0
 .../TASKS.md | 62 +-
 .../manifest.json | 46 +-
 .../AGENTS.md | 0
 .../GlobalUsings.cs | 0
 .../Internal/RustAnalyzerCollector.cs | 0
 .../Internal/RustBinaryClassifier.cs | 0
 .../Internal/RustCargoLockParser.cs | 0
 .../Internal/RustFingerprintScanner.cs | 0
 .../RustAnalyzerPlugin.cs | 28 +-
 .../RustLanguageAnalyzer.cs | 6 +-
 ...ellaOps.Scanner.Analyzers.Lang.Rust.csproj | 0
 .../TASKS.md | 0
 .../manifest.json | 46 +-
 .../AGENTS.md | 0
 .../Core/ILanguageAnalyzer.cs | 0
 .../Core/Internal/LanguageAnalyzerJson.cs | 0
 .../Core/LanguageAnalyzerContext.cs | 0
 .../Core/LanguageAnalyzerEngine.cs | 0
 .../Core/LanguageAnalyzerResult.cs | 0
 .../Core/LanguageComponentEvidence.cs | 0
 .../Core/LanguageComponentMapper.cs | 0
 .../Core/LanguageComponentRecord.cs | 0
 .../Core/LanguageUsageHints.cs | 0
 .../GlobalUsings.cs | 0
 .../Plugin/ILanguageAnalyzerPlugin.cs | 30 +-
 .../Plugin/LanguageAnalyzerPluginCatalog.cs | 294 +-
 .../SPRINTS_LANG_IMPLEMENTATION_PLAN.md | 8 +-
 .../StellaOps.Scanner.Analyzers.Lang.csproj | 35 +-
 .../StellaOps.Scanner.Analyzers.Lang/TASKS.md | 0
 .../ApkAnalyzerPlugin.cs | 0
 .../ApkDatabaseParser.cs | 0
 .../ApkPackageAnalyzer.cs | 0
 .../Properties/AssemblyInfo.cs | 0
 .../StellaOps.Scanner.Analyzers.OS.Apk.csproj | 30 +-
 .../manifest.json | 0
 .../DpkgAnalyzerPlugin.cs | 0
 .../DpkgPackageAnalyzer.cs | 0
 .../DpkgStatusParser.cs | 0
 .../Properties/AssemblyInfo.cs | 0
 ...StellaOps.Scanner.Analyzers.OS.Dpkg.csproj | 30 +-
 .../manifest.json | 0
 .../IRpmDatabaseReader.cs | 0
 .../Internal/RpmHeader.cs | 0
 .../Internal/RpmHeaderParser.cs | 0
 .../Internal/RpmTags.cs | 0
 .../Properties/AssemblyInfo.cs | 0
 .../RpmAnalyzerPlugin.cs | 0
 .../RpmDatabaseReader.cs | 0
 .../RpmPackageAnalyzer.cs | 0
 .../StellaOps.Scanner.Analyzers.OS.Rpm.csproj | 32 +-
 .../manifest.json | 0
 .../StellaOps.Scanner.Analyzers.OS/AGENTS.md | 0
 .../Abstractions/IOSPackageAnalyzer.cs | 0
 .../Analyzers/OsPackageAnalyzerBase.cs | 0
 .../Helpers/CveHintExtractor.cs | 0
 .../Helpers/PackageUrlBuilder.cs | 0
 .../Helpers/PackageVersionParser.cs | 0
 .../Mapping/OsComponentMapper.cs | 0
 .../Model/AnalyzerWarning.cs | 0
 .../Model/OSAnalyzerTelemetry.cs | 0
 .../Model/OSPackageAnalyzerContext.cs | 0
 .../Model/OSPackageAnalyzerResult.cs | 0
 .../Model/OSPackageFileEvidence.cs | 0
 .../Model/OSPackageRecord.cs | 0
 .../Model/PackageEvidenceSource.cs | 0
 .../Plugin/IOSAnalyzerPlugin.cs | 0
 .../Plugin/OsAnalyzerPluginCatalog.cs | 0
 .../Properties/AssemblyInfo.cs | 0
 .../StellaOps.Scanner.Analyzers.OS.csproj | 5 +-
 .../StellaOps.Scanner.Analyzers.OS/TASKS.md | 0
.../StellaOps.Scanner.Cache/AGENTS.md | 0 .../IFileContentAddressableStore.cs | 0 .../Abstractions/ILayerCacheStore.cs | 0 .../Abstractions/LayerCacheEntry.cs | 0 .../Abstractions/LayerCachePutRequest.cs | 0 .../FileCas/FileContentAddressableStore.cs | 0 .../NullFileContentAddressableStore.cs | 0 .../LayerCache/LayerCacheStore.cs | 0 .../ScannerCacheMaintenanceService.cs | 0 .../ScannerCacheMetrics.cs | 0 .../ScannerCacheOptions.cs | 0 ...ScannerCacheServiceCollectionExtensions.cs | 0 .../StellaOps.Scanner.Cache.csproj | 38 +- .../StellaOps.Scanner.Cache/TASKS.md | 0 .../StellaOps.Scanner.Core/AGENTS.md | 0 .../Contracts/ComponentGraph.cs | 0 .../Contracts/ComponentModels.cs | 0 .../Contracts/SbomView.cs | 0 .../Contracts/ScanAnalysisKeys.cs | 0 .../Contracts/ScanAnalysisStore.cs | 0 .../Contracts/ScanAnalysisStoreExtensions.cs | 0 .../Contracts/ScanJob.cs | 0 .../Contracts/ScanJobIdJsonConverter.cs | 0 .../Contracts/ScanMetadataKeys.cs | 0 .../Contracts/ScanProgressEvent.cs | 0 .../Contracts/ScannerError.cs | 0 .../ScannerCorrelationContext.cs | 0 .../Observability/ScannerDiagnostics.cs | 0 .../Observability/ScannerLogExtensions.cs | 0 .../Observability/ScannerMetricNames.cs | 0 .../Security/AuthorityTokenSource.cs | 0 .../Security/IAuthorityTokenSource.cs | 0 .../Security/IPluginCatalogGuard.cs | 0 .../Security/RestartOnlyPluginGuard.cs | 0 .../Security/ScannerOperationalToken.cs | 0 .../Security/ServiceCollectionExtensions.cs | 0 .../Serialization/ScannerJsonOptions.cs | 0 .../StellaOps.Scanner.Core.csproj | 7 +- .../StellaOps.Scanner.Core/TASKS.md | 0 .../Utility/ScannerIdentifiers.cs | 0 .../Utility/ScannerTimestamps.cs | 0 .../StellaOps.Scanner.Diff/AGENTS.md | 0 .../ComponentDiffModels.cs | 0 .../StellaOps.Scanner.Diff/ComponentDiffer.cs | 0 .../DiffJsonSerializer.cs | 0 .../StellaOps.Scanner.Diff.csproj | 0 .../StellaOps.Scanner.Diff/TASKS.md | 0 .../StellaOps.Scanner.Emit/AGENTS.md | 0 .../Composition/CycloneDxComposer.cs | 0 .../Composition/SbomCompositionRequest.cs | 0 .../Composition/SbomCompositionResult.cs | 0 .../Composition/SbomPolicyFinding.cs | 130 +- .../ScanAnalysisCompositionBuilder.cs | 0 .../Index/BomIndexBuilder.cs | 0 .../ScannerArtifactPackageBuilder.cs | 0 .../StellaOps.Scanner.Emit.csproj | 0 .../StellaOps.Scanner.Emit/TASKS.md | 0 .../StellaOps.Scanner.EntryTrace/AGENTS.md | 0 .../Diagnostics/EntryTraceMetrics.cs | 0 .../EntryTraceAnalyzer.cs | 0 .../EntryTraceAnalyzerOptions.cs | 0 .../EntryTraceContext.cs | 0 .../EntryTraceImageContextFactory.cs | 356 +- .../EntryTraceTypes.cs | 0 .../EntrypointSpecification.cs | 0 .../FileSystem/IRootFileSystem.cs | 0 .../FileSystem/LayeredRootFileSystem.cs | 1542 +- .../IEntryTraceAnalyzer.cs | 0 .../Oci/OciImageConfig.cs | 258 +- .../Parsing/ShellNodes.cs | 0 .../Parsing/ShellParser.cs | 0 .../Parsing/ShellToken.cs | 0 .../Parsing/ShellTokenizer.cs | 0 .../ServiceCollectionExtensions.cs | 0 .../StellaOps.Scanner.EntryTrace.csproj | 5 +- .../StellaOps.Scanner.EntryTrace/TASKS.md | 0 .../StellaOps.Scanner.Queue/AGENTS.md | 0 .../StellaOps.Scanner.Queue/IScanQueue.cs | 0 .../IScanQueueLease.cs | 0 .../Nats/NatsScanQueue.cs | 0 .../Nats/NatsScanQueueLease.cs | 0 .../QueueEnvelopeFields.cs | 0 .../StellaOps.Scanner.Queue/QueueMetrics.cs | 0 .../QueueTransportKind.cs | 0 .../Redis/RedisScanQueue.cs | 0 .../Redis/RedisScanQueueLease.cs | 0 .../ScanQueueContracts.cs | 0 .../ScannerQueueHealthCheck.cs | 0 .../ScannerQueueOptions.cs | 0 ...ScannerQueueServiceCollectionExtensions.cs | 0 .../StellaOps.Scanner.Queue.csproj | 42 +- 
.../StellaOps.Scanner.Queue/TASKS.md | 0 .../StellaOps.Scanner.Storage/AGENTS.md | 0 .../Catalog/ArtifactDocument.cs | 0 .../Catalog/CatalogIdFactory.cs | 0 .../Catalog/ImageDocument.cs | 0 .../Catalog/JobDocument.cs | 0 .../Catalog/LayerDocument.cs | 0 .../Catalog/LifecycleRuleDocument.cs | 0 .../Catalog/LinkDocument.cs | 0 .../Catalog/RuntimeEventDocument.cs | 178 +- .../Extensions/ServiceCollectionExtensions.cs | 0 .../EnsureLifecycleRuleTtlMigration.cs | 0 .../Migrations/IMongoMigration.cs | 0 .../Migrations/MongoMigrationDocument.cs | 0 .../Migrations/MongoMigrationRunner.cs | 0 .../Mongo/MongoBootstrapper.cs | 0 .../Mongo/MongoCollectionProvider.cs | 0 .../ObjectStore/IArtifactObjectStore.cs | 0 .../ObjectStore/RustFsArtifactObjectStore.cs | 0 .../ObjectStore/S3ArtifactObjectStore.cs | 0 .../Repositories/ArtifactRepository.cs | 0 .../Repositories/ImageRepository.cs | 0 .../Repositories/JobRepository.cs | 0 .../Repositories/LayerRepository.cs | 0 .../Repositories/LifecycleRuleRepository.cs | 0 .../Repositories/LinkRepository.cs | 0 .../Repositories/RuntimeEventRepository.cs | 264 +- .../ScannerStorageDefaults.cs | 0 .../ScannerStorageOptions.cs | 0 .../Services/ArtifactStorageService.cs | 0 .../StellaOps.Scanner.Storage.csproj | 36 +- .../StellaOps.Scanner.Storage/TASKS.md | 0 .../Fixtures/lang/go/basic/app | Bin .../Fixtures/lang/go/basic/expected.json | 234 +- .../Fixtures/lang/go/dwarf-only/app | Bin .../Fixtures/lang/go/dwarf-only/expected.json | 158 +- .../Fixtures/lang/go/stripped/app | 0 .../Fixtures/lang/go/stripped/expected.json | 60 +- .../Go/GoLanguageAnalyzerTests.cs | 268 +- ...Ops.Scanner.Analyzers.Lang.Go.Tests.csproj | 9 +- .../Fixtures/java/basic/expected.json | 70 +- .../Java/JavaClassPathBuilderTests.cs | 344 +- .../Java/JavaLanguageAnalyzerTests.cs | 66 +- .../Java/JavaReflectionAnalyzerTests.cs | 204 +- .../Java/JavaServiceProviderScannerTests.cs | 294 +- .../Java/JavaWorkspaceNormalizerTests.cs | 186 +- ...s.Scanner.Analyzers.Lang.Java.Tests.csproj | 9 +- .../lang/node/workspaces/expected.json | 268 +- .../lang/node/workspaces/package-lock.json | 98 +- .../lang/node/workspaces/package.json | 20 +- .../node/workspaces/packages/app/package.json | 22 +- .../workspaces/packages/app/scripts/setup.js | 2 +- .../node/workspaces/packages/lib/package.json | 14 +- .../workspaces/packages/shared/package.json | 14 +- .../Node/NodeLanguageAnalyzerTests.cs | 54 +- ...s.Scanner.Analyzers.Lang.Node.Tests.csproj | 9 +- .../python/layered-editable/expected.json | 0 .../layered-2.0.dist-info/INSTALLER | 0 .../layered-2.0.dist-info/METADATA | 0 .../layered-2.0.dist-info/RECORD | 0 .../site-packages/layered-2.0.dist-info/WHEEL | 0 .../layered-2.0.dist-info/entry_points.txt | 0 .../site-packages/layered/__init__.py | 0 .../python3.11/site-packages/layered/cli.py | 0 .../python3.11/site-packages/layered/core.py | 0 .../usr/lib/python3.11/site-packages/LICENSE | 0 .../layered-2.0.dist-info/INSTALLER | 0 .../layered-2.0.dist-info/METADATA | 0 .../layered-2.0.dist-info/RECORD | 0 .../site-packages/layered-2.0.dist-info/WHEEL | 0 .../layered-2.0.dist-info/direct_url.json | 0 .../layered-2.0.dist-info/entry_points.txt | 0 .../site-packages/layered/plugins/__init__.py | 0 .../site-packages/layered/plugins/plugin.py | 0 .../lang/python/pip-cache/expected.json | 0 .../cache_pkg-1.2.3.data/scripts/cache-tool | 0 .../cache_pkg-1.2.3.dist-info/INSTALLER | 0 .../cache_pkg-1.2.3.dist-info/METADATA | 0 .../cache_pkg-1.2.3.dist-info/RECORD | 0 .../cache_pkg-1.2.3.dist-info/WHEEL | 0 
.../entry_points.txt | 0 .../site-packages/cache_pkg/LICENSE | 0 .../site-packages/cache_pkg/__init__.py | 0 .../site-packages/cache_pkg/data/config.json | 0 .../site-packages/cache_pkg/md5only.txt | 0 .../lang/python/simple-venv/expected.json | 0 .../simple-1.0.0.dist-info/INSTALLER | 0 .../simple-1.0.0.dist-info/METADATA | 0 .../simple-1.0.0.dist-info/RECORD | 0 .../simple-1.0.0.dist-info/WHEEL | 0 .../simple-1.0.0.dist-info/direct_url.json | 0 .../simple-1.0.0.dist-info/entry_points.txt | 0 .../site-packages/simple/__init__.py | 0 .../site-packages/simple/__main__.py | 0 .../python3.11/site-packages/simple/core.py | 0 .../Python/PythonLanguageAnalyzerTests.cs | 48 +- ...Scanner.Analyzers.Lang.Python.Tests.csproj | 9 +- .../Core/LanguageAnalyzerResultTests.cs | 0 .../Core/LanguageComponentMapperTests.cs | 0 .../LanguageAnalyzerHarnessTests.cs | 0 .../DotNet/DotNetLanguageAnalyzerTests.cs | 0 .../Fixtures/determinism/basic/expected.json | 0 .../determinism/basic/input/placeholder.txt | 0 .../Fixtures/lang/dotnet/multi/AppA.deps.json | 168 +- .../lang/dotnet/multi/AppA.runtimeconfig.json | 78 +- .../Fixtures/lang/dotnet/multi/AppB.deps.json | 152 +- .../lang/dotnet/multi/AppB.runtimeconfig.json | 76 +- .../Fixtures/lang/dotnet/multi/expected.json | 238 +- .../stellaops.logging/2.5.1/LICENSE.txt | 30 +- .../2.5.1/stellaops.logging.nuspec | 24 +- .../stellaops.toolkit/1.2.3/LICENSE.txt | 14 +- .../1.2.3/stellaops.toolkit.nuspec | 22 +- .../lang/dotnet/selfcontained/MyApp.deps.json | 0 .../selfcontained/MyApp.runtimeconfig.json | 0 .../lang/dotnet/selfcontained/expected.json | 186 +- .../stellaops.runtime.selfcontained.nuspec | 22 +- .../stellaops.toolkit/1.2.3/LICENSE.txt | 12 +- .../1.2.3/stellaops.toolkit.nuspec | 22 +- .../linux-x64/native/libstellaopsnative.so | 0 .../lang/dotnet/signed/Signed.App.deps.json | 0 .../signed/Signed.App.runtimeconfig.json | 0 .../Fixtures/lang/dotnet/signed/expected.json | 78 +- .../9.0.0/microsoft.extensions.logging.nuspec | 22 +- .../lang/dotnet/simple/Sample.App.deps.json | 146 +- .../simple/Sample.App.runtimeconfig.json | 70 +- .../Fixtures/lang/dotnet/simple/expected.json | 172 +- .../9.0.0/microsoft.extensions.logging.nuspec | 22 +- .../stellaops.toolkit/1.2.3/LICENSE.txt | 14 +- .../1.2.3/stellaops.toolkit.nuspec | 22 +- .../Fixtures/lang/rust/simple/Cargo.lock | 0 .../Fixtures/lang/rust/simple/expected.json | 122 +- .../bin-my_app-1234567890abcdef.json | 0 .../libserde-abcdef1234567890.json | 0 .../Harness/LanguageAnalyzerTestHarness.cs | 0 .../Rust/RustLanguageAnalyzerTests.cs | 0 ...llaOps.Scanner.Analyzers.Lang.Tests.csproj | 13 +- .../TestUtilities/JavaClassFileFactory.cs | 404 +- .../TestUtilities/JavaFixtureBuilder.cs | 0 .../TestUtilities/TestPaths.cs | 0 .../xunit.runner.json | 3 + .../Fixtures/apk/lib/apk/db/installed | 0 .../dpkg/var/lib/dpkg/info/bash.conffiles | 0 .../Fixtures/dpkg/var/lib/dpkg/info/bash.list | 0 .../dpkg/var/lib/dpkg/info/bash.md5sums | 0 .../Fixtures/dpkg/var/lib/dpkg/status | 0 .../Fixtures/goldens/apk.json | 0 .../Fixtures/goldens/dpkg.json | 0 .../Fixtures/goldens/rpm.json | 0 .../Mapping/OsComponentMapperTests.cs | 0 .../OsAnalyzerDeterminismTests.cs | 0 ...tellaOps.Scanner.Analyzers.OS.Tests.csproj | 11 +- .../TestUtilities/FixtureManager.cs | 0 .../TestUtilities/GoldenAssert.cs | 0 .../TestUtilities/SnapshotSerializer.cs | 0 .../LayerCacheRoundTripTests.cs | 0 .../StellaOps.Scanner.Cache.Tests.csproj | 5 +- .../Contracts/ComponentGraphBuilderTests.cs | 0 .../Contracts/ComponentModelsTests.cs | 0 
.../Contracts/ScanJobTests.cs | 0 .../Contracts/ScannerCoreContractsTests.cs | 0 .../Fixtures/scan-job.json | 0 .../Fixtures/scan-progress-event.json | 0 .../Fixtures/scanner-error.json | 0 .../ScannerLogExtensionsPerformanceTests.cs | 0 .../ScannerLogExtensionsTests.cs | 0 .../Security/AuthorityTokenSourceTests.cs | 190 +- .../Security/DpopProofValidatorTests.cs | 0 .../Security/RestartOnlyPluginGuardTests.cs | 0 .../StellaOps.Scanner.Core.Tests.csproj | 16 + .../Utility/ScannerIdentifiersTests.cs | 0 .../Utility/ScannerTimestampsTests.cs | 0 .../ComponentDifferTests.cs | 0 .../StellaOps.Scanner.Diff.Tests.csproj | 23 +- .../Composition/CycloneDxComposerTests.cs | 0 .../ScanAnalysisCompositionBuilderTests.cs | 0 .../Index/BomIndexBuilderTests.cs | 0 .../ScannerArtifactPackageBuilderTests.cs | 0 .../StellaOps.Scanner.Emit.Tests.csproj | 23 +- .../EntryTraceAnalyzerTests.cs | 0 .../EntryTraceImageContextFactoryTests.cs | 172 +- .../LayeredRootFileSystemTests.cs | 352 +- .../ShellParserTests.cs | 0 .../StellaOps.Scanner.EntryTrace.Tests.csproj | 27 +- .../TestRootFileSystem.cs | 0 .../QueueLeaseIntegrationTests.cs | 0 .../StellaOps.Scanner.Queue.Tests.csproj | 29 +- .../Attestation/AttestorClientTests.cs | 0 .../Cas/LocalCasClientTests.cs | 0 .../Descriptor/DescriptorGeneratorTests.cs | 0 .../Descriptor/DescriptorGoldenTests.cs | 0 .../Fixtures/descriptor.baseline.json | 0 .../BuildxPluginManifestLoaderTests.cs | 0 ...s.Scanner.Sbomer.BuildXPlugin.Tests.csproj | 19 +- .../TestUtilities/TempDirectory.cs | 0 .../InMemoryArtifactObjectStore.cs | 0 .../RustFsArtifactObjectStoreTests.cs | 0 .../ScannerMongoFixture.cs | 0 .../StellaOps.Scanner.Storage.Tests.csproj | 21 +- .../StorageDualWriteFixture.cs | 0 .../AuthorizationTests.cs | 0 .../HealthEndpointsTests.cs | 0 ...PlatformEventPublisherRegistrationTests.cs | 0 .../PlatformEventSamplesTests.cs | 0 .../PolicyEndpointsTests.cs | 0 .../ReportEventDispatcherTests.cs | 0 .../ReportSamplesTests.cs | 0 .../ReportsEndpointsTests.cs | 0 .../RuntimeEndpointsTests.cs | 726 +- .../ScannerApplicationFactory.cs | 0 .../ScansEndpointsTests.cs | 0 .../StellaOps.Scanner.WebService.Tests.csproj | 21 +- .../CompositeScanAnalyzerDispatcherTests.cs | 346 +- .../EntryTraceExecutionServiceTests.cs | 358 +- .../LeaseHeartbeatServiceTests.cs | 0 .../RedisWorkerSmokeTests.cs | 0 .../ScannerWorkerOptionsValidatorTests.cs | 0 .../StellaOps.Scanner.Worker.Tests.csproj | 27 +- .../StaticOptionsMonitor.cs | 0 .../WorkerBasicScanScenarioTests.cs | 0 .../StellaOps.Scheduler.WebService/AGENTS.md | 0 .../Auth/AnonymousAuthenticationHandler.cs | 52 +- .../Auth/ClaimsTenantContextAccessor.cs | 54 +- .../Auth/HeaderScopeAuthorizer.cs | 62 +- .../Auth/HeaderTenantContextAccessor.cs | 48 +- .../Auth/IScopeAuthorizer.cs | 16 +- .../Auth/ITenantContextAccessor.cs | 20 +- .../Auth/TokenScopeAuthorizer.cs | 122 +- .../EventWebhookEndpointExtensions.cs | 346 +- .../EventWebhooks/IInboundExportEventSink.cs | 22 +- .../EventWebhooks/IWebhookRateLimiter.cs | 16 +- .../IWebhookRequestAuthenticator.cs | 214 +- .../InMemoryWebhookRateLimiter.cs | 126 +- .../EventWebhooks/LoggingExportEventSink.cs | 66 +- .../EventWebhooks/WebhookPayloads.cs | 212 +- .../GraphJobs/CartographerWebhookClient.cs | 204 +- .../Events/GraphJobCompletedEvent.cs | 92 +- .../GraphJobs/Events/GraphJobEventFactory.cs | 86 +- .../GraphJobs/Events/GraphJobEventKinds.cs | 12 +- .../Events/GraphJobEventPublisher.cs | 82 +- .../GraphJobs/GraphBuildJobRequest.cs | 52 +- .../GraphJobCompletionNotification.cs | 26 +- 
.../GraphJobs/GraphJobCompletionRequest.cs | 60 +- .../GraphJobs/GraphJobEndpointExtensions.cs | 322 +- .../GraphJobs/GraphJobQuery.cs | 54 +- .../GraphJobs/GraphJobResponse.cs | 90 +- .../GraphJobs/GraphJobService.cs | 676 +- .../GraphJobs/GraphOverlayJobRequest.cs | 58 +- .../GraphJobs/ICartographerWebhookClient.cs | 12 +- .../GraphJobs/IGraphJobCompletionPublisher.cs | 12 +- .../GraphJobs/IGraphJobService.cs | 32 +- .../GraphJobs/IGraphJobStore.cs | 44 +- .../GraphJobs/InMemoryGraphJobStore.cs | 166 +- .../GraphJobs/MongoGraphJobStore.cs | 110 +- .../NullCartographerWebhookClient.cs | 34 +- .../NullGraphJobCompletionPublisher.cs | 34 +- .../GraphJobs/OverlayLagMetricsResponse.cs | 40 +- .../Hosting/SchedulerPluginHostFactory.cs | 152 +- .../ISystemClock.cs | 22 +- .../Options/SchedulerAuthorityOptions.cs | 142 +- .../Options/SchedulerCartographerOptions.cs | 38 +- .../Options/SchedulerEventsOptions.cs | 218 +- .../Options/SchedulerOptions.cs | 140 +- .../PolicyRuns/IPolicyRunService.cs | 24 +- .../PolicyRuns/InMemoryPolicyRunService.cs | 276 +- .../PolicyRuns/PolicyRunEndpointExtensions.cs | 394 +- .../PolicyRuns/PolicyRunQueryOptions.cs | 240 +- .../PolicyRuns/PolicyRunService.cs | 426 +- .../StellaOps.Scheduler.WebService/Program.cs | 404 +- .../Properties/AssemblyInfo.cs | 6 +- .../Runs/InMemoryRunRepository.cs | 260 +- .../Runs/RunContracts.cs | 80 +- .../Runs/RunEndpoints.cs | 838 +- .../SchedulerEndpointHelpers.cs | 254 +- .../Schedules/InMemorySchedulerServices.cs | 306 +- .../Schedules/ScheduleContracts.cs | 68 +- .../Schedules/ScheduleEndpoints.cs | 794 +- .../StellaOps.Scheduler.WebService.csproj | 16 + .../StellaOps.Scheduler.WebService/TASKS.md | 2 +- .../docs/SCHED-WEB-16-103-RUN-APIS.md | 366 +- .../docs/SCHED-WEB-16-104-WEBHOOKS.md | 116 +- .../docs/SCHED-WEB-20-001-POLICY-RUNS.md | 2 +- .../docs/SCHED-WEB-21-001-GRAPH-APIS.md | 274 +- .../Program.cs | 0 .../StellaOps.Scheduler.Worker.Host.csproj | 0 src/Scheduler/StellaOps.Scheduler.sln | 416 + .../StellaOps.Scheduler.ImpactIndex/AGENTS.md | 0 .../FixtureImpactIndex.cs | 1230 +- .../IImpactIndex.cs | 92 +- .../ImpactImageRecord.cs | 34 +- .../ImpactIndexServiceCollectionExtensions.cs | 52 +- .../ImpactIndexStubOptions.cs | 38 +- .../Ingestion/BomIndexReader.cs | 238 +- .../Ingestion/ImpactIndexIngestionRequest.cs | 56 +- .../REMOVAL_NOTE.md | 30 +- .../RoaringImpactIndex.cs | 962 +- .../StellaOps.Scheduler.ImpactIndex.csproj | 0 .../StellaOps.Scheduler.ImpactIndex/TASKS.md | 10 +- .../StellaOps.Scheduler.Models/AGENTS.md | 0 .../AssemblyInfo.cs | 6 +- .../StellaOps.Scheduler.Models/AuditRecord.cs | 0 .../CanonicalJsonSerializer.cs | 0 .../EnumConverters.cs | 0 .../StellaOps.Scheduler.Models/Enums.cs | 0 .../GraphBuildJob.cs | 264 +- .../GraphJobStateMachine.cs | 482 +- .../GraphOverlayJob.cs | 264 +- .../StellaOps.Scheduler.Models/ImpactSet.cs | 0 .../PolicyRunJob.cs | 370 +- .../PolicyRunModels.cs | 1860 +- .../StellaOps.Scheduler.Models/Run.cs | 0 .../RunReasonExtensions.cs | 0 .../RunStateMachine.cs | 0 .../RunStatsBuilder.cs | 0 .../StellaOps.Scheduler.Models/Schedule.cs | 0 .../SchedulerSchemaMigration.cs | 0 .../SchedulerSchemaMigrationResult.cs | 0 .../SchedulerSchemaVersions.cs | 0 .../StellaOps.Scheduler.Models/Selector.cs | 0 .../StellaOps.Scheduler.Models.csproj | 0 .../StellaOps.Scheduler.Models/TASKS.md | 14 +- .../StellaOps.Scheduler.Models/Validation.cs | 0 .../docs/SCHED-MODELS-16-103-DESIGN.md | 0 .../docs/SCHED-MODELS-20-001-POLICY-RUNS.md | 296 +- .../docs/SCHED-MODELS-21-001-GRAPH-JOBS.md | 214 +- 
.../StellaOps.Scheduler.Queue/AGENTS.md | 0 .../StellaOps.Scheduler.Queue/AssemblyInfo.cs | 0 .../ISchedulerQueueTransportDiagnostics.cs | 18 +- .../Nats/INatsSchedulerQueuePayload.cs | 52 +- .../Nats/NatsSchedulerPlannerQueue.cs | 132 +- .../Nats/NatsSchedulerQueueBase.cs | 1384 +- .../Nats/NatsSchedulerQueueLease.cs | 202 +- .../Nats/NatsSchedulerRunnerQueue.cs | 148 +- .../StellaOps.Scheduler.Queue/README.md | 0 .../Redis/IRedisSchedulerQueuePayload.cs | 0 .../Redis/RedisSchedulerPlannerQueue.cs | 0 .../Redis/RedisSchedulerQueueBase.cs | 0 .../Redis/RedisSchedulerQueueLease.cs | 0 .../Redis/RedisSchedulerRunnerQueue.cs | 0 .../SchedulerQueueContracts.cs | 0 .../SchedulerQueueFields.cs | 0 .../SchedulerQueueHealthCheck.cs | 144 +- .../SchedulerQueueMetrics.cs | 0 .../SchedulerQueueOptions.cs | 0 ...hedulerQueueServiceCollectionExtensions.cs | 0 .../SchedulerQueueTransportKind.cs | 0 .../StellaOps.Scheduler.Queue.csproj | 42 +- .../StellaOps.Scheduler.Queue/TASKS.md | 0 .../AGENTS.md | 0 .../Documents/RunSummaryDocument.cs | 176 +- .../Internal/SchedulerMongoContext.cs | 0 .../Internal/SchedulerMongoInitializer.cs | 0 .../SchedulerMongoInitializerHostedService.cs | 0 .../EnsureSchedulerCollectionsMigration.cs | 0 .../EnsureSchedulerIndexesMigration.cs | 0 .../Migrations/ISchedulerMongoMigration.cs | 0 .../SchedulerMongoMigrationRecord.cs | 0 .../SchedulerMongoMigrationRunner.cs | 0 .../Options/SchedulerMongoOptions.cs | 0 .../Projections/RunSummaryProjection.cs | 72 +- .../Properties/AssemblyInfo.cs | 0 .../README.md | 0 .../Repositories/AuditQueryOptions.cs | 64 +- .../Repositories/AuditRepository.cs | 198 +- .../Repositories/GraphJobRepository.cs | 400 +- .../Repositories/IAuditRepository.cs | 36 +- .../Repositories/IGraphJobRepository.cs | 64 +- .../Repositories/IImpactSnapshotRepository.cs | 44 +- .../Repositories/IPolicyRunJobRepository.cs | 96 +- .../Repositories/IRunRepository.cs | 70 +- .../Repositories/IRunSummaryRepository.cs | 38 +- .../Repositories/IScheduleRepository.cs | 64 +- .../Repositories/ImpactSnapshotRepository.cs | 188 +- .../Repositories/PolicyRunJobRepository.cs | 498 +- .../Repositories/RunQueryOptions.cs | 70 +- .../Repositories/RunRepository.cs | 352 +- .../Repositories/RunSummaryRepository.cs | 158 +- .../Repositories/ScheduleQueryOptions.cs | 44 +- .../Repositories/ScheduleRepository.cs | 360 +- .../AuditRecordDocumentMapper.cs | 46 +- .../BsonDocumentJsonExtensions.cs | 288 +- .../Serialization/GraphJobDocumentMapper.cs | 250 +- .../Serialization/ImpactSetDocumentMapper.cs | 114 +- .../PolicyRunJobDocumentMapper.cs | 46 +- .../Serialization/RunDocumentMapper.cs | 46 +- .../Serialization/ScheduleDocumentMapper.cs | 50 +- .../ServiceCollectionExtensions.cs | 0 .../Services/IRunSummaryService.cs | 40 +- .../Services/ISchedulerAuditService.cs | 20 +- .../Services/RunSummaryService.cs | 408 +- .../Services/SchedulerAuditEvent.cs | 36 +- .../Services/SchedulerAuditService.cs | 124 +- .../Sessions/ISchedulerMongoSessionFactory.cs | 36 +- .../Sessions/SchedulerMongoSessionFactory.cs | 64 +- .../Sessions/SchedulerMongoSessionOptions.cs | 38 +- .../StellaOps.Scheduler.Storage.Mongo.csproj | 38 +- .../TASKS.md | 0 .../StellaOps.Scheduler.Worker/AGENTS.md | 0 ...edulerWorkerServiceCollectionExtensions.cs | 204 +- .../Events/SchedulerEventPublisher.cs | 0 .../Execution/HttpScannerReportClient.cs | 0 .../Execution/RunnerBackgroundService.cs | 0 .../Execution/RunnerExecutionService.cs | 0 .../Execution/ScannerReportClient.cs | 0 .../HttpCartographerBuildClient.cs | 468 +- 
.../HttpCartographerOverlayClient.cs | 454 +- .../Cartographer/ICartographerBuildClient.cs | 34 +- .../ICartographerOverlayClient.cs | 32 +- .../Graph/GraphBuildBackgroundService.cs | 258 +- .../Graph/GraphBuildExecutionService.cs | 454 +- .../Graph/GraphOverlayBackgroundService.cs | 256 +- .../Graph/GraphOverlayExecutionService.cs | 416 +- .../Scheduler/HttpGraphJobCompletionClient.cs | 198 +- .../Scheduler/IGraphJobCompletionClient.cs | 42 +- .../StellaOps.Scheduler.Worker/ImpactShard.cs | 0 .../ImpactShardPlanner.cs | 0 .../ImpactTargetingService.cs | 0 .../Observability/SchedulerWorkerMetrics.cs | 472 +- .../Options/SchedulerWorkerOptions.cs | 1298 +- .../Planning/PlannerBackgroundService.cs | 0 .../Planning/PlannerExecutionResult.cs | 0 .../Planning/PlannerExecutionService.cs | 0 .../Planning/PlannerQueueDispatchService.cs | 0 ...PlannerQueueDispatcherBackgroundService.cs | 0 .../Policy/HttpPolicyRunClient.cs | 308 +- .../Policy/IPolicyRunClient.cs | 20 +- .../Policy/IPolicyRunTargetingService.cs | 20 +- .../PolicyRunDispatchBackgroundService.cs | 376 +- .../Policy/PolicyRunExecutionResult.cs | 66 +- .../Policy/PolicyRunExecutionService.cs | 496 +- .../Policy/PolicyRunSubmissionResult.cs | 56 +- .../Policy/PolicyRunTargetingResult.cs | 50 +- .../Policy/PolicyRunTargetingService.cs | 910 +- .../Properties/AssemblyInfo.cs | 0 .../StellaOps.Scheduler.Worker.csproj | 7 +- .../StellaOps.Scheduler.Worker/TASKS.md | 2 +- .../docs/SCHED-WORKER-16-201-PLANNER.md | 0 .../SCHED-WORKER-16-202-IMPACT-TARGETING.md | 0 .../docs/SCHED-WORKER-16-203-RUNNER.md | 0 .../docs/SCHED-WORKER-16-204-EVENTS.md | 0 .../docs/SCHED-WORKER-16-205-OBSERVABILITY.md | 0 .../docs/SCHED-WORKER-20-301-POLICY-RUNS.md | 78 +- ...ED-WORKER-20-302-POLICY-DELTA-TARGETING.md | 154 +- .../docs/SCHED-WORKER-21-201-GRAPH-BUILD.md | 2 +- .../docs/SCHED-WORKER-21-202-GRAPH-OVERLAY.md | 2 +- .../FixtureImpactIndexTests.cs | 284 +- .../RoaringImpactIndexTests.cs | 390 +- ...ellaOps.Scheduler.ImpactIndex.Tests.csproj | 9 +- .../AuditRecordTests.cs | 0 .../GraphJobStateMachineTests.cs | 342 +- .../ImpactSetTests.cs | 0 .../PolicyRunModelsTests.cs | 166 +- .../RescanDeltaEventSampleTests.cs | 0 .../RunStateMachineTests.cs | 0 .../RunValidationTests.cs | 0 .../SamplePayloadTests.cs | 0 .../ScheduleSerializationTests.cs | 0 .../SchedulerSchemaMigrationTests.cs | 0 .../StellaOps.Scheduler.Models.Tests.csproj | 37 +- .../PlannerAndRunnerMessageTests.cs | 0 .../RedisSchedulerQueueTests.cs | 0 ...erQueueServiceCollectionExtensionsTests.cs | 230 +- .../StellaOps.Scheduler.Queue.Tests.csproj | 7 +- .../GlobalUsings.cs | 0 .../SchedulerMongoRoundTripTests.cs | 0 .../SchedulerMongoMigrationTests.cs | 0 .../Repositories/AuditRepositoryTests.cs | 120 +- .../ImpactSnapshotRepositoryTests.cs | 82 +- .../Repositories/RunRepositoryTests.cs | 152 +- .../Repositories/ScheduleRepositoryTests.cs | 148 +- .../SchedulerMongoTestHarness.cs | 72 +- .../Services/RunSummaryServiceTests.cs | 232 +- .../Services/SchedulerAuditServiceTests.cs | 164 +- .../SchedulerMongoSessionFactoryTests.cs | 70 +- ...laOps.Scheduler.Storage.Mongo.Tests.csproj | 7 +- .../TestDataFactory.cs | 196 +- .../CartographerWebhookClientTests.cs | 280 +- .../EventWebhookEndpointTests.cs | 256 +- .../GlobalUsings.cs | 12 +- .../GraphJobEndpointTests.cs | 220 +- .../GraphJobEventPublisherTests.cs | 302 +- .../PolicyRunEndpointTests.cs | 142 +- .../RunEndpointTests.cs | 208 +- .../ScheduleEndpointTests.cs | 176 +- .../SchedulerPluginHostFactoryTests.cs | 146 +- 
.../SchedulerWebApplicationFactory.cs | 92 +- ...tellaOps.Scheduler.WebService.Tests.csproj | 5 +- .../GlobalUsings.cs | 10 +- .../GraphBuildExecutionServiceTests.cs | 486 +- .../GraphOverlayExecutionServiceTests.cs | 474 +- .../HttpScannerReportClientTests.cs | 0 .../ImpactShardPlannerTests.cs | 0 .../ImpactTargetingServiceTests.cs | 0 .../PlannerBackgroundServiceTests.cs | 822 +- .../PlannerExecutionServiceTests.cs | 0 .../PlannerQueueDispatchServiceTests.cs | 0 .../PolicyRunExecutionServiceTests.cs | 656 +- .../PolicyRunTargetingServiceTests.cs | 510 +- .../RunnerExecutionServiceTests.cs | 0 .../SchedulerEventPublisherTests.cs | 0 .../StellaOps.Scheduler.Worker.Tests.csproj | 11 +- .../StellaOps.Sdk.Generator/AGENTS.md | 30 +- .../StellaOps.Sdk.Generator/TASKS.md | 42 +- src/{ => Sdk}/StellaOps.Sdk.Release/AGENTS.md | 30 +- src/{ => Sdk}/StellaOps.Sdk.Release/TASKS.md | 26 +- src/Signals/StellaOps.Signals.sln | 118 + src/{ => Signals}/StellaOps.Signals/AGENTS.md | 22 +- .../AnonymousAuthenticationHandler.cs | 58 +- .../Authentication/HeaderScopeAuthorizer.cs | 122 +- .../Authentication/TokenScopeAuthorizer.cs | 82 +- .../Hosting/SignalsStartupState.cs | 24 +- .../Models/CallgraphArtifactMetadata.cs | 42 +- .../Models/CallgraphDocument.cs | 82 +- .../StellaOps.Signals/Models/CallgraphEdge.cs | 18 +- .../Models/CallgraphIngestRequest.cs | 32 +- .../Models/CallgraphIngestResponse.cs | 18 +- .../StellaOps.Signals/Models/CallgraphNode.cs | 24 +- .../Options/SignalsArtifactStorageOptions.cs | 52 +- .../Options/SignalsAuthorityOptions.cs | 202 +- .../SignalsAuthorityOptionsConfigurator.cs | 76 +- .../Options/SignalsMongoOptions.cs | 90 +- .../Options/SignalsOptions.cs | 74 +- .../Parsing/CallgraphParseResult.cs | 24 +- .../CallgraphParserNotFoundException.cs | 34 +- .../CallgraphParserValidationException.cs | 28 +- .../Parsing/ICallgraphParser.cs | 42 +- .../Parsing/ICallgraphParserResolver.cs | 90 +- .../Parsing/SimpleJsonCallgraphParser.cs | 238 +- .../Persistence/ICallgraphRepository.cs | 26 +- .../Persistence/MongoCallgraphRepository.cs | 96 +- .../StellaOps.Signals/Program.cs | 626 +- .../Routing/SignalsPolicies.cs | 44 +- .../Services/CallgraphIngestionService.cs | 324 +- .../Services/ICallgraphIngestionService.cs | 32 +- .../StellaOps.Signals.csproj | 9 +- .../FileSystemCallgraphArtifactStore.cs | 120 +- .../Storage/ICallgraphArtifactStore.cs | 28 +- .../Models/CallgraphArtifactSaveRequest.cs | 24 +- .../Storage/Models/StoredCallgraphArtifact.cs | 20 +- src/{ => Signals}/StellaOps.Signals/TASKS.md | 26 +- src/Signer/StellaOps.Signer.sln | 182 + src/{ => Signer}/StellaOps.Signer/AGENTS.md | 42 +- .../SignerAbstractions.cs | 0 .../StellaOps.Signer.Core/SignerContracts.cs | 0 .../StellaOps.Signer.Core/SignerExceptions.cs | 0 .../StellaOps.Signer.Core/SignerPipeline.cs | 0 .../SignerStatementBuilder.cs | 0 .../StellaOps.Signer.Core.csproj | 0 .../Auditing/InMemorySignerAuditSink.cs | 0 .../Options/SignerCryptoOptions.cs | 0 .../Options/SignerEntitlementOptions.cs | 0 .../SignerReleaseVerificationOptions.cs | 0 .../InMemoryProofOfEntitlementIntrospector.cs | 0 .../Quotas/InMemoryQuotaService.cs | 0 .../DefaultReleaseIntegrityVerifier.cs | 0 .../ServiceCollectionExtensions.cs | 0 .../Signing/HmacDsseSigner.cs | 0 .../StellaOps.Signer.Infrastructure.csproj | 40 +- .../SignerEndpointsTests.cs | 254 +- .../StellaOps.Signer.Tests.csproj | 9 +- .../Contracts/SignDsseContracts.cs | 0 .../Endpoints/SignerEndpoints.cs | 0 .../StellaOps.Signer.WebService/Program.cs | 0 
.../StubBearerAuthenticationDefaults.cs | 12 +- .../StubBearerAuthenticationHandler.cs | 110 +- .../StellaOps.Signer.WebService.csproj | 15 +- .../StellaOps.Signer/StellaOps.Signer.sln | 0 src/{ => Signer}/StellaOps.Signer/TASKS.md | 20 +- .../StellaOps.Aoc.Tests.csproj | 41 - .../StellaOps.Bench.ScannerAnalyzers.csproj | 23 - .../StellaOps.Cartographer.csproj | 17 - .../StellaOps.Cli.Tests.csproj | 29 - ...aOps.Concelier.Connector.Acsc.Tests.csproj | 19 - ...ps.Concelier.Connector.CertFr.Tests.csproj | 16 - ...ps.Concelier.Connector.CertIn.Tests.csproj | 16 - ...laOps.Concelier.Connector.Cve.Tests.csproj | 17 - ...elier.Connector.Distro.Debian.Tests.csproj | 13 - ...elier.Connector.Distro.RedHat.Tests.csproj | 18 - ...ncelier.Connector.Distro.Suse.Tests.csproj | 18 - ...elier.Connector.Distro.Ubuntu.Tests.csproj | 18 - ...aOps.Concelier.Connector.Ghsa.Tests.csproj | 17 - ....Concelier.Connector.Ics.Cisa.Tests.csproj | 16 - ...elier.Connector.Ics.Kaspersky.Tests.csproj | 16 - ...laOps.Concelier.Connector.Jvn.Tests.csproj | 16 - ...laOps.Concelier.Connector.Kev.Tests.csproj | 19 - ...laOps.Concelier.Connector.Nvd.Tests.csproj | 18 - ...laOps.Concelier.Connector.Osv.Tests.csproj | 18 - ...ps.Concelier.Connector.Ru.Bdu.Tests.csproj | 13 - ....Concelier.Connector.Ru.Nkcki.Tests.csproj | 13 - ...oncelier.Connector.Vndr.Adobe.Tests.csproj | 17 - ...oncelier.Connector.Vndr.Apple.Tests.csproj | 18 - ...elier.Connector.Vndr.Chromium.Tests.csproj | 18 - ...oncelier.Connector.Vndr.Cisco.Tests.csproj | 17 - ...Concelier.Connector.Vndr.Msrc.Tests.csproj | 24 - ...ncelier.Connector.Vndr.Oracle.Tests.csproj | 17 - ...ncelier.Connector.Vndr.Vmware.Tests.csproj | 18 - .../StellaOps.Concelier.Core.Tests.csproj | 12 - ...laOps.Concelier.Exporter.Json.Tests.csproj | 13 - ...ps.Concelier.Exporter.TrivyDb.Tests.csproj | 13 - .../StellaOps.Concelier.Merge.Tests.csproj | 13 - ...laOps.Concelier.Normalization.Tests.csproj | 11 - ...laOps.Concelier.Storage.Mongo.Tests.csproj | 15 - ...tellaOps.Concelier.WebService.Tests.csproj | 13 - .../StellaOps.Concelier.WebService.csproj | 37 - src/StellaOps.Concelier.sln | 1000 - .../StellaOps.Configuration.Tests.csproj | 11 - .../StellaOps.Excititor.Core.Tests.csproj | 15 - ...laOps.Excititor.Storage.Mongo.Tests.csproj | 15 - .../StellaOps.Excititor.WebService.csproj | 22 - .../StellaOps.Excititor.Worker.csproj | 24 - .../xunit.runner.json | 3 - .../TASKS.md | 2 - .../TASKS.md | 2 - .../TASKS.md | 2 - src/StellaOps.Notify.Models/TASKS.md | 2 - .../StellaOps.Notify.WebService.csproj | 27 - src/StellaOps.Notify.WebService/TASKS.md | 2 - .../xunit.runner.json | 3 - .../xunit.runner.json | 3 - .../appsettings.Development.json | 8 - .../appsettings.json | 9 - .../appsettings.Development.json | 8 - .../appsettings.json | 8 - .../StellaOps.Policy.Engine.csproj | 19 - .../StellaOps.Policy.Gateway.csproj | 22 - ...ellaOps.Registry.TokenService.Tests.csproj | 28 - .../xunit.runner.json | 3 - .../appsettings.Development.json | 8 - .../appsettings.json | 9 - .../xunit.runner.json | 3 - .../Program.cs | 41 - .../appsettings.Development.json | 8 - .../appsettings.json | 9 - .../appsettings.Development.json | 8 - .../appsettings.json | 8 - .../app/node_modules/left-pad/package.json | 5 - .../app/node_modules/lib/package.json | 5 - .../app/node_modules/shared/package.json | 5 - .../StellaOps.Scanner.Core.Tests.csproj | 15 - .../StellaOps.Scanner.WebService.csproj | 33 - .../StellaOps.Scheduler.WebService.csproj | 15 - .../xunit.runner.json | 3 - .../Program.cs | 41 - 
.../appsettings.Development.json | 8 - .../appsettings.json | 9 - .../appsettings.Development.json | 8 - .../xunit.runner.json | 3 - .../Program.cs | 41 - .../appsettings.Development.json | 8 - .../appsettings.json | 9 - .../appsettings.Development.json | 8 - .../appsettings.json | 8 - .../StellaOps.Zastava.Core.Tests.csproj | 14 - src/StellaOps.sln | 5292 +-- src/TaskRunner/StellaOps.TaskRunner.sln | 99 + .../StellaOps.TaskRunner/AGENTS.md | 34 +- .../Execution/IPackRunApprovalStore.cs | 20 +- .../Execution/IPackRunJobDispatcher.cs | 12 +- .../IPackRunNotificationPublisher.cs | 16 +- .../Execution/PackRunApprovalCoordinator.cs | 354 +- .../Execution/PackRunApprovalState.cs | 168 +- .../Execution/PackRunApprovalStatus.cs | 18 +- .../Execution/PackRunExecutionContext.cs | 44 +- .../Execution/PackRunProcessor.cs | 168 +- .../Execution/PackRunProcessorResult.cs | 10 +- .../Expressions/TaskPackExpressions.cs | 1192 +- .../Planning/TaskPackPlan.cs | 190 +- .../Planning/TaskPackPlanHasher.cs | 224 +- .../Planning/TaskPackPlanInsights.cs | 370 +- .../Planning/TaskPackPlanner.cs | 862 +- .../Serialization/CanonicalJson.cs | 136 +- .../StellaOps.TaskRunner.Core.csproj | 44 +- .../TaskPacks/TaskPackManifest.cs | 500 +- .../TaskPacks/TaskPackManifestLoader.cs | 336 +- .../TaskPacks/TaskPackManifestValidator.cs | 470 +- .../Execution/FilePackRunApprovalStore.cs | 236 +- .../Execution/FilesystemPackRunDispatcher.cs | 184 +- .../HttpPackRunNotificationPublisher.cs | 146 +- .../LoggingPackRunNotificationPublisher.cs | 68 +- .../Execution/NoopPackRunJobDispatcher.cs | 18 +- .../Execution/NotificationOptions.cs | 16 +- ...StellaOps.TaskRunner.Infrastructure.csproj | 50 +- .../PackRunApprovalCoordinatorTests.cs | 190 +- .../PackRunProcessorTests.cs | 170 +- .../StellaOps.TaskRunner.Tests.csproj | 270 +- .../TaskPackPlannerTests.cs | 354 +- .../TestManifests.cs | 330 +- .../xunit.runner.json | 3 + .../Program.cs | 41 + .../Properties/launchSettings.json | 46 +- .../StellaOps.TaskRunner.WebService.csproj | 82 +- .../StellaOps.TaskRunner.WebService.http | 12 +- .../appsettings.Development.json | 8 + .../appsettings.json | 9 + .../StellaOps.TaskRunner.Worker/Program.cs | 84 +- .../Properties/launchSettings.json | 24 +- .../Services/PackRunWorkerOptions.cs | 24 +- .../Services/PackRunWorkerService.cs | 98 +- .../StellaOps.TaskRunner.Worker.csproj | 86 +- .../appsettings.Development.json | 8 + .../appsettings.json | 36 +- .../StellaOps.TaskRunner.sln | 180 +- .../StellaOps.TaskRunner/TASKS.md | 102 +- .../StellaOps.Telemetry.Core/AGENTS.md | 42 +- .../StellaOps.Telemetry.Core/TASKS.md | 46 +- .../StellaOps.TimelineIndexer.sln | 99 + .../StellaOps.TimelineIndexer/AGENTS.md | 56 +- .../StellaOps.TimelineIndexer.Core/Class1.cs | 12 +- .../StellaOps.TimelineIndexer.Core.csproj | 36 +- .../Class1.cs | 12 +- ...aOps.TimelineIndexer.Infrastructure.csproj | 56 +- .../StellaOps.TimelineIndexer.Tests.csproj | 270 +- .../UnitTest1.cs | 20 +- .../xunit.runner.json | 3 + .../Program.cs | 41 + .../Properties/launchSettings.json | 46 +- ...tellaOps.TimelineIndexer.WebService.csproj | 82 +- .../StellaOps.TimelineIndexer.WebService.http | 12 +- .../appsettings.Development.json | 8 + .../appsettings.json | 9 + .../Program.cs | 14 +- .../Properties/launchSettings.json | 24 +- .../StellaOps.TimelineIndexer.Worker.csproj | 86 +- .../Worker.cs | 32 +- .../appsettings.Development.json | 8 + .../appsettings.json | 8 + .../StellaOps.TimelineIndexer.sln | 180 +- .../StellaOps.TimelineIndexer/TASKS.md | 28 +- src/{ => 
UI}/StellaOps.UI/TASKS.md | 190 +- src/{ => VexLens}/StellaOps.VexLens/AGENTS.md | 4 +- src/{ => VexLens}/StellaOps.VexLens/TASKS.md | 68 +- .../StellaOps.VulnExplorer.Api/AGENTS.md | 4 +- .../StellaOps.VulnExplorer.Api/TASKS.md | 28 +- src/{ => Web}/StellaOps.Web/.editorconfig | 0 src/{ => Web}/StellaOps.Web/.gitignore | 6 +- src/{ => Web}/StellaOps.Web/AGENTS.md | 48 +- src/{ => Web}/StellaOps.Web/README.md | 0 src/{ => Web}/StellaOps.Web/TASKS.md | 358 +- src/{ => Web}/StellaOps.Web/angular.json | 0 .../docs/DeterministicInstall.md | 2 +- .../StellaOps.Web/docs/TrivyDbSettings.md | 74 +- src/{ => Web}/StellaOps.Web/karma.conf.cjs | 126 +- src/{ => Web}/StellaOps.Web/package-lock.json | 27392 ++++++++-------- src/{ => Web}/StellaOps.Web/package.json | 0 .../StellaOps.Web/playwright.config.ts | 44 +- .../StellaOps.Web/scripts/chrome-path.js | 266 +- .../StellaOps.Web/scripts/verify-chromium.js | 48 +- .../StellaOps.Web/src/app/app.component.html | 92 +- .../StellaOps.Web/src/app/app.component.scss | 224 +- .../src/app/app.component.spec.ts | 70 +- .../StellaOps.Web/src/app/app.component.ts | 128 +- .../StellaOps.Web/src/app/app.config.ts | 162 +- .../StellaOps.Web/src/app/app.routes.ts | 96 +- .../app/core/api/authority-console.client.ts | 226 +- .../app/core/api/concelier-exporter.client.ts | 102 +- .../src/app/core/api/notify.client.ts | 284 +- .../src/app/core/api/notify.models.ts | 388 +- .../src/app/core/api/policy-preview.models.ts | 0 .../src/app/core/api/scanner.models.ts | 34 +- .../app/core/auth/auth-http.interceptor.ts | 342 +- .../src/app/core/auth/auth-session.model.ts | 112 +- .../app/core/auth/auth-session.store.spec.ts | 110 +- .../src/app/core/auth/auth-session.store.ts | 258 +- .../src/app/core/auth/auth-storage.service.ts | 90 +- .../app/core/auth/authority-auth.service.ts | 1244 +- .../src/app/core/auth/dpop/dpop-key-store.ts | 362 +- .../app/core/auth/dpop/dpop.service.spec.ts | 206 +- .../src/app/core/auth/dpop/dpop.service.ts | 296 +- .../src/app/core/auth/dpop/jose-utilities.ts | 246 +- .../src/app/core/auth/pkce.util.ts | 48 +- .../src/app/core/config/app-config.model.ts | 98 +- .../src/app/core/config/app-config.service.ts | 198 +- .../console/console-session.service.spec.ts | 278 +- .../core/console/console-session.service.ts | 322 +- .../console/console-session.store.spec.ts | 246 +- .../app/core/console/console-session.store.ts | 256 +- .../orchestrator/operator-context.service.ts | 70 +- .../operator-metadata.interceptor.ts | 82 +- .../features/auth/auth-callback.component.ts | 122 +- .../console/console-profile.component.html | 416 +- .../console/console-profile.component.scss | 440 +- .../console/console-profile.component.spec.ts | 220 +- .../console/console-profile.component.ts | 140 +- .../notify/notify-panel.component.html | 688 +- .../notify/notify-panel.component.scss | 772 +- .../notify/notify-panel.component.spec.ts | 132 +- .../features/notify/notify-panel.component.ts | 1284 +- .../scan-attestation-panel.component.html | 78 +- .../scan-attestation-panel.component.scss | 150 +- .../scan-attestation-panel.component.spec.ts | 110 +- .../scans/scan-attestation-panel.component.ts | 84 +- .../scans/scan-detail-page.component.html | 104 +- .../scans/scan-detail-page.component.scss | 158 +- .../scans/scan-detail-page.component.spec.ts | 100 +- .../scans/scan-detail-page.component.ts | 124 +- .../trivy-db-settings-page.component.html | 216 +- .../trivy-db-settings-page.component.scss | 460 +- .../trivy-db-settings-page.component.spec.ts | 188 +- 
.../trivy-db-settings-page.component.ts | 270 +- .../app/testing/mock-notify-api.service.ts | 580 +- .../src/app/testing/notify-fixtures.ts | 514 +- .../src/app/testing/policy-fixtures.spec.ts | 108 +- .../src/app/testing/policy-fixtures.ts | 46 +- .../src/app/testing/scan-fixtures.ts | 60 +- .../StellaOps.Web/src/assets/.gitkeep | 0 .../StellaOps.Web/src/config/config.json | 52 +- .../src/config/config.sample.json | 52 +- src/{ => Web}/StellaOps.Web/src/favicon.ico | Bin src/{ => Web}/StellaOps.Web/src/index.html | 0 src/{ => Web}/StellaOps.Web/src/main.ts | 0 src/{ => Web}/StellaOps.Web/src/styles.scss | 0 .../StellaOps.Web/test-results/.last-run.json | 6 +- .../StellaOps.Web/tests/e2e/auth.spec.ts | 158 +- src/{ => Web}/StellaOps.Web/tsconfig.app.json | 0 src/{ => Web}/StellaOps.Web/tsconfig.json | 0 .../StellaOps.Web/tsconfig.spec.json | 0 .../Backend/IRuntimePolicyClient.cs | 0 .../Backend/RuntimeEventsClient.cs | 0 .../Backend/RuntimePolicyClient.cs | 0 .../Backend/RuntimePolicyContracts.cs | 0 .../Backend/RuntimePolicyException.cs | 0 .../Configuration/ZastavaObserverOptions.cs | 190 +- .../ContainerRuntime/ContainerStateTracker.cs | 268 +- .../ContainerStateTrackerFactory.cs | 0 .../ContainerRuntime/Cri/CriConversions.cs | 134 +- .../ContainerRuntime/Cri/CriModels.cs | 58 +- .../ContainerRuntime/Cri/CriRuntimeClient.cs | 356 +- .../Cri/CriRuntimeClientFactory.cs | 52 +- .../ObserverServiceCollectionExtensions.cs | 0 .../Posture/IRuntimePostureCache.cs | 0 .../Posture/IRuntimePostureEvaluator.cs | 0 .../Posture/RuntimePostureCache.cs | 0 .../Posture/RuntimePostureCacheEntry.cs | 0 .../Posture/RuntimePostureEvaluationResult.cs | 0 .../Posture/RuntimePostureEvaluator.cs | 0 .../StellaOps.Zastava.Observer/Program.cs | 8 +- .../Properties/AssemblyInfo.cs | 0 .../Protos/runtime/v1/runtime.proto | 3710 +-- .../Runtime/ElfBuildIdReader.cs | 0 .../Runtime/RuntimeEventBuffer.cs | 0 .../Runtime/RuntimeProcessCollector.cs | 0 .../StellaOps.Zastava.Observer.csproj | 48 +- .../StellaOps.Zastava.Observer/TASKS.md | 0 .../Worker/BackoffCalculator.cs | 0 .../Worker/ContainerLifecycleHostedService.cs | 0 .../Worker/ContainerRuntimePoller.cs | 0 .../Worker/ObserverBootstrapService.cs | 102 +- .../Worker/RuntimeEventDispatchService.cs | 0 .../Worker/RuntimeEventFactory.cs | 0 .../Admission/AdmissionEndpoint.cs | 0 .../Admission/AdmissionRequestContext.cs | 0 .../Admission/AdmissionResponseBuilder.cs | 0 .../Admission/AdmissionReviewModels.cs | 0 .../Admission/AdmissionReviewParser.cs | 0 .../Admission/ImageDigestResolver.cs | 0 .../RuntimeAdmissionPolicyService.cs | 0 .../Admission/RuntimePolicyCache.cs | 0 .../Authority/AuthorityTokenProvider.cs | 102 +- .../Backend/IRuntimePolicyClient.cs | 18 +- .../Backend/RuntimePolicyClient.cs | 230 +- .../Backend/RuntimePolicyException.cs | 42 +- .../Backend/RuntimePolicyRequest.cs | 32 +- .../Backend/RuntimePolicyResponse.cs | 62 +- .../Certificates/CsrCertificateSource.cs | 0 .../IWebhookCertificateProvider.cs | 0 .../SecretFileCertificateSource.cs | 0 .../WebhookCertificateHealthCheck.cs | 0 .../Configuration/ZastavaWebhookOptions.cs | 0 .../ServiceCollectionExtensions.cs | 0 .../WebhookRuntimeOptionsPostConfigure.cs | 104 +- .../Hosting/StartupValidationHostedService.cs | 0 .../IMPLEMENTATION_PLAN.md | 0 .../StellaOps.Zastava.Webhook/Program.cs | 0 .../Properties/AssemblyInfo.cs | 6 +- .../StellaOps.Zastava.Webhook.csproj | 5 +- .../StellaOps.Zastava.Webhook/TASKS.md | 0 src/Zastava/StellaOps.Zastava.sln | 199 + .../Configuration/ZastavaAuthorityOptions.cs 
| 136 +- .../Configuration/ZastavaRuntimeOptions.cs | 168 +- .../Contracts/AdmissionDecision.cs | 0 .../Contracts/RuntimeEvent.cs | 0 .../Contracts/ZastavaContractVersions.cs | 0 .../ZastavaServiceCollectionExtensions.cs | 196 +- .../Diagnostics/ZastavaLogScopeBuilder.cs | 180 +- ...ZastavaLoggerFactoryOptionsConfigurator.cs | 60 +- .../Diagnostics/ZastavaRuntimeMetrics.cs | 156 +- .../StellaOps.Zastava.Core/GlobalUsings.cs | 0 .../Hashing/ZastavaHashing.cs | 0 .../Properties/AssemblyInfo.cs | 6 +- .../IZastavaAuthorityTokenProvider.cs | 28 +- .../Security/ZastavaAuthorityTokenProvider.cs | 628 +- .../Security/ZastavaOperationalToken.cs | 140 +- .../ZastavaCanonicalJsonSerializer.cs | 0 .../StellaOps.Zastava.Core.csproj | 7 +- .../StellaOps.Zastava.Core/TASKS.md | 0 .../Contracts/ZastavaContractVersionsTests.cs | 0 ...ZastavaServiceCollectionExtensionsTests.cs | 244 +- .../ZastavaAuthorityTokenProviderTests.cs | 456 +- .../ZastavaCanonicalJsonSerializerTests.cs | 390 +- .../StellaOps.Zastava.Core.Tests.csproj | 15 + .../ContainerRuntimePollerTests.cs | 0 .../Posture/RuntimePostureEvaluatorTests.cs | 0 .../Runtime/ElfBuildIdReaderTests.cs | 0 .../Runtime/RuntimeEventBufferTests.cs | 0 .../Runtime/RuntimeProcessCollectorTests.cs | 0 .../StellaOps.Zastava.Observer.Tests.csproj | 5 +- .../TestSupport/ElfTestFileBuilder.cs | 0 .../Worker/RuntimeEventFactoryTests.cs | 148 +- .../AdmissionResponseBuilderTests.cs | 0 .../Admission/AdmissionReviewParserTests.cs | 0 .../RuntimeAdmissionPolicyServiceTests.cs | 0 .../Backend/RuntimePolicyClientTests.cs | 396 +- .../SecretFileCertificateSourceTests.cs | 0 .../WebhookCertificateProviderTests.cs | 0 .../StellaOps.Zastava.Webhook.Tests.csproj | 5 +- .../Dpop/DpopNonceConsumeResult.cs | 0 .../Dpop/DpopNonceIssueResult.cs | 0 .../Dpop/DpopNonceUtilities.cs | 0 .../Dpop/DpopProofValidator.cs | 0 .../Dpop/DpopValidationOptions.cs | 0 .../Dpop/DpopValidationResult.cs | 0 .../Dpop/IDpopNonceStore.cs | 0 .../Dpop/IDpopProofValidator.cs | 0 .../Dpop/IDpopReplayCache.cs | 0 .../Dpop/InMemoryDpopNonceStore.cs | 0 .../Dpop/InMemoryDpopReplayCache.cs | 0 .../Dpop/RedisDpopNonceStore.cs | 0 .../StellaOps.Auth.Security/README.md | 0 .../StellaOps.Auth.Security.csproj | 76 +- .../AuthorityConfigurationDiagnostic.cs | 0 .../AuthorityPluginConfigurationAnalyzer.cs | 0 .../AuthorityPluginConfigurationLoader.cs | 0 .../AuthoritySigningAdditionalKeyOptions.cs | 0 .../AuthoritySigningOptions.cs | 0 .../StellaOps.Configuration.csproj | 7 +- .../StellaOpsAuthorityConfiguration.cs | 0 .../StellaOpsAuthorityOptions.cs | 0 .../StellaOpsBootstrapOptions.cs | 0 .../StellaOpsConfigurationBootstrapper.cs | 0 .../StellaOpsConfigurationContext.cs | 0 .../StellaOpsConfigurationOptions.cs | 0 .../StellaOpsOptionsBinder.cs | 0 .../CryptoProviderRegistryOptions.cs | 0 .../CryptoServiceCollectionExtensions.cs | 0 ...ps.Cryptography.DependencyInjection.csproj | 28 +- .../StellaOps.Cryptography.Kms/AGENTS.md | 28 +- .../StellaOps.Cryptography.Kms/TASKS.md | 26 +- ...CastleCryptoServiceCollectionExtensions.cs | 0 .../BouncyCastleEd25519CryptoProvider.cs | 0 ...ps.Cryptography.Plugin.BouncyCastle.csproj | 32 +- .../StellaOps.Cryptography/AGENTS.md | 44 +- .../Argon2idPasswordHasher.Konscious.cs | 0 .../Argon2idPasswordHasher.Sodium.cs | 0 .../Argon2idPasswordHasher.cs | 0 .../Audit/AuthEventRecord.cs | 536 +- .../StellaOps.Cryptography/CryptoProvider.cs | 0 .../CryptoProviderRegistry.cs | 0 .../CryptoSigningKey.cs | 0 .../DefaultCryptoProvider.cs | 0 .../StellaOps.Cryptography/EcdsaSigner.cs | 0 
.../StellaOps.Cryptography/ICryptoSigner.cs | 0 .../LibsodiumCryptoProvider.cs | 0 .../PasswordHashAlgorithms.cs | 0 .../StellaOps.Cryptography/PasswordHashing.cs | 0 .../Pbkdf2PasswordHasher.cs | 0 .../SignatureAlgorithms.cs | 0 .../StellaOps.Cryptography.csproj | 32 +- .../StellaOps.Cryptography/TASKS.md | 0 .../IDependencyInjectionRoutine.cs | 0 .../ServiceBindingAttribute.cs | 0 .../StellaOps.DependencyInjection.csproj | 26 +- .../PluginDependencyInjectionExtensions.cs | 0 .../PluginServiceRegistration.cs | 0 .../StellaOpsPluginRegistration.cs | 0 .../Hosting/PluginAssembly.cs | 0 .../StellaOps.Plugin/Hosting/PluginHost.cs | 0 .../Hosting/PluginHostOptions.cs | 0 .../Hosting/PluginHostResult.cs | 0 .../Hosting/PluginLoadContext.cs | 0 .../Internal/ReflectionExtensions.cs | 0 .../StellaOps.Plugin/PluginContracts.cs | 0 .../Properties/AssemblyInfo.cs | 0 .../StellaOps.Plugin/StellaOps.Plugin.csproj | 3 +- .../StellaOps.Plugin/TASKS.md | 8 +- ...AuthorityPluginConfigurationLoaderTests.cs | 0 .../AuthorityTelemetryTests.cs | 0 .../StellaOps.Configuration.Tests.csproj | 12 + .../StellaOpsAuthorityOptionsTests.cs | 444 +- .../Argon2idPasswordHasherTests.cs | 0 .../Audit/AuthEventRecordTests.cs | 114 +- .../BouncyCastleEd25519CryptoProviderTests.cs | 0 .../CryptoProviderRegistryTests.cs | 0 .../DefaultCryptoProviderSigningTests.cs | 0 .../LibsodiumCryptoProviderTests.cs | 0 .../PasswordHashOptionsTests.cs | 0 .../Pbkdf2PasswordHasherTests.cs | 0 .../StellaOps.Cryptography.Tests.csproj | 33 +- ...luginDependencyInjectionExtensionsTests.cs | 0 .../PluginServiceRegistrationTests.cs | 0 .../StellaOps.Plugin.Tests.csproj | 7 +- .../CallgraphIngestionTests.cs | 276 +- .../SignalsApiTests.cs | 224 +- .../StellaOps.Signals.Tests.csproj | 5 +- .../TestInfrastructure/SignalsTestFactory.cs | 128 +- tmp/docenv/pyvenv.cfg | 10 +- tmp/reflect/Program.cs | 26 +- tmp/reflect/reflect.csproj | 28 +- tools/FixtureUpdater/FixtureUpdater.csproj | 40 +- .../LanguageAnalyzerSmoke.csproj | 36 +- tools/LanguageAnalyzerSmoke/Program.cs | 2 +- .../NotifySmokeCheck/NotifySmokeCheck.csproj | 24 +- tools/NotifySmokeCheck/Program.cs | 396 +- .../PolicyDslValidator.csproj | 28 +- tools/PolicyDslValidator/Program.cs | 112 +- .../PolicySchemaExporter.csproj | 42 +- tools/PolicySchemaExporter/Program.cs | 96 +- .../PolicySimulationSmoke.csproj | 28 +- tools/PolicySimulationSmoke/Program.cs | 582 +- 4103 files changed, 192899 insertions(+), 187024 deletions(-) delete mode 100644 SPRINTS.md delete mode 100644 SPRINTS_PRIOR_20251019.md delete mode 100644 SPRINTS_PRIOR_20251021.md delete mode 100644 SPRINTS_PRIOR_20251025.md rename AGENTS.md => docs/implplan/AGENTS.md (100%) rename EPIC_1.md => docs/implplan/EPIC_1.md (97%) rename EPIC_10.md => docs/implplan/EPIC_10.md (99%) rename EPIC_11.md => docs/implplan/EPIC_11.md (99%) rename EPIC_12.md => docs/implplan/EPIC_12.md (69%) rename EPIC_13.md => docs/implplan/EPIC_13.md (100%) rename EPIC_14.md => docs/implplan/EPIC_14.md (100%) rename EPIC_15.md => docs/implplan/EPIC_15.md (100%) rename EPIC_16.md => docs/implplan/EPIC_16.md (98%) rename EPIC_17.md => docs/implplan/EPIC_17.md (98%) rename EPIC_18.md => docs/implplan/EPIC_18.md (98%) rename EPIC_19.md => docs/implplan/EPIC_19.md (98%) rename EPIC_2.md => docs/implplan/EPIC_2.md (97%) rename EPIC_4.md => docs/implplan/EPIC_4.md (97%) rename EPIC_5.md => docs/implplan/EPIC_5.md (97%) rename EPIC_6.md => docs/implplan/EPIC_6.md (99%) rename EPIC_7.md => docs/implplan/EPIC_7.md (99%) rename EPIC_8.md => docs/implplan/EPIC_8.md (99%) 
rename EPIC_9.md => docs/implplan/EPIC_9.md (98%) rename EXECPLAN.md => docs/implplan/EXECPLAN.md (68%) create mode 100644 docs/implplan/SPRINTS.md create mode 100644 docs/implplan/SPRINTS_PRIOR_20251019.md create mode 100644 docs/implplan/SPRINTS_PRIOR_20251021.md create mode 100644 docs/implplan/SPRINTS_PRIOR_20251025.md rename SPRINTS_PRIOR_20251027.md => docs/implplan/SPRINTS_PRIOR_20251027.md (57%) create mode 100644 docs/implplan/SPRINTS_PRIOR_20251028.md rename src/{ => AdvisoryAI}/StellaOps.AdvisoryAI/AGENTS.md (90%) rename src/{ => AdvisoryAI}/StellaOps.AdvisoryAI/TASKS.md (99%) rename src/{ => AirGap}/StellaOps.AirGap.Controller/AGENTS.md (98%) rename src/{ => AirGap}/StellaOps.AirGap.Controller/TASKS.md (99%) rename src/{ => AirGap}/StellaOps.AirGap.Importer/AGENTS.md (98%) rename src/{ => AirGap}/StellaOps.AirGap.Importer/TASKS.md (99%) rename src/{ => AirGap}/StellaOps.AirGap.Policy/AGENTS.md (98%) rename src/{ => AirGap}/StellaOps.AirGap.Policy/TASKS.md (99%) rename src/{ => AirGap}/StellaOps.AirGap.Time/AGENTS.md (97%) rename src/{ => AirGap}/StellaOps.AirGap.Time/TASKS.md (99%) create mode 100644 src/Aoc/StellaOps.Aoc.sln rename src/{ => Aoc/__Libraries}/StellaOps.Aoc/AocForbiddenKeys.cs (96%) rename src/{ => Aoc/__Libraries}/StellaOps.Aoc/AocGuardException.cs (96%) rename src/{ => Aoc/__Libraries}/StellaOps.Aoc/AocGuardExtensions.cs (95%) rename src/{ => Aoc/__Libraries}/StellaOps.Aoc/AocGuardOptions.cs (96%) rename src/{ => Aoc/__Libraries}/StellaOps.Aoc/AocGuardResult.cs (97%) rename src/{ => Aoc/__Libraries}/StellaOps.Aoc/AocViolation.cs (97%) rename src/{ => Aoc/__Libraries}/StellaOps.Aoc/AocViolationCode.cs (96%) rename src/{ => Aoc/__Libraries}/StellaOps.Aoc/AocWriteGuard.cs (97%) rename src/{ => Aoc/__Libraries}/StellaOps.Aoc/ServiceCollectionExtensions.cs (96%) rename src/{ => Aoc/__Libraries}/StellaOps.Aoc/StellaOps.Aoc.csproj (97%) rename src/{ => Aoc/__Tests}/StellaOps.Aoc.Tests/AocWriteGuardTests.cs (96%) create mode 100644 src/Aoc/__Tests/StellaOps.Aoc.Tests/StellaOps.Aoc.Tests.csproj rename src/{ => Aoc/__Tests}/StellaOps.Aoc.Tests/UnitTest1.cs (91%) rename src/{StellaOps.Cli.Tests => Aoc/__Tests/StellaOps.Aoc.Tests}/xunit.runner.json (100%) rename src/{ => Api}/StellaOps.Api.Governance/AGENTS.md (97%) rename src/{ => Api}/StellaOps.Api.Governance/TASKS.md (92%) rename src/{ => Api}/StellaOps.Api.OpenApi/AGENTS.md (78%) rename src/{ => Api}/StellaOps.Api.OpenApi/TASKS.md (99%) rename src/{ => Api}/StellaOps.Api.OpenApi/authority/openapi.yaml (97%) rename src/{ => Attestor}/StellaOps.Attestor.Envelope/AGENTS.md (98%) rename src/{ => Attestor}/StellaOps.Attestor.Envelope/TASKS.md (99%) rename src/{ => Attestor}/StellaOps.Attestor.Types/AGENTS.md (97%) rename src/{ => Attestor}/StellaOps.Attestor.Types/TASKS.md (99%) rename src/{ => Attestor}/StellaOps.Attestor.Verify/AGENTS.md (98%) rename src/{ => Attestor}/StellaOps.Attestor.Verify/TASKS.md (99%) create mode 100644 src/Attestor/StellaOps.Attestor.sln rename src/{ => Attestor}/StellaOps.Attestor/AGENTS.md (78%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Core/Audit/AttestorAuditRecord.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Core/Observability/AttestorMetrics.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Core/Options/AttestorOptions.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Core/Rekor/IRekorClient.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Core/Rekor/RekorBackend.cs 
(100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Core/Rekor/RekorProofResponse.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Core/Rekor/RekorSubmissionResponse.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Core/StellaOps.Attestor.Core.csproj (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/AttestorArchiveBundle.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/AttestorEntry.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorArchiveStore.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorAuditSink.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorDedupeStore.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorEntryRepository.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionRequest.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionResult.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionValidationResult.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionValidator.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorValidationException.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/IAttestorSubmissionService.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/IDsseCanonicalizer.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/SubmissionContext.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/AttestorVerificationException.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/AttestorVerificationRequest.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/AttestorVerificationResult.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/IAttestorVerificationService.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Properties/AssemblyInfo.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Rekor/HttpRekorClient.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Rekor/StubRekorClient.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/ServiceCollectionExtensions.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/StellaOps.Attestor.Infrastructure.csproj (98%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/InMemoryAttestorDedupeStore.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/MongoAttestorAuditSink.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/MongoAttestorEntryRepository.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/NullAttestorArchiveStore.cs (100%) rename src/{ => 
Attestor}/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/RedisAttestorDedupeStore.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/S3AttestorArchiveStore.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Submission/AttestorSubmissionService.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Submission/DefaultDsseCanonicalizer.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Verification/AttestorVerificationService.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorSubmissionServiceTests.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorVerificationServiceTests.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Tests/HttpRekorClientTests.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Tests/StellaOps.Attestor.Tests.csproj (80%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.Tests/TestDoubles.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.WebService/Program.cs (100%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.WebService/StellaOps.Attestor.WebService.csproj (61%) rename src/{ => Attestor}/StellaOps.Attestor/StellaOps.Attestor.sln (100%) rename src/{ => Attestor}/StellaOps.Attestor/TASKS.md (100%) create mode 100644 src/Authority/StellaOps.Authority.sln rename src/{ => Authority}/StellaOps.Authority/AGENTS.md (65%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.Abstractions.Tests/NetworkMaskMatcherTests.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.Abstractions.Tests/StellaOps.Auth.Abstractions.Tests.csproj (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.Abstractions.Tests/StellaOpsPrincipalBuilderTests.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.Abstractions.Tests/StellaOpsProblemResultFactoryTests.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.Abstractions.Tests/StellaOpsScopesTests.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.Abstractions/AuthorityTelemetry.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.Abstractions/NetworkMask.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.Abstractions/NetworkMaskMatcher.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.Abstractions/README.NuGet.md (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOps.Auth.Abstractions.csproj (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsAuthenticationDefaults.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsClaimTypes.cs (96%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsPrincipalBuilder.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsProblemResultFactory.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsScopes.cs (96%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsServiceIdentities.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsTenancyDefaults.cs (96%) rename src/{ => 
Authority}/StellaOps.Authority/StellaOps.Auth.Client.Tests/ServiceCollectionExtensionsTests.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.Client.Tests/StellaOps.Auth.Client.Tests.csproj (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.Client.Tests/StellaOpsAuthClientOptionsTests.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.Client.Tests/StellaOpsDiscoveryCacheTests.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.Client.Tests/StellaOpsTokenClientTests.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.Client.Tests/TokenCacheTests.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.Client/FileTokenCache.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.Client/IStellaOpsTokenCache.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.Client/IStellaOpsTokenClient.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.Client/InMemoryTokenCache.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.Client/README.NuGet.md (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.Client/ServiceCollectionExtensions.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.Client/StellaOps.Auth.Client.csproj (91%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsAuthClientOptions.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsDiscoveryCache.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsJwksCache.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsTokenCacheEntry.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsTokenClient.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsTokenResult.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.ServerIntegration.Tests/ServiceCollectionExtensionsTests.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.ServerIntegration.Tests/StellaOps.Auth.ServerIntegration.Tests.csproj (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.ServerIntegration.Tests/StellaOpsResourceServerOptionsTests.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.ServerIntegration.Tests/StellaOpsScopeAuthorizationHandlerTests.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.ServerIntegration/README.NuGet.md (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.ServerIntegration/ServiceCollectionExtensions.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOps.Auth.ServerIntegration.csproj (86%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOpsAuthorityConfigurationManager.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOpsAuthorizationPolicyBuilderExtensions.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOpsBypassEvaluator.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOpsResourceServerOptions.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOpsScopeAuthorizationHandler.cs (97%) rename src/{ => 
Authority}/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOpsScopeRequirement.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/Security/CryptoPasswordHasherTests.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardClientProvisioningStoreTests.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardPluginOptionsTests.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardPluginRegistrarTests.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardUserCredentialStoreTests.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StellaOps.Authority.Plugin.Standard.Tests.csproj (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/AGENTS.md (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/Bootstrap/StandardPluginBootstrapper.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/Properties/AssemblyInfo.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/Security/IPasswordHasher.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StandardClaimsEnricher.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StandardIdentityProviderPlugin.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StandardPluginOptions.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StandardPluginRegistrar.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StellaOps.Authority.Plugin.Standard.csproj (71%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/Storage/StandardClientProvisioningStore.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/Storage/StandardUserCredentialStore.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/Storage/StandardUserDocument.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/TASKS.md (99%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/AuthorityClientRegistrationTests.cs (96%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/AuthorityCredentialVerificationResultTests.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/AuthorityIdentityProviderCapabilitiesTests.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/AuthorityPluginHealthResultTests.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/AuthorityPluginOperationResultTests.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/AuthorityUserDescriptorTests.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/AuthorityUserRegistrationTests.cs (100%) rename src/{ => 
Authority}/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/StellaOps.Authority.Plugins.Abstractions.Tests.csproj (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/AuthorityClientMetadataKeys.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/AuthorityPluginContracts.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/AuthorityPluginRegistrationContext.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/AuthoritySecretHasher.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/IdentityProviderContracts.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/StellaOps.Authority.Plugins.Abstractions.csproj (86%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/AuthorityMongoDefaults.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Class1.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityBootstrapInviteDocument.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityClientCertificateBinding.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityClientDocument.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityLoginAttemptDocument.cs (96%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityRevocationDocument.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityRevocationExportStateDocument.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityScopeDocument.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityTokenDocument.cs (96%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityUserDocument.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Extensions/ServiceCollectionExtensions.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityBootstrapInviteCollectionInitializer.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityClientCollectionInitializer.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityLoginAttemptCollectionInitializer.cs (98%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityMongoInitializer.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityRevocationCollectionInitializer.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityScopeCollectionInitializer.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityTokenCollectionInitializer.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityUserCollectionInitializer.cs 
(100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/IAuthorityCollectionInitializer.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Migrations/AuthorityMongoMigrationRunner.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Migrations/EnsureAuthorityCollectionsMigration.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Migrations/IAuthorityMongoMigration.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Options/AuthorityMongoOptions.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Sessions/AuthorityMongoSessionAccessor.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/StellaOps.Authority.Storage.Mongo.csproj (78%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityBootstrapInviteStore.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityClientStore.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityLoginAttemptStore.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityRevocationExportStateStore.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityRevocationStore.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityScopeStore.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityTokenStore.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityUserStore.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityBootstrapInviteStore.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityClientStore.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityLoginAttemptStore.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityRevocationExportStateStore.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityRevocationStore.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityScopeStore.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityTokenStore.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityUserStore.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Tests/Bootstrap/BootstrapInviteCleanupServiceTests.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Tests/Console/ConsoleEndpointsTests.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Tests/Identity/AuthorityIdentityProviderRegistryTests.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Tests/Identity/AuthorityIdentityProviderSelectorTests.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Tests/Infrastructure/AuthorityWebApplicationFactory.cs (96%) rename src/{ => 
Authority}/StellaOps.Authority/StellaOps.Authority.Tests/OpenApi/OpenApiDiscoveryEndpointTests.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/ClientCredentialsAndTokenHandlersTests.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/PasswordGrantHandlersTests.cs (98%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/TokenPersistenceIntegrationTests.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Tests/Permalinks/VulnPermalinkServiceTests.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Tests/Plugins/AuthorityPluginLoaderTests.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Tests/RateLimiting/AuthorityRateLimiterIntegrationTests.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Tests/RateLimiting/AuthorityRateLimiterMetadataAccessorTests.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Tests/RateLimiting/AuthorityRateLimiterMetadataMiddlewareTests.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Tests/RateLimiting/AuthorityRateLimiterTests.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Tests/Signing/AuthoritySigningKeyManagerTests.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Tests/StellaOps.Authority.Tests.csproj (73%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.Tests/TestEnvironment.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority.sln (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/Audit/AuthorityAuditSink.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/AuthorityHttpHeaders.cs (96%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/AuthorityIdentityProviderRegistry.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/AuthorityPluginRegistry.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/AuthorityRateLimiter.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/AuthorityTelemetryConfiguration.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/Bootstrap/BootstrapApiKeyFilter.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/Bootstrap/BootstrapInviteCleanupService.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/Bootstrap/BootstrapRequests.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/Console/ConsoleEndpointExtensions.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/Console/TenantHeaderFilter.cs (96%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/OpenApi/AuthorityOpenApiDocumentProvider.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/OpenApi/OpenApiDiscoveryEndpointExtensions.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/OpenIddict/AuthorityIdentityProviderSelector.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/OpenIddict/AuthorityOpenIddictConstants.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/ClientCredentialsAuditHelper.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/ClientCredentialsHandlers.cs (100%) rename 
src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/DpopHandlers.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/PasswordGrantHandlers.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/RevocationHandlers.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/TokenPersistenceHandlers.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/TokenValidationHandlers.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/OpenIddict/TokenRequestTamperInspector.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/Permalinks/VulnPermalinkRequest.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/Permalinks/VulnPermalinkResponse.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/Permalinks/VulnPermalinkService.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/Plugins/AuthorityPluginLoader.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/Plugins/AuthorityPluginRegistrationSummary.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/Program.Partial.cs (91%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/Program.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/Properties/AssemblyInfo.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/Properties/launchSettings.json (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimiterFeature.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimiterMetadata.cs (96%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimiterMetadataAccessor.cs (96%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimiterMetadataMiddleware.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimiterPartitionKeyResolver.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimitingApplicationBuilderExtensions.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/Revocation/AuthorityRevocationExportService.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationBundleBuildResult.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationBundleBuilder.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationBundleModel.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationBundleSignature.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationBundleSigner.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationEntryModel.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationExportPackage.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationExportResponse.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/Security/AuthorityClientCertificateValidationResult.cs (100%) rename src/{ => 
Authority}/StellaOps.Authority/StellaOps.Authority/Security/AuthorityClientCertificateValidator.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/Security/AuthoritySenderConstraintKinds.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/Security/IAuthorityClientCertificateValidator.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/Signing/AuthorityJwksService.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/Signing/AuthoritySigningKeyManager.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/Signing/AuthoritySigningKeyRequest.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/Signing/AuthoritySigningKeyStatus.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/Signing/FileAuthoritySigningKeySource.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/Signing/IAuthoritySigningKeySource.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/Signing/SigningRotationRequest.cs (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/StellaOps.Authority.csproj (80%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/Tenants/AuthorityTenantCatalog.cs (97%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/appsettings.Development.json (100%) rename src/{ => Authority}/StellaOps.Authority/StellaOps.Authority/appsettings.json (100%) rename src/{ => Authority}/StellaOps.Authority/TASKS.md (98%) create mode 100644 src/Bench/StellaOps.Bench.sln rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge.Vex/README.md (87%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.Tests/BaselineLoaderTests.cs (97%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.Tests/BenchmarkScenarioReportTests.cs (97%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.Tests/StellaOps.Bench.LinkNotMerge.Vex.Tests.csproj (97%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.Tests/VexScenarioRunnerTests.cs (96%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Baseline/BaselineEntry.cs (96%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Baseline/BaselineLoader.cs (97%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Program.cs (97%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Properties/AssemblyInfo.cs (97%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Reporting/BenchmarkJsonWriter.cs (97%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Reporting/BenchmarkScenarioReport.cs (97%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Reporting/PrometheusWriter.cs (98%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Statistics.cs (96%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.csproj (97%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexLinksetAggregator.cs (97%) rename src/{ => 
Bench}/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexObservationGenerator.cs (97%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexScenarioConfig.cs (97%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexScenarioExecutionResult.cs (97%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexScenarioResult.cs (97%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexScenarioRunner.cs (97%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge.Vex/baseline.csv (99%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge.Vex/config.json (96%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge/README.md (88%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge.Tests/BaselineLoaderTests.cs (97%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge.Tests/BenchmarkScenarioReportTests.cs (97%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge.Tests/LinkNotMergeScenarioRunnerTests.cs (97%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge.Tests/StellaOps.Bench.LinkNotMerge.Tests.csproj (97%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Baseline/BaselineEntry.cs (96%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Baseline/BaselineLoader.cs (97%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/BenchmarkConfig.cs (97%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/LinkNotMergeScenarioRunner.cs (97%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/LinksetAggregator.cs (96%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/ObservationData.cs (97%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Program.cs (97%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Properties/AssemblyInfo.cs (97%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Reporting/BenchmarkJsonWriter.cs (97%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Reporting/BenchmarkScenarioReport.cs (97%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Reporting/PrometheusWriter.cs (98%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/ScenarioExecutionResult.cs (97%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/ScenarioResult.cs (97%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/ScenarioStatistics.cs (96%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/StellaOps.Bench.LinkNotMerge.csproj (97%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge/baseline.csv (99%) rename src/{ => Bench}/StellaOps.Bench/LinkNotMerge/config.json (96%) rename src/{ => Bench}/StellaOps.Bench/Notify/README.md (89%) rename src/{ => Bench}/StellaOps.Bench/Notify/StellaOps.Bench.Notify.Tests/BaselineLoaderTests.cs (97%) rename src/{ => Bench}/StellaOps.Bench/Notify/StellaOps.Bench.Notify.Tests/BenchmarkScenarioReportTests.cs (97%) rename src/{ => Bench}/StellaOps.Bench/Notify/StellaOps.Bench.Notify.Tests/NotifyScenarioRunnerTests.cs (97%) rename src/{ => 
Bench}/StellaOps.Bench/Notify/StellaOps.Bench.Notify.Tests/PrometheusWriterTests.cs (96%) rename src/{ => Bench}/StellaOps.Bench/Notify/StellaOps.Bench.Notify.Tests/StellaOps.Bench.Notify.Tests.csproj (97%) rename src/{ => Bench}/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Baseline/BaselineEntry.cs (96%) rename src/{ => Bench}/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Baseline/BaselineLoader.cs (97%) rename src/{ => Bench}/StellaOps.Bench/Notify/StellaOps.Bench.Notify/BenchmarkConfig.cs (96%) rename src/{ => Bench}/StellaOps.Bench/Notify/StellaOps.Bench.Notify/DispatchAccumulator.cs (95%) rename src/{ => Bench}/StellaOps.Bench/Notify/StellaOps.Bench.Notify/NotifyScenarioRunner.cs (97%) rename src/{ => Bench}/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Program.cs (97%) rename src/{ => Bench}/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Properties/AssemblyInfo.cs (97%) rename src/{ => Bench}/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Reporting/BenchmarkJsonWriter.cs (97%) rename src/{ => Bench}/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Reporting/BenchmarkScenarioReport.cs (97%) rename src/{ => Bench}/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Reporting/PrometheusWriter.cs (98%) rename src/{ => Bench}/StellaOps.Bench/Notify/StellaOps.Bench.Notify/ScenarioExecutionResult.cs (96%) rename src/{ => Bench}/StellaOps.Bench/Notify/StellaOps.Bench.Notify/ScenarioResult.cs (97%) rename src/{ => Bench}/StellaOps.Bench/Notify/StellaOps.Bench.Notify/ScenarioStatistics.cs (96%) rename src/{ => Bench}/StellaOps.Bench/Notify/StellaOps.Bench.Notify/StellaOps.Bench.Notify.csproj (67%) rename src/{ => Bench}/StellaOps.Bench/Notify/baseline.csv (99%) rename src/{ => Bench}/StellaOps.Bench/Notify/config.json (96%) rename src/{ => Bench}/StellaOps.Bench/PolicyEngine/README.md (87%) rename src/{ => Bench}/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Baseline/BaselineEntry.cs (96%) rename src/{ => Bench}/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Baseline/BaselineLoader.cs (97%) rename src/{ => Bench}/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/BenchmarkConfig.cs (96%) rename src/{ => Bench}/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/PathUtilities.cs (96%) rename src/{ => Bench}/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/PolicyScenarioRunner.cs (97%) rename src/{ => Bench}/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Program.cs (97%) rename src/{ => Bench}/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Reporting/BenchmarkJsonWriter.cs (97%) rename src/{ => Bench}/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Reporting/BenchmarkScenarioReport.cs (97%) rename src/{ => Bench}/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Reporting/PrometheusWriter.cs (98%) rename src/{ => Bench}/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/ScenarioResult.cs (96%) rename src/{ => Bench}/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/StellaOps.Bench.PolicyEngine.csproj (69%) rename src/{ => Bench}/StellaOps.Bench/PolicyEngine/baseline.csv (99%) rename src/{ => Bench}/StellaOps.Bench/PolicyEngine/config.json (96%) rename src/{ => Bench}/StellaOps.Bench/Scanner.Analyzers/README.md (94%) rename src/{ => Bench}/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers.Tests/BaselineLoaderTests.cs (97%) rename src/{ => Bench}/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers.Tests/BenchmarkJsonWriterTests.cs (97%) rename src/{ => 
Bench}/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers.Tests/BenchmarkScenarioReportTests.cs (96%) rename src/{ => Bench}/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers.Tests/PrometheusWriterTests.cs (97%) rename src/{ => Bench}/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers.Tests/StellaOps.Bench.ScannerAnalyzers.Tests.csproj (97%) rename src/{ => Bench}/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Baseline/BaselineEntry.cs (95%) rename src/{ => Bench}/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Baseline/BaselineLoader.cs (97%) rename src/{ => Bench}/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/BenchmarkConfig.cs (96%) rename src/{ => Bench}/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Program.cs (97%) rename src/{ => Bench}/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Reporting/BenchmarkJsonWriter.cs (97%) rename src/{ => Bench}/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Reporting/BenchmarkScenarioReport.cs (96%) rename src/{ => Bench}/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Reporting/PrometheusWriter.cs (97%) rename src/{ => Bench}/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/ScenarioResult.cs (96%) rename src/{ => Bench}/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/ScenarioRunners.cs (97%) create mode 100644 src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/StellaOps.Bench.ScannerAnalyzers.csproj rename src/{ => Bench}/StellaOps.Bench/Scanner.Analyzers/baseline.csv (98%) rename src/{ => Bench}/StellaOps.Bench/Scanner.Analyzers/config.json (77%) rename src/{ => Bench}/StellaOps.Bench/Scanner.Analyzers/lang/README.md (84%) rename src/{ => Bench}/StellaOps.Bench/Scanner.Analyzers/lang/dotnet/syft-comparison-20251023.csv (98%) rename src/{ => Bench}/StellaOps.Bench/Scanner.Analyzers/lang/go/syft-comparison-20251021.csv (98%) rename src/{ => Bench}/StellaOps.Bench/Scanner.Analyzers/lang/python/hash-throughput-20251023.csv (98%) rename src/{ => Bench}/StellaOps.Bench/TASKS.md (98%) create mode 100644 src/Cartographer/StellaOps.Cartographer.sln rename src/{ => Cartographer}/StellaOps.Cartographer/AGENTS.md (93%) rename src/{ => Cartographer}/StellaOps.Cartographer/Options/CartographerAuthorityOptions.cs (97%) rename src/{ => Cartographer}/StellaOps.Cartographer/Options/CartographerAuthorityOptionsConfigurator.cs (96%) rename src/{ => Cartographer}/StellaOps.Cartographer/Program.cs (97%) rename src/{ => Cartographer}/StellaOps.Cartographer/Properties/AssemblyInfo.cs (97%) create mode 100644 src/Cartographer/StellaOps.Cartographer/StellaOps.Cartographer.csproj rename src/{ => Cartographer}/StellaOps.Cartographer/TASKS.md (99%) rename src/{ => Cartographer/__Tests}/StellaOps.Cartographer.Tests/Options/CartographerAuthorityOptionsConfiguratorTests.cs (96%) rename src/{ => Cartographer/__Tests}/StellaOps.Cartographer.Tests/StellaOps.Cartographer.Tests.csproj (79%) create mode 100644 src/Cli/StellaOps.Cli.sln rename src/{ => Cli}/StellaOps.Cli/AGENTS.md (95%) rename src/{ => Cli}/StellaOps.Cli/Commands/CommandFactory.cs (100%) rename src/{ => Cli}/StellaOps.Cli/Commands/CommandHandlers.cs (97%) rename src/{ => Cli}/StellaOps.Cli/Configuration/AuthorityTokenUtilities.cs (96%) rename src/{ => Cli}/StellaOps.Cli/Configuration/CliBootstrapper.cs (97%) rename src/{ => 
Cli}/StellaOps.Cli/Configuration/StellaOpsCliOptions.cs (96%) rename src/{ => Cli}/StellaOps.Cli/Plugins/CliCommandModuleLoader.cs (97%) rename src/{ => Cli}/StellaOps.Cli/Plugins/CliPluginManifest.cs (96%) rename src/{ => Cli}/StellaOps.Cli/Plugins/CliPluginManifestLoader.cs (97%) rename src/{ => Cli}/StellaOps.Cli/Plugins/ICliCommandModule.cs (95%) rename src/{ => Cli}/StellaOps.Cli/Plugins/RestartOnlyCliPluginGuard.cs (96%) rename src/{ => Cli}/StellaOps.Cli/Program.cs (100%) rename src/{ => Cli}/StellaOps.Cli/Prompts/TrivyDbExportPrompt.cs (100%) rename src/{ => Cli}/StellaOps.Cli/Properties/AssemblyInfo.cs (100%) rename src/{ => Cli}/StellaOps.Cli/Services/AuthorityDiagnosticsReporter.cs (100%) rename src/{ => Cli}/StellaOps.Cli/Services/AuthorityRevocationClient.cs (97%) rename src/{ => Cli}/StellaOps.Cli/Services/BackendOperationsClient.cs (97%) rename src/{ => Cli}/StellaOps.Cli/Services/ConcelierObservationsClient.cs (97%) rename src/{ => Cli}/StellaOps.Cli/Services/IAuthorityRevocationClient.cs (100%) rename src/{ => Cli}/StellaOps.Cli/Services/IBackendOperationsClient.cs (100%) rename src/{ => Cli}/StellaOps.Cli/Services/IConcelierObservationsClient.cs (96%) rename src/{ => Cli}/StellaOps.Cli/Services/IScannerExecutor.cs (100%) rename src/{ => Cli}/StellaOps.Cli/Services/IScannerInstaller.cs (100%) rename src/{ => Cli}/StellaOps.Cli/Services/Models/AdvisoryObservationsModels.cs (96%) rename src/{ => Cli}/StellaOps.Cli/Services/Models/AocIngestDryRunModels.cs (96%) rename src/{ => Cli}/StellaOps.Cli/Services/Models/AocVerifyModels.cs (96%) rename src/{ => Cli}/StellaOps.Cli/Services/Models/AuthorityRevocationExportResult.cs (100%) rename src/{ => Cli}/StellaOps.Cli/Services/Models/ExcititorExportDownloadResult.cs (100%) rename src/{ => Cli}/StellaOps.Cli/Services/Models/ExcititorOperationResult.cs (100%) rename src/{ => Cli}/StellaOps.Cli/Services/Models/ExcititorProviderSummary.cs (100%) rename src/{ => Cli}/StellaOps.Cli/Services/Models/JobTriggerResult.cs (100%) rename src/{ => Cli}/StellaOps.Cli/Services/Models/OfflineKitModels.cs (96%) rename src/{ => Cli}/StellaOps.Cli/Services/Models/PolicyActivationModels.cs (96%) rename src/{ => Cli}/StellaOps.Cli/Services/Models/PolicyFindingsModels.cs (96%) rename src/{ => Cli}/StellaOps.Cli/Services/Models/PolicySimulationModels.cs (96%) rename src/{ => Cli}/StellaOps.Cli/Services/Models/RuntimePolicyEvaluationModels.cs (100%) rename src/{ => Cli}/StellaOps.Cli/Services/Models/ScannerArtifactResult.cs (100%) rename src/{ => Cli}/StellaOps.Cli/Services/Models/Transport/JobRunResponse.cs (100%) rename src/{ => Cli}/StellaOps.Cli/Services/Models/Transport/JobTriggerRequest.cs (100%) rename src/{ => Cli}/StellaOps.Cli/Services/Models/Transport/OfflineKitTransport.cs (95%) rename src/{ => Cli}/StellaOps.Cli/Services/Models/Transport/PolicyActivationTransport.cs (95%) rename src/{ => Cli}/StellaOps.Cli/Services/Models/Transport/PolicyFindingsTransport.cs (95%) rename src/{ => Cli}/StellaOps.Cli/Services/Models/Transport/PolicySimulationTransport.cs (95%) rename src/{ => Cli}/StellaOps.Cli/Services/Models/Transport/ProblemDocument.cs (100%) rename src/{ => Cli}/StellaOps.Cli/Services/Models/Transport/RuntimePolicyEvaluationTransport.cs (100%) rename src/{ => Cli}/StellaOps.Cli/Services/PolicyApiException.cs (96%) rename src/{ => Cli}/StellaOps.Cli/Services/ScannerExecutionResult.cs (100%) rename src/{ => Cli}/StellaOps.Cli/Services/ScannerExecutor.cs (100%) rename src/{ => Cli}/StellaOps.Cli/Services/ScannerInstaller.cs (100%) rename src/{ 
=> Cli}/StellaOps.Cli/StellaOps.Cli.csproj (78%) rename src/{ => Cli}/StellaOps.Cli/TASKS.md (99%) rename src/{ => Cli}/StellaOps.Cli/Telemetry/CliActivitySource.cs (100%) rename src/{ => Cli}/StellaOps.Cli/Telemetry/CliMetrics.cs (100%) rename src/{ => Cli}/StellaOps.Cli/Telemetry/VerbosityState.cs (100%) rename src/{ => Cli}/StellaOps.Cli/appsettings.json (100%) rename src/{ => Cli/__Libraries}/StellaOps.Cli.Plugins.NonCore/NonCoreCliCommandModule.cs (97%) rename src/{ => Cli/__Libraries}/StellaOps.Cli.Plugins.NonCore/StellaOps.Cli.Plugins.NonCore.csproj (97%) rename src/{ => Cli/__Tests}/StellaOps.Cli.Tests/Commands/CommandHandlersTests.cs (97%) rename src/{ => Cli/__Tests}/StellaOps.Cli.Tests/Configuration/CliBootstrapperTests.cs (100%) rename src/{ => Cli/__Tests}/StellaOps.Cli.Tests/Plugins/CliCommandModuleLoaderTests.cs (97%) rename src/{ => Cli/__Tests}/StellaOps.Cli.Tests/Plugins/RestartOnlyCliPluginGuardTests.cs (96%) rename src/{ => Cli/__Tests}/StellaOps.Cli.Tests/Services/AuthorityDiagnosticsReporterTests.cs (100%) rename src/{ => Cli/__Tests}/StellaOps.Cli.Tests/Services/BackendOperationsClientTests.cs (97%) create mode 100644 src/Cli/__Tests/StellaOps.Cli.Tests/StellaOps.Cli.Tests.csproj rename src/{ => Cli/__Tests}/StellaOps.Cli.Tests/Testing/TestHelpers.cs (100%) rename src/{ => Cli/__Tests}/StellaOps.Cli.Tests/UnitTest1.cs (100%) rename src/{StellaOps.Scanner.Analyzers.Lang.Tests => Cli/__Tests/StellaOps.Cli.Tests}/xunit.runner.json (100%) rename src/{ => Concelier}/StellaOps.Concelier.Tests.Shared/AssemblyInfo.cs (100%) rename src/{ => Concelier}/StellaOps.Concelier.Tests.Shared/MongoFixtureCollection.cs (100%) rename src/{ => Concelier}/StellaOps.Concelier.WebService/AGENTS.md (100%) rename src/{ => Concelier}/StellaOps.Concelier.WebService/Contracts/AdvisoryObservationContracts.cs (97%) rename src/{ => Concelier}/StellaOps.Concelier.WebService/Contracts/AdvisoryRawContracts.cs (98%) rename src/{ => Concelier}/StellaOps.Concelier.WebService/Diagnostics/HealthContracts.cs (100%) rename src/{ => Concelier}/StellaOps.Concelier.WebService/Diagnostics/IngestionMetrics.cs (97%) rename src/{ => Concelier}/StellaOps.Concelier.WebService/Diagnostics/JobMetrics.cs (100%) rename src/{ => Concelier}/StellaOps.Concelier.WebService/Diagnostics/ProblemTypes.cs (100%) rename src/{ => Concelier}/StellaOps.Concelier.WebService/Diagnostics/ServiceStatus.cs (100%) rename src/{ => Concelier}/StellaOps.Concelier.WebService/Extensions/AdvisoryRawRequestMapper.cs (97%) rename src/{ => Concelier}/StellaOps.Concelier.WebService/Extensions/ConfigurationExtensions.cs (100%) rename src/{ => Concelier}/StellaOps.Concelier.WebService/Extensions/JobRegistrationExtensions.cs (100%) rename src/{ => Concelier}/StellaOps.Concelier.WebService/Extensions/MirrorEndpointExtensions.cs (100%) rename src/{ => Concelier}/StellaOps.Concelier.WebService/Extensions/TelemetryExtensions.cs (100%) rename src/{ => Concelier}/StellaOps.Concelier.WebService/Filters/JobAuthorizationAuditFilter.cs (100%) rename src/{ => Concelier}/StellaOps.Concelier.WebService/Jobs/JobDefinitionResponse.cs (100%) rename src/{ => Concelier}/StellaOps.Concelier.WebService/Jobs/JobRunResponse.cs (100%) rename src/{ => Concelier}/StellaOps.Concelier.WebService/Jobs/JobTriggerRequest.cs (100%) rename src/{ => Concelier}/StellaOps.Concelier.WebService/Options/ConcelierOptions.cs (100%) rename src/{ => Concelier}/StellaOps.Concelier.WebService/Options/ConcelierOptionsPostConfigure.cs (100%) rename src/{ => 
Concelier}/StellaOps.Concelier.WebService/Options/ConcelierOptionsValidator.cs (100%) rename src/{ => Concelier}/StellaOps.Concelier.WebService/Program.cs (100%) rename src/{ => Concelier}/StellaOps.Concelier.WebService/Properties/launchSettings.json (100%) rename src/{ => Concelier}/StellaOps.Concelier.WebService/Services/MirrorFileLocator.cs (100%) rename src/{ => Concelier}/StellaOps.Concelier.WebService/Services/MirrorRateLimiter.cs (100%) create mode 100644 src/Concelier/StellaOps.Concelier.WebService/StellaOps.Concelier.WebService.csproj rename src/{ => Concelier}/StellaOps.Concelier.WebService/TASKS.md (99%) create mode 100644 src/Concelier/StellaOps.Concelier.sln rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Acsc/AGENTS.md (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Acsc/AcscConnector.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Acsc/AcscConnectorPlugin.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Acsc/AcscDependencyInjectionRoutine.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Acsc/AcscServiceCollectionExtensions.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Acsc/Configuration/AcscFeedOptions.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Acsc/Configuration/AcscOptions.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Acsc/Internal/AcscCursor.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Acsc/Internal/AcscDiagnostics.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Acsc/Internal/AcscDocumentMetadata.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Acsc/Internal/AcscDto.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Acsc/Internal/AcscFeedParser.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Acsc/Internal/AcscMapper.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Acsc/Jobs.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Acsc/Properties/AssemblyInfo.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Acsc/README.md (95%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Acsc/StellaOps.Concelier.Connector.Acsc.csproj (81%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Acsc/TASKS.md (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Cccs/AGENTS.md (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Cccs/CccsConnector.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Cccs/CccsConnectorPlugin.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Cccs/CccsDependencyInjectionRoutine.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Cccs/CccsServiceCollectionExtensions.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Cccs/Configuration/CccsOptions.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Cccs/Internal/CccsAdvisoryDto.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Cccs/Internal/CccsCursor.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Cccs/Internal/CccsDiagnostics.cs (100%) rename src/{ => 
Concelier/__Libraries}/StellaOps.Concelier.Connector.Cccs/Internal/CccsFeedClient.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Cccs/Internal/CccsFeedModels.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Cccs/Internal/CccsHtmlParser.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Cccs/Internal/CccsMapper.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Cccs/Internal/CccsRawAdvisoryDocument.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Cccs/Jobs.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Cccs/Properties/AssemblyInfo.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Cccs/StellaOps.Concelier.Connector.Cccs.csproj (79%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Cccs/TASKS.md (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertBund/AGENTS.md (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertBund/CertBundConnector.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertBund/CertBundConnectorPlugin.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertBund/CertBundDependencyInjectionRoutine.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertBund/CertBundServiceCollectionExtensions.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertBund/Configuration/CertBundOptions.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertBund/Internal/CertBundAdvisoryDto.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertBund/Internal/CertBundCursor.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertBund/Internal/CertBundDetailParser.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertBund/Internal/CertBundDetailResponse.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertBund/Internal/CertBundDiagnostics.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertBund/Internal/CertBundDocumentMetadata.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertBund/Internal/CertBundFeedClient.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertBund/Internal/CertBundFeedItem.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertBund/Internal/CertBundMapper.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertBund/Jobs.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertBund/README.md (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertBund/StellaOps.Concelier.Connector.CertBund.csproj (75%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertBund/TASKS.md (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertCc/AGENTS.md (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertCc/CertCcConnector.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertCc/CertCcConnectorPlugin.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertCc/CertCcDependencyInjectionRoutine.cs (100%) rename src/{ => 
Concelier/__Libraries}/StellaOps.Concelier.Connector.CertCc/CertCcServiceCollectionExtensions.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertCc/Configuration/CertCcOptions.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertCc/FEEDCONN-CERTCC-02-009_PLAN.md (88%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertCc/FEEDCONN-CERTCC-02-012_HANDOFF.md (58%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertCc/Internal/CertCcCursor.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertCc/Internal/CertCcDiagnostics.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertCc/Internal/CertCcMapper.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertCc/Internal/CertCcNoteDto.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertCc/Internal/CertCcNoteParser.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertCc/Internal/CertCcSummaryParser.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertCc/Internal/CertCcSummaryPlan.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertCc/Internal/CertCcSummaryPlanner.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertCc/Internal/CertCcVendorStatementParser.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertCc/Jobs.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertCc/Properties/AssemblyInfo.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertCc/README.md (91%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertCc/StellaOps.Concelier.Connector.CertCc.csproj (80%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertCc/TASKS.md (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertFr/AGENTS.md (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertFr/CertFrConnector.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertFr/CertFrConnectorPlugin.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertFr/CertFrDependencyInjectionRoutine.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertFr/CertFrServiceCollectionExtensions.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertFr/Configuration/CertFrOptions.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertFr/Internal/CertFrCursor.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertFr/Internal/CertFrDocumentMetadata.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertFr/Internal/CertFrDto.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertFr/Internal/CertFrFeedClient.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertFr/Internal/CertFrFeedItem.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertFr/Internal/CertFrMapper.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertFr/Internal/CertFrParser.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertFr/Jobs.cs (100%) rename src/{ => 
Concelier/__Libraries}/StellaOps.Concelier.Connector.CertFr/StellaOps.Concelier.Connector.CertFr.csproj (79%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertFr/TASKS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertIn/AGENTS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertIn/CertInConnector.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertIn/CertInConnectorPlugin.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertIn/CertInDependencyInjectionRoutine.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertIn/CertInServiceCollectionExtensions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertIn/Configuration/CertInOptions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertIn/Internal/CertInAdvisoryDto.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertIn/Internal/CertInClient.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertIn/Internal/CertInCursor.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertIn/Internal/CertInDetailParser.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertIn/Internal/CertInListingItem.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertIn/Jobs.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertIn/StellaOps.Concelier.Connector.CertIn.csproj (79%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.CertIn/TASKS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Common/AGENTS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Common/Cursors/PaginationPlanner.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Common/Cursors/TimeWindowCursorOptions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Common/Cursors/TimeWindowCursorPlanner.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Common/Cursors/TimeWindowCursorState.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Common/DocumentStatuses.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Common/Fetch/CryptoJitterSource.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Common/Fetch/IJitterSource.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Common/Fetch/RawDocumentStorage.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Common/Fetch/SourceFetchContentResult.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Common/Fetch/SourceFetchRequest.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Common/Fetch/SourceFetchResult.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Common/Fetch/SourceFetchService.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Common/Fetch/SourceRetryPolicy.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Common/Html/HtmlContentSanitizer.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Common/Http/AllowlistedHttpMessageHandler.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Common/Http/ServiceCollectionExtensions.cs (97%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Common/Http/SourceHttpClientConfigurationBinder.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Common/Http/SourceHttpClientOptions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Common/Json/IJsonSchemaValidator.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Common/Json/JsonSchemaValidationError.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Common/Json/JsonSchemaValidationException.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Common/Json/JsonSchemaValidator.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Common/Packages/PackageCoordinateHelper.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Common/Pdf/PdfTextExtractor.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Common/Properties/AssemblyInfo.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Common/State/SourceStateSeedModels.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Common/State/SourceStateSeedProcessor.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj (87%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Common/TASKS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Common/Telemetry/SourceDiagnostics.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Common/Testing/CannedHttpMessageHandler.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Common/Url/UrlNormalizer.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Common/Xml/IXmlSchemaValidator.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Common/Xml/XmlSchemaValidationError.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Common/Xml/XmlSchemaValidationException.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Common/Xml/XmlSchemaValidator.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Cve/AGENTS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Cve/Configuration/CveOptions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Cve/CveConnector.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Cve/CveConnectorPlugin.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Cve/CveDependencyInjectionRoutine.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Cve/CveServiceCollectionExtensions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Cve/Internal/CveCursor.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Cve/Internal/CveDiagnostics.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Cve/Internal/CveListParser.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Cve/Internal/CveMapper.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Cve/Internal/CveRecordDto.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Cve/Internal/CveRecordParser.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Cve/Jobs.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Cve/StellaOps.Concelier.Connector.Cve.csproj (75%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Cve/TASKS.md (94%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Debian/AssemblyInfo.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Debian/Configuration/DebianOptions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Debian/DebianConnector.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Debian/DebianConnectorPlugin.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Debian/DebianDependencyInjectionRoutine.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Debian/DebianServiceCollectionExtensions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianAdvisoryDto.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianCursor.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianDetailMetadata.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianFetchCacheEntry.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianHtmlParser.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianListEntry.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianListParser.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianMapper.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Debian/Jobs.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Debian/StellaOps.Concelier.Connector.Distro.Debian.csproj (82%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.RedHat/AGENTS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.RedHat/CONFLICT_RESOLVER_NOTES.md (78%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.RedHat/Configuration/RedHatOptions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.RedHat/Internal/Models/RedHatCsafModels.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.RedHat/Internal/RedHatCursor.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.RedHat/Internal/RedHatMapper.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.RedHat/Internal/RedHatSummaryItem.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.RedHat/Jobs.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.RedHat/Properties/AssemblyInfo.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.RedHat/RedHatConnector.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.RedHat/RedHatConnectorPlugin.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.RedHat/RedHatDependencyInjectionRoutine.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.RedHat/RedHatServiceCollectionExtensions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.RedHat/StellaOps.Concelier.Connector.Distro.RedHat.csproj (71%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.RedHat/TASKS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Suse/AssemblyInfo.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Suse/Configuration/SuseOptions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Suse/Internal/SuseAdvisoryDto.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Suse/Internal/SuseChangeRecord.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Suse/Internal/SuseChangesParser.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Suse/Internal/SuseCsafParser.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Suse/Internal/SuseCursor.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Suse/Internal/SuseFetchCacheEntry.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Suse/Internal/SuseMapper.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Suse/Jobs.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Suse/StellaOps.Concelier.Connector.Distro.Suse.csproj (82%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Suse/SuseConnector.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Suse/SuseConnectorPlugin.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Suse/SuseDependencyInjectionRoutine.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Suse/SuseServiceCollectionExtensions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Ubuntu/Configuration/UbuntuOptions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Ubuntu/Internal/UbuntuCursor.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Ubuntu/Internal/UbuntuFetchCacheEntry.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Ubuntu/Internal/UbuntuMapper.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Ubuntu/Internal/UbuntuNoticeDto.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Ubuntu/Internal/UbuntuNoticeParser.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Ubuntu/Jobs.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Ubuntu/StellaOps.Concelier.Connector.Distro.Ubuntu.csproj (82%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Ubuntu/TASKS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Ubuntu/UbuntuConnector.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Ubuntu/UbuntuConnectorPlugin.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Ubuntu/UbuntuDependencyInjectionRoutine.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Distro.Ubuntu/UbuntuServiceCollectionExtensions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ghsa/AGENTS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ghsa/Configuration/GhsaOptions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ghsa/GhsaConnector.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ghsa/GhsaConnectorPlugin.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ghsa/GhsaDependencyInjectionRoutine.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ghsa/GhsaServiceCollectionExtensions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaCursor.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaDiagnostics.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaListParser.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaMapper.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaRateLimitParser.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaRateLimitSnapshot.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaRecordDto.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaRecordParser.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ghsa/Jobs.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ghsa/Properties/AssemblyInfo.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ghsa/StellaOps.Concelier.Connector.Ghsa.csproj (79%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ghsa/TASKS.md (91%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ics.Cisa/AGENTS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ics.Cisa/Configuration/IcsCisaOptions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ics.Cisa/HANDOVER.md (69%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ics.Cisa/IcsCisaConnector.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ics.Cisa/IcsCisaConnectorPlugin.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ics.Cisa/IcsCisaDependencyInjectionRoutine.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ics.Cisa/IcsCisaServiceCollectionExtensions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ics.Cisa/Internal/IcsCisaAdvisoryDto.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ics.Cisa/Internal/IcsCisaAttachmentDto.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ics.Cisa/Internal/IcsCisaCursor.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ics.Cisa/Internal/IcsCisaDiagnostics.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ics.Cisa/Internal/IcsCisaFeedDto.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ics.Cisa/Internal/IcsCisaFeedParser.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ics.Cisa/Jobs.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ics.Cisa/StellaOps.Concelier.Connector.Ics.Cisa.csproj (87%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ics.Cisa/TASKS.md (94%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ics.Kaspersky/AGENTS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ics.Kaspersky/Configuration/KasperskyOptions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ics.Kaspersky/Internal/KasperskyAdvisoryDto.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ics.Kaspersky/Internal/KasperskyAdvisoryParser.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ics.Kaspersky/Internal/KasperskyCursor.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ics.Kaspersky/Internal/KasperskyFeedClient.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ics.Kaspersky/Internal/KasperskyFeedItem.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ics.Kaspersky/Jobs.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ics.Kaspersky/KasperskyConnector.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ics.Kaspersky/KasperskyConnectorPlugin.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ics.Kaspersky/KasperskyDependencyInjectionRoutine.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ics.Kaspersky/KasperskyServiceCollectionExtensions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ics.Kaspersky/StellaOps.Concelier.Connector.Ics.Kaspersky.csproj (79%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ics.Kaspersky/TASKS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Jvn/AGENTS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Jvn/Configuration/JvnOptions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Jvn/Internal/JvnAdvisoryMapper.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Jvn/Internal/JvnConstants.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Jvn/Internal/JvnCursor.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Jvn/Internal/JvnDetailDto.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Jvn/Internal/JvnDetailParser.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Jvn/Internal/JvnOverviewItem.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Jvn/Internal/JvnOverviewPage.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Jvn/Internal/JvnSchemaProvider.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Jvn/Internal/JvnSchemaValidationException.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Jvn/Internal/MyJvnClient.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Jvn/Jobs.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Jvn/JvnConnector.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Jvn/JvnConnectorPlugin.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Jvn/JvnDependencyInjectionRoutine.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Jvn/JvnServiceCollectionExtensions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Jvn/Schemas/data_marking.xsd (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Jvn/Schemas/jvnrss_3.2.xsd (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Jvn/Schemas/mod_sec_3.0.xsd (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Jvn/Schemas/status_3.3.xsd (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Jvn/Schemas/tlp_marking.xsd (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Jvn/Schemas/vuldef_3.2.xsd (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Jvn/Schemas/xml.xsd (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Jvn/StellaOps.Concelier.Connector.Jvn.csproj (83%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Jvn/TASKS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Kev/AGENTS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Kev/Configuration/KevOptions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Kev/Internal/KevCatalogDto.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Kev/Internal/KevCursor.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Kev/Internal/KevDiagnostics.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Kev/Internal/KevMapper.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Kev/Internal/KevSchemaProvider.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Kev/Jobs.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Kev/KevConnector.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Kev/KevConnectorPlugin.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Kev/KevDependencyInjectionRoutine.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Kev/KevServiceCollectionExtensions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Kev/Schemas/kev-catalog.schema.json (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Kev/StellaOps.Concelier.Connector.Kev.csproj (85%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Kev/TASKS.md (88%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Kisa/AGENTS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Kisa/Configuration/KisaOptions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Kisa/Internal/KisaCursor.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Kisa/Internal/KisaDetailParser.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Kisa/Internal/KisaDetailResponse.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Kisa/Internal/KisaDiagnostics.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Kisa/Internal/KisaDocumentMetadata.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Kisa/Internal/KisaFeedClient.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Kisa/Internal/KisaFeedItem.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Kisa/Internal/KisaMapper.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Kisa/Jobs.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Kisa/KisaConnector.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Kisa/KisaConnectorPlugin.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Kisa/KisaDependencyInjectionRoutine.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Kisa/KisaServiceCollectionExtensions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Kisa/StellaOps.Concelier.Connector.Kisa.csproj (79%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Kisa/TASKS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Nvd/AGENTS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Nvd/Configuration/NvdOptions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Nvd/Internal/NvdCursor.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Nvd/Internal/NvdDiagnostics.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Nvd/Internal/NvdMapper.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Nvd/Internal/NvdSchemaProvider.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Nvd/NvdConnector.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Nvd/NvdConnectorPlugin.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Nvd/NvdServiceCollectionExtensions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Nvd/Properties/AssemblyInfo.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Nvd/Schemas/nvd-vulnerability.schema.json (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Nvd/StellaOps.Concelier.Connector.Nvd.csproj (84%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Nvd/TASKS.md (93%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Osv/AGENTS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Osv/Configuration/OsvOptions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Osv/Internal/OsvCursor.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Osv/Internal/OsvDiagnostics.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Osv/Internal/OsvMapper.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Osv/Internal/OsvVulnerabilityDto.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Osv/Jobs.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Osv/OsvConnector.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Osv/OsvConnectorPlugin.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Osv/OsvDependencyInjectionRoutine.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Osv/OsvServiceCollectionExtensions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Osv/Properties/AssemblyInfo.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Osv/StellaOps.Concelier.Connector.Osv.csproj (88%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Osv/TASKS.md (88%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ru.Bdu/AGENTS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ru.Bdu/Configuration/RuBduOptions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ru.Bdu/Internal/RuBduCursor.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ru.Bdu/Internal/RuBduDiagnostics.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ru.Bdu/Internal/RuBduMapper.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ru.Bdu/Internal/RuBduVulnerabilityDto.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ru.Bdu/Internal/RuBduXmlParser.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ru.Bdu/Jobs.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ru.Bdu/Properties/AssemblyInfo.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ru.Bdu/README.md (92%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ru.Bdu/RuBduConnector.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ru.Bdu/RuBduConnectorPlugin.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ru.Bdu/RuBduDependencyInjectionRoutine.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ru.Bdu/RuBduServiceCollectionExtensions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ru.Bdu/StellaOps.Concelier.Connector.Ru.Bdu.csproj (83%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ru.Bdu/TASKS.md (94%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ru.Nkcki/AGENTS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ru.Nkcki/Configuration/RuNkckiOptions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ru.Nkcki/Internal/RuNkckiCursor.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ru.Nkcki/Internal/RuNkckiDiagnostics.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ru.Nkcki/Internal/RuNkckiJsonParser.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ru.Nkcki/Internal/RuNkckiMapper.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ru.Nkcki/Internal/RuNkckiVulnerabilityDto.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ru.Nkcki/Jobs.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ru.Nkcki/Properties/AssemblyInfo.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ru.Nkcki/RuNkckiConnector.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ru.Nkcki/RuNkckiConnectorPlugin.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ru.Nkcki/RuNkckiDependencyInjectionRoutine.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ru.Nkcki/RuNkckiServiceCollectionExtensions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ru.Nkcki/StellaOps.Concelier.Connector.Ru.Nkcki.csproj (85%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Ru.Nkcki/TASKS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.StellaOpsMirror/Client/MirrorManifestClient.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.StellaOpsMirror/Internal/MirrorAdvisoryMapper.cs (97%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.StellaOpsMirror/Internal/MirrorBundleDocument.cs (98%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.StellaOpsMirror/Internal/MirrorIndexDocument.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.StellaOpsMirror/Internal/StellaOpsMirrorCursor.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.StellaOpsMirror/Jobs.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.StellaOpsMirror/Properties/AssemblyInfo.cs (97%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.StellaOpsMirror/Security/MirrorSignatureVerifier.cs (97%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.StellaOpsMirror/Settings/StellaOpsMirrorConnectorOptions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.StellaOpsMirror/StellaOps.Concelier.Connector.StellaOpsMirror.csproj (68%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.StellaOpsMirror/StellaOpsMirrorConnector.cs (97%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.StellaOpsMirror/StellaOpsMirrorConnectorPlugin.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.StellaOpsMirror/StellaOpsMirrorDependencyInjectionRoutine.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.StellaOpsMirror/TASKS.md (90%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Adobe/AGENTS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Adobe/AdobeConnector.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Adobe/AdobeConnectorPlugin.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Adobe/AdobeDiagnostics.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Adobe/AdobeServiceCollectionExtensions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Adobe/Configuration/AdobeOptions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Adobe/Internal/AdobeBulletinDto.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Adobe/Internal/AdobeCursor.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Adobe/Internal/AdobeDetailParser.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Adobe/Internal/AdobeDocumentMetadata.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Adobe/Internal/AdobeIndexEntry.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Adobe/Internal/AdobeIndexParser.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Adobe/Internal/AdobeSchemaProvider.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Adobe/Schemas/adobe-bulletin.schema.json (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Adobe/StellaOps.Concelier.Connector.Vndr.Adobe.csproj (83%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Adobe/TASKS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Apple/AGENTS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Apple/AppleConnector.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Apple/AppleDependencyInjectionRoutine.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Apple/AppleOptions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Apple/AppleServiceCollectionExtensions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Apple/Internal/AppleCursor.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Apple/Internal/AppleDetailDto.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Apple/Internal/AppleDetailParser.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Apple/Internal/AppleDiagnostics.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Apple/Internal/AppleIndexEntry.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Apple/Internal/AppleMapper.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Apple/Jobs.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Apple/Properties/AssemblyInfo.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Apple/README.md (89%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Apple/StellaOps.Concelier.Connector.Vndr.Apple.csproj (81%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Apple/TASKS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Apple/VndrAppleConnectorPlugin.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Chromium/AGENTS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Chromium/ChromiumConnector.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Chromium/ChromiumConnectorPlugin.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Chromium/ChromiumDiagnostics.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Chromium/ChromiumServiceCollectionExtensions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Chromium/Configuration/ChromiumOptions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumCursor.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumDocumentMetadata.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumDto.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumFeedEntry.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumFeedLoader.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumMapper.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumParser.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumSchemaProvider.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Chromium/Properties/AssemblyInfo.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Chromium/Schemas/chromium-post.schema.json (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Chromium/StellaOps.Concelier.Connector.Vndr.Chromium.csproj (87%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Chromium/TASKS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Cisco/AGENTS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Cisco/CiscoConnector.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Cisco/CiscoDependencyInjectionRoutine.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Cisco/CiscoServiceCollectionExtensions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Cisco/Configuration/CiscoOptions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoAccessTokenProvider.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoAdvisoryDto.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoCsafClient.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoCsafData.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoCsafParser.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoCursor.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoDiagnostics.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoDtoFactory.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoMapper.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoOAuthMessageHandler.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoOpenVulnClient.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoRawAdvisory.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Cisco/Jobs.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Cisco/StellaOps.Concelier.Connector.Vndr.Cisco.csproj (81%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Cisco/TASKS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Cisco/VndrCiscoConnectorPlugin.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Msrc/AGENTS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Msrc/Configuration/MsrcOptions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcAdvisoryDto.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcApiClient.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcCursor.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcDetailDto.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcDetailParser.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcDiagnostics.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcDocumentMetadata.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcMapper.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcSummaryResponse.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcTokenProvider.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Msrc/Jobs.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Msrc/MsrcConnector.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Msrc/MsrcConnectorPlugin.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Msrc/MsrcDependencyInjectionRoutine.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Msrc/MsrcServiceCollectionExtensions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Msrc/README.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Msrc/StellaOps.Concelier.Connector.Vndr.Msrc.csproj (75%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Msrc/TASKS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Oracle/AGENTS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Oracle/Configuration/OracleOptions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleAffectedEntry.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleCalendarFetcher.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleCursor.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleDocumentMetadata.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleDto.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleDtoValidator.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleMapper.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleParser.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OraclePatchDocument.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Oracle/Jobs.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Oracle/OracleConnector.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Oracle/OracleDependencyInjectionRoutine.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Oracle/OracleServiceCollectionExtensions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Oracle/Properties/AssemblyInfo.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Oracle/StellaOps.Concelier.Connector.Vndr.Oracle.csproj (79%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Oracle/TASKS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Oracle/VndrOracleConnectorPlugin.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Vmware/AGENTS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Vmware/Configuration/VmwareOptions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Vmware/Internal/VmwareCursor.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Vmware/Internal/VmwareDetailDto.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Vmware/Internal/VmwareFetchCacheEntry.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Vmware/Internal/VmwareIndexItem.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Vmware/Internal/VmwareMapper.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Vmware/Jobs.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Vmware/Properties/AssemblyInfo.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Vmware/StellaOps.Concelier.Connector.Vndr.Vmware.csproj (86%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Vmware/TASKS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Vmware/VmwareConnector.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Vmware/VmwareConnectorPlugin.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Vmware/VmwareDependencyInjectionRoutine.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Vmware/VmwareDiagnostics.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Connector.Vndr.Vmware/VmwareServiceCollectionExtensions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/AGENTS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Aoc/AdvisoryRawWriteGuard.cs (97%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Aoc/AocServiceCollectionExtensions.cs (96%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Aoc/ConcelierAocGuardException.cs (97%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Aoc/IAdvisoryRawWriteGuard.cs (97%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/CanonicalMergeResult.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/CanonicalMerger.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Events/AdvisoryEventContracts.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Events/AdvisoryEventLog.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Events/IAdvisoryEventLog.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Events/IAdvisoryEventRepository.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Jobs/IJob.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Jobs/IJobCoordinator.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Jobs/IJobStore.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Jobs/ILeaseStore.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Jobs/JobCoordinator.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Jobs/JobDefinition.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Jobs/JobDiagnostics.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Jobs/JobExecutionContext.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Jobs/JobLease.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Jobs/JobPluginRegistrationExtensions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Jobs/JobRunCompletion.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Jobs/JobRunCreateRequest.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Jobs/JobRunSnapshot.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Jobs/JobRunStatus.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Jobs/JobSchedulerBuilder.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Jobs/JobSchedulerHostedService.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Jobs/JobSchedulerOptions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Jobs/JobTriggerResult.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Jobs/ServiceCollectionExtensions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Linksets/AdvisoryLinksetMapper.cs (97%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Linksets/AdvisoryObservationFactory.cs (97%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Linksets/IAdvisoryLinksetMapper.cs (97%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Linksets/IAdvisoryObservationFactory.cs (96%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Linksets/LinksetNormalization.cs (96%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Linksets/LinksetServiceCollectionExtensions.cs (97%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Noise/INoisePriorRepository.cs (96%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Noise/INoisePriorService.cs (96%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Noise/NoisePriorComputationRequest.cs (96%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Noise/NoisePriorComputationResult.cs (96%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Noise/NoisePriorService.cs (97%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Noise/NoisePriorServiceCollectionExtensions.cs (96%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Noise/NoisePriorSummary.cs (96%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Observations/AdvisoryObservationCursor.cs (97%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Observations/AdvisoryObservationQueryModels.cs (97%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Observations/AdvisoryObservationQueryService.cs (97%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Observations/IAdvisoryObservationLookup.cs (97%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Observations/IAdvisoryObservationQueryService.cs (97%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Properties/AssemblyInfo.cs (97%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Raw/AdvisoryRawQueryOptions.cs (96%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Raw/AdvisoryRawRecord.cs (96%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Raw/AdvisoryRawService.cs (97%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Raw/IAdvisoryRawRepository.cs (97%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Raw/IAdvisoryRawService.cs (96%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Raw/RawServiceCollectionExtensions.cs (96%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj (82%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/TASKS.md (99%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Unknown/IUnknownStateLedger.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Unknown/IUnknownStateRepository.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Unknown/UnknownStateLedger.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Unknown/UnknownStateLedgerRequest.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Unknown/UnknownStateLedgerResult.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Unknown/UnknownStateMarkerKinds.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Core/Unknown/UnknownStateSnapshot.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.Json/AGENTS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.Json/ExportDigestCalculator.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.Json/ExporterVersion.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.Json/IJsonExportPathResolver.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.Json/JsonExportFile.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.Json/JsonExportJob.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.Json/JsonExportManifestWriter.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.Json/JsonExportOptions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.Json/JsonExportResult.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.Json/JsonExportSnapshotBuilder.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.Json/JsonExporterDependencyInjectionRoutine.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.Json/JsonExporterPlugin.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.Json/JsonFeedExporter.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.Json/JsonMirrorBundleWriter.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.Json/StellaOps.Concelier.Exporter.Json.csproj (77%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.Json/TASKS.md (83%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.Json/VulnListJsonExportPathResolver.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.TrivyDb/AGENTS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.TrivyDb/ITrivyDbBuilder.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.TrivyDb/ITrivyDbOrasPusher.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.TrivyDb/OciDescriptor.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.TrivyDb/OciIndex.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.TrivyDb/OciManifest.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.TrivyDb/StellaOps.Concelier.Exporter.TrivyDb.csproj (81%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.TrivyDb/TASKS.md (87%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.TrivyDb/TrivyConfigDocument.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbBlob.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbBoltBuilder.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbBuilderResult.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExportJob.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExportMode.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExportOptions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExportOverrides.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExportPlan.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExportPlanner.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExporterDependencyInjectionRoutine.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExporterPlugin.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbFeedExporter.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbMediaTypes.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbMirrorBundleWriter.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbOciWriteResult.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbOciWriter.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbOrasPusher.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbPackage.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbPackageBuilder.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbPackageRequest.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Merge/AGENTS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Merge/Class1.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Merge/Comparers/DebianEvr.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Merge/Comparers/Nevra.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Merge/Comparers/SemanticVersionRangeResolver.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Merge/Identity/AdvisoryIdentityCluster.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Merge/Identity/AdvisoryIdentityResolver.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Merge/Identity/AliasIdentity.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Merge/Jobs/MergeJobKinds.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Merge/Jobs/MergeReconcileJob.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Merge/MergeServiceCollectionExtensions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Merge/Options/AdvisoryPrecedenceDefaults.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Merge/Options/AdvisoryPrecedenceOptions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Merge/Options/AdvisoryPrecedenceTable.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Merge/RANGE_PRIMITIVES_COORDINATION.md (98%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Merge/Services/AdvisoryMergeService.cs (97%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Merge/Services/AdvisoryPrecedenceMerger.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Merge/Services/AffectedPackagePrecedenceResolver.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Merge/Services/AliasGraphResolver.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Merge/Services/CanonicalHashCalculator.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Merge/Services/ConflictDetailPayload.cs (96%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Merge/Services/MergeConflictDetail.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Merge/Services/MergeConflictExplainerPayload.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Merge/Services/MergeConflictSummary.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Merge/Services/MergeEventWriter.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Merge/Services/PrecedenceMergeResult.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Merge/StellaOps.Concelier.Merge.csproj (97%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Merge/TASKS.md (99%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Models/AGENTS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Models/Advisory.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Models/AdvisoryCredit.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Models/AdvisoryProvenance.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Models/AdvisoryReference.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Models/AdvisoryWeakness.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Models/AffectedPackage.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Models/AffectedPackageStatus.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Models/AffectedPackageStatusCatalog.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Models/AffectedVersionRange.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Models/AffectedVersionRangeExtensions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Models/AliasSchemeRegistry.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Models/AliasSchemes.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Models/BACKWARD_COMPATIBILITY.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Models/CANONICAL_RECORDS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Models/CanonicalJsonSerializer.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Models/CvssMetric.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Models/EvrPrimitiveExtensions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Models/NevraPrimitiveExtensions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Models/NormalizedVersionRule.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Models/Observations/AdvisoryObservation.cs (96%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Models/OsvGhsaParityDiagnostics.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Models/OsvGhsaParityInspector.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Models/PROVENANCE_GUIDELINES.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Models/ProvenanceFieldMasks.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Models/ProvenanceInspector.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Models/RangePrimitives.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Models/SemVerPrimitiveExtensions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Models/SeverityNormalization.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Models/SnapshotSerializer.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj (97%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Models/TASKS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Models/Validation.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Normalization/AssemblyInfo.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Normalization/Cvss/CvssMetricNormalizer.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Normalization/Distro/DebianEvr.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Normalization/Distro/Nevra.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Normalization/Identifiers/Cpe23.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Normalization/Identifiers/IdentifierNormalizer.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Normalization/Identifiers/PackageUrl.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Normalization/SemVer/SemVerRangeRuleBuilder.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Normalization/TASKS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Normalization/Text/DescriptionNormalizer.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.RawModels/AdvisoryRawDocument.cs (97%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.RawModels/Class1.cs (92%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.RawModels/JsonElementExtensions.cs (96%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.RawModels/RawDocumentFactory.cs (97%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.RawModels/StellaOps.Concelier.RawModels.csproj (97%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.RawModels/VexRawDocument.cs (98%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/AGENTS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Advisories/AdvisoryDocument.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Advisories/AdvisoryStore.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Advisories/IAdvisoryStore.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Advisories/NormalizedVersionDocument.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Advisories/NormalizedVersionDocumentFactory.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Aliases/AliasDocument.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Aliases/AliasStore.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Aliases/AliasStoreConstants.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Aliases/AliasStoreMetrics.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Aliases/IAliasStore.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/ChangeHistory/ChangeHistoryDocument.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/ChangeHistory/ChangeHistoryDocumentExtensions.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/ChangeHistory/ChangeHistoryFieldChange.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/ChangeHistory/ChangeHistoryRecord.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/ChangeHistory/IChangeHistoryStore.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/ChangeHistory/MongoChangeHistoryStore.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Conflicts/AdvisoryConflictDocument.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Conflicts/AdvisoryConflictRecord.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Conflicts/AdvisoryConflictStore.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Documents/DocumentDocument.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Documents/DocumentRecord.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Documents/DocumentStore.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Documents/IDocumentStore.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Dtos/DtoDocument.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Dtos/DtoRecord.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Dtos/DtoStore.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Dtos/IDtoStore.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Events/MongoAdvisoryEventRepository.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Exporting/ExportStateDocument.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Exporting/ExportStateManager.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Exporting/ExportStateRecord.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Exporting/ExportStateStore.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Exporting/IExportStateStore.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/ISourceStateRepository.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/JobLeaseDocument.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/JobRunDocument.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/JpFlags/IJpFlagStore.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/JpFlags/JpFlagDocument.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/JpFlags/JpFlagRecord.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/JpFlags/JpFlagStore.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/MIGRATIONS.md (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/MergeEvents/IMergeEventStore.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/MergeEvents/MergeEventDocument.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/MergeEvents/MergeEventRecord.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/MergeEvents/MergeEventStore.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/MergeEvents/MergeFieldDecision.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Migrations/EnsureAdvisoryEventCollectionsMigration.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Migrations/EnsureAdvisoryRawIdempotencyIndexMigration.cs (97%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Migrations/EnsureAdvisoryRawValidatorMigration.cs (97%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Migrations/EnsureAdvisorySupersedesBackfillMigration.cs (97%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Migrations/EnsureDocumentExpiryIndexesMigration.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Migrations/EnsureGridFsExpiryIndexesMigration.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Migrations/IMongoMigration.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Migrations/MongoMigrationDocument.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Migrations/MongoMigrationRunner.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Migrations/SemVerStyleBackfillMigration.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/MongoBootstrapper.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/MongoCollectionValidatorOptions.cs (95%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/MongoJobStore.cs (100%)
 rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/MongoLeaseStore.cs (100%)
 rename src/{ =>
Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/MongoSessionProvider.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/MongoSourceStateRepository.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/MongoStorageDefaults.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/MongoStorageOptions.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Observations/AdvisoryObservationDocument.cs (96%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Observations/AdvisoryObservationDocumentFactory.cs (97%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Observations/AdvisoryObservationLookup.cs (97%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Observations/AdvisoryObservationStore.cs (97%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Observations/IAdvisoryObservationStore.cs (97%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Properties/AssemblyInfo.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/PsirtFlags/IPsirtFlagStore.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/PsirtFlags/PsirtFlagDocument.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/PsirtFlags/PsirtFlagRecord.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/PsirtFlags/PsirtFlagStore.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Raw/MongoAdvisoryRawRepository.cs (97%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/RawDocumentRetentionService.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/ServiceCollectionExtensions.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/SourceStateDocument.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/SourceStateRecord.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/SourceStateRepositoryExtensions.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Statements/AdvisoryStatementDocument.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Statements/AdvisoryStatementRecord.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/Statements/AdvisoryStatementStore.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj (97%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Storage.Mongo/TASKS.md (99%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Testing/ConnectorTestHarness.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Testing/MongoIntegrationFixture.cs (100%) rename src/{ => Concelier/__Libraries}/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj (97%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Acsc.Tests/Acsc/AcscConnectorFetchTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Acsc.Tests/Acsc/AcscConnectorParseTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Acsc.Tests/Acsc/AcscHttpClientConfigurationTests.cs (100%) rename src/{ => 
Concelier/__Tests}/StellaOps.Concelier.Connector.Acsc.Tests/Acsc/Fixtures/acsc-advisories-multi.snapshot.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Acsc.Tests/Acsc/Fixtures/acsc-advisories.snapshot.json (100%) create mode 100644 src/Concelier/__Tests/StellaOps.Concelier.Connector.Acsc.Tests/StellaOps.Concelier.Connector.Acsc.Tests.csproj rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Cccs.Tests/CccsConnectorTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Cccs.Tests/Fixtures/cccs-feed-en.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Cccs.Tests/Fixtures/cccs-raw-advisory-fr.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Cccs.Tests/Fixtures/cccs-raw-advisory.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Cccs.Tests/Fixtures/cccs-taxonomy-en.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Cccs.Tests/Internal/CccsHtmlParserTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Cccs.Tests/Internal/CccsMapperTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Cccs.Tests/StellaOps.Concelier.Connector.Cccs.Tests.csproj (59%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.CertBund.Tests/CertBundConnectorTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.CertBund.Tests/Fixtures/certbund-detail.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.CertBund.Tests/Fixtures/certbund-feed.xml (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.CertBund.Tests/StellaOps.Concelier.Connector.CertBund.Tests.csproj (64%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.CertCc.Tests/CertCc/CertCcConnectorFetchTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.CertCc.Tests/CertCc/CertCcConnectorSnapshotTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.CertCc.Tests/CertCc/CertCcConnectorTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/certcc-advisories.snapshot.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/certcc-documents.snapshot.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/certcc-requests.snapshot.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/certcc-state.snapshot.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/summary-2025-09.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/summary-2025-10.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/summary-2025-11.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/summary-2025.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/vendor-statuses-294418.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/vendors-294418.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/vu-257161.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/vu-294418-vendors.json (100%) 
rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/vu-294418-vuls.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/vu-294418.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/vulnerabilities-294418.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.CertCc.Tests/Internal/CertCcMapperTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.CertCc.Tests/Internal/CertCcSummaryParserTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.CertCc.Tests/Internal/CertCcSummaryPlannerTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.CertCc.Tests/Internal/CertCcVendorStatementParserTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.CertCc.Tests/StellaOps.Concelier.Connector.CertCc.Tests.csproj (59%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.CertFr.Tests/CertFr/CertFrConnectorTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.CertFr.Tests/CertFr/Fixtures/certfr-advisories.snapshot.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.CertFr.Tests/CertFr/Fixtures/certfr-detail-AV-2024-001.html (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.CertFr.Tests/CertFr/Fixtures/certfr-detail-AV-2024-002.html (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.CertFr.Tests/CertFr/Fixtures/certfr-feed.xml (100%) create mode 100644 src/Concelier/__Tests/StellaOps.Concelier.Connector.CertFr.Tests/StellaOps.Concelier.Connector.CertFr.Tests.csproj rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.CertIn.Tests/CertIn/CertInConnectorTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.CertIn.Tests/CertIn/Fixtures/alerts-page1.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.CertIn.Tests/CertIn/Fixtures/detail-CIAD-2024-0005.html (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.CertIn.Tests/CertIn/Fixtures/expected-advisory.json (100%) create mode 100644 src/Concelier/__Tests/StellaOps.Concelier.Connector.CertIn.Tests/StellaOps.Concelier.Connector.CertIn.Tests.csproj rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Common.Tests/Common/CannedHttpMessageHandlerTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Common.Tests/Common/HtmlContentSanitizerTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Common.Tests/Common/PackageCoordinateHelperTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Common.Tests/Common/PdfTextExtractorTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Common.Tests/Common/SourceFetchServiceGuardTests.cs (97%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Common.Tests/Common/SourceFetchServiceTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Common.Tests/Common/SourceHttpClientBuilderTests.cs (97%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Common.Tests/Common/TimeWindowCursorPlannerTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Common.Tests/Common/UrlNormalizerTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Common.Tests/Json/JsonSchemaValidatorTests.cs 
(100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Common.Tests/StellaOps.Concelier.Connector.Common.Tests.csproj (54%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Common.Tests/Xml/XmlSchemaValidatorTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Cve.Tests/Cve/CveConnectorTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Cve.Tests/Fixtures/cve-CVE-2024-0001.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Cve.Tests/Fixtures/cve-list.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Cve.Tests/Fixtures/expected-CVE-2024-0001.json (100%) create mode 100644 src/Concelier/__Tests/StellaOps.Concelier.Connector.Cve.Tests/StellaOps.Concelier.Connector.Cve.Tests.csproj rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Distro.Debian.Tests/DebianConnectorTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Distro.Debian.Tests/DebianMapperTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Distro.Debian.Tests/Source/Distro/Debian/Fixtures/debian-detail-dsa-2024-123.html (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Distro.Debian.Tests/Source/Distro/Debian/Fixtures/debian-detail-dsa-2024-124.html (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Distro.Debian.Tests/Source/Distro/Debian/Fixtures/debian-list.txt (100%) create mode 100644 src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Debian.Tests/StellaOps.Concelier.Connector.Distro.Debian.Tests.csproj rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/csaf-rhsa-2025-0001.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/csaf-rhsa-2025-0002.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/csaf-rhsa-2025-0003.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/rhsa-2025-0001.snapshot.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/rhsa-2025-0002.snapshot.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/rhsa-2025-0003.snapshot.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/summary-page1-repeat.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/summary-page1.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/summary-page2.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/summary-page3.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/RedHatConnectorHarnessTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/RedHatConnectorTests.cs (100%) create mode 100644 src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests.csproj rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Distro.Suse.Tests/Source/Distro/Suse/Fixtures/suse-changes.csv (100%) rename src/{ => 
Concelier/__Tests}/StellaOps.Concelier.Connector.Distro.Suse.Tests/Source/Distro/Suse/Fixtures/suse-su-2025_0001-1.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Distro.Suse.Tests/Source/Distro/Suse/Fixtures/suse-su-2025_0002-1.json (100%) create mode 100644 src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Suse.Tests/StellaOps.Concelier.Connector.Distro.Suse.Tests.csproj rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Distro.Suse.Tests/SuseConnectorTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Distro.Suse.Tests/SuseCsafParserTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Distro.Suse.Tests/SuseMapperTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Distro.Ubuntu.Tests/Fixtures/ubuntu-notices-page0.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Distro.Ubuntu.Tests/Fixtures/ubuntu-notices-page1.json (100%) create mode 100644 src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Ubuntu.Tests/StellaOps.Concelier.Connector.Distro.Ubuntu.Tests.csproj rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Distro.Ubuntu.Tests/UbuntuConnectorTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ghsa.Tests/Fixtures/conflict-ghsa.canonical.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ghsa.Tests/Fixtures/credit-parity.ghsa.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ghsa.Tests/Fixtures/credit-parity.nvd.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ghsa.Tests/Fixtures/credit-parity.osv.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ghsa.Tests/Fixtures/expected-GHSA-xxxx-yyyy-zzzz.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ghsa.Tests/Fixtures/ghsa-GHSA-xxxx-yyyy-zzzz.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ghsa.Tests/Fixtures/ghsa-list.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ghsa.Tests/Ghsa/GhsaConflictFixtureTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ghsa.Tests/Ghsa/GhsaConnectorTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ghsa.Tests/Ghsa/GhsaCreditParityRegressionTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ghsa.Tests/Ghsa/GhsaDependencyInjectionRoutineTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ghsa.Tests/Ghsa/GhsaDiagnosticsTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ghsa.Tests/Ghsa/GhsaMapperTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ghsa.Tests/Ghsa/GhsaRateLimitParserTests.cs (100%) create mode 100644 src/Concelier/__Tests/StellaOps.Concelier.Connector.Ghsa.Tests/StellaOps.Concelier.Connector.Ghsa.Tests.csproj rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ics.Cisa.Tests/IcsCisa/Fixtures/icsa-25-123-01.html (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ics.Cisa.Tests/IcsCisa/Fixtures/icsma-25-045-01.html (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ics.Cisa.Tests/IcsCisa/Fixtures/sample-feed.xml (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ics.Cisa.Tests/IcsCisa/IcsCisaConnectorMappingTests.cs (100%) rename src/{ => 
Concelier/__Tests}/StellaOps.Concelier.Connector.Ics.Cisa.Tests/IcsCisa/IcsCisaFeedParserTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ics.Cisa.Tests/IcsCisaConnectorTests.cs (100%) create mode 100644 src/Concelier/__Tests/StellaOps.Concelier.Connector.Ics.Cisa.Tests/StellaOps.Concelier.Connector.Ics.Cisa.Tests.csproj rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ics.Kaspersky.Tests/Kaspersky/Fixtures/detail-acme-controller-2024.html (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ics.Kaspersky.Tests/Kaspersky/Fixtures/expected-advisory.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ics.Kaspersky.Tests/Kaspersky/Fixtures/feed-page1.xml (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ics.Kaspersky.Tests/Kaspersky/KasperskyConnectorTests.cs (100%) create mode 100644 src/Concelier/__Tests/StellaOps.Concelier.Connector.Ics.Kaspersky.Tests/StellaOps.Concelier.Connector.Ics.Kaspersky.Tests.csproj rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Jvn.Tests/Jvn/Fixtures/expected-advisory.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Jvn.Tests/Jvn/Fixtures/jvnrss-window1.xml (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Jvn.Tests/Jvn/Fixtures/vuldef-JVNDB-2024-123456.xml (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Jvn.Tests/Jvn/JvnConnectorTests.cs (100%) create mode 100644 src/Concelier/__Tests/StellaOps.Concelier.Connector.Jvn.Tests/StellaOps.Concelier.Connector.Jvn.Tests.csproj rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Kev.Tests/Kev/Fixtures/kev-advisories.snapshot.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Kev.Tests/Kev/Fixtures/kev-catalog.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Kev.Tests/Kev/KevConnectorTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Kev.Tests/Kev/KevMapperTests.cs (100%) create mode 100644 src/Concelier/__Tests/StellaOps.Concelier.Connector.Kev.Tests/StellaOps.Concelier.Connector.Kev.Tests.csproj rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Kisa.Tests/Fixtures/kisa-detail.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Kisa.Tests/Fixtures/kisa-feed.xml (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Kisa.Tests/KisaConnectorTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Kisa.Tests/StellaOps.Concelier.Connector.Kisa.Tests.csproj (62%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/conflict-nvd.canonical.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/credit-parity.ghsa.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/credit-parity.nvd.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/credit-parity.osv.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/nvd-invalid-schema.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/nvd-multipage-1.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/nvd-multipage-2.json (100%) rename src/{ => 
Concelier/__Tests}/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/nvd-multipage-3.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/nvd-window-1.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/nvd-window-2.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/nvd-window-update.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/NvdConflictFixtureTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/NvdConnectorHarnessTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/NvdConnectorTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/NvdMergeExportParityTests.cs (100%) create mode 100644 src/Concelier/__Tests/StellaOps.Concelier.Connector.Nvd.Tests/StellaOps.Concelier.Connector.Nvd.Tests.csproj rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/conflict-osv.canonical.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/osv-ghsa.ghsa.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/osv-ghsa.osv.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/osv-ghsa.raw-ghsa.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/osv-ghsa.raw-osv.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/osv-npm.snapshot.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/osv-pypi.snapshot.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Osv.Tests/Osv/OsvConflictFixtureTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Osv.Tests/Osv/OsvGhsaParityRegressionTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Osv.Tests/Osv/OsvMapperTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Osv.Tests/Osv/OsvSnapshotTests.cs (100%) create mode 100644 src/Concelier/__Tests/StellaOps.Concelier.Connector.Osv.Tests/StellaOps.Concelier.Connector.Osv.Tests.csproj rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ru.Bdu.Tests/Fixtures/export-sample.xml (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ru.Bdu.Tests/Fixtures/ru-bdu-advisories.snapshot.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ru.Bdu.Tests/Fixtures/ru-bdu-documents.snapshot.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ru.Bdu.Tests/Fixtures/ru-bdu-dtos.snapshot.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ru.Bdu.Tests/Fixtures/ru-bdu-requests.snapshot.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ru.Bdu.Tests/Fixtures/ru-bdu-state.snapshot.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ru.Bdu.Tests/RuBduConnectorSnapshotTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ru.Bdu.Tests/RuBduMapperTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ru.Bdu.Tests/RuBduXmlParserTests.cs (100%) create mode 100644 
src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests.csproj rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/Fixtures/bulletin-legacy.json.zip (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/Fixtures/bulletin-sample.json.zip (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/Fixtures/listing-page2.html (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/Fixtures/listing.html (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/Fixtures/nkcki-advisories.snapshot.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/RuNkckiConnectorTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/RuNkckiJsonParserTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/RuNkckiMapperTests.cs (100%) create mode 100644 src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/StellaOps.Concelier.Connector.Ru.Nkcki.Tests.csproj rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/FixtureLoader.cs (97%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/Fixtures/mirror-advisory.expected.json (96%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/Fixtures/mirror-bundle.sample.json (96%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/MirrorAdvisoryMapperTests.cs (97%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/MirrorSignatureVerifierTests.cs (97%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/SampleData.cs (97%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/StellaOps.Concelier.Connector.StellaOpsMirror.Tests.csproj (51%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/StellaOpsMirrorConnectorTests.cs (97%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Adobe.Tests/Adobe/AdobeConnectorFetchTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Adobe.Tests/Adobe/Fixtures/adobe-advisories.snapshot.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Adobe.Tests/Adobe/Fixtures/adobe-detail-apsb25-85.html (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Adobe.Tests/Adobe/Fixtures/adobe-detail-apsb25-87.html (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Adobe.Tests/Adobe/Fixtures/adobe-index.html (100%) create mode 100644 src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Adobe.Tests/StellaOps.Concelier.Connector.Vndr.Adobe.Tests.csproj rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/AppleConnectorTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/AppleFixtureManager.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/AppleLiveRegressionTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/106355.expected.json (100%) rename src/{ => 
Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/106355.html (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/125326.expected.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/125326.html (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/125328.expected.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/125328.html (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/HT214108.expected.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/HT215500.expected.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/ht214108.html (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/ht215500.html (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/index.json (100%) create mode 100644 src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests.csproj rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Chromium.Tests/Chromium/ChromiumConnectorTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Chromium.Tests/Chromium/ChromiumMapperTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Chromium.Tests/Chromium/Fixtures/chromium-advisory.snapshot.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Chromium.Tests/Chromium/Fixtures/chromium-detail.html (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Chromium.Tests/Chromium/Fixtures/chromium-feed.xml (100%) create mode 100644 src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Chromium.Tests/StellaOps.Concelier.Connector.Vndr.Chromium.Tests.csproj rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Cisco.Tests/CiscoDtoFactoryTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Cisco.Tests/CiscoMapperTests.cs (100%) create mode 100644 src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Cisco.Tests/StellaOps.Concelier.Connector.Vndr.Cisco.Tests.csproj rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Msrc.Tests/Fixtures/msrc-detail.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Msrc.Tests/Fixtures/msrc-summary.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Msrc.Tests/MsrcConnectorTests.cs (100%) create mode 100644 src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Msrc.Tests/StellaOps.Concelier.Connector.Vndr.Msrc.Tests.csproj rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/Oracle/Fixtures/oracle-advisories.snapshot.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/Oracle/Fixtures/oracle-calendar-cpuapr2024-single.html (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/Oracle/Fixtures/oracle-calendar-cpuapr2024.html (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/Oracle/Fixtures/oracle-detail-cpuapr2024-01.html (100%) rename src/{ => 
Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/Oracle/Fixtures/oracle-detail-cpuapr2024-02.html (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/Oracle/Fixtures/oracle-detail-invalid.html (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/Oracle/OracleConnectorTests.cs (100%) create mode 100644 src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/StellaOps.Concelier.Connector.Vndr.Oracle.Tests.csproj create mode 100644 src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/StellaOps.Concelier.Connector.Vndr.Vmware.Tests.csproj rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/Fixtures/vmware-advisories.snapshot.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/Fixtures/vmware-detail-vmsa-2024-0001.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/Fixtures/vmware-detail-vmsa-2024-0002.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/Fixtures/vmware-detail-vmsa-2024-0003.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/Fixtures/vmware-index-initial.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/Fixtures/vmware-index-second.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/VmwareConnectorTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/VmwareMapperTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Core.Tests/Aoc/AdvisoryRawWriteGuardTests.cs (97%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Core.Tests/CanonicalMergerTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Core.Tests/Events/AdvisoryEventLogTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Core.Tests/JobCoordinatorTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Core.Tests/JobPluginRegistrationExtensionsTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Core.Tests/JobSchedulerBuilderTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Core.Tests/Linksets/AdvisoryLinksetMapperTests.cs (97%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Core.Tests/Linksets/AdvisoryObservationFactoryTests.cs (97%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Core.Tests/Noise/NoisePriorServiceTests.cs (97%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Core.Tests/Observations/AdvisoryObservationQueryServiceTests.cs (97%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Core.Tests/PluginRoutineFixtures.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Core.Tests/Raw/AdvisoryRawServiceTests.cs (97%) create mode 100644 src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/StellaOps.Concelier.Core.Tests.csproj rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Core.Tests/Unknown/UnknownStateLedgerTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Exporter.Json.Tests/JsonExportSnapshotBuilderTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Exporter.Json.Tests/JsonExporterDependencyInjectionRoutineTests.cs (100%) rename src/{ => 
Concelier/__Tests}/StellaOps.Concelier.Exporter.Json.Tests/JsonExporterParitySmokeTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Exporter.Json.Tests/JsonFeedExporterTests.cs (100%) create mode 100644 src/Concelier/__Tests/StellaOps.Concelier.Exporter.Json.Tests/StellaOps.Concelier.Exporter.Json.Tests.csproj rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Exporter.Json.Tests/VulnListJsonExportPathResolverTests.cs (100%) create mode 100644 src/Concelier/__Tests/StellaOps.Concelier.Exporter.TrivyDb.Tests/StellaOps.Concelier.Exporter.TrivyDb.Tests.csproj rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Exporter.TrivyDb.Tests/TrivyDbExportPlannerTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Exporter.TrivyDb.Tests/TrivyDbFeedExporterTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Exporter.TrivyDb.Tests/TrivyDbOciWriterTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Exporter.TrivyDb.Tests/TrivyDbPackageBuilderTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Merge.Tests/AdvisoryIdentityResolverTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Merge.Tests/AdvisoryMergeServiceTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Merge.Tests/AdvisoryPrecedenceMergerTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Merge.Tests/AffectedPackagePrecedenceResolverTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Merge.Tests/AliasGraphResolverTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Merge.Tests/CanonicalHashCalculatorTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Merge.Tests/DebianEvrComparerTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Merge.Tests/MergeEventWriterTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Merge.Tests/MergePrecedenceIntegrationTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Merge.Tests/MetricCollector.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Merge.Tests/NevraComparerTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Merge.Tests/SemanticVersionRangeResolverTests.cs (100%) create mode 100644 src/Concelier/__Tests/StellaOps.Concelier.Merge.Tests/StellaOps.Concelier.Merge.Tests.csproj rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Merge.Tests/TestLogger.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Models.Tests/AdvisoryProvenanceTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Models.Tests/AdvisoryTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Models.Tests/AffectedPackageStatusTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Models.Tests/AffectedVersionRangeExtensionsTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Models.Tests/AliasSchemeRegistryTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Models.Tests/CanonicalExampleFactory.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Models.Tests/CanonicalExamplesTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Models.Tests/CanonicalJsonSerializerTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Models.Tests/EvrPrimitiveExtensionsTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Models.Tests/Fixtures/ghsa-semver.actual.json (96%) rename src/{ => 
Concelier/__Tests}/StellaOps.Concelier.Models.Tests/Fixtures/ghsa-semver.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Models.Tests/Fixtures/kev-flag.actual.json (96%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Models.Tests/Fixtures/kev-flag.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Models.Tests/Fixtures/nvd-basic.actual.json (96%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Models.Tests/Fixtures/nvd-basic.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Models.Tests/Fixtures/psirt-overlay.actual.json (96%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Models.Tests/Fixtures/psirt-overlay.json (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Models.Tests/NevraPrimitiveExtensionsTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Models.Tests/NormalizedVersionRuleTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Models.Tests/Observations/AdvisoryObservationTests.cs (97%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Models.Tests/OsvGhsaParityDiagnosticsTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Models.Tests/OsvGhsaParityInspectorTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Models.Tests/ProvenanceDiagnosticsTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Models.Tests/RangePrimitivesTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Models.Tests/SemVerPrimitiveTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Models.Tests/SerializationDeterminismTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Models.Tests/SeverityNormalizationTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Models.Tests/StellaOps.Concelier.Models.Tests.csproj (70%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Normalization.Tests/CpeNormalizerTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Normalization.Tests/CvssMetricNormalizerTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Normalization.Tests/DebianEvrParserTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Normalization.Tests/DescriptionNormalizerTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Normalization.Tests/NevraParserTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Normalization.Tests/PackageUrlNormalizerTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Normalization.Tests/SemVerRangeRuleBuilderTests.cs (100%) create mode 100644 src/Concelier/__Tests/StellaOps.Concelier.Normalization.Tests/StellaOps.Concelier.Normalization.Tests.csproj rename src/{ => Concelier/__Tests}/StellaOps.Concelier.RawModels.Tests/StellaOps.Concelier.RawModels.Tests.csproj (80%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.RawModels.Tests/UnitTest1.cs (92%) rename src/{StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Tests => Concelier/__Tests/StellaOps.Concelier.RawModels.Tests}/xunit.runner.json (96%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Storage.Mongo.Tests/AdvisoryConflictStoreTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Storage.Mongo.Tests/AdvisoryStatementStoreTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Storage.Mongo.Tests/AdvisoryStorePerformanceTests.cs (100%) rename src/{ => 
Concelier/__Tests}/StellaOps.Concelier.Storage.Mongo.Tests/AdvisoryStoreTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Storage.Mongo.Tests/AliasStoreTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Storage.Mongo.Tests/DocumentStoreTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Storage.Mongo.Tests/DtoStoreTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Storage.Mongo.Tests/ExportStateManagerTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Storage.Mongo.Tests/ExportStateStoreTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Storage.Mongo.Tests/MergeEventStoreTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Storage.Mongo.Tests/Migrations/MongoMigrationRunnerTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Storage.Mongo.Tests/MongoAdvisoryEventRepositoryTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Storage.Mongo.Tests/MongoBootstrapperTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Storage.Mongo.Tests/MongoJobStoreTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Storage.Mongo.Tests/MongoSourceStateRepositoryTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Storage.Mongo.Tests/Observations/AdvisoryObservationDocumentFactoryTests.cs (97%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Storage.Mongo.Tests/Observations/AdvisoryObservationStoreTests.cs (97%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.Storage.Mongo.Tests/RawDocumentRetentionServiceTests.cs (100%) create mode 100644 src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/StellaOps.Concelier.Storage.Mongo.Tests.csproj rename src/{ => Concelier/__Tests}/StellaOps.Concelier.WebService.Tests/ConcelierOptionsPostConfigureTests.cs (100%) rename src/{ => Concelier/__Tests}/StellaOps.Concelier.WebService.Tests/PluginLoaderTests.cs (100%) create mode 100644 src/Concelier/__Tests/StellaOps.Concelier.WebService.Tests/StellaOps.Concelier.WebService.Tests.csproj rename src/{ => Concelier/__Tests}/StellaOps.Concelier.WebService.Tests/WebServiceEndpointsTests.cs (97%) rename src/{ => DevPortal}/StellaOps.DevPortal.Site/AGENTS.md (97%) rename src/{ => DevPortal}/StellaOps.DevPortal.Site/TASKS.md (99%) create mode 100644 src/EvidenceLocker/StellaOps.EvidenceLocker.sln rename src/{ => EvidenceLocker}/StellaOps.EvidenceLocker/AGENTS.md (98%) rename src/{ => EvidenceLocker}/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Core/Class1.cs (92%) rename src/{ => EvidenceLocker}/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Core/StellaOps.EvidenceLocker.Core.csproj (95%) rename src/{ => EvidenceLocker}/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Infrastructure/Class1.cs (93%) rename src/{ => EvidenceLocker}/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Infrastructure/StellaOps.EvidenceLocker.Infrastructure.csproj (94%) rename src/{ => EvidenceLocker}/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Tests/StellaOps.EvidenceLocker.Tests.csproj (91%) rename src/{ => EvidenceLocker}/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Tests/UnitTest1.cs (92%) rename src/{StellaOps.Aoc.Tests => EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Tests}/xunit.runner.json (96%) rename src/{StellaOps.Orchestrator/StellaOps.Orchestrator.WebService => EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService}/Program.cs (96%) rename 
src/{ => EvidenceLocker}/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/Properties/launchSettings.json (96%) rename src/{ => EvidenceLocker}/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/StellaOps.EvidenceLocker.WebService.csproj (95%) rename src/{ => EvidenceLocker}/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/StellaOps.EvidenceLocker.WebService.http (96%) rename src/{StellaOps.Notifier/StellaOps.Notifier.WebService => EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService}/appsettings.Development.json (93%) rename src/{ => EvidenceLocker}/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/appsettings.json (94%) rename src/{ => EvidenceLocker}/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/Program.cs (96%) rename src/{ => EvidenceLocker}/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/Properties/launchSettings.json (96%) rename src/{ => EvidenceLocker}/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/StellaOps.EvidenceLocker.Worker.csproj (95%) rename src/{ => EvidenceLocker}/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/Worker.cs (96%) rename src/{StellaOps.ExportCenter/StellaOps.ExportCenter.Worker => EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker}/appsettings.Development.json (94%) rename src/{StellaOps.Notifier/StellaOps.Notifier.Worker => EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker}/appsettings.json (94%) rename src/{ => EvidenceLocker}/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.sln (98%) rename src/{ => EvidenceLocker}/StellaOps.EvidenceLocker/TASKS.md (99%) rename src/{ => Excititor}/StellaOps.Excititor.Connectors.StellaOpsMirror/TASKS.md (100%) rename src/{ => Excititor}/StellaOps.Excititor.WebService/AGENTS.md (100%) rename src/{ => Excititor}/StellaOps.Excititor.WebService/Endpoints/IngestEndpoints.cs (100%) rename src/{ => Excititor}/StellaOps.Excititor.WebService/Endpoints/MirrorEndpoints.cs (100%) rename src/{ => Excititor}/StellaOps.Excititor.WebService/Endpoints/ResolveEndpoint.cs (100%) rename src/{ => Excititor}/StellaOps.Excititor.WebService/Program.cs (100%) rename src/{ => Excititor}/StellaOps.Excititor.WebService/Properties/AssemblyInfo.cs (97%) rename src/{ => Excititor}/StellaOps.Excititor.WebService/Services/MirrorRateLimiter.cs (100%) rename src/{ => Excititor}/StellaOps.Excititor.WebService/Services/ScopeAuthorization.cs (100%) rename src/{ => Excititor}/StellaOps.Excititor.WebService/Services/VexIngestOrchestrator.cs (100%) create mode 100644 src/Excititor/StellaOps.Excititor.WebService/StellaOps.Excititor.WebService.csproj rename src/{ => Excititor}/StellaOps.Excititor.WebService/TASKS.md (99%) rename src/{ => Excititor}/StellaOps.Excititor.Worker/AGENTS.md (100%) rename src/{ => Excititor}/StellaOps.Excititor.Worker/Options/VexWorkerOptions.cs (100%) rename src/{ => Excititor}/StellaOps.Excititor.Worker/Options/VexWorkerOptionsValidator.cs (100%) rename src/{ => Excititor}/StellaOps.Excititor.Worker/Options/VexWorkerPluginOptions.cs (100%) rename src/{ => Excititor}/StellaOps.Excititor.Worker/Options/VexWorkerRefreshOptions.cs (96%) rename src/{ => Excititor}/StellaOps.Excititor.Worker/Options/VexWorkerRetryOptions.cs (100%) rename src/{ => Excititor}/StellaOps.Excititor.Worker/Program.cs (100%) rename src/{ => Excititor}/StellaOps.Excititor.Worker/Properties/AssemblyInfo.cs (100%) rename src/{ => Excititor}/StellaOps.Excititor.Worker/Scheduling/DefaultVexProviderRunner.cs (97%) rename 
src/{ => Excititor}/StellaOps.Excititor.Worker/Scheduling/IVexConsensusRefreshScheduler.cs (96%) rename src/{ => Excititor}/StellaOps.Excititor.Worker/Scheduling/IVexProviderRunner.cs (100%) rename src/{ => Excititor}/StellaOps.Excititor.Worker/Scheduling/VexConsensusRefreshService.cs (97%) rename src/{ => Excititor}/StellaOps.Excititor.Worker/Scheduling/VexWorkerHostedService.cs (100%) rename src/{ => Excititor}/StellaOps.Excititor.Worker/Scheduling/VexWorkerSchedule.cs (100%) rename src/{ => Excititor}/StellaOps.Excititor.Worker/Signature/VerifyingVexRawDocumentSink.cs (97%) rename src/{ => Excititor}/StellaOps.Excititor.Worker/Signature/WorkerSignatureVerifier.cs (97%) create mode 100644 src/Excititor/StellaOps.Excititor.Worker/StellaOps.Excititor.Worker.csproj rename src/{ => Excititor}/StellaOps.Excititor.Worker/TASKS.md (99%) create mode 100644 src/Excititor/StellaOps.Excititor.sln rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.ArtifactStores.S3/Extensions/ServiceCollectionExtensions.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.ArtifactStores.S3/S3ArtifactClient.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.ArtifactStores.S3/StellaOps.Excititor.ArtifactStores.S3.csproj (97%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Attestation/AGENTS.md (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Attestation/Dsse/DsseEnvelope.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Attestation/Dsse/VexDsseBuilder.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Attestation/EXCITITOR-ATTEST-01-003-plan.md (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Attestation/Extensions/ServiceCollectionExtensions.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Attestation/Models/VexAttestationPredicate.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Attestation/Signing/IVexSigner.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Attestation/StellaOps.Excititor.Attestation.csproj (97%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Attestation/TASKS.md (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Attestation/Transparency/ITransparencyLogClient.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Attestation/Transparency/RekorHttpClient.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Attestation/Transparency/RekorHttpClientOptions.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Attestation/Verification/IVexAttestationVerifier.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Attestation/Verification/VexAttestationMetrics.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Attestation/Verification/VexAttestationVerificationOptions.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Attestation/Verification/VexAttestationVerifier.cs (97%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Attestation/VexAttestationClient.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.Abstractions/AGENTS.md (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.Abstractions/IVexConnectorOptionsValidator.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.Abstractions/StellaOps.Excititor.Connectors.Abstractions.csproj (97%) rename src/{ => 
Excititor/__Libraries}/StellaOps.Excititor.Connectors.Abstractions/TASKS.md (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.Abstractions/VexConnectorBase.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.Abstractions/VexConnectorDescriptor.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.Abstractions/VexConnectorLogScope.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.Abstractions/VexConnectorMetadataBuilder.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.Abstractions/VexConnectorOptionsBinder.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.Abstractions/VexConnectorOptionsBinderOptions.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.Abstractions/VexConnectorOptionsValidationException.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.Cisco.CSAF/AGENTS.md (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.Cisco.CSAF/CiscoCsafConnector.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.Cisco.CSAF/Configuration/CiscoConnectorOptions.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.Cisco.CSAF/Configuration/CiscoConnectorOptionsValidator.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.Cisco.CSAF/DependencyInjection/CiscoConnectorServiceCollectionExtensions.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.Cisco.CSAF/Metadata/CiscoProviderMetadataLoader.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.Cisco.CSAF/StellaOps.Excititor.Connectors.Cisco.CSAF.csproj (98%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.Cisco.CSAF/TASKS.md (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.MSRC.CSAF/AGENTS.md (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.MSRC.CSAF/Authentication/MsrcTokenProvider.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.MSRC.CSAF/Configuration/MsrcConnectorOptions.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.MSRC.CSAF/DependencyInjection/MsrcConnectorServiceCollectionExtensions.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.MSRC.CSAF/MsrcCsafConnector.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.MSRC.CSAF/StellaOps.Excititor.Connectors.MSRC.CSAF.csproj (98%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.MSRC.CSAF/TASKS.md (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/AGENTS.md (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Authentication/OciCosignAuthority.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Authentication/OciRegistryAuthorization.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Configuration/OciOpenVexAttestationConnectorOptions.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Configuration/OciOpenVexAttestationConnectorOptionsValidator.cs (100%) rename src/{ => 
Excititor/__Libraries}/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/DependencyInjection/OciOpenVexAttestationConnectorServiceCollectionExtensions.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Discovery/OciAttestationDiscoveryResult.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Discovery/OciAttestationDiscoveryService.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Discovery/OciAttestationTarget.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Discovery/OciImageReference.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Discovery/OciImageReferenceParser.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Discovery/OciOfflineBundleReference.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Fetch/OciArtifactDescriptor.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Fetch/OciAttestationDocument.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Fetch/OciAttestationFetcher.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Fetch/OciRegistryClient.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/OciOpenVexAttestationConnector.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.csproj (98%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/TASKS.md (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.Oracle.CSAF/AGENTS.md (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.Oracle.CSAF/Configuration/OracleConnectorOptions.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.Oracle.CSAF/Configuration/OracleConnectorOptionsValidator.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.Oracle.CSAF/DependencyInjection/OracleConnectorServiceCollectionExtensions.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.Oracle.CSAF/Metadata/OracleCatalogLoader.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.Oracle.CSAF/OracleCsafConnector.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.Oracle.CSAF/StellaOps.Excititor.Connectors.Oracle.CSAF.csproj (98%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.Oracle.CSAF/TASKS.md (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.RedHat.CSAF/AGENTS.md (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.RedHat.CSAF/Configuration/RedHatConnectorOptions.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.RedHat.CSAF/DependencyInjection/RedHatConnectorServiceCollectionExtensions.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.RedHat.CSAF/Metadata/RedHatProviderMetadataLoader.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.RedHat.CSAF/RedHatCsafConnector.cs (100%) rename src/{ => 
Excititor/__Libraries}/StellaOps.Excititor.Connectors.RedHat.CSAF/StellaOps.Excititor.Connectors.RedHat.CSAF.csproj (98%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.RedHat.CSAF/TASKS.md (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/AGENTS.md (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Authentication/RancherHubTokenProvider.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Configuration/RancherHubConnectorOptions.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Configuration/RancherHubConnectorOptionsValidator.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/DependencyInjection/RancherHubConnectorServiceCollectionExtensions.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Design/EXCITITOR-CONN-SUSE-01-002.md (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Events/RancherHubEventClient.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Events/RancherHubEventModels.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Metadata/RancherHubMetadataLoader.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/RancherHubConnector.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/State/RancherHubCheckpointManager.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.csproj (98%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/TASKS.md (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.Ubuntu.CSAF/AGENTS.md (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.Ubuntu.CSAF/Configuration/UbuntuConnectorOptions.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.Ubuntu.CSAF/Configuration/UbuntuConnectorOptionsValidator.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.Ubuntu.CSAF/DependencyInjection/UbuntuConnectorServiceCollectionExtensions.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.Ubuntu.CSAF/Metadata/UbuntuCatalogLoader.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.Ubuntu.CSAF/StellaOps.Excititor.Connectors.Ubuntu.CSAF.csproj (98%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.Ubuntu.CSAF/TASKS.md (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Connectors.Ubuntu.CSAF/UbuntuCsafConnector.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Core/AGENTS.md (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Core/Aoc/AocServiceCollectionExtensions.cs (96%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Core/Aoc/ExcititorAocGuardException.cs (97%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Core/Aoc/IVexRawWriteGuard.cs (97%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Core/Aoc/VexRawWriteGuard.cs (97%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Core/BaselineVexConsensusPolicy.cs (100%) rename src/{ => 
Excititor/__Libraries}/StellaOps.Excititor.Core/IVexConsensusPolicy.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Core/MirrorDistributionOptions.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Core/MirrorExportPlanner.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Core/Observations/IVexObservationLookup.cs (97%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Core/Observations/IVexObservationQueryService.cs (96%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Core/Observations/VexObservation.cs (96%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Core/Observations/VexObservationQueryModels.cs (97%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Core/Observations/VexObservationQueryService.cs (97%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Core/StellaOps.Excititor.Core.csproj (61%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Core/TASKS.md (99%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Core/VexAttestationAbstractions.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Core/VexCacheEntry.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Core/VexCanonicalJsonSerializer.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Core/VexClaim.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Core/VexConnectorAbstractions.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Core/VexConsensus.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Core/VexConsensusHold.cs (96%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Core/VexConsensusPolicyOptions.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Core/VexConsensusResolver.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Core/VexExportManifest.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Core/VexExporterAbstractions.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Core/VexNormalizerAbstractions.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Core/VexProvider.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Core/VexQuery.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Core/VexQuietProvenance.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Core/VexScoreEnvelope.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Core/VexSignals.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Core/VexSignatureVerifiers.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Export/AGENTS.md (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Export/ExportEngine.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Export/FileSystemArtifactStore.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Export/IVexArtifactStore.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Export/OfflineBundleArtifactStore.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Export/Properties/AssemblyInfo.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Export/S3ArtifactStore.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Export/StellaOps.Excititor.Export.csproj (85%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Export/TASKS.md (100%) rename src/{ => 
Excititor/__Libraries}/StellaOps.Excititor.Export/VexExportCacheService.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Export/VexExportEnvelopeBuilder.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Export/VexMirrorBundlePublisher.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Formats.CSAF/AGENTS.md (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Formats.CSAF/CsafNormalizer.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Formats.CSAF/ServiceCollectionExtensions.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Formats.CSAF/StellaOps.Excititor.Formats.CSAF.csproj (97%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Formats.CSAF/TASKS.md (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Formats.CycloneDX/AGENTS.md (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Formats.CycloneDX/CycloneDxNormalizer.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Formats.CycloneDX/ServiceCollectionExtensions.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Formats.CycloneDX/StellaOps.Excititor.Formats.CycloneDX.csproj (97%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Formats.CycloneDX/TASKS.md (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Formats.OpenVEX/AGENTS.md (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Formats.OpenVEX/OpenVexNormalizer.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Formats.OpenVEX/ServiceCollectionExtensions.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Formats.OpenVEX/StellaOps.Excititor.Formats.OpenVEX.csproj (97%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Formats.OpenVEX/TASKS.md (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Policy/AGENTS.md (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Policy/IVexPolicyProvider.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Policy/StellaOps.Excititor.Policy.csproj (97%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Policy/TASKS.md (91%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Policy/VexPolicyBinder.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Policy/VexPolicyDiagnostics.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Policy/VexPolicyDigest.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Policy/VexPolicyOptions.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Policy/VexPolicyProcessing.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Policy/VexPolicyTelemetry.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Storage.Mongo/AGENTS.md (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Storage.Mongo/IVexExportStore.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Storage.Mongo/IVexRawStore.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Storage.Mongo/IVexStorageContracts.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Storage.Mongo/Migrations/IVexMongoMigration.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Storage.Mongo/Migrations/VexConsensusHoldMigration.cs (97%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Storage.Mongo/Migrations/VexConsensusSignalsMigration.cs (100%) rename src/{ => 
Excititor/__Libraries}/StellaOps.Excititor.Storage.Mongo/Migrations/VexInitialIndexMigration.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Storage.Mongo/Migrations/VexMigrationRecord.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Storage.Mongo/Migrations/VexMongoMigrationHostedService.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Storage.Mongo/Migrations/VexMongoMigrationRunner.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Storage.Mongo/MongoVexCacheIndex.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Storage.Mongo/MongoVexCacheMaintenance.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Storage.Mongo/MongoVexClaimStore.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Storage.Mongo/MongoVexConnectorStateRepository.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Storage.Mongo/MongoVexConsensusHoldStore.cs (97%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Storage.Mongo/MongoVexConsensusStore.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Storage.Mongo/MongoVexExportStore.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Storage.Mongo/MongoVexProviderStore.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Storage.Mongo/MongoVexRawStore.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Storage.Mongo/Properties/AssemblyInfo.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Storage.Mongo/ServiceCollectionExtensions.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Storage.Mongo/StellaOps.Excititor.Storage.Mongo.csproj (97%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Storage.Mongo/StorageBackedVexNormalizerRouter.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Storage.Mongo/TASKS.md (99%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Storage.Mongo/VexMongoMappingRegistry.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Storage.Mongo/VexMongoModels.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Storage.Mongo/VexMongoSessionProvider.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Storage.Mongo/VexMongoStorageOptions.cs (100%) rename src/{ => Excititor/__Libraries}/StellaOps.Excititor.Storage.Mongo/VexStatementBackfillService.cs (100%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.ArtifactStores.S3.Tests/S3ArtifactClientTests.cs (100%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.ArtifactStores.S3.Tests/StellaOps.Excititor.ArtifactStores.S3.Tests.csproj (68%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Attestation.Tests/StellaOps.Excititor.Attestation.Tests.csproj (52%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Attestation.Tests/VexAttestationClientTests.cs (100%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Attestation.Tests/VexAttestationVerifierTests.cs (100%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Attestation.Tests/VexDsseBuilderTests.cs (100%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Connectors.Cisco.CSAF.Tests/Connectors/CiscoCsafConnectorTests.cs (97%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Connectors.Cisco.CSAF.Tests/Metadata/CiscoProviderMetadataLoaderTests.cs (100%) rename src/{ => 
Excititor/__Tests}/StellaOps.Excititor.Connectors.Cisco.CSAF.Tests/StellaOps.Excititor.Connectors.Cisco.CSAF.Tests.csproj (72%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Connectors.MSRC.CSAF.Tests/Authentication/MsrcTokenProviderTests.cs (100%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Connectors.MSRC.CSAF.Tests/Connectors/MsrcCsafConnectorTests.cs (97%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Connectors.MSRC.CSAF.Tests/StellaOps.Excititor.Connectors.MSRC.CSAF.Tests.csproj (78%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Tests/Configuration/OciOpenVexAttestationConnectorOptionsValidatorTests.cs (100%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Tests/Connector/OciOpenVexAttestationConnectorTests.cs (97%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Tests/Discovery/OciAttestationDiscoveryServiceTests.cs (100%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Tests/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Tests.csproj (74%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Connectors.Oracle.CSAF.Tests/Connectors/OracleCsafConnectorTests.cs (97%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Connectors.Oracle.CSAF.Tests/Metadata/OracleCatalogLoaderTests.cs (100%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Connectors.Oracle.CSAF.Tests/StellaOps.Excititor.Connectors.Oracle.CSAF.Tests.csproj (75%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Connectors.RedHat.CSAF.Tests/Connectors/RedHatCsafConnectorTests.cs (100%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Connectors.RedHat.CSAF.Tests/Metadata/RedHatProviderMetadataLoaderTests.cs (100%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Connectors.RedHat.CSAF.Tests/StellaOps.Excititor.Connectors.RedHat.CSAF.Tests.csproj (61%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Tests/Authentication/RancherHubTokenProviderTests.cs (100%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Tests/Metadata/RancherHubMetadataLoaderTests.cs (100%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Tests/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Tests.csproj (60%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests/Connectors/UbuntuCsafConnectorTests.cs (97%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests/Metadata/UbuntuCatalogLoaderTests.cs (100%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests/StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests.csproj (75%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Core.Tests/Aoc/VexRawWriteGuardTests.cs (97%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Core.Tests/Observations/VexObservationQueryServiceTests.cs (97%) create mode 100644 src/Excititor/__Tests/StellaOps.Excititor.Core.Tests/StellaOps.Excititor.Core.Tests.csproj rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Core.Tests/VexCanonicalJsonSerializerTests.cs (100%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Core.Tests/VexConsensusResolverTests.cs (100%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Core.Tests/VexPolicyBinderTests.cs (100%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Core.Tests/VexPolicyDiagnosticsTests.cs (100%) 
rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Core.Tests/VexQuerySignatureTests.cs (100%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Core.Tests/VexSignalSnapshotTests.cs (100%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Export.Tests/ExportEngineTests.cs (100%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Export.Tests/FileSystemArtifactStoreTests.cs (100%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Export.Tests/MirrorBundlePublisherTests.cs (100%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Export.Tests/OfflineBundleArtifactStoreTests.cs (100%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Export.Tests/S3ArtifactStoreTests.cs (100%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Export.Tests/StellaOps.Excititor.Export.Tests.csproj (72%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Export.Tests/VexExportCacheServiceTests.cs (100%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Formats.CSAF.Tests/CsafNormalizerTests.cs (100%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Formats.CSAF.Tests/Fixtures/rhsa-sample.json (100%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Formats.CSAF.Tests/StellaOps.Excititor.Formats.CSAF.Tests.csproj (62%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Formats.CycloneDX.Tests/CycloneDxNormalizerTests.cs (100%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Formats.CycloneDX.Tests/StellaOps.Excititor.Formats.CycloneDX.Tests.csproj (56%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Formats.OpenVEX.Tests/OpenVexNormalizerTests.cs (100%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Formats.OpenVEX.Tests/StellaOps.Excititor.Formats.OpenVEX.Tests.csproj (56%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Policy.Tests/StellaOps.Excititor.Policy.Tests.csproj (66%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Policy.Tests/VexPolicyProviderTests.cs (100%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Storage.Mongo.Tests/MongoVexCacheMaintenanceTests.cs (100%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Storage.Mongo.Tests/MongoVexRepositoryTests.cs (100%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Storage.Mongo.Tests/MongoVexSessionConsistencyTests.cs (100%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Storage.Mongo.Tests/MongoVexStatementBackfillServiceTests.cs (100%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Storage.Mongo.Tests/MongoVexStoreMappingTests.cs (100%) create mode 100644 src/Excititor/__Tests/StellaOps.Excititor.Storage.Mongo.Tests/StellaOps.Excititor.Storage.Mongo.Tests.csproj rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Storage.Mongo.Tests/VexMongoMigrationRunnerTests.cs (100%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.WebService.Tests/IngestEndpointsTests.cs (97%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.WebService.Tests/MirrorEndpointsTests.cs (97%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.WebService.Tests/ResolveEndpointTests.cs (97%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.WebService.Tests/StatusEndpointTests.cs (97%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.WebService.Tests/StellaOps.Excititor.WebService.Tests.csproj (86%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.WebService.Tests/TestAuthentication.cs (97%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.WebService.Tests/TestServiceOverrides.cs (98%) rename src/{ => 
Excititor/__Tests}/StellaOps.Excititor.WebService.Tests/TestWebApplicationFactory.cs (97%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Worker.Tests/DefaultVexProviderRunnerIntegrationTests.cs (97%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Worker.Tests/DefaultVexProviderRunnerTests.cs (97%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Worker.Tests/Signature/WorkerSignatureVerifierTests.cs (97%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Worker.Tests/StellaOps.Excititor.Worker.Tests.csproj (79%) rename src/{ => Excititor/__Tests}/StellaOps.Excititor.Worker.Tests/VexWorkerOptionsTests.cs (100%) rename src/{ => ExportCenter}/StellaOps.ExportCenter.AttestationBundles/AGENTS.md (97%) rename src/{ => ExportCenter}/StellaOps.ExportCenter.AttestationBundles/TASKS.md (99%) rename src/{ => ExportCenter}/StellaOps.ExportCenter.DevPortalOffline/AGENTS.md (97%) rename src/{ => ExportCenter}/StellaOps.ExportCenter.DevPortalOffline/TASKS.md (99%) rename src/{ => ExportCenter}/StellaOps.ExportCenter.RiskBundles/AGENTS.md (98%) rename src/{ => ExportCenter}/StellaOps.ExportCenter.RiskBundles/TASKS.md (99%) create mode 100644 src/ExportCenter/StellaOps.ExportCenter.sln rename src/{ => ExportCenter}/StellaOps.ExportCenter/AGENTS.md (98%) rename src/{ => ExportCenter}/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/Class1.cs (91%) rename src/{ => ExportCenter}/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/StellaOps.ExportCenter.Core.csproj (95%) rename src/{ => ExportCenter}/StellaOps.ExportCenter/StellaOps.ExportCenter.Infrastructure/Class1.cs (92%) rename src/{ => ExportCenter}/StellaOps.ExportCenter/StellaOps.ExportCenter.Infrastructure/StellaOps.ExportCenter.Infrastructure.csproj (94%) rename src/{ => ExportCenter}/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/StellaOps.ExportCenter.Tests.csproj (91%) rename src/{ => ExportCenter}/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/UnitTest1.cs (92%) rename src/{ => ExportCenter}/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/xunit.runner.json (96%) rename src/{ => ExportCenter}/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Program.cs (96%) rename src/{ => ExportCenter}/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Properties/launchSettings.json (96%) rename src/{ => ExportCenter}/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj (95%) rename src/{ => ExportCenter}/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.http (96%) rename src/{StellaOps.Orchestrator/StellaOps.Orchestrator.WebService => ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService}/appsettings.Development.json (93%) rename src/{StellaOps.Notifier/StellaOps.Notifier.WebService => ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService}/appsettings.json (94%) rename src/{ => ExportCenter}/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/Program.cs (96%) rename src/{ => ExportCenter}/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/Properties/launchSettings.json (95%) rename src/{ => ExportCenter}/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/StellaOps.ExportCenter.Worker.csproj (95%) rename src/{ => ExportCenter}/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/Worker.cs (96%) rename src/{StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker => ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker}/appsettings.Development.json (94%) rename 
src/{StellaOps.Orchestrator/StellaOps.Orchestrator.Worker => ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker}/appsettings.json (94%) rename src/{ => ExportCenter}/StellaOps.ExportCenter/StellaOps.ExportCenter.sln (98%) rename src/{ => ExportCenter}/StellaOps.ExportCenter/TASKS.md (99%) rename src/{ => Findings}/StellaOps.Findings.Ledger/AGENTS.md (90%) rename src/{ => Findings}/StellaOps.Findings.Ledger/TASKS.md (99%) rename src/{ => Graph}/StellaOps.Graph.Api/AGENTS.md (90%) rename src/{ => Graph}/StellaOps.Graph.Api/TASKS.md (99%) rename src/{ => Graph}/StellaOps.Graph.Indexer/AGENTS.md (90%) rename src/{ => Graph}/StellaOps.Graph.Indexer/TASKS.md (99%) rename src/{ => IssuerDirectory}/StellaOps.IssuerDirectory/AGENTS.md (87%) rename src/{ => IssuerDirectory}/StellaOps.IssuerDirectory/TASKS.md (99%) rename src/{ => Mirror}/StellaOps.Mirror.Creator/AGENTS.md (98%) rename src/{ => Mirror}/StellaOps.Mirror.Creator/TASKS.md (99%) create mode 100644 src/Notifier/StellaOps.Notifier.sln rename src/{ => Notifier}/StellaOps.Notifier/AGENTS.md (98%) rename src/{ => Notifier}/StellaOps.Notifier/StellaOps.Notifier.Tests/EventProcessorTests.cs (97%) rename src/{ => Notifier}/StellaOps.Notifier/StellaOps.Notifier.Tests/RuleEvaluatorTests.cs (97%) rename src/{ => Notifier}/StellaOps.Notifier/StellaOps.Notifier.Tests/StellaOps.Notifier.Tests.csproj (97%) rename src/{ => Notifier}/StellaOps.Notifier/StellaOps.Notifier.Tests/Support/InMemoryStores.cs (97%) rename src/{StellaOps.Concelier.RawModels.Tests => Notifier/StellaOps.Notifier/StellaOps.Notifier.Tests}/xunit.runner.json (96%) rename src/{ => Notifier}/StellaOps.Notifier/StellaOps.Notifier.WebService/Program.cs (96%) rename src/{ => Notifier}/StellaOps.Notifier/StellaOps.Notifier.WebService/Properties/launchSettings.json (96%) rename src/{ => Notifier}/StellaOps.Notifier/StellaOps.Notifier.WebService/Setup/MongoInitializationHostedService.cs (97%) rename src/{ => Notifier}/StellaOps.Notifier/StellaOps.Notifier.WebService/StellaOps.Notifier.WebService.csproj (64%) rename src/{ => Notifier}/StellaOps.Notifier/StellaOps.Notifier.WebService/StellaOps.Notifier.WebService.http (96%) rename src/{StellaOps.ExportCenter/StellaOps.ExportCenter.WebService => Notifier/StellaOps.Notifier/StellaOps.Notifier.WebService}/appsettings.Development.json (93%) rename src/{StellaOps.ExportCenter/StellaOps.ExportCenter.WebService => Notifier/StellaOps.Notifier/StellaOps.Notifier.WebService}/appsettings.json (94%) rename src/{ => Notifier}/StellaOps.Notifier/StellaOps.Notifier.Worker/Options/NotifierWorkerOptions.cs (97%) rename src/{ => Notifier}/StellaOps.Notifier/StellaOps.Notifier.Worker/Processing/DefaultNotifyRuleEvaluator.cs (97%) rename src/{ => Notifier}/StellaOps.Notifier/StellaOps.Notifier.Worker/Processing/IdempotencyKeyBuilder.cs (97%) rename src/{ => Notifier}/StellaOps.Notifier/StellaOps.Notifier.Worker/Processing/MongoInitializationHostedService.cs (97%) rename src/{ => Notifier}/StellaOps.Notifier/StellaOps.Notifier.Worker/Processing/NotifierEventProcessor.cs (97%) rename src/{ => Notifier}/StellaOps.Notifier/StellaOps.Notifier.Worker/Processing/NotifierEventWorker.cs (97%) rename src/{ => Notifier}/StellaOps.Notifier/StellaOps.Notifier.Worker/Program.cs (97%) rename src/{ => Notifier}/StellaOps.Notifier/StellaOps.Notifier.Worker/Properties/AssemblyInfo.cs (97%) rename src/{ => Notifier}/StellaOps.Notifier/StellaOps.Notifier.Worker/Properties/launchSettings.json (95%) rename src/{ => 
Notifier}/StellaOps.Notifier/StellaOps.Notifier.Worker/StellaOps.Notifier.Worker.csproj (59%) rename src/{ => Notifier}/StellaOps.Notifier/StellaOps.Notifier.Worker/appsettings.Development.json (94%) rename src/{StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker => Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker}/appsettings.json (94%) rename src/{ => Notifier}/StellaOps.Notifier/StellaOps.Notifier.sln (98%) rename src/{ => Notifier}/StellaOps.Notifier/TASKS.md (99%) rename src/{ => Notifier}/StellaOps.Notifier/docs/NOTIFY-SVC-38-001-FOUNDATIONS.md (99%) rename src/{ => Notify}/StellaOps.Notify.WebService/AGENTS.md (100%) rename src/{ => Notify}/StellaOps.Notify.WebService/Contracts/ChannelHealthResponse.cs (96%) rename src/{ => Notify}/StellaOps.Notify.WebService/Contracts/ChannelTestSendRequest.cs (100%) rename src/{ => Notify}/StellaOps.Notify.WebService/Contracts/ChannelTestSendResponse.cs (100%) rename src/{ => Notify}/StellaOps.Notify.WebService/Contracts/LockRequests.cs (100%) rename src/{ => Notify}/StellaOps.Notify.WebService/Diagnostics/ServiceStatus.cs (100%) rename src/{ => Notify}/StellaOps.Notify.WebService/Extensions/ConfigurationExtensions.cs (100%) rename src/{ => Notify}/StellaOps.Notify.WebService/Hosting/NotifyPluginHostFactory.cs (100%) rename src/{ => Notify}/StellaOps.Notify.WebService/Internal/JsonHttpResult.cs (100%) rename src/{ => Notify}/StellaOps.Notify.WebService/Options/NotifyWebServiceOptions.cs (100%) rename src/{ => Notify}/StellaOps.Notify.WebService/Options/NotifyWebServiceOptionsPostConfigure.cs (100%) rename src/{ => Notify}/StellaOps.Notify.WebService/Options/NotifyWebServiceOptionsValidator.cs (100%) rename src/{ => Notify}/StellaOps.Notify.WebService/Plugins/NotifyPluginRegistry.cs (100%) rename src/{ => Notify}/StellaOps.Notify.WebService/Program.Partial.cs (100%) rename src/{ => Notify}/StellaOps.Notify.WebService/Program.cs (100%) rename src/{ => Notify}/StellaOps.Notify.WebService/Security/NotifyPolicies.cs (100%) rename src/{ => Notify}/StellaOps.Notify.WebService/Security/NotifyRateLimitPolicies.cs (100%) rename src/{ => Notify}/StellaOps.Notify.WebService/Services/NotifyChannelHealthService.cs (97%) rename src/{ => Notify}/StellaOps.Notify.WebService/Services/NotifyChannelTestService.cs (100%) rename src/{ => Notify}/StellaOps.Notify.WebService/Services/NotifySchemaMigrationService.cs (100%) create mode 100644 src/Notify/StellaOps.Notify.WebService/StellaOps.Notify.WebService.csproj rename src/{ => Notify}/StellaOps.Notify.WebService/Storage/InMemory/InMemoryStorageModule.cs (100%) create mode 100644 src/Notify/StellaOps.Notify.WebService/TASKS.md rename src/{ => Notify}/StellaOps.Notify.Worker/AGENTS.md (100%) rename src/{ => Notify}/StellaOps.Notify.Worker/Handlers/INotifyEventHandler.cs (96%) rename src/{ => Notify}/StellaOps.Notify.Worker/Handlers/NoOpNotifyEventHandler.cs (96%) rename src/{ => Notify}/StellaOps.Notify.Worker/NotifyWorkerOptions.cs (96%) rename src/{ => Notify}/StellaOps.Notify.Worker/Processing/NotifyEventLeaseProcessor.cs (97%) rename src/{ => Notify}/StellaOps.Notify.Worker/Processing/NotifyEventLeaseWorker.cs (97%) rename src/{ => Notify}/StellaOps.Notify.Worker/Program.cs (97%) rename src/{ => Notify}/StellaOps.Notify.Worker/Properties/AssemblyInfo.cs (97%) rename src/{ => Notify}/StellaOps.Notify.Worker/StellaOps.Notify.Worker.csproj (98%) rename src/{ => Notify}/StellaOps.Notify.Worker/TASKS.md (67%) rename src/{ => Notify}/StellaOps.Notify.Worker/appsettings.json (96%) create mode 100644 
src/Notify/StellaOps.Notify.sln rename src/{ => Notify/__Libraries}/StellaOps.Notify.Connectors.Email/AGENTS.md (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Connectors.Email/EmailChannelHealthProvider.cs (97%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Connectors.Email/EmailChannelTestProvider.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Connectors.Email/EmailMetadataBuilder.cs (97%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Connectors.Email/StellaOps.Notify.Connectors.Email.csproj (79%) create mode 100644 src/Notify/__Libraries/StellaOps.Notify.Connectors.Email/TASKS.md rename src/{ => Notify/__Libraries}/StellaOps.Notify.Connectors.Email/notify-plugin.json (95%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Connectors.Shared/ConnectorHashing.cs (97%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Connectors.Shared/ConnectorMetadataBuilder.cs (96%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Connectors.Shared/ConnectorValueRedactor.cs (96%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Connectors.Shared/StellaOps.Notify.Connectors.Shared.csproj (97%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Connectors.Slack/AGENTS.md (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Connectors.Slack/SlackChannelHealthProvider.cs (97%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Connectors.Slack/SlackChannelTestProvider.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Connectors.Slack/SlackMetadataBuilder.cs (97%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Connectors.Slack/StellaOps.Notify.Connectors.Slack.csproj (79%) create mode 100644 src/Notify/__Libraries/StellaOps.Notify.Connectors.Slack/TASKS.md rename src/{ => Notify/__Libraries}/StellaOps.Notify.Connectors.Slack/notify-plugin.json (96%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Connectors.Teams/AGENTS.md (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Connectors.Teams/StellaOps.Notify.Connectors.Teams.csproj (79%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Connectors.Teams/TASKS.md (79%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Connectors.Teams/TeamsChannelHealthProvider.cs (97%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Connectors.Teams/TeamsChannelTestProvider.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Connectors.Teams/TeamsMetadataBuilder.cs (97%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Connectors.Teams/notify-plugin.json (96%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Connectors.Webhook/AGENTS.md (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Connectors.Webhook/StellaOps.Notify.Connectors.Webhook.csproj (79%) create mode 100644 src/Notify/__Libraries/StellaOps.Notify.Connectors.Webhook/TASKS.md rename src/{ => Notify/__Libraries}/StellaOps.Notify.Connectors.Webhook/WebhookChannelTestProvider.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Connectors.Webhook/WebhookMetadataBuilder.cs (97%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Connectors.Webhook/notify-plugin.json (95%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Engine/AGENTS.md (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Engine/ChannelHealthContracts.cs (96%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Engine/ChannelTestPreviewContracts.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Engine/INotifyRuleEvaluator.cs (96%) rename src/{ => 
Notify/__Libraries}/StellaOps.Notify.Engine/NotifyRuleEvaluationOutcome.cs (96%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Engine/StellaOps.Notify.Engine.csproj (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Engine/TASKS.md (67%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Models/AGENTS.md (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Models/Iso8601DurationConverter.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Models/NotifyCanonicalJsonSerializer.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Models/NotifyChannel.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Models/NotifyDelivery.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Models/NotifyEnums.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Models/NotifyEvent.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Models/NotifyEventKinds.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Models/NotifyRule.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Models/NotifySchemaMigration.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Models/NotifySchemaVersions.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Models/NotifyTemplate.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Models/NotifyValidation.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Models/StellaOps.Notify.Models.csproj (100%) create mode 100644 src/Notify/__Libraries/StellaOps.Notify.Models/TASKS.md rename src/{ => Notify/__Libraries}/StellaOps.Notify.Queue/AGENTS.md (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Queue/Nats/NatsNotifyDeliveryLease.cs (97%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Queue/Nats/NatsNotifyDeliveryQueue.cs (97%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Queue/Nats/NatsNotifyEventLease.cs (97%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Queue/Nats/NatsNotifyEventQueue.cs (97%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Queue/NotifyDeliveryQueueHealthCheck.cs (97%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Queue/NotifyDeliveryQueueOptions.cs (96%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Queue/NotifyEventQueueOptions.cs (96%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Queue/NotifyQueueContracts.cs (96%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Queue/NotifyQueueFields.cs (97%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Queue/NotifyQueueHealthCheck.cs (97%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Queue/NotifyQueueMetrics.cs (97%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Queue/NotifyQueueServiceCollectionExtensions.cs (97%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Queue/NotifyQueueTransportKind.cs (94%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Queue/Properties/AssemblyInfo.cs (97%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Queue/Redis/RedisNotifyDeliveryLease.cs (97%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Queue/Redis/RedisNotifyDeliveryQueue.cs (97%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Queue/Redis/RedisNotifyEventLease.cs (97%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Queue/Redis/RedisNotifyEventQueue.cs (97%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Queue/StellaOps.Notify.Queue.csproj (98%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Queue/TASKS.md (67%) rename src/{ => 
Notify/__Libraries}/StellaOps.Notify.Storage.Mongo/AGENTS.md (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Storage.Mongo/Documents/NotifyAuditEntryDocument.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Storage.Mongo/Documents/NotifyDigestDocument.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Storage.Mongo/Documents/NotifyLockDocument.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Storage.Mongo/Internal/NotifyMongoContext.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Storage.Mongo/Internal/NotifyMongoInitializer.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Storage.Mongo/Migrations/EnsureNotifyCollectionsMigration.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Storage.Mongo/Migrations/EnsureNotifyIndexesMigration.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Storage.Mongo/Migrations/INotifyMongoMigration.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Storage.Mongo/Migrations/NotifyMongoMigrationRecord.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Storage.Mongo/Migrations/NotifyMongoMigrationRunner.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Storage.Mongo/Options/NotifyMongoOptions.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Storage.Mongo/Properties/AssemblyInfo.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Storage.Mongo/Repositories/INotifyAuditRepository.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Storage.Mongo/Repositories/INotifyChannelRepository.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Storage.Mongo/Repositories/INotifyDeliveryRepository.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Storage.Mongo/Repositories/INotifyDigestRepository.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Storage.Mongo/Repositories/INotifyLockRepository.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Storage.Mongo/Repositories/INotifyRuleRepository.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Storage.Mongo/Repositories/INotifyTemplateRepository.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Storage.Mongo/Repositories/NotifyAuditRepository.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Storage.Mongo/Repositories/NotifyChannelRepository.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Storage.Mongo/Repositories/NotifyDeliveryQueryResult.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Storage.Mongo/Repositories/NotifyDeliveryRepository.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Storage.Mongo/Repositories/NotifyDigestRepository.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Storage.Mongo/Repositories/NotifyLockRepository.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Storage.Mongo/Repositories/NotifyRuleRepository.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Storage.Mongo/Repositories/NotifyTemplateRepository.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Storage.Mongo/Serialization/BsonDocumentJsonExtensions.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Storage.Mongo/Serialization/NotifyChannelDocumentMapper.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Storage.Mongo/Serialization/NotifyDeliveryDocumentMapper.cs (100%) rename src/{ => 
Notify/__Libraries}/StellaOps.Notify.Storage.Mongo/Serialization/NotifyRuleDocumentMapper.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Storage.Mongo/Serialization/NotifyTemplateDocumentMapper.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Storage.Mongo/ServiceCollectionExtensions.cs (100%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Storage.Mongo/StellaOps.Notify.Storage.Mongo.csproj (98%) rename src/{ => Notify/__Libraries}/StellaOps.Notify.Storage.Mongo/TASKS.md (65%) rename src/{ => Notify/__Tests}/StellaOps.Notify.Connectors.Email.Tests/EmailChannelHealthProviderTests.cs (97%) rename src/{ => Notify/__Tests}/StellaOps.Notify.Connectors.Email.Tests/StellaOps.Notify.Connectors.Email.Tests.csproj (60%) rename src/{ => Notify/__Tests}/StellaOps.Notify.Connectors.Slack.Tests/SlackChannelHealthProviderTests.cs (97%) rename src/{ => Notify/__Tests}/StellaOps.Notify.Connectors.Slack.Tests/SlackChannelTestProviderTests.cs (97%) rename src/{ => Notify/__Tests}/StellaOps.Notify.Connectors.Slack.Tests/StellaOps.Notify.Connectors.Slack.Tests.csproj (60%) rename src/{ => Notify/__Tests}/StellaOps.Notify.Connectors.Teams.Tests/StellaOps.Notify.Connectors.Teams.Tests.csproj (60%) rename src/{ => Notify/__Tests}/StellaOps.Notify.Connectors.Teams.Tests/TeamsChannelHealthProviderTests.cs (97%) rename src/{ => Notify/__Tests}/StellaOps.Notify.Connectors.Teams.Tests/TeamsChannelTestProviderTests.cs (97%) rename src/{ => Notify/__Tests}/StellaOps.Notify.Models.Tests/DocSampleTests.cs (100%) rename src/{ => Notify/__Tests}/StellaOps.Notify.Models.Tests/NotifyCanonicalJsonSerializerTests.cs (100%) rename src/{ => Notify/__Tests}/StellaOps.Notify.Models.Tests/NotifyDeliveryTests.cs (100%) rename src/{ => Notify/__Tests}/StellaOps.Notify.Models.Tests/NotifyRuleTests.cs (100%) rename src/{ => Notify/__Tests}/StellaOps.Notify.Models.Tests/NotifySchemaMigrationTests.cs (100%) rename src/{ => Notify/__Tests}/StellaOps.Notify.Models.Tests/PlatformEventSamplesTests.cs (100%) rename src/{ => Notify/__Tests}/StellaOps.Notify.Models.Tests/PlatformEventSchemaValidationTests.cs (100%) rename src/{ => Notify/__Tests}/StellaOps.Notify.Models.Tests/StellaOps.Notify.Models.Tests.csproj (81%) rename src/{ => Notify/__Tests}/StellaOps.Notify.Queue.Tests/NatsNotifyDeliveryQueueTests.cs (97%) rename src/{ => Notify/__Tests}/StellaOps.Notify.Queue.Tests/NatsNotifyEventQueueTests.cs (96%) rename src/{ => Notify/__Tests}/StellaOps.Notify.Queue.Tests/RedisNotifyDeliveryQueueTests.cs (96%) rename src/{ => Notify/__Tests}/StellaOps.Notify.Queue.Tests/RedisNotifyEventQueueTests.cs (96%) rename src/{ => Notify/__Tests}/StellaOps.Notify.Queue.Tests/StellaOps.Notify.Queue.Tests.csproj (79%) rename src/{ => Notify/__Tests}/StellaOps.Notify.Storage.Mongo.Tests/AssemblyInfo.cs (100%) rename src/{ => Notify/__Tests}/StellaOps.Notify.Storage.Mongo.Tests/GlobalUsings.cs (100%) rename src/{ => Notify/__Tests}/StellaOps.Notify.Storage.Mongo.Tests/Internal/NotifyMongoMigrationTests.cs (100%) rename src/{ => Notify/__Tests}/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyAuditRepositoryTests.cs (100%) rename src/{ => Notify/__Tests}/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyChannelRepositoryTests.cs (100%) rename src/{ => Notify/__Tests}/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyDeliveryRepositoryTests.cs (100%) rename src/{ => Notify/__Tests}/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyDigestRepositoryTests.cs (100%) rename src/{ => 
Notify/__Tests}/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyLockRepositoryTests.cs (100%) rename src/{ => Notify/__Tests}/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyRuleRepositoryTests.cs (100%) rename src/{ => Notify/__Tests}/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyTemplateRepositoryTests.cs (100%) rename src/{ => Notify/__Tests}/StellaOps.Notify.Storage.Mongo.Tests/Serialization/NotifyChannelDocumentMapperTests.cs (100%) rename src/{ => Notify/__Tests}/StellaOps.Notify.Storage.Mongo.Tests/Serialization/NotifyRuleDocumentMapperTests.cs (100%) rename src/{ => Notify/__Tests}/StellaOps.Notify.Storage.Mongo.Tests/Serialization/NotifyTemplateDocumentMapperTests.cs (100%) rename src/{ => Notify/__Tests}/StellaOps.Notify.Storage.Mongo.Tests/StellaOps.Notify.Storage.Mongo.Tests.csproj (75%) rename src/{ => Notify/__Tests}/StellaOps.Notify.WebService.Tests/CrudEndpointsTests.cs (100%) rename src/{ => Notify/__Tests}/StellaOps.Notify.WebService.Tests/NormalizeEndpointsTests.cs (100%) rename src/{ => Notify/__Tests}/StellaOps.Notify.WebService.Tests/StellaOps.Notify.WebService.Tests.csproj (59%) rename src/{ => Notify/__Tests}/StellaOps.Notify.Worker.Tests/NotifyEventLeaseProcessorTests.cs (97%) rename src/{ => Notify/__Tests}/StellaOps.Notify.Worker.Tests/StellaOps.Notify.Worker.Tests.csproj (72%) rename src/{ => Orchestrator}/StellaOps.Orchestrator.WorkerSdk.Go/AGENTS.md (98%) rename src/{ => Orchestrator}/StellaOps.Orchestrator.WorkerSdk.Go/TASKS.md (99%) rename src/{ => Orchestrator}/StellaOps.Orchestrator.WorkerSdk.Python/AGENTS.md (98%) rename src/{ => Orchestrator}/StellaOps.Orchestrator.WorkerSdk.Python/TASKS.md (99%) create mode 100644 src/Orchestrator/StellaOps.Orchestrator.sln rename src/{ => Orchestrator}/StellaOps.Orchestrator/AGENTS.md (98%) rename src/{ => Orchestrator}/StellaOps.Orchestrator/StellaOps.Orchestrator.Core/Class1.cs (91%) rename src/{ => Orchestrator}/StellaOps.Orchestrator/StellaOps.Orchestrator.Core/StellaOps.Orchestrator.Core.csproj (95%) rename src/{ => Orchestrator}/StellaOps.Orchestrator/StellaOps.Orchestrator.Infrastructure/Class1.cs (92%) rename src/{ => Orchestrator}/StellaOps.Orchestrator/StellaOps.Orchestrator.Infrastructure/StellaOps.Orchestrator.Infrastructure.csproj (94%) rename src/{ => Orchestrator}/StellaOps.Orchestrator/StellaOps.Orchestrator.Tests/StellaOps.Orchestrator.Tests.csproj (91%) rename src/{ => Orchestrator}/StellaOps.Orchestrator/StellaOps.Orchestrator.Tests/UnitTest1.cs (92%) create mode 100644 src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Tests/xunit.runner.json rename src/{StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService => Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService}/Program.cs (96%) rename src/{ => Orchestrator}/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/Properties/launchSettings.json (96%) rename src/{ => Orchestrator}/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/StellaOps.Orchestrator.WebService.csproj (95%) rename src/{ => Orchestrator}/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/StellaOps.Orchestrator.WebService.http (96%) rename src/{StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService => Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService}/appsettings.Development.json (93%) rename src/{ => Orchestrator}/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/appsettings.json (94%) rename src/{ => Orchestrator}/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/Program.cs 
(96%) rename src/{ => Orchestrator}/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/Properties/launchSettings.json (95%) rename src/{ => Orchestrator}/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/StellaOps.Orchestrator.Worker.csproj (95%) rename src/{ => Orchestrator}/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/Worker.cs (96%) rename src/{ => Orchestrator}/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/appsettings.Development.json (94%) rename src/{StellaOps.ExportCenter/StellaOps.ExportCenter.Worker => Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker}/appsettings.json (94%) rename src/{ => Orchestrator}/StellaOps.Orchestrator/StellaOps.Orchestrator.sln (98%) rename src/{ => Orchestrator}/StellaOps.Orchestrator/TASKS.md (99%) create mode 100644 src/PacksRegistry/StellaOps.PacksRegistry.sln rename src/{ => PacksRegistry}/StellaOps.PacksRegistry/AGENTS.md (98%) rename src/{ => PacksRegistry}/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Core/Class1.cs (92%) rename src/{ => PacksRegistry}/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Core/StellaOps.PacksRegistry.Core.csproj (95%) rename src/{ => PacksRegistry}/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Infrastructure/Class1.cs (92%) rename src/{ => PacksRegistry}/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Infrastructure/StellaOps.PacksRegistry.Infrastructure.csproj (94%) rename src/{ => PacksRegistry}/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Tests/StellaOps.PacksRegistry.Tests.csproj (91%) rename src/{ => PacksRegistry}/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Tests/UnitTest1.cs (92%) create mode 100644 src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Tests/xunit.runner.json rename src/{StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService => PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService}/Program.cs (96%) rename src/{ => PacksRegistry}/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/Properties/launchSettings.json (96%) rename src/{ => PacksRegistry}/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/StellaOps.PacksRegistry.WebService.csproj (95%) rename src/{ => PacksRegistry}/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/StellaOps.PacksRegistry.WebService.http (96%) create mode 100644 src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/appsettings.Development.json create mode 100644 src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/appsettings.json rename src/{ => PacksRegistry}/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/Program.cs (96%) rename src/{ => PacksRegistry}/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/Properties/launchSettings.json (95%) rename src/{ => PacksRegistry}/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/StellaOps.PacksRegistry.Worker.csproj (95%) rename src/{ => PacksRegistry}/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/Worker.cs (96%) create mode 100644 src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/appsettings.Development.json create mode 100644 src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/appsettings.json rename src/{ => PacksRegistry}/StellaOps.PacksRegistry/StellaOps.PacksRegistry.sln (98%) rename src/{ => PacksRegistry}/StellaOps.PacksRegistry/TASKS.md (99%) rename src/{ => Policy}/StellaOps.Policy.Engine/AGENTS.md (92%) rename src/{ => Policy}/StellaOps.Policy.Engine/Compilation/DslToken.cs (96%) rename src/{ => 
Policy}/StellaOps.Policy.Engine/Compilation/DslTokenizer.cs (97%) rename src/{ => Policy}/StellaOps.Policy.Engine/Compilation/PolicyCompiler.cs (97%) rename src/{ => Policy}/StellaOps.Policy.Engine/Compilation/PolicyDslDiagnosticCodes.cs (98%) rename src/{ => Policy}/StellaOps.Policy.Engine/Compilation/PolicyIr.cs (97%) rename src/{ => Policy}/StellaOps.Policy.Engine/Compilation/PolicyIrSerializer.cs (97%) rename src/{ => Policy}/StellaOps.Policy.Engine/Compilation/PolicyParser.cs (97%) rename src/{ => Policy}/StellaOps.Policy.Engine/Compilation/PolicySyntaxNodes.cs (97%) rename src/{ => Policy}/StellaOps.Policy.Engine/Domain/PolicyPackRecord.cs (96%) rename src/{ => Policy}/StellaOps.Policy.Engine/Endpoints/PolicyCompilationEndpoints.cs (97%) rename src/{ => Policy}/StellaOps.Policy.Engine/Endpoints/PolicyPackEndpoints.cs (97%) rename src/{ => Policy}/StellaOps.Policy.Engine/Evaluation/PolicyEvaluationContext.cs (97%) rename src/{ => Policy}/StellaOps.Policy.Engine/Evaluation/PolicyEvaluator.cs (97%) rename src/{ => Policy}/StellaOps.Policy.Engine/Evaluation/PolicyExpressionEvaluator.cs (97%) rename src/{ => Policy}/StellaOps.Policy.Engine/Hosting/PolicyEngineStartupDiagnostics.cs (95%) rename src/{ => Policy}/StellaOps.Policy.Engine/Options/PolicyEngineOptions.cs (96%) rename src/{ => Policy}/StellaOps.Policy.Engine/Program.cs (97%) rename src/{ => Policy}/StellaOps.Policy.Engine/Properties/AssemblyInfo.cs (97%) rename src/{ => Policy}/StellaOps.Policy.Engine/README.md (98%) rename src/{ => Policy}/StellaOps.Policy.Engine/Services/IPolicyPackRepository.cs (97%) rename src/{ => Policy}/StellaOps.Policy.Engine/Services/InMemoryPolicyPackRepository.cs (97%) rename src/{ => Policy}/StellaOps.Policy.Engine/Services/PolicyCompilationService.cs (97%) rename src/{ => Policy}/StellaOps.Policy.Engine/Services/PolicyEvaluationService.cs (96%) rename src/{ => Policy}/StellaOps.Policy.Engine/Services/ScopeAuthorization.cs (96%) create mode 100644 src/Policy/StellaOps.Policy.Engine/StellaOps.Policy.Engine.csproj rename src/{ => Policy}/StellaOps.Policy.Engine/TASKS.md (99%) rename src/{ => Policy}/StellaOps.Policy.Engine/Workers/PolicyEngineBootstrapWorker.cs (97%) rename src/{ => Policy}/StellaOps.Policy.Gateway/Clients/IPolicyEngineClient.cs (98%) rename src/{ => Policy}/StellaOps.Policy.Gateway/Clients/PolicyEngineClient.cs (97%) rename src/{ => Policy}/StellaOps.Policy.Gateway/Clients/PolicyEngineResponse.cs (97%) rename src/{ => Policy}/StellaOps.Policy.Gateway/Clients/PolicyEngineResponseExtensions.cs (96%) rename src/{ => Policy}/StellaOps.Policy.Gateway/Contracts/PolicyPackContracts.cs (96%) rename src/{ => Policy}/StellaOps.Policy.Gateway/Infrastructure/GatewayForwardingContext.cs (96%) rename src/{ => Policy}/StellaOps.Policy.Gateway/Options/PolicyGatewayOptions.cs (97%) rename src/{ => Policy}/StellaOps.Policy.Gateway/Program.cs (97%) rename src/{ => Policy}/StellaOps.Policy.Gateway/Properties/AssemblyInfo.cs (97%) rename src/{ => Policy}/StellaOps.Policy.Gateway/Services/PolicyEngineTokenProvider.cs (97%) rename src/{ => Policy}/StellaOps.Policy.Gateway/Services/PolicyGatewayAuthorization.cs (96%) rename src/{ => Policy}/StellaOps.Policy.Gateway/Services/PolicyGatewayDpopHandler.cs (97%) rename src/{ => Policy}/StellaOps.Policy.Gateway/Services/PolicyGatewayDpopProofGenerator.cs (97%) rename src/{ => Policy}/StellaOps.Policy.Gateway/Services/PolicyGatewayMetrics.cs (97%) create mode 100644 src/Policy/StellaOps.Policy.Gateway/StellaOps.Policy.Gateway.csproj rename src/{ => 
Policy}/StellaOps.Policy.Registry/AGENTS.md (76%) rename src/{ => Policy}/StellaOps.Policy.Registry/TASKS.md (99%) rename src/{ => Policy}/StellaOps.Policy.RiskProfile/AGENTS.md (97%) rename src/{ => Policy}/StellaOps.Policy.RiskProfile/TASKS.md (99%) create mode 100644 src/Policy/StellaOps.Policy.sln rename src/{ => Policy/__Libraries}/StellaOps.Policy/AGENTS.md (100%) rename src/{ => Policy/__Libraries}/StellaOps.Policy/Audit/IPolicyAuditRepository.cs (100%) rename src/{ => Policy/__Libraries}/StellaOps.Policy/Audit/InMemoryPolicyAuditRepository.cs (100%) rename src/{ => Policy/__Libraries}/StellaOps.Policy/PolicyAuditEntry.cs (100%) rename src/{ => Policy/__Libraries}/StellaOps.Policy/PolicyBinder.cs (100%) rename src/{ => Policy/__Libraries}/StellaOps.Policy/PolicyDiagnostics.cs (100%) rename src/{ => Policy/__Libraries}/StellaOps.Policy/PolicyDigest.cs (100%) rename src/{ => Policy/__Libraries}/StellaOps.Policy/PolicyDocument.cs (100%) rename src/{ => Policy/__Libraries}/StellaOps.Policy/PolicyEvaluation.cs (100%) rename src/{ => Policy/__Libraries}/StellaOps.Policy/PolicyFinding.cs (100%) rename src/{ => Policy/__Libraries}/StellaOps.Policy/PolicyIssue.cs (100%) rename src/{ => Policy/__Libraries}/StellaOps.Policy/PolicyPreviewModels.cs (100%) rename src/{ => Policy/__Libraries}/StellaOps.Policy/PolicyPreviewService.cs (100%) rename src/{ => Policy/__Libraries}/StellaOps.Policy/PolicySchemaResource.cs (100%) rename src/{ => Policy/__Libraries}/StellaOps.Policy/PolicyScoringConfig.cs (100%) rename src/{ => Policy/__Libraries}/StellaOps.Policy/PolicyScoringConfigBinder.cs (100%) rename src/{ => Policy/__Libraries}/StellaOps.Policy/PolicyScoringConfigDigest.cs (100%) rename src/{ => Policy/__Libraries}/StellaOps.Policy/PolicyScoringSchema.cs (100%) rename src/{ => Policy/__Libraries}/StellaOps.Policy/PolicySnapshot.cs (100%) rename src/{ => Policy/__Libraries}/StellaOps.Policy/PolicySnapshotStore.cs (100%) rename src/{ => Policy/__Libraries}/StellaOps.Policy/PolicyUnknownConfidenceConfig.cs (100%) rename src/{ => Policy/__Libraries}/StellaOps.Policy/PolicyValidationCli.cs (100%) rename src/{ => Policy/__Libraries}/StellaOps.Policy/PolicyVerdict.cs (100%) rename src/{ => Policy/__Libraries}/StellaOps.Policy/Schemas/policy-schema@1.json (100%) rename src/{ => Policy/__Libraries}/StellaOps.Policy/Schemas/policy-scoring-default.json (100%) rename src/{ => Policy/__Libraries}/StellaOps.Policy/Schemas/policy-scoring-schema@1.json (100%) rename src/{ => Policy/__Libraries}/StellaOps.Policy/StellaOps.Policy.csproj (97%) rename src/{ => Policy/__Libraries}/StellaOps.Policy/Storage/IPolicySnapshotRepository.cs (100%) rename src/{ => Policy/__Libraries}/StellaOps.Policy/Storage/InMemoryPolicySnapshotRepository.cs (100%) rename src/{ => Policy/__Libraries}/StellaOps.Policy/TASKS.md (97%) rename src/{ => Policy/__Tests}/StellaOps.Policy.Engine.Tests/PolicyCompilerTests.cs (97%) rename src/{ => Policy/__Tests}/StellaOps.Policy.Engine.Tests/PolicyEvaluatorTests.cs (97%) rename src/{ => Policy/__Tests}/StellaOps.Policy.Engine.Tests/PolicyPackRepositoryTests.cs (98%) rename src/{ => Policy/__Tests}/StellaOps.Policy.Engine.Tests/StellaOps.Policy.Engine.Tests.csproj (68%) rename src/{ => Policy/__Tests}/StellaOps.Policy.Gateway.Tests/GatewayActivationTests.cs (97%) rename src/{ => Policy/__Tests}/StellaOps.Policy.Gateway.Tests/PolicyEngineClientTests.cs (97%) rename src/{ => Policy/__Tests}/StellaOps.Policy.Gateway.Tests/PolicyGatewayDpopProofGeneratorTests.cs (97%) rename src/{ => 
Policy/__Tests}/StellaOps.Policy.Gateway.Tests/StellaOps.Policy.Gateway.Tests.csproj (60%) rename src/{ => Policy/__Tests}/StellaOps.Policy.Tests/PolicyBinderTests.cs (100%) rename src/{ => Policy/__Tests}/StellaOps.Policy.Tests/PolicyEvaluationTests.cs (100%) rename src/{ => Policy/__Tests}/StellaOps.Policy.Tests/PolicyPreviewServiceTests.cs (100%) rename src/{ => Policy/__Tests}/StellaOps.Policy.Tests/PolicyScoringConfigTests.cs (100%) rename src/{ => Policy/__Tests}/StellaOps.Policy.Tests/PolicySnapshotStoreTests.cs (100%) rename src/{ => Policy/__Tests}/StellaOps.Policy.Tests/StellaOps.Policy.Tests.csproj (69%) rename src/{ => Provenance}/StellaOps.Provenance.Attestation/AGENTS.md (98%) rename src/{ => Provenance}/StellaOps.Provenance.Attestation/TASKS.md (99%) rename src/{ => Registry}/StellaOps.Registry.TokenService/Observability/RegistryTokenMetrics.cs (96%) rename src/{ => Registry}/StellaOps.Registry.TokenService/PlanRegistry.cs (96%) rename src/{ => Registry}/StellaOps.Registry.TokenService/Program.cs (97%) rename src/{ => Registry}/StellaOps.Registry.TokenService/Properties/launchSettings.json (96%) rename src/{ => Registry}/StellaOps.Registry.TokenService/RegistryAccessModels.cs (97%) rename src/{ => Registry}/StellaOps.Registry.TokenService/RegistryScopeParser.cs (96%) rename src/{ => Registry}/StellaOps.Registry.TokenService/RegistryTokenIssuer.cs (97%) rename src/{ => Registry}/StellaOps.Registry.TokenService/RegistryTokenServiceOptions.cs (96%) rename src/{ => Registry}/StellaOps.Registry.TokenService/Security/SigningKeyLoader.cs (96%) rename src/{ => Registry}/StellaOps.Registry.TokenService/StellaOps.Registry.TokenService.csproj (62%) create mode 100644 src/Registry/StellaOps.Registry.TokenService/appsettings.Development.json create mode 100644 src/Registry/StellaOps.Registry.TokenService/appsettings.json create mode 100644 src/Registry/StellaOps.Registry.sln rename src/{ => Registry/__Tests}/StellaOps.Registry.TokenService.Tests/PlanRegistryTests.cs (96%) rename src/{ => Registry/__Tests}/StellaOps.Registry.TokenService.Tests/RegistryScopeParserTests.cs (96%) rename src/{ => Registry/__Tests}/StellaOps.Registry.TokenService.Tests/RegistryTokenIssuerTests.cs (96%) create mode 100644 src/Registry/__Tests/StellaOps.Registry.TokenService.Tests/StellaOps.Registry.TokenService.Tests.csproj rename src/{ => Registry/__Tests}/StellaOps.Registry.TokenService.Tests/UnitTest1.cs (92%) create mode 100644 src/Registry/__Tests/StellaOps.Registry.TokenService.Tests/xunit.runner.json create mode 100644 src/RiskEngine/StellaOps.RiskEngine.sln rename src/{ => RiskEngine}/StellaOps.RiskEngine/AGENTS.md (98%) rename src/{ => RiskEngine}/StellaOps.RiskEngine/StellaOps.RiskEngine.Core/Class1.cs (91%) rename src/{ => RiskEngine}/StellaOps.RiskEngine/StellaOps.RiskEngine.Core/StellaOps.RiskEngine.Core.csproj (95%) rename src/{ => RiskEngine}/StellaOps.RiskEngine/StellaOps.RiskEngine.Infrastructure/Class1.cs (92%) rename src/{ => RiskEngine}/StellaOps.RiskEngine/StellaOps.RiskEngine.Infrastructure/StellaOps.RiskEngine.Infrastructure.csproj (94%) rename src/{ => RiskEngine}/StellaOps.RiskEngine/StellaOps.RiskEngine.Tests/StellaOps.RiskEngine.Tests.csproj (91%) rename src/{ => RiskEngine}/StellaOps.RiskEngine/StellaOps.RiskEngine.Tests/UnitTest1.cs (92%) create mode 100644 src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Tests/xunit.runner.json create mode 100644 src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/Program.cs rename src/{ => 
RiskEngine}/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/Properties/launchSettings.json (96%) rename src/{ => RiskEngine}/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/StellaOps.RiskEngine.WebService.csproj (94%) rename src/{ => RiskEngine}/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/StellaOps.RiskEngine.WebService.http (96%) create mode 100644 src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/appsettings.Development.json create mode 100644 src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/appsettings.json rename src/{ => RiskEngine}/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/Program.cs (96%) rename src/{ => RiskEngine}/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/Properties/launchSettings.json (95%) rename src/{ => RiskEngine}/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/StellaOps.RiskEngine.Worker.csproj (95%) rename src/{ => RiskEngine}/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/Worker.cs (96%) create mode 100644 src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/appsettings.Development.json create mode 100644 src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/appsettings.json rename src/{ => RiskEngine}/StellaOps.RiskEngine/StellaOps.RiskEngine.sln (98%) rename src/{ => RiskEngine}/StellaOps.RiskEngine/TASKS.md (99%) create mode 100644 src/SbomService/StellaOps.SbomService.sln rename src/{ => SbomService}/StellaOps.SbomService/AGENTS.md (98%) rename src/{ => SbomService}/StellaOps.SbomService/Program.cs (96%) rename src/{ => SbomService}/StellaOps.SbomService/StellaOps.SbomService.csproj (58%) rename src/{ => SbomService}/StellaOps.SbomService/TASKS.md (99%) rename src/{ => Scanner}/StellaOps.Scanner.Analyzers.Lang.Deno/TASKS.md (99%) rename src/{ => Scanner}/StellaOps.Scanner.Analyzers.Lang.Php/TASKS.md (99%) rename src/{ => Scanner}/StellaOps.Scanner.Analyzers.Lang.Ruby/TASKS.md (99%) rename src/{ => Scanner}/StellaOps.Scanner.Analyzers.Native/TASKS.md (99%) rename src/{ => Scanner}/StellaOps.Scanner.Sbomer.BuildXPlugin/AGENTS.md (100%) rename src/{ => Scanner}/StellaOps.Scanner.Sbomer.BuildXPlugin/Attestation/AttestorClient.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Sbomer.BuildXPlugin/Attestation/AttestorProvenanceRequest.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Sbomer.BuildXPlugin/BuildxPluginException.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Sbomer.BuildXPlugin/Cas/CasWriteResult.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Sbomer.BuildXPlugin/Cas/LocalCasClient.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Sbomer.BuildXPlugin/Cas/LocalCasOptions.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Sbomer.BuildXPlugin/Descriptor/DescriptorArtifact.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Sbomer.BuildXPlugin/Descriptor/DescriptorDocument.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Sbomer.BuildXPlugin/Descriptor/DescriptorGenerator.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Sbomer.BuildXPlugin/Descriptor/DescriptorGeneratorMetadata.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Sbomer.BuildXPlugin/Descriptor/DescriptorProvenance.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Sbomer.BuildXPlugin/Descriptor/DescriptorRequest.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Sbomer.BuildXPlugin/Descriptor/DescriptorSubject.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Sbomer.BuildXPlugin/Manifest/BuildxPluginCas.cs (100%) rename src/{ => 
Scanner}/StellaOps.Scanner.Sbomer.BuildXPlugin/Manifest/BuildxPluginEntryPoint.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Sbomer.BuildXPlugin/Manifest/BuildxPluginImage.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Sbomer.BuildXPlugin/Manifest/BuildxPluginManifest.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Sbomer.BuildXPlugin/Manifest/BuildxPluginManifestLoader.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Sbomer.BuildXPlugin/Program.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Sbomer.BuildXPlugin/StellaOps.Scanner.Sbomer.BuildXPlugin.csproj (100%) rename src/{ => Scanner}/StellaOps.Scanner.Sbomer.BuildXPlugin/TASKS.md (100%) rename src/{ => Scanner}/StellaOps.Scanner.Sbomer.BuildXPlugin/stellaops.sbom-indexer.manifest.json (100%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/AssemblyInfo.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Constants/ProblemTypes.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Contracts/OrchestratorEventContracts.cs (96%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Contracts/PolicyDiagnosticsContracts.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Contracts/PolicyPreviewContracts.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Contracts/ReportContracts.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Contracts/RuntimeEventsContracts.cs (96%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Contracts/RuntimePolicyContracts.cs (97%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Contracts/ScanStatusResponse.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Contracts/ScanSubmitRequest.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Contracts/ScanSubmitResponse.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Diagnostics/ServiceStatus.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Domain/ScanId.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Domain/ScanProgressEvent.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Domain/ScanSnapshot.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Domain/ScanStatus.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Domain/ScanSubmission.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Domain/ScanTarget.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Endpoints/HealthEndpoints.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Endpoints/PolicyEndpoints.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Endpoints/ReportEndpoints.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Endpoints/RuntimeEndpoints.cs (97%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Endpoints/ScanEndpoints.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Extensions/ConfigurationExtensions.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Extensions/OpenApiRegistrationExtensions.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Hosting/ScannerPluginHostFactory.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Infrastructure/ProblemResultFactory.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Options/ScannerWebServiceOptions.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Options/ScannerWebServiceOptionsPostConfigure.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Options/ScannerWebServiceOptionsValidator.cs 
(100%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Program.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Security/AnonymousAuthenticationHandler.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Security/ScannerAuthorityScopes.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Security/ScannerPolicies.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Serialization/OrchestratorEventSerializer.cs (96%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Services/IPlatformEventPublisher.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Services/IRedisConnectionFactory.cs (97%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Services/IReportEventDispatcher.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Services/IScanCoordinator.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Services/InMemoryScanCoordinator.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Services/NullPlatformEventPublisher.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Services/PolicyDtoMapper.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Services/RedisConnectionFactory.cs (97%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Services/RedisPlatformEventPublisher.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Services/ReportEventDispatcher.cs (97%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Services/ReportSigner.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Services/RuntimeEventIngestionService.cs (97%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Services/RuntimeEventRateLimiter.cs (97%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Services/RuntimePolicyService.cs (97%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Services/ScanProgressStream.cs (100%) create mode 100644 src/Scanner/StellaOps.Scanner.WebService/StellaOps.Scanner.WebService.csproj rename src/{ => Scanner}/StellaOps.Scanner.WebService/TASKS.md (97%) rename src/{ => Scanner}/StellaOps.Scanner.WebService/Utilities/ScanIdGenerator.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Worker/AGENTS.md (100%) rename src/{ => Scanner}/StellaOps.Scanner.Worker/Diagnostics/ScannerWorkerInstrumentation.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Worker/Diagnostics/ScannerWorkerMetrics.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Worker/Diagnostics/TelemetryExtensions.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Worker/Hosting/ScannerWorkerHostedService.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Worker/Options/ScannerWorkerOptions.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Worker/Options/ScannerWorkerOptionsValidator.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Worker/Processing/AnalyzerStageExecutor.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Worker/Processing/CompositeScanAnalyzerDispatcher.cs (97%) rename src/{ => Scanner}/StellaOps.Scanner.Worker/Processing/EntryTraceExecutionService.cs (97%) rename src/{ => Scanner}/StellaOps.Scanner.Worker/Processing/IDelayScheduler.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Worker/Processing/IEntryTraceExecutionService.cs (96%) rename src/{ => Scanner}/StellaOps.Scanner.Worker/Processing/IScanAnalyzerDispatcher.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Worker/Processing/IScanJobLease.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Worker/Processing/IScanJobSource.cs (100%) rename src/{ => 
Scanner}/StellaOps.Scanner.Worker/Processing/IScanStageExecutor.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Worker/Processing/LeaseHeartbeatService.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Worker/Processing/NoOpStageExecutor.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Worker/Processing/NullScanJobSource.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Worker/Processing/PollDelayStrategy.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Worker/Processing/ScanJobContext.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Worker/Processing/ScanJobProcessor.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Worker/Processing/ScanProgressReporter.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Worker/Processing/ScanStageNames.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Worker/Processing/SystemDelayScheduler.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Worker/Program.cs (100%) rename src/{ => Scanner}/StellaOps.Scanner.Worker/Properties/AssemblyInfo.cs (97%) rename src/{ => Scanner}/StellaOps.Scanner.Worker/StellaOps.Scanner.Worker.csproj (52%) rename src/{ => Scanner}/StellaOps.Scanner.Worker/TASKS.md (100%) create mode 100644 src/Scanner/StellaOps.Scanner.sln rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.DotNet/AGENTS.md (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.DotNet/DotNetAnalyzerPlugin.cs (96%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.DotNet/DotNetLanguageAnalyzer.cs (97%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.DotNet/GlobalUsings.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.DotNet/IDotNetAuthenticodeInspector.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.DotNet/Internal/DotNetDependencyCollector.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.DotNet/Internal/DotNetDepsFile.cs (98%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.DotNet/Internal/DotNetFileCaches.cs (96%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.DotNet/Internal/DotNetRuntimeConfig.cs (97%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.DotNet/StellaOps.Scanner.Analyzers.Lang.DotNet.csproj (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md (99%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.DotNet/manifest.json (96%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Go/AGENTS.md (94%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Go/GlobalUsings.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Go/GoAnalyzerPlugin.cs (96%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Go/GoLanguageAnalyzer.cs (97%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoAnalyzerMetrics.cs (97%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoBinaryScanner.cs (96%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoBuildInfo.cs (96%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoBuildInfoDecoder.cs (95%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoBuildInfoParser.cs (96%) rename src/{ => 
Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoBuildInfoProvider.cs (96%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoDwarfMetadata.cs (95%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoDwarfReader.cs (96%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoModule.cs (95%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoStrippedBinaryClassification.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Go/StellaOps.Scanner.Analyzers.Lang.Go.csproj (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Go/manifest.json (96%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Java/GlobalUsings.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ClassPath/JavaClassLocation.cs (97%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ClassPath/JavaClassPathAnalysis.cs (97%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ClassPath/JavaClassPathBuilder.cs (97%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ClassPath/JavaModuleDescriptor.cs (97%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ClassPath/JavaModuleInfoParser.cs (97%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaArchive.cs (97%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaArchiveEntry.cs (96%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaPackagingKind.cs (93%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaReleaseFileParser.cs (96%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaRuntimeImage.cs (96%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaWorkspace.cs (97%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaWorkspaceNormalizer.cs (96%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaZipEntryUtilities.cs (96%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Java/Internal/Reflection/JavaReflectionAnalysis.cs (96%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Java/Internal/Reflection/JavaReflectionAnalyzer.cs (97%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ServiceProviders/JavaServiceProviderScanner.cs (97%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ServiceProviders/JavaSpiCatalog.cs (97%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ServiceProviders/java-spi-catalog.json (96%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Java/JavaLanguageAnalyzer.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Java/Properties/AssemblyInfo.cs (97%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Java/StellaOps.Scanner.Analyzers.Lang.Java.csproj (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md (99%) rename src/{ => 
Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Java/manifest.json (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Node/AGENTS.md (96%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Node/GlobalUsings.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeAnalyzerMetrics.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeLifecycleScript.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeLockData.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeLockEntry.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodePackage.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodePackageCollector.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeWorkspaceIndex.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Node/NodeAnalyzerPlugin.cs (96%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Node/NodeLanguageAnalyzer.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Node/StellaOps.Scanner.Analyzers.Lang.Node.csproj (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md (99%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Node/manifest.json (96%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Python/AGENTS.md (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Python/GlobalUsings.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Python/Internal/PythonDistributionLoader.cs (97%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Python/PythonAnalyzerPlugin.cs (96%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Python/PythonLanguageAnalyzer.cs (97%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Python/StellaOps.Scanner.Analyzers.Lang.Python.csproj (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md (99%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Python/manifest.json (96%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Rust/AGENTS.md (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Rust/GlobalUsings.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Rust/Internal/RustAnalyzerCollector.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Rust/Internal/RustBinaryClassifier.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Rust/Internal/RustCargoLockParser.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Rust/Internal/RustFingerprintScanner.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Rust/RustAnalyzerPlugin.cs (97%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Rust/RustLanguageAnalyzer.cs (99%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Rust/StellaOps.Scanner.Analyzers.Lang.Rust.csproj (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md (100%) rename src/{ => 
Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang.Rust/manifest.json (96%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang/AGENTS.md (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang/Core/ILanguageAnalyzer.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang/Core/Internal/LanguageAnalyzerJson.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang/Core/LanguageAnalyzerContext.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang/Core/LanguageAnalyzerEngine.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang/Core/LanguageAnalyzerResult.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang/Core/LanguageComponentEvidence.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang/Core/LanguageComponentMapper.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang/Core/LanguageComponentRecord.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang/Core/LanguageUsageHints.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang/GlobalUsings.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang/Plugin/ILanguageAnalyzerPlugin.cs (96%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang/Plugin/LanguageAnalyzerPluginCatalog.cs (97%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang/SPRINTS_LANG_IMPLEMENTATION_PLAN.md (93%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang/StellaOps.Scanner.Analyzers.Lang.csproj (67%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.Lang/TASKS.md (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS.Apk/ApkAnalyzerPlugin.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS.Apk/ApkDatabaseParser.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS.Apk/ApkPackageAnalyzer.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS.Apk/Properties/AssemblyInfo.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS.Apk/StellaOps.Scanner.Analyzers.OS.Apk.csproj (97%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS.Apk/manifest.json (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS.Dpkg/DpkgAnalyzerPlugin.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS.Dpkg/DpkgPackageAnalyzer.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS.Dpkg/DpkgStatusParser.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS.Dpkg/Properties/AssemblyInfo.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS.Dpkg/StellaOps.Scanner.Analyzers.OS.Dpkg.csproj (97%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS.Dpkg/manifest.json (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS.Rpm/IRpmDatabaseReader.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS.Rpm/Internal/RpmHeader.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS.Rpm/Internal/RpmHeaderParser.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS.Rpm/Internal/RpmTags.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS.Rpm/Properties/AssemblyInfo.cs (100%) rename src/{ => 
Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS.Rpm/RpmAnalyzerPlugin.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS.Rpm/RpmDatabaseReader.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS.Rpm/RpmPackageAnalyzer.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS.Rpm/StellaOps.Scanner.Analyzers.OS.Rpm.csproj (97%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS.Rpm/manifest.json (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS/AGENTS.md (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS/Abstractions/IOSPackageAnalyzer.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS/Analyzers/OsPackageAnalyzerBase.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS/Helpers/CveHintExtractor.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS/Helpers/PackageUrlBuilder.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS/Helpers/PackageVersionParser.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS/Mapping/OsComponentMapper.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS/Model/AnalyzerWarning.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS/Model/OSAnalyzerTelemetry.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS/Model/OSPackageAnalyzerContext.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS/Model/OSPackageAnalyzerResult.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS/Model/OSPackageFileEvidence.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS/Model/OSPackageRecord.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS/Model/PackageEvidenceSource.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS/Plugin/IOSAnalyzerPlugin.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS/Plugin/OsAnalyzerPluginCatalog.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS/Properties/AssemblyInfo.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS/StellaOps.Scanner.Analyzers.OS.csproj (78%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Analyzers.OS/TASKS.md (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Cache/AGENTS.md (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Cache/Abstractions/IFileContentAddressableStore.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Cache/Abstractions/ILayerCacheStore.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Cache/Abstractions/LayerCacheEntry.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Cache/Abstractions/LayerCachePutRequest.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Cache/FileCas/FileContentAddressableStore.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Cache/FileCas/NullFileContentAddressableStore.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Cache/LayerCache/LayerCacheStore.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Cache/Maintenance/ScannerCacheMaintenanceService.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Cache/ScannerCacheMetrics.cs (100%) rename src/{ => 
Scanner/__Libraries}/StellaOps.Scanner.Cache/ScannerCacheOptions.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Cache/ScannerCacheServiceCollectionExtensions.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Cache/StellaOps.Scanner.Cache.csproj (98%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Cache/TASKS.md (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Core/AGENTS.md (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Core/Contracts/ComponentGraph.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Core/Contracts/ComponentModels.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Core/Contracts/SbomView.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Core/Contracts/ScanAnalysisKeys.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Core/Contracts/ScanAnalysisStore.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Core/Contracts/ScanAnalysisStoreExtensions.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Core/Contracts/ScanJob.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Core/Contracts/ScanJobIdJsonConverter.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Core/Contracts/ScanMetadataKeys.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Core/Contracts/ScanProgressEvent.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Core/Contracts/ScannerError.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Core/Observability/ScannerCorrelationContext.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Core/Observability/ScannerDiagnostics.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Core/Observability/ScannerLogExtensions.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Core/Observability/ScannerMetricNames.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Core/Security/AuthorityTokenSource.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Core/Security/IAuthorityTokenSource.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Core/Security/IPluginCatalogGuard.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Core/Security/RestartOnlyPluginGuard.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Core/Security/ScannerOperationalToken.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Core/Security/ServiceCollectionExtensions.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Core/Serialization/ScannerJsonOptions.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Core/StellaOps.Scanner.Core.csproj (65%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Core/TASKS.md (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Core/Utility/ScannerIdentifiers.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Core/Utility/ScannerTimestamps.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Diff/AGENTS.md (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Diff/ComponentDiffModels.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Diff/ComponentDiffer.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Diff/DiffJsonSerializer.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Diff/StellaOps.Scanner.Diff.csproj (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Diff/TASKS.md (100%) rename src/{ => 
Scanner/__Libraries}/StellaOps.Scanner.Emit/AGENTS.md (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Emit/Composition/CycloneDxComposer.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Emit/Composition/SbomCompositionRequest.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Emit/Composition/SbomCompositionResult.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Emit/Composition/SbomPolicyFinding.cs (97%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Emit/Composition/ScanAnalysisCompositionBuilder.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Emit/Index/BomIndexBuilder.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Emit/Packaging/ScannerArtifactPackageBuilder.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Emit/StellaOps.Scanner.Emit.csproj (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Emit/TASKS.md (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.EntryTrace/AGENTS.md (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.EntryTrace/Diagnostics/EntryTraceMetrics.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.EntryTrace/EntryTraceAnalyzer.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.EntryTrace/EntryTraceAnalyzerOptions.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.EntryTrace/EntryTraceContext.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.EntryTrace/EntryTraceImageContextFactory.cs (96%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.EntryTrace/EntryTraceTypes.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.EntryTrace/EntrypointSpecification.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.EntryTrace/FileSystem/IRootFileSystem.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.EntryTrace/FileSystem/LayeredRootFileSystem.cs (96%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.EntryTrace/IEntryTraceAnalyzer.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.EntryTrace/Oci/OciImageConfig.cs (96%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.EntryTrace/Parsing/ShellNodes.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.EntryTrace/Parsing/ShellParser.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.EntryTrace/Parsing/ShellToken.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.EntryTrace/Parsing/ShellTokenizer.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.EntryTrace/ServiceCollectionExtensions.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.EntryTrace/StellaOps.Scanner.EntryTrace.csproj (84%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.EntryTrace/TASKS.md (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Queue/AGENTS.md (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Queue/IScanQueue.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Queue/IScanQueueLease.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Queue/Nats/NatsScanQueue.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Queue/Nats/NatsScanQueueLease.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Queue/QueueEnvelopeFields.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Queue/QueueMetrics.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Queue/QueueTransportKind.cs (100%) rename src/{ => 
Scanner/__Libraries}/StellaOps.Scanner.Queue/Redis/RedisScanQueue.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Queue/Redis/RedisScanQueueLease.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Queue/ScanQueueContracts.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Queue/ScannerQueueHealthCheck.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Queue/ScannerQueueOptions.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Queue/ScannerQueueServiceCollectionExtensions.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Queue/StellaOps.Scanner.Queue.csproj (98%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Queue/TASKS.md (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Storage/AGENTS.md (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Storage/Catalog/ArtifactDocument.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Storage/Catalog/CatalogIdFactory.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Storage/Catalog/ImageDocument.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Storage/Catalog/JobDocument.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Storage/Catalog/LayerDocument.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Storage/Catalog/LifecycleRuleDocument.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Storage/Catalog/LinkDocument.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Storage/Catalog/RuntimeEventDocument.cs (96%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Storage/Extensions/ServiceCollectionExtensions.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Storage/Migrations/EnsureLifecycleRuleTtlMigration.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Storage/Migrations/IMongoMigration.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Storage/Migrations/MongoMigrationDocument.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Storage/Migrations/MongoMigrationRunner.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Storage/Mongo/MongoBootstrapper.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Storage/Mongo/MongoCollectionProvider.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Storage/ObjectStore/IArtifactObjectStore.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Storage/ObjectStore/RustFsArtifactObjectStore.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Storage/ObjectStore/S3ArtifactObjectStore.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Storage/Repositories/ArtifactRepository.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Storage/Repositories/ImageRepository.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Storage/Repositories/JobRepository.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Storage/Repositories/LayerRepository.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Storage/Repositories/LifecycleRuleRepository.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Storage/Repositories/LinkRepository.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Storage/Repositories/RuntimeEventRepository.cs (97%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Storage/ScannerStorageDefaults.cs (100%) rename src/{ => 
Scanner/__Libraries}/StellaOps.Scanner.Storage/ScannerStorageOptions.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Storage/Services/ArtifactStorageService.cs (100%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Storage/StellaOps.Scanner.Storage.csproj (98%) rename src/{ => Scanner/__Libraries}/StellaOps.Scanner.Storage/TASKS.md (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Fixtures/lang/go/basic/app (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Fixtures/lang/go/basic/expected.json (96%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Fixtures/lang/go/dwarf-only/app (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Fixtures/lang/go/dwarf-only/expected.json (96%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Fixtures/lang/go/stripped/app (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Fixtures/lang/go/stripped/expected.json (96%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Go/GoLanguageAnalyzerTests.cs (96%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Go.Tests/StellaOps.Scanner.Analyzers.Lang.Go.Tests.csproj (80%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Fixtures/java/basic/expected.json (96%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Java/JavaClassPathBuilderTests.cs (97%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Java/JavaLanguageAnalyzerTests.cs (97%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Java/JavaReflectionAnalyzerTests.cs (97%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Java/JavaServiceProviderScannerTests.cs (97%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Java/JavaWorkspaceNormalizerTests.cs (97%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Java.Tests/StellaOps.Scanner.Analyzers.Lang.Java.Tests.csproj (80%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/expected.json (96%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/package-lock.json (96%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/package.json (94%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/app/package.json (94%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/app/scripts/setup.js (95%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/lib/package.json (92%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/shared/package.json (93%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Node/NodeLanguageAnalyzerTests.cs (96%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Node.Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests.csproj (80%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/expected.json (100%) rename src/{ => 
Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered-2.0.dist-info/INSTALLER (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered-2.0.dist-info/METADATA (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered-2.0.dist-info/RECORD (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered-2.0.dist-info/WHEEL (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered-2.0.dist-info/entry_points.txt (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered/__init__.py (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered/cli.py (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered/core.py (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/LICENSE (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered-2.0.dist-info/INSTALLER (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered-2.0.dist-info/METADATA (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered-2.0.dist-info/RECORD (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered-2.0.dist-info/WHEEL (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered-2.0.dist-info/direct_url.json (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered-2.0.dist-info/entry_points.txt (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered/plugins/__init__.py (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered/plugins/plugin.py (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/expected.json (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg-1.2.3.data/scripts/cache-tool 
(100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg-1.2.3.dist-info/INSTALLER (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg-1.2.3.dist-info/METADATA (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg-1.2.3.dist-info/RECORD (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg-1.2.3.dist-info/WHEEL (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg-1.2.3.dist-info/entry_points.txt (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg/LICENSE (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg/__init__.py (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg/data/config.json (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg/md5only.txt (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/expected.json (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple-1.0.0.dist-info/INSTALLER (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple-1.0.0.dist-info/METADATA (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple-1.0.0.dist-info/RECORD (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple-1.0.0.dist-info/WHEEL (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple-1.0.0.dist-info/direct_url.json (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple-1.0.0.dist-info/entry_points.txt (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple/__init__.py (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple/__main__.py (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple/core.py (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Python/PythonLanguageAnalyzerTests.cs (99%) rename src/{ => 
Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Python.Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests.csproj (79%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/Core/LanguageAnalyzerResultTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/Core/LanguageComponentMapperTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/Determinism/LanguageAnalyzerHarnessTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/DotNet/DotNetLanguageAnalyzerTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/determinism/basic/expected.json (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/determinism/basic/input/placeholder.txt (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/AppA.deps.json (96%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/AppA.runtimeconfig.json (94%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/AppB.deps.json (96%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/AppB.runtimeconfig.json (94%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/expected.json (97%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/packages/stellaops.logging/2.5.1/LICENSE.txt (97%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/packages/stellaops.logging/2.5.1/stellaops.logging.nuspec (97%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/packages/stellaops.toolkit/1.2.3/LICENSE.txt (96%) rename src/{StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple => Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi}/packages/stellaops.toolkit/1.2.3/stellaops.toolkit.nuspec (97%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/MyApp.deps.json (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/MyApp.runtimeconfig.json (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/expected.json (97%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/packages/stellaops.runtime.selfcontained/2.1.0/stellaops.runtime.selfcontained.nuspec (97%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/packages/stellaops.toolkit/1.2.3/LICENSE.txt (96%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/packages/stellaops.toolkit/1.2.3/stellaops.toolkit.nuspec (97%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/runtimes/linux-x64/native/libstellaopsnative.so (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/signed/Signed.App.deps.json (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/signed/Signed.App.runtimeconfig.json (100%) rename src/{ => 
Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/signed/expected.json (97%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/signed/packages/microsoft.extensions.logging/9.0.0/microsoft.extensions.logging.nuspec (97%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/Sample.App.deps.json (96%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/Sample.App.runtimeconfig.json (94%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/expected.json (97%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/packages/microsoft.extensions.logging/9.0.0/microsoft.extensions.logging.nuspec (97%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/packages/stellaops.toolkit/1.2.3/LICENSE.txt (96%) rename src/{StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi => Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple}/packages/stellaops.toolkit/1.2.3/stellaops.toolkit.nuspec (97%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/rust/simple/Cargo.lock (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/rust/simple/expected.json (96%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/rust/simple/target/debug/.fingerprint/my_app-1234567890abcdef/bin-my_app-1234567890abcdef.json (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/rust/simple/target/debug/.fingerprint/serde-abcdef1234567890/libserde-abcdef1234567890.json (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/Harness/LanguageAnalyzerTestHarness.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/Rust/RustLanguageAnalyzerTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/StellaOps.Scanner.Analyzers.Lang.Tests.csproj (72%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/TestUtilities/JavaClassFileFactory.cs (97%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/TestUtilities/JavaFixtureBuilder.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.Lang.Tests/TestUtilities/TestPaths.cs (100%) create mode 100644 src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/xunit.runner.json rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/apk/lib/apk/db/installed (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/dpkg/var/lib/dpkg/info/bash.conffiles (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/dpkg/var/lib/dpkg/info/bash.list (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/dpkg/var/lib/dpkg/info/bash.md5sums (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/dpkg/var/lib/dpkg/status (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/goldens/apk.json (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/goldens/dpkg.json (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/goldens/rpm.json (100%) rename src/{ => 
Scanner/__Tests}/StellaOps.Scanner.Analyzers.OS.Tests/Mapping/OsComponentMapperTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.OS.Tests/OsAnalyzerDeterminismTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.OS.Tests/StellaOps.Scanner.Analyzers.OS.Tests.csproj (62%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.OS.Tests/TestUtilities/FixtureManager.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.OS.Tests/TestUtilities/GoldenAssert.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Analyzers.OS.Tests/TestUtilities/SnapshotSerializer.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Cache.Tests/LayerCacheRoundTripTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Cache.Tests/StellaOps.Scanner.Cache.Tests.csproj (84%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Core.Tests/Contracts/ComponentGraphBuilderTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Core.Tests/Contracts/ComponentModelsTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Core.Tests/Contracts/ScanJobTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Core.Tests/Contracts/ScannerCoreContractsTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Core.Tests/Fixtures/scan-job.json (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Core.Tests/Fixtures/scan-progress-event.json (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Core.Tests/Fixtures/scanner-error.json (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Core.Tests/Observability/ScannerLogExtensionsPerformanceTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Core.Tests/Observability/ScannerLogExtensionsTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Core.Tests/Security/AuthorityTokenSourceTests.cs (97%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Core.Tests/Security/DpopProofValidatorTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Core.Tests/Security/RestartOnlyPluginGuardTests.cs (100%) create mode 100644 src/Scanner/__Tests/StellaOps.Scanner.Core.Tests/StellaOps.Scanner.Core.Tests.csproj rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Core.Tests/Utility/ScannerIdentifiersTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Core.Tests/Utility/ScannerTimestampsTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Diff.Tests/ComponentDifferTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Diff.Tests/StellaOps.Scanner.Diff.Tests.csproj (59%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Emit.Tests/Composition/CycloneDxComposerTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Emit.Tests/Composition/ScanAnalysisCompositionBuilderTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Emit.Tests/Index/BomIndexBuilderTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Emit.Tests/Packaging/ScannerArtifactPackageBuilderTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Emit.Tests/StellaOps.Scanner.Emit.Tests.csproj (59%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.EntryTrace.Tests/EntryTraceAnalyzerTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.EntryTrace.Tests/EntryTraceImageContextFactoryTests.cs (96%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.EntryTrace.Tests/LayeredRootFileSystemTests.cs (97%) rename src/{ => 
Scanner/__Tests}/StellaOps.Scanner.EntryTrace.Tests/ShellParserTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.EntryTrace.Tests/StellaOps.Scanner.EntryTrace.Tests.csproj (66%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.EntryTrace.Tests/TestRootFileSystem.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Queue.Tests/QueueLeaseIntegrationTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Queue.Tests/StellaOps.Scanner.Queue.Tests.csproj (69%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/Attestation/AttestorClientTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/Cas/LocalCasClientTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/Descriptor/DescriptorGeneratorTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/Descriptor/DescriptorGoldenTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/Fixtures/descriptor.baseline.json (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/Manifest/BuildxPluginManifestLoaderTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests.csproj (69%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/TestUtilities/TempDirectory.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Storage.Tests/InMemoryArtifactObjectStore.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Storage.Tests/RustFsArtifactObjectStoreTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Storage.Tests/ScannerMongoFixture.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Storage.Tests/StellaOps.Scanner.Storage.Tests.csproj (58%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Storage.Tests/StorageDualWriteFixture.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.WebService.Tests/AuthorizationTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.WebService.Tests/HealthEndpointsTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.WebService.Tests/PlatformEventPublisherRegistrationTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.WebService.Tests/PlatformEventSamplesTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.WebService.Tests/PolicyEndpointsTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.WebService.Tests/ReportEventDispatcherTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.WebService.Tests/ReportSamplesTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.WebService.Tests/ReportsEndpointsTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.WebService.Tests/RuntimeEndpointsTests.cs (97%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.WebService.Tests/ScannerApplicationFactory.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.WebService.Tests/ScansEndpointsTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.WebService.Tests/StellaOps.Scanner.WebService.Tests.csproj (81%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Worker.Tests/CompositeScanAnalyzerDispatcherTests.cs (97%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Worker.Tests/EntryTraceExecutionServiceTests.cs (97%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Worker.Tests/LeaseHeartbeatServiceTests.cs (100%) rename src/{ => 
Scanner/__Tests}/StellaOps.Scanner.Worker.Tests/RedisWorkerSmokeTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Worker.Tests/ScannerWorkerOptionsValidatorTests.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Worker.Tests/StellaOps.Scanner.Worker.Tests.csproj (53%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Worker.Tests/TestInfrastructure/StaticOptionsMonitor.cs (100%) rename src/{ => Scanner/__Tests}/StellaOps.Scanner.Worker.Tests/WorkerBasicScanScenarioTests.cs (100%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/AGENTS.md (100%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/Auth/AnonymousAuthenticationHandler.cs (97%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/Auth/ClaimsTenantContextAccessor.cs (97%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/Auth/HeaderScopeAuthorizer.cs (97%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/Auth/HeaderTenantContextAccessor.cs (96%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/Auth/IScopeAuthorizer.cs (95%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/Auth/ITenantContextAccessor.cs (95%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/Auth/TokenScopeAuthorizer.cs (97%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/EventWebhooks/EventWebhookEndpointExtensions.cs (97%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/EventWebhooks/IInboundExportEventSink.cs (97%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/EventWebhooks/IWebhookRateLimiter.cs (96%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/EventWebhooks/IWebhookRequestAuthenticator.cs (97%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/EventWebhooks/InMemoryWebhookRateLimiter.cs (96%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/EventWebhooks/LoggingExportEventSink.cs (97%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/EventWebhooks/WebhookPayloads.cs (96%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/GraphJobs/CartographerWebhookClient.cs (97%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/GraphJobs/Events/GraphJobCompletedEvent.cs (96%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/GraphJobs/Events/GraphJobEventFactory.cs (97%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/GraphJobs/Events/GraphJobEventKinds.cs (96%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/GraphJobs/Events/GraphJobEventPublisher.cs (97%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/GraphJobs/GraphBuildJobRequest.cs (96%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/GraphJobs/GraphJobCompletionNotification.cs (96%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/GraphJobs/GraphJobCompletionRequest.cs (96%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/GraphJobs/GraphJobEndpointExtensions.cs (97%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/GraphJobs/GraphJobQuery.cs (95%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/GraphJobs/GraphJobResponse.cs (96%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/GraphJobs/GraphJobService.cs (97%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/GraphJobs/GraphOverlayJobRequest.cs (96%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/GraphJobs/ICartographerWebhookClient.cs (97%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/GraphJobs/IGraphJobCompletionPublisher.cs (97%) rename src/{ => 
Scheduler}/StellaOps.Scheduler.WebService/GraphJobs/IGraphJobService.cs (97%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/GraphJobs/IGraphJobStore.cs (97%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/GraphJobs/InMemoryGraphJobStore.cs (97%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/GraphJobs/MongoGraphJobStore.cs (97%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/GraphJobs/NullCartographerWebhookClient.cs (97%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/GraphJobs/NullGraphJobCompletionPublisher.cs (97%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/GraphJobs/OverlayLagMetricsResponse.cs (96%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/Hosting/SchedulerPluginHostFactory.cs (97%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/ISystemClock.cs (95%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/Options/SchedulerAuthorityOptions.cs (97%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/Options/SchedulerCartographerOptions.cs (95%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/Options/SchedulerEventsOptions.cs (96%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/Options/SchedulerOptions.cs (96%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/PolicyRuns/IPolicyRunService.cs (97%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/PolicyRuns/InMemoryPolicyRunService.cs (97%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/PolicyRuns/PolicyRunEndpointExtensions.cs (97%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/PolicyRuns/PolicyRunQueryOptions.cs (96%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/PolicyRuns/PolicyRunService.cs (97%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/Program.cs (97%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/Properties/AssemblyInfo.cs (97%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/Runs/InMemoryRunRepository.cs (97%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/Runs/RunContracts.cs (98%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/Runs/RunEndpoints.cs (97%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/SchedulerEndpointHelpers.cs (96%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/Schedules/InMemorySchedulerServices.cs (97%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/Schedules/ScheduleContracts.cs (98%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/Schedules/ScheduleEndpoints.cs (97%) create mode 100644 src/Scheduler/StellaOps.Scheduler.WebService/StellaOps.Scheduler.WebService.csproj rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/TASKS.md (98%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/docs/SCHED-WEB-16-103-RUN-APIS.md (96%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/docs/SCHED-WEB-16-104-WEBHOOKS.md (97%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/docs/SCHED-WEB-20-001-POLICY-RUNS.md (97%) rename src/{ => Scheduler}/StellaOps.Scheduler.WebService/docs/SCHED-WEB-21-001-GRAPH-APIS.md (96%) rename src/{ => Scheduler}/StellaOps.Scheduler.Worker.Host/Program.cs (100%) rename src/{ => Scheduler}/StellaOps.Scheduler.Worker.Host/StellaOps.Scheduler.Worker.Host.csproj (100%) create mode 100644 src/Scheduler/StellaOps.Scheduler.sln rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.ImpactIndex/AGENTS.md (100%) rename src/{ => 
Scheduler/__Libraries}/StellaOps.Scheduler.ImpactIndex/FixtureImpactIndex.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.ImpactIndex/IImpactIndex.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.ImpactIndex/ImpactImageRecord.cs (96%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.ImpactIndex/ImpactIndexServiceCollectionExtensions.cs (96%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.ImpactIndex/ImpactIndexStubOptions.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.ImpactIndex/Ingestion/BomIndexReader.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.ImpactIndex/Ingestion/ImpactIndexIngestionRequest.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.ImpactIndex/REMOVAL_NOTE.md (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.ImpactIndex/RoaringImpactIndex.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.ImpactIndex/StellaOps.Scheduler.ImpactIndex.csproj (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.ImpactIndex/TASKS.md (88%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Models/AGENTS.md (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Models/AssemblyInfo.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Models/AuditRecord.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Models/CanonicalJsonSerializer.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Models/EnumConverters.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Models/Enums.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Models/GraphBuildJob.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Models/GraphJobStateMachine.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Models/GraphOverlayJob.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Models/ImpactSet.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Models/PolicyRunJob.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Models/PolicyRunModels.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Models/Run.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Models/RunReasonExtensions.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Models/RunStateMachine.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Models/RunStatsBuilder.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Models/Schedule.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Models/SchedulerSchemaMigration.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Models/SchedulerSchemaMigrationResult.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Models/SchedulerSchemaVersions.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Models/Selector.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Models/StellaOps.Scheduler.Models.csproj (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Models/TASKS.md (95%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Models/Validation.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Models/docs/SCHED-MODELS-16-103-DESIGN.md (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Models/docs/SCHED-MODELS-20-001-POLICY-RUNS.md (97%) rename src/{ => 
Scheduler/__Libraries}/StellaOps.Scheduler.Models/docs/SCHED-MODELS-21-001-GRAPH-JOBS.md (98%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Queue/AGENTS.md (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Queue/AssemblyInfo.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Queue/ISchedulerQueueTransportDiagnostics.cs (95%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Queue/Nats/INatsSchedulerQueuePayload.cs (95%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Queue/Nats/NatsSchedulerPlannerQueue.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Queue/Nats/NatsSchedulerQueueBase.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Queue/Nats/NatsSchedulerQueueLease.cs (96%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Queue/Nats/NatsSchedulerRunnerQueue.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Queue/README.md (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Queue/Redis/IRedisSchedulerQueuePayload.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Queue/Redis/RedisSchedulerPlannerQueue.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Queue/Redis/RedisSchedulerQueueBase.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Queue/Redis/RedisSchedulerQueueLease.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Queue/Redis/RedisSchedulerRunnerQueue.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Queue/SchedulerQueueContracts.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Queue/SchedulerQueueFields.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Queue/SchedulerQueueHealthCheck.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Queue/SchedulerQueueMetrics.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Queue/SchedulerQueueOptions.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Queue/SchedulerQueueServiceCollectionExtensions.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Queue/SchedulerQueueTransportKind.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Queue/StellaOps.Scheduler.Queue.csproj (98%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Queue/TASKS.md (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/AGENTS.md (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Documents/RunSummaryDocument.cs (95%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Internal/SchedulerMongoContext.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Internal/SchedulerMongoInitializer.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Internal/SchedulerMongoInitializerHostedService.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Migrations/EnsureSchedulerCollectionsMigration.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Migrations/EnsureSchedulerIndexesMigration.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Migrations/ISchedulerMongoMigration.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Migrations/SchedulerMongoMigrationRecord.cs (100%) rename src/{ => 
Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Migrations/SchedulerMongoMigrationRunner.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Options/SchedulerMongoOptions.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Projections/RunSummaryProjection.cs (96%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Properties/AssemblyInfo.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/README.md (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Repositories/AuditQueryOptions.cs (96%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Repositories/AuditRepository.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Repositories/GraphJobRepository.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Repositories/IAuditRepository.cs (96%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Repositories/IGraphJobRepository.cs (98%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Repositories/IImpactSnapshotRepository.cs (96%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Repositories/IPolicyRunJobRepository.cs (96%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Repositories/IRunRepository.cs (96%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Repositories/IRunSummaryRepository.cs (96%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Repositories/IScheduleRepository.cs (96%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Repositories/ImpactSnapshotRepository.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Repositories/PolicyRunJobRepository.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Repositories/RunQueryOptions.cs (96%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Repositories/RunRepository.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Repositories/RunSummaryRepository.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Repositories/ScheduleQueryOptions.cs (96%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Repositories/ScheduleRepository.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Serialization/AuditRecordDocumentMapper.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Serialization/BsonDocumentJsonExtensions.cs (96%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Serialization/GraphJobDocumentMapper.cs (96%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Serialization/ImpactSetDocumentMapper.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Serialization/PolicyRunJobDocumentMapper.cs (96%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Serialization/RunDocumentMapper.cs (96%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Serialization/ScheduleDocumentMapper.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/ServiceCollectionExtensions.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Services/IRunSummaryService.cs (96%) rename 
src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Services/ISchedulerAuditService.cs (96%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Services/RunSummaryService.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Services/SchedulerAuditEvent.cs (96%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Services/SchedulerAuditService.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Sessions/ISchedulerMongoSessionFactory.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Sessions/SchedulerMongoSessionFactory.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/Sessions/SchedulerMongoSessionOptions.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/StellaOps.Scheduler.Storage.Mongo.csproj (98%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Storage.Mongo/TASKS.md (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/AGENTS.md (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/DependencyInjection/SchedulerWorkerServiceCollectionExtensions.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/Events/SchedulerEventPublisher.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/Execution/HttpScannerReportClient.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/Execution/RunnerBackgroundService.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/Execution/RunnerExecutionService.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/Execution/ScannerReportClient.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/Graph/Cartographer/HttpCartographerBuildClient.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/Graph/Cartographer/HttpCartographerOverlayClient.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/Graph/Cartographer/ICartographerBuildClient.cs (96%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/Graph/Cartographer/ICartographerOverlayClient.cs (96%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/Graph/GraphBuildBackgroundService.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/Graph/GraphBuildExecutionService.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/Graph/GraphOverlayBackgroundService.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/Graph/GraphOverlayExecutionService.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/Graph/Scheduler/HttpGraphJobCompletionClient.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/Graph/Scheduler/IGraphJobCompletionClient.cs (96%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/ImpactShard.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/ImpactShardPlanner.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/ImpactTargetingService.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/Observability/SchedulerWorkerMetrics.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/Options/SchedulerWorkerOptions.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/Planning/PlannerBackgroundService.cs (100%) rename 
src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/Planning/PlannerExecutionResult.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/Planning/PlannerExecutionService.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/Planning/PlannerQueueDispatchService.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/Planning/PlannerQueueDispatcherBackgroundService.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/Policy/HttpPolicyRunClient.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/Policy/IPolicyRunClient.cs (96%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/Policy/IPolicyRunTargetingService.cs (96%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/Policy/PolicyRunDispatchBackgroundService.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/Policy/PolicyRunExecutionResult.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/Policy/PolicyRunExecutionService.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/Policy/PolicyRunSubmissionResult.cs (96%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/Policy/PolicyRunTargetingResult.cs (96%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/Policy/PolicyRunTargetingService.cs (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/Properties/AssemblyInfo.cs (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/StellaOps.Scheduler.Worker.csproj (75%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/TASKS.md (98%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-16-201-PLANNER.md (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-16-202-IMPACT-TARGETING.md (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-16-203-RUNNER.md (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-16-204-EVENTS.md (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-16-205-OBSERVABILITY.md (100%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-20-301-POLICY-RUNS.md (98%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-20-302-POLICY-DELTA-TARGETING.md (98%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-21-201-GRAPH-BUILD.md (97%) rename src/{ => Scheduler/__Libraries}/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-21-202-GRAPH-OVERLAY.md (97%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.ImpactIndex.Tests/FixtureImpactIndexTests.cs (97%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.ImpactIndex.Tests/RoaringImpactIndexTests.cs (97%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.ImpactIndex.Tests/StellaOps.Scheduler.ImpactIndex.Tests.csproj (68%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Models.Tests/AuditRecordTests.cs (100%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Models.Tests/GraphJobStateMachineTests.cs (97%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Models.Tests/ImpactSetTests.cs (100%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Models.Tests/PolicyRunModelsTests.cs (97%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Models.Tests/RescanDeltaEventSampleTests.cs (100%) 
rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Models.Tests/RunStateMachineTests.cs (100%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Models.Tests/RunValidationTests.cs (100%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Models.Tests/SamplePayloadTests.cs (100%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Models.Tests/ScheduleSerializationTests.cs (100%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Models.Tests/SchedulerSchemaMigrationTests.cs (100%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Models.Tests/StellaOps.Scheduler.Models.Tests.csproj (64%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Queue.Tests/PlannerAndRunnerMessageTests.cs (100%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Queue.Tests/RedisSchedulerQueueTests.cs (100%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Queue.Tests/SchedulerQueueServiceCollectionExtensionsTests.cs (97%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Queue.Tests/StellaOps.Scheduler.Queue.Tests.csproj (80%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Storage.Mongo.Tests/GlobalUsings.cs (100%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Storage.Mongo.Tests/Integration/SchedulerMongoRoundTripTests.cs (100%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Storage.Mongo.Tests/Migrations/SchedulerMongoMigrationTests.cs (100%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Storage.Mongo.Tests/Repositories/AuditRepositoryTests.cs (97%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Storage.Mongo.Tests/Repositories/ImpactSnapshotRepositoryTests.cs (97%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Storage.Mongo.Tests/Repositories/RunRepositoryTests.cs (97%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Storage.Mongo.Tests/Repositories/ScheduleRepositoryTests.cs (97%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Storage.Mongo.Tests/SchedulerMongoTestHarness.cs (97%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Storage.Mongo.Tests/Services/RunSummaryServiceTests.cs (97%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Storage.Mongo.Tests/Services/SchedulerAuditServiceTests.cs (97%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Storage.Mongo.Tests/Sessions/SchedulerMongoSessionFactoryTests.cs (97%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Storage.Mongo.Tests/StellaOps.Scheduler.Storage.Mongo.Tests.csproj (71%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Storage.Mongo.Tests/TestDataFactory.cs (96%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.WebService.Tests/CartographerWebhookClientTests.cs (97%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.WebService.Tests/EventWebhookEndpointTests.cs (97%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.WebService.Tests/GlobalUsings.cs (97%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.WebService.Tests/GraphJobEndpointTests.cs (97%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.WebService.Tests/GraphJobEventPublisherTests.cs (97%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.WebService.Tests/PolicyRunEndpointTests.cs (97%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.WebService.Tests/RunEndpointTests.cs (97%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.WebService.Tests/ScheduleEndpointTests.cs (97%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.WebService.Tests/SchedulerPluginHostFactoryTests.cs (97%) rename src/{ => 
Scheduler/__Tests}/StellaOps.Scheduler.WebService.Tests/SchedulerWebApplicationFactory.cs (98%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.WebService.Tests/StellaOps.Scheduler.WebService.Tests.csproj (81%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Worker.Tests/GlobalUsings.cs (97%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Worker.Tests/GraphBuildExecutionServiceTests.cs (97%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Worker.Tests/GraphOverlayExecutionServiceTests.cs (97%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Worker.Tests/HttpScannerReportClientTests.cs (100%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Worker.Tests/ImpactShardPlannerTests.cs (100%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Worker.Tests/ImpactTargetingServiceTests.cs (100%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Worker.Tests/PlannerBackgroundServiceTests.cs (97%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Worker.Tests/PlannerExecutionServiceTests.cs (100%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Worker.Tests/PlannerQueueDispatchServiceTests.cs (100%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Worker.Tests/PolicyRunExecutionServiceTests.cs (97%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Worker.Tests/PolicyRunTargetingServiceTests.cs (97%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Worker.Tests/RunnerExecutionServiceTests.cs (100%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Worker.Tests/SchedulerEventPublisherTests.cs (100%) rename src/{ => Scheduler/__Tests}/StellaOps.Scheduler.Worker.Tests/StellaOps.Scheduler.Worker.Tests.csproj (57%) rename src/{ => Sdk}/StellaOps.Sdk.Generator/AGENTS.md (98%) rename src/{ => Sdk}/StellaOps.Sdk.Generator/TASKS.md (99%) rename src/{ => Sdk}/StellaOps.Sdk.Release/AGENTS.md (98%) rename src/{ => Sdk}/StellaOps.Sdk.Release/TASKS.md (99%) create mode 100644 src/Signals/StellaOps.Signals.sln rename src/{ => Signals}/StellaOps.Signals/AGENTS.md (89%) rename src/{ => Signals}/StellaOps.Signals/Authentication/AnonymousAuthenticationHandler.cs (97%) rename src/{ => Signals}/StellaOps.Signals/Authentication/HeaderScopeAuthorizer.cs (96%) rename src/{ => Signals}/StellaOps.Signals/Authentication/TokenScopeAuthorizer.cs (96%) rename src/{ => Signals}/StellaOps.Signals/Hosting/SignalsStartupState.cs (96%) rename src/{ => Signals}/StellaOps.Signals/Models/CallgraphArtifactMetadata.cs (96%) rename src/{ => Signals}/StellaOps.Signals/Models/CallgraphDocument.cs (96%) rename src/{ => Signals}/StellaOps.Signals/Models/CallgraphEdge.cs (95%) rename src/{ => Signals}/StellaOps.Signals/Models/CallgraphIngestRequest.cs (97%) rename src/{ => Signals}/StellaOps.Signals/Models/CallgraphIngestResponse.cs (96%) rename src/{ => Signals}/StellaOps.Signals/Models/CallgraphNode.cs (95%) rename src/{ => Signals}/StellaOps.Signals/Options/SignalsArtifactStorageOptions.cs (96%) rename src/{ => Signals}/StellaOps.Signals/Options/SignalsAuthorityOptions.cs (96%) rename src/{ => Signals}/StellaOps.Signals/Options/SignalsAuthorityOptionsConfigurator.cs (96%) rename src/{ => Signals}/StellaOps.Signals/Options/SignalsMongoOptions.cs (96%) rename src/{ => Signals}/StellaOps.Signals/Options/SignalsOptions.cs (95%) rename src/{ => Signals}/StellaOps.Signals/Parsing/CallgraphParseResult.cs (96%) rename src/{ => Signals}/StellaOps.Signals/Parsing/CallgraphParserNotFoundException.cs (96%) rename src/{ => 
Signals}/StellaOps.Signals/Parsing/CallgraphParserValidationException.cs (95%) rename src/{ => Signals}/StellaOps.Signals/Parsing/ICallgraphParser.cs (96%) rename src/{ => Signals}/StellaOps.Signals/Parsing/ICallgraphParserResolver.cs (96%) rename src/{ => Signals}/StellaOps.Signals/Parsing/SimpleJsonCallgraphParser.cs (97%) rename src/{ => Signals}/StellaOps.Signals/Persistence/ICallgraphRepository.cs (96%) rename src/{ => Signals}/StellaOps.Signals/Persistence/MongoCallgraphRepository.cs (97%) rename src/{ => Signals}/StellaOps.Signals/Program.cs (97%) rename src/{ => Signals}/StellaOps.Signals/Routing/SignalsPolicies.cs (96%) rename src/{ => Signals}/StellaOps.Signals/Services/CallgraphIngestionService.cs (97%) rename src/{ => Signals}/StellaOps.Signals/Services/ICallgraphIngestionService.cs (96%) rename src/{ => Signals}/StellaOps.Signals/StellaOps.Signals.csproj (52%) rename src/{ => Signals}/StellaOps.Signals/Storage/FileSystemCallgraphArtifactStore.cs (97%) rename src/{ => Signals}/StellaOps.Signals/Storage/ICallgraphArtifactStore.cs (96%) rename src/{ => Signals}/StellaOps.Signals/Storage/Models/CallgraphArtifactSaveRequest.cs (96%) rename src/{ => Signals}/StellaOps.Signals/Storage/Models/StoredCallgraphArtifact.cs (96%) rename src/{ => Signals}/StellaOps.Signals/TASKS.md (99%) create mode 100644 src/Signer/StellaOps.Signer.sln rename src/{ => Signer}/StellaOps.Signer/AGENTS.md (64%) rename src/{ => Signer}/StellaOps.Signer/StellaOps.Signer.Core/SignerAbstractions.cs (100%) rename src/{ => Signer}/StellaOps.Signer/StellaOps.Signer.Core/SignerContracts.cs (100%) rename src/{ => Signer}/StellaOps.Signer/StellaOps.Signer.Core/SignerExceptions.cs (100%) rename src/{ => Signer}/StellaOps.Signer/StellaOps.Signer.Core/SignerPipeline.cs (100%) rename src/{ => Signer}/StellaOps.Signer/StellaOps.Signer.Core/SignerStatementBuilder.cs (100%) rename src/{ => Signer}/StellaOps.Signer/StellaOps.Signer.Core/StellaOps.Signer.Core.csproj (100%) rename src/{ => Signer}/StellaOps.Signer/StellaOps.Signer.Infrastructure/Auditing/InMemorySignerAuditSink.cs (100%) rename src/{ => Signer}/StellaOps.Signer/StellaOps.Signer.Infrastructure/Options/SignerCryptoOptions.cs (100%) rename src/{ => Signer}/StellaOps.Signer/StellaOps.Signer.Infrastructure/Options/SignerEntitlementOptions.cs (100%) rename src/{ => Signer}/StellaOps.Signer/StellaOps.Signer.Infrastructure/Options/SignerReleaseVerificationOptions.cs (100%) rename src/{ => Signer}/StellaOps.Signer/StellaOps.Signer.Infrastructure/ProofOfEntitlement/InMemoryProofOfEntitlementIntrospector.cs (100%) rename src/{ => Signer}/StellaOps.Signer/StellaOps.Signer.Infrastructure/Quotas/InMemoryQuotaService.cs (100%) rename src/{ => Signer}/StellaOps.Signer/StellaOps.Signer.Infrastructure/ReleaseVerification/DefaultReleaseIntegrityVerifier.cs (100%) rename src/{ => Signer}/StellaOps.Signer/StellaOps.Signer.Infrastructure/ServiceCollectionExtensions.cs (100%) rename src/{ => Signer}/StellaOps.Signer/StellaOps.Signer.Infrastructure/Signing/HmacDsseSigner.cs (100%) rename src/{ => Signer}/StellaOps.Signer/StellaOps.Signer.Infrastructure/StellaOps.Signer.Infrastructure.csproj (98%) rename src/{ => Signer}/StellaOps.Signer/StellaOps.Signer.Tests/SignerEndpointsTests.cs (97%) rename src/{ => Signer}/StellaOps.Signer/StellaOps.Signer.Tests/StellaOps.Signer.Tests.csproj (74%) rename src/{ => Signer}/StellaOps.Signer/StellaOps.Signer.WebService/Contracts/SignDsseContracts.cs (100%) rename src/{ => 
Signer}/StellaOps.Signer/StellaOps.Signer.WebService/Endpoints/SignerEndpoints.cs (100%) rename src/{ => Signer}/StellaOps.Signer/StellaOps.Signer.WebService/Program.cs (100%) rename src/{ => Signer}/StellaOps.Signer/StellaOps.Signer.WebService/Security/StubBearerAuthenticationDefaults.cs (96%) rename src/{ => Signer}/StellaOps.Signer/StellaOps.Signer.WebService/Security/StubBearerAuthenticationHandler.cs (97%) rename src/{ => Signer}/StellaOps.Signer/StellaOps.Signer.WebService/StellaOps.Signer.WebService.csproj (60%) rename src/{ => Signer}/StellaOps.Signer/StellaOps.Signer.sln (100%) rename src/{ => Signer}/StellaOps.Signer/TASKS.md (94%) delete mode 100644 src/StellaOps.Aoc.Tests/StellaOps.Aoc.Tests.csproj delete mode 100644 src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/StellaOps.Bench.ScannerAnalyzers.csproj delete mode 100644 src/StellaOps.Cartographer/StellaOps.Cartographer.csproj delete mode 100644 src/StellaOps.Cli.Tests/StellaOps.Cli.Tests.csproj delete mode 100644 src/StellaOps.Concelier.Connector.Acsc.Tests/StellaOps.Concelier.Connector.Acsc.Tests.csproj delete mode 100644 src/StellaOps.Concelier.Connector.CertFr.Tests/StellaOps.Concelier.Connector.CertFr.Tests.csproj delete mode 100644 src/StellaOps.Concelier.Connector.CertIn.Tests/StellaOps.Concelier.Connector.CertIn.Tests.csproj delete mode 100644 src/StellaOps.Concelier.Connector.Cve.Tests/StellaOps.Concelier.Connector.Cve.Tests.csproj delete mode 100644 src/StellaOps.Concelier.Connector.Distro.Debian.Tests/StellaOps.Concelier.Connector.Distro.Debian.Tests.csproj delete mode 100644 src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests.csproj delete mode 100644 src/StellaOps.Concelier.Connector.Distro.Suse.Tests/StellaOps.Concelier.Connector.Distro.Suse.Tests.csproj delete mode 100644 src/StellaOps.Concelier.Connector.Distro.Ubuntu.Tests/StellaOps.Concelier.Connector.Distro.Ubuntu.Tests.csproj delete mode 100644 src/StellaOps.Concelier.Connector.Ghsa.Tests/StellaOps.Concelier.Connector.Ghsa.Tests.csproj delete mode 100644 src/StellaOps.Concelier.Connector.Ics.Cisa.Tests/StellaOps.Concelier.Connector.Ics.Cisa.Tests.csproj delete mode 100644 src/StellaOps.Concelier.Connector.Ics.Kaspersky.Tests/StellaOps.Concelier.Connector.Ics.Kaspersky.Tests.csproj delete mode 100644 src/StellaOps.Concelier.Connector.Jvn.Tests/StellaOps.Concelier.Connector.Jvn.Tests.csproj delete mode 100644 src/StellaOps.Concelier.Connector.Kev.Tests/StellaOps.Concelier.Connector.Kev.Tests.csproj delete mode 100644 src/StellaOps.Concelier.Connector.Nvd.Tests/StellaOps.Concelier.Connector.Nvd.Tests.csproj delete mode 100644 src/StellaOps.Concelier.Connector.Osv.Tests/StellaOps.Concelier.Connector.Osv.Tests.csproj delete mode 100644 src/StellaOps.Concelier.Connector.Ru.Bdu.Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests.csproj delete mode 100644 src/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/StellaOps.Concelier.Connector.Ru.Nkcki.Tests.csproj delete mode 100644 src/StellaOps.Concelier.Connector.Vndr.Adobe.Tests/StellaOps.Concelier.Connector.Vndr.Adobe.Tests.csproj delete mode 100644 src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests.csproj delete mode 100644 src/StellaOps.Concelier.Connector.Vndr.Chromium.Tests/StellaOps.Concelier.Connector.Vndr.Chromium.Tests.csproj delete mode 100644 src/StellaOps.Concelier.Connector.Vndr.Cisco.Tests/StellaOps.Concelier.Connector.Vndr.Cisco.Tests.csproj delete mode 100644 
src/StellaOps.Concelier.Connector.Vndr.Msrc.Tests/StellaOps.Concelier.Connector.Vndr.Msrc.Tests.csproj delete mode 100644 src/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/StellaOps.Concelier.Connector.Vndr.Oracle.Tests.csproj delete mode 100644 src/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/StellaOps.Concelier.Connector.Vndr.Vmware.Tests.csproj delete mode 100644 src/StellaOps.Concelier.Core.Tests/StellaOps.Concelier.Core.Tests.csproj delete mode 100644 src/StellaOps.Concelier.Exporter.Json.Tests/StellaOps.Concelier.Exporter.Json.Tests.csproj delete mode 100644 src/StellaOps.Concelier.Exporter.TrivyDb.Tests/StellaOps.Concelier.Exporter.TrivyDb.Tests.csproj delete mode 100644 src/StellaOps.Concelier.Merge.Tests/StellaOps.Concelier.Merge.Tests.csproj delete mode 100644 src/StellaOps.Concelier.Normalization.Tests/StellaOps.Concelier.Normalization.Tests.csproj delete mode 100644 src/StellaOps.Concelier.Storage.Mongo.Tests/StellaOps.Concelier.Storage.Mongo.Tests.csproj delete mode 100644 src/StellaOps.Concelier.WebService.Tests/StellaOps.Concelier.WebService.Tests.csproj delete mode 100644 src/StellaOps.Concelier.WebService/StellaOps.Concelier.WebService.csproj delete mode 100644 src/StellaOps.Concelier.sln delete mode 100644 src/StellaOps.Configuration.Tests/StellaOps.Configuration.Tests.csproj delete mode 100644 src/StellaOps.Excititor.Core.Tests/StellaOps.Excititor.Core.Tests.csproj delete mode 100644 src/StellaOps.Excititor.Storage.Mongo.Tests/StellaOps.Excititor.Storage.Mongo.Tests.csproj delete mode 100644 src/StellaOps.Excititor.WebService/StellaOps.Excititor.WebService.csproj delete mode 100644 src/StellaOps.Excititor.Worker/StellaOps.Excititor.Worker.csproj delete mode 100644 src/StellaOps.Notifier/StellaOps.Notifier.Tests/xunit.runner.json delete mode 100644 src/StellaOps.Notify.Connectors.Email/TASKS.md delete mode 100644 src/StellaOps.Notify.Connectors.Slack/TASKS.md delete mode 100644 src/StellaOps.Notify.Connectors.Webhook/TASKS.md delete mode 100644 src/StellaOps.Notify.Models/TASKS.md delete mode 100644 src/StellaOps.Notify.WebService/StellaOps.Notify.WebService.csproj delete mode 100644 src/StellaOps.Notify.WebService/TASKS.md delete mode 100644 src/StellaOps.Orchestrator/StellaOps.Orchestrator.Tests/xunit.runner.json delete mode 100644 src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Tests/xunit.runner.json delete mode 100644 src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/appsettings.Development.json delete mode 100644 src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/appsettings.json delete mode 100644 src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/appsettings.Development.json delete mode 100644 src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/appsettings.json delete mode 100644 src/StellaOps.Policy.Engine/StellaOps.Policy.Engine.csproj delete mode 100644 src/StellaOps.Policy.Gateway/StellaOps.Policy.Gateway.csproj delete mode 100644 src/StellaOps.Registry.TokenService.Tests/StellaOps.Registry.TokenService.Tests.csproj delete mode 100644 src/StellaOps.Registry.TokenService.Tests/xunit.runner.json delete mode 100644 src/StellaOps.Registry.TokenService/appsettings.Development.json delete mode 100644 src/StellaOps.Registry.TokenService/appsettings.json delete mode 100644 src/StellaOps.RiskEngine/StellaOps.RiskEngine.Tests/xunit.runner.json delete mode 100644 src/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/Program.cs delete mode 100644 
src/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/appsettings.Development.json delete mode 100644 src/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/appsettings.json delete mode 100644 src/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/appsettings.Development.json delete mode 100644 src/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/appsettings.json delete mode 100644 src/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/app/node_modules/left-pad/package.json delete mode 100644 src/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/app/node_modules/lib/package.json delete mode 100644 src/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/app/node_modules/shared/package.json delete mode 100644 src/StellaOps.Scanner.Core.Tests/StellaOps.Scanner.Core.Tests.csproj delete mode 100644 src/StellaOps.Scanner.WebService/StellaOps.Scanner.WebService.csproj delete mode 100644 src/StellaOps.Scheduler.WebService/StellaOps.Scheduler.WebService.csproj delete mode 100644 src/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/xunit.runner.json delete mode 100644 src/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/Program.cs delete mode 100644 src/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/appsettings.Development.json delete mode 100644 src/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/appsettings.json delete mode 100644 src/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/appsettings.Development.json delete mode 100644 src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Tests/xunit.runner.json delete mode 100644 src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/Program.cs delete mode 100644 src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/appsettings.Development.json delete mode 100644 src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/appsettings.json delete mode 100644 src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/appsettings.Development.json delete mode 100644 src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/appsettings.json delete mode 100644 src/StellaOps.Zastava.Core.Tests/StellaOps.Zastava.Core.Tests.csproj create mode 100644 src/TaskRunner/StellaOps.TaskRunner.sln rename src/{ => TaskRunner}/StellaOps.TaskRunner/AGENTS.md (98%) rename src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/IPackRunApprovalStore.cs (97%) rename src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/IPackRunJobDispatcher.cs (96%) rename src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/IPackRunNotificationPublisher.cs (97%) rename src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunApprovalCoordinator.cs (97%) rename src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunApprovalState.cs (97%) rename src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunApprovalStatus.cs (94%) rename src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunExecutionContext.cs (96%) rename src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunProcessor.cs (97%) rename src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunProcessorResult.cs (97%) rename src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Expressions/TaskPackExpressions.cs (96%) rename 
src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Planning/TaskPackPlan.cs (96%) rename src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Planning/TaskPackPlanHasher.cs (97%) rename src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Planning/TaskPackPlanInsights.cs (96%) rename src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Planning/TaskPackPlanner.cs (97%) rename src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Serialization/CanonicalJson.cs (96%) rename src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/StellaOps.TaskRunner.Core.csproj (96%) rename src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/TaskPacks/TaskPackManifest.cs (96%) rename src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/TaskPacks/TaskPackManifestLoader.cs (96%) rename src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/TaskPacks/TaskPackManifestValidator.cs (97%) rename src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/FilePackRunApprovalStore.cs (97%) rename src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/FilesystemPackRunDispatcher.cs (97%) rename src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/HttpPackRunNotificationPublisher.cs (97%) rename src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/LoggingPackRunNotificationPublisher.cs (97%) rename src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/NoopPackRunJobDispatcher.cs (97%) rename src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/NotificationOptions.cs (96%) rename src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/StellaOps.TaskRunner.Infrastructure.csproj (95%) rename src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/PackRunApprovalCoordinatorTests.cs (97%) rename src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/PackRunProcessorTests.cs (97%) rename src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/StellaOps.TaskRunner.Tests.csproj (91%) rename src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/TaskPackPlannerTests.cs (97%) rename src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/TestManifests.cs (95%) create mode 100644 src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/xunit.runner.json create mode 100644 src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/Program.cs rename src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/Properties/launchSettings.json (96%) rename src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/StellaOps.TaskRunner.WebService.csproj (94%) rename src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/StellaOps.TaskRunner.WebService.http (96%) create mode 100644 src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/appsettings.Development.json create mode 100644 src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/appsettings.json rename src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/Program.cs (97%) rename src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/Properties/launchSettings.json (95%) rename src/{ => 
TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/Services/PackRunWorkerOptions.cs (97%) rename src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/Services/PackRunWorkerService.cs (97%) rename src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/StellaOps.TaskRunner.Worker.csproj (95%) create mode 100644 src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/appsettings.Development.json rename src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/appsettings.json (95%) rename src/{ => TaskRunner}/StellaOps.TaskRunner/StellaOps.TaskRunner.sln (98%) rename src/{ => TaskRunner}/StellaOps.TaskRunner/TASKS.md (99%) rename src/{ => Telemetry}/StellaOps.Telemetry.Core/AGENTS.md (98%) rename src/{ => Telemetry}/StellaOps.Telemetry.Core/TASKS.md (99%) create mode 100644 src/TimelineIndexer/StellaOps.TimelineIndexer.sln rename src/{ => TimelineIndexer}/StellaOps.TimelineIndexer/AGENTS.md (98%) rename src/{ => TimelineIndexer}/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Core/Class1.cs (92%) rename src/{ => TimelineIndexer}/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Core/StellaOps.TimelineIndexer.Core.csproj (95%) rename src/{ => TimelineIndexer}/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Infrastructure/Class1.cs (93%) rename src/{ => TimelineIndexer}/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Infrastructure/StellaOps.TimelineIndexer.Infrastructure.csproj (94%) rename src/{ => TimelineIndexer}/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Tests/StellaOps.TimelineIndexer.Tests.csproj (91%) rename src/{ => TimelineIndexer}/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Tests/UnitTest1.cs (92%) create mode 100644 src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Tests/xunit.runner.json create mode 100644 src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/Program.cs rename src/{ => TimelineIndexer}/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/Properties/launchSettings.json (96%) rename src/{ => TimelineIndexer}/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/StellaOps.TimelineIndexer.WebService.csproj (95%) rename src/{ => TimelineIndexer}/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/StellaOps.TimelineIndexer.WebService.http (96%) create mode 100644 src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/appsettings.Development.json create mode 100644 src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/appsettings.json rename src/{ => TimelineIndexer}/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/Program.cs (96%) rename src/{ => TimelineIndexer}/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/Properties/launchSettings.json (96%) rename src/{ => TimelineIndexer}/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/StellaOps.TimelineIndexer.Worker.csproj (95%) rename src/{ => TimelineIndexer}/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/Worker.cs (96%) create mode 100644 src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/appsettings.Development.json create mode 100644 src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/appsettings.json rename src/{ => TimelineIndexer}/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.sln (98%) rename src/{ => TimelineIndexer}/StellaOps.TimelineIndexer/TASKS.md (99%) rename src/{ => 
UI}/StellaOps.UI/TASKS.md (99%) rename src/{ => VexLens}/StellaOps.VexLens/AGENTS.md (88%) rename src/{ => VexLens}/StellaOps.VexLens/TASKS.md (99%) rename src/{ => VulnExplorer}/StellaOps.VulnExplorer.Api/AGENTS.md (87%) rename src/{ => VulnExplorer}/StellaOps.VulnExplorer.Api/TASKS.md (99%) rename src/{ => Web}/StellaOps.Web/.editorconfig (100%) rename src/{ => Web}/StellaOps.Web/.gitignore (88%) rename src/{ => Web}/StellaOps.Web/AGENTS.md (93%) rename src/{ => Web}/StellaOps.Web/README.md (100%) rename src/{ => Web}/StellaOps.Web/TASKS.md (99%) rename src/{ => Web}/StellaOps.Web/angular.json (100%) rename src/{ => Web}/StellaOps.Web/docs/DeterministicInstall.md (96%) rename src/{ => Web}/StellaOps.Web/docs/TrivyDbSettings.md (87%) rename src/{ => Web}/StellaOps.Web/karma.conf.cjs (96%) rename src/{ => Web}/StellaOps.Web/package-lock.json (96%) rename src/{ => Web}/StellaOps.Web/package.json (100%) rename src/{ => Web}/StellaOps.Web/playwright.config.ts (96%) rename src/{ => Web}/StellaOps.Web/scripts/chrome-path.js (95%) rename src/{ => Web}/StellaOps.Web/scripts/verify-chromium.js (96%) rename src/{ => Web}/StellaOps.Web/src/app/app.component.html (97%) rename src/{ => Web}/StellaOps.Web/src/app/app.component.scss (95%) rename src/{ => Web}/StellaOps.Web/src/app/app.component.spec.ts (97%) rename src/{ => Web}/StellaOps.Web/src/app/app.component.ts (96%) rename src/{ => Web}/StellaOps.Web/src/app/app.config.ts (96%) rename src/{ => Web}/StellaOps.Web/src/app/app.routes.ts (95%) rename src/{ => Web}/StellaOps.Web/src/app/core/api/authority-console.client.ts (96%) rename src/{ => Web}/StellaOps.Web/src/app/core/api/concelier-exporter.client.ts (96%) rename src/{ => Web}/StellaOps.Web/src/app/core/api/notify.client.ts (96%) rename src/{ => Web}/StellaOps.Web/src/app/core/api/notify.models.ts (96%) rename src/{ => Web}/StellaOps.Web/src/app/core/api/policy-preview.models.ts (100%) rename src/{ => Web}/StellaOps.Web/src/app/core/api/scanner.models.ts (96%) rename src/{ => Web}/StellaOps.Web/src/app/core/auth/auth-http.interceptor.ts (96%) rename src/{ => Web}/StellaOps.Web/src/app/core/auth/auth-session.model.ts (96%) rename src/{ => Web}/StellaOps.Web/src/app/core/auth/auth-session.store.spec.ts (96%) rename src/{ => Web}/StellaOps.Web/src/app/core/auth/auth-session.store.ts (96%) rename src/{ => Web}/StellaOps.Web/src/app/core/auth/auth-storage.service.ts (96%) rename src/{ => Web}/StellaOps.Web/src/app/core/auth/authority-auth.service.ts (96%) rename src/{ => Web}/StellaOps.Web/src/app/core/auth/dpop/dpop-key-store.ts (96%) rename src/{ => Web}/StellaOps.Web/src/app/core/auth/dpop/dpop.service.spec.ts (97%) rename src/{ => Web}/StellaOps.Web/src/app/core/auth/dpop/dpop.service.ts (96%) rename src/{ => Web}/StellaOps.Web/src/app/core/auth/dpop/jose-utilities.ts (96%) rename src/{ => Web}/StellaOps.Web/src/app/core/auth/pkce.util.ts (96%) rename src/{ => Web}/StellaOps.Web/src/app/core/config/app-config.model.ts (96%) rename src/{ => Web}/StellaOps.Web/src/app/core/config/app-config.service.ts (96%) rename src/{ => Web}/StellaOps.Web/src/app/core/console/console-session.service.spec.ts (96%) rename src/{ => Web}/StellaOps.Web/src/app/core/console/console-session.service.ts (96%) rename src/{ => Web}/StellaOps.Web/src/app/core/console/console-session.store.spec.ts (96%) rename src/{ => Web}/StellaOps.Web/src/app/core/console/console-session.store.ts (96%) rename src/{ => Web}/StellaOps.Web/src/app/core/orchestrator/operator-context.service.ts (96%) rename src/{ => 
Web}/StellaOps.Web/src/app/core/orchestrator/operator-metadata.interceptor.ts (96%) rename src/{ => Web}/StellaOps.Web/src/app/features/auth/auth-callback.component.ts (96%) rename src/{ => Web}/StellaOps.Web/src/app/features/console/console-profile.component.html (96%) rename src/{ => Web}/StellaOps.Web/src/app/features/console/console-profile.component.scss (94%) rename src/{ => Web}/StellaOps.Web/src/app/features/console/console-profile.component.spec.ts (96%) rename src/{ => Web}/StellaOps.Web/src/app/features/console/console-profile.component.ts (96%) rename src/{ => Web}/StellaOps.Web/src/app/features/notify/notify-panel.component.html (97%) rename src/{ => Web}/StellaOps.Web/src/app/features/notify/notify-panel.component.scss (94%) rename src/{ => Web}/StellaOps.Web/src/app/features/notify/notify-panel.component.spec.ts (96%) rename src/{ => Web}/StellaOps.Web/src/app/features/notify/notify-panel.component.ts (96%) rename src/{ => Web}/StellaOps.Web/src/app/features/scans/scan-attestation-panel.component.html (96%) rename src/{ => Web}/StellaOps.Web/src/app/features/scans/scan-attestation-panel.component.scss (94%) rename src/{ => Web}/StellaOps.Web/src/app/features/scans/scan-attestation-panel.component.spec.ts (97%) rename src/{ => Web}/StellaOps.Web/src/app/features/scans/scan-attestation-panel.component.ts (96%) rename src/{ => Web}/StellaOps.Web/src/app/features/scans/scan-detail-page.component.html (96%) rename src/{ => Web}/StellaOps.Web/src/app/features/scans/scan-detail-page.component.scss (94%) rename src/{ => Web}/StellaOps.Web/src/app/features/scans/scan-detail-page.component.spec.ts (97%) rename src/{ => Web}/StellaOps.Web/src/app/features/scans/scan-detail-page.component.ts (96%) rename src/{ => Web}/StellaOps.Web/src/app/features/trivy-db-settings/trivy-db-settings-page.component.html (96%) rename src/{ => Web}/StellaOps.Web/src/app/features/trivy-db-settings/trivy-db-settings-page.component.scss (94%) rename src/{ => Web}/StellaOps.Web/src/app/features/trivy-db-settings/trivy-db-settings-page.component.spec.ts (96%) rename src/{ => Web}/StellaOps.Web/src/app/features/trivy-db-settings/trivy-db-settings-page.component.ts (96%) rename src/{ => Web}/StellaOps.Web/src/app/testing/mock-notify-api.service.ts (96%) rename src/{ => Web}/StellaOps.Web/src/app/testing/notify-fixtures.ts (96%) rename src/{ => Web}/StellaOps.Web/src/app/testing/policy-fixtures.spec.ts (97%) rename src/{ => Web}/StellaOps.Web/src/app/testing/policy-fixtures.ts (96%) rename src/{ => Web}/StellaOps.Web/src/app/testing/scan-fixtures.ts (97%) rename src/{ => Web}/StellaOps.Web/src/assets/.gitkeep (100%) rename src/{ => Web}/StellaOps.Web/src/config/config.json (97%) rename src/{ => Web}/StellaOps.Web/src/config/config.sample.json (97%) rename src/{ => Web}/StellaOps.Web/src/favicon.ico (100%) rename src/{ => Web}/StellaOps.Web/src/index.html (100%) rename src/{ => Web}/StellaOps.Web/src/main.ts (100%) rename src/{ => Web}/StellaOps.Web/src/styles.scss (100%) rename src/{ => Web}/StellaOps.Web/test-results/.last-run.json (93%) rename src/{ => Web}/StellaOps.Web/tests/e2e/auth.spec.ts (97%) rename src/{ => Web}/StellaOps.Web/tsconfig.app.json (100%) rename src/{ => Web}/StellaOps.Web/tsconfig.json (100%) rename src/{ => Web}/StellaOps.Web/tsconfig.spec.json (100%) rename src/{ => Zastava}/StellaOps.Zastava.Observer/Backend/IRuntimePolicyClient.cs (100%) rename src/{ => Zastava}/StellaOps.Zastava.Observer/Backend/RuntimeEventsClient.cs (100%) rename src/{ => 
Zastava}/StellaOps.Zastava.Observer/Backend/RuntimePolicyClient.cs (100%) rename src/{ => Zastava}/StellaOps.Zastava.Observer/Backend/RuntimePolicyContracts.cs (100%) rename src/{ => Zastava}/StellaOps.Zastava.Observer/Backend/RuntimePolicyException.cs (100%) rename src/{ => Zastava}/StellaOps.Zastava.Observer/Configuration/ZastavaObserverOptions.cs (98%) rename src/{ => Zastava}/StellaOps.Zastava.Observer/ContainerRuntime/ContainerStateTracker.cs (97%) rename src/{ => Zastava}/StellaOps.Zastava.Observer/ContainerRuntime/ContainerStateTrackerFactory.cs (100%) rename src/{ => Zastava}/StellaOps.Zastava.Observer/ContainerRuntime/Cri/CriConversions.cs (98%) rename src/{ => Zastava}/StellaOps.Zastava.Observer/ContainerRuntime/Cri/CriModels.cs (97%) rename src/{ => Zastava}/StellaOps.Zastava.Observer/ContainerRuntime/Cri/CriRuntimeClient.cs (97%) rename src/{ => Zastava}/StellaOps.Zastava.Observer/ContainerRuntime/Cri/CriRuntimeClientFactory.cs (97%) rename src/{ => Zastava}/StellaOps.Zastava.Observer/DependencyInjection/ObserverServiceCollectionExtensions.cs (100%) rename src/{ => Zastava}/StellaOps.Zastava.Observer/Posture/IRuntimePostureCache.cs (100%) rename src/{ => Zastava}/StellaOps.Zastava.Observer/Posture/IRuntimePostureEvaluator.cs (100%) rename src/{ => Zastava}/StellaOps.Zastava.Observer/Posture/RuntimePostureCache.cs (100%) rename src/{ => Zastava}/StellaOps.Zastava.Observer/Posture/RuntimePostureCacheEntry.cs (100%) rename src/{ => Zastava}/StellaOps.Zastava.Observer/Posture/RuntimePostureEvaluationResult.cs (100%) rename src/{ => Zastava}/StellaOps.Zastava.Observer/Posture/RuntimePostureEvaluator.cs (100%) rename src/{ => Zastava}/StellaOps.Zastava.Observer/Program.cs (98%) rename src/{ => Zastava}/StellaOps.Zastava.Observer/Properties/AssemblyInfo.cs (100%) rename src/{ => Zastava}/StellaOps.Zastava.Observer/Protos/runtime/v1/runtime.proto (97%) rename src/{ => Zastava}/StellaOps.Zastava.Observer/Runtime/ElfBuildIdReader.cs (100%) rename src/{ => Zastava}/StellaOps.Zastava.Observer/Runtime/RuntimeEventBuffer.cs (100%) rename src/{ => Zastava}/StellaOps.Zastava.Observer/Runtime/RuntimeProcessCollector.cs (100%) rename src/{ => Zastava}/StellaOps.Zastava.Observer/StellaOps.Zastava.Observer.csproj (97%) rename src/{ => Zastava}/StellaOps.Zastava.Observer/TASKS.md (100%) rename src/{ => Zastava}/StellaOps.Zastava.Observer/Worker/BackoffCalculator.cs (100%) rename src/{ => Zastava}/StellaOps.Zastava.Observer/Worker/ContainerLifecycleHostedService.cs (100%) rename src/{ => Zastava}/StellaOps.Zastava.Observer/Worker/ContainerRuntimePoller.cs (100%) rename src/{ => Zastava}/StellaOps.Zastava.Observer/Worker/ObserverBootstrapService.cs (97%) rename src/{ => Zastava}/StellaOps.Zastava.Observer/Worker/RuntimeEventDispatchService.cs (100%) rename src/{ => Zastava}/StellaOps.Zastava.Observer/Worker/RuntimeEventFactory.cs (100%) rename src/{ => Zastava}/StellaOps.Zastava.Webhook/Admission/AdmissionEndpoint.cs (100%) rename src/{ => Zastava}/StellaOps.Zastava.Webhook/Admission/AdmissionRequestContext.cs (100%) rename src/{ => Zastava}/StellaOps.Zastava.Webhook/Admission/AdmissionResponseBuilder.cs (100%) rename src/{ => Zastava}/StellaOps.Zastava.Webhook/Admission/AdmissionReviewModels.cs (100%) rename src/{ => Zastava}/StellaOps.Zastava.Webhook/Admission/AdmissionReviewParser.cs (100%) rename src/{ => Zastava}/StellaOps.Zastava.Webhook/Admission/ImageDigestResolver.cs (100%) rename src/{ => Zastava}/StellaOps.Zastava.Webhook/Admission/RuntimeAdmissionPolicyService.cs (100%) rename src/{ => 
Zastava}/StellaOps.Zastava.Webhook/Admission/RuntimePolicyCache.cs (100%) rename src/{ => Zastava}/StellaOps.Zastava.Webhook/Authority/AuthorityTokenProvider.cs (97%) rename src/{ => Zastava}/StellaOps.Zastava.Webhook/Backend/IRuntimePolicyClient.cs (96%) rename src/{ => Zastava}/StellaOps.Zastava.Webhook/Backend/RuntimePolicyClient.cs (97%) rename src/{ => Zastava}/StellaOps.Zastava.Webhook/Backend/RuntimePolicyException.cs (96%) rename src/{ => Zastava}/StellaOps.Zastava.Webhook/Backend/RuntimePolicyRequest.cs (96%) rename src/{ => Zastava}/StellaOps.Zastava.Webhook/Backend/RuntimePolicyResponse.cs (97%) rename src/{ => Zastava}/StellaOps.Zastava.Webhook/Certificates/CsrCertificateSource.cs (100%) rename src/{ => Zastava}/StellaOps.Zastava.Webhook/Certificates/IWebhookCertificateProvider.cs (100%) rename src/{ => Zastava}/StellaOps.Zastava.Webhook/Certificates/SecretFileCertificateSource.cs (100%) rename src/{ => Zastava}/StellaOps.Zastava.Webhook/Certificates/WebhookCertificateHealthCheck.cs (100%) rename src/{ => Zastava}/StellaOps.Zastava.Webhook/Configuration/ZastavaWebhookOptions.cs (100%) rename src/{ => Zastava}/StellaOps.Zastava.Webhook/DependencyInjection/ServiceCollectionExtensions.cs (100%) rename src/{ => Zastava}/StellaOps.Zastava.Webhook/DependencyInjection/WebhookRuntimeOptionsPostConfigure.cs (97%) rename src/{ => Zastava}/StellaOps.Zastava.Webhook/Hosting/StartupValidationHostedService.cs (100%) rename src/{ => Zastava}/StellaOps.Zastava.Webhook/IMPLEMENTATION_PLAN.md (100%) rename src/{ => Zastava}/StellaOps.Zastava.Webhook/Program.cs (100%) rename src/{ => Zastava}/StellaOps.Zastava.Webhook/Properties/AssemblyInfo.cs (97%) rename src/{ => Zastava}/StellaOps.Zastava.Webhook/StellaOps.Zastava.Webhook.csproj (81%) rename src/{ => Zastava}/StellaOps.Zastava.Webhook/TASKS.md (100%) create mode 100644 src/Zastava/StellaOps.Zastava.sln rename src/{ => Zastava/__Libraries}/StellaOps.Zastava.Core/Configuration/ZastavaAuthorityOptions.cs (96%) rename src/{ => Zastava/__Libraries}/StellaOps.Zastava.Core/Configuration/ZastavaRuntimeOptions.cs (96%) rename src/{ => Zastava/__Libraries}/StellaOps.Zastava.Core/Contracts/AdmissionDecision.cs (100%) rename src/{ => Zastava/__Libraries}/StellaOps.Zastava.Core/Contracts/RuntimeEvent.cs (100%) rename src/{ => Zastava/__Libraries}/StellaOps.Zastava.Core/Contracts/ZastavaContractVersions.cs (100%) rename src/{ => Zastava/__Libraries}/StellaOps.Zastava.Core/DependencyInjection/ZastavaServiceCollectionExtensions.cs (97%) rename src/{ => Zastava/__Libraries}/StellaOps.Zastava.Core/Diagnostics/ZastavaLogScopeBuilder.cs (96%) rename src/{ => Zastava/__Libraries}/StellaOps.Zastava.Core/Diagnostics/ZastavaLoggerFactoryOptionsConfigurator.cs (97%) rename src/{ => Zastava/__Libraries}/StellaOps.Zastava.Core/Diagnostics/ZastavaRuntimeMetrics.cs (97%) rename src/{ => Zastava/__Libraries}/StellaOps.Zastava.Core/GlobalUsings.cs (100%) rename src/{ => Zastava/__Libraries}/StellaOps.Zastava.Core/Hashing/ZastavaHashing.cs (100%) rename src/{ => Zastava/__Libraries}/StellaOps.Zastava.Core/Properties/AssemblyInfo.cs (97%) rename src/{ => Zastava/__Libraries}/StellaOps.Zastava.Core/Security/IZastavaAuthorityTokenProvider.cs (96%) rename src/{ => Zastava/__Libraries}/StellaOps.Zastava.Core/Security/ZastavaAuthorityTokenProvider.cs (97%) rename src/{ => Zastava/__Libraries}/StellaOps.Zastava.Core/Security/ZastavaOperationalToken.cs (96%) rename src/{ => Zastava/__Libraries}/StellaOps.Zastava.Core/Serialization/ZastavaCanonicalJsonSerializer.cs (100%) rename 
src/{ => Zastava/__Libraries}/StellaOps.Zastava.Core/StellaOps.Zastava.Core.csproj (75%) rename src/{ => Zastava/__Libraries}/StellaOps.Zastava.Core/TASKS.md (100%) rename src/{ => Zastava/__Tests}/StellaOps.Zastava.Core.Tests/Contracts/ZastavaContractVersionsTests.cs (100%) rename src/{ => Zastava/__Tests}/StellaOps.Zastava.Core.Tests/DependencyInjection/ZastavaServiceCollectionExtensionsTests.cs (97%) rename src/{ => Zastava/__Tests}/StellaOps.Zastava.Core.Tests/Security/ZastavaAuthorityTokenProviderTests.cs (97%) rename src/{ => Zastava/__Tests}/StellaOps.Zastava.Core.Tests/Serialization/ZastavaCanonicalJsonSerializerTests.cs (97%) create mode 100644 src/Zastava/__Tests/StellaOps.Zastava.Core.Tests/StellaOps.Zastava.Core.Tests.csproj rename src/{ => Zastava/__Tests}/StellaOps.Zastava.Observer.Tests/ContainerRuntimePollerTests.cs (100%) rename src/{ => Zastava/__Tests}/StellaOps.Zastava.Observer.Tests/Posture/RuntimePostureEvaluatorTests.cs (100%) rename src/{ => Zastava/__Tests}/StellaOps.Zastava.Observer.Tests/Runtime/ElfBuildIdReaderTests.cs (100%) rename src/{ => Zastava/__Tests}/StellaOps.Zastava.Observer.Tests/Runtime/RuntimeEventBufferTests.cs (100%) rename src/{ => Zastava/__Tests}/StellaOps.Zastava.Observer.Tests/Runtime/RuntimeProcessCollectorTests.cs (100%) rename src/{ => Zastava/__Tests}/StellaOps.Zastava.Observer.Tests/StellaOps.Zastava.Observer.Tests.csproj (66%) rename src/{ => Zastava/__Tests}/StellaOps.Zastava.Observer.Tests/TestSupport/ElfTestFileBuilder.cs (100%) rename src/{ => Zastava/__Tests}/StellaOps.Zastava.Observer.Tests/Worker/RuntimeEventFactoryTests.cs (97%) rename src/{ => Zastava/__Tests}/StellaOps.Zastava.Webhook.Tests/Admission/AdmissionResponseBuilderTests.cs (100%) rename src/{ => Zastava/__Tests}/StellaOps.Zastava.Webhook.Tests/Admission/AdmissionReviewParserTests.cs (100%) rename src/{ => Zastava/__Tests}/StellaOps.Zastava.Webhook.Tests/Admission/RuntimeAdmissionPolicyServiceTests.cs (100%) rename src/{ => Zastava/__Tests}/StellaOps.Zastava.Webhook.Tests/Backend/RuntimePolicyClientTests.cs (97%) rename src/{ => Zastava/__Tests}/StellaOps.Zastava.Webhook.Tests/Certificates/SecretFileCertificateSourceTests.cs (100%) rename src/{ => Zastava/__Tests}/StellaOps.Zastava.Webhook.Tests/Certificates/WebhookCertificateProviderTests.cs (100%) rename src/{ => Zastava/__Tests}/StellaOps.Zastava.Webhook.Tests/StellaOps.Zastava.Webhook.Tests.csproj (81%) rename src/{ => __Libraries}/StellaOps.Auth.Security/Dpop/DpopNonceConsumeResult.cs (100%) rename src/{ => __Libraries}/StellaOps.Auth.Security/Dpop/DpopNonceIssueResult.cs (100%) rename src/{ => __Libraries}/StellaOps.Auth.Security/Dpop/DpopNonceUtilities.cs (100%) rename src/{ => __Libraries}/StellaOps.Auth.Security/Dpop/DpopProofValidator.cs (100%) rename src/{ => __Libraries}/StellaOps.Auth.Security/Dpop/DpopValidationOptions.cs (100%) rename src/{ => __Libraries}/StellaOps.Auth.Security/Dpop/DpopValidationResult.cs (100%) rename src/{ => __Libraries}/StellaOps.Auth.Security/Dpop/IDpopNonceStore.cs (100%) rename src/{ => __Libraries}/StellaOps.Auth.Security/Dpop/IDpopProofValidator.cs (100%) rename src/{ => __Libraries}/StellaOps.Auth.Security/Dpop/IDpopReplayCache.cs (100%) rename src/{ => __Libraries}/StellaOps.Auth.Security/Dpop/InMemoryDpopNonceStore.cs (100%) rename src/{ => __Libraries}/StellaOps.Auth.Security/Dpop/InMemoryDpopReplayCache.cs (100%) rename src/{ => __Libraries}/StellaOps.Auth.Security/Dpop/RedisDpopNonceStore.cs (100%) rename src/{ => __Libraries}/StellaOps.Auth.Security/README.md (100%) 
rename src/{ => __Libraries}/StellaOps.Auth.Security/StellaOps.Auth.Security.csproj (97%) rename src/{ => __Libraries}/StellaOps.Configuration/AuthorityConfigurationDiagnostic.cs (100%) rename src/{ => __Libraries}/StellaOps.Configuration/AuthorityPluginConfigurationAnalyzer.cs (100%) rename src/{ => __Libraries}/StellaOps.Configuration/AuthorityPluginConfigurationLoader.cs (100%) rename src/{ => __Libraries}/StellaOps.Configuration/AuthoritySigningAdditionalKeyOptions.cs (100%) rename src/{ => __Libraries}/StellaOps.Configuration/AuthoritySigningOptions.cs (100%) rename src/{ => __Libraries}/StellaOps.Configuration/StellaOps.Configuration.csproj (80%) rename src/{ => __Libraries}/StellaOps.Configuration/StellaOpsAuthorityConfiguration.cs (100%) rename src/{ => __Libraries}/StellaOps.Configuration/StellaOpsAuthorityOptions.cs (100%) rename src/{ => __Libraries}/StellaOps.Configuration/StellaOpsBootstrapOptions.cs (100%) rename src/{ => __Libraries}/StellaOps.Configuration/StellaOpsConfigurationBootstrapper.cs (100%) rename src/{ => __Libraries}/StellaOps.Configuration/StellaOpsConfigurationContext.cs (100%) rename src/{ => __Libraries}/StellaOps.Configuration/StellaOpsConfigurationOptions.cs (100%) rename src/{ => __Libraries}/StellaOps.Configuration/StellaOpsOptionsBinder.cs (100%) rename src/{ => __Libraries}/StellaOps.Cryptography.DependencyInjection/CryptoProviderRegistryOptions.cs (100%) rename src/{ => __Libraries}/StellaOps.Cryptography.DependencyInjection/CryptoServiceCollectionExtensions.cs (100%) rename src/{ => __Libraries}/StellaOps.Cryptography.DependencyInjection/StellaOps.Cryptography.DependencyInjection.csproj (97%) rename src/{ => __Libraries}/StellaOps.Cryptography.Kms/AGENTS.md (97%) rename src/{ => __Libraries}/StellaOps.Cryptography.Kms/TASKS.md (99%) rename src/{ => __Libraries}/StellaOps.Cryptography.Plugin.BouncyCastle/BouncyCastleCryptoServiceCollectionExtensions.cs (100%) rename src/{ => __Libraries}/StellaOps.Cryptography.Plugin.BouncyCastle/BouncyCastleEd25519CryptoProvider.cs (100%) rename src/{ => __Libraries}/StellaOps.Cryptography.Plugin.BouncyCastle/StellaOps.Cryptography.Plugin.BouncyCastle.csproj (97%) rename src/{ => __Libraries}/StellaOps.Cryptography/AGENTS.md (83%) rename src/{ => __Libraries}/StellaOps.Cryptography/Argon2idPasswordHasher.Konscious.cs (100%) rename src/{ => __Libraries}/StellaOps.Cryptography/Argon2idPasswordHasher.Sodium.cs (100%) rename src/{ => __Libraries}/StellaOps.Cryptography/Argon2idPasswordHasher.cs (100%) rename src/{ => __Libraries}/StellaOps.Cryptography/Audit/AuthEventRecord.cs (96%) rename src/{ => __Libraries}/StellaOps.Cryptography/CryptoProvider.cs (100%) rename src/{ => __Libraries}/StellaOps.Cryptography/CryptoProviderRegistry.cs (100%) rename src/{ => __Libraries}/StellaOps.Cryptography/CryptoSigningKey.cs (100%) rename src/{ => __Libraries}/StellaOps.Cryptography/DefaultCryptoProvider.cs (100%) rename src/{ => __Libraries}/StellaOps.Cryptography/EcdsaSigner.cs (100%) rename src/{ => __Libraries}/StellaOps.Cryptography/ICryptoSigner.cs (100%) rename src/{ => __Libraries}/StellaOps.Cryptography/LibsodiumCryptoProvider.cs (100%) rename src/{ => __Libraries}/StellaOps.Cryptography/PasswordHashAlgorithms.cs (100%) rename src/{ => __Libraries}/StellaOps.Cryptography/PasswordHashing.cs (100%) rename src/{ => __Libraries}/StellaOps.Cryptography/Pbkdf2PasswordHasher.cs (100%) rename src/{ => __Libraries}/StellaOps.Cryptography/SignatureAlgorithms.cs (100%) rename src/{ => 
__Libraries}/StellaOps.Cryptography/StellaOps.Cryptography.csproj (97%) rename src/{ => __Libraries}/StellaOps.Cryptography/TASKS.md (100%) rename src/{ => __Libraries}/StellaOps.DependencyInjection/IDependencyInjectionRoutine.cs (100%) rename src/{ => __Libraries}/StellaOps.DependencyInjection/ServiceBindingAttribute.cs (100%) rename src/{ => __Libraries}/StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj (97%) rename src/{ => __Libraries}/StellaOps.Plugin/DependencyInjection/PluginDependencyInjectionExtensions.cs (100%) rename src/{ => __Libraries}/StellaOps.Plugin/DependencyInjection/PluginServiceRegistration.cs (100%) rename src/{ => __Libraries}/StellaOps.Plugin/DependencyInjection/StellaOpsPluginRegistration.cs (100%) rename src/{ => __Libraries}/StellaOps.Plugin/Hosting/PluginAssembly.cs (100%) rename src/{ => __Libraries}/StellaOps.Plugin/Hosting/PluginHost.cs (100%) rename src/{ => __Libraries}/StellaOps.Plugin/Hosting/PluginHostOptions.cs (100%) rename src/{ => __Libraries}/StellaOps.Plugin/Hosting/PluginHostResult.cs (100%) rename src/{ => __Libraries}/StellaOps.Plugin/Hosting/PluginLoadContext.cs (100%) rename src/{ => __Libraries}/StellaOps.Plugin/Internal/ReflectionExtensions.cs (100%) rename src/{ => __Libraries}/StellaOps.Plugin/PluginContracts.cs (100%) rename src/{ => __Libraries}/StellaOps.Plugin/Properties/AssemblyInfo.cs (100%) rename src/{ => __Libraries}/StellaOps.Plugin/StellaOps.Plugin.csproj (80%) rename src/{ => __Libraries}/StellaOps.Plugin/TASKS.md (87%) rename src/{ => __Libraries/__Tests}/StellaOps.Configuration.Tests/AuthorityPluginConfigurationLoaderTests.cs (100%) rename src/{ => __Libraries/__Tests}/StellaOps.Configuration.Tests/AuthorityTelemetryTests.cs (100%) create mode 100644 src/__Libraries/__Tests/StellaOps.Configuration.Tests/StellaOps.Configuration.Tests.csproj rename src/{ => __Libraries/__Tests}/StellaOps.Configuration.Tests/StellaOpsAuthorityOptionsTests.cs (97%) rename src/{ => __Libraries/__Tests}/StellaOps.Cryptography.Tests/Argon2idPasswordHasherTests.cs (100%) rename src/{ => __Libraries/__Tests}/StellaOps.Cryptography.Tests/Audit/AuthEventRecordTests.cs (96%) rename src/{ => __Libraries/__Tests}/StellaOps.Cryptography.Tests/BouncyCastleEd25519CryptoProviderTests.cs (100%) rename src/{ => __Libraries/__Tests}/StellaOps.Cryptography.Tests/CryptoProviderRegistryTests.cs (100%) rename src/{ => __Libraries/__Tests}/StellaOps.Cryptography.Tests/DefaultCryptoProviderSigningTests.cs (100%) rename src/{ => __Libraries/__Tests}/StellaOps.Cryptography.Tests/LibsodiumCryptoProviderTests.cs (100%) rename src/{ => __Libraries/__Tests}/StellaOps.Cryptography.Tests/PasswordHashOptionsTests.cs (100%) rename src/{ => __Libraries/__Tests}/StellaOps.Cryptography.Tests/Pbkdf2PasswordHasherTests.cs (100%) rename src/{ => __Libraries/__Tests}/StellaOps.Cryptography.Tests/StellaOps.Cryptography.Tests.csproj (50%) rename src/{ => __Libraries/__Tests}/StellaOps.Plugin.Tests/DependencyInjection/PluginDependencyInjectionExtensionsTests.cs (100%) rename src/{ => __Libraries/__Tests}/StellaOps.Plugin.Tests/DependencyInjection/PluginServiceRegistrationTests.cs (100%) rename src/{ => __Libraries/__Tests}/StellaOps.Plugin.Tests/StellaOps.Plugin.Tests.csproj (78%) rename src/{ => __Libraries/__Tests}/StellaOps.Signals.Tests/CallgraphIngestionTests.cs (97%) rename src/{ => __Libraries/__Tests}/StellaOps.Signals.Tests/SignalsApiTests.cs (96%) rename src/{ => __Libraries/__Tests}/StellaOps.Signals.Tests/StellaOps.Signals.Tests.csproj (83%) rename src/{ => 
__Libraries/__Tests}/StellaOps.Signals.Tests/TestInfrastructure/SignalsTestFactory.cs (96%) diff --git a/.gitattributes b/.gitattributes index f7bffe5c..491baff4 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,2 +1,2 @@ # Ensure analyzer fixture assets keep LF endings for deterministic hashes -src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/** text eol=lf +src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/** text eol=lf diff --git a/.gitea/workflows/_deprecated-concelier-ci.yml.disabled b/.gitea/workflows/_deprecated-concelier-ci.yml.disabled index 781a3c85..faf0f864 100644 --- a/.gitea/workflows/_deprecated-concelier-ci.yml.disabled +++ b/.gitea/workflows/_deprecated-concelier-ci.yml.disabled @@ -19,11 +19,11 @@ jobs: dotnet-version: 10.0.100-rc.1.25451.107 include-prerelease: true - - name: Restore dependencies - run: dotnet restore src/StellaOps.Feedser/StellaOps.Feedser.sln - - - name: Build - run: dotnet build src/StellaOps.Feedser/StellaOps.Feedser.sln --configuration Release --no-restore -warnaserror - - - name: Test - run: dotnet test src/StellaOps.Feedser/StellaOps.Feedser.Tests/StellaOps.Feedser.Tests.csproj --configuration Release --no-restore --logger "trx;LogFileName=feedser-tests.trx" + - name: Restore dependencies + run: dotnet restore src/Concelier/StellaOps.Concelier.sln + + - name: Build + run: dotnet build src/Concelier/StellaOps.Concelier.sln --configuration Release --no-restore -warnaserror + + - name: Test + run: dotnet test src/Concelier/StellaOps.Concelier.sln --configuration Release --no-restore --logger "trx;LogFileName=concelier-tests.trx" diff --git a/.gitea/workflows/build-test-deploy.yml b/.gitea/workflows/build-test-deploy.yml index 61825e84..790cd7bd 100644 --- a/.gitea/workflows/build-test-deploy.yml +++ b/.gitea/workflows/build-test-deploy.yml @@ -77,15 +77,15 @@ jobs: include-prerelease: true - name: Restore Concelier solution - run: dotnet restore src/StellaOps.Concelier.sln + run: dotnet restore src/Concelier/StellaOps.Concelier.sln - name: Build Concelier solution (warnings as errors) - run: dotnet build src/StellaOps.Concelier.sln --configuration $BUILD_CONFIGURATION --no-restore -warnaserror + run: dotnet build src/Concelier/StellaOps.Concelier.sln --configuration $BUILD_CONFIGURATION --no-restore -warnaserror - name: Run Concelier unit and integration tests run: | mkdir -p "$TEST_RESULTS_DIR" - dotnet test src/StellaOps.Concelier.sln \ + dotnet test src/Concelier/StellaOps.Concelier.sln \ --configuration $BUILD_CONFIGURATION \ --no-build \ --logger "trx;LogFileName=stellaops-concelier-tests.trx" \ @@ -202,20 +202,20 @@ PY run: | dotnet restore src/StellaOps.sln for project in \ - src/StellaOps.Scanner.Analyzers.Lang/StellaOps.Scanner.Analyzers.Lang.csproj \ - src/StellaOps.Scanner.Analyzers.Lang.Java/StellaOps.Scanner.Analyzers.Lang.Java.csproj \ - src/StellaOps.Scanner.Analyzers.Lang.Node/StellaOps.Scanner.Analyzers.Lang.Node.csproj \ - src/StellaOps.Scanner.Analyzers.Lang.Python/StellaOps.Scanner.Analyzers.Lang.Python.csproj \ - src/StellaOps.Scanner.Analyzers.Lang.Go/StellaOps.Scanner.Analyzers.Lang.Go.csproj \ - src/StellaOps.Scanner.Analyzers.Lang.DotNet/StellaOps.Scanner.Analyzers.Lang.DotNet.csproj \ - src/StellaOps.Scanner.Analyzers.Lang.Rust/StellaOps.Scanner.Analyzers.Lang.Rust.csproj + src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/StellaOps.Scanner.Analyzers.Lang.csproj \ + src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/StellaOps.Scanner.Analyzers.Lang.Java.csproj \ + 
src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/StellaOps.Scanner.Analyzers.Lang.Node.csproj \ + src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/StellaOps.Scanner.Analyzers.Lang.Python.csproj \ + src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/StellaOps.Scanner.Analyzers.Lang.Go.csproj \ + src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/StellaOps.Scanner.Analyzers.Lang.DotNet.csproj \ + src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/StellaOps.Scanner.Analyzers.Lang.Rust.csproj do dotnet build "$project" --configuration $BUILD_CONFIGURATION --no-restore -warnaserror done - name: Run scanner language analyzer tests run: | - dotnet test src/StellaOps.Scanner.Analyzers.Lang.Tests/StellaOps.Scanner.Analyzers.Lang.Tests.csproj \ + dotnet test src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/StellaOps.Scanner.Analyzers.Lang.Tests.csproj \ --configuration $BUILD_CONFIGURATION \ --no-build \ --logger "trx;LogFileName=stellaops-scanner-lang-tests.trx" \ @@ -231,11 +231,11 @@ PY CAPTURED_AT="$(date -u +"%Y-%m-%dT%H:%M:%SZ")" dotnet run \ - --project src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/StellaOps.Bench.ScannerAnalyzers.csproj \ + --project src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/StellaOps.Bench.ScannerAnalyzers.csproj \ --configuration $BUILD_CONFIGURATION \ -- \ --repo-root . \ - --baseline src/StellaOps.Bench/Scanner.Analyzers/baseline.csv \ + --baseline src/Bench/StellaOps.Bench/Scanner.Analyzers/baseline.csv \ --out "$PERF_OUTPUT_DIR/latest.csv" \ --json "$PERF_OUTPUT_DIR/report.json" \ --prom "$PERF_OUTPUT_DIR/metrics.prom" \ @@ -253,7 +253,7 @@ PY - name: Publish BuildX SBOM generator run: | - dotnet publish src/StellaOps.Scanner.Sbomer.BuildXPlugin/StellaOps.Scanner.Sbomer.BuildXPlugin.csproj \ + dotnet publish src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/StellaOps.Scanner.Sbomer.BuildXPlugin.csproj \ --configuration $BUILD_CONFIGURATION \ --output out/buildx @@ -337,10 +337,10 @@ PY if-no-files-found: error retention-days: 7 - - name: Publish Feedser web service + - name: Publish Concelier web service run: | mkdir -p "$PUBLISH_DIR" - dotnet publish src/StellaOps.Feedser.WebService/StellaOps.Feedser.WebService.csproj \ + dotnet publish src/Concelier/StellaOps.Concelier.WebService/StellaOps.Concelier.WebService.csproj \ --configuration $BUILD_CONFIGURATION \ --no-build \ --output "$PUBLISH_DIR" @@ -348,20 +348,20 @@ PY - name: Upload published artifacts uses: actions/upload-artifact@v4 with: - name: feedser-publish + name: concelier-publish path: ${{ env.PUBLISH_DIR }} if-no-files-found: error retention-days: 7 - name: Restore Authority solution - run: dotnet restore src/StellaOps.Authority/StellaOps.Authority.sln + run: dotnet restore src/Authority/StellaOps.Authority/StellaOps.Authority.sln - name: Build Authority solution - run: dotnet build src/StellaOps.Authority/StellaOps.Authority.sln --configuration $BUILD_CONFIGURATION --no-restore -warnaserror + run: dotnet build src/Authority/StellaOps.Authority/StellaOps.Authority.sln --configuration $BUILD_CONFIGURATION --no-restore -warnaserror - name: Run Authority tests run: | - dotnet test src/StellaOps.Configuration.Tests/StellaOps.Configuration.Tests.csproj \ + dotnet test src/__Libraries/__Tests/StellaOps.Configuration.Tests/StellaOps.Configuration.Tests.csproj \ --configuration $BUILD_CONFIGURATION \ --no-build \ --logger "trx;LogFileName=stellaops-authority-tests.trx" \ @@ -370,7 
+370,7 @@ PY - name: Publish Authority web service run: | mkdir -p "$AUTHORITY_PUBLISH_DIR" - dotnet publish src/StellaOps.Authority/StellaOps.Authority/StellaOps.Authority.csproj \ + dotnet publish src/Authority/StellaOps.Authority/StellaOps.Authority/StellaOps.Authority.csproj \ --configuration $BUILD_CONFIGURATION \ --no-build \ --output "$AUTHORITY_PUBLISH_DIR" @@ -439,7 +439,7 @@ PY runs-on: ubuntu-22.04 needs: build-test env: - BENCH_DIR: src/StellaOps.Bench/Scanner.Analyzers + BENCH_DIR: src/Bench/StellaOps.Bench/Scanner.Analyzers steps: - name: Checkout repository uses: actions/checkout@v4 diff --git a/.gitea/workflows/docs.yml b/.gitea/workflows/docs.yml index 612a0668..c21742b3 100755 --- a/.gitea/workflows/docs.yml +++ b/.gitea/workflows/docs.yml @@ -1,39 +1,39 @@ -# .gitea/workflows/docs.yml -# Documentation quality checks and preview artefacts - -name: Docs CI - -on: - push: - paths: - - 'docs/**' - - 'scripts/render_docs.py' - - '.gitea/workflows/docs.yml' - pull_request: - paths: - - 'docs/**' - - 'scripts/render_docs.py' - - '.gitea/workflows/docs.yml' - workflow_dispatch: {} - -env: - NODE_VERSION: '20' - PYTHON_VERSION: '3.11' - -jobs: - lint-and-preview: - runs-on: ubuntu-22.04 - env: - DOCS_OUTPUT_DIR: ${{ github.workspace }}/artifacts/docs-preview - steps: - - name: Checkout repository - uses: actions/checkout@v4 - - - name: Setup Node.js - uses: actions/setup-node@v4 - with: - node-version: ${{ env.NODE_VERSION }} - +# .gitea/workflows/docs.yml +# Documentation quality checks and preview artefacts + +name: Docs CI + +on: + push: + paths: + - 'docs/**' + - 'scripts/render_docs.py' + - '.gitea/workflows/docs.yml' + pull_request: + paths: + - 'docs/**' + - 'scripts/render_docs.py' + - '.gitea/workflows/docs.yml' + workflow_dispatch: {} + +env: + NODE_VERSION: '20' + PYTHON_VERSION: '3.11' + +jobs: + lint-and-preview: + runs-on: ubuntu-22.04 + env: + DOCS_OUTPUT_DIR: ${{ github.workspace }}/artifacts/docs-preview + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Setup Node.js + uses: actions/setup-node@v4 + with: + node-version: ${{ env.NODE_VERSION }} + - name: Install documentation toolchain run: | npm install --no-save markdown-link-check remark-cli remark-preset-lint-recommended ajv ajv-cli ajv-formats @@ -43,11 +43,11 @@ jobs: with: dotnet-version: '10.0.100-rc.2.25502.107' - - name: Link check - run: | - find docs -name '*.md' -print0 | \ - xargs -0 -n1 -I{} npx markdown-link-check --quiet '{}' - + - name: Link check + run: | + find docs -name '*.md' -print0 | \ + xargs -0 -n1 -I{} npx markdown-link-check --quiet '{}' + - name: Remark lint run: | npx remark docs -qf @@ -70,26 +70,26 @@ jobs: - name: Run Notify schema validation tests run: | - dotnet test src/StellaOps.Notify.Models.Tests/StellaOps.Notify.Models.Tests.csproj --configuration Release --nologo + dotnet test src/Notify/__Tests/StellaOps.Notify.Models.Tests/StellaOps.Notify.Models.Tests.csproj --configuration Release --nologo - name: Setup Python uses: actions/setup-python@v5 - with: - python-version: ${{ env.PYTHON_VERSION }} - - - name: Install documentation dependencies - run: | - python -m pip install --upgrade pip - python -m pip install markdown pygments - - - name: Render documentation preview bundle - run: | - python scripts/render_docs.py --source docs --output "$DOCS_OUTPUT_DIR" --clean - - - name: Upload documentation preview - if: always() - uses: actions/upload-artifact@v4 - with: - name: feedser-docs-preview - path: ${{ env.DOCS_OUTPUT_DIR }} - retention-days: 7 
+ with: + python-version: ${{ env.PYTHON_VERSION }} + + - name: Install documentation dependencies + run: | + python -m pip install --upgrade pip + python -m pip install markdown pygments + + - name: Render documentation preview bundle + run: | + python scripts/render_docs.py --source docs --output "$DOCS_OUTPUT_DIR" --clean + + - name: Upload documentation preview + if: always() + uses: actions/upload-artifact@v4 + with: + name: feedser-docs-preview + path: ${{ env.DOCS_OUTPUT_DIR }} + retention-days: 7 diff --git a/.gitea/workflows/release.yml b/.gitea/workflows/release.yml index 0ef66cf7..3b9c3ef3 100644 --- a/.gitea/workflows/release.yml +++ b/.gitea/workflows/release.yml @@ -69,7 +69,7 @@ jobs: - name: Publish Python analyzer plug-in run: | set -euo pipefail - dotnet publish src/StellaOps.Scanner.Analyzers.Lang.Python/StellaOps.Scanner.Analyzers.Lang.Python.csproj \ + dotnet publish src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/StellaOps.Scanner.Analyzers.Lang.Python.csproj \ --configuration Release \ --output out/analyzers/python \ --no-self-contained diff --git a/.gitignore b/.gitignore index e4166469..1ee1dd1e 100644 --- a/.gitignore +++ b/.gitignore @@ -1,34 +1,34 @@ -# Build outputs -bin/ -obj/ -*.pdb -*.dll - -# IDE state -.vs/ -*.user -*.suo -*.userprefs - -# Rider/VSCode -.idea/ -.vscode/ - -# Packages and logs -*.log -TestResults/ - -.dotnet -.DS_Store -seed-data/ics-cisa/*.csv -seed-data/ics-cisa/*.xlsx -seed-data/ics-cisa/*.sha256 -seed-data/cert-bund/**/*.json -seed-data/cert-bund/**/*.sha256 - -out/offline-kit/web/**/* -src/StellaOps.Web/node_modules/**/* -src/StellaOps.Web/.angular/**/* -**/node_modules/**/* -node_modules -tmp/**/* +# Build outputs +bin/ +obj/ +*.pdb +*.dll + +# IDE state +.vs/ +*.user +*.suo +*.userprefs + +# Rider/VSCode +.idea/ +.vscode/ + +# Packages and logs +*.log +TestResults/ + +.dotnet +.DS_Store +seed-data/ics-cisa/*.csv +seed-data/ics-cisa/*.xlsx +seed-data/ics-cisa/*.sha256 +seed-data/cert-bund/**/*.json +seed-data/cert-bund/**/*.sha256 + +out/offline-kit/web/**/* +src/Web/StellaOps.Web/node_modules/**/* +src/Web/StellaOps.Web/.angular/**/* +**/node_modules/**/* +node_modules +tmp/**/* diff --git a/.venv/pyvenv.cfg b/.venv/pyvenv.cfg index ecf82ea0..ef350ee6 100644 --- a/.venv/pyvenv.cfg +++ b/.venv/pyvenv.cfg @@ -1,5 +1,5 @@ -home = /usr/bin -include-system-site-packages = false -version = 3.12.3 -executable = /usr/bin/python3.12 -command = /usr/bin/python3 -m venv /mnt/e/dev/git.stella-ops.org/.venv +home = /usr/bin +include-system-site-packages = false +version = 3.12.3 +executable = /usr/bin/python3.12 +command = /usr/bin/python3 -m venv /mnt/e/dev/git.stella-ops.org/.venv diff --git a/Directory.Build.props b/Directory.Build.props index c2475e91..07bb742d 100644 --- a/Directory.Build.props +++ b/Directory.Build.props @@ -1,12 +1,12 @@ - - - $([System.IO.Path]::GetFullPath('$(MSBuildThisFileDirectory)')) - $([System.IO.Path]::GetFullPath('$(StellaOpsRepoRoot)local-nuget/')) - https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/index.json - https://api.nuget.org/v3/index.json - <_StellaOpsDefaultRestoreSources>$(StellaOpsLocalNuGetSource);$(StellaOpsDotNetPublicSource);$(StellaOpsNuGetOrgSource) - <_StellaOpsOriginalRestoreSources Condition="'$(_StellaOpsOriginalRestoreSources)' == ''">$(RestoreSources) - $(_StellaOpsDefaultRestoreSources) - $(_StellaOpsDefaultRestoreSources);$(_StellaOpsOriginalRestoreSources) - - + + + $([System.IO.Path]::GetFullPath('$(MSBuildThisFileDirectory)')) + 
$([System.IO.Path]::GetFullPath('$(StellaOpsRepoRoot)local-nuget/')) + https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/index.json + https://api.nuget.org/v3/index.json + <_StellaOpsDefaultRestoreSources>$(StellaOpsLocalNuGetSource);$(StellaOpsDotNetPublicSource);$(StellaOpsNuGetOrgSource) + <_StellaOpsOriginalRestoreSources Condition="'$(_StellaOpsOriginalRestoreSources)' == ''">$(RestoreSources) + $(_StellaOpsDefaultRestoreSources) + $(_StellaOpsDefaultRestoreSources);$(_StellaOpsOriginalRestoreSources) + + diff --git a/Mongo2Go-4.1.0/src/Mongo2Go/Mongo2Go.csproj b/Mongo2Go-4.1.0/src/Mongo2Go/Mongo2Go.csproj index 029cfb83..4d2555eb 100644 --- a/Mongo2Go-4.1.0/src/Mongo2Go/Mongo2Go.csproj +++ b/Mongo2Go-4.1.0/src/Mongo2Go/Mongo2Go.csproj @@ -1,93 +1,93 @@ - - - - net472;netstandard2.1 - Johannes Hoppe and many contributors - Mongo2Go is a managed wrapper around MongoDB binaries. It targets .NET Framework 4.7.2 and .NET Standard 2.1. -This Nuget package contains the executables of mongod, mongoimport and mongoexport v4.4.4 for Windows, Linux and macOS. - - -Mongo2Go has two use cases: - -1. Providing multiple, temporary and isolated MongoDB databases for integration tests -2. Providing a quick to set up MongoDB database for a local developer environment - HAUS HOPPE - ITS - Copyright © 2012-2025 Johannes Hoppe and many ❤️ contributors - true - icon.png - MIT - https://github.com/Mongo2Go/Mongo2Go - https://github.com/Mongo2Go/Mongo2Go/releases - MongoDB Mongo unit test integration runner - https://github.com/Mongo2Go/Mongo2Go - git - Mongo2Go - Mongo2Go is a managed wrapper around MongoDB binaries. - - - - 4 - 1701;1702;1591;1573 - - - - 4 - 1701;1702;1591;1573 - - - - 1701;1702;1591;1573 - - - - 1701;1702;1591;1573 - - - - true - true - true - - - - embedded - true - true - - - - v - - - - - - true - icon.png - - - true - tools - - - - - - - - - - - - - - - - - - - - - - - + + + + net472;netstandard2.1 + Johannes Hoppe and many contributors + Mongo2Go is a managed wrapper around MongoDB binaries. It targets .NET Framework 4.7.2 and .NET Standard 2.1. +This Nuget package contains the executables of mongod, mongoimport and mongoexport v4.4.4 for Windows, Linux and macOS. + + +Mongo2Go has two use cases: + +1. Providing multiple, temporary and isolated MongoDB databases for integration tests +2. Providing a quick to set up MongoDB database for a local developer environment + HAUS HOPPE - ITS + Copyright © 2012-2025 Johannes Hoppe and many ❤️ contributors + true + icon.png + MIT + https://github.com/Mongo2Go/Mongo2Go + https://github.com/Mongo2Go/Mongo2Go/releases + MongoDB Mongo unit test integration runner + https://github.com/Mongo2Go/Mongo2Go + git + Mongo2Go + Mongo2Go is a managed wrapper around MongoDB binaries. 
+ + + + 4 + 1701;1702;1591;1573 + + + + 4 + 1701;1702;1591;1573 + + + + 1701;1702;1591;1573 + + + + 1701;1702;1591;1573 + + + + true + true + true + + + + embedded + true + true + + + + v + + + + + + true + icon.png + + + true + tools + + + + + + + + + + + + + + + + + + + + + + + diff --git a/Mongo2Go-4.1.0/src/Mongo2GoTests/Mongo2GoTests.csproj b/Mongo2Go-4.1.0/src/Mongo2GoTests/Mongo2GoTests.csproj index 34f3034a..7c596c21 100644 --- a/Mongo2Go-4.1.0/src/Mongo2GoTests/Mongo2GoTests.csproj +++ b/Mongo2Go-4.1.0/src/Mongo2GoTests/Mongo2GoTests.csproj @@ -1,21 +1,21 @@ - - - net8.0 - false - - - - - - - - - - - - - - - - + + + net8.0 + false + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/NuGet.config b/NuGet.config index 225ab2b3..359a8450 100644 --- a/NuGet.config +++ b/NuGet.config @@ -1,44 +1,44 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/README.md b/README.md index 2a6184b8..02c13d74 100755 --- a/README.md +++ b/README.md @@ -14,9 +14,9 @@ control against the Concelier API. 3. Copy `etc/authority.yaml.sample` to `etc/authority.yaml`, review the issuer, token lifetimes, and plug-in descriptors, then edit the companion manifests under `etc/authority.plugins/*.yaml` to match your deployment. -4. Start the web service with `dotnet run --project src/StellaOps.Concelier.WebService`. +4. Start the web service with `dotnet run --project src/Concelier/StellaOps.Concelier.WebService`. 5. Configure the CLI via environment variables (e.g. `STELLAOPS_BACKEND_URL`) and trigger - jobs with `dotnet run --project src/StellaOps.Cli -- db merge`. + jobs with `dotnet run --project src/Cli/StellaOps.Cli -- db merge`. Detailed operator guidance is available in `docs/10_CONCELIER_CLI_QUICKSTART.md`. API and command reference material lives in `docs/09_API_CLI_REFERENCE.md`. @@ -31,4 +31,4 @@ for integration steps once available. - `docs/README.md` now consolidates the platform index and points to the updated high-level architecture. - Module architecture dossiers live under `docs/ARCHITECTURE_*.md`; the most relevant here are `docs/ARCHITECTURE_CONCELIER.md` (service layout, merge engine, exports) and `docs/ARCHITECTURE_CLI.md` (command surface, AOT packaging, auth flows). Related services such as the Signer, Attestor, Authority, Scanner, UI, Excititor, Zastava, and DevOps pipeline each have their own dossier. - Offline operation guidance moved to `docs/24_OFFLINE_KIT.md`, which details bundle composition, verification, and delta workflows. Concelier-specific connector operations stay in `docs/ops/concelier-certbund-operations.md` and companion runbooks under `docs/ops/`. - + diff --git a/SPRINTS.md b/SPRINTS.md deleted file mode 100644 index dd09a0db..00000000 --- a/SPRINTS.md +++ /dev/null @@ -1,1119 +0,0 @@ -This file describe implementation of Stella Ops (docs/README.md). Implementation must respect rules from AGENTS.md (read if you have not). - -| Sprint | Theme | Tasks File Path | Status | Type of Specialist | Task ID | Task Description | -| --- | --- | --- | --- | --- | --- | --- | -| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.Worker/TASKS.md | DOING (2025-10-27) | Scheduler Worker Guild | SCHED-WORKER-16-201 | Planner loop (cron/event triggers, leases, fairness). 
|
-| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.Worker/TASKS.md | DONE (2025-10-27) | Scheduler Worker Guild | SCHED-WORKER-16-202 | ImpactIndex targeting and shard planning. |
-| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.Worker/TASKS.md | DONE (2025-10-27) | Scheduler Worker Guild | SCHED-WORKER-16-203 | Runner execution invoking Scanner analysis/content refresh. |
-| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.Worker/TASKS.md | DONE (2025-10-27) | Scheduler Worker Guild | SCHED-WORKER-16-204 | Emit rescan/report events for Notify/UI. |
-| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.Worker/TASKS.md | DONE (2025-10-27) | Scheduler Worker Guild | SCHED-WORKER-16-205 | Metrics/telemetry for Scheduler planners/runners. |
-| Sprint 17 | Symbol Intelligence & Forensics | ops/offline-kit/TASKS.md | BLOCKED (2025-10-26) | Offline Kit Guild, DevOps Guild | DEVOPS-OFFLINE-17-004 | Run mirror_debug_store.py once release artefacts exist and archive verification evidence with the Offline Kit. |
-| Sprint 17 | Symbol Intelligence & Forensics | ops/devops/TASKS.md | BLOCKED (2025-10-26) | DevOps Guild | DEVOPS-REL-17-004 | Ensure release workflow publishes `out/release/debug` (build-id tree + manifest) and fails when symbols are missing. |
-> DOCS-AOC-19-004: Architecture overview & policy-engine docs refreshed 2025-10-26 — reuse new AOC boundary diagram + metrics guidance.
-> DOCS-AOC-19-005: Link to the new AOC reference and architecture overview; include exit code table sourced from those docs.
-| Sprint 19 | Aggregation-Only Contract Enforcement | ops/devops/TASKS.md | BLOCKED (2025-10-26) | DevOps Guild, Platform Guild | DEVOPS-AOC-19-001 | Integrate AOC analyzer/guard enforcement into CI pipelines. |
-| Sprint 19 | Aggregation-Only Contract Enforcement | ops/devops/TASKS.md | BLOCKED (2025-10-26) | DevOps Guild | DEVOPS-AOC-19-002 | Add CI stage running `stella aoc verify` against seeded snapshots. |
-| Sprint 19 | Aggregation-Only Contract Enforcement | ops/devops/TASKS.md | BLOCKED (2025-10-26) | DevOps Guild, QA Guild | DEVOPS-AOC-19-003 | Enforce guard coverage thresholds and export metrics to dashboards. |
-| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Authority/TASKS.md | DONE (2025-10-27) | Authority Core & Security Guild | AUTH-AOC-19-002 | Enforce tenant claim propagation and cross-tenant guardrails. |
-> AUTH-AOC-19-002: Tenant metadata now flows through rate limiter/audit/token persistence; password grant scope/tenant enforcement landed. Docs/stakeholder walkthrough pending.
-> 2025-10-27 Update: Ingestion scopes require tenant assignment; access tokens propagate tenant claims and reject cross-tenant mismatches with coverage.
-| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Authority/TASKS.md | DONE (2025-10-27) | Authority Core & Docs Guild | AUTH-AOC-19-003 | Update Authority docs/config samples for new scopes. |
-> AUTH-AOC-19-003: Scope catalogue, console/CLI docs, and sample config updated to require `aoc:verify` plus read scopes; verification clients now explicitly include tenant hints. Authority test run remains blocked on Concelier build failure (`ImmutableHashSet`), previously noted under AUTH-AOC-19-002.
-| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Cli/TASKS.md | DOING (2025-10-27) | DevEx/CLI Guild | CLI-AOC-19-001 | Implement `stella sources ingest --dry-run` command. |
-| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-AOC-19-002 | Implement `stella aoc verify` command with exit codes. |
-| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Cli/TASKS.md | TODO | Docs/CLI Guild | CLI-AOC-19-003 | Update CLI reference and quickstart docs for new AOC commands. |
-| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-CORE-AOC-19-001 | Implement AOC repository guard rejecting forbidden fields. |
-| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-CORE-AOC-19-002 | Deliver deterministic linkset extraction for advisories. |
-| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-CORE-AOC-19-003 | Enforce idempotent append-only upsert with supersedes pointers. |
-| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-CORE-AOC-19-004 | Remove ingestion normalization; defer derived logic to Policy Engine. |
-| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-CORE-AOC-19-013 | Extend smoke coverage to validate tenant-scoped Authority tokens and cross-tenant rejection. |
-| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Concelier.Storage.Mongo/TASKS.md | TODO | Concelier Storage Guild | CONCELIER-STORE-AOC-19-001 | Add Mongo schema validator for `advisory_raw`. |
-| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Concelier.Storage.Mongo/TASKS.md | TODO | Concelier Storage Guild | CONCELIER-STORE-AOC-19-002 | Create idempotency unique index backed by migration scripts. |
-| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Concelier.Storage.Mongo/TASKS.md | TODO | Concelier Storage Guild | CONCELIER-STORE-AOC-19-003 | Deliver append-only migration/backfill plan with supersedes chaining. |
-| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Concelier.Storage.Mongo/TASKS.md | TODO | Concelier Storage Guild, DevOps Guild | CONCELIER-STORE-AOC-19-004 | Document validator deployment steps for online/offline clusters. |
-| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Concelier.WebService/TASKS.md | DONE (2025-10-28) | Concelier WebService Guild | CONCELIER-WEB-AOC-19-001 | Implement raw advisory ingestion endpoints with AOC guard and verifier. |
-| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild, Observability Guild | CONCELIER-WEB-AOC-19-002 | Emit AOC observability metrics, traces, and structured logs. |
-| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Concelier.WebService/TASKS.md | TODO | QA Guild | CONCELIER-WEB-AOC-19-003 | Add schema/guard unit tests covering AOC error codes. |
-| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild, QA Guild | CONCELIER-WEB-AOC-19-004 | Build integration suite validating deterministic ingest under load. |
-| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-CORE-AOC-19-001 | Introduce VEX repository guard enforcing AOC invariants. |
-| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-CORE-AOC-19-002 | Build deterministic VEX linkset extraction. |
-| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-CORE-AOC-19-003 | Enforce append-only idempotent VEX raw upserts. |
-| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-CORE-AOC-19-004 | Remove ingestion consensus logic; rely on Policy Engine. |
-| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-CORE-AOC-19-013 | Update smoke suites to enforce tenant-scoped Authority tokens and cross-tenant VEX rejection. |
-| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Excititor.Storage.Mongo/TASKS.md | TODO | Excititor Storage Guild | EXCITITOR-STORE-AOC-19-001 | Add Mongo schema validator for `vex_raw`. |
-| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Excititor.Storage.Mongo/TASKS.md | TODO | Excititor Storage Guild | EXCITITOR-STORE-AOC-19-002 | Create idempotency unique index for VEX raw documents. |
-| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Excititor.Storage.Mongo/TASKS.md | TODO | Excititor Storage Guild | EXCITITOR-STORE-AOC-19-003 | Deliver append-only migration/backfill for VEX raw collections. |
-| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Excititor.Storage.Mongo/TASKS.md | TODO | Excititor Storage Guild, DevOps Guild | EXCITITOR-STORE-AOC-19-004 | Document validator deployment for Excititor clusters/offline kit. |
-| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-WEB-AOC-19-001 | Implement raw VEX ingestion and AOC verifier endpoints. |
-| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild, Observability Guild | EXCITITOR-WEB-AOC-19-002 | Emit AOC metrics/traces/logging for Excititor ingestion. |
-| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Excititor.WebService/TASKS.md | TODO | QA Guild | EXCITITOR-WEB-AOC-19-003 | Add AOC guard test harness for VEX schemas. |
-| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild, QA Guild | EXCITITOR-WEB-AOC-19-004 | Validate large VEX ingest runs and CLI verification parity. |
-| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Excititor.Worker/TASKS.md | TODO | Excititor Worker Guild | EXCITITOR-WORKER-AOC-19-001 | Rewire worker to persist raw VEX docs with guard enforcement. |
-| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Excititor.Worker/TASKS.md | TODO | Excititor Worker Guild | EXCITITOR-WORKER-AOC-19-002 | Enforce signature/checksum verification prior to raw writes. |
-| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Excititor.Worker/TASKS.md | DONE (2025-10-28) | QA Guild | EXCITITOR-WORKER-AOC-19-003 | Expand worker tests for deterministic batching and restart safety.
| -| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Policy/TASKS.md | TODO | Policy Guild | POLICY-AOC-19-001 | Add lint preventing ingestion modules from referencing Policy-only helpers. | -| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Policy/TASKS.md | TODO | Policy Guild, Security Guild | POLICY-AOC-19-002 | Enforce Policy-only writes to `effective_finding_*` collections. | -| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Policy/TASKS.md | TODO | Policy Guild | POLICY-AOC-19-003 | Update Policy readers to consume only raw document fields. | -| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Policy/TASKS.md | TODO | Policy Guild, QA Guild | POLICY-AOC-19-004 | Add determinism tests for raw-driven policy recomputation. | -| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-AOC-19-001 | Add Sources dashboard tiles surfacing AOC status and violations. | -| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-AOC-19-002 | Build violation drill-down view for offending documents. | -| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-AOC-19-003 | Wire "Verify last 24h" action and CLI parity messaging. | -| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Web/TASKS.md | DOING (2025-10-26) | BE-Base Platform Guild | WEB-AOC-19-001 | Provide shared AOC forbidden key set and guard middleware. | -| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-AOC-19-002 | Ship provenance builder and signature helpers for ingestion services. | -| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild, QA Guild | WEB-AOC-19-003 | Author analyzer + shared test fixtures for guard compliance. | -| Sprint 20 | Policy Engine v2 | ops/devops/TASKS.md | BLOCKED (waiting on POLICY-ENGINE-20-006) | DevOps Guild | DEVOPS-POLICY-20-002 | Run `stella policy simulate` CI stage against golden SBOMs. | -| Sprint 20 | Policy Engine v2 | ops/devops/TASKS.md | DONE (2025-10-27) | DevOps Guild, Scheduler Guild, CLI Guild | DEVOPS-POLICY-20-004 | Automate policy schema exports and change notifications for CLI consumers. | -| Sprint 20 | Policy Engine v2 | src/StellaOps.Bench/TASKS.md | BLOCKED (waiting on SCHED-WORKER-20-302) | Bench Guild, Scheduler Guild | BENCH-POLICY-20-002 | Add incremental run benchmark capturing delta SLA compliance. | -| Sprint 20 | Policy Engine v2 | src/StellaOps.Cli/TASKS.md | DONE (2025-10-27) | DevEx/CLI Guild | CLI-POLICY-20-002 | Implement `stella policy simulate` with diff outputs + exit codes. | -| Sprint 20 | Policy Engine v2 | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild, Docs Guild | CLI-POLICY-20-003 | Extend `stella findings` commands with policy filters and explain view. | -> 2025-10-27: Backend helpers drafted but command integration/tests pending; task reset to TODO awaiting follow-up. -| Sprint 20 | Policy Engine v2 | src/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-POLICY-20-002 | Strengthen linkset builders with equivalence tables + range parsing. | -| Sprint 20 | Policy Engine v2 | src/StellaOps.Concelier.Storage.Mongo/TASKS.md | TODO | Concelier Storage Guild | CONCELIER-POLICY-20-003 | Add advisory selection cursors + change-stream checkpoints for policy runs. 
| -| Sprint 20 | Policy Engine v2 | src/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-POLICY-20-001 | Provide advisory selection endpoints for policy engine (batch PURL/ID). | -| Sprint 20 | Policy Engine v2 | src/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-POLICY-20-002 | Enhance VEX linkset scope + version resolution for policy accuracy. | -| Sprint 20 | Policy Engine v2 | src/StellaOps.Excititor.Storage.Mongo/TASKS.md | TODO | Excititor Storage Guild | EXCITITOR-POLICY-20-003 | Introduce VEX selection cursors + change-stream checkpoints. | -| Sprint 20 | Policy Engine v2 | src/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-POLICY-20-001 | Ship VEX selection APIs aligned with policy join requirements. | -| Sprint 20 | Policy Engine v2 | src/StellaOps.Policy.Engine/TASKS.md | BLOCKED (2025-10-26) | Policy Guild | POLICY-ENGINE-20-002 | Implement deterministic rule evaluator with priority/first-match semantics. | -| Sprint 20 | Policy Engine v2 | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild, Concelier Core, Excititor Core | POLICY-ENGINE-20-003 | Build SBOM↔advisory↔VEX linkset joiners with deterministic batching. | -| Sprint 20 | Policy Engine v2 | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild, Storage Guild | POLICY-ENGINE-20-004 | Materialize effective findings with append-only history and tenant scoping. | -| Sprint 20 | Policy Engine v2 | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild, Security Guild | POLICY-ENGINE-20-005 | Enforce determinism guard banning wall-clock, RNG, and network usage. | -| Sprint 20 | Policy Engine v2 | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild, Scheduler Guild | POLICY-ENGINE-20-006 | Implement incremental orchestrator reacting to change streams. | -| Sprint 20 | Policy Engine v2 | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild, Observability Guild | POLICY-ENGINE-20-007 | Emit policy metrics, traces, and sampled rule-hit logs. | -| Sprint 20 | Policy Engine v2 | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild, QA Guild | POLICY-ENGINE-20-008 | Add unit/property/golden/perf suites verifying determinism + SLA. | -| Sprint 20 | Policy Engine v2 | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild, Storage Guild | POLICY-ENGINE-20-009 | Define Mongo schemas/indexes + migrations for policies/runs/findings. | -| Sprint 20 | Policy Engine v2 | src/StellaOps.Scheduler.Models/TASKS.md | TODO | Scheduler Models Guild | SCHED-MODELS-20-002 | Update schema docs with policy run lifecycle samples. | -| Sprint 20 | Policy Engine v2 | src/StellaOps.Scheduler.WebService/TASKS.md | TODO | Scheduler WebService Guild | SCHED-WEB-20-001 | Expose policy run scheduling APIs with scope enforcement. | -| Sprint 20 | Policy Engine v2 | src/StellaOps.Scheduler.WebService/TASKS.md | TODO | Scheduler WebService Guild | SCHED-WEB-20-002 | Provide simulation trigger endpoint returning diff metadata. | -| Sprint 20 | Policy Engine v2 | src/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker Guild | SCHED-WORKER-20-301 | Schedule policy runs via API with idempotent job tracking. | -| Sprint 20 | Policy Engine v2 | src/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker Guild | SCHED-WORKER-20-302 | Implement delta targeting leveraging change streams + policy metadata. 
| -| Sprint 20 | Policy Engine v2 | src/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker Guild, Observability Guild | SCHED-WORKER-20-303 | Expose policy scheduling metrics/logs with policy/run identifiers. | -| Sprint 20 | Policy Engine v2 | src/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-POLICY-20-001 | Ship Monaco-based policy editor with inline diagnostics + checklists. | -| Sprint 20 | Policy Engine v2 | src/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-POLICY-20-002 | Build simulation panel with deterministic diff rendering + virtualization. | -| Sprint 20 | Policy Engine v2 | src/StellaOps.UI/TASKS.md | TODO | UI Guild, Product Ops | UI-POLICY-20-003 | Implement submit/review/approve workflow with RBAC + audit trail. | -| Sprint 20 | Policy Engine v2 | src/StellaOps.UI/TASKS.md | TODO | UI Guild, Observability Guild | UI-POLICY-20-004 | Add run dashboards (heatmap/VEX wins/suppressions) with export. | -| Sprint 20 | Policy Engine v2 | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-POLICY-20-001 | Implement Policy CRUD/compile/run/simulate/findings/explain endpoints. | -| Sprint 20 | Policy Engine v2 | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-POLICY-20-002 | Add pagination, filters, deterministic ordering to policy listings. | -| Sprint 20 | Policy Engine v2 | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild, QA Guild | WEB-POLICY-20-003 | Map engine errors to `ERR_POL_*` responses with contract tests. | -| Sprint 20 | Policy Engine v2 | src/StellaOps.Web/TASKS.md | TODO | Platform Reliability Guild | WEB-POLICY-20-004 | Introduce rate limits/quotas + metrics for simulation endpoints. | -| Sprint 21 | Graph Explorer v1 | src/StellaOps.Bench/TASKS.md | BLOCKED (2025-10-27) | Bench Guild, Graph Platform Guild | BENCH-GRAPH-21-001 | Graph viewport/path perf harness (50k/100k nodes) measuring Graph API/Indexer latency and cache hit rates. Executed within Sprint 28 Graph program. Upstream Graph API/indexer contracts (`GRAPH-API-28-003`, `GRAPH-INDEX-28-006`) still pending, so benchmarks cannot target stable endpoints yet. | -| Sprint 21 | Graph Explorer v1 | src/StellaOps.Bench/TASKS.md | BLOCKED (2025-10-27) | Bench Guild, UI Guild | BENCH-GRAPH-21-002 | Headless UI load benchmark for graph canvas interactions (Playwright) tracking render FPS budgets. Executed within Sprint 28 Graph program. Depends on BENCH-GRAPH-21-001 and UI Graph Explorer (`UI-GRAPH-24-001`), both pending. | -| Sprint 21 | Graph Explorer v1 | src/StellaOps.Concelier.Core/TASKS.md | BLOCKED (2025-10-27) | Concelier Core Guild | CONCELIER-GRAPH-21-001 | Enrich SBOM normalization with relationships, scopes, entrypoint annotations for Cartographer. Requires finalized schemas from `CONCELIER-POLICY-20-002` and Cartographer event contract (`CARTO-GRAPH-21-002`). | -| Sprint 21 | Graph Explorer v1 | src/StellaOps.Concelier.Core/TASKS.md | BLOCKED (2025-10-27) | Concelier Core & Scheduler Guilds | CONCELIER-GRAPH-21-002 | Publish SBOM change events with tenant metadata for graph builds. Awaiting projection schema from `CONCELIER-GRAPH-21-001` and Cartographer webhook expectations. | -| Sprint 21 | Graph Explorer v1 | src/StellaOps.Cartographer/TASKS.md | DONE (2025-10-27) | Cartographer Guild | CARTO-GRAPH-21-010 | Replace hard-coded `graph:*` scope strings with shared constants once graph services integrate. 
| -| Sprint 21 | Graph Explorer v1 | src/StellaOps.Excititor.Core/TASKS.md | BLOCKED (2025-10-27) | Excititor Core Guild | EXCITITOR-GRAPH-21-001 | Deliver batched VEX/advisory fetch helpers for inspector linkouts. Waiting on linkset enrichment (`EXCITITOR-POLICY-20-002`) and Cartographer inspector contract (`CARTO-GRAPH-21-005`). | -| Sprint 21 | Graph Explorer v1 | src/StellaOps.Excititor.Core/TASKS.md | BLOCKED (2025-10-27) | Excititor Core Guild | EXCITITOR-GRAPH-21-002 | Enrich overlay metadata with VEX justification summaries for graph overlays. Depends on `EXCITITOR-GRAPH-21-001` and Policy overlay schema (`POLICY-ENGINE-30-001`). | -| Sprint 21 | Graph Explorer v1 | src/StellaOps.Excititor.Storage.Mongo/TASKS.md | BLOCKED (2025-10-27) | Excititor Storage Guild | EXCITITOR-GRAPH-21-005 | Create indexes/materialized views for VEX lookups by PURL/policy. Awaiting access pattern specs from `EXCITITOR-GRAPH-21-001`. | -| Sprint 21 | Graph Explorer v1 | src/StellaOps.SbomService/TASKS.md | BLOCKED (2025-10-27) | SBOM Service Guild | SBOM-SERVICE-21-001 | Expose normalized SBOM projection API with relationships, scopes, entrypoints. Waiting on Concelier projection schema (`CONCELIER-GRAPH-21-001`). | -| Sprint 21 | Graph Explorer v1 | src/StellaOps.SbomService/TASKS.md | BLOCKED (2025-10-27) | SBOM Service & Scheduler Guilds | SBOM-SERVICE-21-002 | Emit SBOM version change events for Cartographer build queue. Depends on SBOM projection API (`SBOM-SERVICE-21-001`) and Scheduler contracts. | -| Sprint 21 | Graph Explorer v1 | src/StellaOps.SbomService/TASKS.md | BLOCKED (2025-10-27) | SBOM Service Guild | SBOM-SERVICE-21-003 | Provide entrypoint management API with tenant overrides. Blocked by SBOM projection API contract. | -| Sprint 21 | Graph Explorer v1 | src/StellaOps.SbomService/TASKS.md | BLOCKED (2025-10-27) | SBOM Service & Observability Guilds | SBOM-SERVICE-21-004 | Add metrics/traces/logs for SBOM projections. Requires projection pipeline from `SBOM-SERVICE-21-001`. | -| Sprint 21 | Graph Explorer v1 | src/StellaOps.Scheduler.WebService/TASKS.md | DONE (2025-10-26) | Scheduler WebService Guild | SCHED-WEB-21-002 | Expose overlay lag metrics and job completion hooks for Cartographer. | -| Sprint 21 | Graph Explorer v1 | src/StellaOps.Web/TASKS.md | BLOCKED (2025-10-27) | BE-Base Platform Guild | WEB-GRAPH-21-001 | Add gateway routes for graph APIs with scope enforcement and streaming. Upstream Graph API (`GRAPH-API-28-003`) and Authority scope work (`AUTH-VULN-24-001`) pending. | -| Sprint 21 | Graph Explorer v1 | src/StellaOps.Web/TASKS.md | BLOCKED (2025-10-27) | BE-Base Platform Guild | WEB-GRAPH-21-002 | Implement bbox/zoom/path validation and pagination for graph endpoints. Depends on core proxy routes. | -| Sprint 21 | Graph Explorer v1 | src/StellaOps.Web/TASKS.md | BLOCKED (2025-10-27) | BE-Base Platform & QA Guilds | WEB-GRAPH-21-003 | Map graph errors to `ERR_Graph_*` and support export streaming. Requires `WEB-GRAPH-21-001`. | -| Sprint 21 | Graph Explorer v1 | src/StellaOps.Web/TASKS.md | BLOCKED (2025-10-27) | BE-Base & Policy Guilds | WEB-GRAPH-21-004 | Wire Policy Engine simulation overlays into graph responses. Waiting on Graph routes and Policy overlay schema (`POLICY-ENGINE-30-002`). | -| Sprint 22 | Link-Not-Merge v1 | docs/TASKS.md | BLOCKED (2025-10-27) | Docs Guild | DOCS-LNM-22-001 | Publish advisories aggregation doc with observation/linkset philosophy. 
| -> Blocked by `CONCELIER-LNM-21-001..003`; draft doc exists but final alignment waits for schema/API delivery. -| Sprint 22 | Link-Not-Merge v1 | docs/TASKS.md | BLOCKED (2025-10-27) | Docs Guild | DOCS-LNM-22-002 | Publish VEX aggregation doc describing observation/linkset flow. | -> Blocked by `EXCITITOR-LNM-21-001..003`; draft doc staged pending observation/linkset implementation. -| Sprint 22 | Link-Not-Merge v1 | docs/TASKS.md | BLOCKED (2025-10-27) | Docs Guild | DOCS-LNM-22-005 | Document UI evidence panel with conflict badges/AOC drill-down. | -> Blocked by `UI-LNM-22-001..003`; need shipping UI to capture screenshots and finalize guidance. -| Sprint 22 | Link-Not-Merge v1 | ops/devops/TASKS.md | BLOCKED (2025-10-27) | DevOps Guild | DEVOPS-LNM-22-001 | Execute advisory observation/linkset migration/backfill and automation. | -| Sprint 22 | Link-Not-Merge v1 | ops/devops/TASKS.md | BLOCKED (2025-10-27) | DevOps Guild | DEVOPS-LNM-22-002 | Run VEX observation/linkset migration/backfill with monitoring/runbook. | -| Sprint 22 | Link-Not-Merge v1 | samples/TASKS.md | BLOCKED (2025-10-27) | Samples Guild | SAMPLES-LNM-22-001 | Add advisory observation/linkset fixtures with conflicts. | -| Sprint 22 | Link-Not-Merge v1 | samples/TASKS.md | BLOCKED (2025-10-27) | Samples Guild | SAMPLES-LNM-22-002 | Add VEX observation/linkset fixtures with status disagreements. | -| Sprint 22 | Link-Not-Merge v1 | src/StellaOps.Authority/TASKS.md | TODO | Authority Core Guild | AUTH-AOC-22-001 | Roll out new advisory/vex ingest/read scopes. | -| Sprint 22 | Link-Not-Merge v1 | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-LNM-22-001 | Implement advisory observation/linkset CLI commands with JSON/OSV export. | -| Sprint 22 | Link-Not-Merge v1 | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-LNM-22-002 | Implement VEX observation/linkset CLI commands. | -| Sprint 22 | Link-Not-Merge v1 | src/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-LNM-21-001 | Define immutable advisory observation schema with AOC metadata. | -| Sprint 22 | Link-Not-Merge v1 | src/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild, Data Science Guild | CONCELIER-LNM-21-002 | Implement advisory linkset builder with correlation signals/conflicts. | -| Sprint 22 | Link-Not-Merge v1 | src/StellaOps.Concelier.Merge/TASKS.md | TODO | BE-Merge | MERGE-LNM-21-002 | Deprecate merge service and enforce observation-only pipeline. | -| Sprint 22 | Link-Not-Merge v1 | src/StellaOps.Concelier.Storage.Mongo/TASKS.md | TODO | Concelier Storage Guild | CONCELIER-LNM-21-101 | Provision observations/linksets collections and indexes. | -| Sprint 22 | Link-Not-Merge v1 | src/StellaOps.Concelier.Storage.Mongo/TASKS.md | TODO | Concelier Storage & DevOps Guilds | CONCELIER-LNM-21-102 | Backfill legacy merged advisories into observations/linksets with rollback tooling. | -| Sprint 22 | Link-Not-Merge v1 | src/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-LNM-21-201 | Ship advisory observation read APIs with pagination/RBAC. | -| Sprint 22 | Link-Not-Merge v1 | src/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-LNM-21-202 | Implement advisory linkset read/export/evidence endpoints mapped to `ERR_AGG_*`. | -| Sprint 22 | Link-Not-Merge v1 | src/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-LNM-21-001 | Define immutable VEX observation model. 
| -| Sprint 22 | Link-Not-Merge v1 | src/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-LNM-21-002 | Build VEX linkset correlator with confidence/conflict recording. | -| Sprint 22 | Link-Not-Merge v1 | src/StellaOps.Excititor.Storage.Mongo/TASKS.md | TODO | Excititor Storage Guild | EXCITITOR-LNM-21-101 | Provision VEX observation/linkset collections and indexes. | -| Sprint 22 | Link-Not-Merge v1 | src/StellaOps.Excititor.Storage.Mongo/TASKS.md | TODO | Excititor Storage & DevOps Guilds | EXCITITOR-LNM-21-102 | Backfill legacy VEX data into observations/linksets with rollback scripts. | -| Sprint 22 | Link-Not-Merge v1 | src/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-LNM-21-201 | Expose VEX observation APIs with filters/pagination and RBAC. | -| Sprint 22 | Link-Not-Merge v1 | src/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-LNM-21-202 | Implement VEX linkset endpoints + exports with evidence payloads. | -| Sprint 22 | Link-Not-Merge v1 | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-40-001 | Update severity selection to handle multiple source severities per linkset. | -| Sprint 22 | Link-Not-Merge v1 | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild, Excititor Guild | POLICY-ENGINE-40-002 | Integrate VEX linkset conflicts into effective findings/explain traces. | -| Sprint 22 | Link-Not-Merge v1 | src/StellaOps.Scanner.WebService/TASKS.md | TODO | Scanner WebService Guild | SCANNER-LNM-21-001 | Update report/runtime payloads to consume linksets and surface source evidence. | -| Sprint 22 | Link-Not-Merge v1 | src/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-LNM-22-001 | Deliver Evidence panel with policy banner and source observations. | -| Sprint 22 | Link-Not-Merge v1 | src/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-LNM-22-003 | Add VEX evidence tab with conflict indicators and exports. | -| Sprint 22 | Link-Not-Merge v1 | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-LNM-21-001 | Surface advisory observation/linkset APIs through gateway with RBAC. | -| Sprint 22 | Link-Not-Merge v1 | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-LNM-21-002 | Expose VEX observation/linkset endpoints with export handling. | -| Sprint 23 | StellaOps Console | docs/TASKS.md | DONE (2025-10-28) | Docs Guild | DOCS-CONSOLE-23-011 | Update `/docs/install/docker.md` to include console image, compose/Helm/offline examples. | -| Sprint 23 | StellaOps Console | docs/TASKS.md | DONE (2025-10-28) | Docs Guild | DOCS-CONSOLE-23-012 | Publish `/docs/security/console-security.md` covering OIDC, scopes, CSP, evidence handling. | -| Sprint 23 | StellaOps Console | docs/TASKS.md | DONE (2025-10-28) | Docs Guild | DOCS-CONSOLE-23-013 | Write `/docs/observability/ui-telemetry.md` cataloguing metrics/logs/dashboards/alerts. | -| Sprint 23 | StellaOps Console | docs/TASKS.md | DONE (2025-10-28) | Docs Guild | DOCS-CONSOLE-23-014 | Maintain `/docs/cli-vs-ui-parity.md` matrix with CI drift detection guidance. | -| Sprint 23 | StellaOps Console | docs/TASKS.md | TODO | Docs Guild | DOCS-CONSOLE-23-015 | Produce `/docs/architecture/console.md` describing packages, data flow, SSE design. | -| Sprint 23 | StellaOps Console | docs/TASKS.md | DONE (2025-10-28) | Docs Guild | DOCS-CONSOLE-23-016 | Refresh `/docs/accessibility.md` with console keyboard flows, tokens, testing tools.
2025-10-28: Published guide covering keyboard matrix, screen-reader behaviour, colour tokens, testing workflow, offline guidance, and compliance checklist. | -| Sprint 23 | StellaOps Console | docs/TASKS.md | TODO | Docs Guild | DOCS-CONSOLE-23-017 | Create `/docs/examples/ui-tours.md` walkthroughs with annotated screenshots/GIFs. | -| Sprint 23 | StellaOps Console | docs/TASKS.md | TODO | Docs Guild | DOCS-CONSOLE-23-018 | Execute console security checklist and record Security Guild sign-off. | -| Sprint 23 | StellaOps Console | ops/deployment/TASKS.md | TODO | Deployment Guild | DOWNLOADS-CONSOLE-23-001 | Maintain signed downloads manifest pipeline feeding Console + docs parity checks. | -| Sprint 23 | StellaOps Console | ops/devops/TASKS.md | BLOCKED (2025-10-26) | DevOps Guild | DEVOPS-CONSOLE-23-001 | Stand up console CI pipeline (pnpm cache, lint, tests, Playwright, Lighthouse, offline runners). | -| Sprint 23 | StellaOps Console | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-CONSOLE-23-002 | Deliver `stella-console` container + Helm overlays with SBOM/provenance and offline packaging. | -| Sprint 23 | StellaOps Console | src/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-CONSOLE-23-001 | Register Console OIDC client with PKCE, scopes, short-lived tokens, and offline defaults. | -| Sprint 23 | StellaOps Console | src/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-CONSOLE-23-002 | Provide tenant catalog/user profile endpoints with audit logging and fresh-auth requirements. | -| Sprint 23 | StellaOps Console | src/StellaOps.Authority/TASKS.md | TODO | Authority Core & Docs Guild | AUTH-CONSOLE-23-003 | Update security docs/sample configs for Console flows, CSP, and session policies. | -| Sprint 23 | StellaOps Console | src/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-CONSOLE-23-001 | Surface `/console/advisories` aggregation views with per-source metadata and filters. | -| Sprint 23 | StellaOps Console | src/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-CONSOLE-23-002 | Provide advisory delta metrics API for dashboard + live status ticker. | -| Sprint 23 | StellaOps Console | src/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-CONSOLE-23-003 | Add search helpers for CVE/GHSA/PURL lookups returning evidence fragments. | -| Sprint 23 | StellaOps Console | src/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-CONSOLE-23-001 | Expose `/console/vex` aggregation endpoints with precedence and provenance. | -| Sprint 23 | StellaOps Console | src/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-CONSOLE-23-002 | Publish VEX override delta metrics feeding dashboard/status ticker. | -| Sprint 23 | StellaOps Console | src/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-CONSOLE-23-003 | Implement VEX search helpers for global search and explain drill-downs. | -| Sprint 23 | StellaOps Console | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild, Scheduler Guild | EXPORT-CONSOLE-23-001 | Implement evidence bundle/export generator with signed manifests and telemetry. | -| Sprint 23 | StellaOps Console | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-CONSOLE-23-001 | Optimize findings/explain APIs for Console filters, aggregation hints, and provenance traces. 
| -| Sprint 23 | StellaOps Console | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild, Product Ops | POLICY-CONSOLE-23-002 | Expose simulation diff + approval state metadata for policy workspace scenarios. | -| Sprint 23 | StellaOps Console | src/StellaOps.SbomService/TASKS.md | TODO | SBOM Service Guild | SBOM-CONSOLE-23-001 | Deliver Console SBOM catalog API with filters, evaluation metadata, and raw projections. | -| Sprint 23 | StellaOps Console | src/StellaOps.SbomService/TASKS.md | TODO | SBOM Service Guild | SBOM-CONSOLE-23-002 | Provide component lookup/neighborhood endpoints for global search and overlays. | -| Sprint 23 | StellaOps Console | src/StellaOps.Scheduler.WebService/TASKS.md | TODO | Scheduler WebService Guild | SCHED-CONSOLE-23-001 | Extend runs API with SSE progress, queue lag summaries, RBAC actions, and history pagination. | -| Sprint 23 | StellaOps Console | src/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker Guild | SCHED-WORKER-CONSOLE-23-201 | Stream run progress events with heartbeat/dedupe for Console SSE consumers. | -| Sprint 23 | StellaOps Console | src/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker Guild | SCHED-WORKER-CONSOLE-23-202 | Coordinate evidence bundle job queueing, status tracking, cancellation, and retention. | -| Sprint 23 | StellaOps Console | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-CONSOLE-23-001 | Ship `/console/dashboard` + `/console/filters` aggregates with tenant scoping and deterministic totals. | -| Sprint 23 | StellaOps Console | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild, Scheduler Guild | WEB-CONSOLE-23-002 | Provide `/console/status` polling and `/console/runs/{id}/stream` SSE proxy with heartbeat/backoff. | -| Sprint 23 | StellaOps Console | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild, Policy Guild | WEB-CONSOLE-23-003 | Expose `/console/exports` orchestration for evidence bundles, CSV/JSON streaming, manifest retrieval. | -| Sprint 23 | StellaOps Console | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-CONSOLE-23-004 | Implement `/console/search` fan-out router for CVE/GHSA/PURL/SBOM lookups with caching and RBAC. | -| Sprint 23 | StellaOps Console | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild, DevOps Guild | WEB-CONSOLE-23-005 | Serve `/console/downloads` manifest with signed image metadata and offline guidance. | -| Sprint 24 | Graph & Vuln Explorer v1 | src/StellaOps.Authority/TASKS.md | TODO | Authority Core Guild | AUTH-VULN-24-001 | Extend scopes (`vuln:read`) and signed permalinks. | -> 2025-10-27: Scope enforcement spike paused; no production change landed. -| Sprint 24 | Graph & Vuln Explorer v1 | src/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-GRAPH-24-001 | Surface raw advisory observations/linksets for overlay services (no derived aggregation in ingestion). | -> 2025-10-27: Prototype not merged (query layer + CLI consumer under review); resetting to TODO. -| Sprint 24 | Graph & Vuln Explorer v1 | src/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-GRAPH-24-001 | Surface raw VEX statements/linksets for overlay services (no suppression/precedence logic here). | -| Sprint 24 | Graph & Vuln Explorer v1 | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-60-001 | Maintain Redis effective decision maps for overlays. 
| -| Sprint 24 | Graph & Vuln Explorer v1 | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-60-002 | Provide simulation bridge for graph what-if APIs. | -| Sprint 24 | Graph & Vuln Explorer v1 | src/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-GRAPH-24-001 | Build Graph Explorer canvas with virtualization. | -| Sprint 24 | Graph & Vuln Explorer v1 | src/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-GRAPH-24-002 | Implement overlays (Policy/Evidence/License/Exposure). | -| Sprint 25 | Exceptions v1 | docs/TASKS.md | TODO | Docs Guild | DOCS-EXC-25-001 | Document exception governance concepts/workflow. | -| Sprint 25 | Exceptions v1 | docs/TASKS.md | TODO | Docs Guild | DOCS-EXC-25-002 | Document approvals routing / MFA requirements. | -| Sprint 25 | Exceptions v1 | docs/TASKS.md | TODO | Docs Guild | DOCS-EXC-25-003 | Publish API documentation for exceptions endpoints. | -| Sprint 25 | Exceptions v1 | docs/TASKS.md | DONE (2025-10-27) | Docs Guild | DOCS-EXC-25-004 | Document policy exception effects + simulation. | -| Sprint 25 | Exceptions v1 | docs/TASKS.md | TODO | Docs Guild | DOCS-EXC-25-005 | Document UI exception center + badges. | -| Sprint 25 | Exceptions v1 | docs/TASKS.md | TODO | Docs Guild | DOCS-EXC-25-006 | Update CLI docs for exception commands. | -| Sprint 25 | Exceptions v1 | docs/TASKS.md | TODO | Docs Guild | DOCS-EXC-25-007 | Write migration guide for governed exceptions. | -| Sprint 25 | Exceptions v1 | src/StellaOps.Authority/TASKS.md | TODO | Authority Core Guild | AUTH-EXC-25-001 | Introduce exception scopes and routing matrix with MFA. | -| Sprint 25 | Exceptions v1 | src/StellaOps.Authority/TASKS.md | TODO | Authority Core & Docs Guild | AUTH-EXC-25-002 | Update docs/config samples for exception governance. | -| Sprint 25 | Exceptions v1 | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-EXC-25-001 | Implement CLI exception workflow commands. | -| Sprint 25 | Exceptions v1 | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-EXC-25-002 | Extend policy simulate with exception overrides. | -| Sprint 25 | Exceptions v1 | src/StellaOps.Policy.Engine/TASKS.md | DONE (2025-10-27) | Policy Guild | POLICY-ENGINE-70-001 | Add exception evaluation layer with specificity + effects. | -| Sprint 25 | Exceptions v1 | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-70-002 | Create exception collections/bindings storage + repos. | -| Sprint 25 | Exceptions v1 | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-70-003 | Implement Redis exception cache + invalidation. | -| Sprint 25 | Exceptions v1 | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-70-004 | Add metrics/tracing/logging for exception application. | -| Sprint 25 | Exceptions v1 | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-70-005 | Hook workers/events for activation/expiry. | -| Sprint 25 | Exceptions v1 | src/StellaOps.Policy/TASKS.md | DONE (2025-10-27) | Policy Guild | POLICY-EXC-25-001 | Extend SPL schema to reference exception effects and routing. | -| Sprint 25 | Exceptions v1 | src/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker Guild | SCHED-WORKER-25-101 | Implement exception lifecycle worker for activation/expiry. | -| Sprint 25 | Exceptions v1 | src/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker Guild | SCHED-WORKER-25-102 | Add expiring notification job & metrics. 
| -| Sprint 25 | Exceptions v1 | src/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-EXC-25-001 | Deliver Exception Center (list/kanban) with workflows. | -| Sprint 25 | Exceptions v1 | src/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-EXC-25-002 | Build exception creation wizard with scope/timebox guardrails. | -| Sprint 25 | Exceptions v1 | src/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-EXC-25-003 | Add inline exception drafting/proposing from explorers. | -| Sprint 25 | Exceptions v1 | src/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-EXC-25-004 | Surface badges/countdowns/explain integration. | -| Sprint 25 | Exceptions v1 | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-EXC-25-001 | Ship exception CRUD + workflow API endpoints. | -| Sprint 25 | Exceptions v1 | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-EXC-25-002 | Extend policy endpoints to include exception metadata. | -| Sprint 25 | Exceptions v1 | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-EXC-25-003 | Emit exception events/notifications with rate limits. | -| Sprint 26 | Reachability v1 | docs/TASKS.md | TODO | Docs Guild | DOCS-SIG-26-001 | Document reachability concepts and scoring. | -| Sprint 26 | Reachability v1 | docs/TASKS.md | TODO | Docs Guild | DOCS-SIG-26-002 | Document callgraph formats. | -| Sprint 26 | Reachability v1 | docs/TASKS.md | TODO | Docs Guild | DOCS-SIG-26-003 | Document runtime facts ingestion. | -| Sprint 26 | Reachability v1 | docs/TASKS.md | TODO | Docs Guild | DOCS-SIG-26-004 | Document policy weighting for signals. | -| Sprint 26 | Reachability v1 | docs/TASKS.md | TODO | Docs Guild | DOCS-SIG-26-005 | Document UI overlays/timelines. | -| Sprint 26 | Reachability v1 | docs/TASKS.md | TODO | Docs Guild | DOCS-SIG-26-006 | Document CLI reachability commands. | -| Sprint 26 | Reachability v1 | docs/TASKS.md | TODO | Docs Guild | DOCS-SIG-26-007 | Publish API docs for signals endpoints. | -| Sprint 26 | Reachability v1 | docs/TASKS.md | TODO | Docs Guild | DOCS-SIG-26-008 | Write migration guide for enabling reachability. | -| Sprint 26 | Reachability v1 | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-SIG-26-001 | Provision pipelines/deployments for Signals service. | -| Sprint 26 | Reachability v1 | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-SIG-26-002 | Add dashboards/alerts for reachability metrics. | -| Sprint 26 | Reachability v1 | src/StellaOps.Authority/TASKS.md | TODO | Authority Core Guild | AUTH-SIG-26-001 | Add signals scopes/roles + AOC requirements. | -| Sprint 26 | Reachability v1 | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-SIG-26-001 | Implement reachability CLI commands (upload/list/explain). | -| Sprint 26 | Reachability v1 | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-SIG-26-002 | Add reachability overrides to policy simulate. | -| Sprint 26 | Reachability v1 | src/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-SIG-26-001 | Expose advisory symbol metadata for signals scoring. | -| Sprint 26 | Reachability v1 | src/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-SIG-26-001 | Surface vendor exploitability hints to Signals. | -| Sprint 26 | Reachability v1 | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-80-001 | Integrate reachability inputs into policy evaluation and explainers. 
| -| Sprint 26 | Reachability v1 | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-80-002 | Optimize reachability fact retrieval + cache. | -| Sprint 26 | Reachability v1 | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-80-003 | Update SPL compiler for reachability predicates. | -| Sprint 26 | Reachability v1 | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-80-004 | Emit reachability metrics/traces. | -| Sprint 26 | Reachability v1 | src/StellaOps.Policy/TASKS.md | TODO | Policy Guild | POLICY-SPL-24-001 | Extend SPL schema with reachability predicates/actions. | -| Sprint 26 | Reachability v1 | src/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker Guild | SCHED-WORKER-26-201 | Implement reachability joiner worker. | -| Sprint 26 | Reachability v1 | src/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker Guild | SCHED-WORKER-26-202 | Implement staleness monitor + notifications. | -| Sprint 26 | Reachability v1 | src/StellaOps.Signals/TASKS.md | BLOCKED (2025-10-27) | Signals Guild, Authority Guild | SIGNALS-24-001 | Stand up Signals API skeleton with RBAC + health checks. Host scaffold ready, waiting on `AUTH-SIG-26-001` to finalize scope issuance and tenant enforcement. | -| Sprint 26 | Reachability v1 | src/StellaOps.Signals/TASKS.md | BLOCKED (2025-10-27) | Signals Guild | SIGNALS-24-002 | Implement callgraph ingestion/normalization pipeline. Waiting on SIGNALS-24-001 skeleton deployment. | -| Sprint 26 | Reachability v1 | src/StellaOps.Signals/TASKS.md | BLOCKED (2025-10-27) | Signals Guild | SIGNALS-24-003 | Ingest runtime facts and persist context data with AOC provenance. Depends on SIGNALS-24-001 base host. | -| Sprint 26 | Reachability v1 | src/StellaOps.Signals/TASKS.md | BLOCKED (2025-10-27) | Signals Guild | SIGNALS-24-004 | Deliver reachability scoring engine writing reachability facts. Blocked until ingestion pipelines unblock. | -| Sprint 26 | Reachability v1 | src/StellaOps.Signals/TASKS.md | BLOCKED (2025-10-27) | Signals Guild | SIGNALS-24-005 | Implement caches + signals events. Downstream of SIGNALS-24-004. | -| Sprint 26 | Reachability v1 | src/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-SIG-26-001 | Add reachability columns/badges to Vulnerability Explorer. | -| Sprint 26 | Reachability v1 | src/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-SIG-26-002 | Enhance Why drawer with call path/timeline. | -| Sprint 26 | Reachability v1 | src/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-SIG-26-003 | Add reachability overlay/time slider to SBOM Graph. | -| Sprint 26 | Reachability v1 | src/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-SIG-26-004 | Build Reachability Center + missing sensor view. | -| Sprint 26 | Reachability v1 | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-SIG-26-001 | Expose signals proxy endpoints with pagination and RBAC. | -| Sprint 26 | Reachability v1 | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-SIG-26-002 | Join reachability data into policy/vuln responses. | -| Sprint 26 | Reachability v1 | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-SIG-26-003 | Support reachability overrides in simulate APIs. | -| Sprint 27 | Policy Studio | docs/TASKS.md | BLOCKED (2025-10-27) | Docs & Policy Guilds | DOCS-POLICY-27-001 | Publish `/docs/policy/studio-overview.md` with lifecycle + roles. | -> Blocked by `REGISTRY-API-27-001` and `POLICY-ENGINE-27-001`; revisit once spec and compile enrichments land. 
-| Sprint 27 | Policy Studio | docs/TASKS.md | BLOCKED (2025-10-27) | Docs & Console Guilds | DOCS-POLICY-27-002 | Write `/docs/policy/authoring.md` with templates/snippets/lint rules. | -> Blocked by `CONSOLE-STUDIO-27-001` pending; waiting on Studio authoring UX. -| Sprint 27 | Policy Studio | docs/TASKS.md | BLOCKED (2025-10-27) | Docs & Policy Registry Guilds | DOCS-POLICY-27-003 | Document `/docs/policy/versioning-and-publishing.md`. | -> Blocked by `REGISTRY-API-27-007` pending publish/sign pipeline. -| Sprint 27 | Policy Studio | docs/TASKS.md | BLOCKED (2025-10-27) | Docs & Scheduler Guilds | DOCS-POLICY-27-004 | Publish `/docs/policy/simulation.md` with quick vs batch guidance. | -> Blocked by `REGISTRY-API-27-005`/`SCHED-WORKER-27-301` pending batch simulation. -| Sprint 27 | Policy Studio | docs/TASKS.md | BLOCKED (2025-10-27) | Docs & Product Ops | DOCS-POLICY-27-005 | Author `/docs/policy/review-and-approval.md`. | -> Blocked by `REGISTRY-API-27-006` review workflow outstanding. -| Sprint 27 | Policy Studio | docs/TASKS.md | BLOCKED (2025-10-27) | Docs & Policy Guilds | DOCS-POLICY-27-006 | Publish `/docs/policy/promotion.md` covering canary + rollback. | -> Blocked by `REGISTRY-API-27-008` promotion APIs not ready. -| Sprint 27 | Policy Studio | docs/TASKS.md | BLOCKED (2025-10-27) | Docs & DevEx/CLI Guilds | DOCS-POLICY-27-007 | Update `/docs/policy/cli.md` with new commands + JSON schemas. | -> Blocked by `CLI-POLICY-27-001..004` CLI commands missing. -| Sprint 27 | Policy Studio | docs/TASKS.md | BLOCKED (2025-10-27) | Docs & Policy Registry Guilds | DOCS-POLICY-27-008 | Publish `/docs/policy/api.md` aligning with Registry OpenAPI. | -> Blocked by Registry OpenAPI (`REGISTRY-API-27-001..008`) incomplete. -| Sprint 27 | Policy Studio | docs/TASKS.md | BLOCKED (2025-10-27) | Docs & Security Guilds | DOCS-POLICY-27-009 | Create `/docs/security/policy-attestations.md`. | -> Blocked by `AUTH-POLICY-27-002` signing integration pending. -| Sprint 27 | Policy Studio | docs/TASKS.md | BLOCKED (2025-10-27) | Docs & Architecture Guilds | DOCS-POLICY-27-010 | Write `/docs/architecture/policy-registry.md`. | -> Blocked by `REGISTRY-API-27-001` & `SCHED-WORKER-27-301` not delivered. -| Sprint 27 | Policy Studio | docs/TASKS.md | BLOCKED (2025-10-27) | Docs & Observability Guilds | DOCS-POLICY-27-011 | Publish `/docs/observability/policy-telemetry.md`. | -> Blocked by `DEVOPS-POLICY-27-004` observability work outstanding. -| Sprint 27 | Policy Studio | docs/TASKS.md | BLOCKED (2025-10-27) | Docs & Ops Guilds | DOCS-POLICY-27-012 | Write `/docs/runbooks/policy-incident.md`. | -> Blocked by `DEPLOY-POLICY-27-002` ops playbooks pending. -| Sprint 27 | Policy Studio | docs/TASKS.md | BLOCKED (2025-10-27) | Docs & Policy Guilds | DOCS-POLICY-27-013 | Update `/docs/examples/policy-templates.md`. | -> Blocked by `CONSOLE-STUDIO-27-001`/`REGISTRY-API-27-002` templates missing. -| Sprint 27 | Policy Studio | docs/TASKS.md | BLOCKED (2025-10-27) | Docs & Policy Registry Guilds | DOCS-POLICY-27-014 | Refresh `/docs/aoc/aoc-guardrails.md` with Studio guardrails. | -> Blocked by `REGISTRY-API-27-003` & `WEB-POLICY-27-001` guardrails not implemented. -| Sprint 27 | Policy Studio | ops/deployment/TASKS.md | TODO | Deployment & Policy Registry Guilds | DEPLOY-POLICY-27-001 | Create Helm/Compose overlays for Policy Registry + workers with signing config. 
| -| Sprint 27 | Policy Studio | ops/deployment/TASKS.md | TODO | Deployment & Policy Guilds | DEPLOY-POLICY-27-002 | Document policy rollout/rollback playbooks in runbook. | -| Sprint 27 | Policy Studio | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-POLICY-27-001 | Add CI stage for policy lint/compile/test + secret scanning and artifacts. | -| Sprint 27 | Policy Studio | ops/devops/TASKS.md | TODO | DevOps & Policy Registry Guilds | DEVOPS-POLICY-27-002 | Provide optional batch simulation CI job with drift gating + PR comment. | -| Sprint 27 | Policy Studio | ops/devops/TASKS.md | TODO | DevOps & Security Guilds | DEVOPS-POLICY-27-003 | Manage signing keys + attestation verification in pipelines. | -| Sprint 27 | Policy Studio | ops/devops/TASKS.md | TODO | DevOps & Observability Guilds | DEVOPS-POLICY-27-004 | Build dashboards/alerts for compile latency, queue depth, approvals, promotions. | -| Sprint 27 | Policy Studio | src/StellaOps.Authority/TASKS.md | TODO | Authority Core Guild | AUTH-POLICY-27-001 | Define Policy Studio roles/scopes for author/review/approve/operate/audit. | -| Sprint 27 | Policy Studio | src/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guilds | AUTH-POLICY-27-002 | Wire signing service + fresh-auth enforcement for publish/promote. | -| Sprint 27 | Policy Studio | src/StellaOps.Authority/TASKS.md | TODO | Authority Core & Docs Guild | AUTH-POLICY-27-003 | Update authority configuration/docs for Policy Studio roles & signing. | -| Sprint 27 | Policy Studio | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-POLICY-27-001 | Implement policy workspace CLI commands (init, lint, compile, test). | -| Sprint 27 | Policy Studio | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-POLICY-27-002 | Add version bump, submit, review/approve CLI workflow commands. | -| Sprint 27 | Policy Studio | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-POLICY-27-003 | Extend simulate command for quick/batch runs, manifests, CI reports. | -| Sprint 27 | Policy Studio | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-POLICY-27-004 | Implement publish/promote/rollback/sign CLI lifecycle commands. | -| Sprint 27 | Policy Studio | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI & Docs Guilds | CLI-POLICY-27-005 | Update CLI docs/reference for Policy Studio commands and schemas. | -| Sprint 27 | Policy Studio | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-27-001 | Return rule coverage, symbol table, docs, hashes from compile endpoint. | -| Sprint 27 | Policy Studio | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-27-002 | Enhance simulate outputs with heatmap, explain traces, delta summaries. | -| Sprint 27 | Policy Studio | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-27-003 | Enforce complexity/time limits with diagnostics. | -| Sprint 27 | Policy Studio | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-27-004 | Update tests/fixtures for coverage, symbol table, explain, complexity. | -| Sprint 27 | Policy Studio | src/StellaOps.Policy.Registry/TASKS.md | TODO | Policy Registry Guild | REGISTRY-API-27-001 | Define Policy Registry OpenAPI spec for workspaces, versions, reviews, simulations, promotions, attestations. | -| Sprint 27 | Policy Studio | src/StellaOps.Policy.Registry/TASKS.md | TODO | Policy Registry Guild | REGISTRY-API-27-002 | Implement workspace storage + CRUD with tenant retention policies. 
| -| Sprint 27 | Policy Studio | src/StellaOps.Policy.Registry/TASKS.md | TODO | Policy Registry Guild | REGISTRY-API-27-003 | Integrate compile pipeline storing diagnostics, symbol tables, complexity metrics. | -| Sprint 27 | Policy Studio | src/StellaOps.Policy.Registry/TASKS.md | TODO | Policy Registry Guild | REGISTRY-API-27-004 | Deliver quick simulation API with limits and deterministic outputs. | -| Sprint 27 | Policy Studio | src/StellaOps.Policy.Registry/TASKS.md | TODO | Policy Registry & Scheduler Guilds | REGISTRY-API-27-005 | Build batch simulation orchestration, reduction, and evidence bundle storage. | -| Sprint 27 | Policy Studio | src/StellaOps.Policy.Registry/TASKS.md | TODO | Policy Registry Guild | REGISTRY-API-27-006 | Implement review workflow with comments, required approvers, webhooks. | -| Sprint 27 | Policy Studio | src/StellaOps.Policy.Registry/TASKS.md | TODO | Policy Registry & Security Guilds | REGISTRY-API-27-007 | Ship publish/sign pipeline with attestations, immutable versions. | -| Sprint 27 | Policy Studio | src/StellaOps.Policy.Registry/TASKS.md | TODO | Policy Registry Guild | REGISTRY-API-27-008 | Implement promotion/canary bindings per tenant/environment with rollback. | -| Sprint 27 | Policy Studio | src/StellaOps.Policy.Registry/TASKS.md | TODO | Policy Registry & Observability Guilds | REGISTRY-API-27-009 | Instrument metrics/logs/traces for compile, simulation, approval latency. | -| Sprint 27 | Policy Studio | src/StellaOps.Policy.Registry/TASKS.md | TODO | Policy Registry & QA Guilds | REGISTRY-API-27-010 | Build unit/integration/load test suites and seeded fixtures. | -| Sprint 27 | Policy Studio | src/StellaOps.Scheduler.WebService/TASKS.md | TODO | Scheduler WebService Guild | SCHED-CONSOLE-27-001 | Provide policy simulation orchestration endpoints with SSE + RBAC. | -| Sprint 27 | Policy Studio | src/StellaOps.Scheduler.WebService/TASKS.md | TODO | Scheduler WebService & Observability Guilds | SCHED-CONSOLE-27-002 | Emit policy simulation telemetry endpoints/metrics + webhooks. | -| Sprint 27 | Policy Studio | src/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker Guild | SCHED-WORKER-27-301 | Implement batch simulation worker sharding SBOMs with retries/backoff. | -| Sprint 27 | Policy Studio | src/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker Guild | SCHED-WORKER-27-302 | Build reducer job aggregating shard outputs into manifests with checksums. | -| Sprint 27 | Policy Studio | src/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker & Security Guilds | SCHED-WORKER-27-303 | Enforce tenant isolation/attestation integration and secret scanning for jobs. | -| Sprint 27 | Policy Studio | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-POLICY-27-001 | Proxy Policy Registry APIs with tenant scoping, RBAC, evidence streaming. | -| Sprint 27 | Policy Studio | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-POLICY-27-002 | Implement review lifecycle routes with audit logs and webhooks. | -| Sprint 27 | Policy Studio | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform & Scheduler Guilds | WEB-POLICY-27-003 | Expose quick/batch simulation endpoints with SSE progress + manifests. | -| Sprint 27 | Policy Studio | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform & Security Guilds | WEB-POLICY-27-004 | Add publish/promote/rollback endpoints with canary + signing enforcement. 
| -| Sprint 27 | Policy Studio | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform & Observability Guilds | WEB-POLICY-27-005 | Instrument Policy Studio metrics/logs for dashboards. | -| Sprint 28 | Graph Explorer | docs/TASKS.md | TODO | Docs & SBOM Guilds | DOCS-GRAPH-28-001 | Publish `/docs/sbom/graph-explorer-overview.md`. | -| Sprint 28 | Graph Explorer | docs/TASKS.md | TODO | Docs & Console Guilds | DOCS-GRAPH-28-002 | Write `/docs/sbom/graph-using-the-console.md` with walkthrough + accessibility tips. | -| Sprint 28 | Graph Explorer | docs/TASKS.md | TODO | Docs & Graph API Guilds | DOCS-GRAPH-28-003 | Document `/docs/sbom/graph-query-language.md` (JSON schema, cost rules). | -| Sprint 28 | Graph Explorer | docs/TASKS.md | TODO | Docs & Graph API Guilds | DOCS-GRAPH-28-004 | Publish `/docs/sbom/graph-api.md` endpoints + streaming guidance. | -| Sprint 28 | Graph Explorer | docs/TASKS.md | TODO | Docs & CLI Guilds | DOCS-GRAPH-28-005 | Produce `/docs/sbom/graph-cli.md` command reference. | -| Sprint 28 | Graph Explorer | docs/TASKS.md | TODO | Docs & Policy Guilds | DOCS-GRAPH-28-006 | Publish `/docs/policy/graph-overlays.md`. | -| Sprint 28 | Graph Explorer | docs/TASKS.md | TODO | Docs & Excititor Guilds | DOCS-GRAPH-28-007 | Document `/docs/vex/graph-integration.md`. | -| Sprint 28 | Graph Explorer | docs/TASKS.md | TODO | Docs & Concelier Guilds | DOCS-GRAPH-28-008 | Document `/docs/advisories/graph-integration.md`. | -| Sprint 28 | Graph Explorer | docs/TASKS.md | TODO | Docs & Architecture Guilds | DOCS-GRAPH-28-009 | Author `/docs/architecture/graph-services.md`. | -| Sprint 28 | Graph Explorer | docs/TASKS.md | TODO | Docs & Observability Guilds | DOCS-GRAPH-28-010 | Publish `/docs/observability/graph-telemetry.md`. | -| Sprint 28 | Graph Explorer | docs/TASKS.md | TODO | Docs & Ops Guilds | DOCS-GRAPH-28-011 | Write `/docs/runbooks/graph-incidents.md`. | -| Sprint 28 | Graph Explorer | docs/TASKS.md | TODO | Docs & Security Guilds | DOCS-GRAPH-28-012 | Create `/docs/security/graph-rbac.md`. | -| Sprint 28 | Graph Explorer | ops/deployment/TASKS.md | TODO | Deployment Guild | DEPLOY-GRAPH-28-001 | Provide deployment/offline instructions for Graph Indexer/API, including cache seeds. | -| Sprint 28 | Graph Explorer | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-GRAPH-28-001 | Configure load/perf tests, query budget alerts, and CI smoke for graph APIs. | -| Sprint 28 | Graph Explorer | ops/devops/TASKS.md | TODO | DevOps & Security Guilds | DEVOPS-GRAPH-28-002 | Implement caching/backpressure limits, rate limiting configs, and runaway query kill switches. | -| Sprint 28 | Graph Explorer | ops/devops/TASKS.md | TODO | DevOps & Observability Guilds | DEVOPS-GRAPH-28-003 | Build dashboards/alerts for tile latency, query denials, memory pressure. | -| Sprint 28 | Graph Explorer | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-GRAPH-28-001 | Ship `stella sbom graph` subcommands (search, query, paths, diff, impacted, export) with JSON output + exit codes. | -| Sprint 28 | Graph Explorer | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-GRAPH-28-002 | Add saved query management + deep link helpers to CLI. | -| Sprint 28 | Graph Explorer | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-GRAPH-28-003 | Update CLI docs/examples for Graph Explorer commands. | -| Sprint 28 | Graph Explorer | src/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-GRAPH-24-101 | Deliver advisory summary API feeding graph tooltips. 
| -| Sprint 28 | Graph Explorer | src/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-GRAPH-28-102 | Add batch fetch for advisory observations/linksets keyed by component sets to feed Graph overlay tooltips efficiently. | -| Sprint 28 | Graph Explorer | src/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | WEB-LNM-21-001 | Provide advisory observation endpoints optimized for graph overlays. | -| Sprint 28 | Graph Explorer | src/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-GRAPH-24-101 | Provide VEX summary API for Graph Explorer inspector overlays. | -| Sprint 28 | Graph Explorer | src/StellaOps.Graph.Api/TASKS.md | TODO | Graph API Guild | GRAPH-API-28-001 | Publish Graph API OpenAPI + JSON schemas for queries/tiles. | -| Sprint 28 | Graph Explorer | src/StellaOps.Graph.Api/TASKS.md | TODO | Graph API Guild | GRAPH-API-28-002 | Implement `/graph/search` with caching and RBAC. | -| Sprint 28 | Graph Explorer | src/StellaOps.Graph.Api/TASKS.md | TODO | Graph API Guild | GRAPH-API-28-003 | Build query planner + streaming tile pipeline with budgets. | -| Sprint 28 | Graph Explorer | src/StellaOps.Graph.Api/TASKS.md | TODO | Graph API Guild | GRAPH-API-28-004 | Deliver `/graph/paths` with depth limits and policy overlay support. | -| Sprint 28 | Graph Explorer | src/StellaOps.Graph.Api/TASKS.md | TODO | Graph API Guild | GRAPH-API-28-005 | Implement `/graph/diff` streaming adds/removes/changes for SBOM snapshots. | -| Sprint 28 | Graph Explorer | src/StellaOps.Graph.Api/TASKS.md | TODO | Graph API Guild | GRAPH-API-28-006 | Compose advisory/VEX/policy overlays with caching + explain sampling. | -| Sprint 28 | Graph Explorer | src/StellaOps.Graph.Api/TASKS.md | TODO | Graph API Guild | GRAPH-API-28-007 | Provide export jobs (GraphML/CSV/NDJSON/PNG/SVG) with manifests. | -| Sprint 28 | Graph Explorer | src/StellaOps.Graph.Api/TASKS.md | TODO | Graph API & Authority Guilds | GRAPH-API-28-008 | Enforce RBAC scopes, tenant headers, audit logging, rate limits. | -| Sprint 28 | Graph Explorer | src/StellaOps.Graph.Api/TASKS.md | TODO | Graph API & Observability Guilds | GRAPH-API-28-009 | Instrument metrics/logs/traces; publish dashboards. | -| Sprint 28 | Graph Explorer | src/StellaOps.Graph.Api/TASKS.md | TODO | Graph API & QA Guilds | GRAPH-API-28-010 | Build unit/integration/load tests with synthetic datasets. | -| Sprint 28 | Graph Explorer | src/StellaOps.Graph.Api/TASKS.md | TODO | Graph API & DevOps Guilds | GRAPH-API-28-011 | Ship deployment/offline manifests + gateway integration docs. | -| Sprint 28 | Graph Explorer | src/StellaOps.Graph.Indexer/TASKS.md | TODO | Graph Indexer Guild | GRAPH-INDEX-28-001 | Define node/edge schemas, identity rules, and fixtures for graph ingestion. | -| Sprint 28 | Graph Explorer | src/StellaOps.Graph.Indexer/TASKS.md | TODO | Graph Indexer Guild | GRAPH-INDEX-28-002 | Implement SBOM ingest consumer generating artifact/package/file nodes & edges. | -| Sprint 28 | Graph Explorer | src/StellaOps.Graph.Indexer/TASKS.md | TODO | Graph Indexer Guild | GRAPH-INDEX-28-003 | Serve advisory overlay tiles from Concelier linksets (no mutation of raw node/edge stores). | -| Sprint 28 | Graph Explorer | src/StellaOps.Graph.Indexer/TASKS.md | TODO | Graph Indexer Guild | GRAPH-INDEX-28-004 | Integrate VEX statements for `vex_exempts` edges with precedence metadata. 
| -| Sprint 28 | Graph Explorer | src/StellaOps.Graph.Indexer/TASKS.md | TODO | Graph Indexer & Policy Guilds | GRAPH-INDEX-28-005 | Hydrate policy overlay nodes/edges referencing determinations + explains. | -| Sprint 28 | Graph Explorer | src/StellaOps.Graph.Indexer/TASKS.md | TODO | Graph Indexer Guild | GRAPH-INDEX-28-006 | Produce graph snapshots per SBOM with lineage for diff jobs. | -| Sprint 28 | Graph Explorer | src/StellaOps.Graph.Indexer/TASKS.md | TODO | Graph Indexer & Observability Guilds | GRAPH-INDEX-28-007 | Run clustering/centrality background jobs and persist cluster ids. | -| Sprint 28 | Graph Explorer | src/StellaOps.Graph.Indexer/TASKS.md | TODO | Graph Indexer Guild | GRAPH-INDEX-28-008 | Build incremental/backfill pipeline with change streams, retries, backlog metrics. | -| Sprint 28 | Graph Explorer | src/StellaOps.Graph.Indexer/TASKS.md | TODO | Graph Indexer & QA Guilds | GRAPH-INDEX-28-009 | Extend tests/perf fixtures ensuring determinism on large graphs. | -| Sprint 28 | Graph Explorer | src/StellaOps.Graph.Indexer/TASKS.md | TODO | Graph Indexer & DevOps Guilds | GRAPH-INDEX-28-010 | Provide deployment/offline artifacts and docs for Graph Indexer. | -| Sprint 28 | Graph Explorer | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-30-001 | Finalize graph overlay contract + projection API. | -| Sprint 28 | Graph Explorer | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-30-002 | Implement simulation overlay bridge for Graph Explorer queries. | -| Sprint 28 | Graph Explorer | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy & Scheduler Guilds | POLICY-ENGINE-30-003 | Emit change events for effective findings supporting graph overlays. | -| Sprint 28 | Graph Explorer | src/StellaOps.Scheduler.WebService/TASKS.md | DOING (2025-10-26) | Scheduler WebService Guild, Scheduler Storage Guild | SCHED-WEB-21-004 | Persist graph jobs + emit completion events/webhook. | -| Sprint 28 | Graph Explorer | src/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker Guild | SCHED-WORKER-21-201 | Run graph build worker for SBOM snapshots with retries/backoff. | -| Sprint 28 | Graph Explorer | src/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker Guild | SCHED-WORKER-21-202 | Execute overlay refresh worker subscribing to change events. | -| Sprint 28 | Graph Explorer | src/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker & Observability Guilds | SCHED-WORKER-21-203 | Emit metrics/logs for graph build/overlay jobs. | -| Sprint 28 | Graph Explorer | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-GRAPH-24-001 | Route `/graph/*` APIs through gateway with tenant scoping and RBAC. | -| Sprint 28 | Graph Explorer | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-GRAPH-24-002 | Maintain overlay proxy routes to dedicated services (Policy/Vuln API), ensuring caching + RBAC only. | -| Sprint 28 | Graph Explorer | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform & Observability Guilds | WEB-GRAPH-24-004 | Add Graph Explorer telemetry endpoints and metrics aggregation. | -| Sprint 29 | Vulnerability Explorer | docs/TASKS.md | TODO | Docs Guild | DOCS-VULN-29-001 | Publish `/docs/vuln/explorer-overview.md`. | -| Sprint 29 | Vulnerability Explorer | docs/TASKS.md | TODO | Docs & Console Guilds | DOCS-VULN-29-002 | Write `/docs/vuln/explorer-using-console.md`. 
| -| Sprint 29 | Vulnerability Explorer | docs/TASKS.md | TODO | Docs Guild | DOCS-VULN-29-003 | Author `/docs/vuln/explorer-api.md`. | -| Sprint 29 | Vulnerability Explorer | docs/TASKS.md | TODO | Docs Guild | DOCS-VULN-29-004 | Publish `/docs/vuln/explorer-cli.md`. | -| Sprint 29 | Vulnerability Explorer | docs/TASKS.md | TODO | Docs & Ledger Guilds | DOCS-VULN-29-005 | Document Findings Ledger (`/docs/vuln/findings-ledger.md`). | -| Sprint 29 | Vulnerability Explorer | docs/TASKS.md | TODO | Docs & Policy Guilds | DOCS-VULN-29-006 | Update `/docs/policy/vuln-determinations.md`. | -| Sprint 29 | Vulnerability Explorer | docs/TASKS.md | TODO | Docs & Excititor Guilds | DOCS-VULN-29-007 | Publish `/docs/vex/explorer-integration.md`. | -| Sprint 29 | Vulnerability Explorer | docs/TASKS.md | TODO | Docs & Concelier Guilds | DOCS-VULN-29-008 | Publish `/docs/advisories/explorer-integration.md`. | -| Sprint 29 | Vulnerability Explorer | docs/TASKS.md | TODO | Docs & SBOM Guilds | DOCS-VULN-29-009 | Publish `/docs/sbom/vuln-resolution.md`. | -| Sprint 29 | Vulnerability Explorer | docs/TASKS.md | TODO | Docs & Observability Guilds | DOCS-VULN-29-010 | Publish `/docs/observability/vuln-telemetry.md`. | -| Sprint 29 | Vulnerability Explorer | docs/TASKS.md | TODO | Docs & Security Guilds | DOCS-VULN-29-011 | Publish `/docs/security/vuln-rbac.md`. | -| Sprint 29 | Vulnerability Explorer | docs/TASKS.md | TODO | Docs & Ops Guilds | DOCS-VULN-29-012 | Publish `/docs/runbooks/vuln-ops.md`. | -| Sprint 29 | Vulnerability Explorer | docs/TASKS.md | TODO | Docs & Deployment Guilds | DOCS-VULN-29-013 | Update `/docs/install/containers.md` with Findings Ledger & Vuln Explorer API. | -| Sprint 29 | Vulnerability Explorer | ops/deployment/TASKS.md | TODO | Deployment & Findings Ledger Guilds | DEPLOY-VULN-29-001 | Provide deployments for Findings Ledger/projector with migrations/backups. | -| Sprint 29 | Vulnerability Explorer | ops/deployment/TASKS.md | TODO | Deployment & Vuln Explorer API Guilds | DEPLOY-VULN-29-002 | Package Vuln Explorer API deployments/health checks/offline kit notes. | -| Sprint 29 | Vulnerability Explorer | ops/devops/TASKS.md | TODO | DevOps & Findings Ledger Guilds | DEVOPS-VULN-29-001 | Set up CI/backups/anchoring monitoring for Findings Ledger. | -| Sprint 29 | Vulnerability Explorer | ops/devops/TASKS.md | TODO | DevOps & Vuln Explorer API Guilds | DEVOPS-VULN-29-002 | Configure Vuln Explorer perf tests, budgets, dashboards, alerts. | -| Sprint 29 | Vulnerability Explorer | ops/devops/TASKS.md | TODO | DevOps & Console Guilds | DEVOPS-VULN-29-003 | Integrate Vuln Explorer telemetry pipeline with privacy safeguards + dashboards. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-VULN-29-001 | Define Vuln Explorer RBAC/ABAC scopes and issuer metadata. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-VULN-29-002 | Enforce CSRF, attachment signing, and audit logging referencing ledger hashes. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.Authority/TASKS.md | TODO | Authority Core & Docs Guild | AUTH-VULN-29-003 | Update docs/config samples for Vuln Explorer roles and security posture. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-VULN-29-001 | Implement `stella vuln list` with grouping, filters, JSON/CSV output. 
| -| Sprint 29 | Vulnerability Explorer | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-VULN-29-002 | Implement `stella vuln show` with evidence/policy/path display. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-VULN-29-003 | Add workflow CLI commands (assign/comment/accept-risk/verify-fix/target-fix/reopen). | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-VULN-29-004 | Implement `stella vuln simulate` producing diff summaries/Markdown. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-VULN-29-005 | Implement `stella vuln export` and bundle signature verification. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI & Docs Guilds | CLI-VULN-29-006 | Update CLI docs/examples for Vulnerability Explorer commands. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-VULN-29-001 | Canonicalize (lossless) advisory identifiers, persist `links[]`, backfill, and expose raw payload snapshots (no merge/derived fields). | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-VULN-29-002 | Provide advisory evidence retrieval endpoint for Vuln Explorer. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService & Observability Guilds | CONCELIER-VULN-29-004 | Add metrics/logs/events for advisory normalization supporting resolver. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-VULN-29-001 | Canonicalize (lossless) VEX keys and product scopes with backfill + links (no merge/suppression). | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-VULN-29-002 | Expose VEX evidence retrieval endpoint for Explorer evidence tabs. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService & Observability Guilds | EXCITITOR-VULN-29-004 | Instrument metrics/logs for VEX normalization and suppression events. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-29-001 | Design ledger & projection schemas, hashing strategy, and migrations for Findings Ledger. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-29-002 | Implement ledger write API with hash chaining and Merkle root anchoring job. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger & Scheduler Guilds | LEDGER-29-003 | Build projector worker deriving `findings_projection` with idempotent replay. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger & Policy Guilds | LEDGER-29-004 | Integrate Policy Engine batch evaluation into projector with rationale caching. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-29-005 | Implement workflow mutation endpoints producing ledger events (assign/comment/accept-risk/etc.). 
| -| Sprint 29 | Vulnerability Explorer | src/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger & Security Guilds | LEDGER-29-006 | Add attachment encryption, signed URLs, and CSRF protections for workflow endpoints. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger & Observability Guilds | LEDGER-29-007 | Instrument ledger metrics/logs/alerts (write latency, projection lag, anchoring). | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger & QA Guilds | LEDGER-29-008 | Provide replay/determinism/load tests for ledger/projector pipelines. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger & DevOps Guilds | LEDGER-29-009 | Deliver deployment/offline artefacts, backup/restore, Merkle anchoring guidance. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-29-001 | Implement policy batch evaluation endpoint returning determinations + rationale. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-29-002 | Provide simulation diff API for Vuln Explorer comparisons. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-29-003 | Include path/scope annotations in determinations for Explorer. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild & Observability Guild | POLICY-ENGINE-29-004 | Add telemetry for batch evaluation + simulation jobs. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.SbomService/TASKS.md | TODO | SBOM Service Guild | SBOM-VULN-29-001 | Emit inventory evidence with scope/runtime/path/safe version hints; publish change events. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.SbomService/TASKS.md | TODO | SBOM Service & Findings Ledger Guilds | SBOM-VULN-29-002 | Provide resolver feed for candidate generation with idempotent delivery. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.Scheduler.WebService/TASKS.md | TODO | Scheduler WebService Guild | SCHED-VULN-29-001 | Expose resolver job APIs + status monitoring for Vuln Explorer recomputation. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.Scheduler.WebService/TASKS.md | TODO | Scheduler WebService & Observability Guilds | SCHED-VULN-29-002 | Provide projector lag metrics endpoint + webhook notifications. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker Guild | SCHED-WORKER-29-001 | Implement resolver worker applying ecosystem version semantics and path scope. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker Guild | SCHED-WORKER-29-002 | Implement evaluation worker invoking Policy Engine and updating ledger queues. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker & Observability Guilds | SCHED-WORKER-29-003 | Add monitoring for resolver/evaluation backlog and SLA alerts. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.VulnExplorer.Api/TASKS.md | TODO | Vuln Explorer API Guild | VULN-API-29-001 | Publish Vuln Explorer OpenAPI + query schemas. 
| -| Sprint 29 | Vulnerability Explorer | src/StellaOps.VulnExplorer.Api/TASKS.md | TODO | Vuln Explorer API Guild | VULN-API-29-002 | Implement list/query endpoints with grouping, paging, cost budgets. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.VulnExplorer.Api/TASKS.md | TODO | Vuln Explorer API Guild | VULN-API-29-003 | Implement detail endpoint combining evidence, policy rationale, paths, history. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.VulnExplorer.Api/TASKS.md | TODO | Vuln Explorer API & Findings Ledger Guilds | VULN-API-29-004 | Expose workflow APIs writing ledger events with validation + idempotency. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.VulnExplorer.Api/TASKS.md | TODO | Vuln Explorer API & Policy Guilds | VULN-API-29-005 | Implement policy simulation endpoint producing diffs without side effects. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.VulnExplorer.Api/TASKS.md | TODO | Vuln Explorer API Guild | VULN-API-29-006 | Integrate Graph Explorer paths metadata and deep-link parameters. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.VulnExplorer.Api/TASKS.md | TODO | Vuln Explorer API & Security Guilds | VULN-API-29-007 | Enforce RBAC/ABAC, CSRF, attachment security, and audit logging. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.VulnExplorer.Api/TASKS.md | TODO | Vuln Explorer API Guild | VULN-API-29-008 | Provide evidence bundle export job with signing + manifests. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.VulnExplorer.Api/TASKS.md | TODO | Vuln Explorer API & Observability Guilds | VULN-API-29-009 | Instrument API telemetry (latency, workflow counts, exports). | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.VulnExplorer.Api/TASKS.md | TODO | Vuln Explorer API & QA Guilds | VULN-API-29-010 | Deliver unit/integration/perf/determinism tests for Vuln Explorer API. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.VulnExplorer.Api/TASKS.md | TODO | Vuln Explorer API & DevOps Guilds | VULN-API-29-011 | Ship deployment/offline manifests, health checks, scaling docs. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-VULN-29-001 | Route `/vuln/*` APIs with tenant RBAC, ABAC, anti-forgery enforcement. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-VULN-29-002 | Proxy workflow calls to Findings Ledger with correlation IDs + retries. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-VULN-29-003 | Expose simulation/export orchestration with SSE/progress + signed links. | -| Sprint 29 | Vulnerability Explorer | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform & Observability Guilds | WEB-VULN-29-004 | Aggregate Vuln Explorer telemetry (latency, errors, exports). | -| Sprint 30 | VEX Lens | docs/TASKS.md | TODO | Docs Guild | DOCS-VEX-30-001 | Publish `/docs/vex/consensus-overview.md`. | -| Sprint 30 | VEX Lens | docs/TASKS.md | TODO | Docs Guild | DOCS-VEX-30-002 | Write `/docs/vex/consensus-algorithm.md`. | -| Sprint 30 | VEX Lens | docs/TASKS.md | TODO | Docs Guild | DOCS-VEX-30-003 | Document `/docs/vex/issuer-directory.md`. | -| Sprint 30 | VEX Lens | docs/TASKS.md | TODO | Docs Guild | DOCS-VEX-30-004 | Publish `/docs/vex/consensus-api.md`. | -| Sprint 30 | VEX Lens | docs/TASKS.md | TODO | Docs Guild | DOCS-VEX-30-005 | Create `/docs/vex/consensus-console.md`. 
| -| Sprint 30 | VEX Lens | docs/TASKS.md | TODO | Docs Guild | DOCS-VEX-30-006 | Add `/docs/policy/vex-trust-model.md`. | -| Sprint 30 | VEX Lens | docs/TASKS.md | TODO | Docs Guild | DOCS-VEX-30-007 | Author `/docs/sbom/vex-mapping.md`. | -| Sprint 30 | VEX Lens | docs/TASKS.md | TODO | Docs Guild | DOCS-VEX-30-008 | Publish `/docs/security/vex-signatures.md`. | -| Sprint 30 | VEX Lens | docs/TASKS.md | TODO | Docs Guild | DOCS-VEX-30-009 | Write `/docs/runbooks/vex-ops.md`. | -| Sprint 30 | VEX Lens | ops/devops/TASKS.md | TODO | DevOps Guild | VEXLENS-30-009, ISSUER-30-005 | Set up CI/perf/telemetry dashboards for VEX Lens and Issuer Directory. | -| Sprint 30 | VEX Lens | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | VEXLENS-30-007 | Implement `stella vex consensus` CLI commands with list/show/simulate/export. | -| Sprint 30 | VEX Lens | src/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild, VEX Lens Guild | CONCELIER-VEXLENS-30-001 | Guarantee advisory key consistency and provide cross-links for consensus rationale (VEX Lens). | -| Sprint 30 | VEX Lens | src/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-VULN-29-001 | Ensure VEX evidence includes issuer hints, signatures, product trees for Lens consumption. | -| Sprint 30 | VEX Lens | src/StellaOps.IssuerDirectory/TASKS.md | TODO | Issuer Directory Guild | ISSUER-30-001 | Implement issuer CRUD API with RBAC and audit logs. | -| Sprint 30 | VEX Lens | src/StellaOps.IssuerDirectory/TASKS.md | TODO | Issuer Directory & Security Guilds | ISSUER-30-002 | Implement key management endpoints with expiry enforcement. | -| Sprint 30 | VEX Lens | src/StellaOps.IssuerDirectory/TASKS.md | TODO | Issuer Directory & Policy Guilds | ISSUER-30-003 | Provide trust weight override APIs with audit trails. | -| Sprint 30 | VEX Lens | src/StellaOps.IssuerDirectory/TASKS.md | TODO | Issuer Directory & VEX Lens Guilds | ISSUER-30-004 | Integrate issuer data into signature verification clients. | -| Sprint 30 | VEX Lens | src/StellaOps.IssuerDirectory/TASKS.md | TODO | Issuer Directory & Observability Guilds | ISSUER-30-005 | Instrument issuer change metrics/logs and dashboards. | -| Sprint 30 | VEX Lens | src/StellaOps.IssuerDirectory/TASKS.md | TODO | Issuer Directory & DevOps Guilds | ISSUER-30-006 | Provide deployment/backup/offline docs for Issuer Directory. | -| Sprint 30 | VEX Lens | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-30-101 | Surface trust weighting configuration (issuer weights, modifiers, decay) for VEX Lens via Policy Studio/API. | -| Sprint 30 | VEX Lens | src/StellaOps.VexLens/TASKS.md | TODO | VEX Lens Guild | VEXLENS-30-001 | Implement VEX normalization pipeline (CSAF, OpenVEX, CycloneDX) with deterministic outputs. | -| Sprint 30 | VEX Lens | src/StellaOps.VexLens/TASKS.md | TODO | VEX Lens Guild | VEXLENS-30-002 | Build product mapping library aligning CSAF product trees to purls/versions with scope scoring. | -| Sprint 30 | VEX Lens | src/StellaOps.VexLens/TASKS.md | TODO | VEX Lens & Issuer Directory Guilds | VEXLENS-30-003 | Integrate signature verification using issuer keys; annotate evidence. | -| Sprint 30 | VEX Lens | src/StellaOps.VexLens/TASKS.md | TODO | VEX Lens & Policy Guilds | VEXLENS-30-004 | Implement trust weighting functions configurable via policy. 
| -| Sprint 30 | VEX Lens | src/StellaOps.VexLens/TASKS.md | TODO | VEX Lens Guild | VEXLENS-30-005 | Implement consensus algorithm producing state, confidence, rationale, and quorum. | -| Sprint 30 | VEX Lens | src/StellaOps.VexLens/TASKS.md | TODO | VEX Lens & Findings Ledger Guilds | VEXLENS-30-006 | Materialize consensus projections and change events. | -| Sprint 30 | VEX Lens | src/StellaOps.VexLens/TASKS.md | TODO | VEX Lens Guild | VEXLENS-30-007 | Deliver query/detail/simulation/export APIs with budgets and OpenAPI docs. | -| Sprint 30 | VEX Lens | src/StellaOps.VexLens/TASKS.md | TODO | VEX Lens & Policy Guilds | VEXLENS-30-008 | Integrate consensus signals with Policy Engine and Vuln Explorer. | -| Sprint 30 | VEX Lens | src/StellaOps.VexLens/TASKS.md | TODO | VEX Lens & Observability Guilds | VEXLENS-30-009 | Instrument metrics/logs/traces; publish dashboards/alerts. | -| Sprint 30 | VEX Lens | src/StellaOps.VexLens/TASKS.md | TODO | VEX Lens & QA Guilds | VEXLENS-30-010 | Build unit/property/integration/load tests and determinism harness. | -| Sprint 30 | VEX Lens | src/StellaOps.VexLens/TASKS.md | TODO | VEX Lens & DevOps Guilds | VEXLENS-30-011 | Provide deployment manifests, scaling guides, offline seeds, runbooks. | -| Sprint 30 | VEX Lens | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild, VEX Lens Guild | WEB-VEX-30-007 | Route `/vex/consensus` APIs via gateway with RBAC/ABAC, caching, and telemetry (proxy-only). | -| Sprint 31 | Advisory AI | docs/TASKS.md | TODO | Docs Guild | DOCS-AIAI-31-001 | Publish Advisory AI overview doc. | -| Sprint 31 | Advisory AI | docs/TASKS.md | TODO | Docs Guild | DOCS-AIAI-31-002 | Publish architecture doc for Advisory AI. | -| Sprint 31 | Advisory AI | docs/TASKS.md | TODO | Docs Guild | DOCS-AIAI-31-003..009 | Complete API/Console/CLI/Policy/Security/SBOM/Runbook docs. | -| Sprint 31 | Advisory AI | ops/deployment/TASKS.md | TODO | Deployment Guild | DEPLOY-AIAI-31-001 | Provide Advisory AI deployment/offline guidance. | -| Sprint 31 | Advisory AI | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-AIAI-31-001 | Provision CI/perf/telemetry for Advisory AI. | -| Sprint 31 | Advisory AI | src/StellaOps.AdvisoryAI/TASKS.md | TODO | Advisory AI Guild | AIAI-31-001 | Implement advisory/VEX retrievers with paragraph anchors and citations. | -| Sprint 31 | Advisory AI | src/StellaOps.AdvisoryAI/TASKS.md | TODO | Advisory AI Guild | AIAI-31-002 | Build SBOM context retriever and blast radius estimator. | -| Sprint 31 | Advisory AI | src/StellaOps.AdvisoryAI/TASKS.md | TODO | Advisory AI Guild | AIAI-31-003 | Deliver deterministic toolset (version checks, dependency analysis, policy lookup). | -| Sprint 31 | Advisory AI | src/StellaOps.AdvisoryAI/TASKS.md | TODO | Advisory AI Guild | AIAI-31-004 | Orchestrator with task templates, tool chaining, caching. | -| Sprint 31 | Advisory AI | src/StellaOps.AdvisoryAI/TASKS.md | TODO | Advisory AI & Security Guilds | AIAI-31-005 | Guardrails (redaction, injection defense, output validation). | -| Sprint 31 | Advisory AI | src/StellaOps.AdvisoryAI/TASKS.md | TODO | Advisory AI Guild | AIAI-31-006 | Expose REST/batch APIs with RBAC and OpenAPI. | -| Sprint 31 | Advisory AI | src/StellaOps.AdvisoryAI/TASKS.md | TODO | Advisory AI & Observability Guilds | AIAI-31-007 | Instrument metrics/logs/traces and dashboards. | -| Sprint 31 | Advisory AI | src/StellaOps.AdvisoryAI/TASKS.md | TODO | Advisory AI & DevOps Guilds | AIAI-31-008 | Package inference + deployment manifests/flags. 
| -| Sprint 31 | Advisory AI | src/StellaOps.AdvisoryAI/TASKS.md | TODO | Advisory AI & QA Guilds | AIAI-31-009 | Build golden/injection/perf tests ensuring determinism. | -| Sprint 31 | Advisory AI | src/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-AIAI-31-001 | Define Advisory AI scopes and remote inference toggles. | -| Sprint 31 | Advisory AI | src/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-AIAI-31-002 | Enforce prompt logging and consent/audit flows. | -| Sprint 31 | Advisory AI | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-AIAI-31-001 | Implement `stella advise *` CLI commands leveraging Advisory AI orchestration and policy scopes. | -| Sprint 31 | Advisory AI | src/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-AIAI-31-001 | Expose advisory chunk API with paragraph anchors. | -| Sprint 31 | Advisory AI | src/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-AIAI-31-001 | Provide VEX chunks with justifications and signatures. | -| Sprint 31 | Advisory AI | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-31-001 | Provide policy knobs for Advisory AI. | -| Sprint 31 | Advisory AI | src/StellaOps.SbomService/TASKS.md | TODO | SBOM Service Guild | SBOM-AIAI-31-001 | Deliver SBOM path/timeline endpoints for Advisory AI. | -| Sprint 31 | Advisory AI | src/StellaOps.VexLens/TASKS.md | TODO | VEX Lens Guild | VEXLENS-AIAI-31-001 | Expose enriched rationale API for conflict explanations. | -| Sprint 31 | Advisory AI | src/StellaOps.VexLens/TASKS.md | TODO | VEX Lens Guild | VEXLENS-AIAI-31-002 | Provide batching/caching hooks for Advisory AI. | -| Sprint 31 | Advisory AI | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-AIAI-31-001 | Route `/advisory/ai/*` APIs with RBAC/telemetry. | -| Sprint 31 | Advisory AI | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-AIAI-31-002 | Provide batch orchestration and retry handling for Advisory AI. | -| Sprint 31 | Advisory AI | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-AIAI-31-003 | Emit Advisory AI gateway telemetry/audit logs. | -| Sprint 32 | Orchestrator Dashboard | docs/TASKS.md | TODO | Docs Guild | DOCS-ORCH-32-001 | Author `/docs/orchestrator/overview.md` covering mission, roles, AOC alignment, and imposed rule reminder. | -| Sprint 32 | Orchestrator Dashboard | docs/TASKS.md | TODO | Docs Guild | DOCS-ORCH-32-002 | Author `/docs/orchestrator/architecture.md` detailing scheduler, DAGs, rate limits, and data model. | -| Sprint 32 | Orchestrator Dashboard | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-ORCH-32-001 | Provision staging Postgres/message-bus charts, CI smoke deploy, and baseline dashboards for queue depth and inflight jobs. | -| Sprint 32 | Orchestrator Dashboard | src/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-ORCH-32-001 | Introduce `orch:read` scope and `Orch.Viewer` role with metadata, discovery docs, and offline defaults. | -| Sprint 32 | Orchestrator Dashboard | src/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-ORCH-32-001 | Register Concelier sources with orchestrator, publish schedules/rate policies, and seed metadata. 
| -| Sprint 32 | Orchestrator Dashboard | src/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-ORCH-32-002 | Embed worker SDK into Concelier ingestion loops emitting progress, heartbeats, and artifact hashes. | -| Sprint 32 | Orchestrator Dashboard | src/StellaOps.Excititor.Worker/TASKS.md | TODO | Excititor Worker Guild | EXCITITOR-ORCH-32-001 | Adopt worker SDK in Excititor worker with job claim/heartbeat and artifact summary emission. | -| Sprint 32 | Orchestrator Dashboard | src/StellaOps.Orchestrator.WorkerSdk.Go/TASKS.md | TODO | Worker SDK Guild | WORKER-GO-32-001 | Bootstrap Go worker SDK (client config, job claim, acknowledgement flow) with integration tests. | -| Sprint 32 | Orchestrator Dashboard | src/StellaOps.Orchestrator.WorkerSdk.Go/TASKS.md | TODO | Worker SDK Guild | WORKER-GO-32-002 | Add heartbeat/progress helpers, structured logging, and default metrics exporters to Go SDK. | -| Sprint 32 | Orchestrator Dashboard | src/StellaOps.Orchestrator.WorkerSdk.Python/TASKS.md | TODO | Worker SDK Guild | WORKER-PY-32-001 | Bootstrap Python async SDK with job claim/config adapters and sample worker. | -| Sprint 32 | Orchestrator Dashboard | src/StellaOps.Orchestrator.WorkerSdk.Python/TASKS.md | TODO | Worker SDK Guild | WORKER-PY-32-002 | Implement heartbeat/progress helpers and logging/metrics instrumentation for Python workers. | -| Sprint 32 | Orchestrator Dashboard | src/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-SVC-32-001 | Bootstrap orchestrator service with Postgres schema/migrations for sources, runs, jobs, dag_edges, artifacts, quotas, schedules. | -| Sprint 32 | Orchestrator Dashboard | src/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-SVC-32-002 | Implement scheduler DAG planner, dependency resolver, and job state machine for read-only tracking. | -| Sprint 32 | Orchestrator Dashboard | src/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-SVC-32-003 | Expose read-only REST APIs (sources, runs, jobs, DAG) with OpenAPI + validation. | -| Sprint 32 | Orchestrator Dashboard | src/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-SVC-32-004 | Ship WebSocket/SSE live update stream and metrics counters/histograms for job lifecycle. | -| Sprint 32 | Orchestrator Dashboard | src/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-SVC-32-005 | Deliver worker claim/heartbeat/progress endpoints capturing artifact metadata and checksums. | -| Sprint 32 | Orchestrator Dashboard | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-32-101 | Define orchestrator `policy_eval` job contract, idempotency keys, and enqueue hooks for change events. | -| Sprint 32 | Orchestrator Dashboard | src/StellaOps.SbomService/TASKS.md | TODO | SBOM Service Guild | SBOM-ORCH-32-001 | Integrate orchestrator job IDs into SBOM ingest/index pipelines with artifact hashing and status updates. | -| Sprint 32 | Orchestrator Dashboard | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-ORCH-32-001 | Expose read-only orchestrator APIs via gateway with tenant scoping, caching headers, and rate limits. | -| Sprint 33 | Orchestrator Dashboard | docs/TASKS.md | TODO | Docs Guild | DOCS-ORCH-33-001 | Author `/docs/orchestrator/api.md` with endpoints, WebSocket events, error codes, and imposed rule reminder. 
| -| Sprint 33 | Orchestrator Dashboard | docs/TASKS.md | TODO | Docs Guild | DOCS-ORCH-33-002 | Author `/docs/orchestrator/console.md` covering screens, accessibility, and live updates. | -| Sprint 33 | Orchestrator Dashboard | docs/TASKS.md | TODO | Docs Guild | DOCS-ORCH-33-003 | Author `/docs/orchestrator/cli.md` with command reference, examples, and exit codes. | -| Sprint 33 | Governance & Rules | ops/devops/TASKS.md | DOING (2025-10-26) | DevOps Guild, Platform Leads | DEVOPS-RULES-33-001 | Contracts & Rules anchor (gateway proxy-only; Policy Engine overlays/simulations; AOC ingestion canonicalization; Graph Indexer + Graph API as sole platform). | -| Sprint 33 | Orchestrator Dashboard | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-ORCH-33-001 | Publish Grafana dashboards for rate-limit/backpressure/error clustering and configure alert rules with runbooks. | -| Sprint 33 | Orchestrator Dashboard | src/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-ORCH-33-001 | Add `Orch.Operator` role, control action scopes, and enforce reason/ticket field capture. | -| Sprint 33 | Orchestrator Dashboard | src/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-ORCH-33-001 | Wire orchestrator control hooks (pause, throttle, retry) into Concelier workers with safe checkpoints. | -| Sprint 33 | Orchestrator Dashboard | src/StellaOps.Excititor.Worker/TASKS.md | TODO | Excititor Worker Guild | EXCITITOR-ORCH-33-001 | Honor orchestrator throttles, classify VEX errors, and emit retry-safe checkpoints in Excititor worker. | -| Sprint 33 | Orchestrator Dashboard | src/StellaOps.Orchestrator.WorkerSdk.Go/TASKS.md | TODO | Worker SDK Guild | WORKER-GO-33-001 | Add artifact upload helpers (object store + checksum) and idempotency guard to Go SDK. | -| Sprint 33 | Orchestrator Dashboard | src/StellaOps.Orchestrator.WorkerSdk.Go/TASKS.md | TODO | Worker SDK Guild | WORKER-GO-33-002 | Implement error classification/retry helper and structured failure report in Go SDK. | -| Sprint 33 | Orchestrator Dashboard | src/StellaOps.Orchestrator.WorkerSdk.Python/TASKS.md | TODO | Worker SDK Guild | WORKER-PY-33-001 | Add artifact publish/idempotency features to Python SDK with object store integration. | -| Sprint 33 | Orchestrator Dashboard | src/StellaOps.Orchestrator.WorkerSdk.Python/TASKS.md | TODO | Worker SDK Guild | WORKER-PY-33-002 | Expose error classification/retry/backoff helpers in Python SDK with structured logging. | -| Sprint 33 | Orchestrator Dashboard | src/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-SVC-33-001 | Enable source/job control actions (test, pause/resume, retry/cancel/prioritize) with RBAC and audit hooks. | -| Sprint 33 | Orchestrator Dashboard | src/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-SVC-33-002 | Implement adaptive token-bucket rate limiter and concurrency caps reacting to upstream 429/503 signals. | -| Sprint 33 | Orchestrator Dashboard | src/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-SVC-33-003 | Add watermark/backfill manager with event-time windows, duplicate suppression, and preview API. | -| Sprint 33 | Orchestrator Dashboard | src/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-SVC-33-004 | Deliver dead-letter storage, replay endpoints, and surfaced error classes with remediation hints. 
| -| Sprint 33 | Orchestrator Dashboard | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-33-101 | Implement orchestrator-driven policy evaluation workers with heartbeats, SLO metrics, and rate limit awareness. | -| Sprint 33 | Orchestrator Dashboard | src/StellaOps.SbomService/TASKS.md | TODO | SBOM Service Guild | SBOM-ORCH-33-001 | Report SBOM ingest backpressure metrics and support orchestrator pause/resume/backfill signals. | -| Sprint 33 | Orchestrator Dashboard | src/StellaOps.VexLens/TASKS.md | TODO | VEX Lens Guild | VEXLENS-ORCH-33-001 | Expose `consensus_compute` orchestrator job type and integrate VEX Lens worker for diff batches. | -| Sprint 33 | Orchestrator Dashboard | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-ORCH-33-001 | Add control endpoints (actions/backfill) and SSE bridging with permission checks and error mapping. | -| Sprint 34 | Orchestrator Dashboard | docs/TASKS.md | TODO | Docs Guild | DOCS-ORCH-34-001 | Author `/docs/orchestrator/run-ledger.md` describing provenance export format and audits. | -| Sprint 34 | Orchestrator Dashboard | docs/TASKS.md | TODO | Docs Guild | DOCS-ORCH-34-002 | Author `/docs/security/secrets-handling.md` covering KMS refs, redaction, and operator hygiene. | -| Sprint 34 | Orchestrator Dashboard | docs/TASKS.md | TODO | Docs Guild | DOCS-ORCH-34-003 | Author `/docs/operations/orchestrator-runbook.md` (failures, backfill guide, circuit breakers). | -| Sprint 34 | Orchestrator Dashboard | docs/TASKS.md | TODO | Docs Guild | DOCS-ORCH-34-004 | Author `/docs/schemas/artifacts.md` detailing artifact kinds, schema versions, hashing, storage layout. | -| Sprint 34 | Orchestrator Dashboard | docs/TASKS.md | TODO | Docs Guild | DOCS-ORCH-34-005 | Author `/docs/slo/orchestrator-slo.md` defining SLOs, burn alerts, and measurement strategy. | -| Sprint 34 | Orchestrator Dashboard | ops/deployment/TASKS.md | TODO | Deployment Guild | DEPLOY-ORCH-34-001 | Provide Helm/Compose manifests, scaling defaults, and offline kit instructions for orchestrator service. | -| Sprint 34 | Orchestrator Dashboard | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-ORCH-34-001 | Harden production dashboards/alerts, synthetic probes, and incident response playbooks for orchestrator. | -| Sprint 34 | Orchestrator Dashboard | ops/offline-kit/TASKS.md | TODO | Offline Kit Guild | DEVOPS-OFFLINE-34-006 | Bundle orchestrator service, worker SDK samples, and Postgres snapshot into Offline Kit with integrity checks. | -| Sprint 34 | Orchestrator Dashboard | src/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-ORCH-34-001 | Add `Orch.Admin` role for quotas/backfills, enforce audit reason requirements, update docs and offline defaults. | -| Sprint 34 | Orchestrator Dashboard | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-ORCH-34-001 | Implement backfill wizard and quota management commands with dry-run preview and guardrails. | -| Sprint 34 | Orchestrator Dashboard | src/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-ORCH-34-001 | Implement orchestrator-driven backfills for advisory sources with idempotent artifact reuse and ledger linkage. | -| Sprint 34 | Orchestrator Dashboard | src/StellaOps.Excititor.Worker/TASKS.md | TODO | Excititor Worker Guild | EXCITITOR-ORCH-34-001 | Support orchestrator backfills and circuit breaker resets for Excititor sources with auditing. 
| -| Sprint 34 | Orchestrator Dashboard | src/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-34-101 | Link orchestrator run ledger entries into Findings Ledger provenance export and audit queries. | -| Sprint 34 | Orchestrator Dashboard | src/StellaOps.Orchestrator.WorkerSdk.Go/TASKS.md | TODO | Worker SDK Guild | WORKER-GO-34-001 | Add backfill range execution, watermark handshake, and artifact dedupe verification to Go SDK. | -| Sprint 34 | Orchestrator Dashboard | src/StellaOps.Orchestrator.WorkerSdk.Python/TASKS.md | TODO | Worker SDK Guild | WORKER-PY-34-001 | Add backfill support and deterministic artifact dedupe validation to Python SDK. | -| Sprint 34 | Orchestrator Dashboard | src/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-SVC-34-001 | Implement quota management APIs, SLO burn-rate computation, and alert budget tracking. | -| Sprint 34 | Orchestrator Dashboard | src/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-SVC-34-002 | Build audit log and immutable run ledger export with signed manifest support. | -| Sprint 34 | Orchestrator Dashboard | src/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-SVC-34-003 | Run perf/scale validation (10k jobs, dispatch <150 ms) and add autoscaling hooks. | -| Sprint 34 | Orchestrator Dashboard | src/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-SVC-34-004 | Package orchestrator container, Helm overlays, offline bundle seeds, and provenance attestations. | -| Sprint 34 | Orchestrator Dashboard | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-34-101 | Expose policy eval run ledger exports and SLO burn metrics to orchestrator. | -| Sprint 34 | Orchestrator Dashboard | src/StellaOps.SbomService/TASKS.md | TODO | SBOM Service Guild | SBOM-ORCH-34-001 | Enable SBOM backfill and watermark reconciliation; emit coverage metrics and flood guard. | -| Sprint 34 | Orchestrator Dashboard | src/StellaOps.VexLens/TASKS.md | TODO | VEX Lens Guild | VEXLENS-ORCH-34-001 | Integrate consensus compute completion events with orchestrator ledger and provenance outputs. | -| Sprint 34 | Orchestrator Dashboard | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-ORCH-34-001 | Expose quotas/backfill/queue metrics endpoints, throttle toggles, and error clustering APIs. | -| Sprint 35 | EPDR Foundations | src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md | TODO | Scanner EPDR Guild | SCANNER-ANALYZERS-LANG-11-001 | Build entrypoint resolver (identity + environment profiles) and emit normalized entrypoint records. | -| Sprint 35 | EPDR Foundations | src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md | TODO | Scanner EPDR Guild | SCANNER-ANALYZERS-LANG-11-002 | Static IL/reflection/ALC heuristics producing dependency edges with reason codes and confidence. | -| Sprint 35 | EPDR Foundations | src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md | TODO | Scanner EPDR Guild, Signals Guild | SCANNER-ANALYZERS-LANG-11-003 | Runtime loader/PInvoke signal ingestion merged with static/declared edges (confidence & explain). | -| Sprint 35 | Export Center Phase 1 | docs/TASKS.md | TODO | Docs Guild | DOCS-EXPORT-35-001 | Author `/docs/export-center/overview.md` with purpose, profiles, security, and imposed rule reminder. 
| -| Sprint 35 | Export Center Phase 1 | docs/TASKS.md | TODO | Docs Guild | DOCS-EXPORT-35-002 | Author `/docs/export-center/architecture.md` detailing service components, adapters, manifests, signing, and distribution. | -| Sprint 35 | Export Center Phase 1 | docs/TASKS.md | TODO | Docs Guild | DOCS-EXPORT-35-003 | Publish `/docs/export-center/profiles.md` covering schemas, examples, and compatibility. | -| Sprint 35 | Export Center Phase 1 | ops/deployment/TASKS.md | TODO | Deployment Guild | DEPLOY-EXPORT-35-001 | Package exporter service/worker containers, Helm overlays (download-only), and rollout guide. | -| Sprint 35 | Export Center Phase 1 | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-EXPORT-35-001 | Create exporter CI pipeline (lint/test/perf smoke), object storage fixtures, and initial Grafana dashboards. | -| Sprint 35 | Export Center Phase 1 | src/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-SVC-35-001 | Bootstrap exporter service, configuration, and migrations for export profiles/runs/inputs/distributions with tenant scopes. | -| Sprint 35 | Export Center Phase 1 | src/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-SVC-35-002 | Implement planner resolving filters to iterators and orchestrator job contract with deterministic sampling. | -| Sprint 35 | Export Center Phase 1 | src/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-SVC-35-003 | Deliver JSON adapters (raw/policy) with canonical normalization, redaction enforcement, and zstd writers. | -| Sprint 35 | Export Center Phase 1 | src/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-SVC-35-004 | Build mirror (full) adapter producing filesystem layout, manifests, and bundle assembly for download profile. | -| Sprint 35 | Export Center Phase 1 | src/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-SVC-35-005 | Implement manifest/provenance writer and KMS signing/attestation for export bundles. | -| Sprint 35 | Export Center Phase 1 | src/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-SVC-35-006 | Expose Export API (profiles, runs, download) with SSE updates, concurrency controls, and audit logging. | -| Sprint 35 | Export Center Phase 1 | src/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-EXPORT-35-001 | Provide paginated streaming endpoints for advisories, VEX, SBOMs, and findings filtered by scope selectors. | -| Sprint 35 | Export Center Phase 1 | src/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-SVC-35-101 | Register export job type, quotas, and rate policies; surface export job telemetry for scheduler. | -| Sprint 35 | Export Center Phase 1 | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-35-201 | Expose deterministic policy snapshot + evaluated findings endpoint aligned with Export Center requirements. | -| Sprint 35 | Export Center Phase 1 | src/StellaOps.VexLens/TASKS.md | TODO | VEX Lens Guild | VEXLENS-EXPORT-35-001 | Publish consensus snapshot API delivering deterministic JSON for export consumption. | -| Sprint 35 | Export Center Phase 1 | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-EXPORT-35-001 | Route Export Center APIs through gateway with tenant scoping, viewer/operator scopes, and streaming downloads. 
| -| Sprint 36 | EPDR Observations | src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md | TODO | Scanner EPDR Guild, SBOM Service Guild | SCANNER-ANALYZERS-LANG-11-004 | Normalize EPDR output to Scanner observation writer (entrypoints + edges + env profiles). | -| Sprint 36 | EPDR Observations | src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md | TODO | Scanner EPDR Guild, QA Guild | SCANNER-ANALYZERS-LANG-11-005 | End-to-end fixtures/benchmarks covering publish modes, RIDs, trimming, NativeAOT with explain traces. | -| Sprint 36 | Export Center Phase 2 | docs/TASKS.md | TODO | Docs Guild | DOCS-EXPORT-36-004 | Author `/docs/export-center/api.md` with endpoint examples and imposed rule note. | -| Sprint 36 | Export Center Phase 2 | docs/TASKS.md | TODO | Docs Guild | DOCS-EXPORT-36-005 | Publish `/docs/export-center/cli.md` covering commands, scripts, verification, and imposed rule reminder. | -| Sprint 36 | Export Center Phase 2 | docs/TASKS.md | TODO | Docs Guild | DOCS-EXPORT-36-006 | Write `/docs/export-center/trivy-adapter.md` detailing mappings, compatibility, and test matrix. | -| Sprint 36 | Export Center Phase 2 | ops/deployment/TASKS.md | TODO | Deployment Guild | DEPLOY-EXPORT-36-001 | Document registry credentials, OCI push workflows, and automation for export distributions. | -| Sprint 36 | Export Center Phase 2 | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-EXPORT-36-001 | Integrate Trivy compatibility validation, OCI push smoke tests, and metrics dashboards for export throughput. | -| Sprint 36 | Export Center Phase 2 | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-EXPORT-36-001 | Add `stella export distribute` (OCI/objstore), `run download --resume`, and status polling enhancements. | -| Sprint 36 | Export Center Phase 2 | src/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-SVC-36-001 | Implement Trivy DB adapter (core) with schema mapping, validation, and compatibility gating. | -| Sprint 36 | Export Center Phase 2 | src/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-SVC-36-002 | Add Trivy Java DB variant, shared manifest entries, and adapter regression tests. | -| Sprint 36 | Export Center Phase 2 | src/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-SVC-36-003 | Build OCI distribution engine for exports with descriptor annotations and registry auth handling. | -| Sprint 36 | Export Center Phase 2 | src/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-SVC-36-004 | Extend planner/run lifecycle for OCI/object storage distributions with retry + idempotency. | -| Sprint 36 | Export Center Phase 2 | src/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-SVC-36-101 | Add distribution job follow-ups, retention metadata, and metrics for export runs. | -| Sprint 36 | Export Center Phase 2 | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-EXPORT-36-001 | Expose distribution endpoints (OCI/object storage) and manifest/provenance download proxies with RBAC. | -| Sprint 37 | Export Center Phase 3 | docs/TASKS.md | TODO | Docs Guild | DOCS-EXPORT-37-001 | Publish `/docs/export-center/mirror-bundles.md` detailing layouts, deltas, encryption, imposed rule reminder. | -| Sprint 37 | Export Center Phase 3 | docs/TASKS.md | TODO | Docs Guild | DOCS-EXPORT-37-002 | Publish `/docs/export-center/provenance-and-signing.md` covering manifests, attestation, verification. 
| -| Sprint 37 | Export Center Phase 3 | docs/TASKS.md | TODO | Docs Guild | DOCS-EXPORT-37-003 | Publish `/docs/operations/export-runbook.md` for failures, tuning, capacity, with imposed rule note. | -| Sprint 37 | Export Center Phase 3 | docs/TASKS.md | TODO | Docs Guild | DOCS-EXPORT-37-004 | Publish `/docs/security/export-hardening.md` covering RBAC, isolation, encryption, and imposed rule. | -| Sprint 37 | Export Center Phase 3 | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-EXPORT-37-001 | Finalize dashboards/alerts for exports (failure, verify), retention jobs, and chaos testing harness. | -| Sprint 37 | Export Center Phase 3 | ops/offline-kit/TASKS.md | TODO | Offline Kit Guild | DEVOPS-OFFLINE-37-001 | Package Export Center mirror bundles + verification tooling into Offline Kit with manifest/signature updates. | -| Sprint 37 | Export Center Phase 3 | src/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-EXPORT-37-001 | Add `Export.Admin` scope enforcement for retention, encryption keys, and scheduling APIs. | -| Sprint 37 | Export Center Phase 3 | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-EXPORT-37-001 | Implement `stella export schedule`, `run verify`, and bundle verification tooling with signature/hash checks. | -| Sprint 37 | Export Center Phase 3 | src/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-SVC-37-001 | Implement mirror delta adapter, base export linkage, and content-addressed reuse. | -| Sprint 37 | Export Center Phase 3 | src/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-SVC-37-002 | Add bundle encryption, key wrapping with KMS, and verification tooling for encrypted exports. | -| Sprint 37 | Export Center Phase 3 | src/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-SVC-37-003 | Deliver scheduling/retention engine (cron/event triggers), audit trails, and retry idempotency enhancements. | -| Sprint 37 | Export Center Phase 3 | src/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-SVC-37-004 | Provide export verification API and CLI integration, including hash/signature validation endpoints. | -| Sprint 37 | Export Center Phase 3 | src/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-SVC-37-101 | Enable scheduled export runs, retention pruning hooks, and failure alerting integration. | -| Sprint 37 | Export Center Phase 3 | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-EXPORT-37-001 | Surface scheduling, retention, and verification endpoints plus encryption parameter handling. | -| Sprint 37 | Native Analyzer Core | src/StellaOps.Scanner.Analyzers.Native/TASKS.md | TODO | Native Analyzer Guild | SCANNER-ANALYZERS-NATIVE-20-001 | Format detector & binary identity for ELF/PE/Mach-O (multi-slice) with stable entrypoint IDs. | -| Sprint 37 | Native Analyzer Core | src/StellaOps.Scanner.Analyzers.Native/TASKS.md | TODO | Native Analyzer Guild | SCANNER-ANALYZERS-NATIVE-20-002 | ELF dynamic parser emitting dtneeded edges, runpath metadata, symbol version needs. | -| Sprint 37 | Native Analyzer Core | src/StellaOps.Scanner.Analyzers.Native/TASKS.md | TODO | Native Analyzer Guild | SCANNER-ANALYZERS-NATIVE-20-003 | PE import + delay-load + SxS manifest parsing producing reason-coded edges. 
| -| Sprint 37 | Native Analyzer Core | src/StellaOps.Scanner.Analyzers.Native/TASKS.md | TODO | Native Analyzer Guild | SCANNER-ANALYZERS-NATIVE-20-004 | Mach-O load command parsing with @rpath expansion and slice handling. | -| Sprint 37 | Native Analyzer Core | src/StellaOps.Scanner.Analyzers.Native/TASKS.md | TODO | Native Analyzer Guild | SCANNER-ANALYZERS-NATIVE-20-005 | Cross-platform resolver engine modeling search order/explain traces for ELF/PE/Mach-O. | -| Sprint 37 | Native Analyzer Core | src/StellaOps.Scanner.Analyzers.Native/TASKS.md | TODO | Native Analyzer Guild | SCANNER-ANALYZERS-NATIVE-20-006 | Heuristic scanner for dlopen/LoadLibrary strings, plugin configs, ecosystem hints with confidence tags. | -| Sprint 38 | Native Observation Pipeline | src/StellaOps.Scanner.Analyzers.Native/TASKS.md | TODO | Native Analyzer Guild | SCANNER-ANALYZERS-NATIVE-20-007 | Serialize entrypoints/edges/env profiles to Scanner writer (AOC-compliant observations). | -| Sprint 38 | Native Observation Pipeline | src/StellaOps.Scanner.Analyzers.Native/TASKS.md | TODO | Native Analyzer Guild, QA Guild | SCANNER-ANALYZERS-NATIVE-20-008 | Fixture suite + determinism benchmarks for native analyzer across linux/windows/macos. | -| Sprint 38 | Native Observation Pipeline | src/StellaOps.Scanner.Analyzers.Native/TASKS.md | TODO | Native Analyzer Guild, Signals Guild | SCANNER-ANALYZERS-NATIVE-20-009 | Optional runtime capture adapters (eBPF/ETW/dyld) producing runtime-load edges with redaction. | -| Sprint 38 | Native Observation Pipeline | src/StellaOps.Scanner.Analyzers.Native/TASKS.md | TODO | Native Analyzer Guild, DevOps Guild | SCANNER-ANALYZERS-NATIVE-20-010 | Package native analyzer plug-in + Offline Kit updates and restart-time loading. | -| Sprint 38 | Notifications Studio Phase 1 | docs/TASKS.md | TODO | Docs Guild | DOCS-NOTIFY-38-001 | Publish `/docs/notifications/overview.md` and `/docs/notifications/architecture.md` ending with imposed rule statement. | -| Sprint 38 | Notifications Studio Phase 1 | ops/deployment/TASKS.md | TODO | Deployment Guild | DEPLOY-NOTIFY-38-001 | Package notifier API/worker Helm overlays (email/chat/webhook), secrets templates, rollout guide. | -| Sprint 38 | Notifications Studio Phase 1 | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-NOTIFY-38-001 | Stand up notifier CI pipelines, event bus fixtures, base dashboards for events/notifications latency. | -| Sprint 38 | Notifications Studio Phase 1 | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-NOTIFY-38-001 | Implement `stella notify` rule/template/incident commands (list/create/test/ack) with file-based inputs. | -| Sprint 38 | Notifications Studio Phase 1 | src/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-SVC-38-001 | Bootstrap notifier service, migrations for notif tables, event ingestion, and rule engine foundation (policy violations + job failures). | -| Sprint 38 | Notifications Studio Phase 1 | src/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-SVC-38-002 | Implement channel adapters (email, chat-webhook, generic webhook) with retry and audit logging. | -| Sprint 38 | Notifications Studio Phase 1 | src/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-SVC-38-003 | Deliver template service (versioning, preview), rendering pipeline with redaction, and provenance links. 
| -| Sprint 38 | Notifications Studio Phase 1 | src/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-SVC-38-004 | Expose initial API (rules CRUD, templates, incidents list, ack) and live feed WS stream. | -| Sprint 38 | Notifications Studio Phase 1 | src/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-SVC-38-101 | Standardize event envelope publication (policy/export/job lifecycle) with idempotency keys for notifier ingestion. | -| Sprint 38 | Notifications Studio Phase 1 | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-38-201 | Emit enriched violation events including rationale IDs via orchestrator bus. | -| Sprint 38 | Notifications Studio Phase 1 | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-NOTIFY-38-001 | Route notifier APIs through gateway with tenant scoping and operator scopes. | -| Sprint 39 | Java Analyzer Core | src/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md | TODO | Java Analyzer Guild | SCANNER-ANALYZERS-JAVA-21-001 | Java input normalizer (jar/war/ear/fat/jmod/jimage) with MR overlay selection. | -| Sprint 39 | Java Analyzer Core | src/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md | TODO | Java Analyzer Guild | SCANNER-ANALYZERS-JAVA-21-002 | Module/classpath builder with duplicate & split-package detection. | -| Sprint 39 | Java Analyzer Core | src/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md | TODO | Java Analyzer Guild | SCANNER-ANALYZERS-JAVA-21-003 | SPI scanner & provider selection with warnings. | -| Sprint 39 | Java Analyzer Core | src/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md | TODO | Java Analyzer Guild | SCANNER-ANALYZERS-JAVA-21-004 | Reflection/TCCL heuristics emitting reason-coded edges. | -| Sprint 39 | Java Analyzer Core | src/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md | TODO | Java Analyzer Guild | SCANNER-ANALYZERS-JAVA-21-005 | Framework config extraction (Spring, Jakarta, MicroProfile, logging, Graal configs). | -| Sprint 39 | Java Analyzer Core | src/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md | TODO | Java Analyzer Guild | SCANNER-ANALYZERS-JAVA-21-006 | JNI/native hint detection for Java artifacts. | -| Sprint 39 | Java Analyzer Core | src/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md | TODO | Java Analyzer Guild | SCANNER-ANALYZERS-JAVA-21-007 | Manifest/signature metadata collector (main/start/agent classes, signers). | -| Sprint 39 | Notifications Studio Phase 2 | docs/TASKS.md | TODO | Docs Guild | DOCS-NOTIFY-39-002 | Publish `/docs/notifications/rules.md`, `/templates.md`, `/digests.md` with imposed rule reminder. | -| Sprint 39 | Notifications Studio Phase 2 | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-NOTIFY-39-002 | Add throttling/quiet-hours dashboards, digest job monitoring, and storm breaker alerts. | -| Sprint 39 | Notifications Studio Phase 2 | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-NOTIFY-39-001 | Add simulation/digest CLI verbs and advanced filtering for incidents. | -| Sprint 39 | Notifications Studio Phase 2 | src/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-NOTIFY-39-001 | Optimize digest queries and provide API for notifier to fetch unresolved policy violations/SBOM deltas. | -| Sprint 39 | Notifications Studio Phase 2 | src/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-SVC-39-001 | Implement correlation engine, throttling, quiet hours/maintenance evaluator, and incident state machine. 
-| Sprint 39 | Notifications Studio Phase 2 | src/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-SVC-39-002 | Add digests generator with Findings Ledger queries and distribution (email/chat). |
-| Sprint 39 | Notifications Studio Phase 2 | src/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-SVC-39-003 | Provide simulation engine and API for rule dry-run against historical events. |
-| Sprint 39 | Notifications Studio Phase 2 | src/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-SVC-39-004 | Integrate quiet hours calendars and default throttles with audit logging. |
-| Sprint 39 | Notifications Studio Phase 2 | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-NOTIFY-39-001 | Surface digest scheduling, simulation, and throttle management endpoints via gateway. |
-| Sprint 40 | Java Observation & Runtime | src/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md | TODO | Java Analyzer Guild | SCANNER-ANALYZERS-JAVA-21-008 | Observation writer producing entrypoints/components/edges with warnings. |
-| Sprint 40 | Java Observation & Runtime | src/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md | TODO | Java Analyzer Guild, QA Guild | SCANNER-ANALYZERS-JAVA-21-009 | Fixture suite + determinism/perf benchmarks for Java analyzer. |
-| Sprint 40 | Java Observation & Runtime | src/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md | TODO | Java Analyzer Guild, Signals Guild | SCANNER-ANALYZERS-JAVA-21-010 | Optional runtime ingestion via agent/JFR producing runtime edges. |
-| Sprint 40 | Java Observation & Runtime | src/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md | TODO | Java Analyzer Guild, DevOps Guild | SCANNER-ANALYZERS-JAVA-21-011 | Package Java analyzer plug-in + Offline Kit/CLI updates. |
-| Sprint 40 | Notifications Studio Phase 3 | docs/TASKS.md | TODO | Docs Guild | DOCS-NOTIFY-40-001 | Publish `/docs/notifications/channels.md`, `/escalations.md`, `/api.md`, `/operations/notifier-runbook.md`, `/security/notifications-hardening.md` with imposed rule lines. |
-| Sprint 40 | Notifications Studio Phase 3 | ops/deployment/TASKS.md | TODO | Deployment Guild | DEPLOY-NOTIFY-40-001 | Package notifier escalations + localization deployment overlays, signed ack token rotation scripts, and rollback guidance. |
-| Sprint 40 | Notifications Studio Phase 3 | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-NOTIFY-40-001 | Finalize notifier dashboards/alerts (escalation failures, ack latency), chaos testing harness, and channel health monitoring. |
-| Sprint 40 | Notifications Studio Phase 3 | ops/offline-kit/TASKS.md | CARRY (no scope change) | Offline Kit Guild | DEVOPS-OFFLINE-37-002 | Carry from Sprint 37: Notifier offline packs (sample configs, template/digest packs, dry-run harness) with integrity checks. |
-| Sprint 40 | Notifications Studio Phase 3 | src/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-NOTIFY-40-001 | Enforce ack token signing/rotation, webhook allowlists, and admin-only escalation settings. |
-| Sprint 40 | Notifications Studio Phase 3 | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-NOTIFY-40-001 | Implement ack token redemption, escalation management, localization previews. |
-| Sprint 40 | Notifications Studio Phase 3 | src/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-SVC-40-001 | Implement escalations, on-call schedules, ack bridge, PagerDuty/OpsGenie adapters, and localization bundles. |
-| Sprint 40 | Notifications Studio Phase 3 | src/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-SVC-40-002 | Add CLI inbox/in-app feed channels and summary storm breaker notifications. |
-| Sprint 40 | Notifications Studio Phase 3 | src/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-SVC-40-003 | Harden security: signed ack links, webhook HMAC/IP allowlists, tenant isolation fuzzing, localization fallback. |
-| Sprint 40 | Notifications Studio Phase 3 | src/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-SVC-40-004 | Finalize observability (incident metrics, escalation latency) and chaos tests for channel outages. |
-| Sprint 40 | Notifications Studio Phase 3 | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-NOTIFY-40-001 | Expose escalation, localization, channel health endpoints and verification of signed links. |
-| Sprint 41 | CLI Parity & Task Packs Phase 1 | docs/TASKS.md | TODO | Docs Guild | DOCS-CLI-41-001 | Publish `/docs/cli/overview.md`, `/cli/configuration.md`, `/cli/output-and-exit-codes.md` (with imposed rule). |
-| Sprint 41 | CLI Parity & Task Packs Phase 1 | ops/deployment/TASKS.md | TODO | Deployment Guild | DEPLOY-CLI-41-001 | Package CLI release artifacts (tarballs, completions, container image) with distribution docs. |
-| Sprint 41 | CLI Parity & Task Packs Phase 1 | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-CLI-41-001 | Establish CLI build pipeline (multi-platform binaries, SBOM, checksums) and parity matrix CI enforcement. |
-| Sprint 41 | CLI Parity & Task Packs Phase 1 | src/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-PACKS-41-001 | Define CLI SSO scopes and Packs (`Packs.Read/Write/Run/Approve`) roles; update discovery/offline defaults. |
-| Sprint 41 | CLI Parity & Task Packs Phase 1 | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-CORE-41-001 | Implement CLI config/auth foundation, global flags, output renderer, and error/exit code mapping. |
-| Sprint 41 | CLI Parity & Task Packs Phase 1 | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-PARITY-41-001 | Deliver parity command groups (`policy`, `sbom`, `vuln`, `vex`, `advisory`, `export`, `orchestrator`) with JSON/table outputs and `--explain`. |
-| Sprint 41 | CLI Parity & Task Packs Phase 1 | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-PARITY-41-002 | Implement `notify`, `aoc`, `auth` command groups, idempotency keys, completions, and parity matrix export. |
-| Sprint 41 | CLI Parity & Task Packs Phase 1 | src/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-SVC-41-101 | Register `pack-run` job type, integrate logs/artifacts, expose pack run metadata. |
-| Sprint 41 | CLI Parity & Task Packs Phase 1 | src/StellaOps.PacksRegistry/TASKS.md | TODO | Packs Registry Guild | PACKS-REG-41-001 | Implement packs index API, signature verification, provenance storage, and RBAC. |
-| Sprint 41 | CLI Parity & Task Packs Phase 1 | src/StellaOps.TaskRunner/TASKS.md | TODO | Task Runner Guild | TASKRUN-41-001 | Bootstrap Task Runner service, migrations, run API, local executor, approvals pause, artifact capture. |
-| Sprint 42 | CLI Parity & Task Packs Phase 2 | docs/TASKS.md | TODO | Docs Guild | DOCS-CLI-42-001 | Publish `/docs/cli/parity-matrix.md`, `/cli/commands/*.md`, `/docs/task-packs/spec.md` (imposed rule). |
-| Sprint 42 | CLI Parity & Task Packs Phase 2 | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-CLI-42-001 | Add CLI golden output tests, parity diff automation, and pack run CI harness. |
-| Sprint 42 | CLI Parity & Task Packs Phase 2 | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-PACKS-42-001 | Implement Task Pack CLI commands (`pack plan/run/push/pull/verify`) with plan/simulate engine and expression sandbox. |
-| Sprint 42 | CLI Parity & Task Packs Phase 2 | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-PARITY-41-001..002 | Close parity gaps for Notifications, Policy Studio advanced features, SBOM graph, Vuln Explorer; parity matrix green. |
-| Sprint 42 | CLI Parity & Task Packs Phase 2 | src/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-PACKS-42-001 | Expose snapshot/time-travel APIs for CLI offline mode and pack simulation. |
-| Sprint 42 | CLI Parity & Task Packs Phase 2 | src/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-SVC-42-101 | Stream pack run logs via SSE/WS, expose artifact manifests, enforce pack run quotas. |
-| Sprint 42 | CLI Parity & Task Packs Phase 2 | src/StellaOps.PacksRegistry/TASKS.md | TODO | Packs Registry Guild | PACKS-REG-42-001 | Support pack version lifecycle, tenant allowlists, provenance export, signature rotation. |
-| Sprint 42 | CLI Parity & Task Packs Phase 2 | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-42-201 | Provide stable rationale IDs/APIs for CLI `--explain` and pack policy gates. |
-| Sprint 42 | CLI Parity & Task Packs Phase 2 | src/StellaOps.TaskRunner/TASKS.md | TODO | Task Runner Guild | TASKRUN-42-001 | Add loops, conditionals, `maxParallel`, outputs, simulation mode, policy gates in Task Runner. |
-| Sprint 43 | CLI Parity & Task Packs Phase 3 | docs/TASKS.md | TODO | Docs Guild | DOCS-PACKS-43-001 | Publish `/docs/task-packs/authoring-guide.md`, `/registry.md`, `/runbook.md`, `/security/pack-signing-and-rbac.md`, `/operations/cli-release-and-packaging.md` (imposed rule). |
-| Sprint 43 | CLI Parity & Task Packs Phase 3 | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-CLI-43-001 | Finalize multi-platform release automation, SBOM signing, parity gate enforcement, pack run chaos tests. |
-| Sprint 43 | CLI Parity & Task Packs Phase 3 | src/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-PACKS-41-001 | Enforce pack signing policies, approval RBAC, CLI token scopes for CI headless runs. |
-| Sprint 43 | CLI Parity & Task Packs Phase 3 | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-PACKS-42-001 | Deliver advanced pack features (approvals pause/resume, remote streaming, secret injection), localization, man pages. |
-| Sprint 43 | CLI Parity & Task Packs Phase 3 | src/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-SVC-35-005, PACKS-REG-41-001 | Integrate pack run manifests into export bundles and CLI verify flows. |
-| Sprint 43 | CLI Parity & Task Packs Phase 3 | src/StellaOps.PacksRegistry/TASKS.md | TODO | Packs Registry Guild | PACKS-REG-42-001 | Enforce pack signing policies, audit trails, registry mirroring, Offline Kit support. |
-| Sprint 43 | CLI Parity & Task Packs Phase 3 | src/StellaOps.TaskRunner/TASKS.md | TODO | Task Runner Guild | TASKRUN-42-001 | Implement approvals workflow, notifications integration, remote artifact uploads, chaos resilience. |
-| Sprint 44 | Containerized Distribution Phase 1 | docs/TASKS.md | TODO | Docs Guild | DOCS-INSTALL-44-001 | Publish install overview + Compose Quickstart docs (imposed rule). |
-| Sprint 44 | Containerized Distribution Phase 1 | ops/deployment/TASKS.md | TODO | Deployment Guild | COMPOSE-44-001 | Deliver Quickstart Compose stack with seed data and quickstart script. |
-| Sprint 44 | Containerized Distribution Phase 1 | ops/deployment/TASKS.md | TODO | Deployment Guild | COMPOSE-44-002 | Provide backup/reset scripts with guardrails and documentation. |
-| Sprint 44 | Containerized Distribution Phase 1 | ops/deployment/TASKS.md | TODO | Deployment Guild | COMPOSE-44-003 | Implement seed job and onboarding wizard toggle (`QUICKSTART_MODE`). |
-| Sprint 44 | Containerized Distribution Phase 1 | ops/deployment/TASKS.md | TODO | Deployment Guild | DEPLOY-COMPOSE-44-001 | Finalize Quickstart scripts and README. |
-| Sprint 44 | Containerized Distribution Phase 1 | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-CONTAINERS-44-001 | Automate multi-arch builds with SBOM/signature pipeline. |
-| Sprint 44 | Containerized Distribution Phase 1 | ops/devops/TASKS.md | TODO | DevOps Guild | DOCKER-44-001 | Author multi-stage Dockerfiles with non-root users, read-only FS, and health scripts for all services. |
-| Sprint 44 | Containerized Distribution Phase 1 | ops/devops/TASKS.md | TODO | DevOps Guild | DOCKER-44-002 | Generate SBOMs and cosign attestations for each image; integrate signature verification in CI. |
-| Sprint 44 | Containerized Distribution Phase 1 | ops/devops/TASKS.md | TODO | DevOps Guild | DOCKER-44-003 | Ensure `/health/*`, `/version`, `/metrics`, and capability endpoints (`merge=false`) are exposed across services. |
-| Sprint 44 | Containerized Distribution Phase 1 | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-CONTAINERS-44-001 | Expose config discovery and quickstart handling with health/version endpoints. |
-| Sprint 45 | Containerized Distribution Phase 2 | docs/TASKS.md | TODO | Docs Guild | DOCS-INSTALL-45-001 | Publish Helm production + configuration reference docs (imposed rule). |
-| Sprint 45 | Containerized Distribution Phase 2 | ops/deployment/TASKS.md | TODO | Deployment Guild | DEPLOY-HELM-45-001 | Publish Helm install guide and sample values. |
-| Sprint 45 | Containerized Distribution Phase 2 | ops/deployment/TASKS.md | TODO | Deployment Guild | HELM-45-001 | Scaffold Helm chart with component toggles and pinned digests. |
-| Sprint 45 | Containerized Distribution Phase 2 | ops/deployment/TASKS.md | TODO | Deployment Guild | HELM-45-002 | Add security features (TLS, NetworkPolicy, Secrets integration). |
-| Sprint 45 | Containerized Distribution Phase 2 | ops/deployment/TASKS.md | TODO | Deployment Guild | HELM-45-003 | Implement HPA, PDB, readiness gates, and observability hooks. |
-| Sprint 45 | Containerized Distribution Phase 2 | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-CONTAINERS-45-001 | Add Compose/Helm smoke tests to CI. |
-| Sprint 45 | Containerized Distribution Phase 2 | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-CONTAINERS-45-001 | Ensure readiness endpoints and config toggles support Helm deployments. |
-| Sprint 46 | Containerized Distribution Phase 3 | docs/TASKS.md | TODO | Docs Guild | DOCS-INSTALL-46-001 | Publish air-gap, supply chain, health/readiness, image catalog, console onboarding docs (imposed rule). |
-| Sprint 46 | Containerized Distribution Phase 3 | ops/deployment/TASKS.md | TODO | Deployment Guild | DEPLOY-AIRGAP-46-001 | Provide air-gap load script and docs. |
-| Sprint 46 | Containerized Distribution Phase 3 | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-CONTAINERS-46-001 | Build signed air-gap bundle and verify in CI. |
-| Sprint 46 | Containerized Distribution Phase 3 | ops/offline-kit/TASKS.md | TODO | Offline Kit Guild | OFFLINE-CONTAINERS-46-001 | Include air-gap bundle and instructions in Offline Kit. |
-| Sprint 46 | Containerized Distribution Phase 3 | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-CONTAINERS-46-001 | Harden offline mode and document fallback behavior. |
-| Sprint 47 | Authority-Backed Scopes & Tenancy Phase 1 | docs/TASKS.md | TODO | Docs Guild | DOCS-TEN-47-001 | Publish `/docs/security/tenancy-overview.md` and `/docs/security/scopes-and-roles.md` (imposed rule). |
-| Sprint 47 | Authority-Backed Scopes & Tenancy Phase 1 | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-TEN-47-001 | Integrate JWKS caching, signature verification tests, and auth regression suite into CI. |
-| Sprint 47 | Authority-Backed Scopes & Tenancy Phase 1 | src/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-TEN-47-001 | Implement unified JWT/OIDC config, scope grammar, tenant/project claims, and JWKS caching in Authority. |
-| Sprint 47 | Authority-Backed Scopes & Tenancy Phase 1 | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-TEN-47-001 | Ship `stella login`, `whoami`, `tenants list`, and tenant flag persistence with secure token storage. |
-| Sprint 47 | Authority-Backed Scopes & Tenancy Phase 1 | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-TEN-47-001 | Add auth middleware (token verification, tenant activation, scope checks) and structured 403 responses. |
-| Sprint 48 | Authority-Backed Scopes & Tenancy Phase 2 | docs/TASKS.md | TODO | Docs Guild | DOCS-TEN-48-001 | Publish `/docs/operations/multi-tenancy.md`, `/docs/operations/rls-and-data-isolation.md`, `/docs/console/admin-tenants.md` (imposed rule). |
-| Sprint 48 | Authority-Backed Scopes & Tenancy Phase 2 | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-TEN-48-001 | Write integration tests for RLS enforcement, tenant audit stream, and object store prefix checks. |
-| Sprint 48 | Authority-Backed Scopes & Tenancy Phase 2 | src/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-TEN-48-001 | Ensure advisory linkers operate per tenant with RLS, enforce aggregation-only capability endpoint. |
-| Sprint 48 | Authority-Backed Scopes & Tenancy Phase 2 | src/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-TEN-48-001 | Same as above for VEX linkers; enforce capability endpoint `merge=false`. |
-| Sprint 48 | Authority-Backed Scopes & Tenancy Phase 2 | src/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-TEN-48-001 | Add tenant prefixes to manifests/artifacts, enforce scope checks, and block cross-tenant exports by default. |
-| Sprint 48 | Authority-Backed Scopes & Tenancy Phase 2 | src/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-TEN-48-001 | Partition findings by tenant/project, enable RLS, and update queries/events to include tenant context. |
-| Sprint 48 | Authority-Backed Scopes & Tenancy Phase 2 | src/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-TEN-48-001 | Tenant-scope notification rules, incidents, and outbound channels; update storage schemas. |
-| Sprint 48 | Authority-Backed Scopes & Tenancy Phase 2 | src/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-TEN-48-001 | Stamp jobs with tenant/project, set DB session context, and reject jobs without context. |
-| Sprint 48 | Authority-Backed Scopes & Tenancy Phase 2 | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-TEN-48-001 | Add `tenant_id`/`project_id` to policy data, enable Postgres RLS, and expose rationale IDs with tenant context. |
-| Sprint 48 | Authority-Backed Scopes & Tenancy Phase 2 | src/StellaOps.TaskRunner/TASKS.md | TODO | Task Runner Guild | TASKRUN-TEN-48-001 | Propagate tenant/project to all steps, enforce object store prefix, and validate before execution. |
-| Sprint 48 | Authority-Backed Scopes & Tenancy Phase 2 | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-TEN-48-001 | Enforce tenant context through persistence (DB GUC, object store prefix), add request annotations, and emit audit events. |
-| Sprint 49 | Authority-Backed Scopes & Tenancy Phase 3 | docs/TASKS.md | TODO | Docs Guild | DOCS-TEN-49-001 | Publish `/docs/cli/authentication.md`, `/docs/api/authentication.md`, `/docs/policy/examples/abac-overlays.md`, `/docs/install/configuration-reference.md` updates (imposed rule). |
-| Sprint 49 | Authority-Backed Scopes & Tenancy Phase 3 | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-TEN-49-001 | Implement audit log pipeline, monitor scope usage, chaos tests for JWKS outage, and tenant load/perf tests. |
-| Sprint 49 | Authority-Backed Scopes & Tenancy Phase 3 | src/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-TEN-49-001 | Implement service accounts, delegation tokens (`act` chain), per-tenant quotas, and audit log streaming. |
-| Sprint 49 | Authority-Backed Scopes & Tenancy Phase 3 | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-TEN-49-001 | Add service account token minting, delegation, and `--impersonate` banner/controls. |
-| Sprint 49 | Authority-Backed Scopes & Tenancy Phase 3 | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-TEN-49-001 | Integrate ABAC policy overlay (optional), expose audit API, and support service token minting endpoints. |
-| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | docs/TASKS.md | TODO | Docs Guild | DOCS-INSTALL-50-001 | Add `/docs/install/telemetry-stack.md` for collector deployment and offline packaging. |
-| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | docs/TASKS.md | BLOCKED (2025-10-26) | Docs Guild | DOCS-OBS-50-001 | Author `/docs/observability/overview.md` with imposed rule banner and architecture context. |
-| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | docs/TASKS.md | TODO | Docs Guild | DOCS-OBS-50-002 | Document telemetry standards (fields, scrubbing, sampling) under `/docs/observability/telemetry-standards.md`. |
-| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | docs/TASKS.md | TODO | Docs Guild | DOCS-OBS-50-003 | Publish structured logging guide `/docs/observability/logging.md` with examples and imposed rule banner. |
-| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | docs/TASKS.md | TODO | Docs Guild | DOCS-OBS-50-004 | Publish tracing guide `/docs/observability/tracing.md` covering context propagation and sampling. |
-| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | docs/TASKS.md | TODO | Docs Guild | DOCS-SEC-OBS-50-001 | Update `/docs/security/redaction-and-privacy.md` for telemetry privacy controls. |
-| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | ops/devops/TASKS.md | DOING (2025-10-26) | DevOps Guild | DEVOPS-OBS-50-002 | Stand up multi-tenant metrics/logs/traces backends with retention and isolation. |
-> Staging rollout plan recorded in `docs/ops/telemetry-storage.md`; waiting on Authority-issued tokens and namespace bootstrap.
-| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | src/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-OBS-50-001 | Introduce observability/timeline/evidence/attestation scopes and update discovery metadata. |
-| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-OBS-50-001 | Propagate trace headers from CLI commands and print correlation IDs. |
-| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | src/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-OBS-50-001 | Replace ad-hoc logging with telemetry core across advisory ingestion/linking. |
-| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | src/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-WEB-OBS-50-001 | Adopt telemetry core in Concelier APIs and surface correlation IDs. |
-| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | src/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-OBS-50-001 | Integrate telemetry core into VEX ingestion/linking with scope metadata. |
-| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | src/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-WEB-OBS-50-001 | Add telemetry core to VEX APIs and emit trace headers. |
-| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | src/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-OBS-50-001 | Enable telemetry core in export planner/workers capturing bundle metadata. |
-| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | src/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-OBS-50-001 | Wire telemetry core through ledger writer/projector for append/replay operations. |
-| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | src/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-OBS-50-001 | Instrument orchestrator scheduler/control APIs with telemetry core spans/logs. |
-| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-OBS-50-001 | Instrument policy compile/evaluate flows with telemetry core spans/logs. |
-| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | src/StellaOps.TaskRunner/TASKS.md | TODO | Task Runner Guild | TASKRUN-OBS-50-001 | Adopt telemetry core in Task Runner host and workers with scrubbed transcripts. |
-| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | src/StellaOps.Telemetry.Core/TASKS.md | TODO | Observability Guild | TELEMETRY-OBS-50-001 | Bootstrap telemetry core library with structured logging, OTLP exporters, and deterministic bootstrap. |
-| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | src/StellaOps.Telemetry.Core/TASKS.md | TODO | Observability Guild | TELEMETRY-OBS-50-002 | Deliver context propagation middleware for HTTP/gRPC/jobs/CLI carrying trace + tenant metadata. |
-| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-OBS-50-001 | Integrate telemetry core into gateway and emit structured traces/logs for all routes. |
-| Sprint 51 | Observability & Forensics Phase 2 – SLOs & Dashboards | docs/TASKS.md | TODO | Docs Guild | DOCS-OBS-51-001 | Publish `/docs/observability/metrics-and-slos.md` with alert policies. |
-| Sprint 51 | Observability & Forensics Phase 2 – SLOs & Dashboards | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-OBS-51-001 | Deploy SLO evaluator service, dashboards, and alert routing. |
-| Sprint 51 | Observability & Forensics Phase 2 – SLOs & Dashboards | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-OBS-51-001 | Implement `stella obs top` streaming health metrics command. |
-| Sprint 51 | Observability & Forensics Phase 2 – SLOs & Dashboards | src/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-OBS-51-001 | Emit ingest latency metrics + SLO thresholds for advisories. |
-| Sprint 51 | Observability & Forensics Phase 2 – SLOs & Dashboards | src/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-OBS-51-001 | Provide VEX ingest metrics and SLO burn-rate automation. |
-| Sprint 51 | Observability & Forensics Phase 2 – SLOs & Dashboards | src/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-OBS-51-001 | Capture export planner/bundle latency metrics and SLOs. |
-| Sprint 51 | Observability & Forensics Phase 2 – SLOs & Dashboards | src/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-OBS-51-001 | Add ledger/projector metrics dashboards and burn-rate policies. |
-| Sprint 51 | Observability & Forensics Phase 2 – SLOs & Dashboards | src/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-OBS-51-001 | Ingest SLO burn-rate webhooks and deliver observability alerts. |
-| Sprint 51 | Observability & Forensics Phase 2 – SLOs & Dashboards | src/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-OBS-51-001 | Publish orchestration metrics, SLOs, and burn-rate alerts. |
-| Sprint 51 | Observability & Forensics Phase 2 – SLOs & Dashboards | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-OBS-51-001 | Publish policy evaluation metrics + dashboards meeting SLO targets. |
-| Sprint 51 | Observability & Forensics Phase 2 – SLOs & Dashboards | src/StellaOps.TaskRunner/TASKS.md | TODO | Task Runner Guild | TASKRUN-OBS-51-001 | Emit task runner golden-signal metrics and SLO alerts. |
-| Sprint 51 | Observability & Forensics Phase 2 – SLOs & Dashboards | src/StellaOps.Telemetry.Core/TASKS.md | TODO | Observability Guild | TELEMETRY-OBS-51-001 | Ship metrics helpers + exemplar guards for golden signals. |
-| Sprint 51 | Observability & Forensics Phase 2 – SLOs & Dashboards | src/StellaOps.Telemetry.Core/TASKS.md | TODO | Security Guild | TELEMETRY-OBS-51-002 | Implement logging scrubbing and tenant debug override controls. |
-| Sprint 51 | Observability & Forensics Phase 2 – SLOs & Dashboards | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-OBS-51-001 | Expose `/obs/health` and `/obs/slo` aggregations for services. |
-| Sprint 52 | Observability & Forensics Phase 3 – Timeline & Decision Logs | docs/TASKS.md | TODO | Docs Guild | DOCS-CLI-OBS-52-001 | Document `stella obs` CLI commands and scripting patterns. |
-| Sprint 52 | Observability & Forensics Phase 3 – Timeline & Decision Logs | docs/TASKS.md | TODO | Docs Guild | DOCS-CONSOLE-OBS-52-001 | Document Console observability hub and trace/log search workflows. |
-| Sprint 52 | Observability & Forensics Phase 3 – Timeline & Decision Logs | docs/TASKS.md | TODO | Docs Guild | DOCS-CONSOLE-OBS-52-002 | Publish Console forensics/timeline guidance with imposed rule banner. |
-| Sprint 52 | Observability & Forensics Phase 3 – Timeline & Decision Logs | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-OBS-52-001 | Configure streaming pipelines and schema validation for timeline events. |
-| Sprint 52 | Observability & Forensics Phase 3 – Timeline & Decision Logs | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-OBS-52-001 | Add `stella obs trace` + log commands correlating timeline data. |
-| Sprint 52 | Observability & Forensics Phase 3 – Timeline & Decision Logs | src/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-OBS-52-001 | Emit advisory ingest/link timeline events with provenance metadata. |
-| Sprint 52 | Observability & Forensics Phase 3 – Timeline & Decision Logs | src/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-WEB-OBS-52-001 | Provide SSE bridge for advisory timeline events. |
-| Sprint 52 | Observability & Forensics Phase 3 – Timeline & Decision Logs | src/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-OBS-52-001 | Emit VEX ingest/link timeline events with justification info. |
-| Sprint 52 | Observability & Forensics Phase 3 – Timeline & Decision Logs | src/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-WEB-OBS-52-001 | Stream VEX timeline updates to clients with tenant filters. |
-| Sprint 52 | Observability & Forensics Phase 3 – Timeline & Decision Logs | src/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-OBS-52-001 | Publish export lifecycle events into timeline. |
-| Sprint 52 | Observability & Forensics Phase 3 – Timeline & Decision Logs | src/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-OBS-52-001 | Record ledger append/projection events into timeline stream. |
-| Sprint 52 | Observability & Forensics Phase 3 – Timeline & Decision Logs | src/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-OBS-52-001 | Emit job lifecycle timeline events with tenant/project metadata. |
-| Sprint 52 | Observability & Forensics Phase 3 – Timeline & Decision Logs | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-OBS-52-001 | Emit policy decision timeline events with rule summaries and trace IDs. |
-| Sprint 52 | Observability & Forensics Phase 3 – Timeline & Decision Logs | src/StellaOps.TaskRunner/TASKS.md | TODO | Task Runner Guild | TASKRUN-OBS-52-001 | Emit pack run timeline events and dedupe logic. |
-| Sprint 52 | Observability & Forensics Phase 3 – Timeline & Decision Logs | src/StellaOps.TimelineIndexer/TASKS.md | TODO | Timeline Indexer Guild | TIMELINE-OBS-52-001 | Bootstrap timeline indexer service and schema with RLS scaffolding. |
-| Sprint 52 | Observability & Forensics Phase 3 – Timeline & Decision Logs | src/StellaOps.TimelineIndexer/TASKS.md | TODO | Timeline Indexer Guild | TIMELINE-OBS-52-002 | Implement event ingestion pipeline with ordering and dedupe. |
-| Sprint 52 | Observability & Forensics Phase 3 – Timeline & Decision Logs | src/StellaOps.TimelineIndexer/TASKS.md | TODO | Timeline Indexer Guild | TIMELINE-OBS-52-003 | Expose timeline query APIs with tenant filters and pagination. |
-| Sprint 52 | Observability & Forensics Phase 3 – Timeline & Decision Logs | src/StellaOps.TimelineIndexer/TASKS.md | TODO | Security Guild | TIMELINE-OBS-52-004 | Finalize RLS + scope enforcement and audit logging for timeline reads. |
-| Sprint 52 | Observability & Forensics Phase 3 – Timeline & Decision Logs | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-OBS-52-001 | Provide trace/log proxy endpoints bridging to timeline + log store. |
-| Sprint 53 | Observability & Forensics Phase 4 – Evidence Locker | docs/TASKS.md | TODO | Docs Guild | DOCS-CLI-FORENSICS-53-001 | Document `stella forensic` CLI workflows with sample bundles. |
-| Sprint 53 | Observability & Forensics Phase 4 – Evidence Locker | docs/TASKS.md | TODO | Docs Guild | DOCS-FORENSICS-53-001 | Publish `/docs/forensics/evidence-locker.md` covering bundles, WORM, legal holds. |
-| Sprint 53 | Observability & Forensics Phase 4 – Evidence Locker | docs/TASKS.md | TODO | Docs Guild | DOCS-FORENSICS-53-003 | Publish `/docs/forensics/timeline.md` with schema and query examples. |
-| Sprint 53 | Observability & Forensics Phase 4 – Evidence Locker | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-OBS-53-001 | Provision WORM-capable storage, legal hold automation, and backup/restore scripts for evidence locker. |
-| Sprint 53 | Observability & Forensics Phase 4 – Evidence Locker | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-FORENSICS-53-001 | Ship `stella forensic snapshot` commands invoking evidence locker. |
-| Sprint 53 | Observability & Forensics Phase 4 – Evidence Locker | src/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-OBS-53-001 | Generate advisory evidence payloads (raw doc, linkset diff) for locker. |
-| Sprint 53 | Observability & Forensics Phase 4 – Evidence Locker | src/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-WEB-OBS-53-001 | Add `/evidence/advisories/*` gateway endpoints consuming locker APIs. |
-| Sprint 53 | Observability & Forensics Phase 4 – Evidence Locker | src/StellaOps.EvidenceLocker/TASKS.md | TODO | Evidence Locker Guild | EVID-OBS-53-001 | Bootstrap evidence locker service with schema, storage abstraction, and RLS. |
-| Sprint 53 | Observability & Forensics Phase 4 – Evidence Locker | src/StellaOps.EvidenceLocker/TASKS.md | TODO | Evidence Locker Guild | EVID-OBS-53-002 | Implement bundle builders for evaluation, job, and export snapshots. |
-| Sprint 53 | Observability & Forensics Phase 4 – Evidence Locker | src/StellaOps.EvidenceLocker/TASKS.md | TODO | Evidence Locker Guild | EVID-OBS-53-003 | Expose evidence APIs (create/get/verify/hold) with audit + quotas. |
-| Sprint 53 | Observability & Forensics Phase 4 – Evidence Locker | src/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-OBS-53-001 | Produce VEX evidence payloads and push to locker. |
-| Sprint 53 | Observability & Forensics Phase 4 – Evidence Locker | src/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-WEB-OBS-53-001 | Expose `/evidence/vex/*` endpoints retrieving locker bundles. |
-| Sprint 53 | Observability & Forensics Phase 4 – Evidence Locker | src/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-OBS-53-001 | Store export manifests + transcripts within evidence bundles. |
-| Sprint 53 | Observability & Forensics Phase 4 – Evidence Locker | src/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-OBS-53-001 | Persist evidence bundle references alongside ledger entries and expose lookup API. |
-| Sprint 53 | Observability & Forensics Phase 4 – Evidence Locker | src/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-OBS-53-001 | Attach job capsules + manifests to evidence locker snapshots. |
-| Sprint 53 | Observability & Forensics Phase 4 – Evidence Locker | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-OBS-53-001 | Build evaluation evidence bundles (inputs, rule traces, engine version). |
-| Sprint 53 | Observability & Forensics Phase 4 – Evidence Locker | src/StellaOps.TaskRunner/TASKS.md | TODO | Task Runner Guild | TASKRUN-OBS-53-001 | Capture step transcripts and manifests into evidence bundles. |
-| Sprint 53 | Observability & Forensics Phase 4 – Evidence Locker | src/StellaOps.TimelineIndexer/TASKS.md | TODO | Timeline Indexer Guild | TIMELINE-OBS-53-001 | Link timeline events to evidence bundle digests and expose evidence lookup endpoint. |
-| Sprint 54 | Observability & Forensics Phase 5 – Provenance & Verification | docs/TASKS.md | TODO | Docs Guild | DOCS-FORENSICS-53-002 | Publish `/docs/forensics/provenance-attestation.md` covering signing + verification. |
-| Sprint 54 | Observability & Forensics Phase 5 – Provenance & Verification | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-OBS-54-001 | Manage provenance signing infrastructure (KMS keys, timestamp authority) and CI verification. |
-| Sprint 54 | Observability & Forensics Phase 5 – Provenance & Verification | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-FORENSICS-54-001 | Implement `stella forensic verify` command verifying bundles + signatures. |
-| Sprint 54 | Observability & Forensics Phase 5 – Provenance & Verification | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-FORENSICS-54-002 | Add `stella forensic attest show` command with signer/timestamp details. |
-| Sprint 54 | Observability & Forensics Phase 5 – Provenance & Verification | src/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-OBS-54-001 | Sign advisory batches with DSSE attestations and expose verification. |
-| Sprint 54 | Observability & Forensics Phase 5 – Provenance & Verification | src/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-WEB-OBS-54-001 | Add `/attestations/advisories/*` endpoints surfacing verification metadata. |
-| Sprint 54 | Observability & Forensics Phase 5 – Provenance & Verification | src/StellaOps.EvidenceLocker/TASKS.md | TODO | Evidence Locker Guild | EVID-OBS-54-001 | Attach DSSE signing/timestamping to evidence bundles and emit timeline hooks. |
-| Sprint 54 | Observability & Forensics Phase 5 – Provenance & Verification | src/StellaOps.EvidenceLocker/TASKS.md | TODO | Evidence Locker Guild | EVID-OBS-54-002 | Provide bundle packaging + offline verification fixtures. |
-| Sprint 54 | Observability & Forensics Phase 5 – Provenance & Verification | src/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-OBS-54-001 | Produce VEX batch attestations linking to timeline/ledger. |
-| Sprint 54 | Observability & Forensics Phase 5 – Provenance & Verification | src/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-WEB-OBS-54-001 | Expose `/attestations/vex/*` endpoints with verification summaries. |
-| Sprint 54 | Observability & Forensics Phase 5 – Provenance & Verification | src/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-OBS-54-001 | Produce export attestation manifests and CLI verification hooks. |
-| Sprint 54 | Observability & Forensics Phase 5 – Provenance & Verification | src/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-OBS-54-001 | Produce DSSE attestations for jobs and surface verification endpoint. |
-| Sprint 54 | Observability & Forensics Phase 5 – Provenance & Verification | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-OBS-54-001 | Generate DSSE attestations for policy evaluations and expose verification API. |
-| Sprint 54 | Observability & Forensics Phase 5 – Provenance & Verification | src/StellaOps.Provenance.Attestation/TASKS.md | TODO | Provenance Guild | PROV-OBS-53-001 | Implement DSSE/SLSA models with deterministic serializer + test vectors. |
-| Sprint 54 | Observability & Forensics Phase 5 – Provenance & Verification | src/StellaOps.Provenance.Attestation/TASKS.md | TODO | Provenance Guild | PROV-OBS-53-002 | Build signer abstraction (cosign/KMS/offline) with policy enforcement. |
-| Sprint 54 | Observability & Forensics Phase 5 – Provenance & Verification | src/StellaOps.Provenance.Attestation/TASKS.md | TODO | Provenance Guild | PROV-OBS-54-001 | Deliver verification library validating DSSE signatures + Merkle roots. |
-| Sprint 54 | Observability & Forensics Phase 5 – Provenance & Verification | src/StellaOps.Provenance.Attestation/TASKS.md | TODO | Provenance Guild, DevEx/CLI Guild | PROV-OBS-54-002 | Package provenance verification tool for CLI integration and offline use. |
-| Sprint 54 | Observability & Forensics Phase 5 – Provenance & Verification | src/StellaOps.TaskRunner/TASKS.md | TODO | Task Runner Guild | TASKRUN-OBS-54-001 | Generate pack run attestations and link to timeline/evidence. |
-| Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | docs/TASKS.md | TODO | Docs Guild | DOCS-RUNBOOK-55-001 | Publish `/docs/runbooks/incidents.md` covering activation, escalation, and verification checklist. |
-| Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-OBS-55-001 | Automate incident mode activation via SLO alerts, retention override management, and reset job. |
-| Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | src/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-OBS-55-001 | Enforce `obs:incident` scope with fresh-auth requirement and audit export for toggles. |
-| Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-OBS-55-001 | Ship `stella obs incident-mode` commands with safeguards and audit logging. |
-| Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | src/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-OBS-55-001 | Increase sampling and raw payload retention under incident mode with redaction guards. |
-| Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | src/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-WEB-OBS-55-001 | Provide incident mode toggle endpoints and propagate to services. |
-| Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | src/StellaOps.EvidenceLocker/TASKS.md | TODO | Evidence Locker Guild | EVID-OBS-55-001 | Extend evidence retention + activation events for incident windows. |
-| Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | src/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-OBS-55-001 | Enable incident sampling + retention overrides for VEX pipelines. |
-| Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | src/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-WEB-OBS-55-001 | Add incident mode APIs for VEX services with audit + guardrails. |
-| Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | src/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-OBS-55-001 | Increase export telemetry + debug retention during incident mode and emit events. |
-| Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | src/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-OBS-55-001 | Extend retention and diagnostics capture during incident mode. |
-| Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | src/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-OBS-55-001 | Send incident mode start/stop notifications with quick links to evidence/timeline. |
-| Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | src/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-OBS-55-001 | Increase telemetry + evidence capture during incident mode and emit activation events. |
-| Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-OBS-55-001 | Capture full rule traces + retention bump on incident activation with timeline events. |
-| Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | src/StellaOps.TaskRunner/TASKS.md | TODO | Task Runner Guild | TASKRUN-OBS-55-001 | Capture extra debug data + notifications for incident mode runs. |
-| Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | src/StellaOps.Telemetry.Core/TASKS.md | TODO | Observability Guild | TELEMETRY-OBS-55-001 | Implement incident mode sampling toggle API with activation audit trail. |
-| Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-OBS-55-001 | Deliver `/obs/incident-mode` control endpoints with audit + retention previews. |
-| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | docs/TASKS.md | TODO | Docs Guild | DOCS-AIRGAP-56-001 | Publish `/docs/airgap/overview.md`. |
-| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | docs/TASKS.md | TODO | Docs Guild | DOCS-AIRGAP-56-002 | Document sealing and egress controls. |
-| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | docs/TASKS.md | TODO | Docs Guild | DOCS-AIRGAP-56-003 | Publish mirror bundles guide. |
-| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | docs/TASKS.md | TODO | Docs Guild | DOCS-AIRGAP-56-004 | Publish bootstrap pack guide. |
-| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-AIRGAP-56-001 | Publish deny-all egress policies and verification script for sealed environments. |
-| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-AIRGAP-56-002 | Provide bundle staging/import scripts for air-gapped object stores. |
-| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-AIRGAP-56-003 | Build Bootstrap Pack pipeline bundling images/charts with checksums. |
-| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | src/StellaOps.AirGap.Controller/TASKS.md | TODO | AirGap Controller Guild | AIRGAP-CTL-56-001 | Implement sealing state machine, persistence, and RBAC scopes for air-gapped status. |
-| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | src/StellaOps.AirGap.Controller/TASKS.md | TODO | AirGap Controller Guild | AIRGAP-CTL-56-002 | Expose seal/status APIs with policy hash validation and staleness placeholders. |
-| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | src/StellaOps.AirGap.Importer/TASKS.md | TODO | AirGap Importer Guild | AIRGAP-IMP-56-001 | Implement DSSE/TUF/Merkle verification helpers. |
-| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | src/StellaOps.AirGap.Importer/TASKS.md | TODO | AirGap Importer Guild | AIRGAP-IMP-56-002 | Enforce root rotation policy for bundles. |
-| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | src/StellaOps.AirGap.Policy/TASKS.md | TODO | AirGap Policy Guild | AIRGAP-POL-56-001 | Ship `EgressPolicy` facade with sealed/unsealed enforcement and remediation errors. |
-| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | src/StellaOps.AirGap.Policy/TASKS.md | TODO | AirGap Policy Guild | AIRGAP-POL-56-002 | Deliver Roslyn analyzer blocking raw HTTP clients; wire into CI. |
-| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-AIRGAP-56-001 | Implement mirror create/verify and airgap verify commands. |
-| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-OBS-50-001 | Ensure telemetry propagation for sealed logging. |
-| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | src/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-AIRGAP-56-001 | Add mirror ingestion adapters preserving source metadata. |
-| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | src/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-AIRGAP-56-001 | Add VEX mirror ingestion adapters. |
-| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | src/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-AIRGAP-56-001 | Extend export center to build mirror bundles. |
-| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | src/StellaOps.Mirror.Creator/TASKS.md | TODO | Mirror Creator Guild | MIRROR-CRT-56-001 | Build deterministic bundle assembler (advisories/vex/policy). |
-| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | src/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-AIRGAP-56-001 | Validate jobs against sealed-mode restrictions. |
-| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-AIRGAP-56-001 | Accept policy packs from bundles with provenance tracking. |
-| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | src/StellaOps.TaskRunner/TASKS.md | TODO | Task Runner Guild | TASKRUN-AIRGAP-56-001 | Enforce sealed-mode plan validation for network calls. |
-| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | src/StellaOps.Telemetry.Core/TASKS.md | TODO | Observability Guild | TELEMETRY-OBS-56-001 | (Carry) Extend telemetry core with sealed-mode hooks before integration. |
-| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-OBS-56-001 | Extend telemetry core usage for sealed-mode status surfaces (seal/unseal dashboards, drift signals). |
-| Sprint 57 | Air-Gapped Mode Phase 2 – Mirror Bundles & Imports | docs/TASKS.md | TODO | Docs Guild | DOCS-AIRGAP-57-001 | Publish staleness/time doc. |
-| Sprint 57 | Air-Gapped Mode Phase 2 – Mirror Bundles & Imports | docs/TASKS.md | TODO | Docs Guild | DOCS-AIRGAP-57-002 | Publish console airgap doc. |
-| Sprint 57 | Air-Gapped Mode Phase 2 – Mirror Bundles & Imports | docs/TASKS.md | TODO | Docs Guild | DOCS-AIRGAP-57-003 | Publish CLI airgap doc. |
-| Sprint 57 | Air-Gapped Mode Phase 2 – Mirror Bundles & Imports | docs/TASKS.md | TODO | Docs Guild | DOCS-AIRGAP-57-004 | Publish airgap operations runbook. |
-| Sprint 57 | Air-Gapped Mode Phase 2 – Mirror Bundles & Imports | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-AIRGAP-57-001 | Automate mirror bundle creation with approvals. |
-| Sprint 57 | Air-Gapped Mode Phase 2 – Mirror Bundles & Imports | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-AIRGAP-57-002 | Run sealed-mode CI suite enforcing zero egress. |
-| Sprint 57 | Air-Gapped Mode Phase 2 – Mirror Bundles & Imports | src/StellaOps.AirGap.Importer/TASKS.md | TODO | AirGap Importer Guild | AIRGAP-IMP-57-001 | Implement bundle catalog with RLS + migrations. |
-| Sprint 57 | Air-Gapped Mode Phase 2 – Mirror Bundles & Imports | src/StellaOps.AirGap.Importer/TASKS.md | TODO | AirGap Importer Guild | AIRGAP-IMP-57-002 | Load artifacts into object store with checksum verification. |
-| Sprint 57 | Air-Gapped Mode Phase 2 – Mirror Bundles & Imports | src/StellaOps.AirGap.Policy/TASKS.md | TODO | AirGap Policy Guild | AIRGAP-POL-57-001 | Adopt EgressPolicy in core services. |
-| Sprint 57 | Air-Gapped Mode Phase 2 – Mirror Bundles & Imports | src/StellaOps.AirGap.Policy/TASKS.md | TODO | AirGap Policy Guild | AIRGAP-POL-57-002 | Enforce Task Runner job plan validation. |
-| Sprint 57 | Air-Gapped Mode Phase 2 – Mirror Bundles & Imports | src/StellaOps.AirGap.Time/TASKS.md | TODO | AirGap Time Guild | AIRGAP-TIME-57-001 | Parse signed time tokens and expose normalized anchors. |
-| Sprint 57 | Air-Gapped Mode Phase 2 – Mirror Bundles & Imports | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-AIRGAP-57-001 | Complete airgap import CLI with diff preview. |
-| Sprint 57 | Air-Gapped Mode Phase 2 – Mirror Bundles & Imports | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-AIRGAP-57-002 | Ship seal/status CLI commands. |
-| Sprint 57 | Air-Gapped Mode Phase 2 – Mirror Bundles & Imports | src/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-AIRGAP-56-002 | Deliver bootstrap pack artifacts. |
-| Sprint 57 | Air-Gapped Mode Phase 2 – Mirror Bundles & Imports | src/StellaOps.Mirror.Creator/TASKS.md | TODO | Mirror Creator Guild | MIRROR-CRT-57-001 | Add OCI image support to mirror bundles. |
-| Sprint 57 | Air-Gapped Mode Phase 2 – Mirror Bundles & Imports | src/StellaOps.Mirror.Creator/TASKS.md | TODO | Mirror Creator Guild | MIRROR-CRT-57-002 | Embed signed time anchors in bundles. |
-| Sprint 57 | Air-Gapped Mode Phase 2 – Mirror Bundles & Imports | src/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-AIRGAP-56-001 | Lock notifications to enclave-safe channels. |
-| Sprint 57 | Air-Gapped Mode Phase 2 – Mirror Bundles & Imports | src/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-AIRGAP-56-002 | Integrate sealing status + staleness into scheduling. |
-| Sprint 57 | Air-Gapped Mode Phase 2 – Mirror Bundles & Imports | src/StellaOps.TaskRunner/TASKS.md | TODO | Task Runner Guild | TASKRUN-AIRGAP-56-002 | Provide bundle ingestion helper steps. |
-| Sprint 58 | Air-Gapped Mode Phase 3 – Staleness & Enforcement | docs/TASKS.md | TODO | Docs Guild | DOCS-AIRGAP-58-001 | Publish degradation matrix doc. |
-| Sprint 58 | Air-Gapped Mode Phase 3 – Staleness & Enforcement | docs/TASKS.md | TODO | Docs Guild | DOCS-AIRGAP-58-002 | Update trust & signing doc for DSSE/TUF roots. |
-| Sprint 58 | Air-Gapped Mode Phase 3 – Staleness & Enforcement | docs/TASKS.md | TODO | Docs Guild | DOCS-AIRGAP-58-003 | Publish developer airgap contracts doc. |
-| Sprint 58 | Air-Gapped Mode Phase 3 – Staleness & Enforcement | docs/TASKS.md | TODO | Docs Guild | DOCS-AIRGAP-58-004 | Document portable evidence workflows. |
-| Sprint 58 | Air-Gapped Mode Phase 3 – Staleness & Enforcement | src/StellaOps.AirGap.Controller/TASKS.md | TODO | AirGap Controller Guild | AIRGAP-CTL-58-001 | Persist time anchor data and expose drift metrics. |
-| Sprint 58 | Air-Gapped Mode Phase 3 – Staleness & Enforcement | src/StellaOps.AirGap.Policy/TASKS.md | TODO | AirGap Policy Guild | AIRGAP-POL-58-001 | Disable remote observability exporters in sealed mode. |
-| Sprint 58 | Air-Gapped Mode Phase 3 – Staleness & Enforcement | src/StellaOps.AirGap.Policy/TASKS.md | TODO | AirGap Policy Guild | AIRGAP-POL-58-002 | Add CLI sealed-mode guard. |
-| Sprint 58 | Air-Gapped Mode Phase 3 – Staleness & Enforcement | src/StellaOps.AirGap.Time/TASKS.md | TODO | AirGap Time Guild | AIRGAP-TIME-58-001 | Compute drift/staleness metrics and surface via controller status. |
-| Sprint 58 | Air-Gapped Mode Phase 3 – Staleness & Enforcement | src/StellaOps.AirGap.Time/TASKS.md | TODO | AirGap Time Guild | AIRGAP-TIME-58-002 | Emit notifications/events for staleness budgets. |
-| Sprint 58 | Air-Gapped Mode Phase 3 – Staleness & Enforcement | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-AIRGAP-58-001 | Ship portable evidence export helper. |
-| Sprint 58 | Air-Gapped Mode Phase 3 – Staleness & Enforcement | src/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-AIRGAP-57-002 | Annotate advisories with staleness metadata. |
-| Sprint 58 | Air-Gapped Mode Phase 3 – Staleness & Enforcement | src/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-AIRGAP-57-002 | Annotate VEX statements with staleness metadata. |
-| Sprint 58 | Air-Gapped Mode Phase 3 – Staleness & Enforcement | src/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-AIRGAP-57-001 | Add portable evidence export integration. |
-| Sprint 58 | Air-Gapped Mode Phase 3 – Staleness & Enforcement | src/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-AIRGAP-57-001 | Notify on drift/staleness thresholds. |
-| Sprint 58 | Air-Gapped Mode Phase 3 – Staleness & Enforcement | src/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-AIRGAP-58-001 | Link import/export jobs to timeline/evidence. |
-| Sprint 58 | Air-Gapped Mode Phase 3 – Staleness & Enforcement | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-AIRGAP-57-002 | Show degradation fallback info in explain traces. |
-| Sprint 58 | Air-Gapped Mode Phase 3 – Staleness & Enforcement | src/StellaOps.TaskRunner/TASKS.md | TODO | Task Runner Guild | TASKRUN-AIRGAP-58-001 | Capture import job evidence transcripts. |
-| Sprint 59 | Air-Gapped Mode Phase 4 – Deterministic Jobs & Enforcement | src/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-WEB-AIRGAP-57-001 | Map sealed-mode violations to standard errors. |
-| Sprint 59 | Air-Gapped Mode Phase 4 – Deterministic Jobs & Enforcement | src/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-WEB-AIRGAP-57-001 | Map sealed-mode violations to standard errors. |
-| Sprint 59 | Air-Gapped Mode Phase 4 – Deterministic Jobs & Enforcement | src/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-AIRGAP-58-001 | Emit notifications/timeline for bundle readiness. |
-| Sprint 59 | Air-Gapped Mode Phase 4 – Deterministic Jobs & Enforcement | src/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-AIRGAP-56-002 | Enforce staleness thresholds for findings exports. |
-| Sprint 59 | Air-Gapped Mode Phase 4 – Deterministic Jobs & Enforcement | src/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-AIRGAP-58-001 | Notify on portable evidence exports. |
-| Sprint 59 | Air-Gapped Mode Phase 4 – Deterministic Jobs & Enforcement | src/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-AIRGAP-57-001 | Automate mirror bundle job scheduling with audit provenance. |
-| Sprint 59 | Air-Gapped Mode Phase 4 – Deterministic Jobs & Enforcement | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-AIRGAP-57-001 | Enforce sealed-mode guardrails inside evaluation engine. |
-| Sprint 59 | Air-Gapped Mode Phase 4 – Deterministic Jobs & Enforcement | src/StellaOps.TaskRunner/TASKS.md | TODO | Task Runner Guild | TASKRUN-AIRGAP-57-001 | Block execution when seal state mismatched; emit timeline events. |
-| Sprint 60 | Air-Gapped Mode Phase 5 – Evidence Portability & UX | docs/TASKS.md | TODO | Docs Guild | DOCS-AIRGAP-58-004 | Document portable evidence workflows. |
| -| Sprint 60 | Air-Gapped Mode Phase 5 – Evidence Portability & UX | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-AIRGAP-58-001 | Finalize portable evidence CLI workflow with verification. | -| Sprint 60 | Air-Gapped Mode Phase 5 – Evidence Portability & UX | src/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-WEB-AIRGAP-58-001 | Emit timeline events for bundle imports. | -| Sprint 60 | Air-Gapped Mode Phase 5 – Evidence Portability & UX | src/StellaOps.EvidenceLocker/TASKS.md | TODO | Evidence Locker Guild | EVID-OBS-60-001 | Deliver portable evidence export flow for sealed environments with checksum manifest and offline verification script. | -| Sprint 60 | Air-Gapped Mode Phase 5 – Evidence Portability & UX | src/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-WEB-AIRGAP-58-001 | Emit timeline events for VEX bundle imports. | -| Sprint 60 | Air-Gapped Mode Phase 5 – Evidence Portability & UX | src/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-AIRGAP-57-001 | Link findings to portable evidence bundles. | -| Sprint 60 | Air-Gapped Mode Phase 5 – Evidence Portability & UX | src/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-AIRGAP-58-001 | (Carry) Portable evidence notifications. | -| Sprint 60 | Air-Gapped Mode Phase 5 – Evidence Portability & UX | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-AIRGAP-58-001 | Notify on stale policy packs and guide remediation. | -| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | docs/TASKS.md | TODO | Docs Guild | DOCS-OAS-61-001 | Publish `/docs/api/overview.md`. | -| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | docs/TASKS.md | TODO | Docs Guild | DOCS-OAS-61-002 | Publish `/docs/api/conventions.md`. | -| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | docs/TASKS.md | TODO | Docs Guild | DOCS-OAS-61-003 | Publish `/docs/api/versioning.md`. | -| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-OAS-61-001 | Add OAS lint/validation/diff stages to CI. | -| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/StellaOps.Api.Governance/TASKS.md | TODO | API Governance Guild | APIGOV-61-001 | Configure lint rules and CI enforcement. | -| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/StellaOps.Api.Governance/TASKS.md | TODO | API Governance Guild | APIGOV-61-002 | Enforce example coverage in CI. | -| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/StellaOps.Api.OpenApi/TASKS.md | TODO | API Contracts Guild | OAS-61-001 | Scaffold per-service OpenAPI skeletons with shared components. | -| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/StellaOps.Api.OpenApi/TASKS.md | TODO | API Contracts Guild | OAS-61-002 | Build aggregate composer and integrate into CI. | -| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-OAS-61-001 | Document Authority authentication APIs in OAS. | -| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-OAS-61-002 | Provide Authority discovery endpoint. | -| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-OAS-61-001 | Update advisory OAS coverage. 
| -| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-OAS-61-002 | Populate advisory examples. | -| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-WEB-OAS-61-001 | Implement Concelier discovery endpoint. | -| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-WEB-OAS-61-002 | Standardize error envelope. | -| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-OAS-61-001 | Update VEX OAS coverage. | -| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-OAS-61-002 | Provide VEX examples. | -| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-WEB-OAS-61-001 | Implement discovery endpoint. | -| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-WEB-OAS-61-002 | Migrate errors to standard envelope. | -| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-OAS-61-001 | Update Exporter spec coverage. | -| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-OAS-61-002 | Implement Exporter discovery endpoint. | -| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-OAS-61-001 | Expand Findings Ledger spec coverage. | -| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-OAS-61-002 | Provide ledger discovery endpoint. | -| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-OAS-61-001 | Update notifier spec coverage. | -| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-OAS-61-002 | Implement notifier discovery endpoint. | -| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-OAS-61-001 | Extend Orchestrator spec coverage. | -| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-OAS-61-002 | Provide orchestrator discovery endpoint. | -| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/StellaOps.TaskRunner/TASKS.md | TODO | Task Runner Guild | TASKRUN-OAS-61-001 | Document Task Runner APIs in OAS. | -| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/StellaOps.TaskRunner/TASKS.md | TODO | Task Runner Guild | TASKRUN-OAS-61-002 | Expose Task Runner discovery endpoint. | -| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-OAS-61-001 | Implement gateway discovery endpoint. 
| -| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-OAS-61-002 | Standardize error envelope across gateway. | -| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | docs/TASKS.md | TODO | Docs Guild | DOCS-CONTRIB-62-001 | Publish API contracts contributing guide. | -| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | docs/TASKS.md | TODO | Docs Guild | DOCS-DEVPORT-62-001 | Document dev portal publishing. | -| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | docs/TASKS.md | TODO | Docs Guild | DOCS-OAS-62-001 | Deploy `/docs/api/reference/` generated site. | -| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | docs/TASKS.md | TODO | Docs Guild | DOCS-SDK-62-001 | Publish SDK overview + language guides. | -| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | docs/TASKS.md | TODO | Docs Guild | DOCS-SEC-62-001 | Update auth scopes documentation. | -| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | docs/TASKS.md | TODO | Docs Guild | DOCS-TEST-62-001 | Publish contract testing doc. | -| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | src/StellaOps.Api.Governance/TASKS.md | TODO | API Governance Guild | APIGOV-62-001 | Implement compatibility diff tool. | -| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | src/StellaOps.Api.OpenApi/TASKS.md | TODO | API Contracts Guild | OAS-62-001 | Populate examples for top endpoints. | -| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | src/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-OAS-62-001 | Provide SDK auth helpers/tests. | -| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-SDK-62-001 | Migrate CLI to official SDK. | -| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-SDK-62-002 | Update CLI error handling for new envelope. | -| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | src/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-OAS-62-001 | Add SDK smoke tests for advisory APIs. | -| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | src/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-WEB-OAS-62-001 | Add advisory API examples. | -| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | src/StellaOps.DevPortal.Site/TASKS.md | TODO | Developer Portal Guild | DEVPORT-62-001 | Build static generator with nav/search. | -| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | src/StellaOps.DevPortal.Site/TASKS.md | TODO | Developer Portal Guild | DEVPORT-62-002 | Add schema viewer, examples, version selector. | -| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | src/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-OAS-62-001 | Add SDK tests for VEX APIs. | -| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | src/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-WEB-OAS-62-001 | Provide VEX API examples. | -| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | src/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-OAS-62-001 | Ensure SDK streaming helpers for exports. | -| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | src/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-OAS-62-001 | Provide SDK tests for ledger APIs. 
| -| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | src/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-OAS-62-001 | Provide SDK examples for notifier APIs. | -| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | src/StellaOps.Sdk.Generator/TASKS.md | TODO | SDK Generator Guild | SDKGEN-62-001 | Establish generator framework. | -| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | src/StellaOps.Sdk.Generator/TASKS.md | TODO | SDK Generator Guild | SDKGEN-62-002 | Implement shared post-processing helpers. | -| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | src/StellaOps.TaskRunner/TASKS.md | TODO | Task Runner Guild | TASKRUN-OAS-62-001 | Provide SDK examples for pack runs. | -| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-OAS-62-001 | Align pagination/idempotency behaviors. | -| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | test/contract/TASKS.md | TODO | Contract Testing Guild | CONTR-62-001 | Generate mock server fixtures. | -| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | test/contract/TASKS.md | TODO | Contract Testing Guild | CONTR-62-002 | Integrate mock server into CI. | -| Sprint 63 | SDKs & OpenAPI Phase 3 – SDK Alpha & Try-It | docs/TASKS.md | TODO | Docs Guild | DOCS-TEST-62-001 | (Carry) ensure contract testing doc final. | -| Sprint 63 | SDKs & OpenAPI Phase 3 – SDK Alpha & Try-It | src/StellaOps.Api.Governance/TASKS.md | TODO | API Governance Guild | APIGOV-63-001 | Integrate compatibility diff gating. | -| Sprint 63 | SDKs & OpenAPI Phase 3 – SDK Alpha & Try-It | src/StellaOps.Api.OpenApi/TASKS.md | TODO | API Contracts Guild | OAS-63-001 | Compatibility diff support. | -| Sprint 63 | SDKs & OpenAPI Phase 3 – SDK Alpha & Try-It | src/StellaOps.Api.OpenApi/TASKS.md | TODO | API Contracts Guild | OAS-63-002 | Define discovery schema metadata. | -| Sprint 63 | SDKs & OpenAPI Phase 3 – SDK Alpha & Try-It | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-SDK-63-001 | Add CLI spec download command. | -| Sprint 63 | SDKs & OpenAPI Phase 3 – SDK Alpha & Try-It | src/StellaOps.DevPortal.Site/TASKS.md | TODO | Developer Portal Guild | DEVPORT-63-001 | Add Try-It console. | -| Sprint 63 | SDKs & OpenAPI Phase 3 – SDK Alpha & Try-It | src/StellaOps.DevPortal.Site/TASKS.md | TODO | Developer Portal Guild | DEVPORT-63-002 | Embed SDK snippets/quick starts. | -| Sprint 63 | SDKs & OpenAPI Phase 3 – SDK Alpha & Try-It | src/StellaOps.Sdk.Generator/TASKS.md | TODO | SDK Generator Guild | SDKGEN-63-001 | Release TypeScript SDK alpha. | -| Sprint 63 | SDKs & OpenAPI Phase 3 – SDK Alpha & Try-It | src/StellaOps.Sdk.Generator/TASKS.md | TODO | SDK Generator Guild | SDKGEN-63-002 | Release Python SDK alpha. | -| Sprint 63 | SDKs & OpenAPI Phase 3 – SDK Alpha & Try-It | src/StellaOps.Sdk.Generator/TASKS.md | TODO | SDK Generator Guild | SDKGEN-63-003 | Release Go SDK alpha. | -| Sprint 63 | SDKs & OpenAPI Phase 3 – SDK Alpha & Try-It | src/StellaOps.Sdk.Generator/TASKS.md | TODO | SDK Generator Guild | SDKGEN-63-004 | Release Java SDK alpha. | -| Sprint 63 | SDKs & OpenAPI Phase 3 – SDK Alpha & Try-It | src/StellaOps.Sdk.Release/TASKS.md | TODO | SDK Release Guild | SDKREL-63-001 | Configure SDK release pipelines. | -| Sprint 63 | SDKs & OpenAPI Phase 3 – SDK Alpha & Try-It | src/StellaOps.Sdk.Release/TASKS.md | TODO | SDK Release Guild | SDKREL-63-002 | Automate changelogs from OAS diffs. 
| -| Sprint 63 | SDKs & OpenAPI Phase 3 – SDK Alpha & Try-It | test/contract/TASKS.md | TODO | Contract Testing Guild | CONTR-63-001 | Build replay harness for drift detection. | -| Sprint 63 | SDKs & OpenAPI Phase 3 – SDK Alpha & Try-It | test/contract/TASKS.md | TODO | Contract Testing Guild | CONTR-63-002 | Emit contract testing metrics. | -| Sprint 64 | SDKs & OpenAPI Phase 4 – Harden & Offline Bundles | docs/TASKS.md | TODO | Docs Guild | DOCS-AIRGAP-DEVPORT-64-001 | Document devportal offline usage. | -| Sprint 64 | SDKs & OpenAPI Phase 4 – Harden & Offline Bundles | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-DEVPORT-63-001 | Automate developer portal pipeline. | -| Sprint 64 | SDKs & OpenAPI Phase 4 – Harden & Offline Bundles | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-DEVPORT-64-001 | Schedule offline bundle builds. | -| Sprint 64 | SDKs & OpenAPI Phase 4 – Harden & Offline Bundles | src/StellaOps.DevPortal.Site/TASKS.md | TODO | Developer Portal Guild | DEVPORT-64-001 | Offline portal build. | -| Sprint 64 | SDKs & OpenAPI Phase 4 – Harden & Offline Bundles | src/StellaOps.DevPortal.Site/TASKS.md | TODO | Developer Portal Guild | DEVPORT-64-002 | Add accessibility/performance checks. | -| Sprint 64 | SDKs & OpenAPI Phase 4 – Harden & Offline Bundles | src/StellaOps.ExportCenter.DevPortalOffline/TASKS.md | TODO | DevPortal Offline Guild | DVOFF-64-001 | Implement devportal offline export job. | -| Sprint 64 | SDKs & OpenAPI Phase 4 – Harden & Offline Bundles | src/StellaOps.ExportCenter.DevPortalOffline/TASKS.md | TODO | DevPortal Offline Guild | DVOFF-64-002 | Provide verification CLI. | -| Sprint 64 | SDKs & OpenAPI Phase 4 – Harden & Offline Bundles | src/StellaOps.Sdk.Generator/TASKS.md | TODO | SDK Generator Guild | SDKGEN-64-001 | Migrate CLI to SDK. | -| Sprint 64 | SDKs & OpenAPI Phase 4 – Harden & Offline Bundles | src/StellaOps.Sdk.Generator/TASKS.md | TODO | SDK Generator Guild | SDKGEN-64-002 | Integrate SDKs into Console. | -| Sprint 64 | SDKs & OpenAPI Phase 4 – Harden & Offline Bundles | src/StellaOps.Sdk.Release/TASKS.md | TODO | SDK Release Guild | SDKREL-64-001 | Hook SDK releases to Notifications. | -| Sprint 64 | SDKs & OpenAPI Phase 4 – Harden & Offline Bundles | src/StellaOps.Sdk.Release/TASKS.md | TODO | SDK Release Guild | SDKREL-64-002 | Produce devportal offline bundle. | -| Sprint 65 | SDKs & OpenAPI Phase 5 – Deprecation & Notifications | docs/TASKS.md | TODO | Docs Guild | DOCS-AIRGAP-DEVPORT-64-001 | (Carry) ensure offline doc published; update as necessary. | -| Sprint 65 | SDKs & OpenAPI Phase 5 – Deprecation & Notifications | src/StellaOps.Api.Governance/TASKS.md | TODO | API Governance Guild | APIGOV-63-001 | (Carry) compatibility gating monitoring. | -| Sprint 65 | SDKs & OpenAPI Phase 5 – Deprecation & Notifications | src/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-OAS-63-001 | Deprecation headers for auth endpoints. | -| Sprint 65 | SDKs & OpenAPI Phase 5 – Deprecation & Notifications | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-SDK-64-001 | SDK update awareness command. | -| Sprint 65 | SDKs & OpenAPI Phase 5 – Deprecation & Notifications | src/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-OAS-63-001 | Deprecation metadata for Concelier APIs. | -| Sprint 65 | SDKs & OpenAPI Phase 5 – Deprecation & Notifications | src/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-OAS-63-001 | Deprecation metadata for VEX APIs. 
| -| Sprint 65 | SDKs & OpenAPI Phase 5 – Deprecation & Notifications | src/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-OAS-63-001 | Deprecation headers for exporter APIs. | -| Sprint 65 | SDKs & OpenAPI Phase 5 – Deprecation & Notifications | src/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-OAS-63-001 | Deprecation headers for ledger APIs. | -| Sprint 65 | SDKs & OpenAPI Phase 5 – Deprecation & Notifications | src/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-OAS-63-001 | Emit deprecation notifications. | -| Sprint 65 | SDKs & OpenAPI Phase 5 – Deprecation & Notifications | src/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-OAS-63-001 | Add orchestrator deprecation headers. | -| Sprint 65 | SDKs & OpenAPI Phase 5 – Deprecation & Notifications | src/StellaOps.Sdk.Release/TASKS.md | TODO | SDK Release Guild | SDKREL-64-001 | Production rollout of notifications feed. | -| Sprint 65 | SDKs & OpenAPI Phase 5 – Deprecation & Notifications | src/StellaOps.TaskRunner/TASKS.md | TODO | Task Runner Guild | TASKRUN-OAS-63-001 | Add Task Runner deprecation headers. | -| Sprint 65 | SDKs & OpenAPI Phase 5 – Deprecation & Notifications | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-OAS-63-001 | Implement deprecation headers in gateway. | -| Sprint 66 | Risk Profiles Phase 1 – Foundations | docs/TASKS.md | TODO | Docs Guild | DOCS-RISK-66-001 | Publish `/docs/risk/overview.md`. | -| Sprint 66 | Risk Profiles Phase 1 – Foundations | docs/TASKS.md | TODO | Docs Guild | DOCS-RISK-66-002 | Publish `/docs/risk/profiles.md`. | -| Sprint 66 | Risk Profiles Phase 1 – Foundations | docs/TASKS.md | TODO | Docs Guild | DOCS-RISK-66-003 | Publish `/docs/risk/factors.md`. | -| Sprint 66 | Risk Profiles Phase 1 – Foundations | docs/TASKS.md | TODO | Docs Guild | DOCS-RISK-66-004 | Publish `/docs/risk/formulas.md`. | -| Sprint 66 | Risk Profiles Phase 1 – Foundations | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-RISK-66-001 | Implement CLI profile management commands. | -| Sprint 66 | Risk Profiles Phase 1 – Foundations | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-RISK-66-002 | Implement CLI simulation command. | -| Sprint 66 | Risk Profiles Phase 1 – Foundations | src/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-RISK-66-001 | Expose CVSS/KEV provider data. | -| Sprint 66 | Risk Profiles Phase 1 – Foundations | src/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-RISK-66-002 | Provide fix availability signals. | -| Sprint 66 | Risk Profiles Phase 1 – Foundations | src/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-RISK-66-001 | Supply VEX gating data to risk engine. | -| Sprint 66 | Risk Profiles Phase 1 – Foundations | src/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-RISK-66-002 | Provide reachability inputs. | -| Sprint 66 | Risk Profiles Phase 1 – Foundations | src/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-RISK-66-001 | Add risk scoring columns/indexes. | -| Sprint 66 | Risk Profiles Phase 1 – Foundations | src/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-RISK-66-002 | Implement deterministic scoring upserts. 
| -| Sprint 66 | Risk Profiles Phase 1 – Foundations | src/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-RISK-66-001 | Create risk severity alert templates. | -| Sprint 66 | Risk Profiles Phase 1 – Foundations | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-RISK-66-003 | Integrate schema validation into Policy Engine. | -| Sprint 66 | Risk Profiles Phase 1 – Foundations | src/StellaOps.Policy.RiskProfile/TASKS.md | TODO | Risk Profile Schema Guild | POLICY-RISK-66-001 | Deliver RiskProfile schema + validators. | -| Sprint 66 | Risk Profiles Phase 1 – Foundations | src/StellaOps.Policy.RiskProfile/TASKS.md | TODO | Risk Profile Schema Guild | POLICY-RISK-66-002 | Implement inheritance/merge and hashing. | -| Sprint 66 | Risk Profiles Phase 1 – Foundations | src/StellaOps.Policy/TASKS.md | TODO | Policy Guild | POLICY-RISK-66-004 | Extend Policy libraries for RiskProfile handling. | -| Sprint 66 | Risk Profiles Phase 1 – Foundations | src/StellaOps.RiskEngine/TASKS.md | TODO | Risk Engine Guild | RISK-ENGINE-66-001 | Scaffold risk engine queue/worker/registry. | -| Sprint 66 | Risk Profiles Phase 1 – Foundations | src/StellaOps.RiskEngine/TASKS.md | TODO | Risk Engine Guild | RISK-ENGINE-66-002 | Implement transforms/gates/contribution calculator. | -| Sprint 66 | Risk Profiles Phase 1 – Foundations | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-RISK-66-001 | Expose risk API routing in gateway. | -| Sprint 66 | Risk Profiles Phase 1 – Foundations | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-RISK-66-002 | Handle explainability downloads. | -| Sprint 67 | Risk Profiles Phase 2 – Providers & Lifecycle | docs/TASKS.md | TODO | Docs Guild | DOCS-RISK-67-001 | Publish explainability doc. | -| Sprint 67 | Risk Profiles Phase 2 – Providers & Lifecycle | docs/TASKS.md | TODO | Docs Guild | DOCS-RISK-67-002 | Publish risk API doc. | -| Sprint 67 | Risk Profiles Phase 2 – Providers & Lifecycle | docs/TASKS.md | TODO | Docs Guild | DOCS-RISK-67-003 | Publish console risk UI doc. | -| Sprint 67 | Risk Profiles Phase 2 – Providers & Lifecycle | docs/TASKS.md | TODO | Docs Guild | DOCS-RISK-67-004 | Publish CLI risk doc. | -| Sprint 67 | Risk Profiles Phase 2 – Providers & Lifecycle | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-RISK-67-001 | Provide risk results query command. | -| Sprint 67 | Risk Profiles Phase 2 – Providers & Lifecycle | src/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-RISK-67-001 | Add source consensus metrics. | -| Sprint 67 | Risk Profiles Phase 2 – Providers & Lifecycle | src/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-RISK-67-001 | Add VEX explainability metadata. | -| Sprint 67 | Risk Profiles Phase 2 – Providers & Lifecycle | src/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-RISK-67-001 | Notify on profile publish/deprecate. | -| Sprint 67 | Risk Profiles Phase 2 – Providers & Lifecycle | src/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-RISK-68-001 | (Prep) risk routing settings seeds. | -| Sprint 67 | Risk Profiles Phase 2 – Providers & Lifecycle | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-RISK-67-001 | Enqueue scoring on new findings. | -| Sprint 67 | Risk Profiles Phase 2 – Providers & Lifecycle | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-RISK-67-002 | Deliver profile lifecycle APIs. 
| -| Sprint 67 | Risk Profiles Phase 2 – Providers & Lifecycle | src/StellaOps.Policy.RiskProfile/TASKS.md | TODO | Risk Profile Schema Guild | POLICY-RISK-67-001 | Integrate profiles into policy store lifecycle. | -| Sprint 67 | Risk Profiles Phase 2 – Providers & Lifecycle | src/StellaOps.Policy.RiskProfile/TASKS.md | TODO | Risk Profile Schema Guild | POLICY-RISK-67-002 | Publish schema endpoint + validation tooling. | -| Sprint 67 | Risk Profiles Phase 2 – Providers & Lifecycle | src/StellaOps.Policy/TASKS.md | TODO | Policy Guild | POLICY-RISK-67-003 | Provide simulation orchestration APIs. | -| Sprint 67 | Risk Profiles Phase 2 – Providers & Lifecycle | src/StellaOps.RiskEngine/TASKS.md | TODO | Risk Engine Guild | RISK-ENGINE-67-001 | Integrate CVSS/KEV providers. | -| Sprint 67 | Risk Profiles Phase 2 – Providers & Lifecycle | src/StellaOps.RiskEngine/TASKS.md | TODO | Risk Engine Guild | RISK-ENGINE-67-002 | Integrate VEX gate provider. | -| Sprint 67 | Risk Profiles Phase 2 – Providers & Lifecycle | src/StellaOps.RiskEngine/TASKS.md | TODO | Risk Engine Guild | RISK-ENGINE-67-003 | Add fix availability/criticality/exposure providers. | -| Sprint 67 | Risk Profiles Phase 2 – Providers & Lifecycle | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-RISK-67-001 | Provide risk status endpoint. | -| Sprint 68 | Risk Profiles Phase 3 – APIs & Ledger | docs/TASKS.md | TODO | Docs Guild | DOCS-RISK-68-001 | Publish risk bundle doc. | -| Sprint 68 | Risk Profiles Phase 3 – APIs & Ledger | docs/TASKS.md | TODO | Docs Guild | DOCS-RISK-68-002 | Update AOC invariants doc. | -| Sprint 68 | Risk Profiles Phase 3 – APIs & Ledger | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-RISK-68-001 | Add risk bundle verification command. | -| Sprint 68 | Risk Profiles Phase 3 – APIs & Ledger | src/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-RISK-67-001 | Provide scored findings query API. | -| Sprint 68 | Risk Profiles Phase 3 – APIs & Ledger | src/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-RISK-68-001 | Enable scored findings export. | -| Sprint 68 | Risk Profiles Phase 3 – APIs & Ledger | src/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-RISK-68-001 | Configure risk notification routing UI/logic. | -| Sprint 68 | Risk Profiles Phase 3 – APIs & Ledger | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-RISK-68-001 | Ship simulation API endpoint. | -| Sprint 68 | Risk Profiles Phase 3 – APIs & Ledger | src/StellaOps.Policy/TASKS.md | TODO | Policy Guild | POLICY-RISK-68-002 | Support profile export/import. | -| Sprint 68 | Risk Profiles Phase 3 – APIs & Ledger | src/StellaOps.RiskEngine/TASKS.md | TODO | Risk Engine Guild | RISK-ENGINE-68-001 | Persist scoring results & explanations. | -| Sprint 68 | Risk Profiles Phase 3 – APIs & Ledger | src/StellaOps.RiskEngine/TASKS.md | TODO | Risk Engine Guild | RISK-ENGINE-68-002 | Expose jobs/results/explanations APIs. | -| Sprint 68 | Risk Profiles Phase 3 – APIs & Ledger | src/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-RISK-68-001 | Emit severity transition events via gateway. | -| Sprint 69 | Risk Profiles Phase 4 – Simulation & Reporting | docs/TASKS.md | TODO | Docs Guild | DOCS-RISK-67-001..004 | (Carry) ensure docs updated from simulation release. 
| -| Sprint 69 | Risk Profiles Phase 4 – Simulation & Reporting | src/StellaOps.ExportCenter.RiskBundles/TASKS.md | TODO | Risk Bundle Export Guild | RISK-BUNDLE-69-001 | Build risk bundle. | -| Sprint 69 | Risk Profiles Phase 4 – Simulation & Reporting | src/StellaOps.ExportCenter.RiskBundles/TASKS.md | TODO | Risk Bundle Export Guild | RISK-BUNDLE-69-002 | Integrate bundle into pipelines. | -| Sprint 69 | Risk Profiles Phase 4 – Simulation & Reporting | src/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-RISK-69-002 | Enable simulation report exports. | -| Sprint 69 | Risk Profiles Phase 4 – Simulation & Reporting | src/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-RISK-66-001 | (Completion) finalize severity alert templates. | -| Sprint 69 | Risk Profiles Phase 4 – Simulation & Reporting | src/StellaOps.RiskEngine/TASKS.md | TODO | Risk Engine Guild | RISK-ENGINE-69-001 | Implement simulation mode. | -| Sprint 69 | Risk Profiles Phase 4 – Simulation & Reporting | src/StellaOps.RiskEngine/TASKS.md | TODO | Risk Engine Guild | RISK-ENGINE-69-002 | Add telemetry/metrics dashboards. | -| Sprint 70 | Risk Profiles Phase 5 – Air-Gap & Advanced Factors | docs/TASKS.md | TODO | Docs Guild | DOCS-RISK-68-001 | (Carry) finalize risk bundle doc after verification CLI. | -| Sprint 70 | Risk Profiles Phase 5 – Air-Gap & Advanced Factors | src/StellaOps.ExportCenter.RiskBundles/TASKS.md | TODO | Risk Bundle Export Guild | RISK-BUNDLE-70-001 | Provide bundle verification CLI. | -| Sprint 70 | Risk Profiles Phase 5 – Air-Gap & Advanced Factors | src/StellaOps.ExportCenter.RiskBundles/TASKS.md | TODO | Risk Bundle Export Guild | RISK-BUNDLE-70-002 | Publish documentation. | -| Sprint 70 | Risk Profiles Phase 5 – Air-Gap & Advanced Factors | src/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-RISK-70-001 | Integrate risk bundle into offline kit. | -| Sprint 70 | Risk Profiles Phase 5 – Air-Gap & Advanced Factors | src/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-RISK-68-001 | Finalize risk alert routing UI. | -| Sprint 70 | Risk Profiles Phase 5 – Air-Gap & Advanced Factors | src/StellaOps.RiskEngine/TASKS.md | TODO | Risk Engine Guild | RISK-ENGINE-70-001 | Support offline provider bundles. | -| Sprint 70 | Risk Profiles Phase 5 – Air-Gap & Advanced Factors | src/StellaOps.RiskEngine/TASKS.md | TODO | Risk Engine Guild | RISK-ENGINE-70-002 | Integrate runtime/reachability providers. | -| Sprint 71 | Risk Profiles Phase 6 – Quality & Performance | docs/TASKS.md | TODO | Docs Guild | DOCS-RISK-67-001..68-002 | Final editorial pass on risk documentation set. | -| Sprint 71 | Risk Profiles Phase 6 – Quality & Performance | src/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-RISK-66-001..68-001 | Harden CLI commands with integration tests and error handling. | -| Sprint 71 | Risk Profiles Phase 6 – Quality & Performance | src/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-RISK-69-001 | Finalize dashboards and alerts for scoring latency. | -| Sprint 71 | Risk Profiles Phase 6 – Quality & Performance | src/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-RISK-68-001 | Tune routing/quiet hour dedupe for risk alerts. | -| Sprint 71 | Risk Profiles Phase 6 – Quality & Performance | src/StellaOps.RiskEngine/TASKS.md | TODO | Risk Engine Guild | RISK-ENGINE-69-002 | Optimize performance, cache, and incremental scoring; validate SLOs. 
| -| Sprint 72 | Attestor Console Phase 1 – Foundations | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-ATTEST-73-001 | (Prep) align CI secrets for Attestor service. | -| Sprint 72 | Attestor Console Phase 1 – Foundations | src/StellaOps.Attestor.Envelope/TASKS.md | TODO | Envelope Guild | ATTEST-ENVELOPE-72-001 | Implement DSSE canonicalization and hashing helpers. | -| Sprint 72 | Attestor Console Phase 1 – Foundations | src/StellaOps.Attestor.Envelope/TASKS.md | TODO | Envelope Guild | ATTEST-ENVELOPE-72-002 | Support compact/expanded output and detached payloads. | -| Sprint 72 | Attestor Console Phase 1 – Foundations | src/StellaOps.Attestor.Types/TASKS.md | TODO | Attestation Payloads Guild | ATTEST-TYPES-72-001 | Draft schemas for all attestation payload types. | -| Sprint 72 | Attestor Console Phase 1 – Foundations | src/StellaOps.Attestor.Types/TASKS.md | TODO | Attestation Payloads Guild | ATTEST-TYPES-72-002 | Generate models/validators from schemas. | -| Sprint 72 | Attestor Console Phase 1 – Foundations | src/StellaOps.Attestor/TASKS.md | TODO | Attestor Service Guild | ATTESTOR-72-001 | Scaffold attestor service skeleton. | -| Sprint 72 | Attestor Console Phase 1 – Foundations | src/StellaOps.Attestor/TASKS.md | TODO | Attestor Service Guild | ATTESTOR-72-002 | Implement attestation store + storage integration. | -| Sprint 72 | Attestor Console Phase 1 – Foundations | src/StellaOps.Cryptography.Kms/TASKS.md | TODO | KMS Guild | KMS-72-001 | Implement KMS interface + file driver. | -| Sprint 73 | Attestor CLI Phase 2 – Signing & Policies | src/StellaOps.Cli/TASKS.md | TODO | CLI Attestor Guild | CLI-ATTEST-73-001 | Implement `stella attest sign` (payload selection, subject digest, key reference, output format) using official SDK transport. | -| Sprint 73 | Attestor CLI Phase 2 – Signing & Policies | src/StellaOps.Cli/TASKS.md | TODO | CLI Attestor Guild | CLI-ATTEST-73-002 | Implement `stella attest verify` with policy selection, explainability output, and JSON/table formatting. | -| Sprint 73 | Attestor Console Phase 2 – Signing & Policies | docs/TASKS.md | TODO | Docs Guild | DOCS-ATTEST-73-001 | Publish attestor overview. | -| Sprint 73 | Attestor Console Phase 2 – Signing & Policies | docs/TASKS.md | TODO | Docs Guild | DOCS-ATTEST-73-002 | Publish payload docs. | -| Sprint 73 | Attestor Console Phase 2 – Signing & Policies | docs/TASKS.md | TODO | Docs Guild | DOCS-ATTEST-73-003 | Publish policies doc. | -| Sprint 73 | Attestor Console Phase 2 – Signing & Policies | docs/TASKS.md | TODO | Docs Guild | DOCS-ATTEST-73-004 | Publish workflows doc. | -| Sprint 73 | Attestor Console Phase 2 – Signing & Policies | src/StellaOps.Attestor.Envelope/TASKS.md | TODO | Envelope Guild | ATTEST-ENVELOPE-73-001 | Add signing/verification helpers with KMS integration. | -| Sprint 73 | Attestor Console Phase 2 – Signing & Policies | src/StellaOps.Attestor.Types/TASKS.md | TODO | Attestation Payloads Guild | ATTEST-TYPES-73-001 | Create golden payload fixtures. | -| Sprint 73 | Attestor Console Phase 2 – Signing & Policies | src/StellaOps.Attestor/TASKS.md | TODO | Attestor Service Guild | ATTESTOR-73-001 | Ship signing endpoint. | -| Sprint 73 | Attestor Console Phase 2 – Signing & Policies | src/StellaOps.Attestor/TASKS.md | TODO | Attestor Service Guild | ATTESTOR-73-002 | Ship verification pipeline and reports. 
| -| Sprint 73 | Attestor Console Phase 2 – Signing & Policies | src/StellaOps.Attestor/TASKS.md | TODO | Attestor Service Guild | ATTESTOR-73-003 | Implement list/fetch APIs. | -| Sprint 73 | Attestor Console Phase 2 – Signing & Policies | src/StellaOps.Cryptography.Kms/TASKS.md | TODO | KMS Guild | KMS-72-002 | CLI support for key import/export. | -| Sprint 73 | Attestor Console Phase 2 – Signing & Policies | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ATTEST-73-001 | Implement VerificationPolicy lifecycle. | -| Sprint 73 | Attestor Console Phase 2 – Signing & Policies | src/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ATTEST-73-002 | Surface policies in Policy Studio. | -| Sprint 74 | Attestor CLI Phase 3 – Transparency & Chain of Custody | src/StellaOps.Cli/TASKS.md | TODO | CLI Attestor Guild | CLI-ATTEST-74-001 | Implement `stella attest list` with filters (subject, type, issuer, scope) and pagination. | -| Sprint 74 | Attestor CLI Phase 3 – Transparency & Chain of Custody | src/StellaOps.Cli/TASKS.md | TODO | CLI Attestor Guild | CLI-ATTEST-74-002 | Implement `stella attest fetch` to download envelopes and payloads to disk. | -| Sprint 74 | Attestor Console Phase 3 – Transparency & Chain of Custody | docs/TASKS.md | TODO | Docs Guild | DOCS-ATTEST-74-001 | Publish keys & issuers doc. | -| Sprint 74 | Attestor Console Phase 3 – Transparency & Chain of Custody | docs/TASKS.md | TODO | Docs Guild | DOCS-ATTEST-74-002 | Publish transparency doc. | -| Sprint 74 | Attestor Console Phase 3 – Transparency & Chain of Custody | docs/TASKS.md | TODO | Docs Guild | DOCS-ATTEST-74-003 | Publish console attestor UI doc. | -| Sprint 74 | Attestor Console Phase 3 – Transparency & Chain of Custody | docs/TASKS.md | TODO | Docs Guild | DOCS-ATTEST-74-004 | Publish CLI attest doc. | -| Sprint 74 | Attestor Console Phase 3 – Transparency & Chain of Custody | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-ATTEST-74-001 | Deploy transparency witness infra. | -| Sprint 74 | Attestor Console Phase 3 – Transparency & Chain of Custody | src/StellaOps.Attestor.Envelope/TASKS.md | TODO | Envelope Guild | ATTEST-ENVELOPE-73-002 | Run fuzz tests for envelope handling. | -| Sprint 74 | Attestor Console Phase 3 – Transparency & Chain of Custody | src/StellaOps.Attestor.Verify/TASKS.md | TODO | Verification Guild | ATTEST-VERIFY-74-001 | Add telemetry for verification pipeline. | -| Sprint 74 | Attestor Console Phase 3 – Transparency & Chain of Custody | src/StellaOps.Attestor.Verify/TASKS.md | TODO | Verification Guild | ATTEST-VERIFY-74-002 | Document verification explainability. | -| Sprint 74 | Attestor Console Phase 3 – Transparency & Chain of Custody | src/StellaOps.Attestor/TASKS.md | TODO | Attestor Service Guild | ATTESTOR-74-001 | Integrate transparency witness client. | -| Sprint 74 | Attestor Console Phase 3 – Transparency & Chain of Custody | src/StellaOps.Attestor/TASKS.md | TODO | Attestor Service Guild | ATTESTOR-74-002 | Implement bulk verification worker. | -| Sprint 74 | Attestor Console Phase 3 – Transparency & Chain of Custody | src/StellaOps.ExportCenter.AttestationBundles/TASKS.md | TODO | Attestation Bundle Guild | EXPORT-ATTEST-74-001 | Build attestation bundle export job. | -| Sprint 74 | Attestor Console Phase 3 – Transparency & Chain of Custody | src/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-ATTEST-74-001 | Add verification/key notifications. 
|
-| Sprint 74 | Attestor Console Phase 3 – Transparency & Chain of Custody | src/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-ATTEST-74-002 | Notify key rotation/revocation. |
-| Sprint 75 | Attestor CLI Phase 4 – Air Gap & Bulk | src/StellaOps.Cli/TASKS.md | TODO | CLI Attestor Guild, Export Guild | CLI-ATTEST-75-002 | Add support for building/verifying attestation bundles in CLI. |
-| Sprint 75 | Attestor Console Phase 4 – Air Gap & Bulk | docs/TASKS.md | TODO | Docs Guild | DOCS-ATTEST-75-001 | Publish attestor airgap doc. |
-| Sprint 75 | Attestor Console Phase 4 – Air Gap & Bulk | docs/TASKS.md | TODO | Docs Guild | DOCS-ATTEST-75-002 | Update AOC invariants for attestations. |
-| Sprint 75 | Attestor Console Phase 4 – Air Gap & Bulk | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-ATTEST-74-002 | Integrate bundle builds into release/offline pipelines. |
-| Sprint 75 | Attestor Console Phase 4 – Air Gap & Bulk | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-ATTEST-75-001 | Dashboards/alerts for attestor metrics. |
-| Sprint 75 | Attestor Console Phase 4 – Air Gap & Bulk | src/StellaOps.Attestor/TASKS.md | TODO | Attestor Service Guild | ATTESTOR-75-001 | Support attestation bundle export/import for air gap. |
-| Sprint 75 | Attestor Console Phase 4 – Air Gap & Bulk | src/StellaOps.Attestor/TASKS.md | TODO | Attestor Service Guild | ATTESTOR-75-002 | Harden APIs (rate limits, fuzz tests, threat model actions). |
-| Sprint 75 | Attestor Console Phase 4 – Air Gap & Bulk | src/StellaOps.ExportCenter.AttestationBundles/TASKS.md | TODO | Attestation Bundle Guild | EXPORT-ATTEST-75-001 | CLI bundle verify/import. |
-| Sprint 75 | Attestor Console Phase 4 – Air Gap & Bulk | src/StellaOps.ExportCenter.AttestationBundles/TASKS.md | TODO | Attestation Bundle Guild | EXPORT-ATTEST-75-002 | Document attestor airgap workflow. |
diff --git a/SPRINTS_PRIOR_20251019.md b/SPRINTS_PRIOR_20251019.md
deleted file mode 100644
index f7b3cd43..00000000
--- a/SPRINTS_PRIOR_20251019.md
+++ /dev/null
@@ -1,208 +0,0 @@
-Closed sprint tasks archived from SPRINTS.md on 2025-10-19.
-
-| Sprint | Theme | Tasks File Path | Status | Type of Specialist | Task ID | Task Description |
-| --- | --- | --- | --- | --- | --- | --- |
-| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Models/TASKS.md | DONE (2025-10-12) | Team Models & Merge Leads | FEEDMODELS-SCHEMA-01-001 | SemVer primitive range-style metadata
Instructions to work:
DONE Read ./AGENTS.md and src/StellaOps.Concelier.Models/AGENTS.md. This task lays the groundwork—complete the SemVer helper updates before teammates pick up FEEDMODELS-SCHEMA-01-002/003 and FEEDMODELS-SCHEMA-02-900. Use ./src/FASTER_MODELING_AND_NORMALIZATION.md for the target rule structure. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Models/TASKS.md | DONE (2025-10-11) | Team Models & Merge Leads | FEEDMODELS-SCHEMA-01-002 | Provenance decision rationale field
Instructions to work:
AdvisoryProvenance now carries `decisionReason` and docs/tests were updated. Connectors and merge tasks should populate the field when applying precedence/freshness/tie-breaker logic; see src/StellaOps.Concelier.Models/PROVENANCE_GUIDELINES.md for usage guidance. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Models/TASKS.md | DONE (2025-10-11) | Team Models & Merge Leads | FEEDMODELS-SCHEMA-01-003 | Normalized version rules collection
Instructions to work:
`AffectedPackage.NormalizedVersions` and supporting comparer/docs/tests shipped. Connector owners must emit rule arrays per ./src/FASTER_MODELING_AND_NORMALIZATION.md and report progress via FEEDMERGE-COORD-02-900 so merge/storage backfills can proceed. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Models/TASKS.md | DONE (2025-10-12) | Team Models & Merge Leads | FEEDMODELS-SCHEMA-02-900 | Range primitives for SemVer/EVR/NEVRA metadata
Instructions to work:
DONE Read ./AGENTS.md and src/StellaOps.Concelier.Models/AGENTS.md before resuming this stalled effort. Confirm helpers align with the new `NormalizedVersions` representation so connectors finishing in Sprint 2 can emit consistent metadata. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Normalization/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDNORM-NORM-02-001 | SemVer normalized rule emitter
Shared `SemVerRangeRuleBuilder` now outputs primitives + normalized rules per `FASTER_MODELING_AND_NORMALIZATION.md`; CVE/GHSA connectors consuming the API have verified fixtures. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDSTORAGE-DATA-02-001 | Normalized range dual-write + backfill
AdvisoryStore dual-writes flattened `normalizedVersions` when `concelier.storage.enableSemVerStyle` is set; migration `20251011-semver-style-backfill` updates historical records and docs outline the rollout. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDSTORAGE-DATA-02-002 | Provenance decision reason persistence
Storage now persists `provenance.decisionReason` for advisories and merge events; tests cover round-trips. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDSTORAGE-DATA-02-003 | Normalized versions indexing
Bootstrapper seeds compound/sparse indexes for flattened normalized rules and `docs/dev/mongo_indices.md` documents query guidance. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDSTORAGE-TESTS-02-004 | Restore AdvisoryStore build after normalized versions refactor
Updated constructors/tests keep storage suites passing with the new feature flag defaults. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.WebService/TASKS.md | DONE (2025-10-12) | Team WebService & Authority | FEEDWEB-ENGINE-01-002 | Plumb Authority client resilience options
WebService wires `authority.resilience.*` into `AddStellaOpsAuthClient` and adds binding coverage via `AuthorityClientResilienceOptionsAreBound`. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.WebService/TASKS.md | DONE (2025-10-12) | Team WebService & Authority | FEEDWEB-DOCS-01-003 | Author ops guidance for resilience tuning
Install/runbooks document connected vs air-gapped resilience profiles and monitoring hooks. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.WebService/TASKS.md | DONE (2025-10-12) | Team WebService & Authority | FEEDWEB-DOCS-01-004 | Document authority bypass logging patterns
Operator guides now call out `route/status/subject/clientId/scopes/bypass/remote` audit fields and SIEM triggers. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.WebService/TASKS.md | DONE (2025-10-12) | Team WebService & Authority | FEEDWEB-DOCS-01-005 | Update Concelier operator guide for enforcement cutoff
Install guide reiterates the 2025-12-31 cutoff and links audit signals to the rollout checklist. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Authority/TASKS.md | DONE (2025-10-11) | Team WebService & Authority | SEC3.HOST | Rate limiter policy binding
Authority host now applies configuration-driven fixed windows to `/token`, `/authorize`, and `/internal/*`; integration tests assert 429 + `Retry-After` headers; docs/config samples refreshed for Docs guild diagrams. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Authority/TASKS.md | DONE (2025-10-11) | Team WebService & Authority | SEC3.BUILD | Authority rate-limiter follow-through
`Security.RateLimiting` now fronts token/authorize/internal limiters; Authority + Configuration matrices (`dotnet test src/StellaOps.Authority/StellaOps.Authority.sln`, `dotnet test src/StellaOps.Configuration.Tests/StellaOps.Configuration.Tests.csproj`) passed on 2025-10-11; awaiting #authority-core broadcast. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Authority/TASKS.md | DONE (2025-10-14) | Team Authority Platform & Security Guild | AUTHCORE-BUILD-OPENIDDICT / AUTHCORE-STORAGE-DEVICE-TOKENS / AUTHCORE-BOOTSTRAP-INVITES | Address remaining Authority compile blockers (OpenIddict transaction shim, token device document, bootstrap invite cleanup) so `dotnet build src/StellaOps.Authority.sln` returns success. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/TASKS.md | DONE (2025-10-11) | Team WebService & Authority | PLG6.DOC | Plugin developer guide polish
Section 9 now documents rate limiter metadata, config keys, and lockout interplay; YAML samples updated alongside Authority config templates. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.CertCc/TASKS.md | DONE (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-001 | Fetch pipeline & state tracking
Summary planner now drives monthly/yearly VINCE fetches, persists pending summaries/notes, and hydrates the VINCE detail queue with telemetry.
Team instructions: Read ./AGENTS.md and src/StellaOps.Concelier.Connector.CertCc/AGENTS.md. Coordinate daily with Models/Merge leads so new normalizedVersions output and provenance tags stay aligned with ./src/FASTER_MODELING_AND_NORMALIZATION.md. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.CertCc/TASKS.md | DONE (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-002 | VINCE note detail fetcher
Summary planner queues VINCE note detail endpoints, persists raw JSON with SHA/ETag metadata, and records retry/backoff metrics. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.CertCc/TASKS.md | DONE (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-003 | DTO & parser implementation
Added VINCE DTO aggregate, Markdown→text sanitizer, vendor/status/vulnerability parsers, and parser regression fixture. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.CertCc/TASKS.md | DONE (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-004 | Canonical mapping & range primitives
VINCE DTO aggregate flows through `CertCcMapper`, emitting vendor range primitives + normalized version rules that persist via `_advisoryStore`. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.CertCc/TASKS.md | DONE (2025-10-12) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-005 | Deterministic fixtures/tests
Snapshot harness refreshed 2025-10-12; `certcc-*.snapshot.json` regenerated and regression suite green without UPDATE flag drift. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.CertCc/TASKS.md | DONE (2025-10-12) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-006 | Telemetry & documentation
`CertCcDiagnostics` publishes summary/detail/parse/map metrics (meter `StellaOps.Concelier.Connector.CertCc`), README documents instruments, and log guidance captured for Ops on 2025-10-12. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.CertCc/TASKS.md | DONE (2025-10-12) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-007 | Connector test harness remediation
Harness now wires `AddSourceCommon`, resets `FakeTimeProvider`, and passes canned-response regression run dated 2025-10-12. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.CertCc/TASKS.md | DONE (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-008 | Snapshot coverage handoff
Fixtures regenerated with normalized ranges + provenance fields on 2025-10-11; QA handoff notes published and merge backfill unblocked. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.CertCc/TASKS.md | DONE (2025-10-12) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-012 | Schema sync & snapshot regen follow-up
Fixtures regenerated with normalizedVersions + provenance decision reasons; handoff notes updated for Merge backfill 2025-10-12. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.CertCc/TASKS.md | DONE (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-009 | Detail/map reintegration plan
Staged reintegration plan published in `src/StellaOps.Concelier.Connector.CertCc/FEEDCONN-CERTCC-02-009_PLAN.md`; coordinates enablement with FEEDCONN-CERTCC-02-004. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.CertCc/TASKS.md | DONE (2025-10-12) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-010 | Partial-detail graceful degradation
Detail fetch now tolerates 404/403/410 responses and regression tests cover mixed endpoint availability. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.Distro.RedHat/TASKS.md | DONE (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-REDHAT-02-001 | Fixture validation sweep
Instructions to work:
Fixtures regenerated post-model-helper rollout; provenance ordering and normalizedVersions scaffolding verified via tests. Conflict resolver deltas logged in src/StellaOps.Concelier.Connector.Distro.RedHat/CONFLICT_RESOLVER_NOTES.md for Sprint 3 consumers. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.Vndr.Apple/TASKS.md | DONE (2025-10-12) | Team Vendor Apple Specialists | FEEDCONN-APPLE-02-001 | Canonical mapping & range primitives
Mapper emits SemVer rules (`scheme=apple:*`); fixtures regenerated with trimmed references + new RSR coverage, update tooling finalized. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.Vndr.Apple/TASKS.md | DONE (2025-10-11) | Team Vendor Apple Specialists | FEEDCONN-APPLE-02-002 | Deterministic fixtures/tests
Sanitized live fixtures + regression snapshots wired into tests; normalized rule coverage asserted. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.Vndr.Apple/TASKS.md | DONE (2025-10-11) | Team Vendor Apple Specialists | FEEDCONN-APPLE-02-003 | Telemetry & documentation
Apple meter metrics wired into Concelier WebService OpenTelemetry configuration; README and fixtures document normalizedVersions coverage. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.Vndr.Apple/TASKS.md | DONE (2025-10-12) | Team Vendor Apple Specialists | FEEDCONN-APPLE-02-004 | Live HTML regression sweep
Sanitised HT125326/HT125328/HT106355/HT214108/HT215500 fixtures recorded and regression tests green on 2025-10-12. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.Vndr.Apple/TASKS.md | DONE (2025-10-11) | Team Vendor Apple Specialists | FEEDCONN-APPLE-02-005 | Fixture regeneration tooling
`UPDATE_APPLE_FIXTURES=1` flow fetches & rewrites fixtures; README documents usage.
Instructions to work:
DONE Read ./AGENTS.md and src/StellaOps.Concelier.Connector.Vndr.Apple/AGENTS.md. Resume stalled tasks, ensuring normalizedVersions output and fixtures align with ./src/FASTER_MODELING_AND_NORMALIZATION.md before handing data to the conflict sprint. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.Ghsa/TASKS.md | DONE (2025-10-12) | Team Connector Normalized Versions Rollout | FEEDCONN-GHSA-02-001 | GHSA normalized versions & provenance
Team instructions: Read ./AGENTS.md and each module's AGENTS file. Adopt the `NormalizedVersions` array emitted by the models sprint, wiring provenance `decisionReason` where merge overrides occur. Follow ./src/FASTER_MODELING_AND_NORMALIZATION.md; report via src/StellaOps.Concelier.Merge/TASKS.md (FEEDMERGE-COORD-02-900). Progress 2025-10-11: GHSA/OSV emit normalized arrays with refreshed fixtures; CVE mapper now surfaces SemVer normalized ranges; NVD/KEV adoption pending; outstanding follow-ups include FEEDSTORAGE-DATA-02-001, FEEDMERGE-ENGINE-02-002, and rolling `tools/FixtureUpdater` updates across connectors. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.Osv/TASKS.md | DONE (2025-10-12) | Team Connector Normalized Versions Rollout | FEEDCONN-OSV-02-003 | OSV normalized versions & freshness | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.Nvd/TASKS.md | DONE (2025-10-12) | Team Connector Normalized Versions Rollout | FEEDCONN-NVD-02-002 | NVD normalized versions & timestamps | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.Cve/TASKS.md | DONE (2025-10-12) | Team Connector Normalized Versions Rollout | FEEDCONN-CVE-02-003 | CVE normalized versions uplift | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.Kev/TASKS.md | DONE (2025-10-12) | Team Connector Normalized Versions Rollout | FEEDCONN-KEV-02-003 | KEV normalized versions propagation | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.Osv/TASKS.md | DONE (2025-10-12) | Team Connector Normalized Versions Rollout | FEEDCONN-OSV-04-003 | OSV parity fixture refresh | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.WebService/TASKS.md | DONE (2025-10-10) | Team WebService & Authority | FEEDWEB-DOCS-01-001 | Document authority toggle & scope requirements
Quickstart carries toggle/scope guidance pending docs guild review (no change this sprint). | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.WebService/TASKS.md | DONE (2025-10-12) | Team WebService & Authority | FEEDWEB-DOCS-01-003 | Author ops guidance for resilience tuning
Operator docs now outline connected vs air-gapped resilience profiles and monitoring cues. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.WebService/TASKS.md | DONE (2025-10-12) | Team WebService & Authority | FEEDWEB-DOCS-01-004 | Document authority bypass logging patterns
Audit logging guidance highlights `route/status/subject/clientId/scopes/bypass/remote` fields and SIEM alerts. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.WebService/TASKS.md | DONE (2025-10-12) | Team WebService & Authority | FEEDWEB-DOCS-01-005 | Update Concelier operator guide for enforcement cutoff
Install guide reiterates the 2025-12-31 cutoff and ties audit signals to rollout checks. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.WebService/TASKS.md | DONE (2025-10-11) | Team WebService & Authority | FEEDWEB-OPS-01-006 | Rename plugin drop directory to namespaced path
Build outputs, tests, and docs now target `StellaOps.Concelier.PluginBinaries`/`StellaOps.Authority.PluginBinaries`. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.WebService/TASKS.md | DONE (2025-10-11) | Team WebService & Authority | FEEDWEB-OPS-01-007 | Authority resilience adoption
Deployment docs and CLI notes explain the LIB5 resilience knobs for rollout.
Working instructions:
DONE Read ./AGENTS.md and src/StellaOps.Concelier.WebService/AGENTS.md. These items were mid-flight; resume implementation ensuring docs/operators receive timely updates. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Authority/TASKS.md | DONE (2025-10-11) | Team Authority Platform & Security Guild | AUTHCORE-ENGINE-01-001 | CORE8.RL — Rate limiter plumbing validated; integration tests green and docs handoff recorded for middleware ordering + Retry-After headers (see `docs/dev/authority-rate-limit-tuning-outline.md` for continuing guidance). | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Cryptography/TASKS.md | DONE (2025-10-11) | Team Authority Platform & Security Guild | AUTHCRYPTO-ENGINE-01-001 | SEC3.A — Shared metadata resolver confirmed via host test run; SEC3.B now unblocked for tuning guidance (outline captured in `docs/dev/authority-rate-limit-tuning-outline.md`). | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Cryptography/TASKS.md | DONE (2025-10-13) | Team Authority Platform & Security Guild | AUTHSEC-DOCS-01-002 | SEC3.B — Published `docs/security/rate-limits.md` with tuning matrix, alert thresholds, and lockout interplay guidance; Docs guild can lift copy into plugin guide. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Cryptography/TASKS.md | DONE (2025-10-14) | Team Authority Platform & Security Guild | AUTHSEC-CRYPTO-02-001 | SEC5.B1 — Introduce libsodium signing provider and parity tests to unblock CLI verification enhancements. | -| Sprint 1 | Bootstrap & Replay Hardening | src/StellaOps.Cryptography/TASKS.md | DONE (2025-10-14) | Security Guild | AUTHSEC-CRYPTO-02-004 | SEC5.D/E — Finish bootstrap invite lifecycle (API/store/cleanup) and token device heuristics; build currently red due to pending handler integration. | -| Sprint 1 | Developer Tooling | src/StellaOps.Cli/TASKS.md | DONE (2025-10-15) | DevEx/CLI | AUTHCLI-DIAG-01-001 | Surface password policy diagnostics in CLI startup/output so operators see weakened overrides immediately.
CLI now loads Authority plug-ins at startup, logs weakened password policies (length/complexity), and regression coverage lives in `StellaOps.Cli.Tests/Services/AuthorityDiagnosticsReporterTests`. | -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/TASKS.md | DONE (2025-10-11) | Team Authority Platform & Security Guild | AUTHPLUG-DOCS-01-001 | PLG6.DOC — Developer guide copy + diagrams merged 2025-10-11; limiter guidance incorporated and handed to Docs guild for asset export. | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Normalization/TASKS.md | DONE (2025-10-12) | Team Normalization & Storage Backbone | FEEDNORM-NORM-02-001 | SemVer normalized rule emitter
`SemVerRangeRuleBuilder` shipped 2025-10-12 with comparator/`||` support and fixtures aligning to `FASTER_MODELING_AND_NORMALIZATION.md`. | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDSTORAGE-DATA-02-001 | Normalized range dual-write + backfill | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDSTORAGE-DATA-02-002 | Provenance decision reason persistence | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDSTORAGE-DATA-02-003 | Normalized versions indexing
Indexes seeded + docs updated 2025-10-11 to cover flattened normalized rules for connector adoption. | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Merge/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDMERGE-ENGINE-02-002 | Normalized versions union & dedupe
Affected package resolver unions/dedupes normalized rules, stamps merge provenance with `decisionReason`, and tests cover the rollout. | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Ghsa/TASKS.md | DONE (2025-10-11) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-GHSA-02-001 | GHSA normalized versions & provenance | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Ghsa/TASKS.md | DONE (2025-10-11) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-GHSA-02-004 | GHSA credits & ecosystem severity mapping | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Ghsa/TASKS.md | DONE (2025-10-12) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-GHSA-02-005 | GitHub quota monitoring & retries | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Ghsa/TASKS.md | DONE (2025-10-12) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-GHSA-02-006 | Production credential & scheduler rollout | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Ghsa/TASKS.md | DONE (2025-10-12) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-GHSA-02-007 | Credit parity regression fixtures | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Nvd/TASKS.md | DONE (2025-10-11) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-NVD-02-002 | NVD normalized versions & timestamps | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Nvd/TASKS.md | DONE (2025-10-11) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-NVD-02-004 | NVD CVSS & CWE precedence payloads | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Nvd/TASKS.md | DONE (2025-10-12) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-NVD-02-005 | NVD merge/export parity regression | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Osv/TASKS.md | DONE (2025-10-11) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-OSV-02-003 | OSV normalized versions & freshness | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Osv/TASKS.md | DONE (2025-10-11) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-OSV-02-004 | OSV references & credits alignment | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Osv/TASKS.md | DONE (2025-10-12) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-OSV-02-005 | Fixture updater workflow
Resolved 2025-10-12: OSV mapper now derives canonical PURLs for Go + scoped npm packages when raw payloads omit `purl`; conflict fixtures unchanged for invalid npm names. Verified via `dotnet test src/StellaOps.Concelier.Connector.Osv.Tests`, `src/StellaOps.Concelier.Connector.Ghsa.Tests`, `src/StellaOps.Concelier.Connector.Nvd.Tests`, and backbone normalization/storage suites. | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Acsc/TASKS.md | DONE (2025-10-12) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-ACSC-02-001 … 02-008 | Fetch→parse→map pipeline, fixtures, diagnostics, and README finished 2025-10-12; downstream export parity captured via FEEDEXPORT-JSON-04-001 / FEEDEXPORT-TRIVY-04-001 (completed). | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Cccs/TASKS.md | DONE (2025-10-16) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-CCCS-02-001 … 02-008 | Observability meter, historical harvest plan, and DOM sanitizer refinements wrapped; ops notes live under `docs/ops/concelier-cccs-operations.md` with fixtures validating EN/FR list handling. | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.CertBund/TASKS.md | DONE (2025-10-15) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-CERTBUND-02-001 … 02-008 | Telemetry/docs (02-006) and history/locale sweep (02-007) completed alongside pipeline; runbook `docs/ops/concelier-certbund-operations.md` captures locale guidance and offline packaging. | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Kisa/TASKS.md | DONE (2025-10-14) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-KISA-02-001 … 02-007 | Connector, tests, and telemetry/docs (02-006) finalized; localisation notes in `docs/dev/kisa_connector_notes.md` complete rollout. | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Ru.Bdu/TASKS.md | DONE (2025-10-14) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-RUBDU-02-001 … 02-008 | Fetch/parser/mapper refinements, regression fixtures, telemetry/docs, access options, and trusted root packaging all landed; README documents offline access strategy. | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Ru.Nkcki/TASKS.md | DONE (2025-10-13) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-NKCKI-02-001 … 02-008 | Listing fetch, parser, mapper, fixtures, telemetry/docs, and archive plan finished; Mongo2Go/libcrypto dependency resolved via bundled OpenSSL noted in ops guide. | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Ics.Cisa/TASKS.md | DONE (2025-10-16) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-ICSCISA-02-001 … 02-011 | Feed parser attachment fixes, SemVer exact values, regression suites, telemetry/docs updates, and handover complete; ops runbook now details attachment verification + proxy usage. | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Vndr.Cisco/TASKS.md | DONE (2025-10-14) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-CISCO-02-001 … 02-007 | OAuth fetch pipeline, DTO/mapping, tests, and telemetry/docs shipped; monitoring/export integration follow-ups recorded in Ops docs and exporter backlog (completed). 
| -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Vndr.Msrc/TASKS.md | DONE (2025-10-15) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-MSRC-02-001 … 02-008 | Azure AD onboarding (02-008) unblocked fetch/parse/map pipeline; fixtures, telemetry/docs, and Offline Kit guidance published in `docs/ops/concelier-msrc-operations.md`. | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Cve/TASKS.md | DONE (2025-10-15) | Team Connector Support & Monitoring | FEEDCONN-CVE-02-001 … 02-002 | CVE data-source selection, fetch pipeline, and docs landed 2025-10-10. 2025-10-15: smoke verified using the seeded mirror fallback; connector now logs a warning and pulls from `seed-data/cve/` until live CVE Services credentials arrive. | -| Sprint 2 | Connector & Data Implementation Wave | src/StellaOps.Concelier.Connector.Kev/TASKS.md | DONE (2025-10-12) | Team Connector Support & Monitoring | FEEDCONN-KEV-02-001 … 02-002 | KEV catalog ingestion, fixtures, telemetry, and schema validation completed 2025-10-12; ops dashboard published. | -| Sprint 2 | Connector & Data Implementation Wave | docs/TASKS.md | DONE (2025-10-11) | Team Docs & Knowledge Base | FEEDDOCS-DOCS-01-001 | Canonical schema docs refresh
Updated canonical schema + provenance guides with SemVer style, normalized version rules, decision reason change log, and migration notes. | -| Sprint 2 | Connector & Data Implementation Wave | docs/TASKS.md | DONE (2025-10-11) | Team Docs & Knowledge Base | FEEDDOCS-DOCS-02-001 | Concelier-SemVer Playbook
Published merge playbook covering mapper patterns, dedupe flow, indexes, and rollout checklist. | -| Sprint 2 | Connector & Data Implementation Wave | docs/TASKS.md | DONE (2025-10-11) | Team Docs & Knowledge Base | FEEDDOCS-DOCS-02-002 | Normalized versions query guide
Delivered Mongo index/query addendum with `$unwind` recipes, dedupe checks, and operational checklist.
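For orientation, a minimal sketch of the kind of `$unwind` recipe the addendum describes, written against the MongoDB .NET driver; the database, collection, and `affectedPackages.normalizedVersions` field path are illustrative assumptions, not the canonical Concelier schema.

```csharp
using System;
using MongoDB.Bson;
using MongoDB.Driver;

// Illustrative only: database, collection, and field names are assumptions,
// not the canonical Concelier storage schema.
var client = new MongoClient("mongodb://localhost:27017");
var advisories = client.GetDatabase("concelier").GetCollection<BsonDocument>("advisories");

// Flatten normalized version rules and count them per scheme, e.g. to spot
// connectors that have not yet adopted SemVer-style rules.
var pipeline = new[]
{
    new BsonDocument("$unwind", "$affectedPackages"),
    new BsonDocument("$unwind", "$affectedPackages.normalizedVersions"),
    new BsonDocument("$group", new BsonDocument
    {
        { "_id", "$affectedPackages.normalizedVersions.scheme" },
        { "rules", new BsonDocument("$sum", 1) }
    }),
    new BsonDocument("$sort", new BsonDocument("rules", -1))
};

foreach (var row in advisories.Aggregate<BsonDocument>(pipeline).ToList())
{
    Console.WriteLine(row.ToJson());
}
```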
Working instructions:
DONE Read ./AGENTS.md and docs/AGENTS.md. Document every schema/index/query change produced in Sprint 1-2 leveraging ./src/FASTER_MODELING_AND_NORMALIZATION.md. | -| Sprint 3 | Conflict Resolution Integration & Communications | src/StellaOps.Concelier.Core/TASKS.md | DONE (2025-10-11) | Team Core Engine & Storage Analytics | FEEDCORE-ENGINE-03-001 | Canonical merger implementation
`CanonicalMerger` ships with freshness/tie-breaker logic, provenance, and unit coverage feeding Merge. | -| Sprint 3 | Conflict Resolution Integration & Communications | src/StellaOps.Concelier.Core/TASKS.md | DONE (2025-10-11) | Team Core Engine & Storage Analytics | FEEDCORE-ENGINE-03-002 | Field precedence and tie-breaker map
Field precedence tables and tie-breaker metrics wired into the canonical merge flow; docs/tests updated.
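As a rough illustration of the precedence-then-freshness decision described above (not the actual `CanonicalMerger` API; the source ranks, record shape, and reason labels are assumptions for this sketch):

```csharp
using System;
using System.Collections.Generic;
using System.Linq;

// Illustrative only: ranks, records, and decision-reason labels are assumptions.
public sealed record FieldClaim(string Source, string Value, DateTimeOffset FetchedAt);

public static class FieldPrecedenceSketch
{
    // Lower rank wins; unknown sources sort last.
    private static readonly Dictionary<string, int> Rank = new()
    {
        ["ghsa"] = 0,
        ["nvd"] = 1,
        ["osv"] = 2
    };

    public static (FieldClaim Winner, string DecisionReason) Resolve(IReadOnlyList<FieldClaim> claims)
    {
        var ordered = claims
            .OrderBy(c => Rank.GetValueOrDefault(c.Source, int.MaxValue)) // precedence first
            .ThenByDescending(c => c.FetchedAt)                           // freshness breaks ties
            .ToList();

        var winner = ordered[0];
        var tied = ordered.Count > 1 &&
                   Rank.GetValueOrDefault(ordered[1].Source, int.MaxValue) ==
                   Rank.GetValueOrDefault(winner.Source, int.MaxValue);

        return (winner, tied ? "freshness" : "precedence");
    }
}
```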
Working instructions:
Read ./AGENTS.md and core AGENTS. Implement the conflict resolver exactly as specified in ./src/DEDUP_CONFLICTS_RESOLUTION_ALGO.md, coordinating with Merge and Storage teammates. | -| Sprint 3 | Conflict Resolution Integration & Communications | src/StellaOps.Concelier.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Core Engine & Storage Analytics | FEEDSTORAGE-DATA-03-001 | Merge event provenance audit prep
Merge events now persist `fieldDecisions` and analytics-ready provenance snapshots. | -| Sprint 3 | Conflict Resolution Integration & Communications | src/StellaOps.Concelier.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Core Engine & Storage Analytics | FEEDSTORAGE-DATA-02-001 | Normalized range dual-write + backfill
Dual-write/backfill flag delivered; migration + options validated in tests. | -| Sprint 3 | Conflict Resolution Integration & Communications | src/StellaOps.Concelier.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Core Engine & Storage Analytics | FEEDSTORAGE-TESTS-02-004 | Restore AdvisoryStore build after normalized versions refactor
Storage tests adjusted for normalized versions/decision reasons.
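A sketch of what the dual-write flag noted above can look like with the MongoDB .NET driver; the flag, collection, and field names here are placeholders, and the real migration/options live in the storage module.

```csharp
using System.Threading.Tasks;
using MongoDB.Bson;
using MongoDB.Driver;

// Illustrative only: flag, collection, and field names are placeholders.
public static class NormalizedVersionsDualWriteSketch
{
    public static async Task WriteAsync(
        IMongoCollection<BsonDocument> advisories,
        string advisoryKey,
        BsonArray legacyRanges,
        BsonArray normalizedVersions,
        bool dualWriteEnabled)
    {
        // Legacy field keeps being written so existing readers are unaffected.
        var update = Builders<BsonDocument>.Update.Set("versionRanges", legacyRanges);

        if (dualWriteEnabled)
        {
            // New flattened rules land alongside the legacy field until backfill
            // completes and readers switch over.
            update = update.Set("normalizedVersions", normalizedVersions);
        }

        await advisories.UpdateOneAsync(
            Builders<BsonDocument>.Filter.Eq("advisoryKey", advisoryKey),
            update,
            new UpdateOptions { IsUpsert = false });
    }
}
```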
Working instructions:
Read ./AGENTS.md and storage AGENTS. Extend merge events with decision reasons and analytics views to support the conflict rules, and deliver the dual-write/backfill for `NormalizedVersions` + `decisionReason` so connectors can roll out safely. | -| Sprint 3 | Conflict Resolution Integration & Communications | src/StellaOps.Concelier.Merge/TASKS.md | DONE (2025-10-11) | Team Merge & QA Enforcement | FEEDMERGE-ENGINE-04-001 | GHSA/NVD/OSV conflict rules
Merge pipeline consumes `CanonicalMerger` output prior to precedence merge. | -| Sprint 3 | Conflict Resolution Integration & Communications | src/StellaOps.Concelier.Merge/TASKS.md | DONE (2025-10-11) | Team Merge & QA Enforcement | FEEDMERGE-ENGINE-04-002 | Override metrics instrumentation
Merge events capture per-field decisions; counters/logs align with conflict rules. | -| Sprint 3 | Conflict Resolution Integration & Communications | src/StellaOps.Concelier.Merge/TASKS.md | DONE (2025-10-11) | Team Merge & QA Enforcement | FEEDMERGE-ENGINE-04-003 | Reference & credit union pipeline
Canonical merge preserves unions with updated tests. | -| Sprint 3 | Conflict Resolution Integration & Communications | src/StellaOps.Concelier.Merge/TASKS.md | DONE (2025-10-11) | Team Merge & QA Enforcement | FEEDMERGE-QA-04-001 | End-to-end conflict regression suite
Added regression tests (`AdvisoryMergeServiceTests`) covering canonical + precedence flow.
Working instructions:
Read ./AGENTS.md and merge AGENTS. Integrate the canonical merger, instrument metrics, and deliver comprehensive regression tests following ./src/DEDUP_CONFLICTS_RESOLUTION_ALGO.md. | -| Sprint 3 | Conflict Resolution Integration & Communications | src/StellaOps.Concelier.Connector.Ghsa/TASKS.md | DONE (2025-10-12) | Team Connector Regression Fixtures | FEEDCONN-GHSA-04-002 | GHSA conflict regression fixtures | -| Sprint 3 | Conflict Resolution Integration & Communications | src/StellaOps.Concelier.Connector.Nvd/TASKS.md | DONE (2025-10-12) | Team Connector Regression Fixtures | FEEDCONN-NVD-04-002 | NVD conflict regression fixtures | -| Sprint 3 | Conflict Resolution Integration & Communications | src/StellaOps.Concelier.Connector.Osv/TASKS.md | DONE (2025-10-12) | Team Connector Regression Fixtures | FEEDCONN-OSV-04-002 | OSV conflict regression fixtures
Working instructions:
Read ./AGENTS.md and module AGENTS. Produce fixture triples supporting the precedence/tie-breaker paths defined in ./src/DEDUP_CONFLICTS_RESOLUTION_ALGO.md and hand them to Merge QA. | -| Sprint 3 | Conflict Resolution Integration & Communications | docs/TASKS.md | DONE (2025-10-11) | Team Documentation Guild – Conflict Guidance | FEEDDOCS-DOCS-05-001 | Concelier Conflict Rules
Runbook published at `docs/ops/concelier-conflict-resolution.md`; metrics/log guidance aligned with Sprint 3 merge counters. | -| Sprint 3 | Conflict Resolution Integration & Communications | docs/TASKS.md | DONE (2025-10-16) | Team Documentation Guild – Conflict Guidance | FEEDDOCS-DOCS-05-002 | Conflict runbook ops rollout
Ops review completed, alert thresholds applied, and change log appended in `docs/ops/concelier-conflict-resolution.md`; task closed after connector signals verified. | -| Sprint 4 | Schema Parity & Freshness Alignment | src/StellaOps.Concelier.Models/TASKS.md | DONE (2025-10-15) | Team Models & Merge Leads | FEEDMODELS-SCHEMA-04-001 | Advisory schema parity (description/CWE/canonical metric)
Extend `Advisory` and related records with description text, CWE collection, and canonical metric pointer; refresh validation + serializer determinism tests. | -| Sprint 4 | Schema Parity & Freshness Alignment | src/StellaOps.Concelier.Core/TASKS.md | DONE (2025-10-15) | Team Core Engine & Storage Analytics | FEEDCORE-ENGINE-04-003 | Canonical merger parity for new fields
Teach `CanonicalMerger` to populate description, CWEResults, and canonical metric pointer with provenance + regression coverage. | -| Sprint 4 | Schema Parity & Freshness Alignment | src/StellaOps.Concelier.Core/TASKS.md | DONE (2025-10-15) | Team Core Engine & Storage Analytics | FEEDCORE-ENGINE-04-004 | Reference normalization & freshness instrumentation cleanup
Implement URL normalization for reference dedupe, align freshness-sensitive instrumentation, and add analytics tests. | -| Sprint 4 | Schema Parity & Freshness Alignment | src/StellaOps.Concelier.Merge/TASKS.md | DONE (2025-10-15) | Team Merge & QA Enforcement | FEEDMERGE-ENGINE-04-004 | Merge pipeline parity for new advisory fields
Ensure merge service + merge events surface description/CWE/canonical metric decisions with updated metrics/tests. | -| Sprint 4 | Schema Parity & Freshness Alignment | src/StellaOps.Concelier.Merge/TASKS.md | DONE (2025-10-15) | Team Merge & QA Enforcement | FEEDMERGE-ENGINE-04-005 | Connector coordination for new advisory fields
GHSA/NVD/OSV connectors now ship description, CWE, and canonical metric data with refreshed fixtures; merge coordination log updated and exporters notified. | -| Sprint 4 | Schema Parity & Freshness Alignment | src/StellaOps.Concelier.Exporter.Json/TASKS.md | DONE (2025-10-15) | Team Exporters – JSON | FEEDEXPORT-JSON-04-001 | Surface new advisory fields in JSON exporter
Update schemas/offline bundle + fixtures once model/core parity lands.
2025-10-15: `dotnet test src/StellaOps.Concelier.Exporter.Json.Tests` validated canonical metric/CWE emission. | -| Sprint 4 | Schema Parity & Freshness Alignment | src/StellaOps.Concelier.Exporter.TrivyDb/TASKS.md | DONE (2025-10-15) | Team Exporters – Trivy DB | FEEDEXPORT-TRIVY-04-001 | Propagate new advisory fields into Trivy DB package
Extend Bolt builder, metadata, and regression tests for the expanded schema.
2025-10-15: `dotnet test src/StellaOps.Concelier.Exporter.TrivyDb.Tests` confirmed canonical metric/CWE propagation. | -| Sprint 4 | Schema Parity & Freshness Alignment | src/StellaOps.Concelier.Connector.Ghsa/TASKS.md | DONE (2025-10-16) | Team Connector Regression Fixtures | FEEDCONN-GHSA-04-004 | Harden CVSS fallback so canonical metric ids persist when GitHub omits vectors; extend fixtures and document severity precedence hand-off to Merge. | -| Sprint 4 | Schema Parity & Freshness Alignment | src/StellaOps.Concelier.Connector.Osv/TASKS.md | DONE (2025-10-16) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-OSV-04-005 | Map OSV advisories lacking CVSS vectors to canonical metric ids/notes and document CWE provenance quirks; schedule parity fixture updates. | -| Sprint 5 | Excititor Core Foundations | src/StellaOps.Excititor.Core/TASKS.md | DONE (2025-10-15) | Team Excititor Core & Policy | EXCITITOR-CORE-01-001 | Stand up canonical VEX claim/consensus records with deterministic serializers so Storage/Exports share a stable contract. | -| Sprint 5 | Excititor Core Foundations | src/StellaOps.Excititor.Core/TASKS.md | DONE (2025-10-15) | Team Excititor Core & Policy | EXCITITOR-CORE-01-002 | Implement trust-weighted consensus resolver with baseline policy weights, justification gates, telemetry output, and majority/tie handling. | -| Sprint 5 | Excititor Core Foundations | src/StellaOps.Excititor.Core/TASKS.md | DONE (2025-10-15) | Team Excititor Core & Policy | EXCITITOR-CORE-01-003 | Publish shared connector/exporter/attestation abstractions and deterministic query signature utilities for cache/attestation workflows. | -| Sprint 5 | Excititor Core Foundations | src/StellaOps.Excititor.Policy/TASKS.md | DONE (2025-10-15) | Team Excititor Policy | EXCITITOR-POLICY-01-001 | Established policy options & snapshot provider covering baseline weights/overrides. | -| Sprint 5 | Excititor Core Foundations | src/StellaOps.Excititor.Policy/TASKS.md | DONE (2025-10-15) | Team Excititor Policy | EXCITITOR-POLICY-01-002 | Policy evaluator now feeds consensus resolver with immutable snapshots. | -| Sprint 5 | Excititor Core Foundations | src/StellaOps.Excititor.Policy/TASKS.md | DONE (2025-10-16) | Team Excititor Policy | EXCITITOR-POLICY-01-003 | Author policy diagnostics, CLI/WebService surfacing, and documentation updates. | -| Sprint 5 | Excititor Core Foundations | src/StellaOps.Excititor.Policy/TASKS.md | DONE (2025-10-16) | Team Excititor Policy | EXCITITOR-POLICY-01-004 | Implement YAML/JSON schema validation and deterministic diagnostics for operator bundles. | -| Sprint 5 | Excititor Core Foundations | src/StellaOps.Excititor.Policy/TASKS.md | DONE (2025-10-16) | Team Excititor Policy | EXCITITOR-POLICY-01-005 | Add policy change tracking, snapshot digests, and telemetry/logging hooks. | -| Sprint 5 | Excititor Core Foundations | src/StellaOps.Excititor.Storage.Mongo/TASKS.md | DONE (2025-10-15) | Team Excititor Storage | EXCITITOR-STORAGE-01-001 | Mongo mapping registry plus raw/export entities and DI extensions in place. | -| Sprint 5 | Excititor Core Foundations | src/StellaOps.Excititor.Storage.Mongo/TASKS.md | DONE (2025-10-16) | Team Excititor Storage | EXCITITOR-STORAGE-01-004 | Build provider/consensus/cache class maps and related collections. | -| Sprint 5 | Excititor Core Foundations | src/StellaOps.Excititor.Export/TASKS.md | DONE (2025-10-15) | Team Excititor Export | EXCITITOR-EXPORT-01-001 | Export engine delivers cache lookup, manifest creation, and policy integration. 
| -| Sprint 5 | Excititor Core Foundations | src/StellaOps.Excititor.Export/TASKS.md | DONE (2025-10-17) | Team Excititor Export | EXCITITOR-EXPORT-01-004 | Connect export engine to attestation client and persist Rekor metadata. | -| Sprint 5 | Excititor Core Foundations | src/StellaOps.Excititor.Attestation/TASKS.md | DONE (2025-10-16) | Team Excititor Attestation | EXCITITOR-ATTEST-01-001 | Implement in-toto predicate + DSSE builder providing envelopes for export attestation. | -| Sprint 5 | Excititor Core Foundations | src/StellaOps.Excititor.Connectors.Abstractions/TASKS.md | DONE (2025-10-17) | Team Excititor Connectors | EXCITITOR-CONN-ABS-01-001 | Deliver shared connector context/base classes so provider plug-ins can be activated via WebService/Worker. | -| Sprint 5 | Excititor Core Foundations | src/StellaOps.Excititor.WebService/TASKS.md | DONE (2025-10-17) | Team Excititor WebService | EXCITITOR-WEB-01-001 | Scaffold minimal API host, DI, and `/excititor/status` endpoint integrating policy, storage, export, and attestation services. | -| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Worker/TASKS.md | DONE (2025-10-17) | Team Excititor Worker | EXCITITOR-WORKER-01-001 | Create Worker host with provider scheduling and logging to drive recurring pulls/reconciliation. | -| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Formats.CSAF/TASKS.md | DONE (2025-10-17) | Team Excititor Formats | EXCITITOR-FMT-CSAF-01-001 | Implement CSAF normalizer foundation translating provider documents into `VexClaim` entries. | -| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Formats.CycloneDX/TASKS.md | DONE (2025-10-17) | Team Excititor Formats | EXCITITOR-FMT-CYCLONE-01-001 | Implement CycloneDX VEX normalizer capturing `analysis` state and component references. | -| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Formats.OpenVEX/TASKS.md | DONE (2025-10-17) | Team Excititor Formats | EXCITITOR-FMT-OPENVEX-01-001 | Implement OpenVEX normalizer to ingest attestations into canonical claims with provenance. | -| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Connectors.RedHat.CSAF/TASKS.md | DONE (2025-10-17) | Team Excititor Connectors – Red Hat | EXCITITOR-CONN-RH-01-001 | Ship Red Hat CSAF provider metadata discovery enabling incremental pulls. | -| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Connectors.RedHat.CSAF/TASKS.md | DONE (2025-10-17) | Team Excititor Connectors – Red Hat | EXCITITOR-CONN-RH-01-002 | Fetch CSAF windows with ETag handling, resume tokens, quarantine on schema errors, and persist raw docs. | -| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Connectors.RedHat.CSAF/TASKS.md | DONE (2025-10-17) | Team Excititor Connectors – Red Hat | EXCITITOR-CONN-RH-01-003 | Populate provider trust overrides (cosign issuer, identity regex) and provenance hints for policy evaluation/logging. | -| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Connectors.RedHat.CSAF/TASKS.md | DONE (2025-10-17) | Team Excititor Connectors – Red Hat | EXCITITOR-CONN-RH-01-004 | Persist resume cursors (last updated timestamp/document hashes) in storage and reload during fetch to avoid duplicates. 
| -| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Connectors.RedHat.CSAF/TASKS.md | DONE (2025-10-17) | Team Excititor Connectors – Red Hat | EXCITITOR-CONN-RH-01-005 | Register connector in Worker/WebService DI, add scheduled jobs, and document CLI triggers for Red Hat CSAF pulls. | -| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Connectors.RedHat.CSAF/TASKS.md | DONE (2025-10-17) | Team Excititor Connectors – Red Hat | EXCITITOR-CONN-RH-01-006 | Add CSAF normalization parity fixtures ensuring RHSA-specific metadata is preserved. | -| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Connectors.Cisco.CSAF/TASKS.md | DONE (2025-10-17) | Team Excititor Connectors – Cisco | EXCITITOR-CONN-CISCO-01-001 | Implement Cisco CSAF endpoint discovery/auth to unlock paginated pulls. | -| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Connectors.Cisco.CSAF/TASKS.md | DONE (2025-10-17) | Team Excititor Connectors – Cisco | EXCITITOR-CONN-CISCO-01-002 | Implement Cisco CSAF paginated fetch loop with dedupe and raw persistence support. | -| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/TASKS.md | DONE (2025-10-17) | Team Excititor Connectors – SUSE | EXCITITOR-CONN-SUSE-01-001 | Build Rancher VEX Hub discovery/subscription path with offline snapshot support. | -| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Connectors.MSRC.CSAF/TASKS.md | DONE (2025-10-17) | Team Excititor Connectors – MSRC | EXCITITOR-CONN-MS-01-001 | Deliver AAD onboarding/token cache for MSRC CSAF ingestion. | -| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Connectors.Oracle.CSAF/TASKS.md | DONE (2025-10-17) | Team Excititor Connectors – Oracle | EXCITITOR-CONN-ORACLE-01-001 | Implement Oracle CSAF catalogue discovery with CPU calendar awareness. | -| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/TASKS.md | DONE (2025-10-17) | Team Excititor Connectors – Ubuntu | EXCITITOR-CONN-UBUNTU-01-001 | Implement Ubuntu CSAF discovery and channel selection for USN ingestion. | -| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/TASKS.md | DONE (2025-10-18) | Team Excititor Connectors – OCI | EXCITITOR-CONN-OCI-01-001 | Wire OCI discovery/auth to fetch OpenVEX attestations for configured images. | -| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/TASKS.md | DONE (2025-10-18) | Team Excititor Connectors – OCI | EXCITITOR-CONN-OCI-01-002 | Attestation fetch & verify loop – download DSSE attestations, trigger verification, handle retries/backoff, persist raw statements. | -| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/TASKS.md | DONE (2025-10-18) | Team Excititor Connectors – OCI | EXCITITOR-CONN-OCI-01-003 | Provenance metadata & policy hooks – emit image, subject digest, issuer, and trust metadata for policy weighting/logging. | -| Sprint 6 | Excititor Ingest & Formats | src/StellaOps.Cli/TASKS.md | DONE (2025-10-18) | DevEx/CLI | EXCITITOR-CLI-01-001 | Add `excititor` CLI verbs bridging to WebService with consistent auth and offline UX. | -| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Excititor.Core/TASKS.md | DONE (2025-10-19) | Team Excititor Core & Policy | EXCITITOR-CORE-02-001 | Context signal schema prep – extend consensus models with severity/KEV/EPSS fields and update canonical serializers. 
| -| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Excititor.Policy/TASKS.md | DONE (2025-10-19) | Team Excititor Policy | EXCITITOR-POLICY-02-001 | Scoring coefficients & weight ceilings – add α/β options, weight boosts, and validation guidance. | -| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Excititor.Attestation/TASKS.md | DONE (2025-10-16) | Team Excititor Attestation | EXCITITOR-ATTEST-01-002 | Rekor v2 client integration – ship transparency log client with retries and offline queue. | -| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Core/TASKS.md | DONE (2025-10-18) | Team Scanner Core | SCANNER-CORE-09-501 | Define shared DTOs (ScanJob, ProgressEvent), error taxonomy, and deterministic ID/timestamp helpers aligning with `ARCHITECTURE_SCANNER.md` §3–§4. | -| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Core/TASKS.md | DONE (2025-10-18) | Team Scanner Core | SCANNER-CORE-09-502 | Observability helpers (correlation IDs, logging scopes, metric namespacing, deterministic hashes) consumed by WebService/Worker. | -| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Core/TASKS.md | DONE (2025-10-18) | Team Scanner Core | SCANNER-CORE-09-503 | Security utilities: Authority client factory, OpTok caching, DPoP verifier, restart-time plug-in guardrails for scanner components. | -| Sprint 9 | Scanner Build-time | src/StellaOps.Scanner.Sbomer.BuildXPlugin/TASKS.md | DONE (2025-10-19) | BuildX Guild | SP9-BLDX-09-001 | Buildx driver scaffold + handshake with Scanner.Emit (local CAS). | -| Sprint 9 | Scanner Build-time | src/StellaOps.Scanner.Sbomer.BuildXPlugin/TASKS.md | DONE (2025-10-19) | BuildX Guild | SP9-BLDX-09-002 | OCI annotations + provenance hand-off to Attestor. | -| Sprint 9 | Scanner Build-time | src/StellaOps.Scanner.Sbomer.BuildXPlugin/TASKS.md | DONE (2025-10-19) | BuildX Guild | SP9-BLDX-09-003 | CI demo: minimal SBOM push & backend report wiring. | -| Sprint 9 | Scanner Build-time | src/StellaOps.Scanner.Sbomer.BuildXPlugin/TASKS.md | DONE (2025-10-19) | BuildX Guild | SP9-BLDX-09-004 | Stabilize descriptor nonce derivation so repeated builds emit deterministic placeholders. | -| Sprint 9 | Scanner Build-time | src/StellaOps.Scanner.Sbomer.BuildXPlugin/TASKS.md | DONE (2025-10-19) | BuildX Guild | SP9-BLDX-09-005 | Integrate determinism guard into GitHub/Gitea workflows and archive proof artifacts. | -| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.WebService/TASKS.md | DONE (2025-10-18) | Team Scanner WebService | SCANNER-WEB-09-101 | Minimal API host with Authority enforcement, health/ready endpoints, and restart-time plug-in loader per architecture §1, §4. | -| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.WebService/TASKS.md | DONE (2025-10-18) | Team Scanner WebService | SCANNER-WEB-09-102 | `/api/v1/scans` submission/status endpoints with deterministic IDs, validation, and cancellation support. | -| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.WebService/TASKS.md | DONE (2025-10-19) | Team Scanner WebService | SCANNER-WEB-09-104 | Configuration binding for Mongo, MinIO, queue, feature flags; startup diagnostics and fail-fast policy. | -| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Worker/TASKS.md | DONE (2025-10-19) | Team Scanner Worker | SCANNER-WORKER-09-201 | Worker host bootstrap with Authority auth, hosted services, and graceful shutdown semantics. 
| -| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Worker/TASKS.md | DONE (2025-10-19) | Team Scanner Worker | SCANNER-WORKER-09-202 | Lease/heartbeat loop with retry+jitter, poison-job quarantine, structured logging. | -| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Worker/TASKS.md | DONE (2025-10-19) | Team Scanner Worker | SCANNER-WORKER-09-203 | Analyzer dispatch skeleton emitting deterministic stage progress and honoring cancellation tokens. | -| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Worker/TASKS.md | DONE (2025-10-19) | Team Scanner Worker | SCANNER-WORKER-09-204 | Worker metrics (queue latency, stage duration, failure counts) with OpenTelemetry resource wiring. | -| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Worker/TASKS.md | DONE (2025-10-19) | Team Scanner Worker | SCANNER-WORKER-09-205 | Harden heartbeat jitter so lease safety margin stays ≥3× and cover with regression tests + optional live queue smoke run. | -| Sprint 9 | Policy Foundations | src/StellaOps.Policy/TASKS.md | DONE | Policy Guild | POLICY-CORE-09-001 | Policy schema + binder + diagnostics. | -| Sprint 9 | Policy Foundations | src/StellaOps.Policy/TASKS.md | DONE | Policy Guild | POLICY-CORE-09-002 | Policy snapshot store + revision digests. | -| Sprint 9 | Policy Foundations | src/StellaOps.Policy/TASKS.md | DONE | Policy Guild | POLICY-CORE-09-003 | `/policy/preview` API (image digest → projected verdict diff). | -| Sprint 9 | DevOps Foundations | ops/devops/TASKS.md | DONE (2025-10-19) | DevOps Guild | DEVOPS-HELM-09-001 | Helm/Compose environment profiles (dev/staging/airgap) with deterministic digests. | -| Sprint 9 | Docs & Governance | docs/TASKS.md | DONE (2025-10-19) | Docs Guild, DevEx | DOCS-ADR-09-001 | Establish ADR process and template. | -| Sprint 9 | Docs & Governance | docs/TASKS.md | DONE (2025-10-19) | Docs Guild, Platform Events | DOCS-EVENTS-09-002 | Publish event schema catalog (`docs/events/`) for critical envelopes. | -| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Storage/TASKS.md | DONE (2025-10-19) | Team Scanner Storage | SCANNER-STORAGE-09-301 | Mongo catalog schemas/indexes for images, layers, artifacts, jobs, lifecycle rules plus migrations. | -| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Storage/TASKS.md | DONE (2025-10-19) | Team Scanner Storage | SCANNER-STORAGE-09-302 | MinIO layout, immutability policies, client abstraction, and configuration binding. | -| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Storage/TASKS.md | DONE (2025-10-19) | Team Scanner Storage | SCANNER-STORAGE-09-303 | Repositories/services with dual-write feature flag, deterministic digests, TTL enforcement tests. | -| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Queue/TASKS.md | DONE (2025-10-19) | Team Scanner Queue | SCANNER-QUEUE-09-401 | Queue abstraction + Redis Streams adapter with ack/claim APIs and idempotency tokens. | -| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Queue/TASKS.md | DONE (2025-10-19) | Team Scanner Queue | SCANNER-QUEUE-09-402 | Pluggable backend support (Redis, NATS) with configuration binding, health probes, failover docs. | -| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.Queue/TASKS.md | DONE (2025-10-19) | Team Scanner Queue | SCANNER-QUEUE-09-403 | Retry + dead-letter strategy with structured logs/metrics for offline deployments. 
| -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.Connector.Ghsa/TASKS.md | DONE (2025-10-12) | Team Connector Normalized Versions Rollout | FEEDCONN-GHSA-02-001 | GHSA normalized versions & provenance
Team instructions: Read ./AGENTS.md and each module's AGENTS file. Adopt the `NormalizedVersions` array emitted by the models sprint, wiring provenance `decisionReason` where merge overrides occur. Follow ./src/FASTER_MODELING_AND_NORMALIZATION.md; report via src/StellaOps.Concelier.Merge/TASKS.md (FEEDMERGE-COORD-02-900). Progress 2025-10-11: GHSA/OSV emit normalized arrays with refreshed fixtures; CVE mapper now surfaces SemVer normalized ranges; NVD/KEV adoption pending; outstanding follow-ups include FEEDSTORAGE-DATA-02-001, FEEDMERGE-ENGINE-02-002, and rolling `tools/FixtureUpdater` updates across connectors.
Progress 2025-10-20: Coordination matrix + rollout dashboard refreshed; upcoming deadlines tracked (Cccs/Cisco 2025-10-21, CertBund 2025-10-22, ICS-CISA 2025-10-23, KISA 2025-10-24) with escalation path documented in FEEDMERGE-COORD-02-900.| -| Sprint 1 | Stabilize In-Progress Foundations | src/StellaOps.Concelier.WebService/TASKS.md | DONE (2025-10-19) | Team WebService & Authority | FEEDWEB-OPS-01-006 | Rename plugin drop directory to namespaced path
Build outputs now point at `StellaOps.Concelier.PluginBinaries`/`StellaOps.Authority.PluginBinaries`; defaults/docs/tests updated to reflect the new layout. | -| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Excititor.Storage.Mongo/TASKS.md | DONE (2025-10-19) | Team Excititor Storage | EXCITITOR-STORAGE-02-001 | Statement events & scoring signals – immutable VEX statements store, consensus signal fields, and migration `20251019-consensus-signals-statements` with tests (`dotnet test src/StellaOps.Excititor.Core.Tests/StellaOps.Excititor.Core.Tests.csproj`, `dotnet test src/StellaOps.Excititor.Storage.Mongo.Tests/StellaOps.Excititor.Storage.Mongo.Tests.csproj`). | -| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Concelier.Core/TASKS.md | DONE (2025-10-19) | Team Core Engine & Storage Analytics | FEEDCORE-ENGINE-07-001 | Advisory event log & asOf queries – surface immutable statements and replay capability. | -| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Concelier.WebService/TASKS.md | DONE (2025-10-19) | Concelier WebService Guild | FEEDWEB-EVENTS-07-001 | Advisory event replay API – expose `/concelier/advisories/{key}/replay` with `asOf` filter, hex hashes, and conflict data. | -| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Concelier.Merge/TASKS.md | DONE (2025-10-20) | BE-Merge | FEEDMERGE-ENGINE-07-001 | Conflict sets & explainers – persist conflict materialization and replay hashes for merge decisions. | -| Sprint 8 | Mongo strengthening | src/StellaOps.Concelier.Storage.Mongo/TASKS.md | DONE (2025-10-19) | Team Normalization & Storage Backbone | FEEDSTORAGE-MONGO-08-001 | Causal-consistent Concelier storage sessions
Scoped session facilitator registered, repositories accept optional session handles, and replica-set failover tests verify read-your-write + monotonic reads. | -| Sprint 8 | Mongo strengthening | src/StellaOps.Authority/TASKS.md | DONE (2025-10-19) | Authority Core & Storage Guild | AUTHSTORAGE-MONGO-08-001 | Harden Authority Mongo usage
Scoped Mongo sessions with majority read/write concerns wired through stores and GraphQL/HTTP pipelines; replica-set election regression validated. | -| Sprint 8 | Mongo strengthening | src/StellaOps.Excititor.Storage.Mongo/TASKS.md | DONE (2025-10-19) | Team Excititor Storage | EXCITITOR-STORAGE-MONGO-08-001 | Causal consistency for Excititor repositories
Session-scoped repositories shipped with new Mongo records, orchestrators/workers now share scoped sessions, and replica-set failover coverage added via `dotnet test src/StellaOps.Excititor.Storage.Mongo.Tests/StellaOps.Excititor.Storage.Mongo.Tests.csproj`. | -| Sprint 8 | Platform Maintenance | src/StellaOps.Excititor.Storage.Mongo/TASKS.md | DONE (2025-10-19) | Team Excititor Storage | EXCITITOR-STORAGE-03-001 | Statement backfill tooling – shipped admin backfill endpoint, CLI hook (`stellaops excititor backfill-statements`), integration tests, and operator runbook (`docs/dev/EXCITITOR_STATEMENT_BACKFILL.md`). | -| Sprint 8 | Mirror Distribution | src/StellaOps.Concelier.Exporter.Json/TASKS.md | DONE (2025-10-19) | Concelier Export Guild | CONCELIER-EXPORT-08-201 | Mirror bundle + domain manifest – produce signed JSON aggregates for `*.stella-ops.org` mirrors. | -| Sprint 8 | Mirror Distribution | src/StellaOps.Concelier.Exporter.TrivyDb/TASKS.md | DONE (2025-10-19) | Concelier Export Guild | CONCELIER-EXPORT-08-202 | Mirror-ready Trivy DB bundles – mirror options emit per-domain manifests/metadata/db archives with deterministic digests for downstream sync. | -| Sprint 8 | Mirror Distribution | src/StellaOps.Concelier.WebService/TASKS.md | DONE (2025-10-20) | Concelier WebService Guild | CONCELIER-WEB-08-201 | Mirror distribution endpoints – expose domain-scoped index/download APIs with auth/quota. | -| Sprint 8 | Mirror Distribution | ops/devops/TASKS.md | DONE (2025-10-19) | DevOps Guild | DEVOPS-MIRROR-08-001 | Managed mirror deployments for `*.stella-ops.org` – Helm/Compose overlays, CDN, runbooks. | -| Sprint 8 | Plugin Infrastructure | src/StellaOps.Plugin/TASKS.md | DONE (2025-10-20) | Plugin Platform Guild, Authority Core | PLUGIN-DI-08-003 | Refactor Authority identity-provider registry to resolve scoped plugin services on-demand.
Introduce factory pattern aligned with scoped lifetimes decided in coordination workshop. | -| Sprint 8 | Plugin Infrastructure | src/StellaOps.Plugin/TASKS.md | DONE (2025-10-20) | Plugin Platform Guild, Authority Core | PLUGIN-DI-08-004 | Update Authority plugin loader to activate registrars with DI support and scoped service awareness.
Add two-phase initialization allowing scoped dependencies post-container build. | -| Sprint 8 | Plugin Infrastructure | src/StellaOps.Plugin/TASKS.md | DONE (2025-10-20) | Plugin Platform Guild, Authority Core | PLUGIN-DI-08-005 | Provide scoped-safe bootstrap execution for Authority plugins.
Implement scope-per-run pattern for hosted bootstrap tasks and document migration guidance. | -| Sprint 10 | DevOps Security | ops/devops/TASKS.md | DONE (2025-10-20) | DevOps Guild | DEVOPS-SEC-10-301 | Address NU1902/NU1903 advisories for `MongoDB.Driver` 2.12.0 and `SharpCompress` 0.23.0; Wave 0A prerequisites confirmed complete before remediation work. | -| Sprint 11 | Signing Chain Bring-up | src/StellaOps.Authority/TASKS.md | DONE (2025-10-20) | Authority Core & Security Guild | AUTH-DPOP-11-001 | Implement DPoP proof validation + nonce handling for high-value audiences per architecture. | -| Sprint 15 | Notify Foundations | src/StellaOps.Notify.WebService/TASKS.md | DONE (2025-10-19) | Notify WebService Guild | NOTIFY-WEB-15-103 | Delivery history & test-send endpoints. | -| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Connectors.Slack/TASKS.md | DONE (2025-10-20) | Notify Connectors Guild | NOTIFY-CONN-SLACK-15-502 | Slack health/test-send support. | -| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Connectors.Teams/TASKS.md | DONE (2025-10-20) | Notify Connectors Guild | NOTIFY-CONN-TEAMS-15-602 | Teams health/test-send support. | -| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Connectors.Teams/TASKS.md | DONE (2025-10-20) | Notify Connectors Guild | NOTIFY-CONN-TEAMS-15-604 | Teams health endpoint metadata alignment. | -| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Connectors.Slack/TASKS.md | DONE (2025-10-20) | Notify Connectors Guild | NOTIFY-CONN-SLACK-15-503 | Package Slack connector as restart-time plug-in (manifest + host registration). | -| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Connectors.Teams/TASKS.md | DONE (2025-10-20) | Notify Connectors Guild | NOTIFY-CONN-TEAMS-15-603 | Package Teams connector as restart-time plug-in (manifest + host registration). | -| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Connectors.Email/TASKS.md | DONE (2025-10-20) | Notify Connectors Guild | NOTIFY-CONN-EMAIL-15-703 | Package Email connector as restart-time plug-in (manifest + host registration). | -| Sprint 15 | Notify Foundations | src/StellaOps.Scanner.WebService/TASKS.md | DONE (2025-10-20) | Scanner WebService Guild | SCANNER-EVENTS-15-201 | Emit `scanner.report.ready` + `scanner.scan.completed` events. | -| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Connectors.Webhook/TASKS.md | DONE (2025-10-20) | Notify Connectors Guild | NOTIFY-CONN-WEBHOOK-15-803 | Package Webhook connector as restart-time plug-in (manifest + host registration). | -| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.Models/TASKS.md | DONE (2025-10-20) | Scheduler Models Guild | SCHED-MODELS-16-103 | Versioning/migration helpers for schedules/runs. | -| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.Queue/TASKS.md | DONE (2025-10-20) | Scheduler Queue Guild | SCHED-QUEUE-16-401 | Queue abstraction + Redis Streams adapter. | -| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.Queue/TASKS.md | DONE (2025-10-20) | Scheduler Queue Guild | SCHED-QUEUE-16-402 | NATS JetStream adapter with health probes. | -| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.ImpactIndex/TASKS.md | DONE (2025-10-20) | Scheduler ImpactIndex Guild | SCHED-IMPACT-16-300 | **STUB** ImpactIndex ingest/query using fixtures (to be removed by SP16 completion). 
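As a rough sketch of the ack/claim flow the Redis Streams queue adapters above describe (StackExchange.Redis assumed; the stream, group, and payload field names are placeholders rather than the StellaOps queue contract, and reclaiming entries stuck with a dead consumer would additionally use `StreamClaimAsync`):

```csharp
using System;
using StackExchange.Redis;

// Illustrative only: stream, group, and payload field names are placeholders.
var mux = await ConnectionMultiplexer.ConnectAsync("localhost:6379");
var db = mux.GetDatabase();

const string stream = "scanner:jobs";
const string group = "scanner-workers";

// Create the consumer group once; Redis answers BUSYGROUP when it already exists.
try { await db.StreamCreateConsumerGroupAsync(stream, group, StreamPosition.NewMessages); }
catch (RedisServerException ex) when (ex.Message.StartsWith("BUSYGROUP")) { }

// Enqueue with an idempotency token so redeliveries can be de-duplicated downstream.
await db.StreamAddAsync(stream, new NameValueEntry[]
{
    new("jobId", "scan-123"),
    new("idempotencyKey", Guid.NewGuid().ToString("N"))
});

// Read undelivered entries for this consumer, process them, then acknowledge.
var entries = await db.StreamReadGroupAsync(stream, group, "worker-1", StreamPosition.NewMessages, 10);
foreach (var entry in entries)
{
    // ... process entry.Values here ...
    await db.StreamAcknowledgeAsync(stream, group, entry.Id);
}
```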
| diff --git a/SPRINTS_PRIOR_20251021.md b/SPRINTS_PRIOR_20251021.md deleted file mode 100644 index f195a15c..00000000 --- a/SPRINTS_PRIOR_20251021.md +++ /dev/null @@ -1,88 +0,0 @@ -This file describe implementation of Stella Ops (docs/README.md). Implementation must respect rules from AGENTS.md (read if you have not). - -| Sprint | Theme | Tasks File Path | Status | Type of Specialist | Task ID | Task Description | -| --- | --- | --- | --- | --- | --- | --- | -| Sprint 7 | Contextual Truth Foundations | docs/TASKS.md | DONE (2025-10-22) | Docs Guild, Concelier WebService | DOCS-CONCELIER-07-201 | Final editorial review and publish pass for Concelier authority toggle documentation (Quickstart + operator guide). | -| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Excititor.WebService/TASKS.md | DONE (2025-10-20) | Team Excititor WebService | EXCITITOR-WEB-01-002 | Ingest & reconcile endpoints – scope-enforced `/excititor/init`, `/excititor/ingest/run`, `/excititor/ingest/resume`, `/excititor/reconcile`; regression via `dotnet test … --filter FullyQualifiedName~IngestEndpointsTests`. | -| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Excititor.WebService/TASKS.md | DONE (2025-10-20) | Team Excititor WebService | EXCITITOR-WEB-01-004 | Resolve API & signed responses – expose `/excititor/resolve`, return signed consensus/score envelopes, document auth. | -| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Excititor.Worker/TASKS.md | DONE (2025-10-21) | Team Excititor Worker | EXCITITOR-WORKER-01-004 | TTL refresh & stability damper – schedule re-resolve loops and guard against status flapping. | -| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Concelier.Core/TASKS.md | DONE (2025-10-21) | Team Core Engine & Data Science | FEEDCORE-ENGINE-07-002 | Noise prior computation service – learn false-positive priors and expose deterministic summaries. | -| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Concelier.Core/TASKS.md | DONE (2025-10-21) | Team Core Engine & Storage Analytics | FEEDCORE-ENGINE-07-003 | Unknown state ledger & confidence seeding – persist unknown flags, seed confidence bands, expose query surface. | -| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Excititor.WebService/TASKS.md | DONE (2025-10-19) | Team Excititor WebService | EXCITITOR-WEB-01-005 | Mirror distribution endpoints – expose download APIs for downstream Excititor instances. | -| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Excititor.Export/TASKS.md | DONE (2025-10-21) | Team Excititor Export | EXCITITOR-EXPORT-01-005 | Score & resolve envelope surfaces – include signed consensus/score artifacts in exports. | -| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Excititor.Export/TASKS.md | DONE (2025-10-21) | Team Excititor Export | EXCITITOR-EXPORT-01-006 | Quiet provenance packaging – attach quieted-by statement IDs, signers, justification codes to exports and attestations. | -| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Excititor.Export/TASKS.md | DONE (2025-10-21) | Team Excititor Export | EXCITITOR-EXPORT-01-007 | Mirror bundle + domain manifest – publish signed consensus bundles for mirrors. | -| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Excititor.Connectors.StellaOpsMirror/TASKS.md | DONE (2025-10-21) | Excititor Connectors – Stella | EXCITITOR-CONN-STELLA-07-001 | Excititor mirror connector – ingest signed mirror bundles and map to VexClaims with resume handling. 
| -| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Concelier.Storage.Mongo/TASKS.md | DONE (2025-10-19) | Team Normalization & Storage Backbone | FEEDSTORAGE-DATA-07-001 | Advisory statement & conflict collections – provision Mongo schema/indexes for event-sourced merge. | -| Sprint 7 | Contextual Truth Foundations | src/StellaOps.Web/TASKS.md | DONE (2025-10-21) | UX Specialist, Angular Eng | WEB1.TRIVY-SETTINGS-TESTS | Add headless UI test run (`ng test --watch=false`) and document prerequisites once Angular tooling is chained up. | -| Sprint 8 | Mirror Distribution | src/StellaOps.Concelier.Connector.StellaOpsMirror/TASKS.md | DONE (2025-10-20) | BE-Conn-Stella | FEEDCONN-STELLA-08-001 | Concelier mirror connector – fetch mirror manifest, verify signatures, and hydrate canonical DTOs with resume support. | -| Sprint 8 | Mirror Distribution | src/StellaOps.Concelier.Connector.StellaOpsMirror/TASKS.md | DONE (2025-10-20) | BE-Conn-Stella | FEEDCONN-STELLA-08-002 | Map mirror payloads into canonical advisory DTOs with provenance referencing mirror domain + original source metadata. | -| Sprint 8 | Mirror Distribution | src/StellaOps.Concelier.Connector.StellaOpsMirror/TASKS.md | DONE (2025-10-20) | BE-Conn-Stella | FEEDCONN-STELLA-08-003 | Add incremental cursor + resume support (per-export fingerprint) and document configuration for downstream Concelier instances. | -| Sprint 8 | Plugin Infrastructure | src/StellaOps.Plugin/TASKS.md | DONE (2025-10-21) | Plugin Platform Guild | PLUGIN-DI-08-001 | Scoped service support in plugin bootstrap – added dynamic plugin tests ensuring `[ServiceBinding]` metadata flows through plugin hosts and remains idempotent. | -| Sprint 8 | Plugin Infrastructure | src/StellaOps.Plugin/TASKS.md | DONE (2025-10-20) | Plugin Platform Guild, Authority Core | PLUGIN-DI-08-002.COORD | Authority scoped-service integration handshake
Workshop concluded 2025-10-20 15:00–16:05 UTC; decisions + follow-ups recorded in `docs/dev/authority-plugin-di-coordination.md`. | -| Sprint 8 | Plugin Infrastructure | src/StellaOps.Plugin/TASKS.md | DONE (2025-10-20) | Plugin Platform Guild, Authority Core | PLUGIN-DI-08-002 | Authority plugin integration updates – scoped identity-provider services with registry handles; regression coverage via scoped registrar/unit tests. | -| Sprint 8 | Plugin Infrastructure | src/StellaOps.Authority/TASKS.md | DONE (2025-10-20) | Authority Core, Plugin Platform Guild | AUTH-PLUGIN-COORD-08-002 | Coordinate scoped-service adoption for Authority plug-in registrars
Workshop notes and follow-up backlog captured 2025-10-20 in `docs/dev/authority-plugin-di-coordination.md`. | -| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.WebService/TASKS.md | DONE (2025-10-19) | Team Scanner WebService | SCANNER-WEB-09-103 | Progress streaming (SSE/JSONL) with correlation IDs and ISO-8601 UTC timestamps, documented in API reference. | -| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.WebService/TASKS.md | DONE (2025-10-19) | Team Scanner WebService | SCANNER-POLICY-09-105 | Policy snapshot loader + schema + OpenAPI (YAML ignore rules, VEX include/exclude, vendor precedence). | -| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.WebService/TASKS.md | DONE (2025-10-19) | Team Scanner WebService | SCANNER-POLICY-09-106 | `/reports` verdict assembly (Feedser+Vexer+Policy) + signed response envelope. | -| Sprint 9 | Scanner Core Foundations | src/StellaOps.Scanner.WebService/TASKS.md | DONE (2025-10-19) | Team Scanner WebService | SCANNER-POLICY-09-107 | Expose score inputs, config version, and quiet provenance in `/reports` JSON and signed payload. | -| Sprint 9 | DevOps Foundations | ops/devops/TASKS.md | DONE (2025-10-21) | DevOps Guild, Scanner WebService Guild | DEVOPS-SCANNER-09-204 | Surface `SCANNER__EVENTS__*` env config across Compose/Helm and document overrides. | -| Sprint 9 | DevOps Foundations | ops/devops/TASKS.md | DONE (2025-10-21) | DevOps Guild, Notify Guild | DEVOPS-SCANNER-09-205 | Notify smoke job validates Redis stream + Notify deliveries after staging deploys. | -| Sprint 9 | Policy Foundations | src/StellaOps.Policy/TASKS.md | DONE (2025-10-19) | Policy Guild | POLICY-CORE-09-004 | Versioned scoring config with schema validation, trust table, and golden fixtures. | -| Sprint 9 | Policy Foundations | src/StellaOps.Policy/TASKS.md | DONE (2025-10-19) | Policy Guild | POLICY-CORE-09-005 | Scoring/quiet engine – compute score, enforce VEX-only quiet rules, emit inputs and provenance. | -| Sprint 9 | Policy Foundations | src/StellaOps.Policy/TASKS.md | DONE (2025-10-19) | Policy Guild | POLICY-CORE-09-006 | Unknown state & confidence decay – deterministic bands surfaced in policy outputs. | -| Sprint 9 | Docs & Governance | docs/TASKS.md | DONE (2025-10-21) | Platform Events Guild | PLATFORM-EVENTS-09-401 | Embed canonical event samples into contract/integration tests and ensure CI validates payloads against published schemas. | -| Sprint 10 | Benchmarks | src/StellaOps.Bench/TASKS.md | DONE (2025-10-21) | Bench Guild, Language Analyzer Guild | BENCH-SCANNER-10-002 | Wire real language analyzers into bench harness & refresh baselines post-implementation. | -| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Analyzers.Lang/TASKS.md | DONE (2025-10-21) | Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-302 | Node analyzer handling workspaces/symlinks emitting `pkg:npm`. | -| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Analyzers.Lang/TASKS.md | DONE (2025-10-21) | Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-303 | Python analyzer reading `*.dist-info`, RECORD hashes, entry points. | -| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Analyzers.Lang/TASKS.md | DONE (2025-10-22) | Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-304 | Go analyzer leveraging buildinfo for `pkg:golang` components. 
| -| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md | DONE (2025-10-22) | Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-304E | Plumb Go heuristic counter into Scanner metrics pipeline and alerting. | -| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Analyzers.Lang/TASKS.md | DONE (2025-10-22) | Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-305 | .NET analyzer parsing `*.deps.json`, assembly metadata, RID variants. | -| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Analyzers.Lang/TASKS.md | DONE (2025-10-22) | Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-306 | Rust analyzer detecting crates or falling back to `bin:{sha256}`. | -| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Analyzers.Lang/TASKS.md | DONE (2025-10-19) | Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-307 | Shared language evidence helpers + usage flag propagation. | -| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Analyzers.Lang/TASKS.md | DONE (2025-10-19) | Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-308 | Determinism + fixture harness for language analyzers. | -| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Analyzers.Lang/TASKS.md | DONE (2025-10-21) | Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-309 | Package language analyzers as restart-time plug-ins (manifest + host registration). | -| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Emit/TASKS.md | DONE (2025-10-22) | Emit Guild | SCANNER-EMIT-10-601 | Compose inventory SBOM (CycloneDX JSON/Protobuf) from layer fragments. | -| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Emit/TASKS.md | DONE (2025-10-22) | Emit Guild | SCANNER-EMIT-10-602 | Compose usage SBOM leveraging EntryTrace to flag actual usage. | -| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Emit/TASKS.md | DONE (2025-10-22) | Emit Guild | SCANNER-EMIT-10-603 | Generate BOM index sidecar (purl table + roaring bitmap + usage flag). | -| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Emit/TASKS.md | DONE (2025-10-22) | Emit Guild | SCANNER-EMIT-10-604 | Package artifacts for export + attestation with deterministic manifests. | -| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Emit/TASKS.md | DONE (2025-10-22) | Emit Guild | SCANNER-EMIT-10-605 | Emit BOM-Index sidecar schema/fixtures (CRITICAL PATH for SP16). | -| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Emit/TASKS.md | DONE (2025-10-22) | Emit Guild | SCANNER-EMIT-10-606 | Usage view bit flags integrated with EntryTrace. | -| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Emit/TASKS.md | DONE (2025-10-22) | Emit Guild | SCANNER-EMIT-10-607 | Embed scoring inputs, confidence band, and quiet provenance in CycloneDX/DSSE artifacts. | -| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Cache/TASKS.md | DONE (2025-10-19) | Scanner Cache Guild | SCANNER-CACHE-10-101 | Implement layer cache store keyed by layer digest with metadata retention per architecture §3.3. | -| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Cache/TASKS.md | DONE (2025-10-19) | Scanner Cache Guild | SCANNER-CACHE-10-102 | Build file CAS with dedupe, TTL enforcement, and offline import/export hooks. 
| -| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Cache/TASKS.md | DONE (2025-10-19) | Scanner Cache Guild | SCANNER-CACHE-10-103 | Expose cache metrics/logging and configuration toggles for warm/cold thresholds. | -| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Cache/TASKS.md | DONE (2025-10-19) | Scanner Cache Guild | SCANNER-CACHE-10-104 | Implement cache invalidation workflows (layer delete, TTL expiry, diff invalidation). | -| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Analyzers.OS/TASKS.md | DONE (2025-10-19) | OS Analyzer Guild | SCANNER-ANALYZERS-OS-10-201 | Alpine/apk analyzer emitting deterministic components with provenance. | -| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Analyzers.OS/TASKS.md | DONE (2025-10-19) | OS Analyzer Guild | SCANNER-ANALYZERS-OS-10-202 | Debian/dpkg analyzer mapping packages to purl identity with evidence. | -| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Analyzers.OS/TASKS.md | DONE (2025-10-19) | OS Analyzer Guild | SCANNER-ANALYZERS-OS-10-203 | RPM analyzer capturing EVR, file listings, provenance. | -| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Analyzers.OS/TASKS.md | DONE (2025-10-19) | OS Analyzer Guild | SCANNER-ANALYZERS-OS-10-204 | Shared OS evidence helpers for package identity + provenance. | -| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Analyzers.OS/TASKS.md | DONE (2025-10-19) | OS Analyzer Guild | SCANNER-ANALYZERS-OS-10-205 | Vendor metadata enrichment (source packages, license, CVE hints). | -| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Analyzers.OS/TASKS.md | DONE (2025-10-19) | OS Analyzer Guild | SCANNER-ANALYZERS-OS-10-206 | Determinism harness + fixtures for OS analyzers. | -| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Analyzers.OS/TASKS.md | DONE (2025-10-19) | OS Analyzer Guild | SCANNER-ANALYZERS-OS-10-207 | Package OS analyzers as restart-time plug-ins (manifest + host registration). | -| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Analyzers.Lang/TASKS.md | DONE (2025-10-19) | Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-301 | Java analyzer emitting `pkg:maven` with provenance. | -| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.EntryTrace/TASKS.md | DONE (2025-10-19) | EntryTrace Guild | SCANNER-ENTRYTRACE-10-401 | POSIX shell AST parser with deterministic output. | -| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.EntryTrace/TASKS.md | DONE (2025-10-19) | EntryTrace Guild | SCANNER-ENTRYTRACE-10-402 | Command resolution across layered rootfs with evidence attribution. | -| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.EntryTrace/TASKS.md | DONE (2025-10-19) | EntryTrace Guild | SCANNER-ENTRYTRACE-10-403 | Interpreter tracing for shell wrappers to Python/Node/Java launchers. | -| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.EntryTrace/TASKS.md | DONE (2025-10-19) | EntryTrace Guild | SCANNER-ENTRYTRACE-10-404 | Python entry analyzer (venv shebang, module invocation, usage flag). | -| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.EntryTrace/TASKS.md | DONE (2025-10-19) | EntryTrace Guild | SCANNER-ENTRYTRACE-10-405 | Node/Java launcher analyzer capturing script/jar targets. 
| -| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.EntryTrace/TASKS.md | DONE (2025-10-19) | EntryTrace Guild | SCANNER-ENTRYTRACE-10-406 | Explainability + diagnostics for unresolved constructs with metrics. | -| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.EntryTrace/TASKS.md | DONE (2025-10-19) | EntryTrace Guild | SCANNER-ENTRYTRACE-10-407 | Package EntryTrace analyzers as restart-time plug-ins (manifest + host registration). | -| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Diff/TASKS.md | DONE (2025-10-19) | Diff Guild | SCANNER-DIFF-10-501 | Build component differ tracking add/remove/version changes with deterministic ordering. | -| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Diff/TASKS.md | DONE (2025-10-19) | Diff Guild | SCANNER-DIFF-10-502 | Attribute diffs to introducing/removing layers including provenance evidence. | -| Sprint 10 | Scanner Analyzers & SBOM | src/StellaOps.Scanner.Diff/TASKS.md | DONE (2025-10-19) | Diff Guild | SCANNER-DIFF-10-503 | Produce JSON diff output for inventory vs usage views aligned with API contract. | -| Sprint 10 | Samples | samples/TASKS.md | DONE (2025-10-20) | Samples Guild, Scanner Team | SAMPLES-10-001 | Sample images with SBOM/BOM-Index sidecars. | -| Sprint 10 | DevOps Perf | ops/devops/TASKS.md | DONE (2025-10-22) | DevOps Guild | DEVOPS-PERF-10-001 | Perf smoke job ensuring <5 s SBOM compose. | -| Sprint 10 | DevOps Perf | ops/devops/TASKS.md | DONE (2025-10-23) | DevOps Guild | DEVOPS-PERF-10-002 | Publish analyzer bench metrics to Grafana/perf workbook and alarm on ≥20 % regressions. | -| Sprint 10 | Policy Samples | samples/TASKS.md | DONE (2025-10-23) | Samples Guild, Policy Guild | SAMPLES-13-004 | Add policy preview/report fixtures showing confidence bands and unknown-age tags. | -| Sprint 10 | Policy Samples | src/StellaOps.Web/TASKS.md | DONE (2025-10-23) | UI Guild | WEB-POLICY-FIXTURES-10-001 | Wire policy preview/report doc fixtures into UI harness (test utility or Storybook substitute) with type bindings and validation guard so UI stays aligned with documented payloads. | -| Sprint 11 | Signing Chain Bring-up | src/StellaOps.Signer/TASKS.md | DONE (2025-10-21) | Signer Guild | SIGNER-API-11-101 | `/sign/dsse` pipeline with Authority auth, PoE introspection, release verification, DSSE signing. | -| Sprint 11 | Signing Chain Bring-up | src/StellaOps.Signer/TASKS.md | DONE (2025-10-21) | Signer Guild | SIGNER-REF-11-102 | `/verify/referrers` endpoint with OCI lookup, caching, and policy enforcement. | -| Sprint 11 | Signing Chain Bring-up | src/StellaOps.Signer/TASKS.md | DONE (2025-10-21) | Signer Guild | SIGNER-QUOTA-11-103 | Enforce plan quotas, concurrency/QPS limits, artifact size caps with metrics/audit logs. | -| Sprint 11 | Signing Chain Bring-up | src/StellaOps.Authority/TASKS.md | DONE (2025-10-23) | Authority Core & Security Guild | AUTH-MTLS-11-002 | Add OAuth mTLS client credential support with certificate-bound tokens and introspection updates. | -| Sprint 12 | Runtime Guardrails | src/StellaOps.Scanner.WebService/TASKS.md | DONE (2025-10-20) | Scanner WebService Guild | SCANNER-RUNTIME-12-301 | `/runtime/events` ingestion endpoint with validation, batching, storage hooks. | -| Sprint 13 | UX & CLI Experience | src/StellaOps.Cli/TASKS.md | DONE (2025-10-21) | DevEx/CLI | CLI-OFFLINE-13-006 | Implement offline kit pull/import/status commands with integrity checks. 
| -| Sprint 13 | UX & CLI Experience | src/StellaOps.Cli/TASKS.md | DONE (2025-10-22) | DevEx/CLI | CLI-PLUGIN-13-007 | Package non-core CLI verbs as restart-time plug-ins (manifest + loader tests). | -| Sprint 13 | UX & CLI Experience | src/StellaOps.Web/TASKS.md | DONE (2025-10-21) | UX Specialist, Angular Eng, DevEx | WEB1.DEPS-13-001 | Stabilise Angular workspace dependencies for headless CI installs (`npm install`, Chromium handling, docs). | -| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.Queue/TASKS.md | DONE (2025-10-20) | Scheduler Queue Guild | SCHED-QUEUE-16-403 | Dead-letter handling + metrics. | -| Sprint 18 | Launch Readiness | ops/offline-kit/TASKS.md | DONE (2025-10-22) | Offline Kit Guild, Scanner Guild | DEVOPS-OFFLINE-18-004 | Rebuild Offline Kit bundle with Go analyzer plug-in and refreshed manifest/signature set. | diff --git a/SPRINTS_PRIOR_20251025.md b/SPRINTS_PRIOR_20251025.md deleted file mode 100644 index 53b12e13..00000000 --- a/SPRINTS_PRIOR_20251025.md +++ /dev/null @@ -1,34 +0,0 @@ -This file describe implementation of Stella Ops (docs/README.md). Implementation must respect rules from AGENTS.md (read if you have not). - -| Sprint | Theme | Tasks File Path | Status | Type of Specialist | Task ID | Task Description | -| --- | --- | --- | --- | --- | --- | --- | -| Sprint 11 | Signing Chain Bring-up | src/StellaOps.Attestor/TASKS.md | DONE (2025-10-19) | Attestor Guild | ATTESTOR-API-11-201 | `/rekor/entries` submission pipeline with dedupe, proof acquisition, and persistence. | -| Sprint 11 | Signing Chain Bring-up | src/StellaOps.Attestor/TASKS.md | DONE (2025-10-19) | Attestor Guild | ATTESTOR-VERIFY-11-202 | `/rekor/verify` + retrieval endpoints validating signatures and Merkle proofs. | -| Sprint 11 | Signing Chain Bring-up | src/StellaOps.Attestor/TASKS.md | DONE (2025-10-19) | Attestor Guild | ATTESTOR-OBS-11-203 | Telemetry, alerting, mTLS hardening, and archive workflow for Attestor. | -| Sprint 11 | Storage Platform Hardening | src/StellaOps.Scanner.Storage/TASKS.md | DONE (2025-10-23) | Scanner Storage Guild | SCANNER-STORAGE-11-401 | Migrate scanner object storage integration from MinIO to RustFS with data migration plan. | -| Sprint 11 | UI Integration | src/StellaOps.UI/TASKS.md | DONE (2025-10-23) | UI Guild | UI-ATTEST-11-005 | Attestation visibility (Rekor id, status) on Scan Detail. | -| Sprint 12 | Runtime Guardrails | src/StellaOps.Zastava.Core/TASKS.md | DONE (2025-10-23) | Zastava Core Guild | ZASTAVA-CORE-12-201 | Define runtime event/admission DTOs, hashing helpers, and versioning strategy. | -| Sprint 12 | Runtime Guardrails | src/StellaOps.Zastava.Core/TASKS.md | DONE (2025-10-23) | Zastava Core Guild | ZASTAVA-CORE-12-202 | Provide configuration/logging/metrics utilities shared by Observer/Webhook. | -| Sprint 12 | Runtime Guardrails | src/StellaOps.Zastava.Core/TASKS.md | DONE (2025-10-23) | Zastava Core Guild | ZASTAVA-CORE-12-203 | Authority client helpers, OpTok caching, and security guardrails for runtime services. | -| Sprint 12 | Runtime Guardrails | src/StellaOps.Zastava.Core/TASKS.md | DONE (2025-10-23) | Zastava Core Guild | ZASTAVA-OPS-12-204 | Operational runbooks, alert rules, and dashboard exports for runtime plane. | -| Sprint 12 | Runtime Guardrails | src/StellaOps.Zastava.Observer/TASKS.md | DONE (2025-10-24) | Zastava Observer Guild | ZASTAVA-OBS-12-001 | Container lifecycle watcher emitting deterministic runtime events with buffering. 
| -| Sprint 12 | Runtime Guardrails | src/StellaOps.Zastava.Observer/TASKS.md | DONE (2025-10-24) | Zastava Observer Guild | ZASTAVA-OBS-12-002 | Capture entrypoint traces + loaded libraries, hashing binaries and linking to baseline SBOM. | -| Sprint 12 | Runtime Guardrails | src/StellaOps.Zastava.Observer/TASKS.md | DONE (2025-10-24) | Zastava Observer Guild | ZASTAVA-OBS-12-003 | Posture checks for signatures/SBOM/attestation with offline caching. | -| Sprint 12 | Runtime Guardrails | src/StellaOps.Zastava.Observer/TASKS.md | DONE (2025-10-24) | Zastava Observer Guild | ZASTAVA-OBS-12-004 | Batch `/runtime/events` submissions with disk-backed buffer and rate limits. | -| Sprint 12 | Runtime Guardrails | src/StellaOps.Zastava.Webhook/TASKS.md | DONE (2025-10-24) | Zastava Webhook Guild | ZASTAVA-WEBHOOK-12-101 | Admission controller host with TLS bootstrap and Authority auth. | -| Sprint 12 | Runtime Guardrails | src/StellaOps.Zastava.Webhook/TASKS.md | DONE (2025-10-24) | Zastava Webhook Guild | ZASTAVA-WEBHOOK-12-102 | Query Scanner `/policy/runtime`, resolve digests, enforce verdicts. | -| Sprint 12 | Runtime Guardrails | src/StellaOps.Zastava.Webhook/TASKS.md | DONE (2025-10-24) | Zastava Webhook Guild | ZASTAVA-WEBHOOK-12-103 | Caching, fail-open/closed toggles, metrics/logging for admission decisions. | -| Sprint 12 | Runtime Guardrails | src/StellaOps.Zastava.Webhook/TASKS.md | DONE (2025-10-24) | Zastava Webhook Guild | ZASTAVA-WEBHOOK-12-104 | Wire `/admission` endpoint to runtime policy client and emit allow/deny envelopes. | -| Sprint 12 | Runtime Guardrails | src/StellaOps.Scanner.WebService/TASKS.md | DONE (2025-10-24) | Scanner WebService Guild | SCANNER-RUNTIME-12-302 | `/policy/runtime` endpoint joining SBOM baseline + policy verdict, returning admission guidance. | -| Sprint 12 | Runtime Guardrails | src/StellaOps.Scanner.WebService/TASKS.md | DONE (2025-10-24) | Scanner WebService Guild | SCANNER-RUNTIME-12-303 | Align `/policy/runtime` verdicts with canonical policy evaluation (Feedser/Vexer). | -| Sprint 12 | Runtime Guardrails | src/StellaOps.Scanner.WebService/TASKS.md | DONE (2025-10-24) | Scanner WebService Guild | SCANNER-RUNTIME-12-304 | Integrate attestation verification into runtime policy metadata. | -| Sprint 12 | Runtime Guardrails | src/StellaOps.Scanner.WebService/TASKS.md | DONE (2025-10-24) | Scanner WebService Guild | SCANNER-RUNTIME-12-305 | Deliver shared fixtures + e2e validation with Zastava/CLI teams. | -| Sprint 13 | UX & CLI Experience | src/StellaOps.UI/TASKS.md | DONE (2025-10-23) | UI Guild | UI-AUTH-13-001 | Integrate Authority OIDC + DPoP flows with session management. | -| Sprint 13 | UX & CLI Experience | src/StellaOps.UI/TASKS.md | DONE (2025-10-25) | UI Guild | UI-NOTIFY-13-006 | Notify panel: channels/rules CRUD, deliveries view, test send. | -| Sprint 13 | Platform Reliability | ops/devops/TASKS.md | DONE (2025-10-25) | DevOps Guild, Platform Leads | DEVOPS-NUGET-13-001 | Wire up .NET 10 preview feeds/local mirrors so `dotnet restore` succeeds offline; document updated NuGet bootstrap. | -| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Queue/TASKS.md | DONE (2025-10-23) | Notify Queue Guild | NOTIFY-QUEUE-15-401 | Bus abstraction + Redis Streams adapter with ordering/idempotency. | -| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Queue/TASKS.md | DONE (2025-10-23) | Notify Queue Guild | NOTIFY-QUEUE-15-402 | NATS JetStream adapter with health probes and failover. 
| -| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Queue/TASKS.md | DONE (2025-10-23) | Notify Queue Guild | NOTIFY-QUEUE-15-403 | Delivery queue with retry/dead-letter + metrics. | -| Sprint 15 | Notify Foundations | src/StellaOps.Notify.Worker/TASKS.md | DONE (2025-10-23) | Notify Worker Guild | NOTIFY-WORKER-15-201 | Bus subscription + leasing loop with backoff. | -| Sprint 17 | Symbol Intelligence & Forensics | src/StellaOps.Zastava.Observer/TASKS.md | DONE (2025-10-25) | Zastava Observer Guild | ZASTAVA-OBS-17-005 | Collect GNU build-id during runtime observation and attach it to emitted events. | -| Sprint 17 | Symbol Intelligence & Forensics | src/StellaOps.Scanner.WebService/TASKS.md | DONE (2025-10-25) | Scanner WebService Guild | SCANNER-RUNTIME-17-401 | Persist runtime build-id observations and expose them for debug-symbol correlation. | diff --git a/deploy/compose/docker-compose.prod.yaml b/deploy/compose/docker-compose.prod.yaml index 1b22790e..d58df33e 100644 --- a/deploy/compose/docker-compose.prod.yaml +++ b/deploy/compose/docker-compose.prod.yaml @@ -1,180 +1,180 @@ -x-release-labels: &release-labels - com.stellaops.release.version: "2025.09.2" - com.stellaops.release.channel: "stable" - com.stellaops.profile: "prod" - -networks: - stellaops: - driver: bridge - frontdoor: - external: true - name: ${FRONTDOOR_NETWORK:-stellaops_frontdoor} - -volumes: - mongo-data: - minio-data: - rustfs-data: - concelier-jobs: - nats-data: - -services: - mongo: - image: docker.io/library/mongo@sha256:c258b26dbb7774f97f52aff52231ca5f228273a84329c5f5e451c3739457db49 - command: ["mongod", "--bind_ip_all"] - restart: unless-stopped - environment: - MONGO_INITDB_ROOT_USERNAME: "${MONGO_INITDB_ROOT_USERNAME}" - MONGO_INITDB_ROOT_PASSWORD: "${MONGO_INITDB_ROOT_PASSWORD}" - volumes: - - mongo-data:/data/db - networks: - - stellaops - labels: *release-labels - - minio: - image: docker.io/minio/minio@sha256:14cea493d9a34af32f524e538b8346cf79f3321eff8e708c1e2960462bd8936e - command: ["server", "/data", "--console-address", ":9001"] - restart: unless-stopped - environment: - MINIO_ROOT_USER: "${MINIO_ROOT_USER}" - MINIO_ROOT_PASSWORD: "${MINIO_ROOT_PASSWORD}" - volumes: - - minio-data:/data - ports: - - "${MINIO_CONSOLE_PORT:-9001}:9001" - networks: - - stellaops - labels: *release-labels - - rustfs: - image: registry.stella-ops.org/stellaops/rustfs:2025.10.0-edge - command: ["serve", "--listen", "0.0.0.0:8080", "--root", "/data"] - restart: unless-stopped - environment: - RUSTFS__LOG__LEVEL: info - RUSTFS__STORAGE__PATH: /data - volumes: - - rustfs-data:/data - ports: - - "${RUSTFS_HTTP_PORT:-8080}:8080" - networks: - - stellaops - labels: *release-labels - - nats: - image: docker.io/library/nats@sha256:c82559e4476289481a8a5196e675ebfe67eea81d95e5161e3e78eccfe766608e - command: - - "-js" - - "-sd" - - /data - restart: unless-stopped - ports: - - "${NATS_CLIENT_PORT:-4222}:4222" - volumes: - - nats-data:/data - networks: - - stellaops - labels: *release-labels - - authority: - image: registry.stella-ops.org/stellaops/authority@sha256:b0348bad1d0b401cc3c71cb40ba034c8043b6c8874546f90d4783c9dbfcc0bf5 - restart: unless-stopped - depends_on: - - mongo - environment: - STELLAOPS_AUTHORITY__ISSUER: "${AUTHORITY_ISSUER}" - STELLAOPS_AUTHORITY__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017" - STELLAOPS_AUTHORITY__PLUGINDIRECTORIES__0: "/app/plugins" - STELLAOPS_AUTHORITY__PLUGINS__CONFIGURATIONDIRECTORY: "/app/etc/authority.plugins" - 
volumes: - - ../../etc/authority.yaml:/etc/authority.yaml:ro - - ../../etc/authority.plugins:/app/etc/authority.plugins:ro - ports: - - "${AUTHORITY_PORT:-8440}:8440" - networks: - - stellaops - - frontdoor - labels: *release-labels - - signer: - image: registry.stella-ops.org/stellaops/signer@sha256:8ad574e61f3a9e9bda8a58eb2700ae46813284e35a150b1137bc7c2b92ac0f2e - restart: unless-stopped - depends_on: - - authority - environment: - SIGNER__AUTHORITY__BASEURL: "https://authority:8440" - SIGNER__POE__INTROSPECTURL: "${SIGNER_POE_INTROSPECT_URL}" - SIGNER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017" - ports: - - "${SIGNER_PORT:-8441}:8441" - networks: - - stellaops - - frontdoor - labels: *release-labels - - attestor: - image: registry.stella-ops.org/stellaops/attestor@sha256:0534985f978b0b5d220d73c96fddd962cd9135f616811cbe3bff4666c5af568f - restart: unless-stopped - depends_on: - - signer - environment: - ATTESTOR__SIGNER__BASEURL: "https://signer:8441" - ATTESTOR__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017" - ports: - - "${ATTESTOR_PORT:-8442}:8442" - networks: - - stellaops - - frontdoor - labels: *release-labels - - concelier: - image: registry.stella-ops.org/stellaops/concelier@sha256:c58cdcaee1d266d68d498e41110a589dd204b487d37381096bd61ab345a867c5 - restart: unless-stopped - depends_on: - - mongo - - minio - environment: - CONCELIER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017" - CONCELIER__STORAGE__S3__ENDPOINT: "http://minio:9000" - CONCELIER__STORAGE__S3__ACCESSKEYID: "${MINIO_ROOT_USER}" - CONCELIER__STORAGE__S3__SECRETACCESSKEY: "${MINIO_ROOT_PASSWORD}" - CONCELIER__AUTHORITY__BASEURL: "https://authority:8440" - volumes: - - concelier-jobs:/var/lib/concelier/jobs - ports: - - "${CONCELIER_PORT:-8445}:8445" - networks: - - stellaops - - frontdoor - labels: *release-labels - - scanner-web: - image: registry.stella-ops.org/stellaops/scanner-web@sha256:14b23448c3f9586a9156370b3e8c1991b61907efa666ca37dd3aaed1e79fe3b7 - restart: unless-stopped - depends_on: - - concelier - - rustfs - - nats - environment: - SCANNER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017" - SCANNER__ARTIFACTSTORE__DRIVER: "rustfs" - SCANNER__ARTIFACTSTORE__ENDPOINT: "http://rustfs:8080/api/v1" - SCANNER__ARTIFACTSTORE__BUCKET: "scanner-artifacts" - SCANNER__ARTIFACTSTORE__TIMEOUTSECONDS: "30" - SCANNER__QUEUE__BROKER: "${SCANNER_QUEUE_BROKER}" - SCANNER__EVENTS__ENABLED: "${SCANNER_EVENTS_ENABLED:-true}" - SCANNER__EVENTS__DRIVER: "${SCANNER_EVENTS_DRIVER:-redis}" - SCANNER__EVENTS__DSN: "${SCANNER_EVENTS_DSN:-}" - SCANNER__EVENTS__STREAM: "${SCANNER_EVENTS_STREAM:-stella.events}" - SCANNER__EVENTS__PUBLISHTIMEOUTSECONDS: "${SCANNER_EVENTS_PUBLISH_TIMEOUT_SECONDS:-5}" - SCANNER__EVENTS__MAXSTREAMLENGTH: "${SCANNER_EVENTS_MAX_STREAM_LENGTH:-10000}" - ports: - - "${SCANNER_WEB_PORT:-8444}:8444" - networks: - - stellaops - - frontdoor - labels: *release-labels - +x-release-labels: &release-labels + com.stellaops.release.version: "2025.09.2" + com.stellaops.release.channel: "stable" + com.stellaops.profile: "prod" + +networks: + stellaops: + driver: bridge + frontdoor: + external: true + name: ${FRONTDOOR_NETWORK:-stellaops_frontdoor} + +volumes: + mongo-data: + minio-data: + rustfs-data: + concelier-jobs: + nats-data: + +services: + 
mongo: + image: docker.io/library/mongo@sha256:c258b26dbb7774f97f52aff52231ca5f228273a84329c5f5e451c3739457db49 + command: ["mongod", "--bind_ip_all"] + restart: unless-stopped + environment: + MONGO_INITDB_ROOT_USERNAME: "${MONGO_INITDB_ROOT_USERNAME}" + MONGO_INITDB_ROOT_PASSWORD: "${MONGO_INITDB_ROOT_PASSWORD}" + volumes: + - mongo-data:/data/db + networks: + - stellaops + labels: *release-labels + + minio: + image: docker.io/minio/minio@sha256:14cea493d9a34af32f524e538b8346cf79f3321eff8e708c1e2960462bd8936e + command: ["server", "/data", "--console-address", ":9001"] + restart: unless-stopped + environment: + MINIO_ROOT_USER: "${MINIO_ROOT_USER}" + MINIO_ROOT_PASSWORD: "${MINIO_ROOT_PASSWORD}" + volumes: + - minio-data:/data + ports: + - "${MINIO_CONSOLE_PORT:-9001}:9001" + networks: + - stellaops + labels: *release-labels + + rustfs: + image: registry.stella-ops.org/stellaops/rustfs:2025.10.0-edge + command: ["serve", "--listen", "0.0.0.0:8080", "--root", "/data"] + restart: unless-stopped + environment: + RUSTFS__LOG__LEVEL: info + RUSTFS__STORAGE__PATH: /data + volumes: + - rustfs-data:/data + ports: + - "${RUSTFS_HTTP_PORT:-8080}:8080" + networks: + - stellaops + labels: *release-labels + + nats: + image: docker.io/library/nats@sha256:c82559e4476289481a8a5196e675ebfe67eea81d95e5161e3e78eccfe766608e + command: + - "-js" + - "-sd" + - /data + restart: unless-stopped + ports: + - "${NATS_CLIENT_PORT:-4222}:4222" + volumes: + - nats-data:/data + networks: + - stellaops + labels: *release-labels + + authority: + image: registry.stella-ops.org/stellaops/authority@sha256:b0348bad1d0b401cc3c71cb40ba034c8043b6c8874546f90d4783c9dbfcc0bf5 + restart: unless-stopped + depends_on: + - mongo + environment: + STELLAOPS_AUTHORITY__ISSUER: "${AUTHORITY_ISSUER}" + STELLAOPS_AUTHORITY__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017" + STELLAOPS_AUTHORITY__PLUGINDIRECTORIES__0: "/app/plugins" + STELLAOPS_AUTHORITY__PLUGINS__CONFIGURATIONDIRECTORY: "/app/etc/authority.plugins" + volumes: + - ../../etc/authority.yaml:/etc/authority.yaml:ro + - ../../etc/authority.plugins:/app/etc/authority.plugins:ro + ports: + - "${AUTHORITY_PORT:-8440}:8440" + networks: + - stellaops + - frontdoor + labels: *release-labels + + signer: + image: registry.stella-ops.org/stellaops/signer@sha256:8ad574e61f3a9e9bda8a58eb2700ae46813284e35a150b1137bc7c2b92ac0f2e + restart: unless-stopped + depends_on: + - authority + environment: + SIGNER__AUTHORITY__BASEURL: "https://authority:8440" + SIGNER__POE__INTROSPECTURL: "${SIGNER_POE_INTROSPECT_URL}" + SIGNER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017" + ports: + - "${SIGNER_PORT:-8441}:8441" + networks: + - stellaops + - frontdoor + labels: *release-labels + + attestor: + image: registry.stella-ops.org/stellaops/attestor@sha256:0534985f978b0b5d220d73c96fddd962cd9135f616811cbe3bff4666c5af568f + restart: unless-stopped + depends_on: + - signer + environment: + ATTESTOR__SIGNER__BASEURL: "https://signer:8441" + ATTESTOR__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017" + ports: + - "${ATTESTOR_PORT:-8442}:8442" + networks: + - stellaops + - frontdoor + labels: *release-labels + + concelier: + image: registry.stella-ops.org/stellaops/concelier@sha256:c58cdcaee1d266d68d498e41110a589dd204b487d37381096bd61ab345a867c5 + restart: unless-stopped + depends_on: + - mongo + - minio + environment: + 
CONCELIER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017" + CONCELIER__STORAGE__S3__ENDPOINT: "http://minio:9000" + CONCELIER__STORAGE__S3__ACCESSKEYID: "${MINIO_ROOT_USER}" + CONCELIER__STORAGE__S3__SECRETACCESSKEY: "${MINIO_ROOT_PASSWORD}" + CONCELIER__AUTHORITY__BASEURL: "https://authority:8440" + volumes: + - concelier-jobs:/var/lib/concelier/jobs + ports: + - "${CONCELIER_PORT:-8445}:8445" + networks: + - stellaops + - frontdoor + labels: *release-labels + + scanner-web: + image: registry.stella-ops.org/stellaops/scanner-web@sha256:14b23448c3f9586a9156370b3e8c1991b61907efa666ca37dd3aaed1e79fe3b7 + restart: unless-stopped + depends_on: + - concelier + - rustfs + - nats + environment: + SCANNER__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017" + SCANNER__ARTIFACTSTORE__DRIVER: "rustfs" + SCANNER__ARTIFACTSTORE__ENDPOINT: "http://rustfs:8080/api/v1" + SCANNER__ARTIFACTSTORE__BUCKET: "scanner-artifacts" + SCANNER__ARTIFACTSTORE__TIMEOUTSECONDS: "30" + SCANNER__QUEUE__BROKER: "${SCANNER_QUEUE_BROKER}" + SCANNER__EVENTS__ENABLED: "${SCANNER_EVENTS_ENABLED:-true}" + SCANNER__EVENTS__DRIVER: "${SCANNER_EVENTS_DRIVER:-redis}" + SCANNER__EVENTS__DSN: "${SCANNER_EVENTS_DSN:-}" + SCANNER__EVENTS__STREAM: "${SCANNER_EVENTS_STREAM:-stella.events}" + SCANNER__EVENTS__PUBLISHTIMEOUTSECONDS: "${SCANNER_EVENTS_PUBLISH_TIMEOUT_SECONDS:-5}" + SCANNER__EVENTS__MAXSTREAMLENGTH: "${SCANNER_EVENTS_MAX_STREAM_LENGTH:-10000}" + ports: + - "${SCANNER_WEB_PORT:-8444}:8444" + networks: + - stellaops + - frontdoor + labels: *release-labels + scanner-worker: image: registry.stella-ops.org/stellaops/scanner-worker@sha256:32e25e76386eb9ea8bee0a1ad546775db9a2df989fab61ac877e351881960dab restart: unless-stopped @@ -212,46 +212,46 @@ services: networks: - stellaops labels: *release-labels - - notify-web: - image: ${NOTIFY_WEB_IMAGE:-registry.stella-ops.org/stellaops/notify-web:2025.09.2} - restart: unless-stopped - depends_on: - - mongo - - authority - environment: - DOTNET_ENVIRONMENT: Production - volumes: - - ../../etc/notify.prod.yaml:/app/etc/notify.yaml:ro - ports: - - "${NOTIFY_WEB_PORT:-8446}:8446" - networks: - - stellaops - - frontdoor - labels: *release-labels - - excititor: - image: registry.stella-ops.org/stellaops/excititor@sha256:59022e2016aebcef5c856d163ae705755d3f81949d41195256e935ef40a627fa - restart: unless-stopped - depends_on: - - concelier - environment: - EXCITITOR__CONCELIER__BASEURL: "https://concelier:8445" - EXCITITOR__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017" - networks: - - stellaops - labels: *release-labels - - web-ui: - image: registry.stella-ops.org/stellaops/web-ui@sha256:10d924808c48e4353e3a241da62eb7aefe727a1d6dc830eb23a8e181013b3a23 - restart: unless-stopped - depends_on: - - scanner-web - environment: - STELLAOPS_UI__BACKEND__BASEURL: "https://scanner-web:8444" - ports: - - "${UI_PORT:-8443}:8443" - networks: - - stellaops - - frontdoor - labels: *release-labels + + notify-web: + image: ${NOTIFY_WEB_IMAGE:-registry.stella-ops.org/stellaops/notify-web:2025.09.2} + restart: unless-stopped + depends_on: + - mongo + - authority + environment: + DOTNET_ENVIRONMENT: Production + volumes: + - ../../etc/notify.prod.yaml:/app/etc/notify.yaml:ro + ports: + - "${NOTIFY_WEB_PORT:-8446}:8446" + networks: + - stellaops + - frontdoor + labels: *release-labels + + excititor: + image: 
registry.stella-ops.org/stellaops/excititor@sha256:59022e2016aebcef5c856d163ae705755d3f81949d41195256e935ef40a627fa + restart: unless-stopped + depends_on: + - concelier + environment: + EXCITITOR__CONCELIER__BASEURL: "https://concelier:8445" + EXCITITOR__STORAGE__MONGO__CONNECTIONSTRING: "mongodb://${MONGO_INITDB_ROOT_USERNAME}:${MONGO_INITDB_ROOT_PASSWORD}@mongo:27017" + networks: + - stellaops + labels: *release-labels + + web-ui: + image: registry.stella-ops.org/stellaops/web-ui@sha256:10d924808c48e4353e3a241da62eb7aefe727a1d6dc830eb23a8e181013b3a23 + restart: unless-stopped + depends_on: + - scanner-web + environment: + STELLAOPS_UI__BACKEND__BASEURL: "https://scanner-web:8444" + ports: + - "${UI_PORT:-8443}:8443" + networks: + - stellaops + - frontdoor + labels: *release-labels diff --git a/deploy/compose/docker-compose.telemetry-storage.yaml b/deploy/compose/docker-compose.telemetry-storage.yaml index cb9462f6..aa2ee148 100644 --- a/deploy/compose/docker-compose.telemetry-storage.yaml +++ b/deploy/compose/docker-compose.telemetry-storage.yaml @@ -1,57 +1,57 @@ -version: "3.9" - -services: - prometheus: - image: prom/prometheus:v2.53.0 - container_name: stellaops-prometheus - command: - - "--config.file=/etc/prometheus/prometheus.yaml" - volumes: - - ../telemetry/storage/prometheus.yaml:/etc/prometheus/prometheus.yaml:ro - - prometheus-data:/prometheus - - ../telemetry/certs:/etc/telemetry/tls:ro - - ../telemetry/storage/auth:/etc/telemetry/auth:ro - environment: - PROMETHEUS_COLLECTOR_TARGET: stellaops-otel-collector:9464 - ports: - - "9090:9090" - depends_on: - - tempo - - loki - - tempo: - image: grafana/tempo:2.5.0 - container_name: stellaops-tempo - command: - - "-config.file=/etc/tempo/tempo.yaml" - volumes: - - ../telemetry/storage/tempo.yaml:/etc/tempo/tempo.yaml:ro - - ../telemetry/storage/tenants/tempo-overrides.yaml:/etc/telemetry/tenants/tempo-overrides.yaml:ro - - ../telemetry/certs:/etc/telemetry/tls:ro - - tempo-data:/var/tempo - ports: - - "3200:3200" - environment: - TEMPO_ZONE: docker - - loki: - image: grafana/loki:3.1.0 - container_name: stellaops-loki - command: - - "-config.file=/etc/loki/loki.yaml" - volumes: - - ../telemetry/storage/loki.yaml:/etc/loki/loki.yaml:ro - - ../telemetry/storage/tenants/loki-overrides.yaml:/etc/telemetry/tenants/loki-overrides.yaml:ro - - ../telemetry/certs:/etc/telemetry/tls:ro - - loki-data:/var/loki - ports: - - "3100:3100" - -volumes: - prometheus-data: - tempo-data: - loki-data: - -networks: - default: - name: stellaops-telemetry +version: "3.9" + +services: + prometheus: + image: prom/prometheus:v2.53.0 + container_name: stellaops-prometheus + command: + - "--config.file=/etc/prometheus/prometheus.yaml" + volumes: + - ../telemetry/storage/prometheus.yaml:/etc/prometheus/prometheus.yaml:ro + - prometheus-data:/prometheus + - ../telemetry/certs:/etc/telemetry/tls:ro + - ../telemetry/storage/auth:/etc/telemetry/auth:ro + environment: + PROMETHEUS_COLLECTOR_TARGET: stellaops-otel-collector:9464 + ports: + - "9090:9090" + depends_on: + - tempo + - loki + + tempo: + image: grafana/tempo:2.5.0 + container_name: stellaops-tempo + command: + - "-config.file=/etc/tempo/tempo.yaml" + volumes: + - ../telemetry/storage/tempo.yaml:/etc/tempo/tempo.yaml:ro + - ../telemetry/storage/tenants/tempo-overrides.yaml:/etc/telemetry/tenants/tempo-overrides.yaml:ro + - ../telemetry/certs:/etc/telemetry/tls:ro + - tempo-data:/var/tempo + ports: + - "3200:3200" + environment: + TEMPO_ZONE: docker + + loki: + image: grafana/loki:3.1.0 + container_name: 
stellaops-loki + command: + - "-config.file=/etc/loki/loki.yaml" + volumes: + - ../telemetry/storage/loki.yaml:/etc/loki/loki.yaml:ro + - ../telemetry/storage/tenants/loki-overrides.yaml:/etc/telemetry/tenants/loki-overrides.yaml:ro + - ../telemetry/certs:/etc/telemetry/tls:ro + - loki-data:/var/loki + ports: + - "3100:3100" + +volumes: + prometheus-data: + tempo-data: + loki-data: + +networks: + default: + name: stellaops-telemetry diff --git a/deploy/compose/docker-compose.telemetry.yaml b/deploy/compose/docker-compose.telemetry.yaml index c94b6ac4..1d6fd07b 100644 --- a/deploy/compose/docker-compose.telemetry.yaml +++ b/deploy/compose/docker-compose.telemetry.yaml @@ -1,34 +1,34 @@ -version: "3.9" - -services: - otel-collector: - image: otel/opentelemetry-collector:0.105.0 - container_name: stellaops-otel-collector - command: - - "--config=/etc/otel-collector/config.yaml" - environment: - STELLAOPS_OTEL_TLS_CERT: /etc/otel-collector/tls/collector.crt - STELLAOPS_OTEL_TLS_KEY: /etc/otel-collector/tls/collector.key - STELLAOPS_OTEL_TLS_CA: /etc/otel-collector/tls/ca.crt - STELLAOPS_OTEL_PROMETHEUS_ENDPOINT: 0.0.0.0:9464 - STELLAOPS_OTEL_REQUIRE_CLIENT_CERT: "true" - STELLAOPS_TENANT_ID: dev - volumes: - - ../telemetry/otel-collector-config.yaml:/etc/otel-collector/config.yaml:ro - - ../telemetry/certs:/etc/otel-collector/tls:ro - ports: - - "4317:4317" # OTLP gRPC (mTLS) - - "4318:4318" # OTLP HTTP (mTLS) - - "9464:9464" # Prometheus exporter (mTLS) - - "13133:13133" # Health check - - "1777:1777" # pprof - healthcheck: - test: ["CMD", "curl", "-fsk", "--cert", "/etc/otel-collector/tls/client.crt", "--key", "/etc/otel-collector/tls/client.key", "--cacert", "/etc/otel-collector/tls/ca.crt", "https://localhost:13133/healthz"] - interval: 30s - start_period: 15s - timeout: 5s - retries: 3 - -networks: - default: - name: stellaops-telemetry +version: "3.9" + +services: + otel-collector: + image: otel/opentelemetry-collector:0.105.0 + container_name: stellaops-otel-collector + command: + - "--config=/etc/otel-collector/config.yaml" + environment: + STELLAOPS_OTEL_TLS_CERT: /etc/otel-collector/tls/collector.crt + STELLAOPS_OTEL_TLS_KEY: /etc/otel-collector/tls/collector.key + STELLAOPS_OTEL_TLS_CA: /etc/otel-collector/tls/ca.crt + STELLAOPS_OTEL_PROMETHEUS_ENDPOINT: 0.0.0.0:9464 + STELLAOPS_OTEL_REQUIRE_CLIENT_CERT: "true" + STELLAOPS_TENANT_ID: dev + volumes: + - ../telemetry/otel-collector-config.yaml:/etc/otel-collector/config.yaml:ro + - ../telemetry/certs:/etc/otel-collector/tls:ro + ports: + - "4317:4317" # OTLP gRPC (mTLS) + - "4318:4318" # OTLP HTTP (mTLS) + - "9464:9464" # Prometheus exporter (mTLS) + - "13133:13133" # Health check + - "1777:1777" # pprof + healthcheck: + test: ["CMD", "curl", "-fsk", "--cert", "/etc/otel-collector/tls/client.crt", "--key", "/etc/otel-collector/tls/client.key", "--cacert", "/etc/otel-collector/tls/ca.crt", "https://localhost:13133/healthz"] + interval: 30s + start_period: 15s + timeout: 5s + retries: 3 + +networks: + default: + name: stellaops-telemetry diff --git a/deploy/compose/env/prod.env.example b/deploy/compose/env/prod.env.example index 211ef8b5..79064dfd 100644 --- a/deploy/compose/env/prod.env.example +++ b/deploy/compose/env/prod.env.example @@ -1,33 +1,33 @@ -# Substitutions for docker-compose.prod.yaml -# ⚠️ Replace all placeholder secrets with values sourced from your secret manager. 
-MONGO_INITDB_ROOT_USERNAME=stellaops-prod -MONGO_INITDB_ROOT_PASSWORD=REPLACE_WITH_STRONG_PASSWORD -MINIO_ROOT_USER=stellaops-prod -MINIO_ROOT_PASSWORD=REPLACE_WITH_STRONG_PASSWORD -# Expose the MinIO console only to trusted operator networks. -MINIO_CONSOLE_PORT=39001 -RUSTFS_HTTP_PORT=8080 -AUTHORITY_ISSUER=https://authority.prod.stella-ops.org -AUTHORITY_PORT=8440 -SIGNER_POE_INTROSPECT_URL=https://licensing.prod.stella-ops.org/introspect -SIGNER_PORT=8441 -ATTESTOR_PORT=8442 -CONCELIER_PORT=8445 -SCANNER_WEB_PORT=8444 -UI_PORT=8443 -NATS_CLIENT_PORT=4222 -SCANNER_QUEUE_BROKER=nats://nats:4222 -# `true` enables signed scanner events for Notify ingestion. -SCANNER_EVENTS_ENABLED=true -SCANNER_EVENTS_DRIVER=redis -# Leave SCANNER_EVENTS_DSN empty to inherit the Redis queue DSN when SCANNER_QUEUE_BROKER uses redis://. -SCANNER_EVENTS_DSN= -SCANNER_EVENTS_STREAM=stella.events -SCANNER_EVENTS_PUBLISH_TIMEOUT_SECONDS=5 +# Substitutions for docker-compose.prod.yaml +# ⚠️ Replace all placeholder secrets with values sourced from your secret manager. +MONGO_INITDB_ROOT_USERNAME=stellaops-prod +MONGO_INITDB_ROOT_PASSWORD=REPLACE_WITH_STRONG_PASSWORD +MINIO_ROOT_USER=stellaops-prod +MINIO_ROOT_PASSWORD=REPLACE_WITH_STRONG_PASSWORD +# Expose the MinIO console only to trusted operator networks. +MINIO_CONSOLE_PORT=39001 +RUSTFS_HTTP_PORT=8080 +AUTHORITY_ISSUER=https://authority.prod.stella-ops.org +AUTHORITY_PORT=8440 +SIGNER_POE_INTROSPECT_URL=https://licensing.prod.stella-ops.org/introspect +SIGNER_PORT=8441 +ATTESTOR_PORT=8442 +CONCELIER_PORT=8445 +SCANNER_WEB_PORT=8444 +UI_PORT=8443 +NATS_CLIENT_PORT=4222 +SCANNER_QUEUE_BROKER=nats://nats:4222 +# `true` enables signed scanner events for Notify ingestion. +SCANNER_EVENTS_ENABLED=true +SCANNER_EVENTS_DRIVER=redis +# Leave SCANNER_EVENTS_DSN empty to inherit the Redis queue DSN when SCANNER_QUEUE_BROKER uses redis://. +SCANNER_EVENTS_DSN= +SCANNER_EVENTS_STREAM=stella.events +SCANNER_EVENTS_PUBLISH_TIMEOUT_SECONDS=5 SCANNER_EVENTS_MAX_STREAM_LENGTH=10000 SCHEDULER_QUEUE_KIND=Nats SCHEDULER_QUEUE_NATS_URL=nats://nats:4222 SCHEDULER_STORAGE_DATABASE=stellaops_scheduler SCHEDULER_SCANNER_BASEADDRESS=http://scanner-web:8444 -# External reverse proxy (Traefik, Envoy, etc.) that terminates TLS. -FRONTDOOR_NETWORK=stellaops_frontdoor +# External reverse proxy (Traefik, Envoy, etc.) that terminates TLS. 
+FRONTDOOR_NETWORK=stellaops_frontdoor diff --git a/deploy/helm/stellaops/files/otel-collector-config.yaml b/deploy/helm/stellaops/files/otel-collector-config.yaml index d5d0167e..2a401a65 100644 --- a/deploy/helm/stellaops/files/otel-collector-config.yaml +++ b/deploy/helm/stellaops/files/otel-collector-config.yaml @@ -1,64 +1,64 @@ -receivers: - otlp: - protocols: - grpc: - endpoint: 0.0.0.0:4317 - tls: - cert_file: ${STELLAOPS_OTEL_TLS_CERT:?STELLAOPS_OTEL_TLS_CERT not set} - key_file: ${STELLAOPS_OTEL_TLS_KEY:?STELLAOPS_OTEL_TLS_KEY not set} - client_ca_file: ${STELLAOPS_OTEL_TLS_CA:?STELLAOPS_OTEL_TLS_CA not set} - require_client_certificate: ${STELLAOPS_OTEL_REQUIRE_CLIENT_CERT:true} - http: - endpoint: 0.0.0.0:4318 - tls: - cert_file: ${STELLAOPS_OTEL_TLS_CERT:?STELLAOPS_OTEL_TLS_CERT not set} - key_file: ${STELLAOPS_OTEL_TLS_KEY:?STELLAOPS_OTEL_TLS_KEY not set} - client_ca_file: ${STELLAOPS_OTEL_TLS_CA:?STELLAOPS_OTEL_TLS_CA not set} - require_client_certificate: ${STELLAOPS_OTEL_REQUIRE_CLIENT_CERT:true} - -processors: - attributes/tenant-tag: - actions: - - key: tenant.id - action: insert - value: ${STELLAOPS_TENANT_ID:unknown} - batch: - send_batch_size: 1024 - timeout: 5s - -exporters: - logging: - verbosity: normal - prometheus: - endpoint: ${STELLAOPS_OTEL_PROMETHEUS_ENDPOINT:0.0.0.0:9464} - enable_open_metrics: true - metric_expiration: 5m - tls: - cert_file: ${STELLAOPS_OTEL_TLS_CERT:?STELLAOPS_OTEL_TLS_CERT not set} - key_file: ${STELLAOPS_OTEL_TLS_KEY:?STELLAOPS_OTEL_TLS_KEY not set} - client_ca_file: ${STELLAOPS_OTEL_TLS_CA:?STELLAOPS_OTEL_TLS_CA not set} - -extensions: - health_check: - endpoint: ${STELLAOPS_OTEL_HEALTH_ENDPOINT:0.0.0.0:13133} - pprof: - endpoint: ${STELLAOPS_OTEL_PPROF_ENDPOINT:0.0.0.0:1777} - -service: - telemetry: - logs: - level: ${STELLAOPS_OTEL_LOG_LEVEL:info} - extensions: [health_check, pprof] - pipelines: - traces: - receivers: [otlp] - processors: [attributes/tenant-tag, batch] - exporters: [logging] - metrics: - receivers: [otlp] - processors: [attributes/tenant-tag, batch] - exporters: [logging, prometheus] - logs: - receivers: [otlp] - processors: [attributes/tenant-tag, batch] - exporters: [logging] +receivers: + otlp: + protocols: + grpc: + endpoint: 0.0.0.0:4317 + tls: + cert_file: ${STELLAOPS_OTEL_TLS_CERT:?STELLAOPS_OTEL_TLS_CERT not set} + key_file: ${STELLAOPS_OTEL_TLS_KEY:?STELLAOPS_OTEL_TLS_KEY not set} + client_ca_file: ${STELLAOPS_OTEL_TLS_CA:?STELLAOPS_OTEL_TLS_CA not set} + require_client_certificate: ${STELLAOPS_OTEL_REQUIRE_CLIENT_CERT:true} + http: + endpoint: 0.0.0.0:4318 + tls: + cert_file: ${STELLAOPS_OTEL_TLS_CERT:?STELLAOPS_OTEL_TLS_CERT not set} + key_file: ${STELLAOPS_OTEL_TLS_KEY:?STELLAOPS_OTEL_TLS_KEY not set} + client_ca_file: ${STELLAOPS_OTEL_TLS_CA:?STELLAOPS_OTEL_TLS_CA not set} + require_client_certificate: ${STELLAOPS_OTEL_REQUIRE_CLIENT_CERT:true} + +processors: + attributes/tenant-tag: + actions: + - key: tenant.id + action: insert + value: ${STELLAOPS_TENANT_ID:unknown} + batch: + send_batch_size: 1024 + timeout: 5s + +exporters: + logging: + verbosity: normal + prometheus: + endpoint: ${STELLAOPS_OTEL_PROMETHEUS_ENDPOINT:0.0.0.0:9464} + enable_open_metrics: true + metric_expiration: 5m + tls: + cert_file: ${STELLAOPS_OTEL_TLS_CERT:?STELLAOPS_OTEL_TLS_CERT not set} + key_file: ${STELLAOPS_OTEL_TLS_KEY:?STELLAOPS_OTEL_TLS_KEY not set} + client_ca_file: ${STELLAOPS_OTEL_TLS_CA:?STELLAOPS_OTEL_TLS_CA not set} + +extensions: + health_check: + endpoint: ${STELLAOPS_OTEL_HEALTH_ENDPOINT:0.0.0.0:13133} + pprof: + 
endpoint: ${STELLAOPS_OTEL_PPROF_ENDPOINT:0.0.0.0:1777} + +service: + telemetry: + logs: + level: ${STELLAOPS_OTEL_LOG_LEVEL:info} + extensions: [health_check, pprof] + pipelines: + traces: + receivers: [otlp] + processors: [attributes/tenant-tag, batch] + exporters: [logging] + metrics: + receivers: [otlp] + processors: [attributes/tenant-tag, batch] + exporters: [logging, prometheus] + logs: + receivers: [otlp] + processors: [attributes/tenant-tag, batch] + exporters: [logging] diff --git a/deploy/helm/stellaops/templates/otel-collector.yaml b/deploy/helm/stellaops/templates/otel-collector.yaml index f4f10f34..9d52e949 100644 --- a/deploy/helm/stellaops/templates/otel-collector.yaml +++ b/deploy/helm/stellaops/templates/otel-collector.yaml @@ -1,121 +1,121 @@ -{{- if .Values.telemetry.collector.enabled }} -apiVersion: v1 -kind: ConfigMap -metadata: - name: {{ include "stellaops.telemetryCollector.fullname" . }} - labels: - {{- include "stellaops.labels" (dict "root" . "name" "otel-collector" "svc" (dict "class" "telemetry")) | nindent 4 }} -data: - config.yaml: | -{{ include "stellaops.telemetryCollector.config" . | indent 4 }} ---- -apiVersion: apps/v1 -kind: Deployment -metadata: - name: {{ include "stellaops.telemetryCollector.fullname" . }} - labels: - {{- include "stellaops.labels" (dict "root" . "name" "otel-collector" "svc" (dict "class" "telemetry")) | nindent 4 }} -spec: - replicas: {{ .Values.telemetry.collector.replicas | default 1 }} - selector: - matchLabels: - app.kubernetes.io/name: {{ include "stellaops.name" . | quote }} - app.kubernetes.io/component: "otel-collector" - template: - metadata: - labels: - app.kubernetes.io/name: {{ include "stellaops.name" . | quote }} - app.kubernetes.io/component: "otel-collector" - stellaops.profile: {{ .Values.global.profile | quote }} - spec: - containers: - - name: otel-collector - image: {{ .Values.telemetry.collector.image | default "otel/opentelemetry-collector:0.105.0" | quote }} - args: - - "--config=/etc/otel/config.yaml" - ports: - - name: otlp-grpc - containerPort: 4317 - - name: otlp-http - containerPort: 4318 - - name: metrics - containerPort: 9464 - - name: health - containerPort: 13133 - - name: pprof - containerPort: 1777 - env: - - name: STELLAOPS_OTEL_TLS_CERT - value: {{ .Values.telemetry.collector.tls.certPath | default "/etc/otel/tls/tls.crt" | quote }} - - name: STELLAOPS_OTEL_TLS_KEY - value: {{ .Values.telemetry.collector.tls.keyPath | default "/etc/otel/tls/tls.key" | quote }} - - name: STELLAOPS_OTEL_TLS_CA - value: {{ .Values.telemetry.collector.tls.caPath | default "/etc/otel/tls/ca.crt" | quote }} - - name: STELLAOPS_OTEL_PROMETHEUS_ENDPOINT - value: {{ .Values.telemetry.collector.prometheusEndpoint | default "0.0.0.0:9464" | quote }} - - name: STELLAOPS_OTEL_REQUIRE_CLIENT_CERT - value: {{ .Values.telemetry.collector.requireClientCert | default true | quote }} - - name: STELLAOPS_TENANT_ID - value: {{ .Values.telemetry.collector.defaultTenant | default "unknown" | quote }} - - name: STELLAOPS_OTEL_LOG_LEVEL - value: {{ .Values.telemetry.collector.logLevel | default "info" | quote }} - volumeMounts: - - name: config - mountPath: /etc/otel/config.yaml - subPath: config.yaml - readOnly: true - - name: tls - mountPath: /etc/otel/tls - readOnly: true - livenessProbe: - httpGet: - scheme: HTTPS - port: health - path: /healthz - initialDelaySeconds: 10 - periodSeconds: 30 - readinessProbe: - httpGet: - scheme: HTTPS - port: health - path: /healthz - initialDelaySeconds: 5 - periodSeconds: 15 -{{- with 
.Values.telemetry.collector.resources }} - resources: -{{ toYaml . | indent 12 }} -{{- end }} - volumes: - - name: config - configMap: - name: {{ include "stellaops.telemetryCollector.fullname" . }} - - name: tls - secret: - secretName: {{ .Values.telemetry.collector.tls.secretName | required "telemetry.collector.tls.secretName is required" }} -{{- if .Values.telemetry.collector.tls.items }} - items: -{{ toYaml .Values.telemetry.collector.tls.items | indent 14 }} -{{- end }} ---- -apiVersion: v1 -kind: Service -metadata: - name: {{ include "stellaops.telemetryCollector.fullname" . }} - labels: - {{- include "stellaops.labels" (dict "root" . "name" "otel-collector" "svc" (dict "class" "telemetry")) | nindent 4 }} -spec: - type: ClusterIP - selector: - app.kubernetes.io/name: {{ include "stellaops.name" . | quote }} - app.kubernetes.io/component: "otel-collector" - ports: - - name: otlp-grpc - port: {{ .Values.telemetry.collector.service.grpcPort | default 4317 }} - targetPort: otlp-grpc - - name: otlp-http - port: {{ .Values.telemetry.collector.service.httpPort | default 4318 }} - targetPort: otlp-http - - name: metrics - port: {{ .Values.telemetry.collector.service.metricsPort | default 9464 }} - targetPort: metrics -{{- end }} +{{- if .Values.telemetry.collector.enabled }} +apiVersion: v1 +kind: ConfigMap +metadata: + name: {{ include "stellaops.telemetryCollector.fullname" . }} + labels: + {{- include "stellaops.labels" (dict "root" . "name" "otel-collector" "svc" (dict "class" "telemetry")) | nindent 4 }} +data: + config.yaml: | +{{ include "stellaops.telemetryCollector.config" . | indent 4 }} +--- +apiVersion: apps/v1 +kind: Deployment +metadata: + name: {{ include "stellaops.telemetryCollector.fullname" . }} + labels: + {{- include "stellaops.labels" (dict "root" . "name" "otel-collector" "svc" (dict "class" "telemetry")) | nindent 4 }} +spec: + replicas: {{ .Values.telemetry.collector.replicas | default 1 }} + selector: + matchLabels: + app.kubernetes.io/name: {{ include "stellaops.name" . | quote }} + app.kubernetes.io/component: "otel-collector" + template: + metadata: + labels: + app.kubernetes.io/name: {{ include "stellaops.name" . 
| quote }} + app.kubernetes.io/component: "otel-collector" + stellaops.profile: {{ .Values.global.profile | quote }} + spec: + containers: + - name: otel-collector + image: {{ .Values.telemetry.collector.image | default "otel/opentelemetry-collector:0.105.0" | quote }} + args: + - "--config=/etc/otel/config.yaml" + ports: + - name: otlp-grpc + containerPort: 4317 + - name: otlp-http + containerPort: 4318 + - name: metrics + containerPort: 9464 + - name: health + containerPort: 13133 + - name: pprof + containerPort: 1777 + env: + - name: STELLAOPS_OTEL_TLS_CERT + value: {{ .Values.telemetry.collector.tls.certPath | default "/etc/otel/tls/tls.crt" | quote }} + - name: STELLAOPS_OTEL_TLS_KEY + value: {{ .Values.telemetry.collector.tls.keyPath | default "/etc/otel/tls/tls.key" | quote }} + - name: STELLAOPS_OTEL_TLS_CA + value: {{ .Values.telemetry.collector.tls.caPath | default "/etc/otel/tls/ca.crt" | quote }} + - name: STELLAOPS_OTEL_PROMETHEUS_ENDPOINT + value: {{ .Values.telemetry.collector.prometheusEndpoint | default "0.0.0.0:9464" | quote }} + - name: STELLAOPS_OTEL_REQUIRE_CLIENT_CERT + value: {{ .Values.telemetry.collector.requireClientCert | default true | quote }} + - name: STELLAOPS_TENANT_ID + value: {{ .Values.telemetry.collector.defaultTenant | default "unknown" | quote }} + - name: STELLAOPS_OTEL_LOG_LEVEL + value: {{ .Values.telemetry.collector.logLevel | default "info" | quote }} + volumeMounts: + - name: config + mountPath: /etc/otel/config.yaml + subPath: config.yaml + readOnly: true + - name: tls + mountPath: /etc/otel/tls + readOnly: true + livenessProbe: + httpGet: + scheme: HTTPS + port: health + path: /healthz + initialDelaySeconds: 10 + periodSeconds: 30 + readinessProbe: + httpGet: + scheme: HTTPS + port: health + path: /healthz + initialDelaySeconds: 5 + periodSeconds: 15 +{{- with .Values.telemetry.collector.resources }} + resources: +{{ toYaml . | indent 12 }} +{{- end }} + volumes: + - name: config + configMap: + name: {{ include "stellaops.telemetryCollector.fullname" . }} + - name: tls + secret: + secretName: {{ .Values.telemetry.collector.tls.secretName | required "telemetry.collector.tls.secretName is required" }} +{{- if .Values.telemetry.collector.tls.items }} + items: +{{ toYaml .Values.telemetry.collector.tls.items | indent 14 }} +{{- end }} +--- +apiVersion: v1 +kind: Service +metadata: + name: {{ include "stellaops.telemetryCollector.fullname" . }} + labels: + {{- include "stellaops.labels" (dict "root" . "name" "otel-collector" "svc" (dict "class" "telemetry")) | nindent 4 }} +spec: + type: ClusterIP + selector: + app.kubernetes.io/name: {{ include "stellaops.name" . 
| quote }} + app.kubernetes.io/component: "otel-collector" + ports: + - name: otlp-grpc + port: {{ .Values.telemetry.collector.service.grpcPort | default 4317 }} + targetPort: otlp-grpc + - name: otlp-http + port: {{ .Values.telemetry.collector.service.httpPort | default 4318 }} + targetPort: otlp-http + - name: metrics + port: {{ .Values.telemetry.collector.service.metricsPort | default 9464 }} + targetPort: metrics +{{- end }} diff --git a/deploy/helm/stellaops/values-prod.yaml b/deploy/helm/stellaops/values-prod.yaml index 03efbad9..bb1f5768 100644 --- a/deploy/helm/stellaops/values-prod.yaml +++ b/deploy/helm/stellaops/values-prod.yaml @@ -1,221 +1,221 @@ -global: - profile: prod - release: - version: "2025.09.2" - channel: stable - manifestSha256: "dc3c8fe1ab83941c838ccc5a8a5862f7ddfa38c2078e580b5649db26554565b7" - image: - pullPolicy: IfNotPresent - labels: - stellaops.io/channel: stable - stellaops.io/profile: prod - -configMaps: - notify-config: - data: - notify.yaml: | - storage: - driver: mongo - connectionString: "mongodb://stellaops-mongo:27017" - database: "stellaops_notify_prod" - commandTimeoutSeconds: 45 - - authority: - enabled: true - issuer: "https://authority.prod.stella-ops.org" - metadataAddress: "https://authority.prod.stella-ops.org/.well-known/openid-configuration" - requireHttpsMetadata: true - allowAnonymousFallback: false - backchannelTimeoutSeconds: 30 - tokenClockSkewSeconds: 60 - audiences: - - notify - readScope: notify.read - adminScope: notify.admin - - api: - basePath: "/api/v1/notify" - internalBasePath: "/internal/notify" - tenantHeader: "X-StellaOps-Tenant" - - plugins: - baseDirectory: "/opt/stellaops" - directory: "plugins/notify" - searchPatterns: - - "StellaOps.Notify.Connectors.*.dll" - orderedPlugins: - - StellaOps.Notify.Connectors.Slack - - StellaOps.Notify.Connectors.Teams - - StellaOps.Notify.Connectors.Email - - StellaOps.Notify.Connectors.Webhook - - telemetry: - enableRequestLogging: true - minimumLogLevel: Information -services: - authority: - image: registry.stella-ops.org/stellaops/authority@sha256:b0348bad1d0b401cc3c71cb40ba034c8043b6c8874546f90d4783c9dbfcc0bf5 - service: - port: 8440 - env: - STELLAOPS_AUTHORITY__ISSUER: "https://authority.prod.stella-ops.org" - STELLAOPS_AUTHORITY__PLUGINDIRECTORIES__0: "/app/plugins" - STELLAOPS_AUTHORITY__PLUGINS__CONFIGURATIONDIRECTORY: "/app/etc/authority.plugins" - envFrom: - - secretRef: - name: stellaops-prod-core - signer: - image: registry.stella-ops.org/stellaops/signer@sha256:8ad574e61f3a9e9bda8a58eb2700ae46813284e35a150b1137bc7c2b92ac0f2e - service: - port: 8441 - env: - SIGNER__AUTHORITY__BASEURL: "https://stellaops-authority:8440" - SIGNER__POE__INTROSPECTURL: "https://licensing.prod.stella-ops.org/introspect" - envFrom: - - secretRef: - name: stellaops-prod-core - attestor: - image: registry.stella-ops.org/stellaops/attestor@sha256:0534985f978b0b5d220d73c96fddd962cd9135f616811cbe3bff4666c5af568f - service: - port: 8442 - env: - ATTESTOR__SIGNER__BASEURL: "https://stellaops-signer:8441" - envFrom: - - secretRef: - name: stellaops-prod-core - concelier: - image: registry.stella-ops.org/stellaops/concelier@sha256:c58cdcaee1d266d68d498e41110a589dd204b487d37381096bd61ab345a867c5 - service: - port: 8445 - env: - CONCELIER__STORAGE__S3__ENDPOINT: "http://stellaops-minio:9000" - CONCELIER__AUTHORITY__BASEURL: "https://stellaops-authority:8440" - envFrom: - - secretRef: - name: stellaops-prod-core - volumeMounts: - - name: concelier-jobs - mountPath: /var/lib/concelier/jobs - volumeClaims: - - 
name: concelier-jobs - claimName: stellaops-concelier-jobs - scanner-web: - image: registry.stella-ops.org/stellaops/scanner-web@sha256:14b23448c3f9586a9156370b3e8c1991b61907efa666ca37dd3aaed1e79fe3b7 - service: - port: 8444 - env: - SCANNER__ARTIFACTSTORE__DRIVER: "rustfs" - SCANNER__ARTIFACTSTORE__ENDPOINT: "http://stellaops-rustfs:8080/api/v1" - SCANNER__ARTIFACTSTORE__BUCKET: "scanner-artifacts" - SCANNER__ARTIFACTSTORE__TIMEOUTSECONDS: "30" - SCANNER__QUEUE__BROKER: "nats://stellaops-nats:4222" - SCANNER__EVENTS__ENABLED: "true" - SCANNER__EVENTS__DRIVER: "redis" - SCANNER__EVENTS__DSN: "" - SCANNER__EVENTS__STREAM: "stella.events" - SCANNER__EVENTS__PUBLISHTIMEOUTSECONDS: "5" - SCANNER__EVENTS__MAXSTREAMLENGTH: "10000" - envFrom: - - secretRef: - name: stellaops-prod-core - scanner-worker: - image: registry.stella-ops.org/stellaops/scanner-worker@sha256:32e25e76386eb9ea8bee0a1ad546775db9a2df989fab61ac877e351881960dab - replicas: 3 - env: - SCANNER__ARTIFACTSTORE__DRIVER: "rustfs" - SCANNER__ARTIFACTSTORE__ENDPOINT: "http://stellaops-rustfs:8080/api/v1" - SCANNER__ARTIFACTSTORE__BUCKET: "scanner-artifacts" - SCANNER__ARTIFACTSTORE__TIMEOUTSECONDS: "30" - SCANNER__QUEUE__BROKER: "nats://stellaops-nats:4222" - SCANNER__EVENTS__ENABLED: "true" - SCANNER__EVENTS__DRIVER: "redis" - SCANNER__EVENTS__DSN: "" - SCANNER__EVENTS__STREAM: "stella.events" - SCANNER__EVENTS__PUBLISHTIMEOUTSECONDS: "5" - SCANNER__EVENTS__MAXSTREAMLENGTH: "10000" - envFrom: - - secretRef: - name: stellaops-prod-core - notify-web: - image: registry.stella-ops.org/stellaops/notify-web:2025.09.2 - service: - port: 8446 - env: - DOTNET_ENVIRONMENT: Production - envFrom: - - secretRef: - name: stellaops-prod-notify - configMounts: - - name: notify-config - mountPath: /app/etc/notify.yaml - subPath: notify.yaml - configMap: notify-config - excititor: - image: registry.stella-ops.org/stellaops/excititor@sha256:59022e2016aebcef5c856d163ae705755d3f81949d41195256e935ef40a627fa - env: - EXCITITOR__CONCELIER__BASEURL: "https://stellaops-concelier:8445" - envFrom: - - secretRef: - name: stellaops-prod-core - web-ui: - image: registry.stella-ops.org/stellaops/web-ui@sha256:10d924808c48e4353e3a241da62eb7aefe727a1d6dc830eb23a8e181013b3a23 - service: - port: 8443 - env: - STELLAOPS_UI__BACKEND__BASEURL: "https://stellaops-scanner-web:8444" - mongo: - class: infrastructure - image: docker.io/library/mongo@sha256:c258b26dbb7774f97f52aff52231ca5f228273a84329c5f5e451c3739457db49 - service: - port: 27017 - command: - - mongod - - --bind_ip_all - envFrom: - - secretRef: - name: stellaops-prod-mongo - volumeMounts: - - name: mongo-data - mountPath: /data/db - volumeClaims: - - name: mongo-data - claimName: stellaops-mongo-data - minio: - class: infrastructure - image: docker.io/minio/minio@sha256:14cea493d9a34af32f524e538b8346cf79f3321eff8e708c1e2960462bd8936e - service: - port: 9000 - command: - - server - - /data - - --console-address - - :9001 - envFrom: - - secretRef: - name: stellaops-prod-minio - volumeMounts: - - name: minio-data - mountPath: /data - volumeClaims: - - name: minio-data - claimName: stellaops-minio-data - rustfs: - class: infrastructure - image: registry.stella-ops.org/stellaops/rustfs:2025.10.0-edge - service: - port: 8080 - command: - - serve - - --listen - - 0.0.0.0:8080 - - --root - - /data - env: - RUSTFS__LOG__LEVEL: info - RUSTFS__STORAGE__PATH: /data - volumeMounts: - - name: rustfs-data - mountPath: /data - volumeClaims: - - name: rustfs-data - claimName: stellaops-rustfs-data +global: + profile: prod + 
release: + version: "2025.09.2" + channel: stable + manifestSha256: "dc3c8fe1ab83941c838ccc5a8a5862f7ddfa38c2078e580b5649db26554565b7" + image: + pullPolicy: IfNotPresent + labels: + stellaops.io/channel: stable + stellaops.io/profile: prod + +configMaps: + notify-config: + data: + notify.yaml: | + storage: + driver: mongo + connectionString: "mongodb://stellaops-mongo:27017" + database: "stellaops_notify_prod" + commandTimeoutSeconds: 45 + + authority: + enabled: true + issuer: "https://authority.prod.stella-ops.org" + metadataAddress: "https://authority.prod.stella-ops.org/.well-known/openid-configuration" + requireHttpsMetadata: true + allowAnonymousFallback: false + backchannelTimeoutSeconds: 30 + tokenClockSkewSeconds: 60 + audiences: + - notify + readScope: notify.read + adminScope: notify.admin + + api: + basePath: "/api/v1/notify" + internalBasePath: "/internal/notify" + tenantHeader: "X-StellaOps-Tenant" + + plugins: + baseDirectory: "/opt/stellaops" + directory: "plugins/notify" + searchPatterns: + - "StellaOps.Notify.Connectors.*.dll" + orderedPlugins: + - StellaOps.Notify.Connectors.Slack + - StellaOps.Notify.Connectors.Teams + - StellaOps.Notify.Connectors.Email + - StellaOps.Notify.Connectors.Webhook + + telemetry: + enableRequestLogging: true + minimumLogLevel: Information +services: + authority: + image: registry.stella-ops.org/stellaops/authority@sha256:b0348bad1d0b401cc3c71cb40ba034c8043b6c8874546f90d4783c9dbfcc0bf5 + service: + port: 8440 + env: + STELLAOPS_AUTHORITY__ISSUER: "https://authority.prod.stella-ops.org" + STELLAOPS_AUTHORITY__PLUGINDIRECTORIES__0: "/app/plugins" + STELLAOPS_AUTHORITY__PLUGINS__CONFIGURATIONDIRECTORY: "/app/etc/authority.plugins" + envFrom: + - secretRef: + name: stellaops-prod-core + signer: + image: registry.stella-ops.org/stellaops/signer@sha256:8ad574e61f3a9e9bda8a58eb2700ae46813284e35a150b1137bc7c2b92ac0f2e + service: + port: 8441 + env: + SIGNER__AUTHORITY__BASEURL: "https://stellaops-authority:8440" + SIGNER__POE__INTROSPECTURL: "https://licensing.prod.stella-ops.org/introspect" + envFrom: + - secretRef: + name: stellaops-prod-core + attestor: + image: registry.stella-ops.org/stellaops/attestor@sha256:0534985f978b0b5d220d73c96fddd962cd9135f616811cbe3bff4666c5af568f + service: + port: 8442 + env: + ATTESTOR__SIGNER__BASEURL: "https://stellaops-signer:8441" + envFrom: + - secretRef: + name: stellaops-prod-core + concelier: + image: registry.stella-ops.org/stellaops/concelier@sha256:c58cdcaee1d266d68d498e41110a589dd204b487d37381096bd61ab345a867c5 + service: + port: 8445 + env: + CONCELIER__STORAGE__S3__ENDPOINT: "http://stellaops-minio:9000" + CONCELIER__AUTHORITY__BASEURL: "https://stellaops-authority:8440" + envFrom: + - secretRef: + name: stellaops-prod-core + volumeMounts: + - name: concelier-jobs + mountPath: /var/lib/concelier/jobs + volumeClaims: + - name: concelier-jobs + claimName: stellaops-concelier-jobs + scanner-web: + image: registry.stella-ops.org/stellaops/scanner-web@sha256:14b23448c3f9586a9156370b3e8c1991b61907efa666ca37dd3aaed1e79fe3b7 + service: + port: 8444 + env: + SCANNER__ARTIFACTSTORE__DRIVER: "rustfs" + SCANNER__ARTIFACTSTORE__ENDPOINT: "http://stellaops-rustfs:8080/api/v1" + SCANNER__ARTIFACTSTORE__BUCKET: "scanner-artifacts" + SCANNER__ARTIFACTSTORE__TIMEOUTSECONDS: "30" + SCANNER__QUEUE__BROKER: "nats://stellaops-nats:4222" + SCANNER__EVENTS__ENABLED: "true" + SCANNER__EVENTS__DRIVER: "redis" + SCANNER__EVENTS__DSN: "" + SCANNER__EVENTS__STREAM: "stella.events" + SCANNER__EVENTS__PUBLISHTIMEOUTSECONDS: "5" + 
SCANNER__EVENTS__MAXSTREAMLENGTH: "10000" + envFrom: + - secretRef: + name: stellaops-prod-core + scanner-worker: + image: registry.stella-ops.org/stellaops/scanner-worker@sha256:32e25e76386eb9ea8bee0a1ad546775db9a2df989fab61ac877e351881960dab + replicas: 3 + env: + SCANNER__ARTIFACTSTORE__DRIVER: "rustfs" + SCANNER__ARTIFACTSTORE__ENDPOINT: "http://stellaops-rustfs:8080/api/v1" + SCANNER__ARTIFACTSTORE__BUCKET: "scanner-artifacts" + SCANNER__ARTIFACTSTORE__TIMEOUTSECONDS: "30" + SCANNER__QUEUE__BROKER: "nats://stellaops-nats:4222" + SCANNER__EVENTS__ENABLED: "true" + SCANNER__EVENTS__DRIVER: "redis" + SCANNER__EVENTS__DSN: "" + SCANNER__EVENTS__STREAM: "stella.events" + SCANNER__EVENTS__PUBLISHTIMEOUTSECONDS: "5" + SCANNER__EVENTS__MAXSTREAMLENGTH: "10000" + envFrom: + - secretRef: + name: stellaops-prod-core + notify-web: + image: registry.stella-ops.org/stellaops/notify-web:2025.09.2 + service: + port: 8446 + env: + DOTNET_ENVIRONMENT: Production + envFrom: + - secretRef: + name: stellaops-prod-notify + configMounts: + - name: notify-config + mountPath: /app/etc/notify.yaml + subPath: notify.yaml + configMap: notify-config + excititor: + image: registry.stella-ops.org/stellaops/excititor@sha256:59022e2016aebcef5c856d163ae705755d3f81949d41195256e935ef40a627fa + env: + EXCITITOR__CONCELIER__BASEURL: "https://stellaops-concelier:8445" + envFrom: + - secretRef: + name: stellaops-prod-core + web-ui: + image: registry.stella-ops.org/stellaops/web-ui@sha256:10d924808c48e4353e3a241da62eb7aefe727a1d6dc830eb23a8e181013b3a23 + service: + port: 8443 + env: + STELLAOPS_UI__BACKEND__BASEURL: "https://stellaops-scanner-web:8444" + mongo: + class: infrastructure + image: docker.io/library/mongo@sha256:c258b26dbb7774f97f52aff52231ca5f228273a84329c5f5e451c3739457db49 + service: + port: 27017 + command: + - mongod + - --bind_ip_all + envFrom: + - secretRef: + name: stellaops-prod-mongo + volumeMounts: + - name: mongo-data + mountPath: /data/db + volumeClaims: + - name: mongo-data + claimName: stellaops-mongo-data + minio: + class: infrastructure + image: docker.io/minio/minio@sha256:14cea493d9a34af32f524e538b8346cf79f3321eff8e708c1e2960462bd8936e + service: + port: 9000 + command: + - server + - /data + - --console-address + - :9001 + envFrom: + - secretRef: + name: stellaops-prod-minio + volumeMounts: + - name: minio-data + mountPath: /data + volumeClaims: + - name: minio-data + claimName: stellaops-minio-data + rustfs: + class: infrastructure + image: registry.stella-ops.org/stellaops/rustfs:2025.10.0-edge + service: + port: 8080 + command: + - serve + - --listen + - 0.0.0.0:8080 + - --root + - /data + env: + RUSTFS__LOG__LEVEL: info + RUSTFS__STORAGE__PATH: /data + volumeMounts: + - name: rustfs-data + mountPath: /data + volumeClaims: + - name: rustfs-data + claimName: stellaops-rustfs-data diff --git a/deploy/helm/stellaops/values.yaml b/deploy/helm/stellaops/values.yaml index 581c0609..af20ed89 100644 --- a/deploy/helm/stellaops/values.yaml +++ b/deploy/helm/stellaops/values.yaml @@ -1,39 +1,39 @@ -global: - release: - version: "" - channel: "" - manifestSha256: "" - profile: "" - image: - pullPolicy: IfNotPresent - labels: {} - -telemetry: - collector: - enabled: false - replicas: 1 - image: otel/opentelemetry-collector:0.105.0 - requireClientCert: true - defaultTenant: unknown - logLevel: info - tls: - secretName: "" - certPath: /etc/otel/tls/tls.crt - keyPath: /etc/otel/tls/tls.key - caPath: /etc/otel/tls/ca.crt - items: - - key: tls.crt - path: tls.crt - - key: tls.key - path: tls.key - - key: 
ca.crt - path: ca.crt - service: - grpcPort: 4317 - httpPort: 4318 - metricsPort: 9464 - resources: {} - +global: + release: + version: "" + channel: "" + manifestSha256: "" + profile: "" + image: + pullPolicy: IfNotPresent + labels: {} + +telemetry: + collector: + enabled: false + replicas: 1 + image: otel/opentelemetry-collector:0.105.0 + requireClientCert: true + defaultTenant: unknown + logLevel: info + tls: + secretName: "" + certPath: /etc/otel/tls/tls.crt + keyPath: /etc/otel/tls/tls.key + caPath: /etc/otel/tls/ca.crt + items: + - key: tls.crt + path: tls.crt + - key: tls.key + path: tls.key + - key: ca.crt + path: ca.crt + service: + grpcPort: 4317 + httpPort: 4318 + metricsPort: 9464 + resources: {} + services: scheduler-worker: image: registry.stella-ops.org/stellaops/scheduler-worker:2025.10.0-edge diff --git a/deploy/telemetry/.gitignore b/deploy/telemetry/.gitignore index df912870..88259de6 100644 --- a/deploy/telemetry/.gitignore +++ b/deploy/telemetry/.gitignore @@ -1 +1 @@ -certs/ +certs/ diff --git a/deploy/telemetry/README.md b/deploy/telemetry/README.md index 926c9a36..6e992e9a 100644 --- a/deploy/telemetry/README.md +++ b/deploy/telemetry/README.md @@ -1,35 +1,35 @@ -# Telemetry Collector Assets - -These assets provision the default OpenTelemetry Collector instance required by -`DEVOPS-OBS-50-001`. The collector acts as the secured ingest point for traces, -metrics, and logs emitted by Stella Ops services. - -## Contents - -| File | Purpose | -| ---- | ------- | -| `otel-collector-config.yaml` | Baseline collector configuration (mutual TLS, OTLP receivers, Prometheus exporter). | -| `storage/prometheus.yaml` | Prometheus scrape configuration tuned for the collector and service tenants. | -| `storage/tempo.yaml` | Tempo configuration with multitenancy, WAL, and compaction settings. | -| `storage/loki.yaml` | Loki configuration enabling multitenant log ingestion with retention policies. | -| `storage/tenants/*.yaml` | Per-tenant overrides for Tempo and Loki rate/retention controls. | - -## Development workflow - -1. Generate development certificates (collector + client) using - `ops/devops/telemetry/generate_dev_tls.sh`. -2. Launch the collector via `docker compose -f docker-compose.telemetry.yaml up`. -3. Launch the storage backends (Prometheus, Tempo, Loki) via - `docker compose -f docker-compose.telemetry-storage.yaml up`. -4. Run the smoke test: `python ops/devops/telemetry/smoke_otel_collector.py`. -5. Explore the storage configuration (`storage/README.md`) to tune retention/limits. - -The smoke test sends OTLP traffic over TLS and asserts the collector accepted -traces, metrics, and logs by scraping the Prometheus metrics endpoint. - -## Kubernetes - -The Helm chart consumes the same configuration (see `values.yaml`). Provide TLS -material via a secret referenced by `telemetry.collector.tls.secretName`, -containing `ca.crt`, `tls.crt`, and `tls.key`. Client certificates are required -for ingestion and should be issued by the same CA. +# Telemetry Collector Assets + +These assets provision the default OpenTelemetry Collector instance required by +`DEVOPS-OBS-50-001`. The collector acts as the secured ingest point for traces, +metrics, and logs emitted by Stella Ops services. + +## Contents + +| File | Purpose | +| ---- | ------- | +| `otel-collector-config.yaml` | Baseline collector configuration (mutual TLS, OTLP receivers, Prometheus exporter). | +| `storage/prometheus.yaml` | Prometheus scrape configuration tuned for the collector and service tenants. 
| +| `storage/tempo.yaml` | Tempo configuration with multitenancy, WAL, and compaction settings. | +| `storage/loki.yaml` | Loki configuration enabling multitenant log ingestion with retention policies. | +| `storage/tenants/*.yaml` | Per-tenant overrides for Tempo and Loki rate/retention controls. | + +## Development workflow + +1. Generate development certificates (collector + client) using + `ops/devops/telemetry/generate_dev_tls.sh`. +2. Launch the collector via `docker compose -f docker-compose.telemetry.yaml up`. +3. Launch the storage backends (Prometheus, Tempo, Loki) via + `docker compose -f docker-compose.telemetry-storage.yaml up`. +4. Run the smoke test: `python ops/devops/telemetry/smoke_otel_collector.py`. +5. Explore the storage configuration (`storage/README.md`) to tune retention/limits. + +The smoke test sends OTLP traffic over TLS and asserts the collector accepted +traces, metrics, and logs by scraping the Prometheus metrics endpoint. + +## Kubernetes + +The Helm chart consumes the same configuration (see `values.yaml`). Provide TLS +material via a secret referenced by `telemetry.collector.tls.secretName`, +containing `ca.crt`, `tls.crt`, and `tls.key`. Client certificates are required +for ingestion and should be issued by the same CA. diff --git a/deploy/telemetry/otel-collector-config.yaml b/deploy/telemetry/otel-collector-config.yaml index bc693d4f..5cdf6908 100644 --- a/deploy/telemetry/otel-collector-config.yaml +++ b/deploy/telemetry/otel-collector-config.yaml @@ -1,67 +1,67 @@ -receivers: - otlp: - protocols: - grpc: - endpoint: 0.0.0.0:4317 - tls: - cert_file: ${STELLAOPS_OTEL_TLS_CERT:?STELLAOPS_OTEL_TLS_CERT not set} - key_file: ${STELLAOPS_OTEL_TLS_KEY:?STELLAOPS_OTEL_TLS_KEY not set} - client_ca_file: ${STELLAOPS_OTEL_TLS_CA:?STELLAOPS_OTEL_TLS_CA not set} - require_client_certificate: ${STELLAOPS_OTEL_REQUIRE_CLIENT_CERT:true} - http: - endpoint: 0.0.0.0:4318 - tls: - cert_file: ${STELLAOPS_OTEL_TLS_CERT:?STELLAOPS_OTEL_TLS_CERT not set} - key_file: ${STELLAOPS_OTEL_TLS_KEY:?STELLAOPS_OTEL_TLS_KEY not set} - client_ca_file: ${STELLAOPS_OTEL_TLS_CA:?STELLAOPS_OTEL_TLS_CA not set} - require_client_certificate: ${STELLAOPS_OTEL_REQUIRE_CLIENT_CERT:true} - -processors: - attributes/tenant-tag: - actions: - - key: tenant.id - action: insert - value: ${STELLAOPS_TENANT_ID:unknown} - batch: - send_batch_size: 1024 - timeout: 5s - -exporters: - logging: - verbosity: normal - prometheus: - endpoint: ${STELLAOPS_OTEL_PROMETHEUS_ENDPOINT:0.0.0.0:9464} - enable_open_metrics: true - metric_expiration: 5m - tls: - cert_file: ${STELLAOPS_OTEL_TLS_CERT:?STELLAOPS_OTEL_TLS_CERT not set} - key_file: ${STELLAOPS_OTEL_TLS_KEY:?STELLAOPS_OTEL_TLS_KEY not set} - client_ca_file: ${STELLAOPS_OTEL_TLS_CA:?STELLAOPS_OTEL_TLS_CA not set} -# Additional OTLP exporters can be configured by extending this section at runtime. -# For example, set STELLAOPS_OTEL_UPSTREAM_ENDPOINT and mount certificates, then -# add the exporter via a sidecar overlay. 
- -extensions: - health_check: - endpoint: ${STELLAOPS_OTEL_HEALTH_ENDPOINT:0.0.0.0:13133} - pprof: - endpoint: ${STELLAOPS_OTEL_PPROF_ENDPOINT:0.0.0.0:1777} - -service: - telemetry: - logs: - level: ${STELLAOPS_OTEL_LOG_LEVEL:info} - extensions: [health_check, pprof] - pipelines: - traces: - receivers: [otlp] - processors: [attributes/tenant-tag, batch] - exporters: [logging] - metrics: - receivers: [otlp] - processors: [attributes/tenant-tag, batch] - exporters: [logging, prometheus] - logs: - receivers: [otlp] - processors: [attributes/tenant-tag, batch] - exporters: [logging] +receivers: + otlp: + protocols: + grpc: + endpoint: 0.0.0.0:4317 + tls: + cert_file: ${STELLAOPS_OTEL_TLS_CERT:?STELLAOPS_OTEL_TLS_CERT not set} + key_file: ${STELLAOPS_OTEL_TLS_KEY:?STELLAOPS_OTEL_TLS_KEY not set} + client_ca_file: ${STELLAOPS_OTEL_TLS_CA:?STELLAOPS_OTEL_TLS_CA not set} + require_client_certificate: ${STELLAOPS_OTEL_REQUIRE_CLIENT_CERT:true} + http: + endpoint: 0.0.0.0:4318 + tls: + cert_file: ${STELLAOPS_OTEL_TLS_CERT:?STELLAOPS_OTEL_TLS_CERT not set} + key_file: ${STELLAOPS_OTEL_TLS_KEY:?STELLAOPS_OTEL_TLS_KEY not set} + client_ca_file: ${STELLAOPS_OTEL_TLS_CA:?STELLAOPS_OTEL_TLS_CA not set} + require_client_certificate: ${STELLAOPS_OTEL_REQUIRE_CLIENT_CERT:true} + +processors: + attributes/tenant-tag: + actions: + - key: tenant.id + action: insert + value: ${STELLAOPS_TENANT_ID:unknown} + batch: + send_batch_size: 1024 + timeout: 5s + +exporters: + logging: + verbosity: normal + prometheus: + endpoint: ${STELLAOPS_OTEL_PROMETHEUS_ENDPOINT:0.0.0.0:9464} + enable_open_metrics: true + metric_expiration: 5m + tls: + cert_file: ${STELLAOPS_OTEL_TLS_CERT:?STELLAOPS_OTEL_TLS_CERT not set} + key_file: ${STELLAOPS_OTEL_TLS_KEY:?STELLAOPS_OTEL_TLS_KEY not set} + client_ca_file: ${STELLAOPS_OTEL_TLS_CA:?STELLAOPS_OTEL_TLS_CA not set} +# Additional OTLP exporters can be configured by extending this section at runtime. +# For example, set STELLAOPS_OTEL_UPSTREAM_ENDPOINT and mount certificates, then +# add the exporter via a sidecar overlay. + +extensions: + health_check: + endpoint: ${STELLAOPS_OTEL_HEALTH_ENDPOINT:0.0.0.0:13133} + pprof: + endpoint: ${STELLAOPS_OTEL_PPROF_ENDPOINT:0.0.0.0:1777} + +service: + telemetry: + logs: + level: ${STELLAOPS_OTEL_LOG_LEVEL:info} + extensions: [health_check, pprof] + pipelines: + traces: + receivers: [otlp] + processors: [attributes/tenant-tag, batch] + exporters: [logging] + metrics: + receivers: [otlp] + processors: [attributes/tenant-tag, batch] + exporters: [logging, prometheus] + logs: + receivers: [otlp] + processors: [attributes/tenant-tag, batch] + exporters: [logging] diff --git a/deploy/telemetry/storage/README.md b/deploy/telemetry/storage/README.md index b730d5ed..b3e5899c 100644 --- a/deploy/telemetry/storage/README.md +++ b/deploy/telemetry/storage/README.md @@ -1,33 +1,33 @@ -# Telemetry Storage Stack - -Configuration snippets for the default StellaOps observability backends used in -staging and production environments. The stack comprises: - -- **Prometheus** for metrics (scraping the collector's Prometheus exporter) -- **Tempo** for traces (OTLP ingest via mTLS) -- **Loki** for logs (HTTP ingest with tenant isolation) - -## Files - -| Path | Description | -| ---- | ----------- | -| `prometheus.yaml` | Scrape configuration for the collector (mTLS + bearer token placeholder). | -| `tempo.yaml` | Tempo configuration with multitenancy enabled and local storage paths. 
| -| `loki.yaml` | Loki configuration enabling per-tenant overrides and boltdb-shipper storage. | -| `tenants/tempo-overrides.yaml` | Example tenant overrides for Tempo (retention, limits). | -| `tenants/loki-overrides.yaml` | Example tenant overrides for Loki (rate limits, retention). | -| `auth/` | Placeholder directory for Prometheus bearer token files (e.g., `token`). | - -These configurations are referenced by the Docker Compose overlay -(`deploy/compose/docker-compose.telemetry-storage.yaml`) and the staging rollout documented in -`docs/ops/telemetry-storage.md`. Adjust paths, credentials, and overrides before running in -connected environments. Place the Prometheus bearer token in `auth/token` when using the -Compose overlay (the directory contains a `.gitkeep` placeholder and is gitignored by default). - -## Security - -- Both Tempo and Loki require mutual TLS. -- Prometheus uses mTLS plus a bearer token that should be minted by Authority. -- Update the overrides files to enforce per-tenant retention/ingestion limits. - -For comprehensive deployment steps see `docs/ops/telemetry-storage.md`. +# Telemetry Storage Stack + +Configuration snippets for the default StellaOps observability backends used in +staging and production environments. The stack comprises: + +- **Prometheus** for metrics (scraping the collector's Prometheus exporter) +- **Tempo** for traces (OTLP ingest via mTLS) +- **Loki** for logs (HTTP ingest with tenant isolation) + +## Files + +| Path | Description | +| ---- | ----------- | +| `prometheus.yaml` | Scrape configuration for the collector (mTLS + bearer token placeholder). | +| `tempo.yaml` | Tempo configuration with multitenancy enabled and local storage paths. | +| `loki.yaml` | Loki configuration enabling per-tenant overrides and boltdb-shipper storage. | +| `tenants/tempo-overrides.yaml` | Example tenant overrides for Tempo (retention, limits). | +| `tenants/loki-overrides.yaml` | Example tenant overrides for Loki (rate limits, retention). | +| `auth/` | Placeholder directory for Prometheus bearer token files (e.g., `token`). | + +These configurations are referenced by the Docker Compose overlay +(`deploy/compose/docker-compose.telemetry-storage.yaml`) and the staging rollout documented in +`docs/ops/telemetry-storage.md`. Adjust paths, credentials, and overrides before running in +connected environments. Place the Prometheus bearer token in `auth/token` when using the +Compose overlay (the directory contains a `.gitkeep` placeholder and is gitignored by default). + +## Security + +- Both Tempo and Loki require mutual TLS. +- Prometheus uses mTLS plus a bearer token that should be minted by Authority. +- Update the overrides files to enforce per-tenant retention/ingestion limits. + +For comprehensive deployment steps see `docs/ops/telemetry-storage.md`. 
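To spot-check the mTLS and bearer-token wiring before pointing Prometheus at the collector, a direct probe of the metrics endpoint is often enough. The sketch below uses only the Python standard library and assumes the default certificate, token, and target values from `storage/prometheus.yaml`; substitute your own paths and hostname.

```python
#!/usr/bin/env python3
"""Probe the collector metrics endpoint with mTLS + bearer token (sketch)."""
import ssl
import urllib.request

# Defaults mirror storage/prometheus.yaml; override for your environment.
CA_FILE = "/etc/telemetry/tls/ca.crt"
CERT_FILE = "/etc/telemetry/tls/client.crt"
KEY_FILE = "/etc/telemetry/tls/client.key"
TOKEN_FILE = "/etc/telemetry/auth/token"
TARGET = "https://stellaops-otel-collector:9464/"

# Trust the telemetry CA and present the client certificate (mutual TLS).
context = ssl.create_default_context(cafile=CA_FILE)
context.load_cert_chain(certfile=CERT_FILE, keyfile=KEY_FILE)

with open(TOKEN_FILE, encoding="utf-8") as handle:
    token = handle.read().strip()

request = urllib.request.Request(TARGET, headers={"Authorization": f"Bearer {token}"})
with urllib.request.urlopen(request, context=context) as response:
    body = response.read().decode("utf-8")

# The exporter serves OpenMetrics text; a few lines confirm the endpoint is reachable.
print("\n".join(body.splitlines()[:10]))
```

If the call succeeds you should see OpenMetrics text; a TLS or auth failure here reproduces the same error Prometheus would hit during scraping.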
diff --git a/deploy/telemetry/storage/loki.yaml b/deploy/telemetry/storage/loki.yaml index 101b4df3..3a9917ff 100644 --- a/deploy/telemetry/storage/loki.yaml +++ b/deploy/telemetry/storage/loki.yaml @@ -1,48 +1,48 @@ -auth_enabled: true - -server: - http_listen_port: 3100 - log_level: info - -common: - ring: - instance_addr: 127.0.0.1 - kvstore: - store: inmemory - replication_factor: 1 - path_prefix: /var/loki - -schema_config: - configs: - - from: 2024-01-01 - store: boltdb-shipper - object_store: filesystem - schema: v13 - index: - prefix: loki_index_ - period: 24h - -storage_config: - filesystem: - directory: /var/loki/chunks - boltdb_shipper: - active_index_directory: /var/loki/index - cache_location: /var/loki/index_cache - shared_store: filesystem - -ruler: - storage: - type: local - local: - directory: /var/loki/rules - rule_path: /tmp/loki-rules - enable_api: true - -limits_config: - enforce_metric_name: false - reject_old_samples: true - reject_old_samples_max_age: 168h - max_entries_limit_per_query: 5000 - ingestion_rate_mb: 10 - ingestion_burst_size_mb: 20 - per_tenant_override_config: /etc/telemetry/tenants/loki-overrides.yaml +auth_enabled: true + +server: + http_listen_port: 3100 + log_level: info + +common: + ring: + instance_addr: 127.0.0.1 + kvstore: + store: inmemory + replication_factor: 1 + path_prefix: /var/loki + +schema_config: + configs: + - from: 2024-01-01 + store: boltdb-shipper + object_store: filesystem + schema: v13 + index: + prefix: loki_index_ + period: 24h + +storage_config: + filesystem: + directory: /var/loki/chunks + boltdb_shipper: + active_index_directory: /var/loki/index + cache_location: /var/loki/index_cache + shared_store: filesystem + +ruler: + storage: + type: local + local: + directory: /var/loki/rules + rule_path: /tmp/loki-rules + enable_api: true + +limits_config: + enforce_metric_name: false + reject_old_samples: true + reject_old_samples_max_age: 168h + max_entries_limit_per_query: 5000 + ingestion_rate_mb: 10 + ingestion_burst_size_mb: 20 + per_tenant_override_config: /etc/telemetry/tenants/loki-overrides.yaml diff --git a/deploy/telemetry/storage/prometheus.yaml b/deploy/telemetry/storage/prometheus.yaml index e1dcfe4c..c64b5cf4 100644 --- a/deploy/telemetry/storage/prometheus.yaml +++ b/deploy/telemetry/storage/prometheus.yaml @@ -1,19 +1,19 @@ -global: - scrape_interval: 15s - evaluation_interval: 30s - -scrape_configs: - - job_name: "stellaops-otel-collector" - scheme: https - metrics_path: / - tls_config: - ca_file: ${PROMETHEUS_TLS_CA_FILE:-/etc/telemetry/tls/ca.crt} - cert_file: ${PROMETHEUS_TLS_CERT_FILE:-/etc/telemetry/tls/client.crt} - key_file: ${PROMETHEUS_TLS_KEY_FILE:-/etc/telemetry/tls/client.key} - insecure_skip_verify: false - authorization: - type: Bearer - credentials_file: ${PROMETHEUS_BEARER_TOKEN_FILE:-/etc/telemetry/auth/token} - static_configs: - - targets: - - ${PROMETHEUS_COLLECTOR_TARGET:-stellaops-otel-collector:9464} +global: + scrape_interval: 15s + evaluation_interval: 30s + +scrape_configs: + - job_name: "stellaops-otel-collector" + scheme: https + metrics_path: / + tls_config: + ca_file: ${PROMETHEUS_TLS_CA_FILE:-/etc/telemetry/tls/ca.crt} + cert_file: ${PROMETHEUS_TLS_CERT_FILE:-/etc/telemetry/tls/client.crt} + key_file: ${PROMETHEUS_TLS_KEY_FILE:-/etc/telemetry/tls/client.key} + insecure_skip_verify: false + authorization: + type: Bearer + credentials_file: ${PROMETHEUS_BEARER_TOKEN_FILE:-/etc/telemetry/auth/token} + static_configs: + - targets: + - 
${PROMETHEUS_COLLECTOR_TARGET:-stellaops-otel-collector:9464} diff --git a/deploy/telemetry/storage/tempo.yaml b/deploy/telemetry/storage/tempo.yaml index 976e517b..1811eef6 100644 --- a/deploy/telemetry/storage/tempo.yaml +++ b/deploy/telemetry/storage/tempo.yaml @@ -1,56 +1,56 @@ -multitenancy_enabled: true -usage_report: - reporting_enabled: false - -server: - http_listen_port: 3200 - log_level: info - -distributor: - receivers: - otlp: - protocols: - grpc: - tls: - cert_file: ${TEMPO_TLS_CERT_FILE:-/etc/telemetry/tls/server.crt} - key_file: ${TEMPO_TLS_KEY_FILE:-/etc/telemetry/tls/server.key} - client_ca_file: ${TEMPO_TLS_CA_FILE:-/etc/telemetry/tls/ca.crt} - require_client_cert: true - http: - tls: - cert_file: ${TEMPO_TLS_CERT_FILE:-/etc/telemetry/tls/server.crt} - key_file: ${TEMPO_TLS_KEY_FILE:-/etc/telemetry/tls/server.key} - client_ca_file: ${TEMPO_TLS_CA_FILE:-/etc/telemetry/tls/ca.crt} - require_client_cert: true - -ingester: - lifecycler: - ring: - instance_availability_zone: ${TEMPO_ZONE:-zone-a} - trace_idle_period: 10s - max_block_bytes: 1_048_576 - -compactor: - compaction: - block_retention: 168h - -metrics_generator: - registry: - external_labels: - cluster: stellaops - -storage: - trace: - backend: local - local: - path: /var/tempo/traces - wal: - path: /var/tempo/wal - metrics: - backend: prometheus - -overrides: - defaults: - ingestion_rate_limit_bytes: 1048576 - max_traces_per_user: 200000 - per_tenant_override_config: /etc/telemetry/tenants/tempo-overrides.yaml +multitenancy_enabled: true +usage_report: + reporting_enabled: false + +server: + http_listen_port: 3200 + log_level: info + +distributor: + receivers: + otlp: + protocols: + grpc: + tls: + cert_file: ${TEMPO_TLS_CERT_FILE:-/etc/telemetry/tls/server.crt} + key_file: ${TEMPO_TLS_KEY_FILE:-/etc/telemetry/tls/server.key} + client_ca_file: ${TEMPO_TLS_CA_FILE:-/etc/telemetry/tls/ca.crt} + require_client_cert: true + http: + tls: + cert_file: ${TEMPO_TLS_CERT_FILE:-/etc/telemetry/tls/server.crt} + key_file: ${TEMPO_TLS_KEY_FILE:-/etc/telemetry/tls/server.key} + client_ca_file: ${TEMPO_TLS_CA_FILE:-/etc/telemetry/tls/ca.crt} + require_client_cert: true + +ingester: + lifecycler: + ring: + instance_availability_zone: ${TEMPO_ZONE:-zone-a} + trace_idle_period: 10s + max_block_bytes: 1_048_576 + +compactor: + compaction: + block_retention: 168h + +metrics_generator: + registry: + external_labels: + cluster: stellaops + +storage: + trace: + backend: local + local: + path: /var/tempo/traces + wal: + path: /var/tempo/wal + metrics: + backend: prometheus + +overrides: + defaults: + ingestion_rate_limit_bytes: 1048576 + max_traces_per_user: 200000 + per_tenant_override_config: /etc/telemetry/tenants/tempo-overrides.yaml diff --git a/deploy/telemetry/storage/tenants/loki-overrides.yaml b/deploy/telemetry/storage/tenants/loki-overrides.yaml index b0680f31..df52c29a 100644 --- a/deploy/telemetry/storage/tenants/loki-overrides.yaml +++ b/deploy/telemetry/storage/tenants/loki-overrides.yaml @@ -1,19 +1,19 @@ -# Example Loki per-tenant overrides -# Adjust according to https://grafana.com/docs/loki/latest/configuration/#limits_config - -stellaops-dev: - ingestion_rate_mb: 10 - ingestion_burst_size_mb: 20 - max_global_streams_per_user: 5000 - retention_period: 168h - -stellaops-stage: - ingestion_rate_mb: 20 - ingestion_burst_size_mb: 40 - max_global_streams_per_user: 10000 - retention_period: 336h - -__default__: - ingestion_rate_mb: 5 - ingestion_burst_size_mb: 10 - retention_period: 72h +# Example Loki per-tenant overrides +# 
Adjust according to https://grafana.com/docs/loki/latest/configuration/#limits_config + +stellaops-dev: + ingestion_rate_mb: 10 + ingestion_burst_size_mb: 20 + max_global_streams_per_user: 5000 + retention_period: 168h + +stellaops-stage: + ingestion_rate_mb: 20 + ingestion_burst_size_mb: 40 + max_global_streams_per_user: 10000 + retention_period: 336h + +__default__: + ingestion_rate_mb: 5 + ingestion_burst_size_mb: 10 + retention_period: 72h diff --git a/deploy/telemetry/storage/tenants/tempo-overrides.yaml b/deploy/telemetry/storage/tenants/tempo-overrides.yaml index 26066897..20024629 100644 --- a/deploy/telemetry/storage/tenants/tempo-overrides.yaml +++ b/deploy/telemetry/storage/tenants/tempo-overrides.yaml @@ -1,16 +1,16 @@ -# Example Tempo per-tenant overrides -# Consult https://grafana.com/docs/tempo/latest/configuration/#limits-configuration -# before applying in production. - -stellaops-dev: - traces_per_second_limit: 100000 - max_bytes_per_trace: 10485760 - max_search_bytes_per_trace: 20971520 - -stellaops-stage: - traces_per_second_limit: 200000 - max_bytes_per_trace: 20971520 - -__default__: - traces_per_second_limit: 50000 - max_bytes_per_trace: 5242880 +# Example Tempo per-tenant overrides +# Consult https://grafana.com/docs/tempo/latest/configuration/#limits-configuration +# before applying in production. + +stellaops-dev: + traces_per_second_limit: 100000 + max_bytes_per_trace: 10485760 + max_search_bytes_per_trace: 20971520 + +stellaops-stage: + traces_per_second_limit: 200000 + max_bytes_per_trace: 20971520 + +__default__: + traces_per_second_limit: 50000 + max_bytes_per_trace: 5242880 diff --git a/deploy/tools/check-channel-alignment.py b/deploy/tools/check-channel-alignment.py index d92dd0e1..2463d662 100644 --- a/deploy/tools/check-channel-alignment.py +++ b/deploy/tools/check-channel-alignment.py @@ -1,130 +1,130 @@ -#!/usr/bin/env python3 -""" -Ensure deployment bundles reference the images defined in a release manifest. - -Usage: - ./deploy/tools/check-channel-alignment.py \ - --release deploy/releases/2025.10-edge.yaml \ - --target deploy/helm/stellaops/values-dev.yaml \ - --target deploy/compose/docker-compose.dev.yaml - -For every target file, the script scans `image:` declarations and verifies that -any image belonging to a repository listed in the release manifest matches the -exact digest or tag recorded there. Images outside of the manifest (for example, -supporting services such as `nats`) are ignored. -""" - -from __future__ import annotations - -import argparse -import pathlib -import re -import sys -from typing import Dict, Iterable, List, Optional, Set - -IMAGE_LINE = re.compile(r"^\s*image:\s*['\"]?(?P\S+)['\"]?\s*$") - - -def extract_images(path: pathlib.Path) -> List[str]: - images: List[str] = [] - for line in path.read_text(encoding="utf-8").splitlines(): - match = IMAGE_LINE.match(line) - if match: - images.append(match.group("image")) - return images - - -def image_repo(image: str) -> str: - if "@" in image: - return image.split("@", 1)[0] - # Split on the last colon to preserve registries with ports (e.g. 
localhost:5000) - if ":" in image: - prefix, tag = image.rsplit(":", 1) - if "/" in tag: - # handle digestive colon inside path (unlikely) - return image - return prefix - return image - - -def load_release_map(release_path: pathlib.Path) -> Dict[str, str]: - release_map: Dict[str, str] = {} - for image in extract_images(release_path): - repo = image_repo(image) - release_map[repo] = image - return release_map - - -def check_target( - target_path: pathlib.Path, - release_map: Dict[str, str], - ignore_repos: Set[str], -) -> List[str]: - errors: List[str] = [] - for image in extract_images(target_path): - repo = image_repo(image) - if repo in ignore_repos: - continue - if repo not in release_map: - continue - expected = release_map[repo] - if image != expected: - errors.append( - f"{target_path}: {image} does not match release value {expected}" - ) - return errors - - -def parse_args(argv: Optional[Iterable[str]] = None) -> argparse.Namespace: - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - "--release", - required=True, - type=pathlib.Path, - help="Path to the release manifest (YAML)", - ) - parser.add_argument( - "--target", - action="append", - required=True, - type=pathlib.Path, - help="Deployment profile to validate against the release manifest", - ) - parser.add_argument( - "--ignore-repo", - action="append", - default=[], - help="Repository prefix to ignore (may be repeated)", - ) - return parser.parse_args(argv) - - -def main(argv: Optional[Iterable[str]] = None) -> int: - args = parse_args(argv) - - release_map = load_release_map(args.release) - ignore_repos = {repo.rstrip("/") for repo in args.ignore_repo} - - if not release_map: - print(f"error: no images found in release manifest {args.release}", file=sys.stderr) - return 2 - - total_errors: List[str] = [] - for target in args.target: - if not target.exists(): - total_errors.append(f"{target}: file not found") - continue - total_errors.extend(check_target(target, release_map, ignore_repos)) - - if total_errors: - print("✖ channel alignment check failed:", file=sys.stderr) - for err in total_errors: - print(f" - {err}", file=sys.stderr) - return 1 - - print("✓ deployment profiles reference release images for the inspected repositories.") - return 0 - - -if __name__ == "__main__": - raise SystemExit(main()) +#!/usr/bin/env python3 +""" +Ensure deployment bundles reference the images defined in a release manifest. + +Usage: + ./deploy/tools/check-channel-alignment.py \ + --release deploy/releases/2025.10-edge.yaml \ + --target deploy/helm/stellaops/values-dev.yaml \ + --target deploy/compose/docker-compose.dev.yaml + +For every target file, the script scans `image:` declarations and verifies that +any image belonging to a repository listed in the release manifest matches the +exact digest or tag recorded there. Images outside of the manifest (for example, +supporting services such as `nats`) are ignored. 
+""" + +from __future__ import annotations + +import argparse +import pathlib +import re +import sys +from typing import Dict, Iterable, List, Optional, Set + +IMAGE_LINE = re.compile(r"^\s*image:\s*['\"]?(?P\S+)['\"]?\s*$") + + +def extract_images(path: pathlib.Path) -> List[str]: + images: List[str] = [] + for line in path.read_text(encoding="utf-8").splitlines(): + match = IMAGE_LINE.match(line) + if match: + images.append(match.group("image")) + return images + + +def image_repo(image: str) -> str: + if "@" in image: + return image.split("@", 1)[0] + # Split on the last colon to preserve registries with ports (e.g. localhost:5000) + if ":" in image: + prefix, tag = image.rsplit(":", 1) + if "/" in tag: + # handle digestive colon inside path (unlikely) + return image + return prefix + return image + + +def load_release_map(release_path: pathlib.Path) -> Dict[str, str]: + release_map: Dict[str, str] = {} + for image in extract_images(release_path): + repo = image_repo(image) + release_map[repo] = image + return release_map + + +def check_target( + target_path: pathlib.Path, + release_map: Dict[str, str], + ignore_repos: Set[str], +) -> List[str]: + errors: List[str] = [] + for image in extract_images(target_path): + repo = image_repo(image) + if repo in ignore_repos: + continue + if repo not in release_map: + continue + expected = release_map[repo] + if image != expected: + errors.append( + f"{target_path}: {image} does not match release value {expected}" + ) + return errors + + +def parse_args(argv: Optional[Iterable[str]] = None) -> argparse.Namespace: + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument( + "--release", + required=True, + type=pathlib.Path, + help="Path to the release manifest (YAML)", + ) + parser.add_argument( + "--target", + action="append", + required=True, + type=pathlib.Path, + help="Deployment profile to validate against the release manifest", + ) + parser.add_argument( + "--ignore-repo", + action="append", + default=[], + help="Repository prefix to ignore (may be repeated)", + ) + return parser.parse_args(argv) + + +def main(argv: Optional[Iterable[str]] = None) -> int: + args = parse_args(argv) + + release_map = load_release_map(args.release) + ignore_repos = {repo.rstrip("/") for repo in args.ignore_repo} + + if not release_map: + print(f"error: no images found in release manifest {args.release}", file=sys.stderr) + return 2 + + total_errors: List[str] = [] + for target in args.target: + if not target.exists(): + total_errors.append(f"{target}: file not found") + continue + total_errors.extend(check_target(target, release_map, ignore_repos)) + + if total_errors: + print("✖ channel alignment check failed:", file=sys.stderr) + for err in total_errors: + print(f" - {err}", file=sys.stderr) + return 1 + + print("✓ deployment profiles reference release images for the inspected repositories.") + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/deploy/tools/validate-profiles.sh b/deploy/tools/validate-profiles.sh index 5680f0f5..371c4ad3 100644 --- a/deploy/tools/validate-profiles.sh +++ b/deploy/tools/validate-profiles.sh @@ -1,61 +1,61 @@ -#!/usr/bin/env bash -set -euo pipefail - -ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." 
&& pwd)" -COMPOSE_DIR="$ROOT_DIR/compose" -HELM_DIR="$ROOT_DIR/helm/stellaops" - -compose_profiles=( - "docker-compose.dev.yaml:env/dev.env.example" - "docker-compose.stage.yaml:env/stage.env.example" - "docker-compose.prod.yaml:env/prod.env.example" - "docker-compose.airgap.yaml:env/airgap.env.example" - "docker-compose.mirror.yaml:env/mirror.env.example" - "docker-compose.telemetry.yaml:" - "docker-compose.telemetry-storage.yaml:" -) - -docker_ready=false -if command -v docker >/dev/null 2>&1; then - if docker compose version >/dev/null 2>&1; then - docker_ready=true - else - echo "⚠️ docker CLI present but Compose plugin unavailable; skipping compose validation" >&2 - fi -else - echo "⚠️ docker CLI not found; skipping compose validation" >&2 -fi - -if [[ "$docker_ready" == "true" ]]; then - for entry in "${compose_profiles[@]}"; do - IFS=":" read -r compose_file env_file <<<"$entry" - printf '→ validating %s with %s\n' "$compose_file" "$env_file" - if [[ -n "$env_file" ]]; then - docker compose \ - --env-file "$COMPOSE_DIR/$env_file" \ - -f "$COMPOSE_DIR/$compose_file" config >/dev/null - else - docker compose -f "$COMPOSE_DIR/$compose_file" config >/dev/null - fi - done -fi - -helm_values=( - "$HELM_DIR/values-dev.yaml" - "$HELM_DIR/values-stage.yaml" - "$HELM_DIR/values-prod.yaml" - "$HELM_DIR/values-airgap.yaml" - "$HELM_DIR/values-mirror.yaml" -) - -if command -v helm >/dev/null 2>&1; then - for values in "${helm_values[@]}"; do - printf '→ linting Helm chart with %s\n' "$(basename "$values")" - helm lint "$HELM_DIR" -f "$values" - helm template test-release "$HELM_DIR" -f "$values" >/dev/null - done -else - echo "⚠️ helm CLI not found; skipping Helm lint/template" >&2 -fi - -printf 'Profiles validated (where tooling was available).\n' +#!/usr/bin/env bash +set -euo pipefail + +ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." 
&& pwd)" +COMPOSE_DIR="$ROOT_DIR/compose" +HELM_DIR="$ROOT_DIR/helm/stellaops" + +compose_profiles=( + "docker-compose.dev.yaml:env/dev.env.example" + "docker-compose.stage.yaml:env/stage.env.example" + "docker-compose.prod.yaml:env/prod.env.example" + "docker-compose.airgap.yaml:env/airgap.env.example" + "docker-compose.mirror.yaml:env/mirror.env.example" + "docker-compose.telemetry.yaml:" + "docker-compose.telemetry-storage.yaml:" +) + +docker_ready=false +if command -v docker >/dev/null 2>&1; then + if docker compose version >/dev/null 2>&1; then + docker_ready=true + else + echo "⚠️ docker CLI present but Compose plugin unavailable; skipping compose validation" >&2 + fi +else + echo "⚠️ docker CLI not found; skipping compose validation" >&2 +fi + +if [[ "$docker_ready" == "true" ]]; then + for entry in "${compose_profiles[@]}"; do + IFS=":" read -r compose_file env_file <<<"$entry" + printf '→ validating %s with %s\n' "$compose_file" "$env_file" + if [[ -n "$env_file" ]]; then + docker compose \ + --env-file "$COMPOSE_DIR/$env_file" \ + -f "$COMPOSE_DIR/$compose_file" config >/dev/null + else + docker compose -f "$COMPOSE_DIR/$compose_file" config >/dev/null + fi + done +fi + +helm_values=( + "$HELM_DIR/values-dev.yaml" + "$HELM_DIR/values-stage.yaml" + "$HELM_DIR/values-prod.yaml" + "$HELM_DIR/values-airgap.yaml" + "$HELM_DIR/values-mirror.yaml" +) + +if command -v helm >/dev/null 2>&1; then + for values in "${helm_values[@]}"; do + printf '→ linting Helm chart with %s\n' "$(basename "$values")" + helm lint "$HELM_DIR" -f "$values" + helm template test-release "$HELM_DIR" -f "$values" >/dev/null + done +else + echo "⚠️ helm CLI not found; skipping Helm lint/template" >&2 +fi + +printf 'Profiles validated (where tooling was available).\n' diff --git a/docs/09_API_CLI_REFERENCE.md b/docs/09_API_CLI_REFERENCE.md index c1d0822c..bc0efe46 100755 --- a/docs/09_API_CLI_REFERENCE.md +++ b/docs/09_API_CLI_REFERENCE.md @@ -1,933 +1,933 @@ -# API & CLI Reference - -*Purpose* – give operators and integrators a single, authoritative spec for REST/GRPC calls **and** first‑party CLI tools (`stella-cli`, `zastava`, `stella`). -Everything here is *source‑of‑truth* for generated Swagger/OpenAPI and the `--help` screens in the CLIs. - ---- - -## 0 Quick Glance - -| Area | Call / Flag | Notes | -| ------------------ | ------------------------------------------- | ------------------------------------------------------------------------------ | -| Scan entry | `POST /scan` | Accepts SBOM or image; sub‑5 s target | -| Delta check | `POST /layers/missing` | <20 ms reply; powers *delta SBOM* feature | -| Rate‑limit / quota | — | Headers **`X‑Stella‑Quota‑Remaining`**, **`X‑Stella‑Reset`** on every response | -| Policy I/O | `GET /policy/export`, `POST /policy/import` | YAML now; Rego coming | -| Policy lint | `POST /policy/validate` | Returns 200 OK if ruleset passes | -| Auth | `POST /connect/token` (OpenIddict) | Client‑credentials preferred | -| Health | `GET /healthz` | Simple liveness probe | -| Attestation * | `POST /attest` (TODO Q1‑2026) | SLSA provenance + Rekor log | -| CLI flags | `--sbom-type` `--delta` `--policy-file` | Added to `stella` | - -\* Marked **TODO** → delivered after sixth month (kept on Feature Matrix “To Do” list). - ---- - -## 1 Authentication - -Stella Ops uses **OAuth 2.0 / OIDC** (token endpoint mounted via OpenIddict). 
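For CI scripts that need the exchange programmatically, the same client-credentials request can be issued from code; the raw HTTP form follows below. This is a minimal sketch using the Python standard library, and the base URL, client id, and secret are placeholders rather than real credentials.

```python
import json
import urllib.parse
import urllib.request

BASE_URL = "https://stellaops.example.internal"  # assumption: your gateway base URL

form = urllib.parse.urlencode({
    "grant_type": "client_credentials",
    "client_id": "ci-bot",        # placeholder client
    "client_secret": "REDACTED",  # placeholder secret
    "scope": "stella.api",
}).encode("ascii")

request = urllib.request.Request(
    f"{BASE_URL}/connect/token",
    data=form,
    headers={"Content-Type": "application/x-www-form-urlencoded"},
)
with urllib.request.urlopen(request) as response:
    token = json.load(response)["access_token"]

# Pass the token on every subsequent call: Authorization: Bearer <token>
print(token[:16], "...")
```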
- -``` -POST /connect/token -Content‑Type: application/x-www-form-urlencoded - -grant_type=client_credentials& -client_id=ci‑bot& -client_secret=REDACTED& -scope=stella.api -``` - -Successful response: - -```json -{ - "access_token": "eyJraWQi...", - "token_type": "Bearer", - "expires_in": 3600 -} -``` - -> **Tip** – pass the token via `Authorization: Bearer ` on every call. - ---- - -## 2 REST API - -### 2.0 Obtain / Refresh Offline‑Token - -```text -POST /token/offline -Authorization: Bearer -``` - -| Body field | Required | Example | Notes | -|------------|----------|---------|-------| -| `expiresDays` | no | `30` | Max 90 days | - -```json -{ - "jwt": "eyJhbGciOiJSUzI1NiIsInR5cCI6...", - "expires": "2025‑08‑17T00:00:00Z" -} -``` - -Token is signed with the backend’s private key and already contains -`"maxScansPerDay": {{ quota_token }}`. - - -### 2.1 Scan – Upload SBOM **or** Image - -``` -POST /scan -``` - -| Param / Header | In | Required | Description | -| -------------------- | ------ | -------- | --------------------------------------------------------------------- | -| `X‑Stella‑Sbom‑Type` | header | no | `trivy-json-v2`, `spdx-json`, `cyclonedx-json`; omitted ➞ auto‑detect | -| `?threshold` | query | no | `low`, `medium`, `high`, `critical`; default **critical** | -| body | body | yes | *Either* SBOM JSON *or* Docker image tarball/upload URL | - -Every successful `/scan` response now includes: - -| Header | Example | -|--------|---------| -| `X‑Stella‑Quota‑Remaining` | `129` | -| `X‑Stella‑Reset` | `2025‑07‑18T23:59:59Z` | -| `X‑Stella‑Token‑Expires` | `2025‑08‑17T00:00:00Z` | - -**Response 200** (scan completed): - -```json -{ - "digest": "sha256:…", - "summary": { - "Critical": 0, - "High": 3, - "Medium": 12, - "Low": 41 - }, - "policyStatus": "pass", - "quota": { - "remaining": 131, - "reset": "2025-07-18T00:00:00Z" - } -} -``` - -**Response 202** – queued; polling URL in `Location` header. - ---- - -### 2.2 Delta SBOM – Layer Cache Check - -``` -POST /layers/missing -Content‑Type: application/json -Authorization: Bearer -``` - -```json -{ - "layers": [ - "sha256:d38b...", - "sha256:af45..." - ] -} -``` - -**Response 200** — <20 ms target: - -```json -{ - "missing": [ - "sha256:af45..." - ] -} -``` - -Client then generates SBOM **only** for the `missing` layers and re‑posts `/scan`. - ---- - -### 2.3 Policy Endpoints *(preview feature flag: `scanner.features.enablePolicyPreview`)* - -All policy APIs require **`scanner.reports`** scope (or anonymous access while auth is disabled). - -**Fetch schema** - -``` -GET /api/v1/policy/schema -Authorization: Bearer -Accept: application/schema+json -``` - -Returns the embedded `policy-schema@1` JSON schema used by the binder. 
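Tooling that lints policies offline can fetch the schema once and cache it next to other offline artefacts. A minimal sketch, assuming a reachable gateway URL and a token carrying the `scanner.reports` scope (both placeholders here).

```python
import pathlib
import urllib.request

BASE_URL = "https://stellaops.example.internal"  # assumption: gateway base URL
TOKEN = "REDACTED"                               # token with scanner.reports scope

request = urllib.request.Request(
    f"{BASE_URL}/api/v1/policy/schema",
    headers={
        "Authorization": f"Bearer {TOKEN}",
        "Accept": "application/schema+json",
    },
)
with urllib.request.urlopen(request) as response:
    schema = response.read()

# Cache the schema so policy linting keeps working in air-gapped environments.
pathlib.Path("policy-schema@1.json").write_bytes(schema)
```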
- -**Run diagnostics** - -``` -POST /api/v1/policy/diagnostics -Content-Type: application/json -Authorization: Bearer -``` - -```json -{ - "policy": { - "format": "yaml", - "actor": "cli", - "description": "dev override", - "content": "version: \"1.0\"\nrules:\n - name: Quiet Dev\n environments: [dev]\n action:\n type: ignore\n justification: dev waiver\n" - } -} -``` - -**Response 200**: - -```json -{ - "success": false, - "version": "1.0", - "ruleCount": 1, - "errorCount": 0, - "warningCount": 1, - "generatedAt": "2025-10-19T03:25:14.112Z", - "issues": [ - { "code": "policy.rule.quiet.missing_vex", "message": "Quiet flag ignored: rule must specify requireVex justifications.", "severity": "Warning", "path": "$.rules[0]" } - ], - "recommendations": [ - "Review policy warnings and ensure intentional overrides are documented." - ] -} -``` - -`success` is `false` when blocking issues remain; recommendations aggregate YAML ignore rules, VEX include/exclude hints, and vendor precedence guidance. - -**Preview impact** - -``` -POST /api/v1/policy/preview -Authorization: Bearer -Content-Type: application/json -``` - -```json -{ - "imageDigest": "sha256:abc123", - "findings": [ - { "id": "finding-1", "severity": "Critical", "source": "NVD" } - ], - "policy": { - "format": "yaml", - "content": "version: \"1.0\"\nrules:\n - name: Block Critical\n severity: [Critical]\n action: block\n" - } -} -``` - -**Response 200**: - -```json -{ - "success": true, - "policyDigest": "9c5e...", - "revisionId": "preview", - "changed": 1, - "diffs": [ - { - "findingId": "finding-1", - "baseline": {"findingId": "finding-1", "status": "Pass"}, - "projected": { - "findingId": "finding-1", - "status": "Blocked", - "ruleName": "Block Critical", - "ruleAction": "Block", - "score": 5.0, - "configVersion": "1.0", - "inputs": {"severityWeight": 5.0} - }, - "changed": true - } - ], - "issues": [] -} -``` - -- Provide `policy` to preview staged changes; omit it to compare against the active snapshot. -- Baseline verdicts are optional; when omitted, the API synthesises pass baselines before computing diffs. -- Quieted verdicts include `quietedBy` and `quiet` flags; score inputs now surface reachability/vendor trust weights (`reachability.*`, `trustWeight.*`). - -**OpenAPI**: the full API document (including these endpoints) is exposed at `/openapi/v1.json` and can be fetched for tooling or contract regeneration. - -### 2.4 Scanner – Queue a Scan Job *(SP9 milestone)* - -``` -POST /api/v1/scans -Authorization: Bearer -Content-Type: application/json -``` - -```json -{ - "image": { - "reference": "registry.example.com/acme/app:1.2.3" - }, - "force": false, - "clientRequestId": "ci-build-1845", - "metadata": { - "pipeline": "github", - "trigger": "pull-request" - } -} -``` - -| Field | Required | Notes | -| ------------------- | -------- | ------------------------------------------------------------------------------------------------ | -| `image.reference` | no\* | Full repo/tag (`registry/repo:tag`). Provide **either** `reference` or `digest` (sha256:…). | -| `image.digest` | no\* | OCI digest (e.g. `sha256:…`). | -| `force` | no | `true` forces a re-run even if an identical scan (`scanId`) already exists. Default **false**. | -| `clientRequestId` | no | Free-form string surfaced in audit logs. | -| `metadata` | no | Optional string map stored with the job and surfaced in observability feeds. | - -\* At least one of `image.reference` or `image.digest` must be supplied. 
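A minimal submission sketch mirroring the payload above; the base URL and token are placeholders. Because `scanId` is deterministic, re-running the script with the same payload is safe.

```python
import json
import urllib.request

BASE_URL = "https://stellaops.example.internal"  # assumption: gateway base URL
TOKEN = "REDACTED"                               # token with scanner.scans.enqueue scope

payload = {
    "image": {"reference": "registry.example.com/acme/app:1.2.3"},
    "force": False,
    "clientRequestId": "ci-build-1845",
    "metadata": {"pipeline": "github", "trigger": "pull-request"},
}

request = urllib.request.Request(
    f"{BASE_URL}/api/v1/scans",
    data=json.dumps(payload).encode("utf-8"),
    headers={
        "Authorization": f"Bearer {TOKEN}",
        "Content-Type": "application/json",
    },
)
with urllib.request.urlopen(request) as response:
    body = json.load(response)
    status_url = response.headers["Location"]

# Re-posting the same payload returns the same scanId with "created": false.
print(body["scanId"], body["status"], status_url)
```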
- -**Response 202** – job accepted (idempotent): - -```http -HTTP/1.1 202 Accepted -Location: /api/v1/scans/2f6c17f9b3f548e2a28b9c412f4d63f8 -``` - -```json -{ - "scanId": "2f6c17f9b3f548e2a28b9c412f4d63f8", - "status": "Pending", - "location": "/api/v1/scans/2f6c17f9b3f548e2a28b9c412f4d63f8", - "created": true -} -``` - -- `scanId` is deterministic – resubmitting an identical payload returns the same identifier with `"created": false`. -- API is cancellation-aware; aborting the HTTP request cancels the submission attempt. -- Required scope: **`scanner.scans.enqueue`**. - -**Response 400** – validation problem (`Content-Type: application/problem+json`) when both `image.reference` and `image.digest` are blank. - -### 2.5 Scanner – Fetch Scan Status - -``` -GET /api/v1/scans/{scanId} -Authorization: Bearer -Accept: application/json -``` - -**Response 200**: - -```json -{ - "scanId": "2f6c17f9b3f548e2a28b9c412f4d63f8", - "status": "Pending", - "image": { - "reference": "registry.example.com/acme/app:1.2.3", - "digest": null - }, - "createdAt": "2025-10-18T20:15:12.482Z", - "updatedAt": "2025-10-18T20:15:12.482Z", - "failureReason": null -} -``` - -Statuses: `Pending`, `Running`, `Succeeded`, `Failed`, `Cancelled`. - -### 2.6 Scanner – Stream Progress (SSE / JSONL) - -``` -GET /api/v1/scans/{scanId}/events?format=sse|jsonl -Authorization: Bearer -Accept: text/event-stream -``` - -When `format` is omitted the endpoint emits **Server-Sent Events** (SSE). Specify `format=jsonl` to receive newline-delimited JSON (`application/x-ndjson`). Response headers include `Cache-Control: no-store` and `X-Accel-Buffering: no` so intermediaries avoid buffering the stream. - -**SSE frame** (default): - -``` -id: 1 -event: pending -data: {"scanId":"2f6c17f9b3f548e2a28b9c412f4d63f8","sequence":1,"state":"Pending","message":"queued","timestamp":"2025-10-19T03:12:45.118Z","correlationId":"2f6c17f9b3f548e2a28b9c412f4d63f8:0001","data":{"force":false,"meta.pipeline":"github"}} -``` - -**JSONL frame** (`format=jsonl`): - -```json -{"scanId":"2f6c17f9b3f548e2a28b9c412f4d63f8","sequence":1,"state":"Pending","message":"queued","timestamp":"2025-10-19T03:12:45.118Z","correlationId":"2f6c17f9b3f548e2a28b9c412f4d63f8:0001","data":{"force":false,"meta.pipeline":"github"}} -``` - -- `sequence` is monotonic starting at `1`. -- `correlationId` is deterministic (`{scanId}:{sequence:0000}`) unless a custom identifier is supplied by the publisher. -- `timestamp` is ISO‑8601 UTC with millisecond precision, ensuring deterministic ordering for consumers. -- The stream completes when the client disconnects or the coordinator stops publishing events. - -### 2.7 Scanner – Assemble Report (Signed Envelope) - -``` -POST /api/v1/reports -Authorization: Bearer -Content-Type: application/json -``` - -Request body mirrors policy preview inputs (image digest plus findings). The service evaluates the active policy snapshot, assembles a verdict, and signs the canonical report payload. 
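A sketch of driving report assembly end to end: it posts a request whose finding shape follows the policy-preview examples earlier in this section (an assumption, since the request body mirrors preview inputs), then decodes `dsse.payload` to inspect the canonical JSON that was signed. Base URL and token are placeholders.

```python
import base64
import json
import urllib.request

BASE_URL = "https://stellaops.example.internal"  # assumption: gateway base URL
TOKEN = "REDACTED"

payload = {
    "imageDigest": "sha256:7dbe0c9a5d4f1c8184007e9d94dbe55928f8a2db5ab9c1c2d4a2f7bbcdfe1234",
    "findings": [
        # assumed shape, mirroring the policy preview request body
        {"id": "library:pkg/openssl@1.1.1w", "severity": "Critical", "source": "NVD"},
    ],
}

request = urllib.request.Request(
    f"{BASE_URL}/api/v1/reports",
    data=json.dumps(payload).encode("utf-8"),
    headers={"Authorization": f"Bearer {TOKEN}", "Content-Type": "application/json"},
)
with urllib.request.urlopen(request) as response:
    envelope = json.load(response)

# dsse.payload is base64 of the canonical report JSON; decode it to audit what was signed.
canonical = json.loads(base64.b64decode(envelope["dsse"]["payload"]))
print(envelope["report"]["verdict"], canonical["report"]["reportId"])
```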
- -**Response 200**: - -```json -{ - "report": { - "reportId": "report-9f8cde21aab54321", - "imageDigest": "sha256:7dbe0c9a5d4f1c8184007e9d94dbe55928f8a2db5ab9c1c2d4a2f7bbcdfe1234", - "generatedAt": "2025-10-23T15:32:22Z", - "verdict": "blocked", - "policy": { - "revisionId": "rev-42", - "digest": "8a0f72f8dc5c51c46991db3bba34e9b3c0c8e944a7a6d0a9c29a9aa6b8439876" - }, - "summary": { "total": 2, "blocked": 1, "warned": 1, "ignored": 0, "quieted": 0 }, - "verdicts": [ - { - "findingId": "library:pkg/openssl@1.1.1w", - "status": "Blocked", - "ruleName": "Block vendor unknowns", - "ruleAction": "block", - "notes": "Unknown vendor telemetry — medium confidence band.", - "score": 19.5, - "configVersion": "1.0", - "inputs": { - "severityWeight": 50, - "trustWeight": 0.65, - "reachabilityWeight": 0.6, - "baseScore": 19.5, - "trustWeight.vendor": 0.65, - "reachability.unknown": 0.6, - "unknownConfidence": 0.55, - "unknownAgeDays": 5 - }, - "quietedBy": null, - "quiet": false, - "unknownConfidence": 0.55, - "confidenceBand": "medium", - "unknownAgeDays": 5, - "sourceTrust": "vendor", - "reachability": "unknown" - }, - { - "findingId": "library:pkg/zlib@1.3.1", - "status": "Warned", - "ruleName": "Runtime mitigation required", - "ruleAction": "warn", - "notes": "Runtime reachable unknown — mitigation window required.", - "score": 18.75, - "configVersion": "1.0", - "inputs": { - "severityWeight": 75, - "trustWeight": 1, - "reachabilityWeight": 0.45, - "baseScore": 33.75, - "reachability.runtime": 0.45, - "warnPenalty": 15, - "unknownConfidence": 0.35, - "unknownAgeDays": 13 - }, - "quietedBy": null, - "quiet": false, - "unknownConfidence": 0.35, - "confidenceBand": "medium", - "unknownAgeDays": 13, - "sourceTrust": "NVD", - "reachability": "runtime" - } - ], - "issues": [] - }, - "dsse": { - "payloadType": "application/vnd.stellaops.report+json", - "payload": "eyJyZXBvcnQiOnsicmVwb3J0SWQiOiJyZXBvcnQtOWY4Y2RlMjFhYWI1NDMyMSJ9fQ==", - "signatures": [ - { - "keyId": "scanner-report-signing", - "algorithm": "hs256", - "signature": "MEQCIGHscnJ2bm9wYXlsb2FkZXIAIjANBgkqhkiG9w0BAQsFAAOCAQEASmFja3Nvbk1ldGE=" - } - ] - } -} -``` - -- The `report` object omits null fields and is deterministic (ISO timestamps, sorted keys) while surfacing `unknownConfidence`, `confidenceBand`, and `unknownAgeDays` for auditability. -- `dsse` follows the DSSE (Dead Simple Signing Envelope) shape; `payload` is the canonical UTF-8 JSON and `signatures[0].signature` is the base64 HMAC/Ed25519 value depending on configuration. -- Full offline samples live at `samples/policy/policy-report-unknown.json` (request + response) and `samples/api/reports/report-sample.dsse.json` (envelope fixture) for tooling tests or signature verification. - -**Response 404** – `application/problem+json` payload with type `https://stellaops.org/problems/not-found` when the scan identifier is unknown. - -> **Tip** – poll `Location` from the submission call until `status` transitions away from `Pending`/`Running`. 
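The polling tip above is easy to automate. A minimal sketch, assuming the `Location` value returned by the 202 response and a fixed five-second interval; tune the interval and add a timeout for real pipelines.

```python
import json
import time
import urllib.request

BASE_URL = "https://stellaops.example.internal"  # assumption: gateway base URL
TOKEN = "REDACTED"
LOCATION = "/api/v1/scans/2f6c17f9b3f548e2a28b9c412f4d63f8"  # from the 202 response

TERMINAL = {"Succeeded", "Failed", "Cancelled"}

while True:
    request = urllib.request.Request(
        f"{BASE_URL}{LOCATION}",
        headers={"Authorization": f"Bearer {TOKEN}", "Accept": "application/json"},
    )
    with urllib.request.urlopen(request) as response:
        status = json.load(response)
    if status["status"] in TERMINAL:
        break
    time.sleep(5)  # simple fixed interval; adjust for your pipeline

print(status["status"], status.get("failureReason"))
```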
- -```yaml -# Example import payload (YAML) -version: "1.0" -rules: - - name: Ignore Low dev - severity: [Low, None] - environments: [dev, staging] - action: ignore -``` - -Validation errors come back as: - -```json -{ - "errors": [ - { - "path": "$.rules[0].severity", - "msg": "Invalid level 'None'" - } - ] -} -``` - -```json -# Preview response excerpt -{ - "success": true, - "policyDigest": "9c5e...", - "revisionId": "rev-12", - "changed": 1, - "diffs": [ - { - "baseline": {"findingId": "finding-1", "status": "pass"}, - "projected": {"findingId": "finding-1", "status": "blocked", "ruleName": "Block Critical"}, - "changed": true - } - ] -} -``` - ---- - -### 2.4 Attestation (Planned – Q1‑2026) - -``` -POST /attest -``` - -| Param | Purpose | -| ----------- | ------------------------------------- | -| body (JSON) | SLSA v1.0 provenance doc | -| | Signed + stored in local Rekor mirror | - -Returns `202 Accepted` and `Location: /attest/{id}` for async verify. - ---- - -### 2.8 Runtime – Ingest Observer Events *(SCANNER-RUNTIME-12-301)* - -``` -POST /api/v1/runtime/events -Authorization: Bearer -Content-Type: application/json -``` - -| Requirement | Details | -|-------------|---------| -| Auth scope | `scanner.runtime.ingest` | -| Batch size | ≤ **256** envelopes (`scanner.runtime.maxBatchSize`, configurable) | -| Payload cap | ≤ **1 MiB** serialized JSON (`scanner.runtime.maxPayloadBytes`) | -| Rate limits | Per-tenant and per-node token buckets (default 200 events/s tenant, 50 events/s node, burst 200) – excess returns **429** with `Retry-After`. | -| TTL | Runtime events retained **45 days** by default (`scanner.runtime.eventTtlDays`). | - -**Request body** - -```json -{ - "batchId": "node-a-2025-10-20T15:03:12Z", - "events": [ - { - "schemaVersion": "zastava.runtime.event@v1", - "event": { - "eventId": "evt-2f9c02b8", - "when": "2025-10-20T15:03:08Z", - "kind": "ContainerStart", - "tenant": "tenant-alpha", - "node": "cluster-a/node-01", - "runtime": { "engine": "containerd", "version": "1.7.19" }, - "workload": { - "platform": "kubernetes", - "namespace": "payments", - "pod": "api-7c9fbbd8b7-ktd84", - "container": "api", - "containerId": "containerd://bead5...", - "imageRef": "ghcr.io/acme/api@sha256:deadbeef" - }, - "process": { "pid": 12345, "entrypoint": ["/start.sh", "--serve"], "buildId": "5f0c7c3c..." }, - "loadedLibs": [ - { "path": "/lib/x86_64-linux-gnu/libssl.so.3", "inode": 123456, "sha256": "abc123..." } - ], - "posture": { "imageSigned": true, "sbomReferrer": "present" }, - "delta": { "baselineImageDigest": "sha256:deadbeef" }, - "evidence": [ { "signal": "proc.maps", "value": "libssl.so.3@0x7f..." } ], - "annotations": { "observerVersion": "1.0.0" } - } - } - ] -} -``` - -**Responses** - -| Code | Body | Notes | -|------|------|-------| -| `202 Accepted` | `{ "accepted": 128, "duplicates": 2 }` | Batch persisted; duplicates are ignored via unique `eventId`. | -| `400 Bad Request` | Problem+JSON | Validation failures – empty batch, duplicate IDs, unsupported schema version, payload too large. | -| `429 Too Many Requests` | Problem+JSON | Per-tenant/node rate limit exceeded; `Retry-After` header emitted in seconds. | - -Persisted documents capture the canonical envelope (`payload` field), tenant/node metadata, and set an automatic TTL on `expiresAt`. Observers should retry rejected batches with exponential backoff honouring the provided `Retry-After` hint. 
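A sketch of the retry discipline described above: `429` responses are retried with exponential backoff, preferring the server's `Retry-After` hint (seconds) when present. The endpoint and response shape follow this section; base URL and token are placeholders.

```python
import json
import time
import urllib.error
import urllib.request

BASE_URL = "https://stellaops.example.internal"  # assumption: gateway base URL
TOKEN = "REDACTED"                               # token with scanner.runtime.ingest scope


def post_runtime_batch(batch: dict, max_attempts: int = 5) -> dict:
    """POST a runtime event batch, retrying 429s with backoff that honours Retry-After."""
    delay = 1.0
    for attempt in range(1, max_attempts + 1):
        request = urllib.request.Request(
            f"{BASE_URL}/api/v1/runtime/events",
            data=json.dumps(batch).encode("utf-8"),
            headers={"Authorization": f"Bearer {TOKEN}", "Content-Type": "application/json"},
        )
        try:
            with urllib.request.urlopen(request) as response:
                return json.load(response)  # e.g. {"accepted": 128, "duplicates": 2}
        except urllib.error.HTTPError as error:
            if error.code != 429 or attempt == max_attempts:
                raise
            retry_after = error.headers.get("Retry-After")
            wait = float(retry_after) if retry_after else delay
            time.sleep(wait)
            delay = min(delay * 2, 60.0)  # exponential backoff with a ceiling
    raise RuntimeError("not reached")
```

Batches should already respect the documented limits (at most 256 envelopes and 1 MiB serialized JSON) before being handed to a helper like this.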
- ---- - -## 3 StellaOps CLI (`stellaops-cli`) - -The new CLI is built on **System.CommandLine 2.0.0‑beta5** and mirrors the Concelier backend REST API. -Configuration follows the same precedence chain everywhere: - -1. Environment variables (e.g. `API_KEY`, `STELLAOPS_BACKEND_URL`, `StellaOps:ApiKey`) -2. `appsettings.json` → `appsettings.local.json` -3. `appsettings.yaml` → `appsettings.local.yaml` -4. Defaults (`ApiKey = ""`, `BackendUrl = ""`, cache folders under the current working directory) - -**Authority auth client resilience settings** - -| Setting | Environment variable | Default | Purpose | -|---------|----------------------|---------|---------| -| `StellaOps:Authority:Resilience:EnableRetries` | `STELLAOPS_AUTHORITY_ENABLE_RETRIES` | `true` | Toggle Polly wait-and-retry handlers for discovery/token calls | -| `StellaOps:Authority:Resilience:RetryDelays` | `STELLAOPS_AUTHORITY_RETRY_DELAYS` | `1s,2s,5s` | Comma/space-separated backoff sequence (HH:MM:SS) | -| `StellaOps:Authority:Resilience:AllowOfflineCacheFallback` | `STELLAOPS_AUTHORITY_ALLOW_OFFLINE_CACHE_FALLBACK` | `true` | Reuse cached discovery/JWKS metadata when Authority is temporarily unreachable | -| `StellaOps:Authority:Resilience:OfflineCacheTolerance` | `STELLAOPS_AUTHORITY_OFFLINE_CACHE_TOLERANCE` | `00:10:00` | Additional tolerance window added to the discovery/JWKS cache lifetime | - -See `docs/dev/32_AUTH_CLIENT_GUIDE.md` for recommended profiles (online vs. air-gapped) and testing guidance. - -| Command | Purpose | Key Flags / Arguments | Notes | -|---------|---------|-----------------------|-------| -| `stellaops-cli scanner download` | Fetch and install scanner container | `--channel ` (default `stable`)
`--output `
`--overwrite`
`--no-install` | Saves artefact under `ScannerCacheDirectory`, verifies digest/signature, and executes `docker load` unless `--no-install` is supplied. | -| `stellaops-cli scan run` | Execute scanner container against a directory (auto-upload) | `--target ` (required)
`--runner ` (default from config)
`--entry `
`[scanner-args...]` | Runs the scanner, writes results into `ResultsDirectory`, emits a structured `scan-run-*.json` metadata file, and automatically uploads the artefact when the exit code is `0`. | -| `stellaops-cli scan upload` | Re-upload existing scan artefact | `--file ` | Useful for retries when automatic upload fails or when operating offline. | -| `stellaops-cli db fetch` | Trigger connector jobs | `--source ` (e.g. `redhat`, `osv`)
`--stage ` (default `fetch`)
`--mode ` | Translates to `POST /jobs/source:{source}:{stage}` with `trigger=cli` | -| `stellaops-cli db merge` | Run canonical merge reconcile | — | Calls `POST /jobs/merge:reconcile`; exit code `0` on acceptance, `1` on failures/conflicts | -| `stellaops-cli db export` | Kick JSON / Trivy exports | `--format ` (default `json`)
`--delta`
`--publish-full/--publish-delta`
`--bundle-full/--bundle-delta` | Sets `{ delta = true }` parameter when requested and can override ORAS/bundle toggles per run | -| `stellaops-cli auth ` | Manage cached tokens for StellaOps Authority | `auth login --force` (ignore cache)
`auth status`
`auth whoami` | Uses `StellaOps.Auth.Client`; honours `StellaOps:Authority:*` configuration, stores tokens under `~/.stellaops/tokens` by default, and `whoami` prints subject/scope/expiry | -| `stellaops-cli auth revoke export` | Export the Authority revocation bundle | `--output ` (defaults to CWD) | Writes `revocation-bundle.json`, `.json.jws`, and `.json.sha256`; verifies the digest locally and includes key metadata in the log summary. | -| `stellaops-cli auth revoke verify` | Validate a revocation bundle offline | `--bundle ` `--signature ` `--key `
`--verbose` | Verifies detached JWS signatures, reports the computed SHA-256, and can fall back to cached JWKS when `--key` is omitted. | -| `stellaops-cli offline kit pull` | Download the latest offline kit bundle and manifest | `--bundle-id ` (optional)
`--destination `
`--overwrite`
`--no-resume` | Streams the bundle + manifest from the configured mirror/backend, resumes interrupted downloads, verifies SHA-256, and writes signatures plus a `.metadata.json` manifest alongside the artefacts. | -| `stellaops-cli offline kit import` | Upload an offline kit bundle to the backend | `` (argument)
`--manifest `
`--bundle-signature `
`--manifest-signature ` | Validates digests when metadata is present, then posts multipart payloads to `POST /api/offline-kit/import`; logs the submitted import ID/status for air-gapped rollout tracking. | -| `stellaops-cli offline kit status` | Display imported offline kit details | `--json` | Shows bundle id/kind, captured/imported timestamps, digests, and component versions; `--json` emits machine-readable output for scripting. | -| `stellaops-cli sources ingest --dry-run` | Dry-run guard validation for individual payloads | `--source `
`--input `
`--tenant `
`--format table\|json`
`--output ` | Normalises gzip/base64 payloads, invokes `api/aoc/ingest/dry-run`, and maps guard failures to deterministic `ERR_AOC_00x` exit codes. | -| `stellaops-cli aoc verify` | Replay AOC guardrails over stored documents | `--since `
`--limit `
`--sources `
`--codes `
`--format table\|json`
`--export ` | Summarises checked counts/violations, supports JSON evidence exports, and returns `0`, `11…17`, `18`, `70`, or `71` depending on guard outcomes. | -| `stellaops-cli config show` | Display resolved configuration | — | Masks secret values; helpful for air‑gapped installs | -| `stellaops-cli runtime policy test` | Ask Scanner.WebService for runtime verdicts (Webhook parity) | `--image/-i ` (repeatable, comma/space lists supported)
`--file/-f `
`--namespace/--ns `
`--label/-l key=value` (repeatable)
`--json` | Posts to `POST /api/v1/scanner/policy/runtime`, deduplicates image digests, and prints TTL/policy revision plus per-image columns for signed state, SBOM referrers, quieted-by metadata, confidence, Rekor attestation (uuid + verified flag), and recently observed build IDs (shortened for readability). Accepts newline/whitespace-delimited stdin when piped; `--json` emits the raw response without additional logging. | - -#### Example: Pivot from runtime verdicts to debug symbols - -```bash -$ stellaops-cli runtime policy test \ - --image ghcr.io/acme/payments@sha256:4f7d55f6... \ - --namespace payments - -Image Digest Signed SBOM Build IDs TTL -ghcr.io/acme/payments@sha256:4f7d55f6... yes present 5f0c7c3c..., 1122aabbccddeeff... 04:59:55 -``` - -1. Copy one of the hashes (e.g. `5f0c7c3cb4d9f8a4f1c1d5c6b7e8f90123456789`) and locate the bundled debug artefact: - ```bash - ls offline-kit/debug/.build-id/5f/0c7c3cb4d9f8a4f1c1d5c6b7e8f90123456789.debug - ``` -2. Confirm the running binary advertises the same GNU build-id: - ```bash - readelf -n /proc/$(pgrep -f payments-api | head -n1)/exe | grep -i 'Build ID' - ``` -3. If you operate a debuginfod mirror backed by the Offline Kit tree, resolve symbols with: - ```bash - debuginfod-find debuginfo 5f0c7c3cb4d9f8a4f1c1d5c6b7e8f90123456789 >/tmp/payments-api.debug - ``` - -See [Offline Kit step 0](24_OFFLINE_KIT.md#0-prepare-the-debug-store) for instructions on mirroring the debug store before packaging. - -`POST /api/v1/scanner/policy/runtime` responds with one entry per digest. Each result now includes: - -- `policyVerdict` (`pass|warn|fail|error`), `signed`, and `hasSbomReferrers` parity with the webhook contract. -- `confidence` (0-1 double) derived from canonical `PolicyPreviewService` evaluation and `quieted`/`quietedBy` flags for muted findings. -- `rekor` block carrying `uuid`, `url`, and the attestor-backed `verified` boolean when Rekor inclusion proofs have been confirmed. -- `metadata` (stringified JSON) capturing runtime heuristics, policy issues, evaluated findings, and timestamps for downstream audit. -- `buildIds` (array) lists up to three distinct GNU build-id hashes recently observed for that digest so debuggers can derive `/usr/lib/debug/.build-id//.debug` paths for symbol stores. - -When running on an interactive terminal without explicit override flags, the CLI uses Spectre.Console prompts to let you choose per-run ORAS/offline bundle behaviour. - -Runtime verdict output reflects the SCANNER-RUNTIME-12-302 contract sign-off (quieted provenance, confidence band, attestation verification). CLI-RUNTIME-13-008 now mirrors those fields in both table and `--json` formats. - -**Startup diagnostics** - -- `stellaops-cli` now loads Authority plug-in manifests during startup (respecting `Authority:Plugins:*`) and surfaces analyzer warnings when a plug-in weakens the baseline password policy (minimum length **12** and all character classes required). -- Follow the log entry’s config path and raise `passwordPolicy.minimumLength` to at least 12 while keeping `requireUppercase`, `requireLowercase`, `requireDigit`, and `requireSymbol` set to `true` to clear the warning; weakened overrides are treated as actionable security deviations. - -**Logging & exit codes** - -- Structured logging via `Microsoft.Extensions.Logging` with single-line console output (timestamps in UTC). -- `--verbose / -v` raises log level to `Debug`. 
-- Command exit codes bubble up: backend conflict → `1`, cancelled via `CTRL+C` → `130`, scanner exit codes propagate as-is. - -**Artifact validation** - -- Downloads are verified against the `X-StellaOps-Digest` header (SHA-256). When `StellaOps:ScannerSignaturePublicKeyPath` points to a PEM-encoded RSA key, the optional `X-StellaOps-Signature` header is validated as well. -- Metadata for each bundle is written alongside the artefact (`*.metadata.json`) with digest, signature, source URL, and timestamps. -- Retry behaviour is controlled via `StellaOps:ScannerDownloadAttempts` (default **3** with exponential backoff). -- Successful `scan run` executions create timestamped JSON artefacts inside `ResultsDirectory` plus a `scan-run-*.json` metadata envelope documenting the runner, arguments, timing, and stdout/stderr. The artefact is posted back to Concelier automatically. - -#### Trivy DB export metadata (`metadata.json`) - -`stellaops-cli db export --format trivy-db` (and the backing `POST /jobs/export:trivy-db`) always emits a `metadata.json` document in the OCI layout root. Operators consuming the bundle or delta updates should inspect the following fields: - -| Field | Type | Purpose | -| ----- | ---- | ------- | -| `mode` | `full` \| `delta` | Indicates whether the current run rebuilt the entire database (`full`) or only the changed files (`delta`). | -| `baseExportId` | string? | Export ID of the last full baseline that the delta builds upon. Only present for `mode = delta`. | -| `baseManifestDigest` | string? | SHA-256 digest of the manifest belonging to the baseline OCI layout. | -| `resetBaseline` | boolean | `true` when the exporter rotated the baseline (e.g., repo change, delta chain reset). Treat as a full refresh. | -| `treeDigest` | string | Canonical SHA-256 digest of the JSON tree used to build the database. | -| `treeBytes` | number | Total bytes across exported JSON files. | -| `advisoryCount` | number | Count of advisories included in the export. | -| `exporterVersion` | string | Version stamp of `StellaOps.Concelier.Exporter.TrivyDb`. | -| `builder` | object? | Raw metadata emitted by `trivy-db build` (version, update cadence, etc.). | -| `delta.changedFiles[]` | array | Present when `mode = delta`. Each entry lists `{ "path": "", "length": , "digest": "sha256:..." }`. | -| `delta.removedPaths[]` | array | Paths that existed in the previous manifest but were removed in the new run. | - -When the planner opts for a delta run, the exporter copies unmodified blobs from the baseline layout identified by `baseManifestDigest`. Consumers that cache OCI blobs only need to fetch the `changedFiles` and the new manifest/metadata unless `resetBaseline` is true. -When pushing to ORAS, set `concelier:exporters:trivyDb:oras:publishFull` / `publishDelta` to control whether full or delta runs are copied to the registry. Offline bundles follow the analogous `includeFull` / `includeDelta` switches under `offlineBundle`. - -Example configuration (`appsettings.yaml`): - -```yaml -concelier: - exporters: - trivyDb: - oras: - enabled: true - publishFull: true - publishDelta: false - offlineBundle: - enabled: true - includeFull: true - includeDelta: false -``` - - -**Authentication** - -- API key is sent as `Authorization: Bearer ` automatically when configured. -- Anonymous operation is permitted only when Concelier runs with - `authority.allowAnonymousFallback: true`. This flag is temporary—plan to disable - it before **2025-12-31 UTC** so bearer tokens become mandatory. 
- -Authority-backed auth workflow: -1. Configure Authority settings via config or env vars (see sample below). Minimum fields: `Url`, `ClientId`, and either `ClientSecret` (client credentials) or `Username`/`Password` (password grant). -2. Run `stellaops-cli auth login` to acquire and cache a token. Use `--force` if you need to ignore an existing cache entry. -3. Execute CLI commands as normal—the backend client injects the cached bearer token automatically and retries on transient 401/403 responses with operator guidance. -4. Inspect the cache with `stellaops-cli auth status` (shows expiry, scope, mode) or clear it via `stellaops-cli auth logout`. -5. Run `stellaops-cli auth whoami` to dump token subject, audience, issuer, scopes, and remaining lifetime (verbose mode prints additional claims). -6. Expect Concelier to emit audit logs for each `/jobs*` request showing `subject`, - `clientId`, `scopes`, `status`, and whether network bypass rules were applied. - -Tokens live in `~/.stellaops/tokens` unless `StellaOps:Authority:TokenCacheDirectory` overrides it. Cached tokens are reused offline until they expire; the CLI surfaces clear errors if refresh fails. - -For offline workflows, configure `StellaOps:Offline:KitsDirectory` (or `STELLAOPS_OFFLINE_KITS_DIR`) to control where bundles, manifests, and metadata are stored, and `StellaOps:Offline:KitMirror` (or `STELLAOPS_OFFLINE_MIRROR_URL`) to override the download base URL when pulling from a mirror. - -**Configuration file template** - -```jsonc -{ - "StellaOps": { - "ApiKey": "your-api-token", - "BackendUrl": "https://concelier.example.org", - "ScannerCacheDirectory": "scanners", - "ResultsDirectory": "results", - "DefaultRunner": "docker", - "ScannerSignaturePublicKeyPath": "", - "ScannerDownloadAttempts": 3, - "Offline": { - "KitsDirectory": "offline-kits", - "KitMirror": "https://get.stella-ops.org/ouk/" - }, - "Authority": { - "Url": "https://authority.example.org", - "ClientId": "concelier-cli", - "ClientSecret": "REDACTED", - "Username": "", - "Password": "", - "Scope": "concelier.jobs.trigger advisory:ingest advisory:read", - "TokenCacheDirectory": "" - } - } -} -``` - -Drop `appsettings.local.json` or `.yaml` beside the binary to override per environment. - ---- - -### 2.5 Misc Endpoints - -| Path | Method | Description | -| ---------- | ------ | ---------------------------- | -| `/healthz` | GET | Liveness; returns `"ok"` | -| `/metrics` | GET | Prometheus exposition (OTel) | -| `/version` | GET | Git SHA + build date | - ---- - -### 2.6 Authority Admin APIs - -Administrative endpoints live under `/internal/*` on the Authority host and require the bootstrap API key (`x-stellaops-bootstrap-key`). Responses are deterministic and audited via `AuthEventRecord`. - -| Path | Method | Description | -| ---- | ------ | ----------- | -| `/internal/revocations/export` | GET | Returns the revocation bundle (JSON + detached JWS + digest). Mirrors the output of `stellaops-cli auth revoke export`. | -| `/internal/signing/rotate` | POST | Promotes a new signing key and marks the previous key as retired without restarting the service. | - -**Rotate request body** - -```json -{ - "keyId": "authority-signing-2025", - "location": "../certificates/authority-signing-2025.pem", - "source": "file", - "provider": "default" -} -``` - -The API responds with the active `kid`, previous key (if any), and the set of retired key identifiers. Always export a fresh revocation bundle after rotation so downstream mirrors receive signatures from the new key. 
- ---- - -## 3 First‑Party CLI Tools - -### 3.1 `stella` - -> *Package SBOM + Scan + Exit code* – designed for CI. - -``` -Usage: stella [OPTIONS] IMAGE_OR_SBOM -``` - -| Flag / Option | Default | Description | -| --------------- | ----------------------- | -------------------------------------------------- | -| `--server` | `http://localhost:8080` | API root | -| `--token` | *env `STELLA_TOKEN`* | Bearer token | -| `--sbom-type` | *auto* | Force `trivy-json-v2`/`spdx-json`/`cyclonedx-json` | -| `--delta` | `false` | Enable delta layer optimisation | -| `--policy-file` | *none* | Override server rules with local YAML/Rego | -| `--threshold` | `critical` | Fail build if ≥ level found | -| `--output-json` | *none* | Write raw scan result to file | -| `--wait-quota` | `true` | If 429 received, automatically wait `Retry‑After` and retry once. | - -**Exit codes** - -| Code | Meaning | -| ---- | ------------------------------------------- | -| 0 | Scan OK, policy passed | -| 1 | Vulnerabilities ≥ threshold OR policy block | -| 2 | Internal error (network etc.) | - ---- - -### 3.2 `stella‑zastava` - -> *Daemon / K8s DaemonSet* – watch container runtime, push SBOMs. - -Core flags (excerpt): - -| Flag | Purpose | -| ---------------- | ---------------------------------- | -| `--mode` | `listen` (default) / `enforce` | -| `--filter-image` | Regex; ignore infra/busybox images | -| `--threads` | Worker pool size | - ---- - -### 3.3 `stellopsctl` - -> *Admin utility* – policy snapshots, feed status, user CRUD. - -Examples: - -``` -stellopsctl policy export > policies/backup-2025-07-14.yaml -stellopsctl feed refresh # force OSV merge -stellopsctl user add dev-team --role developer -``` - ---- - -## 4 Error Model - -Uniform problem‑details object (RFC 7807): - -```json -{ - "type": "https://stella-ops.org/probs/validation", - "title": "Invalid request", - "status": 400, - "detail": "Layer digest malformed", - "traceId": "00-7c39..." -} -``` - ---- - -## 5 Rate Limits - -Default **40 requests / second / token**. -429 responses include `Retry-After` seconds header. - ---- - -## 6 FAQ & Tips - -* **Skip SBOM generation in CI** – supply a *pre‑built* SBOM and add `?sbom-only=true` to `/scan` for <1 s path. -* **Air‑gapped?** – point `--server` to `http://oukgw:8080` inside the Offline Update Kit. -* **YAML vs Rego** – YAML simpler; Rego unlocks time‑based logic (see samples). -* **Cosign verify plug‑ins** – enable `SCANNER_VERIFY_SIG=true` env to refuse unsigned plug‑ins. - ---- - -## 7 Planned Changes (Beyond 6 Months) - -These stay in *Feature Matrix → To Do* until design is frozen. - -| Epic / Feature | API Impact Sketch | -| ---------------------------- | ---------------------------------- | -| **SLSA L1‑L3** attestation | `/attest` (see §2.4) | -| Rekor transparency log | `/rekor/log/{id}` (GET) | -| Plug‑in Marketplace metadata | `/plugins/market` (catalog) | -| Horizontal scaling controls | `POST /cluster/node` (add/remove) | -| Windows agent support | Update LSAPI to PDE, no API change | - ---- - -## 8 References - -* OpenAPI YAML → `/openapi/v1.yaml` (served by backend) -* OAuth2 spec: -* SLSA spec: - ---- - -## 9 Changelog (truncated) - -* **2025‑07‑14** – added *delta SBOM*, policy import/export, CLI `--sbom-type`. -* **2025‑07‑12** – initial public reference. - ---- +# API & CLI Reference + +*Purpose* – give operators and integrators a single, authoritative spec for REST/GRPC calls **and** first‑party CLI tools (`stella-cli`, `zastava`, `stella`). 
+Everything here is *source‑of‑truth* for generated Swagger/OpenAPI and the `--help` screens in the CLIs. + +--- + +## 0 Quick Glance + +| Area | Call / Flag | Notes | +| ------------------ | ------------------------------------------- | ------------------------------------------------------------------------------ | +| Scan entry | `POST /scan` | Accepts SBOM or image; sub‑5 s target | +| Delta check | `POST /layers/missing` | <20 ms reply; powers *delta SBOM* feature | +| Rate‑limit / quota | — | Headers **`X‑Stella‑Quota‑Remaining`**, **`X‑Stella‑Reset`** on every response | +| Policy I/O | `GET /policy/export`, `POST /policy/import` | YAML now; Rego coming | +| Policy lint | `POST /policy/validate` | Returns 200 OK if ruleset passes | +| Auth | `POST /connect/token` (OpenIddict) | Client‑credentials preferred | +| Health | `GET /healthz` | Simple liveness probe | +| Attestation * | `POST /attest` (TODO Q1‑2026) | SLSA provenance + Rekor log | +| CLI flags | `--sbom-type` `--delta` `--policy-file` | Added to `stella` | + +\* Marked **TODO** → delivered after sixth month (kept on Feature Matrix “To Do” list). + +--- + +## 1 Authentication + +Stella Ops uses **OAuth 2.0 / OIDC** (token endpoint mounted via OpenIddict). + +``` +POST /connect/token +Content‑Type: application/x-www-form-urlencoded + +grant_type=client_credentials& +client_id=ci‑bot& +client_secret=REDACTED& +scope=stella.api +``` + +Successful response: + +```json +{ + "access_token": "eyJraWQi...", + "token_type": "Bearer", + "expires_in": 3600 +} +``` + +> **Tip** – pass the token via `Authorization: Bearer ` on every call. + +--- + +## 2 REST API + +### 2.0 Obtain / Refresh Offline‑Token + +```text +POST /token/offline +Authorization: Bearer +``` + +| Body field | Required | Example | Notes | +|------------|----------|---------|-------| +| `expiresDays` | no | `30` | Max 90 days | + +```json +{ + "jwt": "eyJhbGciOiJSUzI1NiIsInR5cCI6...", + "expires": "2025‑08‑17T00:00:00Z" +} +``` + +Token is signed with the backend’s private key and already contains +`"maxScansPerDay": {{ quota_token }}`. + + +### 2.1 Scan – Upload SBOM **or** Image + +``` +POST /scan +``` + +| Param / Header | In | Required | Description | +| -------------------- | ------ | -------- | --------------------------------------------------------------------- | +| `X‑Stella‑Sbom‑Type` | header | no | `trivy-json-v2`, `spdx-json`, `cyclonedx-json`; omitted ➞ auto‑detect | +| `?threshold` | query | no | `low`, `medium`, `high`, `critical`; default **critical** | +| body | body | yes | *Either* SBOM JSON *or* Docker image tarball/upload URL | + +Every successful `/scan` response now includes: + +| Header | Example | +|--------|---------| +| `X‑Stella‑Quota‑Remaining` | `129` | +| `X‑Stella‑Reset` | `2025‑07‑18T23:59:59Z` | +| `X‑Stella‑Token‑Expires` | `2025‑08‑17T00:00:00Z` | + +**Response 200** (scan completed): + +```json +{ + "digest": "sha256:…", + "summary": { + "Critical": 0, + "High": 3, + "Medium": 12, + "Low": 41 + }, + "policyStatus": "pass", + "quota": { + "remaining": 131, + "reset": "2025-07-18T00:00:00Z" + } +} +``` + +**Response 202** – queued; polling URL in `Location` header. + +--- + +### 2.2 Delta SBOM – Layer Cache Check + +``` +POST /layers/missing +Content‑Type: application/json +Authorization: Bearer +``` + +```json +{ + "layers": [ + "sha256:d38b...", + "sha256:af45..." + ] +} +``` + +**Response 200** — <20 ms target: + +```json +{ + "missing": [ + "sha256:af45..." 
+ ] +} +``` + +Client then generates SBOM **only** for the `missing` layers and re‑posts `/scan`. + +--- + +### 2.3 Policy Endpoints *(preview feature flag: `scanner.features.enablePolicyPreview`)* + +All policy APIs require **`scanner.reports`** scope (or anonymous access while auth is disabled). + +**Fetch schema** + +``` +GET /api/v1/policy/schema +Authorization: Bearer +Accept: application/schema+json +``` + +Returns the embedded `policy-schema@1` JSON schema used by the binder. + +**Run diagnostics** + +``` +POST /api/v1/policy/diagnostics +Content-Type: application/json +Authorization: Bearer +``` + +```json +{ + "policy": { + "format": "yaml", + "actor": "cli", + "description": "dev override", + "content": "version: \"1.0\"\nrules:\n - name: Quiet Dev\n environments: [dev]\n action:\n type: ignore\n justification: dev waiver\n" + } +} +``` + +**Response 200**: + +```json +{ + "success": false, + "version": "1.0", + "ruleCount": 1, + "errorCount": 0, + "warningCount": 1, + "generatedAt": "2025-10-19T03:25:14.112Z", + "issues": [ + { "code": "policy.rule.quiet.missing_vex", "message": "Quiet flag ignored: rule must specify requireVex justifications.", "severity": "Warning", "path": "$.rules[0]" } + ], + "recommendations": [ + "Review policy warnings and ensure intentional overrides are documented." + ] +} +``` + +`success` is `false` when blocking issues remain; recommendations aggregate YAML ignore rules, VEX include/exclude hints, and vendor precedence guidance. + +**Preview impact** + +``` +POST /api/v1/policy/preview +Authorization: Bearer +Content-Type: application/json +``` + +```json +{ + "imageDigest": "sha256:abc123", + "findings": [ + { "id": "finding-1", "severity": "Critical", "source": "NVD" } + ], + "policy": { + "format": "yaml", + "content": "version: \"1.0\"\nrules:\n - name: Block Critical\n severity: [Critical]\n action: block\n" + } +} +``` + +**Response 200**: + +```json +{ + "success": true, + "policyDigest": "9c5e...", + "revisionId": "preview", + "changed": 1, + "diffs": [ + { + "findingId": "finding-1", + "baseline": {"findingId": "finding-1", "status": "Pass"}, + "projected": { + "findingId": "finding-1", + "status": "Blocked", + "ruleName": "Block Critical", + "ruleAction": "Block", + "score": 5.0, + "configVersion": "1.0", + "inputs": {"severityWeight": 5.0} + }, + "changed": true + } + ], + "issues": [] +} +``` + +- Provide `policy` to preview staged changes; omit it to compare against the active snapshot. +- Baseline verdicts are optional; when omitted, the API synthesises pass baselines before computing diffs. +- Quieted verdicts include `quietedBy` and `quiet` flags; score inputs now surface reachability/vendor trust weights (`reachability.*`, `trustWeight.*`). + +**OpenAPI**: the full API document (including these endpoints) is exposed at `/openapi/v1.json` and can be fetched for tooling or contract regeneration. + +### 2.4 Scanner – Queue a Scan Job *(SP9 milestone)* + +``` +POST /api/v1/scans +Authorization: Bearer +Content-Type: application/json +``` + +```json +{ + "image": { + "reference": "registry.example.com/acme/app:1.2.3" + }, + "force": false, + "clientRequestId": "ci-build-1845", + "metadata": { + "pipeline": "github", + "trigger": "pull-request" + } +} +``` + +| Field | Required | Notes | +| ------------------- | -------- | ------------------------------------------------------------------------------------------------ | +| `image.reference` | no\* | Full repo/tag (`registry/repo:tag`). 
Provide **either** `reference` or `digest` (sha256:…). | +| `image.digest` | no\* | OCI digest (e.g. `sha256:…`). | +| `force` | no | `true` forces a re-run even if an identical scan (`scanId`) already exists. Default **false**. | +| `clientRequestId` | no | Free-form string surfaced in audit logs. | +| `metadata` | no | Optional string map stored with the job and surfaced in observability feeds. | + +\* At least one of `image.reference` or `image.digest` must be supplied. + +**Response 202** – job accepted (idempotent): + +```http +HTTP/1.1 202 Accepted +Location: /api/v1/scans/2f6c17f9b3f548e2a28b9c412f4d63f8 +``` + +```json +{ + "scanId": "2f6c17f9b3f548e2a28b9c412f4d63f8", + "status": "Pending", + "location": "/api/v1/scans/2f6c17f9b3f548e2a28b9c412f4d63f8", + "created": true +} +``` + +- `scanId` is deterministic – resubmitting an identical payload returns the same identifier with `"created": false`. +- API is cancellation-aware; aborting the HTTP request cancels the submission attempt. +- Required scope: **`scanner.scans.enqueue`**. + +**Response 400** – validation problem (`Content-Type: application/problem+json`) when both `image.reference` and `image.digest` are blank. + +### 2.5 Scanner – Fetch Scan Status + +``` +GET /api/v1/scans/{scanId} +Authorization: Bearer +Accept: application/json +``` + +**Response 200**: + +```json +{ + "scanId": "2f6c17f9b3f548e2a28b9c412f4d63f8", + "status": "Pending", + "image": { + "reference": "registry.example.com/acme/app:1.2.3", + "digest": null + }, + "createdAt": "2025-10-18T20:15:12.482Z", + "updatedAt": "2025-10-18T20:15:12.482Z", + "failureReason": null +} +``` + +Statuses: `Pending`, `Running`, `Succeeded`, `Failed`, `Cancelled`. + +### 2.6 Scanner – Stream Progress (SSE / JSONL) + +``` +GET /api/v1/scans/{scanId}/events?format=sse|jsonl +Authorization: Bearer +Accept: text/event-stream +``` + +When `format` is omitted the endpoint emits **Server-Sent Events** (SSE). Specify `format=jsonl` to receive newline-delimited JSON (`application/x-ndjson`). Response headers include `Cache-Control: no-store` and `X-Accel-Buffering: no` so intermediaries avoid buffering the stream. + +**SSE frame** (default): + +``` +id: 1 +event: pending +data: {"scanId":"2f6c17f9b3f548e2a28b9c412f4d63f8","sequence":1,"state":"Pending","message":"queued","timestamp":"2025-10-19T03:12:45.118Z","correlationId":"2f6c17f9b3f548e2a28b9c412f4d63f8:0001","data":{"force":false,"meta.pipeline":"github"}} +``` + +**JSONL frame** (`format=jsonl`): + +```json +{"scanId":"2f6c17f9b3f548e2a28b9c412f4d63f8","sequence":1,"state":"Pending","message":"queued","timestamp":"2025-10-19T03:12:45.118Z","correlationId":"2f6c17f9b3f548e2a28b9c412f4d63f8:0001","data":{"force":false,"meta.pipeline":"github"}} +``` + +- `sequence` is monotonic starting at `1`. +- `correlationId` is deterministic (`{scanId}:{sequence:0000}`) unless a custom identifier is supplied by the publisher. +- `timestamp` is ISO‑8601 UTC with millisecond precision, ensuring deterministic ordering for consumers. +- The stream completes when the client disconnects or the coordinator stops publishing events. + +### 2.7 Scanner – Assemble Report (Signed Envelope) + +``` +POST /api/v1/reports +Authorization: Bearer +Content-Type: application/json +``` + +Request body mirrors policy preview inputs (image digest plus findings). The service evaluates the active policy snapshot, assembles a verdict, and signs the canonical report payload. 
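+A request sketch is shown below. It reuses the preview input shape from §2.3 (`imageDigest` plus a `findings` array); treat the exact field set and the sample severities as assumptions and consult `/openapi/v1.json` for the authoritative schema.
+
+```json
+{
+  "imageDigest": "sha256:7dbe0c9a5d4f1c8184007e9d94dbe55928f8a2db5ab9c1c2d4a2f7bbcdfe1234",
+  "findings": [
+    { "id": "library:pkg/openssl@1.1.1w", "severity": "Critical", "source": "NVD" },
+    { "id": "library:pkg/zlib@1.3.1", "severity": "High", "source": "NVD" }
+  ]
+}
+```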
+ +**Response 200**: + +```json +{ + "report": { + "reportId": "report-9f8cde21aab54321", + "imageDigest": "sha256:7dbe0c9a5d4f1c8184007e9d94dbe55928f8a2db5ab9c1c2d4a2f7bbcdfe1234", + "generatedAt": "2025-10-23T15:32:22Z", + "verdict": "blocked", + "policy": { + "revisionId": "rev-42", + "digest": "8a0f72f8dc5c51c46991db3bba34e9b3c0c8e944a7a6d0a9c29a9aa6b8439876" + }, + "summary": { "total": 2, "blocked": 1, "warned": 1, "ignored": 0, "quieted": 0 }, + "verdicts": [ + { + "findingId": "library:pkg/openssl@1.1.1w", + "status": "Blocked", + "ruleName": "Block vendor unknowns", + "ruleAction": "block", + "notes": "Unknown vendor telemetry — medium confidence band.", + "score": 19.5, + "configVersion": "1.0", + "inputs": { + "severityWeight": 50, + "trustWeight": 0.65, + "reachabilityWeight": 0.6, + "baseScore": 19.5, + "trustWeight.vendor": 0.65, + "reachability.unknown": 0.6, + "unknownConfidence": 0.55, + "unknownAgeDays": 5 + }, + "quietedBy": null, + "quiet": false, + "unknownConfidence": 0.55, + "confidenceBand": "medium", + "unknownAgeDays": 5, + "sourceTrust": "vendor", + "reachability": "unknown" + }, + { + "findingId": "library:pkg/zlib@1.3.1", + "status": "Warned", + "ruleName": "Runtime mitigation required", + "ruleAction": "warn", + "notes": "Runtime reachable unknown — mitigation window required.", + "score": 18.75, + "configVersion": "1.0", + "inputs": { + "severityWeight": 75, + "trustWeight": 1, + "reachabilityWeight": 0.45, + "baseScore": 33.75, + "reachability.runtime": 0.45, + "warnPenalty": 15, + "unknownConfidence": 0.35, + "unknownAgeDays": 13 + }, + "quietedBy": null, + "quiet": false, + "unknownConfidence": 0.35, + "confidenceBand": "medium", + "unknownAgeDays": 13, + "sourceTrust": "NVD", + "reachability": "runtime" + } + ], + "issues": [] + }, + "dsse": { + "payloadType": "application/vnd.stellaops.report+json", + "payload": "eyJyZXBvcnQiOnsicmVwb3J0SWQiOiJyZXBvcnQtOWY4Y2RlMjFhYWI1NDMyMSJ9fQ==", + "signatures": [ + { + "keyId": "scanner-report-signing", + "algorithm": "hs256", + "signature": "MEQCIGHscnJ2bm9wYXlsb2FkZXIAIjANBgkqhkiG9w0BAQsFAAOCAQEASmFja3Nvbk1ldGE=" + } + ] + } +} +``` + +- The `report` object omits null fields and is deterministic (ISO timestamps, sorted keys) while surfacing `unknownConfidence`, `confidenceBand`, and `unknownAgeDays` for auditability. +- `dsse` follows the DSSE (Dead Simple Signing Envelope) shape; `payload` is the canonical UTF-8 JSON and `signatures[0].signature` is the base64 HMAC/Ed25519 value depending on configuration. +- Full offline samples live at `samples/policy/policy-report-unknown.json` (request + response) and `samples/api/reports/report-sample.dsse.json` (envelope fixture) for tooling tests or signature verification. + +**Response 404** – `application/problem+json` payload with type `https://stellaops.org/problems/not-found` when the scan identifier is unknown. + +> **Tip** – poll `Location` from the submission call until `status` transitions away from `Pending`/`Running`. 
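+A minimal polling sketch for the tip above, assuming `curl` and `jq` are available; `BASE_URL`, `TOKEN`, and `LOCATION` are placeholder shell variables captured from the submission response, not part of the API:
+
+```bash
+# Poll the scan status endpoint (see §2.5) until it leaves Pending/Running.
+while :; do
+  status=$(curl -sf -H "Authorization: Bearer $TOKEN" "$BASE_URL$LOCATION" | jq -r '.status')
+  echo "scan status: $status"
+  case "$status" in
+    Pending|Running) sleep 5 ;;
+    *) break ;;
+  esac
+done
+```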
+ +```yaml +# Example import payload (YAML) +version: "1.0" +rules: + - name: Ignore Low dev + severity: [Low, None] + environments: [dev, staging] + action: ignore +``` + +Validation errors come back as: + +```json +{ + "errors": [ + { + "path": "$.rules[0].severity", + "msg": "Invalid level 'None'" + } + ] +} +``` + +```json +# Preview response excerpt +{ + "success": true, + "policyDigest": "9c5e...", + "revisionId": "rev-12", + "changed": 1, + "diffs": [ + { + "baseline": {"findingId": "finding-1", "status": "pass"}, + "projected": {"findingId": "finding-1", "status": "blocked", "ruleName": "Block Critical"}, + "changed": true + } + ] +} +``` + +--- + +### 2.4 Attestation (Planned – Q1‑2026) + +``` +POST /attest +``` + +| Param | Purpose | +| ----------- | ------------------------------------- | +| body (JSON) | SLSA v1.0 provenance doc | +| | Signed + stored in local Rekor mirror | + +Returns `202 Accepted` and `Location: /attest/{id}` for async verify. + +--- + +### 2.8 Runtime – Ingest Observer Events *(SCANNER-RUNTIME-12-301)* + +``` +POST /api/v1/runtime/events +Authorization: Bearer +Content-Type: application/json +``` + +| Requirement | Details | +|-------------|---------| +| Auth scope | `scanner.runtime.ingest` | +| Batch size | ≤ **256** envelopes (`scanner.runtime.maxBatchSize`, configurable) | +| Payload cap | ≤ **1 MiB** serialized JSON (`scanner.runtime.maxPayloadBytes`) | +| Rate limits | Per-tenant and per-node token buckets (default 200 events/s tenant, 50 events/s node, burst 200) – excess returns **429** with `Retry-After`. | +| TTL | Runtime events retained **45 days** by default (`scanner.runtime.eventTtlDays`). | + +**Request body** + +```json +{ + "batchId": "node-a-2025-10-20T15:03:12Z", + "events": [ + { + "schemaVersion": "zastava.runtime.event@v1", + "event": { + "eventId": "evt-2f9c02b8", + "when": "2025-10-20T15:03:08Z", + "kind": "ContainerStart", + "tenant": "tenant-alpha", + "node": "cluster-a/node-01", + "runtime": { "engine": "containerd", "version": "1.7.19" }, + "workload": { + "platform": "kubernetes", + "namespace": "payments", + "pod": "api-7c9fbbd8b7-ktd84", + "container": "api", + "containerId": "containerd://bead5...", + "imageRef": "ghcr.io/acme/api@sha256:deadbeef" + }, + "process": { "pid": 12345, "entrypoint": ["/start.sh", "--serve"], "buildId": "5f0c7c3c..." }, + "loadedLibs": [ + { "path": "/lib/x86_64-linux-gnu/libssl.so.3", "inode": 123456, "sha256": "abc123..." } + ], + "posture": { "imageSigned": true, "sbomReferrer": "present" }, + "delta": { "baselineImageDigest": "sha256:deadbeef" }, + "evidence": [ { "signal": "proc.maps", "value": "libssl.so.3@0x7f..." } ], + "annotations": { "observerVersion": "1.0.0" } + } + } + ] +} +``` + +**Responses** + +| Code | Body | Notes | +|------|------|-------| +| `202 Accepted` | `{ "accepted": 128, "duplicates": 2 }` | Batch persisted; duplicates are ignored via unique `eventId`. | +| `400 Bad Request` | Problem+JSON | Validation failures – empty batch, duplicate IDs, unsupported schema version, payload too large. | +| `429 Too Many Requests` | Problem+JSON | Per-tenant/node rate limit exceeded; `Retry-After` header emitted in seconds. | + +Persisted documents capture the canonical envelope (`payload` field), tenant/node metadata, and set an automatic TTL on `expiresAt`. Observers should retry rejected batches with exponential backoff honouring the provided `Retry-After` hint. 
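+A hedged retry sketch for observers; the host name, `$TOKEN`, and `$BATCH_FILE` are placeholders, and only the endpoint path plus the 202/429 semantics come from the table above:
+
+```bash
+# Retry a rejected batch with exponential backoff, honouring Retry-After on 429.
+attempt=0
+until [ "$attempt" -ge 5 ]; do
+  code=$(curl -s -o /dev/null -D headers.txt -w '%{http_code}' \
+    -H "Authorization: Bearer $TOKEN" -H "Content-Type: application/json" \
+    --data @"$BATCH_FILE" "https://scanner.example.org/api/v1/runtime/events")
+  [ "$code" = "202" ] && break
+  retry_after=$(awk -F': ' 'tolower($1) == "retry-after" { print $2 }' headers.txt | tr -d '\r')
+  sleep "${retry_after:-$((2 ** attempt))}"
+  attempt=$((attempt + 1))
+done
+```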
+ +--- + +## 3 StellaOps CLI (`stellaops-cli`) + +The new CLI is built on **System.CommandLine 2.0.0‑beta5** and mirrors the Concelier backend REST API. +Configuration follows the same precedence chain everywhere: + +1. Environment variables (e.g. `API_KEY`, `STELLAOPS_BACKEND_URL`, `StellaOps:ApiKey`) +2. `appsettings.json` → `appsettings.local.json` +3. `appsettings.yaml` → `appsettings.local.yaml` +4. Defaults (`ApiKey = ""`, `BackendUrl = ""`, cache folders under the current working directory) + +**Authority auth client resilience settings** + +| Setting | Environment variable | Default | Purpose | +|---------|----------------------|---------|---------| +| `StellaOps:Authority:Resilience:EnableRetries` | `STELLAOPS_AUTHORITY_ENABLE_RETRIES` | `true` | Toggle Polly wait-and-retry handlers for discovery/token calls | +| `StellaOps:Authority:Resilience:RetryDelays` | `STELLAOPS_AUTHORITY_RETRY_DELAYS` | `1s,2s,5s` | Comma/space-separated backoff sequence (HH:MM:SS) | +| `StellaOps:Authority:Resilience:AllowOfflineCacheFallback` | `STELLAOPS_AUTHORITY_ALLOW_OFFLINE_CACHE_FALLBACK` | `true` | Reuse cached discovery/JWKS metadata when Authority is temporarily unreachable | +| `StellaOps:Authority:Resilience:OfflineCacheTolerance` | `STELLAOPS_AUTHORITY_OFFLINE_CACHE_TOLERANCE` | `00:10:00` | Additional tolerance window added to the discovery/JWKS cache lifetime | + +See `docs/dev/32_AUTH_CLIENT_GUIDE.md` for recommended profiles (online vs. air-gapped) and testing guidance. + +| Command | Purpose | Key Flags / Arguments | Notes | +|---------|---------|-----------------------|-------| +| `stellaops-cli scanner download` | Fetch and install scanner container | `--channel ` (default `stable`)
`--output `
`--overwrite`
`--no-install` | Saves artefact under `ScannerCacheDirectory`, verifies digest/signature, and executes `docker load` unless `--no-install` is supplied. | +| `stellaops-cli scan run` | Execute scanner container against a directory (auto-upload) | `--target ` (required)
`--runner ` (default from config)
`--entry `
`[scanner-args...]` | Runs the scanner, writes results into `ResultsDirectory`, emits a structured `scan-run-*.json` metadata file, and automatically uploads the artefact when the exit code is `0`. | +| `stellaops-cli scan upload` | Re-upload existing scan artefact | `--file ` | Useful for retries when automatic upload fails or when operating offline. | +| `stellaops-cli db fetch` | Trigger connector jobs | `--source ` (e.g. `redhat`, `osv`)
`--stage ` (default `fetch`)
`--mode ` | Translates to `POST /jobs/source:{source}:{stage}` with `trigger=cli` | +| `stellaops-cli db merge` | Run canonical merge reconcile | — | Calls `POST /jobs/merge:reconcile`; exit code `0` on acceptance, `1` on failures/conflicts | +| `stellaops-cli db export` | Kick JSON / Trivy exports | `--format ` (default `json`)
`--delta`
`--publish-full/--publish-delta`
`--bundle-full/--bundle-delta` | Sets `{ delta = true }` parameter when requested and can override ORAS/bundle toggles per run | +| `stellaops-cli auth ` | Manage cached tokens for StellaOps Authority | `auth login --force` (ignore cache)
`auth status`
`auth whoami` | Uses `StellaOps.Auth.Client`; honours `StellaOps:Authority:*` configuration, stores tokens under `~/.stellaops/tokens` by default, and `whoami` prints subject/scope/expiry | +| `stellaops-cli auth revoke export` | Export the Authority revocation bundle | `--output ` (defaults to CWD) | Writes `revocation-bundle.json`, `.json.jws`, and `.json.sha256`; verifies the digest locally and includes key metadata in the log summary. | +| `stellaops-cli auth revoke verify` | Validate a revocation bundle offline | `--bundle ` `--signature ` `--key `
`--verbose` | Verifies detached JWS signatures, reports the computed SHA-256, and can fall back to cached JWKS when `--key` is omitted. | +| `stellaops-cli offline kit pull` | Download the latest offline kit bundle and manifest | `--bundle-id ` (optional)
`--destination `
`--overwrite`
`--no-resume` | Streams the bundle + manifest from the configured mirror/backend, resumes interrupted downloads, verifies SHA-256, and writes signatures plus a `.metadata.json` manifest alongside the artefacts. | +| `stellaops-cli offline kit import` | Upload an offline kit bundle to the backend | `` (argument)
`--manifest `
`--bundle-signature `
`--manifest-signature ` | Validates digests when metadata is present, then posts multipart payloads to `POST /api/offline-kit/import`; logs the submitted import ID/status for air-gapped rollout tracking. | +| `stellaops-cli offline kit status` | Display imported offline kit details | `--json` | Shows bundle id/kind, captured/imported timestamps, digests, and component versions; `--json` emits machine-readable output for scripting. | +| `stellaops-cli sources ingest --dry-run` | Dry-run guard validation for individual payloads | `--source `
`--input `
`--tenant `
`--format table\|json`
`--output ` | Normalises gzip/base64 payloads, invokes `api/aoc/ingest/dry-run`, and maps guard failures to deterministic `ERR_AOC_00x` exit codes. | +| `stellaops-cli aoc verify` | Replay AOC guardrails over stored documents | `--since `
`--limit `
`--sources `
`--codes `
`--format table\|json`
`--export ` | Summarises checked counts/violations, supports JSON evidence exports, and returns `0`, `11…17`, `18`, `70`, or `71` depending on guard outcomes. | +| `stellaops-cli config show` | Display resolved configuration | — | Masks secret values; helpful for air‑gapped installs | +| `stellaops-cli runtime policy test` | Ask Scanner.WebService for runtime verdicts (Webhook parity) | `--image/-i ` (repeatable, comma/space lists supported)
`--file/-f `
`--namespace/--ns `
`--label/-l key=value` (repeatable)
`--json` | Posts to `POST /api/v1/scanner/policy/runtime`, deduplicates image digests, and prints TTL/policy revision plus per-image columns for signed state, SBOM referrers, quieted-by metadata, confidence, Rekor attestation (uuid + verified flag), and recently observed build IDs (shortened for readability). Accepts newline/whitespace-delimited stdin when piped; `--json` emits the raw response without additional logging. | + +#### Example: Pivot from runtime verdicts to debug symbols + +```bash +$ stellaops-cli runtime policy test \ + --image ghcr.io/acme/payments@sha256:4f7d55f6... \ + --namespace payments + +Image Digest Signed SBOM Build IDs TTL +ghcr.io/acme/payments@sha256:4f7d55f6... yes present 5f0c7c3c..., 1122aabbccddeeff... 04:59:55 +``` + +1. Copy one of the hashes (e.g. `5f0c7c3cb4d9f8a4f1c1d5c6b7e8f90123456789`) and locate the bundled debug artefact: + ```bash + ls offline-kit/debug/.build-id/5f/0c7c3cb4d9f8a4f1c1d5c6b7e8f90123456789.debug + ``` +2. Confirm the running binary advertises the same GNU build-id: + ```bash + readelf -n /proc/$(pgrep -f payments-api | head -n1)/exe | grep -i 'Build ID' + ``` +3. If you operate a debuginfod mirror backed by the Offline Kit tree, resolve symbols with: + ```bash + debuginfod-find debuginfo 5f0c7c3cb4d9f8a4f1c1d5c6b7e8f90123456789 >/tmp/payments-api.debug + ``` + +See [Offline Kit step 0](24_OFFLINE_KIT.md#0-prepare-the-debug-store) for instructions on mirroring the debug store before packaging. + +`POST /api/v1/scanner/policy/runtime` responds with one entry per digest. Each result now includes: + +- `policyVerdict` (`pass|warn|fail|error`), `signed`, and `hasSbomReferrers` parity with the webhook contract. +- `confidence` (0-1 double) derived from canonical `PolicyPreviewService` evaluation and `quieted`/`quietedBy` flags for muted findings. +- `rekor` block carrying `uuid`, `url`, and the attestor-backed `verified` boolean when Rekor inclusion proofs have been confirmed. +- `metadata` (stringified JSON) capturing runtime heuristics, policy issues, evaluated findings, and timestamps for downstream audit. +- `buildIds` (array) lists up to three distinct GNU build-id hashes recently observed for that digest so debuggers can derive `/usr/lib/debug/.build-id//.debug` paths for symbol stores. + +When running on an interactive terminal without explicit override flags, the CLI uses Spectre.Console prompts to let you choose per-run ORAS/offline bundle behaviour. + +Runtime verdict output reflects the SCANNER-RUNTIME-12-302 contract sign-off (quieted provenance, confidence band, attestation verification). CLI-RUNTIME-13-008 now mirrors those fields in both table and `--json` formats. + +**Startup diagnostics** + +- `stellaops-cli` now loads Authority plug-in manifests during startup (respecting `Authority:Plugins:*`) and surfaces analyzer warnings when a plug-in weakens the baseline password policy (minimum length **12** and all character classes required). +- Follow the log entry’s config path and raise `passwordPolicy.minimumLength` to at least 12 while keeping `requireUppercase`, `requireLowercase`, `requireDigit`, and `requireSymbol` set to `true` to clear the warning; weakened overrides are treated as actionable security deviations. + +**Logging & exit codes** + +- Structured logging via `Microsoft.Extensions.Logging` with single-line console output (timestamps in UTC). +- `--verbose / -v` raises log level to `Debug`. 
+- Command exit codes bubble up: backend conflict → `1`, cancelled via `CTRL+C` → `130`, scanner exit codes propagate as-is. + +**Artifact validation** + +- Downloads are verified against the `X-StellaOps-Digest` header (SHA-256). When `StellaOps:ScannerSignaturePublicKeyPath` points to a PEM-encoded RSA key, the optional `X-StellaOps-Signature` header is validated as well. +- Metadata for each bundle is written alongside the artefact (`*.metadata.json`) with digest, signature, source URL, and timestamps. +- Retry behaviour is controlled via `StellaOps:ScannerDownloadAttempts` (default **3** with exponential backoff). +- Successful `scan run` executions create timestamped JSON artefacts inside `ResultsDirectory` plus a `scan-run-*.json` metadata envelope documenting the runner, arguments, timing, and stdout/stderr. The artefact is posted back to Concelier automatically. + +#### Trivy DB export metadata (`metadata.json`) + +`stellaops-cli db export --format trivy-db` (and the backing `POST /jobs/export:trivy-db`) always emits a `metadata.json` document in the OCI layout root. Operators consuming the bundle or delta updates should inspect the following fields: + +| Field | Type | Purpose | +| ----- | ---- | ------- | +| `mode` | `full` \| `delta` | Indicates whether the current run rebuilt the entire database (`full`) or only the changed files (`delta`). | +| `baseExportId` | string? | Export ID of the last full baseline that the delta builds upon. Only present for `mode = delta`. | +| `baseManifestDigest` | string? | SHA-256 digest of the manifest belonging to the baseline OCI layout. | +| `resetBaseline` | boolean | `true` when the exporter rotated the baseline (e.g., repo change, delta chain reset). Treat as a full refresh. | +| `treeDigest` | string | Canonical SHA-256 digest of the JSON tree used to build the database. | +| `treeBytes` | number | Total bytes across exported JSON files. | +| `advisoryCount` | number | Count of advisories included in the export. | +| `exporterVersion` | string | Version stamp of `StellaOps.Concelier.Exporter.TrivyDb`. | +| `builder` | object? | Raw metadata emitted by `trivy-db build` (version, update cadence, etc.). | +| `delta.changedFiles[]` | array | Present when `mode = delta`. Each entry lists `{ "path": "", "length": , "digest": "sha256:..." }`. | +| `delta.removedPaths[]` | array | Paths that existed in the previous manifest but were removed in the new run. | + +When the planner opts for a delta run, the exporter copies unmodified blobs from the baseline layout identified by `baseManifestDigest`. Consumers that cache OCI blobs only need to fetch the `changedFiles` and the new manifest/metadata unless `resetBaseline` is true. +When pushing to ORAS, set `concelier:exporters:trivyDb:oras:publishFull` / `publishDelta` to control whether full or delta runs are copied to the registry. Offline bundles follow the analogous `includeFull` / `includeDelta` switches under `offlineBundle`. + +Example configuration (`appsettings.yaml`): + +```yaml +concelier: + exporters: + trivyDb: + oras: + enabled: true + publishFull: true + publishDelta: false + offlineBundle: + enabled: true + includeFull: true + includeDelta: false +``` + + +**Authentication** + +- API key is sent as `Authorization: Bearer ` automatically when configured. +- Anonymous operation is permitted only when Concelier runs with + `authority.allowAnonymousFallback: true`. This flag is temporary—plan to disable + it before **2025-12-31 UTC** so bearer tokens become mandatory. 
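+A hedged configuration sketch for that cut-over (the key name is taken from the bullet above; the surrounding nesting depends on your Concelier deployment profile):
+
+```yaml
+authority:
+  allowAnonymousFallback: false   # require bearer tokens ahead of the 2025-12-31 UTC deadline
+```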
+ +Authority-backed auth workflow: +1. Configure Authority settings via config or env vars (see sample below). Minimum fields: `Url`, `ClientId`, and either `ClientSecret` (client credentials) or `Username`/`Password` (password grant). +2. Run `stellaops-cli auth login` to acquire and cache a token. Use `--force` if you need to ignore an existing cache entry. +3. Execute CLI commands as normal—the backend client injects the cached bearer token automatically and retries on transient 401/403 responses with operator guidance. +4. Inspect the cache with `stellaops-cli auth status` (shows expiry, scope, mode) or clear it via `stellaops-cli auth logout`. +5. Run `stellaops-cli auth whoami` to dump token subject, audience, issuer, scopes, and remaining lifetime (verbose mode prints additional claims). +6. Expect Concelier to emit audit logs for each `/jobs*` request showing `subject`, + `clientId`, `scopes`, `status`, and whether network bypass rules were applied. + +Tokens live in `~/.stellaops/tokens` unless `StellaOps:Authority:TokenCacheDirectory` overrides it. Cached tokens are reused offline until they expire; the CLI surfaces clear errors if refresh fails. + +For offline workflows, configure `StellaOps:Offline:KitsDirectory` (or `STELLAOPS_OFFLINE_KITS_DIR`) to control where bundles, manifests, and metadata are stored, and `StellaOps:Offline:KitMirror` (or `STELLAOPS_OFFLINE_MIRROR_URL`) to override the download base URL when pulling from a mirror. + +**Configuration file template** + +```jsonc +{ + "StellaOps": { + "ApiKey": "your-api-token", + "BackendUrl": "https://concelier.example.org", + "ScannerCacheDirectory": "scanners", + "ResultsDirectory": "results", + "DefaultRunner": "docker", + "ScannerSignaturePublicKeyPath": "", + "ScannerDownloadAttempts": 3, + "Offline": { + "KitsDirectory": "offline-kits", + "KitMirror": "https://get.stella-ops.org/ouk/" + }, + "Authority": { + "Url": "https://authority.example.org", + "ClientId": "concelier-cli", + "ClientSecret": "REDACTED", + "Username": "", + "Password": "", + "Scope": "concelier.jobs.trigger advisory:ingest advisory:read", + "TokenCacheDirectory": "" + } + } +} +``` + +Drop `appsettings.local.json` or `.yaml` beside the binary to override per environment. + +--- + +### 2.5 Misc Endpoints + +| Path | Method | Description | +| ---------- | ------ | ---------------------------- | +| `/healthz` | GET | Liveness; returns `"ok"` | +| `/metrics` | GET | Prometheus exposition (OTel) | +| `/version` | GET | Git SHA + build date | + +--- + +### 2.6 Authority Admin APIs + +Administrative endpoints live under `/internal/*` on the Authority host and require the bootstrap API key (`x-stellaops-bootstrap-key`). Responses are deterministic and audited via `AuthEventRecord`. + +| Path | Method | Description | +| ---- | ------ | ----------- | +| `/internal/revocations/export` | GET | Returns the revocation bundle (JSON + detached JWS + digest). Mirrors the output of `stellaops-cli auth revoke export`. | +| `/internal/signing/rotate` | POST | Promotes a new signing key and marks the previous key as retired without restarting the service. | + +**Rotate request body** + +```json +{ + "keyId": "authority-signing-2025", + "location": "../certificates/authority-signing-2025.pem", + "source": "file", + "provider": "default" +} +``` + +The API responds with the active `kid`, previous key (if any), and the set of retired key identifiers. Always export a fresh revocation bundle after rotation so downstream mirrors receive signatures from the new key. 
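+A hedged operational sketch for rotation followed by a fresh revocation export; `$BOOTSTRAP_KEY` is a placeholder and the response handling is simplified (the export also ships a detached JWS and digest, mirroring `stellaops-cli auth revoke export`):
+
+```bash
+# 1. Promote the new signing key without restarting Authority.
+curl -sf -X POST "https://authority.example.org/internal/signing/rotate" \
+  -H "x-stellaops-bootstrap-key: $BOOTSTRAP_KEY" \
+  -H "Content-Type: application/json" \
+  -d '{"keyId":"authority-signing-2025","location":"../certificates/authority-signing-2025.pem","source":"file","provider":"default"}'
+
+# 2. Export a fresh revocation bundle so downstream mirrors see signatures from the new key.
+curl -sf "https://authority.example.org/internal/revocations/export" \
+  -H "x-stellaops-bootstrap-key: $BOOTSTRAP_KEY" \
+  -o revocation-bundle.json
+```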
+ +--- + +## 3 First‑Party CLI Tools + +### 3.1 `stella` + +> *Package SBOM + Scan + Exit code* – designed for CI. + +``` +Usage: stella [OPTIONS] IMAGE_OR_SBOM +``` + +| Flag / Option | Default | Description | +| --------------- | ----------------------- | -------------------------------------------------- | +| `--server` | `http://localhost:8080` | API root | +| `--token` | *env `STELLA_TOKEN`* | Bearer token | +| `--sbom-type` | *auto* | Force `trivy-json-v2`/`spdx-json`/`cyclonedx-json` | +| `--delta` | `false` | Enable delta layer optimisation | +| `--policy-file` | *none* | Override server rules with local YAML/Rego | +| `--threshold` | `critical` | Fail build if ≥ level found | +| `--output-json` | *none* | Write raw scan result to file | +| `--wait-quota` | `true` | If 429 received, automatically wait `Retry‑After` and retry once. | + +**Exit codes** + +| Code | Meaning | +| ---- | ------------------------------------------- | +| 0 | Scan OK, policy passed | +| 1 | Vulnerabilities ≥ threshold OR policy block | +| 2 | Internal error (network etc.) | + +--- + +### 3.2 `stella‑zastava` + +> *Daemon / K8s DaemonSet* – watch container runtime, push SBOMs. + +Core flags (excerpt): + +| Flag | Purpose | +| ---------------- | ---------------------------------- | +| `--mode` | `listen` (default) / `enforce` | +| `--filter-image` | Regex; ignore infra/busybox images | +| `--threads` | Worker pool size | + +--- + +### 3.3 `stellopsctl` + +> *Admin utility* – policy snapshots, feed status, user CRUD. + +Examples: + +``` +stellopsctl policy export > policies/backup-2025-07-14.yaml +stellopsctl feed refresh # force OSV merge +stellopsctl user add dev-team --role developer +``` + +--- + +## 4 Error Model + +Uniform problem‑details object (RFC 7807): + +```json +{ + "type": "https://stella-ops.org/probs/validation", + "title": "Invalid request", + "status": 400, + "detail": "Layer digest malformed", + "traceId": "00-7c39..." +} +``` + +--- + +## 5 Rate Limits + +Default **40 requests / second / token**. +429 responses include `Retry-After` seconds header. + +--- + +## 6 FAQ & Tips + +* **Skip SBOM generation in CI** – supply a *pre‑built* SBOM and add `?sbom-only=true` to `/scan` for <1 s path. +* **Air‑gapped?** – point `--server` to `http://oukgw:8080` inside the Offline Update Kit. +* **YAML vs Rego** – YAML simpler; Rego unlocks time‑based logic (see samples). +* **Cosign verify plug‑ins** – enable `SCANNER_VERIFY_SIG=true` env to refuse unsigned plug‑ins. + +--- + +## 7 Planned Changes (Beyond 6 Months) + +These stay in *Feature Matrix → To Do* until design is frozen. + +| Epic / Feature | API Impact Sketch | +| ---------------------------- | ---------------------------------- | +| **SLSA L1‑L3** attestation | `/attest` (see §2.4) | +| Rekor transparency log | `/rekor/log/{id}` (GET) | +| Plug‑in Marketplace metadata | `/plugins/market` (catalog) | +| Horizontal scaling controls | `POST /cluster/node` (add/remove) | +| Windows agent support | Update LSAPI to PDE, no API change | + +--- + +## 8 References + +* OpenAPI YAML → `/openapi/v1.yaml` (served by backend) +* OAuth2 spec: +* SLSA spec: + +--- + +## 9 Changelog (truncated) + +* **2025‑07‑14** – added *delta SBOM*, policy import/export, CLI `--sbom-type`. +* **2025‑07‑12** – initial public reference. 
+ +--- diff --git a/docs/10_CONCELIER_CLI_QUICKSTART.md b/docs/10_CONCELIER_CLI_QUICKSTART.md index a159bb98..c4fdc7b6 100644 --- a/docs/10_CONCELIER_CLI_QUICKSTART.md +++ b/docs/10_CONCELIER_CLI_QUICKSTART.md @@ -45,7 +45,7 @@ runtime wiring, CLI usage) and leaves connector/internal customization for later 4. Start the web service from the repository root: ```bash - dotnet run --project src/StellaOps.Concelier.WebService + dotnet run --project src/Concelier/StellaOps.Concelier.WebService ``` On startup Concelier validates the options, boots MongoDB indexes, loads plug-ins, @@ -94,7 +94,7 @@ Rollout checkpoints for the two Authority toggles: ## 2 · Configure the CLI The CLI reads configuration from JSON/YAML files *and* environment variables. The -defaults live in `src/StellaOps.Cli/appsettings.json` and expect overrides at runtime. +defaults live in `src/Cli/StellaOps.Cli/appsettings.json` and expect overrides at runtime. | Setting | Environment variable | Default | Purpose | | ------- | -------------------- | ------- | ------- | @@ -123,12 +123,12 @@ export STELLAOPS_AUTHORITY_URL="https://authority.local" export STELLAOPS_AUTHORITY_CLIENT_ID="concelier-cli" export STELLAOPS_AUTHORITY_CLIENT_SECRET="s3cr3t" export STELLAOPS_AUTHORITY_SCOPE="concelier.jobs.trigger advisory:ingest advisory:read" -dotnet run --project src/StellaOps.Cli -- db merge +dotnet run --project src/Cli/StellaOps.Cli -- db merge # Acquire a bearer token and confirm cache state -dotnet run --project src/StellaOps.Cli -- auth login -dotnet run --project src/StellaOps.Cli -- auth status -dotnet run --project src/StellaOps.Cli -- auth whoami +dotnet run --project src/Cli/StellaOps.Cli -- auth login +dotnet run --project src/Cli/StellaOps.Cli -- auth status +dotnet run --project src/Cli/StellaOps.Cli -- auth whoami ``` Refer to `docs/dev/32_AUTH_CLIENT_GUIDE.md` for deeper guidance on tuning retry/offline settings and rollout checklists. @@ -143,31 +143,31 @@ rely on environment variables for ephemeral runners. 1. **Trigger connector fetch stages** ```bash - dotnet run --project src/StellaOps.Cli -- db fetch --source osv --stage fetch - dotnet run --project src/StellaOps.Cli -- db fetch --source osv --stage parse - dotnet run --project src/StellaOps.Cli -- db fetch --source osv --stage map + dotnet run --project src/Cli/StellaOps.Cli -- db fetch --source osv --stage fetch + dotnet run --project src/Cli/StellaOps.Cli -- db fetch --source osv --stage parse + dotnet run --project src/Cli/StellaOps.Cli -- db fetch --source osv --stage map ``` Use `--mode resume` when continuing from a previous window: ```bash - dotnet run --project src/StellaOps.Cli -- db fetch --source redhat --stage fetch --mode resume + dotnet run --project src/Cli/StellaOps.Cli -- db fetch --source redhat --stage fetch --mode resume ``` 2. **Merge canonical advisories** ```bash - dotnet run --project src/StellaOps.Cli -- db merge + dotnet run --project src/Cli/StellaOps.Cli -- db merge ``` 3. **Produce exports** ```bash # JSON tree (vuln-list style) - dotnet run --project src/StellaOps.Cli -- db export --format json + dotnet run --project src/Cli/StellaOps.Cli -- db export --format json # Trivy DB (delta example) - dotnet run --project src/StellaOps.Cli -- db export --format trivy-db --delta + dotnet run --project src/Cli/StellaOps.Cli -- db export --format trivy-db --delta ``` Concelier always produces a deterministic OCI layout. The first run after a clean @@ -207,13 +207,13 @@ rely on environment variables for ephemeral runners. 
```bash export STELLA_TENANT="${STELLA_TENANT:-tenant-a}" - dotnet run --project src/StellaOps.Cli -- aoc verify \ + dotnet run --project src/Cli/StellaOps.Cli -- aoc verify \ --since 24h \ --format table \ --tenant "$STELLA_TENANT" # Optional: capture JSON evidence for pipelines/audits - dotnet run --project src/StellaOps.Cli -- aoc verify \ + dotnet run --project src/Cli/StellaOps.Cli -- aoc verify \ --since 7d \ --limit 100 \ --format json \ @@ -244,9 +244,9 @@ rely on environment variables for ephemeral runners. 6. **Manage scanners (optional)** ```bash - dotnet run --project src/StellaOps.Cli -- scanner download --channel stable - dotnet run --project src/StellaOps.Cli -- scan run --entry scanners/latest/Scanner.dll --target ./sboms - dotnet run --project src/StellaOps.Cli -- scan upload --file results/scan-001.json + dotnet run --project src/Cli/StellaOps.Cli -- scanner download --channel stable + dotnet run --project src/Cli/StellaOps.Cli -- scan run --entry scanners/latest/Scanner.dll --target ./sboms + dotnet run --project src/Cli/StellaOps.Cli -- scan upload --file results/scan-001.json ``` Add `--verbose` to any command for structured console logs. All commands honour diff --git a/docs/11_AUTHORITY.md b/docs/11_AUTHORITY.md index aaf00014..0a95b978 100644 --- a/docs/11_AUTHORITY.md +++ b/docs/11_AUTHORITY.md @@ -1,380 +1,380 @@ -# StellaOps Authority Service - -> **Status:** Drafted 2025-10-12 (CORE5B.DOC / DOC1.AUTH) – aligns with Authority revocation store, JWKS rotation, and bootstrap endpoints delivered in Sprint 1. - -## 1. Purpose -The **StellaOps Authority** service issues OAuth2/OIDC tokens for every StellaOps module (Concelier, Backend, Agent, Zastava) and exposes the policy controls required in sovereign/offline environments. Authority is built as a minimal ASP.NET host that: - -- brokers password, client-credentials, and device-code flows through pluggable identity providers; -- persists access/refresh/device tokens in MongoDB with deterministic schemas for replay analysis and air-gapped audit copies; -- distributes revocation bundles and JWKS material so downstream services can enforce lockouts without direct database access; -- offers bootstrap APIs for first-run provisioning and key rotation without redeploying binaries. - -Authority is deployed alongside Concelier in air-gapped environments and never requires outbound internet access. All trusted metadata (OpenIddict discovery, JWKS, revocation bundles) is cacheable, signed, and reproducible. - -## 2. Component Architecture -Authority is composed of five cooperating subsystems: - -1. **Minimal API host** – configures OpenIddict endpoints (`/token`, `/authorize`, `/revoke`, `/jwks`), publishes the OpenAPI contract at `/.well-known/openapi`, and enables structured logging/telemetry. Rate limiting hooks (`AuthorityRateLimiter`) wrap every request. -2. **Plugin host** – loads `StellaOps.Authority.Plugin.*.dll` assemblies, applies capability metadata, and exposes password/client provisioning surfaces through dependency injection. -3. **Mongo storage** – persists tokens, revocations, bootstrap invites, and plugin state in deterministic collections indexed for offline sync (`authority_tokens`, `authority_revocations`, etc.). -4. **Cryptography layer** – `StellaOps.Cryptography` abstractions manage password hashing, signing keys, JWKS export, and detached JWS generation. -5. 
**Offline ops APIs** – internal endpoints under `/internal/*` provide administrative flows (bootstrap users/clients, revocation export) guarded by API keys and deterministic audit events. - -A high-level sequence for password logins: - -``` -Client -> /token (password grant) - -> Rate limiter & audit hooks - -> Plugin credential store (Argon2id verification) - -> Token persistence (Mongo authority_tokens) - -> Response (access/refresh tokens + deterministic claims) -``` - -## 3. Token Lifecycle & Persistence -Authority persists every issued token in MongoDB so operators can audit or revoke without scanning distributed caches. - -- **Collection:** `authority_tokens` -- **Key fields:** -- `tokenId`, `type` (`access_token`, `refresh_token`, `device_code`, `authorization_code`) -- `subjectId`, `clientId`, ordered `scope` array -- `tenant` (lower-cased tenant hint from the issuing client, omitted for global clients) - -### Console OIDC client - -- **Client ID**: `console-web` -- **Grants**: `authorization_code` (PKCE required), `refresh_token` -- **Audience**: `console` -- **Scopes**: `openid`, `profile`, `email`, `advisory:read`, `vex:read`, `aoc:verify`, `findings:read`, `orch:read`, `vuln:read` -- **Redirect URIs** (defaults): `https://console.stella-ops.local/oidc/callback` -- **Post-logout redirect**: `https://console.stella-ops.local/` -- **Tokens**: Access tokens inherit the global 2 minute lifetime; refresh tokens remain short-lived (30 days) and can be exchanged silently via `/token`. -- **Roles**: Assign Authority role `Orch.Viewer` (exposed to tenants as `role/orch-viewer`) when operators need read-only access to Orchestrator telemetry via Console dashboards. Policy Studio ships dedicated roles (`role/policy-author`, `role/policy-reviewer`, `role/policy-approver`, `role/policy-operator`, `role/policy-auditor`) that align with the new `policy:*` scope family; issue them per tenant so audit trails remain scoped. - -Configuration sample (`etc/authority.yaml.sample`) seeds the client with a confidential secret so Console can negotiate the code exchange on the backend while browsers execute the PKCE dance. - -### Console Authority endpoints - -- `/console/tenants` — Requires `authority:tenants.read`; returns the tenant catalogue for the authenticated principal. Requests lacking the `X-Stella-Tenant` header are rejected (`tenant_header_missing`) and logged. -- `/console/profile` — Requires `ui.read`; exposes subject metadata (roles, scopes, audiences) and indicates whether the session is within the five-minute fresh-auth window. -- `/console/token/introspect` — Requires `ui.read`; introspects the active access token so the SPA can prompt for re-authentication before privileged actions. - -All endpoints demand DPoP-bound tokens and propagate structured audit events (`authority.console.*`). Gateways must forward the `X-Stella-Tenant` header derived from the access token; downstream services rely on the same value for isolation. Keep Console access tokens short-lived (default 15 minutes) and enforce the fresh-auth window for admin actions (`ui.admin`, `authority:*`, `policy:activate`, `exceptions:approve`). -- `status` (`valid`, `revoked`, `expired`), `createdAt`, optional `expiresAt` -- `revokedAt`, machine-readable `revokedReason`, optional `revokedReasonDescription` -- `revokedMetadata` (string dictionary for plugin-specific context) -- **Persistence flow:** `PersistTokensHandler` stamps missing JWT IDs, normalises scopes, and stores every principal emitted by OpenIddict. 
-- **Revocation flow:** `AuthorityTokenStore.UpdateStatusAsync` flips status, records the reason metadata, and is invoked by token revocation handlers and plugin provisioning events (e.g., disabling a user). -- **Expiry maintenance:** `AuthorityTokenStore.DeleteExpiredAsync` prunes non-revoked tokens past their `expiresAt` timestamp. Operators should schedule this in maintenance windows if large volumes of tokens are issued. - -### Expectations for resource servers -Resource servers (Concelier WebService, Backend, Agent) **must not** assume in-memory caches are authoritative. They should: - -- cache `/jwks` and `/revocations/export` responses within configured lifetimes; -- honour `revokedReason` metadata when shaping audit trails; -- treat `status != "valid"` or missing tokens as immediate denial conditions. -- propagate the `tenant` claim (`X-Stella-Tenant` header in REST calls) and reject requests when the tenant supplied by Authority does not match the resource server's scope; Concelier and Excititor guard endpoints refuse cross-tenant tokens. - -### Tenant propagation - -- Client provisioning (bootstrap or plug-in) accepts a `tenant` hint. Authority normalises the value (`trim().ToLowerInvariant()`) and persists it alongside the registration. Clients without an explicit tenant remain global. -- Issued principals include the `stellaops:tenant` claim. `PersistTokensHandler` mirrors this claim into `authority_tokens.tenant`, enabling per-tenant revocation and reporting. -- Rate limiter metadata now tags requests with `authority.tenant`, unlocking per-tenant throughput metrics and diagnostic filters. Audit events (`authority.client_credentials.grant`, `authority.password.grant`, bootstrap flows) surface the tenant and login attempt documents index on `{tenant, occurredAt}` for quick queries. -- Client credentials that request `advisory:ingest`, `advisory:read`, `vex:ingest`, `vex:read`, `signals:read`, `signals:write`, `signals:admin`, or `aoc:verify` now fail fast when the client registration lacks a tenant hint. Issued tokens are re-validated against persisted tenant metadata, and Authority rejects any cross-tenant replay (`invalid_client`/`invalid_token`), ensuring aggregation-only workloads remain tenant-scoped. -- Client credentials that request `export.viewer`, `export.operator`, or `export.admin` must provide a tenant hint. Requests for `export.admin` also need accompanying `export_reason` and `export_ticket` parameters; Authority returns `invalid_request` when either value is missing and records the denial in token audit events. -- Policy Studio scopes (`policy:author`, `policy:review`, `policy:approve`, `policy:operate`, `policy:audit`, `policy:simulate`, `policy:run`, `policy:activate`) require a tenant assignment; Authority rejects tokens missing the hint with `invalid_client` and records `scope.invalid` metadata for auditing. -- **AOC pairing guardrails** – Tokens that request `advisory:read`, `vex:read`, or any `signals:*` scope must also request `aoc:verify`. Authority rejects mismatches with `invalid_scope` (`Scope 'aoc:verify' is required when requesting advisory/vex read scopes.` or `Scope 'aoc:verify' is required when requesting signals scopes.`) so automation surfaces deterministic errors. -- **Signals ingestion guardrails** – Sensors and services requesting `signals:write`/`signals:admin` must also request `aoc:verify`; Authority records the `authority.aoc_scope_violation` tag when the pairing is missing so operators can trace failing sensors immediately. 
-- Password grant flows reuse the client registration's tenant and enforce the configured scope allow-list. Requested scopes outside that list (or mismatched tenants) trigger `invalid_scope`/`invalid_client` failures, ensuring cross-tenant access is denied before token issuance. - -### Default service scopes - -| Client ID | Purpose | Scopes granted | Sender constraint | Tenant | -|----------------------|---------------------------------------|--------------------------------------|-------------------|-----------------| -| `concelier-ingest` | Concelier raw advisory ingestion | `advisory:ingest`, `advisory:read` | `dpop` | `tenant-default` | -| `excitor-ingest` | Excititor raw VEX ingestion | `vex:ingest`, `vex:read` | `dpop` | `tenant-default` | -| `aoc-verifier` | Aggregation-only contract verification | `aoc:verify`, `advisory:read`, `vex:read` | `dpop` | `tenant-default` | -| `cartographer-service` | Graph snapshot construction | `graph:write`, `graph:read` | `dpop` | `tenant-default` | -| `graph-api` | Graph Explorer gateway/API | `graph:read`, `graph:export`, `graph:simulate` | `dpop` | `tenant-default` | -| `export-center-operator` | Export Center operator automation | `export.viewer`, `export.operator` | `dpop` | `tenant-default` | -| `export-center-admin` | Export Center administrative automation | `export.viewer`, `export.operator`, `export.admin` | `dpop` | `tenant-default` | -| `vuln-explorer-ui` | Vuln Explorer UI/API | `vuln:read` | `dpop` | `tenant-default` | -| `signals-uploader` | Reachability sensor ingestion | `signals:write`, `signals:read`, `aoc:verify` | `dpop` | `tenant-default` | - -> **Secret hygiene (2025‑10‑27):** The repository includes a convenience `etc/authority.yaml` for compose/helm smoke tests. Every entry’s `secretFile` points to `etc/secrets/*.secret`, which ship with `*-change-me` placeholders—replace them with strong values (and wire them through your vault/secret manager) before issuing tokens in CI, staging, or production. - -For factory provisioning, issue sensors the **SignalsUploader** role template (`signals:write`, `signals:read`, `aoc:verify`). Authority rejects ingestion tokens that omit `aoc:verify`, preserving aggregation-only contract guarantees for reachability signals. - -These registrations are provided as examples in `etc/authority.yaml.sample`. Clone them per tenant (for example `concelier-tenant-a`, `concelier-tenant-b`) so tokens remain tenant-scoped by construction. - -Graph Explorer introduces dedicated scopes: `graph:write` for Cartographer build jobs, `graph:read` for query/read operations, `graph:export` for long-running export downloads, and `graph:simulate` for what-if overlays. Assign only the scopes a client actually needs to preserve least privilege—UI-facing clients should typically request read/export access, while background services (Cartographer, Scheduler) require write privileges. - -#### Least-privilege guidance for graph clients - -- **Service identities** – The Cartographer worker should request `graph:write` and `graph:read` only; grant `graph:simulate` exclusively to pipeline automation that invokes Policy Engine overlays on demand. Keep `graph:export` scoped to API gateway components responsible for streaming GraphML/JSONL artifacts. Authority enforces this by rejecting `graph:write` tokens that lack `properties.serviceIdentity: cartographer`. -- **Tenant propagation** – Every client registration must pin a `tenant` hint. 
Authority normalises the value and stamps it into issued tokens (`stellaops:tenant`) so downstream services (Scheduler, Graph API, Console) can enforce tenant isolation without custom headers. Graph scopes (`graph:read`, `graph:write`, `graph:export`, `graph:simulate`) are denied if the tenant hint is missing. -- **SDK alignment** – Use the generated `StellaOpsScopes` constants in service code to request graph scopes. Hard-coded strings risk falling out of sync as additional graph capabilities are added. -- **DPOP for automation** – Maintain sender-constrained (`dpop`) flows for Cartographer and Scheduler to limit reuse of access tokens if a build host is compromised. For UI-facing tokens, pair `graph:read`/`graph:export` with short lifetimes and enforce refresh-token rotation at the gateway. - -#### Export Center scope guardrails - -- **Viewer vs operator** – `export.viewer` grants read-only access to export profiles, manifests, and bundles. Automation that schedules or reruns exports should request `export.operator` (and typically `export.viewer`). Tenant hints remain mandatory; Authority refuses tokens without them. -- **Administrative mutations** – Changes to retention policies, encryption key references, or schedule defaults require `export.admin`. When requesting tokens with this scope, clients must supply `export_reason` and `export_ticket` parameters; Authority persists the values for audit records and rejects missing metadata with `invalid_request`. -- **Operational hygiene** – Rotate `export.admin` credentials infrequently and run them through fresh-auth workflows where possible. Prefer distributing verification tooling with `export.viewer` tokens for day-to-day bundle validation. - -#### Vuln Explorer permalinks - -- **Scope** – `vuln:read` authorises Vuln Explorer to fetch advisory/linkset evidence and issue shareable links. Assign it only to front-end/API clients that must render vulnerability details. -- **Signed links** – `POST /permalinks/vuln` (requires `vuln:read`) accepts `{ "tenant": "tenant-a", "resourceKind": "vulnerability", "state": { ... }, "expiresInSeconds": 86400 }` and returns a JWT (`token`) plus `issuedAt`/`expiresAt`. The token embeds the tenant, requested state, and `vuln:read` scope and is signed with the same Authority signing keys published via `/jwks`. -- **Validation** – Resource servers verify the permalink using cached JWKS: check signature, ensure the tenant matches the current request context, honour the expiry, and enforce the contained `vuln:read` scope. The payload’s `resource.state` block is opaque JSON so UIs can round-trip filters/search terms without new schema changes. - -## 4. Revocation Pipeline -Authority centralises revocation in `authority_revocations` with deterministic categories: - -| Category | Meaning | Required fields | -| --- | --- | --- | -| `token` | Specific OAuth token revoked early. | `revocationId` (token id), `tokenType`, optional `clientId`, `subjectId` | -| `subject` | All tokens for a subject disabled. | `revocationId` (= subject id) | -| `client` | OAuth client registration revoked. | `revocationId` (= client id) | -| `key` | Signing/JWE key withdrawn. | `revocationId` (= key id) | - -`RevocationBundleBuilder` flattens Mongo documents into canonical JSON, sorts entries by (`category`, `revocationId`, `revokedAt`), and signs exports using detached JWS (RFC 7797) with cosign-compatible headers. 
- -**Export surfaces** (deterministic output, suitable for Offline Kit): - -- CLI: `stella auth revoke export --output ./out` writes `revocation-bundle.json`, `.jws`, `.sha256`. -- Verification: `stella auth revoke verify --bundle --signature --key ` validates detached JWS signatures before distribution, selecting the crypto provider advertised in the detached header (see `docs/security/revocation-bundle.md`). -- API: `GET /internal/revocations/export` (requires bootstrap API key) returns the same payload. -- Verification: `stella auth revoke verify` validates schema, digest, and detached JWS using cached JWKS or offline keys, automatically preferring the hinted provider (libsodium builds honour `provider=libsodium`; other builds fall back to the managed provider). - -**Consumer guidance:** - -1. Mirror `revocation-bundle.json*` alongside Concelier exports. Offline agents fetch both over the existing update channel. -2. Use bundle `sequence` and `bundleId` to detect replay or monotonicity regressions. Ignore bundles with older sequence numbers unless `bundleId` changes and `issuedAt` advances. -3. Treat `revokedReason` taxonomy as machine-friendly codes (`compromised`, `rotation`, `policy`, `lifecycle`). Translating to human-readable logs is the consumer’s responsibility. - -## 5. Signing Keys & JWKS Rotation -Authority signs revocation bundles and publishes JWKS entries via the new signing manager: - -- **Configuration (`authority.yaml`):** - ```yaml - signing: - enabled: true - algorithm: ES256 # Defaults to ES256 - keySource: file # Loader identifier (file, vault, etc.) - provider: default # Optional preferred crypto provider - activeKeyId: authority-signing-dev - keyPath: "../certificates/authority-signing-dev.pem" - additionalKeys: - - keyId: authority-signing-dev-2024 - path: "../certificates/authority-signing-dev-2024.pem" - source: "file" - ``` -- **Sources:** The default loader supports PEM files relative to the content root; additional loaders can be registered via `IAuthoritySigningKeySource`. -- **Providers:** Keys are registered against the `ICryptoProviderRegistry`, so alternative implementations (HSM, libsodium) can be plugged in without changing host code. -- **OpenAPI discovery:** `GET /.well-known/openapi` returns the published authentication contract (JSON by default, YAML when requested). Responses include `X-StellaOps-Service`, `X-StellaOps-Api-Version`, `X-StellaOps-Build-Version`, plus grant and scope headers, and honour conditional requests via `ETag`/`If-None-Match`. -- **JWKS output:** `GET /jwks` lists every signing key with `status` metadata (`active`, `retired`). Old keys remain until operators remove them from configuration, allowing verification of historical bundles/tokens. - -### Rotation SOP (no downtime) -1. Generate a new P-256 private key (PEM) on an offline workstation and place it where the Authority host can read it (e.g., `../certificates/authority-signing-2025.pem`). -2. Call the authenticated admin API: - ```bash - curl -sS -X POST https://authority.example.com/internal/signing/rotate \ - -H "x-stellaops-bootstrap-key: ${BOOTSTRAP_KEY}" \ - -H "Content-Type: application/json" \ - -d '{ - "keyId": "authority-signing-2025", - "location": "../certificates/authority-signing-2025.pem", - "source": "file" - }' - ``` -3. Verify the response reports the previous key as retired and fetch `/jwks` to confirm the new `kid` appears with `status: "active"`. -4. 
Persist the old key path in `signing.additionalKeys` (the rotation API updates in-memory options; rewrite the YAML to match so restarts remain consistent). -5. If you prefer automation, trigger the `.gitea/workflows/authority-key-rotation.yml` workflow with the new `keyId`/`keyPath`; it wraps `ops/authority/key-rotation.sh` and reads environment-specific secrets. The older key will be marked `retired` and appended to `signing.additionalKeys`. -6. Re-run `stella auth revoke export` so revocation bundles are signed with the new key. Downstream caches should refresh JWKS within their configured lifetime (`StellaOpsAuthorityOptions.Signing` + client cache tolerance). - -The rotation API leverages the same cryptography abstractions as revocation signing; no restart is required and the previous key is marked `retired` but kept available for verification. - -## 6. Bootstrap & Administrative Endpoints -Administrative APIs live under `/internal/*` and require the bootstrap API key plus rate-limiter compliance. - -| Endpoint | Method | Description | -| --- | --- | --- | -| `/internal/users` | `POST` | Provision initial administrative accounts through the registered password-capable plug-in. Emits structured audit events. | -| `/internal/clients` | `POST` | Provision OAuth clients (client credentials / device code). | -| `/internal/revocations/export` | `GET` | Export revocation bundle + detached JWS + digest. | -| `/internal/signing/rotate` | `POST` | Promote a new signing key (see SOP above). Request body accepts `keyId`, `location`, optional `source`, `algorithm`, `provider`, and metadata. | - -All administrative calls emit `AuthEventRecord` entries enriched with correlation IDs, PII tags, and network metadata for offline SOC ingestion. - -> **Tenant hint:** include a `tenant` entry inside `properties` when bootstrapping clients. Authority normalises the value, stores it on the registration, and stamps future tokens/audit events with the tenant. - -### Bootstrap client example - -```jsonc -POST /internal/clients -{ - "clientId": "concelier", - "confidential": true, - "displayName": "Concelier Backend", - "allowedGrantTypes": ["client_credentials"], - "allowedScopes": ["concelier.jobs.trigger", "advisory:ingest", "advisory:read"], - "properties": { - "tenant": "tenant-default" - } -} -``` - -For environments with multiple tenants, repeat the call per tenant-specific client (e.g. `concelier-tenant-a`, `concelier-tenant-b`) or append suffixes to the client identifier. - -### Aggregation-only verification tokens - -- Issue a dedicated client (e.g. `aoc-verifier`) with the scopes `aoc:verify`, `advisory:read`, and `vex:read` for each tenant that runs guard checks. Authority refuses to mint tokens for these scopes unless the client registration provides a tenant hint. -- The CLI (`stella aoc verify --tenant `) and Console verification panel both call `/aoc/verify` on Concelier and Excititor. Tokens that omit the tenant claim or present a tenant that does not match the stored registration are rejected with `invalid_client`/`invalid_token`. -- Audit: `authority.client_credentials.grant` entries record `scope.invalid="aoc:verify"` when requests are rejected because the tenant hint is missing or mismatched. - -### Exception approvals & routing - -- New scopes `exceptions:read`, `exceptions:write`, and `exceptions:approve` govern access to the exception lifecycle. Map these via tenant roles (`exceptions-service`, `exceptions-approver`) as described in `/docs/security/authority-scopes.md`. 
-- Configure approval routing in `authority.yaml` with declarative templates. Each template exposes an `authorityRouteId` for downstream services (Policy Engine, Console) and an optional `requireMfa` flag: - -```yaml -exceptions: - routingTemplates: - - id: "secops" - authorityRouteId: "approvals/secops" - requireMfa: true - description: "Security Operations approval chain" - - id: "governance" - authorityRouteId: "approvals/governance" - requireMfa: false - description: "Non-production waiver review" -``` - -- Clients requesting exception scopes must include a tenant assignment. Authority rejects client-credential flows that request `exceptions:*` with `invalid_client` and logs `scope.invalid="exceptions:write"` (or the requested scope) in `authority.client_credentials.grant` audit events when the tenant hint is missing. -- When any configured routing template sets `requireMfa: true`, user-facing tokens that contain `exceptions:approve` must be acquired through an MFA-capable identity provider. Password/OIDC flows that lack MFA support are rejected with `authority.password.grant` audit events where `reason="Exception approval scope requires an MFA-capable identity provider."` -- Update interactive clients (Console) to request `exceptions:read` by default and elevate to `exceptions:approve` only inside fresh-auth workflows for approvers. Documented examples live in `etc/authority.yaml.sample`. -- Verification responses map guard failures to `ERR_AOC_00x` codes and Authority emits `authority.client_credentials.grant` + `authority.token.validate_access` audit records containing the tenant and scopes so operators can trace who executed a run. -- For air-gapped or offline replicas, pre-issue verification tokens per tenant and rotate them alongside ingest credentials; the guard endpoints never mutate data and remain safe to expose through the offline kit schedule. - -## 7. Configuration Reference - -| Section | Key | Description | Notes | -| --- | --- | --- | --- | -| Root | `issuer` | Absolute HTTPS issuer advertised to clients. | Required. Loopback HTTP allowed only for development. | -| Tokens | `accessTokenLifetime`, `refreshTokenLifetime`, etc. | Lifetimes for each grant (access, refresh, device, authorization code, identity). | Enforced during issuance; persisted on each token document. | -| Storage | `storage.connectionString` | MongoDB connection string. | Required even for tests; offline kits ship snapshots for seeding. | -| Signing | `signing.enabled` | Enable JWKS/revocation signing. | Disable only for development. | -| Signing | `signing.algorithm` | Signing algorithm identifier. | Currently ES256; additional curves can be wired through crypto providers. | -| Signing | `signing.keySource` | Loader identifier (`file`, `vault`, custom). | Determines which `IAuthoritySigningKeySource` resolves keys. | -| Signing | `signing.keyPath` | Relative/absolute path understood by the loader. | Stored as-is; rotation request should keep it in sync with filesystem layout. | -| Signing | `signing.activeKeyId` | Active JWKS / revocation signing key id. | Exposed as `kid` in JWKS and bundles. | -| Signing | `signing.additionalKeys[].keyId` | Retired key identifier retained for verification. | Manager updates this automatically after rotation; keep YAML aligned. | -| Signing | `signing.additionalKeys[].source` | Loader identifier per retired key. | Defaults to `signing.keySource` if omitted. | -| Security | `security.rateLimiting` | Fixed-window limits for `/token`, `/authorize`, `/internal/*`. 
| See `docs/security/rate-limits.md` for tuning. | -| Bootstrap | `bootstrap.apiKey` | Shared secret required for `/internal/*`. | Only required when `bootstrap.enabled` is true. | - -### 7.1 Sender-constrained clients (DPoP & mTLS) - -Authority now understands two flavours of sender-constrained OAuth clients: - -- **DPoP proof-of-possession** – clients sign a `DPoP` header for `/token` requests. Authority validates the JWK thumbprint, HTTP method/URI, and replay window, then stamps the resulting access token with `cnf.jkt` so downstream services can verify the same key is reused. - - Configure under `security.senderConstraints.dpop`. `allowedAlgorithms`, `proofLifetime`, and `replayWindow` are enforced at validation time. - - `security.senderConstraints.dpop.nonce.enabled` enables nonce challenges for high-value audiences (`requiredAudiences`, normalised to case-insensitive strings). When a nonce is required but missing or expired, `/token` replies with `WWW-Authenticate: DPoP error="use_dpop_nonce"` (and, when available, a fresh `DPoP-Nonce` header). Clients must retry with the issued nonce embedded in the proof. - - `security.senderConstraints.dpop.nonce.store` selects `memory` (default) or `redis`. When `redis` is configured, set `security.senderConstraints.dpop.nonce.redisConnectionString` so replicas share nonce issuance and high-value clients avoid replay gaps during failover. - - Example (enabling Redis-backed nonces; adjust audiences per deployment): - ```yaml - security: - senderConstraints: - dpop: - enabled: true - proofLifetime: "00:02:00" - replayWindow: "00:05:00" - allowedAlgorithms: [ "ES256", "ES384" ] - nonce: - enabled: true - ttl: "00:10:00" - maxIssuancePerMinute: 120 - store: "redis" - redisConnectionString: "redis://authority-redis:6379?ssl=false" - requiredAudiences: - - "signer" - - "attestor" - ``` - Operators can override any field via environment variables (e.g. `STELLAOPS_AUTHORITY__SECURITY__SENDERCONSTRAINTS__DPOP__NONCE__STORE=redis`). - - Declare client `audiences` in bootstrap manifests or plug-in provisioning metadata; Authority now defaults the token `aud` claim and `resource` indicator from this list, which is also used to trigger nonce enforcement for audiences such as `signer` and `attestor`. -- **Mutual TLS clients** – client registrations may declare an mTLS binding (`senderConstraint: mtls`). When enabled via `security.senderConstraints.mtls`, Authority validates the presented client certificate against stored bindings (`certificateBindings[]`), optional chain verification, and timing windows. Successful requests embed `cnf.x5t#S256` into the access token (and introspection output) so resource servers can enforce the certificate thumbprint. - - `security.senderConstraints.mtls.enforceForAudiences` forces mTLS whenever the requested `aud`/`resource` (or the client's configured audiences) intersect the configured allow-list (default includes `signer`). Clients configured for different sender constraints are rejected early so operator policy remains consistent. - - Certificate bindings now act as an allow-list: Authority verifies thumbprint, subject, issuer, serial number, and any declared SAN values against the presented certificate, with rotation grace windows applied to `notBefore/notAfter`. Operators can enforce subject regexes, SAN type allow-lists (`dns`, `uri`, `ip`), trusted certificate authorities, and rotation grace via `security.senderConstraints.mtls.*`. 
- -Both modes persist additional metadata in `authority_tokens`: `senderConstraint` records the enforced policy, while `senderKeyThumbprint` stores the DPoP JWK thumbprint or mTLS certificate hash captured at issuance. Downstream services can rely on these fields (and the corresponding `cnf` claim) when auditing offline copies of the token store. - -### 7.2 Policy Engine clients & scopes - -Policy Engine v2 introduces dedicated scopes and a service identity that materialises effective findings. Configure Authority as follows when provisioning policy clients: - -| Client | Scopes | Notes | -| --- | --- | --- | -| `policy-engine` (service) | `policy:run`, `findings:read`, `effective:write` | Must include `properties.serviceIdentity: policy-engine` and a tenant. Authority rejects `effective:write` tokens without the marker or tenant. | -| `policy-cli` / automation | `policy:read`, `policy:author`, `policy:review`, `policy:simulate`, `findings:read` *(optionally add `policy:approve` / `policy:operate` / `policy:activate` for promotion pipelines)* | Keep scopes minimal; reroll CLI/CI tokens issued before 2025‑10‑27 so they drop legacy scope names and adopt the new set. | -| UI/editor sessions | `policy:read`, `policy:author`, `policy:simulate` (+ reviewer/approver/operator scopes as appropriate) | Issue tenant-specific clients so audit and rate limits remain scoped. | - -Sample YAML entry: - -```yaml - - clientId: "policy-engine" - displayName: "Policy Engine Service" - grantTypes: [ "client_credentials" ] - audiences: [ "api://policy-engine" ] - scopes: [ "policy:run", "findings:read", "effective:write" ] - tenant: "tenant-default" - properties: - serviceIdentity: "policy-engine" - senderConstraint: "dpop" - auth: - type: "client_secret" - secretFile: "../secrets/policy-engine.secret" -``` - -Compliance checklist: - -- [ ] `policy-engine` client includes `properties.serviceIdentity: policy-engine` and a tenant hint; logins missing either are rejected. -- [ ] Non-service clients omit `effective:write` and receive only the scopes required for their role (`policy:read`, `policy:author`, `policy:review`, `policy:approve`, `policy:operate`, `policy:simulate`, etc.). -- [ ] Legacy tokens using `policy:write`/`policy:submit`/`policy:edit` are rotated to the new scope set before Production change freeze (see release migration note below). -- [ ] Approval/activation workflows use identities distinct from authoring identities; tenants are provisioned per client to keep telemetry segregated. -- [ ] Operators document reviewer assignments and incident procedures alongside `/docs/security/policy-governance.md` and archive policy evidence bundles (`stella policy bundle export`) with each release. - -### 7.3 Orchestrator roles & scopes - -| Role / Client | Scopes | Notes | -| --- | --- | --- | -| `Orch.Viewer` role | `orch:read` | Read-only access to Orchestrator dashboards, queues, and telemetry. | -| `Orch.Operator` role | `orch:read`, `orch:operate` | Issue short-lived tokens for control actions (pause/resume, retry, sync). Token requests **must** include `operator_reason` (≤256 chars) and `operator_ticket` (≤128 chars); Authority rejects requests missing either value and records both in audit events. | - -Token request example via client credentials: - -```bash -curl -u orch-operator:s3cr3t! 
\ - -d 'grant_type=client_credentials' \ - -d 'scope=orch:operate' \ - -d 'operator_reason=resume source after maintenance' \ - -d 'operator_ticket=INC-2045' \ - https://authority.example.com/token -``` - -Tokens lacking `operator_reason` or `operator_ticket` receive `invalid_request`; audit events (`authority.client_credentials.grant`) surface the supplied values under `request.reason` and `request.ticket` for downstream review. -CLI clients set these parameters via `Authority.OperatorReason` / `Authority.OperatorTicket` (environment variables `STELLAOPS_ORCH_REASON` and `STELLAOPS_ORCH_TICKET`). - -## 8. Offline & Sovereign Operation -- **No outbound dependencies:** Authority only contacts MongoDB and local plugins. Discovery and JWKS are cached by clients with offline tolerances (`AllowOfflineCacheFallback`, `OfflineCacheTolerance`). Operators should mirror these responses for air-gapped use. -- **Structured logging:** Every revocation export, signing rotation, bootstrap action, and token issuance emits structured logs with `traceId`, `client_id`, `subjectId`, and `network.remoteIp` where applicable. Mirror logs to your SIEM to retain audit trails without central connectivity. -- **Determinism:** Sorting rules in token and revocation exports guarantee byte-for-byte identical artefacts given the same datastore state. Hashes and signatures remain stable across machines. - -## 9. Operational Checklist -- [ ] Protect the bootstrap API key and disable bootstrap endpoints (`bootstrap.enabled: false`) once initial setup is complete. -- [ ] Schedule `stella auth revoke export` (or `/internal/revocations/export`) at the same cadence as Concelier exports so bundles remain in lockstep. -- [ ] Rotate signing keys before expiration; keep at least one retired key until all cached bundles/tokens signed with it have expired. -- [ ] Monitor `/health` and `/ready` plus rate-limiter metrics to detect plugin outages early. -- [ ] Ensure downstream services cache JWKS and revocation bundles within tolerances; stale caches risk accepting revoked tokens. - -For plug-in specific requirements, refer to **[Authority Plug-in Developer Guide](dev/31_AUTHORITY_PLUGIN_DEVELOPER_GUIDE.md)**. For revocation bundle validation workflow, see **[Authority Revocation Bundle](security/revocation-bundle.md)**. +# StellaOps Authority Service + +> **Status:** Drafted 2025-10-12 (CORE5B.DOC / DOC1.AUTH) – aligns with Authority revocation store, JWKS rotation, and bootstrap endpoints delivered in Sprint 1. + +## 1. Purpose +The **StellaOps Authority** service issues OAuth2/OIDC tokens for every StellaOps module (Concelier, Backend, Agent, Zastava) and exposes the policy controls required in sovereign/offline environments. Authority is built as a minimal ASP.NET host that: + +- brokers password, client-credentials, and device-code flows through pluggable identity providers; +- persists access/refresh/device tokens in MongoDB with deterministic schemas for replay analysis and air-gapped audit copies; +- distributes revocation bundles and JWKS material so downstream services can enforce lockouts without direct database access; +- offers bootstrap APIs for first-run provisioning and key rotation without redeploying binaries. + +Authority is deployed alongside Concelier in air-gapped environments and never requires outbound internet access. All trusted metadata (OpenIddict discovery, JWKS, revocation bundles) is cacheable, signed, and reproducible. + +## 2. 
Component Architecture
+Authority is composed of five cooperating subsystems:
+
+1. **Minimal API host** – configures OpenIddict endpoints (`/token`, `/authorize`, `/revoke`, `/jwks`), publishes the OpenAPI contract at `/.well-known/openapi`, and enables structured logging/telemetry. Rate limiting hooks (`AuthorityRateLimiter`) wrap every request.
+2. **Plugin host** – loads `StellaOps.Authority.Plugin.*.dll` assemblies, applies capability metadata, and exposes password/client provisioning surfaces through dependency injection.
+3. **Mongo storage** – persists tokens, revocations, bootstrap invites, and plugin state in deterministic collections indexed for offline sync (`authority_tokens`, `authority_revocations`, etc.).
+4. **Cryptography layer** – `StellaOps.Cryptography` abstractions manage password hashing, signing keys, JWKS export, and detached JWS generation.
+5. **Offline ops APIs** – internal endpoints under `/internal/*` provide administrative flows (bootstrap users/clients, revocation export) guarded by API keys and deterministic audit events.
+
+A high-level sequence for password logins:
+
+```
+Client -> /token (password grant)
+  -> Rate limiter & audit hooks
+  -> Plugin credential store (Argon2id verification)
+  -> Token persistence (Mongo authority_tokens)
+  -> Response (access/refresh tokens + deterministic claims)
+```
+
+## 3. Token Lifecycle & Persistence
+Authority persists every issued token in MongoDB so operators can audit or revoke without scanning distributed caches.
+
+- **Collection:** `authority_tokens`
+- **Key fields:**
+  - `tokenId`, `type` (`access_token`, `refresh_token`, `device_code`, `authorization_code`)
+  - `subjectId`, `clientId`, ordered `scope` array
+  - `tenant` (lower-cased tenant hint from the issuing client, omitted for global clients)
+  - `status` (`valid`, `revoked`, `expired`), `createdAt`, optional `expiresAt`
+  - `revokedAt`, machine-readable `revokedReason`, optional `revokedReasonDescription`
+  - `revokedMetadata` (string dictionary for plugin-specific context)
+- **Persistence flow:** `PersistTokensHandler` stamps missing JWT IDs, normalises scopes, and stores every principal emitted by OpenIddict.
+- **Revocation flow:** `AuthorityTokenStore.UpdateStatusAsync` flips status, records the reason metadata, and is invoked by token revocation handlers and plugin provisioning events (e.g., disabling a user).
+- **Expiry maintenance:** `AuthorityTokenStore.DeleteExpiredAsync` prunes non-revoked tokens past their `expiresAt` timestamp. Operators should schedule this in maintenance windows if large volumes of tokens are issued.
+
+### Console OIDC client
+
+- **Client ID**: `console-web`
+- **Grants**: `authorization_code` (PKCE required), `refresh_token`
+- **Audience**: `console`
+- **Scopes**: `openid`, `profile`, `email`, `advisory:read`, `vex:read`, `aoc:verify`, `findings:read`, `orch:read`, `vuln:read`
+- **Redirect URIs** (defaults): `https://console.stella-ops.local/oidc/callback`
+- **Post-logout redirect**: `https://console.stella-ops.local/`
+- **Tokens**: Access tokens inherit the global 2-minute lifetime; refresh tokens remain short-lived (30 days) and can be exchanged silently via `/token`.
+- **Roles**: Assign Authority role `Orch.Viewer` (exposed to tenants as `role/orch-viewer`) when operators need read-only access to Orchestrator telemetry via Console dashboards. Policy Studio ships dedicated roles (`role/policy-author`, `role/policy-reviewer`, `role/policy-approver`, `role/policy-operator`, `role/policy-auditor`) that align with the new `policy:*` scope family; issue them per tenant so audit trails remain scoped.
+
+Configuration sample (`etc/authority.yaml.sample`) seeds the client with a confidential secret so Console can negotiate the code exchange on the backend while browsers execute the PKCE dance.
+
+### Console Authority endpoints
+
+- `/console/tenants` — Requires `authority:tenants.read`; returns the tenant catalogue for the authenticated principal. Requests lacking the `X-Stella-Tenant` header are rejected (`tenant_header_missing`) and logged.
+- `/console/profile` — Requires `ui.read`; exposes subject metadata (roles, scopes, audiences) and indicates whether the session is within the five-minute fresh-auth window.
+- `/console/token/introspect` — Requires `ui.read`; introspects the active access token so the SPA can prompt for re-authentication before privileged actions.
+
+All endpoints demand DPoP-bound tokens and propagate structured audit events (`authority.console.*`). Gateways must forward the `X-Stella-Tenant` header derived from the access token; downstream services rely on the same value for isolation. Keep Console access tokens short-lived (default 15 minutes) and enforce the fresh-auth window for admin actions (`ui.admin`, `authority:*`, `policy:activate`, `exceptions:approve`).
+
+### Expectations for resource servers
+Resource servers (Concelier WebService, Backend, Agent) **must not** assume in-memory caches are authoritative. They should:
+
+- cache `/jwks` and `/revocations/export` responses within configured lifetimes;
+- honour `revokedReason` metadata when shaping audit trails;
+- treat `status != "valid"` or missing tokens as immediate denial conditions;
+- propagate the `tenant` claim (`X-Stella-Tenant` header in REST calls) and reject requests when the tenant supplied by Authority does not match the resource server's scope; Concelier and Excititor guard endpoints refuse cross-tenant tokens.
+
+### Tenant propagation
+
+- Client provisioning (bootstrap or plug-in) accepts a `tenant` hint. Authority normalises the value (`trim().ToLowerInvariant()`) and persists it alongside the registration. Clients without an explicit tenant remain global.
+- Issued principals include the `stellaops:tenant` claim. `PersistTokensHandler` mirrors this claim into `authority_tokens.tenant`, enabling per-tenant revocation and reporting.
+- Rate limiter metadata now tags requests with `authority.tenant`, unlocking per-tenant throughput metrics and diagnostic filters. Audit events (`authority.client_credentials.grant`, `authority.password.grant`, bootstrap flows) surface the tenant, and login-attempt documents index on `{tenant, occurredAt}` for quick queries.
+- Client credentials that request `advisory:ingest`, `advisory:read`, `vex:ingest`, `vex:read`, `signals:read`, `signals:write`, `signals:admin`, or `aoc:verify` now fail fast when the client registration lacks a tenant hint. Issued tokens are re-validated against persisted tenant metadata, and Authority rejects any cross-tenant replay (`invalid_client`/`invalid_token`), ensuring aggregation-only workloads remain tenant-scoped.
+- Client credentials that request `export.viewer`, `export.operator`, or `export.admin` must provide a tenant hint. Requests for `export.admin` also need accompanying `export_reason` and `export_ticket` parameters; Authority returns `invalid_request` when either value is missing and records the denial in token audit events.
+- Policy Studio scopes (`policy:author`, `policy:review`, `policy:approve`, `policy:operate`, `policy:audit`, `policy:simulate`, `policy:run`, `policy:activate`) require a tenant assignment; Authority rejects tokens missing the hint with `invalid_client` and records `scope.invalid` metadata for auditing. +- **AOC pairing guardrails** – Tokens that request `advisory:read`, `vex:read`, or any `signals:*` scope must also request `aoc:verify`. Authority rejects mismatches with `invalid_scope` (`Scope 'aoc:verify' is required when requesting advisory/vex read scopes.` or `Scope 'aoc:verify' is required when requesting signals scopes.`) so automation surfaces deterministic errors. +- **Signals ingestion guardrails** – Sensors and services requesting `signals:write`/`signals:admin` must also request `aoc:verify`; Authority records the `authority.aoc_scope_violation` tag when the pairing is missing so operators can trace failing sensors immediately. +- Password grant flows reuse the client registration's tenant and enforce the configured scope allow-list. Requested scopes outside that list (or mismatched tenants) trigger `invalid_scope`/`invalid_client` failures, ensuring cross-tenant access is denied before token issuance. + +### Default service scopes + +| Client ID | Purpose | Scopes granted | Sender constraint | Tenant | +|----------------------|---------------------------------------|--------------------------------------|-------------------|-----------------| +| `concelier-ingest` | Concelier raw advisory ingestion | `advisory:ingest`, `advisory:read` | `dpop` | `tenant-default` | +| `excitor-ingest` | Excititor raw VEX ingestion | `vex:ingest`, `vex:read` | `dpop` | `tenant-default` | +| `aoc-verifier` | Aggregation-only contract verification | `aoc:verify`, `advisory:read`, `vex:read` | `dpop` | `tenant-default` | +| `cartographer-service` | Graph snapshot construction | `graph:write`, `graph:read` | `dpop` | `tenant-default` | +| `graph-api` | Graph Explorer gateway/API | `graph:read`, `graph:export`, `graph:simulate` | `dpop` | `tenant-default` | +| `export-center-operator` | Export Center operator automation | `export.viewer`, `export.operator` | `dpop` | `tenant-default` | +| `export-center-admin` | Export Center administrative automation | `export.viewer`, `export.operator`, `export.admin` | `dpop` | `tenant-default` | +| `vuln-explorer-ui` | Vuln Explorer UI/API | `vuln:read` | `dpop` | `tenant-default` | +| `signals-uploader` | Reachability sensor ingestion | `signals:write`, `signals:read`, `aoc:verify` | `dpop` | `tenant-default` | + +> **Secret hygiene (2025‑10‑27):** The repository includes a convenience `etc/authority.yaml` for compose/helm smoke tests. Every entry’s `secretFile` points to `etc/secrets/*.secret`, which ship with `*-change-me` placeholders—replace them with strong values (and wire them through your vault/secret manager) before issuing tokens in CI, staging, or production. + +For factory provisioning, issue sensors the **SignalsUploader** role template (`signals:write`, `signals:read`, `aoc:verify`). Authority rejects ingestion tokens that omit `aoc:verify`, preserving aggregation-only contract guarantees for reachability signals. + +These registrations are provided as examples in `etc/authority.yaml.sample`. Clone them per tenant (for example `concelier-tenant-a`, `concelier-tenant-b`) so tokens remain tenant-scoped by construction. 
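+
+As a concrete illustration, a tenant-scoped clone can be provisioned through the `/internal/clients` bootstrap endpoint described in section 6, reusing the `x-stellaops-bootstrap-key` header shown in the signing rotation SOP. The sketch below is a minimal example, not a canonical recipe: it assumes the bootstrap API key is exported as `BOOTSTRAP_KEY` and uses illustrative identifiers (`concelier-tenant-a`, `tenant-a`); adjust scopes, names, and sender-constraint properties to match your deployment.
+
+```bash
+# Clone the concelier-ingest registration for tenant-a (illustrative values only).
+curl -sS -X POST https://authority.example.com/internal/clients \
+  -H "x-stellaops-bootstrap-key: ${BOOTSTRAP_KEY}" \
+  -H "Content-Type: application/json" \
+  -d '{
+        "clientId": "concelier-tenant-a",
+        "confidential": true,
+        "displayName": "Concelier Ingest (tenant-a)",
+        "allowedGrantTypes": ["client_credentials"],
+        "allowedScopes": ["advisory:ingest", "advisory:read"],
+        "properties": { "tenant": "tenant-a" }
+      }'
+```
+
+Tokens minted for such a clone carry `stellaops:tenant: tenant-a`, so cross-tenant replay is rejected as described in the tenant propagation notes above.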
+ +Graph Explorer introduces dedicated scopes: `graph:write` for Cartographer build jobs, `graph:read` for query/read operations, `graph:export` for long-running export downloads, and `graph:simulate` for what-if overlays. Assign only the scopes a client actually needs to preserve least privilege—UI-facing clients should typically request read/export access, while background services (Cartographer, Scheduler) require write privileges. + +#### Least-privilege guidance for graph clients + +- **Service identities** – The Cartographer worker should request `graph:write` and `graph:read` only; grant `graph:simulate` exclusively to pipeline automation that invokes Policy Engine overlays on demand. Keep `graph:export` scoped to API gateway components responsible for streaming GraphML/JSONL artifacts. Authority enforces this by rejecting `graph:write` tokens that lack `properties.serviceIdentity: cartographer`. +- **Tenant propagation** – Every client registration must pin a `tenant` hint. Authority normalises the value and stamps it into issued tokens (`stellaops:tenant`) so downstream services (Scheduler, Graph API, Console) can enforce tenant isolation without custom headers. Graph scopes (`graph:read`, `graph:write`, `graph:export`, `graph:simulate`) are denied if the tenant hint is missing. +- **SDK alignment** – Use the generated `StellaOpsScopes` constants in service code to request graph scopes. Hard-coded strings risk falling out of sync as additional graph capabilities are added. +- **DPOP for automation** – Maintain sender-constrained (`dpop`) flows for Cartographer and Scheduler to limit reuse of access tokens if a build host is compromised. For UI-facing tokens, pair `graph:read`/`graph:export` with short lifetimes and enforce refresh-token rotation at the gateway. + +#### Export Center scope guardrails + +- **Viewer vs operator** – `export.viewer` grants read-only access to export profiles, manifests, and bundles. Automation that schedules or reruns exports should request `export.operator` (and typically `export.viewer`). Tenant hints remain mandatory; Authority refuses tokens without them. +- **Administrative mutations** – Changes to retention policies, encryption key references, or schedule defaults require `export.admin`. When requesting tokens with this scope, clients must supply `export_reason` and `export_ticket` parameters; Authority persists the values for audit records and rejects missing metadata with `invalid_request`. +- **Operational hygiene** – Rotate `export.admin` credentials infrequently and run them through fresh-auth workflows where possible. Prefer distributing verification tooling with `export.viewer` tokens for day-to-day bundle validation. + +#### Vuln Explorer permalinks + +- **Scope** – `vuln:read` authorises Vuln Explorer to fetch advisory/linkset evidence and issue shareable links. Assign it only to front-end/API clients that must render vulnerability details. +- **Signed links** – `POST /permalinks/vuln` (requires `vuln:read`) accepts `{ "tenant": "tenant-a", "resourceKind": "vulnerability", "state": { ... }, "expiresInSeconds": 86400 }` and returns a JWT (`token`) plus `issuedAt`/`expiresAt`. The token embeds the tenant, requested state, and `vuln:read` scope and is signed with the same Authority signing keys published via `/jwks`. +- **Validation** – Resource servers verify the permalink using cached JWKS: check signature, ensure the tenant matches the current request context, honour the expiry, and enforce the contained `vuln:read` scope. 
The payload’s `resource.state` block is opaque JSON so UIs can round-trip filters/search terms without new schema changes. + +## 4. Revocation Pipeline +Authority centralises revocation in `authority_revocations` with deterministic categories: + +| Category | Meaning | Required fields | +| --- | --- | --- | +| `token` | Specific OAuth token revoked early. | `revocationId` (token id), `tokenType`, optional `clientId`, `subjectId` | +| `subject` | All tokens for a subject disabled. | `revocationId` (= subject id) | +| `client` | OAuth client registration revoked. | `revocationId` (= client id) | +| `key` | Signing/JWE key withdrawn. | `revocationId` (= key id) | + +`RevocationBundleBuilder` flattens Mongo documents into canonical JSON, sorts entries by (`category`, `revocationId`, `revokedAt`), and signs exports using detached JWS (RFC 7797) with cosign-compatible headers. + +**Export surfaces** (deterministic output, suitable for Offline Kit): + +- CLI: `stella auth revoke export --output ./out` writes `revocation-bundle.json`, `.jws`, `.sha256`. +- Verification: `stella auth revoke verify --bundle --signature --key ` validates detached JWS signatures before distribution, selecting the crypto provider advertised in the detached header (see `docs/security/revocation-bundle.md`). +- API: `GET /internal/revocations/export` (requires bootstrap API key) returns the same payload. +- Verification: `stella auth revoke verify` validates schema, digest, and detached JWS using cached JWKS or offline keys, automatically preferring the hinted provider (libsodium builds honour `provider=libsodium`; other builds fall back to the managed provider). + +**Consumer guidance:** + +1. Mirror `revocation-bundle.json*` alongside Concelier exports. Offline agents fetch both over the existing update channel. +2. Use bundle `sequence` and `bundleId` to detect replay or monotonicity regressions. Ignore bundles with older sequence numbers unless `bundleId` changes and `issuedAt` advances. +3. Treat `revokedReason` taxonomy as machine-friendly codes (`compromised`, `rotation`, `policy`, `lifecycle`). Translating to human-readable logs is the consumer’s responsibility. + +## 5. Signing Keys & JWKS Rotation +Authority signs revocation bundles and publishes JWKS entries via the new signing manager: + +- **Configuration (`authority.yaml`):** + ```yaml + signing: + enabled: true + algorithm: ES256 # Defaults to ES256 + keySource: file # Loader identifier (file, vault, etc.) + provider: default # Optional preferred crypto provider + activeKeyId: authority-signing-dev + keyPath: "../certificates/authority-signing-dev.pem" + additionalKeys: + - keyId: authority-signing-dev-2024 + path: "../certificates/authority-signing-dev-2024.pem" + source: "file" + ``` +- **Sources:** The default loader supports PEM files relative to the content root; additional loaders can be registered via `IAuthoritySigningKeySource`. +- **Providers:** Keys are registered against the `ICryptoProviderRegistry`, so alternative implementations (HSM, libsodium) can be plugged in without changing host code. +- **OpenAPI discovery:** `GET /.well-known/openapi` returns the published authentication contract (JSON by default, YAML when requested). Responses include `X-StellaOps-Service`, `X-StellaOps-Api-Version`, `X-StellaOps-Build-Version`, plus grant and scope headers, and honour conditional requests via `ETag`/`If-None-Match`. +- **JWKS output:** `GET /jwks` lists every signing key with `status` metadata (`active`, `retired`). 
Old keys remain until operators remove them from configuration, allowing verification of historical bundles/tokens. + +### Rotation SOP (no downtime) +1. Generate a new P-256 private key (PEM) on an offline workstation and place it where the Authority host can read it (e.g., `../certificates/authority-signing-2025.pem`). +2. Call the authenticated admin API: + ```bash + curl -sS -X POST https://authority.example.com/internal/signing/rotate \ + -H "x-stellaops-bootstrap-key: ${BOOTSTRAP_KEY}" \ + -H "Content-Type: application/json" \ + -d '{ + "keyId": "authority-signing-2025", + "location": "../certificates/authority-signing-2025.pem", + "source": "file" + }' + ``` +3. Verify the response reports the previous key as retired and fetch `/jwks` to confirm the new `kid` appears with `status: "active"`. +4. Persist the old key path in `signing.additionalKeys` (the rotation API updates in-memory options; rewrite the YAML to match so restarts remain consistent). +5. If you prefer automation, trigger the `.gitea/workflows/authority-key-rotation.yml` workflow with the new `keyId`/`keyPath`; it wraps `ops/authority/key-rotation.sh` and reads environment-specific secrets. The older key will be marked `retired` and appended to `signing.additionalKeys`. +6. Re-run `stella auth revoke export` so revocation bundles are signed with the new key. Downstream caches should refresh JWKS within their configured lifetime (`StellaOpsAuthorityOptions.Signing` + client cache tolerance). + +The rotation API leverages the same cryptography abstractions as revocation signing; no restart is required and the previous key is marked `retired` but kept available for verification. + +## 6. Bootstrap & Administrative Endpoints +Administrative APIs live under `/internal/*` and require the bootstrap API key plus rate-limiter compliance. + +| Endpoint | Method | Description | +| --- | --- | --- | +| `/internal/users` | `POST` | Provision initial administrative accounts through the registered password-capable plug-in. Emits structured audit events. | +| `/internal/clients` | `POST` | Provision OAuth clients (client credentials / device code). | +| `/internal/revocations/export` | `GET` | Export revocation bundle + detached JWS + digest. | +| `/internal/signing/rotate` | `POST` | Promote a new signing key (see SOP above). Request body accepts `keyId`, `location`, optional `source`, `algorithm`, `provider`, and metadata. | + +All administrative calls emit `AuthEventRecord` entries enriched with correlation IDs, PII tags, and network metadata for offline SOC ingestion. + +> **Tenant hint:** include a `tenant` entry inside `properties` when bootstrapping clients. Authority normalises the value, stores it on the registration, and stamps future tokens/audit events with the tenant. + +### Bootstrap client example + +```jsonc +POST /internal/clients +{ + "clientId": "concelier", + "confidential": true, + "displayName": "Concelier Backend", + "allowedGrantTypes": ["client_credentials"], + "allowedScopes": ["concelier.jobs.trigger", "advisory:ingest", "advisory:read"], + "properties": { + "tenant": "tenant-default" + } +} +``` + +For environments with multiple tenants, repeat the call per tenant-specific client (e.g. `concelier-tenant-a`, `concelier-tenant-b`) or append suffixes to the client identifier. + +### Aggregation-only verification tokens + +- Issue a dedicated client (e.g. `aoc-verifier`) with the scopes `aoc:verify`, `advisory:read`, and `vex:read` for each tenant that runs guard checks. 
Authority refuses to mint tokens for these scopes unless the client registration provides a tenant hint. +- The CLI (`stella aoc verify --tenant `) and Console verification panel both call `/aoc/verify` on Concelier and Excititor. Tokens that omit the tenant claim or present a tenant that does not match the stored registration are rejected with `invalid_client`/`invalid_token`. +- Audit: `authority.client_credentials.grant` entries record `scope.invalid="aoc:verify"` when requests are rejected because the tenant hint is missing or mismatched. + +### Exception approvals & routing + +- New scopes `exceptions:read`, `exceptions:write`, and `exceptions:approve` govern access to the exception lifecycle. Map these via tenant roles (`exceptions-service`, `exceptions-approver`) as described in `/docs/security/authority-scopes.md`. +- Configure approval routing in `authority.yaml` with declarative templates. Each template exposes an `authorityRouteId` for downstream services (Policy Engine, Console) and an optional `requireMfa` flag: + +```yaml +exceptions: + routingTemplates: + - id: "secops" + authorityRouteId: "approvals/secops" + requireMfa: true + description: "Security Operations approval chain" + - id: "governance" + authorityRouteId: "approvals/governance" + requireMfa: false + description: "Non-production waiver review" +``` + +- Clients requesting exception scopes must include a tenant assignment. Authority rejects client-credential flows that request `exceptions:*` with `invalid_client` and logs `scope.invalid="exceptions:write"` (or the requested scope) in `authority.client_credentials.grant` audit events when the tenant hint is missing. +- When any configured routing template sets `requireMfa: true`, user-facing tokens that contain `exceptions:approve` must be acquired through an MFA-capable identity provider. Password/OIDC flows that lack MFA support are rejected with `authority.password.grant` audit events where `reason="Exception approval scope requires an MFA-capable identity provider."` +- Update interactive clients (Console) to request `exceptions:read` by default and elevate to `exceptions:approve` only inside fresh-auth workflows for approvers. Documented examples live in `etc/authority.yaml.sample`. +- Verification responses map guard failures to `ERR_AOC_00x` codes and Authority emits `authority.client_credentials.grant` + `authority.token.validate_access` audit records containing the tenant and scopes so operators can trace who executed a run. +- For air-gapped or offline replicas, pre-issue verification tokens per tenant and rotate them alongside ingest credentials; the guard endpoints never mutate data and remain safe to expose through the offline kit schedule. + +## 7. Configuration Reference + +| Section | Key | Description | Notes | +| --- | --- | --- | --- | +| Root | `issuer` | Absolute HTTPS issuer advertised to clients. | Required. Loopback HTTP allowed only for development. | +| Tokens | `accessTokenLifetime`, `refreshTokenLifetime`, etc. | Lifetimes for each grant (access, refresh, device, authorization code, identity). | Enforced during issuance; persisted on each token document. | +| Storage | `storage.connectionString` | MongoDB connection string. | Required even for tests; offline kits ship snapshots for seeding. | +| Signing | `signing.enabled` | Enable JWKS/revocation signing. | Disable only for development. | +| Signing | `signing.algorithm` | Signing algorithm identifier. | Currently ES256; additional curves can be wired through crypto providers. 
| +| Signing | `signing.keySource` | Loader identifier (`file`, `vault`, custom). | Determines which `IAuthoritySigningKeySource` resolves keys. | +| Signing | `signing.keyPath` | Relative/absolute path understood by the loader. | Stored as-is; rotation request should keep it in sync with filesystem layout. | +| Signing | `signing.activeKeyId` | Active JWKS / revocation signing key id. | Exposed as `kid` in JWKS and bundles. | +| Signing | `signing.additionalKeys[].keyId` | Retired key identifier retained for verification. | Manager updates this automatically after rotation; keep YAML aligned. | +| Signing | `signing.additionalKeys[].source` | Loader identifier per retired key. | Defaults to `signing.keySource` if omitted. | +| Security | `security.rateLimiting` | Fixed-window limits for `/token`, `/authorize`, `/internal/*`. | See `docs/security/rate-limits.md` for tuning. | +| Bootstrap | `bootstrap.apiKey` | Shared secret required for `/internal/*`. | Only required when `bootstrap.enabled` is true. | + +### 7.1 Sender-constrained clients (DPoP & mTLS) + +Authority now understands two flavours of sender-constrained OAuth clients: + +- **DPoP proof-of-possession** – clients sign a `DPoP` header for `/token` requests. Authority validates the JWK thumbprint, HTTP method/URI, and replay window, then stamps the resulting access token with `cnf.jkt` so downstream services can verify the same key is reused. + - Configure under `security.senderConstraints.dpop`. `allowedAlgorithms`, `proofLifetime`, and `replayWindow` are enforced at validation time. + - `security.senderConstraints.dpop.nonce.enabled` enables nonce challenges for high-value audiences (`requiredAudiences`, normalised to case-insensitive strings). When a nonce is required but missing or expired, `/token` replies with `WWW-Authenticate: DPoP error="use_dpop_nonce"` (and, when available, a fresh `DPoP-Nonce` header). Clients must retry with the issued nonce embedded in the proof. + - `security.senderConstraints.dpop.nonce.store` selects `memory` (default) or `redis`. When `redis` is configured, set `security.senderConstraints.dpop.nonce.redisConnectionString` so replicas share nonce issuance and high-value clients avoid replay gaps during failover. + - Example (enabling Redis-backed nonces; adjust audiences per deployment): + ```yaml + security: + senderConstraints: + dpop: + enabled: true + proofLifetime: "00:02:00" + replayWindow: "00:05:00" + allowedAlgorithms: [ "ES256", "ES384" ] + nonce: + enabled: true + ttl: "00:10:00" + maxIssuancePerMinute: 120 + store: "redis" + redisConnectionString: "redis://authority-redis:6379?ssl=false" + requiredAudiences: + - "signer" + - "attestor" + ``` + Operators can override any field via environment variables (e.g. `STELLAOPS_AUTHORITY__SECURITY__SENDERCONSTRAINTS__DPOP__NONCE__STORE=redis`). + - Declare client `audiences` in bootstrap manifests or plug-in provisioning metadata; Authority now defaults the token `aud` claim and `resource` indicator from this list, which is also used to trigger nonce enforcement for audiences such as `signer` and `attestor`. +- **Mutual TLS clients** – client registrations may declare an mTLS binding (`senderConstraint: mtls`). When enabled via `security.senderConstraints.mtls`, Authority validates the presented client certificate against stored bindings (`certificateBindings[]`), optional chain verification, and timing windows. 
Successful requests embed `cnf.x5t#S256` into the access token (and introspection output) so resource servers can enforce the certificate thumbprint. + - `security.senderConstraints.mtls.enforceForAudiences` forces mTLS whenever the requested `aud`/`resource` (or the client's configured audiences) intersect the configured allow-list (default includes `signer`). Clients configured for different sender constraints are rejected early so operator policy remains consistent. + - Certificate bindings now act as an allow-list: Authority verifies thumbprint, subject, issuer, serial number, and any declared SAN values against the presented certificate, with rotation grace windows applied to `notBefore/notAfter`. Operators can enforce subject regexes, SAN type allow-lists (`dns`, `uri`, `ip`), trusted certificate authorities, and rotation grace via `security.senderConstraints.mtls.*`. + +Both modes persist additional metadata in `authority_tokens`: `senderConstraint` records the enforced policy, while `senderKeyThumbprint` stores the DPoP JWK thumbprint or mTLS certificate hash captured at issuance. Downstream services can rely on these fields (and the corresponding `cnf` claim) when auditing offline copies of the token store. + +### 7.2 Policy Engine clients & scopes + +Policy Engine v2 introduces dedicated scopes and a service identity that materialises effective findings. Configure Authority as follows when provisioning policy clients: + +| Client | Scopes | Notes | +| --- | --- | --- | +| `policy-engine` (service) | `policy:run`, `findings:read`, `effective:write` | Must include `properties.serviceIdentity: policy-engine` and a tenant. Authority rejects `effective:write` tokens without the marker or tenant. | +| `policy-cli` / automation | `policy:read`, `policy:author`, `policy:review`, `policy:simulate`, `findings:read` *(optionally add `policy:approve` / `policy:operate` / `policy:activate` for promotion pipelines)* | Keep scopes minimal; reroll CLI/CI tokens issued before 2025‑10‑27 so they drop legacy scope names and adopt the new set. | +| UI/editor sessions | `policy:read`, `policy:author`, `policy:simulate` (+ reviewer/approver/operator scopes as appropriate) | Issue tenant-specific clients so audit and rate limits remain scoped. | + +Sample YAML entry: + +```yaml + - clientId: "policy-engine" + displayName: "Policy Engine Service" + grantTypes: [ "client_credentials" ] + audiences: [ "api://policy-engine" ] + scopes: [ "policy:run", "findings:read", "effective:write" ] + tenant: "tenant-default" + properties: + serviceIdentity: "policy-engine" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/policy-engine.secret" +``` + +Compliance checklist: + +- [ ] `policy-engine` client includes `properties.serviceIdentity: policy-engine` and a tenant hint; logins missing either are rejected. +- [ ] Non-service clients omit `effective:write` and receive only the scopes required for their role (`policy:read`, `policy:author`, `policy:review`, `policy:approve`, `policy:operate`, `policy:simulate`, etc.). +- [ ] Legacy tokens using `policy:write`/`policy:submit`/`policy:edit` are rotated to the new scope set before Production change freeze (see release migration note below). +- [ ] Approval/activation workflows use identities distinct from authoring identities; tenants are provisioned per client to keep telemetry segregated. 
+- [ ] Operators document reviewer assignments and incident procedures alongside `/docs/security/policy-governance.md` and archive policy evidence bundles (`stella policy bundle export`) with each release. + +### 7.3 Orchestrator roles & scopes + +| Role / Client | Scopes | Notes | +| --- | --- | --- | +| `Orch.Viewer` role | `orch:read` | Read-only access to Orchestrator dashboards, queues, and telemetry. | +| `Orch.Operator` role | `orch:read`, `orch:operate` | Issue short-lived tokens for control actions (pause/resume, retry, sync). Token requests **must** include `operator_reason` (≤256 chars) and `operator_ticket` (≤128 chars); Authority rejects requests missing either value and records both in audit events. | + +Token request example via client credentials: + +```bash +curl -u orch-operator:s3cr3t! \ + -d 'grant_type=client_credentials' \ + -d 'scope=orch:operate' \ + -d 'operator_reason=resume source after maintenance' \ + -d 'operator_ticket=INC-2045' \ + https://authority.example.com/token +``` + +Tokens lacking `operator_reason` or `operator_ticket` receive `invalid_request`; audit events (`authority.client_credentials.grant`) surface the supplied values under `request.reason` and `request.ticket` for downstream review. +CLI clients set these parameters via `Authority.OperatorReason` / `Authority.OperatorTicket` (environment variables `STELLAOPS_ORCH_REASON` and `STELLAOPS_ORCH_TICKET`). + +## 8. Offline & Sovereign Operation +- **No outbound dependencies:** Authority only contacts MongoDB and local plugins. Discovery and JWKS are cached by clients with offline tolerances (`AllowOfflineCacheFallback`, `OfflineCacheTolerance`). Operators should mirror these responses for air-gapped use. +- **Structured logging:** Every revocation export, signing rotation, bootstrap action, and token issuance emits structured logs with `traceId`, `client_id`, `subjectId`, and `network.remoteIp` where applicable. Mirror logs to your SIEM to retain audit trails without central connectivity. +- **Determinism:** Sorting rules in token and revocation exports guarantee byte-for-byte identical artefacts given the same datastore state. Hashes and signatures remain stable across machines. + +## 9. Operational Checklist +- [ ] Protect the bootstrap API key and disable bootstrap endpoints (`bootstrap.enabled: false`) once initial setup is complete. +- [ ] Schedule `stella auth revoke export` (or `/internal/revocations/export`) at the same cadence as Concelier exports so bundles remain in lockstep. +- [ ] Rotate signing keys before expiration; keep at least one retired key until all cached bundles/tokens signed with it have expired. +- [ ] Monitor `/health` and `/ready` plus rate-limiter metrics to detect plugin outages early. +- [ ] Ensure downstream services cache JWKS and revocation bundles within tolerances; stale caches risk accepting revoked tokens. + +For plug-in specific requirements, refer to **[Authority Plug-in Developer Guide](dev/31_AUTHORITY_PLUGIN_DEVELOPER_GUIDE.md)**. For revocation bundle validation workflow, see **[Authority Revocation Bundle](security/revocation-bundle.md)**. diff --git a/docs/11_DATA_SCHEMAS.md b/docs/11_DATA_SCHEMAS.md index 0e98e14e..6a5c7413 100755 --- a/docs/11_DATA_SCHEMAS.md +++ b/docs/11_DATA_SCHEMAS.md @@ -1,91 +1,91 @@ -# Data Schemas & Persistence Contracts - -*Audience* – backend developers, plug‑in authors, DB admins. -*Scope* – describes **Redis**, **MongoDB** (optional), and on‑disk blob shapes that power Stella Ops. 
- ---- - -## 0 Document Conventions - -* **CamelCase** for JSON. -* All timestamps are **RFC 3339 / ISO 8601** with `Z` (UTC). -* `⭑` = planned but *not* shipped yet (kept on Feature Matrix “To Do”). - ---- - -## 1 SBOM Wrapper Envelope - -Every SBOM blob (regardless of format) is stored on disk or in object storage with a *sidecar* JSON file that indexes it for the scanners. - -#### 1.1 JSON Shape - -```jsonc -{ - "id": "sha256:417f…", // digest of the SBOM *file* itself - "imageDigest": "sha256:e2b9…", // digest of the original container image - "created": "2025-07-14T07:02:13Z", - "format": "trivy-json-v2", // NEW enum: trivy-json-v2 | spdx-json | cyclonedx-json - "layers": [ - "sha256:d38b…", // layer digests (ordered) - "sha256:af45…" - ], - "partial": false, // true => delta SBOM (only some layers) - "provenanceId": "prov_0291" // ⭑ link to SLSA attestation (Q1‑2026) -} -``` - -*`format`* **NEW** – added to support **multiple SBOM formats**. -*`partial`* **NEW** – true when generated via the **delta SBOM** flow (§1.3). - -#### 1.2 File‑system Layout - -``` -blobs/ - ├─ 417f… # digest prefix - │   ├─ sbom.json # payload (any format) - │   └─ sbom.meta.json # wrapper (shape above) -``` - -> **Note** – blob storage can point at S3, MinIO, or plain disk; driver plug‑ins adapt. - -#### 1.3 Delta SBOM Extension - -When `partial: true`, *only* the missing layers have been scanned. -Merging logic inside `scanning` module stitches new data onto the cached full SBOM in Redis. - ---- - -## 2 Redis Keyspace - -| Key pattern | Type | TTL | Purpose | -|-------------------------------------|---------|------|--------------------------------------------------| -| `scan:<digest>` | string | ∞ | Last scan JSON result (as returned by `/scan`) | -| `layers:<digest>` | set | 90d | Layers already possessing SBOMs (delta cache) | -| `policy:active` | string | ∞ | YAML **or** Rego ruleset | -| `quota:<token>` | string | *until next UTC midnight* | Per‑token scan counter for Free tier ({{ quota_token }} scans). | -| `policy:history` | list | ∞ | Change audit IDs (see Mongo) | -| `feed:nvd:json` | string | 24h | Normalised feed snapshot | -| `locator:<imageDigest>` | string | 30d | Maps image digest → sbomBlobId | -| `metrics:…` | various | — | Prom / OTLP runtime metrics | - -> **Delta SBOM** uses `layers:*` to skip work in <20 ms. -> **Quota enforcement** increments `quota:` atomically; when {{ quota_token }} the API returns **429**. - ---- - -## 3 MongoDB Collections (Optional) - -Only enabled when `MONGO_URI` is supplied (for long‑term audit). - -| Collection | Shape (summary) | Indexes | -|--------------------|------------------------------------------------------------|-------------------------------------| -| `sbom_history` | Wrapper JSON + `replaceTs` on overwrite | `{imageDigest}` `{created}` | -| `policy_versions` | `{_id, yaml, rego, authorId, created}` | `{created}` | -| `attestations` ⭑ | SLSA provenance doc + Rekor log pointer | `{imageDigest}` | -| `audit_log` | Fully rendered RFC 5424 entries (UI & CLI actions) | `{userId}` `{ts}` | - -Schema detail for **policy_versions**: - +# Data Schemas & Persistence Contracts + +*Audience* – backend developers, plug‑in authors, DB admins. +*Scope* – describes **Redis**, **MongoDB** (optional), and on‑disk blob shapes that power Stella Ops. + +--- + +## 0 Document Conventions + +* **CamelCase** for JSON. +* All timestamps are **RFC 3339 / ISO 8601** with `Z` (UTC). +* `⭑` = planned but *not* shipped yet (kept on Feature Matrix “To Do”). 
+ +--- + +## 1 SBOM Wrapper Envelope + +Every SBOM blob (regardless of format) is stored on disk or in object storage with a *sidecar* JSON file that indexes it for the scanners. + +#### 1.1 JSON Shape + +```jsonc +{ + "id": "sha256:417f…", // digest of the SBOM *file* itself + "imageDigest": "sha256:e2b9…", // digest of the original container image + "created": "2025-07-14T07:02:13Z", + "format": "trivy-json-v2", // NEW enum: trivy-json-v2 | spdx-json | cyclonedx-json + "layers": [ + "sha256:d38b…", // layer digests (ordered) + "sha256:af45…" + ], + "partial": false, // true => delta SBOM (only some layers) + "provenanceId": "prov_0291" // ⭑ link to SLSA attestation (Q1‑2026) +} +``` + +*`format`* **NEW** – added to support **multiple SBOM formats**. +*`partial`* **NEW** – true when generated via the **delta SBOM** flow (§1.3). + +#### 1.2 File‑system Layout + +``` +blobs/ + ├─ 417f… # digest prefix + │   ├─ sbom.json # payload (any format) + │   └─ sbom.meta.json # wrapper (shape above) +``` + +> **Note** – blob storage can point at S3, MinIO, or plain disk; driver plug‑ins adapt. + +#### 1.3 Delta SBOM Extension + +When `partial: true`, *only* the missing layers have been scanned. +Merging logic inside `scanning` module stitches new data onto the cached full SBOM in Redis. + +--- + +## 2 Redis Keyspace + +| Key pattern | Type | TTL | Purpose | +|-------------------------------------|---------|------|--------------------------------------------------| +| `scan:<digest>` | string | ∞ | Last scan JSON result (as returned by `/scan`) | +| `layers:<digest>` | set | 90d | Layers already possessing SBOMs (delta cache) | +| `policy:active` | string | ∞ | YAML **or** Rego ruleset | +| `quota:<token>` | string | *until next UTC midnight* | Per‑token scan counter for Free tier ({{ quota_token }} scans). | +| `policy:history` | list | ∞ | Change audit IDs (see Mongo) | +| `feed:nvd:json` | string | 24h | Normalised feed snapshot | +| `locator:<imageDigest>` | string | 30d | Maps image digest → sbomBlobId | +| `metrics:…` | various | — | Prom / OTLP runtime metrics | + +> **Delta SBOM** uses `layers:*` to skip work in <20 ms. +> **Quota enforcement** increments `quota:` atomically; when {{ quota_token }} the API returns **429**. + +--- + +## 3 MongoDB Collections (Optional) + +Only enabled when `MONGO_URI` is supplied (for long‑term audit). + +| Collection | Shape (summary) | Indexes | +|--------------------|------------------------------------------------------------|-------------------------------------| +| `sbom_history` | Wrapper JSON + `replaceTs` on overwrite | `{imageDigest}` `{created}` | +| `policy_versions` | `{_id, yaml, rego, authorId, created}` | `{created}` | +| `attestations` ⭑ | SLSA provenance doc + Rekor log pointer | `{imageDigest}` | +| `audit_log` | Fully rendered RFC 5424 entries (UI & CLI actions) | `{userId}` `{ts}` | + +Schema detail for **policy_versions**: + Samples live under `samples/api/scheduler/` (e.g., `schedule.json`, `run.json`, `impact-set.json`, `audit.json`) and mirror the canonical serializer output shown below. ```jsonc @@ -327,34 +327,34 @@ Materialized view powering the Scheduler UI dashboards. Stores the latest roll-u - Schedulers should call the projection service after every run state change so the cache mirrors planner/runner progress. Sample file: `samples/api/scheduler/run-summary.json`. - ---- - -## 4 Policy Schema (YAML v1.0) - -Minimal viable grammar (subset of OSV‑SCHEMA ideas). 
- -```yaml -version: "1.0" -rules: - - name: Block Critical - severity: [Critical] - action: block - - name: Ignore Low Dev - severity: [Low, None] - environments: [dev, staging] - action: ignore - expires: "2026-01-01" - - name: Escalate RegionalFeed High - sources: [NVD, CNNVD, CNVD, ENISA, JVN, BDU] - severity: [High, Critical] - action: escalate -``` - + +--- + +## 4 Policy Schema (YAML v1.0) + +Minimal viable grammar (subset of OSV‑SCHEMA ideas). + +```yaml +version: "1.0" +rules: + - name: Block Critical + severity: [Critical] + action: block + - name: Ignore Low Dev + severity: [Low, None] + environments: [dev, staging] + action: ignore + expires: "2026-01-01" + - name: Escalate RegionalFeed High + sources: [NVD, CNNVD, CNVD, ENISA, JVN, BDU] + severity: [High, Critical] + action: escalate +``` + Validation is performed by `policy:mapping.yaml` JSON‑Schema embedded in backend. -Canonical schema source: `src/StellaOps.Policy/Schemas/policy-schema@1.json` (embedded into `StellaOps.Policy`). -`PolicyValidationCli` (see `src/StellaOps.Policy/PolicyValidationCli.cs`) provides the reusable command handler that the main CLI wires up; in the interim it can be invoked from a short host like: +Canonical schema source: `src/Policy/__Libraries/StellaOps.Policy/Schemas/policy-schema@1.json` (embedded into `StellaOps.Policy`). +`PolicyValidationCli` (see `src/Policy/__Libraries/StellaOps.Policy/PolicyValidationCli.cs`) provides the reusable command handler that the main CLI wires up; in the interim it can be invoked from a short host like: ```csharp await new PolicyValidationCli().RunAsync(new PolicyValidationCliOptions @@ -363,7 +363,7 @@ await new PolicyValidationCli().RunAsync(new PolicyValidationCliOptions Strict = true, }); ``` - + ### 4.1 Rego Variant (Advanced – TODO) *Accepted but stored as‑is in `rego` field.* @@ -372,7 +372,7 @@ Evaluated via internal **OPA** side‑car once feature graduates from TODO list. ### 4.2 Policy Scoring Config (JSON) *Schema id.* `https://schemas.stella-ops.org/policy/policy-scoring-schema@1.json` -*Source.* `src/StellaOps.Policy/Schemas/policy-scoring-schema@1.json` (embedded in `StellaOps.Policy`), default fixture at `src/StellaOps.Policy/Schemas/policy-scoring-default.json`. +*Source.* `src/Policy/__Libraries/StellaOps.Policy/Schemas/policy-scoring-schema@1.json` (embedded in `StellaOps.Policy`), default fixture at `src/Policy/__Libraries/StellaOps.Policy/Schemas/policy-scoring-default.json`. ```jsonc { @@ -426,25 +426,25 @@ npx ajv validate --spec=draft2020 -c ajv-formats \ Planned for Q1‑2026 (kept here for early plug‑in authors). 
```jsonc -{ - "id": "prov_0291", - "imageDigest": "sha256:e2b9…", - "buildType": "https://slsa.dev/container/v1", - "builder": { - "id": "https://git.stella-ops.ru/ci/stella-runner@sha256:f7b7…" - }, - "metadata": { - "invocation": { - "parameters": {"GIT_SHA": "f6a1…"}, - "buildStart": "2025-07-14T06:59:17Z", - "buildEnd": "2025-07-14T07:01:22Z" - }, - "completeness": {"parameters": true} - }, - "materials": [ - {"uri": "git+https://git…", "digest": {"sha1": "f6a1…"}} - ], - "rekorLogIndex": 99817 // entry in local Rekor mirror +{ + "id": "prov_0291", + "imageDigest": "sha256:e2b9…", + "buildType": "https://slsa.dev/container/v1", + "builder": { + "id": "https://git.stella-ops.ru/ci/stella-runner@sha256:f7b7…" + }, + "metadata": { + "invocation": { + "parameters": {"GIT_SHA": "f6a1…"}, + "buildStart": "2025-07-14T06:59:17Z", + "buildEnd": "2025-07-14T07:01:22Z" + }, + "completeness": {"parameters": true} + }, + "materials": [ + {"uri": "git+https://git…", "digest": {"sha1": "f6a1…"}} + ], + "rekorLogIndex": 99817 // entry in local Rekor mirror } ``` @@ -509,42 +509,42 @@ done ``` Integration tests can embed the sample fixtures to guarantee deterministic serialisation from the `StellaOps.Notify.Models` DTOs introduced in Sprint 15. - ---- - -## 6 Validator Contracts - -* For SBOM wrapper – `ISbomValidator` (DLL plug‑in) must return *typed* error list. -* For YAML policies – JSON‑Schema at `/schemas/policy‑v1.json`. -* For Rego – OPA `opa eval --fail-defined` under the hood. -* For **Free‑tier quotas** – `IQuotaService` integration tests ensure `quota:` resets at UTC midnight and produces correct `Retry‑After` headers. - ---- - -## 7 Migration Notes - -1. **Add `format` column** to existing SBOM wrappers; default to `trivy-json-v2`. -2. **Populate `layers` & `partial`** via backfill script (ship with `stellopsctl migrate` wizard). -3. Policy YAML previously stored in Redis → copy to Mongo if persistence enabled. -4. Prepare `attestations` collection (empty) – safe to create in advance. - ---- - -## 8 Open Questions / Future Work - -* How to de‑duplicate *identical* Rego policies differing only in whitespace? -* Embed *GOST 34.11‑2018* digests when users enable Russian crypto suite? -* Should enterprise tiers share the same Redis quota keys or switch to JWT claim `tier != Free` bypass? -* Evaluate sliding‑window quota instead of strict daily reset. -* Consider rate‑limit for `/layers/missing` to avoid brute‑force enumeration. - ---- - -## 9 Change Log - -| Date | Note | -|------------|--------------------------------------------------------------------------------| -| 2025‑07‑14 | **Added:** `format`, `partial`, delta cache keys, YAML policy schema v1.0. | -| 2025‑07‑12 | **Initial public draft** – SBOM wrapper, Redis keyspace, audit collections. | - ---- + +--- + +## 6 Validator Contracts + +* For SBOM wrapper – `ISbomValidator` (DLL plug‑in) must return *typed* error list. +* For YAML policies – JSON‑Schema at `/schemas/policy‑v1.json`. +* For Rego – OPA `opa eval --fail-defined` under the hood. +* For **Free‑tier quotas** – `IQuotaService` integration tests ensure `quota:` resets at UTC midnight and produces correct `Retry‑After` headers. + +--- + +## 7 Migration Notes + +1. **Add `format` column** to existing SBOM wrappers; default to `trivy-json-v2`. +2. **Populate `layers` & `partial`** via backfill script (ship with `stellopsctl migrate` wizard). +3. Policy YAML previously stored in Redis → copy to Mongo if persistence enabled. +4. 
Prepare `attestations` collection (empty) – safe to create in advance. + +--- + +## 8 Open Questions / Future Work + +* How to de‑duplicate *identical* Rego policies differing only in whitespace? +* Embed *GOST 34.11‑2018* digests when users enable Russian crypto suite? +* Should enterprise tiers share the same Redis quota keys or switch to JWT claim `tier != Free` bypass? +* Evaluate sliding‑window quota instead of strict daily reset. +* Consider rate‑limit for `/layers/missing` to avoid brute‑force enumeration. + +--- + +## 9 Change Log + +| Date | Note | +|------------|--------------------------------------------------------------------------------| +| 2025‑07‑14 | **Added:** `format`, `partial`, delta cache keys, YAML policy schema v1.0. | +| 2025‑07‑12 | **Initial public draft** – SBOM wrapper, Redis keyspace, audit collections. | + +--- diff --git a/docs/12_PERFORMANCE_WORKBOOK.md b/docs/12_PERFORMANCE_WORKBOOK.md index 2460fd30..1444ed63 100755 --- a/docs/12_PERFORMANCE_WORKBOOK.md +++ b/docs/12_PERFORMANCE_WORKBOOK.md @@ -56,7 +56,7 @@ ## 3 Test Harness * **Runner** – `perf/run.sh`, accepts `--phase` and `--samples`. -* **Language analyzers microbench** – `dotnet run --project src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/StellaOps.Bench.ScannerAnalyzers.csproj -- --repo-root . --out src/StellaOps.Bench/Scanner.Analyzers/baseline.csv --json out/bench/scanner-analyzers/latest.json --prom out/bench/scanner-analyzers/latest.prom --commit $(git rev-parse HEAD)` produces CSV + JSON + Prometheus gauges for analyzer scenarios. Runs fail if `max_ms` regresses ≥ 20 % against `baseline.csv` or if thresholds are exceeded. +* **Language analyzers microbench** – `dotnet run --project src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/StellaOps.Bench.ScannerAnalyzers.csproj -- --repo-root . --out src/Bench/StellaOps.Bench/Scanner.Analyzers/baseline.csv --json out/bench/scanner-analyzers/latest.json --prom out/bench/scanner-analyzers/latest.prom --commit $(git rev-parse HEAD)` produces CSV + JSON + Prometheus gauges for analyzer scenarios. Runs fail if `max_ms` regresses ≥ 20 % against `baseline.csv` or if thresholds are exceeded. * **Metrics** – Prometheus + `jq` extracts; aggregated via `scripts/aggregate.ts`. * **CI** – GitLab CI job *benchmark* publishes JSON to `bench‑artifacts/`. * **Visualisation** – Grafana dashboard *Stella‑Perf* (provisioned JSON). diff --git a/docs/19_TEST_SUITE_OVERVIEW.md b/docs/19_TEST_SUITE_OVERVIEW.md index 991e8d07..2c9c956e 100755 --- a/docs/19_TEST_SUITE_OVERVIEW.md +++ b/docs/19_TEST_SUITE_OVERVIEW.md @@ -1,47 +1,47 @@ -# Automated Test‑Suite Overview - -This document enumerates **every automated check** executed by the Stella Ops -CI pipeline, from unit level to chaos experiments. It is intended for -contributors who need to extend coverage or diagnose failures. - -> **Build parameters** – values such as `{{ dotnet }}` (runtime) and -> `{{ angular }}` (UI framework) are injected at build time. - ---- - -## Layer map - -| Layer | Tooling | Entry‑point | Frequency | -|-------|---------|-------------|-----------| -| **1. Unit** | `xUnit` (dotnet test) | `*.Tests.csproj` | per PR / push | -| **2. Property‑based** | `FsCheck` | `SbomPropertyTests` | per PR | -| **3. 
Integration (API)** | `Testcontainers` suite | `test/Api.Integration` | per PR + nightly | +# Automated Test‑Suite Overview + +This document enumerates **every automated check** executed by the Stella Ops +CI pipeline, from unit level to chaos experiments. It is intended for +contributors who need to extend coverage or diagnose failures. + +> **Build parameters** – values such as `{{ dotnet }}` (runtime) and +> `{{ angular }}` (UI framework) are injected at build time. + +--- + +## Layer map + +| Layer | Tooling | Entry‑point | Frequency | +|-------|---------|-------------|-----------| +| **1. Unit** | `xUnit` (dotnet test) | `*.Tests.csproj` | per PR / push | +| **2. Property‑based** | `FsCheck` | `SbomPropertyTests` | per PR | +| **3. Integration (API)** | `Testcontainers` suite | `test/Api.Integration` | per PR + nightly | | **4. Integration (DB-merge)** | in-memory Mongo + Redis | `Concelier.Integration` (vulnerability ingest/merge/export service) | per PR | -| **5. Contract (gRPC)** | `Buf breaking` | `buf.yaml` files | per PR | -| **6. Front‑end unit** | `Jest` | `ui/src/**/*.spec.ts` | per PR | -| **7. Front‑end E2E** | `Playwright` | `ui/e2e/**` | nightly | -| **8. Lighthouse perf / a11y** | `lighthouse-ci` (Chrome headless) | `ui/dist/index.html` | nightly | -| **9. Load** | `k6` scripted scenarios | `k6/*.js` | nightly | -| **10. Chaos CPU / OOM** | `pumba` | Docker Compose overlay | weekly | -| **11. Dependency scanning** | `Trivy fs` + `dotnet list package --vuln` | root | per PR | -| **12. License compliance** | `LicenceFinder` | root | per PR | -| **13. SBOM reproducibility** | `in‑toto attestation` diff | GitLab job | release tags | - ---- - -## Quality gates - -| Metric | Budget | Gate | -|--------|--------|------| -| API unit coverage | ≥ 85 % lines | PR merge | -| API response P95 | ≤ 120 ms | nightly alert | -| Δ‑SBOM warm scan P95 (4 vCPU) | ≤ 5 s | nightly alert | -| Lighthouse performance score | ≥ 90 | nightly alert | -| Lighthouse accessibility score | ≥ 95 | nightly alert | -| k6 sustained RPS drop | < 5 % vs baseline | nightly alert | - ---- - +| **5. Contract (gRPC)** | `Buf breaking` | `buf.yaml` files | per PR | +| **6. Front‑end unit** | `Jest` | `ui/src/**/*.spec.ts` | per PR | +| **7. Front‑end E2E** | `Playwright` | `ui/e2e/**` | nightly | +| **8. Lighthouse perf / a11y** | `lighthouse-ci` (Chrome headless) | `ui/dist/index.html` | nightly | +| **9. Load** | `k6` scripted scenarios | `k6/*.js` | nightly | +| **10. Chaos CPU / OOM** | `pumba` | Docker Compose overlay | weekly | +| **11. Dependency scanning** | `Trivy fs` + `dotnet list package --vuln` | root | per PR | +| **12. License compliance** | `LicenceFinder` | root | per PR | +| **13. SBOM reproducibility** | `in‑toto attestation` diff | GitLab job | release tags | + +--- + +## Quality gates + +| Metric | Budget | Gate | +|--------|--------|------| +| API unit coverage | ≥ 85 % lines | PR merge | +| API response P95 | ≤ 120 ms | nightly alert | +| Δ‑SBOM warm scan P95 (4 vCPU) | ≤ 5 s | nightly alert | +| Lighthouse performance score | ≥ 90 | nightly alert | +| Lighthouse accessibility score | ≥ 95 | nightly alert | +| k6 sustained RPS drop | < 5 % vs baseline | nightly alert | + +--- + ## Local runner ```bash @@ -63,13 +63,13 @@ The script spins up MongoDB/Redis via Testcontainers and requires: The Concelier connector suite includes a regression test (`OsvGhsaParityRegressionTests`) that checks a curated set of GHSA identifiers against OSV responses. 
The fixture -snapshots live in `src/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/` and are kept +snapshots live in `src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/` and are kept deterministic so the parity report remains reproducible. To refresh the fixtures when GHSA/OSV payloads change: 1. Ensure outbound HTTPS access to `https://api.osv.dev` and `https://api.github.com`. -2. Run `UPDATE_PARITY_FIXTURES=1 dotnet test src/StellaOps.Concelier.Connector.Osv.Tests/StellaOps.Concelier.Connector.Osv.Tests.csproj`. +2. Run `UPDATE_PARITY_FIXTURES=1 dotnet test src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Osv.Tests/StellaOps.Concelier.Connector.Osv.Tests.csproj`. 3. Commit the regenerated `osv-ghsa.*.json` files that the test emits (raw snapshots and canonical advisories). The regen flow logs `[Parity]` messages and normalises `recordedAt` timestamps so the @@ -82,28 +82,28 @@ fixtures stay stable across machines. ```mermaid flowchart LR subgraph fast-path - U[xUnit] --> P[FsCheck] --> I1[Testcontainer API] - end - - I1 --> FE[Jest] - FE --> E2E[Playwright] - E2E --> Lighthouse + U[xUnit] --> P[FsCheck] --> I1[Testcontainer API] + end + + I1 --> FE[Jest] + FE --> E2E[Playwright] + E2E --> Lighthouse Lighthouse --> INTEG2[Concelier] - INTEG2 --> LOAD[k6] - LOAD --> CHAOS[pumba] - CHAOS --> RELEASE[Attestation diff] -``` - ---- - -## Adding a new test layer - -1. Extend `scripts/dev-test.sh` so local contributors get the layer by default. -2. Add a dedicated GitLab job in `.gitlab-ci.yml` (stage `test` or `nightly`). -3. Register the job in `docs/19_TEST_SUITE_OVERVIEW.md` *and* list its metric - in `docs/metrics/README.md`. - ---- - -*Last updated {{ "now" | date: "%Y‑%m‑%d" }}* - + INTEG2 --> LOAD[k6] + LOAD --> CHAOS[pumba] + CHAOS --> RELEASE[Attestation diff] +``` + +--- + +## Adding a new test layer + +1. Extend `scripts/dev-test.sh` so local contributors get the layer by default. +2. Add a dedicated GitLab job in `.gitlab-ci.yml` (stage `test` or `nightly`). +3. Register the job in `docs/19_TEST_SUITE_OVERVIEW.md` *and* list its metric + in `docs/metrics/README.md`. + +--- + +*Last updated {{ "now" | date: "%Y‑%m‑%d" }}* + diff --git a/docs/21_INSTALL_GUIDE.md b/docs/21_INSTALL_GUIDE.md index a6f8c830..9b9c28d4 100755 --- a/docs/21_INSTALL_GUIDE.md +++ b/docs/21_INSTALL_GUIDE.md @@ -1,190 +1,190 @@ -# Stella Ops — Installation Guide (Docker & Air‑Gap) - - - -> **Status — public α not yet published.** -> The commands below will work as soon as the first image is tagged -> `registry.stella-ops.org/stella-ops/stella-ops:0.1.0-alpha` -> (target date: **late 2025**). Track progress on the -> [road‑map](/roadmap/). - ---- - -## 0 · Prerequisites - -| Item | Minimum | Notes | -|------|---------|-------| -| Linux | Ubuntu 22.04 LTS / Alma 9 | x86‑64 or arm64 | -| CPU / RAM | 2 vCPU / 2 GiB | Laptop baseline | -| Disk | 10 GiB SSD | SBOM + vuln DB cache | -| Docker | **Engine 25 + Compose v2** | `docker -v` | -| TLS | OpenSSL 1.1 +  | Self‑signed cert generated at first run | - ---- - -## 1 · Connected‑host install (Docker Compose) - -```bash -# 1. Make a working directory -mkdir stella && cd stella - -# 2. 
Download the signed Compose bundle + example .env -curl -LO https://get.stella-ops.org/releases/latest/.env.example -curl -LO https://get.stella-ops.org/releases/latest/.env.example.sig -curl -LO https://get.stella-ops.org/releases/latest/docker-compose.infrastructure.yml -curl -LO https://get.stella-ops.org/releases/latest/docker-compose.infrastructure.yml.sig -curl -LO https://get.stella-ops.org/releases/latest/docker-compose.stella-ops.yml -curl -LO https://get.stella-ops.org/releases/latest/docker-compose.stella-ops.yml.sig - -# 3. Verify provenance (Cosign public key is stable) -cosign verify-blob \ - --key https://stella-ops.org/keys/cosign.pub \ - --signature .env.example.sig \ - .env.example - -cosign verify-blob \ - --key https://stella-ops.org/keys/cosign.pub \ - --signature docker-compose.infrastructure.yml.sig \ - docker-compose.infrastructure.yml - -cosign verify-blob \ - --key https://stella-ops.org/keys/cosign.pub \ - --signature docker-compose.stella-ops.yml.sig \ - docker-compose.stella-ops.yml - -# 4. Copy .env.example → .env and edit secrets -cp .env.example .env -$EDITOR .env - -# 5. Launch databases (MongoDB + Redis) -docker compose --env-file .env -f docker-compose.infrastructure.yml up -d - -# 6. Launch Stella Ops (first run pulls ~50 MB merged vuln DB) -docker compose --env-file .env -f docker-compose.stella-ops.yml up -d -```` - -*Default login:* `admin / changeme` -UI: [https://\<host\>:8443](https://<host>:8443) (self‑signed certificate) - -> **Pinning best‑practice** – in production environments replace -> `stella-ops:latest` with the immutable digest printed by -> `docker images --digests`. - -> **Repo bundles** – Development, staging, and air‑gapped Compose profiles live -> under `deploy/compose/`, already tied to the release manifests in -> `deploy/releases/`. Helm users can pull the same channel overlays from -> `deploy/helm/stellaops/values-*.yaml` and validate everything with -> `deploy/tools/validate-profiles.sh`. - -### 1.1 · Concelier authority configuration - -The Concelier container reads configuration from `etc/concelier.yaml` plus -`CONCELIER_` environment variables. To enable the new Authority integration: - -1. 
Add the following keys to `.env` (replace values for your environment): - - ```bash - CONCELIER_AUTHORITY__ENABLED=true - CONCELIER_AUTHORITY__ALLOWANONYMOUSFALLBACK=true # temporary rollout only - CONCELIER_AUTHORITY__ISSUER="https://authority.internal" - CONCELIER_AUTHORITY__AUDIENCES__0="api://concelier" - CONCELIER_AUTHORITY__REQUIREDSCOPES__0="concelier.jobs.trigger" - CONCELIER_AUTHORITY__REQUIREDSCOPES__1="advisory:read" - CONCELIER_AUTHORITY__REQUIREDSCOPES__2="advisory:ingest" - CONCELIER_AUTHORITY__REQUIREDTENANTS__0="tenant-default" - CONCELIER_AUTHORITY__CLIENTID="concelier-jobs" - CONCELIER_AUTHORITY__CLIENTSCOPES__0="concelier.jobs.trigger" - CONCELIER_AUTHORITY__CLIENTSCOPES__1="advisory:read" - CONCELIER_AUTHORITY__CLIENTSCOPES__2="advisory:ingest" - CONCELIER_AUTHORITY__CLIENTSECRETFILE="/run/secrets/concelier_authority_client" - CONCELIER_AUTHORITY__BYPASSNETWORKS__0="127.0.0.1/32" - CONCELIER_AUTHORITY__BYPASSNETWORKS__1="::1/128" - CONCELIER_AUTHORITY__RESILIENCE__ENABLERETRIES=true - CONCELIER_AUTHORITY__RESILIENCE__RETRYDELAYS__0="00:00:01" - CONCELIER_AUTHORITY__RESILIENCE__RETRYDELAYS__1="00:00:02" - CONCELIER_AUTHORITY__RESILIENCE__RETRYDELAYS__2="00:00:05" - CONCELIER_AUTHORITY__RESILIENCE__ALLOWOFFLINECACHEFALLBACK=true - CONCELIER_AUTHORITY__RESILIENCE__OFFLINECACHETOLERANCE="00:10:00" - ``` - - Store the client secret outside source control (Docker secrets, mounted file, - or Kubernetes Secret). Concelier loads the secret during post-configuration, so - the value never needs to appear in the YAML template. - - Connected sites can keep the retry ladder short (1 s, 2 s, 5 s) so job triggers fail fast when Authority is down. For air‑gapped or intermittently connected deployments, extend `RESILIENCE__OFFLINECACHETOLERANCE` (e.g. `00:30:00`) so cached discovery/JWKS data remains valid while the Offline Kit synchronises upstream changes. - -2. Redeploy Concelier: - - ```bash - docker compose --env-file .env -f docker-compose.stella-ops.yml up -d concelier - ``` - -3. Tail the logs: `docker compose logs -f concelier`. Successful `/jobs*` calls now - emit `Concelier.Authorization.Audit` entries with `route`, `status`, `subject`, - `clientId`, `scopes`, `bypass`, and `remote` fields. 401 denials keep the same - shape—watch for `bypass=True`, which indicates a bypass CIDR accepted an anonymous - call. See `docs/ops/concelier-authority-audit-runbook.md` for a full audit/alerting checklist. - -> **Enforcement deadline** – keep `CONCELIER_AUTHORITY__ALLOWANONYMOUSFALLBACK=true` -> only while validating the rollout. Set it to `false` (and restart Concelier) -> before **2025-12-31 UTC** to require tokens in production. - ---- - -## 2 · Optional: request a free quota token - -Anonymous installs allow **{{ quota\_anon }} scans per UTC day**. -Email `token@stella-ops.org` to receive a signed JWT that raises the limit to -**{{ quota\_token }} scans/day**. Insert it into `.env`: - -```bash -STELLA_JWT="paste‑token‑here" -docker compose --env-file .env -f docker-compose.stella-ops.yml \ - exec stella-ops stella set-jwt "$STELLA_JWT" -``` - ->  The UI shows a reminder at 200 scans and throttles above the limit but will ->  **never block** your pipeline. 
- ---- - -## 3 · Air‑gapped install (Offline Update Kit) - -When running on an isolated network use the **Offline Update Kit (OUK)**: - -```bash -# Download & verify on a connected host -curl -LO https://get.stella-ops.org/ouk/stella-ops-offline-kit-v0.1a.tgz -curl -LO https://get.stella-ops.org/ouk/stella-ops-offline-kit-v0.1a.tgz.sig - -cosign verify-blob \ - --key https://stella-ops.org/keys/cosign.pub \ - --signature stella-ops-offline-kit-v0.1a.tgz.sig \ - stella-ops-offline-kit-v0.1a.tgz - -# Transfer → air‑gap → import -docker compose --env-file .env -f docker-compose.stella-ops.yml \ - exec stella admin import-offline-usage-kit stella-ops-offline-kit-v0.1a.tgz -``` - -*Import is atomic; no service downtime.* - -For details see the dedicated [Offline Kit guide](/offline/). - ---- - -## 4 · Next steps - -* **5‑min Quick‑Start:** `/quickstart/` -* **CI recipes:** `docs/ci/20_CI_RECIPES.md` -* **Plug‑in SDK:** `/plugins/` - ---- - -*Generated {{ "now" | date: "%Y‑%m‑%d" }} — build tags inserted at render time.* +# Stella Ops — Installation Guide (Docker & Air‑Gap) + + + +> **Status — public α not yet published.** +> The commands below will work as soon as the first image is tagged +> `registry.stella-ops.org/stella-ops/stella-ops:0.1.0-alpha` +> (target date: **late 2025**). Track progress on the +> [road‑map](/roadmap/). + +--- + +## 0 · Prerequisites + +| Item | Minimum | Notes | +|------|---------|-------| +| Linux | Ubuntu 22.04 LTS / Alma 9 | x86‑64 or arm64 | +| CPU / RAM | 2 vCPU / 2 GiB | Laptop baseline | +| Disk | 10 GiB SSD | SBOM + vuln DB cache | +| Docker | **Engine 25 + Compose v2** | `docker -v` | +| TLS | OpenSSL 1.1 +  | Self‑signed cert generated at first run | + +--- + +## 1 · Connected‑host install (Docker Compose) + +```bash +# 1. Make a working directory +mkdir stella && cd stella + +# 2. Download the signed Compose bundle + example .env +curl -LO https://get.stella-ops.org/releases/latest/.env.example +curl -LO https://get.stella-ops.org/releases/latest/.env.example.sig +curl -LO https://get.stella-ops.org/releases/latest/docker-compose.infrastructure.yml +curl -LO https://get.stella-ops.org/releases/latest/docker-compose.infrastructure.yml.sig +curl -LO https://get.stella-ops.org/releases/latest/docker-compose.stella-ops.yml +curl -LO https://get.stella-ops.org/releases/latest/docker-compose.stella-ops.yml.sig + +# 3. Verify provenance (Cosign public key is stable) +cosign verify-blob \ + --key https://stella-ops.org/keys/cosign.pub \ + --signature .env.example.sig \ + .env.example + +cosign verify-blob \ + --key https://stella-ops.org/keys/cosign.pub \ + --signature docker-compose.infrastructure.yml.sig \ + docker-compose.infrastructure.yml + +cosign verify-blob \ + --key https://stella-ops.org/keys/cosign.pub \ + --signature docker-compose.stella-ops.yml.sig \ + docker-compose.stella-ops.yml + +# 4. Copy .env.example → .env and edit secrets +cp .env.example .env +$EDITOR .env + +# 5. Launch databases (MongoDB + Redis) +docker compose --env-file .env -f docker-compose.infrastructure.yml up -d + +# 6. Launch Stella Ops (first run pulls ~50 MB merged vuln DB) +docker compose --env-file .env -f docker-compose.stella-ops.yml up -d +```` + +*Default login:* `admin / changeme` +UI: [https://\<host\>:8443](https://<host>:8443) (self‑signed certificate) + +> **Pinning best‑practice** – in production environments replace +> `stella-ops:latest` with the immutable digest printed by +> `docker images --digests`. 
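+
+A minimal sketch of that pinning step (the repository path matches the alpha image tag above; the digest value and the exact `image:` line inside `docker-compose.stella-ops.yml` are illustrative, so substitute what your own pull reports):
+
+```bash
+# Print the immutable digest of the image you just pulled
+docker images --digests registry.stella-ops.org/stella-ops/stella-ops
+
+# Then pin by digest instead of a mutable tag in docker-compose.stella-ops.yml, e.g.
+#   image: registry.stella-ops.org/stella-ops/stella-ops@sha256:<digest-from-above>
+```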
+ +> **Repo bundles** – Development, staging, and air‑gapped Compose profiles live +> under `deploy/compose/`, already tied to the release manifests in +> `deploy/releases/`. Helm users can pull the same channel overlays from +> `deploy/helm/stellaops/values-*.yaml` and validate everything with +> `deploy/tools/validate-profiles.sh`. + +### 1.1 · Concelier authority configuration + +The Concelier container reads configuration from `etc/concelier.yaml` plus +`CONCELIER_` environment variables. To enable the new Authority integration: + +1. Add the following keys to `.env` (replace values for your environment): + + ```bash + CONCELIER_AUTHORITY__ENABLED=true + CONCELIER_AUTHORITY__ALLOWANONYMOUSFALLBACK=true # temporary rollout only + CONCELIER_AUTHORITY__ISSUER="https://authority.internal" + CONCELIER_AUTHORITY__AUDIENCES__0="api://concelier" + CONCELIER_AUTHORITY__REQUIREDSCOPES__0="concelier.jobs.trigger" + CONCELIER_AUTHORITY__REQUIREDSCOPES__1="advisory:read" + CONCELIER_AUTHORITY__REQUIREDSCOPES__2="advisory:ingest" + CONCELIER_AUTHORITY__REQUIREDTENANTS__0="tenant-default" + CONCELIER_AUTHORITY__CLIENTID="concelier-jobs" + CONCELIER_AUTHORITY__CLIENTSCOPES__0="concelier.jobs.trigger" + CONCELIER_AUTHORITY__CLIENTSCOPES__1="advisory:read" + CONCELIER_AUTHORITY__CLIENTSCOPES__2="advisory:ingest" + CONCELIER_AUTHORITY__CLIENTSECRETFILE="/run/secrets/concelier_authority_client" + CONCELIER_AUTHORITY__BYPASSNETWORKS__0="127.0.0.1/32" + CONCELIER_AUTHORITY__BYPASSNETWORKS__1="::1/128" + CONCELIER_AUTHORITY__RESILIENCE__ENABLERETRIES=true + CONCELIER_AUTHORITY__RESILIENCE__RETRYDELAYS__0="00:00:01" + CONCELIER_AUTHORITY__RESILIENCE__RETRYDELAYS__1="00:00:02" + CONCELIER_AUTHORITY__RESILIENCE__RETRYDELAYS__2="00:00:05" + CONCELIER_AUTHORITY__RESILIENCE__ALLOWOFFLINECACHEFALLBACK=true + CONCELIER_AUTHORITY__RESILIENCE__OFFLINECACHETOLERANCE="00:10:00" + ``` + + Store the client secret outside source control (Docker secrets, mounted file, + or Kubernetes Secret). Concelier loads the secret during post-configuration, so + the value never needs to appear in the YAML template. + + Connected sites can keep the retry ladder short (1 s, 2 s, 5 s) so job triggers fail fast when Authority is down. For air‑gapped or intermittently connected deployments, extend `RESILIENCE__OFFLINECACHETOLERANCE` (e.g. `00:30:00`) so cached discovery/JWKS data remains valid while the Offline Kit synchronises upstream changes. + +2. Redeploy Concelier: + + ```bash + docker compose --env-file .env -f docker-compose.stella-ops.yml up -d concelier + ``` + +3. Tail the logs: `docker compose logs -f concelier`. Successful `/jobs*` calls now + emit `Concelier.Authorization.Audit` entries with `route`, `status`, `subject`, + `clientId`, `scopes`, `bypass`, and `remote` fields. 401 denials keep the same + shape—watch for `bypass=True`, which indicates a bypass CIDR accepted an anonymous + call. See `docs/ops/concelier-authority-audit-runbook.md` for a full audit/alerting checklist. + +> **Enforcement deadline** – keep `CONCELIER_AUTHORITY__ALLOWANONYMOUSFALLBACK=true` +> only while validating the rollout. Set it to `false` (and restart Concelier) +> before **2025-12-31 UTC** to require tokens in production. + +--- + +## 2 · Optional: request a free quota token + +Anonymous installs allow **{{ quota\_anon }} scans per UTC day**. +Email `token@stella-ops.org` to receive a signed JWT that raises the limit to +**{{ quota\_token }} scans/day**. 
Insert it into `.env`: + +```bash +STELLA_JWT="paste‑token‑here" +docker compose --env-file .env -f docker-compose.stella-ops.yml \ + exec stella-ops stella set-jwt "$STELLA_JWT" +``` + +>  The UI shows a reminder at 200 scans and throttles above the limit but will +>  **never block** your pipeline. + +--- + +## 3 · Air‑gapped install (Offline Update Kit) + +When running on an isolated network use the **Offline Update Kit (OUK)**: + +```bash +# Download & verify on a connected host +curl -LO https://get.stella-ops.org/ouk/stella-ops-offline-kit-v0.1a.tgz +curl -LO https://get.stella-ops.org/ouk/stella-ops-offline-kit-v0.1a.tgz.sig + +cosign verify-blob \ + --key https://stella-ops.org/keys/cosign.pub \ + --signature stella-ops-offline-kit-v0.1a.tgz.sig \ + stella-ops-offline-kit-v0.1a.tgz + +# Transfer → air‑gap → import +docker compose --env-file .env -f docker-compose.stella-ops.yml \ + exec stella admin import-offline-usage-kit stella-ops-offline-kit-v0.1a.tgz +``` + +*Import is atomic; no service downtime.* + +For details see the dedicated [Offline Kit guide](/offline/). + +--- + +## 4 · Next steps + +* **5‑min Quick‑Start:** `/quickstart/` +* **CI recipes:** `docs/ci/20_CI_RECIPES.md` +* **Plug‑in SDK:** `/plugins/` + +--- + +*Generated {{ "now" | date: "%Y‑%m‑%d" }} — build tags inserted at render time.* diff --git a/docs/ARCHITECTURE_AUTHORITY.md b/docs/ARCHITECTURE_AUTHORITY.md index edccfdb8..26b5ad4f 100644 --- a/docs/ARCHITECTURE_AUTHORITY.md +++ b/docs/ARCHITECTURE_AUTHORITY.md @@ -1,443 +1,443 @@ -# component_architecture_authority.md — **Stella Ops Authority** (2025Q4) - -> **Scope.** Implementation‑ready architecture for **Stella Ops Authority**: the on‑prem **OIDC/OAuth2** service that issues **short‑lived, sender‑constrained operational tokens (OpToks)** to first‑party services and tools. Covers protocols (DPoP & mTLS binding), token shapes, endpoints, storage, rotation, HA, RBAC, audit, and testing. This component is the trust anchor for *who* is calling inside a Stella Ops installation. (Entitlement is proven separately by **PoE** from the cloud Licensing Service; Authority does not issue PoE.) - ---- - -## 0) Mission & boundaries - -**Mission.** Provide **fast, local, verifiable** authentication for Stella Ops microservices and tools by minting **very short‑lived** OAuth2/OIDC tokens that are **sender‑constrained** (DPoP or mTLS‑bound). Support RBAC scopes, multi‑tenant claims, and deterministic validation for APIs (Scanner, Signer, Attestor, Excititor, Concelier, UI, CLI, Zastava). - -**Boundaries.** - -* Authority **does not** validate entitlements/licensing. That’s enforced by **Signer** using **PoE** with the cloud Licensing Service. -* Authority tokens are **operational only** (2–5 min TTL) and must not be embedded in long‑lived artifacts or stored in SBOMs. -* Authority is **stateless for validation** (JWT) and **optional introspection** for services that prefer online checks. - ---- - -## 1) Protocols & cryptography - -* **OIDC Discovery**: `/.well-known/openid-configuration` -* **OAuth2** grant types: - - * **Client Credentials** (service↔service, with mTLS or private_key_jwt) - * **Device Code** (CLI login on headless agents; optional) - * **Authorization Code + PKCE** (browser login for UI; optional) -* **Sender constraint options** (choose per caller or per audience): - - * **DPoP** (Demonstration of Proof‑of‑Possession): proof JWT on each HTTP request, bound to the access token via `cnf.jkt`. 
- * **OAuth 2.0 mTLS** (certificate‑bound tokens): token bound to client certificate thumbprint via `cnf.x5t#S256`. -* **Signing algorithms**: **EdDSA (Ed25519)** preferred; fallback **ES256 (P‑256)**. Rotation is supported via **kid** in JWKS. -* **Token format**: **JWT** access tokens (compact), optionally opaque reference tokens for services that insist on introspection. -* **Clock skew tolerance**: ±60 s; issue `nbf`, `iat`, `exp` accordingly. - ---- - -## 2) Token model - -### 2.1 Access token (OpTok) — short‑lived (120–300 s) - -**Registered claims** - -``` -iss = https://authority. -sub = -aud = -exp = (<= 300 s from iat) -iat = -nbf = iat - 30 -jti = -scope = "scanner.scan scanner.export signer.sign ..." -``` - -**Sender‑constraint (`cnf`)** - -* **DPoP**: - - ```json - "cnf": { "jkt": "" } - ``` -* **mTLS**: - - ```json - "cnf": { "x5t#S256": "" } - ``` - -**Install/tenant context (custom claims)** - -``` -tid = // multi-tenant -inst = // unique installation -roles = [ "svc.scanner", "svc.signer", "ui.admin", ... ] -plan? = // optional hint for UIs; not used for enforcement -``` - -> **Note**: Do **not** copy PoE claims into OpTok; OpTok ≠ entitlement. Only **Signer** checks PoE. - -### 2.2 Refresh tokens (optional) - -* Default **disabled**. If enabled (for UI interactive logins), pair with **DPoP‑bound** refresh tokens or **mTLS** client sessions; short TTL (≤ 8 h), rotating on use (replay‑safe). - -### 2.3 ID tokens (optional) - -* Issued for UI/browser OIDC flows (Authorization Code + PKCE); not used for service auth. - ---- - -## 3) Endpoints & flows - -### 3.1 OIDC discovery & keys - -* `GET /.well-known/openid-configuration` → endpoints, algs, jwks_uri -* `GET /jwks` → JSON Web Key Set (rotating, at least 2 active keys during transition) - -### 3.2 Token issuance - -* `POST /oauth/token` - - * **Client Credentials** (service→service): - +# component_architecture_authority.md — **Stella Ops Authority** (2025Q4) + +> **Scope.** Implementation‑ready architecture for **Stella Ops Authority**: the on‑prem **OIDC/OAuth2** service that issues **short‑lived, sender‑constrained operational tokens (OpToks)** to first‑party services and tools. Covers protocols (DPoP & mTLS binding), token shapes, endpoints, storage, rotation, HA, RBAC, audit, and testing. This component is the trust anchor for *who* is calling inside a Stella Ops installation. (Entitlement is proven separately by **PoE** from the cloud Licensing Service; Authority does not issue PoE.) + +--- + +## 0) Mission & boundaries + +**Mission.** Provide **fast, local, verifiable** authentication for Stella Ops microservices and tools by minting **very short‑lived** OAuth2/OIDC tokens that are **sender‑constrained** (DPoP or mTLS‑bound). Support RBAC scopes, multi‑tenant claims, and deterministic validation for APIs (Scanner, Signer, Attestor, Excititor, Concelier, UI, CLI, Zastava). + +**Boundaries.** + +* Authority **does not** validate entitlements/licensing. That’s enforced by **Signer** using **PoE** with the cloud Licensing Service. +* Authority tokens are **operational only** (2–5 min TTL) and must not be embedded in long‑lived artifacts or stored in SBOMs. +* Authority is **stateless for validation** (JWT) and **optional introspection** for services that prefer online checks. 
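+
+Because validation is stateless, resource servers can check an OpTok entirely offline against the published JWKS. A minimal sketch of that local check in C# (using `Microsoft.IdentityModel` / `System.IdentityModel.Tokens.Jwt`; the issuer, audience, and variable names are illustrative, and the sender-constraint checks in section 11 still apply on top of this):
+
+```csharp
+using System;
+using System.IdentityModel.Tokens.Jwt;
+using System.Security.Claims;
+using Microsoft.IdentityModel.Tokens;
+
+static ClaimsPrincipal ValidateOpTok(string accessToken, string cachedJwksJson)
+{
+    // Keys come from a locally cached copy of /jwks; refresh out of band, matched by `kid`.
+    var jwks = new JsonWebKeySet(cachedJwksJson);
+
+    var parameters = new TokenValidationParameters
+    {
+        ValidIssuer = "https://authority.internal",   // must equal the token's `iss`
+        ValidAudience = "scanner",                    // each service verifies its own `aud`
+        IssuerSigningKeys = jwks.GetSigningKeys(),
+        ValidateLifetime = true,                      // enforces `exp` / `nbf`
+        ClockSkew = TimeSpan.FromSeconds(60),
+    };
+
+    // Throws if the signature, issuer, audience, or lifetime check fails.
+    var principal = new JwtSecurityTokenHandler().ValidateToken(accessToken, parameters, out _);
+
+    // DPoP / mTLS binding (`cnf`) is validated separately; this only proves the JWT itself.
+    return principal;
+}
+```
+
+Services that prefer online checks can call `/oauth/introspect` instead (see 3.3).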
+ +--- + +## 1) Protocols & cryptography + +* **OIDC Discovery**: `/.well-known/openid-configuration` +* **OAuth2** grant types: + + * **Client Credentials** (service↔service, with mTLS or private_key_jwt) + * **Device Code** (CLI login on headless agents; optional) + * **Authorization Code + PKCE** (browser login for UI; optional) +* **Sender constraint options** (choose per caller or per audience): + + * **DPoP** (Demonstration of Proof‑of‑Possession): proof JWT on each HTTP request, bound to the access token via `cnf.jkt`. + * **OAuth 2.0 mTLS** (certificate‑bound tokens): token bound to client certificate thumbprint via `cnf.x5t#S256`. +* **Signing algorithms**: **EdDSA (Ed25519)** preferred; fallback **ES256 (P‑256)**. Rotation is supported via **kid** in JWKS. +* **Token format**: **JWT** access tokens (compact), optionally opaque reference tokens for services that insist on introspection. +* **Clock skew tolerance**: ±60 s; issue `nbf`, `iat`, `exp` accordingly. + +--- + +## 2) Token model + +### 2.1 Access token (OpTok) — short‑lived (120–300 s) + +**Registered claims** + +``` +iss = https://authority. +sub = +aud = +exp = (<= 300 s from iat) +iat = +nbf = iat - 30 +jti = +scope = "scanner.scan scanner.export signer.sign ..." +``` + +**Sender‑constraint (`cnf`)** + +* **DPoP**: + + ```json + "cnf": { "jkt": "" } + ``` +* **mTLS**: + + ```json + "cnf": { "x5t#S256": "" } + ``` + +**Install/tenant context (custom claims)** + +``` +tid = // multi-tenant +inst = // unique installation +roles = [ "svc.scanner", "svc.signer", "ui.admin", ... ] +plan? = // optional hint for UIs; not used for enforcement +``` + +> **Note**: Do **not** copy PoE claims into OpTok; OpTok ≠ entitlement. Only **Signer** checks PoE. + +### 2.2 Refresh tokens (optional) + +* Default **disabled**. If enabled (for UI interactive logins), pair with **DPoP‑bound** refresh tokens or **mTLS** client sessions; short TTL (≤ 8 h), rotating on use (replay‑safe). + +### 2.3 ID tokens (optional) + +* Issued for UI/browser OIDC flows (Authorization Code + PKCE); not used for service auth. + +--- + +## 3) Endpoints & flows + +### 3.1 OIDC discovery & keys + +* `GET /.well-known/openid-configuration` → endpoints, algs, jwks_uri +* `GET /jwks` → JSON Web Key Set (rotating, at least 2 active keys during transition) + +### 3.2 Token issuance + +* `POST /oauth/token` + + * **Client Credentials** (service→service): + * **mTLS**: mutual TLS + `client_id` → bound token (`cnf.x5t#S256`) * `security.senderConstraints.mtls.enforceForAudiences` forces the mTLS path when requested `aud`/`resource` values intersect high-value audiences (defaults include `signer`). Authority rejects clients attempting to use DPoP/basic secrets for these audiences. * Stored `certificateBindings` are authoritative: thumbprint, subject, issuer, serial number, and SAN values are matched against the presented certificate, with rotation grace applied to activation windows. Failures surface deterministic error codes (e.g. `certificate_binding_subject_mismatch`). * **private_key_jwt**: JWT‑based client auth + **DPoP** header (preferred for tools and CLI) - * **Device Code** (CLI): `POST /oauth/device/code` + `POST /oauth/token` poll - * **Authorization Code + PKCE** (UI): standard - -**DPoP handshake (example)** - -1. Client prepares **JWK** (ephemeral keypair). -2. Client sends **DPoP proof** header with fields: - - ``` - htm=POST - htu=https://authority.../oauth/token - iat= - jti= - ``` - - signed with the DPoP private key; header carries JWK. -3. 
Authority validates proof; issues access token with `cnf.jkt=`. -4. Client uses the same DPoP key to sign **every subsequent API request** to services (Signer, Scanner, …). - -**mTLS flow** - -* Mutual TLS at the connection; Authority extracts client cert, validates chain; token carries `cnf.x5t#S256`. - -### 3.3 Introspection & revocation (optional) - -* `POST /oauth/introspect` → `{ active, sub, scope, aud, exp, cnf, ... }` -* `POST /oauth/revoke` → revokes refresh tokens or opaque access tokens. -* **Replay prevention**: maintain **DPoP `jti` cache** (TTL ≤ 10 min) to reject duplicate proofs when services supply DPoP nonces (Signer requires nonce for high‑value operations). - -### 3.4 UserInfo (optional for UI) - -* `GET /userinfo` (ID token context). - ---- - -## 4) Audiences, scopes & RBAC - -### 4.1 Audiences - -* `signer` — only the **Signer** service should accept tokens with `aud=signer`. -* `attestor`, `scanner`, `concelier`, `excititor`, `ui`, `zastava` similarly. - -Services **must** verify `aud` and **sender constraint** (DPoP/mTLS) per their policy. - -### 4.2 Core scopes - -| Scope | Service | Operation | -| ---------------------------------- | ------------------ | -------------------------- | -| `signer.sign` | Signer | Request DSSE signing | -| `attestor.write` | Attestor | Submit Rekor entries | -| `scanner.scan` | Scanner.WebService | Submit scan jobs | -| `scanner.export` | Scanner.WebService | Export SBOMs | -| `scanner.read` | Scanner.WebService | Read catalog/SBOMs | -| `vex.read` / `vex.admin` | Excititor | Query/operate | -| `concelier.read` / `concelier.export` | Concelier | Query/exports | -| `ui.read` / `ui.admin` | UI | View/admin | -| `zastava.emit` / `zastava.enforce` | Scanner/Zastava | Runtime events / admission | - -**Roles → scopes mapping** is configured centrally (Authority policy) and pushed during token issuance. - ---- - -## 5) Storage & state - -* **Configuration DB** (PostgreSQL/MySQL): clients, audiences, role→scope maps, tenant/installation registry, device code grants, persistent consents (if any). -* **Cache** (Redis): - - * DPoP **jti** replay cache (short TTL) - * **Nonce** store (per resource server, if they demand nonce) - * Device code pollers, rate limiting buckets -* **JWKS**: key material in HSM/KMS or encrypted at rest; JWKS served from memory. - ---- - -## 6) Key management & rotation - -* Maintain **at least 2 signing keys** active during rotation; tokens carry `kid`. -* Prefer **Ed25519** for compact tokens; maintain **ES256** fallback for FIPS contexts. -* Rotation cadence: 30–90 days; emergency rotation supported. -* Publish new JWKS **before** issuing tokens with the new `kid` to avoid cold‑start validation misses. -* Keep **old keys** available **at least** for max token TTL + 5 minutes. - ---- - -## 7) HA & performance - -* **Stateless issuance** (except device codes/refresh) → scale horizontally behind a load‑balancer. -* **DB** only for client metadata and optional flows; token checks are JWT‑local; introspection endpoints hit cache/DB minimally. -* **Targets**: - - * Token issuance P95 ≤ **20 ms** under warm cache. - * DPoP proof validation ≤ **1 ms** extra per request at resource servers (Signer/Scanner). - * 99.9% uptime; HPA on CPU/latency. - ---- - -## 8) Security posture - -* **Strict TLS** (1.3 preferred); HSTS; modern cipher suites. -* **mTLS** enabled where required (Signer/Attestor paths). -* **Replay protection**: DPoP `jti` cache, nonce support for **Signer** (add `DPoP-Nonce` header on 401; clients re‑sign). 
-* **Rate limits** per client & per IP; exponential backoff on failures. -* **Secrets**: clients use **private_key_jwt** or **mTLS**; never basic secrets over the wire. -* **CSP/CSRF** hardening on UI flows; `SameSite=Lax` cookies; PKCE enforced. -* **Logs** redact `Authorization` and DPoP proofs; store `sub`, `aud`, `scopes`, `inst`, `tid`, `cnf` thumbprints, not full keys. - ---- - -## 9) Multi‑tenancy & installations - -* **Tenant (`tid`)** and **Installation (`inst`)** registries define which audiences/scopes a client can request. -* Cross‑tenant isolation enforced at issuance (disallow rogue `aud`), and resource servers **must** check that `tid` matches their configured tenant. - ---- - -## 10) Admin & operations APIs - -All under `/admin` (mTLS + `authority.admin` scope). - -``` -POST /admin/clients # create/update client (confidential/public) -POST /admin/audiences # register audience resource URIs -POST /admin/roles # define role→scope mappings -POST /admin/tenants # create tenant/install entries -POST /admin/keys/rotate # rotate signing key (zero-downtime) -GET /admin/metrics # Prometheus exposition (token issue rates, errors) -GET /admin/healthz|readyz # health/readiness -``` - -Declared client `audiences` flow through to the issued JWT `aud` claim and the token request's `resource` indicators. Authority relies on this metadata to enforce DPoP nonce challenges for `signer`, `attestor`, and other high-value services without requiring clients to repeat the audience parameter on every request. - ---- - -## 11) Integration hard lines (what resource servers must enforce) - -Every Stella Ops service that consumes Authority tokens **must**: - -1. Verify JWT signature (`kid` in JWKS), `iss`, `aud`, `exp`, `nbf`. -2. Enforce **sender‑constraint**: - - * **DPoP**: validate DPoP proof (`htu`, `htm`, `iat`, `jti`) and match `cnf.jkt`; cache `jti` for replay defense; honor nonce challenges. - * **mTLS**: match presented client cert thumbprint to token `cnf.x5t#S256`. -3. Check **scopes**; optionally map to internal roles. -4. Check **tenant** (`tid`) and **installation** (`inst`) as appropriate. -5. For **Signer** only: require **both** OpTok and **PoE** in the request (enforced by Signer, not Authority). - ---- - -## 12) Error surfaces & UX - -* Token endpoint errors follow OAuth2 (`invalid_client`, `invalid_grant`, `invalid_scope`, `unauthorized_client`). -* Resource servers use RFC 6750 style (`WWW-Authenticate: DPoP error="invalid_token", error_description="…", dpop_nonce="…" `). -* For DPoP nonce challenges, clients retry with the server‑supplied nonce once. - ---- - -## 13) Observability & audit - -* **Metrics**: - - * `authority.tokens_issued_total{grant,aud}` - * `authority.dpop_validations_total{result}` - * `authority.mtls_bindings_total{result}` - * `authority.jwks_rotations_total` - * `authority.errors_total{type}` -* **Audit log** (immutable sink): token issuance (`sub`, `aud`, `scopes`, `tid`, `inst`, `cnf thumbprint`, `jti`), revocations, admin changes. -* **Tracing**: token flows, DB reads, JWKS cache. 
- ---- - -## 14) Configuration (YAML) - -```yaml -authority: - issuer: "https://authority.internal" - signing: - enabled: true - activeKeyId: "authority-signing-2025" - keyPath: "../certificates/authority-signing-2025.pem" - algorithm: "ES256" - keySource: "file" - security: - rateLimiting: - token: - enabled: true - permitLimit: 30 - window: "00:01:00" - queueLimit: 0 - authorize: - enabled: true - permitLimit: 60 - window: "00:01:00" - queueLimit: 10 - internal: - enabled: false - permitLimit: 5 - window: "00:01:00" - queueLimit: 0 - senderConstraints: - dpop: - enabled: true - allowedAlgorithms: [ "ES256", "ES384" ] - proofLifetime: "00:02:00" - allowedClockSkew: "00:00:30" - replayWindow: "00:05:00" - nonce: - enabled: true - ttl: "00:10:00" - maxIssuancePerMinute: 120 - store: "redis" - redisConnectionString: "redis://authority-redis:6379?ssl=false" - requiredAudiences: - - "signer" - - "attestor" - mtls: - enabled: true - requireChainValidation: true - rotationGrace: "00:15:00" - enforceForAudiences: - - "signer" - allowedSanTypes: - - "dns" - - "uri" - allowedCertificateAuthorities: - - "/etc/ssl/mtls/clients-ca.pem" - clients: - - clientId: scanner-web - grantTypes: [ "client_credentials" ] - audiences: [ "scanner" ] - auth: { type: "private_key_jwt", jwkFile: "/secrets/scanner-web.jwk" } - senderConstraint: "dpop" - scopes: [ "scanner.scan", "scanner.export", "scanner.read" ] - - clientId: signer - grantTypes: [ "client_credentials" ] - audiences: [ "signer" ] - auth: { type: "mtls" } - senderConstraint: "mtls" - scopes: [ "signer.sign" ] - - clientId: notify-web-dev - grantTypes: [ "client_credentials" ] - audiences: [ "notify.dev" ] - auth: { type: "client_secret", secretFile: "/secrets/notify-web-dev.secret" } - senderConstraint: "dpop" - scopes: [ "notify.read", "notify.admin" ] - - clientId: notify-web - grantTypes: [ "client_credentials" ] - audiences: [ "notify" ] - auth: { type: "client_secret", secretFile: "/secrets/notify-web.secret" } - senderConstraint: "dpop" - scopes: [ "notify.read", "notify.admin" ] -``` - ---- - -## 15) Testing matrix - -* **JWT validation**: wrong `aud`, expired `exp`, skewed `nbf`, stale `kid`. -* **DPoP**: invalid `htu`/`htm`, replayed `jti`, stale `iat`, wrong `jkt`, nonce dance. -* **mTLS**: wrong client cert, wrong CA, thumbprint mismatch. -* **RBAC**: scope enforcement per audience; over‑privileged client denied. -* **Rotation**: JWKS rotation while load‑testing; zero‑downtime verification. -* **HA**: kill one Authority instance; verify issuance continues; JWKS served by peers. -* **Performance**: 1k token issuance/sec on 2 cores with Redis enabled for jti caching. 
- ---- - -## 16) Threat model & mitigations (summary) - -| Threat | Vector | Mitigation | -| ------------------- | ---------------- | ------------------------------------------------------------------------------------------ | -| Token theft | Copy of JWT | **Short TTL**, **sender‑constraint** (DPoP/mTLS); replay blocked by `jti` cache and nonces | -| Replay across hosts | Reuse DPoP proof | Enforce `htu`/`htm`, `iat` freshness, `jti` uniqueness; services may require **nonce** | -| Impersonation | Fake client | mTLS or `private_key_jwt` with pinned JWK; client registration & rotation | -| Key compromise | Signing key leak | HSM/KMS storage, key rotation, audit; emergency key revoke path; narrow token TTL | -| Cross‑tenant abuse | Scope elevation | Enforce `aud`, `tid`, `inst` at issuance and resource servers | -| Downgrade to bearer | Strip DPoP | Resource servers require DPoP/mTLS based on `aud`; reject bearer without `cnf` | - ---- - -## 17) Deployment & HA - -* **Stateless** microservice, containerized; run ≥ 2 replicas behind LB. -* **DB**: HA Postgres (or MySQL) for clients/roles; **Redis** for device codes, DPoP nonces/jtis. -* **Secrets**: mount client JWKs via K8s Secrets/HashiCorp Vault; signing keys via KMS. -* **Backups**: DB daily; Redis not critical (ephemeral). -* **Disaster recovery**: export/import of client registry; JWKS rehydrate from KMS. -* **Compliance**: TLS audit; penetration testing for OIDC flows. - ---- - -## 18) Implementation notes - -* Reference stack: **.NET 10** + **OpenIddict 6** (or IdentityServer if licensed) with custom DPoP validator and mTLS binding middleware. -* Keep the DPoP/JTI cache pluggable; allow Redis/Memcached. -* Provide **client SDKs** for C# and Go: DPoP key mgmt, proof generation, nonce handling, token refresh helper. - ---- - -## 19) Quick reference — wire examples - -**Access token (payload excerpt)** - -```json -{ - "iss": "https://authority.internal", - "sub": "scanner-web", - "aud": "signer", - "exp": 1760668800, - "iat": 1760668620, - "nbf": 1760668620, - "jti": "9d9c3f01-6e1a-49f1-8f77-9b7e6f7e3c50", - "scope": "signer.sign", - "tid": "tenant-01", - "inst": "install-7A2B", - "cnf": { "jkt": "KcVb2V...base64url..." } -} -``` - -**DPoP proof header fields (for POST /sign/dsse)** - -```json -{ - "htu": "https://signer.internal/sign/dsse", - "htm": "POST", - "iat": 1760668620, - "jti": "4b1c9b3c-8a95-4c58-8a92-9c6cfb4a6a0b" -} -``` - -Signer validates that `hash(JWK)` in the proof matches `cnf.jkt` in the token. - ---- - -## 20) Rollout plan - -1. **MVP**: Client Credentials (private_key_jwt + DPoP), JWKS, short OpToks, per‑audience scopes. -2. **Add**: mTLS‑bound tokens for Signer/Attestor; device code for CLI; optional introspection. -3. **Hardening**: DPoP nonce support; full audit pipeline; HA tuning. -4. **UX**: Tenant/installation admin UI; role→scope editors; client bootstrap wizards. + * **Device Code** (CLI): `POST /oauth/device/code` + `POST /oauth/token` poll + * **Authorization Code + PKCE** (UI): standard + +**DPoP handshake (example)** + +1. Client prepares **JWK** (ephemeral keypair). +2. Client sends **DPoP proof** header with fields: + + ``` + htm=POST + htu=https://authority.../oauth/token + iat= + jti= + ``` + + signed with the DPoP private key; header carries JWK. +3. Authority validates proof; issues access token with `cnf.jkt=`. +4. Client uses the same DPoP key to sign **every subsequent API request** to services (Signer, Scanner, …). 
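+
+The handshake above can be exercised with nothing beyond the BCL. The sketch below is illustrative, not the shipped client SDK: it builds the proof JWT (ES256 key assumed) and the RFC 7638 thumbprint that Authority surfaces as `cnf.jkt`.
+
+```csharp
+using System;
+using System.Security.Cryptography;
+using System.Text;
+using System.Text.Json;
+
+// Minimal DPoP sketch: proof JWT for the token request + RFC 7638 thumbprint.
+static class DpopProofSketch
+{
+    static string B64Url(byte[] bytes) =>
+        Convert.ToBase64String(bytes).TrimEnd('=').Replace('+', '-').Replace('/', '_');
+
+    public static string CreateProof(ECDsa key, string htm, string htu)
+    {
+        ECParameters p = key.ExportParameters(includePrivateParameters: false);
+        string x = B64Url(p.Q.X!), y = B64Url(p.Q.Y!);
+
+        var header = new { typ = "dpop+jwt", alg = "ES256", jwk = new { kty = "EC", crv = "P-256", x, y } };
+        var payload = new
+        {
+            htm,
+            htu,
+            iat = DateTimeOffset.UtcNow.ToUnixTimeSeconds(),
+            jti = Guid.NewGuid().ToString()
+        };
+
+        string signingInput =
+            B64Url(JsonSerializer.SerializeToUtf8Bytes(header)) + "." +
+            B64Url(JsonSerializer.SerializeToUtf8Bytes(payload));
+
+        // ES256 over the signing input; .NET returns the raw r||s form JWS expects.
+        byte[] sig = key.SignData(Encoding.ASCII.GetBytes(signingInput), HashAlgorithmName.SHA256);
+        return signingInput + "." + B64Url(sig);
+    }
+
+    // RFC 7638 thumbprint over the required EC members in lexicographic order.
+    public static string Thumbprint(ECDsa key)
+    {
+        ECParameters p = key.ExportParameters(false);
+        string canonical =
+            $"{{\"crv\":\"P-256\",\"kty\":\"EC\",\"x\":\"{B64Url(p.Q.X!)}\",\"y\":\"{B64Url(p.Q.Y!)}\"}}";
+        return B64Url(SHA256.HashData(Encoding.UTF8.GetBytes(canonical)));
+    }
+}
+```
+
+The client sends the result as the `DPoP` header on the token request and on every subsequent resource call, re-signing when a `DPoP-Nonce` challenge arrives.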
+ +**mTLS flow** + +* Mutual TLS at the connection; Authority extracts client cert, validates chain; token carries `cnf.x5t#S256`. + +### 3.3 Introspection & revocation (optional) + +* `POST /oauth/introspect` → `{ active, sub, scope, aud, exp, cnf, ... }` +* `POST /oauth/revoke` → revokes refresh tokens or opaque access tokens. +* **Replay prevention**: maintain **DPoP `jti` cache** (TTL ≤ 10 min) to reject duplicate proofs when services supply DPoP nonces (Signer requires nonce for high‑value operations). + +### 3.4 UserInfo (optional for UI) + +* `GET /userinfo` (ID token context). + +--- + +## 4) Audiences, scopes & RBAC + +### 4.1 Audiences + +* `signer` — only the **Signer** service should accept tokens with `aud=signer`. +* `attestor`, `scanner`, `concelier`, `excititor`, `ui`, `zastava` similarly. + +Services **must** verify `aud` and **sender constraint** (DPoP/mTLS) per their policy. + +### 4.2 Core scopes + +| Scope | Service | Operation | +| ---------------------------------- | ------------------ | -------------------------- | +| `signer.sign` | Signer | Request DSSE signing | +| `attestor.write` | Attestor | Submit Rekor entries | +| `scanner.scan` | Scanner.WebService | Submit scan jobs | +| `scanner.export` | Scanner.WebService | Export SBOMs | +| `scanner.read` | Scanner.WebService | Read catalog/SBOMs | +| `vex.read` / `vex.admin` | Excititor | Query/operate | +| `concelier.read` / `concelier.export` | Concelier | Query/exports | +| `ui.read` / `ui.admin` | UI | View/admin | +| `zastava.emit` / `zastava.enforce` | Scanner/Zastava | Runtime events / admission | + +**Roles → scopes mapping** is configured centrally (Authority policy) and pushed during token issuance. + +--- + +## 5) Storage & state + +* **Configuration DB** (PostgreSQL/MySQL): clients, audiences, role→scope maps, tenant/installation registry, device code grants, persistent consents (if any). +* **Cache** (Redis): + + * DPoP **jti** replay cache (short TTL) + * **Nonce** store (per resource server, if they demand nonce) + * Device code pollers, rate limiting buckets +* **JWKS**: key material in HSM/KMS or encrypted at rest; JWKS served from memory. + +--- + +## 6) Key management & rotation + +* Maintain **at least 2 signing keys** active during rotation; tokens carry `kid`. +* Prefer **Ed25519** for compact tokens; maintain **ES256** fallback for FIPS contexts. +* Rotation cadence: 30–90 days; emergency rotation supported. +* Publish new JWKS **before** issuing tokens with the new `kid` to avoid cold‑start validation misses. +* Keep **old keys** available **at least** for max token TTL + 5 minutes. + +--- + +## 7) HA & performance + +* **Stateless issuance** (except device codes/refresh) → scale horizontally behind a load‑balancer. +* **DB** only for client metadata and optional flows; token checks are JWT‑local; introspection endpoints hit cache/DB minimally. +* **Targets**: + + * Token issuance P95 ≤ **20 ms** under warm cache. + * DPoP proof validation ≤ **1 ms** extra per request at resource servers (Signer/Scanner). + * 99.9% uptime; HPA on CPU/latency. + +--- + +## 8) Security posture + +* **Strict TLS** (1.3 preferred); HSTS; modern cipher suites. +* **mTLS** enabled where required (Signer/Attestor paths). +* **Replay protection**: DPoP `jti` cache, nonce support for **Signer** (add `DPoP-Nonce` header on 401; clients re‑sign). +* **Rate limits** per client & per IP; exponential backoff on failures. +* **Secrets**: clients use **private_key_jwt** or **mTLS**; never basic secrets over the wire. 
+* **CSP/CSRF** hardening on UI flows; `SameSite=Lax` cookies; PKCE enforced. +* **Logs** redact `Authorization` and DPoP proofs; store `sub`, `aud`, `scopes`, `inst`, `tid`, `cnf` thumbprints, not full keys. + +--- + +## 9) Multi‑tenancy & installations + +* **Tenant (`tid`)** and **Installation (`inst`)** registries define which audiences/scopes a client can request. +* Cross‑tenant isolation enforced at issuance (disallow rogue `aud`), and resource servers **must** check that `tid` matches their configured tenant. + +--- + +## 10) Admin & operations APIs + +All under `/admin` (mTLS + `authority.admin` scope). + +``` +POST /admin/clients # create/update client (confidential/public) +POST /admin/audiences # register audience resource URIs +POST /admin/roles # define role→scope mappings +POST /admin/tenants # create tenant/install entries +POST /admin/keys/rotate # rotate signing key (zero-downtime) +GET /admin/metrics # Prometheus exposition (token issue rates, errors) +GET /admin/healthz|readyz # health/readiness +``` + +Declared client `audiences` flow through to the issued JWT `aud` claim and the token request's `resource` indicators. Authority relies on this metadata to enforce DPoP nonce challenges for `signer`, `attestor`, and other high-value services without requiring clients to repeat the audience parameter on every request. + +--- + +## 11) Integration hard lines (what resource servers must enforce) + +Every Stella Ops service that consumes Authority tokens **must**: + +1. Verify JWT signature (`kid` in JWKS), `iss`, `aud`, `exp`, `nbf`. +2. Enforce **sender‑constraint**: + + * **DPoP**: validate DPoP proof (`htu`, `htm`, `iat`, `jti`) and match `cnf.jkt`; cache `jti` for replay defense; honor nonce challenges. + * **mTLS**: match presented client cert thumbprint to token `cnf.x5t#S256`. +3. Check **scopes**; optionally map to internal roles. +4. Check **tenant** (`tid`) and **installation** (`inst`) as appropriate. +5. For **Signer** only: require **both** OpTok and **PoE** in the request (enforced by Signer, not Authority). + +--- + +## 12) Error surfaces & UX + +* Token endpoint errors follow OAuth2 (`invalid_client`, `invalid_grant`, `invalid_scope`, `unauthorized_client`). +* Resource servers use RFC 6750 style (`WWW-Authenticate: DPoP error="invalid_token", error_description="…", dpop_nonce="…" `). +* For DPoP nonce challenges, clients retry with the server‑supplied nonce once. + +--- + +## 13) Observability & audit + +* **Metrics**: + + * `authority.tokens_issued_total{grant,aud}` + * `authority.dpop_validations_total{result}` + * `authority.mtls_bindings_total{result}` + * `authority.jwks_rotations_total` + * `authority.errors_total{type}` +* **Audit log** (immutable sink): token issuance (`sub`, `aud`, `scopes`, `tid`, `inst`, `cnf thumbprint`, `jti`), revocations, admin changes. +* **Tracing**: token flows, DB reads, JWKS cache. 
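+
+A minimal emission sketch using `System.Diagnostics.Metrics`; the meter name and static wiring are assumptions, only the instrument names and tags mirror the list above.
+
+```csharp
+using System.Collections.Generic;
+using System.Diagnostics.Metrics;
+
+// Sketch only: how the counters above could be emitted with the built-in .NET metrics API.
+public static class AuthorityMetrics
+{
+    private static readonly Meter AuthorityMeter = new("StellaOps.Authority");
+
+    private static readonly Counter<long> TokensIssued =
+        AuthorityMeter.CreateCounter<long>("authority.tokens_issued_total");
+
+    private static readonly Counter<long> DpopValidations =
+        AuthorityMeter.CreateCounter<long>("authority.dpop_validations_total");
+
+    public static void TokenIssued(string grant, string audience) =>
+        TokensIssued.Add(1,
+            new KeyValuePair<string, object?>("grant", grant),
+            new KeyValuePair<string, object?>("aud", audience));
+
+    public static void DpopValidated(bool ok) =>
+        DpopValidations.Add(1,
+            new KeyValuePair<string, object?>("result", ok ? "ok" : "error"));
+}
+```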
+ +--- + +## 14) Configuration (YAML) + +```yaml +authority: + issuer: "https://authority.internal" + signing: + enabled: true + activeKeyId: "authority-signing-2025" + keyPath: "../certificates/authority-signing-2025.pem" + algorithm: "ES256" + keySource: "file" + security: + rateLimiting: + token: + enabled: true + permitLimit: 30 + window: "00:01:00" + queueLimit: 0 + authorize: + enabled: true + permitLimit: 60 + window: "00:01:00" + queueLimit: 10 + internal: + enabled: false + permitLimit: 5 + window: "00:01:00" + queueLimit: 0 + senderConstraints: + dpop: + enabled: true + allowedAlgorithms: [ "ES256", "ES384" ] + proofLifetime: "00:02:00" + allowedClockSkew: "00:00:30" + replayWindow: "00:05:00" + nonce: + enabled: true + ttl: "00:10:00" + maxIssuancePerMinute: 120 + store: "redis" + redisConnectionString: "redis://authority-redis:6379?ssl=false" + requiredAudiences: + - "signer" + - "attestor" + mtls: + enabled: true + requireChainValidation: true + rotationGrace: "00:15:00" + enforceForAudiences: + - "signer" + allowedSanTypes: + - "dns" + - "uri" + allowedCertificateAuthorities: + - "/etc/ssl/mtls/clients-ca.pem" + clients: + - clientId: scanner-web + grantTypes: [ "client_credentials" ] + audiences: [ "scanner" ] + auth: { type: "private_key_jwt", jwkFile: "/secrets/scanner-web.jwk" } + senderConstraint: "dpop" + scopes: [ "scanner.scan", "scanner.export", "scanner.read" ] + - clientId: signer + grantTypes: [ "client_credentials" ] + audiences: [ "signer" ] + auth: { type: "mtls" } + senderConstraint: "mtls" + scopes: [ "signer.sign" ] + - clientId: notify-web-dev + grantTypes: [ "client_credentials" ] + audiences: [ "notify.dev" ] + auth: { type: "client_secret", secretFile: "/secrets/notify-web-dev.secret" } + senderConstraint: "dpop" + scopes: [ "notify.read", "notify.admin" ] + - clientId: notify-web + grantTypes: [ "client_credentials" ] + audiences: [ "notify" ] + auth: { type: "client_secret", secretFile: "/secrets/notify-web.secret" } + senderConstraint: "dpop" + scopes: [ "notify.read", "notify.admin" ] +``` + +--- + +## 15) Testing matrix + +* **JWT validation**: wrong `aud`, expired `exp`, skewed `nbf`, stale `kid`. +* **DPoP**: invalid `htu`/`htm`, replayed `jti`, stale `iat`, wrong `jkt`, nonce dance. +* **mTLS**: wrong client cert, wrong CA, thumbprint mismatch. +* **RBAC**: scope enforcement per audience; over‑privileged client denied. +* **Rotation**: JWKS rotation while load‑testing; zero‑downtime verification. +* **HA**: kill one Authority instance; verify issuance continues; JWKS served by peers. +* **Performance**: 1k token issuance/sec on 2 cores with Redis enabled for jti caching. 
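+
+The replayed-`jti` case lends itself to a small deterministic test. A sketch against a hypothetical in-memory stand-in for the Redis-backed replay cache (§5), xUnit assumed:
+
+```csharp
+using System;
+using System.Collections.Concurrent;
+using Xunit;
+
+// Hypothetical in-memory stand-in for the Redis-backed jti replay cache.
+// TryRegister returns false when the same jti reappears inside the window.
+public sealed class JtiReplayCache
+{
+    private readonly ConcurrentDictionary<string, DateTimeOffset> _seen = new();
+    private readonly TimeSpan _window;
+
+    public JtiReplayCache(TimeSpan window) => _window = window;
+
+    public bool TryRegister(string jti, DateTimeOffset now)
+    {
+        // Evict entries older than the replay window before admitting new proofs.
+        foreach (var entry in _seen)
+            if (now - entry.Value > _window)
+                _seen.TryRemove(entry.Key, out _);
+
+        return _seen.TryAdd(jti, now);
+    }
+}
+
+public class DpopReplayTests
+{
+    [Fact]
+    public void Replayed_jti_is_rejected_inside_window()
+    {
+        var cache = new JtiReplayCache(TimeSpan.FromMinutes(5));
+        var t0 = DateTimeOffset.UtcNow;
+
+        Assert.True(cache.TryRegister("jti-1", t0));                 // first proof accepted
+        Assert.False(cache.TryRegister("jti-1", t0.AddSeconds(30))); // replay rejected
+        Assert.True(cache.TryRegister("jti-2", t0.AddSeconds(31)));  // distinct jti still fine
+    }
+}
+```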
+ +--- + +## 16) Threat model & mitigations (summary) + +| Threat | Vector | Mitigation | +| ------------------- | ---------------- | ------------------------------------------------------------------------------------------ | +| Token theft | Copy of JWT | **Short TTL**, **sender‑constraint** (DPoP/mTLS); replay blocked by `jti` cache and nonces | +| Replay across hosts | Reuse DPoP proof | Enforce `htu`/`htm`, `iat` freshness, `jti` uniqueness; services may require **nonce** | +| Impersonation | Fake client | mTLS or `private_key_jwt` with pinned JWK; client registration & rotation | +| Key compromise | Signing key leak | HSM/KMS storage, key rotation, audit; emergency key revoke path; narrow token TTL | +| Cross‑tenant abuse | Scope elevation | Enforce `aud`, `tid`, `inst` at issuance and resource servers | +| Downgrade to bearer | Strip DPoP | Resource servers require DPoP/mTLS based on `aud`; reject bearer without `cnf` | + +--- + +## 17) Deployment & HA + +* **Stateless** microservice, containerized; run ≥ 2 replicas behind LB. +* **DB**: HA Postgres (or MySQL) for clients/roles; **Redis** for device codes, DPoP nonces/jtis. +* **Secrets**: mount client JWKs via K8s Secrets/HashiCorp Vault; signing keys via KMS. +* **Backups**: DB daily; Redis not critical (ephemeral). +* **Disaster recovery**: export/import of client registry; JWKS rehydrate from KMS. +* **Compliance**: TLS audit; penetration testing for OIDC flows. + +--- + +## 18) Implementation notes + +* Reference stack: **.NET 10** + **OpenIddict 6** (or IdentityServer if licensed) with custom DPoP validator and mTLS binding middleware. +* Keep the DPoP/JTI cache pluggable; allow Redis/Memcached. +* Provide **client SDKs** for C# and Go: DPoP key mgmt, proof generation, nonce handling, token refresh helper. + +--- + +## 19) Quick reference — wire examples + +**Access token (payload excerpt)** + +```json +{ + "iss": "https://authority.internal", + "sub": "scanner-web", + "aud": "signer", + "exp": 1760668800, + "iat": 1760668620, + "nbf": 1760668620, + "jti": "9d9c3f01-6e1a-49f1-8f77-9b7e6f7e3c50", + "scope": "signer.sign", + "tid": "tenant-01", + "inst": "install-7A2B", + "cnf": { "jkt": "KcVb2V...base64url..." } +} +``` + +**DPoP proof header fields (for POST /sign/dsse)** + +```json +{ + "htu": "https://signer.internal/sign/dsse", + "htm": "POST", + "iat": 1760668620, + "jti": "4b1c9b3c-8a95-4c58-8a92-9c6cfb4a6a0b" +} +``` + +Signer validates that `hash(JWK)` in the proof matches `cnf.jkt` in the token. + +--- + +## 20) Rollout plan + +1. **MVP**: Client Credentials (private_key_jwt + DPoP), JWKS, short OpToks, per‑audience scopes. +2. **Add**: mTLS‑bound tokens for Signer/Attestor; device code for CLI; optional introspection. +3. **Hardening**: DPoP nonce support; full audit pipeline; HA tuning. +4. **UX**: Tenant/installation admin UI; role→scope editors; client bootstrap wizards. diff --git a/docs/ARCHITECTURE_CLI.md b/docs/ARCHITECTURE_CLI.md index ce2c541f..fccf3c3c 100644 --- a/docs/ARCHITECTURE_CLI.md +++ b/docs/ARCHITECTURE_CLI.md @@ -1,406 +1,406 @@ -# component_architecture_cli.md — **Stella Ops CLI** (2025Q4) - -> **Scope.** Implementation‑ready architecture for **Stella Ops CLI**: command surface, process model, auth (Authority/DPoP), integration with Scanner/Excititor/Concelier/Signer/Attestor, Buildx plug‑in management, offline kit behavior, packaging, observability, security posture, and CI ergonomics. 
- ---- - -## 0) Mission & boundaries - -**Mission.** Provide a **fast, deterministic, CI‑friendly** command‑line interface to drive Stella Ops workflows: - -* Build‑time SBOM generation via **Buildx generator** orchestration. -* Post‑build **scan/compose/diff/export** against **Scanner.WebService**. -* **Policy** operations and **VEX/Vuln** data pulls (operator tasks). -* **Verification** (attestation, referrers, signatures) for audits. -* Air‑gapped/offline **kit** administration. - -**Boundaries.** - -* CLI **never** signs; it only calls **Signer**/**Attestor** via backend APIs when needed (e.g., `report --attest`). -* CLI **does not** store long‑lived credentials beyond OS keychain; tokens are **short** (Authority OpToks). -* Heavy work (scanning, merging, policy) is executed **server‑side** (Scanner/Excititor/Concelier). - ---- - -## 1) Solution layout & runtime form - -``` -src/ - ├─ StellaOps.Cli/ # net10.0 (Native AOT) single binary - ├─ StellaOps.Cli.Core/ # verb plumbing, config, HTTP, auth - ├─ StellaOps.Cli.Plugins/ # optional verbs packaged as plugins - ├─ StellaOps.Cli.Tests/ # unit + golden-output tests - └─ packaging/ - ├─ msix / msi / deb / rpm / brew formula - └─ scoop manifest / winget manifest -``` - -**Language/runtime**: .NET 10 **Native AOT** for speed/startup; Linux builds use **musl** static when possible. - -**Plug-in verbs.** Non-core verbs (Excititor, runtime helpers, future integrations) ship as restart-time plug-ins under `plugins/cli/**` with manifest descriptors. The launcher loads plug-ins on startup; hot reloading is intentionally unsupported. The inaugural bundle, `StellaOps.Cli.Plugins.NonCore`, packages the Excititor, runtime, and offline-kit command groups and publishes its manifest at `plugins/cli/StellaOps.Cli.Plugins.NonCore/`. - -**OS targets**: linux‑x64/arm64, windows‑x64/arm64, macOS‑x64/arm64. - ---- - -## 2) Command surface (verbs) - -> All verbs default to **JSON** output when `--json` is set (CI mode). Human output is concise, deterministic. - -### 2.1 Auth & profile - -* `auth login` - - * Modes: **device‑code** (default), **client‑credentials** (service principal). - * Produces **Authority** access token (OpTok) + stores **DPoP** keypair in OS keychain. -* `auth status` — show current issuer, subject, audiences, expiry. -* `auth logout` — wipe cached tokens/keys. - -### 2.2 Build‑time SBOM (Buildx) - -* `buildx install` — install/update the **StellaOps.Scanner.Sbomer.BuildXPlugin** on the host. -* `buildx verify` — ensure generator is usable. -* `buildx build` — thin wrapper around `docker buildx build --attest=type=sbom,generator=stellaops/sbom-indexer` with convenience flags: - - * `--attest` (request Signer/Attestor via backend post‑push) - * `--provenance` pass‑through (optional) - -### 2.3 Scanning & artifacts - -* `scan image ` - - * Options: `--force`, `--wait`, `--view=inventory|usage|both`, `--format=cdx-json|cdx-pb|spdx-json`, `--attest` (ask backend to sign/log). - * Streams progress; exits early unless `--wait`. -* `diff image --old --new [--view ...]` — show layer‑attributed changes. -* `export sbom [--view ... --format ... --out file]` — download artifact. -* `report final [--policy-revision ... --attest]` — request PASS/FAIL report from backend (policy+vex) and optional attestation. - -### 2.4 Policy & data - -* `policy get/set/apply` — fetch active policy, apply staged policy, compute digest. -* `concelier export` — trigger/export canonical JSON or Trivy DB (admin). 
-* `excititor export` — trigger/export consensus/raw claims (admin). - -### 2.5 Verification - -* `verify attestation --uuid | --artifact | --bundle ` — call **Attestor /verify** and print proof summary. -* `verify referrers ` — ask **Signer /verify/referrers** (is image Stella‑signed?). -* `verify image-signature ` — standalone cosign verification (optional, local). - -### 2.6 Runtime (Zastava helper) - -* `runtime policy test --image/-i [--file --ns --label key=value --json]` — ask backend `/policy/runtime` like the webhook would (accepts multiple `--image`, comma/space lists, or stdin pipelines). - -### 2.7 Offline kit - -* `offline kit pull` — fetch latest **Concelier JSON + Trivy DB + Excititor exports** as a tarball from a mirror. -* `offline kit import ` — upload the kit to on‑prem services (Concelier/Excititor). -* `offline kit status` — list current seed versions. - -### 2.8 Utilities - -* `config set/get` — endpoint & defaults. -* `whoami` — short auth display. -* `version` — CLI + protocol versions; release channel. - -### 2.9 Aggregation-only guard helpers - -* `sources ingest --dry-run --source --input [--tenant ... --format table|json --output file]` - - * Normalises documents (handles gzip/base64), posts them to the backend `aoc/ingest/dry-run` route, and exits non-zero when guard violations are detected. - * Defaults to table output with ANSI colour; `--json`/`--output` produce deterministic JSON for CI pipelines. - -* `aoc verify [--since ] [--limit ] [--sources list] [--codes list] [--format table|json] [--export file] [--tenant id] [--no-color]` - - * Replays guard checks against stored raw documents. Maps backend `ERR_AOC_00x` codes onto deterministic exit codes so CI can block regressions. - * Supports pagination hints (`--limit`, `--since`), tenant scoping via `--tenant` or `STELLA_TENANT`, and JSON exports for evidence lockers. - ---- - -## 3) AuthN: Authority + DPoP - -### 3.1 Token acquisition - -* **Device‑code**: the CLI opens an OIDC device code flow against **Authority**; the browser login is optional for service principals. -* **Client‑credentials**: service principals use **private_key_jwt** or **mTLS** to get tokens. - -### 3.2 DPoP key management - -* On first login, the CLI generates an **ephemeral JWK** (Ed25519) and stores it in the **OS keychain** (Keychain/DPAPI/KWallet/Gnome Keyring). -* Every request to backend services includes a **DPoP proof**; CLI refreshes tokens as needed. - -### 3.3 Multi‑audience & scopes - -* CLI requests **audiences** as needed per verb: - - * `scanner` for scan/export/report/diff - * `signer` (indirect; usually backend calls Signer) - * `attestor` for verify - * `concelier`/`excititor` for admin verbs - -CLI rejects verbs if required scopes are missing. - ---- - -## 4) Process model & reliability - -### 4.1 HTTP client - -* Single **http2** client with connection pooling, DNS pinning, retry/backoff (idempotent GET/POST marked safe). -* **DPoP nonce** handling: on `401` with nonce challenge, CLI replays once. - -### 4.2 Streaming - -* `scan` and `report` support **server‑sent JSON lines** (progress events). -* `--json` prints machine events; human mode shows compact spinners and crucial updates only. 
- -### 4.3 Exit codes (CI‑safe) - -| Code | Meaning | -| ---- | ------------------------------------------- | -| 0 | Success | -| 2 | Policy fail (final report verdict=fail) | -| 3 | Verification failed (attestation/signature) | -| 4 | Auth error (invalid/missing token/DPoP) | -| 5 | Resource not found (image/SBOM) | -| 6 | Rate limited / quota exceeded | -| 7 | Backend unavailable (retryable) | -| 9 | Invalid arguments | -| 11–17 | Aggregation-only guard violation (`ERR_AOC_00x`) | -| 18 | Verification truncated (increase `--limit`) | -| 70 | Transport/authentication failure | -| 71 | CLI usage error (missing tenant, invalid cursor) | - ---- - -## 5) Configuration model - -**Precedence:** CLI flags → env vars → config file → defaults. - -**Config file**: `${XDG_CONFIG_HOME}/stellaops/config.yaml` (Windows: `%APPDATA%\StellaOps\config.yaml`) - -```yaml -cli: - authority: "https://authority.internal" - backend: - scanner: "https://scanner-web.internal" - attestor: "https://attestor.internal" - concelier: "https://concelier-web.internal" - excititor: "https://excititor-web.internal" - auth: - audienceDefault: "scanner" - deviceCode: true - output: - json: false - color: auto - tls: - caBundle: "/etc/ssl/certs/ca-bundle.crt" - offline: - kitMirror: "s3://mirror/stellaops-kit" -``` - -Environment variables: `STELLAOPS_AUTHORITY`, `STELLAOPS_SCANNER_URL`, etc. - ---- - -## 6) Buildx generator orchestration - -* `buildx install` locates the Docker root directory, writes the **generator** plugin manifest, and pulls `stellaops/sbom-indexer` image (pinned digest). -* `buildx build` wrapper injects: - - * `--attest=type=sbom,generator=stellaops/sbom-indexer` - * `--label org.stellaops.request=sbom` -* Post‑build: CLI optionally calls **Scanner.WebService** to **verify referrers**, **compose** image SBOMs, and **attest** via Signer/Attestor. - -**Detection**: If Buildx or generator unavailable, CLI falls back to **post‑build scan** with a warning. - ---- - -## 7) Artifact handling - -* **Downloads** (`export sbom`, `report final`): stream to file; compute sha256 on the fly; write sidecar `.sha256` and optional **verification bundle** (if `--bundle`). -* **Uploads** (`offline kit import`): chunked upload; retry on transient errors; show progress bar (unless `--json`). - ---- - -## 8) Security posture - -* **DPoP private keys** stored in **OS keychain**; metadata cached in config. -* **No plaintext tokens** on disk; short‑lived **OpToks** held in memory. -* **TLS**: verify backend certificates; allow custom CA bundle for on‑prem. -* **Redaction**: CLI logs remove `Authorization`, DPoP headers, PoE tokens. -* **Supply chain**: CLI distribution binaries are **cosign‑signed**; `stellaops version --verify` checks its own signature. - ---- - -## 9) Observability - -* `--verbose` adds request IDs, timings, and retry traces. -* **Metrics** (optional, disabled by default): Prometheus text file exporter for local monitoring in long‑running agents. -* **Structured logs** (`--json`): per‑event JSON lines with `ts`, `verb`, `status`, `latencyMs`. - ---- - -## 10) Performance targets - -* Startup ≤ **20 ms** (AOT). -* `scan image` request/response overhead ≤ **5 ms** (excluding server work). -* Buildx wrapper overhead negligible (<1 ms). -* Large artifact download (100 MB) sustained ≥ **80 MB/s** on local networks. - ---- - -## 11) Tests & golden outputs - -* **Unit tests**: argument parsing, config precedence, URL resolution, DPoP proof creation. 
-* **Integration tests** (Testcontainers): mock Authority/Scanner/Attestor; CI pipeline with fake registry. -* **Golden outputs**: verb snapshots for `--json` across OSes; kept in `tests/golden/…`. -* **Contract tests**: ensure API shapes match service OpenAPI; fail build if incompatible. - ---- - -## 12) Error envelopes (human + JSON) - -**Human:** - -``` -✖ Policy FAIL: 3 high, 1 critical (VEX suppressed 12) - - pkg:rpm/openssl (CVE-2025-12345) — affected (vendor) — fixed in 3.0.14 - - pkg:npm/lodash (GHSA-xxxx) — affected — no fix - See: https://ui.internal/scans/sha256:... -Exit code: 2 -``` - -**JSON (`--json`):** - -```json -{ "event":"report", "status":"fail", "critical":1, "high":3, "url":"https://ui..." } -``` - ---- - -## 13) Admin & advanced flags - -* `--authority`, `--scanner`, `--attestor`, `--concelier`, `--excititor` override config URLs. -* `--no-color`, `--quiet`, `--json`. -* `--timeout`, `--retries`, `--retry-backoff-ms`. -* `--ca-bundle`, `--insecure` (dev only; prints warning). -* `--trace` (dump HTTP traces to file; scrubbed). - ---- - -## 14) Interop with other tools - -* Emits **CycloneDX Protobuf** directly to stdout when `export sbom --format cdx-pb --out -`. -* Pipes to `jq`/`yq` cleanly in JSON mode. -* Can act as a **credential helper** for scripts: `stellaops auth token --aud scanner` prints a one‑shot token for curl. - ---- - -## 15) Packaging & distribution - -* **Installers**: deb/rpm (postinst registers completions), Homebrew, Scoop, Winget, MSI/MSIX. -* **Shell completions**: bash/zsh/fish/pwsh. -* **Update channel**: `stellaops self-update` (optional) fetches cosign‑signed release manifest; corporate environments can disable. - ---- - -## 16) Security hard lines - -* Refuse to print token values; redact Authorization headers in verbose output. -* Disallow `--insecure` unless `STELLAOPS_CLI_ALLOW_INSECURE=1` set (double opt‑in). -* Enforce **short token TTL**; refresh proactively when <30 s left. -* Device‑code cache binding to **machine** and **user** (protect against copy to other machines). - ---- - -## 17) Wire sequences - -**A) Scan & wait with attestation** - -```mermaid -sequenceDiagram - autonumber - participant CLI - participant Auth as Authority - participant SW as Scanner.WebService - participant SG as Signer - participant AT as Attestor - - CLI->>Auth: device code flow (DPoP) - Auth-->>CLI: OpTok (aud=scanner) - - CLI->>SW: POST /scans { imageRef, attest:true } - SW-->>CLI: { scanId } - CLI->>SW: GET /scans/{id} (poll) - SW-->>CLI: { status: completed, artifacts, rekor? } # if attested - - alt attestation pending - SW->>SG: POST /sign/dsse (server-side) - SG-->>SW: DSSE - SW->>AT: POST /rekor/entries - AT-->>SW: { uuid, proof } - end - - CLI->>SW: GET /sboms/?format=cdx-pb&view=usage - SW-->>CLI: bytes -``` - -**B) Verify attestation by artifact** - -```mermaid -sequenceDiagram - autonumber - participant CLI - participant AT as Attestor - - CLI->>AT: POST /rekor/verify { artifactSha256 } - AT-->>CLI: { ok:true, uuid, index, logURL } -``` - ---- - -## 18) Roadmap (CLI) - -* `scan fs ` (local filesystem tree) → upload to backend for analysis. -* `policy test --sbom ` (simulate policy results offline using local policy bundle). -* `runtime capture` (developer mode) — capture small `/proc//maps` for troubleshooting. -* Pluggable output renderers for SARIF/HTML (admin‑controlled). 
- ---- - -## 19) Example CI snippets - -**GitHub Actions (post‑build)** - -```yaml -- name: Login (device code w/ OIDC broker) - run: stellaops auth login --json --authority ${{ secrets.AUTHORITY_URL }} - -- name: Scan - run: stellaops scan image ${{ steps.build.outputs.digest }} --wait --json - -- name: Export (usage view, protobuf) - run: stellaops export sbom ${{ steps.build.outputs.digest }} --view usage --format cdx-pb --out sbom.pb - -- name: Verify attestation - run: stellaops verify attestation --artifact $(sha256sum sbom.pb | cut -d' ' -f1) --json -``` - -**GitLab (buildx generator)** - -```yaml -script: - - stellaops buildx install - - docker buildx build --attest=type=sbom,generator=stellaops/sbom-indexer -t $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA . - - stellaops scan image $CI_REGISTRY_IMAGE@$IMAGE_DIGEST --wait --json -``` - ---- - -## 20) Test matrix (OS/arch) - -* Linux: ubuntu‑20.04/22.04/24.04 (x64, arm64), alpine (musl). -* macOS: 13–15 (x64, arm64). -* Windows: 10/11, Server 2019/2022 (x64, arm64). -* Docker engines: Docker Desktop, containerd‑based runners. +# component_architecture_cli.md — **Stella Ops CLI** (2025Q4) + +> **Scope.** Implementation‑ready architecture for **Stella Ops CLI**: command surface, process model, auth (Authority/DPoP), integration with Scanner/Excititor/Concelier/Signer/Attestor, Buildx plug‑in management, offline kit behavior, packaging, observability, security posture, and CI ergonomics. + +--- + +## 0) Mission & boundaries + +**Mission.** Provide a **fast, deterministic, CI‑friendly** command‑line interface to drive Stella Ops workflows: + +* Build‑time SBOM generation via **Buildx generator** orchestration. +* Post‑build **scan/compose/diff/export** against **Scanner.WebService**. +* **Policy** operations and **VEX/Vuln** data pulls (operator tasks). +* **Verification** (attestation, referrers, signatures) for audits. +* Air‑gapped/offline **kit** administration. + +**Boundaries.** + +* CLI **never** signs; it only calls **Signer**/**Attestor** via backend APIs when needed (e.g., `report --attest`). +* CLI **does not** store long‑lived credentials beyond OS keychain; tokens are **short** (Authority OpToks). +* Heavy work (scanning, merging, policy) is executed **server‑side** (Scanner/Excititor/Concelier). + +--- + +## 1) Solution layout & runtime form + +``` +src/ + ├─ StellaOps.Cli/ # net10.0 (Native AOT) single binary + ├─ StellaOps.Cli.Core/ # verb plumbing, config, HTTP, auth + ├─ StellaOps.Cli.Plugins/ # optional verbs packaged as plugins + ├─ StellaOps.Cli.Tests/ # unit + golden-output tests + └─ packaging/ + ├─ msix / msi / deb / rpm / brew formula + └─ scoop manifest / winget manifest +``` + +**Language/runtime**: .NET 10 **Native AOT** for speed/startup; Linux builds use **musl** static when possible. + +**Plug-in verbs.** Non-core verbs (Excititor, runtime helpers, future integrations) ship as restart-time plug-ins under `plugins/cli/**` with manifest descriptors. The launcher loads plug-ins on startup; hot reloading is intentionally unsupported. The inaugural bundle, `StellaOps.Cli.Plugins.NonCore`, packages the Excititor, runtime, and offline-kit command groups and publishes its manifest at `plugins/cli/StellaOps.Cli.Plugins.NonCore/`. + +**OS targets**: linux‑x64/arm64, windows‑x64/arm64, macOS‑x64/arm64. + +--- + +## 2) Command surface (verbs) + +> All verbs default to **JSON** output when `--json` is set (CI mode). Human output is concise, deterministic. 
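+
+For CI consumers the `--json` contract means a thin wrapper only has to read JSON lines and branch on the exit code. A sketch under those assumptions (event shape from §12, exit codes from §4.3; error handling trimmed):
+
+```csharp
+using System;
+using System.Diagnostics;
+using System.Text.Json;
+
+// Illustrative CI helper driving the CLI in --json mode; not part of the CLI itself.
+static class CiScanGate
+{
+    public static int Run(string imageDigest)
+    {
+        var psi = new ProcessStartInfo("stellaops", $"scan image {imageDigest} --wait --json")
+        {
+            RedirectStandardOutput = true
+        };
+        using var proc = Process.Start(psi)!;
+
+        string? line;
+        while ((line = proc.StandardOutput.ReadLine()) is not null)
+        {
+            if (line.Length == 0 || line[0] != '{')
+                continue; // machine events are JSON objects, one per line
+
+            using var doc = JsonDocument.Parse(line);
+            if (doc.RootElement.TryGetProperty("event", out var evt) && evt.GetString() == "report")
+                Console.WriteLine($"report status: {doc.RootElement.GetProperty("status").GetString()}");
+        }
+
+        proc.WaitForExit();
+        return proc.ExitCode; // 0 = pass, 2 = policy fail, see §4.3
+    }
+}
+```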
+ +### 2.1 Auth & profile + +* `auth login` + + * Modes: **device‑code** (default), **client‑credentials** (service principal). + * Produces **Authority** access token (OpTok) + stores **DPoP** keypair in OS keychain. +* `auth status` — show current issuer, subject, audiences, expiry. +* `auth logout` — wipe cached tokens/keys. + +### 2.2 Build‑time SBOM (Buildx) + +* `buildx install` — install/update the **StellaOps.Scanner.Sbomer.BuildXPlugin** on the host. +* `buildx verify` — ensure generator is usable. +* `buildx build` — thin wrapper around `docker buildx build --attest=type=sbom,generator=stellaops/sbom-indexer` with convenience flags: + + * `--attest` (request Signer/Attestor via backend post‑push) + * `--provenance` pass‑through (optional) + +### 2.3 Scanning & artifacts + +* `scan image ` + + * Options: `--force`, `--wait`, `--view=inventory|usage|both`, `--format=cdx-json|cdx-pb|spdx-json`, `--attest` (ask backend to sign/log). + * Streams progress; exits early unless `--wait`. +* `diff image --old --new [--view ...]` — show layer‑attributed changes. +* `export sbom [--view ... --format ... --out file]` — download artifact. +* `report final [--policy-revision ... --attest]` — request PASS/FAIL report from backend (policy+vex) and optional attestation. + +### 2.4 Policy & data + +* `policy get/set/apply` — fetch active policy, apply staged policy, compute digest. +* `concelier export` — trigger/export canonical JSON or Trivy DB (admin). +* `excititor export` — trigger/export consensus/raw claims (admin). + +### 2.5 Verification + +* `verify attestation --uuid | --artifact | --bundle ` — call **Attestor /verify** and print proof summary. +* `verify referrers ` — ask **Signer /verify/referrers** (is image Stella‑signed?). +* `verify image-signature ` — standalone cosign verification (optional, local). + +### 2.6 Runtime (Zastava helper) + +* `runtime policy test --image/-i [--file --ns --label key=value --json]` — ask backend `/policy/runtime` like the webhook would (accepts multiple `--image`, comma/space lists, or stdin pipelines). + +### 2.7 Offline kit + +* `offline kit pull` — fetch latest **Concelier JSON + Trivy DB + Excititor exports** as a tarball from a mirror. +* `offline kit import ` — upload the kit to on‑prem services (Concelier/Excititor). +* `offline kit status` — list current seed versions. + +### 2.8 Utilities + +* `config set/get` — endpoint & defaults. +* `whoami` — short auth display. +* `version` — CLI + protocol versions; release channel. + +### 2.9 Aggregation-only guard helpers + +* `sources ingest --dry-run --source --input [--tenant ... --format table|json --output file]` + + * Normalises documents (handles gzip/base64), posts them to the backend `aoc/ingest/dry-run` route, and exits non-zero when guard violations are detected. + * Defaults to table output with ANSI colour; `--json`/`--output` produce deterministic JSON for CI pipelines. + +* `aoc verify [--since ] [--limit ] [--sources list] [--codes list] [--format table|json] [--export file] [--tenant id] [--no-color]` + + * Replays guard checks against stored raw documents. Maps backend `ERR_AOC_00x` codes onto deterministic exit codes so CI can block regressions. + * Supports pagination hints (`--limit`, `--since`), tenant scoping via `--tenant` or `STELLA_TENANT`, and JSON exports for evidence lockers. 
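+
+A hypothetical helper for that mapping; the exit-code table in §4.3 only fixes the 11–17 range for `ERR_AOC_00x`, so the per-code assignment below is an assumed convention, not a documented contract.
+
+```csharp
+using System;
+
+// Assumed convention: exit code = 10 + numeric suffix of the guard code.
+static class AocExitCodes
+{
+    public static int ToExitCode(string guardCode) => guardCode switch
+    {
+        "ERR_AOC_001" => 11,
+        "ERR_AOC_002" => 12,
+        "ERR_AOC_003" => 13,
+        "ERR_AOC_004" => 14,
+        "ERR_AOC_005" => 15,
+        "ERR_AOC_006" => 16,
+        "ERR_AOC_007" => 17,
+        _ => throw new ArgumentOutOfRangeException(nameof(guardCode), guardCode, "unknown guard code")
+    };
+}
+```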
+ +--- + +## 3) AuthN: Authority + DPoP + +### 3.1 Token acquisition + +* **Device‑code**: the CLI opens an OIDC device code flow against **Authority**; the browser login is optional for service principals. +* **Client‑credentials**: service principals use **private_key_jwt** or **mTLS** to get tokens. + +### 3.2 DPoP key management + +* On first login, the CLI generates an **ephemeral JWK** (Ed25519) and stores it in the **OS keychain** (Keychain/DPAPI/KWallet/Gnome Keyring). +* Every request to backend services includes a **DPoP proof**; CLI refreshes tokens as needed. + +### 3.3 Multi‑audience & scopes + +* CLI requests **audiences** as needed per verb: + + * `scanner` for scan/export/report/diff + * `signer` (indirect; usually backend calls Signer) + * `attestor` for verify + * `concelier`/`excititor` for admin verbs + +CLI rejects verbs if required scopes are missing. + +--- + +## 4) Process model & reliability + +### 4.1 HTTP client + +* Single **http2** client with connection pooling, DNS pinning, retry/backoff (idempotent GET/POST marked safe). +* **DPoP nonce** handling: on `401` with nonce challenge, CLI replays once. + +### 4.2 Streaming + +* `scan` and `report` support **server‑sent JSON lines** (progress events). +* `--json` prints machine events; human mode shows compact spinners and crucial updates only. + +### 4.3 Exit codes (CI‑safe) + +| Code | Meaning | +| ---- | ------------------------------------------- | +| 0 | Success | +| 2 | Policy fail (final report verdict=fail) | +| 3 | Verification failed (attestation/signature) | +| 4 | Auth error (invalid/missing token/DPoP) | +| 5 | Resource not found (image/SBOM) | +| 6 | Rate limited / quota exceeded | +| 7 | Backend unavailable (retryable) | +| 9 | Invalid arguments | +| 11–17 | Aggregation-only guard violation (`ERR_AOC_00x`) | +| 18 | Verification truncated (increase `--limit`) | +| 70 | Transport/authentication failure | +| 71 | CLI usage error (missing tenant, invalid cursor) | + +--- + +## 5) Configuration model + +**Precedence:** CLI flags → env vars → config file → defaults. + +**Config file**: `${XDG_CONFIG_HOME}/stellaops/config.yaml` (Windows: `%APPDATA%\StellaOps\config.yaml`) + +```yaml +cli: + authority: "https://authority.internal" + backend: + scanner: "https://scanner-web.internal" + attestor: "https://attestor.internal" + concelier: "https://concelier-web.internal" + excititor: "https://excititor-web.internal" + auth: + audienceDefault: "scanner" + deviceCode: true + output: + json: false + color: auto + tls: + caBundle: "/etc/ssl/certs/ca-bundle.crt" + offline: + kitMirror: "s3://mirror/stellaops-kit" +``` + +Environment variables: `STELLAOPS_AUTHORITY`, `STELLAOPS_SCANNER_URL`, etc. + +--- + +## 6) Buildx generator orchestration + +* `buildx install` locates the Docker root directory, writes the **generator** plugin manifest, and pulls `stellaops/sbom-indexer` image (pinned digest). +* `buildx build` wrapper injects: + + * `--attest=type=sbom,generator=stellaops/sbom-indexer` + * `--label org.stellaops.request=sbom` +* Post‑build: CLI optionally calls **Scanner.WebService** to **verify referrers**, **compose** image SBOMs, and **attest** via Signer/Attestor. + +**Detection**: If Buildx or generator unavailable, CLI falls back to **post‑build scan** with a warning. + +--- + +## 7) Artifact handling + +* **Downloads** (`export sbom`, `report final`): stream to file; compute sha256 on the fly; write sidecar `.sha256` and optional **verification bundle** (if `--bundle`). 
+* **Uploads** (`offline kit import`): chunked upload; retry on transient errors; show progress bar (unless `--json`). + +--- + +## 8) Security posture + +* **DPoP private keys** stored in **OS keychain**; metadata cached in config. +* **No plaintext tokens** on disk; short‑lived **OpToks** held in memory. +* **TLS**: verify backend certificates; allow custom CA bundle for on‑prem. +* **Redaction**: CLI logs remove `Authorization`, DPoP headers, PoE tokens. +* **Supply chain**: CLI distribution binaries are **cosign‑signed**; `stellaops version --verify` checks its own signature. + +--- + +## 9) Observability + +* `--verbose` adds request IDs, timings, and retry traces. +* **Metrics** (optional, disabled by default): Prometheus text file exporter for local monitoring in long‑running agents. +* **Structured logs** (`--json`): per‑event JSON lines with `ts`, `verb`, `status`, `latencyMs`. + +--- + +## 10) Performance targets + +* Startup ≤ **20 ms** (AOT). +* `scan image` request/response overhead ≤ **5 ms** (excluding server work). +* Buildx wrapper overhead negligible (<1 ms). +* Large artifact download (100 MB) sustained ≥ **80 MB/s** on local networks. + +--- + +## 11) Tests & golden outputs + +* **Unit tests**: argument parsing, config precedence, URL resolution, DPoP proof creation. +* **Integration tests** (Testcontainers): mock Authority/Scanner/Attestor; CI pipeline with fake registry. +* **Golden outputs**: verb snapshots for `--json` across OSes; kept in `tests/golden/…`. +* **Contract tests**: ensure API shapes match service OpenAPI; fail build if incompatible. + +--- + +## 12) Error envelopes (human + JSON) + +**Human:** + +``` +✖ Policy FAIL: 3 high, 1 critical (VEX suppressed 12) + - pkg:rpm/openssl (CVE-2025-12345) — affected (vendor) — fixed in 3.0.14 + - pkg:npm/lodash (GHSA-xxxx) — affected — no fix + See: https://ui.internal/scans/sha256:... +Exit code: 2 +``` + +**JSON (`--json`):** + +```json +{ "event":"report", "status":"fail", "critical":1, "high":3, "url":"https://ui..." } +``` + +--- + +## 13) Admin & advanced flags + +* `--authority`, `--scanner`, `--attestor`, `--concelier`, `--excititor` override config URLs. +* `--no-color`, `--quiet`, `--json`. +* `--timeout`, `--retries`, `--retry-backoff-ms`. +* `--ca-bundle`, `--insecure` (dev only; prints warning). +* `--trace` (dump HTTP traces to file; scrubbed). + +--- + +## 14) Interop with other tools + +* Emits **CycloneDX Protobuf** directly to stdout when `export sbom --format cdx-pb --out -`. +* Pipes to `jq`/`yq` cleanly in JSON mode. +* Can act as a **credential helper** for scripts: `stellaops auth token --aud scanner` prints a one‑shot token for curl. + +--- + +## 15) Packaging & distribution + +* **Installers**: deb/rpm (postinst registers completions), Homebrew, Scoop, Winget, MSI/MSIX. +* **Shell completions**: bash/zsh/fish/pwsh. +* **Update channel**: `stellaops self-update` (optional) fetches cosign‑signed release manifest; corporate environments can disable. + +--- + +## 16) Security hard lines + +* Refuse to print token values; redact Authorization headers in verbose output. +* Disallow `--insecure` unless `STELLAOPS_CLI_ALLOW_INSECURE=1` set (double opt‑in). +* Enforce **short token TTL**; refresh proactively when <30 s left. +* Device‑code cache binding to **machine** and **user** (protect against copy to other machines). 
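+
+The proactive-refresh rule is small enough to show inline. A sketch with an assumed acquire delegate standing in for the real Authority token client (locking and retry omitted):
+
+```csharp
+using System;
+using System.Threading;
+using System.Threading.Tasks;
+
+// Sketch of the "<30 s left" refresh rule for cached OpToks.
+public sealed class OpTokCache
+{
+    private static readonly TimeSpan RefreshSkew = TimeSpan.FromSeconds(30);
+    private readonly Func<CancellationToken, Task<(string Token, DateTimeOffset ExpiresAt)>> _acquire;
+    private (string Token, DateTimeOffset ExpiresAt)? _current;
+
+    public OpTokCache(Func<CancellationToken, Task<(string Token, DateTimeOffset ExpiresAt)>> acquire)
+        => _acquire = acquire;
+
+    public async Task<string> GetAsync(CancellationToken ct = default)
+    {
+        if (_current.HasValue && _current.Value.ExpiresAt - DateTimeOffset.UtcNow > RefreshSkew)
+            return _current.Value.Token;
+
+        _current = await _acquire(ct); // refresh before the current OpTok lapses
+        return _current.Value.Token;
+    }
+}
+```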
+ +--- + +## 17) Wire sequences + +**A) Scan & wait with attestation** + +```mermaid +sequenceDiagram + autonumber + participant CLI + participant Auth as Authority + participant SW as Scanner.WebService + participant SG as Signer + participant AT as Attestor + + CLI->>Auth: device code flow (DPoP) + Auth-->>CLI: OpTok (aud=scanner) + + CLI->>SW: POST /scans { imageRef, attest:true } + SW-->>CLI: { scanId } + CLI->>SW: GET /scans/{id} (poll) + SW-->>CLI: { status: completed, artifacts, rekor? } # if attested + + alt attestation pending + SW->>SG: POST /sign/dsse (server-side) + SG-->>SW: DSSE + SW->>AT: POST /rekor/entries + AT-->>SW: { uuid, proof } + end + + CLI->>SW: GET /sboms/?format=cdx-pb&view=usage + SW-->>CLI: bytes +``` + +**B) Verify attestation by artifact** + +```mermaid +sequenceDiagram + autonumber + participant CLI + participant AT as Attestor + + CLI->>AT: POST /rekor/verify { artifactSha256 } + AT-->>CLI: { ok:true, uuid, index, logURL } +``` + +--- + +## 18) Roadmap (CLI) + +* `scan fs ` (local filesystem tree) → upload to backend for analysis. +* `policy test --sbom ` (simulate policy results offline using local policy bundle). +* `runtime capture` (developer mode) — capture small `/proc//maps` for troubleshooting. +* Pluggable output renderers for SARIF/HTML (admin‑controlled). + +--- + +## 19) Example CI snippets + +**GitHub Actions (post‑build)** + +```yaml +- name: Login (device code w/ OIDC broker) + run: stellaops auth login --json --authority ${{ secrets.AUTHORITY_URL }} + +- name: Scan + run: stellaops scan image ${{ steps.build.outputs.digest }} --wait --json + +- name: Export (usage view, protobuf) + run: stellaops export sbom ${{ steps.build.outputs.digest }} --view usage --format cdx-pb --out sbom.pb + +- name: Verify attestation + run: stellaops verify attestation --artifact $(sha256sum sbom.pb | cut -d' ' -f1) --json +``` + +**GitLab (buildx generator)** + +```yaml +script: + - stellaops buildx install + - docker buildx build --attest=type=sbom,generator=stellaops/sbom-indexer -t $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA . + - stellaops scan image $CI_REGISTRY_IMAGE@$IMAGE_DIGEST --wait --json +``` + +--- + +## 20) Test matrix (OS/arch) + +* Linux: ubuntu‑20.04/22.04/24.04 (x64, arm64), alpine (musl). +* macOS: 13–15 (x64, arm64). +* Windows: 10/11, Server 2019/2022 (x64, arm64). +* Docker engines: Docker Desktop, containerd‑based runners. diff --git a/docs/ARCHITECTURE_CONCELIER.md b/docs/ARCHITECTURE_CONCELIER.md index 8437c2ba..16a6f9b4 100644 --- a/docs/ARCHITECTURE_CONCELIER.md +++ b/docs/ARCHITECTURE_CONCELIER.md @@ -1,518 +1,518 @@ -# component_architecture_concelier.md — **Stella Ops Concelier** (Sprint 22) - -> **Scope.** Implementation-ready architecture for **Concelier**: the advisory ingestion and Link-Not-Merge (LNM) observation pipeline that produces deterministic raw observations, correlation linksets, and evidence events consumed by Policy Engine, Console, CLI, and Export centers. Covers domain models, connectors, observation/linkset builders, storage schema, events, APIs, performance, security, and test matrices. 
- ---- - -## 0) Mission & boundaries - -**Mission.** Acquire authoritative **vulnerability advisories** (vendor PSIRTs, distros, OSS ecosystems, CERTs), persist them as immutable **observations** under the Aggregation-Only Contract (AOC), construct **linksets** that correlate observations without merging or precedence, and export deterministic evidence bundles (JSON, Trivy DB, Offline Kit) for downstream policy evaluation and operator tooling. - -**Boundaries.** - -* Concelier **does not** sign with private keys. When attestation is required, the export artifact is handed to the **Signer**/**Attestor** pipeline (out‑of‑process). -* Concelier **does not** decide PASS/FAIL; it provides data to the **Policy** engine. -* Online operation is **allowlist‑only**; air‑gapped deployments use the **Offline Kit**. - ---- - -## 1) Topology & processes - -**Process shape:** single ASP.NET Core service `StellaOps.Concelier.WebService` hosting: - -* **Scheduler** with distributed locks (Mongo backed). -* **Connectors** (fetch/parse/map) that emit immutable observation candidates. -* **Observation writer** enforcing AOC invariants via `AOCWriteGuard`. -* **Linkset builder** that correlates observations into `advisory_linksets` and annotates conflicts. -* **Event publisher** emitting `advisory.observation.updated` and `advisory.linkset.updated` messages. -* **Exporters** (JSON, Trivy DB, Offline Kit slices) fed from observation/linkset stores. -* **Minimal REST** for health/status/trigger/export and observation/linkset reads. - -**Scale:** HA by running N replicas; **locks** prevent overlapping jobs per source/exporter. - ---- - -## 2) Canonical domain model - -> Stored in MongoDB (database `concelier`), serialized with a **canonical JSON** writer (stable order, camelCase, normalized timestamps). - -### 2.1 Core entities - -#### AdvisoryObservation - -```jsonc -observationId // deterministic id: {tenant}:{source.vendor}:{upstreamId}:{revision} -tenant // issuing tenant (lower-case) -source{ - vendor, stream, api, collectorVersion -} -upstream{ - upstreamId, documentVersion, fetchedAt, receivedAt, - contentHash, signature{present, format?, keyId?, signature?} -} -content{ - format, specVersion, raw, metadata? -} -identifiers{ - cve?, ghsa?, vendorIds[], aliases[] -} -linkset{ - purls[], cpes[], aliases[], references[{type,url}], - reconciledFrom[] -} -createdAt // when Concelier recorded the observation -attributes // optional provenance metadata (batch ids, ingest cursor) -```jsonc - -#### AdvisoryLinkset - -```jsonc -linksetId // sha256 over sorted (tenant, product/vuln tuple, observation ids) -tenant -key{ - vulnerabilityId, - productKey, - confidence // low|medium|high -} -observations[] = [ - { - observationId, - sourceVendor, - statement{ - status?, severity?, references?, notes? - }, - collectedAt - } -] -aliases{ - primary, - others[] -} -purls[] -cpes[] -conflicts[]? // see AdvisoryLinksetConflict -createdAt -updatedAt -```jsonc - -#### AdvisoryLinksetConflict - -```jsonc -conflictId // deterministic hash -type // severity-mismatch | affected-range-divergence | reference-clash | alias-inconsistency | metadata-gap -field? // optional JSON pointer (e.g., /statement/severity/vector) -observations[] // per-source values contributing to the conflict -confidence // low|medium|high (heuristic weight) -detectedAt -```jsonc - -#### ObservationEvent / LinksetEvent - -```jsonc -eventId // ULID -tenant -type // advisory.observation.updated | advisory.linkset.updated -key{ - observationId? 
// on observation event - linksetId? // on linkset event - vulnerabilityId?, - productKey? -} -delta{ - added[], removed[], changed[] // normalized summary for consumers -} -hash // canonical hash of serialized delta payload -occurredAt -```jsonc - -#### ExportState - -```jsonc -exportKind // json | trivydb -baseExportId? // last full baseline -baseDigest? // digest of last full baseline -lastFullDigest? // digest of last full export -lastDeltaDigest? // digest of last delta export -cursor // per-kind incremental cursor -files[] // last manifest snapshot (path → sha256) -```jsonc - -Legacy `Advisory`, `Affected`, and merge-centric entities remain in the repository for historical exports and replay but are being phased out as Link-Not-Merge takes over. New code paths must interact with `AdvisoryObservation` / `AdvisoryLinkset` exclusively and emit conflicts through the structured payloads described above. - -### 2.2 Product identity (`productKey`) - -* **Primary:** `purl` (Package URL). -* **OS packages:** RPM (NEVRA→purl:rpm), DEB (dpkg→purl:deb), APK (apk→purl:alpine), with **EVR/NVRA** preserved. -* **Secondary:** `cpe` retained for compatibility; advisory records may carry both. -* **Image/platform:** `oci:/@` for image‑level advisories (rare). -* **Unmappable:** if a source is non‑deterministic, keep native string under `productKey="native::"` and mark **non‑joinable**. - ---- - -## 3) Source families & precedence - -### 3.1 Families - -* **Vendor PSIRTs**: Microsoft, Oracle, Cisco, Adobe, Apple, VMware, Chromium… -* **Linux distros**: Red Hat, SUSE, Ubuntu, Debian, Alpine… -* **OSS ecosystems**: OSV, GHSA (GitHub Security Advisories), PyPI, npm, Maven, NuGet, Go. -* **CERTs / national CSIRTs**: CISA (KEV, ICS), JVN, ACSC, CCCS, KISA, CERT‑FR/BUND, etc. - -### 3.2 Precedence (when claims conflict) - -1. **Vendor PSIRT** (authoritative for their product). -2. **Distro** (authoritative for packages they ship, including backports). -3. **Ecosystem** (OSV/GHSA) for library semantics. -4. **CERTs/aggregators** for enrichment (KEV/known exploited). - -> Precedence affects **Affected** ranges and **fixed** info; **severity** is normalized to the **maximum** credible severity unless policy overrides. Conflicts are retained with **source provenance**. - ---- - -## 4) Connectors & normalization - -### 4.1 Connector contract - -```csharp -public interface IFeedConnector { - string SourceName { get; } - Task FetchAsync(IServiceProvider sp, CancellationToken ct); // -> document collection - Task ParseAsync(IServiceProvider sp, CancellationToken ct); // -> dto collection (validated) - Task MapAsync(IServiceProvider sp, CancellationToken ct); // -> advisory/alias/affected/reference -} -```jsonc - -* **Fetch**: windowed (cursor), conditional GET (ETag/Last‑Modified), retry/backoff, rate limiting. -* **Parse**: schema validation (JSON Schema, XSD/CSAF), content type checks; write **DTO** with normalized casing. -* **Map**: build canonical records; all outputs carry **provenance** (doc digest, URI, anchors). - -### 4.2 Version range normalization - -* **SemVer** ecosystems (npm, pypi, maven, nuget, golang): normalize to `introduced`/`fixed` semver ranges (use `~`, `^`, `<`, `>=` canonicalized to intervals). -* **RPM EVR**: `epoch:version-release` with `rpmvercmp` semantics; store raw EVR strings and also **computed order keys** for query. -* **DEB**: dpkg version comparison semantics mirrored; store computed keys. -* **APK**: Alpine version semantics; compute order keys. 
-* **Generic**: if provider uses text, retain raw; do **not** invent ranges. - -### 4.3 Severity & CVSS - -* Normalize **CVSS v2/v3/v4** where available (vector, baseScore, severity). -* If multiple CVSS sources exist, track them all; **effective severity** defaults to **max** by policy (configurable). -* **ExploitKnown** toggled by KEV and equivalent sources; store **evidence** (source, date). - ---- - -## 5) Observation & linkset pipeline - -> **Goal:** deterministically ingest raw documents into immutable observations, correlate them into evidence-rich linksets, and broadcast changes without precedence or mutation. - -### 5.1 Observation flow - -1. **Connector fetch/parse/map** — connectors download upstream payloads, validate signatures, and map to DTOs (identifiers, references, raw payload, provenance). -2. **AOC guard** — `AOCWriteGuard` verifies forbidden keys, provenance completeness, tenant claims, timestamp normalization, and content hash idempotency. Violations raise `ERR_AOC_00x` mapped to structured logs and metrics. -3. **Append-only write** — observations insert into `advisory_observations`; duplicates by `(tenant, source.vendor, upstream.upstreamId, upstream.contentHash)` become no-ops; new content for same upstream id creates a supersedes chain. -4. **Change feed + event** — Mongo change streams trigger `advisory.observation.updated@1` events with deterministic payloads (IDs, hash, supersedes pointer, linkset summary). Policy Engine, Offline Kit builder, and guard dashboards subscribe. - -### 5.2 Linkset correlation - -1. **Queue** — observation deltas enqueue correlation jobs keyed by `(tenant, vulnerabilityId, productKey)` candidates derived from identifiers + alias graph. -2. **Canonical grouping** — builder resolves aliases using Concelier’s alias store and deterministic heuristics (vendor > distro > cert), deriving normalized product keys (purl preferred) and confidence scores. -3. **Linkset materialization** — `advisory_linksets` documents store sorted observation references, alias sets, product keys, range metadata, and conflict payloads. Writes are idempotent; unchanged hashes skip updates. -4. **Conflict detection** — builder emits structured conflicts (`severity-mismatch`, `affected-range-divergence`, `reference-clash`, `alias-inconsistency`, `metadata-gap`). Conflicts carry per-observation values for explainability. -5. **Event emission** — `advisory.linkset.updated@1` summarizes deltas (`added`, `removed`, `changed` observation IDs, conflict updates, confidence changes) and includes a canonical hash for replay validation. - -### 5.3 Event contract - -| Event | Schema | Notes | -|-------|--------|-------| -| `advisory.observation.updated@1` | `events/advisory.observation.updated@1.json` | Fired on new or superseded observations. Includes `observationId`, source metadata, `linksetSummary` (aliases/purls), supersedes pointer (if any), SHA-256 hash, and `traceId`. | -| `advisory.linkset.updated@1` | `events/advisory.linkset.updated@1.json` | Fired when correlation changes. Includes `linksetId`, `key{vulnerabilityId, productKey, confidence}`, observation deltas, conflicts, `updatedAt`, and canonical hash. | - -Events are emitted via NATS (primary) and Redis Stream (fallback). Consumers acknowledge idempotently using the hash; duplicates are safe. Offline Kit captures both topics during bundle creation for air-gapped replay. 
- ---- - -## 6) Storage schema (MongoDB) - -### Collections & indexes (LNM path) - -* `concelier.sources` `{_id, type, baseUrl, enabled, notes}` — connector catalog. -* `concelier.source_state` `{sourceName(unique), enabled, cursor, lastSuccess, backoffUntil, paceOverrides}` — run-state (TTL indexes on `backoffUntil`). -* `concelier.documents` `{_id, sourceName, uri, fetchedAt, sha256, contentType, status, metadata, gridFsId?, etag?, lastModified?}` — raw payload registry. - * Indexes: `{sourceName:1, uri:1}` unique; `{fetchedAt:-1}` for recent fetches. -* `concelier.dto` `{_id, sourceName, documentId, schemaVer, payload, validatedAt}` — normalized connector DTOs used for replay. - * Index: `{sourceName:1, documentId:1}`. -* `concelier.advisory_observations` - -``` -{ - _id: "tenant:vendor:upstreamId:revision", - tenant, - source: { vendor, stream, api, collectorVersion }, - upstream: { upstreamId, documentVersion, fetchedAt, receivedAt, contentHash, signature }, - content: { format, specVersion, raw, metadata? }, - identifiers: { cve?, ghsa?, vendorIds[], aliases[] }, - linkset: { purls[], cpes[], aliases[], references[], reconciledFrom[] }, - supersedes?: "prevObservationId", - createdAt, - attributes?: object -} -``` - - * Indexes: `{tenant:1, upstream.upstreamId:1}`, `{tenant:1, source.vendor:1, linkset.purls:1}`, `{tenant:1, linkset.aliases:1}`, `{tenant:1, createdAt:-1}`. -* `concelier.advisory_linksets` - -``` -{ - _id: "sha256:...", - tenant, - key: { vulnerabilityId, productKey, confidence }, - observations: [ - { observationId, sourceVendor, statement, collectedAt } - ], - aliases: { primary, others: [] }, - purls: [], - cpes: [], - conflicts: [], - createdAt, - updatedAt -} -``` - - * Indexes: `{tenant:1, key.vulnerabilityId:1, key.productKey:1}`, `{tenant:1, purls:1}`, `{tenant:1, aliases.primary:1}`, `{tenant:1, updatedAt:-1}`. -* `concelier.advisory_events` - -``` -{ - _id: ObjectId, - tenant, - type: "advisory.observation.updated" | "advisory.linkset.updated", - key, - delta, - hash, - occurredAt -} -``` - - * TTL index on `occurredAt` (configurable retention), `{type:1, occurredAt:-1}` for replay. -* `concelier.export_state` `{_id(exportKind), baseExportId?, baseDigest?, lastFullDigest?, lastDeltaDigest?, cursor, files[]}` -* `locks` `{_id(jobKey), holder, acquiredAt, heartbeatAt, leaseMs, ttlAt}` (TTL cleans dead locks) -* `jobs` `{_id, type, args, state, startedAt, heartbeatAt, endedAt, error}` - -**Legacy collections** (`advisory`, `alias`, `affected`, `reference`, `merge_event`) remain read-only during the migration window to support back-compat exports. New code must not write to them; scheduled cleanup removes them after Link-Not-Merge GA. - -**GridFS buckets**: `fs.documents` for raw payloads (immutable); `fs.exports` for historical JSON/Trivy archives. - ---- - -## 7) Exporters - -### 7.1 Deterministic JSON (vuln‑list style) - -* Folder structure mirroring `////…` with one JSON per advisory; deterministic ordering, stable timestamps, normalized whitespace. -* `manifest.json` lists all files with SHA‑256 and a top‑level **export digest**. - -### 7.2 Trivy DB exporter - -* Builds Bolt DB archives compatible with Trivy; supports **full** and **delta** modes. -* In delta, unchanged blobs are reused from the base; metadata captures: - - ```json - { - "mode": "delta|full", - "baseExportId": "...", - "baseManifestDigest": "sha256:...", - "changed": ["path1", "path2"], - "removed": ["path3"] - } - ``` -* Optional ORAS push (OCI layout) for registries. 
-* Offline kit bundles include Trivy DB + JSON tree + export manifest. -* Mirror-ready bundles: when `concelier.trivy.mirror` defines domains, the exporter emits `mirror/index.json` plus per-domain `manifest.json`, `metadata.json`, and `db.tar.gz` files with SHA-256 digests so Concelier mirrors can expose domain-scoped download endpoints. -* Concelier.WebService serves `/concelier/exports/index.json` and `/concelier/exports/mirror/{domain}/…` directly from the export tree with hour-long budgets (index: 60 s, bundles: 300 s, immutable) and per-domain rate limiting; the endpoints honour Stella Ops Authority or CIDR bypass lists depending on mirror topology. - -### 7.3 Hand‑off to Signer/Attestor (optional) - -* On export completion, if `attest: true` is set in job args, Concelier **posts** the artifact metadata to **Signer**/**Attestor**; Concelier itself **does not** hold signing keys. -* Export record stores returned `{ uuid, index, url }` from **Rekor v2**. - ---- - -## 8) REST APIs - -All under `/api/v1/concelier`. - -**Health & status** - -``` -GET /healthz | /readyz -GET /status → sources, last runs, export cursors -``` - -**Sources & jobs** - -``` -GET /sources → list of configured sources -POST /sources/{name}/trigger → { jobId } -POST /sources/{name}/pause | /resume → toggle -GET /jobs/{id} → job status -``` - -**Exports** - -``` -POST /exports/json { full?:bool, force?:bool, attest?:bool } → { exportId, digest, rekor? } -POST /exports/trivy { full?:bool, force?:bool, publish?:bool, attest?:bool } → { exportId, digest, rekor? } -GET /exports/{id} → export metadata (kind, digest, createdAt, rekor?) -GET /concelier/exports/index.json → mirror index describing available domains/bundles -GET /concelier/exports/mirror/{domain}/manifest.json -GET /concelier/exports/mirror/{domain}/bundle.json -GET /concelier/exports/mirror/{domain}/bundle.json.jws -``` - -**Search (operator debugging)** - -``` -GET /advisories/{key} -GET /advisories?scheme=CVE&value=CVE-2025-12345 -GET /affected?productKey=pkg:rpm/openssl&limit=100 -``` - -**AuthN/Z:** Authority tokens (OpTok) with roles: `concelier.read`, `concelier.admin`, `concelier.export`. - ---- - -## 9) Configuration (YAML) - -```yaml -concelier: - mongo: { uri: "mongodb://mongo/concelier" } - s3: - endpoint: "http://minio:9000" - bucket: "stellaops-concelier" - scheduler: - windowSeconds: 30 - maxParallelSources: 4 - sources: - - name: redhat - kind: csaf - baseUrl: https://access.redhat.com/security/data/csaf/v2/ - signature: { type: pgp, keys: [ "…redhat PGP…" ] } - enabled: true - windowDays: 7 - - name: suse - kind: csaf - baseUrl: https://ftp.suse.com/pub/projects/security/csaf/ - signature: { type: pgp, keys: [ "…suse PGP…" ] } - - name: ubuntu - kind: usn-json - baseUrl: https://ubuntu.com/security/notices.json - signature: { type: none } - - name: osv - kind: osv - baseUrl: https://api.osv.dev/v1/ - signature: { type: none } - - name: ghsa - kind: ghsa - baseUrl: https://api.github.com/graphql - auth: { tokenRef: "env:GITHUB_TOKEN" } - exporters: - json: - enabled: true - output: s3://stellaops-concelier/json/ - trivy: - enabled: true - mode: full - output: s3://stellaops-concelier/trivy/ - oras: - enabled: false - repo: ghcr.io/org/concelier - precedence: - vendorWinsOverDistro: true - distroWinsOverOsv: true - severity: - policy: max # or 'vendorPreferred' / 'distroPreferred' -``` - ---- - -## 10) Security & compliance - -* **Outbound allowlist** per connector (domains, protocols); proxy support; TLS pinning where possible. 
-* **Signature verification** for raw docs (PGP/cosign/x509) with results stored in `document.metadata.sig`. Docs failing verification may still be ingested but flagged; Policy Engine or downstream policy can down-weight them. -* **No secrets in logs**; auth material via `env:` or mounted files; HTTP redaction of `Authorization` headers. -* **Multi‑tenant**: per‑tenant DBs or prefixes; per‑tenant S3 prefixes; tenant‑scoped API tokens. -* **Determinism**: canonical JSON writer; export digests stable across runs given same inputs. - ---- - -## 11) Performance targets & scale - -* **Ingest**: ≥ 5k documents/min on 4 cores (CSAF/OpenVEX/JSON). -* **Normalize/map**: ≥ 50k observation statements/min on 4 cores. -* **Observation write**: ≤ 5 ms P95 per document (including guard + Mongo write). -* **Linkset build**: ≤ 15 ms P95 per `(vulnerabilityId, productKey)` update, even with 20+ contributing observations. -* **Export**: 1M advisories JSON in ≤ 90 s (streamed, zstd), Trivy DB in ≤ 60 s on 8 cores. -* **Memory**: hard cap per job; chunked streaming writers; backpressure to avoid GC spikes. - -**Scale pattern**: add Concelier replicas; Mongo scaling via indices and read/write concerns; GridFS only for oversized docs. - ---- - -## 12) Observability - -* **Metrics** - - * `concelier.fetch.docs_total{source}` - * `concelier.fetch.bytes_total{source}` - * `concelier.parse.failures_total{source}` - * `concelier.map.statements_total{source}` - * `concelier.observations.write_total{result=ok|noop|error}` - * `concelier.linksets.updated_total{result=ok|skip|error}` - * `concelier.linksets.conflicts_total{type}` - * `concelier.export.bytes{kind}` - * `concelier.export.duration_seconds{kind}` -* **Tracing** around fetch/parse/map/observe/linkset/export. -* **Logs**: structured with `source`, `uri`, `docDigest`, `advisoryKey`, `exportId`. - ---- - -## 13) Testing matrix - -* **Connectors:** fixture suites for each provider/format (happy path; malformed; signature fail). -* **Version semantics:** EVR vs dpkg vs semver edge cases (epoch bumps, tilde versions, pre‑releases). -* **Linkset correlation:** multi-source conflicts (severity, range, alias) produce deterministic conflict payloads; ensure confidence scoring stable. -* **Export determinism:** byte‑for‑byte stable outputs across runs; digest equality. -* **Performance:** soak tests with 1M advisories; cap memory; verify backpressure. -* **API:** pagination, filters, RBAC, error envelopes (RFC 7807). -* **Offline kit:** bundle build & import correctness. - ---- - -## 14) Failure modes & recovery - -* **Source outages:** scheduler backs off with exponential delay; `source_state.backoffUntil`; alerts on staleness. -* **Schema drifts:** parse stage marks DTO invalid; job fails with clear diagnostics; connector version flags track supported schema ranges. -* **Partial exports:** exporters write to temp prefix; **manifest commit** is atomic; only then move to final prefix and update `export_state`. -* **Resume:** all stages idempotent; `source_state.cursor` supports window resume. 
- ---- - -## 15) Operator runbook (quick) - -* **Trigger all sources:** `POST /api/v1/concelier/sources/*/trigger` -* **Force full export JSON:** `POST /api/v1/concelier/exports/json { "full": true, "force": true }` -* **Force Trivy DB delta publish:** `POST /api/v1/concelier/exports/trivy { "full": false, "publish": true }` -* **Inspect observation:** `GET /api/v1/concelier/observations/{observationId}` -* **Query linkset:** `GET /api/v1/concelier/linksets?vulnerabilityId=CVE-2025-12345&productKey=pkg:rpm/redhat/openssl` -* **Pause noisy source:** `POST /api/v1/concelier/sources/osv/pause` - ---- - -## 16) Rollout plan - -1. **MVP**: Red Hat (CSAF), SUSE (CSAF), Ubuntu (USN JSON), OSV; JSON export. -2. **Add**: GHSA GraphQL, Debian (DSA HTML/JSON), Alpine secdb; Trivy DB export. -3. **Attestation hand‑off**: integrate with **Signer/Attestor** (optional). -4. **Scale & diagnostics**: provider dashboards, staleness alerts, export cache reuse. -5. **Offline kit**: end‑to‑end verified bundles for air‑gap. +# component_architecture_concelier.md — **Stella Ops Concelier** (Sprint 22) + +> **Scope.** Implementation-ready architecture for **Concelier**: the advisory ingestion and Link-Not-Merge (LNM) observation pipeline that produces deterministic raw observations, correlation linksets, and evidence events consumed by Policy Engine, Console, CLI, and Export centers. Covers domain models, connectors, observation/linkset builders, storage schema, events, APIs, performance, security, and test matrices. + +--- + +## 0) Mission & boundaries + +**Mission.** Acquire authoritative **vulnerability advisories** (vendor PSIRTs, distros, OSS ecosystems, CERTs), persist them as immutable **observations** under the Aggregation-Only Contract (AOC), construct **linksets** that correlate observations without merging or precedence, and export deterministic evidence bundles (JSON, Trivy DB, Offline Kit) for downstream policy evaluation and operator tooling. + +**Boundaries.** + +* Concelier **does not** sign with private keys. When attestation is required, the export artifact is handed to the **Signer**/**Attestor** pipeline (out‑of‑process). +* Concelier **does not** decide PASS/FAIL; it provides data to the **Policy** engine. +* Online operation is **allowlist‑only**; air‑gapped deployments use the **Offline Kit**. + +--- + +## 1) Topology & processes + +**Process shape:** single ASP.NET Core service `StellaOps.Concelier.WebService` hosting: + +* **Scheduler** with distributed locks (Mongo backed). +* **Connectors** (fetch/parse/map) that emit immutable observation candidates. +* **Observation writer** enforcing AOC invariants via `AOCWriteGuard`. +* **Linkset builder** that correlates observations into `advisory_linksets` and annotates conflicts. +* **Event publisher** emitting `advisory.observation.updated` and `advisory.linkset.updated` messages. +* **Exporters** (JSON, Trivy DB, Offline Kit slices) fed from observation/linkset stores. +* **Minimal REST** for health/status/trigger/export and observation/linkset reads. + +**Scale:** HA by running N replicas; **locks** prevent overlapping jobs per source/exporter. + +--- + +## 2) Canonical domain model + +> Stored in MongoDB (database `concelier`), serialized with a **canonical JSON** writer (stable order, camelCase, normalized timestamps). 
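+
+As an illustration of the guarantees that writer has to provide (stable key order, camelCase keys, normalized UTC timestamps), a minimal sketch using `System.Text.Json` on .NET 8+ might look like the following. `CanonicalJson` and `NormalizeTimestamp` are hypothetical names for this example only, not the shipped implementation, and the ordinal sort plus microsecond-precision timestamp format are assumptions:
+
+```csharp
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Text.Json;
+using System.Text.Json.Nodes;
+
+// Hypothetical canonical-writer sketch. camelCase property names are assumed to be
+// produced upstream; this helper only enforces deterministic (ordinal) key order,
+// compact output, and a fixed UTC timestamp format so identical documents always
+// serialize to identical bytes.
+public static class CanonicalJson
+{
+    public static string Serialize(JsonNode node)
+        => Normalize(node)?.ToJsonString(new JsonSerializerOptions { WriteIndented = false }) ?? "null";
+
+    private static JsonNode? Normalize(JsonNode? node) => node switch
+    {
+        JsonObject obj => new JsonObject(
+            obj.OrderBy(p => p.Key, StringComparer.Ordinal)
+               .Select(p => KeyValuePair.Create(p.Key, Normalize(p.Value)))),
+        JsonArray arr => new JsonArray(arr.Select(Normalize).ToArray()),
+        _ => node?.DeepClone()
+    };
+
+    // Assumed timestamp convention: UTC, microsecond precision, trailing 'Z'.
+    public static string NormalizeTimestamp(DateTimeOffset value)
+        => value.ToUniversalTime().ToString("yyyy-MM-dd'T'HH:mm:ss.ffffff'Z'");
+}
+```
+
+Ordinal ordering (rather than culture-aware sorting) is what keeps observation hashes and export digests reproducible across machines and locales.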
+ +### 2.1 Core entities + +#### AdvisoryObservation + +```jsonc +observationId // deterministic id: {tenant}:{source.vendor}:{upstreamId}:{revision} +tenant // issuing tenant (lower-case) +source{ + vendor, stream, api, collectorVersion +} +upstream{ + upstreamId, documentVersion, fetchedAt, receivedAt, + contentHash, signature{present, format?, keyId?, signature?} +} +content{ + format, specVersion, raw, metadata? +} +identifiers{ + cve?, ghsa?, vendorIds[], aliases[] +} +linkset{ + purls[], cpes[], aliases[], references[{type,url}], + reconciledFrom[] +} +createdAt // when Concelier recorded the observation +attributes // optional provenance metadata (batch ids, ingest cursor) +```jsonc + +#### AdvisoryLinkset + +```jsonc +linksetId // sha256 over sorted (tenant, product/vuln tuple, observation ids) +tenant +key{ + vulnerabilityId, + productKey, + confidence // low|medium|high +} +observations[] = [ + { + observationId, + sourceVendor, + statement{ + status?, severity?, references?, notes? + }, + collectedAt + } +] +aliases{ + primary, + others[] +} +purls[] +cpes[] +conflicts[]? // see AdvisoryLinksetConflict +createdAt +updatedAt +```jsonc + +#### AdvisoryLinksetConflict + +```jsonc +conflictId // deterministic hash +type // severity-mismatch | affected-range-divergence | reference-clash | alias-inconsistency | metadata-gap +field? // optional JSON pointer (e.g., /statement/severity/vector) +observations[] // per-source values contributing to the conflict +confidence // low|medium|high (heuristic weight) +detectedAt +```jsonc + +#### ObservationEvent / LinksetEvent + +```jsonc +eventId // ULID +tenant +type // advisory.observation.updated | advisory.linkset.updated +key{ + observationId? // on observation event + linksetId? // on linkset event + vulnerabilityId?, + productKey? +} +delta{ + added[], removed[], changed[] // normalized summary for consumers +} +hash // canonical hash of serialized delta payload +occurredAt +```jsonc + +#### ExportState + +```jsonc +exportKind // json | trivydb +baseExportId? // last full baseline +baseDigest? // digest of last full baseline +lastFullDigest? // digest of last full export +lastDeltaDigest? // digest of last delta export +cursor // per-kind incremental cursor +files[] // last manifest snapshot (path → sha256) +```jsonc + +Legacy `Advisory`, `Affected`, and merge-centric entities remain in the repository for historical exports and replay but are being phased out as Link-Not-Merge takes over. New code paths must interact with `AdvisoryObservation` / `AdvisoryLinkset` exclusively and emit conflicts through the structured payloads described above. + +### 2.2 Product identity (`productKey`) + +* **Primary:** `purl` (Package URL). +* **OS packages:** RPM (NEVRA→purl:rpm), DEB (dpkg→purl:deb), APK (apk→purl:alpine), with **EVR/NVRA** preserved. +* **Secondary:** `cpe` retained for compatibility; advisory records may carry both. +* **Image/platform:** `oci:/@` for image‑level advisories (rare). +* **Unmappable:** if a source is non‑deterministic, keep native string under `productKey="native::"` and mark **non‑joinable**. + +--- + +## 3) Source families & precedence + +### 3.1 Families + +* **Vendor PSIRTs**: Microsoft, Oracle, Cisco, Adobe, Apple, VMware, Chromium… +* **Linux distros**: Red Hat, SUSE, Ubuntu, Debian, Alpine… +* **OSS ecosystems**: OSV, GHSA (GitHub Security Advisories), PyPI, npm, Maven, NuGet, Go. +* **CERTs / national CSIRTs**: CISA (KEV, ICS), JVN, ACSC, CCCS, KISA, CERT‑FR/BUND, etc. 
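+
+To make the deterministic identifiers from §2.1 concrete before turning to precedence, a minimal sketch is shown below. The helper name and the exact byte layout of the hashed tuple are assumptions for illustration; §2.1 only fixes *what* feeds the hash (tenant, product/vuln tuple, sorted observation ids), not the concrete encoding:
+
+```csharp
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Security.Cryptography;
+using System.Text;
+
+// Hypothetical helper: composite observation id plus a SHA-256 linkset id over a
+// sorted, newline-joined canonical tuple (the joining scheme is an assumption).
+public static class DeterministicIds
+{
+    public static string ObservationId(string tenant, string sourceVendor, string upstreamId, int revision)
+        => $"{tenant.ToLowerInvariant()}:{sourceVendor}:{upstreamId}:{revision}";
+
+    public static string LinksetId(string tenant, string vulnerabilityId, string productKey,
+                                   IEnumerable<string> observationIds)
+    {
+        var canonical = string.Join("\n",
+            new[] { tenant, vulnerabilityId, productKey }
+                .Concat(observationIds.OrderBy(id => id, StringComparer.Ordinal)));
+        var digest = SHA256.HashData(Encoding.UTF8.GetBytes(canonical));
+        return "sha256:" + Convert.ToHexString(digest).ToLowerInvariant();
+    }
+}
+```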
+ +### 3.2 Precedence (when claims conflict) + +1. **Vendor PSIRT** (authoritative for their product). +2. **Distro** (authoritative for packages they ship, including backports). +3. **Ecosystem** (OSV/GHSA) for library semantics. +4. **CERTs/aggregators** for enrichment (KEV/known exploited). + +> Precedence affects **Affected** ranges and **fixed** info; **severity** is normalized to the **maximum** credible severity unless policy overrides. Conflicts are retained with **source provenance**. + +--- + +## 4) Connectors & normalization + +### 4.1 Connector contract + +```csharp +public interface IFeedConnector { + string SourceName { get; } + Task FetchAsync(IServiceProvider sp, CancellationToken ct); // -> document collection + Task ParseAsync(IServiceProvider sp, CancellationToken ct); // -> dto collection (validated) + Task MapAsync(IServiceProvider sp, CancellationToken ct); // -> advisory/alias/affected/reference +} +```jsonc + +* **Fetch**: windowed (cursor), conditional GET (ETag/Last‑Modified), retry/backoff, rate limiting. +* **Parse**: schema validation (JSON Schema, XSD/CSAF), content type checks; write **DTO** with normalized casing. +* **Map**: build canonical records; all outputs carry **provenance** (doc digest, URI, anchors). + +### 4.2 Version range normalization + +* **SemVer** ecosystems (npm, pypi, maven, nuget, golang): normalize to `introduced`/`fixed` semver ranges (use `~`, `^`, `<`, `>=` canonicalized to intervals). +* **RPM EVR**: `epoch:version-release` with `rpmvercmp` semantics; store raw EVR strings and also **computed order keys** for query. +* **DEB**: dpkg version comparison semantics mirrored; store computed keys. +* **APK**: Alpine version semantics; compute order keys. +* **Generic**: if provider uses text, retain raw; do **not** invent ranges. + +### 4.3 Severity & CVSS + +* Normalize **CVSS v2/v3/v4** where available (vector, baseScore, severity). +* If multiple CVSS sources exist, track them all; **effective severity** defaults to **max** by policy (configurable). +* **ExploitKnown** toggled by KEV and equivalent sources; store **evidence** (source, date). + +--- + +## 5) Observation & linkset pipeline + +> **Goal:** deterministically ingest raw documents into immutable observations, correlate them into evidence-rich linksets, and broadcast changes without precedence or mutation. + +### 5.1 Observation flow + +1. **Connector fetch/parse/map** — connectors download upstream payloads, validate signatures, and map to DTOs (identifiers, references, raw payload, provenance). +2. **AOC guard** — `AOCWriteGuard` verifies forbidden keys, provenance completeness, tenant claims, timestamp normalization, and content hash idempotency. Violations raise `ERR_AOC_00x` mapped to structured logs and metrics. +3. **Append-only write** — observations insert into `advisory_observations`; duplicates by `(tenant, source.vendor, upstream.upstreamId, upstream.contentHash)` become no-ops; new content for same upstream id creates a supersedes chain. +4. **Change feed + event** — Mongo change streams trigger `advisory.observation.updated@1` events with deterministic payloads (IDs, hash, supersedes pointer, linkset summary). Policy Engine, Offline Kit builder, and guard dashboards subscribe. + +### 5.2 Linkset correlation + +1. **Queue** — observation deltas enqueue correlation jobs keyed by `(tenant, vulnerabilityId, productKey)` candidates derived from identifiers + alias graph. +2. 
**Canonical grouping** — builder resolves aliases using Concelier’s alias store and deterministic heuristics (vendor > distro > cert), deriving normalized product keys (purl preferred) and confidence scores. +3. **Linkset materialization** — `advisory_linksets` documents store sorted observation references, alias sets, product keys, range metadata, and conflict payloads. Writes are idempotent; unchanged hashes skip updates. +4. **Conflict detection** — builder emits structured conflicts (`severity-mismatch`, `affected-range-divergence`, `reference-clash`, `alias-inconsistency`, `metadata-gap`). Conflicts carry per-observation values for explainability. +5. **Event emission** — `advisory.linkset.updated@1` summarizes deltas (`added`, `removed`, `changed` observation IDs, conflict updates, confidence changes) and includes a canonical hash for replay validation. + +### 5.3 Event contract + +| Event | Schema | Notes | +|-------|--------|-------| +| `advisory.observation.updated@1` | `events/advisory.observation.updated@1.json` | Fired on new or superseded observations. Includes `observationId`, source metadata, `linksetSummary` (aliases/purls), supersedes pointer (if any), SHA-256 hash, and `traceId`. | +| `advisory.linkset.updated@1` | `events/advisory.linkset.updated@1.json` | Fired when correlation changes. Includes `linksetId`, `key{vulnerabilityId, productKey, confidence}`, observation deltas, conflicts, `updatedAt`, and canonical hash. | + +Events are emitted via NATS (primary) and Redis Stream (fallback). Consumers acknowledge idempotently using the hash; duplicates are safe. Offline Kit captures both topics during bundle creation for air-gapped replay. + +--- + +## 6) Storage schema (MongoDB) + +### Collections & indexes (LNM path) + +* `concelier.sources` `{_id, type, baseUrl, enabled, notes}` — connector catalog. +* `concelier.source_state` `{sourceName(unique), enabled, cursor, lastSuccess, backoffUntil, paceOverrides}` — run-state (TTL indexes on `backoffUntil`). +* `concelier.documents` `{_id, sourceName, uri, fetchedAt, sha256, contentType, status, metadata, gridFsId?, etag?, lastModified?}` — raw payload registry. + * Indexes: `{sourceName:1, uri:1}` unique; `{fetchedAt:-1}` for recent fetches. +* `concelier.dto` `{_id, sourceName, documentId, schemaVer, payload, validatedAt}` — normalized connector DTOs used for replay. + * Index: `{sourceName:1, documentId:1}`. +* `concelier.advisory_observations` + +``` +{ + _id: "tenant:vendor:upstreamId:revision", + tenant, + source: { vendor, stream, api, collectorVersion }, + upstream: { upstreamId, documentVersion, fetchedAt, receivedAt, contentHash, signature }, + content: { format, specVersion, raw, metadata? }, + identifiers: { cve?, ghsa?, vendorIds[], aliases[] }, + linkset: { purls[], cpes[], aliases[], references[], reconciledFrom[] }, + supersedes?: "prevObservationId", + createdAt, + attributes?: object +} +``` + + * Indexes: `{tenant:1, upstream.upstreamId:1}`, `{tenant:1, source.vendor:1, linkset.purls:1}`, `{tenant:1, linkset.aliases:1}`, `{tenant:1, createdAt:-1}`. 
+* `concelier.advisory_linksets` + +``` +{ + _id: "sha256:...", + tenant, + key: { vulnerabilityId, productKey, confidence }, + observations: [ + { observationId, sourceVendor, statement, collectedAt } + ], + aliases: { primary, others: [] }, + purls: [], + cpes: [], + conflicts: [], + createdAt, + updatedAt +} +``` + + * Indexes: `{tenant:1, key.vulnerabilityId:1, key.productKey:1}`, `{tenant:1, purls:1}`, `{tenant:1, aliases.primary:1}`, `{tenant:1, updatedAt:-1}`. +* `concelier.advisory_events` + +``` +{ + _id: ObjectId, + tenant, + type: "advisory.observation.updated" | "advisory.linkset.updated", + key, + delta, + hash, + occurredAt +} +``` + + * TTL index on `occurredAt` (configurable retention), `{type:1, occurredAt:-1}` for replay. +* `concelier.export_state` `{_id(exportKind), baseExportId?, baseDigest?, lastFullDigest?, lastDeltaDigest?, cursor, files[]}` +* `locks` `{_id(jobKey), holder, acquiredAt, heartbeatAt, leaseMs, ttlAt}` (TTL cleans dead locks) +* `jobs` `{_id, type, args, state, startedAt, heartbeatAt, endedAt, error}` + +**Legacy collections** (`advisory`, `alias`, `affected`, `reference`, `merge_event`) remain read-only during the migration window to support back-compat exports. New code must not write to them; scheduled cleanup removes them after Link-Not-Merge GA. + +**GridFS buckets**: `fs.documents` for raw payloads (immutable); `fs.exports` for historical JSON/Trivy archives. + +--- + +## 7) Exporters + +### 7.1 Deterministic JSON (vuln‑list style) + +* Folder structure mirroring `////…` with one JSON per advisory; deterministic ordering, stable timestamps, normalized whitespace. +* `manifest.json` lists all files with SHA‑256 and a top‑level **export digest**. + +### 7.2 Trivy DB exporter + +* Builds Bolt DB archives compatible with Trivy; supports **full** and **delta** modes. +* In delta, unchanged blobs are reused from the base; metadata captures: + + ```json + { + "mode": "delta|full", + "baseExportId": "...", + "baseManifestDigest": "sha256:...", + "changed": ["path1", "path2"], + "removed": ["path3"] + } + ``` +* Optional ORAS push (OCI layout) for registries. +* Offline kit bundles include Trivy DB + JSON tree + export manifest. +* Mirror-ready bundles: when `concelier.trivy.mirror` defines domains, the exporter emits `mirror/index.json` plus per-domain `manifest.json`, `metadata.json`, and `db.tar.gz` files with SHA-256 digests so Concelier mirrors can expose domain-scoped download endpoints. +* Concelier.WebService serves `/concelier/exports/index.json` and `/concelier/exports/mirror/{domain}/…` directly from the export tree with hour-long budgets (index: 60 s, bundles: 300 s, immutable) and per-domain rate limiting; the endpoints honour Stella Ops Authority or CIDR bypass lists depending on mirror topology. + +### 7.3 Hand‑off to Signer/Attestor (optional) + +* On export completion, if `attest: true` is set in job args, Concelier **posts** the artifact metadata to **Signer**/**Attestor**; Concelier itself **does not** hold signing keys. +* Export record stores returned `{ uuid, index, url }` from **Rekor v2**. + +--- + +## 8) REST APIs + +All under `/api/v1/concelier`. 
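+
+As a usage sketch (the endpoint groups follow below), a client might trigger a full JSON export with attestation hand-off roughly as shown here. The request shape mirrors the `POST /exports/json` contract listed under **Exports**; the plain `Bearer` header is a simplification — real calls carry an Authority-issued OpTok with the `concelier.export` role, and any DPoP/mTLS sender constraint required by the deployment is omitted for brevity:
+
+```csharp
+using System.Net.Http;
+using System.Net.Http.Headers;
+using System.Net.Http.Json;
+using System.Threading.Tasks;
+
+public static class ConcelierExportClient
+{
+    // Illustrative only: kicks off a full, attested JSON export and returns the raw
+    // response body, expected to contain { exportId, digest, rekor? }.
+    public static async Task<string> TriggerJsonExportAsync(HttpClient client, string opTok)
+    {
+        client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", opTok);
+        var response = await client.PostAsJsonAsync(
+            "/api/v1/concelier/exports/json",
+            new { full = true, force = false, attest = true });
+        response.EnsureSuccessStatusCode();
+        return await response.Content.ReadAsStringAsync();
+    }
+}
+```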
+ +**Health & status** + +``` +GET /healthz | /readyz +GET /status → sources, last runs, export cursors +``` + +**Sources & jobs** + +``` +GET /sources → list of configured sources +POST /sources/{name}/trigger → { jobId } +POST /sources/{name}/pause | /resume → toggle +GET /jobs/{id} → job status +``` + +**Exports** + +``` +POST /exports/json { full?:bool, force?:bool, attest?:bool } → { exportId, digest, rekor? } +POST /exports/trivy { full?:bool, force?:bool, publish?:bool, attest?:bool } → { exportId, digest, rekor? } +GET /exports/{id} → export metadata (kind, digest, createdAt, rekor?) +GET /concelier/exports/index.json → mirror index describing available domains/bundles +GET /concelier/exports/mirror/{domain}/manifest.json +GET /concelier/exports/mirror/{domain}/bundle.json +GET /concelier/exports/mirror/{domain}/bundle.json.jws +``` + +**Search (operator debugging)** + +``` +GET /advisories/{key} +GET /advisories?scheme=CVE&value=CVE-2025-12345 +GET /affected?productKey=pkg:rpm/openssl&limit=100 +``` + +**AuthN/Z:** Authority tokens (OpTok) with roles: `concelier.read`, `concelier.admin`, `concelier.export`. + +--- + +## 9) Configuration (YAML) + +```yaml +concelier: + mongo: { uri: "mongodb://mongo/concelier" } + s3: + endpoint: "http://minio:9000" + bucket: "stellaops-concelier" + scheduler: + windowSeconds: 30 + maxParallelSources: 4 + sources: + - name: redhat + kind: csaf + baseUrl: https://access.redhat.com/security/data/csaf/v2/ + signature: { type: pgp, keys: [ "…redhat PGP…" ] } + enabled: true + windowDays: 7 + - name: suse + kind: csaf + baseUrl: https://ftp.suse.com/pub/projects/security/csaf/ + signature: { type: pgp, keys: [ "…suse PGP…" ] } + - name: ubuntu + kind: usn-json + baseUrl: https://ubuntu.com/security/notices.json + signature: { type: none } + - name: osv + kind: osv + baseUrl: https://api.osv.dev/v1/ + signature: { type: none } + - name: ghsa + kind: ghsa + baseUrl: https://api.github.com/graphql + auth: { tokenRef: "env:GITHUB_TOKEN" } + exporters: + json: + enabled: true + output: s3://stellaops-concelier/json/ + trivy: + enabled: true + mode: full + output: s3://stellaops-concelier/trivy/ + oras: + enabled: false + repo: ghcr.io/org/concelier + precedence: + vendorWinsOverDistro: true + distroWinsOverOsv: true + severity: + policy: max # or 'vendorPreferred' / 'distroPreferred' +``` + +--- + +## 10) Security & compliance + +* **Outbound allowlist** per connector (domains, protocols); proxy support; TLS pinning where possible. +* **Signature verification** for raw docs (PGP/cosign/x509) with results stored in `document.metadata.sig`. Docs failing verification may still be ingested but flagged; Policy Engine or downstream policy can down-weight them. +* **No secrets in logs**; auth material via `env:` or mounted files; HTTP redaction of `Authorization` headers. +* **Multi‑tenant**: per‑tenant DBs or prefixes; per‑tenant S3 prefixes; tenant‑scoped API tokens. +* **Determinism**: canonical JSON writer; export digests stable across runs given same inputs. + +--- + +## 11) Performance targets & scale + +* **Ingest**: ≥ 5k documents/min on 4 cores (CSAF/OpenVEX/JSON). +* **Normalize/map**: ≥ 50k observation statements/min on 4 cores. +* **Observation write**: ≤ 5 ms P95 per document (including guard + Mongo write). +* **Linkset build**: ≤ 15 ms P95 per `(vulnerabilityId, productKey)` update, even with 20+ contributing observations. +* **Export**: 1M advisories JSON in ≤ 90 s (streamed, zstd), Trivy DB in ≤ 60 s on 8 cores. 
+* **Memory**: hard cap per job; chunked streaming writers; backpressure to avoid GC spikes. + +**Scale pattern**: add Concelier replicas; Mongo scaling via indices and read/write concerns; GridFS only for oversized docs. + +--- + +## 12) Observability + +* **Metrics** + + * `concelier.fetch.docs_total{source}` + * `concelier.fetch.bytes_total{source}` + * `concelier.parse.failures_total{source}` + * `concelier.map.statements_total{source}` + * `concelier.observations.write_total{result=ok|noop|error}` + * `concelier.linksets.updated_total{result=ok|skip|error}` + * `concelier.linksets.conflicts_total{type}` + * `concelier.export.bytes{kind}` + * `concelier.export.duration_seconds{kind}` +* **Tracing** around fetch/parse/map/observe/linkset/export. +* **Logs**: structured with `source`, `uri`, `docDigest`, `advisoryKey`, `exportId`. + +--- + +## 13) Testing matrix + +* **Connectors:** fixture suites for each provider/format (happy path; malformed; signature fail). +* **Version semantics:** EVR vs dpkg vs semver edge cases (epoch bumps, tilde versions, pre‑releases). +* **Linkset correlation:** multi-source conflicts (severity, range, alias) produce deterministic conflict payloads; ensure confidence scoring stable. +* **Export determinism:** byte‑for‑byte stable outputs across runs; digest equality. +* **Performance:** soak tests with 1M advisories; cap memory; verify backpressure. +* **API:** pagination, filters, RBAC, error envelopes (RFC 7807). +* **Offline kit:** bundle build & import correctness. + +--- + +## 14) Failure modes & recovery + +* **Source outages:** scheduler backs off with exponential delay; `source_state.backoffUntil`; alerts on staleness. +* **Schema drifts:** parse stage marks DTO invalid; job fails with clear diagnostics; connector version flags track supported schema ranges. +* **Partial exports:** exporters write to temp prefix; **manifest commit** is atomic; only then move to final prefix and update `export_state`. +* **Resume:** all stages idempotent; `source_state.cursor` supports window resume. + +--- + +## 15) Operator runbook (quick) + +* **Trigger all sources:** `POST /api/v1/concelier/sources/*/trigger` +* **Force full export JSON:** `POST /api/v1/concelier/exports/json { "full": true, "force": true }` +* **Force Trivy DB delta publish:** `POST /api/v1/concelier/exports/trivy { "full": false, "publish": true }` +* **Inspect observation:** `GET /api/v1/concelier/observations/{observationId}` +* **Query linkset:** `GET /api/v1/concelier/linksets?vulnerabilityId=CVE-2025-12345&productKey=pkg:rpm/redhat/openssl` +* **Pause noisy source:** `POST /api/v1/concelier/sources/osv/pause` + +--- + +## 16) Rollout plan + +1. **MVP**: Red Hat (CSAF), SUSE (CSAF), Ubuntu (USN JSON), OSV; JSON export. +2. **Add**: GHSA GraphQL, Debian (DSA HTML/JSON), Alpine secdb; Trivy DB export. +3. **Attestation hand‑off**: integrate with **Signer/Attestor** (optional). +4. **Scale & diagnostics**: provider dashboards, staleness alerts, export cache reuse. +5. **Offline kit**: end‑to‑end verified bundles for air‑gap. diff --git a/docs/ARCHITECTURE_DEVOPS.md b/docs/ARCHITECTURE_DEVOPS.md index eddb74d0..2eb884a1 100644 --- a/docs/ARCHITECTURE_DEVOPS.md +++ b/docs/ARCHITECTURE_DEVOPS.md @@ -98,7 +98,7 @@ At startup, services **self‑advertise** their semver & channel; the UI surface **Gating policy**: * **Core images** (Authority, Scanner, Concelier, Excititor, Attestor, UI): public **read**. 
-* **Enterprise add‑ons** (if any) and **pre‑release**: private repos via the **Registry Token Service** (`src/StellaOps.Registry.TokenService`) which exchanges Authority-issued OpToks for short-lived Docker registry bearer tokens. +* **Enterprise add‑ons** (if any) and **pre‑release**: private repos via the **Registry Token Service** (`src/Registry/StellaOps.Registry.TokenService`) which exchanges Authority-issued OpToks for short-lived Docker registry bearer tokens. > Monetization lever is **signing** (PoE gate), not image pulls, so the core remains simple to consume. diff --git a/docs/ARCHITECTURE_SCANNER.md b/docs/ARCHITECTURE_SCANNER.md index 8d9df7ed..3713850d 100644 --- a/docs/ARCHITECTURE_SCANNER.md +++ b/docs/ARCHITECTURE_SCANNER.md @@ -1,487 +1,487 @@ -# component_architecture_scanner.md — **Stella Ops Scanner** (2025Q4) - -> **Scope.** Implementation‑ready architecture for the **Scanner** subsystem: WebService, Workers, analyzers, SBOM assembly (inventory & usage), per‑layer caching, three‑way diffs, artifact catalog (RustFS default + Mongo, S3-compatible fallback), attestation hand‑off, and scale/security posture. This document is the contract between the scanning plane and everything else (Policy, Excititor, Concelier, UI, CLI). - ---- - -## 0) Mission & boundaries - -**Mission.** Produce **deterministic**, **explainable** SBOMs and diffs for container images and filesystems, quickly and repeatedly, without guessing. Emit two views: **Inventory** (everything present) and **Usage** (entrypoint closure + actually linked libs). Attach attestations through **Signer→Attestor→Rekor v2**. - -**Boundaries.** - -* Scanner **does not** produce PASS/FAIL. The backend (Policy + Excititor + Concelier) decides presentation and verdicts. -* Scanner **does not** keep third‑party SBOM warehouses. It may **bind** to existing attestations for exact hashes. -* Core analyzers are **deterministic** (no fuzzy identity). Optional heuristic plug‑ins (e.g., patch‑presence) run under explicit flags and never contaminate the core SBOM. 
- ---- - -## 1) Solution & project layout - -``` -src/ - ├─ StellaOps.Scanner.WebService/ # REST control plane, catalog, diff, exports - ├─ StellaOps.Scanner.Worker/ # queue consumer; executes analyzers - ├─ StellaOps.Scanner.Models/ # DTOs, evidence, graph nodes, CDX/SPDX adapters - ├─ StellaOps.Scanner.Storage/ # Mongo repositories; RustFS object client (default) + S3 fallback; ILM/GC - ├─ StellaOps.Scanner.Queue/ # queue abstraction (Redis/NATS/RabbitMQ) - ├─ StellaOps.Scanner.Cache/ # layer cache; file CAS; bloom/bitmap indexes - ├─ StellaOps.Scanner.EntryTrace/ # ENTRYPOINT/CMD → terminal program resolver (shell AST) - ├─ StellaOps.Scanner.Analyzers.OS.[Apk|Dpkg|Rpm]/ - ├─ StellaOps.Scanner.Analyzers.Lang.[Java|Node|Python|Go|DotNet|Rust]/ - ├─ StellaOps.Scanner.Analyzers.Native.[ELF|PE|MachO]/ # PE/Mach-O planned (M2) - ├─ StellaOps.Scanner.Emit.CDX/ # CycloneDX (JSON + Protobuf) - ├─ StellaOps.Scanner.Emit.SPDX/ # SPDX 3.0.1 JSON - ├─ StellaOps.Scanner.Diff/ # image→layer→component three‑way diff - ├─ StellaOps.Scanner.Index/ # BOM‑Index sidecar (purls + roaring bitmaps) - ├─ StellaOps.Scanner.Tests.* # unit/integration/e2e fixtures - └─ tools/ - ├─ StellaOps.Scanner.Sbomer.BuildXPlugin/ # BuildKit generator (image referrer SBOMs) - └─ StellaOps.Scanner.Sbomer.DockerImage/ # CLI‑driven scanner container -``` - -Analyzer assemblies and buildx generators are packaged as **restart-time plug-ins** under `plugins/scanner/**` with manifests; services must restart to activate new plug-ins. - -### 1.1 Queue backbone (Redis / NATS) - -`StellaOps.Scanner.Queue` exposes a transport-agnostic contract (`IScanQueue`/`IScanQueueLease`) used by the WebService producer and Worker consumers. Sprint 9 introduces two first-party transports: - -- **Redis Streams** (default). Uses consumer groups, deterministic idempotency keys (`scanner:jobs:idemp:*`), and supports lease claim (`XCLAIM`), renewal, exponential-backoff retries, and a `scanner:jobs:dead` stream for exhausted attempts. -- **NATS JetStream**. Provisions the `SCANNER_JOBS` work-queue stream + durable consumer `scanner-workers`, publishes with `MsgId` for dedupe, applies backoff via `NAK` delays, and routes dead-lettered jobs to `SCANNER_JOBS_DEAD`. - -Metrics are emitted via `Meter` counters (`scanner_queue_enqueued_total`, `scanner_queue_retry_total`, `scanner_queue_deadletter_total`), and `ScannerQueueHealthCheck` pings the active backend (Redis `PING`, NATS `PING`). Configuration is bound from `scanner.queue`: - -```yaml -scanner: - queue: - kind: redis # or nats - redis: - connectionString: "redis://queue:6379/0" - streamName: "scanner:jobs" - nats: - url: "nats://queue:4222" - stream: "SCANNER_JOBS" - subject: "scanner.jobs" - durableConsumer: "scanner-workers" - deadLetterSubject: "scanner.jobs.dead" - maxDeliveryAttempts: 5 - retryInitialBackoff: 00:00:05 - retryMaxBackoff: 00:02:00 -``` - -The DI extension (`AddScannerQueue`) wires the selected transport, so future additions (e.g., RabbitMQ) only implement the same contract and register. - -**Runtime form‑factor:** two deployables - -* **Scanner.WebService** (stateless REST) -* **Scanner.Worker** (N replicas; queue‑driven) - ---- - -## 2) External dependencies - -* **OCI registry** with **Referrers API** (discover attached SBOMs/signatures). -* **RustFS** (default, offline-first) for SBOM artifacts; optional S3/MinIO compatibility retained for migration; **Object Lock** semantics emulated via retention headers; **ILM** for TTL. -* **MongoDB** for catalog, job state, diffs, ILM rules. 
-* **Queue** (Redis Streams/NATS/RabbitMQ). -* **Authority** (on‑prem OIDC) for **OpToks** (DPoP/mTLS). -* **Signer** + **Attestor** (+ **Fulcio/KMS** + **Rekor v2**) for DSSE + transparency. - ---- - -## 3) Contracts & data model - -### 3.1 Evidence‑first component model - -**Nodes** - -* `Image`, `Layer`, `File` -* `Component` (`purl?`, `name`, `version?`, `type`, `id` — may be `bin:{sha256}`) -* `Executable` (ELF/PE/Mach‑O), `Library` (native or managed), `EntryScript` (shell/launcher) - -**Edges** (all carry **Evidence**) - -* `contains(Image|Layer → File)` -* `installs(PackageDB → Component)` (OS database row) -* `declares(InstalledMetadata → Component)` (dist‑info, pom.properties, deps.json…) -* `links_to(Executable → Library)` (ELF `DT_NEEDED`, PE imports) -* `calls(EntryScript → Program)` (file:line from shell AST) -* `attests(Rekor → Component|Image)` (SBOM/predicate binding) -* `bound_from_attestation(Component_attested → Component_observed)` (hash equality proof) - -**Evidence** - -``` -{ source: enum, locator: (path|offset|line), sha256?, method: enum, timestamp } -``` - -No confidences. Either a fact is proven with listed mechanisms, or it is not claimed. - -### 3.2 Catalog schema (Mongo) - -* `artifacts` - - ``` - { _id, type: layer-bom|image-bom|diff|index, - format: cdx-json|cdx-pb|spdx-json, - bytesSha256, size, rekor: { uuid,index,url }?, - ttlClass, immutable, refCount, createdAt } - ``` -* `images { imageDigest, repo, tag?, arch, createdAt, lastSeen }` -* `layers { layerDigest, mediaType, size, createdAt, lastSeen }` -* `links { fromType, fromDigest, artifactId }` // image/layer -> artifact -* `jobs { _id, kind, args, state, startedAt, heartbeatAt, endedAt, error }` -* `lifecycleRules { ruleId, scope, ttlDays, retainIfReferenced, immutable }` - -### 3.3 Object store layout (RustFS) - -``` -layers//sbom.cdx.json.zst -layers//sbom.spdx.json.zst -images//inventory.cdx.pb # CycloneDX Protobuf -images//usage.cdx.pb -indexes//bom-index.bin # purls + roaring bitmaps -diffs/_/diff.json.zst -attest/.dsse.json # DSSE bundle (cert chain + Rekor proof) -``` - -RustFS exposes a deterministic HTTP API (`PUT|GET|DELETE /api/v1/buckets/{bucket}/objects/{key}`). -Scanner clients tag immutable uploads with `X-RustFS-Immutable: true` and, when retention applies, -`X-RustFS-Retain-Seconds: `. Additional headers can be injected via -`scanner.artifactStore.headers` to support custom auth or proxy requirements. Legacy MinIO/S3 -deployments remain supported by setting `scanner.artifactStore.driver = "s3"` during phased -migrations. - ---- - -## 4) REST API (Scanner.WebService) - -All under `/api/v1/scanner`. Auth: **OpTok** (DPoP/mTLS); RBAC scopes. - -``` -POST /scans { imageRef|digest, force?:bool } → { scanId } -GET /scans/{id} → { status, imageDigest, artifacts[], rekor? } -GET /sboms/{imageDigest} ?format=cdx-json|cdx-pb|spdx-json&view=inventory|usage → bytes -GET /diff?old=&new=&view=inventory|usage → diff.json -POST /exports { imageDigest, format, view, attest?:bool } → { artifactId, rekor? } -POST /reports { imageDigest, policyRevision? } → { reportId, rekor? } # delegates to backend policy+vex -GET /catalog/artifacts/{id} → { meta } -GET /healthz | /readyz | /metrics -``` - -### Report events - -When `scanner.events.enabled = true`, the WebService serialises the signed report (canonical JSON + DSSE envelope) with `NotifyCanonicalJsonSerializer` and publishes two Redis Stream entries (`scanner.report.ready`, `scanner.scan.completed`) to the configured stream (default `stella.events`). 
The stream fields carry the whole envelope plus lightweight headers (`kind`, `tenant`, `ts`) so Notify and UI timelines can consume the event bus without recomputing signatures. Publish timeouts and bounded stream length are controlled via `scanner:events:publishTimeoutSeconds` and `scanner:events:maxStreamLength`. If the queue driver is already Redis and no explicit events DSN is provided, the host reuses the queue connection and auto-enables event emission so deployments get live envelopes without extra wiring. Compose/Helm bundles expose the same knobs via the `SCANNER__EVENTS__*` environment variables for quick tuning. - ---- - -## 5) Execution flow (Worker) - -### 5.1 Acquire & verify - -1. **Resolve image** (prefer `repo@sha256:…`). -2. **(Optional) verify image signature** per policy (cosign). -3. **Pull blobs**, compute layer digests; record metadata. - -### 5.2 Layer union FS - -* Apply whiteouts; materialize final filesystem; map **file → first introducing layer**. -* Windows layers (MSI/SxS/GAC) planned in **M2**. - -### 5.3 Evidence harvest (parallel analyzers; deterministic only) - -**A) OS packages** - -* **apk**: `/lib/apk/db/installed` -* **dpkg**: `/var/lib/dpkg/status`, `/var/lib/dpkg/info/*.list` -* **rpm**: `/var/lib/rpm/Packages` (via librpm or parser) -* Record `name`, `version` (epoch/revision), `arch`, source package where present, and **declared file lists**. - -> **Data flow note:** Each OS analyzer now writes its canonical output into the shared `ScanAnalysisStore` under -> `analysis.os.packages` (raw results), `analysis.os.fragments` (per-analyzer layer fragments), and contributes to -> `analysis.layers.fragments` (the aggregated view consumed by emit/diff pipelines). Helpers in -> `ScanAnalysisCompositionBuilder` convert these fragments into SBOM composition requests and component graphs so the -> diff/emit stages no longer reach back into individual analyzer implementations. - -**B) Language ecosystems (installed state only)** - -* **Java**: `META-INF/maven/*/pom.properties`, MANIFEST → `pkg:maven/...` -* **Node**: `node_modules/**/package.json` → `pkg:npm/...` -* **Python**: `*.dist-info/{METADATA,RECORD}` → `pkg:pypi/...` -* **Go**: Go **buildinfo** in binaries → `pkg:golang/...` -* **.NET**: `*.deps.json` + assembly metadata → `pkg:nuget/...` -* **Rust**: crates only when **explicitly present** (embedded metadata or cargo/registry traces); otherwise binaries reported as `bin:{sha256}`. - -> **Rule:** We only report components proven **on disk** with authoritative metadata. Lockfiles are evidence only. - -**C) Native link graph** - -* **ELF**: parse `PT_INTERP`, `DT_NEEDED`, RPATH/RUNPATH, **GNU symbol versions**; map **SONAMEs** to file paths; link executables → libs. -* **PE/Mach‑O** (planned M2): import table, delay‑imports; version resources; code signatures. -* Map libs back to **OS packages** if possible (via file lists); else emit `bin:{sha256}` components. -* The exported metadata (`stellaops.os.*` properties, license list, source package) feeds policy scoring and export pipelines - directly – Policy evaluates quiet rules against package provenance while Exporters forward the enriched fields into - downstream JSON/Trivy payloads. - -**D) EntryTrace (ENTRYPOINT/CMD → terminal program)** - -* Read image config; parse shell (POSIX/Bash subset) with AST: `source`/`.` includes; `case/if`; `exec`/`command`; `run‑parts`. 
-* Resolve commands via **PATH** within the **built rootfs**; follow language launchers (Java/Node/Python) to identify the terminal program (ELF/JAR/venv script). -* Record **file:line** and choices for each hop; output chain graph. -* Unresolvable dynamic constructs are recorded as **unknown** edges with reasons (e.g., `$FOO` unresolved). - -**E) Attestation & SBOM bind (optional)** - -* For each **file hash** or **binary hash**, query local cache of **Rekor v2** indices; if an SBOM attestation is found for **exact hash**, bind it to the component (origin=`attested`). -* For the **image** digest, likewise bind SBOM attestations (build‑time referrers). - -### 5.4 Component normalization (exact only) - -* Create `Component` nodes only with deterministic identities: purl, or **`bin:{sha256}`** for unlabeled binaries. -* Record **origin** (OS DB, installed metadata, linker, attestation). - -### 5.5 SBOM assembly & emit - -* **Per-layer SBOM fragments**: components introduced by the layer (+ relationships). -* **Image SBOMs**: merge fragments; refer back to them via **CycloneDX BOM‑Link** (or SPDX ExternalRef). -* Emit both **Inventory** & **Usage** views. -* When the native analyzer reports an ELF `buildId`, attach it to component metadata and surface it as `stellaops:buildId` in CycloneDX properties (and diff metadata). This keeps SBOM/diff output in lockstep with runtime events and the debug-store manifest. -* Serialize **CycloneDX JSON** and **CycloneDX Protobuf**; optionally **SPDX 3.0.1 JSON**. -* Build **BOM‑Index** sidecar: purl table + roaring bitmap; flag `usedByEntrypoint` components for fast backend joins. - -The emitted `buildId` metadata is preserved in component hashes, diff payloads, and `/policy/runtime` responses so operators can pivot from SBOM entries → runtime events → `debug/.build-id//.debug` within the Offline Kit or release bundle. - -### 5.6 DSSE attestation (via Signer/Attestor) - -* WebService constructs **predicate** with `image_digest`, `stellaops_version`, `license_id`, `policy_digest?` (when emitting **final reports**), timestamps. -* Calls **Signer** (requires **OpTok + PoE**); Signer verifies **entitlement + scanner image integrity** and returns **DSSE bundle**. -* **Attestor** logs to **Rekor v2**; returns `{uuid,index,proof}` → stored in `artifacts.rekor`. - ---- - -## 6) Three‑way diff (image → layer → component) - -### 6.1 Keys & classification - -* Component key: **purl** when present; else `bin:{sha256}`. -* Diff classes: `added`, `removed`, `version_changed` (`upgraded|downgraded`), `metadata_changed` (e.g., origin from attestation vs observed). -* Layer attribution: for each change, resolve the **introducing/removing layer**. - -### 6.2 Algorithm (outline) - -``` -A = components(imageOld, key) -B = components(imageNew, key) - -added = B \ A -removed = A \ B -changed = { k in A∩B : version(A[k]) != version(B[k]) || origin changed } - -for each item in added/removed/changed: - layer = attribute_to_layer(item, imageOld|imageNew) - usageFlag = usedByEntrypoint(item, imageNew) -emit diff.json (grouped by layer with badges) -``` - -Diffs are stored as artifacts and feed **UI** and **CLI**. - ---- - -## 7) Build‑time SBOMs (fast CI path) - -**Scanner.Sbomer.BuildXPlugin** can act as a BuildKit **generator**: - -* During `docker buildx build --attest=type=sbom,generator=stellaops/sbom-indexer`, run analyzers on the build context/output; attach SBOMs as OCI **referrers** to the built image. 
-* Optionally request **Signer/Attestor** to produce **Stella Ops‑verified** attestation immediately; else, Scanner.WebService can verify and re‑attest post‑push. -* Scanner.WebService trusts build‑time SBOMs per policy, enabling **no‑rescan** for unchanged bases. - ---- - -## 8) Configuration (YAML) - -```yaml -scanner: - queue: - kind: redis - url: "redis://queue:6379/0" - mongo: - uri: "mongodb://mongo/scanner" - s3: - endpoint: "http://minio:9000" - bucket: "stellaops" - objectLock: "governance" # or 'compliance' - analyzers: - os: { apk: true, dpkg: true, rpm: true } - lang: { java: true, node: true, python: true, go: true, dotnet: true, rust: true } - native: { elf: true, pe: false, macho: false } # PE/Mach-O in M2 - entryTrace: { enabled: true, shellMaxDepth: 64, followRunParts: true } - emit: - cdx: { json: true, protobuf: true } - spdx: { json: true } - compress: "zstd" - rekor: - url: "https://rekor-v2.internal" - signer: - url: "https://signer.internal" - limits: - maxParallel: 8 - perRegistryConcurrency: 2 - policyHints: - verifyImageSignature: false - trustBuildTimeSboms: true -``` - ---- - -## 9) Scale & performance - -* **Parallelism**: per‑analyzer concurrency; bounded directory walkers; file CAS dedupe by sha256. -* **Distributed locks** per **layer digest** to prevent duplicate work across Workers. -* **Registry throttles**: per‑host concurrency budgets; exponential backoff on 429/5xx. -* **Targets**: - - * **Build‑time**: P95 ≤ 3–5 s on warmed bases (CI generator). - * **Post‑build delta**: P95 ≤ 10 s for 200 MB images with cache hit. - * **Emit**: CycloneDX Protobuf ≤ 150 ms for 5k components; JSON ≤ 500 ms. - * **Diff**: ≤ 200 ms for 5k vs 5k components. - ---- - -## 10) Security posture - -* **AuthN**: Authority‑issued short OpToks (DPoP/mTLS). -* **AuthZ**: scopes (`scanner.scan`, `scanner.export`, `scanner.catalog.read`). -* **mTLS** to **Signer**/**Attestor**; only **Signer** can sign. -* **No network fetches** during analysis (except registry pulls and optional Rekor index reads). -* **Sandboxing**: non‑root containers; read‑only FS; seccomp profiles; disable execution of scanned content. -* **Release integrity**: all first‑party images are **cosign‑signed**; Workers/WebService self‑verify at startup. - ---- - -## 11) Observability & audit - -* **Metrics**: - - * `scanner.jobs_inflight`, `scanner.scan_latency_seconds` - * `scanner.layer_cache_hits_total`, `scanner.file_cas_hits_total` - * `scanner.artifact_bytes_total{format}` - * `scanner.attestation_latency_seconds`, `scanner.rekor_failures_total` - * `scanner_analyzer_golang_heuristic_total{indicator,version_hint}` — increments whenever the Go analyzer falls back to heuristics (build-id or runtime markers). Grafana panel: `sum by (indicator) (rate(scanner_analyzer_golang_heuristic_total[5m]))`; alert when the rate is ≥ 1 for 15 minutes to highlight unexpected stripped binaries. -* **Tracing**: spans for acquire→union→analyzers→compose→emit→sign→log. -* **Audit logs**: DSSE requests log `license_id`, `image_digest`, `artifactSha256`, `policy_digest?`, Rekor UUID on success. - ---- - -## 12) Testing matrix - -* **Determinism:** given same image + analyzers → byte‑identical **CDX Protobuf**; JSON normalized. -* **OS packages:** ground‑truth images per distro; compare to package DB. -* **Lang ecosystems:** sample images per ecosystem (Java/Node/Python/Go/.NET/Rust) with installed metadata; negative tests w/ lockfile‑only. 
-* **Native & EntryTrace:** ELF graph correctness; shell AST cases (includes, run‑parts, exec, case/if). -* **Diff:** layer attribution against synthetic two‑image sequences. -* **Performance:** cold vs warm cache; large `node_modules` and `site‑packages`. -* **Security:** ensure no code execution from image; fuzz parser inputs; path traversal resistance on layer extract. - ---- - -## 13) Failure modes & degradations - -* **Missing OS DB** (files exist, DB removed): record **files**; do **not** fabricate package components; emit `bin:{sha256}` where unavoidable; flag in evidence. -* **Unreadable metadata** (corrupt dist‑info): record file evidence; skip component creation; annotate. -* **Dynamic shell constructs**: mark unresolved edges with reasons (env var unknown) and continue; **Usage** view may be partial. -* **Registry rate limits**: honor backoff; queue job retries with jitter. -* **Signer refusal** (license/plan/version): scan completes; artifact produced; **no attestation**; WebService marks result as **unverified**. - ---- - -## 14) Optional plug‑ins (off by default) - -* **Patch‑presence detector** (signature‑based backport checks). Reads curated function‑level signatures from advisories; inspects binaries for patched code snippets to lower false‑positives for backported fixes. Runs as a sidecar analyzer that **annotates** components; never overrides core identities. -* **Runtime probes** (with Zastava): when allowed, compare **/proc//maps** (DSOs actually loaded) with static **Usage** view for precision. - ---- - -## 15) DevOps & operations - -* **HA**: WebService horizontal scale; Workers autoscale by queue depth & CPU; distributed locks on layers. -* **Retention**: ILM rules per artifact class (`short`, `default`, `compliance`); **Object Lock** for compliance artifacts (reports, signed SBOMs). -* **Upgrades**: bump **cache schema** when analyzer outputs change; WebService triggers refresh of dependent artifacts. -* **Backups**: Mongo (daily dumps); RustFS snapshots (filesystem-level rsync/ZFS) or S3 versioning when legacy driver enabled; Rekor v2 DB snapshots. - ---- - -## 16) CLI & UI touch points - -* **CLI**: `stellaops scan `, `stellaops diff --old --new`, `stellaops export`, `stellaops verify attestation `. -* **UI**: Scan detail shows **Inventory/Usage** toggles, **Diff by Layer**, **Attestation badge** (verified/unverified), Rekor link, and **EntryTrace** chain with file:line breadcrumbs. - ---- - -## 17) Roadmap (Scanner) - -* **M2**: Windows containers (MSI/SxS/GAC analyzers), PE/Mach‑O native analyzer, deeper Rust metadata. -* **M2**: Buildx generator GA (certified external registries), cross‑registry trust policies. -* **M3**: Patch‑presence plug‑in GA (opt‑in), cross‑image corpus clustering (evidence‑only; not identity). -* **M3**: Advanced EntryTrace (POSIX shell features breadth, busybox detection). 
- ---- - -### Appendix A — EntryTrace resolution (pseudo) - -```csharp -ResolveEntrypoint(ImageConfig cfg, RootFs fs): - cmd = Normalize(cfg.ENTRYPOINT, cfg.CMD) - stack = [ Script(cmd, path=FindOnPath(cmd[0], fs)) ] - visited = set() - - while stack not empty and depth < MAX: - cur = stack.pop() - if cur in visited: continue - visited.add(cur) - - if IsShellScript(cur.path): - ast = ParseShell(cur.path) - foreach directive in ast: - if directive is Source include: - p = ResolveInclude(include.path, cur.env, fs) - stack.push(Script(p)) - if directive is Exec call: - p = ResolveExec(call.argv[0], cur.env, fs) - stack.push(Program(p, argv=call.argv)) - if directive is Interpreter (python -m / node / java -jar): - term = ResolveInterpreterTarget(call, fs) - stack.push(Program(term)) - else: - return Terminal(cur.path) - - return Unknown(reason) -``` - -### Appendix A.1 — EntryTrace Explainability - -EntryTrace emits structured diagnostics and metrics so operators can quickly understand why resolution succeeded or degraded: - -| Reason | Description | Typical Mitigation | -|--------|-------------|--------------------| -| `CommandNotFound` | A command referenced in the script cannot be located in the layered root filesystem or `PATH`. | Ensure binaries exist in the image or extend `PATH` hints. | -| `MissingFile` | `source`/`.`/`run-parts` targets are missing. | Bundle the script or guard the include. | -| `DynamicEnvironmentReference` | Path depends on `$VARS` that are unknown at scan time. | Provide defaults via scan metadata or accept partial usage. | -| `RecursionLimitReached` | Nested includes exceeded the analyzer depth limit (default 64). | Flatten indirection or increase the limit in options. | -| `RunPartsEmpty` | `run-parts` directory contained no executable entries. | Remove empty directories or ignore if intentional. | -| `JarNotFound` / `ModuleNotFound` | Java/Python targets missing, preventing interpreter tracing. | Ship the jar/module with the image or adjust the launcher. | - -Diagnostics drive two metrics published by `EntryTraceMetrics`: - -- `entrytrace_resolutions_total{outcome}` — resolution attempts segmented by outcome (`resolved`, `partiallyresolved`, `unresolved`). -- `entrytrace_unresolved_total{reason}` — diagnostic counts keyed by reason. - -Structured logs include `entrytrace.path`, `entrytrace.command`, `entrytrace.reason`, and `entrytrace.depth`, all correlated with scan/job IDs. Timestamps are normalized to UTC (microsecond precision) to keep DSSE attestations and UI traces explainable. - -### Appendix B — BOM‑Index sidecar - -``` -struct Header { magic, version, imageDigest, createdAt } -vector purls -map components -optional map usedByEntrypoint -``` +# component_architecture_scanner.md — **Stella Ops Scanner** (2025Q4) + +> **Scope.** Implementation‑ready architecture for the **Scanner** subsystem: WebService, Workers, analyzers, SBOM assembly (inventory & usage), per‑layer caching, three‑way diffs, artifact catalog (RustFS default + Mongo, S3-compatible fallback), attestation hand‑off, and scale/security posture. This document is the contract between the scanning plane and everything else (Policy, Excititor, Concelier, UI, CLI). + +--- + +## 0) Mission & boundaries + +**Mission.** Produce **deterministic**, **explainable** SBOMs and diffs for container images and filesystems, quickly and repeatedly, without guessing. Emit two views: **Inventory** (everything present) and **Usage** (entrypoint closure + actually linked libs). 
Attach attestations through **Signer→Attestor→Rekor v2**. + +**Boundaries.** + +* Scanner **does not** produce PASS/FAIL. The backend (Policy + Excititor + Concelier) decides presentation and verdicts. +* Scanner **does not** keep third‑party SBOM warehouses. It may **bind** to existing attestations for exact hashes. +* Core analyzers are **deterministic** (no fuzzy identity). Optional heuristic plug‑ins (e.g., patch‑presence) run under explicit flags and never contaminate the core SBOM. + +--- + +## 1) Solution & project layout + +``` +src/ + ├─ StellaOps.Scanner.WebService/ # REST control plane, catalog, diff, exports + ├─ StellaOps.Scanner.Worker/ # queue consumer; executes analyzers + ├─ StellaOps.Scanner.Models/ # DTOs, evidence, graph nodes, CDX/SPDX adapters + ├─ StellaOps.Scanner.Storage/ # Mongo repositories; RustFS object client (default) + S3 fallback; ILM/GC + ├─ StellaOps.Scanner.Queue/ # queue abstraction (Redis/NATS/RabbitMQ) + ├─ StellaOps.Scanner.Cache/ # layer cache; file CAS; bloom/bitmap indexes + ├─ StellaOps.Scanner.EntryTrace/ # ENTRYPOINT/CMD → terminal program resolver (shell AST) + ├─ StellaOps.Scanner.Analyzers.OS.[Apk|Dpkg|Rpm]/ + ├─ StellaOps.Scanner.Analyzers.Lang.[Java|Node|Python|Go|DotNet|Rust]/ + ├─ StellaOps.Scanner.Analyzers.Native.[ELF|PE|MachO]/ # PE/Mach-O planned (M2) + ├─ StellaOps.Scanner.Emit.CDX/ # CycloneDX (JSON + Protobuf) + ├─ StellaOps.Scanner.Emit.SPDX/ # SPDX 3.0.1 JSON + ├─ StellaOps.Scanner.Diff/ # image→layer→component three‑way diff + ├─ StellaOps.Scanner.Index/ # BOM‑Index sidecar (purls + roaring bitmaps) + ├─ StellaOps.Scanner.Tests.* # unit/integration/e2e fixtures + └─ tools/ + ├─ StellaOps.Scanner.Sbomer.BuildXPlugin/ # BuildKit generator (image referrer SBOMs) + └─ StellaOps.Scanner.Sbomer.DockerImage/ # CLI‑driven scanner container +``` + +Analyzer assemblies and buildx generators are packaged as **restart-time plug-ins** under `plugins/scanner/**` with manifests; services must restart to activate new plug-ins. + +### 1.1 Queue backbone (Redis / NATS) + +`StellaOps.Scanner.Queue` exposes a transport-agnostic contract (`IScanQueue`/`IScanQueueLease`) used by the WebService producer and Worker consumers. Sprint 9 introduces two first-party transports: + +- **Redis Streams** (default). Uses consumer groups, deterministic idempotency keys (`scanner:jobs:idemp:*`), and supports lease claim (`XCLAIM`), renewal, exponential-backoff retries, and a `scanner:jobs:dead` stream for exhausted attempts. +- **NATS JetStream**. Provisions the `SCANNER_JOBS` work-queue stream + durable consumer `scanner-workers`, publishes with `MsgId` for dedupe, applies backoff via `NAK` delays, and routes dead-lettered jobs to `SCANNER_JOBS_DEAD`. + +Metrics are emitted via `Meter` counters (`scanner_queue_enqueued_total`, `scanner_queue_retry_total`, `scanner_queue_deadletter_total`), and `ScannerQueueHealthCheck` pings the active backend (Redis `PING`, NATS `PING`). Configuration is bound from `scanner.queue`: + +```yaml +scanner: + queue: + kind: redis # or nats + redis: + connectionString: "redis://queue:6379/0" + streamName: "scanner:jobs" + nats: + url: "nats://queue:4222" + stream: "SCANNER_JOBS" + subject: "scanner.jobs" + durableConsumer: "scanner-workers" + deadLetterSubject: "scanner.jobs.dead" + maxDeliveryAttempts: 5 + retryInitialBackoff: 00:00:05 + retryMaxBackoff: 00:02:00 +``` + +The DI extension (`AddScannerQueue`) wires the selected transport, so future additions (e.g., RabbitMQ) only implement the same contract and register. 
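+
+A minimal consume-loop sketch against that contract, assuming illustrative member shapes for `IScanQueue`/`IScanQueueLease` (the real surface ships in `StellaOps.Scanner.Queue`); only the lease → process → ack/retry flow mirrors the behaviour described above:
+
+```csharp
+using System;
+using System.Threading;
+using System.Threading.Tasks;
+
+// Hypothetical contract shapes; member names are assumptions, not the shipped API.
+public interface IScanQueueLease
+{
+    string JobId { get; }
+    ReadOnlyMemory<byte> Payload { get; }
+    Task RenewAsync(TimeSpan extension, CancellationToken ct);   // lease heartbeat (e.g. Redis XCLAIM)
+    Task AcknowledgeAsync(CancellationToken ct);                 // success: remove from the stream
+    Task ReleaseAsync(bool requeue, CancellationToken ct);       // failure: NAK; transport applies backoff
+}
+
+public interface IScanQueue
+{
+    Task EnqueueAsync(string idempotencyKey, ReadOnlyMemory<byte> payload, CancellationToken ct);
+    Task<IScanQueueLease?> LeaseAsync(TimeSpan visibility, CancellationToken ct);
+}
+
+public sealed class ScanJobConsumer
+{
+    private readonly IScanQueue _queue;
+
+    public ScanJobConsumer(IScanQueue queue) => _queue = queue;
+
+    public async Task RunAsync(CancellationToken ct)
+    {
+        while (!ct.IsCancellationRequested)
+        {
+            var lease = await _queue.LeaseAsync(TimeSpan.FromMinutes(5), ct);
+            if (lease is null) continue;                 // nothing pending; transport idles
+
+            try
+            {
+                await ProcessAsync(lease.Payload, ct);   // run the Worker pipeline for this job
+                await lease.AcknowledgeAsync(ct);
+            }
+            catch
+            {
+                await lease.ReleaseAsync(true, ct);      // retried with backoff, dead-lettered when exhausted
+            }
+        }
+    }
+
+    private static Task ProcessAsync(ReadOnlyMemory<byte> payload, CancellationToken ct)
+        => Task.CompletedTask;                           // placeholder for analyzer execution
+}
+```
+
+Because every enqueue carries an idempotency key, duplicate submissions collapse at the transport (Redis `scanner:jobs:idemp:*` keys, NATS `MsgId`) rather than in the consumer.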
+ +**Runtime form‑factor:** two deployables + +* **Scanner.WebService** (stateless REST) +* **Scanner.Worker** (N replicas; queue‑driven) + +--- + +## 2) External dependencies + +* **OCI registry** with **Referrers API** (discover attached SBOMs/signatures). +* **RustFS** (default, offline-first) for SBOM artifacts; optional S3/MinIO compatibility retained for migration; **Object Lock** semantics emulated via retention headers; **ILM** for TTL. +* **MongoDB** for catalog, job state, diffs, ILM rules. +* **Queue** (Redis Streams/NATS/RabbitMQ). +* **Authority** (on‑prem OIDC) for **OpToks** (DPoP/mTLS). +* **Signer** + **Attestor** (+ **Fulcio/KMS** + **Rekor v2**) for DSSE + transparency. + +--- + +## 3) Contracts & data model + +### 3.1 Evidence‑first component model + +**Nodes** + +* `Image`, `Layer`, `File` +* `Component` (`purl?`, `name`, `version?`, `type`, `id` — may be `bin:{sha256}`) +* `Executable` (ELF/PE/Mach‑O), `Library` (native or managed), `EntryScript` (shell/launcher) + +**Edges** (all carry **Evidence**) + +* `contains(Image|Layer → File)` +* `installs(PackageDB → Component)` (OS database row) +* `declares(InstalledMetadata → Component)` (dist‑info, pom.properties, deps.json…) +* `links_to(Executable → Library)` (ELF `DT_NEEDED`, PE imports) +* `calls(EntryScript → Program)` (file:line from shell AST) +* `attests(Rekor → Component|Image)` (SBOM/predicate binding) +* `bound_from_attestation(Component_attested → Component_observed)` (hash equality proof) + +**Evidence** + +``` +{ source: enum, locator: (path|offset|line), sha256?, method: enum, timestamp } +``` + +No confidences. Either a fact is proven with listed mechanisms, or it is not claimed. + +### 3.2 Catalog schema (Mongo) + +* `artifacts` + + ``` + { _id, type: layer-bom|image-bom|diff|index, + format: cdx-json|cdx-pb|spdx-json, + bytesSha256, size, rekor: { uuid,index,url }?, + ttlClass, immutable, refCount, createdAt } + ``` +* `images { imageDigest, repo, tag?, arch, createdAt, lastSeen }` +* `layers { layerDigest, mediaType, size, createdAt, lastSeen }` +* `links { fromType, fromDigest, artifactId }` // image/layer -> artifact +* `jobs { _id, kind, args, state, startedAt, heartbeatAt, endedAt, error }` +* `lifecycleRules { ruleId, scope, ttlDays, retainIfReferenced, immutable }` + +### 3.3 Object store layout (RustFS) + +``` +layers//sbom.cdx.json.zst +layers//sbom.spdx.json.zst +images//inventory.cdx.pb # CycloneDX Protobuf +images//usage.cdx.pb +indexes//bom-index.bin # purls + roaring bitmaps +diffs/_/diff.json.zst +attest/.dsse.json # DSSE bundle (cert chain + Rekor proof) +``` + +RustFS exposes a deterministic HTTP API (`PUT|GET|DELETE /api/v1/buckets/{bucket}/objects/{key}`). +Scanner clients tag immutable uploads with `X-RustFS-Immutable: true` and, when retention applies, +`X-RustFS-Retain-Seconds: `. Additional headers can be injected via +`scanner.artifactStore.headers` to support custom auth or proxy requirements. Legacy MinIO/S3 +deployments remain supported by setting `scanner.artifactStore.driver = "s3"` during phased +migrations. + +--- + +## 4) REST API (Scanner.WebService) + +All under `/api/v1/scanner`. Auth: **OpTok** (DPoP/mTLS); RBAC scopes. + +``` +POST /scans { imageRef|digest, force?:bool } → { scanId } +GET /scans/{id} → { status, imageDigest, artifacts[], rekor? } +GET /sboms/{imageDigest} ?format=cdx-json|cdx-pb|spdx-json&view=inventory|usage → bytes +GET /diff?old=&new=&view=inventory|usage → diff.json +POST /exports { imageDigest, format, view, attest?:bool } → { artifactId, rekor? 
} +POST /reports { imageDigest, policyRevision? } → { reportId, rekor? } # delegates to backend policy+vex +GET /catalog/artifacts/{id} → { meta } +GET /healthz | /readyz | /metrics +``` + +### Report events + +When `scanner.events.enabled = true`, the WebService serialises the signed report (canonical JSON + DSSE envelope) with `NotifyCanonicalJsonSerializer` and publishes two Redis Stream entries (`scanner.report.ready`, `scanner.scan.completed`) to the configured stream (default `stella.events`). The stream fields carry the whole envelope plus lightweight headers (`kind`, `tenant`, `ts`) so Notify and UI timelines can consume the event bus without recomputing signatures. Publish timeouts and bounded stream length are controlled via `scanner:events:publishTimeoutSeconds` and `scanner:events:maxStreamLength`. If the queue driver is already Redis and no explicit events DSN is provided, the host reuses the queue connection and auto-enables event emission so deployments get live envelopes without extra wiring. Compose/Helm bundles expose the same knobs via the `SCANNER__EVENTS__*` environment variables for quick tuning. + +--- + +## 5) Execution flow (Worker) + +### 5.1 Acquire & verify + +1. **Resolve image** (prefer `repo@sha256:…`). +2. **(Optional) verify image signature** per policy (cosign). +3. **Pull blobs**, compute layer digests; record metadata. + +### 5.2 Layer union FS + +* Apply whiteouts; materialize final filesystem; map **file → first introducing layer**. +* Windows layers (MSI/SxS/GAC) planned in **M2**. + +### 5.3 Evidence harvest (parallel analyzers; deterministic only) + +**A) OS packages** + +* **apk**: `/lib/apk/db/installed` +* **dpkg**: `/var/lib/dpkg/status`, `/var/lib/dpkg/info/*.list` +* **rpm**: `/var/lib/rpm/Packages` (via librpm or parser) +* Record `name`, `version` (epoch/revision), `arch`, source package where present, and **declared file lists**. + +> **Data flow note:** Each OS analyzer now writes its canonical output into the shared `ScanAnalysisStore` under +> `analysis.os.packages` (raw results), `analysis.os.fragments` (per-analyzer layer fragments), and contributes to +> `analysis.layers.fragments` (the aggregated view consumed by emit/diff pipelines). Helpers in +> `ScanAnalysisCompositionBuilder` convert these fragments into SBOM composition requests and component graphs so the +> diff/emit stages no longer reach back into individual analyzer implementations. + +**B) Language ecosystems (installed state only)** + +* **Java**: `META-INF/maven/*/pom.properties`, MANIFEST → `pkg:maven/...` +* **Node**: `node_modules/**/package.json` → `pkg:npm/...` +* **Python**: `*.dist-info/{METADATA,RECORD}` → `pkg:pypi/...` +* **Go**: Go **buildinfo** in binaries → `pkg:golang/...` +* **.NET**: `*.deps.json` + assembly metadata → `pkg:nuget/...` +* **Rust**: crates only when **explicitly present** (embedded metadata or cargo/registry traces); otherwise binaries reported as `bin:{sha256}`. + +> **Rule:** We only report components proven **on disk** with authoritative metadata. Lockfiles are evidence only. + +**C) Native link graph** + +* **ELF**: parse `PT_INTERP`, `DT_NEEDED`, RPATH/RUNPATH, **GNU symbol versions**; map **SONAMEs** to file paths; link executables → libs. +* **PE/Mach‑O** (planned M2): import table, delay‑imports; version resources; code signatures. +* Map libs back to **OS packages** if possible (via file lists); else emit `bin:{sha256}` components. 
+* The exported metadata (`stellaops.os.*` properties, license list, source package) feeds policy scoring and export pipelines + directly – Policy evaluates quiet rules against package provenance while Exporters forward the enriched fields into + downstream JSON/Trivy payloads. + +**D) EntryTrace (ENTRYPOINT/CMD → terminal program)** + +* Read image config; parse shell (POSIX/Bash subset) with AST: `source`/`.` includes; `case/if`; `exec`/`command`; `run‑parts`. +* Resolve commands via **PATH** within the **built rootfs**; follow language launchers (Java/Node/Python) to identify the terminal program (ELF/JAR/venv script). +* Record **file:line** and choices for each hop; output chain graph. +* Unresolvable dynamic constructs are recorded as **unknown** edges with reasons (e.g., `$FOO` unresolved). + +**E) Attestation & SBOM bind (optional)** + +* For each **file hash** or **binary hash**, query local cache of **Rekor v2** indices; if an SBOM attestation is found for **exact hash**, bind it to the component (origin=`attested`). +* For the **image** digest, likewise bind SBOM attestations (build‑time referrers). + +### 5.4 Component normalization (exact only) + +* Create `Component` nodes only with deterministic identities: purl, or **`bin:{sha256}`** for unlabeled binaries. +* Record **origin** (OS DB, installed metadata, linker, attestation). + +### 5.5 SBOM assembly & emit + +* **Per-layer SBOM fragments**: components introduced by the layer (+ relationships). +* **Image SBOMs**: merge fragments; refer back to them via **CycloneDX BOM‑Link** (or SPDX ExternalRef). +* Emit both **Inventory** & **Usage** views. +* When the native analyzer reports an ELF `buildId`, attach it to component metadata and surface it as `stellaops:buildId` in CycloneDX properties (and diff metadata). This keeps SBOM/diff output in lockstep with runtime events and the debug-store manifest. +* Serialize **CycloneDX JSON** and **CycloneDX Protobuf**; optionally **SPDX 3.0.1 JSON**. +* Build **BOM‑Index** sidecar: purl table + roaring bitmap; flag `usedByEntrypoint` components for fast backend joins. + +The emitted `buildId` metadata is preserved in component hashes, diff payloads, and `/policy/runtime` responses so operators can pivot from SBOM entries → runtime events → `debug/.build-id//.debug` within the Offline Kit or release bundle. + +### 5.6 DSSE attestation (via Signer/Attestor) + +* WebService constructs **predicate** with `image_digest`, `stellaops_version`, `license_id`, `policy_digest?` (when emitting **final reports**), timestamps. +* Calls **Signer** (requires **OpTok + PoE**); Signer verifies **entitlement + scanner image integrity** and returns **DSSE bundle**. +* **Attestor** logs to **Rekor v2**; returns `{uuid,index,proof}` → stored in `artifacts.rekor`. + +--- + +## 6) Three‑way diff (image → layer → component) + +### 6.1 Keys & classification + +* Component key: **purl** when present; else `bin:{sha256}`. +* Diff classes: `added`, `removed`, `version_changed` (`upgraded|downgraded`), `metadata_changed` (e.g., origin from attestation vs observed). +* Layer attribution: for each change, resolve the **introducing/removing layer**. 
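+
+The keying and classification rules above reduce to a small pure function. A minimal sketch, with illustrative types rather than the actual `StellaOps.Scanner.Diff` API, assuming components already carry their resolved identity, version, and origin:
+
+```csharp
+public sealed record ComponentRecord(string? Purl, string Sha256, string Version, string Origin);
+
+public enum DiffClass { Added, Removed, VersionChanged, MetadataChanged }
+
+public static class ComponentDiff
+{
+    // purl when present; otherwise the content-addressed binary identity.
+    public static string KeyFor(ComponentRecord c)
+        => string.IsNullOrEmpty(c.Purl) ? $"bin:{c.Sha256}" : c.Purl!;
+
+    // Returns null when the component is unchanged under the rules above.
+    public static DiffClass? Classify(ComponentRecord? oldC, ComponentRecord? newC)
+    {
+        if (oldC is null && newC is not null) return DiffClass.Added;
+        if (oldC is not null && newC is null) return DiffClass.Removed;
+        if (oldC is null || newC is null) return null;
+        if (oldC.Version != newC.Version) return DiffClass.VersionChanged;  // upgraded vs downgraded resolved separately
+        if (oldC.Origin != newC.Origin) return DiffClass.MetadataChanged;   // e.g. attestation vs observed
+        return null;
+    }
+}
+```
+
+The set algebra in §6.2 below then groups these results by introducing/removing layer and flags entrypoint usage.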
+ +### 6.2 Algorithm (outline) + +``` +A = components(imageOld, key) +B = components(imageNew, key) + +added = B \ A +removed = A \ B +changed = { k in A∩B : version(A[k]) != version(B[k]) || origin changed } + +for each item in added/removed/changed: + layer = attribute_to_layer(item, imageOld|imageNew) + usageFlag = usedByEntrypoint(item, imageNew) +emit diff.json (grouped by layer with badges) +``` + +Diffs are stored as artifacts and feed **UI** and **CLI**. + +--- + +## 7) Build‑time SBOMs (fast CI path) + +**Scanner.Sbomer.BuildXPlugin** can act as a BuildKit **generator**: + +* During `docker buildx build --attest=type=sbom,generator=stellaops/sbom-indexer`, run analyzers on the build context/output; attach SBOMs as OCI **referrers** to the built image. +* Optionally request **Signer/Attestor** to produce **Stella Ops‑verified** attestation immediately; else, Scanner.WebService can verify and re‑attest post‑push. +* Scanner.WebService trusts build‑time SBOMs per policy, enabling **no‑rescan** for unchanged bases. + +--- + +## 8) Configuration (YAML) + +```yaml +scanner: + queue: + kind: redis + url: "redis://queue:6379/0" + mongo: + uri: "mongodb://mongo/scanner" + s3: + endpoint: "http://minio:9000" + bucket: "stellaops" + objectLock: "governance" # or 'compliance' + analyzers: + os: { apk: true, dpkg: true, rpm: true } + lang: { java: true, node: true, python: true, go: true, dotnet: true, rust: true } + native: { elf: true, pe: false, macho: false } # PE/Mach-O in M2 + entryTrace: { enabled: true, shellMaxDepth: 64, followRunParts: true } + emit: + cdx: { json: true, protobuf: true } + spdx: { json: true } + compress: "zstd" + rekor: + url: "https://rekor-v2.internal" + signer: + url: "https://signer.internal" + limits: + maxParallel: 8 + perRegistryConcurrency: 2 + policyHints: + verifyImageSignature: false + trustBuildTimeSboms: true +``` + +--- + +## 9) Scale & performance + +* **Parallelism**: per‑analyzer concurrency; bounded directory walkers; file CAS dedupe by sha256. +* **Distributed locks** per **layer digest** to prevent duplicate work across Workers. +* **Registry throttles**: per‑host concurrency budgets; exponential backoff on 429/5xx. +* **Targets**: + + * **Build‑time**: P95 ≤ 3–5 s on warmed bases (CI generator). + * **Post‑build delta**: P95 ≤ 10 s for 200 MB images with cache hit. + * **Emit**: CycloneDX Protobuf ≤ 150 ms for 5k components; JSON ≤ 500 ms. + * **Diff**: ≤ 200 ms for 5k vs 5k components. + +--- + +## 10) Security posture + +* **AuthN**: Authority‑issued short OpToks (DPoP/mTLS). +* **AuthZ**: scopes (`scanner.scan`, `scanner.export`, `scanner.catalog.read`). +* **mTLS** to **Signer**/**Attestor**; only **Signer** can sign. +* **No network fetches** during analysis (except registry pulls and optional Rekor index reads). +* **Sandboxing**: non‑root containers; read‑only FS; seccomp profiles; disable execution of scanned content. +* **Release integrity**: all first‑party images are **cosign‑signed**; Workers/WebService self‑verify at startup. + +--- + +## 11) Observability & audit + +* **Metrics**: + + * `scanner.jobs_inflight`, `scanner.scan_latency_seconds` + * `scanner.layer_cache_hits_total`, `scanner.file_cas_hits_total` + * `scanner.artifact_bytes_total{format}` + * `scanner.attestation_latency_seconds`, `scanner.rekor_failures_total` + * `scanner_analyzer_golang_heuristic_total{indicator,version_hint}` — increments whenever the Go analyzer falls back to heuristics (build-id or runtime markers). 
Grafana panel: `sum by (indicator) (rate(scanner_analyzer_golang_heuristic_total[5m]))`; alert when the rate is ≥ 1 for 15 minutes to highlight unexpected stripped binaries. +* **Tracing**: spans for acquire→union→analyzers→compose→emit→sign→log. +* **Audit logs**: DSSE requests log `license_id`, `image_digest`, `artifactSha256`, `policy_digest?`, Rekor UUID on success. + +--- + +## 12) Testing matrix + +* **Determinism:** given same image + analyzers → byte‑identical **CDX Protobuf**; JSON normalized. +* **OS packages:** ground‑truth images per distro; compare to package DB. +* **Lang ecosystems:** sample images per ecosystem (Java/Node/Python/Go/.NET/Rust) with installed metadata; negative tests w/ lockfile‑only. +* **Native & EntryTrace:** ELF graph correctness; shell AST cases (includes, run‑parts, exec, case/if). +* **Diff:** layer attribution against synthetic two‑image sequences. +* **Performance:** cold vs warm cache; large `node_modules` and `site‑packages`. +* **Security:** ensure no code execution from image; fuzz parser inputs; path traversal resistance on layer extract. + +--- + +## 13) Failure modes & degradations + +* **Missing OS DB** (files exist, DB removed): record **files**; do **not** fabricate package components; emit `bin:{sha256}` where unavoidable; flag in evidence. +* **Unreadable metadata** (corrupt dist‑info): record file evidence; skip component creation; annotate. +* **Dynamic shell constructs**: mark unresolved edges with reasons (env var unknown) and continue; **Usage** view may be partial. +* **Registry rate limits**: honor backoff; queue job retries with jitter. +* **Signer refusal** (license/plan/version): scan completes; artifact produced; **no attestation**; WebService marks result as **unverified**. + +--- + +## 14) Optional plug‑ins (off by default) + +* **Patch‑presence detector** (signature‑based backport checks). Reads curated function‑level signatures from advisories; inspects binaries for patched code snippets to lower false‑positives for backported fixes. Runs as a sidecar analyzer that **annotates** components; never overrides core identities. +* **Runtime probes** (with Zastava): when allowed, compare **/proc//maps** (DSOs actually loaded) with static **Usage** view for precision. + +--- + +## 15) DevOps & operations + +* **HA**: WebService horizontal scale; Workers autoscale by queue depth & CPU; distributed locks on layers. +* **Retention**: ILM rules per artifact class (`short`, `default`, `compliance`); **Object Lock** for compliance artifacts (reports, signed SBOMs). +* **Upgrades**: bump **cache schema** when analyzer outputs change; WebService triggers refresh of dependent artifacts. +* **Backups**: Mongo (daily dumps); RustFS snapshots (filesystem-level rsync/ZFS) or S3 versioning when legacy driver enabled; Rekor v2 DB snapshots. + +--- + +## 16) CLI & UI touch points + +* **CLI**: `stellaops scan `, `stellaops diff --old --new`, `stellaops export`, `stellaops verify attestation `. +* **UI**: Scan detail shows **Inventory/Usage** toggles, **Diff by Layer**, **Attestation badge** (verified/unverified), Rekor link, and **EntryTrace** chain with file:line breadcrumbs. + +--- + +## 17) Roadmap (Scanner) + +* **M2**: Windows containers (MSI/SxS/GAC analyzers), PE/Mach‑O native analyzer, deeper Rust metadata. +* **M2**: Buildx generator GA (certified external registries), cross‑registry trust policies. +* **M3**: Patch‑presence plug‑in GA (opt‑in), cross‑image corpus clustering (evidence‑only; not identity). 
+* **M3**: Advanced EntryTrace (POSIX shell features breadth, busybox detection). + +--- + +### Appendix A — EntryTrace resolution (pseudo) + +```csharp +ResolveEntrypoint(ImageConfig cfg, RootFs fs): + cmd = Normalize(cfg.ENTRYPOINT, cfg.CMD) + stack = [ Script(cmd, path=FindOnPath(cmd[0], fs)) ] + visited = set() + + while stack not empty and depth < MAX: + cur = stack.pop() + if cur in visited: continue + visited.add(cur) + + if IsShellScript(cur.path): + ast = ParseShell(cur.path) + foreach directive in ast: + if directive is Source include: + p = ResolveInclude(include.path, cur.env, fs) + stack.push(Script(p)) + if directive is Exec call: + p = ResolveExec(call.argv[0], cur.env, fs) + stack.push(Program(p, argv=call.argv)) + if directive is Interpreter (python -m / node / java -jar): + term = ResolveInterpreterTarget(call, fs) + stack.push(Program(term)) + else: + return Terminal(cur.path) + + return Unknown(reason) +``` + +### Appendix A.1 — EntryTrace Explainability + +EntryTrace emits structured diagnostics and metrics so operators can quickly understand why resolution succeeded or degraded: + +| Reason | Description | Typical Mitigation | +|--------|-------------|--------------------| +| `CommandNotFound` | A command referenced in the script cannot be located in the layered root filesystem or `PATH`. | Ensure binaries exist in the image or extend `PATH` hints. | +| `MissingFile` | `source`/`.`/`run-parts` targets are missing. | Bundle the script or guard the include. | +| `DynamicEnvironmentReference` | Path depends on `$VARS` that are unknown at scan time. | Provide defaults via scan metadata or accept partial usage. | +| `RecursionLimitReached` | Nested includes exceeded the analyzer depth limit (default 64). | Flatten indirection or increase the limit in options. | +| `RunPartsEmpty` | `run-parts` directory contained no executable entries. | Remove empty directories or ignore if intentional. | +| `JarNotFound` / `ModuleNotFound` | Java/Python targets missing, preventing interpreter tracing. | Ship the jar/module with the image or adjust the launcher. | + +Diagnostics drive two metrics published by `EntryTraceMetrics`: + +- `entrytrace_resolutions_total{outcome}` — resolution attempts segmented by outcome (`resolved`, `partiallyresolved`, `unresolved`). +- `entrytrace_unresolved_total{reason}` — diagnostic counts keyed by reason. + +Structured logs include `entrytrace.path`, `entrytrace.command`, `entrytrace.reason`, and `entrytrace.depth`, all correlated with scan/job IDs. Timestamps are normalized to UTC (microsecond precision) to keep DSSE attestations and UI traces explainable. + +### Appendix B — BOM‑Index sidecar + +``` +struct Header { magic, version, imageDigest, createdAt } +vector purls +map components +optional map usedByEntrypoint +``` diff --git a/docs/ARCHITECTURE_VEXER.md b/docs/ARCHITECTURE_VEXER.md index 0ea54de5..7722fb19 100644 --- a/docs/ARCHITECTURE_VEXER.md +++ b/docs/ARCHITECTURE_VEXER.md @@ -1,463 +1,463 @@ -# component_architecture_vexer.md — **Stella Ops Vexer** (2025Q4) - -> **Scope.** This document specifies the **Vexer** service: its purpose, trust model, data structures, APIs, plug‑in contracts, storage schema, normalization/consensus algorithms, performance budgets, testing matrix, and how it integrates with Scanner, Policy, Feedser, and the attestation chain. It is implementation‑ready. 
- ---- - -## 0) Mission & role in the platform - -**Mission.** Convert heterogeneous **VEX** statements (OpenVEX, CSAF VEX, CycloneDX VEX; vendor/distro/platform sources) into **canonical, queryable claims**; compute **deterministic consensus** per *(vuln, product)*; preserve **conflicts with provenance**; publish **stable, attestable exports** that the backend uses to suppress non‑exploitable findings, prioritize remaining risk, and explain decisions. - -**Boundaries.** - -* Vexer **does not** decide PASS/FAIL. It supplies **evidence** (statuses + justifications + provenance weights). -* Vexer preserves **conflicting claims** unchanged; consensus encodes how we would pick, but the raw set is always exportable. -* VEX consumption is **backend‑only**: Scanner never applies VEX. The backend’s **Policy Engine** asks Vexer for status evidence and then decides what to show. - ---- - -## 1) Inputs, outputs & canonical domain - -### 1.1 Accepted input formats (ingest) - -* **OpenVEX** JSON documents (attested or raw). -* **CSAF VEX** 2.x (vendor PSIRTs and distros commonly publish CSAF). -* **CycloneDX VEX** 1.4+ (standalone VEX or embedded VEX blocks). -* **OCI‑attached attestations** (VEX statements shipped as OCI referrers) — optional connectors. - -All connectors register **source metadata**: provider identity, trust tier, signature expectations (PGP/cosign/PKI), fetch windows, rate limits, and time anchors. - -### 1.2 Canonical model (normalized) - -Every incoming statement becomes a set of **VexClaim** records: - -``` -VexClaim -- providerId // 'redhat', 'suse', 'ubuntu', 'github', 'vendorX' -- vulnId // 'CVE-2025-12345', 'GHSA-xxxx', canonicalized -- productKey // canonical product identity (see §2.2) -- status // affected | not_affected | fixed | under_investigation -- justification? // for 'not_affected'/'affected' where provided -- introducedVersion? // semantics per provider (range or exact) -- fixedVersion? // where provided (range or exact) -- lastObserved // timestamp from source or fetch time -- provenance // doc digest, signature status, fetch URI, line/offset anchors -- evidence[] // raw source snippets for explainability -- supersedes? // optional cross-doc chain (docDigest → docDigest) -``` - -### 1.3 Exports (consumption) - -* **VexConsensus** per `(vulnId, productKey)` with: - - * `rollupStatus` (after policy weights/justification gates), - * `sources[]` (winning + losing claims with weights & reasons), - * `policyRevisionId` (identifier of the Vexer policy used), - * `consensusDigest` (stable SHA‑256 over canonical JSON). -* **Raw claims** export for auditing (unchanged, with provenance). -* **Provider snapshots** (per source, last N days) for operator debugging. -* **Index** optimized for backend joins: `(productKey, vulnId) → (status, confidence, sourceSet)`. - -All exports are **deterministic**, and (optionally) **attested** via DSSE and logged to Rekor v2. - ---- - -## 2) Identity model — products & joins - -### 2.1 Vuln identity - -* Accepts **CVE**, **GHSA**, vendor IDs (MSRC, RHSA…), distro IDs (DSA/USN/RHSA…) — normalized to `vulnId` with alias sets. -* **Alias graph** maintained (from Feedser) to map vendor/distro IDs → CVE (primary) and to **GHSA** where applicable. - -### 2.2 Product identity (`productKey`) - -* **Primary:** `purl` (Package URL). -* **Secondary links:** `cpe`, **OS package NVRA/EVR**, NuGet/Maven/Golang identity, and **OS package name** when purl unavailable. -* **Fallback:** `oci:/@` for image‑level VEX. 
-* **Special cases:** kernel modules, firmware, platforms → provider‑specific mapping helpers (connector captures provider’s product taxonomy → canonical `productKey`). - -> Vexer does not invent identities. If a provider cannot be mapped to purl/CPE/NVRA deterministically, we keep the native **product string** and mark the claim as **non‑joinable**; the backend will ignore it unless a policy explicitly whitelists that provider mapping. - ---- - -## 3) Storage schema (MongoDB) - -Database: `vexer` - -### 3.1 Collections - -**`vex.providers`** - -``` -_id: providerId -name, homepage, contact -trustTier: enum {vendor, distro, platform, hub, attestation} -signaturePolicy: { type: pgp|cosign|x509|none, keys[], certs[], cosignKeylessRoots[] } -fetch: { baseUrl, kind: http|oci|file, rateLimit, etagSupport, windowDays } -enabled: bool -createdAt, modifiedAt -``` - -**`vex.raw`** (immutable raw documents) - -``` -_id: sha256(doc bytes) -providerId -uri -ingestedAt -contentType -sig: { verified: bool, method: pgp|cosign|x509|none, keyId|certSubject, bundle? } -payload: GridFS pointer (if large) -disposition: kept|replaced|superseded -correlation: { replaces?: sha256, replacedBy?: sha256 } -``` - -**`vex.claims`** (normalized rows; dedupe on providerId+vulnId+productKey+docDigest) - -``` -_id -providerId -vulnId -productKey -status -justification? -introducedVersion? -fixedVersion? -lastObserved -docDigest -provenance { uri, line?, pointer?, signatureState } -evidence[] { key, value, locator } -indices: - - {vulnId:1, productKey:1} - - {providerId:1, lastObserved:-1} - - {status:1} - - text index (optional) on evidence.value for debugging -``` - -**`vex.consensus`** (rollups) - -``` -_id: sha256(canonical(vulnId, productKey, policyRevision)) -vulnId -productKey -rollupStatus -sources[]: [ - { providerId, status, justification?, weight, lastObserved, accepted:bool, reason } -] -policyRevisionId -evaluatedAt -consensusDigest // same as _id -indices: - - {vulnId:1, productKey:1} - - {policyRevisionId:1, evaluatedAt:-1} -``` - -**`vex.exports`** (manifest of emitted artifacts) - -``` -_id -querySignature -format: raw|consensus|index -artifactSha256 -rekor { uuid, index, url }? -createdAt -policyRevisionId -cacheable: bool -``` - -**`vex.cache`** - -``` -querySignature -> exportId (for fast reuse) -ttl, hits -``` - -**`vex.migrations`** - -* ordered migrations applied at bootstrap to ensure indexes. - -### 3.2 Indexing strategy - -* Hot path queries use exact `(vulnId, productKey)` and time‑bounded windows; compound indexes cover both. -* Providers list view by `lastObserved` for monitoring staleness. -* `vex.consensus` keyed by `(vulnId, productKey, policyRevision)` for deterministic reuse. - ---- - -## 4) Ingestion pipeline - -### 4.1 Connector contract - -```csharp -public interface IVexConnector -{ - string ProviderId { get; } - Task FetchAsync(VexConnectorContext ctx, CancellationToken ct); // raw docs - Task NormalizeAsync(VexConnectorContext ctx, CancellationToken ct); // raw -> VexClaim[] -} -``` - -* **Fetch** must implement: window scheduling, conditional GET (ETag/If‑Modified‑Since), rate limiting, retry/backoff. -* **Normalize** parses the format, validates schema, maps product identities deterministically, emits `VexClaim` records with **provenance**. - -### 4.2 Signature verification (per provider) - -* **cosign (keyless or keyful)** for OCI referrers or HTTP‑served JSON with Sigstore bundles. -* **PGP** (provider keyrings) for distro/vendor feeds that sign docs. 
-* **x509** (mutual TLS / provider‑pinned certs) where applicable. -* Signature state is stored on **vex.raw.sig** and copied into **provenance.signatureState** on claims. - -> Claims from sources failing signature policy are marked `"signatureState.verified=false"` and **policy** can down‑weight or ignore them. - -### 4.3 Time discipline - -* For each doc, prefer **provider’s document timestamp**; if absent, use fetch time. -* Claims carry `lastObserved` which drives **tie‑breaking** within equal weight tiers. - ---- - -## 5) Normalization: product & status semantics - -### 5.1 Product mapping - -* **purl** first; **cpe** second; OS package NVRA/EVR mapping helpers (distro connectors) produce purls via canonical tables (e.g., rpm→purl:rpm, deb→purl:deb). -* Where a provider publishes **platform‑level** VEX (e.g., “RHEL 9 not affected”), connectors expand to known product inventory rules (e.g., map to sets of packages/components shipped in the platform). Expansion tables are versioned and kept per provider; every expansion emits **evidence** indicating the rule applied. -* If expansion would be speculative, the claim remains **platform‑scoped** with `productKey="platform:redhat:rhel:9"` and is flagged **non‑joinable**; backend can decide to use platform VEX only when Scanner proves the platform runtime. - -### 5.2 Status + justification mapping - -* Canonical **status**: `affected | not_affected | fixed | under_investigation`. -* **Justifications** normalized to a controlled vocabulary (CISA‑aligned), e.g.: - - * `component_not_present` - * `vulnerable_code_not_in_execute_path` - * `vulnerable_configuration_unused` - * `inline_mitigation_applied` - * `fix_available` (with `fixedVersion`) - * `under_investigation` -* Providers with free‑text justifications are mapped by deterministic tables; raw text preserved as `evidence`. - ---- - -## 6) Consensus algorithm - -**Goal:** produce a **stable**, explainable `rollupStatus` per `(vulnId, productKey)` given possibly conflicting claims. - -### 6.1 Inputs - -* Set **S** of `VexClaim` for the key. -* **Vexer policy snapshot**: - - * **weights** per provider tier and per provider overrides. - * **justification gates** (e.g., require justification for `not_affected` to be acceptable). - * **minEvidence** rules (e.g., `not_affected` must come from ≥1 vendor or 2 distros). - * **signature requirements** (e.g., require verified signature for ‘fixed’ to be considered). - -### 6.2 Steps - -1. **Filter invalid** claims by signature policy & justification gates → set `S'`. -2. **Score** each claim: - `score = weight(provider) * freshnessFactor(lastObserved)` where freshnessFactor ∈ [0.8, 1.0] for staleness decay (configurable; small effect). -3. **Aggregate** scores per status: `W(status) = Σ score(claims with that status)`. -4. **Pick** `rollupStatus = argmax_status W(status)`. -5. **Tie‑breakers** (in order): - - * Higher **max single** provider score wins (vendor > distro > platform > hub). - * More **recent** lastObserved wins. - * Deterministic lexicographic order of status (`fixed` > `not_affected` > `under_investigation` > `affected`) as final tiebreaker. -6. **Explain**: mark accepted sources (`accepted=true; reason="weight"`/`"freshness"`), mark rejected sources with explicit `reason` (`"insufficient_justification"`, `"signature_unverified"`, `"lower_weight"`). - -> The algorithm is **pure** given S and policy snapshot; result is reproducible and hashed into `consensusDigest`. 
- ---- - -## 7) Query & export APIs - -All endpoints are versioned under `/api/v1/vex`. - -### 7.1 Query (online) - -``` -POST /claims/search - body: { vulnIds?: string[], productKeys?: string[], providers?: string[], since?: timestamp, limit?: int, pageToken?: string } - → { claims[], nextPageToken? } - -POST /consensus/search - body: { vulnIds?: string[], productKeys?: string[], policyRevisionId?: string, since?: timestamp, limit?: int, pageToken?: string } - → { entries[], nextPageToken? } - -POST /excititor/resolve (scope: vex.read) - body: { productKeys?: string[], purls?: string[], vulnerabilityIds: string[], policyRevisionId?: string } - → { policy, resolvedAt, results: [ { vulnerabilityId, productKey, status, sources[], conflicts[], decisions[], signals?, summary?, envelope: { artifact, contentSignature?, attestation?, attestationEnvelope?, attestationSignature? } } ] } -``` - -### 7.2 Exports (cacheable snapshots) - -``` -POST /exports - body: { signature: { vulnFilter?, productFilter?, providers?, since? }, format: raw|consensus|index, policyRevisionId?: string, force?: bool } - → { exportId, artifactSha256, rekor? } - -GET /exports/{exportId} → bytes (application/json or binary index) -GET /exports/{exportId}/meta → { signature, policyRevisionId, createdAt, artifactSha256, rekor? } -``` - -### 7.3 Provider operations - -``` -GET /providers → provider list & signature policy -POST /providers/{id}/refresh → trigger fetch/normalize window -GET /providers/{id}/status → last fetch, doc counts, signature stats -``` - -**Auth:** service‑to‑service via Authority tokens; operator operations via UI/CLI with RBAC. - ---- - -## 8) Attestation integration - -* Exports can be **DSSE‑signed** via **Signer** and logged to **Rekor v2** via **Attestor** (optional but recommended for regulated pipelines). -* `vex.exports.rekor` stores `{uuid, index, url}` when present. -* **Predicate type**: `https://stella-ops.org/attestations/vex-export/1` with fields: - - * `querySignature`, `policyRevisionId`, `artifactSha256`, `createdAt`. - ---- - -## 9) Configuration (YAML) - -```yaml -vexer: - mongo: { uri: "mongodb://mongo/vexer" } - s3: - endpoint: http://minio:9000 - bucket: stellaops - policy: - weights: - vendor: 1.0 - distro: 0.9 - platform: 0.7 - hub: 0.5 - attestation: 0.6 - providerOverrides: - redhat: 1.0 - suse: 0.95 - requireJustificationForNotAffected: true - signatureRequiredForFixed: true - minEvidence: - not_affected: - vendorOrTwoDistros: true - connectors: - - providerId: redhat - kind: csaf - baseUrl: https://access.redhat.com/security/data/csaf/v2/ - signaturePolicy: { type: pgp, keys: [ "…redhat-pgp-key…" ] } - windowDays: 7 - - providerId: suse - kind: csaf - baseUrl: https://ftp.suse.com/pub/projects/security/csaf/ - signaturePolicy: { type: pgp, keys: [ "…suse-pgp-key…" ] } - - providerId: ubuntu - kind: openvex - baseUrl: https://…/vex/ - signaturePolicy: { type: none } - - providerId: vendorX - kind: cyclonedx-vex - ociRef: ghcr.io/vendorx/vex@sha256:… - signaturePolicy: { type: cosign, cosignKeylessRoots: [ "sigstore-root" ] } -``` - ---- - -## 10) Security model - -* **Input signature verification** enforced per provider policy (PGP, cosign, x509). -* **Connector allowlists**: outbound fetch constrained to configured domains. -* **Tenant isolation**: per‑tenant DB prefixes or separate DBs; per‑tenant S3 prefixes; per‑tenant policies. -* **AuthN/Z**: Authority‑issued OpToks; RBAC roles (`vex.read`, `vex.admin`, `vex.export`). 
-* **No secrets in logs**; deterministic logging contexts include providerId, docDigest, claim keys. - ---- - -## 11) Performance & scale - -* **Targets:** - - * Normalize 10k VEX claims/minute/core. - * Consensus compute ≤ 50 ms for 1k unique `(vuln, product)` pairs in hot cache. - * Export (consensus) 1M rows in ≤ 60 s on 8 cores with streaming writer. - -* **Scaling:** - - * WebService handles control APIs; **Worker** background services (same image) execute fetch/normalize in parallel with rate‑limits; Mongo writes batched; upserts by natural keys. - * Exports stream straight to S3 (MinIO) with rolling buffers. - -* **Caching:** - - * `vex.cache` maps query signatures → export; TTL to avoid stampedes; optimistic reuse unless `force`. - ---- - -## 12) Observability - -* **Metrics:** - - * `vex.ingest.docs_total{provider}` - * `vex.normalize.claims_total{provider}` - * `vex.signature.failures_total{provider,method}` - * `vex.consensus.conflicts_total{vulnId}` - * `vex.exports.bytes{format}` / `vex.exports.latency_seconds` -* **Tracing:** spans for fetch, verify, parse, map, consensus, export. -* **Dashboards:** provider staleness, top conflicting vulns/components, signature posture, export cache hit‑rate. - ---- - -## 13) Testing matrix - -* **Connectors:** golden raw docs → deterministic claims (fixtures per provider/format). -* **Signature policies:** valid/invalid PGP/cosign/x509 samples; ensure rejects are recorded but not accepted. -* **Normalization edge cases:** platform‑only claims, free‑text justifications, non‑purl products. -* **Consensus:** conflict scenarios across tiers; check tie‑breakers; justification gates. -* **Performance:** 1M‑row export timing; memory ceilings; stream correctness. -* **Determinism:** same inputs + policy → identical `consensusDigest` and export bytes. -* **API contract tests:** pagination, filters, RBAC, rate limits. - ---- - -## 14) Integration points - -* **Backend Policy Engine** (in Scanner.WebService): calls `POST /excititor/resolve` (scope `vex.read`) with batched `(purl, vulnId)` pairs to fetch `rollupStatus + sources`. -* **Feedser**: provides alias graph (CVE↔vendor IDs) and may supply VEX‑adjacent metadata (e.g., KEV flag) for policy escalation. -* **UI**: VEX explorer screens use `/claims/search` and `/consensus/search`; show conflicts & provenance. -* **CLI**: `stellaops vex export --consensus --since 7d --out vex.json` for audits. - ---- - -## 15) Failure modes & fallback - -* **Provider unreachable:** stale thresholds trigger warnings; policy can down‑weight stale providers automatically (freshness factor). -* **Signature outage:** continue to ingest but mark `signatureState.verified=false`; consensus will likely exclude or down‑weight per policy. -* **Schema drift:** unknown fields are preserved as `evidence`; normalization rejects only on **invalid identity** or **status**. - ---- - -## 16) Rollout plan (incremental) - -1. **MVP**: OpenVEX + CSAF connectors for 3 major providers (e.g., Red Hat/SUSE/Ubuntu), normalization + consensus + `/excititor/resolve`. -2. **Signature policies**: PGP for distros; cosign for OCI. -3. **Exports + optional attestation**. -4. **CycloneDX VEX** connectors; platform claim expansion tables; UI explorer. -5. **Scale hardening**: export indexes; conflict analytics. 
- ---- - -## 17) Appendix — canonical JSON (stable ordering) - -All exports and consensus entries are serialized via `VexCanonicalJsonSerializer`: - -* UTF‑8 without BOM; -* keys sorted (ASCII); -* arrays sorted by `(providerId, vulnId, productKey, lastObserved)` unless semantic order mandated; -* timestamps in `YYYY‑MM‑DDThh:mm:ssZ`; -* no insignificant whitespace. - +# component_architecture_vexer.md — **Stella Ops Vexer** (2025Q4) + +> **Scope.** This document specifies the **Vexer** service: its purpose, trust model, data structures, APIs, plug‑in contracts, storage schema, normalization/consensus algorithms, performance budgets, testing matrix, and how it integrates with Scanner, Policy, Feedser, and the attestation chain. It is implementation‑ready. + +--- + +## 0) Mission & role in the platform + +**Mission.** Convert heterogeneous **VEX** statements (OpenVEX, CSAF VEX, CycloneDX VEX; vendor/distro/platform sources) into **canonical, queryable claims**; compute **deterministic consensus** per *(vuln, product)*; preserve **conflicts with provenance**; publish **stable, attestable exports** that the backend uses to suppress non‑exploitable findings, prioritize remaining risk, and explain decisions. + +**Boundaries.** + +* Vexer **does not** decide PASS/FAIL. It supplies **evidence** (statuses + justifications + provenance weights). +* Vexer preserves **conflicting claims** unchanged; consensus encodes how we would pick, but the raw set is always exportable. +* VEX consumption is **backend‑only**: Scanner never applies VEX. The backend’s **Policy Engine** asks Vexer for status evidence and then decides what to show. + +--- + +## 1) Inputs, outputs & canonical domain + +### 1.1 Accepted input formats (ingest) + +* **OpenVEX** JSON documents (attested or raw). +* **CSAF VEX** 2.x (vendor PSIRTs and distros commonly publish CSAF). +* **CycloneDX VEX** 1.4+ (standalone VEX or embedded VEX blocks). +* **OCI‑attached attestations** (VEX statements shipped as OCI referrers) — optional connectors. + +All connectors register **source metadata**: provider identity, trust tier, signature expectations (PGP/cosign/PKI), fetch windows, rate limits, and time anchors. + +### 1.2 Canonical model (normalized) + +Every incoming statement becomes a set of **VexClaim** records: + +``` +VexClaim +- providerId // 'redhat', 'suse', 'ubuntu', 'github', 'vendorX' +- vulnId // 'CVE-2025-12345', 'GHSA-xxxx', canonicalized +- productKey // canonical product identity (see §2.2) +- status // affected | not_affected | fixed | under_investigation +- justification? // for 'not_affected'/'affected' where provided +- introducedVersion? // semantics per provider (range or exact) +- fixedVersion? // where provided (range or exact) +- lastObserved // timestamp from source or fetch time +- provenance // doc digest, signature status, fetch URI, line/offset anchors +- evidence[] // raw source snippets for explainability +- supersedes? // optional cross-doc chain (docDigest → docDigest) +``` + +### 1.3 Exports (consumption) + +* **VexConsensus** per `(vulnId, productKey)` with: + + * `rollupStatus` (after policy weights/justification gates), + * `sources[]` (winning + losing claims with weights & reasons), + * `policyRevisionId` (identifier of the Vexer policy used), + * `consensusDigest` (stable SHA‑256 over canonical JSON). +* **Raw claims** export for auditing (unchanged, with provenance). +* **Provider snapshots** (per source, last N days) for operator debugging. 
+* **Index** optimized for backend joins: `(productKey, vulnId) → (status, confidence, sourceSet)`. + +All exports are **deterministic**, and (optionally) **attested** via DSSE and logged to Rekor v2. + +--- + +## 2) Identity model — products & joins + +### 2.1 Vuln identity + +* Accepts **CVE**, **GHSA**, vendor IDs (MSRC, RHSA…), distro IDs (DSA/USN/RHSA…) — normalized to `vulnId` with alias sets. +* **Alias graph** maintained (from Feedser) to map vendor/distro IDs → CVE (primary) and to **GHSA** where applicable. + +### 2.2 Product identity (`productKey`) + +* **Primary:** `purl` (Package URL). +* **Secondary links:** `cpe`, **OS package NVRA/EVR**, NuGet/Maven/Golang identity, and **OS package name** when purl unavailable. +* **Fallback:** `oci:/@` for image‑level VEX. +* **Special cases:** kernel modules, firmware, platforms → provider‑specific mapping helpers (connector captures provider’s product taxonomy → canonical `productKey`). + +> Vexer does not invent identities. If a provider cannot be mapped to purl/CPE/NVRA deterministically, we keep the native **product string** and mark the claim as **non‑joinable**; the backend will ignore it unless a policy explicitly whitelists that provider mapping. + +--- + +## 3) Storage schema (MongoDB) + +Database: `vexer` + +### 3.1 Collections + +**`vex.providers`** + +``` +_id: providerId +name, homepage, contact +trustTier: enum {vendor, distro, platform, hub, attestation} +signaturePolicy: { type: pgp|cosign|x509|none, keys[], certs[], cosignKeylessRoots[] } +fetch: { baseUrl, kind: http|oci|file, rateLimit, etagSupport, windowDays } +enabled: bool +createdAt, modifiedAt +``` + +**`vex.raw`** (immutable raw documents) + +``` +_id: sha256(doc bytes) +providerId +uri +ingestedAt +contentType +sig: { verified: bool, method: pgp|cosign|x509|none, keyId|certSubject, bundle? } +payload: GridFS pointer (if large) +disposition: kept|replaced|superseded +correlation: { replaces?: sha256, replacedBy?: sha256 } +``` + +**`vex.claims`** (normalized rows; dedupe on providerId+vulnId+productKey+docDigest) + +``` +_id +providerId +vulnId +productKey +status +justification? +introducedVersion? +fixedVersion? +lastObserved +docDigest +provenance { uri, line?, pointer?, signatureState } +evidence[] { key, value, locator } +indices: + - {vulnId:1, productKey:1} + - {providerId:1, lastObserved:-1} + - {status:1} + - text index (optional) on evidence.value for debugging +``` + +**`vex.consensus`** (rollups) + +``` +_id: sha256(canonical(vulnId, productKey, policyRevision)) +vulnId +productKey +rollupStatus +sources[]: [ + { providerId, status, justification?, weight, lastObserved, accepted:bool, reason } +] +policyRevisionId +evaluatedAt +consensusDigest // same as _id +indices: + - {vulnId:1, productKey:1} + - {policyRevisionId:1, evaluatedAt:-1} +``` + +**`vex.exports`** (manifest of emitted artifacts) + +``` +_id +querySignature +format: raw|consensus|index +artifactSha256 +rekor { uuid, index, url }? +createdAt +policyRevisionId +cacheable: bool +``` + +**`vex.cache`** + +``` +querySignature -> exportId (for fast reuse) +ttl, hits +``` + +**`vex.migrations`** + +* ordered migrations applied at bootstrap to ensure indexes. + +### 3.2 Indexing strategy + +* Hot path queries use exact `(vulnId, productKey)` and time‑bounded windows; compound indexes cover both. +* Providers list view by `lastObserved` for monitoring staleness. +* `vex.consensus` keyed by `(vulnId, productKey, policyRevision)` for deterministic reuse. 
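+
+As an illustration, the bootstrap migration for the hot-path indexes could be written with the official MongoDB C# driver as below; collection and field names follow §3.1, while the hosting/migration wiring is an assumption:
+
+```csharp
+using MongoDB.Bson;
+using MongoDB.Driver;
+
+var db = new MongoClient("mongodb://mongo/vexer").GetDatabase("vexer");
+
+// vex.claims: exact (vulnId, productKey) lookups, provider staleness, status scans.
+var claims = db.GetCollection<BsonDocument>("vex.claims");
+await claims.Indexes.CreateManyAsync(new[]
+{
+    new CreateIndexModel<BsonDocument>(
+        Builders<BsonDocument>.IndexKeys.Ascending("vulnId").Ascending("productKey")),
+    new CreateIndexModel<BsonDocument>(
+        Builders<BsonDocument>.IndexKeys.Ascending("providerId").Descending("lastObserved")),
+    new CreateIndexModel<BsonDocument>(
+        Builders<BsonDocument>.IndexKeys.Ascending("status")),
+});
+
+// vex.consensus: deterministic reuse per (vulnId, productKey) and policy revision.
+var consensus = db.GetCollection<BsonDocument>("vex.consensus");
+await consensus.Indexes.CreateManyAsync(new[]
+{
+    new CreateIndexModel<BsonDocument>(
+        Builders<BsonDocument>.IndexKeys.Ascending("vulnId").Ascending("productKey")),
+    new CreateIndexModel<BsonDocument>(
+        Builders<BsonDocument>.IndexKeys.Ascending("policyRevisionId").Descending("evaluatedAt")),
+});
+```
+
+Index creation is idempotent, so re-running the migration at bootstrap (per `vex.migrations`) is safe.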
+ +--- + +## 4) Ingestion pipeline + +### 4.1 Connector contract + +```csharp +public interface IVexConnector +{ + string ProviderId { get; } + Task FetchAsync(VexConnectorContext ctx, CancellationToken ct); // raw docs + Task NormalizeAsync(VexConnectorContext ctx, CancellationToken ct); // raw -> VexClaim[] +} +``` + +* **Fetch** must implement: window scheduling, conditional GET (ETag/If‑Modified‑Since), rate limiting, retry/backoff. +* **Normalize** parses the format, validates schema, maps product identities deterministically, emits `VexClaim` records with **provenance**. + +### 4.2 Signature verification (per provider) + +* **cosign (keyless or keyful)** for OCI referrers or HTTP‑served JSON with Sigstore bundles. +* **PGP** (provider keyrings) for distro/vendor feeds that sign docs. +* **x509** (mutual TLS / provider‑pinned certs) where applicable. +* Signature state is stored on **vex.raw.sig** and copied into **provenance.signatureState** on claims. + +> Claims from sources failing signature policy are marked `"signatureState.verified=false"` and **policy** can down‑weight or ignore them. + +### 4.3 Time discipline + +* For each doc, prefer **provider’s document timestamp**; if absent, use fetch time. +* Claims carry `lastObserved` which drives **tie‑breaking** within equal weight tiers. + +--- + +## 5) Normalization: product & status semantics + +### 5.1 Product mapping + +* **purl** first; **cpe** second; OS package NVRA/EVR mapping helpers (distro connectors) produce purls via canonical tables (e.g., rpm→purl:rpm, deb→purl:deb). +* Where a provider publishes **platform‑level** VEX (e.g., “RHEL 9 not affected”), connectors expand to known product inventory rules (e.g., map to sets of packages/components shipped in the platform). Expansion tables are versioned and kept per provider; every expansion emits **evidence** indicating the rule applied. +* If expansion would be speculative, the claim remains **platform‑scoped** with `productKey="platform:redhat:rhel:9"` and is flagged **non‑joinable**; backend can decide to use platform VEX only when Scanner proves the platform runtime. + +### 5.2 Status + justification mapping + +* Canonical **status**: `affected | not_affected | fixed | under_investigation`. +* **Justifications** normalized to a controlled vocabulary (CISA‑aligned), e.g.: + + * `component_not_present` + * `vulnerable_code_not_in_execute_path` + * `vulnerable_configuration_unused` + * `inline_mitigation_applied` + * `fix_available` (with `fixedVersion`) + * `under_investigation` +* Providers with free‑text justifications are mapped by deterministic tables; raw text preserved as `evidence`. + +--- + +## 6) Consensus algorithm + +**Goal:** produce a **stable**, explainable `rollupStatus` per `(vulnId, productKey)` given possibly conflicting claims. + +### 6.1 Inputs + +* Set **S** of `VexClaim` for the key. +* **Vexer policy snapshot**: + + * **weights** per provider tier and per provider overrides. + * **justification gates** (e.g., require justification for `not_affected` to be acceptable). + * **minEvidence** rules (e.g., `not_affected` must come from ≥1 vendor or 2 distros). + * **signature requirements** (e.g., require verified signature for ‘fixed’ to be considered). + +### 6.2 Steps + +1. **Filter invalid** claims by signature policy & justification gates → set `S'`. +2. **Score** each claim: + `score = weight(provider) * freshnessFactor(lastObserved)` where freshnessFactor ∈ [0.8, 1.0] for staleness decay (configurable; small effect). +3. 
**Aggregate** scores per status: `W(status) = Σ score(claims with that status)`. +4. **Pick** `rollupStatus = argmax_status W(status)`. +5. **Tie‑breakers** (in order): + + * Higher **max single** provider score wins (vendor > distro > platform > hub). + * More **recent** lastObserved wins. + * Deterministic lexicographic order of status (`fixed` > `not_affected` > `under_investigation` > `affected`) as final tiebreaker. +6. **Explain**: mark accepted sources (`accepted=true; reason="weight"`/`"freshness"`), mark rejected sources with explicit `reason` (`"insufficient_justification"`, `"signature_unverified"`, `"lower_weight"`). + +> The algorithm is **pure** given S and policy snapshot; result is reproducible and hashed into `consensusDigest`. + +--- + +## 7) Query & export APIs + +All endpoints are versioned under `/api/v1/vex`. + +### 7.1 Query (online) + +``` +POST /claims/search + body: { vulnIds?: string[], productKeys?: string[], providers?: string[], since?: timestamp, limit?: int, pageToken?: string } + → { claims[], nextPageToken? } + +POST /consensus/search + body: { vulnIds?: string[], productKeys?: string[], policyRevisionId?: string, since?: timestamp, limit?: int, pageToken?: string } + → { entries[], nextPageToken? } + +POST /excititor/resolve (scope: vex.read) + body: { productKeys?: string[], purls?: string[], vulnerabilityIds: string[], policyRevisionId?: string } + → { policy, resolvedAt, results: [ { vulnerabilityId, productKey, status, sources[], conflicts[], decisions[], signals?, summary?, envelope: { artifact, contentSignature?, attestation?, attestationEnvelope?, attestationSignature? } } ] } +``` + +### 7.2 Exports (cacheable snapshots) + +``` +POST /exports + body: { signature: { vulnFilter?, productFilter?, providers?, since? }, format: raw|consensus|index, policyRevisionId?: string, force?: bool } + → { exportId, artifactSha256, rekor? } + +GET /exports/{exportId} → bytes (application/json or binary index) +GET /exports/{exportId}/meta → { signature, policyRevisionId, createdAt, artifactSha256, rekor? } +``` + +### 7.3 Provider operations + +``` +GET /providers → provider list & signature policy +POST /providers/{id}/refresh → trigger fetch/normalize window +GET /providers/{id}/status → last fetch, doc counts, signature stats +``` + +**Auth:** service‑to‑service via Authority tokens; operator operations via UI/CLI with RBAC. + +--- + +## 8) Attestation integration + +* Exports can be **DSSE‑signed** via **Signer** and logged to **Rekor v2** via **Attestor** (optional but recommended for regulated pipelines). +* `vex.exports.rekor` stores `{uuid, index, url}` when present. +* **Predicate type**: `https://stella-ops.org/attestations/vex-export/1` with fields: + + * `querySignature`, `policyRevisionId`, `artifactSha256`, `createdAt`. 
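+
+A minimal sketch of assembling that predicate before hand-off to Signer; the record type, placeholder values, and serializer options are illustrative rather than the shipped implementation (canonical ordering for exports is defined in §17):
+
+```csharp
+using System;
+using System.Text.Json;
+
+var predicate = new VexExportPredicate(
+    QuerySignature:   "…query-signature…",
+    PolicyRevisionId: "…policy-revision…",
+    ArtifactSha256:   "…artifact-sha256…",
+    CreatedAt:        DateTimeOffset.UtcNow);
+
+// Camel-cased payload matching the documented field names.
+var payload = JsonSerializer.Serialize(predicate, new JsonSerializerOptions
+{
+    PropertyNamingPolicy = JsonNamingPolicy.CamelCase
+});
+
+public sealed record VexExportPredicate(
+    string QuerySignature,
+    string PolicyRevisionId,
+    string ArtifactSha256,
+    DateTimeOffset CreatedAt);
+```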
+
+---
+
+## 9) Configuration (YAML)
+
+```yaml
+vexer:
+  mongo: { uri: "mongodb://mongo/vexer" }
+  s3:
+    endpoint: http://minio:9000
+    bucket: stellaops
+  policy:
+    weights:
+      vendor: 1.0
+      distro: 0.9
+      platform: 0.7
+      hub: 0.5
+      attestation: 0.6
+    providerOverrides:
+      redhat: 1.0
+      suse: 0.95
+    requireJustificationForNotAffected: true
+    signatureRequiredForFixed: true
+    minEvidence:
+      not_affected:
+        vendorOrTwoDistros: true
+  connectors:
+    - providerId: redhat
+      kind: csaf
+      baseUrl: https://access.redhat.com/security/data/csaf/v2/
+      signaturePolicy: { type: pgp, keys: [ "…redhat-pgp-key…" ] }
+      windowDays: 7
+    - providerId: suse
+      kind: csaf
+      baseUrl: https://ftp.suse.com/pub/projects/security/csaf/
+      signaturePolicy: { type: pgp, keys: [ "…suse-pgp-key…" ] }
+    - providerId: ubuntu
+      kind: openvex
+      baseUrl: https://…/vex/
+      signaturePolicy: { type: none }
+    - providerId: vendorX
+      kind: cyclonedx-vex
+      ociRef: ghcr.io/vendorx/vex@sha256:…
+      signaturePolicy: { type: cosign, cosignKeylessRoots: [ "sigstore-root" ] }
+```
+
+---
+
+## 10) Security model
+
+* **Input signature verification** enforced per provider policy (PGP, cosign, x509).
+* **Connector allowlists**: outbound fetch constrained to configured domains.
+* **Tenant isolation**: per‑tenant DB prefixes or separate DBs; per‑tenant S3 prefixes; per‑tenant policies.
+* **AuthN/Z**: Authority‑issued OpToks; RBAC roles (`vex.read`, `vex.admin`, `vex.export`).
+* **No secrets in logs**; deterministic logging contexts include providerId, docDigest, claim keys.
+
+---
+
+## 11) Performance & scale
+
+* **Targets:**
+
+  * Normalize 10k VEX claims/minute/core.
+  * Consensus compute ≤ 50 ms for 1k unique `(vuln, product)` pairs in hot cache.
+  * Export (consensus) 1M rows in ≤ 60 s on 8 cores with streaming writer.
+
+* **Scaling:**
+
+  * WebService handles control APIs; **Worker** background services (same image) execute fetch/normalize in parallel with rate‑limits; Mongo writes batched; upserts by natural keys.
+  * Exports stream straight to S3 (MinIO) with rolling buffers.
+
+* **Caching:**
+
+  * `vex.cache` maps query signatures → export; TTL to avoid stampedes; optimistic reuse unless `force`.
+
+---
+
+## 12) Observability
+
+* **Metrics:**
+
+  * `vex.ingest.docs_total{provider}`
+  * `vex.normalize.claims_total{provider}`
+  * `vex.signature.failures_total{provider,method}`
+  * `vex.consensus.conflicts_total{vulnId}`
+  * `vex.exports.bytes{format}` / `vex.exports.latency_seconds`
+* **Tracing:** spans for fetch, verify, parse, map, consensus, export.
+* **Dashboards:** provider staleness, top conflicting vulns/components, signature posture, export cache hit‑rate.
+
+---
+
+## 13) Testing matrix
+
+* **Connectors:** golden raw docs → deterministic claims (fixtures per provider/format).
+* **Signature policies:** valid/invalid PGP/cosign/x509 samples; ensure rejects are recorded but not accepted.
+* **Normalization edge cases:** platform‑only claims, free‑text justifications, non‑purl products.
+* **Consensus:** conflict scenarios across tiers; check tie‑breakers; justification gates.
+* **Performance:** 1M‑row export timing; memory ceilings; stream correctness.
+* **Determinism:** same inputs + policy → identical `consensusDigest` and export bytes (see the sketch below).
+* **API contract tests:** pagination, filters, RBAC, rate limits.
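+
+The determinism requirement above (same inputs + policy → identical `consensusDigest`) can be pinned with a small regression test along the following lines. This is a sketch under stated assumptions: it reuses the illustrative `VexClaim`/`VexConsensus` types from the scoring sketch at the end of §8, uses xUnit as the test framework, and stands in a simple SHA-256 over ordinal-sorted claim keys for the real `VexCanonicalJsonSerializer` digest described in §17.
+
+```csharp
+// Determinism sketch: the same claims and policy must always hash to the same digest.
+// Types come from the illustrative scoring sketch in §8; the digest construction is assumed.
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using System.Security.Cryptography;
+using System.Text;
+using Xunit;
+
+public class ConsensusDeterminismTests
+{
+    private static string Digest(IEnumerable<VexClaim> claims, DateTimeOffset now)
+    {
+        var ordered = claims.OrderBy(c => c.ProviderId, StringComparer.Ordinal).ToArray();
+        var rollup = VexConsensus.Resolve(ordered, now);
+
+        // Stand-in for the canonical serializer: stable key order, invariant formatting, SHA-256.
+        var canonical = string.Join("\n",
+            ordered.Select(c => $"{c.ProviderId}|{c.Status}|{c.LastObserved:O}")
+                   .Append($"rollup|{rollup}"));
+
+        return Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes(canonical)));
+    }
+
+    [Fact]
+    public void SameInputsAndPolicyProduceIdenticalDigest()
+    {
+        var now = DateTimeOffset.Parse("2025-10-28T00:00:00+00:00");
+        var claims = new[]
+        {
+            new VexClaim("redhat", VexStatus.NotAffected, 1.0, now.AddDays(-3)),
+            new VexClaim("ubuntu", VexStatus.Affected, 0.9, now.AddDays(-10)),
+        };
+
+        // Re-evaluating with the input order shuffled must still agree byte-for-byte.
+        Assert.Equal(Digest(claims, now), Digest(claims.Reverse(), now));
+    }
+}
+```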
+
+---
+
+## 14) Integration points
+
+* **Backend Policy Engine** (in Scanner.WebService): calls `POST /excititor/resolve` (scope `vex.read`) with batched `(purl, vulnId)` pairs to fetch `rollupStatus + sources`.
+* **Feedser**: provides alias graph (CVE↔vendor IDs) and may supply VEX‑adjacent metadata (e.g., KEV flag) for policy escalation.
+* **UI**: VEX explorer screens use `/claims/search` and `/consensus/search`; show conflicts & provenance.
+* **CLI**: `stellaops vex export --consensus --since 7d --out vex.json` for audits.
+
+---
+
+## 15) Failure modes & fallback
+
+* **Provider unreachable:** stale thresholds trigger warnings; policy can down‑weight stale providers automatically (freshness factor).
+* **Signature outage:** continue to ingest but mark `signatureState.verified=false`; consensus will likely exclude or down‑weight per policy.
+* **Schema drift:** unknown fields are preserved as `evidence`; normalization rejects only on **invalid identity** or **status**.
+
+---
+
+## 16) Rollout plan (incremental)
+
+1. **MVP**: OpenVEX + CSAF connectors for 3 major providers (e.g., Red Hat/SUSE/Ubuntu), normalization + consensus + `/excititor/resolve`.
+2. **Signature policies**: PGP for distros; cosign for OCI.
+3. **Exports + optional attestation**.
+4. **CycloneDX VEX** connectors; platform claim expansion tables; UI explorer.
+5. **Scale hardening**: export indexes; conflict analytics.
+
+---
+
+## 17) Appendix — canonical JSON (stable ordering)
+
+All exports and consensus entries are serialized via `VexCanonicalJsonSerializer`:
+
+* UTF‑8 without BOM;
+* keys sorted (ASCII);
+* arrays sorted by `(providerId, vulnId, productKey, lastObserved)` unless semantic order mandated;
+* timestamps in `YYYY‑MM‑DDThh:mm:ssZ`;
+* no insignificant whitespace.
+
 diff --git a/docs/README.md b/docs/README.md index 523d6ceb..2491e2be 100755 --- a/docs/README.md +++ b/docs/README.md @@ -82,7 +82,7 @@ Everything here is open‑source and versioned — when you check out a git ta - **70a – [Policy Gateway](policy/gateway.md)** - **71 – [Policy Examples](examples/policies/README.md)** - **72 – [Policy FAQ](faq/policy-faq.md)** -- **73 – [Policy Run DTOs](../src/StellaOps.Scheduler.Models/docs/SCHED-MODELS-20-001-POLICY-RUNS.md)** +- **73 – [Policy Run DTOs](../src/Scheduler/__Libraries/StellaOps.Scheduler.Models/docs/SCHED-MODELS-20-001-POLICY-RUNS.md)** - **30 – [Fixture Maintenance](dev/fixtures.md)** - **74 – [Export Center Overview](export-center/overview.md)** - **75 – [Export Center Architecture](export-center/architecture.md)** @@ -147,10 +147,10 @@ Everything here is open‑source and versioned — when you check out a git ta > Imposed rule: Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. -- **Aggregation-Only Contract (AOC).** Ingestion services aggregate and link facts only—derived precedence, severity, and safe-fix hints live in Policy overlays and dedicated explorers. Review [`../AGENTS.md`](../AGENTS.md) and the AOC guardrails in [`aoc/aoc-guardrails.md`](aoc/aoc-guardrails.md). +- **Aggregation-Only Contract (AOC).** Ingestion services aggregate and link facts only—derived precedence, severity, and safe-fix hints live in Policy overlays and dedicated explorers. Review [`implplan/AGENTS.md`](implplan/AGENTS.md) and the AOC guardrails in [`aoc/aoc-guardrails.md`](aoc/aoc-guardrails.md). - **Cartographer owns graphs.** SBOM Service emits projections/events; Cartographer (`CARTO-GRAPH-21-00x`) builds graph storage, overlays, and tiles. See `ARCHITECTURE_CONCELIER.md` (Cartographer handshake section) for handoff boundaries.
- **Notifier replaces legacy Notify.** Sprint‑15 `StellaOps.Notify.*` tasks are frozen; use the Notifications Studio/Notifier backlogs (`NOTIFY-SVC-38..40`, `WEB-NOTIFY-3x-00x`, `CLI-NOTIFY-3x-00x`). -- **Dedicated services for Vuln & Policy.** Vuln Explorer work flows through `src/StellaOps.VulnExplorer.Api`/Console/CLI (Sprint 29); gateway routes proxy only. Policy Engine remains the sole source for precedence/suppression overlays. +- **Dedicated services for Vuln & Policy.** Vuln Explorer work flows through `src/VulnExplorer/StellaOps.VulnExplorer.Api`/Console/CLI (Sprint 29); gateway routes proxy only. Policy Engine remains the sole source for precedence/suppression overlays. - **Cleanup log.** The backlog consolidation summary lives in [`backlog/2025-10-cleanup.md`](backlog/2025-10-cleanup.md). © 2025 Stella Ops contributors – licensed AGPL‑3.0‑or‑later diff --git a/docs/TASKS.md b/docs/TASKS.md index 98984bd4..2d529135 100644 --- a/docs/TASKS.md +++ b/docs/TASKS.md @@ -1,381 +1,381 @@ -# Docs Guild Task Board (UTC 2025-10-10) - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DOC7.README-INDEX | DONE (2025-10-17) | Docs Guild | — | Refresh index docs (docs/README.md + root README) after architecture dossier split and Offline Kit overhaul. | ✅ ToC reflects new component architecture docs; ✅ root README highlights updated doc set; ✅ Offline Kit guide linked correctly. | -| DOC4.AUTH-PDG | DONE (2025-10-19) | Docs Guild, Plugin Team | PLG6.DOC | Copy-edit `docs/dev/31_AUTHORITY_PLUGIN_DEVELOPER_GUIDE.md`, export lifecycle diagram, add LDAP RFC cross-link. | ✅ PR merged with polish; ✅ Diagram committed; ✅ Slack handoff posted. | -| DOC1.AUTH | DONE (2025-10-12) | Docs Guild, Authority Core | CORE5B.DOC | Draft `docs/11_AUTHORITY.md` covering architecture, configuration, bootstrap flows. | ✅ Architecture + config sections approved by Core; ✅ Samples reference latest options; ✅ Offline note added. | -| DOC3.Concelier-Authority | DONE (2025-10-12) | Docs Guild, DevEx | FSR4 | Polish operator/runbook sections (DOC3/DOC5) to document Concelier authority rollout, bypass logging, and enforcement checklist. | ✅ DOC3/DOC5 updated with audit runbook references; ✅ enforcement deadline highlighted; ✅ Docs guild sign-off. | -| DOC5.Concelier-Runbook | DONE (2025-10-12) | Docs Guild | DOC3.Concelier-Authority | Produce dedicated Concelier authority audit runbook covering log fields, monitoring recommendations, and troubleshooting steps. | ✅ Runbook published; ✅ linked from DOC3/DOC5; ✅ alerting guidance included. | -| FEEDDOCS-DOCS-05-001 | DONE (2025-10-11) | Docs Guild | FEEDMERGE-ENGINE-04-001, FEEDMERGE-ENGINE-04-002 | Publish Concelier conflict resolution runbook covering precedence workflow, merge-event auditing, and Sprint 3 metrics. | ✅ `docs/ops/concelier-conflict-resolution.md` committed; ✅ metrics/log tables align with latest merge code; ✅ Ops alert guidance handed to Concelier team. | -| FEEDDOCS-DOCS-05-002 | DONE (2025-10-16) | Docs Guild, Concelier Ops | FEEDDOCS-DOCS-05-001 | Ops sign-off captured: conflict runbook circulated, alert thresholds tuned, and rollout decisions documented in change log. | ✅ Ops review recorded; ✅ alert thresholds finalised using `docs/ops/concelier-authority-audit-runbook.md`; ✅ change-log entry linked from runbook once GHSA/NVD/OSV regression fixtures land. 
| -| DOCS-ADR-09-001 | DONE (2025-10-19) | Docs Guild, DevEx | — | Establish ADR process (`docs/adr/0000-template.md`) and document usage guidelines. | Template published; README snippet linking ADR process; announcement posted (`docs/updates/2025-10-18-docs-guild.md`). | -| DOCS-EVENTS-09-002 | DONE (2025-10-19) | Docs Guild, Platform Events | SCANNER-EVENTS-15-201 | Publish event schema catalog (`docs/events/`) for `scanner.report.ready@1`, `scheduler.rescan.delta@1`, `attestor.logged@1`. | Schemas validated (Ajv CI hooked); docs/events/README summarises usage; Platform Events notified via `docs/updates/2025-10-18-docs-guild.md`. | -| DOCS-EVENTS-09-003 | DONE (2025-10-19) | Docs Guild | DOCS-EVENTS-09-002 | Add human-readable envelope field references and canonical payload samples for published events, including offline validation workflow. | Tables explain common headers/payload segments; versioned sample payloads committed; README links to validation instructions and samples. | -| DOCS-EVENTS-09-004 | DONE (2025-10-19) | Docs Guild, Scanner WebService | SCANNER-EVENTS-15-201 | Refresh scanner event docs to mirror DSSE-backed report fields, document `scanner.scan.completed`, and capture canonical sample validation. | Schemas updated for new payload shape; README references DSSE reuse and validation test; samples align with emitted events. | -| PLATFORM-EVENTS-09-401 | DONE (2025-10-21) | Platform Events Guild | DOCS-EVENTS-09-003 | Embed canonical event samples into contract/integration tests and ensure CI validates payloads against published schemas. | Notify models tests now run schema validation against `docs/events/*.json`, event schemas allow optional `attributes`, and docs capture the new validation workflow. | -| RUNTIME-GUILD-09-402 | DONE (2025-10-19) | Runtime Guild | SCANNER-POLICY-09-107 | Confirm Scanner WebService surfaces `quietedFindingCount` and progress hints to runtime consumers; document readiness checklist. | Runtime verification run captures enriched payload; checklist/doc updates merged; stakeholders acknowledge availability. | -| DOCS-CONCELIER-07-201 | DONE (2025-10-22) | Docs Guild, Concelier WebService | FEEDWEB-DOCS-01-001 | Final editorial review and publish pass for Concelier authority toggle documentation (Quickstart + operator guide). | Review feedback resolved, publish PR merged, release notes updated with documentation pointer. | -| DOCS-RUNTIME-17-004 | DONE (2025-10-26) | Docs Guild, Runtime Guild | SCANNER-EMIT-17-701, ZASTAVA-OBS-17-005, DEVOPS-REL-17-002 | Document build-id workflows: SBOM exposure, runtime event payloads (`process.buildId`), Scanner `/policy/runtime` response (`buildIds` list), debug-store layout, and operator guidance for symbol retrieval. | Architecture + operator docs updated with build-id sections (Observer, Scanner, CLI), examples show `readelf` output + debuginfod usage, references linked from Offline Kit/Release guides + CLI help. | -| DOCS-OBS-50-001 | BLOCKED (2025-10-26) | Docs Guild, Observability Guild | TELEMETRY-OBS-50-001 | Publish `/docs/observability/overview.md` introducing scope, imposed rule banner, architecture diagram, and tenant guarantees. | Doc merged with imposed rule banner; diagram committed; cross-links to telemetry stack + evidence locker docs. | -> Blocked: waiting on telemetry core deliverable (TELEMETRY-OBS-50-001) to finalise architecture details and diagrams. 
-| DOCS-OBS-50-002 | TODO | Docs Guild, Security Guild | TELEMETRY-OBS-50-002 | Author `/docs/observability/telemetry-standards.md` detailing common fields, scrubbing policy, sampling defaults, and redaction override procedure. | Doc merged; imposed rule banner present; examples validated with telemetry fixtures; security review sign-off captured. | -| DOCS-OBS-50-003 | TODO | Docs Guild, Observability Guild | TELEMETRY-OBS-50-001 | Create `/docs/observability/logging.md` covering structured log schema, dos/don'ts, tenant isolation, and copyable examples. | Doc merged with banner; sample logs redacted; lint passes; linked from coding standards. | -| DOCS-OBS-50-004 | TODO | Docs Guild, Observability Guild | TELEMETRY-OBS-50-002 | Draft `/docs/observability/tracing.md` explaining context propagation, async linking, CLI header usage, and sampling strategies. | Doc merged; imposed rule banner included; diagrams updated; references to CLI/Console features added. | -| DOCS-OBS-51-001 | TODO | Docs Guild, DevOps Guild | WEB-OBS-51-001, DEVOPS-OBS-51-001 | Publish `/docs/observability/metrics-and-slos.md` cataloging metrics, SLO targets, burn rate policies, and alert runbooks. | Doc merged with banner; SLO tables verified; alert workflows linked to incident runbook. | -| DOCS-SEC-OBS-50-001 | TODO | Docs Guild, Security Guild | TELEMETRY-OBS-51-002 | Update `/docs/security/redaction-and-privacy.md` to cover telemetry privacy controls, tenant opt-in debug, and imposed rule reminder. | Doc merged; redaction matrix updated; banner present; security sign-off recorded. | -| DOCS-INSTALL-50-001 | TODO | Docs Guild, DevOps Guild | DEVOPS-OBS-50-003 | Add `/docs/install/telemetry-stack.md` with collector deployment, exporter options, offline kit notes, and imposed rule banner. | Doc merged; install steps verified on air-gapped profile; banner present; screenshots attached. | -| DOCS-FORENSICS-53-001 | TODO | Docs Guild, Evidence Locker Guild | EVID-OBS-53-003 | Publish `/docs/forensics/evidence-locker.md` describing bundle formats, WORM options, retention, legal hold, and imposed rule banner. | Doc merged; manifest examples validated; banner present; legal hold steps aligned with API. | -| DOCS-FORENSICS-53-002 | TODO | Docs Guild, Provenance Guild | PROV-OBS-54-001 | Release `/docs/forensics/provenance-attestation.md` covering DSSE schema, signing process, verification workflow, and imposed rule banner. | Doc merged; sample statements reference fixtures; banner included; verification steps tested. | -| DOCS-FORENSICS-53-003 | TODO | Docs Guild, Timeline Indexer Guild | TIMELINE-OBS-52-003 | Publish `/docs/forensics/timeline.md` with schema, event kinds, filters, query examples, and imposed rule banner. | Doc merged; query examples validated; banner present; linked from Console/CLI docs. | -| DOCS-CONSOLE-OBS-52-001 | TODO | Docs Guild, Console Guild | CONSOLE-OBS-51-001 | Document `/docs/console/observability.md` showcasing Observability Hub widgets, trace/log search, imposed rule banner, and accessibility tips. | Doc merged; screenshots updated; banner present; navigation steps verified. | -| DOCS-CONSOLE-OBS-52-002 | TODO | Docs Guild, Console Guild | CONSOLE-OBS-52-002, CONSOLE-OBS-53-001 | Publish `/docs/console/forensics.md` covering timeline explorer, evidence viewer, attestation verifier, imposed rule banner, and troubleshooting. | Doc merged; banner included; workflows validated via Playwright capture; troubleshooting section populated. 
| -| DOCS-CLI-OBS-52-001 | TODO | Docs Guild, DevEx/CLI Guild | CLI-OBS-52-001 | Create `/docs/cli/observability.md` detailing `stella obs` commands, examples, exit codes, imposed rule banner, and scripting tips. | Doc merged; examples tested; banner included; CLI parity matrix updated. | -| DOCS-CLI-FORENSICS-53-001 | TODO | Docs Guild, DevEx/CLI Guild | CLI-FORENSICS-54-001 | Publish `/docs/cli/forensics.md` for snapshot/verify/attest commands with sample outputs, imposed rule banner, and offline workflows. | Doc merged; sample bundles verified; banner present; offline notes cross-linked. | -| DOCS-RUNBOOK-55-001 | TODO | Docs Guild, Ops Guild | DEVOPS-OBS-55-001, WEB-OBS-55-001 | Author `/docs/runbooks/incidents.md` describing incident mode activation, escalation steps, retention impact, verification checklist, and imposed rule banner. | Doc merged; runbook rehearsed; banner included; linked from alerts. | -| DOCS-AOC-19-001 | DONE (2025-10-26) | Docs Guild, Concelier Guild | CONCELIER-WEB-AOC-19-001, EXCITITOR-WEB-AOC-19-001 | Author `/docs/ingestion/aggregation-only-contract.md` covering philosophy, invariants, schemas, error codes, migration, observability, and security checklist. | New doc published with compliance checklist; cross-links from existing docs added. | -| DOCS-AOC-19-002 | DONE (2025-10-26) | Docs Guild, Architecture Guild | DOCS-AOC-19-001 | Update `/docs/architecture/overview.md` to include AOC boundary, raw stores, and sequence diagram (fetch → guard → raw insert → policy evaluation). | Overview doc updated with diagrams/text; lint passes; stakeholders sign off. | -| DOCS-AOC-19-003 | DONE (2025-10-26) | Docs Guild, Policy Guild | POLICY-AOC-19-003 | Refresh `/docs/architecture/policy-engine.md` clarifying ingestion boundary, raw inputs, and policy-only derived data. | Doc highlights raw-only ingestion contract, updated diagrams merge, compliance checklist added. | -| DOCS-AOC-19-004 | DONE (2025-10-26) | Docs Guild, UI Guild | UI-AOC-19-001 | Extend `/docs/ui/console.md` with Sources dashboard tiles, violation drill-down workflow, and verification action. | UI doc updated with screenshots/flow descriptions, compliance checklist appended. | -> DOCS-AOC-19-004: Architecture overview & policy-engine updates landed 2025-10-26; incorporate the new AOC boundary diagrams and metrics references. -| DOCS-AOC-19-005 | DONE (2025-10-26) | Docs Guild, CLI Guild | CLI-AOC-19-003 | Update `/docs/cli/cli-reference.md` with `stella sources ingest --dry-run` and `stella aoc verify` usage, exit codes, and offline notes. | CLI reference + quickstart sections updated; examples validated; compliance checklist added. | -> DOCS-AOC-19-005: New ingestion reference + architecture overview published 2025-10-26; ensure CLI docs link to both and surface AOC exit codes mapping. -| DOCS-AOC-19-006 | DONE (2025-10-26) | Docs Guild, Observability Guild | CONCELIER-WEB-AOC-19-002, EXCITITOR-WEB-AOC-19-002 | Document new metrics/traces/log keys in `/docs/observability/observability.md`. | Observability doc lists new metrics/traces/log fields; dashboards referenced; compliance checklist appended. | -| DOCS-AOC-19-007 | DONE (2025-10-26) | Docs Guild, Authority Core | AUTH-AOC-19-001 | Update `/docs/security/authority-scopes.md` with new ingestion scopes and tenancy enforcement notes. | Doc reflects new scopes, sample policies updated, compliance checklist added. 
| -| DOCS-AOC-19-008 | DONE (2025-10-26) | Docs Guild, DevOps Guild | DEVOPS-AOC-19-002 | Refresh `/docs/deploy/containers.md` to cover validator enablement, guard env flags, and read-only verify user. | Deploy doc updated; offline kit section mentions validator scripts; compliance checklist appended. | -| DOCS-AOC-19-009 | DONE (2025-10-26) | Docs Guild, Authority Core | AUTH-AOC-19-001 | Update AOC docs/samples to reflect new `advisory:*`, `vex:*`, and `aoc:verify` scopes. | Docs reference new scopes, samples aligned, compliance checklist updated. | - -## Air-Gapped Mode (Epic 16) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DOCS-AIRGAP-56-001 | TODO | Docs Guild, AirGap Controller Guild | AIRGAP-CTL-56-002 | Publish `/docs/airgap/overview.md` outlining modes, lifecycle, responsibilities, and imposed rule banner. | Doc merged; banner present; diagrams included. | -| DOCS-AIRGAP-56-002 | TODO | Docs Guild, DevOps Guild | DEVOPS-AIRGAP-56-001 | Author `/docs/airgap/sealing-and-egress.md` covering network policies, EgressPolicy facade usage, and verification steps. | Doc merged; examples validated; banner included. | -| DOCS-AIRGAP-56-003 | TODO | Docs Guild, Exporter Guild | EXPORT-AIRGAP-56-001 | Create `/docs/airgap/mirror-bundles.md` describing bundle format, DSSE/TUF/Merkle validation, creation/import workflows. | Doc merged; sample commands verified; banner present. | -| DOCS-AIRGAP-56-004 | TODO | Docs Guild, Deployment Guild | DEVOPS-AIRGAP-56-003 | Publish `/docs/airgap/bootstrap.md` detailing Bootstrap Pack creation, validation, and install procedures. | Doc merged; checklist appended; screenshots verified. | -| DOCS-AIRGAP-57-001 | TODO | Docs Guild, AirGap Time Guild | AIRGAP-TIME-58-001 | Write `/docs/airgap/staleness-and-time.md` explaining time anchors, drift policies, staleness budgets, and UI indicators. | Doc merged; math checked; banner included. | -| DOCS-AIRGAP-57-002 | TODO | Docs Guild, Console Guild | CONSOLE-AIRGAP-57-001 | Publish `/docs/console/airgap.md` covering sealed badge, import wizard, staleness dashboards. | Doc merged; screenshots captured; banner present. | -| DOCS-AIRGAP-57-003 | TODO | Docs Guild, CLI Guild | CLI-AIRGAP-57-001 | Publish `/docs/cli/airgap.md` documenting commands, examples, exit codes. | Doc merged; examples validated; banner present. | -| DOCS-AIRGAP-57-004 | TODO | Docs Guild, Ops Guild | DEVOPS-AIRGAP-56-002 | Create `/docs/airgap/operations.md` with runbooks for imports, failure recovery, and auditing. | Doc merged; runbooks rehearsed; banner included. | -| DOCS-AIRGAP-58-001 | TODO | Docs Guild, Product Guild | CONSOLE-AIRGAP-58-002 | Provide `/docs/airgap/degradation-matrix.md` enumerating feature availability, fallbacks, remediation. | Doc merged; matrix reviewed; banner included. | -| DOCS-AIRGAP-58-002 | TODO | Docs Guild, Security Guild | PROV-OBS-54-001 | Update `/docs/security/trust-and-signing.md` with DSSE/TUF roots, rotation, and signed time tokens. | Doc merged; security sign-off recorded; banner present. | -| DOCS-AIRGAP-58-003 | TODO | Docs Guild, DevEx Guild | AIRGAP-POL-56-001 | Publish `/docs/dev/airgap-contracts.md` describing EgressPolicy usage, sealed-mode tests, linting. | Doc merged; sample code validated; banner included. 
| -| DOCS-AIRGAP-58-004 | TODO | Docs Guild, Evidence Locker Guild | EVID-OBS-55-001 | Document `/docs/airgap/portable-evidence.md` for exporting/importing portable evidence bundles across enclaves. | Doc merged; verification steps tested; banner present. | - -## SDKs & OpenAPI (Epic 17) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DOCS-OAS-61-001 | TODO | Docs Guild, API Contracts Guild | OAS-61-002 | Publish `/docs/api/overview.md` covering auth, tenancy, pagination, idempotency, rate limits with banner. | Doc merged; examples validated; banner present. | -| DOCS-OAS-61-002 | TODO | Docs Guild, API Governance Guild | APIGOV-61-001 | Author `/docs/api/conventions.md` capturing naming, errors, filters, sorting, examples. | Doc merged; lint passes; banner included. | -| DOCS-OAS-61-003 | TODO | Docs Guild, API Governance Guild | APIGOV-63-001 | Publish `/docs/api/versioning.md` describing SemVer, deprecation headers, migration playbooks. | Doc merged; example headers validated; banner present. | -| DOCS-OAS-62-001 | TODO | Docs Guild, Developer Portal Guild | DEVPORT-62-002 | Stand up `/docs/api/reference/` auto-generated site; integrate with portal nav. | Reference site builds; search works; banner included. | -| DOCS-SDK-62-001 | TODO | Docs Guild, SDK Generator Guild | SDKGEN-63-001 | Publish `/docs/sdks/overview.md` plus language guides (`typescript.md`, `python.md`, `go.md`, `java.md`). | Docs merged; code samples pulled from tested examples; banner present. | -| DOCS-DEVPORT-62-001 | TODO | Docs Guild, Developer Portal Guild | DEVPORT-62-001 | Document `/docs/devportal/publishing.md` for build pipeline, offline bundle steps. | Doc merged; cross-links validated; banner included. | -| DOCS-CONTRIB-62-001 | TODO | Docs Guild, API Governance Guild | APIGOV-61-001 | Publish `/docs/contributing/api-contracts.md` detailing how to edit OAS, lint rules, compatibility checks. | Doc merged; banner present; examples validated. | -| DOCS-TEST-62-001 | TODO | Docs Guild, Contract Testing Guild | CONTR-62-001 | Author `/docs/testing/contract-testing.md` covering mock server, replay tests, golden fixtures. | Doc merged; references to tooling validated; banner present. | -| DOCS-SEC-62-001 | TODO | Docs Guild, Authority Core | AUTH-AIRGAP-56-001 | Update `/docs/security/auth-scopes.md` with OAuth2/PAT scopes, tenancy header usage. | Doc merged; scope tables verified; banner included. | -| DOCS-AIRGAP-DEVPORT-64-001 | TODO | Docs Guild, DevPortal Offline Guild | DVOFF-64-001 | Create `/docs/airgap/devportal-offline.md` describing offline bundle usage and verification. | Doc merged; verification steps tested; banner present. | - -## Risk Profiles (Epic 18) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DOCS-RISK-66-001 | TODO | Docs Guild, Risk Profile Schema Guild | POLICY-RISK-66-001 | Publish `/docs/risk/overview.md` covering concepts and glossary. | Doc merged with banner; terminology reviewed. | -| DOCS-RISK-66-002 | TODO | Docs Guild, Policy Guild | POLICY-RISK-66-003 | Author `/docs/risk/profiles.md` (authoring, versioning, scope). | Doc merged; schema examples validated; banner present. | -| DOCS-RISK-66-003 | TODO | Docs Guild, Risk Engine Guild | RISK-ENGINE-67-001 | Publish `/docs/risk/factors.md` cataloging signals, transforms, reducers, TTLs. | Document merged; tables verified; banner included. 
| -| DOCS-RISK-66-004 | TODO | Docs Guild, Risk Engine Guild | RISK-ENGINE-66-002 | Create `/docs/risk/formulas.md` detailing math, normalization, gating, severity. | Doc merged; equations rendered; banner present. | -| DOCS-RISK-67-001 | TODO | Docs Guild, Risk Engine Guild | RISK-ENGINE-68-001 | Publish `/docs/risk/explainability.md` showing artifact schema and UI screenshots. | Doc merged; CLI examples validated; banner included. | -| DOCS-RISK-67-002 | TODO | Docs Guild, API Guild | POLICY-RISK-67-002 | Produce `/docs/risk/api.md` with endpoint reference/examples. | Doc merged; OAS examples synced; banner present. | -| DOCS-RISK-67-003 | TODO | Docs Guild, Console Guild | CONSOLE-RISK-66-001 | Document `/docs/console/risk-ui.md` for authoring, simulation, dashboards. | Doc merged; screenshots updated; banner included. | -| DOCS-RISK-67-004 | TODO | Docs Guild, CLI Guild | CLI-RISK-66-001 | Publish `/docs/cli/risk.md` covering CLI workflows. | Doc merged; command examples validated; banner present. | -| DOCS-RISK-68-001 | TODO | Docs Guild, Export Guild | RISK-BUNDLE-69-001 | Add `/docs/airgap/risk-bundles.md` for offline factor bundles. | Doc merged; verification steps confirmed; banner included. | -| DOCS-RISK-68-002 | TODO | Docs Guild, Security Guild | POLICY-RISK-66-003 | Update `/docs/security/aoc-invariants.md` with risk scoring provenance guarantees. | Doc merged; audit references updated; banner present. | - -## Attestor Console (Epic 19) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DOCS-ATTEST-73-001 | TODO | Docs Guild, Attestor Service Guild | ATTEST-TYPES-73-001 | Publish `/docs/attestor/overview.md` with imposed rule banner. | Doc merged; terminology validated. | -| DOCS-ATTEST-73-002 | TODO | Docs Guild, Attestation Payloads Guild | ATTEST-TYPES-73-002 | Write `/docs/attestor/payloads.md` with schemas/examples. | Doc merged; examples validated via tests. | -| DOCS-ATTEST-73-003 | TODO | Docs Guild, Policy Guild | POLICY-ATTEST-73-002 | Publish `/docs/attestor/policies.md` covering verification policies. | Doc merged; policy examples validated. | -| DOCS-ATTEST-73-004 | TODO | Docs Guild, Attestor Service Guild | ATTESTOR-73-002 | Add `/docs/attestor/workflows.md` detailing ingest, verify, bulk operations. | Doc merged; workflows tested. | -| DOCS-ATTEST-74-001 | TODO | Docs Guild, KMS Guild | KMS-73-001 | Publish `/docs/attestor/keys-and-issuers.md`. | Doc merged; rotation guidance verified. | -| DOCS-ATTEST-74-002 | TODO | Docs Guild, Transparency Guild | TRANSP-74-001 | Document `/docs/attestor/transparency.md` with witness usage/offline validation. | Doc merged; proofs validated. | -| DOCS-ATTEST-74-003 | TODO | Docs Guild, Attestor Console Guild | CONSOLE-ATTEST-73-001 | Write `/docs/console/attestor-ui.md` with screenshots/workflows. | Doc merged; screenshots captured; banner present. | -| DOCS-ATTEST-74-004 | TODO | Docs Guild, CLI Attestor Guild | CLI-ATTEST-73-001 | Publish `/docs/cli/attest.md` covering CLI usage. | Doc merged; commands validated. | -| DOCS-ATTEST-75-001 | TODO | Docs Guild, Export Attestation Guild | EXPORT-ATTEST-75-002 | Add `/docs/attestor/airgap.md` for attestation bundles. | Doc merged; verification steps confirmed. | -| DOCS-ATTEST-75-002 | TODO | Docs Guild, Security Guild | ATTESTOR-73-002 | Update `/docs/security/aoc-invariants.md` with attestation invariants. | Doc merged; invariants detailed. 
| -## Policy Engine v2 - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DOCS-POLICY-20-001 | DONE (2025-10-26) | Docs Guild, Policy Guild | POLICY-ENGINE-20-000 | Author `/docs/policy/overview.md` covering concepts, inputs/outputs, determinism, and compliance checklist. | Doc published with diagrams + glossary; lint passes; checklist included. | -| DOCS-POLICY-20-002 | DONE (2025-10-26) | Docs Guild, Policy Guild | POLICY-ENGINE-20-001 | Write `/docs/policy/dsl.md` with grammar, built-ins, examples, anti-patterns. | DSL doc includes grammar tables, examples, compliance checklist; validated against parser tests. | -| DOCS-POLICY-20-003 | DONE (2025-10-26) | Docs Guild, Authority Core | AUTH-POLICY-20-001 | Publish `/docs/policy/lifecycle.md` describing draft→approve workflow, roles, audit, compliance list. | Lifecycle doc linked from UI/CLI help; approvals roles documented; checklist appended. | -| DOCS-POLICY-20-004 | DONE (2025-10-26) | Docs Guild, Scheduler Guild | SCHED-MODELS-20-001 | Create `/docs/policy/runs.md` detailing run modes, incremental mechanics, cursors, replay. | Run doc includes sequence diagrams + compliance checklist; cross-links to scheduler docs. | -| DOCS-POLICY-20-005 | DONE (2025-10-26) | Docs Guild, BE-Base Platform Guild | WEB-POLICY-20-001 | Draft `/docs/api/policy.md` describing endpoints, schemas, error codes. | API doc validated against OpenAPI; examples included; checklist appended. | -| DOCS-POLICY-20-006 | DONE (2025-10-26) | Docs Guild, DevEx/CLI Guild | CLI-POLICY-20-002 | Produce `/docs/cli/policy.md` with command usage, exit codes, JSON output contracts. | CLI doc includes examples, exit codes, compliance checklist. | -| DOCS-POLICY-20-007 | DONE (2025-10-26) | Docs Guild, UI Guild | UI-POLICY-20-001 | Document `/docs/ui/policy-editor.md` covering editor, simulation, diff workflows, approvals. | UI doc includes screenshots/placeholders, accessibility notes, compliance checklist. | -| DOCS-POLICY-20-008 | DONE (2025-10-26) | Docs Guild, Architecture Guild | POLICY-ENGINE-20-003 | Write `/docs/architecture/policy-engine.md` (new epic content) with sequence diagrams, selection strategy, schema. | Architecture doc merged with diagrams; compliance checklist appended; references updated. | -| DOCS-POLICY-20-009 | DONE (2025-10-26) | Docs Guild, Observability Guild | POLICY-ENGINE-20-007 | Add `/docs/observability/policy.md` for metrics/traces/logs, sample dashboards. | Observability doc includes metrics tables, dashboard screenshots, checklist. | -| DOCS-POLICY-20-010 | DONE (2025-10-26) | Docs Guild, Security Guild | AUTH-POLICY-20-002 | Publish `/docs/security/policy-governance.md` covering scopes, approvals, tenancy, least privilege. | Security doc merged; compliance checklist appended; reviewed by Security Guild. | -| DOCS-POLICY-20-011 | DONE (2025-10-26) | Docs Guild, Policy Guild | POLICY-ENGINE-20-001 | Populate `/docs/examples/policies/` with baseline/serverless/internal-only samples and commentary. | Example policies committed with explanations; lint passes; compliance checklist per file. | -| DOCS-POLICY-20-012 | DONE (2025-10-26) | Docs Guild, Support Guild | WEB-POLICY-20-003 | Draft `/docs/faq/policy-faq.md` addressing common pitfalls, VEX conflicts, determinism issues. | FAQ published with Q/A entries, cross-links, compliance checklist. 
| - -## Graph Explorer v1 - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| - -## Link-Not-Merge v1 - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DOCS-LNM-22-001 | BLOCKED (2025-10-27) | Docs Guild, Concelier Guild | CONCELIER-LNM-21-001..003 | Author `/docs/advisories/aggregation.md` covering observation vs linkset, conflict handling, AOC requirements, and reviewer checklist. | Draft doc merged with examples + checklist; final sign-off blocked until Concelier schema/API tasks land. | -> Blocker (2025-10-27): `CONCELIER-LNM-21-001..003` still TODO; update doc + fixtures once schema/API implementations are available. -| DOCS-LNM-22-002 | BLOCKED (2025-10-27) | Docs Guild, Excititor Guild | EXCITITOR-LNM-21-001..003 | Publish `/docs/vex/aggregation.md` describing VEX observation/linkset model, product matching, conflicts. | Draft doc merged with fixtures; final approval blocked until Excititor observation/linkset work ships. | -> Blocker (2025-10-27): `EXCITITOR-LNM-21-001..003` remain TODO; refresh doc, fixtures, and examples post-implementation. -| DOCS-LNM-22-003 | BLOCKED (2025-10-27) | Docs Guild, BE-Base Platform Guild | WEB-LNM-21-001..003 | Update `/docs/api/advisories.md` and `/docs/api/vex.md` for new endpoints, parameters, errors, exports. | Draft pending gateway/API delivery; unblock once endpoints + OpenAPI specs are available. | -> Blocker (2025-10-27): `WEB-LNM-21-001..003` all TODO—no gateway endpoints/OpenAPI to document yet. -| DOCS-LNM-22-004 | TODO | Docs Guild, Policy Guild | POLICY-ENGINE-40-001 | Create `/docs/policy/effective-severity.md` detailing severity selection strategies from multiple sources. | Doc merged with policy examples; checklist included. | -| DOCS-LNM-22-005 | BLOCKED (2025-10-27) | Docs Guild, UI Guild | UI-LNM-22-001..003 | Document `/docs/ui/evidence-panel.md` with screenshots, conflict badges, accessibility guidance. | Awaiting UI implementation to capture screenshots + flows; unblock once Evidence panel ships. | -> Blocker (2025-10-27): `UI-LNM-22-001..003` all TODO; documentation requires final UI states and accessibility audit artifacts. - -## StellaOps Console (Sprint 23) - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DOCS-CONSOLE-23-001 | DONE (2025-10-26) | Docs Guild, Console Guild | CONSOLE-CORE-23-004 | Publish `/docs/ui/console-overview.md` covering IA, tenant model, global filters, and AOC alignment with compliance checklist. | Doc merged with diagrams + overview tables; checklist appended; Console Guild sign-off. | -| DOCS-CONSOLE-23-002 | DONE (2025-10-26) | Docs Guild, Console Guild | DOCS-CONSOLE-23-001 | Author `/docs/ui/navigation.md` detailing routes, breadcrumbs, keyboard shortcuts, deep links, and tenant context switching. | Navigation doc merged with shortcut tables and screenshots; accessibility checklist satisfied. | -| DOCS-CONSOLE-23-003 | DONE (2025-10-26) | Docs Guild, SBOM Service Guild, Console Guild | SBOM-CONSOLE-23-001, CONSOLE-FEAT-23-102 | Document `/docs/ui/sbom-explorer.md` (catalog, detail, graph overlays, exports) including compliance checklist and performance tips. | Doc merged with annotated screenshots, export instructions, and overlay examples; checklist appended. 
| -| DOCS-CONSOLE-23-004 | DONE (2025-10-26) | Docs Guild, Concelier Guild, Excititor Guild | CONCELIER-CONSOLE-23-001, EXCITITOR-CONSOLE-23-001 | Produce `/docs/ui/advisories-and-vex.md` explaining aggregation-not-merge, conflict indicators, raw viewers, and provenance banners. | Doc merged; raw JSON examples included; compliance checklist complete. | -| DOCS-CONSOLE-23-005 | DONE (2025-10-26) | Docs Guild, Policy Guild | POLICY-CONSOLE-23-001, CONSOLE-FEAT-23-104 | Write `/docs/ui/findings.md` describing filters, saved views, explain drawer, exports, and CLI parity callouts. | Doc merged with filter matrix + explain walkthrough; checklist appended. | -| DOCS-CONSOLE-23-006 | DONE (2025-10-26) | Docs Guild, Policy Guild, Product Ops | POLICY-CONSOLE-23-002, CONSOLE-FEAT-23-105 | Publish `/docs/ui/policies.md` with editor, simulation, approvals, compliance checklist, and RBAC mapping. | Doc merged; Monaco screenshots + simulation diff examples included; approval flow described; checklist appended. | -| DOCS-CONSOLE-23-007 | DONE (2025-10-26) | Docs Guild, Scheduler Guild | SCHED-CONSOLE-23-001, CONSOLE-FEAT-23-106 | Document `/docs/ui/runs.md` covering queues, live progress, diffs, retries, evidence downloads, and troubleshooting. | Doc merged with SSE troubleshooting, metrics references, compliance checklist. | -| DOCS-CONSOLE-23-008 | DONE (2025-10-26) | Docs Guild, Authority Guild | AUTH-CONSOLE-23-002, CONSOLE-FEAT-23-108 | Draft `/docs/ui/admin.md` describing users/roles, tenants, tokens, integrations, fresh-auth prompts, and RBAC mapping. | Doc merged with tables for scopes vs roles, screenshots, compliance checklist. | -| DOCS-CONSOLE-23-009 | DONE (2025-10-27) | Docs Guild, DevOps Guild | DOWNLOADS-CONSOLE-23-001, CONSOLE-FEAT-23-109 | Publish `/docs/ui/downloads.md` listing product images, commands, offline instructions, parity with CLI, and compliance checklist. | Doc merged; manifest sample included; copy-to-clipboard guidance documented; checklist complete. | -| DOCS-CONSOLE-23-010 | DONE (2025-10-27) | Docs Guild, Deployment Guild, Console Guild | DEVOPS-CONSOLE-23-002, CONSOLE-REL-23-301 | Write `/docs/deploy/console.md` (Helm, ingress, TLS, CSP, env vars, health checks) with compliance checklist. | Deploy doc merged; templates validated; CSP guidance included; checklist appended. | -| DOCS-CONSOLE-23-011 | DONE (2025-10-28) | Docs Guild, Deployment Guild | DOCS-CONSOLE-23-010 | Update `/docs/install/docker.md` to cover Console image, Compose/Helm usage, offline tarballs, parity with CLI. | Doc updated with new sections; commands validated; compliance checklist appended. | -| DOCS-CONSOLE-23-012 | DONE (2025-10-28) | Docs Guild, Security Guild | AUTH-CONSOLE-23-003, WEB-CONSOLE-23-002 | Publish `/docs/security/console-security.md` detailing OIDC flows, scopes, CSP, fresh-auth, evidence handling, and compliance checklist. | Security doc merged; threat model notes included; checklist appended. | -| DOCS-CONSOLE-23-013 | DONE (2025-10-28) | Docs Guild, Observability Guild | TELEMETRY-CONSOLE-23-001, CONSOLE-QA-23-403 | Write `/docs/observability/ui-telemetry.md` cataloguing metrics/logs/traces, dashboards, alerts, and feature flags. | Doc merged with instrumentation tables, dashboard screenshots, checklist appended. | -| DOCS-CONSOLE-23-014 | DONE (2025-10-28) | Docs Guild, Console Guild, CLI Guild | CONSOLE-DOC-23-502 | Maintain `/docs/cli-vs-ui-parity.md` matrix and integrate CI check guidance. 
| Matrix published with parity status, CI workflow documented, compliance checklist appended. | -> 2025-10-28: Install Docker guide references pending CLI commands (`stella downloads manifest`, `stella downloads mirror`, `stella console status`). Update once CLI parity lands. -| DOCS-CONSOLE-23-015 | DONE (2025-10-27) | Docs Guild, Architecture Guild | CONSOLE-CORE-23-001, WEB-CONSOLE-23-001 | Produce `/docs/architecture/console.md` describing frontend packages, data flow diagrams, SSE design, performance budgets. | Architecture doc merged with diagrams + compliance checklist; reviewers approve. | -| DOCS-CONSOLE-23-016 | DONE (2025-10-28) | Docs Guild, Accessibility Guild | CONSOLE-QA-23-402, CONSOLE-FEAT-23-102 | Refresh `/docs/accessibility.md` with Console-specific keyboard flows, color tokens, testing tools, and compliance checklist updates. | Accessibility doc updated; audits referenced; checklist appended. | -> 2025-10-28: Added guide covering keyboard matrix, screen reader behaviour, colour/focus tokens, testing workflow, offline guidance, and compliance checklist. -| DOCS-CONSOLE-23-017 | DONE (2025-10-27) | Docs Guild, Console Guild | CONSOLE-FEAT-23-101..109 | Create `/docs/examples/ui-tours.md` providing triage, audit, policy rollout walkthroughs with annotated screenshots and GIFs. | UI tours doc merged; capture instructions + asset placeholders committed; compliance checklist appended. | -| DOCS-CONSOLE-23-018 | DONE (2025-10-27) | Docs Guild, Security Guild | DOCS-CONSOLE-23-012 | Execute console security compliance checklist and capture Security Guild sign-off in Sprint 23 log. | Checklist completed; findings addressed or tickets filed; sign-off noted in updates file. | -| DOCS-LNM-22-006 | DONE (2025-10-27) | Docs Guild, Architecture Guild | CONCELIER-LNM-21-001..005, EXCITITOR-LNM-21-001..005 | Refresh `/docs/architecture/conseiller.md` and `/docs/architecture/excitator.md` describing observation/linkset pipelines and event contracts. | Architecture docs updated with observation/linkset flow + event tables; revisit once service implementations land. | -> Follow-up: align diagrams/examples after `CONCELIER-LNM-21` & `EXCITITOR-LNM-21` work merges (currently TODO). -| DOCS-LNM-22-007 | TODO | Docs Guild, Observability Guild | CONCELIER-LNM-21-005, EXCITITOR-LNM-21-005, DEVOPS-LNM-22-002 | Publish `/docs/observability/aggregation.md` with metrics/traces/logs/SLOs. | Observability doc merged; dashboards referenced; checklist appended. | -| DOCS-LNM-22-008 | TODO | Docs Guild, DevOps Guild | MERGE-LNM-21-001, CONCELIER-LNM-21-102 | Write `/docs/migration/no-merge.md` describing migration plan, backfill steps, rollback, feature flags. | Migration doc approved by stakeholders; checklist appended. | - -## Policy Engine + Editor v1 - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DOCS-POLICY-23-001 | TODO | Docs Guild, Policy Guild | POLICY-SPL-23-001..003 | Author `/docs/policy/overview.md` describing SPL philosophy, layering, and glossary with reviewer checklist. | Doc merged; lint passes; checklist appended. | -| DOCS-POLICY-23-002 | TODO | Docs Guild, Policy Guild | POLICY-SPL-23-001 | Write `/docs/policy/spl-v1.md` (language reference, JSON Schema, examples). | Reference published with schema snippets; checklist completed. 
| -| DOCS-POLICY-23-003 | TODO | Docs Guild, Policy Guild | POLICY-ENGINE-50-001..004 | Produce `/docs/policy/runtime.md` covering compiler, evaluator, caching, events, SLOs. | Runtime doc merged with diagrams; observability references included. | -| DOCS-POLICY-23-004 | TODO | Docs Guild, UI Guild | UI-POLICY-23-001..006 | Document `/docs/policy/editor.md` (UI walkthrough, validation, simulation, approvals). | Editor doc merged with screenshots; accessibility checklist satisfied. | -| DOCS-POLICY-23-005 | TODO | Docs Guild, Security Guild | AUTH-POLICY-23-001..002 | Publish `/docs/policy/governance.md` (roles, scopes, approvals, signing, exceptions). | Governance doc merged; checklist appended. | -| DOCS-POLICY-23-006 | TODO | Docs Guild, BE-Base Platform Guild | WEB-POLICY-23-001..004 | Update `/docs/api/policy.md` with new endpoints, schemas, errors, pagination. | API doc aligns with OpenAPI; examples validated; checklist included. | -| DOCS-POLICY-23-007 | TODO | Docs Guild, DevEx/CLI Guild | CLI-POLICY-23-004..006 | Update `/docs/cli/policy.md` for lint/simulate/activate/history commands, exit codes. | CLI doc updated; samples verified; checklist appended. | -| DOCS-POLICY-23-008 | TODO | Docs Guild, Architecture Guild | POLICY-ENGINE-50-005..006 | Refresh `/docs/architecture/policy-engine.md` with data model, sequence diagrams, event flows. | Architecture doc merged with diagrams; checklist appended. | -| DOCS-POLICY-23-009 | TODO | Docs Guild, DevOps Guild | MERGE-LNM-21-001, DEVOPS-LNM-22-001 | Create `/docs/migration/policy-parity.md` covering dual-run parity plan and rollback. | Migration doc approved; checklist appended. | -| DOCS-POLICY-23-010 | TODO | Docs Guild, UI Guild | UI-POLICY-23-006 | Write `/docs/ui/explainers.md` showing explain trees, evidence overlays, interpretation guidance. | Doc merged with annotated screenshots; checklist appended. | - -## Graph & Vuln Explorer v1 - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DOCS-GRAPH-24-001 | TODO | Docs Guild, UI Guild | UI-GRAPH-24-001..006 | Author `/docs/ui/sbom-graph-explorer.md` detailing overlays, filters, saved views, accessibility, and AOC visibility. | Doc merged; screenshots included; checklist appended. | -| DOCS-GRAPH-24-002 | TODO | Docs Guild, UI Guild | UI-GRAPH-24-005 | Publish `/docs/ui/vulnerability-explorer.md` covering table usage, grouping, fix suggestions, Why drawer. | Doc merged with annotated images; accessibility checklist satisfied. | -| DOCS-GRAPH-24-003 | TODO | Docs Guild, SBOM Service Guild | SBOM-GRAPH-24-001..003 | Create `/docs/architecture/graph-index.md` describing data model, ingestion pipeline, caches, events. | Architecture doc merged with diagrams; checklist appended. | -| DOCS-GRAPH-24-004 | TODO | Docs Guild, BE-Base Platform Guild | WEB-GRAPH-24-001..003 | Document `/docs/api/graph.md` and `/docs/api/vuln.md` avec endpoints, parameters, errors, RBAC. | API docs aligned with OpenAPI; examples validated; checklist appended. | -| DOCS-GRAPH-24-005 | TODO | Docs Guild, DevEx/CLI Guild | CLI-GRAPH-24-001..003 | Update `/docs/cli/graph-and-vuln.md` covering new CLI commands, exit codes, scripting. | CLI doc merged; examples tested; checklist appended. | -| DOCS-GRAPH-24-006 | TODO | Docs Guild, Policy Guild | POLICY-ENGINE-60-001..002 | Write `/docs/policy/ui-integration.md` explaining overlays, cache usage, simulator contracts. | Doc merged; references cross-linked; checklist appended. 
| -| DOCS-GRAPH-24-007 | TODO | Docs Guild, DevOps Guild | DEVOPS-GRAPH-24-001..003 | Produce `/docs/migration/graph-parity.md` with rollout plan, parity checks, fallback guidance. | Migration doc approved; checklist appended. | - -## Exceptions v1 - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DOCS-EXC-25-001 | TODO | Docs Guild, Governance Guild | WEB-EXC-25-001 | Author `/docs/governance/exceptions.md` covering lifecycle, scope patterns, examples, compliance checklist. | Doc merged; reviewers sign off; checklist included. | -| DOCS-EXC-25-002 | TODO | Docs Guild, Authority Core | AUTH-EXC-25-001 | Publish `/docs/governance/approvals-and-routing.md` detailing roles, routing matrix, MFA rules, audit trails. | Doc merged; routing examples validated; checklist appended. | -| DOCS-EXC-25-003 | TODO | Docs Guild, BE-Base Platform Guild | WEB-EXC-25-001..003 | Create `/docs/api/exceptions.md` with endpoints, payloads, errors, idempotency notes. | API doc aligned with OpenAPI; examples tested; checklist appended. | -| DOCS-EXC-25-004 | DONE (2025-10-27) | Docs Guild, Policy Guild | POLICY-ENGINE-70-001 | Document `/docs/policy/exception-effects.md` explaining evaluation order, conflicts, simulation. | Doc merged; tests cross-referenced; checklist appended. | -| DOCS-EXC-25-005 | TODO | Docs Guild, UI Guild | UI-EXC-25-001..004 | Write `/docs/ui/exception-center.md` with UI walkthrough, badges, accessibility, shortcuts. | Doc merged with screenshots; accessibility checklist completed. | -| DOCS-EXC-25-006 | TODO | Docs Guild, DevEx/CLI Guild | CLI-EXC-25-001..002 | Update `/docs/cli/exceptions.md` covering command usage and exit codes. | CLI doc updated; examples validated; checklist appended. | -| DOCS-EXC-25-007 | TODO | Docs Guild, DevOps Guild | SCHED-WORKER-25-101, DEVOPS-GRAPH-24-003 | Publish `/docs/migration/exception-governance.md` describing cutover from legacy suppressions, notifications, rollback. | Migration doc approved; checklist included. | - -> Update statuses (TODO/DOING/REVIEW/DONE/BLOCKED) as progress changes. Keep guides in sync with configuration samples under `etc/`. - -> Remark (2025-10-13, DOC4.AUTH-PDG): Rate limit guide published (`docs/security/rate-limits.md`) and handed to plugin docs team for diagram uplift once PLG6.DIAGRAM lands. - -## Orchestrator Dashboard (Epic 9) - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DOCS-ORCH-32-001 | TODO | Docs Guild | ORCH-SVC-32-001, AUTH-ORCH-32-001 | Author `/docs/orchestrator/overview.md` covering mission, roles, AOC alignment, governance, with imposed rule reminder. | Doc merged with diagrams; imposed rule statement included; entry linked from docs index. | -| DOCS-ORCH-32-002 | TODO | Docs Guild | ORCH-SVC-32-002 | Author `/docs/orchestrator/architecture.md` detailing scheduler, DAGs, rate limits, data model, message bus, storage layout, restating imposed rule. | Architecture doc merged; diagrams reviewed; imposed rule noted. | -| DOCS-ORCH-33-001 | TODO | Docs Guild | ORCH-SVC-33-001..004, WEB-ORCH-33-001 | Publish `/docs/orchestrator/api.md` (REST/WebSocket endpoints, payloads, error codes) with imposed rule note. | API doc merged; examples validated; imposed rule appended. 
| -| DOCS-ORCH-33-002 | TODO | Docs Guild | CONSOLE-ORCH-32-002, CONSOLE-ORCH-33-001..002 | Publish `/docs/orchestrator/console.md` covering screens, a11y, live updates, control actions, reiterating imposed rule. | Console doc merged with screenshots; accessibility checklist done; imposed rule statement present. | -| DOCS-ORCH-33-003 | TODO | Docs Guild | CLI-ORCH-33-001 | Publish `/docs/orchestrator/cli.md` documenting commands, options, exit codes, streaming output, offline usage, and imposed rule. | CLI doc merged; examples tested; imposed rule appended. | -| DOCS-ORCH-34-001 | TODO | Docs Guild | ORCH-SVC-34-002, LEDGER-34-101 | Author `/docs/orchestrator/run-ledger.md` covering ledger schema, provenance chain, audit workflows, with imposed rule reminder. | Run-ledger doc merged; payload samples validated; imposed rule included; cross-links added. | -| DOCS-ORCH-34-002 | TODO | Docs Guild | AUTH-ORCH-32-001, AUTH-ORCH-34-001 | Update `/docs/security/secrets-handling.md` for orchestrator KMS refs, redaction badges, operator hygiene, reiterating imposed rule. | Security doc merged; checklists updated; imposed rule restated; references from Console/CLI docs added. | -| DOCS-ORCH-34-003 | TODO | Docs Guild | ORCH-SVC-33-003, ORCH-SVC-34-001, DEVOPS-ORCH-34-001 | Publish `/docs/operations/orchestrator-runbook.md` (incident playbook, backfill guide, circuit breakers, throttling) with imposed rule statement. | Runbook merged; steps validated with DevOps; imposed rule included; runbook linked from ops index. | -| DOCS-ORCH-34-004 | TODO | Docs Guild | ORCH-SVC-32-005, WORKER-GO-33-001, WORKER-PY-33-001 | Document `/docs/schemas/artifacts.md` describing artifact kinds, schema versions, hashing, storage layout, restating imposed rule. | Schema doc merged; JSON schema provided; imposed rule included; sample payload validated. | -| DOCS-ORCH-34-005 | TODO | Docs Guild | ORCH-SVC-34-001, DEVOPS-ORCH-34-001 | Author `/docs/slo/orchestrator-slo.md` defining SLOs, burn alerts, measurement, and reiterating imposed rule. | SLO doc merged; dashboard screenshots embedded; imposed rule appended; alerts documented. | - -## Export Center (Epic 10) - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DOCS-EXPORT-35-001 | DONE (2025-10-29) | Docs Guild | EXPORT-SVC-35-001..006 | Author `/docs/export-center/overview.md` covering purpose, profiles, security, AOC alignment, surfaces, ending with imposed rule statement. | Doc merged with diagrams/examples; imposed rule line present; index updated. | -| DOCS-EXPORT-35-002 | DONE (2025-10-29) | Docs Guild | EXPORT-SVC-35-002..005 | Publish `/docs/export-center/architecture.md` describing planner, adapters, manifests, signing, distribution flows, restating imposed rule. | Architecture doc merged; sequence diagrams included; rule statement appended. | -| DOCS-EXPORT-35-003 | DONE (2025-10-29) | Docs Guild | EXPORT-SVC-35-003..004 | Publish `/docs/export-center/profiles.md` detailing schema fields, examples, compatibility, and imposed rule reminder. | Profiles doc merged; JSON schemas linked; imposed rule noted. | -| DOCS-EXPORT-36-004 | DONE (2025-10-29) | Docs Guild | EXPORT-SVC-36-001..004, WEB-EXPORT-36-001 | Publish `/docs/export-center/api.md` covering endpoints, payloads, errors, and mention imposed rule. | API doc merged; examples validated; rule included. 
| -| DOCS-EXPORT-36-005 | DONE (2025-10-29) | Docs Guild | CLI-EXPORT-35-001, CLI-EXPORT-36-001 | Publish `/docs/export-center/cli.md` with command reference, CI scripts, verification steps, restating imposed rule. | CLI doc merged; script snippets tested; rule appended. | -| DOCS-EXPORT-36-006 | DONE (2025-10-29) | Docs Guild | EXPORT-SVC-36-001, DEVOPS-EXPORT-36-001 | Publish `/docs/export-center/trivy-adapter.md` covering field mappings, compatibility matrix, and imposed rule reminder. | Doc merged; mapping tables validated; rule included. | -| DOCS-EXPORT-37-001 | DONE (2025-10-29) | Docs Guild | EXPORT-SVC-37-001, DEVOPS-EXPORT-37-001 | Publish `/docs/export-center/mirror-bundles.md` describing filesystem/OCI layouts, delta/encryption, import guide, ending with imposed rule. | Doc merged; diagrams provided; verification steps tested; rule stated. | -| DOCS-EXPORT-37-002 | DONE (2025-10-29) | Docs Guild | EXPORT-SVC-35-005, EXPORT-SVC-37-002 | Publish `/docs/export-center/provenance-and-signing.md` detailing manifests, attestation flow, verification, reiterating imposed rule. | Doc merged; signature examples validated; rule appended. | -| DOCS-EXPORT-37-003 | DONE (2025-10-29) | Docs Guild | DEVOPS-EXPORT-37-001 | Publish `/docs/operations/export-runbook.md` covering failures, tuning, capacity planning, with imposed rule reminder. | Runbook merged; procedures validated; rule included. | -| DOCS-EXPORT-37-004 | TODO | Docs Guild | AUTH-EXPORT-37-001, EXPORT-SVC-37-002 | Publish `/docs/security/export-hardening.md` outlining RBAC, tenancy, encryption, redaction, restating imposed rule. | Security doc merged; checklist updated; rule appended. | -| DOCS-EXPORT-37-101 | TODO | Docs Guild, DevEx/CLI Guild | CLI-EXPORT-37-001 | Refresh CLI verification sections once `stella export verify` lands (flags, exit codes, samples). | `docs/export-center/cli.md` & `docs/export-center/provenance-and-signing.md` updated with final command syntax; examples tested; rule reminder retained. | -| DOCS-EXPORT-37-102 | TODO | Docs Guild, DevOps Guild | DEVOPS-EXPORT-37-001 | Embed export dashboards/alerts references into provenance/runbook docs after Grafana work ships. | Docs updated with dashboard IDs/alert notes; update logged; rule reminder present. | -| DOCS-EXPORT-37-005 | TODO | Docs Guild, Exporter Service Guild | EXPORT-SVC-35-006, DEVOPS-EXPORT-36-001 | Validate Export Center docs against live Trivy/mirror bundles once implementation lands; refresh examples and CLI snippets accordingly. | Real bundle examples recorded; docs updated; verification steps confirmed with production artefacts. | -> Note (2025-10-29): Blocked until exporter API (`EXPORT-SVC-35-006`) and Trivy/mirror adapters (`EXPORT-SVC-36-001`, `EXPORT-SVC-37-001`) ship. Requires access to CI smoke outputs (`DEVOPS-EXPORT-36-001`) for verification artifacts. - -## Reachability v1 - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DOCS-SIG-26-001 | TODO | Docs Guild, Signals Guild | SIGNALS-24-004 | Write `/docs/signals/reachability.md` covering states, scores, provenance, retention. | Doc merged with diagrams/examples; checklist appended. | -| DOCS-SIG-26-002 | TODO | Docs Guild, Signals Guild | SIGNALS-24-002 | Publish `/docs/signals/callgraph-formats.md` with schemas and validation errors. | Doc merged; examples tested; checklist included. 
| -| DOCS-SIG-26-003 | TODO | Docs Guild, Runtime Guild | SIGNALS-24-003 | Create `/docs/signals/runtime-facts.md` detailing agent capabilities, privacy safeguards, opt-in flags. | Doc merged; privacy review done; checklist appended. | -| DOCS-SIG-26-004 | TODO | Docs Guild, Policy Guild | POLICY-ENGINE-80-001 | Document `/docs/policy/signals-weighting.md` for SPL predicates and weighting strategies. | Doc merged; sample policies validated; checklist appended. | -| DOCS-SIG-26-005 | TODO | Docs Guild, UI Guild | UI-SIG-26-001..003 | Draft `/docs/ui/reachability-overlays.md` with badges, timelines, shortcuts. | Doc merged with screenshots; accessibility checklist completed. | -| DOCS-SIG-26-006 | TODO | Docs Guild, DevEx/CLI Guild | CLI-SIG-26-001..002 | Update `/docs/cli/reachability.md` for new commands and automation recipes. | Doc merged; examples verified; checklist appended. | -| DOCS-SIG-26-007 | TODO | Docs Guild, BE-Base Platform Guild | WEB-SIG-26-001..003 | Publish `/docs/api/signals.md` covering endpoints, payloads, ETags, errors. | API doc aligned with OpenAPI; examples tested; checklist appended. | -| DOCS-SIG-26-008 | TODO | Docs Guild, DevOps Guild | DEVOPS-SIG-26-001..002 | Write `/docs/migration/enable-reachability.md` guiding rollout, fallbacks, monitoring. | Migration doc approved; checklist appended. | - -## Policy Studio (Sprint 27) - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DOCS-POLICY-27-001 | BLOCKED (2025-10-27) | Docs Guild, Policy Guild | REGISTRY-API-27-001, POLICY-ENGINE-27-001 | Publish `/docs/policy/studio-overview.md` covering lifecycle, roles, glossary, and compliance checklist. | Doc merged with diagrams + lifecycle table; checklist appended; stakeholders sign off. | -> Blocked by `REGISTRY-API-27-001` and `POLICY-ENGINE-27-001`; need spec + compile data. -> Blocker: Registry OpenAPI (`REGISTRY-API-27-001`) and policy compile enrichments (`POLICY-ENGINE-27-001`) are still TODO; need final interfaces before drafting overview. -| DOCS-POLICY-27-002 | BLOCKED (2025-10-27) | Docs Guild, Console Guild | CONSOLE-STUDIO-27-001 | Write `/docs/policy/authoring.md` detailing workspace templates, snippets, lint rules, IDE shortcuts, and best practices. | Authoring doc includes annotated screenshots, snippet catalog, compliance checklist. | -> Blocked by `CONSOLE-STUDIO-27-001` Studio authoring UI pending. -> Blocker: Console Studio authoring UI (`CONSOLE-STUDIO-27-001`) not implemented; awaiting UX to capture flows/snippets. -| DOCS-POLICY-27-003 | BLOCKED (2025-10-27) | Docs Guild, Policy Registry Guild | REGISTRY-API-27-007 | Document `/docs/policy/versioning-and-publishing.md` (semver rules, attestations, rollback) with compliance checklist. | Doc merged with flow diagrams; attestation steps documented; checklist appended. | -> Blocked by `REGISTRY-API-27-007` publish/sign pipeline outstanding. -> Blocker: Registry publish/sign workflow (`REGISTRY-API-27-007`) pending. -| DOCS-POLICY-27-004 | BLOCKED (2025-10-27) | Docs Guild, Scheduler Guild | REGISTRY-API-27-005, SCHED-WORKER-27-301 | Write `/docs/policy/simulation.md` covering quick vs batch sim, thresholds, evidence bundles, CLI examples. | Simulation doc includes charts, sample manifests, checklist appended. | -> Blocked by `REGISTRY-API-27-005`/`SCHED-WORKER-27-301` batch simulation not ready. -> Blocker: Batch simulation APIs/workers (`REGISTRY-API-27-005`, `SCHED-WORKER-27-301`) still TODO. 
-| DOCS-POLICY-27-005 | BLOCKED (2025-10-27) | Docs Guild, Product Ops | REGISTRY-API-27-006 | Publish `/docs/policy/review-and-approval.md` with approver requirements, comments, webhooks, audit trail guidance. | Doc merged with role matrix + webhook schema; checklist appended. | -> Blocked by `REGISTRY-API-27-006` review workflow not implemented. -> Blocker: Review workflow (`REGISTRY-API-27-006`) not landed. -| DOCS-POLICY-27-006 | BLOCKED (2025-10-27) | Docs Guild, Policy Guild | REGISTRY-API-27-008 | Author `/docs/policy/promotion.md` covering environments, canary, rollback, and monitoring steps. | Promotion doc includes examples + checklist; verified by Policy Ops. | -> Blocked by `REGISTRY-API-27-008` promotion APIs pending. -> Blocker: Promotion/canary APIs (`REGISTRY-API-27-008`) outstanding. -| DOCS-POLICY-27-007 | BLOCKED (2025-10-27) | Docs Guild, DevEx/CLI Guild | CLI-POLICY-27-001..004 | Update `/docs/policy/cli.md` with new commands, JSON schemas, CI usage, and compliance checklist. | CLI doc merged with transcripts; schema references validated; checklist appended. | -> Blocked by `CLI-POLICY-27-001..004` CLI commands missing. -> Blocker: Policy CLI commands (`CLI-POLICY-27-001..004`) yet to implement. -| DOCS-POLICY-27-008 | BLOCKED (2025-10-27) | Docs Guild, Policy Registry Guild | REGISTRY-API-27-001..008 | Publish `/docs/policy/api.md` describing Registry endpoints, request/response schemas, errors, and feature flags. | API doc aligned with OpenAPI; examples validated; checklist appended. | -> Blocked by `REGISTRY-API-27-001..008` OpenAPI + endpoints incomplete. -> Blocker: Registry OpenAPI/spec suite (`REGISTRY-API-27-001..008`) incomplete. -| DOCS-POLICY-27-009 | BLOCKED (2025-10-27) | Docs Guild, Security Guild | AUTH-POLICY-27-002 | Create `/docs/security/policy-attestations.md` covering signing, verification, key rotation, and compliance checklist. | Security doc approved by Security Guild; verifier steps documented; checklist appended. | -> Blocked by `AUTH-POLICY-27-002` signing enforcement pending. -> Blocker: Authority signing enforcement (`AUTH-POLICY-27-002`) pending. -| DOCS-POLICY-27-010 | BLOCKED (2025-10-27) | Docs Guild, Architecture Guild | REGISTRY-API-27-001, SCHED-WORKER-27-301 | Author `/docs/architecture/policy-registry.md` (service design, schemas, queues, failure modes) with diagrams and checklist. | Architecture doc merged; diagrams committed; checklist appended. | -> Blocked by `REGISTRY-API-27-001` & `SCHED-WORKER-27-301` need delivery. -> Blocker: Policy Registry schema/workers not delivered (see `REGISTRY-API-27-001`, `SCHED-WORKER-27-301`). -| DOCS-POLICY-27-011 | BLOCKED (2025-10-27) | Docs Guild, Observability Guild | DEVOPS-POLICY-27-004 | Publish `/docs/observability/policy-telemetry.md` with metrics/log tables, dashboards, alerts, and compliance checklist. | Observability doc merged; dashboards linked; checklist appended. | -> Blocked by `DEVOPS-POLICY-27-004` observability dashboards outstanding. -> Blocker: Observability dashboards (`DEVOPS-POLICY-27-004`) not built. -| DOCS-POLICY-27-012 | BLOCKED (2025-10-27) | Docs Guild, Ops Guild | DEPLOY-POLICY-27-002 | Write `/docs/runbooks/policy-incident.md` detailing rollback, freeze, forensic steps, notifications. | Runbook merged; rehearsal recorded; checklist appended. | -> Blocked by `DEPLOY-POLICY-27-002` incident runbook inputs pending. -> Blocker: Ops runbook inputs (`DEPLOY-POLICY-27-002`) pending. 
-| DOCS-POLICY-27-013 | BLOCKED (2025-10-27) | Docs Guild, Policy Guild | CONSOLE-STUDIO-27-001, REGISTRY-API-27-002 | Update `/docs/examples/policy-templates.md` with new templates, snippets, and sample policies. | Examples committed with commentary; lint passes; checklist appended. | -> Blocked by `CONSOLE-STUDIO-27-001`/`REGISTRY-API-27-002` templates missing. -> Blocker: Studio templates and registry storage (`CONSOLE-STUDIO-27-001`, `REGISTRY-API-27-002`) not available. -| DOCS-POLICY-27-014 | BLOCKED (2025-10-27) | Docs Guild, Policy Registry Guild | REGISTRY-API-27-003, WEB-POLICY-27-001 | Refresh `/docs/aoc/aoc-guardrails.md` to include Studio-specific guardrails and validation scenarios. | Doc updated with Studio guardrails; compliance checklist appended. | -> Blocked by `REGISTRY-API-27-003` & `WEB-POLICY-27-001` guardrails not implemented. -> Blocker: Registry compile pipeline/web proxy (`REGISTRY-API-27-003`, `WEB-POLICY-27-001`) outstanding. - -## Vulnerability Explorer (Sprint 29) - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DOCS-VULN-29-001 | TODO | Docs Guild, Vuln Explorer Guild | VULN-API-29-001 | Publish `/docs/vuln/explorer-overview.md` covering domain model, identities, AOC guarantees, workflow summary. | Doc merged with diagrams/table; compliance checklist appended. | -| DOCS-VULN-29-002 | TODO | Docs Guild, Console Guild | CONSOLE-VULN-29-001..006 | Write `/docs/vuln/explorer-using-console.md` with workflows, screenshots, keyboard shortcuts, saved views, deep links. | Doc merged; images stored; WCAG notes included; checklist appended. | -| DOCS-VULN-29-003 | TODO | Docs Guild, Vuln Explorer API Guild | VULN-API-29-001..009 | Author `/docs/vuln/explorer-api.md` (endpoints, query schema, grouping, errors, rate limits). | Doc aligned with OpenAPI; examples validated; checklist appended. | -| DOCS-VULN-29-004 | TODO | Docs Guild, DevEx/CLI Guild | CLI-VULN-29-001..005 | Publish `/docs/vuln/explorer-cli.md` with command reference, samples, exit codes, CI snippets. | CLI doc merged; transcripts/JSON outputs validated; checklist appended. | -| DOCS-VULN-29-005 | TODO | Docs Guild, Findings Ledger Guild | LEDGER-29-001..009 | Write `/docs/vuln/findings-ledger.md` detailing event schema, hashing, Merkle roots, replay tooling. | Doc merged; compliance checklist appended; audit team sign-off. | -| DOCS-VULN-29-006 | TODO | Docs Guild, Policy Guild | POLICY-ENGINE-29-001..003 | Update `/docs/policy/vuln-determinations.md` for new rationale, signals, simulation semantics. | Doc updated; examples validated; checklist appended. | -| DOCS-VULN-29-007 | TODO | Docs Guild, Excititor Guild | EXCITITOR-VULN-29-001..004 | Publish `/docs/vex/explorer-integration.md` covering CSAF mapping, suppression precedence, status semantics. | Doc merged; compliance checklist appended. | -| DOCS-VULN-29-008 | TODO | Docs Guild, Concelier Guild | CONCELIER-VULN-29-001..004 | Publish `/docs/advisories/explorer-integration.md` covering key normalization, withdrawn handling, provenance. | Doc merged; checklist appended. | -| DOCS-VULN-29-009 | TODO | Docs Guild, SBOM Service Guild | SBOM-VULN-29-001..002 | Author `/docs/sbom/vuln-resolution.md` detailing version semantics, scope, paths, safe version hints. | Doc merged; ecosystem tables validated; checklist appended. 
| -| DOCS-VULN-29-010 | TODO | Docs Guild, Observability Guild | VULN-API-29-009, DEVOPS-VULN-29-002 | Publish `/docs/observability/vuln-telemetry.md` (metrics, logs, tracing, dashboards, SLOs). | Doc merged; dashboards linked; checklist appended. | -| DOCS-VULN-29-011 | TODO | Docs Guild, Security Guild | AUTH-VULN-29-001..003 | Create `/docs/security/vuln-rbac.md` for roles, ABAC policies, attachment encryption, CSRF. | Security doc approved; checklist appended. | -| DOCS-VULN-29-012 | TODO | Docs Guild, Ops Guild | DEVOPS-VULN-29-002, SCHED-WORKER-29-003 | Write `/docs/runbooks/vuln-ops.md` (projector lag, resolver storms, export failures, policy activation). | Runbook merged; rehearsal recorded; checklist appended. | -| DOCS-VULN-29-013 | TODO | Docs Guild, Deployment Guild | DEPLOY-VULN-29-001..002 | Update `/docs/install/containers.md` with Findings Ledger & Vuln Explorer API images, manifests, resource sizing, health checks. | Install doc updated; validation commands included; checklist appended. | - -## VEX Lens (Sprint 30) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DOCS-VEX-30-001 | TODO | Docs Guild, VEX Lens Guild | VEXLENS-30-005 | Publish `/docs/vex/consensus-overview.md` describing purpose, scope, AOC guarantees. | Doc merged with diagrams/terminology tables; compliance checklist appended. | -| DOCS-VEX-30-002 | TODO | Docs Guild, VEX Lens Guild | VEXLENS-30-005 | Author `/docs/vex/consensus-algorithm.md` covering normalization, weighting, thresholds, examples. | Doc merged; math reviewed by Policy; checklist appended. | -| DOCS-VEX-30-003 | TODO | Docs Guild, Issuer Directory Guild | ISSUER-30-001..003 | Document `/docs/vex/issuer-directory.md` (issuer management, keys, trust overrides, audit). | Doc merged; security review done; checklist appended. | -| DOCS-VEX-30-004 | TODO | Docs Guild, VEX Lens Guild | VEXLENS-30-007 | Publish `/docs/vex/consensus-api.md` with endpoint specs, query params, rate limits. | API doc aligned with OpenAPI; examples validated; checklist appended. | -| DOCS-VEX-30-005 | TODO | Docs Guild, Console Guild | CONSOLE-VEX-30-001 | Write `/docs/vex/consensus-console.md` covering UI workflows, filters, conflicts, accessibility. | Doc merged; screenshots added; checklist appended. | -| DOCS-VEX-30-006 | TODO | Docs Guild, Policy Guild | POLICY-ENGINE-29-001, VEXLENS-30-004 | Add `/docs/policy/vex-trust-model.md` detailing policy knobs, thresholds, simulation. | Doc merged; policy review completed; checklist appended. | -| DOCS-VEX-30-007 | TODO | Docs Guild, SBOM Service Guild | VEXLENS-30-002 | Publish `/docs/sbom/vex-mapping.md` (CPE→purl strategy, edge cases, overrides). | Doc merged; mapping tables validated; checklist appended. | -| DOCS-VEX-30-008 | TODO | Docs Guild, Security Guild | ISSUER-30-002, VEXLENS-30-003 | Deliver `/docs/security/vex-signatures.md` (verification flow, key rotation, audit). | Doc approved by Security; checklist appended. | -| DOCS-VEX-30-009 | TODO | Docs Guild, DevOps Guild | VEXLENS-30-009, DEVOPS-VEX-30-001 | Create `/docs/runbooks/vex-ops.md` for recompute storms, mapping failures, signature errors. | Runbook merged; rehearsal logged; checklist appended. 
| - -## Advisory AI (Sprint 31) - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DOCS-AIAI-31-001 | TODO | Docs Guild, Advisory AI Guild | AIAI-31-006 | Publish `/docs/advisory-ai/overview.md` covering capabilities, guardrails, RBAC. | Doc merged with diagrams; compliance checklist appended. | -| DOCS-AIAI-31-002 | TODO | Docs Guild, Advisory AI Guild | AIAI-31-004 | Author `/docs/advisory-ai/architecture.md` detailing RAG pipeline, deterministics, caching, model options. | Doc merged; architecture review done; checklist appended. | -| DOCS-AIAI-31-003 | TODO | Docs Guild, Advisory AI Guild | AIAI-31-006 | Write `/docs/advisory-ai/api.md` describing endpoints, schemas, errors, rate limits. | API doc aligned with OpenAPI; examples validated; checklist appended. | -| DOCS-AIAI-31-004 | TODO | Docs Guild, Console Guild | CONSOLE-VULN-29-001, CONSOLE-VEX-30-001 | Create `/docs/advisory-ai/console.md` with screenshots, a11y notes, copy-as-ticket instructions. | Doc merged; images stored; checklist appended. | -| DOCS-AIAI-31-005 | TODO | Docs Guild, DevEx/CLI Guild | CLI-VULN-29-001, CLI-VEX-30-001 | Publish `/docs/advisory-ai/cli.md` covering commands, exit codes, scripting patterns. | Doc merged; examples tested; checklist appended. | -| DOCS-AIAI-31-006 | TODO | Docs Guild, Policy Guild | POLICY-ENGINE-31-001 | Update `/docs/policy/assistant-parameters.md` covering temperature, token limits, ranking weights, TTLs. | Doc merged; policy review done; checklist appended. | -| DOCS-AIAI-31-007 | TODO | Docs Guild, Security Guild | AIAI-31-005 | Write `/docs/security/assistant-guardrails.md` detailing redaction, injection defense, logging. | Doc approved by Security; checklist appended. | -| DOCS-AIAI-31-008 | TODO | Docs Guild, SBOM Service Guild | SBOM-AIAI-31-001 | Publish `/docs/sbom/remediation-heuristics.md` (feasibility scoring, blast radius). | Doc merged; heuristics reviewed; checklist appended. | -| DOCS-AIAI-31-009 | TODO | Docs Guild, DevOps Guild | DEVOPS-AIAI-31-001 | Create `/docs/runbooks/assistant-ops.md` for warmup, cache priming, model outages, scaling. | Runbook merged; rehearsal logged; checklist appended. | - -## Notifications Studio - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DOCS-NOTIFY-38-001 | DONE (2025-10-29) | Docs Guild, Notifications Service Guild | NOTIFY-SVC-38-001..004 | Publish `/docs/notifications/overview.md` and `/docs/notifications/architecture.md`, each ending with imposed rule reminder. | Docs merged; diagrams verified; imposed rule appended. | -| DOCS-NOTIFY-39-002 | DONE (2025-10-29) | Docs Guild, Notifications Service Guild | NOTIFY-SVC-39-001..004 | Publish `/docs/notifications/rules.md`, `/docs/notifications/templates.md`, `/docs/notifications/digests.md` with examples and imposed rule line. | Docs merged; examples validated; imposed rule appended. | -| DOCS-NOTIFY-40-001 | TODO | Docs Guild, Security Guild | AUTH-NOTIFY-38-001, NOTIFY-SVC-40-001..004 | Publish `/docs/notifications/channels.md`, `/docs/notifications/escalations.md`, `/docs/notifications/api.md`, `/docs/operations/notifier-runbook.md`, `/docs/security/notifications-hardening.md`; each ends with imposed rule line. | Docs merged; accessibility checks passed; imposed rule appended. 
| - -## CLI Parity & Task Packs - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DOCS-CLI-41-001 | TODO | Docs Guild, DevEx/CLI Guild | CLI-CORE-41-001 | Publish `/docs/cli/overview.md`, `/docs/cli/configuration.md`, `/docs/cli/output-and-exit-codes.md` with imposed rule statements. | Docs merged; examples verified; imposed rule appended. | -| DOCS-CLI-42-001 | TODO | Docs Guild | DOCS-CLI-41-001, CLI-PARITY-41-001 | Publish `/docs/cli/parity-matrix.md` and command guides under `/docs/cli/commands/*.md` (policy, sbom, vuln, vex, advisory, export, orchestrator, notify, aoc, auth). | Guides merged; parity automation documented; imposed rule appended. | -| DOCS-PACKS-43-001 | DONE (2025-10-27) | Docs Guild, Task Runner Guild | PACKS-REG-42-001, TASKRUN-42-001 | Publish `/docs/task-packs/spec.md`, `/docs/task-packs/authoring-guide.md`, `/docs/task-packs/registry.md`, `/docs/task-packs/runbook.md`, `/docs/security/pack-signing-and-rbac.md`, `/docs/operations/cli-release-and-packaging.md` with imposed rule statements. | Docs merged; tutorials validated; imposed rule appended; cross-links added. | - -## Containerized Distribution (Epic 13) - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DOCS-INSTALL-44-001 | TODO | Docs Guild, Deployment Guild | COMPOSE-44-001 | Publish `/docs/install/overview.md` and `/docs/install/compose-quickstart.md` with imposed rule line and copy-ready commands. | Docs merged; screenshots/commands verified; imposed rule appended. | -| DOCS-INSTALL-45-001 | TODO | Docs Guild, Deployment Guild | HELM-45-001 | Publish `/docs/install/helm-prod.md` and `/docs/install/configuration-reference.md` with values tables and imposed rule reminder. | Docs merged; configuration matrix verified; imposed rule appended. | -| DOCS-INSTALL-46-001 | TODO | Docs Guild, Security Guild | DEPLOY-PACKS-43-001, CLI-PACKS-43-001 | Publish `/docs/install/airgap.md`, `/docs/security/supply-chain.md`, `/docs/operations/health-and-readiness.md`, `/docs/release/image-catalog.md`, `/docs/console/onboarding.md` (each with imposed rule). | Docs merged; checksum/signature sections validated; imposed rule appended. | - -## Authority-Backed Scopes & Tenancy (Epic 14) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DOCS-TEN-47-001 | TODO | Docs Guild, Authority Core | AUTH-TEN-47-001 | Publish `/docs/security/tenancy-overview.md` and `/docs/security/scopes-and-roles.md` outlining scope grammar, tenant model, imposed rule reminder. | Docs merged; diagrams included; imposed rule appended. | -| DOCS-TEN-48-001 | TODO | Docs Guild, Platform Ops | WEB-TEN-48-001 | Publish `/docs/operations/multi-tenancy.md`, `/docs/operations/rls-and-data-isolation.md`, `/docs/console/admin-tenants.md`. | Docs merged; examples validated; imposed rule appended. | -| DOCS-TEN-49-001 | TODO | Docs & DevEx Guilds | CLI-TEN-47-001, AUTH-TEN-49-001 | Publish `/docs/cli/authentication.md`, `/docs/api/authentication.md`, `/docs/policy/examples/abac-overlays.md`, update `/docs/install/configuration-reference.md` with new env vars, all ending with imposed rule line. | Docs merged; command examples verified; imposed rule appended. 
| +# Docs Guild Task Board (UTC 2025-10-10) + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DOC7.README-INDEX | DONE (2025-10-17) | Docs Guild | — | Refresh index docs (docs/README.md + root README) after architecture dossier split and Offline Kit overhaul. | ✅ ToC reflects new component architecture docs; ✅ root README highlights updated doc set; ✅ Offline Kit guide linked correctly. | +| DOC4.AUTH-PDG | DONE (2025-10-19) | Docs Guild, Plugin Team | PLG6.DOC | Copy-edit `docs/dev/31_AUTHORITY_PLUGIN_DEVELOPER_GUIDE.md`, export lifecycle diagram, add LDAP RFC cross-link. | ✅ PR merged with polish; ✅ Diagram committed; ✅ Slack handoff posted. | +| DOC1.AUTH | DONE (2025-10-12) | Docs Guild, Authority Core | CORE5B.DOC | Draft `docs/11_AUTHORITY.md` covering architecture, configuration, bootstrap flows. | ✅ Architecture + config sections approved by Core; ✅ Samples reference latest options; ✅ Offline note added. | +| DOC3.Concelier-Authority | DONE (2025-10-12) | Docs Guild, DevEx | FSR4 | Polish operator/runbook sections (DOC3/DOC5) to document Concelier authority rollout, bypass logging, and enforcement checklist. | ✅ DOC3/DOC5 updated with audit runbook references; ✅ enforcement deadline highlighted; ✅ Docs guild sign-off. | +| DOC5.Concelier-Runbook | DONE (2025-10-12) | Docs Guild | DOC3.Concelier-Authority | Produce dedicated Concelier authority audit runbook covering log fields, monitoring recommendations, and troubleshooting steps. | ✅ Runbook published; ✅ linked from DOC3/DOC5; ✅ alerting guidance included. | +| FEEDDOCS-DOCS-05-001 | DONE (2025-10-11) | Docs Guild | FEEDMERGE-ENGINE-04-001, FEEDMERGE-ENGINE-04-002 | Publish Concelier conflict resolution runbook covering precedence workflow, merge-event auditing, and Sprint 3 metrics. | ✅ `docs/ops/concelier-conflict-resolution.md` committed; ✅ metrics/log tables align with latest merge code; ✅ Ops alert guidance handed to Concelier team. | +| FEEDDOCS-DOCS-05-002 | DONE (2025-10-16) | Docs Guild, Concelier Ops | FEEDDOCS-DOCS-05-001 | Ops sign-off captured: conflict runbook circulated, alert thresholds tuned, and rollout decisions documented in change log. | ✅ Ops review recorded; ✅ alert thresholds finalised using `docs/ops/concelier-authority-audit-runbook.md`; ✅ change-log entry linked from runbook once GHSA/NVD/OSV regression fixtures land. | +| DOCS-ADR-09-001 | DONE (2025-10-19) | Docs Guild, DevEx | — | Establish ADR process (`docs/adr/0000-template.md`) and document usage guidelines. | Template published; README snippet linking ADR process; announcement posted (`docs/updates/2025-10-18-docs-guild.md`). | +| DOCS-EVENTS-09-002 | DONE (2025-10-19) | Docs Guild, Platform Events | SCANNER-EVENTS-15-201 | Publish event schema catalog (`docs/events/`) for `scanner.report.ready@1`, `scheduler.rescan.delta@1`, `attestor.logged@1`. | Schemas validated (Ajv CI hooked); docs/events/README summarises usage; Platform Events notified via `docs/updates/2025-10-18-docs-guild.md`. | +| DOCS-EVENTS-09-003 | DONE (2025-10-19) | Docs Guild | DOCS-EVENTS-09-002 | Add human-readable envelope field references and canonical payload samples for published events, including offline validation workflow. | Tables explain common headers/payload segments; versioned sample payloads committed; README links to validation instructions and samples. 
| +| DOCS-EVENTS-09-004 | DONE (2025-10-19) | Docs Guild, Scanner WebService | SCANNER-EVENTS-15-201 | Refresh scanner event docs to mirror DSSE-backed report fields, document `scanner.scan.completed`, and capture canonical sample validation. | Schemas updated for new payload shape; README references DSSE reuse and validation test; samples align with emitted events. | +| PLATFORM-EVENTS-09-401 | DONE (2025-10-21) | Platform Events Guild | DOCS-EVENTS-09-003 | Embed canonical event samples into contract/integration tests and ensure CI validates payloads against published schemas. | Notify models tests now run schema validation against `docs/events/*.json`, event schemas allow optional `attributes`, and docs capture the new validation workflow. | +| RUNTIME-GUILD-09-402 | DONE (2025-10-19) | Runtime Guild | SCANNER-POLICY-09-107 | Confirm Scanner WebService surfaces `quietedFindingCount` and progress hints to runtime consumers; document readiness checklist. | Runtime verification run captures enriched payload; checklist/doc updates merged; stakeholders acknowledge availability. | +| DOCS-CONCELIER-07-201 | DONE (2025-10-22) | Docs Guild, Concelier WebService | FEEDWEB-DOCS-01-001 | Final editorial review and publish pass for Concelier authority toggle documentation (Quickstart + operator guide). | Review feedback resolved, publish PR merged, release notes updated with documentation pointer. | +| DOCS-RUNTIME-17-004 | DONE (2025-10-26) | Docs Guild, Runtime Guild | SCANNER-EMIT-17-701, ZASTAVA-OBS-17-005, DEVOPS-REL-17-002 | Document build-id workflows: SBOM exposure, runtime event payloads (`process.buildId`), Scanner `/policy/runtime` response (`buildIds` list), debug-store layout, and operator guidance for symbol retrieval. | Architecture + operator docs updated with build-id sections (Observer, Scanner, CLI), examples show `readelf` output + debuginfod usage, references linked from Offline Kit/Release guides + CLI help. | +| DOCS-OBS-50-001 | BLOCKED (2025-10-26) | Docs Guild, Observability Guild | TELEMETRY-OBS-50-001 | Publish `/docs/observability/overview.md` introducing scope, imposed rule banner, architecture diagram, and tenant guarantees. | Doc merged with imposed rule banner; diagram committed; cross-links to telemetry stack + evidence locker docs. | +> Blocked: waiting on telemetry core deliverable (TELEMETRY-OBS-50-001) to finalise architecture details and diagrams. +| DOCS-OBS-50-002 | TODO | Docs Guild, Security Guild | TELEMETRY-OBS-50-002 | Author `/docs/observability/telemetry-standards.md` detailing common fields, scrubbing policy, sampling defaults, and redaction override procedure. | Doc merged; imposed rule banner present; examples validated with telemetry fixtures; security review sign-off captured. | +| DOCS-OBS-50-003 | TODO | Docs Guild, Observability Guild | TELEMETRY-OBS-50-001 | Create `/docs/observability/logging.md` covering structured log schema, dos/don'ts, tenant isolation, and copyable examples. | Doc merged with banner; sample logs redacted; lint passes; linked from coding standards. | +| DOCS-OBS-50-004 | TODO | Docs Guild, Observability Guild | TELEMETRY-OBS-50-002 | Draft `/docs/observability/tracing.md` explaining context propagation, async linking, CLI header usage, and sampling strategies. | Doc merged; imposed rule banner included; diagrams updated; references to CLI/Console features added. 
| +| DOCS-OBS-51-001 | TODO | Docs Guild, DevOps Guild | WEB-OBS-51-001, DEVOPS-OBS-51-001 | Publish `/docs/observability/metrics-and-slos.md` cataloging metrics, SLO targets, burn rate policies, and alert runbooks. | Doc merged with banner; SLO tables verified; alert workflows linked to incident runbook. | +| DOCS-SEC-OBS-50-001 | TODO | Docs Guild, Security Guild | TELEMETRY-OBS-51-002 | Update `/docs/security/redaction-and-privacy.md` to cover telemetry privacy controls, tenant opt-in debug, and imposed rule reminder. | Doc merged; redaction matrix updated; banner present; security sign-off recorded. | +| DOCS-INSTALL-50-001 | TODO | Docs Guild, DevOps Guild | DEVOPS-OBS-50-003 | Add `/docs/install/telemetry-stack.md` with collector deployment, exporter options, offline kit notes, and imposed rule banner. | Doc merged; install steps verified on air-gapped profile; banner present; screenshots attached. | +| DOCS-FORENSICS-53-001 | TODO | Docs Guild, Evidence Locker Guild | EVID-OBS-53-003 | Publish `/docs/forensics/evidence-locker.md` describing bundle formats, WORM options, retention, legal hold, and imposed rule banner. | Doc merged; manifest examples validated; banner present; legal hold steps aligned with API. | +| DOCS-FORENSICS-53-002 | TODO | Docs Guild, Provenance Guild | PROV-OBS-54-001 | Release `/docs/forensics/provenance-attestation.md` covering DSSE schema, signing process, verification workflow, and imposed rule banner. | Doc merged; sample statements reference fixtures; banner included; verification steps tested. | +| DOCS-FORENSICS-53-003 | TODO | Docs Guild, Timeline Indexer Guild | TIMELINE-OBS-52-003 | Publish `/docs/forensics/timeline.md` with schema, event kinds, filters, query examples, and imposed rule banner. | Doc merged; query examples validated; banner present; linked from Console/CLI docs. | +| DOCS-CONSOLE-OBS-52-001 | TODO | Docs Guild, Console Guild | CONSOLE-OBS-51-001 | Document `/docs/console/observability.md` showcasing Observability Hub widgets, trace/log search, imposed rule banner, and accessibility tips. | Doc merged; screenshots updated; banner present; navigation steps verified. | +| DOCS-CONSOLE-OBS-52-002 | TODO | Docs Guild, Console Guild | CONSOLE-OBS-52-002, CONSOLE-OBS-53-001 | Publish `/docs/console/forensics.md` covering timeline explorer, evidence viewer, attestation verifier, imposed rule banner, and troubleshooting. | Doc merged; banner included; workflows validated via Playwright capture; troubleshooting section populated. | +| DOCS-CLI-OBS-52-001 | TODO | Docs Guild, DevEx/CLI Guild | CLI-OBS-52-001 | Create `/docs/cli/observability.md` detailing `stella obs` commands, examples, exit codes, imposed rule banner, and scripting tips. | Doc merged; examples tested; banner included; CLI parity matrix updated. | +| DOCS-CLI-FORENSICS-53-001 | TODO | Docs Guild, DevEx/CLI Guild | CLI-FORENSICS-54-001 | Publish `/docs/cli/forensics.md` for snapshot/verify/attest commands with sample outputs, imposed rule banner, and offline workflows. | Doc merged; sample bundles verified; banner present; offline notes cross-linked. | +| DOCS-RUNBOOK-55-001 | TODO | Docs Guild, Ops Guild | DEVOPS-OBS-55-001, WEB-OBS-55-001 | Author `/docs/runbooks/incidents.md` describing incident mode activation, escalation steps, retention impact, verification checklist, and imposed rule banner. | Doc merged; runbook rehearsed; banner included; linked from alerts. 
| +| DOCS-AOC-19-001 | DONE (2025-10-26) | Docs Guild, Concelier Guild | CONCELIER-WEB-AOC-19-001, EXCITITOR-WEB-AOC-19-001 | Author `/docs/ingestion/aggregation-only-contract.md` covering philosophy, invariants, schemas, error codes, migration, observability, and security checklist. | New doc published with compliance checklist; cross-links from existing docs added. | +| DOCS-AOC-19-002 | DONE (2025-10-26) | Docs Guild, Architecture Guild | DOCS-AOC-19-001 | Update `/docs/architecture/overview.md` to include AOC boundary, raw stores, and sequence diagram (fetch → guard → raw insert → policy evaluation). | Overview doc updated with diagrams/text; lint passes; stakeholders sign off. | +| DOCS-AOC-19-003 | DONE (2025-10-26) | Docs Guild, Policy Guild | POLICY-AOC-19-003 | Refresh `/docs/architecture/policy-engine.md` clarifying ingestion boundary, raw inputs, and policy-only derived data. | Doc highlights raw-only ingestion contract, updated diagrams merge, compliance checklist added. | +| DOCS-AOC-19-004 | DONE (2025-10-26) | Docs Guild, UI Guild | UI-AOC-19-001 | Extend `/docs/ui/console.md` with Sources dashboard tiles, violation drill-down workflow, and verification action. | UI doc updated with screenshots/flow descriptions, compliance checklist appended. | +> DOCS-AOC-19-004: Architecture overview & policy-engine updates landed 2025-10-26; incorporate the new AOC boundary diagrams and metrics references. +| DOCS-AOC-19-005 | DONE (2025-10-26) | Docs Guild, CLI Guild | CLI-AOC-19-003 | Update `/docs/cli/cli-reference.md` with `stella sources ingest --dry-run` and `stella aoc verify` usage, exit codes, and offline notes. | CLI reference + quickstart sections updated; examples validated; compliance checklist added. | +> DOCS-AOC-19-005: New ingestion reference + architecture overview published 2025-10-26; ensure CLI docs link to both and surface AOC exit codes mapping. +| DOCS-AOC-19-006 | DONE (2025-10-26) | Docs Guild, Observability Guild | CONCELIER-WEB-AOC-19-002, EXCITITOR-WEB-AOC-19-002 | Document new metrics/traces/log keys in `/docs/observability/observability.md`. | Observability doc lists new metrics/traces/log fields; dashboards referenced; compliance checklist appended. | +| DOCS-AOC-19-007 | DONE (2025-10-26) | Docs Guild, Authority Core | AUTH-AOC-19-001 | Update `/docs/security/authority-scopes.md` with new ingestion scopes and tenancy enforcement notes. | Doc reflects new scopes, sample policies updated, compliance checklist added. | +| DOCS-AOC-19-008 | DONE (2025-10-26) | Docs Guild, DevOps Guild | DEVOPS-AOC-19-002 | Refresh `/docs/deploy/containers.md` to cover validator enablement, guard env flags, and read-only verify user. | Deploy doc updated; offline kit section mentions validator scripts; compliance checklist appended. | +| DOCS-AOC-19-009 | DONE (2025-10-26) | Docs Guild, Authority Core | AUTH-AOC-19-001 | Update AOC docs/samples to reflect new `advisory:*`, `vex:*`, and `aoc:verify` scopes. | Docs reference new scopes, samples aligned, compliance checklist updated. | + +## Air-Gapped Mode (Epic 16) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DOCS-AIRGAP-56-001 | TODO | Docs Guild, AirGap Controller Guild | AIRGAP-CTL-56-002 | Publish `/docs/airgap/overview.md` outlining modes, lifecycle, responsibilities, and imposed rule banner. | Doc merged; banner present; diagrams included. 
| +| DOCS-AIRGAP-56-002 | TODO | Docs Guild, DevOps Guild | DEVOPS-AIRGAP-56-001 | Author `/docs/airgap/sealing-and-egress.md` covering network policies, EgressPolicy facade usage, and verification steps. | Doc merged; examples validated; banner included. | +| DOCS-AIRGAP-56-003 | TODO | Docs Guild, Exporter Guild | EXPORT-AIRGAP-56-001 | Create `/docs/airgap/mirror-bundles.md` describing bundle format, DSSE/TUF/Merkle validation, creation/import workflows. | Doc merged; sample commands verified; banner present. | +| DOCS-AIRGAP-56-004 | TODO | Docs Guild, Deployment Guild | DEVOPS-AIRGAP-56-003 | Publish `/docs/airgap/bootstrap.md` detailing Bootstrap Pack creation, validation, and install procedures. | Doc merged; checklist appended; screenshots verified. | +| DOCS-AIRGAP-57-001 | TODO | Docs Guild, AirGap Time Guild | AIRGAP-TIME-58-001 | Write `/docs/airgap/staleness-and-time.md` explaining time anchors, drift policies, staleness budgets, and UI indicators. | Doc merged; math checked; banner included. | +| DOCS-AIRGAP-57-002 | TODO | Docs Guild, Console Guild | CONSOLE-AIRGAP-57-001 | Publish `/docs/console/airgap.md` covering sealed badge, import wizard, staleness dashboards. | Doc merged; screenshots captured; banner present. | +| DOCS-AIRGAP-57-003 | TODO | Docs Guild, CLI Guild | CLI-AIRGAP-57-001 | Publish `/docs/cli/airgap.md` documenting commands, examples, exit codes. | Doc merged; examples validated; banner present. | +| DOCS-AIRGAP-57-004 | TODO | Docs Guild, Ops Guild | DEVOPS-AIRGAP-56-002 | Create `/docs/airgap/operations.md` with runbooks for imports, failure recovery, and auditing. | Doc merged; runbooks rehearsed; banner included. | +| DOCS-AIRGAP-58-001 | TODO | Docs Guild, Product Guild | CONSOLE-AIRGAP-58-002 | Provide `/docs/airgap/degradation-matrix.md` enumerating feature availability, fallbacks, remediation. | Doc merged; matrix reviewed; banner included. | +| DOCS-AIRGAP-58-002 | TODO | Docs Guild, Security Guild | PROV-OBS-54-001 | Update `/docs/security/trust-and-signing.md` with DSSE/TUF roots, rotation, and signed time tokens. | Doc merged; security sign-off recorded; banner present. | +| DOCS-AIRGAP-58-003 | TODO | Docs Guild, DevEx Guild | AIRGAP-POL-56-001 | Publish `/docs/dev/airgap-contracts.md` describing EgressPolicy usage, sealed-mode tests, linting. | Doc merged; sample code validated; banner included. | +| DOCS-AIRGAP-58-004 | TODO | Docs Guild, Evidence Locker Guild | EVID-OBS-55-001 | Document `/docs/airgap/portable-evidence.md` for exporting/importing portable evidence bundles across enclaves. | Doc merged; verification steps tested; banner present. | + +## SDKs & OpenAPI (Epic 17) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DOCS-OAS-61-001 | TODO | Docs Guild, API Contracts Guild | OAS-61-002 | Publish `/docs/api/overview.md` covering auth, tenancy, pagination, idempotency, rate limits with banner. | Doc merged; examples validated; banner present. | +| DOCS-OAS-61-002 | TODO | Docs Guild, API Governance Guild | APIGOV-61-001 | Author `/docs/api/conventions.md` capturing naming, errors, filters, sorting, examples. | Doc merged; lint passes; banner included. | +| DOCS-OAS-61-003 | TODO | Docs Guild, API Governance Guild | APIGOV-63-001 | Publish `/docs/api/versioning.md` describing SemVer, deprecation headers, migration playbooks. | Doc merged; example headers validated; banner present. 
| +| DOCS-OAS-62-001 | TODO | Docs Guild, Developer Portal Guild | DEVPORT-62-002 | Stand up `/docs/api/reference/` auto-generated site; integrate with portal nav. | Reference site builds; search works; banner included. | +| DOCS-SDK-62-001 | TODO | Docs Guild, SDK Generator Guild | SDKGEN-63-001 | Publish `/docs/sdks/overview.md` plus language guides (`typescript.md`, `python.md`, `go.md`, `java.md`). | Docs merged; code samples pulled from tested examples; banner present. | +| DOCS-DEVPORT-62-001 | TODO | Docs Guild, Developer Portal Guild | DEVPORT-62-001 | Document `/docs/devportal/publishing.md` for build pipeline, offline bundle steps. | Doc merged; cross-links validated; banner included. | +| DOCS-CONTRIB-62-001 | TODO | Docs Guild, API Governance Guild | APIGOV-61-001 | Publish `/docs/contributing/api-contracts.md` detailing how to edit OAS, lint rules, compatibility checks. | Doc merged; banner present; examples validated. | +| DOCS-TEST-62-001 | TODO | Docs Guild, Contract Testing Guild | CONTR-62-001 | Author `/docs/testing/contract-testing.md` covering mock server, replay tests, golden fixtures. | Doc merged; references to tooling validated; banner present. | +| DOCS-SEC-62-001 | TODO | Docs Guild, Authority Core | AUTH-AIRGAP-56-001 | Update `/docs/security/auth-scopes.md` with OAuth2/PAT scopes, tenancy header usage. | Doc merged; scope tables verified; banner included. | +| DOCS-AIRGAP-DEVPORT-64-001 | TODO | Docs Guild, DevPortal Offline Guild | DVOFF-64-001 | Create `/docs/airgap/devportal-offline.md` describing offline bundle usage and verification. | Doc merged; verification steps tested; banner present. | + +## Risk Profiles (Epic 18) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DOCS-RISK-66-001 | TODO | Docs Guild, Risk Profile Schema Guild | POLICY-RISK-66-001 | Publish `/docs/risk/overview.md` covering concepts and glossary. | Doc merged with banner; terminology reviewed. | +| DOCS-RISK-66-002 | TODO | Docs Guild, Policy Guild | POLICY-RISK-66-003 | Author `/docs/risk/profiles.md` (authoring, versioning, scope). | Doc merged; schema examples validated; banner present. | +| DOCS-RISK-66-003 | TODO | Docs Guild, Risk Engine Guild | RISK-ENGINE-67-001 | Publish `/docs/risk/factors.md` cataloging signals, transforms, reducers, TTLs. | Document merged; tables verified; banner included. | +| DOCS-RISK-66-004 | TODO | Docs Guild, Risk Engine Guild | RISK-ENGINE-66-002 | Create `/docs/risk/formulas.md` detailing math, normalization, gating, severity. | Doc merged; equations rendered; banner present. | +| DOCS-RISK-67-001 | TODO | Docs Guild, Risk Engine Guild | RISK-ENGINE-68-001 | Publish `/docs/risk/explainability.md` showing artifact schema and UI screenshots. | Doc merged; CLI examples validated; banner included. | +| DOCS-RISK-67-002 | TODO | Docs Guild, API Guild | POLICY-RISK-67-002 | Produce `/docs/risk/api.md` with endpoint reference/examples. | Doc merged; OAS examples synced; banner present. | +| DOCS-RISK-67-003 | TODO | Docs Guild, Console Guild | CONSOLE-RISK-66-001 | Document `/docs/console/risk-ui.md` for authoring, simulation, dashboards. | Doc merged; screenshots updated; banner included. | +| DOCS-RISK-67-004 | TODO | Docs Guild, CLI Guild | CLI-RISK-66-001 | Publish `/docs/cli/risk.md` covering CLI workflows. | Doc merged; command examples validated; banner present. 
| +| DOCS-RISK-68-001 | TODO | Docs Guild, Export Guild | RISK-BUNDLE-69-001 | Add `/docs/airgap/risk-bundles.md` for offline factor bundles. | Doc merged; verification steps confirmed; banner included. | +| DOCS-RISK-68-002 | TODO | Docs Guild, Security Guild | POLICY-RISK-66-003 | Update `/docs/security/aoc-invariants.md` with risk scoring provenance guarantees. | Doc merged; audit references updated; banner present. | + +## Attestor Console (Epic 19) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DOCS-ATTEST-73-001 | TODO | Docs Guild, Attestor Service Guild | ATTEST-TYPES-73-001 | Publish `/docs/attestor/overview.md` with imposed rule banner. | Doc merged; terminology validated. | +| DOCS-ATTEST-73-002 | TODO | Docs Guild, Attestation Payloads Guild | ATTEST-TYPES-73-002 | Write `/docs/attestor/payloads.md` with schemas/examples. | Doc merged; examples validated via tests. | +| DOCS-ATTEST-73-003 | TODO | Docs Guild, Policy Guild | POLICY-ATTEST-73-002 | Publish `/docs/attestor/policies.md` covering verification policies. | Doc merged; policy examples validated. | +| DOCS-ATTEST-73-004 | TODO | Docs Guild, Attestor Service Guild | ATTESTOR-73-002 | Add `/docs/attestor/workflows.md` detailing ingest, verify, bulk operations. | Doc merged; workflows tested. | +| DOCS-ATTEST-74-001 | TODO | Docs Guild, KMS Guild | KMS-73-001 | Publish `/docs/attestor/keys-and-issuers.md`. | Doc merged; rotation guidance verified. | +| DOCS-ATTEST-74-002 | TODO | Docs Guild, Transparency Guild | TRANSP-74-001 | Document `/docs/attestor/transparency.md` with witness usage/offline validation. | Doc merged; proofs validated. | +| DOCS-ATTEST-74-003 | TODO | Docs Guild, Attestor Console Guild | CONSOLE-ATTEST-73-001 | Write `/docs/console/attestor-ui.md` with screenshots/workflows. | Doc merged; screenshots captured; banner present. | +| DOCS-ATTEST-74-004 | TODO | Docs Guild, CLI Attestor Guild | CLI-ATTEST-73-001 | Publish `/docs/cli/attest.md` covering CLI usage. | Doc merged; commands validated. | +| DOCS-ATTEST-75-001 | TODO | Docs Guild, Export Attestation Guild | EXPORT-ATTEST-75-002 | Add `/docs/attestor/airgap.md` for attestation bundles. | Doc merged; verification steps confirmed. | +| DOCS-ATTEST-75-002 | TODO | Docs Guild, Security Guild | ATTESTOR-73-002 | Update `/docs/security/aoc-invariants.md` with attestation invariants. | Doc merged; invariants detailed. | +## Policy Engine v2 + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DOCS-POLICY-20-001 | DONE (2025-10-26) | Docs Guild, Policy Guild | POLICY-ENGINE-20-000 | Author `/docs/policy/overview.md` covering concepts, inputs/outputs, determinism, and compliance checklist. | Doc published with diagrams + glossary; lint passes; checklist included. | +| DOCS-POLICY-20-002 | DONE (2025-10-26) | Docs Guild, Policy Guild | POLICY-ENGINE-20-001 | Write `/docs/policy/dsl.md` with grammar, built-ins, examples, anti-patterns. | DSL doc includes grammar tables, examples, compliance checklist; validated against parser tests. | +| DOCS-POLICY-20-003 | DONE (2025-10-26) | Docs Guild, Authority Core | AUTH-POLICY-20-001 | Publish `/docs/policy/lifecycle.md` describing draft→approve workflow, roles, audit, compliance list. | Lifecycle doc linked from UI/CLI help; approvals roles documented; checklist appended. 
| +| DOCS-POLICY-20-004 | DONE (2025-10-26) | Docs Guild, Scheduler Guild | SCHED-MODELS-20-001 | Create `/docs/policy/runs.md` detailing run modes, incremental mechanics, cursors, replay. | Run doc includes sequence diagrams + compliance checklist; cross-links to scheduler docs. | +| DOCS-POLICY-20-005 | DONE (2025-10-26) | Docs Guild, BE-Base Platform Guild | WEB-POLICY-20-001 | Draft `/docs/api/policy.md` describing endpoints, schemas, error codes. | API doc validated against OpenAPI; examples included; checklist appended. | +| DOCS-POLICY-20-006 | DONE (2025-10-26) | Docs Guild, DevEx/CLI Guild | CLI-POLICY-20-002 | Produce `/docs/cli/policy.md` with command usage, exit codes, JSON output contracts. | CLI doc includes examples, exit codes, compliance checklist. | +| DOCS-POLICY-20-007 | DONE (2025-10-26) | Docs Guild, UI Guild | UI-POLICY-20-001 | Document `/docs/ui/policy-editor.md` covering editor, simulation, diff workflows, approvals. | UI doc includes screenshots/placeholders, accessibility notes, compliance checklist. | +| DOCS-POLICY-20-008 | DONE (2025-10-26) | Docs Guild, Architecture Guild | POLICY-ENGINE-20-003 | Write `/docs/architecture/policy-engine.md` (new epic content) with sequence diagrams, selection strategy, schema. | Architecture doc merged with diagrams; compliance checklist appended; references updated. | +| DOCS-POLICY-20-009 | DONE (2025-10-26) | Docs Guild, Observability Guild | POLICY-ENGINE-20-007 | Add `/docs/observability/policy.md` for metrics/traces/logs, sample dashboards. | Observability doc includes metrics tables, dashboard screenshots, checklist. | +| DOCS-POLICY-20-010 | DONE (2025-10-26) | Docs Guild, Security Guild | AUTH-POLICY-20-002 | Publish `/docs/security/policy-governance.md` covering scopes, approvals, tenancy, least privilege. | Security doc merged; compliance checklist appended; reviewed by Security Guild. | +| DOCS-POLICY-20-011 | DONE (2025-10-26) | Docs Guild, Policy Guild | POLICY-ENGINE-20-001 | Populate `/docs/examples/policies/` with baseline/serverless/internal-only samples and commentary. | Example policies committed with explanations; lint passes; compliance checklist per file. | +| DOCS-POLICY-20-012 | DONE (2025-10-26) | Docs Guild, Support Guild | WEB-POLICY-20-003 | Draft `/docs/faq/policy-faq.md` addressing common pitfalls, VEX conflicts, determinism issues. | FAQ published with Q/A entries, cross-links, compliance checklist. | + +## Graph Explorer v1 + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| + +## Link-Not-Merge v1 + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DOCS-LNM-22-001 | BLOCKED (2025-10-27) | Docs Guild, Concelier Guild | CONCELIER-LNM-21-001..003 | Author `/docs/advisories/aggregation.md` covering observation vs linkset, conflict handling, AOC requirements, and reviewer checklist. | Draft doc merged with examples + checklist; final sign-off blocked until Concelier schema/API tasks land. | +> Blocker (2025-10-27): `CONCELIER-LNM-21-001..003` still TODO; update doc + fixtures once schema/API implementations are available. +| DOCS-LNM-22-002 | BLOCKED (2025-10-27) | Docs Guild, Excititor Guild | EXCITITOR-LNM-21-001..003 | Publish `/docs/vex/aggregation.md` describing VEX observation/linkset model, product matching, conflicts. 
| Draft doc merged with fixtures; final approval blocked until Excititor observation/linkset work ships. | +> Blocker (2025-10-27): `EXCITITOR-LNM-21-001..003` remain TODO; refresh doc, fixtures, and examples post-implementation. +| DOCS-LNM-22-003 | BLOCKED (2025-10-27) | Docs Guild, BE-Base Platform Guild | WEB-LNM-21-001..003 | Update `/docs/api/advisories.md` and `/docs/api/vex.md` for new endpoints, parameters, errors, exports. | Draft pending gateway/API delivery; unblock once endpoints + OpenAPI specs are available. | +> Blocker (2025-10-27): `WEB-LNM-21-001..003` all TODO—no gateway endpoints/OpenAPI to document yet. +| DOCS-LNM-22-004 | TODO | Docs Guild, Policy Guild | POLICY-ENGINE-40-001 | Create `/docs/policy/effective-severity.md` detailing severity selection strategies from multiple sources. | Doc merged with policy examples; checklist included. | +| DOCS-LNM-22-005 | BLOCKED (2025-10-27) | Docs Guild, UI Guild | UI-LNM-22-001..003 | Document `/docs/ui/evidence-panel.md` with screenshots, conflict badges, accessibility guidance. | Awaiting UI implementation to capture screenshots + flows; unblock once Evidence panel ships. | +> Blocker (2025-10-27): `UI-LNM-22-001..003` all TODO; documentation requires final UI states and accessibility audit artifacts. + +## StellaOps Console (Sprint 23) + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DOCS-CONSOLE-23-001 | DONE (2025-10-26) | Docs Guild, Console Guild | CONSOLE-CORE-23-004 | Publish `/docs/ui/console-overview.md` covering IA, tenant model, global filters, and AOC alignment with compliance checklist. | Doc merged with diagrams + overview tables; checklist appended; Console Guild sign-off. | +| DOCS-CONSOLE-23-002 | DONE (2025-10-26) | Docs Guild, Console Guild | DOCS-CONSOLE-23-001 | Author `/docs/ui/navigation.md` detailing routes, breadcrumbs, keyboard shortcuts, deep links, and tenant context switching. | Navigation doc merged with shortcut tables and screenshots; accessibility checklist satisfied. | +| DOCS-CONSOLE-23-003 | DONE (2025-10-26) | Docs Guild, SBOM Service Guild, Console Guild | SBOM-CONSOLE-23-001, CONSOLE-FEAT-23-102 | Document `/docs/ui/sbom-explorer.md` (catalog, detail, graph overlays, exports) including compliance checklist and performance tips. | Doc merged with annotated screenshots, export instructions, and overlay examples; checklist appended. | +| DOCS-CONSOLE-23-004 | DONE (2025-10-26) | Docs Guild, Concelier Guild, Excititor Guild | CONCELIER-CONSOLE-23-001, EXCITITOR-CONSOLE-23-001 | Produce `/docs/ui/advisories-and-vex.md` explaining aggregation-not-merge, conflict indicators, raw viewers, and provenance banners. | Doc merged; raw JSON examples included; compliance checklist complete. | +| DOCS-CONSOLE-23-005 | DONE (2025-10-26) | Docs Guild, Policy Guild | POLICY-CONSOLE-23-001, CONSOLE-FEAT-23-104 | Write `/docs/ui/findings.md` describing filters, saved views, explain drawer, exports, and CLI parity callouts. | Doc merged with filter matrix + explain walkthrough; checklist appended. | +| DOCS-CONSOLE-23-006 | DONE (2025-10-26) | Docs Guild, Policy Guild, Product Ops | POLICY-CONSOLE-23-002, CONSOLE-FEAT-23-105 | Publish `/docs/ui/policies.md` with editor, simulation, approvals, compliance checklist, and RBAC mapping. | Doc merged; Monaco screenshots + simulation diff examples included; approval flow described; checklist appended. 
| +| DOCS-CONSOLE-23-007 | DONE (2025-10-26) | Docs Guild, Scheduler Guild | SCHED-CONSOLE-23-001, CONSOLE-FEAT-23-106 | Document `/docs/ui/runs.md` covering queues, live progress, diffs, retries, evidence downloads, and troubleshooting. | Doc merged with SSE troubleshooting, metrics references, compliance checklist. | +| DOCS-CONSOLE-23-008 | DONE (2025-10-26) | Docs Guild, Authority Guild | AUTH-CONSOLE-23-002, CONSOLE-FEAT-23-108 | Draft `/docs/ui/admin.md` describing users/roles, tenants, tokens, integrations, fresh-auth prompts, and RBAC mapping. | Doc merged with tables for scopes vs roles, screenshots, compliance checklist. | +| DOCS-CONSOLE-23-009 | DONE (2025-10-27) | Docs Guild, DevOps Guild | DOWNLOADS-CONSOLE-23-001, CONSOLE-FEAT-23-109 | Publish `/docs/ui/downloads.md` listing product images, commands, offline instructions, parity with CLI, and compliance checklist. | Doc merged; manifest sample included; copy-to-clipboard guidance documented; checklist complete. | +| DOCS-CONSOLE-23-010 | DONE (2025-10-27) | Docs Guild, Deployment Guild, Console Guild | DEVOPS-CONSOLE-23-002, CONSOLE-REL-23-301 | Write `/docs/deploy/console.md` (Helm, ingress, TLS, CSP, env vars, health checks) with compliance checklist. | Deploy doc merged; templates validated; CSP guidance included; checklist appended. | +| DOCS-CONSOLE-23-011 | DONE (2025-10-28) | Docs Guild, Deployment Guild | DOCS-CONSOLE-23-010 | Update `/docs/install/docker.md` to cover Console image, Compose/Helm usage, offline tarballs, parity with CLI. | Doc updated with new sections; commands validated; compliance checklist appended. | +| DOCS-CONSOLE-23-012 | DONE (2025-10-28) | Docs Guild, Security Guild | AUTH-CONSOLE-23-003, WEB-CONSOLE-23-002 | Publish `/docs/security/console-security.md` detailing OIDC flows, scopes, CSP, fresh-auth, evidence handling, and compliance checklist. | Security doc merged; threat model notes included; checklist appended. | +| DOCS-CONSOLE-23-013 | DONE (2025-10-28) | Docs Guild, Observability Guild | TELEMETRY-CONSOLE-23-001, CONSOLE-QA-23-403 | Write `/docs/observability/ui-telemetry.md` cataloguing metrics/logs/traces, dashboards, alerts, and feature flags. | Doc merged with instrumentation tables, dashboard screenshots, checklist appended. | +| DOCS-CONSOLE-23-014 | DONE (2025-10-28) | Docs Guild, Console Guild, CLI Guild | CONSOLE-DOC-23-502 | Maintain `/docs/cli-vs-ui-parity.md` matrix and integrate CI check guidance. | Matrix published with parity status, CI workflow documented, compliance checklist appended. | +> 2025-10-28: Install Docker guide references pending CLI commands (`stella downloads manifest`, `stella downloads mirror`, `stella console status`). Update once CLI parity lands. +| DOCS-CONSOLE-23-015 | DONE (2025-10-27) | Docs Guild, Architecture Guild | CONSOLE-CORE-23-001, WEB-CONSOLE-23-001 | Produce `/docs/architecture/console.md` describing frontend packages, data flow diagrams, SSE design, performance budgets. | Architecture doc merged with diagrams + compliance checklist; reviewers approve. | +| DOCS-CONSOLE-23-016 | DONE (2025-10-28) | Docs Guild, Accessibility Guild | CONSOLE-QA-23-402, CONSOLE-FEAT-23-102 | Refresh `/docs/accessibility.md` with Console-specific keyboard flows, color tokens, testing tools, and compliance checklist updates. | Accessibility doc updated; audits referenced; checklist appended. | +> 2025-10-28: Added guide covering keyboard matrix, screen reader behaviour, colour/focus tokens, testing workflow, offline guidance, and compliance checklist. 
+| DOCS-CONSOLE-23-017 | DONE (2025-10-27) | Docs Guild, Console Guild | CONSOLE-FEAT-23-101..109 | Create `/docs/examples/ui-tours.md` providing triage, audit, policy rollout walkthroughs with annotated screenshots and GIFs. | UI tours doc merged; capture instructions + asset placeholders committed; compliance checklist appended. | +| DOCS-CONSOLE-23-018 | DONE (2025-10-27) | Docs Guild, Security Guild | DOCS-CONSOLE-23-012 | Execute console security compliance checklist and capture Security Guild sign-off in Sprint 23 log. | Checklist completed; findings addressed or tickets filed; sign-off noted in updates file. | +| DOCS-LNM-22-006 | DONE (2025-10-27) | Docs Guild, Architecture Guild | CONCELIER-LNM-21-001..005, EXCITITOR-LNM-21-001..005 | Refresh `/docs/architecture/conseiller.md` and `/docs/architecture/excitator.md` describing observation/linkset pipelines and event contracts. | Architecture docs updated with observation/linkset flow + event tables; revisit once service implementations land. | +> Follow-up: align diagrams/examples after `CONCELIER-LNM-21` & `EXCITITOR-LNM-21` work merges (currently TODO). +| DOCS-LNM-22-007 | TODO | Docs Guild, Observability Guild | CONCELIER-LNM-21-005, EXCITITOR-LNM-21-005, DEVOPS-LNM-22-002 | Publish `/docs/observability/aggregation.md` with metrics/traces/logs/SLOs. | Observability doc merged; dashboards referenced; checklist appended. | +| DOCS-LNM-22-008 | TODO | Docs Guild, DevOps Guild | MERGE-LNM-21-001, CONCELIER-LNM-21-102 | Write `/docs/migration/no-merge.md` describing migration plan, backfill steps, rollback, feature flags. | Migration doc approved by stakeholders; checklist appended. | + +## Policy Engine + Editor v1 + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DOCS-POLICY-23-001 | TODO | Docs Guild, Policy Guild | POLICY-SPL-23-001..003 | Author `/docs/policy/overview.md` describing SPL philosophy, layering, and glossary with reviewer checklist. | Doc merged; lint passes; checklist appended. | +| DOCS-POLICY-23-002 | TODO | Docs Guild, Policy Guild | POLICY-SPL-23-001 | Write `/docs/policy/spl-v1.md` (language reference, JSON Schema, examples). | Reference published with schema snippets; checklist completed. | +| DOCS-POLICY-23-003 | TODO | Docs Guild, Policy Guild | POLICY-ENGINE-50-001..004 | Produce `/docs/policy/runtime.md` covering compiler, evaluator, caching, events, SLOs. | Runtime doc merged with diagrams; observability references included. | +| DOCS-POLICY-23-004 | TODO | Docs Guild, UI Guild | UI-POLICY-23-001..006 | Document `/docs/policy/editor.md` (UI walkthrough, validation, simulation, approvals). | Editor doc merged with screenshots; accessibility checklist satisfied. | +| DOCS-POLICY-23-005 | TODO | Docs Guild, Security Guild | AUTH-POLICY-23-001..002 | Publish `/docs/policy/governance.md` (roles, scopes, approvals, signing, exceptions). | Governance doc merged; checklist appended. | +| DOCS-POLICY-23-006 | TODO | Docs Guild, BE-Base Platform Guild | WEB-POLICY-23-001..004 | Update `/docs/api/policy.md` with new endpoints, schemas, errors, pagination. | API doc aligns with OpenAPI; examples validated; checklist included. | +| DOCS-POLICY-23-007 | TODO | Docs Guild, DevEx/CLI Guild | CLI-POLICY-23-004..006 | Update `/docs/cli/policy.md` for lint/simulate/activate/history commands, exit codes. | CLI doc updated; samples verified; checklist appended. 
| +| DOCS-POLICY-23-008 | TODO | Docs Guild, Architecture Guild | POLICY-ENGINE-50-005..006 | Refresh `/docs/architecture/policy-engine.md` with data model, sequence diagrams, event flows. | Architecture doc merged with diagrams; checklist appended. | +| DOCS-POLICY-23-009 | TODO | Docs Guild, DevOps Guild | MERGE-LNM-21-001, DEVOPS-LNM-22-001 | Create `/docs/migration/policy-parity.md` covering dual-run parity plan and rollback. | Migration doc approved; checklist appended. | +| DOCS-POLICY-23-010 | TODO | Docs Guild, UI Guild | UI-POLICY-23-006 | Write `/docs/ui/explainers.md` showing explain trees, evidence overlays, interpretation guidance. | Doc merged with annotated screenshots; checklist appended. | + +## Graph & Vuln Explorer v1 + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DOCS-GRAPH-24-001 | TODO | Docs Guild, UI Guild | UI-GRAPH-24-001..006 | Author `/docs/ui/sbom-graph-explorer.md` detailing overlays, filters, saved views, accessibility, and AOC visibility. | Doc merged; screenshots included; checklist appended. | +| DOCS-GRAPH-24-002 | TODO | Docs Guild, UI Guild | UI-GRAPH-24-005 | Publish `/docs/ui/vulnerability-explorer.md` covering table usage, grouping, fix suggestions, Why drawer. | Doc merged with annotated images; accessibility checklist satisfied. | +| DOCS-GRAPH-24-003 | TODO | Docs Guild, SBOM Service Guild | SBOM-GRAPH-24-001..003 | Create `/docs/architecture/graph-index.md` describing data model, ingestion pipeline, caches, events. | Architecture doc merged with diagrams; checklist appended. | +| DOCS-GRAPH-24-004 | TODO | Docs Guild, BE-Base Platform Guild | WEB-GRAPH-24-001..003 | Document `/docs/api/graph.md` and `/docs/api/vuln.md` with endpoints, parameters, errors, RBAC. | API docs aligned with OpenAPI; examples validated; checklist appended. | +| DOCS-GRAPH-24-005 | TODO | Docs Guild, DevEx/CLI Guild | CLI-GRAPH-24-001..003 | Update `/docs/cli/graph-and-vuln.md` covering new CLI commands, exit codes, scripting. | CLI doc merged; examples tested; checklist appended. | +| DOCS-GRAPH-24-006 | TODO | Docs Guild, Policy Guild | POLICY-ENGINE-60-001..002 | Write `/docs/policy/ui-integration.md` explaining overlays, cache usage, simulator contracts. | Doc merged; references cross-linked; checklist appended. | +| DOCS-GRAPH-24-007 | TODO | Docs Guild, DevOps Guild | DEVOPS-GRAPH-24-001..003 | Produce `/docs/migration/graph-parity.md` with rollout plan, parity checks, fallback guidance. | Migration doc approved; checklist appended. | + +## Exceptions v1 + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DOCS-EXC-25-001 | TODO | Docs Guild, Governance Guild | WEB-EXC-25-001 | Author `/docs/governance/exceptions.md` covering lifecycle, scope patterns, examples, compliance checklist. | Doc merged; reviewers sign off; checklist included. | +| DOCS-EXC-25-002 | TODO | Docs Guild, Authority Core | AUTH-EXC-25-001 | Publish `/docs/governance/approvals-and-routing.md` detailing roles, routing matrix, MFA rules, audit trails. | Doc merged; routing examples validated; checklist appended. | +| DOCS-EXC-25-003 | TODO | Docs Guild, BE-Base Platform Guild | WEB-EXC-25-001..003 | Create `/docs/api/exceptions.md` with endpoints, payloads, errors, idempotency notes. | API doc aligned with OpenAPI; examples tested; checklist appended. 
| +| DOCS-EXC-25-004 | DONE (2025-10-27) | Docs Guild, Policy Guild | POLICY-ENGINE-70-001 | Document `/docs/policy/exception-effects.md` explaining evaluation order, conflicts, simulation. | Doc merged; tests cross-referenced; checklist appended. | +| DOCS-EXC-25-005 | TODO | Docs Guild, UI Guild | UI-EXC-25-001..004 | Write `/docs/ui/exception-center.md` with UI walkthrough, badges, accessibility, shortcuts. | Doc merged with screenshots; accessibility checklist completed. | +| DOCS-EXC-25-006 | TODO | Docs Guild, DevEx/CLI Guild | CLI-EXC-25-001..002 | Update `/docs/cli/exceptions.md` covering command usage and exit codes. | CLI doc updated; examples validated; checklist appended. | +| DOCS-EXC-25-007 | TODO | Docs Guild, DevOps Guild | SCHED-WORKER-25-101, DEVOPS-GRAPH-24-003 | Publish `/docs/migration/exception-governance.md` describing cutover from legacy suppressions, notifications, rollback. | Migration doc approved; checklist included. | + +> Update statuses (TODO/DOING/REVIEW/DONE/BLOCKED) as progress changes. Keep guides in sync with configuration samples under `etc/`. + +> Remark (2025-10-13, DOC4.AUTH-PDG): Rate limit guide published (`docs/security/rate-limits.md`) and handed to plugin docs team for diagram uplift once PLG6.DIAGRAM lands. + +## Orchestrator Dashboard (Epic 9) + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DOCS-ORCH-32-001 | TODO | Docs Guild | ORCH-SVC-32-001, AUTH-ORCH-32-001 | Author `/docs/orchestrator/overview.md` covering mission, roles, AOC alignment, governance, with imposed rule reminder. | Doc merged with diagrams; imposed rule statement included; entry linked from docs index. | +| DOCS-ORCH-32-002 | TODO | Docs Guild | ORCH-SVC-32-002 | Author `/docs/orchestrator/architecture.md` detailing scheduler, DAGs, rate limits, data model, message bus, storage layout, restating imposed rule. | Architecture doc merged; diagrams reviewed; imposed rule noted. | +| DOCS-ORCH-33-001 | TODO | Docs Guild | ORCH-SVC-33-001..004, WEB-ORCH-33-001 | Publish `/docs/orchestrator/api.md` (REST/WebSocket endpoints, payloads, error codes) with imposed rule note. | API doc merged; examples validated; imposed rule appended. | +| DOCS-ORCH-33-002 | TODO | Docs Guild | CONSOLE-ORCH-32-002, CONSOLE-ORCH-33-001..002 | Publish `/docs/orchestrator/console.md` covering screens, a11y, live updates, control actions, reiterating imposed rule. | Console doc merged with screenshots; accessibility checklist done; imposed rule statement present. | +| DOCS-ORCH-33-003 | TODO | Docs Guild | CLI-ORCH-33-001 | Publish `/docs/orchestrator/cli.md` documenting commands, options, exit codes, streaming output, offline usage, and imposed rule. | CLI doc merged; examples tested; imposed rule appended. | +| DOCS-ORCH-34-001 | TODO | Docs Guild | ORCH-SVC-34-002, LEDGER-34-101 | Author `/docs/orchestrator/run-ledger.md` covering ledger schema, provenance chain, audit workflows, with imposed rule reminder. | Run-ledger doc merged; payload samples validated; imposed rule included; cross-links added. | +| DOCS-ORCH-34-002 | TODO | Docs Guild | AUTH-ORCH-32-001, AUTH-ORCH-34-001 | Update `/docs/security/secrets-handling.md` for orchestrator KMS refs, redaction badges, operator hygiene, reiterating imposed rule. | Security doc merged; checklists updated; imposed rule restated; references from Console/CLI docs added. 
| +| DOCS-ORCH-34-003 | TODO | Docs Guild | ORCH-SVC-33-003, ORCH-SVC-34-001, DEVOPS-ORCH-34-001 | Publish `/docs/operations/orchestrator-runbook.md` (incident playbook, backfill guide, circuit breakers, throttling) with imposed rule statement. | Runbook merged; steps validated with DevOps; imposed rule included; runbook linked from ops index. | +| DOCS-ORCH-34-004 | TODO | Docs Guild | ORCH-SVC-32-005, WORKER-GO-33-001, WORKER-PY-33-001 | Document `/docs/schemas/artifacts.md` describing artifact kinds, schema versions, hashing, storage layout, restating imposed rule. | Schema doc merged; JSON schema provided; imposed rule included; sample payload validated. | +| DOCS-ORCH-34-005 | TODO | Docs Guild | ORCH-SVC-34-001, DEVOPS-ORCH-34-001 | Author `/docs/slo/orchestrator-slo.md` defining SLOs, burn alerts, measurement, and reiterating imposed rule. | SLO doc merged; dashboard screenshots embedded; imposed rule appended; alerts documented. | + +## Export Center (Epic 10) + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DOCS-EXPORT-35-001 | DONE (2025-10-29) | Docs Guild | EXPORT-SVC-35-001..006 | Author `/docs/export-center/overview.md` covering purpose, profiles, security, AOC alignment, surfaces, ending with imposed rule statement. | Doc merged with diagrams/examples; imposed rule line present; index updated. | +| DOCS-EXPORT-35-002 | DONE (2025-10-29) | Docs Guild | EXPORT-SVC-35-002..005 | Publish `/docs/export-center/architecture.md` describing planner, adapters, manifests, signing, distribution flows, restating imposed rule. | Architecture doc merged; sequence diagrams included; rule statement appended. | +| DOCS-EXPORT-35-003 | DONE (2025-10-29) | Docs Guild | EXPORT-SVC-35-003..004 | Publish `/docs/export-center/profiles.md` detailing schema fields, examples, compatibility, and imposed rule reminder. | Profiles doc merged; JSON schemas linked; imposed rule noted. | +| DOCS-EXPORT-36-004 | DONE (2025-10-29) | Docs Guild | EXPORT-SVC-36-001..004, WEB-EXPORT-36-001 | Publish `/docs/export-center/api.md` covering endpoints, payloads, errors, and mention imposed rule. | API doc merged; examples validated; rule included. | +| DOCS-EXPORT-36-005 | DONE (2025-10-29) | Docs Guild | CLI-EXPORT-35-001, CLI-EXPORT-36-001 | Publish `/docs/export-center/cli.md` with command reference, CI scripts, verification steps, restating imposed rule. | CLI doc merged; script snippets tested; rule appended. | +| DOCS-EXPORT-36-006 | DONE (2025-10-29) | Docs Guild | EXPORT-SVC-36-001, DEVOPS-EXPORT-36-001 | Publish `/docs/export-center/trivy-adapter.md` covering field mappings, compatibility matrix, and imposed rule reminder. | Doc merged; mapping tables validated; rule included. | +| DOCS-EXPORT-37-001 | DONE (2025-10-29) | Docs Guild | EXPORT-SVC-37-001, DEVOPS-EXPORT-37-001 | Publish `/docs/export-center/mirror-bundles.md` describing filesystem/OCI layouts, delta/encryption, import guide, ending with imposed rule. | Doc merged; diagrams provided; verification steps tested; rule stated. | +| DOCS-EXPORT-37-002 | DONE (2025-10-29) | Docs Guild | EXPORT-SVC-35-005, EXPORT-SVC-37-002 | Publish `/docs/export-center/provenance-and-signing.md` detailing manifests, attestation flow, verification, reiterating imposed rule. | Doc merged; signature examples validated; rule appended. 
| +| DOCS-EXPORT-37-003 | DONE (2025-10-29) | Docs Guild | DEVOPS-EXPORT-37-001 | Publish `/docs/operations/export-runbook.md` covering failures, tuning, capacity planning, with imposed rule reminder. | Runbook merged; procedures validated; rule included. | +| DOCS-EXPORT-37-004 | TODO | Docs Guild | AUTH-EXPORT-37-001, EXPORT-SVC-37-002 | Publish `/docs/security/export-hardening.md` outlining RBAC, tenancy, encryption, redaction, restating imposed rule. | Security doc merged; checklist updated; rule appended. | +| DOCS-EXPORT-37-101 | TODO | Docs Guild, DevEx/CLI Guild | CLI-EXPORT-37-001 | Refresh CLI verification sections once `stella export verify` lands (flags, exit codes, samples). | `docs/export-center/cli.md` & `docs/export-center/provenance-and-signing.md` updated with final command syntax; examples tested; rule reminder retained. | +| DOCS-EXPORT-37-102 | TODO | Docs Guild, DevOps Guild | DEVOPS-EXPORT-37-001 | Embed export dashboards/alerts references into provenance/runbook docs after Grafana work ships. | Docs updated with dashboard IDs/alert notes; update logged; rule reminder present. | +| DOCS-EXPORT-37-005 | TODO | Docs Guild, Exporter Service Guild | EXPORT-SVC-35-006, DEVOPS-EXPORT-36-001 | Validate Export Center docs against live Trivy/mirror bundles once implementation lands; refresh examples and CLI snippets accordingly. | Real bundle examples recorded; docs updated; verification steps confirmed with production artefacts. | +> Note (2025-10-29): Blocked until exporter API (`EXPORT-SVC-35-006`) and Trivy/mirror adapters (`EXPORT-SVC-36-001`, `EXPORT-SVC-37-001`) ship. Requires access to CI smoke outputs (`DEVOPS-EXPORT-36-001`) for verification artifacts. + +## Reachability v1 + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DOCS-SIG-26-001 | TODO | Docs Guild, Signals Guild | SIGNALS-24-004 | Write `/docs/signals/reachability.md` covering states, scores, provenance, retention. | Doc merged with diagrams/examples; checklist appended. | +| DOCS-SIG-26-002 | TODO | Docs Guild, Signals Guild | SIGNALS-24-002 | Publish `/docs/signals/callgraph-formats.md` with schemas and validation errors. | Doc merged; examples tested; checklist included. | +| DOCS-SIG-26-003 | TODO | Docs Guild, Runtime Guild | SIGNALS-24-003 | Create `/docs/signals/runtime-facts.md` detailing agent capabilities, privacy safeguards, opt-in flags. | Doc merged; privacy review done; checklist appended. | +| DOCS-SIG-26-004 | TODO | Docs Guild, Policy Guild | POLICY-ENGINE-80-001 | Document `/docs/policy/signals-weighting.md` for SPL predicates and weighting strategies. | Doc merged; sample policies validated; checklist appended. | +| DOCS-SIG-26-005 | TODO | Docs Guild, UI Guild | UI-SIG-26-001..003 | Draft `/docs/ui/reachability-overlays.md` with badges, timelines, shortcuts. | Doc merged with screenshots; accessibility checklist completed. | +| DOCS-SIG-26-006 | TODO | Docs Guild, DevEx/CLI Guild | CLI-SIG-26-001..002 | Update `/docs/cli/reachability.md` for new commands and automation recipes. | Doc merged; examples verified; checklist appended. | +| DOCS-SIG-26-007 | TODO | Docs Guild, BE-Base Platform Guild | WEB-SIG-26-001..003 | Publish `/docs/api/signals.md` covering endpoints, payloads, ETags, errors. | API doc aligned with OpenAPI; examples tested; checklist appended. 
| +| DOCS-SIG-26-008 | TODO | Docs Guild, DevOps Guild | DEVOPS-SIG-26-001..002 | Write `/docs/migration/enable-reachability.md` guiding rollout, fallbacks, monitoring. | Migration doc approved; checklist appended. | + +## Policy Studio (Sprint 27) + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DOCS-POLICY-27-001 | BLOCKED (2025-10-27) | Docs Guild, Policy Guild | REGISTRY-API-27-001, POLICY-ENGINE-27-001 | Publish `/docs/policy/studio-overview.md` covering lifecycle, roles, glossary, and compliance checklist. | Doc merged with diagrams + lifecycle table; checklist appended; stakeholders sign off. | +> Blocked by `REGISTRY-API-27-001` and `POLICY-ENGINE-27-001`; need spec + compile data. +> Blocker: Registry OpenAPI (`REGISTRY-API-27-001`) and policy compile enrichments (`POLICY-ENGINE-27-001`) are still TODO; need final interfaces before drafting overview. +| DOCS-POLICY-27-002 | BLOCKED (2025-10-27) | Docs Guild, Console Guild | CONSOLE-STUDIO-27-001 | Write `/docs/policy/authoring.md` detailing workspace templates, snippets, lint rules, IDE shortcuts, and best practices. | Authoring doc includes annotated screenshots, snippet catalog, compliance checklist. | +> Blocked by `CONSOLE-STUDIO-27-001` Studio authoring UI pending. +> Blocker: Console Studio authoring UI (`CONSOLE-STUDIO-27-001`) not implemented; awaiting UX to capture flows/snippets. +| DOCS-POLICY-27-003 | BLOCKED (2025-10-27) | Docs Guild, Policy Registry Guild | REGISTRY-API-27-007 | Document `/docs/policy/versioning-and-publishing.md` (semver rules, attestations, rollback) with compliance checklist. | Doc merged with flow diagrams; attestation steps documented; checklist appended. | +> Blocked by `REGISTRY-API-27-007` publish/sign pipeline outstanding. +> Blocker: Registry publish/sign workflow (`REGISTRY-API-27-007`) pending. +| DOCS-POLICY-27-004 | BLOCKED (2025-10-27) | Docs Guild, Scheduler Guild | REGISTRY-API-27-005, SCHED-WORKER-27-301 | Write `/docs/policy/simulation.md` covering quick vs batch sim, thresholds, evidence bundles, CLI examples. | Simulation doc includes charts, sample manifests, checklist appended. | +> Blocked by `REGISTRY-API-27-005`/`SCHED-WORKER-27-301` batch simulation not ready. +> Blocker: Batch simulation APIs/workers (`REGISTRY-API-27-005`, `SCHED-WORKER-27-301`) still TODO. +| DOCS-POLICY-27-005 | BLOCKED (2025-10-27) | Docs Guild, Product Ops | REGISTRY-API-27-006 | Publish `/docs/policy/review-and-approval.md` with approver requirements, comments, webhooks, audit trail guidance. | Doc merged with role matrix + webhook schema; checklist appended. | +> Blocked by `REGISTRY-API-27-006` review workflow not implemented. +> Blocker: Review workflow (`REGISTRY-API-27-006`) not landed. +| DOCS-POLICY-27-006 | BLOCKED (2025-10-27) | Docs Guild, Policy Guild | REGISTRY-API-27-008 | Author `/docs/policy/promotion.md` covering environments, canary, rollback, and monitoring steps. | Promotion doc includes examples + checklist; verified by Policy Ops. | +> Blocked by `REGISTRY-API-27-008` promotion APIs pending. +> Blocker: Promotion/canary APIs (`REGISTRY-API-27-008`) outstanding. +| DOCS-POLICY-27-007 | BLOCKED (2025-10-27) | Docs Guild, DevEx/CLI Guild | CLI-POLICY-27-001..004 | Update `/docs/policy/cli.md` with new commands, JSON schemas, CI usage, and compliance checklist. | CLI doc merged with transcripts; schema references validated; checklist appended. 
| +> Blocked by `CLI-POLICY-27-001..004` CLI commands missing. +> Blocker: Policy CLI commands (`CLI-POLICY-27-001..004`) yet to implement. +| DOCS-POLICY-27-008 | BLOCKED (2025-10-27) | Docs Guild, Policy Registry Guild | REGISTRY-API-27-001..008 | Publish `/docs/policy/api.md` describing Registry endpoints, request/response schemas, errors, and feature flags. | API doc aligned with OpenAPI; examples validated; checklist appended. | +> Blocked by `REGISTRY-API-27-001..008` OpenAPI + endpoints incomplete. +> Blocker: Registry OpenAPI/spec suite (`REGISTRY-API-27-001..008`) incomplete. +| DOCS-POLICY-27-009 | BLOCKED (2025-10-27) | Docs Guild, Security Guild | AUTH-POLICY-27-002 | Create `/docs/security/policy-attestations.md` covering signing, verification, key rotation, and compliance checklist. | Security doc approved by Security Guild; verifier steps documented; checklist appended. | +> Blocked by `AUTH-POLICY-27-002` signing enforcement pending. +> Blocker: Authority signing enforcement (`AUTH-POLICY-27-002`) pending. +| DOCS-POLICY-27-010 | BLOCKED (2025-10-27) | Docs Guild, Architecture Guild | REGISTRY-API-27-001, SCHED-WORKER-27-301 | Author `/docs/architecture/policy-registry.md` (service design, schemas, queues, failure modes) with diagrams and checklist. | Architecture doc merged; diagrams committed; checklist appended. | +> Blocked by `REGISTRY-API-27-001` & `SCHED-WORKER-27-301` need delivery. +> Blocker: Policy Registry schema/workers not delivered (see `REGISTRY-API-27-001`, `SCHED-WORKER-27-301`). +| DOCS-POLICY-27-011 | BLOCKED (2025-10-27) | Docs Guild, Observability Guild | DEVOPS-POLICY-27-004 | Publish `/docs/observability/policy-telemetry.md` with metrics/log tables, dashboards, alerts, and compliance checklist. | Observability doc merged; dashboards linked; checklist appended. | +> Blocked by `DEVOPS-POLICY-27-004` observability dashboards outstanding. +> Blocker: Observability dashboards (`DEVOPS-POLICY-27-004`) not built. +| DOCS-POLICY-27-012 | BLOCKED (2025-10-27) | Docs Guild, Ops Guild | DEPLOY-POLICY-27-002 | Write `/docs/runbooks/policy-incident.md` detailing rollback, freeze, forensic steps, notifications. | Runbook merged; rehearsal recorded; checklist appended. | +> Blocked by `DEPLOY-POLICY-27-002` incident runbook inputs pending. +> Blocker: Ops runbook inputs (`DEPLOY-POLICY-27-002`) pending. +| DOCS-POLICY-27-013 | BLOCKED (2025-10-27) | Docs Guild, Policy Guild | CONSOLE-STUDIO-27-001, REGISTRY-API-27-002 | Update `/docs/examples/policy-templates.md` with new templates, snippets, and sample policies. | Examples committed with commentary; lint passes; checklist appended. | +> Blocked by `CONSOLE-STUDIO-27-001`/`REGISTRY-API-27-002` templates missing. +> Blocker: Studio templates and registry storage (`CONSOLE-STUDIO-27-001`, `REGISTRY-API-27-002`) not available. +| DOCS-POLICY-27-014 | BLOCKED (2025-10-27) | Docs Guild, Policy Registry Guild | REGISTRY-API-27-003, WEB-POLICY-27-001 | Refresh `/docs/aoc/aoc-guardrails.md` to include Studio-specific guardrails and validation scenarios. | Doc updated with Studio guardrails; compliance checklist appended. | +> Blocked by `REGISTRY-API-27-003` & `WEB-POLICY-27-001` guardrails not implemented. +> Blocker: Registry compile pipeline/web proxy (`REGISTRY-API-27-003`, `WEB-POLICY-27-001`) outstanding. 
+ +## Vulnerability Explorer (Sprint 29) + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DOCS-VULN-29-001 | TODO | Docs Guild, Vuln Explorer Guild | VULN-API-29-001 | Publish `/docs/vuln/explorer-overview.md` covering domain model, identities, AOC guarantees, workflow summary. | Doc merged with diagrams/table; compliance checklist appended. | +| DOCS-VULN-29-002 | TODO | Docs Guild, Console Guild | CONSOLE-VULN-29-001..006 | Write `/docs/vuln/explorer-using-console.md` with workflows, screenshots, keyboard shortcuts, saved views, deep links. | Doc merged; images stored; WCAG notes included; checklist appended. | +| DOCS-VULN-29-003 | TODO | Docs Guild, Vuln Explorer API Guild | VULN-API-29-001..009 | Author `/docs/vuln/explorer-api.md` (endpoints, query schema, grouping, errors, rate limits). | Doc aligned with OpenAPI; examples validated; checklist appended. | +| DOCS-VULN-29-004 | TODO | Docs Guild, DevEx/CLI Guild | CLI-VULN-29-001..005 | Publish `/docs/vuln/explorer-cli.md` with command reference, samples, exit codes, CI snippets. | CLI doc merged; transcripts/JSON outputs validated; checklist appended. | +| DOCS-VULN-29-005 | TODO | Docs Guild, Findings Ledger Guild | LEDGER-29-001..009 | Write `/docs/vuln/findings-ledger.md` detailing event schema, hashing, Merkle roots, replay tooling. | Doc merged; compliance checklist appended; audit team sign-off. | +| DOCS-VULN-29-006 | TODO | Docs Guild, Policy Guild | POLICY-ENGINE-29-001..003 | Update `/docs/policy/vuln-determinations.md` for new rationale, signals, simulation semantics. | Doc updated; examples validated; checklist appended. | +| DOCS-VULN-29-007 | TODO | Docs Guild, Excititor Guild | EXCITITOR-VULN-29-001..004 | Publish `/docs/vex/explorer-integration.md` covering CSAF mapping, suppression precedence, status semantics. | Doc merged; compliance checklist appended. | +| DOCS-VULN-29-008 | TODO | Docs Guild, Concelier Guild | CONCELIER-VULN-29-001..004 | Publish `/docs/advisories/explorer-integration.md` covering key normalization, withdrawn handling, provenance. | Doc merged; checklist appended. | +| DOCS-VULN-29-009 | TODO | Docs Guild, SBOM Service Guild | SBOM-VULN-29-001..002 | Author `/docs/sbom/vuln-resolution.md` detailing version semantics, scope, paths, safe version hints. | Doc merged; ecosystem tables validated; checklist appended. | +| DOCS-VULN-29-010 | TODO | Docs Guild, Observability Guild | VULN-API-29-009, DEVOPS-VULN-29-002 | Publish `/docs/observability/vuln-telemetry.md` (metrics, logs, tracing, dashboards, SLOs). | Doc merged; dashboards linked; checklist appended. | +| DOCS-VULN-29-011 | TODO | Docs Guild, Security Guild | AUTH-VULN-29-001..003 | Create `/docs/security/vuln-rbac.md` for roles, ABAC policies, attachment encryption, CSRF. | Security doc approved; checklist appended. | +| DOCS-VULN-29-012 | TODO | Docs Guild, Ops Guild | DEVOPS-VULN-29-002, SCHED-WORKER-29-003 | Write `/docs/runbooks/vuln-ops.md` (projector lag, resolver storms, export failures, policy activation). | Runbook merged; rehearsal recorded; checklist appended. | +| DOCS-VULN-29-013 | TODO | Docs Guild, Deployment Guild | DEPLOY-VULN-29-001..002 | Update `/docs/install/containers.md` with Findings Ledger & Vuln Explorer API images, manifests, resource sizing, health checks. | Install doc updated; validation commands included; checklist appended. 
| + +## VEX Lens (Sprint 30) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DOCS-VEX-30-001 | TODO | Docs Guild, VEX Lens Guild | VEXLENS-30-005 | Publish `/docs/vex/consensus-overview.md` describing purpose, scope, AOC guarantees. | Doc merged with diagrams/terminology tables; compliance checklist appended. | +| DOCS-VEX-30-002 | TODO | Docs Guild, VEX Lens Guild | VEXLENS-30-005 | Author `/docs/vex/consensus-algorithm.md` covering normalization, weighting, thresholds, examples. | Doc merged; math reviewed by Policy; checklist appended. | +| DOCS-VEX-30-003 | TODO | Docs Guild, Issuer Directory Guild | ISSUER-30-001..003 | Document `/docs/vex/issuer-directory.md` (issuer management, keys, trust overrides, audit). | Doc merged; security review done; checklist appended. | +| DOCS-VEX-30-004 | TODO | Docs Guild, VEX Lens Guild | VEXLENS-30-007 | Publish `/docs/vex/consensus-api.md` with endpoint specs, query params, rate limits. | API doc aligned with OpenAPI; examples validated; checklist appended. | +| DOCS-VEX-30-005 | TODO | Docs Guild, Console Guild | CONSOLE-VEX-30-001 | Write `/docs/vex/consensus-console.md` covering UI workflows, filters, conflicts, accessibility. | Doc merged; screenshots added; checklist appended. | +| DOCS-VEX-30-006 | TODO | Docs Guild, Policy Guild | POLICY-ENGINE-29-001, VEXLENS-30-004 | Add `/docs/policy/vex-trust-model.md` detailing policy knobs, thresholds, simulation. | Doc merged; policy review completed; checklist appended. | +| DOCS-VEX-30-007 | TODO | Docs Guild, SBOM Service Guild | VEXLENS-30-002 | Publish `/docs/sbom/vex-mapping.md` (CPE→purl strategy, edge cases, overrides). | Doc merged; mapping tables validated; checklist appended. | +| DOCS-VEX-30-008 | TODO | Docs Guild, Security Guild | ISSUER-30-002, VEXLENS-30-003 | Deliver `/docs/security/vex-signatures.md` (verification flow, key rotation, audit). | Doc approved by Security; checklist appended. | +| DOCS-VEX-30-009 | TODO | Docs Guild, DevOps Guild | VEXLENS-30-009, DEVOPS-VEX-30-001 | Create `/docs/runbooks/vex-ops.md` for recompute storms, mapping failures, signature errors. | Runbook merged; rehearsal logged; checklist appended. | + +## Advisory AI (Sprint 31) + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DOCS-AIAI-31-001 | TODO | Docs Guild, Advisory AI Guild | AIAI-31-006 | Publish `/docs/advisory-ai/overview.md` covering capabilities, guardrails, RBAC. | Doc merged with diagrams; compliance checklist appended. | +| DOCS-AIAI-31-002 | TODO | Docs Guild, Advisory AI Guild | AIAI-31-004 | Author `/docs/advisory-ai/architecture.md` detailing RAG pipeline, deterministics, caching, model options. | Doc merged; architecture review done; checklist appended. | +| DOCS-AIAI-31-003 | TODO | Docs Guild, Advisory AI Guild | AIAI-31-006 | Write `/docs/advisory-ai/api.md` describing endpoints, schemas, errors, rate limits. | API doc aligned with OpenAPI; examples validated; checklist appended. | +| DOCS-AIAI-31-004 | TODO | Docs Guild, Console Guild | CONSOLE-VULN-29-001, CONSOLE-VEX-30-001 | Create `/docs/advisory-ai/console.md` with screenshots, a11y notes, copy-as-ticket instructions. | Doc merged; images stored; checklist appended. 
| +| DOCS-AIAI-31-005 | TODO | Docs Guild, DevEx/CLI Guild | CLI-VULN-29-001, CLI-VEX-30-001 | Publish `/docs/advisory-ai/cli.md` covering commands, exit codes, scripting patterns. | Doc merged; examples tested; checklist appended. | +| DOCS-AIAI-31-006 | TODO | Docs Guild, Policy Guild | POLICY-ENGINE-31-001 | Update `/docs/policy/assistant-parameters.md` covering temperature, token limits, ranking weights, TTLs. | Doc merged; policy review done; checklist appended. | +| DOCS-AIAI-31-007 | TODO | Docs Guild, Security Guild | AIAI-31-005 | Write `/docs/security/assistant-guardrails.md` detailing redaction, injection defense, logging. | Doc approved by Security; checklist appended. | +| DOCS-AIAI-31-008 | TODO | Docs Guild, SBOM Service Guild | SBOM-AIAI-31-001 | Publish `/docs/sbom/remediation-heuristics.md` (feasibility scoring, blast radius). | Doc merged; heuristics reviewed; checklist appended. | +| DOCS-AIAI-31-009 | TODO | Docs Guild, DevOps Guild | DEVOPS-AIAI-31-001 | Create `/docs/runbooks/assistant-ops.md` for warmup, cache priming, model outages, scaling. | Runbook merged; rehearsal logged; checklist appended. | + +## Notifications Studio + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DOCS-NOTIFY-38-001 | DONE (2025-10-29) | Docs Guild, Notifications Service Guild | NOTIFY-SVC-38-001..004 | Publish `/docs/notifications/overview.md` and `/docs/notifications/architecture.md`, each ending with imposed rule reminder. | Docs merged; diagrams verified; imposed rule appended. | +| DOCS-NOTIFY-39-002 | DONE (2025-10-29) | Docs Guild, Notifications Service Guild | NOTIFY-SVC-39-001..004 | Publish `/docs/notifications/rules.md`, `/docs/notifications/templates.md`, `/docs/notifications/digests.md` with examples and imposed rule line. | Docs merged; examples validated; imposed rule appended. | +| DOCS-NOTIFY-40-001 | TODO | Docs Guild, Security Guild | AUTH-NOTIFY-38-001, NOTIFY-SVC-40-001..004 | Publish `/docs/notifications/channels.md`, `/docs/notifications/escalations.md`, `/docs/notifications/api.md`, `/docs/operations/notifier-runbook.md`, `/docs/security/notifications-hardening.md`; each ends with imposed rule line. | Docs merged; accessibility checks passed; imposed rule appended. | + +## CLI Parity & Task Packs + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DOCS-CLI-41-001 | TODO | Docs Guild, DevEx/CLI Guild | CLI-CORE-41-001 | Publish `/docs/cli/overview.md`, `/docs/cli/configuration.md`, `/docs/cli/output-and-exit-codes.md` with imposed rule statements. | Docs merged; examples verified; imposed rule appended. | +| DOCS-CLI-42-001 | TODO | Docs Guild | DOCS-CLI-41-001, CLI-PARITY-41-001 | Publish `/docs/cli/parity-matrix.md` and command guides under `/docs/cli/commands/*.md` (policy, sbom, vuln, vex, advisory, export, orchestrator, notify, aoc, auth). | Guides merged; parity automation documented; imposed rule appended. | +| DOCS-PACKS-43-001 | DONE (2025-10-27) | Docs Guild, Task Runner Guild | PACKS-REG-42-001, TASKRUN-42-001 | Publish `/docs/task-packs/spec.md`, `/docs/task-packs/authoring-guide.md`, `/docs/task-packs/registry.md`, `/docs/task-packs/runbook.md`, `/docs/security/pack-signing-and-rbac.md`, `/docs/operations/cli-release-and-packaging.md` with imposed rule statements. | Docs merged; tutorials validated; imposed rule appended; cross-links added. 
| + +## Containerized Distribution (Epic 13) + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DOCS-INSTALL-44-001 | TODO | Docs Guild, Deployment Guild | COMPOSE-44-001 | Publish `/docs/install/overview.md` and `/docs/install/compose-quickstart.md` with imposed rule line and copy-ready commands. | Docs merged; screenshots/commands verified; imposed rule appended. | +| DOCS-INSTALL-45-001 | TODO | Docs Guild, Deployment Guild | HELM-45-001 | Publish `/docs/install/helm-prod.md` and `/docs/install/configuration-reference.md` with values tables and imposed rule reminder. | Docs merged; configuration matrix verified; imposed rule appended. | +| DOCS-INSTALL-46-001 | TODO | Docs Guild, Security Guild | DEPLOY-PACKS-43-001, CLI-PACKS-43-001 | Publish `/docs/install/airgap.md`, `/docs/security/supply-chain.md`, `/docs/operations/health-and-readiness.md`, `/docs/release/image-catalog.md`, `/docs/console/onboarding.md` (each with imposed rule). | Docs merged; checksum/signature sections validated; imposed rule appended. | + +## Authority-Backed Scopes & Tenancy (Epic 14) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DOCS-TEN-47-001 | TODO | Docs Guild, Authority Core | AUTH-TEN-47-001 | Publish `/docs/security/tenancy-overview.md` and `/docs/security/scopes-and-roles.md` outlining scope grammar, tenant model, imposed rule reminder. | Docs merged; diagrams included; imposed rule appended. | +| DOCS-TEN-48-001 | TODO | Docs Guild, Platform Ops | WEB-TEN-48-001 | Publish `/docs/operations/multi-tenancy.md`, `/docs/operations/rls-and-data-isolation.md`, `/docs/console/admin-tenants.md`. | Docs merged; examples validated; imposed rule appended. | +| DOCS-TEN-49-001 | TODO | Docs & DevEx Guilds | CLI-TEN-47-001, AUTH-TEN-49-001 | Publish `/docs/cli/authentication.md`, `/docs/api/authentication.md`, `/docs/policy/examples/abac-overlays.md`, update `/docs/install/configuration-reference.md` with new env vars, all ending with imposed rule line. | Docs merged; command examples verified; imposed rule appended. | diff --git a/docs/accessibility.md b/docs/accessibility.md index 119b2603..09be0df3 100644 --- a/docs/accessibility.md +++ b/docs/accessibility.md @@ -1,131 +1,131 @@ -# StellaOps Console Accessibility Guide - -> **Audience:** Accessibility Guild, Console Guild, Docs Guild, QA. -> **Scope:** Keyboard interaction model, screen-reader behaviour, colour & focus tokens, testing workflows, offline considerations, and compliance checklist for the StellaOps Console (Sprint 23). - -The console targets **WCAG 2.2 AA** across all supported browsers (Chromium, Firefox ESR) and honours StellaOps’ sovereign/offline constraints. Every build must keep keyboard-only users, screen-reader users, and high-contrast operators productive without relying on third-party services. - ---- - -## 1 · Accessibility Principles - -1. **Deterministic navigation** – Focus order, shortcuts, and announcements remain stable across releases; URLs encode state for deep links. -2. **Keyboard-first design** – Every actionable element is reachable via keyboard; shortcuts provide accelerators, and remapping is available via *Settings → Accessibility → Keyboard shortcuts*. -3. **Assistive technology parity** – ARIA roles and live regions mirror visual affordances (status banners, SSE tickers, progress drawers). 
Screen readers receive polite/atomic updates to avoid chatter. -4. **Colour & contrast tokens** – All palettes derive from design tokens that achieve ≥ 4.5:1 contrast (text) and ≥ 3:1 for graphical indicators; tokens pass automated contrast linting. -5. **Offline equivalence** – Accessibility features (shortcuts, offline banners, focus restoration) behave the same in sealed environments, with guidance when actions require online authority. - ---- - -## 2 · Keyboard Interaction Map - -### 2.1 Global shortcuts - -| Action | Macs | Windows/Linux | Notes | -|--------|------|---------------|-------| -| Command palette | `⌘ K` | `Ctrl K` | Focuses palette search; respects tenant scope. | -| Tenant picker | `⌘ T` | `Ctrl T` | Opens modal; `Enter` confirms, `Esc` cancels. | -| Filter tray toggle | `⇧ F` | `Shift F` | Focus lands on first filter; `Tab` cycles filters before returning to page. | -| Saved view presets | `⌘ 1-9` | `Ctrl 1-9` | Bound per tenant; missing preset triggers tooltip. | -| Keyboard reference | `?` | `?` | Opens overlay listing context-specific shortcuts; `Esc` closes. | -| Global search (context) | `/` | `/` | When the filter tray is closed, focuses inline search field. | - -### 2.2 Module-specific shortcuts - -| Module | Action | Macs | Windows/Linux | Notes | -|--------|--------|------|---------------|-------| -| Findings | Explain search | `⌘ /` | `Ctrl /` | Only when Explain drawer open; announces results via live region. | -| SBOM Explorer | Toggle overlays | `⌘ G` | `Ctrl G` | Persists per session (see `/docs/ui/sbom-explorer.md`). | -| Advisories & VEX | Provider filter | `⌘ ⌥ F` | `Ctrl Alt F` | Moves focus to provider chip row. | -| Runs | Refresh snapshot | `⌘ R` | `Ctrl R` | Soft refresh of SSE state; no full page reload. | -| Policies | Save draft | `⌘ S` | `Ctrl S` | Requires edit scope; exposes toast + status live update. | -| Downloads | Copy CLI command | `⇧ D` | `Shift D` | Copies manifest or export command; toast announces scope hints. | - -All shortcuts are remappable. Remaps persist in IndexedDB (per tenant) and export as part of profile bundles so operators can restore preferences offline. - ---- - -## 3 · Screen Reader & Focus Behaviour - -- **Skip navigation** – Each route exposes a “Skip to content” link revealed on keyboard focus. Focus order: global header → page breadcrumb → action shelf → data grid/list → drawers/dialogs. -- **Live regions** – Status ticker and SSE progress bars use `aria-live="polite"` with throttling to avoid flooding AT. Error toasts use `aria-live="assertive"` and auto-focus dismiss buttons. -- **Drawers & modals** – Dialog components trap focus, support `Esc` to close, and restore focus to the launching control. Screen readers announce title + purpose. -- **Tables & grids** – Large tables (Findings, SBOM inventory) switch to virtualised rows but retain ARIA grid semantics (`aria-rowcount`, `aria-colindex`). Column headers include sorting state via `aria-sort`. -- **Tenancy context** – Tenant badge exposes `aria-describedby` linking to context summary (environment, offline snapshot). Switching tenant queues a polite announcement summarising new scope. -- **Command palette** – Uses `role="dialog"` with search input labelled. Keyboard navigation within results uses `Up/Down`; screen readers announce result category + command. -- **Offline banner** – When offline, a dismissible banner announces reason and includes instructions for CLI fallback. The banner has `role="status"` so it announces once without stealing focus. 
- ---- - -## 4 · Colour & Focus Tokens - -Console consumes design tokens published by the Console Guild (tracked via CONSOLE-FEAT-23-102). Tokens live in the design system bundle (`ui/design/tokens/colors.json`, mirrored at build time). Key tokens: - -| Token | Purpose | Contrast target | -|-------|---------|-----------------| -| `so-color-surface-base` | Primary surface/background | ≥ 4.5:1 against `so-color-text-primary`. | -| `so-color-surface-raised` | Cards, drawers, modals | ≥ 3:1 against surrounding surfaces. | -| `so-color-text-primary` | Default text colour | ≥ 4.5:1 against base surfaces. | -| `so-color-text-inverted` | Text on accent buttons | ≥ 4.5:1 against accent fills. | -| `so-color-accent-primary` | Action buttons, focus headings | ≥ 3:1 against surface. | -| `so-color-status-critical` | Error toasts, violation chips | ≥ 4.5:1 for text; `critical-bg` provides >3:1 on neutral surface. | -| `so-color-status-warning` | Warning banners | Meets 3:1 on surface and 4.5:1 for text overlays. | -| `so-color-status-success` | Success toasts, pass badges | ≥ 3:1 for iconography; text uses `text-primary`. | -| `so-focus-ring` | 2 px outline used across focusable elements | 3:1 against both light/dark surfaces. | - -Colour tokens undergo automated linting (**axe-core contrast checks** + custom luminance script) during build. Any new token must include dark/light variants and pass the token contract tests. - ---- - -## 5 · Testing Workflow - -| Layer | Tooling | Frequency | Notes | -|-------|---------|-----------|-------| -| Component a11y | Storybook + axe-core addon | On PR (story CI) | Fails when axe detects violations. | -| Route regression | Playwright a11y sweep (`pnpm test:a11y`) | Nightly & release pipeline | Executes keyboard navigation, checks focus trap, runs Axe on key routes (Dashboard, Findings, SBOM, Admin). | -| Colour contrast lint | Token validator (`tools/a11y/check-contrast.ts`) | On token change | Guards design token updates. | -| CI parity | Pending `scripts/check-console-cli-parity.sh` (CONSOLE-DOC-23-502) | Release CI | Ensures CLI commands documented for parity features. | -| Screen-reader spot checks | Manual NVDA + VoiceOver scripts | Pre-release checklist | Scenarios: tenant switch, explain drawer, downloads parity copy. | -| Offline smoke | `stella offline kit import` + Playwright sealed-mode run | Prior to Offline Kit cut | Validates offline banners, disabled actions, keyboard flows without Authority. | - -Accessibility QA (CONSOLE-QA-23-402) tracks failing scenarios via Playwright snapshots and publishes reports in the Downloads parity channel (`kind = "parity.report"` placeholder until CLI parity CI lands). - ---- - -## 6 · Offline & Internationalisation Considerations - -- Offline mode surfaces staleness badges and disables remote-only palette entries; keyboard focus skips disabled controls. -- Saved shortcuts, presets, and remaps serialise into Offline Kit bundles so operators can restore preferences post-import. -- Locale switching (future feature flag) will load translations at runtime; ensure ARIA labels use i18n tokens rather than hard-coded strings. -- For sealed installs, guidance panels include CLI equivalents (`stella auth fresh-auth`, `stella runs export`) to unblock tasks when Authority is unavailable. - ---- - -## 7 · Compliance Checklist - -- [ ] Keyboard shortcut matrix validated (default + remapped) and documented. -- [ ] Screen-reader pass recorded for tenant switch, Explain drawer, Downloads copy-to-clipboard. 
-- [ ] Colour tokens audited; contrast reports stored with release artifacts. -- [ ] Automated a11y pipelines (Storybook axe, Playwright a11y) green; failures feed the `#console-qa` channel. -- [ ] Offline kit a11y smoke executed before publishing each bundle. -- [ ] CLI parity gaps logged in `/docs/cli-vs-ui-parity.md`; UI callouts reference fallback commands until parity closes. -- [ ] Accessibility Guild sign-off captured in sprint log and release notes reference this guide. -- [ ] References cross-checked (`/docs/ui/navigation.md`, `/docs/ui/downloads.md`, `/docs/security/console-security.md`, `/docs/observability/ui-telemetry.md`). - ---- - -## 8 · References - -- `/docs/ui/navigation.md` – shortcut definitions, URL schema. -- `/docs/ui/downloads.md` – CLI parity and offline copy workflows. -- `/docs/ui/console-overview.md` – tenant model, filter behaviours. -- `/docs/security/console-security.md` – security metrics and DPoP/fresh-auth requirements. -- `/docs/observability/ui-telemetry.md` – telemetry metrics mapped to accessibility features. -- `/docs/cli-vs-ui-parity.md` – parity status per console feature. -- `CONSOLE-QA-23-402` – Accessibility QA backlog (Playwright + manual checks). -- `CONSOLE-FEAT-23-102` – Design tokens & theming delivery. - ---- - -*Last updated: 2025-10-28 (Sprint 23).* - +# StellaOps Console Accessibility Guide + +> **Audience:** Accessibility Guild, Console Guild, Docs Guild, QA. +> **Scope:** Keyboard interaction model, screen-reader behaviour, colour & focus tokens, testing workflows, offline considerations, and compliance checklist for the StellaOps Console (Sprint 23). + +The console targets **WCAG 2.2 AA** across all supported browsers (Chromium, Firefox ESR) and honours StellaOps’ sovereign/offline constraints. Every build must keep keyboard-only users, screen-reader users, and high-contrast operators productive without relying on third-party services. + +--- + +## 1 · Accessibility Principles + +1. **Deterministic navigation** – Focus order, shortcuts, and announcements remain stable across releases; URLs encode state for deep links. +2. **Keyboard-first design** – Every actionable element is reachable via keyboard; shortcuts provide accelerators, and remapping is available via *Settings → Accessibility → Keyboard shortcuts*. +3. **Assistive technology parity** – ARIA roles and live regions mirror visual affordances (status banners, SSE tickers, progress drawers). Screen readers receive polite/atomic updates to avoid chatter. +4. **Colour & contrast tokens** – All palettes derive from design tokens that achieve ≥ 4.5:1 contrast (text) and ≥ 3:1 for graphical indicators; tokens pass automated contrast linting. +5. **Offline equivalence** – Accessibility features (shortcuts, offline banners, focus restoration) behave the same in sealed environments, with guidance when actions require online authority. + +--- + +## 2 · Keyboard Interaction Map + +### 2.1 Global shortcuts + +| Action | Macs | Windows/Linux | Notes | +|--------|------|---------------|-------| +| Command palette | `⌘ K` | `Ctrl K` | Focuses palette search; respects tenant scope. | +| Tenant picker | `⌘ T` | `Ctrl T` | Opens modal; `Enter` confirms, `Esc` cancels. | +| Filter tray toggle | `⇧ F` | `Shift F` | Focus lands on first filter; `Tab` cycles filters before returning to page. | +| Saved view presets | `⌘ 1-9` | `Ctrl 1-9` | Bound per tenant; missing preset triggers tooltip. | +| Keyboard reference | `?` | `?` | Opens overlay listing context-specific shortcuts; `Esc` closes. 
| +| Global search (context) | `/` | `/` | When the filter tray is closed, focuses inline search field. | + +### 2.2 Module-specific shortcuts + +| Module | Action | Macs | Windows/Linux | Notes | +|--------|--------|------|---------------|-------| +| Findings | Explain search | `⌘ /` | `Ctrl /` | Only when Explain drawer open; announces results via live region. | +| SBOM Explorer | Toggle overlays | `⌘ G` | `Ctrl G` | Persists per session (see `/docs/ui/sbom-explorer.md`). | +| Advisories & VEX | Provider filter | `⌘ ⌥ F` | `Ctrl Alt F` | Moves focus to provider chip row. | +| Runs | Refresh snapshot | `⌘ R` | `Ctrl R` | Soft refresh of SSE state; no full page reload. | +| Policies | Save draft | `⌘ S` | `Ctrl S` | Requires edit scope; exposes toast + status live update. | +| Downloads | Copy CLI command | `⇧ D` | `Shift D` | Copies manifest or export command; toast announces scope hints. | + +All shortcuts are remappable. Remaps persist in IndexedDB (per tenant) and export as part of profile bundles so operators can restore preferences offline. + +--- + +## 3 · Screen Reader & Focus Behaviour + +- **Skip navigation** – Each route exposes a “Skip to content” link revealed on keyboard focus. Focus order: global header → page breadcrumb → action shelf → data grid/list → drawers/dialogs. +- **Live regions** – Status ticker and SSE progress bars use `aria-live="polite"` with throttling to avoid flooding AT. Error toasts use `aria-live="assertive"` and auto-focus dismiss buttons. +- **Drawers & modals** – Dialog components trap focus, support `Esc` to close, and restore focus to the launching control. Screen readers announce title + purpose. +- **Tables & grids** – Large tables (Findings, SBOM inventory) switch to virtualised rows but retain ARIA grid semantics (`aria-rowcount`, `aria-colindex`). Column headers include sorting state via `aria-sort`. +- **Tenancy context** – Tenant badge exposes `aria-describedby` linking to context summary (environment, offline snapshot). Switching tenant queues a polite announcement summarising new scope. +- **Command palette** – Uses `role="dialog"` with search input labelled. Keyboard navigation within results uses `Up/Down`; screen readers announce result category + command. +- **Offline banner** – When offline, a dismissible banner announces reason and includes instructions for CLI fallback. The banner has `role="status"` so it announces once without stealing focus. + +--- + +## 4 · Colour & Focus Tokens + +Console consumes design tokens published by the Console Guild (tracked via CONSOLE-FEAT-23-102). Tokens live in the design system bundle (`ui/design/tokens/colors.json`, mirrored at build time). Key tokens: + +| Token | Purpose | Contrast target | +|-------|---------|-----------------| +| `so-color-surface-base` | Primary surface/background | ≥ 4.5:1 against `so-color-text-primary`. | +| `so-color-surface-raised` | Cards, drawers, modals | ≥ 3:1 against surrounding surfaces. | +| `so-color-text-primary` | Default text colour | ≥ 4.5:1 against base surfaces. | +| `so-color-text-inverted` | Text on accent buttons | ≥ 4.5:1 against accent fills. | +| `so-color-accent-primary` | Action buttons, focus headings | ≥ 3:1 against surface. | +| `so-color-status-critical` | Error toasts, violation chips | ≥ 4.5:1 for text; `critical-bg` provides >3:1 on neutral surface. | +| `so-color-status-warning` | Warning banners | Meets 3:1 on surface and 4.5:1 for text overlays. 
| +| `so-color-status-success` | Success toasts, pass badges | ≥ 3:1 for iconography; text uses `text-primary`. | +| `so-focus-ring` | 2 px outline used across focusable elements | 3:1 against both light/dark surfaces. | + +Colour tokens undergo automated linting (**axe-core contrast checks** + custom luminance script) during build. Any new token must include dark/light variants and pass the token contract tests. + +--- + +## 5 · Testing Workflow + +| Layer | Tooling | Frequency | Notes | +|-------|---------|-----------|-------| +| Component a11y | Storybook + axe-core addon | On PR (story CI) | Fails when axe detects violations. | +| Route regression | Playwright a11y sweep (`pnpm test:a11y`) | Nightly & release pipeline | Executes keyboard navigation, checks focus trap, runs Axe on key routes (Dashboard, Findings, SBOM, Admin). | +| Colour contrast lint | Token validator (`tools/a11y/check-contrast.ts`) | On token change | Guards design token updates. | +| CI parity | Pending `scripts/check-console-cli-parity.sh` (CONSOLE-DOC-23-502) | Release CI | Ensures CLI commands documented for parity features. | +| Screen-reader spot checks | Manual NVDA + VoiceOver scripts | Pre-release checklist | Scenarios: tenant switch, explain drawer, downloads parity copy. | +| Offline smoke | `stella offline kit import` + Playwright sealed-mode run | Prior to Offline Kit cut | Validates offline banners, disabled actions, keyboard flows without Authority. | + +Accessibility QA (CONSOLE-QA-23-402) tracks failing scenarios via Playwright snapshots and publishes reports in the Downloads parity channel (`kind = "parity.report"` placeholder until CLI parity CI lands). + +--- + +## 6 · Offline & Internationalisation Considerations + +- Offline mode surfaces staleness badges and disables remote-only palette entries; keyboard focus skips disabled controls. +- Saved shortcuts, presets, and remaps serialise into Offline Kit bundles so operators can restore preferences post-import. +- Locale switching (future feature flag) will load translations at runtime; ensure ARIA labels use i18n tokens rather than hard-coded strings. +- For sealed installs, guidance panels include CLI equivalents (`stella auth fresh-auth`, `stella runs export`) to unblock tasks when Authority is unavailable. + +--- + +## 7 · Compliance Checklist + +- [ ] Keyboard shortcut matrix validated (default + remapped) and documented. +- [ ] Screen-reader pass recorded for tenant switch, Explain drawer, Downloads copy-to-clipboard. +- [ ] Colour tokens audited; contrast reports stored with release artifacts. +- [ ] Automated a11y pipelines (Storybook axe, Playwright a11y) green; failures feed the `#console-qa` channel. +- [ ] Offline kit a11y smoke executed before publishing each bundle. +- [ ] CLI parity gaps logged in `/docs/cli-vs-ui-parity.md`; UI callouts reference fallback commands until parity closes. +- [ ] Accessibility Guild sign-off captured in sprint log and release notes reference this guide. +- [ ] References cross-checked (`/docs/ui/navigation.md`, `/docs/ui/downloads.md`, `/docs/security/console-security.md`, `/docs/observability/ui-telemetry.md`). + +--- + +## 8 · References + +- `/docs/ui/navigation.md` – shortcut definitions, URL schema. +- `/docs/ui/downloads.md` – CLI parity and offline copy workflows. +- `/docs/ui/console-overview.md` – tenant model, filter behaviours. +- `/docs/security/console-security.md` – security metrics and DPoP/fresh-auth requirements. 
+- `/docs/observability/ui-telemetry.md` – telemetry metrics mapped to accessibility features. +- `/docs/cli-vs-ui-parity.md` – parity status per console feature. +- `CONSOLE-QA-23-402` – Accessibility QA backlog (Playwright + manual checks). +- `CONSOLE-FEAT-23-102` – Design tokens & theming delivery. + +--- + +*Last updated: 2025-10-28 (Sprint 23).* + diff --git a/docs/advisories/aggregation.md b/docs/advisories/aggregation.md index c1c0038c..cff7b79e 100644 --- a/docs/advisories/aggregation.md +++ b/docs/advisories/aggregation.md @@ -1,218 +1,218 @@ -# Advisory Observations & Linksets - -> Imposed rule: Work of this type or tasks of this type on this component must also -> be applied everywhere else it should be applied. - -The Link-Not-Merge (LNM) initiative replaces the legacy "merge" pipeline with -immutable observations and correlation linksets. This guide explains how -Concelier ingests advisory statements, preserves upstream truth, and produces -linksets that downstream services (Policy Engine, Vuln Explorer, Console) can -use without collapsing sources together. - ---- - -## 1. Model overview - -### 1.1 Observation lifecycle - -1. **Ingest** – Connectors fetch upstream payloads (CSAF, OSV, vendor feeds), - validate signatures, and drop any derived fields prohibited by the - Aggregation-Only Contract (AOC). -2. **Persist** – Concelier writes immutable `advisory_observations` scoped by - `tenant`, `(source.vendor, upstreamId)`, and `contentHash`. Supersedes chains - capture revisions without mutating history. -3. **Expose** – WebService surfaces paged/read APIs; Offline Kit snapshots - include the same documents for air-gapped installs. - -Observation schema highlights: - -```text -observationId = {tenant}:{source.vendor}:{upstreamId}:{revision} -tenant, source{vendor, stream, api, collectorVersion} -upstream{upstreamId, documentVersion, fetchedAt, receivedAt, - contentHash, signature{present, format, keyId, signature}} -content{format, specVersion, raw} -identifiers{cve?, ghsa?, aliases[], osvIds[]} -linkset{purls[], cpes[], aliases[], references[], conflicts[]?} -createdAt, attributes{batchId?, replayCursor?} -``` - -- **Immutable raw** (`content.raw`) mirrors upstream payloads exactly. -- **Provenance** (`source.*`, `upstream.*`) satisfies AOC guardrails and enables - cryptographic attestations. -- **Identifiers** retain lossless extracts (CVE, GHSA, vendor aliases) that seed - linksets. -- **Linkset** captures join hints but never merges or adds derived severity. - -### 1.2 Linkset lifecycle - -Linksets correlate observations that describe the same vulnerable product while -keeping each source intact. - -1. **Seed** – Observations emit normalized identifiers (`purl`, `cpe`, - `alias`) during ingestion. -2. **Correlate** – Linkset builder groups observations by tenant, product - coordinates, and equivalence signals (PURL alias graph, CVE overlap, CVSS - vector equality, fuzzy titles). -3. **Annotate** – Detected conflicts (severity disagreements, affected-range - mismatch, incompatible references) are recorded with structured payloads and - preserved for UI/API export. -4. **Persist** – Results land in `advisory_linksets` with deterministic IDs - (`linksetId = {tenant}:{hash(aliases+purls+seedIds)}`) and append-only history - for reproducibility. - -Linksets never suppress or prefer one source; they provide aligned evidence so -other services can apply policy. - ---- - -## 2. Observation vs. linkset - -- **Purpose** - - Observation: Immutable record per vendor and revision. 
- - Linkset: Correlates observations that share product identity. -- **Mutation** - - Observation: Append-only via supersedes chain. - - Linkset: Rebuilt deterministically from canonical signals. -- **Allowed fields** - - Observation: Raw payload, provenance, identifiers, join hints. - - Linkset: Observation references, normalized product metadata, conflicts. -- **Forbidden fields** - - Observation: Derived severity, policy status, opinionated dedupe. - - Linkset: Derived severity (conflicts recorded but unresolved). -- **Consumers** - - Observation: Evidence API, Offline Kit, CLI exports. - - Linkset: Policy Engine overlay, UI evidence panel, Vuln Explorer. - -### 2.1 Example sequence - -1. Red Hat PSIRT publishes RHSA-2025:1234 for OpenSSL; Concelier inserts an - observation for vendor `redhat` with `pkg:rpm/redhat/openssl@1.1.1w-12`. -2. NVD issues CVE-2025-0001; a second observation is inserted for vendor `nvd`. -3. Linkset builder runs, groups the two observations, records alias and PURL - overlap, and flags a CVSS disagreement (`7.5` vs `7.2`). -4. Policy Engine reads the linkset, recognises the severity variance, and relies - on configured rules to decide the effective output. - ---- - -## 3. Conflict handling - -Conflicts record disagreements without altering source payloads. The builder -emits structured entries: - -```json -{ - "type": "severity-mismatch", - "field": "cvss.baseScore", - "observations": [ - { - "source": "redhat", - "value": "7.5", - "vector": "AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:N/A:N" - }, - { - "source": "nvd", - "value": "7.2", - "vector": "AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:H/A:N" - } - ], - "confidence": "medium", - "detectedAt": "2025-10-27T14:00:00Z" -} -``` - -Supported conflict classes: - -- `severity-mismatch` – CVSS or qualitative severities differ. -- `affected-range-divergence` – Product ranges, fixed versions, or platforms - disagree. -- `statement-disagreement` – One observation declares `not_affected` while - another states `affected`. -- `reference-clash` – URL or classifier collisions (for example, exploit URL vs - conflicting advisory). -- `alias-inconsistency` – Aliases map to different canonical IDs (GHSA vs CVE). -- `metadata-gap` – Required provenance missing on one source; logged as a - warning. - -Conflict surfaces: - -- WebService endpoints (`GET /advisories/linksets/{id}` → `conflicts[]`). -- UI evidence panel chips and conflict badges. -- CLI exports (JSON/OSV) exposed through LNM commands. -- Observability metrics (`advisory_linkset_conflicts_total{type}`). - ---- - -## 4. AOC alignment - -Observations and linksets must satisfy Aggregation-Only Contract invariants: - -- **No derived severity** – `content.raw` may include upstream severity, but the - observation body never injects or edits severity. -- **No merges** – Each upstream document stays separate; linksets reference - observations via deterministic IDs. -- **Provenance mandatory** – Missing `signature` or `source` metadata is an AOC - violation (`ERR_AOC_004`). -- **Idempotent writes** – Duplicate `contentHash` yields a no-op; supersedes - pointer captures new revisions. -- **Deterministic output** – Linkset builder sorts keys, normalizes timestamps - (UTC ISO-8601), and uses canonical JSON hashing. - -Violations trigger guard errors (`ERR_AOC_00x`), emit `aoc_violation_total` -metrics, and block persistence until corrected. - ---- - -## 5. Downstream consumption - -- **Policy Engine** – Computes effective severity and risk overlays from linkset - evidence and conflicts. 
-- **Console UI** – Renders per-source statements, signed hashes, and conflict - banners inside the evidence panel. -- **CLI (`stella advisories linkset …`)** – Exports observations and linksets as - JSON or OSV for offline triage. -- **Offline Kit** – Shipping snapshots include observation and linkset - collections for air-gap parity. -- **Observability** – Dashboards track ingestion latency, conflict counts, and - supersedes depth. - -When adding new consumers, ensure they honour append-only semantics and do not -mutate observation or linkset collections. - ---- - -## 6. Validation & testing - -- **Unit tests** (`StellaOps.Concelier.Core.Tests`) validate schema guards, - deterministic linkset hashing, conflict detection fixtures, and supersedes - chains. -- **Mongo integration tests** (`StellaOps.Concelier.Storage.Mongo.Tests`) verify - indexes and idempotent writes under concurrency. -- **CLI smoke suites** confirm `stella advisories observations` and `stella - advisories linksets` export stable JSON. -- **Determinism checks** replay identical upstream payloads and assert that the - resulting observation and linkset documents match byte for byte. -- **Offline kit verification** simulates air-gapped bootstrap to confirm that - snapshots align with live data. - -Add fixtures whenever a new conflict type or correlation signal is introduced. -Ensure canonical JSON serialization remains stable across .NET runtime updates. - ---- - -## 7. Reviewer checklist - -- Observation schema segment matches the latest `StellaOps.Concelier.Models` - contract. -- Linkset lifecycle covers correlation signals, conflict classes, and - deterministic IDs. -- AOC invariants are explicitly called out with violation codes. -- Examples include multi-source correlation plus conflict annotation. -- Downstream consumer guidance reflects active APIs and CLI features. -- Testing section lists required suites (Core, Storage, CLI, Offline). -- Imposed rule reminder is present at the top of the document. - -Confirmed against Concelier Link-Not-Merge tasks: -`CONCELIER-LNM-21-001..005`, `CONCELIER-LNM-21-101..103`, -`CONCELIER-LNM-21-201..203`. +# Advisory Observations & Linksets + +> Imposed rule: Work of this type or tasks of this type on this component must also +> be applied everywhere else it should be applied. + +The Link-Not-Merge (LNM) initiative replaces the legacy "merge" pipeline with +immutable observations and correlation linksets. This guide explains how +Concelier ingests advisory statements, preserves upstream truth, and produces +linksets that downstream services (Policy Engine, Vuln Explorer, Console) can +use without collapsing sources together. + +--- + +## 1. Model overview + +### 1.1 Observation lifecycle + +1. **Ingest** – Connectors fetch upstream payloads (CSAF, OSV, vendor feeds), + validate signatures, and drop any derived fields prohibited by the + Aggregation-Only Contract (AOC). +2. **Persist** – Concelier writes immutable `advisory_observations` scoped by + `tenant`, `(source.vendor, upstreamId)`, and `contentHash`. Supersedes chains + capture revisions without mutating history. +3. **Expose** – WebService surfaces paged/read APIs; Offline Kit snapshots + include the same documents for air-gapped installs. 
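+
+As a hedged illustration of the persist step above, the sketch below shows how a deterministic observation identifier and a content-addressed idempotence check could fit together. The type and member names are hypothetical and only mirror the schema fields documented in this guide; they are not the actual `StellaOps.Concelier` contracts.
+
+```csharp
+// Hypothetical sketch only: deterministic observation identity plus an
+// idempotent append check keyed by (vendor, upstreamId, contentHash).
+using System;
+using System.Collections.Generic;
+using System.Security.Cryptography;
+using System.Text;
+
+public sealed record ObservationKey(string Tenant, string Vendor, string UpstreamId, int Revision)
+{
+    // observationId = {tenant}:{source.vendor}:{upstreamId}:{revision}
+    public string ObservationId => $"{Tenant}:{Vendor}:{UpstreamId}:{Revision}";
+}
+
+public static class ObservationPersistence
+{
+    // Content hash over the immutable raw payload; re-imports of the same
+    // payload hash to the same value and therefore become no-ops.
+    public static string ContentHash(string rawPayload)
+    {
+        var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(rawPayload));
+        return "sha256:" + Convert.ToHexString(bytes).ToLowerInvariant();
+    }
+
+    // True when the tuple is new and the observation should be appended;
+    // false when the same (vendor, upstreamId, contentHash) was already stored.
+    public static bool ShouldAppend(
+        ISet<(string Vendor, string UpstreamId, string Hash)> existing,
+        string vendor, string upstreamId, string contentHash)
+        => existing.Add((vendor, upstreamId, contentHash));
+}
+```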
+ +Observation schema highlights: + +```text +observationId = {tenant}:{source.vendor}:{upstreamId}:{revision} +tenant, source{vendor, stream, api, collectorVersion} +upstream{upstreamId, documentVersion, fetchedAt, receivedAt, + contentHash, signature{present, format, keyId, signature}} +content{format, specVersion, raw} +identifiers{cve?, ghsa?, aliases[], osvIds[]} +linkset{purls[], cpes[], aliases[], references[], conflicts[]?} +createdAt, attributes{batchId?, replayCursor?} +``` + +- **Immutable raw** (`content.raw`) mirrors upstream payloads exactly. +- **Provenance** (`source.*`, `upstream.*`) satisfies AOC guardrails and enables + cryptographic attestations. +- **Identifiers** retain lossless extracts (CVE, GHSA, vendor aliases) that seed + linksets. +- **Linkset** captures join hints but never merges or adds derived severity. + +### 1.2 Linkset lifecycle + +Linksets correlate observations that describe the same vulnerable product while +keeping each source intact. + +1. **Seed** – Observations emit normalized identifiers (`purl`, `cpe`, + `alias`) during ingestion. +2. **Correlate** – Linkset builder groups observations by tenant, product + coordinates, and equivalence signals (PURL alias graph, CVE overlap, CVSS + vector equality, fuzzy titles). +3. **Annotate** – Detected conflicts (severity disagreements, affected-range + mismatch, incompatible references) are recorded with structured payloads and + preserved for UI/API export. +4. **Persist** – Results land in `advisory_linksets` with deterministic IDs + (`linksetId = {tenant}:{hash(aliases+purls+seedIds)}`) and append-only history + for reproducibility. + +Linksets never suppress or prefer one source; they provide aligned evidence so +other services can apply policy. + +--- + +## 2. Observation vs. linkset + +- **Purpose** + - Observation: Immutable record per vendor and revision. + - Linkset: Correlates observations that share product identity. +- **Mutation** + - Observation: Append-only via supersedes chain. + - Linkset: Rebuilt deterministically from canonical signals. +- **Allowed fields** + - Observation: Raw payload, provenance, identifiers, join hints. + - Linkset: Observation references, normalized product metadata, conflicts. +- **Forbidden fields** + - Observation: Derived severity, policy status, opinionated dedupe. + - Linkset: Derived severity (conflicts recorded but unresolved). +- **Consumers** + - Observation: Evidence API, Offline Kit, CLI exports. + - Linkset: Policy Engine overlay, UI evidence panel, Vuln Explorer. + +### 2.1 Example sequence + +1. Red Hat PSIRT publishes RHSA-2025:1234 for OpenSSL; Concelier inserts an + observation for vendor `redhat` with `pkg:rpm/redhat/openssl@1.1.1w-12`. +2. NVD issues CVE-2025-0001; a second observation is inserted for vendor `nvd`. +3. Linkset builder runs, groups the two observations, records alias and PURL + overlap, and flags a CVSS disagreement (`7.5` vs `7.2`). +4. Policy Engine reads the linkset, recognises the severity variance, and relies + on configured rules to decide the effective output. + +--- + +## 3. Conflict handling + +Conflicts record disagreements without altering source payloads. 
The builder +emits structured entries: + +```json +{ + "type": "severity-mismatch", + "field": "cvss.baseScore", + "observations": [ + { + "source": "redhat", + "value": "7.5", + "vector": "AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:N/A:N" + }, + { + "source": "nvd", + "value": "7.2", + "vector": "AV:N/AC:L/PR:N/UI:N/S:U/C:N/I:H/A:N" + } + ], + "confidence": "medium", + "detectedAt": "2025-10-27T14:00:00Z" +} +``` + +Supported conflict classes: + +- `severity-mismatch` – CVSS or qualitative severities differ. +- `affected-range-divergence` – Product ranges, fixed versions, or platforms + disagree. +- `statement-disagreement` – One observation declares `not_affected` while + another states `affected`. +- `reference-clash` – URL or classifier collisions (for example, exploit URL vs + conflicting advisory). +- `alias-inconsistency` – Aliases map to different canonical IDs (GHSA vs CVE). +- `metadata-gap` – Required provenance missing on one source; logged as a + warning. + +Conflict surfaces: + +- WebService endpoints (`GET /advisories/linksets/{id}` → `conflicts[]`). +- UI evidence panel chips and conflict badges. +- CLI exports (JSON/OSV) exposed through LNM commands. +- Observability metrics (`advisory_linkset_conflicts_total{type}`). + +--- + +## 4. AOC alignment + +Observations and linksets must satisfy Aggregation-Only Contract invariants: + +- **No derived severity** – `content.raw` may include upstream severity, but the + observation body never injects or edits severity. +- **No merges** – Each upstream document stays separate; linksets reference + observations via deterministic IDs. +- **Provenance mandatory** – Missing `signature` or `source` metadata is an AOC + violation (`ERR_AOC_004`). +- **Idempotent writes** – Duplicate `contentHash` yields a no-op; supersedes + pointer captures new revisions. +- **Deterministic output** – Linkset builder sorts keys, normalizes timestamps + (UTC ISO-8601), and uses canonical JSON hashing. + +Violations trigger guard errors (`ERR_AOC_00x`), emit `aoc_violation_total` +metrics, and block persistence until corrected. + +--- + +## 5. Downstream consumption + +- **Policy Engine** – Computes effective severity and risk overlays from linkset + evidence and conflicts. +- **Console UI** – Renders per-source statements, signed hashes, and conflict + banners inside the evidence panel. +- **CLI (`stella advisories linkset …`)** – Exports observations and linksets as + JSON or OSV for offline triage. +- **Offline Kit** – Shipping snapshots include observation and linkset + collections for air-gap parity. +- **Observability** – Dashboards track ingestion latency, conflict counts, and + supersedes depth. + +When adding new consumers, ensure they honour append-only semantics and do not +mutate observation or linkset collections. + +--- + +## 6. Validation & testing + +- **Unit tests** (`StellaOps.Concelier.Core.Tests`) validate schema guards, + deterministic linkset hashing, conflict detection fixtures, and supersedes + chains. +- **Mongo integration tests** (`StellaOps.Concelier.Storage.Mongo.Tests`) verify + indexes and idempotent writes under concurrency. +- **CLI smoke suites** confirm `stella advisories observations` and `stella + advisories linksets` export stable JSON. +- **Determinism checks** replay identical upstream payloads and assert that the + resulting observation and linkset documents match byte for byte. +- **Offline kit verification** simulates air-gapped bootstrap to confirm that + snapshots align with live data. 
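+
+A minimal determinism-check sketch, assuming a hypothetical replay helper rather than the real test suites listed above: ingest the same upstream payload twice and require byte-identical canonical output.
+
+```csharp
+// Hypothetical determinism check: replaying the same upstream payload must
+// produce byte-identical canonical JSON and therefore identical hashes.
+// The real pipeline uses its own canonical JSON writer; serialisation here
+// is only an approximation for illustration.
+using System;
+using System.Security.Cryptography;
+using System.Text;
+using System.Text.Json;
+
+public static class DeterminismCheck
+{
+    public static string CanonicalHash(object document)
+    {
+        var json = JsonSerializer.Serialize(document);
+        var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(json));
+        return Convert.ToHexString(bytes).ToLowerInvariant();
+    }
+
+    public static void AssertStable(Func<object> ingestReplay)
+    {
+        var first = CanonicalHash(ingestReplay());
+        var second = CanonicalHash(ingestReplay());
+        if (!string.Equals(first, second, StringComparison.Ordinal))
+        {
+            throw new InvalidOperationException(
+                $"Replay produced different canonical documents: {first} != {second}");
+        }
+    }
+}
+```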
+ +Add fixtures whenever a new conflict type or correlation signal is introduced. +Ensure canonical JSON serialization remains stable across .NET runtime updates. + +--- + +## 7. Reviewer checklist + +- Observation schema segment matches the latest `StellaOps.Concelier.Models` + contract. +- Linkset lifecycle covers correlation signals, conflict classes, and + deterministic IDs. +- AOC invariants are explicitly called out with violation codes. +- Examples include multi-source correlation plus conflict annotation. +- Downstream consumer guidance reflects active APIs and CLI features. +- Testing section lists required suites (Core, Storage, CLI, Offline). +- Imposed rule reminder is present at the top of the document. + +Confirmed against Concelier Link-Not-Merge tasks: +`CONCELIER-LNM-21-001..005`, `CONCELIER-LNM-21-101..103`, +`CONCELIER-LNM-21-201..203`. diff --git a/docs/airgap/EPIC_16_AIRGAP_MODE.md b/docs/airgap/EPIC_16_AIRGAP_MODE.md index 9a11679c..3581e835 100644 --- a/docs/airgap/EPIC_16_AIRGAP_MODE.md +++ b/docs/airgap/EPIC_16_AIRGAP_MODE.md @@ -1,429 +1,429 @@ -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - ---- - -# Epic 16: Air‑Gapped Mode - -**Short name:** Air‑Gapped Mode -**Primary components:** Web Services API, Console, CLI, Orchestrator, Task Runner, Conseiller (Feedser), Excitator (VEXer), Policy Engine, Findings Ledger, Export Center, Authority & Tenancy, Notifications, Observability & Forensics -**Surfaces:** offline bootstrap, update ingestion via mirror bundles, sealed egress, deterministic jobs, offline advisories/VEX, offline policy packs, offline notifications, evidence exports -**Dependencies:** Export Center, Containerized Distribution, Authority‑Backed Scopes & Tenancy, Observability & Forensics, Policy Studio - -**AOC ground rule reminder:** Conseiller and Excitator aggregate and link advisories/VEX. They never merge or mutate source records. Air‑Gapped Mode must preserve this invariant even when mirroring and importing updates. - ---- - -## 1) What it is - -A fully supported operating profile where StellaOps runs in a disconnected environment with: - -* **Zero external egress** from platform services and jobs. -* **Deterministic inputs** provided via signed, offline **Mirror Bundles** (advisories, VEX, policy packs, vendor feeds, Stella metadata, container images, dashboards). -* **Offline bootstrap** for images and charts, plus reproducible configuration and cryptographically verifiable updates. -* **Graceful feature degradation** with explicit UX: features that require external connectivity are either backed by local artifacts or clearly disabled with an explanation. -* **Auditable import/export** including provenance attestations, evidence bundles, and chain‑of‑custody for all offline exchanges. - -Air‑Gapped Mode is selectable at install time and enforceable at runtime. When enabled, all components operate under an “egress sealed” policy and only consume data from local stores. - ---- - -## 2) Why - -Many users operate in classified, regulated, or high‑sensitivity networks where egress is prohibited. They still need SBOM analysis, policy evaluation, advisory/VEX mapping, and reporting. Air‑Gapped Mode provides the same core outcomes with verifiable offline inputs and explicit operational guardrails. - ---- - -## 3) How it should work - -### 3.1 Modes and lifecycle - -* **Connected Mode:** normal operation; can create Mirror Bundles on a staging host. 
-* **Sealed Air‑Gapped Mode:** platform enforces no egress. Only local resources are allowed. -* **Transition flow:** - - 1. Prepare an offline **Bootstrap Pack** with all container images, Helm/compose charts, seed database, and initial Mirror Bundle. - 2. Install in the air‑gapped enclave and **seal** egress. - 3. Periodically import new **Mirror Bundles** via removable media. - 4. Export evidence/reports as needed. - -### 3.2 Egress sealing - -* **Static guardrails:** - - * Platform flag `STELLA_AIRGAP=sealed` and database feature flag `env.mode='sealed'`. - * NetworkPolicy/iptables/eBPF deny‑all egress for namespaces/pods except loopback and the internal object store. - * Outbound DNS blocked. - * HTTP clients in code use a single `EgressPolicy` facade. When sealed, it panics on direct network calls and returns a typed error with remediation (“import a Mirror Bundle”). -* **Verification:** `GET /system/airgap/status` returns `sealed: true|false`, current policy hash, and last import timestamp. CLI prints warning if not sealed in declared air‑gapped install. - -### 3.3 Trusted time - -* Air‑gapped systems cannot NTP. Each Mirror Bundle includes a **signed time token** (Roughtime‑style or RFC 3161) from a trusted authority. On import, platform stores `time_anchor` for drift calculations and staleness checks. -* If time drift exceeds policy threshold, UI shows “stale view” badges and some jobs are blocked until a new bundle provides a fresh anchor. - -### 3.4 Mirror Bundles (offline updates) - -* **Content types:** - - * Public advisories (OSV, GHSA, vendor advisories), NVD mappings, CPE/Package metadata. - * VEX statements from vendors/communities. - * Policy packs (templates, baselines, versioned rule sets). - * StellaOps engine metadata and schema migrations. - * Optional: **OCI image set** for platform and recommended runners. - * Optional: dashboards and alert rule packs. -* **Format:** a TUF‑like layout: - - ``` - root.json, snapshot.json, timestamp.json, targets/ - advisories/*.jsonl.zst - vex/*.jsonl.zst - policy/*.tar.zst - images/* (OCI layout or oci-archive) - meta/engine/*.tgz - meta/time-anchor.json (signed) - ``` -* **Integrity & trust:** - - * DSSE‑signed target manifests. - * Root of trust rotated via `root.json` within strict policy; rotation requires manual dual approval in sealed mode. - * Each content artifact has a content digest and a **Merkle root** for the overall bundle. -* **Creation:** in connected networks, `stella mirror create --content advisories,vex,policy,images --since 2025-01-01 --out bundle.tgz`. -* **Import:** in air‑gap, `stella airgap import bundle.tgz`. The importer verifies DSSE, TUF metadata, Merkle root, then writes to local object store and updates catalog tables. -* **Idempotence:** imports are content‑addressed; re‑imports deduplicate. - -### 3.5 Deterministic jobs and sources - -* **Allowed sources:** filesystem, internal object store, tenant private registry, and pre‑approved connectors that don’t require external egress. -* **Disallowed in sealed mode:** remote package registries, web scrapers, outbound webhooks, cloud KMS unless on the enclave network. -* **Runner policy:** the Task Runner verifies job descriptors contain no network calls unless marked `internal:` with allow‑listed destinations. Violations fail at plan time with an explainable error. - -### 3.6 Conseiller and Excitator in air‑gap - -* **Conseiller (Feedser):** ingests advisories only from imported bundles or tenant local feeds. 
It preserves source identities and never merges. Linkage uses bundle‑provided cross‑refs and local heuristics. -* **Excitator (VEXer):** imports VEX records as‑is, links them to components and advisories, and records the origin bundle and statement digests. Consensus Lens (Epic 7) operates offline across the imported sources. - -### 3.7 Policy Engine and Studio - -* Policy packs are versioned and imported via bundles. -* Simulation and authoring work locally. Exports of new or updated policies can be packaged as **Policy Sub‑Bundles** for transfer back to connected environments if needed. -* Engine shows which rules depend on external evidence and how they degrade in sealed mode (e.g., “No external EPSS; using cached percentile from last bundle.”). - -### 3.8 Notifications in sealed mode - -* Default to **local delivery** only: SMTP relay inside enclave, syslog, file sink. -* External webhooks are disabled. -* Notification templates show “air‑gap compliant channel” tags to avoid misconfiguration. - -### 3.9 Observability & Forensics - -* Traces, logs, metrics remain local. -* Evidence Locker supports **portable evidence packages** for cross‑domain transfer: `stella forensic snapshot create --portable`. -* Importing an evidence bundle in another enclave verifies signatures and maintains chain‑of‑custody. - -### 3.10 Console and CLI behavior - -* Console shows a prominent **Air‑Gapped: Sealed** badge with last import time and staleness indicators for advisories, VEX, and policy packs. -* CLI commands gain `--sealed` awareness: any operation that would egress prints a refusal with remediation suggesting the appropriate import. - -### 3.11 Multi‑tenant and scope - -* Tenancy works unchanged. Bundle imports can target: - - * `--tenant-global`: shared catalogs (advisories, VEX, policy baselines). - * `--tenant=`: tenant‑specific content (e.g., private advisories). -* Authority scopes gain `airgap:import`, `airgap:status:read`, `airgap:seal` (admin‑only). - -### 3.12 Feature degradation matrix - -* **AI Assistant:** offline variants use local models if installed; otherwise feature is disabled with a message. -* **External reputation feeds (e.g., EPSS‑like):** replaced by cached values from the bundle. -* **Container base image lookups:** rely on imported metadata or tenant private registry. - ---- - -## 4) Architecture - -### 4.1 New modules - -* `airgap/controller` - - * Sealing state machine; status API; guardrails wiring into HTTP clients and runner. -* `airgap/importer` - - * TUF/DSSE verification, Merkle validation, object store loader, catalog updater. -* `mirror/creator` - - * Connected‑side builder for bundles; content plug‑ins for advisories/VEX/policy/images. -* `airgap/policy` - - * Enforcement library exposing `EgressPolicy` facade and job plan validators. -* `airgap/time` - - * Time anchor parser, drift checks, staleness annotations. -* `console/airgap` - - * Sealed badge, import UI, staleness dashboards, degradation notices. -* `cli/airgap` - - * `stella airgap seal|status|import|verify` commands; `stella mirror create|verify`. 
- -### 4.2 Data model additions - -* `airgap_state(id, sealed BOOLEAN, policy_hash TEXT, last_import_at TIMESTAMP, time_anchor JSONB)` -* `bundle_catalog(id, kind ENUM, merkle_root TEXT, dsse_signer TEXT, created_at TIMESTAMP, imported_at TIMESTAMP, scope ENUM('global','tenant'), tenant_id NULLABLE, labels JSONB)` -* `bundle_items(bundle_id, path TEXT, sha256 TEXT, size BIGINT, type TEXT, meta JSONB)` -* `import_audit(id, bundle_id, actor, tenant_scope, verify_result, trace_id, created_at)` - -RLS: tenant‑scoped rows when `scope='tenant'`; global rows readable only with `stella:airgap:status:read`. - -### 4.3 Storage layout - -Object store paths: - -``` -tenants/_global/mirror//targets/... -tenants//mirror//targets/... -tenants/_global/images//... -``` - -Evidence locker remains separate. Imported images use **OCI layout** for local registry sync. - -### 4.4 Message topics - -* `stella..airgap.imported` with bundle metadata. -* `stella..airgap.staleness` periodic events emitted for UX. -* `stella..policy.degraded` when rules fall back due to sealed mode. - ---- - -## 5) APIs and contracts - -### 5.1 Status and control - -* `GET /system/airgap/status` → `{ sealed, policy_hash, last_import_at, time_anchor, drift_seconds, staleness: { advisories_days, vex_days, policy_days } }` -* `POST /system/airgap/seal` → seals environment; requires `stella:airgap:seal#tenant/`. -* `POST /system/airgap/unseal` → only allowed if installed mode is not declared “permanently sealed” at bootstrap. Typically disabled. - -### 5.2 Import & verify - -* `POST /airgap/import` multipart or file reference → runs verify, writes catalog, returns bundle summary and warnings. -* `POST /airgap/verify` dry‑run verification returning DSSE/TUF and Merkle results. -* `GET /airgap/bundles` list imported bundles with filters. - -### 5.3 Conseiller/Excitator sources - -* `POST /feeds/register` supports `kind=mirror` with `bundle_id` and paths; disallowed to point to external URLs in sealed mode. -* `GET /feeds/status` shows per‑source staleness and last artifact version. - -### 5.4 Errors - -Standardized sealed‑mode error: - -``` -{ - "code": "AIRGAP_EGRESS_BLOCKED", - "message": "Egress is sealed. Import a Mirror Bundle with advisories.", - "remediation": "Run: stella airgap import bundle.tgz", - "trace_id": "..." -} -``` - ---- - -## 6) Documentation changes - -Create or update: - -1. `/docs/airgap/overview.md` - - * Modes, lifecycle, responsibilities, threat model, what degrades. -2. `/docs/airgap/bootstrap.md` - - * Offline Bootstrap Pack creation, validation, install steps for Helm/compose, local registry seeding. -3. `/docs/airgap/mirror-bundles.md` - - * Bundle format, DSSE/TUF/Merkle, signed time, creation on connected host, import in sealed environment, rotation of roots. -4. `/docs/airgap/sealing-and-egress.md` - - * Network policies, EgressPolicy facade, runner validation, verifying sealed status. -5. `/docs/airgap/staleness-and-time.md` - - * Time anchor, drift, staleness budgets and UI behavior. -6. `/docs/airgap/operations.md` - - * Periodic update cadence, runbooks, failure scenarios, disaster recovery. -7. `/docs/airgap/degradation-matrix.md` - - * Feature map: available, degraded, disabled; with remediation. -8. `/docs/console/airgap.md` - - * Status badges, import wizard, staleness indicators. -9. `/docs/cli/airgap.md` - - * Commands, examples, exit codes. -10. `/docs/security/trust-and-signing.md` - -* Roots of trust, key rotation, DSSE, TUF model. - -11. 
`/docs/dev/airgap-contracts.md` - -* EgressPolicy usage, testing patterns, sealed‑mode CI gates. - -Add the banner at the top of each page: - -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - ---- - -## 7) Implementation plan - -### Phase 1 — Foundations - -* Add `airgap/controller` with sealed state and status API. -* Integrate `EgressPolicy` facade in all outbound network call sites. -* Provide default NetworkPolicy/iptables templates and Helm values to block egress. -* Console shows sealed badge and status. - -### Phase 2 — Mirror Bundles - -* Implement `mirror/creator` in connected mode with content plug‑ins. -* Implement `airgap/importer` with DSSE/TUF/Merkle verification and catalog updates. -* Export Center gains **Mirror bundle** build and verify commands (connected side). - -### Phase 3 — Deterministic jobs - -* Add job plan validation in the Task Runner. -* Restrict sources in sealed mode. -* Conseiller/Excitator add “mirror source” adapters. - -### Phase 4 — Staleness and time - -* Parse time anchors; enforce staleness budgets; add UI indicators and task refusal when budgets exceeded. -* Notifications for expiring anchors. - -### Phase 5 — Degradation matrix and UX - -* Wire feature flags and fallbacks in Console and APIs. -* Improve error messages with remediation guidance. - -### Phase 6 — Evidence portability - -* Portable evidence packages: export/import with full verification. -* Document cross‑domain workflows. - ---- - -## 8) Engineering tasks - -**Air‑gap controller and sealing** - -* [ ] Implement `airgap/controller` with persistent state and RBAC. -* [ ] Add `GET /system/airgap/status`, `POST /system/airgap/seal`. -* [ ] Provide cluster egress templates for Kubernetes and for docker‑compose. -* [ ] Instrument startup checks to refuse running in sealed mode if egress rules aren’t applied. - -**EgressPolicy integration** - -* [ ] Create `pkg/egress` facade and replace all direct HTTP client constructions in services. -* [ ] Add linter rule and CI check forbidding raw `http.NewClient` in server code. -* [ ] Add unit tests for sealed and unsealed behavior. - -**Mirror bundles** - -* [ ] Implement TUF/DSSE verifiers and Merkle root builder. -* [ ] Build content plug‑ins: advisories, VEX, policy packs, images. -* [ ] Write `bundle_catalog` and `bundle_items` tables with RLS. -* [ ] CLI: `stella mirror create|verify`, `stella airgap import|verify`. - -**Conseiller/Excitator** - -* [ ] Add mirror adapters for read‑only ingestion from bundle paths. -* [ ] Persist source digests and bundle IDs on each linked record. -* [ ] Unit tests to ensure no merge behavior is introduced by bundle ingestion. - -**Policy Engine & Studio** - -* [ ] Accept policy packs from bundles; track `policy_version` and `bundle_id`. -* [ ] Add degradation notices for rules requiring external reputation; provide cached fallbacks. - -**Task Runner & Orchestrator** - -* [ ] Plan‑time validation against network calls; add `internal:` allow‑list mapping. -* [ ] Emit sealed‑mode violations to Timeline with remediation text. - -**Console** - -* [ ] Status panel: sealed badge, last import, staleness meters. -* [ ] Import wizard with verify results and catalog diff preview. -* [ ] Degradation matrix UI and contextual tooltips. - -**Observability & Forensics** - -* [ ] Mark sealed mode in telemetry attributes. -* [ ] Add portable evidence package export/import; verify on read. 
- -**Authority & Tenancy** - -* [ ] New scopes: `airgap:seal`, `airgap:import`, `airgap:status:read`. -* [ ] Audit import actions with actor and trace ID. - -**Docs** - -* [ ] Author all pages listed in section 6, include signed‑time workflow diagrams. -* [ ] Insert banner statement in each page. - -**Testing** - -* [ ] Sealed‑mode e2e: attempt egress; ensure refusal and remediation. -* [ ] Bundle import e2e: corrupt DSSE, wrong root, tampered artifact → rejected. -* [ ] Performance: large advisory bundle import within target time (see Acceptance). -* [ ] Time drift scenarios and staleness budget enforcement. -* [ ] Regression: ensure AOC rules unchanged in sealed mode. - ---- - -## 9) Feature changes required in other components - -* **Export Center:** add mirror bundle export profile, signed‑time token inclusion, and portable evidence packages. -* **Notifications:** remove external webhooks by default in sealed mode; add local SMTP/syslog sinks. -* **CLI Parity:** ensure all admin and import operations are exposed; add sealed‑mode safety prompts. -* **Containerized Distribution:** ship **Bootstrap Pack** that includes all images and charts in a single oci‑archive set with index manifest. -* **Observability:** disable remote exporters; include local dashboards; mark sealed mode in UI. -* **Policy Studio:** enable offline authoring and export of policy sub‑bundles. -* **VEX Consensus Lens:** ensure it operates solely on imported VEX statements; highlight coverage vs. stale. - -> **Imposed rule reminder:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - ---- - -## 10) Acceptance criteria - -* Environment can be **sealed** and verified via API, CLI, and network policies. -* Import of a valid Mirror Bundle succeeds; DSSE, TUF, and Merkle validations recorded in `import_audit`. -* Conseiller and Excitator operate only on imported sources; linkage reflects original source identities. -* Policy packs are importable and versioned; rules that depend on external evidence show clear degradation. -* Large bundle (e.g., 8–12 GB with images) imports in under 20 minutes on SSD storage and indexes advisories in under 5 minutes on a 4‑core node. -* Console displays sealed badge, last import, staleness, and degradation matrix. -* Attempted egress in sealed mode fails with `AIRGAP_EGRESS_BLOCKED` and remediation. -* Portable evidence packages export and verify across separate enclaves. -* All changes documented with the banner statement. - ---- - -## 11) Risks and mitigations - -* **Key management complexity:** rotate TUF roots with dual‑control workflow and explicit docs; fail‑safe to previous root if rotation bundle absent. -* **Staleness risk:** enforce budgets and block risk‑critical jobs when expired; provide monitoring and notifications for impending staleness. -* **Operator error during import:** dry‑run verification, diff preview of catalog changes, and ability to roll back via content address. -* **Hidden egress paths:** CI lints and runtime guardrails; network policies enforced at cluster layer. -* **Bundle size bloat:** Zstandard compression, delta bundles, and selective content flags for creation. - ---- - -## 12) Philosophy - -* **Predictable over perfect:** deterministic, explainable results beat unknown “live” results in sensitive networks. -* **Trust is earned:** every offline exchange is signed, verifiable, and auditable. -* **Degrade transparently:** when features reduce capability, explain it and guide remediation. 
- -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +--- + +# Epic 16: Air‑Gapped Mode + +**Short name:** Air‑Gapped Mode +**Primary components:** Web Services API, Console, CLI, Orchestrator, Task Runner, Conseiller (Feedser), Excitator (VEXer), Policy Engine, Findings Ledger, Export Center, Authority & Tenancy, Notifications, Observability & Forensics +**Surfaces:** offline bootstrap, update ingestion via mirror bundles, sealed egress, deterministic jobs, offline advisories/VEX, offline policy packs, offline notifications, evidence exports +**Dependencies:** Export Center, Containerized Distribution, Authority‑Backed Scopes & Tenancy, Observability & Forensics, Policy Studio + +**AOC ground rule reminder:** Conseiller and Excitator aggregate and link advisories/VEX. They never merge or mutate source records. Air‑Gapped Mode must preserve this invariant even when mirroring and importing updates. + +--- + +## 1) What it is + +A fully supported operating profile where StellaOps runs in a disconnected environment with: + +* **Zero external egress** from platform services and jobs. +* **Deterministic inputs** provided via signed, offline **Mirror Bundles** (advisories, VEX, policy packs, vendor feeds, Stella metadata, container images, dashboards). +* **Offline bootstrap** for images and charts, plus reproducible configuration and cryptographically verifiable updates. +* **Graceful feature degradation** with explicit UX: features that require external connectivity are either backed by local artifacts or clearly disabled with an explanation. +* **Auditable import/export** including provenance attestations, evidence bundles, and chain‑of‑custody for all offline exchanges. + +Air‑Gapped Mode is selectable at install time and enforceable at runtime. When enabled, all components operate under an “egress sealed” policy and only consume data from local stores. + +--- + +## 2) Why + +Many users operate in classified, regulated, or high‑sensitivity networks where egress is prohibited. They still need SBOM analysis, policy evaluation, advisory/VEX mapping, and reporting. Air‑Gapped Mode provides the same core outcomes with verifiable offline inputs and explicit operational guardrails. + +--- + +## 3) How it should work + +### 3.1 Modes and lifecycle + +* **Connected Mode:** normal operation; can create Mirror Bundles on a staging host. +* **Sealed Air‑Gapped Mode:** platform enforces no egress. Only local resources are allowed. +* **Transition flow:** + + 1. Prepare an offline **Bootstrap Pack** with all container images, Helm/compose charts, seed database, and initial Mirror Bundle. + 2. Install in the air‑gapped enclave and **seal** egress. + 3. Periodically import new **Mirror Bundles** via removable media. + 4. Export evidence/reports as needed. + +### 3.2 Egress sealing + +* **Static guardrails:** + + * Platform flag `STELLA_AIRGAP=sealed` and database feature flag `env.mode='sealed'`. + * NetworkPolicy/iptables/eBPF deny‑all egress for namespaces/pods except loopback and the internal object store. + * Outbound DNS blocked. + * HTTP clients in code use a single `EgressPolicy` facade. When sealed, it panics on direct network calls and returns a typed error with remediation (“import a Mirror Bundle”). 
+* **Verification:** `GET /system/airgap/status` returns `sealed: true|false`, current policy hash, and last import timestamp. CLI prints warning if not sealed in declared air‑gapped install. + +### 3.3 Trusted time + +* Air‑gapped systems cannot NTP. Each Mirror Bundle includes a **signed time token** (Roughtime‑style or RFC 3161) from a trusted authority. On import, platform stores `time_anchor` for drift calculations and staleness checks. +* If time drift exceeds policy threshold, UI shows “stale view” badges and some jobs are blocked until a new bundle provides a fresh anchor. + +### 3.4 Mirror Bundles (offline updates) + +* **Content types:** + + * Public advisories (OSV, GHSA, vendor advisories), NVD mappings, CPE/Package metadata. + * VEX statements from vendors/communities. + * Policy packs (templates, baselines, versioned rule sets). + * StellaOps engine metadata and schema migrations. + * Optional: **OCI image set** for platform and recommended runners. + * Optional: dashboards and alert rule packs. +* **Format:** a TUF‑like layout: + + ``` + root.json, snapshot.json, timestamp.json, targets/ + advisories/*.jsonl.zst + vex/*.jsonl.zst + policy/*.tar.zst + images/* (OCI layout or oci-archive) + meta/engine/*.tgz + meta/time-anchor.json (signed) + ``` +* **Integrity & trust:** + + * DSSE‑signed target manifests. + * Root of trust rotated via `root.json` within strict policy; rotation requires manual dual approval in sealed mode. + * Each content artifact has a content digest and a **Merkle root** for the overall bundle. +* **Creation:** in connected networks, `stella mirror create --content advisories,vex,policy,images --since 2025-01-01 --out bundle.tgz`. +* **Import:** in air‑gap, `stella airgap import bundle.tgz`. The importer verifies DSSE, TUF metadata, Merkle root, then writes to local object store and updates catalog tables. +* **Idempotence:** imports are content‑addressed; re‑imports deduplicate. + +### 3.5 Deterministic jobs and sources + +* **Allowed sources:** filesystem, internal object store, tenant private registry, and pre‑approved connectors that don’t require external egress. +* **Disallowed in sealed mode:** remote package registries, web scrapers, outbound webhooks, cloud KMS unless on the enclave network. +* **Runner policy:** the Task Runner verifies job descriptors contain no network calls unless marked `internal:` with allow‑listed destinations. Violations fail at plan time with an explainable error. + +### 3.6 Conseiller and Excitator in air‑gap + +* **Conseiller (Feedser):** ingests advisories only from imported bundles or tenant local feeds. It preserves source identities and never merges. Linkage uses bundle‑provided cross‑refs and local heuristics. +* **Excitator (VEXer):** imports VEX records as‑is, links them to components and advisories, and records the origin bundle and statement digests. Consensus Lens (Epic 7) operates offline across the imported sources. + +### 3.7 Policy Engine and Studio + +* Policy packs are versioned and imported via bundles. +* Simulation and authoring work locally. Exports of new or updated policies can be packaged as **Policy Sub‑Bundles** for transfer back to connected environments if needed. +* Engine shows which rules depend on external evidence and how they degrade in sealed mode (e.g., “No external EPSS; using cached percentile from last bundle.”). + +### 3.8 Notifications in sealed mode + +* Default to **local delivery** only: SMTP relay inside enclave, syslog, file sink. +* External webhooks are disabled. 
+* Notification templates show “air‑gap compliant channel” tags to avoid misconfiguration. + +### 3.9 Observability & Forensics + +* Traces, logs, metrics remain local. +* Evidence Locker supports **portable evidence packages** for cross‑domain transfer: `stella forensic snapshot create --portable`. +* Importing an evidence bundle in another enclave verifies signatures and maintains chain‑of‑custody. + +### 3.10 Console and CLI behavior + +* Console shows a prominent **Air‑Gapped: Sealed** badge with last import time and staleness indicators for advisories, VEX, and policy packs. +* CLI commands gain `--sealed` awareness: any operation that would egress prints a refusal with remediation suggesting the appropriate import. + +### 3.11 Multi‑tenant and scope + +* Tenancy works unchanged. Bundle imports can target: + + * `--tenant-global`: shared catalogs (advisories, VEX, policy baselines). + * `--tenant=`: tenant‑specific content (e.g., private advisories). +* Authority scopes gain `airgap:import`, `airgap:status:read`, `airgap:seal` (admin‑only). + +### 3.12 Feature degradation matrix + +* **AI Assistant:** offline variants use local models if installed; otherwise feature is disabled with a message. +* **External reputation feeds (e.g., EPSS‑like):** replaced by cached values from the bundle. +* **Container base image lookups:** rely on imported metadata or tenant private registry. + +--- + +## 4) Architecture + +### 4.1 New modules + +* `airgap/controller` + + * Sealing state machine; status API; guardrails wiring into HTTP clients and runner. +* `airgap/importer` + + * TUF/DSSE verification, Merkle validation, object store loader, catalog updater. +* `mirror/creator` + + * Connected‑side builder for bundles; content plug‑ins for advisories/VEX/policy/images. +* `airgap/policy` + + * Enforcement library exposing `EgressPolicy` facade and job plan validators. +* `airgap/time` + + * Time anchor parser, drift checks, staleness annotations. +* `console/airgap` + + * Sealed badge, import UI, staleness dashboards, degradation notices. +* `cli/airgap` + + * `stella airgap seal|status|import|verify` commands; `stella mirror create|verify`. + +### 4.2 Data model additions + +* `airgap_state(id, sealed BOOLEAN, policy_hash TEXT, last_import_at TIMESTAMP, time_anchor JSONB)` +* `bundle_catalog(id, kind ENUM, merkle_root TEXT, dsse_signer TEXT, created_at TIMESTAMP, imported_at TIMESTAMP, scope ENUM('global','tenant'), tenant_id NULLABLE, labels JSONB)` +* `bundle_items(bundle_id, path TEXT, sha256 TEXT, size BIGINT, type TEXT, meta JSONB)` +* `import_audit(id, bundle_id, actor, tenant_scope, verify_result, trace_id, created_at)` + +RLS: tenant‑scoped rows when `scope='tenant'`; global rows readable only with `stella:airgap:status:read`. + +### 4.3 Storage layout + +Object store paths: + +``` +tenants/_global/mirror//targets/... +tenants//mirror//targets/... +tenants/_global/images//... +``` + +Evidence locker remains separate. Imported images use **OCI layout** for local registry sync. + +### 4.4 Message topics + +* `stella..airgap.imported` with bundle metadata. +* `stella..airgap.staleness` periodic events emitted for UX. +* `stella..policy.degraded` when rules fall back due to sealed mode. 
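+
+As a sketch of the enforcement library in `airgap/policy` (section 4.1), the facade below shows one way the sealed check and the standardized `AIRGAP_EGRESS_BLOCKED` error from section 5.4 could be wired together. Class and member names are illustrative, not the shipped API.
+
+```csharp
+// Illustrative EgressPolicy facade: services route outbound HTTP through this
+// type. When the environment is sealed, any egress attempt is refused with a
+// typed error carrying the remediation described in section 5.4.
+using System;
+using System.Net.Http;
+using System.Threading.Tasks;
+
+public sealed class AirgapEgressBlockedException : Exception
+{
+    public string Code => "AIRGAP_EGRESS_BLOCKED";
+    public string Remediation => "Run: stella airgap import bundle.tgz";
+
+    public AirgapEgressBlockedException(Uri destination)
+        : base($"Egress is sealed. Refused call to {destination}. Import a Mirror Bundle instead.")
+    {
+    }
+}
+
+public sealed class EgressPolicy
+{
+    private readonly bool _sealed;
+    private readonly HttpClient _client;
+
+    public EgressPolicy(bool sealedMode, HttpClient client)
+    {
+        _sealed = sealedMode;
+        _client = client;
+    }
+
+    // Internal, allow-listed destinations (object store, enclave services) are
+    // expected to be resolved before a request ever reaches this facade.
+    public Task<HttpResponseMessage> GetAsync(Uri destination)
+        => _sealed
+            ? throw new AirgapEgressBlockedException(destination)
+            : _client.GetAsync(destination);
+}
+```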
+ +--- + +## 5) APIs and contracts + +### 5.1 Status and control + +* `GET /system/airgap/status` → `{ sealed, policy_hash, last_import_at, time_anchor, drift_seconds, staleness: { advisories_days, vex_days, policy_days } }` +* `POST /system/airgap/seal` → seals environment; requires `stella:airgap:seal#tenant/`. +* `POST /system/airgap/unseal` → only allowed if installed mode is not declared “permanently sealed” at bootstrap. Typically disabled. + +### 5.2 Import & verify + +* `POST /airgap/import` multipart or file reference → runs verify, writes catalog, returns bundle summary and warnings. +* `POST /airgap/verify` dry‑run verification returning DSSE/TUF and Merkle results. +* `GET /airgap/bundles` list imported bundles with filters. + +### 5.3 Conseiller/Excitator sources + +* `POST /feeds/register` supports `kind=mirror` with `bundle_id` and paths; disallowed to point to external URLs in sealed mode. +* `GET /feeds/status` shows per‑source staleness and last artifact version. + +### 5.4 Errors + +Standardized sealed‑mode error: + +``` +{ + "code": "AIRGAP_EGRESS_BLOCKED", + "message": "Egress is sealed. Import a Mirror Bundle with advisories.", + "remediation": "Run: stella airgap import bundle.tgz", + "trace_id": "..." +} +``` + +--- + +## 6) Documentation changes + +Create or update: + +1. `/docs/airgap/overview.md` + + * Modes, lifecycle, responsibilities, threat model, what degrades. +2. `/docs/airgap/bootstrap.md` + + * Offline Bootstrap Pack creation, validation, install steps for Helm/compose, local registry seeding. +3. `/docs/airgap/mirror-bundles.md` + + * Bundle format, DSSE/TUF/Merkle, signed time, creation on connected host, import in sealed environment, rotation of roots. +4. `/docs/airgap/sealing-and-egress.md` + + * Network policies, EgressPolicy facade, runner validation, verifying sealed status. +5. `/docs/airgap/staleness-and-time.md` + + * Time anchor, drift, staleness budgets and UI behavior. +6. `/docs/airgap/operations.md` + + * Periodic update cadence, runbooks, failure scenarios, disaster recovery. +7. `/docs/airgap/degradation-matrix.md` + + * Feature map: available, degraded, disabled; with remediation. +8. `/docs/console/airgap.md` + + * Status badges, import wizard, staleness indicators. +9. `/docs/cli/airgap.md` + + * Commands, examples, exit codes. +10. `/docs/security/trust-and-signing.md` + +* Roots of trust, key rotation, DSSE, TUF model. + +11. `/docs/dev/airgap-contracts.md` + +* EgressPolicy usage, testing patterns, sealed‑mode CI gates. + +Add the banner at the top of each page: + +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +--- + +## 7) Implementation plan + +### Phase 1 — Foundations + +* Add `airgap/controller` with sealed state and status API. +* Integrate `EgressPolicy` facade in all outbound network call sites. +* Provide default NetworkPolicy/iptables templates and Helm values to block egress. +* Console shows sealed badge and status. + +### Phase 2 — Mirror Bundles + +* Implement `mirror/creator` in connected mode with content plug‑ins. +* Implement `airgap/importer` with DSSE/TUF/Merkle verification and catalog updates. +* Export Center gains **Mirror bundle** build and verify commands (connected side). + +### Phase 3 — Deterministic jobs + +* Add job plan validation in the Task Runner. +* Restrict sources in sealed mode. +* Conseiller/Excitator add “mirror source” adapters. 
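+
+A plan-time validation sketch for the Task Runner work in Phase 3, assuming hypothetical plan shapes (the real job descriptors differ): every destination must be an allow-listed `internal:` target, otherwise the plan is rejected before execution.
+
+```csharp
+// Hypothetical plan-time validator: network destinations in a job plan must be
+// marked internal: and appear on the allow-list; anything else is reported as
+// a sealed-mode violation with remediation text.
+using System;
+using System.Collections.Generic;
+using System.Linq;
+
+public sealed record JobStep(string Name, IReadOnlyList<string> Destinations);
+
+public static class SealedPlanValidator
+{
+    public static IReadOnlyList<string> Validate(
+        IEnumerable<JobStep> plan, ISet<string> internalAllowList)
+        => plan
+            .SelectMany(step => step.Destinations
+                .Where(d => !d.StartsWith("internal:", StringComparison.Ordinal)
+                            || !internalAllowList.Contains(d))
+                .Select(d => $"Step '{step.Name}' targets '{d}', which is not an " +
+                             "allow-listed internal destination. Remove the call or " +
+                             "import the data via a Mirror Bundle."))
+            .ToList();
+}
+```
+
+An empty result means the plan can run; any entries would be surfaced as plan-time failures (and, per section 8, emitted to the Timeline with remediation text).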
+ +### Phase 4 — Staleness and time + +* Parse time anchors; enforce staleness budgets; add UI indicators and task refusal when budgets exceeded. +* Notifications for expiring anchors. + +### Phase 5 — Degradation matrix and UX + +* Wire feature flags and fallbacks in Console and APIs. +* Improve error messages with remediation guidance. + +### Phase 6 — Evidence portability + +* Portable evidence packages: export/import with full verification. +* Document cross‑domain workflows. + +--- + +## 8) Engineering tasks + +**Air‑gap controller and sealing** + +* [ ] Implement `airgap/controller` with persistent state and RBAC. +* [ ] Add `GET /system/airgap/status`, `POST /system/airgap/seal`. +* [ ] Provide cluster egress templates for Kubernetes and for docker‑compose. +* [ ] Instrument startup checks to refuse running in sealed mode if egress rules aren’t applied. + +**EgressPolicy integration** + +* [ ] Create `pkg/egress` facade and replace all direct HTTP client constructions in services. +* [ ] Add linter rule and CI check forbidding raw `http.NewClient` in server code. +* [ ] Add unit tests for sealed and unsealed behavior. + +**Mirror bundles** + +* [ ] Implement TUF/DSSE verifiers and Merkle root builder. +* [ ] Build content plug‑ins: advisories, VEX, policy packs, images. +* [ ] Write `bundle_catalog` and `bundle_items` tables with RLS. +* [ ] CLI: `stella mirror create|verify`, `stella airgap import|verify`. + +**Conseiller/Excitator** + +* [ ] Add mirror adapters for read‑only ingestion from bundle paths. +* [ ] Persist source digests and bundle IDs on each linked record. +* [ ] Unit tests to ensure no merge behavior is introduced by bundle ingestion. + +**Policy Engine & Studio** + +* [ ] Accept policy packs from bundles; track `policy_version` and `bundle_id`. +* [ ] Add degradation notices for rules requiring external reputation; provide cached fallbacks. + +**Task Runner & Orchestrator** + +* [ ] Plan‑time validation against network calls; add `internal:` allow‑list mapping. +* [ ] Emit sealed‑mode violations to Timeline with remediation text. + +**Console** + +* [ ] Status panel: sealed badge, last import, staleness meters. +* [ ] Import wizard with verify results and catalog diff preview. +* [ ] Degradation matrix UI and contextual tooltips. + +**Observability & Forensics** + +* [ ] Mark sealed mode in telemetry attributes. +* [ ] Add portable evidence package export/import; verify on read. + +**Authority & Tenancy** + +* [ ] New scopes: `airgap:seal`, `airgap:import`, `airgap:status:read`. +* [ ] Audit import actions with actor and trace ID. + +**Docs** + +* [ ] Author all pages listed in section 6, include signed‑time workflow diagrams. +* [ ] Insert banner statement in each page. + +**Testing** + +* [ ] Sealed‑mode e2e: attempt egress; ensure refusal and remediation. +* [ ] Bundle import e2e: corrupt DSSE, wrong root, tampered artifact → rejected. +* [ ] Performance: large advisory bundle import within target time (see Acceptance). +* [ ] Time drift scenarios and staleness budget enforcement. +* [ ] Regression: ensure AOC rules unchanged in sealed mode. + +--- + +## 9) Feature changes required in other components + +* **Export Center:** add mirror bundle export profile, signed‑time token inclusion, and portable evidence packages. +* **Notifications:** remove external webhooks by default in sealed mode; add local SMTP/syslog sinks. +* **CLI Parity:** ensure all admin and import operations are exposed; add sealed‑mode safety prompts. 
+* **Containerized Distribution:** ship **Bootstrap Pack** that includes all images and charts in a single oci‑archive set with index manifest. +* **Observability:** disable remote exporters; include local dashboards; mark sealed mode in UI. +* **Policy Studio:** enable offline authoring and export of policy sub‑bundles. +* **VEX Consensus Lens:** ensure it operates solely on imported VEX statements; highlight coverage vs. stale. + +> **Imposed rule reminder:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +--- + +## 10) Acceptance criteria + +* Environment can be **sealed** and verified via API, CLI, and network policies. +* Import of a valid Mirror Bundle succeeds; DSSE, TUF, and Merkle validations recorded in `import_audit`. +* Conseiller and Excitator operate only on imported sources; linkage reflects original source identities. +* Policy packs are importable and versioned; rules that depend on external evidence show clear degradation. +* Large bundle (e.g., 8–12 GB with images) imports in under 20 minutes on SSD storage and indexes advisories in under 5 minutes on a 4‑core node. +* Console displays sealed badge, last import, staleness, and degradation matrix. +* Attempted egress in sealed mode fails with `AIRGAP_EGRESS_BLOCKED` and remediation. +* Portable evidence packages export and verify across separate enclaves. +* All changes documented with the banner statement. + +--- + +## 11) Risks and mitigations + +* **Key management complexity:** rotate TUF roots with dual‑control workflow and explicit docs; fail‑safe to previous root if rotation bundle absent. +* **Staleness risk:** enforce budgets and block risk‑critical jobs when expired; provide monitoring and notifications for impending staleness. +* **Operator error during import:** dry‑run verification, diff preview of catalog changes, and ability to roll back via content address. +* **Hidden egress paths:** CI lints and runtime guardrails; network policies enforced at cluster layer. +* **Bundle size bloat:** Zstandard compression, delta bundles, and selective content flags for creation. + +--- + +## 12) Philosophy + +* **Predictable over perfect:** deterministic, explainable results beat unknown “live” results in sensitive networks. +* **Trust is earned:** every offline exchange is signed, verifiable, and auditable. +* **Degrade transparently:** when features reduce capability, explain it and guide remediation. + +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. diff --git a/docs/aoc/aoc-guardrails.md b/docs/aoc/aoc-guardrails.md index 686881f7..09340158 100644 --- a/docs/aoc/aoc-guardrails.md +++ b/docs/aoc/aoc-guardrails.md @@ -1,13 +1,13 @@ -# Aggregation-Only Contract (AOC) Guardrails - -The Aggregation-Only Contract keeps ingestion services deterministic and policy-neutral. Use these checkpoints whenever you add or modify backlog items: - -1. **Ingestion writes raw facts only.** Concelier and Excititor append immutable observations/linksets. No precedence, severity, suppression, or "safe fix" hints may be computed at ingest time. -2. **Derived semantics live elsewhere.** Policy Engine overlays, Vuln Explorer composition, and downstream reporting layers attach severity, precedence, policy verdicts, and UI hints. -3. **Provenance is mandatory.** Every ingestion write must include original source metadata, digests, and signing/provenance evidence when available. 
Reject writes lacking provenance. -4. **Deterministic outputs.** Given the same inputs, ingestion must produce identical documents, hashes, and event payloads across reruns. -5. **Guardrails everywhere.** Roslyn analyzers, schema validators, and CI smoke tests should fail builds that attempt forbidden writes. - -For detailed roles and ownership boundaries, see `AGENTS.md` at the repo root and the module-specific `ARCHITECTURE_*.md` dossiers. - -Need the full contract? Read the [Aggregation-Only Contract reference](../ingestion/aggregation-only-contract.md) for schemas, error codes, and migration guidance. +# Aggregation-Only Contract (AOC) Guardrails + +The Aggregation-Only Contract keeps ingestion services deterministic and policy-neutral. Use these checkpoints whenever you add or modify backlog items: + +1. **Ingestion writes raw facts only.** Concelier and Excititor append immutable observations/linksets. No precedence, severity, suppression, or "safe fix" hints may be computed at ingest time. +2. **Derived semantics live elsewhere.** Policy Engine overlays, Vuln Explorer composition, and downstream reporting layers attach severity, precedence, policy verdicts, and UI hints. +3. **Provenance is mandatory.** Every ingestion write must include original source metadata, digests, and signing/provenance evidence when available. Reject writes lacking provenance. +4. **Deterministic outputs.** Given the same inputs, ingestion must produce identical documents, hashes, and event payloads across reruns. +5. **Guardrails everywhere.** Roslyn analyzers, schema validators, and CI smoke tests should fail builds that attempt forbidden writes. + +For detailed roles and ownership boundaries, see `AGENTS.md` at the repo root and the module-specific `ARCHITECTURE_*.md` dossiers. + +Need the full contract? Read the [Aggregation-Only Contract reference](../ingestion/aggregation-only-contract.md) for schemas, error codes, and migration guidance. diff --git a/docs/api/EPIC_17_SDKS_OPENAPI.md b/docs/api/EPIC_17_SDKS_OPENAPI.md index 5121f4e7..8dc0d548 100644 --- a/docs/api/EPIC_17_SDKS_OPENAPI.md +++ b/docs/api/EPIC_17_SDKS_OPENAPI.md @@ -41,9 +41,9 @@ Net result: partners and internal teams integrate quickly without reverse‑engi ### 3.1 Source of truth and layout -* Each service owns a **module-scoped OAS** file: `src/StellaOps.Api.OpenApi//openapi.yaml`. - * Authority authentication/token surface now lives at `src/StellaOps.Api.OpenApi/authority/openapi.yaml`, covering `/token`, `/introspect`, `/revoke`, and `/jwks` flows with examples and scope catalog metadata. -* An aggregate spec `src/StellaOps.Api.OpenApi/stella.yaml` is produced by build tooling that composes per-service specs, resolves `$ref`s, and validates cross-service schemas. +* Each service owns a **module-scoped OAS** file: `src/Api/StellaOps.Api.OpenApi//openapi.yaml`. + * Authority authentication/token surface now lives at `src/Api/StellaOps.Api.OpenApi/authority/openapi.yaml`, covering `/token`, `/introspect`, `/revoke`, and `/jwks` flows with examples and scope catalog metadata. +* An aggregate spec `src/Api/StellaOps.Api.OpenApi/stella.yaml` is produced by build tooling that composes per-service specs, resolves `$ref`s, and validates cross-service schemas. * JSON Schema dialect: 2020‑12 (OpenAPI 3.1). No vendor‑specific features for core models. * Every response and error has at least one **validated example**. 
@@ -138,13 +138,13 @@ Net result: partners and internal teams integrate quickly without reverse‑engi ### 4.1 New modules -* `src/StellaOps.Api.OpenApi/*` per service and aggregate composer -* `src/StellaOps.Api.Governance` OAS linter rules and compatibility checker -* `src/StellaOps.Sdk.Generator` codegen drivers, post‑processing templates, smoke tests -* `src/StellaOps.Sdk.Release` packaging, signing, publishing -* `src/StellaOps.DevPortal.Site` static generator and assets +* `src/Api/StellaOps.Api.OpenApi/*` per service and aggregate composer +* `src/Api/StellaOps.Api.Governance` OAS linter rules and compatibility checker +* `src/Sdk/StellaOps.Sdk.Generator` codegen drivers, post‑processing templates, smoke tests +* `src/Sdk/StellaOps.Sdk.Release` packaging, signing, publishing +* `src/DevPortal/StellaOps.DevPortal.Site` static generator and assets * `test/contract` mock server config, golden examples -* `src/StellaOps.ExportCenter.DevPortalOffline` bundler +* `src/ExportCenter/StellaOps.ExportCenter.DevPortalOffline` bundler ### 4.2 Build flow @@ -254,7 +254,7 @@ Add the banner at the top of each page: **OAS & governance** -* [ ] Create `src/StellaOps.Api.OpenApi//openapi.yaml` for all services with minimal paths and shared components. +* [ ] Create `src/Api/StellaOps.Api.OpenApi//openapi.yaml` for all services with minimal paths and shared components. * [ ] Implement aggregate composer and `$ref` resolver. * [ ] Add CI job: lint, validate, compatibility diff; block merges on failure. * [ ] Migrate all endpoints to standard error envelope and provide examples. diff --git a/docs/api/policy.md b/docs/api/policy.md index ff329383..c616a36f 100644 --- a/docs/api/policy.md +++ b/docs/api/policy.md @@ -230,7 +230,7 @@ Slim wrapper used by CLI; returns 204 on success or `ERR_POL_001` payload. ## 6 · Run & Simulation APIs -> Schema reference: canonical policy run request/status/diff payloads ship with the Scheduler Models guide (`src/StellaOps.Scheduler.Models/docs/SCHED-MODELS-20-001-POLICY-RUNS.md`) and JSON fixtures under `samples/api/scheduler/policy-*.json`. +> Schema reference: canonical policy run request/status/diff payloads ship with the Scheduler Models guide (`src/Scheduler/__Libraries/StellaOps.Scheduler.Models/docs/SCHED-MODELS-20-001-POLICY-RUNS.md`) and JSON fixtures under `samples/api/scheduler/policy-*.json`. ### 6.1 Trigger Run @@ -389,7 +389,7 @@ Returns rule hit sequence: ## 9 · Compliance Checklist -- [ ] **Scopes enforced:** Endpoint access requires correct Authority scope mapping (see `/src/StellaOps.Authority/TASKS.md`). +- [ ] **Scopes enforced:** Endpoint access requires correct Authority scope mapping (see `/src/Authority/StellaOps.Authority/TASKS.md`). - [ ] **Schemas current:** JSON examples align with Scheduler Models (`SCHED-MODELS-20-001`) and Policy Engine DTOs; update when contracts change. - [ ] **Error codes mapped:** `ERR_POL_*` table reflects implementation and CI tests cover edge cases. - [ ] **Pagination documented:** List endpoints specify page/size and cursor semantics; responses include `X-Total-Count` or `nextCursor`. diff --git a/docs/architecture/console.md b/docs/architecture/console.md index a106edf5..2c14a59b 100644 --- a/docs/architecture/console.md +++ b/docs/architecture/console.md @@ -22,7 +22,7 @@ Non-goals: authoring ingestion logic, mutating Policy overlays, exposing interna ## 2 · Workspace & Packages -The console is implemented in `src/StellaOps.Web`, an Angular 17 workspace built on standalone components and Signals. 
+The console is implemented in `src/Web/StellaOps.Web`, an Angular 17 workspace built on standalone components and Signals. | Path | Purpose | Highlights | |------|---------|------------| @@ -148,7 +148,7 @@ Optimisation levers: ## 6 · Offline & Configuration Workflows - **Config manifest:** `/config.json` includes Authority issuer/client ID, gateway base URL, feature flags, telemetry endpoints, and offline hints. Operators can swap config by copying `src/config/config.sample.json` and editing before build, or by rewriting the response at gateway runtime. -- **Deterministic install:** Documented in `src/StellaOps.Web/docs/DeterministicInstall.md`—`npm run ci:install` plus Chromium provisioning ensures offline runners reproduce builds. +- **Deterministic install:** Documented in `src/Web/StellaOps.Web/docs/DeterministicInstall.md`—`npm run ci:install` plus Chromium provisioning ensures offline runners reproduce builds. - **Offline Kit parity:** UI validates downloads manifest signatures (cosign) and surfaces snapshot timestamps per tenant. When offline, buttons switch to CLI snippets (`stella runs export`, `stella downloads sync`). - **Feature flags:** `CONSOLE_FEATURE_FLAGS` toggles modules (`runs`, `downloads`, `telemetry`); offline bundles include flag manifest so UI can render only supported panes. - **Snapshot awareness:** Global banner shows snapshot timestamp and disables actions needing Authority fresh-auth when running in sealed mode. diff --git a/docs/architecture/overview.md b/docs/architecture/overview.md index 2373e622..c28663f1 100644 --- a/docs/architecture/overview.md +++ b/docs/architecture/overview.md @@ -1,168 +1,168 @@ -# StellaOps Architecture Overview (Sprint 19) - -> **Ownership:** Architecture Guild • Docs Guild -> **Audience:** Service owners, platform engineers, solution architects -> **Related:** [High-Level Architecture](../07_HIGH_LEVEL_ARCHITECTURE.md), [Concelier Architecture](../ARCHITECTURE_CONCELIER.md), [Policy Engine Architecture](policy-engine.md), [Aggregation-Only Contract](../ingestion/aggregation-only-contract.md) - -This dossier summarises the end-to-end runtime topology after the Aggregation-Only Contract (AOC) rollout. It highlights where raw facts live, how ingest services enforce guardrails, and how downstream components consume those facts to derive policy decisions and user-facing experiences. - ---- - -## 1 · System landscape - -```mermaid -graph TD - subgraph Edge["Clients & Automation"] - CLI[stella CLI] - UI[Console SPA] - APIClients[CI / API Clients] - end - Gateway[API Gateway
(JWT + DPoP scopes)] - subgraph Scanner["Fact Collection"] - ScannerWeb[Scanner.WebService] - ScannerWorkers[Scanner.Workers] - Agent[Agent Runtime] - end - subgraph Ingestion["Aggregation-Only Ingestion (AOC)"] - Concelier[Concelier.WebService] - Excititor[Excititor.WebService] - RawStore[(MongoDB
advisory_raw / vex_raw)] - end - subgraph Derivation["Policy & Overlay"] - Policy[Policy Engine] - Scheduler[Scheduler Services] - Notify[Notifier] - end - subgraph Experience["UX & Export"] - UIService[Console Backend] - Exporters[Export / Offline Kit] - end - Observability[Telemetry Stack] - - CLI --> Gateway - UI --> Gateway - APIClients --> Gateway - Gateway --> ScannerWeb - ScannerWeb --> ScannerWorkers - ScannerWorkers --> Concelier - ScannerWorkers --> Excititor - Concelier --> RawStore - Excititor --> RawStore - RawStore --> Policy - Policy --> Scheduler - Policy --> Notify - Policy --> UIService - Scheduler --> UIService - UIService --> Exporters - Exporters --> CLI - Exporters --> Offline[Offline Kit] - Observability -.-> ScannerWeb - Observability -.-> Concelier - Observability -.-> Excititor - Observability -.-> Policy - Observability -.-> Scheduler - Observability -.-> Notify -``` - -Key boundaries: - -- **AOC border.** Everything inside the Ingestion subgraph writes only immutable raw facts plus link hints. Derived severity, consensus, and risk remain outside the border. -- **Policy-only derivation.** Policy Engine materialises `effective_finding_*` collections and emits overlays; other services consume but never mutate them. -- **Tenant enforcement.** Authority-issued DPoP scopes flow through Gateway to every service; raw stores and overlays include `tenant` strictly. - ---- - -## 2 · Aggregation-Only Contract focus - -### 2.1 Responsibilities at the boundary - -| Area | Services | Responsibilities under AOC | Forbidden under AOC | -|------|----------|-----------------------------|---------------------| -| **Ingestion (Concelier / Excititor)** | `StellaOps.Concelier.WebService`, `StellaOps.Excititor.WebService` | Fetch upstream advisories/VEX, verify signatures, compute linksets, append immutable documents to `advisory_raw` / `vex_raw`, emit observability signals, expose raw read APIs. | Computing severity, consensus, suppressions, or policy hints; merging upstream sources into a single derived record; mutating existing documents. | -| **Policy & Overlay** | `StellaOps.Policy.Engine`, Scheduler | Join SBOM inventory with raw advisories/VEX, evaluate policies, issue `effective_finding_*` overlays, drive remediation workflows. | Writing to raw collections; bypassing guard scopes; running without recorded provenance. | -| **Experience layers** | Console, CLI, Exporters | Surface raw facts + policy overlays; run `stella aoc verify`; render AOC dashboards and reports. | Accepting ingestion payloads that lack provenance or violate guard results. | - -### 2.2 Raw stores - -| Collection | Purpose | Key fields | Notes | -|------------|---------|------------|-------| -| `advisory_raw` | Immutable vendor/ecosystem advisory documents. | `_id`, `tenant`, `source.*`, `upstream.*`, `content.raw`, `linkset`, `supersedes`. | Idempotent by `(source.vendor, upstream.upstream_id, upstream.content_hash)`. | -| `vex_raw` | Immutable vendor VEX statements. | Mirrors `advisory_raw`; `identifiers.statements` summarises affected components. | Maintains supersedes chain identical to advisory flow. | -| Change streams (`advisory_raw_stream`, `vex_raw_stream`) | Feed Policy Engine and Scheduler. | `operationType`, `documentKey`, `fullDocument`, `tenant`, `traceId`. | Scope filtered per tenant before delivery. 
| - -### 2.3 Guarded ingestion sequence - -```mermaid -sequenceDiagram - participant Upstream as Upstream Source - participant Connector as Concelier/Excititor Connector - participant Guard as AOCWriteGuard - participant Mongo as MongoDB (advisory_raw / vex_raw) - participant Stream as Change Stream - participant Policy as Policy Engine - - Upstream-->>Connector: CSAF / OSV / VEX document - Connector->>Connector: Normalize transport, compute content_hash - Connector->>Guard: Candidate raw doc (source + upstream + content + linkset) - Guard-->>Connector: ERR_AOC_00x on violation - Guard->>Mongo: Append immutable document (with tenant & supersedes) - Mongo-->>Stream: Change event (tenant scoped) - Stream->>Policy: Raw delta payload - Policy->>Policy: Evaluate policies, compute effective findings -``` - ---- - -### 2.4 Authority scopes & tenancy - -| Scope | Holder | Purpose | Notes | -|-------|--------|---------|-------| -| `advisory:ingest` / `vex:ingest` | Concelier / Excititor collectors | Append raw documents through ingestion endpoints. | Paired with tenant claims; requests without tenant are rejected. | -| `advisory:read` / `vex:read` | DevOps verify identity, CLI | Run `stella aoc verify` or call `/aoc/verify`. | Read-only; cannot mutate raw docs. | -| `effective:write` | Policy Engine | Materialise `effective_finding_*` overlays. | Only Policy Engine identity may hold; ingestion contexts receive `ERR_AOC_006` if they attempt. | -| `findings:read` | Console, CLI, exports | Consume derived findings. | Enforced by Gateway and downstream services. | - ---- - -## 3 · Data & control flow highlights - -1. **Ingestion:** Concelier / Excititor connectors fetch upstream documents, compute linksets, and hand payloads to `AOCWriteGuard`. Guards validate schema, provenance, forbidden fields, supersedes pointers, and append-only rules before writing to Mongo. -2. **Verification:** `stella aoc verify` (CLI/CI) and `/aoc/verify` endpoints replay guard checks against stored documents, mapping `ERR_AOC_00x` codes to exit codes for automation. -3. **Policy evaluation:** Mongo change streams deliver tenant-scoped raw deltas. Policy Engine joins SBOM inventory (via BOM Index), executes deterministic policies, writes overlays, and emits events to Scheduler/Notify. -4. **Experience surfaces:** Console renders an AOC dashboard showing ingestion latency, guard violations, and supersedes depth. CLI exposes raw-document fetch helpers for auditing. Offline Kit bundles raw collections alongside guard configs to keep air-gapped installs verifiable. -5. **Observability:** All services emit `ingestion_write_total`, `aoc_violation_total{code}`, `ingestion_latency_seconds`, and trace spans `ingest.fetch`, `ingest.transform`, `ingest.write`, `aoc.guard`. Logs correlate via `traceId`, `tenant`, `source.vendor`, and `content_hash`. - ---- - -## 4 · Offline & disaster readiness - -- **Offline Kit:** Packages raw Mongo snapshots (`advisory_raw`, `vex_raw`) plus guard configuration and CLI verifier binaries so air-gapped sites can re-run AOC checks before promotion. -- **Recovery:** Supersedes chains allow rollback to prior revisions without mutating documents. Disaster exercises must rehearse restoring from snapshot, replaying change streams into Policy Engine, and re-validating guard compliance. 
-- **Migration:** Legacy normalised fields are moved to temporary views during cutover; ingestion runtime removes writes once guard-enforced path is live (see [Migration playbook](../ingestion/aggregation-only-contract.md#8-migration-playbook)). - ---- - -## 5 · References - -- [Aggregation-Only Contract reference](../ingestion/aggregation-only-contract.md) -- [Concelier architecture](../ARCHITECTURE_CONCELIER.md) -- [Excititor architecture](../ARCHITECTURE_EXCITITOR.md) -- [Policy Engine architecture](policy-engine.md) -- [Authority service](../ARCHITECTURE_AUTHORITY.md) -- [Observability standards (upcoming)](../observability/policy.md) – interim reference for telemetry naming. - ---- - -## 6 · Compliance checklist - -- [ ] AOC guard enabled for all Concelier and Excititor write paths in production. -- [ ] Mongo schema validators deployed for `advisory_raw` and `vex_raw`; change streams scoped per tenant. -- [ ] Authority scopes (`advisory:*`, `vex:*`, `effective:*`) configured in Gateway and validated via integration tests. -- [ ] `stella aoc verify` wired into CI/CD pipelines with seeded violation fixtures. -- [ ] Console AOC dashboard and CLI documentation reference the new ingestion contract. -- [ ] Offline Kit bundles include guard configs, verifier tooling, and documentation updates. -- [ ] Observability dashboards include violation, latency, and supersedes depth metrics with alert thresholds. - ---- - -*Last updated: 2025-10-26 (Sprint 19).* +# StellaOps Architecture Overview (Sprint 19) + +> **Ownership:** Architecture Guild • Docs Guild +> **Audience:** Service owners, platform engineers, solution architects +> **Related:** [High-Level Architecture](../07_HIGH_LEVEL_ARCHITECTURE.md), [Concelier Architecture](../ARCHITECTURE_CONCELIER.md), [Policy Engine Architecture](policy-engine.md), [Aggregation-Only Contract](../ingestion/aggregation-only-contract.md) + +This dossier summarises the end-to-end runtime topology after the Aggregation-Only Contract (AOC) rollout. It highlights where raw facts live, how ingest services enforce guardrails, and how downstream components consume those facts to derive policy decisions and user-facing experiences. + +--- + +## 1 · System landscape + +```mermaid +graph TD + subgraph Edge["Clients & Automation"] + CLI[stella CLI] + UI[Console SPA] + APIClients[CI / API Clients] + end + Gateway[API Gateway
(JWT + DPoP scopes)] + subgraph Scanner["Fact Collection"] + ScannerWeb[Scanner.WebService] + ScannerWorkers[Scanner.Workers] + Agent[Agent Runtime] + end + subgraph Ingestion["Aggregation-Only Ingestion (AOC)"] + Concelier[Concelier.WebService] + Excititor[Excititor.WebService] + RawStore[(MongoDB
advisory_raw / vex_raw)] + end + subgraph Derivation["Policy & Overlay"] + Policy[Policy Engine] + Scheduler[Scheduler Services] + Notify[Notifier] + end + subgraph Experience["UX & Export"] + UIService[Console Backend] + Exporters[Export / Offline Kit] + end + Observability[Telemetry Stack] + + CLI --> Gateway + UI --> Gateway + APIClients --> Gateway + Gateway --> ScannerWeb + ScannerWeb --> ScannerWorkers + ScannerWorkers --> Concelier + ScannerWorkers --> Excititor + Concelier --> RawStore + Excititor --> RawStore + RawStore --> Policy + Policy --> Scheduler + Policy --> Notify + Policy --> UIService + Scheduler --> UIService + UIService --> Exporters + Exporters --> CLI + Exporters --> Offline[Offline Kit] + Observability -.-> ScannerWeb + Observability -.-> Concelier + Observability -.-> Excititor + Observability -.-> Policy + Observability -.-> Scheduler + Observability -.-> Notify +``` + +Key boundaries: + +- **AOC border.** Everything inside the Ingestion subgraph writes only immutable raw facts plus link hints. Derived severity, consensus, and risk remain outside the border. +- **Policy-only derivation.** Policy Engine materialises `effective_finding_*` collections and emits overlays; other services consume but never mutate them. +- **Tenant enforcement.** Authority-issued DPoP scopes flow through Gateway to every service; raw stores and overlays include `tenant` strictly. + +--- + +## 2 · Aggregation-Only Contract focus + +### 2.1 Responsibilities at the boundary + +| Area | Services | Responsibilities under AOC | Forbidden under AOC | +|------|----------|-----------------------------|---------------------| +| **Ingestion (Concelier / Excititor)** | `StellaOps.Concelier.WebService`, `StellaOps.Excititor.WebService` | Fetch upstream advisories/VEX, verify signatures, compute linksets, append immutable documents to `advisory_raw` / `vex_raw`, emit observability signals, expose raw read APIs. | Computing severity, consensus, suppressions, or policy hints; merging upstream sources into a single derived record; mutating existing documents. | +| **Policy & Overlay** | `StellaOps.Policy.Engine`, Scheduler | Join SBOM inventory with raw advisories/VEX, evaluate policies, issue `effective_finding_*` overlays, drive remediation workflows. | Writing to raw collections; bypassing guard scopes; running without recorded provenance. | +| **Experience layers** | Console, CLI, Exporters | Surface raw facts + policy overlays; run `stella aoc verify`; render AOC dashboards and reports. | Accepting ingestion payloads that lack provenance or violate guard results. | + +### 2.2 Raw stores + +| Collection | Purpose | Key fields | Notes | +|------------|---------|------------|-------| +| `advisory_raw` | Immutable vendor/ecosystem advisory documents. | `_id`, `tenant`, `source.*`, `upstream.*`, `content.raw`, `linkset`, `supersedes`. | Idempotent by `(source.vendor, upstream.upstream_id, upstream.content_hash)`. | +| `vex_raw` | Immutable vendor VEX statements. | Mirrors `advisory_raw`; `identifiers.statements` summarises affected components. | Maintains supersedes chain identical to advisory flow. | +| Change streams (`advisory_raw_stream`, `vex_raw_stream`) | Feed Policy Engine and Scheduler. | `operationType`, `documentKey`, `fullDocument`, `tenant`, `traceId`. | Scope filtered per tenant before delivery. 
| + +### 2.3 Guarded ingestion sequence + +```mermaid +sequenceDiagram + participant Upstream as Upstream Source + participant Connector as Concelier/Excititor Connector + participant Guard as AOCWriteGuard + participant Mongo as MongoDB (advisory_raw / vex_raw) + participant Stream as Change Stream + participant Policy as Policy Engine + + Upstream-->>Connector: CSAF / OSV / VEX document + Connector->>Connector: Normalize transport, compute content_hash + Connector->>Guard: Candidate raw doc (source + upstream + content + linkset) + Guard-->>Connector: ERR_AOC_00x on violation + Guard->>Mongo: Append immutable document (with tenant & supersedes) + Mongo-->>Stream: Change event (tenant scoped) + Stream->>Policy: Raw delta payload + Policy->>Policy: Evaluate policies, compute effective findings +``` + +--- + +### 2.4 Authority scopes & tenancy + +| Scope | Holder | Purpose | Notes | +|-------|--------|---------|-------| +| `advisory:ingest` / `vex:ingest` | Concelier / Excititor collectors | Append raw documents through ingestion endpoints. | Paired with tenant claims; requests without tenant are rejected. | +| `advisory:read` / `vex:read` | DevOps verify identity, CLI | Run `stella aoc verify` or call `/aoc/verify`. | Read-only; cannot mutate raw docs. | +| `effective:write` | Policy Engine | Materialise `effective_finding_*` overlays. | Only Policy Engine identity may hold; ingestion contexts receive `ERR_AOC_006` if they attempt. | +| `findings:read` | Console, CLI, exports | Consume derived findings. | Enforced by Gateway and downstream services. | + +--- + +## 3 · Data & control flow highlights + +1. **Ingestion:** Concelier / Excititor connectors fetch upstream documents, compute linksets, and hand payloads to `AOCWriteGuard`. Guards validate schema, provenance, forbidden fields, supersedes pointers, and append-only rules before writing to Mongo. +2. **Verification:** `stella aoc verify` (CLI/CI) and `/aoc/verify` endpoints replay guard checks against stored documents, mapping `ERR_AOC_00x` codes to exit codes for automation. +3. **Policy evaluation:** Mongo change streams deliver tenant-scoped raw deltas. Policy Engine joins SBOM inventory (via BOM Index), executes deterministic policies, writes overlays, and emits events to Scheduler/Notify. +4. **Experience surfaces:** Console renders an AOC dashboard showing ingestion latency, guard violations, and supersedes depth. CLI exposes raw-document fetch helpers for auditing. Offline Kit bundles raw collections alongside guard configs to keep air-gapped installs verifiable. +5. **Observability:** All services emit `ingestion_write_total`, `aoc_violation_total{code}`, `ingestion_latency_seconds`, and trace spans `ingest.fetch`, `ingest.transform`, `ingest.write`, `aoc.guard`. Logs correlate via `traceId`, `tenant`, `source.vendor`, and `content_hash`. + +--- + +## 4 · Offline & disaster readiness + +- **Offline Kit:** Packages raw Mongo snapshots (`advisory_raw`, `vex_raw`) plus guard configuration and CLI verifier binaries so air-gapped sites can re-run AOC checks before promotion. +- **Recovery:** Supersedes chains allow rollback to prior revisions without mutating documents. Disaster exercises must rehearse restoring from snapshot, replaying change streams into Policy Engine, and re-validating guard compliance. 
+- **Migration:** Legacy normalised fields are moved to temporary views during cutover; ingestion runtime removes writes once guard-enforced path is live (see [Migration playbook](../ingestion/aggregation-only-contract.md#8-migration-playbook)). + +--- + +## 5 · References + +- [Aggregation-Only Contract reference](../ingestion/aggregation-only-contract.md) +- [Concelier architecture](../ARCHITECTURE_CONCELIER.md) +- [Excititor architecture](../ARCHITECTURE_EXCITITOR.md) +- [Policy Engine architecture](policy-engine.md) +- [Authority service](../ARCHITECTURE_AUTHORITY.md) +- [Observability standards (upcoming)](../observability/policy.md) – interim reference for telemetry naming. + +--- + +## 6 · Compliance checklist + +- [ ] AOC guard enabled for all Concelier and Excititor write paths in production. +- [ ] Mongo schema validators deployed for `advisory_raw` and `vex_raw`; change streams scoped per tenant. +- [ ] Authority scopes (`advisory:*`, `vex:*`, `effective:*`) configured in Gateway and validated via integration tests. +- [ ] `stella aoc verify` wired into CI/CD pipelines with seeded violation fixtures. +- [ ] Console AOC dashboard and CLI documentation reference the new ingestion contract. +- [ ] Offline Kit bundles include guard configs, verifier tooling, and documentation updates. +- [ ] Observability dashboards include violation, latency, and supersedes depth metrics with alert thresholds. + +--- + +*Last updated: 2025-10-26 (Sprint 19).* diff --git a/docs/architecture/policy-engine.md b/docs/architecture/policy-engine.md index 8a854670..fffc61ae 100644 --- a/docs/architecture/policy-engine.md +++ b/docs/architecture/policy-engine.md @@ -1,243 +1,243 @@ -# Policy Engine Architecture (v2) - -> **Ownership:** Policy Guild • Platform Guild -> **Services:** `StellaOps.Policy.Engine` (Minimal API + worker host) -> **Data Stores:** MongoDB (`policies`, `policy_runs`, `effective_finding_*`), Object storage (explain bundles), optional NATS/Mongo queue -> **Related docs:** [Policy overview](../policy/overview.md), [DSL](../policy/dsl.md), [Lifecycle](../policy/lifecycle.md), [Runs](../policy/runs.md), [REST API](../api/policy.md), [Policy CLI](../cli/policy.md), [Architecture overview](../architecture/overview.md), [AOC reference](../ingestion/aggregation-only-contract.md) - -This dossier describes the internal structure of the Policy Engine service delivered in Epic 2. It focuses on module boundaries, deterministic evaluation, orchestration, and integration contracts with Concelier, Excititor, SBOM Service, Authority, Scheduler, and Observability stacks. - -The service operates strictly downstream of the **Aggregation-Only Contract (AOC)**. It consumes immutable `advisory_raw` and `vex_raw` documents emitted by Concelier and Excititor, derives findings inside Policy-owned collections, and never mutates ingestion stores. Refer to the architecture overview and AOC reference for system-wide guardrails and provenance obligations. - ---- - -## 1 · Responsibilities & Constraints - -- Compile and evaluate `stella-dsl@1` policy packs into deterministic verdicts. -- Join SBOM inventory, Concelier advisories, and Excititor VEX evidence via canonical linksets and equivalence tables. -- Materialise effective findings (`effective_finding_{policyId}`) with append-only history and produce explain traces. -- Operate incrementally: react to change streams (advisory/vex/SBOM deltas) with ≤ 5 min SLA. -- Provide simulations with diff summaries for UI/CLI workflows without modifying state. 
-- Enforce strict determinism guard (no wall-clock, RNG, network beyond allow-listed services) and RBAC + tenancy via Authority scopes. -- Support sealed/air-gapped deployments with offline bundles and sealed-mode hints. - -Non-goals: policy authoring UI (handled by Console), ingestion or advisory normalisation (Concelier), VEX consensus (Excititor), runtime enforcement (Zastava). - ---- - -## 2 · High-Level Architecture - -```mermaid -graph TD - subgraph Clients - CLI[stella CLI] - UI[Console Policy Editor] - CI[CI Pipelines] - end - subgraph PolicyEngine["StellaOps.Policy.Engine"] - API[Minimal API Host] - Orchestrator[Run Orchestrator] - WorkerPool[Evaluation Workers] - Compiler[DSL Compiler Cache] - Materializer[Effective Findings Writer] - end - subgraph RawStores["Raw Stores (AOC)"] - AdvisoryRaw[(MongoDB
advisory_raw)] - VexRaw[(MongoDB
vex_raw)] - end - subgraph Derived["Derived Stores"] - Mongo[(MongoDB
policies / policy_runs / effective_finding_*)] - Blob[(Object Store / Evidence Locker)] - Queue[(Mongo Queue / NATS)] - end - Concelier[(Concelier APIs)] - Excititor[(Excititor APIs)] - SBOM[(SBOM Service)] - Authority[(Authority / DPoP Gateway)] - - CLI --> API - UI --> API - CI --> API - API --> Compiler - API --> Orchestrator - Orchestrator --> Queue - Queue --> WorkerPool - Concelier --> AdvisoryRaw - Excititor --> VexRaw - WorkerPool --> AdvisoryRaw - WorkerPool --> VexRaw - WorkerPool --> SBOM - WorkerPool --> Materializer - Materializer --> Mongo - WorkerPool --> Blob - API --> Mongo - API --> Blob - API --> Authority - Orchestrator --> Mongo - Authority --> API -``` - -Key notes: - -- API host exposes lifecycle, run, simulate, findings endpoints with DPoP-bound OAuth enforcement. -- Orchestrator manages run scheduling/fairness; writes run tickets to queue, leases jobs to worker pool. -- Workers evaluate policies using cached IR; join external services via tenant-scoped clients; pull immutable advisories/VEX from the raw stores; write derived overlays to Mongo and optional explain bundles to blob storage. -- Observability (metrics/traces/logs) integrated via OpenTelemetry (not shown). - ---- - -### 2.1 · AOC inputs & immutability - -- **Raw-only reads.** Evaluation workers access `advisory_raw` / `vex_raw` via tenant-scoped Mongo clients or the Concelier/Excititor raw APIs. No Policy Engine component is permitted to mutate these collections. -- **Guarded ingestion.** `AOCWriteGuard` rejects forbidden fields before data reaches the raw stores. Policy tests replay known `ERR_AOC_00x` violations to confirm ingestion compliance. -- **Change streams as contract.** Run orchestration stores resumable cursors for raw change streams. Replays of these cursors (e.g., after failover) must yield identical materialisation outcomes. -- **Derived stores only.** All severity, consensus, and suppression state lives in `effective_finding_*` collections and explain bundles owned by Policy Engine. Provenance fields link back to raw document IDs so auditors can trace every verdict. -- **Authority scopes.** Only the Policy Engine service identity holds `effective:write`. Ingestion identities retain `advisory:*`/`vex:*` scopes, ensuring separation of duties enforced by Authority and the API Gateway. - ---- - -## 3 · Module Breakdown - -| Module | Responsibility | Notes | -|--------|----------------|-------| -| **Configuration** (`Configuration/`) | Bind settings (Mongo URIs, queue options, service URLs, sealed mode), validate on start. | Strict schema; fails fast on missing secrets. | -| **Authority Client** (`Authority/`) | Acquire tokens, enforce scopes, perform DPoP key rotation. | Only service identity uses `effective:write`. | -| **DSL Compiler** (`Dsl/`) | Parse, canonicalise, IR generation, checksum caching. | Uses Roslyn-like pipeline; caches by `policyId+version+hash`. | -| **Selection Layer** (`Selection/`) | Batch SBOM ↔ advisory ↔ VEX joiners; apply equivalence tables; support incremental cursors. | Deterministic ordering (SBOM → advisory → VEX). | -| **Evaluator** (`Evaluation/`) | Execute IR with first-match semantics, compute severity/trust/reachability weights, record rule hits. | Stateless; all inputs provided by selection layer. | -| **Materialiser** (`Materialization/`) | Upsert effective findings, append history, manage explain bundle exports. | Mongo transactions per SBOM chunk. | -| **Orchestrator** (`Runs/`) | Change-stream ingestion, fairness, retry/backoff, queue writer. 
| Works with Scheduler Models DTOs. | -| **API** (`Api/`) | Minimal API endpoints, DTO validation, problem responses, idempotency. | Generated clients for CLI/UI. | -| **Observability** (`Telemetry/`) | Metrics (`policy_run_seconds`, `rules_fired_total`), traces, structured logs. | Sampled rule-hit logs with redaction. | -| **Offline Adapter** (`Offline/`) | Bundle export/import (policies, simulations, runs), sealed-mode enforcement. | Uses DSSE signing via Signer service. | - ---- - -## 4 · Data Model & Persistence - -### 4.1 Collections - -- `policies` – policy versions, metadata, lifecycle states, simulation artefact references. -- `policy_runs` – run records, inputs (cursors, env), stats, determinism hash, run status. -- `policy_run_events` – append-only log (queued, leased, completed, failed, canceled, replay). -- `effective_finding_{policyId}` – current verdict snapshot per finding. -- `effective_finding_{policyId}_history` – append-only history (previous verdicts, timestamps, runId). -- `policy_reviews` – review comments/decisions. - -### 4.2 Schema Highlights - -- Run records include `changeDigests` (hash of advisory/VEX inputs) for replay verification. -- Effective findings store provenance references (`advisory_raw_ids`, `vex_raw_ids`, `sbom_component_id`). -- All collections include `tenant`, `policyId`, `version`, `createdAt`, `updatedAt`, `traceId` for audit. - -### 4.3 Indexing - -- Compound indexes: `{tenant, policyId, status}` on `policies`; `{tenant, policyId, status, startedAt}` on `policy_runs`; `{policyId, sbomId, findingKey}` on findings. -- TTL indexes on transient explain bundle references (configurable). - ---- - -## 5 · Evaluation Pipeline - -```mermaid -sequenceDiagram - autonumber - participant Worker as EvaluationWorker - participant Compiler as CompilerCache - participant Selector as SelectionLayer - participant Eval as Evaluator - participant Mat as Materialiser - participant Expl as ExplainStore - - Worker->>Compiler: Load IR (policyId, version, digest) - Compiler-->>Worker: CompiledPolicy (cached or compiled) - Worker->>Selector: Fetch tuple batches (sbom, advisory, vex) - Selector-->>Worker: Deterministic batches (1024 tuples) - loop For each batch - Worker->>Eval: Execute rules (batch, env) - Eval-->>Worker: Verdicts + rule hits - Worker->>Mat: Upsert effective findings - Mat-->>Worker: Success - Worker->>Expl: Persist sampled explain traces (optional) - end - Worker->>Mat: Append history + run stats - Worker-->>Worker: Compute determinism hash - Worker->>+Mat: Finalize transaction - Mat-->>Worker: Ack -``` - -Determinism guard instrumentation wraps the evaluator, rejecting access to forbidden APIs and ensuring batch ordering remains stable. - ---- - -## 6 · Run Orchestration & Incremental Flow - -- **Change streams:** Concelier and Excititor publish document changes to the scheduler queue (`policy.trigger.delta`). Payload includes `tenant`, `source`, `linkset digests`, `cursor`. -- **Orchestrator:** Maintains per-tenant backlog; merges deltas until time/size thresholds met, then enqueues `PolicyRunRequest`. -- **Queue:** Mongo queue with lease; each job assigned `leaseDuration`, `maxAttempts`. -- **Workers:** Lease jobs, execute evaluation pipeline, report status (success/failure/canceled). Failures with recoverable errors requeue with backoff; determinism or schema violations mark job `failed` and raise incident event. -- **Fairness:** Round-robin per `{tenant, policyId}`; emergency jobs (`priority=emergency`) jump queue but limited via circuit breaker. 
-- **Replay:** On demand, orchestrator rehydrates run via stored cursors and exports sealed bundle for audit/CI determinism checks. - ---- - -## 7 · Security & Tenancy - -- **Auth:** All API calls pass through Authority gateway; DPoP tokens enforced for service-to-service (Policy Engine service principal). CLI/UI tokens include scope claims. -- **Scopes:** Mutations require `policy:*` scopes corresponding to action; `effective:write` restricted to service identity. -- **Tenancy:** All queries filter by `tenant`. Service identity uses `tenant-global` for shared policies; cross-tenant reads prohibited unless `policy:tenant-admin` scope present. -- **Secrets:** Configuration loaded via environment variables or sealed secrets; runtime avoids writing secrets to logs. -- **Determinism guard:** Static analyzer prevents referencing forbidden namespaces; runtime guard intercepts `DateTime.Now`, `Random`, `Guid`, HTTP clients beyond allow-list. -- **Sealed mode:** Global flag disables outbound network except allow-listed internal hosts; watchers fail fast if unexpected egress attempted. - ---- - -## 8 · Observability - -- Metrics: - - `policy_run_seconds{mode,tenant,policy}` (histogram) - - `policy_run_queue_depth{tenant}` - - `policy_rules_fired_total{policy,rule}` - - `policy_vex_overrides_total{policy,vendor}` -- Logs: Structured JSON with `traceId`, `policyId`, `version`, `runId`, `tenant`, `phase`. Guard ensures no sensitive data leakage. -- Traces: Spans `policy.select`, `policy.evaluate`, `policy.materialize`, `policy.simulate`. Trace IDs surfaced to CLI/UI. -- Incident mode toggles 100 % sampling and extended retention windows. - ---- - -## 9 · Offline / Bundle Integration - -- **Imports:** Offline Kit delivers policy packs, advisory/VEX snapshots, SBOM updates. Policy Engine ingests bundles via `offline import`. -- **Exports:** `stella policy bundle export` packages policy, IR digest, simulations, run metadata; UI provides export triggers. -- **Sealed hints:** Explain traces annotate when cached values used (EPSS, KEV). Run records mark `env.sealed=true`. -- **Sync cadence:** Operators perform monthly bundle sync; Policy Engine warns when snapshots > configured staleness (default 14 days). - ---- - -## 10 · Testing & Quality - -- **Unit tests:** DSL parsing, evaluator semantics, guard enforcement. -- **Integration tests:** Joiners with sample SBOM/advisory/VEX data; materialisation with deterministic ordering; API contract tests generated from OpenAPI. -- **Property tests:** Ensure rule evaluation deterministic across permutations. -- **Golden tests:** Replay recorded runs, compare determinism hash. -- **Performance tests:** Evaluate 100k component / 1M advisory dataset under warmed caches (<30 s full run). -- **Chaos hooks:** Optional toggles to simulate upstream latency/failures; used in staging. - ---- - -## 11 · Compliance Checklist - -- [ ] **Determinism guard enforced:** Static analyzer + runtime guard block wall-clock, RNG, unauthorized network calls. -- [ ] **Incremental correctness:** Change-stream cursors stored and replayed during tests; unit/integration coverage for dedupe. -- [ ] **RBAC validated:** Endpoint scope requirements match Authority configuration; integration tests cover deny/allow. -- [ ] **AOC separation enforced:** No code path writes to `advisory_raw` / `vex_raw`; integration tests capture `ERR_AOC_00x` handling; read-only clients verified. 
-- [ ] **Effective findings ownership:** Only Policy Engine identity holds `effective:write`; unauthorized callers receive `ERR_AOC_006`. -- [ ] **Observability wired:** Metrics/traces/logs exported with correlation IDs; dashboards include `aoc_violation_total` and ingest latency panels. -- [ ] **Offline parity:** Sealed-mode tests executed; bundle import/export flows documented and validated. -- [ ] **Schema docs synced:** DTOs match Scheduler Models (`SCHED-MODELS-20-001`); JSON schemas committed. -- [ ] **Security reviews complete:** Threat model (including queue poisoning, determinism bypass, data exfiltration) documented; mitigations in place. -- [ ] **Disaster recovery rehearsed:** Run replay+rollback procedures tested and recorded. - ---- - -*Last updated: 2025-10-26 (Sprint 19).* +# Policy Engine Architecture (v2) + +> **Ownership:** Policy Guild • Platform Guild +> **Services:** `StellaOps.Policy.Engine` (Minimal API + worker host) +> **Data Stores:** MongoDB (`policies`, `policy_runs`, `effective_finding_*`), Object storage (explain bundles), optional NATS/Mongo queue +> **Related docs:** [Policy overview](../policy/overview.md), [DSL](../policy/dsl.md), [Lifecycle](../policy/lifecycle.md), [Runs](../policy/runs.md), [REST API](../api/policy.md), [Policy CLI](../cli/policy.md), [Architecture overview](../architecture/overview.md), [AOC reference](../ingestion/aggregation-only-contract.md) + +This dossier describes the internal structure of the Policy Engine service delivered in Epic 2. It focuses on module boundaries, deterministic evaluation, orchestration, and integration contracts with Concelier, Excititor, SBOM Service, Authority, Scheduler, and Observability stacks. + +The service operates strictly downstream of the **Aggregation-Only Contract (AOC)**. It consumes immutable `advisory_raw` and `vex_raw` documents emitted by Concelier and Excititor, derives findings inside Policy-owned collections, and never mutates ingestion stores. Refer to the architecture overview and AOC reference for system-wide guardrails and provenance obligations. + +--- + +## 1 · Responsibilities & Constraints + +- Compile and evaluate `stella-dsl@1` policy packs into deterministic verdicts. +- Join SBOM inventory, Concelier advisories, and Excititor VEX evidence via canonical linksets and equivalence tables. +- Materialise effective findings (`effective_finding_{policyId}`) with append-only history and produce explain traces. +- Operate incrementally: react to change streams (advisory/vex/SBOM deltas) with ≤ 5 min SLA. +- Provide simulations with diff summaries for UI/CLI workflows without modifying state. +- Enforce strict determinism guard (no wall-clock, RNG, network beyond allow-listed services) and RBAC + tenancy via Authority scopes. +- Support sealed/air-gapped deployments with offline bundles and sealed-mode hints. + +Non-goals: policy authoring UI (handled by Console), ingestion or advisory normalisation (Concelier), VEX consensus (Excititor), runtime enforcement (Zastava). + +--- + +## 2 · High-Level Architecture + +```mermaid +graph TD + subgraph Clients + CLI[stella CLI] + UI[Console Policy Editor] + CI[CI Pipelines] + end + subgraph PolicyEngine["StellaOps.Policy.Engine"] + API[Minimal API Host] + Orchestrator[Run Orchestrator] + WorkerPool[Evaluation Workers] + Compiler[DSL Compiler Cache] + Materializer[Effective Findings Writer] + end + subgraph RawStores["Raw Stores (AOC)"] + AdvisoryRaw[(MongoDB
advisory_raw)] + VexRaw[(MongoDB
vex_raw)] + end + subgraph Derived["Derived Stores"] + Mongo[(MongoDB
policies / policy_runs / effective_finding_*)] + Blob[(Object Store / Evidence Locker)] + Queue[(Mongo Queue / NATS)] + end + Concelier[(Concelier APIs)] + Excititor[(Excititor APIs)] + SBOM[(SBOM Service)] + Authority[(Authority / DPoP Gateway)] + + CLI --> API + UI --> API + CI --> API + API --> Compiler + API --> Orchestrator + Orchestrator --> Queue + Queue --> WorkerPool + Concelier --> AdvisoryRaw + Excititor --> VexRaw + WorkerPool --> AdvisoryRaw + WorkerPool --> VexRaw + WorkerPool --> SBOM + WorkerPool --> Materializer + Materializer --> Mongo + WorkerPool --> Blob + API --> Mongo + API --> Blob + API --> Authority + Orchestrator --> Mongo + Authority --> API +``` + +Key notes: + +- API host exposes lifecycle, run, simulate, findings endpoints with DPoP-bound OAuth enforcement. +- Orchestrator manages run scheduling/fairness; writes run tickets to queue, leases jobs to worker pool. +- Workers evaluate policies using cached IR; join external services via tenant-scoped clients; pull immutable advisories/VEX from the raw stores; write derived overlays to Mongo and optional explain bundles to blob storage. +- Observability (metrics/traces/logs) integrated via OpenTelemetry (not shown). + +--- + +### 2.1 · AOC inputs & immutability + +- **Raw-only reads.** Evaluation workers access `advisory_raw` / `vex_raw` via tenant-scoped Mongo clients or the Concelier/Excititor raw APIs. No Policy Engine component is permitted to mutate these collections. +- **Guarded ingestion.** `AOCWriteGuard` rejects forbidden fields before data reaches the raw stores. Policy tests replay known `ERR_AOC_00x` violations to confirm ingestion compliance. +- **Change streams as contract.** Run orchestration stores resumable cursors for raw change streams. Replays of these cursors (e.g., after failover) must yield identical materialisation outcomes. +- **Derived stores only.** All severity, consensus, and suppression state lives in `effective_finding_*` collections and explain bundles owned by Policy Engine. Provenance fields link back to raw document IDs so auditors can trace every verdict. +- **Authority scopes.** Only the Policy Engine service identity holds `effective:write`. Ingestion identities retain `advisory:*`/`vex:*` scopes, ensuring separation of duties enforced by Authority and the API Gateway. + +--- + +## 3 · Module Breakdown + +| Module | Responsibility | Notes | +|--------|----------------|-------| +| **Configuration** (`Configuration/`) | Bind settings (Mongo URIs, queue options, service URLs, sealed mode), validate on start. | Strict schema; fails fast on missing secrets. | +| **Authority Client** (`Authority/`) | Acquire tokens, enforce scopes, perform DPoP key rotation. | Only service identity uses `effective:write`. | +| **DSL Compiler** (`Dsl/`) | Parse, canonicalise, IR generation, checksum caching. | Uses Roslyn-like pipeline; caches by `policyId+version+hash`. | +| **Selection Layer** (`Selection/`) | Batch SBOM ↔ advisory ↔ VEX joiners; apply equivalence tables; support incremental cursors. | Deterministic ordering (SBOM → advisory → VEX). | +| **Evaluator** (`Evaluation/`) | Execute IR with first-match semantics, compute severity/trust/reachability weights, record rule hits. | Stateless; all inputs provided by selection layer. | +| **Materialiser** (`Materialization/`) | Upsert effective findings, append history, manage explain bundle exports. | Mongo transactions per SBOM chunk. | +| **Orchestrator** (`Runs/`) | Change-stream ingestion, fairness, retry/backoff, queue writer. 
| Works with Scheduler Models DTOs. | +| **API** (`Api/`) | Minimal API endpoints, DTO validation, problem responses, idempotency. | Generated clients for CLI/UI. | +| **Observability** (`Telemetry/`) | Metrics (`policy_run_seconds`, `rules_fired_total`), traces, structured logs. | Sampled rule-hit logs with redaction. | +| **Offline Adapter** (`Offline/`) | Bundle export/import (policies, simulations, runs), sealed-mode enforcement. | Uses DSSE signing via Signer service. | + +--- + +## 4 · Data Model & Persistence + +### 4.1 Collections + +- `policies` – policy versions, metadata, lifecycle states, simulation artefact references. +- `policy_runs` – run records, inputs (cursors, env), stats, determinism hash, run status. +- `policy_run_events` – append-only log (queued, leased, completed, failed, canceled, replay). +- `effective_finding_{policyId}` – current verdict snapshot per finding. +- `effective_finding_{policyId}_history` – append-only history (previous verdicts, timestamps, runId). +- `policy_reviews` – review comments/decisions. + +### 4.2 Schema Highlights + +- Run records include `changeDigests` (hash of advisory/VEX inputs) for replay verification. +- Effective findings store provenance references (`advisory_raw_ids`, `vex_raw_ids`, `sbom_component_id`). +- All collections include `tenant`, `policyId`, `version`, `createdAt`, `updatedAt`, `traceId` for audit. + +### 4.3 Indexing + +- Compound indexes: `{tenant, policyId, status}` on `policies`; `{tenant, policyId, status, startedAt}` on `policy_runs`; `{policyId, sbomId, findingKey}` on findings. +- TTL indexes on transient explain bundle references (configurable). + +--- + +## 5 · Evaluation Pipeline + +```mermaid +sequenceDiagram + autonumber + participant Worker as EvaluationWorker + participant Compiler as CompilerCache + participant Selector as SelectionLayer + participant Eval as Evaluator + participant Mat as Materialiser + participant Expl as ExplainStore + + Worker->>Compiler: Load IR (policyId, version, digest) + Compiler-->>Worker: CompiledPolicy (cached or compiled) + Worker->>Selector: Fetch tuple batches (sbom, advisory, vex) + Selector-->>Worker: Deterministic batches (1024 tuples) + loop For each batch + Worker->>Eval: Execute rules (batch, env) + Eval-->>Worker: Verdicts + rule hits + Worker->>Mat: Upsert effective findings + Mat-->>Worker: Success + Worker->>Expl: Persist sampled explain traces (optional) + end + Worker->>Mat: Append history + run stats + Worker-->>Worker: Compute determinism hash + Worker->>+Mat: Finalize transaction + Mat-->>Worker: Ack +``` + +Determinism guard instrumentation wraps the evaluator, rejecting access to forbidden APIs and ensuring batch ordering remains stable. + +--- + +## 6 · Run Orchestration & Incremental Flow + +- **Change streams:** Concelier and Excititor publish document changes to the scheduler queue (`policy.trigger.delta`). Payload includes `tenant`, `source`, `linkset digests`, `cursor`. +- **Orchestrator:** Maintains per-tenant backlog; merges deltas until time/size thresholds met, then enqueues `PolicyRunRequest`. +- **Queue:** Mongo queue with lease; each job assigned `leaseDuration`, `maxAttempts`. +- **Workers:** Lease jobs, execute evaluation pipeline, report status (success/failure/canceled). Failures with recoverable errors requeue with backoff; determinism or schema violations mark job `failed` and raise incident event. +- **Fairness:** Round-robin per `{tenant, policyId}`; emergency jobs (`priority=emergency`) jump queue but limited via circuit breaker. 
+- **Replay:** On demand, orchestrator rehydrates run via stored cursors and exports sealed bundle for audit/CI determinism checks. + +--- + +## 7 · Security & Tenancy + +- **Auth:** All API calls pass through Authority gateway; DPoP tokens enforced for service-to-service (Policy Engine service principal). CLI/UI tokens include scope claims. +- **Scopes:** Mutations require `policy:*` scopes corresponding to action; `effective:write` restricted to service identity. +- **Tenancy:** All queries filter by `tenant`. Service identity uses `tenant-global` for shared policies; cross-tenant reads prohibited unless `policy:tenant-admin` scope present. +- **Secrets:** Configuration loaded via environment variables or sealed secrets; runtime avoids writing secrets to logs. +- **Determinism guard:** Static analyzer prevents referencing forbidden namespaces; runtime guard intercepts `DateTime.Now`, `Random`, `Guid`, HTTP clients beyond allow-list. +- **Sealed mode:** Global flag disables outbound network except allow-listed internal hosts; watchers fail fast if unexpected egress attempted. + +--- + +## 8 · Observability + +- Metrics: + - `policy_run_seconds{mode,tenant,policy}` (histogram) + - `policy_run_queue_depth{tenant}` + - `policy_rules_fired_total{policy,rule}` + - `policy_vex_overrides_total{policy,vendor}` +- Logs: Structured JSON with `traceId`, `policyId`, `version`, `runId`, `tenant`, `phase`. Guard ensures no sensitive data leakage. +- Traces: Spans `policy.select`, `policy.evaluate`, `policy.materialize`, `policy.simulate`. Trace IDs surfaced to CLI/UI. +- Incident mode toggles 100 % sampling and extended retention windows. + +--- + +## 9 · Offline / Bundle Integration + +- **Imports:** Offline Kit delivers policy packs, advisory/VEX snapshots, SBOM updates. Policy Engine ingests bundles via `offline import`. +- **Exports:** `stella policy bundle export` packages policy, IR digest, simulations, run metadata; UI provides export triggers. +- **Sealed hints:** Explain traces annotate when cached values used (EPSS, KEV). Run records mark `env.sealed=true`. +- **Sync cadence:** Operators perform monthly bundle sync; Policy Engine warns when snapshots > configured staleness (default 14 days). + +--- + +## 10 · Testing & Quality + +- **Unit tests:** DSL parsing, evaluator semantics, guard enforcement. +- **Integration tests:** Joiners with sample SBOM/advisory/VEX data; materialisation with deterministic ordering; API contract tests generated from OpenAPI. +- **Property tests:** Ensure rule evaluation deterministic across permutations. +- **Golden tests:** Replay recorded runs, compare determinism hash. +- **Performance tests:** Evaluate 100k component / 1M advisory dataset under warmed caches (<30 s full run). +- **Chaos hooks:** Optional toggles to simulate upstream latency/failures; used in staging. + +--- + +## 11 · Compliance Checklist + +- [ ] **Determinism guard enforced:** Static analyzer + runtime guard block wall-clock, RNG, unauthorized network calls. +- [ ] **Incremental correctness:** Change-stream cursors stored and replayed during tests; unit/integration coverage for dedupe. +- [ ] **RBAC validated:** Endpoint scope requirements match Authority configuration; integration tests cover deny/allow. +- [ ] **AOC separation enforced:** No code path writes to `advisory_raw` / `vex_raw`; integration tests capture `ERR_AOC_00x` handling; read-only clients verified. 
+- [ ] **Effective findings ownership:** Only Policy Engine identity holds `effective:write`; unauthorized callers receive `ERR_AOC_006`. +- [ ] **Observability wired:** Metrics/traces/logs exported with correlation IDs; dashboards include `aoc_violation_total` and ingest latency panels. +- [ ] **Offline parity:** Sealed-mode tests executed; bundle import/export flows documented and validated. +- [ ] **Schema docs synced:** DTOs match Scheduler Models (`SCHED-MODELS-20-001`); JSON schemas committed. +- [ ] **Security reviews complete:** Threat model (including queue poisoning, determinism bypass, data exfiltration) documented; mitigations in place. +- [ ] **Disaster recovery rehearsed:** Run replay+rollback procedures tested and recorded. + +--- + +*Last updated: 2025-10-26 (Sprint 19).* diff --git a/docs/assets/ui/tours/README.md b/docs/assets/ui/tours/README.md index dc8978fd..d16015b3 100644 --- a/docs/assets/ui/tours/README.md +++ b/docs/assets/ui/tours/README.md @@ -1,13 +1,13 @@ -# UI Tours Media Assets - -Store annotated screenshots and GIFs referenced by `/docs/examples/ui-tours.md` in this directory. Use the naming convention documented in the guide (e.g., `triage-step-01.png`, `triage-flow.gif`). - -## Contribution checklist - -- Capture at 1920×1080 resolution unless otherwise specified. -- Add annotations using the shared Docs Guild template (narrow callouts, numbered badges). -- Optimize images to stay below 2 MB (PNG) and 8 MB (GIF) while preserving legibility. -- Record GIFs at ≤30 seconds using 12–15 fps for balance between smoothness and size. -- Update the capture checklist in `docs/examples/ui-tours.md` when assets are added or replaced. -- Commit binaries using Git LFS if size exceeds repository limits; otherwise store directly. -- Include the console build hash in the asset metadata or caption, matching the Downloads manifest version. +# UI Tours Media Assets + +Store annotated screenshots and GIFs referenced by `/docs/examples/ui-tours.md` in this directory. Use the naming convention documented in the guide (e.g., `triage-step-01.png`, `triage-flow.gif`). + +## Contribution checklist + +- Capture at 1920×1080 resolution unless otherwise specified. +- Add annotations using the shared Docs Guild template (narrow callouts, numbered badges). +- Optimize images to stay below 2 MB (PNG) and 8 MB (GIF) while preserving legibility. +- Record GIFs at ≤30 seconds using 12–15 fps for balance between smoothness and size. +- Update the capture checklist in `docs/examples/ui-tours.md` when assets are added or replaced. +- Commit binaries using Git LFS if size exceeds repository limits; otherwise store directly. +- Include the console build hash in the asset metadata or caption, matching the Downloads manifest version. diff --git a/docs/attestor/EPIC_19_ATTESTOR_CONSOLE.md b/docs/attestor/EPIC_19_ATTESTOR_CONSOLE.md index 508e7217..22b74685 100644 --- a/docs/attestor/EPIC_19_ATTESTOR_CONSOLE.md +++ b/docs/attestor/EPIC_19_ATTESTOR_CONSOLE.md @@ -76,7 +76,7 @@ Use compressed JSON payloads, cached verification results, batched operations, a ## 4) Architecture -New services (`src/StellaOps.Attestor/`), libraries (`src/StellaOps.Attestor.Envelope/`, `src/StellaOps.Attestor.Types/`, `src/StellaOps.Attestor.Verify/`), CLI (`src/StellaOps.Cli/`), export tooling (`src/StellaOps.ExportCenter.AttestationBundles/`), and shared KMS providers (`src/StellaOps.Cryptography.Kms/`). REST endpoints documented in OpenAPI. 
+New services (`src/Attestor/StellaOps.Attestor/`), libraries (`src/Attestor/StellaOps.Attestor.Envelope/`, `src/Attestor/StellaOps.Attestor.Types/`, `src/Attestor/StellaOps.Attestor.Verify/`), CLI (`src/Cli/StellaOps.Cli/`), export tooling (`src/ExportCenter/StellaOps.ExportCenter.AttestationBundles/`), and shared KMS providers (`src/__Libraries/StellaOps.Cryptography.Kms/`). REST endpoints documented in OpenAPI. --- diff --git a/docs/backlog/2025-10-cleanup.md b/docs/backlog/2025-10-cleanup.md index 5fdd8cfa..b98a2b29 100644 --- a/docs/backlog/2025-10-cleanup.md +++ b/docs/backlog/2025-10-cleanup.md @@ -12,6 +12,6 @@ This note captures the Sprint backlog hygiene pass applied on 26 October 2025. T - **CI/Offline adjustments.** `DEVOPS-UI-13-006` and `DEVOPS-OFFLINE-18-003` moved under Console release tasks (`CONSOLE-QA-23-401`, `DEVOPS-CONSOLE-23-001`, `CONSOLE-REL-23-302`). ## Follow-up -- Update module task boards only under their active backlogs (`src/StellaOps.Notifier`, Cartographer, Vuln Explorer). +- Update module task boards only under their active backlogs (`src/Notifier/StellaOps.Notifier`, Cartographer, Vuln Explorer). - Ensure future ingestion tasks reference AOC guardrails and avoid derived semantics. -- Cross-check `SPRINTS.md` after adding new tasks to keep tables consistent with module `TASKS.md` files. +- Cross-check `../implplan/SPRINTS.md` after adding new tasks to keep tables consistent with module `TASKS.md` files. diff --git a/docs/cli-vs-ui-parity.md b/docs/cli-vs-ui-parity.md index 1acf437d..0fde684a 100644 --- a/docs/cli-vs-ui-parity.md +++ b/docs/cli-vs-ui-parity.md @@ -146,7 +146,7 @@ The script should emit a parity report that feeds into the Downloads workspace ( - `/docs/install/docker.md` – CLI parity section for deployments. - `/docs/observability/ui-telemetry.md` – telemetry metrics referencing CLI checks. - `/docs/security/console-security.md` – security metrics & CLI parity expectations. -- `src/StellaOps.Cli/TASKS.md` – authoritative status for CLI backlog. +- `src/Cli/StellaOps.Cli/TASKS.md` – authoritative status for CLI backlog. - `/docs/updates/2025-10-28-docs-guild.md` – coordination note for Authority/Security follow-up. --- diff --git a/docs/cli/cli-reference.md b/docs/cli/cli-reference.md index 8bcc0069..1f88de91 100644 --- a/docs/cli/cli-reference.md +++ b/docs/cli/cli-reference.md @@ -1,316 +1,316 @@ -# CLI AOC Commands Reference - -> **Audience:** DevEx engineers, operators, and CI authors integrating the `stella` CLI with Aggregation-Only Contract (AOC) workflows. -> **Scope:** Command synopsis, options, exit codes, and offline considerations for `stella sources ingest --dry-run` and `stella aoc verify` as introduced in Sprint 19. - -Both commands are designed to enforce the AOC guardrails documented in the [aggregation-only reference](../ingestion/aggregation-only-contract.md) and the [architecture overview](../architecture/overview.md). They consume Authority-issued tokens with tenant scopes and never mutate ingestion stores. - ---- - -## 1 · Prerequisites - -- CLI version: `stella` ≥ 0.19.0 (AOC feature gate enabled). -- Required scopes (DPoP-bound): - - `advisory:read` for Concelier sources. - - `vex:read` for Excititor sources (optional but required for VEX checks). - - `aoc:verify` to invoke guard verification endpoints. - - `tenant:select` if your deployment uses tenant switching. -- Connectivity: direct access to Concelier/Excititor APIs or Offline Kit snapshot (see § 4). 
-- Environment: set `STELLA_AUTHORITY_URL`, `STELLA_TENANT`, and export a valid OpTok via `stella auth login` or existing token cache. - ---- - -## 2 · `stella sources ingest --dry-run` - -### 2.1 Synopsis - -```bash -stella sources ingest --dry-run \ - --source \ - --input \ - [--tenant ] \ - [--format json|table] \ - [--no-color] \ - [--output ] -``` - -### 2.2 Description - -Previews an ingestion write without touching MongoDB. The command loads an upstream advisory or VEX document, computes the would-write payload, runs it through the `AOCWriteGuard`, and reports any forbidden fields, provenance gaps, or idempotency issues. Use it during connector development, CI validation, or while triaging incidents. - -### 2.3 Options - -| Option | Description | -|--------|-------------| -| `--source ` | Logical source name (`redhat`, `ubuntu`, `osv`, etc.). Mirrors connector configuration. | -| `--input ` | Path to local CSAF/OSV/VEX file or HTTPS URI. CLI normalises transport (gzip/base64) before guard evaluation. | -| `--tenant ` | Overrides default tenant for multi-tenant deployments. Mandatory when `STELLA_TENANT` is not set. | -| `--format json|table` | Output format. `table` (default) prints summary with highlighted violations; `json` emits machine-readable report (see below). | -| `--no-color` | Disables ANSI colour output for CI logs. | -| `--output ` | Writes the JSON report to file while still printing human-readable summary to stdout. | - -### 2.4 Output schema (JSON) - -```json -{ - "source": "redhat", - "tenant": "default", - "guardVersion": "1.0.0", - "status": "ok", - "document": { - "contentHash": "sha256:…", - "supersedes": null, - "provenance": { - "signature": { "format": "pgp", "present": true } - } - }, - "violations": [] -} -``` - -When violations exist, `status` becomes `error` and `violations` contains entries with `code` (`ERR_AOC_00x`), a short `message`, and JSON Pointer `path` values indicating offending fields. - -### 2.5 Exit codes - -| Exit code | Meaning | -|-----------|---------| -| `0` | Guard passed; would-write payload is AOC compliant. | -| `11` | `ERR_AOC_001` – Forbidden field (`severity`, `cvss`, etc.) detected. | -| `12` | `ERR_AOC_002` – Merge attempt (multiple upstream sources fused). | -| `13` | `ERR_AOC_003` – Idempotency violation (duplicate without supersedes). | -| `14` | `ERR_AOC_004` – Missing provenance fields. | -| `15` | `ERR_AOC_005` – Signature/checksum mismatch. | -| `16` | `ERR_AOC_006` – Effective findings present (Policy-only data). | -| `17` | `ERR_AOC_007` – Unknown top-level fields / schema violation. | -| `70` | Transport error (network, auth, malformed input). | - -> Exit codes map directly to the `ERR_AOC_00x` table for scripting consistency. Multiple violations yield the highest-priority code (e.g., 11 takes precedence over 14). - -### 2.6 Examples - -Dry-run a local CSAF file: - -```bash -stella sources ingest --dry-run \ - --source redhat \ - --input ./fixtures/redhat/RHSA-2025-1234.json -``` - -Stream from HTTPS and emit JSON for CI: - -```bash -stella sources ingest --dry-run \ - --source osv \ - --input https://osv.dev/vulnerability/GHSA-aaaa-bbbb \ - --format json \ - --output artifacts/osv-dry-run.json - -cat artifacts/osv-dry-run.json | jq '.violations' -``` - -### 2.7 Offline notes - -When operating in sealed/offline mode: - -- Use `--input` paths pointing to Offline Kit snapshots (`offline-kit/advisories/*.json`). -- Provide `--tenant` explicitly if the offline bundle contains multiple tenants. 
-- The command does not attempt network access when given a file path. -- Store reports with `--output` to include in transfer packages for policy review. - ---- - -## 3 · `stella aoc verify` - -### 3.1 Synopsis - -```bash -stella aoc verify \ - [--since ] \ - [--limit ] \ - [--sources ] \ - [--codes ] \ - [--format table|json] \ - [--export ] \ - [--tenant ] \ - [--no-color] -``` - -### 3.2 Description - -Replays the AOC guard against stored raw documents. By default it checks all advisories and VEX statements ingested in the last 24 hours for the active tenant, reporting totals, top violation codes, and sample documents. Use it in CI pipelines, scheduled verifications, or during incident response. - -### 3.3 Options - -| Option | Description | -|--------|-------------| -| `--since ` | Verification window. Accepts ISO 8601 timestamp (`2025-10-25T12:00:00Z`) or duration (`48h`, `7d`). Defaults to `24h`. | -| `--limit ` | Maximum number of violations to display (per code). `0` means show all. Defaults to `20`. | -| `--sources ` | Comma-separated list of sources (`redhat,ubuntu,osv`). Filters both advisories and VEX entries. | -| `--codes ` | Restricts output to specific `ERR_AOC_00x` codes. Useful for regression tracking. | -| `--format table|json` | `table` (default) prints summary plus top violations; `json` outputs machine-readable report identical to the `/aoc/verify` API. | -| `--export ` | Writes the JSON report to disk (useful for audits/offline uploads). | -| `--tenant ` | Overrides tenant context. Required for cross-tenant verifications when run by platform operators. | -| `--no-color` | Disables ANSI colours. | - -`table` mode prints a summary showing the active tenant, evaluated window, counts of checked advisories/VEX statements, the active limit, total writes/violations, and whether the page was truncated. Status is colour-coded as `ok`, `violations`, or `truncated`. When violations exist the detail table lists the code, total occurrences, first sample document (`source` + `documentId` + `contentHash`), and JSON pointer path. - -### 3.4 Report structure (JSON) - -```json -{ - "tenant": "default", - "window": { - "from": "2025-10-25T12:00:00Z", - "to": "2025-10-26T12:00:00Z" - }, - "checked": { - "advisories": 482, - "vex": 75 - }, - "violations": [ - { - "code": "ERR_AOC_001", - "count": 2, - "examples": [ - { - "source": "redhat", - "documentId": "advisory_raw:redhat:RHSA-2025:1", - "contentHash": "sha256:…", - "path": "/content/raw/cvss" - } - ] - } - ], - "metrics": { - "ingestion_write_total": 557, - "aoc_violation_total": 2 - }, - "truncated": false -} -``` - -### 3.5 Exit codes - -| Exit code | Meaning | -|-----------|---------| -| `0` | Verification succeeded with zero violations. | -| `11…17` | Same mapping as § 2.5 when violations are detected. Highest-priority code returned. | -| `18` | Verification ran but results truncated (limit reached) – treat as warning; rerun with higher `--limit`. | -| `70` | Transport/authentication error. | -| `71` | CLI misconfiguration (missing tenant, invalid `--since`, etc.). 
| - -### 3.6 Examples - -Daily verification across all sources: - -```bash -stella aoc verify --since 24h --format table -``` - -CI pipeline focusing on errant sources and exporting evidence: - -```bash -stella aoc verify \ - --sources redhat,ubuntu \ - --codes ERR_AOC_001,ERR_AOC_004 \ - --format json \ - --limit 100 \ - --export artifacts/aoc-verify.json - -jq '.violations[] | {code, count}' artifacts/aoc-verify.json -``` - -Air-gapped verification using Offline Kit snapshot (example script): - -```bash -stella aoc verify \ - --since 7d \ - --format json \ - --export /mnt/offline/aoc-verify-$(date +%F).json - -sha256sum /mnt/offline/aoc-verify-*.json > /mnt/offline/checksums.txt -``` - -### 3.7 Automation tips - -- Schedule with `cron` or platform scheduler and fail the job when exit code ≥ 11. -- Pair with `stella sources ingest --dry-run` for pre-flight validation before re-enabling a paused source. -- Push JSON exports to observability pipelines for historical tracking of violation counts. - -### 3.8 Offline notes - -- Works against Offline Kit Mongo snapshots when CLI is pointed at the local API gateway included in the bundle. -- When fully disconnected, run against exported `aoc verify` reports generated on production and replay them using `--format json --export` (automation recipe above). -- Include verification output in compliance packages alongside Offline Kit manifests. - ---- - -## 4 · Global exit-code reference - -| Code | Summary | -|------|---------| -| `0` | Success / no violations. | -| `11` | `ERR_AOC_001` – Forbidden field present. | -| `12` | `ERR_AOC_002` – Merge attempt detected. | -| `13` | `ERR_AOC_003` – Idempotency violation. | -| `14` | `ERR_AOC_004` – Missing provenance/signature metadata. | -| `15` | `ERR_AOC_005` – Signature/checksum mismatch. | -| `16` | `ERR_AOC_006` – Effective findings in ingestion payload. | -| `17` | `ERR_AOC_007` – Schema violation / unknown fields. | -| `18` | Partial verification (limit reached). | -| `70` | Transport or HTTP failure. | -| `71` | CLI usage error (invalid arguments, missing tenant). | - -Use these codes in CI to map outcomes to build statuses or alert severities. - ---- - -## 4 · `stella vuln observations` (Overlay paging) - -`stella vuln observations` lists raw advisory observations for downstream overlays (Graph Explorer, Policy simulations, Console). Large tenants can now page through results deterministically. - -| Option | Description | -|--------|-------------| -| `--limit ` | Caps the number of observations returned in a single call. Defaults to `200`; values above `500` are clamped server-side. | -| `--cursor ` | Opaque continuation token produced by the previous page (`nextCursor` in JSON output). Pass it back to resume iteration. | - -Additional notes: - -- Table mode prints a hint when `hasMore` is `true`: - `[yellow]More observations available. Continue with --cursor [/]`. -- JSON mode returns `nextCursor` and `hasMore` alongside the observation list so automation can loop until `hasMore` is `false`. -- Supplying a non-positive limit falls back to the default (`200`). Invalid/expired cursors yield `400 Bad Request`; restart without `--cursor` to begin a fresh iteration. 
- ---- - -## 5 · Related references - -- [Aggregation-Only Contract reference](../ingestion/aggregation-only-contract.md) -- [Architecture overview](../architecture/overview.md) -- [Console AOC dashboard](../ui/console.md) -- [Authority scopes](../ARCHITECTURE_AUTHORITY.md) - ---- - -## 6 · Compliance checklist - -- [ ] Usage documented for both table and JSON formats. -- [ ] Exit-code mapping matches `ERR_AOC_00x` definitions and automation guidance. -- [ ] Offline/air-gap workflow captured for both commands. -- [ ] References to AOC architecture and console docs included. -- [ ] Examples validated against current CLI syntax (update post-implementation). -- [ ] Docs guild screenshot/narrative placeholder logged for release notes (pending CLI team capture). - ---- - -*Last updated: 2025-10-29 (Sprint 24).* - -## 13. Authority configuration quick reference - -| Setting | Purpose | How to set | -|---------|---------|------------| -| `StellaOps:Authority:OperatorReason` | Incident/change description recorded with `orch:operate` tokens. | CLI flag `--Authority:OperatorReason=...` or env `STELLAOPS_ORCH_REASON`. | -| `StellaOps:Authority:OperatorTicket` | Change/incident ticket reference paired with orchestrator control actions. | CLI flag `--Authority:OperatorTicket=...` or env `STELLAOPS_ORCH_TICKET`. | - -> Tokens requesting `orch:operate` will fail with `invalid_request` unless both values are present. Choose concise strings (≤256 chars for reason, ≤128 chars for ticket) and avoid sensitive data. - +# CLI AOC Commands Reference + +> **Audience:** DevEx engineers, operators, and CI authors integrating the `stella` CLI with Aggregation-Only Contract (AOC) workflows. +> **Scope:** Command synopsis, options, exit codes, and offline considerations for `stella sources ingest --dry-run` and `stella aoc verify` as introduced in Sprint 19. + +Both commands are designed to enforce the AOC guardrails documented in the [aggregation-only reference](../ingestion/aggregation-only-contract.md) and the [architecture overview](../architecture/overview.md). They consume Authority-issued tokens with tenant scopes and never mutate ingestion stores. + +--- + +## 1 · Prerequisites + +- CLI version: `stella` ≥ 0.19.0 (AOC feature gate enabled). +- Required scopes (DPoP-bound): + - `advisory:read` for Concelier sources. + - `vex:read` for Excititor sources (optional but required for VEX checks). + - `aoc:verify` to invoke guard verification endpoints. + - `tenant:select` if your deployment uses tenant switching. +- Connectivity: direct access to Concelier/Excititor APIs or Offline Kit snapshot (see § 4). +- Environment: set `STELLA_AUTHORITY_URL`, `STELLA_TENANT`, and export a valid OpTok via `stella auth login` or existing token cache. + +--- + +## 2 · `stella sources ingest --dry-run` + +### 2.1 Synopsis + +```bash +stella sources ingest --dry-run \ + --source \ + --input \ + [--tenant ] \ + [--format json|table] \ + [--no-color] \ + [--output ] +``` + +### 2.2 Description + +Previews an ingestion write without touching MongoDB. The command loads an upstream advisory or VEX document, computes the would-write payload, runs it through the `AOCWriteGuard`, and reports any forbidden fields, provenance gaps, or idempotency issues. Use it during connector development, CI validation, or while triaging incidents. + +### 2.3 Options + +| Option | Description | +|--------|-------------| +| `--source ` | Logical source name (`redhat`, `ubuntu`, `osv`, etc.). Mirrors connector configuration. 
| +| `--input ` | Path to local CSAF/OSV/VEX file or HTTPS URI. CLI normalises transport (gzip/base64) before guard evaluation. | +| `--tenant ` | Overrides default tenant for multi-tenant deployments. Mandatory when `STELLA_TENANT` is not set. | +| `--format json|table` | Output format. `table` (default) prints summary with highlighted violations; `json` emits machine-readable report (see below). | +| `--no-color` | Disables ANSI colour output for CI logs. | +| `--output ` | Writes the JSON report to file while still printing human-readable summary to stdout. | + +### 2.4 Output schema (JSON) + +```json +{ + "source": "redhat", + "tenant": "default", + "guardVersion": "1.0.0", + "status": "ok", + "document": { + "contentHash": "sha256:…", + "supersedes": null, + "provenance": { + "signature": { "format": "pgp", "present": true } + } + }, + "violations": [] +} +``` + +When violations exist, `status` becomes `error` and `violations` contains entries with `code` (`ERR_AOC_00x`), a short `message`, and JSON Pointer `path` values indicating offending fields. + +### 2.5 Exit codes + +| Exit code | Meaning | +|-----------|---------| +| `0` | Guard passed; would-write payload is AOC compliant. | +| `11` | `ERR_AOC_001` – Forbidden field (`severity`, `cvss`, etc.) detected. | +| `12` | `ERR_AOC_002` – Merge attempt (multiple upstream sources fused). | +| `13` | `ERR_AOC_003` – Idempotency violation (duplicate without supersedes). | +| `14` | `ERR_AOC_004` – Missing provenance fields. | +| `15` | `ERR_AOC_005` – Signature/checksum mismatch. | +| `16` | `ERR_AOC_006` – Effective findings present (Policy-only data). | +| `17` | `ERR_AOC_007` – Unknown top-level fields / schema violation. | +| `70` | Transport error (network, auth, malformed input). | + +> Exit codes map directly to the `ERR_AOC_00x` table for scripting consistency. Multiple violations yield the highest-priority code (e.g., 11 takes precedence over 14). + +### 2.6 Examples + +Dry-run a local CSAF file: + +```bash +stella sources ingest --dry-run \ + --source redhat \ + --input ./fixtures/redhat/RHSA-2025-1234.json +``` + +Stream from HTTPS and emit JSON for CI: + +```bash +stella sources ingest --dry-run \ + --source osv \ + --input https://osv.dev/vulnerability/GHSA-aaaa-bbbb \ + --format json \ + --output artifacts/osv-dry-run.json + +cat artifacts/osv-dry-run.json | jq '.violations' +``` + +### 2.7 Offline notes + +When operating in sealed/offline mode: + +- Use `--input` paths pointing to Offline Kit snapshots (`offline-kit/advisories/*.json`). +- Provide `--tenant` explicitly if the offline bundle contains multiple tenants. +- The command does not attempt network access when given a file path. +- Store reports with `--output` to include in transfer packages for policy review. + +--- + +## 3 · `stella aoc verify` + +### 3.1 Synopsis + +```bash +stella aoc verify \ + [--since ] \ + [--limit ] \ + [--sources ] \ + [--codes ] \ + [--format table|json] \ + [--export ] \ + [--tenant ] \ + [--no-color] +``` + +### 3.2 Description + +Replays the AOC guard against stored raw documents. By default it checks all advisories and VEX statements ingested in the last 24 hours for the active tenant, reporting totals, top violation codes, and sample documents. Use it in CI pipelines, scheduled verifications, or during incident response. + +### 3.3 Options + +| Option | Description | +|--------|-------------| +| `--since ` | Verification window. Accepts ISO 8601 timestamp (`2025-10-25T12:00:00Z`) or duration (`48h`, `7d`). Defaults to `24h`. 
| +| `--limit ` | Maximum number of violations to display (per code). `0` means show all. Defaults to `20`. | +| `--sources ` | Comma-separated list of sources (`redhat,ubuntu,osv`). Filters both advisories and VEX entries. | +| `--codes ` | Restricts output to specific `ERR_AOC_00x` codes. Useful for regression tracking. | +| `--format table|json` | `table` (default) prints summary plus top violations; `json` outputs machine-readable report identical to the `/aoc/verify` API. | +| `--export ` | Writes the JSON report to disk (useful for audits/offline uploads). | +| `--tenant ` | Overrides tenant context. Required for cross-tenant verifications when run by platform operators. | +| `--no-color` | Disables ANSI colours. | + +`table` mode prints a summary showing the active tenant, evaluated window, counts of checked advisories/VEX statements, the active limit, total writes/violations, and whether the page was truncated. Status is colour-coded as `ok`, `violations`, or `truncated`. When violations exist the detail table lists the code, total occurrences, first sample document (`source` + `documentId` + `contentHash`), and JSON pointer path. + +### 3.4 Report structure (JSON) + +```json +{ + "tenant": "default", + "window": { + "from": "2025-10-25T12:00:00Z", + "to": "2025-10-26T12:00:00Z" + }, + "checked": { + "advisories": 482, + "vex": 75 + }, + "violations": [ + { + "code": "ERR_AOC_001", + "count": 2, + "examples": [ + { + "source": "redhat", + "documentId": "advisory_raw:redhat:RHSA-2025:1", + "contentHash": "sha256:…", + "path": "/content/raw/cvss" + } + ] + } + ], + "metrics": { + "ingestion_write_total": 557, + "aoc_violation_total": 2 + }, + "truncated": false +} +``` + +### 3.5 Exit codes + +| Exit code | Meaning | +|-----------|---------| +| `0` | Verification succeeded with zero violations. | +| `11…17` | Same mapping as § 2.5 when violations are detected. Highest-priority code returned. | +| `18` | Verification ran but results truncated (limit reached) – treat as warning; rerun with higher `--limit`. | +| `70` | Transport/authentication error. | +| `71` | CLI misconfiguration (missing tenant, invalid `--since`, etc.). | + +### 3.6 Examples + +Daily verification across all sources: + +```bash +stella aoc verify --since 24h --format table +``` + +CI pipeline focusing on errant sources and exporting evidence: + +```bash +stella aoc verify \ + --sources redhat,ubuntu \ + --codes ERR_AOC_001,ERR_AOC_004 \ + --format json \ + --limit 100 \ + --export artifacts/aoc-verify.json + +jq '.violations[] | {code, count}' artifacts/aoc-verify.json +``` + +Air-gapped verification using Offline Kit snapshot (example script): + +```bash +stella aoc verify \ + --since 7d \ + --format json \ + --export /mnt/offline/aoc-verify-$(date +%F).json + +sha256sum /mnt/offline/aoc-verify-*.json > /mnt/offline/checksums.txt +``` + +### 3.7 Automation tips + +- Schedule with `cron` or platform scheduler and fail the job when exit code ≥ 11. +- Pair with `stella sources ingest --dry-run` for pre-flight validation before re-enabling a paused source. +- Push JSON exports to observability pipelines for historical tracking of violation counts. + +### 3.8 Offline notes + +- Works against Offline Kit Mongo snapshots when CLI is pointed at the local API gateway included in the bundle. +- When fully disconnected, run against exported `aoc verify` reports generated on production and replay them using `--format json --export` (automation recipe above). 
+- Include verification output in compliance packages alongside Offline Kit manifests.
+
+---
+
+## 4 · Global exit-code reference
+
+| Code | Summary |
+|------|---------|
+| `0` | Success / no violations. |
+| `11` | `ERR_AOC_001` – Forbidden field present. |
+| `12` | `ERR_AOC_002` – Merge attempt detected. |
+| `13` | `ERR_AOC_003` – Idempotency violation. |
+| `14` | `ERR_AOC_004` – Missing provenance/signature metadata. |
+| `15` | `ERR_AOC_005` – Signature/checksum mismatch. |
+| `16` | `ERR_AOC_006` – Effective findings in ingestion payload. |
+| `17` | `ERR_AOC_007` – Schema violation / unknown fields. |
+| `18` | Partial verification (limit reached). |
+| `70` | Transport or HTTP failure. |
+| `71` | CLI usage error (invalid arguments, missing tenant). |
+
+Use these codes in CI to map outcomes to build statuses or alert severities.
+
+---
+
+## 5 · `stella vuln observations` (Overlay paging)
+
+`stella vuln observations` lists raw advisory observations for downstream overlays (Graph Explorer, Policy simulations, Console). Large tenants can now page through results deterministically.
+
+| Option | Description |
+|--------|-------------|
+| `--limit ` | Caps the number of observations returned in a single call. Defaults to `200`; values above `500` are clamped server-side. |
+| `--cursor ` | Opaque continuation token produced by the previous page (`nextCursor` in JSON output). Pass it back to resume iteration. |
+
+Additional notes:
+
+- Table mode prints a hint when `hasMore` is `true`:
+  `[yellow]More observations available. Continue with --cursor [/]`.
+- JSON mode returns `nextCursor` and `hasMore` alongside the observation list so automation can loop until `hasMore` is `false`.
+- Supplying a non-positive limit falls back to the default (`200`). Invalid/expired cursors yield `400 Bad Request`; restart without `--cursor` to begin a fresh iteration.
+
+---
+
+## 6 · Related references
+
+- [Aggregation-Only Contract reference](../ingestion/aggregation-only-contract.md)
+- [Architecture overview](../architecture/overview.md)
+- [Console AOC dashboard](../ui/console.md)
+- [Authority scopes](../ARCHITECTURE_AUTHORITY.md)
+
+---
+
+## 7 · Compliance checklist
+
+- [ ] Usage documented for both table and JSON formats.
+- [ ] Exit-code mapping matches `ERR_AOC_00x` definitions and automation guidance.
+- [ ] Offline/air-gap workflow captured for both commands.
+- [ ] References to AOC architecture and console docs included.
+- [ ] Examples validated against current CLI syntax (update post-implementation).
+- [ ] Docs guild screenshot/narrative placeholder logged for release notes (pending CLI team capture).
+
+---
+
+*Last updated: 2025-10-29 (Sprint 24).*
+
+## 8 · Authority configuration quick reference
+
+| Setting | Purpose | How to set |
+|---------|---------|------------|
+| `StellaOps:Authority:OperatorReason` | Incident/change description recorded with `orch:operate` tokens. | CLI flag `--Authority:OperatorReason=...` or env `STELLAOPS_ORCH_REASON`. |
+| `StellaOps:Authority:OperatorTicket` | Change/incident ticket reference paired with orchestrator control actions. | CLI flag `--Authority:OperatorTicket=...` or env `STELLAOPS_ORCH_TICKET`. |
+
+> Tokens requesting `orch:operate` will fail with `invalid_request` unless both values are present. Choose concise strings (≤256 chars for reason, ≤128 chars for ticket) and avoid sensitive data.
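+
+For CI jobs that need `orch:operate` tokens, the sketch below shows one hedged way to provide both values; the environment variables and configuration flags come from the table above, while the job wiring and the `<orchestrator-control-command>` placeholder are illustrative assumptions rather than prescribed CLI syntax.
+
+```bash
+# Option A: environment variables picked up by the CLI configuration binder.
+export STELLAOPS_ORCH_REASON="INC-2045: pause OSV connector during upstream outage"
+export STELLAOPS_ORCH_TICKET="CHG-1187"
+
+# Option B: pass the settings explicitly on the invocation itself.
+# Substitute <orchestrator-control-command> with the control action your workflow runs.
+stella <orchestrator-control-command> \
+  --Authority:OperatorReason="INC-2045: pause OSV connector during upstream outage" \
+  --Authority:OperatorTicket="CHG-1187"
+```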
+ diff --git a/docs/cli/policy.md b/docs/cli/policy.md index 7bed3a29..6c791c3e 100644 --- a/docs/cli/policy.md +++ b/docs/cli/policy.md @@ -185,7 +185,7 @@ Output fields (JSON): } ``` -> Schema reminder: CLI commands surface objects defined in `src/StellaOps.Scheduler.Models/docs/SCHED-MODELS-20-001-POLICY-RUNS.md`; use the samples in `samples/api/scheduler/` for contract validation when extending output parsing. +> Schema reminder: CLI commands surface objects defined in `src/Scheduler/__Libraries/StellaOps.Scheduler.Models/docs/SCHED-MODELS-20-001-POLICY-RUNS.md`; use the samples in `samples/api/scheduler/` for contract validation when extending output parsing. Exit codes: diff --git a/docs/deploy/console.md b/docs/deploy/console.md index 0a8dc4ab..c336c97b 100644 --- a/docs/deploy/console.md +++ b/docs/deploy/console.md @@ -1,228 +1,228 @@ -# Deploying the StellaOps Console - -> **Audience:** Deployment Guild, Console Guild, operators rolling out the web console. -> **Scope:** Helm and Docker Compose deployment steps, ingress/TLS configuration, required environment variables, health checks, offline/air-gap operation, and compliance checklist (Sprint 23). - -The StellaOps Console ships as part of the `stellaops` stack Helm chart and Compose bundles maintained under `deploy/`. This guide describes the supported deployment paths, the configuration surface, and operational checks needed to run the console in connected or air-gapped environments. - ---- - -## 1. Prerequisites - -- Kubernetes cluster (v1.28+) with ingress controller (NGINX, Traefik, or equivalent) and Cert-Manager for automated TLS, or Docker host for Compose deployments. -- Container registry access to `registry.stella-ops.org` (or mirrored registry) for all images listed in `deploy/releases/*.yaml`. -- Authority service configured with console client (`aud=ui`, scopes `ui.read`, `ui.admin`). -- DNS entry pointing to the console hostname (for example, `console.acme.internal`). -- Cosign public key for manifest verification (`deploy/releases/manifest.json.sig`). -- Optional: Offline Kit bundle for air-gapped sites (`stella-ops-offline-kit-.tar.gz`). - ---- - -## 2. Helm deployment (recommended) - -### 2.1 Install chart repository - -```bash -helm repo add stellaops https://downloads.stella-ops.org/helm -helm repo update stellaops -``` - -If operating offline, copy the chart archive from the Offline Kit (`deploy/helm/stellaops-.tgz`) and run: - -```bash -helm install stellaops ./stellaops-.tgz --namespace stellaops --create-namespace -``` - -### 2.2 Base installation - -```bash -helm install stellaops stellaops/stellaops \ - --namespace stellaops \ - --create-namespace \ - --values deploy/helm/stellaops/values-prod.yaml -``` - -The chart deploys Authority, Console web/API gateway, Scanner API, Scheduler, and supporting services. The console frontend pod is labelled `app=stellaops-web-ui`. - -### 2.3 Helm values highlights - -Key sections in `deploy/helm/stellaops/values-prod.yaml`: - -| Path | Description | -|------|-------------| -| `console.ingress.host` | Hostname served by the console (`console.example.com`). | -| `console.ingress.tls.secretName` | Kubernetes secret containing TLS certificate (generated by Cert-Manager or uploaded manually). | -| `console.config.apiGateway.baseUrl` | Internal base URL the UI uses to reach the gateway (defaults to `https://stellaops-web`). | -| `console.env.AUTHORITY_ISSUER` | Authority issuer URL (for example, `https://authority.example.com`). 
| -| `console.env.AUTHORITY_CLIENT_ID` | Authority client ID for the console UI. | -| `console.env.AUTHORITY_SCOPES` | Space-separated scopes required by UI (`ui.read ui.admin`). | -| `console.resources` | CPU/memory requests and limits (default 250m CPU / 512Mi memory). | -| `console.podAnnotations` | Optional annotations for service mesh or monitoring. | - -Use `values-stage.yaml`, `values-dev.yaml`, or `values-airgap.yaml` as templates for other environments. - -### 2.4 TLS and ingress - -Example ingress override: - -```yaml -console: - ingress: - enabled: true - className: nginx - host: console.acme.internal - tls: - enabled: true - secretName: console-tls -``` - -Generate certificates using Cert-Manager or provide an existing secret. For air-gapped deployments, pre-create the secret with the mirrored CA chain. - -### 2.5 Health checks - -Console pods expose: - -| Path | Purpose | Notes | -|------|---------|-------| -| `/health/live` | Liveness probe | Confirms process responsive. | -| `/health/ready` | Readiness probe | Verifies configuration bootstrap and Authority reachability. | -| `/metrics` | Prometheus metrics | Enabled when `console.metrics.enabled=true`. | - -Helm chart sets default probes (`initialDelaySeconds: 10`, `periodSeconds: 15`). Adjust via `console.livenessProbe` and `console.readinessProbe`. - ---- - -## 3. Docker Compose deployment - -Located in `deploy/compose/docker-compose.console.yaml`. Quick start: - -```bash -cd deploy/compose -docker compose -f docker-compose.console.yaml --env-file console.env up -d -``` - -`console.env` should define: - -``` -CONSOLE_PUBLIC_BASE_URL=https://console.acme.internal -AUTHORITY_ISSUER=https://authority.acme.internal -AUTHORITY_CLIENT_ID=console-ui -AUTHORITY_CLIENT_SECRET= -AUTHORITY_SCOPES=ui.read ui.admin -CONSOLE_GATEWAY_BASE_URL=https://api.acme.internal -``` - -The compose bundle includes Traefik as reverse proxy with TLS termination. Update `traefik/dynamic/console.yml` for custom certificates or additional middlewares (CSP headers, rate limits). - ---- - -## 4. Environment variables - -| Variable | Description | Default | -|----------|-------------|---------| -| `CONSOLE_PUBLIC_BASE_URL` | External URL used for redirects, deep links, and telemetry. | None (required). | -| `CONSOLE_GATEWAY_BASE_URL` | URL of the web gateway that proxies API calls (`/console/*`). | Chart service name. | -| `AUTHORITY_ISSUER` | Authority issuer (`https://authority.example.com`). | None (required). | -| `AUTHORITY_CLIENT_ID` | OIDC client configured in Authority. | None (required). | -| `AUTHORITY_SCOPES` | Space-separated scopes assigned to the console client. | `ui.read ui.admin`. | -| `AUTHORITY_DPOP_ENABLED` | Enables DPoP challenge/response (recommended true). | `true`. | -| `CONSOLE_FEATURE_FLAGS` | Comma-separated feature flags (`runs`, `downloads.offline`, etc.). | `runs,downloads,policies`. | -| `CONSOLE_LOG_LEVEL` | Minimum log level (`Information`, `Debug`, etc.). | `Information`. | -| `CONSOLE_METRICS_ENABLED` | Expose `/metrics` endpoint. | `true`. | -| `CONSOLE_SENTRY_DSN` | Optional error reporting DSN. | Blank. | - -When running behind additional proxies, set `ASPNETCORE_FORWARDEDHEADERS_ENABLED=true` to honour `X-Forwarded-*` headers. - ---- - -## 5. 
Security headers and CSP - -The console serves a strict Content Security Policy (CSP) by default: - -``` -default-src 'self'; -connect-src 'self' https://*.stella-ops.local; -script-src 'self'; -style-src 'self' 'unsafe-inline'; -img-src 'self' data:; -font-src 'self'; -frame-ancestors 'none'; -``` - -Adjust via `console.config.cspOverrides` if additional domains are required. For integrations embedding the console, update OIDC redirect URIs and Authority scopes accordingly. - -TLS recommendations: - -- Use TLS 1.2+ with modern cipher suite policy. -- Enable HSTS (`Strict-Transport-Security: max-age=31536000; includeSubDomains`). -- Provide custom trust bundles via `console.config.trustBundleSecret` when using private CAs. - ---- - -## 6. Logging and metrics - -- Structured logs emitted to stdout with correlation IDs. Configure log shipping via Fluent Bit or similar. -- Metrics available at `/metrics` in Prometheus format. Key metrics include `ui_request_duration_seconds`, `ui_tenant_switch_total`, and `ui_download_manifest_refresh_seconds`. -- Enable OpenTelemetry exporter by setting `OTEL_EXPORTER_OTLP_ENDPOINT` and associated headers in environment variables. - ---- - -## 7. Offline and air-gap deployment - -- Mirror container images using the Downloads workspace or Offline Kit manifest. Example: - -```bash -oras copy registry.stella-ops.org/stellaops/web-ui@sha256: \ - registry.airgap.local/stellaops/web-ui:2025.10.0 -``` - -- Import Offline Kit using `stella ouk import` before starting the console so manifest parity checks succeed. -- Use `values-airgap.yaml` to disable external telemetry endpoints and configure internal certificate chains. -- Run `helm upgrade --install` using the mirrored chart (`stellaops-.tgz`) and set `console.offlineMode=true` to surface offline banners. - ---- - -## 8. Health checks and remediation - -| Check | Command | Expected result | -|-------|---------|-----------------| -| Pod status | `kubectl get pods -n stellaops` | `Running` state with restarts = 0. | -| Liveness | `kubectl exec deploy/stellaops-web-ui -- curl -fsS http://localhost:8080/health/live` | Returns `{"status":"Healthy"}`. | -| Readiness | `kubectl exec deploy/stellaops-web-ui -- curl -fsS http://localhost:8080/health/ready` | Returns `{"status":"Ready"}`. | -| Gateway reachability | `curl -I https://console.example.com/api/console/status` | `200 OK` with CSP headers. | -| Static assets | `curl -I https://console.example.com/static/assets/app.js` | `200 OK` with long cache headers. | - -Troubleshooting steps: - -- **Authority unreachable:** readiness fails with `AUTHORITY_UNREACHABLE`. Check DNS, trust bundles, and Authority service health. -- **Manifest mismatch:** console logs `DOWNLOAD_MANIFEST_SIGNATURE_INVALID`. Verify cosign key and re-sync manifest. -- **Ingress 404:** ensure ingress controller routes host to `stellaops-web-ui` service; check TLS secret name. -- **SSE blocked:** confirm proxy allows HTTP/1.1 and disables buffering on `/console/runs/*`. - ---- - -## 9. References - -- `deploy/helm/stellaops/values-*.yaml` - environment-specific overrides. -- `deploy/compose/docker-compose.console.yaml` - Compose bundle. -- `/docs/ui/downloads.md` - manifest and offline bundle guidance. -- `/docs/security/console-security.md` - CSP and Authority scopes. -- `/docs/24_OFFLINE_KIT.md` - Offline kit packaging and verification. -- `/docs/ops/deployment-runbook.md` (pending) - wider platform deployment steps. - ---- - -## 10. 
Compliance checklist - -- [ ] Helm and Compose instructions verified against `deploy/` assets. -- [ ] Ingress/TLS guidance aligns with Security Guild recommendations. -- [ ] Environment variables documented with defaults and required values. -- [ ] Health/liveness/readiness endpoints tested and listed. -- [ ] Offline workflow (mirrors, manifest parity) captured. -- [ ] Logging and metrics surface documented metrics. -- [ ] CSP and security header defaults stated alongside override guidance. -- [ ] Troubleshooting section linked to relevant runbooks. - ---- - -*Last updated: 2025-10-27 (Sprint 23).* +# Deploying the StellaOps Console + +> **Audience:** Deployment Guild, Console Guild, operators rolling out the web console. +> **Scope:** Helm and Docker Compose deployment steps, ingress/TLS configuration, required environment variables, health checks, offline/air-gap operation, and compliance checklist (Sprint 23). + +The StellaOps Console ships as part of the `stellaops` stack Helm chart and Compose bundles maintained under `deploy/`. This guide describes the supported deployment paths, the configuration surface, and operational checks needed to run the console in connected or air-gapped environments. + +--- + +## 1. Prerequisites + +- Kubernetes cluster (v1.28+) with ingress controller (NGINX, Traefik, or equivalent) and Cert-Manager for automated TLS, or Docker host for Compose deployments. +- Container registry access to `registry.stella-ops.org` (or mirrored registry) for all images listed in `deploy/releases/*.yaml`. +- Authority service configured with console client (`aud=ui`, scopes `ui.read`, `ui.admin`). +- DNS entry pointing to the console hostname (for example, `console.acme.internal`). +- Cosign public key for manifest verification (`deploy/releases/manifest.json.sig`). +- Optional: Offline Kit bundle for air-gapped sites (`stella-ops-offline-kit-.tar.gz`). + +--- + +## 2. Helm deployment (recommended) + +### 2.1 Install chart repository + +```bash +helm repo add stellaops https://downloads.stella-ops.org/helm +helm repo update stellaops +``` + +If operating offline, copy the chart archive from the Offline Kit (`deploy/helm/stellaops-.tgz`) and run: + +```bash +helm install stellaops ./stellaops-.tgz --namespace stellaops --create-namespace +``` + +### 2.2 Base installation + +```bash +helm install stellaops stellaops/stellaops \ + --namespace stellaops \ + --create-namespace \ + --values deploy/helm/stellaops/values-prod.yaml +``` + +The chart deploys Authority, Console web/API gateway, Scanner API, Scheduler, and supporting services. The console frontend pod is labelled `app=stellaops-web-ui`. + +### 2.3 Helm values highlights + +Key sections in `deploy/helm/stellaops/values-prod.yaml`: + +| Path | Description | +|------|-------------| +| `console.ingress.host` | Hostname served by the console (`console.example.com`). | +| `console.ingress.tls.secretName` | Kubernetes secret containing TLS certificate (generated by Cert-Manager or uploaded manually). | +| `console.config.apiGateway.baseUrl` | Internal base URL the UI uses to reach the gateway (defaults to `https://stellaops-web`). | +| `console.env.AUTHORITY_ISSUER` | Authority issuer URL (for example, `https://authority.example.com`). | +| `console.env.AUTHORITY_CLIENT_ID` | Authority client ID for the console UI. | +| `console.env.AUTHORITY_SCOPES` | Space-separated scopes required by UI (`ui.read ui.admin`). | +| `console.resources` | CPU/memory requests and limits (default 250m CPU / 512Mi memory). 
| +| `console.podAnnotations` | Optional annotations for service mesh or monitoring. | + +Use `values-stage.yaml`, `values-dev.yaml`, or `values-airgap.yaml` as templates for other environments. + +### 2.4 TLS and ingress + +Example ingress override: + +```yaml +console: + ingress: + enabled: true + className: nginx + host: console.acme.internal + tls: + enabled: true + secretName: console-tls +``` + +Generate certificates using Cert-Manager or provide an existing secret. For air-gapped deployments, pre-create the secret with the mirrored CA chain. + +### 2.5 Health checks + +Console pods expose: + +| Path | Purpose | Notes | +|------|---------|-------| +| `/health/live` | Liveness probe | Confirms process responsive. | +| `/health/ready` | Readiness probe | Verifies configuration bootstrap and Authority reachability. | +| `/metrics` | Prometheus metrics | Enabled when `console.metrics.enabled=true`. | + +Helm chart sets default probes (`initialDelaySeconds: 10`, `periodSeconds: 15`). Adjust via `console.livenessProbe` and `console.readinessProbe`. + +--- + +## 3. Docker Compose deployment + +Located in `deploy/compose/docker-compose.console.yaml`. Quick start: + +```bash +cd deploy/compose +docker compose -f docker-compose.console.yaml --env-file console.env up -d +``` + +`console.env` should define: + +``` +CONSOLE_PUBLIC_BASE_URL=https://console.acme.internal +AUTHORITY_ISSUER=https://authority.acme.internal +AUTHORITY_CLIENT_ID=console-ui +AUTHORITY_CLIENT_SECRET= +AUTHORITY_SCOPES=ui.read ui.admin +CONSOLE_GATEWAY_BASE_URL=https://api.acme.internal +``` + +The compose bundle includes Traefik as reverse proxy with TLS termination. Update `traefik/dynamic/console.yml` for custom certificates or additional middlewares (CSP headers, rate limits). + +--- + +## 4. Environment variables + +| Variable | Description | Default | +|----------|-------------|---------| +| `CONSOLE_PUBLIC_BASE_URL` | External URL used for redirects, deep links, and telemetry. | None (required). | +| `CONSOLE_GATEWAY_BASE_URL` | URL of the web gateway that proxies API calls (`/console/*`). | Chart service name. | +| `AUTHORITY_ISSUER` | Authority issuer (`https://authority.example.com`). | None (required). | +| `AUTHORITY_CLIENT_ID` | OIDC client configured in Authority. | None (required). | +| `AUTHORITY_SCOPES` | Space-separated scopes assigned to the console client. | `ui.read ui.admin`. | +| `AUTHORITY_DPOP_ENABLED` | Enables DPoP challenge/response (recommended true). | `true`. | +| `CONSOLE_FEATURE_FLAGS` | Comma-separated feature flags (`runs`, `downloads.offline`, etc.). | `runs,downloads,policies`. | +| `CONSOLE_LOG_LEVEL` | Minimum log level (`Information`, `Debug`, etc.). | `Information`. | +| `CONSOLE_METRICS_ENABLED` | Expose `/metrics` endpoint. | `true`. | +| `CONSOLE_SENTRY_DSN` | Optional error reporting DSN. | Blank. | + +When running behind additional proxies, set `ASPNETCORE_FORWARDEDHEADERS_ENABLED=true` to honour `X-Forwarded-*` headers. + +--- + +## 5. Security headers and CSP + +The console serves a strict Content Security Policy (CSP) by default: + +``` +default-src 'self'; +connect-src 'self' https://*.stella-ops.local; +script-src 'self'; +style-src 'self' 'unsafe-inline'; +img-src 'self' data:; +font-src 'self'; +frame-ancestors 'none'; +``` + +Adjust via `console.config.cspOverrides` if additional domains are required. For integrations embedding the console, update OIDC redirect URIs and Authority scopes accordingly. 
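+
+As an illustration, a Helm values override shaped like the sketch below could extend `connect-src` with an additional API origin; the nested structure under `console.config.cspOverrides` is an assumption made for this example, so confirm the supported keys against the chart schema before relying on it.
+
+```yaml
+# Hypothetical override shape: CSP directive name mapped to extra sources to append.
+console:
+  config:
+    cspOverrides:
+      connect-src:
+        - https://api.partner.example.internal
+```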
+ +TLS recommendations: + +- Use TLS 1.2+ with modern cipher suite policy. +- Enable HSTS (`Strict-Transport-Security: max-age=31536000; includeSubDomains`). +- Provide custom trust bundles via `console.config.trustBundleSecret` when using private CAs. + +--- + +## 6. Logging and metrics + +- Structured logs emitted to stdout with correlation IDs. Configure log shipping via Fluent Bit or similar. +- Metrics available at `/metrics` in Prometheus format. Key metrics include `ui_request_duration_seconds`, `ui_tenant_switch_total`, and `ui_download_manifest_refresh_seconds`. +- Enable OpenTelemetry exporter by setting `OTEL_EXPORTER_OTLP_ENDPOINT` and associated headers in environment variables. + +--- + +## 7. Offline and air-gap deployment + +- Mirror container images using the Downloads workspace or Offline Kit manifest. Example: + +```bash +oras copy registry.stella-ops.org/stellaops/web-ui@sha256: \ + registry.airgap.local/stellaops/web-ui:2025.10.0 +``` + +- Import Offline Kit using `stella ouk import` before starting the console so manifest parity checks succeed. +- Use `values-airgap.yaml` to disable external telemetry endpoints and configure internal certificate chains. +- Run `helm upgrade --install` using the mirrored chart (`stellaops-.tgz`) and set `console.offlineMode=true` to surface offline banners. + +--- + +## 8. Health checks and remediation + +| Check | Command | Expected result | +|-------|---------|-----------------| +| Pod status | `kubectl get pods -n stellaops` | `Running` state with restarts = 0. | +| Liveness | `kubectl exec deploy/stellaops-web-ui -- curl -fsS http://localhost:8080/health/live` | Returns `{"status":"Healthy"}`. | +| Readiness | `kubectl exec deploy/stellaops-web-ui -- curl -fsS http://localhost:8080/health/ready` | Returns `{"status":"Ready"}`. | +| Gateway reachability | `curl -I https://console.example.com/api/console/status` | `200 OK` with CSP headers. | +| Static assets | `curl -I https://console.example.com/static/assets/app.js` | `200 OK` with long cache headers. | + +Troubleshooting steps: + +- **Authority unreachable:** readiness fails with `AUTHORITY_UNREACHABLE`. Check DNS, trust bundles, and Authority service health. +- **Manifest mismatch:** console logs `DOWNLOAD_MANIFEST_SIGNATURE_INVALID`. Verify cosign key and re-sync manifest. +- **Ingress 404:** ensure ingress controller routes host to `stellaops-web-ui` service; check TLS secret name. +- **SSE blocked:** confirm proxy allows HTTP/1.1 and disables buffering on `/console/runs/*`. + +--- + +## 9. References + +- `deploy/helm/stellaops/values-*.yaml` - environment-specific overrides. +- `deploy/compose/docker-compose.console.yaml` - Compose bundle. +- `/docs/ui/downloads.md` - manifest and offline bundle guidance. +- `/docs/security/console-security.md` - CSP and Authority scopes. +- `/docs/24_OFFLINE_KIT.md` - Offline kit packaging and verification. +- `/docs/ops/deployment-runbook.md` (pending) - wider platform deployment steps. + +--- + +## 10. Compliance checklist + +- [ ] Helm and Compose instructions verified against `deploy/` assets. +- [ ] Ingress/TLS guidance aligns with Security Guild recommendations. +- [ ] Environment variables documented with defaults and required values. +- [ ] Health/liveness/readiness endpoints tested and listed. +- [ ] Offline workflow (mirrors, manifest parity) captured. +- [ ] Logging and metrics surface documented metrics. +- [ ] CSP and security header defaults stated alongside override guidance. 
+- [ ] Troubleshooting section linked to relevant runbooks. + +--- + +*Last updated: 2025-10-27 (Sprint 23).* diff --git a/docs/deploy/containers.md b/docs/deploy/containers.md index 1edbbacd..b361b321 100644 --- a/docs/deploy/containers.md +++ b/docs/deploy/containers.md @@ -1,160 +1,160 @@ -# Container Deployment Guide — AOC Update - -> **Audience:** DevOps Guild, platform operators deploying StellaOps services. -> **Scope:** Deployment configuration changes required by the Aggregation-Only Contract (AOC), including schema validators, guard environment flags, and verifier identities. - -This guide supplements existing deployment manuals with AOC-specific configuration. It assumes familiarity with the base Compose/Helm manifests described in `ops/deployment/` and `docs/ARCHITECTURE_DEVOPS.md`. - ---- - -## 1 · Schema validator enablement - -### 1.1 MongoDB validators - -- Apply JSON schema validators to `advisory_raw` and `vex_raw` collections before enabling AOC guards. -- Before enabling validators or the idempotency index, run the duplicate audit helper to confirm no conflicting raw advisories remain: - ```bash - mongo concelier ops/devops/scripts/check-advisory-raw-duplicates.js --eval 'var LIMIT=200;' - ``` - Resolve any reported rows prior to rollout. -- Use the migration script provided in `ops/devops/scripts/apply-aoc-validators.js`: - -```bash -kubectl exec -n concelier deploy/concelier-mongo -- \ - mongo concelier ops/devops/scripts/apply-aoc-validators.js - -kubectl exec -n excititor deploy/excititor-mongo -- \ - mongo excititor ops/devops/scripts/apply-aoc-validators.js -``` - -- Validators enforce required fields (`tenant`, `source`, `upstream`, `linkset`) and reject forbidden keys at DB level. -- Rollback plan: validators are applied with `validationLevel: moderate`—downgrade via the same script with `--remove` if required. - -### 1.2 Migration order - -1. Deploy validators in maintenance window. -2. Roll out Concelier/Excititor images with guard middleware enabled (`AOC_GUARD_ENABLED=true`). -3. Run smoke tests (`stella sources ingest --dry-run` fixtures) before resuming production ingestion. - -### 1.3 Supersedes backfill verification - -1. **Duplicate audit:** Confirm `mongo concelier ops/devops/scripts/check-advisory-raw-duplicates.js --eval 'var LIMIT=200;'` reports no conflicts before restarting Concelier with the new migrations. -2. **Post-migration check:** After the service restarts, validate that `db.advisory` is a view pointing to `advisory_backup_20251028`: - ```bash - mongo concelier --quiet --eval 'db.getCollectionInfos({ name: "advisory" })[0]' - ``` - The `type` should be `"view"` and `options.viewOn` should equal `"advisory_backup_20251028"`. -3. **Supersedes chain spot-check:** Inspect a sample set to ensure deterministic chaining: - ```bash - mongo concelier --quiet --eval ' - db.advisory_raw.aggregate([ - { $match: { "upstream.upstream_id": { $exists: true } } }, - { $sort: { "tenant": 1, "source.vendor": 1, "upstream.upstream_id": 1, "upstream.retrieved_at": 1 } }, - { $limit: 5 }, - { $project: { _id: 1, supersedes: 1 } } - ]).forEach(printjson)' - ``` - Each revision should reference the previous `_id` (or `null` for the first revision). Record findings in the change ticket before proceeding to production. 
- ---- - -## 2 · Container environment flags - -Add the following environment variables to Concelier/Excititor deployments: - -| Variable | Default | Description | -|----------|---------|-------------| -| `AOC_GUARD_ENABLED` | `true` | Enables `AOCWriteGuard` interception. Set `false` only for controlled rollback. | -| `AOC_ALLOW_SUPERSEDES_RETROFIT` | `false` | Allows temporary supersedes backfill during migration. Remove after cutover. | -| `AOC_METRICS_ENABLED` | `true` | Emits `ingestion_write_total`, `aoc_violation_total`, etc. | -| `AOC_TENANT_HEADER` | `X-Stella-Tenant` | Header name expected from Gateway. | -| `AOC_VERIFIER_USER` | `stella-aoc-verify` | Read-only service user used by UI/CLI verification. | - -Compose snippet: - -```yaml -environment: - - AOC_GUARD_ENABLED=true - - AOC_ALLOW_SUPERSEDES_RETROFIT=false - - AOC_METRICS_ENABLED=true - - AOC_TENANT_HEADER=X-Stella-Tenant - - AOC_VERIFIER_USER=stella-aoc-verify -``` - -Ensure `AOC_VERIFIER_USER` exists in Authority with `aoc:verify` scope and no write permissions. - ---- - -## 3 · Verifier identity - -- Create a dedicated client (`stella-aoc-verify`) via Authority bootstrap: - -```yaml -clients: - - clientId: stella-aoc-verify - grantTypes: [client_credentials] - scopes: [aoc:verify, advisory:read, vex:read] - tenants: [default] -``` - -- Store credentials in secret store (`Kubernetes Secret`, `Docker swarm secret`). -- Bind credentials to `stella aoc verify` CI jobs and Console verification service. -- Rotate quarterly; document in `ops/authority-key-rotation.md`. - ---- - -## 4 · Deployment steps - -1. **Pre-checks:** Confirm database backups, alerting in maintenance mode, and staging environment validated. -2. **Apply validators:** Run scripts per § 1.1. -3. **Update manifests:** Inject environment variables (§ 2) and mount guard configuration configmaps. -4. **Redeploy services:** Rolling restart Concelier/Excititor pods. Monitor `ingestion_write_total` for steady throughput. -5. **Seed verifier:** Deploy read-only verifier user and store credentials. -6. **Run verification:** Execute `stella aoc verify --since 24h` and ensure exit code `0`. -7. **Update dashboards:** Point Grafana panels to new metrics (`aoc_violation_total`). -8. **Record handoff:** Capture console screenshots and verification logs for release notes. - ---- - -## 5 · Offline Kit updates - -- Ship validator scripts with Offline Kit (`offline-kit/scripts/apply-aoc-validators.js`). -- Include pre-generated verification reports for air-gapped deployments. -- Document offline CLI workflow in bundle README referencing `docs/cli/cli-reference.md`. -- Ensure `stella-aoc-verify` credentials are scoped to offline tenant and rotated during bundle refresh. - ---- - -## 6 · Rollback plan - -1. Disable guard via `AOC_GUARD_ENABLED=false` on Concelier/Excititor and rollout. -2. Remove validators with the migration script (`--remove`). -3. Pause verification jobs to prevent noise. -4. Investigate and remediate upstream issues before re-enabling guards. 
- ---- - -## 7 · References - -- [Aggregation-Only Contract reference](../ingestion/aggregation-only-contract.md) -- [Authority scopes & tenancy](../security/authority-scopes.md) -- [Observability guide](../observability/observability.md) -- [CLI AOC commands](../cli/cli-reference.md) -- [Concelier architecture](../ARCHITECTURE_CONCELIER.md) -- [Excititor architecture](../ARCHITECTURE_EXCITITOR.md) - ---- - -## 8 · Compliance checklist - -- [ ] Validators documented and scripts referenced for online/offline deployments. -- [ ] Environment variables cover guard enablement, metrics, and tenant header. -- [ ] Read-only verifier user installation steps included. -- [ ] Offline kit instructions align with validator/verification workflow. -- [ ] Rollback procedure captured. -- [ ] Cross-links to AOC docs, Authority scopes, and observability guides present. -- [ ] DevOps Guild sign-off tracked (owner: @devops-guild, due 2025-10-29). - ---- - -*Last updated: 2025-10-26 (Sprint 19).* +# Container Deployment Guide — AOC Update + +> **Audience:** DevOps Guild, platform operators deploying StellaOps services. +> **Scope:** Deployment configuration changes required by the Aggregation-Only Contract (AOC), including schema validators, guard environment flags, and verifier identities. + +This guide supplements existing deployment manuals with AOC-specific configuration. It assumes familiarity with the base Compose/Helm manifests described in `ops/deployment/` and `docs/ARCHITECTURE_DEVOPS.md`. + +--- + +## 1 · Schema validator enablement + +### 1.1 MongoDB validators + +- Apply JSON schema validators to `advisory_raw` and `vex_raw` collections before enabling AOC guards. +- Before enabling validators or the idempotency index, run the duplicate audit helper to confirm no conflicting raw advisories remain: + ```bash + mongo concelier ops/devops/scripts/check-advisory-raw-duplicates.js --eval 'var LIMIT=200;' + ``` + Resolve any reported rows prior to rollout. +- Use the migration script provided in `ops/devops/scripts/apply-aoc-validators.js`: + +```bash +kubectl exec -n concelier deploy/concelier-mongo -- \ + mongo concelier ops/devops/scripts/apply-aoc-validators.js + +kubectl exec -n excititor deploy/excititor-mongo -- \ + mongo excititor ops/devops/scripts/apply-aoc-validators.js +``` + +- Validators enforce required fields (`tenant`, `source`, `upstream`, `linkset`) and reject forbidden keys at DB level. +- Rollback plan: validators are applied with `validationLevel: moderate`—downgrade via the same script with `--remove` if required. + +### 1.2 Migration order + +1. Deploy validators in maintenance window. +2. Roll out Concelier/Excititor images with guard middleware enabled (`AOC_GUARD_ENABLED=true`). +3. Run smoke tests (`stella sources ingest --dry-run` fixtures) before resuming production ingestion. + +### 1.3 Supersedes backfill verification + +1. **Duplicate audit:** Confirm `mongo concelier ops/devops/scripts/check-advisory-raw-duplicates.js --eval 'var LIMIT=200;'` reports no conflicts before restarting Concelier with the new migrations. +2. **Post-migration check:** After the service restarts, validate that `db.advisory` is a view pointing to `advisory_backup_20251028`: + ```bash + mongo concelier --quiet --eval 'db.getCollectionInfos({ name: "advisory" })[0]' + ``` + The `type` should be `"view"` and `options.viewOn` should equal `"advisory_backup_20251028"`. +3. 
**Supersedes chain spot-check:** Inspect a sample set to ensure deterministic chaining: + ```bash + mongo concelier --quiet --eval ' + db.advisory_raw.aggregate([ + { $match: { "upstream.upstream_id": { $exists: true } } }, + { $sort: { "tenant": 1, "source.vendor": 1, "upstream.upstream_id": 1, "upstream.retrieved_at": 1 } }, + { $limit: 5 }, + { $project: { _id: 1, supersedes: 1 } } + ]).forEach(printjson)' + ``` + Each revision should reference the previous `_id` (or `null` for the first revision). Record findings in the change ticket before proceeding to production. + +--- + +## 2 · Container environment flags + +Add the following environment variables to Concelier/Excititor deployments: + +| Variable | Default | Description | +|----------|---------|-------------| +| `AOC_GUARD_ENABLED` | `true` | Enables `AOCWriteGuard` interception. Set `false` only for controlled rollback. | +| `AOC_ALLOW_SUPERSEDES_RETROFIT` | `false` | Allows temporary supersedes backfill during migration. Remove after cutover. | +| `AOC_METRICS_ENABLED` | `true` | Emits `ingestion_write_total`, `aoc_violation_total`, etc. | +| `AOC_TENANT_HEADER` | `X-Stella-Tenant` | Header name expected from Gateway. | +| `AOC_VERIFIER_USER` | `stella-aoc-verify` | Read-only service user used by UI/CLI verification. | + +Compose snippet: + +```yaml +environment: + - AOC_GUARD_ENABLED=true + - AOC_ALLOW_SUPERSEDES_RETROFIT=false + - AOC_METRICS_ENABLED=true + - AOC_TENANT_HEADER=X-Stella-Tenant + - AOC_VERIFIER_USER=stella-aoc-verify +``` + +Ensure `AOC_VERIFIER_USER` exists in Authority with `aoc:verify` scope and no write permissions. + +--- + +## 3 · Verifier identity + +- Create a dedicated client (`stella-aoc-verify`) via Authority bootstrap: + +```yaml +clients: + - clientId: stella-aoc-verify + grantTypes: [client_credentials] + scopes: [aoc:verify, advisory:read, vex:read] + tenants: [default] +``` + +- Store credentials in secret store (`Kubernetes Secret`, `Docker swarm secret`). +- Bind credentials to `stella aoc verify` CI jobs and Console verification service. +- Rotate quarterly; document in `ops/authority-key-rotation.md`. + +--- + +## 4 · Deployment steps + +1. **Pre-checks:** Confirm database backups, alerting in maintenance mode, and staging environment validated. +2. **Apply validators:** Run scripts per § 1.1. +3. **Update manifests:** Inject environment variables (§ 2) and mount guard configuration configmaps. +4. **Redeploy services:** Rolling restart Concelier/Excititor pods. Monitor `ingestion_write_total` for steady throughput. +5. **Seed verifier:** Deploy read-only verifier user and store credentials. +6. **Run verification:** Execute `stella aoc verify --since 24h` and ensure exit code `0`. +7. **Update dashboards:** Point Grafana panels to new metrics (`aoc_violation_total`). +8. **Record handoff:** Capture console screenshots and verification logs for release notes. + +--- + +## 5 · Offline Kit updates + +- Ship validator scripts with Offline Kit (`offline-kit/scripts/apply-aoc-validators.js`). +- Include pre-generated verification reports for air-gapped deployments. +- Document offline CLI workflow in bundle README referencing `docs/cli/cli-reference.md`. +- Ensure `stella-aoc-verify` credentials are scoped to offline tenant and rotated during bundle refresh. + +--- + +## 6 · Rollback plan + +1. Disable guard via `AOC_GUARD_ENABLED=false` on Concelier/Excititor and rollout. +2. Remove validators with the migration script (`--remove`). +3. Pause verification jobs to prevent noise. +4. 
Investigate and remediate upstream issues before re-enabling guards. + +--- + +## 7 · References + +- [Aggregation-Only Contract reference](../ingestion/aggregation-only-contract.md) +- [Authority scopes & tenancy](../security/authority-scopes.md) +- [Observability guide](../observability/observability.md) +- [CLI AOC commands](../cli/cli-reference.md) +- [Concelier architecture](../ARCHITECTURE_CONCELIER.md) +- [Excititor architecture](../ARCHITECTURE_EXCITITOR.md) + +--- + +## 8 · Compliance checklist + +- [ ] Validators documented and scripts referenced for online/offline deployments. +- [ ] Environment variables cover guard enablement, metrics, and tenant header. +- [ ] Read-only verifier user installation steps included. +- [ ] Offline kit instructions align with validator/verification workflow. +- [ ] Rollback procedure captured. +- [ ] Cross-links to AOC docs, Authority scopes, and observability guides present. +- [ ] DevOps Guild sign-off tracked (owner: @devops-guild, due 2025-10-29). + +--- + +*Last updated: 2025-10-26 (Sprint 19).* diff --git a/docs/dev/30_EXCITITOR_CONNECTOR_GUIDE.md b/docs/dev/30_EXCITITOR_CONNECTOR_GUIDE.md index bd2362bb..c0fc6c5a 100644 --- a/docs/dev/30_EXCITITOR_CONNECTOR_GUIDE.md +++ b/docs/dev/30_EXCITITOR_CONNECTOR_GUIDE.md @@ -1,220 +1,220 @@ -# Excititor Connector Packaging Guide - -> **Audience:** teams implementing new Excititor provider plug‑ins (CSAF feeds, -> OpenVEX attestations, etc.) -> **Prerequisites:** read `docs/ARCHITECTURE_EXCITITOR.md` and the module -> `AGENTS.md` in `src/StellaOps.Excititor.Connectors.Abstractions/`. - -The Excititor connector SDK gives you: - -- `VexConnectorBase` – deterministic logging, SHA‑256 helpers, time provider. -- `VexConnectorOptionsBinder` – strongly typed YAML/JSON configuration binding. -- `IVexConnectorOptionsValidator` – custom validation hooks (offline defaults, auth invariants). -- `VexConnectorDescriptor` & metadata helpers for consistent telemetry. - -This guide explains how to package a connector so the Excititor Worker/WebService -can load it via the plugin host. - ---- - -## 1. Project layout - -Start from the template under -`docs/dev/templates/excititor-connector/`. It contains: - -``` -Excititor.MyConnector/ -├── src/ -│ ├── Excititor.MyConnector.csproj -│ ├── MyConnectorOptions.cs -│ ├── MyConnector.cs -│ └── MyConnectorPlugin.cs -└── manifest/ - └── connector.manifest.yaml -``` - -Key points: - -- Target `net10.0`, enable `TreatWarningsAsErrors`, reference the - `StellaOps.Excititor.Connectors.Abstractions` project (or NuGet once published). -- Keep project ID prefix `StellaOps.Excititor.Connectors.` so the - plugin loader can discover it with the default search pattern. - -### 1.1 csproj snippet - -```xml - - - net10.0 - enable - enable - true - - - - - -``` - -Adjust the `ProjectReference` for your checkout (or switch to a NuGet package -once published). - ---- - -## 2. Implement the connector - -1. **Options model** – create an options POCO with data-annotation attributes. - Bind it via `VexConnectorOptionsBinder.Bind` in your connector - constructor or `ValidateAsync`. -2. **Validator** – implement `IVexConnectorOptionsValidator` to add - complex checks (e.g., ensure both `clientId` and `clientSecret` are present). -3. **Connector** – inherit from `VexConnectorBase`. Implement: - - `ValidateAsync` – run binder/validators, log configuration summary. - - `FetchAsync` – stream raw documents to `context.RawSink`. 
- - `NormalizeAsync` – convert raw documents into `VexClaimBatch` via - format-specific normalizers (`context.Normalizers`). -4. **Plugin adapter** – expose the connector via a plugin entry point so the - host can instantiate it. - -### 2.1 Options binding example - -```csharp -public sealed class MyConnectorOptions -{ - [Required] - [Url] - public string CatalogUri { get; set; } = default!; - - [Required] - public string ApiKey { get; set; } = default!; - - [Range(1, 64)] - public int MaxParallelRequests { get; set; } = 4; -} - -public sealed class MyConnectorOptionsValidator : IVexConnectorOptionsValidator -{ - public void Validate(VexConnectorDescriptor descriptor, MyConnectorOptions options, IList errors) - { - if (!options.CatalogUri.StartsWith("https://", StringComparison.OrdinalIgnoreCase)) - { - errors.Add("CatalogUri must use HTTPS."); - } - } -} -``` - -Bind inside the connector: - -```csharp -private readonly MyConnectorOptions _options; - -public MyConnector(VexConnectorDescriptor descriptor, ILogger logger, TimeProvider timeProvider) - : base(descriptor, logger, timeProvider) -{ - // `settings` comes from the orchestrator; validators registered via DI. - _options = VexConnectorOptionsBinder.Bind( - descriptor, - VexConnectorSettings.Empty, - validators: new[] { new MyConnectorOptionsValidator() }); -} -``` - -Replace `VexConnectorSettings.Empty` with the actual settings from context -inside `ValidateAsync`. - ---- - -## 3. Plugin adapter & manifest - -Create a simple plugin class that implements -`StellaOps.Plugin.IConnectorPlugin`. The Worker/WebService plugin host uses -this contract today. - -```csharp -public sealed class MyConnectorPlugin : IConnectorPlugin -{ - private static readonly VexConnectorDescriptor Descriptor = - new("excititor:my-provider", VexProviderKind.Vendor, "My Provider VEX"); - - public string Name => Descriptor.DisplayName; - - public bool IsAvailable(IServiceProvider services) => true; // inject feature flags if needed - - public IFeedConnector Create(IServiceProvider services) - { - var logger = services.GetRequiredService>(); - var timeProvider = services.GetRequiredService(); - return new MyConnector(Descriptor, logger, timeProvider); - } -} -``` - -> **Note:** the Excititor Worker currently instantiates connectors through the -> shared `IConnectorPlugin` contract. Once a dedicated Excititor plugin interface -> lands you simply swap the base interface; the descriptor/connector code -> remains unchanged. - -Provide a manifest describing the assembly for operational tooling: - -```yaml -# manifest/connector.manifest.yaml -id: excititor-my-provider -assembly: StellaOps.Excititor.Connectors.MyProvider.dll -entryPoint: StellaOps.Excititor.Connectors.MyProvider.MyConnectorPlugin -description: > - Official VEX feed for ExampleCorp products (CSAF JSON, daily updates). -tags: - - excititor - - csaf - - vendor -``` - -Store manifests under `/opt/stella/excititor/plugins//manifest/` in -production so the deployment tooling can inventory and verify plug‑ins. - ---- - -## 4. Packaging workflow - -1. `dotnet publish -c Release` → copy the published DLLs to - `/opt/stella/excititor/plugins//`. -2. Place `connector.manifest.yaml` next to the binaries. -3. Restart the Excititor Worker or WebService (hot reload not supported yet). -4. Verify logs: `VEX-ConnectorLoader` should list the connector descriptor. - -### 4.1 Offline kits - -- Add the connector folder (binaries + manifest) to the Offline Kit bundle. 
-- Include a `settings.sample.yaml` demonstrating offline-friendly defaults. -- Document any external dependencies (e.g., SHA mirrors) in the manifest `notes` - field. - ---- - -## 5. Testing checklist - -- Unit tests around options binding & validators. -- Integration tests (future `StellaOps.Excititor.Connectors.Abstractions.Tests`) - verifying deterministic logging scopes: - `logger.BeginScope` should produce `vex.connector.id`, `vex.connector.kind`, - and `vex.connector.operation`. -- Deterministic SHA tests: repeated `CreateRawDocument` calls with identical - content must return the same digest. - ---- - -## 6. Reference template - -See `docs/dev/templates/excititor-connector/` for the full quick‑start including: - -- Sample options class + validator. -- Connector implementation inheriting from `VexConnectorBase`. -- Plugin adapter + manifest. - -Copy the directory, rename namespaces/IDs, then iterate on provider-specific -logic. - ---- - -*Last updated: 2025-10-17* +# Excititor Connector Packaging Guide + +> **Audience:** teams implementing new Excititor provider plug‑ins (CSAF feeds, +> OpenVEX attestations, etc.) +> **Prerequisites:** read `docs/ARCHITECTURE_EXCITITOR.md` and the module +> `AGENTS.md` in `src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Abstractions/`. + +The Excititor connector SDK gives you: + +- `VexConnectorBase` – deterministic logging, SHA‑256 helpers, time provider. +- `VexConnectorOptionsBinder` – strongly typed YAML/JSON configuration binding. +- `IVexConnectorOptionsValidator` – custom validation hooks (offline defaults, auth invariants). +- `VexConnectorDescriptor` & metadata helpers for consistent telemetry. + +This guide explains how to package a connector so the Excititor Worker/WebService +can load it via the plugin host. + +--- + +## 1. Project layout + +Start from the template under +`docs/dev/templates/excititor-connector/`. It contains: + +``` +Excititor.MyConnector/ +├── src/ +│ ├── Excititor.MyConnector.csproj +│ ├── MyConnectorOptions.cs +│ ├── MyConnector.cs +│ └── MyConnectorPlugin.cs +└── manifest/ + └── connector.manifest.yaml +``` + +Key points: + +- Target `net10.0`, enable `TreatWarningsAsErrors`, reference the + `StellaOps.Excititor.Connectors.Abstractions` project (or NuGet once published). +- Keep project ID prefix `StellaOps.Excititor.Connectors.` so the + plugin loader can discover it with the default search pattern. + +### 1.1 csproj snippet + +```xml + + + net10.0 + enable + enable + true + + + + + +``` + +Adjust the `ProjectReference` for your checkout (or switch to a NuGet package +once published). + +--- + +## 2. Implement the connector + +1. **Options model** – create an options POCO with data-annotation attributes. + Bind it via `VexConnectorOptionsBinder.Bind` in your connector + constructor or `ValidateAsync`. +2. **Validator** – implement `IVexConnectorOptionsValidator` to add + complex checks (e.g., ensure both `clientId` and `clientSecret` are present). +3. **Connector** – inherit from `VexConnectorBase`. Implement: + - `ValidateAsync` – run binder/validators, log configuration summary. + - `FetchAsync` – stream raw documents to `context.RawSink`. + - `NormalizeAsync` – convert raw documents into `VexClaimBatch` via + format-specific normalizers (`context.Normalizers`). +4. **Plugin adapter** – expose the connector via a plugin entry point so the + host can instantiate it. 
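+
+For orientation, the sketch below shows the general shape of step 3's `FetchAsync` stage: fetch the upstream payload, wrap it via the `CreateRawDocument` helper, and hand it to the raw sink. The `Task` return type, the `_httpClient` field, the `CreateRawDocument` parameter list, and `context.RawSink.StoreAsync` are illustrative assumptions, not the published SDK contract; copy the real signatures from the template referenced in section 6.
+
+```csharp
+// Illustrative sketch only. SDK member names and signatures below are assumptions;
+// follow docs/dev/templates/excititor-connector/ for the authoritative shapes.
+public override async Task FetchAsync(VexConnectorContext context, CancellationToken cancellationToken)
+{
+    // _httpClient is an assumed injected HttpClient; _options comes from the binding shown in section 2.1.
+    using var response = await _httpClient.GetAsync(_options.CatalogUri, cancellationToken);
+    response.EnsureSuccessStatusCode();
+
+    var payload = await response.Content.ReadAsByteArrayAsync(cancellationToken);
+
+    // CreateRawDocument is the VexConnectorBase helper mentioned in section 5; parameters are assumed here.
+    var rawDocument = CreateRawDocument(_options.CatalogUri, payload);
+
+    // Stream the raw document to the orchestrator-provided sink (member name assumed).
+    await context.RawSink.StoreAsync(rawDocument, cancellationToken);
+}
+```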
+ +### 2.1 Options binding example + +```csharp +public sealed class MyConnectorOptions +{ + [Required] + [Url] + public string CatalogUri { get; set; } = default!; + + [Required] + public string ApiKey { get; set; } = default!; + + [Range(1, 64)] + public int MaxParallelRequests { get; set; } = 4; +} + +public sealed class MyConnectorOptionsValidator : IVexConnectorOptionsValidator +{ + public void Validate(VexConnectorDescriptor descriptor, MyConnectorOptions options, IList errors) + { + if (!options.CatalogUri.StartsWith("https://", StringComparison.OrdinalIgnoreCase)) + { + errors.Add("CatalogUri must use HTTPS."); + } + } +} +``` + +Bind inside the connector: + +```csharp +private readonly MyConnectorOptions _options; + +public MyConnector(VexConnectorDescriptor descriptor, ILogger logger, TimeProvider timeProvider) + : base(descriptor, logger, timeProvider) +{ + // `settings` comes from the orchestrator; validators registered via DI. + _options = VexConnectorOptionsBinder.Bind( + descriptor, + VexConnectorSettings.Empty, + validators: new[] { new MyConnectorOptionsValidator() }); +} +``` + +Replace `VexConnectorSettings.Empty` with the actual settings from context +inside `ValidateAsync`. + +--- + +## 3. Plugin adapter & manifest + +Create a simple plugin class that implements +`StellaOps.Plugin.IConnectorPlugin`. The Worker/WebService plugin host uses +this contract today. + +```csharp +public sealed class MyConnectorPlugin : IConnectorPlugin +{ + private static readonly VexConnectorDescriptor Descriptor = + new("excititor:my-provider", VexProviderKind.Vendor, "My Provider VEX"); + + public string Name => Descriptor.DisplayName; + + public bool IsAvailable(IServiceProvider services) => true; // inject feature flags if needed + + public IFeedConnector Create(IServiceProvider services) + { + var logger = services.GetRequiredService>(); + var timeProvider = services.GetRequiredService(); + return new MyConnector(Descriptor, logger, timeProvider); + } +} +``` + +> **Note:** the Excititor Worker currently instantiates connectors through the +> shared `IConnectorPlugin` contract. Once a dedicated Excititor plugin interface +> lands you simply swap the base interface; the descriptor/connector code +> remains unchanged. + +Provide a manifest describing the assembly for operational tooling: + +```yaml +# manifest/connector.manifest.yaml +id: excititor-my-provider +assembly: StellaOps.Excititor.Connectors.MyProvider.dll +entryPoint: StellaOps.Excititor.Connectors.MyProvider.MyConnectorPlugin +description: > + Official VEX feed for ExampleCorp products (CSAF JSON, daily updates). +tags: + - excititor + - csaf + - vendor +``` + +Store manifests under `/opt/stella/excititor/plugins//manifest/` in +production so the deployment tooling can inventory and verify plug‑ins. + +--- + +## 4. Packaging workflow + +1. `dotnet publish -c Release` → copy the published DLLs to + `/opt/stella/excititor/plugins//`. +2. Place `connector.manifest.yaml` next to the binaries. +3. Restart the Excititor Worker or WebService (hot reload not supported yet). +4. Verify logs: `VEX-ConnectorLoader` should list the connector descriptor. + +### 4.1 Offline kits + +- Add the connector folder (binaries + manifest) to the Offline Kit bundle. +- Include a `settings.sample.yaml` demonstrating offline-friendly defaults. +- Document any external dependencies (e.g., SHA mirrors) in the manifest `notes` + field. + +--- + +## 5. Testing checklist + +- Unit tests around options binding & validators. 
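+- Example validator unit test (a minimal sketch; it assumes the error collection is
+  `IList<string>` and reuses the descriptor/validator shapes from sections 2.1 and 3):
+
+  ```csharp
+  [Fact]
+  public void Validate_RejectsNonHttpsCatalogUri()
+  {
+      var descriptor = new VexConnectorDescriptor(
+          "excititor:my-provider", VexProviderKind.Vendor, "My Provider VEX");
+      var options = new MyConnectorOptions
+      {
+          CatalogUri = "http://example.test/catalog", // violates the HTTPS rule
+          ApiKey = "test-key",
+      };
+      var errors = new List<string>();
+
+      new MyConnectorOptionsValidator().Validate(descriptor, options, errors);
+
+      Assert.Contains("CatalogUri must use HTTPS.", errors);
+  }
+  ```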
+- Integration tests (future `StellaOps.Excititor.Connectors.Abstractions.Tests`) + verifying deterministic logging scopes: + `logger.BeginScope` should produce `vex.connector.id`, `vex.connector.kind`, + and `vex.connector.operation`. +- Deterministic SHA tests: repeated `CreateRawDocument` calls with identical + content must return the same digest. + +--- + +## 6. Reference template + +See `docs/dev/templates/excititor-connector/` for the full quick‑start including: + +- Sample options class + validator. +- Connector implementation inheriting from `VexConnectorBase`. +- Plugin adapter + manifest. + +Copy the directory, rename namespaces/IDs, then iterate on provider-specific +logic. + +--- + +*Last updated: 2025-10-17* diff --git a/docs/dev/30_VEXER_CONNECTOR_GUIDE.md b/docs/dev/30_VEXER_CONNECTOR_GUIDE.md index 4affd54f..7eba6a8c 100644 --- a/docs/dev/30_VEXER_CONNECTOR_GUIDE.md +++ b/docs/dev/30_VEXER_CONNECTOR_GUIDE.md @@ -3,7 +3,7 @@ > **Audience:** teams implementing new Vexer provider plug‑ins (CSAF feeds, > OpenVEX attestations, etc.) > **Prerequisites:** read `docs/ARCHITECTURE_VEXER.md` and the module -> `AGENTS.md` in `src/StellaOps.Vexer.Connectors.Abstractions/`. +> `AGENTS.md` in `src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Abstractions/`. The Vexer connector SDK gives you: diff --git a/docs/dev/31_AUTHORITY_PLUGIN_DEVELOPER_GUIDE.md b/docs/dev/31_AUTHORITY_PLUGIN_DEVELOPER_GUIDE.md index 6949965b..168304c0 100644 --- a/docs/dev/31_AUTHORITY_PLUGIN_DEVELOPER_GUIDE.md +++ b/docs/dev/31_AUTHORITY_PLUGIN_DEVELOPER_GUIDE.md @@ -1,212 +1,212 @@ -# Authority Plug-in Developer Guide - -> **Status:** Updated 2025-10-11 (AUTHPLUG-DOCS-01-001) with lifecycle + limiter diagrams and refreshed rate-limit guidance aligned to PLG6 acceptance criteria. - -## 1. Overview -Authority plug-ins extend the **StellaOps Authority** service with custom identity providers, credential stores, and client-management logic. Unlike Concelier plug-ins (which ingest or export advisories), Authority plug-ins participate directly in authentication flows: - -- **Use cases:** integrate corporate directories (LDAP/AD)[^ldap-rfc], delegate to external IDPs, enforce bespoke password/lockout policies, or add client provisioning automation. -- **Constraints:** plug-ins load only during service start (no hot-reload), must function without outbound internet access, and must emit deterministic results for identical configuration input. -- **Ship targets:** build against the host’s .NET 10 preview SDK, honour offline-first requirements, and surface actionable diagnostics so operators can triage issues from `/ready`. - -## 2. Architecture Snapshot -Authority hosts follow a deterministic plug-in lifecycle. The exported diagram (`docs/assets/authority/authority-plugin-lifecycle.svg`) mirrors the steps below; regenerate it from the Mermaid source if you update the flow. - -1. **Configuration load** – `AuthorityPluginConfigurationLoader` resolves YAML manifests under `etc/authority.plugins/`. -2. **Assembly discovery** – the shared `PluginHost` scans `StellaOps.Authority.PluginBinaries` for `StellaOps.Authority.Plugin.*.dll` assemblies. -3. **Registrar execution** – each assembly is searched for `IAuthorityPluginRegistrar` implementations. Registrars bind options, register services, and optionally queue bootstrap tasks. -4. **Runtime** – the host resolves `IIdentityProviderPlugin` instances, uses capability metadata to decide which OAuth grants to expose, and invokes health checks for readiness endpoints. 
- -![Authority plug-in lifecycle diagram](../assets/authority/authority-plugin-lifecycle.svg) - -_Source:_ `docs/assets/authority/authority-plugin-lifecycle.mmd` - -**Data persistence primer:** the standard Mongo-backed plugin stores users in collections named `authority_users_` and lockout metadata in embedded documents. Additional plugins must document their storage layout and provide deterministic collection naming to honour the Offline Kit replication process. - -## 3. Capability Metadata -Capability flags let the host reason about what your plug-in supports: - -- Declare capabilities in your descriptor using the string constants from `AuthorityPluginCapabilities` (`password`, `mfa`, `clientProvisioning`, `bootstrap`). The configuration loader now validates these tokens and rejects unknown values at startup. -- `AuthorityIdentityProviderCapabilities.FromCapabilities` projects those strings into strongly typed booleans (`SupportsPassword`, etc.). Authority Core will use these flags when wiring flows such as the password grant. Built-in plugins (e.g., Standard) will fail fast or force-enable required capabilities if the descriptor is misconfigured, so keep manifests accurate. -- Typical configuration (`etc/authority.plugins/standard.yaml`): - ```yaml - plugins: - descriptors: - standard: - assemblyName: "StellaOps.Authority.Plugin.Standard" - capabilities: - - password - - bootstrap - ``` -- Only declare a capability if the plug-in genuinely implements it. For example, if `SupportsClientProvisioning` is `true`, the plug-in must supply a working `IClientProvisioningStore`. - -**Operational reminder:** the Authority host surfaces capability summaries during startup (see `AuthorityIdentityProviderRegistry` log lines). Use those logs during smoke tests to ensure manifests align with expectations. - -**Configuration path normalisation:** Manifest-relative paths (e.g., `tokenSigning.keyDirectory: "../keys"`) are resolved against the YAML file location and environment variables are expanded before validation. Plug-ins should expect to receive an absolute, canonical path when options are injected. - -**Password policy guardrails:** The Standard registrar logs a warning when a plug-in weakens the default password policy (minimum length or required character classes). Keep overrides at least as strong as the compiled defaults—operators treat the warning as an actionable security deviation. - -## 4. Project Scaffold -- Target **.NET 10 preview**, enable nullable, treat warnings as errors, and mark Authority plug-ins with `true`. -- Minimum references: - - `StellaOps.Authority.Plugins.Abstractions` (contracts & capability helpers) - - `StellaOps.Plugin` (hosting/DI helpers) - - `StellaOps.Auth.*` libraries as needed for shared token utilities (optional today). -- Example `.csproj` (trimmed from `StellaOps.Authority.Plugin.Standard`): - ```xml - - - net10.0 - enable - true - true - - - - - - - ``` - (Add other references—e.g., MongoDB driver, shared auth libraries—according to your implementation.) - -## 5. Implementing `IAuthorityPluginRegistrar` -- Create a parameterless registrar class that returns your plug-in type name via `PluginType`. -- Use `AuthorityPluginRegistrationContext` to: - - Bind options (`AddOptions(pluginName).Bind(...)`). - - Register singletons for stores/enrichers using manifest metadata. - - Register any hosted bootstrap tasks (e.g., seed admin users). 
-- Always validate configuration inside `PostConfigure` and throw meaningful `InvalidOperationException` to fail fast during startup. -- Use the provided `ILoggerFactory` from DI; avoid static loggers or console writes. -- Example skeleton: - ```csharp - internal sealed class MyPluginRegistrar : IAuthorityPluginRegistrar - { - public string PluginType => "my-custom"; - - public void Register(AuthorityPluginRegistrationContext context) - { - var name = context.Plugin.Manifest.Name; - - context.Services.AddOptions(name) - .Bind(context.Plugin.Configuration) - .PostConfigure(opts => opts.Validate(name)); - - context.Services.AddSingleton(sp => - new MyIdentityProvider(context.Plugin, sp.GetRequiredService(), - sp.GetRequiredService(), - sp.GetRequiredService>())); - } - } - ``` - -## 6. Identity Provider Surface -- Implement `IIdentityProviderPlugin` to expose: - - `IUserCredentialStore` for password validation and user CRUD. - - `IClaimsEnricher` to append roles/attributes onto issued principals. - - Optional `IClientProvisioningStore` for machine-to-machine clients. - - `AuthorityIdentityProviderCapabilities` to advertise supported flows. -- Password guidance: - - Standard plug-in hashes via `ICryptoProvider` using Argon2id by default and emits PHC-compliant strings. Successful PBKDF2 logins trigger automatic rehashes so migrations complete gradually. See `docs/security/password-hashing.md` for tuning advice. - - Enforce password policies before hashing to avoid storing weak credentials. -- Health checks should probe backing stores (e.g., Mongo `ping`) and return `AuthorityPluginHealthResult` so `/ready` can surface issues. -- When supporting additional factors (e.g., TOTP), implement `SupportsMfa` and document the enrolment flow for resource servers. - -## 7. Configuration & Secrets -- Authority looks for manifests under `etc/authority.plugins/`. Each YAML file maps directly to a plug-in name. -- Support environment overrides using `STELLAOPS_AUTHORITY_PLUGINS__DESCRIPTORS____...`. -- Never store raw secrets in git: allow operators to supply them via `.local.yaml`, environment variables, or injected secret files. Document which keys are mandatory. -- Validate configuration as soon as the registrar runs; use explicit error messages to guide operators. The Standard plug-in now enforces complete bootstrap credentials (username + password) and positive lockout windows via `StandardPluginOptions.Validate`. -- Cross-reference bootstrap workflows with `docs/ops/authority_bootstrap.md` (to be published alongside CORE6) so operators can reuse the same payload formats for manual provisioning. -- `passwordHashing` inherits defaults from `authority.security.passwordHashing`. Override only when hardware constraints differ per plug-in: - ```yaml - passwordHashing: - algorithm: Argon2id - memorySizeInKib: 19456 - iterations: 2 - parallelism: 1 - ``` - Invalid values (≤0) fail fast during startup, and legacy PBKDF2 hashes rehash automatically once the new algorithm succeeds. - -### 7.1 Token Persistence Contract -- The host automatically persists every issued principal (access, refresh, device, authorization code) in `authority_tokens`. Plug-in code **must not** bypass this store; use the provided `IAuthorityTokenStore` helpers when implementing custom flows. -- When a plug-in disables a subject or client outside the standard handlers, call `IAuthorityTokenStore.UpdateStatusAsync(...)` for each affected token so revocation bundles stay consistent. 
-- Supply machine-friendly `revokedReason` codes (`compromised`, `rotation`, `policy`, `lifecycle`, etc.) and optional `revokedMetadata` entries when invalidating credentials. These flow straight into `revocation-bundle.json` and should remain deterministic. -- Token scopes should be normalised (trimmed, unique, ordinal sort) before returning from plug-in verification paths. `TokenPersistenceHandlers` will keep that ordering for downstream consumers. - -### 7.2 Claims & Enrichment Checklist -- Authority always sets the OpenID Connect basics: `sub`, `client_id`, `preferred_username`, optional `name`, and `role` (for password flows). Plug-ins must use `IClaimsEnricher` to append additional claims in a **deterministic** order (sort arrays, normalise casing) so resource servers can rely on stable shapes. -- Recommended enrichment keys: - - `stellaops.realm` – plug-in/tenant identifier so services can scope policies. - - `stellaops.subject.type` – values such as `human`, `service`, `bootstrap`. - - `groups` / `projects` – sorted arrays describing operator entitlements. -- Claims visible in tokens should mirror what `/token` and `/userinfo` emit. Avoid injecting sensitive PII directly; mark values with `ClassifiedString.Personal` inside the plug-in so audit sinks can tag them appropriately. -- For client-credential flows, remember to enrich both the client principal and the validation path (`TokenValidationHandlers`) so refresh flows keep the same metadata. - -### 7.3 Revocation Bundles & Reasons -- Use `IAuthorityRevocationStore` to record subject/client/token revocations when credentials are deleted or rotated. Stick to the standard categories (`token`, `subject`, `client`, `key`). -- Include a deterministic `reason` string and optional `reasonDescription` so operators understand *why* a subject was revoked when inspecting bundles offline. -- Plug-ins should populate `metadata` with stable keys (e.g., `revokedBy`, `sourcePlugin`, `ticketId`) to simplify SOC correlation. The keys must be lowercase, ASCII, and free of secrets—bundles are mirrored to air-gapped agents. - -## 8. Rate Limiting & Lockout Interplay -Rate limiting and account lockouts are complementary controls. Plug-ins must surface both deterministically so operators can correlate limiter hits with credential rejections. - -**Baseline quotas** (from `docs/dev/authority-rate-limit-tuning-outline.md`): - -| Endpoint | Default policy | Notes | -|----------|----------------|-------| -| `/token` | 30 requests / 60s, queue 0 | Drop to 10/60s for untrusted ranges; raise only with WAF + monitoring. | -| `/authorize` | 60 requests / 60s, queue 10 | Reduce carefully; interactive UX depends on headroom. | -| `/internal/*` | Disabled by default; recommended 5/60s when enabled | Keep queue 0 for bootstrap APIs. | - -**Retry metadata:** The middleware stamps `Retry-After` plus tags `authority.client_id`, `authority.remote_ip`, and `authority.endpoint`. Plug-ins should keep these tags intact when crafting responses or telemetry so dashboards remain consistent. - -**Lockout counters:** Treat lockouts as **subject-scoped** decisions. When multiple instances update counters, reuse the deterministic tie-breakers documented in `src/DEDUP_CONFLICTS_RESOLUTION_ALGO.md` (freshness overrides, precedence, and stable hashes) to avoid divergent lockout states across replicas. - -**Alerting hooks:** Emit structured logs/metrics when either the limiter or credential store rejects access. 
Suggested gauges include `aspnetcore_rate_limiting_rejections_total{limiter="authority-token"}` and any custom `auth.plugins..lockouts_total` counter. - -![Authority rate limit and lockout flow](../assets/authority/authority-rate-limit-flow.svg) - -_Source:_ `docs/assets/authority/authority-rate-limit-flow.mmd` - -## 9. Logging, Metrics, and Diagnostics -- Always log via the injected `ILogger`; include `pluginName` and correlation IDs where available. -- Activity/metric names should align with `AuthorityTelemetry` constants (`service.name=stellaops-authority`). -- Expose additional diagnostics via structured logging rather than writing custom HTTP endpoints; the host will integrate these into `/health` and `/ready`. -- Emit metrics with stable names (`auth.plugins..*`) when introducing custom instrumentation; coordinate with the Observability guild to reserve prefixes. - -## 10. Testing & Tooling -- Unit tests: use Mongo2Go (or similar) to exercise credential stores without hitting production infrastructure (`StandardUserCredentialStoreTests` is a template). -- Determinism: fix timestamps to UTC and sort outputs consistently; avoid random GUIDs unless stable. -- Smoke tests: launch `dotnet run --project src/StellaOps.Authority/StellaOps.Authority` with your plug-in under `StellaOps.Authority.PluginBinaries` and verify `/ready`. -- Example verification snippet: - ```csharp - [Fact] - public async Task VerifyPasswordAsync_ReturnsSuccess() - { - var store = CreateCredentialStore(); - await store.UpsertUserAsync(new AuthorityUserRegistration("alice", "Pa55!", null, null, false, - Array.Empty(), new Dictionary()), CancellationToken.None); - - var result = await store.VerifyPasswordAsync("alice", "Pa55!", CancellationToken.None); - Assert.True(result.Succeeded); - Assert.True(result.User?.Roles.Count == 0); - } - ``` - -## 11. Packaging & Delivery -- Output assembly should follow `StellaOps.Authority.Plugin..dll` so the host’s search pattern picks it up. -- Place the compiled DLL plus dependencies under `StellaOps.Authority.PluginBinaries` for offline deployments; include hashes/signatures in release notes (Security Guild guidance forthcoming). -- Document any external prerequisites (e.g., CA cert bundle) in your plug-in README. -- Update `etc/authority.plugins/.yaml` samples and include deterministic SHA256 hashes for optional bootstrap payloads when distributing Offline Kit artefacts. - -[^ldap-rfc]: Lightweight Directory Access Protocol (LDAPv3) specification — [RFC 4511](https://datatracker.ietf.org/doc/html/rfc4511). - -## 12. Checklist & Handoff -- ✅ Capabilities declared and validated in automated tests. -- ✅ Bootstrap workflows documented (if `bootstrap` capability used) and repeatable. -- ✅ Local smoke test + unit/integration suites green (`dotnet test`). -- ✅ Operational docs updated: configuration keys, secrets guidance, troubleshooting. -- Submit the developer guide update referencing PLG6/DOC4 and tag DevEx + Docs reviewers for sign-off. - ---- -Mermaid sources for the embedded diagrams live under `docs/assets/authority/`. Regenerate the SVG assets with your preferred renderer before committing future updates so the visuals stay in sync with the `.mmd` definitions. +# Authority Plug-in Developer Guide + +> **Status:** Updated 2025-10-11 (AUTHPLUG-DOCS-01-001) with lifecycle + limiter diagrams and refreshed rate-limit guidance aligned to PLG6 acceptance criteria. + +## 1. 
Overview +Authority plug-ins extend the **StellaOps Authority** service with custom identity providers, credential stores, and client-management logic. Unlike Concelier plug-ins (which ingest or export advisories), Authority plug-ins participate directly in authentication flows: + +- **Use cases:** integrate corporate directories (LDAP/AD)[^ldap-rfc], delegate to external IDPs, enforce bespoke password/lockout policies, or add client provisioning automation. +- **Constraints:** plug-ins load only during service start (no hot-reload), must function without outbound internet access, and must emit deterministic results for identical configuration input. +- **Ship targets:** build against the host’s .NET 10 preview SDK, honour offline-first requirements, and surface actionable diagnostics so operators can triage issues from `/ready`. + +## 2. Architecture Snapshot +Authority hosts follow a deterministic plug-in lifecycle. The exported diagram (`docs/assets/authority/authority-plugin-lifecycle.svg`) mirrors the steps below; regenerate it from the Mermaid source if you update the flow. + +1. **Configuration load** – `AuthorityPluginConfigurationLoader` resolves YAML manifests under `etc/authority.plugins/`. +2. **Assembly discovery** – the shared `PluginHost` scans `StellaOps.Authority.PluginBinaries` for `StellaOps.Authority.Plugin.*.dll` assemblies. +3. **Registrar execution** – each assembly is searched for `IAuthorityPluginRegistrar` implementations. Registrars bind options, register services, and optionally queue bootstrap tasks. +4. **Runtime** – the host resolves `IIdentityProviderPlugin` instances, uses capability metadata to decide which OAuth grants to expose, and invokes health checks for readiness endpoints. + +![Authority plug-in lifecycle diagram](../assets/authority/authority-plugin-lifecycle.svg) + +_Source:_ `docs/assets/authority/authority-plugin-lifecycle.mmd` + +**Data persistence primer:** the standard Mongo-backed plugin stores users in collections named `authority_users_` and lockout metadata in embedded documents. Additional plugins must document their storage layout and provide deterministic collection naming to honour the Offline Kit replication process. + +## 3. Capability Metadata +Capability flags let the host reason about what your plug-in supports: + +- Declare capabilities in your descriptor using the string constants from `AuthorityPluginCapabilities` (`password`, `mfa`, `clientProvisioning`, `bootstrap`). The configuration loader now validates these tokens and rejects unknown values at startup. +- `AuthorityIdentityProviderCapabilities.FromCapabilities` projects those strings into strongly typed booleans (`SupportsPassword`, etc.). Authority Core will use these flags when wiring flows such as the password grant. Built-in plugins (e.g., Standard) will fail fast or force-enable required capabilities if the descriptor is misconfigured, so keep manifests accurate. +- Typical configuration (`etc/authority.plugins/standard.yaml`): + ```yaml + plugins: + descriptors: + standard: + assemblyName: "StellaOps.Authority.Plugin.Standard" + capabilities: + - password + - bootstrap + ``` +- Only declare a capability if the plug-in genuinely implements it. For example, if `SupportsClientProvisioning` is `true`, the plug-in must supply a working `IClientProvisioningStore`. + +**Operational reminder:** the Authority host surfaces capability summaries during startup (see `AuthorityIdentityProviderRegistry` log lines). 
Use those logs during smoke tests to ensure manifests align with expectations. + +**Configuration path normalisation:** Manifest-relative paths (e.g., `tokenSigning.keyDirectory: "../keys"`) are resolved against the YAML file location and environment variables are expanded before validation. Plug-ins should expect to receive an absolute, canonical path when options are injected. + +**Password policy guardrails:** The Standard registrar logs a warning when a plug-in weakens the default password policy (minimum length or required character classes). Keep overrides at least as strong as the compiled defaults—operators treat the warning as an actionable security deviation. + +## 4. Project Scaffold +- Target **.NET 10 preview**, enable nullable, treat warnings as errors, and mark Authority plug-ins with `true`. +- Minimum references: + - `StellaOps.Authority.Plugins.Abstractions` (contracts & capability helpers) + - `StellaOps.Plugin` (hosting/DI helpers) + - `StellaOps.Auth.*` libraries as needed for shared token utilities (optional today). +- Example `.csproj` (trimmed from `StellaOps.Authority.Plugin.Standard`): + ```xml + + + net10.0 + enable + true + true + + + + + + + ``` + (Add other references—e.g., MongoDB driver, shared auth libraries—according to your implementation.) + +## 5. Implementing `IAuthorityPluginRegistrar` +- Create a parameterless registrar class that returns your plug-in type name via `PluginType`. +- Use `AuthorityPluginRegistrationContext` to: + - Bind options (`AddOptions(pluginName).Bind(...)`). + - Register singletons for stores/enrichers using manifest metadata. + - Register any hosted bootstrap tasks (e.g., seed admin users). +- Always validate configuration inside `PostConfigure` and throw meaningful `InvalidOperationException` to fail fast during startup. +- Use the provided `ILoggerFactory` from DI; avoid static loggers or console writes. +- Example skeleton: + ```csharp + internal sealed class MyPluginRegistrar : IAuthorityPluginRegistrar + { + public string PluginType => "my-custom"; + + public void Register(AuthorityPluginRegistrationContext context) + { + var name = context.Plugin.Manifest.Name; + + context.Services.AddOptions(name) + .Bind(context.Plugin.Configuration) + .PostConfigure(opts => opts.Validate(name)); + + context.Services.AddSingleton(sp => + new MyIdentityProvider(context.Plugin, sp.GetRequiredService(), + sp.GetRequiredService(), + sp.GetRequiredService>())); + } + } + ``` + +## 6. Identity Provider Surface +- Implement `IIdentityProviderPlugin` to expose: + - `IUserCredentialStore` for password validation and user CRUD. + - `IClaimsEnricher` to append roles/attributes onto issued principals. + - Optional `IClientProvisioningStore` for machine-to-machine clients. + - `AuthorityIdentityProviderCapabilities` to advertise supported flows. +- Password guidance: + - Standard plug-in hashes via `ICryptoProvider` using Argon2id by default and emits PHC-compliant strings. Successful PBKDF2 logins trigger automatic rehashes so migrations complete gradually. See `docs/security/password-hashing.md` for tuning advice. + - Enforce password policies before hashing to avoid storing weak credentials. +- Health checks should probe backing stores (e.g., Mongo `ping`) and return `AuthorityPluginHealthResult` so `/ready` can surface issues. +- When supporting additional factors (e.g., TOTP), implement `SupportsMfa` and document the enrolment flow for resource servers. + +## 7. 
Configuration & Secrets +- Authority looks for manifests under `etc/authority.plugins/`. Each YAML file maps directly to a plug-in name. +- Support environment overrides using `STELLAOPS_AUTHORITY_PLUGINS__DESCRIPTORS____...`. +- Never store raw secrets in git: allow operators to supply them via `.local.yaml`, environment variables, or injected secret files. Document which keys are mandatory. +- Validate configuration as soon as the registrar runs; use explicit error messages to guide operators. The Standard plug-in now enforces complete bootstrap credentials (username + password) and positive lockout windows via `StandardPluginOptions.Validate`. +- Cross-reference bootstrap workflows with `docs/ops/authority_bootstrap.md` (to be published alongside CORE6) so operators can reuse the same payload formats for manual provisioning. +- `passwordHashing` inherits defaults from `authority.security.passwordHashing`. Override only when hardware constraints differ per plug-in: + ```yaml + passwordHashing: + algorithm: Argon2id + memorySizeInKib: 19456 + iterations: 2 + parallelism: 1 + ``` + Invalid values (≤0) fail fast during startup, and legacy PBKDF2 hashes rehash automatically once the new algorithm succeeds. + +### 7.1 Token Persistence Contract +- The host automatically persists every issued principal (access, refresh, device, authorization code) in `authority_tokens`. Plug-in code **must not** bypass this store; use the provided `IAuthorityTokenStore` helpers when implementing custom flows. +- When a plug-in disables a subject or client outside the standard handlers, call `IAuthorityTokenStore.UpdateStatusAsync(...)` for each affected token so revocation bundles stay consistent. +- Supply machine-friendly `revokedReason` codes (`compromised`, `rotation`, `policy`, `lifecycle`, etc.) and optional `revokedMetadata` entries when invalidating credentials. These flow straight into `revocation-bundle.json` and should remain deterministic. +- Token scopes should be normalised (trimmed, unique, ordinal sort) before returning from plug-in verification paths. `TokenPersistenceHandlers` will keep that ordering for downstream consumers. + +### 7.2 Claims & Enrichment Checklist +- Authority always sets the OpenID Connect basics: `sub`, `client_id`, `preferred_username`, optional `name`, and `role` (for password flows). Plug-ins must use `IClaimsEnricher` to append additional claims in a **deterministic** order (sort arrays, normalise casing) so resource servers can rely on stable shapes. +- Recommended enrichment keys: + - `stellaops.realm` – plug-in/tenant identifier so services can scope policies. + - `stellaops.subject.type` – values such as `human`, `service`, `bootstrap`. + - `groups` / `projects` – sorted arrays describing operator entitlements. +- Claims visible in tokens should mirror what `/token` and `/userinfo` emit. Avoid injecting sensitive PII directly; mark values with `ClassifiedString.Personal` inside the plug-in so audit sinks can tag them appropriately. +- For client-credential flows, remember to enrich both the client principal and the validation path (`TokenValidationHandlers`) so refresh flows keep the same metadata. + +### 7.3 Revocation Bundles & Reasons +- Use `IAuthorityRevocationStore` to record subject/client/token revocations when credentials are deleted or rotated. Stick to the standard categories (`token`, `subject`, `client`, `key`). 
+- Include a deterministic `reason` string and optional `reasonDescription` so operators understand *why* a subject was revoked when inspecting bundles offline. +- Plug-ins should populate `metadata` with stable keys (e.g., `revokedBy`, `sourcePlugin`, `ticketId`) to simplify SOC correlation. The keys must be lowercase, ASCII, and free of secrets—bundles are mirrored to air-gapped agents. + +## 8. Rate Limiting & Lockout Interplay +Rate limiting and account lockouts are complementary controls. Plug-ins must surface both deterministically so operators can correlate limiter hits with credential rejections. + +**Baseline quotas** (from `docs/dev/authority-rate-limit-tuning-outline.md`): + +| Endpoint | Default policy | Notes | +|----------|----------------|-------| +| `/token` | 30 requests / 60s, queue 0 | Drop to 10/60s for untrusted ranges; raise only with WAF + monitoring. | +| `/authorize` | 60 requests / 60s, queue 10 | Reduce carefully; interactive UX depends on headroom. | +| `/internal/*` | Disabled by default; recommended 5/60s when enabled | Keep queue 0 for bootstrap APIs. | + +**Retry metadata:** The middleware stamps `Retry-After` plus tags `authority.client_id`, `authority.remote_ip`, and `authority.endpoint`. Plug-ins should keep these tags intact when crafting responses or telemetry so dashboards remain consistent. + +**Lockout counters:** Treat lockouts as **subject-scoped** decisions. When multiple instances update counters, reuse the deterministic tie-breakers documented in `src/DEDUP_CONFLICTS_RESOLUTION_ALGO.md` (freshness overrides, precedence, and stable hashes) to avoid divergent lockout states across replicas. + +**Alerting hooks:** Emit structured logs/metrics when either the limiter or credential store rejects access. Suggested gauges include `aspnetcore_rate_limiting_rejections_total{limiter="authority-token"}` and any custom `auth.plugins..lockouts_total` counter. + +![Authority rate limit and lockout flow](../assets/authority/authority-rate-limit-flow.svg) + +_Source:_ `docs/assets/authority/authority-rate-limit-flow.mmd` + +## 9. Logging, Metrics, and Diagnostics +- Always log via the injected `ILogger`; include `pluginName` and correlation IDs where available. +- Activity/metric names should align with `AuthorityTelemetry` constants (`service.name=stellaops-authority`). +- Expose additional diagnostics via structured logging rather than writing custom HTTP endpoints; the host will integrate these into `/health` and `/ready`. +- Emit metrics with stable names (`auth.plugins..*`) when introducing custom instrumentation; coordinate with the Observability guild to reserve prefixes. + +## 10. Testing & Tooling +- Unit tests: use Mongo2Go (or similar) to exercise credential stores without hitting production infrastructure (`StandardUserCredentialStoreTests` is a template). +- Determinism: fix timestamps to UTC and sort outputs consistently; avoid random GUIDs unless stable. +- Smoke tests: launch `dotnet run --project src/Authority/StellaOps.Authority/StellaOps.Authority` with your plug-in under `StellaOps.Authority.PluginBinaries` and verify `/ready`. 
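+- Example readiness smoke test (illustrative; the port below is an assumption, use whatever binding your host configuration exposes):
+  ```bash
+  dotnet run --project src/Authority/StellaOps.Authority/StellaOps.Authority &
+  AUTHORITY_PID=$!
+  sleep 10                                  # allow plug-in discovery to finish
+  curl -fsS http://localhost:5000/ready     # expect HTTP 200 once plug-ins report healthy
+  kill "$AUTHORITY_PID"
+  ```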
+- Example verification snippet: + ```csharp + [Fact] + public async Task VerifyPasswordAsync_ReturnsSuccess() + { + var store = CreateCredentialStore(); + await store.UpsertUserAsync(new AuthorityUserRegistration("alice", "Pa55!", null, null, false, + Array.Empty(), new Dictionary()), CancellationToken.None); + + var result = await store.VerifyPasswordAsync("alice", "Pa55!", CancellationToken.None); + Assert.True(result.Succeeded); + Assert.True(result.User?.Roles.Count == 0); + } + ``` + +## 11. Packaging & Delivery +- Output assembly should follow `StellaOps.Authority.Plugin..dll` so the host’s search pattern picks it up. +- Place the compiled DLL plus dependencies under `StellaOps.Authority.PluginBinaries` for offline deployments; include hashes/signatures in release notes (Security Guild guidance forthcoming). +- Document any external prerequisites (e.g., CA cert bundle) in your plug-in README. +- Update `etc/authority.plugins/.yaml` samples and include deterministic SHA256 hashes for optional bootstrap payloads when distributing Offline Kit artefacts. + +[^ldap-rfc]: Lightweight Directory Access Protocol (LDAPv3) specification — [RFC 4511](https://datatracker.ietf.org/doc/html/rfc4511). + +## 12. Checklist & Handoff +- ✅ Capabilities declared and validated in automated tests. +- ✅ Bootstrap workflows documented (if `bootstrap` capability used) and repeatable. +- ✅ Local smoke test + unit/integration suites green (`dotnet test`). +- ✅ Operational docs updated: configuration keys, secrets guidance, troubleshooting. +- Submit the developer guide update referencing PLG6/DOC4 and tag DevEx + Docs reviewers for sign-off. + +--- +Mermaid sources for the embedded diagrams live under `docs/assets/authority/`. Regenerate the SVG assets with your preferred renderer before committing future updates so the visuals stay in sync with the `.mmd` definitions. diff --git a/docs/dev/BUILDX_PLUGIN_QUICKSTART.md b/docs/dev/BUILDX_PLUGIN_QUICKSTART.md index dadcb2dd..61a7fd29 100644 --- a/docs/dev/BUILDX_PLUGIN_QUICKSTART.md +++ b/docs/dev/BUILDX_PLUGIN_QUICKSTART.md @@ -1,115 +1,115 @@ -# BuildX Generator Quickstart - -This quickstart explains how to run the StellaOps **BuildX SBOM generator** offline, verify the CAS handshake, and emit OCI descriptors that downstream services can attest. - -## 1. Prerequisites - -- Docker 25+ with BuildKit enabled (`docker buildx` available). -- .NET 10 (preview) SDK matching the repository `global.json`. -- Optional: network access to a StellaOps Attestor endpoint (the quickstart uses a mock service). - -## 2. Publish the plug-in binaries - -The BuildX generator publishes as a .NET self-contained executable with its manifest under `plugins/scanner/buildx/`. - -```bash -# From the repository root -DOTNET_CLI_HOME="${PWD}/.dotnet" \ -dotnet publish src/StellaOps.Scanner.Sbomer.BuildXPlugin/StellaOps.Scanner.Sbomer.BuildXPlugin.csproj \ - -c Release \ - -o out/buildx -``` - +# BuildX Generator Quickstart + +This quickstart explains how to run the StellaOps **BuildX SBOM generator** offline, verify the CAS handshake, and emit OCI descriptors that downstream services can attest. + +## 1. Prerequisites + +- Docker 25+ with BuildKit enabled (`docker buildx` available). +- .NET 10 (preview) SDK matching the repository `global.json`. +- Optional: network access to a StellaOps Attestor endpoint (the quickstart uses a mock service). + +## 2. 
Publish the plug-in binaries + +The BuildX generator publishes as a .NET self-contained executable with its manifest under `plugins/scanner/buildx/`. + +```bash +# From the repository root +DOTNET_CLI_HOME="${PWD}/.dotnet" \ +dotnet publish src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/StellaOps.Scanner.Sbomer.BuildXPlugin.csproj \ + -c Release \ + -o out/buildx +``` + - `out/buildx/` now contains `StellaOps.Scanner.Sbomer.BuildXPlugin.dll` and the manifest `stellaops.sbom-indexer.manifest.json`. - `plugins/scanner/buildx/StellaOps.Scanner.Sbomer.BuildXPlugin/` receives the same artefacts for release packaging. - The CI pipeline also tars and signs (SHA-256 manifest) the OS analyzer plug-ins located under `plugins/scanner/analyzers/os/` so they ship alongside the BuildX generator artefacts. - -## 3. Verify the CAS handshake - -```bash -dotnet out/buildx/StellaOps.Scanner.Sbomer.BuildXPlugin.dll handshake \ - --manifest out/buildx \ - --cas out/cas -``` - -The command performs a deterministic probe write (`sha256`) into the provided CAS directory and prints the resolved path. - -## 4. Emit a descriptor + provenance placeholder - -1. Build or identify the image you want to describe and capture its digest: - - ```bash - docker buildx build --load -t stellaops/buildx-demo:ci samples/ci/buildx-demo - DIGEST=$(docker image inspect stellaops/buildx-demo:ci --format '{{index .RepoDigests 0}}') - ``` - -2. Generate a CycloneDX SBOM for the built image (any tool works; here we use `docker sbom`): - - ```bash - docker sbom stellaops/buildx-demo:ci --format cyclonedx-json > out/buildx-sbom.cdx.json - ``` - -3. Invoke the `descriptor` command, pointing at the SBOM file and optional metadata: - - ```bash - dotnet out/buildx/StellaOps.Scanner.Sbomer.BuildXPlugin.dll descriptor \ - --manifest out/buildx \ - --image "$DIGEST" \ - --sbom out/buildx-sbom.cdx.json \ - --sbom-name buildx-sbom.cdx.json \ - --artifact-type application/vnd.stellaops.sbom.layer+json \ - --sbom-format cyclonedx-json \ - --sbom-kind inventory \ - --repository git.stella-ops.org/stellaops/buildx-demo \ - --build-ref $(git rev-parse HEAD) \ - > out/buildx-descriptor.json - ``` - + +## 3. Verify the CAS handshake + +```bash +dotnet out/buildx/StellaOps.Scanner.Sbomer.BuildXPlugin.dll handshake \ + --manifest out/buildx \ + --cas out/cas +``` + +The command performs a deterministic probe write (`sha256`) into the provided CAS directory and prints the resolved path. + +## 4. Emit a descriptor + provenance placeholder + +1. Build or identify the image you want to describe and capture its digest: + + ```bash + docker buildx build --load -t stellaops/buildx-demo:ci samples/ci/buildx-demo + DIGEST=$(docker image inspect stellaops/buildx-demo:ci --format '{{index .RepoDigests 0}}') + ``` + +2. Generate a CycloneDX SBOM for the built image (any tool works; here we use `docker sbom`): + + ```bash + docker sbom stellaops/buildx-demo:ci --format cyclonedx-json > out/buildx-sbom.cdx.json + ``` + +3. 
Invoke the `descriptor` command, pointing at the SBOM file and optional metadata: + + ```bash + dotnet out/buildx/StellaOps.Scanner.Sbomer.BuildXPlugin.dll descriptor \ + --manifest out/buildx \ + --image "$DIGEST" \ + --sbom out/buildx-sbom.cdx.json \ + --sbom-name buildx-sbom.cdx.json \ + --artifact-type application/vnd.stellaops.sbom.layer+json \ + --sbom-format cyclonedx-json \ + --sbom-kind inventory \ + --repository git.stella-ops.org/stellaops/buildx-demo \ + --build-ref $(git rev-parse HEAD) \ + > out/buildx-descriptor.json + ``` + The output JSON captures: - OCI artifact descriptor including size, digest, and annotations (`org.stellaops.*`). - Provenance placeholder (`expectedDsseSha256`, `nonce`, `attestorUri` when provided). `nonce` is derived deterministically from the image + SBOM metadata so repeated runs produce identical placeholders for identical inputs. - Generator metadata and deterministic timestamps. - -## 5. (Optional) Send the placeholder to an Attestor - -The plug-in can POST the descriptor metadata to an Attestor endpoint, returning once it receives an HTTP 202. - -```bash -python3 - <<'PY' & -from http.server import BaseHTTPRequestHandler, HTTPServer -class Handler(BaseHTTPRequestHandler): - def do_POST(self): - _ = self.rfile.read(int(self.headers.get('Content-Length', 0))) - self.send_response(202); self.end_headers(); self.wfile.write(b'accepted') - def log_message(self, fmt, *args): - return -server = HTTPServer(('127.0.0.1', 8085), Handler) -try: - server.serve_forever() -except KeyboardInterrupt: - pass -finally: - server.server_close() -PY -MOCK_PID=$! - -dotnet out/buildx/StellaOps.Scanner.Sbomer.BuildXPlugin.dll descriptor \ - --manifest out/buildx \ - --image "$DIGEST" \ - --sbom out/buildx-sbom.cdx.json \ - --attestor http://127.0.0.1:8085/provenance \ - --attestor-token "$STELLAOPS_ATTESTOR_TOKEN" \ - > out/buildx-descriptor.json - -kill $MOCK_PID -``` - -Set `STELLAOPS_ATTESTOR_TOKEN` (or pass `--attestor-token`) when the Attestor requires bearer authentication. Use `--attestor-insecure` for lab environments with self-signed certificates. - -## 6. CI workflow example - + +## 5. (Optional) Send the placeholder to an Attestor + +The plug-in can POST the descriptor metadata to an Attestor endpoint, returning once it receives an HTTP 202. + +```bash +python3 - <<'PY' & +from http.server import BaseHTTPRequestHandler, HTTPServer +class Handler(BaseHTTPRequestHandler): + def do_POST(self): + _ = self.rfile.read(int(self.headers.get('Content-Length', 0))) + self.send_response(202); self.end_headers(); self.wfile.write(b'accepted') + def log_message(self, fmt, *args): + return +server = HTTPServer(('127.0.0.1', 8085), Handler) +try: + server.serve_forever() +except KeyboardInterrupt: + pass +finally: + server.server_close() +PY +MOCK_PID=$! + +dotnet out/buildx/StellaOps.Scanner.Sbomer.BuildXPlugin.dll descriptor \ + --manifest out/buildx \ + --image "$DIGEST" \ + --sbom out/buildx-sbom.cdx.json \ + --attestor http://127.0.0.1:8085/provenance \ + --attestor-token "$STELLAOPS_ATTESTOR_TOKEN" \ + > out/buildx-descriptor.json + +kill $MOCK_PID +``` + +Set `STELLAOPS_ATTESTOR_TOKEN` (or pass `--attestor-token`) when the Attestor requires bearer authentication. Use `--attestor-insecure` for lab environments with self-signed certificates. + +## 6. CI workflow example + A reusable GitHub Actions workflow is provided under `samples/ci/buildx-demo/github-actions-buildx-demo.yml`. 
It publishes the plug-in, runs the handshake, builds the demo image, emits a descriptor, and uploads both the descriptor and the mock-Attestor request as artefacts. Add the workflow to your repository (or call it via `workflow_call`) and adjust the SBOM path + Attestor URL as needed. The workflow also re-runs the `descriptor` command and diffs the results (ignoring the `generatedAt` timestamp) so you catch regressions that would break deterministic CI use. diff --git a/docs/dev/EXCITITOR_STATEMENT_BACKFILL.md b/docs/dev/EXCITITOR_STATEMENT_BACKFILL.md index 11af06ef..ab95c7b4 100644 --- a/docs/dev/EXCITITOR_STATEMENT_BACKFILL.md +++ b/docs/dev/EXCITITOR_STATEMENT_BACKFILL.md @@ -1,86 +1,86 @@ -# Excititor Statement Backfill Runbook - -Last updated: 2025-10-19 - -## Overview - -Use this runbook when you need to rebuild the `vex.statements` collection from historical raw documents. Typical scenarios: - -- Upgrading the statement schema (e.g., adding severity/KEV/EPSS signals). -- Recovering from a partial ingest outage where statements were never persisted. -- Seeding a freshly provisioned Excititor deployment from an existing raw archive. - -Backfill operates server-side via the Excititor WebService and reuses the same pipeline that powers the `/excititor/statements` ingestion endpoint. Each raw document is normalized, signed metadata is preserved, and duplicate statements are skipped unless the run is forced. - -## Prerequisites - -1. **Connectivity to Excititor WebService** – the CLI uses the backend URL configured in `stellaops.yml` or the `--backend-url` argument. -2. **Authority credentials** – the CLI honours the existing Authority client configuration; ensure the caller has permission to invoke admin endpoints. -3. **Mongo replica set** (recommended) – causal consistency guarantees rely on majority read/write concerns. Standalone deployment works but skips cross-document transactions. - -## CLI command - -``` -stellaops excititor backfill-statements \ - [--retrieved-since ] \ - [--force] \ - [--batch-size ] \ - [--max-documents ] -``` - -| Option | Description | -| ------ | ----------- | -| `--retrieved-since` | Only process raw documents fetched on or after the specified timestamp (UTC by default). | -| `--force` | Reprocess documents even if matching statements already exist (useful after schema upgrades). | -| `--batch-size` | Number of raw documents pulled per batch (default `100`). | -| `--max-documents` | Optional hard limit on the number of raw documents to evaluate. | - -Example – replay the last 48 hours of Red Hat ingest while keeping existing statements: - -``` -stellaops excititor backfill-statements \ - --retrieved-since "$(date -u -d '48 hours ago' +%Y-%m-%dT%H:%M:%SZ)" -``` - -Example – full replay with forced overwrites, capped at 2,000 documents: - -``` -stellaops excititor backfill-statements --force --max-documents 2000 -``` - -The command returns a summary similar to: - -``` -Backfill completed: evaluated 450, backfilled 180, claims written 320, skipped 270, failures 0. -``` - -## Behaviour - -- Raw documents are streamed in ascending `retrievedAt` order. -- Each document is normalized using the registered VEX normalizers (CSAF, CycloneDX, OpenVEX). -- Statements are appended through the same `IVexClaimStore.AppendAsync` path that powers `/excititor/statements`. -- Duplicate detection compares `Document.Digest`; duplicates are skipped unless `--force` is specified. -- Failures are logged with the offending digest and continue with the next document. 
- -## Observability - -- CLI logs aggregate counts and the backend logs per-digest warnings or errors. -- Mongo writes carry majority write concern; expect backfill throughput to match ingest baselines (≈5 seconds warm, 30 seconds cold). -- Monitor the `excititor.storage.backfill` log scope for detailed telemetry. - -## Post-run verification - -1. Inspect the `vex.statements` collection for the targeted window (check `InsertedAt`). -2. Re-run the Excititor storage test suite if possible: - ``` - dotnet test src/StellaOps.Excititor.Storage.Mongo.Tests/StellaOps.Excititor.Storage.Mongo.Tests.csproj - ``` -3. Optionally, call `/excititor/statements/{vulnerabilityId}/{productKey}` to confirm the expected statements exist. - -## Rollback - -If a forced run produced incorrect statements, use the standard Mongo rollback procedure: - -1. Identify the `InsertedAt` window for the backfill run. -2. Delete affected records from `vex.statements` (and any downstream exports if applicable). -3. Rerun the backfill command with corrected parameters. +# Excititor Statement Backfill Runbook + +Last updated: 2025-10-19 + +## Overview + +Use this runbook when you need to rebuild the `vex.statements` collection from historical raw documents. Typical scenarios: + +- Upgrading the statement schema (e.g., adding severity/KEV/EPSS signals). +- Recovering from a partial ingest outage where statements were never persisted. +- Seeding a freshly provisioned Excititor deployment from an existing raw archive. + +Backfill operates server-side via the Excititor WebService and reuses the same pipeline that powers the `/excititor/statements` ingestion endpoint. Each raw document is normalized, signed metadata is preserved, and duplicate statements are skipped unless the run is forced. + +## Prerequisites + +1. **Connectivity to Excititor WebService** – the CLI uses the backend URL configured in `stellaops.yml` or the `--backend-url` argument. +2. **Authority credentials** – the CLI honours the existing Authority client configuration; ensure the caller has permission to invoke admin endpoints. +3. **Mongo replica set** (recommended) – causal consistency guarantees rely on majority read/write concerns. Standalone deployment works but skips cross-document transactions. + +## CLI command + +``` +stellaops excititor backfill-statements \ + [--retrieved-since ] \ + [--force] \ + [--batch-size ] \ + [--max-documents ] +``` + +| Option | Description | +| ------ | ----------- | +| `--retrieved-since` | Only process raw documents fetched on or after the specified timestamp (UTC by default). | +| `--force` | Reprocess documents even if matching statements already exist (useful after schema upgrades). | +| `--batch-size` | Number of raw documents pulled per batch (default `100`). | +| `--max-documents` | Optional hard limit on the number of raw documents to evaluate. | + +Example – replay the last 48 hours of Red Hat ingest while keeping existing statements: + +``` +stellaops excititor backfill-statements \ + --retrieved-since "$(date -u -d '48 hours ago' +%Y-%m-%dT%H:%M:%SZ)" +``` + +Example – full replay with forced overwrites, capped at 2,000 documents: + +``` +stellaops excititor backfill-statements --force --max-documents 2000 +``` + +The command returns a summary similar to: + +``` +Backfill completed: evaluated 450, backfilled 180, claims written 320, skipped 270, failures 0. +``` + +## Behaviour + +- Raw documents are streamed in ascending `retrievedAt` order. 
+- Each document is normalized using the registered VEX normalizers (CSAF, CycloneDX, OpenVEX). +- Statements are appended through the same `IVexClaimStore.AppendAsync` path that powers `/excititor/statements`. +- Duplicate detection compares `Document.Digest`; duplicates are skipped unless `--force` is specified. +- Failures are logged with the offending digest and continue with the next document. + +## Observability + +- CLI logs aggregate counts and the backend logs per-digest warnings or errors. +- Mongo writes carry majority write concern; expect backfill throughput to match ingest baselines (≈5 seconds warm, 30 seconds cold). +- Monitor the `excititor.storage.backfill` log scope for detailed telemetry. + +## Post-run verification + +1. Inspect the `vex.statements` collection for the targeted window (check `InsertedAt`). +2. Re-run the Excititor storage test suite if possible: + ``` + dotnet test src/Excititor/__Tests/StellaOps.Excititor.Storage.Mongo.Tests/StellaOps.Excititor.Storage.Mongo.Tests.csproj + ``` +3. Optionally, call `/excititor/statements/{vulnerabilityId}/{productKey}` to confirm the expected statements exist. + +## Rollback + +If a forced run produced incorrect statements, use the standard Mongo rollback procedure: + +1. Identify the `InsertedAt` window for the backfill run. +2. Delete affected records from `vex.statements` (and any downstream exports if applicable). +3. Rerun the backfill command with corrected parameters. diff --git a/docs/dev/authority-dpop-mtls-plan.md b/docs/dev/authority-dpop-mtls-plan.md index 7cc6e792..dd5b7f83 100644 --- a/docs/dev/authority-dpop-mtls-plan.md +++ b/docs/dev/authority-dpop-mtls-plan.md @@ -1,146 +1,146 @@ -# Authority DPoP & mTLS Implementation Plan (2025-10-19) - -## Purpose -- Provide the implementation blueprint for AUTH-DPOP-11-001 and AUTH-MTLS-11-002. -- Unify sender-constraint validation across Authority, downstream services, and clients. -- Capture deterministic, testable steps that unblock UI/Signer guilds depending on DPoP/mTLS hardening. - -## Scope -- Token endpoint validation, issuance, and storage changes inside `StellaOps.Authority`. -- Shared security primitives consumed by Authority, Scanner, Signer, CLI, and UI. -- Operator-facing configuration, auditing, and observability. -- Out of scope: PoE enforcement (Signer) and CLI/UI client UX; those teams consume the new capabilities. - -> **Status update (2025-10-19):** `ValidateDpopProofHandler`, `AuthorityClientCertificateValidator`, and the supporting storage/audit plumbing now live in `src/StellaOps.Authority`. DPoP proofs populate `cnf.jkt`, mTLS bindings enforce certificate thumbprints via `cnf.x5t#S256`, and token documents persist the sender constraint metadata. In-memory nonce issuance is wired (Redis implementation to follow). Documentation and configuration references were updated (`docs/11_AUTHORITY.md`). Targeted unit/integration tests were added; running the broader test suite is currently blocked by pre-existing `StellaOps.Concelier.Storage.Mongo` build errors. +# Authority DPoP & mTLS Implementation Plan (2025-10-19) + +## Purpose +- Provide the implementation blueprint for AUTH-DPOP-11-001 and AUTH-MTLS-11-002. +- Unify sender-constraint validation across Authority, downstream services, and clients. +- Capture deterministic, testable steps that unblock UI/Signer guilds depending on DPoP/mTLS hardening. + +## Scope +- Token endpoint validation, issuance, and storage changes inside `StellaOps.Authority`. 
+- Shared security primitives consumed by Authority, Scanner, Signer, CLI, and UI. +- Operator-facing configuration, auditing, and observability. +- Out of scope: PoE enforcement (Signer) and CLI/UI client UX; those teams consume the new capabilities. + +> **Status update (2025-10-19):** `ValidateDpopProofHandler`, `AuthorityClientCertificateValidator`, and the supporting storage/audit plumbing now live in `src/Authority/StellaOps.Authority`. DPoP proofs populate `cnf.jkt`, mTLS bindings enforce certificate thumbprints via `cnf.x5t#S256`, and token documents persist the sender constraint metadata. In-memory nonce issuance is wired (Redis implementation to follow). Documentation and configuration references were updated (`docs/11_AUTHORITY.md`). Targeted unit/integration tests were added; running the broader test suite is currently blocked by pre-existing `StellaOps.Concelier.Storage.Mongo` build errors. > > **Status update (2025-10-20):** Redis-backed nonce configuration is exposed through `security.senderConstraints.dpop.nonce` with sample YAML (`etc/authority.yaml.sample`) and architecture docs refreshed. Operator guide now includes concrete Redis/required audiences snippet; nonce challenge regression remains covered by `ValidateDpopProof_IssuesNonceChallenge_WhenNonceMissing`. > > **Status update (2025-10-23):** mTLS enforcement now honours `security.senderConstraints.mtls.enforceForAudiences`, automatically rejecting non-mTLS clients targeting audiences such as `signer`. Certificate bindings validate thumbprint, issuer, subject, serial number, and SAN values, producing deterministic error codes for operators. Introspection responses include `cnf.x5t#S256`, and new unit tests cover audience enforcement, binding mismatches, and bootstrap storage. Docs/sample config updated accordingly. - -## Design Summary -- Extract the existing Scanner `DpopProofValidator` stack into a shared `StellaOps.Auth.Security` library used by Authority and resource servers. -- Extend Authority configuration (`authority.yaml`) with strongly-typed `senderConstraints.dpop` and `senderConstraints.mtls` sections (map to sample already shown in architecture doc). -- Require DPoP proofs on `/token` when the registered client policy is `senderConstraint=dpop`; bind issued access tokens via `cnf.jkt`. -- Introduce Authority-managed nonce issuance for “high value” audiences (default: `signer`, `attestor`) with Redis-backed persistence and deterministic auditing. -- Enable OAuth 2.0 mTLS (RFC 8705) by storing certificate bindings per client, requesting client certificates at TLS termination, and stamping `cnf.x5t#S256` into issued tokens plus introspection output. -- Surface structured logs and counters for both DPoP and mTLS flows; provide integration tests that cover success, replay, invalid proof, and certificate mismatch cases. - -## AUTH-DPOP-11-001 — Proof Validation & Nonce Handling - -**Shared validator** -- Move `DpopProofValidator`, option types, and replay cache interfaces from `StellaOps.Scanner.Core` into a new assembly `StellaOps.Auth.Security`. -- Provide pluggable caches: `InMemoryDpopReplayCache` (existing) and new `RedisDpopReplayCache` (leveraging the Authority Redis connection). -- Ensure the validator exposes the validated `SecurityKey`, `jti`, and `iat` so Authority can construct the `cnf` claim and compute nonce expiry. 
- -**Configuration model** -- Extend `StellaOpsAuthorityOptions.Security` with a `SenderConstraints` property containing: - - `Dpop` (`enabled`, `allowedAlgorithms`, `maxAgeSeconds`, `clockSkewSeconds`, `replayWindowSeconds`, `nonce` settings with `enabled`, `ttlSeconds`, `requiredAudiences`, `maxIssuancePerMinute`). - - `Mtls` (`enabled`, `requireChainValidation`, `clientCaBundle`, `allowedSubjectPatterns`, `allowedSanTypes`). -- Bind from YAML (`authority.security.senderConstraints.*`) while preserving backwards compatibility (defaults keep both disabled). - -**Token endpoint pipeline** -- Introduce a scoped OpenIddict handler `ValidateDpopProofHandler` inserted before `ValidateClientCredentialsHandler`. -- Determine the required sender constraint from client metadata: - - Add `AuthorityClientMetadataKeys.SenderConstraint` storing `dpop` or `mtls`. - - Optionally allow per-client overrides for nonce requirement. -- When `dpop` is required: - - Read the `DPoP` header from the ASP.NET request, reject with `invalid_token` + `WWW-Authenticate: DPoP error="invalid_dpop_proof"` if absent. - - Call the shared validator with method/URI. Enforce algorithm allowlist and `iat` window from options. - - Persist the `jkt` thumbprint plus replay cache state in the OpenIddict transaction (`AuthorityOpenIddictConstants.DpopKeyThumbprintProperty`, `DpopIssuedAtProperty`). - - When the requested audience intersects `SenderConstraints.Dpop.Nonce.RequiredAudiences`, require `nonce` in the proof; on first failure respond with HTTP 401, `error="use_dpop_nonce"`, and include `DPoP-Nonce` header (see nonce note below). Cache the rejection reason for audit logging. - -**Nonce service** -- Add `IDpopNonceStore` with methods `IssueAsync(audience, clientId, jkt)` and `TryConsumeAsync(nonce, audience, clientId, jkt)`. -- Default implementation `RedisDpopNonceStore` storing SHA-256 hashes of nonces keyed by `audience:clientId:jkt`. TTL comes from `SenderConstraints.Dpop.Nonce.Ttl`. -- Create helper `DpopNonceIssuer` used by `ValidateDpopProofHandler` to issue nonces when missing/expired, enforcing issuance rate limits (per options) and tagging audit/log records. -- On successful validation (nonce supplied and consumed) stamp metadata into the transaction for auditing. -- Update `ClientCredentialsHandlers` to observe nonce enforcement: when a nonce challenge was sent, emit structured audit with `nonce_issued`, `audiences`, and `retry`. - -**Token issuance** -- In `HandleClientCredentialsHandler`, if the transaction contains a validated DPoP key: - - Build `cnf.jkt` using thumbprint from validator. - - Include `auth_time`/`dpop_jti` as needed for diagnostics. - - Persist the thumbprint alongside token metadata in Mongo (extend `AuthorityTokenDocument` with `SenderConstraint`, `KeyThumbprint`, `Nonce` fields). - -**Auditing & observability** -- Emit new audit events: - - `authority.dpop.proof.validated` (success/failure, clientId, audience, thumbprint, nonce status, jti). - - `authority.dpop.nonce.issued` and `authority.dpop.nonce.consumed`. -- Metrics (Prometheus style): - - `authority_dpop_validations_total{result,reason}`. - - `authority_dpop_nonce_issued_total{audience}` and `authority_dpop_nonce_fails_total{reason}`. -- Structured logs include `authority.sender_constraint=dpop`, `authority.dpop_thumbprint`, `authority.dpop_nonce`. - -**Testing** -- Unit tests for the handler pipeline using fake OpenIddict transactions. -- Replay/nonce tests with in-memory and Redis stores. 
-- Integration tests in `StellaOps.Authority.Tests` covering: - - Valid DPoP proof issuing `cnf.jkt`. - - Missing header → challenge with nonce. - - Replayed `jti` rejected. - - Invalid nonce rejected even after issuance. -- Contract tests to ensure `/.well-known/openid-configuration` advertises `dpop_signing_alg_values_supported` and `dpop_nonce_supported` when enabled. - -## AUTH-MTLS-11-002 — Certificate-Bound Tokens - -**Configuration model** -- Reuse `SenderConstraints.Mtls` described above; include: - - `enforceForAudiences` list (defaults `signer`, `attestor`, `scheduler`). - - `certificateRotationGraceSeconds` for overlap. - - `allowedClientCertificateAuthorities` absolute paths. - -**Kestrel/TLS pipeline** -- Configure Kestrel with `ClientCertificateMode.AllowCertificate` globally and implement middleware that enforces certificate presence only when the resolved client requires mTLS. -- Add `IAuthorityClientCertificateValidator` that validates presented certificate chain, SANs (`dns`, `uri`, optional SPIFFE), and thumbprint matches one of the stored bindings. -- Cache validation results per connection id to avoid rehashing on every request. - -**Client registration & storage** -- Extend `AuthorityClientDocument` with `List` containing: - - `Thumbprint`, `SerialNumber`, `Subject`, `NotBefore`, `NotAfter`, `Sans`, `CreatedAt`, `UpdatedAt`, `Label`. -- Provide admin API mutations (`/admin/clients/{id}/certificates`) for ops tooling (deferred implementation but schema ready). -- Update plugin provisioning store (`StandardClientProvisioningStore`) to map descriptors with certificate bindings and `senderConstraint`. -- Persist binding state in Mongo migrations (index on `{clientId, thumbprint}`). - -**Token issuance & introspection** -- Add a transaction property capturing the validated client certificate thumbprint. -- `HandleClientCredentialsHandler`: - - When mTLS required, ensure certificate info present; reject otherwise. - - Stamp `cnf` claim: `principal.SetClaim("cnf", JsonSerializer.Serialize(new { x5t#S256 = thumbprint }))`. - - Store binding metadata in issued token document for audit. -- Update `ValidateAccessTokenHandler` and introspection responses to surface `cnf.x5t#S256`. -- Ensure refresh tokens (if ever enabled) copy the binding data. - -**Auditing & observability** -- Audit events: - - `authority.mtls.handshake` (success/failure, clientId, thumbprint, issuer, subject). - - `authority.mtls.binding.missing` when a required client posts without a cert. -- Metrics: - - `authority_mtls_handshakes_total{result}`. - - `authority_mtls_certificate_rotations_total`. -- Logs include `authority.sender_constraint=mtls`, `authority.mtls_thumbprint`, `authority.mtls_subject`. - -**Testing** -- Unit tests for certificate validation rules (SAN mismatches, expiry, CA trust). -- Integration tests running Kestrel with test certificates: - - Successful token issuance with bound certificate. - - Request without certificate → `invalid_client`. - - Token introspection reveals `cnf.x5t#S256`. - - Rotation scenario (old + new cert allowed during grace window). - -## Implementation Checklist - -**DPoP work-stream** -1. Extract shared validator into `StellaOps.Auth.Security`; update Scanner references. -2. Introduce configuration classes and bind from YAML/environment. -3. Implement nonce store (Redis + in-memory), handler integration, and OpenIddict transaction plumbing. -4. Stamp `cnf.jkt`, audit events, and metrics; update Mongo documents and migrations. -5. 
Extend docs: `docs/ARCHITECTURE_AUTHORITY.md`, `docs/security/audit-events.md`, `docs/security/rate-limits.md`, CLI/UI references. - -**mTLS work-stream** -1. Extend client document/schema and provisioning stores with certificate bindings + sender constraint flag. -2. Configure Kestrel/middleware for optional client certificates and validation service. -3. Update token issuance/introspection to honour certificate bindings and emit `cnf.x5t#S256`. -4. Add auditing/metrics and integration tests (happy path + failure). -5. Refresh operator documentation (`docs/ops/authority-backup-restore.md`, `docs/ops/authority-monitoring.md`, sample `authority.yaml`) to cover certificate lifecycle. - -Both streams should conclude with `dotnet test src/StellaOps.Authority.sln` and documentation cross-links so dependent guilds can unblock UI/Signer work. + +## Design Summary +- Extract the existing Scanner `DpopProofValidator` stack into a shared `StellaOps.Auth.Security` library used by Authority and resource servers. +- Extend Authority configuration (`authority.yaml`) with strongly-typed `senderConstraints.dpop` and `senderConstraints.mtls` sections (map to sample already shown in architecture doc). +- Require DPoP proofs on `/token` when the registered client policy is `senderConstraint=dpop`; bind issued access tokens via `cnf.jkt`. +- Introduce Authority-managed nonce issuance for “high value” audiences (default: `signer`, `attestor`) with Redis-backed persistence and deterministic auditing. +- Enable OAuth 2.0 mTLS (RFC 8705) by storing certificate bindings per client, requesting client certificates at TLS termination, and stamping `cnf.x5t#S256` into issued tokens plus introspection output. +- Surface structured logs and counters for both DPoP and mTLS flows; provide integration tests that cover success, replay, invalid proof, and certificate mismatch cases. + +## AUTH-DPOP-11-001 — Proof Validation & Nonce Handling + +**Shared validator** +- Move `DpopProofValidator`, option types, and replay cache interfaces from `StellaOps.Scanner.Core` into a new assembly `StellaOps.Auth.Security`. +- Provide pluggable caches: `InMemoryDpopReplayCache` (existing) and new `RedisDpopReplayCache` (leveraging the Authority Redis connection). +- Ensure the validator exposes the validated `SecurityKey`, `jti`, and `iat` so Authority can construct the `cnf` claim and compute nonce expiry. + +**Configuration model** +- Extend `StellaOpsAuthorityOptions.Security` with a `SenderConstraints` property containing: + - `Dpop` (`enabled`, `allowedAlgorithms`, `maxAgeSeconds`, `clockSkewSeconds`, `replayWindowSeconds`, `nonce` settings with `enabled`, `ttlSeconds`, `requiredAudiences`, `maxIssuancePerMinute`). + - `Mtls` (`enabled`, `requireChainValidation`, `clientCaBundle`, `allowedSubjectPatterns`, `allowedSanTypes`). +- Bind from YAML (`authority.security.senderConstraints.*`) while preserving backwards compatibility (defaults keep both disabled). + +**Token endpoint pipeline** +- Introduce a scoped OpenIddict handler `ValidateDpopProofHandler` inserted before `ValidateClientCredentialsHandler`. +- Determine the required sender constraint from client metadata: + - Add `AuthorityClientMetadataKeys.SenderConstraint` storing `dpop` or `mtls`. + - Optionally allow per-client overrides for nonce requirement. +- When `dpop` is required: + - Read the `DPoP` header from the ASP.NET request, reject with `invalid_token` + `WWW-Authenticate: DPoP error="invalid_dpop_proof"` if absent. + - Call the shared validator with method/URI. 
Enforce algorithm allowlist and `iat` window from options. + - Persist the `jkt` thumbprint plus replay cache state in the OpenIddict transaction (`AuthorityOpenIddictConstants.DpopKeyThumbprintProperty`, `DpopIssuedAtProperty`). + - When the requested audience intersects `SenderConstraints.Dpop.Nonce.RequiredAudiences`, require `nonce` in the proof; on first failure respond with HTTP 401, `error="use_dpop_nonce"`, and include `DPoP-Nonce` header (see nonce note below). Cache the rejection reason for audit logging. + +**Nonce service** +- Add `IDpopNonceStore` with methods `IssueAsync(audience, clientId, jkt)` and `TryConsumeAsync(nonce, audience, clientId, jkt)`. +- Default implementation `RedisDpopNonceStore` storing SHA-256 hashes of nonces keyed by `audience:clientId:jkt`. TTL comes from `SenderConstraints.Dpop.Nonce.Ttl`. +- Create helper `DpopNonceIssuer` used by `ValidateDpopProofHandler` to issue nonces when missing/expired, enforcing issuance rate limits (per options) and tagging audit/log records. +- On successful validation (nonce supplied and consumed) stamp metadata into the transaction for auditing. +- Update `ClientCredentialsHandlers` to observe nonce enforcement: when a nonce challenge was sent, emit structured audit with `nonce_issued`, `audiences`, and `retry`. + +**Token issuance** +- In `HandleClientCredentialsHandler`, if the transaction contains a validated DPoP key: + - Build `cnf.jkt` using thumbprint from validator. + - Include `auth_time`/`dpop_jti` as needed for diagnostics. + - Persist the thumbprint alongside token metadata in Mongo (extend `AuthorityTokenDocument` with `SenderConstraint`, `KeyThumbprint`, `Nonce` fields). + +**Auditing & observability** +- Emit new audit events: + - `authority.dpop.proof.validated` (success/failure, clientId, audience, thumbprint, nonce status, jti). + - `authority.dpop.nonce.issued` and `authority.dpop.nonce.consumed`. +- Metrics (Prometheus style): + - `authority_dpop_validations_total{result,reason}`. + - `authority_dpop_nonce_issued_total{audience}` and `authority_dpop_nonce_fails_total{reason}`. +- Structured logs include `authority.sender_constraint=dpop`, `authority.dpop_thumbprint`, `authority.dpop_nonce`. + +**Testing** +- Unit tests for the handler pipeline using fake OpenIddict transactions. +- Replay/nonce tests with in-memory and Redis stores. +- Integration tests in `StellaOps.Authority.Tests` covering: + - Valid DPoP proof issuing `cnf.jkt`. + - Missing header → challenge with nonce. + - Replayed `jti` rejected. + - Invalid nonce rejected even after issuance. +- Contract tests to ensure `/.well-known/openid-configuration` advertises `dpop_signing_alg_values_supported` and `dpop_nonce_supported` when enabled. + +## AUTH-MTLS-11-002 — Certificate-Bound Tokens + +**Configuration model** +- Reuse `SenderConstraints.Mtls` described above; include: + - `enforceForAudiences` list (defaults `signer`, `attestor`, `scheduler`). + - `certificateRotationGraceSeconds` for overlap. + - `allowedClientCertificateAuthorities` absolute paths. + +**Kestrel/TLS pipeline** +- Configure Kestrel with `ClientCertificateMode.AllowCertificate` globally and implement middleware that enforces certificate presence only when the resolved client requires mTLS. +- Add `IAuthorityClientCertificateValidator` that validates presented certificate chain, SANs (`dns`, `uri`, optional SPIFFE), and thumbprint matches one of the stored bindings. +- Cache validation results per connection id to avoid rehashing on every request. 
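As a rough illustration of the optional-certificate posture described in the list above, the Kestrel wiring could look like the sketch below. It is a minimal sketch only: the middleware body and the `IAuthorityClientCertificateValidator` hook are placeholders until AUTH-MTLS-11-002 lands, and the per-client policy lookup is assumed.

```csharp
// Program.cs (sketch): request but do not require client certificates at TLS
// termination, then let per-client policy decide whether to enforce mTLS.
using Microsoft.AspNetCore.Server.Kestrel.Https;

var builder = WebApplication.CreateBuilder(args);

builder.WebHost.ConfigureKestrel(kestrel =>
{
    kestrel.ConfigureHttpsDefaults(https =>
    {
        // Allow (not require) certificates so DPoP-only clients can still connect.
        https.ClientCertificateMode = ClientCertificateMode.AllowCertificate;
    });
});

var app = builder.Build();

// Hypothetical enforcement middleware: only clients registered with
// senderConstraint=mtls must present a certificate; validation itself would be
// delegated to the planned IAuthorityClientCertificateValidator.
app.Use(async (context, next) =>
{
    var certificate = context.Connection.ClientCertificate; // null when absent
    // ... resolve client registration, enforce/validate as required ...
    await next(context);
});

app.Run();
```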
+ +**Client registration & storage** +- Extend `AuthorityClientDocument` with `List` containing: + - `Thumbprint`, `SerialNumber`, `Subject`, `NotBefore`, `NotAfter`, `Sans`, `CreatedAt`, `UpdatedAt`, `Label`. +- Provide admin API mutations (`/admin/clients/{id}/certificates`) for ops tooling (deferred implementation but schema ready). +- Update plugin provisioning store (`StandardClientProvisioningStore`) to map descriptors with certificate bindings and `senderConstraint`. +- Persist binding state in Mongo migrations (index on `{clientId, thumbprint}`). + +**Token issuance & introspection** +- Add a transaction property capturing the validated client certificate thumbprint. +- `HandleClientCredentialsHandler`: + - When mTLS required, ensure certificate info present; reject otherwise. + - Stamp `cnf` claim: `principal.SetClaim("cnf", JsonSerializer.Serialize(new { x5t#S256 = thumbprint }))`. + - Store binding metadata in issued token document for audit. +- Update `ValidateAccessTokenHandler` and introspection responses to surface `cnf.x5t#S256`. +- Ensure refresh tokens (if ever enabled) copy the binding data. + +**Auditing & observability** +- Audit events: + - `authority.mtls.handshake` (success/failure, clientId, thumbprint, issuer, subject). + - `authority.mtls.binding.missing` when a required client posts without a cert. +- Metrics: + - `authority_mtls_handshakes_total{result}`. + - `authority_mtls_certificate_rotations_total`. +- Logs include `authority.sender_constraint=mtls`, `authority.mtls_thumbprint`, `authority.mtls_subject`. + +**Testing** +- Unit tests for certificate validation rules (SAN mismatches, expiry, CA trust). +- Integration tests running Kestrel with test certificates: + - Successful token issuance with bound certificate. + - Request without certificate → `invalid_client`. + - Token introspection reveals `cnf.x5t#S256`. + - Rotation scenario (old + new cert allowed during grace window). + +## Implementation Checklist + +**DPoP work-stream** +1. Extract shared validator into `StellaOps.Auth.Security`; update Scanner references. +2. Introduce configuration classes and bind from YAML/environment. +3. Implement nonce store (Redis + in-memory), handler integration, and OpenIddict transaction plumbing. +4. Stamp `cnf.jkt`, audit events, and metrics; update Mongo documents and migrations. +5. Extend docs: `docs/ARCHITECTURE_AUTHORITY.md`, `docs/security/audit-events.md`, `docs/security/rate-limits.md`, CLI/UI references. + +**mTLS work-stream** +1. Extend client document/schema and provisioning stores with certificate bindings + sender constraint flag. +2. Configure Kestrel/middleware for optional client certificates and validation service. +3. Update token issuance/introspection to honour certificate bindings and emit `cnf.x5t#S256`. +4. Add auditing/metrics and integration tests (happy path + failure). +5. Refresh operator documentation (`docs/ops/authority-backup-restore.md`, `docs/ops/authority-monitoring.md`, sample `authority.yaml`) to cover certificate lifecycle. + +Both streams should conclude with `dotnet test src/Authority/StellaOps.Authority/StellaOps.Authority.sln` and documentation cross-links so dependent guilds can unblock UI/Signer work. 
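Illustrative only: one way the `cnf` confirmation payloads referenced throughout this plan could be serialized. The helper type and method names below are assumptions, and a dictionary is used because `x5t#S256` is not a legal anonymous-type member name in C#.

```csharp
using System;
using System.Collections.Generic;
using System.Security.Cryptography;
using System.Security.Cryptography.X509Certificates;
using System.Text.Json;

internal static class ConfirmationClaims
{
    // DPoP binding: cnf.jkt carries the RFC 7638 JWK thumbprint of the proof key.
    public static string ForDpop(string jwkThumbprint) =>
        JsonSerializer.Serialize(new Dictionary<string, string> { ["jkt"] = jwkThumbprint });

    // mTLS binding (RFC 8705): cnf."x5t#S256" carries the base64url-encoded
    // SHA-256 thumbprint of the bound client certificate.
    public static string ForMtls(X509Certificate2 clientCertificate)
    {
        var hash = clientCertificate.GetCertHash(HashAlgorithmName.SHA256);
        return JsonSerializer.Serialize(
            new Dictionary<string, string> { ["x5t#S256"] = Base64Url(hash) });
    }

    private static string Base64Url(byte[] bytes) =>
        Convert.ToBase64String(bytes).TrimEnd('=').Replace('+', '-').Replace('/', '_');
}
```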
diff --git a/docs/dev/authority-plugin-di-coordination.md b/docs/dev/authority-plugin-di-coordination.md index 50e1f499..b8364e33 100644 --- a/docs/dev/authority-plugin-di-coordination.md +++ b/docs/dev/authority-plugin-di-coordination.md @@ -2,42 +2,42 @@ > Created: 2025-10-19 — Plugin Platform Guild & Authority Core > Status: Completed (workshop held 2025-10-20 15:00–16:05 UTC) - -This document tracks preparation, agenda, and outcomes for the scoped-service workshop required before implementing PLUGIN-DI-08-002. - -## Objectives - -- Inventory Authority plug-in surfaces that need scoped service lifetimes. -- Confirm session/scope handling for identity-provider registrars and background jobs. -- Assign follow-up tasks/actions with owners and due dates. - -## Scheduling Snapshot - -- **Meeting time:** 2025-10-20 15:00–16:00 UTC (10:00–11:00 CDT / 08:00–09:00 PDT). -- **Facilitator:** Plugin Platform Guild — Alicia Rivera. -- **Attendees (confirmed):** Authority Core — Jasmin Patel; Authority Security Guild — Mohan Singh; Plugin Platform — Alicia Rivera, Leah Chen. -- **Optional invitees:** DevOps liaison — Sofia Ortega (accepted). -- **Logistics:** Invites sent via shared calendar on 2025-10-19 15:30 UTC with Teams bridge + offline dial-in. Meeting notes will be captured here. -- **Preparation deadline:** 2025-10-20 12:00 UTC — complete checklist below. - -## Pre-work Checklist - -- Review `ServiceBindingAttribute` contract introduced by PLUGIN-DI-08-001. -- Collect existing Authority plug-in registration code paths to evaluate. -- Audit background jobs that assume singleton lifetimes. -- Identify plug-in health checks/telemetry surfaces impacted by scoped lifetimes. - + +This document tracks preparation, agenda, and outcomes for the scoped-service workshop required before implementing PLUGIN-DI-08-002. + +## Objectives + +- Inventory Authority plug-in surfaces that need scoped service lifetimes. +- Confirm session/scope handling for identity-provider registrars and background jobs. +- Assign follow-up tasks/actions with owners and due dates. + +## Scheduling Snapshot + +- **Meeting time:** 2025-10-20 15:00–16:00 UTC (10:00–11:00 CDT / 08:00–09:00 PDT). +- **Facilitator:** Plugin Platform Guild — Alicia Rivera. +- **Attendees (confirmed):** Authority Core — Jasmin Patel; Authority Security Guild — Mohan Singh; Plugin Platform — Alicia Rivera, Leah Chen. +- **Optional invitees:** DevOps liaison — Sofia Ortega (accepted). +- **Logistics:** Invites sent via shared calendar on 2025-10-19 15:30 UTC with Teams bridge + offline dial-in. Meeting notes will be captured here. +- **Preparation deadline:** 2025-10-20 12:00 UTC — complete checklist below. + +## Pre-work Checklist + +- Review `ServiceBindingAttribute` contract introduced by PLUGIN-DI-08-001. +- Collect existing Authority plug-in registration code paths to evaluate. +- Audit background jobs that assume singleton lifetimes. +- Identify plug-in health checks/telemetry surfaces impacted by scoped lifetimes. + ### Pre-work References | Focus | Path | Notes | |-------|------|-------| -| Host DI wiring | `src/StellaOps.Authority/StellaOps.Authority/Program.cs:159` | Startup registers `IAuthorityIdentityProviderRegistry` as a singleton and invokes `AuthorityPluginLoader.RegisterPlugins(...)` before the container is built. Any scoped plugin services will currently be captured in the singleton registry context. 
| -| Registrar discovery | `src/StellaOps.Authority/StellaOps.Authority/Plugins/AuthorityPluginLoader.cs:46` | Loader instantiates `IAuthorityPluginRegistrar` implementations via `Activator.CreateInstance`, so registrars cannot depend on host services yet. Need agreement on whether to move discovery post-build or introduce `ActivatorUtilities`. | -| Registry aggregation | `src/StellaOps.Authority/StellaOps.Authority/AuthorityIdentityProviderRegistry.cs:16` | Registry caches `IIdentityProviderPlugin` instances at construction time. With scoped lifetimes we must revisit how providers are resolved (factory vs accessor). | -| Standard registrar services | `src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StandardPluginRegistrar.cs:21` | All plugin services are registered as singletons today (`StandardUserCredentialStore`, `StandardClientProvisioningStore`, hosted bootstrapper). This registrar is our baseline for migrating to scoped bindings. | -| Hosted bootstrapper | `src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/Bootstrap/StandardPluginBootstrapper.cs:17` | Background job directly consumes `StandardUserCredentialStore`. If the store becomes scoped we will need an `IServiceScopeFactory` bridge. | -| Password grant handler | `src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/PasswordGrantHandlers.cs:26` | Password flow resolves `IIdentityProviderPlugin` during scoped requests. Scope semantics must ensure credential stores stay cancellation-aware. | -| Client credential handler | `src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/ClientCredentialsHandlers.cs:21` | Handler fetches provider + `ClientProvisioning` store; confirms need for consistent scoping in both user and client flows. | +| Host DI wiring | `src/Authority/StellaOps.Authority/StellaOps.Authority/Program.cs:159` | Startup registers `IAuthorityIdentityProviderRegistry` as a singleton and invokes `AuthorityPluginLoader.RegisterPlugins(...)` before the container is built. Any scoped plugin services will currently be captured in the singleton registry context. | +| Registrar discovery | `src/Authority/StellaOps.Authority/StellaOps.Authority/Plugins/AuthorityPluginLoader.cs:46` | Loader instantiates `IAuthorityPluginRegistrar` implementations via `Activator.CreateInstance`, so registrars cannot depend on host services yet. Need agreement on whether to move discovery post-build or introduce `ActivatorUtilities`. | +| Registry aggregation | `src/Authority/StellaOps.Authority/StellaOps.Authority/AuthorityIdentityProviderRegistry.cs:16` | Registry caches `IIdentityProviderPlugin` instances at construction time. With scoped lifetimes we must revisit how providers are resolved (factory vs accessor). | +| Standard registrar services | `src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StandardPluginRegistrar.cs:21` | All plugin services are registered as singletons today (`StandardUserCredentialStore`, `StandardClientProvisioningStore`, hosted bootstrapper). This registrar is our baseline for migrating to scoped bindings. | +| Hosted bootstrapper | `src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/Bootstrap/StandardPluginBootstrapper.cs:17` | Background job directly consumes `StandardUserCredentialStore`. If the store becomes scoped we will need an `IServiceScopeFactory` bridge. 
| +| Password grant handler | `src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/PasswordGrantHandlers.cs:26` | Password flow resolves `IIdentityProviderPlugin` during scoped requests. Scope semantics must ensure credential stores stay cancellation-aware. | +| Client credential handler | `src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/ClientCredentialsHandlers.cs:21` | Handler fetches provider + `ClientProvisioning` store; confirms need for consistent scoping in both user and client flows. | ## Preliminary Findings — 2025-10-20 @@ -49,15 +49,15 @@ This document tracks preparation, agenda, and outcomes for the scoped-service wo - 2025-10-20 (PLUGIN-DI-08-003): Registry implementation updated to expose metadata + scoped handles; OpenIddict flows, bootstrap endpoints, and `/health` now resolve providers via scoped leases with accompanying test coverage. - 2025-10-20 (PLUGIN-DI-08-004): Authority plugin loader now instantiates registrars via scoped DI activations and honours `[ServiceBinding]` metadata in plugin assemblies. - 2025-10-20 (PLUGIN-DI-08-005): `StandardPluginBootstrapper` shifted to scope-per-run execution using `IServiceScopeFactory`, enabling future scoped stores without singleton leaks. - -## Draft Agenda - -1. Context recap (5 min) — why scoped DI is needed; summary of PLUGIN-DI-08-001 changes. -2. Authority plug-in surfaces (15 min) — registrars, background services, telemetry. -3. Session handling strategy (10 min) — scope creation semantics, cancellation propagation. -4. Action items & owners (10 min) — capture code/docs/test tasks with due dates. -5. Risks & follow-ups (5 min) — dependencies, rollout sequencing. - + +## Draft Agenda + +1. Context recap (5 min) — why scoped DI is needed; summary of PLUGIN-DI-08-001 changes. +2. Authority plug-in surfaces (15 min) — registrars, background services, telemetry. +3. Session handling strategy (10 min) — scope creation semantics, cancellation propagation. +4. Action items & owners (10 min) — capture code/docs/test tasks with due dates. +5. Risks & follow-ups (5 min) — dependencies, rollout sequencing. + ## Notes - Session opened with recap of scoped-service goals and PLUGIN-DI-08-001 changes, confirming Authority readiness to adopt `[ServiceBinding]` metadata. @@ -65,11 +65,11 @@ This document tracks preparation, agenda, and outcomes for the scoped-service wo - Standard plug-in bootstrap will create scopes via `IServiceScopeFactory` and pass cancellation tokens through to avoid lingering singleton references. - Authority Plugin Loader will enumerate plug-in assemblies at startup but defer registrar activation until a scoped service provider is available, aligning with PLUGIN-DI-08-004 implementation. - Follow-up engineering tasks assigned to land PLUGIN-DI-08-002 code path adjustments and Authority host updates before 2025-10-24. - -## Action Item Log - -| Item | Owner | Due | Status | Notes | -|------|-------|-----|--------|-------| + +## Action Item Log + +| Item | Owner | Due | Status | Notes | +|------|-------|-----|--------|-------| | Confirm meeting time | Alicia Rivera | 2025-10-19 15:30 UTC | DONE | Calendar invite sent; all required attendees accepted | | Compile Authority plug-in DI entry points | Jasmin Patel | 2025-10-20 | DONE (2025-10-20) | Scoped-service touchpoints summarised in **Pre-work References** and **Preliminary Findings** ahead of the workshop. 
| | Outline scoped-session pattern for background jobs | Leah Chen | 2025-10-21 | DONE (2025-10-20) | Pattern agreed: bootstrap services must open transient scopes per execution via `IServiceScopeFactory`; document update to follow in PLUGIN-DI-08-002 patch. | diff --git a/docs/dev/fixtures.md b/docs/dev/fixtures.md index 876905b5..4967ce88 100644 --- a/docs/dev/fixtures.md +++ b/docs/dev/fixtures.md @@ -7,39 +7,39 @@ fixture sets, where they live, and how to regenerate them safely. ## GHSA ↔ OSV parity fixtures -- **Location:** `src/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/osv-ghsa.*.json` +- **Location:** `src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/osv-ghsa.*.json` - **Purpose:** Exercised by `OsvGhsaParityRegressionTests` to ensure OSV + GHSA outputs stay aligned on aliases, ranges, references, and credits. -- **Regeneration:** Either run the test harness with online regeneration (`UPDATE_PARITY_FIXTURES=1 dotnet test src/StellaOps.Concelier.Connector.Osv.Tests/StellaOps.Concelier.Connector.Osv.Tests.csproj`) +- **Regeneration:** Either run the test harness with online regeneration (`UPDATE_PARITY_FIXTURES=1 dotnet test src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Osv.Tests/StellaOps.Concelier.Connector.Osv.Tests.csproj`) or execute the fixture updater (`dotnet run --project tools/FixtureUpdater/FixtureUpdater.csproj`). Both paths normalise timestamps and canonical ordering. - **SemVer provenance:** The regenerated fixtures should show `normalizedVersions[].notes` in the `osv:{ecosystem}:{advisoryId}:{identifier}` shape emitted by `SemVerRangeRuleBuilder`. Confirm the constraints and notes line up with GHSA/NVD composites before committing. -- **Verification:** Inspect the diff, then re-run `dotnet test src/StellaOps.Concelier.Connector.Osv.Tests/StellaOps.Concelier.Connector.Osv.Tests.csproj` to confirm parity. +- **Verification:** Inspect the diff, then re-run `dotnet test src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Osv.Tests/StellaOps.Concelier.Connector.Osv.Tests.csproj` to confirm parity. ## GHSA credit parity fixtures -- **Location:** `src/StellaOps.Concelier.Connector.Ghsa.Tests/Fixtures/credit-parity.{ghsa,osv,nvd}.json` +- **Location:** `src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Ghsa.Tests/Fixtures/credit-parity.{ghsa,osv,nvd}.json` - **Purpose:** Exercised by `GhsaCreditParityRegressionTests` to guarantee GHSA/NVD/OSV acknowledgements remain in lockstep. - **Regeneration:** `dotnet run --project tools/FixtureUpdater/FixtureUpdater.csproj` rewrites all three canonical snapshots. -- **Verification:** `dotnet test src/StellaOps.Concelier.Connector.Ghsa.Tests/StellaOps.Concelier.Connector.Ghsa.Tests.csproj`. +- **Verification:** `dotnet test src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Ghsa.Tests/StellaOps.Concelier.Connector.Ghsa.Tests.csproj`. > Always commit fixture changes together with the code that motivated them and reference the regression test that guards the behaviour. ## Apple security update fixtures -- **Location:** `src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/*.html` and `.expected.json`. +- **Location:** `src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/*.html` and `.expected.json`. 
- **Purpose:** Exercised by `AppleLiveRegressionTests` to guarantee the Apple HTML parser and mapper stay deterministic while covering Rapid Security Responses and multi-device advisories. - **Regeneration:** Use the helper scripts (`scripts/update-apple-fixtures.sh` or `scripts/update-apple-fixtures.ps1`). They export `UPDATE_APPLE_FIXTURES=1`, propagate the flag through `WSLENV`, touch `.update-apple-fixtures`, and then run the Apple test project. This keeps WSL/VSCode test invocations in sync while the refresh workflow fetches live Apple support pages, sanitises them, and rewrites both the HTML and expected DTO snapshots with normalised ordering. -- **Verification:** Inspect the generated diffs and re-run `dotnet test src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests.csproj` without the env var to confirm determinism. +- **Verification:** Inspect the generated diffs and re-run `dotnet test src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests.csproj` without the env var to confirm determinism. > **Tip for other connector owners:** mirror the sentinel + `WSLENV` pattern (`touch .update--fixtures`, append the env var via `WSLENV`) when you add fixture refresh scripts so contributors running under WSL inherit the regeneration flag automatically. ## KISA advisory fixtures -- **Location:** `src/StellaOps.Concelier.Connector.Kisa.Tests/Fixtures/kisa-{feed,detail}.(xml|json)` +- **Location:** `src/Concelier/__Tests/StellaOps.Concelier.Connector.Kisa.Tests/Fixtures/kisa-{feed,detail}.(xml|json)` - **Purpose:** Used by `KisaConnectorTests` to verify Hangul-aware fetch → parse → map flows and to assert telemetry counters stay wired. -- **Regeneration:** `UPDATE_KISA_FIXTURES=1 dotnet test src/StellaOps.Concelier.Connector.Kisa.Tests/StellaOps.Concelier.Connector.Kisa.Tests.csproj` +- **Regeneration:** `UPDATE_KISA_FIXTURES=1 dotnet test src/Concelier/__Tests/StellaOps.Concelier.Connector.Kisa.Tests/StellaOps.Concelier.Connector.Kisa.Tests.csproj` - **Verification:** Re-run the same test suite without the env var; confirm advisory content remains NFC-normalised and HTML is sanitised. Metrics assertions will fail if counters drift. - **Localisation note:** RSS `category` values (e.g. `취약점정보`) remain in Hangul—do not translate them in fixtures; they feed directly into metrics/log tags. diff --git a/docs/dev/kisa_connector_notes.md b/docs/dev/kisa_connector_notes.md index 5ebd37b4..bf498455 100644 --- a/docs/dev/kisa_connector_notes.md +++ b/docs/dev/kisa_connector_notes.md @@ -39,7 +39,7 @@ The messages use structured properties (`Idx`, `Category`, `DocumentId`, `Severi - Hangul fields (`title`, `summary`, `category`, `reference.label`, product vendor/name) are normalised to NFC before storage. Sample category `취약점정보` roughly translates to “vulnerability information”. - Advisory HTML is sanitised via `HtmlContentSanitizer`, stripping script/style while preserving inline anchors for translation pipelines. - Metrics carry Hangul `category` tags and logging keeps Hangul strings intact; this ensures air-gapped operators can validate native-language content without relying on MT. -- Fixtures live under `src/StellaOps.Concelier.Connector.Kisa.Tests/Fixtures/`. Regenerate with `UPDATE_KISA_FIXTURES=1 dotnet test src/StellaOps.Concelier.Connector.Kisa.Tests/StellaOps.Concelier.Connector.Kisa.Tests.csproj`. 
+- Fixtures live under `src/Concelier/__Tests/StellaOps.Concelier.Connector.Kisa.Tests/Fixtures/`. Regenerate with `UPDATE_KISA_FIXTURES=1 dotnet test src/Concelier/__Tests/StellaOps.Concelier.Connector.Kisa.Tests/StellaOps.Concelier.Connector.Kisa.Tests.csproj`. - The regression suite asserts canonical mapping, state cleanup, and telemetry counters (`KisaConnectorTests.Telemetry_RecordsMetrics`) so QA can track instrumentation drift. For operator docs, link to this brief when documenting Hangul handling or counter dashboards so localisation reviewers have a single reference point. diff --git a/docs/dev/merge_semver_playbook.md b/docs/dev/merge_semver_playbook.md index c45777cf..81c93f2d 100644 --- a/docs/dev/merge_semver_playbook.md +++ b/docs/dev/merge_semver_playbook.md @@ -1,154 +1,154 @@ -# Concelier SemVer Merge Playbook (Sprint 1–2) - -This playbook describes how the merge layer and connector teams should emit the new SemVer primitives introduced in Sprint 1–2, how those primitives become normalized version rules, and how downstream jobs query them deterministically. - -## 1. What landed in Sprint 1–2 - -- `RangePrimitives.SemVer` now infers a canonical `style` (`range`, `exact`, `lt`, `lte`, `gt`, `gte`) and captures `exactValue` when the constraint is a single version. -- `NormalizedVersionRule` documents the analytics-friendly projection of each `AffectedPackage` coverage entry and is persisted alongside legacy `versionRanges`. -- `AdvisoryProvenance.decisionReason` records whether merge resolution favored precedence, freshness, or a tie-breaker comparison. - -See `src/StellaOps.Concelier.Models/CANONICAL_RECORDS.md` for the full schema and field descriptions. - -## 2. Mapper pattern - -Connectors should emit SemVer primitives as soon as they can normalize a vendor constraint. The helper `SemVerPrimitiveExtensions.ToNormalizedVersionRule` turns those primitives into the persisted rules: - -```csharp -var primitive = new SemVerPrimitive( - introduced: "1.2.3", - introducedInclusive: true, - fixed: "2.0.0", - fixedInclusive: false, - lastAffected: null, - lastAffectedInclusive: false, - constraintExpression: ">=1.2.3 <2.0.0", - exactValue: null); - -var rule = primitive.ToNormalizedVersionRule(notes: "nvd:CVE-2025-1234"); -// rule => scheme=semver, type=range, min=1.2.3, minInclusive=true, max=2.0.0, maxInclusive=false -``` - -If you omit the optional `notes` argument, `ToNormalizedVersionRule` now falls back to the primitive’s `ConstraintExpression`, ensuring the original comparator expression is preserved for provenance/audit queries. - -Emit the resulting rule inside `AffectedPackage.NormalizedVersions` while continuing to populate `AffectedVersionRange.RangeExpression` for backward compatibility. - -## 3. Merge dedupe flow - -During merge, feed all package candidates through `NormalizedVersionRuleComparer.Instance` prior to persistence. The comparer orders by scheme → type → min → minInclusive → max → maxInclusive → value → notes, guaranteeing consistent document layout and making `$unwind` pipelines deterministic. - -If multiple connectors emit identical constraints, the merge layer should: - -1. Combine provenance entries (preserving one per source). -2. Preserve a single normalized rule instance (thanks to `NormalizedVersionRuleEqualityComparer.Instance`). -3. Attach `decisionReason="precedence"` if one source overrides another. - -## 4. 
Example Mongo pipeline - -Use the following aggregation to locate advisories that affect a specific SemVer: - -```javascript -db.advisories.aggregate([ - { $match: { "affectedPackages.type": "semver", "affectedPackages.identifier": "pkg:npm/lodash" } }, - { $unwind: "$affectedPackages" }, - { $unwind: "$affectedPackages.normalizedVersions" }, - { $match: { - $or: [ - { "affectedPackages.normalizedVersions.type": "exact", - "affectedPackages.normalizedVersions.value": "4.17.21" }, - { "affectedPackages.normalizedVersions.type": "range", - "affectedPackages.normalizedVersions.min": { $lte: "4.17.21" }, - "affectedPackages.normalizedVersions.max": { $gt: "4.17.21" } }, - { "affectedPackages.normalizedVersions.type": "gte", - "affectedPackages.normalizedVersions.min": { $lte: "4.17.21" } }, - { "affectedPackages.normalizedVersions.type": "lte", - "affectedPackages.normalizedVersions.max": { $gte: "4.17.21" } } - ] - }}, - { $project: { advisoryKey: 1, title: 1, "affectedPackages.identifier": 1 } } -]); -``` - -Pair this query with the indexes listed in [Normalized Versions Query Guide](mongo_indices.md). - -## 5. Recommended indexes - -| Collection | Index | Purpose | -|------------|-------|---------| -| `advisory` | `{ "affectedPackages.identifier": 1, "affectedPackages.normalizedVersions.scheme": 1, "affectedPackages.normalizedVersions.type": 1 }` (compound, multikey) | Speeds up `$match` on identifier + rule style. | -| `advisory` | `{ "affectedPackages.normalizedVersions.value": 1 }` (sparse) | Optimizes lookups for exact version hits. | - -Coordinate with the Storage team when enabling these indexes so deployment windows account for collection size. - -## 6. Dual-write rollout - -Follow the operational checklist in `docs/ops/migrations/SEMVER_STYLE.md`. The summary: - -1. **Dual write (now)** – emit both legacy `versionRanges` and the new `normalizedVersions`. -2. **Backfill** – follow the storage migration in `docs/ops/migrations/SEMVER_STYLE.md` to rewrite historical advisories before switching consumers. -3. **Verify** – run the aggregation above (with `explain("executionStats")`) to ensure the new indexes are used. -4. **Cutover** – after consumers switch to normalized rules, mark the old `rangeExpression` as deprecated. - -## 7. Checklist for connectors & merge - -- [ ] Populate `SemVerPrimitive` for every SemVer-friendly constraint. -- [ ] Call `ToNormalizedVersionRule` and store the result. -- [ ] Emit provenance masks covering both `versionRanges[].primitives.semver` and `normalizedVersions[]`. -- [ ] Ensure merge deduping relies on the canonical comparer. -- [ ] Capture merge decisions via `decisionReason`. -- [ ] Confirm integration tests include fixtures with normalized rules and SemVer styles. - -For deeper query examples and maintenance tasks, continue with [Normalized Versions Query Guide](mongo_indices.md). - -## 8. Storage projection reference - -`NormalizedVersionDocumentFactory` copies each normalized rule into MongoDB using the shape below. 
Use this as a contract when reviewing connector fixtures or diagnosing merge/storage diffs: - -```json -{ - "packageId": "pkg:npm/example", - "packageType": "npm", - "scheme": "semver", - "type": "range", - "style": "range", - "min": "1.2.3", - "minInclusive": true, - "max": "2.0.0", - "maxInclusive": false, - "value": null, - "notes": "ghsa:GHSA-xxxx-yyyy", - "decisionReason": "ghsa-precedence-over-nvd", - "constraint": ">= 1.2.3 < 2.0.0", - "source": "ghsa", - "recordedAt": "2025-10-11T00:00:00Z" -} -``` - -For distro-specific ranges (`nevra`, `evr`) the same envelope applies with `scheme` switched accordingly. Example: - -```json -{ - "packageId": "bash", - "packageType": "rpm", - "scheme": "nevra", - "type": "range", - "style": "range", - "min": "0:4.4.18-2.el7", - "minInclusive": true, - "max": "0:4.4.20-1.el7", - "maxInclusive": false, - "value": null, - "notes": "redhat:RHSA-2025:1234", - "decisionReason": "rhel-priority-over-nvd", - "constraint": "<= 0:4.4.20-1.el7", - "source": "redhat", - "recordedAt": "2025-10-11T00:00:00Z" -} -``` - -If a new scheme is required (for example, `apple.build` or `ios.semver`), raise it with the Models team before emitting documents so merge comparers and hashing logic can incorporate the change deterministically. - -## 9. Observability signals - -- `concelier.merge.normalized_rules` (counter, tags: `package_type`, `scheme`) – increments once per normalized rule retained after precedence merge. -- `concelier.merge.normalized_rules_missing` (counter, tags: `package_type`) – increments when a merged package still carries version ranges but no normalized rules; watch for spikes to catch connectors that have not emitted normalized arrays yet. +# Concelier SemVer Merge Playbook (Sprint 1–2) + +This playbook describes how the merge layer and connector teams should emit the new SemVer primitives introduced in Sprint 1–2, how those primitives become normalized version rules, and how downstream jobs query them deterministically. + +## 1. What landed in Sprint 1–2 + +- `RangePrimitives.SemVer` now infers a canonical `style` (`range`, `exact`, `lt`, `lte`, `gt`, `gte`) and captures `exactValue` when the constraint is a single version. +- `NormalizedVersionRule` documents the analytics-friendly projection of each `AffectedPackage` coverage entry and is persisted alongside legacy `versionRanges`. +- `AdvisoryProvenance.decisionReason` records whether merge resolution favored precedence, freshness, or a tie-breaker comparison. + +See `src/Concelier/__Libraries/StellaOps.Concelier.Models/CANONICAL_RECORDS.md` for the full schema and field descriptions. + +## 2. Mapper pattern + +Connectors should emit SemVer primitives as soon as they can normalize a vendor constraint. The helper `SemVerPrimitiveExtensions.ToNormalizedVersionRule` turns those primitives into the persisted rules: + +```csharp +var primitive = new SemVerPrimitive( + introduced: "1.2.3", + introducedInclusive: true, + fixed: "2.0.0", + fixedInclusive: false, + lastAffected: null, + lastAffectedInclusive: false, + constraintExpression: ">=1.2.3 <2.0.0", + exactValue: null); + +var rule = primitive.ToNormalizedVersionRule(notes: "nvd:CVE-2025-1234"); +// rule => scheme=semver, type=range, min=1.2.3, minInclusive=true, max=2.0.0, maxInclusive=false +``` + +If you omit the optional `notes` argument, `ToNormalizedVersionRule` now falls back to the primitive’s `ConstraintExpression`, ensuring the original comparator expression is preserved for provenance/audit queries. 
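A small continuation of the snippet above showing that fallback in use; the `Notes` property name is assumed here for illustration.

```csharp
// Same primitive as above, but without an explicit notes argument:
var fallbackRule = primitive.ToNormalizedVersionRule();

// The rule's notes now carry the original comparator expression
// (">=1.2.3 <2.0.0"), preserving it for provenance/audit queries.
Console.WriteLine(fallbackRule?.Notes);
```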
+ +Emit the resulting rule inside `AffectedPackage.NormalizedVersions` while continuing to populate `AffectedVersionRange.RangeExpression` for backward compatibility. + +## 3. Merge dedupe flow + +During merge, feed all package candidates through `NormalizedVersionRuleComparer.Instance` prior to persistence. The comparer orders by scheme → type → min → minInclusive → max → maxInclusive → value → notes, guaranteeing consistent document layout and making `$unwind` pipelines deterministic. + +If multiple connectors emit identical constraints, the merge layer should: + +1. Combine provenance entries (preserving one per source). +2. Preserve a single normalized rule instance (thanks to `NormalizedVersionRuleEqualityComparer.Instance`). +3. Attach `decisionReason="precedence"` if one source overrides another. + +## 4. Example Mongo pipeline + +Use the following aggregation to locate advisories that affect a specific SemVer: + +```javascript +db.advisories.aggregate([ + { $match: { "affectedPackages.type": "semver", "affectedPackages.identifier": "pkg:npm/lodash" } }, + { $unwind: "$affectedPackages" }, + { $unwind: "$affectedPackages.normalizedVersions" }, + { $match: { + $or: [ + { "affectedPackages.normalizedVersions.type": "exact", + "affectedPackages.normalizedVersions.value": "4.17.21" }, + { "affectedPackages.normalizedVersions.type": "range", + "affectedPackages.normalizedVersions.min": { $lte: "4.17.21" }, + "affectedPackages.normalizedVersions.max": { $gt: "4.17.21" } }, + { "affectedPackages.normalizedVersions.type": "gte", + "affectedPackages.normalizedVersions.min": { $lte: "4.17.21" } }, + { "affectedPackages.normalizedVersions.type": "lte", + "affectedPackages.normalizedVersions.max": { $gte: "4.17.21" } } + ] + }}, + { $project: { advisoryKey: 1, title: 1, "affectedPackages.identifier": 1 } } +]); +``` + +Pair this query with the indexes listed in [Normalized Versions Query Guide](mongo_indices.md). + +## 5. Recommended indexes + +| Collection | Index | Purpose | +|------------|-------|---------| +| `advisory` | `{ "affectedPackages.identifier": 1, "affectedPackages.normalizedVersions.scheme": 1, "affectedPackages.normalizedVersions.type": 1 }` (compound, multikey) | Speeds up `$match` on identifier + rule style. | +| `advisory` | `{ "affectedPackages.normalizedVersions.value": 1 }` (sparse) | Optimizes lookups for exact version hits. | + +Coordinate with the Storage team when enabling these indexes so deployment windows account for collection size. + +## 6. Dual-write rollout + +Follow the operational checklist in `docs/ops/migrations/SEMVER_STYLE.md`. The summary: + +1. **Dual write (now)** – emit both legacy `versionRanges` and the new `normalizedVersions`. +2. **Backfill** – follow the storage migration in `docs/ops/migrations/SEMVER_STYLE.md` to rewrite historical advisories before switching consumers. +3. **Verify** – run the aggregation above (with `explain("executionStats")`) to ensure the new indexes are used. +4. **Cutover** – after consumers switch to normalized rules, mark the old `rangeExpression` as deprecated. + +## 7. Checklist for connectors & merge + +- [ ] Populate `SemVerPrimitive` for every SemVer-friendly constraint. +- [ ] Call `ToNormalizedVersionRule` and store the result. +- [ ] Emit provenance masks covering both `versionRanges[].primitives.semver` and `normalizedVersions[]`. +- [ ] Ensure merge deduping relies on the canonical comparer. +- [ ] Capture merge decisions via `decisionReason`. 
+- [ ] Confirm integration tests include fixtures with normalized rules and SemVer styles. + +For deeper query examples and maintenance tasks, continue with [Normalized Versions Query Guide](mongo_indices.md). + +## 8. Storage projection reference + +`NormalizedVersionDocumentFactory` copies each normalized rule into MongoDB using the shape below. Use this as a contract when reviewing connector fixtures or diagnosing merge/storage diffs: + +```json +{ + "packageId": "pkg:npm/example", + "packageType": "npm", + "scheme": "semver", + "type": "range", + "style": "range", + "min": "1.2.3", + "minInclusive": true, + "max": "2.0.0", + "maxInclusive": false, + "value": null, + "notes": "ghsa:GHSA-xxxx-yyyy", + "decisionReason": "ghsa-precedence-over-nvd", + "constraint": ">= 1.2.3 < 2.0.0", + "source": "ghsa", + "recordedAt": "2025-10-11T00:00:00Z" +} +``` + +For distro-specific ranges (`nevra`, `evr`) the same envelope applies with `scheme` switched accordingly. Example: + +```json +{ + "packageId": "bash", + "packageType": "rpm", + "scheme": "nevra", + "type": "range", + "style": "range", + "min": "0:4.4.18-2.el7", + "minInclusive": true, + "max": "0:4.4.20-1.el7", + "maxInclusive": false, + "value": null, + "notes": "redhat:RHSA-2025:1234", + "decisionReason": "rhel-priority-over-nvd", + "constraint": "<= 0:4.4.20-1.el7", + "source": "redhat", + "recordedAt": "2025-10-11T00:00:00Z" +} +``` + +If a new scheme is required (for example, `apple.build` or `ios.semver`), raise it with the Models team before emitting documents so merge comparers and hashing logic can incorporate the change deterministically. + +## 9. Observability signals + +- `concelier.merge.normalized_rules` (counter, tags: `package_type`, `scheme`) – increments once per normalized rule retained after precedence merge. +- `concelier.merge.normalized_rules_missing` (counter, tags: `package_type`) – increments when a merged package still carries version ranges but no normalized rules; watch for spikes to catch connectors that have not emitted normalized arrays yet. diff --git a/docs/dev/normalized_versions_rollout.md b/docs/dev/normalized_versions_rollout.md index b163d351..f4cbca88 100644 --- a/docs/dev/normalized_versions_rollout.md +++ b/docs/dev/normalized_versions_rollout.md @@ -4,7 +4,7 @@ _Status date: 2025-10-20 19:10 UTC_ This dashboard tracks connector readiness for emitting `AffectedPackage.NormalizedVersions` arrays and highlights upcoming coordination checkpoints. Use it alongside: -- [`src/StellaOps.Concelier.Merge/RANGE_PRIMITIVES_COORDINATION.md`](../../src/StellaOps.Concelier.Merge/RANGE_PRIMITIVES_COORDINATION.md) for detailed guidance and timelines. +- [`src/Concelier/__Libraries/StellaOps.Concelier.Merge/RANGE_PRIMITIVES_COORDINATION.md`](../../src/Concelier/__Libraries/StellaOps.Concelier.Merge/RANGE_PRIMITIVES_COORDINATION.md) for detailed guidance and timelines. - [Concelier SemVer Merge Playbook](merge_semver_playbook.md) §8 for persisted Mongo document shapes. - [Normalized Versions Query Guide](mongo_indices.md) for index/query validation steps. 
@@ -20,20 +20,20 @@ This dashboard tracks connector readiness for emitting `AffectedPackage.Normaliz | Connector | Owner team | Normalized versions status | Last update | Next action / link | |-----------|------------|---------------------------|-------------|--------------------| -| Acsc | BE-Conn-ACSC | ❌ Not started – normalized helper pending relay stability | 2025-10-20 | Prepare builder integration plan for 2025-10-24 kickoff; update `src/StellaOps.Concelier.Connector.Acsc/TASKS.md` once branch opens. | -| Cccs | BE-Conn-CCCS | ⚠️ DOING – trailing-version helper MR reviewing (due 2025-10-21) | 2025-10-20 | Land helper + fixture refresh, post merge-counter screenshot; `src/StellaOps.Concelier.Connector.Cccs/TASKS.md`. | -| CertBund | BE-Conn-CERTBUND | ⚠️ In progress – localisation translator WIP (due 2025-10-22) | 2025-10-20 | Finish translator + provenance notes, regenerate fixtures; `src/StellaOps.Concelier.Connector.CertBund/TASKS.md`. | +| Acsc | BE-Conn-ACSC | ❌ Not started – normalized helper pending relay stability | 2025-10-20 | Prepare builder integration plan for 2025-10-24 kickoff; update `src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Acsc/TASKS.md` once branch opens. | +| Cccs | BE-Conn-CCCS | ⚠️ DOING – trailing-version helper MR reviewing (due 2025-10-21) | 2025-10-20 | Land helper + fixture refresh, post merge-counter screenshot; `src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Cccs/TASKS.md`. | +| CertBund | BE-Conn-CERTBUND | ⚠️ In progress – localisation translator WIP (due 2025-10-22) | 2025-10-20 | Finish translator + provenance notes, regenerate fixtures; `src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.CertBund/TASKS.md`. | | CertCc | BE-Conn-CERTCC | ✅ Complete – `certcc.vendor` rules emitting | 2025-10-20 | Monitor VINCE payload changes; no action. | | Kev | BE-Conn-KEV | ✅ Complete – catalog/due-date rules verified | 2025-10-20 | Routine monitoring only. | | Cve | BE-Conn-CVE | ✅ Complete – SemVer normalized rules live | 2025-10-20 | Keep fixtures in sync as CVE schema evolves. | | Ghsa | BE-Conn-GHSA | ✅ Complete – rollout merged 2025-10-11 | 2025-10-20 | Maintain parity with OSV ecosystems; no action. | | Osv | BE-Conn-OSV | ✅ Complete – normalized rules shipping | 2025-10-20 | Watch for new ecosystems; refresh fixtures as needed. | -| Ics.Cisa | BE-Conn-ICS-CISA | ⚠️ Decision pending – exact SemVer promotion due 2025-10-23 | 2025-10-20 | Promote primitives or request new scheme; `src/StellaOps.Concelier.Connector.Ics.Cisa/TASKS.md`. | -| Kisa | BE-Conn-KISA | ⚠️ Proposal drafting – firmware scheme due 2025-10-24 | 2025-10-20 | Finalise `kisa.build` proposal with Models; update mapper/tests; `src/StellaOps.Concelier.Connector.Kisa/TASKS.md`. | +| Ics.Cisa | BE-Conn-ICS-CISA | ⚠️ Decision pending – exact SemVer promotion due 2025-10-23 | 2025-10-20 | Promote primitives or request new scheme; `src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Ics.Cisa/TASKS.md`. | +| Kisa | BE-Conn-KISA | ⚠️ Proposal drafting – firmware scheme due 2025-10-24 | 2025-10-20 | Finalise `kisa.build` proposal with Models; update mapper/tests; `src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Kisa/TASKS.md`. | | Ru.Bdu | BE-Conn-BDU | ✅ Complete – `ru-bdu.raw` rules live | 2025-10-20 | Continue monitoring UTF-8 handling; no action. 
| | Ru.Nkcki | BE-Conn-Nkcki | ✅ Complete – normalized rules emitted | 2025-10-20 | Maintain transliteration guidance; no action. | | Vndr.Apple | BE-Conn-Apple | ✅ Complete – normalized arrays emitting | 2025-10-20 | Add beta-channel coverage follow-up; see module README. | -| Vndr.Cisco | BE-Conn-Cisco | ⚠️ DOING – normalized promotion branch open (due 2025-10-21) | 2025-10-20 | Merge helper branch, refresh fixtures, post counters; `src/StellaOps.Concelier.Connector.Vndr.Cisco/TASKS.md`. | +| Vndr.Cisco | BE-Conn-Cisco | ⚠️ DOING – normalized promotion branch open (due 2025-10-21) | 2025-10-20 | Merge helper branch, refresh fixtures, post counters; `src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Vndr.Cisco/TASKS.md`. | | Vndr.Msrc | BE-Conn-MSRC | ✅ Complete – `msrc.build` rules emitting | 2025-10-20 | Monitor monthly rollups; no action. | | Nvd | BE-Conn-NVD | ✅ Complete – normalized SemVer output live | 2025-10-20 | Keep provenance aligned with CVE IDs; monitor export parity toggle. | diff --git a/docs/devops/policy-schema-export.md b/docs/devops/policy-schema-export.md index 2200a54d..c731f8c2 100644 --- a/docs/devops/policy-schema-export.md +++ b/docs/devops/policy-schema-export.md @@ -1,27 +1,27 @@ -# Policy Schema Export Automation - -This utility generates JSON Schema documents for the Policy Engine run contracts. - -## Command - -``` -scripts/export-policy-schemas.sh [output-directory] -``` - -When no output directory is supplied, schemas are written to `docs/schemas/`. - -The exporter builds against `StellaOps.Scheduler.Models` and emits: - -- `policy-run-request.schema.json` -- `policy-run-status.schema.json` -- `policy-diff-summary.schema.json` -- `policy-explain-trace.schema.json` - -The build pipeline (`.gitea/workflows/build-test-deploy.yml`, job **Export policy run schemas**) runs this script on every push and pull request. Exports land under `artifacts/policy-schemas//`, are published as the `policy-schema-exports` artifact, and changes trigger a Slack post to `#policy-engine` via the `POLICY_ENGINE_SCHEMA_WEBHOOK` secret. A unified diff is stored alongside the exports for downstream consumers. - -## CI integration checklist - -- [x] Invoke the script in the DevOps pipeline (see `DEVOPS-POLICY-20-004`). -- [x] Publish the generated schemas as pipeline artifacts. -- [x] Notify downstream consumers when schemas change (Slack `#policy-engine`, changelog snippet). -- [ ] Gate CLI validation once schema artifacts are available. +# Policy Schema Export Automation + +This utility generates JSON Schema documents for the Policy Engine run contracts. + +## Command + +``` +scripts/export-policy-schemas.sh [output-directory] +``` + +When no output directory is supplied, schemas are written to `docs/schemas/`. + +The exporter builds against `StellaOps.Scheduler.Models` and emits: + +- `policy-run-request.schema.json` +- `policy-run-status.schema.json` +- `policy-diff-summary.schema.json` +- `policy-explain-trace.schema.json` + +The build pipeline (`.gitea/workflows/build-test-deploy.yml`, job **Export policy run schemas**) runs this script on every push and pull request. Exports land under `artifacts/policy-schemas//`, are published as the `policy-schema-exports` artifact, and changes trigger a Slack post to `#policy-engine` via the `POLICY_ENGINE_SCHEMA_WEBHOOK` secret. A unified diff is stored alongside the exports for downstream consumers. + +## CI integration checklist + +- [x] Invoke the script in the DevOps pipeline (see `DEVOPS-POLICY-20-004`). 
+- [x] Publish the generated schemas as pipeline artifacts. +- [x] Notify downstream consumers when schemas change (Slack `#policy-engine`, changelog snippet). +- [ ] Gate CLI validation once schema artifacts are available. diff --git a/docs/events/orchestrator-scanner-events.md b/docs/events/orchestrator-scanner-events.md index cbdb5950..5f246aef 100644 --- a/docs/events/orchestrator-scanner-events.md +++ b/docs/events/orchestrator-scanner-events.md @@ -1,121 +1,121 @@ -# Scanner Orchestrator Events (ORCH-SVC-38-101) - -Last updated: 2025-10-26 - -The Notifications Studio initiative (NOTIFY-SVC-38-001) and orchestrator backlog (ORCH-SVC-38-101) standardise how platform services emit lifecycle events. This document describes the Scanner WebService contract for the new **orchestrator envelopes** (`scanner.event.*`) and how they supersede the legacy Redis-backed `scanner.report.ready` / `scanner.scan.completed` events. - -## 1. Envelope overview - -Orchestrator events share a deterministic JSON envelope: - -| Field | Type | Notes | -|-------|------|-------| -| `eventId` | `uuid` | Globally unique identifier generated per occurrence. | -| `kind` | `string` | Event identifier; Scanner emits `scanner.event.report.ready` and `scanner.event.scan.completed`. | -| `version` | `integer` | Schema version. Initial release uses `1`. | -| `tenant` | `string` | Tenant that owns the scan/report. Mirrors Authority claims. | -| `occurredAt` | `date-time` | UTC instant when the underlying state transition happened (e.g., report persisted). | -| `recordedAt` | `date-time` | UTC instant when the event was durably written. Optional but recommended. | -| `source` | `string` | Producer identifier (`scanner.webservice`). | -| `idempotencyKey` | `string` | Deterministic key for duplicate suppression (see §4). | -| `correlationId` | `string` | Maps back to the API request or scan identifier. | -| `traceId` / `spanId` | `string` | W3C trace context propagated into downstream telemetry. | -| `scope` | `object` | Describes the affected artefact. Requires `repo` and `digest`; optional `namespace`, `component`, `image`. | -| `attributes` | `object` | Flat string map for frequently queried metadata (e.g., policy revision). | -| `payload` | `object` | Event-specific body (see §2). | - -Canonical schemas live under `docs/events/scanner.event.*@1.json`. Samples that round-trip through `NotifyCanonicalJsonSerializer` are stored in `docs/events/samples/`. - -## 2. Event kinds and payloads - -### 2.1 `scanner.event.report.ready` - -Emitted once a signed report is persisted and attested. Payload highlights: - -- `reportId` / `scanId` — identifiers for the persisted report and originating scan. Until Scan IDs are surfaced by the API, `scanId` mirrors `reportId` so downstream correlators can stabilise on a single key. -- **Attributes:** `reportId`, `policyRevisionId`, `policyDigest`, `verdict` — pre-sorted for deterministic routing. -- **Links:** - - `ui` → `/ui/reports/{reportId}` on the current host. - - `report` → `{apiBasePath}/{reportsSegment}/{reportId}` (defaults to `/api/v1/reports/{reportId}`). - - `policy` → `{apiBasePath}/{policySegment}/revisions/{revisionId}` when a revision is present. - - `attestation` → `/ui/attestations/{reportId}` when a DSSE envelope is included. -- `imageDigest` — OCI image digest associated with the analysis. -- `generatedAt` — report generation timestamp (ISO-8601 UTC). -- `verdict` — `pass`, `warn`, or `fail` after policy evaluation. 
-- `summary` — blocked/warned/ignored/quieted counters (all non-negative integers). -- `delta` — newly critical/high counts and optional `kev` array. -- `quietedFindingCount` — mirrors `summary.quieted`. -- `policy` — revision metadata (`digest`, `revisionId`) surfaced for routing. -- `links` — UI/report/policy URLs suitable for operators. -- `dsse` — embedded DSSE envelope (payload, type, signature list). -- `report` — canonical report document; identical to the DSSE payload. - -Schema: `docs/events/scanner.event.report.ready@1.json` -Sample: `docs/events/samples/scanner.event.report.ready@1.sample.json` - -### 2.2 `scanner.event.scan.completed` - -Emitted after scan execution finishes (success or policy failure). Payload highlights: - -- `reportId` / `scanId` / `imageDigest` — identifiers mirroring the report-ready event. As with the report-ready payload, `scanId` currently mirrors `reportId` as a temporary shim. -- **Attributes:** `reportId`, `policyRevisionId`, `policyDigest`, `verdict`. -- **Links:** same as above (`ui`, `report`, `policy`) with `attestation` populated when DSSE metadata exists. -- `verdict`, `summary`, `delta`, `policy` — same semantics as above. -- `findings` — array of surfaced findings with `id`, `severity`, optional `cve`, `purl`, and `reachability`. -- `links`, `dsse`, `report` — same structure as §2.1 (allows Notifier to reuse signatures). - -Schema: `docs/events/scanner.event.scan.completed@1.json` -Sample: `docs/events/samples/scanner.event.scan.completed@1.sample.json` - -### 2.3 Relationship to legacy events - -| Legacy Redis event | Replacement orchestrator event | Notes | -|--------------------|-------------------------------|-------| -| `scanner.report.ready` | `scanner.event.report.ready` | Adds versioning, idempotency, trace context. Payload is a superset of the legacy fields. | -| `scanner.scan.completed` | `scanner.event.scan.completed` | Same data plus explicit scan identifiers and orchestrator metadata. | - -Legacy schemas remain for backwards-compatibility during migration, but new integrations **must** target the orchestrator variants. - -## 3. Deterministic serialization - -- Producers must serialise events using `NotifyCanonicalJsonSerializer` to guarantee consistent key ordering and whitespace. -- Timestamps (`occurredAt`, `recordedAt`, `payload.generatedAt`) use `DateTimeOffset.UtcDateTime.ToString("O")`. -- Payload arrays (`delta.kev`, `findings`) should be pre-sorted (e.g., alphabetical CVE order) so hash-based consumers remain stable. -- Optional fields are omitted rather than emitted as `null`. - -## 4. Idempotency and correlation - -Idempotency keys dedupe repeated publishes and align with the orchestrator’s outbox pattern: - -| Event kind | Idempotency key template | -|------------|-------------------------| -| `scanner.event.report.ready` | `scanner.event.report.ready::` | -| `scanner.event.scan.completed` | `scanner.event.scan.completed::` | - -Keys are ASCII lowercase; components should be trimmed and validated before concatenation. Retries must reuse the same key. - -`correlationId` should match the scan identifier that appears in REST responses (`scanId`). Re-using the same value across the pair of events allows Notifier and orchestrator analytics to stitch lifecycle data together. - -## 5. Versioning and evolution - -- Increment the `version` field and the `@` suffix for **breaking** changes (field removals, type changes, semantic shifts). 
-- Additive optional fields may remain within version 1; update the JSON schema and samples accordingly. -- When introducing `@2`, keep the `@1` schema/docs in place until orchestrator subscribers confirm migration. - -## 6. Consumer checklist - -1. Validate incoming payloads against the schema for the targeted version. -2. Use `idempotencyKey` for dedupe, not `eventId`. -3. Map `traceId`/`spanId` into telemetry spans to preserve causality. -4. Prefer `payload.report` → `policy.revisionId` when populating templates; the top-level `attributes` are convenience duplicates for quick routing. -5. Reserve the legacy Redis events for transitional compatibility only; downstream systems should subscribe to the orchestrator bus exposed by ORCH-SVC-38-101. - -## 7. Implementation status and next actions - -- **Scanner WebService** — `SCANNER-EVENTS-16-301` (blocked) and `SCANNER-EVENTS-16-302` (doing) track the production of these envelopes. The remaining blocker is the .NET 10 preview OpenAPI/Auth dependency drift that currently breaks `dotnet test`. Once Gateway and Notifier owners land the replacement packages, rerun the full test suite and capture fresh fixtures under `docs/events/samples/`. -- **Gateway/Notifier consumers** — subscribe to the orchestrator stream documented in ORCH-SVC-38-101. When the Scanner tasks unblock, regenerate notifier contract tests against the sample events included here. -- **Docs cadence** — update this file and the matching JSON schemas whenever payload fields change. Use the rehearsal checklist in `docs/ops/launch-cutover.md` to confirm downstream validation before the production cutover. Record gaps or newly required fields in `docs/ops/launch-readiness.md` so they land in the launch checklist. - ---- - -**Imposed rule reminder:** work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. +# Scanner Orchestrator Events (ORCH-SVC-38-101) + +Last updated: 2025-10-26 + +The Notifications Studio initiative (NOTIFY-SVC-38-001) and orchestrator backlog (ORCH-SVC-38-101) standardise how platform services emit lifecycle events. This document describes the Scanner WebService contract for the new **orchestrator envelopes** (`scanner.event.*`) and how they supersede the legacy Redis-backed `scanner.report.ready` / `scanner.scan.completed` events. + +## 1. Envelope overview + +Orchestrator events share a deterministic JSON envelope: + +| Field | Type | Notes | +|-------|------|-------| +| `eventId` | `uuid` | Globally unique identifier generated per occurrence. | +| `kind` | `string` | Event identifier; Scanner emits `scanner.event.report.ready` and `scanner.event.scan.completed`. | +| `version` | `integer` | Schema version. Initial release uses `1`. | +| `tenant` | `string` | Tenant that owns the scan/report. Mirrors Authority claims. | +| `occurredAt` | `date-time` | UTC instant when the underlying state transition happened (e.g., report persisted). | +| `recordedAt` | `date-time` | UTC instant when the event was durably written. Optional but recommended. | +| `source` | `string` | Producer identifier (`scanner.webservice`). | +| `idempotencyKey` | `string` | Deterministic key for duplicate suppression (see §4). | +| `correlationId` | `string` | Maps back to the API request or scan identifier. | +| `traceId` / `spanId` | `string` | W3C trace context propagated into downstream telemetry. | +| `scope` | `object` | Describes the affected artefact. Requires `repo` and `digest`; optional `namespace`, `component`, `image`. 
| +| `attributes` | `object` | Flat string map for frequently queried metadata (e.g., policy revision). | +| `payload` | `object` | Event-specific body (see §2). | + +Canonical schemas live under `docs/events/scanner.event.*@1.json`. Samples that round-trip through `NotifyCanonicalJsonSerializer` are stored in `docs/events/samples/`. + +## 2. Event kinds and payloads + +### 2.1 `scanner.event.report.ready` + +Emitted once a signed report is persisted and attested. Payload highlights: + +- `reportId` / `scanId` — identifiers for the persisted report and originating scan. Until Scan IDs are surfaced by the API, `scanId` mirrors `reportId` so downstream correlators can stabilise on a single key. +- **Attributes:** `reportId`, `policyRevisionId`, `policyDigest`, `verdict` — pre-sorted for deterministic routing. +- **Links:** + - `ui` → `/ui/reports/{reportId}` on the current host. + - `report` → `{apiBasePath}/{reportsSegment}/{reportId}` (defaults to `/api/v1/reports/{reportId}`). + - `policy` → `{apiBasePath}/{policySegment}/revisions/{revisionId}` when a revision is present. + - `attestation` → `/ui/attestations/{reportId}` when a DSSE envelope is included. +- `imageDigest` — OCI image digest associated with the analysis. +- `generatedAt` — report generation timestamp (ISO-8601 UTC). +- `verdict` — `pass`, `warn`, or `fail` after policy evaluation. +- `summary` — blocked/warned/ignored/quieted counters (all non-negative integers). +- `delta` — newly critical/high counts and optional `kev` array. +- `quietedFindingCount` — mirrors `summary.quieted`. +- `policy` — revision metadata (`digest`, `revisionId`) surfaced for routing. +- `links` — UI/report/policy URLs suitable for operators. +- `dsse` — embedded DSSE envelope (payload, type, signature list). +- `report` — canonical report document; identical to the DSSE payload. + +Schema: `docs/events/scanner.event.report.ready@1.json` +Sample: `docs/events/samples/scanner.event.report.ready@1.sample.json` + +### 2.2 `scanner.event.scan.completed` + +Emitted after scan execution finishes (success or policy failure). Payload highlights: + +- `reportId` / `scanId` / `imageDigest` — identifiers mirroring the report-ready event. As with the report-ready payload, `scanId` currently mirrors `reportId` as a temporary shim. +- **Attributes:** `reportId`, `policyRevisionId`, `policyDigest`, `verdict`. +- **Links:** same as above (`ui`, `report`, `policy`) with `attestation` populated when DSSE metadata exists. +- `verdict`, `summary`, `delta`, `policy` — same semantics as above. +- `findings` — array of surfaced findings with `id`, `severity`, optional `cve`, `purl`, and `reachability`. +- `links`, `dsse`, `report` — same structure as §2.1 (allows Notifier to reuse signatures). + +Schema: `docs/events/scanner.event.scan.completed@1.json` +Sample: `docs/events/samples/scanner.event.scan.completed@1.sample.json` + +### 2.3 Relationship to legacy events + +| Legacy Redis event | Replacement orchestrator event | Notes | +|--------------------|-------------------------------|-------| +| `scanner.report.ready` | `scanner.event.report.ready` | Adds versioning, idempotency, trace context. Payload is a superset of the legacy fields. | +| `scanner.scan.completed` | `scanner.event.scan.completed` | Same data plus explicit scan identifiers and orchestrator metadata. | + +Legacy schemas remain for backwards-compatibility during migration, but new integrations **must** target the orchestrator variants. + +## 3. 
Deterministic serialization + +- Producers must serialise events using `NotifyCanonicalJsonSerializer` to guarantee consistent key ordering and whitespace. +- Timestamps (`occurredAt`, `recordedAt`, `payload.generatedAt`) use `DateTimeOffset.UtcDateTime.ToString("O")`. +- Payload arrays (`delta.kev`, `findings`) should be pre-sorted (e.g., alphabetical CVE order) so hash-based consumers remain stable. +- Optional fields are omitted rather than emitted as `null`. + +## 4. Idempotency and correlation + +Idempotency keys dedupe repeated publishes and align with the orchestrator’s outbox pattern: + +| Event kind | Idempotency key template | +|------------|-------------------------| +| `scanner.event.report.ready` | `scanner.event.report.ready::` | +| `scanner.event.scan.completed` | `scanner.event.scan.completed::` | + +Keys are ASCII lowercase; components should be trimmed and validated before concatenation. Retries must reuse the same key. + +`correlationId` should match the scan identifier that appears in REST responses (`scanId`). Re-using the same value across the pair of events allows Notifier and orchestrator analytics to stitch lifecycle data together. + +## 5. Versioning and evolution + +- Increment the `version` field and the `@` suffix for **breaking** changes (field removals, type changes, semantic shifts). +- Additive optional fields may remain within version 1; update the JSON schema and samples accordingly. +- When introducing `@2`, keep the `@1` schema/docs in place until orchestrator subscribers confirm migration. + +## 6. Consumer checklist + +1. Validate incoming payloads against the schema for the targeted version. +2. Use `idempotencyKey` for dedupe, not `eventId`. +3. Map `traceId`/`spanId` into telemetry spans to preserve causality. +4. Prefer `payload.report` → `policy.revisionId` when populating templates; the top-level `attributes` are convenience duplicates for quick routing. +5. Reserve the legacy Redis events for transitional compatibility only; downstream systems should subscribe to the orchestrator bus exposed by ORCH-SVC-38-101. + +## 7. Implementation status and next actions + +- **Scanner WebService** — `SCANNER-EVENTS-16-301` (blocked) and `SCANNER-EVENTS-16-302` (doing) track the production of these envelopes. The remaining blocker is the .NET 10 preview OpenAPI/Auth dependency drift that currently breaks `dotnet test`. Once Gateway and Notifier owners land the replacement packages, rerun the full test suite and capture fresh fixtures under `docs/events/samples/`. +- **Gateway/Notifier consumers** — subscribe to the orchestrator stream documented in ORCH-SVC-38-101. When the Scanner tasks unblock, regenerate notifier contract tests against the sample events included here. +- **Docs cadence** — update this file and the matching JSON schemas whenever payload fields change. Use the rehearsal checklist in `docs/ops/launch-cutover.md` to confirm downstream validation before the production cutover. Record gaps or newly required fields in `docs/ops/launch-readiness.md` so they land in the launch checklist. + +--- + +**Imposed rule reminder:** work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. 
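+
+## 8. Consumer dedupe sketch (illustrative)
+
+The checklist in §6 tells consumers to dedupe on `idempotencyKey` rather than `eventId`. The C# sketch below shows one minimal way to do that with `System.Text.Json` and an in-memory `HashSet`; the method name, the inline sample envelope, and the in-memory store are illustrative assumptions rather than Scanner or Notifier APIs, and real consumers should still validate envelopes against the published `@1` schemas before acting on them.
+
+```csharp
+using System;
+using System.Collections.Generic;
+using System.Text.Json;
+
+// In-memory duplicate suppression keyed on idempotencyKey (not eventId).
+var seenKeys = new HashSet<string>(StringComparer.Ordinal);
+
+// Hypothetical envelope trimmed to the fields this sketch touches.
+const string envelopeJson = """
+{
+  "eventId": "6d2d1b77-f3c3-4f70-8a9d-6f2d0c8801ab",
+  "idempotencyKey": "scanner.event.report.ready:tenant-alpha:report-abc",
+  "payload": { "reportId": "report-abc", "verdict": "fail" }
+}
+""";
+
+if (TryAcceptEnvelope(envelopeJson, out var payload))
+{
+    Console.WriteLine($"accepted report {payload.GetProperty("reportId").GetString()}");
+}
+
+bool TryAcceptEnvelope(string json, out JsonElement payload)
+{
+    payload = default;
+    using var document = JsonDocument.Parse(json);
+    var root = document.RootElement;
+
+    // Reject envelopes without a usable idempotency key instead of guessing one.
+    if (!root.TryGetProperty("idempotencyKey", out var keyElement) ||
+        keyElement.ValueKind != JsonValueKind.String)
+    {
+        return false;
+    }
+
+    // HashSet.Add returns false for duplicates, so retried publishes are dropped.
+    if (!seenKeys.Add(keyElement.GetString()!))
+    {
+        return false;
+    }
+
+    if (!root.TryGetProperty("payload", out var payloadElement))
+    {
+        return false;
+    }
+
+    // Clone so the payload can be used after the JsonDocument is disposed.
+    payload = payloadElement.Clone();
+    return true;
+}
+```
+
+Swapping the `HashSet` for a durable store (with whatever retention the deployment needs) is the consumer's choice; the property that matters is the one stated in §4: retries reuse the same key, so they land on the same dedupe entry.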
diff --git a/docs/events/samples/scanner.event.report.ready@1.sample.json b/docs/events/samples/scanner.event.report.ready@1.sample.json index bcfb8742..978c0322 100644 --- a/docs/events/samples/scanner.event.report.ready@1.sample.json +++ b/docs/events/samples/scanner.event.report.ready@1.sample.json @@ -1,93 +1,93 @@ -{ - "eventId": "6d2d1b77-f3c3-4f70-8a9d-6f2d0c8801ab", - "kind": "scanner.event.report.ready", - "version": 1, - "tenant": "tenant-alpha", - "occurredAt": "2025-10-19T12:34:56Z", - "recordedAt": "2025-10-19T12:34:57Z", - "source": "scanner.webservice", - "idempotencyKey": "scanner.event.report.ready:tenant-alpha:report-abc", - "correlationId": "report-abc", - "traceId": "0af7651916cd43dd8448eb211c80319c", - "spanId": "b7ad6b7169203331", - "scope": { - "namespace": "acme/edge", - "repo": "api", - "digest": "sha256:feedface" - }, - "attributes": { - "reportId": "report-abc", - "policyRevisionId": "rev-42", - "policyDigest": "digest-123", - "verdict": "blocked" - }, - "payload": { - "reportId": "report-abc", - "scanId": "report-abc", - "imageDigest": "sha256:feedface", - "generatedAt": "2025-10-19T12:34:56Z", - "verdict": "fail", - "summary": { - "total": 1, - "blocked": 1, - "warned": 0, - "ignored": 0, - "quieted": 0 - }, - "delta": { - "newCritical": 1, - "kev": [ - "CVE-2024-9999" - ] - }, - "quietedFindingCount": 0, - "policy": { - "digest": "digest-123", - "revisionId": "rev-42" - }, - "links": { - "ui": "https://scanner.example/ui/reports/report-abc", - "report": "https://scanner.example/api/v1/reports/report-abc", - "policy": "https://scanner.example/api/v1/policy/revisions/rev-42", - "attestation": "https://scanner.example/ui/attestations/report-abc" - }, - "dsse": { - "payloadType": "application/vnd.stellaops.report+json", - "payload": "eyJyZXBvcnRJZCI6InJlcG9ydC1hYmMiLCJpbWFnZURpZ2VzdCI6InNoYTI1NjpmZWVkZmFjZSIsImdlbmVyYXRlZEF0IjoiMjAyNS0xMC0xOVQxMjozNDo1NiswMDowMCIsInZlcmRpY3QiOiJibG9ja2VkIiwicG9saWN5Ijp7InJldmlzaW9uSWQiOiJyZXYtNDIiLCJkaWdlc3QiOiJkaWdlc3QtMTIzIn0sInN1bW1hcnkiOnsidG90YWwiOjEsImJsb2NrZWQiOjEsIndhcm5lZCI6MCwiaWdub3JlZCI6MCwicXVpZXRlZCI6MH0sInZlcmRpY3RzIjpbeyJmaW5kaW5nSWQiOiJmaW5kaW5nLTEiLCJzdGF0dXMiOiJCbG9ja2VkIiwic2NvcmUiOjQ3LjUsInNvdXJjZVRydXN0IjoiTlZEIiwicmVhY2hhYmlsaXR5IjoicnVudGltZSJ9XSwiaXNzdWVzIjpbXX0=", - "signatures": [ - { - "keyId": "test-key", - "algorithm": "hs256", - "signature": "signature-value" - } - ] - }, - "report": { - "reportId": "report-abc", - "generatedAt": "2025-10-19T12:34:56Z", - "imageDigest": "sha256:feedface", - "policy": { - "digest": "digest-123", - "revisionId": "rev-42" - }, - "summary": { - "total": 1, - "blocked": 1, - "warned": 0, - "ignored": 0, - "quieted": 0 - }, - "verdict": "blocked", - "verdicts": [ - { - "findingId": "finding-1", - "status": "Blocked", - "score": 47.5, - "sourceTrust": "NVD", - "reachability": "runtime" - } - ], - "issues": [] - } - } -} +{ + "eventId": "6d2d1b77-f3c3-4f70-8a9d-6f2d0c8801ab", + "kind": "scanner.event.report.ready", + "version": 1, + "tenant": "tenant-alpha", + "occurredAt": "2025-10-19T12:34:56Z", + "recordedAt": "2025-10-19T12:34:57Z", + "source": "scanner.webservice", + "idempotencyKey": "scanner.event.report.ready:tenant-alpha:report-abc", + "correlationId": "report-abc", + "traceId": "0af7651916cd43dd8448eb211c80319c", + "spanId": "b7ad6b7169203331", + "scope": { + "namespace": "acme/edge", + "repo": "api", + "digest": "sha256:feedface" + }, + "attributes": { + "reportId": "report-abc", + "policyRevisionId": "rev-42", + "policyDigest": "digest-123", + "verdict": 
"blocked" + }, + "payload": { + "reportId": "report-abc", + "scanId": "report-abc", + "imageDigest": "sha256:feedface", + "generatedAt": "2025-10-19T12:34:56Z", + "verdict": "fail", + "summary": { + "total": 1, + "blocked": 1, + "warned": 0, + "ignored": 0, + "quieted": 0 + }, + "delta": { + "newCritical": 1, + "kev": [ + "CVE-2024-9999" + ] + }, + "quietedFindingCount": 0, + "policy": { + "digest": "digest-123", + "revisionId": "rev-42" + }, + "links": { + "ui": "https://scanner.example/ui/reports/report-abc", + "report": "https://scanner.example/api/v1/reports/report-abc", + "policy": "https://scanner.example/api/v1/policy/revisions/rev-42", + "attestation": "https://scanner.example/ui/attestations/report-abc" + }, + "dsse": { + "payloadType": "application/vnd.stellaops.report+json", + "payload": "eyJyZXBvcnRJZCI6InJlcG9ydC1hYmMiLCJpbWFnZURpZ2VzdCI6InNoYTI1NjpmZWVkZmFjZSIsImdlbmVyYXRlZEF0IjoiMjAyNS0xMC0xOVQxMjozNDo1NiswMDowMCIsInZlcmRpY3QiOiJibG9ja2VkIiwicG9saWN5Ijp7InJldmlzaW9uSWQiOiJyZXYtNDIiLCJkaWdlc3QiOiJkaWdlc3QtMTIzIn0sInN1bW1hcnkiOnsidG90YWwiOjEsImJsb2NrZWQiOjEsIndhcm5lZCI6MCwiaWdub3JlZCI6MCwicXVpZXRlZCI6MH0sInZlcmRpY3RzIjpbeyJmaW5kaW5nSWQiOiJmaW5kaW5nLTEiLCJzdGF0dXMiOiJCbG9ja2VkIiwic2NvcmUiOjQ3LjUsInNvdXJjZVRydXN0IjoiTlZEIiwicmVhY2hhYmlsaXR5IjoicnVudGltZSJ9XSwiaXNzdWVzIjpbXX0=", + "signatures": [ + { + "keyId": "test-key", + "algorithm": "hs256", + "signature": "signature-value" + } + ] + }, + "report": { + "reportId": "report-abc", + "generatedAt": "2025-10-19T12:34:56Z", + "imageDigest": "sha256:feedface", + "policy": { + "digest": "digest-123", + "revisionId": "rev-42" + }, + "summary": { + "total": 1, + "blocked": 1, + "warned": 0, + "ignored": 0, + "quieted": 0 + }, + "verdict": "blocked", + "verdicts": [ + { + "findingId": "finding-1", + "status": "Blocked", + "score": 47.5, + "sourceTrust": "NVD", + "reachability": "runtime" + } + ], + "issues": [] + } + } +} diff --git a/docs/events/samples/scanner.event.scan.completed@1.sample.json b/docs/events/samples/scanner.event.scan.completed@1.sample.json index f8c2634e..b6096ec4 100644 --- a/docs/events/samples/scanner.event.scan.completed@1.sample.json +++ b/docs/events/samples/scanner.event.scan.completed@1.sample.json @@ -1,99 +1,99 @@ -{ - "eventId": "08a6de24-4a94-4d14-8432-9d14f36f6da3", - "kind": "scanner.event.scan.completed", - "version": 1, - "tenant": "tenant-alpha", - "occurredAt": "2025-10-19T12:34:56Z", - "recordedAt": "2025-10-19T12:34:57Z", - "source": "scanner.webservice", - "idempotencyKey": "scanner.event.scan.completed:tenant-alpha:report-abc", - "correlationId": "report-abc", - "traceId": "4bf92f3577b34da6a3ce929d0e0e4736", - "scope": { - "namespace": "acme/edge", - "repo": "api", - "digest": "sha256:feedface" - }, - "attributes": { - "reportId": "report-abc", - "policyRevisionId": "rev-42", - "policyDigest": "digest-123", - "verdict": "blocked" - }, - "payload": { - "reportId": "report-abc", - "scanId": "report-abc", - "imageDigest": "sha256:feedface", - "verdict": "fail", - "summary": { - "total": 1, - "blocked": 1, - "warned": 0, - "ignored": 0, - "quieted": 0 - }, - "delta": { - "newCritical": 1, - "kev": [ - "CVE-2024-9999" - ] - }, - "policy": { - "digest": "digest-123", - "revisionId": "rev-42" - }, - "findings": [ - { - "id": "finding-1", - "severity": "Critical", - "cve": "CVE-2024-9999", - "purl": "pkg:docker/acme/edge-api@sha256-feedface", - "reachability": "runtime" - } - ], - "links": { - "ui": "https://scanner.example/ui/reports/report-abc", - "report": 
"https://scanner.example/api/v1/reports/report-abc", - "policy": "https://scanner.example/api/v1/policy/revisions/rev-42", - "attestation": "https://scanner.example/ui/attestations/report-abc" - }, - "dsse": { - "payloadType": "application/vnd.stellaops.report+json", - "payload": "eyJyZXBvcnRJZCI6InJlcG9ydC1hYmMiLCJpbWFnZURpZ2VzdCI6InNoYTI1NjpmZWVkZmFjZSIsImdlbmVyYXRlZEF0IjoiMjAyNS0xMC0xOVQxMjozNDo1NiswMDowMCIsInZlcmRpY3QiOiJibG9ja2VkIiwicG9saWN5Ijp7InJldmlzaW9uSWQiOiJyZXYtNDIiLCJkaWdlc3QiOiJkaWdlc3QtMTIzIn0sInN1bW1hcnkiOnsidG90YWwiOjEsImJsb2NrZWQiOjEsIndhcm5lZCI6MCwiaWdub3JlZCI6MCwicXVpZXRlZCI6MH0sInZlcmRpY3RzIjpbeyJmaW5kaW5nSWQiOiJmaW5kaW5nLTEiLCJzdGF0dXMiOiJCbG9ja2VkIiwic2NvcmUiOjQ3LjUsInNvdXJjZVRydXN0IjoiTlZEIiwicmVhY2hhYmlsaXR5IjoicnVudGltZSJ9XSwiaXNzdWVzIjpbXX0=", - "signatures": [ - { - "keyId": "test-key", - "algorithm": "hs256", - "signature": "signature-value" - } - ] - }, - "report": { - "reportId": "report-abc", - "generatedAt": "2025-10-19T12:34:56Z", - "imageDigest": "sha256:feedface", - "policy": { - "digest": "digest-123", - "revisionId": "rev-42" - }, - "summary": { - "total": 1, - "blocked": 1, - "warned": 0, - "ignored": 0, - "quieted": 0 - }, - "verdict": "blocked", - "verdicts": [ - { - "findingId": "finding-1", - "status": "Blocked", - "score": 47.5, - "sourceTrust": "NVD", - "reachability": "runtime" - } - ], - "issues": [] - } - } -} +{ + "eventId": "08a6de24-4a94-4d14-8432-9d14f36f6da3", + "kind": "scanner.event.scan.completed", + "version": 1, + "tenant": "tenant-alpha", + "occurredAt": "2025-10-19T12:34:56Z", + "recordedAt": "2025-10-19T12:34:57Z", + "source": "scanner.webservice", + "idempotencyKey": "scanner.event.scan.completed:tenant-alpha:report-abc", + "correlationId": "report-abc", + "traceId": "4bf92f3577b34da6a3ce929d0e0e4736", + "scope": { + "namespace": "acme/edge", + "repo": "api", + "digest": "sha256:feedface" + }, + "attributes": { + "reportId": "report-abc", + "policyRevisionId": "rev-42", + "policyDigest": "digest-123", + "verdict": "blocked" + }, + "payload": { + "reportId": "report-abc", + "scanId": "report-abc", + "imageDigest": "sha256:feedface", + "verdict": "fail", + "summary": { + "total": 1, + "blocked": 1, + "warned": 0, + "ignored": 0, + "quieted": 0 + }, + "delta": { + "newCritical": 1, + "kev": [ + "CVE-2024-9999" + ] + }, + "policy": { + "digest": "digest-123", + "revisionId": "rev-42" + }, + "findings": [ + { + "id": "finding-1", + "severity": "Critical", + "cve": "CVE-2024-9999", + "purl": "pkg:docker/acme/edge-api@sha256-feedface", + "reachability": "runtime" + } + ], + "links": { + "ui": "https://scanner.example/ui/reports/report-abc", + "report": "https://scanner.example/api/v1/reports/report-abc", + "policy": "https://scanner.example/api/v1/policy/revisions/rev-42", + "attestation": "https://scanner.example/ui/attestations/report-abc" + }, + "dsse": { + "payloadType": "application/vnd.stellaops.report+json", + "payload": "eyJyZXBvcnRJZCI6InJlcG9ydC1hYmMiLCJpbWFnZURpZ2VzdCI6InNoYTI1NjpmZWVkZmFjZSIsImdlbmVyYXRlZEF0IjoiMjAyNS0xMC0xOVQxMjozNDo1NiswMDowMCIsInZlcmRpY3QiOiJibG9ja2VkIiwicG9saWN5Ijp7InJldmlzaW9uSWQiOiJyZXYtNDIiLCJkaWdlc3QiOiJkaWdlc3QtMTIzIn0sInN1bW1hcnkiOnsidG90YWwiOjEsImJsb2NrZWQiOjEsIndhcm5lZCI6MCwiaWdub3JlZCI6MCwicXVpZXRlZCI6MH0sInZlcmRpY3RzIjpbeyJmaW5kaW5nSWQiOiJmaW5kaW5nLTEiLCJzdGF0dXMiOiJCbG9ja2VkIiwic2NvcmUiOjQ3LjUsInNvdXJjZVRydXN0IjoiTlZEIiwicmVhY2hhYmlsaXR5IjoicnVudGltZSJ9XSwiaXNzdWVzIjpbXX0=", + "signatures": [ + { + "keyId": "test-key", + "algorithm": "hs256", + "signature": "signature-value" + } + ] + }, + 
"report": { + "reportId": "report-abc", + "generatedAt": "2025-10-19T12:34:56Z", + "imageDigest": "sha256:feedface", + "policy": { + "digest": "digest-123", + "revisionId": "rev-42" + }, + "summary": { + "total": 1, + "blocked": 1, + "warned": 0, + "ignored": 0, + "quieted": 0 + }, + "verdict": "blocked", + "verdicts": [ + { + "findingId": "finding-1", + "status": "Blocked", + "score": 47.5, + "sourceTrust": "NVD", + "reachability": "runtime" + } + ], + "issues": [] + } + } +} diff --git a/docs/events/samples/scheduler.graph.job.completed@1.sample.json b/docs/events/samples/scheduler.graph.job.completed@1.sample.json index 6dfd91d0..f4ee3f23 100644 --- a/docs/events/samples/scheduler.graph.job.completed@1.sample.json +++ b/docs/events/samples/scheduler.graph.job.completed@1.sample.json @@ -1,36 +1,36 @@ -{ - "eventId": "4d33c19c-1c8a-44d1-9954-1d5e98b2af71", - "kind": "scheduler.graph.job.completed", - "tenant": "tenant-alpha", - "ts": "2025-10-26T12:00:45Z", - "payload": { - "jobType": "build", - "status": "completed", - "occurredAt": "2025-10-26T12:00:45Z", - "job": { - "schemaVersion": "scheduler.graph-build-job@1", - "id": "gbj_20251026a", - "tenantId": "tenant-alpha", - "sbomId": "sbom_20251026", - "sbomVersionId": "sbom_ver_20251026", - "sbomDigest": "sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef", - "graphSnapshotId": "graph_snap_20251026", - "status": "completed", - "trigger": "sbom-version", - "attempts": 1, - "cartographerJobId": "carto_job_42", - "correlationId": "evt_svc_987", - "createdAt": "2025-10-26T12:00:00+00:00", - "startedAt": "2025-10-26T12:00:05+00:00", - "completedAt": "2025-10-26T12:00:45+00:00", - "metadata": { - "sbomEventId": "sbom_evt_20251026" - } - }, - "resultUri": "oras://cartographer/offline/tenant-alpha/graph_snap_20251026" - }, - "attributes": { - "cartographerCluster": "offline-kit", - "plannerShard": "graph-builders-01" - } -} +{ + "eventId": "4d33c19c-1c8a-44d1-9954-1d5e98b2af71", + "kind": "scheduler.graph.job.completed", + "tenant": "tenant-alpha", + "ts": "2025-10-26T12:00:45Z", + "payload": { + "jobType": "build", + "status": "completed", + "occurredAt": "2025-10-26T12:00:45Z", + "job": { + "schemaVersion": "scheduler.graph-build-job@1", + "id": "gbj_20251026a", + "tenantId": "tenant-alpha", + "sbomId": "sbom_20251026", + "sbomVersionId": "sbom_ver_20251026", + "sbomDigest": "sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef", + "graphSnapshotId": "graph_snap_20251026", + "status": "completed", + "trigger": "sbom-version", + "attempts": 1, + "cartographerJobId": "carto_job_42", + "correlationId": "evt_svc_987", + "createdAt": "2025-10-26T12:00:00+00:00", + "startedAt": "2025-10-26T12:00:05+00:00", + "completedAt": "2025-10-26T12:00:45+00:00", + "metadata": { + "sbomEventId": "sbom_evt_20251026" + } + }, + "resultUri": "oras://cartographer/offline/tenant-alpha/graph_snap_20251026" + }, + "attributes": { + "cartographerCluster": "offline-kit", + "plannerShard": "graph-builders-01" + } +} diff --git a/docs/events/scanner.event.report.ready@1.json b/docs/events/scanner.event.report.ready@1.json index 5d78d68c..611df5d7 100644 --- a/docs/events/scanner.event.report.ready@1.json +++ b/docs/events/scanner.event.report.ready@1.json @@ -1,164 +1,164 @@ -{ - "$id": "https://stella-ops.org/schemas/events/scanner.event.report.ready@1.json", - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Scanner orchestrator event – report ready (v1)", - "type": "object", - "additionalProperties": false, - 
"required": [ - "eventId", - "kind", - "version", - "tenant", - "occurredAt", - "source", - "idempotencyKey", - "payload" - ], - "properties": { - "eventId": { - "type": "string", - "format": "uuid", - "description": "Globally unique identifier for this occurrence." - }, - "kind": { - "const": "scanner.event.report.ready", - "description": "Event kind identifier consumed by orchestrator subscribers." - }, - "version": { - "const": 1, - "description": "Schema version for orchestrator envelopes." - }, - "tenant": { - "type": "string", - "description": "Tenant that owns the scan/report." - }, - "occurredAt": { - "type": "string", - "format": "date-time", - "description": "Timestamp (UTC) when the report transitioned to ready." - }, - "recordedAt": { - "type": "string", - "format": "date-time", - "description": "Timestamp (UTC) when the event was persisted. Optional." - }, - "source": { - "type": "string", - "description": "Producer identifier, e.g. `scanner.webservice`." - }, - "idempotencyKey": { - "type": "string", - "minLength": 8, - "description": "Deterministic key used to deduplicate events downstream." - }, - "correlationId": { - "type": "string", - "description": "Correlation identifier that ties this event to a request or workflow." - }, - "traceId": { - "type": "string", - "description": "W3C trace ID (32 hex chars) for distributed tracing." - }, - "spanId": { - "type": "string", - "description": "Optional span identifier associated with traceId." - }, - "scope": { - "type": "object", - "additionalProperties": false, - "required": ["repo", "digest"], - "properties": { - "namespace": {"type": "string"}, - "repo": {"type": "string"}, - "digest": {"type": "string"}, - "component": {"type": "string"}, - "image": {"type": "string"} - } - }, - "attributes": { - "type": "object", - "description": "String attributes for downstream correlation (policy revision, scan id, etc.).", - "additionalProperties": {"type": "string"} - }, - "payload": { - "type": "object", - "additionalProperties": true, - "required": ["reportId", "verdict", "summary", "links", "report"], - "properties": { - "reportId": {"type": "string"}, - "scanId": {"type": "string"}, - "imageDigest": {"type": "string"}, - "generatedAt": {"type": "string", "format": "date-time"}, - "verdict": {"enum": ["pass", "warn", "fail"]}, - "summary": { - "type": "object", - "additionalProperties": false, - "required": ["total", "blocked", "warned", "ignored", "quieted"], - "properties": { - "total": {"type": "integer", "minimum": 0}, - "blocked": {"type": "integer", "minimum": 0}, - "warned": {"type": "integer", "minimum": 0}, - "ignored": {"type": "integer", "minimum": 0}, - "quieted": {"type": "integer", "minimum": 0} - } - }, - "delta": { - "type": "object", - "additionalProperties": false, - "properties": { - "newCritical": {"type": "integer", "minimum": 0}, - "newHigh": {"type": "integer", "minimum": 0}, - "kev": { - "type": "array", - "items": {"type": "string"} - } - } - }, - "quietedFindingCount": { - "type": "integer", - "minimum": 0 - }, - "policy": { - "type": "object", - "description": "Policy revision metadata surfaced alongside the report." 
- }, - "links": { - "type": "object", - "additionalProperties": false, - "properties": { - "ui": {"type": "string", "format": "uri"}, - "report": {"type": "string", "format": "uri"}, - "policy": {"type": "string", "format": "uri"}, - "attestation": {"type": "string", "format": "uri"} - } - }, - "dsse": { - "type": "object", - "additionalProperties": false, - "required": ["payloadType", "payload", "signatures"], - "properties": { - "payloadType": {"type": "string"}, - "payload": {"type": "string"}, - "signatures": { - "type": "array", - "items": { - "type": "object", - "additionalProperties": false, - "required": ["keyId", "algorithm", "signature"], - "properties": { - "keyId": {"type": "string"}, - "algorithm": {"type": "string"}, - "signature": {"type": "string"} - } - } - } - } - }, - "report": { - "type": "object", - "description": "Canonical scanner report document that aligns with the DSSE payload." - } - } - } - } -} +{ + "$id": "https://stella-ops.org/schemas/events/scanner.event.report.ready@1.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Scanner orchestrator event – report ready (v1)", + "type": "object", + "additionalProperties": false, + "required": [ + "eventId", + "kind", + "version", + "tenant", + "occurredAt", + "source", + "idempotencyKey", + "payload" + ], + "properties": { + "eventId": { + "type": "string", + "format": "uuid", + "description": "Globally unique identifier for this occurrence." + }, + "kind": { + "const": "scanner.event.report.ready", + "description": "Event kind identifier consumed by orchestrator subscribers." + }, + "version": { + "const": 1, + "description": "Schema version for orchestrator envelopes." + }, + "tenant": { + "type": "string", + "description": "Tenant that owns the scan/report." + }, + "occurredAt": { + "type": "string", + "format": "date-time", + "description": "Timestamp (UTC) when the report transitioned to ready." + }, + "recordedAt": { + "type": "string", + "format": "date-time", + "description": "Timestamp (UTC) when the event was persisted. Optional." + }, + "source": { + "type": "string", + "description": "Producer identifier, e.g. `scanner.webservice`." + }, + "idempotencyKey": { + "type": "string", + "minLength": 8, + "description": "Deterministic key used to deduplicate events downstream." + }, + "correlationId": { + "type": "string", + "description": "Correlation identifier that ties this event to a request or workflow." + }, + "traceId": { + "type": "string", + "description": "W3C trace ID (32 hex chars) for distributed tracing." + }, + "spanId": { + "type": "string", + "description": "Optional span identifier associated with traceId." 
+ }, + "scope": { + "type": "object", + "additionalProperties": false, + "required": ["repo", "digest"], + "properties": { + "namespace": {"type": "string"}, + "repo": {"type": "string"}, + "digest": {"type": "string"}, + "component": {"type": "string"}, + "image": {"type": "string"} + } + }, + "attributes": { + "type": "object", + "description": "String attributes for downstream correlation (policy revision, scan id, etc.).", + "additionalProperties": {"type": "string"} + }, + "payload": { + "type": "object", + "additionalProperties": true, + "required": ["reportId", "verdict", "summary", "links", "report"], + "properties": { + "reportId": {"type": "string"}, + "scanId": {"type": "string"}, + "imageDigest": {"type": "string"}, + "generatedAt": {"type": "string", "format": "date-time"}, + "verdict": {"enum": ["pass", "warn", "fail"]}, + "summary": { + "type": "object", + "additionalProperties": false, + "required": ["total", "blocked", "warned", "ignored", "quieted"], + "properties": { + "total": {"type": "integer", "minimum": 0}, + "blocked": {"type": "integer", "minimum": 0}, + "warned": {"type": "integer", "minimum": 0}, + "ignored": {"type": "integer", "minimum": 0}, + "quieted": {"type": "integer", "minimum": 0} + } + }, + "delta": { + "type": "object", + "additionalProperties": false, + "properties": { + "newCritical": {"type": "integer", "minimum": 0}, + "newHigh": {"type": "integer", "minimum": 0}, + "kev": { + "type": "array", + "items": {"type": "string"} + } + } + }, + "quietedFindingCount": { + "type": "integer", + "minimum": 0 + }, + "policy": { + "type": "object", + "description": "Policy revision metadata surfaced alongside the report." + }, + "links": { + "type": "object", + "additionalProperties": false, + "properties": { + "ui": {"type": "string", "format": "uri"}, + "report": {"type": "string", "format": "uri"}, + "policy": {"type": "string", "format": "uri"}, + "attestation": {"type": "string", "format": "uri"} + } + }, + "dsse": { + "type": "object", + "additionalProperties": false, + "required": ["payloadType", "payload", "signatures"], + "properties": { + "payloadType": {"type": "string"}, + "payload": {"type": "string"}, + "signatures": { + "type": "array", + "items": { + "type": "object", + "additionalProperties": false, + "required": ["keyId", "algorithm", "signature"], + "properties": { + "keyId": {"type": "string"}, + "algorithm": {"type": "string"}, + "signature": {"type": "string"} + } + } + } + } + }, + "report": { + "type": "object", + "description": "Canonical scanner report document that aligns with the DSSE payload." + } + } + } + } +} diff --git a/docs/events/scanner.event.scan.completed@1.json b/docs/events/scanner.event.scan.completed@1.json index d89ffcfc..2cda8a0d 100644 --- a/docs/events/scanner.event.scan.completed@1.json +++ b/docs/events/scanner.event.scan.completed@1.json @@ -1,174 +1,174 @@ -{ - "$id": "https://stella-ops.org/schemas/events/scanner.event.scan.completed@1.json", - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Scanner orchestrator event – scan completed (v1)", - "type": "object", - "additionalProperties": false, - "required": [ - "eventId", - "kind", - "version", - "tenant", - "occurredAt", - "source", - "idempotencyKey", - "payload" - ], - "properties": { - "eventId": { - "type": "string", - "format": "uuid", - "description": "Globally unique identifier for this occurrence." 
- }, - "kind": { - "const": "scanner.event.scan.completed", - "description": "Event kind identifier consumed by orchestrator subscribers." - }, - "version": { - "const": 1, - "description": "Schema version for orchestrator envelopes." - }, - "tenant": { - "type": "string", - "description": "Tenant that owns the scan." - }, - "occurredAt": { - "type": "string", - "format": "date-time", - "description": "Timestamp (UTC) when the scan completed." - }, - "recordedAt": { - "type": "string", - "format": "date-time", - "description": "Timestamp (UTC) when the event was persisted. Optional." - }, - "source": { - "type": "string", - "description": "Producer identifier, e.g. `scanner.webservice`." - }, - "idempotencyKey": { - "type": "string", - "minLength": 8, - "description": "Deterministic key used to deduplicate events downstream." - }, - "correlationId": { - "type": "string", - "description": "Correlation identifier tying this event to a request or workflow." - }, - "traceId": { - "type": "string", - "description": "W3C trace ID (32 hex chars) for distributed tracing." - }, - "spanId": { - "type": "string", - "description": "Optional span identifier associated with traceId." - }, - "scope": { - "type": "object", - "additionalProperties": false, - "required": ["repo", "digest"], - "properties": { - "namespace": {"type": "string"}, - "repo": {"type": "string"}, - "digest": {"type": "string"}, - "component": {"type": "string"}, - "image": {"type": "string"} - } - }, - "attributes": { - "type": "object", - "description": "String attributes for downstream correlation (policy revision, scan id, etc.).", - "additionalProperties": {"type": "string"} - }, - "payload": { - "type": "object", - "additionalProperties": true, - "required": ["reportId", "scanId", "imageDigest", "verdict", "summary", "report"], - "properties": { - "reportId": {"type": "string"}, - "scanId": {"type": "string"}, - "imageDigest": {"type": "string"}, - "verdict": {"enum": ["pass", "warn", "fail"]}, - "summary": { - "type": "object", - "additionalProperties": false, - "required": ["total", "blocked", "warned", "ignored", "quieted"], - "properties": { - "total": {"type": "integer", "minimum": 0}, - "blocked": {"type": "integer", "minimum": 0}, - "warned": {"type": "integer", "minimum": 0}, - "ignored": {"type": "integer", "minimum": 0}, - "quieted": {"type": "integer", "minimum": 0} - } - }, - "delta": { - "type": "object", - "additionalProperties": false, - "properties": { - "newCritical": {"type": "integer", "minimum": 0}, - "newHigh": {"type": "integer", "minimum": 0}, - "kev": { - "type": "array", - "items": {"type": "string"} - } - } - }, - "policy": { - "type": "object", - "description": "Policy revision metadata surfaced alongside the report." 
- }, - "findings": { - "type": "array", - "items": { - "type": "object", - "additionalProperties": false, - "required": ["id"], - "properties": { - "id": {"type": "string"}, - "severity": {"type": "string"}, - "cve": {"type": "string"}, - "purl": {"type": "string"}, - "reachability": {"type": "string"} - } - } - }, - "links": { - "type": "object", - "additionalProperties": false, - "properties": { - "ui": {"type": "string", "format": "uri"}, - "report": {"type": "string", "format": "uri"}, - "policy": {"type": "string", "format": "uri"}, - "attestation": {"type": "string", "format": "uri"} - } - }, - "dsse": { - "type": "object", - "additionalProperties": false, - "required": ["payloadType", "payload", "signatures"], - "properties": { - "payloadType": {"type": "string"}, - "payload": {"type": "string"}, - "signatures": { - "type": "array", - "items": { - "type": "object", - "additionalProperties": false, - "required": ["keyId", "algorithm", "signature"], - "properties": { - "keyId": {"type": "string"}, - "algorithm": {"type": "string"}, - "signature": {"type": "string"} - } - } - } - } - }, - "report": { - "type": "object", - "description": "Canonical scanner report document that aligns with the DSSE payload." - } - } - } - } -} +{ + "$id": "https://stella-ops.org/schemas/events/scanner.event.scan.completed@1.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Scanner orchestrator event – scan completed (v1)", + "type": "object", + "additionalProperties": false, + "required": [ + "eventId", + "kind", + "version", + "tenant", + "occurredAt", + "source", + "idempotencyKey", + "payload" + ], + "properties": { + "eventId": { + "type": "string", + "format": "uuid", + "description": "Globally unique identifier for this occurrence." + }, + "kind": { + "const": "scanner.event.scan.completed", + "description": "Event kind identifier consumed by orchestrator subscribers." + }, + "version": { + "const": 1, + "description": "Schema version for orchestrator envelopes." + }, + "tenant": { + "type": "string", + "description": "Tenant that owns the scan." + }, + "occurredAt": { + "type": "string", + "format": "date-time", + "description": "Timestamp (UTC) when the scan completed." + }, + "recordedAt": { + "type": "string", + "format": "date-time", + "description": "Timestamp (UTC) when the event was persisted. Optional." + }, + "source": { + "type": "string", + "description": "Producer identifier, e.g. `scanner.webservice`." + }, + "idempotencyKey": { + "type": "string", + "minLength": 8, + "description": "Deterministic key used to deduplicate events downstream." + }, + "correlationId": { + "type": "string", + "description": "Correlation identifier tying this event to a request or workflow." + }, + "traceId": { + "type": "string", + "description": "W3C trace ID (32 hex chars) for distributed tracing." + }, + "spanId": { + "type": "string", + "description": "Optional span identifier associated with traceId." 
+ }, + "scope": { + "type": "object", + "additionalProperties": false, + "required": ["repo", "digest"], + "properties": { + "namespace": {"type": "string"}, + "repo": {"type": "string"}, + "digest": {"type": "string"}, + "component": {"type": "string"}, + "image": {"type": "string"} + } + }, + "attributes": { + "type": "object", + "description": "String attributes for downstream correlation (policy revision, scan id, etc.).", + "additionalProperties": {"type": "string"} + }, + "payload": { + "type": "object", + "additionalProperties": true, + "required": ["reportId", "scanId", "imageDigest", "verdict", "summary", "report"], + "properties": { + "reportId": {"type": "string"}, + "scanId": {"type": "string"}, + "imageDigest": {"type": "string"}, + "verdict": {"enum": ["pass", "warn", "fail"]}, + "summary": { + "type": "object", + "additionalProperties": false, + "required": ["total", "blocked", "warned", "ignored", "quieted"], + "properties": { + "total": {"type": "integer", "minimum": 0}, + "blocked": {"type": "integer", "minimum": 0}, + "warned": {"type": "integer", "minimum": 0}, + "ignored": {"type": "integer", "minimum": 0}, + "quieted": {"type": "integer", "minimum": 0} + } + }, + "delta": { + "type": "object", + "additionalProperties": false, + "properties": { + "newCritical": {"type": "integer", "minimum": 0}, + "newHigh": {"type": "integer", "minimum": 0}, + "kev": { + "type": "array", + "items": {"type": "string"} + } + } + }, + "policy": { + "type": "object", + "description": "Policy revision metadata surfaced alongside the report." + }, + "findings": { + "type": "array", + "items": { + "type": "object", + "additionalProperties": false, + "required": ["id"], + "properties": { + "id": {"type": "string"}, + "severity": {"type": "string"}, + "cve": {"type": "string"}, + "purl": {"type": "string"}, + "reachability": {"type": "string"} + } + } + }, + "links": { + "type": "object", + "additionalProperties": false, + "properties": { + "ui": {"type": "string", "format": "uri"}, + "report": {"type": "string", "format": "uri"}, + "policy": {"type": "string", "format": "uri"}, + "attestation": {"type": "string", "format": "uri"} + } + }, + "dsse": { + "type": "object", + "additionalProperties": false, + "required": ["payloadType", "payload", "signatures"], + "properties": { + "payloadType": {"type": "string"}, + "payload": {"type": "string"}, + "signatures": { + "type": "array", + "items": { + "type": "object", + "additionalProperties": false, + "required": ["keyId", "algorithm", "signature"], + "properties": { + "keyId": {"type": "string"}, + "algorithm": {"type": "string"}, + "signature": {"type": "string"} + } + } + } + } + }, + "report": { + "type": "object", + "description": "Canonical scanner report document that aligns with the DSSE payload." + } + } + } + } +} diff --git a/docs/events/scheduler.graph.job.completed@1.json b/docs/events/scheduler.graph.job.completed@1.json index 03b519c9..069c2272 100644 --- a/docs/events/scheduler.graph.job.completed@1.json +++ b/docs/events/scheduler.graph.job.completed@1.json @@ -1,196 +1,196 @@ -{ - "$id": "https://stella-ops.org/schemas/events/scheduler.graph.job.completed@1.json", - "$schema": "http://json-schema.org/draft-07/schema#", - "title": "Scheduler Graph Job Completed Event", - "description": "Legacy scheduler event emitted when a graph build or overlay job reaches a terminal state. 
Consumers validate downstream caches and surface overlay freshness.", - "type": "object", - "additionalProperties": false, - "required": ["eventId", "kind", "tenant", "ts", "payload"], - "properties": { - "eventId": { - "type": "string", - "format": "uuid", - "description": "Globally unique identifier per event." - }, - "kind": { - "const": "scheduler.graph.job.completed" - }, - "tenant": { - "type": "string", - "description": "Tenant identifier scoped to the originating job." - }, - "ts": { - "type": "string", - "format": "date-time", - "description": "UTC timestamp when the job reached a terminal state." - }, - "payload": { - "type": "object", - "additionalProperties": false, - "required": ["jobType", "job", "status", "occurredAt"], - "properties": { - "jobType": { - "type": "string", - "enum": ["build", "overlay"], - "description": "Job flavour, matches the CLR type of the serialized job payload." - }, - "status": { - "type": "string", - "enum": ["completed", "failed", "cancelled"], - "description": "Terminal status recorded for the job." - }, - "occurredAt": { - "type": "string", - "format": "date-time", - "description": "UTC timestamp of the terminal transition, mirrors job.CompletedAt." - }, - "job": { - "oneOf": [ - {"$ref": "#/definitions/graphBuildJob"}, - {"$ref": "#/definitions/graphOverlayJob"} - ], - "description": "Canonical serialized representation of the finished job." - }, - "resultUri": { - "type": "string", - "description": "Optional URI pointing to Cartographer snapshot or overlay bundle (if available)." - } - } - }, - "attributes": { - "type": "object", - "description": "Optional correlation bag for downstream consumers.", - "additionalProperties": { - "type": "string" - } - } - }, - "definitions": { - "graphBuildJob": { - "type": "object", - "additionalProperties": false, - "required": [ - "schemaVersion", - "id", - "tenantId", - "sbomId", - "sbomVersionId", - "sbomDigest", - "status", - "trigger", - "attempts", - "createdAt" - ], - "properties": { - "schemaVersion": { - "const": "scheduler.graph-build-job@1" - }, - "id": {"type": "string"}, - "tenantId": {"type": "string"}, - "sbomId": {"type": "string"}, - "sbomVersionId": {"type": "string"}, - "sbomDigest": { - "type": "string", - "pattern": "^sha256:[a-f0-9]{64}$" - }, - "graphSnapshotId": {"type": "string"}, - "status": { - "type": "string", - "enum": ["pending", "queued", "running", "completed", "failed", "cancelled"] - }, - "trigger": { - "type": "string", - "enum": ["sbom-version", "backfill", "manual"] - }, - "attempts": { - "type": "integer", - "minimum": 0 - }, - "cartographerJobId": {"type": "string"}, - "correlationId": {"type": "string"}, - "createdAt": { - "type": "string", - "format": "date-time" - }, - "startedAt": { - "type": "string", - "format": "date-time" - }, - "completedAt": { - "type": "string", - "format": "date-time" - }, - "error": {"type": "string"}, - "metadata": { - "type": "object", - "additionalProperties": {"type": "string"} - } - } - }, - "graphOverlayJob": { - "type": "object", - "additionalProperties": false, - "required": [ - "schemaVersion", - "id", - "tenantId", - "graphSnapshotId", - "overlayKind", - "overlayKey", - "status", - "trigger", - "attempts", - "createdAt" - ], - "properties": { - "schemaVersion": { - "const": "scheduler.graph-overlay-job@1" - }, - "id": {"type": "string"}, - "tenantId": {"type": "string"}, - "graphSnapshotId": {"type": "string"}, - "buildJobId": {"type": "string"}, - "overlayKind": { - "type": "string", - "enum": ["policy", "advisory", "vex"] - }, - 
"overlayKey": {"type": "string"}, - "subjects": { - "type": "array", - "items": {"type": "string"}, - "uniqueItems": true - }, - "status": { - "type": "string", - "enum": ["pending", "queued", "running", "completed", "failed", "cancelled"] - }, - "trigger": { - "type": "string", - "enum": ["policy", "advisory", "vex", "sbom-version", "manual"] - }, - "attempts": { - "type": "integer", - "minimum": 0 - }, - "correlationId": {"type": "string"}, - "createdAt": { - "type": "string", - "format": "date-time" - }, - "startedAt": { - "type": "string", - "format": "date-time" - }, - "completedAt": { - "type": "string", - "format": "date-time" - }, - "error": {"type": "string"}, - "metadata": { - "type": "object", - "additionalProperties": {"type": "string"} - } - } - } - } -} +{ + "$id": "https://stella-ops.org/schemas/events/scheduler.graph.job.completed@1.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "title": "Scheduler Graph Job Completed Event", + "description": "Legacy scheduler event emitted when a graph build or overlay job reaches a terminal state. Consumers validate downstream caches and surface overlay freshness.", + "type": "object", + "additionalProperties": false, + "required": ["eventId", "kind", "tenant", "ts", "payload"], + "properties": { + "eventId": { + "type": "string", + "format": "uuid", + "description": "Globally unique identifier per event." + }, + "kind": { + "const": "scheduler.graph.job.completed" + }, + "tenant": { + "type": "string", + "description": "Tenant identifier scoped to the originating job." + }, + "ts": { + "type": "string", + "format": "date-time", + "description": "UTC timestamp when the job reached a terminal state." + }, + "payload": { + "type": "object", + "additionalProperties": false, + "required": ["jobType", "job", "status", "occurredAt"], + "properties": { + "jobType": { + "type": "string", + "enum": ["build", "overlay"], + "description": "Job flavour, matches the CLR type of the serialized job payload." + }, + "status": { + "type": "string", + "enum": ["completed", "failed", "cancelled"], + "description": "Terminal status recorded for the job." + }, + "occurredAt": { + "type": "string", + "format": "date-time", + "description": "UTC timestamp of the terminal transition, mirrors job.CompletedAt." + }, + "job": { + "oneOf": [ + {"$ref": "#/definitions/graphBuildJob"}, + {"$ref": "#/definitions/graphOverlayJob"} + ], + "description": "Canonical serialized representation of the finished job." + }, + "resultUri": { + "type": "string", + "description": "Optional URI pointing to Cartographer snapshot or overlay bundle (if available)." 
+ } + } + }, + "attributes": { + "type": "object", + "description": "Optional correlation bag for downstream consumers.", + "additionalProperties": { + "type": "string" + } + } + }, + "definitions": { + "graphBuildJob": { + "type": "object", + "additionalProperties": false, + "required": [ + "schemaVersion", + "id", + "tenantId", + "sbomId", + "sbomVersionId", + "sbomDigest", + "status", + "trigger", + "attempts", + "createdAt" + ], + "properties": { + "schemaVersion": { + "const": "scheduler.graph-build-job@1" + }, + "id": {"type": "string"}, + "tenantId": {"type": "string"}, + "sbomId": {"type": "string"}, + "sbomVersionId": {"type": "string"}, + "sbomDigest": { + "type": "string", + "pattern": "^sha256:[a-f0-9]{64}$" + }, + "graphSnapshotId": {"type": "string"}, + "status": { + "type": "string", + "enum": ["pending", "queued", "running", "completed", "failed", "cancelled"] + }, + "trigger": { + "type": "string", + "enum": ["sbom-version", "backfill", "manual"] + }, + "attempts": { + "type": "integer", + "minimum": 0 + }, + "cartographerJobId": {"type": "string"}, + "correlationId": {"type": "string"}, + "createdAt": { + "type": "string", + "format": "date-time" + }, + "startedAt": { + "type": "string", + "format": "date-time" + }, + "completedAt": { + "type": "string", + "format": "date-time" + }, + "error": {"type": "string"}, + "metadata": { + "type": "object", + "additionalProperties": {"type": "string"} + } + } + }, + "graphOverlayJob": { + "type": "object", + "additionalProperties": false, + "required": [ + "schemaVersion", + "id", + "tenantId", + "graphSnapshotId", + "overlayKind", + "overlayKey", + "status", + "trigger", + "attempts", + "createdAt" + ], + "properties": { + "schemaVersion": { + "const": "scheduler.graph-overlay-job@1" + }, + "id": {"type": "string"}, + "tenantId": {"type": "string"}, + "graphSnapshotId": {"type": "string"}, + "buildJobId": {"type": "string"}, + "overlayKind": { + "type": "string", + "enum": ["policy", "advisory", "vex"] + }, + "overlayKey": {"type": "string"}, + "subjects": { + "type": "array", + "items": {"type": "string"}, + "uniqueItems": true + }, + "status": { + "type": "string", + "enum": ["pending", "queued", "running", "completed", "failed", "cancelled"] + }, + "trigger": { + "type": "string", + "enum": ["policy", "advisory", "vex", "sbom-version", "manual"] + }, + "attempts": { + "type": "integer", + "minimum": 0 + }, + "correlationId": {"type": "string"}, + "createdAt": { + "type": "string", + "format": "date-time" + }, + "startedAt": { + "type": "string", + "format": "date-time" + }, + "completedAt": { + "type": "string", + "format": "date-time" + }, + "error": {"type": "string"}, + "metadata": { + "type": "object", + "additionalProperties": {"type": "string"} + } + } + } + } +} diff --git a/docs/examples/policies/README.md b/docs/examples/policies/README.md index c46ede0e..5fc9cc15 100644 --- a/docs/examples/policies/README.md +++ b/docs/examples/policies/README.md @@ -1,16 +1,16 @@ -# Policy Examples - -Sample `stella-dsl@1` policies illustrating common deployment personas. Each example includes commentary, CLI usage hints, and a compliance checklist. - -| Example | Description | -|---------|-------------| -| [Baseline](baseline.md) | Balanced production defaults (block critical, respect strong VEX). | -| [Serverless](serverless.md) | Aggressive blocking for serverless workloads (no High+, pinned base images). | -| [Internal Only](internal-only.md) | Lenient policy for internal/dev environments with KEV safeguards. 
| - -Policy source files (`*.stella`) live alongside the documentation so you can copy/paste or use `stella policy new --from file://...`. - ---- - -*Last updated: 2025-10-26.* - +# Policy Examples + +Sample `stella-dsl@1` policies illustrating common deployment personas. Each example includes commentary, CLI usage hints, and a compliance checklist. + +| Example | Description | +|---------|-------------| +| [Baseline](baseline.md) | Balanced production defaults (block critical, respect strong VEX). | +| [Serverless](serverless.md) | Aggressive blocking for serverless workloads (no High+, pinned base images). | +| [Internal Only](internal-only.md) | Lenient policy for internal/dev environments with KEV safeguards. | + +Policy source files (`*.stella`) live alongside the documentation so you can copy/paste or use `stella policy new --from file://...`. + +--- + +*Last updated: 2025-10-26.* + diff --git a/docs/examples/policies/baseline.md b/docs/examples/policies/baseline.md index 76c0eb0d..0c0106a3 100644 --- a/docs/examples/policies/baseline.md +++ b/docs/examples/policies/baseline.md @@ -1,79 +1,79 @@ -# Baseline Policy Example (`baseline.stella`) - -This sample policy provides a balanced default for production workloads: block critical findings, require strong VEX justifications to suppress advisories, and warn on deprecated runtimes. Use it as a starting point for tenants that want guardrails without excessive noise. - -```dsl -policy "Baseline Production Policy" syntax "stella-dsl@1" { - metadata { - description = "Block critical, escalate high, enforce VEX justifications." - tags = ["baseline","production"] - } - - profile severity { - map vendor_weight { - source "GHSA" => +0.5 - source "OSV" => +0.0 - source "VendorX" => -0.2 - } - env exposure_adjustments { - if env.exposure == "internet" then +0.5 - if env.runtime == "legacy" then +0.3 - } - } - - rule block_critical priority 5 { - when severity.normalized >= "Critical" - then status := "blocked" - because "Critical severity must be remediated before deploy." - } - - rule escalate_high_internet { - when severity.normalized == "High" - and env.exposure == "internet" - then escalate to severity_band("Critical") - because "High severity on internet-exposed asset escalates to critical." - } - - rule require_vex_justification { - when vex.any(status in ["not_affected","fixed"]) - and vex.justification in ["component_not_present","vulnerable_code_not_present"] - then status := vex.status - annotate winning_statement := vex.latest().statementId - because "Respect strong vendor VEX claims." - } - - rule alert_warn_eol_runtime priority 1 { - when severity.normalized <= "Medium" - and sbom.has_tag("runtime:eol") - then warn message "Runtime marked as EOL; upgrade recommended." - because "Deprecated runtime should be upgraded." - } -} -``` - -## Commentary - -- **Severity profile** tightens vendor weights and applies exposure modifiers so internet-facing/high severity pairs escalate automatically. -- **VEX rule** only honours strong justifications, preventing weaker claims from hiding issues. -- **Warnings first** – The `alert_warn_eol_runtime` rule name ensures it sorts before the require-VEX rule, keeping alerts visible without flipping to `RequiresVex`. -- Works well as shared `tenant-global` baseline; use tenant overrides for stricter tolerant environments. 
- -## Try it out - -```bash -stella policy new --policy-id P-baseline --template blank --open -stella policy lint examples/policies/baseline.stella -stella policy simulate P-baseline --candidate 1 --sbom sbom:sample-prod -``` - -## Compliance checklist - -- [ ] Policy compiled via `stella policy lint` without diagnostics. -- [ ] Simulation diff reviewed against golden SBOM set. -- [ ] Approval note documents rationale before promoting to production. -- [ ] EOL runtime tags kept up to date in SBOM metadata. -- [ ] VEX vendor allow-list reviewed quarterly. - ---- - -*Last updated: 2025-10-26.* +# Baseline Policy Example (`baseline.stella`) + +This sample policy provides a balanced default for production workloads: block critical findings, require strong VEX justifications to suppress advisories, and warn on deprecated runtimes. Use it as a starting point for tenants that want guardrails without excessive noise. + +```dsl +policy "Baseline Production Policy" syntax "stella-dsl@1" { + metadata { + description = "Block critical, escalate high, enforce VEX justifications." + tags = ["baseline","production"] + } + + profile severity { + map vendor_weight { + source "GHSA" => +0.5 + source "OSV" => +0.0 + source "VendorX" => -0.2 + } + env exposure_adjustments { + if env.exposure == "internet" then +0.5 + if env.runtime == "legacy" then +0.3 + } + } + + rule block_critical priority 5 { + when severity.normalized >= "Critical" + then status := "blocked" + because "Critical severity must be remediated before deploy." + } + + rule escalate_high_internet { + when severity.normalized == "High" + and env.exposure == "internet" + then escalate to severity_band("Critical") + because "High severity on internet-exposed asset escalates to critical." + } + + rule require_vex_justification { + when vex.any(status in ["not_affected","fixed"]) + and vex.justification in ["component_not_present","vulnerable_code_not_present"] + then status := vex.status + annotate winning_statement := vex.latest().statementId + because "Respect strong vendor VEX claims." + } + + rule alert_warn_eol_runtime priority 1 { + when severity.normalized <= "Medium" + and sbom.has_tag("runtime:eol") + then warn message "Runtime marked as EOL; upgrade recommended." + because "Deprecated runtime should be upgraded." + } +} +``` + +## Commentary + +- **Severity profile** tightens vendor weights and applies exposure modifiers so internet-facing/high severity pairs escalate automatically. +- **VEX rule** only honours strong justifications, preventing weaker claims from hiding issues. +- **Warnings first** – The `alert_warn_eol_runtime` rule name ensures it sorts before the require-VEX rule, keeping alerts visible without flipping to `RequiresVex`. +- Works well as shared `tenant-global` baseline; use tenant overrides for stricter tolerant environments. + +## Try it out + +```bash +stella policy new --policy-id P-baseline --template blank --open +stella policy lint examples/policies/baseline.stella +stella policy simulate P-baseline --candidate 1 --sbom sbom:sample-prod +``` + +## Compliance checklist + +- [ ] Policy compiled via `stella policy lint` without diagnostics. +- [ ] Simulation diff reviewed against golden SBOM set. +- [ ] Approval note documents rationale before promoting to production. +- [ ] EOL runtime tags kept up to date in SBOM metadata. +- [ ] VEX vendor allow-list reviewed quarterly. 
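For teams automating the compliance checklist above, a minimal pre-promotion gate sketch follows. It only chains the `stella policy lint` and `stella policy simulate` commands already shown in "Try it out"; the additional SBOM identifier in the loop is a placeholder, not a prescribed golden set.

```bash
#!/usr/bin/env bash
# Sketch of a pre-promotion gate for the baseline policy.
# Reuses the lint/simulate commands from "Try it out"; SBOM ids below are placeholders.
set -euo pipefail

stella policy lint examples/policies/baseline.stella

# Re-run the candidate against a small golden SBOM set and keep the output for review.
for sbom in "sbom:sample-prod" "sbom:sample-edge"; do
  stella policy simulate P-baseline --candidate 1 --sbom "${sbom}" \
    | tee "simulation-${sbom#sbom:}.log"
done
```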
+ +--- + +*Last updated: 2025-10-26.* diff --git a/docs/examples/policies/baseline.stella b/docs/examples/policies/baseline.stella index 747cab23..c1a04e21 100644 --- a/docs/examples/policies/baseline.stella +++ b/docs/examples/policies/baseline.stella @@ -1,46 +1,46 @@ -policy "Baseline Production Policy" syntax "stella-dsl@1" { - metadata { - description = "Block critical, escalate high, enforce VEX justifications." - tags = ["baseline","production"] - } - - profile severity { - map vendor_weight { - source "GHSA" => +0.5 - source "OSV" => +0.0 - source "VendorX" => -0.2 - } - env exposure_adjustments { - if env.exposure == "internet" then +0.5 - if env.runtime == "legacy" then +0.3 - } - } - - rule block_critical priority 5 { - when severity.normalized >= "Critical" - then status := "blocked" - because "Critical severity must be remediated before deploy." - } - - rule escalate_high_internet { - when severity.normalized == "High" - and env.exposure == "internet" - then escalate to severity_band("Critical") - because "High severity on internet-exposed asset escalates to critical." - } - - rule require_vex_justification { - when vex.any(status in ["not_affected","fixed"]) - and vex.justification in ["component_not_present","vulnerable_code_not_present"] - then status := vex.status - annotate winning_statement := vex.latest().statementId - because "Respect strong vendor VEX claims." - } - - rule alert_warn_eol_runtime priority 1 { - when severity.normalized <= "Medium" - and sbom.has_tag("runtime:eol") - then warn message "Runtime marked as EOL; upgrade recommended." - because "Deprecated runtime should be upgraded." - } -} +policy "Baseline Production Policy" syntax "stella-dsl@1" { + metadata { + description = "Block critical, escalate high, enforce VEX justifications." + tags = ["baseline","production"] + } + + profile severity { + map vendor_weight { + source "GHSA" => +0.5 + source "OSV" => +0.0 + source "VendorX" => -0.2 + } + env exposure_adjustments { + if env.exposure == "internet" then +0.5 + if env.runtime == "legacy" then +0.3 + } + } + + rule block_critical priority 5 { + when severity.normalized >= "Critical" + then status := "blocked" + because "Critical severity must be remediated before deploy." + } + + rule escalate_high_internet { + when severity.normalized == "High" + and env.exposure == "internet" + then escalate to severity_band("Critical") + because "High severity on internet-exposed asset escalates to critical." + } + + rule require_vex_justification { + when vex.any(status in ["not_affected","fixed"]) + and vex.justification in ["component_not_present","vulnerable_code_not_present"] + then status := vex.status + annotate winning_statement := vex.latest().statementId + because "Respect strong vendor VEX claims." + } + + rule alert_warn_eol_runtime priority 1 { + when severity.normalized <= "Medium" + and sbom.has_tag("runtime:eol") + then warn message "Runtime marked as EOL; upgrade recommended." + because "Deprecated runtime should be upgraded." 
+ } +} diff --git a/docs/examples/policies/baseline.yaml b/docs/examples/policies/baseline.yaml index 55940ee6..7edf4ccd 100644 --- a/docs/examples/policies/baseline.yaml +++ b/docs/examples/policies/baseline.yaml @@ -1,34 +1,34 @@ -version: "1.0" -metadata: - description: Baseline production policy - tags: - - baseline - - production -rules: - - name: Block Critical - severity: [Critical] - action: block - - - name: Escalate High Internet - severity: [High] - environments: [internet] - action: - type: escalate - escalate: - minimumSeverity: Critical - - - name: Require VEX justification - sources: [NVD, GHSA] - action: - type: requireVex - requireVex: - vendors: [VendorX, VendorY] - justifications: - - component_not_present - - vulnerable_code_not_present - - - name: Alert warn EOL runtime - priority: 1 - severity: [Low, Medium] - tags: [runtime:eol] - action: warn +version: "1.0" +metadata: + description: Baseline production policy + tags: + - baseline + - production +rules: + - name: Block Critical + severity: [Critical] + action: block + + - name: Escalate High Internet + severity: [High] + environments: [internet] + action: + type: escalate + escalate: + minimumSeverity: Critical + + - name: Require VEX justification + sources: [NVD, GHSA] + action: + type: requireVex + requireVex: + vendors: [VendorX, VendorY] + justifications: + - component_not_present + - vulnerable_code_not_present + + - name: Alert warn EOL runtime + priority: 1 + severity: [Low, Medium] + tags: [runtime:eol] + action: warn diff --git a/docs/examples/policies/internal-only.md b/docs/examples/policies/internal-only.md index 60803cc1..6979cbb4 100644 --- a/docs/examples/policies/internal-only.md +++ b/docs/examples/policies/internal-only.md @@ -1,72 +1,72 @@ -# Internal-Only Policy Example (`internal-only.stella`) - -A relaxed profile for internal services and development environments: allow Medium severities with warnings, rely on VEX more heavily, but still block KEV/actively exploited advisories. - -```dsl -policy "Internal Only Policy" syntax "stella-dsl@1" { - metadata { - description = "Lenient policy for internal / dev tenants." - tags = ["internal","dev"] - } - - profile severity { - env exposure_adjustments { - if env.exposure == "internal" then -0.4 - if env.stage == "dev" then -0.6 - } - } - - rule block_kev priority 1 { - when advisory.has_tag("kev") - then status := "blocked" - because "Known exploited vulnerabilities must be remediated." - } - - rule allow_medium_with_warning { - when severity.normalized == "Medium" - and env.exposure == "internal" - then warn message "Medium severity permitted in internal environments." - because "Allow Medium findings with warning for internal workloads." - } - - rule accept_vendor_vex { - when vex.any(status in ["not_affected","fixed"]) - then status := vex.status - annotate justification := vex.latest().justification - because "Trust vendor VEX statements for internal scope." - } - - rule quiet_low_priority { - when severity.normalized <= "Low" - then ignore until "2026-01-01T00:00:00Z" - because "Quiet low severity until next annual remediation sweep." - } -} -``` - -## Commentary - -- Suitable for staging/dev tenants with lower blast radius. -- KEV advisories override lenient behaviour to maintain minimum security bar. -- Warnings ensure Medium findings stay visible in dashboards and CLI outputs. -- Quiet rule enforces planned clean-up date; update before expiry. 
- -## Try it out - -```bash -stella policy lint examples/policies/internal-only.stella -stella policy simulate P-internal --candidate 1 \ - --sbom sbom:internal-service --env exposure=internal --env stage=dev -``` - -## Compliance checklist - -- [ ] Tenant classified as internal-only with documented risk acceptance. -- [ ] KEV feed synced (Concelier) and tags confirmed before relying on rule. -- [ ] Quiet expiry tracked; remediation backlog updated prior to deadline. -- [ ] Developers informed that warnings still affect quality score. -- [ ] Policy not used for production or internet-exposed services. - ---- - -*Last updated: 2025-10-26.* +# Internal-Only Policy Example (`internal-only.stella`) + +A relaxed profile for internal services and development environments: allow Medium severities with warnings, rely on VEX more heavily, but still block KEV/actively exploited advisories. + +```dsl +policy "Internal Only Policy" syntax "stella-dsl@1" { + metadata { + description = "Lenient policy for internal / dev tenants." + tags = ["internal","dev"] + } + + profile severity { + env exposure_adjustments { + if env.exposure == "internal" then -0.4 + if env.stage == "dev" then -0.6 + } + } + + rule block_kev priority 1 { + when advisory.has_tag("kev") + then status := "blocked" + because "Known exploited vulnerabilities must be remediated." + } + + rule allow_medium_with_warning { + when severity.normalized == "Medium" + and env.exposure == "internal" + then warn message "Medium severity permitted in internal environments." + because "Allow Medium findings with warning for internal workloads." + } + + rule accept_vendor_vex { + when vex.any(status in ["not_affected","fixed"]) + then status := vex.status + annotate justification := vex.latest().justification + because "Trust vendor VEX statements for internal scope." + } + + rule quiet_low_priority { + when severity.normalized <= "Low" + then ignore until "2026-01-01T00:00:00Z" + because "Quiet low severity until next annual remediation sweep." + } +} +``` + +## Commentary + +- Suitable for staging/dev tenants with lower blast radius. +- KEV advisories override lenient behaviour to maintain minimum security bar. +- Warnings ensure Medium findings stay visible in dashboards and CLI outputs. +- Quiet rule enforces planned clean-up date; update before expiry. + +## Try it out + +```bash +stella policy lint examples/policies/internal-only.stella +stella policy simulate P-internal --candidate 1 \ + --sbom sbom:internal-service --env exposure=internal --env stage=dev +``` + +## Compliance checklist + +- [ ] Tenant classified as internal-only with documented risk acceptance. +- [ ] KEV feed synced (Concelier) and tags confirmed before relying on rule. +- [ ] Quiet expiry tracked; remediation backlog updated prior to deadline. +- [ ] Developers informed that warnings still affect quality score. +- [ ] Policy not used for production or internet-exposed services. + +--- + +*Last updated: 2025-10-26.* diff --git a/docs/examples/policies/internal-only.stella b/docs/examples/policies/internal-only.stella index 9457c336..25900af6 100644 --- a/docs/examples/policies/internal-only.stella +++ b/docs/examples/policies/internal-only.stella @@ -1,39 +1,39 @@ -policy "Internal Only Policy" syntax "stella-dsl@1" { - metadata { - description = "Lenient policy for internal / dev tenants." 
- tags = ["internal","dev"] - } - - profile severity { - env exposure_adjustments { - if env.exposure == "internal" then -0.4 - if env.stage == "dev" then -0.6 - } - } - - rule block_kev priority 1 { - when advisory.has_tag("kev") - then status := "blocked" - because "Known exploited vulnerabilities must be remediated." - } - - rule allow_medium_with_warning { - when severity.normalized == "Medium" - and env.exposure == "internal" - then warn message "Medium severity permitted in internal environments." - because "Allow Medium findings with warning for internal workloads." - } - - rule accept_vendor_vex { - when vex.any(status in ["not_affected","fixed"]) - then status := vex.status - annotate justification := vex.latest().justification - because "Trust vendor VEX statements for internal scope." - } - - rule quiet_low_priority { - when severity.normalized <= "Low" - then ignore until "2026-01-01T00:00:00Z" - because "Quiet low severity until next annual remediation sweep." - } -} +policy "Internal Only Policy" syntax "stella-dsl@1" { + metadata { + description = "Lenient policy for internal / dev tenants." + tags = ["internal","dev"] + } + + profile severity { + env exposure_adjustments { + if env.exposure == "internal" then -0.4 + if env.stage == "dev" then -0.6 + } + } + + rule block_kev priority 1 { + when advisory.has_tag("kev") + then status := "blocked" + because "Known exploited vulnerabilities must be remediated." + } + + rule allow_medium_with_warning { + when severity.normalized == "Medium" + and env.exposure == "internal" + then warn message "Medium severity permitted in internal environments." + because "Allow Medium findings with warning for internal workloads." + } + + rule accept_vendor_vex { + when vex.any(status in ["not_affected","fixed"]) + then status := vex.status + annotate justification := vex.latest().justification + because "Trust vendor VEX statements for internal scope." + } + + rule quiet_low_priority { + when severity.normalized <= "Low" + then ignore until "2026-01-01T00:00:00Z" + because "Quiet low severity until next annual remediation sweep." 
+ } +} diff --git a/docs/examples/policies/internal-only.yaml b/docs/examples/policies/internal-only.yaml index a7106776..a44f1a5c 100644 --- a/docs/examples/policies/internal-only.yaml +++ b/docs/examples/policies/internal-only.yaml @@ -1,31 +1,31 @@ -version: "1.0" -metadata: - description: Relaxed internal/development policy - tags: - - internal - - dev -rules: - - name: Block KEV advisories - tags: [kev] - action: block - - - name: Warn medium severity - severity: [Medium] - environments: [internal] - action: warn - - - name: Accept vendor VEX - action: - type: require_vex - requireVex: - vendors: [VendorX, VendorY] - justifications: - - component_not_present - - vulnerable_code_not_present - - - name: Quiet low severity - severity: [Low, Informational] - action: - type: ignore - until: 2026-01-01T00:00:00Z - justification: "Deferred to annual remediation cycle" +version: "1.0" +metadata: + description: Relaxed internal/development policy + tags: + - internal + - dev +rules: + - name: Block KEV advisories + tags: [kev] + action: block + + - name: Warn medium severity + severity: [Medium] + environments: [internal] + action: warn + + - name: Accept vendor VEX + action: + type: require_vex + requireVex: + vendors: [VendorX, VendorY] + justifications: + - component_not_present + - vulnerable_code_not_present + + - name: Quiet low severity + severity: [Low, Informational] + action: + type: ignore + until: 2026-01-01T00:00:00Z + justification: "Deferred to annual remediation cycle" diff --git a/docs/examples/policies/serverless.md b/docs/examples/policies/serverless.md index ac14675f..1a05ced1 100644 --- a/docs/examples/policies/serverless.md +++ b/docs/examples/policies/serverless.md @@ -1,72 +1,72 @@ -# Serverless Policy Example (`serverless.stella`) - -Optimised for short-lived serverless workloads: focus on runtime integrity, disallow vulnerable layers entirely, and permit temporary suppressions only with strict justification windows. - -```dsl -policy "Serverless Tight Policy" syntax "stella-dsl@1" { - metadata { - description = "Aggressive blocking for serverless runtimes." - tags = ["serverless","prod","strict"] - } - - profile severity { - env runtime_overrides { - if env.runtime == "serverless" then +0.7 - if env.runtime == "batch" then +0.2 - } - } - - rule block_any_high { - when severity.normalized >= "High" - then status := "blocked" - because "Serverless workloads block High+ severities." - } - - rule forbid_unpinned_base { - when sbom.has_tag("image:latest-tag") - then status := "blocked" - because "Base image must be pinned (no :latest)." - } - - rule zero_tolerance_vex { - when vex.any(status == "not_affected") - then requireVex { vendors = ["VendorX","VendorY"], justifications = ["component_not_present"] } - because "Allow not_affected only from trusted vendors with strongest justification." - } - - rule temporary_quiet { - when env.deployment == "canary" - and severity.normalized == "Medium" - then ignore until coalesce(env.quietUntil, "2025-12-31T00:00:00Z") - because "Allow short canary quiet window while fix rolls out." - } -} -``` - -## Commentary - -- Designed for serverless tenants where redeploy cost is low and failing fast is preferred. -- `forbid_unpinned_base` enforces supply-chain best practices. -- `temporary_quiet` ensures quiet windows expire automatically; require deployments to set `env.quietUntil`. -- Intended to be layered on top of baseline (override per tenant) or used standalone for serverless-only accounts. 
- -## Try it out - -```bash -stella policy lint examples/policies/serverless.stella -stella policy simulate P-serverless --candidate 1 \ - --sbom sbom:lambda-hello --env runtime=serverless --env deployment=canary -``` - -## Compliance checklist - -- [ ] Quiet window expirations tracked and documented. -- [ ] Trusted VEX vendor list reviewed quarterly. -- [ ] Deployment pipeline enforces pinned base images before approval. -- [ ] Canary deployments monitored for recurrence before ignoring Medium severity. -- [ ] Serverless teams acknowledge runbook for blocked deployments. - ---- - -*Last updated: 2025-10-26.* - +# Serverless Policy Example (`serverless.stella`) + +Optimised for short-lived serverless workloads: focus on runtime integrity, disallow vulnerable layers entirely, and permit temporary suppressions only with strict justification windows. + +```dsl +policy "Serverless Tight Policy" syntax "stella-dsl@1" { + metadata { + description = "Aggressive blocking for serverless runtimes." + tags = ["serverless","prod","strict"] + } + + profile severity { + env runtime_overrides { + if env.runtime == "serverless" then +0.7 + if env.runtime == "batch" then +0.2 + } + } + + rule block_any_high { + when severity.normalized >= "High" + then status := "blocked" + because "Serverless workloads block High+ severities." + } + + rule forbid_unpinned_base { + when sbom.has_tag("image:latest-tag") + then status := "blocked" + because "Base image must be pinned (no :latest)." + } + + rule zero_tolerance_vex { + when vex.any(status == "not_affected") + then requireVex { vendors = ["VendorX","VendorY"], justifications = ["component_not_present"] } + because "Allow not_affected only from trusted vendors with strongest justification." + } + + rule temporary_quiet { + when env.deployment == "canary" + and severity.normalized == "Medium" + then ignore until coalesce(env.quietUntil, "2025-12-31T00:00:00Z") + because "Allow short canary quiet window while fix rolls out." + } +} +``` + +## Commentary + +- Designed for serverless tenants where redeploy cost is low and failing fast is preferred. +- `forbid_unpinned_base` enforces supply-chain best practices. +- `temporary_quiet` ensures quiet windows expire automatically; require deployments to set `env.quietUntil`. +- Intended to be layered on top of baseline (override per tenant) or used standalone for serverless-only accounts. + +## Try it out + +```bash +stella policy lint examples/policies/serverless.stella +stella policy simulate P-serverless --candidate 1 \ + --sbom sbom:lambda-hello --env runtime=serverless --env deployment=canary +``` + +## Compliance checklist + +- [ ] Quiet window expirations tracked and documented. +- [ ] Trusted VEX vendor list reviewed quarterly. +- [ ] Deployment pipeline enforces pinned base images before approval. +- [ ] Canary deployments monitored for recurrence before ignoring Medium severity. +- [ ] Serverless teams acknowledge runbook for blocked deployments. + +--- + +*Last updated: 2025-10-26.* + diff --git a/docs/examples/policies/serverless.stella b/docs/examples/policies/serverless.stella index 6ec91dcb..f4d9b51f 100644 --- a/docs/examples/policies/serverless.stella +++ b/docs/examples/policies/serverless.stella @@ -1,39 +1,39 @@ -policy "Serverless Tight Policy" syntax "stella-dsl@1" { - metadata { - description = "Aggressive blocking for serverless runtimes." 
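Because `temporary_quiet` resolves `coalesce(env.quietUntil, ...)`, canary pipelines should pass the quiet window explicitly. A minimal sketch follows; it extends the simulate invocation from "Try it out" below, and the extra `--env quietUntil=...` pair is an assumption based on the documented `--env key=value` pattern rather than a confirmed flag.

```bash
# Sketch: canary simulation that supplies the quiet window consumed by temporary_quiet.
# The quietUntil key follows the --env key=value pattern used elsewhere in these examples.
QUIET_UNTIL="2025-12-01T00:00:00Z"   # placeholder expiry chosen by the canary owner

stella policy simulate P-serverless --candidate 1 \
  --sbom sbom:lambda-hello \
  --env runtime=serverless --env deployment=canary \
  --env quietUntil="${QUIET_UNTIL}"
```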
- tags = ["serverless","prod","strict"] - } - - profile severity { - env runtime_overrides { - if env.runtime == "serverless" then +0.7 - if env.runtime == "batch" then +0.2 - } - } - - rule block_any_high { - when severity.normalized >= "High" - then status := "blocked" - because "Serverless workloads block High+ severities." - } - - rule forbid_unpinned_base { - when sbom.has_tag("image:latest-tag") - then status := "blocked" - because "Base image must be pinned (no :latest)." - } - - rule zero_tolerance_vex { - when vex.any(status == "not_affected") - then requireVex { vendors = ["VendorX","VendorY"], justifications = ["component_not_present"] } - because "Allow not_affected only from trusted vendors with strongest justification." - } - - rule temporary_quiet { - when env.deployment == "canary" - and severity.normalized == "Medium" - then ignore until coalesce(env.quietUntil, "2025-12-31T00:00:00Z") - because "Allow short canary quiet window while fix rolls out." - } -} - +policy "Serverless Tight Policy" syntax "stella-dsl@1" { + metadata { + description = "Aggressive blocking for serverless runtimes." + tags = ["serverless","prod","strict"] + } + + profile severity { + env runtime_overrides { + if env.runtime == "serverless" then +0.7 + if env.runtime == "batch" then +0.2 + } + } + + rule block_any_high { + when severity.normalized >= "High" + then status := "blocked" + because "Serverless workloads block High+ severities." + } + + rule forbid_unpinned_base { + when sbom.has_tag("image:latest-tag") + then status := "blocked" + because "Base image must be pinned (no :latest)." + } + + rule zero_tolerance_vex { + when vex.any(status == "not_affected") + then requireVex { vendors = ["VendorX","VendorY"], justifications = ["component_not_present"] } + because "Allow not_affected only from trusted vendors with strongest justification." + } + + rule temporary_quiet { + when env.deployment == "canary" + and severity.normalized == "Medium" + then ignore until coalesce(env.quietUntil, "2025-12-31T00:00:00Z") + because "Allow short canary quiet window while fix rolls out." 
+ } +} + diff --git a/docs/examples/policies/serverless.yaml b/docs/examples/policies/serverless.yaml index 12ee831d..53500715 100644 --- a/docs/examples/policies/serverless.yaml +++ b/docs/examples/policies/serverless.yaml @@ -1,41 +1,41 @@ -version: "1.0" -metadata: - description: Strict policy for serverless workloads - tags: - - serverless - - prod - - strict -exceptions: - effects: - - id: suppress-canary - name: Canary Freeze - effect: suppress - routingTemplate: secops-approvers - maxDurationDays: 14 - routingTemplates: - - id: secops-approvers - authorityRouteId: governance.secops - requireMfa: true -rules: - - name: Block High And Above - severity: [High, Critical] - action: block - - - name: Forbid Unpinned Base Images - tags: [image:latest-tag] - action: block - - - name: Require Trusted VEX - action: - type: require_vex - requireVex: - vendors: [VendorX, VendorY] - justifications: [component_not_present] - - - name: Quiet Medium Canary - severity: [Medium] - environments: [canary] - action: - type: ignore - until: 2025-12-31T00:00:00Z - justification: "Temporary canary exception" +version: "1.0" +metadata: + description: Strict policy for serverless workloads + tags: + - serverless + - prod + - strict +exceptions: + effects: + - id: suppress-canary + name: Canary Freeze + effect: suppress + routingTemplate: secops-approvers + maxDurationDays: 14 + routingTemplates: + - id: secops-approvers + authorityRouteId: governance.secops + requireMfa: true +rules: + - name: Block High And Above + severity: [High, Critical] + action: block + + - name: Forbid Unpinned Base Images + tags: [image:latest-tag] + action: block + + - name: Require Trusted VEX + action: + type: require_vex + requireVex: + vendors: [VendorX, VendorY] + justifications: [component_not_present] + + - name: Quiet Medium Canary + severity: [Medium] + environments: [canary] + action: + type: ignore + until: 2025-12-31T00:00:00Z + justification: "Temporary canary exception" diff --git a/docs/examples/ui-tours.md b/docs/examples/ui-tours.md index 4d48863a..725f6b93 100644 --- a/docs/examples/ui-tours.md +++ b/docs/examples/ui-tours.md @@ -1,154 +1,154 @@ -# StellaOps Console – Guided Tours (Sprint 23) - -> **Audience:** Field enablement, Docs Guild writers, Console product leads, and onboarding facilitators. -> **Scope:** Ready-to-run walkthrough scripts that showcase the Console’s critical workflows—triage, audit evidence, and policy rollout—while reinforcing CLI parity, tenancy, and offline expectations. - -These tours stitch together the primary Console workspaces so trainers can deliver consistent demos or capture annotated media (screenshots/GIFs). Each tour lists prerequisites, live steps, CLI fallbacks, and assets to capture. Use them alongside the workspace dossiers in `/docs/ui/*.md` when preparing customer sessions or internal dry runs. - ---- - -## 1 · Prerequisites & Setup - -- **Environment:** Console deployed per [deployment guide](../deploy/console.md) with Scheduler, Policy Engine, Concelier, Excititor, SBOM Service, and Downloads manifest available. -- **Tenant & data:** Sample tenant populated with recent scans, findings, runs, and export bundles. Ensure Offline Kit snapshot exists for offline callouts. -- **Scopes:** Presenter identity must hold `ui.read`, `findings.read`, `policy:*` (read/write/simulate/approve), `runs.read`, `downloads.read`, `aoc:verify`, and `ui.telemetry` to surface telemetry banners. 
-- **Browser tooling:** Enable screen recording (1920×1080 @ 60 fps) and keyboard overlay if capturing walkthroughs. -- **CLI parity:** Have `stella` CLI configured against the same tenant; keep terminal window ready for parity steps. -- **Assets directory:** Store captures under `docs/assets/ui/tours/` (see [`README`](../assets/ui/tours/README.md)) with the naming convention `-step-.png` and `-flow.gif`. - ---- - -## 2 · Tour A — Critical Finding Triage - -**Persona:** Security analyst responding to a fresh high-severity finding. -**Goal:** Navigate from dashboard signal to remediation decision, highlighting explain trails and run evidence. - -### 2.1 Key references -- [Console overview](../ui/console-overview.md) – tenant switching, status ticker. -- [Navigation](../ui/navigation.md) – command palette, shortcuts. -- [Findings workspace](../ui/findings.md) – filters, explain drawer, exports. -- [Runs workspace](../ui/runs.md) – live progress, evidence downloads. - -### 2.2 Live walkthrough -1. **Start on Dashboard:** Show status ticker surfacing new `Critical` badge. Call out tenant pill and offline banner behaviour (§3 of console overview). -2. **Command palette jump:** Press `Ctrl/Cmd+K`, type `Findings`, hit `Enter`. Narrate keyboard accessibility from navigation guide. -3. **Apply global filters:** Open filter tray (`Shift+F`), set `Severity = Critical`, `Status = affected`, time window `Last 24h`. Mention saved view presets triggered with `Ctrl/Cmd+1`. -4. **Open explain drawer:** Select top finding, trigger `Explain` tab. Highlight rule chain, VEX impact, and evidence references (§5 of findings doc). -5. **Dive into related run:** Click `Run ID` link inside explain drawer → opens Runs detail drawer filtered by run ID. Show segmented progress SSE updates. -6. **Capture evidence:** In Runs drawer, download evidence bundle; note CLI parity `stella runs export --run `. Mention offline fallback (download queue offline banner from runs doc §10). -7. **Escalate / create ticket:** Use bulk action or comment (if configured) to demonstrate optional integration; mention Authority audit log tie-in. -8. **Wrap with CLI:** Pop terminal and run `stella findings explain --policy --finding --format markdown` to show reproducibility. - -### 2.3 Capture checklist -- `docs/assets/ui/tours/triage-step-01.png` — dashboard ticker highlighting new criticals. - ![Tour A – dashboard criticals](../assets/ui/tours/triage-step-01.png) -- `docs/assets/ui/tours/triage-step-03.png` — filter tray with severity/time window applied. - ![Tour A – filter tray](../assets/ui/tours/triage-step-03.png) -- `docs/assets/ui/tours/triage-step-04.png` — explain drawer evidence tab. - ![Tour A – explain drawer evidence](../assets/ui/tours/triage-step-04.png) -- `docs/assets/ui/tours/triage-flow.gif` — 20 s screen recording of steps 1–5 with annotations. - ![Tour A – walkthrough GIF](../assets/ui/tours/triage-flow.gif) - -### 2.4 Talking points & callouts -- Call out Aggregation-Only boundaries: findings reference Concelier/Excititor provenance, UI stays read-only. -- Mention `ui_route_render_seconds` telemetry for demos (see [observability guide](../observability/ui-telemetry.md)). -- Offline note: highlight offline banner that appears if `/console/status` heartbeat fails (§6 of console overview). - ---- - -## 3 · Tour B — Audit Evidence Export - -**Persona:** Compliance lead compiling artefacts for an external audit. -**Goal:** Retrieve signed manifests, export run/finding evidence, and verify parity with Offline Kit. 
- -### 3.1 Key references -- [Downloads workspace](../ui/downloads.md) – manifest, parity, export queue. -- [Runs workspace](../ui/runs.md) – evidence panel. -- [Console security posture](../security/console-security.md) – evidence handling. -- [CLI vs UI parity matrix](../cli-vs-ui-parity.md). - -### 3.2 Live walkthrough -1. **Open Downloads:** Use left rail or command palette to reach `/console/downloads`. Point out snapshot banner, cosign verification status. -2. **Verify manifest:** Click “Verify signature” quick action; narrate parity with `cosign verify --key manifest.json` from downloads doc §3. -3. **Compare Offline Kit:** Switch to “Offline Kits” tab, run parity check to ensure kit digest matches manifest. Demonstrate offline guidance (downloads doc §6). -4. **Queue evidence bundle:** Navigate to Runs workspace, choose relevant run, trigger “Bundle for offline” (runs doc §8). -5. **Return to Downloads → Exports tab:** Show newly generated evidence bundle with retention countdown. -6. **Download & inspect:** Open detail drawer, copy CLI command `stella runs export --run --bundle`. Mention location for storing evidence. -7. **Log parity results:** Use notes or tags to flag audit package completion (if notifications configured). -8. **CLI parity close-out:** Run `stella downloads manifest --channel stable` to mirror UI manifest retrieval. Confirm digests match. - -### 3.3 Capture checklist -- `docs/assets/ui/tours/audit-step-02.png` — manifest verification banner (green). - ![Tour B – manifest verification](../assets/ui/tours/audit-step-02.png) -- `docs/assets/ui/tours/audit-step-05.png` — exports tab showing evidence bundle ready. - ![Tour B – exports tab](../assets/ui/tours/audit-step-05.png) -- `docs/assets/ui/tours/audit-flow.gif` — 25 s capture from manifest view through export download. - ![Tour B – walkthrough GIF](../assets/ui/tours/audit-flow.gif) - -### 3.4 Talking points & callouts -- Stress deterministic manifests and Cosign signatures; reference deployment doc for TLS/CSP alignment. -- Highlight audit trail: downloads actions recorded via `ui.download.commandCopied` logs and Authority audit entries. -- Offline note: show guidance when parity check detects stale manifest; mention CLI fallback for sealed networks. - ---- - -## 4 · Tour C — Policy Rollout & Promotion - -**Persona:** Policy owner preparing and promoting a new ruleset. -**Goal:** Draft review, simulation, approval, and promotion within Console, with CLI parity. - -### 4.1 Key references -- [Policies workspace](../ui/policies.md) – simulations, approvals, promotion. -- [Policy editor](../ui/policy-editor.md) – Monaco editor, linting. -- [Runs workspace](../ui/runs.md) – policy run monitoring. -- [Security posture](../security/console-security.md) – fresh-auth and scopes. - -### 4.2 Live walkthrough -1. **Policy overview:** Open `/console/policies`, filter by “Staged” state. Highlight list columns (owners, pending approvals). -2. **Enter draft:** Select policy → open editor view. Show checklist sidebar (lint, simulation, determinism). -3. **Run lint & simulation:** Hit `Run lint`, then `Run simulation`. Narrate asynchronous progress with SSE ticker; reference CLI `stella policy simulate`. -4. **Review diff:** Open simulation diff view to compare Active vs Staged; highlight severity up/down badges (§6 of policies doc). -5. **Approval workflow:** Assign reviewer, show comment thread. Trigger fresh-auth prompt when clicking “Submit for review” (security doc §1.2). -6. 
**Promote policy:** After approvals, open promotion dialog, choose “Full run”. Emphasise policy run scheduling and RBAC. -7. **Monitor run:** Jump to Runs workspace, filter by policy run; show progress segments and findings delta metrics. -8. **Publish CLI parity:** Execute `stella policy promote --policy --revision --run-mode full` to reinforce reproducibility. - -### 4.3 Capture checklist -- `docs/assets/ui/tours/policy-step-02.png` — editor checklist with lint/simulation statuses. - ![Tour C – editor checklist](../assets/ui/tours/policy-step-02.png) -- `docs/assets/ui/tours/policy-step-04.png` — simulation diff comparing Active vs Staged. - ![Tour C – simulation diff](../assets/ui/tours/policy-step-04.png) -- `docs/assets/ui/tours/policy-flow.gif` — 30 s clip from draft view through promotion confirmation. - ![Tour C – walkthrough GIF](../assets/ui/tours/policy-flow.gif) - -### 4.4 Talking points & callouts -- Stress governance: approvals logged with correlation IDs, fresh-auth enforced. -- Mention telemetry metrics (`ui_tenant_switch_total`, policy run charts) for monitoring adoption. -- Offline note: show how promotion dialog surfaces CLI script when in sealed mode; reference offline guidance in policies doc §10. - ---- - -## 5 · Production Tips & Media Hygiene - -- **Script timing:** Keep each tour ≤ 3 minutes live demo, ≤ 30 s GIF. Include captions for accessibility. -- **Annotations:** Use consistent callouts (numbered badges, short labels) overlayed in post-processing; ensure final media compressed but legible (< 2 MB PNG, < 8 MB GIF). See `docs/assets/ui/tours/README.md` for shared template guidance. -- **Versioning:** Annotated assets should include Console build hash in metadata or caption (align with `/console/downloads` manifest version). -- **Storage:** Commit final media under `docs/assets/ui/tours/` and update `.gitattributes` if smudge filters required. Note large GIFs may need Git LFS depending on repository policy. -- **Review cadence:** Re-run tours whenever workspaces change navigation or introduce new buttons; log updates in `docs/updates/-console-tours.md` (create if absent). - ---- - -## 6 · Compliance Checklist - -- [x] Tour scripts cover triage, audit evidence, and policy rollout scenarios requested in DOCS-CONSOLE-23-017. -- [x] Each tour references authoritative workspace docs and CLI parity commands. -- [x] Capture checklist names align with `docs/assets/ui/tours/` convention. -- [x] Offline and sealed-mode notes included for every flow. -- [x] Security considerations (scopes, fresh-auth, evidence handling) highlighted. -- [x] Observability/telemetry pointers surfaced to support Ops follow-up. -- [x] Media hygiene guidance documented (assets, compression, versioning). -- [x] Document timestamp reflects Sprint 23 delivery. - ---- - -*Last updated: 2025-10-27 (Sprint 23).* +# StellaOps Console – Guided Tours (Sprint 23) + +> **Audience:** Field enablement, Docs Guild writers, Console product leads, and onboarding facilitators. +> **Scope:** Ready-to-run walkthrough scripts that showcase the Console’s critical workflows—triage, audit evidence, and policy rollout—while reinforcing CLI parity, tenancy, and offline expectations. + +These tours stitch together the primary Console workspaces so trainers can deliver consistent demos or capture annotated media (screenshots/GIFs). Each tour lists prerequisites, live steps, CLI fallbacks, and assets to capture. 
Use them alongside the workspace dossiers in `/docs/ui/*.md` when preparing customer sessions or internal dry runs. + +--- + +## 1 · Prerequisites & Setup + +- **Environment:** Console deployed per [deployment guide](../deploy/console.md) with Scheduler, Policy Engine, Concelier, Excititor, SBOM Service, and Downloads manifest available. +- **Tenant & data:** Sample tenant populated with recent scans, findings, runs, and export bundles. Ensure Offline Kit snapshot exists for offline callouts. +- **Scopes:** Presenter identity must hold `ui.read`, `findings.read`, `policy:*` (read/write/simulate/approve), `runs.read`, `downloads.read`, `aoc:verify`, and `ui.telemetry` to surface telemetry banners. +- **Browser tooling:** Enable screen recording (1920×1080 @ 60 fps) and keyboard overlay if capturing walkthroughs. +- **CLI parity:** Have `stella` CLI configured against the same tenant; keep terminal window ready for parity steps. +- **Assets directory:** Store captures under `docs/assets/ui/tours/` (see [`README`](../assets/ui/tours/README.md)) with the naming convention `-step-.png` and `-flow.gif`. + +--- + +## 2 · Tour A — Critical Finding Triage + +**Persona:** Security analyst responding to a fresh high-severity finding. +**Goal:** Navigate from dashboard signal to remediation decision, highlighting explain trails and run evidence. + +### 2.1 Key references +- [Console overview](../ui/console-overview.md) – tenant switching, status ticker. +- [Navigation](../ui/navigation.md) – command palette, shortcuts. +- [Findings workspace](../ui/findings.md) – filters, explain drawer, exports. +- [Runs workspace](../ui/runs.md) – live progress, evidence downloads. + +### 2.2 Live walkthrough +1. **Start on Dashboard:** Show status ticker surfacing new `Critical` badge. Call out tenant pill and offline banner behaviour (§3 of console overview). +2. **Command palette jump:** Press `Ctrl/Cmd+K`, type `Findings`, hit `Enter`. Narrate keyboard accessibility from navigation guide. +3. **Apply global filters:** Open filter tray (`Shift+F`), set `Severity = Critical`, `Status = affected`, time window `Last 24h`. Mention saved view presets triggered with `Ctrl/Cmd+1`. +4. **Open explain drawer:** Select top finding, trigger `Explain` tab. Highlight rule chain, VEX impact, and evidence references (§5 of findings doc). +5. **Dive into related run:** Click `Run ID` link inside explain drawer → opens Runs detail drawer filtered by run ID. Show segmented progress SSE updates. +6. **Capture evidence:** In Runs drawer, download evidence bundle; note CLI parity `stella runs export --run `. Mention offline fallback (download queue offline banner from runs doc §10). +7. **Escalate / create ticket:** Use bulk action or comment (if configured) to demonstrate optional integration; mention Authority audit log tie-in. +8. **Wrap with CLI:** Pop terminal and run `stella findings explain --policy --finding --format markdown` to show reproducibility. + +### 2.3 Capture checklist +- `docs/assets/ui/tours/triage-step-01.png` — dashboard ticker highlighting new criticals. + ![Tour A – dashboard criticals](../assets/ui/tours/triage-step-01.png) +- `docs/assets/ui/tours/triage-step-03.png` — filter tray with severity/time window applied. + ![Tour A – filter tray](../assets/ui/tours/triage-step-03.png) +- `docs/assets/ui/tours/triage-step-04.png` — explain drawer evidence tab. 
+ ![Tour A – explain drawer evidence](../assets/ui/tours/triage-step-04.png) +- `docs/assets/ui/tours/triage-flow.gif` — 20 s screen recording of steps 1–5 with annotations. + ![Tour A – walkthrough GIF](../assets/ui/tours/triage-flow.gif) + +### 2.4 Talking points & callouts +- Call out Aggregation-Only boundaries: findings reference Concelier/Excititor provenance, UI stays read-only. +- Mention `ui_route_render_seconds` telemetry for demos (see [observability guide](../observability/ui-telemetry.md)). +- Offline note: highlight offline banner that appears if `/console/status` heartbeat fails (§6 of console overview). + +--- + +## 3 · Tour B — Audit Evidence Export + +**Persona:** Compliance lead compiling artefacts for an external audit. +**Goal:** Retrieve signed manifests, export run/finding evidence, and verify parity with Offline Kit. + +### 3.1 Key references +- [Downloads workspace](../ui/downloads.md) – manifest, parity, export queue. +- [Runs workspace](../ui/runs.md) – evidence panel. +- [Console security posture](../security/console-security.md) – evidence handling. +- [CLI vs UI parity matrix](../cli-vs-ui-parity.md). + +### 3.2 Live walkthrough +1. **Open Downloads:** Use left rail or command palette to reach `/console/downloads`. Point out snapshot banner, cosign verification status. +2. **Verify manifest:** Click “Verify signature” quick action; narrate parity with `cosign verify --key manifest.json` from downloads doc §3. +3. **Compare Offline Kit:** Switch to “Offline Kits” tab, run parity check to ensure kit digest matches manifest. Demonstrate offline guidance (downloads doc §6). +4. **Queue evidence bundle:** Navigate to Runs workspace, choose relevant run, trigger “Bundle for offline” (runs doc §8). +5. **Return to Downloads → Exports tab:** Show newly generated evidence bundle with retention countdown. +6. **Download & inspect:** Open detail drawer, copy CLI command `stella runs export --run --bundle`. Mention location for storing evidence. +7. **Log parity results:** Use notes or tags to flag audit package completion (if notifications configured). +8. **CLI parity close-out:** Run `stella downloads manifest --channel stable` to mirror UI manifest retrieval. Confirm digests match. + +### 3.3 Capture checklist +- `docs/assets/ui/tours/audit-step-02.png` — manifest verification banner (green). + ![Tour B – manifest verification](../assets/ui/tours/audit-step-02.png) +- `docs/assets/ui/tours/audit-step-05.png` — exports tab showing evidence bundle ready. + ![Tour B – exports tab](../assets/ui/tours/audit-step-05.png) +- `docs/assets/ui/tours/audit-flow.gif` — 25 s capture from manifest view through export download. + ![Tour B – walkthrough GIF](../assets/ui/tours/audit-flow.gif) + +### 3.4 Talking points & callouts +- Stress deterministic manifests and Cosign signatures; reference deployment doc for TLS/CSP alignment. +- Highlight audit trail: downloads actions recorded via `ui.download.commandCopied` logs and Authority audit entries. +- Offline note: show guidance when parity check detects stale manifest; mention CLI fallback for sealed networks. + +--- + +## 4 · Tour C — Policy Rollout & Promotion + +**Persona:** Policy owner preparing and promoting a new ruleset. +**Goal:** Draft review, simulation, approval, and promotion within Console, with CLI parity. + +### 4.1 Key references +- [Policies workspace](../ui/policies.md) – simulations, approvals, promotion. +- [Policy editor](../ui/policy-editor.md) – Monaco editor, linting. 
+- [Runs workspace](../ui/runs.md) – policy run monitoring. +- [Security posture](../security/console-security.md) – fresh-auth and scopes. + +### 4.2 Live walkthrough +1. **Policy overview:** Open `/console/policies`, filter by “Staged” state. Highlight list columns (owners, pending approvals). +2. **Enter draft:** Select policy → open editor view. Show checklist sidebar (lint, simulation, determinism). +3. **Run lint & simulation:** Hit `Run lint`, then `Run simulation`. Narrate asynchronous progress with SSE ticker; reference CLI `stella policy simulate`. +4. **Review diff:** Open simulation diff view to compare Active vs Staged; highlight severity up/down badges (§6 of policies doc). +5. **Approval workflow:** Assign reviewer, show comment thread. Trigger fresh-auth prompt when clicking “Submit for review” (security doc §1.2). +6. **Promote policy:** After approvals, open promotion dialog, choose “Full run”. Emphasise policy run scheduling and RBAC. +7. **Monitor run:** Jump to Runs workspace, filter by policy run; show progress segments and findings delta metrics. +8. **Publish CLI parity:** Execute `stella policy promote --policy --revision --run-mode full` to reinforce reproducibility. + +### 4.3 Capture checklist +- `docs/assets/ui/tours/policy-step-02.png` — editor checklist with lint/simulation statuses. + ![Tour C – editor checklist](../assets/ui/tours/policy-step-02.png) +- `docs/assets/ui/tours/policy-step-04.png` — simulation diff comparing Active vs Staged. + ![Tour C – simulation diff](../assets/ui/tours/policy-step-04.png) +- `docs/assets/ui/tours/policy-flow.gif` — 30 s clip from draft view through promotion confirmation. + ![Tour C – walkthrough GIF](../assets/ui/tours/policy-flow.gif) + +### 4.4 Talking points & callouts +- Stress governance: approvals logged with correlation IDs, fresh-auth enforced. +- Mention telemetry metrics (`ui_tenant_switch_total`, policy run charts) for monitoring adoption. +- Offline note: show how promotion dialog surfaces CLI script when in sealed mode; reference offline guidance in policies doc §10. + +--- + +## 5 · Production Tips & Media Hygiene + +- **Script timing:** Keep each tour ≤ 3 minutes live demo, ≤ 30 s GIF. Include captions for accessibility. +- **Annotations:** Use consistent callouts (numbered badges, short labels) overlayed in post-processing; ensure final media compressed but legible (< 2 MB PNG, < 8 MB GIF). See `docs/assets/ui/tours/README.md` for shared template guidance. +- **Versioning:** Annotated assets should include Console build hash in metadata or caption (align with `/console/downloads` manifest version). +- **Storage:** Commit final media under `docs/assets/ui/tours/` and update `.gitattributes` if smudge filters required. Note large GIFs may need Git LFS depending on repository policy. +- **Review cadence:** Re-run tours whenever workspaces change navigation or introduce new buttons; log updates in `docs/updates/-console-tours.md` (create if absent). + +--- + +## 6 · Compliance Checklist + +- [x] Tour scripts cover triage, audit evidence, and policy rollout scenarios requested in DOCS-CONSOLE-23-017. +- [x] Each tour references authoritative workspace docs and CLI parity commands. +- [x] Capture checklist names align with `docs/assets/ui/tours/` convention. +- [x] Offline and sealed-mode notes included for every flow. +- [x] Security considerations (scopes, fresh-auth, evidence handling) highlighted. +- [x] Observability/telemetry pointers surfaced to support Ops follow-up. 
+- [x] Media hygiene guidance documented (assets, compression, versioning). +- [x] Document timestamp reflects Sprint 23 delivery. + +--- + +*Last updated: 2025-10-27 (Sprint 23).* diff --git a/docs/export-center/api.md b/docs/export-center/api.md index 79c2dd55..838744e6 100644 --- a/docs/export-center/api.md +++ b/docs/export-center/api.md @@ -1,337 +1,337 @@ -# Export Center REST API - -> **Audience:** Platform integrators, Console/CLI developers, and automation engineers orchestrating export runs. -> **Base route:** `/api/export/*` behind the StellaOps gateway; requires Authority-issued tokens with export scopes. - -This reference describes the Export Center API introduced in Export Center Phase 1 (Epic 10) and extended in Phase 2. Use it alongside the [Export Center Architecture](architecture.md) and [Profiles](profiles.md) guides for service-level semantics. - -> Status: Endpoint implementation lands with `EXPORT-SVC-35-006` (Sprint 35) and related follow-on tasks. As of the current build the WebService hosts only the template stub; use this contract for coordination and update once the API is wired. - -## 1. Authentication and headers - -- **Authorization:** Bearer tokens in `Authorization: Bearer ` paired with DPoP proof. Required scopes per endpoint: - - `export:profile:manage` for profile CRUD. - - `export:run` to submit and cancel runs. - - `export:read` to list and inspect runs. - - `export:download` for bundle downloads and manifests. -- **Tenant context:** Provide `X-Stella-Tenant` when the token carries multiple tenants; defaults to token tenant otherwise. -- **Idempotency:** Mutating endpoints accept `Idempotency-Key` (UUID). Retrying with the same key returns the original result. -- **Rate limits and quotas:** Responses include `X-Stella-Quota-Limit`, `X-Stella-Quota-Remaining`, and `X-Stella-Quota-Reset`. Exceeding quotas returns `429 Too Many Requests` with `ERR_EXPORT_QUOTA`. -- **Content negotiation:** Requests and responses use `application/json; charset=utf-8` unless otherwise stated. Downloads stream binary content with profile-specific media types. -- **SSE:** Event streams set `Content-Type: text/event-stream` and keep connections alive with comment heartbeats every 15 seconds. - -## 2. Error model - -Errors follow standard HTTP codes with structured payloads: - -```json -{ - "code": "ERR_EXPORT_002", - "message": "Profile not found for tenant acme", - "details": [], - "traceId": "01J9N4Y4K2XY8C5V7T2S", - "timestamp": "2025-10-29T13:42:11Z" -} -``` - -| Code | Description | Typical HTTP status | Notes | -|------|-------------|---------------------|-------| -| `ERR_EXPORT_001` | Validation failure (selectors, configuration) | 400 | `details` enumerates offending fields. | -| `ERR_EXPORT_002` | Profile missing or not accessible for tenant | 404 | Returned on run submission or profile fetch. | -| `ERR_EXPORT_003` | Concurrency or quota exceeded | 429 | Includes `retryAfterSeconds` in `details`. | -| `ERR_EXPORT_004` | Adapter failure (schema mismatch, upstream outage) | 502 | Worker logs contain adapter error reason. | -| `ERR_EXPORT_005` | Signing or KMS error | 500 | Run marked failed with `errorCode=signing`. | -| `ERR_EXPORT_006` | Distribution failure (HTTP, OCI, object storage) | 502 | `details` lists failing distribution driver. | -| `ERR_EXPORT_007` | Run canceled or expired | 409 | Includes cancel author and timestamp. | -| `ERR_EXPORT_BASE_MISSING` | Base manifest for delta exports not found | 400 | Specific to `mirror:delta`. 
| -| `ERR_EXPORT_EMPTY` | No records matched selectors (when `allowEmpty=false`) | 422 | Useful for guard-railled automation. | -| `ERR_EXPORT_QUOTA` | Daily quota exhausted | 429 | Always paired with quota headers. | - -All responses include `traceId` for correlation with logs and metrics. - -## 3. Profiles endpoints - -### 3.1 List profiles - -``` -GET /api/export/profiles?kind=json&variant=raw&page=1&pageSize=20 -Scopes: export:read -``` - -Returns tenant-scoped profiles. Response headers: `X-Total-Count`, `Link` for pagination. - -**Response** - -```json -{ - "items": [ - { - "profileId": "prof-json-raw", - "name": "Daily JSON Raw", - "kind": "json", - "variant": "raw", - "distribution": ["http", "object"], - "retention": {"mode": "days", "value": 14}, - "createdAt": "2025-10-23T08:00:00Z", - "createdBy": "user:ops" - } - ], - "page": 1, - "pageSize": 20 -} -``` - -### 3.2 Get a profile - -``` -GET /api/export/profiles/{profileId} -Scopes: export:read -``` - -Returns full configuration, including `config` payload, distribution options, and metadata. - -### 3.3 Create a profile - -``` -POST /api/export/profiles -Scopes: export:profile:manage -``` - -**Request** - -```json -{ - "profileId": "prof-airgap-mirror", - "name": "Airgap Mirror Weekly", - "kind": "mirror", - "variant": "full", - "include": ["advisories", "vex", "sboms", "policy"], - "distribution": ["http", "object"], - "encryption": { - "enabled": true, - "recipientKeys": ["age1tenantkey..."], - "strict": false - }, - "retention": {"mode": "days", "value": 30} -} -``` - -**Response 201** - -```json -{ - "profileId": "prof-airgap-mirror", - "version": 1, - "createdAt": "2025-10-29T12:05:22Z", - "createdBy": "user:ops", - "status": "active" -} -``` - -### 3.4 Update profile metadata - -``` -PATCH /api/export/profiles/{profileId} -Scopes: export:profile:manage -``` - -Allows renaming, toggling distribution switches, or updating retention. Structural configuration updates (kind/variant/include) create a new revision; the API returns `revisionCreated=true` and the new `profileId` (e.g., `prof-airgap-mirror@2`). - -### 3.5 Archive profile - -``` -POST /api/export/profiles/{profileId}:archive -Scopes: export:profile:manage -``` - -Marks profile as inactive; existing runs remain accessible. Use `:restore` to reactivate. - -## 4. Run management - -### 4.1 Submit an export run - -``` -POST /api/export/runs -Scopes: export:run -``` - -**Request** - -```json -{ - "profileId": "prof-json-raw", - "selectors": { - "tenants": ["acme"], - "timeWindow": { - "from": "2025-10-01T00:00:00Z", - "to": "2025-10-29T00:00:00Z" - }, - "products": ["registry.example.com/app:*"], - "sboms": ["sbom:S-1001", "sbom:S-2004"] - }, - "policySnapshotId": "policy-snap-42", - "options": { - "allowEmpty": false, - "priority": "standard" - } -} -``` - -**Response 202** - -```json -{ - "runId": "run-20251029-01", - "status": "pending", - "profileId": "prof-json-raw", - "createdAt": "2025-10-29T12:12:11Z", - "createdBy": "user:ops", - "selectors": { "...": "..." }, - "links": { - "self": "/api/export/runs/run-20251029-01", - "events": "/api/export/runs/run-20251029-01/events" - } -} -``` - -### 4.2 List runs - -``` -GET /api/export/runs?status=active&profileId=prof-json-raw&page=1&pageSize=10 -Scopes: export:read -``` - -Returns latest runs with pagination. Each item includes summary counts, duration, and last event. 
- -### 4.3 Get run status - -``` -GET /api/export/runs/{runId} -Scopes: export:read -``` - -Response fields: - -| Field | Description | -|-------|-------------| -| `status` | `pending`, `running`, `success`, `failed`, `canceled`. | -| `progress` | Object with `adapters`, `bytesWritten`, `recordsProcessed`. | -| `errorCode` | Populated when `status=failed` (`signing`, `distribution`, etc). | -| `policySnapshotId` | Returned for policy-aware profiles. | -| `distributions` | List of available distribution descriptors (type, location, sha256, expiresAt). | - -### 4.4 Cancel a run - -``` -POST /api/export/runs/{runId}:cancel -Scopes: export:run -``` - -Body optional (`{"reason": "Aborted due to incident INC-123"}`). Returns 202 and pushes `run.canceled` event. - -## 5. Events and telemetry - -### 5.1 Server-sent events - -``` -GET /api/export/runs/{runId}/events -Scopes: export:read -Accept: text/event-stream -``` - -Event payload example: - -``` -event: run.progress -data: {"runId":"run-20251029-01","phase":"adapter","adapter":"json","records":1024,"bytes":7340032,"timestamp":"2025-10-29T12:13:15Z"} -``` - -Event types: - -| Event | Meaning | -|-------|---------| -| `run.accepted` | Planner accepted job and queued with Orchestrator. | -| `run.progress` | Periodic updates with phase, adapter, counts. | -| `run.distribution` | Distribution driver finished (includes descriptor). | -| `run.signed` | Signing completed successfully. | -| `run.succeeded` | Run marked `success`. | -| `run.failed` | Run failed; payload includes `errorCode`. | -| `run.canceled` | Run canceled; includes `canceledBy`. | - -SSE heartbeats (`: ping`) keep long-lived connections alive and should be ignored by clients. - -### 5.2 Audit events - -`GET /api/export/runs/{runId}/events?format=audit` returns the same event stream in newline-delimited JSON for offline ingestion. - -## 6. Download endpoints - -### 6.1 Bundle download - -``` -GET /api/export/runs/{runId}/download -Scopes: export:download -``` - -Streams the primary bundle (tarball, zip, or profile-specific layout). Headers: - -- `Content-Disposition: attachment; filename="export-run-20251029-01.tar.zst"` -- `X-Export-Digest: sha256:...` -- `X-Export-Size: 73482019` -- `X-Export-Encryption: age` (when mirror encryption enabled) - -Supports HTTP range requests for resume functionality. If no bundle exists yet, responds `409` with `ERR_EXPORT_007`. - -### 6.2 Manifest download - -``` -GET /api/export/runs/{runId}/manifest -Scopes: export:download -``` - -Returns signed `export.json`. To fetch the detached signature, append `?signature=true`. - -### 6.3 Provenance download - -``` -GET /api/export/runs/{runId}/provenance -Scopes: export:download -``` - -Returns signed `provenance.json`. Supports `?signature=true`. Provenance includes attestation subject digests, policy snapshot ids, adapter versions, and KMS key identifiers. - -### 6.4 Distribution descriptors - -``` -GET /api/export/runs/{runId}/distributions -Scopes: export:read -``` - -Lists all registered distribution targets (HTTP, OCI, object storage). Each item includes `type`, `location`, `sha256`, `sizeBytes`, and `expiresAt`. - -## 7. Webhook hand-off - -Exports can notify external systems once a run succeeds by registering an HTTP webhook: - -``` -POST /api/export/webhooks -Scopes: export:profile:manage -``` - -Payload includes `targetUrl`, `events` (e.g., `run.succeeded`), and optional secret for HMAC signatures. Webhook deliveries sign payloads with `X-Stella-Signature` header (`sha256=...`). 
Retries follow exponential backoff with dead-letter capture in `export_events`. - -## 8. Observability - -- **Metrics endpoint:** `/metrics` (service-local) exposes Prometheus metrics listed in [Architecture](architecture.md#observability). -- **Tracing:** When `traceparent` header is provided, worker spans join the calling trace. -- **Run lookup by trace:** Use `GET /api/export/runs?traceId={id}` when troubleshooting distributed traces. - -## 9. Related documentation - -- [Export Center Overview](overview.md) -- [Export Center Architecture](architecture.md) -- [Export Center Profiles](profiles.md) -- [Export Center CLI Guide](cli.md) *(companion document)* -- [Aggregation-Only Contract reference](../ingestion/aggregation-only-contract.md) - -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. +# Export Center REST API + +> **Audience:** Platform integrators, Console/CLI developers, and automation engineers orchestrating export runs. +> **Base route:** `/api/export/*` behind the StellaOps gateway; requires Authority-issued tokens with export scopes. + +This reference describes the Export Center API introduced in Export Center Phase 1 (Epic 10) and extended in Phase 2. Use it alongside the [Export Center Architecture](architecture.md) and [Profiles](profiles.md) guides for service-level semantics. + +> Status: Endpoint implementation lands with `EXPORT-SVC-35-006` (Sprint 35) and related follow-on tasks. As of the current build the WebService hosts only the template stub; use this contract for coordination and update once the API is wired. + +## 1. Authentication and headers + +- **Authorization:** Bearer tokens in `Authorization: Bearer ` paired with DPoP proof. Required scopes per endpoint: + - `export:profile:manage` for profile CRUD. + - `export:run` to submit and cancel runs. + - `export:read` to list and inspect runs. + - `export:download` for bundle downloads and manifests. +- **Tenant context:** Provide `X-Stella-Tenant` when the token carries multiple tenants; defaults to token tenant otherwise. +- **Idempotency:** Mutating endpoints accept `Idempotency-Key` (UUID). Retrying with the same key returns the original result. +- **Rate limits and quotas:** Responses include `X-Stella-Quota-Limit`, `X-Stella-Quota-Remaining`, and `X-Stella-Quota-Reset`. Exceeding quotas returns `429 Too Many Requests` with `ERR_EXPORT_QUOTA`. +- **Content negotiation:** Requests and responses use `application/json; charset=utf-8` unless otherwise stated. Downloads stream binary content with profile-specific media types. +- **SSE:** Event streams set `Content-Type: text/event-stream` and keep connections alive with comment heartbeats every 15 seconds. + +## 2. Error model + +Errors follow standard HTTP codes with structured payloads: + +```json +{ + "code": "ERR_EXPORT_002", + "message": "Profile not found for tenant acme", + "details": [], + "traceId": "01J9N4Y4K2XY8C5V7T2S", + "timestamp": "2025-10-29T13:42:11Z" +} +``` + +| Code | Description | Typical HTTP status | Notes | +|------|-------------|---------------------|-------| +| `ERR_EXPORT_001` | Validation failure (selectors, configuration) | 400 | `details` enumerates offending fields. | +| `ERR_EXPORT_002` | Profile missing or not accessible for tenant | 404 | Returned on run submission or profile fetch. | +| `ERR_EXPORT_003` | Concurrency or quota exceeded | 429 | Includes `retryAfterSeconds` in `details`. 
| +| `ERR_EXPORT_004` | Adapter failure (schema mismatch, upstream outage) | 502 | Worker logs contain adapter error reason. | +| `ERR_EXPORT_005` | Signing or KMS error | 500 | Run marked failed with `errorCode=signing`. | +| `ERR_EXPORT_006` | Distribution failure (HTTP, OCI, object storage) | 502 | `details` lists failing distribution driver. | +| `ERR_EXPORT_007` | Run canceled or expired | 409 | Includes cancel author and timestamp. | +| `ERR_EXPORT_BASE_MISSING` | Base manifest for delta exports not found | 400 | Specific to `mirror:delta`. | +| `ERR_EXPORT_EMPTY` | No records matched selectors (when `allowEmpty=false`) | 422 | Useful for guard-railled automation. | +| `ERR_EXPORT_QUOTA` | Daily quota exhausted | 429 | Always paired with quota headers. | + +All responses include `traceId` for correlation with logs and metrics. + +## 3. Profiles endpoints + +### 3.1 List profiles + +``` +GET /api/export/profiles?kind=json&variant=raw&page=1&pageSize=20 +Scopes: export:read +``` + +Returns tenant-scoped profiles. Response headers: `X-Total-Count`, `Link` for pagination. + +**Response** + +```json +{ + "items": [ + { + "profileId": "prof-json-raw", + "name": "Daily JSON Raw", + "kind": "json", + "variant": "raw", + "distribution": ["http", "object"], + "retention": {"mode": "days", "value": 14}, + "createdAt": "2025-10-23T08:00:00Z", + "createdBy": "user:ops" + } + ], + "page": 1, + "pageSize": 20 +} +``` + +### 3.2 Get a profile + +``` +GET /api/export/profiles/{profileId} +Scopes: export:read +``` + +Returns full configuration, including `config` payload, distribution options, and metadata. + +### 3.3 Create a profile + +``` +POST /api/export/profiles +Scopes: export:profile:manage +``` + +**Request** + +```json +{ + "profileId": "prof-airgap-mirror", + "name": "Airgap Mirror Weekly", + "kind": "mirror", + "variant": "full", + "include": ["advisories", "vex", "sboms", "policy"], + "distribution": ["http", "object"], + "encryption": { + "enabled": true, + "recipientKeys": ["age1tenantkey..."], + "strict": false + }, + "retention": {"mode": "days", "value": 30} +} +``` + +**Response 201** + +```json +{ + "profileId": "prof-airgap-mirror", + "version": 1, + "createdAt": "2025-10-29T12:05:22Z", + "createdBy": "user:ops", + "status": "active" +} +``` + +### 3.4 Update profile metadata + +``` +PATCH /api/export/profiles/{profileId} +Scopes: export:profile:manage +``` + +Allows renaming, toggling distribution switches, or updating retention. Structural configuration updates (kind/variant/include) create a new revision; the API returns `revisionCreated=true` and the new `profileId` (e.g., `prof-airgap-mirror@2`). + +### 3.5 Archive profile + +``` +POST /api/export/profiles/{profileId}:archive +Scopes: export:profile:manage +``` + +Marks profile as inactive; existing runs remain accessible. Use `:restore` to reactivate. + +## 4. 
Run management + +### 4.1 Submit an export run + +``` +POST /api/export/runs +Scopes: export:run +``` + +**Request** + +```json +{ + "profileId": "prof-json-raw", + "selectors": { + "tenants": ["acme"], + "timeWindow": { + "from": "2025-10-01T00:00:00Z", + "to": "2025-10-29T00:00:00Z" + }, + "products": ["registry.example.com/app:*"], + "sboms": ["sbom:S-1001", "sbom:S-2004"] + }, + "policySnapshotId": "policy-snap-42", + "options": { + "allowEmpty": false, + "priority": "standard" + } +} +``` + +**Response 202** + +```json +{ + "runId": "run-20251029-01", + "status": "pending", + "profileId": "prof-json-raw", + "createdAt": "2025-10-29T12:12:11Z", + "createdBy": "user:ops", + "selectors": { "...": "..." }, + "links": { + "self": "/api/export/runs/run-20251029-01", + "events": "/api/export/runs/run-20251029-01/events" + } +} +``` + +### 4.2 List runs + +``` +GET /api/export/runs?status=active&profileId=prof-json-raw&page=1&pageSize=10 +Scopes: export:read +``` + +Returns latest runs with pagination. Each item includes summary counts, duration, and last event. + +### 4.3 Get run status + +``` +GET /api/export/runs/{runId} +Scopes: export:read +``` + +Response fields: + +| Field | Description | +|-------|-------------| +| `status` | `pending`, `running`, `success`, `failed`, `canceled`. | +| `progress` | Object with `adapters`, `bytesWritten`, `recordsProcessed`. | +| `errorCode` | Populated when `status=failed` (`signing`, `distribution`, etc). | +| `policySnapshotId` | Returned for policy-aware profiles. | +| `distributions` | List of available distribution descriptors (type, location, sha256, expiresAt). | + +### 4.4 Cancel a run + +``` +POST /api/export/runs/{runId}:cancel +Scopes: export:run +``` + +Body optional (`{"reason": "Aborted due to incident INC-123"}`). Returns 202 and pushes `run.canceled` event. + +## 5. Events and telemetry + +### 5.1 Server-sent events + +``` +GET /api/export/runs/{runId}/events +Scopes: export:read +Accept: text/event-stream +``` + +Event payload example: + +``` +event: run.progress +data: {"runId":"run-20251029-01","phase":"adapter","adapter":"json","records":1024,"bytes":7340032,"timestamp":"2025-10-29T12:13:15Z"} +``` + +Event types: + +| Event | Meaning | +|-------|---------| +| `run.accepted` | Planner accepted job and queued with Orchestrator. | +| `run.progress` | Periodic updates with phase, adapter, counts. | +| `run.distribution` | Distribution driver finished (includes descriptor). | +| `run.signed` | Signing completed successfully. | +| `run.succeeded` | Run marked `success`. | +| `run.failed` | Run failed; payload includes `errorCode`. | +| `run.canceled` | Run canceled; includes `canceledBy`. | + +SSE heartbeats (`: ping`) keep long-lived connections alive and should be ignored by clients. + +### 5.2 Audit events + +`GET /api/export/runs/{runId}/events?format=audit` returns the same event stream in newline-delimited JSON for offline ingestion. + +## 6. Download endpoints + +### 6.1 Bundle download + +``` +GET /api/export/runs/{runId}/download +Scopes: export:download +``` + +Streams the primary bundle (tarball, zip, or profile-specific layout). Headers: + +- `Content-Disposition: attachment; filename="export-run-20251029-01.tar.zst"` +- `X-Export-Digest: sha256:...` +- `X-Export-Size: 73482019` +- `X-Export-Encryption: age` (when mirror encryption enabled) + +Supports HTTP range requests for resume functionality. If no bundle exists yet, responds `409` with `ERR_EXPORT_007`. 
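+
+For scripted retrievals outside the CLI, a minimal `curl` sketch of the resume-and-verify flow is shown below. It assumes the default gateway root and a bearer token in `STELLA_TOKEN` (the same defaults the CLI guide uses), and it omits the DPoP proof from §1 for brevity; `stella export download --resume` wraps the same behaviour.
+
+```bash
+#!/usr/bin/env bash
+set -euo pipefail
+
+RUN_ID="run-20251029-01"        # run id returned by POST /api/export/runs (§4.1)
+BASE="https://stella.local"     # assumed gateway root; override per deployment
+OUT="export-${RUN_ID}.tar.zst"
+
+# Resume-aware download: with `-C -`, curl sends a Range request covering any existing partial file.
+curl --fail --location -C - \
+  -H "Authorization: Bearer ${STELLA_TOKEN}" \
+  -D headers.txt \
+  -o "${OUT}" \
+  "${BASE}/api/export/runs/${RUN_ID}/download"
+
+# Compare the advertised digest with the downloaded bundle.
+digest=$(tr -d '\r' < headers.txt | awk -F': ' 'tolower($1) == "x-export-digest" {print $2}')
+echo "${digest#sha256:}  ${OUT}" | sha256sum --check -
+```
+
+The signed `export.json` manifest (§6.2) carries the same SHA-256 digests, so the final check can also be driven from the manifest in offline workflows.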
+ +### 6.2 Manifest download + +``` +GET /api/export/runs/{runId}/manifest +Scopes: export:download +``` + +Returns signed `export.json`. To fetch the detached signature, append `?signature=true`. + +### 6.3 Provenance download + +``` +GET /api/export/runs/{runId}/provenance +Scopes: export:download +``` + +Returns signed `provenance.json`. Supports `?signature=true`. Provenance includes attestation subject digests, policy snapshot ids, adapter versions, and KMS key identifiers. + +### 6.4 Distribution descriptors + +``` +GET /api/export/runs/{runId}/distributions +Scopes: export:read +``` + +Lists all registered distribution targets (HTTP, OCI, object storage). Each item includes `type`, `location`, `sha256`, `sizeBytes`, and `expiresAt`. + +## 7. Webhook hand-off + +Exports can notify external systems once a run succeeds by registering an HTTP webhook: + +``` +POST /api/export/webhooks +Scopes: export:profile:manage +``` + +Payload includes `targetUrl`, `events` (e.g., `run.succeeded`), and optional secret for HMAC signatures. Webhook deliveries sign payloads with `X-Stella-Signature` header (`sha256=...`). Retries follow exponential backoff with dead-letter capture in `export_events`. + +## 8. Observability + +- **Metrics endpoint:** `/metrics` (service-local) exposes Prometheus metrics listed in [Architecture](architecture.md#observability). +- **Tracing:** When `traceparent` header is provided, worker spans join the calling trace. +- **Run lookup by trace:** Use `GET /api/export/runs?traceId={id}` when troubleshooting distributed traces. + +## 9. Related documentation + +- [Export Center Overview](overview.md) +- [Export Center Architecture](architecture.md) +- [Export Center Profiles](profiles.md) +- [Export Center CLI Guide](cli.md) *(companion document)* +- [Aggregation-Only Contract reference](../ingestion/aggregation-only-contract.md) + +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. diff --git a/docs/export-center/architecture.md b/docs/export-center/architecture.md index 96052b92..7775250d 100644 --- a/docs/export-center/architecture.md +++ b/docs/export-center/architecture.md @@ -1,125 +1,125 @@ -# Export Center Architecture - -The Export Center is the dedicated service layer that packages StellaOps evidence and policy overlays into reproducible bundles. It runs as a multi-surface API backed by asynchronous workers and format adapters, enforcing Aggregation-Only Contract (AOC) guardrails while providing deterministic manifests, signing, and distribution paths. - -## Runtime topology -- **Export Center API (`StellaOps.ExportCenter.WebService`).** Receives profile CRUD, export run requests, status queries, and download streams through the unified Web API gateway. Enforces tenant scopes, RBAC, quotas, and concurrency guards. -- **Export Center Worker (`StellaOps.ExportCenter.Worker`).** Dequeues export jobs from the Orchestrator, resolves selectors, invokes adapters, and writes manifests and bundle artefacts. Stateless; scales horizontally. -- **Backing stores.** - - MongoDB collections: `export_profiles`, `export_runs`, `export_inputs`, `export_distributions`, `export_events`. - - Object storage bucket or filesystem for staging bundle payloads. - - Optional registry/object storage credentials injected via Authority-scoped secrets. -- **Integration peers.** - - **Findings Ledger** for advisory, VEX, SBOM payload streaming. 
- - **Policy Engine** for deterministic policy snapshots and evaluated findings. - - **Orchestrator** for job scheduling, quotas, and telemetry fan-out. - - **Authority** for tenant-aware access tokens and KMS key references. - - **Console & CLI** as presentation surfaces consuming the API. - -## Job lifecycle -1. **Profile selection.** Operator or automation picks a profile (`json:raw`, `json:policy`, `trivy:db`, `trivy:java-db`, `mirror:full`, `mirror:delta`) and submits scope selectors (tenant, time window, products, SBOM subjects, ecosystems). See `docs/export-center/profiles.md` for profile definitions and configuration fields. -2. **Planner resolution.** API validates selectors, expands include/exclude lists, and writes a pending `export_run` with immutable parameters and deterministic ordering hints. -3. **Orchestrator dispatch.** `export_run` triggers a job lease via Orchestrator with quotas per tenant/profile and concurrency caps (default 4 active per tenant). -4. **Worker execution.** Worker streams data from Findings Ledger and Policy Engine using pagination cursors. Adapters write canonical payloads to staging storage, compute checksums, and emit streaming progress events (SSE). -5. **Manifest and provenance emission.** Worker writes `export.json` and `provenance.json`, signs them with configured KMS keys (cosign-compatible), and uploads signatures alongside content. -6. **Distribution registration.** Worker records available distribution methods (download URL, OCI reference, object storage path), raises completion/failure events, and exposes metrics/logs. -7. **Download & verification.** Clients download bundles or pull OCI artefacts, verify signatures, and consume provenance to trace source artefacts. - -Cancellation requests mark runs as `aborted` and cause workers to stop iterating sources; partially written files are destroyed and the run is marked with an audit entry. - -## Core components -### API surface -- Detailed request and response payloads are catalogued in `docs/export-center/api.md`. -- **Profiles API.** - - `GET /api/export/profiles`: list tenant-scoped profiles. - - `POST /api/export/profiles`: create custom profiles (variants of JSON, Trivy, mirror) with validated configuration schema. - - `PATCH /api/export/profiles/{id}`: update metadata; config changes clone new revision to preserve determinism. -- **Runs API.** - - `POST /api/export/runs`: submit export run for a profile with selectors and options (policy snapshot id, mirror base manifest). - - `GET /api/export/runs/{id}`: status, progress counters, provenance summary. - - `GET /api/export/runs/{id}/events`: server-sent events with state transitions, adapter milestones, signing status. - - `POST /api/export/runs/{id}/cancel`: cooperative cancellation with audit logging. -- **Downloads API.** - - `GET /api/export/runs/{id}/download`: streaming download with range support and checksum trailers. - - `GET /api/export/runs/{id}/manifest`: signed `export.json`. - - `GET /api/export/runs/{id}/provenance`: signed `provenance.json`. - -All endpoints require Authority-issued JWT + DPoP tokens with scopes `export:run`, `export:read`, and tenant claim alignment. Rate-limiting and quotas surface via `X-Stella-Quota-*` headers. - -### Worker pipeline -- **Input resolvers.** Query Findings Ledger and Policy Engine using stable pagination (Mongo `_id` ascending, or resume tokens for change streams). Selector expressions compile into Mongo filter fragments and/or API query parameters. 
-- **Adapter host.** Adapter plugin loader (restart-time only) resolves profile variant to adapter implementation. Adapters present a deterministic `RunAsync(context)` contract with streaming writers and telemetry instrumentation. -- **Content writers.** - - JSON adapters emit `.jsonl.zst` files with canonical ordering (tenant, subject, document id). - - Trivy adapters materialise SQLite databases or tar archives matching Trivy DB expectations; schema version gates prevent unsupported outputs. - - Mirror adapters assemble deterministic filesystem trees (manifests, indexes, payload subtrees) and, when configured, OCI artefact layers. -- **Manifest generator.** Aggregates counts, bytes, hash digests (SHA-256), profile metadata, and input references. Writes `export.json` and `provenance.json` using canonical JSON (sorted keys, RFC3339 UTC timestamps). -- **Signing service.** Integrates with platform KMS via Authority (default cosign signer). Produces in-toto SLSA attestations when configured. Supports detached signatures and optional in-bundle signatures. -- **Distribution drivers.** `dist-http` exposes staged files via download endpoint; `dist-oci` pushes artefacts to registries using ORAS with digest pinning; `dist-objstore` uploads to tenant-specific prefixes with immutability flags. - -## Data model snapshots - -| Collection | Purpose | Key fields | Notes | -|------------|---------|------------|-------| -| `export_profiles` | Profile definitions (kind, variant, config). | `_id`, `tenant`, `name`, `kind`, `variant`, `config_json`, `created_by`, `created_at`. | Config includes adapter parameters (included record types, compression, encryption). | -| `export_runs` | Run state machine and audit info. | `_id`, `profile_id`, `tenant`, `status`, `requested_by`, `selectors`, `policy_snapshot_id`, `started_at`, `completed_at`, `duration_ms`, `error_code`. | Immutable selectors; status transitions recorded in `export_events`. | -| `export_inputs` | Resolved input ranges. | `run_id`, `source`, `cursor`, `count`, `hash`. | Enables resumable retries and audit. | -| `export_distributions` | Distribution artefacts. | `run_id`, `type` (`http`, `oci`, `object`), `location`, `sha256`, `size_bytes`, `expires_at`. | `expires_at` used for retention policies and automatic pruning. | -| `export_events` | Timeline of state transitions and metrics. | `run_id`, `event_type`, `message`, `at`, `metrics`. | Feeds SSE stream and audit trails. | - -## Adapter responsibilities -- **JSON (`json:raw`, `json:policy`).** - - Ensures canonical casing, timezone normalization, and linkset preservation. - - Policy variant embeds policy snapshot metadata (`policy_version`, `inputs_hash`, `decision_trace` fingerprint) and emits evaluated findings as separate files. - - Enforces AOC guardrails: no derived modifications to raw evidence fields. -- **Trivy (`trivy:db`, `trivy:java-db`).** - - Maps StellaOps advisory schema to Trivy DB format, handling namespace collisions and ecosystem-specific ranges. - - Validates compatibility against supported Trivy schema versions; run fails fast if mismatch. - - Emits optional manifest summarising package counts and severity distribution. -- **Mirror (`mirror:full`, `mirror:delta`).** - - Builds self-contained filesystem layout (`/manifests`, `/data/raw`, `/data/policy`, `/indexes`). - - Delta variant compares against base manifest (`base_export_id`) to write only changed artefacts; records `removed` entries for cleanup. 
- - Supports optional encryption of `/data` subtree (age/AES-GCM) with key wrapping stored in `provenance.json`. - -Adapters expose structured telemetry events (`adapter.start`, `adapter.chunk`, `adapter.complete`) with record counts and byte totals per chunk. Failures emit `adapter.error` with reason codes. - -## Signing and provenance -- **Manifest schema.** `export.json` contains run metadata, profile descriptor, selector summary, counts, SHA-256 digests, compression hints, and distribution list. Deterministic field ordering and normalized timestamps. -- **Provenance schema.** `provenance.json` captures in-toto subject listing (bundle digest, manifest digest), referenced inputs (findings ledger queries, policy snapshot ids, SBOM identifiers), tool version (`exporter_version`, adapter versions), and KMS key identifiers. -- **Attestation.** Cosign SLSA Level 2 template by default; optional SLSA Level 3 when supply chain attestations are enabled. Detached signatures stored alongside manifests; CLI/Console encourage `cosign verify --key ` workflow. -- **Audit trail.** Each run stores success/failure status, signature identifiers, and verification hints for downstream automation (CI pipelines, offline verification scripts). - -## Distribution flows -- **HTTP download.** Console and CLI stream bundles via chunked transfer; supports range requests and resumable downloads. Response includes `X-Export-Digest`, `X-Export-Length`, and optional encryption metadata. -- **OCI push.** Worker uses ORAS to publish bundles as OCI artefacts with annotations describing profile, tenant, manifest digest, and provenance reference. Supports multi-tenant registries with `repository-per-tenant` naming. -- **Object storage.** Writes to tenant-prefixed paths (`s3://stella-exports/{tenant}/{run-id}/...`) with immutable retention policies. Retention scheduler purges expired runs based on profile configuration. -- **Offline Kit seeding.** Mirror bundles optionally staged into Offline Kit assembly pipelines, inheriting the same manifests and signatures. - -## Observability -- **Metrics.** Emits `exporter_run_duration_seconds`, `exporter_run_bytes_total{profile}`, `exporter_run_failures_total{error_code}`, `exporter_active_runs{tenant}`, `exporter_distribution_push_seconds{type}`. -- **Logs.** Structured logs with fields `run_id`, `tenant`, `profile_kind`, `adapter`, `phase`, `correlation_id`, `error_code`. Phases include `plan`, `resolve`, `adapter`, `manifest`, `sign`, `distribute`. -- **Traces.** Optional OpenTelemetry spans (`export.plan`, `export.fetch`, `export.write`, `export.sign`, `export.distribute`) for cross-service correlation. -- **Dashboards & alerts.** DevOps pipeline seeds Grafana dashboards summarising throughput, size, failure ratios, and distribution latency. Alert thresholds: failure rate >5% per profile, median run duration >p95 baseline, signature verification failures >0. - -## Security posture -- Tenant claim enforced at every query and distribution path; cross-tenant selectors rejected unless explicit cross-tenant mirror feature toggled with signed approval. -- RBAC scopes: `export:profile:manage`, `export:run`, `export:read`, `export:download`. Console hides actions without scope; CLI returns `401/403`. -- Encryption options configurable per profile; keys derived from Authority-managed KMS. Mirror encryption uses tenant-specific recipients; JSON/Trivy rely on transport security plus optional encryption at rest. 
-- Restart-only plugin loading ensures adapters and distribution drivers are vetted at deployment time, reducing runtime injection risks. -- Deterministic output ensures tamper detection via content hashes; provenance links to source runs and policy snapshots to maintain auditability. - -## Deployment considerations -- Packaged as separate API and worker containers. Helm chart and compose overlays define horizontal scaling, worker concurrency, queue leases, and object storage credentials. -- Requires Authority client credentials for KMS and optional registry credentials stored via sealed secrets. -- Offline-first deployments disable OCI distribution by default and provide local object storage endpoints; HTTP downloads served via internal gateway. -- Health endpoints: `/health/ready` validates Mongo connectivity, object storage access, adapter registry integrity, and KMS signer readiness. - -## Compliance checklist -- [ ] Profiles and runs enforce tenant scoping; cross-tenant exports disabled unless approved. -- [ ] Manifests and provenance files are generated with deterministic hashes and signed via configured KMS. -- [ ] Adapters run with restart-time registration only; no runtime plugin loading. -- [ ] Distribution drivers respect allowlist; OCI push disabled when offline mode is active. -- [ ] Metrics, logs, and traces follow observability guidelines; dashboards and alerts configured. -- [ ] Retention policies and pruning jobs configured for staged bundles. - -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. +# Export Center Architecture + +The Export Center is the dedicated service layer that packages StellaOps evidence and policy overlays into reproducible bundles. It runs as a multi-surface API backed by asynchronous workers and format adapters, enforcing Aggregation-Only Contract (AOC) guardrails while providing deterministic manifests, signing, and distribution paths. + +## Runtime topology +- **Export Center API (`StellaOps.ExportCenter.WebService`).** Receives profile CRUD, export run requests, status queries, and download streams through the unified Web API gateway. Enforces tenant scopes, RBAC, quotas, and concurrency guards. +- **Export Center Worker (`StellaOps.ExportCenter.Worker`).** Dequeues export jobs from the Orchestrator, resolves selectors, invokes adapters, and writes manifests and bundle artefacts. Stateless; scales horizontally. +- **Backing stores.** + - MongoDB collections: `export_profiles`, `export_runs`, `export_inputs`, `export_distributions`, `export_events`. + - Object storage bucket or filesystem for staging bundle payloads. + - Optional registry/object storage credentials injected via Authority-scoped secrets. +- **Integration peers.** + - **Findings Ledger** for advisory, VEX, SBOM payload streaming. + - **Policy Engine** for deterministic policy snapshots and evaluated findings. + - **Orchestrator** for job scheduling, quotas, and telemetry fan-out. + - **Authority** for tenant-aware access tokens and KMS key references. + - **Console & CLI** as presentation surfaces consuming the API. + +## Job lifecycle +1. **Profile selection.** Operator or automation picks a profile (`json:raw`, `json:policy`, `trivy:db`, `trivy:java-db`, `mirror:full`, `mirror:delta`) and submits scope selectors (tenant, time window, products, SBOM subjects, ecosystems). See `docs/export-center/profiles.md` for profile definitions and configuration fields. +2. 
**Planner resolution.** API validates selectors, expands include/exclude lists, and writes a pending `export_run` with immutable parameters and deterministic ordering hints. +3. **Orchestrator dispatch.** `export_run` triggers a job lease via Orchestrator with quotas per tenant/profile and concurrency caps (default 4 active per tenant). +4. **Worker execution.** Worker streams data from Findings Ledger and Policy Engine using pagination cursors. Adapters write canonical payloads to staging storage, compute checksums, and emit streaming progress events (SSE). +5. **Manifest and provenance emission.** Worker writes `export.json` and `provenance.json`, signs them with configured KMS keys (cosign-compatible), and uploads signatures alongside content. +6. **Distribution registration.** Worker records available distribution methods (download URL, OCI reference, object storage path), raises completion/failure events, and exposes metrics/logs. +7. **Download & verification.** Clients download bundles or pull OCI artefacts, verify signatures, and consume provenance to trace source artefacts. + +Cancellation requests mark runs as `aborted` and cause workers to stop iterating sources; partially written files are destroyed and the run is marked with an audit entry. + +## Core components +### API surface +- Detailed request and response payloads are catalogued in `docs/export-center/api.md`. +- **Profiles API.** + - `GET /api/export/profiles`: list tenant-scoped profiles. + - `POST /api/export/profiles`: create custom profiles (variants of JSON, Trivy, mirror) with validated configuration schema. + - `PATCH /api/export/profiles/{id}`: update metadata; config changes clone new revision to preserve determinism. +- **Runs API.** + - `POST /api/export/runs`: submit export run for a profile with selectors and options (policy snapshot id, mirror base manifest). + - `GET /api/export/runs/{id}`: status, progress counters, provenance summary. + - `GET /api/export/runs/{id}/events`: server-sent events with state transitions, adapter milestones, signing status. + - `POST /api/export/runs/{id}/cancel`: cooperative cancellation with audit logging. +- **Downloads API.** + - `GET /api/export/runs/{id}/download`: streaming download with range support and checksum trailers. + - `GET /api/export/runs/{id}/manifest`: signed `export.json`. + - `GET /api/export/runs/{id}/provenance`: signed `provenance.json`. + +All endpoints require Authority-issued JWT + DPoP tokens with scopes `export:run`, `export:read`, and tenant claim alignment. Rate-limiting and quotas surface via `X-Stella-Quota-*` headers. + +### Worker pipeline +- **Input resolvers.** Query Findings Ledger and Policy Engine using stable pagination (Mongo `_id` ascending, or resume tokens for change streams). Selector expressions compile into Mongo filter fragments and/or API query parameters. +- **Adapter host.** Adapter plugin loader (restart-time only) resolves profile variant to adapter implementation. Adapters present a deterministic `RunAsync(context)` contract with streaming writers and telemetry instrumentation. +- **Content writers.** + - JSON adapters emit `.jsonl.zst` files with canonical ordering (tenant, subject, document id). + - Trivy adapters materialise SQLite databases or tar archives matching Trivy DB expectations; schema version gates prevent unsupported outputs. + - Mirror adapters assemble deterministic filesystem trees (manifests, indexes, payload subtrees) and, when configured, OCI artefact layers. 
+- **Manifest generator.** Aggregates counts, bytes, hash digests (SHA-256), profile metadata, and input references. Writes `export.json` and `provenance.json` using canonical JSON (sorted keys, RFC3339 UTC timestamps). +- **Signing service.** Integrates with platform KMS via Authority (default cosign signer). Produces in-toto SLSA attestations when configured. Supports detached signatures and optional in-bundle signatures. +- **Distribution drivers.** `dist-http` exposes staged files via download endpoint; `dist-oci` pushes artefacts to registries using ORAS with digest pinning; `dist-objstore` uploads to tenant-specific prefixes with immutability flags. + +## Data model snapshots + +| Collection | Purpose | Key fields | Notes | +|------------|---------|------------|-------| +| `export_profiles` | Profile definitions (kind, variant, config). | `_id`, `tenant`, `name`, `kind`, `variant`, `config_json`, `created_by`, `created_at`. | Config includes adapter parameters (included record types, compression, encryption). | +| `export_runs` | Run state machine and audit info. | `_id`, `profile_id`, `tenant`, `status`, `requested_by`, `selectors`, `policy_snapshot_id`, `started_at`, `completed_at`, `duration_ms`, `error_code`. | Immutable selectors; status transitions recorded in `export_events`. | +| `export_inputs` | Resolved input ranges. | `run_id`, `source`, `cursor`, `count`, `hash`. | Enables resumable retries and audit. | +| `export_distributions` | Distribution artefacts. | `run_id`, `type` (`http`, `oci`, `object`), `location`, `sha256`, `size_bytes`, `expires_at`. | `expires_at` used for retention policies and automatic pruning. | +| `export_events` | Timeline of state transitions and metrics. | `run_id`, `event_type`, `message`, `at`, `metrics`. | Feeds SSE stream and audit trails. | + +## Adapter responsibilities +- **JSON (`json:raw`, `json:policy`).** + - Ensures canonical casing, timezone normalization, and linkset preservation. + - Policy variant embeds policy snapshot metadata (`policy_version`, `inputs_hash`, `decision_trace` fingerprint) and emits evaluated findings as separate files. + - Enforces AOC guardrails: no derived modifications to raw evidence fields. +- **Trivy (`trivy:db`, `trivy:java-db`).** + - Maps StellaOps advisory schema to Trivy DB format, handling namespace collisions and ecosystem-specific ranges. + - Validates compatibility against supported Trivy schema versions; run fails fast if mismatch. + - Emits optional manifest summarising package counts and severity distribution. +- **Mirror (`mirror:full`, `mirror:delta`).** + - Builds self-contained filesystem layout (`/manifests`, `/data/raw`, `/data/policy`, `/indexes`). + - Delta variant compares against base manifest (`base_export_id`) to write only changed artefacts; records `removed` entries for cleanup. + - Supports optional encryption of `/data` subtree (age/AES-GCM) with key wrapping stored in `provenance.json`. + +Adapters expose structured telemetry events (`adapter.start`, `adapter.chunk`, `adapter.complete`) with record counts and byte totals per chunk. Failures emit `adapter.error` with reason codes. + +## Signing and provenance +- **Manifest schema.** `export.json` contains run metadata, profile descriptor, selector summary, counts, SHA-256 digests, compression hints, and distribution list. Deterministic field ordering and normalized timestamps. 
+- **Provenance schema.** `provenance.json` captures in-toto subject listing (bundle digest, manifest digest), referenced inputs (findings ledger queries, policy snapshot ids, SBOM identifiers), tool version (`exporter_version`, adapter versions), and KMS key identifiers. +- **Attestation.** Cosign SLSA Level 2 template by default; optional SLSA Level 3 when supply chain attestations are enabled. Detached signatures stored alongside manifests; CLI/Console encourage `cosign verify --key ` workflow. +- **Audit trail.** Each run stores success/failure status, signature identifiers, and verification hints for downstream automation (CI pipelines, offline verification scripts). + +## Distribution flows +- **HTTP download.** Console and CLI stream bundles via chunked transfer; supports range requests and resumable downloads. Response includes `X-Export-Digest`, `X-Export-Length`, and optional encryption metadata. +- **OCI push.** Worker uses ORAS to publish bundles as OCI artefacts with annotations describing profile, tenant, manifest digest, and provenance reference. Supports multi-tenant registries with `repository-per-tenant` naming. +- **Object storage.** Writes to tenant-prefixed paths (`s3://stella-exports/{tenant}/{run-id}/...`) with immutable retention policies. Retention scheduler purges expired runs based on profile configuration. +- **Offline Kit seeding.** Mirror bundles optionally staged into Offline Kit assembly pipelines, inheriting the same manifests and signatures. + +## Observability +- **Metrics.** Emits `exporter_run_duration_seconds`, `exporter_run_bytes_total{profile}`, `exporter_run_failures_total{error_code}`, `exporter_active_runs{tenant}`, `exporter_distribution_push_seconds{type}`. +- **Logs.** Structured logs with fields `run_id`, `tenant`, `profile_kind`, `adapter`, `phase`, `correlation_id`, `error_code`. Phases include `plan`, `resolve`, `adapter`, `manifest`, `sign`, `distribute`. +- **Traces.** Optional OpenTelemetry spans (`export.plan`, `export.fetch`, `export.write`, `export.sign`, `export.distribute`) for cross-service correlation. +- **Dashboards & alerts.** DevOps pipeline seeds Grafana dashboards summarising throughput, size, failure ratios, and distribution latency. Alert thresholds: failure rate >5% per profile, median run duration >p95 baseline, signature verification failures >0. + +## Security posture +- Tenant claim enforced at every query and distribution path; cross-tenant selectors rejected unless explicit cross-tenant mirror feature toggled with signed approval. +- RBAC scopes: `export:profile:manage`, `export:run`, `export:read`, `export:download`. Console hides actions without scope; CLI returns `401/403`. +- Encryption options configurable per profile; keys derived from Authority-managed KMS. Mirror encryption uses tenant-specific recipients; JSON/Trivy rely on transport security plus optional encryption at rest. +- Restart-only plugin loading ensures adapters and distribution drivers are vetted at deployment time, reducing runtime injection risks. +- Deterministic output ensures tamper detection via content hashes; provenance links to source runs and policy snapshots to maintain auditability. + +## Deployment considerations +- Packaged as separate API and worker containers. Helm chart and compose overlays define horizontal scaling, worker concurrency, queue leases, and object storage credentials. +- Requires Authority client credentials for KMS and optional registry credentials stored via sealed secrets. 
+- Offline-first deployments disable OCI distribution by default and provide local object storage endpoints; HTTP downloads served via internal gateway. +- Health endpoints: `/health/ready` validates Mongo connectivity, object storage access, adapter registry integrity, and KMS signer readiness. + +## Compliance checklist +- [ ] Profiles and runs enforce tenant scoping; cross-tenant exports disabled unless approved. +- [ ] Manifests and provenance files are generated with deterministic hashes and signed via configured KMS. +- [ ] Adapters run with restart-time registration only; no runtime plugin loading. +- [ ] Distribution drivers respect allowlist; OCI push disabled when offline mode is active. +- [ ] Metrics, logs, and traces follow observability guidelines; dashboards and alerts configured. +- [ ] Retention policies and pruning jobs configured for staged bundles. + +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. diff --git a/docs/export-center/cli.md b/docs/export-center/cli.md index 16be22d7..2cbf08bf 100644 --- a/docs/export-center/cli.md +++ b/docs/export-center/cli.md @@ -1,231 +1,231 @@ -# Stella CLI - Export Center Commands - -> **Audience:** Operators, release engineers, and CI maintainers using the `stella` CLI to manage Export Center profiles and runs. -> **Supported from:** `stella` CLI >= 0.22.0 (Export Center Phase 1). -> **Prerequisites:** Authority token with the scopes noted per command (`export:profile:manage`, `export:run`, `export:read`, `export:download`). - -Use this guide with the [Export Center API reference](api.md) and [Profiles](profiles.md) catalogue. The CLI wraps the same REST endpoints, preserving deterministic behaviour and guardrails. - -> Status: CLI support is tracked under `CLI-EXPORT-35-001` and `CLI-EXPORT-36-001`. The current CLI build does not yet surface these commands; treat this guide as the target contract and adjust once implementations merge. - -## 1. Global options and configuration - -| Flag | Default | Description | -|------|---------|-------------| -| `--server ` | `https://stella.local` | Gateway root. Matches `STELLA_SERVER`. | -| `--tenant ` | Token tenant | Override tenant for multi-tenant tokens. | -| `--profile ` | none | Loads saved defaults from `~/.stella/profiles/.toml`. | -| `--output ` | stdout | Redirect full JSON response. | -| `--format ` | `table` on TTY | Controls table formatting for list commands. | -| `--trace` | false | Emit request timing and correlation ids. | - -Environment variables: `STELLA_TOKEN`, `STELLA_SERVER`, `STELLA_TENANT`, `STELLA_PROFILE`. - -Exit codes align with API error codes (see section 6). - -## 2. Profile management commands - -### 2.1 `stella export profile list` - -List profiles for the current tenant. - -``` -stella export profile list --kind json --variant raw --format table -``` - -Outputs columns `PROFILE`, `KIND`, `VARIANT`, `DISTRIBUTION`, `RETENTION`. Use `--format json` for automation. - -### 2.2 `stella export profile show` - -``` -stella export profile show prof-json-raw --output profile.json -``` - -Fetches full configuration and writes it to file. - -### 2.3 `stella export profile create` - -``` -stella export profile create --file profiles/prof-json-raw.json -``` - -JSON schema matches `POST /api/export/profiles`. CLI validates against built-in schema before submission. Requires `export:profile:manage`. 
- -### 2.4 `stella export profile update` - -``` -stella export profile update prof-json-raw \ - --retention "days:21" \ - --distribution http,object -``` - -Supports toggling retention, adding/removing distribution targets, and renaming. Structural changes (kind, variant, include set) require editing the JSON and using `--replace-file` to create a new revision. - -### 2.5 `stella export profile archive` - -``` -stella export profile archive prof-json-raw --reason "Superseded by Phase 2 profile" -``` - -Marks the profile inactive. Use `stella export profile restore` to re-activate. - -## 3. Run lifecycle commands - -### 3.1 `stella export run submit` - -``` -stella export run submit prof-json-raw \ - --selector tenant=acme \ - --selector product=registry.example.com/app:* \ - --selector time=2025-10-01T00:00:00Z,2025-10-29T00:00:00Z \ - --policy-snapshot policy-snap-42 \ - --allow-empty=false -``` - -Selectors accept `key=value` pairs; use `time=,` for windows. The command prints the `runId` and initial status. - -### 3.2 `stella export run ls` - -``` -stella export run ls --profile prof-json-raw --status active --tail 5 -``` - -Shows recent runs with columns `RUN`, `PROFILE`, `STATUS`, `PROGRESS`, `UPDATED`. - -### 3.3 `stella export run show` - -``` -stella export run show run-20251029-01 --format json -``` - -Outputs full metadata, progress counters, distribution descriptors, and links. - -### 3.4 `stella export run watch` - -``` -stella export run watch run-20251029-01 --follow -``` - -Streams server-sent events and renders a live progress bar. `--json` prints raw events for scripting. - -### 3.5 `stella export run cancel` - -``` -stella export run cancel run-20251029-01 --reason "Replacing with refined selectors" -``` - -Gracefully cancels the run; exit code `0` indicates cancellation request accepted. - -## 4. Download and verification commands - -### 4.1 `stella export download` - -``` -stella export download run-20251029-01 \ - --output out/exports/run-20251029-01.tar.zst \ - --resume -``` - -Downloads the primary bundle. `--resume` enables HTTP range requests; the CLI checkpoints progress to `.part` files. - -### 4.2 `stella export manifest` - -``` -stella export manifest run-20251029-01 --output manifests/export.json -``` - -Fetches the signed manifest. Use `--signature manifests/export.json.sig` to save the detached signature. - -### 4.3 `stella export provenance` - -``` -stella export provenance run-20251029-01 --output manifests/provenance.json -``` - -Retrieves the signed provenance file. `--signature` behaves like the manifest command. - -### 4.4 `stella export verify` - -``` -stella export verify run-20251029-01 \ - --manifest manifests/export.json \ - --provenance manifests/provenance.json \ - --key keys/acme-export.pub -``` - -Wrapper around `cosign verify`. Returns exit `0` when signatures and digests validate. Exit `20` when verification fails. - -## 5. 
CI recipe (GitHub Actions example) - -```yaml -name: Export Center Bundle -on: - workflow_dispatch: -jobs: - export: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - name: Install Stella CLI - run: curl -sSfL https://downloads.stellaops.org/cli/install.sh | sh - - name: Submit export run - env: - STELLA_TOKEN: ${{ secrets.STELLA_TOKEN }} - run: | - run_id=$(stella export run submit prof-json-raw \ - --selector tenant=acme \ - --selector product=registry.example.com/app:* \ - --allow-empty=false \ - --format json | jq -r '.runId') - echo "RUN_ID=$run_id" >> $GITHUB_ENV - - name: Wait for completion - env: - STELLA_TOKEN: ${{ secrets.STELLA_TOKEN }} - run: | - stella export run watch "$RUN_ID" --json \ - | tee artifacts/run.log \ - | jq -e 'select(.event == "run.succeeded")' > /dev/null - - name: Download bundle - env: - STELLA_TOKEN: ${{ secrets.STELLA_TOKEN }} - run: | - stella export download "$RUN_ID" --output artifacts/export.tar.zst --resume - stella export manifest "$RUN_ID" --output artifacts/export.json --signature artifacts/export.json.sig - stella export provenance "$RUN_ID" --output artifacts/provenance.json --signature artifacts/provenance.json.sig - - name: Verify signatures - run: | - stella export verify "$RUN_ID" \ - --manifest artifacts/export.json \ - --provenance artifacts/provenance.json \ - --key keys/acme-export.pub -``` - -## 6. Exit codes - -| Code | Meaning | -|------|---------| -| `0` | Command succeeded. | -| `10` | Validation error (`ERR_EXPORT_001`). | -| `11` | Profile missing or inaccessible (`ERR_EXPORT_002`). | -| `12` | Quota or concurrency exceeded (`ERR_EXPORT_003` or `ERR_EXPORT_QUOTA`). | -| `13` | Run failed due to adapter/signing/distribution error. | -| `20` | Verification failure (`stella export verify`). | -| `21` | Download incomplete after retries (network errors). | -| `30` | CLI configuration error (missing token, invalid profile file). | - -Exit codes above 100 are reserved for future profile-specific tooling. - -## 7. Offline usage notes - -- Use profiles that enable `object` distribution with local object storage endpoints. CLI reads `STELLA_EXPORT_OBJECT_ENDPOINT` when provided (falls back to gateway). -- Mirror bundles work offline by skipping OCI distribution. CLI adds `--offline` to bypass OCI checks. -- `stella export verify` works fully offline when provided with tenant public keys (packaged in Offline Kit). - -## 8. Related documentation - -- [Export Center Profiles](profiles.md) -- [Export Center API reference](api.md) -- [Export Center Architecture](architecture.md) -- [Aggregation-Only Contract reference](../ingestion/aggregation-only-contract.md) - -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. +# Stella CLI - Export Center Commands + +> **Audience:** Operators, release engineers, and CI maintainers using the `stella` CLI to manage Export Center profiles and runs. +> **Supported from:** `stella` CLI >= 0.22.0 (Export Center Phase 1). +> **Prerequisites:** Authority token with the scopes noted per command (`export:profile:manage`, `export:run`, `export:read`, `export:download`). + +Use this guide with the [Export Center API reference](api.md) and [Profiles](profiles.md) catalogue. The CLI wraps the same REST endpoints, preserving deterministic behaviour and guardrails. + +> Status: CLI support is tracked under `CLI-EXPORT-35-001` and `CLI-EXPORT-36-001`. 
The current CLI build does not yet surface these commands; treat this guide as the target contract and adjust once implementations merge. + +## 1. Global options and configuration + +| Flag | Default | Description | +|------|---------|-------------| +| `--server ` | `https://stella.local` | Gateway root. Matches `STELLA_SERVER`. | +| `--tenant ` | Token tenant | Override tenant for multi-tenant tokens. | +| `--profile ` | none | Loads saved defaults from `~/.stella/profiles/.toml`. | +| `--output ` | stdout | Redirect full JSON response. | +| `--format ` | `table` on TTY | Controls table formatting for list commands. | +| `--trace` | false | Emit request timing and correlation ids. | + +Environment variables: `STELLA_TOKEN`, `STELLA_SERVER`, `STELLA_TENANT`, `STELLA_PROFILE`. + +Exit codes align with API error codes (see section 6). + +## 2. Profile management commands + +### 2.1 `stella export profile list` + +List profiles for the current tenant. + +``` +stella export profile list --kind json --variant raw --format table +``` + +Outputs columns `PROFILE`, `KIND`, `VARIANT`, `DISTRIBUTION`, `RETENTION`. Use `--format json` for automation. + +### 2.2 `stella export profile show` + +``` +stella export profile show prof-json-raw --output profile.json +``` + +Fetches full configuration and writes it to file. + +### 2.3 `stella export profile create` + +``` +stella export profile create --file profiles/prof-json-raw.json +``` + +JSON schema matches `POST /api/export/profiles`. CLI validates against built-in schema before submission. Requires `export:profile:manage`. + +### 2.4 `stella export profile update` + +``` +stella export profile update prof-json-raw \ + --retention "days:21" \ + --distribution http,object +``` + +Supports toggling retention, adding/removing distribution targets, and renaming. Structural changes (kind, variant, include set) require editing the JSON and using `--replace-file` to create a new revision. + +### 2.5 `stella export profile archive` + +``` +stella export profile archive prof-json-raw --reason "Superseded by Phase 2 profile" +``` + +Marks the profile inactive. Use `stella export profile restore` to re-activate. + +## 3. Run lifecycle commands + +### 3.1 `stella export run submit` + +``` +stella export run submit prof-json-raw \ + --selector tenant=acme \ + --selector product=registry.example.com/app:* \ + --selector time=2025-10-01T00:00:00Z,2025-10-29T00:00:00Z \ + --policy-snapshot policy-snap-42 \ + --allow-empty=false +``` + +Selectors accept `key=value` pairs; use `time=,` for windows. The command prints the `runId` and initial status. + +### 3.2 `stella export run ls` + +``` +stella export run ls --profile prof-json-raw --status active --tail 5 +``` + +Shows recent runs with columns `RUN`, `PROFILE`, `STATUS`, `PROGRESS`, `UPDATED`. + +### 3.3 `stella export run show` + +``` +stella export run show run-20251029-01 --format json +``` + +Outputs full metadata, progress counters, distribution descriptors, and links. + +### 3.4 `stella export run watch` + +``` +stella export run watch run-20251029-01 --follow +``` + +Streams server-sent events and renders a live progress bar. `--json` prints raw events for scripting. + +### 3.5 `stella export run cancel` + +``` +stella export run cancel run-20251029-01 --reason "Replacing with refined selectors" +``` + +Gracefully cancels the run; exit code `0` indicates cancellation request accepted. + +## 4. 
Download and verification commands + +### 4.1 `stella export download` + +``` +stella export download run-20251029-01 \ + --output out/exports/run-20251029-01.tar.zst \ + --resume +``` + +Downloads the primary bundle. `--resume` enables HTTP range requests; the CLI checkpoints progress to `.part` files. + +### 4.2 `stella export manifest` + +``` +stella export manifest run-20251029-01 --output manifests/export.json +``` + +Fetches the signed manifest. Use `--signature manifests/export.json.sig` to save the detached signature. + +### 4.3 `stella export provenance` + +``` +stella export provenance run-20251029-01 --output manifests/provenance.json +``` + +Retrieves the signed provenance file. `--signature` behaves like the manifest command. + +### 4.4 `stella export verify` + +``` +stella export verify run-20251029-01 \ + --manifest manifests/export.json \ + --provenance manifests/provenance.json \ + --key keys/acme-export.pub +``` + +Wrapper around `cosign verify`. Returns exit `0` when signatures and digests validate. Exit `20` when verification fails. + +## 5. CI recipe (GitHub Actions example) + +```yaml +name: Export Center Bundle +on: + workflow_dispatch: +jobs: + export: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Install Stella CLI + run: curl -sSfL https://downloads.stellaops.org/cli/install.sh | sh + - name: Submit export run + env: + STELLA_TOKEN: ${{ secrets.STELLA_TOKEN }} + run: | + run_id=$(stella export run submit prof-json-raw \ + --selector tenant=acme \ + --selector product=registry.example.com/app:* \ + --allow-empty=false \ + --format json | jq -r '.runId') + echo "RUN_ID=$run_id" >> $GITHUB_ENV + - name: Wait for completion + env: + STELLA_TOKEN: ${{ secrets.STELLA_TOKEN }} + run: | + stella export run watch "$RUN_ID" --json \ + | tee artifacts/run.log \ + | jq -e 'select(.event == "run.succeeded")' > /dev/null + - name: Download bundle + env: + STELLA_TOKEN: ${{ secrets.STELLA_TOKEN }} + run: | + stella export download "$RUN_ID" --output artifacts/export.tar.zst --resume + stella export manifest "$RUN_ID" --output artifacts/export.json --signature artifacts/export.json.sig + stella export provenance "$RUN_ID" --output artifacts/provenance.json --signature artifacts/provenance.json.sig + - name: Verify signatures + run: | + stella export verify "$RUN_ID" \ + --manifest artifacts/export.json \ + --provenance artifacts/provenance.json \ + --key keys/acme-export.pub +``` + +## 6. Exit codes + +| Code | Meaning | +|------|---------| +| `0` | Command succeeded. | +| `10` | Validation error (`ERR_EXPORT_001`). | +| `11` | Profile missing or inaccessible (`ERR_EXPORT_002`). | +| `12` | Quota or concurrency exceeded (`ERR_EXPORT_003` or `ERR_EXPORT_QUOTA`). | +| `13` | Run failed due to adapter/signing/distribution error. | +| `20` | Verification failure (`stella export verify`). | +| `21` | Download incomplete after retries (network errors). | +| `30` | CLI configuration error (missing token, invalid profile file). | + +Exit codes above 100 are reserved for future profile-specific tooling. + +## 7. Offline usage notes + +- Use profiles that enable `object` distribution with local object storage endpoints. CLI reads `STELLA_EXPORT_OBJECT_ENDPOINT` when provided (falls back to gateway). +- Mirror bundles work offline by skipping OCI distribution. CLI adds `--offline` to bypass OCI checks. +- `stella export verify` works fully offline when provided with tenant public keys (packaged in Offline Kit). + +## 8. 
Related documentation + +- [Export Center Profiles](profiles.md) +- [Export Center API reference](api.md) +- [Export Center Architecture](architecture.md) +- [Aggregation-Only Contract reference](../ingestion/aggregation-only-contract.md) + +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. diff --git a/docs/export-center/mirror-bundles.md b/docs/export-center/mirror-bundles.md index 34381fe6..b01c8ffa 100644 --- a/docs/export-center/mirror-bundles.md +++ b/docs/export-center/mirror-bundles.md @@ -1,202 +1,202 @@ -# Export Center Mirror Bundles - -Mirror bundles package StellaOps evidence, policy overlays, and indexes for air-gapped or bandwidth-constrained environments. They are produced by the `mirror:full` and `mirror:delta` profiles described in Epic 10 (Export Center) and implemented across Sprints 35-37 (`EXPORT-SVC-35-004`, `EXPORT-SVC-37-001`, `EXPORT-SVC-37-002`). This guide details bundle layouts, delta mechanics, encryption workflow, import procedures, and operational best practices. - -> Export Center workers are being wired while this document is written. Treat the content as the target contract for adapter development and update specifics as the implementation lands. - -## 1. Bundle overview - -| Profile | Contents | Typical use cases | Distribution | -|---------|----------|-------------------|--------------| -| `mirror:full` | Complete snapshot of raw evidence, normalized records, indexes, policy snapshots, provenance, signatures. | Initial seeding of an air-gapped mirror, disaster recovery drills. | Download bundle, optional OCI artifact. | -| `mirror:delta` | Changes since a specified base export: added/updated/removed advisories, VEX statements, SBOMs, indexes, manifests. | Incremental updates, bandwidth reduction, nightly refreshes. | Download bundle, optional OCI artifact. | - -Both profiles respect AOC boundaries: raw ingestion data remains untouched, and policy outputs live under their own directory with explicit provenance. - -## 2. Filesystem layout - -Directory structure inside the extracted bundle: - -``` -mirror/ - manifest.yaml - export.json - provenance.json - README.md - indexes/ - advisories.index.json - vex.index.json - sbom.index.json - findings.index.json - data/ - raw/ - advisories/*.jsonl.zst - vex/*.jsonl.zst - sboms//sbom.json - normalized/ - advisories/*.jsonl.zst - vex/*.jsonl.zst - policy/ - snapshot.json - evaluations.jsonl.zst - consensus/ - vex_consensus.jsonl.zst - signatures/ - export.sig - manifest.sig -``` - -`manifest.yaml` summarises profile metadata, selectors, counts, sizes, and SHA-256 digests. `export.json` and `provenance.json` mirror the JSON profile manifests and are signed using the configured KMS key. - -Example `manifest.yaml`: - -```yaml -profile: mirror:full -runId: run-20251029-01 -tenant: acme -selectors: - products: - - registry.example.com/app:* - timeWindow: - from: 2025-10-01T00:00:00Z - to: 2025-10-29T00:00:00Z -counts: - advisories: 15234 - vex: 3045 - sboms: 872 - policyEvaluations: 19876 -artifacts: - - path: data/raw/advisories/a0.jsonl.zst - sha256: 9f4b... - bytes: 7340021 -encryption: - mode: age - strict: false - recipients: - - age1tenantkey... -``` - -## 3. Delta mechanics - -Delta bundles reference a previous full or delta run via `baseExportId` and `baseManifestDigest`. They contain: - -``` -delta/ - changed/ - data/raw/advisories/*.jsonl.zst - ... 
- removed/ - advisories.jsonl # list of advisory IDs removed - vex.jsonl - sboms.jsonl - manifest.diff.json # summary of counts, hashes, base export metadata -``` - -- **Base lookup:** The worker verifies that the base export is reachable (download path or OCI reference). If missing, the run fails with `ERR_EXPORT_BASE_MISSING`. -- **Change detection:** Uses deterministic hashing of normalized records to compute additions/updates. Indexes are regenerated only for affected subjects. -- **Application order:** Consumers apply deltas sequentially. A `resetBaseline=true` flag instructs them to drop cached state and apply the bundle as a full refresh. - -Example `manifest.diff.json` (delta): - -```json -{ - "baseExportId": "run-20251025-01", - "baseManifestDigest": "sha256:aa11...", - "resetBaseline": false, - "added": { - "advisories": 43, - "vex": 12, - "sboms": 5 - }, - "changed": { - "advisories": 18, - "vex": 7 - }, - "removed": { - "advisories": 2, - "vex": 0, - "sboms": 0 - } -} -``` - -## 4. Encryption workflow - -Mirror bundles support optional encryption of the `data/` subtree: - -- **Algorithm:** Age (X25519) or AES-GCM (256-bit) based on profile configuration. -- **Key wrapping:** Keys fetched from Authority-managed KMS through Export Center. Wrapped data keys stored in `provenance.json` under `encryption.recipients[]`. -- **Metadata:** `manifest.yaml` records `encryption.mode`, `recipients`, and `encryptedPaths`. -- **Strict mode:** `strict=true` encrypts everything except `manifest.yaml` and `export.json`. Default (`false`) leaves manifests unencrypted to simplify discovery. -- **Verification:** CLI (`stella export verify`) and Offline Kit scripts perform signature checks prior to decryption. - -Operators must distribute recipient keys out of band. Export Center does not transmit private keys. - -## 5. Import workflow - -### 5.1 Offline Kit - -Offline Kit bundles reference the latest full mirror export plus the last `N` deltas. Administrators run: - -``` -./offline-kit/bin/mirror import /path/to/mirror-20251029-full.tar.zst -./offline-kit/bin/mirror import /path/to/mirror-20251030-delta.tar.zst -``` - -The tool verifies signatures, applies deltas, and updates the mirror index served by the local gateway. - -### 5.2 Custom automation - -1. Download bundle (`stella export download`) and verify signatures (`stella export verify`). -2. Extract archive into a staging directory. -3. For encrypted bundles, decrypt using the provided age/AES key. -4. Sync `mirror/data` onto the target mirror store (object storage, NFS, etc.). -5. Republish indexes or reload services that depend on the mirror. - -Delta consumers must track `appliedExportIds` to ensure ordering. - -Sequence diagram of download/import: - -```mermaid -sequenceDiagram - participant CLI as stella CLI - participant Mirror as Mirror Store - participant Verify as Verification Tool - CLI->>CLI: stella export download run-20251029-01 - CLI->>Verify: stella export verify run-20251029-01 - CLI->>Mirror: mirror import mirror-20251029-full.tar.zst - CLI->>Mirror: mirror import mirror-20251030-delta.tar.zst - Mirror-->>CLI: import complete (run-20251030-02) -``` - -## 6. Operational guidance - -- **Retention:** Keep at least one full bundle plus the deltas required for disaster recovery. Configure `ExportCenter:Retention:Mirror` to prune older bundles automatically. -- **Storage footprint:** Full bundles can exceed tens of gigabytes. Plan object storage or NAS capacity accordingly and enable compression (`compression.codec=zstd`). 
-- **Scheduling:** For high-churn environments, run daily full exports and hourly deltas. Record cadence in `manifest.yaml` (`schedule.cron`). -- **Incident recovery:** To rebuild a mirror: - 1. Apply the most recent full bundle. - 2. Apply deltas in order of `createdAt`. - 3. Re-run integrity checks (`mirror verify `). -- **Audit logging:** Export Center logs `mirror.bundle.created`, `mirror.delta.applied`, and `mirror.encryption.enabled` events. Consume them in the central observability pipeline. - -## 7. Troubleshooting - -| Symptom | Meaning | Action | -|---------|---------|--------| -| `ERR_EXPORT_BASE_MISSING` | Base export not available | Republish base bundle or rebuild as full export. | -| Delta applies but mirror misses entries | Deltas applied out of order | Rebuild from last full bundle and reapply in sequence. | -| Decryption fails | Recipient key mismatch or corrupted bundle | Confirm key distribution and re-download bundle. | -| Verification errors | Signature mismatch | Do not import; regenerate bundle and investigate signing pipeline. | -| Manifest hash mismatch | Files changed after extraction | Re-extract bundle and re-run verification; check storage tampering. | - -## 8. References - -- [Export Center Overview](overview.md) -- [Export Center Architecture](architecture.md) -- [Export Center API reference](api.md) -- [Export Center CLI Guide](cli.md) -- [Concelier mirror runbook](../ops/concelier-mirror-operations.md) -- [Aggregation-Only Contract reference](../ingestion/aggregation-only-contract.md) - -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. +# Export Center Mirror Bundles + +Mirror bundles package StellaOps evidence, policy overlays, and indexes for air-gapped or bandwidth-constrained environments. They are produced by the `mirror:full` and `mirror:delta` profiles described in Epic 10 (Export Center) and implemented across Sprints 35-37 (`EXPORT-SVC-35-004`, `EXPORT-SVC-37-001`, `EXPORT-SVC-37-002`). This guide details bundle layouts, delta mechanics, encryption workflow, import procedures, and operational best practices. + +> Export Center workers are being wired while this document is written. Treat the content as the target contract for adapter development and update specifics as the implementation lands. + +## 1. Bundle overview + +| Profile | Contents | Typical use cases | Distribution | +|---------|----------|-------------------|--------------| +| `mirror:full` | Complete snapshot of raw evidence, normalized records, indexes, policy snapshots, provenance, signatures. | Initial seeding of an air-gapped mirror, disaster recovery drills. | Download bundle, optional OCI artifact. | +| `mirror:delta` | Changes since a specified base export: added/updated/removed advisories, VEX statements, SBOMs, indexes, manifests. | Incremental updates, bandwidth reduction, nightly refreshes. | Download bundle, optional OCI artifact. | + +Both profiles respect AOC boundaries: raw ingestion data remains untouched, and policy outputs live under their own directory with explicit provenance. + +## 2. 
Filesystem layout + +Directory structure inside the extracted bundle: + +``` +mirror/ + manifest.yaml + export.json + provenance.json + README.md + indexes/ + advisories.index.json + vex.index.json + sbom.index.json + findings.index.json + data/ + raw/ + advisories/*.jsonl.zst + vex/*.jsonl.zst + sboms//sbom.json + normalized/ + advisories/*.jsonl.zst + vex/*.jsonl.zst + policy/ + snapshot.json + evaluations.jsonl.zst + consensus/ + vex_consensus.jsonl.zst + signatures/ + export.sig + manifest.sig +``` + +`manifest.yaml` summarises profile metadata, selectors, counts, sizes, and SHA-256 digests. `export.json` and `provenance.json` mirror the JSON profile manifests and are signed using the configured KMS key. + +Example `manifest.yaml`: + +```yaml +profile: mirror:full +runId: run-20251029-01 +tenant: acme +selectors: + products: + - registry.example.com/app:* + timeWindow: + from: 2025-10-01T00:00:00Z + to: 2025-10-29T00:00:00Z +counts: + advisories: 15234 + vex: 3045 + sboms: 872 + policyEvaluations: 19876 +artifacts: + - path: data/raw/advisories/a0.jsonl.zst + sha256: 9f4b... + bytes: 7340021 +encryption: + mode: age + strict: false + recipients: + - age1tenantkey... +``` + +## 3. Delta mechanics + +Delta bundles reference a previous full or delta run via `baseExportId` and `baseManifestDigest`. They contain: + +``` +delta/ + changed/ + data/raw/advisories/*.jsonl.zst + ... + removed/ + advisories.jsonl # list of advisory IDs removed + vex.jsonl + sboms.jsonl + manifest.diff.json # summary of counts, hashes, base export metadata +``` + +- **Base lookup:** The worker verifies that the base export is reachable (download path or OCI reference). If missing, the run fails with `ERR_EXPORT_BASE_MISSING`. +- **Change detection:** Uses deterministic hashing of normalized records to compute additions/updates. Indexes are regenerated only for affected subjects. +- **Application order:** Consumers apply deltas sequentially. A `resetBaseline=true` flag instructs them to drop cached state and apply the bundle as a full refresh. + +Example `manifest.diff.json` (delta): + +```json +{ + "baseExportId": "run-20251025-01", + "baseManifestDigest": "sha256:aa11...", + "resetBaseline": false, + "added": { + "advisories": 43, + "vex": 12, + "sboms": 5 + }, + "changed": { + "advisories": 18, + "vex": 7 + }, + "removed": { + "advisories": 2, + "vex": 0, + "sboms": 0 + } +} +``` + +## 4. Encryption workflow + +Mirror bundles support optional encryption of the `data/` subtree: + +- **Algorithm:** Age (X25519) or AES-GCM (256-bit) based on profile configuration. +- **Key wrapping:** Keys fetched from Authority-managed KMS through Export Center. Wrapped data keys stored in `provenance.json` under `encryption.recipients[]`. +- **Metadata:** `manifest.yaml` records `encryption.mode`, `recipients`, and `encryptedPaths`. +- **Strict mode:** `strict=true` encrypts everything except `manifest.yaml` and `export.json`. Default (`false`) leaves manifests unencrypted to simplify discovery. +- **Verification:** CLI (`stella export verify`) and Offline Kit scripts perform signature checks prior to decryption. + +Operators must distribute recipient keys out of band. Export Center does not transmit private keys. + +## 5. Import workflow + +### 5.1 Offline Kit + +Offline Kit bundles reference the latest full mirror export plus the last `N` deltas. 
Administrators run: + +``` +./offline-kit/bin/mirror import /path/to/mirror-20251029-full.tar.zst +./offline-kit/bin/mirror import /path/to/mirror-20251030-delta.tar.zst +``` + +The tool verifies signatures, applies deltas, and updates the mirror index served by the local gateway. + +### 5.2 Custom automation + +1. Download bundle (`stella export download`) and verify signatures (`stella export verify`). +2. Extract archive into a staging directory. +3. For encrypted bundles, decrypt using the provided age/AES key. +4. Sync `mirror/data` onto the target mirror store (object storage, NFS, etc.). +5. Republish indexes or reload services that depend on the mirror. + +Delta consumers must track `appliedExportIds` to ensure ordering. + +Sequence diagram of download/import: + +```mermaid +sequenceDiagram + participant CLI as stella CLI + participant Mirror as Mirror Store + participant Verify as Verification Tool + CLI->>CLI: stella export download run-20251029-01 + CLI->>Verify: stella export verify run-20251029-01 + CLI->>Mirror: mirror import mirror-20251029-full.tar.zst + CLI->>Mirror: mirror import mirror-20251030-delta.tar.zst + Mirror-->>CLI: import complete (run-20251030-02) +``` + +## 6. Operational guidance + +- **Retention:** Keep at least one full bundle plus the deltas required for disaster recovery. Configure `ExportCenter:Retention:Mirror` to prune older bundles automatically. +- **Storage footprint:** Full bundles can exceed tens of gigabytes. Plan object storage or NAS capacity accordingly and enable compression (`compression.codec=zstd`). +- **Scheduling:** For high-churn environments, run daily full exports and hourly deltas. Record cadence in `manifest.yaml` (`schedule.cron`). +- **Incident recovery:** To rebuild a mirror: + 1. Apply the most recent full bundle. + 2. Apply deltas in order of `createdAt`. + 3. Re-run integrity checks (`mirror verify `). +- **Audit logging:** Export Center logs `mirror.bundle.created`, `mirror.delta.applied`, and `mirror.encryption.enabled` events. Consume them in the central observability pipeline. + +## 7. Troubleshooting + +| Symptom | Meaning | Action | +|---------|---------|--------| +| `ERR_EXPORT_BASE_MISSING` | Base export not available | Republish base bundle or rebuild as full export. | +| Delta applies but mirror misses entries | Deltas applied out of order | Rebuild from last full bundle and reapply in sequence. | +| Decryption fails | Recipient key mismatch or corrupted bundle | Confirm key distribution and re-download bundle. | +| Verification errors | Signature mismatch | Do not import; regenerate bundle and investigate signing pipeline. | +| Manifest hash mismatch | Files changed after extraction | Re-extract bundle and re-run verification; check storage tampering. | + +## 8. References + +- [Export Center Overview](overview.md) +- [Export Center Architecture](architecture.md) +- [Export Center API reference](api.md) +- [Export Center CLI Guide](cli.md) +- [Concelier mirror runbook](../ops/concelier-mirror-operations.md) +- [Aggregation-Only Contract reference](../ingestion/aggregation-only-contract.md) + +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. 
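+
+Sections 3 and 5.2 require deltas to be applied strictly in order against the last applied export. The sketch below shows one way to gate an import on that ordering; the state-file location, the explicit export-id argument, and running the check against an already-extracted staging directory are assumptions rather than documented contract.
+
+```bash
+# Ordering pre-check for mirror:delta bundles (illustrative sketch).
+set -euo pipefail
+
+staging="$1"       # extracted delta bundle (contains manifest.diff.json)
+export_id="$2"     # id of this export, e.g. run-20251030-02
+state_file="${STATE_FILE:-/var/lib/stella-mirror/applied-exports.log}"
+
+diff_manifest=$(find "$staging" -name manifest.diff.json | head -n 1)
+base=$(jq -r '.baseExportId' "$diff_manifest")
+last=$(tail -n 1 "$state_file" 2>/dev/null || true)
+
+if [[ "$base" != "$last" ]]; then
+  echo "delta base '$base' does not match last applied export '${last:-none}';" \
+       "rebuild from the latest full bundle and reapply deltas in order" >&2
+  exit 1
+fi
+
+echo "ordering check passed for $export_id (base $base)"
+echo "after a successful 'mirror import', record it with: echo $export_id >> $state_file"
+```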
diff --git a/docs/export-center/overview.md b/docs/export-center/overview.md index 94675057..3e48412b 100644 --- a/docs/export-center/overview.md +++ b/docs/export-center/overview.md @@ -1,63 +1,63 @@ -# Export Center Overview - -The Export Center packages StellaOps evidence and policy outputs into portable, verifiable bundles. It provides one workflow for operators to deliver advisories, SBOMs, VEX statements, and policy decisions into downstream systems or air-gapped environments without rewriting data or violating the Aggregation-Only Contract (AOC). - -## What the Export Center delivers -- **Unified export service.** A dedicated `exporter` service coordinates profiles, runs, signing, and distribution targets with deterministic manifests. -- **Profile catalogue.** Out of the box variants include `json:raw`, `json:policy`, `trivy:db`, `trivy:java-db`, `mirror:full`, and `mirror:delta`, each aligned with AOC rules and downstream compatibility requirements. -- **Surface parity.** Operators can create, monitor, and download exports through the Web API gateway, Console workflows, and the CLI (`stella export ...`). All surfaces enforce tenant scope and RBAC consistently. -- **Automation hooks.** One-off, cron, and event triggers are orchestrated via the Scheduler/Orchestrator integration. Export telemetry (durations, bundle size, verification outcomes) feeds structured logs, metrics, and optional OpenTelemetry traces. - -### Profile variants at a glance - -| Profile | Contents | Primary scenarios | Distribution defaults | -|---------|----------|-------------------|-----------------------| -| `json:raw` | Canonical advisories, VEX, SBOM JSONL with hashes | Downstream analytics, evidence escrow | HTTP download, object storage | -| `json:policy` | `json:raw` plus policy snapshot, evaluated findings | Policy attestation, audit packages | HTTP download, object storage | -| `trivy:db` / `trivy:java-db` | Trivy-compatible vulnerability databases | Feed external scanners and CI | OCI artifact push, download | -| `mirror:full` | Complete evidence, indexes, policy, provenance | Air-gap mirror, disaster recovery | Filesystem bundle, OCI artifact | -| `mirror:delta` | Changes relative to prior manifest | Incremental updates to mirrors | Filesystem bundle, OCI artifact | - -## How it works end-to-end -1. **Profile & scope resolution.** A profile defines export type, content filters, and bundle settings. Scope selectors target tenants, artifacts, time windows, ecosystems, or SBOM subjects. -2. **Ledger collection.** Workers stream canonical data from Findings Ledger, VEX Lens, Conseiller feeds, and SBOM service. Policy exports pin a deterministic policy snapshot from Policy Engine. -3. **Adapter execution.** JSON adapters produce normalized `.jsonl.zst` outputs, Trivy adapters translate to the Trivy DB schema, and mirror adapters build filesystem or OCI bundle layouts. -4. **Manifesting & provenance.** Every run emits `export.json` (profile, filters, counts, checksums) and `provenance.json` (source artifacts, policy snapshot ids, signature references). -5. **Signing & distribution.** Bundles are signed via configured KMS (cosign-compatible) and distributed through HTTP streaming, OCI registry pushes, or object storage staging. - -Refer to `docs/export-center/architecture.md` (Sprint 35 task) for component diagrams and adapter internals once published. - -## Security and compliance guardrails -- **AOC alignment.** Exports bundle raw evidence and optional policy evaluations without mutating source content. 
Policy overlays remain attributed to Policy Engine and are clearly partitioned. -- **Tenant isolation.** All queries, manifests, and bundle paths carry tenant identifiers. Cross-tenant exports require explicit signed approval and ship with provenance trails. -- **Signing and encryption.** Manifests and payloads are signed using the platform KMS. Mirror profiles support optional in-bundle encryption (age/AES-GCM) with key wrapping. -- **Determinism.** Identical inputs yield identical bundles. Timestamps serialize in UTC ISO-8601; manifests include content hashes for audit replay. - -See `docs/security/policy-governance.md` and `docs/ingestion/aggregation-only-contract.md` for broader guardrail context. - -## Operating it offline -- **Offline Kit integration.** Air-gapped deployments receive pre-built export profiles and object storage layout templates through the Offline Kit bundles. -- **Mirror bundles.** `mirror:full` packages raw evidence, normalized indexes, policy snapshots, and provenance in a portable filesystem layout suitable for disconnected environments. `mirror:delta` tracks changes relative to a prior export manifest. -- **No unsanctioned egress.** The exporter respects the platform allowlist. External calls (e.g., OCI pushes) require explicit configuration and are disabled by default for offline installs. - -Consult `docs/24_OFFLINE_KIT.md` for Offline Kit delivery and `docs/ops/concelier-mirror-operations.md` for mirror ingestion procedures. - -## Getting started -1. **Choose a profile.** Map requirements to the profile table above. Policy-aware exports need a published policy snapshot. -2. **Define selectors.** Decide on tenants, products, SBOM subjects, or time windows to include. Default selectors export the entire tenant scope. -3. **Run via preferred surface.** - - **Console:** Navigate to the Export Center view, create a run, monitor progress, and download artifacts. - - **CLI:** Use `stella export run --profile --selector ` to submit a job, then `stella export download`. - - **API:** POST to `/api/export/runs` with profile id and scope payload; stream results from `/api/export/runs/{id}/download`. -4. **Verify bundles.** Use the attached provenance manifest and cosign signature to validate contents before distributing downstream. - -Refer to `docs/export-center/cli.md` for detailed command syntax and automation examples. - -## Observability & troubleshooting -- Structured logs emit lifecycle events (`fetch`, `adapter`, `sign`, `publish`) with correlation IDs for parallel job tracing. -- Metrics `exporter_run_duration_seconds`, `exporter_bundle_bytes_total`, and `exporter_run_failures_total` feed Grafana dashboards defined in the deployment runbooks. -- Verification failures or schema mismatches bubble up through failure events and appear in Console/CLI with actionable error messages. Inspect the run's audit log and `provenance.json` for root cause. - -See `docs/observability/policy.md` and `docs/ops/deployment-upgrade-runbook.md` for telemetry and operations guidance. - -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. +# Export Center Overview + +The Export Center packages StellaOps evidence and policy outputs into portable, verifiable bundles. It provides one workflow for operators to deliver advisories, SBOMs, VEX statements, and policy decisions into downstream systems or air-gapped environments without rewriting data or violating the Aggregation-Only Contract (AOC). 
+
+## What the Export Center delivers
+- **Unified export service.** A dedicated `exporter` service coordinates profiles, runs, signing, and distribution targets with deterministic manifests.
+- **Profile catalogue.** Out-of-the-box variants include `json:raw`, `json:policy`, `trivy:db`, `trivy:java-db`, `mirror:full`, and `mirror:delta`, each aligned with AOC rules and downstream compatibility requirements.
+- **Surface parity.** Operators can create, monitor, and download exports through the Web API gateway, Console workflows, and the CLI (`stella export ...`). All surfaces enforce tenant scope and RBAC consistently.
+- **Automation hooks.** One-off, cron, and event triggers are orchestrated via the Scheduler/Orchestrator integration. Export telemetry (durations, bundle size, verification outcomes) feeds structured logs, metrics, and optional OpenTelemetry traces.
+
+### Profile variants at a glance
+
+| Profile | Contents | Primary scenarios | Distribution defaults |
+|---------|----------|-------------------|-----------------------|
+| `json:raw` | Canonical advisories, VEX, SBOM JSONL with hashes | Downstream analytics, evidence escrow | HTTP download, object storage |
+| `json:policy` | `json:raw` plus policy snapshot, evaluated findings | Policy attestation, audit packages | HTTP download, object storage |
+| `trivy:db` / `trivy:java-db` | Trivy-compatible vulnerability databases | Feed external scanners and CI | OCI artifact push, download |
+| `mirror:full` | Complete evidence, indexes, policy, provenance | Air-gap mirror, disaster recovery | Filesystem bundle, OCI artifact |
+| `mirror:delta` | Changes relative to prior manifest | Incremental updates to mirrors | Filesystem bundle, OCI artifact |
+
+## How it works end-to-end
+1. **Profile & scope resolution.** A profile defines export type, content filters, and bundle settings. Scope selectors target tenants, artifacts, time windows, ecosystems, or SBOM subjects.
+2. **Ledger collection.** Workers stream canonical data from Findings Ledger, VEX Lens, Concelier feeds, and the SBOM service. Policy exports pin a deterministic policy snapshot from Policy Engine.
+3. **Adapter execution.** JSON adapters produce normalized `.jsonl.zst` outputs, Trivy adapters translate to the Trivy DB schema, and mirror adapters build filesystem or OCI bundle layouts.
+4. **Manifesting & provenance.** Every run emits `export.json` (profile, filters, counts, checksums) and `provenance.json` (source artifacts, policy snapshot ids, signature references).
+5. **Signing & distribution.** Bundles are signed via the configured KMS (cosign-compatible) and distributed through HTTP streaming, OCI registry pushes, or object storage staging.
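+
+The same flow can be exercised directly against the Web API gateway. The sketch below is illustrative only: the endpoint paths follow the API surface noted under Getting started, while the request payload field names (`profileId`, `selectors`) are assumptions; see `docs/export-center/api.md` for the authoritative contract.
+
+```bash
+# Submit a run and stream the bundle over the gateway (illustrative sketch).
+set -euo pipefail
+
+STELLA_SERVER="${STELLA_SERVER:-https://stella.local}"
+
+# 1. Submit a run for an existing profile with a tenant-scoped selector.
+run_id=$(curl -sSf -X POST "${STELLA_SERVER}/api/export/runs" \
+  -H "Authorization: Bearer ${STELLA_TOKEN}" \
+  -H "Content-Type: application/json" \
+  -d '{"profileId":"prof-json-raw","selectors":{"tenant":"acme"}}' \
+  | jq -r '.runId')
+
+# 2. Stream the resulting bundle (poll run status or use `stella export run watch` first).
+curl -sSf -H "Authorization: Bearer ${STELLA_TOKEN}" \
+  -o "export-${run_id}.tar.zst" \
+  "${STELLA_SERVER}/api/export/runs/${run_id}/download"
+```
+
+Refer to `docs/export-center/architecture.md` (Sprint 35 task) for component diagrams and adapter internals once published.
+
+## Security and compliance guardrails
+- **AOC alignment.** Exports bundle raw evidence and optional policy evaluations without mutating source content. Policy overlays remain attributed to Policy Engine and are clearly partitioned.
+- **Tenant isolation.** All queries, manifests, and bundle paths carry tenant identifiers. Cross-tenant exports require explicit signed approval and ship with provenance trails.
+- **Signing and encryption.** Manifests and payloads are signed using the platform KMS. Mirror profiles support optional in-bundle encryption (age/AES-GCM) with key wrapping.
+- **Determinism.** Identical inputs yield identical bundles.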
Timestamps serialize in UTC ISO-8601; manifests include content hashes for audit replay. + +See `docs/security/policy-governance.md` and `docs/ingestion/aggregation-only-contract.md` for broader guardrail context. + +## Operating it offline +- **Offline Kit integration.** Air-gapped deployments receive pre-built export profiles and object storage layout templates through the Offline Kit bundles. +- **Mirror bundles.** `mirror:full` packages raw evidence, normalized indexes, policy snapshots, and provenance in a portable filesystem layout suitable for disconnected environments. `mirror:delta` tracks changes relative to a prior export manifest. +- **No unsanctioned egress.** The exporter respects the platform allowlist. External calls (e.g., OCI pushes) require explicit configuration and are disabled by default for offline installs. + +Consult `docs/24_OFFLINE_KIT.md` for Offline Kit delivery and `docs/ops/concelier-mirror-operations.md` for mirror ingestion procedures. + +## Getting started +1. **Choose a profile.** Map requirements to the profile table above. Policy-aware exports need a published policy snapshot. +2. **Define selectors.** Decide on tenants, products, SBOM subjects, or time windows to include. Default selectors export the entire tenant scope. +3. **Run via preferred surface.** + - **Console:** Navigate to the Export Center view, create a run, monitor progress, and download artifacts. + - **CLI:** Use `stella export run --profile --selector ` to submit a job, then `stella export download`. + - **API:** POST to `/api/export/runs` with profile id and scope payload; stream results from `/api/export/runs/{id}/download`. +4. **Verify bundles.** Use the attached provenance manifest and cosign signature to validate contents before distributing downstream. + +Refer to `docs/export-center/cli.md` for detailed command syntax and automation examples. + +## Observability & troubleshooting +- Structured logs emit lifecycle events (`fetch`, `adapter`, `sign`, `publish`) with correlation IDs for parallel job tracing. +- Metrics `exporter_run_duration_seconds`, `exporter_bundle_bytes_total`, and `exporter_run_failures_total` feed Grafana dashboards defined in the deployment runbooks. +- Verification failures or schema mismatches bubble up through failure events and appear in Console/CLI with actionable error messages. Inspect the run's audit log and `provenance.json` for root cause. + +See `docs/observability/policy.md` and `docs/ops/deployment-upgrade-runbook.md` for telemetry and operations guidance. + +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. diff --git a/docs/export-center/profiles.md b/docs/export-center/profiles.md index ffc5cc4d..4b0f91a8 100644 --- a/docs/export-center/profiles.md +++ b/docs/export-center/profiles.md @@ -1,139 +1,139 @@ -# Export Center Profiles - -Export Center profiles define what data is collected, how it is encoded, and which distribution paths are enabled for a run. Profiles are tenant-scoped and deterministic: identical selectors and source data produce identical bundles. This guide summarises built-in profiles, configuration fields, schema conventions, and compatibility notes. 
- -## Profile catalogue - -| Profile | Kind / Variant | Output artefacts | Primary use cases | -|---------|----------------|------------------|-------------------| -| `json:raw` | `json` / `raw` | Canonical JSONL archives of advisories, VEX, SBOMs | Evidence escrow, analytics pipelines | -| `json:policy` | `json` / `policy` | `json:raw` artefacts plus policy snapshot + evaluated findings | Audit, compliance attestations | -| `trivy:db` | `trivy` / `db` | Trivy-compatible vulnerability database | Feeding external scanners / CI | -| `trivy:java-db` | `trivy` / `java-db` | Java ecosystem supplement for Trivy | Supply Java CVE data to Trivy | -| `mirror:full` | `mirror` / `full` | Complete mirror bundle (raw, policy, indexes, provenance) | Air-gap deployments, disaster recovery | -| `mirror:delta` | `mirror` / `delta` | Incremental changes relative to a prior manifest | Efficient mirror updates | - -Profiles can be cloned and customised; configuration is immutable per revision to keep runs reproducible. - -## Common configuration fields - -| Field | Description | Applies to | Notes | -|-------|-------------|------------|-------| -| `name` | Human-readable identifier displayed in Console/CLI | All | Unique per tenant. | -| `kind` | Logical family (`json`, `trivy`, `mirror`) | All | Determines eligible adapters. | -| `variant` | Specific export flavour (see table above) | All | Controls adapter behaviour. | -| `include` | Record types to include (`advisories`, `vex`, `sboms`, `findings`) | JSON, mirror | Defaults depend on variant. | -| `policySnapshotMode` | `required`, `optional`, or `none` | JSON policy, mirror | `required` forces a policy snapshot id when creating runs. | -| `distribution` | Enabled distribution drivers (`http`, `oci`, `object`) | All | Offline installs typically disable `oci`. | -| `compression` | Compression settings (`zstd`, level) | JSON, mirror | Trivy adapters manage compression internally. | -| `encryption` | Mirror encryption options (`enabled`, `recipientKeys`, `strict`) | Mirror | When enabled, only `/data` subtree is encrypted; manifests remain plaintext. | -| `retention` | Retention policy (days or `never`) | All | Drives pruning jobs for staged bundles. | - -Selectors (time windows, tenants, products, SBOM subjects, ecosystems) are supplied per run, not stored in the profile. - -## JSON profiles - -### `json:raw` -- **Content:** Exports raw advisories, VEX statements, and SBOMs as newline-delimited JSON (`.jsonl.zst`). -- **Schema:** Follows canonical StellaOps schema with casing and timestamps normalised. Each record includes `tenant`, `source`, `linkset`, and `content` fields. -- **Options:** - - `include` defaults to `["advisories", "vex", "sboms"]`. - - `compression` defaults to `zstd` level 9. - - `maxRecordsPerFile` (optional) splits outputs for large datasets. -- **Compatibility:** Intended for analytics platforms, data escrow, or feeding downstream normalisation pipelines. -- **Sample manifest excerpt:** - -```json -{ - "profile": { "kind": "json", "variant": "raw" }, - "outputs": [ - { "type": "advisories.jsonl.zst", "sha256": "...", "count": 15234 }, - { "type": "vex.jsonl.zst", "sha256": "...", "count": 3045 }, - { "type": "sboms.jsonl.zst", "sha256": "...", "count": 872 } - ], - "selectors": { "tenant": "acme", "products": ["registry.example/app"] } -} -``` - -### `json:policy` -- **Content:** Everything from `json:raw` plus: - - `policy_snapshot.json` (policy metadata, version, hash). 
- - `findings.policy.jsonl.zst` (evaluated findings with decision, rationale, rule id, inputs hash). -- **Determinism:** Requires a policy snapshot id; runs fail if snapshot is missing or non-deterministic mode is active. -- **Use cases:** Compliance exports, auditor packages, policy attestation archives. -- **Guardrails:** AOC boundaries preserved: policy outputs are clearly partitioned from raw evidence. - -## Trivy profiles - -### `trivy:db` -- Detailed adapter behaviour is documented in `docs/export-center/trivy-adapter.md`. -- **Content:** Produces a Trivy DB-compatible bundle (SQLite database or tarball as required by Trivy version). -- **Mapping rules:** - - Advisory namespaces mapped to Trivy vendor IDs (e.g., `ubuntu`, `debian`, `npm`). - - Version ranges translated into Trivy's semantic version syntax. - - Severity mapped to Trivy standard (`UNKNOWN`, `LOW`, `MEDIUM`, `HIGH`, `CRITICAL`). -- **Validation:** Adapter enforces supported Trivy schema versions; configuration includes `targetSchemaVersion`. -- **Distribution:** Typically pushed to OCI or object storage for downstream scanners; Console download remains available. - -### `trivy:java-db` -- Refer to `docs/export-center/trivy-adapter.md` for ecosystem-specific notes. -- **Content:** Optional Java ecosystem supplement for Trivy (matching Trivy's separate Java DB). -- **Dependencies:** Requires Java advisories in Findings Ledger; run fails with `ERR_EXPORT_EMPTY` if no Java data present and `allowEmpty=false`. -- **Compatibility:** Intended for organisations using Trivy's Java plugin or hardened pipelines that split general and Java feeds. - -## Mirror profiles - -### `mirror:full` -- Bundle structure and delta strategy are covered in `docs/export-center/mirror-bundles.md`. -- **Content:** Complete export with: - - Raw advisories, VEX, SBOMs (`/data/raw`). - - Policy overlays (`/data/policy`), including evaluated findings and policy snapshots. - - Indexes for fast lookup (`/indexes/advisories.pb`, `/indexes/sboms.pb`). - - Manifests and provenance (`/manifests/export.json`, `/manifests/provenance.json`). -- **Layout:** Deterministic directory structure with hashed filenames to reduce duplication. -- **Encryption:** Optional `encryption` block enables age/AES-GCM encryption of `/data`. `strict=true` encrypts everything except `export.json`. -- **Use cases:** Air-gap replication, disaster recovery drills, Offline Kit seeding. - -### `mirror:delta` -- See `docs/export-center/mirror-bundles.md` for delta mechanics and application order. -- **Content:** Includes only changes relative to a base manifest (specified by `baseExportId` when running). - - `changed`, `added`, `removed` lists in `manifests/delta.json`. - - Incremental indexes capturing only updated subjects. -- **Constraints:** Requires the base manifest to exist in object storage or artifact registry accessible to the worker. Fails with `ERR_EXPORT_BASE_MISSING` otherwise. -- **Workflow:** Ideal for frequent updates to mirrored environments with limited bandwidth. - -## Compatibility and guardrails -- **Aggregation-Only Contract:** All profiles respect AOC boundaries: raw evidence is never mutated. Policy outputs are appended separately with clear provenance. -- **Tenant scoping:** Profiles are tenant-specific. Cross-tenant exports require explicit administrative approval and signed justification. -- **Retriable runs:** Re-running a profile with identical selectors yields matching manifests and hashes, facilitating verify-on-download workflows. 
-- **Offline operation:** JSON and mirror profiles function in offline mode without additional configuration. Trivy profiles require pre-seeded schema metadata shipped via Offline Kit. -- **Quota integration:** Profiles can define run quotas (per tenant per day). Quota exhaustion surfaces as `429 Too Many Requests` with `X-Stella-Quota-*` hints. - -## Example profile definition (CLI) - -```jsonc -{ - "name": "daily-json-raw", - "kind": "json", - "variant": "raw", - "include": ["advisories", "vex", "sboms"], - "distribution": ["http", "object"], - "compression": { "codec": "zstd", "level": 9 }, - "retention": { "mode": "days", "value": 14 } -} -``` - -Create via `stella export profile create --file profile.json` (CLI command documented separately). - -## Verification workflow -- Download bundle via Console/CLI and extract `export.json` and `provenance.json`. -- Run `cosign verify --key export.json` (for detatched signatures use `--signature export.json.sig`). -- Validate Trivy bundles with `trivy --cache-dir --debug --db-repository ` or local `trivy module db import`. -- For mirror bundles, run internal `mirror verify` script (bundled in Offline Kit) to ensure directory layout and digests match manifest. - -## Extending profiles -- Use API/CLI to clone an existing profile and adjust `include`, `distribution`, or retention policies. -- Adapter plug-ins can introduce new variants (e.g., `json:raw-lite`, `mirror:policy-only`). Plug-ins must be registered at service restart and documented under `/docs/export-center/profiles.md`. -- Any new profile must append the imposed rule line and follow determinism and guardrail requirements. - -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. +# Export Center Profiles + +Export Center profiles define what data is collected, how it is encoded, and which distribution paths are enabled for a run. Profiles are tenant-scoped and deterministic: identical selectors and source data produce identical bundles. This guide summarises built-in profiles, configuration fields, schema conventions, and compatibility notes. + +## Profile catalogue + +| Profile | Kind / Variant | Output artefacts | Primary use cases | +|---------|----------------|------------------|-------------------| +| `json:raw` | `json` / `raw` | Canonical JSONL archives of advisories, VEX, SBOMs | Evidence escrow, analytics pipelines | +| `json:policy` | `json` / `policy` | `json:raw` artefacts plus policy snapshot + evaluated findings | Audit, compliance attestations | +| `trivy:db` | `trivy` / `db` | Trivy-compatible vulnerability database | Feeding external scanners / CI | +| `trivy:java-db` | `trivy` / `java-db` | Java ecosystem supplement for Trivy | Supply Java CVE data to Trivy | +| `mirror:full` | `mirror` / `full` | Complete mirror bundle (raw, policy, indexes, provenance) | Air-gap deployments, disaster recovery | +| `mirror:delta` | `mirror` / `delta` | Incremental changes relative to a prior manifest | Efficient mirror updates | + +Profiles can be cloned and customised; configuration is immutable per revision to keep runs reproducible. + +## Common configuration fields + +| Field | Description | Applies to | Notes | +|-------|-------------|------------|-------| +| `name` | Human-readable identifier displayed in Console/CLI | All | Unique per tenant. | +| `kind` | Logical family (`json`, `trivy`, `mirror`) | All | Determines eligible adapters. 
| +| `variant` | Specific export flavour (see table above) | All | Controls adapter behaviour. | +| `include` | Record types to include (`advisories`, `vex`, `sboms`, `findings`) | JSON, mirror | Defaults depend on variant. | +| `policySnapshotMode` | `required`, `optional`, or `none` | JSON policy, mirror | `required` forces a policy snapshot id when creating runs. | +| `distribution` | Enabled distribution drivers (`http`, `oci`, `object`) | All | Offline installs typically disable `oci`. | +| `compression` | Compression settings (`zstd`, level) | JSON, mirror | Trivy adapters manage compression internally. | +| `encryption` | Mirror encryption options (`enabled`, `recipientKeys`, `strict`) | Mirror | When enabled, only `/data` subtree is encrypted; manifests remain plaintext. | +| `retention` | Retention policy (days or `never`) | All | Drives pruning jobs for staged bundles. | + +Selectors (time windows, tenants, products, SBOM subjects, ecosystems) are supplied per run, not stored in the profile. + +## JSON profiles + +### `json:raw` +- **Content:** Exports raw advisories, VEX statements, and SBOMs as newline-delimited JSON (`.jsonl.zst`). +- **Schema:** Follows canonical StellaOps schema with casing and timestamps normalised. Each record includes `tenant`, `source`, `linkset`, and `content` fields. +- **Options:** + - `include` defaults to `["advisories", "vex", "sboms"]`. + - `compression` defaults to `zstd` level 9. + - `maxRecordsPerFile` (optional) splits outputs for large datasets. +- **Compatibility:** Intended for analytics platforms, data escrow, or feeding downstream normalisation pipelines. +- **Sample manifest excerpt:** + +```json +{ + "profile": { "kind": "json", "variant": "raw" }, + "outputs": [ + { "type": "advisories.jsonl.zst", "sha256": "...", "count": 15234 }, + { "type": "vex.jsonl.zst", "sha256": "...", "count": 3045 }, + { "type": "sboms.jsonl.zst", "sha256": "...", "count": 872 } + ], + "selectors": { "tenant": "acme", "products": ["registry.example/app"] } +} +``` + +### `json:policy` +- **Content:** Everything from `json:raw` plus: + - `policy_snapshot.json` (policy metadata, version, hash). + - `findings.policy.jsonl.zst` (evaluated findings with decision, rationale, rule id, inputs hash). +- **Determinism:** Requires a policy snapshot id; runs fail if snapshot is missing or non-deterministic mode is active. +- **Use cases:** Compliance exports, auditor packages, policy attestation archives. +- **Guardrails:** AOC boundaries preserved: policy outputs are clearly partitioned from raw evidence. + +## Trivy profiles + +### `trivy:db` +- Detailed adapter behaviour is documented in `docs/export-center/trivy-adapter.md`. +- **Content:** Produces a Trivy DB-compatible bundle (SQLite database or tarball as required by Trivy version). +- **Mapping rules:** + - Advisory namespaces mapped to Trivy vendor IDs (e.g., `ubuntu`, `debian`, `npm`). + - Version ranges translated into Trivy's semantic version syntax. + - Severity mapped to Trivy standard (`UNKNOWN`, `LOW`, `MEDIUM`, `HIGH`, `CRITICAL`). +- **Validation:** Adapter enforces supported Trivy schema versions; configuration includes `targetSchemaVersion`. +- **Distribution:** Typically pushed to OCI or object storage for downstream scanners; Console download remains available. + +### `trivy:java-db` +- Refer to `docs/export-center/trivy-adapter.md` for ecosystem-specific notes. +- **Content:** Optional Java ecosystem supplement for Trivy (matching Trivy's separate Java DB). 
+- **Dependencies:** Requires Java advisories in Findings Ledger; run fails with `ERR_EXPORT_EMPTY` if no Java data is present and `allowEmpty=false`.
+- **Compatibility:** Intended for organisations using Trivy's Java plugin or hardened pipelines that split general and Java feeds.
+
+## Mirror profiles
+
+### `mirror:full`
+- Bundle structure and delta strategy are covered in `docs/export-center/mirror-bundles.md`.
+- **Content:** Complete export with:
+  - Raw advisories, VEX, SBOMs (`/data/raw`).
+  - Policy overlays (`/data/policy`), including evaluated findings and policy snapshots.
+  - Indexes for fast lookup (`/indexes/advisories.pb`, `/indexes/sboms.pb`).
+  - Manifests and provenance (`/manifests/export.json`, `/manifests/provenance.json`).
+- **Layout:** Deterministic directory structure with hashed filenames to reduce duplication.
+- **Encryption:** Optional `encryption` block enables age/AES-GCM encryption of `/data`. `strict=true` encrypts everything except `export.json`.
+- **Use cases:** Air-gap replication, disaster recovery drills, Offline Kit seeding.
+
+### `mirror:delta`
+- See `docs/export-center/mirror-bundles.md` for delta mechanics and application order.
+- **Content:** Includes only changes relative to a base manifest (specified by `baseExportId` when running).
+  - `changed`, `added`, `removed` lists in `manifests/delta.json`.
+  - Incremental indexes capturing only updated subjects.
+- **Constraints:** Requires the base manifest to exist in object storage or an artifact registry accessible to the worker. Fails with `ERR_EXPORT_BASE_MISSING` otherwise.
+- **Workflow:** Ideal for frequent updates to mirrored environments with limited bandwidth.
+
+## Compatibility and guardrails
+- **Aggregation-Only Contract:** All profiles respect AOC boundaries: raw evidence is never mutated. Policy outputs are appended separately with clear provenance.
+- **Tenant scoping:** Profiles are tenant-specific. Cross-tenant exports require explicit administrative approval and signed justification.
+- **Retriable runs:** Re-running a profile with identical selectors yields matching manifests and hashes, facilitating verify-on-download workflows.
+- **Offline operation:** JSON and mirror profiles function in offline mode without additional configuration. Trivy profiles require pre-seeded schema metadata shipped via Offline Kit.
+- **Quota integration:** Profiles can define run quotas (per tenant per day). Quota exhaustion surfaces as `429 Too Many Requests` with `X-Stella-Quota-*` hints.
+
+## Example profile definition (CLI)
+
+```jsonc
+{
+  "name": "daily-json-raw",
+  "kind": "json",
+  "variant": "raw",
+  "include": ["advisories", "vex", "sboms"],
+  "distribution": ["http", "object"],
+  "compression": { "codec": "zstd", "level": 9 },
+  "retention": { "mode": "days", "value": 14 }
+}
+```
+
+Create via `stella export profile create --file profile.json` (CLI command documented separately).
+
+## Verification workflow
+- Download the bundle via Console/CLI and extract `export.json` and `provenance.json`.
+- Verify the detached manifest signature with `cosign verify-blob --key <tenant public key> --signature export.json.sig export.json`.
+- Validate Trivy bundles with `trivy --cache-dir <dir> --debug --db-repository <repository>` or a local `trivy module db import`.
+- For mirror bundles, run the internal `mirror verify` script (bundled in Offline Kit) to ensure directory layout and digests match the manifest.
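+
+The steps above can be scripted for CI or Offline Kit tooling. The following is a minimal sketch, assuming a `json:raw` bundle extracted locally, a tenant public key distributed out of band, and the manifest layout from the sample excerpt above; treating `outputs[].type` as a relative file name is an assumption, and `stella export verify` remains the supported wrapper.
+
+```bash
+# Verify the signed manifest, then recompute artefact digests (sketch).
+set -euo pipefail
+
+export_dir="$1"                      # extracted bundle directory
+pubkey="${2:-keys/acme-export.pub}"  # tenant public key (assumed path)
+
+# 1. Check the detached signature on the manifest.
+cosign verify-blob \
+  --key "$pubkey" \
+  --signature "${export_dir}/export.json.sig" \
+  "${export_dir}/export.json"
+
+# 2. Recompute SHA-256 digests for every listed output.
+jq -r '.outputs[] | "\(.sha256)  \(.type)"' "${export_dir}/export.json" \
+  | (cd "$export_dir" && sha256sum -c -)
+```
+
+## Extending profiles
+- Use API/CLI to clone an existing profile and adjust `include`, `distribution`, or retention policies.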
+- Adapter plug-ins can introduce new variants (e.g., `json:raw-lite`, `mirror:policy-only`). Plug-ins must be registered at service restart and documented under `/docs/export-center/profiles.md`. +- Any new profile must append the imposed rule line and follow determinism and guardrail requirements. + +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. diff --git a/docs/export-center/provenance-and-signing.md b/docs/export-center/provenance-and-signing.md index 98f06b91..622694e5 100644 --- a/docs/export-center/provenance-and-signing.md +++ b/docs/export-center/provenance-and-signing.md @@ -1,150 +1,150 @@ -# Export Center Provenance & Signing - -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -Export Center runs emit deterministic manifests, provenance records, and signatures so operators can prove bundle integrity end-to-end—whether the artefact is downloaded over HTTPS, pulled as an OCI object, or staged through the Offline Kit. This guide captures the canonical artefacts, signing pipeline, verification workflows, and failure handling expectations that backlogs `EXPORT-SVC-35-005` and `EXPORT-SVC-37-002` implement. - ---- - -## 1. Goals & scope - -- **Authenticity.** Every export manifest and provenance document is signed using Authority-managed KMS keys (cosign-compatible) with optional SLSA Level 3 attestation. -- **Traceability.** Provenance links each bundle to the inputs that produced it: tenant, findings ledger queries, policy snapshots, SBOM identifiers, adapter versions, and encryption recipients. -- **Determinism.** Canonical JSON (sorted keys, RFC 3339 UTC timestamps, normalized numbers) guarantees byte-for-byte stability across reruns with identical input. -- **Portability.** Signatures and attestations travel with filesystem bundles, OCI artefacts, and Offline Kit staging trees. Verification does not require online Authority access when the bundle includes the cosign public key. - ---- - -## 2. Artefact inventory - -| File | Location | Description | Notes | -|------|----------|-------------|-------| -| `export.json` | `manifests/export.json` or HTTP `GET /api/export/runs/{id}/manifest` | Canonical manifest describing profile, selectors, counts, SHA-256 digests, compression hints, distribution targets. | Hash of this file is included in provenance `subjects[]`. | -| `provenance.json` | `manifests/provenance.json` or `GET /api/export/runs/{id}/provenance` | In-toto provenance record listing subjects, materials, toolchain metadata, encryption recipients, and KMS key identifiers. | Mirrors SLSA Level 2 schema; optionally upgraded to Level 3 with builder attestations. | -| `export.json.sig` / `export.json.dsse` | `signatures/export.json.sig` | Cosign signature (and optional DSSE envelope) for manifest. | File naming matches cosign defaults; offline verification scripts expect `.sig`. | -| `provenance.json.sig` / `provenance.json.dsse` | `signatures/provenance.json.sig` | Cosign signature (and optional DSSE envelope) for provenance document. | `dsse` present when SLSA Level 3 is enabled. | -| `bundle.attestation` | `signatures/bundle.attestation` (optional) | SLSA Level 2/3 attestation binding bundle tarball/OCI digest to the run. | Only produced when `export.attestation.enabled=true`. 
| -| `manifest.yaml` | bundle root | Human-readable summary including digests, sizes, encryption metadata, and verification hints. | Unsigned but redundant; signatures cover the JSON manifests. | - -All digests use lowercase hex SHA-256 (`sha256:`). When bundle encryption is enabled, `provenance.json` records wrapped data keys and recipient fingerprints under `encryption.recipients[]`. - ---- - -## 3. Signing pipeline - -1. **Canonicalisation.** Export worker serialises `export.json` and `provenance.json` using `NotifyCanonicalJsonSerializer` (identical canonical JSON helpers shared across services). Keys are sorted lexicographically, arrays ordered deterministically, timestamps normalised to UTC. -2. **Digest creation.** SHA-256 digests are computed and recorded: - - `manifest_hash` and `provenance_hash` stored in the run metadata (Mongo) and exported via `/api/export/runs/{id}`. - - Provenance `subjects[]` contains both manifest hash and bundle/archive hash. -3. **Key retrieval.** Worker obtains a short-lived signing token from Authority’s KMS client using tenant-scoped credentials (`export.sign` scope). Keys live in Authority or tenant-specific HSMs depending on deployment. -4. **Signature emission.** Cosign generates detached signatures (`*.sig`). If DSSE is enabled, cosign wraps payload bytes in a DSSE envelope (`*.dsse`). Attestations follow the SLSA Level 2 provenance template; Level 3 requires builder metadata (`EXPORT-SVC-37-002` optional feature flag). -5. **Storage & distribution.** Signatures and attestations are written alongside manifests in object storage, included in filesystem bundles, and attached as OCI artefact layers/annotations. -6. **Audit trail.** Run metadata captures signer identity (`signing_key_id`), cosign certificate serial, signature timestamps, and verification hints. Console/CLI surface these details for downstream automation. - -> **Key management.** Secrets and key references are configured per tenant via `export.signing`, pointing to Authority clients or external HSM aliases. Offline deployments pre-load cosign public keys into the bundle (`signatures/pubkeys/{tenant}.pem`). - ---- - -## 4. Provenance schema highlights - -`provenance.json` follows the SLSA provenance (`https://slsa.dev/provenance/v1`) structure with StellaOps-specific extensions. Key fields: - -| Path | Description | -|------|-------------| -| `subject[]` | Array of `{name,digest}` pairs. Includes bundle tarball/OCI digest and `export.json` digest. | -| `predicateType` | SLSA v1 (default). | -| `predicate.builder` | `{id:"stellaops/export-center@"}` identifies the worker instance/cluster. | -| `predicate.buildType` | Profile identifier (`mirror:full`, `mirror:delta`, etc.). | -| `predicate.invocation.parameters` | Profile selectors, retention flags, encryption mode, base export references. | -| `predicate.materials[]` | Source artefacts with digests: findings ledger query snapshots, policy snapshot IDs + hashes, SBOM identifiers, adapter release digests. | -| `predicate.metadata.buildFinishedOn` | RFC 3339 timestamp when signing completed. | -| `predicate.metadata.reproducible` | Always `true`—workers guarantee determinism. | -| `predicate.environment.encryption` | Records encryption recipients, wrapped keys, algorithm (`age` or `aes-gcm`). | -| `predicate.environment.kms` | Signing key identifier (`authority://tenant/export-signing-key`) and certificate chain fingerprints. | - -Sample (abridged): - -```json -{ - "subject": [ - { "name": "bundle.tar.zst", "digest": { "sha256": "c1fe..." 
} }, - { "name": "manifests/export.json", "digest": { "sha256": "ad42..." } } - ], - "predicate": { - "buildType": "mirror:delta", - "invocation": { - "parameters": { - "tenant": "tenant-01", - "baseExportId": "run-20251020-01", - "selectors": { "sources": ["concelier","vexer"], "profiles": ["mirror"] } - } - }, - "materials": [ - { "uri": "ledger://tenant-01/findings?cursor=rev-42", "digest": { "sha256": "0f9a..." } }, - { "uri": "policy://tenant-01/snapshots/rev-17", "digest": { "sha256": "8c3d..." } } - ], - "environment": { - "encryption": { - "mode": "age", - "recipients": [ - { "recipient": "age1qxyz...", "wrappedKey": "BASE64...", "keyId": "tenant-01/notify-age" } - ] - }, - "kms": { - "signingKeyId": "authority://tenant-01/export-signing", - "certificateChainSha256": "1f5e..." - } - } - } -} -``` - ---- - -## 5. Verification workflows - -| Scenario | Steps | -|----------|-------| -| **CLI verification** | 1. `stella export manifest --output manifests/export.json --signature manifests/export.json.sig`
2. `stella export provenance --output manifests/provenance.json --signature manifests/provenance.json.sig`
3. `cosign verify-blob --key pubkeys/tenant.pem --signature manifests/export.json.sig manifests/export.json`
4. `cosign verify-blob --key pubkeys/tenant.pem --signature manifests/provenance.json.sig manifests/provenance.json` | -| **Bundle verification (offline)** | 1. Extract bundle (or mount OCI artefact).
2. Validate manifest/provenance signatures using bundled public key.
3. Recompute SHA-256 for `data/` files and compare with entries in `export.json`.
4. If encrypted, decrypt with Age/AES-GCM recipient key, then re-run digest comparisons on decrypted content. | -| **CI pipeline** | Use `stella export verify --manifest manifests/export.json --provenance manifests/provenance.json --signature manifests/export.json.sig --signature manifests/provenance.json.sig` (task `CLI-EXPORT-37-001`). Failure exits non-zero with reason codes (`ERR_EXPORT_SIG_INVALID`, `ERR_EXPORT_DIGEST_MISMATCH`). | -| **Console download** | Console automatically verifies signatures before exposing the bundle; failure surfaces an actionable error referencing the export run ID and required remediation. | - -Verification guidance (docs/cli/cli-reference.md §export) cross-links here; keep both docs in sync when CLI behaviour changes. - ---- - -## 6. Distribution considerations - -- **HTTP headers.** `X-Export-Digest` includes bundle digest; `X-Export-Provenance` references `provenance.json` URL; `X-Export-Signature` references `.sig`. Clients use these hints to short-circuit re-downloads. -- **OCI annotations.** `org.opencontainers.image.ref.name`, `io.stellaops.export.manifest-digest`, and `io.stellaops.export.provenance-ref` allow registry tooling to locate manifests/signatures quickly. -- **Offline Kit staging.** Offline kit assembler copies `manifests/`, `signatures/`, and `pubkeys/` verbatim. Verification scripts (`offline-kits/bin/verify-export.sh`) wrap the cosign commands described above. - ---- - -## 7. Failure handling & observability - -- Runs surface signature status via `/api/export/runs/{id}` (`signing.status`, `signing.lastError`). Common errors include `ERR_EXPORT_KMS_UNAVAILABLE`, `ERR_EXPORT_ATTESTATION_FAILED`, `ERR_EXPORT_CANONICALIZE`. -- Metrics: `exporter_sign_duration_seconds`, `exporter_sign_failures_total{error_code}`, `exporter_provenance_verify_failures_total`. -- Logs: `phase=sign`, `error_code`, `signing_key_id`, `cosign_certificate_sn`. -- Alerts: DevOps dashboards (task `DEVOPS-EXPORT-37-001`) trigger on consecutive signing failures or verification failures >0. - -When verification fails downstream, operators should: -1. Confirm signatures using the known-good key. -2. Inspect `provenance.json` materials; rerun the source queries to ensure matching digests. -3. Review run audit logs and retry export with `--resume` to regenerate manifests. - ---- - -## 8. Compliance checklist - -- [ ] Manifests and provenance documents generated with canonical JSON, deterministic digests, and signatures. -- [ ] Cosign public keys published per tenant, rotated through Authority, and distributed to Offline Kit consumers. -- [ ] SLSA attestations enabled where supply-chain requirements demand Level 3 evidence. -- [ ] CLI/Console verification paths documented and tested (CI pipelines exercise `stella export verify`). -- [ ] Encryption metadata (recipients, wrapped keys) recorded in provenance and validated during verification. -- [ ] Run audit logs capture signature timestamps, signer identity, and failure reasons. - ---- - -> **Imposed rule reminder:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. +# Export Center Provenance & Signing + +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. 
+ +Export Center runs emit deterministic manifests, provenance records, and signatures so operators can prove bundle integrity end-to-end—whether the artefact is downloaded over HTTPS, pulled as an OCI object, or staged through the Offline Kit. This guide captures the canonical artefacts, signing pipeline, verification workflows, and failure handling expectations that backlogs `EXPORT-SVC-35-005` and `EXPORT-SVC-37-002` implement. + +--- + +## 1. Goals & scope + +- **Authenticity.** Every export manifest and provenance document is signed using Authority-managed KMS keys (cosign-compatible) with optional SLSA Level 3 attestation. +- **Traceability.** Provenance links each bundle to the inputs that produced it: tenant, findings ledger queries, policy snapshots, SBOM identifiers, adapter versions, and encryption recipients. +- **Determinism.** Canonical JSON (sorted keys, RFC 3339 UTC timestamps, normalized numbers) guarantees byte-for-byte stability across reruns with identical input. +- **Portability.** Signatures and attestations travel with filesystem bundles, OCI artefacts, and Offline Kit staging trees. Verification does not require online Authority access when the bundle includes the cosign public key. + +--- + +## 2. Artefact inventory + +| File | Location | Description | Notes | +|------|----------|-------------|-------| +| `export.json` | `manifests/export.json` or HTTP `GET /api/export/runs/{id}/manifest` | Canonical manifest describing profile, selectors, counts, SHA-256 digests, compression hints, distribution targets. | Hash of this file is included in provenance `subjects[]`. | +| `provenance.json` | `manifests/provenance.json` or `GET /api/export/runs/{id}/provenance` | In-toto provenance record listing subjects, materials, toolchain metadata, encryption recipients, and KMS key identifiers. | Mirrors SLSA Level 2 schema; optionally upgraded to Level 3 with builder attestations. | +| `export.json.sig` / `export.json.dsse` | `signatures/export.json.sig` | Cosign signature (and optional DSSE envelope) for manifest. | File naming matches cosign defaults; offline verification scripts expect `.sig`. | +| `provenance.json.sig` / `provenance.json.dsse` | `signatures/provenance.json.sig` | Cosign signature (and optional DSSE envelope) for provenance document. | `dsse` present when SLSA Level 3 is enabled. | +| `bundle.attestation` | `signatures/bundle.attestation` (optional) | SLSA Level 2/3 attestation binding bundle tarball/OCI digest to the run. | Only produced when `export.attestation.enabled=true`. | +| `manifest.yaml` | bundle root | Human-readable summary including digests, sizes, encryption metadata, and verification hints. | Unsigned but redundant; signatures cover the JSON manifests. | + +All digests use lowercase hex SHA-256 (`sha256:`). When bundle encryption is enabled, `provenance.json` records wrapped data keys and recipient fingerprints under `encryption.recipients[]`. + +--- + +## 3. Signing pipeline + +1. **Canonicalisation.** Export worker serialises `export.json` and `provenance.json` using `NotifyCanonicalJsonSerializer` (identical canonical JSON helpers shared across services). Keys are sorted lexicographically, arrays ordered deterministically, timestamps normalised to UTC. +2. **Digest creation.** SHA-256 digests are computed and recorded: + - `manifest_hash` and `provenance_hash` stored in the run metadata (Mongo) and exported via `/api/export/runs/{id}`. + - Provenance `subjects[]` contains both manifest hash and bundle/archive hash. +3. 
**Key retrieval.** Worker obtains a short-lived signing token from Authority’s KMS client using tenant-scoped credentials (`export.sign` scope). Keys live in Authority or tenant-specific HSMs depending on deployment. +4. **Signature emission.** Cosign generates detached signatures (`*.sig`). If DSSE is enabled, cosign wraps payload bytes in a DSSE envelope (`*.dsse`). Attestations follow the SLSA Level 2 provenance template; Level 3 requires builder metadata (`EXPORT-SVC-37-002` optional feature flag). +5. **Storage & distribution.** Signatures and attestations are written alongside manifests in object storage, included in filesystem bundles, and attached as OCI artefact layers/annotations. +6. **Audit trail.** Run metadata captures signer identity (`signing_key_id`), cosign certificate serial, signature timestamps, and verification hints. Console/CLI surface these details for downstream automation. + +> **Key management.** Secrets and key references are configured per tenant via `export.signing`, pointing to Authority clients or external HSM aliases. Offline deployments pre-load cosign public keys into the bundle (`signatures/pubkeys/{tenant}.pem`). + +--- + +## 4. Provenance schema highlights + +`provenance.json` follows the SLSA provenance (`https://slsa.dev/provenance/v1`) structure with StellaOps-specific extensions. Key fields: + +| Path | Description | +|------|-------------| +| `subject[]` | Array of `{name,digest}` pairs. Includes bundle tarball/OCI digest and `export.json` digest. | +| `predicateType` | SLSA v1 (default). | +| `predicate.builder` | `{id:"stellaops/export-center@"}` identifies the worker instance/cluster. | +| `predicate.buildType` | Profile identifier (`mirror:full`, `mirror:delta`, etc.). | +| `predicate.invocation.parameters` | Profile selectors, retention flags, encryption mode, base export references. | +| `predicate.materials[]` | Source artefacts with digests: findings ledger query snapshots, policy snapshot IDs + hashes, SBOM identifiers, adapter release digests. | +| `predicate.metadata.buildFinishedOn` | RFC 3339 timestamp when signing completed. | +| `predicate.metadata.reproducible` | Always `true`—workers guarantee determinism. | +| `predicate.environment.encryption` | Records encryption recipients, wrapped keys, algorithm (`age` or `aes-gcm`). | +| `predicate.environment.kms` | Signing key identifier (`authority://tenant/export-signing-key`) and certificate chain fingerprints. | + +Sample (abridged): + +```json +{ + "subject": [ + { "name": "bundle.tar.zst", "digest": { "sha256": "c1fe..." } }, + { "name": "manifests/export.json", "digest": { "sha256": "ad42..." } } + ], + "predicate": { + "buildType": "mirror:delta", + "invocation": { + "parameters": { + "tenant": "tenant-01", + "baseExportId": "run-20251020-01", + "selectors": { "sources": ["concelier","vexer"], "profiles": ["mirror"] } + } + }, + "materials": [ + { "uri": "ledger://tenant-01/findings?cursor=rev-42", "digest": { "sha256": "0f9a..." } }, + { "uri": "policy://tenant-01/snapshots/rev-17", "digest": { "sha256": "8c3d..." } } + ], + "environment": { + "encryption": { + "mode": "age", + "recipients": [ + { "recipient": "age1qxyz...", "wrappedKey": "BASE64...", "keyId": "tenant-01/notify-age" } + ] + }, + "kms": { + "signingKeyId": "authority://tenant-01/export-signing", + "certificateChainSha256": "1f5e..." + } + } + } +} +``` + +--- + +## 5. Verification workflows + +| Scenario | Steps | +|----------|-------| +| **CLI verification** | 1. 
`stella export manifest --output manifests/export.json --signature manifests/export.json.sig`<br/>
2. `stella export provenance --output manifests/provenance.json --signature manifests/provenance.json.sig`<br/>
3. `cosign verify-blob --key pubkeys/tenant.pem --signature manifests/export.json.sig manifests/export.json`<br/>
4. `cosign verify-blob --key pubkeys/tenant.pem --signature manifests/provenance.json.sig manifests/provenance.json` | +| **Bundle verification (offline)** | 1. Extract bundle (or mount OCI artefact).<br/>
2. Validate manifest/provenance signatures using bundled public key.<br/>
3. Recompute SHA-256 for `data/` files and compare with entries in `export.json`.<br/>
4. If encrypted, decrypt with Age/AES-GCM recipient key, then re-run digest comparisons on decrypted content. | +| **CI pipeline** | Use `stella export verify --manifest manifests/export.json --provenance manifests/provenance.json --signature manifests/export.json.sig --signature manifests/provenance.json.sig` (task `CLI-EXPORT-37-001`). Failure exits non-zero with reason codes (`ERR_EXPORT_SIG_INVALID`, `ERR_EXPORT_DIGEST_MISMATCH`). | +| **Console download** | Console automatically verifies signatures before exposing the bundle; failure surfaces an actionable error referencing the export run ID and required remediation. | + +Verification guidance (docs/cli/cli-reference.md §export) cross-links here; keep both docs in sync when CLI behaviour changes. + +--- + +## 6. Distribution considerations + +- **HTTP headers.** `X-Export-Digest` includes bundle digest; `X-Export-Provenance` references `provenance.json` URL; `X-Export-Signature` references `.sig`. Clients use these hints to short-circuit re-downloads. +- **OCI annotations.** `org.opencontainers.image.ref.name`, `io.stellaops.export.manifest-digest`, and `io.stellaops.export.provenance-ref` allow registry tooling to locate manifests/signatures quickly. +- **Offline Kit staging.** Offline kit assembler copies `manifests/`, `signatures/`, and `pubkeys/` verbatim. Verification scripts (`offline-kits/bin/verify-export.sh`) wrap the cosign commands described above. + +--- + +## 7. Failure handling & observability + +- Runs surface signature status via `/api/export/runs/{id}` (`signing.status`, `signing.lastError`). Common errors include `ERR_EXPORT_KMS_UNAVAILABLE`, `ERR_EXPORT_ATTESTATION_FAILED`, `ERR_EXPORT_CANONICALIZE`. +- Metrics: `exporter_sign_duration_seconds`, `exporter_sign_failures_total{error_code}`, `exporter_provenance_verify_failures_total`. +- Logs: `phase=sign`, `error_code`, `signing_key_id`, `cosign_certificate_sn`. +- Alerts: DevOps dashboards (task `DEVOPS-EXPORT-37-001`) trigger on consecutive signing failures or verification failures >0. + +When verification fails downstream, operators should: +1. Confirm signatures using the known-good key. +2. Inspect `provenance.json` materials; rerun the source queries to ensure matching digests. +3. Review run audit logs and retry export with `--resume` to regenerate manifests. + +--- + +## 8. Compliance checklist + +- [ ] Manifests and provenance documents generated with canonical JSON, deterministic digests, and signatures. +- [ ] Cosign public keys published per tenant, rotated through Authority, and distributed to Offline Kit consumers. +- [ ] SLSA attestations enabled where supply-chain requirements demand Level 3 evidence. +- [ ] CLI/Console verification paths documented and tested (CI pipelines exercise `stella export verify`). +- [ ] Encryption metadata (recipients, wrapped keys) recorded in provenance and validated during verification. +- [ ] Run audit logs capture signature timestamps, signer identity, and failure reasons. + +--- + +> **Imposed rule reminder:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. diff --git a/docs/export-center/trivy-adapter.md b/docs/export-center/trivy-adapter.md index fc2e2d13..0d2e196f 100644 --- a/docs/export-center/trivy-adapter.md +++ b/docs/export-center/trivy-adapter.md @@ -1,246 +1,246 @@ -# Export Center Trivy Adapters - -The Trivy adapters translate StellaOps normalized advisories into the format consumed by Aqua Security's Trivy scanner. 
They enable downstream tooling to reuse StellaOps' curated data without bespoke converters, while preserving Aggregation-Only Contract (AOC) boundaries. This guide documents bundle layouts, field mappings, compatibility guarantees, validation workflows, and configuration toggles introduced in Sprint 36 (`EXPORT-SVC-36-001`, `EXPORT-SVC-36-002`). - -> The current Export Center build is wiring the API and workers. Treat this document as the canonical interface for adapter implementation and update any behavioural changes during task sign-off. - -## 1. Adapter overview - -| Variant | Bundle | Default profile | Notes | -|---------|--------|-----------------|-------| -| `trivy:db` | `db.bundle` | `trivy:db` | Core vulnerability database compatible with Trivy CLI >= 0.50.0 (schema v2). | -| `trivy:java-db` | `java-db.bundle` | Optional extension | Java ecosystem supplement (Maven, Gradle). Enabled when `ExportCenter:Profiles:Trivy:EnableJavaDb=true`. | - -Both variants ship inside the export run under `/export/trivy/`. Each bundle is a gzip-compressed tarball containing: - -``` -metadata.json -trivy.db # BoltDB file with vulnerability/provider tables -packages/*.json # Only when schema requires JSON overlays (language ecosystems) -``` - -The adapters never mutate input evidence. They only reshape normalized advisories and copy the exact upstream references so consumers can trace provenance. - -## 2. Bundle layout - -``` -trivy/ - db.bundle - +-- metadata.json - +-- trivy.db - java-db.bundle # present when Java DB enabled - +-- metadata.json - +-- trivy-java.db - +-- ecosystem/... -signatures/ - trivy-db.sig - trivy-java-db.sig -``` - -`metadata.json` aligns with Trivy's expectations (`schemaVersion`, `buildInfo`, `updatedAt`, etc.). Export Center adds an `stella` block to capture profile id, run id, and policy snapshot hints. - -Example `metadata.json` (trimmed): - -```json -{ - "schemaVersion": 2, - "buildInfo": { - "trivyVersion": "0.50.1", - "vulnerabilityDBVersion": "2025-10-28T00:00:00Z" - }, - "updatedAt": "2025-10-29T11:42:03Z", - "stella": { - "runId": "run-20251029-01", - "profileId": "prof-trivy-db", - "tenant": "acme", - "policySnapshotId": "policy-snap-42", - "schemaVersion": 2 - } -} -``` - -## 3. Field mappings - -### 3.1 Namespace resolution - -| Stella field | Trivy field | Notes | -|--------------|-------------|-------| -| `advisory.source.vendor` | `namespace` | Canonicalized to lowercase; e.g. `Ubuntu` -> `ubuntu`. | -| `advisory.source.product` | `distribution` / `ecosystem` | Mapped via allowlist (`Ubuntu 22.04` -> `ubuntu:22.04`). | -| `package.ecosystem` | `package.ecosystem` | OSS ecosystems (`npm`, `pip`, `nuget`, etc.). | -| `package.nevra` / `package.evr` | `package.version` (OS) | RPM/DEB version semantics preserved. | - -If a record lacks a supported namespace, the adapter drops it and logs `adapter.trivy.unsupported_namespace`. - -### 3.2 Vulnerability metadata - -| Stella field | Trivy field | Transformation | -|--------------|-------------|----------------| -| `advisory.identifiers.cve[]` | `vulnerability.CVEIDs` | Array of strings. | -| `advisory.identifiers.aliases[]` | `vulnerability.CWEIDs` / `References` | CVE -> `CVEIDs`, others appended to `References`. | -| `advisory.summary` | `vulnerability.Title` | Stripped to 256 chars; rest moved to `Description`. | -| `advisory.description` | `vulnerability.Description` | Markdown allowed, normalized to LF line endings. | -| `advisory.severity.normalized` | `vulnerability.Severity` | Uses table below. 
| -| `advisory.cvss[]` | `vulnerability.CVSS` | Stored as `{"vector": "...", "score": 7.8, "source": "NVD"}`. | -| `advisory.published` | `vulnerability.PublishedDate` | ISO 8601 UTC. | -| `advisory.modified` | `vulnerability.LastModifiedDate` | ISO 8601 UTC. | -| `advisory.vendorStatement` | `vulnerability.VendorSeverity` / `VendorVectors` | Preserved in vendor block. | - -Severity mapping: - -| Stella severity | Trivy severity | -|-----------------|----------------| -| `critical` | `CRITICAL` | -| `high` | `HIGH` | -| `medium` | `MEDIUM` | -| `low` | `LOW` | -| `none` / `info` | `UNKNOWN` | - -### 3.3 Affected packages - -| Stella field | Trivy field | Notes | -|--------------|-------------|-------| -| `package.name` | `package.name` | For OS distros uses source package when available. | -| `package.purl` | `package.PURL` | Copied verbatim. | -| `affects.vulnerableRange` | `package.vulnerableVersionRange` | SemVer or distro version range. | -| `remediations.fixedVersion` | `package.fixedVersion` | Latest known fix. | -| `remediations.urls[]` | `package.links` | Array; duplicates removed. | -| `states.cpes[]` | `package.cpes` | For CPE-backed advisories. | - -The adapter deduplicates entries by `(namespace, package.name, vulnerableRange)` to avoid duplicate records when multiple upstream segments agree. - -Example mapping (Ubuntu advisory): - -```jsonc -// Stella normalized input -{ - "source": {"vendor": "Ubuntu", "product": "22.04"}, - "identifiers": {"cve": ["CVE-2024-12345"]}, - "severity": {"normalized": "high"}, - "affects": [{ - "package": {"name": "openssl", "ecosystem": "ubuntu", "nevra": "1.1.1f-1ubuntu2.12"}, - "vulnerableRange": "< 1.1.1f-1ubuntu2.13", - "remediations": [{"fixedVersion": "1.1.1f-1ubuntu2.13"}] - }] -} - -// Trivy vulnerability entry -{ - "namespace": "ubuntu", - "package": { - "name": "openssl", - "version": "< 1.1.1f-1ubuntu2.13", - "fixedVersion": "1.1.1f-1ubuntu2.13" - }, - "vulnerability": { - "ID": "CVE-2024-12345", - "Severity": "HIGH" - } -} -``` - -### 3.4 Java DB specifics - -The Java supplement only includes ecosystems `maven`, `gradle`, `sbt`. Additional fields: - -| Stella field | Trivy Java field | Notes | -|--------------|------------------|-------| -| `package.group` | `GroupID` | Derived from Maven coordinates. | -| `package.artifact` | `ArtifactID` | Derived from Maven coordinates. | -| `package.version` | `Version` | Compared with semver-lite rules. | -| `affects.symbolicRanges[]` | `VulnerableVersions` | Strings like `[1.0.0,1.2.3)`. | - -## 4. Compatibility matrix - -| Trivy version | Schema version | Supported by adapter | Notes | -|---------------|----------------|----------------------|-------| -| 0.46.x | 2 | Yes | Baseline compatibility target. | -| 0.50.x | 2 | Yes | Default validation target in CI. | -| 0.51.x+ | 3 | Pending | Adapter throws `ERR_EXPORT_UNSUPPORTED_SCHEMA` until implemented. | - -Schema mismatches emit `adapter.trivy.unsupported_schema_version` and abort the run. Operators can pin the schema via `ExportCenter:Adapters:Trivy:SchemaVersion`. - -## 5. Validation workflow - -1. **Unit tests** (`StellaOps.ExportCenter.Tests`): - - Mapping tests for OS and ecosystem packages. - - Severity conversion and range handling property tests. -2. **Integration tests** (`EXPORT-SVC-36-001`): - - Generate bundle from fixture dataset. - - Run `trivy module db import ` (Trivy CLI) to ensure the bundle is accepted. - - For Java DB, run `trivy java-repo --db ` against sample repository. -3. 
**CI smoke (`DEVOPS-EXPORT-36-001`)**: - - Validate metadata fields using `jq`. - - Ensure signatures verify with `cosign`. - - Check runtime by invoking `trivy fs --cache-dir --skip-update --custom-db fixtures/image`. - -Failures set the run status to `failed` with `errorCode="adapter-trivy"` so Console/CLI expose the reason. - -## 6. Configuration knobs - -```yaml -ExportCenter: - Adapters: - Trivy: - SchemaVersion: 2 # enforce schema version - IncludeJavaDb: true # enable java-db.bundle - AllowEmpty: false # fail when no records match - MaxCvssVectorsPerEntry: 5 # truncate to avoid oversized payloads - Distribution: - Oras: - TrivyRepository: "registry.example.com/stella/trivy-db" - PublishDelta: false - Download: - FilenameFormat: "trivy-db-{runId}.tar.gz" - IncludeMetadata: true -``` - -- `AllowEmpty=false` converts empty datasets into `ERR_EXPORT_EMPTY`. -- `MaxCvssVectorsPerEntry` prevents extremely large multi-vector advisories from bloating the DB. -- `PublishDelta` works in tandem with the planner's delta logic; when true, only changed blobs are pushed. -- `FilenameFormat` lets operators align downloads with existing mirror tooling. -- `IncludeMetadata` toggles whether `metadata.json` is stored alongside the bundle in the staging directory for quick inspection. - -## 7. Distribution guidelines - -- **Download profile**: `db.bundle` placed under `/export/trivy/` and signed. Recommended filename `trivy-db-.tar.gz`. -- **OCI push**: ORAS artifact with annotations: - - `org.opencontainers.artifact.description=StellaOps Trivy DB` - - `io.stella.export.profile=trivy:db` - - `io.stella.export.run=` - - `io.stella.export.schemaVersion=2` -- **Offline Kit**: When `offlineBundle.includeTrivyDb=true`, the exporter copies the latest full bundle plus the last `N` deltas (configurable) with manifests for quick import. - -Consumers should always verify signatures using `trivy-db.sig` / `trivy-java-db.sig` before trusting the bundle. - -Example verification flow: - -```bash -cosign verify-blob \ - --key tenants/acme/export-center.pub \ - --signature signatures/trivy-db.sig \ - trivy/db.bundle - -trivy module db import trivy/db.bundle --cache-dir /tmp/trivy-cache -``` - -## 8. Troubleshooting - -| Symptom | Likely cause | Remedy | -|---------|--------------|--------| -| `ERR_EXPORT_UNSUPPORTED_SCHEMA` | Trivy CLI updated schema version. | Bump `SchemaVersion`, extend mapping tables, regenerate fixtures. | -| `adapter.trivy.unsupported_namespace` | Advisory namespace not in allowlist. | Extend namespace mapping or exclude in selector. | -| `trivy import` fails with "invalid bolt page" | Corrupted bundle or truncated upload. | Re-run export; verify storage backend and signatures. | -| Missing Java advisories | `IncludeJavaDb=false` or no Java data in Findings Ledger. | Enable flag and confirm upstream connectors populate Java ecosystems. | -| Severity downgraded to UNKNOWN | Source severity missing or unrecognized. | Ensure upstream connectors populate severity or supply CVSS scores. | -| `ERR_EXPORT_EMPTY` returned unexpectedly | Selectors yielded zero records while `AllowEmpty=false`. | Review selectors; set `AllowEmpty=true` if empty exports are acceptable. | - -## 9. 
References - -- [Export Center API reference](api.md) -- [Export Center CLI Guide](cli.md) -- [Export Center Architecture](architecture.md) -- [Export Center Overview](overview.md) -- [Aqua Security Trivy documentation](https://aquasecurity.github.io/trivy/dev/database/structure/) *(external reference for schema expectations)* - -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. +# Export Center Trivy Adapters + +The Trivy adapters translate StellaOps normalized advisories into the format consumed by Aqua Security's Trivy scanner. They enable downstream tooling to reuse StellaOps' curated data without bespoke converters, while preserving Aggregation-Only Contract (AOC) boundaries. This guide documents bundle layouts, field mappings, compatibility guarantees, validation workflows, and configuration toggles introduced in Sprint 36 (`EXPORT-SVC-36-001`, `EXPORT-SVC-36-002`). + +> The current Export Center build is wiring the API and workers. Treat this document as the canonical interface for adapter implementation and update any behavioural changes during task sign-off. + +## 1. Adapter overview + +| Variant | Bundle | Default profile | Notes | +|---------|--------|-----------------|-------| +| `trivy:db` | `db.bundle` | `trivy:db` | Core vulnerability database compatible with Trivy CLI >= 0.50.0 (schema v2). | +| `trivy:java-db` | `java-db.bundle` | Optional extension | Java ecosystem supplement (Maven, Gradle). Enabled when `ExportCenter:Profiles:Trivy:EnableJavaDb=true`. | + +Both variants ship inside the export run under `/export/trivy/`. Each bundle is a gzip-compressed tarball containing: + +``` +metadata.json +trivy.db # BoltDB file with vulnerability/provider tables +packages/*.json # Only when schema requires JSON overlays (language ecosystems) +``` + +The adapters never mutate input evidence. They only reshape normalized advisories and copy the exact upstream references so consumers can trace provenance. + +## 2. Bundle layout + +``` +trivy/ + db.bundle + +-- metadata.json + +-- trivy.db + java-db.bundle # present when Java DB enabled + +-- metadata.json + +-- trivy-java.db + +-- ecosystem/... +signatures/ + trivy-db.sig + trivy-java-db.sig +``` + +`metadata.json` aligns with Trivy's expectations (`schemaVersion`, `buildInfo`, `updatedAt`, etc.). Export Center adds an `stella` block to capture profile id, run id, and policy snapshot hints. + +Example `metadata.json` (trimmed): + +```json +{ + "schemaVersion": 2, + "buildInfo": { + "trivyVersion": "0.50.1", + "vulnerabilityDBVersion": "2025-10-28T00:00:00Z" + }, + "updatedAt": "2025-10-29T11:42:03Z", + "stella": { + "runId": "run-20251029-01", + "profileId": "prof-trivy-db", + "tenant": "acme", + "policySnapshotId": "policy-snap-42", + "schemaVersion": 2 + } +} +``` + +## 3. Field mappings + +### 3.1 Namespace resolution + +| Stella field | Trivy field | Notes | +|--------------|-------------|-------| +| `advisory.source.vendor` | `namespace` | Canonicalized to lowercase; e.g. `Ubuntu` -> `ubuntu`. | +| `advisory.source.product` | `distribution` / `ecosystem` | Mapped via allowlist (`Ubuntu 22.04` -> `ubuntu:22.04`). | +| `package.ecosystem` | `package.ecosystem` | OSS ecosystems (`npm`, `pip`, `nuget`, etc.). | +| `package.nevra` / `package.evr` | `package.version` (OS) | RPM/DEB version semantics preserved. | + +If a record lacks a supported namespace, the adapter drops it and logs `adapter.trivy.unsupported_namespace`. 
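+
+A minimal sketch of how such an allowlist lookup might look is shown below; the type name, method, and table entries are illustrative assumptions, not the adapter's actual API.
+
+```csharp
+using System.Collections.Generic;
+
+// Hypothetical helper mirroring the behaviour described in section 3.1:
+// canonicalise vendor/product to lowercase, map through an allowlist, and
+// return null for unsupported namespaces (the real adapter logs
+// adapter.trivy.unsupported_namespace and drops the record).
+internal static class TrivyNamespaceResolver
+{
+    private static readonly Dictionary<(string Vendor, string Product), string> Allowlist = new()
+    {
+        [("ubuntu", "22.04")] = "ubuntu:22.04",
+        [("debian", "12")] = "debian:12"
+    };
+
+    public static string? Resolve(string vendor, string product)
+    {
+        var key = (vendor.Trim().ToLowerInvariant(), product.Trim().ToLowerInvariant());
+        return Allowlist.TryGetValue(key, out var ns) ? ns : null;
+    }
+}
+```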
+ +### 3.2 Vulnerability metadata + +| Stella field | Trivy field | Transformation | +|--------------|-------------|----------------| +| `advisory.identifiers.cve[]` | `vulnerability.CVEIDs` | Array of strings. | +| `advisory.identifiers.aliases[]` | `vulnerability.CWEIDs` / `References` | CVE -> `CVEIDs`, others appended to `References`. | +| `advisory.summary` | `vulnerability.Title` | Stripped to 256 chars; rest moved to `Description`. | +| `advisory.description` | `vulnerability.Description` | Markdown allowed, normalized to LF line endings. | +| `advisory.severity.normalized` | `vulnerability.Severity` | Uses table below. | +| `advisory.cvss[]` | `vulnerability.CVSS` | Stored as `{"vector": "...", "score": 7.8, "source": "NVD"}`. | +| `advisory.published` | `vulnerability.PublishedDate` | ISO 8601 UTC. | +| `advisory.modified` | `vulnerability.LastModifiedDate` | ISO 8601 UTC. | +| `advisory.vendorStatement` | `vulnerability.VendorSeverity` / `VendorVectors` | Preserved in vendor block. | + +Severity mapping: + +| Stella severity | Trivy severity | +|-----------------|----------------| +| `critical` | `CRITICAL` | +| `high` | `HIGH` | +| `medium` | `MEDIUM` | +| `low` | `LOW` | +| `none` / `info` | `UNKNOWN` | + +### 3.3 Affected packages + +| Stella field | Trivy field | Notes | +|--------------|-------------|-------| +| `package.name` | `package.name` | For OS distros uses source package when available. | +| `package.purl` | `package.PURL` | Copied verbatim. | +| `affects.vulnerableRange` | `package.vulnerableVersionRange` | SemVer or distro version range. | +| `remediations.fixedVersion` | `package.fixedVersion` | Latest known fix. | +| `remediations.urls[]` | `package.links` | Array; duplicates removed. | +| `states.cpes[]` | `package.cpes` | For CPE-backed advisories. | + +The adapter deduplicates entries by `(namespace, package.name, vulnerableRange)` to avoid duplicate records when multiple upstream segments agree. + +Example mapping (Ubuntu advisory): + +```jsonc +// Stella normalized input +{ + "source": {"vendor": "Ubuntu", "product": "22.04"}, + "identifiers": {"cve": ["CVE-2024-12345"]}, + "severity": {"normalized": "high"}, + "affects": [{ + "package": {"name": "openssl", "ecosystem": "ubuntu", "nevra": "1.1.1f-1ubuntu2.12"}, + "vulnerableRange": "< 1.1.1f-1ubuntu2.13", + "remediations": [{"fixedVersion": "1.1.1f-1ubuntu2.13"}] + }] +} + +// Trivy vulnerability entry +{ + "namespace": "ubuntu", + "package": { + "name": "openssl", + "version": "< 1.1.1f-1ubuntu2.13", + "fixedVersion": "1.1.1f-1ubuntu2.13" + }, + "vulnerability": { + "ID": "CVE-2024-12345", + "Severity": "HIGH" + } +} +``` + +### 3.4 Java DB specifics + +The Java supplement only includes ecosystems `maven`, `gradle`, `sbt`. Additional fields: + +| Stella field | Trivy Java field | Notes | +|--------------|------------------|-------| +| `package.group` | `GroupID` | Derived from Maven coordinates. | +| `package.artifact` | `ArtifactID` | Derived from Maven coordinates. | +| `package.version` | `Version` | Compared with semver-lite rules. | +| `affects.symbolicRanges[]` | `VulnerableVersions` | Strings like `[1.0.0,1.2.3)`. | + +## 4. Compatibility matrix + +| Trivy version | Schema version | Supported by adapter | Notes | +|---------------|----------------|----------------------|-------| +| 0.46.x | 2 | Yes | Baseline compatibility target. | +| 0.50.x | 2 | Yes | Default validation target in CI. | +| 0.51.x+ | 3 | Pending | Adapter throws `ERR_EXPORT_UNSUPPORTED_SCHEMA` until implemented. 
| + +Schema mismatches emit `adapter.trivy.unsupported_schema_version` and abort the run. Operators can pin the schema via `ExportCenter:Adapters:Trivy:SchemaVersion`. + +## 5. Validation workflow + +1. **Unit tests** (`StellaOps.ExportCenter.Tests`): + - Mapping tests for OS and ecosystem packages. + - Severity conversion and range handling property tests. +2. **Integration tests** (`EXPORT-SVC-36-001`): + - Generate bundle from fixture dataset. + - Run `trivy module db import ` (Trivy CLI) to ensure the bundle is accepted. + - For Java DB, run `trivy java-repo --db ` against sample repository. +3. **CI smoke (`DEVOPS-EXPORT-36-001`)**: + - Validate metadata fields using `jq`. + - Ensure signatures verify with `cosign`. + - Check runtime by invoking `trivy fs --cache-dir --skip-update --custom-db fixtures/image`. + +Failures set the run status to `failed` with `errorCode="adapter-trivy"` so Console/CLI expose the reason. + +## 6. Configuration knobs + +```yaml +ExportCenter: + Adapters: + Trivy: + SchemaVersion: 2 # enforce schema version + IncludeJavaDb: true # enable java-db.bundle + AllowEmpty: false # fail when no records match + MaxCvssVectorsPerEntry: 5 # truncate to avoid oversized payloads + Distribution: + Oras: + TrivyRepository: "registry.example.com/stella/trivy-db" + PublishDelta: false + Download: + FilenameFormat: "trivy-db-{runId}.tar.gz" + IncludeMetadata: true +``` + +- `AllowEmpty=false` converts empty datasets into `ERR_EXPORT_EMPTY`. +- `MaxCvssVectorsPerEntry` prevents extremely large multi-vector advisories from bloating the DB. +- `PublishDelta` works in tandem with the planner's delta logic; when true, only changed blobs are pushed. +- `FilenameFormat` lets operators align downloads with existing mirror tooling. +- `IncludeMetadata` toggles whether `metadata.json` is stored alongside the bundle in the staging directory for quick inspection. + +## 7. Distribution guidelines + +- **Download profile**: `db.bundle` placed under `/export/trivy/` and signed. Recommended filename `trivy-db-.tar.gz`. +- **OCI push**: ORAS artifact with annotations: + - `org.opencontainers.artifact.description=StellaOps Trivy DB` + - `io.stella.export.profile=trivy:db` + - `io.stella.export.run=` + - `io.stella.export.schemaVersion=2` +- **Offline Kit**: When `offlineBundle.includeTrivyDb=true`, the exporter copies the latest full bundle plus the last `N` deltas (configurable) with manifests for quick import. + +Consumers should always verify signatures using `trivy-db.sig` / `trivy-java-db.sig` before trusting the bundle. + +Example verification flow: + +```bash +cosign verify-blob \ + --key tenants/acme/export-center.pub \ + --signature signatures/trivy-db.sig \ + trivy/db.bundle + +trivy module db import trivy/db.bundle --cache-dir /tmp/trivy-cache +``` + +## 8. Troubleshooting + +| Symptom | Likely cause | Remedy | +|---------|--------------|--------| +| `ERR_EXPORT_UNSUPPORTED_SCHEMA` | Trivy CLI updated schema version. | Bump `SchemaVersion`, extend mapping tables, regenerate fixtures. | +| `adapter.trivy.unsupported_namespace` | Advisory namespace not in allowlist. | Extend namespace mapping or exclude in selector. | +| `trivy import` fails with "invalid bolt page" | Corrupted bundle or truncated upload. | Re-run export; verify storage backend and signatures. | +| Missing Java advisories | `IncludeJavaDb=false` or no Java data in Findings Ledger. | Enable flag and confirm upstream connectors populate Java ecosystems. 
| +| Severity downgraded to UNKNOWN | Source severity missing or unrecognized. | Ensure upstream connectors populate severity or supply CVSS scores. | +| `ERR_EXPORT_EMPTY` returned unexpectedly | Selectors yielded zero records while `AllowEmpty=false`. | Review selectors; set `AllowEmpty=true` if empty exports are acceptable. | + +## 9. References + +- [Export Center API reference](api.md) +- [Export Center CLI Guide](cli.md) +- [Export Center Architecture](architecture.md) +- [Export Center Overview](overview.md) +- [Aqua Security Trivy documentation](https://aquasecurity.github.io/trivy/dev/database/structure/) *(external reference for schema expectations)* + +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. diff --git a/docs/faq/policy-faq.md b/docs/faq/policy-faq.md index d10e4540..06b6995d 100644 --- a/docs/faq/policy-faq.md +++ b/docs/faq/policy-faq.md @@ -1,96 +1,96 @@ -# Policy Engine FAQ - -Answers to questions that Support, Ops, and Policy Guild teams receive most frequently. Pair this FAQ with the [Policy Lifecycle](../policy/lifecycle.md), [Runs](../policy/runs.md), and [CLI guide](../cli/policy.md) for deeper explanations. - ---- - -## Authoring & DSL - -**Q:** *Lint succeeds locally, but submit still fails with `ERR_POL_001`. Why?* -**A:** The CLI requires lint & compile artefacts newer than 24 hours. Re-run `stella policy lint` and `stella policy compile` before submitting; ensure you upload the latest diff files with `--attach`. - -**Q:** *How do I layer tenant-specific overrides on top of the baseline policy?* -**A:** Keep the baseline in `tenant-global`. For tenant overrides, create a policy referencing the baseline via CLI (`stella policy new --from baseline@`), then adjust rules. Activation is per tenant. - -**Q:** *Can I import YAML/Rego policies from earlier releases?* -**A:** No direct import. Use the migration script (`stella policy migrate legacy.yaml`) which outputs `stella-dsl@1` skeletons. Review manually before submission. - ---- - -## Simulation & Determinism - -**Q:** *Simulation shows huge differences even though I only tweaked metadata. What did I miss?* -**A:** Check if your simulation used the same SBOM set/env as previous runs. CLI default uses golden fixtures; UI can store custom presets. Large diffs may also indicate Concelier updates; compare advisory cursors in the Simulation tab. - -**Q:** *How do we guard against non-deterministic behaviour?* -**A:** CI runs `policy simulate` twice with identical inputs and compares outputs (`DEVOPS-POLICY-20-003`). Any difference fails the pipeline. Locally you can use `stella policy run replay` to verify determinism. - -**Q:** *What happens if the determinism guard (`ERR_POL_004`) triggers?* -**A:** Policy Engine halts the run, raises `policy.run.failed` with code `ERR_POL_004`, and switches to incident mode (100 % sampling). Review recent code changes; often caused by new helpers that call `DateTime.Now` or non-allowlisted HTTP clients. - ---- - -## VEX & Suppressions - -**Q:** *A vendor marked a CVE `not_affected` but the policy still blocks. Why?* -**A:** Check the required justifications. Baseline policy only accepts `component_not_present` and `vulnerable_code_not_present`. Other statuses need explicit rules. Use `stella findings explain` to see which VEX statement was considered. - -**Q:** *Can we quiet a finding indefinitely?* -**A:** Avoid indefinite quiets. Policy DSL requires an `until` timestamp. 
If the use case is permanent, move the rule into baseline logic with strong justification and documentation. - -**Q:** *How do we detect overuse of suppressions?* -**A:** Observability exports `policy_suppressions_total` and CLI `stella policy stats`. Review weekly; Support flags tenants whose suppressions grow faster than remediation tickets. - ---- - -## Runs & Operations - -**Q:** *Incremental runs are backlogged. What should we check first?* -**A:** Inspect `policy_run_queue_depth` and `policy_delta_backlog_age_seconds` dashboards. If queue depth high, scale worker replicas or investigate upstream change storms (Concelier/Excititor). Use `stella policy run list --status failed` for recent errors. - -**Q:** *Full runs take longer than 30 min. Is that a breach?* -**A:** Goal is ≤ 30 min, but large tenants may exceed temporarily. Ensure Mongo indexes are current and that worker nodes meet sizing (4 vCPU). Consider sharding runs by SBOM group. - -**Q:** *How do I replay a run for audit evidence?* -**A:** `stella policy run replay --output replay.tgz` produces a sealed bundle. Upload to evidence locker or attach to incident tickets. - ---- - -## Approvals & Governance - -**Q:** *Can authors approve their own policies?* -**A:** No. Authority denies approval if `approved_by == submitted_by`. Assign at least two reviewers (one security, one product). - -**Q:** *What scopes do bots need for CI pipelines?* -**A:** Typically `policy:read`, `policy:simulate`, `policy:runs`. Only grant `policy:run` if the pipeline should trigger runs. Never give CI tokens `policy:approve`. - -**Q:** *How do we manage policies in air-gapped deployments?* -**A:** Use `stella policy bundle export --sealed` on a connected site, transfer via approved media, then `stella policy bundle import` inside the enclave. Enable `--sealed` flag in CLI/UI to block accidental outbound calls. - ---- - -## Troubleshooting - -**Q:** *API calls return `403` despite valid token.* -**A:** Verify scope includes the specific operation (`policy:activate` vs `policy:run`). Check tenant header matches token tenant. Inspect Authority logs for denial reason (`policy_scope_denied_total` metric). - -**Q:** *`stella policy run` exits with code `30`.* -**A:** Network/transport error. Check connectivity to Policy Engine endpoint, TLS configuration, and CLI proxy settings. - -**Q:** *Explain drawer shows no VEX data.* -**A:** Either no VEX statement matched or the tenant lacks `findings:read` scope. If VEX should exist, confirm Excititor ingestion and policy joiners (see Observability dashboards). - ---- - -## Compliance Checklist - -- [ ] FAQ linked from Console help menu and CLI `stella policy help`. -- [ ] Entries reviewed quarterly by Policy & Support Guilds. -- [ ] Answers cross-reference lifecycle, runs, observability, and governance docs. -- [ ] Incident/Escalation contact details kept current in Support playbooks. -- [ ] FAQ translated for supported locales (if applicable). - ---- - -*Last updated: 2025-10-26 (Sprint 20).* - +# Policy Engine FAQ + +Answers to questions that Support, Ops, and Policy Guild teams receive most frequently. Pair this FAQ with the [Policy Lifecycle](../policy/lifecycle.md), [Runs](../policy/runs.md), and [CLI guide](../cli/policy.md) for deeper explanations. + +--- + +## Authoring & DSL + +**Q:** *Lint succeeds locally, but submit still fails with `ERR_POL_001`. Why?* +**A:** The CLI requires lint & compile artefacts newer than 24 hours. 
Re-run `stella policy lint` and `stella policy compile` before submitting; ensure you upload the latest diff files with `--attach`. + +**Q:** *How do I layer tenant-specific overrides on top of the baseline policy?* +**A:** Keep the baseline in `tenant-global`. For tenant overrides, create a policy referencing the baseline via CLI (`stella policy new --from baseline@`), then adjust rules. Activation is per tenant. + +**Q:** *Can I import YAML/Rego policies from earlier releases?* +**A:** No direct import. Use the migration script (`stella policy migrate legacy.yaml`) which outputs `stella-dsl@1` skeletons. Review manually before submission. + +--- + +## Simulation & Determinism + +**Q:** *Simulation shows huge differences even though I only tweaked metadata. What did I miss?* +**A:** Check if your simulation used the same SBOM set/env as previous runs. CLI default uses golden fixtures; UI can store custom presets. Large diffs may also indicate Concelier updates; compare advisory cursors in the Simulation tab. + +**Q:** *How do we guard against non-deterministic behaviour?* +**A:** CI runs `policy simulate` twice with identical inputs and compares outputs (`DEVOPS-POLICY-20-003`). Any difference fails the pipeline. Locally you can use `stella policy run replay` to verify determinism. + +**Q:** *What happens if the determinism guard (`ERR_POL_004`) triggers?* +**A:** Policy Engine halts the run, raises `policy.run.failed` with code `ERR_POL_004`, and switches to incident mode (100 % sampling). Review recent code changes; often caused by new helpers that call `DateTime.Now` or non-allowlisted HTTP clients. + +--- + +## VEX & Suppressions + +**Q:** *A vendor marked a CVE `not_affected` but the policy still blocks. Why?* +**A:** Check the required justifications. Baseline policy only accepts `component_not_present` and `vulnerable_code_not_present`. Other statuses need explicit rules. Use `stella findings explain` to see which VEX statement was considered. + +**Q:** *Can we quiet a finding indefinitely?* +**A:** Avoid indefinite quiets. Policy DSL requires an `until` timestamp. If the use case is permanent, move the rule into baseline logic with strong justification and documentation. + +**Q:** *How do we detect overuse of suppressions?* +**A:** Observability exports `policy_suppressions_total` and CLI `stella policy stats`. Review weekly; Support flags tenants whose suppressions grow faster than remediation tickets. + +--- + +## Runs & Operations + +**Q:** *Incremental runs are backlogged. What should we check first?* +**A:** Inspect `policy_run_queue_depth` and `policy_delta_backlog_age_seconds` dashboards. If queue depth high, scale worker replicas or investigate upstream change storms (Concelier/Excititor). Use `stella policy run list --status failed` for recent errors. + +**Q:** *Full runs take longer than 30 min. Is that a breach?* +**A:** Goal is ≤ 30 min, but large tenants may exceed temporarily. Ensure Mongo indexes are current and that worker nodes meet sizing (4 vCPU). Consider sharding runs by SBOM group. + +**Q:** *How do I replay a run for audit evidence?* +**A:** `stella policy run replay --output replay.tgz` produces a sealed bundle. Upload to evidence locker or attach to incident tickets. + +--- + +## Approvals & Governance + +**Q:** *Can authors approve their own policies?* +**A:** No. Authority denies approval if `approved_by == submitted_by`. Assign at least two reviewers (one security, one product). 
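+
+As an illustration only (the type and method below are hypothetical, not Authority's actual code), the separation-of-duties rule boils down to rejecting any approval whose approver matches the submitter:
+
+```csharp
+using System;
+
+// Hypothetical sketch of the self-approval rule described above; not
+// Authority's actual implementation.
+public static class ApprovalGuard
+{
+    public static bool CanApprove(string approvedBy, string submittedBy) =>
+        !string.Equals(approvedBy, submittedBy, StringComparison.Ordinal);
+}
+```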
+ +**Q:** *What scopes do bots need for CI pipelines?* +**A:** Typically `policy:read`, `policy:simulate`, `policy:runs`. Only grant `policy:run` if the pipeline should trigger runs. Never give CI tokens `policy:approve`. + +**Q:** *How do we manage policies in air-gapped deployments?* +**A:** Use `stella policy bundle export --sealed` on a connected site, transfer via approved media, then `stella policy bundle import` inside the enclave. Enable `--sealed` flag in CLI/UI to block accidental outbound calls. + +--- + +## Troubleshooting + +**Q:** *API calls return `403` despite valid token.* +**A:** Verify scope includes the specific operation (`policy:activate` vs `policy:run`). Check tenant header matches token tenant. Inspect Authority logs for denial reason (`policy_scope_denied_total` metric). + +**Q:** *`stella policy run` exits with code `30`.* +**A:** Network/transport error. Check connectivity to Policy Engine endpoint, TLS configuration, and CLI proxy settings. + +**Q:** *Explain drawer shows no VEX data.* +**A:** Either no VEX statement matched or the tenant lacks `findings:read` scope. If VEX should exist, confirm Excititor ingestion and policy joiners (see Observability dashboards). + +--- + +## Compliance Checklist + +- [ ] FAQ linked from Console help menu and CLI `stella policy help`. +- [ ] Entries reviewed quarterly by Policy & Support Guilds. +- [ ] Answers cross-reference lifecycle, runs, observability, and governance docs. +- [ ] Incident/Escalation contact details kept current in Support playbooks. +- [ ] FAQ translated for supported locales (if applicable). + +--- + +*Last updated: 2025-10-26 (Sprint 20).* + diff --git a/AGENTS.md b/docs/implplan/AGENTS.md similarity index 100% rename from AGENTS.md rename to docs/implplan/AGENTS.md diff --git a/EPIC_1.md b/docs/implplan/EPIC_1.md similarity index 97% rename from EPIC_1.md rename to docs/implplan/EPIC_1.md index 51bf5409..eaefb8c4 100644 --- a/EPIC_1.md +++ b/docs/implplan/EPIC_1.md @@ -1,524 +1,524 @@ -Here’s the full write‑up you can drop into your repo as the canonical reference for Epic 1. It’s written in clean product‑doc style so it’s safe to check in as Markdown. No fluff, just everything you need to build, test, and police it. - ---- - -# Epic 1: Aggregation‑Only Contract (AOC) Enforcement - -> Short name: **AOC enforcement** -> Services touched: **Conseiller (advisory ingestion), Excitator (VEX ingestion), Web API, Workers, Policy Engine, CLI, Console, Authority** -> Data stores: **MongoDB primary, optional Redis/NATS for jobs** - ---- - -## 1) What it is - -**Aggregation‑Only Contract (AOC)** is the ingestion covenant for StellaOps. It defines a hard boundary between **collection** and **interpretation**: - -* **Ingestion (Conseiller/Excitator)** only **collects** data and preserves it as immutable raw facts with provenance. It does not decide, merge, normalize, prioritize, or assign severity. It may compute **links** that help future joins (aliases, PURLs, CPEs), but never derived judgments. -* **Policy evaluation** is the only place where merges, deduplication, consensus, severity computation, and status folding are allowed. It’s reproducible and traceable. - -The AOC establishes: - -* **Immutable raw stores**: `advisory_raw` and `vex_raw` documents with full provenance, signatures, checksums, and upstream identifiers. -* **Linksets**: machine‑generated join hints (aliases, PURLs, CPEs, CVE/GHSA IDs) that never change the underlying source content. 
-* **Invariants**: a strict set of “never do this in ingestion” rules enforced by schema validation, runtime guards, and CI checks. -* **AOC Verifier**: a build‑time and runtime watchdog that blocks non‑compliant code and data writes. - -This epic delivers: schemas, guards, error codes, APIs, tests, migration, docs, and ops dashboards to make AOC non‑negotiable across the platform. - ---- - -## 2) Why - -AOC makes results **auditable, deterministic, and organization‑specific**. Source vendors disagree; your policies decide. By removing hidden heuristics from ingestion, we avoid unexplainable risk changes, race conditions between collectors, and vendor bias. Policy‑time evaluation yields reproducible deltas with complete “why” traces. - ---- - -## 3) How it should work (deep details) - -### 3.1 Core invariants - -The following must be true for every write to `advisory_raw` and `vex_raw` and for every ingestion pipeline: - -1. **No severity in ingestion** - - * Forbidden fields: `severity`, `cvss`, `cvss_vector`, `effective_status`, `effective_range`, `merged_from`, `consensus_provider`, `reachability`, `asset_criticality`, `risk_score`. -2. **No merges or de‑dups in ingestion** - - * No combining two upstream advisories into one. No picking a single truth when multiple VEX statements exist. -3. **Provenance is mandatory** - - * Every raw doc includes `provenance` and `signature/checksum`. -4. **Idempotent upserts** - - * Same upstream document (by `upstream_id` + `source` + `content_hash`) must not create duplicates. -5. **Append‑only versioning** - - * Revisions from the source create new immutable documents with `supersedes` pointers; no in‑place edits. -6. **Linkset only** - - * Ingestion can compute and store a `linkset` for join performance. Linkset does not alter or infer severity/status. -7. **Policy‑time only for effective findings** - - * Only the Policy Engine can write `effective_finding_*` materializations. -8. **Schema safety** - - * Strict JSON schema validation at DB level; unknown fields reject writes. -9. **Clock discipline** - - * Timestamps are UTC, monotonic within a batch; collectors record `fetched_at` and `received_at`. - -### 3.2 Data model - -#### 3.2.1 `advisory_raw` (Mongo collection) - -```json -{ - "_id": "advisory_raw:osv:GHSA-xxxx-....:v3", - "source": { - "vendor": "OSV", - "stream": "github", - "api": "https://api.osv.dev/v1/.../GHSA-...", - "collector_version": "conseiller/1.7.3" - }, - "upstream": { - "upstream_id": "GHSA-xxxx-....", - "document_version": "2024-09-01T12:13:14Z", - "fetched_at": "2025-01-02T03:04:05Z", - "received_at": "2025-01-02T03:04:06Z", - "content_hash": "sha256:...", - "signature": { - "present": true, - "format": "dsse", - "key_id": "rekor:.../key/abc", - "sig": "base64..." - } - }, - "content": { - "format": "OSV", - "spec_version": "1.6", - "raw": { /* full upstream JSON, unmodified */ } - }, - "identifiers": { - "cve": ["CVE-2023-1234"], - "ghsa": ["GHSA-xxxx-...."], - "aliases": ["CVE-2023-1234", "GHSA-xxxx-...."] - }, - "linkset": { - "purls": ["pkg:npm/lodash@4.17.21", "pkg:maven/..."], - "cpes": ["cpe:2.3:a:..."], - "references": [ - {"type":"advisory","url":"https://..."}, - {"type":"fix","url":"https://..."} - ], - "reconciled_from": ["content.raw.affected.ranges", "content.raw.pkg"] - }, - "supersedes": "advisory_raw:osv:GHSA-xxxx-....:v2", - "tenant": "default" -} -``` - -> Note: No `severity`, no `cvss`, no `effective_*`. 
If the upstream payload includes CVSS, it stays inside `content.raw` and is not promoted or normalized at ingestion. - -#### 3.2.2 `vex_raw` (Mongo collection) - -```json -{ - "_id": "vex_raw:vendorX:doc-123:v4", - "source": { - "vendor": "VendorX", - "stream": "vex", - "api": "https://.../vex/doc-123", - "collector_version": "excitator/0.9.2" - }, - "upstream": { - "upstream_id": "doc-123", - "document_version": "2025-01-15T08:09:10Z", - "fetched_at": "2025-01-16T01:02:03Z", - "received_at": "2025-01-16T01:02:03Z", - "content_hash": "sha256:...", - "signature": { "present": true, "format": "cms", "key_id": "kid:...", "sig": "..." } - }, - "content": { - "format": "CycloneDX-VEX", // or "CSAF-VEX" - "spec_version": "1.5", - "raw": { /* full upstream VEX */ } - }, - "identifiers": { - "statements": [ - { - "advisory_ids": ["CVE-2023-1234","GHSA-..."], - "component_purls": ["pkg:deb/openssl@1.1.1"], - "status": "not_affected", - "justification": "component_not_present" - } - ] - }, - "linkset": { - "purls": ["pkg:deb/openssl@1.1.1"], - "cves": ["CVE-2023-1234"], - "ghsas": ["GHSA-..."] - }, - "supersedes": "vex_raw:vendorX:doc-123:v3", - "tenant": "default" -} -``` - -> VEX statuses remain as raw facts. No cross‑provider consensus is computed here. - -### 3.3 Database validation - -* MongoDB JSON Schema validators on both collections: - - * Reject forbidden fields at the top level. - * Enforce presence of `source`, `upstream`, `content`, `linkset`, `tenant`. - * Enforce string formats for timestamps and hashes. - -### 3.4 Write paths - -1. **Collector fetches upstream** - - * Normalize transport (gzip/json), compute `content_hash`, verify signature if available. -2. **Build raw doc** - - * Populate `source`, `upstream`, `content.raw`, `identifiers`, `linkset`. -3. **Idempotent upsert** - - * Lookup by `(source.vendor, upstream.upstream_id, upstream.content_hash)`. If exists, skip; if new content hash, insert new revision with `supersedes`. -4. **AOC guard** - - * Runtime interceptor inspects write payload; if any forbidden field detected, reject with `ERR_AOC_001`. -5. **Metrics** - - * Emit `ingestion_write_ok` or `ingestion_write_reject` with reason code. - -### 3.5 Read paths (ingestion scope) - -* Allow only listing, getting raw docs, and searching by linkset. No endpoints return “effective findings” from ingestion services. - -### 3.6 Error codes - -| Code | Meaning | HTTP | -| ------------- | ------------------------------------------------------------ | ---- | -| `ERR_AOC_001` | Forbidden field present (severity/consensus/normalized data) | 400 | -| `ERR_AOC_002` | Merge attempt detected (multiple upstreams fused) | 400 | -| `ERR_AOC_003` | Idempotency violation (duplicate without supersedes) | 409 | -| `ERR_AOC_004` | Missing provenance fields | 422 | -| `ERR_AOC_005` | Signature/checksum mismatch | 422 | -| `ERR_AOC_006` | Attempt to write effective findings from ingestion context | 403 | -| `ERR_AOC_007` | Unknown top‑level fields (schema violation) | 400 | - -### 3.7 AOC Verifier - -A build‑time and runtime safeguard: - -* **Static checks (CI)** - - * Block imports of `*.Policy*` or `*.Merge*` from ingestion modules. - * AST lint rule: any write to `advisory_raw` or `vex_raw` setting a forbidden key fails the build. -* **Runtime checks** - - * Repository layer interceptor inspects documents before insert/update; rejects forbidden fields and multi‑source merges. 
-* **Drift detection job** - - * Nightly job scans newest N docs; if violation found, pages ops and blocks new pipeline runs. - -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -### 3.8 Indexing strategy - -* `advisory_raw`: - - * `{ "identifiers.cve": 1 }`, `{ "identifiers.ghsa": 1 }`, `{ "linkset.purls": 1 }`, `{ "source.vendor": 1, "upstream.upstream_id": 1, "upstream.content_hash": 1 }` (unique), `{ "tenant": 1 }`. -* `vex_raw`: - - * `{ "identifiers.statements.advisory_ids": 1 }`, `{ "linkset.purls": 1 }`, `{ "source.vendor": 1, "upstream.upstream_id": 1, "upstream.content_hash": 1 }` (unique), `{ "tenant": 1 }`. - -### 3.9 Interaction with Policy Engine - -* Policy Engine pulls raw docs by identifiers/linksets and computes: - - * De‑dup/merge per policy - * Consensus for VEX statements - * Severity normalization and risk scoring -* Writes **only** to `effective_finding_{policyId}` collections. - -A dedicated write guard refuses `effective_finding_*` writes from any caller that isn’t the Policy Engine service identity. - -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -### 3.10 Migration plan - -1. **Freeze ingestion writes** except raw pass‑through. -2. **Backfill**: copy existing ingestion collections to `_backup_*`. -3. **Strip forbidden fields** from raw copies, move them into a temporary `advisory_view_legacy` used only by Policy Engine for parity. -4. **Enable DB schema validators**. -5. **Run collectors** in dry‑run; ensure only allowed keys land. -6. **Switch Policy Engine** to pull exclusively from `*_raw` and to compute everything else. -7. **Delete legacy normalized fields** in ingestion codepaths. -8. **Enable runtime guards** and CI lint. - -### 3.11 Observability - -* Metrics: - - * `aoc_violation_total{code=...}`, `ingestion_write_total{result=ok|reject}`, `ingestion_signature_verified_total{result=ok|fail}`, `ingestion_latency_seconds`, `advisory_revision_count`. -* Tracing: span `ingest.fetch`, `ingest.transform`, `ingest.write`, `aoc.guard`. -* Logs: include `tenant`, `source.vendor`, `upstream.upstream_id`, `content_hash`, `correlation_id`. - -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -### 3.12 Security and tenancy - -* Every raw doc carries a `tenant` field. -* Authority enforces `advisory:ingest` and `vex:ingest` scopes for ingestion endpoints. -* Cross‑tenant reads/writes are blocked by default. -* Secrets never logged; signatures verified with pinned trust stores. - -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -### 3.13 CLI and Console behavior - -* **CLI** - - * `stella sources ingest --dry-run` prints would‑write payload and explicitly shows that no severity/status fields are present. - * `stella aoc verify` scans last K documents and reports violations with exit codes. -* **Console** - - * Sources dashboard shows AOC pass/fail per job, most recent violation codes, and a drill‑down to the offending document. - -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. 
- ---- - -## 4) API surface (ingestion scope) - -### 4.1 Conseiller (Advisories) - -* `POST /ingest/advisory` - - * Body: raw upstream advisory with metadata; server constructs document, not the client. - * Rejections: `ERR_AOC_00x` per table above. -* `GET /advisories/raw/{id}` -* `GET /advisories/raw?cve=CVE-...&purl=pkg:...&tenant=...` -* `GET /advisories/raw/{id}/provenance` -* `POST /aoc/verify?since=ISO8601` returns summary stats and first N violations. - -### 4.2 Excitator (VEX) - -* `POST /ingest/vex` -* `GET /vex/raw/{id}` -* `GET /vex/raw?advisory_id=CVE-...&purl=pkg:...` -* `POST /aoc/verify?since=ISO8601` - -All endpoints require `tenant` scope and appropriate `:write` or `:read`. - -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - ---- - -## 5) Example: end‑to‑end flow - -1. Collector fetches `GHSA-1234` from OSV. -2. Build `advisory_raw` with linkset PURLs. -3. Insert; AOC guard approves. -4. Policy Engine later evaluates SBOM `S-42` under `policy P-7`, reads raw advisory and any VEX raw docs, computes effective findings, and writes to `effective_finding_P-7`. -5. CLI `stella aoc verify --since 24h` returns `0` violations. - ---- - -## 6) Implementation tasks - -Breakdown by component with exact work items. Each section ends with the imposed sentence you requested. - -### 6.1 Conseiller (advisory ingestion, WS + Worker) - -* [ ] Add Mongo JSON schema validation for `advisory_raw`. -* [ ] Implement repository layer with **write interceptors** that reject forbidden fields. -* [ ] Compute `linkset` from upstream using deterministic mappers. -* [ ] Enforce idempotency by unique index on `(source.vendor, upstream.upstream_id, upstream.content_hash, tenant)`. -* [ ] Remove any normalization pipelines; relocate to Policy Engine. -* [ ] Add `POST /ingest/advisory` and `GET /advisories/raw*` endpoints with Authority scope checks. -* [ ] Emit observability metrics and traces. -* [ ] Unit tests: schema violations, idempotency, supersedes chain, forbidden fields. -* [ ] Integration tests: large batch ingest, linkset correctness against golden fixtures. - -**Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -### 6.2 Excitator (VEX ingestion, WS + Worker) - -* [ ] Add Mongo JSON schema validation for `vex_raw`. -* [ ] Implement repository layer guard identical to Conseiller. -* [ ] Deterministic `linkset` extraction for advisory IDs and PURLs. -* [ ] Endpoints `POST /ingest/vex`, `GET /vex/raw*` with scopes. -* [ ] Remove any consensus or folding logic; leave VEX statements as raw. -* [ ] Tests as per Conseiller, with rich fixtures for CycloneDX‑VEX and CSAF. - -**Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -### 6.3 Web API shared library - -* [ ] Define `AOCForbiddenKeys` and export for both services. -* [ ] Provide `AOCWriteGuard` middleware and `AOCError` types. -* [ ] Provide `ProvenanceBuilder` utility. -* [ ] Provide `SignatureVerifier` and `Checksum` helpers. - -**Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -### 6.4 Policy Engine - -* [ ] Block any import/use from ingestion modules by lint rule. -* [ ] Add hard gate on `effective_finding_*` writes that verifies caller identity is Policy Engine. 
-* [ ] Update readers to pull fields only from `content.raw`, `identifiers`, `linkset`, not any legacy normalized fields. - -**Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -### 6.5 Authority - -* [ ] Introduce scopes: `advisory:ingest`, `advisory:read`, `vex:ingest`, `vex:read`, `aoc:verify`. -* [ ] Add `tenant` claim propagation to ingestion services. - -**Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -### 6.6 CLI - -* [ ] `stella sources ingest --dry-run` and `stella aoc verify` commands. -* [ ] Exit codes mapping to `ERR_AOC_00x`. -* [ ] JSON output schema including violation list. - -**Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -### 6.7 Console - -* [ ] Sources dashboard tiles: last run, AOC violations, top error codes. -* [ ] Drill‑down page rendering offending doc with highlight on forbidden keys. -* [ ] “Verify last 24h” action calling the AOC Verifier endpoint. - -**Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -### 6.8 CI/CD - -* [ ] AST linter to forbid writes of banned keys in ingestion modules. -* [ ] Unit test coverage gates for AOC guard code. -* [ ] Pipeline stage that runs `stella aoc verify` against seeded DB snapshots. - -**Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - ---- - -## 7) Documentation changes (create/update these files) - -1. **`/docs/ingestion/aggregation-only-contract.md`** - - * Add: philosophy, invariants, schemas for `advisory_raw`/`vex_raw`, error codes, linkset definition, examples, idempotency rules, supersedes, API references, migration steps, observability, security. -2. **`/docs/architecture/overview.md`** - - * Update system diagram to show AOC boundary and raw stores; add sequence diagram: fetch → guard → raw insert → policy evaluation. -3. **`/docs/architecture/policy-engine.md`** - - * Clarify ingestion boundary; list inputs consumed from raw; note that any severity/consensus is policy‑time only. -4. **`/docs/ui/console.md`** - - * Add Sources dashboard section: AOC tiles and violation drill‑down. -5. **`/docs/cli/cli-reference.md`** - - * Add `stella aoc verify` and `stella sources ingest --dry-run` usage and exit codes. -6. **`/docs/observability/observability.md`** - - * Document new metrics, traces, logs keys for AOC. -7. **`/docs/security/authority-scopes.md`** - - * Add new scopes and tenancy enforcement for ingestion endpoints. -8. **`/docs/deploy/containers.md`** - - * Note DB validators must be enabled; environment flags for AOC guards; read‑only user for verify endpoint. - -Each file should include a “Compliance checklist” subsection for AOC. - -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - ---- - -## 8) Acceptance criteria - -* DB validators are active and reject writes with forbidden fields. -* AOC runtime guards log and reject violations with correct error codes. -* CI linter prevents shipping code that writes forbidden keys to raw stores. -* Ingestion of known fixture sets results in zero normalized fields outside `content.raw`. 
-* Policy Engine is the only writer of `effective_finding_*` materializations. -* CLI `stella aoc verify` returns success on clean datasets and non‑zero on seeded violations. -* Console shows AOC status and violation drill‑downs. - ---- - -## 9) Risks and mitigations - -* **Collector drift**: new upstream fields tempt developers to normalize. - - * Mitigation: CI linter + guard + schema validators; require RFC to extend linkset. -* **Performance impact**: extra validation on write. - - * Mitigation: guard is O(number of keys) and schema check is bounded; indexes sized appropriately. -* **Migration complexity**: moving legacy normalized fields out. - - * Mitigation: temporary `advisory_view_legacy` for parity; stepwise cutover. -* **Tenant leakage**: missing tenant on write. - - * Mitigation: schema requires `tenant`; middleware injects and asserts. - ---- - -## 10) Test plan - -* **Unit tests** - - * Guard rejects forbidden keys; idempotency; supersedes chain; provenance required. - * Signature verification paths: good, bad, absent. -* **Property tests** - - * Randomized upstream docs never produce forbidden keys at top level. -* **Integration tests** - - * Batch ingest of 50k advisories: throughput, zero violations. - * Mixed VEX sources with contradictory statements remain separate in raw. -* **Contract tests** - - * Policy Engine refuses to run without raw inputs; writes only to `effective_finding_*`. -* **End‑to‑end** - - * Seed SBOM + advisories + VEX; ensure findings are identical pre/post migration. - ---- - -## 11) Developer checklists - -**Definition of Ready** - -* Upstream spec reference attached. -* Linkset mappers defined. -* Example fixtures added. - -**Definition of Done** - -* DB validators deployed and tested. -* Runtime guards enabled. -* CI linter merged and enforced. -* Docs updated (files in section 7). -* Metrics visible on dashboard. -* CLI verify passes. - ---- - -## 12) Glossary - -* **Raw document**: exact upstream content plus provenance, with join hints. -* **Linkset**: PURLs/CPEs/IDs extracted to accelerate joins later. -* **Supersedes**: pointer from a newer raw doc to the previous revision of the same upstream doc. -* **Policy‑time**: evaluation phase where merges, consensus, and severity are computed. -* **AOC**: Aggregation‑Only Contract. - ---- - -### Final imposed reminder - -**Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied.** +Here’s the full write‑up you can drop into your repo as the canonical reference for Epic 1. It’s written in clean product‑doc style so it’s safe to check in as Markdown. No fluff, just everything you need to build, test, and police it. + +--- + +# Epic 1: Aggregation‑Only Contract (AOC) Enforcement + +> Short name: **AOC enforcement** +> Services touched: **Conseiller (advisory ingestion), Excitator (VEX ingestion), Web API, Workers, Policy Engine, CLI, Console, Authority** +> Data stores: **MongoDB primary, optional Redis/NATS for jobs** + +--- + +## 1) What it is + +**Aggregation‑Only Contract (AOC)** is the ingestion covenant for StellaOps. It defines a hard boundary between **collection** and **interpretation**: + +* **Ingestion (Conseiller/Excitator)** only **collects** data and preserves it as immutable raw facts with provenance. It does not decide, merge, normalize, prioritize, or assign severity. It may compute **links** that help future joins (aliases, PURLs, CPEs), but never derived judgments. 
+* **Policy evaluation** is the only place where merges, deduplication, consensus, severity computation, and status folding are allowed. It’s reproducible and traceable. + +The AOC establishes: + +* **Immutable raw stores**: `advisory_raw` and `vex_raw` documents with full provenance, signatures, checksums, and upstream identifiers. +* **Linksets**: machine‑generated join hints (aliases, PURLs, CPEs, CVE/GHSA IDs) that never change the underlying source content. +* **Invariants**: a strict set of “never do this in ingestion” rules enforced by schema validation, runtime guards, and CI checks. +* **AOC Verifier**: a build‑time and runtime watchdog that blocks non‑compliant code and data writes. + +This epic delivers: schemas, guards, error codes, APIs, tests, migration, docs, and ops dashboards to make AOC non‑negotiable across the platform. + +--- + +## 2) Why + +AOC makes results **auditable, deterministic, and organization‑specific**. Source vendors disagree; your policies decide. By removing hidden heuristics from ingestion, we avoid unexplainable risk changes, race conditions between collectors, and vendor bias. Policy‑time evaluation yields reproducible deltas with complete “why” traces. + +--- + +## 3) How it should work (deep details) + +### 3.1 Core invariants + +The following must be true for every write to `advisory_raw` and `vex_raw` and for every ingestion pipeline: + +1. **No severity in ingestion** + + * Forbidden fields: `severity`, `cvss`, `cvss_vector`, `effective_status`, `effective_range`, `merged_from`, `consensus_provider`, `reachability`, `asset_criticality`, `risk_score`. +2. **No merges or de‑dups in ingestion** + + * No combining two upstream advisories into one. No picking a single truth when multiple VEX statements exist. +3. **Provenance is mandatory** + + * Every raw doc includes `provenance` and `signature/checksum`. +4. **Idempotent upserts** + + * Same upstream document (by `upstream_id` + `source` + `content_hash`) must not create duplicates. +5. **Append‑only versioning** + + * Revisions from the source create new immutable documents with `supersedes` pointers; no in‑place edits. +6. **Linkset only** + + * Ingestion can compute and store a `linkset` for join performance. Linkset does not alter or infer severity/status. +7. **Policy‑time only for effective findings** + + * Only the Policy Engine can write `effective_finding_*` materializations. +8. **Schema safety** + + * Strict JSON schema validation at DB level; unknown fields reject writes. +9. **Clock discipline** + + * Timestamps are UTC, monotonic within a batch; collectors record `fetched_at` and `received_at`. + +### 3.2 Data model + +#### 3.2.1 `advisory_raw` (Mongo collection) + +```json +{ + "_id": "advisory_raw:osv:GHSA-xxxx-....:v3", + "source": { + "vendor": "OSV", + "stream": "github", + "api": "https://api.osv.dev/v1/.../GHSA-...", + "collector_version": "conseiller/1.7.3" + }, + "upstream": { + "upstream_id": "GHSA-xxxx-....", + "document_version": "2024-09-01T12:13:14Z", + "fetched_at": "2025-01-02T03:04:05Z", + "received_at": "2025-01-02T03:04:06Z", + "content_hash": "sha256:...", + "signature": { + "present": true, + "format": "dsse", + "key_id": "rekor:.../key/abc", + "sig": "base64..." 
+ } + }, + "content": { + "format": "OSV", + "spec_version": "1.6", + "raw": { /* full upstream JSON, unmodified */ } + }, + "identifiers": { + "cve": ["CVE-2023-1234"], + "ghsa": ["GHSA-xxxx-...."], + "aliases": ["CVE-2023-1234", "GHSA-xxxx-...."] + }, + "linkset": { + "purls": ["pkg:npm/lodash@4.17.21", "pkg:maven/..."], + "cpes": ["cpe:2.3:a:..."], + "references": [ + {"type":"advisory","url":"https://..."}, + {"type":"fix","url":"https://..."} + ], + "reconciled_from": ["content.raw.affected.ranges", "content.raw.pkg"] + }, + "supersedes": "advisory_raw:osv:GHSA-xxxx-....:v2", + "tenant": "default" +} +``` + +> Note: No `severity`, no `cvss`, no `effective_*`. If the upstream payload includes CVSS, it stays inside `content.raw` and is not promoted or normalized at ingestion. + +#### 3.2.2 `vex_raw` (Mongo collection) + +```json +{ + "_id": "vex_raw:vendorX:doc-123:v4", + "source": { + "vendor": "VendorX", + "stream": "vex", + "api": "https://.../vex/doc-123", + "collector_version": "excitator/0.9.2" + }, + "upstream": { + "upstream_id": "doc-123", + "document_version": "2025-01-15T08:09:10Z", + "fetched_at": "2025-01-16T01:02:03Z", + "received_at": "2025-01-16T01:02:03Z", + "content_hash": "sha256:...", + "signature": { "present": true, "format": "cms", "key_id": "kid:...", "sig": "..." } + }, + "content": { + "format": "CycloneDX-VEX", // or "CSAF-VEX" + "spec_version": "1.5", + "raw": { /* full upstream VEX */ } + }, + "identifiers": { + "statements": [ + { + "advisory_ids": ["CVE-2023-1234","GHSA-..."], + "component_purls": ["pkg:deb/openssl@1.1.1"], + "status": "not_affected", + "justification": "component_not_present" + } + ] + }, + "linkset": { + "purls": ["pkg:deb/openssl@1.1.1"], + "cves": ["CVE-2023-1234"], + "ghsas": ["GHSA-..."] + }, + "supersedes": "vex_raw:vendorX:doc-123:v3", + "tenant": "default" +} +``` + +> VEX statuses remain as raw facts. No cross‑provider consensus is computed here. + +### 3.3 Database validation + +* MongoDB JSON Schema validators on both collections: + + * Reject forbidden fields at the top level. + * Enforce presence of `source`, `upstream`, `content`, `linkset`, `tenant`. + * Enforce string formats for timestamps and hashes. + +### 3.4 Write paths + +1. **Collector fetches upstream** + + * Normalize transport (gzip/json), compute `content_hash`, verify signature if available. +2. **Build raw doc** + + * Populate `source`, `upstream`, `content.raw`, `identifiers`, `linkset`. +3. **Idempotent upsert** + + * Lookup by `(source.vendor, upstream.upstream_id, upstream.content_hash)`. If exists, skip; if new content hash, insert new revision with `supersedes`. +4. **AOC guard** + + * Runtime interceptor inspects write payload; if any forbidden field detected, reject with `ERR_AOC_001`. +5. **Metrics** + + * Emit `ingestion_write_ok` or `ingestion_write_reject` with reason code. + +### 3.5 Read paths (ingestion scope) + +* Allow only listing, getting raw docs, and searching by linkset. No endpoints return “effective findings” from ingestion services. 
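+
+A minimal, illustrative sketch of the runtime guard described in §3.4 (step 4) follows. It is not the shipped implementation: it assumes a C# repository layer using `MongoDB.Bson`, and the names `AocWriteGuard`/`EnsureCompliant` are placeholders for the `AOCWriteGuard` helper the shared library in §6.3 is expected to provide.
+
+```csharp
+using System;
+using System.Collections.Generic;
+using System.Linq;
+using MongoDB.Bson;
+
+// Illustrative only — checks a candidate advisory_raw / vex_raw document before insert.
+public static class AocWriteGuard
+{
+    // Forbidden top-level keys per invariant 1 in §3.1.
+    private static readonly HashSet<string> ForbiddenKeys = new(StringComparer.OrdinalIgnoreCase)
+    {
+        "severity", "cvss", "cvss_vector", "effective_status", "effective_range",
+        "merged_from", "consensus_provider", "reachability", "asset_criticality", "risk_score"
+    };
+
+    // Required top-level sections per §3.3.
+    private static readonly string[] RequiredKeys = { "source", "upstream", "content", "linkset", "tenant" };
+
+    public static void EnsureCompliant(BsonDocument rawDoc)
+    {
+        var forbidden = rawDoc.Names.Where(n => ForbiddenKeys.Contains(n)).ToList();
+        if (forbidden.Count > 0)
+        {
+            // Surfaced as ERR_AOC_001 at the API boundary (§3.6).
+            throw new InvalidOperationException(
+                $"ERR_AOC_001: forbidden top-level field(s): {string.Join(", ", forbidden)}");
+        }
+
+        var missing = RequiredKeys.Where(k => !rawDoc.Contains(k)).ToList();
+        if (missing.Count > 0)
+        {
+            // Surfaced as ERR_AOC_004 (missing provenance / required sections).
+            throw new InvalidOperationException(
+                $"ERR_AOC_004: missing required section(s): {string.Join(", ", missing)}");
+        }
+    }
+}
+```
+
+The check stays O(number of top-level keys), in line with the performance note in §9; anything nested inside `content.raw` is deliberately left untouched.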
+ +### 3.6 Error codes + +| Code | Meaning | HTTP | +| ------------- | ------------------------------------------------------------ | ---- | +| `ERR_AOC_001` | Forbidden field present (severity/consensus/normalized data) | 400 | +| `ERR_AOC_002` | Merge attempt detected (multiple upstreams fused) | 400 | +| `ERR_AOC_003` | Idempotency violation (duplicate without supersedes) | 409 | +| `ERR_AOC_004` | Missing provenance fields | 422 | +| `ERR_AOC_005` | Signature/checksum mismatch | 422 | +| `ERR_AOC_006` | Attempt to write effective findings from ingestion context | 403 | +| `ERR_AOC_007` | Unknown top‑level fields (schema violation) | 400 | + +### 3.7 AOC Verifier + +A build‑time and runtime safeguard: + +* **Static checks (CI)** + + * Block imports of `*.Policy*` or `*.Merge*` from ingestion modules. + * AST lint rule: any write to `advisory_raw` or `vex_raw` setting a forbidden key fails the build. +* **Runtime checks** + + * Repository layer interceptor inspects documents before insert/update; rejects forbidden fields and multi‑source merges. +* **Drift detection job** + + * Nightly job scans newest N docs; if violation found, pages ops and blocks new pipeline runs. + +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +### 3.8 Indexing strategy + +* `advisory_raw`: + + * `{ "identifiers.cve": 1 }`, `{ "identifiers.ghsa": 1 }`, `{ "linkset.purls": 1 }`, `{ "source.vendor": 1, "upstream.upstream_id": 1, "upstream.content_hash": 1 }` (unique), `{ "tenant": 1 }`. +* `vex_raw`: + + * `{ "identifiers.statements.advisory_ids": 1 }`, `{ "linkset.purls": 1 }`, `{ "source.vendor": 1, "upstream.upstream_id": 1, "upstream.content_hash": 1 }` (unique), `{ "tenant": 1 }`. + +### 3.9 Interaction with Policy Engine + +* Policy Engine pulls raw docs by identifiers/linksets and computes: + + * De‑dup/merge per policy + * Consensus for VEX statements + * Severity normalization and risk scoring +* Writes **only** to `effective_finding_{policyId}` collections. + +A dedicated write guard refuses `effective_finding_*` writes from any caller that isn’t the Policy Engine service identity. + +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +### 3.10 Migration plan + +1. **Freeze ingestion writes** except raw pass‑through. +2. **Backfill**: copy existing ingestion collections to `_backup_*`. +3. **Strip forbidden fields** from raw copies, move them into a temporary `advisory_view_legacy` used only by Policy Engine for parity. +4. **Enable DB schema validators**. +5. **Run collectors** in dry‑run; ensure only allowed keys land. +6. **Switch Policy Engine** to pull exclusively from `*_raw` and to compute everything else. +7. **Delete legacy normalized fields** in ingestion codepaths. +8. **Enable runtime guards** and CI lint. + +### 3.11 Observability + +* Metrics: + + * `aoc_violation_total{code=...}`, `ingestion_write_total{result=ok|reject}`, `ingestion_signature_verified_total{result=ok|fail}`, `ingestion_latency_seconds`, `advisory_revision_count`. +* Tracing: span `ingest.fetch`, `ingest.transform`, `ingest.write`, `aoc.guard`. +* Logs: include `tenant`, `source.vendor`, `upstream.upstream_id`, `content_hash`, `correlation_id`. + +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. 
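+
+As a concrete sketch of the idempotency index from §3.8 (backing the upsert lookup in §3.4, step 3), the snippet below creates the unique compound index with the C# MongoDB driver. The connection string, database name, and the extra `linkset.purls` index call are illustrative only.
+
+```csharp
+using MongoDB.Bson;
+using MongoDB.Driver;
+
+// Illustrative only: one raw revision per (vendor, upstream_id, content_hash, tenant).
+// A re-fetch of identical content is a no-op; changed content inserts a new revision
+// that points at its predecessor via `supersedes`.
+var database = new MongoClient("mongodb://localhost:27017").GetDatabase("stellaops");
+var advisoryRaw = database.GetCollection<BsonDocument>("advisory_raw");
+
+var idempotencyKeys = Builders<BsonDocument>.IndexKeys
+    .Ascending("source.vendor")
+    .Ascending("upstream.upstream_id")
+    .Ascending("upstream.content_hash")
+    .Ascending("tenant");
+
+advisoryRaw.Indexes.CreateOne(
+    new CreateIndexModel<BsonDocument>(idempotencyKeys, new CreateIndexOptions { Unique = true }));
+
+// Secondary join index for linkset lookups (§3.8); the vex_raw collection mirrors this layout.
+advisoryRaw.Indexes.CreateOne(
+    new CreateIndexModel<BsonDocument>(Builders<BsonDocument>.IndexKeys.Ascending("linkset.purls")));
+```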
+ +### 3.12 Security and tenancy + +* Every raw doc carries a `tenant` field. +* Authority enforces `advisory:ingest` and `vex:ingest` scopes for ingestion endpoints. +* Cross‑tenant reads/writes are blocked by default. +* Secrets never logged; signatures verified with pinned trust stores. + +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +### 3.13 CLI and Console behavior + +* **CLI** + + * `stella sources ingest --dry-run` prints would‑write payload and explicitly shows that no severity/status fields are present. + * `stella aoc verify` scans last K documents and reports violations with exit codes. +* **Console** + + * Sources dashboard shows AOC pass/fail per job, most recent violation codes, and a drill‑down to the offending document. + +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +--- + +## 4) API surface (ingestion scope) + +### 4.1 Conseiller (Advisories) + +* `POST /ingest/advisory` + + * Body: raw upstream advisory with metadata; server constructs document, not the client. + * Rejections: `ERR_AOC_00x` per table above. +* `GET /advisories/raw/{id}` +* `GET /advisories/raw?cve=CVE-...&purl=pkg:...&tenant=...` +* `GET /advisories/raw/{id}/provenance` +* `POST /aoc/verify?since=ISO8601` returns summary stats and first N violations. + +### 4.2 Excitator (VEX) + +* `POST /ingest/vex` +* `GET /vex/raw/{id}` +* `GET /vex/raw?advisory_id=CVE-...&purl=pkg:...` +* `POST /aoc/verify?since=ISO8601` + +All endpoints require `tenant` scope and appropriate `:write` or `:read`. + +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +--- + +## 5) Example: end‑to‑end flow + +1. Collector fetches `GHSA-1234` from OSV. +2. Build `advisory_raw` with linkset PURLs. +3. Insert; AOC guard approves. +4. Policy Engine later evaluates SBOM `S-42` under `policy P-7`, reads raw advisory and any VEX raw docs, computes effective findings, and writes to `effective_finding_P-7`. +5. CLI `stella aoc verify --since 24h` returns `0` violations. + +--- + +## 6) Implementation tasks + +Breakdown by component with exact work items. Each section ends with the imposed sentence you requested. + +### 6.1 Conseiller (advisory ingestion, WS + Worker) + +* [ ] Add Mongo JSON schema validation for `advisory_raw`. +* [ ] Implement repository layer with **write interceptors** that reject forbidden fields. +* [ ] Compute `linkset` from upstream using deterministic mappers. +* [ ] Enforce idempotency by unique index on `(source.vendor, upstream.upstream_id, upstream.content_hash, tenant)`. +* [ ] Remove any normalization pipelines; relocate to Policy Engine. +* [ ] Add `POST /ingest/advisory` and `GET /advisories/raw*` endpoints with Authority scope checks. +* [ ] Emit observability metrics and traces. +* [ ] Unit tests: schema violations, idempotency, supersedes chain, forbidden fields. +* [ ] Integration tests: large batch ingest, linkset correctness against golden fixtures. + +**Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +### 6.2 Excitator (VEX ingestion, WS + Worker) + +* [ ] Add Mongo JSON schema validation for `vex_raw`. +* [ ] Implement repository layer guard identical to Conseiller. +* [ ] Deterministic `linkset` extraction for advisory IDs and PURLs. 
+* [ ] Endpoints `POST /ingest/vex`, `GET /vex/raw*` with scopes. +* [ ] Remove any consensus or folding logic; leave VEX statements as raw. +* [ ] Tests as per Conseiller, with rich fixtures for CycloneDX‑VEX and CSAF. + +**Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +### 6.3 Web API shared library + +* [ ] Define `AOCForbiddenKeys` and export for both services. +* [ ] Provide `AOCWriteGuard` middleware and `AOCError` types. +* [ ] Provide `ProvenanceBuilder` utility. +* [ ] Provide `SignatureVerifier` and `Checksum` helpers. + +**Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +### 6.4 Policy Engine + +* [ ] Block any import/use from ingestion modules by lint rule. +* [ ] Add hard gate on `effective_finding_*` writes that verifies caller identity is Policy Engine. +* [ ] Update readers to pull fields only from `content.raw`, `identifiers`, `linkset`, not any legacy normalized fields. + +**Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +### 6.5 Authority + +* [ ] Introduce scopes: `advisory:ingest`, `advisory:read`, `vex:ingest`, `vex:read`, `aoc:verify`. +* [ ] Add `tenant` claim propagation to ingestion services. + +**Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +### 6.6 CLI + +* [ ] `stella sources ingest --dry-run` and `stella aoc verify` commands. +* [ ] Exit codes mapping to `ERR_AOC_00x`. +* [ ] JSON output schema including violation list. + +**Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +### 6.7 Console + +* [ ] Sources dashboard tiles: last run, AOC violations, top error codes. +* [ ] Drill‑down page rendering offending doc with highlight on forbidden keys. +* [ ] “Verify last 24h” action calling the AOC Verifier endpoint. + +**Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +### 6.8 CI/CD + +* [ ] AST linter to forbid writes of banned keys in ingestion modules. +* [ ] Unit test coverage gates for AOC guard code. +* [ ] Pipeline stage that runs `stella aoc verify` against seeded DB snapshots. + +**Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +--- + +## 7) Documentation changes (create/update these files) + +1. **`/docs/ingestion/aggregation-only-contract.md`** + + * Add: philosophy, invariants, schemas for `advisory_raw`/`vex_raw`, error codes, linkset definition, examples, idempotency rules, supersedes, API references, migration steps, observability, security. +2. **`/docs/architecture/overview.md`** + + * Update system diagram to show AOC boundary and raw stores; add sequence diagram: fetch → guard → raw insert → policy evaluation. +3. **`/docs/architecture/policy-engine.md`** + + * Clarify ingestion boundary; list inputs consumed from raw; note that any severity/consensus is policy‑time only. +4. **`/docs/ui/console.md`** + + * Add Sources dashboard section: AOC tiles and violation drill‑down. +5. **`/docs/cli/cli-reference.md`** + + * Add `stella aoc verify` and `stella sources ingest --dry-run` usage and exit codes. +6. 
**`/docs/observability/observability.md`** + + * Document new metrics, traces, logs keys for AOC. +7. **`/docs/security/authority-scopes.md`** + + * Add new scopes and tenancy enforcement for ingestion endpoints. +8. **`/docs/deploy/containers.md`** + + * Note DB validators must be enabled; environment flags for AOC guards; read‑only user for verify endpoint. + +Each file should include a “Compliance checklist” subsection for AOC. + +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +--- + +## 8) Acceptance criteria + +* DB validators are active and reject writes with forbidden fields. +* AOC runtime guards log and reject violations with correct error codes. +* CI linter prevents shipping code that writes forbidden keys to raw stores. +* Ingestion of known fixture sets results in zero normalized fields outside `content.raw`. +* Policy Engine is the only writer of `effective_finding_*` materializations. +* CLI `stella aoc verify` returns success on clean datasets and non‑zero on seeded violations. +* Console shows AOC status and violation drill‑downs. + +--- + +## 9) Risks and mitigations + +* **Collector drift**: new upstream fields tempt developers to normalize. + + * Mitigation: CI linter + guard + schema validators; require RFC to extend linkset. +* **Performance impact**: extra validation on write. + + * Mitigation: guard is O(number of keys) and schema check is bounded; indexes sized appropriately. +* **Migration complexity**: moving legacy normalized fields out. + + * Mitigation: temporary `advisory_view_legacy` for parity; stepwise cutover. +* **Tenant leakage**: missing tenant on write. + + * Mitigation: schema requires `tenant`; middleware injects and asserts. + +--- + +## 10) Test plan + +* **Unit tests** + + * Guard rejects forbidden keys; idempotency; supersedes chain; provenance required. + * Signature verification paths: good, bad, absent. +* **Property tests** + + * Randomized upstream docs never produce forbidden keys at top level. +* **Integration tests** + + * Batch ingest of 50k advisories: throughput, zero violations. + * Mixed VEX sources with contradictory statements remain separate in raw. +* **Contract tests** + + * Policy Engine refuses to run without raw inputs; writes only to `effective_finding_*`. +* **End‑to‑end** + + * Seed SBOM + advisories + VEX; ensure findings are identical pre/post migration. + +--- + +## 11) Developer checklists + +**Definition of Ready** + +* Upstream spec reference attached. +* Linkset mappers defined. +* Example fixtures added. + +**Definition of Done** + +* DB validators deployed and tested. +* Runtime guards enabled. +* CI linter merged and enforced. +* Docs updated (files in section 7). +* Metrics visible on dashboard. +* CLI verify passes. + +--- + +## 12) Glossary + +* **Raw document**: exact upstream content plus provenance, with join hints. +* **Linkset**: PURLs/CPEs/IDs extracted to accelerate joins later. +* **Supersedes**: pointer from a newer raw doc to the previous revision of the same upstream doc. +* **Policy‑time**: evaluation phase where merges, consensus, and severity are computed. +* **AOC**: Aggregation‑Only Contract. 
+ +--- + +### Final imposed reminder + +**Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied.** diff --git a/EPIC_10.md b/docs/implplan/EPIC_10.md similarity index 99% rename from EPIC_10.md rename to docs/implplan/EPIC_10.md index 4f27d104..ac9f90fc 100644 --- a/EPIC_10.md +++ b/docs/implplan/EPIC_10.md @@ -327,7 +327,7 @@ Exit codes: `0` ok, `2` bad args, `4` not found, `5` denied, `6` integrity faile ### 4.1 Modules -* **New service:** `src/StellaOps.ExportCenter` +* **New service:** `src/ExportCenter/StellaOps.ExportCenter` * `api/` REST + WS * `planner/` scope planning, delta computation, sampling @@ -343,7 +343,7 @@ Exit codes: `0` ok, `2` bad args, `4` not found, `5` denied, `6` integrity faile * **SDK/CLI** - * `src/StellaOps.Cli` subcommands with streaming download and verification. + * `src/Cli/StellaOps.Cli` subcommands with streaming download and verification. * **Console** diff --git a/EPIC_11.md b/docs/implplan/EPIC_11.md similarity index 99% rename from EPIC_11.md rename to docs/implplan/EPIC_11.md index fe3cc19a2369197125507af673c4588ae669e072..312858f0184e78fb10d84dc4d929611fb3556d46 100644 GIT binary patch delta 53 zcmbQYmTBKwrVR;k{C*7i3?&Si3~4}|%22d9Hf}x>b0~x7(Ew@d)jVhuz9 diff --git a/EPIC_12.md b/docs/implplan/EPIC_12.md similarity index 69% rename from EPIC_12.md rename to docs/implplan/EPIC_12.md index e97bf2c92b634b31d0f5e21c36d950df7b78be0d..3a0440ff31de4a20c12cf6a3be1e8456d67922f8 100644 GIT binary patch delta 2382 zcmZuzdrXs86u&K^SO=>uN-b@n1r*AoMPA~N@f1NeD{mFFJme*mS{_10R3xaV2>mG{ zqByf`&KN{WjML4H%^Ck#oD$p;XPi;voY_kgH?u8s=leurr@6W3+c>A$7Q1=MQ4Tq4>On{3 z=rkWf{xmI^l%6h!gv8E^o(LVhed+2);$K{y9K}xJY%(nnFCsc1{>*Ni87V-oyu z?|S*#>%5Lrv{0)5SfTd0j;B6j_}ce22K`1LN%n=evm1Km zHo$FO2X57c;PSjal=xqSHlUF#J`8w^=#K%>&}hf;RI8uGx%Q@JvO&4hxK^OIhfLL4TZ)S1IBPRb48E+ki= zA>5r`@<@0kad)U9;wCZHh&Lu}{R`Z1FTyyXb91g?7FIVA&JCENxDE2bx)!r43b&9B zsh*h$p~TfF5yhs!(|c1elMZv<}29 zy-gm~=}U>OPv3=q(=XxDJHO-XyA9AUYryekgQQty@Fcp?unB(|`p~m{990=N17CW= zl$t)&yjcVbAR5mMER_P{5PZ5~YbM=$j$BZSXPHXe&Q#g5LO21^vNYV%SxTDuVkE2G zmL-QcM+9TG{P|EZUBq%(CU@?5maW6j*)D`#fRLOb;z?xvea><^iRbU)rJ#_S8&9?` zLO8W*76x-gIF}oQ2f5);<;5UBPeJzAj8b&vEykEpLi`Q1P%g?=m^jS}PkgXKi?J0w z)a7VfX(Z|C)l&SkG6zGeWT;w|KsaYVhw4Z>Cuwj zSm^uOUB6ctOniYY-APK=SIYiqbp!$au2vIOo1{deO%X7g(x|I&!=&fAwkCw)g}^kofn9qS}TI;hNw(isdNXSy}kke*0=g_>vMU}Wxjz*MloAQw2W0y@hBr5 zMF+J;iVF=bh~3bLvm2}kZ!C9s)2A9kl`s2AWa;#FyvY|=qSi7IoSSyR)?|Xv+J__7 zSlqI5=@zk(s{(}^X9!?wHsWe?9e)E93W% z-?jwSHcFURuiCZ!9dh(&yAsXq!+5-XKAOwKuxxL!Ki)o90Asr^Vs{?Hot;GvXOr8( zmCVB(chS1*41FlIDB0Z(jgJgd_Vm$BMDma1=ALxAnIPnJS{rI8%1?gQM&_QuolZm&x=fJ)D1U~QHhaB55MCf<$@G&?KPY@0LzE8Lf>`SGQ3M=!B0oFP(?PRmF`mdepyDOA1IHZvr1WyXEj7LSG4)^{W>1h9V=6g#yDh8C^X`Gwqc z=z?r6Sr?3r@n{G$dYdn4@TWWtc-Re-iX$r*$2@hJQ6UX75z(mH?$y4I%E0S zL2QqN^Rbt4WqsUrM9b5Yh;cgFeZr!HzjUvlh3nQB7W_w71s4sA8JqP8vo5)G4xNSe*1~9CDD#x|{2^J(w8>}M zdA)HnN<+qQL{U`3RilD4`KkULL!n#L`jS(E>H0+xQ-Up=NmJlyaWY?`f|+a>%q^Y= zCsNP*wdPb|NEFL1)A784>fP(D_q936UlvPncX2mM>f~ zeTJ#)S#G6}T+bXuY+G>(F(K;{B&M>Ok?>V!_eoiN|2Z=^kiC*B%dqMuA1lo{T(X+4 z;i9dpLZ&`U*4i&x_2rc^Cy4dVvn~`i4eC`HO_^7eC;7 zi5t~i%GvlbGUFQu5KHo@+>8YinNqpUXq%X{arn~;=|Uo+S;!Z1(P2qbx)IZ+II>Zw zmLms~RyA~QqQoZY$|lMttLJpRi?X=p$?ihFAAX@iUlhx7&BUF z+Ps8mXTsIZ6-<&Y%2ikpMcUjKlxQ^L4Del{ndQ(7Ef}|jhU~+WR0~d_7JW7s8PKYu zC>?RQC>wF2D2jV}zsQE;`$Zb8npjv>5)3Db_i%1viyET0B#}N!6wf-RyX8KwE#`Vr4;+ld&m zovemzkA#lxJGlNY+o_uFl&T@VG@47LrE~{(N@L(;=_V%88A+$EEEralQAGVCNjhEj z^#8YBu3;!Q2Bk<{7?ho`(LQlqyA(qWlFA-^7*ina%ak^f6Qvhb61@X_4@19BHpZ%8f8qq zgJct 
z+FZ?uf4i!ueVSRf^#?>9ixtL;x)Ajti|_(=AT37IiER^KEf&kM1JnU?8!1{B8mX%l zyhPok>m}>LM=XF?!~38w#2rF!MrczxqN~ZpoxRAMIWM0?eLhNs;LVVCd zj=Ntz`iO&8_GRHh=c@5pb#n(bt#sVJR=SP{tx|hvJ}3pH{~$54LtThnhopkhctW`! z=^h6%!=7BkDNnV3_VIaG-?o7-9BZSuYOF1aiUz)ZrFD7~28Ry+#zs0=QjrQ*UaiAX z=8?*PcX8&3lnGNuenh=5Ub}$kIXa3cw)+q#EGE4F*%0fcZnoEZCZL_D4(Syw?4Y2Z z>ljANUYQ83ub*b+OF3-?XJ;g=?tB-ypF1gEJYBTMvhJaPR&R7mR#iQjs8`U7Rn>EH zx*a4zk0KJ{d$G=Z1--MRC*20r3krH;;4-|%12&bV2KzB;p=4z6*xLa&%suY^?!@+X zoZOSWL3z^i#&y;^_}dP{3KbmcyNHbG#CwR7488rI2eeuCCjX}Zhu<7QZNtDN#Dh_h Ry@Ma|3qLm)E(@lF{ug#H4&DF& diff --git a/EPIC_13.md b/docs/implplan/EPIC_13.md similarity index 100% rename from EPIC_13.md rename to docs/implplan/EPIC_13.md diff --git a/EPIC_14.md b/docs/implplan/EPIC_14.md similarity index 100% rename from EPIC_14.md rename to docs/implplan/EPIC_14.md diff --git a/EPIC_15.md b/docs/implplan/EPIC_15.md similarity index 100% rename from EPIC_15.md rename to docs/implplan/EPIC_15.md diff --git a/EPIC_16.md b/docs/implplan/EPIC_16.md similarity index 98% rename from EPIC_16.md rename to docs/implplan/EPIC_16.md index b73e1cab..dea96db6 100644 --- a/EPIC_16.md +++ b/docs/implplan/EPIC_16.md @@ -1 +1 @@ -See `docs/airgap/EPIC_16_AIRGAP_MODE.md` for the full Epic 16 specification. +See `docs/airgap/EPIC_16_AIRGAP_MODE.md` for the full Epic 16 specification. diff --git a/EPIC_17.md b/docs/implplan/EPIC_17.md similarity index 98% rename from EPIC_17.md rename to docs/implplan/EPIC_17.md index aaa01c0d..5caad030 100644 --- a/EPIC_17.md +++ b/docs/implplan/EPIC_17.md @@ -1 +1 @@ -See `docs/api/EPIC_17_SDKS_OPENAPI.md` for the complete Epic 17 specification. +See `docs/api/EPIC_17_SDKS_OPENAPI.md` for the complete Epic 17 specification. diff --git a/EPIC_18.md b/docs/implplan/EPIC_18.md similarity index 98% rename from EPIC_18.md rename to docs/implplan/EPIC_18.md index ce01c65f..e6878951 100644 --- a/EPIC_18.md +++ b/docs/implplan/EPIC_18.md @@ -1 +1 @@ -See `docs/risk/EPIC_18_RISK_PROFILES.md` for the complete Epic 18 specification. +See `docs/risk/EPIC_18_RISK_PROFILES.md` for the complete Epic 18 specification. diff --git a/EPIC_19.md b/docs/implplan/EPIC_19.md similarity index 98% rename from EPIC_19.md rename to docs/implplan/EPIC_19.md index 94c28dcf..81b22dbe 100644 --- a/EPIC_19.md +++ b/docs/implplan/EPIC_19.md @@ -1 +1 @@ -See `docs/attestor/EPIC_19_ATTESTOR_CONSOLE.md` for the complete Epic 19 specification. +See `docs/attestor/EPIC_19_ATTESTOR_CONSOLE.md` for the complete Epic 19 specification. diff --git a/EPIC_2.md b/docs/implplan/EPIC_2.md similarity index 97% rename from EPIC_2.md rename to docs/implplan/EPIC_2.md index 508c07a3..0422f310 100644 --- a/EPIC_2.md +++ b/docs/implplan/EPIC_2.md @@ -1,567 +1,567 @@ -Fine. Here’s the next epic, written so you can paste it straight into the repo without having to babysit me. Same structure as before, maximum detail, zero hand‑waving. - ---- - -# Epic 2: Policy Engine & Policy Editor (VEX + Advisory Application Rules) - -> Short name: **Policy Engine v2** -> Services touched: **Policy Engine, Web API, Console (Policy Editor), CLI, Conseiller, Excitator, SBOM Service, Authority, Workers/Scheduler** -> Data stores: **MongoDB (policies, runs, effective findings), optional Redis/NATS for jobs** - ---- - -## 1) What it is - -This epic delivers the **organization‑specific decision layer** for Stella. Ingestion is now AOC‑compliant (Epic 1). That means advisories and VEX arrive as immutable raw facts. 
This epic builds the place where those facts become **effective findings** under policies you control. - -Core deliverables: - -* **Policy Engine**: deterministic evaluator that applies rule sets to inputs: - - * Inputs: `advisory_raw`, `vex_raw`, SBOMs, optional telemetry hooks (reachability stubs), org metadata. - * Outputs: `effective_finding_{policyId}` materializations, with full explanation traces. -* **Policy Editor (Console + CLI)**: versioned policy authoring, simulation, review/approval workflow, and change diffs. -* **Rules DSL v1**: safe, declarative language for VEX application, advisory normalization, and risk scoring. No arbitrary code execution, no network calls. -* **Run Orchestrator**: incremental re‑evaluation when new raw facts or SBOM changes arrive; efficient partial updates. - -The philosophy is boring on purpose: policy is a **pure function of inputs**. Same inputs and same policy yield the same outputs, every time, on every machine. If you want drama, watch reality TV, not your risk pipeline. - ---- - -## 2) Why - -* Vendors disagree, contexts differ, and your tolerance for risk is not universal. -* VEX means nothing until you decide **how** to apply it to **your** assets. -* Auditors care about the “why.” You’ll need consistent, replayable answers, with traces. -* Security teams need **simulation** before rollouts, and **diffs** after. - ---- - -## 3) How it should work (deep details) - -### 3.1 Data model - -#### 3.1.1 Policy documents (Mongo: `policies`) - -```json -{ - "_id": "policy:P-7:v3", - "policy_id": "P-7", - "version": 3, - "name": "Default Org Policy", - "status": "approved", // draft | submitted | approved | archived - "owned_by": "team:sec-plat", - "valid_from": "2025-01-15T00:00:00Z", - "valid_to": null, - "dsl": { - "syntax": "stella-dsl@1", - "source": "rule-set text or compiled IR ref" - }, - "metadata": { - "description": "Baseline scoring + VEX precedence", - "tags": ["baseline","vex","cvss"] - }, - "provenance": { - "created_by": "user:ali", - "created_at": "2025-01-15T08:00:00Z", - "submitted_by": "user:kay", - "approved_by": "user:root", - "approval_at": "2025-01-16T10:00:00Z", - "checksum": "sha256:..." - }, - "tenant": "default" -} -``` - -Constraints: - -* `status=approved` is required to run in production. -* Version increments are append‑only. Old versions remain runnable for replay. 
- -#### 3.1.2 Policy runs (Mongo: `policy_runs`) - -```json -{ - "_id": "run:P-7:2025-02-20T12:34:56Z:abcd", - "policy_id": "P-7", - "policy_version": 3, - "inputs": { - "sbom_set": ["sbom:S-42"], - "advisory_cursor": "2025-02-20T00:00:00Z", - "vex_cursor": "2025-02-20T00:00:00Z" - }, - "mode": "incremental", // full | incremental | simulate - "stats": { - "components": 1742, - "advisories_considered": 9210, - "vex_considered": 1187, - "rules_fired": 68023, - "findings_out": 4321 - }, - "trace": { - "location": "blob://traces/run-.../index.json", - "sampling": "smart-10pct" - }, - "status": "succeeded", // queued | running | failed | succeeded | canceled - "started_at": "2025-02-20T12:34:56Z", - "finished_at": "2025-02-20T12:35:41Z", - "tenant": "default" -} -``` - -#### 3.1.3 Effective findings (Mongo: `effective_finding_P-7`) - -```json -{ - "_id": "P-7:S-42:pkg:npm/lodash@4.17.21:CVE-2021-23337", - "policy_id": "P-7", - "policy_version": 3, - "sbom_id": "S-42", - "component_purl": "pkg:npm/lodash@4.17.21", - "advisory_ids": ["CVE-2021-23337", "GHSA-..."], - "status": "affected", // affected | not_affected | fixed | under_investigation | suppressed - "severity": { - "normalized": "High", - "score": 7.5, - "vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:N", - "rationale": "cvss_base(OSV) + vendor_weighting + env_modifiers" - }, - "rationale": [ - {"rule":"vex.precedence","detail":"VendorX not_affected justified=component_not_present wins"}, - {"rule":"advisory.cvss.normalization","detail":"mapped GHSA severity to CVSS 3.1 = 7.5"} - ], - "references": { - "advisory_raw_ids": ["advisory_raw:osv:GHSA-...:v3"], - "vex_raw_ids": ["vex_raw:VendorX:doc-123:v4"] - }, - "run_id": "run:P-7:2025-02-20T12:34:56Z:abcd", - "tenant": "default" -} -``` - -Write protection: only the **Policy Engine** service identity may write any `effective_finding_*` collection. - ---- - -### 3.2 Rules DSL v1 (stella‑dsl@1) - -**Design goals** - -* Declarative, composable, deterministic. -* No loops, no network IO, no non‑deterministic time. -* Policy authors see readable text; the engine compiles to a safe IR. - -**Concepts** - -* **WHEN** condition matches a tuple `(sbom_component, advisory, optional vex_statements)` -* **THEN** actions set `status`, compute `severity`, attach `rationale`, or `suppress` with reason. -* **Profiles** for severity and scoring; **Maps** for vendor weighting; **Guards** for VEX justification. 
- -**Mini‑grammar (subset)** - -``` -policy "Default Org Policy" syntax "stella-dsl@1" { - - profile severity { - map vendor_weight { - source "GHSA" => +0.5 - source "OSV" => +0.0 - source "VendorX" => -0.2 - } - env base_cvss { - if env.runtime == "serverless" then -0.5 - if env.exposure == "internal-only" then -1.0 - } - } - - rule vex_precedence { - when vex.any(status in ["not_affected","fixed"]) - and vex.justification in ["component_not_present","vulnerable_code_not_present"] - then status := vex.status - because "VEX strong justification prevails"; - } - - rule advisory_to_cvss { - when advisory.source in ["GHSA","OSV"] - then severity := normalize_cvss(advisory) - because "Map vendor severity or CVSS vector"; - } - - rule reachability_soft_suppress { - when severity.normalized <= "Medium" - and telemetry.reachability == "none" - then status := "suppressed" - because "not reachable and low severity"; - } -} -``` - -**Built‑ins** (non‑exhaustive) - -* `normalize_cvss(advisory)` maps GHSA/OSV/CSAF severity fields to CVSS v3.1 numbers when possible; otherwise vendor‑to‑numeric mapping table in policy. -* `vex.any(...)` tests across matching VEX statements for the same `(component, advisory)`. -* `telemetry.*` is an optional input namespace reserved for future reachability data; if absent, expressions evaluate to `unknown` (no effect). - -**Determinism** - -* Rules are evaluated in **stable order**: explicit `priority` attribute or lexical order. -* **First‑match** semantics for conflicting status unless `combine` is used. -* Severity computations are pure; numeric maps are part of policy document. - ---- - -### 3.3 Evaluation model - -1. **Selection** - - * For each SBOM component PURL, find candidate advisories from `advisory_raw` via linkset PURLs or identifiers. - * For each pair `(component, advisory)`, load all matching VEX facts from `vex_raw`. - -2. **Context assembly** - - * Build an evaluation context from: - - * `sbom_component`: PURL, licenses, relationships. - * `advisory`: source, identifiers, references, embedded vendor severity (kept in `content.raw`). - * `vex`: list of statements with status and justification. - * `env`: org‑specific env vars configured per policy run (e.g., exposure). - * Optional `telemetry` if available. - -3. **Rule execution** - - * Compile DSL to IR once per policy version; cache. - * Execute rules per tuple; record which rules fired and the order. - * If no rule sets status, default is `affected`. - * If no rule sets severity, default severity uses `normalize_cvss(advisory)` with vendor defaults. - -4. **Materialization** - - * Write to `effective_finding_{policyId}` with `rationale` chain and references to raw docs. - * Emit per‑tuple trace events; sample and store full traces per run. - -5. **Incremental updates** - - * A watch job observes new `advisory_raw` and `vex_raw` inserts and SBOM deltas. - * The orchestrator computes the affected tuples and re‑evaluates only those. - -6. **Replay** - - * Any `policy_run` is fully reproducible by `(policy_id, version, input set, cursors)`. - ---- - -### 3.4 VEX application semantics - -* **Precedence**: a `not_affected` with strong justification (`component_not_present`, `vulnerable_code_not_present`, `fix_not_required`) wins unless another rule explicitly overrides by environment context. -* **Scoping**: VEX statements often specify product/component scope. Matching uses PURL equivalence and version ranges extracted during ingestion linkset generation. 
-* **Conflicts**: If multiple VEX statements conflict, the default is **most‑specific scope wins** (component > product > vendor), then newest `document_version`. Policies can override with explicit rules. -* **Explainability**: Every VEX‑driven decision records which statement IDs were considered and which one won. - ---- - -### 3.5 Advisory normalization rules - -* **Vendor severity mapping**: Map GHSA levels or CSAF product‑tree severities to CVSS‑like numeric bands via policy maps. -* **CVSS vector use**: If a valid vector exists in `content.raw`, parse and compute; apply policy modifiers from `profile severity`. -* **Temporal/environment modifiers**: Optional reductions for network exposure, isolation, or compensating controls, all encoded in policy. - ---- - -### 3.6 Performance and scale - -* Partition evaluation by SBOM ID and hash ranges of PURLs. -* Pre‑index `advisory_raw.linkset.purls` and `vex_raw.linkset.purls` (already in Epic 1). -* Use streaming iterators; avoid loading entire SBOM or advisory sets into memory. -* Materialize only changed findings (diff‑aware writes). -* Target: 100k components, 1M advisories considered, 5 minutes incremental SLA on commodity hardware. - ---- - -### 3.7 Error codes - -| Code | Meaning | HTTP | -| ------------- | ----------------------------------------------------- | ---- | -| `ERR_POL_001` | Policy syntax error | 400 | -| `ERR_POL_002` | Policy not approved for run | 403 | -| `ERR_POL_003` | Missing inputs (SBOM/advisory/vex fetch failed) | 424 | -| `ERR_POL_004` | Determinism guard triggered (non‑pure function usage) | 500 | -| `ERR_POL_005` | Write denied to effective findings (caller invalid) | 403 | -| `ERR_POL_006` | Run canceled or timed out | 408 | - ---- - -### 3.8 Observability - -* Metrics: - - * `policy_compile_seconds`, `policy_run_seconds{mode=...}`, `rules_fired_total`, `findings_written_total`, `vex_overrides_total`, `simulate_diff_total{delta=up|down|unchanged}`. -* Tracing: - - * Spans: `policy.compile`, `policy.select`, `policy.eval`, `policy.materialize`. -* Logs: - - * Include `policy_id`, `version`, `run_id`, `sbom_id`, `component_purl`, `advisory_id`, `vex_count`, `rule_hits`. - -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - ---- - -### 3.9 Security and tenancy - -* Only users with `policy:write` can create/modify policies. -* `policy:approve` is a separate privileged role. -* Only Policy Engine service identity has `effective:write`. -* Tenancy is explicit on all documents and queries. - -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - ---- - -## 4) API surface - -### 4.1 Policy CRUD and lifecycle - -* `POST /policies` create draft -* `GET /policies?status=...` list -* `GET /policies/{policyId}/versions/{v}` fetch -* `POST /policies/{policyId}/submit` move draft to submitted -* `POST /policies/{policyId}/approve` approve version -* `POST /policies/{policyId}/archive` archive version - -### 4.2 Compilation and validation - -* `POST /policies/{policyId}/versions/{v}/compile` - - * Returns IR checksum, syntax diagnostics, rule stats. - -### 4.3 Runs - -* `POST /policies/{policyId}/runs` body: `{mode, sbom_set, advisory_cursor?, vex_cursor?, env?}` -* `GET /policies/{policyId}/runs/{runId}` status + stats -* `POST /policies/{policyId}/simulate` returns **diff** vs current approved version on a sample SBOM set. 
- -### 4.4 Findings and explanations - -* `GET /findings/{policyId}?sbom_id=S-42&status=affected&severity=High+Critical` -* `GET /findings/{policyId}/{findingId}/explain` returns ordered rule hits and linked raw IDs. - -All endpoints require tenant scoping and appropriate `policy:*` or `findings:*` roles. - ---- - -## 5) Console (Policy Editor) and CLI behavior - -**Console** - -* Monaco‑style editor with DSL syntax highlighting, lint, quick docs. -* Side‑by‑side **Simulation** panel: show count of affected findings before/after. -* Approval workflow: submit, review comments, approve with rationale. -* Diffs: show rule‑wise changes and estimated impact. -* Read‑only run viewer: heatmap of rules fired, top suppressions, VEX wins. - -**CLI** - -* `stella policy new --name "Default Org Policy"` -* `stella policy edit P-7` opens local editor -> `submit` -* `stella policy approve P-7 --version 3` -* `stella policy simulate P-7 --sbom S-42 --env exposure=internal-only` -* `stella findings ls --policy P-7 --sbom S-42 --status affected` - -Exit codes map to `ERR_POL_*`. - ---- - -## 6) Implementation tasks - -### 6.1 Policy Engine service - -* [ ] Implement DSL parser and IR compiler (`stella-dsl@1`). -* [ ] Build evaluator with stable ordering and first‑match semantics. -* [ ] Implement selection joiners for SBOM↔advisory↔vex using linksets. -* [ ] Materialization writer with upsert‑only semantics to `effective_finding_{policyId}`. -* [ ] Determinism guard (ban wall‑clock, network, and RNG during eval). -* [ ] Incremental orchestrator listening to advisory/vex/SBOM change streams. -* [ ] Trace emitter with rule‑hit sampling. -* [ ] Unit tests, property tests, golden fixtures; perf tests to target SLA. - -**Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -### 6.2 Web API - -* [ ] Policy CRUD, compile, run, simulate, findings, explain endpoints. -* [ ] Pagination, filters, and tenant enforcement on all list endpoints. -* [ ] Error mapping to `ERR_POL_*`. -* [ ] Rate limits on simulate endpoints. - -**Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -### 6.3 Console (Policy Editor) - -* [ ] Editor with DSL syntax highlighting and inline diagnostics. -* [ ] Simulation UI with pre/post counts and top deltas. -* [ ] Approval workflow UI with audit trail. -* [ ] Run viewer dashboards (rule heatmap, VEX wins, suppressions). - -**Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -### 6.4 CLI - -* [ ] New commands: `policy new|edit|submit|approve|simulate`, `findings ls|get`. -* [ ] Json/YAML output formats for CI consumption. -* [ ] Non‑zero exits on syntax errors or simulation failures; map to `ERR_POL_*`. - -**Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -### 6.5 Conseiller & Excitator integration - -* [ ] Provide search endpoints optimized for policy selection (batch by PURLs and IDs). -* [ ] Harden linkset extraction to maximize join recall. -* [ ] Add cursors for incremental selection windows per run. - -**Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -### 6.6 SBOM Service - -* [ ] Ensure fast PURL index and component metadata projection for policy queries. 
-* [ ] Provide relationship graph API for future transitive logic. -* [ ] Emit change events on SBOM updates. - -**Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -### 6.7 Authority - -* [ ] Define scopes: `policy:write`, `policy:approve`, `policy:run`, `findings:read`, `effective:write`. -* [ ] Issue service identity for Policy Engine with `effective:write` only. -* [ ] Enforce tenant claims at gateway. - -**Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -### 6.8 CI/CD - -* [ ] Lint policy DSL in PRs; block invalid syntax. -* [ ] Run `simulate` against golden SBOMs to detect explosive deltas. -* [ ] Determinism CI: two runs with identical seeds produce identical outputs. - -**Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - ---- - -## 7) Documentation changes (create/update these files) - -1. **`/docs/policy/overview.md`** - - * What the Policy Engine is, high‑level concepts, inputs, outputs, determinism. -2. **`/docs/policy/dsl.md`** - - * Full grammar, built‑ins, examples, best practices, anti‑patterns. -3. **`/docs/policy/lifecycle.md`** - - * Draft → submitted → approved → archived, roles, and audit trail. -4. **`/docs/policy/runs.md`** - - * Run modes, incremental mechanics, cursors, replay. -5. **`/docs/api/policy.md`** - - * Endpoints, request/response schemas, error codes. -6. **`/docs/cli/policy.md`** - - * Command usage, examples, exit codes, JSON output contracts. -7. **`/docs/ui/policy-editor.md`** - - * Screens, workflows, simulation, diffs, approvals. -8. **`/docs/architecture/policy-engine.md`** - - * Detailed sequence diagrams, selection/join strategy, materialization schema. -9. **`/docs/observability/policy.md`** - - * Metrics, tracing, logs, sample dashboards. -10. **`/docs/security/policy-governance.md`** - - * Scopes, approvals, tenancy, least privilege. -11. **`/docs/examples/policies/`** - - * `baseline.pol`, `serverless.pol`, `internal-only.pol`, each with commentary. -12. **`/docs/faq/policy-faq.md`** - - * Common pitfalls, VEX conflict handling, determinism gotchas. - -Each file includes a **Compliance checklist** for authors and reviewers. - -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - ---- - -## 8) Acceptance criteria - -* Policies are versioned, approvable, and compilable; invalid DSL blocks merges. -* Engine produces deterministic outputs with full rationale chains. -* VEX precedence rules work per spec and are overridable by policy. -* Simulation yields accurate pre/post deltas and diffs. -* Only Policy Engine can write to `effective_finding_*`. -* Incremental runs pick up new advisories/VEX/SBOM changes without full re‑runs. -* Console and CLI cover authoring, simulation, approval, and retrieval. -* Observability dashboards show rule hits, VEX wins, and run timings. - ---- - -## 9) Risks and mitigations - -* **Policy sprawl**: too many similar policies. - - * Mitigation: templates, policy inheritance in v1.1, tagging, ownership metadata. -* **Non‑determinism creep**: someone sneaks wall‑clock or network into evaluation. - - * Mitigation: determinism guard, static analyzer, and CI replay check. -* **Join miss‑rate**: weak linksets cause under‑matching. 
- - * Mitigation: linkset strengthening in ingestion, PURL equivalence tables, monitoring for “zero‑hit” rates. -* **Approval bottlenecks**: blocked rollouts. - - * Mitigation: RBAC with delegated approvers and time‑boxed SLAs. - ---- - -## 10) Test plan - -* **Unit**: parser, compiler, evaluator; conflict resolution; precedence. -* **Property**: random policies over synthetic inputs; ensure no panics and stable outputs. -* **Golden**: fixed SBOM + curated advisories/VEX → expected findings; compare every run. -* **Performance**: large SBOMs with heavy rule sets; assert run times and memory ceilings. -* **Integration**: end‑to‑end simulate → approve → run → diff; verify write protections. -* **Chaos**: inject malformed VEX, missing advisories; ensure graceful degradation and clear errors. - ---- - -## 11) Developer checklists - -**Definition of Ready** - -* Policy grammar finalized; examples prepared. -* Linkset join queries benchmarked. -* Owner and approvers assigned. - -**Definition of Done** - -* All APIs live with RBAC. -* CLI and Console features shipped. -* Determinism and golden tests green. -* Observability dashboards deployed. -* Docs in section 7 merged. -* Two real org policies migrated and in production. - ---- - -## 12) Glossary - -* **Policy**: versioned rule set controlling status and severity. -* **DSL**: domain‑specific language used to express rules. -* **Run**: a single evaluation execution with defined inputs and outputs. -* **Simulation**: a run that doesn’t write findings; returns diffs. -* **Materialization**: persisted effective findings for fast queries. -* **Determinism**: same inputs + same policy = same outputs. Always. - ---- - -### Final imposed reminder - -**Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied.** +Fine. Here’s the next epic, written so you can paste it straight into the repo without having to babysit me. Same structure as before, maximum detail, zero hand‑waving. + +--- + +# Epic 2: Policy Engine & Policy Editor (VEX + Advisory Application Rules) + +> Short name: **Policy Engine v2** +> Services touched: **Policy Engine, Web API, Console (Policy Editor), CLI, Conseiller, Excitator, SBOM Service, Authority, Workers/Scheduler** +> Data stores: **MongoDB (policies, runs, effective findings), optional Redis/NATS for jobs** + +--- + +## 1) What it is + +This epic delivers the **organization‑specific decision layer** for Stella. Ingestion is now AOC‑compliant (Epic 1). That means advisories and VEX arrive as immutable raw facts. This epic builds the place where those facts become **effective findings** under policies you control. + +Core deliverables: + +* **Policy Engine**: deterministic evaluator that applies rule sets to inputs: + + * Inputs: `advisory_raw`, `vex_raw`, SBOMs, optional telemetry hooks (reachability stubs), org metadata. + * Outputs: `effective_finding_{policyId}` materializations, with full explanation traces. +* **Policy Editor (Console + CLI)**: versioned policy authoring, simulation, review/approval workflow, and change diffs. +* **Rules DSL v1**: safe, declarative language for VEX application, advisory normalization, and risk scoring. No arbitrary code execution, no network calls. +* **Run Orchestrator**: incremental re‑evaluation when new raw facts or SBOM changes arrive; efficient partial updates. + +The philosophy is boring on purpose: policy is a **pure function of inputs**. Same inputs and same policy yield the same outputs, every time, on every machine. 
If you want drama, watch reality TV, not your risk pipeline. + +--- + +## 2) Why + +* Vendors disagree, contexts differ, and your tolerance for risk is not universal. +* VEX means nothing until you decide **how** to apply it to **your** assets. +* Auditors care about the “why.” You’ll need consistent, replayable answers, with traces. +* Security teams need **simulation** before rollouts, and **diffs** after. + +--- + +## 3) How it should work (deep details) + +### 3.1 Data model + +#### 3.1.1 Policy documents (Mongo: `policies`) + +```json +{ + "_id": "policy:P-7:v3", + "policy_id": "P-7", + "version": 3, + "name": "Default Org Policy", + "status": "approved", // draft | submitted | approved | archived + "owned_by": "team:sec-plat", + "valid_from": "2025-01-15T00:00:00Z", + "valid_to": null, + "dsl": { + "syntax": "stella-dsl@1", + "source": "rule-set text or compiled IR ref" + }, + "metadata": { + "description": "Baseline scoring + VEX precedence", + "tags": ["baseline","vex","cvss"] + }, + "provenance": { + "created_by": "user:ali", + "created_at": "2025-01-15T08:00:00Z", + "submitted_by": "user:kay", + "approved_by": "user:root", + "approval_at": "2025-01-16T10:00:00Z", + "checksum": "sha256:..." + }, + "tenant": "default" +} +``` + +Constraints: + +* `status=approved` is required to run in production. +* Version increments are append‑only. Old versions remain runnable for replay. + +#### 3.1.2 Policy runs (Mongo: `policy_runs`) + +```json +{ + "_id": "run:P-7:2025-02-20T12:34:56Z:abcd", + "policy_id": "P-7", + "policy_version": 3, + "inputs": { + "sbom_set": ["sbom:S-42"], + "advisory_cursor": "2025-02-20T00:00:00Z", + "vex_cursor": "2025-02-20T00:00:00Z" + }, + "mode": "incremental", // full | incremental | simulate + "stats": { + "components": 1742, + "advisories_considered": 9210, + "vex_considered": 1187, + "rules_fired": 68023, + "findings_out": 4321 + }, + "trace": { + "location": "blob://traces/run-.../index.json", + "sampling": "smart-10pct" + }, + "status": "succeeded", // queued | running | failed | succeeded | canceled + "started_at": "2025-02-20T12:34:56Z", + "finished_at": "2025-02-20T12:35:41Z", + "tenant": "default" +} +``` + +#### 3.1.3 Effective findings (Mongo: `effective_finding_P-7`) + +```json +{ + "_id": "P-7:S-42:pkg:npm/lodash@4.17.21:CVE-2021-23337", + "policy_id": "P-7", + "policy_version": 3, + "sbom_id": "S-42", + "component_purl": "pkg:npm/lodash@4.17.21", + "advisory_ids": ["CVE-2021-23337", "GHSA-..."], + "status": "affected", // affected | not_affected | fixed | under_investigation | suppressed + "severity": { + "normalized": "High", + "score": 7.5, + "vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:N", + "rationale": "cvss_base(OSV) + vendor_weighting + env_modifiers" + }, + "rationale": [ + {"rule":"vex.precedence","detail":"VendorX not_affected justified=component_not_present wins"}, + {"rule":"advisory.cvss.normalization","detail":"mapped GHSA severity to CVSS 3.1 = 7.5"} + ], + "references": { + "advisory_raw_ids": ["advisory_raw:osv:GHSA-...:v3"], + "vex_raw_ids": ["vex_raw:VendorX:doc-123:v4"] + }, + "run_id": "run:P-7:2025-02-20T12:34:56Z:abcd", + "tenant": "default" +} +``` + +Write protection: only the **Policy Engine** service identity may write any `effective_finding_*` collection. + +--- + +### 3.2 Rules DSL v1 (stella‑dsl@1) + +**Design goals** + +* Declarative, composable, deterministic. +* No loops, no network IO, no non‑deterministic time. +* Policy authors see readable text; the engine compiles to a safe IR. 
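+To make that last point concrete, here is a minimal, non-normative Python sketch of how compiled IR could be cached against the `provenance.checksum` shape from §3.1.1, so the "compile once per policy version" step in §3.3 stays cheap and deterministic. The function names (`source_checksum`, `compile_to_ir`, `get_or_compile`) are illustrative, not the engine's actual API.
+
+```python
+import hashlib
+import json
+
+# Compiled-IR cache keyed by (policy_id, version, source checksum); names are illustrative.
+_IR_CACHE = {}
+
+def source_checksum(dsl_source: str) -> str:
+    """Content digest of the policy source, in the provenance.checksum shape from 3.1.1."""
+    return "sha256:" + hashlib.sha256(dsl_source.encode("utf-8")).hexdigest()
+
+def compile_to_ir(dsl_source: str) -> dict:
+    """Stand-in for the real stella-dsl@1 compiler; returns a placeholder IR."""
+    return {"syntax": "stella-dsl@1", "rules": [], "source_bytes": len(dsl_source)}
+
+def get_or_compile(policy_id: str, version: int, dsl_source: str) -> dict:
+    """Reuse the cached IR for an unchanged (policy, version, source) triple."""
+    key = (policy_id, version, source_checksum(dsl_source))
+    if key not in _IR_CACHE:
+        _IR_CACHE[key] = compile_to_ir(dsl_source)
+    return _IR_CACHE[key]
+
+if __name__ == "__main__":
+    ir = get_or_compile("P-7", 3, 'policy "Default Org Policy" syntax "stella-dsl@1" { }')
+    print(json.dumps(ir, sort_keys=True))
+```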
+ +**Concepts** + +* **WHEN** condition matches a tuple `(sbom_component, advisory, optional vex_statements)` +* **THEN** actions set `status`, compute `severity`, attach `rationale`, or `suppress` with reason. +* **Profiles** for severity and scoring; **Maps** for vendor weighting; **Guards** for VEX justification. + +**Mini‑grammar (subset)** + +``` +policy "Default Org Policy" syntax "stella-dsl@1" { + + profile severity { + map vendor_weight { + source "GHSA" => +0.5 + source "OSV" => +0.0 + source "VendorX" => -0.2 + } + env base_cvss { + if env.runtime == "serverless" then -0.5 + if env.exposure == "internal-only" then -1.0 + } + } + + rule vex_precedence { + when vex.any(status in ["not_affected","fixed"]) + and vex.justification in ["component_not_present","vulnerable_code_not_present"] + then status := vex.status + because "VEX strong justification prevails"; + } + + rule advisory_to_cvss { + when advisory.source in ["GHSA","OSV"] + then severity := normalize_cvss(advisory) + because "Map vendor severity or CVSS vector"; + } + + rule reachability_soft_suppress { + when severity.normalized <= "Medium" + and telemetry.reachability == "none" + then status := "suppressed" + because "not reachable and low severity"; + } +} +``` + +**Built‑ins** (non‑exhaustive) + +* `normalize_cvss(advisory)` maps GHSA/OSV/CSAF severity fields to CVSS v3.1 numbers when possible; otherwise vendor‑to‑numeric mapping table in policy. +* `vex.any(...)` tests across matching VEX statements for the same `(component, advisory)`. +* `telemetry.*` is an optional input namespace reserved for future reachability data; if absent, expressions evaluate to `unknown` (no effect). + +**Determinism** + +* Rules are evaluated in **stable order**: explicit `priority` attribute or lexical order. +* **First‑match** semantics for conflicting status unless `combine` is used. +* Severity computations are pure; numeric maps are part of policy document. + +--- + +### 3.3 Evaluation model + +1. **Selection** + + * For each SBOM component PURL, find candidate advisories from `advisory_raw` via linkset PURLs or identifiers. + * For each pair `(component, advisory)`, load all matching VEX facts from `vex_raw`. + +2. **Context assembly** + + * Build an evaluation context from: + + * `sbom_component`: PURL, licenses, relationships. + * `advisory`: source, identifiers, references, embedded vendor severity (kept in `content.raw`). + * `vex`: list of statements with status and justification. + * `env`: org‑specific env vars configured per policy run (e.g., exposure). + * Optional `telemetry` if available. + +3. **Rule execution** + + * Compile DSL to IR once per policy version; cache. + * Execute rules per tuple; record which rules fired and the order. + * If no rule sets status, default is `affected`. + * If no rule sets severity, default severity uses `normalize_cvss(advisory)` with vendor defaults. + +4. **Materialization** + + * Write to `effective_finding_{policyId}` with `rationale` chain and references to raw docs. + * Emit per‑tuple trace events; sample and store full traces per run. + +5. **Incremental updates** + + * A watch job observes new `advisory_raw` and `vex_raw` inserts and SBOM deltas. + * The orchestrator computes the affected tuples and re‑evaluates only those. + +6. **Replay** + + * Any `policy_run` is fully reproducible by `(policy_id, version, input set, cursors)`. 
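+For orientation only, a minimal Python sketch of step 3 above: per-tuple rule execution with stable ordering, first-match status semantics, and the `affected` / `normalize_cvss` defaults. The type and function names are illustrative; the real evaluator executes compiled IR, not Python callables.
+
+```python
+from dataclasses import dataclass, field
+from typing import Callable, Optional
+
+@dataclass
+class RuleHit:
+    rule: str
+    detail: str
+
+@dataclass
+class Finding:
+    status: str = "affected"                      # default when no rule sets status
+    severity: Optional[dict] = None
+    rationale: list = field(default_factory=list)
+
+@dataclass
+class CompiledRule:
+    name: str
+    priority: int
+    when: Callable[[dict], bool]                  # pure predicate over the eval context
+    then: Callable[[dict, Finding], None]         # pure action mutating the finding
+    because: str
+
+def evaluate_tuple(ctx: dict, rules: list, normalize_cvss: Callable[[dict], dict]) -> Finding:
+    """Evaluate one (component, advisory, vex[]) tuple deterministically."""
+    finding = Finding()
+    status_set = False
+    # Stable ordering: explicit priority first, then lexical rule name.
+    for rule in sorted(rules, key=lambda r: (r.priority, r.name)):
+        if not rule.when(ctx):
+            continue
+        before = finding.status
+        rule.then(ctx, finding)
+        finding.rationale.append(RuleHit(rule.name, rule.because))
+        if finding.status != before:
+            if status_set:
+                finding.status = before           # first-match wins for status
+            else:
+                status_set = True
+    if finding.severity is None:
+        finding.severity = normalize_cvss(ctx["advisory"])  # default severity path
+    return finding
+
+if __name__ == "__main__":
+    demo = evaluate_tuple(
+        {"advisory": {"severity": "HIGH"}},
+        rules=[],
+        normalize_cvss=lambda adv: {"normalized": adv["severity"].title(), "score": None},
+    )
+    print(demo.status, demo.severity)  # affected {'normalized': 'High', 'score': None}
+```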
+ +--- + +### 3.4 VEX application semantics + +* **Precedence**: a `not_affected` with strong justification (`component_not_present`, `vulnerable_code_not_present`, `fix_not_required`) wins unless another rule explicitly overrides by environment context. +* **Scoping**: VEX statements often specify product/component scope. Matching uses PURL equivalence and version ranges extracted during ingestion linkset generation. +* **Conflicts**: If multiple VEX statements conflict, the default is **most‑specific scope wins** (component > product > vendor), then newest `document_version`. Policies can override with explicit rules. +* **Explainability**: Every VEX‑driven decision records which statement IDs were considered and which one won. + +--- + +### 3.5 Advisory normalization rules + +* **Vendor severity mapping**: Map GHSA levels or CSAF product‑tree severities to CVSS‑like numeric bands via policy maps. +* **CVSS vector use**: If a valid vector exists in `content.raw`, parse and compute; apply policy modifiers from `profile severity`. +* **Temporal/environment modifiers**: Optional reductions for network exposure, isolation, or compensating controls, all encoded in policy. + +--- + +### 3.6 Performance and scale + +* Partition evaluation by SBOM ID and hash ranges of PURLs. +* Pre‑index `advisory_raw.linkset.purls` and `vex_raw.linkset.purls` (already in Epic 1). +* Use streaming iterators; avoid loading entire SBOM or advisory sets into memory. +* Materialize only changed findings (diff‑aware writes). +* Target: 100k components, 1M advisories considered, 5 minutes incremental SLA on commodity hardware. + +--- + +### 3.7 Error codes + +| Code | Meaning | HTTP | +| ------------- | ----------------------------------------------------- | ---- | +| `ERR_POL_001` | Policy syntax error | 400 | +| `ERR_POL_002` | Policy not approved for run | 403 | +| `ERR_POL_003` | Missing inputs (SBOM/advisory/vex fetch failed) | 424 | +| `ERR_POL_004` | Determinism guard triggered (non‑pure function usage) | 500 | +| `ERR_POL_005` | Write denied to effective findings (caller invalid) | 403 | +| `ERR_POL_006` | Run canceled or timed out | 408 | + +--- + +### 3.8 Observability + +* Metrics: + + * `policy_compile_seconds`, `policy_run_seconds{mode=...}`, `rules_fired_total`, `findings_written_total`, `vex_overrides_total`, `simulate_diff_total{delta=up|down|unchanged}`. +* Tracing: + + * Spans: `policy.compile`, `policy.select`, `policy.eval`, `policy.materialize`. +* Logs: + + * Include `policy_id`, `version`, `run_id`, `sbom_id`, `component_purl`, `advisory_id`, `vex_count`, `rule_hits`. + +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +--- + +### 3.9 Security and tenancy + +* Only users with `policy:write` can create/modify policies. +* `policy:approve` is a separate privileged role. +* Only Policy Engine service identity has `effective:write`. +* Tenancy is explicit on all documents and queries. + +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. 
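+Before moving on to the API surface, a small Python sketch of the §3.4 default conflict rule: most-specific scope wins, then newest `document_version`. The `scope` field and helper names are assumptions for illustration; actual scope matching uses the linkset-derived PURL equivalence and version ranges described above.
+
+```python
+from dataclasses import dataclass
+from typing import Optional
+
+# Scope specificity for the default tie-break in 3.4: component > product > vendor.
+_SCOPE_RANK = {"component": 3, "product": 2, "vendor": 1}
+
+@dataclass
+class VexStatement:
+    statement_id: str
+    status: str                      # affected | not_affected | fixed | under_investigation
+    justification: Optional[str]
+    scope: str                       # "component" | "product" | "vendor" (assumed field)
+    document_version: int
+
+def resolve_vex_conflict(statements: list) -> Optional[VexStatement]:
+    """Default tie-break: most-specific scope wins, then newest document_version."""
+    if not statements:
+        return None
+    return max(statements, key=lambda s: (_SCOPE_RANK.get(s.scope, 0), s.document_version))
+
+if __name__ == "__main__":
+    winner = resolve_vex_conflict([
+        VexStatement("vex_raw:VendorX:doc-123:v4", "not_affected", "component_not_present", "component", 4),
+        VexStatement("vex_raw:VendorX:doc-090:v7", "affected", None, "product", 7),
+    ])
+    print(winner.status if winner else "no VEX")  # not_affected: component scope outranks product
+```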
+ +--- + +## 4) API surface + +### 4.1 Policy CRUD and lifecycle + +* `POST /policies` create draft +* `GET /policies?status=...` list +* `GET /policies/{policyId}/versions/{v}` fetch +* `POST /policies/{policyId}/submit` move draft to submitted +* `POST /policies/{policyId}/approve` approve version +* `POST /policies/{policyId}/archive` archive version + +### 4.2 Compilation and validation + +* `POST /policies/{policyId}/versions/{v}/compile` + + * Returns IR checksum, syntax diagnostics, rule stats. + +### 4.3 Runs + +* `POST /policies/{policyId}/runs` body: `{mode, sbom_set, advisory_cursor?, vex_cursor?, env?}` +* `GET /policies/{policyId}/runs/{runId}` status + stats +* `POST /policies/{policyId}/simulate` returns **diff** vs current approved version on a sample SBOM set. + +### 4.4 Findings and explanations + +* `GET /findings/{policyId}?sbom_id=S-42&status=affected&severity=High+Critical` +* `GET /findings/{policyId}/{findingId}/explain` returns ordered rule hits and linked raw IDs. + +All endpoints require tenant scoping and appropriate `policy:*` or `findings:*` roles. + +--- + +## 5) Console (Policy Editor) and CLI behavior + +**Console** + +* Monaco‑style editor with DSL syntax highlighting, lint, quick docs. +* Side‑by‑side **Simulation** panel: show count of affected findings before/after. +* Approval workflow: submit, review comments, approve with rationale. +* Diffs: show rule‑wise changes and estimated impact. +* Read‑only run viewer: heatmap of rules fired, top suppressions, VEX wins. + +**CLI** + +* `stella policy new --name "Default Org Policy"` +* `stella policy edit P-7` opens local editor -> `submit` +* `stella policy approve P-7 --version 3` +* `stella policy simulate P-7 --sbom S-42 --env exposure=internal-only` +* `stella findings ls --policy P-7 --sbom S-42 --status affected` + +Exit codes map to `ERR_POL_*`. + +--- + +## 6) Implementation tasks + +### 6.1 Policy Engine service + +* [ ] Implement DSL parser and IR compiler (`stella-dsl@1`). +* [ ] Build evaluator with stable ordering and first‑match semantics. +* [ ] Implement selection joiners for SBOM↔advisory↔vex using linksets. +* [ ] Materialization writer with upsert‑only semantics to `effective_finding_{policyId}`. +* [ ] Determinism guard (ban wall‑clock, network, and RNG during eval). +* [ ] Incremental orchestrator listening to advisory/vex/SBOM change streams. +* [ ] Trace emitter with rule‑hit sampling. +* [ ] Unit tests, property tests, golden fixtures; perf tests to target SLA. + +**Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +### 6.2 Web API + +* [ ] Policy CRUD, compile, run, simulate, findings, explain endpoints. +* [ ] Pagination, filters, and tenant enforcement on all list endpoints. +* [ ] Error mapping to `ERR_POL_*`. +* [ ] Rate limits on simulate endpoints. + +**Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +### 6.3 Console (Policy Editor) + +* [ ] Editor with DSL syntax highlighting and inline diagnostics. +* [ ] Simulation UI with pre/post counts and top deltas. +* [ ] Approval workflow UI with audit trail. +* [ ] Run viewer dashboards (rule heatmap, VEX wins, suppressions). + +**Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +### 6.4 CLI + +* [ ] New commands: `policy new|edit|submit|approve|simulate`, `findings ls|get`. 
+* [ ] Json/YAML output formats for CI consumption. +* [ ] Non‑zero exits on syntax errors or simulation failures; map to `ERR_POL_*`. + +**Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +### 6.5 Conseiller & Excitator integration + +* [ ] Provide search endpoints optimized for policy selection (batch by PURLs and IDs). +* [ ] Harden linkset extraction to maximize join recall. +* [ ] Add cursors for incremental selection windows per run. + +**Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +### 6.6 SBOM Service + +* [ ] Ensure fast PURL index and component metadata projection for policy queries. +* [ ] Provide relationship graph API for future transitive logic. +* [ ] Emit change events on SBOM updates. + +**Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +### 6.7 Authority + +* [ ] Define scopes: `policy:write`, `policy:approve`, `policy:run`, `findings:read`, `effective:write`. +* [ ] Issue service identity for Policy Engine with `effective:write` only. +* [ ] Enforce tenant claims at gateway. + +**Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +### 6.8 CI/CD + +* [ ] Lint policy DSL in PRs; block invalid syntax. +* [ ] Run `simulate` against golden SBOMs to detect explosive deltas. +* [ ] Determinism CI: two runs with identical seeds produce identical outputs. + +**Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +--- + +## 7) Documentation changes (create/update these files) + +1. **`/docs/policy/overview.md`** + + * What the Policy Engine is, high‑level concepts, inputs, outputs, determinism. +2. **`/docs/policy/dsl.md`** + + * Full grammar, built‑ins, examples, best practices, anti‑patterns. +3. **`/docs/policy/lifecycle.md`** + + * Draft → submitted → approved → archived, roles, and audit trail. +4. **`/docs/policy/runs.md`** + + * Run modes, incremental mechanics, cursors, replay. +5. **`/docs/api/policy.md`** + + * Endpoints, request/response schemas, error codes. +6. **`/docs/cli/policy.md`** + + * Command usage, examples, exit codes, JSON output contracts. +7. **`/docs/ui/policy-editor.md`** + + * Screens, workflows, simulation, diffs, approvals. +8. **`/docs/architecture/policy-engine.md`** + + * Detailed sequence diagrams, selection/join strategy, materialization schema. +9. **`/docs/observability/policy.md`** + + * Metrics, tracing, logs, sample dashboards. +10. **`/docs/security/policy-governance.md`** + + * Scopes, approvals, tenancy, least privilege. +11. **`/docs/examples/policies/`** + + * `baseline.pol`, `serverless.pol`, `internal-only.pol`, each with commentary. +12. **`/docs/faq/policy-faq.md`** + + * Common pitfalls, VEX conflict handling, determinism gotchas. + +Each file includes a **Compliance checklist** for authors and reviewers. + +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +--- + +## 8) Acceptance criteria + +* Policies are versioned, approvable, and compilable; invalid DSL blocks merges. +* Engine produces deterministic outputs with full rationale chains. +* VEX precedence rules work per spec and are overridable by policy. 
+* Simulation yields accurate pre/post deltas and diffs. +* Only Policy Engine can write to `effective_finding_*`. +* Incremental runs pick up new advisories/VEX/SBOM changes without full re‑runs. +* Console and CLI cover authoring, simulation, approval, and retrieval. +* Observability dashboards show rule hits, VEX wins, and run timings. + +--- + +## 9) Risks and mitigations + +* **Policy sprawl**: too many similar policies. + + * Mitigation: templates, policy inheritance in v1.1, tagging, ownership metadata. +* **Non‑determinism creep**: someone sneaks wall‑clock or network into evaluation. + + * Mitigation: determinism guard, static analyzer, and CI replay check. +* **Join miss‑rate**: weak linksets cause under‑matching. + + * Mitigation: linkset strengthening in ingestion, PURL equivalence tables, monitoring for “zero‑hit” rates. +* **Approval bottlenecks**: blocked rollouts. + + * Mitigation: RBAC with delegated approvers and time‑boxed SLAs. + +--- + +## 10) Test plan + +* **Unit**: parser, compiler, evaluator; conflict resolution; precedence. +* **Property**: random policies over synthetic inputs; ensure no panics and stable outputs. +* **Golden**: fixed SBOM + curated advisories/VEX → expected findings; compare every run. +* **Performance**: large SBOMs with heavy rule sets; assert run times and memory ceilings. +* **Integration**: end‑to‑end simulate → approve → run → diff; verify write protections. +* **Chaos**: inject malformed VEX, missing advisories; ensure graceful degradation and clear errors. + +--- + +## 11) Developer checklists + +**Definition of Ready** + +* Policy grammar finalized; examples prepared. +* Linkset join queries benchmarked. +* Owner and approvers assigned. + +**Definition of Done** + +* All APIs live with RBAC. +* CLI and Console features shipped. +* Determinism and golden tests green. +* Observability dashboards deployed. +* Docs in section 7 merged. +* Two real org policies migrated and in production. + +--- + +## 12) Glossary + +* **Policy**: versioned rule set controlling status and severity. +* **DSL**: domain‑specific language used to express rules. +* **Run**: a single evaluation execution with defined inputs and outputs. +* **Simulation**: a run that doesn’t write findings; returns diffs. +* **Materialization**: persisted effective findings for fast queries. +* **Determinism**: same inputs + same policy = same outputs. Always. + +--- + +### Final imposed reminder + +**Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied.** diff --git a/EPIC_4.md b/docs/implplan/EPIC_4.md similarity index 97% rename from EPIC_4.md rename to docs/implplan/EPIC_4.md index f85d1a5f..af47e81a 100644 --- a/EPIC_4.md +++ b/docs/implplan/EPIC_4.md @@ -1,409 +1,409 @@ -Here’s Epic 4 in the same paste‑into‑repo, implementation‑ready style as the prior epics. It’s exhaustive, formal, and slots directly into the existing AOC model, Policy Engine, and Console. 
- ---- - -# Epic 4: Policy Studio (author, version, simulate) - -> Short name: **Policy Studio** -> Services touched: **Policy Engine**, **Policy Registry** (new), **Web API Gateway**, **Authority** (authN/Z), **Scheduler/Workers**, **SBOM Service**, **Conseiller (Feedser)**, **Excitator (Vexer)**, **Telemetry** -> Surfaces: **Console (Web UI)** feature module, **CLI**, **CI hooks** -> Deliverables: Authoring workspace, policy versioning, static checks, simulation at scale, reviews/approvals, signing/publishing, promotion - ---- - -## 1) What it is - -**Policy Studio** is the end‑to‑end system for creating, evolving, and safely rolling out the rules that turn AOC facts (SBOM, advisories, VEX) into **effective findings**. It provides: - -* A **workspace** where authors write policies in the DSL (Epic 2), with linting, autocompletion, snippets, and templates. -* A **Policy Registry** that stores immutable versions, compiled artifacts, metadata, provenance, and signatures. -* **Simulation** at two levels: quick local samples and large batch simulations across real SBOM inventories with full deltas. -* A **review/approval** workflow with comments, diffs, required approvers, and promotion to environments (dev/test/prod). -* **Publishing** semantics: signed, immutable versions bound to tenants; rollback and deprecation. -* Tight integration with **Explain** traces so any change can show exactly which rules fired and why outcomes shifted. - -The Studio respects **AOC enforcement**: policies never edit or merge facts. They only interpret facts and produce determinations consistent with precedence rules defined in the DSL. - ---- - -## 2) Why - -* Policy errors are expensive. Authors need safe sandboxes, deterministic builds, and evidence before rollout. -* Auditors require immutability, provenance, and reproducibility from “source policy” to “effective finding.” -* Teams want gradual rollout: simulate, canary, promote, observe, rollback. -* Policy knowledge should be modular, reusable, and testable, not tribal. - ---- - -## 3) How it should work (maximum detail) - -### 3.1 Domain model - -* **PolicyPackage**: `{name, tenant, description, owners[], tags[], created_at}` -* **PolicyVersion** (immutable): `{package, semver, source_sha, compiled_sha, status: draft|review|approved|published|deprecated|archived, created_by, created_at, signatures[], changelog, metadata{}}` -* **Workspace**: mutable working area for authors; holds unversioned edits until compiled. -* **CompilationArtifact**: `{policy_version, compiler_version, diagnostics[], rule_index[], symbol_table}` -* **SimulationSpec**: `{policy_version|workspace, sbom_selector, time_window?, environment?, sample_size?, severity_floor?, includes{advisories?, vex?}}` -* **SimulationRun**: `{run_id, spec, started_at, finished_at, result{counts_before, counts_after, top_deltas[], by_rule_hit[], sample_explains[]}}` -* **Review**: `{policy_version, required_approvers[], votes[], comments[], files_changed[], diffs[]}` -* **Promotion**: `{policy_version, environment: dev|test|prod, promoted_by, promoted_at, rollout_strategy: All|Percent|TenantSubset}` -* **Attestation**: OIDC‑backed signature metadata binding `source_sha` and `compiled_sha` to an actor and time. - -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -### 3.2 Authoring workflow - -1. **Create** a workspace from a template (e.g., “Default Risk Model,” “License Tilted,” “Cloud‑Native SBOM”). -2. 
**Edit** in the Studio: Monaco editor with DSL grammar, intelligent completion for predicates, policies, attributes. -3. **Lint & compile** locally: semantic checks, forbidden rules detection, policy size limits, constant‑folding. -4. **Unit tests**: run policy test cases on bundled fixtures and golden expectations. -5. **Quick simulate** on selected SBOMs (10–50 items) to preview counts, examples, and rule heatmap. -6. **Propose version**: bump semver, enter changelog; create a **PolicyVersion** in `review` with compiled artifacts. -7. **Review & approval**: side‑by‑side diff, comments, required approvers enforced by RBAC. -8. **Batch simulation**: run at scale across tenant inventory; produce deltas, sample explainer evidence. -9. **Publish**: sign and move to `published`; optional **Promotion** to target environment(s). -10. **Run** evaluation with the selected policy version; verify outcomes; optionally promote to default. -11. **Rollback**: select an older version; promotion updates references without mutating older versions. - -### 3.3 Editing experience (Console) - -* **Three‑pane layout**: file tree, editor, diagnostics/simulation. -* **Features**: autocomplete from symbol table, in‑editor docs on hover, go‑to definition, rule references, rename symbols across files, snippet library, policy templates. -* **Validations**: - - * AOC guardrails: no edit/merge actions on source facts, only interpretation. - * Precedence correctness: if rules conflict, studio shows explicit order and effective winner. - * Severity floor and normalization mapping validated against registry configuration. -* **Diagnostics panel**: errors, warnings, performance hints (e.g., “predicate X loads N advisories per component; consider indexing”). -* **Rule heatmap**: during simulation, bar chart of rule firings and the objects they impact. -* **Explain sampler**: click any delta bucket to open a sampled finding with full trace. - -### 3.4 Simulation - -* **Quick Sim**: synchronous; runs in browser‑orchestrated job against API, constrained by `sample_size`. -* **Batch Sim**: asynchronous run in workers: - - * Input selection: all SBOMs, labels, artifact regex, last N ingests, or a curated set. - * Outputs: counts by severity before/after, by status, top deltas by component and advisory, rule heatmap, top K affected artifacts. - * Evidence: NDJSON of sampled findings with traces; CSV summary; signed result manifest. - * Guardrails: cannot publish if batch sim drift > configurable threshold without an override justification. - -### 3.5 Versioning & promotion - -* Semver enforced: `major` implies compatibility break (e.g., precedence changes), `minor` adds rules, `patch` fixes. -* **Immutable**: after `published`, the version cannot change; deprecate instead. -* **Environment bindings**: dev/test/prod mapping per tenant; default policy per environment. -* **Canary**: promote to a subset of tenants or artifacts; the Runs page displays A/B comparisons. - -### 3.6 Review & approval - -* Require N approvers by role; self‑approval optionally prohibited. -* Line and file comments; overall decision with justification. -* Review snapshot captures: diffs, diagnostics, simulation summary. -* Webhooks to notify external systems of review events. - -### 3.7 RBAC (Authority) - -Roles per tenant: - -* **Policy Author**: create/edit workspace, quick sim, propose versions. -* **Policy Reviewer**: comment, request changes, approve/reject. -* **Policy Approver**: final approve, publish. 
-* **Policy Operator**: promote, rollback, schedule runs. -* **Read‑only Auditor**: view everything, download evidence. - -All actions server‑checked; UI only hides affordances. - -### 3.8 CLI + CI integration - -CLI verbs (examples): - -``` -stella policy init --template default -stella policy lint -stella policy compile -stella policy test --golden ./tests -stella policy simulate --sboms label:prod --sample 1000 -stella policy version bump --level minor --changelog "Normalize GHSA CVSS" -stella policy submit --reviewers alice@example.com,bob@example.com -stella policy approve --version 1.3.0 -stella policy publish --version 1.3.0 --sign -stella policy promote --version 1.3.0 --env test --percent 20 -stella policy rollback --env prod --to 1.2.1 -``` - -CI usage: - -* Lint, compile, and run unit tests on PRs that modify `/policies/**`. -* Optionally trigger **Batch Sim** against a staging inventory and post a Markdown report to the PR. -* Block merge if diagnostics include errors or drift exceeds thresholds. - -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -### 3.9 APIs (representative) - -* `POST /policies/workspaces` create from template -* `PUT /policies/workspaces/{id}/files` edit source files -* `POST /policies/workspaces/{id}/compile` get diagnostics + compiled artifact -* `POST /policies/workspaces/{id}/simulate` quick sim -* `POST /policies/versions` create version from workspace with semver + changelog -* `GET /policies/versions/{id}` fetch version + diagnostics + sim summary -* `POST /policies/versions/{id}/reviews` open review -* `POST /policies/versions/{id}/approve` record approval -* `POST /policies/versions/{id}/publish` sign + publish -* `POST /policies/versions/{id}/promote` bind to env/canary -* `POST /policies/versions/{id}/simulate-batch` start batch sim (async) -* `GET /policies/simulations/{run_id}` get sim results and artifacts -* `GET /policies/registry` list packages/versions, status and bindings - -All calls require tenant scoping and RBAC. - -### 3.10 Storage & data - -* **Policy Registry DB** (MongoDB): packages, versions, workspaces, metadata. -* **Object storage**: source bundles, compiled artifacts, simulation result bundles, evidence. -* **Indexing**: compound indexes by `{tenant, package}`, `{tenant, status}`, `{tenant, environment}`. -* **Retention**: configurable retention for workspaces and simulation artifacts; versions never deleted, only archived. - -### 3.11 Evidence & provenance - -* Every published version has: - - * `source_sha` (content digest of the policy source bundle) - * `compiled_sha` (digest of compiled artifact) - * Attestation: signed envelope binding digests to an identity, time, and tenant. - * Links to the exact compiler version, inputs, and environment. - -### 3.12 Observability - -* Metrics: compile time, diagnostics rate, simulation queue depth, delta magnitude distribution, approval latencies. -* Logs: structured events for lifecycle transitions. -* Traces: long simulations emit span per shard. - -### 3.13 Performance & scale - -* Compilation should complete under 3 seconds for typical policies; warn at 10s. -* Batch sim uses workers with partitioning by SBOM id; results reduced by the API. -* Memory guardrails on rule execution; deny policies that exceed configured complexity limits. - -### 3.14 Security - -* OIDC‑backed signing and attestation. -* Policy sources are scanned on upload for secrets; blocked if found. 
-* Strict CSP in Studio pages; tokens stored in memory, not localStorage. -* Tenant isolation in buckets and DB collections. - -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - ---- - -## 4) Implementation plan - -### 4.1 Services - -* **Policy Registry (new microservice)** - - * REST API and background workers for batch simulation orchestration. - * Stores workspaces, versions, metadata, bindings, reviews. - * Generates signed attestations at publish time. - * Coordinates with **Policy Engine** for compile/simulate invocations. - -* **Policy Engine (existing)** - - * Expose compile and simulate endpoints with deterministic outputs. - * Provide rule coverage, symbol table, and explain traces for samples. - -* **Web API Gateway** - - * Routes requests; injects tenant context; enforces RBAC. - -### 4.2 Console (Web UI) feature module - -* `packages/features/policies` (shared with Epic 3): - - * **Studio** routes: `/policies/studio`, `/policies/:id/versions/:v/edit`, `/simulate`, `/review`. - * Monaco editor wrapper for DSL with hover docs, autocomplete. - * Diff viewer, diagnostics, heatmap, explain sampler, review UI. - -### 4.3 CLI - -* New commands under `stella policy *`; typed client generated from OpenAPI. -* Outputs machine‑readable JSON and pretty tables. - -### 4.4 Workers - -* **Simulation workers**: pull shards of SBOMs, run policy, emit partials, reduce into result bundle. -* **Notification worker**: sends webhooks on review, approval, publish, promote. - ---- - -## 5) Documentation changes (create/update) - -1. **`/docs/policy/studio-overview.md`** - - * Concepts, roles, lifecycle, glossary. -2. **`/docs/policy/authoring.md`** - - * Workspace, templates, snippets, lint rules, best practices. -3. **`/docs/policy/versioning-and-publishing.md`** - - * Semver, immutability, deprecation, rollback, attestations. -4. **`/docs/policy/simulation.md`** - - * Quick vs batch sim, selection strategies, thresholds, evidence artifacts. -5. **`/docs/policy/review-and-approval.md`** - - * Required approvers, comments, webhooks, audit trail. -6. **`/docs/policy/promotion.md`** - - * Environments, canary, default policy binding, rollback. -7. **`/docs/policy/cli.md`** - - * Command reference with examples and JSON outputs. -8. **`/docs/policy/api.md`** - - * REST endpoints, request/response schemas, error codes. -9. **`/docs/security/policy-attestations.md`** - - * Signatures, digests, verifier steps. -10. **`/docs/architecture/policy-registry.md`** - - * Service design, schemas, queues, failure modes. -11. **`/docs/observability/policy-telemetry.md`** - - * Metrics, logs, tracing, dashboards. -12. **`/docs/runbooks/policy-incident.md`** - - * Rolling back a bad policy, freezing publishes, forensic steps. -13. **`/docs/examples/policy-templates.md`** - - * Ready‑made templates and snippet catalog. -14. **`/docs/aoc/aoc-guardrails.md`** - - * How Studio enforces AOC in authoring and review. - -Each doc ends with a “Compliance checklist.” -**Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - ---- - -## 6) Tasks - -### 6.1 Backend: Policy Registry - -* [ ] Define OpenAPI spec for Registry (workspaces, versions, reviews, sim). -* [ ] Implement workspace storage and file CRUD. -* [ ] Integrate with Policy Engine compile endpoint; return diagnostics, symbol table. -* [ ] Implement quick simulation with request limits. 
-* [ ] Implement batch simulation orchestration: enqueue shards, collect results, reduce deltas, store artifacts. -* [ ] Implement review model: comments, required approvers, decisions. -* [ ] Implement publish: sign, persist attestation, set status=published. -* [ ] Implement promotion bindings per tenant/environment; canary subsets. -* [ ] RBAC checks for all endpoints. -* [ ] Unit/integration tests; load tests for batch sim. - -### 6.2 Policy Engine enhancements - -* [ ] Return rule coverage and firing counts with compile/simulate. -* [ ] Return symbol table and inline docs for editor autocomplete. -* [ ] Expose deterministic Explain traces for sampled findings. -* [ ] Enforce complexity/time limits and report breaches. - -### 6.3 Console (Web UI) - -* [ ] Build Studio editor wrapper with Monaco + DSL language server hooks. -* [ ] Implement file tree, snippets, templates, hotkeys, search/replace. -* [ ] Diagnostics panel with jump‑to‑line, quick fixes. -* [ ] Simulation panel: quick sim UI, charts, heatmap, sample explains. -* [ ] Review UI: diff, comments, approvals, status badges. -* [ ] Publish & Promote flows with confirmation and post‑actions. -* [ ] Batch sim results pages with export buttons. -* [ ] Accessibility audits and keyboard‑only authoring flow. - -### 6.4 CLI - -* [ ] Implement commands listed in 3.8 with rich help and examples. -* [ ] Add `--json` flag for machine consumption; emit stable schemas. -* [ ] Exit codes aligned with CI usage (lint errors → non‑zero). - -### 6.5 CI/CD & Security - -* [ ] Add CI job that runs `stella policy lint/compile/test` on PRs. -* [ ] Optional job that triggers batch sim against staging inventory; post summary to PR. -* [ ] Policy source secret scanning; block on findings. -* [ ] Signing keys configuration; verify pipeline for attestation on publish. - -### 6.6 Docs - -* [ ] Write all docs in section 5 with screenshots and CLI transcripts. -* [ ] Add cookbook examples and templates in `/docs/examples/policy-templates.md`. -* [ ] Wire contextual Help links from Studio to relevant docs. - -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - ---- - -## 7) Acceptance criteria - -* Authors can create, edit, lint, compile policies with inline diagnostics and autocomplete. -* Quick simulation produces counts, rule heatmap, and sample explains within UI. -* Batch simulation scales across large SBOM sets, producing deltas and downloadable evidence. -* Review requires configured approvers; comments and diffs are preserved. -* Publish generates immutable, signed versions with attestations. -* Promotion binds versions to environments and supports canary and rollback. -* CLI supports full lifecycle and is usable in CI. -* All actions are tenant‑scoped, RBAC‑enforced, and logged. -* AOC guardrails prevent any mutation of raw facts. -* Documentation shipped and linked contextually from the Studio. - ---- - -## 8) Risks & mitigations - -* **Policy complexity causes timeouts** → compile‑time complexity scoring, execution limits, early diagnostics. -* **Simulation cost at scale** → sharding and streaming reducers; sampling; configurable quotas. -* **RBAC misconfiguration** → server‑enforced checks, defense‑in‑depth tests, deny‑by‑default. -* **Attestation key management** → OIDC‑backed signatures; auditable verifier tool; time‑boxed credentials. -* **Editor usability** → language server with accurate completions; docs on hover; snippet library. 
- ---- - -## 9) Test plan - -* **Unit**: compiler adapters, registry models, reviewers workflow, CLI options. -* **Integration**: compile→simulate→publish→promote on seeded data. -* **E2E**: Playwright flows for author→review→batch sim→publish→promote→rollback. -* **Performance**: load test batch simulation with 100k components spread across SBOMs. -* **Security**: RBAC matrix tests; secret scanning; signing and verification. -* **Determinism**: same inputs produce identical `compiled_sha` and simulation summaries. - ---- - -## 10) Feature flags - -* `policy.studio` (enables editor and quick sim) -* `policy.batch-sim` -* `policy.canary-promotion` -* `policy.signature-required` (enforce signing on publish) - -Flags documented in `/docs/observability/policy-telemetry.md`. - ---- - -## 11) Non‑goals (this epic) - -* Building a general IDE for arbitrary languages; the editor is purpose‑built for the DSL. -* Auto‑generated policies from AI without human approval. -* Cross‑tenant policies; all policies are tenant‑scoped. - ---- - -## 12) Philosophy - -* **Safety first**: it’s cheaper to prevent a bad policy than to fix its fallout. -* **Determinism**: same inputs, same outputs, verifiably. -* **Immutability**: versions and evidence are forever; we deprecate, not mutate. -* **Transparency**: every change is explainable with traces and proofs. -* **Reusability**: templates, snippets, and tests turn policy from art into engineering. - -> Final reminder: **Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied.** +Here’s Epic 4 in the same paste‑into‑repo, implementation‑ready style as the prior epics. It’s exhaustive, formal, and slots directly into the existing AOC model, Policy Engine, and Console. + +--- + +# Epic 4: Policy Studio (author, version, simulate) + +> Short name: **Policy Studio** +> Services touched: **Policy Engine**, **Policy Registry** (new), **Web API Gateway**, **Authority** (authN/Z), **Scheduler/Workers**, **SBOM Service**, **Conseiller (Feedser)**, **Excitator (Vexer)**, **Telemetry** +> Surfaces: **Console (Web UI)** feature module, **CLI**, **CI hooks** +> Deliverables: Authoring workspace, policy versioning, static checks, simulation at scale, reviews/approvals, signing/publishing, promotion + +--- + +## 1) What it is + +**Policy Studio** is the end‑to‑end system for creating, evolving, and safely rolling out the rules that turn AOC facts (SBOM, advisories, VEX) into **effective findings**. It provides: + +* A **workspace** where authors write policies in the DSL (Epic 2), with linting, autocompletion, snippets, and templates. +* A **Policy Registry** that stores immutable versions, compiled artifacts, metadata, provenance, and signatures. +* **Simulation** at two levels: quick local samples and large batch simulations across real SBOM inventories with full deltas. +* A **review/approval** workflow with comments, diffs, required approvers, and promotion to environments (dev/test/prod). +* **Publishing** semantics: signed, immutable versions bound to tenants; rollback and deprecation. +* Tight integration with **Explain** traces so any change can show exactly which rules fired and why outcomes shifted. + +The Studio respects **AOC enforcement**: policies never edit or merge facts. They only interpret facts and produce determinations consistent with precedence rules defined in the DSL. + +--- + +## 2) Why + +* Policy errors are expensive. 
Authors need safe sandboxes, deterministic builds, and evidence before rollout. +* Auditors require immutability, provenance, and reproducibility from “source policy” to “effective finding.” +* Teams want gradual rollout: simulate, canary, promote, observe, rollback. +* Policy knowledge should be modular, reusable, and testable, not tribal. + +--- + +## 3) How it should work (maximum detail) + +### 3.1 Domain model + +* **PolicyPackage**: `{name, tenant, description, owners[], tags[], created_at}` +* **PolicyVersion** (immutable): `{package, semver, source_sha, compiled_sha, status: draft|review|approved|published|deprecated|archived, created_by, created_at, signatures[], changelog, metadata{}}` +* **Workspace**: mutable working area for authors; holds unversioned edits until compiled. +* **CompilationArtifact**: `{policy_version, compiler_version, diagnostics[], rule_index[], symbol_table}` +* **SimulationSpec**: `{policy_version|workspace, sbom_selector, time_window?, environment?, sample_size?, severity_floor?, includes{advisories?, vex?}}` +* **SimulationRun**: `{run_id, spec, started_at, finished_at, result{counts_before, counts_after, top_deltas[], by_rule_hit[], sample_explains[]}}` +* **Review**: `{policy_version, required_approvers[], votes[], comments[], files_changed[], diffs[]}` +* **Promotion**: `{policy_version, environment: dev|test|prod, promoted_by, promoted_at, rollout_strategy: All|Percent|TenantSubset}` +* **Attestation**: OIDC‑backed signature metadata binding `source_sha` and `compiled_sha` to an actor and time. + +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +### 3.2 Authoring workflow + +1. **Create** a workspace from a template (e.g., “Default Risk Model,” “License Tilted,” “Cloud‑Native SBOM”). +2. **Edit** in the Studio: Monaco editor with DSL grammar, intelligent completion for predicates, policies, attributes. +3. **Lint & compile** locally: semantic checks, forbidden rules detection, policy size limits, constant‑folding. +4. **Unit tests**: run policy test cases on bundled fixtures and golden expectations. +5. **Quick simulate** on selected SBOMs (10–50 items) to preview counts, examples, and rule heatmap. +6. **Propose version**: bump semver, enter changelog; create a **PolicyVersion** in `review` with compiled artifacts. +7. **Review & approval**: side‑by‑side diff, comments, required approvers enforced by RBAC. +8. **Batch simulation**: run at scale across tenant inventory; produce deltas, sample explainer evidence. +9. **Publish**: sign and move to `published`; optional **Promotion** to target environment(s). +10. **Run** evaluation with the selected policy version; verify outcomes; optionally promote to default. +11. **Rollback**: select an older version; promotion updates references without mutating older versions. + +### 3.3 Editing experience (Console) + +* **Three‑pane layout**: file tree, editor, diagnostics/simulation. +* **Features**: autocomplete from symbol table, in‑editor docs on hover, go‑to definition, rule references, rename symbols across files, snippet library, policy templates. +* **Validations**: + + * AOC guardrails: no edit/merge actions on source facts, only interpretation. + * Precedence correctness: if rules conflict, studio shows explicit order and effective winner. + * Severity floor and normalization mapping validated against registry configuration. 
+* **Diagnostics panel**: errors, warnings, performance hints (e.g., “predicate X loads N advisories per component; consider indexing”). +* **Rule heatmap**: during simulation, bar chart of rule firings and the objects they impact. +* **Explain sampler**: click any delta bucket to open a sampled finding with full trace. + +### 3.4 Simulation + +* **Quick Sim**: synchronous; runs in browser‑orchestrated job against API, constrained by `sample_size`. +* **Batch Sim**: asynchronous run in workers: + + * Input selection: all SBOMs, labels, artifact regex, last N ingests, or a curated set. + * Outputs: counts by severity before/after, by status, top deltas by component and advisory, rule heatmap, top K affected artifacts. + * Evidence: NDJSON of sampled findings with traces; CSV summary; signed result manifest. + * Guardrails: cannot publish if batch sim drift > configurable threshold without an override justification. + +### 3.5 Versioning & promotion + +* Semver enforced: `major` implies compatibility break (e.g., precedence changes), `minor` adds rules, `patch` fixes. +* **Immutable**: after `published`, the version cannot change; deprecate instead. +* **Environment bindings**: dev/test/prod mapping per tenant; default policy per environment. +* **Canary**: promote to a subset of tenants or artifacts; the Runs page displays A/B comparisons. + +### 3.6 Review & approval + +* Require N approvers by role; self‑approval optionally prohibited. +* Line and file comments; overall decision with justification. +* Review snapshot captures: diffs, diagnostics, simulation summary. +* Webhooks to notify external systems of review events. + +### 3.7 RBAC (Authority) + +Roles per tenant: + +* **Policy Author**: create/edit workspace, quick sim, propose versions. +* **Policy Reviewer**: comment, request changes, approve/reject. +* **Policy Approver**: final approve, publish. +* **Policy Operator**: promote, rollback, schedule runs. +* **Read‑only Auditor**: view everything, download evidence. + +All actions server‑checked; UI only hides affordances. + +### 3.8 CLI + CI integration + +CLI verbs (examples): + +``` +stella policy init --template default +stella policy lint +stella policy compile +stella policy test --golden ./tests +stella policy simulate --sboms label:prod --sample 1000 +stella policy version bump --level minor --changelog "Normalize GHSA CVSS" +stella policy submit --reviewers alice@example.com,bob@example.com +stella policy approve --version 1.3.0 +stella policy publish --version 1.3.0 --sign +stella policy promote --version 1.3.0 --env test --percent 20 +stella policy rollback --env prod --to 1.2.1 +``` + +CI usage: + +* Lint, compile, and run unit tests on PRs that modify `/policies/**`. +* Optionally trigger **Batch Sim** against a staging inventory and post a Markdown report to the PR. +* Block merge if diagnostics include errors or drift exceeds thresholds. + +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. 
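+One way to wire the CI usage above into a single merge gate: a short Python sketch that shells out to the CLI verbs listed earlier and fails on the first non-zero exit, matching the "block merge on diagnostics errors" rule. The staging selector and sample size are illustrative values, not defaults.
+
+```python
+import subprocess
+import sys
+
+# Ordered CI gate over the CLI verbs listed above; any non-zero exit fails the pipeline.
+STEPS = [
+    ["stella", "policy", "lint"],
+    ["stella", "policy", "compile"],
+    ["stella", "policy", "test", "--golden", "./tests"],
+    # Batch sim is optional in CI; the selector and sample size are illustrative.
+    ["stella", "policy", "simulate", "--sboms", "label:staging", "--sample", "1000"],
+]
+
+def main() -> int:
+    for step in STEPS:
+        print("::", " ".join(step))
+        result = subprocess.run(step)
+        if result.returncode != 0:
+            print(f"policy gate failed at: {' '.join(step)}", file=sys.stderr)
+            return result.returncode
+    return 0
+
+if __name__ == "__main__":
+    sys.exit(main())
+```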
+ +### 3.9 APIs (representative) + +* `POST /policies/workspaces` create from template +* `PUT /policies/workspaces/{id}/files` edit source files +* `POST /policies/workspaces/{id}/compile` get diagnostics + compiled artifact +* `POST /policies/workspaces/{id}/simulate` quick sim +* `POST /policies/versions` create version from workspace with semver + changelog +* `GET /policies/versions/{id}` fetch version + diagnostics + sim summary +* `POST /policies/versions/{id}/reviews` open review +* `POST /policies/versions/{id}/approve` record approval +* `POST /policies/versions/{id}/publish` sign + publish +* `POST /policies/versions/{id}/promote` bind to env/canary +* `POST /policies/versions/{id}/simulate-batch` start batch sim (async) +* `GET /policies/simulations/{run_id}` get sim results and artifacts +* `GET /policies/registry` list packages/versions, status and bindings + +All calls require tenant scoping and RBAC. + +### 3.10 Storage & data + +* **Policy Registry DB** (MongoDB): packages, versions, workspaces, metadata. +* **Object storage**: source bundles, compiled artifacts, simulation result bundles, evidence. +* **Indexing**: compound indexes by `{tenant, package}`, `{tenant, status}`, `{tenant, environment}`. +* **Retention**: configurable retention for workspaces and simulation artifacts; versions never deleted, only archived. + +### 3.11 Evidence & provenance + +* Every published version has: + + * `source_sha` (content digest of the policy source bundle) + * `compiled_sha` (digest of compiled artifact) + * Attestation: signed envelope binding digests to an identity, time, and tenant. + * Links to the exact compiler version, inputs, and environment. + +### 3.12 Observability + +* Metrics: compile time, diagnostics rate, simulation queue depth, delta magnitude distribution, approval latencies. +* Logs: structured events for lifecycle transitions. +* Traces: long simulations emit span per shard. + +### 3.13 Performance & scale + +* Compilation should complete under 3 seconds for typical policies; warn at 10s. +* Batch sim uses workers with partitioning by SBOM id; results reduced by the API. +* Memory guardrails on rule execution; deny policies that exceed configured complexity limits. + +### 3.14 Security + +* OIDC‑backed signing and attestation. +* Policy sources are scanned on upload for secrets; blocked if found. +* Strict CSP in Studio pages; tokens stored in memory, not localStorage. +* Tenant isolation in buckets and DB collections. + +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +--- + +## 4) Implementation plan + +### 4.1 Services + +* **Policy Registry (new microservice)** + + * REST API and background workers for batch simulation orchestration. + * Stores workspaces, versions, metadata, bindings, reviews. + * Generates signed attestations at publish time. + * Coordinates with **Policy Engine** for compile/simulate invocations. + +* **Policy Engine (existing)** + + * Expose compile and simulate endpoints with deterministic outputs. + * Provide rule coverage, symbol table, and explain traces for samples. + +* **Web API Gateway** + + * Routes requests; injects tenant context; enforces RBAC. + +### 4.2 Console (Web UI) feature module + +* `packages/features/policies` (shared with Epic 3): + + * **Studio** routes: `/policies/studio`, `/policies/:id/versions/:v/edit`, `/simulate`, `/review`. + * Monaco editor wrapper for DSL with hover docs, autocomplete. 
+ * Diff viewer, diagnostics, heatmap, explain sampler, review UI. + +### 4.3 CLI + +* New commands under `stella policy *`; typed client generated from OpenAPI. +* Outputs machine‑readable JSON and pretty tables. + +### 4.4 Workers + +* **Simulation workers**: pull shards of SBOMs, run policy, emit partials, reduce into result bundle. +* **Notification worker**: sends webhooks on review, approval, publish, promote. + +--- + +## 5) Documentation changes (create/update) + +1. **`/docs/policy/studio-overview.md`** + + * Concepts, roles, lifecycle, glossary. +2. **`/docs/policy/authoring.md`** + + * Workspace, templates, snippets, lint rules, best practices. +3. **`/docs/policy/versioning-and-publishing.md`** + + * Semver, immutability, deprecation, rollback, attestations. +4. **`/docs/policy/simulation.md`** + + * Quick vs batch sim, selection strategies, thresholds, evidence artifacts. +5. **`/docs/policy/review-and-approval.md`** + + * Required approvers, comments, webhooks, audit trail. +6. **`/docs/policy/promotion.md`** + + * Environments, canary, default policy binding, rollback. +7. **`/docs/policy/cli.md`** + + * Command reference with examples and JSON outputs. +8. **`/docs/policy/api.md`** + + * REST endpoints, request/response schemas, error codes. +9. **`/docs/security/policy-attestations.md`** + + * Signatures, digests, verifier steps. +10. **`/docs/architecture/policy-registry.md`** + + * Service design, schemas, queues, failure modes. +11. **`/docs/observability/policy-telemetry.md`** + + * Metrics, logs, tracing, dashboards. +12. **`/docs/runbooks/policy-incident.md`** + + * Rolling back a bad policy, freezing publishes, forensic steps. +13. **`/docs/examples/policy-templates.md`** + + * Ready‑made templates and snippet catalog. +14. **`/docs/aoc/aoc-guardrails.md`** + + * How Studio enforces AOC in authoring and review. + +Each doc ends with a “Compliance checklist.” +**Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +--- + +## 6) Tasks + +### 6.1 Backend: Policy Registry + +* [ ] Define OpenAPI spec for Registry (workspaces, versions, reviews, sim). +* [ ] Implement workspace storage and file CRUD. +* [ ] Integrate with Policy Engine compile endpoint; return diagnostics, symbol table. +* [ ] Implement quick simulation with request limits. +* [ ] Implement batch simulation orchestration: enqueue shards, collect results, reduce deltas, store artifacts. +* [ ] Implement review model: comments, required approvers, decisions. +* [ ] Implement publish: sign, persist attestation, set status=published. +* [ ] Implement promotion bindings per tenant/environment; canary subsets. +* [ ] RBAC checks for all endpoints. +* [ ] Unit/integration tests; load tests for batch sim. + +### 6.2 Policy Engine enhancements + +* [ ] Return rule coverage and firing counts with compile/simulate. +* [ ] Return symbol table and inline docs for editor autocomplete. +* [ ] Expose deterministic Explain traces for sampled findings. +* [ ] Enforce complexity/time limits and report breaches. + +### 6.3 Console (Web UI) + +* [ ] Build Studio editor wrapper with Monaco + DSL language server hooks. +* [ ] Implement file tree, snippets, templates, hotkeys, search/replace. +* [ ] Diagnostics panel with jump‑to‑line, quick fixes. +* [ ] Simulation panel: quick sim UI, charts, heatmap, sample explains. +* [ ] Review UI: diff, comments, approvals, status badges. +* [ ] Publish & Promote flows with confirmation and post‑actions. 
+* [ ] Batch sim results pages with export buttons. +* [ ] Accessibility audits and keyboard‑only authoring flow. + +### 6.4 CLI + +* [ ] Implement commands listed in 3.8 with rich help and examples. +* [ ] Add `--json` flag for machine consumption; emit stable schemas. +* [ ] Exit codes aligned with CI usage (lint errors → non‑zero). + +### 6.5 CI/CD & Security + +* [ ] Add CI job that runs `stella policy lint/compile/test` on PRs. +* [ ] Optional job that triggers batch sim against staging inventory; post summary to PR. +* [ ] Policy source secret scanning; block on findings. +* [ ] Signing keys configuration; verify pipeline for attestation on publish. + +### 6.6 Docs + +* [ ] Write all docs in section 5 with screenshots and CLI transcripts. +* [ ] Add cookbook examples and templates in `/docs/examples/policy-templates.md`. +* [ ] Wire contextual Help links from Studio to relevant docs. + +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +--- + +## 7) Acceptance criteria + +* Authors can create, edit, lint, compile policies with inline diagnostics and autocomplete. +* Quick simulation produces counts, rule heatmap, and sample explains within UI. +* Batch simulation scales across large SBOM sets, producing deltas and downloadable evidence. +* Review requires configured approvers; comments and diffs are preserved. +* Publish generates immutable, signed versions with attestations. +* Promotion binds versions to environments and supports canary and rollback. +* CLI supports full lifecycle and is usable in CI. +* All actions are tenant‑scoped, RBAC‑enforced, and logged. +* AOC guardrails prevent any mutation of raw facts. +* Documentation shipped and linked contextually from the Studio. + +--- + +## 8) Risks & mitigations + +* **Policy complexity causes timeouts** → compile‑time complexity scoring, execution limits, early diagnostics. +* **Simulation cost at scale** → sharding and streaming reducers; sampling; configurable quotas. +* **RBAC misconfiguration** → server‑enforced checks, defense‑in‑depth tests, deny‑by‑default. +* **Attestation key management** → OIDC‑backed signatures; auditable verifier tool; time‑boxed credentials. +* **Editor usability** → language server with accurate completions; docs on hover; snippet library. + +--- + +## 9) Test plan + +* **Unit**: compiler adapters, registry models, reviewers workflow, CLI options. +* **Integration**: compile→simulate→publish→promote on seeded data. +* **E2E**: Playwright flows for author→review→batch sim→publish→promote→rollback. +* **Performance**: load test batch simulation with 100k components spread across SBOMs. +* **Security**: RBAC matrix tests; secret scanning; signing and verification. +* **Determinism**: same inputs produce identical `compiled_sha` and simulation summaries. + +--- + +## 10) Feature flags + +* `policy.studio` (enables editor and quick sim) +* `policy.batch-sim` +* `policy.canary-promotion` +* `policy.signature-required` (enforce signing on publish) + +Flags documented in `/docs/observability/policy-telemetry.md`. + +--- + +## 11) Non‑goals (this epic) + +* Building a general IDE for arbitrary languages; the editor is purpose‑built for the DSL. +* Auto‑generated policies from AI without human approval. +* Cross‑tenant policies; all policies are tenant‑scoped. + +--- + +## 12) Philosophy + +* **Safety first**: it’s cheaper to prevent a bad policy than to fix its fallout. 
+* **Determinism**: same inputs, same outputs, verifiably. +* **Immutability**: versions and evidence are forever; we deprecate, not mutate. +* **Transparency**: every change is explainable with traces and proofs. +* **Reusability**: templates, snippets, and tests turn policy from art into engineering. + +> Final reminder: **Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied.** diff --git a/EPIC_5.md b/docs/implplan/EPIC_5.md similarity index 97% rename from EPIC_5.md rename to docs/implplan/EPIC_5.md index 9fac8b0c..ce689b2d 100644 --- a/EPIC_5.md +++ b/docs/implplan/EPIC_5.md @@ -1,431 +1,431 @@ -Here’s Epic 5 in the same paste‑into‑repo, implementation‑ready format as the prior epics. It’s exhaustive, formal, and designed to slot into AOC, Policy Engine, Conseiller/Excitator, and the Console. - ---- - -# Epic 5: SBOM Graph Explorer - -> Short name: **Graph Explorer** -> Services touched: **SBOM Service**, **Graph Indexer** (new), **Graph API** (new), **Policy Engine**, **Conseiller (Feedser)**, **Excitator (Vexer)**, **Web API Gateway**, **Authority** (authN/Z), **Workers/Scheduler**, **Telemetry** -> Surfaces: **Console (Web UI)** graph module, **CLI**, **Exports** -> Deliverables: Interactive graph UI with semantic zoom, saved queries, policy/VEX/advisory overlays, diff views, impact analysis, exports - ---- - -## 1) What it is - -**SBOM Graph Explorer** is the interactive, tenant‑scoped view of all supply‑chain relationships the platform knows about, rendered as a navigable graph. It connects: - -* **Artifacts** (applications, images, libs), **Packages/Versions**, **Files/Paths**, **Licenses**, **Advisories** (from Conseiller), **VEX statements** (from Excitator), **Provenance** (builds, sources), and **Policies** (overlays of determinations) -* **Edges** like `depends_on`, `contains`, `built_from`, `declared_in`, `affected_by`, `vex_exempts`, `governs_with` -* **Time/version** dimension: multiple SBOM snapshots with diffs - -It’s built for investigation and review: find where a vulnerable package enters; see which apps are impacted; understand why a finding exists; simulate a policy version and see the delta. The explorer observes **AOC enforcement**: it never mutates facts; it aggregates and visualizes them. Only the Policy Engine may classify, and classification is displayed as overlays. - ---- - -## 2) Why - -* SBOMs are graphs. Tables flatten what matters and hide transitive risk. -* Engineers, security, and auditors need impact answers quickly: “What pulls in `log4j:2.17` and where is it at runtime?” -* Policy/VEX/advisory interactions are nuanced. A visual overlay makes precedence and outcomes obvious. -* Review is collaborative; you need saved queries, deep links, exports, and consistent evidence. 
- ---- - -## 3) How it should work (maximum detail) - -### 3.1 Domain model - -**Nodes** (typed, versioned, tenant‑scoped): - -* `Artifact`: application, service, container image, library, module -* `Package`: name + ecosystem (purl), `PackageVersion` with resolved version -* `File`: path within artifact or image layer -* `License`: SPDX id -* `Advisory`: normalized advisory id (GHSA, CVE, vendor), source = Conseiller -* `VEX`: statement with product context, status, justification, source = Excitator -* `SBOM`: ingestion unit; includes metadata (tool, sha, build info) -* `PolicyDetermination`: materialized view of Policy Engine results (read‑only overlay) -* `Build`: provenance, commit, workflow run -* `Source`: repo, tag, commit - -**Edges** (directed): - -* `declared_in` (PackageVersion → SBOM) -* `contains` (Artifact → PackageVersion | File) -* `depends_on` (PackageVersion → PackageVersion) with scope attr (prod|dev|test|optional) -* `built_from` (Artifact → Build), `provenance_of` (Build → Source) -* `affected_by` (PackageVersion → Advisory) with range semantics -* `vex_exempts` (Advisory ↔ VEX) scoped by product/component -* `licensed_under` (Artifact|PackageVersion → License) -* `governs_with` (Artifact|PackageVersion → PolicyDetermination) -* `derived_from` (SBOM → SBOM) for superseding snapshots - -**Identity & versioning** - -* Every node has a stable key: `{tenant}:{type}:{natural_id}` (e.g., purl for packages, digest for images). -* SBOM snapshots are immutable; edges carry `valid_from`/`valid_to` for time travel and diffing. - -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -### 3.2 User capabilities (end‑to‑end) - -* **Search & Navigate**: global search (purls, CVEs, repos, licenses), keyboard nav, breadcrumbs, semantic zoom. -* **Lenses**: toggle views (Security, License, Provenance, Runtime vs Dev, Policy effect). -* **Overlays**: - - * **Advisory overlay**: show affected nodes/edges with source, severity, ranges. - * **VEX overlay**: show suppressions/justifications; collapse exempted paths. - * **Policy overlay**: choose a policy version; nodes/edges reflect determinations (severity, status) with explain sampling. -* **Impact analysis**: pick a vulnerable node; highlight upstream/downstream dependents, scope filters, shortest/all paths with constraints. -* **Diff view**: compare SBOM A vs B; show added/removed nodes/edges, changed versions, changed determinations. -* **Saved queries**: visual builder + JSON query; shareable permalinks scoped by tenant and environment. -* **Exports**: GraphML, CSV edge list, NDJSON of findings, PNG/SVG snapshot. -* **Evidence details**: side panel with raw facts, advisory links, VEX statements, policy explain trace, provenance. -* **Accessibility**: tab‑navigable, high‑contrast, screen‑reader labels for nodes and sidebars. 
- -### 3.3 Query model - -* **Visual builder** for common queries: - - * “Show all paths from Artifact X to Advisory Y up to depth 6.” - * “All runtime dependencies with license = GPL‑3.0.” - * “All artifacts affected by GHSA‑… with no applicable VEX.” - * “Which SBOMs introduced/removed `openssl` between build 120 and 130?” -* **JSON query** (internal, POST body) with: - - * `start`: list of node selectors (type + id or attributes) - * `expand`: edge types and depth, direction, scope filters - * `where`: predicates on node/edge attributes - * `overlay`: policy version id, advisory sources, VEX filters - * `limit`: nodes, edges, timebox, cost budget -* **Cost control**: server estimates cost, denies or pages results; UI streams partial graph tiles. - -### 3.4 UI architecture (Console) - -* **Canvas**: WebGL renderer with level‑of‑detail, edge bundling, and label culling; deterministic layout when possible (seeded). -* **Semantic zoom**: - - * Far: clusters by artifact/repo/ecosystem, color by lens - * Mid: package groups, advisory badges, license swatches - * Near: concrete versions, direct edges, inline badges for policy determinations -* **Panels**: - - * Left: search, filters, lens selector, saved queries - * Right: details, explain trace, evidence tabs (Advisory/VEX/Policy/Provenance) - * Bottom: query expression, diagnostics, performance/stream status -* **Diff mode**: split or overlay, color legend (add/remove/changed), filter by node type. -* **Deep links**: URL encodes query + viewport; shareable respecting RBAC. -* **Keyboard**: space drag, +/- zoom, F to focus, G to expand neighbors, P to show paths. - -### 3.5 Back‑end architecture - -**Graph Indexer (new)** - -* Consumes SBOM ingests, Conseiller advisories, Excitator VEX statements, Policy Engine determinations (read‑only). -* Projects facts into a **property graph** persisted in: - - * Primary: document store + adjacency sets (e.g., Mongo collections + compressed adjacency lists) - * Optional driver for graph DB backends if needed (pluggable) -* Maintains materialized aggregates: degree, critical paths cache, affected artifact counts, license distribution. -* Emits **graph snapshots** per SBOM with lineage to original ingestion. - -**Graph API (new)** - -* Endpoints for search, neighbor expansion, path queries, diffs, overlays, exports. -* Streaming responses for large graphs (chunked NDJSON tiles). -* Cost accounting + quotas per tenant. - -**Workers** - -* **Centrality & clustering** precompute on idle: betweenness approximations, connected components, Louvain clusters. -* **Diff compute** on new SBOM ingestion pairs (previous vs current). -* **Overlay materialization** cache for popular policy versions. - -**Policy Engine integration** - -* Graph API requests can specify a policy version. -* For sampled nodes, the API fetches explain traces; for counts, uses precomputed overlay materializations where available. - -**AOC enforcement** - -* Graph Indexer never merges or edits advisories/VEX; it links them and exposes overlays that the Policy Engine evaluates. -* Conseiller and Excitator remain authoritative sources; severities come from Policy‑governed normalization. 
- -### 3.6 APIs (representative) - -* `GET /graph/search?q=...&type=package|artifact|advisory|license` -* `POST /graph/query` ⇒ stream tiles `{nodes[], edges[], stats, cursor}` -* `POST /graph/paths` body: `{from, to, depth<=6, constraints{scope, runtime_only}}` -* `POST /graph/diff` body: `{sbom_a, sbom_b, filters}` -* `GET /graph/snapshots/{sbom_id}` ⇒ graph metadata, counts, top advisories -* `POST /graph/export` body: `{format: graphml|csv|ndjson|png|svg, query|snapshot}` -* `GET /graph/saved` / `POST /graph/saved` save and list tenant queries -* `GET /graph/overlays/policy/{version_id}` ⇒ summary stats for caching - -All endpoints tenant‑scoped, RBAC‑checked. Timeouts and pagination by server. Errors return structured diagnostics. - -### 3.7 CLI - -``` -stella sbom graph search "purl:pkg:maven/org.apache.logging.log4j/log4j-core" -stella sbom graph query --file ./query.json --export graphml > graph.graphml -stella sbom graph impacted --advisory GHSA-xxxx --runtime-only --limit 100 -stella sbom graph paths --from artifact:service-a --to advisory:GHSA-xxxx --depth 5 --policy 1.3.0 -stella sbom graph diff --sbom-a 2025-03-15T10:00Z --sbom-b 2025-03-22T10:00Z --export csv > diff.csv -stella sbom graph save --name "openssl-runtime" --file ./query.json -``` - -Exit codes: 0 ok, 2 query validation error, 3 over‑budget, 4 not found, 5 RBAC denied. - -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -### 3.8 Performance & scale - -* **Progressive loading**: server pages tiles by BFS frontier; client renders incrementally. -* **Viewport culling**: only visible nodes/edges in canvas; offscreen demoted to aggregates. -* **Level‑of‑detail**: simplified glyphs and collapsed clusters at distance. -* **Query budgets**: per‑tenant rate + node/edge caps; interactive paths limited to depth ≤ 6. -* **Caching**: hot queries memoized per tenant + overlay version; diffs precomputed for consecutive SBOMs. - -### 3.9 Security - -* Multi‑tenant isolation at storage and API layers. -* RBAC roles: - - * **Viewer**: browse graphs, saved queries - * **Investigator**: run queries, export data - * **Operator**: configure budgets, purge caches - * **Auditor**: download evidence bundles -* Input validation for query JSON; deny disallowed edge traversals; strict CSP in web app. - -### 3.10 Observability - -* Metrics: tile latency, nodes/edges per tile, cache hit rate, query denials, memory pressure. -* Logs: structured, include query hash, cost, truncation flags. -* Traces: server spans per stage (parse, plan, fetch, overlay, stream). - -### 3.11 Accessibility & UX guarantees - -* Keyboard complete, ARIA roles for graph and panels, high‑contrast theme. -* Deterministic layout on reload for shareable investigations. - -### 3.12 Data retention - -* Graph nodes derived from SBOMs share retention with SBOM artifacts; overlays are ephemeral caches. -* Saved queries retained until deleted; references to missing objects show warnings. - ---- - -## 4) Implementation plan - -### 4.1 Services - -* **Graph Indexer (new microservice)** - - * Subscribes to SBOM ingest events, Conseiller advisory updates, Excitator VEX updates, Policy overlay materializations. - * Builds adjacency lists and node documents; computes aggregates and clusters. - -* **Graph API (new microservice)** - - * Validates and executes queries; streams tiles; composes overlays; serves diffs and exports. - * Integrates with Policy Engine for explain sample retrieval. 
- -* **SBOM Service (existing)** - - * Emits ingestion events with SBOM ids and lineage; exposes SBOM metadata to Graph API. - -* **Web API Gateway** - - * Routes `/graph/*`, injects tenant context, enforces RBAC. - -### 4.2 Console (Web UI) feature module - -* `packages/features/graph-explorer` - - * Canvas renderer (WebGL), panels, query builder, diff mode, overlays, exports. - * Deep‑link router and viewport state serializer. - -### 4.3 Workers - -* Centrality/clustering worker, diff worker, overlay materialization worker. -* Schedules on low‑traffic windows; backpressure aware. - -### 4.4 Data model (storage) - -* Collections: - - * `graph_nodes`: `{_id, tenant, type, natural_id, attrs, degree, created_at, updated_at}` - * `graph_edges`: `{_id, tenant, from_id, to_id, type, attrs, valid_from, valid_to}` - * `graph_snapshots`: per‑SBOM node/edge references - * `graph_saved_queries`: `{_id, tenant, name, query_json, created_by}` - * `graph_overlays_cache`: keyed by `{tenant, policy_version, hash(query)}` -* Indexes: compound on `{tenant, type, natural_id}`, `{tenant, from_id}`, `{tenant, to_id}`, time bounds. - ---- - -## 5) Documentation changes (create/update) - -1. **`/docs/sbom/graph-explorer-overview.md`** - - * Concepts, node/edge taxonomy, lenses, overlays, roles, limitations. -2. **`/docs/sbom/graph-using-the-console.md`** - - * Walkthroughs: search, navigate, impact, diff, export; screenshots and keyboard cheatsheet. -3. **`/docs/sbom/graph-query-language.md`** - - * JSON schema, examples, constraints, cost/budget rules. -4. **`/docs/sbom/graph-api.md`** - - * REST endpoints, request/response examples, streaming and pagination. -5. **`/docs/sbom/graph-cli.md`** - - * CLI command reference and example pipelines. -6. **`/docs/policy/graph-overlays.md`** - - * How policy versions render in Graph; explain sampling; AOC guardrails. -7. **`/docs/vex/graph-integration.md`** - - * How VEX suppressions appear and how to validate product scoping. -8. **`/docs/advisories/graph-integration.md`** - - * Advisory linkage and severity normalization by policy. -9. **`/docs/architecture/graph-services.md`** - - * Graph Indexer, Graph API, storage choices, failure modes. -10. **`/docs/observability/graph-telemetry.md`** - - * Metrics, logs, tracing, dashboards. -11. **`/docs/runbooks/graph-incidents.md`** - - * Handling runaway queries, cache poisoning, degraded render. -12. **`/docs/security/graph-rbac.md`** - - * Permissions matrix, multi‑tenant boundaries. - -Every doc should end with a “Compliance checklist.” -**Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - ---- - -## 6) Tasks - -### 6.1 Backend: Graph Indexer - -* [ ] Define node/edge schemas and attribute dictionaries for each type. -* [ ] Implement event consumers for SBOM ingests, Conseiller updates, Excitator updates. -* [ ] Build ingestion pipeline that populates nodes/edges and maintains `valid_from/valid_to`. -* [ ] Implement aggregate counters and degree metrics. -* [ ] Implement clustering job and persist cluster ids per node. -* [ ] Implement snapshot materialization per SBOM and lineage tracking. -* [ ] Unit tests for each node/edge builder; property‑based tests for identity stability. - -### 6.2 Backend: Graph API - -* [ ] Implement `/graph/search` with prefix and exact match across node types. -* [ ] Implement `/graph/query` with validation, planning, cost estimation, and streaming tile results. 
-* [ ] Implement `/graph/paths` with constraints and depth limits; shortest path heuristic. -* [ ] Implement `/graph/diff` computing adds/removes/changed versions; stream results. -* [ ] Implement overlays: advisory join, VEX join, policy materialization and explain sampling. -* [ ] Implement exports: GraphML, CSV edge list, NDJSON findings, PNG/SVG snapshots. -* [ ] RBAC middleware integration; multi‑tenant scoping. -* [ ] Load tests with synthetic large SBOMs; define default budgets. - -### 6.3 Policy Engine integration - -* [ ] Add endpoint to fetch explain traces for specific node ids in batch. -* [ ] Add materialization export that Graph API can cache per policy version. - -### 6.4 Console (Web UI) - -* [ ] Create `graph-explorer` module with routes `/graph`, `/graph/diff`, `/graph/q/:id`. -* [ ] Implement WebGL canvas with LOD, culling, edge bundling, deterministic layout seed. -* [ ] Build search, filter, lens, and overlay toolbars. -* [ ] Side panels: details, evidence tabs, explain viewer. -* [ ] Diff mode: split/overlay toggles and color legend. -* [ ] Saved queries: create, update, run; deep links. -* [ ] Export UI: formats, server round‑trip, progress indicators. -* [ ] a11y audit and keyboard‑only flow. - -### 6.5 CLI - -* [ ] Implement `stella sbom graph *` subcommands with JSON IO and piping support. -* [ ] Document examples and stable output schemas for CI consumption. - -### 6.6 Observability & Ops - -* [ ] Dashboards for tile latency, query denials, cache hit rate, memory. -* [ ] Alerting on query error spikes, OOM risk, cache churn. -* [ ] Runbooks in `/docs/runbooks/graph-incidents.md`. - -### 6.7 Docs - -* [ ] Author all docs in section 5, link from Console contextual help. -* [ ] Add end‑to‑end tutorial: “Investigate GHSA‑XXXX across prod artifacts.” - -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - ---- - -## 7) Acceptance criteria - -* Console renders large SBOM graphs with semantic zoom, overlays, and responsive interactions. -* Users can run impact and path queries with bounded depth and get results within budget. -* VEX suppressions and advisory severities appear correctly and are consistent with policy. -* Diff view clearly shows added/removed/changed nodes/edges between two SBOMs. -* Saved queries and deep links reproduce the same view deterministically (given same data). -* Exports produce valid GraphML/CSV/NDJSON and image snapshots. -* CLI supports search, query, paths, impacted, diff, and export with stable schemas. -* AOC guardrails: explorer never mutates facts; overlays reflect Policy Engine decisions. -* RBAC enforced; all actions logged and observable. - ---- - -## 8) Risks & mitigations - -* **Graph explosion on large monorepos** → tiling, clustering, budgets, and strict depth limits. -* **Inconsistent identities across tools** → canonicalize purls/digests; property‑based tests for identity stability. -* **Policy overlay latency** → precompute materializations for hot policy versions; sample explains only on focus. -* **User confusion** → strong lens defaults, deterministic layouts, legends, in‑context help. - ---- - -## 9) Test plan - -* **Unit**: node/edge builders, identity normalization, cost estimator. -* **Integration**: ingest SBOM + advisories + VEX, verify overlays and counts. -* **E2E**: Playwright flows for search→impact→diff→export; deep link determinism. -* **Performance**: simulate 500k nodes/2M edges; measure tile latency and memory. 
-* **Security**: RBAC matrix; tenant isolation tests; query validation fuzzing. -* **Determinism**: snapshot round‑trip: same query and seed produce identical layout and stats. - ---- - -## 10) Feature flags - -* `graph.explorer` (UI feature module) -* `graph.paths` (advanced path queries) -* `graph.diff` (SBOM diff mode) -* `graph.overlays.policy` (policy overlay + explain sampling) -* `graph.export` (exports enabled) - -Documented in `/docs/observability/graph-telemetry.md`. - ---- - -## 11) Non‑goals (this epic) - -* Real‑time process/runtime call graphs. -* Full substitution for text reports; Explorer complements Reports. -* Cross‑tenant graphs; all queries are tenant‑scoped. - ---- - -## 12) Philosophy - -* **See the system**: security and license risk are structural. If you cannot see structure, you will miss risk. -* **Evidence over assertion**: every colored node corresponds to raw facts and explainable determinations. -* **Bounded interactivity**: fast, partial answers beat slow “complete” ones. -* **Immutability**: graphs mirror SBOM snapshots and are never rewritten; we add context, not edits. - -> Final reminder: **Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied.** +Here’s Epic 5 in the same paste‑into‑repo, implementation‑ready format as the prior epics. It’s exhaustive, formal, and designed to slot into AOC, Policy Engine, Conseiller/Excitator, and the Console. + +--- + +# Epic 5: SBOM Graph Explorer + +> Short name: **Graph Explorer** +> Services touched: **SBOM Service**, **Graph Indexer** (new), **Graph API** (new), **Policy Engine**, **Conseiller (Feedser)**, **Excitator (Vexer)**, **Web API Gateway**, **Authority** (authN/Z), **Workers/Scheduler**, **Telemetry** +> Surfaces: **Console (Web UI)** graph module, **CLI**, **Exports** +> Deliverables: Interactive graph UI with semantic zoom, saved queries, policy/VEX/advisory overlays, diff views, impact analysis, exports + +--- + +## 1) What it is + +**SBOM Graph Explorer** is the interactive, tenant‑scoped view of all supply‑chain relationships the platform knows about, rendered as a navigable graph. It connects: + +* **Artifacts** (applications, images, libs), **Packages/Versions**, **Files/Paths**, **Licenses**, **Advisories** (from Conseiller), **VEX statements** (from Excitator), **Provenance** (builds, sources), and **Policies** (overlays of determinations) +* **Edges** like `depends_on`, `contains`, `built_from`, `declared_in`, `affected_by`, `vex_exempts`, `governs_with` +* **Time/version** dimension: multiple SBOM snapshots with diffs + +It’s built for investigation and review: find where a vulnerable package enters; see which apps are impacted; understand why a finding exists; simulate a policy version and see the delta. The explorer observes **AOC enforcement**: it never mutates facts; it aggregates and visualizes them. Only the Policy Engine may classify, and classification is displayed as overlays. + +--- + +## 2) Why + +* SBOMs are graphs. Tables flatten what matters and hide transitive risk. +* Engineers, security, and auditors need impact answers quickly: “What pulls in `log4j:2.17` and where is it at runtime?” +* Policy/VEX/advisory interactions are nuanced. A visual overlay makes precedence and outcomes obvious. +* Review is collaborative; you need saved queries, deep links, exports, and consistent evidence. 
+ +--- + +## 3) How it should work (maximum detail) + +### 3.1 Domain model + +**Nodes** (typed, versioned, tenant‑scoped): + +* `Artifact`: application, service, container image, library, module +* `Package`: name + ecosystem (purl), `PackageVersion` with resolved version +* `File`: path within artifact or image layer +* `License`: SPDX id +* `Advisory`: normalized advisory id (GHSA, CVE, vendor), source = Conseiller +* `VEX`: statement with product context, status, justification, source = Excitator +* `SBOM`: ingestion unit; includes metadata (tool, sha, build info) +* `PolicyDetermination`: materialized view of Policy Engine results (read‑only overlay) +* `Build`: provenance, commit, workflow run +* `Source`: repo, tag, commit + +**Edges** (directed): + +* `declared_in` (PackageVersion → SBOM) +* `contains` (Artifact → PackageVersion | File) +* `depends_on` (PackageVersion → PackageVersion) with scope attr (prod|dev|test|optional) +* `built_from` (Artifact → Build), `provenance_of` (Build → Source) +* `affected_by` (PackageVersion → Advisory) with range semantics +* `vex_exempts` (Advisory ↔ VEX) scoped by product/component +* `licensed_under` (Artifact|PackageVersion → License) +* `governs_with` (Artifact|PackageVersion → PolicyDetermination) +* `derived_from` (SBOM → SBOM) for superseding snapshots + +**Identity & versioning** + +* Every node has a stable key: `{tenant}:{type}:{natural_id}` (e.g., purl for packages, digest for images). +* SBOM snapshots are immutable; edges carry `valid_from`/`valid_to` for time travel and diffing. + +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +### 3.2 User capabilities (end‑to‑end) + +* **Search & Navigate**: global search (purls, CVEs, repos, licenses), keyboard nav, breadcrumbs, semantic zoom. +* **Lenses**: toggle views (Security, License, Provenance, Runtime vs Dev, Policy effect). +* **Overlays**: + + * **Advisory overlay**: show affected nodes/edges with source, severity, ranges. + * **VEX overlay**: show suppressions/justifications; collapse exempted paths. + * **Policy overlay**: choose a policy version; nodes/edges reflect determinations (severity, status) with explain sampling. +* **Impact analysis**: pick a vulnerable node; highlight upstream/downstream dependents, scope filters, shortest/all paths with constraints. +* **Diff view**: compare SBOM A vs B; show added/removed nodes/edges, changed versions, changed determinations. +* **Saved queries**: visual builder + JSON query; shareable permalinks scoped by tenant and environment. +* **Exports**: GraphML, CSV edge list, NDJSON of findings, PNG/SVG snapshot. +* **Evidence details**: side panel with raw facts, advisory links, VEX statements, policy explain trace, provenance. +* **Accessibility**: tab‑navigable, high‑contrast, screen‑reader labels for nodes and sidebars. 
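+
+Before turning to the query model, a minimal sketch of how a node and an edge might be keyed and versioned under the identity scheme in §3.1; the field names mirror the collection shapes listed later in §4.4, while the tenant, attribute values, and advisory id are hypothetical placeholders.
+
+```
+{
+  "node": {
+    "tenant": "acme",
+    "type": "PackageVersion",
+    "natural_id": "pkg:maven/org.apache.logging.log4j/log4j-core@2.17.0",
+    "attrs": { "ecosystem": "maven", "scope": "prod" },
+    "degree": 12
+  },
+  "edge": {
+    "tenant": "acme",
+    "type": "affected_by",
+    "from_id": "acme:PackageVersion:pkg:maven/org.apache.logging.log4j/log4j-core@2.17.0",
+    "to_id": "acme:Advisory:GHSA-xxxx",
+    "attrs": { "range": "< 2.17.1" },
+    "valid_from": "2025-03-15T10:00:00Z",
+    "valid_to": null
+  }
+}
+```
+
+One reading of the `valid_from`/`valid_to` bounds is that a superseding SBOM ingest closes the interval on outdated edges rather than rewriting them, which keeps snapshots immutable and diffable.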
+ +### 3.3 Query model + +* **Visual builder** for common queries: + + * “Show all paths from Artifact X to Advisory Y up to depth 6.” + * “All runtime dependencies with license = GPL‑3.0.” + * “All artifacts affected by GHSA‑… with no applicable VEX.” + * “Which SBOMs introduced/removed `openssl` between build 120 and 130?” +* **JSON query** (internal, POST body) with: + + * `start`: list of node selectors (type + id or attributes) + * `expand`: edge types and depth, direction, scope filters + * `where`: predicates on node/edge attributes + * `overlay`: policy version id, advisory sources, VEX filters + * `limit`: nodes, edges, timebox, cost budget +* **Cost control**: server estimates cost, denies or pages results; UI streams partial graph tiles. + +### 3.4 UI architecture (Console) + +* **Canvas**: WebGL renderer with level‑of‑detail, edge bundling, and label culling; deterministic layout when possible (seeded). +* **Semantic zoom**: + + * Far: clusters by artifact/repo/ecosystem, color by lens + * Mid: package groups, advisory badges, license swatches + * Near: concrete versions, direct edges, inline badges for policy determinations +* **Panels**: + + * Left: search, filters, lens selector, saved queries + * Right: details, explain trace, evidence tabs (Advisory/VEX/Policy/Provenance) + * Bottom: query expression, diagnostics, performance/stream status +* **Diff mode**: split or overlay, color legend (add/remove/changed), filter by node type. +* **Deep links**: URL encodes query + viewport; shareable respecting RBAC. +* **Keyboard**: space drag, +/- zoom, F to focus, G to expand neighbors, P to show paths. + +### 3.5 Back‑end architecture + +**Graph Indexer (new)** + +* Consumes SBOM ingests, Conseiller advisories, Excitator VEX statements, Policy Engine determinations (read‑only). +* Projects facts into a **property graph** persisted in: + + * Primary: document store + adjacency sets (e.g., Mongo collections + compressed adjacency lists) + * Optional driver for graph DB backends if needed (pluggable) +* Maintains materialized aggregates: degree, critical paths cache, affected artifact counts, license distribution. +* Emits **graph snapshots** per SBOM with lineage to original ingestion. + +**Graph API (new)** + +* Endpoints for search, neighbor expansion, path queries, diffs, overlays, exports. +* Streaming responses for large graphs (chunked NDJSON tiles). +* Cost accounting + quotas per tenant. + +**Workers** + +* **Centrality & clustering** precompute on idle: betweenness approximations, connected components, Louvain clusters. +* **Diff compute** on new SBOM ingestion pairs (previous vs current). +* **Overlay materialization** cache for popular policy versions. + +**Policy Engine integration** + +* Graph API requests can specify a policy version. +* For sampled nodes, the API fetches explain traces; for counts, uses precomputed overlay materializations where available. + +**AOC enforcement** + +* Graph Indexer never merges or edits advisories/VEX; it links them and exposes overlays that the Policy Engine evaluates. +* Conseiller and Excitator remain authoritative sources; severities come from Policy‑governed normalization. 
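+
+Bridging the query model in §3.3 and the representative APIs below, a minimal sketch of a `POST /graph/query` body; the top‑level keys follow §3.3, while the nested field names and values are illustrative assumptions rather than a fixed schema.
+
+```
+{
+  "start": [
+    { "type": "package", "purl": "pkg:maven/org.apache.logging.log4j/log4j-core" }
+  ],
+  "expand": { "edges": ["depends_on", "contains"], "direction": "inbound", "depth": 4, "scope": ["prod"] },
+  "where": { "node_type": ["artifact"] },
+  "overlay": { "policy_version": "1.3.0", "advisory_sources": ["GHSA"], "vex": "hide_exempted" },
+  "limit": { "nodes": 5000, "edges": 20000, "time_ms": 30000 }
+}
+```
+
+The server would estimate the cost of such a request up front and either stream result tiles or deny it, per the budget rules in §3.3 and §3.8.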
+ +### 3.6 APIs (representative) + +* `GET /graph/search?q=...&type=package|artifact|advisory|license` +* `POST /graph/query` ⇒ stream tiles `{nodes[], edges[], stats, cursor}` +* `POST /graph/paths` body: `{from, to, depth<=6, constraints{scope, runtime_only}}` +* `POST /graph/diff` body: `{sbom_a, sbom_b, filters}` +* `GET /graph/snapshots/{sbom_id}` ⇒ graph metadata, counts, top advisories +* `POST /graph/export` body: `{format: graphml|csv|ndjson|png|svg, query|snapshot}` +* `GET /graph/saved` / `POST /graph/saved` save and list tenant queries +* `GET /graph/overlays/policy/{version_id}` ⇒ summary stats for caching + +All endpoints tenant‑scoped, RBAC‑checked. Timeouts and pagination by server. Errors return structured diagnostics. + +### 3.7 CLI + +``` +stella sbom graph search "purl:pkg:maven/org.apache.logging.log4j/log4j-core" +stella sbom graph query --file ./query.json --export graphml > graph.graphml +stella sbom graph impacted --advisory GHSA-xxxx --runtime-only --limit 100 +stella sbom graph paths --from artifact:service-a --to advisory:GHSA-xxxx --depth 5 --policy 1.3.0 +stella sbom graph diff --sbom-a 2025-03-15T10:00Z --sbom-b 2025-03-22T10:00Z --export csv > diff.csv +stella sbom graph save --name "openssl-runtime" --file ./query.json +``` + +Exit codes: 0 ok, 2 query validation error, 3 over‑budget, 4 not found, 5 RBAC denied. + +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +### 3.8 Performance & scale + +* **Progressive loading**: server pages tiles by BFS frontier; client renders incrementally. +* **Viewport culling**: only visible nodes/edges in canvas; offscreen demoted to aggregates. +* **Level‑of‑detail**: simplified glyphs and collapsed clusters at distance. +* **Query budgets**: per‑tenant rate + node/edge caps; interactive paths limited to depth ≤ 6. +* **Caching**: hot queries memoized per tenant + overlay version; diffs precomputed for consecutive SBOMs. + +### 3.9 Security + +* Multi‑tenant isolation at storage and API layers. +* RBAC roles: + + * **Viewer**: browse graphs, saved queries + * **Investigator**: run queries, export data + * **Operator**: configure budgets, purge caches + * **Auditor**: download evidence bundles +* Input validation for query JSON; deny disallowed edge traversals; strict CSP in web app. + +### 3.10 Observability + +* Metrics: tile latency, nodes/edges per tile, cache hit rate, query denials, memory pressure. +* Logs: structured, include query hash, cost, truncation flags. +* Traces: server spans per stage (parse, plan, fetch, overlay, stream). + +### 3.11 Accessibility & UX guarantees + +* Keyboard complete, ARIA roles for graph and panels, high‑contrast theme. +* Deterministic layout on reload for shareable investigations. + +### 3.12 Data retention + +* Graph nodes derived from SBOMs share retention with SBOM artifacts; overlays are ephemeral caches. +* Saved queries retained until deleted; references to missing objects show warnings. + +--- + +## 4) Implementation plan + +### 4.1 Services + +* **Graph Indexer (new microservice)** + + * Subscribes to SBOM ingest events, Conseiller advisory updates, Excitator VEX updates, Policy overlay materializations. + * Builds adjacency lists and node documents; computes aggregates and clusters. + +* **Graph API (new microservice)** + + * Validates and executes queries; streams tiles; composes overlays; serves diffs and exports. + * Integrates with Policy Engine for explain sample retrieval. 
+ +* **SBOM Service (existing)** + + * Emits ingestion events with SBOM ids and lineage; exposes SBOM metadata to Graph API. + +* **Web API Gateway** + + * Routes `/graph/*`, injects tenant context, enforces RBAC. + +### 4.2 Console (Web UI) feature module + +* `packages/features/graph-explorer` + + * Canvas renderer (WebGL), panels, query builder, diff mode, overlays, exports. + * Deep‑link router and viewport state serializer. + +### 4.3 Workers + +* Centrality/clustering worker, diff worker, overlay materialization worker. +* Schedules on low‑traffic windows; backpressure aware. + +### 4.4 Data model (storage) + +* Collections: + + * `graph_nodes`: `{_id, tenant, type, natural_id, attrs, degree, created_at, updated_at}` + * `graph_edges`: `{_id, tenant, from_id, to_id, type, attrs, valid_from, valid_to}` + * `graph_snapshots`: per‑SBOM node/edge references + * `graph_saved_queries`: `{_id, tenant, name, query_json, created_by}` + * `graph_overlays_cache`: keyed by `{tenant, policy_version, hash(query)}` +* Indexes: compound on `{tenant, type, natural_id}`, `{tenant, from_id}`, `{tenant, to_id}`, time bounds. + +--- + +## 5) Documentation changes (create/update) + +1. **`/docs/sbom/graph-explorer-overview.md`** + + * Concepts, node/edge taxonomy, lenses, overlays, roles, limitations. +2. **`/docs/sbom/graph-using-the-console.md`** + + * Walkthroughs: search, navigate, impact, diff, export; screenshots and keyboard cheatsheet. +3. **`/docs/sbom/graph-query-language.md`** + + * JSON schema, examples, constraints, cost/budget rules. +4. **`/docs/sbom/graph-api.md`** + + * REST endpoints, request/response examples, streaming and pagination. +5. **`/docs/sbom/graph-cli.md`** + + * CLI command reference and example pipelines. +6. **`/docs/policy/graph-overlays.md`** + + * How policy versions render in Graph; explain sampling; AOC guardrails. +7. **`/docs/vex/graph-integration.md`** + + * How VEX suppressions appear and how to validate product scoping. +8. **`/docs/advisories/graph-integration.md`** + + * Advisory linkage and severity normalization by policy. +9. **`/docs/architecture/graph-services.md`** + + * Graph Indexer, Graph API, storage choices, failure modes. +10. **`/docs/observability/graph-telemetry.md`** + + * Metrics, logs, tracing, dashboards. +11. **`/docs/runbooks/graph-incidents.md`** + + * Handling runaway queries, cache poisoning, degraded render. +12. **`/docs/security/graph-rbac.md`** + + * Permissions matrix, multi‑tenant boundaries. + +Every doc should end with a “Compliance checklist.” +**Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +--- + +## 6) Tasks + +### 6.1 Backend: Graph Indexer + +* [ ] Define node/edge schemas and attribute dictionaries for each type. +* [ ] Implement event consumers for SBOM ingests, Conseiller updates, Excitator updates. +* [ ] Build ingestion pipeline that populates nodes/edges and maintains `valid_from/valid_to`. +* [ ] Implement aggregate counters and degree metrics. +* [ ] Implement clustering job and persist cluster ids per node. +* [ ] Implement snapshot materialization per SBOM and lineage tracking. +* [ ] Unit tests for each node/edge builder; property‑based tests for identity stability. + +### 6.2 Backend: Graph API + +* [ ] Implement `/graph/search` with prefix and exact match across node types. +* [ ] Implement `/graph/query` with validation, planning, cost estimation, and streaming tile results. 
+* [ ] Implement `/graph/paths` with constraints and depth limits; shortest path heuristic. +* [ ] Implement `/graph/diff` computing adds/removes/changed versions; stream results. +* [ ] Implement overlays: advisory join, VEX join, policy materialization and explain sampling. +* [ ] Implement exports: GraphML, CSV edge list, NDJSON findings, PNG/SVG snapshots. +* [ ] RBAC middleware integration; multi‑tenant scoping. +* [ ] Load tests with synthetic large SBOMs; define default budgets. + +### 6.3 Policy Engine integration + +* [ ] Add endpoint to fetch explain traces for specific node ids in batch. +* [ ] Add materialization export that Graph API can cache per policy version. + +### 6.4 Console (Web UI) + +* [ ] Create `graph-explorer` module with routes `/graph`, `/graph/diff`, `/graph/q/:id`. +* [ ] Implement WebGL canvas with LOD, culling, edge bundling, deterministic layout seed. +* [ ] Build search, filter, lens, and overlay toolbars. +* [ ] Side panels: details, evidence tabs, explain viewer. +* [ ] Diff mode: split/overlay toggles and color legend. +* [ ] Saved queries: create, update, run; deep links. +* [ ] Export UI: formats, server round‑trip, progress indicators. +* [ ] a11y audit and keyboard‑only flow. + +### 6.5 CLI + +* [ ] Implement `stella sbom graph *` subcommands with JSON IO and piping support. +* [ ] Document examples and stable output schemas for CI consumption. + +### 6.6 Observability & Ops + +* [ ] Dashboards for tile latency, query denials, cache hit rate, memory. +* [ ] Alerting on query error spikes, OOM risk, cache churn. +* [ ] Runbooks in `/docs/runbooks/graph-incidents.md`. + +### 6.7 Docs + +* [ ] Author all docs in section 5, link from Console contextual help. +* [ ] Add end‑to‑end tutorial: “Investigate GHSA‑XXXX across prod artifacts.” + +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +--- + +## 7) Acceptance criteria + +* Console renders large SBOM graphs with semantic zoom, overlays, and responsive interactions. +* Users can run impact and path queries with bounded depth and get results within budget. +* VEX suppressions and advisory severities appear correctly and are consistent with policy. +* Diff view clearly shows added/removed/changed nodes/edges between two SBOMs. +* Saved queries and deep links reproduce the same view deterministically (given same data). +* Exports produce valid GraphML/CSV/NDJSON and image snapshots. +* CLI supports search, query, paths, impacted, diff, and export with stable schemas. +* AOC guardrails: explorer never mutates facts; overlays reflect Policy Engine decisions. +* RBAC enforced; all actions logged and observable. + +--- + +## 8) Risks & mitigations + +* **Graph explosion on large monorepos** → tiling, clustering, budgets, and strict depth limits. +* **Inconsistent identities across tools** → canonicalize purls/digests; property‑based tests for identity stability. +* **Policy overlay latency** → precompute materializations for hot policy versions; sample explains only on focus. +* **User confusion** → strong lens defaults, deterministic layouts, legends, in‑context help. + +--- + +## 9) Test plan + +* **Unit**: node/edge builders, identity normalization, cost estimator. +* **Integration**: ingest SBOM + advisories + VEX, verify overlays and counts. +* **E2E**: Playwright flows for search→impact→diff→export; deep link determinism. +* **Performance**: simulate 500k nodes/2M edges; measure tile latency and memory. 
+* **Security**: RBAC matrix; tenant isolation tests; query validation fuzzing. +* **Determinism**: snapshot round‑trip: same query and seed produce identical layout and stats. + +--- + +## 10) Feature flags + +* `graph.explorer` (UI feature module) +* `graph.paths` (advanced path queries) +* `graph.diff` (SBOM diff mode) +* `graph.overlays.policy` (policy overlay + explain sampling) +* `graph.export` (exports enabled) + +Documented in `/docs/observability/graph-telemetry.md`. + +--- + +## 11) Non‑goals (this epic) + +* Real‑time process/runtime call graphs. +* Full substitution for text reports; Explorer complements Reports. +* Cross‑tenant graphs; all queries are tenant‑scoped. + +--- + +## 12) Philosophy + +* **See the system**: security and license risk are structural. If you cannot see structure, you will miss risk. +* **Evidence over assertion**: every colored node corresponds to raw facts and explainable determinations. +* **Bounded interactivity**: fast, partial answers beat slow “complete” ones. +* **Immutability**: graphs mirror SBOM snapshots and are never rewritten; we add context, not edits. + +> Final reminder: **Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied.** diff --git a/EPIC_6.md b/docs/implplan/EPIC_6.md similarity index 99% rename from EPIC_6.md rename to docs/implplan/EPIC_6.md index 89cb0e1d..7de7a5dd 100644 --- a/EPIC_6.md +++ b/docs/implplan/EPIC_6.md @@ -499,11 +499,11 @@ CREATE INDEX fp_query ON findings_projection(tenant, policy_version, effective_s ### 4.2 Code structure ``` -/src/StellaOps.Findings.Ledger +/src/Findings/StellaOps.Findings.Ledger /api /projector /storage -/src/StellaOps.VulnExplorer.Api +/src/VulnExplorer/StellaOps.VulnExplorer.Api /routes /query /simulation @@ -512,7 +512,7 @@ CREATE INDEX fp_query ON findings_projection(tenant, policy_version, effective_s /components /pages /state -/src/StellaOps.Cli +/src/Cli/StellaOps.Cli ``` ### 4.3 Performance tasks diff --git a/EPIC_7.md b/docs/implplan/EPIC_7.md similarity index 99% rename from EPIC_7.md rename to docs/implplan/EPIC_7.md index 8e9e7958..6208ecd5 100644 --- a/EPIC_7.md +++ b/docs/implplan/EPIC_7.md @@ -398,17 +398,17 @@ Targets: ### 4.2 Code structure ``` -/src/StellaOps.VexLens +/src/VexLens/StellaOps.VexLens /normalizer /mapping # CPE/purl translators /trust # weighting functions /consensus # algorithm and projections /api -/src/StellaOps.Excititor # updates -/src/StellaOps.Policy # updates -/src/StellaOps.IssuerDirectory +/src/Excititor # updates +/src/Policy/__Libraries/StellaOps.Policy # updates +/src/IssuerDirectory/StellaOps.IssuerDirectory /packages/console/features/vex-consensus -/src/StellaOps.Cli +/src/Cli/StellaOps.Cli ``` ### 4.3 Rollout diff --git a/EPIC_8.md b/docs/implplan/EPIC_8.md similarity index 99% rename from EPIC_8.md rename to docs/implplan/EPIC_8.md index 101501e0..babe78f5 100644 --- a/EPIC_8.md +++ b/docs/implplan/EPIC_8.md @@ -284,7 +284,7 @@ Exit codes: `0` ok, `2` invalid args, `4` not found, `5` denied, `7` validation ### 4.1 Services and components -* **New:** `src/StellaOps.AdvisoryAI` +* **New:** `src/AdvisoryAI/StellaOps.AdvisoryAI` * `retriever/` wrappers for Conseiller, Excitator, VEX Lens, SBOM. * `deterministic/` version and path analyzers. 
diff --git a/EPIC_9.md b/docs/implplan/EPIC_9.md similarity index 98% rename from EPIC_9.md rename to docs/implplan/EPIC_9.md index dd4f3ec1..199d1458 100644 --- a/EPIC_9.md +++ b/docs/implplan/EPIC_9.md @@ -319,7 +319,7 @@ Exit codes: `0` success, `2` invalid args, `4` not found, `5` denied, `7` precon ### 4.1 Modules (new and updated) -* New service: `src/StellaOps.Orchestrator` +* New service: `src/Orchestrator/StellaOps.Orchestrator` * `api/` REST + WS handlers * `scheduler/` run planner, DAG builder, watermark/backfill logic @@ -332,7 +332,7 @@ Exit codes: `0` success, `2` invalid args, `4` not found, `5` denied, `7` precon * Worker SDKs: - * `src/StellaOps.Orchestrator.WorkerSdk.Go` and `src/StellaOps.Orchestrator.WorkerSdk.Python` with job claim, heartbeat, progress, artifact publish, and structured error reporting. + * `src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Go` and `src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python` with job claim, heartbeat, progress, artifact publish, and structured error reporting. * Console: diff --git a/EXECPLAN.md b/docs/implplan/EXECPLAN.md similarity index 68% rename from EXECPLAN.md rename to docs/implplan/EXECPLAN.md index 826d2530..eca9b102 100644 --- a/EXECPLAN.md +++ b/docs/implplan/EXECPLAN.md @@ -3,195 +3,195 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster ## Wave Instructions ### Wave 0 -- Team Authority Core & Security Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Authority/TASKS.md`. Focus on AUTH-DPOP-11-001 (DONE 2025-10-20), AUTH-MTLS-11-002 (DONE 2025-10-23). Confirm prerequisites (none) before starting and report status in module TASKS.md. -- Team Authority Core & Storage Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Authority/TASKS.md`. Focus on AUTHSTORAGE-MONGO-08-001 (DONE 2025-10-19). Confirm prerequisites (none) before starting and report status in module TASKS.md. -- Team DevEx/CLI: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Cli/TASKS.md`. Focus on EXCITITOR-CLI-01-002 (TODO), CLI-RUNTIME-13-005 (TODO). Confirm prerequisites (external: EXCITITOR-CLI-01-001, EXCITITOR-EXPORT-01-001) before starting and report status in module TASKS.md. +- Team Authority Core & Security Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/Authority/StellaOps.Authority/TASKS.md`. Focus on AUTH-DPOP-11-001 (DONE 2025-10-20), AUTH-MTLS-11-002 (DONE 2025-10-23). Confirm prerequisites (none) before starting and report status in module TASKS.md. +- Team Authority Core & Storage Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/Authority/StellaOps.Authority/TASKS.md`. Focus on AUTHSTORAGE-MONGO-08-001 (DONE 2025-10-19). Confirm prerequisites (none) before starting and report status in module TASKS.md. +- Team DevEx/CLI: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/Cli/StellaOps.Cli/TASKS.md`. Focus on EXCITITOR-CLI-01-002 (TODO), CLI-RUNTIME-13-005 (TODO). Confirm prerequisites (external: EXCITITOR-CLI-01-001, EXCITITOR-EXPORT-01-001) before starting and report status in module TASKS.md. - Team DevOps Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `ops/devops/TASKS.md`. Focus on DEVOPS-SEC-10-301 (DONE 2025-10-20); Wave 0A prerequisites reconfirmed so remediation work may proceed. Keep module TASKS.md/Sprints in sync as patches land. -- Team Diff Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Scanner.Diff/TASKS.md`. 
SCANNER-DIFF-10-501/502/503 all closed on 2025-10-19; keep determinism fixtures green and sync downstream consumers as Emit/Diff integration tickets arise. +- Team Diff Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/Scanner/__Libraries/StellaOps.Scanner.Diff/TASKS.md`. SCANNER-DIFF-10-501/502/503 all closed on 2025-10-19; keep determinism fixtures green and sync downstream consumers as Emit/Diff integration tickets arise. - Team Docs Guild, Plugin Team: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `docs/TASKS.md`. Focus on DOC4.AUTH-PDG (REVIEW). Confirm prerequisites (none) before starting and report status in module TASKS.md. -- Team Docs/CLI: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Cli/TASKS.md`. Focus on EXCITITOR-CLI-01-003 (TODO). Confirm prerequisites (external: EXCITITOR-CLI-01-001) before starting and report status in module TASKS.md. -- Team Emit Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Scanner.Emit/TASKS.md`. Sprint 10 composition milestones (10-601..10-606) wrapped 2025-10-22 and SCANNER-EMIT-10-607 completed alongside; remaining watch item is SCANNER-EMIT-17-701 (Wave 1) with build-id enrichment. -- Team EntryTrace Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Scanner.EntryTrace/TASKS.md`. SCANNER-ENTRYTRACE-10-401..407 landed 2025-10-19; continue monitoring determinism harness outputs and raise follow-ups if new interpreter cases appear. -- Team Language Analyzer Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Scanner.Analyzers.Lang/SPRINTS_LANG_IMPLEMENTATION_PLAN.md`, `src/StellaOps.Scanner.Analyzers.Lang/TASKS.md`. Java, shared helpers, determinism harness, and the Sprint 10 analyzers (10-301..10-309) are DONE (latest 2025-10-22); keep fixture refresh notes current and pivot to Wave 1 benchmarking/packaging follow-ups. -- Team Notify Models Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Notify.Models/TASKS.md`. Focus on NOTIFY-MODELS-15-101 (TODO), NOTIFY-MODELS-15-102 (TODO), NOTIFY-MODELS-15-103 (TODO). Confirm prerequisites (none) before starting and report status in module TASKS.md. -- Team Notify Storage Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Notify.Storage.Mongo/TASKS.md`. Focus on NOTIFY-STORAGE-15-201 (TODO), NOTIFY-STORAGE-15-202 (TODO), NOTIFY-STORAGE-15-203 (TODO). Confirm prerequisites (none) before starting and report status in module TASKS.md. -- Team Notify WebService Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Notify.WebService/TASKS.md`. Focus on NOTIFY-WEB-15-101 (TODO), NOTIFY-WEB-15-102 (TODO). Confirm prerequisites (none) before starting and report status in module TASKS.md. +- Team Docs/CLI: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/Cli/StellaOps.Cli/TASKS.md`. Focus on EXCITITOR-CLI-01-003 (TODO). Confirm prerequisites (external: EXCITITOR-CLI-01-001) before starting and report status in module TASKS.md. +- Team Emit Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/Scanner/__Libraries/StellaOps.Scanner.Emit/TASKS.md`. Sprint 10 composition milestones (10-601..10-606) wrapped 2025-10-22 and SCANNER-EMIT-10-607 completed alongside; remaining watch item is SCANNER-EMIT-17-701 (Wave 1) with build-id enrichment. +- Team EntryTrace Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/TASKS.md`. 
SCANNER-ENTRYTRACE-10-401..407 landed 2025-10-19; continue monitoring determinism harness outputs and raise follow-ups if new interpreter cases appear. +- Team Language Analyzer Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/SPRINTS_LANG_IMPLEMENTATION_PLAN.md`, `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/TASKS.md`. Java, shared helpers, determinism harness, and the Sprint 10 analyzers (10-301..10-309) are DONE (latest 2025-10-22); keep fixture refresh notes current and pivot to Wave 1 benchmarking/packaging follow-ups. +- Team Notify Models Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/Notify/__Libraries/StellaOps.Notify.Models/TASKS.md`. Focus on NOTIFY-MODELS-15-101 (TODO), NOTIFY-MODELS-15-102 (TODO), NOTIFY-MODELS-15-103 (TODO). Confirm prerequisites (none) before starting and report status in module TASKS.md. +- Team Notify Storage Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/TASKS.md`. Focus on NOTIFY-STORAGE-15-201 (TODO), NOTIFY-STORAGE-15-202 (TODO), NOTIFY-STORAGE-15-203 (TODO). Confirm prerequisites (none) before starting and report status in module TASKS.md. +- Team Notify WebService Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/Notify/StellaOps.Notify.WebService/TASKS.md`. Focus on NOTIFY-WEB-15-101 (TODO), NOTIFY-WEB-15-102 (TODO). Confirm prerequisites (none) before starting and report status in module TASKS.md. - Team Platform Events Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `docs/TASKS.md`. Focus on PLATFORM-EVENTS-09-401 (TODO). Confirm prerequisites (external: DOCS-EVENTS-09-003) before starting and report status in module TASKS.md. -- Team Plugin Platform Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Plugin/TASKS.md`. Focus on PLUGIN-DI-08-002.COORD (DONE 2025-10-20), PLUGIN-DI-08-002 (DONE 2025-10-20), PLUGIN-DI-08-003 (DONE 2025-10-20), PLUGIN-DI-08-004 (DONE 2025-10-20), and PLUGIN-DI-08-005 (DONE 2025-10-20). Confirm prerequisites (PLUGIN-DI-08-001) before starting and report status in module TASKS.md. -- Team Plugin Platform Guild, Authority Core: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Plugin/TASKS.md`. Coordination session for PLUGIN-DI-08-002 implementation completed on 2025-10-20 15:00–16:05 UTC and scoped-service changes have shipped with regression coverage; subsequent tasks (PLUGIN-DI-08-003/004/005) remain green. +- Team Plugin Platform Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/__Libraries/StellaOps.Plugin/TASKS.md`. Focus on PLUGIN-DI-08-002.COORD (DONE 2025-10-20), PLUGIN-DI-08-002 (DONE 2025-10-20), PLUGIN-DI-08-003 (DONE 2025-10-20), PLUGIN-DI-08-004 (DONE 2025-10-20), and PLUGIN-DI-08-005 (DONE 2025-10-20). Confirm prerequisites (PLUGIN-DI-08-001) before starting and report status in module TASKS.md. +- Team Plugin Platform Guild, Authority Core: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/__Libraries/StellaOps.Plugin/TASKS.md`. Coordination session for PLUGIN-DI-08-002 implementation completed on 2025-10-20 15:00–16:05 UTC and scoped-service changes have shipped with regression coverage; subsequent tasks (PLUGIN-DI-08-003/004/005) remain green. - Team Policy Guild: Sprint 9 core tasks (POLICY-CORE-09-004/005/006) closed on 2025-10-19; ensure downstream consumers refresh against the published scoring config + quiet/unknown outputs and raise follow-up tasks if additional polish is required. 
- Team Runtime Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `docs/TASKS.md`. Focus on RUNTIME-GUILD-09-402 (TODO). Confirm prerequisites (external: SCANNER-POLICY-09-107) before starting and report status in module TASKS.md. -- Team Scanner WebService Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Scanner.WebService/TASKS.md`. Focus on SCANNER-EVENTS-15-201 (DONE 2025-10-20). Confirm prerequisites (none) before starting and report status in module TASKS.md. -- Team Scheduler ImpactIndex Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Scheduler.ImpactIndex/TASKS.md`. Focus on SCHED-IMPACT-16-300 (DONE 2025-10-20) and ensure the temporary stub removal note stays tracked. Confirm prerequisites (external: SAMPLES-10-001) before starting and report status in module TASKS.md. -- Team Scheduler Models Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Scheduler.Models/TASKS.md`. SCHED-MODELS-16-103 completed (2025-10-20); ensure downstream teams consume the migration helpers and log upgrade warnings. -- Team Scheduler Queue Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Scheduler.Queue/TASKS.md`. SCHED-QUEUE-16-401 completed (2025-10-20); proceed with Wave 1 queue enhancements. -- Team Scheduler Storage Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Scheduler.Storage.Mongo/TASKS.md`. Focus on SCHED-STORAGE-16-201 (TODO). Confirm prerequisites (external: SCHED-MODELS-16-101) before starting and report status in module TASKS.md. -- Team Scheduler WebService Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Scheduler.WebService/TASKS.md`. Focus on SCHED-WEB-16-101 (TODO). Confirm prerequisites (external: SCHED-MODELS-16-101) before starting and report status in module TASKS.md. -- Team Signer Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Signer/TASKS.md`. Focus on SIGNER-API-11-101 (DONE 2025-10-21), SIGNER-REF-11-102 (DONE 2025-10-21), SIGNER-QUOTA-11-103 (DONE 2025-10-21). Confirm prerequisites (none) before starting and report status in module TASKS.md. -- Team TBD: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md`. Focus on SCANNER-ANALYZERS-LANG-10-302C (TODO). Confirm prerequisites (external: SCANNER-ANALYZERS-LANG-10-302B) before starting and report status in module TASKS.md. -- Team Team Connector Resumption – CERT/RedHat: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Concelier.Connector.Distro.RedHat/TASKS.md`. Focus on FEEDCONN-REDHAT-02-001 (DOING). Confirm prerequisites (none) before starting and report status in module TASKS.md. -- Team Team Excititor Attestation: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Excititor.Attestation/TASKS.md`. Focus on EXCITITOR-ATTEST-01-003 (TODO). Confirm prerequisites (external: EXCITITOR-ATTEST-01-002) before starting and report status in module TASKS.md. -- Team Team Excititor Connectors – Cisco: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Excititor.Connectors.Cisco.CSAF/TASKS.md`. Focus on EXCITITOR-CONN-CISCO-01-003 (TODO). Confirm prerequisites (external: EXCITITOR-CONN-CISCO-01-002, EXCITITOR-POLICY-01-001) before starting and report status in module TASKS.md. -- Team Team Excititor Connectors – MSRC: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Excititor.Connectors.MSRC.CSAF/TASKS.md`. Focus on EXCITITOR-CONN-MS-01-002 (TODO). 
Confirm prerequisites (external: EXCITITOR-CONN-MS-01-001, EXCITITOR-STORAGE-01-003) before starting and report status in module TASKS.md. -- Team Team Excititor Connectors – Oracle: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Excititor.Connectors.Oracle.CSAF/TASKS.md`. Focus on EXCITITOR-CONN-ORACLE-01-001 (DOING). Confirm prerequisites (external: EXCITITOR-CONN-ABS-01-001) before starting and report status in module TASKS.md. -- Team Team Excititor Connectors – SUSE: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/TASKS.md`. Focus on EXCITITOR-CONN-SUSE-01-002 (TODO). Confirm prerequisites (external: EXCITITOR-CONN-SUSE-01-001, EXCITITOR-STORAGE-01-003) before starting and report status in module TASKS.md. -- Team Team Excititor Connectors – Ubuntu: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/TASKS.md`. Focus on EXCITITOR-CONN-UBUNTU-01-002 (TODO). Confirm prerequisites (external: EXCITITOR-CONN-UBUNTU-01-001, EXCITITOR-STORAGE-01-003) before starting and report status in module TASKS.md. -- Team Team Excititor Export: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Excititor.Export/TASKS.md`. Focus on EXCITITOR-EXPORT-01-005 (DONE 2025-10-21). Confirm prerequisites (external: EXCITITOR-CORE-02-001, EXCITITOR-EXPORT-01-004) before starting and report status in module TASKS.md. -- Team Team Excititor Formats: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Excititor.Formats.CSAF/TASKS.md`, `src/StellaOps.Excititor.Formats.CycloneDX/TASKS.md`, `src/StellaOps.Excititor.Formats.OpenVEX/TASKS.md`. Focus on EXCITITOR-FMT-CSAF-01-002 (TODO), EXCITITOR-FMT-CSAF-01-003 (TODO), EXCITITOR-FMT-CYCLONE-01-002 (TODO), EXCITITOR-FMT-CYCLONE-01-003 (TODO), EXCITITOR-FMT-OPENVEX-01-002 (TODO), EXCITITOR-FMT-OPENVEX-01-003 (TODO). Confirm prerequisites (external: EXCITITOR-EXPORT-01-001, EXCITITOR-FMT-CSAF-01-001, EXCITITOR-FMT-CYCLONE-01-001, EXCITITOR-FMT-OPENVEX-01-001, EXCITITOR-POLICY-01-001) before starting and report status in module TASKS.md. -- Team Team Excititor Storage: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Excititor.Storage.Mongo/TASKS.md`. Focus on EXCITITOR-STORAGE-MONGO-08-001 (DONE 2025-10-19), EXCITITOR-STORAGE-03-001 (TODO). Confirm prerequisites (external: EXCITITOR-STORAGE-01-003, EXCITITOR-STORAGE-02-001) before starting and report status in module TASKS.md. -- Team Team Excititor WebService: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Excititor.WebService/TASKS.md`. Focus on EXCITITOR-WEB-01-002 (DONE 2025-10-20), EXCITITOR-WEB-01-003 (TODO), EXCITITOR-WEB-01-004 (DONE 2025-10-20). Confirm prerequisites (external: EXCITITOR-ATTEST-01-001, EXCITITOR-EXPORT-01-001, EXCITITOR-WEB-01-001) before starting and report status in module TASKS.md. -- Team Team Excititor Worker: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Excititor.Worker/TASKS.md`. Focus on EXCITITOR-WORKER-01-004 (DONE 2025-10-21); EXCITITOR-WORKER-01-002 (DONE 2025-10-21) and EXCITITOR-WORKER-02-001 (DONE 2025-10-21) recorded. Confirm prerequisites (external: EXCITITOR-CORE-02-001, EXCITITOR-WORKER-01-001) before starting and report status in module TASKS.md. -- Team Team Merge & QA Enforcement: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Concelier.Merge/TASKS.md`. Focus on FEEDMERGE-COORD-02-900 (DOING). Confirm prerequisites (none) before starting and report status in module TASKS.md. 
**2025-10-19:** Coordination refreshed; connector owners notified and TASKS.md entries updated. **2025-10-20:** Coordination matrix + rollout dashboard refreshed with connector due dates (Cccs/Cisco 2025-10-21, CertBund 2025-10-22, ICS-CISA 2025-10-23, KISA 2025-10-24) and escalation plan logged. -- Team Team Normalization & Storage Backbone: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Concelier.Storage.Mongo/TASKS.md`. Focus on FEEDSTORAGE-MONGO-08-001 (DONE 2025-10-19). Confirm prerequisites (none) before starting and report status in module TASKS.md. -- Team Team WebService & Authority: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/TASKS.md`, `src/StellaOps.Concelier.WebService/TASKS.md`. Focus on SEC2.PLG (DOING), SEC3.PLG (DOING), SEC5.PLG (DOING), PLG4-6.CAPABILITIES (BLOCKED), PLG6.DIAGRAM (TODO), PLG7.RFC (REVIEW), FEEDWEB-DOCS-01-001 (DOING), FEEDWEB-OPS-01-006 (TODO), FEEDWEB-OPS-01-007 (BLOCKED). Confirm prerequisites (none) before starting and report status in module TASKS.md. -- Team Tools Guild, BE-Conn-MSRC: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Concelier.Connector.Common/TASKS.md`. Focus on FEEDCONN-SHARED-STATE-003 (**TODO). Confirm prerequisites (none) before starting and report status in module TASKS.md. -- Team UX Specialist, Angular Eng: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/StellaOps.Web/TASKS.md`. Focus on WEB1.TRIVY-SETTINGS (DONE 2025-10-21), WEB1.TRIVY-SETTINGS-TESTS (DONE 2025-10-21), and WEB1.DEPS-13-001 (DONE 2025-10-21). Confirm prerequisites (none) before starting and report status in module TASKS.md. +- Team Scanner WebService Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/Scanner/StellaOps.Scanner.WebService/TASKS.md`. Focus on SCANNER-EVENTS-15-201 (DONE 2025-10-20). Confirm prerequisites (none) before starting and report status in module TASKS.md. +- Team Scheduler ImpactIndex Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/TASKS.md`. Focus on SCHED-IMPACT-16-300 (DONE 2025-10-20) and ensure the temporary stub removal note stays tracked. Confirm prerequisites (external: SAMPLES-10-001) before starting and report status in module TASKS.md. +- Team Scheduler Models Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/Scheduler/__Libraries/StellaOps.Scheduler.Models/TASKS.md`. SCHED-MODELS-16-103 completed (2025-10-20); ensure downstream teams consume the migration helpers and log upgrade warnings. +- Team Scheduler Queue Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/TASKS.md`. SCHED-QUEUE-16-401 completed (2025-10-20); proceed with Wave 1 queue enhancements. +- Team Scheduler Storage Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/TASKS.md`. Focus on SCHED-STORAGE-16-201 (TODO). Confirm prerequisites (external: SCHED-MODELS-16-101) before starting and report status in module TASKS.md. +- Team Scheduler WebService Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/Scheduler/StellaOps.Scheduler.WebService/TASKS.md`. Focus on SCHED-WEB-16-101 (TODO). Confirm prerequisites (external: SCHED-MODELS-16-101) before starting and report status in module TASKS.md. +- Team Signer Guild: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/Signer/StellaOps.Signer/TASKS.md`. 
Focus on SIGNER-API-11-101 (DONE 2025-10-21), SIGNER-REF-11-102 (DONE 2025-10-21), SIGNER-QUOTA-11-103 (DONE 2025-10-21). Confirm prerequisites (none) before starting and report status in module TASKS.md. +- Team TBD: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md`. Focus on SCANNER-ANALYZERS-LANG-10-302C (TODO). Confirm prerequisites (external: SCANNER-ANALYZERS-LANG-10-302B) before starting and report status in module TASKS.md. +- Team Team Connector Resumption – CERT/RedHat: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Distro.RedHat/TASKS.md`. Focus on FEEDCONN-REDHAT-02-001 (DOING). Confirm prerequisites (none) before starting and report status in module TASKS.md. +- Team Team Excititor Attestation: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/Excititor/__Libraries/StellaOps.Excititor.Attestation/TASKS.md`. Focus on EXCITITOR-ATTEST-01-003 (TODO). Confirm prerequisites (external: EXCITITOR-ATTEST-01-002) before starting and report status in module TASKS.md. +- Team Team Excititor Connectors – Cisco: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Cisco.CSAF/TASKS.md`. Focus on EXCITITOR-CONN-CISCO-01-003 (TODO). Confirm prerequisites (external: EXCITITOR-CONN-CISCO-01-002, EXCITITOR-POLICY-01-001) before starting and report status in module TASKS.md. +- Team Team Excititor Connectors – MSRC: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/Excititor/__Libraries/StellaOps.Excititor.Connectors.MSRC.CSAF/TASKS.md`. Focus on EXCITITOR-CONN-MS-01-002 (TODO). Confirm prerequisites (external: EXCITITOR-CONN-MS-01-001, EXCITITOR-STORAGE-01-003) before starting and report status in module TASKS.md. +- Team Team Excititor Connectors – Oracle: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Oracle.CSAF/TASKS.md`. Focus on EXCITITOR-CONN-ORACLE-01-001 (DOING). Confirm prerequisites (external: EXCITITOR-CONN-ABS-01-001) before starting and report status in module TASKS.md. +- Team Team Excititor Connectors – SUSE: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/Excititor/__Libraries/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/TASKS.md`. Focus on EXCITITOR-CONN-SUSE-01-002 (TODO). Confirm prerequisites (external: EXCITITOR-CONN-SUSE-01-001, EXCITITOR-STORAGE-01-003) before starting and report status in module TASKS.md. +- Team Team Excititor Connectors – Ubuntu: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Ubuntu.CSAF/TASKS.md`. Focus on EXCITITOR-CONN-UBUNTU-01-002 (TODO). Confirm prerequisites (external: EXCITITOR-CONN-UBUNTU-01-001, EXCITITOR-STORAGE-01-003) before starting and report status in module TASKS.md. +- Team Team Excititor Export: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/Excititor/__Libraries/StellaOps.Excititor.Export/TASKS.md`. Focus on EXCITITOR-EXPORT-01-005 (DONE 2025-10-21). Confirm prerequisites (external: EXCITITOR-CORE-02-001, EXCITITOR-EXPORT-01-004) before starting and report status in module TASKS.md. +- Team Team Excititor Formats: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/Excititor/__Libraries/StellaOps.Excititor.Formats.CSAF/TASKS.md`, `src/Excititor/__Libraries/StellaOps.Excititor.Formats.CycloneDX/TASKS.md`, `src/Excititor/__Libraries/StellaOps.Excititor.Formats.OpenVEX/TASKS.md`. 
Focus on EXCITITOR-FMT-CSAF-01-002 (TODO), EXCITITOR-FMT-CSAF-01-003 (TODO), EXCITITOR-FMT-CYCLONE-01-002 (TODO), EXCITITOR-FMT-CYCLONE-01-003 (TODO), EXCITITOR-FMT-OPENVEX-01-002 (TODO), EXCITITOR-FMT-OPENVEX-01-003 (TODO). Confirm prerequisites (external: EXCITITOR-EXPORT-01-001, EXCITITOR-FMT-CSAF-01-001, EXCITITOR-FMT-CYCLONE-01-001, EXCITITOR-FMT-OPENVEX-01-001, EXCITITOR-POLICY-01-001) before starting and report status in module TASKS.md. +- Team Team Excititor Storage: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/TASKS.md`. Focus on EXCITITOR-STORAGE-MONGO-08-001 (DONE 2025-10-19), EXCITITOR-STORAGE-03-001 (TODO). Confirm prerequisites (external: EXCITITOR-STORAGE-01-003, EXCITITOR-STORAGE-02-001) before starting and report status in module TASKS.md. +- Team Team Excititor WebService: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/Excititor/StellaOps.Excititor.WebService/TASKS.md`. Focus on EXCITITOR-WEB-01-002 (DONE 2025-10-20), EXCITITOR-WEB-01-003 (TODO), EXCITITOR-WEB-01-004 (DONE 2025-10-20). Confirm prerequisites (external: EXCITITOR-ATTEST-01-001, EXCITITOR-EXPORT-01-001, EXCITITOR-WEB-01-001) before starting and report status in module TASKS.md. +- Team Team Excititor Worker: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/Excititor/StellaOps.Excititor.Worker/TASKS.md`. Focus on EXCITITOR-WORKER-01-004 (DONE 2025-10-21); EXCITITOR-WORKER-01-002 (DONE 2025-10-21) and EXCITITOR-WORKER-02-001 (DONE 2025-10-21) recorded. Confirm prerequisites (external: EXCITITOR-CORE-02-001, EXCITITOR-WORKER-01-001) before starting and report status in module TASKS.md. +- Team Team Merge & QA Enforcement: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/Concelier/__Libraries/StellaOps.Concelier.Merge/TASKS.md`. Focus on FEEDMERGE-COORD-02-900 (DOING). Confirm prerequisites (none) before starting and report status in module TASKS.md. **2025-10-19:** Coordination refreshed; connector owners notified and TASKS.md entries updated. **2025-10-20:** Coordination matrix + rollout dashboard refreshed with connector due dates (Cccs/Cisco 2025-10-21, CertBund 2025-10-22, ICS-CISA 2025-10-23, KISA 2025-10-24) and escalation plan logged. +- Team Team Normalization & Storage Backbone: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/TASKS.md`. Focus on FEEDSTORAGE-MONGO-08-001 (DONE 2025-10-19). Confirm prerequisites (none) before starting and report status in module TASKS.md. +- Team Team WebService & Authority: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/TASKS.md`, `src/Concelier/StellaOps.Concelier.WebService/TASKS.md`. Focus on SEC2.PLG (DOING), SEC3.PLG (DOING), SEC5.PLG (DOING), PLG4-6.CAPABILITIES (BLOCKED), PLG6.DIAGRAM (TODO), PLG7.RFC (REVIEW), FEEDWEB-DOCS-01-001 (DOING), FEEDWEB-OPS-01-006 (TODO), FEEDWEB-OPS-01-007 (BLOCKED). Confirm prerequisites (none) before starting and report status in module TASKS.md. +- Team Tools Guild, BE-Conn-MSRC: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Common/TASKS.md`. Focus on FEEDCONN-SHARED-STATE-003 (TODO). Confirm prerequisites (none) before starting and report status in module TASKS.md. +- Team UX Specialist, Angular Eng: read EXECPLAN.md Wave 0 and SPRINTS.md rows for `src/Web/StellaOps.Web/TASKS.md`. 
Focus on WEB1.TRIVY-SETTINGS (DONE 2025-10-21), WEB1.TRIVY-SETTINGS-TESTS (DONE 2025-10-21), and WEB1.DEPS-13-001 (DONE 2025-10-21). Confirm prerequisites (none) before starting and report status in module TASKS.md. ### Wave 1 -- Team Concelier WebService Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Concelier.WebService/TASKS.md`. Focus on CONCELIER-WEB-AOC-19-001/002/003/004 (TODO). Confirm prerequisites (WEB-AOC-19-001, CONCELIER-CORE-AOC-19-001, CONCELIER-STORE-AOC-19-001) before starting and record progress in TASKS.md. -- Team Concelier Core Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Concelier.Core/TASKS.md`. Focus on CONCELIER-CORE-AOC-19-001/002/003/004 (TODO). Coordinate with Policy team on derived-data removal. -- Team Concelier Storage Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Concelier.Storage.Mongo/TASKS.md`. Prioritise CONCELIER-STORE-AOC-19-001/002/003/004 (TODO) and align validator rollout with DevOps. -- Team Excititor WebService Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Excititor.WebService/TASKS.md`. Focus on EXCITITOR-WEB-AOC-19-001/002/003/004 (TODO). Ensure parity with Concelier ingestion guard. -- Team Excititor Core Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Excititor.Core/TASKS.md`. Focus on EXCITITOR-CORE-AOC-19-001/002/003/004 (TODO). -- Team Excititor Storage Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Excititor.Storage.Mongo/TASKS.md`. Work on EXCITITOR-STORE-AOC-19-001/002/003/004 (TODO) with migration dry-run plans. -- Team Excititor Worker Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Excititor.Worker/TASKS.md`. Focus on EXCITITOR-WORKER-AOC-19-001/002/003 (TODO) coordinating signature enforcement with storage guard. -- Team BE-Base Platform Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Web/TASKS.md`. Deliver WEB-AOC-19-001/002/003 (TODO) to unblock ingestion services. -- Team Policy Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Policy/TASKS.md`. Work on POLICY-AOC-19-001/002/003/004 (TODO) to keep derived data policy-only. -- Team Authority Core & Security Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Authority/TASKS.md`. Prioritise AUTH-AOC-19-001/002/003 (TODO) for new scopes + tenancy. -- Team DevEx/CLI Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Cli/TASKS.md`. Focus on CLI-AOC-19-001/002/003 (TODO) and sync exit codes with services. -- Team UI Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.UI/TASKS.md`. Execute UI-AOC-19-001/002/003 (TODO) using new verify endpoints. +- Team Concelier WebService Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/Concelier/StellaOps.Concelier.WebService/TASKS.md`. Focus on CONCELIER-WEB-AOC-19-001/002/003/004 (TODO). Confirm prerequisites (WEB-AOC-19-001, CONCELIER-CORE-AOC-19-001, CONCELIER-STORE-AOC-19-001) before starting and record progress in TASKS.md. +- Team Concelier Core Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md`. Focus on CONCELIER-CORE-AOC-19-001/002/003/004 (TODO). Coordinate with Policy team on derived-data removal. +- Team Concelier Storage Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/TASKS.md`. 
Prioritise CONCELIER-STORE-AOC-19-001/002/003/004 (TODO) and align validator rollout with DevOps. +- Team Excititor WebService Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/Excititor/StellaOps.Excititor.WebService/TASKS.md`. Focus on EXCITITOR-WEB-AOC-19-001/002/003/004 (TODO). Ensure parity with Concelier ingestion guard. +- Team Excititor Core Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md`. Focus on EXCITITOR-CORE-AOC-19-001/002/003/004 (TODO). +- Team Excititor Storage Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/TASKS.md`. Work on EXCITITOR-STORE-AOC-19-001/002/003/004 (TODO) with migration dry-run plans. +- Team Excititor Worker Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/Excititor/StellaOps.Excititor.Worker/TASKS.md`. Focus on EXCITITOR-WORKER-AOC-19-001/002/003 (TODO) coordinating signature enforcement with storage guard. +- Team BE-Base Platform Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/Web/StellaOps.Web/TASKS.md`. Deliver WEB-AOC-19-001/002/003 (TODO) to unblock ingestion services. +- Team Policy Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/Policy/__Libraries/StellaOps.Policy/TASKS.md`. Work on POLICY-AOC-19-001/002/003/004 (TODO) to keep derived data policy-only. +- Team Authority Core & Security Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/Authority/StellaOps.Authority/TASKS.md`. Prioritise AUTH-AOC-19-001/002/003 (TODO) for new scopes + tenancy. +- Team DevEx/CLI Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/Cli/StellaOps.Cli/TASKS.md`. Focus on CLI-AOC-19-001/002/003 (TODO) and sync exit codes with services. +- Team UI Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/UI/StellaOps.UI/TASKS.md`. Execute UI-AOC-19-001/002/003 (TODO) using new verify endpoints. - Team DevOps Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `ops/devops/TASKS.md`. Implement DEVOPS-AOC-19-001/002/003 (TODO) to gate CI with new guards. - Team Docs Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `docs/TASKS.md`. Cover DOCS-AOC-19-001..008 (TODO) aligning docs with new ingestion contract. -- Team Bench Guild, Language Analyzer Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Bench/TASKS.md`. Focus on BENCH-SCANNER-10-002 (TODO). Confirm prerequisites (internal: SCANNER-ANALYZERS-LANG-10-301 (Wave 0)) before starting and report status in module TASKS.md. -- Team DevEx/CLI, QA Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Cli/TASKS.md`. Focus on CLI-RUNTIME-13-009 (TODO). Confirm prerequisites (internal: CLI-RUNTIME-13-005 (Wave 0)) before starting and report status in module TASKS.md. +- Team Bench Guild, Language Analyzer Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/Bench/StellaOps.Bench/TASKS.md`. Focus on BENCH-SCANNER-10-002 (TODO). Confirm prerequisites (internal: SCANNER-ANALYZERS-LANG-10-301 (Wave 0)) before starting and report status in module TASKS.md. +- Team DevEx/CLI, QA Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/Cli/StellaOps.Cli/TASKS.md`. Focus on CLI-RUNTIME-13-009 (TODO). Confirm prerequisites (internal: CLI-RUNTIME-13-005 (Wave 0)) before starting and report status in module TASKS.md. - Team DevOps Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `ops/devops/TASKS.md`. Focus on DEVOPS-REL-14-001 (DOING 2025-10-23). 
Confirm prerequisites (internal: SIGNER-API-11-101 (Wave 0)) before starting and report status in module TASKS.md. - Team DevOps Guild, Scanner WebService Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `ops/devops/TASKS.md`. Focus on DEVOPS-SCANNER-09-204 (TODO). Confirm prerequisites (internal: SCANNER-EVENTS-15-201 (Wave 0)) before starting and report status in module TASKS.md. -- Team Emit Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Scanner.Emit/TASKS.md`. SCANNER-EMIT-10-607 shipped 2025-10-22; remaining focus is SCANNER-EMIT-17-701 (build-id enrichment). Confirm prerequisites (internal: POLICY-CORE-09-005 (Wave 0), SCANNER-EMIT-10-602 (Wave 0), SCANNER-EMIT-10-604 (Wave 0)) before starting and report status in module TASKS.md. -- Team Language Analyzer Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Scanner.Analyzers.Lang/TASKS.md`. Sprint 10 language analyzers (10-303..10-306) wrapped by 2025-10-22; shift to Wave 1 benchmarking/packaging follow-ups (10-308+/309 variants) and ensure shared helpers stay stable. Node stream (tasks 10-302/309) closed on 2025-10-21; verify prereqs SCANNER-ANALYZERS-LANG-10-301/307 remain satisfied before new work. +- Team Emit Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/Scanner/__Libraries/StellaOps.Scanner.Emit/TASKS.md`. SCANNER-EMIT-10-607 shipped 2025-10-22; remaining focus is SCANNER-EMIT-17-701 (build-id enrichment). Confirm prerequisites (internal: POLICY-CORE-09-005 (Wave 0), SCANNER-EMIT-10-602 (Wave 0), SCANNER-EMIT-10-604 (Wave 0)) before starting and report status in module TASKS.md. +- Team Language Analyzer Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/TASKS.md`. Sprint 10 language analyzers (10-303..10-306) wrapped by 2025-10-22; shift to Wave 1 benchmarking/packaging follow-ups (10-308+/309 variants) and ensure shared helpers stay stable. Node stream (tasks 10-302/309) closed on 2025-10-21; verify prereqs SCANNER-ANALYZERS-LANG-10-301/307 remain satisfied before new work. - Team Licensing Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `ops/licensing/TASKS.md`. Focus on DEVOPS-LIC-14-004 (TODO). Confirm prerequisites (internal: AUTH-MTLS-11-002 (Wave 0)) before starting and report status in module TASKS.md. -- Team Notify Engine Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Notify.Engine/TASKS.md`. Focus on NOTIFY-ENGINE-15-301 (TODO). Confirm prerequisites (internal: NOTIFY-MODELS-15-101 (Wave 0)) before starting and report status in module TASKS.md. -- Team Notify WebService Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Notify.WebService/TASKS.md`. Focus on NOTIFY-WEB-15-103 (DONE). Confirm prerequisites (internal: NOTIFY-WEB-15-102 (Wave 0)) before starting and report status in module TASKS.md. -- Team Scheduler ImpactIndex Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Scheduler.ImpactIndex/TASKS.md`. Focus on SCHED-IMPACT-16-301 (TODO). Confirm prerequisites (internal: SCANNER-EMIT-10-605 (Wave 0)) before starting and report status in module TASKS.md. -- Team Scheduler Queue Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Scheduler.Queue/TASKS.md`. SCHED-QUEUE-16-402 completed (2025-10-20); next focus is SCHED-QUEUE-16-403. -- Team Scheduler Storage Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Scheduler.Storage.Mongo/TASKS.md`. 
Focus on SCHED-STORAGE-16-203 (TODO), SCHED-STORAGE-16-202 (TODO). Confirm prerequisites (internal: SCHED-STORAGE-16-201 (Wave 0)) before starting and report status in module TASKS.md. -- Team Scheduler WebService Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Scheduler.WebService/TASKS.md`. Focus on SCHED-WEB-16-104 (TODO), SCHED-WEB-16-102 (TODO). Confirm prerequisites (internal: SCHED-QUEUE-16-401 (Wave 0), SCHED-STORAGE-16-201 (Wave 0), SCHED-WEB-16-101 (Wave 0)) before starting and report status in module TASKS.md. -- Team Scheduler Worker Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Scheduler.Worker/TASKS.md`. Focus on SCHED-WORKER-16-201 (TODO). Confirm prerequisites (internal: SCHED-QUEUE-16-401 (Wave 0)) before starting and report status in module TASKS.md. -- Team TBD: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md`. SCANNER-ANALYZERS-LANG-10-305A/304A/303A/306A all closed by 2025-10-22; use this slot to review cross-language fixture hygiene and prep Wave 1 benchmarking tickets. Node add-ons 10-307N/10-308N/10-309N remain DONE with restart-time packaging verified 2025-10-21. Confirm prerequisites (internal: SCANNER-ANALYZERS-LANG-10-302C (Wave 0), SCANNER-ANALYZERS-LANG-10-307 (Wave 0)) before starting any new follow-ups and report status in module TASKS.md. -- Team Team Excititor Connectors – MSRC: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Excititor.Connectors.MSRC.CSAF/TASKS.md`. Focus on EXCITITOR-CONN-MS-01-003 (TODO). Confirm prerequisites (internal: EXCITITOR-CONN-MS-01-002 (Wave 0); external: EXCITITOR-POLICY-01-001) before starting and report status in module TASKS.md. -- Team Team Excititor Connectors – Oracle: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Excititor.Connectors.Oracle.CSAF/TASKS.md`. Focus on EXCITITOR-CONN-ORACLE-01-002 (TODO). Confirm prerequisites (internal: EXCITITOR-CONN-ORACLE-01-001 (Wave 0); external: EXCITITOR-STORAGE-01-003) before starting and report status in module TASKS.md. -- Team Team Excititor Connectors – SUSE: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/TASKS.md`. Focus on EXCITITOR-CONN-SUSE-01-003 (TODO). Confirm prerequisites (internal: EXCITITOR-CONN-SUSE-01-002 (Wave 0); external: EXCITITOR-POLICY-01-001) before starting and report status in module TASKS.md. -- Team Team Excititor Connectors – Ubuntu: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/TASKS.md`. Focus on EXCITITOR-CONN-UBUNTU-01-003 (TODO). Confirm prerequisites (internal: EXCITITOR-CONN-UBUNTU-01-002 (Wave 0); external: EXCITITOR-POLICY-01-001) before starting and report status in module TASKS.md. -- Team Team Excititor Export: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Excititor.Export/TASKS.md`. Focus on EXCITITOR-EXPORT-01-006 (DONE 2025-10-21). Confirm prerequisites (internal: EXCITITOR-EXPORT-01-005 (Wave 0), POLICY-CORE-09-005 (Wave 0)) before starting and report status in module TASKS.md. -- Team Team Excititor Worker: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.Excititor.Worker/TASKS.md`. Focus on EXCITITOR-WORKER-01-003 (TODO). 
Confirm prerequisites (internal: EXCITITOR-ATTEST-01-003 (Wave 0); external: EXCITITOR-EXPORT-01-002, EXCITITOR-WORKER-01-001) before starting and report status in module TASKS.md. -- Team UI Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/StellaOps.UI/TASKS.md`. Focus on UI-SCANS-13-002 (TODO), UI-VEX-13-003 (TODO), UI-ADMIN-13-004 (TODO), UI-SCHED-13-005 (TODO). Confirm prerequisites (internal: AUTH-DPOP-11-001 (Wave 0), AUTH-MTLS-11-002 (Wave 0), EXCITITOR-EXPORT-01-005 (Wave 0), NOTIFY-WEB-15-101 (Wave 0), POLICY-CORE-09-006 (Wave 0), SCHED-WEB-16-101 (Wave 0), SIGNER-API-11-101 (Wave 0); external: EXCITITOR-CORE-02-001, SCANNER-WEB-09-102, SCANNER-WEB-09-103) before starting and report status in module TASKS.md. +- Team Notify Engine Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/Notify/__Libraries/StellaOps.Notify.Engine/TASKS.md`. Focus on NOTIFY-ENGINE-15-301 (TODO). Confirm prerequisites (internal: NOTIFY-MODELS-15-101 (Wave 0)) before starting and report status in module TASKS.md. +- Team Notify WebService Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/Notify/StellaOps.Notify.WebService/TASKS.md`. Focus on NOTIFY-WEB-15-103 (DONE). Confirm prerequisites (internal: NOTIFY-WEB-15-102 (Wave 0)) before starting and report status in module TASKS.md. +- Team Scheduler ImpactIndex Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/TASKS.md`. Focus on SCHED-IMPACT-16-301 (TODO). Confirm prerequisites (internal: SCANNER-EMIT-10-605 (Wave 0)) before starting and report status in module TASKS.md. +- Team Scheduler Queue Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/TASKS.md`. SCHED-QUEUE-16-402 completed (2025-10-20); next focus is SCHED-QUEUE-16-403. +- Team Scheduler Storage Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/TASKS.md`. Focus on SCHED-STORAGE-16-203 (TODO), SCHED-STORAGE-16-202 (TODO). Confirm prerequisites (internal: SCHED-STORAGE-16-201 (Wave 0)) before starting and report status in module TASKS.md. +- Team Scheduler WebService Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/Scheduler/StellaOps.Scheduler.WebService/TASKS.md`. Focus on SCHED-WEB-16-104 (TODO), SCHED-WEB-16-102 (TODO). Confirm prerequisites (internal: SCHED-QUEUE-16-401 (Wave 0), SCHED-STORAGE-16-201 (Wave 0), SCHED-WEB-16-101 (Wave 0)) before starting and report status in module TASKS.md. +- Team Scheduler Worker Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md`. Focus on SCHED-WORKER-16-201 (TODO). Confirm prerequisites (internal: SCHED-QUEUE-16-401 (Wave 0)) before starting and report status in module TASKS.md. +- Team TBD: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md`, `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md`, `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md`, `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md`, `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md`. SCANNER-ANALYZERS-LANG-10-305A/304A/303A/306A all closed by 2025-10-22; use this slot to review cross-language fixture hygiene and prep Wave 1 benchmarking tickets. Node add-ons 10-307N/10-308N/10-309N remain DONE with restart-time packaging verified 2025-10-21. 
Confirm prerequisites (internal: SCANNER-ANALYZERS-LANG-10-302C (Wave 0), SCANNER-ANALYZERS-LANG-10-307 (Wave 0)) before starting any new follow-ups and report status in module TASKS.md. +- Team Team Excititor Connectors – MSRC: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/Excititor/__Libraries/StellaOps.Excititor.Connectors.MSRC.CSAF/TASKS.md`. Focus on EXCITITOR-CONN-MS-01-003 (TODO). Confirm prerequisites (internal: EXCITITOR-CONN-MS-01-002 (Wave 0); external: EXCITITOR-POLICY-01-001) before starting and report status in module TASKS.md. +- Team Team Excititor Connectors – Oracle: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Oracle.CSAF/TASKS.md`. Focus on EXCITITOR-CONN-ORACLE-01-002 (TODO). Confirm prerequisites (internal: EXCITITOR-CONN-ORACLE-01-001 (Wave 0); external: EXCITITOR-STORAGE-01-003) before starting and report status in module TASKS.md. +- Team Team Excititor Connectors – SUSE: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/Excititor/__Libraries/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/TASKS.md`. Focus on EXCITITOR-CONN-SUSE-01-003 (TODO). Confirm prerequisites (internal: EXCITITOR-CONN-SUSE-01-002 (Wave 0); external: EXCITITOR-POLICY-01-001) before starting and report status in module TASKS.md. +- Team Team Excititor Connectors – Ubuntu: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Ubuntu.CSAF/TASKS.md`. Focus on EXCITITOR-CONN-UBUNTU-01-003 (TODO). Confirm prerequisites (internal: EXCITITOR-CONN-UBUNTU-01-002 (Wave 0); external: EXCITITOR-POLICY-01-001) before starting and report status in module TASKS.md. +- Team Team Excititor Export: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/Excititor/__Libraries/StellaOps.Excititor.Export/TASKS.md`. Focus on EXCITITOR-EXPORT-01-006 (DONE 2025-10-21). Confirm prerequisites (internal: EXCITITOR-EXPORT-01-005 (Wave 0), POLICY-CORE-09-005 (Wave 0)) before starting and report status in module TASKS.md. +- Team Team Excititor Worker: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/Excititor/StellaOps.Excititor.Worker/TASKS.md`. Focus on EXCITITOR-WORKER-01-003 (TODO). Confirm prerequisites (internal: EXCITITOR-ATTEST-01-003 (Wave 0); external: EXCITITOR-EXPORT-01-002, EXCITITOR-WORKER-01-001) before starting and report status in module TASKS.md. +- Team UI Guild: read EXECPLAN.md Wave 1 and SPRINTS.md rows for `src/UI/StellaOps.UI/TASKS.md`. Focus on UI-SCANS-13-002 (TODO), UI-VEX-13-003 (TODO), UI-ADMIN-13-004 (TODO), UI-SCHED-13-005 (TODO). Confirm prerequisites (internal: AUTH-DPOP-11-001 (Wave 0), AUTH-MTLS-11-002 (Wave 0), EXCITITOR-EXPORT-01-005 (Wave 0), NOTIFY-WEB-15-101 (Wave 0), POLICY-CORE-09-006 (Wave 0), SCHED-WEB-16-101 (Wave 0), SIGNER-API-11-101 (Wave 0); external: EXCITITOR-CORE-02-001, SCANNER-WEB-09-102, SCANNER-WEB-09-103) before starting and report status in module TASKS.md. ### Wave 2 -- Team Bench Guild, Notify Team: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `src/StellaOps.Bench/TASKS.md`. Focus on BENCH-NOTIFY-15-001 (TODO). Confirm prerequisites (internal: NOTIFY-ENGINE-15-301 (Wave 1)) before starting and report status in module TASKS.md. -- Team Bench Guild, Scheduler Team: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `src/StellaOps.Bench/TASKS.md`. Focus on BENCH-IMPACT-16-001 (TODO). Confirm prerequisites (internal: SCHED-IMPACT-16-301 (Wave 1)) before starting and report status in module TASKS.md. 
+- Team Bench Guild, Notify Team: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `src/Bench/StellaOps.Bench/TASKS.md`. Focus on BENCH-NOTIFY-15-001 (TODO). Confirm prerequisites (internal: NOTIFY-ENGINE-15-301 (Wave 1)) before starting and report status in module TASKS.md. +- Team Bench Guild, Scheduler Team: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `src/Bench/StellaOps.Bench/TASKS.md`. Focus on BENCH-IMPACT-16-001 (TODO). Confirm prerequisites (internal: SCHED-IMPACT-16-301 (Wave 1)) before starting and report status in module TASKS.md. - Team Deployment Guild: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `ops/deployment/TASKS.md`. Focus on DEVOPS-OPS-14-003 (TODO). Confirm prerequisites (internal: DEVOPS-REL-14-001 (Wave 1)) before starting and report status in module TASKS.md. - Team DevOps Guild, Notify Guild: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `ops/devops/TASKS.md`. Focus on DEVOPS-SCANNER-09-205 (TODO). Confirm prerequisites (internal: DEVOPS-SCANNER-09-204 (Wave 1)) before starting and report status in module TASKS.md. -- Team Notify Engine Guild: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `src/StellaOps.Notify.Engine/TASKS.md`. Focus on NOTIFY-ENGINE-15-302 (TODO). Confirm prerequisites (internal: NOTIFY-ENGINE-15-301 (Wave 1)) before starting and report status in module TASKS.md. +- Team Notify Engine Guild: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `src/Notify/__Libraries/StellaOps.Notify.Engine/TASKS.md`. Focus on NOTIFY-ENGINE-15-302 (TODO). Confirm prerequisites (internal: NOTIFY-ENGINE-15-301 (Wave 1)) before starting and report status in module TASKS.md. - Team Offline Kit Guild: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `ops/offline-kit/TASKS.md`. Focus on DEVOPS-OFFLINE-14-002 (TODO), DEVOPS-OFFLINE-18-003 (TODO), and DEVOPS-OFFLINE-18-005 (TODO). Confirm prerequisites (internal: DEVOPS-REL-14-001 (Wave 1), DEVOPS-REL-14-004 (Wave 2)) before starting and report status in module TASKS.md. - Team Samples Guild, Policy Guild: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `samples/TASKS.md`. Focus on SAMPLES-13-004 (TODO). Confirm prerequisites (internal: POLICY-CORE-09-006 (Wave 0), UI-POLICY-13-007 (Wave 1)) before starting and report status in module TASKS.md. -- Team Scheduler ImpactIndex Guild: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `src/StellaOps.Scheduler.ImpactIndex/TASKS.md`. Focus on SCHED-IMPACT-16-303 (TODO), SCHED-IMPACT-16-302 (TODO). Confirm prerequisites (internal: SCHED-IMPACT-16-301 (Wave 1)) before starting and report status in module TASKS.md. -- Team Scheduler WebService Guild: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `src/StellaOps.Scheduler.WebService/TASKS.md`. Focus on SCHED-WEB-16-103 (TODO). Confirm prerequisites (internal: SCHED-WEB-16-102 (Wave 1)) before starting and report status in module TASKS.md. -- Team Scheduler Worker Guild: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `src/StellaOps.Scheduler.Worker/TASKS.md`. Focus on SCHED-WORKER-16-202 (TODO), SCHED-WORKER-16-205 (TODO). Confirm prerequisites (internal: SCHED-IMPACT-16-301 (Wave 1), SCHED-WORKER-16-201 (Wave 1)) before starting and report status in module TASKS.md. -- Team TBD: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md`. 
SCANNER-ANALYZERS-LANG-10-305B/304B/303B/306B wrapped on 2025-10-22; next focus moves to `10-307*` shared helper integration and Wave 2 benchmark polish. Node packaging milestone 10-308N closed 2025-10-21. Confirm prerequisites (internal: SCANNER-ANALYZERS-LANG-10-303A (Wave 1), SCANNER-ANALYZERS-LANG-10-304A (Wave 1), SCANNER-ANALYZERS-LANG-10-305A (Wave 1), SCANNER-ANALYZERS-LANG-10-306A (Wave 1), SCANNER-ANALYZERS-LANG-10-307N (Wave 1)) before starting new work and report status in module TASKS.md. -- Team Team Excititor Connectors – Oracle: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `src/StellaOps.Excititor.Connectors.Oracle.CSAF/TASKS.md`. Focus on EXCITITOR-CONN-ORACLE-01-003 (TODO). Confirm prerequisites (internal: EXCITITOR-CONN-ORACLE-01-002 (Wave 1); external: EXCITITOR-POLICY-01-001) before starting and report status in module TASKS.md. -- Team Team Excititor Export: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `src/StellaOps.Excititor.Export/TASKS.md`. Focus on EXCITITOR-EXPORT-01-007 (DONE 2025-10-21). Confirm prerequisites (internal: EXCITITOR-EXPORT-01-006 (Wave 1)) before starting and report status in module TASKS.md. +- Team Scheduler ImpactIndex Guild: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/TASKS.md`. Focus on SCHED-IMPACT-16-303 (TODO), SCHED-IMPACT-16-302 (TODO). Confirm prerequisites (internal: SCHED-IMPACT-16-301 (Wave 1)) before starting and report status in module TASKS.md. +- Team Scheduler WebService Guild: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `src/Scheduler/StellaOps.Scheduler.WebService/TASKS.md`. Focus on SCHED-WEB-16-103 (TODO). Confirm prerequisites (internal: SCHED-WEB-16-102 (Wave 1)) before starting and report status in module TASKS.md. +- Team Scheduler Worker Guild: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md`. Focus on SCHED-WORKER-16-202 (TODO), SCHED-WORKER-16-205 (TODO). Confirm prerequisites (internal: SCHED-IMPACT-16-301 (Wave 1), SCHED-WORKER-16-201 (Wave 1)) before starting and report status in module TASKS.md. +- Team TBD: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md`, `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md`, `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md`, `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md`, `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md`. SCANNER-ANALYZERS-LANG-10-305B/304B/303B/306B wrapped on 2025-10-22; next focus moves to `10-307*` shared helper integration and Wave 2 benchmark polish. Node packaging milestone 10-308N closed 2025-10-21. Confirm prerequisites (internal: SCANNER-ANALYZERS-LANG-10-303A (Wave 1), SCANNER-ANALYZERS-LANG-10-304A (Wave 1), SCANNER-ANALYZERS-LANG-10-305A (Wave 1), SCANNER-ANALYZERS-LANG-10-306A (Wave 1), SCANNER-ANALYZERS-LANG-10-307N (Wave 1)) before starting new work and report status in module TASKS.md. +- Team Team Excititor Connectors – Oracle: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Oracle.CSAF/TASKS.md`. Focus on EXCITITOR-CONN-ORACLE-01-003 (TODO). Confirm prerequisites (internal: EXCITITOR-CONN-ORACLE-01-002 (Wave 1); external: EXCITITOR-POLICY-01-001) before starting and report status in module TASKS.md. 
+- Team Team Excititor Export: read EXECPLAN.md Wave 2 and SPRINTS.md rows for `src/Excititor/__Libraries/StellaOps.Excititor.Export/TASKS.md`. Focus on EXCITITOR-EXPORT-01-007 (DONE 2025-10-21). Confirm prerequisites (internal: EXCITITOR-EXPORT-01-006 (Wave 1)) before starting and report status in module TASKS.md. ### Wave 3 -- Team DevEx/CLI: read EXECPLAN.md Wave 3 and SPRINTS.md rows for `src/StellaOps.Cli/TASKS.md`. Focus on CLI-OFFLINE-13-006 (DONE 2025-10-21). Confirm prerequisites (internal: DEVOPS-OFFLINE-14-002 (Wave 2)) before starting and report status in module TASKS.md. -- Team Excititor Connectors – Stella: read EXECPLAN.md Wave 3 and SPRINTS.md rows for `src/StellaOps.Excititor.Connectors.StellaOpsMirror/TASKS.md`. Focus on EXCITITOR-CONN-STELLA-07-001 (DONE 2025-10-21). Confirm prerequisites (internal: EXCITITOR-EXPORT-01-007 (Wave 2)) before starting and report status in module TASKS.md. -- Team Notify Engine Guild: read EXECPLAN.md Wave 3 and SPRINTS.md rows for `src/StellaOps.Notify.Engine/TASKS.md`. Focus on NOTIFY-ENGINE-15-303 (TODO). Confirm prerequisites (internal: NOTIFY-ENGINE-15-302 (Wave 2)) before starting and report status in module TASKS.md. -- Team Notify Worker Guild: read EXECPLAN.md Wave 3 and SPRINTS.md rows for `src/StellaOps.Notify.Worker/TASKS.md`. Focus on NOTIFY-WORKER-15-203 (TODO). Confirm prerequisites (internal: NOTIFY-ENGINE-15-302 (Wave 2)) before starting and report status in module TASKS.md. -- Team Scheduler Worker Guild: read EXECPLAN.md Wave 3 and SPRINTS.md rows for `src/StellaOps.Scheduler.Worker/TASKS.md`. Focus on SCHED-WORKER-16-203 (TODO). Confirm prerequisites (internal: SCHED-WORKER-16-202 (Wave 2)) before starting and report status in module TASKS.md. -- Team TBD: read EXECPLAN.md Wave 3 and SPRINTS.md rows for `src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md`. SCANNER-ANALYZERS-LANG-10-305C/304C/309N/303C/306C are all DONE (latest 2025-10-22); remaining Wave 3 attention shifts to 10-307* helper consolidation and subsequent benchmarking tickets. Confirm prerequisites (internal: SCANNER-ANALYZERS-LANG-10-303B (Wave 2), SCANNER-ANALYZERS-LANG-10-304B (Wave 2), SCANNER-ANALYZERS-LANG-10-305B (Wave 2), SCANNER-ANALYZERS-LANG-10-306B (Wave 2), SCANNER-ANALYZERS-LANG-10-308N (Wave 2)) before scheduling new work and report status in module TASKS.md. +- Team DevEx/CLI: read EXECPLAN.md Wave 3 and SPRINTS.md rows for `src/Cli/StellaOps.Cli/TASKS.md`. Focus on CLI-OFFLINE-13-006 (DONE 2025-10-21). Confirm prerequisites (internal: DEVOPS-OFFLINE-14-002 (Wave 2)) before starting and report status in module TASKS.md. +- Team Excititor Connectors – Stella: read EXECPLAN.md Wave 3 and SPRINTS.md rows for `src/Excititor/StellaOps.Excititor.Connectors.StellaOpsMirror/TASKS.md`. Focus on EXCITITOR-CONN-STELLA-07-001 (DONE 2025-10-21). Confirm prerequisites (internal: EXCITITOR-EXPORT-01-007 (Wave 2)) before starting and report status in module TASKS.md. +- Team Notify Engine Guild: read EXECPLAN.md Wave 3 and SPRINTS.md rows for `src/Notify/__Libraries/StellaOps.Notify.Engine/TASKS.md`. Focus on NOTIFY-ENGINE-15-303 (TODO). Confirm prerequisites (internal: NOTIFY-ENGINE-15-302 (Wave 2)) before starting and report status in module TASKS.md. 
+- Team Notify Worker Guild: read EXECPLAN.md Wave 3 and SPRINTS.md rows for `src/Notify/StellaOps.Notify.Worker/TASKS.md`. Focus on NOTIFY-WORKER-15-203 (TODO). Confirm prerequisites (internal: NOTIFY-ENGINE-15-302 (Wave 2)) before starting and report status in module TASKS.md. +- Team Scheduler Worker Guild: read EXECPLAN.md Wave 3 and SPRINTS.md rows for `src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md`. Focus on SCHED-WORKER-16-203 (TODO). Confirm prerequisites (internal: SCHED-WORKER-16-202 (Wave 2)) before starting and report status in module TASKS.md. +- Team TBD: read EXECPLAN.md Wave 3 and SPRINTS.md rows for `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md`, `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md`, `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md`, `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md`, `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md`. SCANNER-ANALYZERS-LANG-10-305C/304C/309N/303C/306C are all DONE (latest 2025-10-22); remaining Wave 3 attention shifts to 10-307* helper consolidation and subsequent benchmarking tickets. Confirm prerequisites (internal: SCANNER-ANALYZERS-LANG-10-303B (Wave 2), SCANNER-ANALYZERS-LANG-10-304B (Wave 2), SCANNER-ANALYZERS-LANG-10-305B (Wave 2), SCANNER-ANALYZERS-LANG-10-306B (Wave 2), SCANNER-ANALYZERS-LANG-10-308N (Wave 2)) before scheduling new work and report status in module TASKS.md. ### Wave 4 -- Team DevEx/CLI: read EXECPLAN.md Wave 4 and SPRINTS.md rows for `src/StellaOps.Cli/TASKS.md`. Focus on CLI-PLUGIN-13-007 (DONE 2025-10-22). Confirm prerequisites (internal: CLI-OFFLINE-13-006 (Wave 3), CLI-RUNTIME-13-005 (Wave 0)) before starting and report status in module TASKS.md. -- Team Excititor Connectors – Stella: read EXECPLAN.md Wave 4 and SPRINTS.md rows for `src/StellaOps.Excititor.Connectors.StellaOpsMirror/TASKS.md`. Focus on EXCITITOR-CONN-STELLA-07-002 (TODO). Confirm prerequisites (internal: EXCITITOR-CONN-STELLA-07-001 (Wave 3)) before starting and report status in module TASKS.md. -- Team Notify Connectors Guild: read EXECPLAN.md Wave 4 and SPRINTS.md rows for `src/StellaOps.Notify.Connectors.Email/TASKS.md`, `src/StellaOps.Notify.Connectors.Slack/TASKS.md`, `src/StellaOps.Notify.Connectors.Teams/TASKS.md`, `src/StellaOps.Notify.Connectors.Webhook/TASKS.md`. Focus on NOTIFY-CONN-SLACK-15-501 (TODO), NOTIFY-CONN-TEAMS-15-601 (TODO), NOTIFY-CONN-EMAIL-15-701 (TODO), NOTIFY-CONN-WEBHOOK-15-801 (TODO). Confirm prerequisites (internal: NOTIFY-ENGINE-15-303 (Wave 3)) before starting and report status in module TASKS.md. -- Team Notify Engine Guild: read EXECPLAN.md Wave 4 and SPRINTS.md rows for `src/StellaOps.Notify.Engine/TASKS.md`. Focus on NOTIFY-ENGINE-15-304 (TODO). Confirm prerequisites (internal: NOTIFY-ENGINE-15-303 (Wave 3)) before starting and report status in module TASKS.md. -- Team Notify Worker Guild: read EXECPLAN.md Wave 4 and SPRINTS.md rows for `src/StellaOps.Notify.Worker/TASKS.md`. Focus on NOTIFY-WORKER-15-204 (TODO). Confirm prerequisites (internal: NOTIFY-WORKER-15-203 (Wave 3)) before starting and report status in module TASKS.md. -- Team Scheduler Worker Guild: read EXECPLAN.md Wave 4 and SPRINTS.md rows for `src/StellaOps.Scheduler.Worker/TASKS.md`. Focus on SCHED-WORKER-16-204 (TODO). Confirm prerequisites (internal: SCHED-WORKER-16-203 (Wave 3)) before starting and report status in module TASKS.md. 
-- Team TBD: read EXECPLAN.md Wave 4 and SPRINTS.md rows for `src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md`. SCANNER-ANALYZERS-LANG-10-307D/G/P are DONE (latest 2025-10-23); remaining focus is SCANNER-ANALYZERS-LANG-10-307R (TODO). Confirm prerequisites (internal: SCANNER-ANALYZERS-LANG-10-303C (Wave 3), SCANNER-ANALYZERS-LANG-10-304C (Wave 3), SCANNER-ANALYZERS-LANG-10-305C (Wave 3), SCANNER-ANALYZERS-LANG-10-306C (Wave 3)) before progressing and report status in module TASKS.md. +- Team DevEx/CLI: read EXECPLAN.md Wave 4 and SPRINTS.md rows for `src/Cli/StellaOps.Cli/TASKS.md`. Focus on CLI-PLUGIN-13-007 (DONE 2025-10-22). Confirm prerequisites (internal: CLI-OFFLINE-13-006 (Wave 3), CLI-RUNTIME-13-005 (Wave 0)) before starting and report status in module TASKS.md. +- Team Excititor Connectors – Stella: read EXECPLAN.md Wave 4 and SPRINTS.md rows for `src/Excititor/StellaOps.Excititor.Connectors.StellaOpsMirror/TASKS.md`. Focus on EXCITITOR-CONN-STELLA-07-002 (TODO). Confirm prerequisites (internal: EXCITITOR-CONN-STELLA-07-001 (Wave 3)) before starting and report status in module TASKS.md. +- Team Notify Connectors Guild: read EXECPLAN.md Wave 4 and SPRINTS.md rows for `src/Notify/__Libraries/StellaOps.Notify.Connectors.Email/TASKS.md`, `src/Notify/__Libraries/StellaOps.Notify.Connectors.Slack/TASKS.md`, `src/Notify/__Libraries/StellaOps.Notify.Connectors.Teams/TASKS.md`, `src/Notify/__Libraries/StellaOps.Notify.Connectors.Webhook/TASKS.md`. Focus on NOTIFY-CONN-SLACK-15-501 (TODO), NOTIFY-CONN-TEAMS-15-601 (TODO), NOTIFY-CONN-EMAIL-15-701 (TODO), NOTIFY-CONN-WEBHOOK-15-801 (TODO). Confirm prerequisites (internal: NOTIFY-ENGINE-15-303 (Wave 3)) before starting and report status in module TASKS.md. +- Team Notify Engine Guild: read EXECPLAN.md Wave 4 and SPRINTS.md rows for `src/Notify/__Libraries/StellaOps.Notify.Engine/TASKS.md`. Focus on NOTIFY-ENGINE-15-304 (TODO). Confirm prerequisites (internal: NOTIFY-ENGINE-15-303 (Wave 3)) before starting and report status in module TASKS.md. +- Team Notify Worker Guild: read EXECPLAN.md Wave 4 and SPRINTS.md rows for `src/Notify/StellaOps.Notify.Worker/TASKS.md`. Focus on NOTIFY-WORKER-15-204 (TODO). Confirm prerequisites (internal: NOTIFY-WORKER-15-203 (Wave 3)) before starting and report status in module TASKS.md. +- Team Scheduler Worker Guild: read EXECPLAN.md Wave 4 and SPRINTS.md rows for `src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md`. Focus on SCHED-WORKER-16-204 (TODO). Confirm prerequisites (internal: SCHED-WORKER-16-203 (Wave 3)) before starting and report status in module TASKS.md. +- Team TBD: read EXECPLAN.md Wave 4 and SPRINTS.md rows for `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md`, `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md`, `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md`, `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md`. SCANNER-ANALYZERS-LANG-10-307D/G/P are DONE (latest 2025-10-23); remaining focus is SCANNER-ANALYZERS-LANG-10-307R (TODO). Confirm prerequisites (internal: SCANNER-ANALYZERS-LANG-10-303C (Wave 3), SCANNER-ANALYZERS-LANG-10-304C (Wave 3), SCANNER-ANALYZERS-LANG-10-305C (Wave 3), SCANNER-ANALYZERS-LANG-10-306C (Wave 3)) before progressing and report status in module TASKS.md. 
### Wave 5 - **Sprint 23-28** · StellaOps Console, Policy Studio, Graph Explorer - Team: Policy Registry Guild - - Path: `src/StellaOps.Policy.Registry/TASKS.md` + - Path: `src/Policy/StellaOps.Policy.Registry/TASKS.md` 1. [TODO] REGISTRY-API-27-001..010 — Deliver Registry service (OpenAPI, workspace storage, compile/sim integration, review workflow, publish/attest, promotion, telemetry, testing). Coordinate closely with Policy Engine, Scheduler, Authority, Console, CLI, Docs, and DevOps. - Team: Findings Ledger Guild - - Path: `src/StellaOps.Findings.Ledger/TASKS.md` + - Path: `src/Findings/StellaOps.Findings.Ledger/TASKS.md` 1. [TODO] LEDGER-29-001..009 — Stand up immutable ledger, projector, workflow handlers, hashing/Merkle anchoring, and deployment tooling powering Vuln Explorer. - Team: VEX Lens Guild - - Path: `src/StellaOps.VexLens/TASKS.md` + - Path: `src/VexLens/StellaOps.VexLens/TASKS.md` 1. [TODO] VEXLENS-30-001..011 — Build VEX normalization, mapping, trust weighting, consensus projection, APIs, simulation, telemetry, and deployment. - Team: Issuer Directory Guild - - Path: `src/StellaOps.IssuerDirectory/TASKS.md` + - Path: `src/IssuerDirectory/StellaOps.IssuerDirectory/TASKS.md` 1. [TODO] ISSUER-30-001..006 — Provide issuer/key management, trust overrides, integration with VEX Lens, telemetry, and deployment guidance. - Team: Advisory AI Guild - - Path: `src/StellaOps.AdvisoryAI/TASKS.md` + - Path: `src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md` 1. [TODO] AIAI-31-001..009 — Implement retrievers, deterministics, guardrails, APIs, telemetry, and deployment for Advisory AI summaries/conflict explain/remediation. - Team: Graph Indexer Guild - - Path: `src/StellaOps.Graph.Indexer/TASKS.md` + - Path: `src/Graph/StellaOps.Graph.Indexer/TASKS.md` 1. [TODO] GRAPH-INDEX-28-001..010 — Build graph ingestion (SBOM, advisory, VEX, policy overlays), snapshots, clustering, incremental updates, and deployment artifacts. Maintain deterministic identity + tenant isolation. - Team: Graph API Guild - - Path: `src/StellaOps.Graph.Api/TASKS.md` + - Path: `src/Graph/StellaOps.Graph.Api/TASKS.md` 1. [TODO] GRAPH-API-28-001..011 — Ship streaming query/search/paths/diff/export endpoints with cost enforcement, overlays, RBAC, telemetry, and deployment docs. - Team: Vuln Explorer API Guild - - Path: `src/StellaOps.VulnExplorer.Api/TASKS.md` + - Path: `src/VulnExplorer/StellaOps.VulnExplorer.Api/TASKS.md` 1. [TODO] VULN-API-29-001..011 — Provide policy-aware list/detail/workflow/simulation/export APIs atop the ledger with deterministic outputs and auditable telemetry. - Team: Console Guild - - Path: `src/StellaOps.Cli/TASKS.md` + - Path: `src/Cli/StellaOps.Cli/TASKS.md` 1. [TODO] CONSOLE-CORE-23-001..CONSOLE-REL-23-303, CONSOLE-DOC-23-501/502, TELEMETRY-CONSOLE-23-001 — Bootstrap the Next.js workspace, build shell/navigation, deliver feature modules (Dashboard, SBOM, Advisories/VEX, Findings, Policies, Runs, Reports, Admin, Downloads), wire telemetry, QA (Playwright, Storybook a11y, Lighthouse), release artifacts, and support docs/parity automation. Sequence: finish core scaffolding (23-001..005) before picking up feature modules; hold Reports/Downloads until backend export + manifest tasks signal ready. 2. [TODO] CONSOLE-STUDIO-27-001..007, CONSOLE-GRAPH-28-001..008, TELEMETRY-CONSOLE-27-001 — Deliver Policy Studio editor experience and Graph Explorer WebGL module (semantic zoom, overlays, diff, exports, saved queries, accessibility, telemetry). 3. 
[TODO] CONSOLE-VULN-29-001..007 — Ship Vuln Explorer UI enhancements (list/detail/workflow/simulation/export) with telemetry and accessibility. 4. [TODO] CONSOLE-VEX-30-001..005 — Provide VEX Lens console experience with quorum/conflict visualization and telemetry. 5. [TODO] CONSOLE-AIAI-31-001..005 — Build Advisory AI side panel (summary/conflict/remediation) with copy-as-ticket, a11y, and telemetry integration. - Team: BE-Base Platform Guild - - Path: `src/StellaOps.Web/TASKS.md` + - Path: `src/Web/StellaOps.Web/TASKS.md` 1. [TODO] WEB-CONSOLE-23-001..005 — Stand up `/console/*` aggregates, SSE proxy, export orchestrator, global search, and downloads manifest endpoints. Coordinate closely with Policy, Scheduler, Concelier, Excititor, SBOM services to validate payloads. 2. [TODO] WEB-GRAPH-24-001..004 — Route `/graph/*` APIs to Graph service, enforce scopes, provide overlay/export proxies, and aggregate telemetry. 3. [TODO] WEB-VULN-29-001..004 — Provide Vuln Explorer routing, ledger proxying, simulation/export orchestration, and telemetry. 4. [TODO] WEB-AIAI-31-001..003 — Route Advisory AI endpoints, batch orchestration, and telemetry/audit pipelines. - Team: Authority Core & Security Guild - - Path: `src/StellaOps.Authority/TASKS.md` + - Path: `src/Authority/StellaOps.Authority/TASKS.md` 1. [TODO] AUTH-CONSOLE-23-001..003 — Register Console OIDC client, expose tenant/profile endpoints, refresh security docs. PKCE + short-lived tokens must land before Console auth wiring can start. 2. [TODO] AUTH-POLICY-27-001..003, AUTH-GRAPH-21-001..003 — Roll out Policy Studio scopes + signing enforcement and ensure Graph scopes/RBAC stay in sync. 3. [TODO] AUTH-VULN-29-001..003 — Deliver Vuln Explorer scopes, CSRF enforcement, attachment signing, and documentation. 4. [TODO] AUTH-AIAI-31-001..002 — Define Advisory AI scopes/consent controls and enforce anonymized logging/audit flows. - Team: Policy Guild - - Path: `src/StellaOps.Policy.Engine/TASKS.md` + - Path: `src/Policy/StellaOps.Policy.Engine/TASKS.md` 1. [TODO] POLICY-CONSOLE-23-001/002, EXPORT-CONSOLE-23-001 — Optimize findings/explain APIs, expose simulation diff + approvals metadata, and deliver evidence bundle generator feeding Web gateway + Console Reports. 2. [TODO] POLICY-ENGINE-27-001..004, POLICY-ENGINE-30-001..003 — Provide Studio compile metadata, simulation enhancements, complexity limits, and graph overlay contracts/events. 3. [TODO] POLICY-ENGINE-29-001..004 — Supply batch evaluation/simulation for Vuln Explorer and consensus overlays with telemetry. 4. [TODO] POLICY-ENGINE-31-001..002 — Surface Advisory AI parameters and policy context endpoints consumed by the assistant. - Team: SBOM Service Guild - - Path: `src/StellaOps.SbomService/TASKS.md` + - Path: `src/SbomService/StellaOps.SbomService/TASKS.md` 1. [TODO] SBOM-CONSOLE-23-001/002 — Provide Console catalog + component lookup endpoints (filters, overlays, raw projections). Coordinate caching hints with Web + Console teams. 2. [TODO] SBOM-GRAPH-24-001..004 — Maintain graph node/edge collections, builders, diff events, and caches feeding Graph Explorer. 3. [TODO] SBOM-VULN-29-001/002 — Emit enriched inventory evidence (scope/runtime/path/safe versions) and resolver feeds for Vuln Explorer. 4. [TODO] SBOM-AIAI-31-001/002 — Deliver path/timeline APIs and telemetry for Advisory AI remediation hints. - Team: Concelier WebService Guild - - Path: `src/StellaOps.Concelier.WebService/TASKS.md` + - Path: `src/Concelier/StellaOps.Concelier.WebService/TASKS.md` 1. 
[TODO] CONCELIER-CONSOLE-23-001..003 — Deliver advisory aggregation views, delta metrics feed, and search helpers backing Dashboard/Search modules. 2. [TODO] CONCELIER-VULN-29-001..004 — Normalize advisory keys, expose raw evidence, publish safe fix hints, and instrument metrics for Vuln Explorer. 3. [TODO] CONCELIER-AIAI-31-001..003 — Provide paragraph anchors, structured fields, and telemetry required by Advisory AI. - Team: Excititor WebService Guild - - Path: `src/StellaOps.Excititor.WebService/TASKS.md` + - Path: `src/Excititor/StellaOps.Excititor.WebService/TASKS.md` 1. [TODO] EXCITITOR-CONSOLE-23-001..003 — Provide VEX aggregation, override deltas, and search helpers for Console UX. 2. [TODO] EXCITITOR-GRAPH-24-101/102 — Supply VEX summaries for Graph Explorer overlays and inspectors. 3. [TODO] EXCITITOR-VULN-29-001..004 — Canonicalize VEX keys, surface evidence APIs, suppression metadata, and telemetry for Vuln Explorer. 4. [TODO] EXCITITOR-AIAI-31-001..003 — Serve VEX chunks/justifications/signature metadata and telemetry for Advisory AI. - Team: Scheduler WebService Guild - - Path: `src/StellaOps.Scheduler.WebService/TASKS.md` + - Path: `src/Scheduler/StellaOps.Scheduler.WebService/TASKS.md` 1. [TODO] SCHED-CONSOLE-23-001 — Extend runs API with SSE progress stream, queue lag summaries, RBAC-gated actions. 2. [TODO] SCHED-CONSOLE-27-001/002, SCHED-WEB-21-001/002 — Surface policy batch sim orchestration and graph build/overlay monitoring endpoints. 3. [TODO] SCHED-VULN-29-001/002 — Provide resolver job APIs and lag metrics for Vulnerability Explorer recomputation. - Team: Scheduler Worker Guild - - Path: `src/StellaOps.Scheduler.Worker/TASKS.md` + - Path: `src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md` 1. [TODO] SCHED-WORKER-CONSOLE-23-201/202 — Publish run progress events and coordinate evidence bundle jobs consumed by Console + gateway. 2. [TODO] SCHED-WORKER-27-301..303, SCHED-WORKER-21-201..203 — Execute policy batch simulation sharding/reduction and graph build/overlay workers with telemetry + security controls. 3. [TODO] SCHED-WORKER-29-001..003 — Run vulnerability resolver/evaluation workers and monitoring to keep projections fresh. @@ -220,7 +220,7 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster 5. [TODO] DOCS-VEX-30-001..009 — Publish VEX Lens documentation set (overview, algorithm, issuer directory, APIs, console, policy trust model, mapping, signatures, runbooks). 6. [TODO] DOCS-AIAI-31-001..009 — Publish Advisory AI documentation suite (overview, architecture, APIs, console, CLI, policy parameters, guardrails, remediation heuristics, ops runbook). - Team: DevEx/CLI Guild - - Path: `src/StellaOps.Cli/TASKS.md` + - Path: `src/Cli/StellaOps.Cli/TASKS.md` 1. [TODO] CLI-POLICY-27-001..005 — Implement Policy Studio CLI lifecycle (init→lint→simulate→submit→approve→publish→promote/rollback), enhance simulation reporting, and update documentation with CI-friendly outputs. 2. [TODO] CLI-GRAPH-28-001..003 — Implement Graph Explorer CLI commands, saved query management, and updated docs/examples. 3. [TODO] CLI-VULN-29-001..006 — Deliver Vuln Explorer CLI commands (list/show/workflow/simulate/export) and documentation updates. @@ -228,45 +228,45 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster 5. [TODO] CLI-AIAI-31-001..004 — Implement Advisory AI CLI commands (`stella advise *`) with docs and tests. 2. 
[TODO] CLI-GRAPH-28-001..003 — Implement Graph Explorer CLI commands, saved query management, and updated docs/examples. 3. [TODO] CLI-VULN-29-001..006 — Deliver Vuln Explorer CLI commands (list/show/workflow/simulate/export) and documentation updates. -- Team Excititor Connectors – Stella: read EXECPLAN.md Wave 5 and SPRINTS.md rows for `src/StellaOps.Excititor.Connectors.StellaOpsMirror/TASKS.md`. Focus on EXCITITOR-CONN-STELLA-07-003 (TODO). Confirm prerequisites (internal: EXCITITOR-CONN-STELLA-07-002 (Wave 4)) before starting and report status in module TASKS.md. -- Team Notify Connectors Guild: read EXECPLAN.md Wave 5 and SPRINTS.md rows for `src/StellaOps.Notify.Connectors.Email/TASKS.md`, `src/StellaOps.Notify.Connectors.Slack/TASKS.md`, `src/StellaOps.Notify.Connectors.Teams/TASKS.md`, `src/StellaOps.Notify.Connectors.Webhook/TASKS.md`. Focus on NOTIFY-CONN-SLACK-15-502 (DONE), NOTIFY-CONN-TEAMS-15-602 (DONE), NOTIFY-CONN-EMAIL-15-702 (BLOCKED 2025-10-20), NOTIFY-CONN-WEBHOOK-15-802 (BLOCKED 2025-10-20). Confirm prerequisites (internal: NOTIFY-CONN-EMAIL-15-701 (Wave 4), NOTIFY-CONN-SLACK-15-501 (Wave 4), NOTIFY-CONN-TEAMS-15-601 (Wave 4), NOTIFY-CONN-WEBHOOK-15-801 (Wave 4)) before starting and report status in module TASKS.md. -- Team TBD: read EXECPLAN.md Wave 5 and SPRINTS.md rows for `src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md`. SCANNER-ANALYZERS-LANG-10-308D/G/P completed (2025-10-23/2025-10-22/2025-10-23); pending items are SCANNER-ANALYZERS-LANG-10-308R (TODO). Confirm prerequisites (internal: SCANNER-ANALYZERS-LANG-10-307D (Wave 4), SCANNER-ANALYZERS-LANG-10-307G (Wave 4), SCANNER-ANALYZERS-LANG-10-307P (Wave 4), SCANNER-ANALYZERS-LANG-10-307R (Wave 4)) before starting and report status in module TASKS.md. +- Team Excititor Connectors – Stella: read EXECPLAN.md Wave 5 and SPRINTS.md rows for `src/Excititor/StellaOps.Excititor.Connectors.StellaOpsMirror/TASKS.md`. Focus on EXCITITOR-CONN-STELLA-07-003 (TODO). Confirm prerequisites (internal: EXCITITOR-CONN-STELLA-07-002 (Wave 4)) before starting and report status in module TASKS.md. +- Team Notify Connectors Guild: read EXECPLAN.md Wave 5 and SPRINTS.md rows for `src/Notify/__Libraries/StellaOps.Notify.Connectors.Email/TASKS.md`, `src/Notify/__Libraries/StellaOps.Notify.Connectors.Slack/TASKS.md`, `src/Notify/__Libraries/StellaOps.Notify.Connectors.Teams/TASKS.md`, `src/Notify/__Libraries/StellaOps.Notify.Connectors.Webhook/TASKS.md`. Focus on NOTIFY-CONN-SLACK-15-502 (DONE), NOTIFY-CONN-TEAMS-15-602 (DONE), NOTIFY-CONN-EMAIL-15-702 (BLOCKED 2025-10-20), NOTIFY-CONN-WEBHOOK-15-802 (BLOCKED 2025-10-20). Confirm prerequisites (internal: NOTIFY-CONN-EMAIL-15-701 (Wave 4), NOTIFY-CONN-SLACK-15-501 (Wave 4), NOTIFY-CONN-TEAMS-15-601 (Wave 4), NOTIFY-CONN-WEBHOOK-15-801 (Wave 4)) before starting and report status in module TASKS.md. +- Team TBD: read EXECPLAN.md Wave 5 and SPRINTS.md rows for `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md`, `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md`, `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md`, `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md`. SCANNER-ANALYZERS-LANG-10-308D/G/P completed (2025-10-23/2025-10-22/2025-10-23); pending items are SCANNER-ANALYZERS-LANG-10-308R (TODO). 
Confirm prerequisites (internal: SCANNER-ANALYZERS-LANG-10-307D (Wave 4), SCANNER-ANALYZERS-LANG-10-307G (Wave 4), SCANNER-ANALYZERS-LANG-10-307P (Wave 4), SCANNER-ANALYZERS-LANG-10-307R (Wave 4)) before starting and report status in module TASKS.md.
### Wave 6
-- Team Notify Connectors Guild: read EXECPLAN.md Wave 6 and SPRINTS.md rows for `src/StellaOps.Notify.Connectors.Email/TASKS.md`, `src/StellaOps.Notify.Connectors.Slack/TASKS.md`, `src/StellaOps.Notify.Connectors.Teams/TASKS.md`, `src/StellaOps.Notify.Connectors.Webhook/TASKS.md`. Focus on NOTIFY-CONN-SLACK-15-503 (DONE), NOTIFY-CONN-TEAMS-15-603 (DONE), NOTIFY-CONN-EMAIL-15-703 (DONE), NOTIFY-CONN-WEBHOOK-15-803 (DONE). Confirm packaging outputs remain deterministic while upstream implementation tasks (15-702/802) stay blocked.
-- Team TBD: read EXECPLAN.md Wave 6 and SPRINTS.md rows for `src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md`, `src/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md`. SCANNER-ANALYZERS-LANG-10-309D/G/P completed (2025-10-23/2025-10-22/2025-10-23); remaining item is SCANNER-ANALYZERS-LANG-10-309R (TODO). Confirm prerequisites (internal: SCANNER-ANALYZERS-LANG-10-308D (Wave 5), SCANNER-ANALYZERS-LANG-10-308G (Wave 5), SCANNER-ANALYZERS-LANG-10-308P (Wave 5), SCANNER-ANALYZERS-LANG-10-308R (Wave 5)) before starting and report status in module TASKS.md.
+- Team Notify Connectors Guild: read EXECPLAN.md Wave 6 and SPRINTS.md rows for `src/Notify/__Libraries/StellaOps.Notify.Connectors.Email/TASKS.md`, `src/Notify/__Libraries/StellaOps.Notify.Connectors.Slack/TASKS.md`, `src/Notify/__Libraries/StellaOps.Notify.Connectors.Teams/TASKS.md`, `src/Notify/__Libraries/StellaOps.Notify.Connectors.Webhook/TASKS.md`. Focus on NOTIFY-CONN-SLACK-15-503 (DONE), NOTIFY-CONN-TEAMS-15-603 (DONE), NOTIFY-CONN-EMAIL-15-703 (DONE), NOTIFY-CONN-WEBHOOK-15-803 (DONE). Confirm packaging outputs remain deterministic while upstream implementation tasks (15-702/802) stay blocked.
+- Team TBD: read EXECPLAN.md Wave 6 and SPRINTS.md rows for `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md`, `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md`, `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md`, `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md`. SCANNER-ANALYZERS-LANG-10-309D/G/P completed (2025-10-23/2025-10-22/2025-10-23); remaining item is SCANNER-ANALYZERS-LANG-10-309R (TODO). Confirm prerequisites (internal: SCANNER-ANALYZERS-LANG-10-308D (Wave 5), SCANNER-ANALYZERS-LANG-10-308G (Wave 5), SCANNER-ANALYZERS-LANG-10-308P (Wave 5), SCANNER-ANALYZERS-LANG-10-308R (Wave 5)) before starting and report status in module TASKS.md.
### Wave 7
-- Team Team Core Engine & Storage Analytics: read EXECPLAN.md Wave 7 and SPRINTS.md rows for `src/StellaOps.Concelier.Core/TASKS.md`. Focus on FEEDCORE-ENGINE-07-001 (DONE 2025-10-19). Confirm prerequisites (internal: FEEDSTORAGE-DATA-07-001 (Wave 10)) before starting and report status in module TASKS.md.
+- Team Team Core Engine & Storage Analytics: read EXECPLAN.md Wave 7 and SPRINTS.md rows for `src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md`. Focus on FEEDCORE-ENGINE-07-001 (DONE 2025-10-19). Confirm prerequisites (internal: FEEDSTORAGE-DATA-07-001 (Wave 10)) before starting and report status in module TASKS.md.
### Wave 8
-- Team Team Core Engine & Data Science: read EXECPLAN.md Wave 8 and SPRINTS.md rows for `src/StellaOps.Concelier.Core/TASKS.md`. Focus on FEEDCORE-ENGINE-07-002 (DONE 2025-10-21). Confirm prerequisites (internal: FEEDCORE-ENGINE-07-001 (Wave 7)) before starting and report status in module TASKS.md.
+- Team Team Core Engine & Data Science: read EXECPLAN.md Wave 8 and SPRINTS.md rows for `src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md`. Focus on FEEDCORE-ENGINE-07-002 (DONE 2025-10-21). Confirm prerequisites (internal: FEEDCORE-ENGINE-07-001 (Wave 7)) before starting and report status in module TASKS.md.
### Wave 9
-- Team Team Core Engine & Storage Analytics: read EXECPLAN.md Wave 9 and SPRINTS.md rows for `src/StellaOps.Concelier.Core/TASKS.md`. FEEDCORE-ENGINE-07-003 marked DONE (2025-10-21); share ledger heuristics with Policy when integrating confidence decay.
+- Team Team Core Engine & Storage Analytics: read EXECPLAN.md Wave 9 and SPRINTS.md rows for `src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md`. FEEDCORE-ENGINE-07-003 marked DONE (2025-10-21); share ledger heuristics with Policy when integrating confidence decay.
### Wave 10
-- Team Team Normalization & Storage Backbone: read EXECPLAN.md Wave 10 and SPRINTS.md rows for `src/StellaOps.Concelier.Storage.Mongo/TASKS.md`. Focus on FEEDSTORAGE-DATA-07-001 (DONE 2025-10-19). Confirm prerequisites (internal: FEEDMERGE-ENGINE-07-001 (Wave 11)) before starting and report status in module TASKS.md.
+- Team Team Normalization & Storage Backbone: read EXECPLAN.md Wave 10 and SPRINTS.md rows for `src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/TASKS.md`. Focus on FEEDSTORAGE-DATA-07-001 (DONE 2025-10-19). Confirm prerequisites (internal: FEEDMERGE-ENGINE-07-001 (Wave 11)) before starting and report status in module TASKS.md.
### Wave 11 — 48 task(s) ready after Wave 10
- **Sprint 25** · Exceptions v1
- Team: Policy Guild
- - Paths: `src/StellaOps.Policy/TASKS.md`, `src/StellaOps.Policy.Engine/TASKS.md`
+ - Paths: `src/Policy/__Libraries/StellaOps.Policy/TASKS.md`, `src/Policy/StellaOps.Policy.Engine/TASKS.md`
1. [TODO] POLICY-EXC-25-001, POLICY-ENGINE-70-001..005 — SPL updates, evaluation layer, storage, cache, observability, worker hooks.
- Team: BE-Base Platform Guild
- - Path: `src/StellaOps.Web/TASKS.md`
+ - Path: `src/Web/StellaOps.Web/TASKS.md`
1. [TODO] WEB-EXC-25-001..003 — Exceptions API workflow, policy integration, events/notifications.
- Team: UI Guild
- - Path: `src/StellaOps.UI/TASKS.md`
+ - Path: `src/UI/StellaOps.UI/TASKS.md`
1. [TODO] UI-EXC-25-001..005 — Exception Center, creation wizard, inline flows, badges, accessibility.
- Team: DevEx/CLI Guild
- - Path: `src/StellaOps.Cli/TASKS.md`
+ - Path: `src/Cli/StellaOps.Cli/TASKS.md`
1. [TODO] CLI-EXC-25-001/002 — CLI workflow commands and simulation overrides.
- Team: Authority Core & Security Guild
- - Path: `src/StellaOps.Authority/TASKS.md`
+ - Path: `src/Authority/StellaOps.Authority/TASKS.md`
1. [TODO] AUTH-EXC-25-001/002 — Exception scopes, routing matrix, docs.
- Team: Scheduler Worker Guild
- - Path: `src/StellaOps.Scheduler.Worker/TASKS.md`
+ - Path: `src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md`
1. [TODO] SCHED-WORKER-25-101/102 — Exception lifecycle + expiring notification jobs.
- Team: Docs Guild
- Path: `docs/TASKS.md`
@@ -275,30 +275,30 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster
- Path: `ops/devops/TASKS.md`
1. [TODO] (future) exception monitoring/notifications integration if needed (track under DEVOPS-LNM-22-003 extension).
-- Team BE-Merge: read EXECPLAN.md Wave 11 and SPRINTS.md rows for `src/StellaOps.Concelier.Merge/TASKS.md`. FEEDMERGE-ENGINE-07-001 marked DONE (2025-10-20); share conflict explainer rollout notes with Storage before Wave 10 resumes.
+- Team BE-Merge: read EXECPLAN.md Wave 11 and SPRINTS.md rows for `src/Concelier/__Libraries/StellaOps.Concelier.Merge/TASKS.md`. FEEDMERGE-ENGINE-07-001 marked DONE (2025-10-20); share conflict explainer rollout notes with Storage before Wave 10 resumes.
### Wave 12 — 40 task(s) ready after Wave 11
- **Sprint 26** · Reachability v1
- Team: Signals Guild
- - Path: `src/StellaOps.Signals/TASKS.md`
+ - Path: `src/Signals/StellaOps.Signals/TASKS.md`
1. [TODO] SIGNALS-24-001..005 — Signals service API, parsers, runtime ingest, scoring, caching/events.
- Team: Policy Guild
- - Paths: `src/StellaOps.Policy/TASKS.md`, `src/StellaOps.Policy.Engine/TASKS.md`
+ - Paths: `src/Policy/__Libraries/StellaOps.Policy/TASKS.md`, `src/Policy/StellaOps.Policy.Engine/TASKS.md`
1. [TODO] POLICY-SPL-24-001, POLICY-ENGINE-80-001..004 — SPL updates, evaluation integration, cache optimization, metrics.
- Team: BE-Base Platform Guild
- - Path: `src/StellaOps.Web/TASKS.md`
+ - Path: `src/Web/StellaOps.Web/TASKS.md`
1. [TODO] WEB-SIG-26-001..003 — Signals endpoints, reachability joins, simulation overrides.
- Team: UI Guild
- - Path: `src/StellaOps.UI/TASKS.md`
+ - Path: `src/UI/StellaOps.UI/TASKS.md`
1. [TODO] UI-SIG-26-001..004 — Reachability columns/overlays, explain drawer, center.
- Team: DevEx/CLI Guild
- - Path: `src/StellaOps.Cli/TASKS.md`
+ - Path: `src/Cli/StellaOps.Cli/TASKS.md`
1. [TODO] CLI-SIG-26-001/002 — CLI commands for reachability upload/list/simulate.
- Team: Authority Core
- - Path: `src/StellaOps.Authority/TASKS.md`
+ - Path: `src/Authority/StellaOps.Authority/TASKS.md`
1. [TODO] AUTH-SIG-26-001 — Signals scopes/roles with AOC requirements.
- Team: Scheduler Worker Guild
- - Path: `src/StellaOps.Scheduler.Worker/TASKS.md`
+ - Path: `src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md`
1. [TODO] SCHED-WORKER-26-201/202 — Reachability joiner and staleness monitor jobs.
- Team: DevOps Guild
- Path: `ops/devops/TASKS.md`
@@ -307,56 +307,56 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster
- Path: `docs/TASKS.md`
1. [TODO] DOCS-SIG-26-001..008 — Reachability concepts, formats, runtime, policy weighting, UI, CLI, API, migration docs.
- Team: Concelier/Excititor Guilds
- - Paths: `src/StellaOps.Concelier.Core/TASKS.md`, `src/StellaOps.Excititor.Core/TASKS.md`
+ - Paths: `src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md`, `src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md`
1. [TODO] CONCELIER-SIG-26-001, EXCITITOR-SIG-26-001 — Provide symbol/exploitability metadata to Signals.
- Team: Bench Guild
- - Path: `src/StellaOps.Bench/TASKS.md`
+ - Path: `src/Bench/StellaOps.Bench/TASKS.md`
1. [TODO] BENCH-SIG-26-001/002 — Performance benchmarks for Signals and policy evaluation overhead.
-- Team Concelier Export Guild: read EXECPLAN.md Wave 12 and SPRINTS.md rows for `src/StellaOps.Concelier.Exporter.Json/TASKS.md`. Focus on CONCELIER-EXPORT-08-201 (TODO). Confirm prerequisites (internal: FEEDCORE-ENGINE-07-001 (Wave 7)) before starting and report status in module TASKS.md.
+- Team Concelier Export Guild: read EXECPLAN.md Wave 12 and SPRINTS.md rows for `src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Exporter.Json/TASKS.md`. Focus on CONCELIER-EXPORT-08-201 (TODO). Confirm prerequisites (internal: FEEDCORE-ENGINE-07-001 (Wave 7)) before starting and report status in module TASKS.md.
### Wave 13
-- Team Concelier Export Guild: read EXECPLAN.md Wave 13 and SPRINTS.md rows for `src/StellaOps.Concelier.Exporter.TrivyDb/TASKS.md`. Focus on CONCELIER-EXPORT-08-202 (DONE 2025-10-19). Confirm prerequisites (internal: CONCELIER-EXPORT-08-201 (Wave 12)) before starting and report status in module TASKS.md.
+- Team Concelier Export Guild: read EXECPLAN.md Wave 13 and SPRINTS.md rows for `src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Exporter.TrivyDb/TASKS.md`. Focus on CONCELIER-EXPORT-08-202 (DONE 2025-10-19). Confirm prerequisites (internal: CONCELIER-EXPORT-08-201 (Wave 12)) before starting and report status in module TASKS.md.
### Wave 14
-- Team Concelier WebService Guild: read EXECPLAN.md Wave 14 and SPRINTS.md rows for `src/StellaOps.Concelier.WebService/TASKS.md`. CONCELIER-WEB-08-201 closed (2025-10-20); coordinate with DevOps for mirror smoke before promoting to stable.
+- Team Concelier WebService Guild: read EXECPLAN.md Wave 14 and SPRINTS.md rows for `src/Concelier/StellaOps.Concelier.WebService/TASKS.md`. CONCELIER-WEB-08-201 closed (2025-10-20); coordinate with DevOps for mirror smoke before promoting to stable.
### Wave 15
-- Team BE-Conn-Stella: read EXECPLAN.md Wave 15 and SPRINTS.md rows for `src/StellaOps.Concelier.Connector.StellaOpsMirror/TASKS.md`. Focus on FEEDCONN-STELLA-08-001 (DONE 2025-10-20). Confirm prerequisites (internal: CONCELIER-EXPORT-08-201 (Wave 12)) before starting and report status in module TASKS.md.
+- Team BE-Conn-Stella: read EXECPLAN.md Wave 15 and SPRINTS.md rows for `src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/TASKS.md`. Focus on FEEDCONN-STELLA-08-001 (DONE 2025-10-20). Confirm prerequisites (internal: CONCELIER-EXPORT-08-201 (Wave 12)) before starting and report status in module TASKS.md.
### Wave 16
-- Team BE-Conn-Stella: read EXECPLAN.md Wave 16 and SPRINTS.md rows for `src/StellaOps.Concelier.Connector.StellaOpsMirror/TASKS.md`. FEEDCONN-STELLA-08-002 completed (2025-10-20) with canonical DTO mapper + provenance fixtures.
+- Team BE-Conn-Stella: read EXECPLAN.md Wave 16 and SPRINTS.md rows for `src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/TASKS.md`. FEEDCONN-STELLA-08-002 completed (2025-10-20) with canonical DTO mapper + provenance fixtures.
### Wave 17
-- Team BE-Conn-Stella: read EXECPLAN.md Wave 17 and SPRINTS.md rows for `src/StellaOps.Concelier.Connector.StellaOpsMirror/TASKS.md`. Focus on FEEDCONN-STELLA-08-003 (TODO). Confirm prerequisites (internal: FEEDCONN-STELLA-08-002 (Wave 16)) before starting and report status in module TASKS.md.
+- Team BE-Conn-Stella: read EXECPLAN.md Wave 17 and SPRINTS.md rows for `src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/TASKS.md`. Focus on FEEDCONN-STELLA-08-003 (TODO). Confirm prerequisites (internal: FEEDCONN-STELLA-08-002 (Wave 16)) before starting and report status in module TASKS.md.
## Wave 0 — 98 task(s) ready now - **Sprint 1** · Backlog - Team: UX Specialist, Angular Eng - - Path: `src/StellaOps.Web/TASKS.md` + - Path: `src/Web/StellaOps.Web/TASKS.md` • Prereqs: WEB1.TRIVY-SETTINGS • Current: DONE (2025-10-21) – ChromeHeadless launcher + README updates merged; dependency hardening completed via WEB1.DEPS-13-001. • Prereqs: WEB1.TRIVY-SETTINGS-TESTS • Current: DONE (2025-10-21) – Lockfile generated via `npm ci`, Chromium auto-detection/verification scripts added, and deterministic install guide published for offline runners. - **Sprint 1** · Developer Tooling - Team: DevEx/CLI - - Path: `src/StellaOps.Cli/TASKS.md` + - Path: `src/Cli/StellaOps.Cli/TASKS.md` 1. [TODO] EXCITITOR-CLI-01-002 — EXCITITOR-CLI-01-002 – Export download & attestation UX • Prereqs: EXCITITOR-CLI-01-001 (external/completed), EXCITITOR-EXPORT-01-001 (external/completed) • Current: TODO – Display export metadata (sha256, size, Rekor link), support optional artifact download path, and handle cache hits gracefully. - Team: Docs/CLI - - Path: `src/StellaOps.Cli/TASKS.md` + - Path: `src/Cli/StellaOps.Cli/TASKS.md` 1. [TODO] EXCITITOR-CLI-01-003 — EXCITITOR-CLI-01-003 – CLI docs & examples for Excititor • Prereqs: EXCITITOR-CLI-01-001 (external/completed) • Current: TODO – Update docs/09_API_CLI_REFERENCE.md and quickstart snippets to cover Excititor verbs, offline guidance, and attestation verification workflow. - **Sprint 1** · Stabilize In-Progress Foundations - Team: Team Connector Resumption – CERT/RedHat - - Path: `src/StellaOps.Concelier.Connector.Distro.RedHat/TASKS.md` - 1. [DOING] FEEDCONN-REDHAT-02-001 — Fixture validation sweep — Instructions to work: — Regenerating RHSA fixtures awaits remaining range provenance patches; review snapshot diffs and update docs once upstream helpers land. Conflict resolver deltas logged in src/StellaOps.Concelier.Connector.Distro.RedHat/CONFLICT_RESOLVER_NOTES.md for Sprint 3 consumers. + - Path: `src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Distro.RedHat/TASKS.md` + 1. [DOING] FEEDCONN-REDHAT-02-001 — Fixture validation sweep — Instructions to work: — Regenerating RHSA fixtures awaits remaining range provenance patches; review snapshot diffs and update docs once upstream helpers land. Conflict resolver deltas logged in src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Distro.RedHat/CONFLICT_RESOLVER_NOTES.md for Sprint 3 consumers. • Prereqs: — • Current: DOING (2025-10-10) - Team: Team WebService & Authority - - Path: `src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/TASKS.md` + - Path: `src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/TASKS.md` 1. [DOING] SEC2.PLG — Emit audit events from password verification outcomes and persist via `IAuthorityLoginAttemptStore`; Serilog enrichment complete, storage durability tests in flight. • Prereqs: — • Current: DOING (2025-10-14) @@ -375,7 +375,7 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster 6. [REVIEW] PLG7.RFC — Socialize LDAP plugin RFC and capture guild feedback; awaiting final review sign-off and follow-up issue tracking. • Prereqs: — • Current: REVIEW (2025-10-13) - - Path: `src/StellaOps.Concelier.WebService/TASKS.md` + - Path: `src/Concelier/StellaOps.Concelier.WebService/TASKS.md` 1. [DOING] FEEDWEB-DOCS-01-001 — Document authority toggle & scope requirements — Quickstart updates are staged; awaiting Docs guild review before publishing operator guide refresh. 
• Prereqs: — • Current: DOING (2025-10-10) @@ -389,69 +389,69 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster • Prereqs: — • Current: REVIEW - Team: Team Merge & QA Enforcement - - Path: `src/StellaOps.Concelier.Merge/TASKS.md` + - Path: `src/Concelier/__Libraries/StellaOps.Concelier.Merge/TASKS.md` 1. [DOING] FEEDMERGE-COORD-02-900 — Range primitives rollout coordination — Coordinate remaining connectors (`Acsc`, `Cccs`, `CertBund`, `CertCc`, `Cve`, `Ghsa`, `Ics.Cisa`, `Kisa`, `Ru.Bdu`, `Ru.Nkcki`, `Vndr.Apple`, `Vndr.Cisco`, `Vndr.Msrc`) to emit canonical range primitives with provenance tags; fixtures tracked in `RANGE_PRIMITIVES_COORDINATION.md`. • Prereqs: — • Current: DOING (2025-10-20) – Coordination docs refreshed with connector due dates (Cccs/Cisco 2025-10-21, CertBund 2025-10-22, ICS-CISA 2025-10-23, KISA 2025-10-24); escalation plan defined if deadlines slip. - **Sprint 3** · Backlog - Team: Tools Guild, BE-Conn-MSRC - - Path: `src/StellaOps.Concelier.Connector.Common/TASKS.md` + - Path: `src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Common/TASKS.md` 1. [**TODO] FEEDCONN-SHARED-STATE-003 — FEEDCONN-SHARED-STATE-003 Source state seeding helper • Prereqs: — • Current: **TODO (2025-10-15)** – Provide a reusable CLI/utility to seed `pendingDocuments`/`pendingMappings` for connectors (MSRC backfills require scripted CVRF + detail injection). Coordinate with MSRC team for expected JSON schema and handoff once prototype lands. - **Sprint 5** · Excititor Core Foundations - Team: Team Excititor Attestation - - Path: `src/StellaOps.Excititor.Attestation/TASKS.md` + - Path: `src/Excititor/__Libraries/StellaOps.Excititor.Attestation/TASKS.md` 1. [TODO] EXCITITOR-ATTEST-01-003 — EXCITITOR-ATTEST-01-003 – Verification suite & observability • Prereqs: EXCITITOR-ATTEST-01-002 (external/completed) • Current: TODO – Add verification helpers for Worker/WebService, metrics/logging hooks, and negative-path regression tests. - Team: Team Excititor WebService - - Path: `src/StellaOps.Excititor.WebService/TASKS.md` + - Path: `src/Excititor/StellaOps.Excititor.WebService/TASKS.md` 2. [TODO] EXCITITOR-WEB-01-003 — EXCITITOR-WEB-01-003 – Export & verify endpoints • Prereqs: EXCITITOR-WEB-01-001 (external/completed), EXCITITOR-EXPORT-01-001 (external/completed), EXCITITOR-ATTEST-01-001 (external/completed) • Current: TODO – Add `/excititor/export`, `/excititor/export/{id}`, `/excititor/export/{id}/download`, `/excititor/verify`, returning artifact + attestation metadata with cache awareness. - **Sprint 6** · Excititor Ingest & Formats - Team: Team Excititor Connectors – Cisco - - Path: `src/StellaOps.Excititor.Connectors.Cisco.CSAF/TASKS.md` + - Path: `src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Cisco.CSAF/TASKS.md` 1. [TODO] EXCITITOR-CONN-CISCO-01-003 — EXCITITOR-CONN-CISCO-01-003 – Provider trust metadata • Prereqs: EXCITITOR-CONN-CISCO-01-002 (external/completed), EXCITITOR-POLICY-01-001 (external/completed) • Current: TODO – Emit cosign/PGP trust metadata and advisory provenance hints for policy weighting. - Team: Team Excititor Connectors – MSRC - - Path: `src/StellaOps.Excititor.Connectors.MSRC.CSAF/TASKS.md` + - Path: `src/Excititor/__Libraries/StellaOps.Excititor.Connectors.MSRC.CSAF/TASKS.md` 1. 
[TODO] EXCITITOR-CONN-MS-01-002 — EXCITITOR-CONN-MS-01-002 – CSAF download pipeline • Prereqs: EXCITITOR-CONN-MS-01-001 (external/completed), EXCITITOR-STORAGE-01-003 (external/completed) • Current: TODO – Fetch CSAF packages with retry/backoff, checksum verification, and raw document persistence plus quarantine for schema failures. - Team: Team Excititor Connectors – Oracle - - Path: `src/StellaOps.Excititor.Connectors.Oracle.CSAF/TASKS.md` + - Path: `src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Oracle.CSAF/TASKS.md` 1. [DOING] EXCITITOR-CONN-ORACLE-01-001 — EXCITITOR-CONN-ORACLE-01-001 – Oracle CSAF catalogue discovery • Prereqs: EXCITITOR-CONN-ABS-01-001 (external/completed) • Current: DOING (2025-10-17) – Implement catalogue discovery, CPU calendar awareness, and offline snapshot import for Oracle CSAF feeds. - Team: Team Excititor Connectors – SUSE - - Path: `src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/TASKS.md` + - Path: `src/Excititor/__Libraries/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/TASKS.md` 1. [TODO] EXCITITOR-CONN-SUSE-01-002 — EXCITITOR-CONN-SUSE-01-002 – Checkpointed event ingestion • Prereqs: EXCITITOR-CONN-SUSE-01-001 (external/completed), EXCITITOR-STORAGE-01-003 (external/completed) • Current: TODO – Process hub events with resume checkpoints, deduplication, and quarantine path for malformed payloads. - Team: Team Excititor Connectors – Ubuntu - - Path: `src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/TASKS.md` + - Path: `src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Ubuntu.CSAF/TASKS.md` 1. [TODO] EXCITITOR-CONN-UBUNTU-01-002 — EXCITITOR-CONN-UBUNTU-01-002 – Incremental fetch & deduplication • Prereqs: EXCITITOR-CONN-UBUNTU-01-001 (external/completed), EXCITITOR-STORAGE-01-003 (external/completed) • Current: TODO – Fetch CSAF bundles with ETag handling, checksum validation, deduplication, and raw persistence. - Team: Team Excititor Formats - - Path: `src/StellaOps.Excititor.Formats.CSAF/TASKS.md` + - Path: `src/Excititor/__Libraries/StellaOps.Excititor.Formats.CSAF/TASKS.md` 1. [TODO] EXCITITOR-FMT-CSAF-01-002 — EXCITITOR-FMT-CSAF-01-002 – Status/justification mapping • Prereqs: EXCITITOR-FMT-CSAF-01-001 (external/completed), EXCITITOR-POLICY-01-001 (external/completed) • Current: TODO – Normalize CSAF `product_status` + `justification` values into policy-aware enums with audit diagnostics for unsupported codes. 2. [TODO] EXCITITOR-FMT-CSAF-01-003 — EXCITITOR-FMT-CSAF-01-003 – CSAF export adapter • Prereqs: EXCITITOR-EXPORT-01-001 (external/completed), EXCITITOR-FMT-CSAF-01-001 (external/completed) • Current: TODO – Provide CSAF export writer producing deterministic documents (per vuln/product) and manifest metadata for attestation. - - Path: `src/StellaOps.Excititor.Formats.CycloneDX/TASKS.md` + - Path: `src/Excititor/__Libraries/StellaOps.Excititor.Formats.CycloneDX/TASKS.md` 1. [TODO] EXCITITOR-FMT-CYCLONE-01-002 — EXCITITOR-FMT-CYCLONE-01-002 – Component reference reconciliation • Prereqs: EXCITITOR-FMT-CYCLONE-01-001 (external/completed) • Current: TODO – Implement helpers to reconcile component/service references against policy expectations and emit diagnostics for missing SBOM links. 2. [TODO] EXCITITOR-FMT-CYCLONE-01-003 — EXCITITOR-FMT-CYCLONE-01-003 – CycloneDX export serializer • Prereqs: EXCITITOR-EXPORT-01-001 (external/completed), EXCITITOR-FMT-CYCLONE-01-001 (external/completed) • Current: TODO – Provide exporters producing CycloneDX VEX output with canonical ordering and hash-stable manifests. 
- - Path: `src/StellaOps.Excititor.Formats.OpenVEX/TASKS.md` + - Path: `src/Excititor/__Libraries/StellaOps.Excititor.Formats.OpenVEX/TASKS.md` 1. [TODO] EXCITITOR-FMT-OPENVEX-01-002 — EXCITITOR-FMT-OPENVEX-01-002 – Statement merge utilities • Prereqs: EXCITITOR-FMT-OPENVEX-01-001 (external/completed) • Current: TODO – Add reducers merging multiple OpenVEX statements, resolving conflicts deterministically, and emitting policy diagnostics. @@ -461,7 +461,7 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster - **Sprint 7** · Contextual Truth Foundations - Team: Team Excititor Export - - Path: `src/StellaOps.Excititor.Export/TASKS.md` + - Path: `src/Excititor/__Libraries/StellaOps.Excititor.Export/TASKS.md` • Prereqs: EXCITITOR-EXPORT-01-004 (external/completed), EXCITITOR-CORE-02-001 (external/completed) • Current: TODO – Emit consensus+score envelopes in export manifests, include policy/scoring digests, and update offline bundle/ORAS layouts to carry signed VEX responses. @@ -474,12 +474,12 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster • Current: TODO - **Sprint 10** · Backlog - Team: TBD - - Path: `src/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md` + - Path: `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md` • Prereqs: SCANNER-ANALYZERS-LANG-10-302B (external/completed) • Current: DONE — Telemetry counter wired, lifecycle script evidence emitted; see Node analyzer fixtures. - **Sprint 10** · Scanner Analyzers & SBOM - Team: Diff Guild - - Path: `src/StellaOps.Scanner.Diff/TASKS.md` + - Path: `src/Scanner/__Libraries/StellaOps.Scanner.Diff/TASKS.md` • Prereqs: — • Current: DONE — Diff engine produces deterministic add/remove/version deltas; regression suite covers warm/cold path parity. • Prereqs: — @@ -487,7 +487,7 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster • Prereqs: — • Current: DONE — JSON serializer emits stable ordering; golden outputs locked in tests. - Team: Emit Guild - - Path: `src/StellaOps.Scanner.Emit/TASKS.md` + - Path: `src/Scanner/__Libraries/StellaOps.Scanner.Emit/TASKS.md` • Prereqs: — • Current: DONE — Inventory builder validated against CycloneDX schema; deterministic fixtures added. • Prereqs: — @@ -501,7 +501,7 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster • Prereqs: — • Current: DONE — EntryTrace usage bits round-trip in BOM Index; regression harness verified. - Team: EntryTrace Guild - - Path: `src/StellaOps.Scanner.EntryTrace/TASKS.md` + - Path: `src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/TASKS.md` • Prereqs: — • Current: DONE — Parser emits stable AST; determinism tests captured. • Prereqs: — @@ -517,10 +517,10 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster • Prereqs: — • Current: DONE — Plug-in manifests under `plugins/scanner/entrytrace`; restart-only guard documented. - Team: Language Analyzer Guild - - Path: `src/StellaOps.Scanner.Analyzers.Lang/SPRINTS_LANG_IMPLEMENTATION_PLAN.md` + - Path: `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/SPRINTS_LANG_IMPLEMENTATION_PLAN.md` • Prereqs: — • Current: DONE — Implementation plan captured per language with progress notes through 2025-10-22. - - Path: `src/StellaOps.Scanner.Analyzers.Lang/TASKS.md` + - Path: `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/TASKS.md` • Prereqs: — • Current: DONE — Java analyzer shipped with deterministic fixtures. 
• Prereqs: — @@ -529,13 +529,13 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster • Current: DONE — Determinism harness + fixtures checked in; CI guard active. - **Sprint 13** · UX & CLI Experience - Team: DevEx/CLI - - Path: `src/StellaOps.Cli/TASKS.md` + - Path: `src/Cli/StellaOps.Cli/TASKS.md` 1. [TODO] CLI-RUNTIME-13-005 — Add runtime policy test verbs that consume `/policy/runtime` and display verdicts. • Prereqs: — • Current: TODO - **Sprint 15** · Notify Foundations - Team: Notify Models Guild - - Path: `src/StellaOps.Notify.Models/TASKS.md` + - Path: `src/Notify/__Libraries/StellaOps.Notify.Models/TASKS.md` 1. [TODO] NOTIFY-MODELS-15-101 — Define core Notify DTOs, validation helpers, canonical serialization. • Prereqs: — • Current: TODO @@ -546,7 +546,7 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster • Prereqs: — • Current: TODO - Team: Notify Storage Guild - - Path: `src/StellaOps.Notify.Storage.Mongo/TASKS.md` + - Path: `src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/TASKS.md` 1. [TODO] NOTIFY-STORAGE-15-201 — Mongo schemas/indexes for rules, channels, deliveries, digests, locks, audit. • Prereqs: — • Current: TODO @@ -557,7 +557,7 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster • Prereqs: — • Current: TODO - Team: Notify WebService Guild - - Path: `src/StellaOps.Notify.WebService/TASKS.md` + - Path: `src/Notify/StellaOps.Notify.WebService/TASKS.md` 1. [TODO] NOTIFY-WEB-15-101 — Minimal API host with Authority enforcement and plug-in loading. • Prereqs: — • Current: TODO @@ -565,18 +565,18 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster • Prereqs: — • Current: TODO - Team: Scanner WebService Guild - - Path: `src/StellaOps.Scanner.WebService/TASKS.md` + - Path: `src/Scanner/StellaOps.Scanner.WebService/TASKS.md` 2. [BLOCKED] SCANNER-EVENTS-16-301 — Redis publisher integration tests once Notify queue adapter ships. • Current: BLOCKED – waiting on Notify queue abstraction and Redis adapter deliverables for end-to-end validation. - **Sprint 16** · Scheduler Intelligence - Team: Scheduler Storage Guild - - Path: `src/StellaOps.Scheduler.Storage.Mongo/TASKS.md` + - Path: `src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/TASKS.md` 1. [TODO] SCHED-STORAGE-16-201 — Create Mongo collections (schedules, runs, impact_cursors, locks, audit) with indexes/migrations per architecture. • Prereqs: SCHED-MODELS-16-101 (external/completed) • Current: TODO - Team: Scheduler WebService Guild - - Path: `src/StellaOps.Scheduler.WebService/TASKS.md` + - Path: `src/Scheduler/StellaOps.Scheduler.WebService/TASKS.md` 1. [TODO] SCHED-WEB-16-101 — Bootstrap Minimal API host with Authority OpTok + DPoP, health endpoints, plug-in discovery per architecture §§1–2. • Prereqs: SCHED-MODELS-16-101 (external/completed) • Current: TODO @@ -601,59 +601,59 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster ## Wave 1 — 45 task(s) ready after Wave 0 - **Sprint 6** · Excititor Ingest & Formats - Team: Team Excititor Connectors – MSRC - - Path: `src/StellaOps.Excititor.Connectors.MSRC.CSAF/TASKS.md` + - Path: `src/Excititor/__Libraries/StellaOps.Excititor.Connectors.MSRC.CSAF/TASKS.md` 1. 
[TODO] EXCITITOR-CONN-MS-01-003 — EXCITITOR-CONN-MS-01-003 – Trust metadata & provenance hints • Prereqs: EXCITITOR-CONN-MS-01-002 (Wave 0), EXCITITOR-POLICY-01-001 (external/completed) • Current: TODO – Emit cosign/AAD issuer metadata, attach provenance details, and document policy integration. - Team: Team Excititor Connectors – Oracle - - Path: `src/StellaOps.Excititor.Connectors.Oracle.CSAF/TASKS.md` + - Path: `src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Oracle.CSAF/TASKS.md` 1. [TODO] EXCITITOR-CONN-ORACLE-01-002 — EXCITITOR-CONN-ORACLE-01-002 – CSAF download & dedupe pipeline • Prereqs: EXCITITOR-CONN-ORACLE-01-001 (Wave 0), EXCITITOR-STORAGE-01-003 (external/completed) • Current: TODO – Fetch CSAF documents with retry/backoff, checksum validation, revision deduplication, and raw persistence. - Team: Team Excititor Connectors – SUSE - - Path: `src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/TASKS.md` + - Path: `src/Excititor/__Libraries/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/TASKS.md` 1. [TODO] EXCITITOR-CONN-SUSE-01-003 — EXCITITOR-CONN-SUSE-01-003 – Trust metadata & policy hints • Prereqs: EXCITITOR-CONN-SUSE-01-002 (Wave 0), EXCITITOR-POLICY-01-001 (external/completed) • Current: TODO – Emit provider trust configuration (signers, weight overrides) and attach provenance hints for consensus engine. - Team: Team Excititor Connectors – Ubuntu - - Path: `src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/TASKS.md` + - Path: `src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Ubuntu.CSAF/TASKS.md` 1. [TODO] EXCITITOR-CONN-UBUNTU-01-003 — EXCITITOR-CONN-UBUNTU-01-003 – Trust metadata & provenance • Prereqs: EXCITITOR-CONN-UBUNTU-01-002 (Wave 0), EXCITITOR-POLICY-01-001 (external/completed) • Current: TODO – Emit Ubuntu signing metadata (GPG fingerprints) plus provenance hints for policy weighting and diagnostics. - Team: Team Excititor Worker - - Path: `src/StellaOps.Excititor.Worker/TASKS.md` + - Path: `src/Excititor/StellaOps.Excititor.Worker/TASKS.md` 1. [TODO] EXCITITOR-WORKER-01-003 — EXCITITOR-WORKER-01-003 – Verification & cache GC loops • Prereqs: EXCITITOR-WORKER-01-001 (external/completed), EXCITITOR-ATTEST-01-003 (Wave 0), EXCITITOR-EXPORT-01-002 (external/completed) • Current: TODO – Add scheduled attestation re-verification and cache pruning routines, surfacing metrics for export reuse ratios. - **Sprint 7** · Contextual Truth Foundations - Team: Team Excititor Export - - Path: `src/StellaOps.Excititor.Export/TASKS.md` + - Path: `src/Excititor/__Libraries/StellaOps.Excititor.Export/TASKS.md` • Prereqs: EXCITITOR-EXPORT-01-005 (Wave 0), POLICY-CORE-09-005 (Wave 0) • Current: TODO – Attach `quietedBy` statement IDs, signers, and justification codes to exports/offline bundles, mirror metadata into attested manifest, and add regression fixtures. - **Sprint 10** · Backlog - Team: TBD - - Path: `src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md` + - Path: `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md` • Prereqs: SCANNER-ANALYZERS-LANG-10-307 (Wave 0) • Current: DONE — RID-aware deps/runtimeconfig parser emitting deterministic NuGet components with tests landed. - - Path: `src/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md` + - Path: `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md` • Prereqs: SCANNER-ANALYZERS-LANG-10-307 (Wave 0) • Current: DONE – Varint build-info decoder implemented with fixtures and determinism harness coverage. 
- - Path: `src/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md` + - Path: `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md` • Prereqs: SCANNER-ANALYZERS-LANG-10-302C (Wave 0) • Current: DONE — Node analyzer now reuses shared metadata/evidence helpers. - - Path: `src/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md` + - Path: `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md` • Prereqs: SCANNER-ANALYZERS-LANG-10-307 (Wave 0) • Current: DONE — Python analyzer ingests METADATA/WHEEL/entry_points with deterministic ordering and UTF-8 normalization. Fixtures updated (`simple-venv`). - - Path: `src/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md` + - Path: `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md` • Prereqs: SCANNER-ANALYZERS-LANG-10-307 (Wave 0) • Current: DONE — Cargo metadata walker emits `pkg:cargo` components with provenance and deterministic fixtures. - **Sprint 10** · Scanner Analyzers & SBOM - Team: Emit Guild - - Path: `src/StellaOps.Scanner.Emit/TASKS.md` + - Path: `src/Scanner/__Libraries/StellaOps.Scanner.Emit/TASKS.md` • Prereqs: SCANNER-EMIT-10-604 (Wave 0), POLICY-CORE-09-005 (Wave 0) • Current: DONE — SBOM/attestation fixtures include scoring metadata and serialize deterministically. - Team: Language Analyzer Guild - - Path: `src/StellaOps.Scanner.Analyzers.Lang/TASKS.md` + - Path: `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/TASKS.md` • Prereqs: SCANNER-ANALYZERS-LANG-10-301 (Wave 0) • Current: DONE — Manifest published under `plugins/scanner/analyzers/lang/`, Worker loader wired, integration tests updated. • Prereqs: SCANNER-ANALYZERS-LANG-10-307 (Wave 0) @@ -668,16 +668,16 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster • Current: DONE — Dist-info parser, RECORD verifier, editable install metadata, and entrypoint usage hints shipped with deterministic fixture/tests. - **Sprint 13** · UX & CLI Experience - Team: DevEx/CLI, QA Guild - - Path: `src/StellaOps.Cli/TASKS.md` + - Path: `src/Cli/StellaOps.Cli/TASKS.md` 1. [TODO] CLI-RUNTIME-13-009 — CLI-RUNTIME-13-009 – Runtime policy smoke fixture • Prereqs: CLI-RUNTIME-13-005 (Wave 0) • Current: TODO – Build Spectre test harness exercising `runtime policy test` against a stubbed backend to lock output shape (table + `--json`) and guard regressions. Integrate into `dotnet test` suite. - Team: UX Specialist, Angular Eng, DevEx - - Path: `src/StellaOps.Web/TASKS.md` + - Path: `src/Web/StellaOps.Web/TASKS.md` • Prereqs: WEB1.TRIVY-SETTINGS-TESTS (Wave 0) • Current: TODO – Capture deterministic lockfile flow, cache Puppeteer downloads, validate `npm test` from clean checkout offline, and update README. - Team: UI Guild - - Path: `src/StellaOps.UI/TASKS.md` + - Path: `src/UI/StellaOps.UI/TASKS.md` 1. [TODO] UI-VEX-13-003 — Implement VEX explorer + policy editor with preview integration. • Prereqs: EXCITITOR-CORE-02-001 (external/completed), EXCITITOR-EXPORT-01-005 (Wave 0) • Current: TODO @@ -716,24 +716,24 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster • Current: TODO - **Sprint 15** · Notify Foundations - Team: Notify Engine Guild - - Path: `src/StellaOps.Notify.Engine/TASKS.md` + - Path: `src/Notify/__Libraries/StellaOps.Notify.Engine/TASKS.md` 1. [DOING (2025-10-24)] NOTIFY-ENGINE-15-301 — Rules evaluation core: tenant/kind filters, severity/delta gates, VEX gating, throttling, idempotency key generation. 
• Prereqs: NOTIFY-MODELS-15-101 (Wave 0) • Current: DOING (2025-10-24) - Team: Notify Queue Guild - - Path: `src/StellaOps.Notify.Queue/TASKS.md` + - Path: `src/Notify/__Libraries/StellaOps.Notify.Queue/TASKS.md` • Prereqs: NOTIFY-MODELS-15-101 (Wave 0) • Current: DONE — Redis transport, queue contracts, and integration tests delivered (2025-10-23). - **Sprint 16** · Scheduler Intelligence - Team: Scheduler ImpactIndex Guild - - Path: `src/StellaOps.Scheduler.ImpactIndex/TASKS.md` + - Path: `src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/TASKS.md` 1. [TODO] SCHED-IMPACT-16-301 — Implement ingestion of per-image BOM-Index sidecars into roaring bitmap store (contains/usedBy). • Prereqs: SCANNER-EMIT-10-605 (Wave 0) • Current: TODO - Team: Scheduler Storage Guild - - Path: `src/StellaOps.Scheduler.Storage.Mongo/TASKS.md` + - Path: `src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/TASKS.md` 1. [TODO] SCHED-STORAGE-16-203 — Audit/logging pipeline + run stats materialized views for UI. • Prereqs: SCHED-STORAGE-16-201 (Wave 0) • Current: TODO @@ -741,7 +741,7 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster • Prereqs: SCHED-STORAGE-16-201 (Wave 0) • Current: TODO - Team: Scheduler WebService Guild - - Path: `src/StellaOps.Scheduler.WebService/TASKS.md` + - Path: `src/Scheduler/StellaOps.Scheduler.WebService/TASKS.md` 1. [TODO] SCHED-WEB-16-104 — Webhook endpoints for Feedser/Vexer exports with mTLS/HMAC validation and rate limiting. • Prereqs: SCHED-QUEUE-16-401 (Wave 0), SCHED-STORAGE-16-201 (Wave 0) • Current: TODO @@ -749,13 +749,13 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster • Prereqs: SCHED-WEB-16-101 (Wave 0) • Current: TODO - Team: Scheduler Worker Guild - - Path: `src/StellaOps.Scheduler.Worker/TASKS.md` + - Path: `src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md` 1. [TODO] SCHED-WORKER-16-201 — Planner loop (cron + event triggers) with lease management, fairness, and rate limiting (§6). • Prereqs: SCHED-QUEUE-16-401 (Wave 0) • Current: TODO - **Sprint 17** · Symbol Intelligence & Forensics - Team: Emit Guild - - Path: `src/StellaOps.Scanner.Emit/TASKS.md` + - Path: `src/Scanner/__Libraries/StellaOps.Scanner.Emit/TASKS.md` 1. [TODO] SCANNER-EMIT-17-701 — Record GNU build-id for ELF components and surface it in inventory/usage SBOM plus diff payloads with deterministic ordering. • Prereqs: SCANNER-EMIT-10-602 (Wave 0) • Current: TODO @@ -763,13 +763,13 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster ## Wave 2 — 29 task(s) ready after Wave 1 - **Sprint 6** · Excititor Ingest & Formats - Team: Team Excititor Connectors – Oracle - - Path: `src/StellaOps.Excititor.Connectors.Oracle.CSAF/TASKS.md` + - Path: `src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Oracle.CSAF/TASKS.md` 1. [TODO] EXCITITOR-CONN-ORACLE-01-003 — EXCITITOR-CONN-ORACLE-01-003 – Trust metadata + provenance • Prereqs: EXCITITOR-CONN-ORACLE-01-002 (Wave 1), EXCITITOR-POLICY-01-001 (external/completed) • Current: TODO – Emit Oracle signing metadata (PGP/cosign) and provenance hints for consensus weighting. 
- **Sprint 7** · Contextual Truth Foundations - Team: Team Excititor Export - - Path: `src/StellaOps.Excititor.Export/TASKS.md` + - Path: `src/Excititor/__Libraries/StellaOps.Excititor.Export/TASKS.md` • Prereqs: EXCITITOR-EXPORT-01-006 (Wave 1) • Current: TODO – Create per-domain mirror bundles with consensus/score artifacts, publish signed index for downstream Excititor sync, and ensure deterministic digests + fixtures. - **Sprint 9** · DevOps Foundations @@ -780,19 +780,19 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster • Current: TODO - **Sprint 10** · Backlog - Team: TBD - - Path: `src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md` + - Path: `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md` • Prereqs: SCANNER-ANALYZERS-LANG-10-305A (Wave 1) • Current: DONE — Assembly metadata now emits strong-name, file/product info, and optional Authenticode signals with deterministic fixtures/tests. - - Path: `src/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md` + - Path: `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md` • Prereqs: SCANNER-ANALYZERS-LANG-10-304A (Wave 1) • Current: DONE — DWARF fallback parses vcs.* markers, cache reuses metadata keyed by file identity. - - Path: `src/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md` + - Path: `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md` • Prereqs: SCANNER-ANALYZERS-LANG-10-307N (Wave 1) - • Current: DONE — Harness + fixtures merged; benchmark CSV recorded under `src/StellaOps.Bench/Scanner.Analyzers`. - - Path: `src/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md` + • Current: DONE — Harness + fixtures merged; benchmark CSV recorded under `src/Bench/StellaOps.Bench/Scanner.Analyzers`. + - Path: `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md` • Prereqs: SCANNER-ANALYZERS-LANG-10-303A (Wave 1) • Current: DONE — Streaming SHA-256 verification with deterministic mismatch evidence; unsupported algorithms tracked; fixtures validated. - - Path: `src/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md` + - Path: `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md` • Prereqs: SCANNER-ANALYZERS-LANG-10-306A (Wave 1) • Current: DONE — Heuristic classifier flags stripped binaries, regression tests guard false positives. - **Sprint 10** · DevOps Perf @@ -806,7 +806,7 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster • Prereqs: POLICY-CORE-09-006 (Wave 0), UI-POLICY-13-007 (Wave 1) • Current: DONE (2025-10-23) - Team: UI Guild - - Path: `src/StellaOps.Web/TASKS.md` + - Path: `src/Web/StellaOps.Web/TASKS.md` • Prereqs: SAMPLES-13-004 (Wave 0) • Current: DONE (2025-10-23) - **Sprint 14** · Release & Offline Ops @@ -822,39 +822,39 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster • Current: TODO - **Sprint 15** · Benchmarks - Team: Bench Guild, Notify Team - - Path: `src/StellaOps.Bench/TASKS.md` + - Path: `src/Bench/StellaOps.Bench/TASKS.md` 1. [TODO] BENCH-NOTIFY-15-001 — Notify dispatch throughput bench (vary rule density) with results CSV. • Prereqs: NOTIFY-ENGINE-15-301 (Wave 1) • Current: TODO - **Sprint 15** · Notify Foundations - Team: Notify Engine Guild - - Path: `src/StellaOps.Notify.Engine/TASKS.md` + - Path: `src/Notify/__Libraries/StellaOps.Notify.Engine/TASKS.md` 1. [TODO] NOTIFY-ENGINE-15-302 — Action planner + digest coalescer with window management and dedupe per architecture §4. 
• Prereqs: NOTIFY-ENGINE-15-301 (Wave 1) • Current: TODO - Team: Notify Queue Guild - - Path: `src/StellaOps.Notify.Queue/TASKS.md` + - Path: `src/Notify/__Libraries/StellaOps.Notify.Queue/TASKS.md` • Current: DONE — delivery queue + retry/dead-letter pipeline shipped with integration tests and metrics (2025-10-23). • Current: DONE — JetStream transport, DI binding, health check, and integration tests delivered (2025-10-23). - Team: Notify WebService Guild - - Path: `src/StellaOps.Notify.WebService/TASKS.md` + - Path: `src/Notify/StellaOps.Notify.WebService/TASKS.md` 1. [TODO] NOTIFY-WEB-15-104 — Configuration binding for Mongo/queue/secrets; startup diagnostics. • Current: TODO - Team: Notify Worker Guild - - Path: `src/StellaOps.Notify.Worker/TASKS.md` + - Path: `src/Notify/StellaOps.Notify.Worker/TASKS.md` • Current: DONE — worker leasing loop wired to queue adapters with retry/backoff telemetry (2025-10-23). 2. [TODO] NOTIFY-WORKER-15-202 — Wire rules evaluation pipeline (tenant scoping, filters, throttles, digests, idempotency) with deterministic decisions. • Prereqs: NOTIFY-ENGINE-15-301 (Wave 1) • Current: TODO - **Sprint 16** · Benchmarks - Team: Bench Guild, Scheduler Team - - Path: `src/StellaOps.Bench/TASKS.md` + - Path: `src/Bench/StellaOps.Bench/TASKS.md` 1. [TODO] BENCH-IMPACT-16-001 — ImpactIndex throughput bench (resolve 10k productKeys) + RAM profile. • Prereqs: SCHED-IMPACT-16-301 (Wave 1) • Current: TODO - **Sprint 16** · Scheduler Intelligence - Team: Scheduler ImpactIndex Guild - - Path: `src/StellaOps.Scheduler.ImpactIndex/TASKS.md` + - Path: `src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/TASKS.md` 1. [TODO] SCHED-IMPACT-16-303 — Snapshot/compaction + invalidation for removed images; persistence to RocksDB/Redis per architecture. • Prereqs: SCHED-IMPACT-16-301 (Wave 1) • Current: TODO @@ -862,12 +862,12 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster • Prereqs: SCHED-IMPACT-16-301 (Wave 1) • Current: TODO - Team: Scheduler WebService Guild - - Path: `src/StellaOps.Scheduler.WebService/TASKS.md` + - Path: `src/Scheduler/StellaOps.Scheduler.WebService/TASKS.md` 1. [TODO] SCHED-WEB-16-103 — Runs API (list/detail/cancel), ad-hoc run POST, and impact preview endpoints. • Prereqs: SCHED-WEB-16-102 (Wave 1) • Current: TODO - Team: Scheduler Worker Guild - - Path: `src/StellaOps.Scheduler.Worker/TASKS.md` + - Path: `src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md` 1. [TODO] SCHED-WORKER-16-202 — Wire ImpactIndex targeting (ResolveByPurls/vulns), dedupe, shard planning. • Prereqs: SCHED-IMPACT-16-301 (Wave 1) • Current: TODO @@ -884,117 +884,117 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster ## Wave 3 — 14 task(s) ready after Wave 2 - **Sprint 7** · Contextual Truth Foundations - Team: Excititor Connectors – Stella - - Path: `src/StellaOps.Excititor.Connectors.StellaOpsMirror/TASKS.md` + - Path: `src/Excititor/StellaOps.Excititor.Connectors.StellaOpsMirror/TASKS.md` • Prereqs: EXCITITOR-EXPORT-01-007 (Wave 2) • Current: TODO - **Sprint 10** · Backlog - Team: TBD - - Path: `src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md` + - Path: `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md` • Prereqs: SCANNER-ANALYZERS-LANG-10-305A (Wave 1) • Current: DONE — Self-contained fixtures emit components with RID flags; EntryTrace usage hints preserved. 
- - Path: `src/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md` + - Path: `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md` • Prereqs: SCANNER-ANALYZERS-LANG-10-304B (Wave 2) • Current: DONE — `bin:{sha256}` fallback + quiet provenance docs shipped with determinism fixtures. - - Path: `src/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md` + - Path: `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md` • Prereqs: SCANNER-ANALYZERS-LANG-10-308N (Wave 2) • Current: DONE — Manifest shipped, Worker catalog integration complete, Offline Kit docs updated. - - Path: `src/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md` + - Path: `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md` • Prereqs: SCANNER-ANALYZERS-LANG-10-303B (Wave 2) • Current: DONE — `direct_url.json` editable insights surfaced; EntryTrace usage hints mark console scripts; deterministic fixture covers editable vs wheel installs. - - Path: `src/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md` + - Path: `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md` • Prereqs: SCANNER-ANALYZERS-LANG-10-306B (Wave 2) • Current: DONE — Hash fallback wired through shared helpers; fixtures ensure deterministic output. - **Sprint 13** · UX & CLI Experience - Team: DevEx/CLI, Scanner WebService Guild - - Path: `src/StellaOps.Cli/TASKS.md` + - Path: `src/Cli/StellaOps.Cli/TASKS.md` 1. [TODO] CLI-RUNTIME-13-008 — CLI-RUNTIME-13-008 – Runtime policy contract sync • Current: TODO – Once `/api/v1/scanner/policy/runtime` exits TODO, verify CLI output against final schema (field names, metadata) and update formatter/tests if the contract moves. Capture joint review notes in docs/09 and link Scanner task sign-off. - **Sprint 15** · Notify Foundations - Team: Notify Engine Guild - - Path: `src/StellaOps.Notify.Engine/TASKS.md` + - Path: `src/Notify/__Libraries/StellaOps.Notify.Engine/TASKS.md` 1. [TODO] NOTIFY-ENGINE-15-303 — Template rendering engine (Slack, Teams, Email, Webhook) with helpers and i18n support. • Prereqs: NOTIFY-ENGINE-15-302 (Wave 2) • Current: TODO - Team: Notify Worker Guild - - Path: `src/StellaOps.Notify.Worker/TASKS.md` + - Path: `src/Notify/StellaOps.Notify.Worker/TASKS.md` 1. [TODO] NOTIFY-WORKER-15-203 — Channel dispatch orchestration: invoke connectors, manage retries/jitter, record delivery outcomes. • Prereqs: NOTIFY-ENGINE-15-302 (Wave 2) • Current: TODO - **Sprint 16** · Scheduler Intelligence - Team: Scheduler Worker Guild - - Path: `src/StellaOps.Scheduler.Worker/TASKS.md` + - Path: `src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md` 1. [TODO] SCHED-WORKER-16-203 — Runner execution: call Scanner `/reports` (analysis-only) or `/scans` when configured; collect deltas; handle retries. • Prereqs: SCHED-WORKER-16-202 (Wave 2) • Current: TODO - **Sprint 17** · Symbol Intelligence & Forensics - Team: Zastava Observer Guild - - Path: `src/StellaOps.Zastava.Observer/TASKS.md` + - Path: `src/Zastava/StellaOps.Zastava.Observer/TASKS.md` • Current: DONE — Build-id capture wired through RuntimeProcessCollector + RuntimeEventFactory; docs/runbook updated with debug-store workflow. ## Wave 4 — 15 task(s) ready after Wave 3 - **Sprint 7** · Contextual Truth Foundations - Team: Excititor Connectors – Stella - - Path: `src/StellaOps.Excititor.Connectors.StellaOpsMirror/TASKS.md` + - Path: `src/Excititor/StellaOps.Excititor.Connectors.StellaOpsMirror/TASKS.md` 1. 
[TODO] EXCITITOR-CONN-STELLA-07-002 — Normalize mirror bundles into VexClaim sets referencing original provider metadata and mirror provenance. • Prereqs: EXCITITOR-CONN-STELLA-07-001 (Wave 3) • Current: TODO - **Sprint 9** · Policy Foundations - Team: Policy Guild, Scanner WebService Guild - - Path: `src/StellaOps.Policy/TASKS.md` + - Path: `src/Policy/__Libraries/StellaOps.Policy/TASKS.md` • Current: TODO - **Sprint 10** · Backlog - Team: TBD - - Path: `src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md` + - Path: `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md` • Prereqs: SCANNER-ANALYZERS-LANG-10-305C (Wave 3) • Current: DONE 2025-10-22 - - Path: `src/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md` + - Path: `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md` • Prereqs: SCANNER-ANALYZERS-LANG-10-304C (Wave 3) • Current: DONE — Shared helpers integrated; concurrency tests verify buffer reuse. - - Path: `src/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md` + - Path: `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md` 1. [TODO] SCANNER-ANALYZERS-LANG-10-307P — Shared helper integration (license metadata, quiet provenance, component merging). • Prereqs: SCANNER-ANALYZERS-LANG-10-303C (Wave 3) • Current: TODO - - Path: `src/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md` + - Path: `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md` 1. [TODO] SCANNER-ANALYZERS-LANG-10-307R — Finalize shared helper usage (license, usage flags) and concurrency-safe caches. • Prereqs: SCANNER-ANALYZERS-LANG-10-306C (Wave 3) • Current: TODO - **Sprint 13** · UX & CLI Experience - Team: DevEx/CLI - - Path: `src/StellaOps.Cli/TASKS.md` + - Path: `src/Cli/StellaOps.Cli/TASKS.md` • Prereqs: CLI-RUNTIME-13-005 (Wave 0), CLI-OFFLINE-13-006 (Wave 3) • Current: TODO – Package non-core verbs as restart-time plug-ins (manifest + loader updates, tests ensuring no hot reload). - **Sprint 15** · Notify Foundations - Team: Notify Connectors Guild - - Path: `src/StellaOps.Notify.Connectors.Email/TASKS.md` + - Path: `src/Notify/__Libraries/StellaOps.Notify.Connectors.Email/TASKS.md` 1. [TODO] NOTIFY-CONN-EMAIL-15-701 — Implement SMTP connector with STARTTLS/implicit TLS support, HTML+text rendering, attachment policy enforcement. • Prereqs: NOTIFY-ENGINE-15-303 (Wave 3) • Current: TODO - - Path: `src/StellaOps.Notify.Connectors.Slack/TASKS.md` + - Path: `src/Notify/__Libraries/StellaOps.Notify.Connectors.Slack/TASKS.md` 1. [TODO] NOTIFY-CONN-SLACK-15-501 — Implement Slack connector with bot token auth, message rendering (blocks), rate limit handling, retries/backoff. • Prereqs: NOTIFY-ENGINE-15-303 (Wave 3) • Current: TODO - - Path: `src/StellaOps.Notify.Connectors.Teams/TASKS.md` + - Path: `src/Notify/__Libraries/StellaOps.Notify.Connectors.Teams/TASKS.md` 1. [TODO] NOTIFY-CONN-TEAMS-15-601 — Implement Teams connector using Adaptive Cards 1.5, handle webhook auth, size limits, retries. • Prereqs: NOTIFY-ENGINE-15-303 (Wave 3) • Current: TODO - - Path: `src/StellaOps.Notify.Connectors.Webhook/TASKS.md` + - Path: `src/Notify/__Libraries/StellaOps.Notify.Connectors.Webhook/TASKS.md` 1. [TODO] NOTIFY-CONN-WEBHOOK-15-801 — Implement webhook connector: JSON payload, signature (HMAC/Ed25519), retries/backoff, status code handling. • Prereqs: NOTIFY-ENGINE-15-303 (Wave 3) • Current: TODO - Team: Notify Engine Guild - - Path: `src/StellaOps.Notify.Engine/TASKS.md` + - Path: `src/Notify/__Libraries/StellaOps.Notify.Engine/TASKS.md` 1. 
[TODO] NOTIFY-ENGINE-15-304 — Test-send sandbox + preview utilities for WebService. • Prereqs: NOTIFY-ENGINE-15-303 (Wave 3) • Current: TODO - Team: Notify Worker Guild - - Path: `src/StellaOps.Notify.Worker/TASKS.md` + - Path: `src/Notify/StellaOps.Notify.Worker/TASKS.md` 1. [TODO] NOTIFY-WORKER-15-204 — Metrics/telemetry: `notify.sent_total`, `notify.dropped_total`, latency histograms, tracing integration. • Prereqs: NOTIFY-WORKER-15-203 (Wave 3) • Current: TODO - **Sprint 16** · Scheduler Intelligence - Team: Scheduler Worker Guild - - Path: `src/StellaOps.Scheduler.Worker/TASKS.md` + - Path: `src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md` 1. [TODO] SCHED-WORKER-16-204 — Emit events (`scheduler.rescan.delta`, `scanner.report.ready`) for Notify/UI with summaries. • Prereqs: SCHED-WORKER-16-203 (Wave 3) • Current: TODO @@ -1007,68 +1007,68 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster ## Wave 5 — 10 task(s) ready after Wave 4 - **Sprint 7** · Contextual Truth Foundations - Team: Excititor Connectors – Stella - - Path: `src/StellaOps.Excititor.Connectors.StellaOpsMirror/TASKS.md` + - Path: `src/Excititor/StellaOps.Excititor.Connectors.StellaOpsMirror/TASKS.md` 1. [TODO] EXCITITOR-CONN-STELLA-07-003 — Implement incremental cursor handling per-export digest, support resume, and document configuration for downstream Excititor mirrors. • Prereqs: EXCITITOR-CONN-STELLA-07-002 (Wave 4) • Current: TODO - **Sprint 10** · Backlog - Team: TBD - - Path: `src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md` + - Path: `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md` • Prereqs: SCANNER-ANALYZERS-LANG-10-307D (Wave 4) • Current: DONE — fixtures + benchmarks merged 2025-10-23 - - Path: `src/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md` + - Path: `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md` • Prereqs: SCANNER-ANALYZERS-LANG-10-307G (Wave 4) • Current: DONE — Fixtures and benchmark harness merged; perf delta captured vs competitor. - - Path: `src/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md` + - Path: `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md` • Prereqs: SCANNER-ANALYZERS-LANG-10-307P (Wave 4) • Current: DONE — Fixtures `simple-venv`, `pip-cache`, `layered-editable` + hash throughput benchmarks merged 2025-10-23. - - Path: `src/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md` + - Path: `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md` 1. [TODO] SCANNER-ANALYZERS-LANG-10-308R — Determinism fixtures + performance benchmarks; compare against competitor heuristic coverage. • Prereqs: SCANNER-ANALYZERS-LANG-10-307R (Wave 4) • Current: TODO - **Sprint 15** · Notify Foundations - Team: Notify Connectors Guild - - Path: `src/StellaOps.Notify.Connectors.Email/TASKS.md` + - Path: `src/Notify/__Libraries/StellaOps.Notify.Connectors.Email/TASKS.md` 1. [BLOCKED] NOTIFY-CONN-EMAIL-15-702 — Add DKIM signing optional support and health/test-send flows. • Prereqs: NOTIFY-CONN-EMAIL-15-701 (Wave 4) • Current: BLOCKED – waiting on base SMTP connector implementation (NOTIFY-CONN-EMAIL-15-701). 
- - Path: `src/StellaOps.Notify.Connectors.Slack/TASKS.md` - - Path: `src/StellaOps.Notify.Connectors.Teams/TASKS.md` - - Path: `src/StellaOps.Notify.Connectors.Webhook/TASKS.md` + - Path: `src/Notify/__Libraries/StellaOps.Notify.Connectors.Slack/TASKS.md` + - Path: `src/Notify/__Libraries/StellaOps.Notify.Connectors.Teams/TASKS.md` + - Path: `src/Notify/__Libraries/StellaOps.Notify.Connectors.Webhook/TASKS.md` 1. [DOING] NOTIFY-CONN-WEBHOOK-15-802 — Health/test-send support with signature validation hints and secret management. • Prereqs: NOTIFY-CONN-WEBHOOK-15-801 (Wave 4) • Current: TODO - **Sprint 17** · Symbol Intelligence & Forensics - Team: Scanner WebService Guild - - Path: `src/StellaOps.Scanner.WebService/TASKS.md` + - Path: `src/Scanner/StellaOps.Scanner.WebService/TASKS.md` • Current: DONE — runtime events normalize digests/build IDs, policy responses/CLI emit `buildIds`, docs/tests updated for debug-store workflows. ## Wave 6 — 8 task(s) ready after Wave 5 - **Sprint 10** · Backlog - Team: TBD - - Path: `src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md` + - Path: `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md` • Prereqs: SCANNER-ANALYZERS-LANG-10-308D (Wave 5) • Current: DONE — manifest + Offline Kit docs updated 2025-10-23 - - Path: `src/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md` + - Path: `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md` • Prereqs: SCANNER-ANALYZERS-LANG-10-308G (Wave 5) • Current: DONE — Manifest copied, Worker DI registration verified, Offline Kit docs updated. - - Path: `src/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md` + - Path: `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md` • Prereqs: SCANNER-ANALYZERS-LANG-10-308P (Wave 5) • Current: DONE — Manifest copied, Worker integration verified, Offline Kit docs updated with Python plug-in guidance. - - Path: `src/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md` + - Path: `src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md` 1. [TODO] SCANNER-ANALYZERS-LANG-10-309R — Package plug-in manifest + Offline Kit documentation; ensure Worker integration. • Prereqs: SCANNER-ANALYZERS-LANG-10-308R (Wave 5) • Current: TODO - **Sprint 7** · Contextual Truth Foundations - Team: Team Normalization & Storage Backbone - - Path: `src/StellaOps.Concelier.Storage.Mongo/TASKS.md` + - Path: `src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/TASKS.md` • Prereqs: FEEDMERGE-ENGINE-07-001 (Wave 11) • Current: TODO – Create `advisory_statements` (immutable) and `advisory_conflicts` collections, define `asOf`/`vulnerabilityKey` indexes, and document migration/rollback steps for event-sourced merge. ## Wave 7 — 52 task(s) ready after Wave 6 - **Sprint 20** · Policy Engine v2 - Team: Policy Guild - - Path: `src/StellaOps.Policy.Engine/TASKS.md` + - Path: `src/Policy/StellaOps.Policy.Engine/TASKS.md` 1. [TODO] POLICY-ENGINE-20-000 — New Policy Engine service host, DI bootstrap, Authority scaffolding. • Prereqs: POLICY-AOC-19-001 (Wave 1) • Current: TODO @@ -1091,7 +1091,7 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster • Prereqs: POLICY-ENGINE-20-000 & POLICY-ENGINE-20-004 (Wave 7) • Current: TODO - Team: Policy Guild · Data Joiners - - Path: `src/StellaOps.Policy.Engine/TASKS.md` + - Path: `src/Policy/StellaOps.Policy.Engine/TASKS.md` 1. [TODO] POLICY-ENGINE-20-003 — SBOM↔advisory↔VEX joiners using linksets. 
• Prereqs: POLICY-ENGINE-20-001 (Wave 7), CONCELIER-POLICY-20-002 (Wave 7), EXCITITOR-POLICY-20-002 (Wave 7) • Current: TODO @@ -1103,7 +1103,7 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster • Current: TODO - **Sprint 20** · Policy API Surface - Team: BE-Base Platform Guild - - Path: `src/StellaOps.Web/TASKS.md` + - Path: `src/Web/StellaOps.Web/TASKS.md` 1. [TODO] WEB-POLICY-20-001 — Policy CRUD/compile/run/simulate/findings/explain endpoints. • Prereqs: POLICY-ENGINE-20-001/004 (Wave 7), AUTH-POLICY-20-001 (Wave 7) • Current: TODO @@ -1118,7 +1118,7 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster • Current: TODO - **Sprint 20** · Policy Console - Team: UI Guild - - Path: `src/StellaOps.UI/TASKS.md` + - Path: `src/UI/StellaOps.UI/TASKS.md` 1. [TODO] UI-POLICY-20-001 — Monaco editor with inline diagnostics/compliance checklist. • Prereqs: WEB-POLICY-20-001 (Wave 7) • Current: TODO @@ -1133,7 +1133,7 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster • Current: TODO - **Sprint 20** · Policy CLI - Team: DevEx/CLI Guild - - Path: `src/StellaOps.Cli/TASKS.md` + - Path: `src/Cli/StellaOps.Cli/TASKS.md` 1. [TODO] CLI-POLICY-20-001 — `policy new|edit|submit|approve` commands. • Prereqs: WEB-POLICY-20-001 (Wave 7), AUTH-POLICY-20-001 (Wave 7) • Current: TODO @@ -1145,38 +1145,38 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster • Current: TODO - **Sprint 20** · Policy Selection Services - Team: Concelier WebService Guild - - Path: `src/StellaOps.Concelier.WebService/TASKS.md` + - Path: `src/Concelier/StellaOps.Concelier.WebService/TASKS.md` 1. [TODO] CONCELIER-POLICY-20-001 — Advisory selection endpoints for policy engine. • Prereqs: CONCELIER-CORE-AOC-19-004 (Wave 1), WEB-POLICY-20-001 (Wave 7) • Current: TODO - Team: Concelier Core Guild - - Path: `src/StellaOps.Concelier.Core/TASKS.md` + - Path: `src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md` 1. [TODO] CONCELIER-POLICY-20-002 — Linkset enrichment with equivalence tables/ranges. • Prereqs: CONCELIER-CORE-AOC-19-002 (Wave 1), POLICY-ENGINE-20-001 (Wave 7) • Current: TODO - Team: Concelier Storage Guild - - Path: `src/StellaOps.Concelier.Storage.Mongo/TASKS.md` + - Path: `src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/TASKS.md` 1. [TODO] CONCELIER-POLICY-20-003 — Selection cursors + change-stream checkpoints. • Prereqs: CONCELIER-STORE-AOC-19-002 (Wave 1), POLICY-ENGINE-20-003 (Wave 7) • Current: TODO - Team: Excititor WebService Guild - - Path: `src/StellaOps.Excititor.WebService/TASKS.md` + - Path: `src/Excititor/StellaOps.Excititor.WebService/TASKS.md` 1. [TODO] EXCITITOR-POLICY-20-001 — VEX selection APIs (batch PURL/ID, tenant filters). • Prereqs: EXCITITOR-CORE-AOC-19-004 (Wave 1), WEB-POLICY-20-001 (Wave 7) • Current: TODO - Team: Excititor Core Guild - - Path: `src/StellaOps.Excititor.Core/TASKS.md` + - Path: `src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md` 1. [TODO] EXCITITOR-POLICY-20-002 — Scope-aware linksets + version range handling. • Prereqs: EXCITITOR-CORE-AOC-19-002 (Wave 1), POLICY-ENGINE-20-001 (Wave 7) • Current: TODO - Team: Excititor Storage Guild - - Path: `src/StellaOps.Excititor.Storage.Mongo/TASKS.md` + - Path: `src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/TASKS.md` 1. [TODO] EXCITITOR-POLICY-20-003 — Selection cursors + checkpoints for VEX change streams. 
• Prereqs: EXCITITOR-STORE-AOC-19-002 (Wave 1), POLICY-ENGINE-20-003 (Wave 7) • Current: TODO - **Sprint 20** · Scheduler Integration - Team: Scheduler Models Guild - - Path: `src/StellaOps.Scheduler.Models/TASKS.md` + - Path: `src/Scheduler/__Libraries/StellaOps.Scheduler.Models/TASKS.md` 1. [TODO] SCHED-MODELS-20-001 — Policy run/diff DTOs + validation helpers. • Prereqs: POLICY-ENGINE-20-000 (Wave 7) • Current: TODO @@ -1184,7 +1184,7 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster • Prereqs: SCHED-MODELS-20-001 (Wave 7) • Current: TODO - Team: Scheduler WebService Guild - - Path: `src/StellaOps.Scheduler.WebService/TASKS.md` + - Path: `src/Scheduler/StellaOps.Scheduler.WebService/TASKS.md` 1. [TODO] SCHED-WEB-20-001 — Policy run scheduling APIs with `policy:run` enforcement. • Prereqs: SCHED-WEB-16-101 (Wave 1), AUTH-POLICY-20-001 (Wave 7) • Current: TODO @@ -1192,7 +1192,7 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster • Prereqs: SCHED-WEB-20-001 (Wave 7), POLICY-ENGINE-20-006 (Wave 7) • Current: TODO - Team: Scheduler Worker Guild - - Path: `src/StellaOps.Scheduler.Worker/TASKS.md` + - Path: `src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md` 1. [TODO] SCHED-WORKER-20-301 — Trigger policy runs (full/incremental/simulate) via API. • Prereqs: SCHED-WORKER-16-201 (Wave 1), POLICY-ENGINE-20-000 (Wave 7) • Current: TODO @@ -1204,7 +1204,7 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster • Current: TODO - **Sprint 20** · Authority & Security - Team: Authority Core & Security Guild - - Path: `src/StellaOps.Authority/TASKS.md` + - Path: `src/Authority/StellaOps.Authority/TASKS.md` 1. [TODO] AUTH-POLICY-20-001 — Introduce policy scopes (`policy:*`, `findings:read`, `effective:write`). • Prereqs: AUTH-AOC-19-001 (Wave 1) • Current: TODO @@ -1275,7 +1275,7 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster • Prereqs: UI-POLICY-20-002 (Wave 7) • Current: TODO - Team: Bench Guild - - Path: `src/StellaOps.Bench/TASKS.md` + - Path: `src/Bench/StellaOps.Bench/TASKS.md` 1. [TODO] BENCH-POLICY-20-001 — Policy evaluation performance benchmark suite. • Prereqs: POLICY-ENGINE-20-002/006 (Wave 7) • Current: TODO @@ -1286,84 +1286,84 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster ## Wave 8 — 60 task(s) ready after Wave 7 - **Sprint 21** · Graph Explorer v1 - Team: Cartographer Guild - - Path: `src/StellaOps.Cartographer/TASKS.md` + - Path: `src/Cartographer/StellaOps.Cartographer/TASKS.md` 1. [TODO] CARTO-GRAPH-21-001/002/003/004 — Schema, projection reader, graph constructor, and layout tiling are ready once SBOM projections ship (Wave 7 prereqs). 2. [TODO] CARTO-GRAPH-21-005/006/007/008/009 — Overlay worker, API surface, backfill/overlay jobs, testing, and deployment artefacts depend on Cartographer infrastructure plus Policy Engine 30-series work. - Team: SBOM Service Guild - - Path: `src/StellaOps.SbomService/TASKS.md` + - Path: `src/SbomService/StellaOps.SbomService/TASKS.md` 1. [TODO] SBOM-SERVICE-21-001/002/003/004 — Normalized projection API, change events, entrypoint management, and observability unblock Cartographer’s ingestion. - Team: Policy Guild - - Path: `src/StellaOps.Policy.Engine/TASKS.md` + - Path: `src/Policy/StellaOps.Policy.Engine/TASKS.md` 1. 
[TODO] POLICY-ENGINE-30-001/002/003 — Graph overlay contract, simulation bridge, and change events rely on Policy Engine v2 core (Wave 7) and feed Cartographer overlays. - Team: BE-Base Platform Guild - - Path: `src/StellaOps.Web/TASKS.md` + - Path: `src/Web/StellaOps.Web/TASKS.md` 1. [TODO] WEB-GRAPH-21-001..004 — Graph gateway routes, validation, exports, and simulation bridging activate once Cartographer endpoints exist. - Team: UI Guild - - Path: `src/StellaOps.UI/TASKS.md` + - Path: `src/UI/StellaOps.UI/TASKS.md` 1. [TODO] UI-GRAPH-21-001..006 — Canvas, inspector, filters, paths, diff, and accessibility depend on Cartographer/Web graph APIs and Samples fixtures. - Team: DevEx/CLI Guild - - Path: `src/StellaOps.Cli/TASKS.md` + - Path: `src/Cli/StellaOps.Cli/TASKS.md` 1. [TODO] CLI-GRAPH-21-001..003 — CLI commands, path/simulation options, and docs require Cartographer/Web readiness. - Team: Authority Core & Security Guild - - Path: `src/StellaOps.Authority/TASKS.md` + - Path: `src/Authority/StellaOps.Authority/TASKS.md` 1. [TODO] AUTH-GRAPH-21-001..003 — Graph scope issuance, enforcement, and documentation unblock service deployments. - Team: Scheduler Guilds - - Paths: `src/StellaOps.Scheduler.Models/TASKS.md`, `src/StellaOps.Scheduler.WebService/TASKS.md`, `src/StellaOps.Scheduler.Worker/TASKS.md` + - Paths: `src/Scheduler/__Libraries/StellaOps.Scheduler.Models/TASKS.md`, `src/Scheduler/StellaOps.Scheduler.WebService/TASKS.md`, `src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md` 1. [TODO] SCHED-MODELS-21-001/002, SCHED-WEB-21-001/002, SCHED-WORKER-21-201..203 — Graph job DTOs, APIs, workers, and metrics coordinate Cartographer runs after SBOM change events. - Team: Concelier Guild - - Paths: `src/StellaOps.Concelier.Core/TASKS.md`, `src/StellaOps.Concelier.WebService/TASKS.md` + - Paths: `src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md`, `src/Concelier/StellaOps.Concelier.WebService/TASKS.md` 1. [TODO] CONCELIER-GRAPH-21-001..004 — SBOM projection enrichment and entrypoint APIs feed SBOM Service/Cartographer. - Team: Excititor Guild - - Paths: `src/StellaOps.Excititor.Core/TASKS.md`, `src/StellaOps.Excititor.WebService/TASKS.md`, `src/StellaOps.Excititor.Storage.Mongo/TASKS.md` + - Paths: `src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md`, `src/Excititor/StellaOps.Excititor.WebService/TASKS.md`, `src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/TASKS.md` 1. [TODO] EXCITITOR-GRAPH-21-001..005 — Provide VEX inspector data, overlay enrichment, events, and indexes for Graph Explorer. - Team: DevOps Guild - Path: `ops/devops/TASKS.md` 1. [TODO] DEVOPS-GRAPH-21-001..003 — Perf tests, visual regression captures, and offline kit bundling align with Cartographer/SBOM readiness. - Team: Docs/Samples/Bench Guilds - - Paths: `docs/TASKS.md`, `samples/TASKS.md`, `src/StellaOps.Bench/TASKS.md` + - Paths: `docs/TASKS.md`, `samples/TASKS.md`, `src/Bench/StellaOps.Bench/TASKS.md` 1. [TODO] DOCS-GRAPH-21-001..009, SAMPLES-GRAPH-21-001..002, BENCH-GRAPH-21-001..002 — Publish documentation set, sample assets, and benchmarks once API/UI stabilize. ## Wave 9 — 58 task(s) ready after Wave 8 - **Sprint 22** · Link-Not-Merge v1 - Team: Concelier Core Guild - - Path: `src/StellaOps.Concelier.Core/TASKS.md` + - Path: `src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md` 1. 
[TODO] CONCELIER-LNM-21-001/002/003/004/005 — Observation schema, linkset builder, conflict annotator, merge removal, and event emission follow Graph wave completion and AOC guard readiness. - Team: Concelier Storage Guild - - Path: `src/StellaOps.Concelier.Storage.Mongo/TASKS.md` + - Path: `src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/TASKS.md` 1. [TODO] CONCELIER-LNM-21-101/102/103 — Collections, backfill tooling, and blob storage wiring depend on core schema finalization. - Team: Concelier WebService Guild - - Path: `src/StellaOps.Concelier.WebService/TASKS.md` + - Path: `src/Concelier/StellaOps.Concelier.WebService/TASKS.md` 1. [TODO] CONCELIER-LNM-21-201/202/203 — Advisory observation/linkset APIs and event publishing follow storage readiness. - Team: BE-Merge - - Path: `src/StellaOps.Concelier.Merge/TASKS.md` + - Path: `src/Concelier/__Libraries/StellaOps.Concelier.Merge/TASKS.md` 1. [TODO] MERGE-LNM-21-001/002/003 — Decommission merge pipeline once observation/linkset flow validated. - Team: Excititor Core Guild - - Path: `src/StellaOps.Excititor.Core/TASKS.md` + - Path: `src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md` 1. [TODO] EXCITITOR-LNM-21-001..005 — VEX observations/linksets, conflicts, merge removal, and events mirror advisory work. - Team: Excititor Storage Guild - - Path: `src/StellaOps.Excititor.Storage.Mongo/TASKS.md` + - Path: `src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/TASKS.md` 1. [TODO] EXCITITOR-LNM-21-101/102 — Collections and backfill for VEX data prepared after schema finalization. - Team: Excititor WebService Guild - - Path: `src/StellaOps.Excititor.WebService/TASKS.md` + - Path: `src/Excititor/StellaOps.Excititor.WebService/TASKS.md` 1. [TODO] EXCITITOR-LNM-21-201..203 — VEX observation/linkset APIs and event publishing. - Team: Policy Guild - - Path: `src/StellaOps.Policy.Engine/TASKS.md` + - Path: `src/Policy/StellaOps.Policy.Engine/TASKS.md` 1. [TODO] POLICY-ENGINE-40-001..003 — Effective severity adjustments, VEX conflict handling, and consumer utilities once observation/linkset data shape is fixed. - Team: Scanner WebService Guild - - Path: `src/StellaOps.Scanner.WebService/TASKS.md` + - Path: `src/Scanner/StellaOps.Scanner.WebService/TASKS.md` 1. [TODO] SCANNER-LNM-21-001/002 — Report/runtime updates and evidence endpoint leveraging new linksets. - Team: BE-Base Platform Guild - - Path: `src/StellaOps.Web/TASKS.md` + - Path: `src/Web/StellaOps.Web/TASKS.md` 1. [TODO] WEB-LNM-21-001..003 — Gateway exposure for advisory/vex APIs and policy evidence combos. - Team: UI Guild - - Path: `src/StellaOps.UI/TASKS.md` + - Path: `src/UI/StellaOps.UI/TASKS.md` 1. [TODO] UI-LNM-22-001..004 — Evidence panel, filters, VEX tab, permalinks after API readiness. - Team: DevEx/CLI Guild - - Path: `src/StellaOps.Cli/TASKS.md` + - Path: `src/Cli/StellaOps.Cli/TASKS.md` 1. [TODO] CLI-LNM-22-001/002 — CLI support for observations/linksets and exports. - Team: Authority Core Guild - - Path: `src/StellaOps.Authority/TASKS.md` + - Path: `src/Authority/StellaOps.Authority/TASKS.md` 1. [TODO] AUTH-AOC-19-001 — Scope rollout (`advisory/vex ingest/read`) enabling new APIs. - Team: DevOps Guild - Path: `ops/devops/TASKS.md` @@ -1375,78 +1375,78 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster - Path: `samples/TASKS.md` 1. [TODO] SAMPLES-LNM-22-001/002 — Observation/linkset fixtures for advisories and VEX. 
- Team: Bench Guild - - Path: `src/StellaOps.Bench/TASKS.md` + - Path: `src/Bench/StellaOps.Bench/TASKS.md` 1. [TODO] BENCH-LNM-22-001/002 — Ingest/correlation performance benchmarks to enforce SLA. ## Wave 10 — 54 task(s) ready after Wave 9 - **Sprint 23** · Policy Engine + Editor v1 - Team: Policy Guild (Library) - - Path: `src/StellaOps.Policy/TASKS.md` + - Path: `src/Policy/__Libraries/StellaOps.Policy/TASKS.md` 1. [TODO] POLICY-SPL-23-001..005 — SPL schema/canonicalizer/layering/explain model/migration tooling once Link-Not-Merge data model is stable. - Team: Policy Engine Service - - Path: `src/StellaOps.Policy.Engine/TASKS.md` + - Path: `src/Policy/StellaOps.Policy.Engine/TASKS.md` 1. [TODO] POLICY-ENGINE-50-001..007 — Compiler, evaluator, observability, event pipeline, storage schemas, explainer persistence, worker orchestration. - Team: BE-Base Platform Guild - - Path: `src/StellaOps.Web/TASKS.md` + - Path: `src/Web/StellaOps.Web/TASKS.md` 1. [TODO] WEB-POLICY-23-001..004 — Policy pack CRUD, activation, simulation/evaluation, explain history APIs. - Team: UI Guild - - Path: `src/StellaOps.UI/TASKS.md` + - Path: `src/UI/StellaOps.UI/TASKS.md` 1. [TODO] UI-POLICY-23-001..006 — Policy editor workspace, YAML builder, guided builder, approvals, simulator, explain view. - Team: DevEx/CLI Guild - - Path: `src/StellaOps.Cli/TASKS.md` + - Path: `src/Cli/StellaOps.Cli/TASKS.md` 1. [TODO] CLI-POLICY-23-004..006 — CLI lint/activate/history + explain commands aligned with new APIs. - Team: Authority Core Guild - - Path: `src/StellaOps.Authority/TASKS.md` + - Path: `src/Authority/StellaOps.Authority/TASKS.md` 1. [TODO] AUTH-POLICY-23-001..003 — Policy scopes, two-person activation, documentation. - Team: SBOM Service Guild - - Path: `src/StellaOps.SbomService/TASKS.md` + - Path: `src/SbomService/StellaOps.SbomService/TASKS.md` 1. [TODO] SBOM-SERVICE-23-001/002 — Asset metadata projection + `sbom.asset.updated` events feeding evaluator. - Team: Concelier & Excititor Guilds - - Paths: `src/StellaOps.Concelier.Core/TASKS.md`, `src/StellaOps.Excititor.Core/TASKS.md`, `src/StellaOps.Concelier.WebService/TASKS.md`, `src/StellaOps.Excititor.WebService/TASKS.md` + - Paths: `src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md`, `src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md`, `src/Concelier/StellaOps.Concelier.WebService/TASKS.md`, `src/Excititor/StellaOps.Excititor.WebService/TASKS.md` 1. [TODO] CONCELIER-POLICY-23-001/002 and EXCITITOR-POLICY-23-001/002 plus CONCELIER/EXCITITOR-LNM-21-201..203 — Evidence indexes, enriched events, observation/linkset APIs supporting policy runtime. - Team: Scheduler Worker Guild - - Path: `src/StellaOps.Scheduler.Worker/TASKS.md` + - Path: `src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md` 1. [TODO] SCHED-WORKER-23-101/102 — Policy re-evaluation worker + reconciliation job post activation. - Team: DevOps Guild - Path: `ops/devops/TASKS.md` 1. [TODO] DEVOPS-LNM-22-001..003 (migration/monitoring) and future policy deployment automation for SPL bundles. - Team: Docs Guild, Samples, Bench - - Paths: `docs/TASKS.md`, `samples/TASKS.md`, `src/StellaOps.Bench/TASKS.md` + - Paths: `docs/TASKS.md`, `samples/TASKS.md`, `src/Bench/StellaOps.Bench/TASKS.md` 1. [TODO] DOCS-POLICY-23-001..010, SAMPLES-LNM-22-001/002, BENCH-LNM-22-001/002 — Documentation set, policy fixtures, performance benchmarks. 
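Wave 10's SPL canonicalizer (POLICY-SPL-23-001..005) and the recurring "deterministic digests" requirements above share one idea: hash a canonical serialization rather than whatever field order the producer happened to emit. A small illustrative sketch, with the field model and digest format assumed rather than taken from the SPL spec:

```csharp
using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography;
using System.Text;

// Hypothetical sketch of a deterministic digest: sort keys, length-prefix each
// part so concatenation cannot be ambiguous, then hash. Not the SPL canonical form.
public static class CanonicalDigest
{
    public static string Compute(IReadOnlyDictionary<string, string> fields)
    {
        var builder = new StringBuilder();
        foreach (var pair in fields.OrderBy(p => p.Key, StringComparer.Ordinal))
        {
            // Length prefixes keep "a"+"bc" distinct from "ab"+"c".
            builder.Append(pair.Key.Length).Append(':').Append(pair.Key)
                   .Append(pair.Value.Length).Append(':').Append(pair.Value).Append('\n');
        }

        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(builder.ToString()));
        return Convert.ToHexString(hash).ToLowerInvariant();
    }
}
```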
## Wave 11 — 1 task(s) ready after Wave 10 - **Sprint 32** · Orchestrator Dashboard Phase 1 (Foundations) - Team: Orchestrator Service Guild - - Path: `src/StellaOps.Orchestrator/TASKS.md` + - Path: `src/Orchestrator/StellaOps.Orchestrator/TASKS.md` 1. [TODO] ORCH-SVC-32-001..005 — Stand up the orchestrator service (schema, scheduler, read-only APIs, SSE, worker endpoints). Coordinate with DevOps (DEVOPS-ORCH-32-001) for Postgres + message bus availability before enabling progression. - Team: Worker SDK Guild - - Paths: `src/StellaOps.Orchestrator.WorkerSdk.Go/TASKS.md`, `src/StellaOps.Orchestrator.WorkerSdk.Python/TASKS.md` + - Paths: `src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Go/TASKS.md`, `src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python/TASKS.md` 1. [TODO] WORKER-GO-32-001/002, WORKER-PY-32-001/002 — Deliver baseline job claim/heartbeat libraries. These unblock Concelier/Excititor/SBOM adoption tasks and should validate against ORCH-SVC-32-005 contract. - Team: Concelier Core Guild - - Path: `src/StellaOps.Concelier.Core/TASKS.md` + - Path: `src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md` 1. [TODO] CONCELIER-ORCH-32-001/002 — Register sources and embed SDK hooks in ingestion loops. Depends on Worker SDK handshake and orchestrator read APIs. - Team: Excititor Worker Guild - - Path: `src/StellaOps.Excititor.Worker/TASKS.md` + - Path: `src/Excititor/StellaOps.Excititor.Worker/TASKS.md` 1. [TODO] EXCITITOR-ORCH-32-001 — Adopt worker SDK for VEX ingestion. Requires ORCH-SVC-32-005 and Worker SDK readiness. - Team: SBOM Service Guild - - Path: `src/StellaOps.SbomService/TASKS.md` + - Path: `src/SbomService/StellaOps.SbomService/TASKS.md` 1. [TODO] SBOM-ORCH-32-001 — Emit orchestrator job metadata and artifact hashes for SBOM ingest/index jobs; depends on orchestrator schema finalization. - Team: Policy Guild - - Path: `src/StellaOps.Policy.Engine/TASKS.md` + - Path: `src/Policy/StellaOps.Policy.Engine/TASKS.md` 1. [TODO] POLICY-ENGINE-32-101 — Define `policy_eval` job contract and enqueue hooks so orchestrator DAGs can plan downstream work. - Team: BE-Base Platform Guild - - Path: `src/StellaOps.Web/TASKS.md` + - Path: `src/Web/StellaOps.Web/TASKS.md` 1. [TODO] WEB-ORCH-32-001 — Surface read-only orchestrator APIs through the gateway with tenant scoping once service endpoints exist. - Team: Authority Core & Security Guild - - Path: `src/StellaOps.Authority/TASKS.md` + - Path: `src/Authority/StellaOps.Authority/TASKS.md` 1. [TODO] AUTH-ORCH-32-001 — Introduce `orch:read` scope and `Orch.Viewer` role so CLI/Console work can proceed safely. - Team: DevEx/CLI Guild - - Path: `src/StellaOps.Cli/TASKS.md` + - Path: `src/Cli/StellaOps.Cli/TASKS.md` 1. [TODO] CLI-ORCH-32-001 — Provide read-only `stella orch` listings after gateway routes/scopes are available; validate against imposed rule requirement. - Team: Console Guild - - Path: `src/StellaOps.Cli/TASKS.md` + - Path: `src/Cli/StellaOps.Cli/TASKS.md` 1. [TODO] CONSOLE-ORCH-32-001/002 — Overview + Sources pages (read-only) rely on SSE stream, viewer scope, and CLI/gateway parity. - Team: Docs Guild - Path: `docs/TASKS.md` @@ -1456,37 +1456,37 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster 1. [TODO] DEVOPS-ORCH-32-001 — Stand up Postgres/message bus environments and seed Grafana dashboards; prerequisite for orchestrator integration workstreams. 
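The Phase 1 worker SDK items (WORKER-GO-32-001/002, WORKER-PY-32-001/002) ship in Go and Python; the C# shape below only sketches the claim → heartbeat → complete loop that CONCELIER-ORCH-32-001/002 and EXCITITOR-ORCH-32-001 are expected to embed. Every interface member and the job-type string here are hypothetical:

```csharp
using System;
using System.Threading;
using System.Threading.Tasks;

// Illustration of the worker SDK contract shape only; all names are assumptions.
public interface IOrchestratorClient
{
    Task<JobLease?> ClaimAsync(string jobType, CancellationToken ct);
    Task HeartbeatAsync(string leaseId, CancellationToken ct);
    Task CompleteAsync(string leaseId, bool succeeded, CancellationToken ct);
}

public sealed record JobLease(string LeaseId, string Payload, TimeSpan HeartbeatInterval);

public static class WorkerLoop
{
    public static async Task RunAsync(
        IOrchestratorClient client,
        Func<string, CancellationToken, Task> handler,
        CancellationToken ct)
    {
        while (!ct.IsCancellationRequested)
        {
            var lease = await client.ClaimAsync("source:ingest", ct);
            if (lease is null)
            {
                await Task.Delay(TimeSpan.FromSeconds(5), ct); // nothing claimable; back off briefly
                continue;
            }

            // Fire-and-forget keep-alive so the orchestrator does not reassign the lease mid-run.
            using var heartbeat = new Timer(
                _ => client.HeartbeatAsync(lease.LeaseId, ct),
                null, lease.HeartbeatInterval, lease.HeartbeatInterval);

            var succeeded = true;
            try
            {
                await handler(lease.Payload, ct);
            }
            catch (Exception)
            {
                succeeded = false; // orchestrator decides retry vs. dead-letter from this signal
            }

            await client.CompleteAsync(lease.LeaseId, succeeded, ct);
        }
    }
}
```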
- **Sprint 33** · Orchestrator Dashboard Phase 2 (Controls & Recovery) - Team: Orchestrator Service Guild - - Path: `src/StellaOps.Orchestrator/TASKS.md` + - Path: `src/Orchestrator/StellaOps.Orchestrator/TASKS.md` 1. [TODO] ORCH-SVC-33-001..004 — Add control actions, adaptive rate limiter, watermark/backfill manager, and dead-letter replay. Requires Phase 1 completion and Worker SDK control hooks. - Team: Worker SDK Guild - - Paths: `src/StellaOps.Orchestrator.WorkerSdk.Go/TASKS.md`, `src/StellaOps.Orchestrator.WorkerSdk.Python/TASKS.md` + - Paths: `src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Go/TASKS.md`, `src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python/TASKS.md` 1. [TODO] WORKER-GO-33-001/002, WORKER-PY-33-001/002 — Provide artifact upload, idempotency guards, and error classification so orchestrator controls function safely. - Team: Concelier Core Guild - - Path: `src/StellaOps.Concelier.Core/TASKS.md` + - Path: `src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md` 1. [TODO] CONCELIER-ORCH-33-001 — Honor orchestrator throttles and retry semantics; unblocker for circuit breaker work in Sprint 34. - Team: Excititor Worker Guild - - Path: `src/StellaOps.Excititor.Worker/TASKS.md` + - Path: `src/Excititor/StellaOps.Excititor.Worker/TASKS.md` 1. [TODO] EXCITITOR-ORCH-33-001 — Surface error classes and throttling compliance; depends on Worker SDK error helpers. - Team: SBOM Service Guild - - Path: `src/StellaOps.SbomService/TASKS.md` + - Path: `src/SbomService/StellaOps.SbomService/TASKS.md` 1. [TODO] SBOM-ORCH-33-001 — Report backpressure metrics and respect orchestrator pause/backfill signals. - Team: Policy Guild - - Path: `src/StellaOps.Policy.Engine/TASKS.md` + - Path: `src/Policy/StellaOps.Policy.Engine/TASKS.md` 1. [TODO] POLICY-ENGINE-33-101 — Implement orchestrator-driven evaluation workers with SLO metrics; prerequisites: ORCH-SVC-32-003/005 and Worker SDK upgrades. - Team: VEX Lens Guild - - Path: `src/StellaOps.VexLens/TASKS.md` + - Path: `src/VexLens/StellaOps.VexLens/TASKS.md` 1. [TODO] VEXLENS-ORCH-33-001 — Register `consensus_compute` job type and worker integration so orchestrator can schedule consensus batches. - Team: BE-Base Platform Guild - - Path: `src/StellaOps.Web/TASKS.md` + - Path: `src/Web/StellaOps.Web/TASKS.md` 1. [TODO] WEB-ORCH-33-001 — Wire control/backfill endpoints through gateway with proper error mapping and SSE bridging; relies on AUTH-ORCH-33-001. - Team: Authority Core & Security Guild - - Path: `src/StellaOps.Authority/TASKS.md` + - Path: `src/Authority/StellaOps.Authority/TASKS.md` 1. [TODO] AUTH-ORCH-33-001 — Add `Orch.Operator` role/scopes and enforce reason strings; prerequisite for CLI/Console control surfaces. - Team: DevEx/CLI Guild - - Path: `src/StellaOps.Cli/TASKS.md` + - Path: `src/Cli/StellaOps.Cli/TASKS.md` 1. [TODO] CLI-ORCH-33-001 — Implement action verbs (`pause|resume|test`, `retry|cancel`, `jobs tail`) with streaming output and scope enforcement. - Team: Console Guild - - Path: `src/StellaOps.Cli/TASKS.md` + - Path: `src/Cli/StellaOps.Cli/TASKS.md` 1. [TODO] CONSOLE-ORCH-33-001/002 — Runs timeline/DAG and Jobs tail views with action buttons. Requires SSE, operator scopes, and orchestrator control endpoints. - Team: Docs Guild - Path: `docs/TASKS.md` @@ -1496,40 +1496,40 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster 1. [TODO] DEVOPS-ORCH-33-001 — Deliver Grafana dashboards/alerts (rate limiter, queue depth, error clustering) gated by orchestrator metrics. 
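ORCH-SVC-33-001..004's adaptive rate limiter and the "honor orchestrator throttles" tasks above boil down to token accounting at the job source. A fixed-rate token-bucket sketch; the adaptive tuning and per-tenant partitioning are out of scope here, and all names are illustrative:

```csharp
using System;

// Minimal token bucket: refill proportionally to elapsed time, deny when empty.
// The real ORCH-SVC-33 limiter is adaptive; this only shows the core accounting.
public sealed class TokenBucket
{
    private readonly double _capacity;
    private readonly double _refillPerSecond;
    private readonly object _gate = new();
    private double _tokens;
    private long _lastTicks;

    public TokenBucket(double capacity, double refillPerSecond)
    {
        _capacity = capacity;
        _refillPerSecond = refillPerSecond;
        _tokens = capacity;
        _lastTicks = Environment.TickCount64;
    }

    public bool TryAcquire(double cost = 1.0)
    {
        lock (_gate)
        {
            var now = Environment.TickCount64;
            var elapsedSeconds = (now - _lastTicks) / 1000.0;
            _lastTicks = now;

            _tokens = Math.Min(_capacity, _tokens + elapsedSeconds * _refillPerSecond);
            if (_tokens < cost)
            {
                return false; // caller should delay or requeue instead of hammering the source
            }

            _tokens -= cost;
            return true;
        }
    }
}
```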
- **Sprint 34** · Orchestrator Dashboard Phase 3 (Backfills, Quotas, GA) - Team: Orchestrator Service Guild - - Path: `src/StellaOps.Orchestrator/TASKS.md` + - Path: `src/Orchestrator/StellaOps.Orchestrator/TASKS.md` 1. [TODO] ORCH-SVC-34-001..004 — Quotas/SLOs, audit ledger export, scale tests, and packaging. Requires Phase 2 controls plus DevOps support for perf/load validation. - Team: Worker SDK Guild - - Paths: `src/StellaOps.Orchestrator.WorkerSdk.Go/TASKS.md`, `src/StellaOps.Orchestrator.WorkerSdk.Python/TASKS.md` + - Paths: `src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Go/TASKS.md`, `src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python/TASKS.md` 1. [TODO] WORKER-GO-34-001, WORKER-PY-34-001 — Backfill range execution and dedupe verification; prerequisites: ORCH-SVC-33-003 and service artifact schemas. - Team: Concelier Core Guild - - Path: `src/StellaOps.Concelier.Core/TASKS.md` + - Path: `src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md` 1. [TODO] CONCELIER-ORCH-34-001 — Execute orchestrator-driven backfills with ledger linkage; ensure idempotency before GA sign-off. - Team: Excititor Worker Guild - - Path: `src/StellaOps.Excititor.Worker/TASKS.md` + - Path: `src/Excititor/StellaOps.Excititor.Worker/TASKS.md` 1. [TODO] EXCITITOR-ORCH-34-001 — Backfill + circuit breaker reset logic; depends on Worker SDK backfill support. - Team: SBOM Service Guild - - Path: `src/StellaOps.SbomService/TASKS.md` + - Path: `src/SbomService/StellaOps.SbomService/TASKS.md` 1. [TODO] SBOM-ORCH-34-001 — Watermark reconciliation and coverage metrics for sbom backfills. - Team: Policy Guild - - Path: `src/StellaOps.Policy.Engine/TASKS.md` + - Path: `src/Policy/StellaOps.Policy.Engine/TASKS.md` 1. [TODO] POLICY-ENGINE-34-101 — Surface run ledger exports and SLO burn metrics to orchestrator; coordinates with Findings Ledger. - Team: VEX Lens Guild - - Path: `src/StellaOps.VexLens/TASKS.md` + - Path: `src/VexLens/StellaOps.VexLens/TASKS.md` 1. [TODO] VEXLENS-ORCH-34-001 — Emit consensus completion events into orchestrator ledger + provenance chain. - Team: Findings Ledger Guild - - Path: `src/StellaOps.Findings.Ledger/TASKS.md` + - Path: `src/Findings/StellaOps.Findings.Ledger/TASKS.md` 1. [TODO] LEDGER-34-101 — Consume orchestrator ledger entries for provenance exports; must align with ORCH-SVC-34-002 hashing. - Team: BE-Base Platform Guild - - Path: `src/StellaOps.Web/TASKS.md` + - Path: `src/Web/StellaOps.Web/TASKS.md` 1. [TODO] WEB-ORCH-34-001 — Route quotas/backfill/error clustering APIs; prerequisite for CLI/Console GA features. - Team: Authority Core & Security Guild - - Path: `src/StellaOps.Authority/TASKS.md` + - Path: `src/Authority/StellaOps.Authority/TASKS.md` 1. [TODO] AUTH-ORCH-34-001 — Add `Orch.Admin` role, quota scopes, and audit reason enforcement; required before exposing admin controls. - Team: DevEx/CLI Guild - - Path: `src/StellaOps.Cli/TASKS.md` + - Path: `src/Cli/StellaOps.Cli/TASKS.md` 1. [TODO] CLI-ORCH-34-001 — Implement backfill/quota commands with dry-run preview; depends on ORCH-SVC-34-001/003 and AUTH-ORCH-34-001. - Team: Console Guild - - Path: `src/StellaOps.Cli/TASKS.md` + - Path: `src/Cli/StellaOps.Cli/TASKS.md` 1. [TODO] CONSOLE-ORCH-34-001..003 — Queues/backpressure dashboard, backfill wizard, and error clustering view; align with API + metrics outputs. - Team: Docs Guild - Path: `docs/TASKS.md` @@ -1545,31 +1545,31 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster 1. 
[TODO] DEVOPS-OFFLINE-34-006 — Bundle orchestrator service artifacts, worker SDK samples, and Postgres snapshot into Offline Kit with integrity checks. - **Sprint 35** · Export Center Phase 1 (Foundations) - Team: Exporter Service Guild - - Path: `src/StellaOps.ExportCenter/TASKS.md` + - Path: `src/ExportCenter/StellaOps.ExportCenter/TASKS.md` 1. [TODO] EXPORT-SVC-35-001..006 — Bootstrap exporter service, planner, JSON/mirror adapters, manifests/signing, and download APIs. Blocks downstream integrations (Findings Ledger, Policy, VEX Lens, Web, CLI, Console). - Team: Orchestrator Service Guild - - Path: `src/StellaOps.Orchestrator/TASKS.md` + - Path: `src/Orchestrator/StellaOps.Orchestrator/TASKS.md` 1. [TODO] ORCH-SVC-35-101 — Register export job type, quotas, and telemetry to support exporter workers. - Team: Findings Ledger Guild - - Path: `src/StellaOps.Findings.Ledger/TASKS.md` + - Path: `src/Findings/StellaOps.Findings.Ledger/TASKS.md` 1. [TODO] LEDGER-EXPORT-35-001 — Provide streaming endpoints for advisories/VEX/SBOM/findings filtered per export scopes. Required before planner work can complete. - Team: Policy Guild - - Path: `src/StellaOps.Policy.Engine/TASKS.md` + - Path: `src/Policy/StellaOps.Policy.Engine/TASKS.md` 1. [TODO] POLICY-ENGINE-35-201 — Supply deterministic policy snapshot + evaluated findings endpoint for policy-aware exports. - Team: VEX Lens Guild - - Path: `src/StellaOps.VexLens/TASKS.md` + - Path: `src/VexLens/StellaOps.VexLens/TASKS.md` 1. [TODO] VEXLENS-EXPORT-35-001 — Produce consensus snapshot API consumed by mirror bundles. - Team: BE-Base Platform Guild - - Path: `src/StellaOps.Web/TASKS.md` + - Path: `src/Web/StellaOps.Web/TASKS.md` 1. [TODO] WEB-EXPORT-35-001 — Route export APIs and downloads through gateway once exporter endpoints are live. - Team: Authority Core & Security Guild - - Path: `src/StellaOps.Authority/TASKS.md` + - Path: `src/Authority/StellaOps.Authority/TASKS.md` 1. [TODO] AUTH-EXPORT-35-001 — Publish Export Viewer/Operator/Admin scopes and issuer templates before Console/CLI ship. - Team: DevEx/CLI Guild - - Path: `src/StellaOps.Cli/TASKS.md` + - Path: `src/Cli/StellaOps.Cli/TASKS.md` 1. [TODO] CLI-EXPORT-35-001 — Read-only CLI commands for profiles/runs/downloads; depends on WEB-EXPORT-35-001 and AUTH-EXPORT-35-001. - Team: Console Guild - - Path: `src/StellaOps.Cli/TASKS.md` + - Path: `src/Cli/StellaOps.Cli/TASKS.md` 1. [TODO] CONSOLE-EXPORT-35-001 — Profiles + overview UI; requires gateway routes and scopes. - Team: Docs Guild - Path: `docs/TASKS.md` @@ -1582,19 +1582,19 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster 1. [TODO] DEPLOY-EXPORT-35-001 — Package exporter service/worker Helm overlays for download-only phase. - **Sprint 36** · Export Center Phase 2 (Trivy + Distribution) - Team: Exporter Service Guild - - Path: `src/StellaOps.ExportCenter/TASKS.md` + - Path: `src/ExportCenter/StellaOps.ExportCenter/TASKS.md` 1. [TODO] EXPORT-SVC-36-001..004 — Trivy adapters, OCI/object storage distribution, planner updates. Trivy bundles require DEVOPS-EXPORT-36-001 validation. - Team: Orchestrator Service Guild - - Path: `src/StellaOps.Orchestrator/TASKS.md` + - Path: `src/Orchestrator/StellaOps.Orchestrator/TASKS.md` 1. [TODO] ORCH-SVC-36-101 — Extend orchestrator telemetry/retention fields for export runs. - Team: BE-Base Platform Guild - - Path: `src/StellaOps.Web/TASKS.md` + - Path: `src/Web/StellaOps.Web/TASKS.md` 1. 
[TODO] WEB-EXPORT-36-001 — Distribution endpoints must land before CLI/Console actions move forward. - Team: DevEx/CLI Guild - - Path: `src/StellaOps.Cli/TASKS.md` + - Path: `src/Cli/StellaOps.Cli/TASKS.md` 1. [TODO] CLI-EXPORT-36-001 — Distribute/download resume features depend on WEB-EXPORT-36-001 and AUTH scopes. - Team: Console Guild - - Path: `src/StellaOps.Cli/TASKS.md` + - Path: `src/Cli/StellaOps.Cli/TASKS.md` 1. [TODO] CONSOLE-EXPORT-36-001 — Runs detail + distribution UI after API support exists. - Team: Docs Guild - Path: `docs/TASKS.md` @@ -1607,22 +1607,22 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster 1. [TODO] DEPLOY-EXPORT-36-001 — Document registry credentials and automation for distributions. - **Sprint 37** · Export Center Phase 3 (Delta, Encryption, Scheduling, GA) - Team: Exporter Service Guild - - Path: `src/StellaOps.ExportCenter/TASKS.md` + - Path: `src/ExportCenter/StellaOps.ExportCenter/TASKS.md` 1. [TODO] EXPORT-SVC-37-001..004 — Mirror delta/encryption, scheduling+retention, verification API. Depends on DEVOPS-EXPORT-37-001 for chaos/alert readiness. - Team: Orchestrator Service Guild - - Path: `src/StellaOps.Orchestrator/TASKS.md` + - Path: `src/Orchestrator/StellaOps.Orchestrator/TASKS.md` 1. [TODO] ORCH-SVC-37-101 — Scheduling + retention hooks required for exporter automation. - Team: BE-Base Platform Guild - - Path: `src/StellaOps.Web/TASKS.md` + - Path: `src/Web/StellaOps.Web/TASKS.md` 1. [TODO] WEB-EXPORT-37-001 — Surface scheduling, retention, verification, encryption parameters once exporter endpoints exist. - Team: Authority Core & Security Guild - - Path: `src/StellaOps.Authority/TASKS.md` + - Path: `src/Authority/StellaOps.Authority/TASKS.md` 1. [TODO] AUTH-EXPORT-37-001 — Admin scope enforcement for scheduling, retention, encryption. - Team: DevEx/CLI Guild - - Path: `src/StellaOps.Cli/TASKS.md` + - Path: `src/Cli/StellaOps.Cli/TASKS.md` 1. [TODO] CLI-EXPORT-37-001 — Scheduling and verification commands with signature/hash checks; relies on WEB-EXPORT-37-001. - Team: Console Guild - - Path: `src/StellaOps.Cli/TASKS.md` + - Path: `src/Cli/StellaOps.Cli/TASKS.md` 1. [TODO] CONSOLE-EXPORT-37-001 — Verification panel, scheduling UI, retention controls, encryption workflows. - Team: Docs Guild - Path: `docs/TASKS.md` @@ -1635,25 +1635,25 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster 1. [TODO] DEVOPS-OFFLINE-37-001 — Bundle export tooling and sample mirror bundles into Offline Kit. - **Sprint 38** · Notifications Studio Phase 1 (Foundations) - Team: Notifications Service Guild - - Path: `src/StellaOps.Notifier/TASKS.md` + - Path: `src/Notifier/StellaOps.Notifier/TASKS.md` 1. [TODO] NOTIFY-SVC-38-001..004 — Bootstrap notifier service, migrations, ingestion, templates, channel adapters, initial APIs. Requires orchestrator event envelope updates and policy violation enrichment. - Team: Orchestrator Service Guild - - Path: `src/StellaOps.Orchestrator/TASKS.md` + - Path: `src/Orchestrator/StellaOps.Orchestrator/TASKS.md` 1. [TODO] ORCH-SVC-38-101 — Standardize event publication (policy/export/job lifecycle) with idempotency keys for notifier. - Team: Policy Guild - - Path: `src/StellaOps.Policy.Engine/TASKS.md` + - Path: `src/Policy/StellaOps.Policy.Engine/TASKS.md` 1. [TODO] POLICY-ENGINE-38-201 — Emit enriched policy violation events (decision rationale IDs) for notifier ingestion. 
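ORCH-SVC-38-101's standardized event publication and POLICY-ENGINE-38-201's rationale enrichment imply an envelope the notifier can deduplicate on. A hypothetical shape — the field names and key derivation are assumptions, not the final schema:

```csharp
using System;
using System.Security.Cryptography;
using System.Text;

// Hypothetical event envelope: the idempotency key is derived deterministically
// from tenant, event type, and source-assigned sequence so redundant deliveries
// can be dropped safely. Field names are assumptions.
public sealed record OrchestratorEvent(
    string Tenant,
    string EventType,          // e.g. "policy.violation", "export.completed"
    long SourceSequence,
    string PayloadJson,
    string[] RationaleIds)     // decision rationale IDs per POLICY-ENGINE-38-201
{
    public string IdempotencyKey =>
        Convert.ToHexString(
            SHA256.HashData(
                Encoding.UTF8.GetBytes($"{Tenant}|{EventType}|{SourceSequence}")))
            .ToLowerInvariant();
}
```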
- Team: BE-Base Platform Guild - - Path: `src/StellaOps.Web/TASKS.md` + - Path: `src/Web/StellaOps.Web/TASKS.md` 1. [TODO] WEB-NOTIFY-38-001 — Gateway routing for notifier APIs with tenant RBAC. - Team: Authority Core & Security Guild - - Path: `src/StellaOps.Authority/TASKS.md` + - Path: `src/Authority/StellaOps.Authority/TASKS.md` 1. [TODO] AUTH-NOTIFY-38-001 — Publish Notify Viewer/Operator/Admin scopes and issuer templates. - Team: DevEx/CLI Guild - - Path: `src/StellaOps.Cli/TASKS.md` + - Path: `src/Cli/StellaOps.Cli/TASKS.md` 1. [TODO] CLI-NOTIFY-38-001 — CLI commands for rules/templates/incidents. - Team: Console Guild - - Path: `src/StellaOps.Cli/TASKS.md` + - Path: `src/Cli/StellaOps.Cli/TASKS.md` 1. [TODO] CONSOLE-NOTIFY-38-001 — Studio home, rule editor, incidents UI (phase 1). - Team: Docs Guild - Path: `docs/TASKS.md` @@ -1666,19 +1666,19 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster 1. [TODO] DEPLOY-NOTIFY-38-001 — Helm overlays and rollout guide for notifier foundations. - **Sprint 39** · Notifications Studio Phase 2 (Correlation, Digests, Simulation) - Team: Notifications Service Guild - - Path: `src/StellaOps.Notifier/TASKS.md` + - Path: `src/Notifier/StellaOps.Notifier/TASKS.md` 1. [TODO] NOTIFY-SVC-39-001..004 — Correlation, throttling, quiet hours, digest generator, simulation engine. - Team: Findings Ledger Guild - - Path: `src/StellaOps.Findings.Ledger/TASKS.md` + - Path: `src/Findings/StellaOps.Findings.Ledger/TASKS.md` 1. [TODO] LEDGER-NOTIFY-39-001 — Digest query optimization endpoints. - Team: BE-Base Platform Guild - - Path: `src/StellaOps.Web/TASKS.md` + - Path: `src/Web/StellaOps.Web/TASKS.md` 1. [TODO] WEB-NOTIFY-39-001 — Gateway updates for digests, simulation, throttles. - Team: DevEx/CLI Guild - - Path: `src/StellaOps.Cli/TASKS.md` + - Path: `src/Cli/StellaOps.Cli/TASKS.md` 1. [TODO] CLI-NOTIFY-39-001 — CLI simulation/digest commands. - Team: Console Guild - - Path: `src/StellaOps.Cli/TASKS.md` + - Path: `src/Cli/StellaOps.Cli/TASKS.md` 1. [TODO] CONSOLE-NOTIFY-39-001 — Template editor, digest profiles, quiet calendar, storm banner. - Team: Docs Guild - Path: `docs/TASKS.md` @@ -1688,19 +1688,19 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster 1. [TODO] DEVOPS-NOTIFY-39-002 — Throttle/quiet/digest dashboards. - **Sprint 40** · Notifications Studio Phase 3 (Escalations, Localization, Hardening) - Team: Notifications Service Guild - - Path: `src/StellaOps.Notifier/TASKS.md` + - Path: `src/Notifier/StellaOps.Notifier/TASKS.md` 1. [TODO] NOTIFY-SVC-40-001..004 — Escalations, ack bridge, PagerDuty/OpsGenie adapters, localization, security hardening, chaos tests. - Team: Authority Core & Security Guild - - Path: `src/StellaOps.Authority/TASKS.md` + - Path: `src/Authority/StellaOps.Authority/TASKS.md` 1. [TODO] AUTH-NOTIFY-40-001 — Ack token signing/rotation, webhook allowlists, admin enforcement. - Team: BE-Base Platform Guild - - Path: `src/StellaOps.Web/TASKS.md` + - Path: `src/Web/StellaOps.Web/TASKS.md` 1. [TODO] WEB-NOTIFY-40-001 — Expose escalation/localization/channel health endpoints. - Team: DevEx/CLI Guild - - Path: `src/StellaOps.Cli/TASKS.md` + - Path: `src/Cli/StellaOps.Cli/TASKS.md` 1. [TODO] CLI-NOTIFY-40-001 — Ack redemption, escalation management, localization previews. - Team: Console Guild - - Path: `src/StellaOps.Cli/TASKS.md` + - Path: `src/Cli/StellaOps.Cli/TASKS.md` 1. 
[TODO] CONSOLE-NOTIFY-40-001 — Escalation settings, on-call schedules, localization UI, incident Kanban enhancements. - Team: Docs Guild - Path: `docs/TASKS.md` @@ -1710,19 +1710,19 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster 1. [TODO] DEVOPS-NOTIFY-40-001 — Escalation/ack latency dashboards, chaos tooling. - **Sprint 41** · CLI Parity & Task Packs Phase 1 - Team: DevEx/CLI Guild - - Path: `src/StellaOps.Cli/TASKS.md` + - Path: `src/Cli/StellaOps.Cli/TASKS.md` 1. [TODO] CLI-CORE-41-001, CLI-PARITY-41-001/002 — Implement CLI core config/auth/output foundations and initial parity command groups. - Team: Task Runner Guild - - Path: `src/StellaOps.TaskRunner/TASKS.md` + - Path: `src/TaskRunner/StellaOps.TaskRunner/TASKS.md` 1. [TODO] TASKRUN-41-001 — Bootstrap Task Runner service, run API, local executor, approvals pause, artifact capture. - Team: Packs Registry Guild - - Path: `src/StellaOps.PacksRegistry/TASKS.md` + - Path: `src/PacksRegistry/StellaOps.PacksRegistry/TASKS.md` 1. [TODO] PACKS-REG-41-001 — Registry API, signature verification, provenance storage, RBAC. - Team: Orchestrator Service Guild - - Path: `src/StellaOps.Orchestrator/TASKS.md` + - Path: `src/Orchestrator/StellaOps.Orchestrator/TASKS.md` 1. [TODO] ORCH-SVC-41-101 — Register `pack-run` job type, integrate logs/artifacts, expose metadata. - Team: Authority Core & Security Guild - - Path: `src/StellaOps.Authority/TASKS.md` + - Path: `src/Authority/StellaOps.Authority/TASKS.md` 1. [TODO] AUTH-PACKS-41-001 — Define CLI/pack scopes, discovery metadata, offline defaults. - Team: Docs Guild - Path: `docs/TASKS.md` @@ -1735,25 +1735,25 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster 1. [TODO] DEPLOY-CLI-41-001 — Package CLI release artifacts and distribution docs. - **Sprint 42** · CLI Parity & Task Packs Phase 2 - Team: DevEx/CLI Guild - - Path: `src/StellaOps.Cli/TASKS.md` + - Path: `src/Cli/StellaOps.Cli/TASKS.md` 1. [TODO] CLI-PARITY-41-001/002, CLI-PACKS-42-001 — Close remaining parity gaps and ship Task Pack CLI commands. - Team: Task Runner Guild - - Path: `src/StellaOps.TaskRunner/TASKS.md` + - Path: `src/TaskRunner/StellaOps.TaskRunner/TASKS.md` 1. [TODO] TASKRUN-42-001 — Loops, conditionals, simulation mode, policy gates. - Team: Packs Registry Guild - - Path: `src/StellaOps.PacksRegistry/TASKS.md` + - Path: `src/PacksRegistry/StellaOps.PacksRegistry/TASKS.md` 1. [TODO] PACKS-REG-42-001 — Version lifecycle, allowlists, provenance export, signature rotation. - Team: Orchestrator Service Guild - - Path: `src/StellaOps.Orchestrator/TASKS.md` + - Path: `src/Orchestrator/StellaOps.Orchestrator/TASKS.md` 1. [TODO] ORCH-SVC-42-101 — Stream pack run logs, expose manifolds, enforce quotas. - Team: Policy Guild - - Path: `src/StellaOps.Policy.Engine/TASKS.md` + - Path: `src/Policy/StellaOps.Policy.Engine/TASKS.md` 1. [TODO] POLICY-ENGINE-42-201 — Stable rationale IDs/APIs for CLI `--explain` and packs. - Team: Findings Ledger Guild - - Path: `src/StellaOps.Findings.Ledger/TASKS.md` + - Path: `src/Findings/StellaOps.Findings.Ledger/TASKS.md` 1. [TODO] LEDGER-PACKS-42-001 — Snapshot/time-travel APIs for pack simulation. - Team: Console Guild - - Path: `src/StellaOps.Cli/TASKS.md` + - Path: `src/Cli/StellaOps.Cli/TASKS.md` 1. [TODO] CONSOLE-CLI-42-001 — Copy CLI buttons, parity hints, pack browser. - Team: Docs Guild - Path: `docs/TASKS.md` @@ -1766,22 +1766,22 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster 1. 
[TODO] DEPLOY-PACKS-42-001 — Deploy packs registry/task runner with secrets templates. - **Sprint 43** · CLI Parity & Task Packs Phase 3 - Team: DevEx/CLI Guild - - Path: `src/StellaOps.Cli/TASKS.md` + - Path: `src/Cli/StellaOps.Cli/TASKS.md` 1. [TODO] CLI-PACKS-43-001 — Advanced pack features (approvals pause/resume, secrets, localization, man pages). - Team: Task Runner Guild - - Path: `src/StellaOps.TaskRunner/TASKS.md` + - Path: `src/TaskRunner/StellaOps.TaskRunner/TASKS.md` 1. [TODO] TASKRUN-43-001 — Approvals workflow, notifications integration, chaos resilience. - Team: Packs Registry Guild - - Path: `src/StellaOps.PacksRegistry/TASKS.md` + - Path: `src/PacksRegistry/StellaOps.PacksRegistry/TASKS.md` 1. [TODO] PACKS-REG-43-001 — Mirroring, signing policies, attestation integration. - Team: Exporter Service Guild - - Path: `src/StellaOps.ExportCenter/TASKS.md` + - Path: `src/ExportCenter/StellaOps.ExportCenter/TASKS.md` 1. [TODO] EXPORT-SVC-35-005, EXPORT-SVC-37-001 — Include pack run manifests in exports. - Team: Notifications Service Guild - - Path: `src/StellaOps.Notifier/TASKS.md` + - Path: `src/Notifier/StellaOps.Notifier/TASKS.md` 1. [TODO] NOTIFY-SVC-40-001 — Emit pack run notifications. - Team: Authority Core & Security Guild - - Path: `src/StellaOps.Authority/TASKS.md` + - Path: `src/Authority/StellaOps.Authority/TASKS.md` 1. [TODO] AUTH-PACKS-43-001 — Enforce pack signing/approval policies, CLI CI scopes. - Team: Docs Guild - Path: `docs/TASKS.md` @@ -1797,26 +1797,26 @@ Generated from SPRINTS.md and module TASKS.md files on 2025-10-19. Waves cluster 1. [TODO] CLI-PACKS-43-002 — Bundle CLI, pack samples, registry mirror into Offline Kit with manifests. - **Sprint 47-49** · Authority-Backed Scopes & Tenancy - Team: Authority Core & Security Guild - - Path: `src/StellaOps.Authority/TASKS.md` + - Path: `src/Authority/StellaOps.Authority/TASKS.md` 1. [TODO] AUTH-TEN-47-001 — JWT/OIDC alignment, scope grammar, tenant/project claims. 2. [TODO] AUTH-TEN-49-001 — Service accounts, delegation, quotas, audit streaming. - Team: BE-Base Platform Guild - - Path: `src/StellaOps.Web/TASKS.md` + - Path: `src/Web/StellaOps.Web/TASKS.md` 1. [TODO] WEB-TEN-47-001/48-001/49-001 — Middleware enforcement, tenant context propagation, ABAC overlay, audit API. - Team: DevEx/CLI Guild - - Path: `src/StellaOps.Cli/TASKS.md` + - Path: `src/Cli/StellaOps.Cli/TASKS.md` 1. [TODO] CLI-TEN-47-001/49-001 — Auth CLI flows, tenant switching, service tokens, delegation. - Team: Console Guild - - Path: `src/StellaOps.Cli/TASKS.md` + - Path: `src/Cli/StellaOps.Cli/TASKS.md` 1. [TODO] CONSOLE-TEN-48-001/49-001 — Tenant switcher, admin screens, audit viewer. - Team: Policy Guild - - Path: `src/StellaOps.Policy.Engine/TASKS.md` + - Path: `src/Policy/StellaOps.Policy.Engine/TASKS.md` 1. [TODO] POLICY-TEN-48-001 — Tenant-aware policy storage, RLS, rationale IDs. - Team: Findings Ledger Guild - - Path: `src/StellaOps.Findings.Ledger/TASKS.md` + - Path: `src/Findings/StellaOps.Findings.Ledger/TASKS.md` 1. [TODO] LEDGER-TEN-48-001 — Tenant partitioning and RLS. 
- Team: Exporter/Notifications/Orchestrator/Task Runner/Concelier/Excititor Guilds - - Paths: `src/StellaOps.ExportCenter/TASKS.md`, `src/StellaOps.Notifier/TASKS.md`, `src/StellaOps.Orchestrator/TASKS.md`, `src/StellaOps.TaskRunner/TASKS.md`, `src/StellaOps.Concelier.Core/TASKS.md`, `src/StellaOps.Excititor.Core/TASKS.md` + - Paths: `src/ExportCenter/StellaOps.ExportCenter/TASKS.md`, `src/Notifier/StellaOps.Notifier/TASKS.md`, `src/Orchestrator/StellaOps.Orchestrator/TASKS.md`, `src/TaskRunner/StellaOps.TaskRunner/TASKS.md`, `src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md`, `src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md` 1. [TODO] Export/Notify tasks (EXPORT-TEN-48-001, NOTIFY-TEN-48-001) — Tenant stamping. 2. [TODO] ORCH-TEN-48-001, TASKRUN-TEN-48-001 — Job context enforcement. 3. [TODO] CONCELIER/EXCITITOR-TEN-48-001 — Tenant-aware linking with aggregation-only guarantee. diff --git a/docs/implplan/SPRINTS.md b/docs/implplan/SPRINTS.md new file mode 100644 index 00000000..80fbf844 --- /dev/null +++ b/docs/implplan/SPRINTS.md @@ -0,0 +1,1096 @@ +This file describes the implementation of Stella Ops (docs/README.md). The implementation must respect the rules in AGENTS.md (read it if you have not). + +| Sprint | Theme | Tasks File Path | Status | Type of Specialist | Task ID | Task Description | +| --- | --- | --- | --- | --- | --- | --- | +| Sprint 16 | Scheduler Intelligence | src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md | DOING (2025-10-27) | Scheduler Worker Guild | SCHED-WORKER-16-201 | Planner loop (cron/event triggers, leases, fairness). | +| Sprint 17 | Symbol Intelligence & Forensics | ops/offline-kit/TASKS.md | BLOCKED (2025-10-26) | Offline Kit Guild, DevOps Guild | DEVOPS-OFFLINE-17-004 | Run mirror_debug_store.py once release artefacts exist and archive verification evidence with the Offline Kit. | +| Sprint 17 | Symbol Intelligence & Forensics | ops/devops/TASKS.md | BLOCKED (2025-10-26) | DevOps Guild | DEVOPS-REL-17-004 | Ensure release workflow publishes `out/release/debug` (build-id tree + manifest) and fails when symbols are missing. | +> DOCS-AOC-19-004: Architecture overview & policy-engine docs refreshed 2025-10-26 — reuse new AOC boundary diagram + metrics guidance. +> DOCS-AOC-19-005: Link to the new AOC reference and architecture overview; include exit code table sourced from those docs. +| Sprint 19 | Aggregation-Only Contract Enforcement | ops/devops/TASKS.md | BLOCKED (2025-10-26) | DevOps Guild, Platform Guild | DEVOPS-AOC-19-001 | Integrate AOC analyzer/guard enforcement into CI pipelines. | +| Sprint 19 | Aggregation-Only Contract Enforcement | ops/devops/TASKS.md | BLOCKED (2025-10-26) | DevOps Guild | DEVOPS-AOC-19-002 | Add CI stage running `stella aoc verify` against seeded snapshots. | +| Sprint 19 | Aggregation-Only Contract Enforcement | ops/devops/TASKS.md | BLOCKED (2025-10-26) | DevOps Guild, QA Guild | DEVOPS-AOC-19-003 | Enforce guard coverage thresholds and export metrics to dashboards. | +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Cli/StellaOps.Cli/TASKS.md | DOING (2025-10-27) | DevEx/CLI Guild | CLI-AOC-19-001 | Implement `stella sources ingest --dry-run` command. | +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-AOC-19-002 | Implement `stella aoc verify` command with exit codes.
| +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Cli/StellaOps.Cli/TASKS.md | TODO | Docs/CLI Guild | CLI-AOC-19-003 | Update CLI reference and quickstart docs for new AOC commands. | +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-CORE-AOC-19-001 | Implement AOC repository guard rejecting forbidden fields. | +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-CORE-AOC-19-002 | Deliver deterministic linkset extraction for advisories. | +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-CORE-AOC-19-003 | Enforce idempotent append-only upsert with supersedes pointers. | +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-CORE-AOC-19-004 | Remove ingestion normalization; defer derived logic to Policy Engine. | +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-CORE-AOC-19-013 | Extend smoke coverage to validate tenant-scoped Authority tokens and cross-tenant rejection. | +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/TASKS.md | TODO | Concelier Storage Guild | CONCELIER-STORE-AOC-19-001 | Add Mongo schema validator for `advisory_raw`. | +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/TASKS.md | TODO | Concelier Storage Guild | CONCELIER-STORE-AOC-19-002 | Create idempotency unique index backed by migration scripts. | +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/TASKS.md | TODO | Concelier Storage Guild | CONCELIER-STORE-AOC-19-003 | Deliver append-only migration/backfill plan with supersedes chaining. | +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/TASKS.md | TODO | Concelier Storage Guild, DevOps Guild | CONCELIER-STORE-AOC-19-004 | Document validator deployment steps for online/offline clusters. | +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild, Observability Guild | CONCELIER-WEB-AOC-19-002 | Emit AOC observability metrics, traces, and structured logs. | +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | TODO | QA Guild | CONCELIER-WEB-AOC-19-003 | Add schema/guard unit tests covering AOC error codes. | +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild, QA Guild | CONCELIER-WEB-AOC-19-004 | Build integration suite validating deterministic ingest under load. | +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-CORE-AOC-19-001 | Introduce VEX repository guard enforcing AOC invariants. 
| +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-CORE-AOC-19-002 | Build deterministic VEX linkset extraction. | +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-CORE-AOC-19-003 | Enforce append-only idempotent VEX raw upserts. | +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-CORE-AOC-19-004 | Remove ingestion consensus logic; rely on Policy Engine. | +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-CORE-AOC-19-013 | Update smoke suites to enforce tenant-scoped Authority tokens and cross-tenant VEX rejection. | +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/TASKS.md | TODO | Excititor Storage Guild | EXCITITOR-STORE-AOC-19-001 | Add Mongo schema validator for `vex_raw`. | +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/TASKS.md | TODO | Excititor Storage Guild | EXCITITOR-STORE-AOC-19-002 | Create idempotency unique index for VEX raw documents. | +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/TASKS.md | TODO | Excititor Storage Guild | EXCITITOR-STORE-AOC-19-003 | Deliver append-only migration/backfill for VEX raw collections. | +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/TASKS.md | TODO | Excititor Storage Guild, DevOps Guild | EXCITITOR-STORE-AOC-19-004 | Document validator deployment for Excititor clusters/offline kit. | +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Excititor/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-WEB-AOC-19-001 | Implement raw VEX ingestion and AOC verifier endpoints. | +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Excititor/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild, Observability Guild | EXCITITOR-WEB-AOC-19-002 | Emit AOC metrics/traces/logging for Excititor ingestion. | +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Excititor/StellaOps.Excititor.WebService/TASKS.md | TODO | QA Guild | EXCITITOR-WEB-AOC-19-003 | Add AOC guard test harness for VEX schemas. | +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Excititor/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild, QA Guild | EXCITITOR-WEB-AOC-19-004 | Validate large VEX ingest runs and CLI verification parity. | +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Excititor/StellaOps.Excititor.Worker/TASKS.md | TODO | Excititor Worker Guild | EXCITITOR-WORKER-AOC-19-001 | Rewire worker to persist raw VEX docs with guard enforcement. | +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Excititor/StellaOps.Excititor.Worker/TASKS.md | TODO | Excititor Worker Guild | EXCITITOR-WORKER-AOC-19-002 | Enforce signature/checksum verification prior to raw writes. 
| +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Policy/__Libraries/StellaOps.Policy/TASKS.md | TODO | Policy Guild | POLICY-AOC-19-001 | Add lint preventing ingestion modules from referencing Policy-only helpers. | +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Policy/__Libraries/StellaOps.Policy/TASKS.md | TODO | Policy Guild, Security Guild | POLICY-AOC-19-002 | Enforce Policy-only writes to `effective_finding_*` collections. | +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Policy/__Libraries/StellaOps.Policy/TASKS.md | TODO | Policy Guild | POLICY-AOC-19-003 | Update Policy readers to consume only raw document fields. | +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Policy/__Libraries/StellaOps.Policy/TASKS.md | TODO | Policy Guild, QA Guild | POLICY-AOC-19-004 | Add determinism tests for raw-driven policy recomputation. | +| Sprint 19 | Aggregation-Only Contract Enforcement | src/UI/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-AOC-19-001 | Add Sources dashboard tiles surfacing AOC status and violations. | +| Sprint 19 | Aggregation-Only Contract Enforcement | src/UI/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-AOC-19-002 | Build violation drill-down view for offending documents. | +| Sprint 19 | Aggregation-Only Contract Enforcement | src/UI/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-AOC-19-003 | Wire "Verify last 24h" action and CLI parity messaging. | +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Web/StellaOps.Web/TASKS.md | DOING (2025-10-26) | BE-Base Platform Guild | WEB-AOC-19-001 | Provide shared AOC forbidden key set and guard middleware. | +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-AOC-19-002 | Ship provenance builder and signature helpers for ingestion services. | +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild, QA Guild | WEB-AOC-19-003 | Author analyzer + shared test fixtures for guard compliance. | +| Sprint 20 | Policy Engine v2 | ops/devops/TASKS.md | BLOCKED (waiting on POLICY-ENGINE-20-006) | DevOps Guild | DEVOPS-POLICY-20-002 | Run `stella policy simulate` CI stage against golden SBOMs. | +| Sprint 20 | Policy Engine v2 | src/Bench/StellaOps.Bench/TASKS.md | BLOCKED (waiting on SCHED-WORKER-20-302) | Bench Guild, Scheduler Guild | BENCH-POLICY-20-002 | Add incremental run benchmark capturing delta SLA compliance. | +| Sprint 20 | Policy Engine v2 | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild, Docs Guild | CLI-POLICY-20-003 | Extend `stella findings` commands with policy filters and explain view. | +> 2025-10-27: Backend helpers drafted but command integration/tests pending; task reset to TODO awaiting follow-up. +| Sprint 20 | Policy Engine v2 | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-POLICY-20-002 | Strengthen linkset builders with equivalence tables + range parsing. | +| Sprint 20 | Policy Engine v2 | src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/TASKS.md | TODO | Concelier Storage Guild | CONCELIER-POLICY-20-003 | Add advisory selection cursors + change-stream checkpoints for policy runs. | +| Sprint 20 | Policy Engine v2 | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-POLICY-20-001 | Provide advisory selection endpoints for policy engine (batch PURL/ID). 
| +| Sprint 20 | Policy Engine v2 | src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-POLICY-20-002 | Enhance VEX linkset scope + version resolution for policy accuracy. | +| Sprint 20 | Policy Engine v2 | src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/TASKS.md | TODO | Excititor Storage Guild | EXCITITOR-POLICY-20-003 | Introduce VEX selection cursors + change-stream checkpoints. | +| Sprint 20 | Policy Engine v2 | src/Excititor/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-POLICY-20-001 | Ship VEX selection APIs aligned with policy join requirements. | +| Sprint 20 | Policy Engine v2 | src/Policy/StellaOps.Policy.Engine/TASKS.md | BLOCKED (2025-10-26) | Policy Guild | POLICY-ENGINE-20-002 | Implement deterministic rule evaluator with priority/first-match semantics. | +| Sprint 20 | Policy Engine v2 | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild, Concelier Core, Excititor Core | POLICY-ENGINE-20-003 | Build SBOM↔advisory↔VEX linkset joiners with deterministic batching. | +| Sprint 20 | Policy Engine v2 | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild, Storage Guild | POLICY-ENGINE-20-004 | Materialize effective findings with append-only history and tenant scoping. | +| Sprint 20 | Policy Engine v2 | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild, Security Guild | POLICY-ENGINE-20-005 | Enforce determinism guard banning wall-clock, RNG, and network usage. | +| Sprint 20 | Policy Engine v2 | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild, Scheduler Guild | POLICY-ENGINE-20-006 | Implement incremental orchestrator reacting to change streams. | +| Sprint 20 | Policy Engine v2 | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild, Observability Guild | POLICY-ENGINE-20-007 | Emit policy metrics, traces, and sampled rule-hit logs. | +| Sprint 20 | Policy Engine v2 | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild, QA Guild | POLICY-ENGINE-20-008 | Add unit/property/golden/perf suites verifying determinism + SLA. | +| Sprint 20 | Policy Engine v2 | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild, Storage Guild | POLICY-ENGINE-20-009 | Define Mongo schemas/indexes + migrations for policies/runs/findings. | +| Sprint 20 | Policy Engine v2 | src/Scheduler/__Libraries/StellaOps.Scheduler.Models/TASKS.md | TODO | Scheduler Models Guild | SCHED-MODELS-20-002 | Update schema docs with policy run lifecycle samples. | +| Sprint 20 | Policy Engine v2 | src/Scheduler/StellaOps.Scheduler.WebService/TASKS.md | TODO | Scheduler WebService Guild | SCHED-WEB-20-001 | Expose policy run scheduling APIs with scope enforcement. | +| Sprint 20 | Policy Engine v2 | src/Scheduler/StellaOps.Scheduler.WebService/TASKS.md | TODO | Scheduler WebService Guild | SCHED-WEB-20-002 | Provide simulation trigger endpoint returning diff metadata. | +| Sprint 20 | Policy Engine v2 | src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker Guild | SCHED-WORKER-20-301 | Schedule policy runs via API with idempotent job tracking. | +| Sprint 20 | Policy Engine v2 | src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker Guild | SCHED-WORKER-20-302 | Implement delta targeting leveraging change streams + policy metadata. 
| +| Sprint 20 | Policy Engine v2 | src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker Guild, Observability Guild | SCHED-WORKER-20-303 | Expose policy scheduling metrics/logs with policy/run identifiers. | +| Sprint 20 | Policy Engine v2 | src/UI/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-POLICY-20-001 | Ship Monaco-based policy editor with inline diagnostics + checklists. | +| Sprint 20 | Policy Engine v2 | src/UI/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-POLICY-20-002 | Build simulation panel with deterministic diff rendering + virtualization. | +| Sprint 20 | Policy Engine v2 | src/UI/StellaOps.UI/TASKS.md | TODO | UI Guild, Product Ops | UI-POLICY-20-003 | Implement submit/review/approve workflow with RBAC + audit trail. | +| Sprint 20 | Policy Engine v2 | src/UI/StellaOps.UI/TASKS.md | TODO | UI Guild, Observability Guild | UI-POLICY-20-004 | Add run dashboards (heatmap/VEX wins/suppressions) with export. | +| Sprint 20 | Policy Engine v2 | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-POLICY-20-001 | Implement Policy CRUD/compile/run/simulate/findings/explain endpoints. | +| Sprint 20 | Policy Engine v2 | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-POLICY-20-002 | Add pagination, filters, deterministic ordering to policy listings. | +| Sprint 20 | Policy Engine v2 | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild, QA Guild | WEB-POLICY-20-003 | Map engine errors to `ERR_POL_*` responses with contract tests. | +| Sprint 20 | Policy Engine v2 | src/Web/StellaOps.Web/TASKS.md | TODO | Platform Reliability Guild | WEB-POLICY-20-004 | Introduce rate limits/quotas + metrics for simulation endpoints. | +| Sprint 21 | Graph Explorer v1 | src/Bench/StellaOps.Bench/TASKS.md | BLOCKED (2025-10-27) | Bench Guild, Graph Platform Guild | BENCH-GRAPH-21-001 | Graph viewport/path perf harness (50k/100k nodes) measuring Graph API/Indexer latency and cache hit rates. Executed within Sprint 28 Graph program. Upstream Graph API/indexer contracts (`GRAPH-API-28-003`, `GRAPH-INDEX-28-006`) still pending, so benchmarks cannot target stable endpoints yet. | +| Sprint 21 | Graph Explorer v1 | src/Bench/StellaOps.Bench/TASKS.md | BLOCKED (2025-10-27) | Bench Guild, UI Guild | BENCH-GRAPH-21-002 | Headless UI load benchmark for graph canvas interactions (Playwright) tracking render FPS budgets. Executed within Sprint 28 Graph program. Depends on BENCH-GRAPH-21-001 and UI Graph Explorer (`UI-GRAPH-24-001`), both pending. | +| Sprint 21 | Graph Explorer v1 | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | BLOCKED (2025-10-27) | Concelier Core Guild | CONCELIER-GRAPH-21-001 | Enrich SBOM normalization with relationships, scopes, entrypoint annotations for Cartographer. Requires finalized schemas from `CONCELIER-POLICY-20-002` and Cartographer event contract (`CARTO-GRAPH-21-002`). | +| Sprint 21 | Graph Explorer v1 | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | BLOCKED (2025-10-27) | Concelier Core & Scheduler Guilds | CONCELIER-GRAPH-21-002 | Publish SBOM change events with tenant metadata for graph builds. Awaiting projection schema from `CONCELIER-GRAPH-21-001` and Cartographer webhook expectations. | +| Sprint 21 | Graph Explorer v1 | src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md | BLOCKED (2025-10-27) | Excititor Core Guild | EXCITITOR-GRAPH-21-001 | Deliver batched VEX/advisory fetch helpers for inspector linkouts. 
Waiting on linkset enrichment (`EXCITITOR-POLICY-20-002`) and Cartographer inspector contract (`CARTO-GRAPH-21-005`). | +| Sprint 21 | Graph Explorer v1 | src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md | BLOCKED (2025-10-27) | Excititor Core Guild | EXCITITOR-GRAPH-21-002 | Enrich overlay metadata with VEX justification summaries for graph overlays. Depends on `EXCITITOR-GRAPH-21-001` and Policy overlay schema (`POLICY-ENGINE-30-001`). | +| Sprint 21 | Graph Explorer v1 | src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/TASKS.md | BLOCKED (2025-10-27) | Excititor Storage Guild | EXCITITOR-GRAPH-21-005 | Create indexes/materialized views for VEX lookups by PURL/policy. Awaiting access pattern specs from `EXCITITOR-GRAPH-21-001`. | +| Sprint 21 | Graph Explorer v1 | src/SbomService/StellaOps.SbomService/TASKS.md | BLOCKED (2025-10-27) | SBOM Service Guild | SBOM-SERVICE-21-001 | Expose normalized SBOM projection API with relationships, scopes, entrypoints. Waiting on Concelier projection schema (`CONCELIER-GRAPH-21-001`). | +| Sprint 21 | Graph Explorer v1 | src/SbomService/StellaOps.SbomService/TASKS.md | BLOCKED (2025-10-27) | SBOM Service & Scheduler Guilds | SBOM-SERVICE-21-002 | Emit SBOM version change events for Cartographer build queue. Depends on SBOM projection API (`SBOM-SERVICE-21-001`) and Scheduler contracts. | +| Sprint 21 | Graph Explorer v1 | src/SbomService/StellaOps.SbomService/TASKS.md | BLOCKED (2025-10-27) | SBOM Service Guild | SBOM-SERVICE-21-003 | Provide entrypoint management API with tenant overrides. Blocked by SBOM projection API contract. | +| Sprint 21 | Graph Explorer v1 | src/SbomService/StellaOps.SbomService/TASKS.md | BLOCKED (2025-10-27) | SBOM Service & Observability Guilds | SBOM-SERVICE-21-004 | Add metrics/traces/logs for SBOM projections. Requires projection pipeline from `SBOM-SERVICE-21-001`. | +| Sprint 21 | Graph Explorer v1 | src/Web/StellaOps.Web/TASKS.md | BLOCKED (2025-10-27) | BE-Base Platform Guild | WEB-GRAPH-21-001 | Add gateway routes for graph APIs with scope enforcement and streaming. Upstream Graph API (`GRAPH-API-28-003`) and Authority scope work (`AUTH-VULN-24-001`) pending. | +| Sprint 21 | Graph Explorer v1 | src/Web/StellaOps.Web/TASKS.md | BLOCKED (2025-10-27) | BE-Base Platform Guild | WEB-GRAPH-21-002 | Implement bbox/zoom/path validation and pagination for graph endpoints. Depends on core proxy routes. | +| Sprint 21 | Graph Explorer v1 | src/Web/StellaOps.Web/TASKS.md | BLOCKED (2025-10-27) | BE-Base Platform & QA Guilds | WEB-GRAPH-21-003 | Map graph errors to `ERR_Graph_*` and support export streaming. Requires `WEB-GRAPH-21-001`. | +| Sprint 21 | Graph Explorer v1 | src/Web/StellaOps.Web/TASKS.md | BLOCKED (2025-10-27) | BE-Base & Policy Guilds | WEB-GRAPH-21-004 | Wire Policy Engine simulation overlays into graph responses. Waiting on Graph routes and Policy overlay schema (`POLICY-ENGINE-30-002`). | +| Sprint 22 | Link-Not-Merge v1 | docs/TASKS.md | BLOCKED (2025-10-27) | Docs Guild | DOCS-LNM-22-001 | Publish advisories aggregation doc with observation/linkset philosophy. | +> Blocked by `CONCELIER-LNM-21-001..003`; draft doc exists but final alignment waits for schema/API delivery. +| Sprint 22 | Link-Not-Merge v1 | docs/TASKS.md | BLOCKED (2025-10-27) | Docs Guild | DOCS-LNM-22-002 | Publish VEX aggregation doc describing observation/linkset flow. | +> Blocked by `EXCITITOR-LNM-21-001..003`; draft doc staged pending observation/linkset implementation. 
+| Sprint 22 | Link-Not-Merge v1 | docs/TASKS.md | BLOCKED (2025-10-27) | Docs Guild | DOCS-LNM-22-005 | Document UI evidence panel with conflict badges/AOC drill-down. | +> Blocked by `UI-LNM-22-001..003`; need shipping UI to capture screenshots and finalize guidance. +| Sprint 22 | Link-Not-Merge v1 | ops/devops/TASKS.md | BLOCKED (2025-10-27) | DevOps Guild | DEVOPS-LNM-22-001 | Execute advisory observation/linkset migration/backfill and automation. | +| Sprint 22 | Link-Not-Merge v1 | ops/devops/TASKS.md | BLOCKED (2025-10-27) | DevOps Guild | DEVOPS-LNM-22-002 | Run VEX observation/linkset migration/backfill with monitoring/runbook. | +| Sprint 22 | Link-Not-Merge v1 | samples/TASKS.md | BLOCKED (2025-10-27) | Samples Guild | SAMPLES-LNM-22-001 | Add advisory observation/linkset fixtures with conflicts. | +| Sprint 22 | Link-Not-Merge v1 | samples/TASKS.md | BLOCKED (2025-10-27) | Samples Guild | SAMPLES-LNM-22-002 | Add VEX observation/linkset fixtures with status disagreements. | +| Sprint 22 | Link-Not-Merge v1 | src/Authority/StellaOps.Authority/TASKS.md | TODO | Authority Core Guild | AUTH-AOC-22-001 | Roll out new advisory/vex ingest/read scopes. | +| Sprint 22 | Link-Not-Merge v1 | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-LNM-22-001 | Implement advisory observation/linkset CLI commands with JSON/OSV export. | +| Sprint 22 | Link-Not-Merge v1 | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-LNM-22-002 | Implement VEX observation/linkset CLI commands. | +| Sprint 22 | Link-Not-Merge v1 | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-LNM-21-001 | Define immutable advisory observation schema with AOC metadata. | +| Sprint 22 | Link-Not-Merge v1 | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild, Data Science Guild | CONCELIER-LNM-21-002 | Implement advisory linkset builder with correlation signals/conflicts. | +| Sprint 22 | Link-Not-Merge v1 | src/Concelier/__Libraries/StellaOps.Concelier.Merge/TASKS.md | TODO | BE-Merge | MERGE-LNM-21-002 | Deprecate merge service and enforce observation-only pipeline. | +| Sprint 22 | Link-Not-Merge v1 | src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/TASKS.md | TODO | Concelier Storage Guild | CONCELIER-LNM-21-101 | Provision observations/linksets collections and indexes. | +| Sprint 22 | Link-Not-Merge v1 | src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/TASKS.md | TODO | Concelier Storage & DevOps Guilds | CONCELIER-LNM-21-102 | Backfill legacy merged advisories into observations/linksets with rollback tooling. | +| Sprint 22 | Link-Not-Merge v1 | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-LNM-21-201 | Ship advisory observation read APIs with pagination/RBAC. | +| Sprint 22 | Link-Not-Merge v1 | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-LNM-21-202 | Implement advisory linkset read/export/evidence endpoints mapped to `ERR_AGG_*`. | +| Sprint 22 | Link-Not-Merge v1 | src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-LNM-21-001 | Define immutable VEX observation model. | +| Sprint 22 | Link-Not-Merge v1 | src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-LNM-21-002 | Build VEX linkset correlator with confidence/conflict recording. 
| +| Sprint 22 | Link-Not-Merge v1 | src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/TASKS.md | TODO | Excititor Storage Guild | EXCITITOR-LNM-21-101 | Provision VEX observation/linkset collections and indexes. | +| Sprint 22 | Link-Not-Merge v1 | src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/TASKS.md | TODO | Excititor Storage & DevOps Guilds | EXCITITOR-LNM-21-102 | Backfill legacy VEX data into observations/linksets with rollback scripts. | +| Sprint 22 | Link-Not-Merge v1 | src/Excititor/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-LNM-21-201 | Expose VEX observation APIs with filters/pagination and RBAC. | +| Sprint 22 | Link-Not-Merge v1 | src/Excititor/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-LNM-21-202 | Implement VEX linkset endpoints + exports with evidence payloads. | +| Sprint 22 | Link-Not-Merge v1 | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-40-001 | Update severity selection to handle multiple source severities per linkset. | +| Sprint 22 | Link-Not-Merge v1 | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild, Excititor Guild | POLICY-ENGINE-40-002 | Integrate VEX linkset conflicts into effective findings/explain traces. | +| Sprint 22 | Link-Not-Merge v1 | src/Scanner/StellaOps.Scanner.WebService/TASKS.md | TODO | Scanner WebService Guild | SCANNER-LNM-21-001 | Update report/runtime payloads to consume linksets and surface source evidence. | +| Sprint 22 | Link-Not-Merge v1 | src/UI/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-LNM-22-001 | Deliver Evidence panel with policy banner and source observations. | +| Sprint 22 | Link-Not-Merge v1 | src/UI/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-LNM-22-003 | Add VEX evidence tab with conflict indicators and exports. | +| Sprint 22 | Link-Not-Merge v1 | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-LNM-21-001 | Surface advisory observation/linkset APIs through gateway with RBAC. | +| Sprint 22 | Link-Not-Merge v1 | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-LNM-21-002 | Expose VEX observation/linkset endpoints with export handling. | +| Sprint 23 | StellaOps Console | docs/TASKS.md | TODO | Docs Guild | DOCS-CONSOLE-23-015 | Produce `/docs/architecture/console.md` describing packages, data flow, SSE design. | +| Sprint 23 | StellaOps Console | docs/TASKS.md | TODO | Docs Guild | DOCS-CONSOLE-23-017 | Create `/docs/examples/ui-tours.md` walkthroughs with annotated screenshots/GIFs. | +| Sprint 23 | StellaOps Console | docs/TASKS.md | TODO | Docs Guild | DOCS-CONSOLE-23-018 | Execute console security checklist and record Security Guild sign-off. | +| Sprint 23 | StellaOps Console | ops/deployment/TASKS.md | TODO | Deployment Guild | DOWNLOADS-CONSOLE-23-001 | Maintain signed downloads manifest pipeline feeding Console + docs parity checks. | +| Sprint 23 | StellaOps Console | ops/devops/TASKS.md | BLOCKED (2025-10-26) | DevOps Guild | DEVOPS-CONSOLE-23-001 | Stand up console CI pipeline (pnpm cache, lint, tests, Playwright, Lighthouse, offline runners). | +| Sprint 23 | StellaOps Console | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-CONSOLE-23-002 | Deliver `stella-console` container + Helm overlays with SBOM/provenance and offline packaging. 
| +| Sprint 23 | StellaOps Console | src/Authority/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-CONSOLE-23-001 | Register Console OIDC client with PKCE, scopes, short-lived tokens, and offline defaults. | +| Sprint 23 | StellaOps Console | src/Authority/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-CONSOLE-23-002 | Provide tenant catalog/user profile endpoints with audit logging and fresh-auth requirements. | +| Sprint 23 | StellaOps Console | src/Authority/StellaOps.Authority/TASKS.md | TODO | Authority Core & Docs Guild | AUTH-CONSOLE-23-003 | Update security docs/sample configs for Console flows, CSP, and session policies. | +| Sprint 23 | StellaOps Console | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-CONSOLE-23-001 | Surface `/console/advisories` aggregation views with per-source metadata and filters. | +| Sprint 23 | StellaOps Console | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-CONSOLE-23-002 | Provide advisory delta metrics API for dashboard + live status ticker. | +| Sprint 23 | StellaOps Console | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-CONSOLE-23-003 | Add search helpers for CVE/GHSA/PURL lookups returning evidence fragments. | +| Sprint 23 | StellaOps Console | src/Excititor/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-CONSOLE-23-001 | Expose `/console/vex` aggregation endpoints with precedence and provenance. | +| Sprint 23 | StellaOps Console | src/Excititor/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-CONSOLE-23-002 | Publish VEX override delta metrics feeding dashboard/status ticker. | +| Sprint 23 | StellaOps Console | src/Excititor/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-CONSOLE-23-003 | Implement VEX search helpers for global search and explain drill-downs. | +| Sprint 23 | StellaOps Console | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild, Scheduler Guild | EXPORT-CONSOLE-23-001 | Implement evidence bundle/export generator with signed manifests and telemetry. | +| Sprint 23 | StellaOps Console | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-CONSOLE-23-001 | Optimize findings/explain APIs for Console filters, aggregation hints, and provenance traces. | +| Sprint 23 | StellaOps Console | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild, Product Ops | POLICY-CONSOLE-23-002 | Expose simulation diff + approval state metadata for policy workspace scenarios. | +| Sprint 23 | StellaOps Console | src/SbomService/StellaOps.SbomService/TASKS.md | TODO | SBOM Service Guild | SBOM-CONSOLE-23-001 | Deliver Console SBOM catalog API with filters, evaluation metadata, and raw projections. | +| Sprint 23 | StellaOps Console | src/SbomService/StellaOps.SbomService/TASKS.md | TODO | SBOM Service Guild | SBOM-CONSOLE-23-002 | Provide component lookup/neighborhood endpoints for global search and overlays. | +| Sprint 23 | StellaOps Console | src/Scheduler/StellaOps.Scheduler.WebService/TASKS.md | TODO | Scheduler WebService Guild | SCHED-CONSOLE-23-001 | Extend runs API with SSE progress, queue lag summaries, RBAC actions, and history pagination. 
| +| Sprint 23 | StellaOps Console | src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker Guild | SCHED-WORKER-CONSOLE-23-201 | Stream run progress events with heartbeat/dedupe for Console SSE consumers. | +| Sprint 23 | StellaOps Console | src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker Guild | SCHED-WORKER-CONSOLE-23-202 | Coordinate evidence bundle job queueing, status tracking, cancellation, and retention. | +| Sprint 23 | StellaOps Console | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-CONSOLE-23-001 | Ship `/console/dashboard` + `/console/filters` aggregates with tenant scoping and deterministic totals. | +| Sprint 23 | StellaOps Console | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild, Scheduler Guild | WEB-CONSOLE-23-002 | Provide `/console/status` polling and `/console/runs/{id}/stream` SSE proxy with heartbeat/backoff. | +| Sprint 23 | StellaOps Console | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild, Policy Guild | WEB-CONSOLE-23-003 | Expose `/console/exports` orchestration for evidence bundles, CSV/JSON streaming, manifest retrieval. | +| Sprint 23 | StellaOps Console | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-CONSOLE-23-004 | Implement `/console/search` fan-out router for CVE/GHSA/PURL/SBOM lookups with caching and RBAC. | +| Sprint 23 | StellaOps Console | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild, DevOps Guild | WEB-CONSOLE-23-005 | Serve `/console/downloads` manifest with signed image metadata and offline guidance. | +| Sprint 24 | Graph & Vuln Explorer v1 | src/Authority/StellaOps.Authority/TASKS.md | TODO | Authority Core Guild | AUTH-VULN-24-001 | Extend scopes (`vuln:read`) and signed permalinks. | +> 2025-10-27: Scope enforcement spike paused; no production change landed. +| Sprint 24 | Graph & Vuln Explorer v1 | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-GRAPH-24-001 | Surface raw advisory observations/linksets for overlay services (no derived aggregation in ingestion). | +> 2025-10-27: Prototype not merged (query layer + CLI consumer under review); resetting to TODO. +| Sprint 24 | Graph & Vuln Explorer v1 | src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-GRAPH-24-001 | Surface raw VEX statements/linksets for overlay services (no suppression/precedence logic here). | +| Sprint 24 | Graph & Vuln Explorer v1 | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-60-001 | Maintain Redis effective decision maps for overlays. | +| Sprint 24 | Graph & Vuln Explorer v1 | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-60-002 | Provide simulation bridge for graph what-if APIs. | +| Sprint 24 | Graph & Vuln Explorer v1 | src/UI/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-GRAPH-24-001 | Build Graph Explorer canvas with virtualization. | +| Sprint 24 | Graph & Vuln Explorer v1 | src/UI/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-GRAPH-24-002 | Implement overlays (Policy/Evidence/License/Exposure). | +| Sprint 25 | Exceptions v1 | docs/TASKS.md | TODO | Docs Guild | DOCS-EXC-25-001 | Document exception governance concepts/workflow. | +| Sprint 25 | Exceptions v1 | docs/TASKS.md | TODO | Docs Guild | DOCS-EXC-25-002 | Document approvals routing / MFA requirements. 
| +| Sprint 25 | Exceptions v1 | docs/TASKS.md | TODO | Docs Guild | DOCS-EXC-25-003 | Publish API documentation for exceptions endpoints. | +| Sprint 25 | Exceptions v1 | docs/TASKS.md | TODO | Docs Guild | DOCS-EXC-25-005 | Document UI exception center + badges. | +| Sprint 25 | Exceptions v1 | docs/TASKS.md | TODO | Docs Guild | DOCS-EXC-25-006 | Update CLI docs for exception commands. | +| Sprint 25 | Exceptions v1 | docs/TASKS.md | TODO | Docs Guild | DOCS-EXC-25-007 | Write migration guide for governed exceptions. | +| Sprint 25 | Exceptions v1 | src/Authority/StellaOps.Authority/TASKS.md | TODO | Authority Core Guild | AUTH-EXC-25-001 | Introduce exception scopes and routing matrix with MFA. | +| Sprint 25 | Exceptions v1 | src/Authority/StellaOps.Authority/TASKS.md | TODO | Authority Core & Docs Guild | AUTH-EXC-25-002 | Update docs/config samples for exception governance. | +| Sprint 25 | Exceptions v1 | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-EXC-25-001 | Implement CLI exception workflow commands. | +| Sprint 25 | Exceptions v1 | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-EXC-25-002 | Extend policy simulate with exception overrides. | +| Sprint 25 | Exceptions v1 | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-70-002 | Create exception collections/bindings storage + repos. | +| Sprint 25 | Exceptions v1 | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-70-003 | Implement Redis exception cache + invalidation. | +| Sprint 25 | Exceptions v1 | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-70-004 | Add metrics/tracing/logging for exception application. | +| Sprint 25 | Exceptions v1 | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-70-005 | Hook workers/events for activation/expiry. | +| Sprint 25 | Exceptions v1 | src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker Guild | SCHED-WORKER-25-101 | Implement exception lifecycle worker for activation/expiry. | +| Sprint 25 | Exceptions v1 | src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker Guild | SCHED-WORKER-25-102 | Add expiring notification job & metrics. | +| Sprint 25 | Exceptions v1 | src/UI/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-EXC-25-001 | Deliver Exception Center (list/kanban) with workflows. | +| Sprint 25 | Exceptions v1 | src/UI/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-EXC-25-002 | Build exception creation wizard with scope/timebox guardrails. | +| Sprint 25 | Exceptions v1 | src/UI/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-EXC-25-003 | Add inline exception drafting/proposing from explorers. | +| Sprint 25 | Exceptions v1 | src/UI/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-EXC-25-004 | Surface badges/countdowns/explain integration. | +| Sprint 25 | Exceptions v1 | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-EXC-25-001 | Ship exception CRUD + workflow API endpoints. | +| Sprint 25 | Exceptions v1 | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-EXC-25-002 | Extend policy endpoints to include exception metadata. | +| Sprint 25 | Exceptions v1 | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-EXC-25-003 | Emit exception events/notifications with rate limits. | +| Sprint 26 | Reachability v1 | docs/TASKS.md | TODO | Docs Guild | DOCS-SIG-26-001 | Document reachability concepts and scoring. 
| +| Sprint 26 | Reachability v1 | docs/TASKS.md | TODO | Docs Guild | DOCS-SIG-26-002 | Document callgraph formats. | +| Sprint 26 | Reachability v1 | docs/TASKS.md | TODO | Docs Guild | DOCS-SIG-26-003 | Document runtime facts ingestion. | +| Sprint 26 | Reachability v1 | docs/TASKS.md | TODO | Docs Guild | DOCS-SIG-26-004 | Document policy weighting for signals. | +| Sprint 26 | Reachability v1 | docs/TASKS.md | TODO | Docs Guild | DOCS-SIG-26-005 | Document UI overlays/timelines. | +| Sprint 26 | Reachability v1 | docs/TASKS.md | TODO | Docs Guild | DOCS-SIG-26-006 | Document CLI reachability commands. | +| Sprint 26 | Reachability v1 | docs/TASKS.md | TODO | Docs Guild | DOCS-SIG-26-007 | Publish API docs for signals endpoints. | +| Sprint 26 | Reachability v1 | docs/TASKS.md | TODO | Docs Guild | DOCS-SIG-26-008 | Write migration guide for enabling reachability. | +| Sprint 26 | Reachability v1 | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-SIG-26-001 | Provision pipelines/deployments for Signals service. | +| Sprint 26 | Reachability v1 | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-SIG-26-002 | Add dashboards/alerts for reachability metrics. | +| Sprint 26 | Reachability v1 | src/Authority/StellaOps.Authority/TASKS.md | TODO | Authority Core Guild | AUTH-SIG-26-001 | Add signals scopes/roles + AOC requirements. | +| Sprint 26 | Reachability v1 | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-SIG-26-001 | Implement reachability CLI commands (upload/list/explain). | +| Sprint 26 | Reachability v1 | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-SIG-26-002 | Add reachability overrides to policy simulate. | +| Sprint 26 | Reachability v1 | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-SIG-26-001 | Expose advisory symbol metadata for signals scoring. | +| Sprint 26 | Reachability v1 | src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-SIG-26-001 | Surface vendor exploitability hints to Signals. | +| Sprint 26 | Reachability v1 | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-80-001 | Integrate reachability inputs into policy evaluation and explainers. | +| Sprint 26 | Reachability v1 | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-80-002 | Optimize reachability fact retrieval + cache. | +| Sprint 26 | Reachability v1 | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-80-003 | Update SPL compiler for reachability predicates. | +| Sprint 26 | Reachability v1 | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-80-004 | Emit reachability metrics/traces. | +| Sprint 26 | Reachability v1 | src/Policy/__Libraries/StellaOps.Policy/TASKS.md | TODO | Policy Guild | POLICY-SPL-24-001 | Extend SPL schema with reachability predicates/actions. | +| Sprint 26 | Reachability v1 | src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker Guild | SCHED-WORKER-26-201 | Implement reachability joiner worker. | +| Sprint 26 | Reachability v1 | src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker Guild | SCHED-WORKER-26-202 | Implement staleness monitor + notifications. 
| +| Sprint 26 | Reachability v1 | src/Signals/StellaOps.Signals/TASKS.md | BLOCKED (2025-10-27) | Signals Guild, Authority Guild | SIGNALS-24-001 | Stand up Signals API skeleton with RBAC + health checks. Host scaffold ready, waiting on `AUTH-SIG-26-001` to finalize scope issuance and tenant enforcement. | +| Sprint 26 | Reachability v1 | src/Signals/StellaOps.Signals/TASKS.md | BLOCKED (2025-10-27) | Signals Guild | SIGNALS-24-002 | Implement callgraph ingestion/normalization pipeline. Waiting on SIGNALS-24-001 skeleton deployment. | +| Sprint 26 | Reachability v1 | src/Signals/StellaOps.Signals/TASKS.md | BLOCKED (2025-10-27) | Signals Guild | SIGNALS-24-003 | Ingest runtime facts and persist context data with AOC provenance. Depends on SIGNALS-24-001 base host. | +| Sprint 26 | Reachability v1 | src/Signals/StellaOps.Signals/TASKS.md | BLOCKED (2025-10-27) | Signals Guild | SIGNALS-24-004 | Deliver reachability scoring engine writing reachability facts. Blocked until ingestion pipelines unblock. | +| Sprint 26 | Reachability v1 | src/Signals/StellaOps.Signals/TASKS.md | BLOCKED (2025-10-27) | Signals Guild | SIGNALS-24-005 | Implement caches + signals events. Downstream of SIGNALS-24-004. | +| Sprint 26 | Reachability v1 | src/UI/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-SIG-26-001 | Add reachability columns/badges to Vulnerability Explorer. | +| Sprint 26 | Reachability v1 | src/UI/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-SIG-26-002 | Enhance Why drawer with call path/timeline. | +| Sprint 26 | Reachability v1 | src/UI/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-SIG-26-003 | Add reachability overlay/time slider to SBOM Graph. | +| Sprint 26 | Reachability v1 | src/UI/StellaOps.UI/TASKS.md | TODO | UI Guild | UI-SIG-26-004 | Build Reachability Center + missing sensor view. | +| Sprint 26 | Reachability v1 | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-SIG-26-001 | Expose signals proxy endpoints with pagination and RBAC. | +| Sprint 26 | Reachability v1 | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-SIG-26-002 | Join reachability data into policy/vuln responses. | +| Sprint 26 | Reachability v1 | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-SIG-26-003 | Support reachability overrides in simulate APIs. | +| Sprint 27 | Policy Studio | docs/TASKS.md | BLOCKED (2025-10-27) | Docs & Policy Guilds | DOCS-POLICY-27-001 | Publish `/docs/policy/studio-overview.md` with lifecycle + roles. | +> Blocked by `REGISTRY-API-27-001` and `POLICY-ENGINE-27-001`; revisit once spec and compile enrichments land. +| Sprint 27 | Policy Studio | docs/TASKS.md | BLOCKED (2025-10-27) | Docs & Console Guilds | DOCS-POLICY-27-002 | Write `/docs/policy/authoring.md` with templates/snippets/lint rules. | +> Blocked by `CONSOLE-STUDIO-27-001` pending; waiting on Studio authoring UX. +| Sprint 27 | Policy Studio | docs/TASKS.md | BLOCKED (2025-10-27) | Docs & Policy Registry Guilds | DOCS-POLICY-27-003 | Document `/docs/policy/versioning-and-publishing.md`. | +> Blocked by `REGISTRY-API-27-007` pending publish/sign pipeline. +| Sprint 27 | Policy Studio | docs/TASKS.md | BLOCKED (2025-10-27) | Docs & Scheduler Guilds | DOCS-POLICY-27-004 | Publish `/docs/policy/simulation.md` with quick vs batch guidance. | +> Blocked by `REGISTRY-API-27-005`/`SCHED-WORKER-27-301` pending batch simulation. 
+| Sprint 27 | Policy Studio | docs/TASKS.md | BLOCKED (2025-10-27) | Docs & Product Ops | DOCS-POLICY-27-005 | Author `/docs/policy/review-and-approval.md`. | +> Blocked by `REGISTRY-API-27-006` review workflow outstanding. +| Sprint 27 | Policy Studio | docs/TASKS.md | BLOCKED (2025-10-27) | Docs & Policy Guilds | DOCS-POLICY-27-006 | Publish `/docs/policy/promotion.md` covering canary + rollback. | +> Blocked by `REGISTRY-API-27-008` promotion APIs not ready. +| Sprint 27 | Policy Studio | docs/TASKS.md | BLOCKED (2025-10-27) | Docs & DevEx/CLI Guilds | DOCS-POLICY-27-007 | Update `/docs/policy/cli.md` with new commands + JSON schemas. | +> Blocked by `CLI-POLICY-27-001..004` CLI commands missing. +| Sprint 27 | Policy Studio | docs/TASKS.md | BLOCKED (2025-10-27) | Docs & Policy Registry Guilds | DOCS-POLICY-27-008 | Publish `/docs/policy/api.md` aligning with Registry OpenAPI. | +> Blocked by Registry OpenAPI (`REGISTRY-API-27-001..008`) incomplete. +| Sprint 27 | Policy Studio | docs/TASKS.md | BLOCKED (2025-10-27) | Docs & Security Guilds | DOCS-POLICY-27-009 | Create `/docs/security/policy-attestations.md`. | +> Blocked by `AUTH-POLICY-27-002` signing integration pending. +| Sprint 27 | Policy Studio | docs/TASKS.md | BLOCKED (2025-10-27) | Docs & Architecture Guilds | DOCS-POLICY-27-010 | Write `/docs/architecture/policy-registry.md`. | +> Blocked by `REGISTRY-API-27-001` & `SCHED-WORKER-27-301` not delivered. +| Sprint 27 | Policy Studio | docs/TASKS.md | BLOCKED (2025-10-27) | Docs & Observability Guilds | DOCS-POLICY-27-011 | Publish `/docs/observability/policy-telemetry.md`. | +> Blocked by `DEVOPS-POLICY-27-004` observability work outstanding. +| Sprint 27 | Policy Studio | docs/TASKS.md | BLOCKED (2025-10-27) | Docs & Ops Guilds | DOCS-POLICY-27-012 | Write `/docs/runbooks/policy-incident.md`. | +> Blocked by `DEPLOY-POLICY-27-002` ops playbooks pending. +| Sprint 27 | Policy Studio | docs/TASKS.md | BLOCKED (2025-10-27) | Docs & Policy Guilds | DOCS-POLICY-27-013 | Update `/docs/examples/policy-templates.md`. | +> Blocked by `CONSOLE-STUDIO-27-001`/`REGISTRY-API-27-002` templates missing. +| Sprint 27 | Policy Studio | docs/TASKS.md | BLOCKED (2025-10-27) | Docs & Policy Registry Guilds | DOCS-POLICY-27-014 | Refresh `/docs/aoc/aoc-guardrails.md` with Studio guardrails. | +> Blocked by `REGISTRY-API-27-003` & `WEB-POLICY-27-001` guardrails not implemented. +| Sprint 27 | Policy Studio | ops/deployment/TASKS.md | TODO | Deployment & Policy Registry Guilds | DEPLOY-POLICY-27-001 | Create Helm/Compose overlays for Policy Registry + workers with signing config. | +| Sprint 27 | Policy Studio | ops/deployment/TASKS.md | TODO | Deployment & Policy Guilds | DEPLOY-POLICY-27-002 | Document policy rollout/rollback playbooks in runbook. | +| Sprint 27 | Policy Studio | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-POLICY-27-001 | Add CI stage for policy lint/compile/test + secret scanning and artifacts. | +| Sprint 27 | Policy Studio | ops/devops/TASKS.md | TODO | DevOps & Policy Registry Guilds | DEVOPS-POLICY-27-002 | Provide optional batch simulation CI job with drift gating + PR comment. | +| Sprint 27 | Policy Studio | ops/devops/TASKS.md | TODO | DevOps & Security Guilds | DEVOPS-POLICY-27-003 | Manage signing keys + attestation verification in pipelines. | +| Sprint 27 | Policy Studio | ops/devops/TASKS.md | TODO | DevOps & Observability Guilds | DEVOPS-POLICY-27-004 | Build dashboards/alerts for compile latency, queue depth, approvals, promotions. 
| +| Sprint 27 | Policy Studio | src/Authority/StellaOps.Authority/TASKS.md | TODO | Authority Core Guild | AUTH-POLICY-27-001 | Define Policy Studio roles/scopes for author/review/approve/operate/audit. | +| Sprint 27 | Policy Studio | src/Authority/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guilds | AUTH-POLICY-27-002 | Wire signing service + fresh-auth enforcement for publish/promote. | +| Sprint 27 | Policy Studio | src/Authority/StellaOps.Authority/TASKS.md | TODO | Authority Core & Docs Guild | AUTH-POLICY-27-003 | Update authority configuration/docs for Policy Studio roles & signing. | +| Sprint 27 | Policy Studio | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-POLICY-27-001 | Implement policy workspace CLI commands (init, lint, compile, test). | +| Sprint 27 | Policy Studio | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-POLICY-27-002 | Add version bump, submit, review/approve CLI workflow commands. | +| Sprint 27 | Policy Studio | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-POLICY-27-003 | Extend simulate command for quick/batch runs, manifests, CI reports. | +| Sprint 27 | Policy Studio | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-POLICY-27-004 | Implement publish/promote/rollback/sign CLI lifecycle commands. | +| Sprint 27 | Policy Studio | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI & Docs Guilds | CLI-POLICY-27-005 | Update CLI docs/reference for Policy Studio commands and schemas. | +| Sprint 27 | Policy Studio | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-27-001 | Return rule coverage, symbol table, docs, hashes from compile endpoint. | +| Sprint 27 | Policy Studio | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-27-002 | Enhance simulate outputs with heatmap, explain traces, delta summaries. | +| Sprint 27 | Policy Studio | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-27-003 | Enforce complexity/time limits with diagnostics. | +| Sprint 27 | Policy Studio | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-27-004 | Update tests/fixtures for coverage, symbol table, explain, complexity. | +| Sprint 27 | Policy Studio | src/Policy/StellaOps.Policy.Registry/TASKS.md | TODO | Policy Registry Guild | REGISTRY-API-27-001 | Define Policy Registry OpenAPI spec for workspaces, versions, reviews, simulations, promotions, attestations. | +| Sprint 27 | Policy Studio | src/Policy/StellaOps.Policy.Registry/TASKS.md | TODO | Policy Registry Guild | REGISTRY-API-27-002 | Implement workspace storage + CRUD with tenant retention policies. | +| Sprint 27 | Policy Studio | src/Policy/StellaOps.Policy.Registry/TASKS.md | TODO | Policy Registry Guild | REGISTRY-API-27-003 | Integrate compile pipeline storing diagnostics, symbol tables, complexity metrics. | +| Sprint 27 | Policy Studio | src/Policy/StellaOps.Policy.Registry/TASKS.md | TODO | Policy Registry Guild | REGISTRY-API-27-004 | Deliver quick simulation API with limits and deterministic outputs. | +| Sprint 27 | Policy Studio | src/Policy/StellaOps.Policy.Registry/TASKS.md | TODO | Policy Registry & Scheduler Guilds | REGISTRY-API-27-005 | Build batch simulation orchestration, reduction, and evidence bundle storage. 
+| Sprint 27 | Policy Studio | src/Policy/StellaOps.Policy.Registry/TASKS.md | TODO | Policy Registry Guild | REGISTRY-API-27-006 | Implement review workflow with comments, required approvers, webhooks. |
+| Sprint 27 | Policy Studio | src/Policy/StellaOps.Policy.Registry/TASKS.md | TODO | Policy Registry & Security Guilds | REGISTRY-API-27-007 | Ship publish/sign pipeline with attestations, immutable versions. |
+| Sprint 27 | Policy Studio | src/Policy/StellaOps.Policy.Registry/TASKS.md | TODO | Policy Registry Guild | REGISTRY-API-27-008 | Implement promotion/canary bindings per tenant/environment with rollback. |
+| Sprint 27 | Policy Studio | src/Policy/StellaOps.Policy.Registry/TASKS.md | TODO | Policy Registry & Observability Guilds | REGISTRY-API-27-009 | Instrument metrics/logs/traces for compile, simulation, approval latency. |
+| Sprint 27 | Policy Studio | src/Policy/StellaOps.Policy.Registry/TASKS.md | TODO | Policy Registry & QA Guilds | REGISTRY-API-27-010 | Build unit/integration/load test suites and seeded fixtures. |
+| Sprint 27 | Policy Studio | src/Scheduler/StellaOps.Scheduler.WebService/TASKS.md | TODO | Scheduler WebService Guild | SCHED-CONSOLE-27-001 | Provide policy simulation orchestration endpoints with SSE + RBAC. |
+| Sprint 27 | Policy Studio | src/Scheduler/StellaOps.Scheduler.WebService/TASKS.md | TODO | Scheduler WebService & Observability Guilds | SCHED-CONSOLE-27-002 | Emit policy simulation telemetry endpoints/metrics + webhooks. |
+| Sprint 27 | Policy Studio | src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker Guild | SCHED-WORKER-27-301 | Implement batch simulation worker sharding SBOMs with retries/backoff. |
+| Sprint 27 | Policy Studio | src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker Guild | SCHED-WORKER-27-302 | Build reducer job aggregating shard outputs into manifests with checksums. |
+| Sprint 27 | Policy Studio | src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker & Security Guilds | SCHED-WORKER-27-303 | Enforce tenant isolation/attestation integration and secret scanning for jobs. |
+| Sprint 27 | Policy Studio | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-POLICY-27-001 | Proxy Policy Registry APIs with tenant scoping, RBAC, evidence streaming. |
+| Sprint 27 | Policy Studio | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-POLICY-27-002 | Implement review lifecycle routes with audit logs and webhooks. |
+| Sprint 27 | Policy Studio | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform & Scheduler Guilds | WEB-POLICY-27-003 | Expose quick/batch simulation endpoints with SSE progress + manifests. |
+| Sprint 27 | Policy Studio | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform & Security Guilds | WEB-POLICY-27-004 | Add publish/promote/rollback endpoints with canary + signing enforcement. |
+| Sprint 27 | Policy Studio | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform & Observability Guilds | WEB-POLICY-27-005 | Instrument Policy Studio metrics/logs for dashboards. |
+| Sprint 28 | Graph Explorer | docs/TASKS.md | TODO | Docs & SBOM Guilds | DOCS-GRAPH-28-001 | Publish `/docs/sbom/graph-explorer-overview.md`. |
+| Sprint 28 | Graph Explorer | docs/TASKS.md | TODO | Docs & Console Guilds | DOCS-GRAPH-28-002 | Write `/docs/sbom/graph-using-the-console.md` with walkthrough + accessibility tips. |
+| Sprint 28 | Graph Explorer | docs/TASKS.md | TODO | Docs & Graph API Guilds | DOCS-GRAPH-28-003 | Document `/docs/sbom/graph-query-language.md` (JSON schema, cost rules). |
+| Sprint 28 | Graph Explorer | docs/TASKS.md | TODO | Docs & Graph API Guilds | DOCS-GRAPH-28-004 | Publish `/docs/sbom/graph-api.md` endpoints + streaming guidance. |
+| Sprint 28 | Graph Explorer | docs/TASKS.md | TODO | Docs & CLI Guilds | DOCS-GRAPH-28-005 | Produce `/docs/sbom/graph-cli.md` command reference. |
+| Sprint 28 | Graph Explorer | docs/TASKS.md | TODO | Docs & Policy Guilds | DOCS-GRAPH-28-006 | Publish `/docs/policy/graph-overlays.md`. |
+| Sprint 28 | Graph Explorer | docs/TASKS.md | TODO | Docs & Excititor Guilds | DOCS-GRAPH-28-007 | Document `/docs/vex/graph-integration.md`. |
+| Sprint 28 | Graph Explorer | docs/TASKS.md | TODO | Docs & Concelier Guilds | DOCS-GRAPH-28-008 | Document `/docs/advisories/graph-integration.md`. |
+| Sprint 28 | Graph Explorer | docs/TASKS.md | TODO | Docs & Architecture Guilds | DOCS-GRAPH-28-009 | Author `/docs/architecture/graph-services.md`. |
+| Sprint 28 | Graph Explorer | docs/TASKS.md | TODO | Docs & Observability Guilds | DOCS-GRAPH-28-010 | Publish `/docs/observability/graph-telemetry.md`. |
+| Sprint 28 | Graph Explorer | docs/TASKS.md | TODO | Docs & Ops Guilds | DOCS-GRAPH-28-011 | Write `/docs/runbooks/graph-incidents.md`. |
+| Sprint 28 | Graph Explorer | docs/TASKS.md | TODO | Docs & Security Guilds | DOCS-GRAPH-28-012 | Create `/docs/security/graph-rbac.md`. |
+| Sprint 28 | Graph Explorer | ops/deployment/TASKS.md | TODO | Deployment Guild | DEPLOY-GRAPH-28-001 | Provide deployment/offline instructions for Graph Indexer/API, including cache seeds. |
+| Sprint 28 | Graph Explorer | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-GRAPH-28-001 | Configure load/perf tests, query budget alerts, and CI smoke for graph APIs. |
+| Sprint 28 | Graph Explorer | ops/devops/TASKS.md | TODO | DevOps & Security Guilds | DEVOPS-GRAPH-28-002 | Implement caching/backpressure limits, rate limiting configs, and runaway query kill switches. |
+| Sprint 28 | Graph Explorer | ops/devops/TASKS.md | TODO | DevOps & Observability Guilds | DEVOPS-GRAPH-28-003 | Build dashboards/alerts for tile latency, query denials, memory pressure. |
+| Sprint 28 | Graph Explorer | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-GRAPH-28-001 | Ship `stella sbom graph` subcommands (search, query, paths, diff, impacted, export) with JSON output + exit codes. |
+| Sprint 28 | Graph Explorer | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-GRAPH-28-002 | Add saved query management + deep link helpers to CLI. |
+| Sprint 28 | Graph Explorer | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-GRAPH-28-003 | Update CLI docs/examples for Graph Explorer commands. |
+| Sprint 28 | Graph Explorer | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-GRAPH-24-101 | Deliver advisory summary API feeding graph tooltips. |
+| Sprint 28 | Graph Explorer | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-GRAPH-28-102 | Add batch fetch for advisory observations/linksets keyed by component sets to feed Graph overlay tooltips efficiently. |
+| Sprint 28 | Graph Explorer | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | WEB-LNM-21-001 | Provide advisory observation endpoints optimized for graph overlays. |
+| Sprint 28 | Graph Explorer | src/Excititor/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-GRAPH-24-101 | Provide VEX summary API for Graph Explorer inspector overlays. |
+| Sprint 28 | Graph Explorer | src/Graph/StellaOps.Graph.Api/TASKS.md | TODO | Graph API Guild | GRAPH-API-28-001 | Publish Graph API OpenAPI + JSON schemas for queries/tiles. |
+| Sprint 28 | Graph Explorer | src/Graph/StellaOps.Graph.Api/TASKS.md | TODO | Graph API Guild | GRAPH-API-28-002 | Implement `/graph/search` with caching and RBAC. |
+| Sprint 28 | Graph Explorer | src/Graph/StellaOps.Graph.Api/TASKS.md | TODO | Graph API Guild | GRAPH-API-28-003 | Build query planner + streaming tile pipeline with budgets. |
+| Sprint 28 | Graph Explorer | src/Graph/StellaOps.Graph.Api/TASKS.md | TODO | Graph API Guild | GRAPH-API-28-004 | Deliver `/graph/paths` with depth limits and policy overlay support. |
+| Sprint 28 | Graph Explorer | src/Graph/StellaOps.Graph.Api/TASKS.md | TODO | Graph API Guild | GRAPH-API-28-005 | Implement `/graph/diff` streaming adds/removes/changes for SBOM snapshots. |
+| Sprint 28 | Graph Explorer | src/Graph/StellaOps.Graph.Api/TASKS.md | TODO | Graph API Guild | GRAPH-API-28-006 | Compose advisory/VEX/policy overlays with caching + explain sampling. |
+| Sprint 28 | Graph Explorer | src/Graph/StellaOps.Graph.Api/TASKS.md | TODO | Graph API Guild | GRAPH-API-28-007 | Provide export jobs (GraphML/CSV/NDJSON/PNG/SVG) with manifests. |
+| Sprint 28 | Graph Explorer | src/Graph/StellaOps.Graph.Api/TASKS.md | TODO | Graph API & Authority Guilds | GRAPH-API-28-008 | Enforce RBAC scopes, tenant headers, audit logging, rate limits. |
+| Sprint 28 | Graph Explorer | src/Graph/StellaOps.Graph.Api/TASKS.md | TODO | Graph API & Observability Guilds | GRAPH-API-28-009 | Instrument metrics/logs/traces; publish dashboards. |
+| Sprint 28 | Graph Explorer | src/Graph/StellaOps.Graph.Api/TASKS.md | TODO | Graph API & QA Guilds | GRAPH-API-28-010 | Build unit/integration/load tests with synthetic datasets. |
+| Sprint 28 | Graph Explorer | src/Graph/StellaOps.Graph.Api/TASKS.md | TODO | Graph API & DevOps Guilds | GRAPH-API-28-011 | Ship deployment/offline manifests + gateway integration docs. |
+| Sprint 28 | Graph Explorer | src/Graph/StellaOps.Graph.Indexer/TASKS.md | TODO | Graph Indexer Guild | GRAPH-INDEX-28-001 | Define node/edge schemas, identity rules, and fixtures for graph ingestion. |
+| Sprint 28 | Graph Explorer | src/Graph/StellaOps.Graph.Indexer/TASKS.md | TODO | Graph Indexer Guild | GRAPH-INDEX-28-002 | Implement SBOM ingest consumer generating artifact/package/file nodes & edges. |
+| Sprint 28 | Graph Explorer | src/Graph/StellaOps.Graph.Indexer/TASKS.md | TODO | Graph Indexer Guild | GRAPH-INDEX-28-003 | Serve advisory overlay tiles from Concelier linksets (no mutation of raw node/edge stores). |
+| Sprint 28 | Graph Explorer | src/Graph/StellaOps.Graph.Indexer/TASKS.md | TODO | Graph Indexer Guild | GRAPH-INDEX-28-004 | Integrate VEX statements for `vex_exempts` edges with precedence metadata. |
+| Sprint 28 | Graph Explorer | src/Graph/StellaOps.Graph.Indexer/TASKS.md | TODO | Graph Indexer & Policy Guilds | GRAPH-INDEX-28-005 | Hydrate policy overlay nodes/edges referencing determinations + explains. |
+| Sprint 28 | Graph Explorer | src/Graph/StellaOps.Graph.Indexer/TASKS.md | TODO | Graph Indexer Guild | GRAPH-INDEX-28-006 | Produce graph snapshots per SBOM with lineage for diff jobs. |
+| Sprint 28 | Graph Explorer | src/Graph/StellaOps.Graph.Indexer/TASKS.md | TODO | Graph Indexer & Observability Guilds | GRAPH-INDEX-28-007 | Run clustering/centrality background jobs and persist cluster ids. |
+| Sprint 28 | Graph Explorer | src/Graph/StellaOps.Graph.Indexer/TASKS.md | TODO | Graph Indexer Guild | GRAPH-INDEX-28-008 | Build incremental/backfill pipeline with change streams, retries, backlog metrics. |
+| Sprint 28 | Graph Explorer | src/Graph/StellaOps.Graph.Indexer/TASKS.md | TODO | Graph Indexer & QA Guilds | GRAPH-INDEX-28-009 | Extend tests/perf fixtures ensuring determinism on large graphs. |
+| Sprint 28 | Graph Explorer | src/Graph/StellaOps.Graph.Indexer/TASKS.md | TODO | Graph Indexer & DevOps Guilds | GRAPH-INDEX-28-010 | Provide deployment/offline artifacts and docs for Graph Indexer. |
+| Sprint 28 | Graph Explorer | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-30-001 | Finalize graph overlay contract + projection API. |
+| Sprint 28 | Graph Explorer | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-30-002 | Implement simulation overlay bridge for Graph Explorer queries. |
+| Sprint 28 | Graph Explorer | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy & Scheduler Guilds | POLICY-ENGINE-30-003 | Emit change events for effective findings supporting graph overlays. |
+| Sprint 28 | Graph Explorer | src/Scheduler/StellaOps.Scheduler.WebService/TASKS.md | DOING (2025-10-26) | Scheduler WebService Guild, Scheduler Storage Guild | SCHED-WEB-21-004 | Persist graph jobs + emit completion events/webhook. |
+| Sprint 28 | Graph Explorer | src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker Guild | SCHED-WORKER-21-201 | Run graph build worker for SBOM snapshots with retries/backoff. |
+| Sprint 28 | Graph Explorer | src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker Guild | SCHED-WORKER-21-202 | Execute overlay refresh worker subscribing to change events. |
+| Sprint 28 | Graph Explorer | src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker & Observability Guilds | SCHED-WORKER-21-203 | Emit metrics/logs for graph build/overlay jobs. |
+| Sprint 28 | Graph Explorer | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-GRAPH-24-001 | Route `/graph/*` APIs through gateway with tenant scoping and RBAC. |
+| Sprint 28 | Graph Explorer | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-GRAPH-24-002 | Maintain overlay proxy routes to dedicated services (Policy/Vuln API), ensuring caching + RBAC only. |
+| Sprint 28 | Graph Explorer | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform & Observability Guilds | WEB-GRAPH-24-004 | Add Graph Explorer telemetry endpoints and metrics aggregation. |
+| Sprint 29 | Vulnerability Explorer | docs/TASKS.md | TODO | Docs Guild | DOCS-VULN-29-001 | Publish `/docs/vuln/explorer-overview.md`. |
+| Sprint 29 | Vulnerability Explorer | docs/TASKS.md | TODO | Docs & Console Guilds | DOCS-VULN-29-002 | Write `/docs/vuln/explorer-using-console.md`. |
+| Sprint 29 | Vulnerability Explorer | docs/TASKS.md | TODO | Docs Guild | DOCS-VULN-29-003 | Author `/docs/vuln/explorer-api.md`. |
+| Sprint 29 | Vulnerability Explorer | docs/TASKS.md | TODO | Docs Guild | DOCS-VULN-29-004 | Publish `/docs/vuln/explorer-cli.md`. |
+| Sprint 29 | Vulnerability Explorer | docs/TASKS.md | TODO | Docs & Ledger Guilds | DOCS-VULN-29-005 | Document Findings Ledger (`/docs/vuln/findings-ledger.md`). |
+| Sprint 29 | Vulnerability Explorer | docs/TASKS.md | TODO | Docs & Policy Guilds | DOCS-VULN-29-006 | Update `/docs/policy/vuln-determinations.md`. |
+| Sprint 29 | Vulnerability Explorer | docs/TASKS.md | TODO | Docs & Excititor Guilds | DOCS-VULN-29-007 | Publish `/docs/vex/explorer-integration.md`. |
+| Sprint 29 | Vulnerability Explorer | docs/TASKS.md | TODO | Docs & Concelier Guilds | DOCS-VULN-29-008 | Publish `/docs/advisories/explorer-integration.md`. |
+| Sprint 29 | Vulnerability Explorer | docs/TASKS.md | TODO | Docs & SBOM Guilds | DOCS-VULN-29-009 | Publish `/docs/sbom/vuln-resolution.md`. |
+| Sprint 29 | Vulnerability Explorer | docs/TASKS.md | TODO | Docs & Observability Guilds | DOCS-VULN-29-010 | Publish `/docs/observability/vuln-telemetry.md`. |
+| Sprint 29 | Vulnerability Explorer | docs/TASKS.md | TODO | Docs & Security Guilds | DOCS-VULN-29-011 | Publish `/docs/security/vuln-rbac.md`. |
+| Sprint 29 | Vulnerability Explorer | docs/TASKS.md | TODO | Docs & Ops Guilds | DOCS-VULN-29-012 | Publish `/docs/runbooks/vuln-ops.md`. |
+| Sprint 29 | Vulnerability Explorer | docs/TASKS.md | TODO | Docs & Deployment Guilds | DOCS-VULN-29-013 | Update `/docs/install/containers.md` with Findings Ledger & Vuln Explorer API. |
+| Sprint 29 | Vulnerability Explorer | ops/deployment/TASKS.md | TODO | Deployment & Findings Ledger Guilds | DEPLOY-VULN-29-001 | Provide deployments for Findings Ledger/projector with migrations/backups. |
+| Sprint 29 | Vulnerability Explorer | ops/deployment/TASKS.md | TODO | Deployment & Vuln Explorer API Guilds | DEPLOY-VULN-29-002 | Package Vuln Explorer API deployments/health checks/offline kit notes. |
+| Sprint 29 | Vulnerability Explorer | ops/devops/TASKS.md | TODO | DevOps & Findings Ledger Guilds | DEVOPS-VULN-29-001 | Set up CI/backups/anchoring monitoring for Findings Ledger. |
+| Sprint 29 | Vulnerability Explorer | ops/devops/TASKS.md | TODO | DevOps & Vuln Explorer API Guilds | DEVOPS-VULN-29-002 | Configure Vuln Explorer perf tests, budgets, dashboards, alerts. |
+| Sprint 29 | Vulnerability Explorer | ops/devops/TASKS.md | TODO | DevOps & Console Guilds | DEVOPS-VULN-29-003 | Integrate Vuln Explorer telemetry pipeline with privacy safeguards + dashboards. |
+| Sprint 29 | Vulnerability Explorer | src/Authority/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-VULN-29-001 | Define Vuln Explorer RBAC/ABAC scopes and issuer metadata. |
+| Sprint 29 | Vulnerability Explorer | src/Authority/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-VULN-29-002 | Enforce CSRF, attachment signing, and audit logging referencing ledger hashes. |
+| Sprint 29 | Vulnerability Explorer | src/Authority/StellaOps.Authority/TASKS.md | TODO | Authority Core & Docs Guild | AUTH-VULN-29-003 | Update docs/config samples for Vuln Explorer roles and security posture. |
+| Sprint 29 | Vulnerability Explorer | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-VULN-29-001 | Implement `stella vuln list` with grouping, filters, JSON/CSV output. |
+| Sprint 29 | Vulnerability Explorer | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-VULN-29-002 | Implement `stella vuln show` with evidence/policy/path display. |
+| Sprint 29 | Vulnerability Explorer | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-VULN-29-003 | Add workflow CLI commands (assign/comment/accept-risk/verify-fix/target-fix/reopen). |
+| Sprint 29 | Vulnerability Explorer | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-VULN-29-004 | Implement `stella vuln simulate` producing diff summaries/Markdown. |
+| Sprint 29 | Vulnerability Explorer | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-VULN-29-005 | Implement `stella vuln export` and bundle signature verification. |
+| Sprint 29 | Vulnerability Explorer | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI & Docs Guilds | CLI-VULN-29-006 | Update CLI docs/examples for Vulnerability Explorer commands. |
+| Sprint 29 | Vulnerability Explorer | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-VULN-29-001 | Canonicalize (lossless) advisory identifiers, persist `links[]`, backfill, and expose raw payload snapshots (no merge/derived fields). |
+| Sprint 29 | Vulnerability Explorer | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-VULN-29-002 | Provide advisory evidence retrieval endpoint for Vuln Explorer. |
+| Sprint 29 | Vulnerability Explorer | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService & Observability Guilds | CONCELIER-VULN-29-004 | Add metrics/logs/events for advisory normalization supporting resolver. |
+| Sprint 29 | Vulnerability Explorer | src/Excititor/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-VULN-29-001 | Canonicalize (lossless) VEX keys and product scopes with backfill + links (no merge/suppression). |
+| Sprint 29 | Vulnerability Explorer | src/Excititor/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-VULN-29-002 | Expose VEX evidence retrieval endpoint for Explorer evidence tabs. |
+| Sprint 29 | Vulnerability Explorer | src/Excititor/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService & Observability Guilds | EXCITITOR-VULN-29-004 | Instrument metrics/logs for VEX normalization and suppression events. |
+| Sprint 29 | Vulnerability Explorer | src/Findings/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-29-001 | Design ledger & projection schemas, hashing strategy, and migrations for Findings Ledger. |
+| Sprint 29 | Vulnerability Explorer | src/Findings/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-29-002 | Implement ledger write API with hash chaining and Merkle root anchoring job. |
+| Sprint 29 | Vulnerability Explorer | src/Findings/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger & Scheduler Guilds | LEDGER-29-003 | Build projector worker deriving `findings_projection` with idempotent replay. |
+| Sprint 29 | Vulnerability Explorer | src/Findings/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger & Policy Guilds | LEDGER-29-004 | Integrate Policy Engine batch evaluation into projector with rationale caching. |
+| Sprint 29 | Vulnerability Explorer | src/Findings/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-29-005 | Implement workflow mutation endpoints producing ledger events (assign/comment/accept-risk/etc.). |
+| Sprint 29 | Vulnerability Explorer | src/Findings/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger & Security Guilds | LEDGER-29-006 | Add attachment encryption, signed URLs, and CSRF protections for workflow endpoints. |
+| Sprint 29 | Vulnerability Explorer | src/Findings/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger & Observability Guilds | LEDGER-29-007 | Instrument ledger metrics/logs/alerts (write latency, projection lag, anchoring). |
+| Sprint 29 | Vulnerability Explorer | src/Findings/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger & QA Guilds | LEDGER-29-008 | Provide replay/determinism/load tests for ledger/projector pipelines. |
+| Sprint 29 | Vulnerability Explorer | src/Findings/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger & DevOps Guilds | LEDGER-29-009 | Deliver deployment/offline artefacts, backup/restore, Merkle anchoring guidance. |
+| Sprint 29 | Vulnerability Explorer | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-29-001 | Implement policy batch evaluation endpoint returning determinations + rationale. |
+| Sprint 29 | Vulnerability Explorer | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-29-002 | Provide simulation diff API for Vuln Explorer comparisons. |
+| Sprint 29 | Vulnerability Explorer | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-29-003 | Include path/scope annotations in determinations for Explorer. |
+| Sprint 29 | Vulnerability Explorer | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild & Observability Guild | POLICY-ENGINE-29-004 | Add telemetry for batch evaluation + simulation jobs. |
+| Sprint 29 | Vulnerability Explorer | src/SbomService/StellaOps.SbomService/TASKS.md | TODO | SBOM Service Guild | SBOM-VULN-29-001 | Emit inventory evidence with scope/runtime/path/safe version hints; publish change events. |
+| Sprint 29 | Vulnerability Explorer | src/SbomService/StellaOps.SbomService/TASKS.md | TODO | SBOM Service & Findings Ledger Guilds | SBOM-VULN-29-002 | Provide resolver feed for candidate generation with idempotent delivery. |
+| Sprint 29 | Vulnerability Explorer | src/Scheduler/StellaOps.Scheduler.WebService/TASKS.md | TODO | Scheduler WebService Guild | SCHED-VULN-29-001 | Expose resolver job APIs + status monitoring for Vuln Explorer recomputation. |
+| Sprint 29 | Vulnerability Explorer | src/Scheduler/StellaOps.Scheduler.WebService/TASKS.md | TODO | Scheduler WebService & Observability Guilds | SCHED-VULN-29-002 | Provide projector lag metrics endpoint + webhook notifications. |
+| Sprint 29 | Vulnerability Explorer | src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker Guild | SCHED-WORKER-29-001 | Implement resolver worker applying ecosystem version semantics and path scope. |
+| Sprint 29 | Vulnerability Explorer | src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker Guild | SCHED-WORKER-29-002 | Implement evaluation worker invoking Policy Engine and updating ledger queues. |
+| Sprint 29 | Vulnerability Explorer | src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md | TODO | Scheduler Worker & Observability Guilds | SCHED-WORKER-29-003 | Add monitoring for resolver/evaluation backlog and SLA alerts. |
+| Sprint 29 | Vulnerability Explorer | src/VulnExplorer/StellaOps.VulnExplorer.Api/TASKS.md | TODO | Vuln Explorer API Guild | VULN-API-29-001 | Publish Vuln Explorer OpenAPI + query schemas. |
+| Sprint 29 | Vulnerability Explorer | src/VulnExplorer/StellaOps.VulnExplorer.Api/TASKS.md | TODO | Vuln Explorer API Guild | VULN-API-29-002 | Implement list/query endpoints with grouping, paging, cost budgets. |
+| Sprint 29 | Vulnerability Explorer | src/VulnExplorer/StellaOps.VulnExplorer.Api/TASKS.md | TODO | Vuln Explorer API Guild | VULN-API-29-003 | Implement detail endpoint combining evidence, policy rationale, paths, history. |
+| Sprint 29 | Vulnerability Explorer | src/VulnExplorer/StellaOps.VulnExplorer.Api/TASKS.md | TODO | Vuln Explorer API & Findings Ledger Guilds | VULN-API-29-004 | Expose workflow APIs writing ledger events with validation + idempotency. |
+| Sprint 29 | Vulnerability Explorer | src/VulnExplorer/StellaOps.VulnExplorer.Api/TASKS.md | TODO | Vuln Explorer API & Policy Guilds | VULN-API-29-005 | Implement policy simulation endpoint producing diffs without side effects. |
+| Sprint 29 | Vulnerability Explorer | src/VulnExplorer/StellaOps.VulnExplorer.Api/TASKS.md | TODO | Vuln Explorer API Guild | VULN-API-29-006 | Integrate Graph Explorer paths metadata and deep-link parameters. |
+| Sprint 29 | Vulnerability Explorer | src/VulnExplorer/StellaOps.VulnExplorer.Api/TASKS.md | TODO | Vuln Explorer API & Security Guilds | VULN-API-29-007 | Enforce RBAC/ABAC, CSRF, attachment security, and audit logging. |
+| Sprint 29 | Vulnerability Explorer | src/VulnExplorer/StellaOps.VulnExplorer.Api/TASKS.md | TODO | Vuln Explorer API Guild | VULN-API-29-008 | Provide evidence bundle export job with signing + manifests. |
+| Sprint 29 | Vulnerability Explorer | src/VulnExplorer/StellaOps.VulnExplorer.Api/TASKS.md | TODO | Vuln Explorer API & Observability Guilds | VULN-API-29-009 | Instrument API telemetry (latency, workflow counts, exports). |
+| Sprint 29 | Vulnerability Explorer | src/VulnExplorer/StellaOps.VulnExplorer.Api/TASKS.md | TODO | Vuln Explorer API & QA Guilds | VULN-API-29-010 | Deliver unit/integration/perf/determinism tests for Vuln Explorer API. |
+| Sprint 29 | Vulnerability Explorer | src/VulnExplorer/StellaOps.VulnExplorer.Api/TASKS.md | TODO | Vuln Explorer API & DevOps Guilds | VULN-API-29-011 | Ship deployment/offline manifests, health checks, scaling docs. |
+| Sprint 29 | Vulnerability Explorer | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-VULN-29-001 | Route `/vuln/*` APIs with tenant RBAC, ABAC, anti-forgery enforcement. |
+| Sprint 29 | Vulnerability Explorer | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-VULN-29-002 | Proxy workflow calls to Findings Ledger with correlation IDs + retries. |
+| Sprint 29 | Vulnerability Explorer | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-VULN-29-003 | Expose simulation/export orchestration with SSE/progress + signed links. |
+| Sprint 29 | Vulnerability Explorer | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform & Observability Guilds | WEB-VULN-29-004 | Aggregate Vuln Explorer telemetry (latency, errors, exports). |
+| Sprint 30 | VEX Lens | docs/TASKS.md | TODO | Docs Guild | DOCS-VEX-30-001 | Publish `/docs/vex/consensus-overview.md`. |
+| Sprint 30 | VEX Lens | docs/TASKS.md | TODO | Docs Guild | DOCS-VEX-30-002 | Write `/docs/vex/consensus-algorithm.md`. |
+| Sprint 30 | VEX Lens | docs/TASKS.md | TODO | Docs Guild | DOCS-VEX-30-003 | Document `/docs/vex/issuer-directory.md`. |
+| Sprint 30 | VEX Lens | docs/TASKS.md | TODO | Docs Guild | DOCS-VEX-30-004 | Publish `/docs/vex/consensus-api.md`. |
+| Sprint 30 | VEX Lens | docs/TASKS.md | TODO | Docs Guild | DOCS-VEX-30-005 | Create `/docs/vex/consensus-console.md`. |
+| Sprint 30 | VEX Lens | docs/TASKS.md | TODO | Docs Guild | DOCS-VEX-30-006 | Add `/docs/policy/vex-trust-model.md`. |
+| Sprint 30 | VEX Lens | docs/TASKS.md | TODO | Docs Guild | DOCS-VEX-30-007 | Author `/docs/sbom/vex-mapping.md`. |
+| Sprint 30 | VEX Lens | docs/TASKS.md | TODO | Docs Guild | DOCS-VEX-30-008 | Publish `/docs/security/vex-signatures.md`. |
+| Sprint 30 | VEX Lens | docs/TASKS.md | TODO | Docs Guild | DOCS-VEX-30-009 | Write `/docs/runbooks/vex-ops.md`. |
+| Sprint 30 | VEX Lens | ops/devops/TASKS.md | TODO | DevOps Guild | VEXLENS-30-009, ISSUER-30-005 | Set up CI/perf/telemetry dashboards for VEX Lens and Issuer Directory. |
+| Sprint 30 | VEX Lens | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | VEXLENS-30-007 | Implement `stella vex consensus` CLI commands with list/show/simulate/export. |
+| Sprint 30 | VEX Lens | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild, VEX Lens Guild | CONCELIER-VEXLENS-30-001 | Guarantee advisory key consistency and provide cross-links for consensus rationale (VEX Lens). |
+| Sprint 30 | VEX Lens | src/Excititor/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-VULN-29-001 | Ensure VEX evidence includes issuer hints, signatures, product trees for Lens consumption. |
+| Sprint 30 | VEX Lens | src/IssuerDirectory/StellaOps.IssuerDirectory/TASKS.md | TODO | Issuer Directory Guild | ISSUER-30-001 | Implement issuer CRUD API with RBAC and audit logs. |
+| Sprint 30 | VEX Lens | src/IssuerDirectory/StellaOps.IssuerDirectory/TASKS.md | TODO | Issuer Directory & Security Guilds | ISSUER-30-002 | Implement key management endpoints with expiry enforcement. |
+| Sprint 30 | VEX Lens | src/IssuerDirectory/StellaOps.IssuerDirectory/TASKS.md | TODO | Issuer Directory & Policy Guilds | ISSUER-30-003 | Provide trust weight override APIs with audit trails. |
+| Sprint 30 | VEX Lens | src/IssuerDirectory/StellaOps.IssuerDirectory/TASKS.md | TODO | Issuer Directory & VEX Lens Guilds | ISSUER-30-004 | Integrate issuer data into signature verification clients. |
+| Sprint 30 | VEX Lens | src/IssuerDirectory/StellaOps.IssuerDirectory/TASKS.md | TODO | Issuer Directory & Observability Guilds | ISSUER-30-005 | Instrument issuer change metrics/logs and dashboards. |
+| Sprint 30 | VEX Lens | src/IssuerDirectory/StellaOps.IssuerDirectory/TASKS.md | TODO | Issuer Directory & DevOps Guilds | ISSUER-30-006 | Provide deployment/backup/offline docs for Issuer Directory. |
+| Sprint 30 | VEX Lens | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-30-101 | Surface trust weighting configuration (issuer weights, modifiers, decay) for VEX Lens via Policy Studio/API. |
+| Sprint 30 | VEX Lens | src/VexLens/StellaOps.VexLens/TASKS.md | TODO | VEX Lens Guild | VEXLENS-30-001 | Implement VEX normalization pipeline (CSAF, OpenVEX, CycloneDX) with deterministic outputs. |
+| Sprint 30 | VEX Lens | src/VexLens/StellaOps.VexLens/TASKS.md | TODO | VEX Lens Guild | VEXLENS-30-002 | Build product mapping library aligning CSAF product trees to purls/versions with scope scoring. |
+| Sprint 30 | VEX Lens | src/VexLens/StellaOps.VexLens/TASKS.md | TODO | VEX Lens & Issuer Directory Guilds | VEXLENS-30-003 | Integrate signature verification using issuer keys; annotate evidence. |
+| Sprint 30 | VEX Lens | src/VexLens/StellaOps.VexLens/TASKS.md | TODO | VEX Lens & Policy Guilds | VEXLENS-30-004 | Implement trust weighting functions configurable via policy. |
+| Sprint 30 | VEX Lens | src/VexLens/StellaOps.VexLens/TASKS.md | TODO | VEX Lens Guild | VEXLENS-30-005 | Implement consensus algorithm producing state, confidence, rationale, and quorum. |
+| Sprint 30 | VEX Lens | src/VexLens/StellaOps.VexLens/TASKS.md | TODO | VEX Lens & Findings Ledger Guilds | VEXLENS-30-006 | Materialize consensus projections and change events. |
+| Sprint 30 | VEX Lens | src/VexLens/StellaOps.VexLens/TASKS.md | TODO | VEX Lens Guild | VEXLENS-30-007 | Deliver query/detail/simulation/export APIs with budgets and OpenAPI docs. |
+| Sprint 30 | VEX Lens | src/VexLens/StellaOps.VexLens/TASKS.md | TODO | VEX Lens & Policy Guilds | VEXLENS-30-008 | Integrate consensus signals with Policy Engine and Vuln Explorer. |
+| Sprint 30 | VEX Lens | src/VexLens/StellaOps.VexLens/TASKS.md | TODO | VEX Lens & Observability Guilds | VEXLENS-30-009 | Instrument metrics/logs/traces; publish dashboards/alerts. |
+| Sprint 30 | VEX Lens | src/VexLens/StellaOps.VexLens/TASKS.md | TODO | VEX Lens & QA Guilds | VEXLENS-30-010 | Build unit/property/integration/load tests and determinism harness. |
+| Sprint 30 | VEX Lens | src/VexLens/StellaOps.VexLens/TASKS.md | TODO | VEX Lens & DevOps Guilds | VEXLENS-30-011 | Provide deployment manifests, scaling guides, offline seeds, runbooks. |
+| Sprint 30 | VEX Lens | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild, VEX Lens Guild | WEB-VEX-30-007 | Route `/vex/consensus` APIs via gateway with RBAC/ABAC, caching, and telemetry (proxy-only). |
+| Sprint 31 | Advisory AI | docs/TASKS.md | TODO | Docs Guild | DOCS-AIAI-31-001 | Publish Advisory AI overview doc. |
+| Sprint 31 | Advisory AI | docs/TASKS.md | TODO | Docs Guild | DOCS-AIAI-31-002 | Publish architecture doc for Advisory AI. |
+| Sprint 31 | Advisory AI | docs/TASKS.md | TODO | Docs Guild | DOCS-AIAI-31-003..009 | Complete API/Console/CLI/Policy/Security/SBOM/Runbook docs. |
+| Sprint 31 | Advisory AI | ops/deployment/TASKS.md | TODO | Deployment Guild | DEPLOY-AIAI-31-001 | Provide Advisory AI deployment/offline guidance. |
+| Sprint 31 | Advisory AI | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-AIAI-31-001 | Provision CI/perf/telemetry for Advisory AI. |
+| Sprint 31 | Advisory AI | src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md | TODO | Advisory AI Guild | AIAI-31-001 | Implement advisory/VEX retrievers with paragraph anchors and citations. |
+| Sprint 31 | Advisory AI | src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md | TODO | Advisory AI Guild | AIAI-31-002 | Build SBOM context retriever and blast radius estimator. |
+| Sprint 31 | Advisory AI | src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md | TODO | Advisory AI Guild | AIAI-31-003 | Deliver deterministic toolset (version checks, dependency analysis, policy lookup). |
+| Sprint 31 | Advisory AI | src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md | TODO | Advisory AI Guild | AIAI-31-004 | Orchestrator with task templates, tool chaining, caching. |
+| Sprint 31 | Advisory AI | src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md | TODO | Advisory AI & Security Guilds | AIAI-31-005 | Guardrails (redaction, injection defense, output validation). |
+| Sprint 31 | Advisory AI | src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md | TODO | Advisory AI Guild | AIAI-31-006 | Expose REST/batch APIs with RBAC and OpenAPI. |
+| Sprint 31 | Advisory AI | src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md | TODO | Advisory AI & Observability Guilds | AIAI-31-007 | Instrument metrics/logs/traces and dashboards. |
+| Sprint 31 | Advisory AI | src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md | TODO | Advisory AI & DevOps Guilds | AIAI-31-008 | Package inference + deployment manifests/flags. |
+| Sprint 31 | Advisory AI | src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md | TODO | Advisory AI & QA Guilds | AIAI-31-009 | Build golden/injection/perf tests ensuring determinism. |
+| Sprint 31 | Advisory AI | src/Authority/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-AIAI-31-001 | Define Advisory AI scopes and remote inference toggles. |
+| Sprint 31 | Advisory AI | src/Authority/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-AIAI-31-002 | Enforce prompt logging and consent/audit flows. |
+| Sprint 31 | Advisory AI | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-AIAI-31-001 | Implement `stella advise *` CLI commands leveraging Advisory AI orchestration and policy scopes. |
+| Sprint 31 | Advisory AI | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-AIAI-31-001 | Expose advisory chunk API with paragraph anchors. |
+| Sprint 31 | Advisory AI | src/Excititor/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-AIAI-31-001 | Provide VEX chunks with justifications and signatures. |
+| Sprint 31 | Advisory AI | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-31-001 | Provide policy knobs for Advisory AI. |
+| Sprint 31 | Advisory AI | src/SbomService/StellaOps.SbomService/TASKS.md | TODO | SBOM Service Guild | SBOM-AIAI-31-001 | Deliver SBOM path/timeline endpoints for Advisory AI. |
+| Sprint 31 | Advisory AI | src/VexLens/StellaOps.VexLens/TASKS.md | TODO | VEX Lens Guild | VEXLENS-AIAI-31-001 | Expose enriched rationale API for conflict explanations. |
+| Sprint 31 | Advisory AI | src/VexLens/StellaOps.VexLens/TASKS.md | TODO | VEX Lens Guild | VEXLENS-AIAI-31-002 | Provide batching/caching hooks for Advisory AI. |
+| Sprint 31 | Advisory AI | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-AIAI-31-001 | Route `/advisory/ai/*` APIs with RBAC/telemetry. |
+| Sprint 31 | Advisory AI | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-AIAI-31-002 | Provide batch orchestration and retry handling for Advisory AI. |
+| Sprint 31 | Advisory AI | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-AIAI-31-003 | Emit Advisory AI gateway telemetry/audit logs. |
+| Sprint 32 | Orchestrator Dashboard | docs/TASKS.md | TODO | Docs Guild | DOCS-ORCH-32-001 | Author `/docs/orchestrator/overview.md` covering mission, roles, AOC alignment, and imposed rule reminder. |
+| Sprint 32 | Orchestrator Dashboard | docs/TASKS.md | TODO | Docs Guild | DOCS-ORCH-32-002 | Author `/docs/orchestrator/architecture.md` detailing scheduler, DAGs, rate limits, and data model. |
+| Sprint 32 | Orchestrator Dashboard | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-ORCH-32-001 | Provision staging Postgres/message-bus charts, CI smoke deploy, and baseline dashboards for queue depth and inflight jobs. |
+| Sprint 32 | Orchestrator Dashboard | src/Authority/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-ORCH-32-001 | Introduce `orch:read` scope and `Orch.Viewer` role with metadata, discovery docs, and offline defaults. |
+| Sprint 32 | Orchestrator Dashboard | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-ORCH-32-001 | Register Concelier sources with orchestrator, publish schedules/rate policies, and seed metadata. |
+| Sprint 32 | Orchestrator Dashboard | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-ORCH-32-002 | Embed worker SDK into Concelier ingestion loops emitting progress, heartbeats, and artifact hashes. |
+| Sprint 32 | Orchestrator Dashboard | src/Excititor/StellaOps.Excititor.Worker/TASKS.md | TODO | Excititor Worker Guild | EXCITITOR-ORCH-32-001 | Adopt worker SDK in Excititor worker with job claim/heartbeat and artifact summary emission. |
+| Sprint 32 | Orchestrator Dashboard | src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Go/TASKS.md | TODO | Worker SDK Guild | WORKER-GO-32-001 | Bootstrap Go worker SDK (client config, job claim, acknowledgement flow) with integration tests. |
+| Sprint 32 | Orchestrator Dashboard | src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Go/TASKS.md | TODO | Worker SDK Guild | WORKER-GO-32-002 | Add heartbeat/progress helpers, structured logging, and default metrics exporters to Go SDK. |
+| Sprint 32 | Orchestrator Dashboard | src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python/TASKS.md | TODO | Worker SDK Guild | WORKER-PY-32-001 | Bootstrap Python async SDK with job claim/config adapters and sample worker. |
+| Sprint 32 | Orchestrator Dashboard | src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python/TASKS.md | TODO | Worker SDK Guild | WORKER-PY-32-002 | Implement heartbeat/progress helpers and logging/metrics instrumentation for Python workers. |
+| Sprint 32 | Orchestrator Dashboard | src/Orchestrator/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-SVC-32-001 | Bootstrap orchestrator service with Postgres schema/migrations for sources, runs, jobs, dag_edges, artifacts, quotas, schedules. |
+| Sprint 32 | Orchestrator Dashboard | src/Orchestrator/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-SVC-32-002 | Implement scheduler DAG planner, dependency resolver, and job state machine for read-only tracking. |
+| Sprint 32 | Orchestrator Dashboard | src/Orchestrator/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-SVC-32-003 | Expose read-only REST APIs (sources, runs, jobs, DAG) with OpenAPI + validation. |
+| Sprint 32 | Orchestrator Dashboard | src/Orchestrator/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-SVC-32-004 | Ship WebSocket/SSE live update stream and metrics counters/histograms for job lifecycle. |
+| Sprint 32 | Orchestrator Dashboard | src/Orchestrator/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-SVC-32-005 | Deliver worker claim/heartbeat/progress endpoints capturing artifact metadata and checksums. |
+| Sprint 32 | Orchestrator Dashboard | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-32-101 | Define orchestrator `policy_eval` job contract, idempotency keys, and enqueue hooks for change events. |
+| Sprint 32 | Orchestrator Dashboard | src/SbomService/StellaOps.SbomService/TASKS.md | TODO | SBOM Service Guild | SBOM-ORCH-32-001 | Integrate orchestrator job IDs into SBOM ingest/index pipelines with artifact hashing and status updates. |
+| Sprint 32 | Orchestrator Dashboard | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-ORCH-32-001 | Expose read-only orchestrator APIs via gateway with tenant scoping, caching headers, and rate limits. |
+| Sprint 33 | Orchestrator Dashboard | docs/TASKS.md | TODO | Docs Guild | DOCS-ORCH-33-001 | Author `/docs/orchestrator/api.md` with endpoints, WebSocket events, error codes, and imposed rule reminder. |
+| Sprint 33 | Orchestrator Dashboard | docs/TASKS.md | TODO | Docs Guild | DOCS-ORCH-33-002 | Author `/docs/orchestrator/console.md` covering screens, accessibility, and live updates. |
+| Sprint 33 | Orchestrator Dashboard | docs/TASKS.md | TODO | Docs Guild | DOCS-ORCH-33-003 | Author `/docs/orchestrator/cli.md` with command reference, examples, and exit codes. |
+| Sprint 33 | Governance & Rules | ops/devops/TASKS.md | DOING (2025-10-26) | DevOps Guild, Platform Leads | DEVOPS-RULES-33-001 | Contracts & Rules anchor (gateway proxy-only; Policy Engine overlays/simulations; AOC ingestion canonicalization; Graph Indexer + Graph API as sole platform). |
+| Sprint 33 | Orchestrator Dashboard | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-ORCH-33-001 | Publish Grafana dashboards for rate-limit/backpressure/error clustering and configure alert rules with runbooks. |
+| Sprint 33 | Orchestrator Dashboard | src/Authority/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-ORCH-33-001 | Add `Orch.Operator` role, control action scopes, and enforce reason/ticket field capture. |
+| Sprint 33 | Orchestrator Dashboard | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-ORCH-33-001 | Wire orchestrator control hooks (pause, throttle, retry) into Concelier workers with safe checkpoints. |
+| Sprint 33 | Orchestrator Dashboard | src/Excititor/StellaOps.Excititor.Worker/TASKS.md | TODO | Excititor Worker Guild | EXCITITOR-ORCH-33-001 | Honor orchestrator throttles, classify VEX errors, and emit retry-safe checkpoints in Excititor worker. |
+| Sprint 33 | Orchestrator Dashboard | src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Go/TASKS.md | TODO | Worker SDK Guild | WORKER-GO-33-001 | Add artifact upload helpers (object store + checksum) and idempotency guard to Go SDK. |
+| Sprint 33 | Orchestrator Dashboard | src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Go/TASKS.md | TODO | Worker SDK Guild | WORKER-GO-33-002 | Implement error classification/retry helper and structured failure report in Go SDK. |
+| Sprint 33 | Orchestrator Dashboard | src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python/TASKS.md | TODO | Worker SDK Guild | WORKER-PY-33-001 | Add artifact publish/idempotency features to Python SDK with object store integration. |
+| Sprint 33 | Orchestrator Dashboard | src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python/TASKS.md | TODO | Worker SDK Guild | WORKER-PY-33-002 | Expose error classification/retry/backoff helpers in Python SDK with structured logging. |
+| Sprint 33 | Orchestrator Dashboard | src/Orchestrator/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-SVC-33-001 | Enable source/job control actions (test, pause/resume, retry/cancel/prioritize) with RBAC and audit hooks. |
+| Sprint 33 | Orchestrator Dashboard | src/Orchestrator/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-SVC-33-002 | Implement adaptive token-bucket rate limiter and concurrency caps reacting to upstream 429/503 signals. |
+| Sprint 33 | Orchestrator Dashboard | src/Orchestrator/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-SVC-33-003 | Add watermark/backfill manager with event-time windows, duplicate suppression, and preview API. |
+| Sprint 33 | Orchestrator Dashboard | src/Orchestrator/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-SVC-33-004 | Deliver dead-letter storage, replay endpoints, and surfaced error classes with remediation hints. |
+| Sprint 33 | Orchestrator Dashboard | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-33-101 | Implement orchestrator-driven policy evaluation workers with heartbeats, SLO metrics, and rate limit awareness. |
+| Sprint 33 | Orchestrator Dashboard | src/SbomService/StellaOps.SbomService/TASKS.md | TODO | SBOM Service Guild | SBOM-ORCH-33-001 | Report SBOM ingest backpressure metrics and support orchestrator pause/resume/backfill signals. |
+| Sprint 33 | Orchestrator Dashboard | src/VexLens/StellaOps.VexLens/TASKS.md | TODO | VEX Lens Guild | VEXLENS-ORCH-33-001 | Expose `consensus_compute` orchestrator job type and integrate VEX Lens worker for diff batches. |
+| Sprint 33 | Orchestrator Dashboard | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-ORCH-33-001 | Add control endpoints (actions/backfill) and SSE bridging with permission checks and error mapping. |
+| Sprint 34 | Orchestrator Dashboard | docs/TASKS.md | TODO | Docs Guild | DOCS-ORCH-34-001 | Author `/docs/orchestrator/run-ledger.md` describing provenance export format and audits. |
+| Sprint 34 | Orchestrator Dashboard | docs/TASKS.md | TODO | Docs Guild | DOCS-ORCH-34-002 | Author `/docs/security/secrets-handling.md` covering KMS refs, redaction, and operator hygiene. |
+| Sprint 34 | Orchestrator Dashboard | docs/TASKS.md | TODO | Docs Guild | DOCS-ORCH-34-003 | Author `/docs/operations/orchestrator-runbook.md` (failures, backfill guide, circuit breakers). |
+| Sprint 34 | Orchestrator Dashboard | docs/TASKS.md | TODO | Docs Guild | DOCS-ORCH-34-004 | Author `/docs/schemas/artifacts.md` detailing artifact kinds, schema versions, hashing, storage layout. |
+| Sprint 34 | Orchestrator Dashboard | docs/TASKS.md | TODO | Docs Guild | DOCS-ORCH-34-005 | Author `/docs/slo/orchestrator-slo.md` defining SLOs, burn alerts, and measurement strategy. |
+| Sprint 34 | Orchestrator Dashboard | ops/deployment/TASKS.md | TODO | Deployment Guild | DEPLOY-ORCH-34-001 | Provide Helm/Compose manifests, scaling defaults, and offline kit instructions for orchestrator service. |
+| Sprint 34 | Orchestrator Dashboard | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-ORCH-34-001 | Harden production dashboards/alerts, synthetic probes, and incident response playbooks for orchestrator. |
+| Sprint 34 | Orchestrator Dashboard | ops/offline-kit/TASKS.md | TODO | Offline Kit Guild | DEVOPS-OFFLINE-34-006 | Bundle orchestrator service, worker SDK samples, and Postgres snapshot into Offline Kit with integrity checks. |
+| Sprint 34 | Orchestrator Dashboard | src/Authority/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-ORCH-34-001 | Add `Orch.Admin` role for quotas/backfills, enforce audit reason requirements, update docs and offline defaults. |
+| Sprint 34 | Orchestrator Dashboard | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-ORCH-34-001 | Implement backfill wizard and quota management commands with dry-run preview and guardrails. |
+| Sprint 34 | Orchestrator Dashboard | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-ORCH-34-001 | Implement orchestrator-driven backfills for advisory sources with idempotent artifact reuse and ledger linkage. |
+| Sprint 34 | Orchestrator Dashboard | src/Excititor/StellaOps.Excititor.Worker/TASKS.md | TODO | Excititor Worker Guild | EXCITITOR-ORCH-34-001 | Support orchestrator backfills and circuit breaker resets for Excititor sources with auditing. |
+| Sprint 34 | Orchestrator Dashboard | src/Findings/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-34-101 | Link orchestrator run ledger entries into Findings Ledger provenance export and audit queries. |
+| Sprint 34 | Orchestrator Dashboard | src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Go/TASKS.md | TODO | Worker SDK Guild | WORKER-GO-34-001 | Add backfill range execution, watermark handshake, and artifact dedupe verification to Go SDK. |
+| Sprint 34 | Orchestrator Dashboard | src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python/TASKS.md | TODO | Worker SDK Guild | WORKER-PY-34-001 | Add backfill support and deterministic artifact dedupe validation to Python SDK. |
+| Sprint 34 | Orchestrator Dashboard | src/Orchestrator/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-SVC-34-001 | Implement quota management APIs, SLO burn-rate computation, and alert budget tracking. |
+| Sprint 34 | Orchestrator Dashboard | src/Orchestrator/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-SVC-34-002 | Build audit log and immutable run ledger export with signed manifest support. |
+| Sprint 34 | Orchestrator Dashboard | src/Orchestrator/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-SVC-34-003 | Run perf/scale validation (10k jobs, dispatch <150 ms) and add autoscaling hooks. |
+| Sprint 34 | Orchestrator Dashboard | src/Orchestrator/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-SVC-34-004 | Package orchestrator container, Helm overlays, offline bundle seeds, and provenance attestations. |
+| Sprint 34 | Orchestrator Dashboard | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-34-101 | Expose policy eval run ledger exports and SLO burn metrics to orchestrator. |
+| Sprint 34 | Orchestrator Dashboard | src/SbomService/StellaOps.SbomService/TASKS.md | TODO | SBOM Service Guild | SBOM-ORCH-34-001 | Enable SBOM backfill and watermark reconciliation; emit coverage metrics and flood guard. |
+| Sprint 34 | Orchestrator Dashboard | src/VexLens/StellaOps.VexLens/TASKS.md | TODO | VEX Lens Guild | VEXLENS-ORCH-34-001 | Integrate consensus compute completion events with orchestrator ledger and provenance outputs. |
+| Sprint 34 | Orchestrator Dashboard | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-ORCH-34-001 | Expose quotas/backfill/queue metrics endpoints, throttle toggles, and error clustering APIs. |
+| Sprint 35 | EPDR Foundations | src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md | TODO | Scanner EPDR Guild | SCANNER-ANALYZERS-LANG-11-001 | Build entrypoint resolver (identity + environment profiles) and emit normalized entrypoint records. |
+| Sprint 35 | EPDR Foundations | src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md | TODO | Scanner EPDR Guild | SCANNER-ANALYZERS-LANG-11-002 | Static IL/reflection/ALC heuristics producing dependency edges with reason codes and confidence. |
+| Sprint 35 | EPDR Foundations | src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md | TODO | Scanner EPDR Guild, Signals Guild | SCANNER-ANALYZERS-LANG-11-003 | Runtime loader/PInvoke signal ingestion merged with static/declared edges (confidence & explain). |
+| Sprint 35 | Export Center Phase 1 | docs/TASKS.md | TODO | Docs Guild | DOCS-EXPORT-35-001 | Author `/docs/export-center/overview.md` with purpose, profiles, security, and imposed rule reminder. |
+| Sprint 35 | Export Center Phase 1 | docs/TASKS.md | TODO | Docs Guild | DOCS-EXPORT-35-002 | Author `/docs/export-center/architecture.md` detailing service components, adapters, manifests, signing, and distribution. |
+| Sprint 35 | Export Center Phase 1 | docs/TASKS.md | TODO | Docs Guild | DOCS-EXPORT-35-003 | Publish `/docs/export-center/profiles.md` covering schemas, examples, and compatibility. |
+| Sprint 35 | Export Center Phase 1 | ops/deployment/TASKS.md | TODO | Deployment Guild | DEPLOY-EXPORT-35-001 | Package exporter service/worker containers, Helm overlays (download-only), and rollout guide. |
+| Sprint 35 | Export Center Phase 1 | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-EXPORT-35-001 | Create exporter CI pipeline (lint/test/perf smoke), object storage fixtures, and initial Grafana dashboards. |
+| Sprint 35 | Export Center Phase 1 | src/ExportCenter/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-SVC-35-001 | Bootstrap exporter service, configuration, and migrations for export profiles/runs/inputs/distributions with tenant scopes. |
+| Sprint 35 | Export Center Phase 1 | src/ExportCenter/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-SVC-35-002 | Implement planner resolving filters to iterators and orchestrator job contract with deterministic sampling. |
+| Sprint 35 | Export Center Phase 1 | src/ExportCenter/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-SVC-35-003 | Deliver JSON adapters (raw/policy) with canonical normalization, redaction enforcement, and zstd writers. |
+| Sprint 35 | Export Center Phase 1 | src/ExportCenter/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-SVC-35-004 | Build mirror (full) adapter producing filesystem layout, manifests, and bundle assembly for download profile. |
+| Sprint 35 | Export Center Phase 1 | src/ExportCenter/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-SVC-35-005 | Implement manifest/provenance writer and KMS signing/attestation for export bundles. |
+| Sprint 35 | Export Center Phase 1 | src/ExportCenter/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-SVC-35-006 | Expose Export API (profiles, runs, download) with SSE updates, concurrency controls, and audit logging. |
+| Sprint 35 | Export Center Phase 1 | src/Findings/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-EXPORT-35-001 | Provide paginated streaming endpoints for advisories, VEX, SBOMs, and findings filtered by scope selectors. |
+| Sprint 35 | Export Center Phase 1 | src/Orchestrator/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-SVC-35-101 | Register export job type, quotas, and rate policies; surface export job telemetry for scheduler. |
+| Sprint 35 | Export Center Phase 1 | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-35-201 | Expose deterministic policy snapshot + evaluated findings endpoint aligned with Export Center requirements. |
+| Sprint 35 | Export Center Phase 1 | src/VexLens/StellaOps.VexLens/TASKS.md | TODO | VEX Lens Guild | VEXLENS-EXPORT-35-001 | Publish consensus snapshot API delivering deterministic JSON for export consumption. |
+| Sprint 35 | Export Center Phase 1 | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-EXPORT-35-001 | Route Export Center APIs through gateway with tenant scoping, viewer/operator scopes, and streaming downloads. |
+| Sprint 36 | EPDR Observations | src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md | TODO | Scanner EPDR Guild, SBOM Service Guild | SCANNER-ANALYZERS-LANG-11-004 | Normalize EPDR output to Scanner observation writer (entrypoints + edges + env profiles). |
+| Sprint 36 | EPDR Observations | src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md | TODO | Scanner EPDR Guild, QA Guild | SCANNER-ANALYZERS-LANG-11-005 | End-to-end fixtures/benchmarks covering publish modes, RIDs, trimming, NativeAOT with explain traces. |
+| Sprint 36 | Export Center Phase 2 | docs/TASKS.md | TODO | Docs Guild | DOCS-EXPORT-36-004 | Author `/docs/export-center/api.md` with endpoint examples and imposed rule note. |
+| Sprint 36 | Export Center Phase 2 | docs/TASKS.md | TODO | Docs Guild | DOCS-EXPORT-36-005 | Publish `/docs/export-center/cli.md` covering commands, scripts, verification, and imposed rule reminder. |
+| Sprint 36 | Export Center Phase 2 | docs/TASKS.md | TODO | Docs Guild | DOCS-EXPORT-36-006 | Write `/docs/export-center/trivy-adapter.md` detailing mappings, compatibility, and test matrix. |
+| Sprint 36 | Export Center Phase 2 | ops/deployment/TASKS.md | TODO | Deployment Guild | DEPLOY-EXPORT-36-001 | Document registry credentials, OCI push workflows, and automation for export distributions. |
+| Sprint 36 | Export Center Phase 2 | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-EXPORT-36-001 | Integrate Trivy compatibility validation, OCI push smoke tests, and metrics dashboards for export throughput. |
+| Sprint 36 | Export Center Phase 2 | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-EXPORT-36-001 | Add `stella export distribute` (OCI/objstore), `run download --resume`, and status polling enhancements. |
+| Sprint 36 | Export Center Phase 2 | src/ExportCenter/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-SVC-36-001 | Implement Trivy DB adapter (core) with schema mapping, validation, and compatibility gating. |
+| Sprint 36 | Export Center Phase 2 | src/ExportCenter/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-SVC-36-002 | Add Trivy Java DB variant, shared manifest entries, and adapter regression tests. |
+| Sprint 36 | Export Center Phase 2 | src/ExportCenter/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-SVC-36-003 | Build OCI distribution engine for exports with descriptor annotations and registry auth handling. |
+| Sprint 36 | Export Center Phase 2 | src/ExportCenter/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-SVC-36-004 | Extend planner/run lifecycle for OCI/object storage distributions with retry + idempotency. |
+| Sprint 36 | Export Center Phase 2 | src/Orchestrator/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-SVC-36-101 | Add distribution job follow-ups, retention metadata, and metrics for export runs. |
+| Sprint 36 | Export Center Phase 2 | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-EXPORT-36-001 | Expose distribution endpoints (OCI/object storage) and manifest/provenance download proxies with RBAC. |
+| Sprint 37 | Export Center Phase 3 | docs/TASKS.md | TODO | Docs Guild | DOCS-EXPORT-37-001 | Publish `/docs/export-center/mirror-bundles.md` detailing layouts, deltas, encryption, imposed rule reminder. |
+| Sprint 37 | Export Center Phase 3 | docs/TASKS.md | TODO | Docs Guild | DOCS-EXPORT-37-002 | Publish `/docs/export-center/provenance-and-signing.md` covering manifests, attestation, verification. |
+| Sprint 37 | Export Center Phase 3 | docs/TASKS.md | TODO | Docs Guild | DOCS-EXPORT-37-003 | Publish `/docs/operations/export-runbook.md` for failures, tuning, capacity, with imposed rule note. |
+| Sprint 37 | Export Center Phase 3 | docs/TASKS.md | TODO | Docs Guild | DOCS-EXPORT-37-004 | Publish `/docs/security/export-hardening.md` covering RBAC, isolation, encryption, and imposed rule. |
+| Sprint 37 | Export Center Phase 3 | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-EXPORT-37-001 | Finalize dashboards/alerts for exports (failure, verify), retention jobs, and chaos testing harness. |
+| Sprint 37 | Export Center Phase 3 | ops/offline-kit/TASKS.md | TODO | Offline Kit Guild | DEVOPS-OFFLINE-37-001 | Package Export Center mirror bundles + verification tooling into Offline Kit with manifest/signature updates. |
+| Sprint 37 | Export Center Phase 3 | src/Authority/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-EXPORT-37-001 | Add `Export.Admin` scope enforcement for retention, encryption keys, and scheduling APIs. |
+| Sprint 37 | Export Center Phase 3 | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-EXPORT-37-001 | Implement `stella export schedule`, `run verify`, and bundle verification tooling with signature/hash checks. |
+| Sprint 37 | Export Center Phase 3 | src/ExportCenter/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-SVC-37-001 | Implement mirror delta adapter, base export linkage, and content-addressed reuse. |
+| Sprint 37 | Export Center Phase 3 | src/ExportCenter/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-SVC-37-002 | Add bundle encryption, key wrapping with KMS, and verification tooling for encrypted exports. |
+| Sprint 37 | Export Center Phase 3 | src/ExportCenter/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-SVC-37-003 | Deliver scheduling/retention engine (cron/event triggers), audit trails, and retry idempotency enhancements. |
+| Sprint 37 | Export Center Phase 3 | src/ExportCenter/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-SVC-37-004 | Provide export verification API and CLI integration, including hash/signature validation endpoints. |
+| Sprint 37 | Export Center Phase 3 | src/Orchestrator/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-SVC-37-101 | Enable scheduled export runs, retention pruning hooks, and failure alerting integration. |
+| Sprint 37 | Export Center Phase 3 | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-EXPORT-37-001 | Surface scheduling, retention, and verification endpoints plus encryption parameter handling. |
+| Sprint 37 | Native Analyzer Core | src/Scanner/StellaOps.Scanner.Analyzers.Native/TASKS.md | TODO | Native Analyzer Guild | SCANNER-ANALYZERS-NATIVE-20-001 | Format detector & binary identity for ELF/PE/Mach-O (multi-slice) with stable entrypoint IDs. |
+| Sprint 37 | Native Analyzer Core | src/Scanner/StellaOps.Scanner.Analyzers.Native/TASKS.md | TODO | Native Analyzer Guild | SCANNER-ANALYZERS-NATIVE-20-002 | ELF dynamic parser emitting DT_NEEDED edges, runpath metadata, symbol version needs. |
+| Sprint 37 | Native Analyzer Core | src/Scanner/StellaOps.Scanner.Analyzers.Native/TASKS.md | TODO | Native Analyzer Guild | SCANNER-ANALYZERS-NATIVE-20-003 | PE import + delay-load + SxS manifest parsing producing reason-coded edges. |
+| Sprint 37 | Native Analyzer Core | src/Scanner/StellaOps.Scanner.Analyzers.Native/TASKS.md | TODO | Native Analyzer Guild | SCANNER-ANALYZERS-NATIVE-20-004 | Mach-O load command parsing with @rpath expansion and slice handling. |
+| Sprint 37 | Native Analyzer Core | src/Scanner/StellaOps.Scanner.Analyzers.Native/TASKS.md | TODO | Native Analyzer Guild | SCANNER-ANALYZERS-NATIVE-20-005 | Cross-platform resolver engine modeling search order/explain traces for ELF/PE/Mach-O. |
+| Sprint 37 | Native Analyzer Core | src/Scanner/StellaOps.Scanner.Analyzers.Native/TASKS.md | TODO | Native Analyzer Guild | SCANNER-ANALYZERS-NATIVE-20-006 | Heuristic scanner for dlopen/LoadLibrary strings, plugin configs, ecosystem hints with confidence tags. |
+| Sprint 38 | Native Observation Pipeline | src/Scanner/StellaOps.Scanner.Analyzers.Native/TASKS.md | TODO | Native Analyzer Guild | SCANNER-ANALYZERS-NATIVE-20-007 | Serialize entrypoints/edges/env profiles to Scanner writer (AOC-compliant observations). |
+| Sprint 38 | Native Observation Pipeline | src/Scanner/StellaOps.Scanner.Analyzers.Native/TASKS.md | TODO | Native Analyzer Guild, QA Guild | SCANNER-ANALYZERS-NATIVE-20-008 | Fixture suite + determinism benchmarks for native analyzer across linux/windows/macos. |
+| Sprint 38 | Native Observation Pipeline | src/Scanner/StellaOps.Scanner.Analyzers.Native/TASKS.md | TODO | Native Analyzer Guild, Signals Guild | SCANNER-ANALYZERS-NATIVE-20-009 | Optional runtime capture adapters (eBPF/ETW/dyld) producing runtime-load edges with redaction. |
+| Sprint 38 | Native Observation Pipeline | src/Scanner/StellaOps.Scanner.Analyzers.Native/TASKS.md | TODO | Native Analyzer Guild, DevOps Guild | SCANNER-ANALYZERS-NATIVE-20-010 | Package native analyzer plug-in + Offline Kit updates and restart-time loading. |
+| Sprint 38 | Notifications Studio Phase 1 | docs/TASKS.md | TODO | Docs Guild | DOCS-NOTIFY-38-001 | Publish `/docs/notifications/overview.md` and `/docs/notifications/architecture.md` ending with imposed rule statement. |
+| Sprint 38 | Notifications Studio Phase 1 | ops/deployment/TASKS.md | TODO | Deployment Guild | DEPLOY-NOTIFY-38-001 | Package notifier API/worker Helm overlays (email/chat/webhook), secrets templates, rollout guide. |
+| Sprint 38 | Notifications Studio Phase 1 | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-NOTIFY-38-001 | Stand up notifier CI pipelines, event bus fixtures, base dashboards for events/notifications latency. |
+| Sprint 38 | Notifications Studio Phase 1 | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-NOTIFY-38-001 | Implement `stella notify` rule/template/incident commands (list/create/test/ack) with file-based inputs. |
+| Sprint 38 | Notifications Studio Phase 1 | src/Notifier/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-SVC-38-001 | Bootstrap notifier service, migrations for notif tables, event ingestion, and rule engine foundation (policy violations + job failures). |
+| Sprint 38 | Notifications Studio Phase 1 | src/Notifier/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-SVC-38-002 | Implement channel adapters (email, chat-webhook, generic webhook) with retry and audit logging. |
+| Sprint 38 | Notifications Studio Phase 1 | src/Notifier/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-SVC-38-003 | Deliver template service (versioning, preview), rendering pipeline with redaction, and provenance links. |
+| Sprint 38 | Notifications Studio Phase 1 | src/Notifier/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-SVC-38-004 | Expose initial API (rules CRUD, templates, incidents list, ack) and live feed WS stream. |
+| Sprint 38 | Notifications Studio Phase 1 | src/Orchestrator/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-SVC-38-101 | Standardize event envelope publication (policy/export/job lifecycle) with idempotency keys for notifier ingestion. |
+| Sprint 38 | Notifications Studio Phase 1 | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-38-201 | Emit enriched violation events including rationale IDs via orchestrator bus. |
+| Sprint 38 | Notifications Studio Phase 1 | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-NOTIFY-38-001 | Route notifier APIs through gateway with tenant scoping and operator scopes. |
+| Sprint 39 | Java Analyzer Core | src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md | TODO | Java Analyzer Guild | SCANNER-ANALYZERS-JAVA-21-001 | Java input normalizer (jar/war/ear/fat/jmod/jimage) with MR overlay selection. |
+| Sprint 39 | Java Analyzer Core | src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md | TODO | Java Analyzer Guild | SCANNER-ANALYZERS-JAVA-21-002 | Module/classpath builder with duplicate & split-package detection. |
+| Sprint 39 | Java Analyzer Core | src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md | TODO | Java Analyzer Guild | SCANNER-ANALYZERS-JAVA-21-003 | SPI scanner & provider selection with warnings. |
+| Sprint 39 | Java Analyzer Core | src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md | TODO | Java Analyzer Guild | SCANNER-ANALYZERS-JAVA-21-004 | Reflection/TCCL heuristics emitting reason-coded edges. |
+| Sprint 39 | Java Analyzer Core | src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md | TODO | Java Analyzer Guild | SCANNER-ANALYZERS-JAVA-21-005 | Framework config extraction (Spring, Jakarta, MicroProfile, logging, Graal configs). |
+| Sprint 39 | Java Analyzer Core | src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md | TODO | Java Analyzer Guild | SCANNER-ANALYZERS-JAVA-21-006 | JNI/native hint detection for Java artifacts. |
+| Sprint 39 | Java Analyzer Core | src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md | TODO | Java Analyzer Guild | SCANNER-ANALYZERS-JAVA-21-007 | Manifest/signature metadata collector (main/start/agent classes, signers). |
+| Sprint 39 | Notifications Studio Phase 2 | docs/TASKS.md | TODO | Docs Guild | DOCS-NOTIFY-39-002 | Publish `/docs/notifications/rules.md`, `/templates.md`, `/digests.md` with imposed rule reminder. |
+| Sprint 39 | Notifications Studio Phase 2 | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-NOTIFY-39-002 | Add throttling/quiet-hours dashboards, digest job monitoring, and storm breaker alerts. |
+| Sprint 39 | Notifications Studio Phase 2 | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-NOTIFY-39-001 | Add simulation/digest CLI verbs and advanced filtering for incidents. |
+| Sprint 39 | Notifications Studio Phase 2 | src/Findings/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-NOTIFY-39-001 | Optimize digest queries and provide API for notifier to fetch unresolved policy violations/SBOM deltas. |
+| Sprint 39 | Notifications Studio Phase 2 | src/Notifier/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-SVC-39-001 | Implement correlation engine, throttling, quiet hours/maintenance evaluator, and incident state machine. |
+| Sprint 39 | Notifications Studio Phase 2 | src/Notifier/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-SVC-39-002 | Add digest generator with Findings Ledger queries and distribution (email/chat). |
+| Sprint 39 | Notifications Studio Phase 2 | src/Notifier/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-SVC-39-003 | Provide simulation engine and API for rule dry-run against historical events. |
+| Sprint 39 | Notifications Studio Phase 2 | src/Notifier/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-SVC-39-004 | Integrate quiet hours calendars and default throttles with audit logging. |
+| Sprint 39 | Notifications Studio Phase 2 | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-NOTIFY-39-001 | Surface digest scheduling, simulation, and throttle management endpoints via gateway. |
+| Sprint 40 | Java Observation & Runtime | src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md | TODO | Java Analyzer Guild | SCANNER-ANALYZERS-JAVA-21-008 | Observation writer producing entrypoints/components/edges with warnings. |
+| Sprint 40 | Java Observation & Runtime | src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md | TODO | Java Analyzer Guild, QA Guild | SCANNER-ANALYZERS-JAVA-21-009 | Fixture suite + determinism/perf benchmarks for Java analyzer. |
+| Sprint 40 | Java Observation & Runtime | src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md | TODO | Java Analyzer Guild, Signals Guild | SCANNER-ANALYZERS-JAVA-21-010 | Optional runtime ingestion via agent/JFR producing runtime edges. |
+| Sprint 40 | Java Observation & Runtime | src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md | TODO | Java Analyzer Guild, DevOps Guild | SCANNER-ANALYZERS-JAVA-21-011 | Package Java analyzer plug-in + Offline Kit/CLI updates. |
+| Sprint 40 | Notifications Studio Phase 3 | docs/TASKS.md | TODO | Docs Guild | DOCS-NOTIFY-40-001 | Publish `/docs/notifications/channels.md`, `/escalations.md`, `/api.md`, `/operations/notifier-runbook.md`, `/security/notifications-hardening.md` with imposed rule lines. |
+| Sprint 40 | Notifications Studio Phase 3 | ops/deployment/TASKS.md | TODO | Deployment Guild | DEPLOY-NOTIFY-40-001 | Package notifier escalations + localization deployment overlays, signed ack token rotation scripts, and rollback guidance. |
+| Sprint 40 | Notifications Studio Phase 3 | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-NOTIFY-40-001 | Finalize notifier dashboards/alerts (escalation failures, ack latency), chaos testing harness, and channel health monitoring. |
+| Sprint 40 | Notifications Studio Phase 3 | ops/offline-kit/TASKS.md | CARRY (no scope change) | Offline Kit Guild | DEVOPS-OFFLINE-37-002 | Carry from Sprint 37: Notifier offline packs (sample configs, template/digest packs, dry-run harness) with integrity checks. |
+| Sprint 40 | Notifications Studio Phase 3 | src/Authority/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-NOTIFY-40-001 | Enforce ack token signing/rotation, webhook allowlists, and admin-only escalation settings. |
+| Sprint 40 | Notifications Studio Phase 3 | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-NOTIFY-40-001 | Implement ack token redemption, escalation management, localization previews. |
+| Sprint 40 | Notifications Studio Phase 3 | src/Notifier/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-SVC-40-001 | Implement escalations, on-call schedules, ack bridge, PagerDuty/OpsGenie adapters, and localization bundles. |
+| Sprint 40 | Notifications Studio Phase 3 | src/Notifier/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-SVC-40-002 | Add CLI inbox/in-app feed channels and summary storm breaker notifications. |
+| Sprint 40 | Notifications Studio Phase 3 | src/Notifier/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-SVC-40-003 | Harden security: signed ack links, webhook HMAC/IP allowlists, tenant isolation fuzzing, localization fallback. |
+| Sprint 40 | Notifications Studio Phase 3 | src/Notifier/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-SVC-40-004 | Finalize observability (incident metrics, escalation latency) and chaos tests for channel outages. |
+| Sprint 40 | Notifications Studio Phase 3 | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-NOTIFY-40-001 | Expose escalation, localization, channel health endpoints and verification of signed links. |
+| Sprint 41 | CLI Parity & Task Packs Phase 1 | docs/TASKS.md | TODO | Docs Guild | DOCS-CLI-41-001 | Publish `/docs/cli/overview.md`, `/cli/configuration.md`, `/cli/output-and-exit-codes.md` (with imposed rule). |
+| Sprint 41 | CLI Parity & Task Packs Phase 1 | ops/deployment/TASKS.md | TODO | Deployment Guild | DEPLOY-CLI-41-001 | Package CLI release artifacts (tarballs, completions, container image) with distribution docs. |
+| Sprint 41 | CLI Parity & Task Packs Phase 1 | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-CLI-41-001 | Establish CLI build pipeline (multi-platform binaries, SBOM, checksums) and parity matrix CI enforcement. |
+| Sprint 41 | CLI Parity & Task Packs Phase 1 | src/Authority/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-PACKS-41-001 | Define CLI SSO scopes and Packs roles (`Packs.Read/Write/Run/Approve`); update discovery/offline defaults. |
+| Sprint 41 | CLI Parity & Task Packs Phase 1 | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-CORE-41-001 | Implement CLI config/auth foundation, global flags, output renderer, and error/exit code mapping. |
+| Sprint 41 | CLI Parity & Task Packs Phase 1 | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-PARITY-41-001 | Deliver parity command groups (`policy`, `sbom`, `vuln`, `vex`, `advisory`, `export`, `orchestrator`) with JSON/table outputs and `--explain`. |
+| Sprint 41 | CLI Parity & Task Packs Phase 1 | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-PARITY-41-002 | Implement `notify`, `aoc`, `auth` command groups, idempotency keys, completions, and parity matrix export. |
+| Sprint 41 | CLI Parity & Task Packs Phase 1 | src/Orchestrator/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-SVC-41-101 | Register `pack-run` job type, integrate logs/artifacts, expose pack run metadata. |
+| Sprint 41 | CLI Parity & Task Packs Phase 1 | src/PacksRegistry/StellaOps.PacksRegistry/TASKS.md | TODO | Packs Registry Guild | PACKS-REG-41-001 | Implement packs index API, signature verification, provenance storage, and RBAC. |
+| Sprint 41 | CLI Parity & Task Packs Phase 1 | src/TaskRunner/StellaOps.TaskRunner/TASKS.md | TODO | Task Runner Guild | TASKRUN-41-001 | Bootstrap Task Runner service, migrations, run API, local executor, approvals pause, artifact capture. |
+| Sprint 42 | CLI Parity & Task Packs Phase 2 | docs/TASKS.md | TODO | Docs Guild | DOCS-CLI-42-001 | Publish `/docs/cli/parity-matrix.md`, `/cli/commands/*.md`, `/docs/task-packs/spec.md` (imposed rule). |
+| Sprint 42 | CLI Parity & Task Packs Phase 2 | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-CLI-42-001 | Add CLI golden output tests, parity diff automation, and pack run CI harness. |
+| Sprint 42 | CLI Parity & Task Packs Phase 2 | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-PACKS-42-001 | Implement Task Pack CLI commands (`pack plan/run/push/pull/verify`) with plan/simulate engine and expression sandbox. |
+| Sprint 42 | CLI Parity & Task Packs Phase 2 | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-PARITY-41-001..002 | Close parity gaps for Notifications, Policy Studio advanced features, SBOM graph, Vuln Explorer; parity matrix green. |
+| Sprint 42 | CLI Parity & Task Packs Phase 2 | src/Findings/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-PACKS-42-001 | Expose snapshot/time-travel APIs for CLI offline mode and pack simulation. |
+| Sprint 42 | CLI Parity & Task Packs Phase 2 | src/Orchestrator/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-SVC-42-101 | Stream pack run logs via SSE/WS, expose artifact manifests, enforce pack run quotas. |
+| Sprint 42 | CLI Parity & Task Packs Phase 2 | src/PacksRegistry/StellaOps.PacksRegistry/TASKS.md | TODO | Packs Registry Guild | PACKS-REG-42-001 | Support pack version lifecycle, tenant allowlists, provenance export, signature rotation. |
+| Sprint 42 | CLI Parity & Task Packs Phase 2 | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ENGINE-42-201 | Provide stable rationale IDs/APIs for CLI `--explain` and pack policy gates. |
+| Sprint 42 | CLI Parity & Task Packs Phase 2 | src/TaskRunner/StellaOps.TaskRunner/TASKS.md | TODO | Task Runner Guild | TASKRUN-42-001 | Add loops, conditionals, `maxParallel`, outputs, simulation mode, policy gates in Task Runner. |
+| Sprint 43 | CLI Parity & Task Packs Phase 3 | docs/TASKS.md | TODO | Docs Guild | DOCS-PACKS-43-001 | Publish `/docs/task-packs/authoring-guide.md`, `/registry.md`, `/runbook.md`, `/security/pack-signing-and-rbac.md`, `/operations/cli-release-and-packaging.md` (imposed rule). |
+| Sprint 43 | CLI Parity & Task Packs Phase 3 | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-CLI-43-001 | Finalize multi-platform release automation, SBOM signing, parity gate enforcement, pack run chaos tests. |
+| Sprint 43 | CLI Parity & Task Packs Phase 3 | src/Authority/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-PACKS-41-001 | Enforce pack signing policies, approval RBAC, and CLI token scopes for headless CI runs. |
+| Sprint 43 | CLI Parity & Task Packs Phase 3 | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-PACKS-42-001 | Deliver advanced pack features (approvals pause/resume, remote streaming, secret injection), localization, man pages. |
+| Sprint 43 | CLI Parity & Task Packs Phase 3 | src/ExportCenter/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-SVC-35-005, PACKS-REG-41-001 | Integrate pack run manifests into export bundles and CLI verify flows. |
+| Sprint 43 | CLI Parity & Task Packs Phase 3 | src/PacksRegistry/StellaOps.PacksRegistry/TASKS.md | TODO | Packs Registry Guild | PACKS-REG-42-001 | Enforce pack signing policies, audit trails, registry mirroring, Offline Kit support. |
+| Sprint 43 | CLI Parity & Task Packs Phase 3 | src/TaskRunner/StellaOps.TaskRunner/TASKS.md | TODO | Task Runner Guild | TASKRUN-42-001 | Implement approvals workflow, notifications integration, remote artifact uploads, chaos resilience. |
+| Sprint 44 | Containerized Distribution Phase 1 | docs/TASKS.md | TODO | Docs Guild | DOCS-INSTALL-44-001 | Publish install overview + Compose Quickstart docs (imposed rule). |
+| Sprint 44 | Containerized Distribution Phase 1 | ops/deployment/TASKS.md | TODO | Deployment Guild | COMPOSE-44-001 | Deliver Quickstart Compose stack with seed data and quickstart script. |
+| Sprint 44 | Containerized Distribution Phase 1 | ops/deployment/TASKS.md | TODO | Deployment Guild | COMPOSE-44-002 | Provide backup/reset scripts with guardrails and documentation. |
+| Sprint 44 | Containerized Distribution Phase 1 | ops/deployment/TASKS.md | TODO | Deployment Guild | COMPOSE-44-003 | Implement seed job and onboarding wizard toggle (`QUICKSTART_MODE`). |
+| Sprint 44 | Containerized Distribution Phase 1 | ops/deployment/TASKS.md | TODO | Deployment Guild | DEPLOY-COMPOSE-44-001 | Finalize Quickstart scripts and README. |
+| Sprint 44 | Containerized Distribution Phase 1 | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-CONTAINERS-44-001 | Automate multi-arch builds with SBOM/signature pipeline. |
+| Sprint 44 | Containerized Distribution Phase 1 | ops/devops/TASKS.md | TODO | DevOps Guild | DOCKER-44-001 | Author multi-stage Dockerfiles with non-root users, read-only FS, and health scripts for all services. |
+| Sprint 44 | Containerized Distribution Phase 1 | ops/devops/TASKS.md | TODO | DevOps Guild | DOCKER-44-002 | Generate SBOMs and cosign attestations for each image; integrate signature verification in CI. |
+| Sprint 44 | Containerized Distribution Phase 1 | ops/devops/TASKS.md | TODO | DevOps Guild | DOCKER-44-003 | Ensure `/health/*`, `/version`, `/metrics`, and capability endpoints (`merge=false`) are exposed across services. |
+| Sprint 44 | Containerized Distribution Phase 1 | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-CONTAINERS-44-001 | Expose config discovery and quickstart handling with health/version endpoints. |
+| Sprint 45 | Containerized Distribution Phase 2 | docs/TASKS.md | TODO | Docs Guild | DOCS-INSTALL-45-001 | Publish Helm production + configuration reference docs (imposed rule). |
+| Sprint 45 | Containerized Distribution Phase 2 | ops/deployment/TASKS.md | TODO | Deployment Guild | DEPLOY-HELM-45-001 | Publish Helm install guide and sample values. |
+| Sprint 45 | Containerized Distribution Phase 2 | ops/deployment/TASKS.md | TODO | Deployment Guild | HELM-45-001 | Scaffold Helm chart with component toggles and pinned digests. |
+| Sprint 45 | Containerized Distribution Phase 2 | ops/deployment/TASKS.md | TODO | Deployment Guild | HELM-45-002 | Add security features (TLS, NetworkPolicy, Secrets integration). |
+| Sprint 45 | Containerized Distribution Phase 2 | ops/deployment/TASKS.md | TODO | Deployment Guild | HELM-45-003 | Implement HPA, PDB, readiness gates, and observability hooks. |
+| Sprint 45 | Containerized Distribution Phase 2 | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-CONTAINERS-45-001 | Add Compose/Helm smoke tests to CI. |
+| Sprint 45 | Containerized Distribution Phase 2 | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-CONTAINERS-45-001 | Ensure readiness endpoints and config toggles support Helm deployments. |
+| Sprint 46 | Containerized Distribution Phase 3 | docs/TASKS.md | TODO | Docs Guild | DOCS-INSTALL-46-001 | Publish air-gap, supply chain, health/readiness, image catalog, console onboarding docs (imposed rule). |
+| Sprint 46 | Containerized Distribution Phase 3 | ops/deployment/TASKS.md | TODO | Deployment Guild | DEPLOY-AIRGAP-46-001 | Provide air-gap load script and docs. |
+| Sprint 46 | Containerized Distribution Phase 3 | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-CONTAINERS-46-001 | Build signed air-gap bundle and verify in CI. |
+| Sprint 46 | Containerized Distribution Phase 3 | ops/offline-kit/TASKS.md | TODO | Offline Kit Guild | OFFLINE-CONTAINERS-46-001 | Include air-gap bundle and instructions in Offline Kit. |
+| Sprint 46 | Containerized Distribution Phase 3 | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-CONTAINERS-46-001 | Harden offline mode and document fallback behavior. |
+| Sprint 47 | Authority-Backed Scopes & Tenancy Phase 1 | docs/TASKS.md | TODO | Docs Guild | DOCS-TEN-47-001 | Publish `/docs/security/tenancy-overview.md` and `/docs/security/scopes-and-roles.md` (imposed rule). |
+| Sprint 47 | Authority-Backed Scopes & Tenancy Phase 1 | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-TEN-47-001 | Integrate JWKS caching, signature verification tests, and auth regression suite into CI. |
+| Sprint 47 | Authority-Backed Scopes & Tenancy Phase 1 | src/Authority/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-TEN-47-001 | Implement unified JWT/OIDC config, scope grammar, tenant/project claims, and JWKS caching in Authority. |
+| Sprint 47 | Authority-Backed Scopes & Tenancy Phase 1 | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-TEN-47-001 | Ship `stella login`, `whoami`, `tenants list`, and tenant flag persistence with secure token storage. |
+| Sprint 47 | Authority-Backed Scopes & Tenancy Phase 1 | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-TEN-47-001 | Add auth middleware (token verification, tenant activation, scope checks) and structured 403 responses. |
+| Sprint 48 | Authority-Backed Scopes & Tenancy Phase 2 | docs/TASKS.md | TODO | Docs Guild | DOCS-TEN-48-001 | Publish `/docs/operations/multi-tenancy.md`, `/docs/operations/rls-and-data-isolation.md`, `/docs/console/admin-tenants.md` (imposed rule). |
+| Sprint 48 | Authority-Backed Scopes & Tenancy Phase 2 | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-TEN-48-001 | Write integration tests for RLS enforcement, tenant audit stream, and object store prefix checks. |
+| Sprint 48 | Authority-Backed Scopes & Tenancy Phase 2 | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-TEN-48-001 | Ensure advisory linkers operate per tenant with RLS, enforce aggregation-only capability endpoint. |
+| Sprint 48 | Authority-Backed Scopes & Tenancy Phase 2 | src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-TEN-48-001 | Ensure VEX linkers operate per tenant with RLS (mirrors CONCELIER-TEN-48-001); enforce capability endpoint `merge=false`. |
+| Sprint 48 | Authority-Backed Scopes & Tenancy Phase 2 | src/ExportCenter/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-TEN-48-001 | Add tenant prefixes to manifests/artifacts, enforce scope checks, and block cross-tenant exports by default. |
+| Sprint 48 | Authority-Backed Scopes & Tenancy Phase 2 | src/Findings/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-TEN-48-001 | Partition findings by tenant/project, enable RLS, and update queries/events to include tenant context. |
+| Sprint 48 | Authority-Backed Scopes & Tenancy Phase 2 | src/Notifier/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-TEN-48-001 | Tenant-scope notification rules, incidents, and outbound channels; update storage schemas. |
+| Sprint 48 | Authority-Backed Scopes & Tenancy Phase 2 | src/Orchestrator/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-TEN-48-001 | Stamp jobs with tenant/project, set DB session context, and reject jobs without context. |
+| Sprint 48 | Authority-Backed Scopes & Tenancy Phase 2 | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-TEN-48-001 | Add `tenant_id`/`project_id` to policy data, enable Postgres RLS, and expose rationale IDs with tenant context. |
+| Sprint 48 | Authority-Backed Scopes & Tenancy Phase 2 | src/TaskRunner/StellaOps.TaskRunner/TASKS.md | TODO | Task Runner Guild | TASKRUN-TEN-48-001 | Propagate tenant/project to all steps, enforce object store prefix, and validate before execution. |
+| Sprint 48 | Authority-Backed Scopes & Tenancy Phase 2 | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-TEN-48-001 | Enforce tenant context through persistence (DB GUC, object store prefix), add request annotations, and emit audit events. |
+| Sprint 49 | Authority-Backed Scopes & Tenancy Phase 3 | docs/TASKS.md | TODO | Docs Guild | DOCS-TEN-49-001 | Publish `/docs/cli/authentication.md`, `/docs/api/authentication.md`, `/docs/policy/examples/abac-overlays.md`, `/docs/install/configuration-reference.md` updates (imposed rule). |
+| Sprint 49 | Authority-Backed Scopes & Tenancy Phase 3 | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-TEN-49-001 | Implement audit log pipeline, monitor scope usage, chaos tests for JWKS outage, and tenant load/perf tests. |
+| Sprint 49 | Authority-Backed Scopes & Tenancy Phase 3 | src/Authority/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-TEN-49-001 | Implement service accounts, delegation tokens (`act` chain), per-tenant quotas, and audit log streaming. |
+| Sprint 49 | Authority-Backed Scopes & Tenancy Phase 3 | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-TEN-49-001 | Add service account token minting, delegation, and `--impersonate` banner/controls. |
+| Sprint 49 | Authority-Backed Scopes & Tenancy Phase 3 | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-TEN-49-001 | Integrate ABAC policy overlay (optional), expose audit API, and support service token minting endpoints. |
+| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | docs/TASKS.md | TODO | Docs Guild | DOCS-INSTALL-50-001 | Add `/docs/install/telemetry-stack.md` for collector deployment and offline packaging. |
+| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | docs/TASKS.md | BLOCKED (2025-10-26) | Docs Guild | DOCS-OBS-50-001 | Author `/docs/observability/overview.md` with imposed rule banner and architecture context. |
+| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | docs/TASKS.md | TODO | Docs Guild | DOCS-OBS-50-002 | Document telemetry standards (fields, scrubbing, sampling) under `/docs/observability/telemetry-standards.md`. |
+| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | docs/TASKS.md | TODO | Docs Guild | DOCS-OBS-50-003 | Publish structured logging guide `/docs/observability/logging.md` with examples and imposed rule banner. |
+| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | docs/TASKS.md | TODO | Docs Guild | DOCS-OBS-50-004 | Publish tracing guide `/docs/observability/tracing.md` covering context propagation and sampling. |
+| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | docs/TASKS.md | TODO | Docs Guild | DOCS-SEC-OBS-50-001 | Update `/docs/security/redaction-and-privacy.md` for telemetry privacy controls. |
+| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | ops/devops/TASKS.md | DOING (2025-10-26) | DevOps Guild | DEVOPS-OBS-50-002 | Stand up multi-tenant metrics/logs/traces backends with retention and isolation. |
+> Staging rollout plan recorded in `docs/ops/telemetry-storage.md`; waiting on Authority-issued tokens and namespace bootstrap.
+| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | src/Authority/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-OBS-50-001 | Introduce observability/timeline/evidence/attestation scopes and update discovery metadata. |
+| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-OBS-50-001 | Propagate trace headers from CLI commands and print correlation IDs. |
+| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-OBS-50-001 | Replace ad-hoc logging with telemetry core across advisory ingestion/linking. |
+| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-WEB-OBS-50-001 | Adopt telemetry core in Concelier APIs and surface correlation IDs. |
+| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-OBS-50-001 | Integrate telemetry core into VEX ingestion/linking with scope metadata. |
+| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | src/Excititor/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-WEB-OBS-50-001 | Add telemetry core to VEX APIs and emit trace headers. |
+| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | src/ExportCenter/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-OBS-50-001 | Enable telemetry core in export planner/workers capturing bundle metadata. |
+| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | src/Findings/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-OBS-50-001 | Wire telemetry core through ledger writer/projector for append/replay operations. |
+| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | src/Orchestrator/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-OBS-50-001 | Instrument orchestrator scheduler/control APIs with telemetry core spans/logs. |
+| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-OBS-50-001 | Instrument policy compile/evaluate flows with telemetry core spans/logs. |
+| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | src/TaskRunner/StellaOps.TaskRunner/TASKS.md | TODO | Task Runner Guild | TASKRUN-OBS-50-001 | Adopt telemetry core in Task Runner host and workers with scrubbed transcripts. |
+| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | src/Telemetry/StellaOps.Telemetry.Core/TASKS.md | TODO | Observability Guild | TELEMETRY-OBS-50-001 | Bootstrap telemetry core library with structured logging, OTLP exporters, and deterministic bootstrap. |
+| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | src/Telemetry/StellaOps.Telemetry.Core/TASKS.md | TODO | Observability Guild | TELEMETRY-OBS-50-002 | Deliver context propagation middleware for HTTP/gRPC/jobs/CLI carrying trace + tenant metadata. |
+| Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-OBS-50-001 | Integrate telemetry core into gateway and emit structured traces/logs for all routes. |
+| Sprint 51 | Observability & Forensics Phase 2 – SLOs & Dashboards | docs/TASKS.md | TODO | Docs Guild | DOCS-OBS-51-001 | Publish `/docs/observability/metrics-and-slos.md` with alert policies. |
+| Sprint 51 | Observability & Forensics Phase 2 – SLOs & Dashboards | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-OBS-51-001 | Deploy SLO evaluator service, dashboards, and alert routing. |
+| Sprint 51 | Observability & Forensics Phase 2 – SLOs & Dashboards | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-OBS-51-001 | Implement `stella obs top` streaming health metrics command. |
+| Sprint 51 | Observability & Forensics Phase 2 – SLOs & Dashboards | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-OBS-51-001 | Emit ingest latency metrics + SLO thresholds for advisories. |
+| Sprint 51 | Observability & Forensics Phase 2 – SLOs & Dashboards | src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-OBS-51-001 | Provide VEX ingest metrics and SLO burn-rate automation. |
+| Sprint 51 | Observability & Forensics Phase 2 – SLOs & Dashboards | src/ExportCenter/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-OBS-51-001 | Capture export planner/bundle latency metrics and SLOs. |
+| Sprint 51 | Observability & Forensics Phase 2 – SLOs & Dashboards | src/Findings/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-OBS-51-001 | Add ledger/projector metrics dashboards and burn-rate policies. |
+| Sprint 51 | Observability & Forensics Phase 2 – SLOs & Dashboards | src/Notifier/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-OBS-51-001 | Ingest SLO burn-rate webhooks and deliver observability alerts. |
+| Sprint 51 | Observability & Forensics Phase 2 – SLOs & Dashboards | src/Orchestrator/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-OBS-51-001 | Publish orchestration metrics, SLOs, and burn-rate alerts. |
+| Sprint 51 | Observability & Forensics Phase 2 – SLOs & Dashboards | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-OBS-51-001 | Publish policy evaluation metrics + dashboards meeting SLO targets. |
+| Sprint 51 | Observability & Forensics Phase 2 – SLOs & Dashboards | src/TaskRunner/StellaOps.TaskRunner/TASKS.md | TODO | Task Runner Guild | TASKRUN-OBS-51-001 | Emit task runner golden-signal metrics and SLO alerts. |
+| Sprint 51 | Observability & Forensics Phase 2 – SLOs & Dashboards | src/Telemetry/StellaOps.Telemetry.Core/TASKS.md | TODO | Observability Guild | TELEMETRY-OBS-51-001 | Ship metrics helpers + exemplar guards for golden signals. |
+| Sprint 51 | Observability & Forensics Phase 2 – SLOs & Dashboards | src/Telemetry/StellaOps.Telemetry.Core/TASKS.md | TODO | Security Guild | TELEMETRY-OBS-51-002 | Implement log scrubbing and tenant debug override controls. |
+| Sprint 51 | Observability & Forensics Phase 2 – SLOs & Dashboards | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-OBS-51-001 | Expose `/obs/health` and `/obs/slo` aggregations for services. |
+| Sprint 52 | Observability & Forensics Phase 3 – Timeline & Decision Logs | docs/TASKS.md | TODO | Docs Guild | DOCS-CLI-OBS-52-001 | Document `stella obs` CLI commands and scripting patterns. |
+| Sprint 52 | Observability & Forensics Phase 3 – Timeline & Decision Logs | docs/TASKS.md | TODO | Docs Guild | DOCS-CONSOLE-OBS-52-001 | Document Console observability hub and trace/log search workflows. |
+| Sprint 52 | Observability & Forensics Phase 3 – Timeline & Decision Logs | docs/TASKS.md | TODO | Docs Guild | DOCS-CONSOLE-OBS-52-002 | Publish Console forensics/timeline guidance with imposed rule banner. |
+| Sprint 52 | Observability & Forensics Phase 3 – Timeline & Decision Logs | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-OBS-52-001 | Configure streaming pipelines and schema validation for timeline events. |
+| Sprint 52 | Observability & Forensics Phase 3 – Timeline & Decision Logs | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-OBS-52-001 | Add `stella obs trace` + log commands correlating timeline data. |
+| Sprint 52 | Observability & Forensics Phase 3 – Timeline & Decision Logs | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-OBS-52-001 | Emit advisory ingest/link timeline events with provenance metadata. |
+| Sprint 52 | Observability & Forensics Phase 3 – Timeline & Decision Logs | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-WEB-OBS-52-001 | Provide SSE bridge for advisory timeline events. |
+| Sprint 52 | Observability & Forensics Phase 3 – Timeline & Decision Logs | src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-OBS-52-001 | Emit VEX ingest/link timeline events with justification info. |
+| Sprint 52 | Observability & Forensics Phase 3 – Timeline & Decision Logs | src/Excititor/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-WEB-OBS-52-001 | Stream VEX timeline updates to clients with tenant filters. |
+| Sprint 52 | Observability & Forensics Phase 3 – Timeline & Decision Logs | src/ExportCenter/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-OBS-52-001 | Publish export lifecycle events into timeline. |
+| Sprint 52 | Observability & Forensics Phase 3 – Timeline & Decision Logs | src/Findings/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-OBS-52-001 | Record ledger append/projection events into timeline stream. |
+| Sprint 52 | Observability & Forensics Phase 3 – Timeline & Decision Logs | src/Orchestrator/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-OBS-52-001 | Emit job lifecycle timeline events with tenant/project metadata. |
+| Sprint 52 | Observability & Forensics Phase 3 – Timeline & Decision Logs | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-OBS-52-001 | Emit policy decision timeline events with rule summaries and trace IDs. |
+| Sprint 52 | Observability & Forensics Phase 3 – Timeline & Decision Logs | src/TaskRunner/StellaOps.TaskRunner/TASKS.md | TODO | Task Runner Guild | TASKRUN-OBS-52-001 | Emit pack run timeline events with dedupe logic. |
+| Sprint 52 | Observability & Forensics Phase 3 – Timeline & Decision Logs | src/TimelineIndexer/StellaOps.TimelineIndexer/TASKS.md | TODO | Timeline Indexer Guild | TIMELINE-OBS-52-001 | Bootstrap timeline indexer service and schema with RLS scaffolding. |
+| Sprint 52 | Observability & Forensics Phase 3 – Timeline & Decision Logs | src/TimelineIndexer/StellaOps.TimelineIndexer/TASKS.md | TODO | Timeline Indexer Guild | TIMELINE-OBS-52-002 | Implement event ingestion pipeline with ordering and dedupe. |
+| Sprint 52 | Observability & Forensics Phase 3 – Timeline & Decision Logs | src/TimelineIndexer/StellaOps.TimelineIndexer/TASKS.md | TODO | Timeline Indexer Guild | TIMELINE-OBS-52-003 | Expose timeline query APIs with tenant filters and pagination. |
+| Sprint 52 | Observability & Forensics Phase 3 – Timeline & Decision Logs | src/TimelineIndexer/StellaOps.TimelineIndexer/TASKS.md | TODO | Security Guild | TIMELINE-OBS-52-004 | Finalize RLS + scope enforcement and audit logging for timeline reads. |
+| Sprint 52 | Observability & Forensics Phase 3 – Timeline & Decision Logs | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-OBS-52-001 | Provide trace/log proxy endpoints bridging to timeline + log store. |
+| Sprint 53 | Observability & Forensics Phase 4 – Evidence Locker | docs/TASKS.md | TODO | Docs Guild | DOCS-CLI-FORENSICS-53-001 | Document `stella forensic` CLI workflows with sample bundles. |
+| Sprint 53 | Observability & Forensics Phase 4 – Evidence Locker | docs/TASKS.md | TODO | Docs Guild | DOCS-FORENSICS-53-001 | Publish `/docs/forensics/evidence-locker.md` covering bundles, WORM, legal holds. |
+| Sprint 53 | Observability & Forensics Phase 4 – Evidence Locker | docs/TASKS.md | TODO | Docs Guild | DOCS-FORENSICS-53-003 | Publish `/docs/forensics/timeline.md` with schema and query examples. |
+| Sprint 53 | Observability & Forensics Phase 4 – Evidence Locker | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-OBS-53-001 | Provision WORM-capable storage, legal hold automation, and backup/restore scripts for evidence locker. |
+| Sprint 53 | Observability & Forensics Phase 4 – Evidence Locker | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-FORENSICS-53-001 | Ship `stella forensic snapshot` commands invoking evidence locker. |
+| Sprint 53 | Observability & Forensics Phase 4 – Evidence Locker | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-OBS-53-001 | Generate advisory evidence payloads (raw doc, linkset diff) for locker. |
+| Sprint 53 | Observability & Forensics Phase 4 – Evidence Locker | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-WEB-OBS-53-001 | Add `/evidence/advisories/*` gateway endpoints consuming locker APIs. |
+| Sprint 53 | Observability & Forensics Phase 4 – Evidence Locker | src/EvidenceLocker/StellaOps.EvidenceLocker/TASKS.md | TODO | Evidence Locker Guild | EVID-OBS-53-001 | Bootstrap evidence locker service with schema, storage abstraction, and RLS. |
+| Sprint 53 | Observability & Forensics Phase 4 – Evidence Locker | src/EvidenceLocker/StellaOps.EvidenceLocker/TASKS.md | TODO | Evidence Locker Guild | EVID-OBS-53-002 | Implement bundle builders for evaluation, job, and export snapshots. |
+| Sprint 53 | Observability & Forensics Phase 4 – Evidence Locker | src/EvidenceLocker/StellaOps.EvidenceLocker/TASKS.md | TODO | Evidence Locker Guild | EVID-OBS-53-003 | Expose evidence APIs (create/get/verify/hold) with audit + quotas. |
+| Sprint 53 | Observability & Forensics Phase 4 – Evidence Locker | src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-OBS-53-001 | Produce VEX evidence payloads and push to locker. |
+| Sprint 53 | Observability & Forensics Phase 4 – Evidence Locker | src/Excititor/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-WEB-OBS-53-001 | Expose `/evidence/vex/*` endpoints retrieving locker bundles. |
+| Sprint 53 | Observability & Forensics Phase 4 – Evidence Locker | src/ExportCenter/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-OBS-53-001 | Store export manifests + transcripts within evidence bundles. |
+| Sprint 53 | Observability & Forensics Phase 4 – Evidence Locker | src/Findings/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-OBS-53-001 | Persist evidence bundle references alongside ledger entries and expose lookup API. |
+| Sprint 53 | Observability & Forensics Phase 4 – Evidence Locker | src/Orchestrator/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-OBS-53-001 | Attach job capsules + manifests to evidence locker snapshots. |
+| Sprint 53 | Observability & Forensics Phase 4 – Evidence Locker | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-OBS-53-001 | Build evaluation evidence bundles (inputs, rule traces, engine version). |
+| Sprint 53 | Observability & Forensics Phase 4 – Evidence Locker | src/TaskRunner/StellaOps.TaskRunner/TASKS.md | TODO | Task Runner Guild | TASKRUN-OBS-53-001 | Capture step transcripts and manifests into evidence bundles. |
+| Sprint 53 | Observability & Forensics Phase 4 – Evidence Locker | src/TimelineIndexer/StellaOps.TimelineIndexer/TASKS.md | TODO | Timeline Indexer Guild | TIMELINE-OBS-53-001 | Link timeline events to evidence bundle digests and expose evidence lookup endpoint. |
+| Sprint 54 | Observability & Forensics Phase 5 – Provenance & Verification | docs/TASKS.md | TODO | Docs Guild | DOCS-FORENSICS-53-002 | Publish `/docs/forensics/provenance-attestation.md` covering signing + verification. |
+| Sprint 54 | Observability & Forensics Phase 5 – Provenance & Verification | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-OBS-54-001 | Manage provenance signing infrastructure (KMS keys, timestamp authority) and CI verification. |
+| Sprint 54 | Observability & Forensics Phase 5 – Provenance & Verification | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-FORENSICS-54-001 | Implement `stella forensic verify` command verifying bundles + signatures. |
+| Sprint 54 | Observability & Forensics Phase 5 – Provenance & Verification | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-FORENSICS-54-002 | Add `stella forensic attest show` command with signer/timestamp details. |
+| Sprint 54 | Observability & Forensics Phase 5 – Provenance & Verification | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-OBS-54-001 | Sign advisory batches with DSSE attestations and expose verification. |
+| Sprint 54 | Observability & Forensics Phase 5 – Provenance & Verification | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-WEB-OBS-54-001 | Add `/attestations/advisories/*` endpoints surfacing verification metadata. |
+| Sprint 54 | Observability & Forensics Phase 5 – Provenance & Verification | src/EvidenceLocker/StellaOps.EvidenceLocker/TASKS.md | TODO | Evidence Locker Guild | EVID-OBS-54-001 | Attach DSSE signing/timestamping to evidence bundles and emit timeline hooks. |
+| Sprint 54 | Observability & Forensics Phase 5 – Provenance & Verification | src/EvidenceLocker/StellaOps.EvidenceLocker/TASKS.md | TODO | Evidence Locker Guild | EVID-OBS-54-002 | Provide bundle packaging + offline verification fixtures. |
+| Sprint 54 | Observability & Forensics Phase 5 – Provenance & Verification | src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-OBS-54-001 | Produce VEX batch attestations linking to timeline/ledger. |
+| Sprint 54 | Observability & Forensics Phase 5 – Provenance & Verification | src/Excititor/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-WEB-OBS-54-001 | Expose `/attestations/vex/*` endpoints with verification summaries. |
+| Sprint 54 | Observability & Forensics Phase 5 – Provenance & Verification | src/ExportCenter/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-OBS-54-001 | Produce export attestation manifests and CLI verification hooks. |
+| Sprint 54 | Observability & Forensics Phase 5 – Provenance & Verification | src/Orchestrator/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-OBS-54-001 | Produce DSSE attestations for jobs and surface verification endpoint. |
+| Sprint 54 | Observability & Forensics Phase 5 – Provenance & Verification | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-OBS-54-001 | Generate DSSE attestations for policy evaluations and expose verification API. |
+| Sprint 54 | Observability & Forensics Phase 5 – Provenance & Verification | src/Provenance/StellaOps.Provenance.Attestation/TASKS.md | TODO | Provenance Guild | PROV-OBS-53-001 | Implement DSSE/SLSA models with deterministic serializer + test vectors. |
+| Sprint 54 | Observability & Forensics Phase 5 – Provenance & Verification | src/Provenance/StellaOps.Provenance.Attestation/TASKS.md | TODO | Provenance Guild | PROV-OBS-53-002 | Build signer abstraction (cosign/KMS/offline) with policy enforcement. |
+| Sprint 54 | Observability & Forensics Phase 5 – Provenance & Verification | src/Provenance/StellaOps.Provenance.Attestation/TASKS.md | TODO | Provenance Guild | PROV-OBS-54-001 | Deliver verification library validating DSSE signatures + Merkle roots. |
+| Sprint 54 | Observability & Forensics Phase 5 – Provenance & Verification | src/Provenance/StellaOps.Provenance.Attestation/TASKS.md | TODO | Provenance Guild, DevEx/CLI Guild | PROV-OBS-54-002 | Package provenance verification tool for CLI integration and offline use. |
+| Sprint 54 | Observability & Forensics Phase 5 – Provenance & Verification | src/TaskRunner/StellaOps.TaskRunner/TASKS.md | TODO | Task Runner Guild | TASKRUN-OBS-54-001 | Generate pack run attestations and link to timeline/evidence. |
+| Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | docs/TASKS.md | TODO | Docs Guild | DOCS-RUNBOOK-55-001 | Publish `/docs/runbooks/incidents.md` covering activation, escalation, and verification checklist. |
+| Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-OBS-55-001 | Automate incident mode activation via SLO alerts, retention override management, and reset job. |
+| Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | src/Authority/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-OBS-55-001 | Enforce `obs:incident` scope with fresh-auth requirement and audit export for toggles. |
+| Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-OBS-55-001 | Ship `stella obs incident-mode` commands with safeguards and audit logging. |
+| Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-OBS-55-001 | Increase sampling and raw payload retention under incident mode with redaction guards. |
+| Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-WEB-OBS-55-001 | Provide incident mode toggle endpoints and propagate to services. |
+| Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | src/EvidenceLocker/StellaOps.EvidenceLocker/TASKS.md | TODO | Evidence Locker Guild | EVID-OBS-55-001 | Extend evidence retention + activation events for incident windows. |
+| Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-OBS-55-001 | Enable incident sampling + retention overrides for VEX pipelines. |
+| Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | src/Excititor/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-WEB-OBS-55-001 | Add incident mode APIs for VEX services with audit + guardrails. |
+| Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | src/ExportCenter/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-OBS-55-001 | Increase export telemetry + debug retention during incident mode and emit events. |
+| Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | src/Findings/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-OBS-55-001 | Extend retention and diagnostics capture during incident mode. |
+| Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | src/Notifier/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-OBS-55-001 | Send incident mode start/stop notifications with quick links to evidence/timeline. |
+| Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | src/Orchestrator/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-OBS-55-001 | Increase telemetry + evidence capture during incident mode and emit activation events. |
+| Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-OBS-55-001 | Capture full rule traces + retention bump on incident activation with timeline events. |
+| Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | src/TaskRunner/StellaOps.TaskRunner/TASKS.md | TODO | Task Runner Guild | TASKRUN-OBS-55-001 | Capture extra debug data + notifications for incident mode runs. |
+| Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | src/Telemetry/StellaOps.Telemetry.Core/TASKS.md | TODO | Observability Guild | TELEMETRY-OBS-55-001 | Implement incident mode sampling toggle API with activation audit trail. |
+| Sprint 55 | Observability & Forensics Phase 6 – Incident Mode | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-OBS-55-001 | Deliver `/obs/incident-mode` control endpoints with audit + retention previews. |
+| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | docs/TASKS.md | TODO | Docs Guild | DOCS-AIRGAP-56-001 | Publish `/docs/airgap/overview.md`. |
+| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | docs/TASKS.md | TODO | Docs Guild | DOCS-AIRGAP-56-002 | Document sealing and egress controls. |
| +| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | docs/TASKS.md | TODO | Docs Guild | DOCS-AIRGAP-56-003 | Publish mirror bundles guide. | +| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | docs/TASKS.md | TODO | Docs Guild | DOCS-AIRGAP-56-004 | Publish bootstrap pack guide. | +| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-AIRGAP-56-001 | Publish deny-all egress policies and verification script for sealed environments. | +| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-AIRGAP-56-002 | Provide bundle staging/import scripts for air-gapped object stores. | +| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-AIRGAP-56-003 | Build Bootstrap Pack pipeline bundling images/charts with checksums. | +| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | src/AirGap/StellaOps.AirGap.Controller/TASKS.md | TODO | AirGap Controller Guild | AIRGAP-CTL-56-001 | Implement sealing state machine, persistence, and RBAC scopes for air-gapped status. | +| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | src/AirGap/StellaOps.AirGap.Controller/TASKS.md | TODO | AirGap Controller Guild | AIRGAP-CTL-56-002 | Expose seal/status APIs with policy hash validation and staleness placeholders. | +| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | src/AirGap/StellaOps.AirGap.Importer/TASKS.md | TODO | AirGap Importer Guild | AIRGAP-IMP-56-001 | Implement DSSE/TUF/Merkle verification helpers. | +| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | src/AirGap/StellaOps.AirGap.Importer/TASKS.md | TODO | AirGap Importer Guild | AIRGAP-IMP-56-002 | Enforce root rotation policy for bundles. | +| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | src/AirGap/StellaOps.AirGap.Policy/TASKS.md | TODO | AirGap Policy Guild | AIRGAP-POL-56-001 | Ship `EgressPolicy` facade with sealed/unsealed enforcement and remediation errors. | +| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | src/AirGap/StellaOps.AirGap.Policy/TASKS.md | TODO | AirGap Policy Guild | AIRGAP-POL-56-002 | Deliver Roslyn analyzer blocking raw HTTP clients; wire into CI. | +| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-AIRGAP-56-001 | Implement mirror create/verify and airgap verify commands. | +| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-OBS-50-001 | Ensure telemetry propagation for sealed logging. | +| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-AIRGAP-56-001 | Add mirror ingestion adapters preserving source metadata. | +| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-AIRGAP-56-001 | Add VEX mirror ingestion adapters. | +| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | src/ExportCenter/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-AIRGAP-56-001 | Extend export center to build mirror bundles. 
| +| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | src/Mirror/StellaOps.Mirror.Creator/TASKS.md | TODO | Mirror Creator Guild | MIRROR-CRT-56-001 | Build deterministic bundle assembler (advisories/vex/policy). | +| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | src/Orchestrator/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-AIRGAP-56-001 | Validate jobs against sealed-mode restrictions. | +| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-AIRGAP-56-001 | Accept policy packs from bundles with provenance tracking. | +| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | src/TaskRunner/StellaOps.TaskRunner/TASKS.md | TODO | Task Runner Guild | TASKRUN-AIRGAP-56-001 | Enforce sealed-mode plan validation for network calls. | +| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | src/Telemetry/StellaOps.Telemetry.Core/TASKS.md | TODO | Observability Guild | TELEMETRY-OBS-56-001 | (Carry) Extend telemetry core with sealed-mode hooks before integration. | +| Sprint 56 | Air-Gapped Mode Phase 1 – Sealing Foundations | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-OBS-56-001 | Extend telemetry core usage for sealed-mode status surfaces (seal/unseal dashboards, drift signals). | +| Sprint 57 | Air-Gapped Mode Phase 2 – Mirror Bundles & Imports | docs/TASKS.md | TODO | Docs Guild | DOCS-AIRGAP-57-001 | Publish staleness/time doc. | +| Sprint 57 | Air-Gapped Mode Phase 2 – Mirror Bundles & Imports | docs/TASKS.md | TODO | Docs Guild | DOCS-AIRGAP-57-002 | Publish console airgap doc. | +| Sprint 57 | Air-Gapped Mode Phase 2 – Mirror Bundles & Imports | docs/TASKS.md | TODO | Docs Guild | DOCS-AIRGAP-57-003 | Publish CLI airgap doc. | +| Sprint 57 | Air-Gapped Mode Phase 2 – Mirror Bundles & Imports | docs/TASKS.md | TODO | Docs Guild | DOCS-AIRGAP-57-004 | Publish airgap operations runbook. | +| Sprint 57 | Air-Gapped Mode Phase 2 – Mirror Bundles & Imports | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-AIRGAP-57-001 | Automate mirror bundle creation with approvals. | +| Sprint 57 | Air-Gapped Mode Phase 2 – Mirror Bundles & Imports | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-AIRGAP-57-002 | Run sealed-mode CI suite enforcing zero egress. | +| Sprint 57 | Air-Gapped Mode Phase 2 – Mirror Bundles & Imports | src/AirGap/StellaOps.AirGap.Importer/TASKS.md | TODO | AirGap Importer Guild | AIRGAP-IMP-57-001 | Implement bundle catalog with RLS + migrations. | +| Sprint 57 | Air-Gapped Mode Phase 2 – Mirror Bundles & Imports | src/AirGap/StellaOps.AirGap.Importer/TASKS.md | TODO | AirGap Importer Guild | AIRGAP-IMP-57-002 | Load artifacts into object store with checksum verification. | +| Sprint 57 | Air-Gapped Mode Phase 2 – Mirror Bundles & Imports | src/AirGap/StellaOps.AirGap.Policy/TASKS.md | TODO | AirGap Policy Guild | AIRGAP-POL-57-001 | Adopt EgressPolicy in core services. | +| Sprint 57 | Air-Gapped Mode Phase 2 – Mirror Bundles & Imports | src/AirGap/StellaOps.AirGap.Policy/TASKS.md | TODO | AirGap Policy Guild | AIRGAP-POL-57-002 | Enforce Task Runner job plan validation. | +| Sprint 57 | Air-Gapped Mode Phase 2 – Mirror Bundles & Imports | src/AirGap/StellaOps.AirGap.Time/TASKS.md | TODO | AirGap Time Guild | AIRGAP-TIME-57-001 | Parse signed time tokens and expose normalized anchors. 
| +| Sprint 57 | Air-Gapped Mode Phase 2 – Mirror Bundles & Imports | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-AIRGAP-57-001 | Complete airgap import CLI with diff preview. | +| Sprint 57 | Air-Gapped Mode Phase 2 – Mirror Bundles & Imports | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-AIRGAP-57-002 | Ship seal/status CLI commands. | +| Sprint 57 | Air-Gapped Mode Phase 2 – Mirror Bundles & Imports | src/ExportCenter/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-AIRGAP-56-002 | Deliver bootstrap pack artifacts. | +| Sprint 57 | Air-Gapped Mode Phase 2 – Mirror Bundles & Imports | src/Mirror/StellaOps.Mirror.Creator/TASKS.md | TODO | Mirror Creator Guild | MIRROR-CRT-57-001 | Add OCI image support to mirror bundles. | +| Sprint 57 | Air-Gapped Mode Phase 2 – Mirror Bundles & Imports | src/Mirror/StellaOps.Mirror.Creator/TASKS.md | TODO | Mirror Creator Guild | MIRROR-CRT-57-002 | Embed signed time anchors in bundles. | +| Sprint 57 | Air-Gapped Mode Phase 2 – Mirror Bundles & Imports | src/Notifier/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-AIRGAP-56-001 | Lock notifications to enclave-safe channels. | +| Sprint 57 | Air-Gapped Mode Phase 2 – Mirror Bundles & Imports | src/Orchestrator/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-AIRGAP-56-002 | Integrate sealing status + staleness into scheduling. | +| Sprint 57 | Air-Gapped Mode Phase 2 – Mirror Bundles & Imports | src/TaskRunner/StellaOps.TaskRunner/TASKS.md | TODO | Task Runner Guild | TASKRUN-AIRGAP-56-002 | Provide bundle ingestion helper steps. | +| Sprint 58 | Air-Gapped Mode Phase 3 – Staleness & Enforcement | docs/TASKS.md | TODO | Docs Guild | DOCS-AIRGAP-58-001 | Publish degradation matrix doc. | +| Sprint 58 | Air-Gapped Mode Phase 3 – Staleness & Enforcement | docs/TASKS.md | TODO | Docs Guild | DOCS-AIRGAP-58-002 | Update trust & signing doc for DSSE/TUF roots. | +| Sprint 58 | Air-Gapped Mode Phase 3 – Staleness & Enforcement | docs/TASKS.md | TODO | Docs Guild | DOCS-AIRGAP-58-003 | Publish developer airgap contracts doc. | +| Sprint 58 | Air-Gapped Mode Phase 3 – Staleness & Enforcement | docs/TASKS.md | TODO | Docs Guild | DOCS-AIRGAP-58-004 | Document portable evidence workflows. | +| Sprint 58 | Air-Gapped Mode Phase 3 – Staleness & Enforcement | src/AirGap/StellaOps.AirGap.Controller/TASKS.md | TODO | AirGap Controller Guild | AIRGAP-CTL-58-001 | Persist time anchor data and expose drift metrics. | +| Sprint 58 | Air-Gapped Mode Phase 3 – Staleness & Enforcement | src/AirGap/StellaOps.AirGap.Policy/TASKS.md | TODO | AirGap Policy Guild | AIRGAP-POL-58-001 | Disable remote observability exporters in sealed mode. | +| Sprint 58 | Air-Gapped Mode Phase 3 – Staleness & Enforcement | src/AirGap/StellaOps.AirGap.Policy/TASKS.md | TODO | AirGap Policy Guild | AIRGAP-POL-58-002 | Add CLI sealed-mode guard. | +| Sprint 58 | Air-Gapped Mode Phase 3 – Staleness & Enforcement | src/AirGap/StellaOps.AirGap.Time/TASKS.md | TODO | AirGap Time Guild | AIRGAP-TIME-58-001 | Compute drift/staleness metrics and surface via controller status. | +| Sprint 58 | Air-Gapped Mode Phase 3 – Staleness & Enforcement | src/AirGap/StellaOps.AirGap.Time/TASKS.md | TODO | AirGap Time Guild | AIRGAP-TIME-58-002 | Emit notifications/events for staleness budgets. 
| +| Sprint 58 | Air-Gapped Mode Phase 3 – Staleness & Enforcement | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-AIRGAP-58-001 | Ship portable evidence export helper. | +| Sprint 58 | Air-Gapped Mode Phase 3 – Staleness & Enforcement | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-AIRGAP-57-002 | Annotate advisories with staleness metadata. | +| Sprint 58 | Air-Gapped Mode Phase 3 – Staleness & Enforcement | src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-AIRGAP-57-002 | Annotate VEX statements with staleness metadata. | +| Sprint 58 | Air-Gapped Mode Phase 3 – Staleness & Enforcement | src/ExportCenter/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-AIRGAP-57-001 | Add portable evidence export integration. | +| Sprint 58 | Air-Gapped Mode Phase 3 – Staleness & Enforcement | src/Notifier/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-AIRGAP-57-001 | Notify on drift/staleness thresholds. | +| Sprint 58 | Air-Gapped Mode Phase 3 – Staleness & Enforcement | src/Orchestrator/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-AIRGAP-58-001 | Link import/export jobs to timeline/evidence. | +| Sprint 58 | Air-Gapped Mode Phase 3 – Staleness & Enforcement | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-AIRGAP-57-002 | Show degradation fallback info in explain traces. | +| Sprint 58 | Air-Gapped Mode Phase 3 – Staleness & Enforcement | src/TaskRunner/StellaOps.TaskRunner/TASKS.md | TODO | Task Runner Guild | TASKRUN-AIRGAP-58-001 | Capture import job evidence transcripts. | +| Sprint 59 | Air-Gapped Mode Phase 4 – Deterministic Jobs & Enforcement | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-WEB-AIRGAP-57-001 | Map sealed-mode violations to standard errors. | +| Sprint 59 | Air-Gapped Mode Phase 4 – Deterministic Jobs & Enforcement | src/Excititor/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-WEB-AIRGAP-57-001 | Map sealed-mode violations to standard errors. | +| Sprint 59 | Air-Gapped Mode Phase 4 – Deterministic Jobs & Enforcement | src/ExportCenter/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-AIRGAP-58-001 | Emit notifications/timeline for bundle readiness. | +| Sprint 59 | Air-Gapped Mode Phase 4 – Deterministic Jobs & Enforcement | src/Findings/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-AIRGAP-56-002 | Enforce staleness thresholds for findings exports. | +| Sprint 59 | Air-Gapped Mode Phase 4 – Deterministic Jobs & Enforcement | src/Notifier/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-AIRGAP-58-001 | Notify on portable evidence exports. | +| Sprint 59 | Air-Gapped Mode Phase 4 – Deterministic Jobs & Enforcement | src/Orchestrator/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-AIRGAP-57-001 | Automate mirror bundle job scheduling with audit provenance. | +| Sprint 59 | Air-Gapped Mode Phase 4 – Deterministic Jobs & Enforcement | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-AIRGAP-57-001 | Enforce sealed-mode guardrails inside evaluation engine. 
| +| Sprint 59 | Air-Gapped Mode Phase 4 – Deterministic Jobs & Enforcement | src/TaskRunner/StellaOps.TaskRunner/TASKS.md | TODO | Task Runner Guild | TASKRUN-AIRGAP-57-001 | Block execution when seal state mismatched; emit timeline events. | +| Sprint 60 | Air-Gapped Mode Phase 5 – Evidence Portability & UX | docs/TASKS.md | TODO | Docs Guild | DOCS-AIRGAP-58-004 | Document portable evidence workflows. | +| Sprint 60 | Air-Gapped Mode Phase 5 – Evidence Portability & UX | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-AIRGAP-58-001 | Finalize portable evidence CLI workflow with verification. | +| Sprint 60 | Air-Gapped Mode Phase 5 – Evidence Portability & UX | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-WEB-AIRGAP-58-001 | Emit timeline events for bundle imports. | +| Sprint 60 | Air-Gapped Mode Phase 5 – Evidence Portability & UX | src/EvidenceLocker/StellaOps.EvidenceLocker/TASKS.md | TODO | Evidence Locker Guild | EVID-OBS-60-001 | Deliver portable evidence export flow for sealed environments with checksum manifest and offline verification script. | +| Sprint 60 | Air-Gapped Mode Phase 5 – Evidence Portability & UX | src/Excititor/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-WEB-AIRGAP-58-001 | Emit timeline events for VEX bundle imports. | +| Sprint 60 | Air-Gapped Mode Phase 5 – Evidence Portability & UX | src/Findings/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-AIRGAP-57-001 | Link findings to portable evidence bundles. | +| Sprint 60 | Air-Gapped Mode Phase 5 – Evidence Portability & UX | src/Notifier/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-AIRGAP-58-001 | (Carry) Portable evidence notifications. | +| Sprint 60 | Air-Gapped Mode Phase 5 – Evidence Portability & UX | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-AIRGAP-58-001 | Notify on stale policy packs and guide remediation. | +| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | docs/TASKS.md | TODO | Docs Guild | DOCS-OAS-61-001 | Publish `/docs/api/overview.md`. | +| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | docs/TASKS.md | TODO | Docs Guild | DOCS-OAS-61-002 | Publish `/docs/api/conventions.md`. | +| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | docs/TASKS.md | TODO | Docs Guild | DOCS-OAS-61-003 | Publish `/docs/api/versioning.md`. | +| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-OAS-61-001 | Add OAS lint/validation/diff stages to CI. | +| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/Api/StellaOps.Api.Governance/TASKS.md | TODO | API Governance Guild | APIGOV-61-001 | Configure lint rules and CI enforcement. | +| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/Api/StellaOps.Api.Governance/TASKS.md | TODO | API Governance Guild | APIGOV-61-002 | Enforce example coverage in CI. | +| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/Api/StellaOps.Api.OpenApi/TASKS.md | TODO | API Contracts Guild | OAS-61-001 | Scaffold per-service OpenAPI skeletons with shared components. | +| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/Api/StellaOps.Api.OpenApi/TASKS.md | TODO | API Contracts Guild | OAS-61-002 | Build aggregate composer and integrate into CI. 
| +| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/Authority/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-OAS-61-001 | Document Authority authentication APIs in OAS. | +| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/Authority/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-OAS-61-002 | Provide Authority discovery endpoint. | +| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-OAS-61-001 | Update advisory OAS coverage. | +| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-OAS-61-002 | Populate advisory examples. | +| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-WEB-OAS-61-001 | Implement Concelier discovery endpoint. | +| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-WEB-OAS-61-002 | Standardize error envelope. | +| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-OAS-61-001 | Update VEX OAS coverage. | +| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-OAS-61-002 | Provide VEX examples. | +| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/Excititor/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-WEB-OAS-61-001 | Implement discovery endpoint. | +| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/Excititor/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-WEB-OAS-61-002 | Migrate errors to standard envelope. | +| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/ExportCenter/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-OAS-61-001 | Update Exporter spec coverage. | +| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/ExportCenter/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-OAS-61-002 | Implement Exporter discovery endpoint. | +| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/Findings/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-OAS-61-001 | Expand Findings Ledger spec coverage. | +| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/Findings/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-OAS-61-002 | Provide ledger discovery endpoint. | +| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/Notifier/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-OAS-61-001 | Update notifier spec coverage. | +| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/Notifier/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-OAS-61-002 | Implement notifier discovery endpoint. | +| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/Orchestrator/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-OAS-61-001 | Extend Orchestrator spec coverage. 
| +| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/Orchestrator/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-OAS-61-002 | Provide orchestrator discovery endpoint. | +| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/TaskRunner/StellaOps.TaskRunner/TASKS.md | TODO | Task Runner Guild | TASKRUN-OAS-61-001 | Document Task Runner APIs in OAS. | +| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/TaskRunner/StellaOps.TaskRunner/TASKS.md | TODO | Task Runner Guild | TASKRUN-OAS-61-002 | Expose Task Runner discovery endpoint. | +| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-OAS-61-001 | Implement gateway discovery endpoint. | +| Sprint 61 | SDKs & OpenAPI Phase 1 – Contract Foundations | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-OAS-61-002 | Standardize error envelope across gateway. | +| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | docs/TASKS.md | TODO | Docs Guild | DOCS-CONTRIB-62-001 | Publish API contracts contributing guide. | +| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | docs/TASKS.md | TODO | Docs Guild | DOCS-DEVPORT-62-001 | Document dev portal publishing. | +| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | docs/TASKS.md | TODO | Docs Guild | DOCS-OAS-62-001 | Deploy `/docs/api/reference/` generated site. | +| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | docs/TASKS.md | TODO | Docs Guild | DOCS-SDK-62-001 | Publish SDK overview + language guides. | +| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | docs/TASKS.md | TODO | Docs Guild | DOCS-SEC-62-001 | Update auth scopes documentation. | +| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | docs/TASKS.md | TODO | Docs Guild | DOCS-TEST-62-001 | Publish contract testing doc. | +| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | src/Api/StellaOps.Api.Governance/TASKS.md | TODO | API Governance Guild | APIGOV-62-001 | Implement compatibility diff tool. | +| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | src/Api/StellaOps.Api.OpenApi/TASKS.md | TODO | API Contracts Guild | OAS-62-001 | Populate examples for top endpoints. | +| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | src/Authority/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-OAS-62-001 | Provide SDK auth helpers/tests. | +| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-SDK-62-001 | Migrate CLI to official SDK. | +| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-SDK-62-002 | Update CLI error handling for new envelope. | +| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-OAS-62-001 | Add SDK smoke tests for advisory APIs. | +| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | TODO | Concelier WebService Guild | CONCELIER-WEB-OAS-62-001 | Add advisory API examples. | +| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | src/DevPortal/StellaOps.DevPortal.Site/TASKS.md | TODO | Developer Portal Guild | DEVPORT-62-001 | Build static generator with nav/search. 
| +| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | src/DevPortal/StellaOps.DevPortal.Site/TASKS.md | TODO | Developer Portal Guild | DEVPORT-62-002 | Add schema viewer, examples, version selector. | +| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-OAS-62-001 | Add SDK tests for VEX APIs. | +| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | src/Excititor/StellaOps.Excititor.WebService/TASKS.md | TODO | Excititor WebService Guild | EXCITITOR-WEB-OAS-62-001 | Provide VEX API examples. | +| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | src/ExportCenter/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-OAS-62-001 | Ensure SDK streaming helpers for exports. | +| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | src/Findings/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-OAS-62-001 | Provide SDK tests for ledger APIs. | +| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | src/Notifier/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-OAS-62-001 | Provide SDK examples for notifier APIs. | +| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | src/Sdk/StellaOps.Sdk.Generator/TASKS.md | TODO | SDK Generator Guild | SDKGEN-62-001 | Establish generator framework. | +| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | src/Sdk/StellaOps.Sdk.Generator/TASKS.md | TODO | SDK Generator Guild | SDKGEN-62-002 | Implement shared post-processing helpers. | +| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | src/TaskRunner/StellaOps.TaskRunner/TASKS.md | TODO | Task Runner Guild | TASKRUN-OAS-62-001 | Provide SDK examples for pack runs. | +| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-OAS-62-001 | Align pagination/idempotency behaviors. | +| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | test/contract/TASKS.md | TODO | Contract Testing Guild | CONTR-62-001 | Generate mock server fixtures. | +| Sprint 62 | SDKs & OpenAPI Phase 2 – Examples & Portal | test/contract/TASKS.md | TODO | Contract Testing Guild | CONTR-62-002 | Integrate mock server into CI. | +| Sprint 63 | SDKs & OpenAPI Phase 3 – SDK Alpha & Try-It | docs/TASKS.md | TODO | Docs Guild | DOCS-TEST-62-001 | (Carry) ensure contract testing doc final. | +| Sprint 63 | SDKs & OpenAPI Phase 3 – SDK Alpha & Try-It | src/Api/StellaOps.Api.Governance/TASKS.md | TODO | API Governance Guild | APIGOV-63-001 | Integrate compatibility diff gating. | +| Sprint 63 | SDKs & OpenAPI Phase 3 – SDK Alpha & Try-It | src/Api/StellaOps.Api.OpenApi/TASKS.md | TODO | API Contracts Guild | OAS-63-001 | Compatibility diff support. | +| Sprint 63 | SDKs & OpenAPI Phase 3 – SDK Alpha & Try-It | src/Api/StellaOps.Api.OpenApi/TASKS.md | TODO | API Contracts Guild | OAS-63-002 | Define discovery schema metadata. | +| Sprint 63 | SDKs & OpenAPI Phase 3 – SDK Alpha & Try-It | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-SDK-63-001 | Add CLI spec download command. | +| Sprint 63 | SDKs & OpenAPI Phase 3 – SDK Alpha & Try-It | src/DevPortal/StellaOps.DevPortal.Site/TASKS.md | TODO | Developer Portal Guild | DEVPORT-63-001 | Add Try-It console. 
| +| Sprint 63 | SDKs & OpenAPI Phase 3 – SDK Alpha & Try-It | src/DevPortal/StellaOps.DevPortal.Site/TASKS.md | TODO | Developer Portal Guild | DEVPORT-63-002 | Embed SDK snippets/quick starts. | +| Sprint 63 | SDKs & OpenAPI Phase 3 – SDK Alpha & Try-It | src/Sdk/StellaOps.Sdk.Generator/TASKS.md | TODO | SDK Generator Guild | SDKGEN-63-001 | Release TypeScript SDK alpha. | +| Sprint 63 | SDKs & OpenAPI Phase 3 – SDK Alpha & Try-It | src/Sdk/StellaOps.Sdk.Generator/TASKS.md | TODO | SDK Generator Guild | SDKGEN-63-002 | Release Python SDK alpha. | +| Sprint 63 | SDKs & OpenAPI Phase 3 – SDK Alpha & Try-It | src/Sdk/StellaOps.Sdk.Generator/TASKS.md | TODO | SDK Generator Guild | SDKGEN-63-003 | Release Go SDK alpha. | +| Sprint 63 | SDKs & OpenAPI Phase 3 – SDK Alpha & Try-It | src/Sdk/StellaOps.Sdk.Generator/TASKS.md | TODO | SDK Generator Guild | SDKGEN-63-004 | Release Java SDK alpha. | +| Sprint 63 | SDKs & OpenAPI Phase 3 – SDK Alpha & Try-It | src/Sdk/StellaOps.Sdk.Release/TASKS.md | TODO | SDK Release Guild | SDKREL-63-001 | Configure SDK release pipelines. | +| Sprint 63 | SDKs & OpenAPI Phase 3 – SDK Alpha & Try-It | src/Sdk/StellaOps.Sdk.Release/TASKS.md | TODO | SDK Release Guild | SDKREL-63-002 | Automate changelogs from OAS diffs. | +| Sprint 63 | SDKs & OpenAPI Phase 3 – SDK Alpha & Try-It | test/contract/TASKS.md | TODO | Contract Testing Guild | CONTR-63-001 | Build replay harness for drift detection. | +| Sprint 63 | SDKs & OpenAPI Phase 3 – SDK Alpha & Try-It | test/contract/TASKS.md | TODO | Contract Testing Guild | CONTR-63-002 | Emit contract testing metrics. | +| Sprint 64 | SDKs & OpenAPI Phase 4 – Harden & Offline Bundles | docs/TASKS.md | TODO | Docs Guild | DOCS-AIRGAP-DEVPORT-64-001 | Document devportal offline usage. | +| Sprint 64 | SDKs & OpenAPI Phase 4 – Harden & Offline Bundles | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-DEVPORT-63-001 | Automate developer portal pipeline. | +| Sprint 64 | SDKs & OpenAPI Phase 4 – Harden & Offline Bundles | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-DEVPORT-64-001 | Schedule offline bundle builds. | +| Sprint 64 | SDKs & OpenAPI Phase 4 – Harden & Offline Bundles | src/DevPortal/StellaOps.DevPortal.Site/TASKS.md | TODO | Developer Portal Guild | DEVPORT-64-001 | Offline portal build. | +| Sprint 64 | SDKs & OpenAPI Phase 4 – Harden & Offline Bundles | src/DevPortal/StellaOps.DevPortal.Site/TASKS.md | TODO | Developer Portal Guild | DEVPORT-64-002 | Add accessibility/performance checks. | +| Sprint 64 | SDKs & OpenAPI Phase 4 – Harden & Offline Bundles | src/ExportCenter/StellaOps.ExportCenter.DevPortalOffline/TASKS.md | TODO | DevPortal Offline Guild | DVOFF-64-001 | Implement devportal offline export job. | +| Sprint 64 | SDKs & OpenAPI Phase 4 – Harden & Offline Bundles | src/ExportCenter/StellaOps.ExportCenter.DevPortalOffline/TASKS.md | TODO | DevPortal Offline Guild | DVOFF-64-002 | Provide verification CLI. | +| Sprint 64 | SDKs & OpenAPI Phase 4 – Harden & Offline Bundles | src/Sdk/StellaOps.Sdk.Generator/TASKS.md | TODO | SDK Generator Guild | SDKGEN-64-001 | Migrate CLI to SDK. | +| Sprint 64 | SDKs & OpenAPI Phase 4 – Harden & Offline Bundles | src/Sdk/StellaOps.Sdk.Generator/TASKS.md | TODO | SDK Generator Guild | SDKGEN-64-002 | Integrate SDKs into Console. | +| Sprint 64 | SDKs & OpenAPI Phase 4 – Harden & Offline Bundles | src/Sdk/StellaOps.Sdk.Release/TASKS.md | TODO | SDK Release Guild | SDKREL-64-001 | Hook SDK releases to Notifications. 
| +| Sprint 64 | SDKs & OpenAPI Phase 4 – Harden & Offline Bundles | src/Sdk/StellaOps.Sdk.Release/TASKS.md | TODO | SDK Release Guild | SDKREL-64-002 | Produce devportal offline bundle. | +| Sprint 65 | SDKs & OpenAPI Phase 5 – Deprecation & Notifications | docs/TASKS.md | TODO | Docs Guild | DOCS-AIRGAP-DEVPORT-64-001 | (Carry) ensure offline doc published; update as necessary. | +| Sprint 65 | SDKs & OpenAPI Phase 5 – Deprecation & Notifications | src/Api/StellaOps.Api.Governance/TASKS.md | TODO | API Governance Guild | APIGOV-63-001 | (Carry) compatibility gating monitoring. | +| Sprint 65 | SDKs & OpenAPI Phase 5 – Deprecation & Notifications | src/Authority/StellaOps.Authority/TASKS.md | TODO | Authority Core & Security Guild | AUTH-OAS-63-001 | Deprecation headers for auth endpoints. | +| Sprint 65 | SDKs & OpenAPI Phase 5 – Deprecation & Notifications | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-SDK-64-001 | SDK update awareness command. | +| Sprint 65 | SDKs & OpenAPI Phase 5 – Deprecation & Notifications | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-OAS-63-001 | Deprecation metadata for Concelier APIs. | +| Sprint 65 | SDKs & OpenAPI Phase 5 – Deprecation & Notifications | src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-OAS-63-001 | Deprecation metadata for VEX APIs. | +| Sprint 65 | SDKs & OpenAPI Phase 5 – Deprecation & Notifications | src/ExportCenter/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-OAS-63-001 | Deprecation headers for exporter APIs. | +| Sprint 65 | SDKs & OpenAPI Phase 5 – Deprecation & Notifications | src/Findings/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-OAS-63-001 | Deprecation headers for ledger APIs. | +| Sprint 65 | SDKs & OpenAPI Phase 5 – Deprecation & Notifications | src/Notifier/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-OAS-63-001 | Emit deprecation notifications. | +| Sprint 65 | SDKs & OpenAPI Phase 5 – Deprecation & Notifications | src/Orchestrator/StellaOps.Orchestrator/TASKS.md | TODO | Orchestrator Service Guild | ORCH-OAS-63-001 | Add orchestrator deprecation headers. | +| Sprint 65 | SDKs & OpenAPI Phase 5 – Deprecation & Notifications | src/Sdk/StellaOps.Sdk.Release/TASKS.md | TODO | SDK Release Guild | SDKREL-64-001 | Production rollout of notifications feed. | +| Sprint 65 | SDKs & OpenAPI Phase 5 – Deprecation & Notifications | src/TaskRunner/StellaOps.TaskRunner/TASKS.md | TODO | Task Runner Guild | TASKRUN-OAS-63-001 | Add Task Runner deprecation headers. | +| Sprint 65 | SDKs & OpenAPI Phase 5 – Deprecation & Notifications | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-OAS-63-001 | Implement deprecation headers in gateway. | +| Sprint 66 | Risk Profiles Phase 1 – Foundations | docs/TASKS.md | TODO | Docs Guild | DOCS-RISK-66-001 | Publish `/docs/risk/overview.md`. | +| Sprint 66 | Risk Profiles Phase 1 – Foundations | docs/TASKS.md | TODO | Docs Guild | DOCS-RISK-66-002 | Publish `/docs/risk/profiles.md`. | +| Sprint 66 | Risk Profiles Phase 1 – Foundations | docs/TASKS.md | TODO | Docs Guild | DOCS-RISK-66-003 | Publish `/docs/risk/factors.md`. | +| Sprint 66 | Risk Profiles Phase 1 – Foundations | docs/TASKS.md | TODO | Docs Guild | DOCS-RISK-66-004 | Publish `/docs/risk/formulas.md`. 
| +| Sprint 66 | Risk Profiles Phase 1 – Foundations | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-RISK-66-001 | Implement CLI profile management commands. | +| Sprint 66 | Risk Profiles Phase 1 – Foundations | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-RISK-66-002 | Implement CLI simulation command. | +| Sprint 66 | Risk Profiles Phase 1 – Foundations | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-RISK-66-001 | Expose CVSS/KEV provider data. | +| Sprint 66 | Risk Profiles Phase 1 – Foundations | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-RISK-66-002 | Provide fix availability signals. | +| Sprint 66 | Risk Profiles Phase 1 – Foundations | src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-RISK-66-001 | Supply VEX gating data to risk engine. | +| Sprint 66 | Risk Profiles Phase 1 – Foundations | src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-RISK-66-002 | Provide reachability inputs. | +| Sprint 66 | Risk Profiles Phase 1 – Foundations | src/Findings/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-RISK-66-001 | Add risk scoring columns/indexes. | +| Sprint 66 | Risk Profiles Phase 1 – Foundations | src/Findings/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-RISK-66-002 | Implement deterministic scoring upserts. | +| Sprint 66 | Risk Profiles Phase 1 – Foundations | src/Notifier/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-RISK-66-001 | Create risk severity alert templates. | +| Sprint 66 | Risk Profiles Phase 1 – Foundations | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-RISK-66-003 | Integrate schema validation into Policy Engine. | +| Sprint 66 | Risk Profiles Phase 1 – Foundations | src/Policy/StellaOps.Policy.RiskProfile/TASKS.md | TODO | Risk Profile Schema Guild | POLICY-RISK-66-001 | Deliver RiskProfile schema + validators. | +| Sprint 66 | Risk Profiles Phase 1 – Foundations | src/Policy/StellaOps.Policy.RiskProfile/TASKS.md | TODO | Risk Profile Schema Guild | POLICY-RISK-66-002 | Implement inheritance/merge and hashing. | +| Sprint 66 | Risk Profiles Phase 1 – Foundations | src/Policy/__Libraries/StellaOps.Policy/TASKS.md | TODO | Policy Guild | POLICY-RISK-66-004 | Extend Policy libraries for RiskProfile handling. | +| Sprint 66 | Risk Profiles Phase 1 – Foundations | src/RiskEngine/StellaOps.RiskEngine/TASKS.md | TODO | Risk Engine Guild | RISK-ENGINE-66-001 | Scaffold risk engine queue/worker/registry. | +| Sprint 66 | Risk Profiles Phase 1 – Foundations | src/RiskEngine/StellaOps.RiskEngine/TASKS.md | TODO | Risk Engine Guild | RISK-ENGINE-66-002 | Implement transforms/gates/contribution calculator. | +| Sprint 66 | Risk Profiles Phase 1 – Foundations | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-RISK-66-001 | Expose risk API routing in gateway. | +| Sprint 66 | Risk Profiles Phase 1 – Foundations | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-RISK-66-002 | Handle explainability downloads. | +| Sprint 67 | Risk Profiles Phase 2 – Providers & Lifecycle | docs/TASKS.md | TODO | Docs Guild | DOCS-RISK-67-001 | Publish explainability doc. 
| +| Sprint 67 | Risk Profiles Phase 2 – Providers & Lifecycle | docs/TASKS.md | TODO | Docs Guild | DOCS-RISK-67-002 | Publish risk API doc. | +| Sprint 67 | Risk Profiles Phase 2 – Providers & Lifecycle | docs/TASKS.md | TODO | Docs Guild | DOCS-RISK-67-003 | Publish console risk UI doc. | +| Sprint 67 | Risk Profiles Phase 2 – Providers & Lifecycle | docs/TASKS.md | TODO | Docs Guild | DOCS-RISK-67-004 | Publish CLI risk doc. | +| Sprint 67 | Risk Profiles Phase 2 – Providers & Lifecycle | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-RISK-67-001 | Provide risk results query command. | +| Sprint 67 | Risk Profiles Phase 2 – Providers & Lifecycle | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | TODO | Concelier Core Guild | CONCELIER-RISK-67-001 | Add source consensus metrics. | +| Sprint 67 | Risk Profiles Phase 2 – Providers & Lifecycle | src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md | TODO | Excititor Core Guild | EXCITITOR-RISK-67-001 | Add VEX explainability metadata. | +| Sprint 67 | Risk Profiles Phase 2 – Providers & Lifecycle | src/Notifier/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-RISK-67-001 | Notify on profile publish/deprecate. | +| Sprint 67 | Risk Profiles Phase 2 – Providers & Lifecycle | src/Notifier/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-RISK-68-001 | (Prep) risk routing settings seeds. | +| Sprint 67 | Risk Profiles Phase 2 – Providers & Lifecycle | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-RISK-67-001 | Enqueue scoring on new findings. | +| Sprint 67 | Risk Profiles Phase 2 – Providers & Lifecycle | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-RISK-67-002 | Deliver profile lifecycle APIs. | +| Sprint 67 | Risk Profiles Phase 2 – Providers & Lifecycle | src/Policy/StellaOps.Policy.RiskProfile/TASKS.md | TODO | Risk Profile Schema Guild | POLICY-RISK-67-001 | Integrate profiles into policy store lifecycle. | +| Sprint 67 | Risk Profiles Phase 2 – Providers & Lifecycle | src/Policy/StellaOps.Policy.RiskProfile/TASKS.md | TODO | Risk Profile Schema Guild | POLICY-RISK-67-002 | Publish schema endpoint + validation tooling. | +| Sprint 67 | Risk Profiles Phase 2 – Providers & Lifecycle | src/Policy/__Libraries/StellaOps.Policy/TASKS.md | TODO | Policy Guild | POLICY-RISK-67-003 | Provide simulation orchestration APIs. | +| Sprint 67 | Risk Profiles Phase 2 – Providers & Lifecycle | src/RiskEngine/StellaOps.RiskEngine/TASKS.md | TODO | Risk Engine Guild | RISK-ENGINE-67-001 | Integrate CVSS/KEV providers. | +| Sprint 67 | Risk Profiles Phase 2 – Providers & Lifecycle | src/RiskEngine/StellaOps.RiskEngine/TASKS.md | TODO | Risk Engine Guild | RISK-ENGINE-67-002 | Integrate VEX gate provider. | +| Sprint 67 | Risk Profiles Phase 2 – Providers & Lifecycle | src/RiskEngine/StellaOps.RiskEngine/TASKS.md | TODO | Risk Engine Guild | RISK-ENGINE-67-003 | Add fix availability/criticality/exposure providers. | +| Sprint 67 | Risk Profiles Phase 2 – Providers & Lifecycle | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-RISK-67-001 | Provide risk status endpoint. | +| Sprint 68 | Risk Profiles Phase 3 – APIs & Ledger | docs/TASKS.md | TODO | Docs Guild | DOCS-RISK-68-001 | Publish risk bundle doc. | +| Sprint 68 | Risk Profiles Phase 3 – APIs & Ledger | docs/TASKS.md | TODO | Docs Guild | DOCS-RISK-68-002 | Update AOC invariants doc. 
| +| Sprint 68 | Risk Profiles Phase 3 – APIs & Ledger | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-RISK-68-001 | Add risk bundle verification command. | +| Sprint 68 | Risk Profiles Phase 3 – APIs & Ledger | src/Findings/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-RISK-67-001 | Provide scored findings query API. | +| Sprint 68 | Risk Profiles Phase 3 – APIs & Ledger | src/Findings/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-RISK-68-001 | Enable scored findings export. | +| Sprint 68 | Risk Profiles Phase 3 – APIs & Ledger | src/Notifier/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-RISK-68-001 | Configure risk notification routing UI/logic. | +| Sprint 68 | Risk Profiles Phase 3 – APIs & Ledger | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-RISK-68-001 | Ship simulation API endpoint. | +| Sprint 68 | Risk Profiles Phase 3 – APIs & Ledger | src/Policy/__Libraries/StellaOps.Policy/TASKS.md | TODO | Policy Guild | POLICY-RISK-68-002 | Support profile export/import. | +| Sprint 68 | Risk Profiles Phase 3 – APIs & Ledger | src/RiskEngine/StellaOps.RiskEngine/TASKS.md | TODO | Risk Engine Guild | RISK-ENGINE-68-001 | Persist scoring results & explanations. | +| Sprint 68 | Risk Profiles Phase 3 – APIs & Ledger | src/RiskEngine/StellaOps.RiskEngine/TASKS.md | TODO | Risk Engine Guild | RISK-ENGINE-68-002 | Expose jobs/results/explanations APIs. | +| Sprint 68 | Risk Profiles Phase 3 – APIs & Ledger | src/Web/StellaOps.Web/TASKS.md | TODO | BE-Base Platform Guild | WEB-RISK-68-001 | Emit severity transition events via gateway. | +| Sprint 69 | Risk Profiles Phase 4 – Simulation & Reporting | docs/TASKS.md | TODO | Docs Guild | DOCS-RISK-67-001..004 | (Carry) ensure docs updated from simulation release. | +| Sprint 69 | Risk Profiles Phase 4 – Simulation & Reporting | src/ExportCenter/StellaOps.ExportCenter.RiskBundles/TASKS.md | TODO | Risk Bundle Export Guild | RISK-BUNDLE-69-001 | Build risk bundle. | +| Sprint 69 | Risk Profiles Phase 4 – Simulation & Reporting | src/ExportCenter/StellaOps.ExportCenter.RiskBundles/TASKS.md | TODO | Risk Bundle Export Guild | RISK-BUNDLE-69-002 | Integrate bundle into pipelines. | +| Sprint 69 | Risk Profiles Phase 4 – Simulation & Reporting | src/ExportCenter/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-RISK-69-002 | Enable simulation report exports. | +| Sprint 69 | Risk Profiles Phase 4 – Simulation & Reporting | src/Notifier/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-RISK-66-001 | (Completion) finalize severity alert templates. | +| Sprint 69 | Risk Profiles Phase 4 – Simulation & Reporting | src/RiskEngine/StellaOps.RiskEngine/TASKS.md | TODO | Risk Engine Guild | RISK-ENGINE-69-001 | Implement simulation mode. | +| Sprint 69 | Risk Profiles Phase 4 – Simulation & Reporting | src/RiskEngine/StellaOps.RiskEngine/TASKS.md | TODO | Risk Engine Guild | RISK-ENGINE-69-002 | Add telemetry/metrics dashboards. | +| Sprint 70 | Risk Profiles Phase 5 – Air-Gap & Advanced Factors | docs/TASKS.md | TODO | Docs Guild | DOCS-RISK-68-001 | (Carry) finalize risk bundle doc after verification CLI. | +| Sprint 70 | Risk Profiles Phase 5 – Air-Gap & Advanced Factors | src/ExportCenter/StellaOps.ExportCenter.RiskBundles/TASKS.md | TODO | Risk Bundle Export Guild | RISK-BUNDLE-70-001 | Provide bundle verification CLI. 
| +| Sprint 70 | Risk Profiles Phase 5 – Air-Gap & Advanced Factors | src/ExportCenter/StellaOps.ExportCenter.RiskBundles/TASKS.md | TODO | Risk Bundle Export Guild | RISK-BUNDLE-70-002 | Publish documentation. | +| Sprint 70 | Risk Profiles Phase 5 – Air-Gap & Advanced Factors | src/ExportCenter/StellaOps.ExportCenter/TASKS.md | TODO | Exporter Service Guild | EXPORT-RISK-70-001 | Integrate risk bundle into offline kit. | +| Sprint 70 | Risk Profiles Phase 5 – Air-Gap & Advanced Factors | src/Notifier/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-RISK-68-001 | Finalize risk alert routing UI. | +| Sprint 70 | Risk Profiles Phase 5 – Air-Gap & Advanced Factors | src/RiskEngine/StellaOps.RiskEngine/TASKS.md | TODO | Risk Engine Guild | RISK-ENGINE-70-001 | Support offline provider bundles. | +| Sprint 70 | Risk Profiles Phase 5 – Air-Gap & Advanced Factors | src/RiskEngine/StellaOps.RiskEngine/TASKS.md | TODO | Risk Engine Guild | RISK-ENGINE-70-002 | Integrate runtime/reachability providers. | +| Sprint 71 | Risk Profiles Phase 6 – Quality & Performance | docs/TASKS.md | TODO | Docs Guild | DOCS-RISK-67-001..68-002 | Final editorial pass on risk documentation set. | +| Sprint 71 | Risk Profiles Phase 6 – Quality & Performance | src/Cli/StellaOps.Cli/TASKS.md | TODO | DevEx/CLI Guild | CLI-RISK-66-001..68-001 | Harden CLI commands with integration tests and error handling. | +| Sprint 71 | Risk Profiles Phase 6 – Quality & Performance | src/Findings/StellaOps.Findings.Ledger/TASKS.md | TODO | Findings Ledger Guild | LEDGER-RISK-69-001 | Finalize dashboards and alerts for scoring latency. | +| Sprint 71 | Risk Profiles Phase 6 – Quality & Performance | src/Notifier/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-RISK-68-001 | Tune routing/quiet hour dedupe for risk alerts. | +| Sprint 71 | Risk Profiles Phase 6 – Quality & Performance | src/RiskEngine/StellaOps.RiskEngine/TASKS.md | TODO | Risk Engine Guild | RISK-ENGINE-69-002 | Optimize performance, cache, and incremental scoring; validate SLOs. | +| Sprint 72 | Attestor Console Phase 1 – Foundations | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-ATTEST-73-001 | (Prep) align CI secrets for Attestor service. | +| Sprint 72 | Attestor Console Phase 1 – Foundations | src/Attestor/StellaOps.Attestor.Envelope/TASKS.md | TODO | Envelope Guild | ATTEST-ENVELOPE-72-001 | Implement DSSE canonicalization and hashing helpers. | +| Sprint 72 | Attestor Console Phase 1 – Foundations | src/Attestor/StellaOps.Attestor.Envelope/TASKS.md | TODO | Envelope Guild | ATTEST-ENVELOPE-72-002 | Support compact/expanded output and detached payloads. | +| Sprint 72 | Attestor Console Phase 1 – Foundations | src/Attestor/StellaOps.Attestor.Types/TASKS.md | TODO | Attestation Payloads Guild | ATTEST-TYPES-72-001 | Draft schemas for all attestation payload types. | +| Sprint 72 | Attestor Console Phase 1 – Foundations | src/Attestor/StellaOps.Attestor.Types/TASKS.md | TODO | Attestation Payloads Guild | ATTEST-TYPES-72-002 | Generate models/validators from schemas. | +| Sprint 72 | Attestor Console Phase 1 – Foundations | src/Attestor/StellaOps.Attestor/TASKS.md | TODO | Attestor Service Guild | ATTESTOR-72-001 | Scaffold attestor service skeleton. | +| Sprint 72 | Attestor Console Phase 1 – Foundations | src/Attestor/StellaOps.Attestor/TASKS.md | TODO | Attestor Service Guild | ATTESTOR-72-002 | Implement attestation store + storage integration. 
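ATTEST-ENVELOPE-72-001 above calls for DSSE canonicalization and hashing helpers. As a point of reference, DSSE v1 signs a pre-authentication encoding (PAE) of the payload type and payload rather than the raw bytes; the sketch below shows only that encoding, with helper names that are illustrative rather than the task's eventual API.

```csharp
using System;
using System.Text;

// Minimal sketch of the DSSE v1 pre-authentication encoding (PAE):
// PAE(type, body) = "DSSEv1" SP LEN(type) SP type SP LEN(body) SP body
// where LEN(x) is the ASCII decimal byte length of x.
public static class DssePae
{
    public static byte[] Encode(string payloadType, byte[] payload)
    {
        var typeBytes = Encoding.UTF8.GetBytes(payloadType);
        var header = Encoding.UTF8.GetBytes(
            $"DSSEv1 {typeBytes.Length} {payloadType} {payload.Length} ");

        var buffer = new byte[header.Length + payload.Length];
        Buffer.BlockCopy(header, 0, buffer, 0, header.Length);
        Buffer.BlockCopy(payload, 0, buffer, header.Length, payload.Length);
        return buffer;
    }
}
```

A signing helper would then hash or sign `DssePae.Encode(payloadType, payloadBytes)` and place the result in the envelope's `signatures` array.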
| +| Sprint 72 | Attestor Console Phase 1 – Foundations | src/__Libraries/StellaOps.Cryptography.Kms/TASKS.md | TODO | KMS Guild | KMS-72-001 | Implement KMS interface + file driver. | +| Sprint 73 | Attestor CLI Phase 2 – Signing & Policies | src/Cli/StellaOps.Cli/TASKS.md | TODO | CLI Attestor Guild | CLI-ATTEST-73-001 | Implement `stella attest sign` (payload selection, subject digest, key reference, output format) using official SDK transport. | +| Sprint 73 | Attestor CLI Phase 2 – Signing & Policies | src/Cli/StellaOps.Cli/TASKS.md | TODO | CLI Attestor Guild | CLI-ATTEST-73-002 | Implement `stella attest verify` with policy selection, explainability output, and JSON/table formatting. | +| Sprint 73 | Attestor Console Phase 2 – Signing & Policies | docs/TASKS.md | TODO | Docs Guild | DOCS-ATTEST-73-001 | Publish attestor overview. | +| Sprint 73 | Attestor Console Phase 2 – Signing & Policies | docs/TASKS.md | TODO | Docs Guild | DOCS-ATTEST-73-002 | Publish payload docs. | +| Sprint 73 | Attestor Console Phase 2 – Signing & Policies | docs/TASKS.md | TODO | Docs Guild | DOCS-ATTEST-73-003 | Publish policies doc. | +| Sprint 73 | Attestor Console Phase 2 – Signing & Policies | docs/TASKS.md | TODO | Docs Guild | DOCS-ATTEST-73-004 | Publish workflows doc. | +| Sprint 73 | Attestor Console Phase 2 – Signing & Policies | src/Attestor/StellaOps.Attestor.Envelope/TASKS.md | TODO | Envelope Guild | ATTEST-ENVELOPE-73-001 | Add signing/verification helpers with KMS integration. | +| Sprint 73 | Attestor Console Phase 2 – Signing & Policies | src/Attestor/StellaOps.Attestor.Types/TASKS.md | TODO | Attestation Payloads Guild | ATTEST-TYPES-73-001 | Create golden payload fixtures. | +| Sprint 73 | Attestor Console Phase 2 – Signing & Policies | src/Attestor/StellaOps.Attestor/TASKS.md | TODO | Attestor Service Guild | ATTESTOR-73-001 | Ship signing endpoint. | +| Sprint 73 | Attestor Console Phase 2 – Signing & Policies | src/Attestor/StellaOps.Attestor/TASKS.md | TODO | Attestor Service Guild | ATTESTOR-73-002 | Ship verification pipeline and reports. | +| Sprint 73 | Attestor Console Phase 2 – Signing & Policies | src/Attestor/StellaOps.Attestor/TASKS.md | TODO | Attestor Service Guild | ATTESTOR-73-003 | Implement list/fetch APIs. | +| Sprint 73 | Attestor Console Phase 2 – Signing & Policies | src/__Libraries/StellaOps.Cryptography.Kms/TASKS.md | TODO | KMS Guild | KMS-72-002 | CLI support for key import/export. | +| Sprint 73 | Attestor Console Phase 2 – Signing & Policies | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ATTEST-73-001 | Implement VerificationPolicy lifecycle. | +| Sprint 73 | Attestor Console Phase 2 – Signing & Policies | src/Policy/StellaOps.Policy.Engine/TASKS.md | TODO | Policy Guild | POLICY-ATTEST-73-002 | Surface policies in Policy Studio. | +| Sprint 74 | Attestor CLI Phase 3 – Transparency & Chain of Custody | src/Cli/StellaOps.Cli/TASKS.md | TODO | CLI Attestor Guild | CLI-ATTEST-74-001 | Implement `stella attest list` with filters (subject, type, issuer, scope) and pagination. | +| Sprint 74 | Attestor CLI Phase 3 – Transparency & Chain of Custody | src/Cli/StellaOps.Cli/TASKS.md | TODO | CLI Attestor Guild | CLI-ATTEST-74-002 | Implement `stella attest fetch` to download envelopes and payloads to disk. | +| Sprint 74 | Attestor Console Phase 3 – Transparency & Chain of Custody | docs/TASKS.md | TODO | Docs Guild | DOCS-ATTEST-74-001 | Publish keys & issuers doc. 
| +| Sprint 74 | Attestor Console Phase 3 – Transparency & Chain of Custody | docs/TASKS.md | TODO | Docs Guild | DOCS-ATTEST-74-002 | Publish transparency doc. | +| Sprint 74 | Attestor Console Phase 3 – Transparency & Chain of Custody | docs/TASKS.md | TODO | Docs Guild | DOCS-ATTEST-74-003 | Publish console attestor UI doc. | +| Sprint 74 | Attestor Console Phase 3 – Transparency & Chain of Custody | docs/TASKS.md | TODO | Docs Guild | DOCS-ATTEST-74-004 | Publish CLI attest doc. | +| Sprint 74 | Attestor Console Phase 3 – Transparency & Chain of Custody | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-ATTEST-74-001 | Deploy transparency witness infra. | +| Sprint 74 | Attestor Console Phase 3 – Transparency & Chain of Custody | src/Attestor/StellaOps.Attestor.Envelope/TASKS.md | TODO | Envelope Guild | ATTEST-ENVELOPE-73-002 | Run fuzz tests for envelope handling. | +| Sprint 74 | Attestor Console Phase 3 – Transparency & Chain of Custody | src/Attestor/StellaOps.Attestor.Verify/TASKS.md | TODO | Verification Guild | ATTEST-VERIFY-74-001 | Add telemetry for verification pipeline. | +| Sprint 74 | Attestor Console Phase 3 – Transparency & Chain of Custody | src/Attestor/StellaOps.Attestor.Verify/TASKS.md | TODO | Verification Guild | ATTEST-VERIFY-74-002 | Document verification explainability. | +| Sprint 74 | Attestor Console Phase 3 – Transparency & Chain of Custody | src/Attestor/StellaOps.Attestor/TASKS.md | TODO | Attestor Service Guild | ATTESTOR-74-001 | Integrate transparency witness client. | +| Sprint 74 | Attestor Console Phase 3 – Transparency & Chain of Custody | src/Attestor/StellaOps.Attestor/TASKS.md | TODO | Attestor Service Guild | ATTESTOR-74-002 | Implement bulk verification worker. | +| Sprint 74 | Attestor Console Phase 3 – Transparency & Chain of Custody | src/ExportCenter/StellaOps.ExportCenter.AttestationBundles/TASKS.md | TODO | Attestation Bundle Guild | EXPORT-ATTEST-74-001 | Build attestation bundle export job. | +| Sprint 74 | Attestor Console Phase 3 – Transparency & Chain of Custody | src/Notifier/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-ATTEST-74-001 | Add verification/key notifications. | +| Sprint 74 | Attestor Console Phase 3 – Transparency & Chain of Custody | src/Notifier/StellaOps.Notifier/TASKS.md | TODO | Notifications Service Guild | NOTIFY-ATTEST-74-002 | Notify key rotation/revocation. | +| Sprint 75 | Attestor CLI Phase 4 – Air Gap & Bulk | src/Cli/StellaOps.Cli/TASKS.md | TODO | CLI Attestor Guild, Export Guild | CLI-ATTEST-75-002 | Add support for building/verifying attestation bundles in CLI. | +| Sprint 75 | Attestor Console Phase 4 – Air Gap & Bulk | docs/TASKS.md | TODO | Docs Guild | DOCS-ATTEST-75-001 | Publish attestor airgap doc. | +| Sprint 75 | Attestor Console Phase 4 – Air Gap & Bulk | docs/TASKS.md | TODO | Docs Guild | DOCS-ATTEST-75-002 | Update AOC invariants for attestations. | +| Sprint 75 | Attestor Console Phase 4 – Air Gap & Bulk | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-ATTEST-74-002 | Integrate bundle builds into release/offline pipelines. | +| Sprint 75 | Attestor Console Phase 4 – Air Gap & Bulk | ops/devops/TASKS.md | TODO | DevOps Guild | DEVOPS-ATTEST-75-001 | Dashboards/alerts for attestor metrics. | +| Sprint 75 | Attestor Console Phase 4 – Air Gap & Bulk | src/Attestor/StellaOps.Attestor/TASKS.md | TODO | Attestor Service Guild | ATTESTOR-75-001 | Support attestation bundle export/import for air gap. 
| +| Sprint 75 | Attestor Console Phase 4 – Air Gap & Bulk | src/Attestor/StellaOps.Attestor/TASKS.md | TODO | Attestor Service Guild | ATTESTOR-75-002 | Harden APIs (rate limits, fuzz tests, threat model actions). | +| Sprint 75 | Attestor Console Phase 4 – Air Gap & Bulk | src/ExportCenter/StellaOps.ExportCenter.AttestationBundles/TASKS.md | TODO | Attestation Bundle Guild | EXPORT-ATTEST-75-001 | CLI bundle verify/import. | +| Sprint 75 | Attestor Console Phase 4 – Air Gap & Bulk | src/ExportCenter/StellaOps.ExportCenter.AttestationBundles/TASKS.md | TODO | Attestation Bundle Guild | EXPORT-ATTEST-75-002 | Document attestor airgap workflow. |
diff --git a/docs/implplan/SPRINTS_PRIOR_20251019.md b/docs/implplan/SPRINTS_PRIOR_20251019.md
new file mode 100644
index 00000000..97bfd25c
--- /dev/null
+++ b/docs/implplan/SPRINTS_PRIOR_20251019.md
@@ -0,0 +1,208 @@
+Closed sprint tasks archived from SPRINTS.md on 2025-10-19.
+
+| Sprint | Theme | Tasks File Path | Status | Type of Specialist | Task ID | Task Description |
+| --- | --- | --- | --- | --- | --- | --- |
+| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/__Libraries/StellaOps.Concelier.Models/TASKS.md | DONE (2025-10-12) | Team Models & Merge Leads | FEEDMODELS-SCHEMA-01-001 | SemVer primitive range-style metadata
+Instructions to work:
+DONE Read ./AGENTS.md and src/Concelier/__Libraries/StellaOps.Concelier.Models/AGENTS.md. This task lays the groundwork—complete the SemVer helper updates before teammates pick up FEEDMODELS-SCHEMA-01-002/003 and FEEDMODELS-SCHEMA-02-900. Use ./src/FASTER_MODELING_AND_NORMALIZATION.md for the target rule structure. |
+| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/__Libraries/StellaOps.Concelier.Models/TASKS.md | DONE (2025-10-11) | Team Models & Merge Leads | FEEDMODELS-SCHEMA-01-002 | Provenance decision rationale field
Instructions to work:
AdvisoryProvenance now carries `decisionReason` and docs/tests were updated. Connectors and merge tasks should populate the field when applying precedence/freshness/tie-breaker logic; see src/Concelier/__Libraries/StellaOps.Concelier.Models/PROVENANCE_GUIDELINES.md for usage guidance. | +| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/__Libraries/StellaOps.Concelier.Models/TASKS.md | DONE (2025-10-11) | Team Models & Merge Leads | FEEDMODELS-SCHEMA-01-003 | Normalized version rules collection
Instructions to work:
`AffectedPackage.NormalizedVersions` and supporting comparer/docs/tests shipped. Connector owners must emit rule arrays per ./src/FASTER_MODELING_AND_NORMALIZATION.md and report progress via FEEDMERGE-COORD-02-900 so merge/storage backfills can proceed. | +| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/__Libraries/StellaOps.Concelier.Models/TASKS.md | DONE (2025-10-12) | Team Models & Merge Leads | FEEDMODELS-SCHEMA-02-900 | Range primitives for SemVer/EVR/NEVRA metadata
Instructions to work:
DONE Read ./AGENTS.md and src/Concelier/__Libraries/StellaOps.Concelier.Models/AGENTS.md before resuming this stalled effort. Confirm helpers align with the new `NormalizedVersions` representation so connectors finishing in Sprint 2 can emit consistent metadata. | +| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/__Libraries/StellaOps.Concelier.Normalization/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDNORM-NORM-02-001 | SemVer normalized rule emitter
Shared `SemVerRangeRuleBuilder` now outputs primitives + normalized rules per `FASTER_MODELING_AND_NORMALIZATION.md`; CVE/GHSA connectors consuming the API have verified fixtures. | +| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDSTORAGE-DATA-02-001 | Normalized range dual-write + backfill
AdvisoryStore dual-writes flattened `normalizedVersions` when `concelier.storage.enableSemVerStyle` is set; migration `20251011-semver-style-backfill` updates historical records and docs outline the rollout. | +| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDSTORAGE-DATA-02-002 | Provenance decision reason persistence
Storage now persists `provenance.decisionReason` for advisories and merge events; tests cover round-trips. | +| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDSTORAGE-DATA-02-003 | Normalized versions indexing
Bootstrapper seeds compound/sparse indexes for flattened normalized rules and `docs/dev/mongo_indices.md` documents query guidance. | +| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDSTORAGE-TESTS-02-004 | Restore AdvisoryStore build after normalized versions refactor
Updated constructors/tests keep storage suites passing with the new feature flag defaults. | +| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | DONE (2025-10-12) | Team WebService & Authority | FEEDWEB-ENGINE-01-002 | Plumb Authority client resilience options
WebService wires `authority.resilience.*` into `AddStellaOpsAuthClient` and adds binding coverage via `AuthorityClientResilienceOptionsAreBound`. | +| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | DONE (2025-10-12) | Team WebService & Authority | FEEDWEB-DOCS-01-003 | Author ops guidance for resilience tuning
Install/runbooks document connected vs air-gapped resilience profiles and monitoring hooks. | +| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | DONE (2025-10-12) | Team WebService & Authority | FEEDWEB-DOCS-01-004 | Document authority bypass logging patterns
Operator guides now call out `route/status/subject/clientId/scopes/bypass/remote` audit fields and SIEM triggers. | +| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | DONE (2025-10-12) | Team WebService & Authority | FEEDWEB-DOCS-01-005 | Update Concelier operator guide for enforcement cutoff
Install guide reiterates the 2025-12-31 cutoff and links audit signals to the rollout checklist. | +| Sprint 1 | Stabilize In-Progress Foundations | src/Authority/StellaOps.Authority/TASKS.md | DONE (2025-10-11) | Team WebService & Authority | SEC3.HOST | Rate limiter policy binding
Authority host now applies configuration-driven fixed windows to `/token`, `/authorize`, and `/internal/*`; integration tests assert 429 + `Retry-After` headers; docs/config samples refreshed for Docs guild diagrams. | +| Sprint 1 | Stabilize In-Progress Foundations | src/Authority/StellaOps.Authority/TASKS.md | DONE (2025-10-11) | Team WebService & Authority | SEC3.BUILD | Authority rate-limiter follow-through
`Security.RateLimiting` now fronts token/authorize/internal limiters; Authority + Configuration matrices (`dotnet test src/Authority/StellaOps.Authority/StellaOps.Authority.sln`, `dotnet test src/__Libraries/__Tests/StellaOps.Configuration.Tests/StellaOps.Configuration.Tests.csproj`) passed on 2025-10-11; awaiting #authority-core broadcast. | +| Sprint 1 | Stabilize In-Progress Foundations | src/Authority/StellaOps.Authority/TASKS.md | DONE (2025-10-14) | Team Authority Platform & Security Guild | AUTHCORE-BUILD-OPENIDDICT / AUTHCORE-STORAGE-DEVICE-TOKENS / AUTHCORE-BOOTSTRAP-INVITES | Address remaining Authority compile blockers (OpenIddict transaction shim, token device document, bootstrap invite cleanup) so `dotnet build src/Authority/StellaOps.Authority/StellaOps.Authority.sln` returns success. | +| Sprint 1 | Stabilize In-Progress Foundations | src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/TASKS.md | DONE (2025-10-11) | Team WebService & Authority | PLG6.DOC | Plugin developer guide polish
Section 9 now documents rate limiter metadata, config keys, and lockout interplay; YAML samples updated alongside Authority config templates. | +| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.CertCc/TASKS.md | DONE (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-001 | Fetch pipeline & state tracking
Summary planner now drives monthly/yearly VINCE fetches, persists pending summaries/notes, and hydrates VINCE detail queue with telemetry.
Team instructions: Read ./AGENTS.md and src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.CertCc/AGENTS.md. Coordinate daily with Models/Merge leads so new normalizedVersions output and provenance tags stay aligned with ./src/FASTER_MODELING_AND_NORMALIZATION.md. | +| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.CertCc/TASKS.md | DONE (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-002 | VINCE note detail fetcher
Summary planner queues VINCE note detail endpoints, persists raw JSON with SHA/ETag metadata, and records retry/backoff metrics. | +| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.CertCc/TASKS.md | DONE (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-003 | DTO & parser implementation
Added VINCE DTO aggregate, Markdown→text sanitizer, vendor/status/vulnerability parsers, and parser regression fixture. | +| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.CertCc/TASKS.md | DONE (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-004 | Canonical mapping & range primitives
VINCE DTO aggregate flows through `CertCcMapper`, emitting vendor range primitives + normalized version rules that persist via `_advisoryStore`. | +| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.CertCc/TASKS.md | DONE (2025-10-12) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-005 | Deterministic fixtures/tests
Snapshot harness refreshed 2025-10-12; `certcc-*.snapshot.json` regenerated and regression suite green without UPDATE flag drift. | +| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.CertCc/TASKS.md | DONE (2025-10-12) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-006 | Telemetry & documentation
`CertCcDiagnostics` publishes summary/detail/parse/map metrics (meter `StellaOps.Concelier.Connector.CertCc`), README documents instruments, and log guidance captured for Ops on 2025-10-12. | +| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.CertCc/TASKS.md | DONE (2025-10-12) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-007 | Connector test harness remediation
Harness now wires `AddSourceCommon`, resets `FakeTimeProvider`, and passes canned-response regression run dated 2025-10-12. | +| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.CertCc/TASKS.md | DONE (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-008 | Snapshot coverage handoff
Fixtures regenerated with normalized ranges + provenance fields on 2025-10-11; QA handoff notes published and merge backfill unblocked. | +| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.CertCc/TASKS.md | DONE (2025-10-12) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-012 | Schema sync & snapshot regen follow-up
Fixtures regenerated with normalizedVersions + provenance decision reasons; handoff notes updated for Merge backfill 2025-10-12. | +| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.CertCc/TASKS.md | DONE (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-009 | Detail/map reintegration plan
Staged reintegration plan published in `src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.CertCc/FEEDCONN-CERTCC-02-009_PLAN.md`; coordinates enablement with FEEDCONN-CERTCC-02-004. | +| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.CertCc/TASKS.md | DONE (2025-10-12) | Team Connector Resumption – CERT/RedHat | FEEDCONN-CERTCC-02-010 | Partial-detail graceful degradation
Detail fetch now tolerates 404/403/410 responses and regression tests cover mixed endpoint availability. | +| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Distro.RedHat/TASKS.md | DONE (2025-10-11) | Team Connector Resumption – CERT/RedHat | FEEDCONN-REDHAT-02-001 | Fixture validation sweep
Instructions to work:
Fixtures regenerated post-model-helper rollout; provenance ordering and normalizedVersions scaffolding verified via tests. Conflict resolver deltas logged in src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Distro.RedHat/CONFLICT_RESOLVER_NOTES.md for Sprint 3 consumers. | +| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Vndr.Apple/TASKS.md | DONE (2025-10-12) | Team Vendor Apple Specialists | FEEDCONN-APPLE-02-001 | Canonical mapping & range primitives
Mapper emits SemVer rules (`scheme=apple:*`); fixtures regenerated with trimmed references + new RSR coverage, update tooling finalized. | +| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Vndr.Apple/TASKS.md | DONE (2025-10-11) | Team Vendor Apple Specialists | FEEDCONN-APPLE-02-002 | Deterministic fixtures/tests
Sanitized live fixtures + regression snapshots wired into tests; normalized rule coverage asserted. | +| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Vndr.Apple/TASKS.md | DONE (2025-10-11) | Team Vendor Apple Specialists | FEEDCONN-APPLE-02-003 | Telemetry & documentation
Apple meter metrics wired into Concelier WebService OpenTelemetry configuration; README and fixtures document normalizedVersions coverage. | +| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Vndr.Apple/TASKS.md | DONE (2025-10-12) | Team Vendor Apple Specialists | FEEDCONN-APPLE-02-004 | Live HTML regression sweep
Sanitised HT125326/HT125328/HT106355/HT214108/HT215500 fixtures recorded and regression tests green on 2025-10-12. | +| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Vndr.Apple/TASKS.md | DONE (2025-10-11) | Team Vendor Apple Specialists | FEEDCONN-APPLE-02-005 | Fixture regeneration tooling
`UPDATE_APPLE_FIXTURES=1` flow fetches & rewrites fixtures; README documents usage.
Instructions to work:
DONE Read ./AGENTS.md and src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Vndr.Apple/AGENTS.md. Resume stalled tasks, ensuring normalizedVersions output and fixtures align with ./src/FASTER_MODELING_AND_NORMALIZATION.md before handing data to the conflict sprint. | +| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Ghsa/TASKS.md | DONE (2025-10-12) | Team Connector Normalized Versions Rollout | FEEDCONN-GHSA-02-001 | GHSA normalized versions & provenance
Team instructions: Read ./AGENTS.md and each module's AGENTS file. Adopt the `NormalizedVersions` array emitted by the models sprint, wiring provenance `decisionReason` where merge overrides occur. Follow ./src/FASTER_MODELING_AND_NORMALIZATION.md; report via src/Concelier/__Libraries/StellaOps.Concelier.Merge/TASKS.md (FEEDMERGE-COORD-02-900). Progress 2025-10-11: GHSA/OSV emit normalized arrays with refreshed fixtures; CVE mapper now surfaces SemVer normalized ranges; NVD/KEV adoption pending; outstanding follow-ups include FEEDSTORAGE-DATA-02-001, FEEDMERGE-ENGINE-02-002, and rolling `tools/FixtureUpdater` updates across connectors. |
+| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Osv/TASKS.md | DONE (2025-10-12) | Team Connector Normalized Versions Rollout | FEEDCONN-OSV-02-003 | OSV normalized versions & freshness |
+| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Nvd/TASKS.md | DONE (2025-10-12) | Team Connector Normalized Versions Rollout | FEEDCONN-NVD-02-002 | NVD normalized versions & timestamps |
+| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Cve/TASKS.md | DONE (2025-10-12) | Team Connector Normalized Versions Rollout | FEEDCONN-CVE-02-003 | CVE normalized versions uplift |
+| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Kev/TASKS.md | DONE (2025-10-12) | Team Connector Normalized Versions Rollout | FEEDCONN-KEV-02-003 | KEV normalized versions propagation |
+| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Osv/TASKS.md | DONE (2025-10-12) | Team Connector Normalized Versions Rollout | FEEDCONN-OSV-04-003 | OSV parity fixture refresh |
+| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | DONE (2025-10-10) | Team WebService & Authority | FEEDWEB-DOCS-01-001 | Document authority toggle & scope requirements
Quickstart carries toggle/scope guidance pending docs guild review (no change this sprint). | +| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | DONE (2025-10-12) | Team WebService & Authority | FEEDWEB-DOCS-01-003 | Author ops guidance for resilience tuning
Operator docs now outline connected vs air-gapped resilience profiles and monitoring cues. | +| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | DONE (2025-10-12) | Team WebService & Authority | FEEDWEB-DOCS-01-004 | Document authority bypass logging patterns
Audit logging guidance highlights `route/status/subject/clientId/scopes/bypass/remote` fields and SIEM alerts. | +| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | DONE (2025-10-12) | Team WebService & Authority | FEEDWEB-DOCS-01-005 | Update Concelier operator guide for enforcement cutoff
Install guide reiterates the 2025-12-31 cutoff and ties audit signals to rollout checks. | +| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | DONE (2025-10-11) | Team WebService & Authority | FEEDWEB-OPS-01-006 | Rename plugin drop directory to namespaced path
Build outputs, tests, and docs now target `StellaOps.Concelier.PluginBinaries`/`StellaOps.Authority.PluginBinaries`. | +| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | DONE (2025-10-11) | Team WebService & Authority | FEEDWEB-OPS-01-007 | Authority resilience adoption
Deployment docs and CLI notes explain the LIB5 resilience knobs for rollout.
Instructions to work:
DONE Read ./AGENTS.md and src/Concelier/StellaOps.Concelier.WebService/AGENTS.md. These items were mid-flight; resume implementation ensuring docs/operators receive timely updates. | +| Sprint 1 | Stabilize In-Progress Foundations | src/Authority/StellaOps.Authority/TASKS.md | DONE (2025-10-11) | Team Authority Platform & Security Guild | AUTHCORE-ENGINE-01-001 | CORE8.RL — Rate limiter plumbing validated; integration tests green and docs handoff recorded for middleware ordering + Retry-After headers (see `docs/dev/authority-rate-limit-tuning-outline.md` for continuing guidance). | +| Sprint 1 | Stabilize In-Progress Foundations | src/__Libraries/StellaOps.Cryptography/TASKS.md | DONE (2025-10-11) | Team Authority Platform & Security Guild | AUTHCRYPTO-ENGINE-01-001 | SEC3.A — Shared metadata resolver confirmed via host test run; SEC3.B now unblocked for tuning guidance (outline captured in `docs/dev/authority-rate-limit-tuning-outline.md`). | +| Sprint 1 | Stabilize In-Progress Foundations | src/__Libraries/StellaOps.Cryptography/TASKS.md | DONE (2025-10-13) | Team Authority Platform & Security Guild | AUTHSEC-DOCS-01-002 | SEC3.B — Published `docs/security/rate-limits.md` with tuning matrix, alert thresholds, and lockout interplay guidance; Docs guild can lift copy into plugin guide. | +| Sprint 1 | Stabilize In-Progress Foundations | src/__Libraries/StellaOps.Cryptography/TASKS.md | DONE (2025-10-14) | Team Authority Platform & Security Guild | AUTHSEC-CRYPTO-02-001 | SEC5.B1 — Introduce libsodium signing provider and parity tests to unblock CLI verification enhancements. | +| Sprint 1 | Bootstrap & Replay Hardening | src/__Libraries/StellaOps.Cryptography/TASKS.md | DONE (2025-10-14) | Security Guild | AUTHSEC-CRYPTO-02-004 | SEC5.D/E — Finish bootstrap invite lifecycle (API/store/cleanup) and token device heuristics; build currently red due to pending handler integration. | +| Sprint 1 | Developer Tooling | src/Cli/StellaOps.Cli/TASKS.md | DONE (2025-10-15) | DevEx/CLI | AUTHCLI-DIAG-01-001 | Surface password policy diagnostics in CLI startup/output so operators see weakened overrides immediately.
CLI now loads Authority plug-ins at startup, logs weakened password policies (length/complexity), and regression coverage lives in `StellaOps.Cli.Tests/Services/AuthorityDiagnosticsReporterTests`. | +| Sprint 1 | Stabilize In-Progress Foundations | src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/TASKS.md | DONE (2025-10-11) | Team Authority Platform & Security Guild | AUTHPLUG-DOCS-01-001 | PLG6.DOC — Developer guide copy + diagrams merged 2025-10-11; limiter guidance incorporated and handed to Docs guild for asset export. | +| Sprint 2 | Connector & Data Implementation Wave | src/Concelier/__Libraries/StellaOps.Concelier.Normalization/TASKS.md | DONE (2025-10-12) | Team Normalization & Storage Backbone | FEEDNORM-NORM-02-001 | SemVer normalized rule emitter
`SemVerRangeRuleBuilder` shipped 2025-10-12 with comparator/`||` support and fixtures aligning to `FASTER_MODELING_AND_NORMALIZATION.md`. | +| Sprint 2 | Connector & Data Implementation Wave | src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDSTORAGE-DATA-02-001 | Normalized range dual-write + backfill | +| Sprint 2 | Connector & Data Implementation Wave | src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDSTORAGE-DATA-02-002 | Provenance decision reason persistence | +| Sprint 2 | Connector & Data Implementation Wave | src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDSTORAGE-DATA-02-003 | Normalized versions indexing
Indexes seeded + docs updated 2025-10-11 to cover flattened normalized rules for connector adoption. | +| Sprint 2 | Connector & Data Implementation Wave | src/Concelier/__Libraries/StellaOps.Concelier.Merge/TASKS.md | DONE (2025-10-11) | Team Normalization & Storage Backbone | FEEDMERGE-ENGINE-02-002 | Normalized versions union & dedupe
Affected package resolver unions/dedupes normalized rules, stamps merge provenance with `decisionReason`, and tests cover the rollout. |
+| Sprint 2 | Connector & Data Implementation Wave | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Ghsa/TASKS.md | DONE (2025-10-11) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-GHSA-02-001 | GHSA normalized versions & provenance |
+| Sprint 2 | Connector & Data Implementation Wave | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Ghsa/TASKS.md | DONE (2025-10-11) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-GHSA-02-004 | GHSA credits & ecosystem severity mapping |
+| Sprint 2 | Connector & Data Implementation Wave | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Ghsa/TASKS.md | DONE (2025-10-12) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-GHSA-02-005 | GitHub quota monitoring & retries |
+| Sprint 2 | Connector & Data Implementation Wave | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Ghsa/TASKS.md | DONE (2025-10-12) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-GHSA-02-006 | Production credential & scheduler rollout |
+| Sprint 2 | Connector & Data Implementation Wave | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Ghsa/TASKS.md | DONE (2025-10-12) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-GHSA-02-007 | Credit parity regression fixtures |
+| Sprint 2 | Connector & Data Implementation Wave | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Nvd/TASKS.md | DONE (2025-10-11) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-NVD-02-002 | NVD normalized versions & timestamps |
+| Sprint 2 | Connector & Data Implementation Wave | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Nvd/TASKS.md | DONE (2025-10-11) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-NVD-02-004 | NVD CVSS & CWE precedence payloads |
+| Sprint 2 | Connector & Data Implementation Wave | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Nvd/TASKS.md | DONE (2025-10-12) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-NVD-02-005 | NVD merge/export parity regression |
+| Sprint 2 | Connector & Data Implementation Wave | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Osv/TASKS.md | DONE (2025-10-11) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-OSV-02-003 | OSV normalized versions & freshness |
+| Sprint 2 | Connector & Data Implementation Wave | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Osv/TASKS.md | DONE (2025-10-11) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-OSV-02-004 | OSV references & credits alignment |
+| Sprint 2 | Connector & Data Implementation Wave | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Osv/TASKS.md | DONE (2025-10-12) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-OSV-02-005 | Fixture updater workflow
Resolved 2025-10-12: OSV mapper now derives canonical PURLs for Go + scoped npm packages when raw payloads omit `purl`; conflict fixtures unchanged for invalid npm names. Verified via `dotnet test src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Osv.Tests`, `src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Ghsa.Tests`, `src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Nvd.Tests`, and backbone normalization/storage suites. |
+| Sprint 2 | Connector & Data Implementation Wave | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Acsc/TASKS.md | DONE (2025-10-12) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-ACSC-02-001 … 02-008 | Fetch→parse→map pipeline, fixtures, diagnostics, and README finished 2025-10-12; downstream export parity captured via FEEDEXPORT-JSON-04-001 / FEEDEXPORT-TRIVY-04-001 (completed). |
+| Sprint 2 | Connector & Data Implementation Wave | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Cccs/TASKS.md | DONE (2025-10-16) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-CCCS-02-001 … 02-008 | Observability meter, historical harvest plan, and DOM sanitizer refinements wrapped; ops notes live under `docs/ops/concelier-cccs-operations.md` with fixtures validating EN/FR list handling. |
+| Sprint 2 | Connector & Data Implementation Wave | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.CertBund/TASKS.md | DONE (2025-10-15) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-CERTBUND-02-001 … 02-008 | Telemetry/docs (02-006) and history/locale sweep (02-007) completed alongside pipeline; runbook `docs/ops/concelier-certbund-operations.md` captures locale guidance and offline packaging. |
+| Sprint 2 | Connector & Data Implementation Wave | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Kisa/TASKS.md | DONE (2025-10-14) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-KISA-02-001 … 02-007 | Connector, tests, and telemetry/docs (02-006) finalized; localisation notes in `docs/dev/kisa_connector_notes.md` complete rollout. |
+| Sprint 2 | Connector & Data Implementation Wave | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Ru.Bdu/TASKS.md | DONE (2025-10-14) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-RUBDU-02-001 … 02-008 | Fetch/parser/mapper refinements, regression fixtures, telemetry/docs, access options, and trusted root packaging all landed; README documents offline access strategy. |
+| Sprint 2 | Connector & Data Implementation Wave | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Ru.Nkcki/TASKS.md | DONE (2025-10-13) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-NKCKI-02-001 … 02-008 | Listing fetch, parser, mapper, fixtures, telemetry/docs, and archive plan finished; Mongo2Go/libcrypto dependency resolved via bundled OpenSSL noted in ops guide. |
+| Sprint 2 | Connector & Data Implementation Wave | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Ics.Cisa/TASKS.md | DONE (2025-10-16) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-ICSCISA-02-001 … 02-011 | Feed parser attachment fixes, SemVer exact values, regression suites, telemetry/docs updates, and handover complete; ops runbook now details attachment verification + proxy usage.
|
+| Sprint 2 | Connector & Data Implementation Wave | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Vndr.Cisco/TASKS.md | DONE (2025-10-14) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-CISCO-02-001 … 02-007 | OAuth fetch pipeline, DTO/mapping, tests, and telemetry/docs shipped; monitoring/export integration follow-ups recorded in Ops docs and exporter backlog (completed). |
+| Sprint 2 | Connector & Data Implementation Wave | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Vndr.Msrc/TASKS.md | DONE (2025-10-15) | Team Connector Expansion – Regional & Vendor Feeds | FEEDCONN-MSRC-02-001 … 02-008 | Azure AD onboarding (02-008) unblocked fetch/parse/map pipeline; fixtures, telemetry/docs, and Offline Kit guidance published in `docs/ops/concelier-msrc-operations.md`. |
+| Sprint 2 | Connector & Data Implementation Wave | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Cve/TASKS.md | DONE (2025-10-15) | Team Connector Support & Monitoring | FEEDCONN-CVE-02-001 … 02-002 | CVE data-source selection, fetch pipeline, and docs landed 2025-10-10. 2025-10-15: smoke verified using the seeded mirror fallback; connector now logs a warning and pulls from `seed-data/cve/` until live CVE Services credentials arrive. |
+| Sprint 2 | Connector & Data Implementation Wave | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Kev/TASKS.md | DONE (2025-10-12) | Team Connector Support & Monitoring | FEEDCONN-KEV-02-001 … 02-002 | KEV catalog ingestion, fixtures, telemetry, and schema validation completed 2025-10-12; ops dashboard published. |
+| Sprint 2 | Connector & Data Implementation Wave | docs/TASKS.md | DONE (2025-10-11) | Team Docs & Knowledge Base | FEEDDOCS-DOCS-01-001 | Canonical schema docs refresh
Updated canonical schema + provenance guides with SemVer style, normalized version rules, decision reason change log, and migration notes. | +| Sprint 2 | Connector & Data Implementation Wave | docs/TASKS.md | DONE (2025-10-11) | Team Docs & Knowledge Base | FEEDDOCS-DOCS-02-001 | Concelier-SemVer Playbook
Published merge playbook covering mapper patterns, dedupe flow, indexes, and rollout checklist. | +| Sprint 2 | Connector & Data Implementation Wave | docs/TASKS.md | DONE (2025-10-11) | Team Docs & Knowledge Base | FEEDDOCS-DOCS-02-002 | Normalized versions query guide
Delivered Mongo index/query addendum with `$unwind` recipes, dedupe checks, and operational checklist.
Instructions to work:
DONE Read ./AGENTS.md and docs/AGENTS.md. Document every schema/index/query change produced in Sprint 1-2 leveraging ./src/FASTER_MODELING_AND_NORMALIZATION.md. | +| Sprint 3 | Conflict Resolution Integration & Communications | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | DONE (2025-10-11) | Team Core Engine & Storage Analytics | FEEDCORE-ENGINE-03-001 | Canonical merger implementation
`CanonicalMerger` ships with freshness/tie-breaker logic, provenance, and unit coverage feeding Merge. | +| Sprint 3 | Conflict Resolution Integration & Communications | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | DONE (2025-10-11) | Team Core Engine & Storage Analytics | FEEDCORE-ENGINE-03-002 | Field precedence and tie-breaker map
Field precedence tables and tie-breaker metrics wired into the canonical merge flow; docs/tests updated.
Instructions to work:
Read ./AGENTS.md and core AGENTS. Implement the conflict resolver exactly as specified in ./src/DEDUP_CONFLICTS_RESOLUTION_ALGO.md, coordinating with Merge and Storage teammates. | +| Sprint 3 | Conflict Resolution Integration & Communications | src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Core Engine & Storage Analytics | FEEDSTORAGE-DATA-03-001 | Merge event provenance audit prep
Merge events now persist `fieldDecisions` and analytics-ready provenance snapshots. | +| Sprint 3 | Conflict Resolution Integration & Communications | src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Core Engine & Storage Analytics | FEEDSTORAGE-DATA-02-001 | Normalized range dual-write + backfill
Dual-write/backfill flag delivered; migration + options validated in tests. | +| Sprint 3 | Conflict Resolution Integration & Communications | src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/TASKS.md | DONE (2025-10-11) | Team Core Engine & Storage Analytics | FEEDSTORAGE-TESTS-02-004 | Restore AdvisoryStore build after normalized versions refactor
Storage tests adjusted for normalized versions/decision reasons.
Instructions to work:
Read ./AGENTS.md and storage AGENTS. Extend merge events with decision reasons and analytics views to support the conflict rules, and deliver the dual-write/backfill for `NormalizedVersions` + `decisionReason` so connectors can roll out safely. | +| Sprint 3 | Conflict Resolution Integration & Communications | src/Concelier/__Libraries/StellaOps.Concelier.Merge/TASKS.md | DONE (2025-10-11) | Team Merge & QA Enforcement | FEEDMERGE-ENGINE-04-001 | GHSA/NVD/OSV conflict rules
Merge pipeline consumes `CanonicalMerger` output prior to precedence merge. | +| Sprint 3 | Conflict Resolution Integration & Communications | src/Concelier/__Libraries/StellaOps.Concelier.Merge/TASKS.md | DONE (2025-10-11) | Team Merge & QA Enforcement | FEEDMERGE-ENGINE-04-002 | Override metrics instrumentation
Merge events capture per-field decisions; counters/logs align with conflict rules. | +| Sprint 3 | Conflict Resolution Integration & Communications | src/Concelier/__Libraries/StellaOps.Concelier.Merge/TASKS.md | DONE (2025-10-11) | Team Merge & QA Enforcement | FEEDMERGE-ENGINE-04-003 | Reference & credit union pipeline
Canonical merge preserves unions with updated tests. | +| Sprint 3 | Conflict Resolution Integration & Communications | src/Concelier/__Libraries/StellaOps.Concelier.Merge/TASKS.md | DONE (2025-10-11) | Team Merge & QA Enforcement | FEEDMERGE-QA-04-001 | End-to-end conflict regression suite
Added regression tests (`AdvisoryMergeServiceTests`) covering canonical + precedence flow.
Instructions to work:
Read ./AGENTS.md and merge AGENTS. Integrate the canonical merger, instrument metrics, and deliver comprehensive regression tests following ./src/DEDUP_CONFLICTS_RESOLUTION_ALGO.md. | +| Sprint 3 | Conflict Resolution Integration & Communications | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Ghsa/TASKS.md | DONE (2025-10-12) | Team Connector Regression Fixtures | FEEDCONN-GHSA-04-002 | GHSA conflict regression fixtures | +| Sprint 3 | Conflict Resolution Integration & Communications | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Nvd/TASKS.md | DONE (2025-10-12) | Team Connector Regression Fixtures | FEEDCONN-NVD-04-002 | NVD conflict regression fixtures | +| Sprint 3 | Conflict Resolution Integration & Communications | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Osv/TASKS.md | DONE (2025-10-12) | Team Connector Regression Fixtures | FEEDCONN-OSV-04-002 | OSV conflict regression fixtures
Instructions to work:
Read ./AGENTS.md and module AGENTS. Produce fixture triples supporting the precedence/tie-breaker paths defined in ./src/DEDUP_CONFLICTS_RESOLUTION_ALGO.md and hand them to Merge QA. | +| Sprint 3 | Conflict Resolution Integration & Communications | docs/TASKS.md | DONE (2025-10-11) | Team Documentation Guild – Conflict Guidance | FEEDDOCS-DOCS-05-001 | Concelier Conflict Rules
Runbook published at `docs/ops/concelier-conflict-resolution.md`; metrics/log guidance aligned with Sprint 3 merge counters. | +| Sprint 3 | Conflict Resolution Integration & Communications | docs/TASKS.md | DONE (2025-10-16) | Team Documentation Guild – Conflict Guidance | FEEDDOCS-DOCS-05-002 | Conflict runbook ops rollout
Ops review completed, alert thresholds applied, and change log appended in `docs/ops/concelier-conflict-resolution.md`; task closed after connector signals verified. | +| Sprint 4 | Schema Parity & Freshness Alignment | src/Concelier/__Libraries/StellaOps.Concelier.Models/TASKS.md | DONE (2025-10-15) | Team Models & Merge Leads | FEEDMODELS-SCHEMA-04-001 | Advisory schema parity (description/CWE/canonical metric)
Extend `Advisory` and related records with description text, CWE collection, and canonical metric pointer; refresh validation + serializer determinism tests. | +| Sprint 4 | Schema Parity & Freshness Alignment | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | DONE (2025-10-15) | Team Core Engine & Storage Analytics | FEEDCORE-ENGINE-04-003 | Canonical merger parity for new fields
Teach `CanonicalMerger` to populate description, CWEResults, and canonical metric pointer with provenance + regression coverage. | +| Sprint 4 | Schema Parity & Freshness Alignment | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | DONE (2025-10-15) | Team Core Engine & Storage Analytics | FEEDCORE-ENGINE-04-004 | Reference normalization & freshness instrumentation cleanup
Implement URL normalization for reference dedupe, align freshness-sensitive instrumentation, and add analytics tests. | +| Sprint 4 | Schema Parity & Freshness Alignment | src/Concelier/__Libraries/StellaOps.Concelier.Merge/TASKS.md | DONE (2025-10-15) | Team Merge & QA Enforcement | FEEDMERGE-ENGINE-04-004 | Merge pipeline parity for new advisory fields
Ensure merge service + merge events surface description/CWE/canonical metric decisions with updated metrics/tests. | +| Sprint 4 | Schema Parity & Freshness Alignment | src/Concelier/__Libraries/StellaOps.Concelier.Merge/TASKS.md | DONE (2025-10-15) | Team Merge & QA Enforcement | FEEDMERGE-ENGINE-04-005 | Connector coordination for new advisory fields
GHSA/NVD/OSV connectors now ship description, CWE, and canonical metric data with refreshed fixtures; merge coordination log updated and exporters notified. | +| Sprint 4 | Schema Parity & Freshness Alignment | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Exporter.Json/TASKS.md | DONE (2025-10-15) | Team Exporters – JSON | FEEDEXPORT-JSON-04-001 | Surface new advisory fields in JSON exporter
Update schemas/offline bundle + fixtures once model/core parity lands.
2025-10-15: `dotnet test src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Exporter.Json.Tests` validated canonical metric/CWE emission. | +| Sprint 4 | Schema Parity & Freshness Alignment | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Exporter.TrivyDb/TASKS.md | DONE (2025-10-15) | Team Exporters – Trivy DB | FEEDEXPORT-TRIVY-04-001 | Propagate new advisory fields into Trivy DB package
Extend Bolt builder, metadata, and regression tests for the expanded schema.
2025-10-15: `dotnet test src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Exporter.TrivyDb.Tests` confirmed canonical metric/CWE propagation. | +| Sprint 4 | Schema Parity & Freshness Alignment | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Ghsa/TASKS.md | DONE (2025-10-16) | Team Connector Regression Fixtures | FEEDCONN-GHSA-04-004 | Harden CVSS fallback so canonical metric ids persist when GitHub omits vectors; extend fixtures and document severity precedence hand-off to Merge. | +| Sprint 4 | Schema Parity & Freshness Alignment | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Osv/TASKS.md | DONE (2025-10-16) | Team Connector Expansion – GHSA/NVD/OSV | FEEDCONN-OSV-04-005 | Map OSV advisories lacking CVSS vectors to canonical metric ids/notes and document CWE provenance quirks; schedule parity fixture updates. | +| Sprint 5 | Excititor Core Foundations | src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md | DONE (2025-10-15) | Team Excititor Core & Policy | EXCITITOR-CORE-01-001 | Stand up canonical VEX claim/consensus records with deterministic serializers so Storage/Exports share a stable contract. | +| Sprint 5 | Excititor Core Foundations | src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md | DONE (2025-10-15) | Team Excititor Core & Policy | EXCITITOR-CORE-01-002 | Implement trust-weighted consensus resolver with baseline policy weights, justification gates, telemetry output, and majority/tie handling. | +| Sprint 5 | Excititor Core Foundations | src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md | DONE (2025-10-15) | Team Excititor Core & Policy | EXCITITOR-CORE-01-003 | Publish shared connector/exporter/attestation abstractions and deterministic query signature utilities for cache/attestation workflows. | +| Sprint 5 | Excititor Core Foundations | src/Excititor/__Libraries/StellaOps.Excititor.Policy/TASKS.md | DONE (2025-10-15) | Team Excititor Policy | EXCITITOR-POLICY-01-001 | Established policy options & snapshot provider covering baseline weights/overrides. | +| Sprint 5 | Excititor Core Foundations | src/Excititor/__Libraries/StellaOps.Excititor.Policy/TASKS.md | DONE (2025-10-15) | Team Excititor Policy | EXCITITOR-POLICY-01-002 | Policy evaluator now feeds consensus resolver with immutable snapshots. | +| Sprint 5 | Excititor Core Foundations | src/Excititor/__Libraries/StellaOps.Excititor.Policy/TASKS.md | DONE (2025-10-16) | Team Excititor Policy | EXCITITOR-POLICY-01-003 | Author policy diagnostics, CLI/WebService surfacing, and documentation updates. | +| Sprint 5 | Excititor Core Foundations | src/Excititor/__Libraries/StellaOps.Excititor.Policy/TASKS.md | DONE (2025-10-16) | Team Excititor Policy | EXCITITOR-POLICY-01-004 | Implement YAML/JSON schema validation and deterministic diagnostics for operator bundles. | +| Sprint 5 | Excititor Core Foundations | src/Excititor/__Libraries/StellaOps.Excititor.Policy/TASKS.md | DONE (2025-10-16) | Team Excititor Policy | EXCITITOR-POLICY-01-005 | Add policy change tracking, snapshot digests, and telemetry/logging hooks. | +| Sprint 5 | Excititor Core Foundations | src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/TASKS.md | DONE (2025-10-15) | Team Excititor Storage | EXCITITOR-STORAGE-01-001 | Mongo mapping registry plus raw/export entities and DI extensions in place. 
| +| Sprint 5 | Excititor Core Foundations | src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/TASKS.md | DONE (2025-10-16) | Team Excititor Storage | EXCITITOR-STORAGE-01-004 | Build provider/consensus/cache class maps and related collections. | +| Sprint 5 | Excititor Core Foundations | src/Excititor/__Libraries/StellaOps.Excititor.Export/TASKS.md | DONE (2025-10-15) | Team Excititor Export | EXCITITOR-EXPORT-01-001 | Export engine delivers cache lookup, manifest creation, and policy integration. | +| Sprint 5 | Excititor Core Foundations | src/Excititor/__Libraries/StellaOps.Excititor.Export/TASKS.md | DONE (2025-10-17) | Team Excititor Export | EXCITITOR-EXPORT-01-004 | Connect export engine to attestation client and persist Rekor metadata. | +| Sprint 5 | Excititor Core Foundations | src/Excititor/__Libraries/StellaOps.Excititor.Attestation/TASKS.md | DONE (2025-10-16) | Team Excititor Attestation | EXCITITOR-ATTEST-01-001 | Implement in-toto predicate + DSSE builder providing envelopes for export attestation. | +| Sprint 5 | Excititor Core Foundations | src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Abstractions/TASKS.md | DONE (2025-10-17) | Team Excititor Connectors | EXCITITOR-CONN-ABS-01-001 | Deliver shared connector context/base classes so provider plug-ins can be activated via WebService/Worker. | +| Sprint 5 | Excititor Core Foundations | src/Excititor/StellaOps.Excititor.WebService/TASKS.md | DONE (2025-10-17) | Team Excititor WebService | EXCITITOR-WEB-01-001 | Scaffold minimal API host, DI, and `/excititor/status` endpoint integrating policy, storage, export, and attestation services. | +| Sprint 6 | Excititor Ingest & Formats | src/Excititor/StellaOps.Excititor.Worker/TASKS.md | DONE (2025-10-17) | Team Excititor Worker | EXCITITOR-WORKER-01-001 | Create Worker host with provider scheduling and logging to drive recurring pulls/reconciliation. | +| Sprint 6 | Excititor Ingest & Formats | src/Excititor/__Libraries/StellaOps.Excititor.Formats.CSAF/TASKS.md | DONE (2025-10-17) | Team Excititor Formats | EXCITITOR-FMT-CSAF-01-001 | Implement CSAF normalizer foundation translating provider documents into `VexClaim` entries. | +| Sprint 6 | Excititor Ingest & Formats | src/Excititor/__Libraries/StellaOps.Excititor.Formats.CycloneDX/TASKS.md | DONE (2025-10-17) | Team Excititor Formats | EXCITITOR-FMT-CYCLONE-01-001 | Implement CycloneDX VEX normalizer capturing `analysis` state and component references. | +| Sprint 6 | Excititor Ingest & Formats | src/Excititor/__Libraries/StellaOps.Excititor.Formats.OpenVEX/TASKS.md | DONE (2025-10-17) | Team Excititor Formats | EXCITITOR-FMT-OPENVEX-01-001 | Implement OpenVEX normalizer to ingest attestations into canonical claims with provenance. | +| Sprint 6 | Excititor Ingest & Formats | src/Excititor/__Libraries/StellaOps.Excititor.Connectors.RedHat.CSAF/TASKS.md | DONE (2025-10-17) | Team Excititor Connectors – Red Hat | EXCITITOR-CONN-RH-01-001 | Ship Red Hat CSAF provider metadata discovery enabling incremental pulls. | +| Sprint 6 | Excititor Ingest & Formats | src/Excititor/__Libraries/StellaOps.Excititor.Connectors.RedHat.CSAF/TASKS.md | DONE (2025-10-17) | Team Excititor Connectors – Red Hat | EXCITITOR-CONN-RH-01-002 | Fetch CSAF windows with ETag handling, resume tokens, quarantine on schema errors, and persist raw docs. 
| +| Sprint 6 | Excititor Ingest & Formats | src/Excititor/__Libraries/StellaOps.Excititor.Connectors.RedHat.CSAF/TASKS.md | DONE (2025-10-17) | Team Excititor Connectors – Red Hat | EXCITITOR-CONN-RH-01-003 | Populate provider trust overrides (cosign issuer, identity regex) and provenance hints for policy evaluation/logging. | +| Sprint 6 | Excititor Ingest & Formats | src/Excititor/__Libraries/StellaOps.Excititor.Connectors.RedHat.CSAF/TASKS.md | DONE (2025-10-17) | Team Excititor Connectors – Red Hat | EXCITITOR-CONN-RH-01-004 | Persist resume cursors (last updated timestamp/document hashes) in storage and reload during fetch to avoid duplicates. | +| Sprint 6 | Excititor Ingest & Formats | src/Excititor/__Libraries/StellaOps.Excititor.Connectors.RedHat.CSAF/TASKS.md | DONE (2025-10-17) | Team Excititor Connectors – Red Hat | EXCITITOR-CONN-RH-01-005 | Register connector in Worker/WebService DI, add scheduled jobs, and document CLI triggers for Red Hat CSAF pulls. | +| Sprint 6 | Excititor Ingest & Formats | src/Excititor/__Libraries/StellaOps.Excititor.Connectors.RedHat.CSAF/TASKS.md | DONE (2025-10-17) | Team Excititor Connectors – Red Hat | EXCITITOR-CONN-RH-01-006 | Add CSAF normalization parity fixtures ensuring RHSA-specific metadata is preserved. | +| Sprint 6 | Excititor Ingest & Formats | src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Cisco.CSAF/TASKS.md | DONE (2025-10-17) | Team Excititor Connectors – Cisco | EXCITITOR-CONN-CISCO-01-001 | Implement Cisco CSAF endpoint discovery/auth to unlock paginated pulls. | +| Sprint 6 | Excititor Ingest & Formats | src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Cisco.CSAF/TASKS.md | DONE (2025-10-17) | Team Excititor Connectors – Cisco | EXCITITOR-CONN-CISCO-01-002 | Implement Cisco CSAF paginated fetch loop with dedupe and raw persistence support. | +| Sprint 6 | Excititor Ingest & Formats | src/Excititor/__Libraries/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/TASKS.md | DONE (2025-10-17) | Team Excititor Connectors – SUSE | EXCITITOR-CONN-SUSE-01-001 | Build Rancher VEX Hub discovery/subscription path with offline snapshot support. | +| Sprint 6 | Excititor Ingest & Formats | src/Excititor/__Libraries/StellaOps.Excititor.Connectors.MSRC.CSAF/TASKS.md | DONE (2025-10-17) | Team Excititor Connectors – MSRC | EXCITITOR-CONN-MS-01-001 | Deliver AAD onboarding/token cache for MSRC CSAF ingestion. | +| Sprint 6 | Excititor Ingest & Formats | src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Oracle.CSAF/TASKS.md | DONE (2025-10-17) | Team Excititor Connectors – Oracle | EXCITITOR-CONN-ORACLE-01-001 | Implement Oracle CSAF catalogue discovery with CPU calendar awareness. | +| Sprint 6 | Excititor Ingest & Formats | src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Ubuntu.CSAF/TASKS.md | DONE (2025-10-17) | Team Excititor Connectors – Ubuntu | EXCITITOR-CONN-UBUNTU-01-001 | Implement Ubuntu CSAF discovery and channel selection for USN ingestion. | +| Sprint 6 | Excititor Ingest & Formats | src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/TASKS.md | DONE (2025-10-18) | Team Excititor Connectors – OCI | EXCITITOR-CONN-OCI-01-001 | Wire OCI discovery/auth to fetch OpenVEX attestations for configured images. 
| +| Sprint 6 | Excititor Ingest & Formats | src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/TASKS.md | DONE (2025-10-18) | Team Excititor Connectors – OCI | EXCITITOR-CONN-OCI-01-002 | Attestation fetch & verify loop – download DSSE attestations, trigger verification, handle retries/backoff, persist raw statements. | +| Sprint 6 | Excititor Ingest & Formats | src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/TASKS.md | DONE (2025-10-18) | Team Excititor Connectors – OCI | EXCITITOR-CONN-OCI-01-003 | Provenance metadata & policy hooks – emit image, subject digest, issuer, and trust metadata for policy weighting/logging. | +| Sprint 6 | Excititor Ingest & Formats | src/Cli/StellaOps.Cli/TASKS.md | DONE (2025-10-18) | DevEx/CLI | EXCITITOR-CLI-01-001 | Add `excititor` CLI verbs bridging to WebService with consistent auth and offline UX. | +| Sprint 7 | Contextual Truth Foundations | src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md | DONE (2025-10-19) | Team Excititor Core & Policy | EXCITITOR-CORE-02-001 | Context signal schema prep – extend consensus models with severity/KEV/EPSS fields and update canonical serializers. | +| Sprint 7 | Contextual Truth Foundations | src/Excititor/__Libraries/StellaOps.Excititor.Policy/TASKS.md | DONE (2025-10-19) | Team Excititor Policy | EXCITITOR-POLICY-02-001 | Scoring coefficients & weight ceilings – add α/β options, weight boosts, and validation guidance. | +| Sprint 7 | Contextual Truth Foundations | src/Excititor/__Libraries/StellaOps.Excititor.Attestation/TASKS.md | DONE (2025-10-16) | Team Excititor Attestation | EXCITITOR-ATTEST-01-002 | Rekor v2 client integration – ship transparency log client with retries and offline queue. | +| Sprint 9 | Scanner Core Foundations | src/Scanner/__Libraries/StellaOps.Scanner.Core/TASKS.md | DONE (2025-10-18) | Team Scanner Core | SCANNER-CORE-09-501 | Define shared DTOs (ScanJob, ProgressEvent), error taxonomy, and deterministic ID/timestamp helpers aligning with `ARCHITECTURE_SCANNER.md` §3–§4. | +| Sprint 9 | Scanner Core Foundations | src/Scanner/__Libraries/StellaOps.Scanner.Core/TASKS.md | DONE (2025-10-18) | Team Scanner Core | SCANNER-CORE-09-502 | Observability helpers (correlation IDs, logging scopes, metric namespacing, deterministic hashes) consumed by WebService/Worker. | +| Sprint 9 | Scanner Core Foundations | src/Scanner/__Libraries/StellaOps.Scanner.Core/TASKS.md | DONE (2025-10-18) | Team Scanner Core | SCANNER-CORE-09-503 | Security utilities: Authority client factory, OpTok caching, DPoP verifier, restart-time plug-in guardrails for scanner components. | +| Sprint 9 | Scanner Build-time | src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/TASKS.md | DONE (2025-10-19) | BuildX Guild | SP9-BLDX-09-001 | Buildx driver scaffold + handshake with Scanner.Emit (local CAS). | +| Sprint 9 | Scanner Build-time | src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/TASKS.md | DONE (2025-10-19) | BuildX Guild | SP9-BLDX-09-002 | OCI annotations + provenance hand-off to Attestor. | +| Sprint 9 | Scanner Build-time | src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/TASKS.md | DONE (2025-10-19) | BuildX Guild | SP9-BLDX-09-003 | CI demo: minimal SBOM push & backend report wiring. | +| Sprint 9 | Scanner Build-time | src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/TASKS.md | DONE (2025-10-19) | BuildX Guild | SP9-BLDX-09-004 | Stabilize descriptor nonce derivation so repeated builds emit deterministic placeholders. 
|
+| Sprint 9 | Scanner Build-time | src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/TASKS.md | DONE (2025-10-19) | BuildX Guild | SP9-BLDX-09-005 | Integrate determinism guard into GitHub/Gitea workflows and archive proof artifacts. |
+| Sprint 9 | Scanner Core Foundations | src/Scanner/StellaOps.Scanner.WebService/TASKS.md | DONE (2025-10-18) | Team Scanner WebService | SCANNER-WEB-09-101 | Minimal API host with Authority enforcement, health/ready endpoints, and restart-time plug-in loader per architecture §1, §4. |
+| Sprint 9 | Scanner Core Foundations | src/Scanner/StellaOps.Scanner.WebService/TASKS.md | DONE (2025-10-18) | Team Scanner WebService | SCANNER-WEB-09-102 | `/api/v1/scans` submission/status endpoints with deterministic IDs, validation, and cancellation support. |
+| Sprint 9 | Scanner Core Foundations | src/Scanner/StellaOps.Scanner.WebService/TASKS.md | DONE (2025-10-19) | Team Scanner WebService | SCANNER-WEB-09-104 | Configuration binding for Mongo, MinIO, queue, feature flags; startup diagnostics and fail-fast policy. |
+| Sprint 9 | Scanner Core Foundations | src/Scanner/StellaOps.Scanner.Worker/TASKS.md | DONE (2025-10-19) | Team Scanner Worker | SCANNER-WORKER-09-201 | Worker host bootstrap with Authority auth, hosted services, and graceful shutdown semantics. |
+| Sprint 9 | Scanner Core Foundations | src/Scanner/StellaOps.Scanner.Worker/TASKS.md | DONE (2025-10-19) | Team Scanner Worker | SCANNER-WORKER-09-202 | Lease/heartbeat loop with retry+jitter, poison-job quarantine, structured logging. |
+| Sprint 9 | Scanner Core Foundations | src/Scanner/StellaOps.Scanner.Worker/TASKS.md | DONE (2025-10-19) | Team Scanner Worker | SCANNER-WORKER-09-203 | Analyzer dispatch skeleton emitting deterministic stage progress and honoring cancellation tokens. |
+| Sprint 9 | Scanner Core Foundations | src/Scanner/StellaOps.Scanner.Worker/TASKS.md | DONE (2025-10-19) | Team Scanner Worker | SCANNER-WORKER-09-204 | Worker metrics (queue latency, stage duration, failure counts) with OpenTelemetry resource wiring. |
+| Sprint 9 | Scanner Core Foundations | src/Scanner/StellaOps.Scanner.Worker/TASKS.md | DONE (2025-10-19) | Team Scanner Worker | SCANNER-WORKER-09-205 | Harden heartbeat jitter so lease safety margin stays ≥3× and cover with regression tests + optional live queue smoke run. |
+| Sprint 9 | Policy Foundations | src/Policy/__Libraries/StellaOps.Policy/TASKS.md | DONE | Policy Guild | POLICY-CORE-09-001 | Policy schema + binder + diagnostics. |
+| Sprint 9 | Policy Foundations | src/Policy/__Libraries/StellaOps.Policy/TASKS.md | DONE | Policy Guild | POLICY-CORE-09-002 | Policy snapshot store + revision digests. |
+| Sprint 9 | Policy Foundations | src/Policy/__Libraries/StellaOps.Policy/TASKS.md | DONE | Policy Guild | POLICY-CORE-09-003 | `/policy/preview` API (image digest → projected verdict diff). |
+| Sprint 9 | DevOps Foundations | ops/devops/TASKS.md | DONE (2025-10-19) | DevOps Guild | DEVOPS-HELM-09-001 | Helm/Compose environment profiles (dev/staging/airgap) with deterministic digests. |
+| Sprint 9 | Docs & Governance | docs/TASKS.md | DONE (2025-10-19) | Docs Guild, DevEx | DOCS-ADR-09-001 | Establish ADR process and template. |
+| Sprint 9 | Docs & Governance | docs/TASKS.md | DONE (2025-10-19) | Docs Guild, Platform Events | DOCS-EVENTS-09-002 | Publish event schema catalog (`docs/events/`) for critical envelopes.
|
+| Sprint 9 | Scanner Core Foundations | src/Scanner/__Libraries/StellaOps.Scanner.Storage/TASKS.md | DONE (2025-10-19) | Team Scanner Storage | SCANNER-STORAGE-09-301 | Mongo catalog schemas/indexes for images, layers, artifacts, jobs, lifecycle rules plus migrations. |
+| Sprint 9 | Scanner Core Foundations | src/Scanner/__Libraries/StellaOps.Scanner.Storage/TASKS.md | DONE (2025-10-19) | Team Scanner Storage | SCANNER-STORAGE-09-302 | MinIO layout, immutability policies, client abstraction, and configuration binding. |
+| Sprint 9 | Scanner Core Foundations | src/Scanner/__Libraries/StellaOps.Scanner.Storage/TASKS.md | DONE (2025-10-19) | Team Scanner Storage | SCANNER-STORAGE-09-303 | Repositories/services with dual-write feature flag, deterministic digests, TTL enforcement tests. |
+| Sprint 9 | Scanner Core Foundations | src/Scanner/__Libraries/StellaOps.Scanner.Queue/TASKS.md | DONE (2025-10-19) | Team Scanner Queue | SCANNER-QUEUE-09-401 | Queue abstraction + Redis Streams adapter with ack/claim APIs and idempotency tokens. |
+| Sprint 9 | Scanner Core Foundations | src/Scanner/__Libraries/StellaOps.Scanner.Queue/TASKS.md | DONE (2025-10-19) | Team Scanner Queue | SCANNER-QUEUE-09-402 | Pluggable backend support (Redis, NATS) with configuration binding, health probes, failover docs. |
+| Sprint 9 | Scanner Core Foundations | src/Scanner/__Libraries/StellaOps.Scanner.Queue/TASKS.md | DONE (2025-10-19) | Team Scanner Queue | SCANNER-QUEUE-09-403 | Retry + dead-letter strategy with structured logs/metrics for offline deployments. |
+| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Ghsa/TASKS.md | DONE (2025-10-12) | Team Connector Normalized Versions Rollout | FEEDCONN-GHSA-02-001 | GHSA normalized versions & provenance
Team instructions: Read ./AGENTS.md and each module's AGENTS file. Adopt the `NormalizedVersions` array emitted by the models sprint, and wire the provenance `decisionReason` field wherever merge overrides occur. Follow ./src/FASTER_MODELING_AND_NORMALIZATION.md; report via src/Concelier/__Libraries/StellaOps.Concelier.Merge/TASKS.md (FEEDMERGE-COORD-02-900). Progress 2025-10-11: GHSA/OSV emit normalized arrays with refreshed fixtures; the CVE mapper now surfaces SemVer normalized ranges; NVD/KEV adoption is pending; outstanding follow-ups include FEEDSTORAGE-DATA-02-001, FEEDMERGE-ENGINE-02-002, and rolling `tools/FixtureUpdater` refreshes across the remaining connectors.
Progress 2025-10-20: Coordination matrix + rollout dashboard refreshed; upcoming deadlines tracked (Cccs/Cisco 2025-10-21, CertBund 2025-10-22, ICS-CISA 2025-10-23, KISA 2025-10-24) with the escalation path documented in FEEDMERGE-COORD-02-900. |
+| Sprint 1 | Stabilize In-Progress Foundations | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | DONE (2025-10-19) | Team WebService & Authority | FEEDWEB-OPS-01-006 | Rename plugin drop directory to namespaced path
Build outputs now point at `StellaOps.Concelier.PluginBinaries`/`StellaOps.Authority.PluginBinaries`; defaults/docs/tests updated to reflect the new layout. | +| Sprint 7 | Contextual Truth Foundations | src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/TASKS.md | DONE (2025-10-19) | Team Excititor Storage | EXCITITOR-STORAGE-02-001 | Statement events & scoring signals – immutable VEX statements store, consensus signal fields, and migration `20251019-consensus-signals-statements` with tests (`dotnet test src/Excititor/__Tests/StellaOps.Excititor.Core.Tests/StellaOps.Excititor.Core.Tests.csproj`, `dotnet test src/Excititor/__Tests/StellaOps.Excititor.Storage.Mongo.Tests/StellaOps.Excititor.Storage.Mongo.Tests.csproj`). | +| Sprint 7 | Contextual Truth Foundations | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | DONE (2025-10-19) | Team Core Engine & Storage Analytics | FEEDCORE-ENGINE-07-001 | Advisory event log & asOf queries – surface immutable statements and replay capability. | +| Sprint 7 | Contextual Truth Foundations | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | DONE (2025-10-19) | Concelier WebService Guild | FEEDWEB-EVENTS-07-001 | Advisory event replay API – expose `/concelier/advisories/{key}/replay` with `asOf` filter, hex hashes, and conflict data. | +| Sprint 7 | Contextual Truth Foundations | src/Concelier/__Libraries/StellaOps.Concelier.Merge/TASKS.md | DONE (2025-10-20) | BE-Merge | FEEDMERGE-ENGINE-07-001 | Conflict sets & explainers – persist conflict materialization and replay hashes for merge decisions. | +| Sprint 8 | Mongo strengthening | src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/TASKS.md | DONE (2025-10-19) | Team Normalization & Storage Backbone | FEEDSTORAGE-MONGO-08-001 | Causal-consistent Concelier storage sessions
Scoped session facilitator registered, repositories accept optional session handles, and replica-set failover tests verify read-your-write + monotonic reads. |
+| Sprint 8 | Mongo strengthening | src/Authority/StellaOps.Authority/TASKS.md | DONE (2025-10-19) | Authority Core & Storage Guild | AUTHSTORAGE-MONGO-08-001 | Harden Authority Mongo usage
Scoped Mongo sessions with majority read/write concerns wired through stores and GraphQL/HTTP pipelines; replica-set election regression validated. |
+| Sprint 8 | Mongo strengthening | src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/TASKS.md | DONE (2025-10-19) | Team Excititor Storage | EXCITITOR-STORAGE-MONGO-08-001 | Causal consistency for Excititor repositories
Session-scoped repositories shipped with new Mongo records, orchestrators/workers now share scoped sessions, and replica-set failover coverage added via `dotnet test src/Excititor/__Tests/StellaOps.Excititor.Storage.Mongo.Tests/StellaOps.Excititor.Storage.Mongo.Tests.csproj`. | +| Sprint 8 | Platform Maintenance | src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/TASKS.md | DONE (2025-10-19) | Team Excititor Storage | EXCITITOR-STORAGE-03-001 | Statement backfill tooling – shipped admin backfill endpoint, CLI hook (`stellaops excititor backfill-statements`), integration tests, and operator runbook (`docs/dev/EXCITITOR_STATEMENT_BACKFILL.md`). | +| Sprint 8 | Mirror Distribution | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Exporter.Json/TASKS.md | DONE (2025-10-19) | Concelier Export Guild | CONCELIER-EXPORT-08-201 | Mirror bundle + domain manifest – produce signed JSON aggregates for `*.stella-ops.org` mirrors. | +| Sprint 8 | Mirror Distribution | src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Exporter.TrivyDb/TASKS.md | DONE (2025-10-19) | Concelier Export Guild | CONCELIER-EXPORT-08-202 | Mirror-ready Trivy DB bundles – mirror options emit per-domain manifests/metadata/db archives with deterministic digests for downstream sync. | +| Sprint 8 | Mirror Distribution | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | DONE (2025-10-20) | Concelier WebService Guild | CONCELIER-WEB-08-201 | Mirror distribution endpoints – expose domain-scoped index/download APIs with auth/quota. | +| Sprint 8 | Mirror Distribution | ops/devops/TASKS.md | DONE (2025-10-19) | DevOps Guild | DEVOPS-MIRROR-08-001 | Managed mirror deployments for `*.stella-ops.org` – Helm/Compose overlays, CDN, runbooks. | +| Sprint 8 | Plugin Infrastructure | src/__Libraries/StellaOps.Plugin/TASKS.md | DONE (2025-10-20) | Plugin Platform Guild, Authority Core | PLUGIN-DI-08-003 | Refactor Authority identity-provider registry to resolve scoped plugin services on-demand.
Introduce factory pattern aligned with scoped lifetimes decided in coordination workshop. |
+| Sprint 8 | Plugin Infrastructure | src/__Libraries/StellaOps.Plugin/TASKS.md | DONE (2025-10-20) | Plugin Platform Guild, Authority Core | PLUGIN-DI-08-004 | Update Authority plugin loader to activate registrars with DI support and scoped service awareness.
Add two-phase initialization allowing scoped dependencies post-container build. |
+| Sprint 8 | Plugin Infrastructure | src/__Libraries/StellaOps.Plugin/TASKS.md | DONE (2025-10-20) | Plugin Platform Guild, Authority Core | PLUGIN-DI-08-005 | Provide scoped-safe bootstrap execution for Authority plugins.
Implement scope-per-run pattern for hosted bootstrap tasks and document migration guidance. | +| Sprint 10 | DevOps Security | ops/devops/TASKS.md | DONE (2025-10-20) | DevOps Guild | DEVOPS-SEC-10-301 | Address NU1902/NU1903 advisories for `MongoDB.Driver` 2.12.0 and `SharpCompress` 0.23.0; Wave 0A prerequisites confirmed complete before remediation work. | +| Sprint 11 | Signing Chain Bring-up | src/Authority/StellaOps.Authority/TASKS.md | DONE (2025-10-20) | Authority Core & Security Guild | AUTH-DPOP-11-001 | Implement DPoP proof validation + nonce handling for high-value audiences per architecture. | +| Sprint 15 | Notify Foundations | src/Notify/StellaOps.Notify.WebService/TASKS.md | DONE (2025-10-19) | Notify WebService Guild | NOTIFY-WEB-15-103 | Delivery history & test-send endpoints. | +| Sprint 15 | Notify Foundations | src/Notify/__Libraries/StellaOps.Notify.Connectors.Slack/TASKS.md | DONE (2025-10-20) | Notify Connectors Guild | NOTIFY-CONN-SLACK-15-502 | Slack health/test-send support. | +| Sprint 15 | Notify Foundations | src/Notify/__Libraries/StellaOps.Notify.Connectors.Teams/TASKS.md | DONE (2025-10-20) | Notify Connectors Guild | NOTIFY-CONN-TEAMS-15-602 | Teams health/test-send support. | +| Sprint 15 | Notify Foundations | src/Notify/__Libraries/StellaOps.Notify.Connectors.Teams/TASKS.md | DONE (2025-10-20) | Notify Connectors Guild | NOTIFY-CONN-TEAMS-15-604 | Teams health endpoint metadata alignment. | +| Sprint 15 | Notify Foundations | src/Notify/__Libraries/StellaOps.Notify.Connectors.Slack/TASKS.md | DONE (2025-10-20) | Notify Connectors Guild | NOTIFY-CONN-SLACK-15-503 | Package Slack connector as restart-time plug-in (manifest + host registration). | +| Sprint 15 | Notify Foundations | src/Notify/__Libraries/StellaOps.Notify.Connectors.Teams/TASKS.md | DONE (2025-10-20) | Notify Connectors Guild | NOTIFY-CONN-TEAMS-15-603 | Package Teams connector as restart-time plug-in (manifest + host registration). | +| Sprint 15 | Notify Foundations | src/Notify/__Libraries/StellaOps.Notify.Connectors.Email/TASKS.md | DONE (2025-10-20) | Notify Connectors Guild | NOTIFY-CONN-EMAIL-15-703 | Package Email connector as restart-time plug-in (manifest + host registration). | +| Sprint 15 | Notify Foundations | src/Scanner/StellaOps.Scanner.WebService/TASKS.md | DONE (2025-10-20) | Scanner WebService Guild | SCANNER-EVENTS-15-201 | Emit `scanner.report.ready` + `scanner.scan.completed` events. | +| Sprint 15 | Notify Foundations | src/Notify/__Libraries/StellaOps.Notify.Connectors.Webhook/TASKS.md | DONE (2025-10-20) | Notify Connectors Guild | NOTIFY-CONN-WEBHOOK-15-803 | Package Webhook connector as restart-time plug-in (manifest + host registration). | +| Sprint 16 | Scheduler Intelligence | src/Scheduler/__Libraries/StellaOps.Scheduler.Models/TASKS.md | DONE (2025-10-20) | Scheduler Models Guild | SCHED-MODELS-16-103 | Versioning/migration helpers for schedules/runs. | +| Sprint 16 | Scheduler Intelligence | src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/TASKS.md | DONE (2025-10-20) | Scheduler Queue Guild | SCHED-QUEUE-16-401 | Queue abstraction + Redis Streams adapter. | +| Sprint 16 | Scheduler Intelligence | src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/TASKS.md | DONE (2025-10-20) | Scheduler Queue Guild | SCHED-QUEUE-16-402 | NATS JetStream adapter with health probes. 
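Several of the queue tasks here (SCANNER-QUEUE-09-401, SCHED-QUEUE-16-401) centre on the same Redis Streams pattern: producers add entries carrying an idempotency token, consumers read through a consumer group, and messages are acknowledged only after successful processing. A minimal sketch with StackExchange.Redis follows; the stream/group names and payload fields are placeholders, not the actual StellaOps queue contract.

```csharp
using System;
using System.Threading.Tasks;
using StackExchange.Redis;

// Minimal Redis Streams producer/consumer sketch: consumer-group reads plus
// explicit acks give at-least-once delivery; the idempotency token lets the
// handler drop duplicates. Names below are illustrative placeholders.
public static class RedisStreamQueueSketch
{
    public static async Task RunAsync()
    {
        var mux = await ConnectionMultiplexer.ConnectAsync("localhost:6379");
        var db  = mux.GetDatabase();
        RedisKey stream = "scanner:jobs";

        try { await db.StreamCreateConsumerGroupAsync(stream, "scanner-workers", StreamPosition.NewMessages); }
        catch (RedisServerException) { /* BUSYGROUP: the group already exists */ }

        // Producer: enqueue a job with an idempotency token.
        await db.StreamAddAsync(stream, new[]
        {
            new NameValueEntry("payload", "{\"imageDigest\":\"sha256:...\"}"),
            new NameValueEntry("idempotencyKey", Guid.NewGuid().ToString("N"))
        });

        // Consumer: read undelivered entries for this consumer, process, then ack.
        var entries = await db.StreamReadGroupAsync(stream, "scanner-workers", "worker-1", count: 10);
        foreach (var entry in entries)
        {
            // ... process entry.Values here (skip work if the idempotency key was seen before) ...
            await db.StreamAcknowledgeAsync(stream, "scanner-workers", entry.Id);
        }
    }
}
```

Unacknowledged entries stay in the group's pending list, which is what the dead-letter/retry tasks build on.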
| +| Sprint 16 | Scheduler Intelligence | src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/TASKS.md | DONE (2025-10-20) | Scheduler ImpactIndex Guild | SCHED-IMPACT-16-300 | **STUB** ImpactIndex ingest/query using fixtures (to be removed by SP16 completion). | diff --git a/docs/implplan/SPRINTS_PRIOR_20251021.md b/docs/implplan/SPRINTS_PRIOR_20251021.md new file mode 100644 index 00000000..33eb1cc5 --- /dev/null +++ b/docs/implplan/SPRINTS_PRIOR_20251021.md @@ -0,0 +1,88 @@ +This file describe implementation of Stella Ops (docs/README.md). Implementation must respect rules from AGENTS.md (read if you have not). + +| Sprint | Theme | Tasks File Path | Status | Type of Specialist | Task ID | Task Description | +| --- | --- | --- | --- | --- | --- | --- | +| Sprint 7 | Contextual Truth Foundations | docs/TASKS.md | DONE (2025-10-22) | Docs Guild, Concelier WebService | DOCS-CONCELIER-07-201 | Final editorial review and publish pass for Concelier authority toggle documentation (Quickstart + operator guide). | +| Sprint 7 | Contextual Truth Foundations | src/Excititor/StellaOps.Excititor.WebService/TASKS.md | DONE (2025-10-20) | Team Excititor WebService | EXCITITOR-WEB-01-002 | Ingest & reconcile endpoints – scope-enforced `/excititor/init`, `/excititor/ingest/run`, `/excititor/ingest/resume`, `/excititor/reconcile`; regression via `dotnet test … --filter FullyQualifiedName~IngestEndpointsTests`. | +| Sprint 7 | Contextual Truth Foundations | src/Excititor/StellaOps.Excititor.WebService/TASKS.md | DONE (2025-10-20) | Team Excititor WebService | EXCITITOR-WEB-01-004 | Resolve API & signed responses – expose `/excititor/resolve`, return signed consensus/score envelopes, document auth. | +| Sprint 7 | Contextual Truth Foundations | src/Excititor/StellaOps.Excititor.Worker/TASKS.md | DONE (2025-10-21) | Team Excititor Worker | EXCITITOR-WORKER-01-004 | TTL refresh & stability damper – schedule re-resolve loops and guard against status flapping. | +| Sprint 7 | Contextual Truth Foundations | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | DONE (2025-10-21) | Team Core Engine & Data Science | FEEDCORE-ENGINE-07-002 | Noise prior computation service – learn false-positive priors and expose deterministic summaries. | +| Sprint 7 | Contextual Truth Foundations | src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md | DONE (2025-10-21) | Team Core Engine & Storage Analytics | FEEDCORE-ENGINE-07-003 | Unknown state ledger & confidence seeding – persist unknown flags, seed confidence bands, expose query surface. | +| Sprint 7 | Contextual Truth Foundations | src/Excititor/StellaOps.Excititor.WebService/TASKS.md | DONE (2025-10-19) | Team Excititor WebService | EXCITITOR-WEB-01-005 | Mirror distribution endpoints – expose download APIs for downstream Excititor instances. | +| Sprint 7 | Contextual Truth Foundations | src/Excititor/__Libraries/StellaOps.Excititor.Export/TASKS.md | DONE (2025-10-21) | Team Excititor Export | EXCITITOR-EXPORT-01-005 | Score & resolve envelope surfaces – include signed consensus/score artifacts in exports. | +| Sprint 7 | Contextual Truth Foundations | src/Excititor/__Libraries/StellaOps.Excititor.Export/TASKS.md | DONE (2025-10-21) | Team Excititor Export | EXCITITOR-EXPORT-01-006 | Quiet provenance packaging – attach quieted-by statement IDs, signers, justification codes to exports and attestations. 
| +| Sprint 7 | Contextual Truth Foundations | src/Excititor/__Libraries/StellaOps.Excititor.Export/TASKS.md | DONE (2025-10-21) | Team Excititor Export | EXCITITOR-EXPORT-01-007 | Mirror bundle + domain manifest – publish signed consensus bundles for mirrors. | +| Sprint 7 | Contextual Truth Foundations | src/Excititor/StellaOps.Excititor.Connectors.StellaOpsMirror/TASKS.md | DONE (2025-10-21) | Excititor Connectors – Stella | EXCITITOR-CONN-STELLA-07-001 | Excititor mirror connector – ingest signed mirror bundles and map to VexClaims with resume handling. | +| Sprint 7 | Contextual Truth Foundations | src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/TASKS.md | DONE (2025-10-19) | Team Normalization & Storage Backbone | FEEDSTORAGE-DATA-07-001 | Advisory statement & conflict collections – provision Mongo schema/indexes for event-sourced merge. | +| Sprint 7 | Contextual Truth Foundations | src/Web/StellaOps.Web/TASKS.md | DONE (2025-10-21) | UX Specialist, Angular Eng | WEB1.TRIVY-SETTINGS-TESTS | Add headless UI test run (`ng test --watch=false`) and document prerequisites once Angular tooling is chained up. | +| Sprint 8 | Mirror Distribution | src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/TASKS.md | DONE (2025-10-20) | BE-Conn-Stella | FEEDCONN-STELLA-08-001 | Concelier mirror connector – fetch mirror manifest, verify signatures, and hydrate canonical DTOs with resume support. | +| Sprint 8 | Mirror Distribution | src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/TASKS.md | DONE (2025-10-20) | BE-Conn-Stella | FEEDCONN-STELLA-08-002 | Map mirror payloads into canonical advisory DTOs with provenance referencing mirror domain + original source metadata. | +| Sprint 8 | Mirror Distribution | src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/TASKS.md | DONE (2025-10-20) | BE-Conn-Stella | FEEDCONN-STELLA-08-003 | Add incremental cursor + resume support (per-export fingerprint) and document configuration for downstream Concelier instances. | +| Sprint 8 | Plugin Infrastructure | src/__Libraries/StellaOps.Plugin/TASKS.md | DONE (2025-10-21) | Plugin Platform Guild | PLUGIN-DI-08-001 | Scoped service support in plugin bootstrap – added dynamic plugin tests ensuring `[ServiceBinding]` metadata flows through plugin hosts and remains idempotent. | +| Sprint 8 | Plugin Infrastructure | src/__Libraries/StellaOps.Plugin/TASKS.md | DONE (2025-10-20) | Plugin Platform Guild, Authority Core | PLUGIN-DI-08-002.COORD | Authority scoped-service integration handshake
Workshop concluded 2025-10-20 15:00–16:05 UTC; decisions + follow-ups recorded in `docs/dev/authority-plugin-di-coordination.md`. |
+| Sprint 8 | Plugin Infrastructure | src/__Libraries/StellaOps.Plugin/TASKS.md | DONE (2025-10-20) | Plugin Platform Guild, Authority Core | PLUGIN-DI-08-002 | Authority plugin integration updates – scoped identity-provider services with registry handles; regression coverage via scoped registrar/unit tests. |
+| Sprint 8 | Plugin Infrastructure | src/Authority/StellaOps.Authority/TASKS.md | DONE (2025-10-20) | Authority Core, Plugin Platform Guild | AUTH-PLUGIN-COORD-08-002 | Coordinate scoped-service adoption for Authority plug-in registrars
Workshop notes and follow-up backlog captured 2025-10-20 in `docs/dev/authority-plugin-di-coordination.md`. | +| Sprint 9 | Scanner Core Foundations | src/Scanner/StellaOps.Scanner.WebService/TASKS.md | DONE (2025-10-19) | Team Scanner WebService | SCANNER-WEB-09-103 | Progress streaming (SSE/JSONL) with correlation IDs and ISO-8601 UTC timestamps, documented in API reference. | +| Sprint 9 | Scanner Core Foundations | src/Scanner/StellaOps.Scanner.WebService/TASKS.md | DONE (2025-10-19) | Team Scanner WebService | SCANNER-POLICY-09-105 | Policy snapshot loader + schema + OpenAPI (YAML ignore rules, VEX include/exclude, vendor precedence). | +| Sprint 9 | Scanner Core Foundations | src/Scanner/StellaOps.Scanner.WebService/TASKS.md | DONE (2025-10-19) | Team Scanner WebService | SCANNER-POLICY-09-106 | `/reports` verdict assembly (Feedser+Vexer+Policy) + signed response envelope. | +| Sprint 9 | Scanner Core Foundations | src/Scanner/StellaOps.Scanner.WebService/TASKS.md | DONE (2025-10-19) | Team Scanner WebService | SCANNER-POLICY-09-107 | Expose score inputs, config version, and quiet provenance in `/reports` JSON and signed payload. | +| Sprint 9 | DevOps Foundations | ops/devops/TASKS.md | DONE (2025-10-21) | DevOps Guild, Scanner WebService Guild | DEVOPS-SCANNER-09-204 | Surface `SCANNER__EVENTS__*` env config across Compose/Helm and document overrides. | +| Sprint 9 | DevOps Foundations | ops/devops/TASKS.md | DONE (2025-10-21) | DevOps Guild, Notify Guild | DEVOPS-SCANNER-09-205 | Notify smoke job validates Redis stream + Notify deliveries after staging deploys. | +| Sprint 9 | Policy Foundations | src/Policy/__Libraries/StellaOps.Policy/TASKS.md | DONE (2025-10-19) | Policy Guild | POLICY-CORE-09-004 | Versioned scoring config with schema validation, trust table, and golden fixtures. | +| Sprint 9 | Policy Foundations | src/Policy/__Libraries/StellaOps.Policy/TASKS.md | DONE (2025-10-19) | Policy Guild | POLICY-CORE-09-005 | Scoring/quiet engine – compute score, enforce VEX-only quiet rules, emit inputs and provenance. | +| Sprint 9 | Policy Foundations | src/Policy/__Libraries/StellaOps.Policy/TASKS.md | DONE (2025-10-19) | Policy Guild | POLICY-CORE-09-006 | Unknown state & confidence decay – deterministic bands surfaced in policy outputs. | +| Sprint 9 | Docs & Governance | docs/TASKS.md | DONE (2025-10-21) | Platform Events Guild | PLATFORM-EVENTS-09-401 | Embed canonical event samples into contract/integration tests and ensure CI validates payloads against published schemas. | +| Sprint 10 | Benchmarks | src/Bench/StellaOps.Bench/TASKS.md | DONE (2025-10-21) | Bench Guild, Language Analyzer Guild | BENCH-SCANNER-10-002 | Wire real language analyzers into bench harness & refresh baselines post-implementation. | +| Sprint 10 | Scanner Analyzers & SBOM | src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/TASKS.md | DONE (2025-10-21) | Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-302 | Node analyzer handling workspaces/symlinks emitting `pkg:npm`. | +| Sprint 10 | Scanner Analyzers & SBOM | src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/TASKS.md | DONE (2025-10-21) | Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-303 | Python analyzer reading `*.dist-info`, RECORD hashes, entry points. | +| Sprint 10 | Scanner Analyzers & SBOM | src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/TASKS.md | DONE (2025-10-22) | Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-304 | Go analyzer leveraging buildinfo for `pkg:golang` components. 
| +| Sprint 10 | Scanner Analyzers & SBOM | src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md | DONE (2025-10-22) | Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-304E | Plumb Go heuristic counter into Scanner metrics pipeline and alerting. | +| Sprint 10 | Scanner Analyzers & SBOM | src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/TASKS.md | DONE (2025-10-22) | Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-305 | .NET analyzer parsing `*.deps.json`, assembly metadata, RID variants. | +| Sprint 10 | Scanner Analyzers & SBOM | src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/TASKS.md | DONE (2025-10-22) | Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-306 | Rust analyzer detecting crates or falling back to `bin:{sha256}`. | +| Sprint 10 | Scanner Analyzers & SBOM | src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/TASKS.md | DONE (2025-10-19) | Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-307 | Shared language evidence helpers + usage flag propagation. | +| Sprint 10 | Scanner Analyzers & SBOM | src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/TASKS.md | DONE (2025-10-19) | Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-308 | Determinism + fixture harness for language analyzers. | +| Sprint 10 | Scanner Analyzers & SBOM | src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/TASKS.md | DONE (2025-10-21) | Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-309 | Package language analyzers as restart-time plug-ins (manifest + host registration). | +| Sprint 10 | Scanner Analyzers & SBOM | src/Scanner/__Libraries/StellaOps.Scanner.Emit/TASKS.md | DONE (2025-10-22) | Emit Guild | SCANNER-EMIT-10-601 | Compose inventory SBOM (CycloneDX JSON/Protobuf) from layer fragments. | +| Sprint 10 | Scanner Analyzers & SBOM | src/Scanner/__Libraries/StellaOps.Scanner.Emit/TASKS.md | DONE (2025-10-22) | Emit Guild | SCANNER-EMIT-10-602 | Compose usage SBOM leveraging EntryTrace to flag actual usage. | +| Sprint 10 | Scanner Analyzers & SBOM | src/Scanner/__Libraries/StellaOps.Scanner.Emit/TASKS.md | DONE (2025-10-22) | Emit Guild | SCANNER-EMIT-10-603 | Generate BOM index sidecar (purl table + roaring bitmap + usage flag). | +| Sprint 10 | Scanner Analyzers & SBOM | src/Scanner/__Libraries/StellaOps.Scanner.Emit/TASKS.md | DONE (2025-10-22) | Emit Guild | SCANNER-EMIT-10-604 | Package artifacts for export + attestation with deterministic manifests. | +| Sprint 10 | Scanner Analyzers & SBOM | src/Scanner/__Libraries/StellaOps.Scanner.Emit/TASKS.md | DONE (2025-10-22) | Emit Guild | SCANNER-EMIT-10-605 | Emit BOM-Index sidecar schema/fixtures (CRITICAL PATH for SP16). | +| Sprint 10 | Scanner Analyzers & SBOM | src/Scanner/__Libraries/StellaOps.Scanner.Emit/TASKS.md | DONE (2025-10-22) | Emit Guild | SCANNER-EMIT-10-606 | Usage view bit flags integrated with EntryTrace. | +| Sprint 10 | Scanner Analyzers & SBOM | src/Scanner/__Libraries/StellaOps.Scanner.Emit/TASKS.md | DONE (2025-10-22) | Emit Guild | SCANNER-EMIT-10-607 | Embed scoring inputs, confidence band, and quiet provenance in CycloneDX/DSSE artifacts. | +| Sprint 10 | Scanner Analyzers & SBOM | src/Scanner/__Libraries/StellaOps.Scanner.Cache/TASKS.md | DONE (2025-10-19) | Scanner Cache Guild | SCANNER-CACHE-10-101 | Implement layer cache store keyed by layer digest with metadata retention per architecture §3.3. 
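The cache tasks above (SCANNER-CACHE-10-101/102) revolve around a digest-keyed, content-addressable layout with TTL enforcement. Below is a deliberately small sketch of that pattern; the directory layout, file names, and metadata shape are assumptions for illustration, not the StellaOps.Scanner.Cache implementation.

```csharp
using System;
using System.IO;
using System.Security.Cryptography;
using System.Text.Json;

// Hypothetical digest-keyed layer cache: contents are stored under their
// sha256 digest, and a small metadata file records the last access time so a
// sweeper can enforce TTLs.
public sealed class LayerCacheSketch
{
    private readonly string _root;
    public LayerCacheSketch(string root) => _root = root;

    public string Put(byte[] layerBytes)
    {
        var digest = "sha256:" + Convert.ToHexString(SHA256.HashData(layerBytes)).ToLowerInvariant();
        var dir = Path.Combine(_root, digest.Replace(':', '_'));
        Directory.CreateDirectory(dir);
        File.WriteAllBytes(Path.Combine(dir, "layer.tar"), layerBytes);
        File.WriteAllText(Path.Combine(dir, "meta.json"),
            JsonSerializer.Serialize(new { digest, lastAccessUtc = DateTime.UtcNow }));
        return digest;
    }

    public void Sweep(TimeSpan ttl)
    {
        foreach (var dir in Directory.EnumerateDirectories(_root))
        {
            var meta = JsonSerializer.Deserialize<JsonElement>(File.ReadAllText(Path.Combine(dir, "meta.json")));
            var last = meta.GetProperty("lastAccessUtc").GetDateTime();
            if (DateTime.UtcNow - last > ttl)
                Directory.Delete(dir, recursive: true);   // expired entry
        }
    }
}
```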
| +| Sprint 10 | Scanner Analyzers & SBOM | src/Scanner/__Libraries/StellaOps.Scanner.Cache/TASKS.md | DONE (2025-10-19) | Scanner Cache Guild | SCANNER-CACHE-10-102 | Build file CAS with dedupe, TTL enforcement, and offline import/export hooks. | +| Sprint 10 | Scanner Analyzers & SBOM | src/Scanner/__Libraries/StellaOps.Scanner.Cache/TASKS.md | DONE (2025-10-19) | Scanner Cache Guild | SCANNER-CACHE-10-103 | Expose cache metrics/logging and configuration toggles for warm/cold thresholds. | +| Sprint 10 | Scanner Analyzers & SBOM | src/Scanner/__Libraries/StellaOps.Scanner.Cache/TASKS.md | DONE (2025-10-19) | Scanner Cache Guild | SCANNER-CACHE-10-104 | Implement cache invalidation workflows (layer delete, TTL expiry, diff invalidation). | +| Sprint 10 | Scanner Analyzers & SBOM | src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/TASKS.md | DONE (2025-10-19) | OS Analyzer Guild | SCANNER-ANALYZERS-OS-10-201 | Alpine/apk analyzer emitting deterministic components with provenance. | +| Sprint 10 | Scanner Analyzers & SBOM | src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/TASKS.md | DONE (2025-10-19) | OS Analyzer Guild | SCANNER-ANALYZERS-OS-10-202 | Debian/dpkg analyzer mapping packages to purl identity with evidence. | +| Sprint 10 | Scanner Analyzers & SBOM | src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/TASKS.md | DONE (2025-10-19) | OS Analyzer Guild | SCANNER-ANALYZERS-OS-10-203 | RPM analyzer capturing EVR, file listings, provenance. | +| Sprint 10 | Scanner Analyzers & SBOM | src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/TASKS.md | DONE (2025-10-19) | OS Analyzer Guild | SCANNER-ANALYZERS-OS-10-204 | Shared OS evidence helpers for package identity + provenance. | +| Sprint 10 | Scanner Analyzers & SBOM | src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/TASKS.md | DONE (2025-10-19) | OS Analyzer Guild | SCANNER-ANALYZERS-OS-10-205 | Vendor metadata enrichment (source packages, license, CVE hints). | +| Sprint 10 | Scanner Analyzers & SBOM | src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/TASKS.md | DONE (2025-10-19) | OS Analyzer Guild | SCANNER-ANALYZERS-OS-10-206 | Determinism harness + fixtures for OS analyzers. | +| Sprint 10 | Scanner Analyzers & SBOM | src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/TASKS.md | DONE (2025-10-19) | OS Analyzer Guild | SCANNER-ANALYZERS-OS-10-207 | Package OS analyzers as restart-time plug-ins (manifest + host registration). | +| Sprint 10 | Scanner Analyzers & SBOM | src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/TASKS.md | DONE (2025-10-19) | Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-301 | Java analyzer emitting `pkg:maven` with provenance. | +| Sprint 10 | Scanner Analyzers & SBOM | src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/TASKS.md | DONE (2025-10-19) | EntryTrace Guild | SCANNER-ENTRYTRACE-10-401 | POSIX shell AST parser with deterministic output. | +| Sprint 10 | Scanner Analyzers & SBOM | src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/TASKS.md | DONE (2025-10-19) | EntryTrace Guild | SCANNER-ENTRYTRACE-10-402 | Command resolution across layered rootfs with evidence attribution. | +| Sprint 10 | Scanner Analyzers & SBOM | src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/TASKS.md | DONE (2025-10-19) | EntryTrace Guild | SCANNER-ENTRYTRACE-10-403 | Interpreter tracing for shell wrappers to Python/Node/Java launchers. 
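The dpkg analyzer task (SCANNER-ANALYZERS-OS-10-202) maps installed packages to purl identities. The dpkg status database is a plain paragraph-per-package text format, so a compact sketch of the mapping looks like this; the `distro` namespace default and the arch query string are simplifications rather than the analyzer's full identity rules.

```csharp
using System;
using System.Collections.Generic;
using System.IO;

// Illustrative sketch: parse dpkg status paragraphs into pkg:deb purls,
// e.g. pkg:deb/debian/openssl@3.0.11-1%7Edeb12u2?arch=amd64.
public static class DpkgStatusParserSketch
{
    public static IEnumerable<string> ParsePurls(TextReader reader, string distro = "debian")
    {
        string name = null, version = null, arch = null, line;
        while ((line = reader.ReadLine()) != null)
        {
            if (line.Length == 0)                         // a blank line ends a paragraph
            {
                if (name != null && version != null)
                    yield return BuildPurl(distro, name, version, arch);
                name = version = arch = null;
                continue;
            }
            if (line.StartsWith("Package: ")) name = line["Package: ".Length..];
            else if (line.StartsWith("Version: ")) version = line["Version: ".Length..];
            else if (line.StartsWith("Architecture: ")) arch = line["Architecture: ".Length..];
        }
        if (name != null && version != null)              // last paragraph may lack a trailing blank line
            yield return BuildPurl(distro, name, version, arch);
    }

    private static string BuildPurl(string distro, string name, string version, string arch) =>
        $"pkg:deb/{distro}/{name}@{Uri.EscapeDataString(version)}" + (arch != null ? $"?arch={arch}" : "");
}
```

Usage would be `DpkgStatusParserSketch.ParsePurls(new StreamReader("/var/lib/dpkg/status"))` over a mounted layer rootfs.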
| +| Sprint 10 | Scanner Analyzers & SBOM | src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/TASKS.md | DONE (2025-10-19) | EntryTrace Guild | SCANNER-ENTRYTRACE-10-404 | Python entry analyzer (venv shebang, module invocation, usage flag). | +| Sprint 10 | Scanner Analyzers & SBOM | src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/TASKS.md | DONE (2025-10-19) | EntryTrace Guild | SCANNER-ENTRYTRACE-10-405 | Node/Java launcher analyzer capturing script/jar targets. | +| Sprint 10 | Scanner Analyzers & SBOM | src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/TASKS.md | DONE (2025-10-19) | EntryTrace Guild | SCANNER-ENTRYTRACE-10-406 | Explainability + diagnostics for unresolved constructs with metrics. | +| Sprint 10 | Scanner Analyzers & SBOM | src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/TASKS.md | DONE (2025-10-19) | EntryTrace Guild | SCANNER-ENTRYTRACE-10-407 | Package EntryTrace analyzers as restart-time plug-ins (manifest + host registration). | +| Sprint 10 | Scanner Analyzers & SBOM | src/Scanner/__Libraries/StellaOps.Scanner.Diff/TASKS.md | DONE (2025-10-19) | Diff Guild | SCANNER-DIFF-10-501 | Build component differ tracking add/remove/version changes with deterministic ordering. | +| Sprint 10 | Scanner Analyzers & SBOM | src/Scanner/__Libraries/StellaOps.Scanner.Diff/TASKS.md | DONE (2025-10-19) | Diff Guild | SCANNER-DIFF-10-502 | Attribute diffs to introducing/removing layers including provenance evidence. | +| Sprint 10 | Scanner Analyzers & SBOM | src/Scanner/__Libraries/StellaOps.Scanner.Diff/TASKS.md | DONE (2025-10-19) | Diff Guild | SCANNER-DIFF-10-503 | Produce JSON diff output for inventory vs usage views aligned with API contract. | +| Sprint 10 | Samples | samples/TASKS.md | DONE (2025-10-20) | Samples Guild, Scanner Team | SAMPLES-10-001 | Sample images with SBOM/BOM-Index sidecars. | +| Sprint 10 | DevOps Perf | ops/devops/TASKS.md | DONE (2025-10-22) | DevOps Guild | DEVOPS-PERF-10-001 | Perf smoke job ensuring <5 s SBOM compose. | +| Sprint 10 | DevOps Perf | ops/devops/TASKS.md | DONE (2025-10-23) | DevOps Guild | DEVOPS-PERF-10-002 | Publish analyzer bench metrics to Grafana/perf workbook and alarm on ≥20 % regressions. | +| Sprint 10 | Policy Samples | samples/TASKS.md | DONE (2025-10-23) | Samples Guild, Policy Guild | SAMPLES-13-004 | Add policy preview/report fixtures showing confidence bands and unknown-age tags. | +| Sprint 10 | Policy Samples | src/Web/StellaOps.Web/TASKS.md | DONE (2025-10-23) | UI Guild | WEB-POLICY-FIXTURES-10-001 | Wire policy preview/report doc fixtures into UI harness (test utility or Storybook substitute) with type bindings and validation guard so UI stays aligned with documented payloads. | +| Sprint 11 | Signing Chain Bring-up | src/Signer/StellaOps.Signer/TASKS.md | DONE (2025-10-21) | Signer Guild | SIGNER-API-11-101 | `/sign/dsse` pipeline with Authority auth, PoE introspection, release verification, DSSE signing. | +| Sprint 11 | Signing Chain Bring-up | src/Signer/StellaOps.Signer/TASKS.md | DONE (2025-10-21) | Signer Guild | SIGNER-REF-11-102 | `/verify/referrers` endpoint with OCI lookup, caching, and policy enforcement. | +| Sprint 11 | Signing Chain Bring-up | src/Signer/StellaOps.Signer/TASKS.md | DONE (2025-10-21) | Signer Guild | SIGNER-QUOTA-11-103 | Enforce plan quotas, concurrency/QPS limits, artifact size caps with metrics/audit logs. 
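The Signer tasks above centre on DSSE envelopes. DSSE v1 signs a pre-authentication encoding (PAE) of the payload type and body rather than the raw payload, so a minimal sketch of that step looks as follows; key management, Authority checks, and the PoE introspection from SIGNER-API-11-101 are out of scope here.

```csharp
using System;
using System.Security.Cryptography;
using System.Text;
using System.Text.Json;

// Sketch of DSSE signing: build the DSSE v1 pre-authentication encoding (PAE)
// and sign it with an ECDSA P-256 key, then emit a minimal envelope.
public static class DsseSketch
{
    public static string Sign(string payloadType, byte[] payload, ECDsa key)
    {
        var pae = Pae(payloadType, payload);
        var sig = key.SignData(pae, HashAlgorithmName.SHA256);
        var envelope = new
        {
            payloadType,
            payload = Convert.ToBase64String(payload),
            signatures = new[] { new { keyid = "", sig = Convert.ToBase64String(sig) } }
        };
        return JsonSerializer.Serialize(envelope);
    }

    // PAE(type, body) = "DSSEv1" SP len(type) SP type SP len(body) SP body
    private static byte[] Pae(string type, byte[] body)
    {
        var header = Encoding.UTF8.GetBytes($"DSSEv1 {Encoding.UTF8.GetByteCount(type)} {type} {body.Length} ");
        var pae = new byte[header.Length + body.Length];
        Buffer.BlockCopy(header, 0, pae, 0, header.Length);
        Buffer.BlockCopy(body, 0, pae, header.Length, body.Length);
        return pae;
    }
}
```

A caller would create the key with `ECDsa.Create(ECCurve.NamedCurves.nistP256)` and verify by recomputing the PAE over the decoded payload.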
| +| Sprint 11 | Signing Chain Bring-up | src/Authority/StellaOps.Authority/TASKS.md | DONE (2025-10-23) | Authority Core & Security Guild | AUTH-MTLS-11-002 | Add OAuth mTLS client credential support with certificate-bound tokens and introspection updates. | +| Sprint 12 | Runtime Guardrails | src/Scanner/StellaOps.Scanner.WebService/TASKS.md | DONE (2025-10-20) | Scanner WebService Guild | SCANNER-RUNTIME-12-301 | `/runtime/events` ingestion endpoint with validation, batching, storage hooks. | +| Sprint 13 | UX & CLI Experience | src/Cli/StellaOps.Cli/TASKS.md | DONE (2025-10-21) | DevEx/CLI | CLI-OFFLINE-13-006 | Implement offline kit pull/import/status commands with integrity checks. | +| Sprint 13 | UX & CLI Experience | src/Cli/StellaOps.Cli/TASKS.md | DONE (2025-10-22) | DevEx/CLI | CLI-PLUGIN-13-007 | Package non-core CLI verbs as restart-time plug-ins (manifest + loader tests). | +| Sprint 13 | UX & CLI Experience | src/Web/StellaOps.Web/TASKS.md | DONE (2025-10-21) | UX Specialist, Angular Eng, DevEx | WEB1.DEPS-13-001 | Stabilise Angular workspace dependencies for headless CI installs (`npm install`, Chromium handling, docs). | +| Sprint 16 | Scheduler Intelligence | src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/TASKS.md | DONE (2025-10-20) | Scheduler Queue Guild | SCHED-QUEUE-16-403 | Dead-letter handling + metrics. | +| Sprint 18 | Launch Readiness | ops/offline-kit/TASKS.md | DONE (2025-10-22) | Offline Kit Guild, Scanner Guild | DEVOPS-OFFLINE-18-004 | Rebuild Offline Kit bundle with Go analyzer plug-in and refreshed manifest/signature set. | diff --git a/docs/implplan/SPRINTS_PRIOR_20251025.md b/docs/implplan/SPRINTS_PRIOR_20251025.md new file mode 100644 index 00000000..bdeac463 --- /dev/null +++ b/docs/implplan/SPRINTS_PRIOR_20251025.md @@ -0,0 +1,34 @@ +This file describe implementation of Stella Ops (docs/README.md). Implementation must respect rules from AGENTS.md (read if you have not). + +| Sprint | Theme | Tasks File Path | Status | Type of Specialist | Task ID | Task Description | +| --- | --- | --- | --- | --- | --- | --- | +| Sprint 11 | Signing Chain Bring-up | src/Attestor/StellaOps.Attestor/TASKS.md | DONE (2025-10-19) | Attestor Guild | ATTESTOR-API-11-201 | `/rekor/entries` submission pipeline with dedupe, proof acquisition, and persistence. | +| Sprint 11 | Signing Chain Bring-up | src/Attestor/StellaOps.Attestor/TASKS.md | DONE (2025-10-19) | Attestor Guild | ATTESTOR-VERIFY-11-202 | `/rekor/verify` + retrieval endpoints validating signatures and Merkle proofs. | +| Sprint 11 | Signing Chain Bring-up | src/Attestor/StellaOps.Attestor/TASKS.md | DONE (2025-10-19) | Attestor Guild | ATTESTOR-OBS-11-203 | Telemetry, alerting, mTLS hardening, and archive workflow for Attestor. | +| Sprint 11 | Storage Platform Hardening | src/Scanner/__Libraries/StellaOps.Scanner.Storage/TASKS.md | DONE (2025-10-23) | Scanner Storage Guild | SCANNER-STORAGE-11-401 | Migrate scanner object storage integration from MinIO to RustFS with data migration plan. | +| Sprint 11 | UI Integration | src/UI/StellaOps.UI/TASKS.md | DONE (2025-10-23) | UI Guild | UI-ATTEST-11-005 | Attestation visibility (Rekor id, status) on Scan Detail. | +| Sprint 12 | Runtime Guardrails | src/Zastava/__Libraries/StellaOps.Zastava.Core/TASKS.md | DONE (2025-10-23) | Zastava Core Guild | ZASTAVA-CORE-12-201 | Define runtime event/admission DTOs, hashing helpers, and versioning strategy. 
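ZASTAVA-CORE-12-201 pairs runtime event DTOs with hashing helpers, which is easiest to picture as a digest over deterministic JSON. The record shape and names below are hypothetical, and a production helper would pin a full canonicalization scheme (for example RFC 8785 JCS) rather than relying on serializer defaults.

```csharp
using System;
using System.Security.Cryptography;
using System.Text.Json;

// Hypothetical hashing helper: serialize a runtime event with a fixed property
// order and no whitespace, then hash the UTF-8 bytes to get a stable digest.
public sealed record RuntimeEvent(string Node, string ContainerId, string ImageDigest, DateTimeOffset ObservedAt);

public static class RuntimeEventHasher
{
    private static readonly JsonSerializerOptions Options = new()
    {
        WriteIndented = false,
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase
    };

    public static string ComputeDigest(RuntimeEvent evt)
    {
        var bytes = JsonSerializer.SerializeToUtf8Bytes(evt, Options);
        return "sha256:" + Convert.ToHexString(SHA256.HashData(bytes)).ToLowerInvariant();
    }
}
```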
| +| Sprint 12 | Runtime Guardrails | src/Zastava/__Libraries/StellaOps.Zastava.Core/TASKS.md | DONE (2025-10-23) | Zastava Core Guild | ZASTAVA-CORE-12-202 | Provide configuration/logging/metrics utilities shared by Observer/Webhook. | +| Sprint 12 | Runtime Guardrails | src/Zastava/__Libraries/StellaOps.Zastava.Core/TASKS.md | DONE (2025-10-23) | Zastava Core Guild | ZASTAVA-CORE-12-203 | Authority client helpers, OpTok caching, and security guardrails for runtime services. | +| Sprint 12 | Runtime Guardrails | src/Zastava/__Libraries/StellaOps.Zastava.Core/TASKS.md | DONE (2025-10-23) | Zastava Core Guild | ZASTAVA-OPS-12-204 | Operational runbooks, alert rules, and dashboard exports for runtime plane. | +| Sprint 12 | Runtime Guardrails | src/Zastava/StellaOps.Zastava.Observer/TASKS.md | DONE (2025-10-24) | Zastava Observer Guild | ZASTAVA-OBS-12-001 | Container lifecycle watcher emitting deterministic runtime events with buffering. | +| Sprint 12 | Runtime Guardrails | src/Zastava/StellaOps.Zastava.Observer/TASKS.md | DONE (2025-10-24) | Zastava Observer Guild | ZASTAVA-OBS-12-002 | Capture entrypoint traces + loaded libraries, hashing binaries and linking to baseline SBOM. | +| Sprint 12 | Runtime Guardrails | src/Zastava/StellaOps.Zastava.Observer/TASKS.md | DONE (2025-10-24) | Zastava Observer Guild | ZASTAVA-OBS-12-003 | Posture checks for signatures/SBOM/attestation with offline caching. | +| Sprint 12 | Runtime Guardrails | src/Zastava/StellaOps.Zastava.Observer/TASKS.md | DONE (2025-10-24) | Zastava Observer Guild | ZASTAVA-OBS-12-004 | Batch `/runtime/events` submissions with disk-backed buffer and rate limits. | +| Sprint 12 | Runtime Guardrails | src/Zastava/StellaOps.Zastava.Webhook/TASKS.md | DONE (2025-10-24) | Zastava Webhook Guild | ZASTAVA-WEBHOOK-12-101 | Admission controller host with TLS bootstrap and Authority auth. | +| Sprint 12 | Runtime Guardrails | src/Zastava/StellaOps.Zastava.Webhook/TASKS.md | DONE (2025-10-24) | Zastava Webhook Guild | ZASTAVA-WEBHOOK-12-102 | Query Scanner `/policy/runtime`, resolve digests, enforce verdicts. | +| Sprint 12 | Runtime Guardrails | src/Zastava/StellaOps.Zastava.Webhook/TASKS.md | DONE (2025-10-24) | Zastava Webhook Guild | ZASTAVA-WEBHOOK-12-103 | Caching, fail-open/closed toggles, metrics/logging for admission decisions. | +| Sprint 12 | Runtime Guardrails | src/Zastava/StellaOps.Zastava.Webhook/TASKS.md | DONE (2025-10-24) | Zastava Webhook Guild | ZASTAVA-WEBHOOK-12-104 | Wire `/admission` endpoint to runtime policy client and emit allow/deny envelopes. | +| Sprint 12 | Runtime Guardrails | src/Scanner/StellaOps.Scanner.WebService/TASKS.md | DONE (2025-10-24) | Scanner WebService Guild | SCANNER-RUNTIME-12-302 | `/policy/runtime` endpoint joining SBOM baseline + policy verdict, returning admission guidance. | +| Sprint 12 | Runtime Guardrails | src/Scanner/StellaOps.Scanner.WebService/TASKS.md | DONE (2025-10-24) | Scanner WebService Guild | SCANNER-RUNTIME-12-303 | Align `/policy/runtime` verdicts with canonical policy evaluation (Feedser/Vexer). | +| Sprint 12 | Runtime Guardrails | src/Scanner/StellaOps.Scanner.WebService/TASKS.md | DONE (2025-10-24) | Scanner WebService Guild | SCANNER-RUNTIME-12-304 | Integrate attestation verification into runtime policy metadata. | +| Sprint 12 | Runtime Guardrails | src/Scanner/StellaOps.Scanner.WebService/TASKS.md | DONE (2025-10-24) | Scanner WebService Guild | SCANNER-RUNTIME-12-305 | Deliver shared fixtures + e2e validation with Zastava/CLI teams. 
| +| Sprint 13 | UX & CLI Experience | src/UI/StellaOps.UI/TASKS.md | DONE (2025-10-23) | UI Guild | UI-AUTH-13-001 | Integrate Authority OIDC + DPoP flows with session management. | +| Sprint 13 | UX & CLI Experience | src/UI/StellaOps.UI/TASKS.md | DONE (2025-10-25) | UI Guild | UI-NOTIFY-13-006 | Notify panel: channels/rules CRUD, deliveries view, test send. | +| Sprint 13 | Platform Reliability | ops/devops/TASKS.md | DONE (2025-10-25) | DevOps Guild, Platform Leads | DEVOPS-NUGET-13-001 | Wire up .NET 10 preview feeds/local mirrors so `dotnet restore` succeeds offline; document updated NuGet bootstrap. | +| Sprint 15 | Notify Foundations | src/Notify/__Libraries/StellaOps.Notify.Queue/TASKS.md | DONE (2025-10-23) | Notify Queue Guild | NOTIFY-QUEUE-15-401 | Bus abstraction + Redis Streams adapter with ordering/idempotency. | +| Sprint 15 | Notify Foundations | src/Notify/__Libraries/StellaOps.Notify.Queue/TASKS.md | DONE (2025-10-23) | Notify Queue Guild | NOTIFY-QUEUE-15-402 | NATS JetStream adapter with health probes and failover. | +| Sprint 15 | Notify Foundations | src/Notify/__Libraries/StellaOps.Notify.Queue/TASKS.md | DONE (2025-10-23) | Notify Queue Guild | NOTIFY-QUEUE-15-403 | Delivery queue with retry/dead-letter + metrics. | +| Sprint 15 | Notify Foundations | src/Notify/StellaOps.Notify.Worker/TASKS.md | DONE (2025-10-23) | Notify Worker Guild | NOTIFY-WORKER-15-201 | Bus subscription + leasing loop with backoff. | +| Sprint 17 | Symbol Intelligence & Forensics | src/Zastava/StellaOps.Zastava.Observer/TASKS.md | DONE (2025-10-25) | Zastava Observer Guild | ZASTAVA-OBS-17-005 | Collect GNU build-id during runtime observation and attach it to emitted events. | +| Sprint 17 | Symbol Intelligence & Forensics | src/Scanner/StellaOps.Scanner.WebService/TASKS.md | DONE (2025-10-25) | Scanner WebService Guild | SCANNER-RUNTIME-17-401 | Persist runtime build-id observations and expose them for debug-symbol correlation. | diff --git a/SPRINTS_PRIOR_20251027.md b/docs/implplan/SPRINTS_PRIOR_20251027.md similarity index 57% rename from SPRINTS_PRIOR_20251027.md rename to docs/implplan/SPRINTS_PRIOR_20251027.md index 903de924..c32c3696 100644 --- a/SPRINTS_PRIOR_20251027.md +++ b/docs/implplan/SPRINTS_PRIOR_20251027.md @@ -9,21 +9,21 @@ This file describe implementation of Stella Ops (docs/README.md). Implementation | Sprint 14 | Release & Offline Ops | ops/devops/TASKS.md | DONE (2025-10-26) | DevOps Guild, Scanner Guild | DEVOPS-REL-14-004 | Extend release/offline smoke jobs to cover Python analyzer plug-ins (warm/cold, determinism, signing). | | Sprint 14 | Release & Offline Ops | ops/licensing/TASKS.md | DONE (2025-10-26) | Licensing Guild | DEVOPS-LIC-14-004 | Registry token service tied to Authority, plan gating, revocation handling, monitoring. | | Sprint 14 | Release & Offline Ops | ops/offline-kit/TASKS.md | DONE (2025-10-26) | Offline Kit Guild | DEVOPS-OFFLINE-14-002 | Offline kit packaging workflow with integrity verification and documentation. | -| Sprint 15 | Benchmarks | src/StellaOps.Bench/TASKS.md | DONE (2025-10-26) | Bench Guild, Notify Team | BENCH-NOTIFY-15-001 | Notify dispatch throughput bench with results CSV. | -| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.Models/TASKS.md | DONE (2025-10-19) | Scheduler Models Guild | SCHED-MODELS-16-101 | Define Scheduler DTOs & validation. 
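ZASTAVA-OBS-17-005 and the related emit work hinge on the GNU build-id, which lives in the ELF `.note.gnu.build-id` note. Assuming an ELF reader has already located the raw note bytes, decoding the id is small; the helper below is a sketch under that assumption (little-endian layout, `NT_GNU_BUILD_ID = 3`).

```csharp
using System;
using System.Buffers.Binary;

// Sketch: decode a GNU build-id from the raw bytes of an ELF .note.gnu.build-id
// section. Note layout: 4-byte namesz, 4-byte descsz, 4-byte type, then the
// 4-byte-padded name ("GNU\0") followed by the build-id bytes.
public static class GnuBuildIdSketch
{
    public static string Decode(ReadOnlySpan<byte> note)
    {
        if (note.Length < 12) return null;
        int nameSz = BinaryPrimitives.ReadInt32LittleEndian(note);
        int descSz = BinaryPrimitives.ReadInt32LittleEndian(note[4..]);
        int type   = BinaryPrimitives.ReadInt32LittleEndian(note[8..]);
        if (type != 3) return null;                        // not NT_GNU_BUILD_ID

        int nameEnd = 12 + ((nameSz + 3) & ~3);            // name is padded to a 4-byte boundary
        if (note.Length < nameEnd + descSz) return null;

        // Hex build-id, the key used for debug-store lookups.
        return Convert.ToHexString(note.Slice(nameEnd, descSz)).ToLowerInvariant();
    }
}
```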
| -| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.Models/TASKS.md | DONE (2025-10-19) | Scheduler Models Guild | SCHED-MODELS-16-102 | Publish schema docs/sample payloads. | -| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.Storage.Mongo/TASKS.md | DONE (2025-10-19) | Scheduler Storage Guild | SCHED-STORAGE-16-201 | Mongo schemas/indexes for Scheduler state. | -| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.Storage.Mongo/TASKS.md | DONE (2025-10-26) | Scheduler Storage Guild | SCHED-STORAGE-16-202 | Repositories with tenant scoping, TTL, causal consistency. | -| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.Storage.Mongo/TASKS.md | DONE (2025-10-26) | Scheduler Storage Guild | SCHED-STORAGE-16-203 | Audit/run stats materialization for UI. | -| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.ImpactIndex/TASKS.md | DONE (2025-10-26) | Scheduler ImpactIndex Guild | SCHED-IMPACT-16-302 | Query APIs for ResolveByPurls/ResolveByVulns/ResolveAll. | -| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.ImpactIndex/TASKS.md | DONE (2025-10-26) | Scheduler ImpactIndex Guild | SCHED-IMPACT-16-301 | Ingest BOM-Index into roaring bitmap store. | -| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.WebService/TASKS.md | DONE (2025-10-26) | Scheduler WebService Guild | SCHED-WEB-16-102 | Schedules CRUD (cron validation, pause/resume, audit). | -| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.WebService/TASKS.md | DONE (2025-10-26) | Scheduler WebService Guild | SCHED-WEB-16-103 | Runs API (list/detail/cancel) + impact previews. | -| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.WebService/TASKS.md | DONE (2025-10-27) | Scheduler WebService Guild | SCHED-WEB-16-104 | Feedser/Vexer webhook handlers with security enforcement. | +| Sprint 15 | Benchmarks | src/Bench/StellaOps.Bench/TASKS.md | DONE (2025-10-26) | Bench Guild, Notify Team | BENCH-NOTIFY-15-001 | Notify dispatch throughput bench with results CSV. | +| Sprint 16 | Scheduler Intelligence | src/Scheduler/__Libraries/StellaOps.Scheduler.Models/TASKS.md | DONE (2025-10-19) | Scheduler Models Guild | SCHED-MODELS-16-101 | Define Scheduler DTOs & validation. | +| Sprint 16 | Scheduler Intelligence | src/Scheduler/__Libraries/StellaOps.Scheduler.Models/TASKS.md | DONE (2025-10-19) | Scheduler Models Guild | SCHED-MODELS-16-102 | Publish schema docs/sample payloads. | +| Sprint 16 | Scheduler Intelligence | src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/TASKS.md | DONE (2025-10-19) | Scheduler Storage Guild | SCHED-STORAGE-16-201 | Mongo schemas/indexes for Scheduler state. | +| Sprint 16 | Scheduler Intelligence | src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/TASKS.md | DONE (2025-10-26) | Scheduler Storage Guild | SCHED-STORAGE-16-202 | Repositories with tenant scoping, TTL, causal consistency. | +| Sprint 16 | Scheduler Intelligence | src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/TASKS.md | DONE (2025-10-26) | Scheduler Storage Guild | SCHED-STORAGE-16-203 | Audit/run stats materialization for UI. | +| Sprint 16 | Scheduler Intelligence | src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/TASKS.md | DONE (2025-10-26) | Scheduler ImpactIndex Guild | SCHED-IMPACT-16-302 | Query APIs for ResolveByPurls/ResolveByVulns/ResolveAll. 
| +| Sprint 16 | Scheduler Intelligence | src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/TASKS.md | DONE (2025-10-26) | Scheduler ImpactIndex Guild | SCHED-IMPACT-16-301 | Ingest BOM-Index into roaring bitmap store. | +| Sprint 16 | Scheduler Intelligence | src/Scheduler/StellaOps.Scheduler.WebService/TASKS.md | DONE (2025-10-26) | Scheduler WebService Guild | SCHED-WEB-16-102 | Schedules CRUD (cron validation, pause/resume, audit). | +| Sprint 16 | Scheduler Intelligence | src/Scheduler/StellaOps.Scheduler.WebService/TASKS.md | DONE (2025-10-26) | Scheduler WebService Guild | SCHED-WEB-16-103 | Runs API (list/detail/cancel) + impact previews. | +| Sprint 16 | Scheduler Intelligence | src/Scheduler/StellaOps.Scheduler.WebService/TASKS.md | DONE (2025-10-27) | Scheduler WebService Guild | SCHED-WEB-16-104 | Feedser/Vexer webhook handlers with security enforcement. | | Sprint 17 | Symbol Intelligence & Forensics | docs/TASKS.md | DONE (2025-10-26) | Docs Guild | DOCS-RUNTIME-17-004 | Document build-id workflows for SBOMs, runtime events, and debug-store usage. | | Sprint 17 | Symbol Intelligence & Forensics | ops/devops/TASKS.md | DONE (2025-10-26) | DevOps Guild | DEVOPS-REL-17-002 | Ship stripped debug artifacts organised by build-id within release/offline kits. | | Sprint 17 | Symbol Intelligence & Forensics | ops/offline-kit/TASKS.md | DONE (2025-10-26) | Offline Kit Guild, DevOps Guild | DEVOPS-OFFLINE-17-003 | Mirror release debug-store artefacts into Offline Kit packaging and document validation. | -| Sprint 17 | Symbol Intelligence & Forensics | src/StellaOps.Scanner.Emit/TASKS.md | DONE (2025-10-26) | Emit Guild | SCANNER-EMIT-17-701 | Record GNU build-id for ELF components and surface it in SBOM/diff outputs. | +| Sprint 17 | Symbol Intelligence & Forensics | src/Scanner/__Libraries/StellaOps.Scanner.Emit/TASKS.md | DONE (2025-10-26) | Emit Guild | SCANNER-EMIT-17-701 | Record GNU build-id for ELF components and surface it in SBOM/diff outputs. | | Sprint 18 | Launch Readiness | ops/devops/TASKS.md | DONE (2025-10-26) | DevOps Guild | DEVOPS-LAUNCH-18-001 | Production launch cutover rehearsal and runbook publication. | | Sprint 18 | Launch Readiness | ops/offline-kit/TASKS.md | DONE (2025-10-26) | Offline Kit Guild, Scanner Guild | DEVOPS-OFFLINE-18-005 | Rebuild Offline Kit with Python analyzer artefacts and refreshed manifest/signature pair. | | Sprint 19 | Aggregation-Only Contract Enforcement | docs/TASKS.md | DONE (2025-10-26) | Docs Guild | DOCS-AOC-19-001 | Publish aggregation-only contract reference documentation. | @@ -34,7 +34,7 @@ This file describe implementation of Stella Ops (docs/README.md). Implementation | Sprint 19 | Aggregation-Only Contract Enforcement | docs/TASKS.md | DONE (2025-10-26) | Docs Guild, Observability Guild | DOCS-AOC-19-006 | Document new AOC metrics, traces, and logs. | | Sprint 19 | Aggregation-Only Contract Enforcement | docs/TASKS.md | DONE (2025-10-26) | Docs Guild, Authority Core | DOCS-AOC-19-007 | Document new Authority scopes and tenancy enforcement. | | Sprint 19 | Aggregation-Only Contract Enforcement | docs/TASKS.md | DONE (2025-10-26) | Docs Guild, DevOps Guild | DOCS-AOC-19-008 | Update deployment guide with validator enablement and verify user guidance. | -| Sprint 19 | Aggregation-Only Contract Enforcement | src/StellaOps.Authority/TASKS.md | DONE (2025-10-26) | Authority Core & Security Guild | AUTH-AOC-19-001 | Introduce new ingestion/auth scopes across Authority. 
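SCHED-IMPACT-16-301/302 describe ingesting the BOM-Index and answering ResolveByPurls-style queries. The real store uses roaring bitmaps; the sketch below swaps in plain hash sets purely to show the shape of ingest plus union-style resolution, with the type and method bodies invented for illustration.

```csharp
using System.Collections.Generic;
using System.Linq;

// Illustrative impact index: map each purl to the set of image ids containing
// it, then resolve a purl list to the union of affected images. A production
// index would use roaring bitmaps instead of HashSet<int>.
public sealed class ImpactIndexSketch
{
    private readonly Dictionary<string, HashSet<int>> _purlToImages = new();

    public void IngestBomIndex(int imageId, IEnumerable<string> purls)
    {
        foreach (var purl in purls)
        {
            if (!_purlToImages.TryGetValue(purl, out var images))
                _purlToImages[purl] = images = new HashSet<int>();
            images.Add(imageId);
        }
    }

    public IReadOnlyCollection<int> ResolveByPurls(IEnumerable<string> purls) =>
        purls.SelectMany(p => _purlToImages.TryGetValue(p, out var images) ? images : Enumerable.Empty<int>())
             .ToHashSet();
}
```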
| +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Authority/StellaOps.Authority/TASKS.md | DONE (2025-10-26) | Authority Core & Security Guild | AUTH-AOC-19-001 | Introduce new ingestion/auth scopes across Authority. | | Sprint 20 | Policy Engine v2 | docs/TASKS.md | DONE (2025-10-26) | Docs Guild | DOCS-POLICY-20-001 | Publish `/docs/policy/overview.md` with compliance checklist. | | Sprint 20 | Policy Engine v2 | docs/TASKS.md | DONE (2025-10-26) | Docs Guild | DOCS-POLICY-20-002 | Document DSL grammar + examples in `/docs/policy/dsl.md`. | | Sprint 20 | Policy Engine v2 | docs/TASKS.md | DONE (2025-10-26) | Docs Guild, Authority Core | DOCS-POLICY-20-003 | Write `/docs/policy/lifecycle.md` covering workflow + roles. | @@ -51,20 +51,20 @@ This file describe implementation of Stella Ops (docs/README.md). Implementation | Sprint 20 | Policy Engine v2 | ops/devops/TASKS.md | DONE (2025-10-26) | DevOps Guild, QA Guild | DEVOPS-POLICY-20-003 | Add determinism CI job diffing repeated policy runs. | | Sprint 20 | Policy Engine v2 | samples/TASKS.md | DONE (2025-10-26) | Samples Guild, Policy Guild | SAMPLES-POLICY-20-001 | Commit baseline/serverless/internal-only policy samples + fixtures. | | Sprint 20 | Policy Engine v2 | samples/TASKS.md | DONE (2025-10-26) | Samples Guild, UI Guild | SAMPLES-POLICY-20-002 | Produce simulation diff fixtures for UI/CLI tests. | -| Sprint 20 | Policy Engine v2 | src/StellaOps.Authority/TASKS.md | DONE (2025-10-26) | Authority Core & Security Guild | AUTH-POLICY-20-001 | Add new policy scopes (`policy:*`, `findings:read`, `effective:write`). | -| Sprint 20 | Policy Engine v2 | src/StellaOps.Authority/TASKS.md | DONE (2025-10-26) | Authority Core & Security Guild | AUTH-POLICY-20-002 | Enforce Policy Engine service identity and scope checks at gateway. | -| Sprint 20 | Policy Engine v2 | src/StellaOps.Authority/TASKS.md | DONE (2025-10-26) | Authority Core & Docs Guild | AUTH-POLICY-20-003 | Update Authority docs/config samples for policy scopes + workflows. | -| Sprint 20 | Policy Engine v2 | src/StellaOps.Bench/TASKS.md | DONE (2025-10-26) | Bench Guild, Policy Guild | BENCH-POLICY-20-001 | Create policy evaluation benchmark suite + baseline metrics. | -| Sprint 20 | Policy Engine v2 | src/StellaOps.Policy.Engine/TASKS.md | DONE (2025-10-26) | Policy Guild, Platform Guild | POLICY-ENGINE-20-000 | Spin up new Policy Engine service host with DI bootstrap and Authority wiring. | -| Sprint 20 | Policy Engine v2 | src/StellaOps.Policy.Engine/TASKS.md | DONE (2025-10-26) | Policy Guild | POLICY-ENGINE-20-001 | Deliver `stella-dsl@1` parser + IR compiler with diagnostics and checksums. | -| Sprint 20 | Policy Engine v2 | src/StellaOps.Scheduler.Models/TASKS.md | DONE (2025-10-26) | Scheduler Models Guild | SCHED-MODELS-20-001 | Define policy run/diff DTOs + validation helpers. | -| Sprint 21 | Graph Explorer v1 | src/StellaOps.Authority/TASKS.md | DONE (2025-10-26) | Authority Core Guild | AUTH-GRAPH-21-001 | Introduce graph scopes (`graph:*`) with configuration binding and defaults. | -| Sprint 21 | Graph Explorer v1 | src/StellaOps.Authority/TASKS.md | DONE (2025-10-26) | Authority Core Guild | AUTH-GRAPH-21-002 | Enforce graph scopes/identities at gateway with tenant propagation. | -| Sprint 21 | Graph Explorer v1 | src/StellaOps.Authority/TASKS.md | DONE (2025-10-26) | Authority Core & Docs Guild | AUTH-GRAPH-21-003 | Update security docs/config samples for graph access and least privilege. 
| -| Sprint 21 | Graph Explorer v1 | src/StellaOps.Scheduler.Models/TASKS.md | DONE (2025-10-26) | Scheduler Models Guild | SCHED-MODELS-21-001 | Define job DTOs for graph builds/overlay refresh (`GraphBuildJob`, `GraphOverlayJob`) with deterministic serialization and status enums; document in `src/StellaOps.Scheduler.Models/docs/SCHED-MODELS-21-001-GRAPH-JOBS.md`. | -| Sprint 21 | Graph Explorer v1 | src/StellaOps.Scheduler.Models/TASKS.md | DONE (2025-10-26) | Scheduler Models Guild | SCHED-MODELS-21-002 | Publish schema docs/sample payloads for graph job lifecycle. | -| Sprint 22 | Link-Not-Merge v1 | src/StellaOps.Bench/TASKS.md | DONE (2025-10-26) | Bench Guild | BENCH-LNM-22-001 | Benchmark advisory observation ingest/correlation throughput. | -| Sprint 22 | Link-Not-Merge v1 | src/StellaOps.Bench/TASKS.md | DONE (2025-10-26) | Bench Guild | BENCH-LNM-22-002 | Benchmark VEX ingest/correlation latency and event emission. | +| Sprint 20 | Policy Engine v2 | src/Authority/StellaOps.Authority/TASKS.md | DONE (2025-10-26) | Authority Core & Security Guild | AUTH-POLICY-20-001 | Add new policy scopes (`policy:*`, `findings:read`, `effective:write`). | +| Sprint 20 | Policy Engine v2 | src/Authority/StellaOps.Authority/TASKS.md | DONE (2025-10-26) | Authority Core & Security Guild | AUTH-POLICY-20-002 | Enforce Policy Engine service identity and scope checks at gateway. | +| Sprint 20 | Policy Engine v2 | src/Authority/StellaOps.Authority/TASKS.md | DONE (2025-10-26) | Authority Core & Docs Guild | AUTH-POLICY-20-003 | Update Authority docs/config samples for policy scopes + workflows. | +| Sprint 20 | Policy Engine v2 | src/Bench/StellaOps.Bench/TASKS.md | DONE (2025-10-26) | Bench Guild, Policy Guild | BENCH-POLICY-20-001 | Create policy evaluation benchmark suite + baseline metrics. | +| Sprint 20 | Policy Engine v2 | src/Policy/StellaOps.Policy.Engine/TASKS.md | DONE (2025-10-26) | Policy Guild, Platform Guild | POLICY-ENGINE-20-000 | Spin up new Policy Engine service host with DI bootstrap and Authority wiring. | +| Sprint 20 | Policy Engine v2 | src/Policy/StellaOps.Policy.Engine/TASKS.md | DONE (2025-10-26) | Policy Guild | POLICY-ENGINE-20-001 | Deliver `stella-dsl@1` parser + IR compiler with diagnostics and checksums. | +| Sprint 20 | Policy Engine v2 | src/Scheduler/__Libraries/StellaOps.Scheduler.Models/TASKS.md | DONE (2025-10-26) | Scheduler Models Guild | SCHED-MODELS-20-001 | Define policy run/diff DTOs + validation helpers. | +| Sprint 21 | Graph Explorer v1 | src/Authority/StellaOps.Authority/TASKS.md | DONE (2025-10-26) | Authority Core Guild | AUTH-GRAPH-21-001 | Introduce graph scopes (`graph:*`) with configuration binding and defaults. | +| Sprint 21 | Graph Explorer v1 | src/Authority/StellaOps.Authority/TASKS.md | DONE (2025-10-26) | Authority Core Guild | AUTH-GRAPH-21-002 | Enforce graph scopes/identities at gateway with tenant propagation. | +| Sprint 21 | Graph Explorer v1 | src/Authority/StellaOps.Authority/TASKS.md | DONE (2025-10-26) | Authority Core & Docs Guild | AUTH-GRAPH-21-003 | Update security docs/config samples for graph access and least privilege. 
| +| Sprint 21 | Graph Explorer v1 | src/Scheduler/__Libraries/StellaOps.Scheduler.Models/TASKS.md | DONE (2025-10-26) | Scheduler Models Guild | SCHED-MODELS-21-001 | Define job DTOs for graph builds/overlay refresh (`GraphBuildJob`, `GraphOverlayJob`) with deterministic serialization and status enums; document in `src/Scheduler/__Libraries/StellaOps.Scheduler.Models/docs/SCHED-MODELS-21-001-GRAPH-JOBS.md`. | +| Sprint 21 | Graph Explorer v1 | src/Scheduler/__Libraries/StellaOps.Scheduler.Models/TASKS.md | DONE (2025-10-26) | Scheduler Models Guild | SCHED-MODELS-21-002 | Publish schema docs/sample payloads for graph job lifecycle. | +| Sprint 22 | Link-Not-Merge v1 | src/Bench/StellaOps.Bench/TASKS.md | DONE (2025-10-26) | Bench Guild | BENCH-LNM-22-001 | Benchmark advisory observation ingest/correlation throughput. | +| Sprint 22 | Link-Not-Merge v1 | src/Bench/StellaOps.Bench/TASKS.md | DONE (2025-10-26) | Bench Guild | BENCH-LNM-22-002 | Benchmark VEX ingest/correlation latency and event emission. | | Sprint 23 | StellaOps Console | docs/TASKS.md | DONE (2025-10-26) | Docs Guild | DOCS-CONSOLE-23-001 | Publish `/docs/ui/console-overview.md` (IA, tenant model, filters, AOC alignment). | | Sprint 23 | StellaOps Console | docs/TASKS.md | DONE (2025-10-26) | Docs Guild | DOCS-CONSOLE-23-002 | Author `/docs/ui/navigation.md` with route map, filters, keyboard shortcuts, deep links. | | Sprint 23 | StellaOps Console | docs/TASKS.md | DONE (2025-10-26) | Docs Guild | DOCS-CONSOLE-23-003 | Document `/docs/ui/sbom-explorer.md` covering catalog, graph, overlays, exports. | @@ -75,10 +75,10 @@ This file describe implementation of Stella Ops (docs/README.md). Implementation | Sprint 23 | StellaOps Console | docs/TASKS.md | DONE (2025-10-26) | Docs Guild | DOCS-CONSOLE-23-008 | Draft `/docs/ui/admin.md` covering tenants, roles, tokens, integrations, fresh-auth. | | Sprint 23 | StellaOps Console | docs/TASKS.md | DONE (2025-10-27) | Docs Guild | DOCS-CONSOLE-23-009 | Publish `/docs/ui/downloads.md` aligning manifest with commands and offline flow. | | Sprint 23 | StellaOps Console | docs/TASKS.md | DONE (2025-10-27) | Docs Guild, Deployment Guild, Console Guild | DOCS-CONSOLE-23-010 | Write `/docs/deploy/console.md` (Helm, ingress, TLS, env vars, health checks). | -| Sprint 28 | Graph Explorer | src/StellaOps.Scheduler.WebService/TASKS.md | DONE (2025-10-26) | Scheduler WebService Guild | SCHED-WEB-21-001 | Provide graph build/overlay job APIs; see `docs/SCHED-WEB-21-001-GRAPH-APIS.md`. | -| Sprint 28 | Graph Explorer | src/StellaOps.Scheduler.WebService/TASKS.md | DONE (2025-10-26) | Scheduler WebService Guild | SCHED-WEB-21-002 | Provide overlay lag metrics endpoint/webhook; see `docs/SCHED-WEB-21-001-GRAPH-APIS.md`. | -| Sprint 28 | Graph Explorer | src/StellaOps.Scheduler.WebService/TASKS.md | DONE (2025-10-26) | Scheduler WebService Guild, Authority Core Guild | SCHED-WEB-21-003 | Replace header auth with Authority scopes using `StellaOpsScopes`; dev fallback only when `Scheduler:Authority:Enabled=false`. | +| Sprint 28 | Graph Explorer | src/Scheduler/StellaOps.Scheduler.WebService/TASKS.md | DONE (2025-10-26) | Scheduler WebService Guild | SCHED-WEB-21-001 | Provide graph build/overlay job APIs; see `docs/SCHED-WEB-21-001-GRAPH-APIS.md`. | +| Sprint 28 | Graph Explorer | src/Scheduler/StellaOps.Scheduler.WebService/TASKS.md | DONE (2025-10-26) | Scheduler WebService Guild | SCHED-WEB-21-002 | Provide overlay lag metrics endpoint/webhook; see `docs/SCHED-WEB-21-001-GRAPH-APIS.md`. 
|
+| Sprint 28 | Graph Explorer | src/Scheduler/StellaOps.Scheduler.WebService/TASKS.md | DONE (2025-10-26) | Scheduler WebService Guild, Authority Core Guild | SCHED-WEB-21-003 | Replace header auth with Authority scopes using `StellaOpsScopes`; dev fallback only when `Scheduler:Authority:Enabled=false`. |
 | Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | ops/devops/TASKS.md | DONE (2025-10-26) | DevOps Guild | DEVOPS-OBS-50-001 | Deploy default OpenTelemetry collector manifests with secure OTLP pipeline. |
 | Sprint 50 | Observability & Forensics Phase 1 – Baseline Telemetry | ops/devops/TASKS.md | DONE (2025-10-26) | DevOps Guild | DEVOPS-OBS-50-003 | Package telemetry stack configs for offline/air-gapped installs with signatures. |
-| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.WebService/TASKS.md | DONE (2025-10-27) | Scheduler WebService Guild | SCHED-WEB-16-101 | Minimal API host with Authority enforcement. |
-| Sprint 16 | Scheduler Intelligence | src/StellaOps.Scheduler.Worker/TASKS.md | DONE (2025-10-27) | Scheduler Worker Guild | SCHED-WORKER-16-202 | ImpactIndex targeting and shard planning. |
+| Sprint 16 | Scheduler Intelligence | src/Scheduler/StellaOps.Scheduler.WebService/TASKS.md | DONE (2025-10-27) | Scheduler WebService Guild | SCHED-WEB-16-101 | Minimal API host with Authority enforcement. |
+| Sprint 16 | Scheduler Intelligence | src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md | DONE (2025-10-27) | Scheduler Worker Guild | SCHED-WORKER-16-202 | ImpactIndex targeting and shard planning. |
diff --git a/docs/implplan/SPRINTS_PRIOR_20251028.md b/docs/implplan/SPRINTS_PRIOR_20251028.md
new file mode 100644
index 00000000..1425e408
--- /dev/null
+++ b/docs/implplan/SPRINTS_PRIOR_20251028.md
@@ -0,0 +1,26 @@
+This file describes the implementation of Stella Ops (docs/README.md). Implementation must respect the rules in AGENTS.md (read it if you have not).
+
+| Sprint | Theme | Tasks File Path | Status | Type of Specialist | Task ID | Task Description |
+| --- | --- | --- | --- | --- | --- | --- |
+| Sprint 16 | Scheduler Intelligence | src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md | DONE (2025-10-27) | Scheduler Worker Guild | SCHED-WORKER-16-203 | Runner execution invoking Scanner analysis/content refresh. |
+| Sprint 16 | Scheduler Intelligence | src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md | DONE (2025-10-27) | Scheduler Worker Guild | SCHED-WORKER-16-204 | Emit rescan/report events for Notify/UI. |
+| Sprint 16 | Scheduler Intelligence | src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md | DONE (2025-10-27) | Scheduler Worker Guild | SCHED-WORKER-16-205 | Metrics/telemetry for Scheduler planners/runners. |
+| Sprint 19 | Aggregation-Only Contract Enforcement | src/Authority/StellaOps.Authority/TASKS.md | DONE (2025-10-27) | Authority Core & Security Guild | AUTH-AOC-19-002 | Enforce tenant claim propagation and cross-tenant guardrails. |
+> AUTH-AOC-19-002: Tenant metadata now flows through rate limiter/audit/token persistence; password grant scope/tenant enforcement landed. Docs/stakeholder walkthrough pending.
+> 2025-10-27 Update: Ingestion scopes require tenant assignment; access tokens propagate tenant claims and reject cross-tenant mismatches with coverage.
+| Sprint 19 | Aggregation-Only Contract Enforcement | src/Authority/StellaOps.Authority/TASKS.md | DONE (2025-10-27) | Authority Core & Docs Guild | AUTH-AOC-19-003 | Update Authority docs/config samples for new scopes.
| +> AUTH-AOC-19-003: Scope catalogue, console/CLI docs, and sample config updated to require `aoc:verify` plus read scopes; verification clients now explicitly include tenant hints. Authority test run remains blocked on Concelier build failure (`ImmutableHashSet`), previously noted under AUTH-AOC-19-002. +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Concelier/StellaOps.Concelier.WebService/TASKS.md | DONE (2025-10-28) | Concelier WebService Guild | CONCELIER-WEB-AOC-19-001 | Implement raw advisory ingestion endpoints with AOC guard and verifier. | +| Sprint 19 | Aggregation-Only Contract Enforcement | src/Excititor/StellaOps.Excititor.Worker/TASKS.md | DONE (2025-10-28) | QA Guild | EXCITITOR-WORKER-AOC-19-003 | Expand worker tests for deterministic batching and restart safety. | +| Sprint 20 | Policy Engine v2 | ops/devops/TASKS.md | DONE (2025-10-27) | DevOps Guild, Scheduler Guild, CLI Guild | DEVOPS-POLICY-20-004 | Automate policy schema exports and change notifications for CLI consumers. | +| Sprint 20 | Policy Engine v2 | src/Cli/StellaOps.Cli/TASKS.md | DONE (2025-10-27) | DevEx/CLI Guild | CLI-POLICY-20-002 | Implement `stella policy simulate` with diff outputs + exit codes. | +| Sprint 21 | Graph Explorer v1 | src/Cartographer/StellaOps.Cartographer/TASKS.md | DONE (2025-10-27) | Cartographer Guild | CARTO-GRAPH-21-010 | Replace hard-coded `graph:*` scope strings with shared constants once graph services integrate. | +| Sprint 21 | Graph Explorer v1 | src/Scheduler/StellaOps.Scheduler.WebService/TASKS.md | DONE (2025-10-26) | Scheduler WebService Guild | SCHED-WEB-21-002 | Expose overlay lag metrics and job completion hooks for Cartographer. | +| Sprint 23 | StellaOps Console | docs/TASKS.md | DONE (2025-10-28) | Docs Guild | DOCS-CONSOLE-23-011 | Update `/docs/install/docker.md` to include console image, compose/Helm/offline examples. | +| Sprint 23 | StellaOps Console | docs/TASKS.md | DONE (2025-10-28) | Docs Guild | DOCS-CONSOLE-23-012 | Publish `/docs/security/console-security.md` covering OIDC, scopes, CSP, evidence handling. | +| Sprint 23 | StellaOps Console | docs/TASKS.md | DONE (2025-10-28) | Docs Guild | DOCS-CONSOLE-23-013 | Write `/docs/observability/ui-telemetry.md` cataloguing metrics/logs/dashboards/alerts. | +| Sprint 23 | StellaOps Console | docs/TASKS.md | DONE (2025-10-28) | Docs Guild | DOCS-CONSOLE-23-014 | Maintain `/docs/cli-vs-ui-parity.md` matrix with CI drift detection guidance. | +| Sprint 23 | StellaOps Console | docs/TASKS.md | DONE (2025-10-28) | Docs Guild | DOCS-CONSOLE-23-016 | Refresh `/docs/accessibility.md` with console keyboard flows, tokens, testing tools.
2025-10-28: Published guide covering keyboard matrix, screen-reader behaviour, colour tokens, testing workflow, offline guidance, and compliance checklist. | +| Sprint 25 | Exceptions v1 | docs/TASKS.md | DONE (2025-10-27) | Docs Guild | DOCS-EXC-25-004 | Document policy exception effects + simulation. | +| Sprint 25 | Exceptions v1 | src/Policy/StellaOps.Policy.Engine/TASKS.md | DONE (2025-10-27) | Policy Guild | POLICY-ENGINE-70-001 | Add exception evaluation layer with specificity + effects. | +| Sprint 25 | Exceptions v1 | src/Policy/__Libraries/StellaOps.Policy/TASKS.md | DONE (2025-10-27) | Policy Guild | POLICY-EXC-25-001 | Extend SPL schema to reference exception effects and routing. | diff --git a/docs/ingestion/aggregation-only-contract.md b/docs/ingestion/aggregation-only-contract.md index 44a8cde7..cc79b8e9 100644 --- a/docs/ingestion/aggregation-only-contract.md +++ b/docs/ingestion/aggregation-only-contract.md @@ -1,180 +1,180 @@ -# Aggregation-Only Contract Reference - -> The Aggregation-Only Contract (AOC) is the governing rule set that keeps StellaOps ingestion services deterministic, policy-neutral, and auditable. It applies to Concelier, Excititor, and any future collectors that write raw advisory or VEX documents. - -## 1. Purpose and Scope - -- Defines the canonical behaviour for `advisory_raw` and `vex_raw` collections and the linkset hints they may emit. -- Applies to every ingestion runtime (`StellaOps.Concelier.*`, `StellaOps.Excititor.*`), the Authority scopes that guard them, and the DevOps/QA surfaces that verify compliance. -- Complements the high-level architecture in [Concelier](../ARCHITECTURE_CONCELIER.md) and Authority enforcement documented in [Authority Architecture](../ARCHITECTURE_AUTHORITY.md). -- Paired guidance: see the guard-rail checkpoints in [AOC Guardrails](../aoc/aoc-guardrails.md) and CLI usage that will land in `/docs/cli/` as part of Sprint 19 follow-up. - -## 2. Philosophy and Goals - -- Preserve upstream truth: ingestion only captures immutable raw facts plus provenance, never derived severity or policy decisions. -- Defer interpretation: Policy Engine and downstream overlays remain the sole writers of materialised findings, severity, consensus, or risk scores. -- Make every write explainable: provenance, signatures, and content hashes are required so operators can prove where each fact originated. -- Keep outputs reproducible: identical inputs must yield identical documents, hashes, and linksets across replays and air-gapped installs. - -## 3. Contract Invariants - -| # | Invariant | What it forbids or requires | Enforcement surfaces | -|---|-----------|-----------------------------|----------------------| -| 1 | No derived severity at ingest | Reject top-level keys such as `severity`, `cvss`, `effective_status`, `consensus_provider`, `risk_score`. Raw upstream CVSS remains inside `content.raw`. | Mongo schema validator, `AOCWriteGuard`, Roslyn analyzer, `stella aoc verify`. | -| 2 | No merges or opinionated dedupe | Each upstream document persists on its own; ingestion never collapses multiple vendors into one document. | Repository interceptors, unit/fixture suites. | -| 3 | Provenance is mandatory | `source.*`, `upstream.*`, and `signature` metadata must be present; missing provenance triggers `ERR_AOC_004`. | Schema validator, guard, CLI verifier. | -| 4 | Idempotent upserts | Writes keyed by `(vendor, upstream_id, content_hash)` either no-op or insert a new revision with `supersedes`. Duplicate hashes map to the same document. 
| Repository guard, storage unique index, CI smoke tests. | -| 5 | Append-only revisions | Updates create a new document with `supersedes` pointer; no in-place mutation of content. | Mongo schema (`supersedes` format), guard, data migration scripts. | -| 6 | Linkset only | Ingestion may compute link hints (`purls`, `cpes`, IDs) to accelerate joins, but must not transform or infer severity or policy. | Linkset builders reviewed via fixtures and analyzers. | -| 7 | Policy-only effective findings | Only Policy Engine identities can write `effective_finding_*`; ingestion callers receive `ERR_AOC_006` if they attempt it. | Authority scopes, Policy Engine guard. | -| 8 | Schema safety | Unknown top-level keys reject with `ERR_AOC_007`; timestamps use ISO 8601 UTC strings; tenant is required. | Mongo validator, JSON schema tests. | -| 9 | Clock discipline | Collectors stamp `fetched_at` and `received_at` monotonically per batch to support reproducibility windows. | Collector contracts, QA fixtures. | - -## 4. Raw Schemas - -### 4.1 `advisory_raw` - -| Field | Type | Notes | -|-------|------|-------| -| `_id` | string | `advisory_raw:{source}:{upstream_id}:{revision}`; deterministic and tenant-scoped. | -| `tenant` | string | Required; injected by Authority middleware and asserted by schema validator. | -| `source.vendor` | string | Provider identifier (e.g., `redhat`, `osv`, `ghsa`). | -| `source.stream` | string | Connector stream name (`csaf`, `osv`, etc.). | -| `source.api` | string | Absolute URI of upstream document; stored for traceability. | -| `source.collector_version` | string | Semantic version of the collector. | -| `upstream.upstream_id` | string | Vendor- or ecosystem-provided identifier (CVE, GHSA, vendor ID). | -| `upstream.document_version` | string | Upstream issued timestamp or revision string. | -| `upstream.fetched_at` / `received_at` | string | ISO 8601 UTC timestamps recorded by the collector. | -| `upstream.content_hash` | string | `sha256:` digest of the raw payload used for idempotency. | -| `upstream.signature` | object | Required structure storing `present`, `format`, `key_id`, `sig`; even unsigned payloads set `present: false`. | -| `content.format` | string | Source format (`CSAF`, `OSV`, etc.). | -| `content.spec_version` | string | Upstream spec version when known. | -| `content.raw` | object | Full upstream payload, untouched except for transport normalisation. | -| `identifiers` | object | Normalised identifiers (`cve`, `ghsa`, `aliases`, etc.) derived losslessly from raw content. | -| `linkset` | object | Join hints (see section 4.3). | -| `supersedes` | string or null | Points to previous revision of same upstream doc when content hash changes. | - -### 4.2 `vex_raw` - -| Field | Type | Notes | -|-------|------|-------| -| `_id` | string | `vex_raw:{source}:{upstream_id}:{revision}`. | -| `tenant` | string | Required; matches advisory collection requirements. | -| `source.*` | object | Same shape and requirements as `advisory_raw`. | -| `upstream.*` | object | Includes `document_version`, timestamps, `content_hash`, and `signature`. | -| `content.format` | string | Typically `CycloneDX-VEX` or `CSAF-VEX`. | -| `content.raw` | object | Entire upstream VEX payload. | -| `identifiers.statements` | array | Normalised statement summaries (IDs, PURLs, status, justification) to accelerate policy joins. | -| `linkset` | object | CVEs, GHSA IDs, and PURLs referenced in the document. | -| `supersedes` | string or null | Same convention as advisory documents. 
| - -### 4.3 Linkset Fields - -- `purls`: fully qualified Package URLs extracted from raw ranges or product nodes. -- `cpes`: Common Platform Enumerations when upstream docs provide them. -- `aliases`: Any alternate advisory identifiers present in the payload. -- `references`: Array of `{ type, url }` pairs pointing back to vendor advisories, patches, or exploits. -- `reconciled_from`: Provenance of linkset entries (JSON Pointer or field origin) to make automated checks auditable. - -Canonicalisation rules: -- Package URLs are rendered in canonical form without qualifiers/subpaths (`pkg:type/namespace/name@version`). -- CPE values are normalised to the 2.3 binding (`cpe:2.3:part:vendor:product:version:*:*:*:*:*:*:*`). - -### 4.4 `advisory_observations` - -`advisory_observations` is an immutable projection of the validated raw document used by Link‑Not‑Merge overlays. Fields mirror the JSON contract surfaced by `StellaOps.Concelier.Models.Observations.AdvisoryObservation`. - -| Field | Type | Notes | -|-------|------|-------| -| `_id` | string | Deterministic observation id — `{tenant}:{source.vendor}:{upstreamId}:{revision}`. | -| `tenant` | string | Lower-case tenant identifier. | -| `source.vendor` / `source.stream` | string | Connector identity (e.g., `vendor/redhat`, `ecosystem/osv`). | -| `source.api` | string | Absolute URI the connector fetched from. | -| `source.collectorVersion` | string | Optional semantic version of the connector build. | -| `upstream.upstream_id` | string | Advisory identifier as issued by the provider (CVE, vendor ID, etc.). | -| `upstream.document_version` | string | Upstream revision/version string. | -| `upstream.fetchedAt` / `upstream.receivedAt` | datetime | UTC timestamps recorded by the connector. | -| `upstream.contentHash` | string | `sha256:` digest used for idempotency. | -| `upstream.signature` | object | `{present, format?, keyId?, signature?}` describing upstream signature material. | -| `content.format` / `content.specVersion` | string | Raw payload format metadata (CSAF, OSV, JSON, etc.). | -| `content.raw` | object | Full upstream document stored losslessly (Relaxed Extended JSON). | -| `content.metadata` | object | Optional connector-specific metadata (batch ids, hints). | -| `linkset.aliases` | array | Normalized aliases (lower-case, sorted). | -| `linkset.purls` | array | Normalized PURLs extracted from the document. | -| `linkset.cpes` | array | Normalized CPE URIs. | -| `linkset.references` | array | `{ type, url }` pairs (type lower-case). | -| `createdAt` | datetime | Timestamp when Concelier persisted the observation. | -| `attributes` | object | Optional provenance attributes keyed by connector. | - -## 5. Error Model - -| Code | Description | HTTP status | Surfaces | -|------|-------------|-------------|----------| -| `ERR_AOC_001` | Forbidden field detected (severity, cvss, effective data). | 400 | Ingestion APIs, CLI verifier, CI guard. | -| `ERR_AOC_002` | Merge attempt detected (multiple upstream sources fused into one document). | 400 | Ingestion APIs, CLI verifier. | -| `ERR_AOC_003` | Idempotency violation (duplicate without supersedes pointer). | 409 | Repository guard, Mongo unique index, CLI verifier. | -| `ERR_AOC_004` | Missing provenance metadata (`source`, `upstream`, `signature`). | 422 | Schema validator, ingestion endpoints. | -| `ERR_AOC_005` | Signature or checksum mismatch. | 422 | Collector validation, CLI verifier. | -| `ERR_AOC_006` | Attempt to persist derived findings from ingestion context. 
| 403 | Policy engine guard, Authority scopes. | -| `ERR_AOC_007` | Unknown top-level fields (schema violation). | 400 | Mongo validator, CLI verifier. | - -Consumers should map these codes to CLI exit codes and structured log events so automation can fail fast and produce actionable guidance. - -## 6. API and Tooling Interfaces - -- **Concelier ingestion** (`StellaOps.Concelier.WebService`) - - `POST /ingest/advisory`: accepts upstream payload metadata; server-side guard constructs and persists raw document. - - `GET /advisories/raw/{id}` and filterable list endpoints expose raw documents for debugging and offline analysis. - - `POST /aoc/verify`: runs guard checks over recent documents and returns summary totals plus first violations. -- **Excititor ingestion** (`StellaOps.Excititor.WebService`) mirrors the same surface for VEX documents. -- **CLI workflows** (`stella aoc verify`, `stella sources ingest --dry-run`) surface pre-flight verification; documentation will live in `/docs/cli/` alongside Sprint 19 CLI updates. -- **Authority scopes**: new `advisory:ingest`, `advisory:read`, `vex:ingest`, and `vex:read` scopes enforce least privilege; see [Authority Architecture](../ARCHITECTURE_AUTHORITY.md) for scope grammar. - -## 7. Idempotency and Supersedes Rules - -1. Compute `content_hash` before any transformation; use it with `(source.vendor, upstream.upstream_id)` to detect duplicates. -2. If a document with the same hash already exists, skip the write and log a no-op. -3. When a new hash arrives for an existing upstream document, insert a new record and set `supersedes` to the previous `_id`. -4. Keep supersedes chains acyclic; collectors must resolve conflicts by rewinding before they insert. -5. Expose idempotency counters via metrics (`ingestion_write_total{result=ok|noop}`) to catch regressions early. - -## 8. Migration Playbook - -1. Freeze ingestion writes except for raw pass-through paths while deploying schema validators. -2. Snapshot existing collections to `_backup_*` for rollback safety. -3. Strip forbidden fields from historical documents into a temporary `advisory_view_legacy` used only during transition. -4. Enable Mongo JSON schema validators for `advisory_raw` and `vex_raw`. -5. Run collectors in `--dry-run` to confirm only allowed keys appear; fix violations before lifting the freeze. -6. Point Policy Engine to consume exclusively from raw collections and compute derived outputs downstream. -7. Delete legacy normalisation paths from ingestion code and enable runtime guards plus CI linting. -8. Roll forward CLI, Console, and dashboards so operators can monitor AOC status end-to-end. - -## 9. Observability and Diagnostics - -- **Metrics**: `ingestion_write_total{result=ok|reject}`, `aoc_violation_total{code}`, `ingestion_signature_verified_total{result}`, `ingestion_latency_seconds`, `advisory_revision_count`. -- **Traces**: spans `ingest.fetch`, `ingest.transform`, `ingest.write`, and `aoc.guard` with correlation IDs shared across workers. -- **Logs**: structured entries must include `tenant`, `source.vendor`, `upstream.upstream_id`, `content_hash`, and `violation_code` when applicable. -- **Dashboards**: DevOps should add panels for violation counts, signature failures, supersedes growth, and CLI verifier outcomes for each tenant. - -## 10. Security and Tenancy Checklist - -- Enforce Authority scopes (`advisory:ingest`, `vex:ingest`, `advisory:read`, `vex:read`) and require tenant claims on every request. 
-- Maintain pinned trust stores for signature verification; capture verification result in metrics and logs. -- Ensure collectors never log secrets or raw authentication headers; redact tokens before persistence. -- Validate that Policy Engine remains the only identity with permission to write `effective_finding_*` documents. -- Verify offline bundles include the raw collections, guard configuration, and verifier binaries so air-gapped installs can audit parity. -- Document operator steps for recovering from violations, including rollback to superseded revisions and re-running policy evaluation. - -## 11. Compliance Checklist - -- [ ] Deterministic guard enabled in Concelier and Excititor repositories. -- [ ] Mongo validators deployed for `advisory_raw` and `vex_raw`. -- [ ] Authority scopes and tenant enforcement verified via integration tests. -- [ ] CLI and CI pipelines run `stella aoc verify` against seeded snapshots. -- [ ] Observability feeds (metrics, logs, traces) wired into dashboards with alerts. -- [ ] Offline kit instructions updated to bundle validators and verifier tooling. -- [ ] Security review recorded covering ingestion, tenancy, and rollback procedures. - ---- - -*Last updated: 2025-10-27 (Sprint 19).* +# Aggregation-Only Contract Reference + +> The Aggregation-Only Contract (AOC) is the governing rule set that keeps StellaOps ingestion services deterministic, policy-neutral, and auditable. It applies to Concelier, Excititor, and any future collectors that write raw advisory or VEX documents. + +## 1. Purpose and Scope + +- Defines the canonical behaviour for `advisory_raw` and `vex_raw` collections and the linkset hints they may emit. +- Applies to every ingestion runtime (`StellaOps.Concelier.*`, `StellaOps.Excititor.*`), the Authority scopes that guard them, and the DevOps/QA surfaces that verify compliance. +- Complements the high-level architecture in [Concelier](../ARCHITECTURE_CONCELIER.md) and Authority enforcement documented in [Authority Architecture](../ARCHITECTURE_AUTHORITY.md). +- Paired guidance: see the guard-rail checkpoints in [AOC Guardrails](../aoc/aoc-guardrails.md) and CLI usage that will land in `/docs/cli/` as part of Sprint 19 follow-up. + +## 2. Philosophy and Goals + +- Preserve upstream truth: ingestion only captures immutable raw facts plus provenance, never derived severity or policy decisions. +- Defer interpretation: Policy Engine and downstream overlays remain the sole writers of materialised findings, severity, consensus, or risk scores. +- Make every write explainable: provenance, signatures, and content hashes are required so operators can prove where each fact originated. +- Keep outputs reproducible: identical inputs must yield identical documents, hashes, and linksets across replays and air-gapped installs. + +## 3. Contract Invariants + +| # | Invariant | What it forbids or requires | Enforcement surfaces | +|---|-----------|-----------------------------|----------------------| +| 1 | No derived severity at ingest | Reject top-level keys such as `severity`, `cvss`, `effective_status`, `consensus_provider`, `risk_score`. Raw upstream CVSS remains inside `content.raw`. | Mongo schema validator, `AOCWriteGuard`, Roslyn analyzer, `stella aoc verify`. | +| 2 | No merges or opinionated dedupe | Each upstream document persists on its own; ingestion never collapses multiple vendors into one document. | Repository interceptors, unit/fixture suites. 
| +| 3 | Provenance is mandatory | `source.*`, `upstream.*`, and `signature` metadata must be present; missing provenance triggers `ERR_AOC_004`. | Schema validator, guard, CLI verifier. | +| 4 | Idempotent upserts | Writes keyed by `(vendor, upstream_id, content_hash)` either no-op or insert a new revision with `supersedes`. Duplicate hashes map to the same document. | Repository guard, storage unique index, CI smoke tests. | +| 5 | Append-only revisions | Updates create a new document with `supersedes` pointer; no in-place mutation of content. | Mongo schema (`supersedes` format), guard, data migration scripts. | +| 6 | Linkset only | Ingestion may compute link hints (`purls`, `cpes`, IDs) to accelerate joins, but must not transform or infer severity or policy. | Linkset builders reviewed via fixtures and analyzers. | +| 7 | Policy-only effective findings | Only Policy Engine identities can write `effective_finding_*`; ingestion callers receive `ERR_AOC_006` if they attempt it. | Authority scopes, Policy Engine guard. | +| 8 | Schema safety | Unknown top-level keys reject with `ERR_AOC_007`; timestamps use ISO 8601 UTC strings; tenant is required. | Mongo validator, JSON schema tests. | +| 9 | Clock discipline | Collectors stamp `fetched_at` and `received_at` monotonically per batch to support reproducibility windows. | Collector contracts, QA fixtures. | + +## 4. Raw Schemas + +### 4.1 `advisory_raw` + +| Field | Type | Notes | +|-------|------|-------| +| `_id` | string | `advisory_raw:{source}:{upstream_id}:{revision}`; deterministic and tenant-scoped. | +| `tenant` | string | Required; injected by Authority middleware and asserted by schema validator. | +| `source.vendor` | string | Provider identifier (e.g., `redhat`, `osv`, `ghsa`). | +| `source.stream` | string | Connector stream name (`csaf`, `osv`, etc.). | +| `source.api` | string | Absolute URI of upstream document; stored for traceability. | +| `source.collector_version` | string | Semantic version of the collector. | +| `upstream.upstream_id` | string | Vendor- or ecosystem-provided identifier (CVE, GHSA, vendor ID). | +| `upstream.document_version` | string | Upstream issued timestamp or revision string. | +| `upstream.fetched_at` / `received_at` | string | ISO 8601 UTC timestamps recorded by the collector. | +| `upstream.content_hash` | string | `sha256:` digest of the raw payload used for idempotency. | +| `upstream.signature` | object | Required structure storing `present`, `format`, `key_id`, `sig`; even unsigned payloads set `present: false`. | +| `content.format` | string | Source format (`CSAF`, `OSV`, etc.). | +| `content.spec_version` | string | Upstream spec version when known. | +| `content.raw` | object | Full upstream payload, untouched except for transport normalisation. | +| `identifiers` | object | Normalised identifiers (`cve`, `ghsa`, `aliases`, etc.) derived losslessly from raw content. | +| `linkset` | object | Join hints (see section 4.3). | +| `supersedes` | string or null | Points to previous revision of same upstream doc when content hash changes. | + +### 4.2 `vex_raw` + +| Field | Type | Notes | +|-------|------|-------| +| `_id` | string | `vex_raw:{source}:{upstream_id}:{revision}`. | +| `tenant` | string | Required; matches advisory collection requirements. | +| `source.*` | object | Same shape and requirements as `advisory_raw`. | +| `upstream.*` | object | Includes `document_version`, timestamps, `content_hash`, and `signature`. 
| +| `content.format` | string | Typically `CycloneDX-VEX` or `CSAF-VEX`. | +| `content.raw` | object | Entire upstream VEX payload. | +| `identifiers.statements` | array | Normalised statement summaries (IDs, PURLs, status, justification) to accelerate policy joins. | +| `linkset` | object | CVEs, GHSA IDs, and PURLs referenced in the document. | +| `supersedes` | string or null | Same convention as advisory documents. | + +### 4.3 Linkset Fields + +- `purls`: fully qualified Package URLs extracted from raw ranges or product nodes. +- `cpes`: Common Platform Enumerations when upstream docs provide them. +- `aliases`: Any alternate advisory identifiers present in the payload. +- `references`: Array of `{ type, url }` pairs pointing back to vendor advisories, patches, or exploits. +- `reconciled_from`: Provenance of linkset entries (JSON Pointer or field origin) to make automated checks auditable. + +Canonicalisation rules: +- Package URLs are rendered in canonical form without qualifiers/subpaths (`pkg:type/namespace/name@version`). +- CPE values are normalised to the 2.3 binding (`cpe:2.3:part:vendor:product:version:*:*:*:*:*:*:*`). + +### 4.4 `advisory_observations` + +`advisory_observations` is an immutable projection of the validated raw document used by Link‑Not‑Merge overlays. Fields mirror the JSON contract surfaced by `StellaOps.Concelier.Models.Observations.AdvisoryObservation`. + +| Field | Type | Notes | +|-------|------|-------| +| `_id` | string | Deterministic observation id — `{tenant}:{source.vendor}:{upstreamId}:{revision}`. | +| `tenant` | string | Lower-case tenant identifier. | +| `source.vendor` / `source.stream` | string | Connector identity (e.g., `vendor/redhat`, `ecosystem/osv`). | +| `source.api` | string | Absolute URI the connector fetched from. | +| `source.collectorVersion` | string | Optional semantic version of the connector build. | +| `upstream.upstream_id` | string | Advisory identifier as issued by the provider (CVE, vendor ID, etc.). | +| `upstream.document_version` | string | Upstream revision/version string. | +| `upstream.fetchedAt` / `upstream.receivedAt` | datetime | UTC timestamps recorded by the connector. | +| `upstream.contentHash` | string | `sha256:` digest used for idempotency. | +| `upstream.signature` | object | `{present, format?, keyId?, signature?}` describing upstream signature material. | +| `content.format` / `content.specVersion` | string | Raw payload format metadata (CSAF, OSV, JSON, etc.). | +| `content.raw` | object | Full upstream document stored losslessly (Relaxed Extended JSON). | +| `content.metadata` | object | Optional connector-specific metadata (batch ids, hints). | +| `linkset.aliases` | array | Normalized aliases (lower-case, sorted). | +| `linkset.purls` | array | Normalized PURLs extracted from the document. | +| `linkset.cpes` | array | Normalized CPE URIs. | +| `linkset.references` | array | `{ type, url }` pairs (type lower-case). | +| `createdAt` | datetime | Timestamp when Concelier persisted the observation. | +| `attributes` | object | Optional provenance attributes keyed by connector. | + +## 5. Error Model + +| Code | Description | HTTP status | Surfaces | +|------|-------------|-------------|----------| +| `ERR_AOC_001` | Forbidden field detected (severity, cvss, effective data). | 400 | Ingestion APIs, CLI verifier, CI guard. | +| `ERR_AOC_002` | Merge attempt detected (multiple upstream sources fused into one document). | 400 | Ingestion APIs, CLI verifier. 
| +| `ERR_AOC_003` | Idempotency violation (duplicate without supersedes pointer). | 409 | Repository guard, Mongo unique index, CLI verifier. | +| `ERR_AOC_004` | Missing provenance metadata (`source`, `upstream`, `signature`). | 422 | Schema validator, ingestion endpoints. | +| `ERR_AOC_005` | Signature or checksum mismatch. | 422 | Collector validation, CLI verifier. | +| `ERR_AOC_006` | Attempt to persist derived findings from ingestion context. | 403 | Policy engine guard, Authority scopes. | +| `ERR_AOC_007` | Unknown top-level fields (schema violation). | 400 | Mongo validator, CLI verifier. | + +Consumers should map these codes to CLI exit codes and structured log events so automation can fail fast and produce actionable guidance. + +## 6. API and Tooling Interfaces + +- **Concelier ingestion** (`StellaOps.Concelier.WebService`) + - `POST /ingest/advisory`: accepts upstream payload metadata; server-side guard constructs and persists raw document. + - `GET /advisories/raw/{id}` and filterable list endpoints expose raw documents for debugging and offline analysis. + - `POST /aoc/verify`: runs guard checks over recent documents and returns summary totals plus first violations. +- **Excititor ingestion** (`StellaOps.Excititor.WebService`) mirrors the same surface for VEX documents. +- **CLI workflows** (`stella aoc verify`, `stella sources ingest --dry-run`) surface pre-flight verification; documentation will live in `/docs/cli/` alongside Sprint 19 CLI updates. +- **Authority scopes**: new `advisory:ingest`, `advisory:read`, `vex:ingest`, and `vex:read` scopes enforce least privilege; see [Authority Architecture](../ARCHITECTURE_AUTHORITY.md) for scope grammar. + +## 7. Idempotency and Supersedes Rules + +1. Compute `content_hash` before any transformation; use it with `(source.vendor, upstream.upstream_id)` to detect duplicates. +2. If a document with the same hash already exists, skip the write and log a no-op. +3. When a new hash arrives for an existing upstream document, insert a new record and set `supersedes` to the previous `_id`. +4. Keep supersedes chains acyclic; collectors must resolve conflicts by rewinding before they insert. +5. Expose idempotency counters via metrics (`ingestion_write_total{result=ok|noop}`) to catch regressions early. + +## 8. Migration Playbook + +1. Freeze ingestion writes except for raw pass-through paths while deploying schema validators. +2. Snapshot existing collections to `_backup_*` for rollback safety. +3. Strip forbidden fields from historical documents into a temporary `advisory_view_legacy` used only during transition. +4. Enable Mongo JSON schema validators for `advisory_raw` and `vex_raw`. +5. Run collectors in `--dry-run` to confirm only allowed keys appear; fix violations before lifting the freeze. +6. Point Policy Engine to consume exclusively from raw collections and compute derived outputs downstream. +7. Delete legacy normalisation paths from ingestion code and enable runtime guards plus CI linting. +8. Roll forward CLI, Console, and dashboards so operators can monitor AOC status end-to-end. + +## 9. Observability and Diagnostics + +- **Metrics**: `ingestion_write_total{result=ok|reject}`, `aoc_violation_total{code}`, `ingestion_signature_verified_total{result}`, `ingestion_latency_seconds`, `advisory_revision_count`. +- **Traces**: spans `ingest.fetch`, `ingest.transform`, `ingest.write`, and `aoc.guard` with correlation IDs shared across workers. 
+- **Logs**: structured entries must include `tenant`, `source.vendor`, `upstream.upstream_id`, `content_hash`, and `violation_code` when applicable. +- **Dashboards**: DevOps should add panels for violation counts, signature failures, supersedes growth, and CLI verifier outcomes for each tenant. + +## 10. Security and Tenancy Checklist + +- Enforce Authority scopes (`advisory:ingest`, `vex:ingest`, `advisory:read`, `vex:read`) and require tenant claims on every request. +- Maintain pinned trust stores for signature verification; capture verification result in metrics and logs. +- Ensure collectors never log secrets or raw authentication headers; redact tokens before persistence. +- Validate that Policy Engine remains the only identity with permission to write `effective_finding_*` documents. +- Verify offline bundles include the raw collections, guard configuration, and verifier binaries so air-gapped installs can audit parity. +- Document operator steps for recovering from violations, including rollback to superseded revisions and re-running policy evaluation. + +## 11. Compliance Checklist + +- [ ] Deterministic guard enabled in Concelier and Excititor repositories. +- [ ] Mongo validators deployed for `advisory_raw` and `vex_raw`. +- [ ] Authority scopes and tenant enforcement verified via integration tests. +- [ ] CLI and CI pipelines run `stella aoc verify` against seeded snapshots. +- [ ] Observability feeds (metrics, logs, traces) wired into dashboards with alerts. +- [ ] Offline kit instructions updated to bundle validators and verifier tooling. +- [ ] Security review recorded covering ingestion, tenancy, and rollback procedures. + +--- + +*Last updated: 2025-10-27 (Sprint 19).* diff --git a/docs/install/docker.md b/docs/install/docker.md index 01a375af..e566a75b 100644 --- a/docs/install/docker.md +++ b/docs/install/docker.md @@ -1,207 +1,207 @@ -# StellaOps Console — Docker Install Recipes - -> **Audience:** Deployment Guild, Console Guild, platform operators. -> **Scope:** Acquire the `stellaops/web-ui` image, run it with Compose or Helm, mirror it for air‑gapped environments, and keep parity with CLI workflows. - -This guide focuses on the new **StellaOps Console** container. Start with the general [Installation Guide](../21_INSTALL_GUIDE.md) for shared prerequisites (Docker, registry access, TLS) and use the steps below to layer in the console. - ---- - -## 1 · Release artefacts - -| Artefact | Source | Verification | -|----------|--------|--------------| -| Console image | `registry.stella-ops.org/stellaops/web-ui@sha256:` | Listed in `deploy/releases/.yaml` (`yq '.services[] | select(.name=="web-ui") | .image'`). Signed with Cosign (`cosign verify --key https://stella-ops.org/keys/cosign.pub …`). | -| Compose bundles | `deploy/compose/docker-compose.{dev,stage,prod,airgap}.yaml` | Each profile already includes a `web-ui` service pinned to the release digest. Run `docker compose --env-file -f docker-compose..yaml config` to confirm the digest matches the manifest. | -| Helm values | `deploy/helm/stellaops/values-*.yaml` (`services.web-ui`) | CI lints the chart; use `helm template` to confirm the rendered Deployment/Service carry the expected digest and env vars. | -| Offline artefact (preview) | Generated via `oras copy registry.stella-ops.org/stellaops/web-ui@sha256: oci-archive:stellaops-web-ui-.tar` | Record SHA-256 in the downloads manifest (`DOWNLOADS-CONSOLE-23-001`) and sign with Cosign before shipping in the Offline Kit. 
| - -> **Tip:** Keep Compose/Helm digests in sync with the release manifest to preserve determinism. `deploy/tools/validate-profiles.sh` performs a quick cross-check. - ---- - -## 2 · Compose quickstart (connected host) - -1. **Prepare workspace** - - ```bash - mkdir stella-console && cd stella-console - cp /path/to/repo/deploy/compose/env/dev.env.example .env - ``` - -2. **Add console configuration** – append the following to `.env` (adjust per environment): - - ```bash - CONSOLE_PUBLIC_BASE_URL=https://console.dev.stella-ops.local - CONSOLE_GATEWAY_BASE_URL=https://api.dev.stella-ops.local - AUTHORITY_ISSUER=https://authority.dev.stella-ops.local - AUTHORITY_CLIENT_ID=console-ui - AUTHORITY_SCOPES="ui.read ui.admin findings:read advisory:read vex:read aoc:verify" - AUTHORITY_DPOP_ENABLED=true - ``` - - Optional extras from [`docs/deploy/console.md`](../deploy/console.md): - - ```bash - CONSOLE_FEATURE_FLAGS=runs,downloads,policies - CONSOLE_METRICS_ENABLED=true - CONSOLE_LOG_LEVEL=Information - ``` - -3. **Verify bundle provenance** - - ```bash - cosign verify-blob \ - --key https://stella-ops.org/keys/cosign.pub \ - --signature /path/to/repo/deploy/compose/docker-compose.dev.yaml.sig \ - /path/to/repo/deploy/compose/docker-compose.dev.yaml - ``` - -4. **Launch infrastructure + console** - - ```bash - docker compose --env-file .env -f /path/to/repo/deploy/compose/docker-compose.dev.yaml up -d mongo minio - docker compose --env-file .env -f /path/to/repo/deploy/compose/docker-compose.dev.yaml up -d web-ui - ``` - - The `web-ui` service exposes the console on port `8443` by default. Change the published port in the Compose file if you need to front it with an existing reverse proxy. - -5. **Health check** - - ```bash - curl -k https://console.dev.stella-ops.local/health/ready - ``` - - Expect `{"status":"Ready"}`. If the response is `401`, confirm Authority credentials and scopes. - ---- - -## 3 · Helm deployment (cluster) - -1. **Create an overlay** (example `console-values.yaml`): - - ```yaml - global: - release: - version: "2025.10.0-edge" - services: - web-ui: - image: registry.stella-ops.org/stellaops/web-ui@sha256:38b225fa7767a5b94ebae4dae8696044126aac429415e93de514d5dd95748dcf - service: - port: 8443 - env: - CONSOLE_PUBLIC_BASE_URL: "https://console.dev.stella-ops.local" - CONSOLE_GATEWAY_BASE_URL: "https://api.dev.stella-ops.local" - AUTHORITY_ISSUER: "https://authority.dev.stella-ops.local" - AUTHORITY_CLIENT_ID: "console-ui" - AUTHORITY_SCOPES: "ui.read ui.admin findings:read advisory:read vex:read aoc:verify" - AUTHORITY_DPOP_ENABLED: "true" - CONSOLE_FEATURE_FLAGS: "runs,downloads,policies" - CONSOLE_METRICS_ENABLED: "true" - ``` - -2. **Render and validate** - - ```bash - helm template stella-console ./deploy/helm/stellaops -f console-values.yaml | \ - grep -A2 'name: stellaops-web-ui' -A6 'image:' - ``` - -3. **Deploy** - - ```bash - helm upgrade --install stella-console ./deploy/helm/stellaops \ - -f deploy/helm/stellaops/values-dev.yaml \ - -f console-values.yaml - ``` - -4. **Post-deploy checks** - - ```bash - kubectl get pods -l app.kubernetes.io/name=stellaops-web-ui - kubectl port-forward deploy/stellaops-web-ui 8443:8443 - curl -k https://localhost:8443/health/ready - ``` - ---- - -## 4 · Offline packaging - -1. 
**Mirror the image to an OCI archive** - - ```bash - DIGEST=$(yq '.services[] | select(.name=="web-ui") | .image' deploy/releases/2025.10-edge.yaml | cut -d@ -f2) - oras copy registry.stella-ops.org/stellaops/web-ui@${DIGEST} \ - oci-archive:stellaops-web-ui-2025.10.0.tar - shasum -a 256 stellaops-web-ui-2025.10.0.tar - ``` - -2. **Sign the archive** - - ```bash - cosign sign-blob --key ~/keys/offline-kit.cosign \ - --output-signature stellaops-web-ui-2025.10.0.tar.sig \ - stellaops-web-ui-2025.10.0.tar - ``` - -3. **Load in the air-gap** - - ```bash - docker load --input stellaops-web-ui-2025.10.0.tar - docker tag stellaops/web-ui@${DIGEST} registry.airgap.local/stellaops/web-ui:2025.10.0 - ``` - -4. **Update the Offline Kit manifest** (once the downloads pipeline lands): - - ```bash - jq '.artifacts.console.webUi = { - "digest": "sha256:'"${DIGEST#sha256:}"'", - "archive": "stellaops-web-ui-2025.10.0.tar", - "signature": "stellaops-web-ui-2025.10.0.tar.sig" - }' downloads/manifest.json > downloads/manifest.json.tmp - mv downloads/manifest.json.tmp downloads/manifest.json - ``` - - Re-run `stella offline kit import downloads/manifest.json` to validate signatures inside the air‑gapped environment. - ---- - -## 5 · CLI parity - -Console operations map directly to scriptable workflows: - -| Action | CLI path | -|--------|----------| -| Fetch signed manifest entry | `stella downloads manifest show --artifact console/web-ui` *(CLI task `CONSOLE-DOC-23-502`, pending release)* | -| Mirror digest to OCI archive | `stella downloads mirror --artifact console/web-ui --to oci-archive:stellaops-web-ui.tar` *(planned alongside CLI AOC parity)* | -| Import offline kit | `stella offline kit import stellaops-web-ui-2025.10.0.tar` | -| Validate console health | `stella console status --endpoint https://console.dev.stella-ops.local` *(planned; fallback to `curl` as shown above)* | - -Track progress for the CLI commands via `DOCS-CONSOLE-23-014` (CLI vs UI parity matrix). - ---- - -## 6 · Compliance checklist - -- [ ] Image digest validated against the current release manifest. -- [ ] Compose/Helm deployments verified with `docker compose config` / `helm template`. -- [ ] Authority issuer, scopes, and DPoP settings documented and applied. -- [ ] Offline archive mirrored, signed, and recorded in the downloads manifest. -- [ ] CLI parity notes linked to the upcoming `docs/cli-vs-ui-parity.md` matrix. -- [ ] References cross-checked with `docs/deploy/console.md` and `docs/security/console-security.md`. -- [ ] Health checks documented for connected and air-gapped installs. - ---- - -## 7 · References - -- `deploy/releases/.yaml` – Release manifest (digests, SBOM metadata). -- `deploy/compose/README.md` – Compose profile overview. -- `deploy/helm/stellaops/values-*.yaml` – Helm defaults per environment. -- `/docs/deploy/console.md` – Detailed environment variables, CSP, health checks. -- `/docs/security/console-security.md` – Auth flows, scopes, DPoP, monitoring. -- `/docs/ui/downloads.md` – Downloads manifest workflow and offline parity guidance. - ---- - -*Last updated: 2025-10-28 (Sprint 23).* +# StellaOps Console — Docker Install Recipes + +> **Audience:** Deployment Guild, Console Guild, platform operators. +> **Scope:** Acquire the `stellaops/web-ui` image, run it with Compose or Helm, mirror it for air‑gapped environments, and keep parity with CLI workflows. + +This guide focuses on the new **StellaOps Console** container. 
Start with the general [Installation Guide](../21_INSTALL_GUIDE.md) for shared prerequisites (Docker, registry access, TLS) and use the steps below to layer in the console. + +--- + +## 1 · Release artefacts + +| Artefact | Source | Verification | +|----------|--------|--------------| +| Console image | `registry.stella-ops.org/stellaops/web-ui@sha256:` | Listed in `deploy/releases/.yaml` (`yq '.services[] | select(.name=="web-ui") | .image'`). Signed with Cosign (`cosign verify --key https://stella-ops.org/keys/cosign.pub …`). | +| Compose bundles | `deploy/compose/docker-compose.{dev,stage,prod,airgap}.yaml` | Each profile already includes a `web-ui` service pinned to the release digest. Run `docker compose --env-file -f docker-compose..yaml config` to confirm the digest matches the manifest. | +| Helm values | `deploy/helm/stellaops/values-*.yaml` (`services.web-ui`) | CI lints the chart; use `helm template` to confirm the rendered Deployment/Service carry the expected digest and env vars. | +| Offline artefact (preview) | Generated via `oras copy registry.stella-ops.org/stellaops/web-ui@sha256: oci-archive:stellaops-web-ui-.tar` | Record SHA-256 in the downloads manifest (`DOWNLOADS-CONSOLE-23-001`) and sign with Cosign before shipping in the Offline Kit. | + +> **Tip:** Keep Compose/Helm digests in sync with the release manifest to preserve determinism. `deploy/tools/validate-profiles.sh` performs a quick cross-check. + +--- + +## 2 · Compose quickstart (connected host) + +1. **Prepare workspace** + + ```bash + mkdir stella-console && cd stella-console + cp /path/to/repo/deploy/compose/env/dev.env.example .env + ``` + +2. **Add console configuration** – append the following to `.env` (adjust per environment): + + ```bash + CONSOLE_PUBLIC_BASE_URL=https://console.dev.stella-ops.local + CONSOLE_GATEWAY_BASE_URL=https://api.dev.stella-ops.local + AUTHORITY_ISSUER=https://authority.dev.stella-ops.local + AUTHORITY_CLIENT_ID=console-ui + AUTHORITY_SCOPES="ui.read ui.admin findings:read advisory:read vex:read aoc:verify" + AUTHORITY_DPOP_ENABLED=true + ``` + + Optional extras from [`docs/deploy/console.md`](../deploy/console.md): + + ```bash + CONSOLE_FEATURE_FLAGS=runs,downloads,policies + CONSOLE_METRICS_ENABLED=true + CONSOLE_LOG_LEVEL=Information + ``` + +3. **Verify bundle provenance** + + ```bash + cosign verify-blob \ + --key https://stella-ops.org/keys/cosign.pub \ + --signature /path/to/repo/deploy/compose/docker-compose.dev.yaml.sig \ + /path/to/repo/deploy/compose/docker-compose.dev.yaml + ``` + +4. **Launch infrastructure + console** + + ```bash + docker compose --env-file .env -f /path/to/repo/deploy/compose/docker-compose.dev.yaml up -d mongo minio + docker compose --env-file .env -f /path/to/repo/deploy/compose/docker-compose.dev.yaml up -d web-ui + ``` + + The `web-ui` service exposes the console on port `8443` by default. Change the published port in the Compose file if you need to front it with an existing reverse proxy. + +5. **Health check** + + ```bash + curl -k https://console.dev.stella-ops.local/health/ready + ``` + + Expect `{"status":"Ready"}`. If the response is `401`, confirm Authority credentials and scopes. + +--- + +## 3 · Helm deployment (cluster) + +1. 
**Create an overlay** (example `console-values.yaml`): + + ```yaml + global: + release: + version: "2025.10.0-edge" + services: + web-ui: + image: registry.stella-ops.org/stellaops/web-ui@sha256:38b225fa7767a5b94ebae4dae8696044126aac429415e93de514d5dd95748dcf + service: + port: 8443 + env: + CONSOLE_PUBLIC_BASE_URL: "https://console.dev.stella-ops.local" + CONSOLE_GATEWAY_BASE_URL: "https://api.dev.stella-ops.local" + AUTHORITY_ISSUER: "https://authority.dev.stella-ops.local" + AUTHORITY_CLIENT_ID: "console-ui" + AUTHORITY_SCOPES: "ui.read ui.admin findings:read advisory:read vex:read aoc:verify" + AUTHORITY_DPOP_ENABLED: "true" + CONSOLE_FEATURE_FLAGS: "runs,downloads,policies" + CONSOLE_METRICS_ENABLED: "true" + ``` + +2. **Render and validate** + + ```bash + helm template stella-console ./deploy/helm/stellaops -f console-values.yaml | \ + grep -A2 'name: stellaops-web-ui' -A6 'image:' + ``` + +3. **Deploy** + + ```bash + helm upgrade --install stella-console ./deploy/helm/stellaops \ + -f deploy/helm/stellaops/values-dev.yaml \ + -f console-values.yaml + ``` + +4. **Post-deploy checks** + + ```bash + kubectl get pods -l app.kubernetes.io/name=stellaops-web-ui + kubectl port-forward deploy/stellaops-web-ui 8443:8443 + curl -k https://localhost:8443/health/ready + ``` + +--- + +## 4 · Offline packaging + +1. **Mirror the image to an OCI archive** + + ```bash + DIGEST=$(yq '.services[] | select(.name=="web-ui") | .image' deploy/releases/2025.10-edge.yaml | cut -d@ -f2) + oras copy registry.stella-ops.org/stellaops/web-ui@${DIGEST} \ + oci-archive:stellaops-web-ui-2025.10.0.tar + shasum -a 256 stellaops-web-ui-2025.10.0.tar + ``` + +2. **Sign the archive** + + ```bash + cosign sign-blob --key ~/keys/offline-kit.cosign \ + --output-signature stellaops-web-ui-2025.10.0.tar.sig \ + stellaops-web-ui-2025.10.0.tar + ``` + +3. **Load in the air-gap** + + ```bash + docker load --input stellaops-web-ui-2025.10.0.tar + docker tag stellaops/web-ui@${DIGEST} registry.airgap.local/stellaops/web-ui:2025.10.0 + ``` + +4. **Update the Offline Kit manifest** (once the downloads pipeline lands): + + ```bash + jq '.artifacts.console.webUi = { + "digest": "sha256:'"${DIGEST#sha256:}"'", + "archive": "stellaops-web-ui-2025.10.0.tar", + "signature": "stellaops-web-ui-2025.10.0.tar.sig" + }' downloads/manifest.json > downloads/manifest.json.tmp + mv downloads/manifest.json.tmp downloads/manifest.json + ``` + + Re-run `stella offline kit import downloads/manifest.json` to validate signatures inside the air‑gapped environment. + +--- + +## 5 · CLI parity + +Console operations map directly to scriptable workflows: + +| Action | CLI path | +|--------|----------| +| Fetch signed manifest entry | `stella downloads manifest show --artifact console/web-ui` *(CLI task `CONSOLE-DOC-23-502`, pending release)* | +| Mirror digest to OCI archive | `stella downloads mirror --artifact console/web-ui --to oci-archive:stellaops-web-ui.tar` *(planned alongside CLI AOC parity)* | +| Import offline kit | `stella offline kit import stellaops-web-ui-2025.10.0.tar` | +| Validate console health | `stella console status --endpoint https://console.dev.stella-ops.local` *(planned; fallback to `curl` as shown above)* | + +Track progress for the CLI commands via `DOCS-CONSOLE-23-014` (CLI vs UI parity matrix). + +--- + +## 6 · Compliance checklist + +- [ ] Image digest validated against the current release manifest. +- [ ] Compose/Helm deployments verified with `docker compose config` / `helm template`. 
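+
+  One way to run this check before ticking the box (a sketch only; the profile, values file, and grep pattern are illustrative, reusing the commands shown in sections 2 and 3):
+
+  ```bash
+  # Confirm both render paths still pin the console image by the release digest
+  docker compose --env-file .env -f docker-compose.prod.yaml config | grep -F 'web-ui@sha256'
+  helm template stella-console ./deploy/helm/stellaops -f console-values.yaml | grep -F 'web-ui@sha256'
+  ```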
+- [ ] Authority issuer, scopes, and DPoP settings documented and applied. +- [ ] Offline archive mirrored, signed, and recorded in the downloads manifest. +- [ ] CLI parity notes linked to the upcoming `docs/cli-vs-ui-parity.md` matrix. +- [ ] References cross-checked with `docs/deploy/console.md` and `docs/security/console-security.md`. +- [ ] Health checks documented for connected and air-gapped installs. + +--- + +## 7 · References + +- `deploy/releases/.yaml` – Release manifest (digests, SBOM metadata). +- `deploy/compose/README.md` – Compose profile overview. +- `deploy/helm/stellaops/values-*.yaml` – Helm defaults per environment. +- `/docs/deploy/console.md` – Detailed environment variables, CSP, health checks. +- `/docs/security/console-security.md` – Auth flows, scopes, DPoP, monitoring. +- `/docs/ui/downloads.md` – Downloads manifest workflow and offline parity guidance. + +--- + +*Last updated: 2025-10-28 (Sprint 23).* diff --git a/docs/notifications/architecture.md b/docs/notifications/architecture.md index d984a74c..a91447cd 100644 --- a/docs/notifications/architecture.md +++ b/docs/notifications/architecture.md @@ -1,118 +1,118 @@ -# Notifications Architecture - -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -This dossier distils the Notify architecture into implementation-ready guidance for service owners, SREs, and integrators. It complements the high-level overview by detailing process boundaries, persistence models, and extensibility points. - ---- - -## 1. Runtime shape - -``` - ┌──────────────────┐ - │ Authority (OpTok)│ - └───────┬──────────┘ - │ - ┌───────▼──────────┐ ┌───────────────┐ - │ Notify.WebService│◀──────▶│ MongoDB │ -Tenant API│ REST + gRPC WIP │ │ rules/channels│ - └───────▲──────────┘ │ deliveries │ - │ │ digests │ - Internal bus │ └───────────────┘ - (NATS/Redis/etc) │ - │ - ┌─────────▼─────────┐ ┌───────────────┐ - │ Notify.Worker │◀────▶│ Redis / Cache │ - │ rule eval + render│ │ throttles/locks│ - └─────────▲─────────┘ └───────▲───────┘ - │ │ - │ │ - ┌──────┴──────┐ ┌─────────┴────────┐ - │ Connectors │──────▶│ Slack/Teams/... │ - │ (plug-ins) │ │ External targets │ - └─────────────┘ └──────────────────┘ -``` - -- **WebService** hosts REST endpoints (`/channels`, `/rules`, `/templates`, `/deliveries`, `/digests`, `/stats`) and handles schema normalisation, validation, and Authority enforcement. -- **Worker** subscribes to the platform event bus, evaluates rules per tenant, applies throttles/digests, renders payloads, writes ledger entries, and invokes connectors. -- **Plug-ins** live under `plugins/notify/` and are loaded deterministically at service start (`orderedPlugins` list). Each implements connector contracts and optional health/test-preview providers. - -Both services share options via `notify.yaml` (see `etc/notify.yaml.sample`). For dev/test scenarios, an in-memory repository exists but production requires Mongo + Redis/NATS for durability and coordination. - ---- - -## 2. Event ingestion and rule evaluation - -1. **Subscription.** Workers attach to the internal bus (Redis Streams or NATS JetStream). Each partition key is `tenantId|scope.digest|event.kind` to preserve order for a given artefact. -2. **Normalisation.** Incoming events are hydrated into `NotifyEvent` envelopes. Payload JSON is normalised (sorted object keys) to preserve determinism and enable hashing. -3. **Rule snapshot.** Per-tenant rule sets are cached in memory. 
Change streams from Mongo trigger snapshot refreshes without restart. -4. **Match pipeline.** - - Tenant check (`rule.tenantId` vs. event tenant). - - Kind/namespace/repository/digest filters. - - Severity and KEV gating based on event deltas. - - VEX gating using `NotifyRuleMatchVex`. - - Action iteration with throttle/digest decisions. -5. **Idempotency.** Each action computes `hash(ruleId|actionId|event.kind|scope.digest|delta.hash|dayBucket)`; matches within throttle TTL record `status=Throttled` and stop. -6. **Dispatch.** If digest is `instant`, the renderer immediately processes the action. Otherwise the event is appended to the digest window for later flush. - -Failures during evaluation are logged with correlation IDs and surfaced through `/stats` and worker metrics (`notify_rule_eval_failures_total`, `notify_digest_flush_errors_total`). - ---- - -## 3. Rendering & connectors - -- **Template resolution.** The renderer picks the template in this order: action template → channel default template → locale fallback → built-in minimal template. Locale negotiation reduces `en-US` to `en-us`. -- **Helpers & partials.** Exposed helpers mirror the list in [`notifications/templates.md`](templates.md#3-variables-helpers-and-context). Plug-ins may register additional helpers but must remain deterministic and side-effect free. -- **Rendering output.** `NotifyDeliveryRendered` captures: - - `channelType`, `format`, `locale` - - `title`, `body`, optional `summary`, `textBody` - - `target` (redacted where necessary) - - `attachments[]` (safe URLs or references) - - `bodyHash` (lowercase SHA-256) for audit parity -- **Connector contract.** Connectors implement `INotifyConnector` (send + health) and can implement `INotifyChannelTestProvider` for `/channels/{id}/test`. All plugs are single-tenant aware; secrets are pulled via references at send time and never persisted in Mongo. -- **Retries.** Workers track attempts with exponential jitter. On permanent failure, deliveries are marked `Failed` with `statusReason`, and optional DLQ fan-out is slated for Sprint 40. - ---- - -## 4. Persistence model - -| Collection | Purpose | Key fields & indexes | -|------------|---------|----------------------| -| `rules` | Tenant rule definitions. | `_id`, `tenantId`, `enabled`; index on `{tenantId, enabled}`. | -| `channels` | Channel metadata + config references. | `_id`, `tenantId`, `type`; index on `{tenantId, type}`. | -| `templates` | Locale-specific render bodies. | `_id`, `tenantId`, `channelType`, `key`; index on `{tenantId, channelType, key}`. | -| `deliveries` | Ledger of rendered notifications. | `_id`, `tenantId`, `sentAt`; compound index on `{tenantId, sentAt:-1}` for history queries. | -| `digests` | Open digest windows per action. | `_id` (`tenantId:actionKey:window`), `status`; index on `{tenantId, actionKey}`. | -| `throttles` | Short-lived throttle tokens (Mongo or Redis). | Key format `idem:` with TTL aligned to throttle duration. | - -Documents are stored using the canonical JSON serializer (`NotifyCanonicalJsonSerializer`) to preserve property ordering and casing. Schema migration helpers upgrade stored documents when new versions ship. - ---- - -## 5. Deployment & configuration - -- **Configuration sources.** YAML files feed typed options (`NotifyMongoOptions`, `NotifyWorkerOptions`, etc.). Environment variables can override connection strings and rate limits for production. 
-- **Authority integration.** Two OAuth clients (`notify-web`, `notify-web-dev`) with scopes `notify.read` and `notify.admin` are required. Authority enforcement can be disabled for air-gapped dev use by providing `developmentSigningKey`. -- **Plug-in management.** `plugins.baseDirectory` and `orderedPlugins` guarantee deterministic loading. Offline Kits copy the plug-in tree verbatim; operations must keep the order aligned across environments. -- **Observability.** Workers expose structured logs (`ruleId`, `actionId`, `eventId`, `throttleKey`). Metrics include: - - `notify_rule_matches_total{tenant,eventKind}` - - `notify_delivery_attempts_total{channelType,status}` - - `notify_digest_open_windows{window}` - - Optional OpenTelemetry traces for rule evaluation and connector round-trips. -- **Scaling levers.** Increase worker replicas to cope with bus throughput; adjust `worker.prefetchCount` for Redis Streams or `ackWait` for NATS JetStream. WebService remains stateless and scales horizontally behind the gateway. - ---- - -## 6. Roadmap alignment - -| Backlog | Architectural note | -|---------|--------------------| -| `NOTIFY-SVC-38-001` | Standardise event envelope publication (idempotency keys) – ensure bus bindings use the documented key format. | -| `NOTIFY-SVC-38-002..004` | Introduce simulation endpoints and throttle dashboards – expect additional `/internal/notify/simulate` routes and metrics; update once merged. | -| `NOTIFY-SVC-39-001..004` | Correlation engine, digests generator, simulation API, quiet hours – anticipate new Mongo documents (`quietHours`, correlation caches) and connector metadata (quiet mode hints). Review this guide when implementations land. | - -Action: schedule a documentation sync with the Notifications Service Guild immediately after `NOTIFY-SVC-39-001..004` merge to confirm schema adjustments (e.g., correlation edge storage, quiet hour calendars) and add any new persistence or API details here. - ---- - -> **Imposed rule reminder:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. +# Notifications Architecture + +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +This dossier distils the Notify architecture into implementation-ready guidance for service owners, SREs, and integrators. It complements the high-level overview by detailing process boundaries, persistence models, and extensibility points. + +--- + +## 1. Runtime shape + +``` + ┌──────────────────┐ + │ Authority (OpTok)│ + └───────┬──────────┘ + │ + ┌───────▼──────────┐ ┌───────────────┐ + │ Notify.WebService│◀──────▶│ MongoDB │ +Tenant API│ REST + gRPC WIP │ │ rules/channels│ + └───────▲──────────┘ │ deliveries │ + │ │ digests │ + Internal bus │ └───────────────┘ + (NATS/Redis/etc) │ + │ + ┌─────────▼─────────┐ ┌───────────────┐ + │ Notify.Worker │◀────▶│ Redis / Cache │ + │ rule eval + render│ │ throttles/locks│ + └─────────▲─────────┘ └───────▲───────┘ + │ │ + │ │ + ┌──────┴──────┐ ┌─────────┴────────┐ + │ Connectors │──────▶│ Slack/Teams/... │ + │ (plug-ins) │ │ External targets │ + └─────────────┘ └──────────────────┘ +``` + +- **WebService** hosts REST endpoints (`/channels`, `/rules`, `/templates`, `/deliveries`, `/digests`, `/stats`) and handles schema normalisation, validation, and Authority enforcement. 
+- **Worker** subscribes to the platform event bus, evaluates rules per tenant, applies throttles/digests, renders payloads, writes ledger entries, and invokes connectors. +- **Plug-ins** live under `plugins/notify/` and are loaded deterministically at service start (`orderedPlugins` list). Each implements connector contracts and optional health/test-preview providers. + +Both services share options via `notify.yaml` (see `etc/notify.yaml.sample`). For dev/test scenarios, an in-memory repository exists but production requires Mongo + Redis/NATS for durability and coordination. + +--- + +## 2. Event ingestion and rule evaluation + +1. **Subscription.** Workers attach to the internal bus (Redis Streams or NATS JetStream). Each partition key is `tenantId|scope.digest|event.kind` to preserve order for a given artefact. +2. **Normalisation.** Incoming events are hydrated into `NotifyEvent` envelopes. Payload JSON is normalised (sorted object keys) to preserve determinism and enable hashing. +3. **Rule snapshot.** Per-tenant rule sets are cached in memory. Change streams from Mongo trigger snapshot refreshes without restart. +4. **Match pipeline.** + - Tenant check (`rule.tenantId` vs. event tenant). + - Kind/namespace/repository/digest filters. + - Severity and KEV gating based on event deltas. + - VEX gating using `NotifyRuleMatchVex`. + - Action iteration with throttle/digest decisions. +5. **Idempotency.** Each action computes `hash(ruleId|actionId|event.kind|scope.digest|delta.hash|dayBucket)`; matches within throttle TTL record `status=Throttled` and stop. +6. **Dispatch.** If digest is `instant`, the renderer immediately processes the action. Otherwise the event is appended to the digest window for later flush. + +Failures during evaluation are logged with correlation IDs and surfaced through `/stats` and worker metrics (`notify_rule_eval_failures_total`, `notify_digest_flush_errors_total`). + +--- + +## 3. Rendering & connectors + +- **Template resolution.** The renderer picks the template in this order: action template → channel default template → locale fallback → built-in minimal template. Locale negotiation reduces `en-US` to `en-us`. +- **Helpers & partials.** Exposed helpers mirror the list in [`notifications/templates.md`](templates.md#3-variables-helpers-and-context). Plug-ins may register additional helpers but must remain deterministic and side-effect free. +- **Rendering output.** `NotifyDeliveryRendered` captures: + - `channelType`, `format`, `locale` + - `title`, `body`, optional `summary`, `textBody` + - `target` (redacted where necessary) + - `attachments[]` (safe URLs or references) + - `bodyHash` (lowercase SHA-256) for audit parity +- **Connector contract.** Connectors implement `INotifyConnector` (send + health) and can implement `INotifyChannelTestProvider` for `/channels/{id}/test`. All plugs are single-tenant aware; secrets are pulled via references at send time and never persisted in Mongo. +- **Retries.** Workers track attempts with exponential jitter. On permanent failure, deliveries are marked `Failed` with `statusReason`, and optional DLQ fan-out is slated for Sprint 40. + +--- + +## 4. Persistence model + +| Collection | Purpose | Key fields & indexes | +|------------|---------|----------------------| +| `rules` | Tenant rule definitions. | `_id`, `tenantId`, `enabled`; index on `{tenantId, enabled}`. | +| `channels` | Channel metadata + config references. | `_id`, `tenantId`, `type`; index on `{tenantId, type}`. 
| +| `templates` | Locale-specific render bodies. | `_id`, `tenantId`, `channelType`, `key`; index on `{tenantId, channelType, key}`. | +| `deliveries` | Ledger of rendered notifications. | `_id`, `tenantId`, `sentAt`; compound index on `{tenantId, sentAt:-1}` for history queries. | +| `digests` | Open digest windows per action. | `_id` (`tenantId:actionKey:window`), `status`; index on `{tenantId, actionKey}`. | +| `throttles` | Short-lived throttle tokens (Mongo or Redis). | Key format `idem:` with TTL aligned to throttle duration. | + +Documents are stored using the canonical JSON serializer (`NotifyCanonicalJsonSerializer`) to preserve property ordering and casing. Schema migration helpers upgrade stored documents when new versions ship. + +--- + +## 5. Deployment & configuration + +- **Configuration sources.** YAML files feed typed options (`NotifyMongoOptions`, `NotifyWorkerOptions`, etc.). Environment variables can override connection strings and rate limits for production. +- **Authority integration.** Two OAuth clients (`notify-web`, `notify-web-dev`) with scopes `notify.read` and `notify.admin` are required. Authority enforcement can be disabled for air-gapped dev use by providing `developmentSigningKey`. +- **Plug-in management.** `plugins.baseDirectory` and `orderedPlugins` guarantee deterministic loading. Offline Kits copy the plug-in tree verbatim; operations must keep the order aligned across environments. +- **Observability.** Workers expose structured logs (`ruleId`, `actionId`, `eventId`, `throttleKey`). Metrics include: + - `notify_rule_matches_total{tenant,eventKind}` + - `notify_delivery_attempts_total{channelType,status}` + - `notify_digest_open_windows{window}` + - Optional OpenTelemetry traces for rule evaluation and connector round-trips. +- **Scaling levers.** Increase worker replicas to cope with bus throughput; adjust `worker.prefetchCount` for Redis Streams or `ackWait` for NATS JetStream. WebService remains stateless and scales horizontally behind the gateway. + +--- + +## 6. Roadmap alignment + +| Backlog | Architectural note | +|---------|--------------------| +| `NOTIFY-SVC-38-001` | Standardise event envelope publication (idempotency keys) – ensure bus bindings use the documented key format. | +| `NOTIFY-SVC-38-002..004` | Introduce simulation endpoints and throttle dashboards – expect additional `/internal/notify/simulate` routes and metrics; update once merged. | +| `NOTIFY-SVC-39-001..004` | Correlation engine, digests generator, simulation API, quiet hours – anticipate new Mongo documents (`quietHours`, correlation caches) and connector metadata (quiet mode hints). Review this guide when implementations land. | + +Action: schedule a documentation sync with the Notifications Service Guild immediately after `NOTIFY-SVC-39-001..004` merge to confirm schema adjustments (e.g., correlation edge storage, quiet hour calendars) and add any new persistence or API details here. + +--- + +> **Imposed rule reminder:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. diff --git a/docs/notifications/digests.md b/docs/notifications/digests.md index 4324dae0..0e7780d2 100644 --- a/docs/notifications/digests.md +++ b/docs/notifications/digests.md @@ -1,92 +1,92 @@ -# Notifications Digests - -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. 
- -Digests coalesce multiple matching events into a single notification when rules request batched delivery. They protect responders from alert storms while preserving a deterministic record of every input. - ---- - -## 1. Digest lifecycle - -1. **Window selection.** Rule actions opt into a digest cadence by setting `actions[].digest` (`instant`, `5m`, `15m`, `1h`, `1d`). `instant` skips digest logic entirely. -2. **Aggregation.** When an event matches, the worker appends it to the open digest window (`tenantId + actionId + window`). Events include the canonical scope, delta counts, and references. -3. **Flush.** When the window expires or hits the worker’s safety cap (configurable), the worker renders a digest template and emits a single delivery with status `Digested`. -4. **Audit.** The delivery ledger links back to the digest document so operators can inspect individual items and the aggregated summary. - ---- - -## 2. Storage model - -Digest state lives in Mongo (`digests` collection) and mirrors the schema described in [ARCHITECTURE_NOTIFY.md](../ARCHITECTURE_NOTIFY.md#7-data-model-mongo): - -```json -{ - "_id": "tenant-dev:act-email-compliance:1h", - "tenantId": "tenant-dev", - "actionKey": "act-email-compliance", - "window": "1h", - "openedAt": "2025-10-24T08:00:00Z", - "status": "open", - "items": [ - { - "eventId": "00000000-0000-0000-0000-000000000001", - "scope": { - "namespace": "prod-payments", - "repo": "ghcr.io/acme/api", - "digest": "sha256:…" - }, - "delta": { - "newCritical": 1, - "kev": 1 - } - } - ] -} -``` - -- `status` reflects whether the window is currently collecting (`open`) or has been completed (`closed`). Future revisions may introduce `flushing` for in-progress operations. -- `items[].delta` captures aggregated counts for reporting (e.g., new critical findings, KEV, quieted). -- Workers use optimistic concurrency on the document ID to avoid duplicate flushes across replicas. - ---- - -## 3. Rendering and templates - -- Digest deliveries use the same template engine as instant notifications. Templates receive an additional `digest` object with `window`, `openedAt`, `itemCount`, and `items` (findings grouped by namespace/repository when available). -- Provide digest-specific templates (e.g., `tmpl-digest-hourly`) so the body can enumerate top offenders, summarise totals, and link to detailed dashboards. -- When no template is specified, Notify falls back to channel defaults that emphasise summary counts and redirect to Console for detail. - ---- - -## 4. API surface - -| Endpoint | Description | Notes | -|----------|-------------|-------| -| `POST /digests` | Issues administrative commands (e.g., force flush, reopen) for a specific action/window. | Request body specifies the command target; requires `notify.admin`. | -| `GET /digests/{actionKey}` | Returns the currently open window (if any) for the referenced action. | Supports operators/CLI inspecting pending digests; requires `notify.read`. | -| `DELETE /digests/{actionKey}` | Drops the open window without notifying (emergency stop). | Emits an audit record; use sparingly. | - -All routes honour the tenant header and reuse the standard Notify rate limits. - ---- - -## 5. Worker behaviour and safety nets - -- **Idempotency.** Flush operations generate a deterministic digest delivery ID (`digest::::`). Retries reuse the same ID. 
-- **Throttles.** Digest generation respects action throttles; setting an aggressive throttle together with a digest window may result in deliberate skips (logged as `Throttled` in the delivery ledger). -- **Quiet hours.** Future sprint work (`NOTIFY-SVC-39-004`) integrates quiet-hour calendars. When enabled, flush timers pause during quiet windows and resume afterwards. -- **Back-pressure.** When the window reaches the configured item cap before the timer, the worker flushes early and starts a new window immediately. -- **Crash resilience.** Workers rebuild in-flight windows from Mongo on startup; partially flushed windows remain closed after success or reopened if the flush fails. - ---- - -## 6. Operator guidance - -- Choose hourly digests for high-volume compliance events; daily digests suit executive reporting. -- Pair digests with incident-focused instant rules so critical items surface immediately while less urgent noise is summarised. -- Monitor `/stats` output for `openDigestCount` to ensure windows are flushing; spikes may indicate downstream connector failures. -- When testing new digest templates, open a small (`5m`) window, trigger sample events, then call `POST /digests/{actionId}/flush` to validate rendering before moving to longer cadences. - ---- - -> **Imposed rule reminder:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. +# Notifications Digests + +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +Digests coalesce multiple matching events into a single notification when rules request batched delivery. They protect responders from alert storms while preserving a deterministic record of every input. + +--- + +## 1. Digest lifecycle + +1. **Window selection.** Rule actions opt into a digest cadence by setting `actions[].digest` (`instant`, `5m`, `15m`, `1h`, `1d`). `instant` skips digest logic entirely. +2. **Aggregation.** When an event matches, the worker appends it to the open digest window (`tenantId + actionId + window`). Events include the canonical scope, delta counts, and references. +3. **Flush.** When the window expires or hits the worker’s safety cap (configurable), the worker renders a digest template and emits a single delivery with status `Digested`. +4. **Audit.** The delivery ledger links back to the digest document so operators can inspect individual items and the aggregated summary. + +--- + +## 2. Storage model + +Digest state lives in Mongo (`digests` collection) and mirrors the schema described in [ARCHITECTURE_NOTIFY.md](../ARCHITECTURE_NOTIFY.md#7-data-model-mongo): + +```json +{ + "_id": "tenant-dev:act-email-compliance:1h", + "tenantId": "tenant-dev", + "actionKey": "act-email-compliance", + "window": "1h", + "openedAt": "2025-10-24T08:00:00Z", + "status": "open", + "items": [ + { + "eventId": "00000000-0000-0000-0000-000000000001", + "scope": { + "namespace": "prod-payments", + "repo": "ghcr.io/acme/api", + "digest": "sha256:…" + }, + "delta": { + "newCritical": 1, + "kev": 1 + } + } + ] +} +``` + +- `status` reflects whether the window is currently collecting (`open`) or has been completed (`closed`). Future revisions may introduce `flushing` for in-progress operations. +- `items[].delta` captures aggregated counts for reporting (e.g., new critical findings, KEV, quieted). +- Workers use optimistic concurrency on the document ID to avoid duplicate flushes across replicas. + +--- + +## 3. 
Rendering and templates + +- Digest deliveries use the same template engine as instant notifications. Templates receive an additional `digest` object with `window`, `openedAt`, `itemCount`, and `items` (findings grouped by namespace/repository when available). +- Provide digest-specific templates (e.g., `tmpl-digest-hourly`) so the body can enumerate top offenders, summarise totals, and link to detailed dashboards. +- When no template is specified, Notify falls back to channel defaults that emphasise summary counts and redirect to Console for detail. + +--- + +## 4. API surface + +| Endpoint | Description | Notes | +|----------|-------------|-------| +| `POST /digests` | Issues administrative commands (e.g., force flush, reopen) for a specific action/window. | Request body specifies the command target; requires `notify.admin`. | +| `GET /digests/{actionKey}` | Returns the currently open window (if any) for the referenced action. | Supports operators/CLI inspecting pending digests; requires `notify.read`. | +| `DELETE /digests/{actionKey}` | Drops the open window without notifying (emergency stop). | Emits an audit record; use sparingly. | + +All routes honour the tenant header and reuse the standard Notify rate limits. + +--- + +## 5. Worker behaviour and safety nets + +- **Idempotency.** Flush operations generate a deterministic digest delivery ID (`digest::::`). Retries reuse the same ID. +- **Throttles.** Digest generation respects action throttles; setting an aggressive throttle together with a digest window may result in deliberate skips (logged as `Throttled` in the delivery ledger). +- **Quiet hours.** Future sprint work (`NOTIFY-SVC-39-004`) integrates quiet-hour calendars. When enabled, flush timers pause during quiet windows and resume afterwards. +- **Back-pressure.** When the window reaches the configured item cap before the timer, the worker flushes early and starts a new window immediately. +- **Crash resilience.** Workers rebuild in-flight windows from Mongo on startup; partially flushed windows remain closed after success or reopened if the flush fails. + +--- + +## 6. Operator guidance + +- Choose hourly digests for high-volume compliance events; daily digests suit executive reporting. +- Pair digests with incident-focused instant rules so critical items surface immediately while less urgent noise is summarised. +- Monitor `/stats` output for `openDigestCount` to ensure windows are flushing; spikes may indicate downstream connector failures. +- When testing new digest templates, open a small (`5m`) window, trigger sample events, then call `POST /digests/{actionId}/flush` to validate rendering before moving to longer cadences. + +--- + +> **Imposed rule reminder:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. diff --git a/docs/notifications/overview.md b/docs/notifications/overview.md index 7634826f..82a434fb 100644 --- a/docs/notifications/overview.md +++ b/docs/notifications/overview.md @@ -1,76 +1,76 @@ -# Notifications Overview - -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -Notifications Studio turns raw platform events into concise, tenant-scoped alerts that reach the right responders without overwhelming them. The service is sovereign/offline-first, follows the Aggregation-Only Contract (AOC), and produces deterministic outputs so the same configuration yields identical deliveries across environments. 
- ---- - -## 1. Mission & value - -- **Reduce noise.** Only materially new or high-impact changes reach chat, email, or webhooks thanks to rule filters, throttles, and digest windows. -- **Explainable results.** Every delivery is traceable back to a rule, action, and event payload stored in the delivery ledger; operators can audit what fired and why. -- **Safe by default.** Secrets remain in external stores, templates are sandboxed, quiet hours and throttles prevent storms, and idempotency guarantees protect downstream systems. -- **Offline-aligned.** All configuration, templates, and plug-ins ship with Offline Kits; no external SaaS is required to send notifications. - ---- - -## 2. Core capabilities - -| Capability | What it does | Key docs | -|------------|--------------|----------| -| Rules engine | Declarative matchers for event kinds, severities, namespaces, VEX context, KEV flags, and more. | [`notifications/rules.md`](rules.md) | -| Channel catalog | Slack, Teams, Email, Webhook connectors loaded via restart-time plug-ins; metadata stored without secrets. | [`notifications/architecture.md`](architecture.md) | -| Templates | Locale-aware, deterministic rendering via safe helpers; channel defaults plus tenant-specific overrides. | [`notifications/templates.md`](templates.md) | -| Digests | Coalesce bursts into periodic summaries with deterministic IDs and audit trails. | [`notifications/digests.md`](digests.md) | -| Delivery ledger | Tracks rendered payload hashes, attempts, throttles, and outcomes for every action. | [`ARCHITECTURE_NOTIFY.md`](../ARCHITECTURE_NOTIFY.md#7-data-model-mongo) | - ---- - -## 3. How it fits into Stella Ops - -1. **Producers emit events.** Scanner, Scheduler, VEX Lens, Attestor, and Zastava publish canonical envelopes (`NotifyEvent`) onto the internal bus. -2. **Notify.Worker evaluates rules.** For each tenant, the worker applies match filters, VEX gates, throttles, and digest policies before rendering the action. -3. **Connectors deliver.** Channel plug-ins send the rendered payload to Slack/Teams/Email/Webhook targets and report back attempts and outcomes. -4. **Consumers investigate.** Operators pivot from message links into Console dashboards, SBOM views, or policy overlays with correlation IDs preserved. - -The Notify WebService fronts worker state with REST APIs used by the UI and CLI. Tenants authenticate via StellaOps Authority scopes `notify.read` and `notify.admin`. All operations require the tenant header (`X-StellaOps-Tenant`) to preserve sovereignty boundaries. - ---- - -## 4. Operating model - -| Area | Guidance | -|------|----------| -| **Tenancy** | Each rule, channel, template, and delivery belongs to exactly one tenant. Cross-tenant sharing is intentionally unsupported. | -| **Determinism** | Configuration persistence normalises strings and sorts collections. Template rendering produces identical `bodyHash` values when inputs match. | -| **Scaling** | Workers scale horizontally; per-tenant rule snapshots are cached and refreshed from Mongo change streams. Redis (or equivalent) guards throttles and locks. | -| **Offline** | Offline Kits include plug-ins, default templates, and seed rules. Operators can edit YAML/JSON manifests before air-gapped deployment. | -| **Security** | Channel secrets use indirection (`secretRef`), Authority-protected OAuth clients secure API access, and delivery payloads are redacted before storage where required. | - ---- - -## 5. 
Getting started (first 30 minutes) - -| Step | Goal | Reference | -|------|------|-----------| -| 1 | Deploy Notify WebService + Worker with Mongo and Redis | [`ARCHITECTURE_NOTIFY.md`](../ARCHITECTURE_NOTIFY.md#1-runtime-shape--projects) | -| 2 | Register OAuth clients/scopes in Authority | [`etc/authority.yaml.sample`](../etc/authority.yaml.sample) | -| 3 | Install channel plug-ins and capture secret references | [`plugins/notify`](../../plugins) | -| 4 | Create a tenant rule and test preview | [`POST /channels/{id}/test`](../ARCHITECTURE_NOTIFY.md#8-external-apis-webservice) | -| 5 | Inspect deliveries and digests | `/api/v1/notify/deliveries`, `/api/v1/notify/digests` | - ---- - -## 6. Alignment with implementation work - -| Backlog item | Impact on docs | Status | -|--------------|----------------|--------| -| `NOTIFY-SVC-38-001..004` | Foundational correlation, throttling, simulation hooks. | **In progress** – align behaviour once services publish beta APIs. | -| `NOTIFY-SVC-39-001..004` | Adds correlation engine, digest generator, simulation API, quiet hours. | **Pending** – revisit rule/digest sections when these tasks merge. | - -Action: coordinate with the Notifications Service Guild when `NOTIFY-SVC-39-001..004` land to validate payload fields, quiet-hours semantics, and any new connector metadata that should be documented here and in the channel-specific guides. - ---- - -> **Imposed rule reminder:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. +# Notifications Overview + +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +Notifications Studio turns raw platform events into concise, tenant-scoped alerts that reach the right responders without overwhelming them. The service is sovereign/offline-first, follows the Aggregation-Only Contract (AOC), and produces deterministic outputs so the same configuration yields identical deliveries across environments. + +--- + +## 1. Mission & value + +- **Reduce noise.** Only materially new or high-impact changes reach chat, email, or webhooks thanks to rule filters, throttles, and digest windows. +- **Explainable results.** Every delivery is traceable back to a rule, action, and event payload stored in the delivery ledger; operators can audit what fired and why. +- **Safe by default.** Secrets remain in external stores, templates are sandboxed, quiet hours and throttles prevent storms, and idempotency guarantees protect downstream systems. +- **Offline-aligned.** All configuration, templates, and plug-ins ship with Offline Kits; no external SaaS is required to send notifications. + +--- + +## 2. Core capabilities + +| Capability | What it does | Key docs | +|------------|--------------|----------| +| Rules engine | Declarative matchers for event kinds, severities, namespaces, VEX context, KEV flags, and more. | [`notifications/rules.md`](rules.md) | +| Channel catalog | Slack, Teams, Email, Webhook connectors loaded via restart-time plug-ins; metadata stored without secrets. | [`notifications/architecture.md`](architecture.md) | +| Templates | Locale-aware, deterministic rendering via safe helpers; channel defaults plus tenant-specific overrides. | [`notifications/templates.md`](templates.md) | +| Digests | Coalesce bursts into periodic summaries with deterministic IDs and audit trails. 
| [`notifications/digests.md`](digests.md) | +| Delivery ledger | Tracks rendered payload hashes, attempts, throttles, and outcomes for every action. | [`ARCHITECTURE_NOTIFY.md`](../ARCHITECTURE_NOTIFY.md#7-data-model-mongo) | + +--- + +## 3. How it fits into Stella Ops + +1. **Producers emit events.** Scanner, Scheduler, VEX Lens, Attestor, and Zastava publish canonical envelopes (`NotifyEvent`) onto the internal bus. +2. **Notify.Worker evaluates rules.** For each tenant, the worker applies match filters, VEX gates, throttles, and digest policies before rendering the action. +3. **Connectors deliver.** Channel plug-ins send the rendered payload to Slack/Teams/Email/Webhook targets and report back attempts and outcomes. +4. **Consumers investigate.** Operators pivot from message links into Console dashboards, SBOM views, or policy overlays with correlation IDs preserved. + +The Notify WebService fronts worker state with REST APIs used by the UI and CLI. Tenants authenticate via StellaOps Authority scopes `notify.read` and `notify.admin`. All operations require the tenant header (`X-StellaOps-Tenant`) to preserve sovereignty boundaries. + +--- + +## 4. Operating model + +| Area | Guidance | +|------|----------| +| **Tenancy** | Each rule, channel, template, and delivery belongs to exactly one tenant. Cross-tenant sharing is intentionally unsupported. | +| **Determinism** | Configuration persistence normalises strings and sorts collections. Template rendering produces identical `bodyHash` values when inputs match. | +| **Scaling** | Workers scale horizontally; per-tenant rule snapshots are cached and refreshed from Mongo change streams. Redis (or equivalent) guards throttles and locks. | +| **Offline** | Offline Kits include plug-ins, default templates, and seed rules. Operators can edit YAML/JSON manifests before air-gapped deployment. | +| **Security** | Channel secrets use indirection (`secretRef`), Authority-protected OAuth clients secure API access, and delivery payloads are redacted before storage where required. | + +--- + +## 5. Getting started (first 30 minutes) + +| Step | Goal | Reference | +|------|------|-----------| +| 1 | Deploy Notify WebService + Worker with Mongo and Redis | [`ARCHITECTURE_NOTIFY.md`](../ARCHITECTURE_NOTIFY.md#1-runtime-shape--projects) | +| 2 | Register OAuth clients/scopes in Authority | [`etc/authority.yaml.sample`](../etc/authority.yaml.sample) | +| 3 | Install channel plug-ins and capture secret references | [`plugins/notify`](../../plugins) | +| 4 | Create a tenant rule and test preview | [`POST /channels/{id}/test`](../ARCHITECTURE_NOTIFY.md#8-external-apis-webservice) | +| 5 | Inspect deliveries and digests | `/api/v1/notify/deliveries`, `/api/v1/notify/digests` | + +--- + +## 6. Alignment with implementation work + +| Backlog item | Impact on docs | Status | +|--------------|----------------|--------| +| `NOTIFY-SVC-38-001..004` | Foundational correlation, throttling, simulation hooks. | **In progress** – align behaviour once services publish beta APIs. | +| `NOTIFY-SVC-39-001..004` | Adds correlation engine, digest generator, simulation API, quiet hours. | **Pending** – revisit rule/digest sections when these tasks merge. | + +Action: coordinate with the Notifications Service Guild when `NOTIFY-SVC-39-001..004` land to validate payload fields, quiet-hours semantics, and any new connector metadata that should be documented here and in the channel-specific guides. 
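+
+As a companion to the getting-started steps above, the sketch below shows the tenant header from section 3 and the read-only endpoints from section 5 in use. It is illustrative only: the base URL, token environment variable, and tenant value are assumptions, and token acquisition from Authority (`notify.read` scope) is not shown.
+
+```csharp
+using System;
+using System.Net.Http;
+using System.Net.Http.Headers;
+using System.Threading.Tasks;
+
+// Hypothetical smoke test against the Notify WebService (endpoints per section 5).
+class NotifySmokeTest
+{
+    static async Task Main()
+    {
+        using var http = new HttpClient { BaseAddress = new Uri("https://notify.example.internal") }; // assumed gateway URL
+        // OpTok issued by Authority with the notify.read scope (acquisition not shown).
+        http.DefaultRequestHeaders.Authorization =
+            new AuthenticationHeaderValue("Bearer", Environment.GetEnvironmentVariable("NOTIFY_TOKEN"));
+        http.DefaultRequestHeaders.Add("X-StellaOps-Tenant", "tenant-dev"); // tenant header required on every call
+
+        // Inspect recent deliveries and open digests via the read-only API surface.
+        var deliveries = await http.GetStringAsync("/api/v1/notify/deliveries");
+        var digests = await http.GetStringAsync("/api/v1/notify/digests");
+        Console.WriteLine(deliveries);
+        Console.WriteLine(digests);
+    }
+}
+```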
+ +--- + +> **Imposed rule reminder:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. diff --git a/docs/notifications/pack-approvals-integration.md b/docs/notifications/pack-approvals-integration.md index 5257887f..23736334 100644 --- a/docs/notifications/pack-approvals-integration.md +++ b/docs/notifications/pack-approvals-integration.md @@ -1,62 +1,62 @@ -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -# Pack Approval Notification Integration — Requirements - -## Overview - -Task Runner now produces pack plans with explicit approval and policy-gate metadata. The Notifications service must ingest those events, persist their state, and fan out actionable alerts (approvals requested, policy holds, resumptions). This document captures the requirements for the first Notifications sprint dedicated to the Task Runner bridge. - -Deliverables feed Sprint 37 tasks (`NOTIFY-SVC-37-00x`) and unblock Task Runner sprint 43 (`TASKRUN-43-001`). - -## Functional Requirements - -### 1. Approval Event Contract -- Define a canonical schema for **PackApprovalRequested** and **PackApprovalUpdated** events. -- Fields must include `runId`, `approvalId`, tenant context, plan hash, required grants, step identifiers, message template, and resume callback metadata. -- Provide an OpenAPI fragment and x-go/x-cs models for Task Runner and CLI compatibility. -- Document error/acknowledgement semantics (success, retryable failure, validation failure). - -### 2. Ingestion & Persistence -- Expose a secure Notifications API endpoint (`POST /notifications/pack-approvals`) receiving Task Runner events. -- Validate scope (`Packs.Approve`, `Notifier.Events:Write`) and tenant match. -- Persist approval state transitions in Mongo (`notifications.pack_approvals`) with indexes on run/approval/tenant. -- Store outbound notification audit records with correlation IDs to support Task Runner resume flow. - -### 3. Notification Routing -- Derive recipients from new rule predicates (`event.kind == "pack.approval"`). -- Render approval templates (email + webhook JSON) including plan metadata and approval links (resume token). -- Emit policy gate notifications as “hold” incidents with context (parameters, messages). -- Support localization fallback and redaction of secrets (never ship approval tokens unencrypted). - -### 4. Resume & Ack Handshake -- Provide an approval ack endpoint (`POST /notifications/pack-approvals/{runId}/{approvalId}/ack`) that records decision metadata and forwards to Task Runner resume hook (HTTP callback + message bus placeholder). -- Return structured responses with resume token / status for CLI integration. -- Ensure idempotent updates (dedupe by runId + approvalId + decisionHash). - -### 5. Observability & Security -- Emit metrics for approval notifications queued/sent, outstanding approvals, and acknowledgement latency. -- Log audit trail events (`pack.approval.requested`, `pack.approval.acknowledged`, `pack.policy.hold`). -- Enforce HMAC or mTLS for Task Runner -> Notifier ingestion; support configurable IP allowlist. -- Provide chaos-test plan for notification failure modes (channel outage, storage failure). - -## Non-Functional Requirements - -- Deterministic processing: identical approval events lead to identical outbound notifications (idempotent). -- Timeouts: ingestion endpoint must respond < 500 ms under nominal load. 
-- Retry strategy: Task Runner expects 5xx/429 for transient errors; document backoff guidance. -- Data retention: approval records retained 90 days, purge job tracked under ops runbook. - -## Sprint 37 Task Mapping - -| Task ID | Scope | -| --- | --- | -| **NOTIFY-SVC-37-001** | Author this contract doc, OpenAPI fragment, and schema references. Coordinate with Task Runner/Authority guilds. | -| **NOTIFY-SVC-37-002** | Implement secure ingestion endpoint, Mongo persistence, and audit hooks. Provide integration tests with sample events. | -| **NOTIFY-SVC-37-003** | Build approval/policy notification templates, routing rules, and channel dispatch (email + webhook). | -| **NOTIFY-SVC-37-004** | Ship acknowledgement endpoint + Task Runner callback client, resume token handling, and metrics/dashboards. | - -## Open Questions - -1. Who owns approval resume callback (Task Runner Worker vs Orchestrator)? Resolve before NOTIFY-SVC-37-004. -2. Should approvals generate incidents in existing incident schema or dedicated collection? Decision impacts Mongo design. -3. Authority scopes for approval ingestion/ack — reuse `Packs.Approve` or introduce `Packs.Approve:notify`? Coordinate with Authority team. +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +# Pack Approval Notification Integration — Requirements + +## Overview + +Task Runner now produces pack plans with explicit approval and policy-gate metadata. The Notifications service must ingest those events, persist their state, and fan out actionable alerts (approvals requested, policy holds, resumptions). This document captures the requirements for the first Notifications sprint dedicated to the Task Runner bridge. + +Deliverables feed Sprint 37 tasks (`NOTIFY-SVC-37-00x`) and unblock Task Runner sprint 43 (`TASKRUN-43-001`). + +## Functional Requirements + +### 1. Approval Event Contract +- Define a canonical schema for **PackApprovalRequested** and **PackApprovalUpdated** events. +- Fields must include `runId`, `approvalId`, tenant context, plan hash, required grants, step identifiers, message template, and resume callback metadata. +- Provide an OpenAPI fragment and x-go/x-cs models for Task Runner and CLI compatibility. +- Document error/acknowledgement semantics (success, retryable failure, validation failure). + +### 2. Ingestion & Persistence +- Expose a secure Notifications API endpoint (`POST /notifications/pack-approvals`) receiving Task Runner events. +- Validate scope (`Packs.Approve`, `Notifier.Events:Write`) and tenant match. +- Persist approval state transitions in Mongo (`notifications.pack_approvals`) with indexes on run/approval/tenant. +- Store outbound notification audit records with correlation IDs to support Task Runner resume flow. + +### 3. Notification Routing +- Derive recipients from new rule predicates (`event.kind == "pack.approval"`). +- Render approval templates (email + webhook JSON) including plan metadata and approval links (resume token). +- Emit policy gate notifications as “hold” incidents with context (parameters, messages). +- Support localization fallback and redaction of secrets (never ship approval tokens unencrypted). + +### 4. Resume & Ack Handshake +- Provide an approval ack endpoint (`POST /notifications/pack-approvals/{runId}/{approvalId}/ack`) that records decision metadata and forwards to Task Runner resume hook (HTTP callback + message bus placeholder). 
+- Return structured responses with resume token / status for CLI integration. +- Ensure idempotent updates (dedupe by runId + approvalId + decisionHash). + +### 5. Observability & Security +- Emit metrics for approval notifications queued/sent, outstanding approvals, and acknowledgement latency. +- Log audit trail events (`pack.approval.requested`, `pack.approval.acknowledged`, `pack.policy.hold`). +- Enforce HMAC or mTLS for Task Runner -> Notifier ingestion; support configurable IP allowlist. +- Provide chaos-test plan for notification failure modes (channel outage, storage failure). + +## Non-Functional Requirements + +- Deterministic processing: identical approval events lead to identical outbound notifications (idempotent). +- Timeouts: ingestion endpoint must respond < 500 ms under nominal load. +- Retry strategy: Task Runner expects 5xx/429 for transient errors; document backoff guidance. +- Data retention: approval records retained 90 days, purge job tracked under ops runbook. + +## Sprint 37 Task Mapping + +| Task ID | Scope | +| --- | --- | +| **NOTIFY-SVC-37-001** | Author this contract doc, OpenAPI fragment, and schema references. Coordinate with Task Runner/Authority guilds. | +| **NOTIFY-SVC-37-002** | Implement secure ingestion endpoint, Mongo persistence, and audit hooks. Provide integration tests with sample events. | +| **NOTIFY-SVC-37-003** | Build approval/policy notification templates, routing rules, and channel dispatch (email + webhook). | +| **NOTIFY-SVC-37-004** | Ship acknowledgement endpoint + Task Runner callback client, resume token handling, and metrics/dashboards. | + +## Open Questions + +1. Who owns approval resume callback (Task Runner Worker vs Orchestrator)? Resolve before NOTIFY-SVC-37-004. +2. Should approvals generate incidents in existing incident schema or dedicated collection? Decision impacts Mongo design. +3. Authority scopes for approval ingestion/ack — reuse `Packs.Approve` or introduce `Packs.Approve:notify`? Coordinate with Authority team. diff --git a/docs/notifications/rules.md b/docs/notifications/rules.md index 8c690016..ce5ed2fe 100644 --- a/docs/notifications/rules.md +++ b/docs/notifications/rules.md @@ -1,147 +1,147 @@ -# Notifications Rules - -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -Rules decide which platform events deserve a notification, how aggressively they should be throttled, and which channels/actions should run. They are tenant-scoped contracts that guarantee deterministic routing across Notify.Worker replicas. - ---- - -## 1. Rule lifecycle - -1. **Authoring.** Operators create or update rules through the Notify WebService (`POST /rules`, `PATCH /rules/{id}`) or UI. Payloads are normalised to the current `NotifyRule` schema version. -2. **Evaluation.** Notify.Worker evaluates enabled rules per incoming event. Tenancy is enforced first, followed by match filters, VEX gates, throttles, and digest handling. -3. **Delivery.** Matching actions are enqueued with an idempotency key to prevent storm loops. Throttle rejections and digest coalescing are recorded in the delivery ledger. -4. **Audit.** Every change carries `createdBy`/`updatedBy` plus timestamps; the delivery ledger references `ruleId`/`actionId` for traceability. - ---- - -## 2. Rule schema reference - -| Field | Type | Notes | -|-------|------|-------| -| `ruleId` | string | Stable identifier; clients may provide UUID/slug. 
| -| `tenantId` | string | Must match the tenant header supplied when the rule is created. | -| `name` | string | Display label shown in UI and audits. | -| `description` | string? | Optional operator-facing note. | -| `enabled` | bool | Disabled rules remain stored but skipped during evaluation. | -| `labels` | map | Sorted, trimmed key/value tags supporting filtering. | -| `metadata` | map | Reserved for automation; stored verbatim (sorted). | -| `match` | [`NotifyRuleMatch`](#3-match-filters) | Declarative filters applied before actions execute. | -| `actions[]` | [`NotifyRuleAction`](#4-actions-throttles-and-digests) | Ordered set of channel dispatchers; minimum one. | -| `createdBy`/`createdAt` | string?, instant | Populated automatically when omitted. | -| `updatedBy`/`updatedAt` | string?, instant | Defaults to creation values when unspecified. | -| `schemaVersion` | string | Auto-upgraded during persistence; use for migrations. | - -Rules are immutable snapshots; updates produce a full document write so workers observing change streams can refresh caches deterministically. - ---- - -## 3. Match filters - -`NotifyRuleMatch` narrows which events trigger the rule. All string collections are trimmed, deduplicated, and sorted to guarantee deterministic evaluation. - -| Field | Type | Behaviour | -|-------|------|-----------| -| `eventKinds[]` | string | Lower-cased; supports any canonical Notify event (`scanner.report.ready`, `scheduler.rescan.delta`, `zastava.admission`, etc.). Empty list matches all kinds. | -| `namespaces[]` | string | Exact match against `event.scope.namespace`. Supports glob-style filters via upstream enrichment (planned). | -| `repositories[]` | string | Matches `event.scope.repo`. | -| `digests[]` | string | Lower-cased; matches `event.scope.digest`. | -| `labels[]` | string | Matches event attributes or delta labels (`kev`, `critical`, `license`, …). | -| `componentPurls[]` | string | Matches component identifiers inside the event payload when provided. | -| `minSeverity` | string? | Lower-cased severity gate (e.g., `medium`, `high`, `critical`). Evaluated on new findings inside event deltas; events lacking severity bypass this gate unless set. | -| `verdicts[]` | string | Accepts scan/report verdicts (`fail`, `warn`, `block`, `escalate`, `deny`). | -| `kevOnly` | bool? | When `true`, only KEV-tagged findings fire. | -| `vex` | object | Additional gating aligned with VEX consensus; see below. | - -### 3.1 VEX gates - -`NotifyRuleMatchVex` offers fine-grained control when VEX findings accompany events: - -| Field | Default | Effect | -|-------|---------|--------| -| `includeAcceptedJustifications` | `true` | Include findings marked `not_affected`/`acceptable` in consensus. | -| `includeRejectedJustifications` | `false` | Surface findings the consensus rejected. | -| `includeUnknownJustifications` | `false` | Allow findings without explicit justification. | -| `justificationKinds[]` | `[]` | Optional allow-list of justification codes (e.g., `exploit_observed`, `component_not_present`). | - -If the VEX block filters out every applicable finding, the rule is treated as a non-match and no actions run. - ---- - -## 4. Actions, throttles, and digests - -Each rule requires at least one action. Actions are deduplicated and sorted by `actionId`, so prefer deterministic identifiers. - -| Field | Type | Notes | -|-------|------|-------| -| `actionId` | string | Stable identifier unique within the rule. 
| -| `channel` | string | Reference to a channel (`channelId`) configured in `/channels`. | -| `template` | string? | Template key to use for rendering; falls back to channel default when omitted. | -| `digest` | string? | Digest window key (`instant`, `5m`, `15m`, `1h`, `1d`). `instant` bypasses coalescing. | -| `throttle` | ISO8601 duration? | Optional throttle TTL (`PT300S`, `PT1H`). Prevents duplicate deliveries when the same idempotency hash appears before expiry. | -| `locale` | string? | BCP-47 tag (stored lower-case). Template lookup falls back to channel locale then `en-us`. | -| `enabled` | bool | Disabled actions skip rendering but remain stored. | -| `metadata` | map | Connector-specific hints (priority, layout, etc.). | - -### 4.1 Evaluation order - -1. Verify channel exists and is enabled; disabled channels mark the delivery as `Dropped`. -2. Apply throttle idempotency key: `hash(ruleId|actionId|event.kind|scope.digest|delta.hash|dayBucket)`. Hits are logged as `Throttled`. -3. If the action defines a digest window other than `instant`, append the event to the open window and defer delivery until flush. -4. When delivery proceeds, the renderer resolves the template, locale, and metadata before invoking the connector. - ---- - -## 5. Example rule payload - -```json -{ - "ruleId": "rule-critical-soc", - "tenantId": "tenant-dev", - "name": "Critical scanner verdicts", - "description": "Route KEV-tagged critical findings to SOC Slack with zero delay.", - "enabled": true, - "match": { - "eventKinds": ["scanner.report.ready"], - "labels": ["kev", "critical"], - "minSeverity": "critical", - "verdicts": ["fail", "block"], - "kevOnly": true - }, - "actions": [ - { - "actionId": "act-slack-critical", - "channel": "chn-slack-soc", - "template": "tmpl-critical", - "digest": "instant", - "throttle": "PT300S", - "locale": "en-us", - "metadata": { - "priority": "p1" - } - } - ], - "labels": { - "owner": "soc" - }, - "metadata": { - "revision": "12" - } -} -``` - -Dry-run calls (`POST /rules/{id}/test`) accept the same structure along with a sample Notify event payload to exercise match logic without invoking connectors. - ---- - -## 6. Operational guidance - -- Keep rule scopes narrow (namespace/repository) before relying on severity gates; this minimises noise and improves digest summarisation. -- Always configure a throttle window for instant actions to protect against repeated upstream retries. -- Use rule labels to organise dashboards and access control (e.g., `owner:soc`, `env:prod`). -- Prefer tenant-specific rule IDs so Offline Kit exports remain deterministic across environments. -- If a rule depends on derived metadata (e.g., policy verdict tags), list those dependencies in the rule description for audit readiness. - ---- - -> **Imposed rule reminder:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. +# Notifications Rules + +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +Rules decide which platform events deserve a notification, how aggressively they should be throttled, and which channels/actions should run. They are tenant-scoped contracts that guarantee deterministic routing across Notify.Worker replicas. + +--- + +## 1. Rule lifecycle + +1. **Authoring.** Operators create or update rules through the Notify WebService (`POST /rules`, `PATCH /rules/{id}`) or UI. 
Payloads are normalised to the current `NotifyRule` schema version. +2. **Evaluation.** Notify.Worker evaluates enabled rules per incoming event. Tenancy is enforced first, followed by match filters, VEX gates, throttles, and digest handling. +3. **Delivery.** Matching actions are enqueued with an idempotency key to prevent storm loops. Throttle rejections and digest coalescing are recorded in the delivery ledger. +4. **Audit.** Every change carries `createdBy`/`updatedBy` plus timestamps; the delivery ledger references `ruleId`/`actionId` for traceability. + +--- + +## 2. Rule schema reference + +| Field | Type | Notes | +|-------|------|-------| +| `ruleId` | string | Stable identifier; clients may provide UUID/slug. | +| `tenantId` | string | Must match the tenant header supplied when the rule is created. | +| `name` | string | Display label shown in UI and audits. | +| `description` | string? | Optional operator-facing note. | +| `enabled` | bool | Disabled rules remain stored but skipped during evaluation. | +| `labels` | map | Sorted, trimmed key/value tags supporting filtering. | +| `metadata` | map | Reserved for automation; stored verbatim (sorted). | +| `match` | [`NotifyRuleMatch`](#3-match-filters) | Declarative filters applied before actions execute. | +| `actions[]` | [`NotifyRuleAction`](#4-actions-throttles-and-digests) | Ordered set of channel dispatchers; minimum one. | +| `createdBy`/`createdAt` | string?, instant | Populated automatically when omitted. | +| `updatedBy`/`updatedAt` | string?, instant | Defaults to creation values when unspecified. | +| `schemaVersion` | string | Auto-upgraded during persistence; use for migrations. | + +Rules are immutable snapshots; updates produce a full document write so workers observing change streams can refresh caches deterministically. + +--- + +## 3. Match filters + +`NotifyRuleMatch` narrows which events trigger the rule. All string collections are trimmed, deduplicated, and sorted to guarantee deterministic evaluation. + +| Field | Type | Behaviour | +|-------|------|-----------| +| `eventKinds[]` | string | Lower-cased; supports any canonical Notify event (`scanner.report.ready`, `scheduler.rescan.delta`, `zastava.admission`, etc.). Empty list matches all kinds. | +| `namespaces[]` | string | Exact match against `event.scope.namespace`. Supports glob-style filters via upstream enrichment (planned). | +| `repositories[]` | string | Matches `event.scope.repo`. | +| `digests[]` | string | Lower-cased; matches `event.scope.digest`. | +| `labels[]` | string | Matches event attributes or delta labels (`kev`, `critical`, `license`, …). | +| `componentPurls[]` | string | Matches component identifiers inside the event payload when provided. | +| `minSeverity` | string? | Lower-cased severity gate (e.g., `medium`, `high`, `critical`). Evaluated on new findings inside event deltas; events lacking severity bypass this gate unless set. | +| `verdicts[]` | string | Accepts scan/report verdicts (`fail`, `warn`, `block`, `escalate`, `deny`). | +| `kevOnly` | bool? | When `true`, only KEV-tagged findings fire. | +| `vex` | object | Additional gating aligned with VEX consensus; see below. | + +### 3.1 VEX gates + +`NotifyRuleMatchVex` offers fine-grained control when VEX findings accompany events: + +| Field | Default | Effect | +|-------|---------|--------| +| `includeAcceptedJustifications` | `true` | Include findings marked `not_affected`/`acceptable` in consensus. 
| +| `includeRejectedJustifications` | `false` | Surface findings the consensus rejected. | +| `includeUnknownJustifications` | `false` | Allow findings without explicit justification. | +| `justificationKinds[]` | `[]` | Optional allow-list of justification codes (e.g., `exploit_observed`, `component_not_present`). | + +If the VEX block filters out every applicable finding, the rule is treated as a non-match and no actions run. + +--- + +## 4. Actions, throttles, and digests + +Each rule requires at least one action. Actions are deduplicated and sorted by `actionId`, so prefer deterministic identifiers. + +| Field | Type | Notes | +|-------|------|-------| +| `actionId` | string | Stable identifier unique within the rule. | +| `channel` | string | Reference to a channel (`channelId`) configured in `/channels`. | +| `template` | string? | Template key to use for rendering; falls back to channel default when omitted. | +| `digest` | string? | Digest window key (`instant`, `5m`, `15m`, `1h`, `1d`). `instant` bypasses coalescing. | +| `throttle` | ISO8601 duration? | Optional throttle TTL (`PT300S`, `PT1H`). Prevents duplicate deliveries when the same idempotency hash appears before expiry. | +| `locale` | string? | BCP-47 tag (stored lower-case). Template lookup falls back to channel locale then `en-us`. | +| `enabled` | bool | Disabled actions skip rendering but remain stored. | +| `metadata` | map | Connector-specific hints (priority, layout, etc.). | + +### 4.1 Evaluation order + +1. Verify channel exists and is enabled; disabled channels mark the delivery as `Dropped`. +2. Apply throttle idempotency key: `hash(ruleId|actionId|event.kind|scope.digest|delta.hash|dayBucket)`. Hits are logged as `Throttled`. +3. If the action defines a digest window other than `instant`, append the event to the open window and defer delivery until flush. +4. When delivery proceeds, the renderer resolves the template, locale, and metadata before invoking the connector. + +--- + +## 5. Example rule payload + +```json +{ + "ruleId": "rule-critical-soc", + "tenantId": "tenant-dev", + "name": "Critical scanner verdicts", + "description": "Route KEV-tagged critical findings to SOC Slack with zero delay.", + "enabled": true, + "match": { + "eventKinds": ["scanner.report.ready"], + "labels": ["kev", "critical"], + "minSeverity": "critical", + "verdicts": ["fail", "block"], + "kevOnly": true + }, + "actions": [ + { + "actionId": "act-slack-critical", + "channel": "chn-slack-soc", + "template": "tmpl-critical", + "digest": "instant", + "throttle": "PT300S", + "locale": "en-us", + "metadata": { + "priority": "p1" + } + } + ], + "labels": { + "owner": "soc" + }, + "metadata": { + "revision": "12" + } +} +``` + +Dry-run calls (`POST /rules/{id}/test`) accept the same structure along with a sample Notify event payload to exercise match logic without invoking connectors. + +--- + +## 6. Operational guidance + +- Keep rule scopes narrow (namespace/repository) before relying on severity gates; this minimises noise and improves digest summarisation. +- Always configure a throttle window for instant actions to protect against repeated upstream retries. +- Use rule labels to organise dashboards and access control (e.g., `owner:soc`, `env:prod`). +- Prefer tenant-specific rule IDs so Offline Kit exports remain deterministic across environments. +- If a rule depends on derived metadata (e.g., policy verdict tags), list those dependencies in the rule description for audit readiness. 
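+
+To make the throttle idempotency key from section 4.1 concrete, the sketch below derives a deterministic key from the documented inputs (`ruleId|actionId|event.kind|scope.digest|delta.hash|dayBucket`). The helper name, day-bucket format, and hex encoding are illustrative assumptions rather than the service's actual implementation.
+
+```csharp
+using System;
+using System.Security.Cryptography;
+using System.Text;
+
+static class ThrottleKeySketch
+{
+    // Joins the documented inputs in a fixed order and hashes them so identical
+    // (rule, action, event, delta, day) tuples collapse onto a single delivery.
+    public static string Compute(string ruleId, string actionId, string eventKind,
+                                 string scopeDigest, string deltaHash, DateTimeOffset occurredAt)
+    {
+        var dayBucket = occurredAt.UtcDateTime.ToString("yyyy-MM-dd"); // assumed bucket granularity
+        var canonical = string.Join("|", ruleId, actionId, eventKind, scopeDigest, deltaHash, dayBucket);
+        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(canonical));
+        return Convert.ToHexString(hash).ToLowerInvariant(); // lowercase hex, matching the bodyHash convention
+    }
+}
+```
+
+A repeat of the same key within the action's throttle TTL is recorded as `Throttled` instead of being dispatched again, per the evaluation order in section 4.1.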
+ +--- + +> **Imposed rule reminder:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. diff --git a/docs/notifications/templates.md b/docs/notifications/templates.md index 0b41cbc7..fbb53cf8 100644 --- a/docs/notifications/templates.md +++ b/docs/notifications/templates.md @@ -1,130 +1,130 @@ -# Notifications Templates - -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -Templates shape the payload rendered for each channel when a rule action fires. They are deterministic, locale-aware artefacts stored alongside rules so Notify.Worker replicas can render identical messages regardless of environment. - ---- - -## 1. Template lifecycle - -1. **Authoring.** Operators create templates via the API (`POST /templates`) or UI. Each template binds to a channel type (`Slack`, `Teams`, `Email`, `Webhook`, `Custom`) and a locale. -2. **Reference.** Rule actions opt in by referencing the template key (`actions[].template`). Channel defaults apply when no template is specified. -3. **Rendering.** During delivery, the worker resolves the template (locale fallbacks included), executes it using the safe Handlebars-style engine, and passes the rendered payload plus metadata to the connector. -4. **Audit.** Rendered payloads stored in the delivery ledger include the `templateId` so operators can trace which text was used. - ---- - -## 2. Template schema reference - -| Field | Type | Notes | -|-------|------|-------| -| `templateId` | string | Stable identifier (UUID/slug). | -| `tenantId` | string | Must match the tenant header in API calls. | -| `channelType` | [`NotifyChannelType`](../ARCHITECTURE_NOTIFY.md#5-channels--connectors-plug-ins) | Determines connector payload envelope. | -| `key` | string | Human-readable key referenced by rules (`tmpl-critical`). | -| `locale` | string | BCP-47 tag, stored lower-case (`en-us`, `bg-bg`). | -| `body` | string | Template body; rendered strictly without executing arbitrary code. | -| `renderMode` | enum | `Markdown`, `Html`, `AdaptiveCard`, `PlainText`, or `Json`. Guides connector sanitisation. | -| `format` | enum | `Slack`, `Teams`, `Email`, `Webhook`, or `Json`. Signals delivery payload structure. | -| `description` | string? | Optional operator note. | -| `metadata` | map | Sorted map for automation (layout hints, fallback text). | -| `createdBy`/`createdAt` | string?, instant | Auto-populated. | -| `updatedBy`/`updatedAt` | string?, instant | Auto-populated. | -| `schemaVersion` | string | Auto-upgraded on persistence. | - -Templates are normalised: string fields trimmed, locale lower-cased, metadata sorted to preserve determinism. - ---- - -## 3. Variables, helpers, and context - -Templates receive a structured context derived from the Notify event, rule match, and rendering metadata. - -| Path | Description | -|------|-------------| -| `event.*` | Canonical event envelope (`kind`, `tenant`, `ts`, `actor`). | -| `event.scope.*` | Namespace, repository, digest, image, component identifiers, labels, attributes. | -| `payload.*` | Raw event payload (e.g., `payload.verdict`, `payload.delta.*`, `payload.links.*`). | -| `rule.*` | Rule descriptor (`ruleId`, `name`, `labels`, `metadata`). | -| `action.*` | Action descriptor (`actionId`, `channel`, `digest`, `throttle`, `metadata`). | -| `policy.*` | Policy metadata when supplied (`revisionId`, `name`). 
| -| `topFindings[]` | Top-N findings summarised for convenience (vulnerability ID, severity, reachability). | -| `digest.*` | When rendering digest flushes: `window`, `openedAt`, `itemCount`. | - -Built-in helpers mirror the architecture dossier: - -| Helper | Usage | -|--------|-------| -| `severity_icon severity` | Returns emoji/text badge representing severity. | -| `link text url` | Produces channel-safe hyperlink. | -| `pluralize count "finding"` | Adds plural suffix when `count != 1`. | -| `truncate text maxLength` | Cuts strings while preserving determinism. | -| `code text` | Formats inline code (Markdown/HTML aware). | - -Connectors may expose additional helpers via partials, but must remain deterministic and side-effect free. - ---- - -## 4. Sample templates - -### 4.1 Slack (Markdown + block kit) - -```hbs -{{#*inline "findingLine"}} -- {{severity_icon severity}} {{vulnId}} ({{severity}}) in `{{component}}` -{{/inline}} - -*:rotating_light: {{payload.summary.total}} findings {{#if payload.delta.newCritical}}(new critical: {{payload.delta.newCritical}}){{/if}}* - -{{#if topFindings.length}} -Top findings: -{{#each topFindings}}{{> findingLine}}{{/each}} -{{/if}} - -{{link "Open report in Console" payload.links.ui}} -``` - -### 4.2 Email (HTML + text alternative) - -```hbs -

-<h2>{{payload.verdict}} for {{event.scope.repo}}</h2>
-
-<p>{{payload.summary.total}} findings ({{payload.summary.blocked}} blocked, {{payload.summary.warned}} warned)</p>
-
-<table>
-  <tr>
-    <th>Finding</th>
-    <th>Severity</th>
-    <th>Package</th>
-  </tr>
-  {{#each topFindings}}
-  <tr>
-    <td>{{this.vulnId}}</td>
-    <td>{{this.severity}}</td>
-    <td>{{this.component}}</td>
-  </tr>
-  {{/each}}
-</table>
-
-<p>{{link "View full analysis" payload.links.ui}}</p>
-``` - -When delivering via email, connectors automatically attach a plain-text alternative derived from the rendered content to preserve accessibility. - ---- - -## 5. Preview and validation - -- `POST /channels/{id}/test` accepts an optional `templateId` and sample payload to produce a rendered preview without dispatching the event. Results include channel type, target, title/summary, locale, body hash, and connector metadata. -- UI previews rely on the same API and highlight connector fallbacks (e.g., Teams adaptive card vs. text fallback). -- Offline Kit scenarios can call `/internal/notify/templates/normalize` to ensure bundled templates match the canonical schema before packaging. - ---- - -## 6. Best practices - -- Keep channel-specific limits in mind (Slack block/character quotas, Teams adaptive card size, email line length). Lean on digests to summarise long lists. -- Provide locale-specific versions for high-volume tenants; Notify selects the closest locale, falling back to `en-us`. -- Store connector-specific hints (`metadata.layout`, `metadata.emoji`) in template metadata rather than rules when they affect rendering. -- Version template bodies through metadata (e.g., `metadata.revision: "2025-10-28"`) so tenants can track changes over time. -- Run test previews whenever introducing new helpers to confirm body hashes remain stable across environments. - ---- - -> **Imposed rule reminder:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. +# Notifications Templates + +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +Templates shape the payload rendered for each channel when a rule action fires. They are deterministic, locale-aware artefacts stored alongside rules so Notify.Worker replicas can render identical messages regardless of environment. + +--- + +## 1. Template lifecycle + +1. **Authoring.** Operators create templates via the API (`POST /templates`) or UI. Each template binds to a channel type (`Slack`, `Teams`, `Email`, `Webhook`, `Custom`) and a locale. +2. **Reference.** Rule actions opt in by referencing the template key (`actions[].template`). Channel defaults apply when no template is specified. +3. **Rendering.** During delivery, the worker resolves the template (locale fallbacks included), executes it using the safe Handlebars-style engine, and passes the rendered payload plus metadata to the connector. +4. **Audit.** Rendered payloads stored in the delivery ledger include the `templateId` so operators can trace which text was used. + +--- + +## 2. Template schema reference + +| Field | Type | Notes | +|-------|------|-------| +| `templateId` | string | Stable identifier (UUID/slug). | +| `tenantId` | string | Must match the tenant header in API calls. | +| `channelType` | [`NotifyChannelType`](../ARCHITECTURE_NOTIFY.md#5-channels--connectors-plug-ins) | Determines connector payload envelope. | +| `key` | string | Human-readable key referenced by rules (`tmpl-critical`). | +| `locale` | string | BCP-47 tag, stored lower-case (`en-us`, `bg-bg`). | +| `body` | string | Template body; rendered strictly without executing arbitrary code. | +| `renderMode` | enum | `Markdown`, `Html`, `AdaptiveCard`, `PlainText`, or `Json`. Guides connector sanitisation. | +| `format` | enum | `Slack`, `Teams`, `Email`, `Webhook`, or `Json`. Signals delivery payload structure. | +| `description` | string? | Optional operator note. 
| +| `metadata` | map | Sorted map for automation (layout hints, fallback text). | +| `createdBy`/`createdAt` | string?, instant | Auto-populated. | +| `updatedBy`/`updatedAt` | string?, instant | Auto-populated. | +| `schemaVersion` | string | Auto-upgraded on persistence. | + +Templates are normalised: string fields trimmed, locale lower-cased, metadata sorted to preserve determinism. + +--- + +## 3. Variables, helpers, and context + +Templates receive a structured context derived from the Notify event, rule match, and rendering metadata. + +| Path | Description | +|------|-------------| +| `event.*` | Canonical event envelope (`kind`, `tenant`, `ts`, `actor`). | +| `event.scope.*` | Namespace, repository, digest, image, component identifiers, labels, attributes. | +| `payload.*` | Raw event payload (e.g., `payload.verdict`, `payload.delta.*`, `payload.links.*`). | +| `rule.*` | Rule descriptor (`ruleId`, `name`, `labels`, `metadata`). | +| `action.*` | Action descriptor (`actionId`, `channel`, `digest`, `throttle`, `metadata`). | +| `policy.*` | Policy metadata when supplied (`revisionId`, `name`). | +| `topFindings[]` | Top-N findings summarised for convenience (vulnerability ID, severity, reachability). | +| `digest.*` | When rendering digest flushes: `window`, `openedAt`, `itemCount`. | + +Built-in helpers mirror the architecture dossier: + +| Helper | Usage | +|--------|-------| +| `severity_icon severity` | Returns emoji/text badge representing severity. | +| `link text url` | Produces channel-safe hyperlink. | +| `pluralize count "finding"` | Adds plural suffix when `count != 1`. | +| `truncate text maxLength` | Cuts strings while preserving determinism. | +| `code text` | Formats inline code (Markdown/HTML aware). | + +Connectors may expose additional helpers via partials, but must remain deterministic and side-effect free. + +--- + +## 4. Sample templates + +### 4.1 Slack (Markdown + block kit) + +```hbs +{{#*inline "findingLine"}} +- {{severity_icon severity}} {{vulnId}} ({{severity}}) in `{{component}}` +{{/inline}} + +*:rotating_light: {{payload.summary.total}} findings {{#if payload.delta.newCritical}}(new critical: {{payload.delta.newCritical}}){{/if}}* + +{{#if topFindings.length}} +Top findings: +{{#each topFindings}}{{> findingLine}}{{/each}} +{{/if}} + +{{link "Open report in Console" payload.links.ui}} +``` + +### 4.2 Email (HTML + text alternative) + +```hbs +

+<h2>{{payload.verdict}} for {{event.scope.repo}}</h2>
+
+<p>{{payload.summary.total}} findings ({{payload.summary.blocked}} blocked, {{payload.summary.warned}} warned)</p>
+
+<table>
+  <tr>
+    <th>Finding</th>
+    <th>Severity</th>
+    <th>Package</th>
+  </tr>
+  {{#each topFindings}}
+  <tr>
+    <td>{{this.vulnId}}</td>
+    <td>{{this.severity}}</td>
+    <td>{{this.component}}</td>
+  </tr>
+  {{/each}}
+</table>
+
+<p>{{link "View full analysis" payload.links.ui}}</p>
+``` + +When delivering via email, connectors automatically attach a plain-text alternative derived from the rendered content to preserve accessibility. + +--- + +## 5. Preview and validation + +- `POST /channels/{id}/test` accepts an optional `templateId` and sample payload to produce a rendered preview without dispatching the event. Results include channel type, target, title/summary, locale, body hash, and connector metadata. +- UI previews rely on the same API and highlight connector fallbacks (e.g., Teams adaptive card vs. text fallback). +- Offline Kit scenarios can call `/internal/notify/templates/normalize` to ensure bundled templates match the canonical schema before packaging. + +--- + +## 6. Best practices + +- Keep channel-specific limits in mind (Slack block/character quotas, Teams adaptive card size, email line length). Lean on digests to summarise long lists. +- Provide locale-specific versions for high-volume tenants; Notify selects the closest locale, falling back to `en-us`. +- Store connector-specific hints (`metadata.layout`, `metadata.emoji`) in template metadata rather than rules when they affect rendering. +- Version template bodies through metadata (e.g., `metadata.revision: "2025-10-28"`) so tenants can track changes over time. +- Run test previews whenever introducing new helpers to confirm body hashes remain stable across environments. + +--- + +> **Imposed rule reminder:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. diff --git a/docs/observability/policy.md b/docs/observability/policy.md index 7e5091e8..b2284575 100644 --- a/docs/observability/policy.md +++ b/docs/observability/policy.md @@ -1,166 +1,166 @@ -# Policy Engine Observability - -> **Audience:** Observability Guild, SRE/Platform operators, Policy Guild. -> **Scope:** Metrics, logs, traces, dashboards, alerting, sampling, and incident workflows for the Policy Engine service (Sprint 20). -> **Prerequisites:** Policy Engine v2 deployed with OpenTelemetry exporters enabled (`observability:enabled=true` in config). - ---- - -## 1 · Instrumentation Overview - -- **Telemetry stack:** OpenTelemetry SDK (metrics + traces), Serilog structured logging, OTLP exporters → Collector → Prometheus/Loki/Tempo. -- **Namespace conventions:** `policy.*` for metrics/traces/log categories; labels use `tenant`, `policy`, `mode`, `runId`. -- **Sampling:** Default 10 % trace sampling, 1 % rule-hit log sampling; incident mode overrides to 100 % (see §6). -- **Correlation IDs:** Every API request gets `traceId` + `requestId`. CLI/UI display IDs to streamline support. - ---- - -## 2 · Metrics - -### 2.1 Run Pipeline - -| Metric | Type | Labels | Notes | -|--------|------|--------|-------| -| `policy_run_seconds` | Histogram | `tenant`, `policy`, `mode` (`full`, `incremental`, `simulate`) | P95 target ≤ 5 min incremental, ≤ 30 min full. | -| `policy_run_queue_depth` | Gauge | `tenant` | Number of pending jobs per tenant (updated each enqueue/dequeue). | -| `policy_run_failures_total` | Counter | `tenant`, `policy`, `reason` (`err_pol_*`, `network`, `cancelled`) | Aligns with error codes. | -| `policy_run_retries_total` | Counter | `tenant`, `policy` | Helps identify noisy sources. | -| `policy_run_inputs_pending_bytes` | Gauge | `tenant` | Size of buffered change batches awaiting run. 
| - -### 2.2 Evaluator Insights - -| Metric | Type | Labels | Notes | -|--------|------|--------|-------| -| `policy_rules_fired_total` | Counter | `tenant`, `policy`, `rule` | Increment per rule match (sampled). | -| `policy_vex_overrides_total` | Counter | `tenant`, `policy`, `vendor`, `justification` | Tracks VEX precedence decisions. | -| `policy_suppressions_total` | Counter | `tenant`, `policy`, `action` (`ignore`, `warn`, `quiet`) | Audits suppression usage. | -| `policy_selection_batch_duration_seconds` | Histogram | `tenant`, `policy` | Measures joiner performance. | -| `policy_materialization_conflicts_total` | Counter | `tenant`, `policy` | Non-zero indicates optimistic concurrency retries. | - -### 2.3 API Surface - -| Metric | Type | Labels | Notes | -|--------|------|--------|-------| -| `policy_api_requests_total` | Counter | `endpoint`, `method`, `status` | Exposed via Minimal API instrumentation. | -| `policy_api_latency_seconds` | Histogram | `endpoint`, `method` | Budget ≤ 250 ms for GETs, ≤ 1 s for POSTs. | -| `policy_api_rate_limited_total` | Counter | `endpoint` | Tied to throttles (`429`). | - -### 2.4 Queue & Change Streams - -| Metric | Type | Labels | Notes | -|--------|------|--------|-------| -| `policy_queue_leases_active` | Gauge | `tenant` | Number of leased jobs. | -| `policy_queue_lease_expirations_total` | Counter | `tenant` | Alerts when workers fail to ack. | -| `policy_delta_backlog_age_seconds` | Gauge | `tenant`, `source` (`concelier`, `excititor`, `sbom`) | Age of oldest unprocessed change event. | - ---- - -## 3 · Logs - -- **Format:** JSON (`Serilog`). Core fields: `timestamp`, `level`, `message`, `policyId`, `policyVersion`, `tenant`, `runId`, `rule`, `traceId`, `env.sealed`, `error.code`. -- **Log categories:** - - `policy.run` (queue lifecycle, run begin/end, stats) - - `policy.evaluate` (batch execution summaries; rule-hit sampling) - - `policy.materialize` (Mongo operations, conflicts, retries) - - `policy.simulate` (diff results, CLI invocation metadata) - - `policy.lifecycle` (submit/review/approve events) -- **Sampling:** Rule-hit logs sample 1 % by default; toggled to 100 % in incident mode or when `--trace` flag used in CLI. -- **PII:** No user secrets recorded; user identities referenced as `user:` or `group:` only. - ---- - -## 4 · Traces - -- Spans emit via OpenTelemetry instrumentation. -- **Primary spans:** - - `policy.api` – wraps HTTP request, records `endpoint`, `status`, `scope`. - - `policy.select` – change stream ingestion and batch assembly (attributes: `candidateCount`, `cursor`). - - `policy.evaluate` – evaluation batch (attributes: `batchSize`, `ruleHits`, `severityChanges`). - - `policy.materialize` – Mongo writes (attributes: `writes`, `historyWrites`, `retryCount`). - - `policy.simulate` – simulation diff generation (attributes: `sbomCount`, `diffAdded`, `diffRemoved`). -- Trace context propagated to CLI via response headers `traceparent`; UI surfaces in run detail view. -- Incident mode forces span sampling to 100 % and extends retention via Collector config override. - ---- - -## 5 · Dashboards - -### 5.1 Policy Runs Overview - -Widgets: -- Run duration histogram (per mode/tenant). -- Queue depth + backlog age line charts. -- Failure rate stacked by error code. -- Incremental backlog heatmap (policy × age). -- Active vs scheduled runs table. - -### 5.2 Rule Impact & VEX - -- Top N rules by firings (bar chart). -- VEX overrides by vendor/justification (stacked chart). 
-- Suppression usage (pie + table with justifications). -- Quieted findings trend (line). - -### 5.3 Simulation & Approval Health - -- Simulation diff histogram (added vs removed). -- Pending approvals by age (table with SLA colour coding). -- Compliance checklist status (lint, determinism CI, simulation evidence). - -> Placeholders for Grafana panels should be replaced with actual screenshots once dashboards land (`../assets/policy-observability/*.png`). - ---- - -## 6 · Alerting - -| Alert | Condition | Suggested Action | -|-------|-----------|------------------| -| **PolicyRunSlaBreach** | `policy_run_seconds{mode="incremental"}` P95 > 300 s for 3 windows | Check queue depth, upstream services, scale worker pool. | -| **PolicyQueueStuck** | `policy_delta_backlog_age_seconds` > 600 | Investigate change stream connectivity. | -| **DeterminismMismatch** | Run status `failed` with `ERR_POL_004` OR CI replay diff | Switch to incident sampling, gather replay bundle, notify Policy Guild. | -| **SimulationDrift** | CLI/CI simulation exit `20` (blocking diff) over threshold | Review policy changes before approval. | -| **VexOverrideSpike** | `policy_vex_overrides_total` > configured baseline (per vendor) | Verify upstream VEX feed; ensure justification codes expected. | -| **SuppressionSurge** | `policy_suppressions_total` increase > 3σ vs baseline | Audit new suppress rules; check approvals. | - -Alerts integrate with Notifier channels (`policy.alerts`) and Ops on-call rotations. - ---- - -## 7 · Incident Mode & Forensics - -- Toggle via `POST /api/policy/incidents/activate` (requires `policy:operate` scope). -- Effects: - - Trace sampling → 100 %. - - Rule-hit log sampling → 100 %. - - Retention window extended to 30 days for incident duration. - - `policy.incident.activated` event emitted (Console + Notifier banners). -- Post-incident tasks: - - `stella policy run replay` for affected runs; attach bundles to incident record. - - Restore sampling defaults with `.../deactivate`. - - Update incident checklist in `/docs/policy/lifecycle.md` (section 8) with findings. - ---- - -## 8 · Integration Points - -- **Authority:** Exposes metric `policy_scope_denied_total` for failed authorisation; correlate with `policy_api_requests_total`. -- **Concelier/Excititor:** Shared trace IDs propagate via gRPC metadata to help debug upstream latency. -- **Scheduler:** Future integration will push run queues into shared scheduler dashboards (planned in SCHED-MODELS-20-002). -- **Offline Kit:** CLI exports logs + metrics snapshots (`stella offline bundle metrics`) for air-gapped audits. - ---- - -## 9 · Compliance Checklist - -- [ ] **Metrics registered:** All metrics listed above exported and documented in Grafana dashboards. -- [ ] **Alert policies configured:** Ops or Observability Guild created alerts matching table in §6. -- [ ] **Sampling overrides tested:** Incident mode toggles verified in staging; retention roll-back rehearsed. -- [ ] **Trace propagation validated:** CLI/UI display trace IDs and allow copy for support. -- [ ] **Log scrubbing enforced:** Unit tests guarantee no secrets/PII in logs; sampling respects configuration. -- [ ] **Offline capture rehearsed:** Metrics/log snapshot commands executed in sealed environment. -- [ ] **Docs cross-links:** Links to architecture, runs, lifecycle, CLI, API docs verified. - ---- - -*Last updated: 2025-10-26 (Sprint 20).* - +# Policy Engine Observability + +> **Audience:** Observability Guild, SRE/Platform operators, Policy Guild. 
+> **Scope:** Metrics, logs, traces, dashboards, alerting, sampling, and incident workflows for the Policy Engine service (Sprint 20). +> **Prerequisites:** Policy Engine v2 deployed with OpenTelemetry exporters enabled (`observability:enabled=true` in config). + +--- + +## 1 · Instrumentation Overview + +- **Telemetry stack:** OpenTelemetry SDK (metrics + traces), Serilog structured logging, OTLP exporters → Collector → Prometheus/Loki/Tempo. +- **Namespace conventions:** `policy.*` for metrics/traces/log categories; labels use `tenant`, `policy`, `mode`, `runId`. +- **Sampling:** Default 10 % trace sampling, 1 % rule-hit log sampling; incident mode overrides to 100 % (see §6). +- **Correlation IDs:** Every API request gets `traceId` + `requestId`. CLI/UI display IDs to streamline support. + +--- + +## 2 · Metrics + +### 2.1 Run Pipeline + +| Metric | Type | Labels | Notes | +|--------|------|--------|-------| +| `policy_run_seconds` | Histogram | `tenant`, `policy`, `mode` (`full`, `incremental`, `simulate`) | P95 target ≤ 5 min incremental, ≤ 30 min full. | +| `policy_run_queue_depth` | Gauge | `tenant` | Number of pending jobs per tenant (updated each enqueue/dequeue). | +| `policy_run_failures_total` | Counter | `tenant`, `policy`, `reason` (`err_pol_*`, `network`, `cancelled`) | Aligns with error codes. | +| `policy_run_retries_total` | Counter | `tenant`, `policy` | Helps identify noisy sources. | +| `policy_run_inputs_pending_bytes` | Gauge | `tenant` | Size of buffered change batches awaiting run. | + +### 2.2 Evaluator Insights + +| Metric | Type | Labels | Notes | +|--------|------|--------|-------| +| `policy_rules_fired_total` | Counter | `tenant`, `policy`, `rule` | Increment per rule match (sampled). | +| `policy_vex_overrides_total` | Counter | `tenant`, `policy`, `vendor`, `justification` | Tracks VEX precedence decisions. | +| `policy_suppressions_total` | Counter | `tenant`, `policy`, `action` (`ignore`, `warn`, `quiet`) | Audits suppression usage. | +| `policy_selection_batch_duration_seconds` | Histogram | `tenant`, `policy` | Measures joiner performance. | +| `policy_materialization_conflicts_total` | Counter | `tenant`, `policy` | Non-zero indicates optimistic concurrency retries. | + +### 2.3 API Surface + +| Metric | Type | Labels | Notes | +|--------|------|--------|-------| +| `policy_api_requests_total` | Counter | `endpoint`, `method`, `status` | Exposed via Minimal API instrumentation. | +| `policy_api_latency_seconds` | Histogram | `endpoint`, `method` | Budget ≤ 250 ms for GETs, ≤ 1 s for POSTs. | +| `policy_api_rate_limited_total` | Counter | `endpoint` | Tied to throttles (`429`). | + +### 2.4 Queue & Change Streams + +| Metric | Type | Labels | Notes | +|--------|------|--------|-------| +| `policy_queue_leases_active` | Gauge | `tenant` | Number of leased jobs. | +| `policy_queue_lease_expirations_total` | Counter | `tenant` | Alerts when workers fail to ack. | +| `policy_delta_backlog_age_seconds` | Gauge | `tenant`, `source` (`concelier`, `excititor`, `sbom`) | Age of oldest unprocessed change event. | + +--- + +## 3 · Logs + +- **Format:** JSON (`Serilog`). Core fields: `timestamp`, `level`, `message`, `policyId`, `policyVersion`, `tenant`, `runId`, `rule`, `traceId`, `env.sealed`, `error.code`. 
+- **Log categories:** + - `policy.run` (queue lifecycle, run begin/end, stats) + - `policy.evaluate` (batch execution summaries; rule-hit sampling) + - `policy.materialize` (Mongo operations, conflicts, retries) + - `policy.simulate` (diff results, CLI invocation metadata) + - `policy.lifecycle` (submit/review/approve events) +- **Sampling:** Rule-hit logs sample 1 % by default; toggled to 100 % in incident mode or when `--trace` flag used in CLI. +- **PII:** No user secrets recorded; user identities referenced as `user:` or `group:` only. + +--- + +## 4 · Traces + +- Spans emit via OpenTelemetry instrumentation. +- **Primary spans:** + - `policy.api` – wraps HTTP request, records `endpoint`, `status`, `scope`. + - `policy.select` – change stream ingestion and batch assembly (attributes: `candidateCount`, `cursor`). + - `policy.evaluate` – evaluation batch (attributes: `batchSize`, `ruleHits`, `severityChanges`). + - `policy.materialize` – Mongo writes (attributes: `writes`, `historyWrites`, `retryCount`). + - `policy.simulate` – simulation diff generation (attributes: `sbomCount`, `diffAdded`, `diffRemoved`). +- Trace context propagated to CLI via response headers `traceparent`; UI surfaces in run detail view. +- Incident mode forces span sampling to 100 % and extends retention via Collector config override. + +--- + +## 5 · Dashboards + +### 5.1 Policy Runs Overview + +Widgets: +- Run duration histogram (per mode/tenant). +- Queue depth + backlog age line charts. +- Failure rate stacked by error code. +- Incremental backlog heatmap (policy × age). +- Active vs scheduled runs table. + +### 5.2 Rule Impact & VEX + +- Top N rules by firings (bar chart). +- VEX overrides by vendor/justification (stacked chart). +- Suppression usage (pie + table with justifications). +- Quieted findings trend (line). + +### 5.3 Simulation & Approval Health + +- Simulation diff histogram (added vs removed). +- Pending approvals by age (table with SLA colour coding). +- Compliance checklist status (lint, determinism CI, simulation evidence). + +> Placeholders for Grafana panels should be replaced with actual screenshots once dashboards land (`../assets/policy-observability/*.png`). + +--- + +## 6 · Alerting + +| Alert | Condition | Suggested Action | +|-------|-----------|------------------| +| **PolicyRunSlaBreach** | `policy_run_seconds{mode="incremental"}` P95 > 300 s for 3 windows | Check queue depth, upstream services, scale worker pool. | +| **PolicyQueueStuck** | `policy_delta_backlog_age_seconds` > 600 | Investigate change stream connectivity. | +| **DeterminismMismatch** | Run status `failed` with `ERR_POL_004` OR CI replay diff | Switch to incident sampling, gather replay bundle, notify Policy Guild. | +| **SimulationDrift** | CLI/CI simulation exit `20` (blocking diff) over threshold | Review policy changes before approval. | +| **VexOverrideSpike** | `policy_vex_overrides_total` > configured baseline (per vendor) | Verify upstream VEX feed; ensure justification codes expected. | +| **SuppressionSurge** | `policy_suppressions_total` increase > 3σ vs baseline | Audit new suppress rules; check approvals. | + +Alerts integrate with Notifier channels (`policy.alerts`) and Ops on-call rotations. + +--- + +## 7 · Incident Mode & Forensics + +- Toggle via `POST /api/policy/incidents/activate` (requires `policy:operate` scope). +- Effects: + - Trace sampling → 100 %. + - Rule-hit log sampling → 100 %. + - Retention window extended to 30 days for incident duration. 
+ - `policy.incident.activated` event emitted (Console + Notifier banners). +- Post-incident tasks: + - `stella policy run replay` for affected runs; attach bundles to incident record. + - Restore sampling defaults with `.../deactivate`. + - Update incident checklist in `/docs/policy/lifecycle.md` (section 8) with findings. + +--- + +## 8 · Integration Points + +- **Authority:** Exposes metric `policy_scope_denied_total` for failed authorisation; correlate with `policy_api_requests_total`. +- **Concelier/Excititor:** Shared trace IDs propagate via gRPC metadata to help debug upstream latency. +- **Scheduler:** Future integration will push run queues into shared scheduler dashboards (planned in SCHED-MODELS-20-002). +- **Offline Kit:** CLI exports logs + metrics snapshots (`stella offline bundle metrics`) for air-gapped audits. + +--- + +## 9 · Compliance Checklist + +- [ ] **Metrics registered:** All metrics listed above exported and documented in Grafana dashboards. +- [ ] **Alert policies configured:** Ops or Observability Guild created alerts matching table in §6. +- [ ] **Sampling overrides tested:** Incident mode toggles verified in staging; retention roll-back rehearsed. +- [ ] **Trace propagation validated:** CLI/UI display trace IDs and allow copy for support. +- [ ] **Log scrubbing enforced:** Unit tests guarantee no secrets/PII in logs; sampling respects configuration. +- [ ] **Offline capture rehearsed:** Metrics/log snapshot commands executed in sealed environment. +- [ ] **Docs cross-links:** Links to architecture, runs, lifecycle, CLI, API docs verified. + +--- + +*Last updated: 2025-10-26 (Sprint 20).* + diff --git a/docs/observability/ui-telemetry.md b/docs/observability/ui-telemetry.md index e504670e..3a803170 100644 --- a/docs/observability/ui-telemetry.md +++ b/docs/observability/ui-telemetry.md @@ -1,191 +1,191 @@ -# Console Observability - -> **Audience:** Observability Guild, Console Guild, SRE/operators. -> **Scope:** Metrics, logs, traces, dashboards, alerting, feature flags, and offline workflows for the StellaOps Console (Sprint 23). -> **Prerequisites:** Console deployed with metrics enabled (`CONSOLE_METRICS_ENABLED=true`) and OTLP exporters configured (`OTEL_EXPORTER_OTLP_*`). - ---- - -## 1 · Instrumentation Overview - -- **Telemetry stack:** OpenTelemetry Web SDK (browser) + Console telemetry bridge → OTLP collector (Tempo/Prometheus/Loki). Server-side endpoints expose `/metrics` (Prometheus) and `/health/*`. -- **Sampling:** Front-end spans sample at 5 % by default (`OTEL_TRACES_SAMPLER=parentbased_traceidratio`). Metrics are un-sampled; log sampling is handled per category (§3). -- **Correlation IDs:** Every API call carries `x-stellaops-correlation-id`; structured UI events mirror that value so operators can follow a request across gateway, backend, and UI. -- **Scope gating:** Operators need the `ui.telemetry` scope to view live charts in the Admin workspace; the scope also controls access to `/console/telemetry` SSE streams. - ---- - -## 2 · Metrics - -### 2.1 Experience & Navigation - -| Metric | Type | Labels | Notes | -|--------|------|--------|-------| -| `ui_route_render_seconds` | Histogram | `route`, `tenant`, `device` (`desktop`,`tablet`) | Time between route activation and first interactive paint. Target P95 ≤ 1.5 s (cached). | -| `ui_request_duration_seconds` | Histogram | `service`, `method`, `status`, `tenant` | Gateway proxy timing for backend calls performed by the console. Alerts when backend latency degrades. 
| -| `ui_filter_apply_total` | Counter | `route`, `filter`, `tenant` | Increments when a global filter or context chip is applied. Used to track adoption of saved views. | -| `ui_tenant_switch_total` | Counter | `fromTenant`, `toTenant`, `trigger` (`picker`, `shortcut`, `link`) | Emitted after a successful tenant switch; correlates with Authority `ui.tenant.switch` logs. | -| `ui_offline_banner_seconds` | Histogram | `reason` (`authority`, `manifest`, `gateway`), `tenant` | Duration of offline banner visibility; integrate with air-gap SLAs. | - -### 2.2 Security & Session - -| Metric | Type | Labels | Notes | -|--------|------|--------|-------| -| `ui_dpop_failure_total` | Counter | `endpoint`, `reason` (`nonce`, `jkt`, `clockSkew`) | Raised when DPoP validation fails; pair with Authority audit trail. | -| `ui_fresh_auth_prompt_total` | Counter | `action` (`token.revoke`, `policy.activate`, `client.create`), `tenant` | Counts fresh-auth modals; backlog above baseline indicates workflow friction. | -| `ui_fresh_auth_failure_total` | Counter | `action`, `reason` (`timeout`,`cancelled`,`auth_error`) | Optional metric (set `CONSOLE_FRESH_AUTH_METRICS=true` when feature flag lands). | - -### 2.3 Downloads & Offline Kit - -| Metric | Type | Labels | Notes | -|--------|------|--------|-------| -| `ui_download_manifest_refresh_seconds` | Histogram | `tenant`, `channel` (`edge`,`stable`,`airgap`) | Time to fetch and verify downloads manifest. Target < 3 s. | -| `ui_download_export_queue_depth` | Gauge | `tenant`, `artifactType` (`sbom`,`policy`,`attestation`,`console`) | Mirrors `/console/downloads` queue depth; triggers when offline bundles lag. | -| `ui_download_command_copied_total` | Counter | `tenant`, `artifactType` | Increments when users copy CLI commands from the UI. Useful to observe CLI parity adoption. | - -### 2.4 Telemetry Emission & Errors - -| Metric | Type | Labels | Notes | -|--------|------|--------|-------| -| `ui_telemetry_batch_failures_total` | Counter | `transport` (`otlp-http`,`otlp-grpc`), `reason` | Emitted by OTLP bridge when batches fail. Enable via `CONSOLE_METRICS_VERBOSE=true`. | -| `ui_telemetry_queue_depth` | Gauge | `priority` (`normal`,`high`), `tenant` | Browser-side buffer depth; monitor for spikes under degraded collectors. | - -> **Scraping tips:** -> - Enable `/metrics` via `CONSOLE_METRICS_ENABLED=true`. -> - Set `OTEL_EXPORTER_OTLP_ENDPOINT=https://otel.collector:4318` and relevant headers (`OTEL_EXPORTER_OTLP_HEADERS=authorization=Bearer `). -> - For air-gapped sites, point the exporter to the Offline Kit collector (`localhost:4318`) and forward the metrics snapshot using `stella offline bundle metrics`. - ---- - -## 3 · Logs - -- **Format:** JSON via Console log bridge; emitted to stdout and optional OTLP log exporter. Core fields: `timestamp`, `level`, `action`, `route`, `tenant`, `subject`, `correlationId`, `dpop.jkt`, `device`, `offlineMode`. -- **Categories:** - - `ui.action` – general user interactions (route changes, command palette, filter updates). Sampled 50 % by default; override with feature flag `telemetry.logVerbose`. - - `ui.tenant.switch` – always logged; includes `fromTenant`, `toTenant`, `tokenId`, and Authority audit correlation. - - `ui.download.commandCopied` – download commands copied; includes `artifactId`, `digest`, `manifestVersion`. - - `ui.security.anomaly` – DPoP mismatches, tenant header errors, CSP violations (level = `Warning`). - - `ui.telemetry.failure` – OTLP export errors; include `httpStatus`, `batchSize`, `retryCount`. 
-- **PII handling:** Full emails are scrubbed; only hashed values (`user:`) appear unless `ui.admin` + fresh-auth were granted for the action (still redacted in logs). -- **Retention:** Recommended 14 days for connected sites, 30 days for sealed/air-gap audits. Ship logs to Loki/Elastic with ingest label `service="stellaops-web-ui"`. - ---- - -## 4 · Traces - -- **Span names & attributes:** - - `ui.route.transition` – wraps route navigation; attributes: `route`, `tenant`, `renderMillis`, `prefetchHit`. - - `ui.api.fetch` – HTTP fetch to backend; attributes: `service`, `endpoint`, `status`, `networkTime`. - - `ui.sse.stream` – Server-sent event subscriptions (status ticker, runs); attributes: `channel`, `connectedMillis`, `reconnects`. - - `ui.telemetry.batch` – Browser OTLP flush; attributes: `batchSize`, `success`, `retryCount`. - - `ui.policy.action` – Policy workspace actions (simulate, approve, activate) per `docs/ui/policy-editor.md`. -- **Propagation:** Spans use W3C `traceparent`; gateway echoes header to backend APIs so traces stitch across UI → gateway → service. -- **Sampling controls:** `OTEL_TRACES_SAMPLER_ARG` (ratio) and feature flag `telemetry.forceSampling` (sets to 100 % for incident debugging). -- **Viewing traces:** Grafana Tempo or Jaeger via collector. Filter by `service.name = stellaops-console`. For cross-service debugging, filter on `correlationId` and `tenant`. - ---- - -## 5 · Dashboards - -### 5.1 Experience Overview - -Panels: -- Route render histogram (P50/P90/P99) by route. -- Backend call latency stacked by service (`ui_request_duration_seconds`). -- Offline banner duration trend (`ui_offline_banner_seconds`). -- Tenant switch volume vs failure rate (overlay `ui_dpop_failure_total`). -- Command palette usage (`ui_filter_apply_total` + `ui.action` log counts). - -### 5.2 Downloads & Offline Kit - -- Manifest refresh time chart (per channel). -- Export queue depth gauge with alert thresholds. -- CLI command adoption (bar chart per artifact type, using `ui_download_command_copied_total`). -- Offline parity banner occurrences (`downloads.offlineParity` flag from API → derived metric). -- Last Offline Kit import timestamp (join with Downloads API metadata). - -### 5.3 Security & Session - -- Fresh-auth prompt counts vs success/fail ratios. -- DPoP failure stacked by reason. -- Tenant mismatch warnings (from `ui.security.anomaly` logs). -- Scope usage heatmap (derived from Authority audit events + UI logs). -- CSP violation counts (browser `securitypolicyviolation` listener forwarded to logs). - -> Capture screenshots for Grafana once dashboards stabilise (`docs/assets/ui/observability/*.png`). Replace placeholders before releasing the doc. - ---- - -## 6 · Alerting - -| Alert | Condition | Suggested Action | -|-------|-----------|------------------| -| **ConsoleLatencyHigh** | `ui_route_render_seconds_bucket{le="1.5"}` drops below 0.95 for 3 intervals | Inspect route splits, check backend latencies, review CDN cache. | -| **BackendLatencyHigh** | `ui_request_duration_seconds_sum / ui_request_duration_seconds_count` > 1 s for any service | Correlate with gateway/service dashboards; escalate to owning guild. | -| **TenantSwitchFailures** | Increase in `ui_dpop_failure_total` or `ui.security.anomaly` (tenant mismatch) > 3/min | Validate Authority issuer, check clock skew, confirm tenant config. 
| -| **FreshAuthLoop** | `ui_fresh_auth_prompt_total` spikes with matching `ui_fresh_auth_failure_total` | Review Authority `/fresh-auth` endpoint, session timeout config, UX regressions. | -| **OfflineBannerLong** | `ui_offline_banner_seconds` P95 > 120 s | Investigate Authority/gateway availability; verify Offline Kit freshness. | -| **DownloadsBacklog** | `ui_download_export_queue_depth` > 5 for 10 min OR queue age > alert threshold | Ping Downloads service, ensure manifest pipeline (`DOWNLOADS-CONSOLE-23-001`) is healthy. | -| **TelemetryExportErrors** | `ui_telemetry_batch_failures_total` > 0 for ≥5 min | Check collector health, credentials, or TLS trust. | - -Integrate alerts with Notifier (`ui.alerts`) or existing Ops channels. Tag incidents with `component=console` for correlation. - ---- - -## 7 · Feature Flags & Configuration - -| Flag / Env Var | Purpose | Default | -|----------------|---------|---------| -| `CONSOLE_FEATURE_FLAGS` | Enables UI modules (`runs`, `downloads`, `policies`, `telemetry`). Telemetry panel requires `telemetry`. | `runs,downloads,policies` | -| `CONSOLE_METRICS_ENABLED` | Exposes `/metrics` for Prometheus scrape. | `true` | -| `CONSOLE_METRICS_VERBOSE` | Emits additional batching metrics (`ui_telemetry_*`). | `false` | -| `CONSOLE_LOG_LEVEL` | Minimum log level (`Information`, `Debug`). Use `Debug` for incident sampling. | `Information` | -| `CONSOLE_METRICS_SAMPLING` *(planned)* | Controls front-end span sampling ratio. Document once released. | `0.05` | -| `OTEL_EXPORTER_OTLP_ENDPOINT` | Collector URL; supports HTTPS. | unset | -| `OTEL_EXPORTER_OTLP_HEADERS` | Comma-separated headers (auth). | unset | -| `OTEL_EXPORTER_OTLP_INSECURE` | Allow HTTP (dev only). | `false` | -| `OTEL_SERVICE_NAME` | Service tag for traces/logs. Set to `stellaops-console`. | auto | -| `CONSOLE_TELEMETRY_SSE_ENABLED` | Enables `/console/telemetry` SSE feed for dashboards. | `true` | - -Feature flag changes should be tracked in release notes and mirrored in `/docs/ui/navigation.md` (shortcuts may change when modules toggle). - ---- - -## 8 · Offline / Air-Gapped Workflow - -- Mirror the console image and telemetry collector as part of the Offline Kit (see `/docs/install/docker.md` §4). -- Scrape metrics locally via `curl -k https://console.local/metrics > metrics.prom`; archive alongside logs for audits. -- Use `stella offline kit import` to keep the downloads manifest in sync; dashboards display staleness using `ui_download_manifest_refresh_seconds`. -- When collectors are unavailable, console queues OTLP batches (up to 5 min) and exposes backlog through `ui_telemetry_queue_depth`; export queue metrics to prove no data loss. -- After reconnecting, run `stella console status --telemetry` *(CLI parity pending; see DOCS-CONSOLE-23-014)* or verify `ui_telemetry_batch_failures_total` resets to zero. -- Retain telemetry bundles for 30 days per compliance guidelines; include Grafana JSON exports in audit packages. - ---- - -## 9 · Compliance Checklist - -- [ ] `/metrics` scraped in staging & production; dashboards display `ui_route_render_seconds`, `ui_request_duration_seconds`, and downloads metrics. -- [ ] OTLP traces/logs confirmed end-to-end (collector, Tempo/Loki). -- [ ] Alert rules from §6 implemented in monitoring stack with runbooks linked. -- [ ] Feature flags documented and change-controlled; telemetry disabled only with approval. -- [ ] DPoP/fresh-auth anomalies correlated with Authority audit logs during drill. 
-- [ ] Offline capture workflow exercised; evidence stored in audit vault. -- [ ] Screenshots of Grafana dashboards committed once they stabilise (update references). -- [ ] Cross-links verified (`docs/deploy/console.md`, `docs/security/console-security.md`, `docs/ui/downloads.md`, `docs/ui/console-overview.md`). - ---- - -## 10 · References - -- `/docs/deploy/console.md` – Metrics endpoint, OTLP config, health checks. -- `/docs/security/console-security.md` – Security metrics & alert hints. -- `/docs/ui/console-overview.md` – Telemetry primitives and performance budgets. -- `/docs/ui/downloads.md` – Downloads metrics and parity workflow. -- `/docs/observability/observability.md` – Platform-wide practices. -- `/ops/telemetry-collector.md` & `/ops/telemetry-storage.md` – Collector deployment. -- `/docs/install/docker.md` – Compose/Helm environment variables. - ---- - -*Last updated: 2025-10-28 (Sprint 23).* - +# Console Observability + +> **Audience:** Observability Guild, Console Guild, SRE/operators. +> **Scope:** Metrics, logs, traces, dashboards, alerting, feature flags, and offline workflows for the StellaOps Console (Sprint 23). +> **Prerequisites:** Console deployed with metrics enabled (`CONSOLE_METRICS_ENABLED=true`) and OTLP exporters configured (`OTEL_EXPORTER_OTLP_*`). + +--- + +## 1 · Instrumentation Overview + +- **Telemetry stack:** OpenTelemetry Web SDK (browser) + Console telemetry bridge → OTLP collector (Tempo/Prometheus/Loki). Server-side endpoints expose `/metrics` (Prometheus) and `/health/*`. +- **Sampling:** Front-end spans sample at 5 % by default (`OTEL_TRACES_SAMPLER=parentbased_traceidratio`). Metrics are un-sampled; log sampling is handled per category (§3). +- **Correlation IDs:** Every API call carries `x-stellaops-correlation-id`; structured UI events mirror that value so operators can follow a request across gateway, backend, and UI. +- **Scope gating:** Operators need the `ui.telemetry` scope to view live charts in the Admin workspace; the scope also controls access to `/console/telemetry` SSE streams. + +--- + +## 2 · Metrics + +### 2.1 Experience & Navigation + +| Metric | Type | Labels | Notes | +|--------|------|--------|-------| +| `ui_route_render_seconds` | Histogram | `route`, `tenant`, `device` (`desktop`,`tablet`) | Time between route activation and first interactive paint. Target P95 ≤ 1.5 s (cached). | +| `ui_request_duration_seconds` | Histogram | `service`, `method`, `status`, `tenant` | Gateway proxy timing for backend calls performed by the console. Alerts when backend latency degrades. | +| `ui_filter_apply_total` | Counter | `route`, `filter`, `tenant` | Increments when a global filter or context chip is applied. Used to track adoption of saved views. | +| `ui_tenant_switch_total` | Counter | `fromTenant`, `toTenant`, `trigger` (`picker`, `shortcut`, `link`) | Emitted after a successful tenant switch; correlates with Authority `ui.tenant.switch` logs. | +| `ui_offline_banner_seconds` | Histogram | `reason` (`authority`, `manifest`, `gateway`), `tenant` | Duration of offline banner visibility; integrate with air-gap SLAs. | + +### 2.2 Security & Session + +| Metric | Type | Labels | Notes | +|--------|------|--------|-------| +| `ui_dpop_failure_total` | Counter | `endpoint`, `reason` (`nonce`, `jkt`, `clockSkew`) | Raised when DPoP validation fails; pair with Authority audit trail. 
| +| `ui_fresh_auth_prompt_total` | Counter | `action` (`token.revoke`, `policy.activate`, `client.create`), `tenant` | Counts fresh-auth modals; backlog above baseline indicates workflow friction. | +| `ui_fresh_auth_failure_total` | Counter | `action`, `reason` (`timeout`,`cancelled`,`auth_error`) | Optional metric (set `CONSOLE_FRESH_AUTH_METRICS=true` when feature flag lands). | + +### 2.3 Downloads & Offline Kit + +| Metric | Type | Labels | Notes | +|--------|------|--------|-------| +| `ui_download_manifest_refresh_seconds` | Histogram | `tenant`, `channel` (`edge`,`stable`,`airgap`) | Time to fetch and verify downloads manifest. Target < 3 s. | +| `ui_download_export_queue_depth` | Gauge | `tenant`, `artifactType` (`sbom`,`policy`,`attestation`,`console`) | Mirrors `/console/downloads` queue depth; triggers when offline bundles lag. | +| `ui_download_command_copied_total` | Counter | `tenant`, `artifactType` | Increments when users copy CLI commands from the UI. Useful to observe CLI parity adoption. | + +### 2.4 Telemetry Emission & Errors + +| Metric | Type | Labels | Notes | +|--------|------|--------|-------| +| `ui_telemetry_batch_failures_total` | Counter | `transport` (`otlp-http`,`otlp-grpc`), `reason` | Emitted by OTLP bridge when batches fail. Enable via `CONSOLE_METRICS_VERBOSE=true`. | +| `ui_telemetry_queue_depth` | Gauge | `priority` (`normal`,`high`), `tenant` | Browser-side buffer depth; monitor for spikes under degraded collectors. | + +> **Scraping tips:** +> - Enable `/metrics` via `CONSOLE_METRICS_ENABLED=true`. +> - Set `OTEL_EXPORTER_OTLP_ENDPOINT=https://otel.collector:4318` and relevant headers (`OTEL_EXPORTER_OTLP_HEADERS=authorization=Bearer `). +> - For air-gapped sites, point the exporter to the Offline Kit collector (`localhost:4318`) and forward the metrics snapshot using `stella offline bundle metrics`. + +--- + +## 3 · Logs + +- **Format:** JSON via Console log bridge; emitted to stdout and optional OTLP log exporter. Core fields: `timestamp`, `level`, `action`, `route`, `tenant`, `subject`, `correlationId`, `dpop.jkt`, `device`, `offlineMode`. +- **Categories:** + - `ui.action` – general user interactions (route changes, command palette, filter updates). Sampled 50 % by default; override with feature flag `telemetry.logVerbose`. + - `ui.tenant.switch` – always logged; includes `fromTenant`, `toTenant`, `tokenId`, and Authority audit correlation. + - `ui.download.commandCopied` – download commands copied; includes `artifactId`, `digest`, `manifestVersion`. + - `ui.security.anomaly` – DPoP mismatches, tenant header errors, CSP violations (level = `Warning`). + - `ui.telemetry.failure` – OTLP export errors; include `httpStatus`, `batchSize`, `retryCount`. +- **PII handling:** Full emails are scrubbed; only hashed values (`user:`) appear unless `ui.admin` + fresh-auth were granted for the action (still redacted in logs). +- **Retention:** Recommended 14 days for connected sites, 30 days for sealed/air-gap audits. Ship logs to Loki/Elastic with ingest label `service="stellaops-web-ui"`. + +--- + +## 4 · Traces + +- **Span names & attributes:** + - `ui.route.transition` – wraps route navigation; attributes: `route`, `tenant`, `renderMillis`, `prefetchHit`. + - `ui.api.fetch` – HTTP fetch to backend; attributes: `service`, `endpoint`, `status`, `networkTime`. + - `ui.sse.stream` – Server-sent event subscriptions (status ticker, runs); attributes: `channel`, `connectedMillis`, `reconnects`. 
+ - `ui.telemetry.batch` – Browser OTLP flush; attributes: `batchSize`, `success`, `retryCount`. + - `ui.policy.action` – Policy workspace actions (simulate, approve, activate) per `docs/ui/policy-editor.md`. +- **Propagation:** Spans use W3C `traceparent`; gateway echoes header to backend APIs so traces stitch across UI → gateway → service. +- **Sampling controls:** `OTEL_TRACES_SAMPLER_ARG` (ratio) and feature flag `telemetry.forceSampling` (sets to 100 % for incident debugging). +- **Viewing traces:** Grafana Tempo or Jaeger via collector. Filter by `service.name = stellaops-console`. For cross-service debugging, filter on `correlationId` and `tenant`. + +--- + +## 5 · Dashboards + +### 5.1 Experience Overview + +Panels: +- Route render histogram (P50/P90/P99) by route. +- Backend call latency stacked by service (`ui_request_duration_seconds`). +- Offline banner duration trend (`ui_offline_banner_seconds`). +- Tenant switch volume vs failure rate (overlay `ui_dpop_failure_total`). +- Command palette usage (`ui_filter_apply_total` + `ui.action` log counts). + +### 5.2 Downloads & Offline Kit + +- Manifest refresh time chart (per channel). +- Export queue depth gauge with alert thresholds. +- CLI command adoption (bar chart per artifact type, using `ui_download_command_copied_total`). +- Offline parity banner occurrences (`downloads.offlineParity` flag from API → derived metric). +- Last Offline Kit import timestamp (join with Downloads API metadata). + +### 5.3 Security & Session + +- Fresh-auth prompt counts vs success/fail ratios. +- DPoP failure stacked by reason. +- Tenant mismatch warnings (from `ui.security.anomaly` logs). +- Scope usage heatmap (derived from Authority audit events + UI logs). +- CSP violation counts (browser `securitypolicyviolation` listener forwarded to logs). + +> Capture screenshots for Grafana once dashboards stabilise (`docs/assets/ui/observability/*.png`). Replace placeholders before releasing the doc. + +--- + +## 6 · Alerting + +| Alert | Condition | Suggested Action | +|-------|-----------|------------------| +| **ConsoleLatencyHigh** | `ui_route_render_seconds_bucket{le="1.5"}` drops below 0.95 for 3 intervals | Inspect route splits, check backend latencies, review CDN cache. | +| **BackendLatencyHigh** | `ui_request_duration_seconds_sum / ui_request_duration_seconds_count` > 1 s for any service | Correlate with gateway/service dashboards; escalate to owning guild. | +| **TenantSwitchFailures** | Increase in `ui_dpop_failure_total` or `ui.security.anomaly` (tenant mismatch) > 3/min | Validate Authority issuer, check clock skew, confirm tenant config. | +| **FreshAuthLoop** | `ui_fresh_auth_prompt_total` spikes with matching `ui_fresh_auth_failure_total` | Review Authority `/fresh-auth` endpoint, session timeout config, UX regressions. | +| **OfflineBannerLong** | `ui_offline_banner_seconds` P95 > 120 s | Investigate Authority/gateway availability; verify Offline Kit freshness. | +| **DownloadsBacklog** | `ui_download_export_queue_depth` > 5 for 10 min OR queue age > alert threshold | Ping Downloads service, ensure manifest pipeline (`DOWNLOADS-CONSOLE-23-001`) is healthy. | +| **TelemetryExportErrors** | `ui_telemetry_batch_failures_total` > 0 for ≥5 min | Check collector health, credentials, or TLS trust. | + +Integrate alerts with Notifier (`ui.alerts`) or existing Ops channels. Tag incidents with `component=console` for correlation. 
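+
+Before codifying the table above as Alertmanager rules, operators can spot-check a condition against the raw metrics. A minimal sketch using the standard Prometheus HTTP API follows; the `prometheus.local` endpoint and the 5-minute rate window are placeholders, not shipped defaults:
+
+```python
+import json
+import urllib.parse
+import urllib.request
+
+PROM_URL = "http://prometheus.local:9090"  # placeholder; point at your monitoring stack
+
+# Fraction of console route renders finishing within 1.5 s.
+# ConsoleLatencyHigh (section 6) fires when this stays below 0.95.
+QUERY = (
+    'sum(rate(ui_route_render_seconds_bucket{le="1.5"}[5m]))'
+    ' / sum(rate(ui_route_render_seconds_count[5m]))'
+)
+
+url = PROM_URL + "/api/v1/query?" + urllib.parse.urlencode({"query": QUERY})
+with urllib.request.urlopen(url, timeout=10) as resp:
+    samples = json.load(resp)["data"]["result"]
+
+if samples:
+    ratio = float(samples[0]["value"][1])
+    if ratio < 0.95:
+        print(f"ConsoleLatencyHigh candidate: {ratio:.2%} of renders under 1.5 s")
+    else:
+        print(f"Route render SLO healthy: {ratio:.2%} under 1.5 s")
+else:
+    print("No ui_route_render_seconds samples returned; check scrape config")
+```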
+ +--- + +## 7 · Feature Flags & Configuration + +| Flag / Env Var | Purpose | Default | +|----------------|---------|---------| +| `CONSOLE_FEATURE_FLAGS` | Enables UI modules (`runs`, `downloads`, `policies`, `telemetry`). Telemetry panel requires `telemetry`. | `runs,downloads,policies` | +| `CONSOLE_METRICS_ENABLED` | Exposes `/metrics` for Prometheus scrape. | `true` | +| `CONSOLE_METRICS_VERBOSE` | Emits additional batching metrics (`ui_telemetry_*`). | `false` | +| `CONSOLE_LOG_LEVEL` | Minimum log level (`Information`, `Debug`). Use `Debug` for incident sampling. | `Information` | +| `CONSOLE_METRICS_SAMPLING` *(planned)* | Controls front-end span sampling ratio. Document once released. | `0.05` | +| `OTEL_EXPORTER_OTLP_ENDPOINT` | Collector URL; supports HTTPS. | unset | +| `OTEL_EXPORTER_OTLP_HEADERS` | Comma-separated headers (auth). | unset | +| `OTEL_EXPORTER_OTLP_INSECURE` | Allow HTTP (dev only). | `false` | +| `OTEL_SERVICE_NAME` | Service tag for traces/logs. Set to `stellaops-console`. | auto | +| `CONSOLE_TELEMETRY_SSE_ENABLED` | Enables `/console/telemetry` SSE feed for dashboards. | `true` | + +Feature flag changes should be tracked in release notes and mirrored in `/docs/ui/navigation.md` (shortcuts may change when modules toggle). + +--- + +## 8 · Offline / Air-Gapped Workflow + +- Mirror the console image and telemetry collector as part of the Offline Kit (see `/docs/install/docker.md` §4). +- Scrape metrics locally via `curl -k https://console.local/metrics > metrics.prom`; archive alongside logs for audits. +- Use `stella offline kit import` to keep the downloads manifest in sync; dashboards display staleness using `ui_download_manifest_refresh_seconds`. +- When collectors are unavailable, console queues OTLP batches (up to 5 min) and exposes backlog through `ui_telemetry_queue_depth`; export queue metrics to prove no data loss. +- After reconnecting, run `stella console status --telemetry` *(CLI parity pending; see DOCS-CONSOLE-23-014)* or verify `ui_telemetry_batch_failures_total` resets to zero. +- Retain telemetry bundles for 30 days per compliance guidelines; include Grafana JSON exports in audit packages. + +--- + +## 9 · Compliance Checklist + +- [ ] `/metrics` scraped in staging & production; dashboards display `ui_route_render_seconds`, `ui_request_duration_seconds`, and downloads metrics. +- [ ] OTLP traces/logs confirmed end-to-end (collector, Tempo/Loki). +- [ ] Alert rules from §6 implemented in monitoring stack with runbooks linked. +- [ ] Feature flags documented and change-controlled; telemetry disabled only with approval. +- [ ] DPoP/fresh-auth anomalies correlated with Authority audit logs during drill. +- [ ] Offline capture workflow exercised; evidence stored in audit vault. +- [ ] Screenshots of Grafana dashboards committed once they stabilise (update references). +- [ ] Cross-links verified (`docs/deploy/console.md`, `docs/security/console-security.md`, `docs/ui/downloads.md`, `docs/ui/console-overview.md`). + +--- + +## 10 · References + +- `/docs/deploy/console.md` – Metrics endpoint, OTLP config, health checks. +- `/docs/security/console-security.md` – Security metrics & alert hints. +- `/docs/ui/console-overview.md` – Telemetry primitives and performance budgets. +- `/docs/ui/downloads.md` – Downloads metrics and parity workflow. +- `/docs/observability/observability.md` – Platform-wide practices. +- `/ops/telemetry-collector.md` & `/ops/telemetry-storage.md` – Collector deployment. 
+- `/docs/install/docker.md` – Compose/Helm environment variables. + +--- + +*Last updated: 2025-10-28 (Sprint 23).* + diff --git a/docs/operations/cli-release-and-packaging.md b/docs/operations/cli-release-and-packaging.md index 2906ee39..3176f22e 100644 --- a/docs/operations/cli-release-and-packaging.md +++ b/docs/operations/cli-release-and-packaging.md @@ -1,134 +1,134 @@ -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -# CLI Release & Packaging Runbook - -This runbook describes how to build, sign, package, and distribute the StellaOps CLI with Task Pack support. It covers connected and air-gapped workflows, SBOM generation, parity gating, and distribution artifacts required by Sprint 43 (`DEVOPS-CLI-43-001`, `DEPLOY-PACKS-43-001`). - ---- - -## 1 · Release Artifacts - -| Artifact | Description | Notes | -|----------|-------------|-------| -| `stella--linux-x64.tar.gz` | Linux binary + completions | Includes man pages, localization files. | -| `stella--macos-universal.tar.gz` | macOS universal binary | Signed/notarized where applicable. | -| `stella--windows-x64.zip` | Windows binary + PowerShell modules | Code-signed. | -| `stella-cli-container:` | OCI image with CLI + pack runtime | Deterministic rootfs (scratch/distroless). | -| SBOM (`.cdx.json`) | CycloneDX SBOM per artifact | Generated via `stella sbom generate` or `syft`. | -| Checksums (`SHA256SUMS`) | Aggregated digest list | Signed with cosign. | -| Provenance (`.intoto.jsonl`) | DSSE attestation (SLSA L2) | Contains build metadata. | -| Release notes | Markdown summary | Links to task packs docs, parity matrix. | - ---- - -## 2 · Build Pipeline - -1. **Source checkout** – pinned commit, reproducible environment (Docker). -2. **Dependency lock** – `dotnet restore`, `npm ci` (for CLI frontends), ensure deterministic build flags. -3. **Build binaries** – cross-platform targets with reproducible timestamps. -4. **Run tests** – unit + integration; include `stella pack` commands (plan/run/verify) in CI. -5. **Generate SBOM** – `syft packages dist/stella-linux-x64 --output cyclonedx-json`. -6. **Bundle** – compress artifacts, include completions (`bash`, `zsh`, `fish`, PowerShell). -7. **Sign** – cosign signatures for binaries, checksums, container image. -8. **Publish** – upload to `downloads.stella-ops.org`, container registry, Packs Registry (for CLI container). -9. **Parity gating** – run CLI parity matrix tests vs Console features (automation in `DEVOPS-CLI-43-001`). - -CI must run in isolated environment (no network beyond allowlist). Cache dependencies for offline bundling. - ---- - -## 3 · Versioning & Channels - -- Semantic versioning (`YYYY.MM.patch`), e.g., `2025.10.0`. -- Channels: - - `edge` – nightly builds, limited support. - - `beta` – pre-release candidates. - - `stable` – production-ready, after parity gating. -- Release promotions mirror Task Pack channels; update downloads manifest (`deploy/downloads/manifest.json`). - ---- - -## 4 · Signing & Verification - -- Binaries signed with cosign (`cosign sign-blob`). -- Container image signed (`cosign sign stella-cli-container:`). -- DSSE provenance includes: - - Build pipeline ID. - - Source commit and repo. - - Dependencies SBOM digest. - - Test results summary. 
-- Verification command for operators: - -```bash -cosign verify-blob \ - --certificate-identity https://ci.stella-ops.org \ - --certificate-oidc-issuer https://fulcio.sigstore.dev \ - --signature stella-2025.10.0-linux-x64.sig \ - stella-2025.10.0-linux-x64.tar.gz -``` - ---- - -## 5 · Distribution - -### 5.1 Online - -- Publish artifacts to Downloads service; update manifest with digests, SBOM URLs, attestations. -- Update CLI parity docs (`docs/cli-vs-ui-parity.md`) and release notes. -- Push container image to registry with SBOM + attestations referenced as OCI referrers. -- Notify stakeholders via `#release-cli` channel and release mailing list. - -### 5.2 Offline / Air-Gap - -- Bundle CLI artifacts, Task Pack samples, and registry mirror: - -```bash -stella pack bundle export \ - --packs "sbom-remediation:1.3.0" \ - --output offline/packs-bundle-2025.10.0.tgz - -stella cli bundle export \ - --output offline/cli-2025.10.0.tgz \ - --include-container \ - --include-sbom -``` - -- Update Offline Kit manifest with new CLI version and pack bundle entries. -- Provide import scripts (`ouk import`) for sealed sites. - ---- - -## 6 · Parity Gating - -- `stella cli parity check` compares CLI commands vs parity matrix. -- CI fails release if any required command flagged `🟥` or `🟡` with severity > threshold. -- Parity report uploaded to Downloads workspace and linked in docs. -- Manual review required for new commands (ensure `man` pages and help text localized). - ---- - -## 7 · Localization & Documentation - -- CLI includes localization bundles; ensure `i18n.txz` packaged. -- Update man pages (`man/stella-pack.1`) and HTML docs. -- Sync docs: `docs/cli/overview.md`, pack authoring guide, release notes. -- Document new flags/commands in `docs/cli/commands/pack.md` (tracked in Sprint 42 tasks). - ---- - -## 8 · Release Checklist - -- [ ] All binaries built reproducibly (CI logs archived). -- [ ] Tests + parity matrix passing. -- [ ] SBOM + provenance generated and published. -- [ ] Cosign signatures created and verified. -- [ ] Downloads manifest updated (edge/beta/stable). -- [ ] Offline bundle exported and validated. -- [ ] Release notes + documentation updates merged. -- [ ] Notifications sent (chat/email). -- [ ] Imposed rule reminder present at top of document. - ---- - -*Last updated: 2025-10-27 (Sprint 43).* - +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +# CLI Release & Packaging Runbook + +This runbook describes how to build, sign, package, and distribute the StellaOps CLI with Task Pack support. It covers connected and air-gapped workflows, SBOM generation, parity gating, and distribution artifacts required by Sprint 43 (`DEVOPS-CLI-43-001`, `DEPLOY-PACKS-43-001`). + +--- + +## 1 · Release Artifacts + +| Artifact | Description | Notes | +|----------|-------------|-------| +| `stella--linux-x64.tar.gz` | Linux binary + completions | Includes man pages, localization files. | +| `stella--macos-universal.tar.gz` | macOS universal binary | Signed/notarized where applicable. | +| `stella--windows-x64.zip` | Windows binary + PowerShell modules | Code-signed. | +| `stella-cli-container:` | OCI image with CLI + pack runtime | Deterministic rootfs (scratch/distroless). | +| SBOM (`.cdx.json`) | CycloneDX SBOM per artifact | Generated via `stella sbom generate` or `syft`. | +| Checksums (`SHA256SUMS`) | Aggregated digest list | Signed with cosign. 
| +| Provenance (`.intoto.jsonl`) | DSSE attestation (SLSA L2) | Contains build metadata. | +| Release notes | Markdown summary | Links to task packs docs, parity matrix. | + +--- + +## 2 · Build Pipeline + +1. **Source checkout** – pinned commit, reproducible environment (Docker). +2. **Dependency lock** – `dotnet restore`, `npm ci` (for CLI frontends), ensure deterministic build flags. +3. **Build binaries** – cross-platform targets with reproducible timestamps. +4. **Run tests** – unit + integration; include `stella pack` commands (plan/run/verify) in CI. +5. **Generate SBOM** – `syft packages dist/stella-linux-x64 --output cyclonedx-json`. +6. **Bundle** – compress artifacts, include completions (`bash`, `zsh`, `fish`, PowerShell). +7. **Sign** – cosign signatures for binaries, checksums, container image. +8. **Publish** – upload to `downloads.stella-ops.org`, container registry, Packs Registry (for CLI container). +9. **Parity gating** – run CLI parity matrix tests vs Console features (automation in `DEVOPS-CLI-43-001`). + +CI must run in isolated environment (no network beyond allowlist). Cache dependencies for offline bundling. + +--- + +## 3 · Versioning & Channels + +- Semantic versioning (`YYYY.MM.patch`), e.g., `2025.10.0`. +- Channels: + - `edge` – nightly builds, limited support. + - `beta` – pre-release candidates. + - `stable` – production-ready, after parity gating. +- Release promotions mirror Task Pack channels; update downloads manifest (`deploy/downloads/manifest.json`). + +--- + +## 4 · Signing & Verification + +- Binaries signed with cosign (`cosign sign-blob`). +- Container image signed (`cosign sign stella-cli-container:`). +- DSSE provenance includes: + - Build pipeline ID. + - Source commit and repo. + - Dependencies SBOM digest. + - Test results summary. +- Verification command for operators: + +```bash +cosign verify-blob \ + --certificate-identity https://ci.stella-ops.org \ + --certificate-oidc-issuer https://fulcio.sigstore.dev \ + --signature stella-2025.10.0-linux-x64.sig \ + stella-2025.10.0-linux-x64.tar.gz +``` + +--- + +## 5 · Distribution + +### 5.1 Online + +- Publish artifacts to Downloads service; update manifest with digests, SBOM URLs, attestations. +- Update CLI parity docs (`docs/cli-vs-ui-parity.md`) and release notes. +- Push container image to registry with SBOM + attestations referenced as OCI referrers. +- Notify stakeholders via `#release-cli` channel and release mailing list. + +### 5.2 Offline / Air-Gap + +- Bundle CLI artifacts, Task Pack samples, and registry mirror: + +```bash +stella pack bundle export \ + --packs "sbom-remediation:1.3.0" \ + --output offline/packs-bundle-2025.10.0.tgz + +stella cli bundle export \ + --output offline/cli-2025.10.0.tgz \ + --include-container \ + --include-sbom +``` + +- Update Offline Kit manifest with new CLI version and pack bundle entries. +- Provide import scripts (`ouk import`) for sealed sites. + +--- + +## 6 · Parity Gating + +- `stella cli parity check` compares CLI commands vs parity matrix. +- CI fails release if any required command flagged `🟥` or `🟡` with severity > threshold. +- Parity report uploaded to Downloads workspace and linked in docs. +- Manual review required for new commands (ensure `man` pages and help text localized). + +--- + +## 7 · Localization & Documentation + +- CLI includes localization bundles; ensure `i18n.txz` packaged. +- Update man pages (`man/stella-pack.1`) and HTML docs. +- Sync docs: `docs/cli/overview.md`, pack authoring guide, release notes. 
+- Document new flags/commands in `docs/cli/commands/pack.md` (tracked in Sprint 42 tasks). + +--- + +## 8 · Release Checklist + +- [ ] All binaries built reproducibly (CI logs archived). +- [ ] Tests + parity matrix passing. +- [ ] SBOM + provenance generated and published. +- [ ] Cosign signatures created and verified. +- [ ] Downloads manifest updated (edge/beta/stable). +- [ ] Offline bundle exported and validated. +- [ ] Release notes + documentation updates merged. +- [ ] Notifications sent (chat/email). +- [ ] Imposed rule reminder present at top of document. + +--- + +*Last updated: 2025-10-27 (Sprint 43).* + diff --git a/docs/operations/export-runbook.md b/docs/operations/export-runbook.md index d287609d..168dd10d 100644 --- a/docs/operations/export-runbook.md +++ b/docs/operations/export-runbook.md @@ -1,203 +1,203 @@ -# Export Center Operations Runbook - -> Export Center workers and API are landing across Sprints 35-37. This runbook captures the target operational procedures so DevOps can validate them as each milestone goes live. Update specific commands once `EXPORT-SVC-35-006`, `EXPORT-SVC-36-001..004`, and related CLI tasks ship. - -## 1. Service scope - -The Export Center packages StellaOps evidence and policy overlays into reproducible bundles (JSON, Trivy DB, mirror). Operations owns: - -- Worker scaling, queue management, and distribution storage. -- Monitoring and alerts for run throughput, failures, and verification issues. -- Runbook execution for recovery, retention, and compliance. -- Coordination with DevOps validation (cosign + `trivy module db import` smoke tests). - -Related documentation: - -- `docs/export-center/overview.md` -- `docs/export-center/architecture.md` -- `docs/export-center/profiles.md` -- `docs/export-center/trivy-adapter.md` -- `docs/export-center/mirror-bundles.md` -- `docs/export-center/api.md` -- `docs/export-center/cli.md` - -## 2. Contacts & tooling - -| Area | Owner(s) | Escalation | -|------|----------|------------| -| Export Center service | Exporter Service Guild | `#export-center-ops`, on-call rotation | -| Distribution & CI smoke | DevOps Guild | CI channel, PagerDuty `devops-export` | -| KMS / encryption | Authority Core | `#authority-core` | -| Offline Kit dissemination | Offline Kit Guild | `#offline-kit` | - -Primary tooling: - -- `stella export` CLI (submit, watch, download, verify). -- Export Center API (`/api/export/*`) for automation. -- Grafana dashboards (`Export Center / Run Health`, `Export Center / Distribution`). -- Alertmanager routes (`Export.Center.Failures`, `Export.Center.Verify`). - -## 3. Monitoring & SLOs - -Key metrics (exposed by workers and API): - -| Metric | SLO / Alert | Notes | -|--------|-------------|-------| -| `exporter_run_duration_seconds` | p95 < 300 s (full), < 120 s (delta) | Break down by profile (`profile_kind`). | -| `exporter_run_failures_total` | Alert when > 3 failures/15 min per profile | Include `error_code` label. | -| `exporter_run_bytes_total` | Track growth trends | Helps with storage planning. | -| `exporter_distribution_push_seconds` | p95 < 60 s | Covers OCI/object storage. | -| `exporter_verify_failures_total` | Alert on any non-zero | Raised when cosign/Trivy smoke tests fail. | -| `exporter_retention_pruned_total` | Should increase nightly | Confirms retention job success. | - -Dashboards must include: - -- Run throughput by profile. -- Failure breakdown (adapter, signing, distribution). -- Queue depth and worker concurrency (via Orchestrator metrics). 
-- Storage consumption (object storage buckets, local staging). - -Alerts (Alertmanager): - -- `ExportCenterRunFailureSpike` - `exporter_run_failures_total` increase rate > 3/15 min. -- `ExportCenterVerifyFailure` - any entry in `exporter_verify_failures_total` > 0. -- `ExportCenterWorkerLag` - queue backlog > threshold for 10 minutes. -- `ExportCenterRetentionStale` - no pruning events in 24 hours. - -## 4. Routine operations - -### 4.1 Daily checklist - -- Review dashboard for run throughput and error classes. -- Confirm CI smoke job (cosign + `trivy module db import`) passed. -- Check storage usage against capacity thresholds. -- Verify retention job executed (look for `exporter_retention_pruned_total` increment). -- Scan logs for `adapter.trivy.unsupported_schema_version` or `mirror.delta.apply_failed`. - -### 4.2 Weekly tasks - -- Rotate Download/OCI API tokens if configured with short-lived credentials. -- Review upcoming profile changes (new tenants, profile updates). -- Test `stella export verify` against a recent run for each profile. -- Exercise failover of workers (scale to zero one replica, ensure others pick up). - -### 4.3 Pre-release - -- Ensure bundles generated for release candidates pass cosign verification. -- Capture sample manifests (`export.json`, `manifest.yaml`) for documentation archives. -- Validate Offline Kit packaging includes latest full + delta mirror bundles. - -## 5. Capacity & scaling - -### 5.1 Worker sizing - -- Default workers handle ~2 full runs or 6 delta runs concurrently per 4 vCPU. -- Scale out when: - - Queue depth (`exporter_jobs_ready`) > 10 for 10 minutes. - - p95 durations exceed SLO for multiple runs without failures. -- Use Orchestrator quotas: ensure per-tenant concurrency (`max_active_runs`) is tuned. - -### 5.2 Storage planning - -- Staging storage (object store or filesystem) must hold at least: - - Latest full bundle per tenant per profile. - - Last `N` deltas (default N=5). -- Set retention policy via configuration: - -```yaml -ExportCenter: - Retention: - Mirror: - Mode: days - Value: 30 - Trivy: - Mode: count - Value: 10 -``` - -- Monitor `exporter_storage_bytes_total` (if available) or use bucket metrics from storage provider. - -## 6. 
Failure response - -| Symptom | Likely cause | Immediate action | Follow-up | -|---------|--------------|------------------|-----------| -| `ERR_EXPORT_UNSUPPORTED_SCHEMA` | Trivy schema mismatch | Pin `SchemaVersion` to previous value; rerun export | Coordinate with Exporter Guild to add new mapping | -| `ERR_EXPORT_BASE_MISSING` | Base manifest unavailable | Trigger full export (`mirror:full`), notify tenant | Investigate storage retention settings | -| Run stuck in `pending` | Worker unavailable / queue paused | Check worker pods / Orchestrator status | Scale workers or fix queue | -| Signing failure (`errorCode=signing`) | KMS outage or permission change | Verify KMS health; retry run; escalate to Authority | Document incident, review key rotation schedule | -| Distribution failure (`errorCode=distribution`) | OCI/object store outage | Switch profile distribution to download-only (`distribution: ["http"]`) | Restore distribution backend, resume normal config | -| CLI verification failure in CI | New bundle did not pass cosign or Trivy import | Inspect pipeline logs; download bundle; rerun verification manually | Engage Exporter Guild if data quality issue | -| Retention job skipped | Scheduler failure or misconfiguration | Run retention job manually (`stella export retention run`) | Audit scheduler configuration | - -Log locations: `exporter` service emits structured logs with `runId`, `profile`, `errorCode`. For Kubernetes deployments, check `kubectl logs deployment/export-center-worker`. - -## 7. Recovery playbooks - -### 7.1 Replaying a failed run - -1. Identify run (`runId`) and root cause via `GET /api/export/runs/{id}`. -2. If configuration changed, clone profile and adjust settings. -3. Resubmit run (`stella export run submit` or API) with `--allow-empty` if intentionally empty. -4. Monitor SSE stream or `stella export run watch`. -5. After success, prune failed run data if necessary. - -### 7.2 Restoring from previous full bundle - -1. Locate last successful full bundle (`mirror:full`) and associated manifest. -2. Download and verify signatures. -3. Extract into mirror staging area. -4. Apply subsequent delta bundles in order. -5. Trigger mirror verification script (`mirror verify `). - -### 7.3 KMS outage response - -1. Disable new export submissions temporarily (set per-tenant quota to 0). -2. Coordinate with Authority Core to restore KMS. -3. Once KMS back, run `stella export run submit --profile --selectors ... --priority catch-up` for affected tenants. - -## 8. Verification workflow - -All bundles must pass both signature and content verification. - -### 8.1 Trivy bundle validation (CI job) - -```bash -cosign verify-blob \ - --key tenants/acme/export-center.pub \ - --signature signatures/trivy-db.sig \ - trivy/db.bundle - -trivy module db import trivy/db.bundle --cache-dir /tmp/trivy-cache -``` - -Automation: `DEVOPS-EXPORT-36-001` ensures this runs on every pipeline. - -### 8.2 Mirror bundle validation - -```bash -cosign verify-blob \ - --key tenants/acme/export-center.pub \ - --signature signatures/export.sig \ - mirror/export.json - -./offline-kit/bin/mirror verify mirror-20251029-full.tar.zst -``` - -If encryption enabled, decrypt using age or AES key before verification. - -## 9. Change management - -- Profile changes require change record referencing tenant impact and expected bundle size. -- Distribution configuration updates (`OCI` vs `HTTP`) must be tested in staging. -- Schema upgrades (e.g., Trivy schema v3) need coordination with DevOps, Exporter, and Docs. 
-- Update runbook and related docs when processes change (tie updates to `DOCS-EXPORT-37-005`). - -## 10. References - -- `docs/export-center/trivy-adapter.md` -- `docs/export-center/mirror-bundles.md` -- `ops/devops/TASKS.md` (`DEVOPS-EXPORT-36-001`, `DEVOPS-EXPORT-37-001`) -- `docs/ingestion/aggregation-only-contract.md` -- `docs/24_OFFLINE_KIT.md` - -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. +# Export Center Operations Runbook + +> Export Center workers and API are landing across Sprints 35-37. This runbook captures the target operational procedures so DevOps can validate them as each milestone goes live. Update specific commands once `EXPORT-SVC-35-006`, `EXPORT-SVC-36-001..004`, and related CLI tasks ship. + +## 1. Service scope + +The Export Center packages StellaOps evidence and policy overlays into reproducible bundles (JSON, Trivy DB, mirror). Operations owns: + +- Worker scaling, queue management, and distribution storage. +- Monitoring and alerts for run throughput, failures, and verification issues. +- Runbook execution for recovery, retention, and compliance. +- Coordination with DevOps validation (cosign + `trivy module db import` smoke tests). + +Related documentation: + +- `docs/export-center/overview.md` +- `docs/export-center/architecture.md` +- `docs/export-center/profiles.md` +- `docs/export-center/trivy-adapter.md` +- `docs/export-center/mirror-bundles.md` +- `docs/export-center/api.md` +- `docs/export-center/cli.md` + +## 2. Contacts & tooling + +| Area | Owner(s) | Escalation | +|------|----------|------------| +| Export Center service | Exporter Service Guild | `#export-center-ops`, on-call rotation | +| Distribution & CI smoke | DevOps Guild | CI channel, PagerDuty `devops-export` | +| KMS / encryption | Authority Core | `#authority-core` | +| Offline Kit dissemination | Offline Kit Guild | `#offline-kit` | + +Primary tooling: + +- `stella export` CLI (submit, watch, download, verify). +- Export Center API (`/api/export/*`) for automation. +- Grafana dashboards (`Export Center / Run Health`, `Export Center / Distribution`). +- Alertmanager routes (`Export.Center.Failures`, `Export.Center.Verify`). + +## 3. Monitoring & SLOs + +Key metrics (exposed by workers and API): + +| Metric | SLO / Alert | Notes | +|--------|-------------|-------| +| `exporter_run_duration_seconds` | p95 < 300 s (full), < 120 s (delta) | Break down by profile (`profile_kind`). | +| `exporter_run_failures_total` | Alert when > 3 failures/15 min per profile | Include `error_code` label. | +| `exporter_run_bytes_total` | Track growth trends | Helps with storage planning. | +| `exporter_distribution_push_seconds` | p95 < 60 s | Covers OCI/object storage. | +| `exporter_verify_failures_total` | Alert on any non-zero | Raised when cosign/Trivy smoke tests fail. | +| `exporter_retention_pruned_total` | Should increase nightly | Confirms retention job success. | + +Dashboards must include: + +- Run throughput by profile. +- Failure breakdown (adapter, signing, distribution). +- Queue depth and worker concurrency (via Orchestrator metrics). +- Storage consumption (object storage buckets, local staging). + +Alerts (Alertmanager): + +- `ExportCenterRunFailureSpike` - `exporter_run_failures_total` increase rate > 3/15 min. +- `ExportCenterVerifyFailure` - any entry in `exporter_verify_failures_total` > 0. +- `ExportCenterWorkerLag` - queue backlog > threshold for 10 minutes. 
+- `ExportCenterRetentionStale` - no pruning events in 24 hours. + +## 4. Routine operations + +### 4.1 Daily checklist + +- Review dashboard for run throughput and error classes. +- Confirm CI smoke job (cosign + `trivy module db import`) passed. +- Check storage usage against capacity thresholds. +- Verify retention job executed (look for `exporter_retention_pruned_total` increment). +- Scan logs for `adapter.trivy.unsupported_schema_version` or `mirror.delta.apply_failed`. + +### 4.2 Weekly tasks + +- Rotate Download/OCI API tokens if configured with short-lived credentials. +- Review upcoming profile changes (new tenants, profile updates). +- Test `stella export verify` against a recent run for each profile. +- Exercise failover of workers (scale to zero one replica, ensure others pick up). + +### 4.3 Pre-release + +- Ensure bundles generated for release candidates pass cosign verification. +- Capture sample manifests (`export.json`, `manifest.yaml`) for documentation archives. +- Validate Offline Kit packaging includes latest full + delta mirror bundles. + +## 5. Capacity & scaling + +### 5.1 Worker sizing + +- Default workers handle ~2 full runs or 6 delta runs concurrently per 4 vCPU. +- Scale out when: + - Queue depth (`exporter_jobs_ready`) > 10 for 10 minutes. + - p95 durations exceed SLO for multiple runs without failures. +- Use Orchestrator quotas: ensure per-tenant concurrency (`max_active_runs`) is tuned. + +### 5.2 Storage planning + +- Staging storage (object store or filesystem) must hold at least: + - Latest full bundle per tenant per profile. + - Last `N` deltas (default N=5). +- Set retention policy via configuration: + +```yaml +ExportCenter: + Retention: + Mirror: + Mode: days + Value: 30 + Trivy: + Mode: count + Value: 10 +``` + +- Monitor `exporter_storage_bytes_total` (if available) or use bucket metrics from storage provider. + +## 6. Failure response + +| Symptom | Likely cause | Immediate action | Follow-up | +|---------|--------------|------------------|-----------| +| `ERR_EXPORT_UNSUPPORTED_SCHEMA` | Trivy schema mismatch | Pin `SchemaVersion` to previous value; rerun export | Coordinate with Exporter Guild to add new mapping | +| `ERR_EXPORT_BASE_MISSING` | Base manifest unavailable | Trigger full export (`mirror:full`), notify tenant | Investigate storage retention settings | +| Run stuck in `pending` | Worker unavailable / queue paused | Check worker pods / Orchestrator status | Scale workers or fix queue | +| Signing failure (`errorCode=signing`) | KMS outage or permission change | Verify KMS health; retry run; escalate to Authority | Document incident, review key rotation schedule | +| Distribution failure (`errorCode=distribution`) | OCI/object store outage | Switch profile distribution to download-only (`distribution: ["http"]`) | Restore distribution backend, resume normal config | +| CLI verification failure in CI | New bundle did not pass cosign or Trivy import | Inspect pipeline logs; download bundle; rerun verification manually | Engage Exporter Guild if data quality issue | +| Retention job skipped | Scheduler failure or misconfiguration | Run retention job manually (`stella export retention run`) | Audit scheduler configuration | + +Log locations: `exporter` service emits structured logs with `runId`, `profile`, `errorCode`. For Kubernetes deployments, check `kubectl logs deployment/export-center-worker`. + +## 7. Recovery playbooks + +### 7.1 Replaying a failed run + +1. Identify run (`runId`) and root cause via `GET /api/export/runs/{id}`. 
+2. If configuration changed, clone profile and adjust settings. +3. Resubmit run (`stella export run submit` or API) with `--allow-empty` if intentionally empty. +4. Monitor SSE stream or `stella export run watch`. +5. After success, prune failed run data if necessary. + +### 7.2 Restoring from previous full bundle + +1. Locate last successful full bundle (`mirror:full`) and associated manifest. +2. Download and verify signatures. +3. Extract into mirror staging area. +4. Apply subsequent delta bundles in order. +5. Trigger mirror verification script (`mirror verify `). + +### 7.3 KMS outage response + +1. Disable new export submissions temporarily (set per-tenant quota to 0). +2. Coordinate with Authority Core to restore KMS. +3. Once KMS back, run `stella export run submit --profile --selectors ... --priority catch-up` for affected tenants. + +## 8. Verification workflow + +All bundles must pass both signature and content verification. + +### 8.1 Trivy bundle validation (CI job) + +```bash +cosign verify-blob \ + --key tenants/acme/export-center.pub \ + --signature signatures/trivy-db.sig \ + trivy/db.bundle + +trivy module db import trivy/db.bundle --cache-dir /tmp/trivy-cache +``` + +Automation: `DEVOPS-EXPORT-36-001` ensures this runs on every pipeline. + +### 8.2 Mirror bundle validation + +```bash +cosign verify-blob \ + --key tenants/acme/export-center.pub \ + --signature signatures/export.sig \ + mirror/export.json + +./offline-kit/bin/mirror verify mirror-20251029-full.tar.zst +``` + +If encryption enabled, decrypt using age or AES key before verification. + +## 9. Change management + +- Profile changes require change record referencing tenant impact and expected bundle size. +- Distribution configuration updates (`OCI` vs `HTTP`) must be tested in staging. +- Schema upgrades (e.g., Trivy schema v3) need coordination with DevOps, Exporter, and Docs. +- Update runbook and related docs when processes change (tie updates to `DOCS-EXPORT-37-005`). + +## 10. References + +- `docs/export-center/trivy-adapter.md` +- `docs/export-center/mirror-bundles.md` +- `ops/devops/TASKS.md` (`DEVOPS-EXPORT-36-001`, `DEVOPS-EXPORT-37-001`) +- `docs/ingestion/aggregation-only-contract.md` +- `docs/24_OFFLINE_KIT.md` + +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. diff --git a/docs/ops/authority-backup-restore.md b/docs/ops/authority-backup-restore.md index 7c8f9d72..4890f749 100644 --- a/docs/ops/authority-backup-restore.md +++ b/docs/ops/authority-backup-restore.md @@ -1,97 +1,97 @@ -# Authority Backup & Restore Runbook - -## Scope -- **Applies to:** StellaOps Authority deployments running the official `ops/authority/docker-compose.authority.yaml` stack or equivalent Kubernetes packaging. -- **Artifacts covered:** MongoDB (`stellaops-authority` database), Authority configuration (`etc/authority.yaml`), plugin manifests under `etc/authority.plugins/`, and signing key material stored in the `authority-keys` volume (defaults to `/app/keys` inside the container). -- **Frequency:** Run the full procedure prior to upgrades, before rotating keys, and at least once per 24 h in production. Store snapshots in an encrypted, access-controlled vault. 
- -## Inventory Checklist -| Component | Location (compose default) | Notes | -| --- | --- | --- | -| Mongo data | `mongo-data` volume (`/var/lib/docker/volumes/.../mongo-data`) | Contains all Authority collections (`AuthorityUser`, `AuthorityClient`, `AuthorityToken`, etc.). | -| Configuration | `etc/authority.yaml` | Mounted read-only into the container at `/etc/authority.yaml`. | -| Plugin manifests | `etc/authority.plugins/*.yaml` | Includes `standard.yaml` with `tokenSigning.keyDirectory`. | -| Signing keys | `authority-keys` volume -> `/app/keys` | Path is derived from `tokenSigning.keyDirectory` (defaults to `../keys` relative to the manifest). | - -> **TIP:** Confirm the deployed key directory via `tokenSigning.keyDirectory` in `etc/authority.plugins/standard.yaml`; some installations relocate keys to `/var/lib/stellaops/authority/keys`. - -## Hot Backup (no downtime) -1. **Create output directory:** `mkdir -p backup/$(date +%Y-%m-%d)` on the host. -2. **Dump Mongo:** - ```bash - docker compose -f ops/authority/docker-compose.authority.yaml exec mongo \ - mongodump --archive=/dump/authority-$(date +%Y%m%dT%H%M%SZ).gz \ - --gzip --db stellaops-authority - docker compose -f ops/authority/docker-compose.authority.yaml cp \ - mongo:/dump/authority-$(date +%Y%m%dT%H%M%SZ).gz backup/ - ``` - The `mongodump` archive preserves indexes and can be restored with `mongorestore --archive --gzip`. -3. **Capture configuration + manifests:** - ```bash - cp etc/authority.yaml backup/ - rsync -a etc/authority.plugins/ backup/authority.plugins/ - ``` -4. **Export signing keys:** the compose file maps `authority-keys` to a local Docker volume. Snapshot it without stopping the service: - ```bash - docker run --rm \ - -v authority-keys:/keys \ - -v "$(pwd)/backup:/backup" \ - busybox tar czf /backup/authority-keys-$(date +%Y%m%dT%H%M%SZ).tar.gz -C /keys . - ``` -5. **Checksum:** generate SHA-256 digests for every file and store them alongside the artefacts. -6. **Encrypt & upload:** wrap the backup folder using your secrets management standard (e.g., age, GPG) and upload to the designated offline vault. - -## Cold Backup (planned downtime) -1. Notify stakeholders and drain traffic (CLI clients should refresh tokens afterwards). -2. Stop services: - ```bash - docker compose -f ops/authority/docker-compose.authority.yaml down - ``` -3. Back up volumes directly using `tar`: - ```bash - docker run --rm -v mongo-data:/data -v "$(pwd)/backup:/backup" \ - busybox tar czf /backup/mongo-data-$(date +%Y%m%d).tar.gz -C /data . - docker run --rm -v authority-keys:/keys -v "$(pwd)/backup:/backup" \ - busybox tar czf /backup/authority-keys-$(date +%Y%m%d).tar.gz -C /keys . - ``` -4. Copy configuration + manifests as in the hot backup (steps 3–6). -5. Restart services and verify health: - ```bash - docker compose -f ops/authority/docker-compose.authority.yaml up -d - curl -fsS http://localhost:8080/ready - ``` - -## Restore Procedure -1. **Provision clean volumes:** remove existing volumes if you’re rebuilding a node (`docker volume rm mongo-data authority-keys`), then recreate the compose stack so empty volumes exist. -2. **Restore Mongo:** - ```bash - docker compose exec -T mongo mongorestore --archive --gzip --drop < backup/authority-YYYYMMDDTHHMMSSZ.gz - ``` - Use `--drop` to replace collections; omit if doing a partial restore. -3. **Restore configuration/manifests:** copy `authority.yaml` and `authority.plugins/*` into place before starting the Authority container. -4. 
**Restore signing keys:** untar into the mounted volume: - ```bash - docker run --rm -v authority-keys:/keys -v "$(pwd)/backup:/backup" \ - busybox tar xzf /backup/authority-keys-YYYYMMDD.tar.gz -C /keys - ``` - Ensure file permissions remain `600` for private keys (`chmod -R 600`). -5. **Start services & validate:** - ```bash - docker compose up -d - curl -fsS http://localhost:8080/health - ``` -6. **Validate JWKS and tokens:** call `/jwks` and issue a short-lived token via the CLI to confirm key material matches expectations. If the restored environment requires a fresh signing key, follow the rotation SOP in [`docs/11_AUTHORITY.md`](../11_AUTHORITY.md) using `ops/authority/key-rotation.sh` to invoke `/internal/signing/rotate`. - -## Disaster Recovery Notes -- **Air-gapped replication:** replicate archives via the Offline Update Kit transport channels; never attach USB devices without scanning. -- **Retention:** maintain 30 daily snapshots + 12 monthly archival copies. Rotate encryption keys annually. -- **Key compromise:** if signing keys are suspected compromised, restore from the latest clean backup, rotate via OPS3 (see `ops/authority/key-rotation.sh` and `docs/11_AUTHORITY.md`), and publish a revocation notice. -- **Mongo version:** keep dump/restore images pinned to the deployment version (compose uses `mongo:7`). Driver 3.5.0 requires MongoDB **4.2+**—clusters still on 4.0 must be upgraded before restore, and future driver releases will drop 4.0 entirely. citeturn1open1 - -## Verification Checklist -- [ ] `/ready` reports all identity providers ready. -- [ ] OAuth flows issue tokens signed by the restored keys. -- [ ] `PluginRegistrationSummary` logs expected providers on startup. -- [ ] Revocation manifest export (`dotnet run --project src/StellaOps.Authority`) succeeds. -- [ ] Monitoring dashboards show metrics resuming (see OPS5 deliverables). - +# Authority Backup & Restore Runbook + +## Scope +- **Applies to:** StellaOps Authority deployments running the official `ops/authority/docker-compose.authority.yaml` stack or equivalent Kubernetes packaging. +- **Artifacts covered:** MongoDB (`stellaops-authority` database), Authority configuration (`etc/authority.yaml`), plugin manifests under `etc/authority.plugins/`, and signing key material stored in the `authority-keys` volume (defaults to `/app/keys` inside the container). +- **Frequency:** Run the full procedure prior to upgrades, before rotating keys, and at least once per 24 h in production. Store snapshots in an encrypted, access-controlled vault. + +## Inventory Checklist +| Component | Location (compose default) | Notes | +| --- | --- | --- | +| Mongo data | `mongo-data` volume (`/var/lib/docker/volumes/.../mongo-data`) | Contains all Authority collections (`AuthorityUser`, `AuthorityClient`, `AuthorityToken`, etc.). | +| Configuration | `etc/authority.yaml` | Mounted read-only into the container at `/etc/authority.yaml`. | +| Plugin manifests | `etc/authority.plugins/*.yaml` | Includes `standard.yaml` with `tokenSigning.keyDirectory`. | +| Signing keys | `authority-keys` volume -> `/app/keys` | Path is derived from `tokenSigning.keyDirectory` (defaults to `../keys` relative to the manifest). | + +> **TIP:** Confirm the deployed key directory via `tokenSigning.keyDirectory` in `etc/authority.plugins/standard.yaml`; some installations relocate keys to `/var/lib/stellaops/authority/keys`. + +## Hot Backup (no downtime) +1. **Create output directory:** `mkdir -p backup/$(date +%Y-%m-%d)` on the host. +2. 
**Dump Mongo:** + ```bash + docker compose -f ops/authority/docker-compose.authority.yaml exec mongo \ + mongodump --archive=/dump/authority-$(date +%Y%m%dT%H%M%SZ).gz \ + --gzip --db stellaops-authority + docker compose -f ops/authority/docker-compose.authority.yaml cp \ + mongo:/dump/authority-$(date +%Y%m%dT%H%M%SZ).gz backup/ + ``` + The `mongodump` archive preserves indexes and can be restored with `mongorestore --archive --gzip`. +3. **Capture configuration + manifests:** + ```bash + cp etc/authority.yaml backup/ + rsync -a etc/authority.plugins/ backup/authority.plugins/ + ``` +4. **Export signing keys:** the compose file maps `authority-keys` to a local Docker volume. Snapshot it without stopping the service: + ```bash + docker run --rm \ + -v authority-keys:/keys \ + -v "$(pwd)/backup:/backup" \ + busybox tar czf /backup/authority-keys-$(date +%Y%m%dT%H%M%SZ).tar.gz -C /keys . + ``` +5. **Checksum:** generate SHA-256 digests for every file and store them alongside the artefacts. +6. **Encrypt & upload:** wrap the backup folder using your secrets management standard (e.g., age, GPG) and upload to the designated offline vault. + +## Cold Backup (planned downtime) +1. Notify stakeholders and drain traffic (CLI clients should refresh tokens afterwards). +2. Stop services: + ```bash + docker compose -f ops/authority/docker-compose.authority.yaml down + ``` +3. Back up volumes directly using `tar`: + ```bash + docker run --rm -v mongo-data:/data -v "$(pwd)/backup:/backup" \ + busybox tar czf /backup/mongo-data-$(date +%Y%m%d).tar.gz -C /data . + docker run --rm -v authority-keys:/keys -v "$(pwd)/backup:/backup" \ + busybox tar czf /backup/authority-keys-$(date +%Y%m%d).tar.gz -C /keys . + ``` +4. Copy configuration + manifests as in the hot backup (steps 3–6). +5. Restart services and verify health: + ```bash + docker compose -f ops/authority/docker-compose.authority.yaml up -d + curl -fsS http://localhost:8080/ready + ``` + +## Restore Procedure +1. **Provision clean volumes:** remove existing volumes if you’re rebuilding a node (`docker volume rm mongo-data authority-keys`), then recreate the compose stack so empty volumes exist. +2. **Restore Mongo:** + ```bash + docker compose exec -T mongo mongorestore --archive --gzip --drop < backup/authority-YYYYMMDDTHHMMSSZ.gz + ``` + Use `--drop` to replace collections; omit if doing a partial restore. +3. **Restore configuration/manifests:** copy `authority.yaml` and `authority.plugins/*` into place before starting the Authority container. +4. **Restore signing keys:** untar into the mounted volume: + ```bash + docker run --rm -v authority-keys:/keys -v "$(pwd)/backup:/backup" \ + busybox tar xzf /backup/authority-keys-YYYYMMDD.tar.gz -C /keys + ``` + Ensure file permissions remain `600` for private keys (`chmod -R 600`). +5. **Start services & validate:** + ```bash + docker compose up -d + curl -fsS http://localhost:8080/health + ``` +6. **Validate JWKS and tokens:** call `/jwks` and issue a short-lived token via the CLI to confirm key material matches expectations. If the restored environment requires a fresh signing key, follow the rotation SOP in [`docs/11_AUTHORITY.md`](../11_AUTHORITY.md) using `ops/authority/key-rotation.sh` to invoke `/internal/signing/rotate`. + +## Disaster Recovery Notes +- **Air-gapped replication:** replicate archives via the Offline Update Kit transport channels; never attach USB devices without scanning. +- **Retention:** maintain 30 daily snapshots + 12 monthly archival copies. Rotate encryption keys annually. 
+- **Key compromise:** if signing keys are suspected compromised, restore from the latest clean backup, rotate via OPS3 (see `ops/authority/key-rotation.sh` and `docs/11_AUTHORITY.md`), and publish a revocation notice. +- **Mongo version:** keep dump/restore images pinned to the deployment version (compose uses `mongo:7`). Driver 3.5.0 requires MongoDB **4.2+**—clusters still on 4.0 must be upgraded before restore, and future driver releases will drop 4.0 entirely. citeturn1open1 + +## Verification Checklist +- [ ] `/ready` reports all identity providers ready. +- [ ] OAuth flows issue tokens signed by the restored keys. +- [ ] `PluginRegistrationSummary` logs expected providers on startup. +- [ ] Revocation manifest export (`dotnet run --project src/Authority/StellaOps.Authority`) succeeds. +- [ ] Monitoring dashboards show metrics resuming (see OPS5 deliverables). + diff --git a/docs/ops/authority-key-rotation.md b/docs/ops/authority-key-rotation.md index 4e633a65..1df76543 100644 --- a/docs/ops/authority-key-rotation.md +++ b/docs/ops/authority-key-rotation.md @@ -1,94 +1,94 @@ -# Authority Signing Key Rotation Playbook - -> **Status:** Authored 2025-10-12 as part of OPS3.KEY-ROTATION rollout. -> Use together with `docs/11_AUTHORITY.md` (Authority service guide) and the automation shipped under `ops/authority/`. - -## 1. Overview - -Authority publishes JWKS and revocation bundles signed with ES256 keys. To rotate those keys without downtime we now provide: - -- **Automation script:** `ops/authority/key-rotation.sh` - Shell helper that POSTS to `/internal/signing/rotate`, supports metadata, dry-run, and confirms JWKS afterwards. -- **CI workflow:** `.gitea/workflows/authority-key-rotation.yml` - Manual dispatch workflow that pulls environment-specific secrets, runs the script, and records the result. Works across staging/production by passing the `environment` input. - -This playbook documents the repeatable sequence for all environments. - -## 2. Pre-requisites - -1. **Generate a new PEM key (per environment)** - ```bash - openssl ecparam -name prime256v1 -genkey -noout \ - -out certificates/authority-signing--.pem - chmod 600 certificates/authority-signing--.pem - ``` -2. **Stash the previous key** under the same volume so it can be referenced in `signing.additionalKeys` after rotation. -3. **Ensure secrets/vars exist in Gitea** - - `_AUTHORITY_BOOTSTRAP_KEY` - - `_AUTHORITY_URL` - - Optional shared defaults `AUTHORITY_BOOTSTRAP_KEY`, `AUTHORITY_URL`. - -## 3. Executing the rotation - -### Option A – via CI workflow (recommended) - -1. Navigate to **Actions → Authority Key Rotation**. -2. Provide inputs: - - `environment`: `staging`, `production`, etc. - - `key_id`: new `kid` (e.g. `authority-signing-2025-dev`). - - `key_path`: path as seen by the Authority service (e.g. `../certificates/authority-signing-2025-dev.pem`). - - Optional `metadata`: comma-separated `key=value` pairs (for audit trails). -3. Trigger. The workflow: - - Reads the bootstrap key/URL from secrets. - - Runs `ops/authority/key-rotation.sh`. - - Prints the JWKS response for verification. - -### Option B – manual shell invocation - -```bash -AUTHORITY_BOOTSTRAP_KEY=$(cat /secure/authority-bootstrap.key) \ -./ops/authority/key-rotation.sh \ - --authority-url https://authority.example.com \ - --key-id authority-signing-2025-dev \ - --key-path ../certificates/authority-signing-2025-dev.pem \ - --meta rotatedBy=ops --meta changeTicket=OPS-1234 -``` - -Use `--dry-run` to inspect the payload before execution. - -## 4. 
Post-rotation checklist - -1. Update `authority.yaml` (or environment-specific overrides): - - Set `signing.activeKeyId` to the new key. - - Set `signing.keyPath` to the new PEM. - - Append the previous key into `signing.additionalKeys`. - - Ensure `keySource`/`provider` match the values passed to the script. -2. Run `stellaops-cli auth revoke export` so revocation bundles are re-signed with the new key. -3. Confirm `/jwks` lists the new `kid` with `status: "active"` and the previous one as `retired`. -4. Archive the old key securely; keep it available until all tokens/bundles signed with it have expired. - -## 5. Development key state - -For the sample configuration (`etc/authority.yaml.sample`) we minted a placeholder dev key: - -- Active: `authority-signing-2025-dev` (`certificates/authority-signing-2025-dev.pem`) -- Retired: `authority-signing-dev` - -Treat these as examples; real environments must maintain their own PEM material. - -## 6. References - -- `docs/11_AUTHORITY.md` – Architecture and rotation SOP (Section 5). -- `docs/ops/authority-backup-restore.md` – Recovery flow referencing this playbook. -- `ops/authority/README.md` – CLI usage and examples. -- `scripts/rotate-policy-cli-secret.sh` – Helper to mint new `policy-cli` shared secrets when policy scope bundles change. - -## 7. Appendix — Policy CLI secret rotation - -Scope migrations such as AUTH-POLICY-23-004 require issuing fresh credentials for the `policy-cli` client. Use the helper script committed with the repo to keep secrets deterministic across environments. - -```bash -./scripts/rotate-policy-cli-secret.sh --output etc/secrets/policy-cli.secret -``` - -The script writes a timestamped header and a random secret into the target file. Use `--dry-run` when generating material for external secret stores. After updating secrets in staging/production, recycle the Authority pods and confirm the new client credentials work before the next release freeze. +# Authority Signing Key Rotation Playbook + +> **Status:** Authored 2025-10-12 as part of OPS3.KEY-ROTATION rollout. +> Use together with `docs/11_AUTHORITY.md` (Authority service guide) and the automation shipped under `ops/authority/`. + +## 1. Overview + +Authority publishes JWKS and revocation bundles signed with ES256 keys. To rotate those keys without downtime we now provide: + +- **Automation script:** `ops/authority/key-rotation.sh` + Shell helper that POSTS to `/internal/signing/rotate`, supports metadata, dry-run, and confirms JWKS afterwards. +- **CI workflow:** `.gitea/workflows/authority-key-rotation.yml` + Manual dispatch workflow that pulls environment-specific secrets, runs the script, and records the result. Works across staging/production by passing the `environment` input. + +This playbook documents the repeatable sequence for all environments. + +## 2. Pre-requisites + +1. **Generate a new PEM key (per environment)** + ```bash + openssl ecparam -name prime256v1 -genkey -noout \ + -out certificates/authority-signing--.pem + chmod 600 certificates/authority-signing--.pem + ``` +2. **Stash the previous key** under the same volume so it can be referenced in `signing.additionalKeys` after rotation. +3. **Ensure secrets/vars exist in Gitea** + - `_AUTHORITY_BOOTSTRAP_KEY` + - `_AUTHORITY_URL` + - Optional shared defaults `AUTHORITY_BOOTSTRAP_KEY`, `AUTHORITY_URL`. + +## 3. Executing the rotation + +### Option A – via CI workflow (recommended) + +1. Navigate to **Actions → Authority Key Rotation**. +2. 
Provide inputs: + - `environment`: `staging`, `production`, etc. + - `key_id`: new `kid` (e.g. `authority-signing-2025-dev`). + - `key_path`: path as seen by the Authority service (e.g. `../certificates/authority-signing-2025-dev.pem`). + - Optional `metadata`: comma-separated `key=value` pairs (for audit trails). +3. Trigger. The workflow: + - Reads the bootstrap key/URL from secrets. + - Runs `ops/authority/key-rotation.sh`. + - Prints the JWKS response for verification. + +### Option B – manual shell invocation + +```bash +AUTHORITY_BOOTSTRAP_KEY=$(cat /secure/authority-bootstrap.key) \ +./ops/authority/key-rotation.sh \ + --authority-url https://authority.example.com \ + --key-id authority-signing-2025-dev \ + --key-path ../certificates/authority-signing-2025-dev.pem \ + --meta rotatedBy=ops --meta changeTicket=OPS-1234 +``` + +Use `--dry-run` to inspect the payload before execution. + +## 4. Post-rotation checklist + +1. Update `authority.yaml` (or environment-specific overrides): + - Set `signing.activeKeyId` to the new key. + - Set `signing.keyPath` to the new PEM. + - Append the previous key into `signing.additionalKeys`. + - Ensure `keySource`/`provider` match the values passed to the script. +2. Run `stellaops-cli auth revoke export` so revocation bundles are re-signed with the new key. +3. Confirm `/jwks` lists the new `kid` with `status: "active"` and the previous one as `retired`. +4. Archive the old key securely; keep it available until all tokens/bundles signed with it have expired. + +## 5. Development key state + +For the sample configuration (`etc/authority.yaml.sample`) we minted a placeholder dev key: + +- Active: `authority-signing-2025-dev` (`certificates/authority-signing-2025-dev.pem`) +- Retired: `authority-signing-dev` + +Treat these as examples; real environments must maintain their own PEM material. + +## 6. References + +- `docs/11_AUTHORITY.md` – Architecture and rotation SOP (Section 5). +- `docs/ops/authority-backup-restore.md` – Recovery flow referencing this playbook. +- `ops/authority/README.md` – CLI usage and examples. +- `scripts/rotate-policy-cli-secret.sh` – Helper to mint new `policy-cli` shared secrets when policy scope bundles change. + +## 7. Appendix — Policy CLI secret rotation + +Scope migrations such as AUTH-POLICY-23-004 require issuing fresh credentials for the `policy-cli` client. Use the helper script committed with the repo to keep secrets deterministic across environments. + +```bash +./scripts/rotate-policy-cli-secret.sh --output etc/secrets/policy-cli.secret +``` + +The script writes a timestamped header and a random secret into the target file. Use `--dry-run` when generating material for external secret stores. After updating secrets in staging/production, recycle the Authority pods and confirm the new client credentials work before the next release freeze. diff --git a/docs/ops/authority-monitoring.md b/docs/ops/authority-monitoring.md index a4a445b7..60717602 100644 --- a/docs/ops/authority-monitoring.md +++ b/docs/ops/authority-monitoring.md @@ -1,83 +1,83 @@ -# Authority Monitoring & Alerting Playbook - -## Telemetry Sources -- **Traces:** Activity source `StellaOps.Authority` emits spans for every token flow (`authority.token.validate_*`, `authority.token.handle_*`, `authority.token.validate_access`). Key tags include `authority.endpoint`, `authority.grant_type`, `authority.username`, `authority.client_id`, and `authority.identity_provider`. 
-- **Metrics:** OpenTelemetry instrumentation (`AddAspNetCoreInstrumentation`, `AddHttpClientInstrumentation`, custom meter `StellaOps.Authority`) exports: - - `http.server.request.duration` histogram (`http_route`, `http_status_code`, `authority.endpoint` tag via `aspnetcore` enrichment). - - `process.runtime.gc.*`, `process.runtime.dotnet.*` (from `AddRuntimeInstrumentation`). -- **Logs:** Serilog writes structured events to stdout. Notable templates: - - `"Password grant verification failed ..."` and `"Plugin {PluginName} denied access ... due to lockout"` (lockout spike detector). - - `"Password grant validation failed for {Username}: provider '{Provider}' does not support MFA required for exception approvals."` (identifies users attempting `exceptions:approve` without MFA support; tie to fresh-auth errors). - - `"Client credentials validation failed for {ClientId}: exception scopes require tenant assignment."` (signals misconfigured exception service identities). - - `"Granting StellaOps bypass for remote {RemoteIp}"` (bypass usage). - - `"Rate limit exceeded for path {Path} from {RemoteIp}"` (limiter alerts). - -## Prometheus Metrics to Collect -| Metric | Query | Purpose | -| --- | --- | --- | -| `token_requests_total` | `sum by (grant_type, status) (rate(http_server_duration_seconds_count{service_name="stellaops-authority", http_route="/token"}[5m]))` | Token issuance volume per grant type (`grant_type` comes via `authority.grant_type` span attribute → Exemplars in Grafana). | -| `token_failure_ratio` | `sum(rate(http_server_duration_seconds_count{service_name="stellaops-authority", http_route="/token", http_status_code=~"4..|5.."}[5m])) / sum(rate(http_server_duration_seconds_count{service_name="stellaops-authority", http_route="/token"}[5m]))` | Alert when > 5 % for 10 min. | -| `authorize_rate_limit_hits` | `sum(rate(aspnetcore_rate_limiting_rejections_total{service_name="stellaops-authority", limiter="authority-token"}[5m]))` | Detect rate limiting saturations (requires OTEL ASP.NET rate limiter exporter). | -| `lockout_events` | `sum by (plugin) (rate(log_messages_total{app="stellaops-authority", level="Warning", message_template="Plugin {PluginName} denied access for {Username} due to lockout (retry after {RetryAfter})."}[5m]))` | Derived from Loki/Promtail log counter. | -| `bypass_usage_total` | `sum(rate(log_messages_total{app="stellaops-authority", level="Information", message_template="Granting StellaOps bypass for remote {RemoteIp}; required scopes {RequiredScopes}."}[5m]))` | Track trusted bypass invocations. | - -> **Exporter note:** Enable `aspnetcore` meters (`dotnet-counters` name `Microsoft.AspNetCore.Hosting`), or configure the OpenTelemetry Collector `metrics` pipeline with `metric_statements` to remap histogram counts into the shown series. - -## Alert Rules -1. **Token Failure Surge** - - _Expression_: `token_failure_ratio > 0.05` - - _For_: `10m` - - _Labels_: `severity="critical"` - - _Annotations_: Include `topk(5, sum by (authority_identity_provider) (increase(authority_token_rejections_total[10m])))` as diagnostic hint (requires span → metric transformation). -2. **Lockout Spike** - - _Expression_: `sum(rate(log_messages_total{message_template="Plugin {PluginName} denied access for {Username} due to lockout (retry after {RetryAfter})."}[15m])) > 10` - - _For_: `15m` - - Investigate credential stuffing; consider temporarily tightening `RateLimiting.Token`. -3. 
**Bypass Threshold** - - _Expression_: `sum(rate(log_messages_total{message_template="Granting StellaOps bypass for remote {RemoteIp}; required scopes {RequiredScopes}."}[5m])) > 1` - - _For_: `5m` - - Alert severity `warning` — verify the calling host list. -4. **Rate Limiter Saturation** - - _Expression_: `sum(rate(aspnetcore_rate_limiting_rejections_total{service_name="stellaops-authority"}[5m])) > 0` - - Escalate if sustained for 5 min; confirm trusted clients aren’t misconfigured. - -## Grafana Dashboard -- Import `docs/ops/authority-grafana-dashboard.json` to provision baseline panels: - - **Token Success vs Failure** – stacked rate visualization split by grant type. - - **Rate Limiter Hits** – bar chart showing `authority-token` and `authority-authorize`. - - **Bypass & Lockout Events** – dual-stat panel using Loki-derived counters. - - **Trace Explorer Link** – panel links to `StellaOps.Authority` span search pre-filtered by `authority.grant_type`. - -## Collector Configuration Snippets -```yaml -receivers: - otlp: - protocols: - http: -exporters: - prometheus: - endpoint: "0.0.0.0:9464" -processors: - batch: - attributes/token_grant: - actions: - - key: grant_type - action: upsert - from_attribute: authority.grant_type -service: - pipelines: - metrics: - receivers: [otlp] - processors: [attributes/token_grant, batch] - exporters: [prometheus] - logs: - receivers: [otlp] - processors: [batch] - exporters: [loki] -``` - -## Operational Checklist -- [ ] Confirm `STELLAOPS_AUTHORITY__OBSERVABILITY__EXPORTERS` enables OTLP in production builds. -- [ ] Ensure Promtail captures container stdout with Serilog structured formatting. -- [ ] Periodically validate alert noise by running load tests that trigger the rate limiter. -- [ ] Include dashboard JSON in Offline Kit for air-gapped clusters; update version header when metrics change. +# Authority Monitoring & Alerting Playbook + +## Telemetry Sources +- **Traces:** Activity source `StellaOps.Authority` emits spans for every token flow (`authority.token.validate_*`, `authority.token.handle_*`, `authority.token.validate_access`). Key tags include `authority.endpoint`, `authority.grant_type`, `authority.username`, `authority.client_id`, and `authority.identity_provider`. +- **Metrics:** OpenTelemetry instrumentation (`AddAspNetCoreInstrumentation`, `AddHttpClientInstrumentation`, custom meter `StellaOps.Authority`) exports: + - `http.server.request.duration` histogram (`http_route`, `http_status_code`, `authority.endpoint` tag via `aspnetcore` enrichment). + - `process.runtime.gc.*`, `process.runtime.dotnet.*` (from `AddRuntimeInstrumentation`). +- **Logs:** Serilog writes structured events to stdout. Notable templates: + - `"Password grant verification failed ..."` and `"Plugin {PluginName} denied access ... due to lockout"` (lockout spike detector). + - `"Password grant validation failed for {Username}: provider '{Provider}' does not support MFA required for exception approvals."` (identifies users attempting `exceptions:approve` without MFA support; tie to fresh-auth errors). + - `"Client credentials validation failed for {ClientId}: exception scopes require tenant assignment."` (signals misconfigured exception service identities). + - `"Granting StellaOps bypass for remote {RemoteIp}"` (bypass usage). + - `"Rate limit exceeded for path {Path} from {RemoteIp}"` (limiter alerts). 
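+
+The log-derived series used in the next section (`log_messages_total`) assume Promtail (or an equivalent agent) counts Serilog events by message template. A minimal pipeline sketch is shown below; field names assume the plain Serilog JSON formatter (`Level`, `MessageTemplate`), so switch to `@l`/`@mt` if the compact formatter is in use, and the placement inside your existing `scrape_configs` is illustrative.
+
+```yaml
+# Illustrative Promtail pipeline_stages excerpt for the Authority container logs.
+pipeline_stages:
+  - json:
+      expressions:
+        level: Level                        # Serilog level field ("@l" with the compact formatter)
+        message_template: MessageTemplate   # Serilog template field ("@mt" with the compact formatter)
+  - labels:
+      level:
+      message_template:
+  - metrics:
+      log_messages_total:
+        type: Counter
+        description: "Authority structured log events by message template"
+        prefix: ""                          # drop Promtail's default promtail_custom_ prefix so the queries above match
+        source: message_template
+        config:
+          action: inc
+```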
+ +## Prometheus Metrics to Collect +| Metric | Query | Purpose | +| --- | --- | --- | +| `token_requests_total` | `sum by (grant_type, status) (rate(http_server_duration_seconds_count{service_name="stellaops-authority", http_route="/token"}[5m]))` | Token issuance volume per grant type (`grant_type` comes via `authority.grant_type` span attribute → Exemplars in Grafana). | +| `token_failure_ratio` | `sum(rate(http_server_duration_seconds_count{service_name="stellaops-authority", http_route="/token", http_status_code=~"4..|5.."}[5m])) / sum(rate(http_server_duration_seconds_count{service_name="stellaops-authority", http_route="/token"}[5m]))` | Alert when > 5 % for 10 min. | +| `authorize_rate_limit_hits` | `sum(rate(aspnetcore_rate_limiting_rejections_total{service_name="stellaops-authority", limiter="authority-token"}[5m]))` | Detect rate limiting saturations (requires OTEL ASP.NET rate limiter exporter). | +| `lockout_events` | `sum by (plugin) (rate(log_messages_total{app="stellaops-authority", level="Warning", message_template="Plugin {PluginName} denied access for {Username} due to lockout (retry after {RetryAfter})."}[5m]))` | Derived from Loki/Promtail log counter. | +| `bypass_usage_total` | `sum(rate(log_messages_total{app="stellaops-authority", level="Information", message_template="Granting StellaOps bypass for remote {RemoteIp}; required scopes {RequiredScopes}."}[5m]))` | Track trusted bypass invocations. | + +> **Exporter note:** Enable `aspnetcore` meters (`dotnet-counters` name `Microsoft.AspNetCore.Hosting`), or configure the OpenTelemetry Collector `metrics` pipeline with `metric_statements` to remap histogram counts into the shown series. + +## Alert Rules +1. **Token Failure Surge** + - _Expression_: `token_failure_ratio > 0.05` + - _For_: `10m` + - _Labels_: `severity="critical"` + - _Annotations_: Include `topk(5, sum by (authority_identity_provider) (increase(authority_token_rejections_total[10m])))` as diagnostic hint (requires span → metric transformation). +2. **Lockout Spike** + - _Expression_: `sum(rate(log_messages_total{message_template="Plugin {PluginName} denied access for {Username} due to lockout (retry after {RetryAfter})."}[15m])) > 10` + - _For_: `15m` + - Investigate credential stuffing; consider temporarily tightening `RateLimiting.Token`. +3. **Bypass Threshold** + - _Expression_: `sum(rate(log_messages_total{message_template="Granting StellaOps bypass for remote {RemoteIp}; required scopes {RequiredScopes}."}[5m])) > 1` + - _For_: `5m` + - Alert severity `warning` — verify the calling host list. +4. **Rate Limiter Saturation** + - _Expression_: `sum(rate(aspnetcore_rate_limiting_rejections_total{service_name="stellaops-authority"}[5m])) > 0` + - Escalate if sustained for 5 min; confirm trusted clients aren’t misconfigured. + +## Grafana Dashboard +- Import `docs/ops/authority-grafana-dashboard.json` to provision baseline panels: + - **Token Success vs Failure** – stacked rate visualization split by grant type. + - **Rate Limiter Hits** – bar chart showing `authority-token` and `authority-authorize`. + - **Bypass & Lockout Events** – dual-stat panel using Loki-derived counters. + - **Trace Explorer Link** – panel links to `StellaOps.Authority` span search pre-filtered by `authority.grant_type`. 
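+
+If Grafana dashboards are provisioned from disk instead of imported manually, a file provider along the lines of the sketch below can load the JSON automatically; the provider name, folder, and path are illustrative and should match your Grafana deployment.
+
+```yaml
+# Illustrative Grafana dashboard provisioning (e.g. /etc/grafana/provisioning/dashboards/stellaops.yaml).
+apiVersion: 1
+providers:
+  - name: stellaops-authority
+    folder: StellaOps
+    type: file
+    disableDeletion: true
+    updateIntervalSeconds: 60
+    options:
+      path: /var/lib/grafana/dashboards/authority   # copy authority-grafana-dashboard.json here
+```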
+ +## Collector Configuration Snippets +```yaml +receivers: + otlp: + protocols: + http: +exporters: + prometheus: + endpoint: "0.0.0.0:9464" +processors: + batch: + attributes/token_grant: + actions: + - key: grant_type + action: upsert + from_attribute: authority.grant_type +service: + pipelines: + metrics: + receivers: [otlp] + processors: [attributes/token_grant, batch] + exporters: [prometheus] + logs: + receivers: [otlp] + processors: [batch] + exporters: [loki] +``` + +## Operational Checklist +- [ ] Confirm `STELLAOPS_AUTHORITY__OBSERVABILITY__EXPORTERS` enables OTLP in production builds. +- [ ] Ensure Promtail captures container stdout with Serilog structured formatting. +- [ ] Periodically validate alert noise by running load tests that trigger the rate limiter. +- [ ] Include dashboard JSON in Offline Kit for air-gapped clusters; update version header when metrics change. diff --git a/docs/ops/concelier-apple-operations.md b/docs/ops/concelier-apple-operations.md index ecf39fd2..cd5c2deb 100644 --- a/docs/ops/concelier-apple-operations.md +++ b/docs/ops/concelier-apple-operations.md @@ -1,77 +1,77 @@ -# Concelier Apple Security Update Connector Operations - -This runbook covers staging and production rollout for the Apple security updates connector (`source:vndr-apple:*`), including observability checks and fixture maintenance. - -## 1. Prerequisites - -- Network egress (or mirrored cache) for `https://gdmf.apple.com/v2/pmv` and the Apple Support domain (`https://support.apple.com/`). -- Optional: corporate proxy exclusions for the Apple hosts if outbound traffic is normally filtered. -- Updated configuration (environment variables or `concelier.yaml`) with an `apple` section. Example baseline: - -```yaml -concelier: - sources: - apple: - softwareLookupUri: "https://gdmf.apple.com/v2/pmv" - advisoryBaseUri: "https://support.apple.com/" - localeSegment: "en-us" - maxAdvisoriesPerFetch: 25 - initialBackfill: "120.00:00:00" - modifiedTolerance: "02:00:00" - failureBackoff: "00:05:00" -``` - -> ℹ️ `softwareLookupUri` and `advisoryBaseUri` must stay absolute and aligned with the HTTP allow-list; Concelier automatically adds both hosts to the connector HttpClient. - -## 2. Staging Smoke Test - -1. Deploy the configuration and restart the Concelier workers to ensure the Apple connector options are bound. -2. Trigger a full connector cycle: - - CLI: `stella db jobs run source:vndr-apple:fetch --and-then source:vndr-apple:parse --and-then source:vndr-apple:map` - - REST: `POST /jobs/run { "kind": "source:vndr-apple:fetch", "chain": ["source:vndr-apple:parse", "source:vndr-apple:map"] }` -3. Validate metrics exported under meter `StellaOps.Concelier.Connector.Vndr.Apple`: - - `apple.fetch.items` (documents fetched) - - `apple.fetch.failures` - - `apple.fetch.unchanged` - - `apple.parse.failures` - - `apple.map.affected.count` (histogram of affected package counts) -4. Cross-check the shared HTTP counters: - - `concelier.source.http.requests_total{concelier_source="vndr-apple"}` should increase for both index and detail phases. - - `concelier.source.http.failures_total{concelier_source="vndr-apple"}` should remain flat (0) during a healthy run. -5. Inspect the info logs: - - `Apple software index fetch … processed=X newDocuments=Y` - - `Apple advisory parse complete … aliases=… affected=…` - - `Mapped Apple advisory … pendingMappings=0` -6. Confirm MongoDB state: - - `raw_documents` store contains the HT article HTML with metadata (`apple.articleId`, `apple.postingDate`). 
- - `dtos` store has `schemaVersion="apple.security.update.v1"`. - - `advisories` collection includes keys `HTxxxxxx` with normalized SemVer rules. - - `source_states` entry for `apple` shows a recent `cursor.lastPosted`. - -## 3. Production Monitoring - -- **Dashboards** – Add the following expressions to your Concelier Grafana board (OTLP/Prometheus naming assumed): - - `rate(apple_fetch_items_total[15m])` vs `rate(concelier_source_http_requests_total{concelier_source="vndr-apple"}[15m])` - - `rate(apple_fetch_failures_total[5m])` for error spikes (`severity=warning` at `>0`) - - `histogram_quantile(0.95, rate(apple_map_affected_count_bucket[1h]))` to watch affected-package fan-out - - `increase(apple_parse_failures_total[6h])` to catch parser drift (alerts at `>0`) -- **Alerts** – Page if `rate(apple_fetch_items_total[2h]) == 0` during business hours while other connectors are active. This often indicates lookup feed failures or misconfigured allow-lists. -- **Logs** – Surface warnings `Apple document {DocumentId} missing GridFS payload` or `Apple parse failed`—repeated hits imply storage issues or HTML regressions. -- **Telemetry pipeline** – `StellaOps.Concelier.WebService` now exports `StellaOps.Concelier.Connector.Vndr.Apple` alongside existing Concelier meters; ensure your OTEL collector or Prometheus scraper includes it. - -## 4. Fixture Maintenance - -Regression fixtures live under `src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures`. Refresh them whenever Apple reshapes the HT layout or when new platforms appear. - -1. Run the helper script matching your platform: - - Bash: `./scripts/update-apple-fixtures.sh` - - PowerShell: `./scripts/update-apple-fixtures.ps1` -2. Each script exports `UPDATE_APPLE_FIXTURES=1`, updates the `WSLENV` passthrough, and touches `.update-apple-fixtures` so WSL+VS Code test runs observe the flag. The subsequent test execution fetches the live HT articles listed in `AppleFixtureManager`, sanitises the HTML, and rewrites the `.expected.json` DTO snapshots. -3. Review the diff for localisation or nav noise. Once satisfied, re-run the tests without the env var (`dotnet test src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests.csproj`) to verify determinism. -4. Commit fixture updates together with any parser/mapping changes that motivated them. - -## 5. Known Issues & Follow-up Tasks - -- Apple occasionally throttles anonymous requests after bursts. The connector backs off automatically, but persistent `apple.fetch.failures` spikes might require mirroring the HT content or scheduling wider fetch windows. -- Rapid Security Responses may appear before the general patch notes surface in the lookup JSON. When that happens, the fetch run will log `detailFailures>0`. Collect sample HTML and refresh fixtures to confirm parser coverage. -- Multi-locale content is still under regression sweep (`src/StellaOps.Concelier.Connector.Vndr.Apple/TASKS.md`). Capture non-`en-us` snapshots once the fixture tooling stabilises. +# Concelier Apple Security Update Connector Operations + +This runbook covers staging and production rollout for the Apple security updates connector (`source:vndr-apple:*`), including observability checks and fixture maintenance. + +## 1. Prerequisites + +- Network egress (or mirrored cache) for `https://gdmf.apple.com/v2/pmv` and the Apple Support domain (`https://support.apple.com/`). +- Optional: corporate proxy exclusions for the Apple hosts if outbound traffic is normally filtered. 
+- Updated configuration (environment variables or `concelier.yaml`) with an `apple` section. Example baseline: + +```yaml +concelier: + sources: + apple: + softwareLookupUri: "https://gdmf.apple.com/v2/pmv" + advisoryBaseUri: "https://support.apple.com/" + localeSegment: "en-us" + maxAdvisoriesPerFetch: 25 + initialBackfill: "120.00:00:00" + modifiedTolerance: "02:00:00" + failureBackoff: "00:05:00" +``` + +> ℹ️ `softwareLookupUri` and `advisoryBaseUri` must stay absolute and aligned with the HTTP allow-list; Concelier automatically adds both hosts to the connector HttpClient. + +## 2. Staging Smoke Test + +1. Deploy the configuration and restart the Concelier workers to ensure the Apple connector options are bound. +2. Trigger a full connector cycle: + - CLI: `stella db jobs run source:vndr-apple:fetch --and-then source:vndr-apple:parse --and-then source:vndr-apple:map` + - REST: `POST /jobs/run { "kind": "source:vndr-apple:fetch", "chain": ["source:vndr-apple:parse", "source:vndr-apple:map"] }` +3. Validate metrics exported under meter `StellaOps.Concelier.Connector.Vndr.Apple`: + - `apple.fetch.items` (documents fetched) + - `apple.fetch.failures` + - `apple.fetch.unchanged` + - `apple.parse.failures` + - `apple.map.affected.count` (histogram of affected package counts) +4. Cross-check the shared HTTP counters: + - `concelier.source.http.requests_total{concelier_source="vndr-apple"}` should increase for both index and detail phases. + - `concelier.source.http.failures_total{concelier_source="vndr-apple"}` should remain flat (0) during a healthy run. +5. Inspect the info logs: + - `Apple software index fetch … processed=X newDocuments=Y` + - `Apple advisory parse complete … aliases=… affected=…` + - `Mapped Apple advisory … pendingMappings=0` +6. Confirm MongoDB state: + - `raw_documents` store contains the HT article HTML with metadata (`apple.articleId`, `apple.postingDate`). + - `dtos` store has `schemaVersion="apple.security.update.v1"`. + - `advisories` collection includes keys `HTxxxxxx` with normalized SemVer rules. + - `source_states` entry for `apple` shows a recent `cursor.lastPosted`. + +## 3. Production Monitoring + +- **Dashboards** – Add the following expressions to your Concelier Grafana board (OTLP/Prometheus naming assumed): + - `rate(apple_fetch_items_total[15m])` vs `rate(concelier_source_http_requests_total{concelier_source="vndr-apple"}[15m])` + - `rate(apple_fetch_failures_total[5m])` for error spikes (`severity=warning` at `>0`) + - `histogram_quantile(0.95, rate(apple_map_affected_count_bucket[1h]))` to watch affected-package fan-out + - `increase(apple_parse_failures_total[6h])` to catch parser drift (alerts at `>0`) +- **Alerts** – Page if `rate(apple_fetch_items_total[2h]) == 0` during business hours while other connectors are active. This often indicates lookup feed failures or misconfigured allow-lists. +- **Logs** – Surface warnings `Apple document {DocumentId} missing GridFS payload` or `Apple parse failed`—repeated hits imply storage issues or HTML regressions. +- **Telemetry pipeline** – `StellaOps.Concelier.WebService` now exports `StellaOps.Concelier.Connector.Vndr.Apple` alongside existing Concelier meters; ensure your OTEL collector or Prometheus scraper includes it. + +## 4. Fixture Maintenance + +Regression fixtures live under `src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures`. Refresh them whenever Apple reshapes the HT layout or when new platforms appear. + +1. 
Run the helper script matching your platform: + - Bash: `./scripts/update-apple-fixtures.sh` + - PowerShell: `./scripts/update-apple-fixtures.ps1` +2. Each script exports `UPDATE_APPLE_FIXTURES=1`, updates the `WSLENV` passthrough, and touches `.update-apple-fixtures` so WSL+VS Code test runs observe the flag. The subsequent test execution fetches the live HT articles listed in `AppleFixtureManager`, sanitises the HTML, and rewrites the `.expected.json` DTO snapshots. +3. Review the diff for localisation or nav noise. Once satisfied, re-run the tests without the env var (`dotnet test src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests.csproj`) to verify determinism. +4. Commit fixture updates together with any parser/mapping changes that motivated them. + +## 5. Known Issues & Follow-up Tasks + +- Apple occasionally throttles anonymous requests after bursts. The connector backs off automatically, but persistent `apple.fetch.failures` spikes might require mirroring the HT content or scheduling wider fetch windows. +- Rapid Security Responses may appear before the general patch notes surface in the lookup JSON. When that happens, the fetch run will log `detailFailures>0`. Collect sample HTML and refresh fixtures to confirm parser coverage. +- Multi-locale content is still under regression sweep (`src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Vndr.Apple/TASKS.md`). Capture non-`en-us` snapshots once the fixture tooling stabilises. diff --git a/docs/ops/concelier-authority-audit-runbook.md b/docs/ops/concelier-authority-audit-runbook.md index c09b46b0..eb8a64c1 100644 --- a/docs/ops/concelier-authority-audit-runbook.md +++ b/docs/ops/concelier-authority-audit-runbook.md @@ -1,159 +1,159 @@ -# Concelier Authority Audit Runbook - -_Last updated: 2025-10-22_ - -This runbook helps operators verify and monitor the StellaOps Concelier ⇆ Authority integration. It focuses on the `/jobs*` surface, which now requires StellaOps Authority tokens, and the corresponding audit/metric signals that expose authentication and bypass activity. - -## 1. Prerequisites - -- Authority integration is enabled in `concelier.yaml` (or via `CONCELIER_AUTHORITY__*` environment variables) with a valid `clientId`, secret, audience, and required scopes. -- OTLP metrics/log exporters are configured (`concelier.telemetry.*`) or container stdout is shipped to your SIEM. -- Operators have access to the Concelier job trigger endpoints via CLI or REST for smoke tests. -- The rollout table in `docs/10_CONCELIER_CLI_QUICKSTART.md` has been reviewed so stakeholders align on the staged → enforced toggle timeline. - -### Configuration snippet - -```yaml -concelier: - authority: - enabled: true - allowAnonymousFallback: false # keep true only during initial rollout - issuer: "https://authority.internal" - audiences: - - "api://concelier" - requiredScopes: - - "concelier.jobs.trigger" - - "advisory:read" - - "advisory:ingest" - requiredTenants: - - "tenant-default" - bypassNetworks: - - "127.0.0.1/32" - - "::1/128" - clientId: "concelier-jobs" - clientSecretFile: "/run/secrets/concelier_authority_client" - tokenClockSkewSeconds: 60 - resilience: - enableRetries: true - retryDelays: - - "00:00:01" - - "00:00:02" - - "00:00:05" - allowOfflineCacheFallback: true - offlineCacheTolerance: "00:10:00" -``` - -> Store secrets outside source control. 
Concelier reads `clientSecretFile` on startup; rotate by updating the mounted file and restarting the service. - -### Resilience tuning - -- **Connected sites:** keep the default 1 s / 2 s / 5 s retry ladder so Concelier retries transient Authority hiccups but still surfaces outages quickly. Leave `allowOfflineCacheFallback=true` so cached discovery/JWKS data can bridge short Pathfinder restarts. -- **Air-gapped/Offline Kit installs:** extend `offlineCacheTolerance` (15–30 minutes) to keep the cached metadata valid between manual synchronisations. You can also disable retries (`enableRetries=false`) if infrastructure teams prefer to handle exponential backoff at the network layer; Concelier will fail fast but keep deterministic logs. -- Concelier resolves these knobs through `IOptionsMonitor`. Edits to `concelier.yaml` are applied on configuration reload; restart the container if you change environment variables or do not have file-watch reloads enabled. - -## 2. Key Signals - -### 2.1 Audit log channel - -Concelier emits structured audit entries via the `Concelier.Authorization.Audit` logger for every `/jobs*` request once Authority enforcement is active. - -``` -Concelier authorization audit route=/jobs/definitions status=200 subject=ops@example.com clientId=concelier-cli scopes=concelier.jobs.trigger advisory:ingest bypass=False remote=10.1.4.7 -``` - -| Field | Sample value | Meaning | -|--------------|-------------------------|------------------------------------------------------------------------------------------| -| `route` | `/jobs/definitions` | Endpoint that processed the request. | -| `status` | `200` / `401` / `409` | Final HTTP status code returned to the caller. | -| `subject` | `ops@example.com` | User or service principal subject (falls back to `(anonymous)` when unauthenticated). | -| `clientId` | `concelier-cli` | OAuth client ID provided by Authority (`(none)` if the token lacked the claim). | -| `scopes` | `concelier.jobs.trigger advisory:ingest advisory:read` | Normalised scope list extracted from token claims; `(none)` if the token carried none. | -| `tenant` | `tenant-default` | Tenant claim extracted from the Authority token (`(none)` when the token lacked it). | -| `bypass` | `True` / `False` | Indicates whether the request succeeded because its source IP matched a bypass CIDR. | -| `remote` | `10.1.4.7` | Remote IP recorded from the connection / forwarded header test hooks. | - -Use your logging backend (e.g., Loki) to index the logger name and filter for suspicious combinations: - -- `status=401 AND bypass=True` – bypass network accepted an unauthenticated call (should be temporary during rollout). -- `status=202 AND scopes="(none)"` – a token without scopes triggered a job; tighten client configuration. -- `status=202 AND NOT contains(scopes,"advisory:ingest")` – ingestion attempted without the new AOC scopes; confirm the Authority client registration matches the sample above. -- `tenant!=(tenant-default)` – indicates a cross-tenant token was accepted. Ensure Concelier `requiredTenants` is aligned with Authority client registration. -- Spike in `clientId="(none)"` – indicates upstream Authority is not issuing `client_id` claims or the CLI is outdated. - -### 2.2 Metrics - -Concelier publishes counters under the OTEL meter `StellaOps.Concelier.WebService.Jobs`. Tags: `job.kind`, `job.trigger`, `job.outcome`. 
- -| Metric name | Description | PromQL example | -|-------------------------------|----------------------------------------------------|----------------| -| `web.jobs.triggered` | Accepted job trigger requests. | `sum by (job_kind) (rate(web_jobs_triggered_total[5m]))` | -| `web.jobs.trigger.conflict` | Rejected triggers (already running, disabled…). | `sum(rate(web_jobs_trigger_conflict_total[5m]))` | -| `web.jobs.trigger.failed` | Server-side job failures. | `sum(rate(web_jobs_trigger_failed_total[5m]))` | - -> Prometheus/OTEL collectors typically surface counters with `_total` suffix. Adjust queries to match your pipeline’s generated metric names. - -Correlate audit logs with the following global meter exported via `Concelier.SourceDiagnostics`: - -- `concelier.source.http.requests_total{concelier_source="jobs-run"}` – ensures REST/manual triggers route through Authority. -- If Grafana dashboards are deployed, extend the “Concelier Jobs” board with the above counters plus a table of recent audit log entries. - -## 3. Alerting Guidance - -1. **Unauthorized bypass attempt** - - Query: `sum(rate(log_messages_total{logger="Concelier.Authorization.Audit", status="401", bypass="True"}[5m])) > 0` - - Action: verify `bypassNetworks` list; confirm expected maintenance windows; rotate credentials if suspicious. - -2. **Missing scopes** - - Query: `sum(rate(log_messages_total{logger="Concelier.Authorization.Audit", scopes="(none)", status="200"}[5m])) > 0` - - Action: audit Authority client registration; ensure `requiredScopes` includes `concelier.jobs.trigger`, `advisory:ingest`, and `advisory:read`. - -3. **Trigger failure surge** - - Query: `sum(rate(web_jobs_trigger_failed_total[10m])) > 0` with severity `warning` if sustained for 10 minutes. - - Action: inspect correlated audit entries and `Concelier.Telemetry` traces for job execution errors. - -4. **Conflict spike** - - Query: `sum(rate(web_jobs_trigger_conflict_total[10m])) > 5` (tune threshold). - - Action: downstream scheduling may be firing repetitive triggers; ensure precedence is configured properly. - -5. **Authority offline** - - Watch `Concelier.Authorization.Audit` logs for `status=503` or `status=500` along with `clientId="(none)"`. Investigate Authority availability before re-enabling anonymous fallback. - -## 4. Rollout & Verification Procedure - -1. **Pre-checks** - - Align with the rollout phases documented in `docs/10_CONCELIER_CLI_QUICKSTART.md` (validation → rehearsal → enforced) and record the target dates in your change request. - - Confirm `allowAnonymousFallback` is `false` in production; keep `true` only during staged validation. - - Validate Authority issuer metadata is reachable from Concelier (`curl https://authority.internal/.well-known/openid-configuration` from the host). - -2. **Smoke test with valid token** - - Obtain a token via CLI: `stella auth login --scope "concelier.jobs.trigger advisory:ingest" --scope advisory:read`. - - Trigger a read-only endpoint: `curl -H "Authorization: Bearer $TOKEN" https://concelier.internal/jobs/definitions`. - - Expect HTTP 200/202 and an audit log with `bypass=False`, `scopes=concelier.jobs.trigger advisory:ingest advisory:read`, and `tenant=tenant-default`. - -3. **Negative test without token** - - Call the same endpoint without a token. Expect HTTP 401, `bypass=False`. - - If the request succeeds, double-check `bypassNetworks` and ensure fallback is disabled. - -4. 
**Bypass check (if applicable)** - - From an allowed maintenance IP, call `/jobs/definitions` without a token. Confirm the audit log shows `bypass=True`. Review business justification and expiry date for such entries. - -5. **Metrics validation** - - Ensure `web.jobs.triggered` counter increments during accepted runs. - - Exporters should show corresponding spans (`concelier.job.trigger`) if tracing is enabled. - -## 5. Troubleshooting - -| Symptom | Probable cause | Remediation | -|---------|----------------|-------------| -| Audit log shows `clientId=(none)` for all requests | Authority not issuing `client_id` claim or CLI outdated | Update StellaOps Authority configuration (`StellaOpsAuthorityOptions.Token.Claims.ClientId`), or upgrade the CLI token acquisition flow. | -| Requests succeed with `bypass=True` unexpectedly | Local network added to `bypassNetworks` or fallback still enabled | Remove/adjust the CIDR list, disable anonymous fallback, restart Concelier. | -| HTTP 401 with valid token | `requiredScopes` missing from client registration or token audience mismatch | Verify Authority client scopes (`concelier.jobs.trigger`) and ensure the token audience matches `audiences` config. | -| Metrics missing from Prometheus | Telemetry exporters disabled or filter missing OTEL meter | Set `concelier.telemetry.enableMetrics=true`, ensure collector includes `StellaOps.Concelier.WebService.Jobs` meter. | -| Sudden spike in `web.jobs.trigger.failed` | Downstream job failure or Authority timeout mid-request | Inspect Concelier job logs, re-run with tracing enabled, validate Authority latency. | - -## 6. References - -- `docs/21_INSTALL_GUIDE.md` – Authority configuration quick start. -- `docs/17_SECURITY_HARDENING_GUIDE.md` – Security guardrails and enforcement deadlines. -- `docs/ops/authority-monitoring.md` – Authority-side monitoring and alerting playbook. -- `StellaOps.Concelier.WebService/Filters/JobAuthorizationAuditFilter.cs` – source of audit log fields. +# Concelier Authority Audit Runbook + +_Last updated: 2025-10-22_ + +This runbook helps operators verify and monitor the StellaOps Concelier ⇆ Authority integration. It focuses on the `/jobs*` surface, which now requires StellaOps Authority tokens, and the corresponding audit/metric signals that expose authentication and bypass activity. + +## 1. Prerequisites + +- Authority integration is enabled in `concelier.yaml` (or via `CONCELIER_AUTHORITY__*` environment variables) with a valid `clientId`, secret, audience, and required scopes. +- OTLP metrics/log exporters are configured (`concelier.telemetry.*`) or container stdout is shipped to your SIEM. +- Operators have access to the Concelier job trigger endpoints via CLI or REST for smoke tests. +- The rollout table in `docs/10_CONCELIER_CLI_QUICKSTART.md` has been reviewed so stakeholders align on the staged → enforced toggle timeline. 
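+
+If you prefer the `CONCELIER_AUTHORITY__*` environment-variable form over `concelier.yaml`, the compose-style sketch below shows one plausible mapping for the core keys used in the configuration snippet that follows. The exact key names are assumptions derived from the documented prefix and standard double-underscore configuration binding; verify them against your deployment before relying on them.
+
+```yaml
+# Sketch: docker-compose service fragment wiring Authority settings via environment variables.
+services:
+  concelier:
+    environment:
+      CONCELIER_AUTHORITY__ENABLED: "true"
+      CONCELIER_AUTHORITY__ALLOWANONYMOUSFALLBACK: "false"   # keep "true" only during staged rollout
+      CONCELIER_AUTHORITY__ISSUER: "https://authority.internal"
+      CONCELIER_AUTHORITY__AUDIENCES__0: "api://concelier"
+      CONCELIER_AUTHORITY__REQUIREDSCOPES__0: "concelier.jobs.trigger"
+      CONCELIER_AUTHORITY__REQUIREDSCOPES__1: "advisory:read"
+      CONCELIER_AUTHORITY__REQUIREDSCOPES__2: "advisory:ingest"
+      CONCELIER_AUTHORITY__CLIENTID: "concelier-jobs"
+      CONCELIER_AUTHORITY__CLIENTSECRETFILE: "/run/secrets/concelier_authority_client"
+```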
+ +### Configuration snippet + +```yaml +concelier: + authority: + enabled: true + allowAnonymousFallback: false # keep true only during initial rollout + issuer: "https://authority.internal" + audiences: + - "api://concelier" + requiredScopes: + - "concelier.jobs.trigger" + - "advisory:read" + - "advisory:ingest" + requiredTenants: + - "tenant-default" + bypassNetworks: + - "127.0.0.1/32" + - "::1/128" + clientId: "concelier-jobs" + clientSecretFile: "/run/secrets/concelier_authority_client" + tokenClockSkewSeconds: 60 + resilience: + enableRetries: true + retryDelays: + - "00:00:01" + - "00:00:02" + - "00:00:05" + allowOfflineCacheFallback: true + offlineCacheTolerance: "00:10:00" +``` + +> Store secrets outside source control. Concelier reads `clientSecretFile` on startup; rotate by updating the mounted file and restarting the service. + +### Resilience tuning + +- **Connected sites:** keep the default 1 s / 2 s / 5 s retry ladder so Concelier retries transient Authority hiccups but still surfaces outages quickly. Leave `allowOfflineCacheFallback=true` so cached discovery/JWKS data can bridge short Pathfinder restarts. +- **Air-gapped/Offline Kit installs:** extend `offlineCacheTolerance` (15–30 minutes) to keep the cached metadata valid between manual synchronisations. You can also disable retries (`enableRetries=false`) if infrastructure teams prefer to handle exponential backoff at the network layer; Concelier will fail fast but keep deterministic logs. +- Concelier resolves these knobs through `IOptionsMonitor`. Edits to `concelier.yaml` are applied on configuration reload; restart the container if you change environment variables or do not have file-watch reloads enabled. + +## 2. Key Signals + +### 2.1 Audit log channel + +Concelier emits structured audit entries via the `Concelier.Authorization.Audit` logger for every `/jobs*` request once Authority enforcement is active. + +``` +Concelier authorization audit route=/jobs/definitions status=200 subject=ops@example.com clientId=concelier-cli scopes=concelier.jobs.trigger advisory:ingest bypass=False remote=10.1.4.7 +``` + +| Field | Sample value | Meaning | +|--------------|-------------------------|------------------------------------------------------------------------------------------| +| `route` | `/jobs/definitions` | Endpoint that processed the request. | +| `status` | `200` / `401` / `409` | Final HTTP status code returned to the caller. | +| `subject` | `ops@example.com` | User or service principal subject (falls back to `(anonymous)` when unauthenticated). | +| `clientId` | `concelier-cli` | OAuth client ID provided by Authority (`(none)` if the token lacked the claim). | +| `scopes` | `concelier.jobs.trigger advisory:ingest advisory:read` | Normalised scope list extracted from token claims; `(none)` if the token carried none. | +| `tenant` | `tenant-default` | Tenant claim extracted from the Authority token (`(none)` when the token lacked it). | +| `bypass` | `True` / `False` | Indicates whether the request succeeded because its source IP matched a bypass CIDR. | +| `remote` | `10.1.4.7` | Remote IP recorded from the connection / forwarded header test hooks. | + +Use your logging backend (e.g., Loki) to index the logger name and filter for suspicious combinations: + +- `status=401 AND bypass=True` – bypass network accepted an unauthenticated call (should be temporary during rollout). +- `status=202 AND scopes="(none)"` – a token without scopes triggered a job; tighten client configuration. 
+- `status=202 AND NOT contains(scopes,"advisory:ingest")` – ingestion attempted without the new AOC scopes; confirm the Authority client registration matches the sample above. +- `tenant!=(tenant-default)` – indicates a cross-tenant token was accepted. Ensure Concelier `requiredTenants` is aligned with Authority client registration. +- Spike in `clientId="(none)"` – indicates upstream Authority is not issuing `client_id` claims or the CLI is outdated. + +### 2.2 Metrics + +Concelier publishes counters under the OTEL meter `StellaOps.Concelier.WebService.Jobs`. Tags: `job.kind`, `job.trigger`, `job.outcome`. + +| Metric name | Description | PromQL example | +|-------------------------------|----------------------------------------------------|----------------| +| `web.jobs.triggered` | Accepted job trigger requests. | `sum by (job_kind) (rate(web_jobs_triggered_total[5m]))` | +| `web.jobs.trigger.conflict` | Rejected triggers (already running, disabled…). | `sum(rate(web_jobs_trigger_conflict_total[5m]))` | +| `web.jobs.trigger.failed` | Server-side job failures. | `sum(rate(web_jobs_trigger_failed_total[5m]))` | + +> Prometheus/OTEL collectors typically surface counters with `_total` suffix. Adjust queries to match your pipeline’s generated metric names. + +Correlate audit logs with the following global meter exported via `Concelier.SourceDiagnostics`: + +- `concelier.source.http.requests_total{concelier_source="jobs-run"}` – ensures REST/manual triggers route through Authority. +- If Grafana dashboards are deployed, extend the “Concelier Jobs” board with the above counters plus a table of recent audit log entries. + +## 3. Alerting Guidance + +1. **Unauthorized bypass attempt** + - Query: `sum(rate(log_messages_total{logger="Concelier.Authorization.Audit", status="401", bypass="True"}[5m])) > 0` + - Action: verify `bypassNetworks` list; confirm expected maintenance windows; rotate credentials if suspicious. + +2. **Missing scopes** + - Query: `sum(rate(log_messages_total{logger="Concelier.Authorization.Audit", scopes="(none)", status="200"}[5m])) > 0` + - Action: audit Authority client registration; ensure `requiredScopes` includes `concelier.jobs.trigger`, `advisory:ingest`, and `advisory:read`. + +3. **Trigger failure surge** + - Query: `sum(rate(web_jobs_trigger_failed_total[10m])) > 0` with severity `warning` if sustained for 10 minutes. + - Action: inspect correlated audit entries and `Concelier.Telemetry` traces for job execution errors. + +4. **Conflict spike** + - Query: `sum(rate(web_jobs_trigger_conflict_total[10m])) > 5` (tune threshold). + - Action: downstream scheduling may be firing repetitive triggers; ensure precedence is configured properly. + +5. **Authority offline** + - Watch `Concelier.Authorization.Audit` logs for `status=503` or `status=500` along with `clientId="(none)"`. Investigate Authority availability before re-enabling anonymous fallback. + +## 4. Rollout & Verification Procedure + +1. **Pre-checks** + - Align with the rollout phases documented in `docs/10_CONCELIER_CLI_QUICKSTART.md` (validation → rehearsal → enforced) and record the target dates in your change request. + - Confirm `allowAnonymousFallback` is `false` in production; keep `true` only during staged validation. + - Validate Authority issuer metadata is reachable from Concelier (`curl https://authority.internal/.well-known/openid-configuration` from the host). + +2. 
**Smoke test with valid token** + - Obtain a token via CLI: `stella auth login --scope "concelier.jobs.trigger advisory:ingest" --scope advisory:read`. + - Trigger a read-only endpoint: `curl -H "Authorization: Bearer $TOKEN" https://concelier.internal/jobs/definitions`. + - Expect HTTP 200/202 and an audit log with `bypass=False`, `scopes=concelier.jobs.trigger advisory:ingest advisory:read`, and `tenant=tenant-default`. + +3. **Negative test without token** + - Call the same endpoint without a token. Expect HTTP 401, `bypass=False`. + - If the request succeeds, double-check `bypassNetworks` and ensure fallback is disabled. + +4. **Bypass check (if applicable)** + - From an allowed maintenance IP, call `/jobs/definitions` without a token. Confirm the audit log shows `bypass=True`. Review business justification and expiry date for such entries. + +5. **Metrics validation** + - Ensure `web.jobs.triggered` counter increments during accepted runs. + - Exporters should show corresponding spans (`concelier.job.trigger`) if tracing is enabled. + +## 5. Troubleshooting + +| Symptom | Probable cause | Remediation | +|---------|----------------|-------------| +| Audit log shows `clientId=(none)` for all requests | Authority not issuing `client_id` claim or CLI outdated | Update StellaOps Authority configuration (`StellaOpsAuthorityOptions.Token.Claims.ClientId`), or upgrade the CLI token acquisition flow. | +| Requests succeed with `bypass=True` unexpectedly | Local network added to `bypassNetworks` or fallback still enabled | Remove/adjust the CIDR list, disable anonymous fallback, restart Concelier. | +| HTTP 401 with valid token | `requiredScopes` missing from client registration or token audience mismatch | Verify Authority client scopes (`concelier.jobs.trigger`) and ensure the token audience matches `audiences` config. | +| Metrics missing from Prometheus | Telemetry exporters disabled or filter missing OTEL meter | Set `concelier.telemetry.enableMetrics=true`, ensure collector includes `StellaOps.Concelier.WebService.Jobs` meter. | +| Sudden spike in `web.jobs.trigger.failed` | Downstream job failure or Authority timeout mid-request | Inspect Concelier job logs, re-run with tracing enabled, validate Authority latency. | + +## 6. References + +- `docs/21_INSTALL_GUIDE.md` – Authority configuration quick start. +- `docs/17_SECURITY_HARDENING_GUIDE.md` – Security guardrails and enforcement deadlines. +- `docs/ops/authority-monitoring.md` – Authority-side monitoring and alerting playbook. +- `StellaOps.Concelier.WebService/Filters/JobAuthorizationAuditFilter.cs` – source of audit log fields. diff --git a/docs/ops/concelier-cccs-operations.md b/docs/ops/concelier-cccs-operations.md index 12917a2f..c5a11c46 100644 --- a/docs/ops/concelier-cccs-operations.md +++ b/docs/ops/concelier-cccs-operations.md @@ -1,72 +1,72 @@ -# Concelier CCCS Connector Operations - -This runbook covers day‑to‑day operation of the Canadian Centre for Cyber Security (`source:cccs:*`) connector, including configuration, telemetry, and historical backfill guidance for English/French advisories. - -## 1. Configuration Checklist - -- Network egress (or mirrored cache) for `https://www.cyber.gc.ca/` and the JSON API endpoints under `/api/cccs/`. -- Set the Concelier options before restarting workers. 
Example `concelier.yaml` snippet: - -```yaml -concelier: - sources: - cccs: - feeds: - - language: "en" - uri: "https://www.cyber.gc.ca/api/cccs/threats/v1/get?lang=en&content_type=cccs_threat" - - language: "fr" - uri: "https://www.cyber.gc.ca/api/cccs/threats/v1/get?lang=fr&content_type=cccs_threat" - maxEntriesPerFetch: 80 # increase temporarily for backfill runs - maxKnownEntries: 512 - requestTimeout: "00:00:30" - requestDelay: "00:00:00.250" - failureBackoff: "00:05:00" -``` - -> ℹ️ The `/api/cccs/threats/v1/get` endpoint returns thousands of records per language (≈5 100 rows each as of 2025‑10‑14). The connector honours `maxEntriesPerFetch`, so leave it low for steady‑state and raise it for planned backfills. - -## 2. Telemetry & Logging - -- **Metrics (Meter `StellaOps.Concelier.Connector.Cccs`):** - - `cccs.fetch.attempts`, `cccs.fetch.success`, `cccs.fetch.failures` - - `cccs.fetch.documents`, `cccs.fetch.unchanged` - - `cccs.parse.success`, `cccs.parse.failures`, `cccs.parse.quarantine` - - `cccs.map.success`, `cccs.map.failures` -- **Shared HTTP metrics** via `SourceDiagnostics`: - - `concelier.source.http.requests{concelier.source="cccs"}` - - `concelier.source.http.failures{concelier.source="cccs"}` - - `concelier.source.http.duration{concelier.source="cccs"}` -- **Structured logs** - - `CCCS fetch completed feeds=… items=… newDocuments=… pendingDocuments=…` - - `CCCS parse completed parsed=… failures=…` - - `CCCS map completed mapped=… failures=…` - - Warnings fire when GridFS payloads/DTOs go missing or parser sanitisation fails. - -Suggested Grafana alerts: -- `increase(cccs.fetch.failures_total[15m]) > 0` -- `rate(cccs.map.success_total[1h]) == 0` while other connectors are active -- `histogram_quantile(0.95, rate(concelier_source_http_duration_bucket{concelier_source="cccs"}[1h])) > 5s` - -## 3. Historical Backfill Plan - -1. **Snapshot the source** – the API accepts `page=` and `lang=` query parameters. `page=0` returns the full dataset (observed earliest `date_created`: 2018‑06‑08 for EN, 2018‑06‑08 for FR). Mirror those responses into Offline Kit storage when operating air‑gapped. -2. **Stage ingestion**: - - Temporarily raise `maxEntriesPerFetch` (e.g. 500) and restart Concelier workers. - - Run chained jobs until `pendingDocuments` drains: - `stella db jobs run source:cccs:fetch --and-then source:cccs:parse --and-then source:cccs:map` - - Monitor `cccs.fetch.unchanged` growth; once it approaches dataset size the backfill is complete. -3. **Optional pagination sweep** – for incremental mirrors, iterate `page=` (0…N) while `response.Count == 50`, persisting JSON to disk. Store alongside metadata (`language`, `page`, SHA256) so repeated runs detect drift. -4. **Language split** – keep EN/FR payloads separate to preserve canonical language fields. The connector emits `Language` directly from the feed entry, so mixed ingestion simply produces parallel advisories keyed by the same serial number. -5. **Throttle planning** – schedule backfills during maintenance windows; the API tolerates burst downloads but respect the 250 ms request delay or raise it if mirrored traffic is not available. - -## 4. Selector & Sanitiser Notes - -- `CccsHtmlParser` now parses the **unsanitised DOM** (via AngleSharp) and only sanitises when persisting `ContentHtml`. -- Product extraction walks headings (`Affected Products`, `Produits touchés`, `Mesures recommandées`) and consumes nested lists within `div/section/article` containers. -- `HtmlContentSanitizer` allows `
heading tags` and `list tags
` so stored HTML keeps headings for UI rendering and downstream summarisation. - -## 5. Fixture Maintenance - -- Regression fixtures live in `src/StellaOps.Concelier.Connector.Cccs.Tests/Fixtures`. -- Refresh via `UPDATE_CCCS_FIXTURES=1 dotnet test src/StellaOps.Concelier.Connector.Cccs.Tests/StellaOps.Concelier.Connector.Cccs.Tests.csproj`. -- Fixtures capture both EN/FR advisories with nested lists to guard against sanitiser regressions; review diffs for heading/list changes before committing. +# Concelier CCCS Connector Operations + +This runbook covers day‑to‑day operation of the Canadian Centre for Cyber Security (`source:cccs:*`) connector, including configuration, telemetry, and historical backfill guidance for English/French advisories. + +## 1. Configuration Checklist + +- Network egress (or mirrored cache) for `https://www.cyber.gc.ca/` and the JSON API endpoints under `/api/cccs/`. +- Set the Concelier options before restarting workers. Example `concelier.yaml` snippet: + +```yaml +concelier: + sources: + cccs: + feeds: + - language: "en" + uri: "https://www.cyber.gc.ca/api/cccs/threats/v1/get?lang=en&content_type=cccs_threat" + - language: "fr" + uri: "https://www.cyber.gc.ca/api/cccs/threats/v1/get?lang=fr&content_type=cccs_threat" + maxEntriesPerFetch: 80 # increase temporarily for backfill runs + maxKnownEntries: 512 + requestTimeout: "00:00:30" + requestDelay: "00:00:00.250" + failureBackoff: "00:05:00" +``` + +> ℹ️ The `/api/cccs/threats/v1/get` endpoint returns thousands of records per language (≈5 100 rows each as of 2025‑10‑14). The connector honours `maxEntriesPerFetch`, so leave it low for steady‑state and raise it for planned backfills. + +## 2. Telemetry & Logging + +- **Metrics (Meter `StellaOps.Concelier.Connector.Cccs`):** + - `cccs.fetch.attempts`, `cccs.fetch.success`, `cccs.fetch.failures` + - `cccs.fetch.documents`, `cccs.fetch.unchanged` + - `cccs.parse.success`, `cccs.parse.failures`, `cccs.parse.quarantine` + - `cccs.map.success`, `cccs.map.failures` +- **Shared HTTP metrics** via `SourceDiagnostics`: + - `concelier.source.http.requests{concelier.source="cccs"}` + - `concelier.source.http.failures{concelier.source="cccs"}` + - `concelier.source.http.duration{concelier.source="cccs"}` +- **Structured logs** + - `CCCS fetch completed feeds=… items=… newDocuments=… pendingDocuments=…` + - `CCCS parse completed parsed=… failures=…` + - `CCCS map completed mapped=… failures=…` + - Warnings fire when GridFS payloads/DTOs go missing or parser sanitisation fails. + +Suggested Grafana alerts: +- `increase(cccs.fetch.failures_total[15m]) > 0` +- `rate(cccs.map.success_total[1h]) == 0` while other connectors are active +- `histogram_quantile(0.95, rate(concelier_source_http_duration_bucket{concelier_source="cccs"}[1h])) > 5s` + +## 3. Historical Backfill Plan + +1. **Snapshot the source** – the API accepts `page=` and `lang=` query parameters. `page=0` returns the full dataset (observed earliest `date_created`: 2018‑06‑08 for EN, 2018‑06‑08 for FR). Mirror those responses into Offline Kit storage when operating air‑gapped. +2. **Stage ingestion**: + - Temporarily raise `maxEntriesPerFetch` (e.g. 500) and restart Concelier workers. + - Run chained jobs until `pendingDocuments` drains: + `stella db jobs run source:cccs:fetch --and-then source:cccs:parse --and-then source:cccs:map` + - Monitor `cccs.fetch.unchanged` growth; once it approaches dataset size the backfill is complete. +3. 
**Optional pagination sweep** – for incremental mirrors, iterate `page=` (0…N) while `response.Count == 50`, persisting JSON to disk. Store alongside metadata (`language`, `page`, SHA256) so repeated runs detect drift. +4. **Language split** – keep EN/FR payloads separate to preserve canonical language fields. The connector emits `Language` directly from the feed entry, so mixed ingestion simply produces parallel advisories keyed by the same serial number. +5. **Throttle planning** – schedule backfills during maintenance windows; the API tolerates burst downloads but respect the 250 ms request delay or raise it if mirrored traffic is not available. + +## 4. Selector & Sanitiser Notes + +- `CccsHtmlParser` now parses the **unsanitised DOM** (via AngleSharp) and only sanitises when persisting `ContentHtml`. +- Product extraction walks headings (`Affected Products`, `Produits touchés`, `Mesures recommandées`) and consumes nested lists within `div/section/article` containers. +- `HtmlContentSanitizer` allows `
heading tags` and `list tags
` so stored HTML keeps headings for UI rendering and downstream summarisation. + +## 5. Fixture Maintenance + +- Regression fixtures live in `src/Concelier/__Tests/StellaOps.Concelier.Connector.Cccs.Tests/Fixtures`. +- Refresh via `UPDATE_CCCS_FIXTURES=1 dotnet test src/Concelier/__Tests/StellaOps.Concelier.Connector.Cccs.Tests/StellaOps.Concelier.Connector.Cccs.Tests.csproj`. +- Fixtures capture both EN/FR advisories with nested lists to guard against sanitiser regressions; review diffs for heading/list changes before committing. diff --git a/docs/ops/concelier-conflict-resolution.md b/docs/ops/concelier-conflict-resolution.md index 493c304c..93d59ef4 100644 --- a/docs/ops/concelier-conflict-resolution.md +++ b/docs/ops/concelier-conflict-resolution.md @@ -1,160 +1,160 @@ -# Concelier Conflict Resolution Runbook (Sprint 3) - -This runbook equips Concelier operators to detect, triage, and resolve advisory conflicts now that the Sprint 3 merge engine landed (`AdvisoryPrecedenceMerger`, merge-event hashing, and telemetry counters). It builds on the canonical rules defined in `src/DEDUP_CONFLICTS_RESOLUTION_ALGO.md` and the metrics/logging instrumentation delivered this sprint. - ---- - -## 1. Precedence Model (recap) - -- **Default ranking:** `GHSA -> NVD -> OSV`, with distro/vendor PSIRTs outranking ecosystem feeds (`AdvisoryPrecedenceDefaults`). Use `concelier:merge:precedence:ranks` to override per source when incident response requires it. -- **Freshness override:** if a lower-ranked source is >= 48 hours newer for a freshness-sensitive field (title, summary, affected ranges, references, credits), it wins. Every override stamps `provenance[].decisionReason = freshness`. -- **Tie-breakers:** when precedence and freshness tie, the engine falls back to (1) primary source order, (2) shortest normalized text, (3) lowest stable hash. Merge-generated provenance records set `decisionReason = tie-breaker`. -- **Audit trail:** each merged advisory receives a `merge` provenance entry listing the participating sources plus a `merge_event` record with canonical before/after SHA-256 hashes. - ---- - -## 2. Telemetry Shipped This Sprint - -| Instrument | Type | Key Tags | Purpose | -|------------|------|----------|---------| -| `concelier.merge.operations` | Counter | `inputs` | Total precedence merges executed. | -| `concelier.merge.overrides` | Counter | `primary_source`, `suppressed_source`, `primary_rank`, `suppressed_rank` | Field-level overrides chosen by precedence. | -| `concelier.merge.range_overrides` | Counter | `advisory_key`, `package_type`, `primary_source`, `suppressed_source`, `primary_range_count`, `suppressed_range_count` | Package range overrides emitted by `AffectedPackagePrecedenceResolver`. | -| `concelier.merge.conflicts` | Counter | `type` (`severity`, `precedence_tie`), `reason` (`mismatch`, `primary_missing`, `equal_rank`) | Conflicts requiring operator review. | -| `concelier.merge.identity_conflicts` | Counter | `scheme`, `alias_value`, `advisory_count` | Alias collisions surfaced by the identity graph. | - -### Structured logs - -- `AdvisoryOverride` (EventId 1000) - logs merge suppressions with alias/provenance counts. -- `PackageRangeOverride` (EventId 1001) - logs package-level precedence decisions. -- `PrecedenceConflict` (EventId 1002) - logs mismatched severity or equal-rank scenarios. -- `Alias collision ...` (no EventId) - emitted when `concelier.merge.identity_conflicts` increments. - -Expect all logs at `Information`. 
Ensure OTEL exporters include the scope `StellaOps.Concelier.Merge`. - ---- - -## 3. Detection & Alerting - -1. **Dashboard panels** - - `concelier.merge.conflicts` - table grouped by `type/reason`. Alert when > 0 in a 15 minute window. - - `concelier.merge.range_overrides` - stacked bar by `package_type`. Spikes highlight vendor PSIRT overrides over registry data. - - `concelier.merge.overrides` with `primary_source|suppressed_source` - catches unexpected precedence flips (e.g., OSV overtaking GHSA). - - `concelier.merge.identity_conflicts` - single-stat; alert when alias collisions occur more than once per day. -2. **Log based alerts** - - `eventId=1002` with `reason="equal_rank"` - indicates precedence table gaps; page merge owners. - - `eventId=1002` with `reason="mismatch"` - severity disagreement; open connector bug if sustained. -3. **Job health** - - `stellaops-cli db merge` exit code `1` signifies unresolved conflicts. Pipe to automation that captures logs and notifies #concelier-ops. - -### Threshold updates (2025-10-12) - -- `concelier.merge.conflicts` – Page only when ≥ 2 events fire within 30 minutes; the synthetic conflict fixture run produces 0 conflicts, so the first event now routes to Slack for manual review instead of paging. -- `concelier.merge.overrides` – Raise a warning when the 30-minute sum exceeds 10 (canonical triple yields exactly 1 summary override with `primary_source=osv`, `suppressed_source=ghsa`). -- `concelier.merge.range_overrides` – Maintain the 15-minute alert at ≥ 3 but annotate dashboards that the regression triple emits a single `package_type=semver` override so ops can spot unexpected spikes. - ---- - -## 4. Triage Workflow - -1. **Confirm job context** - - `stellaops-cli db merge` (CLI) or `POST /jobs/merge:reconcile` (API) to rehydrate the merge job. Use `--verbose` to stream structured logs during triage. -2. **Inspect metrics** - - Correlate spikes in `concelier.merge.conflicts` with `primary_source`/`suppressed_source` tags from `concelier.merge.overrides`. -3. **Pull structured logs** - - Example (vector output): - ``` - jq 'select(.EventId.Name=="PrecedenceConflict") | {advisory: .State[0].Value, type: .ConflictType, reason: .Reason, primary: .PrimarySources, suppressed: .SuppressedSources}' stellaops-concelier.log - ``` -4. **Review merge events** - - `mongosh`: - ```javascript - use concelier; - db.merge_event.find({ advisoryKey: "CVE-2025-1234" }).sort({ mergedAt: -1 }).limit(5); - ``` - - Compare `beforeHash` vs `afterHash` to confirm the merge actually changed canonical output. -5. **Interrogate provenance** - - `db.advisories.findOne({ advisoryKey: "CVE-2025-1234" }, { title: 1, severity: 1, provenance: 1, "affectedPackages.provenance": 1 })` - - Check `provenance[].decisionReason` values (`precedence`, `freshness`, `tie-breaker`) to understand why the winning field was chosen. - ---- - -## 5. Conflict Classification Matrix - -| Signal | Likely Cause | Immediate Action | -|--------|--------------|------------------| -| `reason="mismatch"` with `type="severity"` | Upstream feeds disagree on CVSS vector/severity. | Verify which feed is freshest; if correctness is known, adjust connector mapping or precedence override. | -| `reason="primary_missing"` | Higher-ranked source lacks the field entirely. | Backfill connector data or temporarily allow lower-ranked source via precedence override. | -| `reason="equal_rank"` | Two feeds share the same precedence rank (custom config or missing entry). 
| Update `concelier:merge:precedence:ranks` to break the tie; restart merge job. | -| Rising `concelier.merge.range_overrides` for a package type | Vendor PSIRT now supplies richer ranges. | Validate connectors emit `decisionReason="precedence"` and update dashboards to treat registry ranges as fallback. | -| `concelier.merge.identity_conflicts` > 0 | Alias scheme mapping produced collisions (duplicate CVE <-> advisory pairs). | Inspect `Alias collision` log payload; reconcile the alias graph by adjusting connector alias output. | - ---- - -## 6. Resolution Playbook - -1. **Connector data fix** - - Re-run the offending connector stages (`stellaops-cli db fetch --source ghsa --stage map` etc.). - - Once fixed, rerun merge and verify `decisionReason` reflects `freshness` or `precedence` as expected. -2. **Temporary precedence override** - - Edit `etc/concelier.yaml`: - ```yaml - concelier: - merge: - precedence: - ranks: - osv: 1 - ghsa: 0 - ``` - - Restart Concelier workers; confirm tags in `concelier.merge.overrides` show the new ranks. - - Document the override with expiry in the change log. -3. **Alias remediation** - - Update connector mapping rules to weed out duplicate aliases (e.g., skip GHSA aliases that mirror CVE IDs). - - Flush cached alias graphs if necessary (`db.alias_graph.drop()` is destructive-coordinate with Storage before issuing). -4. **Escalation** - - If override metrics spike due to upstream regression, open an incident with Security Guild, referencing merge logs and `merge_event` IDs. - ---- - -## 7. Validation Checklist - -- [ ] Merge job rerun returns exit code `0`. -- [ ] `concelier.merge.conflicts` baseline returns to zero after corrective action. -- [ ] Latest `merge_event` entry shows expected hash delta. -- [ ] Affected advisory document shows updated `provenance[].decisionReason`. -- [ ] Ops change log updated with incident summary, config overrides, and rollback plan. - ---- - -## 8. Reference Material - -- Canonical conflict rules: `src/DEDUP_CONFLICTS_RESOLUTION_ALGO.md`. -- Merge engine internals: `src/StellaOps.Concelier.Merge/Services/AdvisoryPrecedenceMerger.cs`. -- Metrics definitions: `src/StellaOps.Concelier.Merge/Services/AdvisoryMergeService.cs` (identity conflicts) and `AdvisoryPrecedenceMerger`. -- Storage audit trail: `src/StellaOps.Concelier.Merge/Services/MergeEventWriter.cs`, `src/StellaOps.Concelier.Storage.Mongo/MergeEvents`. - -Keep this runbook synchronized with future sprint notes and update alert thresholds as baseline volumes change. - ---- - -## 9. Synthetic Regression Fixtures - -- **Locations** – Canonical conflict snapshots now live at `src/StellaOps.Concelier.Connector.Ghsa.Tests/Fixtures/conflict-ghsa.canonical.json`, `src/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/conflict-nvd.canonical.json`, and `src/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/conflict-osv.canonical.json`. 
-- **Validation commands** – To regenerate and verify the fixtures offline, run: - -```bash -dotnet test src/StellaOps.Concelier.Connector.Ghsa.Tests/StellaOps.Concelier.Connector.Ghsa.Tests.csproj --filter GhsaConflictFixtureTests -dotnet test src/StellaOps.Concelier.Connector.Nvd.Tests/StellaOps.Concelier.Connector.Nvd.Tests.csproj --filter NvdConflictFixtureTests -dotnet test src/StellaOps.Concelier.Connector.Osv.Tests/StellaOps.Concelier.Connector.Osv.Tests.csproj --filter OsvConflictFixtureTests -dotnet test src/StellaOps.Concelier.Merge.Tests/StellaOps.Concelier.Merge.Tests.csproj --filter MergeAsync_AppliesCanonicalRulesAndPersistsDecisions -``` - -- **Expected signals** – The triple produces one freshness-driven summary override (`primary_source=osv`, `suppressed_source=ghsa`) and one range override for the npm SemVer package while leaving `concelier.merge.conflicts` at zero. Use these values as the baseline when tuning dashboards or load-testing alert pipelines. - ---- - -## 10. Change Log - -| Date (UTC) | Change | Notes | -|------------|--------|-------| -| 2025-10-16 | Ops review signed off after connector expansion (CCCS, CERT-Bund, KISA, ICS CISA, MSRC) landed. Alert thresholds from §3 reaffirmed; dashboards updated to watch attachment signals emitted by ICS CISA connector. | Ops sign-off recorded by Concelier Ops Guild; no additional overrides required. | +# Concelier Conflict Resolution Runbook (Sprint 3) + +This runbook equips Concelier operators to detect, triage, and resolve advisory conflicts now that the Sprint 3 merge engine landed (`AdvisoryPrecedenceMerger`, merge-event hashing, and telemetry counters). It builds on the canonical rules defined in `src/DEDUP_CONFLICTS_RESOLUTION_ALGO.md` and the metrics/logging instrumentation delivered this sprint. + +--- + +## 1. Precedence Model (recap) + +- **Default ranking:** `GHSA -> NVD -> OSV`, with distro/vendor PSIRTs outranking ecosystem feeds (`AdvisoryPrecedenceDefaults`). Use `concelier:merge:precedence:ranks` to override per source when incident response requires it. +- **Freshness override:** if a lower-ranked source is >= 48 hours newer for a freshness-sensitive field (title, summary, affected ranges, references, credits), it wins. Every override stamps `provenance[].decisionReason = freshness`. +- **Tie-breakers:** when precedence and freshness tie, the engine falls back to (1) primary source order, (2) shortest normalized text, (3) lowest stable hash. Merge-generated provenance records set `decisionReason = tie-breaker`. +- **Audit trail:** each merged advisory receives a `merge` provenance entry listing the participating sources plus a `merge_event` record with canonical before/after SHA-256 hashes. + +--- + +## 2. Telemetry Shipped This Sprint + +| Instrument | Type | Key Tags | Purpose | +|------------|------|----------|---------| +| `concelier.merge.operations` | Counter | `inputs` | Total precedence merges executed. | +| `concelier.merge.overrides` | Counter | `primary_source`, `suppressed_source`, `primary_rank`, `suppressed_rank` | Field-level overrides chosen by precedence. | +| `concelier.merge.range_overrides` | Counter | `advisory_key`, `package_type`, `primary_source`, `suppressed_source`, `primary_range_count`, `suppressed_range_count` | Package range overrides emitted by `AffectedPackagePrecedenceResolver`. | +| `concelier.merge.conflicts` | Counter | `type` (`severity`, `precedence_tie`), `reason` (`mismatch`, `primary_missing`, `equal_rank`) | Conflicts requiring operator review. 
| +| `concelier.merge.identity_conflicts` | Counter | `scheme`, `alias_value`, `advisory_count` | Alias collisions surfaced by the identity graph. | + +### Structured logs + +- `AdvisoryOverride` (EventId 1000) - logs merge suppressions with alias/provenance counts. +- `PackageRangeOverride` (EventId 1001) - logs package-level precedence decisions. +- `PrecedenceConflict` (EventId 1002) - logs mismatched severity or equal-rank scenarios. +- `Alias collision ...` (no EventId) - emitted when `concelier.merge.identity_conflicts` increments. + +Expect all logs at the `Information` level. Ensure OTEL exporters include the scope `StellaOps.Concelier.Merge`. + +--- + +## 3. Detection & Alerting + +1. **Dashboard panels** +   - `concelier.merge.conflicts` - table grouped by `type/reason`. Alert when > 0 in a 15-minute window. +   - `concelier.merge.range_overrides` - stacked bar by `package_type`. Spikes highlight vendor PSIRT overrides over registry data. +   - `concelier.merge.overrides` with `primary_source|suppressed_source` - catches unexpected precedence flips (e.g., OSV overtaking GHSA). +   - `concelier.merge.identity_conflicts` - single-stat; alert when alias collisions occur more than once per day. +2. **Log-based alerts** +   - `eventId=1002` with `reason="equal_rank"` - indicates precedence table gaps; page merge owners. +   - `eventId=1002` with `reason="mismatch"` - severity disagreement; open connector bug if sustained. +3. **Job health** +   - `stellaops-cli db merge` exit code `1` signifies unresolved conflicts. Pipe to automation that captures logs and notifies #concelier-ops. + +### Threshold updates (2025-10-12) + +- `concelier.merge.conflicts` – Page only when ≥ 2 events fire within 30 minutes; the synthetic conflict fixture run produces 0 conflicts, so the first event now routes to Slack for manual review instead of paging. +- `concelier.merge.overrides` – Raise a warning when the 30-minute sum exceeds 10 (canonical triple yields exactly 1 summary override with `primary_source=osv`, `suppressed_source=ghsa`). +- `concelier.merge.range_overrides` – Maintain the 15-minute alert at ≥ 3 but annotate dashboards that the regression triple emits a single `package_type=semver` override so ops can spot unexpected spikes. + +--- + +## 4. Triage Workflow + +1. **Confirm job context** +   - `stellaops-cli db merge` (CLI) or `POST /jobs/merge:reconcile` (API) to rehydrate the merge job. Use `--verbose` to stream structured logs during triage. +2. **Inspect metrics** +   - Correlate spikes in `concelier.merge.conflicts` with `primary_source`/`suppressed_source` tags from `concelier.merge.overrides`. +3. **Pull structured logs** +   - Example (vector output): +     ``` +     jq 'select(.EventId.Name=="PrecedenceConflict") | {advisory: .State[0].Value, type: .ConflictType, reason: .Reason, primary: .PrimarySources, suppressed: .SuppressedSources}' stellaops-concelier.log +     ``` +4. **Review merge events** +   - `mongosh`: +     ```javascript +     use concelier; +     db.merge_event.find({ advisoryKey: "CVE-2025-1234" }).sort({ mergedAt: -1 }).limit(5); +     ``` +   - Compare `beforeHash` vs `afterHash` to confirm the merge actually changed canonical output. +5. **Interrogate provenance** +   - `db.advisories.findOne({ advisoryKey: "CVE-2025-1234" }, { title: 1, severity: 1, provenance: 1, "affectedPackages.provenance": 1 })` +   - Check `provenance[].decisionReason` values (`precedence`, `freshness`, `tie-breaker`) to understand why the winning field was chosen. + +--- + +## 5. Conflict Classification Matrix + +| Signal | Likely Cause | Immediate Action | +|--------|--------------|------------------| +| `reason="mismatch"` with `type="severity"` | Upstream feeds disagree on CVSS vector/severity. | Verify which feed is freshest; if correctness is known, adjust connector mapping or precedence override. | +| `reason="primary_missing"` | Higher-ranked source lacks the field entirely. | Backfill connector data or temporarily allow lower-ranked source via precedence override. | +| `reason="equal_rank"` | Two feeds share the same precedence rank (custom config or missing entry). | Update `concelier:merge:precedence:ranks` to break the tie; restart merge job. | +| Rising `concelier.merge.range_overrides` for a package type | Vendor PSIRT now supplies richer ranges. | Validate connectors emit `decisionReason="precedence"` and update dashboards to treat registry ranges as fallback. | +| `concelier.merge.identity_conflicts` > 0 | Alias scheme mapping produced collisions (duplicate CVE <-> advisory pairs). | Inspect `Alias collision` log payload; reconcile the alias graph by adjusting connector alias output. | + +--- + +## 6. Resolution Playbook + +1. **Connector data fix** +   - Re-run the offending connector stages (`stellaops-cli db fetch --source ghsa --stage map` etc.). +   - Once fixed, rerun merge and verify `decisionReason` reflects `freshness` or `precedence` as expected. +2. **Temporary precedence override** +   - Edit `etc/concelier.yaml`: +     ```yaml +     concelier: +       merge: +         precedence: +           ranks: +             osv: 1 +             ghsa: 0 +     ``` +   - Restart Concelier workers; confirm tags in `concelier.merge.overrides` show the new ranks. +   - Document the override with expiry in the change log. +3. **Alias remediation** +   - Update connector mapping rules to weed out duplicate aliases (e.g., skip GHSA aliases that mirror CVE IDs). +   - Flush cached alias graphs if necessary (`db.alias_graph.drop()` is destructive; coordinate with Storage before issuing). +4. **Escalation** +   - If override metrics spike due to upstream regression, open an incident with Security Guild, referencing merge logs and `merge_event` IDs. + +--- + +## 7. Validation Checklist + +- [ ] Merge job rerun returns exit code `0`. +- [ ] `concelier.merge.conflicts` baseline returns to zero after corrective action. +- [ ] Latest `merge_event` entry shows expected hash delta. +- [ ] Affected advisory document shows updated `provenance[].decisionReason`. +- [ ] Ops change log updated with incident summary, config overrides, and rollback plan. + +--- + +## 8. Reference Material + +- Canonical conflict rules: `src/DEDUP_CONFLICTS_RESOLUTION_ALGO.md`. +- Merge engine internals: `src/Concelier/__Libraries/StellaOps.Concelier.Merge/Services/AdvisoryPrecedenceMerger.cs`. +- Metrics definitions: `src/Concelier/__Libraries/StellaOps.Concelier.Merge/Services/AdvisoryMergeService.cs` (identity conflicts) and `AdvisoryPrecedenceMerger`. +- Storage audit trail: `src/Concelier/__Libraries/StellaOps.Concelier.Merge/Services/MergeEventWriter.cs`, `src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/MergeEvents`. + +Keep this runbook synchronized with future sprint notes and update alert thresholds as baseline volumes change. + +--- + +## 9. Synthetic Regression Fixtures + +- **Locations** – Canonical conflict snapshots now live at `src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Ghsa.Tests/Fixtures/conflict-ghsa.canonical.json`, `src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/conflict-nvd.canonical.json`, and `src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/conflict-osv.canonical.json`. +- **Validation commands** – To regenerate and verify the fixtures offline, run: + +```bash +dotnet test src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Ghsa.Tests/StellaOps.Concelier.Connector.Ghsa.Tests.csproj --filter GhsaConflictFixtureTests +dotnet test src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Nvd.Tests/StellaOps.Concelier.Connector.Nvd.Tests.csproj --filter NvdConflictFixtureTests +dotnet test src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Osv.Tests/StellaOps.Concelier.Connector.Osv.Tests.csproj --filter OsvConflictFixtureTests +dotnet test src/Concelier/__Tests/StellaOps.Concelier.Merge.Tests/StellaOps.Concelier.Merge.Tests.csproj --filter MergeAsync_AppliesCanonicalRulesAndPersistsDecisions +``` + +- **Expected signals** – The triple produces one freshness-driven summary override (`primary_source=osv`, `suppressed_source=ghsa`) and one range override for the npm SemVer package while leaving `concelier.merge.conflicts` at zero. Use these values as the baseline when tuning dashboards or load-testing alert pipelines. + +--- + +## 10. Change Log + +| Date (UTC) | Change | Notes | +|------------|--------|-------| +| 2025-10-16 | Ops review signed off after connector expansion (CCCS, CERT-Bund, KISA, ICS CISA, MSRC) landed. Alert thresholds from §3 reaffirmed; dashboards updated to watch attachment signals emitted by ICS CISA connector. | Ops sign-off recorded by Concelier Ops Guild; no additional overrides required. | diff --git a/docs/ops/concelier-cve-kev-operations.md b/docs/ops/concelier-cve-kev-operations.md index 66f38377..cf5afe9c 100644 --- a/docs/ops/concelier-cve-kev-operations.md +++ b/docs/ops/concelier-cve-kev-operations.md @@ -58,7 +58,7 @@ concelier: While Ops finalises long-lived CVE Services credentials, we validated the connector end-to-end against the recorded CVE-2024-0001 payloads used in regression tests: -- Command: `dotnet test src/StellaOps.Concelier.Connector.Cve.Tests/StellaOps.Concelier.Connector.Cve.Tests.csproj -l "console;verbosity=detailed"` +- Command: `dotnet test src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Cve.Tests/StellaOps.Concelier.Connector.Cve.Tests.csproj -l "console;verbosity=detailed"` - Summary log emitted by the connector: ``` CVEs fetch window 2024-09-01T00:00:00Z->2024-10-01T00:00:00Z pages=1 listSuccess=1 detailDocuments=1 detailFailures=0 detailUnchanged=0 pendingDocuments=0->1 pendingMappings=0->1 hasMorePages=False nextWindowStart=2024-09-15T12:00:00Z nextWindowEnd=(none) nextPage=1 diff --git a/docs/ops/concelier-kisa-operations.md b/docs/ops/concelier-kisa-operations.md index 85d060bb..9061ed4d 100644 --- a/docs/ops/concelier-kisa-operations.md +++ b/docs/ops/concelier-kisa-operations.md @@ -1,74 +1,74 @@ -# Concelier KISA Connector Operations - -Operational guidance for the Korea Internet & Security Agency (KISA / KNVD) connector (`source:kisa:*`). 
Pair this with the engineering brief in `docs/dev/kisa_connector_notes.md`. - -## 1. Prerequisites - -- Outbound HTTPS (or mirrored cache) for `https://knvd.krcert.or.kr/`. -- Connector options defined under `concelier:sources:kisa`: - -```yaml -concelier: - sources: - kisa: - feedUri: "https://knvd.krcert.or.kr/rss/securityInfo.do" - detailApiUri: "https://knvd.krcert.or.kr/rssDetailData.do" - detailPageUri: "https://knvd.krcert.or.kr/detailDos.do" - maxAdvisoriesPerFetch: 10 - requestDelay: "00:00:01" - failureBackoff: "00:05:00" -``` - -> Ensure the URIs stay absolute—Concelier adds the `feedUri`/`detailApiUri` hosts to the HttpClient allow-list automatically. - -## 2. Staging Smoke Test - -1. Restart the Concelier workers so the KISA options bind. -2. Run a full connector cycle: - - CLI: `stella db jobs run source:kisa:fetch --and-then source:kisa:parse --and-then source:kisa:map` - - REST: `POST /jobs/run { "kind": "source:kisa:fetch", "chain": ["source:kisa:parse", "source:kisa:map"] }` -3. Confirm telemetry (Meter `StellaOps.Concelier.Connector.Kisa`): - - `kisa.feed.success`, `kisa.feed.items` - - `kisa.detail.success` / `.failures` - - `kisa.parse.success` / `.failures` - - `kisa.map.success` / `.failures` - - `kisa.cursor.updates` -4. Inspect logs for structured entries: - - `KISA feed returned {ItemCount}` - - `KISA fetched detail for {Idx} … category={Category}` - - `KISA mapped advisory {AdvisoryId} (severity={Severity})` - - Absence of warnings such as `document missing GridFS payload`. -5. Validate MongoDB state: - - `raw_documents.metadata` has `kisa.idx`, `kisa.category`, `kisa.title`. - - DTO store contains `schemaVersion="kisa.detail.v1"`. - - Advisories include aliases (`IDX`, CVE) and `language="ko"`. - - `source_states` entry for `kisa` shows recent `cursor.lastFetchAt`. - -## 3. Production Monitoring - -- **Dashboards** – Add the following Prometheus/OTEL expressions: - - `rate(kisa_feed_items_total[15m])` versus `rate(concelier_source_http_requests_total{concelier_source="kisa"}[15m])` - - `increase(kisa_detail_failures_total{reason!="empty-document"}[1h])` alert at `>0` - - `increase(kisa_parse_failures_total[1h])` for storage/JSON issues - - `increase(kisa_map_failures_total[1h])` to flag schema drift - - `increase(kisa_cursor_updates_total[6h]) == 0` during active windows → warn -- **Alerts** – Page when `rate(kisa_feed_success_total[2h]) == 0` while other connectors are active; back off for maintenance windows announced on `https://knvd.krcert.or.kr/`. -- **Logs** – Watch for repeated warnings (`document missing`, `DTO missing`) or errors with reason tags `HttpRequestException`, `download`, `parse`, `map`. - -## 4. Localisation Handling - -- Hangul categories (for example `취약점정보`) flow into telemetry tags (`category=…`) and logs. Dashboards must render UTF‑8 and avoid transliteration. -- HTML content is sanitised before storage; translation teams can consume the `ContentHtml` field safely. -- Advisory severity remains as provided by KISA (`High`, `Medium`, etc.). Map-level failures include the severity tag for filtering. - -## 5. Fixture & Regression Maintenance - -- Regression fixtures: `src/StellaOps.Concelier.Connector.Kisa.Tests/Fixtures/kisa-feed.xml` and `kisa-detail.json`. -- Refresh via `UPDATE_KISA_FIXTURES=1 dotnet test src/StellaOps.Concelier.Connector.Kisa.Tests/StellaOps.Concelier.Connector.Kisa.Tests.csproj`. -- The telemetry regression (`KisaConnectorTests.Telemetry_RecordsMetrics`) will fail if counters/log wiring drifts—treat failures as gating. 
- -## 6. Known Issues - -- RSS feeds only expose the latest 10 advisories; long outages require replay via archived feeds or manual IDX seeds. -- Detail endpoint occasionally throttles; the connector honours `requestDelay` and reports failures with reason `HttpRequestException`. Consider increasing delay for weekend backfills. -- If `kisa.category` tags suddenly appear as `unknown`, verify KISA has not renamed RSS elements; update the parser fixtures before production rollout. +# Concelier KISA Connector Operations + +Operational guidance for the Korea Internet & Security Agency (KISA / KNVD) connector (`source:kisa:*`). Pair this with the engineering brief in `docs/dev/kisa_connector_notes.md`. + +## 1. Prerequisites + +- Outbound HTTPS (or mirrored cache) for `https://knvd.krcert.or.kr/`. +- Connector options defined under `concelier:sources:kisa`: + +```yaml +concelier: + sources: + kisa: + feedUri: "https://knvd.krcert.or.kr/rss/securityInfo.do" + detailApiUri: "https://knvd.krcert.or.kr/rssDetailData.do" + detailPageUri: "https://knvd.krcert.or.kr/detailDos.do" + maxAdvisoriesPerFetch: 10 + requestDelay: "00:00:01" + failureBackoff: "00:05:00" +``` + +> Ensure the URIs stay absolute—Concelier adds the `feedUri`/`detailApiUri` hosts to the HttpClient allow-list automatically. + +## 2. Staging Smoke Test + +1. Restart the Concelier workers so the KISA options bind. +2. Run a full connector cycle: + - CLI: `stella db jobs run source:kisa:fetch --and-then source:kisa:parse --and-then source:kisa:map` + - REST: `POST /jobs/run { "kind": "source:kisa:fetch", "chain": ["source:kisa:parse", "source:kisa:map"] }` +3. Confirm telemetry (Meter `StellaOps.Concelier.Connector.Kisa`): + - `kisa.feed.success`, `kisa.feed.items` + - `kisa.detail.success` / `.failures` + - `kisa.parse.success` / `.failures` + - `kisa.map.success` / `.failures` + - `kisa.cursor.updates` +4. Inspect logs for structured entries: + - `KISA feed returned {ItemCount}` + - `KISA fetched detail for {Idx} … category={Category}` + - `KISA mapped advisory {AdvisoryId} (severity={Severity})` + - Absence of warnings such as `document missing GridFS payload`. +5. Validate MongoDB state: + - `raw_documents.metadata` has `kisa.idx`, `kisa.category`, `kisa.title`. + - DTO store contains `schemaVersion="kisa.detail.v1"`. + - Advisories include aliases (`IDX`, CVE) and `language="ko"`. + - `source_states` entry for `kisa` shows recent `cursor.lastFetchAt`. + +## 3. Production Monitoring + +- **Dashboards** – Add the following Prometheus/OTEL expressions: + - `rate(kisa_feed_items_total[15m])` versus `rate(concelier_source_http_requests_total{concelier_source="kisa"}[15m])` + - `increase(kisa_detail_failures_total{reason!="empty-document"}[1h])` alert at `>0` + - `increase(kisa_parse_failures_total[1h])` for storage/JSON issues + - `increase(kisa_map_failures_total[1h])` to flag schema drift + - `increase(kisa_cursor_updates_total[6h]) == 0` during active windows → warn +- **Alerts** – Page when `rate(kisa_feed_success_total[2h]) == 0` while other connectors are active; back off for maintenance windows announced on `https://knvd.krcert.or.kr/`. +- **Logs** – Watch for repeated warnings (`document missing`, `DTO missing`) or errors with reason tags `HttpRequestException`, `download`, `parse`, `map`. + +## 4. Localisation Handling + +- Hangul categories (for example `취약점정보`) flow into telemetry tags (`category=…`) and logs. Dashboards must render UTF‑8 and avoid transliteration. 
+- HTML content is sanitised before storage; translation teams can consume the `ContentHtml` field safely. +- Advisory severity remains as provided by KISA (`High`, `Medium`, etc.). Map-level failures include the severity tag for filtering. + +## 5. Fixture & Regression Maintenance + +- Regression fixtures: `src/Concelier/__Tests/StellaOps.Concelier.Connector.Kisa.Tests/Fixtures/kisa-feed.xml` and `kisa-detail.json`. +- Refresh via `UPDATE_KISA_FIXTURES=1 dotnet test src/Concelier/__Tests/StellaOps.Concelier.Connector.Kisa.Tests/StellaOps.Concelier.Connector.Kisa.Tests.csproj`. +- The telemetry regression (`KisaConnectorTests.Telemetry_RecordsMetrics`) will fail if counters/log wiring drifts—treat failures as gating. + +## 6. Known Issues + +- RSS feeds only expose the latest 10 advisories; long outages require replay via archived feeds or manual IDX seeds. +- Detail endpoint occasionally throttles; the connector honours `requestDelay` and reports failures with reason `HttpRequestException`. Consider increasing delay for weekend backfills. +- If `kisa.category` tags suddenly appear as `unknown`, verify KISA has not renamed RSS elements; update the parser fixtures before production rollout. diff --git a/docs/ops/concelier-mirror-operations.md b/docs/ops/concelier-mirror-operations.md index c06da02d..77af169c 100644 --- a/docs/ops/concelier-mirror-operations.md +++ b/docs/ops/concelier-mirror-operations.md @@ -1,238 +1,238 @@ -# Concelier & Excititor Mirror Operations - -This runbook describes how Stella Ops operates the managed mirrors under `*.stella-ops.org`. -It covers Docker Compose and Helm deployment overlays, secret handling for multi-tenant -authn, CDN fronting, and the recurring sync pipeline that keeps mirror bundles current. - -## 1. Prerequisites - -- **Authority access** – client credentials (`client_id` + secret) authorised for - `concelier.mirror.read` and `excititor.mirror.read` scopes. Secrets live outside git. -- **Signed TLS certificates** – wildcard or per-domain (`mirror-primary`, `mirror-community`). - Store them under `deploy/compose/mirror-gateway/tls/` or in Kubernetes secrets. -- **Mirror gateway credentials** – Basic Auth htpasswd files per domain. Generate with - `htpasswd -B`. Operators distribute credentials to downstream consumers. -- **Export artifact source** – read access to the canonical S3 buckets (or rsync share) - that hold `concelier` JSON bundles and `excititor` VEX exports. -- **Persistent volumes** – storage for Concelier job metadata and mirror export trees. - For Helm, provision PVCs (`concelier-mirror-jobs`, `concelier-mirror-exports`, - `excititor-mirror-exports`, `mirror-mongo-data`, `mirror-minio-data`) before rollout. - -### 1.1 Service configuration quick reference - -Concelier.WebService exposes the mirror HTTP endpoints once `CONCELIER__MIRROR__ENABLED=true`. -Key knobs: - -- `CONCELIER__MIRROR__EXPORTROOT` – root folder containing export snapshots (`/mirror/*`). -- `CONCELIER__MIRROR__ACTIVEEXPORTID` – optional explicit export id; otherwise the service auto-falls back to the `latest/` symlink or newest directory. -- `CONCELIER__MIRROR__REQUIREAUTHENTICATION` – default auth requirement; override per domain with `CONCELIER__MIRROR__DOMAINS__{n}__REQUIREAUTHENTICATION`. -- `CONCELIER__MIRROR__MAXINDEXREQUESTSPERHOUR` – budget for `/concelier/exports/index.json`. Domains inherit this value unless they define `__MAXDOWNLOADREQUESTSPERHOUR`. 
-- `CONCELIER__MIRROR__DOMAINS__{n}__ID` – domain identifier matching the exporter manifest; additional keys configure display name and rate budgets. - -> The service honours Stella Ops Authority when `CONCELIER__AUTHORITY__ENABLED=true` and `ALLOWANONYMOUSFALLBACK=false`. Use the bypass CIDR list (`CONCELIER__AUTHORITY__BYPASSNETWORKS__*`) for in-cluster ingress gateways that terminate Basic Auth. Unauthorized requests emit `WWW-Authenticate: Bearer` so downstream automation can detect token failures. - -Mirror responses carry deterministic cache headers: `/index.json` returns `Cache-Control: public, max-age=60`, while per-domain manifests/bundles include `Cache-Control: public, max-age=300, immutable`. Rate limiting surfaces `Retry-After` when quotas are exceeded. - -### 1.2 Mirror connector configuration - -Downstream Concelier instances ingest published bundles using the `StellaOpsMirrorConnector`. Operators running the connector in air‑gapped or limited connectivity environments can tune the following options (environment prefix `CONCELIER__SOURCES__STELLAOPSMIRROR__`): - -- `BASEADDRESS` – absolute mirror root (e.g., `https://mirror-primary.stella-ops.org`). -- `INDEXPATH` – relative path to the mirror index (`/concelier/exports/index.json` by default). -- `DOMAINID` – mirror domain identifier from the index (`primary`, `community`, etc.). -- `HTTPTIMEOUT` – request timeout; raise when mirrors sit behind slow WAN links. -- `SIGNATURE__ENABLED` – require detached JWS verification for `bundle.json`. -- `SIGNATURE__KEYID` / `SIGNATURE__PROVIDER` – expected signing key metadata. -- `SIGNATURE__PUBLICKEYPATH` – PEM fallback used when the mirror key registry is offline. - -The connector keeps a per-export fingerprint (bundle digest + generated-at timestamp) and tracks outstanding document IDs. If a scan is interrupted, the next run resumes parse/map work using the stored fingerprint and pending document lists—no network requests are reissued unless the upstream digest changes. - -## 2. Secret & certificate layout - -### Docker Compose (`deploy/compose/docker-compose.mirror.yaml`) - -- `deploy/compose/env/mirror.env.example` – copy to `.env` and adjust quotas or domain IDs. -- `deploy/compose/mirror-secrets/` – mount read-only into `/run/secrets`. Place: - - `concelier-authority-client` – Authority client secret. - - `excititor-authority-client` (optional) – reserve for future authn. -- `deploy/compose/mirror-gateway/tls/` – PEM-encoded cert/key pairs: - - `mirror-primary.crt`, `mirror-primary.key` - - `mirror-community.crt`, `mirror-community.key` -- `deploy/compose/mirror-gateway/secrets/` – htpasswd files: - - `mirror-primary.htpasswd` - - `mirror-community.htpasswd` - -### Helm (`deploy/helm/stellaops/values-mirror.yaml`) - -Create secrets in the target namespace: - -```bash -kubectl create secret generic concelier-mirror-auth \ - --from-file=concelier-authority-client=concelier-authority-client - -kubectl create secret generic excititor-mirror-auth \ - --from-file=excititor-authority-client=excititor-authority-client - -kubectl create secret tls mirror-gateway-tls \ - --cert=mirror-primary.crt --key=mirror-primary.key - -kubectl create secret generic mirror-gateway-htpasswd \ - --from-file=mirror-primary.htpasswd --from-file=mirror-community.htpasswd -``` - -> Keep Basic Auth lists short-lived (rotate quarterly) and document credential recipients. - -## 3. Deployment - -### 3.1 Docker Compose (edge mirrors, lab validation) - -1. 
`cp deploy/compose/env/mirror.env.example deploy/compose/env/mirror.env` -2. Populate secrets/tls directories as described above. -3. Sync mirror bundles (see §4) into `deploy/compose/mirror-data/…` and ensure they are mounted - on the host path backing the `concelier-exports` and `excititor-exports` volumes. -4. Run the profile validator: `deploy/tools/validate-profiles.sh`. -5. Launch: `docker compose --env-file env/mirror.env -f docker-compose.mirror.yaml up -d`. - -### 3.2 Helm (production mirrors) - -1. Provision PVCs sized for mirror bundles (baseline: 20 GiB per domain). -2. Create secrets/tls config maps (§2). -3. `helm upgrade --install mirror deploy/helm/stellaops -f deploy/helm/stellaops/values-mirror.yaml`. -4. Annotate the `stellaops-mirror-gateway` service with ingress/LoadBalancer metadata required by - your CDN (e.g., AWS load balancer scheme internal + NLB idle timeout). - -## 4. Artifact sync workflow - -Mirrors never generate exports—they ingest signed bundles produced by the Concelier and Excititor -export jobs. Recommended sync pattern: - -### 4.1 Compose host (systemd timer) - -`/usr/local/bin/mirror-sync.sh`: - -```bash -#!/usr/bin/env bash -set -euo pipefail -export AWS_ACCESS_KEY_ID=… -export AWS_SECRET_ACCESS_KEY=… - -aws s3 sync s3://mirror-stellaops/concelier/latest \ - /opt/stellaops/mirror-data/concelier --delete --size-only - -aws s3 sync s3://mirror-stellaops/excititor/latest \ - /opt/stellaops/mirror-data/excititor --delete --size-only -``` - -Schedule with a systemd timer every 5 minutes. The Compose volumes mount `/opt/stellaops/mirror-data/*` -into the containers read-only, matching `CONCELIER__MIRROR__EXPORTROOT=/exports/json` and -`EXCITITOR__ARTIFACTS__FILESYSTEM__ROOT=/exports`. - -### 4.2 Kubernetes (CronJob) - -Create a CronJob running the AWS CLI (or rclone) in the same namespace, writing into the PVCs: - -```yaml -apiVersion: batch/v1 -kind: CronJob -metadata: - name: mirror-sync -spec: - schedule: "*/5 * * * *" - jobTemplate: - spec: - template: - spec: - containers: - - name: sync - image: public.ecr.aws/aws-cli/aws-cli@sha256:5df5f52c29f5e3ba46d0ad9e0e3afc98701c4a0f879400b4c5f80d943b5fadea - command: - - /bin/sh - - -c - - > - aws s3 sync s3://mirror-stellaops/concelier/latest /exports/concelier --delete --size-only && - aws s3 sync s3://mirror-stellaops/excititor/latest /exports/excititor --delete --size-only - volumeMounts: - - name: concelier-exports - mountPath: /exports/concelier - - name: excititor-exports - mountPath: /exports/excititor - envFrom: - - secretRef: - name: mirror-sync-aws - restartPolicy: OnFailure - volumes: - - name: concelier-exports - persistentVolumeClaim: - claimName: concelier-mirror-exports - - name: excititor-exports - persistentVolumeClaim: - claimName: excititor-mirror-exports -``` - -## 5. CDN integration - -1. Point the CDN origin at the mirror gateway (Compose host or Kubernetes LoadBalancer). -2. Honour the response headers emitted by the gateway and Concelier/Excititor: - `Cache-Control: public, max-age=300, immutable` for mirror payloads. -3. Configure origin shields in the CDN to prevent cache stampedes. Recommended TTLs: - - Index (`/concelier/exports/index.json`, `/excititor/mirror/*/index`) → 60 s. - - Bundle/manifest payloads → 300 s. -4. Forward the `Authorization` header—Basic Auth terminates at the gateway. -5. Enforce per-domain rate limits at the CDN (matching gateway budgets) and enable logging - to SIEM for anomaly detection. - -## 6. 
Smoke tests - -After each deployment or sync cycle (temporarily set low budgets if you need to observe 429 responses): - -```bash -# Index with Basic Auth -curl -u $PRIMARY_CREDS https://mirror-primary.stella-ops.org/concelier/exports/index.json | jq 'keys' - -# Mirror manifest signature and cache headers -curl -u $PRIMARY_CREDS -I https://mirror-primary.stella-ops.org/concelier/exports/mirror/primary/manifest.json \ - | tee /tmp/manifest-headers.txt -grep -E '^Cache-Control: ' /tmp/manifest-headers.txt # expect public, max-age=300, immutable - -# Excititor consensus bundle metadata -curl -u $COMMUNITY_CREDS https://mirror-community.stella-ops.org/excititor/mirror/community/index \ - | jq '.exports[].exportKey' - -# Signed bundle + detached JWS (spot check digests) -curl -u $PRIMARY_CREDS https://mirror-primary.stella-ops.org/concelier/exports/mirror/primary/bundle.json.jws \ - -o bundle.json.jws -cosign verify-blob --signature bundle.json.jws --key mirror-key.pub bundle.json - -# Service-level auth check (inside cluster – no gateway credentials) -kubectl exec deploy/stellaops-concelier -- curl -si http://localhost:8443/concelier/exports/mirror/primary/manifest.json \ - | head -n 5 # expect HTTP/1.1 401 with WWW-Authenticate: Bearer - -# Rate limit smoke (repeat quickly; second call should return 429 + Retry-After) -for i in 1 2; do - curl -s -o /dev/null -D - https://mirror-primary.stella-ops.org/concelier/exports/index.json \ - -u $PRIMARY_CREDS | grep -E '^(HTTP/|Retry-After:)' - sleep 1 -done -``` - -Watch the gateway metrics (`nginx_vts` or access logs) for cache hits. In Kubernetes, `kubectl logs deploy/stellaops-mirror-gateway` -should show `X-Cache-Status: HIT/MISS`. - -## 7. Maintenance & rotation - -- **Bundle freshness** – alert if sync job lag exceeds 15 minutes or if `concelier` logs - `Mirror export root is not configured`. -- **Secret rotation** – change Authority client secrets and Basic Auth credentials quarterly. - Update the mounted secrets and restart deployments (`docker compose restart concelier` or - `kubectl rollout restart deploy/stellaops-concelier`). -- **TLS renewal** – reissue certificates, place new files, and reload gateway (`docker compose exec mirror-gateway nginx -s reload`). -- **Quota tuning** – adjust per-domain `MAXDOWNLOADREQUESTSPERHOUR` in `.env` or values file. - Align CDN rate limits and inform downstreams. - -## 8. References - -- Deployment profiles: `deploy/compose/docker-compose.mirror.yaml`, - `deploy/helm/stellaops/values-mirror.yaml` -- Mirror architecture dossiers: `docs/ARCHITECTURE_CONCELIER.md`, - `docs/ARCHITECTURE_EXCITITOR_MIRRORS.md` -- Export bundling: `docs/ARCHITECTURE_DEVOPS.md` §3, `docs/ARCHITECTURE_EXCITITOR.md` §7 +# Concelier & Excititor Mirror Operations + +This runbook describes how Stella Ops operates the managed mirrors under `*.stella-ops.org`. +It covers Docker Compose and Helm deployment overlays, secret handling for multi-tenant +authn, CDN fronting, and the recurring sync pipeline that keeps mirror bundles current. + +## 1. Prerequisites + +- **Authority access** – client credentials (`client_id` + secret) authorised for + `concelier.mirror.read` and `excititor.mirror.read` scopes. Secrets live outside git. +- **Signed TLS certificates** – wildcard or per-domain (`mirror-primary`, `mirror-community`). + Store them under `deploy/compose/mirror-gateway/tls/` or in Kubernetes secrets. +- **Mirror gateway credentials** – Basic Auth htpasswd files per domain. Generate with + `htpasswd -B`. 
Operators distribute credentials to downstream consumers. +- **Export artifact source** – read access to the canonical S3 buckets (or rsync share) + that hold `concelier` JSON bundles and `excititor` VEX exports. +- **Persistent volumes** – storage for Concelier job metadata and mirror export trees. + For Helm, provision PVCs (`concelier-mirror-jobs`, `concelier-mirror-exports`, + `excititor-mirror-exports`, `mirror-mongo-data`, `mirror-minio-data`) before rollout. + +### 1.1 Service configuration quick reference + +Concelier.WebService exposes the mirror HTTP endpoints once `CONCELIER__MIRROR__ENABLED=true`. +Key knobs: + +- `CONCELIER__MIRROR__EXPORTROOT` – root folder containing export snapshots (`/mirror/*`). +- `CONCELIER__MIRROR__ACTIVEEXPORTID` – optional explicit export id; otherwise the service auto-falls back to the `latest/` symlink or newest directory. +- `CONCELIER__MIRROR__REQUIREAUTHENTICATION` – default auth requirement; override per domain with `CONCELIER__MIRROR__DOMAINS__{n}__REQUIREAUTHENTICATION`. +- `CONCELIER__MIRROR__MAXINDEXREQUESTSPERHOUR` – budget for `/concelier/exports/index.json`. Domains inherit this value unless they define `__MAXDOWNLOADREQUESTSPERHOUR`. +- `CONCELIER__MIRROR__DOMAINS__{n}__ID` – domain identifier matching the exporter manifest; additional keys configure display name and rate budgets. + +> The service honours Stella Ops Authority when `CONCELIER__AUTHORITY__ENABLED=true` and `ALLOWANONYMOUSFALLBACK=false`. Use the bypass CIDR list (`CONCELIER__AUTHORITY__BYPASSNETWORKS__*`) for in-cluster ingress gateways that terminate Basic Auth. Unauthorized requests emit `WWW-Authenticate: Bearer` so downstream automation can detect token failures. + +Mirror responses carry deterministic cache headers: `/index.json` returns `Cache-Control: public, max-age=60`, while per-domain manifests/bundles include `Cache-Control: public, max-age=300, immutable`. Rate limiting surfaces `Retry-After` when quotas are exceeded. + +### 1.2 Mirror connector configuration + +Downstream Concelier instances ingest published bundles using the `StellaOpsMirrorConnector`. Operators running the connector in air‑gapped or limited connectivity environments can tune the following options (environment prefix `CONCELIER__SOURCES__STELLAOPSMIRROR__`): + +- `BASEADDRESS` – absolute mirror root (e.g., `https://mirror-primary.stella-ops.org`). +- `INDEXPATH` – relative path to the mirror index (`/concelier/exports/index.json` by default). +- `DOMAINID` – mirror domain identifier from the index (`primary`, `community`, etc.). +- `HTTPTIMEOUT` – request timeout; raise when mirrors sit behind slow WAN links. +- `SIGNATURE__ENABLED` – require detached JWS verification for `bundle.json`. +- `SIGNATURE__KEYID` / `SIGNATURE__PROVIDER` – expected signing key metadata. +- `SIGNATURE__PUBLICKEYPATH` – PEM fallback used when the mirror key registry is offline. + +The connector keeps a per-export fingerprint (bundle digest + generated-at timestamp) and tracks outstanding document IDs. If a scan is interrupted, the next run resumes parse/map work using the stored fingerprint and pending document lists—no network requests are reissued unless the upstream digest changes. + +## 2. Secret & certificate layout + +### Docker Compose (`deploy/compose/docker-compose.mirror.yaml`) + +- `deploy/compose/env/mirror.env.example` – copy to `.env` and adjust quotas or domain IDs. +- `deploy/compose/mirror-secrets/` – mount read-only into `/run/secrets`. 
Place: + - `concelier-authority-client` – Authority client secret. + - `excititor-authority-client` (optional) – reserve for future authn. +- `deploy/compose/mirror-gateway/tls/` – PEM-encoded cert/key pairs: + - `mirror-primary.crt`, `mirror-primary.key` + - `mirror-community.crt`, `mirror-community.key` +- `deploy/compose/mirror-gateway/secrets/` – htpasswd files: + - `mirror-primary.htpasswd` + - `mirror-community.htpasswd` + +### Helm (`deploy/helm/stellaops/values-mirror.yaml`) + +Create secrets in the target namespace: + +```bash +kubectl create secret generic concelier-mirror-auth \ + --from-file=concelier-authority-client=concelier-authority-client + +kubectl create secret generic excititor-mirror-auth \ + --from-file=excititor-authority-client=excititor-authority-client + +kubectl create secret tls mirror-gateway-tls \ + --cert=mirror-primary.crt --key=mirror-primary.key + +kubectl create secret generic mirror-gateway-htpasswd \ + --from-file=mirror-primary.htpasswd --from-file=mirror-community.htpasswd +``` + +> Keep Basic Auth lists short-lived (rotate quarterly) and document credential recipients. + +## 3. Deployment + +### 3.1 Docker Compose (edge mirrors, lab validation) + +1. `cp deploy/compose/env/mirror.env.example deploy/compose/env/mirror.env` +2. Populate secrets/tls directories as described above. +3. Sync mirror bundles (see §4) into `deploy/compose/mirror-data/…` and ensure they are mounted + on the host path backing the `concelier-exports` and `excititor-exports` volumes. +4. Run the profile validator: `deploy/tools/validate-profiles.sh`. +5. Launch: `docker compose --env-file env/mirror.env -f docker-compose.mirror.yaml up -d`. + +### 3.2 Helm (production mirrors) + +1. Provision PVCs sized for mirror bundles (baseline: 20 GiB per domain). +2. Create secrets/tls config maps (§2). +3. `helm upgrade --install mirror deploy/helm/stellaops -f deploy/helm/stellaops/values-mirror.yaml`. +4. Annotate the `stellaops-mirror-gateway` service with ingress/LoadBalancer metadata required by + your CDN (e.g., AWS load balancer scheme internal + NLB idle timeout). + +## 4. Artifact sync workflow + +Mirrors never generate exports—they ingest signed bundles produced by the Concelier and Excititor +export jobs. Recommended sync pattern: + +### 4.1 Compose host (systemd timer) + +`/usr/local/bin/mirror-sync.sh`: + +```bash +#!/usr/bin/env bash +set -euo pipefail +export AWS_ACCESS_KEY_ID=… +export AWS_SECRET_ACCESS_KEY=… + +aws s3 sync s3://mirror-stellaops/concelier/latest \ + /opt/stellaops/mirror-data/concelier --delete --size-only + +aws s3 sync s3://mirror-stellaops/excititor/latest \ + /opt/stellaops/mirror-data/excititor --delete --size-only +``` + +Schedule with a systemd timer every 5 minutes. The Compose volumes mount `/opt/stellaops/mirror-data/*` +into the containers read-only, matching `CONCELIER__MIRROR__EXPORTROOT=/exports/json` and +`EXCITITOR__ARTIFACTS__FILESYSTEM__ROOT=/exports`. 
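+ +A minimal sketch of that timer, assuming the script above is installed as `/usr/local/bin/mirror-sync.sh` (the unit names and destination paths are illustrative, not files shipped with the deployment profiles): + +```bash +# Install a oneshot service plus a 5-minute timer wrapping mirror-sync.sh. +cat > /etc/systemd/system/mirror-sync.service <<'EOF' +[Unit] +Description=Sync Stella Ops mirror bundles + +[Service] +Type=oneshot +ExecStart=/usr/local/bin/mirror-sync.sh +EOF + +cat > /etc/systemd/system/mirror-sync.timer <<'EOF' +[Unit] +Description=Run mirror-sync.service every 5 minutes + +[Timer] +OnBootSec=2min +OnUnitActiveSec=5min + +[Install] +WantedBy=timers.target +EOF + +systemctl daemon-reload +systemctl enable --now mirror-sync.timer +``` + +`systemctl list-timers mirror-sync.timer` confirms the cadence; pair it with the bundle-freshness alert in §7 so a stalled sync pages before downstream consumers see stale bundles. 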
+ +### 4.2 Kubernetes (CronJob) + +Create a CronJob running the AWS CLI (or rclone) in the same namespace, writing into the PVCs: + +```yaml +apiVersion: batch/v1 +kind: CronJob +metadata: + name: mirror-sync +spec: + schedule: "*/5 * * * *" + jobTemplate: + spec: + template: + spec: + containers: + - name: sync + image: public.ecr.aws/aws-cli/aws-cli@sha256:5df5f52c29f5e3ba46d0ad9e0e3afc98701c4a0f879400b4c5f80d943b5fadea + command: + - /bin/sh + - -c + - > + aws s3 sync s3://mirror-stellaops/concelier/latest /exports/concelier --delete --size-only && + aws s3 sync s3://mirror-stellaops/excititor/latest /exports/excititor --delete --size-only + volumeMounts: + - name: concelier-exports + mountPath: /exports/concelier + - name: excititor-exports + mountPath: /exports/excititor + envFrom: + - secretRef: + name: mirror-sync-aws + restartPolicy: OnFailure + volumes: + - name: concelier-exports + persistentVolumeClaim: + claimName: concelier-mirror-exports + - name: excititor-exports + persistentVolumeClaim: + claimName: excititor-mirror-exports +``` + +## 5. CDN integration + +1. Point the CDN origin at the mirror gateway (Compose host or Kubernetes LoadBalancer). +2. Honour the response headers emitted by the gateway and Concelier/Excititor: + `Cache-Control: public, max-age=300, immutable` for mirror payloads. +3. Configure origin shields in the CDN to prevent cache stampedes. Recommended TTLs: + - Index (`/concelier/exports/index.json`, `/excititor/mirror/*/index`) → 60 s. + - Bundle/manifest payloads → 300 s. +4. Forward the `Authorization` header—Basic Auth terminates at the gateway. +5. Enforce per-domain rate limits at the CDN (matching gateway budgets) and enable logging + to SIEM for anomaly detection. + +## 6. Smoke tests + +After each deployment or sync cycle (temporarily set low budgets if you need to observe 429 responses): + +```bash +# Index with Basic Auth +curl -u $PRIMARY_CREDS https://mirror-primary.stella-ops.org/concelier/exports/index.json | jq 'keys' + +# Mirror manifest signature and cache headers +curl -u $PRIMARY_CREDS -I https://mirror-primary.stella-ops.org/concelier/exports/mirror/primary/manifest.json \ + | tee /tmp/manifest-headers.txt +grep -E '^Cache-Control: ' /tmp/manifest-headers.txt # expect public, max-age=300, immutable + +# Excititor consensus bundle metadata +curl -u $COMMUNITY_CREDS https://mirror-community.stella-ops.org/excititor/mirror/community/index \ + | jq '.exports[].exportKey' + +# Signed bundle + detached JWS (spot check digests) +curl -u $PRIMARY_CREDS https://mirror-primary.stella-ops.org/concelier/exports/mirror/primary/bundle.json.jws \ + -o bundle.json.jws +cosign verify-blob --signature bundle.json.jws --key mirror-key.pub bundle.json + +# Service-level auth check (inside cluster – no gateway credentials) +kubectl exec deploy/stellaops-concelier -- curl -si http://localhost:8443/concelier/exports/mirror/primary/manifest.json \ + | head -n 5 # expect HTTP/1.1 401 with WWW-Authenticate: Bearer + +# Rate limit smoke (repeat quickly; second call should return 429 + Retry-After) +for i in 1 2; do + curl -s -o /dev/null -D - https://mirror-primary.stella-ops.org/concelier/exports/index.json \ + -u $PRIMARY_CREDS | grep -E '^(HTTP/|Retry-After:)' + sleep 1 +done +``` + +Watch the gateway metrics (`nginx_vts` or access logs) for cache hits. In Kubernetes, `kubectl logs deploy/stellaops-mirror-gateway` +should show `X-Cache-Status: HIT/MISS`. + +## 7. 
Maintenance & rotation + +- **Bundle freshness** – alert if sync job lag exceeds 15 minutes or if `concelier` logs + `Mirror export root is not configured`. +- **Secret rotation** – change Authority client secrets and Basic Auth credentials quarterly. + Update the mounted secrets and restart deployments (`docker compose restart concelier` or + `kubectl rollout restart deploy/stellaops-concelier`). +- **TLS renewal** – reissue certificates, place new files, and reload gateway (`docker compose exec mirror-gateway nginx -s reload`). +- **Quota tuning** – adjust per-domain `MAXDOWNLOADREQUESTSPERHOUR` in `.env` or values file. + Align CDN rate limits and inform downstreams. + +## 8. References + +- Deployment profiles: `deploy/compose/docker-compose.mirror.yaml`, + `deploy/helm/stellaops/values-mirror.yaml` +- Mirror architecture dossiers: `docs/ARCHITECTURE_CONCELIER.md`, + `docs/ARCHITECTURE_EXCITITOR_MIRRORS.md` +- Export bundling: `docs/ARCHITECTURE_DEVOPS.md` §3, `docs/ARCHITECTURE_EXCITITOR.md` §7 diff --git a/docs/ops/concelier-nkcki-operations.md b/docs/ops/concelier-nkcki-operations.md index 4be0ed72..2559c216 100644 --- a/docs/ops/concelier-nkcki-operations.md +++ b/docs/ops/concelier-nkcki-operations.md @@ -1,48 +1,48 @@ -# NKCKI Connector Operations Guide - -## Overview - -The NKCKI connector ingests JSON bulletin archives from cert.gov.ru, expanding each `*.json.zip` attachment into per-vulnerability DTOs before canonical mapping. The fetch pipeline now supports cache-backed recovery, deterministic pagination, and telemetry suitable for production monitoring. - -## Configuration - -Key options exposed through `concelier:sources:ru-nkcki:http`: - -- `maxBulletinsPerFetch` – limits new bulletin downloads in a single run (default `5`). -- `maxListingPagesPerFetch` – maximum listing pages visited during pagination (default `3`). -- `listingCacheDuration` – minimum interval between listing fetches before falling back to cached artefacts (default `00:10:00`). -- `cacheDirectory` – optional path for persisted bulletin archives used during offline or failure scenarios. -- `requestDelay` – delay inserted between bulletin downloads to respect upstream politeness. - -When operating in offline-first mode, set `cacheDirectory` to a writable path (e.g. `/var/lib/concelier/cache/ru-nkcki`) and pre-populate bulletin archives via the offline kit. - -## Telemetry - -`RuNkckiDiagnostics` emits the following metrics under meter `StellaOps.Concelier.Connector.Ru.Nkcki`: - -- `nkcki.listing.fetch.attempts` / `nkcki.listing.fetch.success` / `nkcki.listing.fetch.failures` -- `nkcki.listing.pages.visited` (histogram, `pages`) -- `nkcki.listing.attachments.discovered` / `nkcki.listing.attachments.new` -- `nkcki.bulletin.fetch.success` / `nkcki.bulletin.fetch.cached` / `nkcki.bulletin.fetch.failures` -- `nkcki.entries.processed` (histogram, `entries`) - -Integrate these counters into standard Concelier observability dashboards to track crawl coverage and cache hit rates. - -## Archive Backfill Strategy - -Bitrix pagination surfaces archives via `?PAGEN_1=n`. The connector now walks up to `maxListingPagesPerFetch` pages, deduplicating bulletin IDs and maintaining a rolling `knownBulletins` window. Backfill strategy: - -1. Enumerate pages from newest to oldest, respecting `maxListingPagesPerFetch` and `listingCacheDuration` to avoid refetch storms. -2. Persist every `*.json.zip` attachment to the configured cache directory. This enables replay when listing access is temporarily blocked. -3. 
During archive replay, `ProcessCachedBulletinsAsync` enqueues missing documents while respecting `maxVulnerabilitiesPerFetch`. -4. For historical HTML-only advisories, collect page URLs and metadata while offline (future work: HTML and PDF extraction pipeline documented in `docs/concelier-connector-research-20251011.md`). - -For large migrations, seed caches with archived zip bundles, then run fetch/parse/map cycles in chronological order to maintain deterministic outputs. - -## Failure Handling - -- Listing failures mark the source state with exponential backoff while attempting cache replay. -- Bulletin fetches fall back to cached copies before surfacing an error. -- Mongo integration tests rely on bundled OpenSSL 1.1 libraries (`tools/openssl/linux-x64`) to keep `Mongo2Go` operational on modern distros. - -Refer to `ru-nkcki` entries in `src/StellaOps.Concelier.Connector.Ru.Nkcki/TASKS.md` for outstanding items. +# NKCKI Connector Operations Guide + +## Overview + +The NKCKI connector ingests JSON bulletin archives from cert.gov.ru, expanding each `*.json.zip` attachment into per-vulnerability DTOs before canonical mapping. The fetch pipeline now supports cache-backed recovery, deterministic pagination, and telemetry suitable for production monitoring. + +## Configuration + +Key options exposed through `concelier:sources:ru-nkcki:http`: + +- `maxBulletinsPerFetch` – limits new bulletin downloads in a single run (default `5`). +- `maxListingPagesPerFetch` – maximum listing pages visited during pagination (default `3`). +- `listingCacheDuration` – minimum interval between listing fetches before falling back to cached artefacts (default `00:10:00`). +- `cacheDirectory` – optional path for persisted bulletin archives used during offline or failure scenarios. +- `requestDelay` – delay inserted between bulletin downloads to respect upstream politeness. + +When operating in offline-first mode, set `cacheDirectory` to a writable path (e.g. `/var/lib/concelier/cache/ru-nkcki`) and pre-populate bulletin archives via the offline kit. + +## Telemetry + +`RuNkckiDiagnostics` emits the following metrics under meter `StellaOps.Concelier.Connector.Ru.Nkcki`: + +- `nkcki.listing.fetch.attempts` / `nkcki.listing.fetch.success` / `nkcki.listing.fetch.failures` +- `nkcki.listing.pages.visited` (histogram, `pages`) +- `nkcki.listing.attachments.discovered` / `nkcki.listing.attachments.new` +- `nkcki.bulletin.fetch.success` / `nkcki.bulletin.fetch.cached` / `nkcki.bulletin.fetch.failures` +- `nkcki.entries.processed` (histogram, `entries`) + +Integrate these counters into standard Concelier observability dashboards to track crawl coverage and cache hit rates. + +## Archive Backfill Strategy + +Bitrix pagination surfaces archives via `?PAGEN_1=n`. The connector now walks up to `maxListingPagesPerFetch` pages, deduplicating bulletin IDs and maintaining a rolling `knownBulletins` window. Backfill strategy: + +1. Enumerate pages from newest to oldest, respecting `maxListingPagesPerFetch` and `listingCacheDuration` to avoid refetch storms. +2. Persist every `*.json.zip` attachment to the configured cache directory. This enables replay when listing access is temporarily blocked. +3. During archive replay, `ProcessCachedBulletinsAsync` enqueues missing documents while respecting `maxVulnerabilitiesPerFetch`. +4. For historical HTML-only advisories, collect page URLs and metadata while offline (future work: HTML and PDF extraction pipeline documented in `docs/concelier-connector-research-20251011.md`). 
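+ +A hedged sketch of the seed-and-replay flow (the `/srv/offline-kit/nkcki` source path and the `source:ru-nkcki:*` job kinds are assumptions; confirm the job kinds registered in your deployment before scripting them): + +```bash +# Pre-seed the offline cache with archived bulletin bundles, then replay +# fetch/parse/map so missing documents are enqueued from the cache. +install -d /var/lib/concelier/cache/ru-nkcki +cp /srv/offline-kit/nkcki/*.json.zip /var/lib/concelier/cache/ru-nkcki/ + +# Job kinds follow the connector-id plus stage pattern used by other connectors. +stella db jobs run source:ru-nkcki:fetch --and-then source:ru-nkcki:parse --and-then source:ru-nkcki:map +``` + +Watch `nkcki.bulletin.fetch.cached` while the chain runs to confirm replay is hitting the cache rather than the live listing. 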
+ +For large migrations, seed caches with archived zip bundles, then run fetch/parse/map cycles in chronological order to maintain deterministic outputs. + +## Failure Handling + +- Listing failures mark the source state with exponential backoff while attempting cache replay. +- Bulletin fetches fall back to cached copies before surfacing an error. +- Mongo integration tests rely on bundled OpenSSL 1.1 libraries (`tools/openssl/linux-x64`) to keep `Mongo2Go` operational on modern distros. + +Refer to `ru-nkcki` entries in `src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Ru.Nkcki/TASKS.md` for outstanding items. diff --git a/docs/ops/concelier-osv-operations.md b/docs/ops/concelier-osv-operations.md index bb035e0c..649f66f4 100644 --- a/docs/ops/concelier-osv-operations.md +++ b/docs/ops/concelier-osv-operations.md @@ -1,24 +1,24 @@ -# Concelier OSV Connector – Operations Notes - -_Last updated: 2025-10-16_ - -The OSV connector ingests advisories from OSV.dev across OSS ecosystems. This note highlights the additional merge/export expectations introduced with the canonical metric fallback work in Sprint 4. - -## 1. Canonical metric fallbacks -- When OSV omits CVSS vectors (common for CVSS v4-only payloads) the mapper now emits a deterministic canonical metric id in the form `osv:severity/` and normalises the advisory severity to the same ``. -- Metric: `osv.map.canonical_metric_fallbacks` (counter) with tags `severity`, `canonical_metric_id`, `ecosystem`, `reason=no_cvss`. Watch this alongside merge parity dashboards to catch spikes where OSV publishes severity-only advisories. -- Merge precedence still prefers GHSA over OSV; the shared severity-based canonical id keeps Merge/export parity deterministic even when only OSV supplies severity data. - -## 2. CWE provenance -- `database_specific.cwe_ids` now populates provenance decision reasons for every mapped weakness. Expect `decisionReason="database_specific.cwe_ids"` on OSV weakness provenance and confirm exporters preserve the value. -- If OSV ever attaches `database_specific.cwe_notes`, the connector will surface the joined note string in `decisionReason` instead of the default marker. - -## 3. Dashboards & alerts -- Extend existing merge dashboards with the new counter: - - Overlay `sum(osv.map.canonical_metric_fallbacks{ecosystem=~".+"})` with Merge severity overrides to confirm fallback advisories are reconciling cleanly. - - Alert when the 1-hour sum exceeds 50 for any ecosystem; baseline volume is currently <5 per day (mostly GHSA mirrors emitting CVSS v4 only). -- Exporters already surface `canonicalMetricId`; no schema change is required, but ORAS/Trivy bundles should be spot-checked after deploying the connector update. - -## 4. Runbook updates -- Fixture parity suites (`osv-ghsa.*`) now assert the fallback id and provenance notes. Regenerate via `dotnet test src/StellaOps.Concelier.Connector.Osv.Tests/StellaOps.Concelier.Connector.Osv.Tests.csproj`. -- When investigating merge severity conflicts, include the fallback counter and confirm OSV advisories carry the expected `osv:severity/` id before raising connector bugs. +# Concelier OSV Connector – Operations Notes + +_Last updated: 2025-10-16_ + +The OSV connector ingests advisories from OSV.dev across OSS ecosystems. This note highlights the additional merge/export expectations introduced with the canonical metric fallback work in Sprint 4. + +## 1. 
Canonical metric fallbacks +- When OSV omits CVSS vectors (common for CVSS v4-only payloads) the mapper now emits a deterministic canonical metric id in the form `osv:severity/` and normalises the advisory severity to the same ``. +- Metric: `osv.map.canonical_metric_fallbacks` (counter) with tags `severity`, `canonical_metric_id`, `ecosystem`, `reason=no_cvss`. Watch this alongside merge parity dashboards to catch spikes where OSV publishes severity-only advisories. +- Merge precedence still prefers GHSA over OSV; the shared severity-based canonical id keeps Merge/export parity deterministic even when only OSV supplies severity data. + +## 2. CWE provenance +- `database_specific.cwe_ids` now populates provenance decision reasons for every mapped weakness. Expect `decisionReason="database_specific.cwe_ids"` on OSV weakness provenance and confirm exporters preserve the value. +- If OSV ever attaches `database_specific.cwe_notes`, the connector will surface the joined note string in `decisionReason` instead of the default marker. + +## 3. Dashboards & alerts +- Extend existing merge dashboards with the new counter: + - Overlay `sum(osv.map.canonical_metric_fallbacks{ecosystem=~".+"})` with Merge severity overrides to confirm fallback advisories are reconciling cleanly. + - Alert when the 1-hour sum exceeds 50 for any ecosystem; baseline volume is currently <5 per day (mostly GHSA mirrors emitting CVSS v4 only). +- Exporters already surface `canonicalMetricId`; no schema change is required, but ORAS/Trivy bundles should be spot-checked after deploying the connector update. + +## 4. Runbook updates +- Fixture parity suites (`osv-ghsa.*`) now assert the fallback id and provenance notes. Regenerate via `dotnet test src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Osv.Tests/StellaOps.Concelier.Connector.Osv.Tests.csproj`. +- When investigating merge severity conflicts, include the fallback counter and confirm OSV advisories carry the expected `osv:severity/` id before raising connector bugs. diff --git a/docs/ops/deployment-upgrade-runbook.md b/docs/ops/deployment-upgrade-runbook.md index 7fd585e2..ba4a3421 100644 --- a/docs/ops/deployment-upgrade-runbook.md +++ b/docs/ops/deployment-upgrade-runbook.md @@ -1,151 +1,151 @@ -# Stella Ops Deployment Upgrade & Rollback Runbook - -_Last updated: 2025-10-26 (Sprint 14 – DEVOPS-OPS-14-003)._ - -This runbook describes how to promote a new release across the supported deployment profiles (Helm and Docker Compose), how to roll back safely, and how to keep channels (`edge`, `stable`, `airgap`) aligned. All steps assume you are working from a clean checkout of the release branch/tag. - ---- - -## 1. Channel overview - -| Channel | Release manifest | Helm values | Compose profile | -|---------|------------------|-------------|-----------------| -| `edge` | `deploy/releases/2025.10-edge.yaml` | `deploy/helm/stellaops/values-dev.yaml` | `deploy/compose/docker-compose.dev.yaml` | -| `stable` | `deploy/releases/2025.09-stable.yaml` | `deploy/helm/stellaops/values-stage.yaml`, `deploy/helm/stellaops/values-prod.yaml` | `deploy/compose/docker-compose.stage.yaml`, `deploy/compose/docker-compose.prod.yaml` | -| `airgap` | `deploy/releases/2025.09-airgap.yaml` | `deploy/helm/stellaops/values-airgap.yaml` | `deploy/compose/docker-compose.airgap.yaml` | - -Infrastructure components (MongoDB, MinIO, RustFS) are pinned in the release manifests and inherited by the deployment profiles. 
Supporting dependencies such as `nats` remain on upstream LTS tags; review `deploy/compose/*.yaml` for the authoritative set. - ---- - -## 2. Pre-flight checklist - -1. **Refresh release manifest** - Pull the latest manifest for the channel you are promoting (`deploy/releases/-.yaml`). - -2. **Align deployment bundles with the manifest** - Run the alignment checker for every profile that should pick up the release. Pass `--ignore-repo nats` to skip auxiliary services. - ```bash - ./deploy/tools/check-channel-alignment.py \ - --release deploy/releases/2025.10-edge.yaml \ - --target deploy/helm/stellaops/values-dev.yaml \ - --target deploy/compose/docker-compose.dev.yaml \ - --ignore-repo nats - ``` - Repeat for other channels (`stable`, `airgap`), substituting the manifest and target files. - -3. **Lint and template profiles** - ```bash - ./deploy/tools/validate-profiles.sh - ``` - -4. **Smoke the Offline Kit debug store (edge/stable only)** - When the release pipeline has generated `out/release/debug/.build-id/**`, mirror the assets into the Offline Kit staging tree: - ```bash - ./ops/offline-kit/mirror_debug_store.py \ - --release-dir out/release \ - --offline-kit-dir out/offline-kit - ``` - Archive the resulting `out/offline-kit/metadata/debug-store.json` alongside the kit bundle. - -5. **Review compatibility matrix** - Confirm MongoDB, MinIO, and RustFS versions in the release manifest match platform SLOs. The default targets are `mongo@sha256:c258…`, `minio@sha256:14ce…`, `rustfs:2025.10.0-edge`. - -6. **Create a rollback bookmark** - Record the current Helm revision (`helm history stellaops -n stellaops`) and compose tag (`git describe --tags`) before applying changes. - ---- - -## 3. Helm upgrade procedure (staging → production) - -1. Switch to the deployment branch and ensure secrets/config maps are current. -2. Apply the upgrade in the staging cluster: - ```bash - helm upgrade stellaops deploy/helm/stellaops \ - -f deploy/helm/stellaops/values-stage.yaml \ - --namespace stellaops \ - --atomic \ - --timeout 15m - ``` -3. Run smoke tests (`scripts/smoke-tests.sh` or environment-specific checks). -4. Promote to production using the prod values file and the same command. -5. Record the new revision number and Git SHA in the change log. - -### Rollback (Helm) - -1. Identify the previous revision: `helm history stellaops -n stellaops`. -2. Execute: - ```bash - helm rollback stellaops \ - --namespace stellaops \ - --wait \ - --timeout 10m - ``` -3. Verify `kubectl get pods` returns healthy workloads; rerun smoke tests. -4. Update the incident/operations log with root cause and rollback details. - ---- - -## 4. Docker Compose upgrade procedure - -1. Update environment files (`deploy/compose/env/*.env.example`) with any new settings and sync secrets to hosts. -2. Pull the tagged repository state corresponding to the release (e.g. `git checkout 2025.09.2` for stable). -3. Apply the upgrade: - ```bash - docker compose \ - --env-file deploy/compose/env/prod.env \ - -f deploy/compose/docker-compose.prod.yaml \ - pull - - docker compose \ - --env-file deploy/compose/env/prod.env \ - -f deploy/compose/docker-compose.prod.yaml \ - up -d - ``` -4. Tail logs for critical services (`docker compose logs -f authority concelier`). -5. Update monitoring dashboards/alerts to confirm normal operation. - -### Rollback (Compose) - -1. Check out the previous release tag (e.g. `git checkout 2025.09.1`). -2. Re-run `docker compose pull` and `docker compose up -d` with that profile. 
Docker will restore the prior digests. -3. If reverting to a known-good snapshot is required, restore volume backups (see `docs/ops/authority-backup-restore.md` and associated service guides). -4. Log the rollback in the operations journal. - ---- - -## 5. Channel promotion workflow - -1. Author or update the channel manifest under `deploy/releases/`. -2. Mirror the new digests into Helm/Compose values and run the alignment script for each profile. -3. Commit the changes with a message that references the release version and channel (e.g. `deploy: promote 2025.10.0-edge`). -4. Publish release notes and update `deploy/releases/README.md` (if applicable). -5. Tag the repository when promoting stable or airgap builds. - ---- - -## 6. Upgrade rehearsal & rollback drill log - -Maintain rehearsal notes in `docs/ops/launch-cutover.md` or the relevant sprint planning document. After each drill capture: - -- Release version tested -- Date/time -- Participants -- Issues encountered & fixes -- Rollback duration (if executed) - -Attach the log to the sprint retro or operational wiki. - -| Date (UTC) | Channel | Outcome | Notes | -|------------|---------|---------|-------| -| 2025-10-26 | Documentation dry-run | Planned | Runbook refreshed; next live drill scheduled for 2025-11 edge → stable promotion. - ---- - -## 7. References - -- `deploy/README.md` – structure and validation workflow for deployment bundles. -- `docs/13_RELEASE_ENGINEERING_PLAYBOOK.md` – release automation and signing pipeline. -- `docs/ARCHITECTURE_DEVOPS.md` – high-level DevOps architecture, SLOs, and compliance requirements. -- `ops/offline-kit/mirror_debug_store.py` – debug-store mirroring helper. -- `deploy/tools/check-channel-alignment.py` – release vs deployment digest alignment checker. +# Stella Ops Deployment Upgrade & Rollback Runbook + +_Last updated: 2025-10-26 (Sprint 14 – DEVOPS-OPS-14-003)._ + +This runbook describes how to promote a new release across the supported deployment profiles (Helm and Docker Compose), how to roll back safely, and how to keep channels (`edge`, `stable`, `airgap`) aligned. All steps assume you are working from a clean checkout of the release branch/tag. + +--- + +## 1. Channel overview + +| Channel | Release manifest | Helm values | Compose profile | +|---------|------------------|-------------|-----------------| +| `edge` | `deploy/releases/2025.10-edge.yaml` | `deploy/helm/stellaops/values-dev.yaml` | `deploy/compose/docker-compose.dev.yaml` | +| `stable` | `deploy/releases/2025.09-stable.yaml` | `deploy/helm/stellaops/values-stage.yaml`, `deploy/helm/stellaops/values-prod.yaml` | `deploy/compose/docker-compose.stage.yaml`, `deploy/compose/docker-compose.prod.yaml` | +| `airgap` | `deploy/releases/2025.09-airgap.yaml` | `deploy/helm/stellaops/values-airgap.yaml` | `deploy/compose/docker-compose.airgap.yaml` | + +Infrastructure components (MongoDB, MinIO, RustFS) are pinned in the release manifests and inherited by the deployment profiles. Supporting dependencies such as `nats` remain on upstream LTS tags; review `deploy/compose/*.yaml` for the authoritative set. + +--- + +## 2. Pre-flight checklist + +1. **Refresh release manifest** + Pull the latest manifest for the channel you are promoting (`deploy/releases/-.yaml`). + +2. **Align deployment bundles with the manifest** + Run the alignment checker for every profile that should pick up the release. Pass `--ignore-repo nats` to skip auxiliary services. 
+ ```bash + ./deploy/tools/check-channel-alignment.py \ + --release deploy/releases/2025.10-edge.yaml \ + --target deploy/helm/stellaops/values-dev.yaml \ + --target deploy/compose/docker-compose.dev.yaml \ + --ignore-repo nats + ``` + Repeat for other channels (`stable`, `airgap`), substituting the manifest and target files. + +3. **Lint and template profiles** + ```bash + ./deploy/tools/validate-profiles.sh + ``` + +4. **Smoke the Offline Kit debug store (edge/stable only)** + When the release pipeline has generated `out/release/debug/.build-id/**`, mirror the assets into the Offline Kit staging tree: + ```bash + ./ops/offline-kit/mirror_debug_store.py \ + --release-dir out/release \ + --offline-kit-dir out/offline-kit + ``` + Archive the resulting `out/offline-kit/metadata/debug-store.json` alongside the kit bundle. + +5. **Review compatibility matrix** + Confirm MongoDB, MinIO, and RustFS versions in the release manifest match platform SLOs. The default targets are `mongo@sha256:c258…`, `minio@sha256:14ce…`, `rustfs:2025.10.0-edge`. + +6. **Create a rollback bookmark** + Record the current Helm revision (`helm history stellaops -n stellaops`) and compose tag (`git describe --tags`) before applying changes. + +--- + +## 3. Helm upgrade procedure (staging → production) + +1. Switch to the deployment branch and ensure secrets/config maps are current. +2. Apply the upgrade in the staging cluster: + ```bash + helm upgrade stellaops deploy/helm/stellaops \ + -f deploy/helm/stellaops/values-stage.yaml \ + --namespace stellaops \ + --atomic \ + --timeout 15m + ``` +3. Run smoke tests (`scripts/smoke-tests.sh` or environment-specific checks). +4. Promote to production using the prod values file and the same command. +5. Record the new revision number and Git SHA in the change log. + +### Rollback (Helm) + +1. Identify the previous revision: `helm history stellaops -n stellaops`. +2. Execute: + ```bash + helm rollback stellaops \ + --namespace stellaops \ + --wait \ + --timeout 10m + ``` +3. Verify `kubectl get pods` returns healthy workloads; rerun smoke tests. +4. Update the incident/operations log with root cause and rollback details. + +--- + +## 4. Docker Compose upgrade procedure + +1. Update environment files (`deploy/compose/env/*.env.example`) with any new settings and sync secrets to hosts. +2. Pull the tagged repository state corresponding to the release (e.g. `git checkout 2025.09.2` for stable). +3. Apply the upgrade: + ```bash + docker compose \ + --env-file deploy/compose/env/prod.env \ + -f deploy/compose/docker-compose.prod.yaml \ + pull + + docker compose \ + --env-file deploy/compose/env/prod.env \ + -f deploy/compose/docker-compose.prod.yaml \ + up -d + ``` +4. Tail logs for critical services (`docker compose logs -f authority concelier`). +5. Update monitoring dashboards/alerts to confirm normal operation. + +### Rollback (Compose) + +1. Check out the previous release tag (e.g. `git checkout 2025.09.1`). +2. Re-run `docker compose pull` and `docker compose up -d` with that profile. Docker will restore the prior digests. +3. If reverting to a known-good snapshot is required, restore volume backups (see `docs/ops/authority-backup-restore.md` and associated service guides). +4. Log the rollback in the operations journal. + +--- + +## 5. Channel promotion workflow + +1. Author or update the channel manifest under `deploy/releases/`. +2. Mirror the new digests into Helm/Compose values and run the alignment script for each profile. +3. 
Commit the changes with a message that references the release version and channel (e.g. `deploy: promote 2025.10.0-edge`). +4. Publish release notes and update `deploy/releases/README.md` (if applicable). +5. Tag the repository when promoting stable or airgap builds. + +--- + +## 6. Upgrade rehearsal & rollback drill log + +Maintain rehearsal notes in `docs/ops/launch-cutover.md` or the relevant sprint planning document. After each drill capture: + +- Release version tested +- Date/time +- Participants +- Issues encountered & fixes +- Rollback duration (if executed) + +Attach the log to the sprint retro or operational wiki. + +| Date (UTC) | Channel | Outcome | Notes | +|------------|---------|---------|-------| +| 2025-10-26 | Documentation dry-run | Planned | Runbook refreshed; next live drill scheduled for 2025-11 edge → stable promotion. + +--- + +## 7. References + +- `deploy/README.md` – structure and validation workflow for deployment bundles. +- `docs/13_RELEASE_ENGINEERING_PLAYBOOK.md` – release automation and signing pipeline. +- `docs/ARCHITECTURE_DEVOPS.md` – high-level DevOps architecture, SLOs, and compliance requirements. +- `ops/offline-kit/mirror_debug_store.py` – debug-store mirroring helper. +- `deploy/tools/check-channel-alignment.py` – release vs deployment digest alignment checker. diff --git a/docs/ops/launch-cutover.md b/docs/ops/launch-cutover.md index fc807975..cf9c5c05 100644 --- a/docs/ops/launch-cutover.md +++ b/docs/ops/launch-cutover.md @@ -1,128 +1,128 @@ -# Launch Cutover Runbook - Stella Ops - -_Document owner: DevOps Guild (2025-10-26)_ -_Scope:_ Full-platform launch from staging to production for release `2025.09.2`. - -## 1. Roles and Communication - -| Role | Primary | Backup | Contact | -| --- | --- | --- | --- | -| Cutover lead | DevOps Guild (on-call engineer) | Platform Ops lead | `#launch-bridge` (Mattermost) | -| Authority stack | Authority Core guild rep | Security guild rep | `#authority` | -| Scanner / Queue | Scanner WebService guild rep | Runtime guild rep | `#scanner` | -| Storage | Mongo/MinIO operators | Backup DB admin | Pager escalation | -| Observability | Telemetry guild rep | SRE on-call | `#telemetry` | -| Approvals | Product owner + CTO | DevOps lead | Approval recorded in change ticket | - -Set up a bridge call 30 minutes before start and keep `#launch-bridge` updated every 10 minutes. - -## 2. Timeline Overview (UTC) - -| Time | Activity | Owner | -| --- | --- | --- | -| T-24h | Change ticket approved, prod secrets verified, offline kit build status checked (`DEVOPS-OFFLINE-18-005`). | DevOps lead | -| T-12h | Run `deploy/tools/validate-profiles.sh`; capture logs in ticket. | DevOps engineer | -| T-6h | Freeze non-launch deployments; notify guild leads. | Product owner | -| T-2h | Execute rehearsal in staging (Section 3) using `values-stage.yaml` to verify scripts. | DevOps + module reps | -| T-30m | Final go/no-go with guild leads; confirm monitoring dashboards green. | Cutover lead | -| T0 | Execute production cutover steps (Section 4). | Cutover team | -| T+45m | Smoke tests complete (Section 5); announce success or trigger rollback. | Cutover lead | -| T+4h | Post-cutover metrics review, notify stakeholders, close ticket. | DevOps + product owner | - -## 3. Rehearsal (Staging) Checklist - -1. `docker network create stellaops_frontdoor || true` (if not present on staging jump host). -2. Run `deploy/tools/validate-profiles.sh` and archive output. -3. 
Apply staging secrets (`kubectl apply -f secrets/stage/*.yaml` or `helm secrets upgrade`) ensuring `stellaops-stage` credentials align with `values-stage.yaml`. -4. Perform `helm upgrade stellaops deploy/helm/stellaops -f deploy/helm/stellaops/values-stage.yaml` in staging cluster. -5. Verify health endpoints: `curl https://authority.stage.../healthz`, `curl https://scanner.stage.../healthz`. -6. Execute smoke CLI: `stellaops-cli scan submit --profile staging --sbom samples/sbom/demo.json` and confirm report status in UI. -7. Document total wall time and any deviations in the rehearsal log. - -Rehearsal must complete without manual interventions before proceeding to production. - -## 4. Production Cutover Steps - -### 4.1 Pre-flight -- Confirm production secrets in the appropriate secret store (`stellaops-prod-core`, `stellaops-prod-mongo`, `stellaops-prod-minio`, `stellaops-prod-notify`) contain the keys referenced in `values-prod.yaml`. -- Ensure the external reverse proxy network exists: `docker network create stellaops_frontdoor || true` on each compose host. -- Back up current configuration and data: - - Mongo snapshot: `mongodump --uri "$MONGO_BACKUP_URI" --out /backups/launch-$(date -Iseconds)`. - - MinIO policy export: `mc mirror --overwrite minio/stellaops minio-backup/stellaops-$(date +%Y%m%d%H%M)`. - -### 4.2 Apply Updates (Compose) -1. On each compose node, pull updated images for release `2025.09.2`: - ```bash - docker compose --env-file prod.env -f deploy/compose/docker-compose.prod.yaml pull - ``` -2. Deploy changes: - ```bash - docker compose --env-file prod.env -f deploy/compose/docker-compose.prod.yaml up -d - ``` -3. Confirm containers healthy via `docker compose ps` and `docker logs --tail 50`. - -### 4.3 Apply Updates (Helm/Kubernetes) -If using Kubernetes, perform: -```bash -helm upgrade stellaops deploy/helm/stellaops -f deploy/helm/stellaops/values-prod.yaml --atomic --timeout 15m -``` -Monitor rollout with `kubectl get pods -n stellaops --watch` and `kubectl rollout status deployment/`. - -### 4.4 Configuration Validation -- Verify Authority issuer metadata: `curl https://authority.prod.../.well-known/openid-configuration`. -- Validate Signer DSSE endpoint: `stellaops-cli signer verify --base-url https://signer.prod... --bundle samples/dsse/demo.json`. -- Check Scanner queue connectivity: `docker exec stellaops-scanner-web dotnet StellaOps.Scanner.WebService.dll health queue` (returns success). -- Ensure Notify (legacy) still accessible while Notifier migration pending. - -## 5. Smoke Tests - -| Test | Command / Action | Expected Result | -| --- | --- | --- | -| API health | `curl https://scanner.prod.../healthz` | HTTP 200 with `status":"Healthy"` | -| Scan submit | `stellaops-cli scan submit --profile prod --sbom samples/sbom/demo.json` | Scan completes < 5 minutes; report accessible with signed DSSE | -| Runtime event ingest | Post sample event from Zastava observer fixture | `/runtime/events` responds 202 Accepted; record visible in Mongo `runtime_events` | -| Signing | `stellaops-cli signer sign --bundle demo.json` | Returns DSSE with matching SHA256 and signer metadata | -| Attestor verify | `stellaops-cli attestor verify --uuid ` | Verification result `ok=true` | -| Web UI | Manual login, verify dashboards render and latency within budget | UI loads under 2 seconds; policy views consistent | - -Log results in the change ticket with timestamps and screenshots where applicable. - -## 6. Rollback Procedure - -1. 
Assess failure scope; if systemic, initiate rollback immediately while preserving logs/artifacts. -2. For Compose: - ```bash - docker compose --env-file prod.env -f deploy/compose/docker-compose.prod.yaml down - docker compose --env-file stage.env -f deploy/compose/docker-compose.stage.yaml up -d - ``` -3. For Helm: - ```bash - helm rollback stellaops --namespace stellaops - ``` -4. Restore Mongo snapshot if data inconsistency detected: `mongorestore --uri "$MONGO_BACKUP_URI" --drop /backups/launch-`. -5. Restore MinIO mirror if required: `mc mirror minio-backup/stellaops- minio/stellaops`. -6. Notify stakeholders of rollback and capture root cause notes in incident ticket. - -## 7. Post-cutover Actions - -- Keep heightened monitoring for 4 hours post cutover; track latency, error rates, and queue depth. -- Confirm audit trails: Authority tokens issued, Scanner events recorded, Attestor submissions stored. -- Update `docs/ops/launch-readiness.md` if any new gaps or follow-ups discovered. -- Schedule retrospective within 48 hours; include DevOps, module guilds, and product owner. - -## 8. Approval Matrix - -| Step | Required Approvers | Record Location | -| --- | --- | --- | -| Production deployment plan | CTO + DevOps lead | Change ticket comment | -| Cutover start (T0) | DevOps lead + module reps | `#launch-bridge` summary | -| Post-smoke success | DevOps lead + product owner | Change ticket closure | -| Rollback (if invoked) | DevOps lead + CTO | Incident ticket | - -Retain all approvals and logs for audit. Update this runbook after each execution to record actual timings and lessons learned. - -## 9. Rehearsal Log - -| Date (UTC) | What We Exercised | Outcome | Follow-up | -| --- | --- | --- | --- | -| 2025-10-26 | Dry-run of compose/Helm validation via `deploy/tools/validate-profiles.sh` (dev/stage/prod/airgap/mirror). Network creation simulated (`docker network create stellaops_frontdoor` planned) and stage CLI submission reviewed. | Validation script succeeded; all profiles templated cleanly. Stage deployment apply deferred because no staging cluster is accessible from the current environment. | Schedule full stage rehearsal once staging cluster credentials are available; reuse this log section to capture timings. | +# Launch Cutover Runbook - Stella Ops + +_Document owner: DevOps Guild (2025-10-26)_ +_Scope:_ Full-platform launch from staging to production for release `2025.09.2`. + +## 1. Roles and Communication + +| Role | Primary | Backup | Contact | +| --- | --- | --- | --- | +| Cutover lead | DevOps Guild (on-call engineer) | Platform Ops lead | `#launch-bridge` (Mattermost) | +| Authority stack | Authority Core guild rep | Security guild rep | `#authority` | +| Scanner / Queue | Scanner WebService guild rep | Runtime guild rep | `#scanner` | +| Storage | Mongo/MinIO operators | Backup DB admin | Pager escalation | +| Observability | Telemetry guild rep | SRE on-call | `#telemetry` | +| Approvals | Product owner + CTO | DevOps lead | Approval recorded in change ticket | + +Set up a bridge call 30 minutes before start and keep `#launch-bridge` updated every 10 minutes. + +## 2. Timeline Overview (UTC) + +| Time | Activity | Owner | +| --- | --- | --- | +| T-24h | Change ticket approved, prod secrets verified, offline kit build status checked (`DEVOPS-OFFLINE-18-005`). | DevOps lead | +| T-12h | Run `deploy/tools/validate-profiles.sh`; capture logs in ticket. | DevOps engineer | +| T-6h | Freeze non-launch deployments; notify guild leads. 
| Product owner | +| T-2h | Execute rehearsal in staging (Section 3) using `values-stage.yaml` to verify scripts. | DevOps + module reps | +| T-30m | Final go/no-go with guild leads; confirm monitoring dashboards green. | Cutover lead | +| T0 | Execute production cutover steps (Section 4). | Cutover team | +| T+45m | Smoke tests complete (Section 5); announce success or trigger rollback. | Cutover lead | +| T+4h | Post-cutover metrics review, notify stakeholders, close ticket. | DevOps + product owner | + +## 3. Rehearsal (Staging) Checklist + +1. `docker network create stellaops_frontdoor || true` (if not present on staging jump host). +2. Run `deploy/tools/validate-profiles.sh` and archive output. +3. Apply staging secrets (`kubectl apply -f secrets/stage/*.yaml` or `helm secrets upgrade`) ensuring `stellaops-stage` credentials align with `values-stage.yaml`. +4. Perform `helm upgrade stellaops deploy/helm/stellaops -f deploy/helm/stellaops/values-stage.yaml` in staging cluster. +5. Verify health endpoints: `curl https://authority.stage.../healthz`, `curl https://scanner.stage.../healthz`. +6. Execute smoke CLI: `stellaops-cli scan submit --profile staging --sbom samples/sbom/demo.json` and confirm report status in UI. +7. Document total wall time and any deviations in the rehearsal log. + +Rehearsal must complete without manual interventions before proceeding to production. + +## 4. Production Cutover Steps + +### 4.1 Pre-flight +- Confirm production secrets in the appropriate secret store (`stellaops-prod-core`, `stellaops-prod-mongo`, `stellaops-prod-minio`, `stellaops-prod-notify`) contain the keys referenced in `values-prod.yaml`. +- Ensure the external reverse proxy network exists: `docker network create stellaops_frontdoor || true` on each compose host. +- Back up current configuration and data: + - Mongo snapshot: `mongodump --uri "$MONGO_BACKUP_URI" --out /backups/launch-$(date -Iseconds)`. + - MinIO policy export: `mc mirror --overwrite minio/stellaops minio-backup/stellaops-$(date +%Y%m%d%H%M)`. + +### 4.2 Apply Updates (Compose) +1. On each compose node, pull updated images for release `2025.09.2`: + ```bash + docker compose --env-file prod.env -f deploy/compose/docker-compose.prod.yaml pull + ``` +2. Deploy changes: + ```bash + docker compose --env-file prod.env -f deploy/compose/docker-compose.prod.yaml up -d + ``` +3. Confirm containers healthy via `docker compose ps` and `docker logs --tail 50`. + +### 4.3 Apply Updates (Helm/Kubernetes) +If using Kubernetes, perform: +```bash +helm upgrade stellaops deploy/helm/stellaops -f deploy/helm/stellaops/values-prod.yaml --atomic --timeout 15m +``` +Monitor rollout with `kubectl get pods -n stellaops --watch` and `kubectl rollout status deployment/`. + +### 4.4 Configuration Validation +- Verify Authority issuer metadata: `curl https://authority.prod.../.well-known/openid-configuration`. +- Validate Signer DSSE endpoint: `stellaops-cli signer verify --base-url https://signer.prod... --bundle samples/dsse/demo.json`. +- Check Scanner queue connectivity: `docker exec stellaops-scanner-web dotnet StellaOps.Scanner.WebService.dll health queue` (returns success). +- Ensure Notify (legacy) still accessible while Notifier migration pending. + +## 5. 
Smoke Tests
+
+| Test | Command / Action | Expected Result |
+| --- | --- | --- |
+| API health | `curl https://scanner.prod.../healthz` | HTTP 200 with `"status":"Healthy"` |
+| Scan submit | `stellaops-cli scan submit --profile prod --sbom samples/sbom/demo.json` | Scan completes < 5 minutes; report accessible with signed DSSE |
+| Runtime event ingest | Post sample event from Zastava observer fixture | `/runtime/events` responds 202 Accepted; record visible in Mongo `runtime_events` |
+| Signing | `stellaops-cli signer sign --bundle demo.json` | Returns DSSE with matching SHA256 and signer metadata |
+| Attestor verify | `stellaops-cli attestor verify --uuid <uuid>` | Verification result `ok=true` |
+| Web UI | Manual login, verify dashboards render and latency within budget | UI loads under 2 seconds; policy views consistent |
+
+Log results in the change ticket with timestamps and screenshots where applicable.
+
+## 6. Rollback Procedure
+
+1. Assess failure scope; if systemic, initiate rollback immediately while preserving logs/artifacts.
+2. For Compose:
+   ```bash
+   docker compose --env-file prod.env -f deploy/compose/docker-compose.prod.yaml down
+   docker compose --env-file stage.env -f deploy/compose/docker-compose.stage.yaml up -d
+   ```
+3. For Helm:
+   ```bash
+   helm rollback stellaops --namespace stellaops
+   ```
+4. Restore Mongo snapshot if data inconsistency detected: `mongorestore --uri "$MONGO_BACKUP_URI" --drop /backups/launch-<timestamp>`.
+5. Restore MinIO mirror if required: `mc mirror minio-backup/stellaops-<timestamp> minio/stellaops`.
+6. Notify stakeholders of rollback and capture root cause notes in incident ticket.
+
+## 7. Post-cutover Actions
+
+- Keep heightened monitoring for 4 hours post cutover; track latency, error rates, and queue depth.
+- Confirm audit trails: Authority tokens issued, Scanner events recorded, Attestor submissions stored.
+- Update `docs/ops/launch-readiness.md` if any new gaps or follow-ups discovered.
+- Schedule retrospective within 48 hours; include DevOps, module guilds, and product owner.
+
+## 8. Approval Matrix
+
+| Step | Required Approvers | Record Location |
+| --- | --- | --- |
+| Production deployment plan | CTO + DevOps lead | Change ticket comment |
+| Cutover start (T0) | DevOps lead + module reps | `#launch-bridge` summary |
+| Post-smoke success | DevOps lead + product owner | Change ticket closure |
+| Rollback (if invoked) | DevOps lead + CTO | Incident ticket |
+
+Retain all approvals and logs for audit. Update this runbook after each execution to record actual timings and lessons learned.
+
+## 9. Rehearsal Log
+
+| Date (UTC) | What We Exercised | Outcome | Follow-up |
+| --- | --- | --- | --- |
+| 2025-10-26 | Dry-run of compose/Helm validation via `deploy/tools/validate-profiles.sh` (dev/stage/prod/airgap/mirror). Network creation simulated (`docker network create stellaops_frontdoor` planned) and stage CLI submission reviewed. | Validation script succeeded; all profiles templated cleanly. Stage deployment apply deferred because no staging cluster is accessible from the current environment. | Schedule full stage rehearsal once staging cluster credentials are available; reuse this log section to capture timings.
| diff --git a/docs/ops/launch-readiness.md b/docs/ops/launch-readiness.md index 61ca387f..8dab5aeb 100644 --- a/docs/ops/launch-readiness.md +++ b/docs/ops/launch-readiness.md @@ -1,49 +1,49 @@ -# Launch Readiness Record - Stella Ops - -_Updated: 2025-10-26 (UTC)_ - -This document captures production launch sign-offs, deployment readiness checkpoints, and any open risks that must be tracked before GA cutover. - -## 1. Sign-off Summary - -| Module / Service | Guild / Point of Contact | Evidence (Task or Runbook) | Status | Timestamp (UTC) | Notes | -| --- | --- | --- | --- | --- | --- | -| Authority (Issuer) | Authority Core Guild | `AUTH-AOC-19-001` - scope issuance & configuration complete (DONE 2025-10-26) | READY | 2025-10-26T14:05Z | Tenant scope propagation follow-up (`AUTH-AOC-19-002`) tracked in gaps section. | -| Signer | Signer Guild | `SIGNER-API-11-101` / `SIGNER-REF-11-102` / `SIGNER-QUOTA-11-103` (DONE 2025-10-21) | READY | 2025-10-26T14:07Z | DSSE signing, referrer verification, and quota enforcement validated in CI. | -| Attestor | Attestor Guild | `ATTESTOR-API-11-201` / `ATTESTOR-VERIFY-11-202` / `ATTESTOR-OBS-11-203` (DONE 2025-10-19) | READY | 2025-10-26T14:10Z | Rekor submission/verification pipeline green; telemetry pack published. | -| Scanner Web + Worker | Scanner WebService Guild | `SCANNER-WEB-09-10x`, `SCANNER-RUNTIME-12-30x` (DONE 2025-10-18 -> 2025-10-24) | READY* | 2025-10-26T14:20Z | Orchestrator envelope work (`SCANNER-EVENTS-16-301/302`) still open; see gaps. | -| Concelier Core & Connectors | Concelier Core / Ops Guild | Ops runbook sign-off in `docs/ops/concelier-conflict-resolution.md` (2025-10-16) | READY | 2025-10-26T14:25Z | Conflict resolution & connector coverage accepted; Mongo schema hardening pending (see gaps). | -| Excititor API | Excititor Core Guild | Wave 0 connector ingest sign-offs (EXECPLAN.Section Wave 0) | READY | 2025-10-26T14:28Z | VEX linkset publishing complete for launch datasets. | -| Notify Web (legacy) | Notify Guild | Existing stack carried forward; Notifier program tracked separately (Sprint 38-40) | PENDING | 2025-10-26T14:32Z | Legacy notify web remains operational; migration to Notifier blocked on `SCANNER-EVENTS-16-301`. | -| Web UI | UI Guild | Stable build `registry.stella-ops.org/.../web-ui@sha256:10d9248...` deployed in stage and smoke-tested | READY | 2025-10-26T14:35Z | Policy editor GA items (Sprint 20) outside launch scope. | -| DevOps / Release | DevOps Guild | `deploy/tools/validate-profiles.sh` run (2025-10-26) covering dev/stage/prod/airgap/mirror | READY | 2025-10-26T15:02Z | Compose/Helm lint + docker compose config validated; see Section 2 for details. | -| Offline Kit | Offline Kit Guild | `DEVOPS-OFFLINE-18-004` (Go analyzer) and `DEVOPS-OFFLINE-18-005` (Python analyzer) complete; debug-store mirror pending (`DEVOPS-OFFLINE-17-004`). | PENDING | 2025-10-26T15:05Z | Awaiting release debug artefacts to finalise `DEVOPS-OFFLINE-17-004`; tracked in Section 3. | - -_\* READY with caveat - remaining work noted in Section 3._ - -## 2. Deployment Readiness Checklist - -- **Production profiles committed:** `deploy/compose/docker-compose.prod.yaml` and `deploy/helm/stellaops/values-prod.yaml` added with front-door network hand-off and secret references for Mongo/MinIO/core services. -- **Secrets placeholders documented:** `deploy/compose/env/prod.env.example` enumerates required credentials (`MONGO_INITDB_ROOT_PASSWORD`, `MINIO_ROOT_PASSWORD`, Redis/NATS endpoints, `FRONTDOOR_NETWORK`). 
Helm values reference Kubernetes secrets (`stellaops-prod-core`, `stellaops-prod-mongo`, `stellaops-prod-minio`, `stellaops-prod-notify`). -- **Static validation executed:** `deploy/tools/validate-profiles.sh` run on 2025-10-26 (docker compose config + helm lint/template) with all profiles passing. -- **Ingress model defined:** Production compose profile introduces external `frontdoor` network; README updated with creation instructions and scope of externally reachable services. -- **Observability hooks:** Authority/Signer/Attestor telemetry packs verified; scanner runtime build-id metrics landed (`SCANNER-RUNTIME-17-401`). Grafana dashboards referenced in component runbooks. -- **Rollback assets:** Stage Compose profile remains aligned (`docker-compose.stage.yaml`), enabling rehearsals before prod cutover; release manifests (`deploy/releases/2025.09-stable.yaml`) map digests for reproducible rollback. -- **Rehearsal status:** 2025-10-26 validation dry-run executed (`deploy/tools/validate-profiles.sh` across dev/stage/prod/airgap/mirror). Full stage Helm rollout pending access to the managed staging cluster; target to complete once credentials are provisioned. - -## 3. Outstanding Gaps & Follow-ups - -| Item | Owner | Tracking Ref | Target / Next Step | Impact | -| --- | --- | --- | --- | --- | -| Tenant scope propagation and audit coverage | Authority Core Guild | `AUTH-AOC-19-002` (DOING 2025-10-26) | Land enforcement + audit fixtures by Sprint 19 freeze | Medium - required for multi-tenant GA but does not block initial cutover if tenants scoped manually. | -| Orchestrator event envelopes + Notifier handshake | Scanner WebService Guild | `SCANNER-EVENTS-16-301` (BLOCKED), `SCANNER-EVENTS-16-302` (DOING) | Coordinate with Gateway/Notifier owners on preview package replacement or binding redirects; rerun `dotnet test` once patch lands and refresh schema docs. Share envelope samples in `docs/events/` after tests pass. | High — gating Notifier migration; legacy notify path remains functional meanwhile. | -| Offline Kit Python analyzer bundle | Offline Kit Guild + Scanner Guild | `DEVOPS-OFFLINE-18-005` (DONE 2025-10-26) | Monitor for follow-up manifest updates and rerun smoke script when analyzers change. | Medium - ensures language analyzer coverage stays current for offline installs. | -| Offline Kit debug store mirror | Offline Kit Guild + DevOps Guild | `DEVOPS-OFFLINE-17-004` (BLOCKED 2025-10-26) | Release pipeline must publish `out/release/debug` artefacts; once available, run `mirror_debug_store.py` and commit `metadata/debug-store.json`. | Low - symbol lookup remains accessible from staging assets but required before next Offline Kit tag. | -| Mongo schema validators for advisory ingestion | Concelier Storage Guild | `CONCELIER-STORE-AOC-19-001` (TODO) | Finalize JSON schema + migration toggles; coordinate with Ops for rollout window | Low - current validation handled in app layer; schema guard adds defense-in-depth. | -| Authority plugin telemetry alignment | Security Guild | `SEC2.PLG`, `SEC3.PLG`, `SEC5.PLG` (BLOCKED pending AUTH DPoP/MTLS tasks) | Resume once upstream auth surfacing stabilises | Low - plugin remains optional; launch uses default Authority configuration. | - -## 4. Approvals & Distribution - -- Record shared in `#launch-readiness` (Mattermost) 2025-10-26 15:15 UTC with DevOps + Guild leads for acknowledgement. -- Updates to this document require dual sign-off from DevOps Guild (owner) and impacted module guild lead; retain change log via Git history. 
-- Cutover rehearsal and rollback drills are tracked separately in `docs/ops/launch-cutover.md` (see associated Task `DEVOPS-LAUNCH-18-001`). *** End Patch +# Launch Readiness Record - Stella Ops + +_Updated: 2025-10-26 (UTC)_ + +This document captures production launch sign-offs, deployment readiness checkpoints, and any open risks that must be tracked before GA cutover. + +## 1. Sign-off Summary + +| Module / Service | Guild / Point of Contact | Evidence (Task or Runbook) | Status | Timestamp (UTC) | Notes | +| --- | --- | --- | --- | --- | --- | +| Authority (Issuer) | Authority Core Guild | `AUTH-AOC-19-001` - scope issuance & configuration complete (DONE 2025-10-26) | READY | 2025-10-26T14:05Z | Tenant scope propagation follow-up (`AUTH-AOC-19-002`) tracked in gaps section. | +| Signer | Signer Guild | `SIGNER-API-11-101` / `SIGNER-REF-11-102` / `SIGNER-QUOTA-11-103` (DONE 2025-10-21) | READY | 2025-10-26T14:07Z | DSSE signing, referrer verification, and quota enforcement validated in CI. | +| Attestor | Attestor Guild | `ATTESTOR-API-11-201` / `ATTESTOR-VERIFY-11-202` / `ATTESTOR-OBS-11-203` (DONE 2025-10-19) | READY | 2025-10-26T14:10Z | Rekor submission/verification pipeline green; telemetry pack published. | +| Scanner Web + Worker | Scanner WebService Guild | `SCANNER-WEB-09-10x`, `SCANNER-RUNTIME-12-30x` (DONE 2025-10-18 -> 2025-10-24) | READY* | 2025-10-26T14:20Z | Orchestrator envelope work (`SCANNER-EVENTS-16-301/302`) still open; see gaps. | +| Concelier Core & Connectors | Concelier Core / Ops Guild | Ops runbook sign-off in `docs/ops/concelier-conflict-resolution.md` (2025-10-16) | READY | 2025-10-26T14:25Z | Conflict resolution & connector coverage accepted; Mongo schema hardening pending (see gaps). | +| Excititor API | Excititor Core Guild | Wave 0 connector ingest sign-offs (EXECPLAN.Section Wave 0) | READY | 2025-10-26T14:28Z | VEX linkset publishing complete for launch datasets. | +| Notify Web (legacy) | Notify Guild | Existing stack carried forward; Notifier program tracked separately (Sprint 38-40) | PENDING | 2025-10-26T14:32Z | Legacy notify web remains operational; migration to Notifier blocked on `SCANNER-EVENTS-16-301`. | +| Web UI | UI Guild | Stable build `registry.stella-ops.org/.../web-ui@sha256:10d9248...` deployed in stage and smoke-tested | READY | 2025-10-26T14:35Z | Policy editor GA items (Sprint 20) outside launch scope. | +| DevOps / Release | DevOps Guild | `deploy/tools/validate-profiles.sh` run (2025-10-26) covering dev/stage/prod/airgap/mirror | READY | 2025-10-26T15:02Z | Compose/Helm lint + docker compose config validated; see Section 2 for details. | +| Offline Kit | Offline Kit Guild | `DEVOPS-OFFLINE-18-004` (Go analyzer) and `DEVOPS-OFFLINE-18-005` (Python analyzer) complete; debug-store mirror pending (`DEVOPS-OFFLINE-17-004`). | PENDING | 2025-10-26T15:05Z | Awaiting release debug artefacts to finalise `DEVOPS-OFFLINE-17-004`; tracked in Section 3. | + +_\* READY with caveat - remaining work noted in Section 3._ + +## 2. Deployment Readiness Checklist + +- **Production profiles committed:** `deploy/compose/docker-compose.prod.yaml` and `deploy/helm/stellaops/values-prod.yaml` added with front-door network hand-off and secret references for Mongo/MinIO/core services. +- **Secrets placeholders documented:** `deploy/compose/env/prod.env.example` enumerates required credentials (`MONGO_INITDB_ROOT_PASSWORD`, `MINIO_ROOT_PASSWORD`, Redis/NATS endpoints, `FRONTDOOR_NETWORK`). 
Helm values reference Kubernetes secrets (`stellaops-prod-core`, `stellaops-prod-mongo`, `stellaops-prod-minio`, `stellaops-prod-notify`). +- **Static validation executed:** `deploy/tools/validate-profiles.sh` run on 2025-10-26 (docker compose config + helm lint/template) with all profiles passing. +- **Ingress model defined:** Production compose profile introduces external `frontdoor` network; README updated with creation instructions and scope of externally reachable services. +- **Observability hooks:** Authority/Signer/Attestor telemetry packs verified; scanner runtime build-id metrics landed (`SCANNER-RUNTIME-17-401`). Grafana dashboards referenced in component runbooks. +- **Rollback assets:** Stage Compose profile remains aligned (`docker-compose.stage.yaml`), enabling rehearsals before prod cutover; release manifests (`deploy/releases/2025.09-stable.yaml`) map digests for reproducible rollback. +- **Rehearsal status:** 2025-10-26 validation dry-run executed (`deploy/tools/validate-profiles.sh` across dev/stage/prod/airgap/mirror). Full stage Helm rollout pending access to the managed staging cluster; target to complete once credentials are provisioned. + +## 3. Outstanding Gaps & Follow-ups + +| Item | Owner | Tracking Ref | Target / Next Step | Impact | +| --- | --- | --- | --- | --- | +| Tenant scope propagation and audit coverage | Authority Core Guild | `AUTH-AOC-19-002` (DOING 2025-10-26) | Land enforcement + audit fixtures by Sprint 19 freeze | Medium - required for multi-tenant GA but does not block initial cutover if tenants scoped manually. | +| Orchestrator event envelopes + Notifier handshake | Scanner WebService Guild | `SCANNER-EVENTS-16-301` (BLOCKED), `SCANNER-EVENTS-16-302` (DOING) | Coordinate with Gateway/Notifier owners on preview package replacement or binding redirects; rerun `dotnet test` once patch lands and refresh schema docs. Share envelope samples in `docs/events/` after tests pass. | High — gating Notifier migration; legacy notify path remains functional meanwhile. | +| Offline Kit Python analyzer bundle | Offline Kit Guild + Scanner Guild | `DEVOPS-OFFLINE-18-005` (DONE 2025-10-26) | Monitor for follow-up manifest updates and rerun smoke script when analyzers change. | Medium - ensures language analyzer coverage stays current for offline installs. | +| Offline Kit debug store mirror | Offline Kit Guild + DevOps Guild | `DEVOPS-OFFLINE-17-004` (BLOCKED 2025-10-26) | Release pipeline must publish `out/release/debug` artefacts; once available, run `mirror_debug_store.py` and commit `metadata/debug-store.json`. | Low - symbol lookup remains accessible from staging assets but required before next Offline Kit tag. | +| Mongo schema validators for advisory ingestion | Concelier Storage Guild | `CONCELIER-STORE-AOC-19-001` (TODO) | Finalize JSON schema + migration toggles; coordinate with Ops for rollout window | Low - current validation handled in app layer; schema guard adds defense-in-depth. | +| Authority plugin telemetry alignment | Security Guild | `SEC2.PLG`, `SEC3.PLG`, `SEC5.PLG` (BLOCKED pending AUTH DPoP/MTLS tasks) | Resume once upstream auth surfacing stabilises | Low - plugin remains optional; launch uses default Authority configuration. | + +## 4. Approvals & Distribution + +- Record shared in `#launch-readiness` (Mattermost) 2025-10-26 15:15 UTC with DevOps + Guild leads for acknowledgement. +- Updates to this document require dual sign-off from DevOps Guild (owner) and impacted module guild lead; retain change log via Git history. 
+- Cutover rehearsal and rollback drills are tracked separately in `docs/ops/launch-cutover.md` (see associated Task `DEVOPS-LAUNCH-18-001`).
diff --git a/docs/ops/nuget-preview-bootstrap.md b/docs/ops/nuget-preview-bootstrap.md
index 46593dbd..40710ffe 100644
--- a/docs/ops/nuget-preview-bootstrap.md
+++ b/docs/ops/nuget-preview-bootstrap.md
@@ -26,7 +26,7 @@ Follow the steps below whenever you refresh the repo or roll a new Offline Kit d
 From the repo root:
 
 ```bash
-DOTNET_NOLOGO=1 dotnet restore src/StellaOps.Excititor.Connectors.Abstractions/StellaOps.Excititor.Connectors.Abstractions.csproj \
+DOTNET_NOLOGO=1 dotnet restore src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Abstractions/StellaOps.Excititor.Connectors.Abstractions.csproj \
   --configfile NuGet.config
 ```
diff --git a/docs/ops/registry-token-service.md b/docs/ops/registry-token-service.md
index 5e17419b..bfaf7883 100644
--- a/docs/ops/registry-token-service.md
+++ b/docs/ops/registry-token-service.md
@@ -1,6 +1,6 @@
 # Registry Token Service Operations
 
-_Component_: `src/StellaOps.Registry.TokenService`
+_Component_: `src/Registry/StellaOps.Registry.TokenService`
 
 The registry token service issues short-lived Docker registry bearer tokens after
 validating an Authority OpTok (DPoP/mTLS sender constraint) and the customer’s
@@ -53,7 +53,7 @@ DPoP failures surface via the service logs (Serilog console output).
 ## Sample deployment
 
 ```bash
-dotnet run --project src/StellaOps.Registry.TokenService \
+dotnet run --project src/Registry/StellaOps.Registry.TokenService \
   --urls "http://0.0.0.0:8085"
 
 curl -H "Authorization: Bearer " \
diff --git a/docs/ops/scanner-analyzers-operations.md b/docs/ops/scanner-analyzers-operations.md
index a90920f4..c4cf9547 100644
--- a/docs/ops/scanner-analyzers-operations.md
+++ b/docs/ops/scanner-analyzers-operations.md
@@ -9,10 +9,10 @@ Keep the language analyzer microbench under the < 5 s SBOM pledge. CI emits
 1. CI (or engineers running locally) execute:
    ```bash
    dotnet run \
-     --project src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/StellaOps.Bench.ScannerAnalyzers.csproj \
+     --project src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/StellaOps.Bench.ScannerAnalyzers.csproj \
      -- \
      --repo-root . \
-     --out src/StellaOps.Bench/Scanner.Analyzers/baseline.csv \
+     --out src/Bench/StellaOps.Bench/Scanner.Analyzers/baseline.csv \
      --json out/bench/scanner-analyzers/latest.json \
      --prom out/bench/scanner-analyzers/latest.prom \
      --commit "$(git rev-parse HEAD)" \
diff --git a/docs/ops/telemetry-collector.md b/docs/ops/telemetry-collector.md
index 588115f6..c6915bb9 100644
--- a/docs/ops/telemetry-collector.md
+++ b/docs/ops/telemetry-collector.md
@@ -1,113 +1,113 @@
-# Telemetry Collector Deployment Guide
-
-> **Scope:** DevOps Guild, Observability Guild, and operators enabling the StellaOps telemetry pipeline (DEVOPS-OBS-50-001 / DEVOPS-OBS-50-003).
-
-This guide describes how to deploy the default OpenTelemetry Collector packaged with Stella Ops, validate its ingest endpoints, and prepare an offline-ready bundle for air-gapped environments.
-
----
-
-## 1. Overview
-
-The collector terminates OTLP traffic from Stella Ops services and exports metrics, traces, and logs.
- -| Endpoint | Purpose | TLS | Authentication | -| -------- | ------- | --- | -------------- | -| `:4317` | OTLP gRPC ingest | mTLS | Client certificate issued by collector CA | -| `:4318` | OTLP HTTP ingest | mTLS | Client certificate issued by collector CA | -| `:9464` | Prometheus scrape | mTLS | Same client certificate | -| `:13133` | Health check | mTLS | Same client certificate | -| `:1777` | pprof diagnostics | mTLS | Same client certificate | - -The default configuration lives at `deploy/telemetry/otel-collector-config.yaml` and mirrors the Helm values in the `stellaops` chart. - ---- - -## 2. Local validation (Compose) - -```bash -# 1. Generate dev certificates (CA + collector + client) -./ops/devops/telemetry/generate_dev_tls.sh - -# 2. Start the collector overlay -cd deploy/compose -docker compose -f docker-compose.telemetry.yaml up -d - -# 3. Start the storage overlay (Prometheus, Tempo, Loki) -docker compose -f docker-compose.telemetry-storage.yaml up -d - -# 4. Run the smoke test (OTLP HTTP) -python ../../ops/devops/telemetry/smoke_otel_collector.py --host localhost -``` - -The smoke test posts sample traces, metrics, and logs and verifies that the collector increments the `otelcol_receiver_accepted_*` counters exposed via the Prometheus exporter. The storage overlay gives you a local Prometheus/Tempo/Loki stack to confirm end-to-end wiring. The same client certificate can be used by local services to weave traces together. See [`Telemetry Storage Deployment`](telemetry-storage.md) for the storage configuration guidelines used in staging/production. - ---- - -## 3. Kubernetes deployment - -Enable the collector in Helm by setting the following values (example shown for the dev profile): - -```yaml -telemetry: - collector: - enabled: true - defaultTenant: - tls: - secretName: stellaops-otel-tls- -``` - -Provide a Kubernetes secret named `stellaops-otel-tls-` (for staging: `stellaops-otel-tls-stage`) with the keys `tls.crt`, `tls.key`, and `ca.crt`. The secret must contain the collector certificate, private key, and issuing CA respectively. Example: - -```bash -kubectl create secret generic stellaops-otel-tls-stage \ - --from-file=tls.crt=collector.crt \ - --from-file=tls.key=collector.key \ - --from-file=ca.crt=ca.crt -``` - -Helm renders the collector deployment, service, and config map automatically: - -```bash -helm upgrade --install stellaops deploy/helm/stellaops -f deploy/helm/stellaops/values-dev.yaml -``` - -Update client workloads to trust `ca.crt` and present client certificates that chain back to the same CA. - ---- - -## 4. Offline packaging (DEVOPS-OBS-50-003) - -Use the packaging helper to produce a tarball that can be mirrored inside the Offline Kit or air-gapped sites: - -```bash -python ops/devops/telemetry/package_offline_bundle.py --output out/telemetry/telemetry-bundle.tar.gz -``` - -The script gathers: - -- `deploy/telemetry/README.md` -- Collector configuration (`deploy/telemetry/otel-collector-config.yaml` and Helm copy) -- Helm template/values for the collector -- Compose overlay (`deploy/compose/docker-compose.telemetry.yaml`) - -The tarball ships with a `.sha256` checksum. To attach a Cosign signature, add `--sign` and provide `COSIGN_KEY_REF`/`COSIGN_IDENTITY_TOKEN` env vars (or use the `--cosign-key` flag). - -Distribute the bundle alongside certificates generated by your PKI. For air-gapped installs, regenerate certificates inside the enclave and recreate the `stellaops-otel-tls` secret. - ---- - -## 5. Operational checks - -1. 
**Health probes** – `kubectl exec` into the collector pod and run `curl -fsSk --cert client.crt --key client.key --cacert ca.crt https://127.0.0.1:13133/healthz`. -2. **Metrics scrape** – confirm Prometheus ingests `otelcol_receiver_accepted_*` counters. -3. **Trace correlation** – ensure services propagate `trace_id` and `tenant.id` attributes; refer to `docs/observability/observability.md` for expected spans. -4. **Certificate rotation** – when rotating the CA, update the secret and restart the collector; roll out new client certificates before enabling `require_client_certificate` if staged. - ---- - -## 6. Related references - -- `deploy/telemetry/README.md` – source configuration and local workflow. -- `ops/devops/telemetry/smoke_otel_collector.py` – OTLP smoke test. -- `docs/observability/observability.md` – metrics/traces/logs taxonomy. -- `docs/13_RELEASE_ENGINEERING_PLAYBOOK.md` – release checklist for telemetry assets. +# Telemetry Collector Deployment Guide + +> **Scope:** DevOps Guild, Observability Guild, and operators enabling the StellaOps telemetry pipeline (DEVOPS-OBS-50-001 / DEVOPS-OBS-50-003). + +This guide describes how to deploy the default OpenTelemetry Collector packaged with Stella Ops, validate its ingest endpoints, and prepare an offline-ready bundle for air-gapped environments. + +--- + +## 1. Overview + +The collector terminates OTLP traffic from Stella Ops services and exports metrics, traces, and logs. + +| Endpoint | Purpose | TLS | Authentication | +| -------- | ------- | --- | -------------- | +| `:4317` | OTLP gRPC ingest | mTLS | Client certificate issued by collector CA | +| `:4318` | OTLP HTTP ingest | mTLS | Client certificate issued by collector CA | +| `:9464` | Prometheus scrape | mTLS | Same client certificate | +| `:13133` | Health check | mTLS | Same client certificate | +| `:1777` | pprof diagnostics | mTLS | Same client certificate | + +The default configuration lives at `deploy/telemetry/otel-collector-config.yaml` and mirrors the Helm values in the `stellaops` chart. + +--- + +## 2. Local validation (Compose) + +```bash +# 1. Generate dev certificates (CA + collector + client) +./ops/devops/telemetry/generate_dev_tls.sh + +# 2. Start the collector overlay +cd deploy/compose +docker compose -f docker-compose.telemetry.yaml up -d + +# 3. Start the storage overlay (Prometheus, Tempo, Loki) +docker compose -f docker-compose.telemetry-storage.yaml up -d + +# 4. Run the smoke test (OTLP HTTP) +python ../../ops/devops/telemetry/smoke_otel_collector.py --host localhost +``` + +The smoke test posts sample traces, metrics, and logs and verifies that the collector increments the `otelcol_receiver_accepted_*` counters exposed via the Prometheus exporter. The storage overlay gives you a local Prometheus/Tempo/Loki stack to confirm end-to-end wiring. The same client certificate can be used by local services to weave traces together. See [`Telemetry Storage Deployment`](telemetry-storage.md) for the storage configuration guidelines used in staging/production. + +--- + +## 3. Kubernetes deployment + +Enable the collector in Helm by setting the following values (example shown for the dev profile): + +```yaml +telemetry: + collector: + enabled: true + defaultTenant: + tls: + secretName: stellaops-otel-tls- +``` + +Provide a Kubernetes secret named `stellaops-otel-tls-` (for staging: `stellaops-otel-tls-stage`) with the keys `tls.crt`, `tls.key`, and `ca.crt`. The secret must contain the collector certificate, private key, and issuing CA respectively. 
Example: + +```bash +kubectl create secret generic stellaops-otel-tls-stage \ + --from-file=tls.crt=collector.crt \ + --from-file=tls.key=collector.key \ + --from-file=ca.crt=ca.crt +``` + +Helm renders the collector deployment, service, and config map automatically: + +```bash +helm upgrade --install stellaops deploy/helm/stellaops -f deploy/helm/stellaops/values-dev.yaml +``` + +Update client workloads to trust `ca.crt` and present client certificates that chain back to the same CA. + +--- + +## 4. Offline packaging (DEVOPS-OBS-50-003) + +Use the packaging helper to produce a tarball that can be mirrored inside the Offline Kit or air-gapped sites: + +```bash +python ops/devops/telemetry/package_offline_bundle.py --output out/telemetry/telemetry-bundle.tar.gz +``` + +The script gathers: + +- `deploy/telemetry/README.md` +- Collector configuration (`deploy/telemetry/otel-collector-config.yaml` and Helm copy) +- Helm template/values for the collector +- Compose overlay (`deploy/compose/docker-compose.telemetry.yaml`) + +The tarball ships with a `.sha256` checksum. To attach a Cosign signature, add `--sign` and provide `COSIGN_KEY_REF`/`COSIGN_IDENTITY_TOKEN` env vars (or use the `--cosign-key` flag). + +Distribute the bundle alongside certificates generated by your PKI. For air-gapped installs, regenerate certificates inside the enclave and recreate the `stellaops-otel-tls` secret. + +--- + +## 5. Operational checks + +1. **Health probes** – `kubectl exec` into the collector pod and run `curl -fsSk --cert client.crt --key client.key --cacert ca.crt https://127.0.0.1:13133/healthz`. +2. **Metrics scrape** – confirm Prometheus ingests `otelcol_receiver_accepted_*` counters. +3. **Trace correlation** – ensure services propagate `trace_id` and `tenant.id` attributes; refer to `docs/observability/observability.md` for expected spans. +4. **Certificate rotation** – when rotating the CA, update the secret and restart the collector; roll out new client certificates before enabling `require_client_certificate` if staged. + +--- + +## 6. Related references + +- `deploy/telemetry/README.md` – source configuration and local workflow. +- `ops/devops/telemetry/smoke_otel_collector.py` – OTLP smoke test. +- `docs/observability/observability.md` – metrics/traces/logs taxonomy. +- `docs/13_RELEASE_ENGINEERING_PLAYBOOK.md` – release checklist for telemetry assets. diff --git a/docs/ops/ui-auth-smoke.md b/docs/ops/ui-auth-smoke.md index fdfe1d17..5c117919 100644 --- a/docs/ops/ui-auth-smoke.md +++ b/docs/ops/ui-auth-smoke.md @@ -1,32 +1,32 @@ -# UI Auth Smoke Job (Playwright) - -The DevOps Guild tracks **DEVOPS-UI-13-006** to wire the new Playwright auth -smoke checks into CI and the Offline Kit pipeline. These tests exercise the -Angular UI login flow against a stubbed Authority instance to verify that -`/config.json` is discovered, DPoP proofs are minted, and error handling is -surfaced when the backend rejects a request. - -## What the job does - -1. Builds the UI bundle (or consumes the artifact from the release pipeline). -2. Copies the environment stub from `src/config/config.sample.json` into the - runtime directory as `config.json` so the UI can bootstrap without a live - gateway. -3. Runs `npm run test:e2e`, which launches Playwright with the auth fixtures - under `tests/e2e/auth.spec.ts`: - - Validates that the Sign-in button generates an Authorization Code + PKCE - redirect to `https://authority.local/connect/authorize`. 
- - Confirms the callback view shows an actionable error when the redirect is - missing the pending login state. -4. Publishes JUnit + Playwright traces (retain-on-failure) for troubleshooting. - -## Pipeline integration notes - -- Chromium must already be available (`npx playwright install --with-deps`). -- Set `PLAYWRIGHT_BASE_URL` if the UI serves on a non-default host/port. -- For Offline Kit packaging, bundle the Playwright browser cache under - `.cache/ms-playwright/` so the job runs without network access. -- Failures should block release promotion; export the traces to the artifacts - tab for debugging. - -Refer to `ops/devops/TASKS.md` (DEVOPS-UI-13-006) for progress and ownership. +# UI Auth Smoke Job (Playwright) + +The DevOps Guild tracks **DEVOPS-UI-13-006** to wire the new Playwright auth +smoke checks into CI and the Offline Kit pipeline. These tests exercise the +Angular UI login flow against a stubbed Authority instance to verify that +`/config.json` is discovered, DPoP proofs are minted, and error handling is +surfaced when the backend rejects a request. + +## What the job does + +1. Builds the UI bundle (or consumes the artifact from the release pipeline). +2. Copies the environment stub from `src/config/config.sample.json` into the + runtime directory as `config.json` so the UI can bootstrap without a live + gateway. +3. Runs `npm run test:e2e`, which launches Playwright with the auth fixtures + under `tests/e2e/auth.spec.ts`: + - Validates that the Sign-in button generates an Authorization Code + PKCE + redirect to `https://authority.local/connect/authorize`. + - Confirms the callback view shows an actionable error when the redirect is + missing the pending login state. +4. Publishes JUnit + Playwright traces (retain-on-failure) for troubleshooting. + +## Pipeline integration notes + +- Chromium must already be available (`npx playwright install --with-deps`). +- Set `PLAYWRIGHT_BASE_URL` if the UI serves on a non-default host/port. +- For Offline Kit packaging, bundle the Playwright browser cache under + `.cache/ms-playwright/` so the job runs without network access. +- Failures should block release promotion; export the traces to the artifacts + tab for debugging. + +Refer to `ops/devops/TASKS.md` (DEVOPS-UI-13-006) for progress and ownership. 
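+
+A minimal sketch of how a pipeline stage might chain these steps, assuming the built UI bundle lands in `dist/` and a locally served dev port (both illustrative, not prescribed by the job definition):
+
+```bash
+# Hypothetical CI wiring for the auth smoke job; paths and port are assumptions.
+npx playwright install --with-deps chromium            # Chromium must be available (or restored from the bundled cache)
+cp src/config/config.sample.json dist/config.json      # stage the environment stub as config.json for bootstrap
+PLAYWRIGHT_BASE_URL="${PLAYWRIGHT_BASE_URL:-http://127.0.0.1:4400}" \
+  npm run test:e2e                                      # runs the auth fixtures under tests/e2e/auth.spec.ts
+```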
diff --git a/docs/ops/zastava-runtime-grafana-dashboard.json b/docs/ops/zastava-runtime-grafana-dashboard.json index 2132b2c5..beff470d 100644 --- a/docs/ops/zastava-runtime-grafana-dashboard.json +++ b/docs/ops/zastava-runtime-grafana-dashboard.json @@ -1,205 +1,205 @@ -{ - "title": "Zastava Runtime Plane", - "uid": "zastava-runtime", - "timezone": "utc", - "schemaVersion": 38, - "version": 1, - "refresh": "30s", - "time": { - "from": "now-6h", - "to": "now" - }, - "panels": [ - { - "id": 1, - "type": "timeseries", - "title": "Observer Event Rate", - "datasource": { - "type": "prometheus", - "uid": "${datasource}" - }, - "targets": [ - { - "expr": "sum by (tenant,component,kind) (rate(zastava_runtime_events_total{tenant=~\"$tenant\"}[5m]))", - "legendFormat": "{{tenant}}/{{component}}/{{kind}}" - } - ], - "gridPos": { - "h": 8, - "w": 12, - "x": 0, - "y": 0 - }, - "fieldConfig": { - "defaults": { - "unit": "1/s", - "thresholds": { - "mode": "absolute", - "steps": [ - { - "color": "green" - } - ] - } - }, - "overrides": [] - }, - "options": { - "legend": { - "showLegend": true, - "placement": "bottom" - }, - "tooltip": { - "mode": "multi" - } - } - }, - { - "id": 2, - "type": "timeseries", - "title": "Admission Decisions", - "datasource": { - "type": "prometheus", - "uid": "${datasource}" - }, - "targets": [ - { - "expr": "sum by (decision) (rate(zastava_admission_decisions_total{tenant=~\"$tenant\"}[5m]))", - "legendFormat": "{{decision}}" - } - ], - "gridPos": { - "h": 8, - "w": 12, - "x": 12, - "y": 0 - }, - "fieldConfig": { - "defaults": { - "unit": "1/s", - "thresholds": { - "mode": "absolute", - "steps": [ - { - "color": "green" - }, - { - "color": "red", - "value": 20 - } - ] - } - }, - "overrides": [] - }, - "options": { - "legend": { - "showLegend": true, - "placement": "bottom" - }, - "tooltip": { - "mode": "multi" - } - } - }, - { - "id": 3, - "type": "timeseries", - "title": "Backend Latency P95", - "datasource": { - "type": "prometheus", - "uid": "${datasource}" - }, - "targets": [ - { - "expr": "histogram_quantile(0.95, sum by (le) (rate(zastava_runtime_backend_latency_ms_bucket{tenant=~\"$tenant\"}[5m])))", - "legendFormat": "p95 latency" - } - ], - "gridPos": { - "h": 8, - "w": 12, - "x": 0, - "y": 8 - }, - "fieldConfig": { - "defaults": { - "unit": "ms", - "thresholds": { - "mode": "absolute", - "steps": [ - { - "color": "green" - }, - { - "color": "orange", - "value": 500 - }, - { - "color": "red", - "value": 750 - } - ] - } - }, - "overrides": [] - }, - "options": { - "legend": { - "showLegend": true, - "placement": "bottom" - }, - "tooltip": { - "mode": "multi" - } - } - } - ], - "templating": { - "list": [ - { - "name": "datasource", - "type": "datasource", - "query": "prometheus", - "label": "Prometheus", - "current": { - "text": "Prometheus", - "value": "Prometheus" - } - }, - { - "name": "tenant", - "type": "query", - "datasource": { - "type": "prometheus", - "uid": "${datasource}" - }, - "definition": "label_values(zastava_runtime_events_total, tenant)", - "refresh": 1, - "hide": 0, - "current": { - "text": ".*", - "value": ".*" - }, - "regex": "", - "includeAll": true, - "multi": true, - "sort": 1 - } - ] - }, - "annotations": { - "list": [ - { - "name": "Deployments", - "type": "tags", - "datasource": { - "type": "prometheus", - "uid": "${datasource}" - }, - "enable": true, - "iconColor": "rgba(255, 96, 96, 1)" - } - ] - } -} +{ + "title": "Zastava Runtime Plane", + "uid": "zastava-runtime", + "timezone": "utc", + "schemaVersion": 38, + "version": 1, + "refresh": 
"30s", + "time": { + "from": "now-6h", + "to": "now" + }, + "panels": [ + { + "id": 1, + "type": "timeseries", + "title": "Observer Event Rate", + "datasource": { + "type": "prometheus", + "uid": "${datasource}" + }, + "targets": [ + { + "expr": "sum by (tenant,component,kind) (rate(zastava_runtime_events_total{tenant=~\"$tenant\"}[5m]))", + "legendFormat": "{{tenant}}/{{component}}/{{kind}}" + } + ], + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 0 + }, + "fieldConfig": { + "defaults": { + "unit": "1/s", + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + } + ] + } + }, + "overrides": [] + }, + "options": { + "legend": { + "showLegend": true, + "placement": "bottom" + }, + "tooltip": { + "mode": "multi" + } + } + }, + { + "id": 2, + "type": "timeseries", + "title": "Admission Decisions", + "datasource": { + "type": "prometheus", + "uid": "${datasource}" + }, + "targets": [ + { + "expr": "sum by (decision) (rate(zastava_admission_decisions_total{tenant=~\"$tenant\"}[5m]))", + "legendFormat": "{{decision}}" + } + ], + "gridPos": { + "h": 8, + "w": 12, + "x": 12, + "y": 0 + }, + "fieldConfig": { + "defaults": { + "unit": "1/s", + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "red", + "value": 20 + } + ] + } + }, + "overrides": [] + }, + "options": { + "legend": { + "showLegend": true, + "placement": "bottom" + }, + "tooltip": { + "mode": "multi" + } + } + }, + { + "id": 3, + "type": "timeseries", + "title": "Backend Latency P95", + "datasource": { + "type": "prometheus", + "uid": "${datasource}" + }, + "targets": [ + { + "expr": "histogram_quantile(0.95, sum by (le) (rate(zastava_runtime_backend_latency_ms_bucket{tenant=~\"$tenant\"}[5m])))", + "legendFormat": "p95 latency" + } + ], + "gridPos": { + "h": 8, + "w": 12, + "x": 0, + "y": 8 + }, + "fieldConfig": { + "defaults": { + "unit": "ms", + "thresholds": { + "mode": "absolute", + "steps": [ + { + "color": "green" + }, + { + "color": "orange", + "value": 500 + }, + { + "color": "red", + "value": 750 + } + ] + } + }, + "overrides": [] + }, + "options": { + "legend": { + "showLegend": true, + "placement": "bottom" + }, + "tooltip": { + "mode": "multi" + } + } + } + ], + "templating": { + "list": [ + { + "name": "datasource", + "type": "datasource", + "query": "prometheus", + "label": "Prometheus", + "current": { + "text": "Prometheus", + "value": "Prometheus" + } + }, + { + "name": "tenant", + "type": "query", + "datasource": { + "type": "prometheus", + "uid": "${datasource}" + }, + "definition": "label_values(zastava_runtime_events_total, tenant)", + "refresh": 1, + "hide": 0, + "current": { + "text": ".*", + "value": ".*" + }, + "regex": "", + "includeAll": true, + "multi": true, + "sort": 1 + } + ] + }, + "annotations": { + "list": [ + { + "name": "Deployments", + "type": "tags", + "datasource": { + "type": "prometheus", + "uid": "${datasource}" + }, + "enable": true, + "iconColor": "rgba(255, 96, 96, 1)" + } + ] + } +} diff --git a/docs/ops/zastava-runtime-operations.md b/docs/ops/zastava-runtime-operations.md index c437a3f2..700dfb9a 100644 --- a/docs/ops/zastava-runtime-operations.md +++ b/docs/ops/zastava-runtime-operations.md @@ -1,174 +1,174 @@ -# Zastava Runtime Operations Runbook - -This runbook covers the runtime plane (Observer DaemonSet + Admission Webhook). -It aligns with `Sprint 12 – Runtime Guardrails` and assumes components consume -`StellaOps.Zastava.Core` (`AddZastavaRuntimeCore(...)`). - -## 1. 
Prerequisites - -- **Authority client credentials** – service principal `zastava-runtime` with scopes - `aud:scanner` and `api:scanner.runtime.write`. Provision DPoP keys and mTLS client - certs before rollout. -- **Scanner/WebService reachability** – cluster DNS entry (e.g. `scanner.internal`) - resolvable from every node running Observer/Webhook. -- **Host mounts** – read-only access to `/proc`, container runtime state - (`/var/lib/containerd`, `/var/run/containerd/containerd.sock`) and scratch space - (`/var/run/zastava`). -- **Offline kit bundle** – operators staging air-gapped installs must download - `offline-kit/zastava-runtime-{version}.tar.zst` containing container images, - Grafana dashboards, and Prometheus rules referenced below. -- **Secrets** – Authority OpTok cache dir, DPoP private keys, and webhook TLS secrets - live outside git. For air-gapped installs copy them to the sealed secrets vault. - -### 1.1 Telemetry quick reference - -| Metric | Description | Notes | -|--------|-------------|-------| -| `zastava.runtime.events.total{tenant,component,kind}` | Rate of observer events sent to Scanner | Expect >0 on busy nodes. | -| `zastava.runtime.backend.latency.ms` | Histogram (ms) for `/runtime/events` and `/policy/runtime` calls | P95 & P99 drive alerting. | -| `zastava.admission.decisions.total{decision}` | Admission verdict counts | Track deny spikes or fail-open fallbacks. | -| `zastava.admission.cache.hits.total` | (future) Cache utilisation once Observer batches land | Placeholder until Observer tasks 12-004 complete. | - -## 2. Deployment workflows - -### 2.1 Fresh install (Helm overlay) - -1. Load offline kit bundle: `oras cp offline-kit/zastava-runtime-*.tar.zst oci:registry.internal/zastava`. -2. Render values: - - `zastava.runtime.tenant`, `environment`, `deployment` (cluster identifier). - - `zastava.runtime.authority` block (issuer, clientId, audience, DPoP toggle). - - `zastava.runtime.metrics.commonTags.cluster` for Prometheus labels. -3. Pre-create secrets: - - `zastava-authority-dpop` (JWK + private key). - - `zastava-authority-mtls` (client cert/key chain). - - `zastava-webhook-tls` (serving cert; CSR bundle if using auto-approval). -4. Deploy Observer DaemonSet and Webhook chart: - ```sh - helm upgrade --install zastava-runtime deploy/helm/zastava \ - -f values/zastava-runtime.yaml \ - --namespace stellaops \ - --create-namespace - ``` -5. Verify: - - `kubectl -n stellaops get pods -l app=zastava-observer` ready. - - `kubectl -n stellaops logs ds/zastava-observer --tail=20` shows - `Issued runtime OpTok` audit line with DPoP token type. - - Admission webhook registered: `kubectl get validatingwebhookconfiguration zastava-webhook`. - -### 2.2 Upgrades - -1. Scale webhook deployment to `--replicas=3` (rolling). -2. Drain one node per AZ to ensure Observer tolerates disruption. -3. Apply chart upgrade; watch `zastava.runtime.backend.latency.ms` P95 (<250 ms). -4. Post-upgrade, run smoke tests: - - Apply unsigned Pod manifest → expect `deny` (policy fail). - - Apply signed Pod manifest → expect `allow`. -5. Record upgrade in ops log with Git SHA + Helm chart version. - -### 2.3 Rollback - -1. Use Helm revision history: `helm history zastava-runtime`. -2. Rollback: `helm rollback zastava-runtime `. -3. Invalidate cached OpToks: - ```sh - kubectl -n stellaops exec deploy/zastava-webhook -- \ - zastava-webhook invalidate-op-token --audience scanner - ``` -4. Confirm observers reconnect via metrics (`rate(zastava_runtime_events_total[5m])`). - -## 3. 
Authority & security guardrails - -- Tokens must be `DPoP` type when `requireDpop=true`. Logs emit - `authority.token.issue` scope with decision data; absence indicates misconfig. -- `requireMutualTls=true` enforces mTLS during token acquisition. Disable only in - lab clusters; expect warning log `Mutual TLS requirement disabled`. -- Static fallback tokens (`allowStaticTokenFallback=true`) should exist only during - initial bootstrap. Rotate nightly; preference is to disable once Authority reachable. -- Audit every change in `zastava.runtime.authority` through change management. - Use `kubectl get secret zastava-authority-dpop -o jsonpath='{.metadata.annotations.revision}'` - to confirm key rotation. - -## 4. Incident response - -### 4.1 Authority offline - -1. Check Prometheus alert `ZastavaAuthorityTokenStale`. -2. Inspect Observer logs for `authority.token.fallback` scope. -3. If fallback engaged, verify static token validity duration; rotate secret if older than 24 h. -4. Once Authority restored, delete static fallback secret and restart pods to rebind DPoP keys. - -### 4.2 Scanner/WebService latency spike - -1. Alert `ZastavaRuntimeBackendLatencyHigh` fires at P95 > 750 ms for 5 minutes. -2. Run backend health: `kubectl -n scanner exec deploy/scanner-web -- curl -f localhost:8080/healthz/ready`. -3. If backend degraded, auto buffer may throttle. Confirm disk-backed queue size via - `kubectl logs ds/zastava-observer | grep buffer.drops`. -4. Consider enabling fail-open for namespaces listed in runbook Appendix B (temporary). - -### 4.3 Admission deny storm - -1. Alert `ZastavaAdmissionDenySpike` indicates >20 denies/minute. -2. Pull sample: `kubectl logs deploy/zastava-webhook --since=10m | jq '.decision'`. -3. Cross-check policy backlog in Scanner (`/policy/runtime` logs). Engage application - owner; optionally set namespace to `failOpenNamespaces` after risk assessment. - -## 5. Offline kit & air-gapped notes - -- Bundle contents: - - Observer/Webhook container images (multi-arch). - - `docs/ops/zastava-runtime-prometheus-rules.yaml` + Grafana dashboard JSON. - - Sample `zastava-runtime.values.yaml`. -- Verification: - - Validate signature: `cosign verify-blob offline-kit/zastava-runtime-*.tar.zst --certificate offline-kit/zastava-runtime.cert`. - - Extract Prometheus rules into offline monitoring cluster (`/etc/prometheus/rules.d`). - - Import Grafana dashboard via `grafana-cli --config ...`. - -## 6. Observability assets - -- Prometheus alert rules: `docs/ops/zastava-runtime-prometheus-rules.yaml`. -- Grafana dashboard JSON: `docs/ops/zastava-runtime-grafana-dashboard.json`. -- Add both to the monitoring repo (`ops/monitoring/zastava`) and reference them in - the Offline Kit manifest. - -## 7. Build-id correlation & symbol retrieval - -Runtime events emitted by Observer now include `process.buildId` (from the ELF -`NT_GNU_BUILD_ID` note) and Scanner `/policy/runtime` surfaces the most recent -`buildIds` list per digest. Operators can use these hashes to locate debug -artifacts during incident response: - -1. Capture the hash from CLI/webhook/Scanner API—for example: - ```bash - stellaops-cli runtime policy test --image --namespace - ``` - Copy one of the `Build IDs` (e.g. - `5f0c7c3cb4d9f8a4f1c1d5c6b7e8f90123456789`). -2. Derive the debug path (`/` under `.build-id`) and check it exists: - ```bash - ls /var/opt/debug/.build-id/5f/0c7c3cb4d9f8a4f1c1d5c6b7e8f90123456789.debug - ``` -3. 
If the file is missing, rehydrate it from Offline Kit bundles or the - `debug-store` object bucket (mirror of release artefacts): - ```bash - oras cp oci://registry.internal/debug-store:latest . --include \ - "5f/0c7c3cb4d9f8a4f1c1d5c6b7e8f90123456789.debug" - ``` -4. Confirm the running process advertises the same GNU build-id before - symbolising: - ```bash - readelf -n /proc/$(pgrep -f payments-api | head -n1)/exe | grep -i 'Build ID' - ``` -5. Attach the `.debug` file in `gdb`/`lldb`, feed it to `eu-unstrip`, or cache it - in `debuginfod` for fleet-wide symbol resolution: - ```bash - debuginfod-find debuginfo 5f0c7c3cb4d9f8a4f1c1d5c6b7e8f90123456789 >/tmp/payments-api.debug - ``` -6. For musl-based images, expect shorter build-id footprints. Missing hashes in - runtime events indicate stripped binaries without the GNU note—schedule a - rebuild with `-Wl,--build-id` enabled or add the binary to the debug-store - allowlist so the scanner can surface a fallback symbol package. - -Monitor `scanner.policy.runtime` responses for the `buildIds` field; absence of -data after ZASTAVA-OBS-17-005 implies containers launched before the Observer -upgrade or non-ELF entrypoints (static scripts). Re-run the workload or restart -Observer to trigger a fresh capture if symbol parity is required. +# Zastava Runtime Operations Runbook + +This runbook covers the runtime plane (Observer DaemonSet + Admission Webhook). +It aligns with `Sprint 12 – Runtime Guardrails` and assumes components consume +`StellaOps.Zastava.Core` (`AddZastavaRuntimeCore(...)`). + +## 1. Prerequisites + +- **Authority client credentials** – service principal `zastava-runtime` with scopes + `aud:scanner` and `api:scanner.runtime.write`. Provision DPoP keys and mTLS client + certs before rollout. +- **Scanner/WebService reachability** – cluster DNS entry (e.g. `scanner.internal`) + resolvable from every node running Observer/Webhook. +- **Host mounts** – read-only access to `/proc`, container runtime state + (`/var/lib/containerd`, `/var/run/containerd/containerd.sock`) and scratch space + (`/var/run/zastava`). +- **Offline kit bundle** – operators staging air-gapped installs must download + `offline-kit/zastava-runtime-{version}.tar.zst` containing container images, + Grafana dashboards, and Prometheus rules referenced below. +- **Secrets** – Authority OpTok cache dir, DPoP private keys, and webhook TLS secrets + live outside git. For air-gapped installs copy them to the sealed secrets vault. + +### 1.1 Telemetry quick reference + +| Metric | Description | Notes | +|--------|-------------|-------| +| `zastava.runtime.events.total{tenant,component,kind}` | Rate of observer events sent to Scanner | Expect >0 on busy nodes. | +| `zastava.runtime.backend.latency.ms` | Histogram (ms) for `/runtime/events` and `/policy/runtime` calls | P95 & P99 drive alerting. | +| `zastava.admission.decisions.total{decision}` | Admission verdict counts | Track deny spikes or fail-open fallbacks. | +| `zastava.admission.cache.hits.total` | (future) Cache utilisation once Observer batches land | Placeholder until Observer tasks 12-004 complete. | + +## 2. Deployment workflows + +### 2.1 Fresh install (Helm overlay) + +1. Load offline kit bundle: `oras cp offline-kit/zastava-runtime-*.tar.zst oci:registry.internal/zastava`. +2. Render values: + - `zastava.runtime.tenant`, `environment`, `deployment` (cluster identifier). + - `zastava.runtime.authority` block (issuer, clientId, audience, DPoP toggle). 
+ - `zastava.runtime.metrics.commonTags.cluster` for Prometheus labels. +3. Pre-create secrets: + - `zastava-authority-dpop` (JWK + private key). + - `zastava-authority-mtls` (client cert/key chain). + - `zastava-webhook-tls` (serving cert; CSR bundle if using auto-approval). +4. Deploy Observer DaemonSet and Webhook chart: + ```sh + helm upgrade --install zastava-runtime deploy/helm/zastava \ + -f values/zastava-runtime.yaml \ + --namespace stellaops \ + --create-namespace + ``` +5. Verify: + - `kubectl -n stellaops get pods -l app=zastava-observer` ready. + - `kubectl -n stellaops logs ds/zastava-observer --tail=20` shows + `Issued runtime OpTok` audit line with DPoP token type. + - Admission webhook registered: `kubectl get validatingwebhookconfiguration zastava-webhook`. + +### 2.2 Upgrades + +1. Scale webhook deployment to `--replicas=3` (rolling). +2. Drain one node per AZ to ensure Observer tolerates disruption. +3. Apply chart upgrade; watch `zastava.runtime.backend.latency.ms` P95 (<250 ms). +4. Post-upgrade, run smoke tests: + - Apply unsigned Pod manifest → expect `deny` (policy fail). + - Apply signed Pod manifest → expect `allow`. +5. Record upgrade in ops log with Git SHA + Helm chart version. + +### 2.3 Rollback + +1. Use Helm revision history: `helm history zastava-runtime`. +2. Rollback: `helm rollback zastava-runtime `. +3. Invalidate cached OpToks: + ```sh + kubectl -n stellaops exec deploy/zastava-webhook -- \ + zastava-webhook invalidate-op-token --audience scanner + ``` +4. Confirm observers reconnect via metrics (`rate(zastava_runtime_events_total[5m])`). + +## 3. Authority & security guardrails + +- Tokens must be `DPoP` type when `requireDpop=true`. Logs emit + `authority.token.issue` scope with decision data; absence indicates misconfig. +- `requireMutualTls=true` enforces mTLS during token acquisition. Disable only in + lab clusters; expect warning log `Mutual TLS requirement disabled`. +- Static fallback tokens (`allowStaticTokenFallback=true`) should exist only during + initial bootstrap. Rotate nightly; preference is to disable once Authority reachable. +- Audit every change in `zastava.runtime.authority` through change management. + Use `kubectl get secret zastava-authority-dpop -o jsonpath='{.metadata.annotations.revision}'` + to confirm key rotation. + +## 4. Incident response + +### 4.1 Authority offline + +1. Check Prometheus alert `ZastavaAuthorityTokenStale`. +2. Inspect Observer logs for `authority.token.fallback` scope. +3. If fallback engaged, verify static token validity duration; rotate secret if older than 24 h. +4. Once Authority restored, delete static fallback secret and restart pods to rebind DPoP keys. + +### 4.2 Scanner/WebService latency spike + +1. Alert `ZastavaRuntimeBackendLatencyHigh` fires at P95 > 750 ms for 5 minutes. +2. Run backend health: `kubectl -n scanner exec deploy/scanner-web -- curl -f localhost:8080/healthz/ready`. +3. If backend degraded, auto buffer may throttle. Confirm disk-backed queue size via + `kubectl logs ds/zastava-observer | grep buffer.drops`. +4. Consider enabling fail-open for namespaces listed in runbook Appendix B (temporary). + +### 4.3 Admission deny storm + +1. Alert `ZastavaAdmissionDenySpike` indicates >20 denies/minute. +2. Pull sample: `kubectl logs deploy/zastava-webhook --since=10m | jq '.decision'`. +3. Cross-check policy backlog in Scanner (`/policy/runtime` logs). Engage application + owner; optionally set namespace to `failOpenNamespaces` after risk assessment. + +## 5. 
Offline kit & air-gapped notes + +- Bundle contents: + - Observer/Webhook container images (multi-arch). + - `docs/ops/zastava-runtime-prometheus-rules.yaml` + Grafana dashboard JSON. + - Sample `zastava-runtime.values.yaml`. +- Verification: + - Validate signature: `cosign verify-blob offline-kit/zastava-runtime-*.tar.zst --certificate offline-kit/zastava-runtime.cert`. + - Extract Prometheus rules into offline monitoring cluster (`/etc/prometheus/rules.d`). + - Import Grafana dashboard via `grafana-cli --config ...`. + +## 6. Observability assets + +- Prometheus alert rules: `docs/ops/zastava-runtime-prometheus-rules.yaml`. +- Grafana dashboard JSON: `docs/ops/zastava-runtime-grafana-dashboard.json`. +- Add both to the monitoring repo (`ops/monitoring/zastava`) and reference them in + the Offline Kit manifest. + +## 7. Build-id correlation & symbol retrieval + +Runtime events emitted by Observer now include `process.buildId` (from the ELF +`NT_GNU_BUILD_ID` note) and Scanner `/policy/runtime` surfaces the most recent +`buildIds` list per digest. Operators can use these hashes to locate debug +artifacts during incident response: + +1. Capture the hash from CLI/webhook/Scanner API—for example: + ```bash + stellaops-cli runtime policy test --image --namespace + ``` + Copy one of the `Build IDs` (e.g. + `5f0c7c3cb4d9f8a4f1c1d5c6b7e8f90123456789`). +2. Derive the debug path (`/` under `.build-id`) and check it exists: + ```bash + ls /var/opt/debug/.build-id/5f/0c7c3cb4d9f8a4f1c1d5c6b7e8f90123456789.debug + ``` +3. If the file is missing, rehydrate it from Offline Kit bundles or the + `debug-store` object bucket (mirror of release artefacts): + ```bash + oras cp oci://registry.internal/debug-store:latest . --include \ + "5f/0c7c3cb4d9f8a4f1c1d5c6b7e8f90123456789.debug" + ``` +4. Confirm the running process advertises the same GNU build-id before + symbolising: + ```bash + readelf -n /proc/$(pgrep -f payments-api | head -n1)/exe | grep -i 'Build ID' + ``` +5. Attach the `.debug` file in `gdb`/`lldb`, feed it to `eu-unstrip`, or cache it + in `debuginfod` for fleet-wide symbol resolution: + ```bash + debuginfod-find debuginfo 5f0c7c3cb4d9f8a4f1c1d5c6b7e8f90123456789 >/tmp/payments-api.debug + ``` +6. For musl-based images, expect shorter build-id footprints. Missing hashes in + runtime events indicate stripped binaries without the GNU note—schedule a + rebuild with `-Wl,--build-id` enabled or add the binary to the debug-store + allowlist so the scanner can surface a fallback symbol package. + +Monitor `scanner.policy.runtime` responses for the `buildIds` field; absence of +data after ZASTAVA-OBS-17-005 implies containers launched before the Observer +upgrade or non-ELF entrypoints (static scripts). Re-run the workload or restart +Observer to trigger a fresh capture if symbol parity is required. diff --git a/docs/ops/zastava-runtime-prometheus-rules.yaml b/docs/ops/zastava-runtime-prometheus-rules.yaml index aefdfeb6..b5630af6 100644 --- a/docs/ops/zastava-runtime-prometheus-rules.yaml +++ b/docs/ops/zastava-runtime-prometheus-rules.yaml @@ -1,31 +1,31 @@ -groups: - - name: zastava-runtime - interval: 30s - rules: - - alert: ZastavaRuntimeEventsSilent - expr: sum(rate(zastava_runtime_events_total[10m])) == 0 - for: 15m - labels: - severity: warning - service: zastava-runtime - annotations: - summary: "Observer events stalled" - description: "No runtime events emitted in the last 15 minutes. Check observer DaemonSet health and container runtime mounts." 
- - alert: ZastavaRuntimeBackendLatencyHigh - expr: histogram_quantile(0.95, sum by (le) (rate(zastava_runtime_backend_latency_ms_bucket[5m]))) > 0.75 - for: 10m - labels: - severity: critical - service: zastava-runtime - annotations: - summary: "Runtime backend latency p95 above 750 ms" - description: "Latency to Scanner runtime APIs is elevated. Inspect Scanner.WebService readiness, Authority OpTok issuance, and cluster network." - - alert: ZastavaAdmissionDenySpike - expr: sum(rate(zastava_admission_decisions_total{decision="deny"}[5m])) > 20 - for: 5m - labels: - severity: warning - service: zastava-runtime - annotations: - summary: "Admission webhook denies exceeding threshold" - description: "Webhook is denying more than 20 pod admissions per minute. Confirm policy verdicts and consider fail-open exception for impacted namespaces." +groups: + - name: zastava-runtime + interval: 30s + rules: + - alert: ZastavaRuntimeEventsSilent + expr: sum(rate(zastava_runtime_events_total[10m])) == 0 + for: 15m + labels: + severity: warning + service: zastava-runtime + annotations: + summary: "Observer events stalled" + description: "No runtime events emitted in the last 15 minutes. Check observer DaemonSet health and container runtime mounts." + - alert: ZastavaRuntimeBackendLatencyHigh + expr: histogram_quantile(0.95, sum by (le) (rate(zastava_runtime_backend_latency_ms_bucket[5m]))) > 750 + for: 10m + labels: + severity: critical + service: zastava-runtime + annotations: + summary: "Runtime backend latency p95 above 750 ms" + description: "Latency to Scanner runtime APIs is elevated. Inspect Scanner.WebService readiness, Authority OpTok issuance, and cluster network." + - alert: ZastavaAdmissionDenySpike + expr: sum(rate(zastava_admission_decisions_total{decision="deny"}[5m])) > 20 + for: 5m + labels: + severity: warning + service: zastava-runtime + annotations: + summary: "Admission webhook denies exceeding threshold" + description: "Webhook is denying more than 20 pod admissions per minute. Confirm policy verdicts and consider fail-open exception for impacted namespaces." diff --git a/docs/policy/dsl.md b/docs/policy/dsl.md index 7d541807..0cde9e5e 100644 --- a/docs/policy/dsl.md +++ b/docs/policy/dsl.md @@ -1,294 +1,294 @@ -# Stella Policy DSL (`stella-dsl@1`) - -> **Audience:** Policy authors, reviewers, and tooling engineers building lint/compile flows for the Policy Engine v2 rollout (Sprint 20). - -This document specifies the `stella-dsl@1` grammar, semantics, and guardrails used by Stella Ops to transform SBOM facts, Concelier advisories, and Excititor VEX statements into effective findings. Use it with the [Policy Engine Overview](overview.md) for architectural context and the upcoming lifecycle/run guides for operational workflows. - ---- - -## 1 · Design Goals - -- **Deterministic:** Same policy + same inputs ⇒ identical findings on every machine. -- **Declarative:** No arbitrary loops, network calls, or clock access. -- **Explainable:** Every decision records the rule, inputs, and rationale in the explain trace. -- **Lean authoring:** Common precedence, severity, and suppression patterns are first-class. -- **Offline-friendly:** Grammar and built-ins avoid cloud dependencies, run the same in sealed deployments. - ---- - -## 2 · Document Structure - -Policy packs ship one or more `.stella` files.
Each file contains exactly one `policy` block: - -```dsl -policy "Default Org Policy" syntax "stella-dsl@1" { - metadata { - description = "Baseline severity + VEX precedence" - tags = ["baseline","vex"] - } - - profile severity { - map vendor_weight { - source "GHSA" => +0.5 - source "OSV" => +0.0 - source "VendorX" => -0.2 - } - env exposure_adjustments { - if env.runtime == "serverless" then -0.5 - if env.exposure == "internal-only" then -1.0 - } - } - - rule vex_precedence priority 10 { - when vex.any(status in ["not_affected","fixed"]) - and vex.justification in ["component_not_present","vulnerable_code_not_present"] - then status := vex.status - because "Strong vendor justification prevails"; - } -} -``` - -High-level layout: - -| Section | Purpose | -|---------|---------| -| `metadata` | Optional descriptive fields surfaced in Console/CLI. | -| `imports` | Reserved for future reuse (not yet implemented in `@1`). | -| `profile` blocks | Declarative scoring modifiers (`severity`, `trust`, `reachability`). | -| `rule` blocks | When/then logic applied to each `(component, advisory, vex[])` tuple. | -| `settings` | Optional evaluation toggles (sampling, default status overrides). | - ---- - -## 3 · Lexical Rules - -- **Case sensitivity:** Keywords are lowercase; identifiers are case-sensitive. -- **Whitespace:** Space, tab, newline act as separators. Indentation is cosmetic. -- **Comments:** `// inline` and `/* block */` are ignored. -- **Literals:** - - Strings use double quotes (`"text"`); escape with `\"`, `\n`, `\t`. - - Numbers are decimal; suffix `%` allowed for percentage weights (`-2.5%` becomes `-0.025`). - - Booleans: `true`, `false`. - - Lists: `[1, 2, 3]`, `["a","b"]`. -- **Identifiers:** Start with letter or underscore, continue with letters, digits, `_`. -- **Operators:** `=`, `==`, `!=`, `<`, `<=`, `>`, `>=`, `in`, `not in`, `and`, `or`, `not`, `:=`. 
- ---- - -## 4 · Grammar (EBNF) - -```ebnf -policy = "policy", string, "syntax", string, "{", policy-body, "}" ; -policy-body = { metadata | profile | settings | rule | helper } ; - -metadata = "metadata", "{", { meta-entry }, "}" ; -meta-entry = identifier, "=", (string | list) ; - -profile = "profile", identifier, "{", { profile-item }, "}" ; -profile-item= map | env-map | scalar ; -map = "map", identifier, "{", { "source", string, "=>", number, ";" }, "}" ; -env-map = "env", identifier, "{", { "if", expression, "then", number, ";" }, "}" ; -scalar = identifier, "=", (number | string | list), ";" ; - -settings = "settings", "{", { setting-entry }, "}" ; -setting-entry = identifier, "=", (number | string | boolean), ";" ; - -rule = "rule", identifier, [ "priority", integer ], "{", - "when", predicate, - { "and", predicate }, - "then", { action }, - [ "else", { action } ], - [ "because", string ], - "}" ; - -predicate = expression ; -expression = term, { ("and" | "or"), term } ; -term = ["not"], factor ; -factor = comparison | membership | function-call | literal | identifier | "(" expression ")" ; -comparison = value, comparator, value ; -membership = value, ("in" | "not in"), list ; -value = identifier | literal | function-call | field-access ; -field-access= identifier, { ".", identifier | "[" literal "]" } ; -function-call = identifier, "(", [ arg-list ], ")" ; -arg-list = expression, { ",", expression } ; -literal = string | number | boolean | list ; - -action = assignment | ignore | escalate | require | warn | defer | annotate ; -assignment = target, ":=", expression, ";" ; -target = identifier, { ".", identifier } ; -ignore = "ignore", [ "until", expression ], [ "because", string ], ";" ; -escalate = "escalate", [ "to", expression ], [ "when", expression ], ";" ; -require = "requireVex", "{", require-fields, "}", ";" ; -warn = "warn", [ "message", string ], ";" ; -defer = "defer", [ "until", expression ], ";" ; -annotate = "annotate", identifier, ":=", expression, ";" ; -``` - -Notes: - -- `helper` is reserved for shared calculcations (not yet implemented in `@1`). -- `else` branch executes only if `when` predicates evaluate truthy **and** no prior rule earlier in priority handled the tuple. -- Semicolons inside rule bodies are optional when each clause is on its own line; the compiler emits canonical semicolons in IR. - ---- - -## 5 · Evaluation Context - -Within predicates and actions you may reference the following namespaces: - -| Namespace | Fields | Description | -|-----------|--------|-------------| -| `sbom` | `purl`, `name`, `version`, `licenses`, `layerDigest`, `tags`, `usedByEntrypoint` | Component metadata from Scanner. | -| `advisory` | `id`, `source`, `aliases`, `severity`, `cvss`, `publishedAt`, `modifiedAt`, `content.raw` | Canonical Concelier advisory view. | -| `vex` | `status`, `justification`, `statementId`, `timestamp`, `scope` | Current VEX statement when iterating; aggregator helpers available. | -| `vex.any(...)`, `vex.all(...)`, `vex.count(...)` | Functions operating over all matching statements. | -| `run` | `policyId`, `policyVersion`, `tenant`, `timestamp` | Metadata for explain annotations. | -| `env` | Arbitrary key/value pairs injected per run (e.g., `environment`, `runtime`). | -| `telemetry` | Optional reachability signals; missing fields evaluate to `unknown`. | -| `profile.` | Values computed inside profile blocks (maps, scalars). 
| - -Missing fields evaluate to `null`, which is falsey in boolean context and propagates through comparisons unless explicitly checked. - ---- - -## 6 · Built-ins (v1) - -| Function / Property | Signature | Description | -|---------------------|-----------|-------------| -| `normalize_cvss(advisory)` | `Advisory → SeverityScalar` | Parses `advisory.content.raw` for CVSS data; falls back to policy maps. | -| `cvss(score, vector)` | `double × string → SeverityScalar` | Constructs a severity object manually. | -| `severity_band(value)` | `string → SeverityBand` | Normalises strings like `"critical"`, `"medium"`. | -| `risk_score(base, modifiers...)` | Variadic | Multiplies numeric modifiers (severity × trust × reachability). | -| `vex.any(predicate)` | `(Statement → bool) → bool` | `true` if any statement satisfies predicate. | -| `vex.all(predicate)` | `(Statement → bool) → bool` | `true` if all statements satisfy predicate. | -| `vex.latest()` | `→ Statement` | Lexicographically newest statement. | -| `advisory.has_tag(tag)` | `string → bool` | Checks advisory metadata tags. | -| `advisory.matches(pattern)` | `string → bool` | Glob match against advisory identifiers. | -| `sbom.has_tag(tag)` | `string → bool` | Uses SBOM inventory tags (usage vs inventory). | -| `exists(expression)` | `→ bool` | `true` when value is non-null/empty. | -| `coalesce(a, b, ...)` | `→ value` | First non-null argument. | -| `days_between(dateA, dateB)` | `→ int` | Absolute day difference (UTC). | -| `percent_of(part, whole)` | `→ double` | Fractions for scoring adjustments. | -| `lowercase(text)` | `string → string` | Normalises casing deterministically (InvariantCulture). | - -All built-ins are pure; if inputs are null the result is null unless otherwise noted. - ---- - -## 7 · Rule Semantics - -1. **Ordering:** Rules execute in ascending `priority`. When priorities tie, lexical order defines precedence. -2. **Short-circuit:** Once a rule sets `status`, subsequent rules only execute if they use `combine`. Use this sparingly to avoid ambiguity. -3. **Actions:** - - `status := ` – Allowed values: `affected`, `not_affected`, `fixed`, `suppressed`, `under_investigation`, `escalated`. - - `severity := ` – Either from `normalize_cvss`, `cvss`, or numeric map; ensures `normalized` and `score`. - - `ignore until ` – Temporarily treats finding as suppressed until timestamp; recorded in explain trace. - - `warn message ""` – Adds warn verdict and deducts `warnPenalty`. - - `escalate to severity_band("critical") when condition` – Forces verdict severity upward when condition true. - - `requireVex { vendors = ["VendorX"], justifications = ["component_not_present"] }` – Fails evaluation if matching VEX evidence absent. - - `annotate reason := "text"` – Adds free-form key/value pairs to explain payload. -4. **Because clause:** Mandatory for actions changing status or severity; captured verbatim in explain traces. - ---- - -## 8 · Scoping Helpers - -- **Maps:** Use `profile severity { map vendor_weight { ... } }` to declare additive factors. Retrieve with `profile.severity.vendor_weight["GHSA"]`. -- **Environment overrides:** `env` profiles allow conditional adjustments based on runtime metadata. -- **Tenancy:** `run.tenant` ensures policies remain tenant-aware; avoid hardcoding single-tenant IDs. -- **Default values:** Use `settings { default_status = "affected"; }` to override built-in defaults. 
- ---- - -## 9 · Examples - -### 9.1 Baseline Severity Normalisation - -```dsl -rule advisory_normalization { - when advisory.source in ["GHSA","OSV"] - then severity := normalize_cvss(advisory) - because "Align vendor severity to CVSS baseline"; -} -``` - -### 9.2 VEX Override with Quiet Mode - -```dsl -rule vex_strong_claim priority 5 { - when vex.any(status == "not_affected") - and vex.justification in ["component_not_present","vulnerable_code_not_present"] - then status := vex.status - annotate winning_statement := vex.latest().statementId - warn message "VEX override applied" - because "Strong VEX justification"; -} -``` - -### 9.3 Environment-Specific Escalation - -```dsl -rule internet_exposed_guard { - when env.exposure == "internet" - and severity.normalized >= "High" - then escalate to severity_band("Critical") - because "Internet-exposed assets require critical posture"; -} -``` - -### 9.4 Anti-pattern (flagged by linter) - -```dsl -rule catch_all { - when true - then status := "suppressed" - because "Suppress everything" // ❌ Fails lint: unbounded suppression -} -``` - ---- - -## 10 · Validation & Tooling - -- `stella policy lint` ensures: - - Grammar compliance and canonical formatting. - - Static determinism guard (no forbidden namespaces). - - Anti-pattern detection (e.g., unconditional suppression, missing `because`). -- `stella policy compile` emits IR (`.stella.ir.json`) and SHA-256 digest used in `policy_runs`. -- CI pipelines (see `DEVOPS-POLICY-20-001`) compile sample packs and fail on lint violations. -- Simulation harnesses (`stella policy simulate`) highlight provided/queried fields so policy authors affirm assumptions before promotion. - ---- - -## 11 · Anti-patterns & Mitigations - -| Anti-pattern | Risk | Mitigation | -|--------------|------|------------| -| Catch-all suppress/ignore without scope | Masks all findings | Linter blocks rules with `when true` unless `priority` > 1000 and justification includes remediation plan. | -| Comparing strings with inconsistent casing | Missed matches | Wrap comparisons in `lowercase(value)` to align casing or normalise metadata during ingest. | -| Referencing `telemetry` without fallback | Null propagation | Wrap access in `exists(telemetry.reachability)`. | -| Hardcoding tenant IDs | Breaks multi-tenant | Prefer `env.tenantTag` or metadata-sourced predicates. | -| Duplicated rule names | Explain trace ambiguity | Compiler enforces unique `rule` identifiers within a policy. | - ---- - -## 12 · Versioning & Compatibility - -- `syntax "stella-dsl@1"` is mandatory. -- Future revisions (`@2`, …) will be additive; existing packs continue to compile with their declared version. -- The compiler canonicalises documents (sorted keys, normalised whitespace) before hashing to ensure reproducibility. - ---- - -## 13 · Compliance Checklist - -- [ ] **Grammar validated:** Policy compiles with `stella policy lint` and matches `syntax "stella-dsl@1"`. -- [ ] **Deterministic constructs only:** No use of forbidden namespaces (`DateTime.Now`, `Guid.NewGuid`, external services). -- [ ] **Rationales present:** Every status/severity change includes a `because` clause or `annotate` entry. -- [ ] **Scoped suppressions:** Rules that ignore/suppress findings reference explicit components, vendors, or VEX justifications. -- [ ] **Explain fields verified:** `annotate` keys align with Console/CLI expectations (documented in upcoming lifecycle guide). 
-- [ ] **Offline parity tested:** Policy pack simulated in sealed mode (`--sealed`) to confirm absence of network dependencies. - ---- - -*Last updated: 2025-10-26 (Sprint 20).* +# Stella Policy DSL (`stella-dsl@1`) + +> **Audience:** Policy authors, reviewers, and tooling engineers building lint/compile flows for the Policy Engine v2 rollout (Sprint 20). + +This document specifies the `stella-dsl@1` grammar, semantics, and guardrails used by Stella Ops to transform SBOM facts, Concelier advisories, and Excititor VEX statements into effective findings. Use it with the [Policy Engine Overview](overview.md) for architectural context and the upcoming lifecycle/run guides for operational workflows. + +--- + +## 1 · Design Goals + +- **Deterministic:** Same policy + same inputs ⇒ identical findings on every machine. +- **Declarative:** No arbitrary loops, network calls, or clock access. +- **Explainable:** Every decision records the rule, inputs, and rationale in the explain trace. +- **Lean authoring:** Common precedence, severity, and suppression patterns are first-class. +- **Offline-friendly:** Grammar and built-ins avoid cloud dependencies, run the same in sealed deployments. + +--- + +## 2 · Document Structure + +Policy packs ship one or more `.stella` files. Each file contains exactly one `policy` block: + +```dsl +policy "Default Org Policy" syntax "stella-dsl@1" { + metadata { + description = "Baseline severity + VEX precedence" + tags = ["baseline","vex"] + } + + profile severity { + map vendor_weight { + source "GHSA" => +0.5 + source "OSV" => +0.0 + source "VendorX" => -0.2 + } + env exposure_adjustments { + if env.runtime == "serverless" then -0.5 + if env.exposure == "internal-only" then -1.0 + } + } + + rule vex_precedence priority 10 { + when vex.any(status in ["not_affected","fixed"]) + and vex.justification in ["component_not_present","vulnerable_code_not_present"] + then status := vex.status + because "Strong vendor justification prevails"; + } +} +``` + +High-level layout: + +| Section | Purpose | +|---------|---------| +| `metadata` | Optional descriptive fields surfaced in Console/CLI. | +| `imports` | Reserved for future reuse (not yet implemented in `@1`). | +| `profile` blocks | Declarative scoring modifiers (`severity`, `trust`, `reachability`). | +| `rule` blocks | When/then logic applied to each `(component, advisory, vex[])` tuple. | +| `settings` | Optional evaluation toggles (sampling, default status overrides). | + +--- + +## 3 · Lexical Rules + +- **Case sensitivity:** Keywords are lowercase; identifiers are case-sensitive. +- **Whitespace:** Space, tab, newline act as separators. Indentation is cosmetic. +- **Comments:** `// inline` and `/* block */` are ignored. +- **Literals:** + - Strings use double quotes (`"text"`); escape with `\"`, `\n`, `\t`. + - Numbers are decimal; suffix `%` allowed for percentage weights (`-2.5%` becomes `-0.025`). + - Booleans: `true`, `false`. + - Lists: `[1, 2, 3]`, `["a","b"]`. +- **Identifiers:** Start with letter or underscore, continue with letters, digits, `_`. +- **Operators:** `=`, `==`, `!=`, `<`, `<=`, `>`, `>=`, `in`, `not in`, `and`, `or`, `not`, `:=`. 
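+
+The fragment below is purely illustrative (the rule name, priority, annotation key, and `withdrawn` tag are not part of any shipped pack); it exercises the lexical elements above — line and block comments, string/boolean/list literals, membership, negation, and the `:=` assignment operator:
+
+```dsl
+/* Block comment: everything here is a lexical demo, not policy guidance */
+rule lexical_demo priority 100 {
+  when advisory.source in ["GHSA","OSV"]        // list literal + membership
+   and sbom.usedByEntrypoint == true            // boolean literal + comparison
+   and not advisory.has_tag("withdrawn")        // negation + string literal
+  then annotate lexical_demo := "matched"       // `:=` assignment
+  because "Demonstrates §3 literals and operators";
+}
+```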
+ +--- + +## 4 · Grammar (EBNF) + +```ebnf +policy = "policy", string, "syntax", string, "{", policy-body, "}" ; +policy-body = { metadata | profile | settings | rule | helper } ; + +metadata = "metadata", "{", { meta-entry }, "}" ; +meta-entry = identifier, "=", (string | list) ; + +profile = "profile", identifier, "{", { profile-item }, "}" ; +profile-item= map | env-map | scalar ; +map = "map", identifier, "{", { "source", string, "=>", number, ";" }, "}" ; +env-map = "env", identifier, "{", { "if", expression, "then", number, ";" }, "}" ; +scalar = identifier, "=", (number | string | list), ";" ; + +settings = "settings", "{", { setting-entry }, "}" ; +setting-entry = identifier, "=", (number | string | boolean), ";" ; + +rule = "rule", identifier, [ "priority", integer ], "{", + "when", predicate, + { "and", predicate }, + "then", { action }, + [ "else", { action } ], + [ "because", string ], + "}" ; + +predicate = expression ; +expression = term, { ("and" | "or"), term } ; +term = ["not"], factor ; +factor = comparison | membership | function-call | literal | identifier | "(" expression ")" ; +comparison = value, comparator, value ; +membership = value, ("in" | "not in"), list ; +value = identifier | literal | function-call | field-access ; +field-access= identifier, { ".", identifier | "[" literal "]" } ; +function-call = identifier, "(", [ arg-list ], ")" ; +arg-list = expression, { ",", expression } ; +literal = string | number | boolean | list ; + +action = assignment | ignore | escalate | require | warn | defer | annotate ; +assignment = target, ":=", expression, ";" ; +target = identifier, { ".", identifier } ; +ignore = "ignore", [ "until", expression ], [ "because", string ], ";" ; +escalate = "escalate", [ "to", expression ], [ "when", expression ], ";" ; +require = "requireVex", "{", require-fields, "}", ";" ; +warn = "warn", [ "message", string ], ";" ; +defer = "defer", [ "until", expression ], ";" ; +annotate = "annotate", identifier, ":=", expression, ";" ; +``` + +Notes: + +- `helper` is reserved for shared calculations (not yet implemented in `@1`). +- `else` branch executes only if `when` predicates evaluate truthy **and** no rule earlier in the priority order has already handled the tuple. +- Semicolons inside rule bodies are optional when each clause is on its own line; the compiler emits canonical semicolons in IR. + +--- + +## 5 · Evaluation Context + +Within predicates and actions you may reference the following namespaces: + +| Namespace | Fields | Description | +|-----------|--------|-------------| +| `sbom` | `purl`, `name`, `version`, `licenses`, `layerDigest`, `tags`, `usedByEntrypoint` | Component metadata from Scanner. | +| `advisory` | `id`, `source`, `aliases`, `severity`, `cvss`, `publishedAt`, `modifiedAt`, `content.raw` | Canonical Concelier advisory view. | +| `vex` | `status`, `justification`, `statementId`, `timestamp`, `scope` | Current VEX statement when iterating; aggregator helpers available. | +| `vex.any(...)`, `vex.all(...)`, `vex.count(...)` | Functions operating over all matching statements. | +| `run` | `policyId`, `policyVersion`, `tenant`, `timestamp` | Metadata for explain annotations. | +| `env` | Arbitrary key/value pairs injected per run (e.g., `environment`, `runtime`). | +| `telemetry` | Optional reachability signals; missing fields evaluate to `unknown`. | +| `profile.` | Values computed inside profile blocks (maps, scalars).
| + +Missing fields evaluate to `null`, which is falsey in boolean context and propagates through comparisons unless explicitly checked. + +--- + +## 6 · Built-ins (v1) + +| Function / Property | Signature | Description | +|---------------------|-----------|-------------| +| `normalize_cvss(advisory)` | `Advisory → SeverityScalar` | Parses `advisory.content.raw` for CVSS data; falls back to policy maps. | +| `cvss(score, vector)` | `double × string → SeverityScalar` | Constructs a severity object manually. | +| `severity_band(value)` | `string → SeverityBand` | Normalises strings like `"critical"`, `"medium"`. | +| `risk_score(base, modifiers...)` | Variadic | Multiplies numeric modifiers (severity × trust × reachability). | +| `vex.any(predicate)` | `(Statement → bool) → bool` | `true` if any statement satisfies predicate. | +| `vex.all(predicate)` | `(Statement → bool) → bool` | `true` if all statements satisfy predicate. | +| `vex.latest()` | `→ Statement` | Lexicographically newest statement. | +| `advisory.has_tag(tag)` | `string → bool` | Checks advisory metadata tags. | +| `advisory.matches(pattern)` | `string → bool` | Glob match against advisory identifiers. | +| `sbom.has_tag(tag)` | `string → bool` | Uses SBOM inventory tags (usage vs inventory). | +| `exists(expression)` | `→ bool` | `true` when value is non-null/empty. | +| `coalesce(a, b, ...)` | `→ value` | First non-null argument. | +| `days_between(dateA, dateB)` | `→ int` | Absolute day difference (UTC). | +| `percent_of(part, whole)` | `→ double` | Fractions for scoring adjustments. | +| `lowercase(text)` | `string → string` | Normalises casing deterministically (InvariantCulture). | + +All built-ins are pure; if inputs are null the result is null unless otherwise noted. + +--- + +## 7 · Rule Semantics + +1. **Ordering:** Rules execute in ascending `priority`. When priorities tie, lexical order defines precedence. +2. **Short-circuit:** Once a rule sets `status`, subsequent rules only execute if they use `combine`. Use this sparingly to avoid ambiguity. +3. **Actions:** + - `status := ` – Allowed values: `affected`, `not_affected`, `fixed`, `suppressed`, `under_investigation`, `escalated`. + - `severity := ` – Either from `normalize_cvss`, `cvss`, or numeric map; ensures `normalized` and `score`. + - `ignore until ` – Temporarily treats finding as suppressed until timestamp; recorded in explain trace. + - `warn message ""` – Adds warn verdict and deducts `warnPenalty`. + - `escalate to severity_band("critical") when condition` – Forces verdict severity upward when condition true. + - `requireVex { vendors = ["VendorX"], justifications = ["component_not_present"] }` – Fails evaluation if matching VEX evidence absent. + - `annotate reason := "text"` – Adds free-form key/value pairs to explain payload. +4. **Because clause:** Mandatory for actions changing status or severity; captured verbatim in explain traces. + +--- + +## 8 · Scoping Helpers + +- **Maps:** Use `profile severity { map vendor_weight { ... } }` to declare additive factors. Retrieve with `profile.severity.vendor_weight["GHSA"]`. +- **Environment overrides:** `env` profiles allow conditional adjustments based on runtime metadata. +- **Tenancy:** `run.tenant` ensures policies remain tenant-aware; avoid hardcoding single-tenant IDs. +- **Default values:** Use `settings { default_status = "affected"; }` to override built-in defaults. 
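+
+A compact sketch combining these helpers (a fragment to place inside a `policy` block; the rule name and annotation key are illustrative, `env.exposure` is assumed to be injected at run time, and `vendor_weight` refers to the profile map from §2):
+
+```dsl
+settings {
+  default_status = "affected";
+}
+
+rule internal_vendor_weight priority 40 {
+  when env.exposure == "internal-only"
+   and advisory.source == "GHSA"
+  then annotate applied_vendor_weight := profile.severity.vendor_weight["GHSA"]
+  because "Record the GHSA weighting applied to internal-only workloads";
+}
+```
+
+Run `stella policy lint` (and `stella policy compile`, see §10) on the pack to verify the fragment before promotion.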
+ +--- + +## 9 · Examples + +### 9.1 Baseline Severity Normalisation + +```dsl +rule advisory_normalization { + when advisory.source in ["GHSA","OSV"] + then severity := normalize_cvss(advisory) + because "Align vendor severity to CVSS baseline"; +} +``` + +### 9.2 VEX Override with Quiet Mode + +```dsl +rule vex_strong_claim priority 5 { + when vex.any(status == "not_affected") + and vex.justification in ["component_not_present","vulnerable_code_not_present"] + then status := vex.status + annotate winning_statement := vex.latest().statementId + warn message "VEX override applied" + because "Strong VEX justification"; +} +``` + +### 9.3 Environment-Specific Escalation + +```dsl +rule internet_exposed_guard { + when env.exposure == "internet" + and severity.normalized >= "High" + then escalate to severity_band("Critical") + because "Internet-exposed assets require critical posture"; +} +``` + +### 9.4 Anti-pattern (flagged by linter) + +```dsl +rule catch_all { + when true + then status := "suppressed" + because "Suppress everything" // ❌ Fails lint: unbounded suppression +} +``` + +--- + +## 10 · Validation & Tooling + +- `stella policy lint` ensures: + - Grammar compliance and canonical formatting. + - Static determinism guard (no forbidden namespaces). + - Anti-pattern detection (e.g., unconditional suppression, missing `because`). +- `stella policy compile` emits IR (`.stella.ir.json`) and SHA-256 digest used in `policy_runs`. +- CI pipelines (see `DEVOPS-POLICY-20-001`) compile sample packs and fail on lint violations. +- Simulation harnesses (`stella policy simulate`) highlight provided/queried fields so policy authors affirm assumptions before promotion. + +--- + +## 11 · Anti-patterns & Mitigations + +| Anti-pattern | Risk | Mitigation | +|--------------|------|------------| +| Catch-all suppress/ignore without scope | Masks all findings | Linter blocks rules with `when true` unless `priority` > 1000 and justification includes remediation plan. | +| Comparing strings with inconsistent casing | Missed matches | Wrap comparisons in `lowercase(value)` to align casing or normalise metadata during ingest. | +| Referencing `telemetry` without fallback | Null propagation | Wrap access in `exists(telemetry.reachability)`. | +| Hardcoding tenant IDs | Breaks multi-tenant | Prefer `env.tenantTag` or metadata-sourced predicates. | +| Duplicated rule names | Explain trace ambiguity | Compiler enforces unique `rule` identifiers within a policy. | + +--- + +## 12 · Versioning & Compatibility + +- `syntax "stella-dsl@1"` is mandatory. +- Future revisions (`@2`, …) will be additive; existing packs continue to compile with their declared version. +- The compiler canonicalises documents (sorted keys, normalised whitespace) before hashing to ensure reproducibility. + +--- + +## 13 · Compliance Checklist + +- [ ] **Grammar validated:** Policy compiles with `stella policy lint` and matches `syntax "stella-dsl@1"`. +- [ ] **Deterministic constructs only:** No use of forbidden namespaces (`DateTime.Now`, `Guid.NewGuid`, external services). +- [ ] **Rationales present:** Every status/severity change includes a `because` clause or `annotate` entry. +- [ ] **Scoped suppressions:** Rules that ignore/suppress findings reference explicit components, vendors, or VEX justifications. +- [ ] **Explain fields verified:** `annotate` keys align with Console/CLI expectations (documented in upcoming lifecycle guide). 
+- [ ] **Offline parity tested:** Policy pack simulated in sealed mode (`--sealed`) to confirm absence of network dependencies. + +--- + +*Last updated: 2025-10-26 (Sprint 20).* diff --git a/docs/policy/exception-effects.md b/docs/policy/exception-effects.md index d450e2f8..ec9b808a 100644 --- a/docs/policy/exception-effects.md +++ b/docs/policy/exception-effects.md @@ -32,7 +32,7 @@ Effects are validated at bind time (`PolicyBinder`), while instances are ingeste | `maxDurationDays` | — | Soft limit for temporary waivers. | Must be > 0 when provided. | | `description` | — | Rich-text rationale. | Displayed in approvals centre (optional). | -Authoring invalid combinations returns structured errors with JSON paths, preventing packs from compiling (see `src/StellaOps.Policy.Tests/PolicyBinderTests.cs:33`). Routing templates additionally declare `authorityRouteId` and `requireMfa` flags for governance routing. +Authoring invalid combinations returns structured errors with JSON paths, preventing packs from compiling (see `src/Policy/__Tests/StellaOps.Policy.Tests/PolicyBinderTests.cs:33`). Routing templates additionally declare `authorityRouteId` and `requireMfa` flags for governance routing. --- @@ -68,7 +68,7 @@ Only one exception effect is applied per finding. Evaluation proceeds as follows - `tags` ⇒ `100 + (count × 5)` 4. Highest score wins. Ties fall back to the newest `createdAt`, then lexical `id` (stable sorting). -These rules guarantee deterministic selection even when multiple waivers overlap. See `src/StellaOps.Policy.Engine.Tests/PolicyEvaluatorTests.cs:209` for tie-break coverage. +These rules guarantee deterministic selection even when multiple waivers overlap. See `src/Policy/__Tests/StellaOps.Policy.Engine.Tests/PolicyEvaluatorTests.cs:209` for tie-break coverage. --- @@ -81,7 +81,7 @@ These rules guarantee deterministic selection even when multiple waivers overlap | `downgrade` | No change. | Sets severity to configured `downgradeSeverity`. | `exception.severity` annotation. | | `requireControl` | No change. | No change. | Adds warning `Exception '' requires control ''`. Annotation `exception.requiredControl`. | -All effects stamp shared annotations: `exception.id`, `exception.effectId`, `exception.effectType`, optional `exception.effectName`, optional `exception.routingTemplate`, plus `exception.maxDurationDays`. Instance metadata is surfaced both in annotations (`exception.meta.`) and the structured `AppliedException.Metadata` payload for downstream APIs. Behaviour is validated by unit tests (`src/StellaOps.Policy.Engine.Tests/PolicyEvaluatorTests.cs:130` & `src/StellaOps.Policy.Engine.Tests/PolicyEvaluatorTests.cs:169`). +All effects stamp shared annotations: `exception.id`, `exception.effectId`, `exception.effectType`, optional `exception.effectName`, optional `exception.routingTemplate`, plus `exception.maxDurationDays`. Instance metadata is surfaced both in annotations (`exception.meta.`) and the structured `AppliedException.Metadata` payload for downstream APIs. Behaviour is validated by unit tests (`src/Policy/__Tests/StellaOps.Policy.Engine.Tests/PolicyEvaluatorTests.cs:130` & `src/Policy/__Tests/StellaOps.Policy.Engine.Tests/PolicyEvaluatorTests.cs:169`). --- @@ -132,9 +132,9 @@ Example verdict excerpt (JSON): ## 8 · Testing References -- `src/StellaOps.Policy.Tests/PolicyBinderTests.cs:33` – Validates schema rules for defining effects, routing templates, and downgrade guardrails. 
-- `src/StellaOps.Policy.Engine.Tests/PolicyEvaluatorTests.cs:130` – Covers suppression, downgrade, and metadata propagation. -- `src/StellaOps.Policy.Engine.Tests/PolicyEvaluatorTests.cs:209` – Confirms specificity ordering and metadata forwarding for competing exceptions. +- `src/Policy/__Tests/StellaOps.Policy.Tests/PolicyBinderTests.cs:33` – Validates schema rules for defining effects, routing templates, and downgrade guardrails. +- `src/Policy/__Tests/StellaOps.Policy.Engine.Tests/PolicyEvaluatorTests.cs:130` – Covers suppression, downgrade, and metadata propagation. +- `src/Policy/__Tests/StellaOps.Policy.Engine.Tests/PolicyEvaluatorTests.cs:209` – Confirms specificity ordering and metadata forwarding for competing exceptions. --- diff --git a/docs/policy/gateway.md b/docs/policy/gateway.md index afa4234c..03d4ff73 100644 --- a/docs/policy/gateway.md +++ b/docs/policy/gateway.md @@ -1,124 +1,124 @@ -# Policy Gateway - -> **Delivery scope:** `StellaOps.Policy.Gateway` minimal API service fronting Policy Engine pack CRUD + activation endpoints for UI/CLI clients. Sender-constrained with DPoP and tenant headers, suitable for online and Offline Kit deployments. - -## 1 · Responsibilities - -- Proxy policy pack CRUD and activation requests to Policy Engine while enforcing scope policies (`policy:read`, `policy:author`, `policy:review`, `policy:operate`, `policy:activate`). -- Normalise responses (DTO + `ProblemDetails`) so Console, CLI, and automation receive consistent payloads. -- Guard activation actions with structured logging and metrics so approvals are auditable. -- Support dual auth modes: - - Forwarded caller tokens (Console/CLI) with DPoP proofs + `X-Stella-Tenant` header. - - Gateway client credentials (DPoP) for service automation or Offline Kit flows when no caller token is present. - -## 2 · Endpoints - -| Route | Method | Description | Required scope(s) | -|-------|--------|-------------|-------------------| -| `/api/policy/packs` | `GET` | List policy packs and revisions for the active tenant. | `policy:read` | -| `/api/policy/packs` | `POST` | Create a policy pack shell or upsert display metadata. | `policy:author` | -| `/api/policy/packs/{packId}/revisions` | `POST` | Create or update a policy revision (draft/approved). | `policy:author` | -| `/api/policy/packs/{packId}/revisions/{version}:activate` | `POST` | Activate a revision, enforcing single/two-person approvals. | `policy:operate`, `policy:activate` | - -### Response shapes - -- Successful responses return camel-case DTOs matching `PolicyPackDto`, `PolicyRevisionDto`, or `PolicyRevisionActivationDto` as described in the Policy Engine API doc (`/docs/api/policy.md`). -- Errors always return RFC 7807 `ProblemDetails` with deterministic fields (`title`, `detail`, `status`). Missing caller credentials now surface `401` with `"Upstream authorization missing"` detail. - -## 3 · Authentication & headers - -| Header | Source | Notes | -|--------|--------|-------| -| `Authorization` | Forwarded caller token *or* gateway client credentials. | Caller tokens must include tenant scope; gateway tokens default to `DPoP` scheme. | -| `DPoP` | Caller or gateway. | Required when Authority mandates proof-of-possession (default). Generated per request; gateway keeps ES256/ES384 key material under `etc/policy-gateway-dpop.pem`. | -| `X-Stella-Tenant` | Caller | Tenant isolation header. Forwarded unchanged; gateway automation omits it. 
| - -Gateway client credentials are configured in `policy-gateway.yaml`: - -```yaml -policyEngine: - baseAddress: "https://policy-engine.internal" - audience: "api://policy-engine" - clientCredentials: - enabled: true - clientId: "policy-gateway" - clientSecret: "" - scopes: - - policy:read - - policy:author - - policy:review - - policy:operate - - policy:activate - dpop: - enabled: true - keyPath: "../etc/policy-gateway-dpop.pem" - algorithm: "ES256" -``` - -> 🔐 **DPoP key** – store the private key alongside Offline Kit secrets; rotate it whenever the gateway identity or Authority configuration changes. - -## 4 · Metrics & logging - -All activation calls emit: - -- `policy_gateway_activation_requests_total{outcome,source}` – counter labelled with `outcome` (`activated`, `pending_second_approval`, `already_active`, `bad_request`, `not_found`, `unauthorized`, `forbidden`, `error`) and `source` (`caller`, `service`). -- `policy_gateway_activation_latency_ms{outcome,source}` – histogram measuring proxy latency. - -Structured logs (category `StellaOps.Policy.Gateway.Activation`) include `PackId`, `Version`, `Outcome`, `Source`, and upstream status code for audit trails. - -## 5 · Sample `curl` workflows - -Assuming you already obtained a DPoP-bound access token (`$TOKEN`) for tenant `acme`: - -```bash -# Generate a DPoP proof for GET via the CLI helper -DPoP_PROOF=$(stella auth dpop proof \ - --htu https://gateway.example.com/api/policy/packs \ - --htm GET \ - --token "$TOKEN") - -curl -sS https://gateway.example.com/api/policy/packs \ - -H "Authorization: DPoP $TOKEN" \ - -H "DPoP: $DPoP_PROOF" \ - -H "X-Stella-Tenant: acme" - -# Draft a new revision -DPoP_PROOF=$(stella auth dpop proof \ - --htu https://gateway.example.com/api/policy/packs/policy.core/revisions \ - --htm POST \ - --token "$TOKEN") - -curl -sS https://gateway.example.com/api/policy/packs/policy.core/revisions \ - -H "Authorization: DPoP $TOKEN" \ - -H "DPoP: $DPoP_PROOF" \ - -H "X-Stella-Tenant: acme" \ - -H "Content-Type: application/json" \ - -d '{"version":5,"requiresTwoPersonApproval":true,"initialStatus":"Draft"}' - -# Activate revision 5 (returns 202 when awaiting the second approver) -DPoP_PROOF=$(stella auth dpop proof \ - --htu https://gateway.example.com/api/policy/packs/policy.core/revisions/5:activate \ - --htm POST \ - --token "$TOKEN") - -curl -sS https://gateway.example.com/api/policy/packs/policy.core/revisions/5:activate \ - -H "Authorization: DPoP $TOKEN" \ - -H "DPoP: $DPoP_PROOF" \ - -H "X-Stella-Tenant: acme" \ - -H "Content-Type: application/json" \ - -d '{"comment":"Rollout baseline"}' -``` - -For air-gapped environments, bundle `policy-gateway.yaml` and the DPoP key in the Offline Kit (see `/docs/24_OFFLINE_KIT.md` §5.7). - -> **DPoP proof helper:** Use `stella auth dpop proof` to mint sender-constrained proofs locally. The command accepts `--htu`, `--htm`, and `--token` arguments and emits a ready-to-use header value. Teams maintaining alternate tooling (for example, `scripts/make-dpop.sh`) can substitute it as long as the inputs and output match the CLI behaviour. - -## 6 · Offline Kit guidance - -- Include `policy-gateway.yaml.sample` and the resolved runtime config in the Offline Kit’s `config/` tree. -- Place the DPoP private key under `secrets/policy-gateway-dpop.pem` with restricted permissions; document rotation steps in the manifest. -- When building verification scripts, use the gateway endpoints above instead of hitting Policy Engine directly. 
The Offline Kit validator now expects `policy_gateway_activation_requests_total` metrics in the Prometheus snapshot. - -## 7 · Change log - -- **2025-10-27 – Sprint 18.5**: Initial gateway bootstrap + activation metrics + DPoP client credentials. +# Policy Gateway + +> **Delivery scope:** `StellaOps.Policy.Gateway` minimal API service fronting Policy Engine pack CRUD + activation endpoints for UI/CLI clients. Sender-constrained with DPoP and tenant headers, suitable for online and Offline Kit deployments. + +## 1 · Responsibilities + +- Proxy policy pack CRUD and activation requests to Policy Engine while enforcing scope policies (`policy:read`, `policy:author`, `policy:review`, `policy:operate`, `policy:activate`). +- Normalise responses (DTO + `ProblemDetails`) so Console, CLI, and automation receive consistent payloads. +- Guard activation actions with structured logging and metrics so approvals are auditable. +- Support dual auth modes: + - Forwarded caller tokens (Console/CLI) with DPoP proofs + `X-Stella-Tenant` header. + - Gateway client credentials (DPoP) for service automation or Offline Kit flows when no caller token is present. + +## 2 · Endpoints + +| Route | Method | Description | Required scope(s) | +|-------|--------|-------------|-------------------| +| `/api/policy/packs` | `GET` | List policy packs and revisions for the active tenant. | `policy:read` | +| `/api/policy/packs` | `POST` | Create a policy pack shell or upsert display metadata. | `policy:author` | +| `/api/policy/packs/{packId}/revisions` | `POST` | Create or update a policy revision (draft/approved). | `policy:author` | +| `/api/policy/packs/{packId}/revisions/{version}:activate` | `POST` | Activate a revision, enforcing single/two-person approvals. | `policy:operate`, `policy:activate` | + +### Response shapes + +- Successful responses return camel-case DTOs matching `PolicyPackDto`, `PolicyRevisionDto`, or `PolicyRevisionActivationDto` as described in the Policy Engine API doc (`/docs/api/policy.md`). +- Errors always return RFC 7807 `ProblemDetails` with deterministic fields (`title`, `detail`, `status`). Missing caller credentials now surface `401` with `"Upstream authorization missing"` detail. + +## 3 · Authentication & headers + +| Header | Source | Notes | +|--------|--------|-------| +| `Authorization` | Forwarded caller token *or* gateway client credentials. | Caller tokens must include tenant scope; gateway tokens default to `DPoP` scheme. | +| `DPoP` | Caller or gateway. | Required when Authority mandates proof-of-possession (default). Generated per request; gateway keeps ES256/ES384 key material under `etc/policy-gateway-dpop.pem`. | +| `X-Stella-Tenant` | Caller | Tenant isolation header. Forwarded unchanged; gateway automation omits it. | + +Gateway client credentials are configured in `policy-gateway.yaml`: + +```yaml +policyEngine: + baseAddress: "https://policy-engine.internal" + audience: "api://policy-engine" + clientCredentials: + enabled: true + clientId: "policy-gateway" + clientSecret: "" + scopes: + - policy:read + - policy:author + - policy:review + - policy:operate + - policy:activate + dpop: + enabled: true + keyPath: "../etc/policy-gateway-dpop.pem" + algorithm: "ES256" +``` + +> 🔐 **DPoP key** – store the private key alongside Offline Kit secrets; rotate it whenever the gateway identity or Authority configuration changes. 
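+
+One way to provision that key material is sketched below — the path mirrors the configuration above, and it assumes the gateway accepts a PEM-encoded EC private key matching the configured `algorithm` (adjust curve, location, and rotation cadence to your environment):
+
+```bash
+# ES256 corresponds to the P-256 curve (prime256v1); generate and lock down the key
+openssl ecparam -name prime256v1 -genkey -noout -out etc/policy-gateway-dpop.pem
+chmod 600 etc/policy-gateway-dpop.pem
+
+# For ES384, generate a P-384 key instead:
+# openssl ecparam -name secp384r1 -genkey -noout -out etc/policy-gateway-dpop.pem
+```
+
+Keep the Offline Kit copy under `secrets/policy-gateway-dpop.pem` (see §6) in sync whenever the key is rotated.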
+ +## 4 · Metrics & logging + +All activation calls emit: + +- `policy_gateway_activation_requests_total{outcome,source}` – counter labelled with `outcome` (`activated`, `pending_second_approval`, `already_active`, `bad_request`, `not_found`, `unauthorized`, `forbidden`, `error`) and `source` (`caller`, `service`). +- `policy_gateway_activation_latency_ms{outcome,source}` – histogram measuring proxy latency. + +Structured logs (category `StellaOps.Policy.Gateway.Activation`) include `PackId`, `Version`, `Outcome`, `Source`, and upstream status code for audit trails. + +## 5 · Sample `curl` workflows + +Assuming you already obtained a DPoP-bound access token (`$TOKEN`) for tenant `acme`: + +```bash +# Generate a DPoP proof for GET via the CLI helper +DPoP_PROOF=$(stella auth dpop proof \ + --htu https://gateway.example.com/api/policy/packs \ + --htm GET \ + --token "$TOKEN") + +curl -sS https://gateway.example.com/api/policy/packs \ + -H "Authorization: DPoP $TOKEN" \ + -H "DPoP: $DPoP_PROOF" \ + -H "X-Stella-Tenant: acme" + +# Draft a new revision +DPoP_PROOF=$(stella auth dpop proof \ + --htu https://gateway.example.com/api/policy/packs/policy.core/revisions \ + --htm POST \ + --token "$TOKEN") + +curl -sS https://gateway.example.com/api/policy/packs/policy.core/revisions \ + -H "Authorization: DPoP $TOKEN" \ + -H "DPoP: $DPoP_PROOF" \ + -H "X-Stella-Tenant: acme" \ + -H "Content-Type: application/json" \ + -d '{"version":5,"requiresTwoPersonApproval":true,"initialStatus":"Draft"}' + +# Activate revision 5 (returns 202 when awaiting the second approver) +DPoP_PROOF=$(stella auth dpop proof \ + --htu https://gateway.example.com/api/policy/packs/policy.core/revisions/5:activate \ + --htm POST \ + --token "$TOKEN") + +curl -sS https://gateway.example.com/api/policy/packs/policy.core/revisions/5:activate \ + -H "Authorization: DPoP $TOKEN" \ + -H "DPoP: $DPoP_PROOF" \ + -H "X-Stella-Tenant: acme" \ + -H "Content-Type: application/json" \ + -d '{"comment":"Rollout baseline"}' +``` + +For air-gapped environments, bundle `policy-gateway.yaml` and the DPoP key in the Offline Kit (see `/docs/24_OFFLINE_KIT.md` §5.7). + +> **DPoP proof helper:** Use `stella auth dpop proof` to mint sender-constrained proofs locally. The command accepts `--htu`, `--htm`, and `--token` arguments and emits a ready-to-use header value. Teams maintaining alternate tooling (for example, `scripts/make-dpop.sh`) can substitute it as long as the inputs and output match the CLI behaviour. + +## 6 · Offline Kit guidance + +- Include `policy-gateway.yaml.sample` and the resolved runtime config in the Offline Kit’s `config/` tree. +- Place the DPoP private key under `secrets/policy-gateway-dpop.pem` with restricted permissions; document rotation steps in the manifest. +- When building verification scripts, use the gateway endpoints above instead of hitting Policy Engine directly. The Offline Kit validator now expects `policy_gateway_activation_requests_total` metrics in the Prometheus snapshot. + +## 7 · Change log + +- **2025-10-27 – Sprint 18.5**: Initial gateway bootstrap + activation metrics + DPoP client credentials. diff --git a/docs/policy/lifecycle.md b/docs/policy/lifecycle.md index ad779ce2..7a02cac2 100644 --- a/docs/policy/lifecycle.md +++ b/docs/policy/lifecycle.md @@ -1,238 +1,238 @@ -# Policy Lifecycle & Approvals - -> **Audience:** Policy authors, reviewers, security approvers, release engineers. 
-> **Scope:** End-to-end flow for `stella-dsl@1` policies from draft through archival, including CLI/Console touch-points, Authority scopes, audit artefacts, and offline considerations. - -This guide explains how a policy progresses through Stella Ops, which roles are involved, and the artefacts produced at every step. Pair it with the [Policy Engine Overview](overview.md), [DSL reference](dsl.md), and upcoming run documentation to ensure consistent authoring and rollout. - ---- - -## 1 · Protocol Summary - -- Policies are **immutable versions** attached to a stable `policy_id`. -- Lifecycle states: `draft → submitted → approved → active → archived`. -- Every transition requires explicit Authority scopes and produces structured events + storage artefacts (`policies`, `policy_runs`, audit log collections). -- Simulation and CI gating happen **before** approvals can be granted. -- Activation triggers (runs, bundle exports, CLI `promote`) operate on the **latest approved** version per tenant. - -```mermaid -stateDiagram-v2 - [*] --> Draft - Draft --> Draft: edit/save (policy:author) - Draft --> Submitted: submit(reviewers) (policy:author) - Submitted --> Draft: requestChanges (policy:review) - Submitted --> Approved: approve (policy:approve) - Approved --> Active: activate/run (policy:operate) - Active --> Archived: archive (policy:operate) - Approved --> Archived: superseded/explicit archive - Archived --> [*] -``` - ---- - -## 2 · Roles & Authority Scopes - -| Role (suggested) | Required scopes | Responsibilities | -|------------------|-----------------|------------------| -| **Policy Author** | `policy:author`, `policy:simulate`, `findings:read` | Draft DSL, run local/CI simulations, submit for review. | -| **Policy Reviewer** | `policy:review`, `policy:simulate`, `findings:read` | Comment on submissions, demand additional simulations, request changes. | -| **Policy Approver** | `policy:approve`, `policy:audit`, `findings:read` | Grant final approval, ensure sign-off evidence captured. | -| **Policy Operator** | `policy:operate`, `policy:run`, `policy:activate`, `findings:read` | Trigger full/incremental runs, monitor results, roll back to previous version. | -| **Policy Auditor** | `policy:audit`, `findings:read` | Review past versions, verify attestations, respond to compliance requests. | -| **Policy Engine Service** | `effective:write`, `findings:read` | Materialise effective findings during runs; no approval capabilities. | - -> Scopes are issued by Authority (`AUTH-POLICY-20-001`). Tenants may map organisational roles (e.g., `secops.approver`) to these scopes via issuer policy. - ---- - -## 3 · Lifecycle Stages in Detail - -### 3.1 Draft - -- **Who:** Authors (`policy:author`). -- **Tools:** Console editor, `stella policy edit`, policy DSL files. -- **Actions:** - - Author DSL leveraging [stella-dsl@1](dsl.md). - - Run `stella policy lint` and `stella policy simulate --sbom ` locally. - - Attach rationale metadata (`metadata.description`, tags). -- **Artefacts:** - - `policies` document with `status=draft`, `version=n`, `provenance.created_by`. - - Local IR cache (`.stella.ir.json`) generated by CLI compile. -- **Guards:** - - Draft versions never run in production. - - CI must lint drafts before allowing submission PRs (see `DEVOPS-POLICY-20-001`). - -### 3.2 Submission - -- **Who:** Authors (`policy:author`). -- **Tools:** Console “Submit for review” button, `stella policy submit --reviewers ...`. 
-- **Actions:** - - Provide review notes and required simulations (CLI uploads attachments). - - Choose reviewer groups; Authority records them in submission metadata. -- **Artefacts:** - - Policy document transitions to `status=submitted`, capturing `submitted_by`, `submitted_at`, reviewer list, simulation digest references. - - Audit event `policy.submitted` (Authority timeline / Notifier integration). -- **Guards:** - - Submission blocked unless latest lint + compile succeed (<24 h freshness). - - Must reference at least one simulation artefact (CLI enforces via `--attach`). - -### 3.3 Review (Submitted) - -- **Who:** Reviewers (`policy:review`), optionally authors responding. -- **Tools:** Console review pane (line comments, overall verdict), `stella policy review`. -- **Actions:** - - Inspect DSL diff vs previous approved version. - - Run additional `simulate` jobs (UI button or CLI). - - Request changes → policy returns to `draft` with comment log. -- **Artefacts:** - - Comments stored in `policy_reviews` collection with timestamps, resolved flag. - - Additional simulation run records appended to submission metadata. -- **Guards:** - - Approval cannot proceed until all blocking comments resolved. - - Required reviewers (Authority rule) must vote before approver sees “Approve” button. - -### 3.4 Approval - -- **Who:** Approvers (`policy:approve`). -- **Tools:** Console “Approve”, CLI `stella policy approve --version n --note "rationale"`. -- **Actions:** - - Confirm compliance checks (see §6) all green. - - Provide approval note (mandatory string captured in audit trail). -- **Artefacts:** - - Policy `status=approved`, `approved_by`, `approved_at`, `approval_note`. - - Audit event `policy.approved` plus optional Notifier broadcast. - - Immutable approval record stored in `policy_history`. -- **Guards:** - - Approver cannot be same identity as author (enforced by Authority config). - - Approver must attest to successful simulation diff review (`--attach diff.json`). - -### 3.5 Activation & Runs - -- **Who:** Operators (`policy:operate`, `policy:run`, `policy:activate`). -- **Tools:** Console “Promote to active”, CLI `stella policy activate --version n`, `stella policy run`. -- **Actions:** - - Mark approved version as tenant’s active policy. - - Trigger full run or rely on orchestrator for incremental runs. - - Monitor results via Console dashboards or CLI run logs. -- **Artefacts:** - - `policy_runs` entries with `mode=full|incremental`, `policy_version=n`. - - Effective findings collections updated; explain traces stored. - - Activation event `policy.activated` with `runId`. -- **Guards:** - - Activation blocked if previous full run <24 h old failed or is pending. - - Selection of SBOM/advisory snapshots uses consistent cursors recorded for reproducibility. - -### 3.6 Archival / Rollback - -- **Who:** Approvers or Operators with `policy:archive`. -- **Tools:** Console menu, CLI `stella policy archive --version n --reason`. -- **Actions:** - - Retire policies superseded by newer versions or revert to older approved version (`stella policy activate --version n-1`). - - Export archived version for audit bundles (Offline Kit integration). -- **Artefacts:** - - Policy `status=archived`, `archived_by`, `archived_at`, reason. - - Audit event `policy.archived`. - - Exported DSSE-signed policy pack stored if requested. -- **Guards:** - - Archival cannot proceed while runs using that version are in-flight. - - Rollback requires documented incident reference. 
- ---- - -## 4 · Tooling Touchpoints - -| Stage | Console | CLI | API | -|-------|---------|-----|-----| -| Draft | Inline linting, simulation panel | `stella policy lint`, `edit`, `simulate` | `POST /policies`, `PUT /policies/{id}/versions/{v}` | -| Submit | Submit modal (attach simulations) | `stella policy submit` | `POST /policies/{id}/submit` | -| Review | Comment threads, diff viewer | `stella policy review --approve/--request-changes` | `POST /policies/{id}/reviews` | -| Approve | Approve dialog | `stella policy approve` | `POST /policies/{id}/approve` | -| Activate | Promote button, run scheduler | `stella policy activate`, `run`, `simulate` | `POST /policies/{id}/run`, `POST /policies/{id}/activate` | -| Archive | Archive / rollback menu | `stella policy archive` | `POST /policies/{id}/archive` | - -All CLI commands emit structured JSON by default; use `--format table` for human review. - ---- - -## 5 · Audit & Observability - -- **Storage:** - - `policies` retains all versions with provenance metadata. - - `policy_reviews` stores reviewer comments, timestamps, attachments. - - `policy_history` summarises transitions (state, actor, note, diff digest). - - `policy_runs` retains input cursors and determinism hash per run. -- **Events:** - - `policy.submitted`, `policy.review.requested`, `policy.approved`, `policy.activated`, `policy.archived`, `policy.rollback`. - - Routed to Notifier + Timeline Indexer; offline deployments log to local event store. -- **Logs & metrics:** - - Policy Engine logs include `policyId`, `policyVersion`, `runId`, `approvalNote`. - - Observability dashboards (see forthcoming `/docs/observability/policy.md`) highlight pending approvals, run SLA, VEX overrides. -- **Reproducibility:** - - Each state transition stores IR checksum and simulation diff digests, enabling offline audit replay. - ---- - -## 6 · Compliance Gates - -| Gate | Stage | Enforced by | Requirement | -|------|-------|-------------|-------------| -| **DSL lint** | Draft → Submit | CLI/CI | `stella policy lint` successful within 24 h. | -| **Simulation evidence** | Submit | CLI/Console | Attach diff from `stella policy simulate` covering baseline SBOM set. | -| **Reviewer quorum** | Submit → Approve | Authority | Minimum approver/reviewer count configurable per tenant. | -| **Determinism CI** | Approve | DevOps job | Twin run diff passes (`DEVOPS-POLICY-20-003`). | -| **Activation health** | Approve → Activate | Policy Engine | Last run status succeeded; orchestrator queue healthy. | -| **Export validation** | Archive | Offline Kit | DSSE-signed policy pack generated for long-term retention. | - -Failure of any gate emits a `policy.lifecycle.violation` event and blocks transition until resolved. - ---- - -## 7 · Offline / Air-Gap Considerations - -- Offline Kit bundles include: - - Approved policy packs (`.policy.bundle` + DSSE signatures). - - Submission/approval audit logs. - - Simulation diff JSON for reproducibility. -- Air-gapped sites operate with the same lifecycle: - - Approvals happen locally; Authority runs in enclave. - - Rollout requires manual import of policy packs from connected environment via signed bundles. - - `stella policy simulate --sealed` ensures no outbound calls; required before approval in sealed mode. - ---- - -## 8 · Incident Response & Rollback - -- Incident mode (triggered via `policy incident activate`) forces: - - Immediate incremental run to evaluate mitigation policies. - - Expanded trace retention for affected runs. 
- - Automatic snapshot of currently active policies for evidence locker. -- Rollback path: - 1. `stella policy activate --version ` with incident note. - 2. Orchestrator schedules full run to ensure findings align. - 3. Archive problematic version with reason referencing incident ticket. -- Post-incident review must confirm new version passes gates before re-activation. - ---- - -## 9 · CI/CD Integration (Reference) - -- **Pre-merge:** run lint + simulation jobs against golden SBOM fixtures. -- **Post-merge (main):** compile, compute IR checksum, stage for Offline Kit. -- **Nightly:** determinism replay, `policy simulate` diff drift alerts, backlog of pending approvals. -- **Notifications:** Slack/Email via Notifier when submissions await review > SLA or approvals succeed. - ---- - -## 10 · Compliance Checklist - -- [ ] **Role mapping validated:** Authority issuer config maps organisational roles to required `policy:*` scopes (per tenant). -- [ ] **Submission evidence attached:** Latest simulation diff and lint artefacts linked to submission. -- [ ] **Reviewer quorum met:** All required reviewers approved or acknowledged; no unresolved blocking comments. -- [ ] **Approval note logged:** Approver justification recorded in audit trail alongside IR checksum. -- [ ] **Activation guard passed:** Latest run status success, orchestrator queue healthy, determinism job green. -- [ ] **Archive bundles produced:** When archiving, DSSE-signed policy pack exported and stored for offline retention. -- [ ] **Offline parity proven:** For sealed deployments, `--sealed` simulations executed and logged before approval. - ---- - -*Last updated: 2025-10-26 (Sprint 20).* +# Policy Lifecycle & Approvals + +> **Audience:** Policy authors, reviewers, security approvers, release engineers. +> **Scope:** End-to-end flow for `stella-dsl@1` policies from draft through archival, including CLI/Console touch-points, Authority scopes, audit artefacts, and offline considerations. + +This guide explains how a policy progresses through Stella Ops, which roles are involved, and the artefacts produced at every step. Pair it with the [Policy Engine Overview](overview.md), [DSL reference](dsl.md), and upcoming run documentation to ensure consistent authoring and rollout. + +--- + +## 1 · Protocol Summary + +- Policies are **immutable versions** attached to a stable `policy_id`. +- Lifecycle states: `draft → submitted → approved → active → archived`. +- Every transition requires explicit Authority scopes and produces structured events + storage artefacts (`policies`, `policy_runs`, audit log collections). +- Simulation and CI gating happen **before** approvals can be granted. +- Activation triggers (runs, bundle exports, CLI `promote`) operate on the **latest approved** version per tenant. + +```mermaid +stateDiagram-v2 + [*] --> Draft + Draft --> Draft: edit/save (policy:author) + Draft --> Submitted: submit(reviewers) (policy:author) + Submitted --> Draft: requestChanges (policy:review) + Submitted --> Approved: approve (policy:approve) + Approved --> Active: activate/run (policy:operate) + Active --> Archived: archive (policy:operate) + Approved --> Archived: superseded/explicit archive + Archived --> [*] +``` + +--- + +## 2 · Roles & Authority Scopes + +| Role (suggested) | Required scopes | Responsibilities | +|------------------|-----------------|------------------| +| **Policy Author** | `policy:author`, `policy:simulate`, `findings:read` | Draft DSL, run local/CI simulations, submit for review. 
| +| **Policy Reviewer** | `policy:review`, `policy:simulate`, `findings:read` | Comment on submissions, demand additional simulations, request changes. | +| **Policy Approver** | `policy:approve`, `policy:audit`, `findings:read` | Grant final approval, ensure sign-off evidence captured. | +| **Policy Operator** | `policy:operate`, `policy:run`, `policy:activate`, `findings:read` | Trigger full/incremental runs, monitor results, roll back to previous version. | +| **Policy Auditor** | `policy:audit`, `findings:read` | Review past versions, verify attestations, respond to compliance requests. | +| **Policy Engine Service** | `effective:write`, `findings:read` | Materialise effective findings during runs; no approval capabilities. | + +> Scopes are issued by Authority (`AUTH-POLICY-20-001`). Tenants may map organisational roles (e.g., `secops.approver`) to these scopes via issuer policy. + +--- + +## 3 · Lifecycle Stages in Detail + +### 3.1 Draft + +- **Who:** Authors (`policy:author`). +- **Tools:** Console editor, `stella policy edit`, policy DSL files. +- **Actions:** + - Author DSL leveraging [stella-dsl@1](dsl.md). + - Run `stella policy lint` and `stella policy simulate --sbom ` locally. + - Attach rationale metadata (`metadata.description`, tags). +- **Artefacts:** + - `policies` document with `status=draft`, `version=n`, `provenance.created_by`. + - Local IR cache (`.stella.ir.json`) generated by CLI compile. +- **Guards:** + - Draft versions never run in production. + - CI must lint drafts before allowing submission PRs (see `DEVOPS-POLICY-20-001`). + +### 3.2 Submission + +- **Who:** Authors (`policy:author`). +- **Tools:** Console “Submit for review” button, `stella policy submit --reviewers ...`. +- **Actions:** + - Provide review notes and required simulations (CLI uploads attachments). + - Choose reviewer groups; Authority records them in submission metadata. +- **Artefacts:** + - Policy document transitions to `status=submitted`, capturing `submitted_by`, `submitted_at`, reviewer list, simulation digest references. + - Audit event `policy.submitted` (Authority timeline / Notifier integration). +- **Guards:** + - Submission blocked unless latest lint + compile succeed (<24 h freshness). + - Must reference at least one simulation artefact (CLI enforces via `--attach`). + +### 3.3 Review (Submitted) + +- **Who:** Reviewers (`policy:review`), optionally authors responding. +- **Tools:** Console review pane (line comments, overall verdict), `stella policy review`. +- **Actions:** + - Inspect DSL diff vs previous approved version. + - Run additional `simulate` jobs (UI button or CLI). + - Request changes → policy returns to `draft` with comment log. +- **Artefacts:** + - Comments stored in `policy_reviews` collection with timestamps, resolved flag. + - Additional simulation run records appended to submission metadata. +- **Guards:** + - Approval cannot proceed until all blocking comments resolved. + - Required reviewers (Authority rule) must vote before approver sees “Approve” button. + +### 3.4 Approval + +- **Who:** Approvers (`policy:approve`). +- **Tools:** Console “Approve”, CLI `stella policy approve --version n --note "rationale"`. +- **Actions:** + - Confirm compliance checks (see §6) all green. + - Provide approval note (mandatory string captured in audit trail). +- **Artefacts:** + - Policy `status=approved`, `approved_by`, `approved_at`, `approval_note`. + - Audit event `policy.approved` plus optional Notifier broadcast. 
+ - Immutable approval record stored in `policy_history`. +- **Guards:** + - Approver cannot be same identity as author (enforced by Authority config). + - Approver must attest to successful simulation diff review (`--attach diff.json`). + +### 3.5 Activation & Runs + +- **Who:** Operators (`policy:operate`, `policy:run`, `policy:activate`). +- **Tools:** Console “Promote to active”, CLI `stella policy activate --version n`, `stella policy run`. +- **Actions:** + - Mark approved version as tenant’s active policy. + - Trigger full run or rely on orchestrator for incremental runs. + - Monitor results via Console dashboards or CLI run logs. +- **Artefacts:** + - `policy_runs` entries with `mode=full|incremental`, `policy_version=n`. + - Effective findings collections updated; explain traces stored. + - Activation event `policy.activated` with `runId`. +- **Guards:** + - Activation blocked if previous full run <24 h old failed or is pending. + - Selection of SBOM/advisory snapshots uses consistent cursors recorded for reproducibility. + +### 3.6 Archival / Rollback + +- **Who:** Approvers or Operators with `policy:archive`. +- **Tools:** Console menu, CLI `stella policy archive --version n --reason`. +- **Actions:** + - Retire policies superseded by newer versions or revert to older approved version (`stella policy activate --version n-1`). + - Export archived version for audit bundles (Offline Kit integration). +- **Artefacts:** + - Policy `status=archived`, `archived_by`, `archived_at`, reason. + - Audit event `policy.archived`. + - Exported DSSE-signed policy pack stored if requested. +- **Guards:** + - Archival cannot proceed while runs using that version are in-flight. + - Rollback requires documented incident reference. + +--- + +## 4 · Tooling Touchpoints + +| Stage | Console | CLI | API | +|-------|---------|-----|-----| +| Draft | Inline linting, simulation panel | `stella policy lint`, `edit`, `simulate` | `POST /policies`, `PUT /policies/{id}/versions/{v}` | +| Submit | Submit modal (attach simulations) | `stella policy submit` | `POST /policies/{id}/submit` | +| Review | Comment threads, diff viewer | `stella policy review --approve/--request-changes` | `POST /policies/{id}/reviews` | +| Approve | Approve dialog | `stella policy approve` | `POST /policies/{id}/approve` | +| Activate | Promote button, run scheduler | `stella policy activate`, `run`, `simulate` | `POST /policies/{id}/run`, `POST /policies/{id}/activate` | +| Archive | Archive / rollback menu | `stella policy archive` | `POST /policies/{id}/archive` | + +All CLI commands emit structured JSON by default; use `--format table` for human review. + +--- + +## 5 · Audit & Observability + +- **Storage:** + - `policies` retains all versions with provenance metadata. + - `policy_reviews` stores reviewer comments, timestamps, attachments. + - `policy_history` summarises transitions (state, actor, note, diff digest). + - `policy_runs` retains input cursors and determinism hash per run. +- **Events:** + - `policy.submitted`, `policy.review.requested`, `policy.approved`, `policy.activated`, `policy.archived`, `policy.rollback`. + - Routed to Notifier + Timeline Indexer; offline deployments log to local event store. +- **Logs & metrics:** + - Policy Engine logs include `policyId`, `policyVersion`, `runId`, `approvalNote`. + - Observability dashboards (see forthcoming `/docs/observability/policy.md`) highlight pending approvals, run SLA, VEX overrides. 
+- **Reproducibility:** + - Each state transition stores IR checksum and simulation diff digests, enabling offline audit replay. + +--- + +## 6 · Compliance Gates + +| Gate | Stage | Enforced by | Requirement | +|------|-------|-------------|-------------| +| **DSL lint** | Draft → Submit | CLI/CI | `stella policy lint` successful within 24 h. | +| **Simulation evidence** | Submit | CLI/Console | Attach diff from `stella policy simulate` covering baseline SBOM set. | +| **Reviewer quorum** | Submit → Approve | Authority | Minimum approver/reviewer count configurable per tenant. | +| **Determinism CI** | Approve | DevOps job | Twin run diff passes (`DEVOPS-POLICY-20-003`). | +| **Activation health** | Approve → Activate | Policy Engine | Last run status succeeded; orchestrator queue healthy. | +| **Export validation** | Archive | Offline Kit | DSSE-signed policy pack generated for long-term retention. | + +Failure of any gate emits a `policy.lifecycle.violation` event and blocks transition until resolved. + +--- + +## 7 · Offline / Air-Gap Considerations + +- Offline Kit bundles include: + - Approved policy packs (`.policy.bundle` + DSSE signatures). + - Submission/approval audit logs. + - Simulation diff JSON for reproducibility. +- Air-gapped sites operate with the same lifecycle: + - Approvals happen locally; Authority runs in enclave. + - Rollout requires manual import of policy packs from connected environment via signed bundles. + - `stella policy simulate --sealed` ensures no outbound calls; required before approval in sealed mode. + +--- + +## 8 · Incident Response & Rollback + +- Incident mode (triggered via `policy incident activate`) forces: + - Immediate incremental run to evaluate mitigation policies. + - Expanded trace retention for affected runs. + - Automatic snapshot of currently active policies for evidence locker. +- Rollback path: + 1. `stella policy activate --version ` with incident note. + 2. Orchestrator schedules full run to ensure findings align. + 3. Archive problematic version with reason referencing incident ticket. +- Post-incident review must confirm new version passes gates before re-activation. + +--- + +## 9 · CI/CD Integration (Reference) + +- **Pre-merge:** run lint + simulation jobs against golden SBOM fixtures. +- **Post-merge (main):** compile, compute IR checksum, stage for Offline Kit. +- **Nightly:** determinism replay, `policy simulate` diff drift alerts, backlog of pending approvals. +- **Notifications:** Slack/Email via Notifier when submissions await review > SLA or approvals succeed. + +--- + +## 10 · Compliance Checklist + +- [ ] **Role mapping validated:** Authority issuer config maps organisational roles to required `policy:*` scopes (per tenant). +- [ ] **Submission evidence attached:** Latest simulation diff and lint artefacts linked to submission. +- [ ] **Reviewer quorum met:** All required reviewers approved or acknowledged; no unresolved blocking comments. +- [ ] **Approval note logged:** Approver justification recorded in audit trail alongside IR checksum. +- [ ] **Activation guard passed:** Latest run status success, orchestrator queue healthy, determinism job green. +- [ ] **Archive bundles produced:** When archiving, DSSE-signed policy pack exported and stored for offline retention. +- [ ] **Offline parity proven:** For sealed deployments, `--sealed` simulations executed and logged before approval. 
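+
+The pre-merge gate referenced in §9 above can be assembled from the CLI commands already documented. The snippet below is a minimal sketch only; fixture paths, output locations, and the bare `lint` invocation are assumptions to adapt to your repository layout:
+
+```bash
+#!/usr/bin/env bash
+# Pre-merge gate sketch: lint the policy pack, then simulate against golden SBOM fixtures.
+# Non-zero exit codes fail the pipeline, matching the CI gating behaviour described above.
+set -euo pipefail
+
+stella policy lint
+
+mkdir -p artifacts
+for sbom in fixtures/sboms/*.json; do
+  stella policy simulate --sbom "$sbom" > "artifacts/$(basename "$sbom" .json).simulation.json"
+done
+```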
+ +--- + +*Last updated: 2025-10-26 (Sprint 20).* diff --git a/docs/policy/overview.md b/docs/policy/overview.md index d94384b8..7790c29b 100644 --- a/docs/policy/overview.md +++ b/docs/policy/overview.md @@ -1,173 +1,173 @@ -# Policy Engine Overview - -> **Goal:** Evaluate organisation policies deterministically against scanner SBOMs, Concelier advisories, and Excititor VEX evidence, then publish effective findings that downstream services can trust. - -This document introduces the v2 Policy Engine: how the service fits into Stella Ops, the artefacts it produces, the contracts it honours, and the guardrails that keep policy decisions reproducible across air-gapped and connected deployments. - ---- - -## 1 · Role in the Platform - -- **Purpose:** Compose policy verdicts by reconciling SBOM inventory, advisory metadata, VEX statements, and organisation rules. -- **Form factor:** Dedicated `.NET 10` Minimal API host (`StellaOps.Policy.Engine`) plus worker orchestration. Policies are defined in `stella-dsl@1` packs compiled to an intermediate representation (IR) with a stable SHA-256 digest. -- **Tenancy:** All workloads run under Authority-enforced scopes (`policy:*`, `findings:read`, `effective:write`). Only the Policy Engine identity may materialise effective findings collections. -- **Consumption:** Findings ledger, Console, CLI, and Notify read the published `effective_finding_{policyId}` materialisations and policy run ledger (`policy_runs`). -- **Offline parity:** Bundled policies import/export alongside advisories and VEX. In sealed mode the engine degrades gracefully, annotating explanations whenever cached signals replace live lookups. - ---- - -## 2 · High-Level Architecture - -```mermaid -flowchart LR - subgraph Inputs - A[Scanner SBOMs
Inventory & Usage]
- B[Concelier Advisories<br/>Canonical linksets]
- C[Excititor VEX<br/>Consensus status]
- D[Policy Packs<br/>stella-dsl@1]
- end
- subgraph PolicyEngine["StellaOps.Policy.Engine"]
- P1[DSL Compiler<br/>IR + Digest]
- P2[Joiners<br/>SBOM ↔ Advisory ↔ VEX]
- P3[Deterministic Evaluator<br/>Rule hits + scoring]
- P4[Materialisers<br/>effective findings]
- P5[Run Orchestrator<br/>Full & incremental]
- end
- subgraph Outputs
- O1[Effective Findings Collections]
- O2[Explain Traces<br/>Rule hit lineage]
- O3[Metrics & Traces<br/>policy_run_seconds,<br/>rules_fired_total]
- O4[Simulation/Preview Feeds<br/>
CLI & Studio] - end - - A --> P2 - B --> P2 - C --> P2 - D --> P1 --> P3 - P2 --> P3 --> P4 --> O1 - P3 --> O2 - P5 --> P3 - P3 --> O3 - P3 --> O4 -``` - ---- - -## 3 · Core Concepts - -| Concept | Description | -|---------|-------------| -| **Policy Pack** | Versioned bundle of DSL documents, metadata, and checksum manifest. Packs import/export via CLI and Offline Kit bundles. | -| **Policy Digest** | SHA-256 of the canonical IR; used for caching, explain trace attribution, and audit proofs. | -| **Effective Findings** | Append-only Mongo collections (`effective_finding_{policyId}`) storing the latest verdict per finding, plus history sidecars. | -| **Policy Run** | Execution record persisted in `policy_runs` capturing inputs, run mode, timings, and determinism hash. | -| **Explain Trace** | Structured tree showing rule matches, data provenance, and scoring components for UI/CLI explain features. | -| **Simulation** | Dry-run evaluation that compares a candidate pack against the active pack and produces verdict diffs without persisting results. | -| **Incident Mode** | Elevated sampling/trace capture toggled automatically when SLOs breach; emits events for Notifier and Timeline Indexer. | - ---- - -## 4 · Inputs & Pre-processing - -### 4.1 SBOM Inventory - -- **Source:** Scanner.WebService publishes inventory/usage SBOMs plus BOM-Index (roaring bitmap) metadata. -- **Consumption:** Policy joiners use the index to expand candidate components quickly, keeping evaluation under the `< 5 s` warm path budget. -- **Schema:** CycloneDX Protobuf + JSON views; Policy Engine reads canonical projections via shared SBOM adapters. - -### 4.2 Advisory Corpus - -- **Source:** Concelier exports canonical advisories with deterministic identifiers, linksets, and equivalence tables. -- **Contract:** Policy Engine only consumes raw `content.raw`, `identifiers`, and `linkset` fields per Aggregation-Only Contract (AOC); derived precedence remains a policy concern. - -### 4.3 VEX Evidence - -- **Source:** Excititor consensus service resolves OpenVEX / CSAF statements, preserving conflicts. -- **Usage:** Policy rules can require specific VEX vendors or justification codes; evaluator records when cached evidence substitutes for live statements (sealed mode). - -### 4.4 Policy Packs - -- Authored in Policy Studio or CLI, validated against the `stella-dsl@1` schema. -- Compiler performs canonicalisation (ordering, defaulting) before emitting IR and digest. -- Packs bundle scoring profiles, allowlist metadata, and optional reachability weighting tables. - ---- - -## 5 · Evaluation Flow - -1. **Run selection** – Orchestrator accepts `full`, `incremental`, or `simulate` jobs. Incremental runs listen to change streams from Concelier, Excititor, and SBOM imports to scope re-evaluation. -2. **Input staging** – Candidates fetched in deterministic batches; identity graph from Concelier strengthens PURL lookups. -3. **Rule execution** – Evaluator walks rules in lexical order (first-match wins). Actions available: `block`, `ignore`, `warn`, `defer`, `escalate`, `requireVex`, each supporting quieting semantics where permitted. -4. **Scoring** – `PolicyScoringConfig` applies severity, trust, reachability weights plus penalties (`warnPenalty`, `ignorePenalty`, `quietPenalty`). -5. **Verdict and explain** – Engine constructs `PolicyVerdict` records with inputs, quiet flags, unknown confidence bands, and provenance markers; explain trees capture rule lineage. -6. 
**Materialisation** – Effective findings collections are upserted append-only, stamped with run identifier, policy digest, and tenant. -7. **Publishing** – Completed run writes to `policy_runs`, emits metrics (`policy_run_seconds`, `rules_fired_total`, `vex_overrides_total`), and raises events for Console/Notify subscribers. - ---- - -## 6 · Run Modes - -| Mode | Trigger | Scope | Persistence | Typical Use | -|------|---------|-------|-------------|-------------| -| **Full** | Manual CLI (`stella policy run`), scheduled nightly, or emergency rebaseline | Entire tenant | Writes effective findings and run record | After policy publish or major advisory/VEX import | -| **Incremental** | Change-stream queue driven by Concelier/Excititor/SBOM deltas | Only affected artefacts | Writes effective findings and run record | Continuous upkeep; ensures SLA ≤ 5 min from source change | -| **Simulate** | CLI/Studio preview, CI pipelines | Candidate subset (diff against baseline) | No materialisation; produces explain & diff payloads | Policy authoring, CI regression suites | - -All modes are cancellation-aware and checkpoint progress for replay in case of deployment restarts. - ---- - -## 7 · Outputs & Integrations - -- **APIs** – Minimal API exposes policy CRUD, run orchestration, explain fetches, and cursor-based listing of effective findings (see `/docs/api/policy.md` once published). -- **CLI** – `stella policy simulate/run/show` commands surface JSON verdicts, exit codes, and diff summaries suitable for CI gating. -- **Console / Policy Studio** – UI reads explain traces, policy metadata, approval workflow status, and simulation diffs to guide reviewers. -- **Findings Ledger** – Effective findings feed downstream export, Notify, and risk scoring jobs. -- **Air-gap bundles** – Offline Kit includes policy packs, scoring configs, and explain indexes; export commands generate DSSE-signed bundles for transfer. - ---- - -## 8 · Determinism & Guardrails - -- **Deterministic inputs** – All joins rely on canonical linksets and equivalence tables; batches are sorted, and random/wall-clock APIs are blocked by static analysis plus runtime guards (`ERR_POL_004`). -- **Stable outputs** – Canonical JSON serializers sort keys; digests recorded in run metadata enable reproducible diffs across machines. -- **Idempotent writes** – Materialisers upsert using `{policyId, findingId, tenant}` keys and retain prior versions with append-only history. -- **Sandboxing** – Policy evaluation executes in-process with timeouts; restart-only plug-ins guarantee no runtime DLL injection. -- **Compliance proof** – Every run stores digest of inputs (policy, SBOM batch, advisory snapshot) so auditors can replay decisions offline. - ---- - -## 9 · Security, Tenancy & Offline Notes - -- **Authority scopes:** Gateway enforces `policy:read`, `policy:write`, `policy:simulate`, `policy:runs`, `findings:read`, `effective:write`. Service identities must present DPoP-bound tokens. -- **Tenant isolation:** Collections partition by tenant identifier; cross-tenant queries require explicit admin scopes and return audit warnings. -- **Sealed mode:** In air-gapped deployments the engine surfaces `sealed=true` hints in explain traces, warning about cached EPSS/KEV data and suggesting bundle refreshes (see `docs/airgap/EPIC_16_AIRGAP_MODE.md` §3.7). -- **Observability:** Structured logs carry correlation IDs matching orchestrator job IDs; metrics integrate with OpenTelemetry exporters; sampled rule-hit logs redact policy secrets. 
-- **Incident response:** Incident mode can be forced via API, boosting trace retention and notifying Notifier through `policy.incident.activated` events. - ---- - -## 10 · Working with Policy Packs - -1. **Author** in Policy Studio or edit DSL files locally. Validate with `stella policy lint`. -2. **Simulate** against golden SBOM fixtures (`stella policy simulate --sbom fixtures/*.json`). Inspect explain traces for unexpected overrides. -3. **Publish** via API or CLI; Authority enforces review/approval workflows (`draft → review → approve → rollout`). -4. **Monitor** the subsequent incremental runs; if determinism diff fails in CI, roll back pack while investigating digests. -5. **Bundle** packs for offline sites with `stella policy bundle export` and distribute via Offline Kit. - ---- - -## 11 · Compliance Checklist - -- [ ] **Scopes enforced:** Confirm gateway policy requires `policy:*` and `effective:write` scopes for all mutating endpoints. -- [ ] **Determinism guard active:** Static analyzer blocks clock/RNG usage; CI determinism job diffing repeated runs passes. -- [ ] **Materialisation audit:** Effective findings collections use append-only writers and retain history per policy run. -- [ ] **Explain availability:** UI/CLI expose explain traces for every verdict; sealed-mode warnings display when cached evidence is used. -- [ ] **Offline parity:** Policy bundles (import/export) tested in sealed environment; air-gap degradations documented for operators. -- [ ] **Observability wired:** Metrics (`policy_run_seconds`, `rules_fired_total`, `vex_overrides_total`) and sampled rule hit logs emit to the shared telemetry pipeline with correlation IDs. -- [ ] **Documentation synced:** API (`/docs/api/policy.md`), DSL grammar (`/docs/policy/dsl.md`), lifecycle (`/docs/policy/lifecycle.md`), and run modes (`/docs/policy/runs.md`) cross-link back to this overview. - ---- - -*Last updated: 2025-10-26 (Sprint 20).* - +# Policy Engine Overview + +> **Goal:** Evaluate organisation policies deterministically against scanner SBOMs, Concelier advisories, and Excititor VEX evidence, then publish effective findings that downstream services can trust. + +This document introduces the v2 Policy Engine: how the service fits into Stella Ops, the artefacts it produces, the contracts it honours, and the guardrails that keep policy decisions reproducible across air-gapped and connected deployments. + +--- + +## 1 · Role in the Platform + +- **Purpose:** Compose policy verdicts by reconciling SBOM inventory, advisory metadata, VEX statements, and organisation rules. +- **Form factor:** Dedicated `.NET 10` Minimal API host (`StellaOps.Policy.Engine`) plus worker orchestration. Policies are defined in `stella-dsl@1` packs compiled to an intermediate representation (IR) with a stable SHA-256 digest. +- **Tenancy:** All workloads run under Authority-enforced scopes (`policy:*`, `findings:read`, `effective:write`). Only the Policy Engine identity may materialise effective findings collections. +- **Consumption:** Findings ledger, Console, CLI, and Notify read the published `effective_finding_{policyId}` materialisations and policy run ledger (`policy_runs`). +- **Offline parity:** Bundled policies import/export alongside advisories and VEX. In sealed mode the engine degrades gracefully, annotating explanations whenever cached signals replace live lookups. + +--- + +## 2 · High-Level Architecture + +```mermaid +flowchart LR + subgraph Inputs + A[Scanner SBOMs
Inventory & Usage]
+ B[Concelier Advisories<br/>Canonical linksets]
+ C[Excititor VEX<br/>Consensus status]
+ D[Policy Packs<br/>stella-dsl@1]
+ end
+ subgraph PolicyEngine["StellaOps.Policy.Engine"]
+ P1[DSL Compiler<br/>IR + Digest]
+ P2[Joiners<br/>SBOM ↔ Advisory ↔ VEX]
+ P3[Deterministic Evaluator<br/>Rule hits + scoring]
+ P4[Materialisers<br/>effective findings]
+ P5[Run Orchestrator<br/>Full & incremental]
+ end
+ subgraph Outputs
+ O1[Effective Findings Collections]
+ O2[Explain Traces<br/>Rule hit lineage]
+ O3[Metrics & Traces<br/>policy_run_seconds,<br/>rules_fired_total]
+ O4[Simulation/Preview Feeds<br/>
CLI & Studio] + end + + A --> P2 + B --> P2 + C --> P2 + D --> P1 --> P3 + P2 --> P3 --> P4 --> O1 + P3 --> O2 + P5 --> P3 + P3 --> O3 + P3 --> O4 +``` + +--- + +## 3 · Core Concepts + +| Concept | Description | +|---------|-------------| +| **Policy Pack** | Versioned bundle of DSL documents, metadata, and checksum manifest. Packs import/export via CLI and Offline Kit bundles. | +| **Policy Digest** | SHA-256 of the canonical IR; used for caching, explain trace attribution, and audit proofs. | +| **Effective Findings** | Append-only Mongo collections (`effective_finding_{policyId}`) storing the latest verdict per finding, plus history sidecars. | +| **Policy Run** | Execution record persisted in `policy_runs` capturing inputs, run mode, timings, and determinism hash. | +| **Explain Trace** | Structured tree showing rule matches, data provenance, and scoring components for UI/CLI explain features. | +| **Simulation** | Dry-run evaluation that compares a candidate pack against the active pack and produces verdict diffs without persisting results. | +| **Incident Mode** | Elevated sampling/trace capture toggled automatically when SLOs breach; emits events for Notifier and Timeline Indexer. | + +--- + +## 4 · Inputs & Pre-processing + +### 4.1 SBOM Inventory + +- **Source:** Scanner.WebService publishes inventory/usage SBOMs plus BOM-Index (roaring bitmap) metadata. +- **Consumption:** Policy joiners use the index to expand candidate components quickly, keeping evaluation under the `< 5 s` warm path budget. +- **Schema:** CycloneDX Protobuf + JSON views; Policy Engine reads canonical projections via shared SBOM adapters. + +### 4.2 Advisory Corpus + +- **Source:** Concelier exports canonical advisories with deterministic identifiers, linksets, and equivalence tables. +- **Contract:** Policy Engine only consumes raw `content.raw`, `identifiers`, and `linkset` fields per Aggregation-Only Contract (AOC); derived precedence remains a policy concern. + +### 4.3 VEX Evidence + +- **Source:** Excititor consensus service resolves OpenVEX / CSAF statements, preserving conflicts. +- **Usage:** Policy rules can require specific VEX vendors or justification codes; evaluator records when cached evidence substitutes for live statements (sealed mode). + +### 4.4 Policy Packs + +- Authored in Policy Studio or CLI, validated against the `stella-dsl@1` schema. +- Compiler performs canonicalisation (ordering, defaulting) before emitting IR and digest. +- Packs bundle scoring profiles, allowlist metadata, and optional reachability weighting tables. + +--- + +## 5 · Evaluation Flow + +1. **Run selection** – Orchestrator accepts `full`, `incremental`, or `simulate` jobs. Incremental runs listen to change streams from Concelier, Excititor, and SBOM imports to scope re-evaluation. +2. **Input staging** – Candidates fetched in deterministic batches; identity graph from Concelier strengthens PURL lookups. +3. **Rule execution** – Evaluator walks rules in lexical order (first-match wins). Actions available: `block`, `ignore`, `warn`, `defer`, `escalate`, `requireVex`, each supporting quieting semantics where permitted. +4. **Scoring** – `PolicyScoringConfig` applies severity, trust, reachability weights plus penalties (`warnPenalty`, `ignorePenalty`, `quietPenalty`). +5. **Verdict and explain** – Engine constructs `PolicyVerdict` records with inputs, quiet flags, unknown confidence bands, and provenance markers; explain trees capture rule lineage. +6. 
**Materialisation** – Effective findings collections are upserted append-only, stamped with run identifier, policy digest, and tenant. +7. **Publishing** – Completed run writes to `policy_runs`, emits metrics (`policy_run_seconds`, `rules_fired_total`, `vex_overrides_total`), and raises events for Console/Notify subscribers. + +--- + +## 6 · Run Modes + +| Mode | Trigger | Scope | Persistence | Typical Use | +|------|---------|-------|-------------|-------------| +| **Full** | Manual CLI (`stella policy run`), scheduled nightly, or emergency rebaseline | Entire tenant | Writes effective findings and run record | After policy publish or major advisory/VEX import | +| **Incremental** | Change-stream queue driven by Concelier/Excititor/SBOM deltas | Only affected artefacts | Writes effective findings and run record | Continuous upkeep; ensures SLA ≤ 5 min from source change | +| **Simulate** | CLI/Studio preview, CI pipelines | Candidate subset (diff against baseline) | No materialisation; produces explain & diff payloads | Policy authoring, CI regression suites | + +All modes are cancellation-aware and checkpoint progress for replay in case of deployment restarts. + +--- + +## 7 · Outputs & Integrations + +- **APIs** – Minimal API exposes policy CRUD, run orchestration, explain fetches, and cursor-based listing of effective findings (see `/docs/api/policy.md` once published). +- **CLI** – `stella policy simulate/run/show` commands surface JSON verdicts, exit codes, and diff summaries suitable for CI gating. +- **Console / Policy Studio** – UI reads explain traces, policy metadata, approval workflow status, and simulation diffs to guide reviewers. +- **Findings Ledger** – Effective findings feed downstream export, Notify, and risk scoring jobs. +- **Air-gap bundles** – Offline Kit includes policy packs, scoring configs, and explain indexes; export commands generate DSSE-signed bundles for transfer. + +--- + +## 8 · Determinism & Guardrails + +- **Deterministic inputs** – All joins rely on canonical linksets and equivalence tables; batches are sorted, and random/wall-clock APIs are blocked by static analysis plus runtime guards (`ERR_POL_004`). +- **Stable outputs** – Canonical JSON serializers sort keys; digests recorded in run metadata enable reproducible diffs across machines. +- **Idempotent writes** – Materialisers upsert using `{policyId, findingId, tenant}` keys and retain prior versions with append-only history. +- **Sandboxing** – Policy evaluation executes in-process with timeouts; restart-only plug-ins guarantee no runtime DLL injection. +- **Compliance proof** – Every run stores digest of inputs (policy, SBOM batch, advisory snapshot) so auditors can replay decisions offline. + +--- + +## 9 · Security, Tenancy & Offline Notes + +- **Authority scopes:** Gateway enforces `policy:read`, `policy:write`, `policy:simulate`, `policy:runs`, `findings:read`, `effective:write`. Service identities must present DPoP-bound tokens. +- **Tenant isolation:** Collections partition by tenant identifier; cross-tenant queries require explicit admin scopes and return audit warnings. +- **Sealed mode:** In air-gapped deployments the engine surfaces `sealed=true` hints in explain traces, warning about cached EPSS/KEV data and suggesting bundle refreshes (see `docs/airgap/EPIC_16_AIRGAP_MODE.md` §3.7). +- **Observability:** Structured logs carry correlation IDs matching orchestrator job IDs; metrics integrate with OpenTelemetry exporters; sampled rule-hit logs redact policy secrets. 
+- **Incident response:** Incident mode can be forced via API, boosting trace retention and notifying Notifier through `policy.incident.activated` events. + +--- + +## 10 · Working with Policy Packs + +1. **Author** in Policy Studio or edit DSL files locally. Validate with `stella policy lint`. +2. **Simulate** against golden SBOM fixtures (`stella policy simulate --sbom fixtures/*.json`). Inspect explain traces for unexpected overrides. +3. **Publish** via API or CLI; Authority enforces review/approval workflows (`draft → review → approve → rollout`). +4. **Monitor** the subsequent incremental runs; if determinism diff fails in CI, roll back pack while investigating digests. +5. **Bundle** packs for offline sites with `stella policy bundle export` and distribute via Offline Kit. + +--- + +## 11 · Compliance Checklist + +- [ ] **Scopes enforced:** Confirm gateway policy requires `policy:*` and `effective:write` scopes for all mutating endpoints. +- [ ] **Determinism guard active:** Static analyzer blocks clock/RNG usage; CI determinism job diffing repeated runs passes. +- [ ] **Materialisation audit:** Effective findings collections use append-only writers and retain history per policy run. +- [ ] **Explain availability:** UI/CLI expose explain traces for every verdict; sealed-mode warnings display when cached evidence is used. +- [ ] **Offline parity:** Policy bundles (import/export) tested in sealed environment; air-gap degradations documented for operators. +- [ ] **Observability wired:** Metrics (`policy_run_seconds`, `rules_fired_total`, `vex_overrides_total`) and sampled rule hit logs emit to the shared telemetry pipeline with correlation IDs. +- [ ] **Documentation synced:** API (`/docs/api/policy.md`), DSL grammar (`/docs/policy/dsl.md`), lifecycle (`/docs/policy/lifecycle.md`), and run modes (`/docs/policy/runs.md`) cross-link back to this overview. + +--- + +*Last updated: 2025-10-26 (Sprint 20).* + diff --git a/docs/policy/runs.md b/docs/policy/runs.md index a2d74d26..cef23b01 100644 --- a/docs/policy/runs.md +++ b/docs/policy/runs.md @@ -43,7 +43,7 @@ All modes record their status in `policy_runs` with deterministic metadata: } ``` -> **Schemas & samples:** see `src/StellaOps.Scheduler.Models/docs/SCHED-MODELS-20-001-POLICY-RUNS.md` and the fixtures in `samples/api/scheduler/policy-*.json` for canonical payloads consumed by CLI/UI/worker integrations. +> **Schemas & samples:** see `src/Scheduler/__Libraries/StellaOps.Scheduler.Models/docs/SCHED-MODELS-20-001-POLICY-RUNS.md` and the fixtures in `samples/api/scheduler/policy-*.json` for canonical payloads consumed by CLI/UI/worker integrations. 
--- diff --git a/docs/risk/EPIC_18_RISK_PROFILES.md b/docs/risk/EPIC_18_RISK_PROFILES.md index d894ca01..8ca147c0 100644 --- a/docs/risk/EPIC_18_RISK_PROFILES.md +++ b/docs/risk/EPIC_18_RISK_PROFILES.md @@ -191,13 +191,13 @@ Profiles work offline; providers rely on bundled datasets produced by Export Cen ### 4.1 New modules -* `src/StellaOps.RiskEngine/` -* `src/StellaOps.RiskEngine/providers/` -* `src/StellaOps.Policy.RiskProfile/` +* `src/RiskEngine/StellaOps.RiskEngine/` +* `src/RiskEngine/StellaOps.RiskEngine/providers/` +* `src/Policy/StellaOps.Policy.RiskProfile/` * Database migrations for profiles/results/explanations -* `src/StellaOps.UI` -* `src/StellaOps.Cli` -* `src/StellaOps.ExportCenter.RiskBundles` +* `src/UI/StellaOps.UI` +* `src/Cli/StellaOps.Cli` +* `src/ExportCenter/StellaOps.ExportCenter.RiskBundles` ### 4.2 Data model diff --git a/docs/scanner-core-contracts.md b/docs/scanner-core-contracts.md index 5745b306..19e2b453 100644 --- a/docs/scanner-core-contracts.md +++ b/docs/scanner-core-contracts.md @@ -1,147 +1,147 @@ -# Scanner Core Contracts - -The **Scanner Core** library provides shared contracts, observability helpers, and security utilities consumed by `Scanner.WebService`, `Scanner.Worker`, analyzers, and tooling. These primitives guarantee deterministic identifiers, timestamps, and log context for all scanning flows. - -## Canonical DTOs - -- `ScanJob` & `ScanJobStatus` – canonical job metadata (image reference/digest, tenant, correlation ID, timestamps, failure details). Constructors normalise timestamps to UTC microsecond precision and canonicalise image digests. Round-trips with `JsonSerializerDefaults.Web` using `ScannerJsonOptions`. -- `ScanProgressEvent` & `ScanStage`/`ScanProgressEventKind` – stage-level progress surface for queue/stream consumers. Includes deterministic sequence numbers, optional progress percentage, attributes, and attached `ScannerError`. -- `ScannerError` & `ScannerErrorCode` – shared error taxonomy spanning queue, analyzers, storage, exporters, and signing. Carries severity, retryability, structured details, and microsecond-precision timestamps. -- `ScanJobId` – strongly-typed identifier rendered as `Guid` (lowercase `N` format) with deterministic parsing. - -### Canonical JSON samples - -The golden fixtures consumed by `ScannerCoreContractsTests` document the wire shape shared with downstream services. They live under `src/StellaOps.Scanner.Core.Tests/Fixtures/` and a representative extract is shown below. - -```json -{ - "id": "8f4cc9c582454b9d9b4f5ae049631b7d", - "status": "running", - "imageReference": "registry.example.com/stellaops/scanner:1.2.3", - "imageDigest": "sha256:abcdef", - "createdAt": "2025-10-18T14:30:15.123456+00:00", - "updatedAt": "2025-10-18T14:30:20.123456+00:00", - "correlationId": "scan-analyzeoperatingsystem-8f4cc9c582454b9d9b4f5ae049631b7d", - "tenantId": "tenant-a", - "metadata": { - "requestId": "req-1234", - "source": "ci" - }, - "failure": { - "code": "analyzerFailure", - "severity": "error", - "message": "Analyzer failed to parse layer", - "timestamp": "2025-10-18T14:30:15.123456+00:00", - "retryable": false, - "stage": "AnalyzeOperatingSystem", - "component": "os-analyzer", - "details": { - "layerDigest": "sha256:deadbeef", - "attempt": "1" - } - } -} -``` - -Progress events follow the same conventions (`jobId`, `stage`, `kind`, `timestamp`, `attributes`, optional embedded `ScannerError`). The fixtures are verified via deterministic JSON comparison in every CI run. 
- -## Deterministic helpers - -- `ScannerIdentifiers` – derives `ScanJobId`, correlation IDs, and SHA-256 hashes from normalised inputs (image reference/digest, tenant, salt). Ensures case-insensitive stability and reproducible metric keys. -- `ScannerTimestamps` – trims to microsecond precision, provides ISO-8601 (`yyyy-MM-ddTHH:mm:ss.ffffffZ`) rendering, and parsing helpers. -- `ScannerJsonOptions` – standard JSON options (web defaults, camel-case enums) shared by services/tests. -- `ScanAnalysisStore` & `ScanAnalysisKeys` – shared in-memory analysis cache flowing through Worker stages. OS analyzers populate - `analysis.os.packages` (raw output), `analysis.os.fragments` (per-analyzer component fragments), and merge into - `analysis.layers.fragments` so emit/diff stages can compose SBOMs and diffs without knowledge of individual analyzer - implementations. - -## Observability primitives - -- `ScannerDiagnostics` – global `ActivitySource`/`Meter` for scanner components. `StartActivity` seeds deterministic tags (`job_id`, `stage`, `component`, `correlation_id`). -- `ScannerMetricNames` – centralises metric prefixes (`stellaops.scanner.*`) and deterministic job/event tag builders. -- `ScannerCorrelationContext` & `ScannerCorrelationContextAccessor` – ambient correlation propagation via `AsyncLocal` for log scopes, metrics, and diagnostics. -- `ScannerLogExtensions` – `ILogger` scopes for jobs/progress events with automatic correlation context push, minimal allocations, and consistent structured fields. - -### Observability overhead validation - -A micro-benchmark executed on 2025-10-19 (4 vCPU runner, .NET 10.0.100-rc.1) measured the average scope cost across 1 000 000 iterations: - -| Scope | Mean (µs/call) | -|-------|----------------| -| `BeginScanScope` (logger attached) | 0.80 | -| `BeginScanScope` (noop logger) | 0.31 | -| `BeginProgressScope` | 0.57 | - -To reproduce, run `dotnet test src/StellaOps.Scanner.Core.Tests -c Release` (see `ScannerLogExtensionsPerformanceTests`) or copy the snippet below into a throwaway `dotnet run` console project and execute it with `dotnet run -c Release`: - -```csharp -using System.Collections.Generic; -using System.Diagnostics; -using Microsoft.Extensions.Logging; -using StellaOps.Scanner.Core.Contracts; -using StellaOps.Scanner.Core.Observability; -using StellaOps.Scanner.Core.Utility; - -var factory = LoggerFactory.Create(builder => builder.AddFilter(static _ => true)); -var logger = factory.CreateLogger("bench"); - -var jobId = ScannerIdentifiers.CreateJobId("registry.example.com/stellaops/scanner:1.2.3", "sha256:abcdef", "tenant-a", "benchmark"); -var correlationId = ScannerIdentifiers.CreateCorrelationId(jobId, nameof(ScanStage.AnalyzeOperatingSystem)); -var now = ScannerTimestamps.Normalize(new DateTimeOffset(2025, 10, 19, 12, 0, 0, TimeSpan.Zero)); - -var job = new ScanJob(jobId, ScanJobStatus.Running, "registry.example.com/stellaops/scanner:1.2.3", "sha256:abcdef", now, now, correlationId, "tenant-a", new Dictionary(StringComparer.Ordinal) { ["requestId"] = "req-bench" }); -var progress = new ScanProgressEvent(jobId, ScanStage.AnalyzeOperatingSystem, ScanProgressEventKind.Progress, 42, now, 10.5, "benchmark", new Dictionary(StringComparer.Ordinal) { ["sample"] = "true" }); - -Console.WriteLine("Scanner Core Observability micro-bench (1,000,000 iterations)"); -Report("BeginScanScope (logger)", Measure(static ctx => ctx.Logger.BeginScanScope(ctx.Job, ctx.Stage, ctx.Component), new ScopeContext(logger, job, 
nameof(ScanStage.AnalyzeOperatingSystem), "os-analyzer"))); -Report("BeginScanScope (no logger)", Measure(static ctx => ScannerLogExtensions.BeginScanScope(null, ctx.Job, ctx.Stage, ctx.Component), new ScopeContext(logger, job, nameof(ScanStage.AnalyzeOperatingSystem), "os-analyzer"))); -Report("BeginProgressScope", Measure(static ctx => ctx.Logger.BeginProgressScope(ctx.Progress!, ctx.Component), new ScopeContext(logger, job, nameof(ScanStage.AnalyzeOperatingSystem), "os-analyzer", progress))); - -static double Measure(Func factory, ScopeContext context) -{ - const int iterations = 1_000_000; - for (var i = 0; i < 10_000; i++) - { - using var scope = factory(context); - } - - GC.Collect(); - GC.WaitForPendingFinalizers(); - GC.Collect(); - - var sw = Stopwatch.StartNew(); - for (var i = 0; i < iterations; i++) - { - using var scope = factory(context); - } - - sw.Stop(); - return sw.Elapsed.TotalSeconds * 1_000_000 / iterations; -} - -static void Report(string label, double microseconds) - => Console.WriteLine($"{label,-28}: {microseconds:F3} µs"); - -readonly record struct ScopeContext(ILogger Logger, ScanJob Job, string? Stage, string? Component, ScanProgressEvent? Progress = null); -``` - -Both guardrails enforce the ≤ 5 µs acceptance target for SP9-G1. - -## Security utilities - -- `AuthorityTokenSource` – caches short-lived OpToks per audience+scope using deterministic keys and refresh skew (default 30 s). Integrates with `StellaOps.Auth.Client`. -- `DpopProofValidator` – validates DPoP proofs (alg allowlist, `htm`/`htu`, nonce, replay window, signature) backed by pluggable `IDpopReplayCache`. Ships with `InMemoryDpopReplayCache` for restart-only deployments. -- `RestartOnlyPluginGuard` – enforces restart-time plug-in registration (deterministic path normalisation; throws if new plug-ins added post-seal). -- `ServiceCollectionExtensions.AddScannerAuthorityCore` – DI helper wiring Authority client, OpTok source, DPoP validation, replay cache, and plug-in guard. - -## Testing guarantees - -Unit tests (`StellaOps.Scanner.Core.Tests`) assert: - -- DTO JSON round-trips are stable and deterministic (`ScannerCoreContractsTests` + golden fixtures). -- Identifier/hash helpers ignore case and emit lowercase hex. -- Timestamp normalisation retains UTC semantics. -- Log scopes push/pop correlation context predictably while staying under the 5 µs envelope. -- Authority token caching honours refresh skew and invalidation. -- DPoP validator accepts valid proofs, rejects nonce mismatch/replay, and enforces signature validation. -- Restart-only plug-in guard blocks runtime additions post-seal. +# Scanner Core Contracts + +The **Scanner Core** library provides shared contracts, observability helpers, and security utilities consumed by `Scanner.WebService`, `Scanner.Worker`, analyzers, and tooling. These primitives guarantee deterministic identifiers, timestamps, and log context for all scanning flows. + +## Canonical DTOs + +- `ScanJob` & `ScanJobStatus` – canonical job metadata (image reference/digest, tenant, correlation ID, timestamps, failure details). Constructors normalise timestamps to UTC microsecond precision and canonicalise image digests. Round-trips with `JsonSerializerDefaults.Web` using `ScannerJsonOptions`. +- `ScanProgressEvent` & `ScanStage`/`ScanProgressEventKind` – stage-level progress surface for queue/stream consumers. Includes deterministic sequence numbers, optional progress percentage, attributes, and attached `ScannerError`. 
+- `ScannerError` & `ScannerErrorCode` – shared error taxonomy spanning queue, analyzers, storage, exporters, and signing. Carries severity, retryability, structured details, and microsecond-precision timestamps. +- `ScanJobId` – strongly-typed identifier rendered as `Guid` (lowercase `N` format) with deterministic parsing. + +### Canonical JSON samples + +The golden fixtures consumed by `ScannerCoreContractsTests` document the wire shape shared with downstream services. They live under `src/Scanner/__Tests/StellaOps.Scanner.Core.Tests/Fixtures/` and a representative extract is shown below. + +```json +{ + "id": "8f4cc9c582454b9d9b4f5ae049631b7d", + "status": "running", + "imageReference": "registry.example.com/stellaops/scanner:1.2.3", + "imageDigest": "sha256:abcdef", + "createdAt": "2025-10-18T14:30:15.123456+00:00", + "updatedAt": "2025-10-18T14:30:20.123456+00:00", + "correlationId": "scan-analyzeoperatingsystem-8f4cc9c582454b9d9b4f5ae049631b7d", + "tenantId": "tenant-a", + "metadata": { + "requestId": "req-1234", + "source": "ci" + }, + "failure": { + "code": "analyzerFailure", + "severity": "error", + "message": "Analyzer failed to parse layer", + "timestamp": "2025-10-18T14:30:15.123456+00:00", + "retryable": false, + "stage": "AnalyzeOperatingSystem", + "component": "os-analyzer", + "details": { + "layerDigest": "sha256:deadbeef", + "attempt": "1" + } + } +} +``` + +Progress events follow the same conventions (`jobId`, `stage`, `kind`, `timestamp`, `attributes`, optional embedded `ScannerError`). The fixtures are verified via deterministic JSON comparison in every CI run. + +## Deterministic helpers + +- `ScannerIdentifiers` – derives `ScanJobId`, correlation IDs, and SHA-256 hashes from normalised inputs (image reference/digest, tenant, salt). Ensures case-insensitive stability and reproducible metric keys. +- `ScannerTimestamps` – trims to microsecond precision, provides ISO-8601 (`yyyy-MM-ddTHH:mm:ss.ffffffZ`) rendering, and parsing helpers. +- `ScannerJsonOptions` – standard JSON options (web defaults, camel-case enums) shared by services/tests. +- `ScanAnalysisStore` & `ScanAnalysisKeys` – shared in-memory analysis cache flowing through Worker stages. OS analyzers populate + `analysis.os.packages` (raw output), `analysis.os.fragments` (per-analyzer component fragments), and merge into + `analysis.layers.fragments` so emit/diff stages can compose SBOMs and diffs without knowledge of individual analyzer + implementations. + +## Observability primitives + +- `ScannerDiagnostics` – global `ActivitySource`/`Meter` for scanner components. `StartActivity` seeds deterministic tags (`job_id`, `stage`, `component`, `correlation_id`). +- `ScannerMetricNames` – centralises metric prefixes (`stellaops.scanner.*`) and deterministic job/event tag builders. +- `ScannerCorrelationContext` & `ScannerCorrelationContextAccessor` – ambient correlation propagation via `AsyncLocal` for log scopes, metrics, and diagnostics. +- `ScannerLogExtensions` – `ILogger` scopes for jobs/progress events with automatic correlation context push, minimal allocations, and consistent structured fields. 
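As a usage sketch — the stage class, logger wiring, and log message are invented for illustration; only `BeginScanScope` and `ScanStage.AnalyzeOperatingSystem` are taken from the benchmark below — a Worker stage would typically push a scan scope before doing any work so that every log line emitted underneath carries the same correlation fields:

```csharp
using Microsoft.Extensions.Logging;
using StellaOps.Scanner.Core.Contracts;
using StellaOps.Scanner.Core.Observability;

// Hypothetical stage wrapper — illustrative only, not a type from the repository.
public sealed class OsAnalyzerStage
{
    private readonly ILogger<OsAnalyzerStage> _logger;

    public OsAnalyzerStage(ILogger<OsAnalyzerStage> logger) => _logger = logger;

    public void Execute(ScanJob job)
    {
        // BeginScanScope pushes job/stage/component onto the ambient correlation
        // context (AsyncLocal), so nested log lines and metrics share correlation_id.
        using var scope = _logger.BeginScanScope(job, nameof(ScanStage.AnalyzeOperatingSystem), "os-analyzer");

        _logger.LogInformation("OS analyzer stage started");
        // ... analyzer work happens here ...
    }
}
```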
+ +### Observability overhead validation + +A micro-benchmark executed on 2025-10-19 (4 vCPU runner, .NET 10.0.100-rc.1) measured the average scope cost across 1 000 000 iterations: + +| Scope | Mean (µs/call) | +|-------|----------------| +| `BeginScanScope` (logger attached) | 0.80 | +| `BeginScanScope` (noop logger) | 0.31 | +| `BeginProgressScope` | 0.57 | + +To reproduce, run `dotnet test src/Scanner/__Tests/StellaOps.Scanner.Core.Tests -c Release` (see `ScannerLogExtensionsPerformanceTests`) or copy the snippet below into a throwaway `dotnet run` console project and execute it with `dotnet run -c Release`: + +```csharp +using System.Collections.Generic; +using System.Diagnostics; +using Microsoft.Extensions.Logging; +using StellaOps.Scanner.Core.Contracts; +using StellaOps.Scanner.Core.Observability; +using StellaOps.Scanner.Core.Utility; + +var factory = LoggerFactory.Create(builder => builder.AddFilter(static _ => true)); +var logger = factory.CreateLogger("bench"); + +var jobId = ScannerIdentifiers.CreateJobId("registry.example.com/stellaops/scanner:1.2.3", "sha256:abcdef", "tenant-a", "benchmark"); +var correlationId = ScannerIdentifiers.CreateCorrelationId(jobId, nameof(ScanStage.AnalyzeOperatingSystem)); +var now = ScannerTimestamps.Normalize(new DateTimeOffset(2025, 10, 19, 12, 0, 0, TimeSpan.Zero)); + +var job = new ScanJob(jobId, ScanJobStatus.Running, "registry.example.com/stellaops/scanner:1.2.3", "sha256:abcdef", now, now, correlationId, "tenant-a", new Dictionary(StringComparer.Ordinal) { ["requestId"] = "req-bench" }); +var progress = new ScanProgressEvent(jobId, ScanStage.AnalyzeOperatingSystem, ScanProgressEventKind.Progress, 42, now, 10.5, "benchmark", new Dictionary(StringComparer.Ordinal) { ["sample"] = "true" }); + +Console.WriteLine("Scanner Core Observability micro-bench (1,000,000 iterations)"); +Report("BeginScanScope (logger)", Measure(static ctx => ctx.Logger.BeginScanScope(ctx.Job, ctx.Stage, ctx.Component), new ScopeContext(logger, job, nameof(ScanStage.AnalyzeOperatingSystem), "os-analyzer"))); +Report("BeginScanScope (no logger)", Measure(static ctx => ScannerLogExtensions.BeginScanScope(null, ctx.Job, ctx.Stage, ctx.Component), new ScopeContext(logger, job, nameof(ScanStage.AnalyzeOperatingSystem), "os-analyzer"))); +Report("BeginProgressScope", Measure(static ctx => ctx.Logger.BeginProgressScope(ctx.Progress!, ctx.Component), new ScopeContext(logger, job, nameof(ScanStage.AnalyzeOperatingSystem), "os-analyzer", progress))); + +static double Measure(Func factory, ScopeContext context) +{ + const int iterations = 1_000_000; + for (var i = 0; i < 10_000; i++) + { + using var scope = factory(context); + } + + GC.Collect(); + GC.WaitForPendingFinalizers(); + GC.Collect(); + + var sw = Stopwatch.StartNew(); + for (var i = 0; i < iterations; i++) + { + using var scope = factory(context); + } + + sw.Stop(); + return sw.Elapsed.TotalSeconds * 1_000_000 / iterations; +} + +static void Report(string label, double microseconds) + => Console.WriteLine($"{label,-28}: {microseconds:F3} µs"); + +readonly record struct ScopeContext(ILogger Logger, ScanJob Job, string? Stage, string? Component, ScanProgressEvent? Progress = null); +``` + +Both guardrails enforce the ≤ 5 µs acceptance target for SP9-G1. + +## Security utilities + +- `AuthorityTokenSource` – caches short-lived OpToks per audience+scope using deterministic keys and refresh skew (default 30 s). Integrates with `StellaOps.Auth.Client`. 
+- `DpopProofValidator` – validates DPoP proofs (alg allowlist, `htm`/`htu`, nonce, replay window, signature) backed by pluggable `IDpopReplayCache`. Ships with `InMemoryDpopReplayCache` for restart-only deployments. +- `RestartOnlyPluginGuard` – enforces restart-time plug-in registration (deterministic path normalisation; throws if new plug-ins added post-seal). +- `ServiceCollectionExtensions.AddScannerAuthorityCore` – DI helper wiring Authority client, OpTok source, DPoP validation, replay cache, and plug-in guard. + +## Testing guarantees + +Unit tests (`StellaOps.Scanner.Core.Tests`) assert: + +- DTO JSON round-trips are stable and deterministic (`ScannerCoreContractsTests` + golden fixtures). +- Identifier/hash helpers ignore case and emit lowercase hex. +- Timestamp normalisation retains UTC semantics. +- Log scopes push/pop correlation context predictably while staying under the 5 µs envelope. +- Authority token caching honours refresh skew and invalidation. +- DPoP validator accepts valid proofs, rejects nonce mismatch/replay, and enforces signature validation. +- Restart-only plug-in guard blocks runtime additions post-seal. diff --git a/docs/schemas/policy-diff-summary.schema.json b/docs/schemas/policy-diff-summary.schema.json index 64b1a4d0..c7f0484d 100644 --- a/docs/schemas/policy-diff-summary.schema.json +++ b/docs/schemas/policy-diff-summary.schema.json @@ -1,71 +1,71 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - "title": "PolicyDiffSummary", - "type": "object", - "additionalProperties": false, - "properties": { - "SchemaVersion": { - "type": "string" - }, - "Added": { - "type": "integer", - "format": "int32" - }, - "Removed": { - "type": "integer", - "format": "int32" - }, - "Unchanged": { - "type": "integer", - "format": "int32" - }, - "BySeverity": { - "type": "object", - "additionalProperties": { - "$ref": "#/definitions/PolicyDiffSeverityDelta" - } - }, - "RuleHits": { - "type": "array", - "items": { - "$ref": "#/definitions/PolicyDiffRuleDelta" - } - } - }, - "definitions": { - "PolicyDiffSeverityDelta": { - "type": "object", - "additionalProperties": false, - "properties": { - "Up": { - "type": "integer", - "format": "int32" - }, - "Down": { - "type": "integer", - "format": "int32" - } - } - }, - "PolicyDiffRuleDelta": { - "type": "object", - "additionalProperties": false, - "properties": { - "RuleId": { - "type": "string" - }, - "RuleName": { - "type": "string" - }, - "Up": { - "type": "integer", - "format": "int32" - }, - "Down": { - "type": "integer", - "format": "int32" - } - } - } - } -} +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "title": "PolicyDiffSummary", + "type": "object", + "additionalProperties": false, + "properties": { + "SchemaVersion": { + "type": "string" + }, + "Added": { + "type": "integer", + "format": "int32" + }, + "Removed": { + "type": "integer", + "format": "int32" + }, + "Unchanged": { + "type": "integer", + "format": "int32" + }, + "BySeverity": { + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/PolicyDiffSeverityDelta" + } + }, + "RuleHits": { + "type": "array", + "items": { + "$ref": "#/definitions/PolicyDiffRuleDelta" + } + } + }, + "definitions": { + "PolicyDiffSeverityDelta": { + "type": "object", + "additionalProperties": false, + "properties": { + "Up": { + "type": "integer", + "format": "int32" + }, + "Down": { + "type": "integer", + "format": "int32" + } + } + }, + "PolicyDiffRuleDelta": { + "type": "object", + "additionalProperties": false, + "properties": { + "RuleId": 
{ + "type": "string" + }, + "RuleName": { + "type": "string" + }, + "Up": { + "type": "integer", + "format": "int32" + }, + "Down": { + "type": "integer", + "format": "int32" + } + } + } + } +} diff --git a/docs/schemas/policy-explain-trace.schema.json b/docs/schemas/policy-explain-trace.schema.json index 8f928212..41808ad7 100644 --- a/docs/schemas/policy-explain-trace.schema.json +++ b/docs/schemas/policy-explain-trace.schema.json @@ -1,258 +1,258 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - "title": "PolicyExplainTrace", - "type": "object", - "additionalProperties": false, - "properties": { - "SchemaVersion": { - "type": "string" - }, - "FindingId": { - "type": "string" - }, - "PolicyId": { - "type": "string" - }, - "PolicyVersion": { - "type": "integer", - "format": "int32" - }, - "TenantId": { - "type": "string" - }, - "RunId": { - "type": "string" - }, - "EvaluatedAt": { - "type": "string", - "format": "date-time" - }, - "Verdict": { - "$ref": "#/definitions/PolicyExplainVerdict" - }, - "RuleChain": { - "type": "array", - "items": { - "$ref": "#/definitions/PolicyExplainRule" - } - }, - "Evidence": { - "type": "array", - "items": { - "$ref": "#/definitions/PolicyExplainEvidence" - } - }, - "VexImpacts": { - "type": "array", - "items": { - "$ref": "#/definitions/PolicyExplainVexImpact" - } - }, - "History": { - "type": "array", - "items": { - "$ref": "#/definitions/PolicyExplainHistoryEvent" - } - }, - "Metadata": { - "type": "object", - "additionalProperties": { - "type": "string" - } - } - }, - "definitions": { - "PolicyExplainVerdict": { - "type": "object", - "additionalProperties": false, - "properties": { - "Status": { - "$ref": "#/definitions/PolicyVerdictStatus" - }, - "Severity": { - "oneOf": [ - { - "type": "null" - }, - { - "$ref": "#/definitions/SeverityRank" - } - ] - }, - "Quiet": { - "type": "boolean" - }, - "Score": { - "type": [ - "null", - "number" - ], - "format": "double" - }, - "Rationale": { - "type": [ - "null", - "string" - ] - } - } - }, - "PolicyVerdictStatus": { - "type": "integer", - "description": "", - "x-enumNames": [ - "Passed", - "Warned", - "Blocked", - "Quieted", - "Ignored" - ], - "enum": [ - 0, - 1, - 2, - 3, - 4 - ] - }, - "SeverityRank": { - "type": "integer", - "description": "", - "x-enumNames": [ - "None", - "Info", - "Low", - "Medium", - "High", - "Critical", - "Unknown" - ], - "enum": [ - 0, - 1, - 2, - 3, - 4, - 5, - 6 - ] - }, - "PolicyExplainRule": { - "type": "object", - "additionalProperties": false, - "properties": { - "RuleId": { - "type": "string" - }, - "RuleName": { - "type": "string" - }, - "Action": { - "type": "string" - }, - "Decision": { - "type": "string" - }, - "Score": { - "type": "number", - "format": "double" - }, - "Condition": { - "type": [ - "null", - "string" - ] - } - } - }, - "PolicyExplainEvidence": { - "type": "object", - "additionalProperties": false, - "properties": { - "Type": { - "type": "string" - }, - "Reference": { - "type": "string" - }, - "Source": { - "type": "string" - }, - "Status": { - "type": "string" - }, - "Weight": { - "type": "number", - "format": "double" - }, - "Justification": { - "type": [ - "null", - "string" - ] - }, - "Metadata": { - "type": "object", - "additionalProperties": { - "type": "string" - } - } - } - }, - "PolicyExplainVexImpact": { - "type": "object", - "additionalProperties": false, - "properties": { - "StatementId": { - "type": "string" - }, - "Provider": { - "type": "string" - }, - "Status": { - "type": "string" - }, - "Accepted": { - "type": "boolean" - }, 
- "Justification": { - "type": [ - "null", - "string" - ] - }, - "Confidence": { - "type": [ - "null", - "string" - ] - } - } - }, - "PolicyExplainHistoryEvent": { - "type": "object", - "additionalProperties": false, - "properties": { - "Status": { - "type": "string" - }, - "OccurredAt": { - "type": "string", - "format": "date-time" - }, - "Actor": { - "type": [ - "null", - "string" - ] - }, - "Note": { - "type": [ - "null", - "string" - ] - } - } - } - } -} +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "title": "PolicyExplainTrace", + "type": "object", + "additionalProperties": false, + "properties": { + "SchemaVersion": { + "type": "string" + }, + "FindingId": { + "type": "string" + }, + "PolicyId": { + "type": "string" + }, + "PolicyVersion": { + "type": "integer", + "format": "int32" + }, + "TenantId": { + "type": "string" + }, + "RunId": { + "type": "string" + }, + "EvaluatedAt": { + "type": "string", + "format": "date-time" + }, + "Verdict": { + "$ref": "#/definitions/PolicyExplainVerdict" + }, + "RuleChain": { + "type": "array", + "items": { + "$ref": "#/definitions/PolicyExplainRule" + } + }, + "Evidence": { + "type": "array", + "items": { + "$ref": "#/definitions/PolicyExplainEvidence" + } + }, + "VexImpacts": { + "type": "array", + "items": { + "$ref": "#/definitions/PolicyExplainVexImpact" + } + }, + "History": { + "type": "array", + "items": { + "$ref": "#/definitions/PolicyExplainHistoryEvent" + } + }, + "Metadata": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + }, + "definitions": { + "PolicyExplainVerdict": { + "type": "object", + "additionalProperties": false, + "properties": { + "Status": { + "$ref": "#/definitions/PolicyVerdictStatus" + }, + "Severity": { + "oneOf": [ + { + "type": "null" + }, + { + "$ref": "#/definitions/SeverityRank" + } + ] + }, + "Quiet": { + "type": "boolean" + }, + "Score": { + "type": [ + "null", + "number" + ], + "format": "double" + }, + "Rationale": { + "type": [ + "null", + "string" + ] + } + } + }, + "PolicyVerdictStatus": { + "type": "integer", + "description": "", + "x-enumNames": [ + "Passed", + "Warned", + "Blocked", + "Quieted", + "Ignored" + ], + "enum": [ + 0, + 1, + 2, + 3, + 4 + ] + }, + "SeverityRank": { + "type": "integer", + "description": "", + "x-enumNames": [ + "None", + "Info", + "Low", + "Medium", + "High", + "Critical", + "Unknown" + ], + "enum": [ + 0, + 1, + 2, + 3, + 4, + 5, + 6 + ] + }, + "PolicyExplainRule": { + "type": "object", + "additionalProperties": false, + "properties": { + "RuleId": { + "type": "string" + }, + "RuleName": { + "type": "string" + }, + "Action": { + "type": "string" + }, + "Decision": { + "type": "string" + }, + "Score": { + "type": "number", + "format": "double" + }, + "Condition": { + "type": [ + "null", + "string" + ] + } + } + }, + "PolicyExplainEvidence": { + "type": "object", + "additionalProperties": false, + "properties": { + "Type": { + "type": "string" + }, + "Reference": { + "type": "string" + }, + "Source": { + "type": "string" + }, + "Status": { + "type": "string" + }, + "Weight": { + "type": "number", + "format": "double" + }, + "Justification": { + "type": [ + "null", + "string" + ] + }, + "Metadata": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + }, + "PolicyExplainVexImpact": { + "type": "object", + "additionalProperties": false, + "properties": { + "StatementId": { + "type": "string" + }, + "Provider": { + "type": "string" + }, + "Status": { + "type": "string" + }, + "Accepted": { + "type": 
"boolean" + }, + "Justification": { + "type": [ + "null", + "string" + ] + }, + "Confidence": { + "type": [ + "null", + "string" + ] + } + } + }, + "PolicyExplainHistoryEvent": { + "type": "object", + "additionalProperties": false, + "properties": { + "Status": { + "type": "string" + }, + "OccurredAt": { + "type": "string", + "format": "date-time" + }, + "Actor": { + "type": [ + "null", + "string" + ] + }, + "Note": { + "type": [ + "null", + "string" + ] + } + } + } + } +} diff --git a/docs/schemas/policy-run-request.schema.json b/docs/schemas/policy-run-request.schema.json index 7883ea10..7a179ac0 100644 --- a/docs/schemas/policy-run-request.schema.json +++ b/docs/schemas/policy-run-request.schema.json @@ -1,130 +1,130 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - "title": "PolicyRunRequest", - "type": "object", - "additionalProperties": false, - "properties": { - "SchemaVersion": { - "type": "string" - }, - "TenantId": { - "type": "string" - }, - "PolicyId": { - "type": "string" - }, - "PolicyVersion": { - "type": [ - "integer", - "null" - ], - "format": "int32" - }, - "Mode": { - "$ref": "#/definitions/PolicyRunMode" - }, - "Priority": { - "$ref": "#/definitions/PolicyRunPriority" - }, - "RunId": { - "type": [ - "null", - "string" - ] - }, - "QueuedAt": { - "type": [ - "null", - "string" - ], - "format": "date-time" - }, - "RequestedBy": { - "type": [ - "null", - "string" - ] - }, - "CorrelationId": { - "type": [ - "null", - "string" - ] - }, - "Metadata": { - "type": [ - "null", - "object" - ], - "additionalProperties": { - "type": "string" - } - }, - "Inputs": { - "$ref": "#/definitions/PolicyRunInputs" - } - }, - "definitions": { - "PolicyRunMode": { - "type": "integer", - "description": "", - "x-enumNames": [ - "Full", - "Incremental", - "Simulate" - ], - "enum": [ - 0, - 1, - 2 - ] - }, - "PolicyRunPriority": { - "type": "integer", - "description": "", - "x-enumNames": [ - "Normal", - "High", - "Emergency" - ], - "enum": [ - 0, - 1, - 2 - ] - }, - "PolicyRunInputs": { - "type": "object", - "additionalProperties": false, - "properties": { - "SbomSet": { - "type": "array", - "items": { - "type": "string" - } - }, - "AdvisoryCursor": { - "type": [ - "null", - "string" - ], - "format": "date-time" - }, - "VexCursor": { - "type": [ - "null", - "string" - ], - "format": "date-time" - }, - "Environment": { - "type": "object", - "additionalProperties": {} - }, - "CaptureExplain": { - "type": "boolean" - } - } - } - } -} +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "title": "PolicyRunRequest", + "type": "object", + "additionalProperties": false, + "properties": { + "SchemaVersion": { + "type": "string" + }, + "TenantId": { + "type": "string" + }, + "PolicyId": { + "type": "string" + }, + "PolicyVersion": { + "type": [ + "integer", + "null" + ], + "format": "int32" + }, + "Mode": { + "$ref": "#/definitions/PolicyRunMode" + }, + "Priority": { + "$ref": "#/definitions/PolicyRunPriority" + }, + "RunId": { + "type": [ + "null", + "string" + ] + }, + "QueuedAt": { + "type": [ + "null", + "string" + ], + "format": "date-time" + }, + "RequestedBy": { + "type": [ + "null", + "string" + ] + }, + "CorrelationId": { + "type": [ + "null", + "string" + ] + }, + "Metadata": { + "type": [ + "null", + "object" + ], + "additionalProperties": { + "type": "string" + } + }, + "Inputs": { + "$ref": "#/definitions/PolicyRunInputs" + } + }, + "definitions": { + "PolicyRunMode": { + "type": "integer", + "description": "", + "x-enumNames": [ + "Full", + "Incremental", + "Simulate" + 
], + "enum": [ + 0, + 1, + 2 + ] + }, + "PolicyRunPriority": { + "type": "integer", + "description": "", + "x-enumNames": [ + "Normal", + "High", + "Emergency" + ], + "enum": [ + 0, + 1, + 2 + ] + }, + "PolicyRunInputs": { + "type": "object", + "additionalProperties": false, + "properties": { + "SbomSet": { + "type": "array", + "items": { + "type": "string" + } + }, + "AdvisoryCursor": { + "type": [ + "null", + "string" + ], + "format": "date-time" + }, + "VexCursor": { + "type": [ + "null", + "string" + ], + "format": "date-time" + }, + "Environment": { + "type": "object", + "additionalProperties": {} + }, + "CaptureExplain": { + "type": "boolean" + } + } + } + } +} diff --git a/docs/schemas/policy-run-status.schema.json b/docs/schemas/policy-run-status.schema.json index 81f268a2..90b6c136 100644 --- a/docs/schemas/policy-run-status.schema.json +++ b/docs/schemas/policy-run-status.schema.json @@ -1,217 +1,217 @@ -{ - "$schema": "http://json-schema.org/draft-04/schema#", - "title": "PolicyRunStatus", - "type": "object", - "additionalProperties": false, - "properties": { - "SchemaVersion": { - "type": "string" - }, - "RunId": { - "type": "string" - }, - "TenantId": { - "type": "string" - }, - "PolicyId": { - "type": "string" - }, - "PolicyVersion": { - "type": "integer", - "format": "int32" - }, - "Mode": { - "$ref": "#/definitions/PolicyRunMode" - }, - "Status": { - "$ref": "#/definitions/PolicyRunExecutionStatus" - }, - "Priority": { - "$ref": "#/definitions/PolicyRunPriority" - }, - "QueuedAt": { - "type": "string", - "format": "date-time" - }, - "StartedAt": { - "type": [ - "null", - "string" - ], - "format": "date-time" - }, - "FinishedAt": { - "type": [ - "null", - "string" - ], - "format": "date-time" - }, - "DeterminismHash": { - "type": [ - "null", - "string" - ] - }, - "ErrorCode": { - "type": [ - "null", - "string" - ] - }, - "Error": { - "type": [ - "null", - "string" - ] - }, - "Attempts": { - "type": "integer", - "format": "int32" - }, - "TraceId": { - "type": [ - "null", - "string" - ] - }, - "ExplainUri": { - "type": [ - "null", - "string" - ] - }, - "Metadata": { - "type": "object", - "additionalProperties": { - "type": "string" - } - }, - "Stats": { - "$ref": "#/definitions/PolicyRunStats" - }, - "Inputs": { - "$ref": "#/definitions/PolicyRunInputs" - } - }, - "definitions": { - "PolicyRunMode": { - "type": "integer", - "description": "", - "x-enumNames": [ - "Full", - "Incremental", - "Simulate" - ], - "enum": [ - 0, - 1, - 2 - ] - }, - "PolicyRunExecutionStatus": { - "type": "integer", - "description": "", - "x-enumNames": [ - "Queued", - "Running", - "Succeeded", - "Failed", - "Cancelled", - "ReplayPending" - ], - "enum": [ - 0, - 1, - 2, - 3, - 4, - 5 - ] - }, - "PolicyRunPriority": { - "type": "integer", - "description": "", - "x-enumNames": [ - "Normal", - "High", - "Emergency" - ], - "enum": [ - 0, - 1, - 2 - ] - }, - "PolicyRunStats": { - "type": "object", - "additionalProperties": false, - "properties": { - "Components": { - "type": "integer", - "format": "int32" - }, - "RulesFired": { - "type": "integer", - "format": "int32" - }, - "FindingsWritten": { - "type": "integer", - "format": "int32" - }, - "VexOverrides": { - "type": "integer", - "format": "int32" - }, - "Quieted": { - "type": "integer", - "format": "int32" - }, - "Suppressed": { - "type": "integer", - "format": "int32" - }, - "DurationSeconds": { - "type": [ - "null", - "number" - ], - "format": "double" - } - } - }, - "PolicyRunInputs": { - "type": "object", - "additionalProperties": false, - 
"properties": { - "SbomSet": { - "type": "array", - "items": { - "type": "string" - } - }, - "AdvisoryCursor": { - "type": [ - "null", - "string" - ], - "format": "date-time" - }, - "VexCursor": { - "type": [ - "null", - "string" - ], - "format": "date-time" - }, - "Environment": { - "type": "object", - "additionalProperties": {} - }, - "CaptureExplain": { - "type": "boolean" - } - } - } - } -} +{ + "$schema": "http://json-schema.org/draft-04/schema#", + "title": "PolicyRunStatus", + "type": "object", + "additionalProperties": false, + "properties": { + "SchemaVersion": { + "type": "string" + }, + "RunId": { + "type": "string" + }, + "TenantId": { + "type": "string" + }, + "PolicyId": { + "type": "string" + }, + "PolicyVersion": { + "type": "integer", + "format": "int32" + }, + "Mode": { + "$ref": "#/definitions/PolicyRunMode" + }, + "Status": { + "$ref": "#/definitions/PolicyRunExecutionStatus" + }, + "Priority": { + "$ref": "#/definitions/PolicyRunPriority" + }, + "QueuedAt": { + "type": "string", + "format": "date-time" + }, + "StartedAt": { + "type": [ + "null", + "string" + ], + "format": "date-time" + }, + "FinishedAt": { + "type": [ + "null", + "string" + ], + "format": "date-time" + }, + "DeterminismHash": { + "type": [ + "null", + "string" + ] + }, + "ErrorCode": { + "type": [ + "null", + "string" + ] + }, + "Error": { + "type": [ + "null", + "string" + ] + }, + "Attempts": { + "type": "integer", + "format": "int32" + }, + "TraceId": { + "type": [ + "null", + "string" + ] + }, + "ExplainUri": { + "type": [ + "null", + "string" + ] + }, + "Metadata": { + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "Stats": { + "$ref": "#/definitions/PolicyRunStats" + }, + "Inputs": { + "$ref": "#/definitions/PolicyRunInputs" + } + }, + "definitions": { + "PolicyRunMode": { + "type": "integer", + "description": "", + "x-enumNames": [ + "Full", + "Incremental", + "Simulate" + ], + "enum": [ + 0, + 1, + 2 + ] + }, + "PolicyRunExecutionStatus": { + "type": "integer", + "description": "", + "x-enumNames": [ + "Queued", + "Running", + "Succeeded", + "Failed", + "Cancelled", + "ReplayPending" + ], + "enum": [ + 0, + 1, + 2, + 3, + 4, + 5 + ] + }, + "PolicyRunPriority": { + "type": "integer", + "description": "", + "x-enumNames": [ + "Normal", + "High", + "Emergency" + ], + "enum": [ + 0, + 1, + 2 + ] + }, + "PolicyRunStats": { + "type": "object", + "additionalProperties": false, + "properties": { + "Components": { + "type": "integer", + "format": "int32" + }, + "RulesFired": { + "type": "integer", + "format": "int32" + }, + "FindingsWritten": { + "type": "integer", + "format": "int32" + }, + "VexOverrides": { + "type": "integer", + "format": "int32" + }, + "Quieted": { + "type": "integer", + "format": "int32" + }, + "Suppressed": { + "type": "integer", + "format": "int32" + }, + "DurationSeconds": { + "type": [ + "null", + "number" + ], + "format": "double" + } + } + }, + "PolicyRunInputs": { + "type": "object", + "additionalProperties": false, + "properties": { + "SbomSet": { + "type": "array", + "items": { + "type": "string" + } + }, + "AdvisoryCursor": { + "type": [ + "null", + "string" + ], + "format": "date-time" + }, + "VexCursor": { + "type": [ + "null", + "string" + ], + "format": "date-time" + }, + "Environment": { + "type": "object", + "additionalProperties": {} + }, + "CaptureExplain": { + "type": "boolean" + } + } + } + } +} diff --git a/docs/security/authority-scopes.md b/docs/security/authority-scopes.md index 1423dc17..c7d8def2 100644 --- 
a/docs/security/authority-scopes.md +++ b/docs/security/authority-scopes.md @@ -1,261 +1,261 @@ -# Authority Scopes & Tenancy — AOC Update - -> **Audience:** Authority Core, platform security engineers, DevOps owners. -> **Scope:** Scope taxonomy, tenancy enforcement, rollout guidance for the Aggregation-Only Contract (Sprint 19). - -Authority issues short-lived tokens bound to tenants and scopes. Sprint 19 introduces new scopes to support the AOC guardrails in Concelier and Excititor. This document lists the canonical scope catalogue, describes tenancy propagation, and outlines operational safeguards. - ---- - -## 1 · Scope catalogue (post AOC) - -| Scope | Surface | Purpose | Notes | -|-------|---------|---------|-------| -| `advisory:ingest` | Concelier ingestion APIs | Append-only writes to `advisory_raw` collections. | Requires tenant claim; blocked for global clients. | -| `advisory:read` | `/aoc/verify`, Concelier dashboards, CLI | Read-only access to stored advisories and guard results. | Must be requested with `aoc:verify`; Authority rejects tokens missing the pairing. | -| `vex:ingest` | Excititor ingestion APIs | Append-only writes to `vex_raw`. | Mirrors `advisory:ingest`; tenant required. | -| `vex:read` | `/aoc/verify`, Excititor dashboards, CLI | Read-only access to stored VEX material. | Must be requested with `aoc:verify`; Authority rejects tokens missing the pairing. | -| `aoc:verify` | CLI/CI pipelines, Console verification jobs | Execute Aggregation-Only Contract guard runs. | Always issued with tenant; required whenever requesting `advisory:read`, `vex:read`, or any `signals:*` scope. | -| `signals:read` | Signals API, reachability dashboards | Read-only access to stored reachability signals. | Tenant and `aoc:verify` required; missing pairing returns `invalid_scope`. | -| `signals:write` | Signals ingestion APIs | Append-only writes for reachability signals. | Requires tenant and `aoc:verify`; Authority logs `authority.aoc_scope_violation` on mismatch. | -| `signals:admin` | Signals administration tooling | Rotate credentials, manage reachability sensors, purge stale data. | Reserved for automation; `aoc:verify` + tenant mandatory; violations are audited. | -| `graph:write` | Cartographer pipeline | Enqueue graph build/overlay jobs. | Reserved for Cartographer service identity; tenant required. | -| `graph:read` | Graph API, Scheduler overlays, UI | Read graph projections/overlays. | Tenant required; granted to Cartographer, Graph API, Scheduler. | -| `graph:export` | Graph export endpoints | Stream GraphML/JSONL artefacts. | UI/gateway automation only; tenant required. | -| `graph:simulate` | Policy simulation overlays | Trigger what-if overlays on graphs. | Restricted to automation; tenant required. | -| `effective:write` | Policy Engine | Create/update `effective_finding_*` collections. | **Only** the Policy Engine service client may hold this scope; tenant required. | -| `findings:read` | Console, CLI, exports | Read derived findings materialised by Policy Engine. | Shared across tenants with RBAC; tenant claim still enforced. | -| `policy:author` | Policy Studio (Console, CLI) | Author drafts, run lint, execute quick simulations. | Tenant required; typically granted via `role/policy-author`. | -| `policy:review` | Policy Studio review panes | Review drafts, leave comments, request changes. | Tenant required; pair with `policy:simulate` for diff previews. | -| `policy:approve` | Policy Studio approvals | Approve or reject policy drafts. 
| Tenant required; fresh-auth enforced by Console UI. | -| `policy:operate` | Policy Studio promotion controls | Trigger batch simulations, promotions, and canary runs. | Tenant required; combine with `policy:run`/`policy:activate`. | -| `policy:audit` | Policy audit exports | Access immutable policy history, comments, and signatures. | Tenant required; read-only access. | -| `policy:simulate` | Policy Studio / CLI simulations | Run simulations against tenant inventories. | Tenant required; available to authors, reviewers, operators. | -| `vuln:read` | Vuln Explorer API/UI | Read normalized vulnerability data. | Tenant required. | -| `export.viewer` | Export Center APIs | List export profiles/runs, fetch manifests and bundles. | Tenant required; read-only access. | -| `export.operator` | Export Center APIs | Trigger export runs, manage schedules, request verifications. | Tenant required; pair with `export.admin` for retention/encryption changes. | -| `export.admin` | Export Center administrative APIs | Configure retention policies, encryption keys, and scheduling defaults. | Tenant required; token requests must include `export_reason` + `export_ticket`; Authority audits denials. | -| `orch:read` | Orchestrator dashboards/API | Read queued jobs, worker state, and rate-limit telemetry. | Tenant required; never grants mutation rights. | -| `orch:operate` | Orchestrator control actions | Execute pause/resume, retry, sync-now, and backfill operations. Requires tenant assignment **and** `operator_reason`/`operator_ticket` parameters when requesting tokens. | -| `exceptions:read` | Exception service APIs, Console | Enumerate exception definitions, routing templates, and approval state. | Tenant and approval routing metadata required for audit replay. | -| `exceptions:write` | Policy Engine → Authority bridge | Persist exception evaluations, lifecycle events, and status changes. | Tenant required; only service principals should hold this scope. | -| `exceptions:approve` | Console fresh-auth flows, delegated admins | Approve or reject exception requests routed through Authority. | Tenant required; Authority enforces MFA when any bound routing template has `requireMfa=true`. | -| `ui.read` | Console base APIs | Retrieve tenant catalog, profile metadata, and token introspection results. | Tenant header required; responses are DPoP-bound and audit logged. | -| `authority:tenants.read` | Console admin workspace | Enumerate configured tenants, default roles, and isolation metadata. | Tenant claim must match header; access audited via `authority.console.tenants.read`. | -| Existing scopes | (e.g., `policy:*`, `concelier.jobs.trigger`) | Unchanged. | `concelier.merge` is retired — clients must request `advisory:ingest`/`advisory:read`; requests continue to fail with `invalid_client`. Review `/docs/security/policy-governance.md` for policy-specific scopes. | - -### 1.1 Scope bundles (roles) - -- **`role/concelier-ingest`** → `advisory:ingest`, `advisory:read`. -- **`role/excititor-ingest`** → `vex:ingest`, `vex:read`. -- **`role/signals-uploader`** → `signals:write`, `signals:read`, `aoc:verify`. -- **`role/aoc-operator`** → `aoc:verify`, `advisory:read`, `vex:read`. -- **`role/policy-engine`** → `effective:write`, `findings:read`. -- **`role/cartographer-service`** → `graph:write`, `graph:read`. -- **`role/graph-gateway`** → `graph:read`, `graph:export`, `graph:simulate`. 
-- **`role/console`** → `ui.read`, `advisory:read`, `vex:read`, `exceptions:read`, `aoc:verify`, `findings:read`, `orch:read`, `vuln:read`. -- **`role/ui-console-admin`** → `ui.read`, `authority:tenants.read`, `authority:roles.read`, `authority:tokens.read`, `authority:clients.read` (paired with write scopes where required). -- **`role/orch-viewer`** *(Authority role: `Orch.Viewer`)* → `orch:read`. -- **`role/orch-operator`** *(Authority role: `Orch.Operator`)* → `orch:read`, `orch:operate`. -- **`role/policy-author`** → `policy:author`, `policy:read`, `policy:simulate`, `findings:read`. -- **`role/policy-reviewer`** → `policy:review`, `policy:read`, `policy:simulate`, `findings:read`. -- **`role/policy-approver`** → `policy:approve`, `policy:review`, `policy:read`, `policy:simulate`, `findings:read`. -- **`role/policy-operator`** → `policy:operate`, `policy:run`, `policy:activate`, `policy:read`, `policy:simulate`, `findings:read`. -- **`role/policy-auditor`** → `policy:audit`, `policy:read`, `policy:simulate`, `findings:read`. -- **`role/export-viewer`** *(Authority role: `Export.Viewer`)* → `export.viewer`. -- **`role/export-operator`** *(Authority role: `Export.Operator`)* → `export.viewer`, `export.operator`. -- **`role/export-admin`** *(Authority role: `Export.Admin`)* → `export.viewer`, `export.operator`, `export.admin`. -- **`role/exceptions-service`** → `exceptions:read`, `exceptions:write`. -- **`role/exceptions-approver`** → `exceptions:read`, `exceptions:approve`. - -Roles are declared per tenant in `authority.yaml`: - -```yaml -tenants: - - name: default - roles: - concelier-ingest: - scopes: [advisory:ingest, advisory:read] - signals-uploader: - scopes: [signals:write, signals:read, aoc:verify] - aoc-operator: - scopes: [aoc:verify, advisory:read, vex:read] - orch-viewer: - scopes: [orch:read] - orch-operator: - scopes: [orch:read, orch:operate] - policy-author: - scopes: [policy:author, policy:read, policy:simulate, findings:read] - policy-reviewer: - scopes: [policy:review, policy:read, policy:simulate, findings:read] - policy-approver: - scopes: [policy:approve, policy:review, policy:read, policy:simulate, findings:read] - policy-operator: - scopes: [policy:operate, policy:run, policy:activate, policy:read, policy:simulate, findings:read] - policy-auditor: - scopes: [policy:audit, policy:read, policy:simulate, findings:read] - policy-engine: - scopes: [effective:write, findings:read] - exceptions-service: - scopes: [exceptions:read, exceptions:write] - exceptions-approver: - scopes: [exceptions:read, exceptions:approve] -``` - -> **MFA requirement:** When any `exceptions.routingTemplates` entry sets `requireMfa: true`, Authority refuses to mint tokens containing `exceptions:approve` unless the authenticating identity provider advertises MFA support. Password/OIDC flows produce `authority.password.grant` audit events with `reason="Exception approval scope requires an MFA-capable identity provider."` when the requirement is violated. - ---- - -## 2 · Tenancy enforcement - -### 2.1 Token claims - -Tokens now include: - -- `tenant` claim (string) — required for all ingestion and verification scopes. -- `service_identity` (optional) — e.g., `policy-engine`, `cartographer`. Required when requesting `effective:write` or `graph:write`. -- `delegation_allowed` (boolean) — defaults `false`. Prevents console tokens from delegating ingest scopes. 
- -Authority rejects requests when: - -- `tenant` is missing while requesting `advisory:ingest`, `advisory:read`, `vex:ingest`, `vex:read`, or `aoc:verify` scopes. -- `aoc:verify` is absent while tokens request `advisory:read`, `vex:read`, or any `signals:*` scope (`invalid_scope` with deterministic message). -- `service_identity != policy-engine` but `effective:write` is present (`ERR_AOC_006` enforcement). -- `service_identity != cartographer` but `graph:write` is present (graph pipeline enforcement). -- Tokens attempt to combine `advisory:ingest` with `effective:write` (separation of duties). -- `exceptions:approve` is requested by a client without a tenant assignment or via an identity provider lacking MFA when `RequireMfaForApprovals=true`. - -### 2.2 Propagation - -- API Gateway forwards `tenant` claim as header (`X-Stella-Tenant`). Services refuse requests lacking the header. -- Concelier/Excititor stamp tenant into raw documents and structured logs. -- Policy Engine copies `tenant` from tokens into `effective_finding_*` collections. -- Exception lifecycle services persist tenant and the selected routing template identifier alongside approval decisions. Authority audit events (`authority.password.grant`, `authority.client_credentials.grant`) surface `audit.scopes` and, on denials, a `scope.invalid` metadata entry so operators can trace exception approval attempts without inspecting downstream services. - -### 2.3 Cross-tenant scenarios - -- Platform operators with `tenant:admin` can assume other tenants via `/authority/tenant/switch` if explicitly permitted. -- CLI commands accept `--tenant ` to override environment default; Authority logs tenant switch events (`authority.tenant.switch`). -- Console tenant picker uses delegated token exchange (`/token/exchange`) to obtain scoped tenant tokens without exposing raw credentials. - ---- - -## 3 · Configuration changes - -### 3.1 Authority configuration (`authority.yaml`) - -Add new scopes and optional claims transformations: - -```yaml -security: - scopes: - - name: advisory:ingest - description: Concelier raw ingestion (append-only) - - name: advisory:read - description: Read Concelier advisories and guard verdicts - - name: vex:ingest - description: Excititor raw ingestion - - name: vex:read - description: Read Excititor VEX records - - name: aoc:verify - description: Run AOC verification - - name: effective:write - description: Policy Engine materialisation - - name: findings:read - description: Read derived findings - - name: graph:write - description: Cartographer build submissions - - name: graph:read - description: Read graph overlays - - name: graph:export - description: Export graph artefacts - - name: graph:simulate - description: Run graph what-if simulations - - name: vuln:read - description: Read Vuln Explorer data - claimTransforms: - - match: { scope: "effective:write" } - require: - serviceIdentity: policy-engine - - match: { scope: "graph:write" } - require: - serviceIdentity: cartographer -``` - -### 3.2 Client registration - -Update service clients: - -- `Concelier.WebService` → request `advisory:ingest`, `advisory:read`. -- `Excititor.WebService` → request `vex:ingest`, `vex:read`. -- `Policy.Engine` → request `effective:write`, `findings:read`; set `properties.serviceIdentity=policy-engine`. -- `Cartographer.Service` → request `graph:write`, `graph:read`; set `properties.serviceIdentity=cartographer`. -- `Graph API Gateway` → request `graph:read`, `graph:export`, `graph:simulate`; tenant hint required. 
-- `Console` → request `advisory:read`, `vex:read`, `aoc:verify`, `findings:read`, `vuln:read` plus existing UI scopes. -- `CLI automation` → request `aoc:verify`, `advisory:read`, `vex:read` as needed. - -Client definition snippet: - -```yaml -clients: - - clientId: concelier-web - grantTypes: [client_credentials] - scopes: [advisory:ingest, advisory:read] - tenants: [default] - - clientId: policy-engine - grantTypes: [client_credentials] - scopes: [effective:write, findings:read] - properties: - serviceIdentity: policy-engine - - clientId: cartographer-service - grantTypes: [client_credentials] - scopes: [graph:write, graph:read] - properties: - serviceIdentity: cartographer -``` - ---- - -## 4 · Operational safeguards - -- **Audit events:** Authority emits `authority.scope.granted` and `authority.scope.revoked` events with `scope` and `tenant`. Monitor for unexpected grants. -- **Rate limiting:** Apply stricter limits on `/token` endpoints for clients requesting `advisory:ingest` or `vex:ingest` to mitigate brute-force ingestion attempts. -- **Incident response:** Link AOC alerts to Authority audit logs to confirm whether violations come from expected identities. -- **Rotation:** Rotate ingest client secrets alongside guard deployments; add rotation steps to `ops/authority-key-rotation.md`. -- **Testing:** Integration tests must fail if tokens lacking `tenant` attempt ingestion; add coverage in Concelier/Excititor smoke suites (see `CONCELIER-CORE-AOC-19-013`). - ---- - -## 5 · Offline & air-gap notes - -- Offline Kit bundles include tenant-scoped service credentials. Ensure ingest bundles ship without `advisory:ingest` scopes unless strictly required. -- CLI verification in offline environments uses pre-issued `aoc:verify` tokens; document expiration and renewal processes. -- Authority replicas in air-gapped environments should restrict scope issuance to known tenants and log all `/token` interactions for later replay. - ---- - -## 6 · References - -- [Aggregation-Only Contract reference](../ingestion/aggregation-only-contract.md) -- [Architecture overview](../architecture/overview.md) -- [Concelier architecture](../ARCHITECTURE_CONCELIER.md) -- [Excititor architecture](../ARCHITECTURE_EXCITITOR.md) -- [Policy governance](policy-governance.md) -- [Authority key rotation playbook](../ops/authority-key-rotation.md) - ---- - -## 7 · Compliance checklist - -- [ ] Scope catalogue updated in Authority configuration templates. -- [ ] Role mappings documented for each tenant profile. -- [ ] Claim transforms enforce `serviceIdentity` for `effective:write`. -- [ ] Claim transforms enforce `serviceIdentity` for `graph:write`. -- [ ] Concelier/Excititor smoke tests cover missing tenant rejection. -- [ ] Offline kit credentials reviewed for least privilege. -- [ ] Audit/monitoring guidance validated with Observability Guild. -- [ ] Authority Core sign-off recorded (owner: @authority-core, due 2025-10-28). - ---- - -*Last updated: 2025-10-27 (Sprint 19).* +# Authority Scopes & Tenancy — AOC Update + +> **Audience:** Authority Core, platform security engineers, DevOps owners. +> **Scope:** Scope taxonomy, tenancy enforcement, rollout guidance for the Aggregation-Only Contract (Sprint 19). + +Authority issues short-lived tokens bound to tenants and scopes. Sprint 19 introduces new scopes to support the AOC guardrails in Concelier and Excititor. This document lists the canonical scope catalogue, describes tenancy propagation, and outlines operational safeguards. 
+ +--- + +## 1 · Scope catalogue (post AOC) + +| Scope | Surface | Purpose | Notes | +|-------|---------|---------|-------| +| `advisory:ingest` | Concelier ingestion APIs | Append-only writes to `advisory_raw` collections. | Requires tenant claim; blocked for global clients. | +| `advisory:read` | `/aoc/verify`, Concelier dashboards, CLI | Read-only access to stored advisories and guard results. | Must be requested with `aoc:verify`; Authority rejects tokens missing the pairing. | +| `vex:ingest` | Excititor ingestion APIs | Append-only writes to `vex_raw`. | Mirrors `advisory:ingest`; tenant required. | +| `vex:read` | `/aoc/verify`, Excititor dashboards, CLI | Read-only access to stored VEX material. | Must be requested with `aoc:verify`; Authority rejects tokens missing the pairing. | +| `aoc:verify` | CLI/CI pipelines, Console verification jobs | Execute Aggregation-Only Contract guard runs. | Always issued with tenant; required whenever requesting `advisory:read`, `vex:read`, or any `signals:*` scope. | +| `signals:read` | Signals API, reachability dashboards | Read-only access to stored reachability signals. | Tenant and `aoc:verify` required; missing pairing returns `invalid_scope`. | +| `signals:write` | Signals ingestion APIs | Append-only writes for reachability signals. | Requires tenant and `aoc:verify`; Authority logs `authority.aoc_scope_violation` on mismatch. | +| `signals:admin` | Signals administration tooling | Rotate credentials, manage reachability sensors, purge stale data. | Reserved for automation; `aoc:verify` + tenant mandatory; violations are audited. | +| `graph:write` | Cartographer pipeline | Enqueue graph build/overlay jobs. | Reserved for Cartographer service identity; tenant required. | +| `graph:read` | Graph API, Scheduler overlays, UI | Read graph projections/overlays. | Tenant required; granted to Cartographer, Graph API, Scheduler. | +| `graph:export` | Graph export endpoints | Stream GraphML/JSONL artefacts. | UI/gateway automation only; tenant required. | +| `graph:simulate` | Policy simulation overlays | Trigger what-if overlays on graphs. | Restricted to automation; tenant required. | +| `effective:write` | Policy Engine | Create/update `effective_finding_*` collections. | **Only** the Policy Engine service client may hold this scope; tenant required. | +| `findings:read` | Console, CLI, exports | Read derived findings materialised by Policy Engine. | Shared across tenants with RBAC; tenant claim still enforced. | +| `policy:author` | Policy Studio (Console, CLI) | Author drafts, run lint, execute quick simulations. | Tenant required; typically granted via `role/policy-author`. | +| `policy:review` | Policy Studio review panes | Review drafts, leave comments, request changes. | Tenant required; pair with `policy:simulate` for diff previews. | +| `policy:approve` | Policy Studio approvals | Approve or reject policy drafts. | Tenant required; fresh-auth enforced by Console UI. | +| `policy:operate` | Policy Studio promotion controls | Trigger batch simulations, promotions, and canary runs. | Tenant required; combine with `policy:run`/`policy:activate`. | +| `policy:audit` | Policy audit exports | Access immutable policy history, comments, and signatures. | Tenant required; read-only access. | +| `policy:simulate` | Policy Studio / CLI simulations | Run simulations against tenant inventories. | Tenant required; available to authors, reviewers, operators. | +| `vuln:read` | Vuln Explorer API/UI | Read normalized vulnerability data. | Tenant required. 
| +| `export.viewer` | Export Center APIs | List export profiles/runs, fetch manifests and bundles. | Tenant required; read-only access. | +| `export.operator` | Export Center APIs | Trigger export runs, manage schedules, request verifications. | Tenant required; pair with `export.admin` for retention/encryption changes. | +| `export.admin` | Export Center administrative APIs | Configure retention policies, encryption keys, and scheduling defaults. | Tenant required; token requests must include `export_reason` + `export_ticket`; Authority audits denials. | +| `orch:read` | Orchestrator dashboards/API | Read queued jobs, worker state, and rate-limit telemetry. | Tenant required; never grants mutation rights. | +| `orch:operate` | Orchestrator control actions | Execute pause/resume, retry, sync-now, and backfill operations. Requires tenant assignment **and** `operator_reason`/`operator_ticket` parameters when requesting tokens. | +| `exceptions:read` | Exception service APIs, Console | Enumerate exception definitions, routing templates, and approval state. | Tenant and approval routing metadata required for audit replay. | +| `exceptions:write` | Policy Engine → Authority bridge | Persist exception evaluations, lifecycle events, and status changes. | Tenant required; only service principals should hold this scope. | +| `exceptions:approve` | Console fresh-auth flows, delegated admins | Approve or reject exception requests routed through Authority. | Tenant required; Authority enforces MFA when any bound routing template has `requireMfa=true`. | +| `ui.read` | Console base APIs | Retrieve tenant catalog, profile metadata, and token introspection results. | Tenant header required; responses are DPoP-bound and audit logged. | +| `authority:tenants.read` | Console admin workspace | Enumerate configured tenants, default roles, and isolation metadata. | Tenant claim must match header; access audited via `authority.console.tenants.read`. | +| Existing scopes | (e.g., `policy:*`, `concelier.jobs.trigger`) | Unchanged. | `concelier.merge` is retired — clients must request `advisory:ingest`/`advisory:read`; requests continue to fail with `invalid_client`. Review `/docs/security/policy-governance.md` for policy-specific scopes. | + +### 1.1 Scope bundles (roles) + +- **`role/concelier-ingest`** → `advisory:ingest`, `advisory:read`. +- **`role/excititor-ingest`** → `vex:ingest`, `vex:read`. +- **`role/signals-uploader`** → `signals:write`, `signals:read`, `aoc:verify`. +- **`role/aoc-operator`** → `aoc:verify`, `advisory:read`, `vex:read`. +- **`role/policy-engine`** → `effective:write`, `findings:read`. +- **`role/cartographer-service`** → `graph:write`, `graph:read`. +- **`role/graph-gateway`** → `graph:read`, `graph:export`, `graph:simulate`. +- **`role/console`** → `ui.read`, `advisory:read`, `vex:read`, `exceptions:read`, `aoc:verify`, `findings:read`, `orch:read`, `vuln:read`. +- **`role/ui-console-admin`** → `ui.read`, `authority:tenants.read`, `authority:roles.read`, `authority:tokens.read`, `authority:clients.read` (paired with write scopes where required). +- **`role/orch-viewer`** *(Authority role: `Orch.Viewer`)* → `orch:read`. +- **`role/orch-operator`** *(Authority role: `Orch.Operator`)* → `orch:read`, `orch:operate`. +- **`role/policy-author`** → `policy:author`, `policy:read`, `policy:simulate`, `findings:read`. +- **`role/policy-reviewer`** → `policy:review`, `policy:read`, `policy:simulate`, `findings:read`. 
+- **`role/policy-approver`** → `policy:approve`, `policy:review`, `policy:read`, `policy:simulate`, `findings:read`. +- **`role/policy-operator`** → `policy:operate`, `policy:run`, `policy:activate`, `policy:read`, `policy:simulate`, `findings:read`. +- **`role/policy-auditor`** → `policy:audit`, `policy:read`, `policy:simulate`, `findings:read`. +- **`role/export-viewer`** *(Authority role: `Export.Viewer`)* → `export.viewer`. +- **`role/export-operator`** *(Authority role: `Export.Operator`)* → `export.viewer`, `export.operator`. +- **`role/export-admin`** *(Authority role: `Export.Admin`)* → `export.viewer`, `export.operator`, `export.admin`. +- **`role/exceptions-service`** → `exceptions:read`, `exceptions:write`. +- **`role/exceptions-approver`** → `exceptions:read`, `exceptions:approve`. + +Roles are declared per tenant in `authority.yaml`: + +```yaml +tenants: + - name: default + roles: + concelier-ingest: + scopes: [advisory:ingest, advisory:read] + signals-uploader: + scopes: [signals:write, signals:read, aoc:verify] + aoc-operator: + scopes: [aoc:verify, advisory:read, vex:read] + orch-viewer: + scopes: [orch:read] + orch-operator: + scopes: [orch:read, orch:operate] + policy-author: + scopes: [policy:author, policy:read, policy:simulate, findings:read] + policy-reviewer: + scopes: [policy:review, policy:read, policy:simulate, findings:read] + policy-approver: + scopes: [policy:approve, policy:review, policy:read, policy:simulate, findings:read] + policy-operator: + scopes: [policy:operate, policy:run, policy:activate, policy:read, policy:simulate, findings:read] + policy-auditor: + scopes: [policy:audit, policy:read, policy:simulate, findings:read] + policy-engine: + scopes: [effective:write, findings:read] + exceptions-service: + scopes: [exceptions:read, exceptions:write] + exceptions-approver: + scopes: [exceptions:read, exceptions:approve] +``` + +> **MFA requirement:** When any `exceptions.routingTemplates` entry sets `requireMfa: true`, Authority refuses to mint tokens containing `exceptions:approve` unless the authenticating identity provider advertises MFA support. Password/OIDC flows produce `authority.password.grant` audit events with `reason="Exception approval scope requires an MFA-capable identity provider."` when the requirement is violated. + +--- + +## 2 · Tenancy enforcement + +### 2.1 Token claims + +Tokens now include: + +- `tenant` claim (string) — required for all ingestion and verification scopes. +- `service_identity` (optional) — e.g., `policy-engine`, `cartographer`. Required when requesting `effective:write` or `graph:write`. +- `delegation_allowed` (boolean) — defaults `false`. Prevents console tokens from delegating ingest scopes. + +Authority rejects requests when: + +- `tenant` is missing while requesting `advisory:ingest`, `advisory:read`, `vex:ingest`, `vex:read`, or `aoc:verify` scopes. +- `aoc:verify` is absent while tokens request `advisory:read`, `vex:read`, or any `signals:*` scope (`invalid_scope` with deterministic message). +- `service_identity != policy-engine` but `effective:write` is present (`ERR_AOC_006` enforcement). +- `service_identity != cartographer` but `graph:write` is present (graph pipeline enforcement). +- Tokens attempt to combine `advisory:ingest` with `effective:write` (separation of duties). +- `exceptions:approve` is requested by a client without a tenant assignment or via an identity provider lacking MFA when `RequireMfaForApprovals=true`. 
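As an illustration of the `aoc:verify` pairing rule, the sketch below reuses the client-definition convention shown later in this document (§3.2); the client IDs are hypothetical. The first client would be refused an `advisory:read` token at `/token` (`invalid_scope`) because `aoc:verify` is missing from its scope list, while the second satisfies the pairing:

```yaml
clients:
  - clientId: ci-verifier-broken      # hypothetical; advisory:read without aoc:verify -> invalid_scope at /token
    grantTypes: [client_credentials]
    scopes: [advisory:read]
    tenants: [default]
  - clientId: ci-verifier             # hypothetical; read scopes paired with aoc:verify are accepted
    grantTypes: [client_credentials]
    scopes: [aoc:verify, advisory:read, vex:read]
    tenants: [default]
```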
+ +### 2.2 Propagation + +- API Gateway forwards `tenant` claim as header (`X-Stella-Tenant`). Services refuse requests lacking the header. +- Concelier/Excititor stamp tenant into raw documents and structured logs. +- Policy Engine copies `tenant` from tokens into `effective_finding_*` collections. +- Exception lifecycle services persist tenant and the selected routing template identifier alongside approval decisions. Authority audit events (`authority.password.grant`, `authority.client_credentials.grant`) surface `audit.scopes` and, on denials, a `scope.invalid` metadata entry so operators can trace exception approval attempts without inspecting downstream services. + +### 2.3 Cross-tenant scenarios + +- Platform operators with `tenant:admin` can assume other tenants via `/authority/tenant/switch` if explicitly permitted. +- CLI commands accept `--tenant ` to override environment default; Authority logs tenant switch events (`authority.tenant.switch`). +- Console tenant picker uses delegated token exchange (`/token/exchange`) to obtain scoped tenant tokens without exposing raw credentials. + +--- + +## 3 · Configuration changes + +### 3.1 Authority configuration (`authority.yaml`) + +Add new scopes and optional claims transformations: + +```yaml +security: + scopes: + - name: advisory:ingest + description: Concelier raw ingestion (append-only) + - name: advisory:read + description: Read Concelier advisories and guard verdicts + - name: vex:ingest + description: Excititor raw ingestion + - name: vex:read + description: Read Excititor VEX records + - name: aoc:verify + description: Run AOC verification + - name: effective:write + description: Policy Engine materialisation + - name: findings:read + description: Read derived findings + - name: graph:write + description: Cartographer build submissions + - name: graph:read + description: Read graph overlays + - name: graph:export + description: Export graph artefacts + - name: graph:simulate + description: Run graph what-if simulations + - name: vuln:read + description: Read Vuln Explorer data + claimTransforms: + - match: { scope: "effective:write" } + require: + serviceIdentity: policy-engine + - match: { scope: "graph:write" } + require: + serviceIdentity: cartographer +``` + +### 3.2 Client registration + +Update service clients: + +- `Concelier.WebService` → request `advisory:ingest`, `advisory:read`. +- `Excititor.WebService` → request `vex:ingest`, `vex:read`. +- `Policy.Engine` → request `effective:write`, `findings:read`; set `properties.serviceIdentity=policy-engine`. +- `Cartographer.Service` → request `graph:write`, `graph:read`; set `properties.serviceIdentity=cartographer`. +- `Graph API Gateway` → request `graph:read`, `graph:export`, `graph:simulate`; tenant hint required. +- `Console` → request `advisory:read`, `vex:read`, `aoc:verify`, `findings:read`, `vuln:read` plus existing UI scopes. +- `CLI automation` → request `aoc:verify`, `advisory:read`, `vex:read` as needed. 
+ +Client definition snippet: + +```yaml +clients: + - clientId: concelier-web + grantTypes: [client_credentials] + scopes: [advisory:ingest, advisory:read] + tenants: [default] + - clientId: policy-engine + grantTypes: [client_credentials] + scopes: [effective:write, findings:read] + properties: + serviceIdentity: policy-engine + - clientId: cartographer-service + grantTypes: [client_credentials] + scopes: [graph:write, graph:read] + properties: + serviceIdentity: cartographer +``` + +--- + +## 4 · Operational safeguards + +- **Audit events:** Authority emits `authority.scope.granted` and `authority.scope.revoked` events with `scope` and `tenant`. Monitor for unexpected grants. +- **Rate limiting:** Apply stricter limits on `/token` endpoints for clients requesting `advisory:ingest` or `vex:ingest` to mitigate brute-force ingestion attempts. +- **Incident response:** Link AOC alerts to Authority audit logs to confirm whether violations come from expected identities. +- **Rotation:** Rotate ingest client secrets alongside guard deployments; add rotation steps to `ops/authority-key-rotation.md`. +- **Testing:** Integration tests must fail if tokens lacking `tenant` attempt ingestion; add coverage in Concelier/Excititor smoke suites (see `CONCELIER-CORE-AOC-19-013`). + +--- + +## 5 · Offline & air-gap notes + +- Offline Kit bundles include tenant-scoped service credentials. Ensure ingest bundles ship without `advisory:ingest` scopes unless strictly required. +- CLI verification in offline environments uses pre-issued `aoc:verify` tokens; document expiration and renewal processes. +- Authority replicas in air-gapped environments should restrict scope issuance to known tenants and log all `/token` interactions for later replay. + +--- + +## 6 · References + +- [Aggregation-Only Contract reference](../ingestion/aggregation-only-contract.md) +- [Architecture overview](../architecture/overview.md) +- [Concelier architecture](../ARCHITECTURE_CONCELIER.md) +- [Excititor architecture](../ARCHITECTURE_EXCITITOR.md) +- [Policy governance](policy-governance.md) +- [Authority key rotation playbook](../ops/authority-key-rotation.md) + +--- + +## 7 · Compliance checklist + +- [ ] Scope catalogue updated in Authority configuration templates. +- [ ] Role mappings documented for each tenant profile. +- [ ] Claim transforms enforce `serviceIdentity` for `effective:write`. +- [ ] Claim transforms enforce `serviceIdentity` for `graph:write`. +- [ ] Concelier/Excititor smoke tests cover missing tenant rejection. +- [ ] Offline kit credentials reviewed for least privilege. +- [ ] Audit/monitoring guidance validated with Observability Guild. +- [ ] Authority Core sign-off recorded (owner: @authority-core, due 2025-10-28). + +--- + +*Last updated: 2025-10-27 (Sprint 19).* diff --git a/docs/security/authority-threat-model.md b/docs/security/authority-threat-model.md index 350e0f23..398bdd28 100644 --- a/docs/security/authority-threat-model.md +++ b/docs/security/authority-threat-model.md @@ -1,106 +1,106 @@ -# Authority Threat Model (STRIDE) - -> Prepared by Security Guild — 2025-10-12. Scope covers Authority host, Standard plug-in, CLI, bootstrap workflow, and offline revocation distribution. - -## 1. Scope & Method - -- Methodology: STRIDE applied to primary Authority surfaces (token issuance, bootstrap, revocation, operator tooling, plug-in extensibility). -- Assets in scope: identity credentials, OAuth tokens (access/refresh), bootstrap invites, revocation manifests, signing keys, audit telemetry. 
-- Out of scope: Third-party IdPs federated via OpenIddict (tracked separately in SEC6 backlog). - -## 2. Assets & Entry Points - -| Asset / Surface | Description | Primary Actors | -|-----------------|-------------|----------------| -| Token issuance APIs (`/token`, `/authorize`) | OAuth/OIDC endpoints mediated by OpenIddict | CLI, UI, automation agents | -| Bootstrap channel | Initial admin invite + bootstrap CLI workflow | Platform operators | -| Revocation bundle | Offline JSON + detached JWS consumed by agents | Concelier, Agents, Zastava | -| Plug-in manifests | Standard plug-in configuration and password policy overrides | Operators, DevOps | -| Signing keys | ES256 signing keys backing tokens and revocation manifests | Security Guild, HSM/KeyOps | -| Audit telemetry | Structured login/audit stream persisted to Mongo/observability stack | SOC, SecOps | - -## 3. Trust Boundaries - -| Boundary | Rationale | Controls | -|----------|-----------|----------| -| TB1 — Public network ↔️ Authority ingress | Internet/extranet exposure for `/token`, `/authorize`, `/bootstrap` | TLS 1.3, reverse proxy ACLs, rate limiting (SEC3.A / CORE8.RL) | -| TB2 — Authority host ↔️ Mongo storage | Credential store, revocation state, audit log persistence | Authenticated Mongo, network segmentation, deterministic serializers | -| TB3 — Authority host ↔️ Plug-in sandbox | Plug-ins may override password policy and bootstrap flows | Code signing, manifest validation, restart-time loading only | -| TB4 — Operator workstation ↔️ CLI | CLI holds bootstrap secrets and revocation bundles | OS keychain storage, MFA on workstations, offline kit checksum | -| TB5 — Authority ↔️ Downstream agents | Revocation bundle consumption, token validation | Mutual TLS (planned), detached JWS signatures, bundle freshness checks | - -## 4. Data Flow Diagrams - -### 4.1 Runtime token issuance - -```mermaid -flowchart LR - subgraph Client Tier - CLI[StellaOps CLI] - UI[UI / Automation] - end - subgraph Perimeter - RP[Reverse Proxy / WAF] - end - subgraph Authority - AUTH[Authority Host] - PLGIN[Standard Plug-in] - STORE[(Mongo Credential Store)] - end - CLI -->|OAuth password / client creds| RP --> AUTH - UI -->|OAuth flows| RP - AUTH -->|PasswordHashOptions + Secrets| PLGIN - AUTH -->|Verify / Persist hashes| STORE - STORE -->|Rehash needed| AUTH - AUTH -->|Access / refresh token| RP --> Client Tier -``` - -### 4.2 Bootstrap & revocation - -```mermaid -flowchart LR - subgraph Operator - OPS[Operator Workstation] - end - subgraph Authority - AUTH[Authority Host] - STORE[(Mongo)] - end - subgraph Distribution - OFFKIT[Offline Kit Bundle] - AGENT[Authorized Agent / Concelier] - end - OPS -->|Bootstrap CLI (`stellaops auth bootstrap`)| AUTH - AUTH -->|One-time invite + Argon2 hash| STORE - AUTH -->|Revocation export (`stellaops auth revoke export`)| OFFKIT - OFFKIT -->|Signed JSON + .jws| AGENT - AGENT -->|Revocation ACK / telemetry| AUTH -``` - -## 5. 
STRIDE Analysis - -| Threat | STRIDE Vector | Surface | Risk (L×I) | Existing Controls | Gaps / Actions | Owner | -|--------|---------------|---------|------------|-------------------|----------------|-------| -| Spoofed revocation bundle | Spoofing | TB5 — Authority ↔️ Agents | Med×High | Detached JWS signature (planned), offline kit checksums | Finalise signing key registry & verification script (SEC4.B/SEC4.HOST); add bundle freshness requirement | Security Guild (follow-up: **SEC5.B**) | -| Parameter tampering on `/token` | Tampering | TB1 — Public ingress | Med×High | ASP.NET model validation, OpenIddict, rate limiter (CORE8.RL) | Tampered requests emit `authority.token.tamper` audit events (`request.tampered`, unexpected parameter names) correlating with `/token` outcomes (SEC5.C) | Security Guild + Authority Core (follow-up: **SEC5.C**) | -| Bootstrap invite replay | Repudiation | TB4 — Operator CLI ↔️ Authority | Low×High | One-time bootstrap tokens, Argon2id hashing on creation | Invites expire automatically and emit audit events on consumption/expiration (SEC5.D) | Security Guild | -| Token replay by stolen agent | Information Disclosure | TB5 | Med×High | Signed revocation bundles, device fingerprint heuristics, optional mTLS | Monitor revocation acknowledgement latency via Zastava and tune replay alerting thresholds | Security Guild + Zastava (follow-up: **SEC5.E**) | -| Privilege escalation via plug-in override | Elevation of Privilege | TB3 — Plug-in sandbox | Med×High | Signed plug-ins, restart-only loading, configuration validation | Add static analysis on manifest overrides + runtime warning when policy weaker than host | Security Guild + DevOps (follow-up: **SEC5.F**) | -| Offline bundle tampering | Tampering | Distribution | Low×High | SHA256 manifest, signed bundles (planned) | Add supply-chain attestation for Offline Kit, publish verification CLI in docs | Security Guild + Ops (follow-up: **SEC5.G**) | -| Failure to log denied tokens | Repudiation | TB2 — Authority ↔️ Mongo | Med×Med | Serilog structured events (partial), Mongo persistence path (planned) | Finalise audit schema (SEC2.A) and ensure `/token` denies include subject/client/IP fields | Security Guild + Authority Core (follow-up: **SEC5.H**) | - -Risk scoring uses qualitative scale (Low/Med/High) for likelihood × impact; mitigation priority follows High > Med > Low. - -## 6. Follow-up Backlog Hooks - -| Backlog ID | Linked Threat | Summary | Target Owners | -|------------|---------------|---------|---------------| -| SEC5.B | Spoofed revocation bundle | Complete libsodium/Core signing integration and ship revocation verification script. | Security Guild + Authority Core | -| SEC5.C | Parameter tampering on `/token` | Finalise audit contract (`SEC2.A`) and add request tamper logging. | Security Guild + Authority Core | -| SEC5.D | Bootstrap invite replay | Implement expiry enforcement + audit coverage for unused bootstrap invites. | Security Guild | -| SEC5.E | Token replay by stolen agent | Coordinate Zastava alerting with the new device fingerprint heuristics and surface stale revocation acknowledgements. | Security Guild + Zastava | -| SEC5.F | Plug-in override escalation | Static analysis of plug-in manifests; warn on weaker password policy overrides. | Security Guild + DevOps | -| SEC5.G | Offline bundle tampering | Extend Offline Kit build to include attested manifest + verification CLI sample. 
| Security Guild + Ops | -| SEC5.H | Failure to log denied tokens | Ensure audit persistence for all `/token` denials with correlation IDs. | Security Guild + Authority Core | - -Update `src/StellaOps.Cryptography/TASKS.md` (Security Guild board) with the above backlog entries to satisfy SEC5.A exit criteria. +# Authority Threat Model (STRIDE) + +> Prepared by Security Guild — 2025-10-12. Scope covers Authority host, Standard plug-in, CLI, bootstrap workflow, and offline revocation distribution. + +## 1. Scope & Method + +- Methodology: STRIDE applied to primary Authority surfaces (token issuance, bootstrap, revocation, operator tooling, plug-in extensibility). +- Assets in scope: identity credentials, OAuth tokens (access/refresh), bootstrap invites, revocation manifests, signing keys, audit telemetry. +- Out of scope: Third-party IdPs federated via OpenIddict (tracked separately in SEC6 backlog). + +## 2. Assets & Entry Points + +| Asset / Surface | Description | Primary Actors | +|-----------------|-------------|----------------| +| Token issuance APIs (`/token`, `/authorize`) | OAuth/OIDC endpoints mediated by OpenIddict | CLI, UI, automation agents | +| Bootstrap channel | Initial admin invite + bootstrap CLI workflow | Platform operators | +| Revocation bundle | Offline JSON + detached JWS consumed by agents | Concelier, Agents, Zastava | +| Plug-in manifests | Standard plug-in configuration and password policy overrides | Operators, DevOps | +| Signing keys | ES256 signing keys backing tokens and revocation manifests | Security Guild, HSM/KeyOps | +| Audit telemetry | Structured login/audit stream persisted to Mongo/observability stack | SOC, SecOps | + +## 3. Trust Boundaries + +| Boundary | Rationale | Controls | +|----------|-----------|----------| +| TB1 — Public network ↔️ Authority ingress | Internet/extranet exposure for `/token`, `/authorize`, `/bootstrap` | TLS 1.3, reverse proxy ACLs, rate limiting (SEC3.A / CORE8.RL) | +| TB2 — Authority host ↔️ Mongo storage | Credential store, revocation state, audit log persistence | Authenticated Mongo, network segmentation, deterministic serializers | +| TB3 — Authority host ↔️ Plug-in sandbox | Plug-ins may override password policy and bootstrap flows | Code signing, manifest validation, restart-time loading only | +| TB4 — Operator workstation ↔️ CLI | CLI holds bootstrap secrets and revocation bundles | OS keychain storage, MFA on workstations, offline kit checksum | +| TB5 — Authority ↔️ Downstream agents | Revocation bundle consumption, token validation | Mutual TLS (planned), detached JWS signatures, bundle freshness checks | + +## 4. 
Data Flow Diagrams + +### 4.1 Runtime token issuance + +```mermaid +flowchart LR + subgraph Client Tier + CLI[StellaOps CLI] + UI[UI / Automation] + end + subgraph Perimeter + RP[Reverse Proxy / WAF] + end + subgraph Authority + AUTH[Authority Host] + PLGIN[Standard Plug-in] + STORE[(Mongo Credential Store)] + end + CLI -->|OAuth password / client creds| RP --> AUTH + UI -->|OAuth flows| RP + AUTH -->|PasswordHashOptions + Secrets| PLGIN + AUTH -->|Verify / Persist hashes| STORE + STORE -->|Rehash needed| AUTH + AUTH -->|Access / refresh token| RP --> Client Tier +``` + +### 4.2 Bootstrap & revocation + +```mermaid +flowchart LR + subgraph Operator + OPS[Operator Workstation] + end + subgraph Authority + AUTH[Authority Host] + STORE[(Mongo)] + end + subgraph Distribution + OFFKIT[Offline Kit Bundle] + AGENT[Authorized Agent / Concelier] + end + OPS -->|Bootstrap CLI (`stellaops auth bootstrap`)| AUTH + AUTH -->|One-time invite + Argon2 hash| STORE + AUTH -->|Revocation export (`stellaops auth revoke export`)| OFFKIT + OFFKIT -->|Signed JSON + .jws| AGENT + AGENT -->|Revocation ACK / telemetry| AUTH +``` + +## 5. STRIDE Analysis + +| Threat | STRIDE Vector | Surface | Risk (L×I) | Existing Controls | Gaps / Actions | Owner | +|--------|---------------|---------|------------|-------------------|----------------|-------| +| Spoofed revocation bundle | Spoofing | TB5 — Authority ↔️ Agents | Med×High | Detached JWS signature (planned), offline kit checksums | Finalise signing key registry & verification script (SEC4.B/SEC4.HOST); add bundle freshness requirement | Security Guild (follow-up: **SEC5.B**) | +| Parameter tampering on `/token` | Tampering | TB1 — Public ingress | Med×High | ASP.NET model validation, OpenIddict, rate limiter (CORE8.RL) | Tampered requests emit `authority.token.tamper` audit events (`request.tampered`, unexpected parameter names) correlating with `/token` outcomes (SEC5.C) | Security Guild + Authority Core (follow-up: **SEC5.C**) | +| Bootstrap invite replay | Repudiation | TB4 — Operator CLI ↔️ Authority | Low×High | One-time bootstrap tokens, Argon2id hashing on creation | Invites expire automatically and emit audit events on consumption/expiration (SEC5.D) | Security Guild | +| Token replay by stolen agent | Information Disclosure | TB5 | Med×High | Signed revocation bundles, device fingerprint heuristics, optional mTLS | Monitor revocation acknowledgement latency via Zastava and tune replay alerting thresholds | Security Guild + Zastava (follow-up: **SEC5.E**) | +| Privilege escalation via plug-in override | Elevation of Privilege | TB3 — Plug-in sandbox | Med×High | Signed plug-ins, restart-only loading, configuration validation | Add static analysis on manifest overrides + runtime warning when policy weaker than host | Security Guild + DevOps (follow-up: **SEC5.F**) | +| Offline bundle tampering | Tampering | Distribution | Low×High | SHA256 manifest, signed bundles (planned) | Add supply-chain attestation for Offline Kit, publish verification CLI in docs | Security Guild + Ops (follow-up: **SEC5.G**) | +| Failure to log denied tokens | Repudiation | TB2 — Authority ↔️ Mongo | Med×Med | Serilog structured events (partial), Mongo persistence path (planned) | Finalise audit schema (SEC2.A) and ensure `/token` denies include subject/client/IP fields | Security Guild + Authority Core (follow-up: **SEC5.H**) | + +Risk scoring uses qualitative scale (Low/Med/High) for likelihood × impact; mitigation priority follows High > Med > Low. + +## 6. 
Follow-up Backlog Hooks + +| Backlog ID | Linked Threat | Summary | Target Owners | +|------------|---------------|---------|---------------| +| SEC5.B | Spoofed revocation bundle | Complete libsodium/Core signing integration and ship revocation verification script. | Security Guild + Authority Core | +| SEC5.C | Parameter tampering on `/token` | Finalise audit contract (`SEC2.A`) and add request tamper logging. | Security Guild + Authority Core | +| SEC5.D | Bootstrap invite replay | Implement expiry enforcement + audit coverage for unused bootstrap invites. | Security Guild | +| SEC5.E | Token replay by stolen agent | Coordinate Zastava alerting with the new device fingerprint heuristics and surface stale revocation acknowledgements. | Security Guild + Zastava | +| SEC5.F | Plug-in override escalation | Static analysis of plug-in manifests; warn on weaker password policy overrides. | Security Guild + DevOps | +| SEC5.G | Offline bundle tampering | Extend Offline Kit build to include attested manifest + verification CLI sample. | Security Guild + Ops | +| SEC5.H | Failure to log denied tokens | Ensure audit persistence for all `/token` denials with correlation IDs. | Security Guild + Authority Core | + +Update `src/__Libraries/StellaOps.Cryptography/TASKS.md` (Security Guild board) with the above backlog entries to satisfy SEC5.A exit criteria. diff --git a/docs/security/console-security.md b/docs/security/console-security.md index 2a665b8e..21f3b39d 100644 --- a/docs/security/console-security.md +++ b/docs/security/console-security.md @@ -1,183 +1,183 @@ -# StellaOps Console Security Posture - -> **Audience:** Security Guild, Console & Authority teams, deployment engineers. -> **Scope:** OIDC/DPoP flows, scope model, session controls, CSP and transport headers, evidence handling, offline posture, and monitoring expectations for the StellaOps Console (Sprint 23). - -The console is an Angular SPA fronted by the StellaOps Web gateway. It consumes Authority for identity, Concelier/Excititor for aggregation data, Policy Engine for findings, and Attestor for evidence bundles. This guide captures the security guarantees and required hardening so that the console can ship alongside the Aggregation-Only Contract (AOC) without introducing new attack surface. - ---- - -## 1 · Identity & Authentication - -### 1.1 Authorization sequence - -1. Browser→Authority uses **OAuth 2.1 Authorization Code + PKCE** (`S256`). -2. Upon code exchange the console requests a **DPoP-bound access token** (`aud=console`, `tenant=`) with **120 s TTL** and optional **rotating refresh token** (`rotate=true`). -3. Authority includes `cnf.jkt` for the ephemeral WebCrypto keypair; console stores the private key in **IndexedDB** (non-exportable) and keeps the public JWK in memory. -4. All API calls attach `Authorization: Bearer ` + `DPoP` proof header. Nonces from the gateway are replay-protected (`dpopt-nonce` header). -5. Tenanted API calls flow through the Web gateway which forwards `X-Stella-Tenant` and enforces tenancy headers. Missing or mismatched tenants trigger `403` with `ERR_TENANT_MISMATCH`. - -### 1.2 Fresh-auth gating - -- Sensitive actions (tenant edits, token revocation, policy promote, signing key rotation) call `Authority /fresh-auth` using `prompt=login` + `max_age=300`. -- Successful fresh-auth yields a **300 s** scoped token (`fresh_auth=true`) stored only in memory; the UI disables guarded buttons when the timer expires. 
-- Audit events: `authority.fresh_auth.start`, `authority.fresh_auth.success`, `authority.fresh_auth.expired` (link to correlation IDs for the gated action). - -### 1.3 Offline & sealed mode - -- When `console.offlineMode=true` the console presents an offline banner and suppresses fresh-auth prompts, replacing them with CLI guidance (`stella auth fresh-auth --offline`). -- Offline mode requires pre-issued tenant-scoped tokens bundled with the Offline Kit; tokens must include `offline=true` claim and 15 m TTL. -- Authority availability health is polled via `/api/console/status`. HTTP failures raise the offline banner and switch to read-only behaviour. - ---- - -## 2 · Session & Device Binding - -- Access and refresh tokens live in memory; metadata (subject, tenant, expiry) persists in `sessionStorage` for reload continuity. **Never** store raw JWTs in `localStorage`. -- Inactivity timeout defaults to **15 minutes**. Idle sessions trigger silent refresh; on failure the UI shows a modal requiring re-auth. -- Tokens are device-bound through DPoP; if a new device logs in, Authority revokes the previous DPoP key and emits `authority.token.binding_changed`. -- CSRF mitigations: bearer tokens plus DPoP remove cookie reliance. If cookies are required (e.g., same-origin analytics) they must be `HttpOnly`, `SameSite=Lax`, `Secure`. -- Browser hardening: enforce `Strict-Transport-Security`, `X-Content-Type-Options: nosniff`, `Referrer-Policy: no-referrer`, `Permissions-Policy: camera=(), microphone=(), geolocation=()`. - ---- - -## 3 · Authorization & Scope Model - -The console client is registered in Authority as `console-ui` with scopes: - -| Feature area | Required scopes | Notes | -|--------------|----------------|-------| -| Base navigation (Dashboard, Findings, SBOM, Runs) | `ui.read`, `findings:read`, `advisory:read`, `vex:read`, `aoc:verify` | `findings:read` enables Policy Engine overlays; `advisory:read`/`vex:read` load ingestion panes; `aoc:verify` allows on-demand guard runs. | -| Admin workspace | `ui.admin`, `authority:tenants.read`, `authority:tenants.write`, `authority:roles.read`, `authority:roles.write`, `authority:tokens.read`, `authority:tokens.revoke`, `authority:clients.read`, `authority:clients.write`, `authority:audit.read` | Scope combinations are tenant constrained. Role changes require fresh-auth. | -| Policy approvals | `policy:read`, `policy:review`, `policy:approve`, `policy:operate`, `policy:simulate` | `policy:operate` (promote/activate/run) requires fresh-auth. | -| Observability panes (status ticker, telemetry) | `ui.telemetry`, `scheduler:runs.read`, `advisory:read`, `vex:read` | `ui.telemetry` drives OTLP export toggles. | -| Orchestrator dashboard (queues, workers, rate limits) | `orch:read` | Provision via `Orch.Viewer` role; read-only access to job state and telemetry. | -| Orchestrator control actions (pause/resume, retry, sync-now, backfill) | `orch:operate` (plus `orch:read`) | CLI/Console must request tokens with `operator_reason` and `operator_ticket`; Authority denies issuance when either value is missing. | -| Downloads parity (SBOM, attestation) | `downloads:read`, `attestation:verify`, `sbom:export` | Console surfaces digests only; download links require CLI parity for write operations. | - -Guidance: - -- **Role mapping**: Provision Authority role `role/ui-console-admin` encapsulating the admin scopes above. 
-- **Orchestrator viewers**: Assign Authority role `role/orch-viewer` (Authority role string `Orch.Viewer`) to consoles that require read-only access to Orchestrator telemetry. -- **Orchestrator operators**: Assign Authority role `role/orch-operator` (Authority role string `Orch.Operator`) to identities allowed to pause/resume or backfill. Tokens must include `operator_reason` (≤256 chars) and `operator_ticket` (≤128 chars); Authority records the values in audit logs. -- **Tenant enforcement**: Gateway injects `X-Stella-Tenant` from token claims. Requests missing the header must be rejected by downstream services (Concelier, Excititor, Policy Engine) and logged. -- **Separation of duties**: Never grant `ui.admin` and `policy:approve`/`policy:operate` to the same human role without SOC sign-off; automation accounts should use least-privilege dedicated clients. - ---- - -### 3.1 Console Authority endpoints - -Console uses dedicated Authority endpoints scoped under `/console/*`. All requests must include the tenant header injected by the gateway (`X-Stella-Tenant`); calls without the header fail with `tenant_header_missing` and emit a structured audit event. Keep reverse proxies configured to pass the header end-to-end. - -| Endpoint | Required scopes | Purpose | Notes | -|----------|-----------------|---------|-------| -| `GET /console/tenants` | `authority:tenants.read` | Returns the tenant catalogue for the authenticated principal. | Validates `X-Stella-Tenant`; rejects tenants not configured in Authority. | -| `GET /console/profile` | `ui.read` | Surfaces subject metadata (roles, scopes, session id, fresh-auth state). | Response includes `freshAuth` (bool) based on a 300 s window since `auth_time`. | -| `POST /console/token/introspect` | `ui.read` | Introspects the access token currently in use and reports expiry + tenant. | Console polls this endpoint to drive session inactivity prompts; intended for SPA usage via fetch POST. | - -**Fresh-auth & session inactivity:** Authority stamps `auth_time` on issued tokens and considers privileged actions “fresh” for five minutes. When `/console/profile` returns `freshAuth: false`, the UI must require an interactive re-authentication before allowing admin operations (`ui.admin`, `authority:*` mutations, `policy:activate`, `exceptions:approve`). Access tokens remain short-lived (`00:02:00` by default); pair this with Console session timeouts so idle dashboards prompt the user before two minutes of inactivity. - -**DPoP + tenant binding:** All `/console/*` endpoints require DPoP-bound access tokens. Audit events include `tenant.resolved`, `scope`, `correlationId`, and (when applicable) `token.expires_at`. Staple the same headers into downstream services so cross-component troubleshooting uses the same correlation identifiers. - ---- - -## 4 · Transport, CSP & Browser Hardening - -### 4.1 Gateway requirements - -- TLS 1.2+ with modern cipher suites; enable HTTP/2 for SSE streams. -- Terminate TLS at the reverse proxy (Traefik, NGINX) and forward `X-Forwarded-*` headers (`ASPNETCORE_FORWARDEDHEADERS_ENABLED=true`). -- Rate-limit `/authorize` and `/token` according to [Authority rate-limit guidance](rate-limits.md). 
- -### 4.2 Content Security Policy - -Default CSP served by the console container: - -``` -default-src 'self'; -connect-src 'self' https://*.stella-ops.local; -img-src 'self' data:; -script-src 'self'; -style-src 'self' 'unsafe-inline'; -font-src 'self'; -frame-ancestors 'none'; -``` - -Recommendations: - -- Extend `connect-src` only for known internal APIs (e.g., telemetry collector). Use `console.config.cspOverrides` instead of editing NGINX directly. -- Enable **COOP/COEP** (`Cross-Origin-Opener-Policy: same-origin`, `Cross-Origin-Embedder-Policy: require-corp`) to support WASM policy previews. -- Use **Subresource Integrity (SRI)** hashes when adding third-party fonts or scripts. -- For embedded screenshots/GIFs sourced from Offline Kit, use `img-src 'self' data: blob:` and verify assets during build. -- Enforce `X-Frame-Options: DENY`, `X-XSS-Protection: 0`, and `Cache-Control: no-store` on JSON API responses (HTML assets remain cacheable). - -### 4.3 SSE & WebSocket hygiene - -- SSE endpoints (`/console/status/stream`, `/console/runs/{id}/events`) must set `Cache-Control: no-store` and disable proxy buffering. -- Gate SSE behind the same DPoP tokens; reject without `Authorization`. -- Proxy timeouts ≥ 60 s to avoid disconnect storms; clients use exponential backoff with jitter. - ---- - -## 5 · Evidence & Data Handling - -- **Evidence bundles**: Download links trigger `attestor.verify` or `downloads.manifest` APIs. The UI never caches bundle contents; it only surfaces SHA-256 digests and cosign signatures. Operators must use CLI to fetch the signed artefact. -- **Secrets**: UI redacts tokens, emails, and attachment paths in logs. Structured logs include only `subject`, `tenant`, `action`, `correlationId`. -- **Aggregation data**: Console honours Aggregation-Only contract—no client-side rewriting of Concelier/Excititor precedence. Provenance badges display source IDs and merge-event hashes. -- **PII minimisation**: User lists show minimal identity (display name, email hash). Full email addresses require `ui.admin` + fresh-auth. -- **Downloads parity**: Every downloadable artefact includes a CLI parity link (e.g., `stella downloads fetch --artifact `). If CLI parity fails, the console displays a warning banner and links to troubleshooting docs. - ---- - -## 6 · Logging, Monitoring & Alerts - -- Structured logs: `ui.action`, `tenantId`, `subject`, `scope`, `correlationId`, `dpop.jkt`. Log level `Information` for key actions; `Warning` for security anomalies (failed DPoP, tenant mismatch). -- Metrics (Prometheus): `ui_request_duration_seconds`, `ui_dpop_failure_total`, `ui_fresh_auth_prompt_total`, `ui_tenant_switch_total`, `ui_offline_banner_seconds`. -- Alerts: - 1. **Fresh-auth failures** > 5 per minute per tenant → security review. - 2. **DPoP mismatches** sustained > 1 % of requests → potential replay attempt. - 3. **Tenant mismatches** > 0 triggers an audit incident (could indicate scope misconfiguration). -- Correlate with Authority audit events (`authority.scope.granted`, `authority.token.revoked`) and Concelier/Excititor ingestion logs to trace user impact. - ---- - -## 7 · Offline & Air-Gapped Posture - -- Offline deployments require mirrored container images and Offline Kit manifest verification (see `/docs/deploy/console.md` §7). -- Console reads `offlineManifest.json` at boot to validate asset digests; mismatches block startup until the manifest is refreshed. 
-- Tenant and role edits queue change manifests for export; UI instructs operators to run `stella auth apply --bundle ` on the offline Authority host. -– Evidence viewing remains read-only; download buttons provide scripts to export from local Attestor snapshots. -- Fresh-auth prompts display instructions for hardware-token usage on bastion hosts; system logs mark actions executed under offline fallback. - ---- - -## 8 · Threat Model Alignment - -| Threat (Authority TM §5) | Console control | -|--------------------------|-----------------| -| Spoofed revocation bundle | Console verifies manifest signatures before showing revocation status; links to `stella auth revoke verify`. | -| Parameter tampering on `/token` | PKCE + DPoP enforced; console propagates correlation IDs so Authority logs can link anomalies. | -| Bootstrap invite replay | Admin UI surfaces invite status with expiry; fresh-auth required before issuing new invites. | -| Token replay by stolen agent | DPoP binding prevents reuse; console surfaces revocation latency warnings sourced from Zastava metrics. | -| Offline bundle tampering | Console refuses unsigned Offline Kit assets; prompts operators to re-import verified bundles. | -| Privilege escalation via plug-in overrides | Plug-in manifest viewer warns when a plug-in downgrades password policy; UI restricts plug-in activation to fresh-auth + `ui.admin` scoped users. | - -Document gaps and remediation hooks in `SEC5.*` backlog as they are addressed. - ---- - -## 9 · Compliance checklist - -- [x] Authority client `console-ui` registered with PKCE, DPoP, tenant claim requirement, and scopes from §3. (see [console security sign-off](../updates/2025-10-27-console-security-signoff.md#authority-client-validation)) -- [x] CSP enforced per §4 with overrides documented in deployment manifests. (see [console security sign-off](../updates/2025-10-27-console-security-signoff.md#csp-enforcement)) -- [x] Fresh-auth timer (300 s) validated for admin and policy actions; audit events captured. (see [console security sign-off](../updates/2025-10-27-console-security-signoff.md#fresh-auth-timer)) -- [x] DPoP binding tested (replay attempt blocked; logs show `ui_dpop_failure_total` increment). (see [console security sign-off](../updates/2025-10-27-console-security-signoff.md#dpop-binding-test)) -- [x] Offline mode exercises performed (banner, CLI guidance, manifest verification). (see [console security sign-off](../updates/2025-10-27-console-security-signoff.md#offline-mode-exercise)) -- [x] Evidence download parity verified with CLI scripts; console never caches sensitive artefacts. (see [console security sign-off](../updates/2025-10-27-console-security-signoff.md#evidence-parity)) -- [x] Monitoring dashboards show metrics and alerts outlined in §6; alert runbooks reviewed with Security Guild. (see [console security sign-off](../updates/2025-10-27-console-security-signoff.md#monitoring--alerts)) -- [x] Security review sign-off recorded in sprint log with links to Authority threat model references. (see [console security sign-off](../updates/2025-10-27-console-security-signoff.md#sign-off)) -- [x] `/console` Authority endpoints validated for tenant header enforcement, fresh-auth prompts, and introspection flows (Audit IDs `authority.console.tenants.read`, `authority.console.profile.read`, `authority.console.token.introspect`). 
(see [console security sign-off](../updates/2025-10-31-console-security-refresh.md)) - ---- - -*Last updated: 2025-10-31 (Sprint 23).* +# StellaOps Console Security Posture + +> **Audience:** Security Guild, Console & Authority teams, deployment engineers. +> **Scope:** OIDC/DPoP flows, scope model, session controls, CSP and transport headers, evidence handling, offline posture, and monitoring expectations for the StellaOps Console (Sprint 23). + +The console is an Angular SPA fronted by the StellaOps Web gateway. It consumes Authority for identity, Concelier/Excititor for aggregation data, Policy Engine for findings, and Attestor for evidence bundles. This guide captures the security guarantees and required hardening so that the console can ship alongside the Aggregation-Only Contract (AOC) without introducing new attack surface. + +--- + +## 1 · Identity & Authentication + +### 1.1 Authorization sequence + +1. Browser→Authority uses **OAuth 2.1 Authorization Code + PKCE** (`S256`). +2. Upon code exchange the console requests a **DPoP-bound access token** (`aud=console`, `tenant=`) with **120 s TTL** and optional **rotating refresh token** (`rotate=true`). +3. Authority includes `cnf.jkt` for the ephemeral WebCrypto keypair; console stores the private key in **IndexedDB** (non-exportable) and keeps the public JWK in memory. +4. All API calls attach `Authorization: Bearer ` + `DPoP` proof header. Nonces from the gateway are replay-protected (`dpopt-nonce` header). +5. Tenanted API calls flow through the Web gateway which forwards `X-Stella-Tenant` and enforces tenancy headers. Missing or mismatched tenants trigger `403` with `ERR_TENANT_MISMATCH`. + +### 1.2 Fresh-auth gating + +- Sensitive actions (tenant edits, token revocation, policy promote, signing key rotation) call `Authority /fresh-auth` using `prompt=login` + `max_age=300`. +- Successful fresh-auth yields a **300 s** scoped token (`fresh_auth=true`) stored only in memory; the UI disables guarded buttons when the timer expires. +- Audit events: `authority.fresh_auth.start`, `authority.fresh_auth.success`, `authority.fresh_auth.expired` (link to correlation IDs for the gated action). + +### 1.3 Offline & sealed mode + +- When `console.offlineMode=true` the console presents an offline banner and suppresses fresh-auth prompts, replacing them with CLI guidance (`stella auth fresh-auth --offline`). +- Offline mode requires pre-issued tenant-scoped tokens bundled with the Offline Kit; tokens must include `offline=true` claim and 15 m TTL. +- Authority availability health is polled via `/api/console/status`. HTTP failures raise the offline banner and switch to read-only behaviour. + +--- + +## 2 · Session & Device Binding + +- Access and refresh tokens live in memory; metadata (subject, tenant, expiry) persists in `sessionStorage` for reload continuity. **Never** store raw JWTs in `localStorage`. +- Inactivity timeout defaults to **15 minutes**. Idle sessions trigger silent refresh; on failure the UI shows a modal requiring re-auth. +- Tokens are device-bound through DPoP; if a new device logs in, Authority revokes the previous DPoP key and emits `authority.token.binding_changed`. +- CSRF mitigations: bearer tokens plus DPoP remove cookie reliance. If cookies are required (e.g., same-origin analytics) they must be `HttpOnly`, `SameSite=Lax`, `Secure`. +- Browser hardening: enforce `Strict-Transport-Security`, `X-Content-Type-Options: nosniff`, `Referrer-Policy: no-referrer`, `Permissions-Policy: camera=(), microphone=(), geolocation=()`. 
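+
+As a convenience for gateway operators, the sketch below collects the hardening headers listed above into a single response-header map. The header names and values are the ones named in this section (the HSTS `max-age` is an example value); the configuration shape is a hypothetical fragment, not a documented StellaOps or proxy schema, so translate it into your reverse proxy's native syntax.
+
+```yaml
+# Hypothetical response-header map; only the header names and values come from
+# this section. Adapt to the native syntax of your proxy (Traefik, NGINX, ...).
+responseHeaders:
+  Strict-Transport-Security: "max-age=63072000; includeSubDomains"   # example max-age
+  X-Content-Type-Options: "nosniff"
+  Referrer-Policy: "no-referrer"
+  Permissions-Policy: "camera=(), microphone=(), geolocation=()"
+# If cookies are unavoidable, set HttpOnly, SameSite=Lax, and Secure as noted above.
+```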
+ +--- + +## 3 · Authorization & Scope Model + +The console client is registered in Authority as `console-ui` with scopes: + +| Feature area | Required scopes | Notes | +|--------------|----------------|-------| +| Base navigation (Dashboard, Findings, SBOM, Runs) | `ui.read`, `findings:read`, `advisory:read`, `vex:read`, `aoc:verify` | `findings:read` enables Policy Engine overlays; `advisory:read`/`vex:read` load ingestion panes; `aoc:verify` allows on-demand guard runs. | +| Admin workspace | `ui.admin`, `authority:tenants.read`, `authority:tenants.write`, `authority:roles.read`, `authority:roles.write`, `authority:tokens.read`, `authority:tokens.revoke`, `authority:clients.read`, `authority:clients.write`, `authority:audit.read` | Scope combinations are tenant constrained. Role changes require fresh-auth. | +| Policy approvals | `policy:read`, `policy:review`, `policy:approve`, `policy:operate`, `policy:simulate` | `policy:operate` (promote/activate/run) requires fresh-auth. | +| Observability panes (status ticker, telemetry) | `ui.telemetry`, `scheduler:runs.read`, `advisory:read`, `vex:read` | `ui.telemetry` drives OTLP export toggles. | +| Orchestrator dashboard (queues, workers, rate limits) | `orch:read` | Provision via `Orch.Viewer` role; read-only access to job state and telemetry. | +| Orchestrator control actions (pause/resume, retry, sync-now, backfill) | `orch:operate` (plus `orch:read`) | CLI/Console must request tokens with `operator_reason` and `operator_ticket`; Authority denies issuance when either value is missing. | +| Downloads parity (SBOM, attestation) | `downloads:read`, `attestation:verify`, `sbom:export` | Console surfaces digests only; download links require CLI parity for write operations. | + +Guidance: + +- **Role mapping**: Provision Authority role `role/ui-console-admin` encapsulating the admin scopes above. +- **Orchestrator viewers**: Assign Authority role `role/orch-viewer` (Authority role string `Orch.Viewer`) to consoles that require read-only access to Orchestrator telemetry. +- **Orchestrator operators**: Assign Authority role `role/orch-operator` (Authority role string `Orch.Operator`) to identities allowed to pause/resume or backfill. Tokens must include `operator_reason` (≤256 chars) and `operator_ticket` (≤128 chars); Authority records the values in audit logs. +- **Tenant enforcement**: Gateway injects `X-Stella-Tenant` from token claims. Requests missing the header must be rejected by downstream services (Concelier, Excititor, Policy Engine) and logged. +- **Separation of duties**: Never grant `ui.admin` and `policy:approve`/`policy:operate` to the same human role without SOC sign-off; automation accounts should use least-privilege dedicated clients. + +--- + +### 3.1 Console Authority endpoints + +Console uses dedicated Authority endpoints scoped under `/console/*`. All requests must include the tenant header injected by the gateway (`X-Stella-Tenant`); calls without the header fail with `tenant_header_missing` and emit a structured audit event. Keep reverse proxies configured to pass the header end-to-end. + +| Endpoint | Required scopes | Purpose | Notes | +|----------|-----------------|---------|-------| +| `GET /console/tenants` | `authority:tenants.read` | Returns the tenant catalogue for the authenticated principal. | Validates `X-Stella-Tenant`; rejects tenants not configured in Authority. | +| `GET /console/profile` | `ui.read` | Surfaces subject metadata (roles, scopes, session id, fresh-auth state). 
| Response includes `freshAuth` (bool) based on a 300 s window since `auth_time`. | +| `POST /console/token/introspect` | `ui.read` | Introspects the access token currently in use and reports expiry + tenant. | Console polls this endpoint to drive session inactivity prompts; intended for SPA usage via fetch POST. | + +**Fresh-auth & session inactivity:** Authority stamps `auth_time` on issued tokens and considers privileged actions “fresh” for five minutes. When `/console/profile` returns `freshAuth: false`, the UI must require an interactive re-authentication before allowing admin operations (`ui.admin`, `authority:*` mutations, `policy:activate`, `exceptions:approve`). Access tokens remain short-lived (`00:02:00` by default); pair this with Console session timeouts so idle dashboards prompt the user before two minutes of inactivity. + +**DPoP + tenant binding:** All `/console/*` endpoints require DPoP-bound access tokens. Audit events include `tenant.resolved`, `scope`, `correlationId`, and (when applicable) `token.expires_at`. Staple the same headers into downstream services so cross-component troubleshooting uses the same correlation identifiers. + +--- + +## 4 · Transport, CSP & Browser Hardening + +### 4.1 Gateway requirements + +- TLS 1.2+ with modern cipher suites; enable HTTP/2 for SSE streams. +- Terminate TLS at the reverse proxy (Traefik, NGINX) and forward `X-Forwarded-*` headers (`ASPNETCORE_FORWARDEDHEADERS_ENABLED=true`). +- Rate-limit `/authorize` and `/token` according to [Authority rate-limit guidance](rate-limits.md). + +### 4.2 Content Security Policy + +Default CSP served by the console container: + +``` +default-src 'self'; +connect-src 'self' https://*.stella-ops.local; +img-src 'self' data:; +script-src 'self'; +style-src 'self' 'unsafe-inline'; +font-src 'self'; +frame-ancestors 'none'; +``` + +Recommendations: + +- Extend `connect-src` only for known internal APIs (e.g., telemetry collector). Use `console.config.cspOverrides` instead of editing NGINX directly. +- Enable **COOP/COEP** (`Cross-Origin-Opener-Policy: same-origin`, `Cross-Origin-Embedder-Policy: require-corp`) to support WASM policy previews. +- Use **Subresource Integrity (SRI)** hashes when adding third-party fonts or scripts. +- For embedded screenshots/GIFs sourced from Offline Kit, use `img-src 'self' data: blob:` and verify assets during build. +- Enforce `X-Frame-Options: DENY`, `X-XSS-Protection: 0`, and `Cache-Control: no-store` on JSON API responses (HTML assets remain cacheable). + +### 4.3 SSE & WebSocket hygiene + +- SSE endpoints (`/console/status/stream`, `/console/runs/{id}/events`) must set `Cache-Control: no-store` and disable proxy buffering. +- Gate SSE behind the same DPoP tokens; reject without `Authorization`. +- Proxy timeouts ≥ 60 s to avoid disconnect storms; clients use exponential backoff with jitter. + +--- + +## 5 · Evidence & Data Handling + +- **Evidence bundles**: Download links trigger `attestor.verify` or `downloads.manifest` APIs. The UI never caches bundle contents; it only surfaces SHA-256 digests and cosign signatures. Operators must use CLI to fetch the signed artefact. +- **Secrets**: UI redacts tokens, emails, and attachment paths in logs. Structured logs include only `subject`, `tenant`, `action`, `correlationId`. +- **Aggregation data**: Console honours Aggregation-Only contract—no client-side rewriting of Concelier/Excititor precedence. Provenance badges display source IDs and merge-event hashes. 
+- **PII minimisation**: User lists show minimal identity (display name, email hash). Full email addresses require `ui.admin` + fresh-auth. +- **Downloads parity**: Every downloadable artefact includes a CLI parity link (e.g., `stella downloads fetch --artifact `). If CLI parity fails, the console displays a warning banner and links to troubleshooting docs. + +--- + +## 6 · Logging, Monitoring & Alerts + +- Structured logs: `ui.action`, `tenantId`, `subject`, `scope`, `correlationId`, `dpop.jkt`. Log level `Information` for key actions; `Warning` for security anomalies (failed DPoP, tenant mismatch). +- Metrics (Prometheus): `ui_request_duration_seconds`, `ui_dpop_failure_total`, `ui_fresh_auth_prompt_total`, `ui_tenant_switch_total`, `ui_offline_banner_seconds`. +- Alerts: + 1. **Fresh-auth failures** > 5 per minute per tenant → security review. + 2. **DPoP mismatches** sustained > 1 % of requests → potential replay attempt. + 3. **Tenant mismatches** > 0 triggers an audit incident (could indicate scope misconfiguration). +- Correlate with Authority audit events (`authority.scope.granted`, `authority.token.revoked`) and Concelier/Excititor ingestion logs to trace user impact. + +--- + +## 7 · Offline & Air-Gapped Posture + +- Offline deployments require mirrored container images and Offline Kit manifest verification (see `/docs/deploy/console.md` §7). +- Console reads `offlineManifest.json` at boot to validate asset digests; mismatches block startup until the manifest is refreshed. +- Tenant and role edits queue change manifests for export; UI instructs operators to run `stella auth apply --bundle ` on the offline Authority host. +- Evidence viewing remains read-only; download buttons provide scripts to export from local Attestor snapshots. +- Fresh-auth prompts display instructions for hardware-token usage on bastion hosts; system logs mark actions executed under offline fallback. + +--- + +## 8 · Threat Model Alignment + +| Threat (Authority TM §5) | Console control | +|--------------------------|-----------------| +| Spoofed revocation bundle | Console verifies manifest signatures before showing revocation status; links to `stella auth revoke verify`. | +| Parameter tampering on `/token` | PKCE + DPoP enforced; console propagates correlation IDs so Authority logs can link anomalies. | +| Bootstrap invite replay | Admin UI surfaces invite status with expiry; fresh-auth required before issuing new invites. | +| Token replay by stolen agent | DPoP binding prevents reuse; console surfaces revocation latency warnings sourced from Zastava metrics. | +| Offline bundle tampering | Console refuses unsigned Offline Kit assets; prompts operators to re-import verified bundles. | +| Privilege escalation via plug-in overrides | Plug-in manifest viewer warns when a plug-in downgrades password policy; UI restricts plug-in activation to fresh-auth + `ui.admin` scoped users. | + +Document gaps and remediation hooks in `SEC5.*` backlog as they are addressed. + +--- + +## 9 · Compliance checklist + +- [x] Authority client `console-ui` registered with PKCE, DPoP, tenant claim requirement, and scopes from §3. (see [console security sign-off](../updates/2025-10-27-console-security-signoff.md#authority-client-validation)) +- [x] CSP enforced per §4 with overrides documented in deployment manifests. (see [console security sign-off](../updates/2025-10-27-console-security-signoff.md#csp-enforcement)) +- [x] Fresh-auth timer (300 s) validated for admin and policy actions; audit events captured.
(see [console security sign-off](../updates/2025-10-27-console-security-signoff.md#fresh-auth-timer)) +- [x] DPoP binding tested (replay attempt blocked; logs show `ui_dpop_failure_total` increment). (see [console security sign-off](../updates/2025-10-27-console-security-signoff.md#dpop-binding-test)) +- [x] Offline mode exercises performed (banner, CLI guidance, manifest verification). (see [console security sign-off](../updates/2025-10-27-console-security-signoff.md#offline-mode-exercise)) +- [x] Evidence download parity verified with CLI scripts; console never caches sensitive artefacts. (see [console security sign-off](../updates/2025-10-27-console-security-signoff.md#evidence-parity)) +- [x] Monitoring dashboards show metrics and alerts outlined in §6; alert runbooks reviewed with Security Guild. (see [console security sign-off](../updates/2025-10-27-console-security-signoff.md#monitoring--alerts)) +- [x] Security review sign-off recorded in sprint log with links to Authority threat model references. (see [console security sign-off](../updates/2025-10-27-console-security-signoff.md#sign-off)) +- [x] `/console` Authority endpoints validated for tenant header enforcement, fresh-auth prompts, and introspection flows (Audit IDs `authority.console.tenants.read`, `authority.console.profile.read`, `authority.console.token.introspect`). (see [console security sign-off](../updates/2025-10-31-console-security-refresh.md)) + +--- + +*Last updated: 2025-10-31 (Sprint 23).* diff --git a/docs/security/pack-signing-and-rbac.md b/docs/security/pack-signing-and-rbac.md index b644b3d6..87dc35e1 100644 --- a/docs/security/pack-signing-and-rbac.md +++ b/docs/security/pack-signing-and-rbac.md @@ -1,165 +1,165 @@ -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -# Pack Signing & RBAC Controls - -This document defines signing, verification, and authorization requirements for Task Packs across the CLI, Packs Registry, Task Runner, and Offline Kit. It aligns with Authority sprint tasks (`AUTH-PACKS-41-001`, `AUTH-PACKS-43-001`) and security guild expectations. - ---- - -## 1 · Threat Model Highlights - -| Threat | Mitigation | -|--------|------------| -| Unsigned or tampered pack uploaded to registry | Mandatory cosign/DSSE verification before acceptance. | -| Unauthorized user publishing or promoting packs | Authority scopes (`Packs.Write`) + registry policy checks. | -| Privilege escalation during approvals | Approval gates require `Packs.Approve` + audit logging; fresh-auth recommended. | -| Secret exfiltration via pack steps | Secrets injection sandbox with redaction, sealed-mode network guardrails, evidence review. | -| Replay of old approval tokens | Approval payloads carry plan hash + expiry; Task Runner rejects mismatches. | -| Malicious pack in Offline Kit | Mirror verification using signed manifest and DSSE provenance. | - ---- - -## 2 · Signing Requirements - -- **Cosign** signatures required for all bundles. Keys can be: - - Keyless (Fulcio OIDC). - - KMS-backed (HSM, cloud KMS). - - Offline keys stored in secure vault (air-gapped mode). -- **DSSE Attestations** recommended to embed: - - Manifest digest. - - Build metadata (repo, commit, CI run). - - CLI version (`stella/pack`). -- Signatures stored alongside bundle in registry object storage. -- `stella pack push` refuses to publish without signature (unless `--insecure-publish` used in dev). 
-- Registry enforces trust policy: - -| Policy | Description | -|--------|-------------| -| `anyOf` | Accepts any key in configured trust store. | -| `keyRef` | Accepts specific key ID (`kid`). | -| `oidcIssuer` | Accepts Fulcio certificates from allowed issuers (e.g., `https://fulcio.sigstore.dev`). | -| `threshold` | Requires N-of-M signatures (future release). | - ---- - -## 3 · RBAC & Scopes - -Authority exposes pack-related scopes: - -| Scope | Description | -|-------|-------------| -| `Packs.Read` | View packs, download manifests/bundles. | -| `Packs.Write` | Publish, promote, deprecate packs. | -| `Packs.Run` | Execute packs (Task Runner, CLI). | -| `Packs.Approve` | Approve pack gates, override tenant visibility. | - -### 3.1 Role Mapping - -| Role | Scopes | Use Cases | -|------|--------|-----------| -| `pack.viewer` | `Packs.Read` | Inspect packs, plan runs. | -| `pack.publisher` | `Packs.Read`, `Packs.Write` | Publish new versions, manage channels. | -| `pack.operator` | `Packs.Read`, `Packs.Run` | Execute packs, monitor runs. | -| `pack.approver` | `Packs.Read`, `Packs.Approve` | Fulfil approvals, authorize promotions. | -| `pack.admin` | All | Full lifecycle management (rare). | - -Roles are tenant-scoped; cross-tenant access requires explicit addition. - -### 3.2 CLI Enforcement - -- CLI requests scopes based on command: - - `stella pack plan` → `Packs.Read`. - - `stella pack run` → `Packs.Run`. - - `stella pack push` → `Packs.Write`. - - `stella pack approve` → `Packs.Approve`. -- Offline tokens must include same scopes; CLI warns if missing. - ---- - -## 4 · Approvals & Fresh Auth - -- Approval commands require recent fresh-auth (< 5 minutes). CLI prompts automatically; Console enforces via Authority. -- Approval payload includes: - - `runId` - - `gateId` - - `planHash` - - `approver` - - `timestamp` -- Task Runner logs approval event and verifies plan hash to prevent rerouting. - ---- - -## 5 · Secret Management - -- Secrets defined in pack manifest map to Authority secret providers (e.g., HSM, Vault). -- Task Runner obtains secrets using service account with scoped access; CLI may prompt or read from profile. -- Secret audit trail: - - `secretRequested` event with reason, pack, step. - - `secretDelivered` event omitted (only aggregate metrics) to avoid leakage. - - Evidence bundle includes hashed secret metadata (no values). - -Sealed mode requires secrets to originate from sealed vault; external endpoints blocked. - ---- - -## 6 · Audit & Evidence - -- Registry, Task Runner, and Authority emit audit events to central timeline. -- Required events: - - `pack.version.published` - - `pack.version.promoted` - - `pack.run.started/completed` - - `pack.approval.requested/granted` - - `pack.secret.requested` -- Evidence Locker stores DSSE attestations and run bundles for 90 days (configurable). -- Auditors can use `stella pack audit --run ` to retrieve audit trail. - ---- - -## 7 · Offline / Air-Gap Policies - -- Offline Kit includes: - - Pack bundles + signatures. - - Trusted key store (`trust-bundle.pem`). - - Approval workflow instructions for manual signing. -- Air-gapped approvals: - - CLI generates approval request file (`.approval-request.json`). - - Approver uses offline CLI to sign with offline key. - - Response imported to Task Runner. -- Mirror process verifies signatures prior to import; failure aborts import with `ERR_PACK_SIGNATURE_INVALID`. - ---- - -## 8 · Incident Response - -- Compromised pack signature: - - Revoke key via Authority trust store. 
- - Deprecate affected versions (`registry deprecate`). - - Notify consumers via Notifier (`pack.security.alert`). - - Forensically review run evidence for impacted tenants. -- Unauthorized approval: - - Review audit log for `Packs.Approve` events. - - Trigger `pack.run.freeze` (pauses run pending investigation). - - Rotate approver credentials and require fresh-auth. -- Secret leak suspicion: - - Quarantine evidence bundles. - - Rotate secrets referenced by pack. - - Run sealed-mode audit script to confirm guardrails. - ---- - -## 9 · Compliance Checklist - -- [ ] Signing requirements (cosign/DSSE, trust policies) documented. -- [ ] Authority scope mapping and CLI enforcement captured. -- [ ] Approval workflow + fresh-auth expectations defined. -- [ ] Secret lifecycle (request, injection, audit) described. -- [ ] Audit/evidence integration noted (timeline, Evidence Locker). -- [ ] Offline/air-gap controls outlined. -- [ ] Incident response playbook provided. -- [ ] Imposed rule reminder retained at top. - ---- - -*Last updated: 2025-10-27 (Sprint 43).* - +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +# Pack Signing & RBAC Controls + +This document defines signing, verification, and authorization requirements for Task Packs across the CLI, Packs Registry, Task Runner, and Offline Kit. It aligns with Authority sprint tasks (`AUTH-PACKS-41-001`, `AUTH-PACKS-43-001`) and security guild expectations. + +--- + +## 1 · Threat Model Highlights + +| Threat | Mitigation | +|--------|------------| +| Unsigned or tampered pack uploaded to registry | Mandatory cosign/DSSE verification before acceptance. | +| Unauthorized user publishing or promoting packs | Authority scopes (`Packs.Write`) + registry policy checks. | +| Privilege escalation during approvals | Approval gates require `Packs.Approve` + audit logging; fresh-auth recommended. | +| Secret exfiltration via pack steps | Secrets injection sandbox with redaction, sealed-mode network guardrails, evidence review. | +| Replay of old approval tokens | Approval payloads carry plan hash + expiry; Task Runner rejects mismatches. | +| Malicious pack in Offline Kit | Mirror verification using signed manifest and DSSE provenance. | + +--- + +## 2 · Signing Requirements + +- **Cosign** signatures required for all bundles. Keys can be: + - Keyless (Fulcio OIDC). + - KMS-backed (HSM, cloud KMS). + - Offline keys stored in secure vault (air-gapped mode). +- **DSSE Attestations** recommended to embed: + - Manifest digest. + - Build metadata (repo, commit, CI run). + - CLI version (`stella/pack`). +- Signatures stored alongside bundle in registry object storage. +- `stella pack push` refuses to publish without signature (unless `--insecure-publish` used in dev). +- Registry enforces trust policy: + +| Policy | Description | +|--------|-------------| +| `anyOf` | Accepts any key in configured trust store. | +| `keyRef` | Accepts specific key ID (`kid`). | +| `oidcIssuer` | Accepts Fulcio certificates from allowed issuers (e.g., `https://fulcio.sigstore.dev`). | +| `threshold` | Requires N-of-M signatures (future release). | + +--- + +## 3 · RBAC & Scopes + +Authority exposes pack-related scopes: + +| Scope | Description | +|-------|-------------| +| `Packs.Read` | View packs, download manifests/bundles. | +| `Packs.Write` | Publish, promote, deprecate packs. | +| `Packs.Run` | Execute packs (Task Runner, CLI). 
| +| `Packs.Approve` | Approve pack gates, override tenant visibility. | + +### 3.1 Role Mapping + +| Role | Scopes | Use Cases | +|------|--------|-----------| +| `pack.viewer` | `Packs.Read` | Inspect packs, plan runs. | +| `pack.publisher` | `Packs.Read`, `Packs.Write` | Publish new versions, manage channels. | +| `pack.operator` | `Packs.Read`, `Packs.Run` | Execute packs, monitor runs. | +| `pack.approver` | `Packs.Read`, `Packs.Approve` | Fulfil approvals, authorize promotions. | +| `pack.admin` | All | Full lifecycle management (rare). | + +Roles are tenant-scoped; cross-tenant access requires explicit addition. + +### 3.2 CLI Enforcement + +- CLI requests scopes based on command: + - `stella pack plan` → `Packs.Read`. + - `stella pack run` → `Packs.Run`. + - `stella pack push` → `Packs.Write`. + - `stella pack approve` → `Packs.Approve`. +- Offline tokens must include same scopes; CLI warns if missing. + +--- + +## 4 · Approvals & Fresh Auth + +- Approval commands require recent fresh-auth (< 5 minutes). CLI prompts automatically; Console enforces via Authority. +- Approval payload includes: + - `runId` + - `gateId` + - `planHash` + - `approver` + - `timestamp` +- Task Runner logs approval event and verifies plan hash to prevent rerouting. + +--- + +## 5 · Secret Management + +- Secrets defined in pack manifest map to Authority secret providers (e.g., HSM, Vault). +- Task Runner obtains secrets using service account with scoped access; CLI may prompt or read from profile. +- Secret audit trail: + - `secretRequested` event with reason, pack, step. + - `secretDelivered` event omitted (only aggregate metrics) to avoid leakage. + - Evidence bundle includes hashed secret metadata (no values). + +Sealed mode requires secrets to originate from sealed vault; external endpoints blocked. + +--- + +## 6 · Audit & Evidence + +- Registry, Task Runner, and Authority emit audit events to central timeline. +- Required events: + - `pack.version.published` + - `pack.version.promoted` + - `pack.run.started/completed` + - `pack.approval.requested/granted` + - `pack.secret.requested` +- Evidence Locker stores DSSE attestations and run bundles for 90 days (configurable). +- Auditors can use `stella pack audit --run ` to retrieve audit trail. + +--- + +## 7 · Offline / Air-Gap Policies + +- Offline Kit includes: + - Pack bundles + signatures. + - Trusted key store (`trust-bundle.pem`). + - Approval workflow instructions for manual signing. +- Air-gapped approvals: + - CLI generates approval request file (`.approval-request.json`). + - Approver uses offline CLI to sign with offline key. + - Response imported to Task Runner. +- Mirror process verifies signatures prior to import; failure aborts import with `ERR_PACK_SIGNATURE_INVALID`. + +--- + +## 8 · Incident Response + +- Compromised pack signature: + - Revoke key via Authority trust store. + - Deprecate affected versions (`registry deprecate`). + - Notify consumers via Notifier (`pack.security.alert`). + - Forensically review run evidence for impacted tenants. +- Unauthorized approval: + - Review audit log for `Packs.Approve` events. + - Trigger `pack.run.freeze` (pauses run pending investigation). + - Rotate approver credentials and require fresh-auth. +- Secret leak suspicion: + - Quarantine evidence bundles. + - Rotate secrets referenced by pack. + - Run sealed-mode audit script to confirm guardrails. + +--- + +## 9 · Compliance Checklist + +- [ ] Signing requirements (cosign/DSSE, trust policies) documented. 
+- [ ] Authority scope mapping and CLI enforcement captured. +- [ ] Approval workflow + fresh-auth expectations defined. +- [ ] Secret lifecycle (request, injection, audit) described. +- [ ] Audit/evidence integration noted (timeline, Evidence Locker). +- [ ] Offline/air-gap controls outlined. +- [ ] Incident response playbook provided. +- [ ] Imposed rule reminder retained at top. + +--- + +*Last updated: 2025-10-27 (Sprint 43).* + diff --git a/docs/security/policy-governance.md b/docs/security/policy-governance.md index 0b99dcfd..6f8abd8c 100644 --- a/docs/security/policy-governance.md +++ b/docs/security/policy-governance.md @@ -1,112 +1,112 @@ -# Policy Governance & Least Privilege - -> **Audience:** Security Guild, Policy Guild, Authority Core, auditors. -> **Scope:** Scopes, RBAC, approval controls, tenancy, auditing, and compliance requirements for Policy Engine v2. - ---- - -## 1 · Governance Principles - -1. **Least privilege by scope** – API clients receive only the `policy:*` scopes required for their role; `effective:write` reserved for service identity. -2. **Immutable history** – All policy changes, approvals, runs, and suppressions produce audit artefacts retrievable offline. -3. **Separation of duties** – Authors cannot approve their own submissions; approvers require distinct scope and should not have deployment rights. -4. **Deterministic verification** – Simulations, determinism checks, and incident replay bundles provide reproducible evidence for auditors. -5. **Tenant isolation** – Policies, runs, and findings scoped to tenants; cross-tenant access requires explicit admin scopes and is logged. -6. **Offline parity** – Air-gapped sites follow the same governance workflow with sealed-mode safeguards and signed bundles. - ---- - -## 2 · Authority Scopes & Role Mapping - -| Scope | Description | Recommended role | -|-------|-------------|------------------| -| `policy:read` | View policies, revisions, runs, findings. | Readers, auditors. | -| `policy:author` | Create/edit drafts, lint/compile, quick simulate. | `role/policy-author`. | -| `policy:review` | Comment, request changes, approve in-progress drafts. | `role/policy-reviewer`. | -| `policy:approve` | Final approval; archive decisions. | `role/policy-approver`. | -| `policy:operate` | Promote revisions, trigger runs, manage rollouts. | `role/policy-operator`, automation bots. | -| `policy:audit` | Access immutable history and evidence bundles. | `role/policy-auditor`, compliance teams. | -| `policy:simulate` | Execute simulations via API/CLI. | Authors, reviewers, CI. | -| `policy:run` | Trigger runs, inspect live status. | Operators, automation bots. | -| `policy:activate` | Promote approved version, schedule activation. | Runtime operators / release managers. | -| `findings:read` | View effective findings/explain. | Analysts, auditors, CLI. | -| `effective:write` | **Service only** – materialise findings. | Policy Engine service principal. | - -> Map organisation roles to scopes via Authority issuer config (`authority.tenants[].roles`). Document assignments in tenant onboarding checklist. - -> **Authority configuration tip:** the Policy Engine service client must include `properties.serviceIdentity: policy-engine` and a tenant hint in `authority.yaml`. Authority rejects `effective:write` tokens that lack this marker. See [Authority scopes](authority-scopes.md) for the full scope catalogue. - ---- - -## 3 · Workflow Controls - -- **Submit gate:** CLI/UI require fresh lint + simulation artefacts (<24 h). 
Submissions store reviewer list and diff attachments. -- **Review quorum:** Authority policy enforces minimum reviewers (e.g., 2) and optional separation between functional/security domains. -- **Approval guard:** Approvers must acknowledge simulation + determinism check completion. CLI enforces `--note` and `--attach` fields. -- **Activation guard:** Policy Engine refuses activation when latest full run status ≠ success or incremental backlog aged > SLA. -- **Rollback policy:** Rollbacks require incident reference and produce `policy.rollback` audit events. - ---- - -## 4 · Tenancy & Data Access - -- Policies stored per tenant; `tenant-global` used for shared baselines. -- API filters all requests by `X-Stella-Tenant` (default from token). Cross-tenant requests require `policy:tenant-admin`. -- Effective findings collections include `tenant` field and unique indexes preventing cross-tenant writes. -- CLI/Console display tenant context prominently; switching tenant triggers warnings when active policy differs. -- Offline bundles encode tenant metadata; import commands validate compatibility before applying. - ---- - -## 5 · Audit & Evidence - -- **Collections:** `policies`, `policy_reviews`, `policy_history`, `policy_runs`, `policy_run_events`, `effective_finding_*_history`. -- **Events:** `policy.submitted`, `policy.review.requested`, `policy.approved`, `policy.activated`, `policy.archived`, `policy.run.*`, `policy.incident.*`. -- **Explain traces:** Stored for critical findings (sampled); available via CLI/UI for auditors (requires `findings:read`). -- **Offline evidence:** `stella policy bundle export` produces DSSE-signed packages containing DSL, IR digest, simulations, approval notes, run summaries, trace metadata. -- **Retention:** Default 365 days for run history, extendable per compliance requirements; incident mode extends to 30 days minimum. - ---- - -## 6 · Secrets & Configuration Hygiene - -- Policy Engine configuration loaded from environment/secret stores; no secrets in repo. -- CLI profiles should store tokens encrypted (`stella profile set --secret`). -- UI/CLI logs redact tokens, reviewer emails, and attachments. -- Rotating tokens/keys: Authority exposes `policy scopes` in discovery docs; follow `/docs/security/authority-scopes.md` for rotation. -- Use `policy:operate` to disable self-service simulation temporarily during incident response if needed. - ---- - -## 7 · Incident Response - -- Trigger incident mode for determinism violations, backlog surges, or suspected policy abuse. -- Capture replay bundles and run `stella policy run replay` for affected runs. -- Coordinate with Observability dashboards (see `/docs/observability/policy.md`) to monitor queue depth, failures. -- After resolution, document remediation in Lifecycle guide (§8) and attach to approval history. - ---- - -## 8 · Offline / Air-Gapped Governance - -- Same scopes apply; tokens issued by local Authority. -- Approvers must use offline UI/CLI to sign submissions; attachments stored locally. -- Bundle import/export must be signed (DSSE + cosign). CLI warns if signatures missing. -- Sealed-mode banner reminds operators to refresh bundles when staleness thresholds exceeded. -- Offline audits rely on evidence bundles and local `policy_runs` snapshot. - ---- - -## 9 · Compliance Checklist - -- [ ] **Scope mapping reviewed:** Authority issuer config updated; RBAC matrix stored with change request. -- [ ] **Separation enforced:** Automated checks block self-approval; review quorum satisfied. 
-- [ ] **Activation guard documented:** Operators trained on run health checks before promoting. -- [ ] **Audit exports tested:** Evidence bundles verified (hash/signature) and stored per compliance policy. -- [ ] **Incident drills rehearsed:** Replay/rollback procedures executed and logged. -- [ ] **Offline parity confirmed:** Air-gapped site executes submit/approve flow with sealed-mode guidance. -- [ ] **Documentation cross-links:** References to lifecycle, runs, observability, CLI, and API docs validated. - ---- - -*Last updated: 2025-10-26 (Sprint 20).* +# Policy Governance & Least Privilege + +> **Audience:** Security Guild, Policy Guild, Authority Core, auditors. +> **Scope:** Scopes, RBAC, approval controls, tenancy, auditing, and compliance requirements for Policy Engine v2. + +--- + +## 1 · Governance Principles + +1. **Least privilege by scope** – API clients receive only the `policy:*` scopes required for their role; `effective:write` reserved for service identity. +2. **Immutable history** – All policy changes, approvals, runs, and suppressions produce audit artefacts retrievable offline. +3. **Separation of duties** – Authors cannot approve their own submissions; approvers require distinct scope and should not have deployment rights. +4. **Deterministic verification** – Simulations, determinism checks, and incident replay bundles provide reproducible evidence for auditors. +5. **Tenant isolation** – Policies, runs, and findings scoped to tenants; cross-tenant access requires explicit admin scopes and is logged. +6. **Offline parity** – Air-gapped sites follow the same governance workflow with sealed-mode safeguards and signed bundles. + +--- + +## 2 · Authority Scopes & Role Mapping + +| Scope | Description | Recommended role | +|-------|-------------|------------------| +| `policy:read` | View policies, revisions, runs, findings. | Readers, auditors. | +| `policy:author` | Create/edit drafts, lint/compile, quick simulate. | `role/policy-author`. | +| `policy:review` | Comment, request changes, approve in-progress drafts. | `role/policy-reviewer`. | +| `policy:approve` | Final approval; archive decisions. | `role/policy-approver`. | +| `policy:operate` | Promote revisions, trigger runs, manage rollouts. | `role/policy-operator`, automation bots. | +| `policy:audit` | Access immutable history and evidence bundles. | `role/policy-auditor`, compliance teams. | +| `policy:simulate` | Execute simulations via API/CLI. | Authors, reviewers, CI. | +| `policy:run` | Trigger runs, inspect live status. | Operators, automation bots. | +| `policy:activate` | Promote approved version, schedule activation. | Runtime operators / release managers. | +| `findings:read` | View effective findings/explain. | Analysts, auditors, CLI. | +| `effective:write` | **Service only** – materialise findings. | Policy Engine service principal. | + +> Map organisation roles to scopes via Authority issuer config (`authority.tenants[].roles`). Document assignments in tenant onboarding checklist. + +> **Authority configuration tip:** the Policy Engine service client must include `properties.serviceIdentity: policy-engine` and a tenant hint in `authority.yaml`. Authority rejects `effective:write` tokens that lack this marker. See [Authority scopes](authority-scopes.md) for the full scope catalogue. + +--- + +## 3 · Workflow Controls + +- **Submit gate:** CLI/UI require fresh lint + simulation artefacts (<24 h). Submissions store reviewer list and diff attachments. 
+- **Review quorum:** Authority policy enforces minimum reviewers (e.g., 2) and optional separation between functional/security domains. +- **Approval guard:** Approvers must acknowledge simulation + determinism check completion. CLI enforces `--note` and `--attach` fields. +- **Activation guard:** Policy Engine refuses activation when latest full run status ≠ success or incremental backlog aged > SLA. +- **Rollback policy:** Rollbacks require incident reference and produce `policy.rollback` audit events. + +--- + +## 4 · Tenancy & Data Access + +- Policies stored per tenant; `tenant-global` used for shared baselines. +- API filters all requests by `X-Stella-Tenant` (default from token). Cross-tenant requests require `policy:tenant-admin`. +- Effective findings collections include `tenant` field and unique indexes preventing cross-tenant writes. +- CLI/Console display tenant context prominently; switching tenant triggers warnings when active policy differs. +- Offline bundles encode tenant metadata; import commands validate compatibility before applying. + +--- + +## 5 · Audit & Evidence + +- **Collections:** `policies`, `policy_reviews`, `policy_history`, `policy_runs`, `policy_run_events`, `effective_finding_*_history`. +- **Events:** `policy.submitted`, `policy.review.requested`, `policy.approved`, `policy.activated`, `policy.archived`, `policy.run.*`, `policy.incident.*`. +- **Explain traces:** Stored for critical findings (sampled); available via CLI/UI for auditors (requires `findings:read`). +- **Offline evidence:** `stella policy bundle export` produces DSSE-signed packages containing DSL, IR digest, simulations, approval notes, run summaries, trace metadata. +- **Retention:** Default 365 days for run history, extendable per compliance requirements; incident mode extends to 30 days minimum. + +--- + +## 6 · Secrets & Configuration Hygiene + +- Policy Engine configuration loaded from environment/secret stores; no secrets in repo. +- CLI profiles should store tokens encrypted (`stella profile set --secret`). +- UI/CLI logs redact tokens, reviewer emails, and attachments. +- Rotating tokens/keys: Authority exposes `policy scopes` in discovery docs; follow `/docs/security/authority-scopes.md` for rotation. +- Use `policy:operate` to disable self-service simulation temporarily during incident response if needed. + +--- + +## 7 · Incident Response + +- Trigger incident mode for determinism violations, backlog surges, or suspected policy abuse. +- Capture replay bundles and run `stella policy run replay` for affected runs. +- Coordinate with Observability dashboards (see `/docs/observability/policy.md`) to monitor queue depth, failures. +- After resolution, document remediation in Lifecycle guide (§8) and attach to approval history. + +--- + +## 8 · Offline / Air-Gapped Governance + +- Same scopes apply; tokens issued by local Authority. +- Approvers must use offline UI/CLI to sign submissions; attachments stored locally. +- Bundle import/export must be signed (DSSE + cosign). CLI warns if signatures missing. +- Sealed-mode banner reminds operators to refresh bundles when staleness thresholds exceeded. +- Offline audits rely on evidence bundles and local `policy_runs` snapshot. + +--- + +## 9 · Compliance Checklist + +- [ ] **Scope mapping reviewed:** Authority issuer config updated; RBAC matrix stored with change request. +- [ ] **Separation enforced:** Automated checks block self-approval; review quorum satisfied. 
+- [ ] **Activation guard documented:** Operators trained on run health checks before promoting. +- [ ] **Audit exports tested:** Evidence bundles verified (hash/signature) and stored per compliance policy. +- [ ] **Incident drills rehearsed:** Replay/rollback procedures executed and logged. +- [ ] **Offline parity confirmed:** Air-gapped site executes submit/approve flow with sealed-mode guidance. +- [ ] **Documentation cross-links:** References to lifecycle, runs, observability, CLI, and API docs validated. + +--- + +*Last updated: 2025-10-26 (Sprint 20).* diff --git a/docs/task-packs/authoring-guide.md b/docs/task-packs/authoring-guide.md index 6366ecd2..c43496c0 100644 --- a/docs/task-packs/authoring-guide.md +++ b/docs/task-packs/authoring-guide.md @@ -1,208 +1,208 @@ -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -# Task Pack Authoring Guide - -This guide teaches engineers how to design, validate, and publish Task Packs that align with the Sprint 43 specification. Follow these steps to ensure deterministic behaviour, secure approvals, and smooth hand-off to operators. - ---- - -## 1 · Prerequisites - -- StellaOps CLI `>= 2025.10.0` with pack commands enabled. -- Authority client configured with `Packs.Write` (publish) and `Packs.Run` (local testing) scopes. -- Access to Task Runner staging environment for validation runs. -- Familiarity with the [Task Pack Specification](spec.md) and [Packs Registry](registry.md). -- Optional: connection to DevOps staging registry or Offline Kit mirror for publishing. - ---- - -## 2 · Design Checklist - -1. **Define objective.** Document the operational need, inputs, expected outputs, and rollback strategy. -2. **Identify approvals.** Determine which scopes/roles must sign off (`Packs.Approve` assignments). -3. **Plan security posture.** Limit secrets usage, set tenant visibility, and note network constraints (sealed mode). -4. **Model observability.** Decide which metrics, logs, and evidence artifacts are critical for post-run audits. -5. **Reuse libraries.** Prefer built-in modules or shared pack fragments to reduce drift. - -Capture the above in `docs/summary.md` (optional but recommended) for future maintainers. - ---- - -## 3 · Authoring Workflow - -### 3.1 Scaffold project - -```bash -mkdir my-pack -cd my-pack -stella pack init --name sbom-remediation -``` - -`stella pack init` creates baseline files: - -- `pack.yaml` with metadata placeholders. -- `schemas/inputs.schema.json` (sample). -- `docs/usage.md` (template for human instructions). -- `.packignore` to exclude build artifacts. - -### 3.2 Define inputs & schemas - -- Use JSON Schema (`draft-2020-12`) for input validation. -- Avoid optional inputs unless there is a deterministic default. -- Store schemas under `schemas/` and reference via relative paths. - -### 3.3 Compose steps - -- Break workflow into small deterministic steps. -- Name each step with stable `id`. -- Wrap scripts/tools using built-in modules; copy scripts to `assets/` if necessary. -- Use `when` expressions for branch logic; ensure expressions rely solely on inputs or previous outputs. -- For loops, adopt `map` with capped iteration count; avoid data-dependent randomness. - -### 3.4 Configure approvals - -- Add `spec.approvals` entries for each required review. -- Provide informative `reasonTemplate` with placeholders. -- Set `expiresAfter` to match operational policy (e.g., 4 h for security reviews). 
-- Document fallback contacts in `docs/runbook.md`. - -### 3.5 Manage secrets - -- Declare secrets under `spec.secrets`. -- Reference secrets via expressions (e.g., `{{ secrets.jiraToken.value }}`) inside modules that support secure injection. -- Never bake secrets or tokens into pack assets. -- If secret optional, set `optional: true` and handle absence in step logic. - -### 3.6 Document outputs - -- List expected artifacts under `spec.outputs`. -- Include human-friendly docs (Markdown) describing each output and how to access it through CLI or Console. - ---- - -## 4 · Validation - -### 4.1 Static validation - -```bash -stella pack validate -``` - -Checks performed: - -- Schema compliance (YAML, JSON Schema). -- Determinism guard (forbidden functions, clock usage, network allowlist). -- Reference integrity (assets, schemas, documentation). -- Approval/secret scope availability. - -### 4.2 Simulation & plan hash - -```bash -stella pack plan --inputs samples/inputs.json --output .artifacts/plan.json -stella pack simulate --inputs samples/inputs.json --output .artifacts/sim.json -``` - -- Review plan graph to ensure step ordering and gating align with expectations. -- Store simulation output with pack metadata for future audits. - -### 4.3 Local rehearsal - -```bash -stella pack run \ - --inputs samples/inputs.json \ - --secrets jiraToken=@secrets/jira.txt \ - --dry-run -``` - -- Use `--dry-run` to verify approvals and outputs without side effects. -- Real runs require `Packs.Run` and all approval gates satisfied (e.g., via CLI prompts or Console). - -### 4.4 Unit tests (optional but encouraged) - -- Create a `tests/` folder with CLI-driven regression scripts (e.g., using `stella pack plan` + `jq` assertions). -- Integrate into CI pipelines; ensure tests run offline using cached assets. - ---- - -## 5 · Publishing - -### 5.1 Build bundle - -```bash -stella pack build \ - --output dist/sbom-remediation-1.3.0.stella-pack.tgz \ - --manifest pack.yaml -``` - -### 5.2 Sign bundle - -```bash -cosign sign-blob \ - --yes \ - --output-signature dist/sbom-remediation-1.3.0.sig \ - dist/sbom-remediation-1.3.0.stella-pack.tgz -``` - -Store signature alongside bundle; DSSE optional but recommended (see [security guidance](../security/pack-signing-and-rbac.md)). - -### 5.3 Publish to registry - -```bash -stella pack push \ - registry.stella-ops.org/packs/sbom-remediation:1.3.0 \ - --bundle dist/sbom-remediation-1.3.0.stella-pack.tgz \ - --signature dist/sbom-remediation-1.3.0.sig -``` - -Registry verifies signature, stores provenance, and updates index. - -### 5.4 Offline distribution - -- Export bundle + signature + provenance into Offline Kit using `stella pack bundle export`. -- Update mirror manifest (`manifest/offline-manifest.json`) with new pack entries. - ---- - -## 6 · Versioning & Compatibility - -- Follow SemVer (increment major when breaking schema/behaviour). -- Document compatibility in `docs/compatibility.md` (recommended). -- Registry retains immutable history; use `metadata.deprecated: true` to indicate retirement. - ---- - -## 7 · Best Practices - -- **Keep steps idempotent.** Support manual retries without side effects. -- **Surface evidence early.** Export intermediate artifacts (plans, logs) for operators. -- **Localize messages.** Provide `locales/en-US.json` for CLI/Console strings (Sprint 43 requirement). -- **Avoid long-running commands.** Split heavy tasks into smaller steps with progress telemetry. 
-- **Guard network usage.** Use `when: "{{ env.isSealed }}"` to block disallowed network operations or provide offline instructions. -- **Document fallbacks.** Include manual recovery instructions in `docs/runbook.md`. - ---- - -## 8 · Hand-off & Review - -- Submit PR including pack bundle metadata, docs, and validation evidence. -- Request review from Task Runner + Security + DevOps stakeholders. -- Attach `stella pack plan` output and signature digest to review notes. -- After approval, update change log (`docs/CHANGELOG.md`) and notify Task Runner operations. - ---- - -## 9 · Compliance Checklist - -- [ ] Metadata, inputs, steps, approvals, secrets, and outputs defined per spec. -- [ ] Schemas provided for all object inputs and outputs. -- [ ] Determinism validation (`stella pack validate`) executed with evidence stored. -- [ ] Plan + simulation artifacts committed in `.artifacts/` or CI evidence store. -- [ ] Bundle signed (cosign/DSSE) and signature recorded. -- [ ] Runbook and troubleshooting notes documented. -- [ ] Offline distribution steps prepared (bundle export + manifest update). -- [ ] Imposed rule reminder retained at top. - ---- - -*Last updated: 2025-10-27 (Sprint 43).* - +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +# Task Pack Authoring Guide + +This guide teaches engineers how to design, validate, and publish Task Packs that align with the Sprint 43 specification. Follow these steps to ensure deterministic behaviour, secure approvals, and smooth hand-off to operators. + +--- + +## 1 · Prerequisites + +- StellaOps CLI `>= 2025.10.0` with pack commands enabled. +- Authority client configured with `Packs.Write` (publish) and `Packs.Run` (local testing) scopes. +- Access to Task Runner staging environment for validation runs. +- Familiarity with the [Task Pack Specification](spec.md) and [Packs Registry](registry.md). +- Optional: connection to DevOps staging registry or Offline Kit mirror for publishing. + +--- + +## 2 · Design Checklist + +1. **Define objective.** Document the operational need, inputs, expected outputs, and rollback strategy. +2. **Identify approvals.** Determine which scopes/roles must sign off (`Packs.Approve` assignments). +3. **Plan security posture.** Limit secrets usage, set tenant visibility, and note network constraints (sealed mode). +4. **Model observability.** Decide which metrics, logs, and evidence artifacts are critical for post-run audits. +5. **Reuse libraries.** Prefer built-in modules or shared pack fragments to reduce drift. + +Capture the above in `docs/summary.md` (optional but recommended) for future maintainers. + +--- + +## 3 · Authoring Workflow + +### 3.1 Scaffold project + +```bash +mkdir my-pack +cd my-pack +stella pack init --name sbom-remediation +``` + +`stella pack init` creates baseline files: + +- `pack.yaml` with metadata placeholders. +- `schemas/inputs.schema.json` (sample). +- `docs/usage.md` (template for human instructions). +- `.packignore` to exclude build artifacts. + +### 3.2 Define inputs & schemas + +- Use JSON Schema (`draft-2020-12`) for input validation. +- Avoid optional inputs unless there is a deterministic default. +- Store schemas under `schemas/` and reference via relative paths. + +### 3.3 Compose steps + +- Break workflow into small deterministic steps. +- Name each step with stable `id`. +- Wrap scripts/tools using built-in modules; copy scripts to `assets/` if necessary. 
+- Use `when` expressions for branch logic; ensure expressions rely solely on inputs or previous outputs. +- For loops, adopt `map` with capped iteration count; avoid data-dependent randomness. + +### 3.4 Configure approvals + +- Add `spec.approvals` entries for each required review. +- Provide informative `reasonTemplate` with placeholders. +- Set `expiresAfter` to match operational policy (e.g., 4 h for security reviews). +- Document fallback contacts in `docs/runbook.md`. + +### 3.5 Manage secrets + +- Declare secrets under `spec.secrets`. +- Reference secrets via expressions (e.g., `{{ secrets.jiraToken.value }}`) inside modules that support secure injection. +- Never bake secrets or tokens into pack assets. +- If secret optional, set `optional: true` and handle absence in step logic. + +### 3.6 Document outputs + +- List expected artifacts under `spec.outputs`. +- Include human-friendly docs (Markdown) describing each output and how to access it through CLI or Console. + +--- + +## 4 · Validation + +### 4.1 Static validation + +```bash +stella pack validate +``` + +Checks performed: + +- Schema compliance (YAML, JSON Schema). +- Determinism guard (forbidden functions, clock usage, network allowlist). +- Reference integrity (assets, schemas, documentation). +- Approval/secret scope availability. + +### 4.2 Simulation & plan hash + +```bash +stella pack plan --inputs samples/inputs.json --output .artifacts/plan.json +stella pack simulate --inputs samples/inputs.json --output .artifacts/sim.json +``` + +- Review plan graph to ensure step ordering and gating align with expectations. +- Store simulation output with pack metadata for future audits. + +### 4.3 Local rehearsal + +```bash +stella pack run \ + --inputs samples/inputs.json \ + --secrets jiraToken=@secrets/jira.txt \ + --dry-run +``` + +- Use `--dry-run` to verify approvals and outputs without side effects. +- Real runs require `Packs.Run` and all approval gates satisfied (e.g., via CLI prompts or Console). + +### 4.4 Unit tests (optional but encouraged) + +- Create a `tests/` folder with CLI-driven regression scripts (e.g., using `stella pack plan` + `jq` assertions). +- Integrate into CI pipelines; ensure tests run offline using cached assets. + +--- + +## 5 · Publishing + +### 5.1 Build bundle + +```bash +stella pack build \ + --output dist/sbom-remediation-1.3.0.stella-pack.tgz \ + --manifest pack.yaml +``` + +### 5.2 Sign bundle + +```bash +cosign sign-blob \ + --yes \ + --output-signature dist/sbom-remediation-1.3.0.sig \ + dist/sbom-remediation-1.3.0.stella-pack.tgz +``` + +Store signature alongside bundle; DSSE optional but recommended (see [security guidance](../security/pack-signing-and-rbac.md)). + +### 5.3 Publish to registry + +```bash +stella pack push \ + registry.stella-ops.org/packs/sbom-remediation:1.3.0 \ + --bundle dist/sbom-remediation-1.3.0.stella-pack.tgz \ + --signature dist/sbom-remediation-1.3.0.sig +``` + +Registry verifies signature, stores provenance, and updates index. + +### 5.4 Offline distribution + +- Export bundle + signature + provenance into Offline Kit using `stella pack bundle export`. +- Update mirror manifest (`manifest/offline-manifest.json`) with new pack entries. + +--- + +## 6 · Versioning & Compatibility + +- Follow SemVer (increment major when breaking schema/behaviour). +- Document compatibility in `docs/compatibility.md` (recommended). +- Registry retains immutable history; use `metadata.deprecated: true` to indicate retirement. 
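+
+Where the registry API is reachable, one hedged way to retire a superseded version after a breaking release is the registry's deprecate (soft-delete) endpoint described in the Packs Registry doc; the sketch below is illustrative only — the host, token variable, and version numbers are placeholders.
+
+```bash
+# Illustrative only: mark 1.2.4 as deprecated after shipping a breaking 2.0.0.
+# DELETE performs a soft delete (deprecation); published history stays immutable.
+curl -X DELETE \
+  -H "Authorization: Bearer ${PACKS_WRITE_TOKEN}" \
+  "https://registry.stella-ops.org/api/packs/sbom-remediation/versions/1.2.4"
+```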
+ +--- + +## 7 · Best Practices + +- **Keep steps idempotent.** Support manual retries without side effects. +- **Surface evidence early.** Export intermediate artifacts (plans, logs) for operators. +- **Localize messages.** Provide `locales/en-US.json` for CLI/Console strings (Sprint 43 requirement). +- **Avoid long-running commands.** Split heavy tasks into smaller steps with progress telemetry. +- **Guard network usage.** Use `when: "{{ env.isSealed }}"` to block disallowed network operations or provide offline instructions. +- **Document fallbacks.** Include manual recovery instructions in `docs/runbook.md`. + +--- + +## 8 · Hand-off & Review + +- Submit PR including pack bundle metadata, docs, and validation evidence. +- Request review from Task Runner + Security + DevOps stakeholders. +- Attach `stella pack plan` output and signature digest to review notes. +- After approval, update change log (`docs/CHANGELOG.md`) and notify Task Runner operations. + +--- + +## 9 · Compliance Checklist + +- [ ] Metadata, inputs, steps, approvals, secrets, and outputs defined per spec. +- [ ] Schemas provided for all object inputs and outputs. +- [ ] Determinism validation (`stella pack validate`) executed with evidence stored. +- [ ] Plan + simulation artifacts committed in `.artifacts/` or CI evidence store. +- [ ] Bundle signed (cosign/DSSE) and signature recorded. +- [ ] Runbook and troubleshooting notes documented. +- [ ] Offline distribution steps prepared (bundle export + manifest update). +- [ ] Imposed rule reminder retained at top. + +--- + +*Last updated: 2025-10-27 (Sprint 43).* + diff --git a/docs/task-packs/registry.md b/docs/task-packs/registry.md index 5b988422..3dd874b7 100644 --- a/docs/task-packs/registry.md +++ b/docs/task-packs/registry.md @@ -1,174 +1,174 @@ -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -# Packs Registry Architecture & Operations - -The Packs Registry stores, verifies, and serves Task Pack bundles across environments. It integrates with Authority for RBAC, Task Runner for execution, DevOps for release automation, and Offline Kit for air-gapped distribution. - ---- - -## 1 · Service Overview - -- **Service name:** `StellaOps.PacksRegistry` -- **Interfaces:** REST/GraphQL API, OCI-compatible registry endpoints, event streams for mirroring. -- **Data stores:** MongoDB (`packs`, `pack_versions`, `pack_provenance`), object storage (bundle blobs, signatures), timeline events. -- **Dependencies:** Authority scopes (`Packs.*`), Export Center (manifests), DevOps signing service, Notifications (optional). - ---- - -## 2 · Core Concepts - -| Concept | Description | -|---------|-------------| -| **Pack record** | Immutable entry representing a pack version; includes metadata, digest, signatures, tenant visibility. | -| **Channel** | Logical distribution channel (`stable`, `edge`, `beta`, custom). Controls mirroring/promotion flows. | -| **Provenance** | DSSE statements + SBOM linking pack bundle to source repo, CLI build, and Task Runner compatibility. | -| **Mirroring policy** | Rules specifying which packs replicate to downstream registries or Offline Kit bundles. | -| **Audit trail** | Append-only log capturing publish/update/delete actions, approvals, and policy evaluations. 
| - ---- - -## 3 · API Surface - -### 3.1 REST Endpoints - -| Method | Path | Description | Scopes | -|--------|------|-------------|--------| -| `GET` | `/api/packs` | List packs with filters (`name`, `channel`, `tenant`, `tag`). | `Packs.Read` | -| `GET` | `/api/packs/{packId}/versions` | List versions with metadata, provenance. | `Packs.Read` | -| `GET` | `/api/packs/{packId}/versions/{version}` | Retrieve manifest, signatures, compatibility matrix. | `Packs.Read` | -| `POST` | `/api/packs/{packId}/versions` | Publish new version (bundle upload or OCI reference). | `Packs.Write` | -| `POST` | `/api/packs/{packId}/promote` | Promote version between channels (edge→stable). | `Packs.Write` + approval policy | -| `DELETE` | `/api/packs/{packId}/versions/{version}` | Deprecate version (soft delete, immutability preserved). | `Packs.Write` | -| `GET` | `/api/packs/{packId}/events` | Stream audit events (SSE). | `Packs.Read` | - -### 3.2 OCI Endpoints - -The registry exposes OCI-compatible endpoints (`/v2///...`) supporting: - -- `PUT`/`PATCH`/`GET` for manifests and blobs. -- Content-addressed digests using SHA-256. -- Annotations for pack metadata (`org.opencontainers.image.title`, `io.stellaops.pack.metadata`). - -### 3.3 GraphQL (Optional) - -GraphQL endpoint (`/api/graphql`) enables advanced queries (filter by approvals, tags, compatibility). Under active development; reference API schema once published. - ---- - -## 4 · Publishing Workflow - -1. CLI/CI calls `POST /api/packs/{id}/versions` with signed bundle. -2. Registry verifies: - - Manifest schema compliance. - - Signature (cosign/DSSE) validity. - - Authority scopes (`Packs.Write`). - - Tenant visibility constraints. -3. On success, registry stores bundle, provenance, and emits event (`pack.version.published`). -4. Optional promotion requires additional approvals or integration with DevOps release boards. - -All actions recorded in audit log: - -```json -{ - "id": "evt_01HF...", - "type": "pack.version.published", - "packId": "sbom-remediation", - "version": "1.3.0", - "actor": "user:alice", - "tenant": "west-prod", - "source": "cli/2025.10.0", - "signatures": ["sha256:..."], - "metadataHash": "sha256:..." -} -``` - ---- - -## 5 · Mirroring & Offline Support - -- **Automatic mirroring:** Configure policies to push packs to secondary registries (edge clusters, regional mirrors) or object stores. -- **Offline Kit integration:** `ops/offline-kit` pipeline pulls packs matching specified channels and writes them to `offline/packs/manifest.json` with signatures. -- **Checksum manifest:** Registry maintains `digestmap.json` listing pack digests + signatures; offline installers verify before import. -- **Sealed mode:** Registry can operate in read-only mode for sealed environments; publishing disabled except via offline import command (`stella pack mirror import`). - ---- - -## 6 · Security & Compliance - -- Enforce Authority scopes; tokens without tenant or required scope are rejected (`ERR_PACK_SCOPE`). -- Signatures verified using trusted Fulcio/KMS roots; optional mirror trust bundles configured via `registry.trustBundle`. -- RBAC mapping: - -| Role | Scopes | Capabilities | -|------|--------|--------------| -| `PackViewer` | `Packs.Read` | Browse, fetch manifests/bundles. | -| `PackPublisher` | `Packs.Read`, `Packs.Write` | Publish/promote, manage channels (subject to policy). | -| `PackApprover` | `Packs.Read`, `Packs.Approve` | Approve promotions, override tenant visibility (with audit logging). 
| -| `PackOperator` | `Packs.Read`, `Packs.Run` | Execute packs (via CLI/Task Runner). | - -- Audit events forwarded to Authority + Evidence Locker. -- Built-in malware/secret scanning runs on bundle upload (configurable via DevOps pipeline). - -See [pack signing & RBAC guidance](../security/pack-signing-and-rbac.md) for deeper controls. - ---- - -## 7 · Observability - -- Metrics (`registry` namespace): - - `pack_publish_total{result}` – success/failure counts. - - `pack_signature_verify_seconds` – verification latency. - - `pack_channel_promotions_total` – promotions per channel. - - `pack_mirror_queue_depth` – pending mirror jobs. -- Logs (structured JSON with `packId`, `version`, `actor`, `tenant`, `digest`). -- Traces instrument bundle verification, storage writes, and mirror pushes. -- Alerting suggestions: - - Publish failure rate > 5 % (5 m window) triggers DevOps escalation. - - Mirror lag > 15 m surfaces to Ops dashboard. - ---- - -## 8 · Schema & Metadata Extensions - -- Default metadata stored under `metadata.*` from manifest. -- Registry supplements with: - - `compatibility.cli` (supported CLI versions). - - `compatibility.runner` (Task Runner build requirements). - - `provenance.attestations[]` (URIs). - - `channels[]` (current channel assignments). - - `tenantVisibility[]`. - - `deprecated` flag + replacement hints. - -Extensions must be deterministic and derived from signed bundle data. - ---- - -## 9 · Operations - -- **Backups:** Daily snapshots of Mongo collections + object storage, retained for 30 days. -- **Retention:** Old versions retained indefinitely; mark as `deprecated` instead of deleting. -- **Maintenance:** - - Run `registry vacuum` weekly to prune orphaned blobs. - - Rotate signing keys per security policy (document in `pack-signing-and-rbac`). - - Validate trust bundles quarterly. -- **Disaster recovery:** - - Restore database + object storage. - - Rebuild OCI indexes (`registry rebuild-index`). - - Replay audit events for downstream systems. - ---- - -## 10 · Compliance Checklist - -- [ ] REST + OCI endpoints documented with required scopes. -- [ ] Publishing flow covers signature verification, audit logging, and promotion policies. -- [ ] Mirroring/offline strategy recorded (policies, manifests, sealed mode notes). -- [ ] RBAC roles and scope mapping defined. -- [ ] Observability metrics, logs, and alerts described. -- [ ] Operations guidance covers backups, rotation, disaster recovery. -- [ ] Imposed rule reminder included at top of document. - ---- - -*Last updated: 2025-10-27 (Sprint 43).* - +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +# Packs Registry Architecture & Operations + +The Packs Registry stores, verifies, and serves Task Pack bundles across environments. It integrates with Authority for RBAC, Task Runner for execution, DevOps for release automation, and Offline Kit for air-gapped distribution. + +--- + +## 1 · Service Overview + +- **Service name:** `StellaOps.PacksRegistry` +- **Interfaces:** REST/GraphQL API, OCI-compatible registry endpoints, event streams for mirroring. +- **Data stores:** MongoDB (`packs`, `pack_versions`, `pack_provenance`), object storage (bundle blobs, signatures), timeline events. +- **Dependencies:** Authority scopes (`Packs.*`), Export Center (manifests), DevOps signing service, Notifications (optional). 
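+
+As a quick orientation before the detailed API surface in §3, the sketch below lists packs over the REST interface with an Authority token carrying `Packs.Read`; the host, token variable, and filter values are placeholders, and piping through `jq` is optional.
+
+```bash
+# Illustrative only: list stable-channel packs visible to the caller's tenant.
+# Path and query filters follow §3.1; the bearer token must carry Packs.Read.
+curl -sS \
+  -H "Authorization: Bearer ${PACKS_READ_TOKEN}" \
+  "https://registry.stella-ops.org/api/packs?channel=stable&name=sbom-remediation" | jq '.'
+```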
+ +--- + +## 2 · Core Concepts + +| Concept | Description | +|---------|-------------| +| **Pack record** | Immutable entry representing a pack version; includes metadata, digest, signatures, tenant visibility. | +| **Channel** | Logical distribution channel (`stable`, `edge`, `beta`, custom). Controls mirroring/promotion flows. | +| **Provenance** | DSSE statements + SBOM linking pack bundle to source repo, CLI build, and Task Runner compatibility. | +| **Mirroring policy** | Rules specifying which packs replicate to downstream registries or Offline Kit bundles. | +| **Audit trail** | Append-only log capturing publish/update/delete actions, approvals, and policy evaluations. | + +--- + +## 3 · API Surface + +### 3.1 REST Endpoints + +| Method | Path | Description | Scopes | +|--------|------|-------------|--------| +| `GET` | `/api/packs` | List packs with filters (`name`, `channel`, `tenant`, `tag`). | `Packs.Read` | +| `GET` | `/api/packs/{packId}/versions` | List versions with metadata, provenance. | `Packs.Read` | +| `GET` | `/api/packs/{packId}/versions/{version}` | Retrieve manifest, signatures, compatibility matrix. | `Packs.Read` | +| `POST` | `/api/packs/{packId}/versions` | Publish new version (bundle upload or OCI reference). | `Packs.Write` | +| `POST` | `/api/packs/{packId}/promote` | Promote version between channels (edge→stable). | `Packs.Write` + approval policy | +| `DELETE` | `/api/packs/{packId}/versions/{version}` | Deprecate version (soft delete, immutability preserved). | `Packs.Write` | +| `GET` | `/api/packs/{packId}/events` | Stream audit events (SSE). | `Packs.Read` | + +### 3.2 OCI Endpoints + +The registry exposes OCI-compatible endpoints (`/v2///...`) supporting: + +- `PUT`/`PATCH`/`GET` for manifests and blobs. +- Content-addressed digests using SHA-256. +- Annotations for pack metadata (`org.opencontainers.image.title`, `io.stellaops.pack.metadata`). + +### 3.3 GraphQL (Optional) + +GraphQL endpoint (`/api/graphql`) enables advanced queries (filter by approvals, tags, compatibility). Under active development; reference API schema once published. + +--- + +## 4 · Publishing Workflow + +1. CLI/CI calls `POST /api/packs/{id}/versions` with signed bundle. +2. Registry verifies: + - Manifest schema compliance. + - Signature (cosign/DSSE) validity. + - Authority scopes (`Packs.Write`). + - Tenant visibility constraints. +3. On success, registry stores bundle, provenance, and emits event (`pack.version.published`). +4. Optional promotion requires additional approvals or integration with DevOps release boards. + +All actions recorded in audit log: + +```json +{ + "id": "evt_01HF...", + "type": "pack.version.published", + "packId": "sbom-remediation", + "version": "1.3.0", + "actor": "user:alice", + "tenant": "west-prod", + "source": "cli/2025.10.0", + "signatures": ["sha256:..."], + "metadataHash": "sha256:..." +} +``` + +--- + +## 5 · Mirroring & Offline Support + +- **Automatic mirroring:** Configure policies to push packs to secondary registries (edge clusters, regional mirrors) or object stores. +- **Offline Kit integration:** `ops/offline-kit` pipeline pulls packs matching specified channels and writes them to `offline/packs/manifest.json` with signatures. +- **Checksum manifest:** Registry maintains `digestmap.json` listing pack digests + signatures; offline installers verify before import. +- **Sealed mode:** Registry can operate in read-only mode for sealed environments; publishing disabled except via offline import command (`stella pack mirror import`). 
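+
+For sealed sites, a minimal import sketch under the assumptions below ties these pieces together: verify the mirrored signature, compare the bundle digest against `digestmap.json`, then import. File locations, the public-key path, and the `--bundle` flag on `stella pack mirror import` are assumptions — only the command name and `digestmap.json` come from the text above.
+
+```bash
+# Hedged sealed-site import flow; paths and flags are placeholders.
+BUNDLE=offline/packs/sbom-remediation-1.3.0.stella-pack.tgz
+
+# 1. Verify the cosign signature against the mirrored trust material.
+cosign verify-blob \
+  --key offline/trust/packs-registry.pub \
+  --signature offline/packs/sbom-remediation-1.3.0.sig \
+  "$BUNDLE"
+
+# 2. Confirm the bundle digest appears in the mirrored checksum manifest.
+grep -F "$(sha256sum "$BUNDLE" | cut -d' ' -f1)" offline/packs/digestmap.json
+
+# 3. Import into the sealed registry.
+stella pack mirror import --bundle "$BUNDLE"
+```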
+ +--- + +## 6 · Security & Compliance + +- Enforce Authority scopes; tokens without tenant or required scope are rejected (`ERR_PACK_SCOPE`). +- Signatures verified using trusted Fulcio/KMS roots; optional mirror trust bundles configured via `registry.trustBundle`. +- RBAC mapping: + +| Role | Scopes | Capabilities | +|------|--------|--------------| +| `PackViewer` | `Packs.Read` | Browse, fetch manifests/bundles. | +| `PackPublisher` | `Packs.Read`, `Packs.Write` | Publish/promote, manage channels (subject to policy). | +| `PackApprover` | `Packs.Read`, `Packs.Approve` | Approve promotions, override tenant visibility (with audit logging). | +| `PackOperator` | `Packs.Read`, `Packs.Run` | Execute packs (via CLI/Task Runner). | + +- Audit events forwarded to Authority + Evidence Locker. +- Built-in malware/secret scanning runs on bundle upload (configurable via DevOps pipeline). + +See [pack signing & RBAC guidance](../security/pack-signing-and-rbac.md) for deeper controls. + +--- + +## 7 · Observability + +- Metrics (`registry` namespace): + - `pack_publish_total{result}` – success/failure counts. + - `pack_signature_verify_seconds` – verification latency. + - `pack_channel_promotions_total` – promotions per channel. + - `pack_mirror_queue_depth` – pending mirror jobs. +- Logs (structured JSON with `packId`, `version`, `actor`, `tenant`, `digest`). +- Traces instrument bundle verification, storage writes, and mirror pushes. +- Alerting suggestions: + - Publish failure rate > 5 % (5 m window) triggers DevOps escalation. + - Mirror lag > 15 m surfaces to Ops dashboard. + +--- + +## 8 · Schema & Metadata Extensions + +- Default metadata stored under `metadata.*` from manifest. +- Registry supplements with: + - `compatibility.cli` (supported CLI versions). + - `compatibility.runner` (Task Runner build requirements). + - `provenance.attestations[]` (URIs). + - `channels[]` (current channel assignments). + - `tenantVisibility[]`. + - `deprecated` flag + replacement hints. + +Extensions must be deterministic and derived from signed bundle data. + +--- + +## 9 · Operations + +- **Backups:** Daily snapshots of Mongo collections + object storage, retained for 30 days. +- **Retention:** Old versions retained indefinitely; mark as `deprecated` instead of deleting. +- **Maintenance:** + - Run `registry vacuum` weekly to prune orphaned blobs. + - Rotate signing keys per security policy (document in `pack-signing-and-rbac`). + - Validate trust bundles quarterly. +- **Disaster recovery:** + - Restore database + object storage. + - Rebuild OCI indexes (`registry rebuild-index`). + - Replay audit events for downstream systems. + +--- + +## 10 · Compliance Checklist + +- [ ] REST + OCI endpoints documented with required scopes. +- [ ] Publishing flow covers signature verification, audit logging, and promotion policies. +- [ ] Mirroring/offline strategy recorded (policies, manifests, sealed mode notes). +- [ ] RBAC roles and scope mapping defined. +- [ ] Observability metrics, logs, and alerts described. +- [ ] Operations guidance covers backups, rotation, disaster recovery. +- [ ] Imposed rule reminder included at top of document. + +--- + +*Last updated: 2025-10-27 (Sprint 43).* + diff --git a/docs/task-packs/runbook.md b/docs/task-packs/runbook.md index de2025d6..7167b514 100644 --- a/docs/task-packs/runbook.md +++ b/docs/task-packs/runbook.md @@ -1,162 +1,162 @@ -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. 
- -# Task Pack Operations Runbook - -This runbook guides SREs and on-call engineers through executing, monitoring, and troubleshooting Task Packs using the Task Runner service, Packs Registry, and StellaOps CLI. It aligns with Sprint 43 deliverables (approvals workflow, notifications, chaos resilience). - ---- - -## 1 · Quick Reference - -| Action | Command / UI | Notes | -|--------|--------------|-------| -| Validate pack | `stella pack validate --bundle ` | Run before publishing or importing. | -| Plan pack run | `stella pack plan --inputs inputs.json` | Outputs plan hash, required approvals, secret summary. | -| Execute pack | `stella pack run --pack :` | Streams logs; prompts for secrets/approvals if allowed. | -| Approve gate | Console notifications or `stella pack approve --run --gate ` | Requires `Packs.Approve`. | -| View run | Console `/console/packs/runs/:id` or `stella pack runs show ` | SSE stream available for live status. | -| Export evidence | `stella pack runs export --run ` | Produces bundle with plan, logs, artifacts, attestations. | - ---- - -## 2 · Run Lifecycle - -1. **Submission** - - CLI/Orchestrator submits run with inputs, pack version, tenant context. - - Task Runner validates pack hash, scopes, sealed-mode constraints. -2. **Plan & Simulation** - - Runner caches plan graph; optional simulation diff recorded. -3. **Approvals** - - Gates emit notifications (`NOTIFY-SVC-40-001`). - - Approvers can approve/resume via CLI, Console, or API. -4. **Execution** - - Steps executed per plan (sequential/parallel). - - Logs streamed via SSE (`/task-runner/runs/{id}/logs`). -5. **Evidence & Attestation** - - On completion, DSSE attestation + evidence bundle stored. - - Exports available via Export Center. -6. **Cleanup** - - Artifacts retained per retention policy (default 30 d). - - Mirror pack run manifest to Offline Kit if configured. - ---- - -## 3 · Monitoring & Telemetry - -- **Metrics dashboards:** `task-runner` Grafana board. - - `pack_run_active` – active runs per tenant. - - `pack_step_duration_seconds` – histograms per step type. - - `pack_gate_wait_seconds` – approval wait time (alert > 30 m). - - `pack_run_success_ratio` – success vs failure rate. -- **Logs:** Search by `runId`, `packId`, `tenant`, `stepId`. -- **Traces:** Query `taskrunner.run` span in Tempo/Jaeger. -- **Notifications:** Subscribe to `pack.run.*` topics via Notifier for Slack/email/PagerDuty hooks. - -Observability configuration referenced in Task Runner tasks (OBS-50-001..55-001). - ---- - -## 4 · Approvals Workflow - -- Approvals may be requested via Console banner, CLI prompt, or email/Slack. -- Approver roles: `Packs.Approve` + tenant membership. -- CLI command: - -```bash -stella pack approve \ - --run run:tenant:timestamp \ - --gate security-review \ - --comment "Validated remediation scope; proceeding." -``` - -- Auto-expiry triggers run cancellation (configurable per gate). -- Approval events logged and included in evidence bundle. - ---- - -## 5 · Secrets Handling - -- Secrets retrieved via Authority secure channel or CLI profile. -- Task Runner injects secrets into isolated environment variables or temp files (auto-shredded). -- Logs redact secrets; evidence bundles include only secret metadata (name, scope, last four characters). -- For sealed mode, secrets must originate from sealed vault (configured via `TASKRUNNER_SEALED_VAULT_URL`). 
- ---- - -## 6 · Failure Recovery - -| Scenario | Symptom | Resolution | -|----------|---------|------------| -| **Plan hash mismatch** | Run aborted with `ERR_PACK_HASH_MISMATCH`. | Re-run `stella pack plan`; ensure pack not modified post-plan. | -| **Approval timeout** | `ERR_PACK_APPROVAL_TIMEOUT`. | Requeue run with extended TTL or escalate to approver; verify notifications delivered. | -| **Secret missing** | Run fails at injection step. | Provide secret via CLI (`--secrets`) or configure profile; check Authority scope. | -| **Network blocked (sealed)** | `ERR_PACK_NETWORK_BLOCKED`. | Update pack to avoid external calls or whitelist domain via AirGap policy. | -| **Artifact upload failure** | Evidence missing, logs show storage errors. | Retry run with `--resume` (if supported); verify object storage health. | -| **Runner chaos trigger** | Run paused with chaos event note. | Review chaos test plan; resume if acceptable or cancel run. | - -`stella pack runs resume --run ` resumes paused runs post-remediation (approvals or transient failures). - ---- - -## 7 · Chaos & Resilience - -- Chaos hooks pause runs, drop network, or delay approvals to test resilience. -- Track chaos events via `pack.chaos.injected` timeline entries. -- Post-chaos, ensure metrics return to baseline; record findings in Ops log. - ---- - -## 8 · Offline & Air-Gapped Execution - -- Use `stella pack mirror pull` to import packs into sealed registry. -- CLI caches bundles under `~/.stella/packs/` for offline runs. -- Approvals require offline process: - - Generate approval request bundle (`stella pack approve --offline-request`). - - Approver signs bundle using offline CLI. - - Import approval via `stella pack approve --offline-response`. -- Evidence bundles exported to removable media; verify checksums before upload to online systems. - ---- - -## 9 · Runbooks for Common Packs - -Maintain per-pack playbooks in `docs/task-packs/runbook/.md`. Include: - -- Purpose and scope. -- Required inputs and secrets. -- Approval stakeholders. -- Pre-checks and post-checks. -- Rollback procedures. - -The Docs Guild can use this root runbook as a template. - ---- - -## 10 · Escalation Matrix - -| Issue | Primary | Secondary | Notes | -|-------|---------|-----------|-------| -| Pack validation errors | DevEx/CLI Guild | Task Runner Guild | Provide pack bundle + validation output. | -| Approval pipeline failure | Task Runner Guild | Authority Core | Confirm scope/role mapping. | -| Registry outage | Packs Registry Guild | DevOps Guild | Use mirror fallback if possible. | -| Evidence integrity issues | Evidence Locker Guild | Security Guild | Validate DSSE attestations, escalate if tampered. | - -Escalations must include run ID, tenant, pack version, plan hash, and timestamps. - ---- - -## 11 · Compliance Checklist - -- [ ] Run lifecycle documented (submission → evidence). -- [ ] Monitoring metrics, logs, traces, and notifications captured. -- [ ] Approvals workflow instructions provided (CLI + Console). -- [ ] Secret handling, sealed-mode constraints, and offline process described. -- [ ] Failure scenarios + recovery steps listed. -- [ ] Chaos/resilience guidance included. -- [ ] Escalation matrix defined. -- [ ] Imposed rule reminder included at top. - ---- - -*Last updated: 2025-10-27 (Sprint 43).* - +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. 
+ +# Task Pack Operations Runbook + +This runbook guides SREs and on-call engineers through executing, monitoring, and troubleshooting Task Packs using the Task Runner service, Packs Registry, and StellaOps CLI. It aligns with Sprint 43 deliverables (approvals workflow, notifications, chaos resilience). + +--- + +## 1 · Quick Reference + +| Action | Command / UI | Notes | +|--------|--------------|-------| +| Validate pack | `stella pack validate --bundle ` | Run before publishing or importing. | +| Plan pack run | `stella pack plan --inputs inputs.json` | Outputs plan hash, required approvals, secret summary. | +| Execute pack | `stella pack run --pack :` | Streams logs; prompts for secrets/approvals if allowed. | +| Approve gate | Console notifications or `stella pack approve --run --gate ` | Requires `Packs.Approve`. | +| View run | Console `/console/packs/runs/:id` or `stella pack runs show ` | SSE stream available for live status. | +| Export evidence | `stella pack runs export --run ` | Produces bundle with plan, logs, artifacts, attestations. | + +--- + +## 2 · Run Lifecycle + +1. **Submission** + - CLI/Orchestrator submits run with inputs, pack version, tenant context. + - Task Runner validates pack hash, scopes, sealed-mode constraints. +2. **Plan & Simulation** + - Runner caches plan graph; optional simulation diff recorded. +3. **Approvals** + - Gates emit notifications (`NOTIFY-SVC-40-001`). + - Approvers can approve/resume via CLI, Console, or API. +4. **Execution** + - Steps executed per plan (sequential/parallel). + - Logs streamed via SSE (`/task-runner/runs/{id}/logs`). +5. **Evidence & Attestation** + - On completion, DSSE attestation + evidence bundle stored. + - Exports available via Export Center. +6. **Cleanup** + - Artifacts retained per retention policy (default 30 d). + - Mirror pack run manifest to Offline Kit if configured. + +--- + +## 3 · Monitoring & Telemetry + +- **Metrics dashboards:** `task-runner` Grafana board. + - `pack_run_active` – active runs per tenant. + - `pack_step_duration_seconds` – histograms per step type. + - `pack_gate_wait_seconds` – approval wait time (alert > 30 m). + - `pack_run_success_ratio` – success vs failure rate. +- **Logs:** Search by `runId`, `packId`, `tenant`, `stepId`. +- **Traces:** Query `taskrunner.run` span in Tempo/Jaeger. +- **Notifications:** Subscribe to `pack.run.*` topics via Notifier for Slack/email/PagerDuty hooks. + +Observability configuration referenced in Task Runner tasks (OBS-50-001..55-001). + +--- + +## 4 · Approvals Workflow + +- Approvals may be requested via Console banner, CLI prompt, or email/Slack. +- Approver roles: `Packs.Approve` + tenant membership. +- CLI command: + +```bash +stella pack approve \ + --run run:tenant:timestamp \ + --gate security-review \ + --comment "Validated remediation scope; proceeding." +``` + +- Auto-expiry triggers run cancellation (configurable per gate). +- Approval events logged and included in evidence bundle. + +--- + +## 5 · Secrets Handling + +- Secrets retrieved via Authority secure channel or CLI profile. +- Task Runner injects secrets into isolated environment variables or temp files (auto-shredded). +- Logs redact secrets; evidence bundles include only secret metadata (name, scope, last four characters). +- For sealed mode, secrets must originate from sealed vault (configured via `TASKRUNNER_SEALED_VAULT_URL`). 
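+
+A minimal sketch of the sealed-mode secret flow above; the vault URL, pack reference, and `--secrets` value syntax are illustrative assumptions rather than canonical:
+
+```bash
+# Point Task Runner at the sealed vault (sealed mode only); the URL is a placeholder.
+export TASKRUNNER_SEALED_VAULT_URL="https://vault.internal.example/sealed"
+
+# Pass a named secret at run time; the name=reference form is assumed, not specified.
+stella pack run \
+  --pack sbom-remediation:1.3.0 \
+  --secrets jiraToken=vault://packs/jira-token
+```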
+ +--- + +## 6 · Failure Recovery + +| Scenario | Symptom | Resolution | +|----------|---------|------------| +| **Plan hash mismatch** | Run aborted with `ERR_PACK_HASH_MISMATCH`. | Re-run `stella pack plan`; ensure pack not modified post-plan. | +| **Approval timeout** | `ERR_PACK_APPROVAL_TIMEOUT`. | Requeue run with extended TTL or escalate to approver; verify notifications delivered. | +| **Secret missing** | Run fails at injection step. | Provide secret via CLI (`--secrets`) or configure profile; check Authority scope. | +| **Network blocked (sealed)** | `ERR_PACK_NETWORK_BLOCKED`. | Update pack to avoid external calls or whitelist domain via AirGap policy. | +| **Artifact upload failure** | Evidence missing, logs show storage errors. | Retry run with `--resume` (if supported); verify object storage health. | +| **Runner chaos trigger** | Run paused with chaos event note. | Review chaos test plan; resume if acceptable or cancel run. | + +`stella pack runs resume --run ` resumes paused runs post-remediation (approvals or transient failures). + +--- + +## 7 · Chaos & Resilience + +- Chaos hooks pause runs, drop network, or delay approvals to test resilience. +- Track chaos events via `pack.chaos.injected` timeline entries. +- Post-chaos, ensure metrics return to baseline; record findings in Ops log. + +--- + +## 8 · Offline & Air-Gapped Execution + +- Use `stella pack mirror pull` to import packs into sealed registry. +- CLI caches bundles under `~/.stella/packs/` for offline runs. +- Approvals require offline process: + - Generate approval request bundle (`stella pack approve --offline-request`). + - Approver signs bundle using offline CLI. + - Import approval via `stella pack approve --offline-response`. +- Evidence bundles exported to removable media; verify checksums before upload to online systems. + +--- + +## 9 · Runbooks for Common Packs + +Maintain per-pack playbooks in `docs/task-packs/runbook/.md`. Include: + +- Purpose and scope. +- Required inputs and secrets. +- Approval stakeholders. +- Pre-checks and post-checks. +- Rollback procedures. + +The Docs Guild can use this root runbook as a template. + +--- + +## 10 · Escalation Matrix + +| Issue | Primary | Secondary | Notes | +|-------|---------|-----------|-------| +| Pack validation errors | DevEx/CLI Guild | Task Runner Guild | Provide pack bundle + validation output. | +| Approval pipeline failure | Task Runner Guild | Authority Core | Confirm scope/role mapping. | +| Registry outage | Packs Registry Guild | DevOps Guild | Use mirror fallback if possible. | +| Evidence integrity issues | Evidence Locker Guild | Security Guild | Validate DSSE attestations, escalate if tampered. | + +Escalations must include run ID, tenant, pack version, plan hash, and timestamps. + +--- + +## 11 · Compliance Checklist + +- [ ] Run lifecycle documented (submission → evidence). +- [ ] Monitoring metrics, logs, traces, and notifications captured. +- [ ] Approvals workflow instructions provided (CLI + Console). +- [ ] Secret handling, sealed-mode constraints, and offline process described. +- [ ] Failure scenarios + recovery steps listed. +- [ ] Chaos/resilience guidance included. +- [ ] Escalation matrix defined. +- [ ] Imposed rule reminder included at top. 
+ +--- + +*Last updated: 2025-10-27 (Sprint 43).* + diff --git a/docs/task-packs/spec.md b/docs/task-packs/spec.md index 49c16078..d6e7c98e 100644 --- a/docs/task-packs/spec.md +++ b/docs/task-packs/spec.md @@ -1,249 +1,249 @@ -> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. - -# Task Pack Specification (Sprint 43 Draft) - -The Task Pack specification defines a deterministic, auditable format that enables operators to encode multi-step maintenance, validation, and deployment workflows. Packs are executed by the Task Runner service, distributed through the Packs Registry, and invoked via the StellaOps CLI (`stella pack ...`) or Orchestrator integrations. - ---- - -## 1 · Goals & Scope - -- **Deterministic execution.** Identical inputs yield identical run graphs, output manifests, and evidence bundles across environments (online, sealed, or offline). -- **Secure-by-default.** Pack metadata must capture provenance, signatures, RBAC requirements, and secret usage; execution enforces tenant scopes and approvals. -- **Portable.** Packs are distributed as signed OCI artifacts or tarballs that work in connected and air-gapped deployments, including Offline Kit mirrors. -- **Composable.** Packs can reference reusable steps, expressions, and shared libraries without sacrificing determinism or auditability. - -Non-goals: full-blown workflow orchestration, unbounded scripting, or remote code injection. All logic is declarative and constrained to Task Runner capabilities. - ---- - -## 2 · Terminology - -| Term | Definition | -|------|------------| -| **Pack manifest** | Primary YAML document (`pack.yaml`) describing metadata, inputs, steps, policies, and evidence expectations. | -| **Step** | Atomic unit of work executed by Task Runner (e.g., command, API call, policy gate, approval). Steps can be sequential or parallel. | -| **Expression** | Deterministic evaluation (JMESPath-like) used for branching, templating, and conditionals. | -| **Policy gate** | Declarative rule that blocks execution until conditions are met (e.g., approval recorded, external signal received). | -| **Artifact** | File, JSON blob, or OCI object produced by a step, referenced in manifests and evidence bundles. | -| **Pack bundle** | Distribution archive (`.stella-pack.tgz` or OCI ref) containing manifest, assets, schemas, and provenance metadata. | - ---- - -## 3 · Pack Layout - -``` -my-pack/ - ├─ pack.yaml # Required manifest - ├─ assets/ # Optional static assets (scripts, templates) - ├─ schemas/ # JSON schemas for inputs/outputs - ├─ docs/ # Markdown docs rendered in Console/CLI help - ├─ provenance/ # DSSE statements, SBOM, attestations - └─ README.md # Author-facing summary (optional) -``` - -Publishing via Packs Registry or OCI ensures the directory is canonical and hashed. - ---- - -## 4 · Manifest Schema (v1.0) - -```yaml -apiVersion: stellaops.io/pack.v1 -kind: TaskPack -metadata: - name: sbom-remediation - version: 1.3.0 - description: > - Audit SBOM drift, quiet high-risk findings, and export mitigation evidence. 
- tags: [sbom, remediation, policy] - tenantVisibility: ["west-prod", "east-stage"] # optional allowlist - maintainers: - - name: Jane Doe - email: jane@example.com - license: AGPL-3.0-or-later - annotations: - imposedRuleReminder: true - -spec: - inputs: - - name: sbomBundle - type: object - schema: schemas/sbom-bundle.schema.json - required: true - - name: dryRun - type: boolean - default: false - secrets: - - name: jiraToken - scope: Packs.Run # Authority scope required - description: Optional token for ticket automation - approvals: - - id: security-review - grants: ["Packs.Approve"] - expiresAfter: PT4H - reasonTemplate: "Approve remediation for SBOM {{ inputs.sbomBundle.metadata.image }}" - steps: - - id: validate-input - run: - uses: builtin:validate-schema - with: - target: "{{ inputs.sbomBundle }}" - schema: schemas/sbom-bundle.schema.json - - id: plan-remediation - when: "{{ not inputs.dryRun }}" - run: - uses: builtin:policy-simulate - with: - sbom: "{{ inputs.sbomBundle }}" - policy: "policies/remediation.yaml" - - id: approval-gate - gate: - approval: security-review - message: "Security must approve remediation before changes apply." - - id: apply-remediation - run: - uses: builtin:cli-command - with: - command: ["stella", "policy", "promote", "--from-pack"] - - id: export-evidence - run: - uses: builtin:evidence-export - with: - includeArtifacts: ["{{ steps.plan-remediation.outputs.planPath }}"] - outputs: - - name: evidenceBundle - type: file - path: "{{ steps.export-evidence.outputs.bundlePath }}" - success: - message: "Remediation applied; evidence bundle ready." - failure: - retries: - maxAttempts: 1 - backoffSeconds: 0 - message: "Remediation failed; see evidence bundle for context." -``` - -### 4.1 Field Summary - -| Field | Description | Requirements | -|-------|-------------|--------------| -| `metadata` | Human-facing metadata; used for registry listings and RBAC hints. | `name` (DNS-1123), `version` (SemVer), `description` ≤ 2048 chars. | -| `spec.inputs` | Declarative inputs validated at plan time. | Must include type; custom schema optional but recommended. | -| `spec.secrets` | Secrets requested at runtime; never stored in pack bundle. | Each secret references Authority scope; CLI prompts or injects from profiles. | -| `spec.approvals` | Named approval gates with required grants and TTL. | ID unique per pack; `grants` map to Authority roles. | -| `spec.steps` | Execution graph; each step is `run`, `gate`, `parallel`, or `map`. | Steps must declare deterministic `uses` module and `id`. | -| `spec.outputs` | Declared artifacts for downstream automation. | `type` can be `file`, `object`, or `url`; path/expression required. | -| `success` / `failure` | Messages + retry policy. | `failure.retries.maxAttempts` + `backoffSeconds` default to 0. | - ---- - -## 5 · Step Types - -| Type | Schema | Notes | -|------|--------|-------| -| `run` | Executes a built-in module (`builtin:*`) or registry-provided module. | Modules must be deterministic, side-effect constrained, and versioned. | -| `parallel` | Executes sub-steps concurrently; `maxParallel` optional. | Results aggregated; failures trigger abort unless `continueOnError`. | -| `map` | Iterates over deterministic list; each iteration spawns sub-step. | Sequence derived from expression result; ordering stable. | -| `gate.approval` | Blocks until approval recorded with required grants. | Supports `autoExpire` to cancel run on timeout. 
| -| `gate.policy` | Calls Policy Engine to ensure criteria met (e.g., no critical findings). | Fails run if gate not satisfied. | - -`when` expressions must be pure (no side effects) and rely only on declared inputs or prior outputs. - ---- - -## 6 · Determinism & Validation - -1. **Plan phase** (`stella pack plan`, `TaskRunner.Plan` API) parses manifest, resolves expressions, validates schemas, and emits canonical graph with hash. -2. **Simulation** compares plan vs dry-run results, capturing differences in `planDiff`. Required for approvals in sealed environments. -3. **Execution** uses plan hash to ensure runtime graph matches simulation. Divergence aborts run. -4. **Evidence**: Task Runner emits DSSE attestation referencing plan hash, input digests, and output artifacts. - -Validation pipeline: - -```text -pack.yaml ──▶ schema validation ──▶ expression audit ──▶ determinism guard ──▶ signing -``` - -Packs must pass CLI validation before publishing. - ---- - -## 7 · Signatures & Provenance - -- Pack bundles are signed with **cosign** (keyless Fulcio/KMS supported) and optionally DSSE envelopes. -- `provenance/` directory stores signed statements (SLSA Build L1+) linking source repo, CI run, and manifest hash. -- Registry verifies signatures on push/pull; Task Runner refuses unsigned packs unless in development mode. -- Attestations include: - - Pack manifest digest (`sha256`) - - Pack bundle digest - - Build metadata (`git.ref`, `ci.workflow`, `cli.version`) - ---- - -## 8 · RBAC & Scopes - -Authority scopes introduced by `AUTH-PACKS-41-001`: - -| Scope | Purpose | -|-------|---------| -| `Packs.Read` | Discover packs, download manifests. | -| `Packs.Write` | Publish/update packs in registry (requires signature). | -| `Packs.Run` | Execute packs via CLI/Task Runner. | -| `Packs.Approve` | Fulfil approval gates defined in packs. | - -Task Runner enforces scopes per tenant; pack metadata may further restrict tenant visibility (`metadata.tenantVisibility`). - ---- - -## 9 · Observability & Evidence - -- Metrics: `pack_run_duration_seconds`, `pack_step_retry_total`, `pack_gate_wait_seconds`. -- Logs: Structured JSON per step with scrubbed inputs (`secretMask` applied). -- Timeline events: `pack.started`, `pack.approval.requested`, `pack.approval.granted`, `pack.completed`. -- Evidence bundle includes: - - Plan manifest (canonical JSON) - - Step transcripts (redacted) - - Artifacts manifest (sha256, size) - - Attestations references - ---- - -## 10 · Compatibility Matrix - -| CLI Version | Pack API | Task Runner | Notes | -|-------------|----------|-------------|-------| -| 2025.10.x | `pack.v1` | Runner build `>=2025.10.0` | Approvals optional, loops disabled. | -| 2025.12.x | `pack.v1` | Runner build `>=2025.12.0` | Approvals resume, secrets injection, localization strings. | -| Future | `pack.v2` | TBD | Will introduce typed outputs & partial replay (track in Epic 13). | - -CLI enforces compatibility: running pack with unsupported features yields `ERR_PACK_UNSUPPORTED`. - ---- - -## 11 · Publishing Workflow - -1. Author pack (`pack.yaml`, assets, docs). -2. Run `stella pack validate` (schema + determinism). -3. Generate bundle: `stella pack build --output my-pack.stella-pack.tgz`. -4. Sign: `cosign sign-blob my-pack.stella-pack.tgz`. -5. Publish: `stella pack push registry.example.com/org/my-pack:1.3.0`. -6. Registry verifies signature, records provenance, and exposes pack via API. 
- ---- - -## 12 · Compliance Checklist - -- [ ] Manifest schema documented for all fields, including approvals, secrets, and outputs. -- [ ] Determinism requirements outlined with plan/simulate semantics and CLI validation steps. -- [ ] Signing + provenance expectations spelled out with cosign/DSSE references. -- [ ] RBAC scopes (`Packs.*`) and tenant visibility rules captured. -- [ ] Observability (metrics, logs, evidence) described for Task Runner integrations. -- [ ] Compatibility matrix enumerates CLI/Runner requirements. -- [ ] Publishing workflow documented with CLI commands. -- [ ] Imposed rule reminder included at top of document. - ---- - -*Last updated: 2025-10-27 (Sprint 43).* - +> **Imposed rule:** Work of this type or tasks of this type on this component must also be applied everywhere else it should be applied. + +# Task Pack Specification (Sprint 43 Draft) + +The Task Pack specification defines a deterministic, auditable format that enables operators to encode multi-step maintenance, validation, and deployment workflows. Packs are executed by the Task Runner service, distributed through the Packs Registry, and invoked via the StellaOps CLI (`stella pack ...`) or Orchestrator integrations. + +--- + +## 1 · Goals & Scope + +- **Deterministic execution.** Identical inputs yield identical run graphs, output manifests, and evidence bundles across environments (online, sealed, or offline). +- **Secure-by-default.** Pack metadata must capture provenance, signatures, RBAC requirements, and secret usage; execution enforces tenant scopes and approvals. +- **Portable.** Packs are distributed as signed OCI artifacts or tarballs that work in connected and air-gapped deployments, including Offline Kit mirrors. +- **Composable.** Packs can reference reusable steps, expressions, and shared libraries without sacrificing determinism or auditability. + +Non-goals: full-blown workflow orchestration, unbounded scripting, or remote code injection. All logic is declarative and constrained to Task Runner capabilities. + +--- + +## 2 · Terminology + +| Term | Definition | +|------|------------| +| **Pack manifest** | Primary YAML document (`pack.yaml`) describing metadata, inputs, steps, policies, and evidence expectations. | +| **Step** | Atomic unit of work executed by Task Runner (e.g., command, API call, policy gate, approval). Steps can be sequential or parallel. | +| **Expression** | Deterministic evaluation (JMESPath-like) used for branching, templating, and conditionals. | +| **Policy gate** | Declarative rule that blocks execution until conditions are met (e.g., approval recorded, external signal received). | +| **Artifact** | File, JSON blob, or OCI object produced by a step, referenced in manifests and evidence bundles. | +| **Pack bundle** | Distribution archive (`.stella-pack.tgz` or OCI ref) containing manifest, assets, schemas, and provenance metadata. | + +--- + +## 3 · Pack Layout + +``` +my-pack/ + ├─ pack.yaml # Required manifest + ├─ assets/ # Optional static assets (scripts, templates) + ├─ schemas/ # JSON schemas for inputs/outputs + ├─ docs/ # Markdown docs rendered in Console/CLI help + ├─ provenance/ # DSSE statements, SBOM, attestations + └─ README.md # Author-facing summary (optional) +``` + +Publishing via Packs Registry or OCI ensures the directory is canonical and hashed. 
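+
+For orientation, a hedged sketch of how an author might exercise this layout locally before publishing; the directory path and the `--bundle` argument form are illustrative, and the canonical steps are listed in the Publishing Workflow section below:
+
+```bash
+# Validate the manifest, schemas, and expressions before building.
+stella pack validate --bundle ./my-pack
+
+# Build the distributable bundle and record the digest the registry will verify.
+stella pack build --output my-pack.stella-pack.tgz
+sha256sum my-pack.stella-pack.tgz
+```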
+ +--- + +## 4 · Manifest Schema (v1.0) + +```yaml +apiVersion: stellaops.io/pack.v1 +kind: TaskPack +metadata: + name: sbom-remediation + version: 1.3.0 + description: > + Audit SBOM drift, quiet high-risk findings, and export mitigation evidence. + tags: [sbom, remediation, policy] + tenantVisibility: ["west-prod", "east-stage"] # optional allowlist + maintainers: + - name: Jane Doe + email: jane@example.com + license: AGPL-3.0-or-later + annotations: + imposedRuleReminder: true + +spec: + inputs: + - name: sbomBundle + type: object + schema: schemas/sbom-bundle.schema.json + required: true + - name: dryRun + type: boolean + default: false + secrets: + - name: jiraToken + scope: Packs.Run # Authority scope required + description: Optional token for ticket automation + approvals: + - id: security-review + grants: ["Packs.Approve"] + expiresAfter: PT4H + reasonTemplate: "Approve remediation for SBOM {{ inputs.sbomBundle.metadata.image }}" + steps: + - id: validate-input + run: + uses: builtin:validate-schema + with: + target: "{{ inputs.sbomBundle }}" + schema: schemas/sbom-bundle.schema.json + - id: plan-remediation + when: "{{ not inputs.dryRun }}" + run: + uses: builtin:policy-simulate + with: + sbom: "{{ inputs.sbomBundle }}" + policy: "policies/remediation.yaml" + - id: approval-gate + gate: + approval: security-review + message: "Security must approve remediation before changes apply." + - id: apply-remediation + run: + uses: builtin:cli-command + with: + command: ["stella", "policy", "promote", "--from-pack"] + - id: export-evidence + run: + uses: builtin:evidence-export + with: + includeArtifacts: ["{{ steps.plan-remediation.outputs.planPath }}"] + outputs: + - name: evidenceBundle + type: file + path: "{{ steps.export-evidence.outputs.bundlePath }}" + success: + message: "Remediation applied; evidence bundle ready." + failure: + retries: + maxAttempts: 1 + backoffSeconds: 0 + message: "Remediation failed; see evidence bundle for context." +``` + +### 4.1 Field Summary + +| Field | Description | Requirements | +|-------|-------------|--------------| +| `metadata` | Human-facing metadata; used for registry listings and RBAC hints. | `name` (DNS-1123), `version` (SemVer), `description` ≤ 2048 chars. | +| `spec.inputs` | Declarative inputs validated at plan time. | Must include type; custom schema optional but recommended. | +| `spec.secrets` | Secrets requested at runtime; never stored in pack bundle. | Each secret references Authority scope; CLI prompts or injects from profiles. | +| `spec.approvals` | Named approval gates with required grants and TTL. | ID unique per pack; `grants` map to Authority roles. | +| `spec.steps` | Execution graph; each step is `run`, `gate`, `parallel`, or `map`. | Steps must declare deterministic `uses` module and `id`. | +| `spec.outputs` | Declared artifacts for downstream automation. | `type` can be `file`, `object`, or `url`; path/expression required. | +| `success` / `failure` | Messages + retry policy. | `failure.retries.maxAttempts` + `backoffSeconds` default to 0. | + +--- + +## 5 · Step Types + +| Type | Schema | Notes | +|------|--------|-------| +| `run` | Executes a built-in module (`builtin:*`) or registry-provided module. | Modules must be deterministic, side-effect constrained, and versioned. | +| `parallel` | Executes sub-steps concurrently; `maxParallel` optional. | Results aggregated; failures trigger abort unless `continueOnError`. | +| `map` | Iterates over deterministic list; each iteration spawns sub-step. 
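+
+As a worked illustration of supplying the declared inputs at plan time, a minimal sketch follows; the top-level-key-per-input JSON shape and the image reference are assumptions for illustration only:
+
+```bash
+# inputs.json pairs each declared input name with a value matching its schema
+# (sbomBundle is abbreviated; the real object must satisfy schemas/sbom-bundle.schema.json).
+cat > inputs.json <<'EOF'
+{
+  "sbomBundle": { "metadata": { "image": "registry.example.com/app:1.2.3" } },
+  "dryRun": false
+}
+EOF
+
+# Plan phase resolves expressions, validates inputs, and emits the canonical plan hash.
+stella pack plan --inputs inputs.json
+```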
| Sequence derived from expression result; ordering stable. | +| `gate.approval` | Blocks until approval recorded with required grants. | Supports `autoExpire` to cancel run on timeout. | +| `gate.policy` | Calls Policy Engine to ensure criteria met (e.g., no critical findings). | Fails run if gate not satisfied. | + +`when` expressions must be pure (no side effects) and rely only on declared inputs or prior outputs. + +--- + +## 6 · Determinism & Validation + +1. **Plan phase** (`stella pack plan`, `TaskRunner.Plan` API) parses manifest, resolves expressions, validates schemas, and emits canonical graph with hash. +2. **Simulation** compares plan vs dry-run results, capturing differences in `planDiff`. Required for approvals in sealed environments. +3. **Execution** uses plan hash to ensure runtime graph matches simulation. Divergence aborts run. +4. **Evidence**: Task Runner emits DSSE attestation referencing plan hash, input digests, and output artifacts. + +Validation pipeline: + +```text +pack.yaml ──▶ schema validation ──▶ expression audit ──▶ determinism guard ──▶ signing +``` + +Packs must pass CLI validation before publishing. + +--- + +## 7 · Signatures & Provenance + +- Pack bundles are signed with **cosign** (keyless Fulcio/KMS supported) and optionally DSSE envelopes. +- `provenance/` directory stores signed statements (SLSA Build L1+) linking source repo, CI run, and manifest hash. +- Registry verifies signatures on push/pull; Task Runner refuses unsigned packs unless in development mode. +- Attestations include: + - Pack manifest digest (`sha256`) + - Pack bundle digest + - Build metadata (`git.ref`, `ci.workflow`, `cli.version`) + +--- + +## 8 · RBAC & Scopes + +Authority scopes introduced by `AUTH-PACKS-41-001`: + +| Scope | Purpose | +|-------|---------| +| `Packs.Read` | Discover packs, download manifests. | +| `Packs.Write` | Publish/update packs in registry (requires signature). | +| `Packs.Run` | Execute packs via CLI/Task Runner. | +| `Packs.Approve` | Fulfil approval gates defined in packs. | + +Task Runner enforces scopes per tenant; pack metadata may further restrict tenant visibility (`metadata.tenantVisibility`). + +--- + +## 9 · Observability & Evidence + +- Metrics: `pack_run_duration_seconds`, `pack_step_retry_total`, `pack_gate_wait_seconds`. +- Logs: Structured JSON per step with scrubbed inputs (`secretMask` applied). +- Timeline events: `pack.started`, `pack.approval.requested`, `pack.approval.granted`, `pack.completed`. +- Evidence bundle includes: + - Plan manifest (canonical JSON) + - Step transcripts (redacted) + - Artifacts manifest (sha256, size) + - Attestations references + +--- + +## 10 · Compatibility Matrix + +| CLI Version | Pack API | Task Runner | Notes | +|-------------|----------|-------------|-------| +| 2025.10.x | `pack.v1` | Runner build `>=2025.10.0` | Approvals optional, loops disabled. | +| 2025.12.x | `pack.v1` | Runner build `>=2025.12.0` | Approvals resume, secrets injection, localization strings. | +| Future | `pack.v2` | TBD | Will introduce typed outputs & partial replay (track in Epic 13). | + +CLI enforces compatibility: running pack with unsupported features yields `ERR_PACK_UNSUPPORTED`. + +--- + +## 11 · Publishing Workflow + +1. Author pack (`pack.yaml`, assets, docs). +2. Run `stella pack validate` (schema + determinism). +3. Generate bundle: `stella pack build --output my-pack.stella-pack.tgz`. +4. Sign: `cosign sign-blob my-pack.stella-pack.tgz`. +5. Publish: `stella pack push registry.example.com/org/my-pack:1.3.0`. 
+6. Registry verifies signature, records provenance, and exposes pack via API. + +--- + +## 12 · Compliance Checklist + +- [ ] Manifest schema documented for all fields, including approvals, secrets, and outputs. +- [ ] Determinism requirements outlined with plan/simulate semantics and CLI validation steps. +- [ ] Signing + provenance expectations spelled out with cosign/DSSE references. +- [ ] RBAC scopes (`Packs.*`) and tenant visibility rules captured. +- [ ] Observability (metrics, logs, evidence) described for Task Runner integrations. +- [ ] Compatibility matrix enumerates CLI/Runner requirements. +- [ ] Publishing workflow documented with CLI commands. +- [ ] Imposed rule reminder included at top of document. + +--- + +*Last updated: 2025-10-27 (Sprint 43).* + diff --git a/docs/ui/admin.md b/docs/ui/admin.md index c95d6078..00fd03e4 100644 --- a/docs/ui/admin.md +++ b/docs/ui/admin.md @@ -1,174 +1,174 @@ -# StellaOps Console - Admin Workspace - -> **Audience:** Authority Guild, Console admins, support engineers, tenant operators. -> **Scope:** Tenant management, role mapping, token lifecycle, integrations, fresh-auth prompts, security guardrails, offline behaviour, and compliance checklist for Sprint 23. - -The Admin workspace centralises Authority-facing controls: tenants, roles, API clients, tokens, and integrations. It surfaces RBAC mappings, token issuance logs, and bootstrap flows with the same offline-first guarantees as the rest of the console. - ---- - -## 1. Access and prerequisites - -- **Route:** `/console/admin` with sub-routes for tenants, users, roles, tokens, integrations, audit, and bootstrap. -- **Scopes:** - - `ui.admin` (base access) - - `authority:tenants.read` / `authority:tenants.write` - - `authority:roles.read` / `authority:roles.write` - - `authority:tokens.read` / `authority:tokens.revoke` - - `authority:clients.read` / `authority:clients.write` - - `authority:audit.read` (view audit trails) -- **Fresh-auth:** Sensitive actions (token revoke, bootstrap key issue, signing key rotation) require fresh-auth challenge. -- **Dependencies:** Authority service (`/internal/*` APIs), revocation export, JWKS, licensing posture endpoint, integration config store. - ---- - -## 2. Layout overview - -``` -+--------------------------------------------------------------------+ -| Header: Tenant picker - environment badge - security banner | -+--------------------------------------------------------------------+ -| Tabs: Tenants | Roles & Scopes | Users & Tokens | Integrations | Audit | -+--------------------------------------------------------------------+ -| Sidebar: Quick actions (Invite user, Create client, Export revocations) -| Main panel varies per tab | -+--------------------------------------------------------------------+ -``` - -The header includes offline status indicator and link to Authority health page. - ---- - -## 3. Tenants tab - -| Field | Description | -|-------|-------------| -| **Tenant ID** | Lowercase slug used in tokens and client registrations. | -| **Display name** | Human-friendly name. | -| **Status** | `active`, `suspended`, `pending`. Suspended tenants block token issuance. | -| **Isolation mode** | `dedicated`, `shared`, or `sandbox`. Drives RBAC defaults. | -| **Default roles** | Roles automatically assigned to new users within the tenant. | -| **Offline snapshots** | Latest snapshot timestamp, checksum, operator. | - -Actions: - -- `Create tenant` (requires `authority:tenants.write`). 
Form captures display name, slug, isolation mode, default roles, bootstrap contact, optional plan metadata. -- `Suspend/Resume` toggles token issuance and surfaces audit entry. -- `Export tenant bundle` downloads tenant-specific revocation + JWKS package for air-gap distribution. -- CLI parity: `stella auth tenant create --tenant `, `stella auth tenant suspend --tenant `. - ---- - -## 4. Roles & scopes tab - -- Table lists roles with mapped scopes and audiences. -- Inline editor supports adding/removing scopes (with validation). -- Scope categories: UI, Scanner, Concelier, Excititor, Policy, Attestor, Notifier, Scheduler, Offline kit. -- Visual diff shows impact of changes on linked clients/users before committing. -- "Effective permissions" view summarises what each role grants per service. -- CLI parity: `stella auth role update --role ui.admin --add-scope authority:tokens.revoke`. - ---- - -## 5. Users & tokens tab - -Sections: - -1. **User list** - identity, tenant, roles, last login, MFA status. Actions include reset password (if plugin supports), enforce fresh-auth, disable user. -2. **Token inventory** - lists active tokens (access/refresh/device). Columns: token ID, type, subject, audience, issued at, expires, status. Toggle to show revoked tokens. -3. **Token details** drawer shows claims, sender constraint (`cnf`), issuance metadata, revocation history. -4. **Revoke token** action requires fresh-auth and prompts for reason (incident, user request, compromise). -5. **Bulk revoke** (per tenant or role) triggers Authority revocation export to ensure downstream services purge caches. - -Audit entries appear for every user/token change. CLI parity: `stella auth token revoke --token `. - ---- - -## 6. Integrations tab - -- **Authority clients** list (service accounts) with grant types, allowed scopes, DPoP/mTLS settings, tenant hints, and rotation status. -- **Bootstrap bundles** - downloadable templates for new clients/users; includes configuration YAML and CLI instructions. -- **External IdP connectors** (optional) - displays status for SAML/OIDC plugins; includes metadata upload field and test login result. -- **Licensing posture** - read-only panel summarising plan tier, entitlement expiry, and contact info (pulled from licensing service). -- **Notifications** - optional webhook configuration for token events (on revoke, on failure). -- CLI parity: `stella auth client create --client concelier --grant client_credentials --tenant prod`. - ---- - -## 7. Audit tab - -- Timeline view of administrative events (user changes, role updates, token revocations, bootstrap actions, key rotations). -- Filters: event type, actor, tenant, scope, correlation ID. -- Export button downloads CSV/JSON for SOC ingestion. -- "Open in logs" copies search query pre-populated with correlation IDs. -- CLI parity: `stella auth audit export --from 2025-10-20`. - ---- - -## 8. Fresh-auth prompts - -- High-risk actions (revoke all tokens, rotate signing key, create privileged client) trigger modal requiring credential re-entry or hardware key touch. -- Fresh-auth window is 5 minutes; countdown displayed. -- UI surface indicates when current session is outside fresh-auth window; sensitive buttons disabled until re-auth. -- Audit log records fresh-auth events (`authority.fresh_auth.start`, `authority.fresh_auth.success`). -- CLI parity: `stella auth fresh-auth` obtains short-lived token for scriptable flows. - ---- - -## 9. 
Security guardrails - -- DPoP enforcement reminders for UI clients; console warns if any client lacks sender constraint. -- mTLS enforcement summary for high-value audiences (Signer/Attestor). -- Token policy checklists (access token TTL, refresh token policy) with alerts when deviating from defaults. -- Revocation bundle export status (timestamp, digest, operator). -- Key rotation panel showing current `kid`, last rotation, next scheduled rotation, and manual trigger button (ties into Authority rotate API). -- CLI parity: `stella auth signing rotate` for script automation. - ---- - -## 10. Offline and air-gap behaviour - -- Offline banner indicates snapshot version; disables direct remote calls. -- Tenant/role edits queue change manifests; UI instructs users to apply via CLI (`stella auth apply --bundle `). -- Token inventory shows snapshot state; revoke buttons generate scripts for offline Authority host. -- Integrations tab offers manual download/upload for client definitions and IdP metadata. -- Audit exports default to local storage with checksum output for transfer. - ---- - -## 11. Screenshot coordination - -- Placeholders: - - `![Admin tenants placeholder](../assets/ui/admin/tenants-placeholder.png)` - - `![Admin roles placeholder](../assets/ui/admin/roles-placeholder.png)` - - `![Admin tokens placeholder](../assets/ui/admin/tokens-placeholder.png)` -- Capture real screenshots with Authority Guild once Sprint 23 UI is final (tracked in `#console-screenshots`, 2025-10-26 entry). Provide both light and dark theme variants. - ---- - -## 12. References - -- `/docs/ARCHITECTURE_AUTHORITY.md` - Authority architecture. -- `/docs/11_AUTHORITY.md` - Authority service overview. -- `/docs/security/authority-scopes.md` - scope definitions. -- `/docs/ui/policies.md` - policy approvals requiring fresh-auth. -- `/docs/ui/console-overview.md` - navigation shell. -- `/docs/cli/authentication.md` (pending) and `/docs/cli/policy.md` for CLI flows. -- `/docs/ops/scheduler-runbook.md` for integration with scheduler token rotation. - ---- - -## 13. Compliance checklist - -- [ ] Tenants, roles/scopes, and token management documented with actions and CLI parity. -- [ ] Integrations and audit views covered. -- [ ] Fresh-auth prompts and guardrails described. -- [ ] Security controls (DPoP, mTLS, key rotation, revocations) captured. -- [ ] Offline behaviour explained with script guidance. -- [ ] Screenshot placeholders and coordination noted. -- [ ] References validated. - ---- - -*Last updated: 2025-10-26 (Sprint 23).* - +# StellaOps Console - Admin Workspace + +> **Audience:** Authority Guild, Console admins, support engineers, tenant operators. +> **Scope:** Tenant management, role mapping, token lifecycle, integrations, fresh-auth prompts, security guardrails, offline behaviour, and compliance checklist for Sprint 23. + +The Admin workspace centralises Authority-facing controls: tenants, roles, API clients, tokens, and integrations. It surfaces RBAC mappings, token issuance logs, and bootstrap flows with the same offline-first guarantees as the rest of the console. + +--- + +## 1. Access and prerequisites + +- **Route:** `/console/admin` with sub-routes for tenants, users, roles, tokens, integrations, audit, and bootstrap. 
+- **Scopes:** + - `ui.admin` (base access) + - `authority:tenants.read` / `authority:tenants.write` + - `authority:roles.read` / `authority:roles.write` + - `authority:tokens.read` / `authority:tokens.revoke` + - `authority:clients.read` / `authority:clients.write` + - `authority:audit.read` (view audit trails) +- **Fresh-auth:** Sensitive actions (token revoke, bootstrap key issue, signing key rotation) require fresh-auth challenge. +- **Dependencies:** Authority service (`/internal/*` APIs), revocation export, JWKS, licensing posture endpoint, integration config store. + +--- + +## 2. Layout overview + +``` ++--------------------------------------------------------------------+ +| Header: Tenant picker - environment badge - security banner | ++--------------------------------------------------------------------+ +| Tabs: Tenants | Roles & Scopes | Users & Tokens | Integrations | Audit | ++--------------------------------------------------------------------+ +| Sidebar: Quick actions (Invite user, Create client, Export revocations) +| Main panel varies per tab | ++--------------------------------------------------------------------+ +``` + +The header includes offline status indicator and link to Authority health page. + +--- + +## 3. Tenants tab + +| Field | Description | +|-------|-------------| +| **Tenant ID** | Lowercase slug used in tokens and client registrations. | +| **Display name** | Human-friendly name. | +| **Status** | `active`, `suspended`, `pending`. Suspended tenants block token issuance. | +| **Isolation mode** | `dedicated`, `shared`, or `sandbox`. Drives RBAC defaults. | +| **Default roles** | Roles automatically assigned to new users within the tenant. | +| **Offline snapshots** | Latest snapshot timestamp, checksum, operator. | + +Actions: + +- `Create tenant` (requires `authority:tenants.write`). Form captures display name, slug, isolation mode, default roles, bootstrap contact, optional plan metadata. +- `Suspend/Resume` toggles token issuance and surfaces audit entry. +- `Export tenant bundle` downloads tenant-specific revocation + JWKS package for air-gap distribution. +- CLI parity: `stella auth tenant create --tenant `, `stella auth tenant suspend --tenant `. + +--- + +## 4. Roles & scopes tab + +- Table lists roles with mapped scopes and audiences. +- Inline editor supports adding/removing scopes (with validation). +- Scope categories: UI, Scanner, Concelier, Excititor, Policy, Attestor, Notifier, Scheduler, Offline kit. +- Visual diff shows impact of changes on linked clients/users before committing. +- "Effective permissions" view summarises what each role grants per service. +- CLI parity: `stella auth role update --role ui.admin --add-scope authority:tokens.revoke`. + +--- + +## 5. Users & tokens tab + +Sections: + +1. **User list** - identity, tenant, roles, last login, MFA status. Actions include reset password (if plugin supports), enforce fresh-auth, disable user. +2. **Token inventory** - lists active tokens (access/refresh/device). Columns: token ID, type, subject, audience, issued at, expires, status. Toggle to show revoked tokens. +3. **Token details** drawer shows claims, sender constraint (`cnf`), issuance metadata, revocation history. +4. **Revoke token** action requires fresh-auth and prompts for reason (incident, user request, compromise). +5. **Bulk revoke** (per tenant or role) triggers Authority revocation export to ensure downstream services purge caches. + +Audit entries appear for every user/token change. 
CLI parity: `stella auth token revoke --token `. + +--- + +## 6. Integrations tab + +- **Authority clients** list (service accounts) with grant types, allowed scopes, DPoP/mTLS settings, tenant hints, and rotation status. +- **Bootstrap bundles** - downloadable templates for new clients/users; includes configuration YAML and CLI instructions. +- **External IdP connectors** (optional) - displays status for SAML/OIDC plugins; includes metadata upload field and test login result. +- **Licensing posture** - read-only panel summarising plan tier, entitlement expiry, and contact info (pulled from licensing service). +- **Notifications** - optional webhook configuration for token events (on revoke, on failure). +- CLI parity: `stella auth client create --client concelier --grant client_credentials --tenant prod`. + +--- + +## 7. Audit tab + +- Timeline view of administrative events (user changes, role updates, token revocations, bootstrap actions, key rotations). +- Filters: event type, actor, tenant, scope, correlation ID. +- Export button downloads CSV/JSON for SOC ingestion. +- "Open in logs" copies search query pre-populated with correlation IDs. +- CLI parity: `stella auth audit export --from 2025-10-20`. + +--- + +## 8. Fresh-auth prompts + +- High-risk actions (revoke all tokens, rotate signing key, create privileged client) trigger modal requiring credential re-entry or hardware key touch. +- Fresh-auth window is 5 minutes; countdown displayed. +- UI surface indicates when current session is outside fresh-auth window; sensitive buttons disabled until re-auth. +- Audit log records fresh-auth events (`authority.fresh_auth.start`, `authority.fresh_auth.success`). +- CLI parity: `stella auth fresh-auth` obtains short-lived token for scriptable flows. + +--- + +## 9. Security guardrails + +- DPoP enforcement reminders for UI clients; console warns if any client lacks sender constraint. +- mTLS enforcement summary for high-value audiences (Signer/Attestor). +- Token policy checklists (access token TTL, refresh token policy) with alerts when deviating from defaults. +- Revocation bundle export status (timestamp, digest, operator). +- Key rotation panel showing current `kid`, last rotation, next scheduled rotation, and manual trigger button (ties into Authority rotate API). +- CLI parity: `stella auth signing rotate` for script automation. + +--- + +## 10. Offline and air-gap behaviour + +- Offline banner indicates snapshot version; disables direct remote calls. +- Tenant/role edits queue change manifests; UI instructs users to apply via CLI (`stella auth apply --bundle `). +- Token inventory shows snapshot state; revoke buttons generate scripts for offline Authority host. +- Integrations tab offers manual download/upload for client definitions and IdP metadata. +- Audit exports default to local storage with checksum output for transfer. + +--- + +## 11. Screenshot coordination + +- Placeholders: + - `![Admin tenants placeholder](../assets/ui/admin/tenants-placeholder.png)` + - `![Admin roles placeholder](../assets/ui/admin/roles-placeholder.png)` + - `![Admin tokens placeholder](../assets/ui/admin/tokens-placeholder.png)` +- Capture real screenshots with Authority Guild once Sprint 23 UI is final (tracked in `#console-screenshots`, 2025-10-26 entry). Provide both light and dark theme variants. + +--- + +## 12. References + +- `/docs/ARCHITECTURE_AUTHORITY.md` - Authority architecture. +- `/docs/11_AUTHORITY.md` - Authority service overview. 
+- `/docs/security/authority-scopes.md` - scope definitions. +- `/docs/ui/policies.md` - policy approvals requiring fresh-auth. +- `/docs/ui/console-overview.md` - navigation shell. +- `/docs/cli/authentication.md` (pending) and `/docs/cli/policy.md` for CLI flows. +- `/docs/ops/scheduler-runbook.md` for integration with scheduler token rotation. + +--- + +## 13. Compliance checklist + +- [ ] Tenants, roles/scopes, and token management documented with actions and CLI parity. +- [ ] Integrations and audit views covered. +- [ ] Fresh-auth prompts and guardrails described. +- [ ] Security controls (DPoP, mTLS, key rotation, revocations) captured. +- [ ] Offline behaviour explained with script guidance. +- [ ] Screenshot placeholders and coordination noted. +- [ ] References validated. + +--- + +*Last updated: 2025-10-26 (Sprint 23).* + diff --git a/docs/ui/advisories-and-vex.md b/docs/ui/advisories-and-vex.md index 722a6940..aa58daa6 100644 --- a/docs/ui/advisories-and-vex.md +++ b/docs/ui/advisories-and-vex.md @@ -1,199 +1,199 @@ -# StellaOps Console - Advisories and VEX - -> **Audience:** Console UX team, Concelier and Excititor guilds, support and compliance engineers. -> **Scope:** Advisory aggregation UX, VEX consensus display, conflict indicators, raw document viewer, provenance banners, CLI parity, and Aggregation-Only Contract (AOC) guardrails for Sprint 23. - -The Advisories and VEX surfaces expose Concelier and Excititor outputs without mutating the underlying data. Operators can review upstream statements, check consensus summaries, inspect conflicts, and hand off evidence to downstream tooling while staying within the Aggregation-Only Contract. - ---- - -## 1. Access and prerequisites - -- **Routes:** - - `/console/advisories` (advisory list and detail) - - `/console/vex` (VEX consensus and raw claim explorer) -- **Scopes:** `advisory.read` and `vex.read` (base access), `advisory.verify` / `vex.verify` for verification actions, `downloads.read` for evidence exports. -- **Feature flags:** `advisoryExplorer.enabled`, `vexExplorer.enabled`, `aggregation.conflictIndicators`. -- **Dependencies:** Concelier WebService (aggregation API + delta metrics), Excititor WebService (consensus API + conflict feeds), Policy Engine explain hints (optional link-outs), Authority tenant enforcement. -- **Offline behaviour:** Uses Offline Kit snapshots when gateway is in sealed mode; verify buttons queue until connectivity resumes. - ---- - -## 2. Layout overview - -``` -+---------------------------------------------------------------------+ -| Header: Tenant badge - global filters - status ticker - actions | -+---------------------------------------------------------------------+ -| Left rail: Saved views - provider filters - verification queue | -+---------------------------------------------------------------------+ -| Main split pane | -| - Advisories tab (grid + detail drawer) | -| - VEX tab (consensus table + claim drawer) | -| Tabs remember last active view per tenant. | -+---------------------------------------------------------------------+ -``` - -The header reuses console-wide context chips (`Tenant`, `Severity`, `Source`, `Time`) and the status ticker that streams Concelier and Excititor deltas. - ---- - -## 3. Advisory aggregation view - -| Element | Description | -|---------|-------------| -| **Grid columns** | Vulnerability key (CVE/GHSA/vendor), Title, Source set, Last merged, Severity badge, KEV flag, Affected product count, Merge hash. 
| -| **Source chips** | Show contributing providers (NVD, Red Hat, Debian, vendor PSIRT). Hover reveals precedence order and timestamps. | -| **Severity** | Displays the highest severity declared by any source; tooltip lists per-source severities and vectors. | -| **KEV / Exploit status** | Badge highlights known exploited status from Concelier enrichment; links to KEV reference. | -| **Merge hash** | Deterministic hash from Concelier `merge_event`. Clicking copies hash and opens provenance banner. | -| **Filters** | Vulnerability identifier search, provider multi-select, severity picker, KEV toggle, affected product range slider, time window. | -| **List actions** | `Open detail`, `Copy CLI` (`stella advisory show ...`), `Compare sources`, `Queue verify`. | - -The grid virtualises up to 15,000 advisories per tenant. Beyond that, the UI engages server-side pagination with cursor hints supplied by Concelier. - ---- - -## 4. Advisory detail drawer - -Sections within the drawer: - -1. **Summary cards** (title, published/modified timestamps, advisory merge hash, total sources, exploited flag). -2. **Sources timeline** listing each contributing document with signature status, fetched timestamps, precedence rank, and quick links to raw view. -3. **Affected products** table (product key, introduced/fixed, range semantics, distro qualifiers, notes). Column toggles allow switching between SemVer and distro notation. -4. **Conflict indicators** show when sources disagree on fixed versions, severity, or affected sets. Each conflict row links to an explainer panel that describes the winning value, losing sources, and precedence rule. -5. **References** collapsible list (patches, advisories, exploits). -6. **Raw JSON** viewer (read-only) using canonical Concelier payload. Users can copy JSON or download via `GET /console/advisories/raw/{id}`. -7. **CLI parity** card with commands: - - `stella advisory show --tenant --vuln ` - - `stella advisory sources --tenant --vuln ` - - `stella advisory export --tenant --vuln --format cdx-json` - -Provenance banner at the top indicates whether all sources are signed, partially signed, or unsigned, referencing AOC guardrails. Unsigned sources trigger a warning and link to the verification checklist. - ---- - -## 5. VEX explorer - -| Feature | Description | -|---------|-------------| -| **Consensus table** | Rows keyed by `(vulnId, productKey)` with rollup status (affected, not affected, fixed, under investigation), confidence score, provider count, and last evaluation timestamp. | -| **Status badges** | Colour-coded (red affected, green not affected, blue fixed, amber under investigation). Tooltips show justification and policy revision used. | -| **Provider breakdown** | Hover or expand to see source list with accepted/ignored flag, status, justification code, signature state, weight. | -| **Filters** | Product search (PURL), status filter, provider filter, justification codes, confidence threshold slider. | -| **Saved views** | Prebuilt presets: `Vendor consensus`, `Distro overrides`, `Conflicts`, `Pending investigation`. | - ---- - -## 6. VEX detail drawer - -Tabs within the drawer: - -- **Consensus summary**: Restates rollup status, policy revision, confidence benchmarks, and referencing runs. -- **Claims list**: Every raw claim from Excititor with provenance, signature result, justification, supersedes chain, evidence snippets. Claims are grouped by provider tier (vendor, distro, ecosystem, CERT). 
-- **Conflict explainers**: For conflicting claims, shows why a claim was ignored (weight, stale timestamp, failing justification gate). Includes inline diff between competing claims. -- **Events**: Timeline of claim arrivals and consensus evaluations with correlation IDs, accessible for debugging. -- **Raw JSON**: Canonical `VexClaim` or `VexConsensus` payloads with copy/download. CLI parity callouts: - - `stella vex consensus show --tenant --vuln --product ` - - `stella vex claims show --tenant --vuln --provider ` - ---- - -## 7. Raw viewers and provenance - -- Raw viewers display canonical payloads with syntax highlighting and copy-as-JSON support. -- Provenance banner presents: source URI, document digest, signature status, fetch timestamps, collector version. -- Users can open raw documents in a modal that includes: - - `sha256` digest with copy button - - Signature verification summary (passing keys, missing signatures, errors) - - `Download DSSE bundle` button when the document is attested - - `Open in logs` link that copies search query (`correlationId=...`) for log aggregation tools. - -All raw views are read-only to maintain Aggregation-Only guarantees. - ---- - -## 8. Conflict indicators and aggregation-not-merge UX - -- Concelier retains every source; the UI surfaces conflicts rather than merging them. -- Conflict badges appear in grids and detail views when sources disagree on affected ranges, fixed versions, severity, or exploit flags. -- Clicking a badge opens the conflict explainer panel (powered by Concelier merge metadata) that lists winning/losing sources, ranks, and reasoning (e.g., "Vendor PSIRT overrides ecosystem advisory"). -- Excititor conflicts highlight discarded claims with reasons (stale, failing justification, low weight). Operators can override weights downstream via Policy Engine if needed. -- UI copy explicitly reminds users that policy decisions happen elsewhere; these views show aggregated facts only. - ---- - -## 9. Verification workflows - -- **Run verify** buttons call Concelier or Excititor verification endpoints (`POST /console/advisories/verify`, `POST /console/vex/verify`) scoped by tenant and source filters. -- Verification results appear as banners summarising documents checked, signatures verified, and guard violations. -- Failed verifications show actionable error IDs (`ERR_AOC_00x`), matching CLI output. -- Verification history accessible via the status ticker dropdown; entries include operator, scope, and correlation IDs. - ---- - -## 10. Exports and automation - -- Advisory tab exposes export actions: `Download normalized advisory`, `Download affected products CSV`, `Download source bundle` (raw documents packaged with manifest). -- VEX tab supports exports for consensus snapshots, raw claims, and provider deltas. -- Export manifests include merge hash or consensus digest, tenant ID, timestamp, and signature state. -- CLI parity snippets accompany each export (e.g., `stella advisory export`, `stella vex export`). -- Automation: copy buttons for webhook subscription (`/downloads/hooks/subscribe`) and ORAS push commands when using remote registries. - ---- - -## 11. Observability and SSE updates - -- Status ticker shows ingest lag (`advisory_delta_minutes`, `vex_delta_minutes`), last merge event hash, and verification queue depth. -- Advisory and VEX grids refresh via SSE channels; updates animate row badges (new source, conflict resolved). -- Metrics surfaced in drawers: ingestion age, signature pass rate, consensus evaluation duration. 
-- Errors display correlation IDs linking to Concelier/Excititor logs. - ---- - -## 12. Offline and air-gap behaviour - -- When offline, list views display snapshot badge, staleness timer, and disable real-time verification. -- Raw downloads reference local snapshot directories and include checksum instructions. -- Exports queue locally; UI offers `Copy to removable media` instructions. -- CLI parity switches to offline commands (`--offline`, `--snapshot`). -- Tenant picker hides tenants not present in the snapshot to avoid partial data views. - ---- - -## 13. Screenshot coordination - -- Placeholders: - - `![Advisory grid placeholder](../assets/ui/advisories/grid-placeholder.png)` - - `![VEX consensus placeholder](../assets/ui/advisories/vex-placeholder.png)` -- Coordinate with Console Guild to capture updated screenshots (dark and light themes) once Sprint 23 build candidate is tagged. Tracking in Slack channel `#console-screenshots` (entry 2025-10-26). - ---- - -## 14. References - -- `/docs/ui/console-overview.md` - shell, filters, tenant model. -- `/docs/ui/navigation.md` - command palette, deep-link schema. -- `/docs/ingestion/aggregation-only-contract.md` - AOC guardrails. -- `/docs/architecture/CONCELIER.md` - merge rules, provenance. -- `/docs/architecture/EXCITITOR.md` - VEX consensus model. -- `/docs/security/console-security.md` - scopes, DPoP, CSP. -- `/docs/cli-vs-ui-parity.md` - CLI equivalence matrix. - ---- - -## 15. Compliance checklist - -- [ ] Advisory grid columns, filters, and merge hash behaviour documented. -- [ ] VEX consensus view covers status badges, provider breakdown, and filters. -- [ ] Raw viewer and provenance banners explained with AOC alignment. -- [ ] Conflict indicators and explainers tied to aggregation-not-merge rules. -- [ ] Verification workflow and CLI parity documented. -- [ ] Offline behaviour and automation paths captured. -- [ ] Screenshot placeholders and coordination notes recorded. -- [ ] References validated. - ---- - -*Last updated: 2025-10-26 (Sprint 23).* - +# StellaOps Console - Advisories and VEX + +> **Audience:** Console UX team, Concelier and Excititor guilds, support and compliance engineers. +> **Scope:** Advisory aggregation UX, VEX consensus display, conflict indicators, raw document viewer, provenance banners, CLI parity, and Aggregation-Only Contract (AOC) guardrails for Sprint 23. + +The Advisories and VEX surfaces expose Concelier and Excititor outputs without mutating the underlying data. Operators can review upstream statements, check consensus summaries, inspect conflicts, and hand off evidence to downstream tooling while staying within the Aggregation-Only Contract. + +--- + +## 1. Access and prerequisites + +- **Routes:** + - `/console/advisories` (advisory list and detail) + - `/console/vex` (VEX consensus and raw claim explorer) +- **Scopes:** `advisory.read` and `vex.read` (base access), `advisory.verify` / `vex.verify` for verification actions, `downloads.read` for evidence exports. +- **Feature flags:** `advisoryExplorer.enabled`, `vexExplorer.enabled`, `aggregation.conflictIndicators`. +- **Dependencies:** Concelier WebService (aggregation API + delta metrics), Excititor WebService (consensus API + conflict feeds), Policy Engine explain hints (optional link-outs), Authority tenant enforcement. +- **Offline behaviour:** Uses Offline Kit snapshots when gateway is in sealed mode; verify buttons queue until connectivity resumes. + +--- + +## 2. 
Layout overview + +``` ++---------------------------------------------------------------------+ +| Header: Tenant badge - global filters - status ticker - actions | ++---------------------------------------------------------------------+ +| Left rail: Saved views - provider filters - verification queue | ++---------------------------------------------------------------------+ +| Main split pane | +| - Advisories tab (grid + detail drawer) | +| - VEX tab (consensus table + claim drawer) | +| Tabs remember last active view per tenant. | ++---------------------------------------------------------------------+ +``` + +The header reuses console-wide context chips (`Tenant`, `Severity`, `Source`, `Time`) and the status ticker that streams Concelier and Excititor deltas. + +--- + +## 3. Advisory aggregation view + +| Element | Description | +|---------|-------------| +| **Grid columns** | Vulnerability key (CVE/GHSA/vendor), Title, Source set, Last merged, Severity badge, KEV flag, Affected product count, Merge hash. | +| **Source chips** | Show contributing providers (NVD, Red Hat, Debian, vendor PSIRT). Hover reveals precedence order and timestamps. | +| **Severity** | Displays the highest severity declared by any source; tooltip lists per-source severities and vectors. | +| **KEV / Exploit status** | Badge highlights known exploited status from Concelier enrichment; links to KEV reference. | +| **Merge hash** | Deterministic hash from Concelier `merge_event`. Clicking copies hash and opens provenance banner. | +| **Filters** | Vulnerability identifier search, provider multi-select, severity picker, KEV toggle, affected product range slider, time window. | +| **List actions** | `Open detail`, `Copy CLI` (`stella advisory show ...`), `Compare sources`, `Queue verify`. | + +The grid virtualises up to 15,000 advisories per tenant. Beyond that, the UI engages server-side pagination with cursor hints supplied by Concelier. + +--- + +## 4. Advisory detail drawer + +Sections within the drawer: + +1. **Summary cards** (title, published/modified timestamps, advisory merge hash, total sources, exploited flag). +2. **Sources timeline** listing each contributing document with signature status, fetched timestamps, precedence rank, and quick links to raw view. +3. **Affected products** table (product key, introduced/fixed, range semantics, distro qualifiers, notes). Column toggles allow switching between SemVer and distro notation. +4. **Conflict indicators** show when sources disagree on fixed versions, severity, or affected sets. Each conflict row links to an explainer panel that describes the winning value, losing sources, and precedence rule. +5. **References** collapsible list (patches, advisories, exploits). +6. **Raw JSON** viewer (read-only) using canonical Concelier payload. Users can copy JSON or download via `GET /console/advisories/raw/{id}`. +7. **CLI parity** card with commands: + - `stella advisory show --tenant --vuln ` + - `stella advisory sources --tenant --vuln ` + - `stella advisory export --tenant --vuln --format cdx-json` + +Provenance banner at the top indicates whether all sources are signed, partially signed, or unsigned, referencing AOC guardrails. Unsigned sources trigger a warning and link to the verification checklist. + +--- + +## 5. 
VEX explorer + +| Feature | Description | +|---------|-------------| +| **Consensus table** | Rows keyed by `(vulnId, productKey)` with rollup status (affected, not affected, fixed, under investigation), confidence score, provider count, and last evaluation timestamp. | +| **Status badges** | Colour-coded (red affected, green not affected, blue fixed, amber under investigation). Tooltips show justification and policy revision used. | +| **Provider breakdown** | Hover or expand to see source list with accepted/ignored flag, status, justification code, signature state, weight. | +| **Filters** | Product search (PURL), status filter, provider filter, justification codes, confidence threshold slider. | +| **Saved views** | Prebuilt presets: `Vendor consensus`, `Distro overrides`, `Conflicts`, `Pending investigation`. | + +--- + +## 6. VEX detail drawer + +Tabs within the drawer: + +- **Consensus summary**: Restates rollup status, policy revision, confidence benchmarks, and referencing runs. +- **Claims list**: Every raw claim from Excititor with provenance, signature result, justification, supersedes chain, evidence snippets. Claims are grouped by provider tier (vendor, distro, ecosystem, CERT). +- **Conflict explainers**: For conflicting claims, shows why a claim was ignored (weight, stale timestamp, failing justification gate). Includes inline diff between competing claims. +- **Events**: Timeline of claim arrivals and consensus evaluations with correlation IDs, accessible for debugging. +- **Raw JSON**: Canonical `VexClaim` or `VexConsensus` payloads with copy/download. CLI parity callouts: + - `stella vex consensus show --tenant --vuln --product ` + - `stella vex claims show --tenant --vuln --provider ` + +--- + +## 7. Raw viewers and provenance + +- Raw viewers display canonical payloads with syntax highlighting and copy-as-JSON support. +- Provenance banner presents: source URI, document digest, signature status, fetch timestamps, collector version. +- Users can open raw documents in a modal that includes: + - `sha256` digest with copy button + - Signature verification summary (passing keys, missing signatures, errors) + - `Download DSSE bundle` button when the document is attested + - `Open in logs` link that copies search query (`correlationId=...`) for log aggregation tools. + +All raw views are read-only to maintain Aggregation-Only guarantees. + +--- + +## 8. Conflict indicators and aggregation-not-merge UX + +- Concelier retains every source; the UI surfaces conflicts rather than merging them. +- Conflict badges appear in grids and detail views when sources disagree on affected ranges, fixed versions, severity, or exploit flags. +- Clicking a badge opens the conflict explainer panel (powered by Concelier merge metadata) that lists winning/losing sources, ranks, and reasoning (e.g., "Vendor PSIRT overrides ecosystem advisory"). +- Excititor conflicts highlight discarded claims with reasons (stale, failing justification, low weight). Operators can override weights downstream via Policy Engine if needed. +- UI copy explicitly reminds users that policy decisions happen elsewhere; these views show aggregated facts only. + +--- + +## 9. Verification workflows + +- **Run verify** buttons call Concelier or Excititor verification endpoints (`POST /console/advisories/verify`, `POST /console/vex/verify`) scoped by tenant and source filters. +- Verification results appear as banners summarising documents checked, signatures verified, and guard violations. 
+- Failed verifications show actionable error IDs (`ERR_AOC_00x`), matching CLI output. +- Verification history accessible via the status ticker dropdown; entries include operator, scope, and correlation IDs. + +--- + +## 10. Exports and automation + +- Advisory tab exposes export actions: `Download normalized advisory`, `Download affected products CSV`, `Download source bundle` (raw documents packaged with manifest). +- VEX tab supports exports for consensus snapshots, raw claims, and provider deltas. +- Export manifests include merge hash or consensus digest, tenant ID, timestamp, and signature state. +- CLI parity snippets accompany each export (e.g., `stella advisory export`, `stella vex export`). +- Automation: copy buttons for webhook subscription (`/downloads/hooks/subscribe`) and ORAS push commands when using remote registries. + +--- + +## 11. Observability and SSE updates + +- Status ticker shows ingest lag (`advisory_delta_minutes`, `vex_delta_minutes`), last merge event hash, and verification queue depth. +- Advisory and VEX grids refresh via SSE channels; updates animate row badges (new source, conflict resolved). +- Metrics surfaced in drawers: ingestion age, signature pass rate, consensus evaluation duration. +- Errors display correlation IDs linking to Concelier/Excititor logs. + +--- + +## 12. Offline and air-gap behaviour + +- When offline, list views display snapshot badge, staleness timer, and disable real-time verification. +- Raw downloads reference local snapshot directories and include checksum instructions. +- Exports queue locally; UI offers `Copy to removable media` instructions. +- CLI parity switches to offline commands (`--offline`, `--snapshot`). +- Tenant picker hides tenants not present in the snapshot to avoid partial data views. + +--- + +## 13. Screenshot coordination + +- Placeholders: + - `![Advisory grid placeholder](../assets/ui/advisories/grid-placeholder.png)` + - `![VEX consensus placeholder](../assets/ui/advisories/vex-placeholder.png)` +- Coordinate with Console Guild to capture updated screenshots (dark and light themes) once Sprint 23 build candidate is tagged. Tracking in Slack channel `#console-screenshots` (entry 2025-10-26). + +--- + +## 14. References + +- `/docs/ui/console-overview.md` - shell, filters, tenant model. +- `/docs/ui/navigation.md` - command palette, deep-link schema. +- `/docs/ingestion/aggregation-only-contract.md` - AOC guardrails. +- `/docs/architecture/CONCELIER.md` - merge rules, provenance. +- `/docs/architecture/EXCITITOR.md` - VEX consensus model. +- `/docs/security/console-security.md` - scopes, DPoP, CSP. +- `/docs/cli-vs-ui-parity.md` - CLI equivalence matrix. + +--- + +## 15. Compliance checklist + +- [ ] Advisory grid columns, filters, and merge hash behaviour documented. +- [ ] VEX consensus view covers status badges, provider breakdown, and filters. +- [ ] Raw viewer and provenance banners explained with AOC alignment. +- [ ] Conflict indicators and explainers tied to aggregation-not-merge rules. +- [ ] Verification workflow and CLI parity documented. +- [ ] Offline behaviour and automation paths captured. +- [ ] Screenshot placeholders and coordination notes recorded. +- [ ] References validated. 
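+
+For automation, the verification workflow in section 9 maps directly onto the console verify endpoints. The sketch below is illustrative: the gateway base URL, token, and request body shape are assumptions — only the `POST /console/advisories/verify` route, the `advisory.verify` scope, and the `ERR_AOC_00x` error identifiers come from this page.
+
+```bash
+# Hypothetical values: CONSOLE_URL and TOKEN are not defined by this page.
+CONSOLE_URL="https://console.example.internal"
+
+# Trigger advisory verification for the active tenant; the body is a sketch,
+# not a documented contract (section 9 only states tenant/source scoping).
+curl -sS -X POST "$CONSOLE_URL/console/advisories/verify" \
+  -H "Authorization: Bearer $TOKEN" \
+  -H "Content-Type: application/json" \
+  -d '{"sources": ["nvd"]}' \
+  -o verify-result.json
+
+# Failed checks surface ERR_AOC_00x identifiers, matching CLI output.
+grep -o 'ERR_AOC_00[0-9]' verify-result.json | sort | uniq -c
+```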
+ +--- + +*Last updated: 2025-10-26 (Sprint 23).* + diff --git a/docs/ui/console-overview.md b/docs/ui/console-overview.md index bf87706c..0e2fed7c 100644 --- a/docs/ui/console-overview.md +++ b/docs/ui/console-overview.md @@ -1,130 +1,130 @@ -# StellaOps Console – Overview - -> **Audience:** Console product leads, Docs Guild writers, backend/API partners. -> **Scope:** Information architecture, tenant scoping, global filters, and Aggregation‑Only Contract (AOC) alignment for the unified StellaOps Console that lands with Sprint 23. - -The StellaOps Console is the single entry point for operators to explore SBOMs, advisories, policies, runs, and administrative surfaces. This overview explains how the console is organised, how users move between tenants, and how shared filters keep data views consistent across modules while respecting AOC boundaries. - ---- - -## 1 · Mission & Principles - -- **Deterministic navigation.** Every route is stable and deep-link friendly. URLs carry enough context (tenant, filter tokens, view modes) to let operators resume work without reapplying filters. -- **Tenant isolation first.** Any cross-tenant action requires fresh authority, and cross-tenant comparisons are made explicit so users never accidentally mix data sets. -- **Aggregation-not-merge UX.** Console surfaces advisory and VEX rollups exactly as produced by Concelier and Excititor—no client-side re-weighting or mutation. -- **Offline parity.** Every view has an offline equivalent powered by Offline Kit bundles or cached data, and exposes the staleness budget prominently. - ---- - -## 2 · Information Architecture - -### 2.1 Primary navigation - -``` -Console Root - ├─ Dashboard # KPIs, alerts, feed age, queue depth - ├─ Findings # Aggregated vulns + explanations (Policy Engine) - ├─ SBOM Explorer # Catalog, component graph, overlays - ├─ Advisories & VEX # Concelier / Excititor aggregation outputs - ├─ Runs # Scheduler runs, scan evidence, retry controls - ├─ Policies # Editor, simulations, approvals - ├─ Downloads # Signed artifacts, Offline Kit parity - ├─ Admin # Tenants, roles, tokens, integrations - └─ Help & Tours # Contextual docs, guided walkthroughs -``` - -Routes lazy-load feature shells so the UI can grow without increasing first-paint cost. Each feature owns its sub-navigation and exposes a `KeyboardShortcuts` modal describing the available accelerators. - -### 2.2 Shared surfaces - -| Surface | Purpose | Notes | -|---------|---------|-------| -| **Top bar** | Shows active tenant, environment badge (prod/non-prod), offline status pill, user menu, notifications inbox, and the command palette trigger (`⌘/Ctrl K`). | Offline status turns amber when data staleness exceeds configured thresholds. | -| **Global filter tray** | Expands from the right edge (`Shift F`). Hosts universal filters (tenant, time window, tags, severity) that apply across compatible routes. | Filter tray remembers per-tenant presets; stored in IndexedDB (non-sensitive). | -| **Context chips** | Display active global filters underneath page titles, with one-click removal (`⌫`). | Chips include the origin (e.g., `Tenant: west-prod`). | -| **Status ticker** | SSE-driven strip that surfaces Concelier/Excititor ingestion deltas, scheduler lag, and attestor queue depth. | Pulls from `/console/status` proxy (see WEB-CONSOLE-23-002). | - ---- - -## 3 · Tenant Model - -| Aspect | Detail | -|--------|--------| -| **Tenant sources** | The console obtains the tenant list and metadata from Authority `/v1/tenants` after login. 
Tenant descriptors include display name, slug, environment tag, and RBAC hints (role mask). | -| **Selection workflow** | First visit prompts for a default tenant. Afterwards, the tenant picker (`⌘/Ctrl T`) switches context without full reload, issuing `Authorization` refresh with the new tenant scope. | -| **Token handling** | Each tenant change generates a short-lived, DPoP-bound access token (`aud=console`, `tenant=`). Tokens live in memory; metadata persists in `sessionStorage` for reload continuity. | -| **Cross-tenant comparisons** | Side-by-side dashboards (Dashboard, Findings, SBOM Explorer) allow multi-tenant comparison only via explicit *"Add tenant"* control. Requests issue parallel API calls with separate tokens; results render in split panes labelled per tenant. | -| **Fresh-auth gated actions** | Admin and policy approvals call `Authority /fresh-auth` before executing. UI enforces a 5-minute window; afterwards, actions remain visible but disabled pending re-auth. | -| **Audit trail** | Tenant switches emit structured logs (`action=ui.tenant.switch`, `tenantId`, `subject`, `previousTenant`) and appear in Authority audit exports. | - -### 3.1 Offline operation - -In offline or sealed environments, the tenant picker only lists tenants bundled within the Offline Kit snapshot. Switching tenants prompts an "offline snapshot" banner showing the snapshot timestamp. Actions that require round-trips to Authority (fresh-auth, token rotation) show guidance to perform the step on an online bastion and import credentials later. - ---- - -## 4 · Global Filters & Context Tokens - -| Filter | Applies To | Source & Behaviour | -|--------|------------|--------------------| -| **Tenant** | All modules | Primary isolation control. Stored in URL (`?tenant=`) and via `x-tenant-id` header injected by the web proxy. Changes invalidate cached data stores. | -| **Time window** | Dashboard, Findings, Advisories & VEX, Runs | Options: `24 h`, `7 d`, `30 d`, custom ISO range. Default aligns with Compliance/Authority reporting window. Shared via query param `since=`/`until=`. | -| **Severity / Impact** | Findings, Advisories & VEX, SBOM Explorer overlays | Multi-select (Critical/High/Medium/Low/Informational, plus `Exploited` tag). Values map to Policy Engine impact buckets and Concelier KEV flags. | -| **Component tags** | SBOM Explorer, Findings | Tags drawn from SBOM metadata (`component.tags[]`). Includes search-as-you-type with scoped suggestions (package type, supplier, license). | -| **Source providers** | Advisories & VEX | Filter by provider IDs (e.g., NVD, GHSA, vendor VEX). Tied to Aggregation-Only provenance; filtering never alters base precedence. | -| **Run status** | Runs, Dashboard | States: `queued`, `running`, `completed`, `failed`, `cancelled`. Pulled from Scheduler SSE stream; default shows non-terminal states. | -| **Policy view** | Findings, Policies | Toggles between Active policy, Staged policy, and Simulation snapshots. Selecting Simulation requires prior simulation run; console links to create one if absent. | - -Filters emit deterministic tokens placed in the URL hash for copy/paste parity with CLI commands (see `/docs/cli-vs-ui-parity.md`). The console warns when a filter combination has no effect on the current view and offers to reset to defaults. - -### 4.1 Presets & Saved Views - -Users can save a set of global filters as named presets (stored per tenant). Presets show up in the command palette and the dashboard landing cards for quick access (`⌘/Ctrl 1..9`). 
- ---- - -## 5 · Aggregation-Only Alignment - -- **Read-only aggregation.** Pages that list advisories or VEX claims consume the canonical aggregation endpoints (`/console/advisories`, `/console/vex`). They never merge or reconcile records client-side. Instead, they highlight the source lineage and precedence as supplied by Concelier and Excititor. -- **Consistency indicators.** Each aggregated item displays source badges, precedence order, and a "last merge event hash" so operators can cross-reference Concelier logs. When a source is missing or stale, the UI surfaces a provenance banner linking to the raw document. -- **AOC guardrails.** Workflow actions (e.g., "request verify", "download evidence bundle") route through Concelier WebService guard endpoints that enforce Aggregation-Only rules. UI strings reinforce that policy decisions happen in Policy Engine, not here. -- **Audit alignment.** Any cross-navigation from aggregated data into findings or policies preserves the underlying IDs so analysts can track how aggregated data influences policy verdicts without altering the data itself. -- **CLI parity.** Inline callouts copy the equivalent `stella` CLI commands, ensuring console users can recreate the exact aggregation query offline. - ---- - -## 6 · Performance & Telemetry Anchors - -- Initial boot target: **< 2.5 s** `LargestContentfulPaint` on 4 vCPU air-gapped runner with cached assets. -- Route budget: each feature shell must keep first interaction (hydrated data + filters) under **1.5 s** once tokens resolve. -- Telemetry: console emits metrics via the `/console/telemetry` batch endpoint—`ui_route_render_seconds`, `ui_filter_apply_total`, `ui_tenant_switch_total`, `ui_offline_banner_seconds`. Logs carry correlation IDs matching backend responses for unified tracing. -- Lighthouse CI runs in the console pipeline (see `DEVOPS-CONSOLE-23-001`) and asserts budgets above; failing runs gate releases. - ---- - -## 7 · References - -- `/docs/architecture/console.md` – component-level diagrams (pending Sprint 23 task). -- `/docs/ui/navigation.md` – detailed routes, breadcrumbs, keyboard shortcuts. -- `/docs/ui/downloads.md` – downloads manifest, parity workflows, offline guidance. -- `/docs/ui/sbom-explorer.md` – SBOM-specific flows and overlays. -- `/docs/ui/advisories-and-vex.md` – aggregation UX details. -- `/docs/ui/findings.md` – explain drawer and filter matrix. -- `/docs/security/console-security.md` – OIDC, scopes, CSP, evidence handling. -- `/docs/cli-vs-ui-parity.md` – CLI equivalents and regression automation. - ---- - -## 8 · Compliance Checklist - -- [ ] Tenant picker enforces Authority-issued scopes and logs `ui.tenant.switch`. -- [ ] Global filters update URLs/query tokens for deterministic deep links. -- [ ] Aggregation views show provenance badges and merge hash indicators. -- [ ] CLI parity callouts aligned with `stella` commands for equivalent queries. -- [ ] Offline banner tested with Offline Kit snapshot import and documented staleness thresholds. -- [ ] Accessibility audit covers global filter tray, tenant picker, and keyboard shortcuts (WCAG 2.2 AA). -- [ ] Telemetry and Lighthouse budgets tracked in console CI (`DEVOPS-CONSOLE-23-001`). - ---- - -*Last updated: 2025-10-26 (Sprint 23).* +# StellaOps Console – Overview + +> **Audience:** Console product leads, Docs Guild writers, backend/API partners. 
+> **Scope:** Information architecture, tenant scoping, global filters, and Aggregation‑Only Contract (AOC) alignment for the unified StellaOps Console that lands with Sprint 23. + +The StellaOps Console is the single entry point for operators to explore SBOMs, advisories, policies, runs, and administrative surfaces. This overview explains how the console is organised, how users move between tenants, and how shared filters keep data views consistent across modules while respecting AOC boundaries. + +--- + +## 1 · Mission & Principles + +- **Deterministic navigation.** Every route is stable and deep-link friendly. URLs carry enough context (tenant, filter tokens, view modes) to let operators resume work without reapplying filters. +- **Tenant isolation first.** Any cross-tenant action requires fresh authority, and cross-tenant comparisons are made explicit so users never accidentally mix data sets. +- **Aggregation-not-merge UX.** Console surfaces advisory and VEX rollups exactly as produced by Concelier and Excititor—no client-side re-weighting or mutation. +- **Offline parity.** Every view has an offline equivalent powered by Offline Kit bundles or cached data, and exposes the staleness budget prominently. + +--- + +## 2 · Information Architecture + +### 2.1 Primary navigation + +``` +Console Root + ├─ Dashboard # KPIs, alerts, feed age, queue depth + ├─ Findings # Aggregated vulns + explanations (Policy Engine) + ├─ SBOM Explorer # Catalog, component graph, overlays + ├─ Advisories & VEX # Concelier / Excititor aggregation outputs + ├─ Runs # Scheduler runs, scan evidence, retry controls + ├─ Policies # Editor, simulations, approvals + ├─ Downloads # Signed artifacts, Offline Kit parity + ├─ Admin # Tenants, roles, tokens, integrations + └─ Help & Tours # Contextual docs, guided walkthroughs +``` + +Routes lazy-load feature shells so the UI can grow without increasing first-paint cost. Each feature owns its sub-navigation and exposes a `KeyboardShortcuts` modal describing the available accelerators. + +### 2.2 Shared surfaces + +| Surface | Purpose | Notes | +|---------|---------|-------| +| **Top bar** | Shows active tenant, environment badge (prod/non-prod), offline status pill, user menu, notifications inbox, and the command palette trigger (`⌘/Ctrl K`). | Offline status turns amber when data staleness exceeds configured thresholds. | +| **Global filter tray** | Expands from the right edge (`Shift F`). Hosts universal filters (tenant, time window, tags, severity) that apply across compatible routes. | Filter tray remembers per-tenant presets; stored in IndexedDB (non-sensitive). | +| **Context chips** | Display active global filters underneath page titles, with one-click removal (`⌫`). | Chips include the origin (e.g., `Tenant: west-prod`). | +| **Status ticker** | SSE-driven strip that surfaces Concelier/Excititor ingestion deltas, scheduler lag, and attestor queue depth. | Pulls from `/console/status` proxy (see WEB-CONSOLE-23-002). | + +--- + +## 3 · Tenant Model + +| Aspect | Detail | +|--------|--------| +| **Tenant sources** | The console obtains the tenant list and metadata from Authority `/v1/tenants` after login. Tenant descriptors include display name, slug, environment tag, and RBAC hints (role mask). | +| **Selection workflow** | First visit prompts for a default tenant. Afterwards, the tenant picker (`⌘/Ctrl T`) switches context without full reload, issuing `Authorization` refresh with the new tenant scope. 
| +| **Token handling** | Each tenant change generates a short-lived, DPoP-bound access token (`aud=console`, `tenant=`). Tokens live in memory; metadata persists in `sessionStorage` for reload continuity. | +| **Cross-tenant comparisons** | Side-by-side dashboards (Dashboard, Findings, SBOM Explorer) allow multi-tenant comparison only via explicit *"Add tenant"* control. Requests issue parallel API calls with separate tokens; results render in split panes labelled per tenant. | +| **Fresh-auth gated actions** | Admin and policy approvals call `Authority /fresh-auth` before executing. UI enforces a 5-minute window; afterwards, actions remain visible but disabled pending re-auth. | +| **Audit trail** | Tenant switches emit structured logs (`action=ui.tenant.switch`, `tenantId`, `subject`, `previousTenant`) and appear in Authority audit exports. | + +### 3.1 Offline operation + +In offline or sealed environments, the tenant picker only lists tenants bundled within the Offline Kit snapshot. Switching tenants prompts an "offline snapshot" banner showing the snapshot timestamp. Actions that require round-trips to Authority (fresh-auth, token rotation) show guidance to perform the step on an online bastion and import credentials later. + +--- + +## 4 · Global Filters & Context Tokens + +| Filter | Applies To | Source & Behaviour | +|--------|------------|--------------------| +| **Tenant** | All modules | Primary isolation control. Stored in URL (`?tenant=`) and via `x-tenant-id` header injected by the web proxy. Changes invalidate cached data stores. | +| **Time window** | Dashboard, Findings, Advisories & VEX, Runs | Options: `24 h`, `7 d`, `30 d`, custom ISO range. Default aligns with Compliance/Authority reporting window. Shared via query param `since=`/`until=`. | +| **Severity / Impact** | Findings, Advisories & VEX, SBOM Explorer overlays | Multi-select (Critical/High/Medium/Low/Informational, plus `Exploited` tag). Values map to Policy Engine impact buckets and Concelier KEV flags. | +| **Component tags** | SBOM Explorer, Findings | Tags drawn from SBOM metadata (`component.tags[]`). Includes search-as-you-type with scoped suggestions (package type, supplier, license). | +| **Source providers** | Advisories & VEX | Filter by provider IDs (e.g., NVD, GHSA, vendor VEX). Tied to Aggregation-Only provenance; filtering never alters base precedence. | +| **Run status** | Runs, Dashboard | States: `queued`, `running`, `completed`, `failed`, `cancelled`. Pulled from Scheduler SSE stream; default shows non-terminal states. | +| **Policy view** | Findings, Policies | Toggles between Active policy, Staged policy, and Simulation snapshots. Selecting Simulation requires prior simulation run; console links to create one if absent. | + +Filters emit deterministic tokens placed in the URL hash for copy/paste parity with CLI commands (see `/docs/cli-vs-ui-parity.md`). The console warns when a filter combination has no effect on the current view and offers to reset to defaults. + +### 4.1 Presets & Saved Views + +Users can save a set of global filters as named presets (stored per tenant). Presets show up in the command palette and the dashboard landing cards for quick access (`⌘/Ctrl 1..9`). + +--- + +## 5 · Aggregation-Only Alignment + +- **Read-only aggregation.** Pages that list advisories or VEX claims consume the canonical aggregation endpoints (`/console/advisories`, `/console/vex`). They never merge or reconcile records client-side. 
Instead, they highlight the source lineage and precedence as supplied by Concelier and Excititor. +- **Consistency indicators.** Each aggregated item displays source badges, precedence order, and a "last merge event hash" so operators can cross-reference Concelier logs. When a source is missing or stale, the UI surfaces a provenance banner linking to the raw document. +- **AOC guardrails.** Workflow actions (e.g., "request verify", "download evidence bundle") route through Concelier WebService guard endpoints that enforce Aggregation-Only rules. UI strings reinforce that policy decisions happen in Policy Engine, not here. +- **Audit alignment.** Any cross-navigation from aggregated data into findings or policies preserves the underlying IDs so analysts can track how aggregated data influences policy verdicts without altering the data itself. +- **CLI parity.** Inline callouts copy the equivalent `stella` CLI commands, ensuring console users can recreate the exact aggregation query offline. + +--- + +## 6 · Performance & Telemetry Anchors + +- Initial boot target: **< 2.5 s** `LargestContentfulPaint` on 4 vCPU air-gapped runner with cached assets. +- Route budget: each feature shell must keep first interaction (hydrated data + filters) under **1.5 s** once tokens resolve. +- Telemetry: console emits metrics via the `/console/telemetry` batch endpoint—`ui_route_render_seconds`, `ui_filter_apply_total`, `ui_tenant_switch_total`, `ui_offline_banner_seconds`. Logs carry correlation IDs matching backend responses for unified tracing. +- Lighthouse CI runs in the console pipeline (see `DEVOPS-CONSOLE-23-001`) and asserts budgets above; failing runs gate releases. + +--- + +## 7 · References + +- `/docs/architecture/console.md` – component-level diagrams (pending Sprint 23 task). +- `/docs/ui/navigation.md` – detailed routes, breadcrumbs, keyboard shortcuts. +- `/docs/ui/downloads.md` – downloads manifest, parity workflows, offline guidance. +- `/docs/ui/sbom-explorer.md` – SBOM-specific flows and overlays. +- `/docs/ui/advisories-and-vex.md` – aggregation UX details. +- `/docs/ui/findings.md` – explain drawer and filter matrix. +- `/docs/security/console-security.md` – OIDC, scopes, CSP, evidence handling. +- `/docs/cli-vs-ui-parity.md` – CLI equivalents and regression automation. + +--- + +## 8 · Compliance Checklist + +- [ ] Tenant picker enforces Authority-issued scopes and logs `ui.tenant.switch`. +- [ ] Global filters update URLs/query tokens for deterministic deep links. +- [ ] Aggregation views show provenance badges and merge hash indicators. +- [ ] CLI parity callouts aligned with `stella` commands for equivalent queries. +- [ ] Offline banner tested with Offline Kit snapshot import and documented staleness thresholds. +- [ ] Accessibility audit covers global filter tray, tenant picker, and keyboard shortcuts (WCAG 2.2 AA). +- [ ] Telemetry and Lighthouse budgets tracked in console CI (`DEVOPS-CONSOLE-23-001`). + +--- + +*Last updated: 2025-10-26 (Sprint 23).* diff --git a/docs/ui/console.md b/docs/ui/console.md index 64445040..c6c9f905 100644 --- a/docs/ui/console.md +++ b/docs/ui/console.md @@ -1,144 +1,144 @@ -# Console AOC Dashboard - -> **Audience:** Console PMs, UI engineers, Concelier/Excititor operators, SREs monitoring ingestion health. -> **Scope:** Layout, RBAC, workflow, and observability for the Aggregation-Only Contract (AOC) dashboard that ships with Sprint 19. - -The Console AOC dashboard gives operators a live view of ingestion guardrails across all configured sources. 
It surfaces raw Concelier/Excititor health, highlights violations raised by `AOCWriteGuard`, and lets on-call staff trigger verification without leaving the browser. Use it alongside the [Aggregation-Only Contract reference](../ingestion/aggregation-only-contract.md) and the [architecture overview](../architecture/overview.md) when rolling out AOC changes. - ---- - -## 1 · Access & prerequisites - -- **Route:** `/console/sources` (dashboard) with contextual drawer routes `/console/sources/:sourceKey` and `/console/sources/:sourceKey/violations/:documentId`. -- **Feature flag:** `aocDashboard.enabled` (default `true` once Concelier WebService exposes `/aoc/verify`). Toggle is tenant-scoped to support phased rollout. -- **Scopes:** - - `ui.read` (base navigation) plus `advisory:read` to view Concelier ingestion metrics/violations. - - `vex:read` to see Excititor entries and run VEX verifications. - - `aoc:verify` to trigger guard runs from the dashboard action bar. - - `advisory:ingest` / `vex:ingest` **not** required; the dashboard uses read-only APIs. -- **Tenancy:** All data is filtered by the active tenant selector. Switching tenants re-fetches tiles and drill-down tables with tenant-scoped tokens. -- **Back-end contracts:** Requires Concelier/Excititor 19.x (AOC guards enabled) and Authority scopes updated per [Authority service docs](../ARCHITECTURE_AUTHORITY.md#new-aoc-scopes). - ---- - -## 2 · Layout overview - -``` -┌────────────────────────────────────────────────────────────────────────────┐ -│ Header: tenant picker • live status pill • Last verify (“2h ago”) │ -├────────────────────────────────────────────────────────────────────────────┤ -│ Tile grid (4 per row) │ -│ ┌───── Concelier sources ─────┐ ┌────── Excititor sources ────────┐ │ -│ │ Red Hat | Ubuntu | OSV ... │ │ Vendor VEX | CSAF feeds ... │ │ -├────────────────────────────────────────────────────────────────────────────┤ -│ Violations & history table │ -│ • Filters: timeframe, source, ERR_AOC code, severity (warning/block) │ -│ • Columns: timestamp, source, code, summary, supersedes link, actions │ -├────────────────────────────────────────────────────────────────────────────┤ -│ Action bar: Run Verify • Download CSV • Open Concelier raw doc • Help │ -└────────────────────────────────────────────────────────────────────────────┘ -``` - -Tiles summarise the latest ingestion runs. The table and drawers provide drill-down views, and the action bar launches verifier workflows or exports evidence for audits. - ---- - -## 3 · Source tiles - -Each tile represents a Concelier or Excititor source and contains the fields below. - -| Field | Description | Thresholds & colours | -| ------ | ----------- | -------------------- | -| **Status badge** | Aggregated health computed from the latest job. | `Healthy` (green) when last job finished < 30 min ago and `violations24h = 0`; `Warning` (amber) when age ≥ 30 min or ≤ 5 violations; `Critical` (red) on any guard rejection (`ERR_AOC_00x`) or if job age > 2 h. | -| **Last ingest** | Timestamp and relative age of last successful append to `advisory_raw`/`vex_raw`. | Clicking opens job detail drawer. | -| **Violations (24 h)** | Count of guard failures grouped by `ERR_AOC` code across the last 24 hours. | Shows pill per code (e.g., `ERR_AOC_001 × 2`). | -| **Supersedes depth** | Average length of supersedes chain for the source over the last day. | Helps spot runaway revisions. | -| **Signature pass rate** | % of documents where signature/checksum verification succeeded. 
| Derived from `ingestion_signature_verified_total`. | -| **Latency P95** | Write latency recorded by ingestion spans / histograms. | Mirrors `ingestion_latency_seconds{quantile=0.95}`. | - -Tile menus expose quick actions: - -- **View history** – jumps to table filtered by the selected source. -- **Open metrics** – deep links to Grafana panel seeded with `source=` for `ingestion_write_total` and `aoc_violation_total`. -- **Download raw sample** – fetches the most recent document via `GET /advisories/raw/{id}` (or VEX equivalent) for debugging. - ---- - -## 4 · Violation drill-down workflow - -1. **Select a tile** or use table filters to focus on a source, timeframe, or `ERR_AOC` code. -2. **Inspect the violation row:** summary shows offending field, guard code, and document hash. -3. **Open detail drawer:** reveals provenance (source URI, signature info), supersedes chain, and raw JSON (redacted secrets). Drawer also lists linked `effective_finding_*` entries if Policy Engine has already materialised overlays. -4. **Remediate / annotate:** operators can add notes (stored as structured annotations) or flag as *acknowledged* (for on-call rotations). Annotations sync to Concelier audit logs. -5. **Escalate:** “Create incident” button opens the standard incident template pre-filled with context (requires `ui.incidents` scope). - -The drill-down retains filter state, so back navigation returns to the scoped table without reloading the entire dashboard. - ---- - -## 5 · Verification & actions - -- **Run Verify:** calls `POST /aoc/verify` with the chosen `since` window (default 24 h). UI displays summary cards (documents checked, violations found, top codes) and stores reports for 7 days. Results include a downloadable JSON manifest mirroring CLI output. -- **Schedule verify:** schedule modal configures automated verification (daily/weekly) and optional email/Notifier hooks. -- **Export evidence:** CSV/JSON export buttons include tile metrics, verification summaries, and violation annotations—useful for audits. -- **Open in CLI:** copies `stella aoc verify --tenant --since ` for parity with automation scripts. - -All verify actions are scoped by tenant and recorded in Authority audit logs (`action=aoc.verify.ui`). - ---- - -## 6 · Metrics & observability - -The dashboard consumes the same metrics emitted by Concelier/Excititor (documented in the [AOC reference](../ingestion/aggregation-only-contract.md#9-observability-and-diagnostics)): - -- `ingestion_write_total{source,tenant,result}` – populates success/error sparklines beneath each tile. -- `aoc_violation_total{source,tenant,code}` – feeds violation pills and trend chart. -- `ingestion_signature_verified_total{source,result}` – renders signature pass-rate gauge. -- `ingestion_latency_seconds{source,quantile}` – used for latency badges and alert banners. -- `advisory_revision_count{source}` – displayed in supersedes depth tooltip. - -The page shows the correlation ID for each violation entry, matching structured logs emitted by Concelier and Excititor, enabling quick log pivoting. - ---- - -## 7 · Security & tenancy - -- Tokens are DPoP-bound; every API call includes the UI’s DPoP proof and inherits tenant scoping from Authority. -- Violations drawer hides sensitive fields (credentials, private keys) using the same redaction rules as Concelier events. -- Run Verify honours rate limits to avoid overloading ingestion services; repeated failures trigger a cool-down banner. 
-- The dashboard never exposes derived severity or policy status—only raw ingestion facts and guard results, preserving AOC separation of duties. - ---- - -## 8 · Offline & air-gap behaviour - -- In sealed/offline mode the dashboard switches to **“offline snapshot”** banner, reading from Offline Kit snapshots seeded via `ouk` imports. -- Verification requests queue until connectivity resumes; UI provides `Download script` to run `stella aoc verify` on a workstation and upload results later. -- Tiles display the timestamp of the last imported snapshot and flag when it exceeds the configured staleness threshold (default 48 h offline). -- CSV/JSON exports include checksums so operators can transfer evidence across air gaps securely. - ---- - -## 9 · Related references - -- [Aggregation-Only Contract reference](../ingestion/aggregation-only-contract.md) -- [Architecture overview](../architecture/overview.md) -- [Concelier architecture](../ARCHITECTURE_CONCELIER.md) -- [Excititor architecture](../ARCHITECTURE_EXCITITOR.md) -- [CLI AOC commands](../cli/cli-reference.md) - ---- - -## 10 · Compliance checklist - -- [ ] Dashboard wired to live AOC metrics (`ingestion_*`, `aoc_violation_total`). -- [ ] Verify action logs to Authority audit trail with tenant context. -- [ ] UI enforces read-only access to raw stores; no mutation endpoints invoked. -- [ ] Offline/air-gap mode documented and validated with Offline Kit snapshots. -- [ ] Violation exports include provenance and `ERR_AOC_00x` codes. -- [ ] Accessibility tested (WCAG 2.2 AA) for tiles, tables, and drawers. -- [ ] Screenshot/recording captured for Docs release notes (pending UI capture). - ---- - -*Last updated: 2025-10-26 (Sprint 19).* +# Console AOC Dashboard + +> **Audience:** Console PMs, UI engineers, Concelier/Excititor operators, SREs monitoring ingestion health. +> **Scope:** Layout, RBAC, workflow, and observability for the Aggregation-Only Contract (AOC) dashboard that ships with Sprint 19. + +The Console AOC dashboard gives operators a live view of ingestion guardrails across all configured sources. It surfaces raw Concelier/Excititor health, highlights violations raised by `AOCWriteGuard`, and lets on-call staff trigger verification without leaving the browser. Use it alongside the [Aggregation-Only Contract reference](../ingestion/aggregation-only-contract.md) and the [architecture overview](../architecture/overview.md) when rolling out AOC changes. + +--- + +## 1 · Access & prerequisites + +- **Route:** `/console/sources` (dashboard) with contextual drawer routes `/console/sources/:sourceKey` and `/console/sources/:sourceKey/violations/:documentId`. +- **Feature flag:** `aocDashboard.enabled` (default `true` once Concelier WebService exposes `/aoc/verify`). Toggle is tenant-scoped to support phased rollout. +- **Scopes:** + - `ui.read` (base navigation) plus `advisory:read` to view Concelier ingestion metrics/violations. + - `vex:read` to see Excititor entries and run VEX verifications. + - `aoc:verify` to trigger guard runs from the dashboard action bar. + - `advisory:ingest` / `vex:ingest` **not** required; the dashboard uses read-only APIs. +- **Tenancy:** All data is filtered by the active tenant selector. Switching tenants re-fetches tiles and drill-down tables with tenant-scoped tokens. +- **Back-end contracts:** Requires Concelier/Excititor 19.x (AOC guards enabled) and Authority scopes updated per [Authority service docs](../ARCHITECTURE_AUTHORITY.md#new-aoc-scopes). 
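+
+The `aocDashboard.enabled` flag only defaults to `true` once Concelier WebService exposes `/aoc/verify`, so a quick pre-flight check is to issue the same call the **Run Verify** action (section 5) sends. A minimal sketch follows; the base URL, token variable, and JSON body shape are assumptions — only the `POST /aoc/verify` route, the `aoc:verify` scope, and the `since` window are documented on this page.
+
+```bash
+# Hypothetical values: CONCELIER_URL and TOKEN are not defined by this page.
+CONCELIER_URL="https://concelier.example.internal"
+
+# Re-verify documents ingested over the last 24 hours (the default window).
+# The body shape is illustrative, not a documented contract.
+curl -sS -X POST "$CONCELIER_URL/aoc/verify" \
+  -H "Authorization: Bearer $TOKEN" \
+  -H "Content-Type: application/json" \
+  -d '{"since": "24h"}'
+```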
+ +--- + +## 2 · Layout overview + +``` +┌────────────────────────────────────────────────────────────────────────────┐ +│ Header: tenant picker • live status pill • Last verify (“2h ago”) │ +├────────────────────────────────────────────────────────────────────────────┤ +│ Tile grid (4 per row) │ +│ ┌───── Concelier sources ─────┐ ┌────── Excititor sources ────────┐ │ +│ │ Red Hat | Ubuntu | OSV ... │ │ Vendor VEX | CSAF feeds ... │ │ +├────────────────────────────────────────────────────────────────────────────┤ +│ Violations & history table │ +│ • Filters: timeframe, source, ERR_AOC code, severity (warning/block) │ +│ • Columns: timestamp, source, code, summary, supersedes link, actions │ +├────────────────────────────────────────────────────────────────────────────┤ +│ Action bar: Run Verify • Download CSV • Open Concelier raw doc • Help │ +└────────────────────────────────────────────────────────────────────────────┘ +``` + +Tiles summarise the latest ingestion runs. The table and drawers provide drill-down views, and the action bar launches verifier workflows or exports evidence for audits. + +--- + +## 3 · Source tiles + +Each tile represents a Concelier or Excititor source and contains the fields below. + +| Field | Description | Thresholds & colours | +| ------ | ----------- | -------------------- | +| **Status badge** | Aggregated health computed from the latest job. | `Healthy` (green) when last job finished < 30 min ago and `violations24h = 0`; `Warning` (amber) when age ≥ 30 min or ≤ 5 violations; `Critical` (red) on any guard rejection (`ERR_AOC_00x`) or if job age > 2 h. | +| **Last ingest** | Timestamp and relative age of last successful append to `advisory_raw`/`vex_raw`. | Clicking opens job detail drawer. | +| **Violations (24 h)** | Count of guard failures grouped by `ERR_AOC` code across the last 24 hours. | Shows pill per code (e.g., `ERR_AOC_001 × 2`). | +| **Supersedes depth** | Average length of supersedes chain for the source over the last day. | Helps spot runaway revisions. | +| **Signature pass rate** | % of documents where signature/checksum verification succeeded. | Derived from `ingestion_signature_verified_total`. | +| **Latency P95** | Write latency recorded by ingestion spans / histograms. | Mirrors `ingestion_latency_seconds{quantile=0.95}`. | + +Tile menus expose quick actions: + +- **View history** – jumps to table filtered by the selected source. +- **Open metrics** – deep links to Grafana panel seeded with `source=` for `ingestion_write_total` and `aoc_violation_total`. +- **Download raw sample** – fetches the most recent document via `GET /advisories/raw/{id}` (or VEX equivalent) for debugging. + +--- + +## 4 · Violation drill-down workflow + +1. **Select a tile** or use table filters to focus on a source, timeframe, or `ERR_AOC` code. +2. **Inspect the violation row:** summary shows offending field, guard code, and document hash. +3. **Open detail drawer:** reveals provenance (source URI, signature info), supersedes chain, and raw JSON (redacted secrets). Drawer also lists linked `effective_finding_*` entries if Policy Engine has already materialised overlays. +4. **Remediate / annotate:** operators can add notes (stored as structured annotations) or flag as *acknowledged* (for on-call rotations). Annotations sync to Concelier audit logs. +5. **Escalate:** “Create incident” button opens the standard incident template pre-filled with context (requires `ui.incidents` scope). 
+ +The drill-down retains filter state, so back navigation returns to the scoped table without reloading the entire dashboard. + +--- + +## 5 · Verification & actions + +- **Run Verify:** calls `POST /aoc/verify` with the chosen `since` window (default 24 h). UI displays summary cards (documents checked, violations found, top codes) and stores reports for 7 days. Results include a downloadable JSON manifest mirroring CLI output. +- **Schedule verify:** schedule modal configures automated verification (daily/weekly) and optional email/Notifier hooks. +- **Export evidence:** CSV/JSON export buttons include tile metrics, verification summaries, and violation annotations—useful for audits. +- **Open in CLI:** copies `stella aoc verify --tenant --since ` for parity with automation scripts. + +All verify actions are scoped by tenant and recorded in Authority audit logs (`action=aoc.verify.ui`). + +--- + +## 6 · Metrics & observability + +The dashboard consumes the same metrics emitted by Concelier/Excititor (documented in the [AOC reference](../ingestion/aggregation-only-contract.md#9-observability-and-diagnostics)): + +- `ingestion_write_total{source,tenant,result}` – populates success/error sparklines beneath each tile. +- `aoc_violation_total{source,tenant,code}` – feeds violation pills and trend chart. +- `ingestion_signature_verified_total{source,result}` – renders signature pass-rate gauge. +- `ingestion_latency_seconds{source,quantile}` – used for latency badges and alert banners. +- `advisory_revision_count{source}` – displayed in supersedes depth tooltip. + +The page shows the correlation ID for each violation entry, matching structured logs emitted by Concelier and Excititor, enabling quick log pivoting. + +--- + +## 7 · Security & tenancy + +- Tokens are DPoP-bound; every API call includes the UI’s DPoP proof and inherits tenant scoping from Authority. +- Violations drawer hides sensitive fields (credentials, private keys) using the same redaction rules as Concelier events. +- Run Verify honours rate limits to avoid overloading ingestion services; repeated failures trigger a cool-down banner. +- The dashboard never exposes derived severity or policy status—only raw ingestion facts and guard results, preserving AOC separation of duties. + +--- + +## 8 · Offline & air-gap behaviour + +- In sealed/offline mode the dashboard switches to **“offline snapshot”** banner, reading from Offline Kit snapshots seeded via `ouk` imports. +- Verification requests queue until connectivity resumes; UI provides `Download script` to run `stella aoc verify` on a workstation and upload results later. +- Tiles display the timestamp of the last imported snapshot and flag when it exceeds the configured staleness threshold (default 48 h offline). +- CSV/JSON exports include checksums so operators can transfer evidence across air gaps securely. + +--- + +## 9 · Related references + +- [Aggregation-Only Contract reference](../ingestion/aggregation-only-contract.md) +- [Architecture overview](../architecture/overview.md) +- [Concelier architecture](../ARCHITECTURE_CONCELIER.md) +- [Excititor architecture](../ARCHITECTURE_EXCITITOR.md) +- [CLI AOC commands](../cli/cli-reference.md) + +--- + +## 10 · Compliance checklist + +- [ ] Dashboard wired to live AOC metrics (`ingestion_*`, `aoc_violation_total`). +- [ ] Verify action logs to Authority audit trail with tenant context. +- [ ] UI enforces read-only access to raw stores; no mutation endpoints invoked. 
+- [ ] Offline/air-gap mode documented and validated with Offline Kit snapshots. +- [ ] Violation exports include provenance and `ERR_AOC_00x` codes. +- [ ] Accessibility tested (WCAG 2.2 AA) for tiles, tables, and drawers. +- [ ] Screenshot/recording captured for Docs release notes (pending UI capture). + +--- + +*Last updated: 2025-10-26 (Sprint 19).* diff --git a/docs/ui/downloads.md b/docs/ui/downloads.md index f7cd1c4e..f741c56a 100644 --- a/docs/ui/downloads.md +++ b/docs/ui/downloads.md @@ -1,212 +1,212 @@ -# StellaOps Console - Downloads Manager - -> **Audience:** DevOps guild, Console engineers, enablement writers, and operators who promote releases or maintain offline mirrors. -> **Scope:** `/console/downloads` workspace covering artifact catalog, signed manifest plumbing, export status handling, CLI parity, automation hooks, and offline guidance (Sprint 23). - -The Downloads workspace centralises every artefact required to deploy or validate StellaOps in connected and air-gapped environments. It keeps Console operators aligned with release engineering by surfacing the signed downloads manifest, live export jobs, parity checks against Offline Kit bundles, and automation hooks that mirror the CLI experience. - ---- - -## 1 - Access and prerequisites - -- **Route:** `/console/downloads` (list) with detail drawer `/console/downloads/:artifactId`. -- **Scopes:** `downloads.read` (baseline) and `downloads.manage` for cancelling or expiring stale exports. Evidence bundles inherit the originating scope (`runs.read`, `findings.read`, etc.). -- **Dependencies:** Web gateway `/console/downloads` API (WEB-CONSOLE-23-005), DevOps manifest pipeline (`deploy/downloads/manifest.json`), Offline Kit metadata (`manifest/offline-manifest.json`), and export orchestrator `/console/exports`. -- **Feature flags:** `downloads.workspace.enabled`, `downloads.exportQueue`, `downloads.offlineParity`. -- **Tenancy:** Artefacts are tenant-agnostic except evidence bundles, which are tagged with originating tenant and require matching Authority scopes. - ---- - -## 2 - Workspace layout - -``` -+---------------------------------------------------------------+ -| Header: Snapshot timestamp - Manifest signature status | -+---------------------------------------------------------------+ -| Cards: Latest release - Offline kit parity - Export queue | -+---------------------------------------------------------------+ -| Tabs: Artefacts | Exports | Offline Kits | Webhooks | -+---------------------------------------------------------------+ -| Filter bar: Channel - Kind - Architecture - Scope tags | -+---------------------------------------------------------------+ -| Table (virtualised): Artifact | Channel | Digest | Status | -| Detail drawer: Metadata | Commands | Provenance | History | -+---------------------------------------------------------------+ -``` - -- **Snapshot banner:** shows `manifest.version`, `generatedAt`, and cosign verification state. If verification fails, the banner turns red and links to troubleshooting guidance. -- **Quick actions:** Copy manifest URL, download attestation bundle, trigger parity check, open CLI parity doc (`/docs/cli-vs-ui-parity.md`). -- **Filters:** allow narrowing by channel (`edge`, `stable`, `airgap`), artefact kind (`container.image`, `helm.chart`, `compose.bundle`, `offline.bundle`, `export.bundle`), architecture (`linux/amd64`, `linux/arm64`), and scope tags (`console`, `scheduler`, `authority`). 
- ---- - -## 3 - Artefact catalogue - -| Category | Artefacts surfaced | Source | Notes | -|----------|-------------------|--------|-------| -| **Core containers** | `stellaops/web-ui`, `stellaops/web`, `stellaops/concelier`, `stellaops/excititor`, `stellaops/scanner-*`, `stellaops/authority`, `stellaops/attestor`, `stellaops/scheduler-*` | `deploy/downloads/manifest.json` (`artifacts[].kind = "container.image"`) | Digest-only pulls with copy-to-clipboard `docker pull` and `oras copy` commands; badges show arch availability. | -| **Helm charts** | `deploy/helm/stellaops-*.tgz` plus values files | Manifest entries where `kind = "helm.chart"` | Commands reference `helm repo add` (online) and `helm install --values` (offline). UI links to values matrix in `/docs/install/helm-prod.md` when available. | -| **Compose bundles** | `deploy/compose/docker-compose.*.yaml`, `.env` seeds | `kind = "compose.bundle"` | Inline diff viewer highlights digest changes vs previous snapshot; `docker compose pull` command copies digest pins. | -| **Offline kit** | `stella-ops-offline-kit--.tar.gz` + signatures and manifest | Offline Kit metadata (`manifest/offline-manifest.json`) merged into downloads view | Drawer shows bundle size, signed manifest digest, cosign verification command (mirrors `/docs/24_OFFLINE_KIT.md`). | -| **Evidence exports** | Completed jobs from `/console/exports` (findings delta, policy explain, run evidence) | Export orchestrator job queue | Entries expire after retention window; UI exposes `stella runs export` and `stella findings export` parity buttons. | -| **Webhooks & parity** | `/downloads/hooks/subscribe` configs, CI parity reports | Manifest extras (`kind = "webhook.config"`, `kind = "parity.report"`) | Operators can download webhook payload templates and review the latest CLI parity check report generated by docs CI. | - ---- - -## 4 - Manifest structure - -The DevOps pipeline publishes a deterministic manifest at `deploy/downloads/manifest.json`, signed with the release Cosign key (`DOWNLOADS-CONSOLE-23-001`). The Console fetches it on workspace load and caches it with `If-None-Match` headers to avoid redundant pulls. The manifest schema: - -- **`version`** - monotonically increasing integer tied to pipeline run. -- **`generatedAt`** - ISO-8601 UTC timestamp. -- **`signature`** - URL to detached Cosign signature (`manifest.json.sig`). -- **`artifacts[]`** - ordered list keyed by `id`. - -Each artefact contains: - -| Field | Description | -|-------|-------------| -| `id` | Stable identifier (`::`). | -| `kind` | One of `container.image`, `helm.chart`, `compose.bundle`, `offline.bundle`, `export.bundle`, `webhook.config`, `parity.report`. | -| `channel` | `edge`, `stable`, or `airgap`. | -| `version` | Semantic or calendar version (for containers, matches release manifest). | -| `architectures` | Array of supported platforms (empty for arch-agnostic artefacts). | -| `digest` | SHA-256 for immutable artefacts; Compose bundles include file hash. | -| `sizeBytes` | File size (optional for export bundles that stream). | -| `downloadUrl` | HTTPS endpoint (registry, object store, or mirror). | -| `signatureUrl` | Detached signature (Cosign, DSSE, or attestation) if available. | -| `sbomUrl` | Optional SBOM pointer (CycloneDX JSON). | -| `attestationUrl` | Optional in-toto/SLSA attestation. | -| `docs` | Array of documentation links (e.g., `/docs/install/docker.md`). | -| `tags` | Free-form tags (e.g., `["console","ui","offline"]`). 
| - -### 4.1 Example excerpt - -```json -{ - "version": 42, - "generatedAt": "2025-10-27T04:00:00Z", - "signature": "https://downloads.stella-ops.org/manifest/manifest.json.sig", - "artifacts": [ - { - "id": "container.image:web-ui:2025.10.0-edge", - "kind": "container.image", - "channel": "edge", - "version": "2025.10.0-edge", - "architectures": ["linux/amd64", "linux/arm64"], - "digest": "sha256:38b225fa7767a5b94ebae4dae8696044126aac429415e93de514d5dd95748dcf", - "sizeBytes": 187563210, - "downloadUrl": "https://registry.stella-ops.org/v2/stellaops/web-ui/manifests/sha256:38b225fa7767a5b94ebae4dae8696044126aac429415e93de514d5dd95748dcf", - "signatureUrl": "https://downloads.stella-ops.org/signatures/web-ui-2025.10.0-edge.cosign.sig", - "sbomUrl": "https://downloads.stella-ops.org/sbom/web-ui-2025.10.0-edge.cdx.json", - "attestationUrl": "https://downloads.stella-ops.org/attestations/web-ui-2025.10.0-edge.intoto.jsonl", - "docs": ["/docs/install/docker.md", "/docs/security/console-security.md"], - "tags": ["console", "ui"] - }, - { - "id": "offline.bundle:ouk:2025.10.0-edge", - "kind": "offline.bundle", - "channel": "edge", - "version": "2025.10.0-edge", - "digest": "sha256:4f7d2f7a8d0cf4b5f3af689f6c74cd213f4c1b3a1d76d24f6f9f3d9075e51f90", - "downloadUrl": "https://downloads.stella-ops.org/offline/stella-ops-offline-kit-2025.10.0-edge.tar.gz", - "signatureUrl": "https://downloads.stella-ops.org/offline/stella-ops-offline-kit-2025.10.0-edge.tar.gz.sig", - "sbomUrl": "https://downloads.stella-ops.org/offline/offline-manifest-2025.10.0-edge.json", - "docs": ["/docs/24_OFFLINE_KIT.md"], - "tags": ["offline", "airgap"] - } - ] -} -``` - -Console caches the manifest hash and surfaces differences when a new version lands, helping operators confirm digests drift only when expected. - ---- - -## 5 - Download workflows and statuses - -| Status | Applies to | Behaviour | -|--------|------------|-----------| -| **Ready** | Immutable artefacts (images, Helm/Compose bundles, offline kit) | Commands available immediately. Digest, size, and last verification timestamp display in the table. | -| **Pending export** | Async exports queued via `/console/exports` | Shows job owner, scope, and estimated completion time. UI polls every 15 s and updates progress bar. | -| **Processing** | Long-running export (evidence bundle, large SBOM) | Drawer shows current stage (`collecting`, `compressing`, `signing`). Operators can cancel if they own the request and hold `downloads.manage`. | -| **Delivered** | Completed export within retention window | Provides download links, resume token, and parity snippet for CLI. | -| **Expired** | Export past retention or manually expired | Row grays out; clicking opens housekeeping guidance with CLI command to regenerate (`stella runs export --run `). | - -Exports inherit retention defaults defined in policy (`downloads.retentionDays`, min 3, max 30). Operators can override per tenant if they have the appropriate scope. - ---- - -## 6 - CLI parity and copy-to-clipboard - -- **Digest pulls:** Each container entry exposes `docker pull @` and `oras copy @ --to-dir ./downloads` buttons. Commands include architecture hints for multi-platform images. -- **Helm/Compose:** Buttons output `helm pull` / `helm install` with the manifest URL and `docker compose --env-file` commands referencing the downloaded bundle. 
-- **Offline kit:** Copy buttons produce the full verification sequence: - -```bash -curl -LO https://downloads.stella-ops.org/offline/stella-ops-offline-kit-2025.10.0-edge.tar.gz -curl -LO https://downloads.stella-ops.org/offline/stella-ops-offline-kit-2025.10.0-edge.tar.gz.sig -cosign verify-blob \ - --key https://stella-ops.org/keys/cosign.pub \ - --signature stella-ops-offline-kit-2025.10.0-edge.tar.gz.sig \ - stella-ops-offline-kit-2025.10.0-edge.tar.gz -``` - -- **Exports:** Drawer lists CLI equivalents (for example, `stella findings export --run `). When the CLI supports resume tokens, the command includes `--resume-token` from the manifest entry. -- **Automation:** Webhook tab copies `curl` snippets to subscribe to `/downloads/hooks/subscribe?topic=` and includes payload schema for integration tests. - -Parity buttons write commands to the clipboard and display a toast confirming scope hints (for example, `Requires downloads.read + tenant scope`). Accessibility shortcuts (`Shift+D`) trigger the primary copy action for keyboard users. - ---- - -## 7 - Offline and air-gap workflow - -- **Manifest sync:** Offline users download `manifest/offline-manifest.json` plus detached JWS and import it via `stella offline kit import`. Console highlights if the offline manifest predates the online manifest by more than 7 days. -- **Artefact staging:** The workspace enumerates removable media instructions (export to `./staging//`) and warns when artefacts exceed configured media size thresholds. -- **Mirrors:** Buttons copy `oras copy` commands that mirror images to an internal registry (`registry..internal`). Operators can toggle `--insecure-policy` if the destination uses custom trust roots. -- **Parity checks:** `downloads.offlineParity` flag surfaces the latest parity report verifying that Offline Kit contents match the downloads manifest digests. If diff detected, UI raises a banner linking to remediation steps. -- **Audit logging:** Every download command triggered from the UI emits `ui.download.commandCopied` with artifact ID, digest, and tenant. Logs feed the evidence locker so air-gap imports can demonstrate provenance. - ---- - -## 8 - Observability and quotas - -| Signal | Source | Description | -|--------|--------|-------------| -| `ui_download_manifest_refresh_seconds` | Console metrics | Measures time to fetch and verify manifest. Targets < 3 s. | -| `ui_download_export_queue_depth` | `/console/downloads` API | Number of pending exports (per tenant). Surfaces as card and Grafana panel. | -| `ui_download_command_copied_total` | Console logs | Count of copy actions by artifact type, used to gauge CLI parity adoption. | -| `downloads.export.duration` | Export orchestrator | Duration histograms for bundle generation; alerts if P95 > 60 s. | -| `downloads.quota.remaining` | Authority quota service | Anonymous users limited to 33 exports/day, verified users 333/day. Banner turns amber at 90 % usage as per platform policy. | - -Telemetry entries include correlation IDs that match backend manifest refresh logs and export job records to keep troubleshooting deterministic. - ---- - -## 9 - References - -- `/docs/ui/console-overview.md` - primary shell, tenant controls, SSE ticker. -- `/docs/ui/navigation.md` - route ownership and keyboard shortcuts. -- `/docs/ui/sbom-explorer.md` - export flows feeding the downloads queue. -- `/docs/ui/runs.md` - evidence bundle integration. -- `/docs/24_OFFLINE_KIT.md` - offline kit packaging and verification. 
-- `/docs/security/console-security.md` - scopes, CSP, and download token handling. -- `/docs/cli-vs-ui-parity.md` - CLI equivalence checks (pending). -- `deploy/releases/*.yaml` - source of container digests mirrored into the manifest. - ---- - -## 10 - Compliance checklist - -- [ ] Manifest schema documented (fields, signature, caching) and sample kept current. -- [ ] Artefact categories mapped to manifest entries and parity workflows. -- [ ] Download statuses, retention, and cancellation rules explained. -- [ ] CLI copy-to-clipboard commands mirror console actions with scope hints. -- [ ] Offline/air-gap parity workflow, mirror commands, and audit logging captured. -- [ ] Observability metrics and quota signalling documented. -- [ ] References cross-linked to adjacent docs (navigation, exports, offline kit). -- [ ] Accessibility shortcuts and copy-to-clipboard behaviour noted with compliance reminder. - ---- - -*Last updated: 2025-10-27 (Sprint 23).* +# StellaOps Console - Downloads Manager + +> **Audience:** DevOps guild, Console engineers, enablement writers, and operators who promote releases or maintain offline mirrors. +> **Scope:** `/console/downloads` workspace covering artifact catalog, signed manifest plumbing, export status handling, CLI parity, automation hooks, and offline guidance (Sprint 23). + +The Downloads workspace centralises every artefact required to deploy or validate StellaOps in connected and air-gapped environments. It keeps Console operators aligned with release engineering by surfacing the signed downloads manifest, live export jobs, parity checks against Offline Kit bundles, and automation hooks that mirror the CLI experience. + +--- + +## 1 - Access and prerequisites + +- **Route:** `/console/downloads` (list) with detail drawer `/console/downloads/:artifactId`. +- **Scopes:** `downloads.read` (baseline) and `downloads.manage` for cancelling or expiring stale exports. Evidence bundles inherit the originating scope (`runs.read`, `findings.read`, etc.). +- **Dependencies:** Web gateway `/console/downloads` API (WEB-CONSOLE-23-005), DevOps manifest pipeline (`deploy/downloads/manifest.json`), Offline Kit metadata (`manifest/offline-manifest.json`), and export orchestrator `/console/exports`. +- **Feature flags:** `downloads.workspace.enabled`, `downloads.exportQueue`, `downloads.offlineParity`. +- **Tenancy:** Artefacts are tenant-agnostic except evidence bundles, which are tagged with originating tenant and require matching Authority scopes. + +--- + +## 2 - Workspace layout + +``` ++---------------------------------------------------------------+ +| Header: Snapshot timestamp - Manifest signature status | ++---------------------------------------------------------------+ +| Cards: Latest release - Offline kit parity - Export queue | ++---------------------------------------------------------------+ +| Tabs: Artefacts | Exports | Offline Kits | Webhooks | ++---------------------------------------------------------------+ +| Filter bar: Channel - Kind - Architecture - Scope tags | ++---------------------------------------------------------------+ +| Table (virtualised): Artifact | Channel | Digest | Status | +| Detail drawer: Metadata | Commands | Provenance | History | ++---------------------------------------------------------------+ +``` + +- **Snapshot banner:** shows `manifest.version`, `generatedAt`, and cosign verification state. If verification fails, the banner turns red and links to troubleshooting guidance. 
+- **Quick actions:** Copy manifest URL, download attestation bundle, trigger parity check, open CLI parity doc (`/docs/cli-vs-ui-parity.md`). +- **Filters:** allow narrowing by channel (`edge`, `stable`, `airgap`), artefact kind (`container.image`, `helm.chart`, `compose.bundle`, `offline.bundle`, `export.bundle`), architecture (`linux/amd64`, `linux/arm64`), and scope tags (`console`, `scheduler`, `authority`). + +--- + +## 3 - Artefact catalogue + +| Category | Artefacts surfaced | Source | Notes | +|----------|-------------------|--------|-------| +| **Core containers** | `stellaops/web-ui`, `stellaops/web`, `stellaops/concelier`, `stellaops/excititor`, `stellaops/scanner-*`, `stellaops/authority`, `stellaops/attestor`, `stellaops/scheduler-*` | `deploy/downloads/manifest.json` (`artifacts[].kind = "container.image"`) | Digest-only pulls with copy-to-clipboard `docker pull` and `oras copy` commands; badges show arch availability. | +| **Helm charts** | `deploy/helm/stellaops-*.tgz` plus values files | Manifest entries where `kind = "helm.chart"` | Commands reference `helm repo add` (online) and `helm install --values` (offline). UI links to values matrix in `/docs/install/helm-prod.md` when available. | +| **Compose bundles** | `deploy/compose/docker-compose.*.yaml`, `.env` seeds | `kind = "compose.bundle"` | Inline diff viewer highlights digest changes vs previous snapshot; `docker compose pull` command copies digest pins. | +| **Offline kit** | `stella-ops-offline-kit--.tar.gz` + signatures and manifest | Offline Kit metadata (`manifest/offline-manifest.json`) merged into downloads view | Drawer shows bundle size, signed manifest digest, cosign verification command (mirrors `/docs/24_OFFLINE_KIT.md`). | +| **Evidence exports** | Completed jobs from `/console/exports` (findings delta, policy explain, run evidence) | Export orchestrator job queue | Entries expire after retention window; UI exposes `stella runs export` and `stella findings export` parity buttons. | +| **Webhooks & parity** | `/downloads/hooks/subscribe` configs, CI parity reports | Manifest extras (`kind = "webhook.config"`, `kind = "parity.report"`) | Operators can download webhook payload templates and review the latest CLI parity check report generated by docs CI. | + +--- + +## 4 - Manifest structure + +The DevOps pipeline publishes a deterministic manifest at `deploy/downloads/manifest.json`, signed with the release Cosign key (`DOWNLOADS-CONSOLE-23-001`). The Console fetches it on workspace load and caches it with `If-None-Match` headers to avoid redundant pulls. The manifest schema: + +- **`version`** - monotonically increasing integer tied to pipeline run. +- **`generatedAt`** - ISO-8601 UTC timestamp. +- **`signature`** - URL to detached Cosign signature (`manifest.json.sig`). +- **`artifacts[]`** - ordered list keyed by `id`. + +Each artefact contains: + +| Field | Description | +|-------|-------------| +| `id` | Stable identifier (`::`). | +| `kind` | One of `container.image`, `helm.chart`, `compose.bundle`, `offline.bundle`, `export.bundle`, `webhook.config`, `parity.report`. | +| `channel` | `edge`, `stable`, or `airgap`. | +| `version` | Semantic or calendar version (for containers, matches release manifest). | +| `architectures` | Array of supported platforms (empty for arch-agnostic artefacts). | +| `digest` | SHA-256 for immutable artefacts; Compose bundles include file hash. | +| `sizeBytes` | File size (optional for export bundles that stream). 
| +| `downloadUrl` | HTTPS endpoint (registry, object store, or mirror). | +| `signatureUrl` | Detached signature (Cosign, DSSE, or attestation) if available. | +| `sbomUrl` | Optional SBOM pointer (CycloneDX JSON). | +| `attestationUrl` | Optional in-toto/SLSA attestation. | +| `docs` | Array of documentation links (e.g., `/docs/install/docker.md`). | +| `tags` | Free-form tags (e.g., `["console","ui","offline"]`). | + +### 4.1 Example excerpt + +```json +{ + "version": 42, + "generatedAt": "2025-10-27T04:00:00Z", + "signature": "https://downloads.stella-ops.org/manifest/manifest.json.sig", + "artifacts": [ + { + "id": "container.image:web-ui:2025.10.0-edge", + "kind": "container.image", + "channel": "edge", + "version": "2025.10.0-edge", + "architectures": ["linux/amd64", "linux/arm64"], + "digest": "sha256:38b225fa7767a5b94ebae4dae8696044126aac429415e93de514d5dd95748dcf", + "sizeBytes": 187563210, + "downloadUrl": "https://registry.stella-ops.org/v2/stellaops/web-ui/manifests/sha256:38b225fa7767a5b94ebae4dae8696044126aac429415e93de514d5dd95748dcf", + "signatureUrl": "https://downloads.stella-ops.org/signatures/web-ui-2025.10.0-edge.cosign.sig", + "sbomUrl": "https://downloads.stella-ops.org/sbom/web-ui-2025.10.0-edge.cdx.json", + "attestationUrl": "https://downloads.stella-ops.org/attestations/web-ui-2025.10.0-edge.intoto.jsonl", + "docs": ["/docs/install/docker.md", "/docs/security/console-security.md"], + "tags": ["console", "ui"] + }, + { + "id": "offline.bundle:ouk:2025.10.0-edge", + "kind": "offline.bundle", + "channel": "edge", + "version": "2025.10.0-edge", + "digest": "sha256:4f7d2f7a8d0cf4b5f3af689f6c74cd213f4c1b3a1d76d24f6f9f3d9075e51f90", + "downloadUrl": "https://downloads.stella-ops.org/offline/stella-ops-offline-kit-2025.10.0-edge.tar.gz", + "signatureUrl": "https://downloads.stella-ops.org/offline/stella-ops-offline-kit-2025.10.0-edge.tar.gz.sig", + "sbomUrl": "https://downloads.stella-ops.org/offline/offline-manifest-2025.10.0-edge.json", + "docs": ["/docs/24_OFFLINE_KIT.md"], + "tags": ["offline", "airgap"] + } + ] +} +``` + +Console caches the manifest hash and surfaces differences when a new version lands, helping operators confirm digests drift only when expected. + +--- + +## 5 - Download workflows and statuses + +| Status | Applies to | Behaviour | +|--------|------------|-----------| +| **Ready** | Immutable artefacts (images, Helm/Compose bundles, offline kit) | Commands available immediately. Digest, size, and last verification timestamp display in the table. | +| **Pending export** | Async exports queued via `/console/exports` | Shows job owner, scope, and estimated completion time. UI polls every 15 s and updates progress bar. | +| **Processing** | Long-running export (evidence bundle, large SBOM) | Drawer shows current stage (`collecting`, `compressing`, `signing`). Operators can cancel if they own the request and hold `downloads.manage`. | +| **Delivered** | Completed export within retention window | Provides download links, resume token, and parity snippet for CLI. | +| **Expired** | Export past retention or manually expired | Row grays out; clicking opens housekeeping guidance with CLI command to regenerate (`stella runs export --run `). | + +Exports inherit retention defaults defined in policy (`downloads.retentionDays`, min 3, max 30). Operators can override per tenant if they have the appropriate scope. 
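+
+For operators who script the same flow outside the Console, the sketch below mirrors the fetch-verify-cache loop described under "Manifest structure" above. It is illustrative only: the manifest download URL and the signing key location are assumptions inferred from the signature URL in the example excerpt and the offline-kit verification command further down; substitute the endpoints your deployment actually publishes.
+
+```bash
+#!/usr/bin/env bash
+set -euo pipefail
+
+# Assumed endpoints - confirm against your deployment before relying on them.
+MANIFEST_URL="https://downloads.stella-ops.org/manifest/manifest.json"
+SIG_URL="https://downloads.stella-ops.org/manifest/manifest.json.sig"
+COSIGN_KEY="https://stella-ops.org/keys/cosign.pub"
+
+# Conditional fetch mirroring the Console's If-None-Match caching.
+etag="$(cat .manifest.etag 2>/dev/null || true)"
+tmp="$(mktemp)"
+headers="$(mktemp)"
+status="$(curl -sS -o "${tmp}" -D "${headers}" -w '%{http_code}' \
+  -H "If-None-Match: ${etag}" "${MANIFEST_URL}")"
+if [ "${status}" = "304" ]; then
+  echo "manifest unchanged; keeping cached copy"
+  exit 0
+elif [ "${status}" != "200" ]; then
+  echo "unexpected status ${status}" >&2
+  exit 1
+fi
+mv "${tmp}" manifest.json
+awk 'tolower($1)=="etag:" {gsub(/\r/,""); print $2}' "${headers}" > .manifest.etag
+
+# Verify the detached Cosign signature before trusting any digest in the file.
+curl -fsSLo manifest.json.sig "${SIG_URL}"
+cosign verify-blob \
+  --key "${COSIGN_KEY}" \
+  --signature manifest.json.sig \
+  manifest.json
+echo "manifest refreshed and signature verified"
+```
+
+Verifying the signature before reading any digest keeps scripted consumers aligned with the Console behaviour: an unverified manifest is never used to drive pulls.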
+ +--- + +## 6 - CLI parity and copy-to-clipboard + +- **Digest pulls:** Each container entry exposes `docker pull @` and `oras copy @ --to-dir ./downloads` buttons. Commands include architecture hints for multi-platform images. +- **Helm/Compose:** Buttons output `helm pull` / `helm install` with the manifest URL and `docker compose --env-file` commands referencing the downloaded bundle. +- **Offline kit:** Copy buttons produce the full verification sequence: + +```bash +curl -LO https://downloads.stella-ops.org/offline/stella-ops-offline-kit-2025.10.0-edge.tar.gz +curl -LO https://downloads.stella-ops.org/offline/stella-ops-offline-kit-2025.10.0-edge.tar.gz.sig +cosign verify-blob \ + --key https://stella-ops.org/keys/cosign.pub \ + --signature stella-ops-offline-kit-2025.10.0-edge.tar.gz.sig \ + stella-ops-offline-kit-2025.10.0-edge.tar.gz +``` + +- **Exports:** Drawer lists CLI equivalents (for example, `stella findings export --run `). When the CLI supports resume tokens, the command includes `--resume-token` from the manifest entry. +- **Automation:** Webhook tab copies `curl` snippets to subscribe to `/downloads/hooks/subscribe?topic=` and includes payload schema for integration tests. + +Parity buttons write commands to the clipboard and display a toast confirming scope hints (for example, `Requires downloads.read + tenant scope`). Accessibility shortcuts (`Shift+D`) trigger the primary copy action for keyboard users. + +--- + +## 7 - Offline and air-gap workflow + +- **Manifest sync:** Offline users download `manifest/offline-manifest.json` plus detached JWS and import it via `stella offline kit import`. Console highlights if the offline manifest predates the online manifest by more than 7 days. +- **Artefact staging:** The workspace enumerates removable media instructions (export to `./staging//`) and warns when artefacts exceed configured media size thresholds. +- **Mirrors:** Buttons copy `oras copy` commands that mirror images to an internal registry (`registry..internal`). Operators can toggle `--insecure-policy` if the destination uses custom trust roots. +- **Parity checks:** `downloads.offlineParity` flag surfaces the latest parity report verifying that Offline Kit contents match the downloads manifest digests. If diff detected, UI raises a banner linking to remediation steps. +- **Audit logging:** Every download command triggered from the UI emits `ui.download.commandCopied` with artifact ID, digest, and tenant. Logs feed the evidence locker so air-gap imports can demonstrate provenance. + +--- + +## 8 - Observability and quotas + +| Signal | Source | Description | +|--------|--------|-------------| +| `ui_download_manifest_refresh_seconds` | Console metrics | Measures time to fetch and verify manifest. Targets < 3 s. | +| `ui_download_export_queue_depth` | `/console/downloads` API | Number of pending exports (per tenant). Surfaces as card and Grafana panel. | +| `ui_download_command_copied_total` | Console logs | Count of copy actions by artifact type, used to gauge CLI parity adoption. | +| `downloads.export.duration` | Export orchestrator | Duration histograms for bundle generation; alerts if P95 > 60 s. | +| `downloads.quota.remaining` | Authority quota service | Anonymous users limited to 33 exports/day, verified users 333/day. Banner turns amber at 90 % usage as per platform policy. | + +Telemetry entries include correlation IDs that match backend manifest refresh logs and export job records to keep troubleshooting deterministic. 
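+
+These signals can also be spot-checked from a terminal or CI job. The sketch below is a minimal example, not a reference query set: it assumes a reachable Prometheus instance at `http://prometheus:9090`, that the export-duration histogram is scraped under the flattened name `downloads_export_duration_seconds`, and that queue depth carries a `tenant` label - all of which may differ in your telemetry stack.
+
+```bash
+#!/usr/bin/env bash
+set -euo pipefail
+
+PROM="http://prometheus:9090"   # assumed address of the telemetry Prometheus
+
+# P95 export-bundle generation time; the table above alerts when this exceeds 60 s.
+curl -fsS -G "${PROM}/api/v1/query" --data-urlencode \
+  'query=histogram_quantile(0.95, sum(rate(downloads_export_duration_seconds_bucket[5m])) by (le))' \
+  | jq -r '.data.result[0].value[1] // "no data"'
+
+# Pending exports per tenant (ui_download_export_queue_depth).
+curl -fsS -G "${PROM}/api/v1/query" --data-urlencode \
+  'query=sum by (tenant) (ui_download_export_queue_depth)' \
+  | jq -r '.data.result[] | "\(.metric.tenant): \(.value[1])"'
+```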
+ +--- + +## 9 - References + +- `/docs/ui/console-overview.md` - primary shell, tenant controls, SSE ticker. +- `/docs/ui/navigation.md` - route ownership and keyboard shortcuts. +- `/docs/ui/sbom-explorer.md` - export flows feeding the downloads queue. +- `/docs/ui/runs.md` - evidence bundle integration. +- `/docs/24_OFFLINE_KIT.md` - offline kit packaging and verification. +- `/docs/security/console-security.md` - scopes, CSP, and download token handling. +- `/docs/cli-vs-ui-parity.md` - CLI equivalence checks (pending). +- `deploy/releases/*.yaml` - source of container digests mirrored into the manifest. + +--- + +## 10 - Compliance checklist + +- [ ] Manifest schema documented (fields, signature, caching) and sample kept current. +- [ ] Artefact categories mapped to manifest entries and parity workflows. +- [ ] Download statuses, retention, and cancellation rules explained. +- [ ] CLI copy-to-clipboard commands mirror console actions with scope hints. +- [ ] Offline/air-gap parity workflow, mirror commands, and audit logging captured. +- [ ] Observability metrics and quota signalling documented. +- [ ] References cross-linked to adjacent docs (navigation, exports, offline kit). +- [ ] Accessibility shortcuts and copy-to-clipboard behaviour noted with compliance reminder. + +--- + +*Last updated: 2025-10-27 (Sprint 23).* diff --git a/docs/ui/findings.md b/docs/ui/findings.md index 776b6713..2812ae7e 100644 --- a/docs/ui/findings.md +++ b/docs/ui/findings.md @@ -1,179 +1,179 @@ -# StellaOps Console - Findings - -> **Audience:** Policy Guild, Console UX team, security analysts, customer enablement. -> **Scope:** Findings list UX, filters, saved views, explain drawer, exports, CLI parity, real-time updates, and offline considerations for Sprint 23. - -The Findings workspace visualises materialised policy verdicts produced by the Policy Engine. It lets analysts triage affected components, inspect explain traces, compare policy views, and export evidence while respecting Aggregation-Only guardrails. - ---- - -## 1. Access and prerequisites - -- **Route:** `/console/findings` with optional panel parameters (e.g., `/console/findings?panel=explain&finding=`). -- **Scopes:** `findings.read` (list), `policy:runs` (view run metadata), `policy:simulate` (stage simulations), `downloads.read` (export bundles). -- **Prerequisites:** Policy Engine v2 (`policy_run` and `effective_finding_*` endpoints), Concelier/Excititor feeds for provenance, SBOM Service for component metadata. -- **Feature flags:** `findings.explain.enabled`, `findings.savedViews.enabled`, `findings.simulationDiff.enabled`. -- **Tenancy:** All queries include tenant context; cross-tenant comparisons require explicit admin scope and render split-pane view. - ---- - -## 2. 
Layout overview - -``` -+-------------------------------------------------------------------+ -| Header: Tenant badge - policy selector - global filters - actions | -+-------------------------------------------------------------------+ -| Top row cards: Affected assets - Critical count - KEV count | -+-------------------------------------------------------------------+ -| Findings grid (virtualised) | -| Columns: Status | Severity | Component | Policy | Source | Age | -| Row badges: KEV, Quieted, Override, Simulation only | -+-------------------------------------------------------------------+ -| Right drawer / full view tabs: Summary | Explain | Evidence | Run | -+-------------------------------------------------------------------+ -``` - -The policy selector includes Active, Staged, and Simulation snapshots. Switching snapshots triggers diff banners to highlight changes. - ---- - -## 3. Filters and saved views - -| Filter | Description | Notes | -|--------|-------------|-------| -| **Status** | `affected`, `at_risk`, `quieted`, `fixed`, `not_applicable`, `mitigated`. | Status definitions align with Policy Engine taxonomy. | -| **Severity** | Critical, High, Medium, Low, Informational, Untriaged. | Derived from policy scoring; UI displays numeric score tooltip. | -| **KEV** | Toggle to show only Known Exploited Vulnerabilities. | Pulls from Concelier enrichment. | -| **Policy** | Active, Staged, Simulation snapshots. | Simulation requires recent run; otherwise greyed out. | -| **Component** | PURL or substring search. | Autocomplete draws from current tenant findings. | -| **SBOM** | Filter by image digest or SBOM ID. | Includes quick links to SBOM Explorer. | -| **Tag** | Team or environment tags emitted by Policy Engine (`tags[]`). | Supports multi-select. | -| **Run window** | `Last 24h`, `Last 7d`, `Custom range`. | Applies to run timestamp. | -| **Explain hints** | Filter by explain artefact (rule ID, justification, VEX provider). | Uses server-side filter parameters. | - -Saved views persist filter combinations per tenant and policy. Users can mark views as shared; shared views appear in the left rail with owner and last updated timestamp. Keyboard shortcuts align with global presets (`Cmd+1-9 / Ctrl+1-9`). - ---- - -## 4. Findings grid - -| Column | Details | -|--------|---------| -| **Status** | Badge with tooltip describing resolution path (e.g., "Affected - blocked by policy rule R-105"). Quieted findings show a muted badge with expiry. | -| **Severity** | Numeric score and label. Hover reveals scoring formula and evidence sources. | -| **Component** | PURL plus human-friendly name. Includes SBOM badge linking to SBOM Explorer detail. | -| **Policy** | Policy name + revision digest; clicking opens policy diff in new tab. | -| **Source signals** | Icons for VEX, Advisory, Runtime overlays. Hover shows counts and last updated timestamps. | -| **Age** | Time since finding was last evaluated; colour-coded when exceeding SLA. | - -Row indicators: - -- **KEV** badge when Concelier marks the vulnerability as exploited. -- **Override** badge when policy override or exemption applied. -- **Simulation only** badge when viewing simulation snapshot; warns that finding is not yet active. -- **Determinism alert** icon if latest run reported a determinism mismatch (links to run detail). - -Bulk actions (multi-select): - -- `Open explains` (launch explain drawer for up to 10 findings). -- `Export CSV/JSON`. -- `Copy CLI` commands for batch explains (`stella findings explain --batch file`). 
-- `Create ticket` (integrates with integrations configured under Admin). - ---- - -## 5. Explain drawer - -Tabs inside the explain drawer: - -1. **Summary** - status, severity, policy decision, rule ID, last evaluated timestamp, SBOM link, run ID. -2. **Rule chain** - ordered list of policy rules triggered; each entry shows rule ID, name, action (block/warn/quiet), score contribution, and condition snippet. -3. **Evidence** - references to Concelier advisories, Excititor consensus, runtime signals, and overrides. Evidence entries link to their respective explorers. -4. **VEX impact** - table of VEX claims considered; displays provider, status, justification, acceptance (accepted/ignored), weight. -5. **History** - timeline of state transitions (affected -> quieted -> mitigated) with timestamps and operators (if override applied). -6. **Raw trace** - canonical JSON trace from Policy Engine (read-only). CLI parity snippet: - - `stella findings explain --policy --finding --format json`. - -Explain drawer includes copy-to-clipboard buttons for rule chain and evidence JSON to support audit workflows. When sealed mode is active, a banner highlights which evidence was sourced from cached data. - ---- - -## 6. Simulations and comparisons - -- Simulation toggle lets analysts compare Active vs Staged/Sandbox policies. -- Diff banner summarises added, removed, and changed findings. -- Side-by-side view shows baseline vs simulation verdicts with change badges (`added`, `removed`, `severity up`, `severity down`). -- CLI parity callout: `stella policy simulate --policy --sbom --format diff`. -- Simulation results persist for 7 days; stale simulations prompt re-run recommendation. - ---- - -## 7. Exports and automation - -- Immediate exports: CSV, JSON, Markdown summary for selected findings. -- Scheduled exports: asynchronous job to generate full tenant report (JSON + CSV) with manifest digests. -- Explain bundle export packages traces for a set of findings; includes manifest and hash for offline review. -- CLI parity: - - `stella findings ls --policy --format json --output findings.json` - - `stella findings export --policy --format csv --output findings.csv` - - `stella findings explain --batch batch.txt --output explains/` -- Automation: webhook copy button for `/downloads/hooks/subscribe?topic=findings.report.ready`. - ---- - -## 8. Real-time updates and observability - -- SSE channel `/console/findings/stream` pushes new findings, status changes, and quieted expirations; UI animates affected rows. -- Header cards show metrics: `findings_critical_total`, `findings_quieted_total`, `findings_kev_total`. -- Run ticker lists latest policy runs with status, duration, determinism hash. -- Error banners include correlation IDs linking to Policy Engine run logs. -- Metrics drill-down links to dashboards (OpenTelemetry, Prometheus). - ---- - -## 9. Offline and air-gap behaviour - -- Offline banner indicates snapshot ID and timestamp used for findings. -- Explain drawer notes when evidence references offline bundles; suggests importing updated advisories/VEX to refresh results. -- Exports default to local storage paths; UI provides manual transfer instructions. -- CLI examples switch to include `--sealed` or `--offline` flags. -- Tenant selector hides tenants without corresponding offline findings data to avoid partial views. - ---- - -## 10. 
Screenshot coordination - -- Placeholders: - - `![Findings grid placeholder](../assets/ui/findings/grid-placeholder.png)` - - `![Explain drawer placeholder](../assets/ui/findings/explain-placeholder.png)` -- Coordinate with Console Guild (Slack `#console-screenshots`, entry 2025-10-26) to capture updated light and dark theme shots before release. - ---- - -## 11. References - -- `/docs/ui/console-overview.md` - shell, filters, tenant model. -- `/docs/ui/navigation.md` - route list, deep-link schema. -- `/docs/ui/advisories-and-vex.md` - advisory and VEX context feeding findings. -- `/docs/ui/policies.md` (pending) - editor and policy lifecycle. -- `/docs/policy/overview.md` - Policy Engine outputs. -- `/docs/policy/runs.md` - run orchestration. -- `/docs/cli/policy.md` - CLI parity for findings commands. - ---- - -## 12. Compliance checklist - -- [ ] Filters and saved view behaviour documented with CLI alignment. -- [ ] Findings grid columns, badges, and bulk actions captured. -- [ ] Explain drawer walkthrough includes rule chain, evidence, and raw trace. -- [ ] Simulation diff behaviour and CLI callouts described. -- [ ] Exports (immediate and scheduled) plus webhook integration covered. -- [ ] Real-time updates, metrics, and error correlation documented. -- [ ] Offline behaviour and screenshot coordination noted. -- [ ] References validated. - ---- - -*Last updated: 2025-10-26 (Sprint 23).* - +# StellaOps Console - Findings + +> **Audience:** Policy Guild, Console UX team, security analysts, customer enablement. +> **Scope:** Findings list UX, filters, saved views, explain drawer, exports, CLI parity, real-time updates, and offline considerations for Sprint 23. + +The Findings workspace visualises materialised policy verdicts produced by the Policy Engine. It lets analysts triage affected components, inspect explain traces, compare policy views, and export evidence while respecting Aggregation-Only guardrails. + +--- + +## 1. Access and prerequisites + +- **Route:** `/console/findings` with optional panel parameters (e.g., `/console/findings?panel=explain&finding=`). +- **Scopes:** `findings.read` (list), `policy:runs` (view run metadata), `policy:simulate` (stage simulations), `downloads.read` (export bundles). +- **Prerequisites:** Policy Engine v2 (`policy_run` and `effective_finding_*` endpoints), Concelier/Excititor feeds for provenance, SBOM Service for component metadata. +- **Feature flags:** `findings.explain.enabled`, `findings.savedViews.enabled`, `findings.simulationDiff.enabled`. +- **Tenancy:** All queries include tenant context; cross-tenant comparisons require explicit admin scope and render split-pane view. + +--- + +## 2. Layout overview + +``` ++-------------------------------------------------------------------+ +| Header: Tenant badge - policy selector - global filters - actions | ++-------------------------------------------------------------------+ +| Top row cards: Affected assets - Critical count - KEV count | ++-------------------------------------------------------------------+ +| Findings grid (virtualised) | +| Columns: Status | Severity | Component | Policy | Source | Age | +| Row badges: KEV, Quieted, Override, Simulation only | ++-------------------------------------------------------------------+ +| Right drawer / full view tabs: Summary | Explain | Evidence | Run | ++-------------------------------------------------------------------+ +``` + +The policy selector includes Active, Staged, and Simulation snapshots. 
Switching snapshots triggers diff banners to highlight changes. + +--- + +## 3. Filters and saved views + +| Filter | Description | Notes | +|--------|-------------|-------| +| **Status** | `affected`, `at_risk`, `quieted`, `fixed`, `not_applicable`, `mitigated`. | Status definitions align with Policy Engine taxonomy. | +| **Severity** | Critical, High, Medium, Low, Informational, Untriaged. | Derived from policy scoring; UI displays numeric score tooltip. | +| **KEV** | Toggle to show only Known Exploited Vulnerabilities. | Pulls from Concelier enrichment. | +| **Policy** | Active, Staged, Simulation snapshots. | Simulation requires recent run; otherwise greyed out. | +| **Component** | PURL or substring search. | Autocomplete draws from current tenant findings. | +| **SBOM** | Filter by image digest or SBOM ID. | Includes quick links to SBOM Explorer. | +| **Tag** | Team or environment tags emitted by Policy Engine (`tags[]`). | Supports multi-select. | +| **Run window** | `Last 24h`, `Last 7d`, `Custom range`. | Applies to run timestamp. | +| **Explain hints** | Filter by explain artefact (rule ID, justification, VEX provider). | Uses server-side filter parameters. | + +Saved views persist filter combinations per tenant and policy. Users can mark views as shared; shared views appear in the left rail with owner and last updated timestamp. Keyboard shortcuts align with global presets (`Cmd+1-9 / Ctrl+1-9`). + +--- + +## 4. Findings grid + +| Column | Details | +|--------|---------| +| **Status** | Badge with tooltip describing resolution path (e.g., "Affected - blocked by policy rule R-105"). Quieted findings show a muted badge with expiry. | +| **Severity** | Numeric score and label. Hover reveals scoring formula and evidence sources. | +| **Component** | PURL plus human-friendly name. Includes SBOM badge linking to SBOM Explorer detail. | +| **Policy** | Policy name + revision digest; clicking opens policy diff in new tab. | +| **Source signals** | Icons for VEX, Advisory, Runtime overlays. Hover shows counts and last updated timestamps. | +| **Age** | Time since finding was last evaluated; colour-coded when exceeding SLA. | + +Row indicators: + +- **KEV** badge when Concelier marks the vulnerability as exploited. +- **Override** badge when policy override or exemption applied. +- **Simulation only** badge when viewing simulation snapshot; warns that finding is not yet active. +- **Determinism alert** icon if latest run reported a determinism mismatch (links to run detail). + +Bulk actions (multi-select): + +- `Open explains` (launch explain drawer for up to 10 findings). +- `Export CSV/JSON`. +- `Copy CLI` commands for batch explains (`stella findings explain --batch file`). +- `Create ticket` (integrates with integrations configured under Admin). + +--- + +## 5. Explain drawer + +Tabs inside the explain drawer: + +1. **Summary** - status, severity, policy decision, rule ID, last evaluated timestamp, SBOM link, run ID. +2. **Rule chain** - ordered list of policy rules triggered; each entry shows rule ID, name, action (block/warn/quiet), score contribution, and condition snippet. +3. **Evidence** - references to Concelier advisories, Excititor consensus, runtime signals, and overrides. Evidence entries link to their respective explorers. +4. **VEX impact** - table of VEX claims considered; displays provider, status, justification, acceptance (accepted/ignored), weight. +5. 
**History** - timeline of state transitions (affected -> quieted -> mitigated) with timestamps and operators (if override applied). +6. **Raw trace** - canonical JSON trace from Policy Engine (read-only). CLI parity snippet: + - `stella findings explain --policy --finding --format json`. + +Explain drawer includes copy-to-clipboard buttons for rule chain and evidence JSON to support audit workflows. When sealed mode is active, a banner highlights which evidence was sourced from cached data. + +--- + +## 6. Simulations and comparisons + +- Simulation toggle lets analysts compare Active vs Staged/Sandbox policies. +- Diff banner summarises added, removed, and changed findings. +- Side-by-side view shows baseline vs simulation verdicts with change badges (`added`, `removed`, `severity up`, `severity down`). +- CLI parity callout: `stella policy simulate --policy --sbom --format diff`. +- Simulation results persist for 7 days; stale simulations prompt re-run recommendation. + +--- + +## 7. Exports and automation + +- Immediate exports: CSV, JSON, Markdown summary for selected findings. +- Scheduled exports: asynchronous job to generate full tenant report (JSON + CSV) with manifest digests. +- Explain bundle export packages traces for a set of findings; includes manifest and hash for offline review. +- CLI parity: + - `stella findings ls --policy --format json --output findings.json` + - `stella findings export --policy --format csv --output findings.csv` + - `stella findings explain --batch batch.txt --output explains/` +- Automation: webhook copy button for `/downloads/hooks/subscribe?topic=findings.report.ready`. + +--- + +## 8. Real-time updates and observability + +- SSE channel `/console/findings/stream` pushes new findings, status changes, and quieted expirations; UI animates affected rows. +- Header cards show metrics: `findings_critical_total`, `findings_quieted_total`, `findings_kev_total`. +- Run ticker lists latest policy runs with status, duration, determinism hash. +- Error banners include correlation IDs linking to Policy Engine run logs. +- Metrics drill-down links to dashboards (OpenTelemetry, Prometheus). + +--- + +## 9. Offline and air-gap behaviour + +- Offline banner indicates snapshot ID and timestamp used for findings. +- Explain drawer notes when evidence references offline bundles; suggests importing updated advisories/VEX to refresh results. +- Exports default to local storage paths; UI provides manual transfer instructions. +- CLI examples switch to include `--sealed` or `--offline` flags. +- Tenant selector hides tenants without corresponding offline findings data to avoid partial views. + +--- + +## 10. Screenshot coordination + +- Placeholders: + - `![Findings grid placeholder](../assets/ui/findings/grid-placeholder.png)` + - `![Explain drawer placeholder](../assets/ui/findings/explain-placeholder.png)` +- Coordinate with Console Guild (Slack `#console-screenshots`, entry 2025-10-26) to capture updated light and dark theme shots before release. + +--- + +## 11. References + +- `/docs/ui/console-overview.md` - shell, filters, tenant model. +- `/docs/ui/navigation.md` - route list, deep-link schema. +- `/docs/ui/advisories-and-vex.md` - advisory and VEX context feeding findings. +- `/docs/ui/policies.md` (pending) - editor and policy lifecycle. +- `/docs/policy/overview.md` - Policy Engine outputs. +- `/docs/policy/runs.md` - run orchestration. +- `/docs/cli/policy.md` - CLI parity for findings commands. + +--- + +## 12. 
Compliance checklist + +- [ ] Filters and saved view behaviour documented with CLI alignment. +- [ ] Findings grid columns, badges, and bulk actions captured. +- [ ] Explain drawer walkthrough includes rule chain, evidence, and raw trace. +- [ ] Simulation diff behaviour and CLI callouts described. +- [ ] Exports (immediate and scheduled) plus webhook integration covered. +- [ ] Real-time updates, metrics, and error correlation documented. +- [ ] Offline behaviour and screenshot coordination noted. +- [ ] References validated. + +--- + +*Last updated: 2025-10-26 (Sprint 23).* + diff --git a/docs/ui/navigation.md b/docs/ui/navigation.md index 066558ab..6bd6b473 100644 --- a/docs/ui/navigation.md +++ b/docs/ui/navigation.md @@ -1,163 +1,163 @@ -# StellaOps Console - Navigation - -> **Audience:** Console UX writers, UI engineers, QA, and enablement teams. -> **Scope:** Primary route map, layout conventions, keyboard shortcuts, deep-link patterns, and tenant context switching for the StellaOps Console (Sprint 23). - -The navigation framework keeps Console workflows predictable across tenants and deployment modes. This guide explains how the global shell, feature routes, and context tokens cooperate so operators can jump between findings, SBOMs, advisories, policies, and runs without losing scope. - ---- - -## 1. Information Architecture - -### 1.1 Primary routes - -| Route pattern | Module owner | Purpose | Required scopes (minimum) | Core services | -|---------------|--------------|---------|---------------------------|---------------| -| `/console/dashboard` | Web gateway | Landing KPIs, feed age, queue depth, alerts | `ui.read` | Web, Scheduler WebService, Concelier WebService, Excititor WebService | -| `/console/findings` | Policy Engine | Aggregated findings, explain drawer, export | `findings.read` | Policy Engine, Concelier WebService, SBOM Service | -| `/console/sbom` | SBOM Service | Catalog view, component graph, overlays | `sbom.read` | SBOM Service, Policy Engine (overlays) | -| `/console/advisories` | Concelier | Advisory aggregation with provenance banners | `advisory.read` | Concelier WebService | -| `/console/vex` | Excititor | VEX aggregation, consensus, conflicts | `vex.read` | Excititor WebService | -| `/console/runs` | Scheduler | Run list, live progress, evidence downloads | `runs.read` | Scheduler WebService, Policy Engine, Scanner WebService | -| `/console/policies` | Policy Engine | Editor, simulations, approvals | `policy.read` (read) / `policy.write` (edit) | Policy Engine, Authority | -| `/console/downloads` | DevOps | Signed artifacts, Offline Kit parity checklist | `downloads.read` | DevOps manifest API, Offline Kit | -| `/console/admin` | Authority | Tenants, roles, tokens, integrations | `ui.admin` (plus scoped `authority:*`) | Authority | -| `/console/help` | Docs Guild | Guides, tours, release notes | `ui.read` | Docs static assets | - -### 1.2 Secondary navigation elements - -- **Left rail:** highlights the active top-level route, exposes quick metrics, and shows pinned saved views. Keyboard focus cycles through rail entries with `Tab`/`Shift+Tab`. -- **Breadcrumb bar:** renders `Home / Module / Detail` format. Detail crumbs include IDs and titles for shareable context (for example, `Findings / High Severity / CVE-2025-1234`). -- **Action shelf:** right-aligned controls for context actions (export, verify, retry). Buttons disable automatically if the current subject lacks the requisite scope. - ---- - -## 2. 
Command Palette and Search - -- **Trigger:** `Ctrl/Cmd + K`. Palette opens in place, keeps focus, and announces results via ARIA live region. -- **Capabilities:** jump to routes, saved views, tenants, recent entities (findings, SBOMs, advisories), and command actions (for example, "Start verification", "Open explain drawer"). -- **Result tokens:** palette entries carry metadata (`type`, `tenant`, `filters`). Selecting an item updates the URL and applies stored filters without a full reload. -- **Offline fallback:** in sealed/offline mode, palette restricts actions to cached routes and saved views; remote-only items show a grayed-out badge. - ---- - -## 3. Global Filters and Context Chips - -| Control | Shortcut | Persistence | Notes | -|---------|----------|-------------|-------| -| **Tenant picker** | `Ctrl/Cmd + T` | SessionStorage + URL `tenant` query | Issues fresh Authority token, invalidates caches, emits `ui.tenant.switch` log. | -| **Filter tray** | `Shift + F` | IndexedDB (per tenant) + URL query (`since`, `severity`, `tags`, `source`, `status`, `policyView`) | Applies instantly to compatible routes; incompatible filters show a reset suggestion. | -| **Component search** | `/` when filters closed | URL `component` query | Context-aware; scopes results to current tenant and module. | -| **Time window** | `Ctrl/Cmd + Shift + 1-4` | URL `since`/`until`, palette preset | Mapped to preset windows: 24 h, 7 d, 30 d, custom. | - -Context chips appear beneath page titles summarising active filters (for example, `Tenant: west-prod`, `Severity: Critical+High`, `Time: Last 7 days`). Removing a chip updates the tray and URL atomically. - ---- - -## 4. Keyboard Shortcut Matrix - -| Scope | Shortcut (Mac / Windows) | Action | Notes | -|-------|--------------------------|--------|-------| -| Global | `Cmd+K / Ctrl+K` | Open command palette | Accessible from any route except modal dialogs. | -| Global | `Cmd+T / Ctrl+T` | Open tenant switcher | Requires `ui.read`. Confirm selection with `Enter`; `Esc` cancels without switching. | -| Global | `Shift+F` | Toggle global filter tray | Focus lands on first filter control. | -| Global | `Cmd+1-9 / Ctrl+1-9` | Load saved view preset | Each preset bound per tenant; non-assigned keys show tooltip. | -| Global | `?` | Show keyboard reference overlay | Overlay lists context-specific shortcuts; closes with `Esc`. | -| Findings module | `Cmd+/ / Ctrl+/` | Focus explain search | Works when explain drawer is open. | -| SBOM module | `Cmd+G / Ctrl+G` | Toggle graph overlays | Persists per session. | -| Advisories & VEX | `Cmd+Opt+F / Ctrl+Alt+F` | Focus provider filter | Highlights provider chip strip. | -| Runs module | `Cmd+R / Ctrl+R` | Refresh SSE snapshot | Schedules soft refresh (no hard reload). | -| Policies module | `Cmd+S / Ctrl+S` | Save draft (if edit rights) | Mirrors Policy Editor behaviour. | - -Shortcut handling follows WCAG 2.2 best practices: all accelerators are remappable via Settings -> Accessibility -> Keyboard shortcuts, and the overlay documents platform differences. - ---- - -## 5. Deep-Link Patterns - -### 5.1 URL schema - -Console URLs adopt the format: - -``` -/console/[/:id][/:tab]?tenant=&since=&severity=&view=&panel=&component= -``` - -- **`tenant`** is mandatory and matches Authority slugs (e.g., `acme-prod`). -- **`since` / `until`** use ISO-8601 timestamps (UTC). Preset ranges set only `since`; UI computes `until` on load. -- **`severity`** accepts comma-separated policy buckets (e.g., `critical,high,kev`). 
-- **`view`** stores module-specific state (e.g., `sbomView=usage`, `findingsPreset=threat-hunting`). -- **`panel`** selects drawers or tabs (`panel=explain`, `panel=timeline`). - -### 5.2 Copyable links - -- Share links from the action shelf or context chips; both copy canonical URLs with all active filters. -- CLI parity: inline callouts provide `stella` commands derived from the URL parameters to ensure console/CLI equivalence. -- Offline note: links copied in sealed mode include the snapshot ID (`snapshot=`) so recipients know which offline data set to load. - -### 5.3 Examples - -- **`since` / `until`** use ISO-8601 timestamps (UTC). Preset ranges set only `since`; UI computes `until` on load. -- **`severity`** accepts comma-separated policy buckets (e.g., `critical,high,kev`). -- **`view`** stores module-specific state (e.g., `sbomView=usage`, `findingsPreset=threat-hunting`). -- **`panel`** selects drawers or tabs (`panel=explain`, `panel=timeline`). -- **`component`** encodes package selection using percent-encoded PURL syntax. -- **`snapshot`** appears when copying links offline to reference Offline Kit build hash. -@@ -| Use case | Example URL | Description | -|----------|-------------|-------------| -| Findings triage | `/console/findings?v=table&severity=critical,high&tenant=west-prod&since=2025-10-20T00:00:00Z` | Opens the findings table limited to critical/high for west-prod, last 7 days. | -| SBOM component focus | `/console/sbom/sha256:abcd?tenant=west-prod&component=pkg:npm/react@18.3.0&view=usage` | Deep-links to a specific image digest and highlights an NPM package in Usage view. | -| Advisory explain | `/console/advisories?tenant=west-prod&source=nvd&panel=detail&documentId=CVE-2025-1234` | Opens advisory list filtered to NVD and expands CVE detail drawer. | -| Run monitor | `/console/runs/42?tenant=west-prod&panel=progress` | Focuses run ID 42 with progress drawer active (SSE stream attached). | - ---- - -## 6. Tenant Switching Lifecycle - -1. **Initiate:** User triggers `Ctrl/Cmd + T` or clicks the tenant badge. Switcher modal lists authorised tenants and recent selections. -2. **Preview:** Selecting a tenant shows summary (environment, last snapshot, role coverage). The modal flags tenants missing required scopes for the current route. -3. **Confirm:** On confirmation, the UI requests a new DPoP-bound access token from Authority (`aud=console`, `tenant=`). -4. **Invalidate caches:** Stores keyed by tenant purge automatically; modules emit `tenantChanged` events so in-flight SSE streams reconnect with new headers. -5. **Restore state:** Global filters reapply where valid. Incompatible filters (for example, a saved view unavailable in the new tenant) prompt users to pick a fallback. -6. **Audit and telemetry:** `ui.tenant.switch` log writes subject, from/to tenant, correlation ID. Metric `ui_tenant_switch_total` increments for observability dashboards. -7. **Offline behaviour:** If the target tenant is absent from the offline snapshot, switcher displays guidance to import updated Offline Kit data before proceeding. - ---- - -## 7. Breadcrumbs, Tabs, and Focus Management - -- Breadcrumb titles update synchronously with route data loads. When fragments change (for example, selecting a finding), the breadcrumb text updates without pushing a new history entry to keep back/forward predictable. -- Detail views rely on accessible tabs (`role="tablist"`) with keyboard support (`ArrowLeft/Right`). Tab selection updates the URL `tab` parameter for deep linking. 
-- Focus management: - - Route changes send focus to the primary heading (`h1`) using the live region announcer. - - Opening drawers or modals traps focus until closed; ESC returns focus to the triggering element. - - Keyboard-only navigation is validated via automated Playwright accessibility checks as part of `DEVOPS-CONSOLE-23-001`. - ---- - -## 8. References - -- `/docs/ui/console-overview.md` - structural overview, tenant model, global filters. -- `/docs/ui/sbom-explorer.md` - SBOM-specific navigation and graphs (pending). -- `/docs/ui/advisories-and-vex.md` - aggregation UX details (pending). -- `/docs/ui/findings.md` - findings filters and explain drawer (pending). -- `/docs/security/console-security.md` - Authority, scopes, CSP. -- `/docs/cli-vs-ui-parity.md` - CLI equivalence matrix. -- `/docs/accessibility.md` - keyboard remapping, WCAG validation checklists. - ---- - -## 9. Compliance Checklist - -- [ ] Route table matches Console build (paths, scopes, owners verified with Console Guild). -- [ ] Keyboard shortcut matrix reflects implemented accelerators and accessibility overlay. -- [ ] Deep-link examples tested for copy/share parity and CLI alignment. -- [ ] Tenant switching flow documents cache invalidation and audit logging. -- [ ] Filter tray, command palette, and presets cross-referenced with accessibility guidance. -- [ ] Offline/air-gap notes included for palette, tenant switcher, and deep-link metadata. -- [ ] Links to dependent docs (`/docs/ui/*`, `/docs/security/*`) validated. - ---- - -*Last updated: 2025-10-26 (Sprint 23).* +# StellaOps Console - Navigation + +> **Audience:** Console UX writers, UI engineers, QA, and enablement teams. +> **Scope:** Primary route map, layout conventions, keyboard shortcuts, deep-link patterns, and tenant context switching for the StellaOps Console (Sprint 23). + +The navigation framework keeps Console workflows predictable across tenants and deployment modes. This guide explains how the global shell, feature routes, and context tokens cooperate so operators can jump between findings, SBOMs, advisories, policies, and runs without losing scope. + +--- + +## 1. 
Information Architecture + +### 1.1 Primary routes + +| Route pattern | Module owner | Purpose | Required scopes (minimum) | Core services | +|---------------|--------------|---------|---------------------------|---------------| +| `/console/dashboard` | Web gateway | Landing KPIs, feed age, queue depth, alerts | `ui.read` | Web, Scheduler WebService, Concelier WebService, Excititor WebService | +| `/console/findings` | Policy Engine | Aggregated findings, explain drawer, export | `findings.read` | Policy Engine, Concelier WebService, SBOM Service | +| `/console/sbom` | SBOM Service | Catalog view, component graph, overlays | `sbom.read` | SBOM Service, Policy Engine (overlays) | +| `/console/advisories` | Concelier | Advisory aggregation with provenance banners | `advisory.read` | Concelier WebService | +| `/console/vex` | Excititor | VEX aggregation, consensus, conflicts | `vex.read` | Excititor WebService | +| `/console/runs` | Scheduler | Run list, live progress, evidence downloads | `runs.read` | Scheduler WebService, Policy Engine, Scanner WebService | +| `/console/policies` | Policy Engine | Editor, simulations, approvals | `policy.read` (read) / `policy.write` (edit) | Policy Engine, Authority | +| `/console/downloads` | DevOps | Signed artifacts, Offline Kit parity checklist | `downloads.read` | DevOps manifest API, Offline Kit | +| `/console/admin` | Authority | Tenants, roles, tokens, integrations | `ui.admin` (plus scoped `authority:*`) | Authority | +| `/console/help` | Docs Guild | Guides, tours, release notes | `ui.read` | Docs static assets | + +### 1.2 Secondary navigation elements + +- **Left rail:** highlights the active top-level route, exposes quick metrics, and shows pinned saved views. Keyboard focus cycles through rail entries with `Tab`/`Shift+Tab`. +- **Breadcrumb bar:** renders `Home / Module / Detail` format. Detail crumbs include IDs and titles for shareable context (for example, `Findings / High Severity / CVE-2025-1234`). +- **Action shelf:** right-aligned controls for context actions (export, verify, retry). Buttons disable automatically if the current subject lacks the requisite scope. + +--- + +## 2. Command Palette and Search + +- **Trigger:** `Ctrl/Cmd + K`. Palette opens in place, keeps focus, and announces results via ARIA live region. +- **Capabilities:** jump to routes, saved views, tenants, recent entities (findings, SBOMs, advisories), and command actions (for example, "Start verification", "Open explain drawer"). +- **Result tokens:** palette entries carry metadata (`type`, `tenant`, `filters`). Selecting an item updates the URL and applies stored filters without a full reload. +- **Offline fallback:** in sealed/offline mode, palette restricts actions to cached routes and saved views; remote-only items show a grayed-out badge. + +--- + +## 3. Global Filters and Context Chips + +| Control | Shortcut | Persistence | Notes | +|---------|----------|-------------|-------| +| **Tenant picker** | `Ctrl/Cmd + T` | SessionStorage + URL `tenant` query | Issues fresh Authority token, invalidates caches, emits `ui.tenant.switch` log. | +| **Filter tray** | `Shift + F` | IndexedDB (per tenant) + URL query (`since`, `severity`, `tags`, `source`, `status`, `policyView`) | Applies instantly to compatible routes; incompatible filters show a reset suggestion. | +| **Component search** | `/` when filters closed | URL `component` query | Context-aware; scopes results to current tenant and module. 
| +| **Time window** | `Ctrl/Cmd + Shift + 1-4` | URL `since`/`until`, palette preset | Mapped to preset windows: 24 h, 7 d, 30 d, custom. | + +Context chips appear beneath page titles summarising active filters (for example, `Tenant: west-prod`, `Severity: Critical+High`, `Time: Last 7 days`). Removing a chip updates the tray and URL atomically. + +--- + +## 4. Keyboard Shortcut Matrix + +| Scope | Shortcut (Mac / Windows) | Action | Notes | +|-------|--------------------------|--------|-------| +| Global | `Cmd+K / Ctrl+K` | Open command palette | Accessible from any route except modal dialogs. | +| Global | `Cmd+T / Ctrl+T` | Open tenant switcher | Requires `ui.read`. Confirm selection with `Enter`; `Esc` cancels without switching. | +| Global | `Shift+F` | Toggle global filter tray | Focus lands on first filter control. | +| Global | `Cmd+1-9 / Ctrl+1-9` | Load saved view preset | Each preset bound per tenant; non-assigned keys show tooltip. | +| Global | `?` | Show keyboard reference overlay | Overlay lists context-specific shortcuts; closes with `Esc`. | +| Findings module | `Cmd+/ / Ctrl+/` | Focus explain search | Works when explain drawer is open. | +| SBOM module | `Cmd+G / Ctrl+G` | Toggle graph overlays | Persists per session. | +| Advisories & VEX | `Cmd+Opt+F / Ctrl+Alt+F` | Focus provider filter | Highlights provider chip strip. | +| Runs module | `Cmd+R / Ctrl+R` | Refresh SSE snapshot | Schedules soft refresh (no hard reload). | +| Policies module | `Cmd+S / Ctrl+S` | Save draft (if edit rights) | Mirrors Policy Editor behaviour. | + +Shortcut handling follows WCAG 2.2 best practices: all accelerators are remappable via Settings -> Accessibility -> Keyboard shortcuts, and the overlay documents platform differences. + +--- + +## 5. Deep-Link Patterns + +### 5.1 URL schema + +Console URLs adopt the format: + +``` +/console/[/:id][/:tab]?tenant=&since=&severity=&view=&panel=&component= +``` + +- **`tenant`** is mandatory and matches Authority slugs (e.g., `acme-prod`). +- **`since` / `until`** use ISO-8601 timestamps (UTC). Preset ranges set only `since`; UI computes `until` on load. +- **`severity`** accepts comma-separated policy buckets (e.g., `critical,high,kev`). +- **`view`** stores module-specific state (e.g., `sbomView=usage`, `findingsPreset=threat-hunting`). +- **`panel`** selects drawers or tabs (`panel=explain`, `panel=timeline`). + +### 5.2 Copyable links + +- Share links from the action shelf or context chips; both copy canonical URLs with all active filters. +- CLI parity: inline callouts provide `stella` commands derived from the URL parameters to ensure console/CLI equivalence. +- Offline note: links copied in sealed mode include the snapshot ID (`snapshot=`) so recipients know which offline data set to load. + +### 5.3 Examples + +- **`since` / `until`** use ISO-8601 timestamps (UTC). Preset ranges set only `since`; UI computes `until` on load. +- **`severity`** accepts comma-separated policy buckets (e.g., `critical,high,kev`). +- **`view`** stores module-specific state (e.g., `sbomView=usage`, `findingsPreset=threat-hunting`). +- **`panel`** selects drawers or tabs (`panel=explain`, `panel=timeline`). +- **`component`** encodes package selection using percent-encoded PURL syntax. +- **`snapshot`** appears when copying links offline to reference Offline Kit build hash. 
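+
+As a concrete illustration of how these parameters compose, the snippet below assembles an SBOM deep link with a percent-encoded `component` value. The tenant, digest, and package are placeholders, and `jq`'s `@uri` filter is simply one convenient way to handle the encoding from a shell.
+
+```bash
+# Illustrative values only - substitute a real tenant, image digest, and PURL.
+tenant="west-prod"
+digest="sha256:abcd"
+component="pkg:npm/react@18.3.0"
+
+# Percent-encode the PURL for the `component` query parameter.
+encoded="$(jq -rn --arg c "${component}" '$c|@uri')"   # -> pkg%3Anpm%2Freact%4018.3.0
+
+echo "/console/sbom/${digest}?tenant=${tenant}&component=${encoded}&view=usage"
+```
+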
+
+### 5.3 Examples
+
+| Use case | Example URL | Description |
+|----------|-------------|-------------|
+| Findings triage | `/console/findings?v=table&severity=critical,high&tenant=west-prod&since=2025-10-20T00:00:00Z` | Opens the findings table limited to critical/high for west-prod, last 7 days. |
+| SBOM component focus | `/console/sbom/sha256:abcd?tenant=west-prod&component=pkg:npm/react@18.3.0&view=usage` | Deep-links to a specific image digest and highlights an NPM package in Usage view. |
+| Advisory explain | `/console/advisories?tenant=west-prod&source=nvd&panel=detail&documentId=CVE-2025-1234` | Opens advisory list filtered to NVD and expands CVE detail drawer. |
+| Run monitor | `/console/runs/42?tenant=west-prod&panel=progress` | Focuses run ID 42 with progress drawer active (SSE stream attached). |
+
+---
+
+## 6. Tenant Switching Lifecycle
+
+1. **Initiate:** User triggers `Ctrl/Cmd + T` or clicks the tenant badge. Switcher modal lists authorised tenants and recent selections.
+2. **Preview:** Selecting a tenant shows summary (environment, last snapshot, role coverage). The modal flags tenants missing required scopes for the current route.
+3. **Confirm:** On confirmation, the UI requests a new DPoP-bound access token from Authority (`aud=console`, `tenant=`).
+4. **Invalidate caches:** Stores keyed by tenant purge automatically; modules emit `tenantChanged` events so in-flight SSE streams reconnect with new headers.
+5. **Restore state:** Global filters reapply where valid. Incompatible filters (for example, a saved view unavailable in the new tenant) prompt users to pick a fallback.
+6. **Audit and telemetry:** `ui.tenant.switch` log writes subject, from/to tenant, correlation ID. Metric `ui_tenant_switch_total` increments for observability dashboards.
+7. **Offline behaviour:** If the target tenant is absent from the offline snapshot, switcher displays guidance to import updated Offline Kit data before proceeding.
+
+---
+
+## 7. Breadcrumbs, Tabs, and Focus Management
+
+- Breadcrumb titles update synchronously with route data loads. When fragments change (for example, selecting a finding), the breadcrumb text updates without pushing a new history entry to keep back/forward predictable.
+- Detail views rely on accessible tabs (`role="tablist"`) with keyboard support (`ArrowLeft/Right`). Tab selection updates the URL `tab` parameter for deep linking.
+- Focus management:
+  - Route changes send focus to the primary heading (`h1`) using the live region announcer.
+  - Opening drawers or modals traps focus until closed; ESC returns focus to the triggering element.
+  - Keyboard-only navigation is validated via automated Playwright accessibility checks as part of `DEVOPS-CONSOLE-23-001`.
+
+---
+
+## 8. References
+
+- `/docs/ui/console-overview.md` - structural overview, tenant model, global filters.
+- `/docs/ui/sbom-explorer.md` - SBOM-specific navigation and graphs (pending).
+- `/docs/ui/advisories-and-vex.md` - aggregation UX details (pending).
+- `/docs/ui/findings.md` - findings filters and explain drawer (pending).
+- `/docs/security/console-security.md` - Authority, scopes, CSP.
+- `/docs/cli-vs-ui-parity.md` - CLI equivalence matrix.
+- `/docs/accessibility.md` - keyboard remapping, WCAG validation checklists.
+
+---
+
+## 9. Compliance Checklist
+
+- [ ] Route table matches Console build (paths, scopes, owners verified with Console Guild).
+- [ ] Keyboard shortcut matrix reflects implemented accelerators and accessibility overlay.
+- [ ] Deep-link examples tested for copy/share parity and CLI alignment. +- [ ] Tenant switching flow documents cache invalidation and audit logging. +- [ ] Filter tray, command palette, and presets cross-referenced with accessibility guidance. +- [ ] Offline/air-gap notes included for palette, tenant switcher, and deep-link metadata. +- [ ] Links to dependent docs (`/docs/ui/*`, `/docs/security/*`) validated. + +--- + +*Last updated: 2025-10-26 (Sprint 23).* diff --git a/docs/ui/policies.md b/docs/ui/policies.md index 49666c6e..3378a563 100644 --- a/docs/ui/policies.md +++ b/docs/ui/policies.md @@ -1,192 +1,192 @@ -# StellaOps Console - Policies Workspace - -> **Audience:** Policy Guild, Console UX, product ops, review leads. -> **Scope:** Policy workspace navigation, editor surfaces, simulation, approvals, RBAC, observability, offline behaviour, and CLI parity for Sprint 23. - -The Policies workspace centralises authoring, simulation, review, and promotion for `stella-dsl@1` packs. It builds on the Policy Editor (`docs/ui/policy-editor.md`) and adds list views, governance workflows, and integrations with runs and findings. - ---- - -## 1. Access and prerequisites - -- **Routes:** - - `/console/policies` (list) - - `/console/policies/:policyId` (details) - - `/console/policies/:policyId/:revision` (editor, approvals, runs) -- **Scopes / roles:** - - `policy:read` (list and details) - - `policy:author` (edit drafts, run lint/compile) - - `policy:review`, `policy:approve` (workflow actions) - - `policy:operate` (promotions, run orchestration) - - `policy:simulate` (run simulations) - - `policy:audit` (download audit bundles) - - `effective:write` (promotion visibility only; actual write remains server-side) -- **Feature flags:** `policy.studio.enabled`, `policy.simulation.diff`, `policy.runCharts.enabled`, `policy.offline.bundleUpload`. -- **Dependencies:** Policy Engine v2 APIs (`/policies`, `/policy/runs`, `/policy/simulations`), Policy Studio Monaco assets, Authority fresh-auth flows for critical operations. - ---- - -## 2. List and detail views - -### 2.1 Policy list - -| Column | Description | -|--------|-------------| -| **Policy** | Human-readable name plus policy ID (e.g., `P-7`). | -| **State** | `Active`, `Draft`, `Staged`, `Simulation`, `Archived`. Badge colours align with Policy Engine status. | -| **Revision** | Latest revision digest (short SHA). | -| **Owner** | Primary maintainer or owning team tag. | -| **Last change** | Timestamp and actor of last update (edit, submit, approve). | -| **Pending approvals** | Count of outstanding approval requests (with tooltip listing reviewers). | - -Row actions: `Open`, `Duplicate`, `Export pack`, `Run simulation`, `Compare revisions`. - -Filters: owning team, state, tag, pending approvals, contains staged changes, last change window, simulation warnings (determinism, failed run). - -### 2.2 Policy detail header - -- Summary cards: current state, digest, active revision, staged revision (if any), simulation status, last production run (timestamp, duration, determinism hash). -- Action bar: `Edit draft`, `Run simulation`, `Submit for review`, `Promote`, `Export pack`, `View findings`. - ---- - -## 3. Editor shell - -The editor view reuses the structure documented in `/docs/ui/policy-editor.md` and adds: - -- **Context banner** showing tenant, policy ID, revision digest, and simulation badge if editing sandbox copy. -- **Lint and compile status** displayed inline with time since last run. 
-- **Checklist sidebar** summarising required steps (lint pass, simulation run, deterministic CI, security review). Each item links to evidence (e.g., latest simulation diff). -- **Monaco integration** with policy-specific snippets, schema hover, code actions (`Insert allowlist`, `Add justification`). -- **Draft autosave** every 30 seconds with conflict detection (merges disabled; last write wins with warning). - ---- - -## 4. Simulation workflows - -- Simulation modal accepts SBOM filter (golden set, specific SBOM IDs, tenant-wide) and options for VEX weighting overrides. -- Simulations run asynchronously; progress shown in run ticker with status updates. -- Diff view summarises totals: affected findings added/removed, severity up/down counts, quieted changes. -- Side-by-side diff (Active vs Simulation) accessible directly from policy detail. -- Export options: JSON diff, Markdown summary, CLI snippet `stella policy simulate --policy --sbom `. -- Simulation results cached per draft revision. Cache invalidates when draft changes or SBOM snapshot updates. -- Simulation compliance card requires at least one up-to-date simulation before submission. - ---- - -## 5. Review and approval - -- **Review requests:** Authors tag reviewers; review sidebar lists pending reviewers, due dates, and escalation contact. -- **Comments:** Threaded comments support markdown, mentions, and attachments (redacted before persistence). Comment resolution required before approval. -- **Approval checklist:** - - Lint/compile success - - Simulation fresh (within configured SLA) - - Determinism verification passed - - Security review (if flagged) - - Offline bundle prepared (optional) -- **Fresh-auth:** Approve/promote buttons require fresh authentication; modal prompts for credentials and enforces short-lived token (<5 minutes). -- **Approval audit:** Approval events recorded with correlation ID, digests, reviewer note, effective date, and optional ticket link. - ---- - -## 6. Promotion and rollout - -- Promotion dialog summarises staged changes, target tenants, release windows, and run plan (full vs incremental). -- Operators can schedule promotion or apply immediately. -- Promotion triggers Policy Engine to materialise new revision; console reflects status and shows run progress. -- CLI parity: `stella policy promote --policy --revision --run-mode full`. -- Rollback guidance accessible from action bar (`Open rollback instructions`) linking to CLI command and documentation. - ---- - -## 7. Runs and observability - -- Runs tab displays table of recent runs with columns: run ID, type (`full`, `incremental`, `simulation`), duration, determinism hash, findings delta counts, triggered by. -- Charts: findings trend, quieted findings trend, rule hit heatmap (top rules vs recent runs). -- Clicking a run opens run detail drawer showing inputs (policy digest, SBOM batch hash, advisory snapshot hash), output summary, and explain bundle download. -- Error runs display red badge; detail drawer includes correlation ID and link to Policy Engine logs. -- SSE updates stream run status changes to keep UI real-time. - ---- - -## 8. RBAC and governance - -| Role | Scopes | Capabilities | -|------|--------|--------------| -| **Author** | `policy:read`, `policy:author`, `policy:simulate` | Create drafts, run lint/simulations, comment. | -| **Reviewer** | `policy:read`, `policy:review`, `policy:simulate` | Leave review comments, request changes. 
| -| **Approver** | `policy:read`, `policy:approve`, `policy:operate`, `policy:simulate` | Approve/promote, trigger runs, view run history. | -| **Operator** | `policy:read`, `policy:operate`, `policy:simulate`, `effective:write` | Schedule promotions, monitor runs (no editing). | -| **Auditor** | `policy:read`, `policy:audit`, `policy:simulate` | View immutable history, export audit bundles. | -| **Admin** | Above plus Authority admin scopes | Manage roles, configure escalation chains. | - -UI disables controls not allowed by current scope and surfaces tooltip with required scope names. Audit log captures denied attempts (`policy.ui.action_denied`). - ---- - -## 9. Exports and offline bundles - -- `Export pack` button downloads policy pack (zip) with metadata, digest manifest, and README. -- Offline bundle uploader allows importing reviewed packs; UI verifies signatures and digests before applying. -- Explain bundle export collects latest run explain traces for audit. -- CLI parity: - - `stella policy export --policy --revision ` - - `stella policy bundle import --file ` - - `stella policy bundle export --policy --revision ` -- Offline mode displays banner and disables direct promotion; provides script instructions for offline runner. - ---- - -## 10. Observability and alerts - -- Metrics cards show `policy_run_seconds`, `policy_rules_fired_total`, `policy_determinism_failures_total`. -- Alert banners surfaced for determinism failures, simulation stale warnings, approval SLA breaches. -- Links to dashboards (Grafana) pre-filtered with policy ID. -- Telemetry panel lists last emitted events (policy.promoted, policy.simulation.completed). - ---- - -## 11. Offline and air-gap considerations - -- In sealed mode, editor warns about cached enrichment data; simulation run button adds tooltip explaining degraded evidence. -- Promotions queue and require manual CLI execution on authorised host; UI provides downloadable job manifest. -- Run charts switch to snapshot data; SSE streams disabled, replaced by manual refresh button. -- Export/download buttons label file paths for removable media transfer. - ---- - -## 12. Screenshot coordination - -- Placeholders: - - `![Policy list placeholder](../assets/ui/policies/list-placeholder.png)` - - `![Policy approval placeholder](../assets/ui/policies/approval-placeholder.png)` - - `![Simulation diff placeholder](../assets/ui/policies/simulation-placeholder.png)` -- Coordinate with Console Guild via `#console-screenshots` (entry 2025-10-26) to replace placeholders once UI captures are ready (light and dark themes). - ---- - -## 13. References - -- `/docs/ui/policy-editor.md` - detailed editor mechanics. -- `/docs/ui/findings.md` - downstream findings view and explain drawer. -- `/docs/policy/overview.md` and `/docs/policy/runs.md` - Policy Engine contracts. -- `/docs/security/authority-scopes.md` - scope definitions. -- `/docs/cli/policy.md` - CLI commands for policy management. -- `/docs/ui/console-overview.md` - navigation shell and filters. - ---- - -## 14. Compliance checklist - -- [ ] Policy list and detail workflow documented (columns, filters, actions). -- [ ] Editor shell extends Policy Studio guidance with checklists and lint/simulation integration. -- [ ] Simulation flow, diff presentation, and CLI parity captured. -- [ ] Review, approval, and promotion workflows detailed with scope gating. -- [ ] Runs dashboard, metrics, and SSE behaviour described. -- [ ] Exports and offline bundle handling included. 
-- [ ] Offline/air-gap behaviour and screenshot coordination recorded. -- [ ] References validated. - ---- - -*Last updated: 2025-10-26 (Sprint 23).* +# StellaOps Console - Policies Workspace + +> **Audience:** Policy Guild, Console UX, product ops, review leads. +> **Scope:** Policy workspace navigation, editor surfaces, simulation, approvals, RBAC, observability, offline behaviour, and CLI parity for Sprint 23. + +The Policies workspace centralises authoring, simulation, review, and promotion for `stella-dsl@1` packs. It builds on the Policy Editor (`docs/ui/policy-editor.md`) and adds list views, governance workflows, and integrations with runs and findings. + +--- + +## 1. Access and prerequisites + +- **Routes:** + - `/console/policies` (list) + - `/console/policies/:policyId` (details) + - `/console/policies/:policyId/:revision` (editor, approvals, runs) +- **Scopes / roles:** + - `policy:read` (list and details) + - `policy:author` (edit drafts, run lint/compile) + - `policy:review`, `policy:approve` (workflow actions) + - `policy:operate` (promotions, run orchestration) + - `policy:simulate` (run simulations) + - `policy:audit` (download audit bundles) + - `effective:write` (promotion visibility only; actual write remains server-side) +- **Feature flags:** `policy.studio.enabled`, `policy.simulation.diff`, `policy.runCharts.enabled`, `policy.offline.bundleUpload`. +- **Dependencies:** Policy Engine v2 APIs (`/policies`, `/policy/runs`, `/policy/simulations`), Policy Studio Monaco assets, Authority fresh-auth flows for critical operations. + +--- + +## 2. List and detail views + +### 2.1 Policy list + +| Column | Description | +|--------|-------------| +| **Policy** | Human-readable name plus policy ID (e.g., `P-7`). | +| **State** | `Active`, `Draft`, `Staged`, `Simulation`, `Archived`. Badge colours align with Policy Engine status. | +| **Revision** | Latest revision digest (short SHA). | +| **Owner** | Primary maintainer or owning team tag. | +| **Last change** | Timestamp and actor of last update (edit, submit, approve). | +| **Pending approvals** | Count of outstanding approval requests (with tooltip listing reviewers). | + +Row actions: `Open`, `Duplicate`, `Export pack`, `Run simulation`, `Compare revisions`. + +Filters: owning team, state, tag, pending approvals, contains staged changes, last change window, simulation warnings (determinism, failed run). + +### 2.2 Policy detail header + +- Summary cards: current state, digest, active revision, staged revision (if any), simulation status, last production run (timestamp, duration, determinism hash). +- Action bar: `Edit draft`, `Run simulation`, `Submit for review`, `Promote`, `Export pack`, `View findings`. + +--- + +## 3. Editor shell + +The editor view reuses the structure documented in `/docs/ui/policy-editor.md` and adds: + +- **Context banner** showing tenant, policy ID, revision digest, and simulation badge if editing sandbox copy. +- **Lint and compile status** displayed inline with time since last run. +- **Checklist sidebar** summarising required steps (lint pass, simulation run, deterministic CI, security review). Each item links to evidence (e.g., latest simulation diff). +- **Monaco integration** with policy-specific snippets, schema hover, code actions (`Insert allowlist`, `Add justification`). +- **Draft autosave** every 30 seconds with conflict detection (merges disabled; last write wins with warning). + +--- + +## 4. 
Simulation workflows + +- Simulation modal accepts SBOM filter (golden set, specific SBOM IDs, tenant-wide) and options for VEX weighting overrides. +- Simulations run asynchronously; progress shown in run ticker with status updates. +- Diff view summarises totals: affected findings added/removed, severity up/down counts, quieted changes. +- Side-by-side diff (Active vs Simulation) accessible directly from policy detail. +- Export options: JSON diff, Markdown summary, CLI snippet `stella policy simulate --policy --sbom `. +- Simulation results cached per draft revision. Cache invalidates when draft changes or SBOM snapshot updates. +- Simulation compliance card requires at least one up-to-date simulation before submission. + +--- + +## 5. Review and approval + +- **Review requests:** Authors tag reviewers; review sidebar lists pending reviewers, due dates, and escalation contact. +- **Comments:** Threaded comments support markdown, mentions, and attachments (redacted before persistence). Comment resolution required before approval. +- **Approval checklist:** + - Lint/compile success + - Simulation fresh (within configured SLA) + - Determinism verification passed + - Security review (if flagged) + - Offline bundle prepared (optional) +- **Fresh-auth:** Approve/promote buttons require fresh authentication; modal prompts for credentials and enforces short-lived token (<5 minutes). +- **Approval audit:** Approval events recorded with correlation ID, digests, reviewer note, effective date, and optional ticket link. + +--- + +## 6. Promotion and rollout + +- Promotion dialog summarises staged changes, target tenants, release windows, and run plan (full vs incremental). +- Operators can schedule promotion or apply immediately. +- Promotion triggers Policy Engine to materialise new revision; console reflects status and shows run progress. +- CLI parity: `stella policy promote --policy --revision --run-mode full`. +- Rollback guidance accessible from action bar (`Open rollback instructions`) linking to CLI command and documentation. + +--- + +## 7. Runs and observability + +- Runs tab displays table of recent runs with columns: run ID, type (`full`, `incremental`, `simulation`), duration, determinism hash, findings delta counts, triggered by. +- Charts: findings trend, quieted findings trend, rule hit heatmap (top rules vs recent runs). +- Clicking a run opens run detail drawer showing inputs (policy digest, SBOM batch hash, advisory snapshot hash), output summary, and explain bundle download. +- Error runs display red badge; detail drawer includes correlation ID and link to Policy Engine logs. +- SSE updates stream run status changes to keep UI real-time. + +--- + +## 8. RBAC and governance + +| Role | Scopes | Capabilities | +|------|--------|--------------| +| **Author** | `policy:read`, `policy:author`, `policy:simulate` | Create drafts, run lint/simulations, comment. | +| **Reviewer** | `policy:read`, `policy:review`, `policy:simulate` | Leave review comments, request changes. | +| **Approver** | `policy:read`, `policy:approve`, `policy:operate`, `policy:simulate` | Approve/promote, trigger runs, view run history. | +| **Operator** | `policy:read`, `policy:operate`, `policy:simulate`, `effective:write` | Schedule promotions, monitor runs (no editing). | +| **Auditor** | `policy:read`, `policy:audit`, `policy:simulate` | View immutable history, export audit bundles. | +| **Admin** | Above plus Authority admin scopes | Manage roles, configure escalation chains. 
| + +UI disables controls not allowed by current scope and surfaces tooltip with required scope names. Audit log captures denied attempts (`policy.ui.action_denied`). + +--- + +## 9. Exports and offline bundles + +- `Export pack` button downloads policy pack (zip) with metadata, digest manifest, and README. +- Offline bundle uploader allows importing reviewed packs; UI verifies signatures and digests before applying. +- Explain bundle export collects latest run explain traces for audit. +- CLI parity: + - `stella policy export --policy --revision ` + - `stella policy bundle import --file ` + - `stella policy bundle export --policy --revision ` +- Offline mode displays banner and disables direct promotion; provides script instructions for offline runner. + +--- + +## 10. Observability and alerts + +- Metrics cards show `policy_run_seconds`, `policy_rules_fired_total`, `policy_determinism_failures_total`. +- Alert banners surfaced for determinism failures, simulation stale warnings, approval SLA breaches. +- Links to dashboards (Grafana) pre-filtered with policy ID. +- Telemetry panel lists last emitted events (policy.promoted, policy.simulation.completed). + +--- + +## 11. Offline and air-gap considerations + +- In sealed mode, editor warns about cached enrichment data; simulation run button adds tooltip explaining degraded evidence. +- Promotions queue and require manual CLI execution on authorised host; UI provides downloadable job manifest. +- Run charts switch to snapshot data; SSE streams disabled, replaced by manual refresh button. +- Export/download buttons label file paths for removable media transfer. + +--- + +## 12. Screenshot coordination + +- Placeholders: + - `![Policy list placeholder](../assets/ui/policies/list-placeholder.png)` + - `![Policy approval placeholder](../assets/ui/policies/approval-placeholder.png)` + - `![Simulation diff placeholder](../assets/ui/policies/simulation-placeholder.png)` +- Coordinate with Console Guild via `#console-screenshots` (entry 2025-10-26) to replace placeholders once UI captures are ready (light and dark themes). + +--- + +## 13. References + +- `/docs/ui/policy-editor.md` - detailed editor mechanics. +- `/docs/ui/findings.md` - downstream findings view and explain drawer. +- `/docs/policy/overview.md` and `/docs/policy/runs.md` - Policy Engine contracts. +- `/docs/security/authority-scopes.md` - scope definitions. +- `/docs/cli/policy.md` - CLI commands for policy management. +- `/docs/ui/console-overview.md` - navigation shell and filters. + +--- + +## 14. Compliance checklist + +- [ ] Policy list and detail workflow documented (columns, filters, actions). +- [ ] Editor shell extends Policy Studio guidance with checklists and lint/simulation integration. +- [ ] Simulation flow, diff presentation, and CLI parity captured. +- [ ] Review, approval, and promotion workflows detailed with scope gating. +- [ ] Runs dashboard, metrics, and SSE behaviour described. +- [ ] Exports and offline bundle handling included. +- [ ] Offline/air-gap behaviour and screenshot coordination recorded. +- [ ] References validated. + +--- + +*Last updated: 2025-10-26 (Sprint 23).* diff --git a/docs/ui/policy-editor.md b/docs/ui/policy-editor.md index 7e898831..f8f8b77b 100644 --- a/docs/ui/policy-editor.md +++ b/docs/ui/policy-editor.md @@ -1,179 +1,179 @@ -# Policy Editor Workspace - -> **Audience:** Product/UX, UI engineers, policy authors/reviewers using the Console. 
-> **Scope:** Layout, features, RBAC, a11y, simulation workflow, approvals, run dashboards, and offline considerations for the Policy Engine v2 editor (“Policy Studio”). - -The Policy Editor is the primary Console workspace for composing, simulating, and approving `stella-dsl@1` policies. It combines Monaco-based editing, diff visualisations, and governance tools so authors and reviewers can collaborate without leaving the browser. - ---- - -## 1 · Access & Prerequisites - -- **Routes:** `/console/policy` (list) → `/console/policy/:policyId/:version?`. -- **Scopes / roles:** - - `policy:author` (role `policy-author`) to edit drafts, run lint/compile, and execute quick simulations. - - `policy:review` (role `policy-reviewer`) to review drafts, leave comments, and request changes. - - `policy:approve` (role `policy-approver`) to approve or reject submissions. - - `policy:operate` (role `policy-operator`) to trigger batch simulations, promotions, and canary runs. - - `policy:audit` (role `policy-auditor`) to access immutable history and audit exports. - - `policy:simulate` to run simulations from Console; `findings:read` to open explain drawers. -- **Feature flags:** `policyStudio.enabled` (defaults true once Policy Engine v2 API available). -- **Browser support:** Evergreen Chrome, Edge, Firefox, Safari (last two versions). Uses WASM OPA sandbox; ensure COOP/COEP enabled per [UI architecture](../ARCHITECTURE_UI.md). - ---- - -## 2 · Workspace Layout - -``` -┌────────────────────────────────────────────────────────────────────────────┐ -│ Header: Policy selector • tenant switch • last activation banner │ -├────────────────────────────────────────────────────────────────────────────┤ -│ Sidebar (left) │ Main content (right) │ -│ - Revision list │ ┌───────────── Editor tabs ───────────────┐ │ -│ - Checklist status │ │ DSL │ Simulation │ Approvals │ ... │ │ -│ - Pending reviews │ └─────────────────────────────────────────┘ │ -│ - Run backlog │ │ -│ │ Editor pane / Simulation diff / Run viewer │ -└────────────────────────────────────────────────────────────────────────────┘ -``` - -- **Sidebar:** Revision timeline (draft, submitted, approved), compliance checklist cards, outstanding review requests, run backlog (incremental queue depth and SLA). -- **Editor tabs:** - - *DSL* (primary Monaco editor) - - *Simulation* (pre/post diff charts) - - *Approvals* (comments, audit log) - - *Runs* (heatmap dashboards) - - *Explain Explorer* (optional drawer for findings) -- **Right rail:** context cards for VEX providers, policy metadata, quick links to CLI/API docs. - -> Placeholder screenshot: `![Policy editor workspace](../assets/policy-editor/workspace.png)` (add after UI team captures latest build). - ---- - -## 3 · Editing Experience - -- Monaco editor configured for `stella-dsl@1`: - - Syntax highlighting, IntelliSense for rule/action names, snippets for common patterns. - - Inline diagnostics sourced from `/policies/{id}/lint` and `/compile`. - - Code actions (“Fix indentation”, “Insert requireVex block”). - - Mini-map disabled by default to reduce contrast noise; toggle available. -- **Keyboard shortcuts (accessible via `?`):** - - `Ctrl/Cmd + S` – Save draft (uploads to API if changed). - - `Ctrl/Cmd + Shift + Enter` – Run lint + compile. - - `Ctrl/Cmd + Shift + D` – Open diff view vs baseline. - - `Alt + Shift + F` – Format document (canonical ordering). -- **Schema tooltips:** Hover on `profile`, `rule`, `action` to view documentation (sourced from DSL doc). 
-- **Offline warnings:** When `sealed` mode detected, banner reminds authors to validate with offline bundle. - ---- - -## 4 · Simulation & Diff Panel - -- Triggered via “Run simulation” (toolbar) or automatically after compile. -- Displays: - - **Summary cards:** total findings added/removed/unchanged; severity up/down counts. - - **Rule hit table:** top rules contributing to diffs with percentage change. - - **Component list:** virtualised table linking to explain drawer; supports filters (severity, status, VEX outcome). - - **Visualisations:** stacked bar chart (severity deltas), sparkline for incremental backlog impact. -- Supports run presets: - - `Golden SBOM set` (default) - - Custom SBOM selection (via multi-select and search) - - Import sample JSON from CLI (`Upload diff`). -- Diff export options: - - `Download JSON` (same schema as CLI output) - - `Copy as Markdown` for review comments -- Simulation results persist per draft version; history accessible via timeline. - ---- - -## 5 · Review & Approval Workflow - -- **Commenting:** Line-level comments anchored to DSL lines; global comments supported. Uses rich text (Markdown subset) with mention support (`@group/sec-reviewers`). -- **Resolution:** Approvers/reviewers can mark comment resolved; history preserved in timeline. -- **Approval pane:** - - Checklist (lint, simulation, determinism CI) with status indicators; links to evidence. - - Reviewer checklist (quorum, blocking comments). - - Approval button only enabled when checklist satisfied. -- **Audit log:** Chronological view of submit/review/approve/archive events with actor, timestamp, note, attachments. -- **RBAC feedback:** When user lacks permission, actions are disabled with tooltip referencing required scope(s). -- **Notifications:** Integration with Notifier—subscribe/unsubscribe from review reminders within panel. - ---- - -## 6 · Runs & Observability - -- **Run tab** consumes `/policy/runs` data: - - Heatmap of rule hits per run (rows = runs, columns = top rules). - - VEX override counter, suppressions, quieted findings metrics. - - Incremental backlog widget (queue depth vs SLA). - - Export CSV/JSON button. -- **Replay/Download:** For each run, actions to download sealed replay bundle or open CLI command snippet. -- **Alert banners:** - - Determinism mismatch (red) - - SLA breach (amber) - - Pending replay (info) - ---- - -## 7 · Explain & Findings Integration - -- Inline “Open in Findings” button for any diff entry; opens side drawer with explain trace (same schema as `/findings/*/explain`). -- Drawer includes: - - Rule sequence with badges (block/warn/quiet). - - VEX evidence and justification codes. - - Links to advisories (Concelier) and SBOM components. -- Copy-to-clipboard (JSON) and “Share permalink” features (permalinks encode tenant, policy version, component). - ---- - -## 8 · Accessibility & i18n - -- WCAG 2.2 AA: - - Focus order follows logical workflow; skip link available. - - All actionable icons paired with text or `aria-label`. - - Simulation charts include table equivalents for screen readers. -- Keyboard support: - - `Alt+1/2/3/4` to switch tabs. - - `Shift+?` toggles help overlay (with key map). -- Internationalisation: - - Translations sourced from `/locales/{lang}.json`. - - Date/time displayed using user locale via `Intl.DateTimeFormat`. -- Theming: - - Light/dark CSS tokens; Monaco theme syncs with overall theme. - - High-contrast mode toggled via user preferences. 
- ---- - -## 9 · Offline & Air-Gap Behaviour - -- When console operates in sealed enclave: - - Editor displays “Sealed mode” banner with import timestamp. - - Simulation uses cached SBOM/advisory/VEX data only; results flagged accordingly. - - “Export bundle” button packages draft + simulations for transfer. -- Approvals require local Authority; UI blocks actions if `policy:approve` scope absent due to offline token limitations. -- Run tab surfaces bundle staleness warnings (`policy_runs.inputs.env.sealed=true`). - ---- - -## 10 · Telemetry & Testing Hooks - -- User actions (simulate, submit, approve, activate) emit telemetry (`ui.policy.action` spans) with anonymised metadata. -- Console surfaces correlation IDs for lint/compile errors to ease support triage. -- Cypress/Playwright fixtures available under `ui/policy-editor/examples/`; docs should note to re-record after significant UI changes. - ---- - -## 11 · Compliance Checklist - -- [ ] **Lint integration:** Editor surfaces diagnostics from API compile endpoint; errors link to DSL documentation. -- [ ] **Simulation parity:** Diff panel mirrors CLI schema; export button tested. -- [ ] **Workflow RBAC:** Buttons enable/disable correctly per scope (`policy:write/submit/review/approve`). -- [ ] **A11y verified:** Keyboard navigation, focus management, colour contrast (light/dark) pass automated Axe checks. -- [ ] **Offline safeguards:** Sealed-mode banner and bundle export flows present; no network calls trigger in sealed mode. -- [ ] **Telemetry wired:** Action spans and error logs include policyId, version, traceId. -- [ ] **Docs cross-links:** Links to DSL, lifecycle, runs, API, CLI guides validated. -- [ ] **Screenshot placeholders updated:** Replace TODO images with latest UI captures before GA. - ---- - -*Last updated: 2025-10-26 (Sprint 20).* +# Policy Editor Workspace + +> **Audience:** Product/UX, UI engineers, policy authors/reviewers using the Console. +> **Scope:** Layout, features, RBAC, a11y, simulation workflow, approvals, run dashboards, and offline considerations for the Policy Engine v2 editor (“Policy Studio”). + +The Policy Editor is the primary Console workspace for composing, simulating, and approving `stella-dsl@1` policies. It combines Monaco-based editing, diff visualisations, and governance tools so authors and reviewers can collaborate without leaving the browser. + +--- + +## 1 · Access & Prerequisites + +- **Routes:** `/console/policy` (list) → `/console/policy/:policyId/:version?`. +- **Scopes / roles:** + - `policy:author` (role `policy-author`) to edit drafts, run lint/compile, and execute quick simulations. + - `policy:review` (role `policy-reviewer`) to review drafts, leave comments, and request changes. + - `policy:approve` (role `policy-approver`) to approve or reject submissions. + - `policy:operate` (role `policy-operator`) to trigger batch simulations, promotions, and canary runs. + - `policy:audit` (role `policy-auditor`) to access immutable history and audit exports. + - `policy:simulate` to run simulations from Console; `findings:read` to open explain drawers. +- **Feature flags:** `policyStudio.enabled` (defaults true once Policy Engine v2 API available). +- **Browser support:** Evergreen Chrome, Edge, Firefox, Safari (last two versions). Uses WASM OPA sandbox; ensure COOP/COEP enabled per [UI architecture](../ARCHITECTURE_UI.md). 
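+
+The scope list above is what ultimately gates editor actions (see the RBAC feedback notes later in this document). A minimal TypeScript sketch of such gating, assuming a simple scope-to-action map; every name here is illustrative, not the Console's actual implementation:
+
+```typescript
+// Hypothetical capability map: which Authority scopes each editor action requires.
+type PolicyScope =
+  | 'policy:author'
+  | 'policy:review'
+  | 'policy:approve'
+  | 'policy:operate'
+  | 'policy:audit'
+  | 'policy:simulate';
+
+type EditorAction =
+  | 'saveDraft'
+  | 'runLintCompile'
+  | 'runSimulation'
+  | 'approveSubmission'
+  | 'promoteRevision'
+  | 'exportAuditBundle';
+
+const requiredScopes: Record<EditorAction, PolicyScope[]> = {
+  saveDraft: ['policy:author'],
+  runLintCompile: ['policy:author'],
+  runSimulation: ['policy:simulate'],
+  approveSubmission: ['policy:approve'],
+  promoteRevision: ['policy:operate'],
+  exportAuditBundle: ['policy:audit'],
+};
+
+// Returns true only when the current token grants every scope the action needs;
+// callers would disable the control and surface the missing scope names otherwise.
+export function canPerform(action: EditorAction, granted: ReadonlySet<string>): boolean {
+  return requiredScopes[action].every((scope) => granted.has(scope));
+}
+
+// Example: an author with simulate rights can lint and simulate but not promote.
+const granted = new Set(['policy:author', 'policy:simulate']);
+canPerform('runLintCompile', granted);  // true
+canPerform('promoteRevision', granted); // false
+```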
+ +--- + +## 2 · Workspace Layout + +``` +┌────────────────────────────────────────────────────────────────────────────┐ +│ Header: Policy selector • tenant switch • last activation banner │ +├────────────────────────────────────────────────────────────────────────────┤ +│ Sidebar (left) │ Main content (right) │ +│ - Revision list │ ┌───────────── Editor tabs ───────────────┐ │ +│ - Checklist status │ │ DSL │ Simulation │ Approvals │ ... │ │ +│ - Pending reviews │ └─────────────────────────────────────────┘ │ +│ - Run backlog │ │ +│ │ Editor pane / Simulation diff / Run viewer │ +└────────────────────────────────────────────────────────────────────────────┘ +``` + +- **Sidebar:** Revision timeline (draft, submitted, approved), compliance checklist cards, outstanding review requests, run backlog (incremental queue depth and SLA). +- **Editor tabs:** + - *DSL* (primary Monaco editor) + - *Simulation* (pre/post diff charts) + - *Approvals* (comments, audit log) + - *Runs* (heatmap dashboards) + - *Explain Explorer* (optional drawer for findings) +- **Right rail:** context cards for VEX providers, policy metadata, quick links to CLI/API docs. + +> Placeholder screenshot: `![Policy editor workspace](../assets/policy-editor/workspace.png)` (add after UI team captures latest build). + +--- + +## 3 · Editing Experience + +- Monaco editor configured for `stella-dsl@1`: + - Syntax highlighting, IntelliSense for rule/action names, snippets for common patterns. + - Inline diagnostics sourced from `/policies/{id}/lint` and `/compile`. + - Code actions (“Fix indentation”, “Insert requireVex block”). + - Mini-map disabled by default to reduce contrast noise; toggle available. +- **Keyboard shortcuts (accessible via `?`):** + - `Ctrl/Cmd + S` – Save draft (uploads to API if changed). + - `Ctrl/Cmd + Shift + Enter` – Run lint + compile. + - `Ctrl/Cmd + Shift + D` – Open diff view vs baseline. + - `Alt + Shift + F` – Format document (canonical ordering). +- **Schema tooltips:** Hover on `profile`, `rule`, `action` to view documentation (sourced from DSL doc). +- **Offline warnings:** When `sealed` mode detected, banner reminds authors to validate with offline bundle. + +--- + +## 4 · Simulation & Diff Panel + +- Triggered via “Run simulation” (toolbar) or automatically after compile. +- Displays: + - **Summary cards:** total findings added/removed/unchanged; severity up/down counts. + - **Rule hit table:** top rules contributing to diffs with percentage change. + - **Component list:** virtualised table linking to explain drawer; supports filters (severity, status, VEX outcome). + - **Visualisations:** stacked bar chart (severity deltas), sparkline for incremental backlog impact. +- Supports run presets: + - `Golden SBOM set` (default) + - Custom SBOM selection (via multi-select and search) + - Import sample JSON from CLI (`Upload diff`). +- Diff export options: + - `Download JSON` (same schema as CLI output) + - `Copy as Markdown` for review comments +- Simulation results persist per draft version; history accessible via timeline. + +--- + +## 5 · Review & Approval Workflow + +- **Commenting:** Line-level comments anchored to DSL lines; global comments supported. Uses rich text (Markdown subset) with mention support (`@group/sec-reviewers`). +- **Resolution:** Approvers/reviewers can mark comment resolved; history preserved in timeline. +- **Approval pane:** + - Checklist (lint, simulation, determinism CI) with status indicators; links to evidence. + - Reviewer checklist (quorum, blocking comments). 
+ - Approval button only enabled when checklist satisfied. +- **Audit log:** Chronological view of submit/review/approve/archive events with actor, timestamp, note, attachments. +- **RBAC feedback:** When user lacks permission, actions are disabled with tooltip referencing required scope(s). +- **Notifications:** Integration with Notifier—subscribe/unsubscribe from review reminders within panel. + +--- + +## 6 · Runs & Observability + +- **Run tab** consumes `/policy/runs` data: + - Heatmap of rule hits per run (rows = runs, columns = top rules). + - VEX override counter, suppressions, quieted findings metrics. + - Incremental backlog widget (queue depth vs SLA). + - Export CSV/JSON button. +- **Replay/Download:** For each run, actions to download sealed replay bundle or open CLI command snippet. +- **Alert banners:** + - Determinism mismatch (red) + - SLA breach (amber) + - Pending replay (info) + +--- + +## 7 · Explain & Findings Integration + +- Inline “Open in Findings” button for any diff entry; opens side drawer with explain trace (same schema as `/findings/*/explain`). +- Drawer includes: + - Rule sequence with badges (block/warn/quiet). + - VEX evidence and justification codes. + - Links to advisories (Concelier) and SBOM components. +- Copy-to-clipboard (JSON) and “Share permalink” features (permalinks encode tenant, policy version, component). + +--- + +## 8 · Accessibility & i18n + +- WCAG 2.2 AA: + - Focus order follows logical workflow; skip link available. + - All actionable icons paired with text or `aria-label`. + - Simulation charts include table equivalents for screen readers. +- Keyboard support: + - `Alt+1/2/3/4` to switch tabs. + - `Shift+?` toggles help overlay (with key map). +- Internationalisation: + - Translations sourced from `/locales/{lang}.json`. + - Date/time displayed using user locale via `Intl.DateTimeFormat`. +- Theming: + - Light/dark CSS tokens; Monaco theme syncs with overall theme. + - High-contrast mode toggled via user preferences. + +--- + +## 9 · Offline & Air-Gap Behaviour + +- When console operates in sealed enclave: + - Editor displays “Sealed mode” banner with import timestamp. + - Simulation uses cached SBOM/advisory/VEX data only; results flagged accordingly. + - “Export bundle” button packages draft + simulations for transfer. +- Approvals require local Authority; UI blocks actions if `policy:approve` scope absent due to offline token limitations. +- Run tab surfaces bundle staleness warnings (`policy_runs.inputs.env.sealed=true`). + +--- + +## 10 · Telemetry & Testing Hooks + +- User actions (simulate, submit, approve, activate) emit telemetry (`ui.policy.action` spans) with anonymised metadata. +- Console surfaces correlation IDs for lint/compile errors to ease support triage. +- Cypress/Playwright fixtures available under `ui/policy-editor/examples/`; docs should note to re-record after significant UI changes. + +--- + +## 11 · Compliance Checklist + +- [ ] **Lint integration:** Editor surfaces diagnostics from API compile endpoint; errors link to DSL documentation. +- [ ] **Simulation parity:** Diff panel mirrors CLI schema; export button tested. +- [ ] **Workflow RBAC:** Buttons enable/disable correctly per scope (`policy:write/submit/review/approve`). +- [ ] **A11y verified:** Keyboard navigation, focus management, colour contrast (light/dark) pass automated Axe checks. +- [ ] **Offline safeguards:** Sealed-mode banner and bundle export flows present; no network calls trigger in sealed mode. 
+- [ ] **Telemetry wired:** Action spans and error logs include policyId, version, traceId. +- [ ] **Docs cross-links:** Links to DSL, lifecycle, runs, API, CLI guides validated. +- [ ] **Screenshot placeholders updated:** Replace TODO images with latest UI captures before GA. + +--- + +*Last updated: 2025-10-26 (Sprint 20).* diff --git a/docs/ui/runs.md b/docs/ui/runs.md index 1e7815cd..14a140d4 100644 --- a/docs/ui/runs.md +++ b/docs/ui/runs.md @@ -1,169 +1,169 @@ -# StellaOps Console - Runs Workspace - -> **Audience:** Scheduler Guild, Console UX, operators, support engineers. -> **Scope:** Runs dashboard, live progress, queue management, diffs, retries, evidence downloads, observability, troubleshooting, and offline behaviour (Sprint 23). - -The Runs workspace surfaces Scheduler activity across tenants: upcoming schedules, active runs, progress, deltas, and evidence bundles. It helps operators monitor backlog, drill into run segments, and recover from failures without leaving the console. - ---- - -## 1. Access and prerequisites - -- **Route:** `/console/runs` (list) with detail drawer `/console/runs/:runId`. SSE stream at `/console/runs/:runId/stream`. -- **Scopes:** `runs.read` (baseline), `runs.manage` (cancel/retry), `policy:runs` (view policy deltas), `downloads.read` (evidence bundles). -- **Dependencies:** Scheduler WebService (`/runs`, `/schedules`, `/preview`), Scheduler Worker event feeds, Policy Engine run summaries, Scanner WebService evidence endpoints. -- **Feature flags:** `runs.dashboard.enabled`, `runs.sse.enabled`, `runs.retry.enabled`, `runs.evidenceBundles`. -- **Tenancy:** Tenant selector filters list; cross-tenant admins can pin multiple tenants side-by-side (split view). - ---- - -## 2. Layout overview - -``` -+-------------------------------------------------------------------+ -| Header: Tenant badge - schedule selector - backlog metrics | -+-------------------------------------------------------------------+ -| Cards: Active runs - Queue depth - New findings - KEV deltas | -+-------------------------------------------------------------------+ -| Tabs: Active | Completed | Scheduled | Failures | -+-------------------------------------------------------------------+ -| Runs table (virtualised) | -| Columns: Run ID | Trigger | State | Progress | Duration | Deltas | -+-------------------------------------------------------------------+ -| Detail drawer: Summary | Segments | Deltas | Evidence | Logs | -+-------------------------------------------------------------------+ -``` - -The header integrates the status ticker to show ingestion deltas and planner heartbeat. - ---- - -## 3. Runs table - -| Column | Description | -|--------|-------------| -| **Run ID** | Deterministic identifier (`run:::`). Clicking opens detail drawer. | -| **Trigger** | `cron`, `manual`, `feedser`, `vexer`, `policy`, `content-refresh`. Tooltip lists schedule and initiator. | -| **State** | Badges: `planning`, `queued`, `running`, `completed`, `cancelled`, `error`. Errors include error code (e.g., `ERR_RUN_005`). | -| **Progress** | Percentage + processed/total candidates. SSE updates increment in real time. | -| **Duration** | Elapsed time (auto-updating). Completed runs show total duration; running runs show timer. | -| **Deltas** | Count of findings deltas (`+critical`, `+high`, `-quieted`, etc.). Tooltip expands severity breakdown. | - -Row badges include `KEV first`, `Content refresh`, `Policy promotion follow-up`, and `Retry`. Selecting multiple rows enables bulk downloads and exports. 
- -Filters: trigger type, state, schedule, severity impact (critical/high), policy revision, timeframe, planner shard, error code. - ---- - -## 4. Detail drawer - -Sections: - -1. **Summary** - run metadata (tenant, trigger, linked schedule, planner shard count, started/finished timestamps, correlation ID). -2. **Progress** - segmented progress bar (planner, queue, execution, post-processing). Real-time updates via SSE; includes throughput (targets per minute). -3. **Segments** - table of run segments with state, target count, executor, retry count. Operators can retry failed segments individually (requires `runs.manage`). -4. **Deltas** - summary of findings changes (new findings, resolved findings, severity shifts, KEV additions). Links to Findings view filtered by run ID. -5. **Evidence** - links to evidence bundles (JSON manifest, DSSE attestation), policy run records, and explain bundles. Download buttons use `/console/exports` orchestration. -6. **Logs** - last 50 structured log entries with severity, message, correlation ID; scroll-to-live for streaming logs. `Open in logs` copies query for external log tooling. - ---- - -## 5. Queue and schedule management - -- Schedule side panel lists upcoming jobs with cron expressions, time zones, and enable toggles. -- Queue depth chart shows current backlog per tenant and schedule (planner backlog, executor backlog). -- "Preview impact" button opens modal for manual run planning (purls or vuln IDs) and shows impacted image count before launch. CLI parity: `stella runs preview --tenant --file keys.json`. -- Manual run form allows selecting mode (`analysis-only`, `content-refresh`), scope, and optional policy snapshot. -- Pausing a schedule requires confirmation; UI displays earliest next run after resume. - ---- - -## 6. Live updates and SSE stream - -- SSE endpoint `/console/runs/{id}/stream` streams JSON events (`stateChanged`, `segmentProgress`, `deltaSummary`, `log`). UI reconnects with exponential backoff and heartbeat. -- Global ticker shows planner heartbeat age; banner warns after 90 seconds of silence. -- Offline mode disables SSE and falls back to polling every 30 seconds. - ---- - -## 7. Retry and remediation - -- Failed segments show retry button; UI displays reason and cooldown timers. Retry actions are scope-gated and logged. -- Full run retry resets segments while preserving original run metadata; new run ID references previous run in `retryOf` field. -- "Escalate to support" button opens incident template pre-filled with run context and correlation IDs. -- Troubleshooting quick links: - - `ERR_RUN_001` (planner lock) - - `ERR_RUN_005` (Scanner timeout) - - `ERR_RUN_009` (impact index stale) - Each link points to corresponding runbook sections (`docs/ops/scheduler-runbook.md`). -- CLI parity: `stella runs retry --run `, `stella runs cancel --run `. - ---- - -## 8. Evidence downloads - -- Evidence tab aggregates: - - Policy run summary (`/policy/runs/{id}`) - - Findings delta CSV (`/downloads/findings/{runId}.csv`) - - Scanner evidence bundle (compressed JSON with manifest) -- Downloads show size, hash, signature status. -- "Bundle for offline" packages all evidence into single tarball with manifest/digest; UI notes CLI parity (`stella runs export --run --bundle`). -- Completed bundles stored in Downloads workspace for reuse (links provided). - ---- - -## 9. Observability - -- Metrics cards: `scheduler_queue_depth`, `scheduler_runs_active`, `scheduler_runs_error_total`, `scheduler_runs_duration_seconds`. 
-- Trend charts: queue depth (last 24h), runs per trigger, average duration, determinism score. -- Alert banners: planner lag > SLA, queue depth > threshold, repeated error codes. -- Telemetry panel lists latest events (e.g., `scheduler.run.started`, `scheduler.run.completed`, `scheduler.run.failed`). - ---- - -## 10. Offline and air-gap behaviour - -- Offline banner highlights snapshot timestamp and indicates SSE disabled. -- Manual run form switches to generate CLI script for offline execution (`stella runs submit --bundle `). -- Evidence download buttons output local paths; UI reminds to copy to removable media. -- Queue charts use snapshot data; manual refresh button loads latest records from Offline Kit. -- Tenants absent from snapshot hidden to avoid partial data. - ---- - -## 11. Screenshot coordination - -- Placeholders: - - `![Runs dashboard placeholder](../assets/ui/runs/dashboard-placeholder.png)` - - `![Run detail placeholder](../assets/ui/runs/detail-placeholder.png)` -- Coordinate with Scheduler Guild for updated screenshots after Sprint 23 UI stabilises (tracked in `#console-screenshots`, entry 2025-10-26). - ---- - -## 12. References - -- `/docs/ui/console-overview.md` - shell, SSE ticker. -- `/docs/ui/navigation.md` - route map and deep links. -- `/docs/ui/findings.md` - findings filtered by run. -- `/docs/ui/downloads.md` - download manager, export retention, CLI parity. -- `/docs/ARCHITECTURE_SCHEDULER.md` - scheduler architecture and data model. -- `/docs/policy/runs.md` - policy run integration. -- `/docs/cli/policy.md` and `/docs/cli/policy.md` section 5 for CLI parity (runs commands pending). -- `/docs/ops/scheduler-runbook.md` - troubleshooting. - ---- - -## 13. Compliance checklist - -- [ ] Runs table columns, filters, and states described. -- [ ] Detail drawer sections documented (segments, deltas, evidence, logs). -- [ ] Queue management, manual run, and preview coverage included. -- [ ] SSE and live update behaviour detailed. -- [ ] Retry, remediation, and runbook references provided. -- [ ] Evidence downloads and bundle workflows documented with CLI parity. -- [ ] Offline behaviour and screenshot coordination recorded. -- [ ] References validated. - ---- - -*Last updated: 2025-10-26 (Sprint 23).* +# StellaOps Console - Runs Workspace + +> **Audience:** Scheduler Guild, Console UX, operators, support engineers. +> **Scope:** Runs dashboard, live progress, queue management, diffs, retries, evidence downloads, observability, troubleshooting, and offline behaviour (Sprint 23). + +The Runs workspace surfaces Scheduler activity across tenants: upcoming schedules, active runs, progress, deltas, and evidence bundles. It helps operators monitor backlog, drill into run segments, and recover from failures without leaving the console. + +--- + +## 1. Access and prerequisites + +- **Route:** `/console/runs` (list) with detail drawer `/console/runs/:runId`. SSE stream at `/console/runs/:runId/stream`. +- **Scopes:** `runs.read` (baseline), `runs.manage` (cancel/retry), `policy:runs` (view policy deltas), `downloads.read` (evidence bundles). +- **Dependencies:** Scheduler WebService (`/runs`, `/schedules`, `/preview`), Scheduler Worker event feeds, Policy Engine run summaries, Scanner WebService evidence endpoints. +- **Feature flags:** `runs.dashboard.enabled`, `runs.sse.enabled`, `runs.retry.enabled`, `runs.evidenceBundles`. +- **Tenancy:** Tenant selector filters list; cross-tenant admins can pin multiple tenants side-by-side (split view). + +--- + +## 2. 
Layout overview + +``` ++-------------------------------------------------------------------+ +| Header: Tenant badge - schedule selector - backlog metrics | ++-------------------------------------------------------------------+ +| Cards: Active runs - Queue depth - New findings - KEV deltas | ++-------------------------------------------------------------------+ +| Tabs: Active | Completed | Scheduled | Failures | ++-------------------------------------------------------------------+ +| Runs table (virtualised) | +| Columns: Run ID | Trigger | State | Progress | Duration | Deltas | ++-------------------------------------------------------------------+ +| Detail drawer: Summary | Segments | Deltas | Evidence | Logs | ++-------------------------------------------------------------------+ +``` + +The header integrates the status ticker to show ingestion deltas and planner heartbeat. + +--- + +## 3. Runs table + +| Column | Description | +|--------|-------------| +| **Run ID** | Deterministic identifier (`run:::`). Clicking opens detail drawer. | +| **Trigger** | `cron`, `manual`, `feedser`, `vexer`, `policy`, `content-refresh`. Tooltip lists schedule and initiator. | +| **State** | Badges: `planning`, `queued`, `running`, `completed`, `cancelled`, `error`. Errors include error code (e.g., `ERR_RUN_005`). | +| **Progress** | Percentage + processed/total candidates. SSE updates increment in real time. | +| **Duration** | Elapsed time (auto-updating). Completed runs show total duration; running runs show timer. | +| **Deltas** | Count of findings deltas (`+critical`, `+high`, `-quieted`, etc.). Tooltip expands severity breakdown. | + +Row badges include `KEV first`, `Content refresh`, `Policy promotion follow-up`, and `Retry`. Selecting multiple rows enables bulk downloads and exports. + +Filters: trigger type, state, schedule, severity impact (critical/high), policy revision, timeframe, planner shard, error code. + +--- + +## 4. Detail drawer + +Sections: + +1. **Summary** - run metadata (tenant, trigger, linked schedule, planner shard count, started/finished timestamps, correlation ID). +2. **Progress** - segmented progress bar (planner, queue, execution, post-processing). Real-time updates via SSE; includes throughput (targets per minute). +3. **Segments** - table of run segments with state, target count, executor, retry count. Operators can retry failed segments individually (requires `runs.manage`). +4. **Deltas** - summary of findings changes (new findings, resolved findings, severity shifts, KEV additions). Links to Findings view filtered by run ID. +5. **Evidence** - links to evidence bundles (JSON manifest, DSSE attestation), policy run records, and explain bundles. Download buttons use `/console/exports` orchestration. +6. **Logs** - last 50 structured log entries with severity, message, correlation ID; scroll-to-live for streaming logs. `Open in logs` copies query for external log tooling. + +--- + +## 5. Queue and schedule management + +- Schedule side panel lists upcoming jobs with cron expressions, time zones, and enable toggles. +- Queue depth chart shows current backlog per tenant and schedule (planner backlog, executor backlog). +- "Preview impact" button opens modal for manual run planning (purls or vuln IDs) and shows impacted image count before launch. CLI parity: `stella runs preview --tenant --file keys.json`. +- Manual run form allows selecting mode (`analysis-only`, `content-refresh`), scope, and optional policy snapshot. 
+- Pausing a schedule requires confirmation; UI displays earliest next run after resume. + +--- + +## 6. Live updates and SSE stream + +- SSE endpoint `/console/runs/{id}/stream` streams JSON events (`stateChanged`, `segmentProgress`, `deltaSummary`, `log`). UI reconnects with exponential backoff and heartbeat. +- Global ticker shows planner heartbeat age; banner warns after 90 seconds of silence. +- Offline mode disables SSE and falls back to polling every 30 seconds. + +--- + +## 7. Retry and remediation + +- Failed segments show retry button; UI displays reason and cooldown timers. Retry actions are scope-gated and logged. +- Full run retry resets segments while preserving original run metadata; new run ID references previous run in `retryOf` field. +- "Escalate to support" button opens incident template pre-filled with run context and correlation IDs. +- Troubleshooting quick links: + - `ERR_RUN_001` (planner lock) + - `ERR_RUN_005` (Scanner timeout) + - `ERR_RUN_009` (impact index stale) + Each link points to corresponding runbook sections (`docs/ops/scheduler-runbook.md`). +- CLI parity: `stella runs retry --run `, `stella runs cancel --run `. + +--- + +## 8. Evidence downloads + +- Evidence tab aggregates: + - Policy run summary (`/policy/runs/{id}`) + - Findings delta CSV (`/downloads/findings/{runId}.csv`) + - Scanner evidence bundle (compressed JSON with manifest) +- Downloads show size, hash, signature status. +- "Bundle for offline" packages all evidence into single tarball with manifest/digest; UI notes CLI parity (`stella runs export --run --bundle`). +- Completed bundles stored in Downloads workspace for reuse (links provided). + +--- + +## 9. Observability + +- Metrics cards: `scheduler_queue_depth`, `scheduler_runs_active`, `scheduler_runs_error_total`, `scheduler_runs_duration_seconds`. +- Trend charts: queue depth (last 24h), runs per trigger, average duration, determinism score. +- Alert banners: planner lag > SLA, queue depth > threshold, repeated error codes. +- Telemetry panel lists latest events (e.g., `scheduler.run.started`, `scheduler.run.completed`, `scheduler.run.failed`). + +--- + +## 10. Offline and air-gap behaviour + +- Offline banner highlights snapshot timestamp and indicates SSE disabled. +- Manual run form switches to generate CLI script for offline execution (`stella runs submit --bundle `). +- Evidence download buttons output local paths; UI reminds to copy to removable media. +- Queue charts use snapshot data; manual refresh button loads latest records from Offline Kit. +- Tenants absent from snapshot hidden to avoid partial data. + +--- + +## 11. Screenshot coordination + +- Placeholders: + - `![Runs dashboard placeholder](../assets/ui/runs/dashboard-placeholder.png)` + - `![Run detail placeholder](../assets/ui/runs/detail-placeholder.png)` +- Coordinate with Scheduler Guild for updated screenshots after Sprint 23 UI stabilises (tracked in `#console-screenshots`, entry 2025-10-26). + +--- + +## 12. References + +- `/docs/ui/console-overview.md` - shell, SSE ticker. +- `/docs/ui/navigation.md` - route map and deep links. +- `/docs/ui/findings.md` - findings filtered by run. +- `/docs/ui/downloads.md` - download manager, export retention, CLI parity. +- `/docs/ARCHITECTURE_SCHEDULER.md` - scheduler architecture and data model. +- `/docs/policy/runs.md` - policy run integration. +- `/docs/cli/policy.md` and `/docs/cli/policy.md` section 5 for CLI parity (runs commands pending). +- `/docs/ops/scheduler-runbook.md` - troubleshooting. + +--- + +## 13. 
Compliance checklist + +- [ ] Runs table columns, filters, and states described. +- [ ] Detail drawer sections documented (segments, deltas, evidence, logs). +- [ ] Queue management, manual run, and preview coverage included. +- [ ] SSE and live update behaviour detailed. +- [ ] Retry, remediation, and runbook references provided. +- [ ] Evidence downloads and bundle workflows documented with CLI parity. +- [ ] Offline behaviour and screenshot coordination recorded. +- [ ] References validated. + +--- + +*Last updated: 2025-10-26 (Sprint 23).* diff --git a/docs/ui/sbom-explorer.md b/docs/ui/sbom-explorer.md index 87d16d78..6137fe4f 100644 --- a/docs/ui/sbom-explorer.md +++ b/docs/ui/sbom-explorer.md @@ -1,195 +1,195 @@ -# StellaOps Console - SBOM Explorer - -> **Audience:** Console UX, SBOM Service Guild, enablement teams, customer onboarding. -> **Scope:** Catalog listing, component detail, graph overlays, exports, performance hints, and offline behaviour for the SBOM Explorer that ships in Sprint 23. - -The SBOM Explorer lets operators inspect software bills of materials collected by Scanner and normalised by the SBOM Service. It provides tenant-scoped catalogs, usage overlays, provenance-aware graphs, and deterministic export paths that align with CLI workflows. - ---- - -## 1. Access and prerequisites - -- **Routes:** `/console/sbom` (catalog) and `/console/sbom/:digest` (detail). -- **Scopes:** `sbom.read` (required), `sbom.export` for large export jobs, `findings.read` to open explain drawers, `policy.read` to view overlay metadata. -- **Feature flags:** `sbomExplorer.enabled` (default true when SBOM Service v3 API is enabled) and `graph.overlays.enabled` for Cartographer-backed overlays. -- **Tenant scoping:** All queries include `tenant` tokens; switching tenants triggers catalog refetch and clears cached overlays. -- **Data dependencies:** Requires SBOM Service 3.1+ with Cartographer overlays and Policy Engine explain hints enabled. - ---- - -## 2. Layout overview - -``` -+-----------------------------------------------------------------------+ -| Header: Tenant badge - global filters - offline indicator - actions | -+-----------------------------------------------------------------------+ -| Left rail: Saved views - pinned tags - export queue status | -+-----------------------------------------------------------------------+ -| Catalog table (virtualised) | -| - Columns: Image digest - Source - Scan timestamp - Policy verdict | -| - Badges: Delta SBOM, Attested, Offline snapshot | -+-----------------------------------------------------------------------+ -| Detail drawer or full page tabs (Inventory | Usage | Components | | -| Overlays | Explain | Exports) | -+-----------------------------------------------------------------------+ -``` - -The catalog and detail views reuse the shared command palette, context chips, and SSE status ticker described in `/docs/ui/navigation.md`. - ---- - -## 3. Catalog view - -| Feature | Description | -|---------|-------------| -| **Virtual table** | Uses Angular CDK virtual scroll to render up to 10,000 records per tenant without layout jank. Sorting and filtering are client-side for <= 20k rows; the UI upgrades to server-side queries automatically when more records exist. | -| **Preset segments** | Quick toggles for `All`, `Recent (7 d)`, `Delta-ready`, `Attested`, and `Offline snapshots`. Each preset maps to saved view tokens for CLI parity. 
| -| **Search** | Global search field supports image digests, repository tags, SBOM IDs, and component PURLs. Search terms propagate to the detail view when opened. | -| **Badges** | - `Delta` badge indicates SBOM produced via delta mode (layers reuse).
- `Attested` badge links to Attestor proof and Rekor record.
- `Snapshot` badge shows offline import hash.
- `Policy` badge references last policy verdict summary. | -| **Bulk actions** | Multi-select rows to stage export jobs, trigger async explain generation, or copy CLI commands. Actions enforce per-tenant rate limits and show authority scopes in tooltips. | - ---- - -## 4. Detail tabs - -### 4.1 Inventory tab - -- Default view summarising all components with columns for package name (PURL), version, supplier, license, size, and counts of referencing layers. -- Filters: severity, ecosystem (OS, NPM, PyPI, Maven, Go, NuGet, Rust, containers), usage flag (true/false), package tags. -- Sorting: by severity (desc), version (asc), supplier. -- Cell tooltips reference Concelier advisories and Policy Engine findings when available. -- Total component count, unique suppliers, and critical severity counts appear in the header cards. - -### 4.2 Usage tab - -- Focuses on runtime usage (EntryTrace, runtime sensors, allow lists). -- Columns include process names, entry points, and `usedByEntrypoint` flags. -- Grouping: by entry point, by package, or by namespace (Kubernetes). -- Highlights mismatches between declared dependencies and observed usage for drift detection. - -### 4.3 Components tab - -- Deep dive for a single component selected from Inventory or Usage. -- Shows provenance timeline (introduced in layer, modified, removed), file paths, cryptographic hashes, and linked evidence (DSSE, Attestor bundles). -- Links to CLI commands: `stella sbom component show ` and `stella sbom component export`. -- Drawer supports multi-component comparison through tabbed interface. - -### 4.4 Overlays tab - -- Displays Cartographer overlays: vulnerability overlays (policy verdicts), runtime overlays (process traces), and vendor advisories. -- Each overlay card lists source, generation timestamp, precedence, and staleness relative to tenant SLA. -- Toggle overlays on/off to see impact on component status; UI does not mutate canonical SBOM, it only enriches the view. -- Graph preview button opens force-directed component graph (limited to <= 500 nodes) with filters for dependency depth and relationship type. -- Overlay metadata includes the CLI parity snippet: `stella sbom overlay apply --overlay --digest `. - -### 4.5 Explain tab - -- Integrates Policy Engine explain drawer. -- Shows rule hits, VEX overrides, and evidence per component. -- Provides "Open in Findings" link that preserves tenant and filters. - -### 4.6 Exports tab - -- Lists available exports (CycloneDX JSON, CycloneDX Protobuf, SPDX JSON, SPDX Tag-Value, Delta bundle, Evidence bundle). -- Each export entry shows size, hash (SHA-256), format version, and generation time. -- Download buttons respect RBAC and offline quotas; CLI callouts mirror `stella sbom export`. -- "Schedule export" launches async job for large bundles; job status integrates with `/console/downloads`. -- Includes copy-to-clipboard path for offline transfers (`/offline-kits/export///`). - ---- - -## 5. Filters and presets - -| Filter | Applies to | Notes | -|--------|------------|-------| -| **Severity** | Inventory, Overlays, Explain | Uses Policy Engine severity buckets and KEV flag. | -| **Ecosystem** | Inventory, Usage | Multi-select list with search; maps to package type derived from PURL. | -| **License** | Inventory | Groups by SPDX identifiers; warns on copyleft obligations. | -| **Supplier** | Inventory, Components | Autocomplete backed by SBOM metadata. | -| **Tags** | Inventory, Usage | Tags provided by Scanner or user-defined metadata. 
| -| **Component search** | Components, Overlays | Accepts PURL or substring; retains highlight when switching tabs. | -| **Snapshot** | Catalog | Filters to SBOMs sourced from Offline Kit or local import. | -| **Attested only** | Catalog, Exports | Limits to SBOMs signed by Attestor; displays Rekor badge. | - -Saved views store combinations of these filters and expose command palette shortcuts (`Cmd+1-9 / Ctrl+1-9`). - ---- - -## 6. Graph overlays and cartography - -- Graph view is powered by Cartographer projections (tenant-scoped graph snapshots). -- Supported overlays: - - **Dependency graph** (default) - nodes represent components, edges represent dependencies with direction (introducer -> introduced). - - **Runtime call graph** - optional overlay layering process calls on top of dependencies. - - **Vulnerability overlay** - colours nodes by highest severity and outlines exploited components. -- Controls: depth slider (1-6), include transitive flag, hide dev dependencies toggle, highlight vendor-specified critical paths. -- Export options: GraphML, JSON Lines, and screenshot capture (requires `graph.export`). -- Performance guardrails: overlays warn when node count exceeds 2,000; user can queue background job to render static graph for download instead. - ---- - -## 7. Exports and automation - -- **Instant exports:** Inline downloads for CycloneDX JSON/Protobuf (<= 25 MB) and SPDX JSON (<= 25 MB). -- **Async exports:** Larger bundles stream through the download manager with resume support. UI polls `/console/downloads` every 15 seconds while export is in progress. -- **CLI parity:** Each export card displays the equivalent CLI command and environment variables (proxy, offline). -- **Compliance metadata:** Export manifests include SBOM ID, component count, hash, signature state, and policy verdict summary so auditors can validate offline. -- **Automation hooks:** Webhook button copies the `/downloads/hooks/subscribe` call for integration with CI pipelines. - ---- - -## 8. Performance tips - -- Virtual scroll keeps initial render under 70 ms for 10k rows; server-side pagination engages beyond that threshold. -- Graph overlay rendering uses Web Workers to keep main thread responsive; heavy layouts show "Background layout in progress" banner. -- SSE updates (new SBOM ready) refresh header cards and prepend rows without full table redraw. -- Prefetching: opening a detail drawer preloads overlays and exports concurrently; these requests cancel automatically if the user navigates away. -- Local cache (IndexedDB) stores last viewed SBOM detail for each tenant (up to 20 entries). Cache invalidates when new merge hash is observed. - ---- - -## 9. Offline and air-gap behaviour - -- Catalog reads from Offline Kit snapshot if gateway is in sealed mode; offline banner lists snapshot ID and staleness. -- Overlays limited to data included in snapshot; missing overlays show guidance to import updated Cartographer package. -- Exports queue locally and generate tarballs ready to copy to removable media. -- CLI parity callouts switch to offline examples (using `stella sbom export --offline`). -- Tenants unavailable in snapshot are hidden from the tenant picker to prevent inconsistent views. - ---- - -## 10. 
Screenshot coordination - -- Placeholder images: - - `![SBOM catalog view placeholder](../assets/ui/sbom/catalog-placeholder.png)` - - `![Overlay graph placeholder](../assets/ui/sbom/overlay-placeholder.png)` -- Coordinate with Console Guild to capture updated screenshots (dark and light theme) once Sprint 23 UI stabilises. Track follow-up in Console Guild thread `#console-screenshots` dated 2025-10-26. - ---- - -## 11. References - -- `/docs/ui/console-overview.md` - navigation shell, tenant model, filters. -- `/docs/ui/navigation.md` - command palette, deep-link schema. -- `/docs/ui/downloads.md` - download queue, manifest parity, offline export handling. -- `/docs/security/console-security.md` - scopes, DPoP, CSP. -- `/docs/cli-vs-ui-parity.md` - CLI equivalence matrix. -- `/docs/architecture/console.md` (pending) - component data flows. -- `/docs/architecture/overview.md` - high-level module relationships. -- `/docs/ingestion/aggregation-only-contract.md` - provenance and guard rails. - ---- - -## 12. Compliance checklist - -- [ ] Catalog table and detail tabs documented with columns, filters, and presets. -- [ ] Overlay behaviour describes Cartographer integration and CLI parity. -- [ ] Export section includes instant vs async workflow and compliance metadata. -- [ ] Performance considerations align with UI benchmarks (virtual scroll, workers). -- [ ] Offline behaviour captured for catalog, overlays, exports. -- [ ] Screenshot placeholders and coordination notes recorded with Console Guild follow-up. -- [ ] All referenced docs verified and accessible. - ---- - -*Last updated: 2025-10-26 (Sprint 23).* +# StellaOps Console - SBOM Explorer + +> **Audience:** Console UX, SBOM Service Guild, enablement teams, customer onboarding. +> **Scope:** Catalog listing, component detail, graph overlays, exports, performance hints, and offline behaviour for the SBOM Explorer that ships in Sprint 23. + +The SBOM Explorer lets operators inspect software bills of materials collected by Scanner and normalised by the SBOM Service. It provides tenant-scoped catalogs, usage overlays, provenance-aware graphs, and deterministic export paths that align with CLI workflows. + +--- + +## 1. Access and prerequisites + +- **Routes:** `/console/sbom` (catalog) and `/console/sbom/:digest` (detail). +- **Scopes:** `sbom.read` (required), `sbom.export` for large export jobs, `findings.read` to open explain drawers, `policy.read` to view overlay metadata. +- **Feature flags:** `sbomExplorer.enabled` (default true when SBOM Service v3 API is enabled) and `graph.overlays.enabled` for Cartographer-backed overlays. +- **Tenant scoping:** All queries include `tenant` tokens; switching tenants triggers catalog refetch and clears cached overlays. +- **Data dependencies:** Requires SBOM Service 3.1+ with Cartographer overlays and Policy Engine explain hints enabled. + +--- + +## 2. 
Layout overview + +``` ++-----------------------------------------------------------------------+ +| Header: Tenant badge - global filters - offline indicator - actions | ++-----------------------------------------------------------------------+ +| Left rail: Saved views - pinned tags - export queue status | ++-----------------------------------------------------------------------+ +| Catalog table (virtualised) | +| - Columns: Image digest - Source - Scan timestamp - Policy verdict | +| - Badges: Delta SBOM, Attested, Offline snapshot | ++-----------------------------------------------------------------------+ +| Detail drawer or full page tabs (Inventory | Usage | Components | | +| Overlays | Explain | Exports) | ++-----------------------------------------------------------------------+ +``` + +The catalog and detail views reuse the shared command palette, context chips, and SSE status ticker described in `/docs/ui/navigation.md`. + +--- + +## 3. Catalog view + +| Feature | Description | +|---------|-------------| +| **Virtual table** | Uses Angular CDK virtual scroll to render up to 10,000 records per tenant without layout jank. Sorting and filtering are client-side for <= 20k rows; the UI upgrades to server-side queries automatically when more records exist. | +| **Preset segments** | Quick toggles for `All`, `Recent (7 d)`, `Delta-ready`, `Attested`, and `Offline snapshots`. Each preset maps to saved view tokens for CLI parity. | +| **Search** | Global search field supports image digests, repository tags, SBOM IDs, and component PURLs. Search terms propagate to the detail view when opened. | +| **Badges** | - `Delta` badge indicates SBOM produced via delta mode (layers reuse).
- `Attested` badge links to Attestor proof and Rekor record.
- `Snapshot` badge shows offline import hash.
- `Policy` badge references last policy verdict summary. | +| **Bulk actions** | Multi-select rows to stage export jobs, trigger async explain generation, or copy CLI commands. Actions enforce per-tenant rate limits and show authority scopes in tooltips. | + +--- + +## 4. Detail tabs + +### 4.1 Inventory tab + +- Default view summarising all components with columns for package name (PURL), version, supplier, license, size, and counts of referencing layers. +- Filters: severity, ecosystem (OS, NPM, PyPI, Maven, Go, NuGet, Rust, containers), usage flag (true/false), package tags. +- Sorting: by severity (desc), version (asc), supplier. +- Cell tooltips reference Concelier advisories and Policy Engine findings when available. +- Total component count, unique suppliers, and critical severity counts appear in the header cards. + +### 4.2 Usage tab + +- Focuses on runtime usage (EntryTrace, runtime sensors, allow lists). +- Columns include process names, entry points, and `usedByEntrypoint` flags. +- Grouping: by entry point, by package, or by namespace (Kubernetes). +- Highlights mismatches between declared dependencies and observed usage for drift detection. + +### 4.3 Components tab + +- Deep dive for a single component selected from Inventory or Usage. +- Shows provenance timeline (introduced in layer, modified, removed), file paths, cryptographic hashes, and linked evidence (DSSE, Attestor bundles). +- Links to CLI commands: `stella sbom component show ` and `stella sbom component export`. +- Drawer supports multi-component comparison through tabbed interface. + +### 4.4 Overlays tab + +- Displays Cartographer overlays: vulnerability overlays (policy verdicts), runtime overlays (process traces), and vendor advisories. +- Each overlay card lists source, generation timestamp, precedence, and staleness relative to tenant SLA. +- Toggle overlays on/off to see impact on component status; UI does not mutate canonical SBOM, it only enriches the view. +- Graph preview button opens force-directed component graph (limited to <= 500 nodes) with filters for dependency depth and relationship type. +- Overlay metadata includes the CLI parity snippet: `stella sbom overlay apply --overlay --digest `. + +### 4.5 Explain tab + +- Integrates Policy Engine explain drawer. +- Shows rule hits, VEX overrides, and evidence per component. +- Provides "Open in Findings" link that preserves tenant and filters. + +### 4.6 Exports tab + +- Lists available exports (CycloneDX JSON, CycloneDX Protobuf, SPDX JSON, SPDX Tag-Value, Delta bundle, Evidence bundle). +- Each export entry shows size, hash (SHA-256), format version, and generation time. +- Download buttons respect RBAC and offline quotas; CLI callouts mirror `stella sbom export`. +- "Schedule export" launches async job for large bundles; job status integrates with `/console/downloads`. +- Includes copy-to-clipboard path for offline transfers (`/offline-kits/export///`). + +--- + +## 5. Filters and presets + +| Filter | Applies to | Notes | +|--------|------------|-------| +| **Severity** | Inventory, Overlays, Explain | Uses Policy Engine severity buckets and KEV flag. | +| **Ecosystem** | Inventory, Usage | Multi-select list with search; maps to package type derived from PURL. | +| **License** | Inventory | Groups by SPDX identifiers; warns on copyleft obligations. | +| **Supplier** | Inventory, Components | Autocomplete backed by SBOM metadata. | +| **Tags** | Inventory, Usage | Tags provided by Scanner or user-defined metadata. 
| +| **Component search** | Components, Overlays | Accepts PURL or substring; retains highlight when switching tabs. | +| **Snapshot** | Catalog | Filters to SBOMs sourced from Offline Kit or local import. | +| **Attested only** | Catalog, Exports | Limits to SBOMs signed by Attestor; displays Rekor badge. | + +Saved views store combinations of these filters and expose command palette shortcuts (`Cmd+1-9 / Ctrl+1-9`). + +--- + +## 6. Graph overlays and cartography + +- Graph view is powered by Cartographer projections (tenant-scoped graph snapshots). +- Supported overlays: + - **Dependency graph** (default) - nodes represent components, edges represent dependencies with direction (introducer -> introduced). + - **Runtime call graph** - optional overlay layering process calls on top of dependencies. + - **Vulnerability overlay** - colours nodes by highest severity and outlines exploited components. +- Controls: depth slider (1-6), include transitive flag, hide dev dependencies toggle, highlight vendor-specified critical paths. +- Export options: GraphML, JSON Lines, and screenshot capture (requires `graph.export`). +- Performance guardrails: overlays warn when node count exceeds 2,000; user can queue background job to render static graph for download instead. + +--- + +## 7. Exports and automation + +- **Instant exports:** Inline downloads for CycloneDX JSON/Protobuf (<= 25 MB) and SPDX JSON (<= 25 MB). +- **Async exports:** Larger bundles stream through the download manager with resume support. UI polls `/console/downloads` every 15 seconds while export is in progress. +- **CLI parity:** Each export card displays the equivalent CLI command and environment variables (proxy, offline). +- **Compliance metadata:** Export manifests include SBOM ID, component count, hash, signature state, and policy verdict summary so auditors can validate offline. +- **Automation hooks:** Webhook button copies the `/downloads/hooks/subscribe` call for integration with CI pipelines. + +--- + +## 8. Performance tips + +- Virtual scroll keeps initial render under 70 ms for 10k rows; server-side pagination engages beyond that threshold. +- Graph overlay rendering uses Web Workers to keep main thread responsive; heavy layouts show "Background layout in progress" banner. +- SSE updates (new SBOM ready) refresh header cards and prepend rows without full table redraw. +- Prefetching: opening a detail drawer preloads overlays and exports concurrently; these requests cancel automatically if the user navigates away. +- Local cache (IndexedDB) stores last viewed SBOM detail for each tenant (up to 20 entries). Cache invalidates when new merge hash is observed. + +--- + +## 9. Offline and air-gap behaviour + +- Catalog reads from Offline Kit snapshot if gateway is in sealed mode; offline banner lists snapshot ID and staleness. +- Overlays limited to data included in snapshot; missing overlays show guidance to import updated Cartographer package. +- Exports queue locally and generate tarballs ready to copy to removable media. +- CLI parity callouts switch to offline examples (using `stella sbom export --offline`). +- Tenants unavailable in snapshot are hidden from the tenant picker to prevent inconsistent views. + +--- + +## 10. 
Screenshot coordination + +- Placeholder images: + - `![SBOM catalog view placeholder](../assets/ui/sbom/catalog-placeholder.png)` + - `![Overlay graph placeholder](../assets/ui/sbom/overlay-placeholder.png)` +- Coordinate with Console Guild to capture updated screenshots (dark and light theme) once Sprint 23 UI stabilises. Track follow-up in Console Guild thread `#console-screenshots` dated 2025-10-26. + +--- + +## 11. References + +- `/docs/ui/console-overview.md` - navigation shell, tenant model, filters. +- `/docs/ui/navigation.md` - command palette, deep-link schema. +- `/docs/ui/downloads.md` - download queue, manifest parity, offline export handling. +- `/docs/security/console-security.md` - scopes, DPoP, CSP. +- `/docs/cli-vs-ui-parity.md` - CLI equivalence matrix. +- `/docs/architecture/console.md` (pending) - component data flows. +- `/docs/architecture/overview.md` - high-level module relationships. +- `/docs/ingestion/aggregation-only-contract.md` - provenance and guard rails. + +--- + +## 12. Compliance checklist + +- [ ] Catalog table and detail tabs documented with columns, filters, and presets. +- [ ] Overlay behaviour describes Cartographer integration and CLI parity. +- [ ] Export section includes instant vs async workflow and compliance metadata. +- [ ] Performance considerations align with UI benchmarks (virtual scroll, workers). +- [ ] Offline behaviour captured for catalog, overlays, exports. +- [ ] Screenshot placeholders and coordination notes recorded with Console Guild follow-up. +- [ ] All referenced docs verified and accessible. + +--- + +*Last updated: 2025-10-26 (Sprint 23).* diff --git a/docs/updates/2025-10-20-authority-identity-registry.md b/docs/updates/2025-10-20-authority-identity-registry.md index 9282d44c..7f254e66 100644 --- a/docs/updates/2025-10-20-authority-identity-registry.md +++ b/docs/updates/2025-10-20-authority-identity-registry.md @@ -1,14 +1,14 @@ -# 2025-10-20 — Authority Identity Provider Registry & DPoP nonce updates - -## Summary -- Authority host now resolves identity providers through the new metadata/handle pattern introduced in `StellaOps.Authority.Plugins.Abstractions`. Runtime handlers (`ValidateClientCredentialsHandler`, `ValidatePasswordGrantHandler`, `ValidateAccessTokenHandler`, bootstrap endpoints) acquire providers with `IAuthorityIdentityProviderRegistry.AcquireAsync` and rely on metadata (`AuthorityIdentityProviderMetadata`) for capability checks. -- Unit and integration tests build lightweight `ServiceProvider` instances with test plugins, matching production DI behaviour and ensuring the new registry contract is exercised. -- DPoP nonce enforcement now prefers `NormalizedAudiences` when populated and gracefully falls back to the configured `RequiredAudiences`, eliminating the runtime type mismatch that previously surfaced during test runs. - -## Operator impact -- No configuration changes are required; existing YAML and environment-based settings continue to function. -- Documentation examples referencing password/mTLS bootstrap flows remain accurate. The new registry logic simply ensures providers advertised in configuration are resolved deterministically and capability-gated before use. - -## Developer notes -- When adding new identity providers or tests, register plugins via `ServiceCollection` and call `new AuthorityIdentityProviderRegistry(serviceProvider, logger)`. 
-- For DPoP-required endpoints, populate `security.senderConstraints.dpop.nonce.requiredAudiences` or rely on defaults; both now funnel through the normalized set. +# 2025-10-20 — Authority Identity Provider Registry & DPoP nonce updates + +## Summary +- Authority host now resolves identity providers through the new metadata/handle pattern introduced in `StellaOps.Authority.Plugins.Abstractions`. Runtime handlers (`ValidateClientCredentialsHandler`, `ValidatePasswordGrantHandler`, `ValidateAccessTokenHandler`, bootstrap endpoints) acquire providers with `IAuthorityIdentityProviderRegistry.AcquireAsync` and rely on metadata (`AuthorityIdentityProviderMetadata`) for capability checks. +- Unit and integration tests build lightweight `ServiceProvider` instances with test plugins, matching production DI behaviour and ensuring the new registry contract is exercised. +- DPoP nonce enforcement now prefers `NormalizedAudiences` when populated and gracefully falls back to the configured `RequiredAudiences`, eliminating the runtime type mismatch that previously surfaced during test runs. + +## Operator impact +- No configuration changes are required; existing YAML and environment-based settings continue to function. +- Documentation examples referencing password/mTLS bootstrap flows remain accurate. The new registry logic simply ensures providers advertised in configuration are resolved deterministically and capability-gated before use. + +## Developer notes +- When adding new identity providers or tests, register plugins via `ServiceCollection` and call `new AuthorityIdentityProviderRegistry(serviceProvider, logger)`. +- For DPoP-required endpoints, populate `security.senderConstraints.dpop.nonce.requiredAudiences` or rely on defaults; both now funnel through the normalized set. diff --git a/docs/updates/2025-10-20-scanner-events.md b/docs/updates/2025-10-20-scanner-events.md index 8166e2e2..7ce7d735 100644 --- a/docs/updates/2025-10-20-scanner-events.md +++ b/docs/updates/2025-10-20-scanner-events.md @@ -2,4 +2,4 @@ - Scanner WebService now wires a reusable `IRedisConnectionFactory`, simplifying redis transport testing and reuse for future adapters. - `/api/v1/reports` integration test (`ReportsEndpointPublishesPlatformEvents`) asserts both report-ready and scan-completed envelopes carry DSSE payloads, scope metadata, and deterministic verdicts. -- Task `SCANNER-EVENTS-15-201` closed after verifying `dotnet test src/StellaOps.Scanner.WebService.Tests/StellaOps.Scanner.WebService.Tests.csproj`. +- Task `SCANNER-EVENTS-15-201` closed after verifying `dotnet test src/Scanner/__Tests/StellaOps.Scanner.WebService.Tests/StellaOps.Scanner.WebService.Tests.csproj`. diff --git a/docs/updates/2025-10-22-docs-guild.md b/docs/updates/2025-10-22-docs-guild.md index 6ffadc98..177a7c3b 100644 --- a/docs/updates/2025-10-22-docs-guild.md +++ b/docs/updates/2025-10-22-docs-guild.md @@ -1,13 +1,13 @@ -# Docs Guild Update — 2025-10-22 - -**Subject:** Concelier Authority toggle rollout polish -**Audience:** Docs Guild, Concelier WebService Guild, Authority Core - -- Added a rollout phase table to `docs/10_CONCELIER_CLI_QUICKSTART.md`, clarifying how `authority.enabled` and `authority.allowAnonymousFallback` move from validation to enforced mode and highlighting the audit/metric signals to watch at each step. -- Extended the Authority integration checklist in the same quickstart so operators tie CLI smoke tests to audit counters before flipping enforcement. 
-- Refreshed `docs/ops/concelier-authority-audit-runbook.md` with the latest date stamp, prerequisites, and pre-check guidance that reference the quickstart timeline; keeps change-request templates aligned. -- Documented the new Go analyzer artefacts in `docs/24_OFFLINE_KIT.md` (manifest excerpt + tarball smoke test) so Ops can confirm the plug-in ships in the 2025‑10‑22 bundle before promoting it to mirrors. - -Next steps: -- Concelier WebService owners to link this update in the next deployment bulletin once FEEDWEB-DOCS-01-001 clears review. -- Docs Guild to verify the Offline Kit doc bundle picks up the quickstart/runbook changes after the nightly build. +# Docs Guild Update — 2025-10-22 + +**Subject:** Concelier Authority toggle rollout polish +**Audience:** Docs Guild, Concelier WebService Guild, Authority Core + +- Added a rollout phase table to `docs/10_CONCELIER_CLI_QUICKSTART.md`, clarifying how `authority.enabled` and `authority.allowAnonymousFallback` move from validation to enforced mode and highlighting the audit/metric signals to watch at each step. +- Extended the Authority integration checklist in the same quickstart so operators tie CLI smoke tests to audit counters before flipping enforcement. +- Refreshed `docs/ops/concelier-authority-audit-runbook.md` with the latest date stamp, prerequisites, and pre-check guidance that reference the quickstart timeline; keeps change-request templates aligned. +- Documented the new Go analyzer artefacts in `docs/24_OFFLINE_KIT.md` (manifest excerpt + tarball smoke test) so Ops can confirm the plug-in ships in the 2025‑10‑22 bundle before promoting it to mirrors. + +Next steps: +- Concelier WebService owners to link this update in the next deployment bulletin once FEEDWEB-DOCS-01-001 clears review. +- Docs Guild to verify the Offline Kit doc bundle picks up the quickstart/runbook changes after the nightly build. diff --git a/docs/updates/2025-10-26-authority-graph-scopes.md b/docs/updates/2025-10-26-authority-graph-scopes.md index a3fe752e..0bd8a0fa 100644 --- a/docs/updates/2025-10-26-authority-graph-scopes.md +++ b/docs/updates/2025-10-26-authority-graph-scopes.md @@ -1,15 +1,15 @@ -# 2025-10-26 — Authority graph scopes documentation refresh - -## Summary - -- Documented least-privilege guidance for the new `graph:*` scopes in `docs/11_AUTHORITY.md` (scope mapping, tenant propagation, and DPoP expectations). -- Extended the sample client table/config to include Cartographer and Graph API registrations so downstream teams can copy/paste the correct defaults. -- Highlighted the requirement to consume `StellaOpsScopes` constants instead of hard-coded scope strings across services. - -## Next steps - -| Team | Follow-up | Target | -|------|-----------|--------| -| Authority Core | Ensure `/jwks` changelog references graph scope rollout in next release note. | 2025-10-28 | -| Graph API Guild | Update gateway scaffolding to request scopes from `StellaOpsScopes` once the host project lands. | Sprint 21 stand-up | -| Scheduler Guild | Confirm Cartographer client onboarding uses the new sample secret templates. | Sprint 21 stand-up | +# 2025-10-26 — Authority graph scopes documentation refresh + +## Summary + +- Documented least-privilege guidance for the new `graph:*` scopes in `docs/11_AUTHORITY.md` (scope mapping, tenant propagation, and DPoP expectations). +- Extended the sample client table/config to include Cartographer and Graph API registrations so downstream teams can copy/paste the correct defaults. 
+- Highlighted the requirement to consume `StellaOpsScopes` constants instead of hard-coded scope strings across services. + +## Next steps + +| Team | Follow-up | Target | +|------|-----------|--------| +| Authority Core | Ensure `/jwks` changelog references graph scope rollout in next release note. | 2025-10-28 | +| Graph API Guild | Update gateway scaffolding to request scopes from `StellaOpsScopes` once the host project lands. | Sprint 21 stand-up | +| Scheduler Guild | Confirm Cartographer client onboarding uses the new sample secret templates. | Sprint 21 stand-up | diff --git a/docs/updates/2025-10-26-scheduler-graph-jobs.md b/docs/updates/2025-10-26-scheduler-graph-jobs.md index 701955c0..c88e64ff 100644 --- a/docs/updates/2025-10-26-scheduler-graph-jobs.md +++ b/docs/updates/2025-10-26-scheduler-graph-jobs.md @@ -6,10 +6,10 @@ SCHED-MODELS-21-001 delivered the new `GraphBuildJob`/`GraphOverlayJob` contract Key links: -- Schema doc: `src/StellaOps.Scheduler.Models/docs/SCHED-MODELS-21-001-GRAPH-JOBS.md` +- Schema doc: `src/Scheduler/__Libraries/StellaOps.Scheduler.Models/docs/SCHED-MODELS-21-001-GRAPH-JOBS.md` - Samples (round-trip tested): `samples/api/scheduler/graph-build-job.json`, `samples/api/scheduler/graph-overlay-job.json` - Event schema + sample: `docs/events/scheduler.graph.job.completed@1.json`, `docs/events/samples/scheduler.graph.job.completed@1.sample.json` -- API doc: `src/StellaOps.Scheduler.WebService/docs/SCHED-WEB-21-001-GRAPH-APIS.md` +- API doc: `src/Scheduler/StellaOps.Scheduler.WebService/docs/SCHED-WEB-21-001-GRAPH-APIS.md` - Tests: `StellaOps.Scheduler.Models.Tests/SamplePayloadTests.cs`, `GraphJobStateMachineTests.cs` ## Action items @@ -28,7 +28,7 @@ Key links: > Suggested message for Slack `#scheduler-guild` & `#cartographer-guild`: > > ``` -> Graph job DTOs/docs are live (SCHED-MODELS-21-001/002). Samples under samples/api/scheduler, schema notes in src/StellaOps.Scheduler.Models/docs/SCHED-MODELS-21-001-GRAPH-JOBS.md. Please review before wiring SCHED-WEB-21-001/201. GraphJobStateMachine enforces status/attempt invariants—shout if you need additional states. +> Graph job DTOs/docs are live (SCHED-MODELS-21-001/002). Samples under samples/api/scheduler, schema notes in src/Scheduler/__Libraries/StellaOps.Scheduler.Models/docs/SCHED-MODELS-21-001-GRAPH-JOBS.md. Please review before wiring SCHED-WEB-21-001/201. GraphJobStateMachine enforces status/attempt invariants—shout if you need additional states. > ``` Record notifications here once posted. diff --git a/docs/updates/2025-10-27-console-security-signoff.md b/docs/updates/2025-10-27-console-security-signoff.md index af2c6cac..6c684a1f 100644 --- a/docs/updates/2025-10-27-console-security-signoff.md +++ b/docs/updates/2025-10-27-console-security-signoff.md @@ -1,48 +1,48 @@ -# Console Security Checklist Sign-off — 2025-10-27 - -## Summary - -- Security Guild completed the console security compliance checklist from [`docs/security/console-security.md`](../security/console-security.md) against the Sprint 23 build. -- No blocking findings. One observability note (raise Grafana burn-rate alert to SLO board) was addressed during the run; no follow-up tickets required. -- Result: **PASS** – console may progress with Sprint 23 release gating. - -## Authority client validation - -- Ran `stella authority clients show console-ui` in staging; confirmed `pkce.enforced=true`, `dpop.required=true`, and `claim.requireTenant=true`. 
-- Verified scope bundle matches §3 (baseline `ui.read`, admin set, and per-feature scopes). Results archived under `ops/evidence/console-ui-client-2025-10-27.json`. - -## CSP enforcement - -- Inspected rendered response headers via `curl -I https://console.stg.stellaops.local/` – CSP matches §4 defaults (`default-src 'self'`, `connect-src 'self' https://*.internal`), HSTS + Referrer-Policy present. -- Helm overrides reviewed (`deploy/helm/stellaops/values-prod.yaml`); no extra origins declared. - -## Fresh-auth timer - -- Executed Playwright admin flow: promoted policy revisions twice; observed fresh-auth modal after 5 minutes idle. -- Authority audit feed shows `authority.fresh_auth.success` and `authority.policy.promote` entries sharing correlation IDs. - -## DPoP binding test - -- Replayed captured bearer token without DPoP proof; Gateway returned `401` and incremented `ui_dpop_failure_total`. -- Confirmed logs contain `ui.security.anomaly` event with matching `traceId`. - -## Offline mode exercise - -- Deployed console with `console.offlineMode=true`; Offline banner rendered, SSE disabled, CLI guidance surfaced on runs/downloads pages. -- Imported Offline Kit manifest; parity checks report `OK` status. - -## Evidence parity - -- Downloaded run evidence bundle via UI, re-exported via CLI `stella runs export --run `; SHA-256 digests match. -- Verified Downloads workspace never caches bundle contents (only manifest metadata stored). - -## Monitoring & alerts - -- Grafana board `console-security.json` linked to alerts: `ui_request_duration_seconds` burn-rate, DPoP failure count, downloads manifest verification failures. -- PagerDuty playbook references `docs/security/console-security.md` §6 for incident steps. - -## Sign-off - -- Reviewed by **Security Guild** (lead: `@sec-lfox`). -- Sign-off recorded in Sprint 23 tracker (`SPRINTS.md`, `DOCS-CONSOLE-23-018`). - +# Console Security Checklist Sign-off — 2025-10-27 + +## Summary + +- Security Guild completed the console security compliance checklist from [`docs/security/console-security.md`](../security/console-security.md) against the Sprint 23 build. +- No blocking findings. One observability note (raise Grafana burn-rate alert to SLO board) was addressed during the run; no follow-up tickets required. +- Result: **PASS** – console may progress with Sprint 23 release gating. + +## Authority client validation + +- Ran `stella authority clients show console-ui` in staging; confirmed `pkce.enforced=true`, `dpop.required=true`, and `claim.requireTenant=true`. +- Verified scope bundle matches §3 (baseline `ui.read`, admin set, and per-feature scopes). Results archived under `ops/evidence/console-ui-client-2025-10-27.json`. + +## CSP enforcement + +- Inspected rendered response headers via `curl -I https://console.stg.stellaops.local/` – CSP matches §4 defaults (`default-src 'self'`, `connect-src 'self' https://*.internal`), HSTS + Referrer-Policy present. +- Helm overrides reviewed (`deploy/helm/stellaops/values-prod.yaml`); no extra origins declared. + +## Fresh-auth timer + +- Executed Playwright admin flow: promoted policy revisions twice; observed fresh-auth modal after 5 minutes idle. +- Authority audit feed shows `authority.fresh_auth.success` and `authority.policy.promote` entries sharing correlation IDs. + +## DPoP binding test + +- Replayed captured bearer token without DPoP proof; Gateway returned `401` and incremented `ui_dpop_failure_total`. +- Confirmed logs contain `ui.security.anomaly` event with matching `traceId`. 
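+A minimal sketch of the replay probe used above (the endpoint path, token handling, and variable names are illustrative placeholders, not the exact staging harness):
+
+```ts
+// Replay a captured DPoP-bound access token WITHOUT the accompanying DPoP proof
+// header and confirm the gateway rejects it. URL and token are placeholders for
+// the staging values used during the sign-off run.
+const GATEWAY_URL = "https://console.stg.stellaops.local/console/profile"; // placeholder path
+const capturedToken = process.env.CAPTURED_OPTOK ?? ""; // captured OpTok, supplied out of band
+
+async function replayWithoutDpopProof(): Promise<void> {
+  const response = await fetch(GATEWAY_URL, {
+    headers: { Authorization: `DPoP ${capturedToken}` }, // deliberately no `DPoP:` proof header
+  });
+  if (response.status !== 401) {
+    throw new Error(`expected 401 for replay without DPoP proof, got ${response.status}`);
+  }
+  console.log("replay rejected as expected (401)");
+}
+
+replayWithoutDpopProof().catch((error) => {
+  console.error(error);
+  process.exitCode = 1;
+});
+```
+
+Any 2xx/3xx response from this probe would indicate the DPoP binding is not enforced at the gateway and should block sign-off.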
+ +## Offline mode exercise + +- Deployed console with `console.offlineMode=true`; Offline banner rendered, SSE disabled, CLI guidance surfaced on runs/downloads pages. +- Imported Offline Kit manifest; parity checks report `OK` status. + +## Evidence parity + +- Downloaded run evidence bundle via UI, re-exported via CLI `stella runs export --run `; SHA-256 digests match. +- Verified Downloads workspace never caches bundle contents (only manifest metadata stored). + +## Monitoring & alerts + +- Grafana board `console-security.json` linked to alerts: `ui_request_duration_seconds` burn-rate, DPoP failure count, downloads manifest verification failures. +- PagerDuty playbook references `docs/security/console-security.md` §6 for incident steps. + +## Sign-off + +- Reviewed by **Security Guild** (lead: `@sec-lfox`). +- Sign-off recorded in Sprint 23 tracker (`../implplan/SPRINTS.md`, `DOCS-CONSOLE-23-018`). + diff --git a/docs/updates/2025-10-27-orch-operator-scope.md b/docs/updates/2025-10-27-orch-operator-scope.md index 7a71ee7a..64134c86 100644 --- a/docs/updates/2025-10-27-orch-operator-scope.md +++ b/docs/updates/2025-10-27-orch-operator-scope.md @@ -1,15 +1,15 @@ -# 2025-10-27 — Orchestrator operator scope & audit metadata - -## Summary - -- Introduced the `orch:operate` scope and `Orch.Operator` role in Authority to unlock Orchestrator control actions while keeping read-only access under `Orch.Viewer`. -- Authority now enforces `operator_reason` and `operator_ticket` parameters on `/token` requests that include `orch:operate`; missing values yield `invalid_request` and no token is issued. -- Client credentials audit events capture both fields (`request.reason`, `request.ticket`), giving SecOps traceability for every control action. - -## Next steps - -| Team | Follow-up | Target | -|------|-----------|--------| -| Console Guild | Wire UI control panels to request `operator_reason`/`operator_ticket` when exchanging tokens for orchestrator actions. | Sprint 23 stand-up | -| CLI Guild | Add flags to `stella orch` subcommands to pass reason/ticket metadata before enabling mutations. | Sprint 23 stand-up | -| Orchestrator Service | Enforce presence of `X-Stella-Reason`/`X-Stella-Ticket` (or equivalent metadata) on mutate endpoints and align audit logging. | ORCH-SVC-33-001 implementation | +# 2025-10-27 — Orchestrator operator scope & audit metadata + +## Summary + +- Introduced the `orch:operate` scope and `Orch.Operator` role in Authority to unlock Orchestrator control actions while keeping read-only access under `Orch.Viewer`. +- Authority now enforces `operator_reason` and `operator_ticket` parameters on `/token` requests that include `orch:operate`; missing values yield `invalid_request` and no token is issued. +- Client credentials audit events capture both fields (`request.reason`, `request.ticket`), giving SecOps traceability for every control action. + +## Next steps + +| Team | Follow-up | Target | +|------|-----------|--------| +| Console Guild | Wire UI control panels to request `operator_reason`/`operator_ticket` when exchanging tokens for orchestrator actions. | Sprint 23 stand-up | +| CLI Guild | Add flags to `stella orch` subcommands to pass reason/ticket metadata before enabling mutations. | Sprint 23 stand-up | +| Orchestrator Service | Enforce presence of `X-Stella-Reason`/`X-Stella-Ticket` (or equivalent metadata) on mutate endpoints and align audit logging. 
| ORCH-SVC-33-001 implementation | diff --git a/docs/updates/2025-10-27-policy-scope-migration.md b/docs/updates/2025-10-27-policy-scope-migration.md index 72d68a48..df90e1ad 100644 --- a/docs/updates/2025-10-27-policy-scope-migration.md +++ b/docs/updates/2025-10-27-policy-scope-migration.md @@ -1,15 +1,15 @@ -# 2025-10-27 — Policy scope migration guidance - -## Summary - -- Updated Authority defaults (`etc/authority.yaml`) to register a `policy-cli` client using the fine-grained scope set introduced by AUTH-POLICY-23-001 (`policy:read`, `policy:author`, `policy:review`, `policy:simulate`, `findings:read`). -- Added release/CI documentation call-outs instructing operators to reissue tokens that previously relied on `policy:write`/`policy:submit`/`policy:run` scopes. -- Introduced a repo verification script so future config changes fail CI when policy clients regress to the legacy scope bundles. - -## Next steps - -| Team | Follow-up | Target | -|------|-----------|--------| -| Authority Core | Rotate long-lived policy CLI tokens in staging to confirm new scope set before freezing release 2025.10. | 2025-10-29 | -| DevOps Guild | Update automation secrets (CI/CD, offline kit) to point at the regenerated `policy-cli` credentials. | Sprint 23 stand-up | -| Docs Guild | Fold the broader scope matrix refresh into AUTH-POLICY-23-003 once the dual-approval workflow lands. | Blocked on AUTH-POLICY-23-002 | +# 2025-10-27 — Policy scope migration guidance + +## Summary + +- Updated Authority defaults (`etc/authority.yaml`) to register a `policy-cli` client using the fine-grained scope set introduced by AUTH-POLICY-23-001 (`policy:read`, `policy:author`, `policy:review`, `policy:simulate`, `findings:read`). +- Added release/CI documentation call-outs instructing operators to reissue tokens that previously relied on `policy:write`/`policy:submit`/`policy:run` scopes. +- Introduced a repo verification script so future config changes fail CI when policy clients regress to the legacy scope bundles. + +## Next steps + +| Team | Follow-up | Target | +|------|-----------|--------| +| Authority Core | Rotate long-lived policy CLI tokens in staging to confirm new scope set before freezing release 2025.10. | 2025-10-29 | +| DevOps Guild | Update automation secrets (CI/CD, offline kit) to point at the regenerated `policy-cli` credentials. | Sprint 23 stand-up | +| Docs Guild | Fold the broader scope matrix refresh into AUTH-POLICY-23-003 once the dual-approval workflow lands. | Blocked on AUTH-POLICY-23-002 | diff --git a/docs/updates/2025-10-27-task-packs-docs.md b/docs/updates/2025-10-27-task-packs-docs.md index 446cb80b..22d6b275 100644 --- a/docs/updates/2025-10-27-task-packs-docs.md +++ b/docs/updates/2025-10-27-task-packs-docs.md @@ -1,15 +1,15 @@ -# Docs Guild Update — Task Pack Docs (2025-10-27) - -- Added Task Pack core documentation set: - - `/docs/task-packs/spec.md` - - `/docs/task-packs/authoring-guide.md` - - `/docs/task-packs/registry.md` - - `/docs/task-packs/runbook.md` - - `/docs/security/pack-signing-and-rbac.md` - - `/docs/operations/cli-release-and-packaging.md` -- Each doc includes imposed-rule reminder, compliance checklist, and cross-links to Task Runner, Packs Registry, CLI release tasks. -- Created asset staging instructions at `docs/assets/ui/tours/README.md` (shared with CLI enablement). -- Circulated spec + authoring guide links to Task Runner, Packs Registry, Authority, and DevOps guild channels for technical review (2025-10-27). 
Target follow-up review once CLI parity tasks (`CLI-PACKS-42-001`, `CLI-PACKS-43-001`) land; tentative sync held for 2025-11-03 (Docs Guild to confirm). -- Sprint tracker `DOCS-PACKS-43-001` marked DOING→DONE; follow-up reviews scheduled with Task Runner and Security guilds. - -Artifacts: [Spec](../task-packs/spec.md), [Authoring guide](../task-packs/authoring-guide.md), [Registry](../task-packs/registry.md), [Runbook](../task-packs/runbook.md), [Signing/RBAC](../security/pack-signing-and-rbac.md), [CLI release runbook](../operations/cli-release-and-packaging.md). +# Docs Guild Update — Task Pack Docs (2025-10-27) + +- Added Task Pack core documentation set: + - `/docs/task-packs/spec.md` + - `/docs/task-packs/authoring-guide.md` + - `/docs/task-packs/registry.md` + - `/docs/task-packs/runbook.md` + - `/docs/security/pack-signing-and-rbac.md` + - `/docs/operations/cli-release-and-packaging.md` +- Each doc includes imposed-rule reminder, compliance checklist, and cross-links to Task Runner, Packs Registry, CLI release tasks. +- Created asset staging instructions at `docs/assets/ui/tours/README.md` (shared with CLI enablement). +- Circulated spec + authoring guide links to Task Runner, Packs Registry, Authority, and DevOps guild channels for technical review (2025-10-27). Target follow-up review once CLI parity tasks (`CLI-PACKS-42-001`, `CLI-PACKS-43-001`) land; tentative sync held for 2025-11-03 (Docs Guild to confirm). +- Sprint tracker `DOCS-PACKS-43-001` marked DOING→DONE; follow-up reviews scheduled with Task Runner and Security guilds. + +Artifacts: [Spec](../task-packs/spec.md), [Authoring guide](../task-packs/authoring-guide.md), [Registry](../task-packs/registry.md), [Runbook](../task-packs/runbook.md), [Signing/RBAC](../security/pack-signing-and-rbac.md), [CLI release runbook](../operations/cli-release-and-packaging.md). diff --git a/docs/updates/2025-10-28-docs-guild.md b/docs/updates/2025-10-28-docs-guild.md index 0540c95d..cf4733ed 100644 --- a/docs/updates/2025-10-28-docs-guild.md +++ b/docs/updates/2025-10-28-docs-guild.md @@ -1,26 +1,26 @@ -# Docs Guild Update — 2025-10-28 - -## Console security posture draft - -- Published `docs/security/console-security.md` covering console OIDC/DPoP flow, scope map, fresh-auth sequence, CSP defaults, evidence handling, and monitoring checklist. -- Authority owners (`AUTH-CONSOLE-23-003`) to verify `/fresh-auth` token semantics (120 s OpTok, 300 s fresh-auth window) and confirm scope bundles before closing the sprint task. -- Security Guild requested to execute the compliance checklist in §9 and record sign-off in SPRINT 23 log once alerts/dashboards are wired (metrics references: `ui_request_duration_seconds`, `ui_dpop_failure_total`, Grafana board `console-security.json`). - -## Console CLI parity matrix - -- Added `/docs/cli-vs-ui-parity.md` with feature-level status tracking (✅/🟡/🟩). Pending commands reference CLI backlog (`CLI-EXPORT-35-001`, `CLI-POLICY-23-005`, `CONSOLE-DOC-23-502`). -- DevEx/CLI Guild to wire parity CI workflow when CLI downloads commands ship; Downloads workspace already links to the forthcoming parity report slot. - -## Accessibility refresh - -- Published `/docs/accessibility.md` describing keyboard flows, screen-reader behaviour, colour tokens, testing rig (Storybook axe, Playwright a11y), and offline guidance. -- Accessibility Guild (CONSOLE-QA-23-402) to log the next Playwright a11y sweep results against the new checklist; design tokens follow-up tracked via CONSOLE-FEAT-23-102. 
- -Artifacts: - -- Doc: `/docs/security/console-security.md` -- Doc: `/docs/cli-vs-ui-parity.md` -- Doc: `/docs/accessibility.md` -- Sprint tracker: `SPRINTS.md` (DOCS-CONSOLE-23-012 now DONE) - -cc: `@authority-core`, `@security-guild`, `@docs-guild` +# Docs Guild Update — 2025-10-28 + +## Console security posture draft + +- Published `docs/security/console-security.md` covering console OIDC/DPoP flow, scope map, fresh-auth sequence, CSP defaults, evidence handling, and monitoring checklist. +- Authority owners (`AUTH-CONSOLE-23-003`) to verify `/fresh-auth` token semantics (120 s OpTok, 300 s fresh-auth window) and confirm scope bundles before closing the sprint task. +- Security Guild requested to execute the compliance checklist in §9 and record sign-off in SPRINT 23 log once alerts/dashboards are wired (metrics references: `ui_request_duration_seconds`, `ui_dpop_failure_total`, Grafana board `console-security.json`). + +## Console CLI parity matrix + +- Added `/docs/cli-vs-ui-parity.md` with feature-level status tracking (✅/🟡/🟩). Pending commands reference CLI backlog (`CLI-EXPORT-35-001`, `CLI-POLICY-23-005`, `CONSOLE-DOC-23-502`). +- DevEx/CLI Guild to wire parity CI workflow when CLI downloads commands ship; Downloads workspace already links to the forthcoming parity report slot. + +## Accessibility refresh + +- Published `/docs/accessibility.md` describing keyboard flows, screen-reader behaviour, colour tokens, testing rig (Storybook axe, Playwright a11y), and offline guidance. +- Accessibility Guild (CONSOLE-QA-23-402) to log the next Playwright a11y sweep results against the new checklist; design tokens follow-up tracked via CONSOLE-FEAT-23-102. + +Artifacts: + +- Doc: `/docs/security/console-security.md` +- Doc: `/docs/cli-vs-ui-parity.md` +- Doc: `/docs/accessibility.md` +- Sprint tracker: `../implplan/SPRINTS.md` (DOCS-CONSOLE-23-012 now DONE) + +cc: `@authority-core`, `@security-guild`, `@docs-guild` diff --git a/docs/updates/2025-10-29-export-center-provenance.md b/docs/updates/2025-10-29-export-center-provenance.md index abe713dd..35d63c46 100644 --- a/docs/updates/2025-10-29-export-center-provenance.md +++ b/docs/updates/2025-10-29-export-center-provenance.md @@ -1,9 +1,9 @@ -# 2025-10-29 – Export Center provenance/signing doc - -## Summary -- Authored `docs/export-center/provenance-and-signing.md`, covering manifest/provenance artefacts, cosign/SLSA signing pipeline, verification workflows (CLI/CI/offline), and compliance checklist. -- Cross-linked the new guide from the docs index (`docs/README.md`) and referenced outstanding CLI automation (`CLI-EXPORT-37-001`) to keep verification guidance aligned with upcoming tooling. - -## Follow-ups -- [ ] Revisit once `CLI-EXPORT-37-001` lands to confirm command names/flags and update the verification section if necessary. -- [ ] Sync with DevOps (`DEVOPS-EXPORT-37-001`) after dashboards/alerts ship to embed direct links in the failure handling section. +# 2025-10-29 – Export Center provenance/signing doc + +## Summary +- Authored `docs/export-center/provenance-and-signing.md`, covering manifest/provenance artefacts, cosign/SLSA signing pipeline, verification workflows (CLI/CI/offline), and compliance checklist. +- Cross-linked the new guide from the docs index (`docs/README.md`) and referenced outstanding CLI automation (`CLI-EXPORT-37-001`) to keep verification guidance aligned with upcoming tooling. 
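+The offline verification step can be approximated with a short digest check; the manifest field names below (`artifacts[].sha256`) are assumptions for illustration only, not the canonical manifest schema described in the provenance doc:
+
+```ts
+// Recompute the SHA-256 of a downloaded export bundle and compare it with the
+// digest recorded in the export manifest. Field names ("artifacts", "sha256")
+// are illustrative; consult the provenance/signing doc for the final shape.
+import { createHash } from "node:crypto";
+import { readFileSync } from "node:fs";
+
+function verifyBundleDigest(manifestPath: string, bundlePath: string): boolean {
+  const manifest = JSON.parse(readFileSync(manifestPath, "utf8"));
+  const expected: string = (manifest.artifacts?.[0]?.sha256 ?? "").toLowerCase(); // assumed field path
+  const actual = createHash("sha256").update(readFileSync(bundlePath)).digest("hex");
+  return expected === actual || expected === `sha256:${actual}`;
+}
+
+// Example usage (hypothetical file names):
+// console.log(verifyBundleDigest("export-manifest.json", "export-bundle.tar.gz"));
+```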
+ +## Follow-ups +- [ ] Revisit once `CLI-EXPORT-37-001` lands to confirm command names/flags and update the verification section if necessary. +- [ ] Sync with DevOps (`DEVOPS-EXPORT-37-001`) after dashboards/alerts ship to embed direct links in the failure handling section. diff --git a/docs/updates/2025-10-29-notify-docs.md b/docs/updates/2025-10-29-notify-docs.md index 962d0c1a..3d586cd2 100644 --- a/docs/updates/2025-10-29-notify-docs.md +++ b/docs/updates/2025-10-29-notify-docs.md @@ -1,10 +1,10 @@ -# 2025-10-29 – Notifications Studio docs sync prep - -## Summary -- Published Notifications Studio overview (`notifications/overview.md`) and architecture dossier (`notifications/architecture.md`), complementing the rules/templates/digests deep dives landed earlier in Sprint 39. -- Captured action items to validate connector metadata, quiet-hours semantics, and simulation endpoints once `NOTIFY-SVC-39-001..004` merge. -- Alerted Notifications Service Guild that documentation handoff is pending those feature drops; ready to iterate as soon as the implementation surfaces schemas. - -## Follow-ups -- [ ] Review merged notifier correlation/quiet-hours work (`NOTIFY-SVC-39-001..004`) and refresh overview + architecture docs with any new persistence/API details. -- [ ] Coordinate with DevOps dashboards work (`DEVOPS-NOTIFY-39-002`) to document alert references once metrics names are finalised. +# 2025-10-29 – Notifications Studio docs sync prep + +## Summary +- Published Notifications Studio overview (`notifications/overview.md`) and architecture dossier (`notifications/architecture.md`), complementing the rules/templates/digests deep dives landed earlier in Sprint 39. +- Captured action items to validate connector metadata, quiet-hours semantics, and simulation endpoints once `NOTIFY-SVC-39-001..004` merge. +- Alerted Notifications Service Guild that documentation handoff is pending those feature drops; ready to iterate as soon as the implementation surfaces schemas. + +## Follow-ups +- [ ] Review merged notifier correlation/quiet-hours work (`NOTIFY-SVC-39-001..004`) and refresh overview + architecture docs with any new persistence/API details. +- [ ] Coordinate with DevOps dashboards work (`DEVOPS-NOTIFY-39-002`) to document alert references once metrics names are finalised. diff --git a/docs/updates/2025-10-29-scheduler-policy-doc-refresh.md b/docs/updates/2025-10-29-scheduler-policy-doc-refresh.md index 3bd793f7..b339f515 100644 --- a/docs/updates/2025-10-29-scheduler-policy-doc-refresh.md +++ b/docs/updates/2025-10-29-scheduler-policy-doc-refresh.md @@ -10,9 +10,9 @@ > **Message:** > ``` > Policy Engine run DTO docs just picked up a refresh (environment metadata, lifecycle+retry table, diff payload notes). -> • Doc: src/StellaOps.Scheduler.Models/docs/SCHED-MODELS-20-001-POLICY-RUNS.md +> • Doc: src/Scheduler/__Libraries/StellaOps.Scheduler.Models/docs/SCHED-MODELS-20-001-POLICY-RUNS.md > • Samples: samples/api/scheduler/policy-*.json -> • Tests: dotnet test src/StellaOps.Scheduler.Models.Tests +> • Tests: dotnet test src/Scheduler/__Tests/StellaOps.Scheduler.Models.Tests > Please review for orchestration + API consumer work; ping back if other fields need coverage. 
> ``` diff --git a/docs/updates/2025-10-31-console-security-refresh.md b/docs/updates/2025-10-31-console-security-refresh.md index c2e63c1a..7ad84a93 100644 --- a/docs/updates/2025-10-31-console-security-refresh.md +++ b/docs/updates/2025-10-31-console-security-refresh.md @@ -1,12 +1,12 @@ -# 2025-10-31 — Console Security Docs Refresh - -## Summary -- Documented the new Authority `/console` endpoints (`/tenants`, `/profile`, `/token/introspect`) including tenant header enforcement, DPoP requirements, and five-minute fresh-auth behaviour. -- Reduced the default Authority access-token lifetime to 120 seconds to match OpTok guidance and updated tests accordingly. -- Updated Console security guidance to cover the newly issued `orch:read` scope and clarified session inactivity expectations. -- Annotated `authority.yaml.sample` and the Authority ops runbook so operators forward `X-Stella-Tenant` and understand fresh-auth prompts. - -## Impact -- Console release notes now reference the dedicated `/console` endpoints and their audit identifiers. -- Security Guild can rely on the updated compliance checklist when executing Sprint 23 sign-off. +# 2025-10-31 — Console Security Docs Refresh + +## Summary +- Documented the new Authority `/console` endpoints (`/tenants`, `/profile`, `/token/introspect`) including tenant header enforcement, DPoP requirements, and five-minute fresh-auth behaviour. +- Reduced the default Authority access-token lifetime to 120 seconds to match OpTok guidance and updated tests accordingly. +- Updated Console security guidance to cover the newly issued `orch:read` scope and clarified session inactivity expectations. +- Annotated `authority.yaml.sample` and the Authority ops runbook so operators forward `X-Stella-Tenant` and understand fresh-auth prompts. + +## Impact +- Console release notes now reference the dedicated `/console` endpoints and their audit identifiers. +- Security Guild can rely on the updated compliance checklist when executing Sprint 23 sign-off. - Deployment teams have explicit configuration reminders for tenants and orchestrator dashboard access. \ No newline at end of file diff --git a/docs/vex/aggregation.md b/docs/vex/aggregation.md index e1952779..a2acd260 100644 --- a/docs/vex/aggregation.md +++ b/docs/vex/aggregation.md @@ -1,229 +1,229 @@ -# VEX Observations & Linksets - -> Imposed rule: Work of this type or tasks of this type on this component must -> also be applied everywhere else it should be applied. - -Link-Not-Merge brings the same immutable observation model to Excititor that -Concelier now uses for advisories. VEX statements are stored as append-only -observations; linksets correlate them, capture conflicts, and keep provenance so -Policy Engine and UI surfaces can explain decisions without collapsing sources. - ---- - -## 1. Model overview - -### 1.1 Observation lifecycle - -1. **Ingest** – Connectors fetch OpenVEX, CSAF VEX, CycloneDX VEX, or VEX - attestations, validate signatures, and strip any derived consensus data - forbidden by the Aggregation-Only Contract (AOC). -2. **Persist** – Excititor writes immutable `vex_observations` keyed by tenant, - provider, upstream identifier, and `contentHash`. Supersedes chains record - revisions; the original payload is never mutated. -3. **Expose** – WebService will surface paginated observation APIs and Offline - Kit snapshots mirror the same data for air-gapped sites. 
- -Observation schema sketch (final shape lands with `EXCITITOR-LNM-21-001`): - -```text -observationId = {tenant}:{providerId}:{upstreamId}:{revision} -tenant, providerId, streamId -upstream{ upstreamId, documentVersion, fetchedAt, receivedAt, - contentHash, signature{present, format?, keyId?, signature?} } -content{ format, specVersion, raw } -statements[ - { vulnerabilityId, productKey, status, justification?, - introducedVersion?, fixedVersion?, locator } -] -linkset{ purls[], cpes[], aliases[], references[], - reconciledFrom[], conflicts[]? } -attributes{ batchId?, replayCursor? } -createdAt -``` - -- **Raw payload** (`content.raw`) remains lossless (Relaxed Extended JSON). -- **Statements** provide normalized tuples for each claim contained in the - document, including justification and version hints. -- **Linkset** mirrors identifiers extracted during ingestion, retaining JSON - pointer metadata so audits can trace back to the source fragment. - -### 1.2 Linkset lifecycle - -Linksets correlate claims referring to the same `(vulnerabilityId, productKey)` -pair across providers. - -1. **Seed** – Observations push normalized identifiers (CVE, GHSA, vendor IDs) - plus canonical product keys (purl preferred, cpe fallback). Platform-scoped - statements remain marked `non_joinable`. -2. **Correlate** – The linkset builder groups statements by tenant and identity, - combines alias graphs from Concelier, and uses justification/product overlap - to assign correlation confidence. -3. **Annotate** – Conflicts (status disagreement, justification mismatch, range - inconsistencies) are recorded as structured entries. -4. **Persist** – Results land in `vex_linksets` with deterministic IDs (hash of - sorted `(vulnerabilityId, productKey, observationIds)`) and append-only - history for replay/debugging. - -Linksets never override statements or invent consensus; they simply align -evidence for Policy Engine and consumers. - ---- - -## 2. Observation vs. linkset - -- **Purpose** - - Observation: Immutable record of a single upstream VEX document. - - Linkset: Correlated evidence spanning observations that describe the same - product-vulnerability pair. -- **Mutation** - - Observation: Append-only via supersedes. - - Linkset: Regenerated deterministically by correlation jobs. -- **Allowed fields** - - Observation: Raw payload, provenance, normalized statement tuples, join - hints. - - Linkset: Observation references, statement IDs, confidence metrics, conflict - annotations. -- **Forbidden fields** - - Observation: Derived consensus, suppression flags, risk scores. - - Linkset: Derived severity or policy decisions (only evidence + conflicts). -- **Consumers** - - Observation: Evidence exports, Offline Kit mirrors, CLI raw dumps. - - Linkset: Policy Engine VEX overlay, Console evidence panes, Vuln Explorer. - -### 2.1 Example sequence - -1. Canonical vendor issues an attested OpenVEX declaring `CVE-2025-2222` as - `not_affected` for `pkg:rpm/redhat/openssl@1.1.1w-12`. Excititor inserts a - new observation referencing that statement. -2. Upstream CycloneDX VEX from a distro reports the same product as `affected` - with `under_investigation` justification. -3. Linkset builder groups both statements by alias overlap and product key, - setting confidence `high` because CVE and purl match. -4. Conflict annotation records `status-mismatch` and retains both justifications; - Policy Engine uses this to explain why suppression cannot proceed without - policy override. - ---- - -## 3. 
Conflict handling - -Structured conflicts capture disagreements without mutating source statements. - -```json -{ - "type": "status-mismatch", - "vulnerabilityId": "CVE-2025-2222", - "productKey": "pkg:rpm/redhat/openssl@1.1.1w-12", - "statements": [ - { - "observationId": "tenant:redhat:openvex:3", - "providerId": "redhat", - "status": "not_affected", - "justification": "component_not_present" - }, - { - "observationId": "tenant:ubuntu:cyclonedx:12", - "providerId": "ubuntu", - "status": "affected", - "justification": "under_investigation" - } - ], - "confidence": "medium", - "detectedAt": "2025-10-27T14:30:00Z" -} -``` - -Conflict classes (tracked via `EXCITITOR-LNM-21-003`): - -- `status-mismatch` – Different statuses for the same pair (affected vs - not_affected vs fixed vs under_investigation). -- `justification-divergence` – Same status but incompatible justifications or - missing justification where policy requires it. -- `version-range-clash` – Introduced/fixed ranges contradict each other. -- `non-joinable-overlap` – Platform-scoped statements collide with package - statements; flagged as warning but retained. -- `metadata-gap` – Missing provenance/signature field on specific statements. - -Conflicts surface through: - -- `/vex/linksets/{id}` APIs (`conflicts[]` payload). -- Console evidence panels (badges + drawer detail). -- CLI exports (`stella vex linkset …` planned in `CLI-LNM-22-002`). -- Metrics dashboards (`vex_linkset_conflicts_total{type}`). - ---- - -## 4. AOC alignment - -- **Raw-first** – `content.raw` and `statements[]` mirror upstream input; no - derived consensus or suppression values are written by ingestion. -- **No merges** – Each upstream statement persists independently; linksets refer - back via `observationId`. -- **Provenance mandatory** – Missing signature or source metadata yields - `ERR_AOC_004`; ingestion blocks until connectors fix the feed. -- **Idempotent writes** – Duplicate `(providerId, upstreamId, contentHash)` - results in a no-op; revisions append with a `supersedes` pointer. -- **Deterministic output** – Correlator sorts identifiers, normalizes timestamps - (UTC ISO-8601), and hashes canonical JSON to generate stable linkset IDs. -- **Scope-aware** – Tenant claims enforced on write/read; Authority scopes - `vex:ingest` / `vex:read` are required (see `AUTH-AOC-22-001`). - -Violations raise `ERR_AOC_00x`, emit `aoc_violation_total`, and prevent the data -from landing downstream. - ---- - -## 5. Downstream consumption - -- **Policy Engine** – Evaluates VEX evidence alongside advisory linksets to gate - suppression, severity downgrades, or explainability. -- **Console UI** – Evidence panel renders VEX statements grouped by provider and - highlights conflicts or missing signatures. -- **CLI** – Planned commands export observations/linksets for offline analysis - (`CLI-LNM-22-002`). -- **Offline Kit** – Bundled snapshots keep VEX data aligned with advisory - observations for air-gapped parity. -- **Observability** – Dashboards track ingestion latency, conflict counts, and - supersedes depth per provider. - -New consumers must treat both collections as read-only and preserve deterministic -ordering when caching. - ---- - -## 6. Validation & testing - -- **Unit tests** (`StellaOps.Excititor.Core.Tests`) to cover schema guards, - deterministic linkset hashing, conflict classification, and supersedes - behaviour. 
-- **Mongo integration tests** (`StellaOps.Excititor.Storage.Mongo.Tests`) to - verify indexes, shard keys, and idempotent writes across tenants. -- **CLI smoke suites** (`stella vex observations`, `stella vex linksets`) for - JSON determinism and exit code coverage. -- **Replay determinism** – Feed identical upstream payloads twice and ensure - observation/linkset hashes match across runs. -- **Offline kit verification** – Validate VEX exports packaged in Offline Kit - snapshots against live service outputs. -- **Fixture refresh** – Samples (`SAMPLES-LNM-22-002`) must include multi-source - conflicts and justification variants used by docs and UI tests. - ---- - -## 7. Reviewer checklist - -- Observation schema aligns with `EXCITITOR-LNM-21-001` once the schema lands; - update references as soon as the final contract is published. -- Linkset lifecycle covers correlation signals (alias graphs, product keys, - justification rules) and deterministic ID strategy. -- Conflict classes include status, justification, version range, platform overlap - scenarios. -- AOC guardrails called out with relevant error codes and Authority scopes. -- Downstream consumer list matches active APIs/CLI features (update when - `CLI-LNM-22-002` and WebService endpoints ship). -- Validation section references Core, Storage, CLI, and Offline test suites plus - fixture requirements. -- Imposed rule reminder retained at top. - -Dependencies outstanding (2025-10-27): `EXCITITOR-LNM-21-001..005` and -`EXCITITOR-LNM-21-101..102` are still TODO; revisit this document once schemas, -APIs, and fixtures are implemented. +# VEX Observations & Linksets + +> Imposed rule: Work of this type or tasks of this type on this component must +> also be applied everywhere else it should be applied. + +Link-Not-Merge brings the same immutable observation model to Excititor that +Concelier now uses for advisories. VEX statements are stored as append-only +observations; linksets correlate them, capture conflicts, and keep provenance so +Policy Engine and UI surfaces can explain decisions without collapsing sources. + +--- + +## 1. Model overview + +### 1.1 Observation lifecycle + +1. **Ingest** – Connectors fetch OpenVEX, CSAF VEX, CycloneDX VEX, or VEX + attestations, validate signatures, and strip any derived consensus data + forbidden by the Aggregation-Only Contract (AOC). +2. **Persist** – Excititor writes immutable `vex_observations` keyed by tenant, + provider, upstream identifier, and `contentHash`. Supersedes chains record + revisions; the original payload is never mutated. +3. **Expose** – WebService will surface paginated observation APIs and Offline + Kit snapshots mirror the same data for air-gapped sites. + +Observation schema sketch (final shape lands with `EXCITITOR-LNM-21-001`): + +```text +observationId = {tenant}:{providerId}:{upstreamId}:{revision} +tenant, providerId, streamId +upstream{ upstreamId, documentVersion, fetchedAt, receivedAt, + contentHash, signature{present, format?, keyId?, signature?} } +content{ format, specVersion, raw } +statements[ + { vulnerabilityId, productKey, status, justification?, + introducedVersion?, fixedVersion?, locator } +] +linkset{ purls[], cpes[], aliases[], references[], + reconciledFrom[], conflicts[]? } +attributes{ batchId?, replayCursor? } +createdAt +``` + +- **Raw payload** (`content.raw`) remains lossless (Relaxed Extended JSON). +- **Statements** provide normalized tuples for each claim contained in the + document, including justification and version hints. 
+- **Linkset** mirrors identifiers extracted during ingestion, retaining JSON + pointer metadata so audits can trace back to the source fragment. + +### 1.2 Linkset lifecycle + +Linksets correlate claims referring to the same `(vulnerabilityId, productKey)` +pair across providers. + +1. **Seed** – Observations push normalized identifiers (CVE, GHSA, vendor IDs) + plus canonical product keys (purl preferred, cpe fallback). Platform-scoped + statements remain marked `non_joinable`. +2. **Correlate** – The linkset builder groups statements by tenant and identity, + combines alias graphs from Concelier, and uses justification/product overlap + to assign correlation confidence. +3. **Annotate** – Conflicts (status disagreement, justification mismatch, range + inconsistencies) are recorded as structured entries. +4. **Persist** – Results land in `vex_linksets` with deterministic IDs (hash of + sorted `(vulnerabilityId, productKey, observationIds)`) and append-only + history for replay/debugging. + +Linksets never override statements or invent consensus; they simply align +evidence for Policy Engine and consumers. + +--- + +## 2. Observation vs. linkset + +- **Purpose** + - Observation: Immutable record of a single upstream VEX document. + - Linkset: Correlated evidence spanning observations that describe the same + product-vulnerability pair. +- **Mutation** + - Observation: Append-only via supersedes. + - Linkset: Regenerated deterministically by correlation jobs. +- **Allowed fields** + - Observation: Raw payload, provenance, normalized statement tuples, join + hints. + - Linkset: Observation references, statement IDs, confidence metrics, conflict + annotations. +- **Forbidden fields** + - Observation: Derived consensus, suppression flags, risk scores. + - Linkset: Derived severity or policy decisions (only evidence + conflicts). +- **Consumers** + - Observation: Evidence exports, Offline Kit mirrors, CLI raw dumps. + - Linkset: Policy Engine VEX overlay, Console evidence panes, Vuln Explorer. + +### 2.1 Example sequence + +1. Canonical vendor issues an attested OpenVEX declaring `CVE-2025-2222` as + `not_affected` for `pkg:rpm/redhat/openssl@1.1.1w-12`. Excititor inserts a + new observation referencing that statement. +2. Upstream CycloneDX VEX from a distro reports the same product as `affected` + with `under_investigation` justification. +3. Linkset builder groups both statements by alias overlap and product key, + setting confidence `high` because CVE and purl match. +4. Conflict annotation records `status-mismatch` and retains both justifications; + Policy Engine uses this to explain why suppression cannot proceed without + policy override. + +--- + +## 3. Conflict handling + +Structured conflicts capture disagreements without mutating source statements. + +```json +{ + "type": "status-mismatch", + "vulnerabilityId": "CVE-2025-2222", + "productKey": "pkg:rpm/redhat/openssl@1.1.1w-12", + "statements": [ + { + "observationId": "tenant:redhat:openvex:3", + "providerId": "redhat", + "status": "not_affected", + "justification": "component_not_present" + }, + { + "observationId": "tenant:ubuntu:cyclonedx:12", + "providerId": "ubuntu", + "status": "affected", + "justification": "under_investigation" + } + ], + "confidence": "medium", + "detectedAt": "2025-10-27T14:30:00Z" +} +``` + +Conflict classes (tracked via `EXCITITOR-LNM-21-003`): + +- `status-mismatch` – Different statuses for the same pair (affected vs + not_affected vs fixed vs under_investigation). 
+- `justification-divergence` – Same status but incompatible justifications or + missing justification where policy requires it. +- `version-range-clash` – Introduced/fixed ranges contradict each other. +- `non-joinable-overlap` – Platform-scoped statements collide with package + statements; flagged as warning but retained. +- `metadata-gap` – Missing provenance/signature field on specific statements. + +Conflicts surface through: + +- `/vex/linksets/{id}` APIs (`conflicts[]` payload). +- Console evidence panels (badges + drawer detail). +- CLI exports (`stella vex linkset …` planned in `CLI-LNM-22-002`). +- Metrics dashboards (`vex_linkset_conflicts_total{type}`). + +--- + +## 4. AOC alignment + +- **Raw-first** – `content.raw` and `statements[]` mirror upstream input; no + derived consensus or suppression values are written by ingestion. +- **No merges** – Each upstream statement persists independently; linksets refer + back via `observationId`. +- **Provenance mandatory** – Missing signature or source metadata yields + `ERR_AOC_004`; ingestion blocks until connectors fix the feed. +- **Idempotent writes** – Duplicate `(providerId, upstreamId, contentHash)` + results in a no-op; revisions append with a `supersedes` pointer. +- **Deterministic output** – Correlator sorts identifiers, normalizes timestamps + (UTC ISO-8601), and hashes canonical JSON to generate stable linkset IDs. +- **Scope-aware** – Tenant claims enforced on write/read; Authority scopes + `vex:ingest` / `vex:read` are required (see `AUTH-AOC-22-001`). + +Violations raise `ERR_AOC_00x`, emit `aoc_violation_total`, and prevent the data +from landing downstream. + +--- + +## 5. Downstream consumption + +- **Policy Engine** – Evaluates VEX evidence alongside advisory linksets to gate + suppression, severity downgrades, or explainability. +- **Console UI** – Evidence panel renders VEX statements grouped by provider and + highlights conflicts or missing signatures. +- **CLI** – Planned commands export observations/linksets for offline analysis + (`CLI-LNM-22-002`). +- **Offline Kit** – Bundled snapshots keep VEX data aligned with advisory + observations for air-gapped parity. +- **Observability** – Dashboards track ingestion latency, conflict counts, and + supersedes depth per provider. + +New consumers must treat both collections as read-only and preserve deterministic +ordering when caching. + +--- + +## 6. Validation & testing + +- **Unit tests** (`StellaOps.Excititor.Core.Tests`) to cover schema guards, + deterministic linkset hashing, conflict classification, and supersedes + behaviour. +- **Mongo integration tests** (`StellaOps.Excititor.Storage.Mongo.Tests`) to + verify indexes, shard keys, and idempotent writes across tenants. +- **CLI smoke suites** (`stella vex observations`, `stella vex linksets`) for + JSON determinism and exit code coverage. +- **Replay determinism** – Feed identical upstream payloads twice and ensure + observation/linkset hashes match across runs. +- **Offline kit verification** – Validate VEX exports packaged in Offline Kit + snapshots against live service outputs. +- **Fixture refresh** – Samples (`SAMPLES-LNM-22-002`) must include multi-source + conflicts and justification variants used by docs and UI tests. + +--- + +## 7. Reviewer checklist + +- Observation schema aligns with `EXCITITOR-LNM-21-001` once the schema lands; + update references as soon as the final contract is published. 
+- Linkset lifecycle covers correlation signals (alias graphs, product keys, + justification rules) and deterministic ID strategy. +- Conflict classes include status, justification, version range, platform overlap + scenarios. +- AOC guardrails called out with relevant error codes and Authority scopes. +- Downstream consumer list matches active APIs/CLI features (update when + `CLI-LNM-22-002` and WebService endpoints ship). +- Validation section references Core, Storage, CLI, and Offline test suites plus + fixture requirements. +- Imposed rule reminder retained at top. + +Dependencies outstanding (2025-10-27): `EXCITITOR-LNM-21-001..005` and +`EXCITITOR-LNM-21-101..102` are still TODO; revisit this document once schemas, +APIs, and fixtures are implemented. diff --git a/etc/authority.yaml b/etc/authority.yaml index 5fa09f77..31de7611 100644 --- a/etc/authority.yaml +++ b/etc/authority.yaml @@ -1,206 +1,206 @@ -# StellaOps Authority configuration (dev profile) -# Derived from etc/authority.yaml.sample; trimmed to the services needed for local -# stacks and kept under version control so compose/helm bundles mount a working config. - -schemaVersion: 1 - -issuer: "https://authority.localtest.me" - -accessTokenLifetime: "00:02:00" -refreshTokenLifetime: "30.00:00:00" -identityTokenLifetime: "00:05:00" -authorizationCodeLifetime: "00:05:00" -deviceCodeLifetime: "00:15:00" - -storage: - connectionString: "mongodb://stellaops:stellaops@mongo:27017/stellaops_authority" - databaseName: "stellaops_authority" - commandTimeout: "00:00:30" - -signing: - enabled: true - activeKeyId: "authority-signing-dev" - keyPath: "../certificates/authority-signing-dev.pem" - algorithm: "ES256" - keySource: "file" - -bootstrap: - enabled: false - apiKey: "change-me" - defaultIdentityProvider: "standard" - -pluginDirectories: - - "../StellaOps.Authority.PluginBinaries" - -plugins: - configurationDirectory: "../etc/authority.plugins" - descriptors: - standard: - type: "standard" - assemblyName: "StellaOps.Authority.Plugin.Standard" - enabled: true - configFile: "standard.yaml" - capabilities: - - password - - bootstrap - - clientProvisioning - metadata: - defaultRole: "operators" - -clients: - - clientId: "policy-engine" - displayName: "Policy Engine Service" - grantTypes: [ "client_credentials" ] - audiences: [ "api://policy-engine" ] - scopes: [ "policy:run", "findings:read", "effective:write" ] - tenant: "tenant-default" - properties: - serviceIdentity: "policy-engine" - senderConstraint: "dpop" - auth: - type: "client_secret" - secretFile: "../secrets/policy-engine.secret" - - - clientId: "policy-cli" - displayName: "Policy Automation CLI" - grantTypes: [ "client_credentials" ] - audiences: [ "api://policy-engine" ] - scopes: [ "policy:read", "policy:author", "policy:review", "policy:simulate", "findings:read" ] - tenant: "tenant-default" - senderConstraint: "dpop" - auth: - type: "client_secret" - secretFile: "../secrets/policy-cli.secret" - - - clientId: "cartographer-service" - displayName: "Cartographer Service" - grantTypes: [ "client_credentials" ] - audiences: [ "api://cartographer" ] - scopes: [ "graph:write", "graph:read" ] - tenant: "tenant-default" - properties: - serviceIdentity: "cartographer" - senderConstraint: "dpop" - auth: - type: "client_secret" - secretFile: "../secrets/cartographer-service.secret" - - - clientId: "graph-api" - displayName: "Graph API Gateway" - grantTypes: [ "client_credentials" ] - audiences: [ "api://graph-api" ] - scopes: [ "graph:read", "graph:export", "graph:simulate" ] - 
tenant: "tenant-default" - senderConstraint: "dpop" - auth: - type: "client_secret" - secretFile: "../secrets/graph-api.secret" - - clientId: "export-center-operator" - displayName: "Export Center Operator" - grantTypes: [ "client_credentials" ] - audiences: [ "api://export-center" ] - scopes: [ "export.viewer", "export.operator" ] - tenant: "tenant-default" - senderConstraint: "dpop" - auth: - type: "client_secret" - secretFile: "../secrets/export-center-operator.secret" - - clientId: "export-center-admin" - displayName: "Export Center Admin" - grantTypes: [ "client_credentials" ] - audiences: [ "api://export-center" ] - scopes: [ "export.viewer", "export.operator", "export.admin" ] - tenant: "tenant-default" - senderConstraint: "dpop" - auth: - type: "client_secret" - secretFile: "../secrets/export-center-admin.secret" - - - clientId: "concelier-ingest" - displayName: "Concelier Ingestion" - grantTypes: [ "client_credentials" ] - audiences: [ "api://concelier" ] - scopes: [ "advisory:ingest", "advisory:read" ] - tenant: "tenant-default" - senderConstraint: "dpop" - auth: - type: "client_secret" - secretFile: "../secrets/concelier-ingest.secret" - - - clientId: "excitor-ingest" - displayName: "Excititor VEX Ingestion" - grantTypes: [ "client_credentials" ] - audiences: [ "api://excitor" ] - scopes: [ "vex:ingest", "vex:read" ] - tenant: "tenant-default" - senderConstraint: "dpop" - auth: - type: "client_secret" - secretFile: "../secrets/excitor-ingest.secret" - - - clientId: "graph-api-cli" - displayName: "Graph Explorer CLI" - grantTypes: [ "client_credentials" ] - audiences: [ "api://graph-api" ] - scopes: [ "graph:read", "graph:export" ] - tenant: "tenant-default" - senderConstraint: "dpop" - auth: - type: "client_secret" - secretFile: "../secrets/graph-api-cli.secret" - -tenants: - - name: "tenant-default" - roles: - orch-viewer: - scopes: [ "orch:read" ] - orch-operator: - scopes: [ "orch:read", "orch:operate" ] - export-viewer: - scopes: [ "export.viewer" ] - export-operator: - scopes: [ "export.viewer", "export.operator" ] - export-admin: - scopes: [ "export.viewer", "export.operator", "export.admin" ] - policy-author: - scopes: [ "policy:author", "policy:read", "policy:simulate", "findings:read" ] - policy-reviewer: - scopes: [ "policy:review", "policy:read", "policy:simulate", "findings:read" ] - policy-approver: - scopes: [ "policy:approve", "policy:review", "policy:read", "policy:simulate", "findings:read" ] - policy-operator: - scopes: [ "policy:operate", "policy:run", "policy:activate", "policy:read", "policy:simulate", "findings:read" ] - policy-auditor: - scopes: [ "policy:audit", "policy:read", "policy:simulate", "findings:read" ] - -security: - rateLimiting: - token: - enabled: true - permitLimit: 30 - window: "00:01:00" - queueLimit: 0 - authorize: - enabled: true - permitLimit: 60 - window: "00:01:00" - queueLimit: 10 - passwordHashing: - algorithm: "Argon2id" - memorySizeInKib: 19456 - iterations: 2 - parallelism: 1 - senderConstraints: - dpop: - enabled: true - proofLifetime: "00:05:00" - allowedClockSkew: "00:00:10" - replayWindow: "00:10:00" - nonce: - enabled: false - mtls: - enabled: false - -bypassNetworks: - - "127.0.0.1/32" - - "::1/128" +# StellaOps Authority configuration (dev profile) +# Derived from etc/authority.yaml.sample; trimmed to the services needed for local +# stacks and kept under version control so compose/helm bundles mount a working config. 
+ +schemaVersion: 1 + +issuer: "https://authority.localtest.me" + +accessTokenLifetime: "00:02:00" +refreshTokenLifetime: "30.00:00:00" +identityTokenLifetime: "00:05:00" +authorizationCodeLifetime: "00:05:00" +deviceCodeLifetime: "00:15:00" + +storage: + connectionString: "mongodb://stellaops:stellaops@mongo:27017/stellaops_authority" + databaseName: "stellaops_authority" + commandTimeout: "00:00:30" + +signing: + enabled: true + activeKeyId: "authority-signing-dev" + keyPath: "../certificates/authority-signing-dev.pem" + algorithm: "ES256" + keySource: "file" + +bootstrap: + enabled: false + apiKey: "change-me" + defaultIdentityProvider: "standard" + +pluginDirectories: + - "../StellaOps.Authority.PluginBinaries" + +plugins: + configurationDirectory: "../etc/authority.plugins" + descriptors: + standard: + type: "standard" + assemblyName: "StellaOps.Authority.Plugin.Standard" + enabled: true + configFile: "standard.yaml" + capabilities: + - password + - bootstrap + - clientProvisioning + metadata: + defaultRole: "operators" + +clients: + - clientId: "policy-engine" + displayName: "Policy Engine Service" + grantTypes: [ "client_credentials" ] + audiences: [ "api://policy-engine" ] + scopes: [ "policy:run", "findings:read", "effective:write" ] + tenant: "tenant-default" + properties: + serviceIdentity: "policy-engine" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/policy-engine.secret" + + - clientId: "policy-cli" + displayName: "Policy Automation CLI" + grantTypes: [ "client_credentials" ] + audiences: [ "api://policy-engine" ] + scopes: [ "policy:read", "policy:author", "policy:review", "policy:simulate", "findings:read" ] + tenant: "tenant-default" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/policy-cli.secret" + + - clientId: "cartographer-service" + displayName: "Cartographer Service" + grantTypes: [ "client_credentials" ] + audiences: [ "api://cartographer" ] + scopes: [ "graph:write", "graph:read" ] + tenant: "tenant-default" + properties: + serviceIdentity: "cartographer" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/cartographer-service.secret" + + - clientId: "graph-api" + displayName: "Graph API Gateway" + grantTypes: [ "client_credentials" ] + audiences: [ "api://graph-api" ] + scopes: [ "graph:read", "graph:export", "graph:simulate" ] + tenant: "tenant-default" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/graph-api.secret" + - clientId: "export-center-operator" + displayName: "Export Center Operator" + grantTypes: [ "client_credentials" ] + audiences: [ "api://export-center" ] + scopes: [ "export.viewer", "export.operator" ] + tenant: "tenant-default" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/export-center-operator.secret" + - clientId: "export-center-admin" + displayName: "Export Center Admin" + grantTypes: [ "client_credentials" ] + audiences: [ "api://export-center" ] + scopes: [ "export.viewer", "export.operator", "export.admin" ] + tenant: "tenant-default" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/export-center-admin.secret" + + - clientId: "concelier-ingest" + displayName: "Concelier Ingestion" + grantTypes: [ "client_credentials" ] + audiences: [ "api://concelier" ] + scopes: [ "advisory:ingest", "advisory:read" ] + tenant: "tenant-default" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: 
"../secrets/concelier-ingest.secret" + + - clientId: "excitor-ingest" + displayName: "Excititor VEX Ingestion" + grantTypes: [ "client_credentials" ] + audiences: [ "api://excitor" ] + scopes: [ "vex:ingest", "vex:read" ] + tenant: "tenant-default" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/excitor-ingest.secret" + + - clientId: "graph-api-cli" + displayName: "Graph Explorer CLI" + grantTypes: [ "client_credentials" ] + audiences: [ "api://graph-api" ] + scopes: [ "graph:read", "graph:export" ] + tenant: "tenant-default" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/graph-api-cli.secret" + +tenants: + - name: "tenant-default" + roles: + orch-viewer: + scopes: [ "orch:read" ] + orch-operator: + scopes: [ "orch:read", "orch:operate" ] + export-viewer: + scopes: [ "export.viewer" ] + export-operator: + scopes: [ "export.viewer", "export.operator" ] + export-admin: + scopes: [ "export.viewer", "export.operator", "export.admin" ] + policy-author: + scopes: [ "policy:author", "policy:read", "policy:simulate", "findings:read" ] + policy-reviewer: + scopes: [ "policy:review", "policy:read", "policy:simulate", "findings:read" ] + policy-approver: + scopes: [ "policy:approve", "policy:review", "policy:read", "policy:simulate", "findings:read" ] + policy-operator: + scopes: [ "policy:operate", "policy:run", "policy:activate", "policy:read", "policy:simulate", "findings:read" ] + policy-auditor: + scopes: [ "policy:audit", "policy:read", "policy:simulate", "findings:read" ] + +security: + rateLimiting: + token: + enabled: true + permitLimit: 30 + window: "00:01:00" + queueLimit: 0 + authorize: + enabled: true + permitLimit: 60 + window: "00:01:00" + queueLimit: 10 + passwordHashing: + algorithm: "Argon2id" + memorySizeInKib: 19456 + iterations: 2 + parallelism: 1 + senderConstraints: + dpop: + enabled: true + proofLifetime: "00:05:00" + allowedClockSkew: "00:00:10" + replayWindow: "00:10:00" + nonce: + enabled: false + mtls: + enabled: false + +bypassNetworks: + - "127.0.0.1/32" + - "::1/128" diff --git a/etc/authority.yaml.sample b/etc/authority.yaml.sample index c9e484d8..84513cc3 100644 --- a/etc/authority.yaml.sample +++ b/etc/authority.yaml.sample @@ -1,337 +1,337 @@ -# StellaOps Authority configuration template. -# Copy to ../etc/authority.yaml (relative to the Authority content root) -# and adjust values to fit your environment. Environment variables -# prefixed with STELLAOPS_AUTHORITY_ override these values at runtime. -# Example: STELLAOPS_AUTHORITY__ISSUER=https://authority.example.com - -schemaVersion: 1 - -# Absolute issuer URI advertised to clients. Use HTTPS for anything -# beyond loopback development. -issuer: "https://authority.stella-ops.local" - -# Token lifetimes expressed as HH:MM:SS or DD.HH:MM:SS. -accessTokenLifetime: "00:02:00" -refreshTokenLifetime: "30.00:00:00" -identityTokenLifetime: "00:05:00" -authorizationCodeLifetime: "00:05:00" -deviceCodeLifetime: "00:15:00" - -# MongoDB storage connection details. -storage: - connectionString: "mongodb://localhost:27017/stellaops-authority" - # databaseName: "stellaops_authority" - commandTimeout: "00:00:30" - -# Signing configuration for revocation bundles and JWKS. 
-signing: - enabled: true - activeKeyId: "authority-signing-2025-dev" - keyPath: "../certificates/authority-signing-2025-dev.pem" - algorithm: "ES256" - keySource: "file" - # provider: "default" - additionalKeys: - - keyId: "authority-signing-dev" - path: "../certificates/authority-signing-dev.pem" - source: "file" - # Rotation flow: - # 1. Generate a new PEM under ./certificates (e.g. authority-signing-2026-dev.pem). - # 2. Trigger the .gitea/workflows/authority-key-rotation.yml workflow (or run - # ops/authority/key-rotation.sh) with the new keyId/keyPath. - # 3. Update activeKeyId/keyPath above and move the previous key into additionalKeys - # so restarts retain retired material for JWKS consumers. - -# Bootstrap administrative endpoints (initial provisioning). -bootstrap: - enabled: false - apiKey: "change-me" - defaultIdentityProvider: "standard" - -# Directories scanned for Authority plug-ins. Relative paths resolve -# against the application content root, enabling air-gapped deployments -# that package plug-ins alongside binaries. -pluginDirectories: - - "../StellaOps.Authority.PluginBinaries" - # "/var/lib/stellaops/authority/plugins" - -# Plug-in manifests live in descriptors below; per-plugin settings are stored -# in the configurationDirectory (YAML files). Authority will load any enabled -# plugins and surface their metadata/capabilities to the host. -plugins: - configurationDirectory: "../etc/authority.plugins" - descriptors: - standard: - type: "standard" - assemblyName: "StellaOps.Authority.Plugin.Standard" - enabled: true - configFile: "standard.yaml" - capabilities: - - password - - bootstrap - - clientProvisioning - metadata: - defaultRole: "operators" - # Example for an external identity provider plugin. Leave disabled unless - # the plug-in package exists under StellaOps.Authority.PluginBinaries. - ldap: - type: "ldap" - assemblyName: "StellaOps.Authority.Plugin.Ldap" - enabled: false - configFile: "ldap.yaml" - capabilities: - - password - - mfa - -# OAuth client registrations issued by Authority. These examples cover Notify WebService -# in dev (notify.dev audience) and production (notify audience). Replace the secret files -# with paths to your sealed credentials before enabling bootstrap mode. 
-clients: - - clientId: "notify-web-dev" - displayName: "Notify WebService (dev)" - grantTypes: [ "client_credentials" ] - audiences: [ "notify.dev" ] - scopes: [ "notify.read", "notify.admin" ] - senderConstraint: "dpop" - auth: - type: "client_secret" - secretFile: "../secrets/notify-web-dev.secret" - - clientId: "notify-web" - displayName: "Notify WebService" - grantTypes: [ "client_credentials" ] - audiences: [ "notify" ] - scopes: [ "notify.read", "notify.admin" ] - senderConstraint: "dpop" - auth: - type: "client_secret" - secretFile: "../secrets/notify-web.secret" - - clientId: "concelier-ingest" - displayName: "Concelier Ingestion" - grantTypes: [ "client_credentials" ] - audiences: [ "api://concelier" ] - scopes: [ "advisory:ingest", "advisory:read" ] - tenant: "tenant-default" - senderConstraint: "dpop" - auth: - type: "client_secret" - secretFile: "../secrets/concelier-ingest.secret" - - clientId: "excitor-ingest" - displayName: "Excititor VEX Ingestion" - grantTypes: [ "client_credentials" ] - audiences: [ "api://excitor" ] - scopes: [ "vex:ingest", "vex:read" ] - tenant: "tenant-default" - senderConstraint: "dpop" - auth: - type: "client_secret" - secretFile: "../secrets/excitor-ingest.secret" - - clientId: "aoc-verifier" - displayName: "AOC Verification Agent" - grantTypes: [ "client_credentials" ] - audiences: [ "api://concelier", "api://excitor" ] - scopes: [ "aoc:verify", "advisory:read", "vex:read" ] - tenant: "tenant-default" - senderConstraint: "dpop" - auth: - type: "client_secret" - secretFile: "../secrets/aoc-verifier.secret" - - clientId: "policy-engine" - displayName: "Policy Engine Service" - grantTypes: [ "client_credentials" ] - audiences: [ "api://policy-engine" ] - scopes: [ "policy:run", "findings:read", "effective:write" ] - tenant: "tenant-default" - properties: - serviceIdentity: "policy-engine" - senderConstraint: "dpop" - auth: - type: "client_secret" - secretFile: "../secrets/policy-engine.secret" - - clientId: "policy-cli" - displayName: "Policy Automation CLI" - grantTypes: [ "client_credentials" ] - audiences: [ "api://policy-engine" ] - scopes: [ "policy:read", "policy:author", "policy:review", "policy:simulate", "findings:read" ] - tenant: "tenant-default" - senderConstraint: "dpop" - auth: - type: "client_secret" - secretFile: "../secrets/policy-cli.secret" - - clientId: "exceptions-service" - displayName: "Policy Engine Exceptions Worker" - grantTypes: [ "client_credentials" ] - audiences: [ "api://policy-engine" ] - scopes: [ "exceptions:read", "exceptions:write" ] - tenant: "tenant-default" - senderConstraint: "dpop" - auth: - type: "client_secret" - secretFile: "../secrets/exceptions-service.secret" - - clientId: "console-web" - displayName: "StellaOps Console" - grantTypes: [ "authorization_code", "refresh_token" ] - audiences: [ "console" ] - scopes: [ "openid", "profile", "email", "ui.read", "authority:tenants.read", "advisory:read", "vex:read", "exceptions:read", "exceptions:approve", "aoc:verify", "findings:read", "orch:read", "vuln:read" ] - # exceptions:approve is elevated via fresh-auth and requires an MFA-capable identity provider. - tenant: "tenant-default" - senderConstraint: "dpop" - redirectUris: - - "https://console.stella-ops.local/oidc/callback" - postLogoutRedirectUris: - - "https://console.stella-ops.local/" - # Gateway must forward X-Stella-Tenant for /console endpoints; fresh-auth window (300s) - # returned by /console/profile governs admin actions in the Console UI. 
- auth: - type: "client_secret" - secretFile: "../secrets/console-web.secret" - - clientId: "cartographer-service" - displayName: "Cartographer Service" - grantTypes: [ "client_credentials" ] - audiences: [ "api://cartographer" ] - scopes: [ "graph:write", "graph:read" ] - tenant: "tenant-default" - properties: - serviceIdentity: "cartographer" - senderConstraint: "dpop" - auth: - type: "client_secret" - secretFile: "../secrets/cartographer-service.secret" - - clientId: "graph-api" - displayName: "Graph API Gateway" - grantTypes: [ "client_credentials" ] - audiences: [ "api://graph-api" ] - scopes: [ "graph:read", "graph:export", "graph:simulate" ] - tenant: "tenant-default" - senderConstraint: "dpop" - auth: - type: "client_secret" - secretFile: "../secrets/graph-api.secret" - - clientId: "export-center-operator" - displayName: "Export Center Operator" - grantTypes: [ "client_credentials" ] - audiences: [ "api://export-center" ] - scopes: [ "export.viewer", "export.operator" ] - tenant: "tenant-default" - senderConstraint: "dpop" - auth: - type: "client_secret" - secretFile: "../secrets/export-center-operator.secret" - - clientId: "export-center-admin" - displayName: "Export Center Admin" - grantTypes: [ "client_credentials" ] - audiences: [ "api://export-center" ] - scopes: [ "export.viewer", "export.operator", "export.admin" ] - tenant: "tenant-default" - senderConstraint: "dpop" - auth: - type: "client_secret" - secretFile: "../secrets/export-center-admin.secret" - - clientId: "vuln-explorer-ui" - displayName: "Vuln Explorer UI" - grantTypes: [ "client_credentials" ] - audiences: [ "api://vuln-explorer" ] - scopes: [ "vuln:read" ] - tenant: "tenant-default" - senderConstraint: "dpop" - auth: - type: "client_secret" - secretFile: "../secrets/vuln-explorer-ui.secret" - # Signals sensors must request aoc:verify alongside write scope. - - clientId: "signals-uploader" - displayName: "Signals Sensor" - grantTypes: [ "client_credentials" ] - audiences: [ "api://signals" ] - scopes: [ "signals:write", "signals:read", "aoc:verify" ] - tenant: "tenant-default" - senderConstraint: "dpop" - auth: - type: "client_secret" - secretFile: "../secrets/signals-uploader.secret" - -tenants: - - name: "tenant-default" - roles: - orch-viewer: - scopes: [ "orch:read" ] - orch-operator: - scopes: [ "orch:read", "orch:operate" ] - policy-author: - scopes: [ "policy:author", "policy:read", "policy:simulate", "findings:read" ] - policy-reviewer: - scopes: [ "policy:review", "policy:read", "policy:simulate", "findings:read" ] - policy-approver: - scopes: [ "policy:approve", "policy:review", "policy:read", "policy:simulate", "findings:read" ] - policy-operator: - scopes: [ "policy:operate", "policy:run", "policy:activate", "policy:read", "policy:simulate", "findings:read" ] - policy-auditor: - scopes: [ "policy:audit", "policy:read", "policy:simulate", "findings:read" ] - export-viewer: - scopes: [ "export.viewer" ] - export-operator: - scopes: [ "export.viewer", "export.operator" ] - export-admin: - scopes: [ "export.viewer", "export.operator", "export.admin" ] - -# Exception approval routing templates used by Policy Engine and Console. -exceptions: - routingTemplates: - - id: "secops" - authorityRouteId: "approvals/secops" - requireMfa: true - description: "Security Operations approval chain" - - id: "governance" - authorityRouteId: "approvals/governance" - requireMfa: false - description: "Governance review (non-production)" - -# CIDR ranges that bypass network-sensitive policies (e.g. on-host cron jobs). 
-# Keep the list tight: localhost is sufficient for most air-gapped installs. -bypassNetworks: - - "127.0.0.1/32" - - "::1/128" - -# Security posture (rate limiting + sender constraints). -security: - rateLimiting: - token: - enabled: true - permitLimit: 30 - window: "00:01:00" - queueLimit: 0 - authorize: - enabled: true - permitLimit: 60 - window: "00:01:00" - queueLimit: 10 - internal: - enabled: false - permitLimit: 5 - window: "00:01:00" - queueLimit: 0 - senderConstraints: - dpop: - enabled: true - allowedAlgorithms: [ "ES256", "ES384" ] - proofLifetime: "00:02:00" - allowedClockSkew: "00:00:30" - replayWindow: "00:05:00" - nonce: - enabled: true - ttl: "00:10:00" - maxIssuancePerMinute: 120 - store: "memory" # Set to "redis" for multi-node Authority deployments. - requiredAudiences: - - "signer" - - "attestor" - # redisConnectionString: "redis://authority-redis:6379?ssl=false" - mtls: - enabled: false - requireChainValidation: true - rotationGrace: "00:15:00" - enforceForAudiences: - - "signer" # Requests for these audiences force mTLS sender constraints - allowedSanTypes: - - "dns" - - "uri" - allowedCertificateAuthorities: [ ] - allowedSubjectPatterns: [ ] +# StellaOps Authority configuration template. +# Copy to ../etc/authority.yaml (relative to the Authority content root) +# and adjust values to fit your environment. Environment variables +# prefixed with STELLAOPS_AUTHORITY_ override these values at runtime. +# Example: STELLAOPS_AUTHORITY__ISSUER=https://authority.example.com + +schemaVersion: 1 + +# Absolute issuer URI advertised to clients. Use HTTPS for anything +# beyond loopback development. +issuer: "https://authority.stella-ops.local" + +# Token lifetimes expressed as HH:MM:SS or DD.HH:MM:SS. +accessTokenLifetime: "00:02:00" +refreshTokenLifetime: "30.00:00:00" +identityTokenLifetime: "00:05:00" +authorizationCodeLifetime: "00:05:00" +deviceCodeLifetime: "00:15:00" + +# MongoDB storage connection details. +storage: + connectionString: "mongodb://localhost:27017/stellaops-authority" + # databaseName: "stellaops_authority" + commandTimeout: "00:00:30" + +# Signing configuration for revocation bundles and JWKS. +signing: + enabled: true + activeKeyId: "authority-signing-2025-dev" + keyPath: "../certificates/authority-signing-2025-dev.pem" + algorithm: "ES256" + keySource: "file" + # provider: "default" + additionalKeys: + - keyId: "authority-signing-dev" + path: "../certificates/authority-signing-dev.pem" + source: "file" + # Rotation flow: + # 1. Generate a new PEM under ./certificates (e.g. authority-signing-2026-dev.pem). + # 2. Trigger the .gitea/workflows/authority-key-rotation.yml workflow (or run + # ops/authority/key-rotation.sh) with the new keyId/keyPath. + # 3. Update activeKeyId/keyPath above and move the previous key into additionalKeys + # so restarts retain retired material for JWKS consumers. + +# Bootstrap administrative endpoints (initial provisioning). +bootstrap: + enabled: false + apiKey: "change-me" + defaultIdentityProvider: "standard" + +# Directories scanned for Authority plug-ins. Relative paths resolve +# against the application content root, enabling air-gapped deployments +# that package plug-ins alongside binaries. +pluginDirectories: + - "../StellaOps.Authority.PluginBinaries" + # "/var/lib/stellaops/authority/plugins" + +# Plug-in manifests live in descriptors below; per-plugin settings are stored +# in the configurationDirectory (YAML files). Authority will load any enabled +# plugins and surface their metadata/capabilities to the host. 
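+# Override sketch (illustrative only): assuming the standard .NET configuration binding,
+# nested keys map to double-underscore segments under the STELLAOPS_AUTHORITY_ prefix
+# shown at the top of this file. Verify the exact key path for your build before relying on it:
+#   STELLAOPS_AUTHORITY__PLUGINS__CONFIGURATIONDIRECTORY=../etc/authority.plugins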
+plugins: + configurationDirectory: "../etc/authority.plugins" + descriptors: + standard: + type: "standard" + assemblyName: "StellaOps.Authority.Plugin.Standard" + enabled: true + configFile: "standard.yaml" + capabilities: + - password + - bootstrap + - clientProvisioning + metadata: + defaultRole: "operators" + # Example for an external identity provider plugin. Leave disabled unless + # the plug-in package exists under StellaOps.Authority.PluginBinaries. + ldap: + type: "ldap" + assemblyName: "StellaOps.Authority.Plugin.Ldap" + enabled: false + configFile: "ldap.yaml" + capabilities: + - password + - mfa + +# OAuth client registrations issued by Authority. These examples cover Notify WebService +# in dev (notify.dev audience) and production (notify audience). Replace the secret files +# with paths to your sealed credentials before enabling bootstrap mode. +clients: + - clientId: "notify-web-dev" + displayName: "Notify WebService (dev)" + grantTypes: [ "client_credentials" ] + audiences: [ "notify.dev" ] + scopes: [ "notify.read", "notify.admin" ] + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/notify-web-dev.secret" + - clientId: "notify-web" + displayName: "Notify WebService" + grantTypes: [ "client_credentials" ] + audiences: [ "notify" ] + scopes: [ "notify.read", "notify.admin" ] + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/notify-web.secret" + - clientId: "concelier-ingest" + displayName: "Concelier Ingestion" + grantTypes: [ "client_credentials" ] + audiences: [ "api://concelier" ] + scopes: [ "advisory:ingest", "advisory:read" ] + tenant: "tenant-default" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/concelier-ingest.secret" + - clientId: "excitor-ingest" + displayName: "Excititor VEX Ingestion" + grantTypes: [ "client_credentials" ] + audiences: [ "api://excitor" ] + scopes: [ "vex:ingest", "vex:read" ] + tenant: "tenant-default" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/excitor-ingest.secret" + - clientId: "aoc-verifier" + displayName: "AOC Verification Agent" + grantTypes: [ "client_credentials" ] + audiences: [ "api://concelier", "api://excitor" ] + scopes: [ "aoc:verify", "advisory:read", "vex:read" ] + tenant: "tenant-default" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/aoc-verifier.secret" + - clientId: "policy-engine" + displayName: "Policy Engine Service" + grantTypes: [ "client_credentials" ] + audiences: [ "api://policy-engine" ] + scopes: [ "policy:run", "findings:read", "effective:write" ] + tenant: "tenant-default" + properties: + serviceIdentity: "policy-engine" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/policy-engine.secret" + - clientId: "policy-cli" + displayName: "Policy Automation CLI" + grantTypes: [ "client_credentials" ] + audiences: [ "api://policy-engine" ] + scopes: [ "policy:read", "policy:author", "policy:review", "policy:simulate", "findings:read" ] + tenant: "tenant-default" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/policy-cli.secret" + - clientId: "exceptions-service" + displayName: "Policy Engine Exceptions Worker" + grantTypes: [ "client_credentials" ] + audiences: [ "api://policy-engine" ] + scopes: [ "exceptions:read", "exceptions:write" ] + tenant: "tenant-default" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: 
"../secrets/exceptions-service.secret" + - clientId: "console-web" + displayName: "StellaOps Console" + grantTypes: [ "authorization_code", "refresh_token" ] + audiences: [ "console" ] + scopes: [ "openid", "profile", "email", "ui.read", "authority:tenants.read", "advisory:read", "vex:read", "exceptions:read", "exceptions:approve", "aoc:verify", "findings:read", "orch:read", "vuln:read" ] + # exceptions:approve is elevated via fresh-auth and requires an MFA-capable identity provider. + tenant: "tenant-default" + senderConstraint: "dpop" + redirectUris: + - "https://console.stella-ops.local/oidc/callback" + postLogoutRedirectUris: + - "https://console.stella-ops.local/" + # Gateway must forward X-Stella-Tenant for /console endpoints; fresh-auth window (300s) + # returned by /console/profile governs admin actions in the Console UI. + auth: + type: "client_secret" + secretFile: "../secrets/console-web.secret" + - clientId: "cartographer-service" + displayName: "Cartographer Service" + grantTypes: [ "client_credentials" ] + audiences: [ "api://cartographer" ] + scopes: [ "graph:write", "graph:read" ] + tenant: "tenant-default" + properties: + serviceIdentity: "cartographer" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/cartographer-service.secret" + - clientId: "graph-api" + displayName: "Graph API Gateway" + grantTypes: [ "client_credentials" ] + audiences: [ "api://graph-api" ] + scopes: [ "graph:read", "graph:export", "graph:simulate" ] + tenant: "tenant-default" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/graph-api.secret" + - clientId: "export-center-operator" + displayName: "Export Center Operator" + grantTypes: [ "client_credentials" ] + audiences: [ "api://export-center" ] + scopes: [ "export.viewer", "export.operator" ] + tenant: "tenant-default" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/export-center-operator.secret" + - clientId: "export-center-admin" + displayName: "Export Center Admin" + grantTypes: [ "client_credentials" ] + audiences: [ "api://export-center" ] + scopes: [ "export.viewer", "export.operator", "export.admin" ] + tenant: "tenant-default" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/export-center-admin.secret" + - clientId: "vuln-explorer-ui" + displayName: "Vuln Explorer UI" + grantTypes: [ "client_credentials" ] + audiences: [ "api://vuln-explorer" ] + scopes: [ "vuln:read" ] + tenant: "tenant-default" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/vuln-explorer-ui.secret" + # Signals sensors must request aoc:verify alongside write scope. 
+ - clientId: "signals-uploader" + displayName: "Signals Sensor" + grantTypes: [ "client_credentials" ] + audiences: [ "api://signals" ] + scopes: [ "signals:write", "signals:read", "aoc:verify" ] + tenant: "tenant-default" + senderConstraint: "dpop" + auth: + type: "client_secret" + secretFile: "../secrets/signals-uploader.secret" + +tenants: + - name: "tenant-default" + roles: + orch-viewer: + scopes: [ "orch:read" ] + orch-operator: + scopes: [ "orch:read", "orch:operate" ] + policy-author: + scopes: [ "policy:author", "policy:read", "policy:simulate", "findings:read" ] + policy-reviewer: + scopes: [ "policy:review", "policy:read", "policy:simulate", "findings:read" ] + policy-approver: + scopes: [ "policy:approve", "policy:review", "policy:read", "policy:simulate", "findings:read" ] + policy-operator: + scopes: [ "policy:operate", "policy:run", "policy:activate", "policy:read", "policy:simulate", "findings:read" ] + policy-auditor: + scopes: [ "policy:audit", "policy:read", "policy:simulate", "findings:read" ] + export-viewer: + scopes: [ "export.viewer" ] + export-operator: + scopes: [ "export.viewer", "export.operator" ] + export-admin: + scopes: [ "export.viewer", "export.operator", "export.admin" ] + +# Exception approval routing templates used by Policy Engine and Console. +exceptions: + routingTemplates: + - id: "secops" + authorityRouteId: "approvals/secops" + requireMfa: true + description: "Security Operations approval chain" + - id: "governance" + authorityRouteId: "approvals/governance" + requireMfa: false + description: "Governance review (non-production)" + +# CIDR ranges that bypass network-sensitive policies (e.g. on-host cron jobs). +# Keep the list tight: localhost is sufficient for most air-gapped installs. +bypassNetworks: + - "127.0.0.1/32" + - "::1/128" + +# Security posture (rate limiting + sender constraints). +security: + rateLimiting: + token: + enabled: true + permitLimit: 30 + window: "00:01:00" + queueLimit: 0 + authorize: + enabled: true + permitLimit: 60 + window: "00:01:00" + queueLimit: 10 + internal: + enabled: false + permitLimit: 5 + window: "00:01:00" + queueLimit: 0 + senderConstraints: + dpop: + enabled: true + allowedAlgorithms: [ "ES256", "ES384" ] + proofLifetime: "00:02:00" + allowedClockSkew: "00:00:30" + replayWindow: "00:05:00" + nonce: + enabled: true + ttl: "00:10:00" + maxIssuancePerMinute: 120 + store: "memory" # Set to "redis" for multi-node Authority deployments. + requiredAudiences: + - "signer" + - "attestor" + # redisConnectionString: "redis://authority-redis:6379?ssl=false" + mtls: + enabled: false + requireChainValidation: true + rotationGrace: "00:15:00" + enforceForAudiences: + - "signer" # Requests for these audiences force mTLS sender constraints + allowedSanTypes: + - "dns" + - "uri" + allowedCertificateAuthorities: [ ] + allowedSubjectPatterns: [ ] diff --git a/etc/concelier.yaml.sample b/etc/concelier.yaml.sample index e614e438..53bdeeea 100644 --- a/etc/concelier.yaml.sample +++ b/etc/concelier.yaml.sample @@ -1,119 +1,119 @@ -# Concelier configuration template for StellaOps deployments. -# Copy to ../etc/concelier.yaml (relative to the web service content root) -# and adjust the values to match your environment. Environment variables -# (prefixed with CONCELIER_) override these settings at runtime. - -storage: - driver: mongo - # Mongo connection string. Use SRV URI or standard connection string. 
- dsn: "mongodb://concelier:concelier@mongo:27017/concelier?authSource=admin" - # Optional database name; defaults to the name embedded in the DSN or 'concelier'. - database: "concelier" - # Mongo command timeout in seconds. - commandTimeoutSeconds: 30 - -plugins: - # Concelier resolves plug-ins relative to the content root; override as needed. - baseDirectory: ".." - directory: "StellaOps.Concelier.PluginBinaries" - searchPatterns: - - "StellaOps.Concelier.Plugin.*.dll" - -telemetry: - enabled: true - enableTracing: false - enableMetrics: false - enableLogging: true - minimumLogLevel: "Information" - serviceName: "stellaops-concelier" - # Configure OTLP endpoint when shipping traces/metrics/logs out-of-band. - otlpEndpoint: "" - # Optional headers for OTLP exporters, for example authentication tokens. - otlpHeaders: {} - # Attach additional resource attributes to telemetry exports. - resourceAttributes: - deployment.environment: "local" - # Emit console exporters for local debugging. - exportConsole: true - -authority: - enabled: false - # Temporary rollout flag. When true, Concelier logs anonymous access but does not fail requests - # without tokens. Set to false before 2025-12-31 UTC to enforce authentication fully. - allowAnonymousFallback: true - # Issuer advertised by StellaOps Authority (e.g. https://authority.stella-ops.local). - issuer: "https://authority.stella-ops.local" - # Optional explicit metadata address; defaults to {issuer}/.well-known/openid-configuration. - metadataAddress: "" - requireHttpsMetadata: true - backchannelTimeoutSeconds: 30 - tokenClockSkewSeconds: 60 - audiences: - - "api://concelier" - requiredScopes: - - "concelier.jobs.trigger" - - "advisory:read" - - "advisory:ingest" - requiredTenants: - - "tenant-default" - # Outbound credentials Concelier can use to call Authority (client credentials flow). - clientId: "concelier-jobs" - # Prefer storing the secret outside of the config file. Provide either clientSecret or clientSecretFile. - clientSecret: "" - clientSecretFile: "" - clientScopes: - - "concelier.jobs.trigger" - - "advisory:read" - - "advisory:ingest" - resilience: - # Enable deterministic retry/backoff when Authority is briefly unavailable. - enableRetries: true - retryDelays: - - "00:00:01" - - "00:00:02" - - "00:00:05" - # Allow stale discovery/JWKS responses when Authority is offline (extend tolerance as needed for air-gapped mirrors). - allowOfflineCacheFallback: true - offlineCacheTolerance: "00:10:00" - # Networks allowed to bypass authentication (loopback by default for on-host cron jobs). - bypassNetworks: - - "127.0.0.1/32" - - "::1/128" - -mirror: - enabled: false - # Directory containing JSON exporter outputs (absolute or relative to content root). - exportRoot: "exports/json" - # Optional explicit export identifier; defaults to `latest` symlink or most recent export. - activeExportId: "" - latestDirectoryName: "latest" - mirrorDirectoryName: "mirror" - requireAuthentication: false - maxIndexRequestsPerHour: 600 - domains: - - id: "primary" - displayName: "Primary Mirror" - requireAuthentication: false - maxDownloadRequestsPerHour: 1200 - -sources: - ghsa: - apiToken: "${GITHUB_PAT}" - pageSize: 50 - maxPagesPerFetch: 5 - requestDelay: "00:00:00.200" - failureBackoff: "00:05:00" - rateLimitWarningThreshold: 500 - secondaryRateLimitBackoff: "00:02:00" - cve: - baseEndpoint: "https://cveawg.mitre.org/api/" - apiOrg: "" - apiUser: "" - apiKey: "" - # Optional mirror used when credentials are unavailable. 
- seedDirectory: "./seed-data/cve" - pageSize: 200 - maxPagesPerFetch: 5 - initialBackfill: "30.00:00:00" - requestDelay: "00:00:00.250" - failureBackoff: "00:10:00" +# Concelier configuration template for StellaOps deployments. +# Copy to ../etc/concelier.yaml (relative to the web service content root) +# and adjust the values to match your environment. Environment variables +# (prefixed with CONCELIER_) override these settings at runtime. + +storage: + driver: mongo + # Mongo connection string. Use SRV URI or standard connection string. + dsn: "mongodb://concelier:concelier@mongo:27017/concelier?authSource=admin" + # Optional database name; defaults to the name embedded in the DSN or 'concelier'. + database: "concelier" + # Mongo command timeout in seconds. + commandTimeoutSeconds: 30 + +plugins: + # Concelier resolves plug-ins relative to the content root; override as needed. + baseDirectory: ".." + directory: "StellaOps.Concelier.PluginBinaries" + searchPatterns: + - "StellaOps.Concelier.Plugin.*.dll" + +telemetry: + enabled: true + enableTracing: false + enableMetrics: false + enableLogging: true + minimumLogLevel: "Information" + serviceName: "stellaops-concelier" + # Configure OTLP endpoint when shipping traces/metrics/logs out-of-band. + otlpEndpoint: "" + # Optional headers for OTLP exporters, for example authentication tokens. + otlpHeaders: {} + # Attach additional resource attributes to telemetry exports. + resourceAttributes: + deployment.environment: "local" + # Emit console exporters for local debugging. + exportConsole: true + +authority: + enabled: false + # Temporary rollout flag. When true, Concelier logs anonymous access but does not fail requests + # without tokens. Set to false before 2025-12-31 UTC to enforce authentication fully. + allowAnonymousFallback: true + # Issuer advertised by StellaOps Authority (e.g. https://authority.stella-ops.local). + issuer: "https://authority.stella-ops.local" + # Optional explicit metadata address; defaults to {issuer}/.well-known/openid-configuration. + metadataAddress: "" + requireHttpsMetadata: true + backchannelTimeoutSeconds: 30 + tokenClockSkewSeconds: 60 + audiences: + - "api://concelier" + requiredScopes: + - "concelier.jobs.trigger" + - "advisory:read" + - "advisory:ingest" + requiredTenants: + - "tenant-default" + # Outbound credentials Concelier can use to call Authority (client credentials flow). + clientId: "concelier-jobs" + # Prefer storing the secret outside of the config file. Provide either clientSecret or clientSecretFile. + clientSecret: "" + clientSecretFile: "" + clientScopes: + - "concelier.jobs.trigger" + - "advisory:read" + - "advisory:ingest" + resilience: + # Enable deterministic retry/backoff when Authority is briefly unavailable. + enableRetries: true + retryDelays: + - "00:00:01" + - "00:00:02" + - "00:00:05" + # Allow stale discovery/JWKS responses when Authority is offline (extend tolerance as needed for air-gapped mirrors). + allowOfflineCacheFallback: true + offlineCacheTolerance: "00:10:00" + # Networks allowed to bypass authentication (loopback by default for on-host cron jobs). + bypassNetworks: + - "127.0.0.1/32" + - "::1/128" + +mirror: + enabled: false + # Directory containing JSON exporter outputs (absolute or relative to content root). + exportRoot: "exports/json" + # Optional explicit export identifier; defaults to `latest` symlink or most recent export. 
+ activeExportId: "" + latestDirectoryName: "latest" + mirrorDirectoryName: "mirror" + requireAuthentication: false + maxIndexRequestsPerHour: 600 + domains: + - id: "primary" + displayName: "Primary Mirror" + requireAuthentication: false + maxDownloadRequestsPerHour: 1200 + +sources: + ghsa: + apiToken: "${GITHUB_PAT}" + pageSize: 50 + maxPagesPerFetch: 5 + requestDelay: "00:00:00.200" + failureBackoff: "00:05:00" + rateLimitWarningThreshold: 500 + secondaryRateLimitBackoff: "00:02:00" + cve: + baseEndpoint: "https://cveawg.mitre.org/api/" + apiOrg: "" + apiUser: "" + apiKey: "" + # Optional mirror used when credentials are unavailable. + seedDirectory: "./seed-data/cve" + pageSize: 200 + maxPagesPerFetch: 5 + initialBackfill: "30.00:00:00" + requestDelay: "00:00:00.250" + failureBackoff: "00:10:00" diff --git a/etc/policy-engine.yaml.sample b/etc/policy-engine.yaml.sample index f68b8f73..03fd2e26 100644 --- a/etc/policy-engine.yaml.sample +++ b/etc/policy-engine.yaml.sample @@ -1,33 +1,33 @@ -# StellaOps Policy Engine configuration template. -# Copy to ../etc/policy-engine.yaml (relative to the Policy Engine content root) -# and adjust values to fit your environment. Environment variables prefixed with -# STELLAOPS_POLICY_ENGINE_ override these values at runtime. - -schemaVersion: 1 - -authority: - enabled: true - issuer: "https://authority.stella-ops.local" - clientId: "policy-engine" - clientSecret: "change-me" - scopes: [ "policy:run", "findings:read", "effective:write" ] - backchannelTimeoutSeconds: 30 - -storage: - connectionString: "mongodb://localhost:27017/policy-engine" - databaseName: "policy_engine" - commandTimeoutSeconds: 30 - -workers: - schedulerIntervalSeconds: 15 - maxConcurrentEvaluations: 4 - -resourceServer: - authority: "https://authority.stella-ops.local" - requireHttpsMetadata: true - audiences: [ "api://policy-engine" ] - requiredScopes: [ "policy:run" ] - requiredTenants: [ ] - bypassNetworks: - - "127.0.0.1/32" - - "::1/128" +# StellaOps Policy Engine configuration template. +# Copy to ../etc/policy-engine.yaml (relative to the Policy Engine content root) +# and adjust values to fit your environment. Environment variables prefixed with +# STELLAOPS_POLICY_ENGINE_ override these values at runtime. + +schemaVersion: 1 + +authority: + enabled: true + issuer: "https://authority.stella-ops.local" + clientId: "policy-engine" + clientSecret: "change-me" + scopes: [ "policy:run", "findings:read", "effective:write" ] + backchannelTimeoutSeconds: 30 + +storage: + connectionString: "mongodb://localhost:27017/policy-engine" + databaseName: "policy_engine" + commandTimeoutSeconds: 30 + +workers: + schedulerIntervalSeconds: 15 + maxConcurrentEvaluations: 4 + +resourceServer: + authority: "https://authority.stella-ops.local" + requireHttpsMetadata: true + audiences: [ "api://policy-engine" ] + requiredScopes: [ "policy:run" ] + requiredTenants: [ ] + bypassNetworks: + - "127.0.0.1/32" + - "::1/128" diff --git a/etc/policy-gateway.yaml.sample b/etc/policy-gateway.yaml.sample index 6c38cb20..1b2b10ba 100644 --- a/etc/policy-gateway.yaml.sample +++ b/etc/policy-gateway.yaml.sample @@ -1,39 +1,39 @@ -# StellaOps Policy Gateway configuration template. -# Copy to ../etc/policy-gateway.yaml (relative to the gateway content root) -# and adjust values to fit your environment. Environment variables prefixed with -# STELLAOPS_POLICY_GATEWAY_ override these values at runtime. 
- -schemaVersion: 1 - -telemetry: - minimumLogLevel: Information - -resourceServer: - authority: "https://authority.stella-ops.local" - metadataAddress: "https://authority.stella-ops.local/.well-known/openid-configuration" - audiences: [ "api://policy-gateway" ] - requiredScopes: [ "policy:read", "policy:author", "policy:review", "policy:approve", "policy:operate", "policy:simulate", "policy:run", "policy:activate" ] - requiredTenants: [ ] - bypassNetworks: - - "127.0.0.1/32" - - "::1/128" - requireHttpsMetadata: true - backchannelTimeoutSeconds: 30 - tokenClockSkewSeconds: 60 - -policyEngine: - baseAddress: "https://policy-engine.stella-ops.local" - audience: "api://policy-engine" - clientCredentials: - enabled: true - clientId: "policy-gateway" - clientSecret: "change-me" - scopes: [ "policy:read", "policy:author", "policy:review", "policy:approve", "policy:operate", "policy:simulate", "policy:run", "policy:activate" ] - backchannelTimeoutSeconds: 30 - dpop: - enabled: false - keyPath: "../etc/policy-gateway-dpop.pem" - keyPassphrase: "" - algorithm: "ES256" - proofLifetime: "00:02:00" - clockSkew: "00:00:30" +# StellaOps Policy Gateway configuration template. +# Copy to ../etc/policy-gateway.yaml (relative to the gateway content root) +# and adjust values to fit your environment. Environment variables prefixed with +# STELLAOPS_POLICY_GATEWAY_ override these values at runtime. + +schemaVersion: 1 + +telemetry: + minimumLogLevel: Information + +resourceServer: + authority: "https://authority.stella-ops.local" + metadataAddress: "https://authority.stella-ops.local/.well-known/openid-configuration" + audiences: [ "api://policy-gateway" ] + requiredScopes: [ "policy:read", "policy:author", "policy:review", "policy:approve", "policy:operate", "policy:simulate", "policy:run", "policy:activate" ] + requiredTenants: [ ] + bypassNetworks: + - "127.0.0.1/32" + - "::1/128" + requireHttpsMetadata: true + backchannelTimeoutSeconds: 30 + tokenClockSkewSeconds: 60 + +policyEngine: + baseAddress: "https://policy-engine.stella-ops.local" + audience: "api://policy-engine" + clientCredentials: + enabled: true + clientId: "policy-gateway" + clientSecret: "change-me" + scopes: [ "policy:read", "policy:author", "policy:review", "policy:approve", "policy:operate", "policy:simulate", "policy:run", "policy:activate" ] + backchannelTimeoutSeconds: 30 + dpop: + enabled: false + keyPath: "../etc/policy-gateway-dpop.pem" + keyPassphrase: "" + algorithm: "ES256" + proofLifetime: "00:02:00" + clockSkew: "00:00:30" diff --git a/etc/registry-signing-sample.pem b/etc/registry-signing-sample.pem index df9e353a..48ceced9 100644 --- a/etc/registry-signing-sample.pem +++ b/etc/registry-signing-sample.pem @@ -1,27 +1,27 @@ ------BEGIN PRIVATE KEY----- -MIIEpAIBAAKCAQEApK0BkUaZC26/J7el9fnYx1Y6Uwh0b3F08r5zixK9QmuaZ0+d -Zn2m5yA/ty/G6uSVn/YU5YZd7zFTy9P7egfa/tVU5tB2Lk5/v/+6JCTG0uzQjZ1e -tfx+/j/iKnD7Z3S2CQyq4F2VQQ2xxF+SaVQ9zbmqRaWhzVtzxz6pXPVH3YYBXFjC -OXD2gG+437lPlm3CRWWPnk0hxK6SLlqVvFyP34PO8TdQF5VAcez5vFfwrkqXDBHC -vQ156P6rWTeM1g9UUfPjCmaJC9k6uM4DUUDOVOA7xemlAf+QvOIbd1Yq+XNfYo6I -WrE1bCnHgYG/Y2J17YrCX7bZ06WpDjXRHuEaiwIDAQABAoIBAQCm0sCcdwuDA1yS -g52qZ2vBEtKgeAM9H9XD7VxVMdzJx/CbCcdE289kQTZrBp3fgpovvzgzjYQeojmJ -1oU3tEJX7AD1OCXikxBvl/EunzQ7Vm25Iw2zDX/a6li3jbDHNb/P5sNhoUqS2R7z -gKqoq2oGOV3R43bHZ6N5UVoVDoRF0oZtl4Mw2aF/3JcBCQr9qsS1SoD/CPcaFc5e -2CiYtn0N+L61+Z1YblFXxUD6YJn/1XlonyDtNzW6ybMcyTj3y5DsKFnmQbF1R837 -f6LS7IZzJ9Bod3lTY2QQoW8GrYc1Y4zM0P4ZfKw7u6nLLA/A4Ngpk+kuJoha+ffq 
-/BQx3xU5AoGBANOltyQwXBuo5omywFbGi2+Z7UFip3yz1Vi6Yo4QzfdP/UVfhrlP -IhiIC6cvI6bf7MwwN4vShBALTbctFRZpFj5Cil1Bh/n7dJE3KKdcBFDLndCszbb5 -21N4vR7BkfGav0PpVmeRlmy3FqnpYwm0KziqFwH2tsnegegcQFzN/Q0LAoGBAMok -2PKDwYhz523kn2AVVB8pB4X0ZPrZmtHh9bPlsJS7HwioX8Oo8CD5c+WQ4u4KnYV5 -B2X2y8WTDdYH50SUSzjYCaec6Mce6CO5XrCK0pC6mvJMQBoBI35Snu461FahAE6U -zNEp2bqMx8nKnuNRw1bI8gMlMrk0dBrJRvfUsycvAoGAcOVlC5+iB9YydUzFefvK -xjBvXvG9Y60tdkN4Kd1/XiN7UjsfOCvy9EhRL1u5//JLi0O3bCtCO6fsziS0PFAO -QX9WhCok0Ifn2GwzVDfteMoqmHhPmlKL0g7G70m2JdHMIiFAMJWpbD9gWKk0o9v0 -Bk0zF0hjWG9ipN1fAv61TRUCgYEAk4fQVxbRyWYVvHHcH4scr1jYIE7so9+boQ3c -O0YDyId+rLo0Orers/5OEI2gTgLz7HzFMr2SfLWaNqMy2Beo9/C8VM5ijx2zYNvM -oN+xsZLFYoA7KM0jb5dLZ1UL84sHynwYLPy+EsB7mP+OpclAqY6cHx47CL4yxo2J -cz0KkOUCgYA4hmgwRq3di82plhMnNs14UruI9SNpZbgZ7kFCCkQbGyRhPqbwIa1U -AWhaI4SqdOskwj6B+GScXMiF49cDG4xW4Cr2/pr9F1ZcUEAWESihrINZhCg5kS2d -FSKbJ2Xqs0GGx5xAxlzUaRF8NDH6cqfynlHC9HjDJSLXquGMEmXcnw== ------END PRIVATE KEY----- +-----BEGIN PRIVATE KEY----- +MIIEpAIBAAKCAQEApK0BkUaZC26/J7el9fnYx1Y6Uwh0b3F08r5zixK9QmuaZ0+d +Zn2m5yA/ty/G6uSVn/YU5YZd7zFTy9P7egfa/tVU5tB2Lk5/v/+6JCTG0uzQjZ1e +tfx+/j/iKnD7Z3S2CQyq4F2VQQ2xxF+SaVQ9zbmqRaWhzVtzxz6pXPVH3YYBXFjC +OXD2gG+437lPlm3CRWWPnk0hxK6SLlqVvFyP34PO8TdQF5VAcez5vFfwrkqXDBHC +vQ156P6rWTeM1g9UUfPjCmaJC9k6uM4DUUDOVOA7xemlAf+QvOIbd1Yq+XNfYo6I +WrE1bCnHgYG/Y2J17YrCX7bZ06WpDjXRHuEaiwIDAQABAoIBAQCm0sCcdwuDA1yS +g52qZ2vBEtKgeAM9H9XD7VxVMdzJx/CbCcdE289kQTZrBp3fgpovvzgzjYQeojmJ +1oU3tEJX7AD1OCXikxBvl/EunzQ7Vm25Iw2zDX/a6li3jbDHNb/P5sNhoUqS2R7z +gKqoq2oGOV3R43bHZ6N5UVoVDoRF0oZtl4Mw2aF/3JcBCQr9qsS1SoD/CPcaFc5e +2CiYtn0N+L61+Z1YblFXxUD6YJn/1XlonyDtNzW6ybMcyTj3y5DsKFnmQbF1R837 +f6LS7IZzJ9Bod3lTY2QQoW8GrYc1Y4zM0P4ZfKw7u6nLLA/A4Ngpk+kuJoha+ffq +/BQx3xU5AoGBANOltyQwXBuo5omywFbGi2+Z7UFip3yz1Vi6Yo4QzfdP/UVfhrlP +IhiIC6cvI6bf7MwwN4vShBALTbctFRZpFj5Cil1Bh/n7dJE3KKdcBFDLndCszbb5 +21N4vR7BkfGav0PpVmeRlmy3FqnpYwm0KziqFwH2tsnegegcQFzN/Q0LAoGBAMok +2PKDwYhz523kn2AVVB8pB4X0ZPrZmtHh9bPlsJS7HwioX8Oo8CD5c+WQ4u4KnYV5 +B2X2y8WTDdYH50SUSzjYCaec6Mce6CO5XrCK0pC6mvJMQBoBI35Snu461FahAE6U +zNEp2bqMx8nKnuNRw1bI8gMlMrk0dBrJRvfUsycvAoGAcOVlC5+iB9YydUzFefvK +xjBvXvG9Y60tdkN4Kd1/XiN7UjsfOCvy9EhRL1u5//JLi0O3bCtCO6fsziS0PFAO +QX9WhCok0Ifn2GwzVDfteMoqmHhPmlKL0g7G70m2JdHMIiFAMJWpbD9gWKk0o9v0 +Bk0zF0hjWG9ipN1fAv61TRUCgYEAk4fQVxbRyWYVvHHcH4scr1jYIE7so9+boQ3c +O0YDyId+rLo0Orers/5OEI2gTgLz7HzFMr2SfLWaNqMy2Beo9/C8VM5ijx2zYNvM +oN+xsZLFYoA7KM0jb5dLZ1UL84sHynwYLPy+EsB7mP+OpclAqY6cHx47CL4yxo2J +cz0KkOUCgYA4hmgwRq3di82plhMnNs14UruI9SNpZbgZ7kFCCkQbGyRhPqbwIa1U +AWhaI4SqdOskwj6B+GScXMiF49cDG4xW4Cr2/pr9F1ZcUEAWESihrINZhCg5kS2d +FSKbJ2Xqs0GGx5xAxlzUaRF8NDH6cqfynlHC9HjDJSLXquGMEmXcnw== +-----END PRIVATE KEY----- diff --git a/etc/registry-token.yaml b/etc/registry-token.yaml index e5d1e61f..e6a332ff 100644 --- a/etc/registry-token.yaml +++ b/etc/registry-token.yaml @@ -1,30 +1,30 @@ -registryTokenService: - authority: - issuer: "https://authority.localhost" - requireHttpsMetadata: false - audiences: - - "registry" - requiredScopes: - - "registry.token.issue" - signing: - issuer: "https://registry.localhost/token" - keyPath: "etc/registry-signing-sample.pem" - keyPassword: "" - lifetime: "00:05:00" - registry: - realm: "https://registry.localhost/v2/token" - allowedServices: - - "registry.localhost" - defaultPlan: "community" - plans: - - name: "community" - repositories: - - pattern: "stella-ops/public/*" - actions: [ "pull" ] - - name: "enterprise" - repositories: - - pattern: "stella-ops/public/*" - actions: [ "pull" ] - - pattern: "stella-ops/enterprise/*" - actions: [ "pull" ] - revokedLicenses: [] 
+registryTokenService: + authority: + issuer: "https://authority.localhost" + requireHttpsMetadata: false + audiences: + - "registry" + requiredScopes: + - "registry.token.issue" + signing: + issuer: "https://registry.localhost/token" + keyPath: "etc/registry-signing-sample.pem" + keyPassword: "" + lifetime: "00:05:00" + registry: + realm: "https://registry.localhost/v2/token" + allowedServices: + - "registry.localhost" + defaultPlan: "community" + plans: + - name: "community" + repositories: + - pattern: "stella-ops/public/*" + actions: [ "pull" ] + - name: "enterprise" + repositories: + - pattern: "stella-ops/public/*" + actions: [ "pull" ] + - pattern: "stella-ops/enterprise/*" + actions: [ "pull" ] + revokedLicenses: [] diff --git a/etc/secrets/cartographer-service.secret b/etc/secrets/cartographer-service.secret index a222272d..c520329a 100644 --- a/etc/secrets/cartographer-service.secret +++ b/etc/secrets/cartographer-service.secret @@ -1,2 +1,2 @@ -# replace with a strong shared secret for the cartographer-service client -cartographer-service-secret-change-me +# replace with a strong shared secret for the cartographer-service client +cartographer-service-secret-change-me diff --git a/etc/secrets/concelier-ingest.secret b/etc/secrets/concelier-ingest.secret index f21fc33f..2f166da4 100644 --- a/etc/secrets/concelier-ingest.secret +++ b/etc/secrets/concelier-ingest.secret @@ -1,2 +1,2 @@ -# replace with a strong shared secret for the concelier-ingest client -concelier-ingest-secret-change-me +# replace with a strong shared secret for the concelier-ingest client +concelier-ingest-secret-change-me diff --git a/etc/secrets/console-web.secret b/etc/secrets/console-web.secret index ecbc6f7b..39f0bf49 100644 --- a/etc/secrets/console-web.secret +++ b/etc/secrets/console-web.secret @@ -1,2 +1,2 @@ -# replace with a strong shared secret for the console-web client -console-web-secret-change-me +# replace with a strong shared secret for the console-web client +console-web-secret-change-me diff --git a/etc/secrets/excitor-ingest.secret b/etc/secrets/excitor-ingest.secret index a1fb7f12..c95d2f35 100644 --- a/etc/secrets/excitor-ingest.secret +++ b/etc/secrets/excitor-ingest.secret @@ -1,2 +1,2 @@ -# replace with a strong shared secret for the excitor-ingest client -excitor-ingest-secret-change-me +# replace with a strong shared secret for the excitor-ingest client +excitor-ingest-secret-change-me diff --git a/etc/secrets/graph-api-cli.secret b/etc/secrets/graph-api-cli.secret index 9d05d27b..6e44aba4 100644 --- a/etc/secrets/graph-api-cli.secret +++ b/etc/secrets/graph-api-cli.secret @@ -1,2 +1,2 @@ -# replace with a strong shared secret for the graph-api-cli client -graph-api-cli-secret-change-me +# replace with a strong shared secret for the graph-api-cli client +graph-api-cli-secret-change-me diff --git a/etc/secrets/graph-api.secret b/etc/secrets/graph-api.secret index c6f84327..38a7cb90 100644 --- a/etc/secrets/graph-api.secret +++ b/etc/secrets/graph-api.secret @@ -1,2 +1,2 @@ -# replace with a strong shared secret for the graph-api client -graph-api-secret-change-me +# replace with a strong shared secret for the graph-api client +graph-api-secret-change-me diff --git a/etc/secrets/policy-cli.secret b/etc/secrets/policy-cli.secret index 1e3a1a77..c5706ffe 100644 --- a/etc/secrets/policy-cli.secret +++ b/etc/secrets/policy-cli.secret @@ -1,2 +1,2 @@ -# generated 2025-10-27T21:55:11Z via scripts/rotate-policy-cli-secret.sh 
-policy-cli-iOHhrE+K1sx+iyWQOd9pqYh0LwbXRauO/zdv0AeFUvLKAtZsc1wTIB5qZ8YIfKEo +# generated 2025-10-27T21:55:11Z via scripts/rotate-policy-cli-secret.sh +policy-cli-iOHhrE+K1sx+iyWQOd9pqYh0LwbXRauO/zdv0AeFUvLKAtZsc1wTIB5qZ8YIfKEo diff --git a/etc/secrets/policy-engine.secret b/etc/secrets/policy-engine.secret index 13456083..c07dd9e3 100644 --- a/etc/secrets/policy-engine.secret +++ b/etc/secrets/policy-engine.secret @@ -1,2 +1,2 @@ -# replace with a strong shared secret for the policy-engine client -policy-engine-secret-change-me +# replace with a strong shared secret for the policy-engine client +policy-engine-secret-change-me diff --git a/etc/signals.yaml.sample b/etc/signals.yaml.sample index 5ff49f20..867fbaa5 100644 --- a/etc/signals.yaml.sample +++ b/etc/signals.yaml.sample @@ -1,28 +1,28 @@ -# Signals service configuration template. -# Copy to ../etc/signals.yaml (relative to the Signals content root) -# and adjust values to fit your environment. - -schemaVersion: 1 - -Signals: - Authority: - Enabled: true - Issuer: "https://authority.stella-ops.local" - AllowAnonymousFallback: false - Audiences: - - "api://signals" - RequiredTenants: - - "tenant-default" - RequiredScopes: - - "signals:read" - - "signals:write" - - "signals:admin" - BypassNetworks: - - "127.0.0.1/32" - - "::1/128" - Mongo: - ConnectionString: "mongodb://localhost:27017/signals" - Database: "signals" - CallgraphsCollection: "callgraphs" - Storage: - RootPath: "../data/signals-artifacts" +# Signals service configuration template. +# Copy to ../etc/signals.yaml (relative to the Signals content root) +# and adjust values to fit your environment. + +schemaVersion: 1 + +Signals: + Authority: + Enabled: true + Issuer: "https://authority.stella-ops.local" + AllowAnonymousFallback: false + Audiences: + - "api://signals" + RequiredTenants: + - "tenant-default" + RequiredScopes: + - "signals:read" + - "signals:write" + - "signals:admin" + BypassNetworks: + - "127.0.0.1/32" + - "::1/128" + Mongo: + ConnectionString: "mongodb://localhost:27017/signals" + Database: "signals" + CallgraphsCollection: "callgraphs" + Storage: + RootPath: "../data/signals-artifacts" diff --git a/ops/authority/Dockerfile b/ops/authority/Dockerfile index af76ade5..cb1d7bee 100644 --- a/ops/authority/Dockerfile +++ b/ops/authority/Dockerfile @@ -15,7 +15,7 @@ WORKDIR /src # Restore & publish COPY . . RUN dotnet restore src/StellaOps.sln -RUN dotnet publish src/StellaOps.Authority/StellaOps.Authority/StellaOps.Authority.csproj \ +RUN dotnet publish src/Authority/StellaOps.Authority/StellaOps.Authority/StellaOps.Authority.csproj \ -c Release \ -o /app/publish \ /p:UseAppHost=false diff --git a/ops/authority/README.md b/ops/authority/README.md index 27a9c81c..d00eaeb9 100644 --- a/ops/authority/README.md +++ b/ops/authority/README.md @@ -1,62 +1,62 @@ -# StellaOps Authority Container Scaffold - -This directory provides a distroless Dockerfile and `docker-compose` sample for bootstrapping the Authority service alongside MongoDB (required) and Redis (optional). - -## Prerequisites - -- Docker Engine 25+ and Compose V2 -- .NET 10 preview SDK (only required when building locally outside of Compose) -- Populated Authority configuration at `etc/authority.yaml` and plugin manifests under `etc/authority.plugins/` - -## Usage - -```bash -# 1. Ensure configuration files exist (copied from etc/authority.yaml.sample, etc/authority.plugins/*.yaml) -# 2. 
Build and start the stack -docker compose -f ops/authority/docker-compose.authority.yaml up --build -``` - -`authority.yaml` is mounted read-only at `/etc/authority.yaml` inside the container. Plugin manifests are mounted to `/app/etc/authority.plugins`. Update the issuer URL plus any Mongo credentials in the compose file or via an `.env`. - -To run with pre-built images, replace the `build:` block in the compose file with an `image:` reference. - -## Volumes - -- `mongo-data` – persists MongoDB state. -- `redis-data` – optional Redis persistence (enable the service before use). -- `authority-keys` – writable volume for Authority signing keys. - -## Environment overrides - -Key environment variables (mirroring `StellaOpsAuthorityOptions`): - -| Variable | Description | -| --- | --- | -| `STELLAOPS_AUTHORITY__ISSUER` | Public issuer URL advertised by Authority | -| `STELLAOPS_AUTHORITY__PLUGINDIRECTORIES__0` | Primary plugin binaries directory inside the container | -| `STELLAOPS_AUTHORITY__PLUGINS__CONFIGURATIONDIRECTORY` | Path to plugin manifest directory | - -For additional options, see `etc/authority.yaml.sample`. - -> **Graph Explorer reminder:** When enabling Cartographer or Graph API components, update `etc/authority.yaml` so the `cartographer-service` client includes `properties.serviceIdentity: "cartographer"` and a tenant hint. Authority now rejects `graph:write` tokens that lack this marker, so existing deployments must apply the update before rolling out the new build. - -> **Console endpoint reminder:** The Console UI now calls `/console/tenants`, `/console/profile`, and `/console/token/introspect`. Reverse proxies must forward the `X-Stella-Tenant` header (derived from the access token) so Authority can enforce tenancy; audit events are logged under `authority.console.*`. Admin actions obey a five-minute fresh-auth window reported by `/console/profile`, so keep session timeout prompts aligned with that value. - -## Key rotation automation (OPS3) - -The `key-rotation.sh` helper wraps the `/internal/signing/rotate` endpoint delivered with CORE10. It can run in CI/CD once the new PEM key is staged on the Authority host volume. - -```bash -AUTHORITY_BOOTSTRAP_KEY=$(cat ~/.secrets/authority-bootstrap.key) \ -./key-rotation.sh \ - --authority-url https://authority.stella-ops.local \ - --key-id authority-signing-2025 \ - --key-path ../certificates/authority-signing-2025.pem \ - --meta rotatedBy=pipeline --meta changeTicket=OPS-1234 -``` - -- `--key-path` should resolve from the Authority content root (same as `docs/11_AUTHORITY.md` SOP). -- Provide `--source`/`--provider` if the key loader differs from the default file-based provider. -- Pass `--dry-run` during rehearsals to inspect the JSON payload without invoking the API. - -After rotation, export a fresh revocation bundle (`stellaops-cli auth revoke export`) so downstream mirrors consume signatures from the new `kid`. The canonical operational steps live in `docs/11_AUTHORITY.md` – make sure any local automation keeps that guide as source of truth. +# StellaOps Authority Container Scaffold + +This directory provides a distroless Dockerfile and `docker-compose` sample for bootstrapping the Authority service alongside MongoDB (required) and Redis (optional). 
+ +## Prerequisites + +- Docker Engine 25+ and Compose V2 +- .NET 10 preview SDK (only required when building locally outside of Compose) +- Populated Authority configuration at `etc/authority.yaml` and plugin manifests under `etc/authority.plugins/` + +## Usage + +```bash +# 1. Ensure configuration files exist (copied from etc/authority.yaml.sample, etc/authority.plugins/*.yaml) +# 2. Build and start the stack +docker compose -f ops/authority/docker-compose.authority.yaml up --build +``` + +`authority.yaml` is mounted read-only at `/etc/authority.yaml` inside the container. Plugin manifests are mounted to `/app/etc/authority.plugins`. Update the issuer URL plus any Mongo credentials in the compose file or via an `.env`. + +To run with pre-built images, replace the `build:` block in the compose file with an `image:` reference. + +## Volumes + +- `mongo-data` – persists MongoDB state. +- `redis-data` – optional Redis persistence (enable the service before use). +- `authority-keys` – writable volume for Authority signing keys. + +## Environment overrides + +Key environment variables (mirroring `StellaOpsAuthorityOptions`): + +| Variable | Description | +| --- | --- | +| `STELLAOPS_AUTHORITY__ISSUER` | Public issuer URL advertised by Authority | +| `STELLAOPS_AUTHORITY__PLUGINDIRECTORIES__0` | Primary plugin binaries directory inside the container | +| `STELLAOPS_AUTHORITY__PLUGINS__CONFIGURATIONDIRECTORY` | Path to plugin manifest directory | + +For additional options, see `etc/authority.yaml.sample`. + +> **Graph Explorer reminder:** When enabling Cartographer or Graph API components, update `etc/authority.yaml` so the `cartographer-service` client includes `properties.serviceIdentity: "cartographer"` and a tenant hint. Authority now rejects `graph:write` tokens that lack this marker, so existing deployments must apply the update before rolling out the new build. + +> **Console endpoint reminder:** The Console UI now calls `/console/tenants`, `/console/profile`, and `/console/token/introspect`. Reverse proxies must forward the `X-Stella-Tenant` header (derived from the access token) so Authority can enforce tenancy; audit events are logged under `authority.console.*`. Admin actions obey a five-minute fresh-auth window reported by `/console/profile`, so keep session timeout prompts aligned with that value. + +## Key rotation automation (OPS3) + +The `key-rotation.sh` helper wraps the `/internal/signing/rotate` endpoint delivered with CORE10. It can run in CI/CD once the new PEM key is staged on the Authority host volume. + +```bash +AUTHORITY_BOOTSTRAP_KEY=$(cat ~/.secrets/authority-bootstrap.key) \ +./key-rotation.sh \ + --authority-url https://authority.stella-ops.local \ + --key-id authority-signing-2025 \ + --key-path ../certificates/authority-signing-2025.pem \ + --meta rotatedBy=pipeline --meta changeTicket=OPS-1234 +``` + +- `--key-path` should resolve from the Authority content root (same as `docs/11_AUTHORITY.md` SOP). +- Provide `--source`/`--provider` if the key loader differs from the default file-based provider. +- Pass `--dry-run` during rehearsals to inspect the JSON payload without invoking the API. + +After rotation, export a fresh revocation bundle (`stellaops-cli auth revoke export`) so downstream mirrors consume signatures from the new `kid`. The canonical operational steps live in `docs/11_AUTHORITY.md` – make sure any local automation keeps that guide as source of truth. 
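+A minimal end-to-end sketch of that flow (rehearse, rotate, then refresh revocations) follows; the key id, certificate path, secret location, and metadata values are illustrative placeholders carried over from the example above, so substitute whatever your own SOP in `docs/11_AUTHORITY.md` mandates.
+
+```bash
+# Load the bootstrap credential consumed by key-rotation.sh (path is an example).
+export AUTHORITY_BOOTSTRAP_KEY=$(cat ~/.secrets/authority-bootstrap.key)
+
+# 1. Rehearse: --dry-run prints the JSON payload without calling the rotate API.
+./key-rotation.sh \
+  --authority-url https://authority.stella-ops.local \
+  --key-id authority-signing-2025 \
+  --key-path ../certificates/authority-signing-2025.pem \
+  --dry-run
+
+# 2. Rotate for real, attaching audit metadata for the change record.
+./key-rotation.sh \
+  --authority-url https://authority.stella-ops.local \
+  --key-id authority-signing-2025 \
+  --key-path ../certificates/authority-signing-2025.pem \
+  --meta rotatedBy=pipeline --meta changeTicket=OPS-1234
+
+# 3. Export a fresh revocation bundle so downstream mirrors pick up the new kid.
+stellaops-cli auth revoke export
+```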
diff --git a/ops/devops/README.md b/ops/devops/README.md index 3bebdd38..fd988aa7 100644 --- a/ops/devops/README.md +++ b/ops/devops/README.md @@ -1,92 +1,92 @@ -# DevOps Release Automation - -The **release** workflow builds and signs the StellaOps service containers, -generates SBOM + provenance attestations, and emits a canonical -`release.yaml`. The logic lives under `ops/devops/release/` and is invoked -by the new `.gitea/workflows/release.yml` pipeline. - -## Local dry run - -```bash -./ops/devops/release/build_release.py \ - --version 2025.10.0-edge \ - --channel edge \ - --dry-run -``` - -Outputs land under `out/release/`. Use `--no-push` to run full builds without -pushing to the registry. - -After the build completes, run the verifier to validate recorded hashes and artefact -presence: - -```bash -python ops/devops/release/verify_release.py --release-dir out/release -``` - -## Python analyzer smoke & signing - -`dotnet run --project tools/LanguageAnalyzerSmoke` exercises the Python language -analyzer plug-in against the golden fixtures (cold/warm timings, determinism). The -release workflow runs this harness automatically and then produces Cosign -signatures + SHA-256 sidecars for `StellaOps.Scanner.Analyzers.Lang.Python.dll` -and its `manifest.json`. Keep `COSIGN_KEY_REF`/`COSIGN_IDENTITY_TOKEN` populated so -the step can sign the artefacts; the generated `.sig`/`.sha256` files ship with the -Offline Kit bundle. - -## Required tooling - -- Docker 25+ with Buildx -- .NET 10 preview SDK (builds container stages and the SBOM generator) -- Node.js 20 (Angular UI build) -- Helm 3.16+ -- Cosign 2.2+ - -Supply signing material via environment variables: - -- `COSIGN_KEY_REF` – e.g. `file:./keys/cosign.key` or `azurekms://…` -- `COSIGN_PASSWORD` – password protecting the above key - -The workflow defaults to multi-arch (`linux/amd64,linux/arm64`), SBOM in -CycloneDX, and SLSA provenance (`https://slsa.dev/provenance/v1`). - -## Debug store extraction - -`build_release.py` now exports stripped debug artefacts for every ELF discovered in the published images. The files land under `out/release/debug/.build-id//.debug`, with metadata captured in `debug/debug-manifest.json` (and a `.sha256` sidecar). Use `jq` to inspect the manifest or `readelf -n` to spot-check a build-id. Offline Kit packaging should reuse the `debug/` directory as-is. - -## UI auth smoke (Playwright) - -As part of **DEVOPS-UI-13-006** the pipelines will execute the UI auth smoke -tests (`npm run test:e2e`) after building the Angular bundle. See -`docs/ops/ui-auth-smoke.md` for the job design, environment stubs, and -offline runner considerations. - -## NuGet preview bootstrap - -`.NET 10` preview packages (Microsoft.Extensions.*, JwtBearer 10.0 RC, Sqlite 9 RC) -ship from the public `dotnet-public` Azure DevOps feed. We mirror them into -`./local-nuget` so restores succeed inside Offline Kit. - -1. Run `./ops/devops/sync-preview-nuget.sh` whenever you update the manifest. -2. The script now understands the optional `SourceBase` column (V3 flat container) - and writes packages alongside their SHA-256 checks. -3. `NuGet.config` registers the mirror (`local`), dotnet-public, and nuget.org. - -Use `python3 ops/devops/validate_restore_sources.py` to prove the repo still -prefers the local mirror and that `Directory.Build.props` enforces the same order. -The validator now runs automatically in the `build-test-deploy` and `release` -workflows so CI fails fast when a feed priority regression slips in. 
- -Detailed operator instructions live in `docs/ops/nuget-preview-bootstrap.md`. - -## Telemetry collector tooling (DEVOPS-OBS-50-001) - -- `ops/devops/telemetry/generate_dev_tls.sh` – generates a development CA and - client/server certificates for the OpenTelemetry collector overlay (mutual TLS). -- `ops/devops/telemetry/smoke_otel_collector.py` – sends OTLP traces/metrics/logs - over TLS and validates that the collector increments its receiver counters. -- `ops/devops/telemetry/package_offline_bundle.py` – re-packages collector assets for the Offline Kit. -- `deploy/compose/docker-compose.telemetry-storage.yaml` – Prometheus/Tempo/Loki stack for staging validation. - -Combine these helpers with `deploy/compose/docker-compose.telemetry.yaml` to run -a secured collector locally before rolling out the Helm-based deployment. +# DevOps Release Automation + +The **release** workflow builds and signs the StellaOps service containers, +generates SBOM + provenance attestations, and emits a canonical +`release.yaml`. The logic lives under `ops/devops/release/` and is invoked +by the new `.gitea/workflows/release.yml` pipeline. + +## Local dry run + +```bash +./ops/devops/release/build_release.py \ + --version 2025.10.0-edge \ + --channel edge \ + --dry-run +``` + +Outputs land under `out/release/`. Use `--no-push` to run full builds without +pushing to the registry. + +After the build completes, run the verifier to validate recorded hashes and artefact +presence: + +```bash +python ops/devops/release/verify_release.py --release-dir out/release +``` + +## Python analyzer smoke & signing + +`dotnet run --project tools/LanguageAnalyzerSmoke` exercises the Python language +analyzer plug-in against the golden fixtures (cold/warm timings, determinism). The +release workflow runs this harness automatically and then produces Cosign +signatures + SHA-256 sidecars for `StellaOps.Scanner.Analyzers.Lang.Python.dll` +and its `manifest.json`. Keep `COSIGN_KEY_REF`/`COSIGN_IDENTITY_TOKEN` populated so +the step can sign the artefacts; the generated `.sig`/`.sha256` files ship with the +Offline Kit bundle. + +## Required tooling + +- Docker 25+ with Buildx +- .NET 10 preview SDK (builds container stages and the SBOM generator) +- Node.js 20 (Angular UI build) +- Helm 3.16+ +- Cosign 2.2+ + +Supply signing material via environment variables: + +- `COSIGN_KEY_REF` – e.g. `file:./keys/cosign.key` or `azurekms://…` +- `COSIGN_PASSWORD` – password protecting the above key + +The workflow defaults to multi-arch (`linux/amd64,linux/arm64`), SBOM in +CycloneDX, and SLSA provenance (`https://slsa.dev/provenance/v1`). + +## Debug store extraction + +`build_release.py` now exports stripped debug artefacts for every ELF discovered in the published images. The files land under `out/release/debug/.build-id//.debug`, with metadata captured in `debug/debug-manifest.json` (and a `.sha256` sidecar). Use `jq` to inspect the manifest or `readelf -n` to spot-check a build-id. Offline Kit packaging should reuse the `debug/` directory as-is. + +## UI auth smoke (Playwright) + +As part of **DEVOPS-UI-13-006** the pipelines will execute the UI auth smoke +tests (`npm run test:e2e`) after building the Angular bundle. See +`docs/ops/ui-auth-smoke.md` for the job design, environment stubs, and +offline runner considerations. + +## NuGet preview bootstrap + +`.NET 10` preview packages (Microsoft.Extensions.*, JwtBearer 10.0 RC, Sqlite 9 RC) +ship from the public `dotnet-public` Azure DevOps feed. 
We mirror them into +`./local-nuget` so restores succeed inside Offline Kit. + +1. Run `./ops/devops/sync-preview-nuget.sh` whenever you update the manifest. +2. The script now understands the optional `SourceBase` column (V3 flat container) + and writes packages alongside their SHA-256 checks. +3. `NuGet.config` registers the mirror (`local`), dotnet-public, and nuget.org. + +Use `python3 ops/devops/validate_restore_sources.py` to prove the repo still +prefers the local mirror and that `Directory.Build.props` enforces the same order. +The validator now runs automatically in the `build-test-deploy` and `release` +workflows so CI fails fast when a feed priority regression slips in. + +Detailed operator instructions live in `docs/ops/nuget-preview-bootstrap.md`. + +## Telemetry collector tooling (DEVOPS-OBS-50-001) + +- `ops/devops/telemetry/generate_dev_tls.sh` – generates a development CA and + client/server certificates for the OpenTelemetry collector overlay (mutual TLS). +- `ops/devops/telemetry/smoke_otel_collector.py` – sends OTLP traces/metrics/logs + over TLS and validates that the collector increments its receiver counters. +- `ops/devops/telemetry/package_offline_bundle.py` – re-packages collector assets for the Offline Kit. +- `deploy/compose/docker-compose.telemetry-storage.yaml` – Prometheus/Tempo/Loki stack for staging validation. + +Combine these helpers with `deploy/compose/docker-compose.telemetry.yaml` to run +a secured collector locally before rolling out the Helm-based deployment. diff --git a/ops/devops/TASKS.md b/ops/devops/TASKS.md index b1a13106..89a88fb5 100644 --- a/ops/devops/TASKS.md +++ b/ops/devops/TASKS.md @@ -1,172 +1,172 @@ -# DevOps Task Board - -## Governance & Rules - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DEVOPS-RULES-33-001 | DOING (2025-10-26) | DevOps Guild, Platform Leads | — | Contracts & Rules anchor:
• Gateway proxies only; Policy Engine composes overlays/simulations.
• AOC ingestion cannot merge; only lossless canonicalization.
• One graph platform: Graph Indexer + Graph API. Cartographer retired. | Rules posted in SPRINTS/TASKS; duplicates cleaned per guidance; reviewers acknowledge in changelog. | - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DEVOPS-HELM-09-001 | DONE | DevOps Guild | SCANNER-WEB-09-101 | Create Helm/Compose environment profiles (dev, staging, airgap) with deterministic digests. | Profiles committed under `deploy/`; docs updated; CI smoke deploy passes. | -| DEVOPS-SCANNER-09-204 | DONE (2025-10-21) | DevOps Guild, Scanner WebService Guild | SCANNER-EVENTS-15-201 | Surface `SCANNER__EVENTS__*` environment variables across docker-compose (dev/stage/airgap) and Helm values, defaulting to share the Redis queue DSN. | Compose/Helm configs ship enabled Redis event publishing with documented overrides; lint jobs updated; docs cross-link to new knobs. | -| DEVOPS-SCANNER-09-205 | DONE (2025-10-21) | DevOps Guild, Notify Guild | DEVOPS-SCANNER-09-204 | Add Notify smoke stage that tails the Redis stream and asserts `scanner.report.ready`/`scanner.scan.completed` reach Notify WebService in staging. | CI job reads Redis stream during scanner smoke deploy, confirms Notify ingestion via API, alerts on failure. | -| DEVOPS-PERF-10-001 | DONE | DevOps Guild | BENCH-SCANNER-10-001 | Add perf smoke job (SBOM compose <5 s target) to CI. | CI job runs sample build verifying <5 s; alerts configured. | -| DEVOPS-PERF-10-002 | DONE (2025-10-23) | DevOps Guild | BENCH-SCANNER-10-002 | Publish analyzer bench metrics to Grafana/perf workbook and alarm on ≥20 % regressions. | CI exports JSON for dashboards; Grafana panel wired; Ops on-call doc updated with alert hook. | -| DEVOPS-AOC-19-001 | BLOCKED (2025-10-26) | DevOps Guild, Platform Guild | WEB-AOC-19-003 | Integrate the AOC Roslyn analyzer and guard tests into CI, failing builds when ingestion projects attempt banned writes. | Analyzer runs in PR/CI pipelines, results surfaced in build summary, docs updated under `docs/ops/ci-aoc.md`. | -> Docs hand-off (2025-10-26): see `docs/ingestion/aggregation-only-contract.md` §5, `docs/architecture/overview.md`, and `docs/cli/cli-reference.md` for guard + verifier expectations. -| DEVOPS-AOC-19-002 | BLOCKED (2025-10-26) | DevOps Guild | CLI-AOC-19-002, CONCELIER-WEB-AOC-19-004, EXCITITOR-WEB-AOC-19-004 | Add pipeline stage executing `stella aoc verify --since` against seeded Mongo snapshots for Concelier + Excititor, publishing violation report artefacts. | Stage runs on main/nightly, fails on violations, artifacts retained, runbook documented. | -> Blocked: waiting on CLI verifier command and Concelier/Excititor guard endpoints to land (CLI-AOC-19-002, CONCELIER-WEB-AOC-19-004, EXCITITOR-WEB-AOC-19-004). -| DEVOPS-AOC-19-003 | BLOCKED (2025-10-26) | DevOps Guild, QA Guild | CONCELIER-WEB-AOC-19-003, EXCITITOR-WEB-AOC-19-003 | Enforce unit test coverage thresholds for AOC guard suites and ensure coverage exported to dashboards. | Coverage report includes guard projects, threshold gate passes/fails as expected, dashboards refreshed with new metrics. | -> Blocked: guard coverage suites and exporter hooks pending in Concelier/Excititor (CONCELIER-WEB-AOC-19-003, EXCITITOR-WEB-AOC-19-003). 
-| DEVOPS-AOC-19-101 | TODO (2025-10-28) | DevOps Guild, Concelier Storage Guild | CONCELIER-STORE-AOC-19-002 | Draft supersedes backfill rollout (freeze window, dry-run steps, rollback) once advisory_raw idempotency index passes staging verification. | Runbook committed in `docs/deploy/containers.md` + Offline Kit notes, staging rehearsal scheduled with dependencies captured in SPRINTS. | -| DEVOPS-OBS-50-001 | DONE (2025-10-26) | DevOps Guild, Observability Guild | TELEMETRY-OBS-50-001 | Deliver default OpenTelemetry collector deployment (Compose/Helm manifests), OTLP ingestion endpoints, and secure pipeline (authN, mTLS, tenant partitioning). Provide smoke test verifying traces/logs/metrics ingestion. | Collector manifests committed; smoke test green; docs updated; imposed rule banner reminder noted. | -| DEVOPS-OBS-50-002 | DOING (2025-10-26) | DevOps Guild, Security Guild | DEVOPS-OBS-50-001, TELEMETRY-OBS-51-002 | Stand up multi-tenant storage backends (Prometheus, Tempo/Jaeger, Loki) with retention policies, tenant isolation, and redaction guard rails. Integrate with Authority scopes for read paths. | Storage stack deployed with auth; retention configured; integration tests verify tenant isolation; runbook drafted. | -> Coordination started with Observability Guild (2025-10-26) to schedule staging rollout and provision service accounts. Staging bootstrap commands and secret names documented in `docs/ops/telemetry-storage.md`. -| DEVOPS-OBS-50-003 | DONE (2025-10-26) | DevOps Guild, Offline Kit Guild | DEVOPS-OBS-50-001 | Package telemetry stack configs for air-gapped installs (Offline Kit bundle, documented overrides, sample values) and automate checksum/signature generation. | Offline bundle includes collector+storage configs; checksums published; docs cross-linked; imposed rule annotation recorded. | -| DEVOPS-OBS-51-001 | TODO | DevOps Guild, Observability Guild | WEB-OBS-51-001, DEVOPS-OBS-50-001 | Implement SLO evaluator service (burn rate calculators, webhook emitters), Grafana dashboards, and alert routing to Notifier. Provide Terraform/Helm automation. | Dashboards live; evaluator emits webhooks; alert runbook referenced; staging alert fired in test. | -| DEVOPS-OBS-52-001 | TODO | DevOps Guild, Timeline Indexer Guild | TIMELINE-OBS-52-002 | Configure streaming pipeline (NATS/Redis/Kafka) with retention, partitioning, and backpressure tuning for timeline events; add CI validation of schema + rate caps. | Pipeline deployed; load test meets SLA; schema validation job passes; documentation updated. | -| DEVOPS-OBS-53-001 | TODO | DevOps Guild, Evidence Locker Guild | EVID-OBS-53-001 | Provision object storage with WORM/retention options (S3 Object Lock / MinIO immutability), legal hold automation, and backup/restore scripts for evidence locker. | Storage configured with WORM; legal hold script documented; backup test performed; runbook updated. | -| DEVOPS-OBS-54-001 | TODO | DevOps Guild, Security Guild | PROV-OBS-53-002, EVID-OBS-54-001 | Manage provenance signing infrastructure (KMS keys, rotation schedule, timestamp authority integration) and integrate verification jobs into CI. | Keys provisioned with rotation policy; timestamp authority configured; CI verifies sample bundles; audit trail stored. | -| DEVOPS-OBS-55-001 | TODO | DevOps Guild, Ops Guild | DEVOPS-OBS-51-001, WEB-OBS-55-001 | Implement incident mode automation: feature flag service, auto-activation via SLO burn-rate, retention override management, and post-incident reset job. 
| Incident mode toggles via API/CLI; automation tested in staging; reset job verified; runbook referenced. | - -## Air-Gapped Mode (Epic 16) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DEVOPS-AIRGAP-56-001 | TODO | DevOps Guild | AIRGAP-CTL-56-001 | Ship deny-all egress policies for Kubernetes (NetworkPolicy/eBPF) and docker-compose firewall rules; provide verification script for sealed mode. | Policies committed with tests; verification script passes/fails as expected; docs cross-linked. | -| DEVOPS-AIRGAP-56-002 | TODO | DevOps Guild, AirGap Importer Guild | AIRGAP-IMP-57-002 | Provide import tooling for bundle staging: checksum validation, offline object-store loader scripts, removable media guidance. | Scripts documented; smoke tests validate import; runbook updated. | -| DEVOPS-AIRGAP-56-003 | TODO | DevOps Guild, Container Distribution Guild | EXPORT-AIRGAP-56-002 | Build Bootstrap Pack pipeline bundling images/charts, generating checksums, and publishing manifest for offline transfer. | Pipeline runs in connected env; pack verified in air-gap smoke test; manifest recorded. | -| DEVOPS-AIRGAP-57-001 | TODO | DevOps Guild, Mirror Creator Guild | MIRROR-CRT-56-002 | Automate Mirror Bundle creation jobs with dual-control approvals, artifact signing, and checksum publication. | Approval workflow enforced; CI artifact includes DSSE/TUF metadata; audit logs stored. | -| DEVOPS-AIRGAP-57-002 | TODO | DevOps Guild, Authority Guild | AUTH-OBS-50-001 | Configure sealed-mode CI tests that run services with sealed flag and ensure no egress occurs (iptables + mock DNS). | CI suite fails on attempted egress; reports remediation; documentation updated. | -| DEVOPS-AIRGAP-58-001 | TODO | DevOps Guild, Notifications Guild | NOTIFY-AIRGAP-56-002 | Provide local SMTP/syslog container templates and health checks for sealed environments; integrate into Bootstrap Pack. | Templates deployed successfully; health checks in CI; docs updated. | -| DEVOPS-AIRGAP-58-002 | TODO | DevOps Guild, Observability Guild | DEVOPS-AIRGAP-56-001, DEVOPS-OBS-51-001 | Ship sealed-mode observability stack (Prometheus/Grafana/Tempo/Loki) pre-configured with offline dashboards and no remote exporters. | Stack boots offline; dashboards available; verification script confirms zero egress. | -| DEVOPS-REL-14-001 | DONE (2025-10-26) | DevOps Guild | SIGNER-API-11-101, ATTESTOR-API-11-201 | Deterministic build/release pipeline with SBOM/provenance, signing, manifest generation. | CI pipeline produces signed images + SBOM/attestations, manifests published with verified hashes, docs updated. | -| DEVOPS-REL-14-004 | DONE (2025-10-26) | DevOps Guild, Scanner Guild | DEVOPS-REL-14-001, SCANNER-ANALYZERS-LANG-10-309P | Extend release/offline smoke jobs to exercise the Python analyzer plug-in (warm/cold scans, determinism, signature checks). | Release/Offline pipelines run Python analyzer smoke suite; alerts hooked; docs updated with new coverage matrix. | -| DEVOPS-REL-17-002 | DONE (2025-10-26) | DevOps Guild | DEVOPS-REL-14-001, SCANNER-EMIT-17-701 | Persist stripped-debug artifacts organised by GNU build-id and bundle them into release/offline kits with checksum manifests. | CI job writes `.debug` files under `artifacts/debug/.build-id/`, manifest + checksums published, offline kit includes cache, smoke job proves symbol lookup via build-id. 
| -| DEVOPS-REL-17-004 | BLOCKED (2025-10-26) | DevOps Guild | DEVOPS-REL-17-002 | Ensure release workflow publishes `out/release/debug` (build-id tree + manifest) and fails when symbols are missing. | Release job emits debug artefacts, `mirror_debug_store.py` summary committed, warning cleared from build logs, docs updated. | -| DEVOPS-MIRROR-08-001 | DONE (2025-10-19) | DevOps Guild | DEVOPS-REL-14-001 | Stand up managed mirror profiles for `*.stella-ops.org` (Concelier/Excititor), including Helm/Compose overlays, multi-tenant secrets, CDN caching, and sync documentation. | Infra overlays committed, CI smoke deploy hits mirror endpoints, runbooks published for downstream sync and quota management. | -> Note (2025-10-26, BLOCKED): IdentityModel.Tokens patched for logging 9.x, but release bundle still fails because Docker cannot stream multi-arch build context (`unix:///var/run/docker.sock` unavailable, EOF during copy). Retry once docker daemon/socket is healthy; until then `out/release/debug` cannot be generated. -| DEVOPS-CONSOLE-23-001 | BLOCKED (2025-10-26) | DevOps Guild, Console Guild | CONSOLE-CORE-23-001 | Add console CI workflow (pnpm cache, lint, type-check, unit, Storybook a11y, Playwright, Lighthouse) with offline runners and artifact retention for screenshots/reports. | Workflow runs on PR & main, caches reduce install time, failing checks block merges, artifacts uploaded for triage, docs updated. | -> Blocked: Console workspace and package scripts (CONSOLE-CORE-23-001..005) are not yet present; CI cannot execute pnpm/Playwright/Lighthouse until the Next.js app lands. -| DEVOPS-CONSOLE-23-002 | TODO | DevOps Guild, Console Guild | DEVOPS-CONSOLE-23-001, CONSOLE-REL-23-301 | Produce `stella-console` container build + Helm chart overlays with deterministic digests, SBOM/provenance artefacts, and offline bundle packaging scripts. | Container published to registry mirror, Helm values committed, SBOM/attestations generated, offline kit job passes smoke test, docs updated. | -| DEVOPS-LAUNCH-18-100 | DONE (2025-10-26) | DevOps Guild | - | Finalise production environment footprint (clusters, secrets, network overlays) for full-platform go-live. | IaC/compose overlays committed, secrets placeholders documented, dry-run deploy succeeds in staging. | -| DEVOPS-LAUNCH-18-900 | DONE (2025-10-26) | DevOps Guild, Module Leads | Wave 0 completion | Collect “full implementation” sign-off from module owners and consolidate launch readiness checklist. | Sign-off record stored under `docs/ops/launch-readiness.md`; outstanding gaps triaged; checklist approved. | -| DEVOPS-LAUNCH-18-001 | DONE (2025-10-26) | DevOps Guild | DEVOPS-LAUNCH-18-100, DEVOPS-LAUNCH-18-900 | Production launch cutover rehearsal and runbook publication. | `docs/ops/launch-cutover.md` drafted, rehearsal executed with rollback drill, approvals captured. | -| DEVOPS-NUGET-13-001 | DONE (2025-10-25) | DevOps Guild, Platform Leads | DEVOPS-REL-14-001 | Add .NET 10 preview feeds / local mirrors so `Microsoft.Extensions.*` 10.0 preview packages restore offline; refresh restore docs. | NuGet.config maps preview feeds (or local mirrored packages), `dotnet restore` succeeds for Excititor/Concelier solutions without ad-hoc feed edits, docs updated for offline bootstrap. | -| DEVOPS-NUGET-13-002 | DONE (2025-10-26) | DevOps Guild | DEVOPS-NUGET-13-001 | Ensure all solutions/projects prefer `local-nuget` before public sources and document restore order validation. 
| `NuGet.config` and solution-level configs resolve from `local-nuget` first; automated check verifies priority; docs updated for restore ordering. | -| DEVOPS-NUGET-13-003 | DONE (2025-10-26) | DevOps Guild, Platform Leads | DEVOPS-NUGET-13-002 | Sweep `Microsoft.*` NuGet dependencies pinned to 8.* and upgrade to latest .NET 10 equivalents (or .NET 9 when 10 unavailable), updating restore guidance. | Dependency audit shows no 8.* `Microsoft.*` packages remaining; CI builds green; changelog/doc sections capture upgrade rationale. | - -## Policy Engine v2 - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DEVOPS-POLICY-20-001 | DONE (2025-10-26) | DevOps Guild, Policy Guild | POLICY-ENGINE-20-001 | Integrate DSL linting in CI (parser/compile) to block invalid policies; add pipeline step compiling sample policies. | CI fails on syntax errors; lint logs surfaced; docs updated with pipeline instructions. | -| DEVOPS-POLICY-20-003 | DONE (2025-10-26) | DevOps Guild, QA Guild | DEVOPS-POLICY-20-001, POLICY-ENGINE-20-005 | Determinism CI: run Policy Engine twice with identical inputs and diff outputs to guard non-determinism. | CI job compares outputs, fails on differences, logs stored; documentation updated. | -| DEVOPS-POLICY-20-004 | DONE (2025-10-27) | DevOps Guild, Scheduler Guild, CLI Guild | SCHED-MODELS-20-001, CLI-POLICY-20-002 | Automate policy schema exports: generate JSON Schema from `PolicyRun*` DTOs during CI, publish artefacts, and emit change alerts for CLI consumers (Slack + changelog). | CI stage outputs versioned schema files, uploads artefacts, notifies #policy-engine channel on change; docs/CLI references updated. | -> 2025-10-27: `.gitea/workflows/build-test-deploy.yml` publishes the `policy-schema-exports` artefact under `artifacts/policy-schemas//` and posts Slack diffs via `POLICY_ENGINE_SCHEMA_WEBHOOK`; diff stored as `policy-schema-diff.patch`. - -## Graph Explorer v1 - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| - -## Orchestrator Dashboard - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DEVOPS-ORCH-32-001 | TODO | DevOps Guild, Orchestrator Service Guild | ORCH-SVC-32-001 | Provision orchestrator Postgres/message-bus infrastructure, add CI smoke deploy, seed Grafana dashboards (queue depth, inflight jobs), and document bootstrap. | Helm/Compose profiles committed; CI smoke deploy runs; dashboards live with metrics; runbook updated. | -| DEVOPS-ORCH-33-001 | TODO | DevOps Guild, Observability Guild | DEVOPS-ORCH-32-001, ORCH-SVC-33-001..003 | Publish Grafana dashboards/alerts for rate limiter, backpressure, error clustering, and DLQ depth; integrate with on-call rotations. | Dashboards and alerts configured; synthetic tests validate thresholds; on-call playbook updated. | -| DEVOPS-ORCH-34-001 | TODO | DevOps Guild, Orchestrator Service Guild | DEVOPS-ORCH-33-001, ORCH-SVC-34-001..003 | Harden production monitoring (synthetic probes, burn-rate alerts, replay smoke), document incident response, and prep GA readiness checklist. | Synthetic probes created; burn-rate alerts firing on test scenario; GA checklist approved; runbook linked. 
| - -## Link-Not-Merge v1 - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DEVOPS-LNM-22-001 | BLOCKED (2025-10-27) | DevOps Guild, Concelier Guild | CONCELIER-LNM-21-102 | Run migration/backfill pipelines for advisory observations/linksets in staging, validate counts/conflicts, and automate deployment steps. Awaiting storage backfill tooling. | -| DEVOPS-LNM-22-002 | BLOCKED (2025-10-27) | DevOps Guild, Excititor Guild | EXCITITOR-LNM-21-102 | Execute VEX observation/linkset backfill with monitoring; ensure NATS/Redis events integrated; document ops runbook. Blocked until Excititor storage migration lands. | -| DEVOPS-LNM-22-003 | TODO | DevOps Guild, Observability Guild | CONCELIER-LNM-21-005, EXCITITOR-LNM-21-005 | Add CI/monitoring coverage for new metrics (`advisory_observations_total`, `linksets_total`, etc.) and alerts on ingest-to-API SLA breaches. | Metrics scraped into Grafana; alert thresholds set; CI job verifies metric emission. | - -## Graph & Vuln Explorer v1 - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DEVOPS-GRAPH-24-001 | TODO | DevOps Guild, SBOM Service Guild | SBOM-GRAPH-24-002 | Load test graph index/adjacency APIs with 40k-node assets; capture perf dashboards and alert thresholds. | Perf suite added; dashboards live; alerts configured. | -| DEVOPS-GRAPH-24-002 | TODO | DevOps Guild, UI Guild | UI-GRAPH-24-001..005 | Integrate synthetic UI perf runs (Playwright/WebGL metrics) for Graph/Vuln explorers; fail builds on regression. | CI job runs UI perf tests; baseline stored; documentation updated. | -| DEVOPS-GRAPH-24-003 | TODO | DevOps Guild | WEB-GRAPH-24-002 | Implement smoke job for simulation endpoints ensuring we stay within SLA (<3s upgrade) and log results. | Smoke job in CI; alerts when SLA breached; runbook documented. | -| DEVOPS-POLICY-27-001 | TODO | DevOps Guild, DevEx/CLI Guild | CLI-POLICY-27-001, REGISTRY-API-27-001 | Add CI pipeline stages to run `stella policy lint|compile|test` with secret scanning on policy sources for PRs touching `/policies/**`; publish diagnostics artifacts. | Pipeline executes on PR/main, failures block merges, secret scan summary uploaded, docs updated. | -| DEVOPS-POLICY-27-002 | TODO | DevOps Guild, Policy Registry Guild | REGISTRY-API-27-005, SCHED-WORKER-27-301 | Provide optional batch simulation CI job (staging inventory) that triggers Registry run, polls results, and posts markdown summary to PR; enforce drift thresholds. | Job configurable via label, summary comment generated, drift threshold gates merges, runbook documented. | -| DEVOPS-POLICY-27-003 | TODO | DevOps Guild, Security Guild | AUTH-POLICY-27-002, REGISTRY-API-27-007 | Manage signing key material for policy publish pipeline (OIDC workload identity + cosign), rotate keys, and document verification steps; integrate attestation verification stage. | Keys stored in secure vault, rotation procedure documented, CI verifies attestations, audit logs recorded. | -| DEVOPS-POLICY-27-004 | TODO | DevOps Guild, Observability Guild | WEB-POLICY-27-005, TELEMETRY-CONSOLE-27-001 | Create dashboards/alerts for policy compile latency, simulation queue depth, approval latency, and promotion outcomes; integrate with on-call playbooks. | Grafana dashboards live, alerts tuned, runbooks updated, observability tests verify metric ingestion. 
| -> Remark (2025-10-20): Repacked `Mongo2Go` local feed to require MongoDB.Driver 3.5.0 + SharpCompress 0.41.0; cache regression tests green and NU1902/NU1903 suppressed. -> Remark (2025-10-21): Compose/Helm profiles now surface `SCANNER__EVENTS__*` toggles with docs pointing at new `.env` placeholders. - -## Reachability v1 - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DEVOPS-SIG-26-001 | TODO | DevOps Guild, Signals Guild | SIGNALS-24-001 | Provision CI/CD pipelines, Helm/Compose manifests for Signals service, including artifact storage and Redis dependencies. | Pipelines ship Signals service; deployment docs updated; smoke tests green. | -| DEVOPS-SIG-26-002 | TODO | DevOps Guild, Observability Guild | SIGNALS-24-004 | Create dashboards/alerts for reachability scoring latency, cache hit rates, sensor staleness. | Dashboards live; alert thresholds configured; documentation updated. | -| DEVOPS-VULN-29-001 | TODO | DevOps Guild, Findings Ledger Guild | LEDGER-29-002..009 | Provision CI jobs for ledger projector (replay, determinism), set up backups, monitor Merkle anchoring, and automate verification. | CI job verifies hash chains; backups documented; alerts for anchoring failures configured. | -| DEVOPS-VULN-29-002 | TODO | DevOps Guild, Vuln Explorer API Guild | VULN-API-29-002..009 | Configure load/perf tests (5M findings/tenant), query budget enforcement, API SLO dashboards, and alerts for `vuln_list_latency` and `projection_lag`. | Perf suite integrated; dashboards live; alerts firing; runbooks updated. | -| DEVOPS-VULN-29-003 | TODO | DevOps Guild, Console Guild | WEB-VULN-29-004, CONSOLE-VULN-29-007 | Instrument analytics pipeline for Vuln Explorer (telemetry ingestion, query hashes), ensure compliance with privacy/PII guardrails, and update observability docs. | Telemetry pipeline operational; PII redaction verified; docs updated with checklist. | -| DEVOPS-VEX-30-001 | TODO | DevOps Guild, VEX Lens Guild | VEXLENS-30-009, ISSUER-30-005 | Provision CI, load tests, dashboards, alerts for VEX Lens and Issuer Directory (compute latency, disputed totals, signature verification rates). | CI/perf suites running; dashboards live; alerts configured; docs updated. | -| DEVOPS-AIAI-31-001 | TODO | DevOps Guild, Advisory AI Guild | AIAI-31-006..007 | Stand up CI pipelines, inference monitoring, privacy logging review, and perf dashboards for Advisory AI (summaries/conflicts/remediation). | CI covers golden outputs, telemetry dashboards live, privacy controls reviewed, alerts configured. | - -## Export Center -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DEVOPS-EXPORT-35-001 | BLOCKED (2025-10-29) | DevOps Guild, Exporter Service Guild | EXPORT-SVC-35-001..006 | Establish exporter CI pipeline (lint/test/perf smoke), configure object storage fixtures, seed Grafana dashboards, and document bootstrap steps. | CI pipeline running; smoke export job seeded; dashboards live; runbook updated. | -| DEVOPS-EXPORT-36-001 | TODO | DevOps Guild, Exporter Service Guild | DEVOPS-EXPORT-35-001, EXPORT-SVC-36-001..004 | Integrate Trivy compatibility validation, cosign signature checks, `trivy module db import` smoke tests, OCI distribution verification, and throughput/error dashboards. | CI executes cosign + Trivy import validation; OCI push smoke passes; dashboards/alerts configured. 
| -| DEVOPS-EXPORT-37-001 | TODO | DevOps Guild, Exporter Service Guild | DEVOPS-EXPORT-36-001, EXPORT-SVC-37-001..004 | Finalize exporter monitoring (failure alerts, verify metrics, retention jobs) and chaos/latency tests ahead of GA. | Alerts tuned; chaos tests documented; retention monitoring active; runbook updated. | - -## CLI Parity & Task Packs - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DEVOPS-CLI-41-001 | TODO | DevOps Guild, DevEx/CLI Guild | CLI-CORE-41-001 | Establish CLI build pipeline (multi-platform binaries, SBOM, checksums), parity matrix CI enforcement, and release artifact signing. | Build pipeline operational; SBOM/checksums published; parity gate failing on drift; docs updated. | -| DEVOPS-CLI-42-001 | TODO | DevOps Guild | DEVOPS-CLI-41-001, CLI-PARITY-41-001 | Add CLI golden output tests, parity diff automation, pack run CI harness, and artifact cache for remote mode. | Golden tests running; parity diff automation in CI; pack run harness executes sample packs; documentation updated. | -| DEVOPS-CLI-43-001 | DOING (2025-10-27) | DevOps Guild | DEVOPS-CLI-42-001, TASKRUN-42-001 | Finalize multi-platform release automation, SBOM signing, parity gate enforcement, and Task Pack chaos tests. | Release automation verified; SBOM signed; parity gate enforced; chaos tests documented. | -> 2025-10-27: Release pipeline now packages CLI multi-platform artefacts with SBOM/signature coverage and enforces the CLI parity gate (`ops/devops/check_cli_parity.py`). Task Pack chaos smoke still pending CLI pack command delivery. -| DEVOPS-CLI-43-002 | TODO | DevOps Guild, Task Runner Guild | CLI-PACKS-43-001, TASKRUN-43-001 | Implement Task Pack chaos smoke in CI (random failure injection, resume, sealed-mode toggle) and publish evidence bundles for review. | Chaos smoke job runs nightly; failures alert Slack; evidence stored in `out/pack-chaos`; runbook updated. | -| DEVOPS-CLI-43-003 | TODO | DevOps Guild, DevEx/CLI Guild | CLI-PARITY-41-001, CLI-PACKS-42-001 | Integrate CLI golden output/parity diff automation into release gating; export parity report artifact consumed by Console Downloads workspace. | `check_cli_parity.py` wired to compare parity matrix and CLI outputs; artifact uploaded; release fails on regressions. - -## Containerized Distribution (Epic 13) - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DEVOPS-CONTAINERS-44-001 | TODO | DevOps Guild | DOCKER-44-001..003 | Automate multi-arch image builds with buildx, SBOM generation, cosign signing, and signature verification in CI. | Pipeline builds amd64/arm64; SBOMs pushed as referrers; cosign verify job passes. | -| DEVOPS-CONTAINERS-45-001 | TODO | DevOps Guild | HELM-45-001 | Add Compose and Helm smoke tests (fresh VM + kind cluster) to CI; publish test artifacts and logs. | CI jobs running; failures block releases; documentation updated. | -| DEVOPS-CONTAINERS-46-001 | TODO | DevOps Guild | DEPLOY-PACKS-43-001 | Build air-gap bundle generator (`tools/make-airgap-bundle.sh`), produce signed bundle, and verify in CI using private registry. | Bundle artifact produced with signatures/checksums; verification job passes; instructions documented. 
| - -### Container Images (Epic 13) - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DOCKER-44-001 | TODO | DevOps Guild, Service Owners | DEVOPS-CLI-41-001 | Author multi-stage Dockerfiles for all core services (API, Console, Orchestrator, Task Runner, Conseiller, Excitator, Policy, Notify, Export, AI) with non-root users, read-only file systems, and health scripts. | Dockerfiles committed; images build successfully; container security scans clean; health endpoints reachable. | -| DOCKER-44-002 | TODO | DevOps Guild | DOCKER-44-001 | Generate SBOMs and cosign attestations for each image and integrate verification into CI. | SBOMs attached as OCI artifacts; cosign signatures published; CI verifies signatures prior to release. | -| DOCKER-44-003 | TODO | DevOps Guild | DOCKER-44-001 | Implement `/health/liveness`, `/health/readiness`, `/version`, `/metrics`, and ensure capability endpoint returns `merge=false` for Conseiller/Excitator. | Endpoints available across services; automated tests confirm responses; documentation updated with imposed rule reminder. | - -## Authority-Backed Scopes & Tenancy (Epic 14) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DEVOPS-TEN-47-001 | TODO | DevOps Guild | AUTH-TEN-47-001 | Add JWKS cache monitoring, signature verification regression tests, and token expiration chaos tests to CI. | CI verifies tokens using cached keys; chaos test for expired keys passes; documentation updated. | -| DEVOPS-TEN-48-001 | TODO | DevOps Guild | WEB-TEN-48-001 | Build integration tests to assert RLS enforcement, tenant-prefixed object storage, and audit event emission; set up lint to prevent raw SQL bypass. | Tests fail on cross-tenant access; lint enforced; dashboards capture audit events. | -| DEVOPS-TEN-49-001 | TODO | DevOps Guild | AUTH-TEN-49-001 | Deploy audit pipeline, scope usage metrics, JWKS outage chaos tests, and tenant load/perf benchmarks. | Audit pipeline live; metrics dashboards updated; chaos tests documented; perf benchmarks recorded. | - -## SDKs & OpenAPI (Epic 17) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DEVOPS-OAS-61-001 | TODO | DevOps Guild, API Contracts Guild | OAS-61-002 | Add CI stages for OpenAPI linting, validation, and compatibility diff; enforce gating on PRs. | Pipeline active; merge blocked on failures; documentation updated. | -| DEVOPS-OAS-61-002 | TODO | DevOps Guild, Contract Testing Guild | CONTR-62-002 | Integrate mock server + contract test suite into PR and nightly workflows; publish artifacts. | Tests run in CI; artifacts stored; failures alert. | -| DEVOPS-SDK-63-001 | TODO | DevOps Guild, SDK Release Guild | SDKREL-63-001 | Provision registry credentials, signing keys, and secure storage for SDK publishing pipelines. | Keys stored/rotated; publish pipeline authenticated; audit logs recorded. | -| DEVOPS-DEVPORT-63-001 | TODO | DevOps Guild, Developer Portal Guild | DEVPORT-62-001 | Automate developer portal build pipeline with caching, link & accessibility checks, performance budgets. | Pipeline enforced; reports archived; failures gate merges. 
| -| DEVOPS-DEVPORT-64-001 | TODO | DevOps Guild, DevPortal Offline Guild | DVOFF-64-001 | Schedule `devportal --offline` nightly builds with checksum validation and artifact retention policies. | Nightly job running; checksums published; retention policy documented. | - -## Attestor Console (Epic 19) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DEVOPS-ATTEST-73-001 | TODO | DevOps Guild, Attestor Service Guild | ATTESTOR-72-002 | Provision CI pipelines for attestor service (lint/test/security scan, seed data) and manage secrets for KMS drivers. | CI pipeline running; secrets stored securely; docs updated. | -| DEVOPS-ATTEST-73-002 | TODO | DevOps Guild, KMS Guild | KMS-72-001 | Establish secure storage for signing keys (vault integration, rotation schedule) and audit logging. | Key storage configured; rotation documented; audit logs verified. | -| DEVOPS-ATTEST-74-001 | TODO | DevOps Guild, Transparency Guild | TRANSP-74-001 | Deploy transparency log witness infrastructure and monitoring. | Witness service deployed; dashboards/alerts live. | -| DEVOPS-ATTEST-74-002 | TODO | DevOps Guild, Export Attestation Guild | EXPORT-ATTEST-74-001 | Integrate attestation bundle builds into release/offline pipelines with checksum verification. | Bundle job in CI; checksum verification passes; docs updated. | -| DEVOPS-ATTEST-75-001 | TODO | DevOps Guild, Observability Guild | ATTEST-VERIFY-74-001 | Add dashboards/alerts for signing latency, verification failures, key rotation events. | Dashboards live; alerts configured. | +# DevOps Task Board + +## Governance & Rules + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DEVOPS-RULES-33-001 | DOING (2025-10-26) | DevOps Guild, Platform Leads | — | Contracts & Rules anchor:
• Gateway proxies only; Policy Engine composes overlays/simulations.
• AOC ingestion cannot merge; only lossless canonicalization.
• One graph platform: Graph Indexer + Graph API. Cartographer retired. | Rules posted in SPRINTS/TASKS; duplicates cleaned per guidance; reviewers acknowledge in changelog. | + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DEVOPS-HELM-09-001 | DONE | DevOps Guild | SCANNER-WEB-09-101 | Create Helm/Compose environment profiles (dev, staging, airgap) with deterministic digests. | Profiles committed under `deploy/`; docs updated; CI smoke deploy passes. | +| DEVOPS-SCANNER-09-204 | DONE (2025-10-21) | DevOps Guild, Scanner WebService Guild | SCANNER-EVENTS-15-201 | Surface `SCANNER__EVENTS__*` environment variables across docker-compose (dev/stage/airgap) and Helm values, defaulting to share the Redis queue DSN. | Compose/Helm configs ship enabled Redis event publishing with documented overrides; lint jobs updated; docs cross-link to new knobs. | +| DEVOPS-SCANNER-09-205 | DONE (2025-10-21) | DevOps Guild, Notify Guild | DEVOPS-SCANNER-09-204 | Add Notify smoke stage that tails the Redis stream and asserts `scanner.report.ready`/`scanner.scan.completed` reach Notify WebService in staging. | CI job reads Redis stream during scanner smoke deploy, confirms Notify ingestion via API, alerts on failure. | +| DEVOPS-PERF-10-001 | DONE | DevOps Guild | BENCH-SCANNER-10-001 | Add perf smoke job (SBOM compose <5 s target) to CI. | CI job runs sample build verifying <5 s; alerts configured. | +| DEVOPS-PERF-10-002 | DONE (2025-10-23) | DevOps Guild | BENCH-SCANNER-10-002 | Publish analyzer bench metrics to Grafana/perf workbook and alarm on ≥20 % regressions. | CI exports JSON for dashboards; Grafana panel wired; Ops on-call doc updated with alert hook. | +| DEVOPS-AOC-19-001 | BLOCKED (2025-10-26) | DevOps Guild, Platform Guild | WEB-AOC-19-003 | Integrate the AOC Roslyn analyzer and guard tests into CI, failing builds when ingestion projects attempt banned writes. | Analyzer runs in PR/CI pipelines, results surfaced in build summary, docs updated under `docs/ops/ci-aoc.md`. | +> Docs hand-off (2025-10-26): see `docs/ingestion/aggregation-only-contract.md` §5, `docs/architecture/overview.md`, and `docs/cli/cli-reference.md` for guard + verifier expectations. +| DEVOPS-AOC-19-002 | BLOCKED (2025-10-26) | DevOps Guild | CLI-AOC-19-002, CONCELIER-WEB-AOC-19-004, EXCITITOR-WEB-AOC-19-004 | Add pipeline stage executing `stella aoc verify --since` against seeded Mongo snapshots for Concelier + Excititor, publishing violation report artefacts. | Stage runs on main/nightly, fails on violations, artifacts retained, runbook documented. | +> Blocked: waiting on CLI verifier command and Concelier/Excititor guard endpoints to land (CLI-AOC-19-002, CONCELIER-WEB-AOC-19-004, EXCITITOR-WEB-AOC-19-004). +| DEVOPS-AOC-19-003 | BLOCKED (2025-10-26) | DevOps Guild, QA Guild | CONCELIER-WEB-AOC-19-003, EXCITITOR-WEB-AOC-19-003 | Enforce unit test coverage thresholds for AOC guard suites and ensure coverage exported to dashboards. | Coverage report includes guard projects, threshold gate passes/fails as expected, dashboards refreshed with new metrics. | +> Blocked: guard coverage suites and exporter hooks pending in Concelier/Excititor (CONCELIER-WEB-AOC-19-003, EXCITITOR-WEB-AOC-19-003). 
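For DEVOPS-AOC-19-002 above, the blocked nightly stage reduces to a small wrapper once the CLI verb lands: run `stella aoc verify --since` per service, keep the violation report as a build artefact, and fail the job when violations are found. The sketch below is illustrative only and assumes that contract; the `--service`/`--output` flags, the 24h window, and the `out/aoc-verify` path are placeholders, not the final CLI surface.

```python
#!/usr/bin/env python3
"""Illustrative nightly AOC verification stage (assumes the CLI surface from CLI-AOC-19-002)."""
from __future__ import annotations

import pathlib
import subprocess
import sys

SERVICES = ("concelier", "excititor")        # services named by DEVOPS-AOC-19-002
SINCE = "24h"                                # assumed window; align with the nightly cadence
REPORT_DIR = pathlib.Path("out/aoc-verify")  # hypothetical artefact location


def main() -> int:
    REPORT_DIR.mkdir(parents=True, exist_ok=True)
    failed = False
    for service in SERVICES:
        report = REPORT_DIR / f"{service}-violations.json"
        # Flag names below are provisional until the CLI verb ships.
        cmd = ["stella", "aoc", "verify", "--since", SINCE,
               "--service", service, "--output", str(report)]
        result = subprocess.run(cmd, check=False)
        if result.returncode != 0:
            print(f"AOC violations reported for {service}; see {report}", file=sys.stderr)
            failed = True
    return 1 if failed else 0


if __name__ == "__main__":
    raise SystemExit(main())
```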
+| DEVOPS-AOC-19-101 | TODO (2025-10-28) | DevOps Guild, Concelier Storage Guild | CONCELIER-STORE-AOC-19-002 | Draft supersedes backfill rollout (freeze window, dry-run steps, rollback) once advisory_raw idempotency index passes staging verification. | Runbook committed in `docs/deploy/containers.md` + Offline Kit notes, staging rehearsal scheduled with dependencies captured in SPRINTS. | +| DEVOPS-OBS-50-001 | DONE (2025-10-26) | DevOps Guild, Observability Guild | TELEMETRY-OBS-50-001 | Deliver default OpenTelemetry collector deployment (Compose/Helm manifests), OTLP ingestion endpoints, and secure pipeline (authN, mTLS, tenant partitioning). Provide smoke test verifying traces/logs/metrics ingestion. | Collector manifests committed; smoke test green; docs updated; imposed rule banner reminder noted. | +| DEVOPS-OBS-50-002 | DOING (2025-10-26) | DevOps Guild, Security Guild | DEVOPS-OBS-50-001, TELEMETRY-OBS-51-002 | Stand up multi-tenant storage backends (Prometheus, Tempo/Jaeger, Loki) with retention policies, tenant isolation, and redaction guard rails. Integrate with Authority scopes for read paths. | Storage stack deployed with auth; retention configured; integration tests verify tenant isolation; runbook drafted. | +> Coordination started with Observability Guild (2025-10-26) to schedule staging rollout and provision service accounts. Staging bootstrap commands and secret names documented in `docs/ops/telemetry-storage.md`. +| DEVOPS-OBS-50-003 | DONE (2025-10-26) | DevOps Guild, Offline Kit Guild | DEVOPS-OBS-50-001 | Package telemetry stack configs for air-gapped installs (Offline Kit bundle, documented overrides, sample values) and automate checksum/signature generation. | Offline bundle includes collector+storage configs; checksums published; docs cross-linked; imposed rule annotation recorded. | +| DEVOPS-OBS-51-001 | TODO | DevOps Guild, Observability Guild | WEB-OBS-51-001, DEVOPS-OBS-50-001 | Implement SLO evaluator service (burn rate calculators, webhook emitters), Grafana dashboards, and alert routing to Notifier. Provide Terraform/Helm automation. | Dashboards live; evaluator emits webhooks; alert runbook referenced; staging alert fired in test. | +| DEVOPS-OBS-52-001 | TODO | DevOps Guild, Timeline Indexer Guild | TIMELINE-OBS-52-002 | Configure streaming pipeline (NATS/Redis/Kafka) with retention, partitioning, and backpressure tuning for timeline events; add CI validation of schema + rate caps. | Pipeline deployed; load test meets SLA; schema validation job passes; documentation updated. | +| DEVOPS-OBS-53-001 | TODO | DevOps Guild, Evidence Locker Guild | EVID-OBS-53-001 | Provision object storage with WORM/retention options (S3 Object Lock / MinIO immutability), legal hold automation, and backup/restore scripts for evidence locker. | Storage configured with WORM; legal hold script documented; backup test performed; runbook updated. | +| DEVOPS-OBS-54-001 | TODO | DevOps Guild, Security Guild | PROV-OBS-53-002, EVID-OBS-54-001 | Manage provenance signing infrastructure (KMS keys, rotation schedule, timestamp authority integration) and integrate verification jobs into CI. | Keys provisioned with rotation policy; timestamp authority configured; CI verifies sample bundles; audit trail stored. | +| DEVOPS-OBS-55-001 | TODO | DevOps Guild, Ops Guild | DEVOPS-OBS-51-001, WEB-OBS-55-001 | Implement incident mode automation: feature flag service, auto-activation via SLO burn-rate, retention override management, and post-incident reset job. 
| Incident mode toggles via API/CLI; automation tested in staging; reset job verified; runbook referenced. | + +## Air-Gapped Mode (Epic 16) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DEVOPS-AIRGAP-56-001 | TODO | DevOps Guild | AIRGAP-CTL-56-001 | Ship deny-all egress policies for Kubernetes (NetworkPolicy/eBPF) and docker-compose firewall rules; provide verification script for sealed mode. | Policies committed with tests; verification script passes/fails as expected; docs cross-linked. | +| DEVOPS-AIRGAP-56-002 | TODO | DevOps Guild, AirGap Importer Guild | AIRGAP-IMP-57-002 | Provide import tooling for bundle staging: checksum validation, offline object-store loader scripts, removable media guidance. | Scripts documented; smoke tests validate import; runbook updated. | +| DEVOPS-AIRGAP-56-003 | TODO | DevOps Guild, Container Distribution Guild | EXPORT-AIRGAP-56-002 | Build Bootstrap Pack pipeline bundling images/charts, generating checksums, and publishing manifest for offline transfer. | Pipeline runs in connected env; pack verified in air-gap smoke test; manifest recorded. | +| DEVOPS-AIRGAP-57-001 | TODO | DevOps Guild, Mirror Creator Guild | MIRROR-CRT-56-002 | Automate Mirror Bundle creation jobs with dual-control approvals, artifact signing, and checksum publication. | Approval workflow enforced; CI artifact includes DSSE/TUF metadata; audit logs stored. | +| DEVOPS-AIRGAP-57-002 | TODO | DevOps Guild, Authority Guild | AUTH-OBS-50-001 | Configure sealed-mode CI tests that run services with sealed flag and ensure no egress occurs (iptables + mock DNS). | CI suite fails on attempted egress; reports remediation; documentation updated. | +| DEVOPS-AIRGAP-58-001 | TODO | DevOps Guild, Notifications Guild | NOTIFY-AIRGAP-56-002 | Provide local SMTP/syslog container templates and health checks for sealed environments; integrate into Bootstrap Pack. | Templates deployed successfully; health checks in CI; docs updated. | +| DEVOPS-AIRGAP-58-002 | TODO | DevOps Guild, Observability Guild | DEVOPS-AIRGAP-56-001, DEVOPS-OBS-51-001 | Ship sealed-mode observability stack (Prometheus/Grafana/Tempo/Loki) pre-configured with offline dashboards and no remote exporters. | Stack boots offline; dashboards available; verification script confirms zero egress. | +| DEVOPS-REL-14-001 | DONE (2025-10-26) | DevOps Guild | SIGNER-API-11-101, ATTESTOR-API-11-201 | Deterministic build/release pipeline with SBOM/provenance, signing, manifest generation. | CI pipeline produces signed images + SBOM/attestations, manifests published with verified hashes, docs updated. | +| DEVOPS-REL-14-004 | DONE (2025-10-26) | DevOps Guild, Scanner Guild | DEVOPS-REL-14-001, SCANNER-ANALYZERS-LANG-10-309P | Extend release/offline smoke jobs to exercise the Python analyzer plug-in (warm/cold scans, determinism, signature checks). | Release/Offline pipelines run Python analyzer smoke suite; alerts hooked; docs updated with new coverage matrix. | +| DEVOPS-REL-17-002 | DONE (2025-10-26) | DevOps Guild | DEVOPS-REL-14-001, SCANNER-EMIT-17-701 | Persist stripped-debug artifacts organised by GNU build-id and bundle them into release/offline kits with checksum manifests. | CI job writes `.debug` files under `artifacts/debug/.build-id/`, manifest + checksums published, offline kit includes cache, smoke job proves symbol lookup via build-id. 
| +| DEVOPS-REL-17-004 | BLOCKED (2025-10-26) | DevOps Guild | DEVOPS-REL-17-002 | Ensure release workflow publishes `out/release/debug` (build-id tree + manifest) and fails when symbols are missing. | Release job emits debug artefacts, `mirror_debug_store.py` summary committed, warning cleared from build logs, docs updated. | +| DEVOPS-MIRROR-08-001 | DONE (2025-10-19) | DevOps Guild | DEVOPS-REL-14-001 | Stand up managed mirror profiles for `*.stella-ops.org` (Concelier/Excititor), including Helm/Compose overlays, multi-tenant secrets, CDN caching, and sync documentation. | Infra overlays committed, CI smoke deploy hits mirror endpoints, runbooks published for downstream sync and quota management. | +> Note (2025-10-26, BLOCKED): IdentityModel.Tokens patched for logging 9.x, but release bundle still fails because Docker cannot stream multi-arch build context (`unix:///var/run/docker.sock` unavailable, EOF during copy). Retry once docker daemon/socket is healthy; until then `out/release/debug` cannot be generated. +| DEVOPS-CONSOLE-23-001 | BLOCKED (2025-10-26) | DevOps Guild, Console Guild | CONSOLE-CORE-23-001 | Add console CI workflow (pnpm cache, lint, type-check, unit, Storybook a11y, Playwright, Lighthouse) with offline runners and artifact retention for screenshots/reports. | Workflow runs on PR & main, caches reduce install time, failing checks block merges, artifacts uploaded for triage, docs updated. | +> Blocked: Console workspace and package scripts (CONSOLE-CORE-23-001..005) are not yet present; CI cannot execute pnpm/Playwright/Lighthouse until the Next.js app lands. +| DEVOPS-CONSOLE-23-002 | TODO | DevOps Guild, Console Guild | DEVOPS-CONSOLE-23-001, CONSOLE-REL-23-301 | Produce `stella-console` container build + Helm chart overlays with deterministic digests, SBOM/provenance artefacts, and offline bundle packaging scripts. | Container published to registry mirror, Helm values committed, SBOM/attestations generated, offline kit job passes smoke test, docs updated. | +| DEVOPS-LAUNCH-18-100 | DONE (2025-10-26) | DevOps Guild | - | Finalise production environment footprint (clusters, secrets, network overlays) for full-platform go-live. | IaC/compose overlays committed, secrets placeholders documented, dry-run deploy succeeds in staging. | +| DEVOPS-LAUNCH-18-900 | DONE (2025-10-26) | DevOps Guild, Module Leads | Wave 0 completion | Collect “full implementation” sign-off from module owners and consolidate launch readiness checklist. | Sign-off record stored under `docs/ops/launch-readiness.md`; outstanding gaps triaged; checklist approved. | +| DEVOPS-LAUNCH-18-001 | DONE (2025-10-26) | DevOps Guild | DEVOPS-LAUNCH-18-100, DEVOPS-LAUNCH-18-900 | Production launch cutover rehearsal and runbook publication. | `docs/ops/launch-cutover.md` drafted, rehearsal executed with rollback drill, approvals captured. | +| DEVOPS-NUGET-13-001 | DONE (2025-10-25) | DevOps Guild, Platform Leads | DEVOPS-REL-14-001 | Add .NET 10 preview feeds / local mirrors so `Microsoft.Extensions.*` 10.0 preview packages restore offline; refresh restore docs. | NuGet.config maps preview feeds (or local mirrored packages), `dotnet restore` succeeds for Excititor/Concelier solutions without ad-hoc feed edits, docs updated for offline bootstrap. | +| DEVOPS-NUGET-13-002 | DONE (2025-10-26) | DevOps Guild | DEVOPS-NUGET-13-001 | Ensure all solutions/projects prefer `local-nuget` before public sources and document restore order validation. 
| `NuGet.config` and solution-level configs resolve from `local-nuget` first; automated check verifies priority; docs updated for restore ordering. | +| DEVOPS-NUGET-13-003 | DONE (2025-10-26) | DevOps Guild, Platform Leads | DEVOPS-NUGET-13-002 | Sweep `Microsoft.*` NuGet dependencies pinned to 8.* and upgrade to latest .NET 10 equivalents (or .NET 9 when 10 unavailable), updating restore guidance. | Dependency audit shows no 8.* `Microsoft.*` packages remaining; CI builds green; changelog/doc sections capture upgrade rationale. | + +## Policy Engine v2 + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DEVOPS-POLICY-20-001 | DONE (2025-10-26) | DevOps Guild, Policy Guild | POLICY-ENGINE-20-001 | Integrate DSL linting in CI (parser/compile) to block invalid policies; add pipeline step compiling sample policies. | CI fails on syntax errors; lint logs surfaced; docs updated with pipeline instructions. | +| DEVOPS-POLICY-20-003 | DONE (2025-10-26) | DevOps Guild, QA Guild | DEVOPS-POLICY-20-001, POLICY-ENGINE-20-005 | Determinism CI: run Policy Engine twice with identical inputs and diff outputs to guard non-determinism. | CI job compares outputs, fails on differences, logs stored; documentation updated. | +| DEVOPS-POLICY-20-004 | DONE (2025-10-27) | DevOps Guild, Scheduler Guild, CLI Guild | SCHED-MODELS-20-001, CLI-POLICY-20-002 | Automate policy schema exports: generate JSON Schema from `PolicyRun*` DTOs during CI, publish artefacts, and emit change alerts for CLI consumers (Slack + changelog). | CI stage outputs versioned schema files, uploads artefacts, notifies #policy-engine channel on change; docs/CLI references updated. | +> 2025-10-27: `.gitea/workflows/build-test-deploy.yml` publishes the `policy-schema-exports` artefact under `artifacts/policy-schemas//` and posts Slack diffs via `POLICY_ENGINE_SCHEMA_WEBHOOK`; diff stored as `policy-schema-diff.patch`. + +## Graph Explorer v1 + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| + +## Orchestrator Dashboard + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DEVOPS-ORCH-32-001 | TODO | DevOps Guild, Orchestrator Service Guild | ORCH-SVC-32-001 | Provision orchestrator Postgres/message-bus infrastructure, add CI smoke deploy, seed Grafana dashboards (queue depth, inflight jobs), and document bootstrap. | Helm/Compose profiles committed; CI smoke deploy runs; dashboards live with metrics; runbook updated. | +| DEVOPS-ORCH-33-001 | TODO | DevOps Guild, Observability Guild | DEVOPS-ORCH-32-001, ORCH-SVC-33-001..003 | Publish Grafana dashboards/alerts for rate limiter, backpressure, error clustering, and DLQ depth; integrate with on-call rotations. | Dashboards and alerts configured; synthetic tests validate thresholds; on-call playbook updated. | +| DEVOPS-ORCH-34-001 | TODO | DevOps Guild, Orchestrator Service Guild | DEVOPS-ORCH-33-001, ORCH-SVC-34-001..003 | Harden production monitoring (synthetic probes, burn-rate alerts, replay smoke), document incident response, and prep GA readiness checklist. | Synthetic probes created; burn-rate alerts firing on test scenario; GA checklist approved; runbook linked. 
| + +## Link-Not-Merge v1 + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DEVOPS-LNM-22-001 | BLOCKED (2025-10-27) | DevOps Guild, Concelier Guild | CONCELIER-LNM-21-102 | Run migration/backfill pipelines for advisory observations/linksets in staging, validate counts/conflicts, and automate deployment steps. Awaiting storage backfill tooling. | +| DEVOPS-LNM-22-002 | BLOCKED (2025-10-27) | DevOps Guild, Excititor Guild | EXCITITOR-LNM-21-102 | Execute VEX observation/linkset backfill with monitoring; ensure NATS/Redis events integrated; document ops runbook. Blocked until Excititor storage migration lands. | +| DEVOPS-LNM-22-003 | TODO | DevOps Guild, Observability Guild | CONCELIER-LNM-21-005, EXCITITOR-LNM-21-005 | Add CI/monitoring coverage for new metrics (`advisory_observations_total`, `linksets_total`, etc.) and alerts on ingest-to-API SLA breaches. | Metrics scraped into Grafana; alert thresholds set; CI job verifies metric emission. | + +## Graph & Vuln Explorer v1 + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DEVOPS-GRAPH-24-001 | TODO | DevOps Guild, SBOM Service Guild | SBOM-GRAPH-24-002 | Load test graph index/adjacency APIs with 40k-node assets; capture perf dashboards and alert thresholds. | Perf suite added; dashboards live; alerts configured. | +| DEVOPS-GRAPH-24-002 | TODO | DevOps Guild, UI Guild | UI-GRAPH-24-001..005 | Integrate synthetic UI perf runs (Playwright/WebGL metrics) for Graph/Vuln explorers; fail builds on regression. | CI job runs UI perf tests; baseline stored; documentation updated. | +| DEVOPS-GRAPH-24-003 | TODO | DevOps Guild | WEB-GRAPH-24-002 | Implement smoke job for simulation endpoints ensuring we stay within SLA (<3s upgrade) and log results. | Smoke job in CI; alerts when SLA breached; runbook documented. | +| DEVOPS-POLICY-27-001 | TODO | DevOps Guild, DevEx/CLI Guild | CLI-POLICY-27-001, REGISTRY-API-27-001 | Add CI pipeline stages to run `stella policy lint|compile|test` with secret scanning on policy sources for PRs touching `/policies/**`; publish diagnostics artifacts. | Pipeline executes on PR/main, failures block merges, secret scan summary uploaded, docs updated. | +| DEVOPS-POLICY-27-002 | TODO | DevOps Guild, Policy Registry Guild | REGISTRY-API-27-005, SCHED-WORKER-27-301 | Provide optional batch simulation CI job (staging inventory) that triggers Registry run, polls results, and posts markdown summary to PR; enforce drift thresholds. | Job configurable via label, summary comment generated, drift threshold gates merges, runbook documented. | +| DEVOPS-POLICY-27-003 | TODO | DevOps Guild, Security Guild | AUTH-POLICY-27-002, REGISTRY-API-27-007 | Manage signing key material for policy publish pipeline (OIDC workload identity + cosign), rotate keys, and document verification steps; integrate attestation verification stage. | Keys stored in secure vault, rotation procedure documented, CI verifies attestations, audit logs recorded. | +| DEVOPS-POLICY-27-004 | TODO | DevOps Guild, Observability Guild | WEB-POLICY-27-005, TELEMETRY-CONSOLE-27-001 | Create dashboards/alerts for policy compile latency, simulation queue depth, approval latency, and promotion outcomes; integrate with on-call playbooks. | Grafana dashboards live, alerts tuned, runbooks updated, observability tests verify metric ingestion. 
| +> Remark (2025-10-20): Repacked `Mongo2Go` local feed to require MongoDB.Driver 3.5.0 + SharpCompress 0.41.0; cache regression tests green and NU1902/NU1903 suppressed. +> Remark (2025-10-21): Compose/Helm profiles now surface `SCANNER__EVENTS__*` toggles with docs pointing at new `.env` placeholders. + +## Reachability v1 + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DEVOPS-SIG-26-001 | TODO | DevOps Guild, Signals Guild | SIGNALS-24-001 | Provision CI/CD pipelines, Helm/Compose manifests for Signals service, including artifact storage and Redis dependencies. | Pipelines ship Signals service; deployment docs updated; smoke tests green. | +| DEVOPS-SIG-26-002 | TODO | DevOps Guild, Observability Guild | SIGNALS-24-004 | Create dashboards/alerts for reachability scoring latency, cache hit rates, sensor staleness. | Dashboards live; alert thresholds configured; documentation updated. | +| DEVOPS-VULN-29-001 | TODO | DevOps Guild, Findings Ledger Guild | LEDGER-29-002..009 | Provision CI jobs for ledger projector (replay, determinism), set up backups, monitor Merkle anchoring, and automate verification. | CI job verifies hash chains; backups documented; alerts for anchoring failures configured. | +| DEVOPS-VULN-29-002 | TODO | DevOps Guild, Vuln Explorer API Guild | VULN-API-29-002..009 | Configure load/perf tests (5M findings/tenant), query budget enforcement, API SLO dashboards, and alerts for `vuln_list_latency` and `projection_lag`. | Perf suite integrated; dashboards live; alerts firing; runbooks updated. | +| DEVOPS-VULN-29-003 | TODO | DevOps Guild, Console Guild | WEB-VULN-29-004, CONSOLE-VULN-29-007 | Instrument analytics pipeline for Vuln Explorer (telemetry ingestion, query hashes), ensure compliance with privacy/PII guardrails, and update observability docs. | Telemetry pipeline operational; PII redaction verified; docs updated with checklist. | +| DEVOPS-VEX-30-001 | TODO | DevOps Guild, VEX Lens Guild | VEXLENS-30-009, ISSUER-30-005 | Provision CI, load tests, dashboards, alerts for VEX Lens and Issuer Directory (compute latency, disputed totals, signature verification rates). | CI/perf suites running; dashboards live; alerts configured; docs updated. | +| DEVOPS-AIAI-31-001 | TODO | DevOps Guild, Advisory AI Guild | AIAI-31-006..007 | Stand up CI pipelines, inference monitoring, privacy logging review, and perf dashboards for Advisory AI (summaries/conflicts/remediation). | CI covers golden outputs, telemetry dashboards live, privacy controls reviewed, alerts configured. | + +## Export Center +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DEVOPS-EXPORT-35-001 | BLOCKED (2025-10-29) | DevOps Guild, Exporter Service Guild | EXPORT-SVC-35-001..006 | Establish exporter CI pipeline (lint/test/perf smoke), configure object storage fixtures, seed Grafana dashboards, and document bootstrap steps. | CI pipeline running; smoke export job seeded; dashboards live; runbook updated. | +| DEVOPS-EXPORT-36-001 | TODO | DevOps Guild, Exporter Service Guild | DEVOPS-EXPORT-35-001, EXPORT-SVC-36-001..004 | Integrate Trivy compatibility validation, cosign signature checks, `trivy module db import` smoke tests, OCI distribution verification, and throughput/error dashboards. | CI executes cosign + Trivy import validation; OCI push smoke passes; dashboards/alerts configured. 
| +| DEVOPS-EXPORT-37-001 | TODO | DevOps Guild, Exporter Service Guild | DEVOPS-EXPORT-36-001, EXPORT-SVC-37-001..004 | Finalize exporter monitoring (failure alerts, verify metrics, retention jobs) and chaos/latency tests ahead of GA. | Alerts tuned; chaos tests documented; retention monitoring active; runbook updated. | + +## CLI Parity & Task Packs + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DEVOPS-CLI-41-001 | TODO | DevOps Guild, DevEx/CLI Guild | CLI-CORE-41-001 | Establish CLI build pipeline (multi-platform binaries, SBOM, checksums), parity matrix CI enforcement, and release artifact signing. | Build pipeline operational; SBOM/checksums published; parity gate failing on drift; docs updated. | +| DEVOPS-CLI-42-001 | TODO | DevOps Guild | DEVOPS-CLI-41-001, CLI-PARITY-41-001 | Add CLI golden output tests, parity diff automation, pack run CI harness, and artifact cache for remote mode. | Golden tests running; parity diff automation in CI; pack run harness executes sample packs; documentation updated. | +| DEVOPS-CLI-43-001 | DOING (2025-10-27) | DevOps Guild | DEVOPS-CLI-42-001, TASKRUN-42-001 | Finalize multi-platform release automation, SBOM signing, parity gate enforcement, and Task Pack chaos tests. | Release automation verified; SBOM signed; parity gate enforced; chaos tests documented. | +> 2025-10-27: Release pipeline now packages CLI multi-platform artefacts with SBOM/signature coverage and enforces the CLI parity gate (`ops/devops/check_cli_parity.py`). Task Pack chaos smoke still pending CLI pack command delivery. +| DEVOPS-CLI-43-002 | TODO | DevOps Guild, Task Runner Guild | CLI-PACKS-43-001, TASKRUN-43-001 | Implement Task Pack chaos smoke in CI (random failure injection, resume, sealed-mode toggle) and publish evidence bundles for review. | Chaos smoke job runs nightly; failures alert Slack; evidence stored in `out/pack-chaos`; runbook updated. | +| DEVOPS-CLI-43-003 | TODO | DevOps Guild, DevEx/CLI Guild | CLI-PARITY-41-001, CLI-PACKS-42-001 | Integrate CLI golden output/parity diff automation into release gating; export parity report artifact consumed by Console Downloads workspace. | `check_cli_parity.py` wired to compare parity matrix and CLI outputs; artifact uploaded; release fails on regressions. + +## Containerized Distribution (Epic 13) + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DEVOPS-CONTAINERS-44-001 | TODO | DevOps Guild | DOCKER-44-001..003 | Automate multi-arch image builds with buildx, SBOM generation, cosign signing, and signature verification in CI. | Pipeline builds amd64/arm64; SBOMs pushed as referrers; cosign verify job passes. | +| DEVOPS-CONTAINERS-45-001 | TODO | DevOps Guild | HELM-45-001 | Add Compose and Helm smoke tests (fresh VM + kind cluster) to CI; publish test artifacts and logs. | CI jobs running; failures block releases; documentation updated. | +| DEVOPS-CONTAINERS-46-001 | TODO | DevOps Guild | DEPLOY-PACKS-43-001 | Build air-gap bundle generator (`tools/make-airgap-bundle.sh`), produce signed bundle, and verify in CI using private registry. | Bundle artifact produced with signatures/checksums; verification job passes; instructions documented. 
| + +### Container Images (Epic 13) + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DOCKER-44-001 | TODO | DevOps Guild, Service Owners | DEVOPS-CLI-41-001 | Author multi-stage Dockerfiles for all core services (API, Console, Orchestrator, Task Runner, Concelier, Excititor, Policy, Notify, Export, AI) with non-root users, read-only file systems, and health scripts. | Dockerfiles committed; images build successfully; container security scans clean; health endpoints reachable. | +| DOCKER-44-002 | TODO | DevOps Guild | DOCKER-44-001 | Generate SBOMs and cosign attestations for each image and integrate verification into CI. | SBOMs attached as OCI artifacts; cosign signatures published; CI verifies signatures prior to release. | +| DOCKER-44-003 | TODO | DevOps Guild | DOCKER-44-001 | Implement `/health/liveness`, `/health/readiness`, `/version`, `/metrics`, and ensure capability endpoint returns `merge=false` for Concelier/Excititor. | Endpoints available across services; automated tests confirm responses; documentation updated with imposed rule reminder. | + +## Authority-Backed Scopes & Tenancy (Epic 14) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DEVOPS-TEN-47-001 | TODO | DevOps Guild | AUTH-TEN-47-001 | Add JWKS cache monitoring, signature verification regression tests, and token expiration chaos tests to CI. | CI verifies tokens using cached keys; chaos test for expired keys passes; documentation updated. | +| DEVOPS-TEN-48-001 | TODO | DevOps Guild | WEB-TEN-48-001 | Build integration tests to assert RLS enforcement, tenant-prefixed object storage, and audit event emission; set up lint to prevent raw SQL bypass. | Tests fail on cross-tenant access; lint enforced; dashboards capture audit events. | +| DEVOPS-TEN-49-001 | TODO | DevOps Guild | AUTH-TEN-49-001 | Deploy audit pipeline, scope usage metrics, JWKS outage chaos tests, and tenant load/perf benchmarks. | Audit pipeline live; metrics dashboards updated; chaos tests documented; perf benchmarks recorded. | + +## SDKs & OpenAPI (Epic 17) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DEVOPS-OAS-61-001 | TODO | DevOps Guild, API Contracts Guild | OAS-61-002 | Add CI stages for OpenAPI linting, validation, and compatibility diff; enforce gating on PRs. | Pipeline active; merge blocked on failures; documentation updated. | +| DEVOPS-OAS-61-002 | TODO | DevOps Guild, Contract Testing Guild | CONTR-62-002 | Integrate mock server + contract test suite into PR and nightly workflows; publish artifacts. | Tests run in CI; artifacts stored; failures alert. | +| DEVOPS-SDK-63-001 | TODO | DevOps Guild, SDK Release Guild | SDKREL-63-001 | Provision registry credentials, signing keys, and secure storage for SDK publishing pipelines. | Keys stored/rotated; publish pipeline authenticated; audit logs recorded. | +| DEVOPS-DEVPORT-63-001 | TODO | DevOps Guild, Developer Portal Guild | DEVPORT-62-001 | Automate developer portal build pipeline with caching, link & accessibility checks, performance budgets. | Pipeline enforced; reports archived; failures gate merges.
| +| DEVOPS-DEVPORT-64-001 | TODO | DevOps Guild, DevPortal Offline Guild | DVOFF-64-001 | Schedule `devportal --offline` nightly builds with checksum validation and artifact retention policies. | Nightly job running; checksums published; retention policy documented. | + +## Attestor Console (Epic 19) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DEVOPS-ATTEST-73-001 | TODO | DevOps Guild, Attestor Service Guild | ATTESTOR-72-002 | Provision CI pipelines for attestor service (lint/test/security scan, seed data) and manage secrets for KMS drivers. | CI pipeline running; secrets stored securely; docs updated. | +| DEVOPS-ATTEST-73-002 | TODO | DevOps Guild, KMS Guild | KMS-72-001 | Establish secure storage for signing keys (vault integration, rotation schedule) and audit logging. | Key storage configured; rotation documented; audit logs verified. | +| DEVOPS-ATTEST-74-001 | TODO | DevOps Guild, Transparency Guild | TRANSP-74-001 | Deploy transparency log witness infrastructure and monitoring. | Witness service deployed; dashboards/alerts live. | +| DEVOPS-ATTEST-74-002 | TODO | DevOps Guild, Export Attestation Guild | EXPORT-ATTEST-74-001 | Integrate attestation bundle builds into release/offline pipelines with checksum verification. | Bundle job in CI; checksum verification passes; docs updated. | +| DEVOPS-ATTEST-75-001 | TODO | DevOps Guild, Observability Guild | ATTEST-VERIFY-74-001 | Add dashboards/alerts for signing latency, verification failures, key rotation events. | Dashboards live; alerts configured. | diff --git a/ops/devops/check_cli_parity.py b/ops/devops/check_cli_parity.py index 79bed880..ccbd7ee9 100644 --- a/ops/devops/check_cli_parity.py +++ b/ops/devops/check_cli_parity.py @@ -1,53 +1,53 @@ -#!/usr/bin/env python3 -"""Ensure CLI parity matrix contains no outstanding blockers before release.""" -from __future__ import annotations - -import pathlib -import re -import sys - -REPO_ROOT = pathlib.Path(__file__).resolve().parents[2] -PARITY_DOC = REPO_ROOT / "docs/cli-vs-ui-parity.md" - -BLOCKERS = { - "🟥": "blocking gap", - "❌": "missing feature", - "🚫": "unsupported", -} -WARNINGS = { - "🟡": "partial support", - "⚠️": "warning", -} - - -def main() -> int: - if not PARITY_DOC.exists(): - print(f"❌ Parity matrix not found at {PARITY_DOC}", file=sys.stderr) - return 1 - text = PARITY_DOC.read_text(encoding="utf-8") - blockers: list[str] = [] - warnings: list[str] = [] - for line in text.splitlines(): - for symbol, label in BLOCKERS.items(): - if symbol in line: - blockers.append(f"{label}: {line.strip()}") - for symbol, label in WARNINGS.items(): - if symbol in line: - warnings.append(f"{label}: {line.strip()}") - if blockers: - print("❌ CLI parity gate failed — blocking items present:", file=sys.stderr) - for item in blockers: - print(f" - {item}", file=sys.stderr) - return 1 - if warnings: - print("⚠️ CLI parity gate warnings detected:", file=sys.stderr) - for item in warnings: - print(f" - {item}", file=sys.stderr) - print("Treat warnings as failures until parity matrix is fully green.", file=sys.stderr) - return 1 - print("✅ CLI parity matrix has no blocking or warning entries.") - return 0 - - -if __name__ == "__main__": - raise SystemExit(main()) +#!/usr/bin/env python3 +"""Ensure CLI parity matrix contains no outstanding blockers before release.""" +from __future__ import annotations + +import pathlib +import re +import sys + +REPO_ROOT = 
pathlib.Path(__file__).resolve().parents[2] +PARITY_DOC = REPO_ROOT / "docs/cli-vs-ui-parity.md" + +BLOCKERS = { + "🟥": "blocking gap", + "❌": "missing feature", + "🚫": "unsupported", +} +WARNINGS = { + "🟡": "partial support", + "⚠️": "warning", +} + + +def main() -> int: + if not PARITY_DOC.exists(): + print(f"❌ Parity matrix not found at {PARITY_DOC}", file=sys.stderr) + return 1 + text = PARITY_DOC.read_text(encoding="utf-8") + blockers: list[str] = [] + warnings: list[str] = [] + for line in text.splitlines(): + for symbol, label in BLOCKERS.items(): + if symbol in line: + blockers.append(f"{label}: {line.strip()}") + for symbol, label in WARNINGS.items(): + if symbol in line: + warnings.append(f"{label}: {line.strip()}") + if blockers: + print("❌ CLI parity gate failed — blocking items present:", file=sys.stderr) + for item in blockers: + print(f" - {item}", file=sys.stderr) + return 1 + if warnings: + print("⚠️ CLI parity gate warnings detected:", file=sys.stderr) + for item in warnings: + print(f" - {item}", file=sys.stderr) + print("Treat warnings as failures until parity matrix is fully green.", file=sys.stderr) + return 1 + print("✅ CLI parity matrix has no blocking or warning entries.") + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/ops/devops/nuget-preview-packages.csv b/ops/devops/nuget-preview-packages.csv index 54780663..1300c9a2 100644 --- a/ops/devops/nuget-preview-packages.csv +++ b/ops/devops/nuget-preview-packages.csv @@ -1,30 +1,30 @@ -# Package,Version,SHA256,SourceBase(optional) -# DotNetPublicFlat=https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 -Microsoft.AspNetCore.Authentication.JwtBearer,10.0.0-rc.2.25502.107,3223f447bde9a3620477305a89520e8becafe23b481a0b423552af572439f8c2,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 -Microsoft.AspNetCore.Mvc.Testing,10.0.0-rc.2.25502.107,b6b53c62e0abefdca30e6ca08ab8357e395177dd9f368ab3ad4bbbd07e517229,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 -Microsoft.AspNetCore.OpenApi,10.0.0-rc.2.25502.107,f64de1fe870306053346a31263e53e29f2fdfe0eae432a3156f8d7d705c81d85,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 -Microsoft.Data.Sqlite,9.0.0-rc.1.24451.1,770b637317e1e924f1b13587b31af0787c8c668b1d9f53f2fccae8ee8704e167,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 -Microsoft.Extensions.Caching.Memory,10.0.0-rc.2.25502.107,6ec6d156ed06b07cbee9fa1c0803b8d54a5f904a0bf0183172f87b63c4044426,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 -Microsoft.Extensions.Configuration,10.0.0-rc.2.25502.107,0716f72cdc99b03946c98c418c39d42208fc65f20301bd1f26a6c174646870f6,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 -Microsoft.Extensions.Configuration.Abstractions,10.0.0-rc.2.25502.107,db6e2cd37c40b5ac5ca7a4f40f5edafda2b6a8690f95a8c64b54c777a1d757c0,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 -Microsoft.Extensions.Configuration.Binder,10.0.0-rc.2.25502.107,80f04da6beef001d3c357584485c2ddc6fdbf3776cfd10f0d7b40dfe8a79ee43,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 -Microsoft.Extensions.Configuration.CommandLine,10.0.0-rc.2.25502.107,91974a95ae35bcfcd5e977427f3d0e6d3416e78678a159f5ec9e55f33a2e19af,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 
-Microsoft.Extensions.Configuration.EnvironmentVariables,10.0.0-rc.2.25502.107,74d65a20e2764d5f42863f5f203b216533fc51b22fb02a8491036feb98ae5fef,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 -Microsoft.Extensions.Configuration.FileExtensions,10.0.0-rc.2.25502.107,5f97b56ea2ba3a1b252022504060351ce457f78ac9055d5fdd1311678721c1a1,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 -Microsoft.Extensions.Configuration.Json,10.0.0-rc.2.25502.107,0ba362c479213eb3425f8e14d8a8495250dbaf2d5dad7c0a4ca8d3239b03c392,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 -Microsoft.Extensions.DependencyInjection,10.0.0-rc.2.25502.107,2e1b51b4fa196f0819adf69a15ad8c3432b64c3b196f2ed3d14b65136a6a8709,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 -Microsoft.Extensions.DependencyInjection.Abstractions,10.0.0-rc.2.25502.107,d6787ccf69e09428b3424974896c09fdabb8040bae06ed318212871817933352,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 -Microsoft.Extensions.Diagnostics.Abstractions,10.0.0-rc.2.25502.107,b4bc47b4b4ded4ab2f134d318179537cbe16aed511bb3672553ea197929dc7d8,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 -Microsoft.Extensions.Diagnostics.HealthChecks,10.0.0-rc.2.25502.107,855fd4da26b955b6b1d036390b1af10564986067b5cc6356cffa081c83eec158,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 -Microsoft.Extensions.Diagnostics.HealthChecks.Abstractions,10.0.0-rc.2.25502.107,59f4724daed68a067a661e208f0a934f253b91ec5d52310d008e185bc2c9294c,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 -Microsoft.Extensions.Hosting,10.0.0-rc.2.25502.107,ea9b1fa8e50acae720294671e6c36d4c58e20cfc9720335ab4f5ad4eba92cf62,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 -Microsoft.Extensions.Hosting.Abstractions,10.0.0-rc.2.25502.107,98fa23ac82e19be221a598fc6f4b469e8b00c4ca2b7a42ad0bfea8b63bbaa9a2,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 -Microsoft.Extensions.Http,10.0.0-rc.2.25502.107,c63c8bf4ca637137a561ca487b674859c2408918c4838a871bb26eb0c809a665,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 -Microsoft.Extensions.Http.Polly,10.0.0-rc.2.25502.107,0b436196bcedd484796795f6a795d7a191294f1190f7a477f1a4937ef7f78110,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 -Microsoft.Extensions.Logging.Abstractions,10.0.0-rc.2.25502.107,92b9a5ed62fe945ee88983af43c347429ec15691c9acb207872c548241cef961,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 -Microsoft.Extensions.Logging.Console,10.0.0-rc.2.25502.107,fa1e10b5d6261675d9d2e97b9584ff9aaea2a2276eac584dfa77a1e35dcc58f5,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 -Microsoft.Extensions.Options,10.0.0-rc.2.25502.107,d208acec60bec3350989694fd443e2d2f0ab583ad5f2c53a2879ade16908e5b4,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 -Microsoft.Extensions.Options.ConfigurationExtensions,10.0.0-rc.2.25502.107,c2863bb28c36fd67f308dd4af486897b512d62ecff2d96613ef954f5bef443e2,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 
-Microsoft.Extensions.TimeProvider.Testing,9.10.0,919a47156fc13f756202702cacc6e853123c84f1b696970445d89f16dfa45829,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 -Microsoft.IdentityModel.Tokens,8.14.0,00b78c7b7023132e1d6b31d305e47524732dce6faca92dd16eb8d05a835bba7a,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 -Microsoft.SourceLink.GitLab,8.0.0,a7efb9c177888f952ea8c88bc5714fc83c64af32b70fb080a1323b8d32233973,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 +# Package,Version,SHA256,SourceBase(optional) +# DotNetPublicFlat=https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 +Microsoft.AspNetCore.Authentication.JwtBearer,10.0.0-rc.2.25502.107,3223f447bde9a3620477305a89520e8becafe23b481a0b423552af572439f8c2,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 +Microsoft.AspNetCore.Mvc.Testing,10.0.0-rc.2.25502.107,b6b53c62e0abefdca30e6ca08ab8357e395177dd9f368ab3ad4bbbd07e517229,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 +Microsoft.AspNetCore.OpenApi,10.0.0-rc.2.25502.107,f64de1fe870306053346a31263e53e29f2fdfe0eae432a3156f8d7d705c81d85,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 +Microsoft.Data.Sqlite,9.0.0-rc.1.24451.1,770b637317e1e924f1b13587b31af0787c8c668b1d9f53f2fccae8ee8704e167,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 +Microsoft.Extensions.Caching.Memory,10.0.0-rc.2.25502.107,6ec6d156ed06b07cbee9fa1c0803b8d54a5f904a0bf0183172f87b63c4044426,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 +Microsoft.Extensions.Configuration,10.0.0-rc.2.25502.107,0716f72cdc99b03946c98c418c39d42208fc65f20301bd1f26a6c174646870f6,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 +Microsoft.Extensions.Configuration.Abstractions,10.0.0-rc.2.25502.107,db6e2cd37c40b5ac5ca7a4f40f5edafda2b6a8690f95a8c64b54c777a1d757c0,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 +Microsoft.Extensions.Configuration.Binder,10.0.0-rc.2.25502.107,80f04da6beef001d3c357584485c2ddc6fdbf3776cfd10f0d7b40dfe8a79ee43,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 +Microsoft.Extensions.Configuration.CommandLine,10.0.0-rc.2.25502.107,91974a95ae35bcfcd5e977427f3d0e6d3416e78678a159f5ec9e55f33a2e19af,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 +Microsoft.Extensions.Configuration.EnvironmentVariables,10.0.0-rc.2.25502.107,74d65a20e2764d5f42863f5f203b216533fc51b22fb02a8491036feb98ae5fef,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 +Microsoft.Extensions.Configuration.FileExtensions,10.0.0-rc.2.25502.107,5f97b56ea2ba3a1b252022504060351ce457f78ac9055d5fdd1311678721c1a1,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 +Microsoft.Extensions.Configuration.Json,10.0.0-rc.2.25502.107,0ba362c479213eb3425f8e14d8a8495250dbaf2d5dad7c0a4ca8d3239b03c392,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 +Microsoft.Extensions.DependencyInjection,10.0.0-rc.2.25502.107,2e1b51b4fa196f0819adf69a15ad8c3432b64c3b196f2ed3d14b65136a6a8709,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 
+Microsoft.Extensions.DependencyInjection.Abstractions,10.0.0-rc.2.25502.107,d6787ccf69e09428b3424974896c09fdabb8040bae06ed318212871817933352,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 +Microsoft.Extensions.Diagnostics.Abstractions,10.0.0-rc.2.25502.107,b4bc47b4b4ded4ab2f134d318179537cbe16aed511bb3672553ea197929dc7d8,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 +Microsoft.Extensions.Diagnostics.HealthChecks,10.0.0-rc.2.25502.107,855fd4da26b955b6b1d036390b1af10564986067b5cc6356cffa081c83eec158,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 +Microsoft.Extensions.Diagnostics.HealthChecks.Abstractions,10.0.0-rc.2.25502.107,59f4724daed68a067a661e208f0a934f253b91ec5d52310d008e185bc2c9294c,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 +Microsoft.Extensions.Hosting,10.0.0-rc.2.25502.107,ea9b1fa8e50acae720294671e6c36d4c58e20cfc9720335ab4f5ad4eba92cf62,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 +Microsoft.Extensions.Hosting.Abstractions,10.0.0-rc.2.25502.107,98fa23ac82e19be221a598fc6f4b469e8b00c4ca2b7a42ad0bfea8b63bbaa9a2,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 +Microsoft.Extensions.Http,10.0.0-rc.2.25502.107,c63c8bf4ca637137a561ca487b674859c2408918c4838a871bb26eb0c809a665,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 +Microsoft.Extensions.Http.Polly,10.0.0-rc.2.25502.107,0b436196bcedd484796795f6a795d7a191294f1190f7a477f1a4937ef7f78110,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 +Microsoft.Extensions.Logging.Abstractions,10.0.0-rc.2.25502.107,92b9a5ed62fe945ee88983af43c347429ec15691c9acb207872c548241cef961,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 +Microsoft.Extensions.Logging.Console,10.0.0-rc.2.25502.107,fa1e10b5d6261675d9d2e97b9584ff9aaea2a2276eac584dfa77a1e35dcc58f5,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 +Microsoft.Extensions.Options,10.0.0-rc.2.25502.107,d208acec60bec3350989694fd443e2d2f0ab583ad5f2c53a2879ade16908e5b4,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 +Microsoft.Extensions.Options.ConfigurationExtensions,10.0.0-rc.2.25502.107,c2863bb28c36fd67f308dd4af486897b512d62ecff2d96613ef954f5bef443e2,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 +Microsoft.Extensions.TimeProvider.Testing,9.10.0,919a47156fc13f756202702cacc6e853123c84f1b696970445d89f16dfa45829,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 +Microsoft.IdentityModel.Tokens,8.14.0,00b78c7b7023132e1d6b31d305e47524732dce6faca92dd16eb8d05a835bba7a,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 +Microsoft.SourceLink.GitLab,8.0.0,a7efb9c177888f952ea8c88bc5714fc83c64af32b70fb080a1323b8d32233973,https://pkgs.dev.azure.com/dnceng/public/_packaging/dotnet-public/nuget/v3/flat2 diff --git a/ops/devops/release/build_release.py b/ops/devops/release/build_release.py index 12b06d98..d6524558 100644 --- a/ops/devops/release/build_release.py +++ b/ops/devops/release/build_release.py @@ -1,1103 +1,1103 @@ -#!/usr/bin/env python3 -"""Deterministic release pipeline helper for StellaOps. - -This script builds service containers, generates SBOM and provenance artefacts, -signs them with cosign, and writes a channel-specific release manifest. 
- -The workflow expects external tooling to be available on PATH: -- docker (with buildx) -- cosign -- helm -- npm / node (for the UI build) -- dotnet SDK (for BuildX plugin publication) -""" -from __future__ import annotations - -import argparse -import contextlib -import datetime as dt -import hashlib -import json -import os -import pathlib -import re -import shlex -import shutil -import stat -import subprocess -import sys -import tarfile -import tempfile -import uuid -import zipfile -from collections import OrderedDict -from typing import Any, Dict, Iterable, List, Mapping, MutableMapping, Optional, Sequence, Tuple - -REPO_ROOT = pathlib.Path(__file__).resolve().parents[3] -DEFAULT_CONFIG = REPO_ROOT / "ops/devops/release/components.json" - -class CommandError(RuntimeError): - pass - -def run(cmd: Sequence[str], *, cwd: Optional[pathlib.Path] = None, env: Optional[Mapping[str, str]] = None, capture: bool = True) -> str: - """Run a subprocess command, returning stdout (text).""" - process_env = os.environ.copy() - if env: - process_env.update(env) - result = subprocess.run( - list(cmd), - cwd=str(cwd) if cwd else None, - env=process_env, - check=False, - capture_output=capture, - text=True, - ) - if process_env.get("STELLAOPS_RELEASE_DEBUG"): - sys.stderr.write(f"[debug] {' '.join(shlex.quote(c) for c in cmd)}\n") - if capture: - sys.stderr.write(result.stdout) - sys.stderr.write(result.stderr) - if result.returncode != 0: - stdout = result.stdout if capture else "" - stderr = result.stderr if capture else "" - raise CommandError(f"Command failed ({result.returncode}): {' '.join(cmd)}\nSTDOUT:\n{stdout}\nSTDERR:\n{stderr}") - - return result.stdout if capture else "" - - -def load_json_config(path: pathlib.Path) -> Dict[str, Any]: - with path.open("r", encoding="utf-8") as handle: - return json.load(handle) - - -def ensure_directory(path: pathlib.Path) -> pathlib.Path: - path.mkdir(parents=True, exist_ok=True) - return path - - -def compute_sha256(path: pathlib.Path) -> str: - sha = hashlib.sha256() - with path.open("rb") as handle: - for chunk in iter(lambda: handle.read(1024 * 1024), b""): - sha.update(chunk) - return sha.hexdigest() - - -def format_scalar(value: Any) -> str: - if isinstance(value, bool): - return "true" if value else "false" - if value is None: - return "null" - if isinstance(value, (int, float)): - return str(value) - text = str(value) - if text == "": - return '""' - if re.search(r"[\s:#\-\[\]\{\}]", text): - return json.dumps(text, ensure_ascii=False) - return text - - -def _yaml_lines(value: Any, indent: int = 0) -> List[str]: - pad = " " * indent - if isinstance(value, Mapping): - lines: List[str] = [] - for key, val in value.items(): - if isinstance(val, (Mapping, list)): - lines.append(f"{pad}{key}:") - lines.extend(_yaml_lines(val, indent + 1)) - else: - lines.append(f"{pad}{key}: {format_scalar(val)}") - if not lines: - lines.append(f"{pad}{{}}") - return lines - if isinstance(value, list): - lines = [] - if not value: - lines.append(f"{pad}[]") - return lines - for item in value: - if isinstance(item, (Mapping, list)): - lines.append(f"{pad}-") - lines.extend(_yaml_lines(item, indent + 1)) - else: - lines.append(f"{pad}- {format_scalar(item)}") - return lines - return [f"{pad}{format_scalar(value)}"] - - -def dump_yaml(data: Mapping[str, Any]) -> str: - lines: List[str] = _yaml_lines(data) - return "\n".join(lines) + "\n" - - -def utc_now_iso() -> str: - return dt.datetime.now(tz=dt.timezone.utc).replace(microsecond=0).isoformat().replace("+00:00", "Z") - - 
-def sanitize_calendar(version: str, explicit: Optional[str]) -> str: - if explicit: - return explicit - # Expect version like 2025.10.0-edge or 2.4.1 - parts = re.findall(r"\d+", version) - if len(parts) >= 2: - return f"{parts[0]}.{parts[1]}" - return dt.datetime.now(tz=dt.timezone.utc).strftime("%Y.%m") - - -class ReleaseBuilder: - def __init__( - self, - *, - repo_root: pathlib.Path, - config: Mapping[str, Any], - version: str, - channel: str, - calendar: str, - release_date: str, - git_sha: str, - output_dir: pathlib.Path, - push: bool, - dry_run: bool, - registry_override: Optional[str] = None, - platforms_override: Optional[Sequence[str]] = None, - skip_signing: bool = False, - cosign_key_ref: Optional[str] = None, - cosign_password: Optional[str] = None, - cosign_identity_token: Optional[str] = None, - tlog_upload: bool = True, - ) -> None: - self.repo_root = repo_root - self.config = config - self.version = version - self.channel = channel - self.calendar = calendar - self.release_date = release_date - self.git_sha = git_sha - self.output_dir = ensure_directory(output_dir) - self.push = push - self.dry_run = dry_run - self.registry = registry_override or config.get("registry") - if not self.registry: - raise ValueError("Config missing 'registry'") - platforms = list(platforms_override) if platforms_override else config.get("platforms") - if not platforms: - platforms = ["linux/amd64", "linux/arm64"] - self.platforms = list(platforms) - self.source_date_epoch = str(int(dt.datetime.fromisoformat(release_date.replace("Z", "+00:00")).timestamp())) - self.artifacts_dir = ensure_directory(self.output_dir / "artifacts") - self.sboms_dir = ensure_directory(self.artifacts_dir / "sboms") - self.provenance_dir = ensure_directory(self.artifacts_dir / "provenance") - self.signature_dir = ensure_directory(self.artifacts_dir / "signatures") - self.metadata_dir = ensure_directory(self.artifacts_dir / "metadata") - self.debug_dir = ensure_directory(self.output_dir / "debug") - self.debug_store_dir = ensure_directory(self.debug_dir / ".build-id") - self.cli_config = config.get("cli") - self.cli_output_dir = ensure_directory(self.output_dir / "cli") if self.cli_config else None - self.temp_dir = pathlib.Path(tempfile.mkdtemp(prefix="stellaops-release-")) - self.skip_signing = skip_signing - self.tlog_upload = tlog_upload - self.cosign_key_ref = cosign_key_ref or os.environ.get("COSIGN_KEY_REF") - self.cosign_identity_token = cosign_identity_token or os.environ.get("COSIGN_IDENTITY_TOKEN") - password = cosign_password if cosign_password is not None else os.environ.get("COSIGN_PASSWORD", "") - self.cosign_env = { - "COSIGN_PASSWORD": password, - "COSIGN_EXPERIMENTAL": "1", - "COSIGN_ALLOW_HTTP_REGISTRY": os.environ.get("COSIGN_ALLOW_HTTP_REGISTRY", "1"), - "COSIGN_DOCKER_MEDIA_TYPES": os.environ.get("COSIGN_DOCKER_MEDIA_TYPES", "1"), - } - # Cache resolved objcopy binaries keyed by machine identifier to avoid repeated lookups. 
- self._objcopy_cache: Dict[str, Optional[str]] = {} - self._missing_symbol_platforms: Dict[str, int] = {} - - # ---------------- - # Build steps - # ---------------- - def run(self) -> Dict[str, Any]: - components_result = [] - if self.dry_run: - print("⚠️ Dry-run enabled; commands will be skipped") - self._prime_buildx_plugin() - for component in self.config.get("components", []): - result = self._build_component(component) - components_result.append(result) - helm_meta = self._package_helm() - compose_meta = self._digest_compose_files() - debug_meta = self._collect_debug_store(components_result) - cli_meta = self._build_cli_artifacts() - manifest = self._compose_manifest(components_result, helm_meta, compose_meta, debug_meta, cli_meta) - return manifest - - def _prime_buildx_plugin(self) -> None: - plugin_cfg = self.config.get("buildxPlugin") - if not plugin_cfg: - return - project = plugin_cfg.get("project") - if not project: - return - out_dir = ensure_directory(self.temp_dir / "buildx") - if not self.dry_run: - run([ - "dotnet", - "publish", - project, - "-c", - "Release", - "-o", - str(out_dir), - ]) - cas_dir = ensure_directory(self.temp_dir / "cas") - run([ - "dotnet", - str(out_dir / "StellaOps.Scanner.Sbomer.BuildXPlugin.dll"), - "handshake", - "--manifest", - str(out_dir), - "--cas", - str(cas_dir), - ]) - - def _component_tags(self, repo: str) -> List[str]: - base = f"{self.registry}/{repo}" - tags = [f"{base}:{self.version}"] - if self.channel: - tags.append(f"{base}:{self.channel}") - return tags - - def _component_ref(self, repo: str, digest: str) -> str: - return f"{self.registry}/{repo}@{digest}" - - def _relative_path(self, path: pathlib.Path) -> str: - try: - return str(path.relative_to(self.output_dir.parent)) - except ValueError: - return str(path) - - def _build_component(self, component: Mapping[str, Any]) -> Mapping[str, Any]: - name = component["name"] - repo = component.get("repository", name) - kind = component.get("kind", "dotnet-service") - dockerfile = component.get("dockerfile") - if not dockerfile: - raise ValueError(f"Component {name} missing dockerfile") - context = component.get("context", ".") - iid_file = self.temp_dir / f"{name}.iid" - metadata_file = self.metadata_dir / f"{name}.metadata.json" - - build_args = { - "VERSION": self.version, - "CHANNEL": self.channel, - "GIT_SHA": self.git_sha, - "SOURCE_DATE_EPOCH": self.source_date_epoch, - } - docker_cfg = self.config.get("docker", {}) - if kind == "dotnet-service": - build_args.update({ - "PROJECT": component["project"], - "ENTRYPOINT_DLL": component["entrypoint"], - "SDK_IMAGE": docker_cfg.get("sdkImage", "mcr.microsoft.com/dotnet/nightly/sdk:10.0"), - "RUNTIME_IMAGE": docker_cfg.get("runtimeImage", "gcr.io/distroless/dotnet/aspnet:latest"), - }) - elif kind == "angular-ui": - build_args.update({ - "NODE_IMAGE": docker_cfg.get("nodeImage", "node:20.14.0-bookworm"), - "NGINX_IMAGE": docker_cfg.get("nginxImage", "nginx:1.27-alpine"), - }) - else: - raise ValueError(f"Unsupported component kind {kind}") - - tags = self._component_tags(repo) - build_cmd = [ - "docker", - "buildx", - "build", - "--file", - dockerfile, - "--metadata-file", - str(metadata_file), - "--iidfile", - str(iid_file), - "--progress", - "plain", - "--platform", - ",".join(self.platforms), - ] - for key, value in build_args.items(): - build_cmd.extend(["--build-arg", f"{key}={value}"]) - for tag in tags: - build_cmd.extend(["--tag", tag]) - build_cmd.extend([ - "--attest", - "type=sbom", - "--attest", - "type=provenance,mode=max", 
- ]) - if self.push: - build_cmd.append("--push") - else: - build_cmd.append("--load") - build_cmd.append(context) - - if not self.dry_run: - run(build_cmd, cwd=self.repo_root) - - digest = iid_file.read_text(encoding="utf-8").strip() if iid_file.exists() else "" - image_ref = self._component_ref(repo, digest) if digest else "" - - bundle_info = self._sign_image(name, image_ref, tags) - sbom_info = self._generate_sbom(name, image_ref) - provenance_info = self._attach_provenance(name, image_ref) - - component_entry = OrderedDict() - component_entry["name"] = name - if digest: - component_entry["image"] = image_ref - component_entry["tags"] = tags - if sbom_info: - component_entry["sbom"] = sbom_info - if provenance_info: - component_entry["provenance"] = provenance_info - if bundle_info: - component_entry["signature"] = bundle_info - if metadata_file.exists(): - metadata_rel = ( - str(metadata_file.relative_to(self.output_dir.parent)) - if metadata_file.is_relative_to(self.output_dir.parent) - else str(metadata_file) - ) - component_entry["metadata"] = OrderedDict(( - ("path", metadata_rel), - ("sha256", compute_sha256(metadata_file)), - )) - return component_entry - - def _sign_image(self, name: str, image_ref: str, tags: Sequence[str]) -> Optional[Mapping[str, Any]]: - if self.skip_signing: - return None - if not image_ref: - return None - if not (self.cosign_key_ref or self.cosign_identity_token): - raise ValueError("Signing requested but no cosign key or identity token provided. Use --skip-signing to bypass.") - signature_path = self.signature_dir / f"{name}.signature" - cmd = ["cosign", "sign", "--yes"] - if self.cosign_key_ref: - cmd.extend(["--key", self.cosign_key_ref]) - if self.cosign_identity_token: - cmd.extend(["--identity-token", self.cosign_identity_token]) - if not self.tlog_upload: - cmd.append("--tlog-upload=false") - cmd.append("--allow-http-registry") - cmd.append(image_ref) - if self.dry_run: - return None - run(cmd, env=self.cosign_env) - signature_data = run([ - "cosign", - "download", - "signature", - "--allow-http-registry", - image_ref, - ]) - signature_path.write_text(signature_data, encoding="utf-8") - signature_sha = compute_sha256(signature_path) - signature_ref = run([ - "cosign", - "triangulate", - "--allow-http-registry", - image_ref, - ]).strip() - return OrderedDict( - ( - ("signature", OrderedDict(( - ("path", str(signature_path.relative_to(self.output_dir.parent)) if signature_path.is_relative_to(self.output_dir.parent) else str(signature_path)), - ("sha256", signature_sha), - ("ref", signature_ref), - ("tlogUploaded", self.tlog_upload), - ))), - ) - ) - - def _generate_sbom(self, name: str, image_ref: str) -> Optional[Mapping[str, Any]]: - if not image_ref or self.dry_run: - return None - sbom_path = self.sboms_dir / f"{name}.cyclonedx.json" - run([ - "docker", - "sbom", - image_ref, - "--format", - "cyclonedx-json", - "--output", - str(sbom_path), - ]) - entry = OrderedDict(( - ("path", str(sbom_path.relative_to(self.output_dir.parent)) if sbom_path.is_relative_to(self.output_dir.parent) else str(sbom_path)), - ("sha256", compute_sha256(sbom_path)), - )) - if self.skip_signing: - return entry - attach_cmd = [ - "cosign", - "attach", - "sbom", - "--sbom", - str(sbom_path), - "--type", - "cyclonedx", - ] - if self.cosign_key_ref: - attach_cmd.extend(["--key", self.cosign_key_ref]) - attach_cmd.append("--allow-http-registry") - attach_cmd.append(image_ref) - run(attach_cmd, env=self.cosign_env) - reference = run(["cosign", "triangulate", "--type", "sbom", 
"--allow-http-registry", image_ref]).strip() - entry["ref"] = reference - return entry - - def _attach_provenance(self, name: str, image_ref: str) -> Optional[Mapping[str, Any]]: - if not image_ref or self.dry_run: - return None - predicate = OrderedDict() - predicate["buildDefinition"] = OrderedDict( - ( - ("buildType", "https://git.stella-ops.org/stellaops/release"), - ("externalParameters", OrderedDict(( - ("component", name), - ("version", self.version), - ("channel", self.channel), - ))), - ) - ) - predicate["runDetails"] = OrderedDict( - ( - ("builder", OrderedDict((("id", "https://github.com/actions"),))), - ("metadata", OrderedDict((("finishedOn", self.release_date),))), - ) - ) - predicate_path = self.provenance_dir / f"{name}.provenance.json" - with predicate_path.open("w", encoding="utf-8") as handle: - json.dump(predicate, handle, indent=2, sort_keys=True) - handle.write("\n") - entry = OrderedDict(( - ("path", str(predicate_path.relative_to(self.output_dir.parent)) if predicate_path.is_relative_to(self.output_dir.parent) else str(predicate_path)), - ("sha256", compute_sha256(predicate_path)), - )) - if self.skip_signing: - return entry - cmd = [ - "cosign", - "attest", - "--predicate", - str(predicate_path), - "--type", - "https://slsa.dev/provenance/v1", - ] - if self.cosign_key_ref: - cmd.extend(["--key", self.cosign_key_ref]) - if not self.tlog_upload: - cmd.append("--tlog-upload=false") - cmd.append("--allow-http-registry") - cmd.append(image_ref) - run(cmd, env=self.cosign_env) - ref = run([ - "cosign", - "triangulate", - "--type", - "https://slsa.dev/provenance/v1", - "--allow-http-registry", - image_ref, - ]).strip() - entry["ref"] = ref - return entry - - def _collect_debug_store(self, components: Sequence[Mapping[str, Any]]) -> Optional[Mapping[str, Any]]: - if self.dry_run: - return None - debug_records: Dict[Tuple[str, str], OrderedDict[str, Any]] = {} - for component in components: - image_ref = component.get("image") - if not image_ref: - continue - name = component.get("name", "unknown") - entries = self._extract_debug_entries(name, image_ref) - for entry in entries: - key = (entry["platform"], entry["buildId"]) - existing = debug_records.get(key) - if existing is None: - record = OrderedDict(( - ("buildId", entry["buildId"]), - ("platform", entry["platform"]), - ("debugPath", entry["debugPath"]), - ("sha256", entry["sha256"]), - ("size", entry["size"]), - ("components", [entry["component"]]), - ("images", [entry["image"]]), - ("sources", list(entry["sources"])), - )) - debug_records[key] = record - else: - if entry["sha256"] != existing["sha256"]: - raise RuntimeError( - f"Build-id {entry['buildId']} for platform {entry['platform']} produced conflicting hashes" - ) - if entry["component"] not in existing["components"]: - existing["components"].append(entry["component"]) - if entry["image"] not in existing["images"]: - existing["images"].append(entry["image"]) - for source in entry["sources"]: - if source not in existing["sources"]: - existing["sources"].append(source) - if not debug_records: - sys.stderr.write( - "[error] release build produced no debug artefacts; enable symbol extraction so out/release/debug is populated (DEVOPS-REL-17-004).\n" - ) - # Remove empty directories before failing - with contextlib.suppress(FileNotFoundError, OSError): - if not any(self.debug_store_dir.iterdir()): - self.debug_store_dir.rmdir() - with contextlib.suppress(FileNotFoundError, OSError): - if not any(self.debug_dir.iterdir()): - self.debug_dir.rmdir() - raise 
RuntimeError( - "Debug store collection produced no build-id artefacts (DEVOPS-REL-17-004)." - ) - entries = [] - for record in debug_records.values(): - entry = OrderedDict(( - ("buildId", record["buildId"]), - ("platform", record["platform"]), - ("debugPath", record["debugPath"]), - ("sha256", record["sha256"]), - ("size", record["size"]), - ("components", sorted(record["components"])), - ("images", sorted(record["images"])), - ("sources", sorted(record["sources"])), - )) - entries.append(entry) - entries.sort(key=lambda item: (item["platform"], item["buildId"])) - manifest_path = self.debug_dir / "debug-manifest.json" - platform_counts: Dict[str, int] = {} - for entry in entries: - platform_counts[entry["platform"]] = platform_counts.get(entry["platform"], 0) + 1 - missing_platforms = [ - platform - for platform in self._missing_symbol_platforms - if platform_counts.get(platform, 0) == 0 - ] - if missing_platforms: - raise RuntimeError( - "Debug extraction skipped all binaries for platforms without objcopy support: " - + ", ".join(sorted(missing_platforms)) - ) - manifest_data = OrderedDict(( - ("generatedAt", self.release_date), - ("version", self.version), - ("channel", self.channel), - ("artifacts", entries), - )) - with manifest_path.open("w", encoding="utf-8") as handle: - json.dump(manifest_data, handle, indent=2) - handle.write("\n") - manifest_sha = compute_sha256(manifest_path) - sha_path = manifest_path.with_suffix(manifest_path.suffix + ".sha256") - sha_path.write_text(f"{manifest_sha} {manifest_path.name}\n", encoding="utf-8") - manifest_rel = manifest_path.relative_to(self.output_dir).as_posix() - store_rel = self.debug_store_dir.relative_to(self.output_dir).as_posix() - platforms = sorted({entry["platform"] for entry in entries}) - return OrderedDict(( - ("manifest", manifest_rel), - ("sha256", manifest_sha), - ("entries", len(entries)), - ("platforms", platforms), - ("directory", store_rel), - )) - - # ---------------- - # CLI packaging - # ---------------- - def _build_cli_artifacts(self) -> List[Mapping[str, Any]]: - if not self.cli_config or self.dry_run: - return [] - project_rel = self.cli_config.get("project") - if not project_rel: - return [] - project_path = (self.repo_root / project_rel).resolve() - if not project_path.exists(): - raise FileNotFoundError(f"CLI project not found at {project_path}") - runtimes: Sequence[str] = self.cli_config.get("runtimes", []) - if not runtimes: - runtimes = ("linux-x64",) - package_prefix = self.cli_config.get("packagePrefix", "stella") - ensure_directory(self.cli_output_dir or (self.output_dir / "cli")) - - cli_entries: List[Mapping[str, Any]] = [] - for runtime in runtimes: - entry = self._build_cli_for_runtime(project_path, runtime, package_prefix) - cli_entries.append(entry) - return cli_entries - - def _build_cli_for_runtime( - self, - project_path: pathlib.Path, - runtime: str, - package_prefix: str, - ) -> Mapping[str, Any]: - publish_dir = ensure_directory(self.temp_dir / f"cli-publish-{runtime}") - publish_cmd = [ - "dotnet", - "publish", - str(project_path), - "--configuration", - "Release", - "--runtime", - runtime, - "--self-contained", - "true", - "/p:PublishSingleFile=true", - "/p:IncludeNativeLibrariesForSelfExtract=true", - "/p:EnableCompressionInSingleFile=true", - "/p:InvariantGlobalization=true", - "--output", - str(publish_dir), - ] - run(publish_cmd, cwd=self.repo_root) - - original_name = "StellaOps.Cli" - if runtime.startswith("win"): - source = publish_dir / f"{original_name}.exe" - target = publish_dir / 
"stella.exe" - else: - source = publish_dir / original_name - target = publish_dir / "stella" - if source.exists(): - if target.exists(): - target.unlink() - source.rename(target) - if not runtime.startswith("win"): - target.chmod(target.stat().st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH) - - package_dir = self.cli_output_dir or (self.output_dir / "cli") - ensure_directory(package_dir) - archive_name = f"{package_prefix}-{self.version}-{runtime}" - if runtime.startswith("win"): - package_path = package_dir / f"{archive_name}.zip" - self._archive_zip(publish_dir, package_path) - else: - package_path = package_dir / f"{archive_name}.tar.gz" - self._archive_tar(publish_dir, package_path) - - digest = compute_sha256(package_path) - sha_path = package_path.with_suffix(package_path.suffix + ".sha256") - sha_path.write_text(f"{digest} {package_path.name}\n", encoding="utf-8") - - archive_info = OrderedDict(( - ("path", self._relative_path(package_path)), - ("sha256", digest), - )) - signature_info = self._sign_file(package_path) - if signature_info: - archive_info["signature"] = signature_info - - sbom_info = self._generate_cli_sbom(runtime, publish_dir) - - entry = OrderedDict(( - ("runtime", runtime), - ("archive", archive_info), - )) - if sbom_info: - entry["sbom"] = sbom_info - return entry - - def _archive_tar(self, source_dir: pathlib.Path, archive_path: pathlib.Path) -> None: - with tarfile.open(archive_path, "w:gz") as tar: - for item in sorted(source_dir.rglob("*")): - arcname = item.relative_to(source_dir) - tar.add(item, arcname=arcname) - - def _archive_zip(self, source_dir: pathlib.Path, archive_path: pathlib.Path) -> None: - with zipfile.ZipFile(archive_path, "w", compression=zipfile.ZIP_DEFLATED) as zipf: - for item in sorted(source_dir.rglob("*")): - if item.is_dir(): - continue - arcname = item.relative_to(source_dir).as_posix() - zip_info = zipfile.ZipInfo(arcname) - zip_info.external_attr = (item.stat().st_mode & 0xFFFF) << 16 - with item.open("rb") as handle: - zipf.writestr(zip_info, handle.read()) - - def _generate_cli_sbom(self, runtime: str, publish_dir: pathlib.Path) -> Optional[Mapping[str, Any]]: - if self.dry_run: - return None - sbom_dir = ensure_directory(self.sboms_dir / "cli") - sbom_path = sbom_dir / f"cli-{runtime}.cyclonedx.json" - run([ - "syft", - f"dir:{publish_dir}", - "--output", - f"cyclonedx-json={sbom_path}", - ]) - entry = OrderedDict(( - ("path", self._relative_path(sbom_path)), - ("sha256", compute_sha256(sbom_path)), - )) - signature_info = self._sign_file(sbom_path) - if signature_info: - entry["signature"] = signature_info - return entry - - def _sign_file(self, path: pathlib.Path) -> Optional[Mapping[str, Any]]: - if self.skip_signing: - return None - if not (self.cosign_key_ref or self.cosign_identity_token): - raise ValueError( - "Signing requested but no cosign key or identity token provided. Use --skip-signing to bypass." 
- ) - signature_path = path.with_suffix(path.suffix + ".sig") - sha_path = path.with_suffix(path.suffix + ".sha256") - digest = compute_sha256(path) - sha_path.write_text(f"{digest} {path.name}\n", encoding="utf-8") - cmd = ["cosign", "sign-blob", "--yes", str(path)] - if self.cosign_key_ref: - cmd.extend(["--key", self.cosign_key_ref]) - if self.cosign_identity_token: - cmd.extend(["--identity-token", self.cosign_identity_token]) - if not self.tlog_upload: - cmd.append("--tlog-upload=false") - signature_data = run(cmd, env=self.cosign_env).strip() - signature_path.write_text(signature_data + "\n", encoding="utf-8") - return OrderedDict(( - ("path", self._relative_path(signature_path)), - ("sha256", compute_sha256(signature_path)), - ("tlogUploaded", self.tlog_upload), - )) - - def _extract_debug_entries(self, component_name: str, image_ref: str) -> List[OrderedDict[str, Any]]: - if self.dry_run: - return [] - entries: List[OrderedDict[str, Any]] = [] - platforms = self.platforms if self.push else [None] - for platform in platforms: - platform_label = platform or (self.platforms[0] if self.platforms else "linux/amd64") - if self.push: - pull_cmd = ["docker", "pull"] - if platform: - pull_cmd.extend(["--platform", platform]) - pull_cmd.append(image_ref) - run(pull_cmd) - create_cmd = ["docker", "create"] - if platform: - create_cmd.extend(["--platform", platform]) - create_cmd.append(image_ref) - container_id = run(create_cmd).strip() - export_path = self.temp_dir / f"{container_id}.tar" - try: - run(["docker", "export", container_id, "-o", str(export_path)], capture=False) - finally: - run(["docker", "rm", container_id], capture=False) - rootfs_dir = ensure_directory(self.temp_dir / f"{component_name}-{platform_label}-{uuid.uuid4().hex}") - try: - with tarfile.open(export_path, "r:*") as tar: - self._safe_extract_tar(tar, rootfs_dir) - finally: - export_path.unlink(missing_ok=True) - try: - for file_path in rootfs_dir.rglob("*"): - if not file_path.is_file() or file_path.is_symlink(): - continue - if not self._is_elf(file_path): - continue - build_id, machine = self._read_build_id_and_machine(file_path) - if not build_id: - continue - debug_file = self._debug_file_for_build_id(build_id) - if not debug_file.exists(): - debug_file.parent.mkdir(parents=True, exist_ok=True) - temp_debug = self.temp_dir / f"{build_id}.debug" - with contextlib.suppress(FileNotFoundError): - temp_debug.unlink() - objcopy_tool = self._resolve_objcopy_tool(machine) - if not objcopy_tool: - self._emit_objcopy_warning(machine, platform_label, file_path) - with contextlib.suppress(FileNotFoundError): - temp_debug.unlink() - continue - try: - run([objcopy_tool, "--only-keep-debug", str(file_path), str(temp_debug)], capture=False) - except CommandError: - with contextlib.suppress(FileNotFoundError): - temp_debug.unlink() - continue - debug_file.parent.mkdir(parents=True, exist_ok=True) - shutil.move(str(temp_debug), str(debug_file)) - sha = compute_sha256(debug_file) - rel_debug = debug_file.relative_to(self.output_dir).as_posix() - source_rel = file_path.relative_to(rootfs_dir).as_posix() - entry = OrderedDict(( - ("component", component_name), - ("image", image_ref), - ("platform", platform_label), - ("buildId", build_id), - ("debugPath", rel_debug), - ("sha256", sha), - ("size", debug_file.stat().st_size), - ("sources", [source_rel]), - )) - entries.append(entry) - finally: - shutil.rmtree(rootfs_dir, ignore_errors=True) - return entries - - def _debug_file_for_build_id(self, build_id: str) -> pathlib.Path: - 
normalized = build_id.lower() - prefix = normalized[:2] - remainder = normalized[2:] - return self.debug_store_dir / prefix / f"{remainder}.debug" - - @staticmethod - def _safe_extract_tar(tar: tarfile.TarFile, dest: pathlib.Path) -> None: - dest_root = dest.resolve() - members = tar.getmembers() - for member in members: - member_path = (dest / member.name).resolve() - if not str(member_path).startswith(str(dest_root)): - raise RuntimeError(f"Refusing to extract '{member.name}' outside of destination directory") - tar.extractall(dest) - - @staticmethod - def _is_elf(path: pathlib.Path) -> bool: - try: - with path.open("rb") as handle: - return handle.read(4) == b"\x7fELF" - except OSError: - return False - - def _read_build_id_and_machine(self, path: pathlib.Path) -> Tuple[Optional[str], Optional[str]]: - try: - header_output = run(["readelf", "-nh", str(path)]) - except CommandError: - return None, None - build_id: Optional[str] = None - machine: Optional[str] = None - for line in header_output.splitlines(): - stripped = line.strip() - if stripped.startswith("Build ID:"): - build_id = stripped.split("Build ID:", 1)[1].strip().lower() - elif stripped.startswith("Machine:"): - machine = stripped.split("Machine:", 1)[1].strip() - return build_id, machine - - def _resolve_objcopy_tool(self, machine: Optional[str]) -> Optional[str]: - key = (machine or "generic").lower() - if key in self._objcopy_cache: - return self._objcopy_cache[key] - - env_override = None - if machine and "aarch64" in machine.lower(): - env_override = os.environ.get("STELLAOPS_OBJCOPY_AARCH64") - candidates = [ - env_override, - "aarch64-linux-gnu-objcopy", - "llvm-objcopy", - "objcopy", - ] - elif machine and any(token in machine.lower() for token in ("x86-64", "amd", "x86_64")): - env_override = os.environ.get("STELLAOPS_OBJCOPY_AMD64") - candidates = [ - env_override, - "objcopy", - "llvm-objcopy", - ] - else: - env_override = os.environ.get("STELLAOPS_OBJCOPY_DEFAULT") - candidates = [ - env_override, - "objcopy", - "llvm-objcopy", - ] - - for candidate in candidates: - if not candidate: - continue - tool = shutil.which(candidate) - if tool: - self._objcopy_cache[key] = tool - return tool - self._objcopy_cache[key] = None - return None - - def _emit_objcopy_warning(self, machine: Optional[str], platform: str, file_path: pathlib.Path) -> None: - machine_label = machine or "unknown-machine" - count = self._missing_symbol_platforms.get(platform, 0) - self._missing_symbol_platforms[platform] = count + 1 - if count == 0: - sys.stderr.write( - f"[warn] no objcopy tool available for {machine_label}; skipping debug extraction for {file_path}.\n" - ) - - # ---------------- - # Helm + compose - # ---------------- - def _package_helm(self) -> Optional[Mapping[str, Any]]: - helm_cfg = self.config.get("helm") - if not helm_cfg: - return None - chart_path = helm_cfg.get("chartPath") - if not chart_path: - return None - chart_dir = self.repo_root / chart_path - output_dir = ensure_directory(self.output_dir / "helm") - archive_path = output_dir / f"stellaops-{self.version}.tgz" - if not self.dry_run: - cmd = [ - "helm", - "package", - str(chart_dir), - "--destination", - str(output_dir), - "--version", - self.version, - "--app-version", - self.version, - ] - run(cmd) - packaged = next(output_dir.glob("*.tgz"), None) - if packaged and packaged != archive_path: - packaged.rename(archive_path) - digest = compute_sha256(archive_path) if archive_path.exists() else None - if archive_path.exists() and 
archive_path.is_relative_to(self.output_dir): - manifest_path = str(archive_path.relative_to(self.output_dir)) - elif archive_path.exists() and archive_path.is_relative_to(self.output_dir.parent): - manifest_path = str(archive_path.relative_to(self.output_dir.parent)) - else: - manifest_path = f"helm/{archive_path.name}" - return OrderedDict(( - ("name", "stellaops"), - ("version", self.version), - ("path", manifest_path), - ("sha256", digest), - )) - - def _digest_compose_files(self) -> List[Mapping[str, Any]]: - compose_cfg = self.config.get("compose", {}) - files = compose_cfg.get("files", []) - entries: List[Mapping[str, Any]] = [] - for rel_path in files: - src = self.repo_root / rel_path - if not src.exists(): - continue - digest = compute_sha256(src) - entries.append(OrderedDict(( - ("name", pathlib.Path(rel_path).name), - ("path", rel_path), - ("sha256", digest), - ))) - return entries - - # ---------------- - # Manifest assembly - # ---------------- - def _compose_manifest( - self, - components: List[Mapping[str, Any]], - helm_meta: Optional[Mapping[str, Any]], - compose_meta: List[Mapping[str, Any]], - debug_meta: Optional[Mapping[str, Any]], - cli_meta: Sequence[Mapping[str, Any]], - ) -> Dict[str, Any]: - manifest = OrderedDict() - manifest["release"] = OrderedDict(( - ("version", self.version), - ("channel", self.channel), - ("date", self.release_date), - ("calendar", self.calendar), - )) - manifest["components"] = components - if helm_meta: - manifest["charts"] = [helm_meta] - if compose_meta: - manifest["compose"] = compose_meta - if debug_meta: - manifest["debugStore"] = debug_meta - if cli_meta: - manifest["cli"] = list(cli_meta) - return manifest - - -def parse_args(argv: Optional[Sequence[str]] = None) -> argparse.Namespace: - parser = argparse.ArgumentParser(description="Build StellaOps release artefacts deterministically") - parser.add_argument("--config", type=pathlib.Path, default=DEFAULT_CONFIG, help="Path to release config JSON") - parser.add_argument("--version", required=True, help="Release version string (e.g. 2025.10.0-edge)") - parser.add_argument("--channel", required=True, help="Release channel (edge|stable|lts)") - parser.add_argument("--calendar", help="Calendar tag (YYYY.MM); defaults derived from version") - parser.add_argument("--git-sha", default=os.environ.get("GIT_COMMIT", "unknown"), help="Git revision to embed") - parser.add_argument("--output", type=pathlib.Path, default=REPO_ROOT / "out/release", help="Output directory for artefacts") - parser.add_argument("--no-push", action="store_true", help="Do not push images (use docker load)") - parser.add_argument("--dry-run", action="store_true", help="Print steps without executing commands") - parser.add_argument("--registry", help="Override registry root (e.g. 
localhost:5000/stellaops)") - parser.add_argument("--platform", dest="platforms", action="append", metavar="PLATFORM", help="Override build platforms (repeatable)") - parser.add_argument("--skip-signing", action="store_true", help="Skip cosign signing/attestation steps") - parser.add_argument("--cosign-key", dest="cosign_key", help="Override COSIGN_KEY_REF value") - parser.add_argument("--cosign-password", dest="cosign_password", help="Password for cosign key") - parser.add_argument("--cosign-identity-token", dest="cosign_identity_token", help="Identity token for keyless cosign flows") - parser.add_argument("--no-transparency", action="store_true", help="Disable Rekor transparency log upload during signing") - return parser.parse_args(argv) - - -def write_manifest(manifest: Mapping[str, Any], output_dir: pathlib.Path) -> pathlib.Path: - # Copy manifest to avoid mutating input when computing checksum - base_manifest = OrderedDict(manifest) - yaml_without_checksum = dump_yaml(base_manifest) - digest = hashlib.sha256(yaml_without_checksum.encode("utf-8")).hexdigest() - manifest_with_checksum = OrderedDict(base_manifest) - manifest_with_checksum["checksums"] = OrderedDict((("sha256", digest),)) - final_yaml = dump_yaml(manifest_with_checksum) - output_path = output_dir / "release.yaml" - with output_path.open("w", encoding="utf-8") as handle: - handle.write(final_yaml) - sha_path = output_path.with_name(output_path.name + ".sha256") - yaml_file_digest = compute_sha256(output_path) - sha_path.write_text(f"{yaml_file_digest} {output_path.name}\n", encoding="utf-8") - - json_text = json.dumps(manifest_with_checksum, indent=2) - json_path = output_dir / "release.json" - with json_path.open("w", encoding="utf-8") as handle: - handle.write(json_text) - handle.write("\n") - json_digest = compute_sha256(json_path) - json_sha_path = json_path.with_name(json_path.name + ".sha256") - json_sha_path.write_text(f"{json_digest} {json_path.name}\n", encoding="utf-8") - return output_path - - -def main(argv: Optional[Sequence[str]] = None) -> int: - args = parse_args(argv) - config = load_json_config(args.config) - release_date = utc_now_iso() - calendar = sanitize_calendar(args.version, args.calendar) - builder = ReleaseBuilder( - repo_root=REPO_ROOT, - config=config, - version=args.version, - channel=args.channel, - calendar=calendar, - release_date=release_date, - git_sha=args.git_sha, - output_dir=args.output, - push=not args.no_push, - dry_run=args.dry_run, - registry_override=args.registry, - platforms_override=args.platforms, - skip_signing=args.skip_signing, - cosign_key_ref=args.cosign_key, - cosign_password=args.cosign_password, - cosign_identity_token=args.cosign_identity_token, - tlog_upload=not args.no_transparency, - ) - manifest = builder.run() - manifest_path = write_manifest(manifest, builder.output_dir) - print(f"✅ Release manifest written to {manifest_path}") - return 0 - - -if __name__ == "__main__": - raise SystemExit(main()) +#!/usr/bin/env python3 +"""Deterministic release pipeline helper for StellaOps. + +This script builds service containers, generates SBOM and provenance artefacts, +signs them with cosign, and writes a channel-specific release manifest. 
+ +The workflow expects external tooling to be available on PATH: +- docker (with buildx) +- cosign +- helm +- npm / node (for the UI build) +- dotnet SDK (for BuildX plugin publication) +""" +from __future__ import annotations + +import argparse +import contextlib +import datetime as dt +import hashlib +import json +import os +import pathlib +import re +import shlex +import shutil +import stat +import subprocess +import sys +import tarfile +import tempfile +import uuid +import zipfile +from collections import OrderedDict +from typing import Any, Dict, Iterable, List, Mapping, MutableMapping, Optional, Sequence, Tuple + +REPO_ROOT = pathlib.Path(__file__).resolve().parents[3] +DEFAULT_CONFIG = REPO_ROOT / "ops/devops/release/components.json" + +class CommandError(RuntimeError): + pass + +def run(cmd: Sequence[str], *, cwd: Optional[pathlib.Path] = None, env: Optional[Mapping[str, str]] = None, capture: bool = True) -> str: + """Run a subprocess command, returning stdout (text).""" + process_env = os.environ.copy() + if env: + process_env.update(env) + result = subprocess.run( + list(cmd), + cwd=str(cwd) if cwd else None, + env=process_env, + check=False, + capture_output=capture, + text=True, + ) + if process_env.get("STELLAOPS_RELEASE_DEBUG"): + sys.stderr.write(f"[debug] {' '.join(shlex.quote(c) for c in cmd)}\n") + if capture: + sys.stderr.write(result.stdout) + sys.stderr.write(result.stderr) + if result.returncode != 0: + stdout = result.stdout if capture else "" + stderr = result.stderr if capture else "" + raise CommandError(f"Command failed ({result.returncode}): {' '.join(cmd)}\nSTDOUT:\n{stdout}\nSTDERR:\n{stderr}") + + return result.stdout if capture else "" + + +def load_json_config(path: pathlib.Path) -> Dict[str, Any]: + with path.open("r", encoding="utf-8") as handle: + return json.load(handle) + + +def ensure_directory(path: pathlib.Path) -> pathlib.Path: + path.mkdir(parents=True, exist_ok=True) + return path + + +def compute_sha256(path: pathlib.Path) -> str: + sha = hashlib.sha256() + with path.open("rb") as handle: + for chunk in iter(lambda: handle.read(1024 * 1024), b""): + sha.update(chunk) + return sha.hexdigest() + + +def format_scalar(value: Any) -> str: + if isinstance(value, bool): + return "true" if value else "false" + if value is None: + return "null" + if isinstance(value, (int, float)): + return str(value) + text = str(value) + if text == "": + return '""' + if re.search(r"[\s:#\-\[\]\{\}]", text): + return json.dumps(text, ensure_ascii=False) + return text + + +def _yaml_lines(value: Any, indent: int = 0) -> List[str]: + pad = " " * indent + if isinstance(value, Mapping): + lines: List[str] = [] + for key, val in value.items(): + if isinstance(val, (Mapping, list)): + lines.append(f"{pad}{key}:") + lines.extend(_yaml_lines(val, indent + 1)) + else: + lines.append(f"{pad}{key}: {format_scalar(val)}") + if not lines: + lines.append(f"{pad}{{}}") + return lines + if isinstance(value, list): + lines = [] + if not value: + lines.append(f"{pad}[]") + return lines + for item in value: + if isinstance(item, (Mapping, list)): + lines.append(f"{pad}-") + lines.extend(_yaml_lines(item, indent + 1)) + else: + lines.append(f"{pad}- {format_scalar(item)}") + return lines + return [f"{pad}{format_scalar(value)}"] + + +def dump_yaml(data: Mapping[str, Any]) -> str: + lines: List[str] = _yaml_lines(data) + return "\n".join(lines) + "\n" + + +def utc_now_iso() -> str: + return dt.datetime.now(tz=dt.timezone.utc).replace(microsecond=0).isoformat().replace("+00:00", "Z") + + 
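+# Illustrative invocation (editor's example, not an endorsed entry point; the flags
+# correspond to parse_args further below and the paths to this module's defaults):
+#
+#   python ops/devops/release/build_release.py \
+#     --version 2025.10.0-edge --channel edge \
+#     --registry localhost:5000/stellaops --no-push --skip-signing
+#
+# With the defaults this writes out/release/release.yaml and release.json alongside
+# the SBOM, provenance and signature artefacts under out/release/artifacts/.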
+def sanitize_calendar(version: str, explicit: Optional[str]) -> str: + if explicit: + return explicit + # Expect version like 2025.10.0-edge or 2.4.1 + parts = re.findall(r"\d+", version) + if len(parts) >= 2: + return f"{parts[0]}.{parts[1]}" + return dt.datetime.now(tz=dt.timezone.utc).strftime("%Y.%m") + + +class ReleaseBuilder: + def __init__( + self, + *, + repo_root: pathlib.Path, + config: Mapping[str, Any], + version: str, + channel: str, + calendar: str, + release_date: str, + git_sha: str, + output_dir: pathlib.Path, + push: bool, + dry_run: bool, + registry_override: Optional[str] = None, + platforms_override: Optional[Sequence[str]] = None, + skip_signing: bool = False, + cosign_key_ref: Optional[str] = None, + cosign_password: Optional[str] = None, + cosign_identity_token: Optional[str] = None, + tlog_upload: bool = True, + ) -> None: + self.repo_root = repo_root + self.config = config + self.version = version + self.channel = channel + self.calendar = calendar + self.release_date = release_date + self.git_sha = git_sha + self.output_dir = ensure_directory(output_dir) + self.push = push + self.dry_run = dry_run + self.registry = registry_override or config.get("registry") + if not self.registry: + raise ValueError("Config missing 'registry'") + platforms = list(platforms_override) if platforms_override else config.get("platforms") + if not platforms: + platforms = ["linux/amd64", "linux/arm64"] + self.platforms = list(platforms) + self.source_date_epoch = str(int(dt.datetime.fromisoformat(release_date.replace("Z", "+00:00")).timestamp())) + self.artifacts_dir = ensure_directory(self.output_dir / "artifacts") + self.sboms_dir = ensure_directory(self.artifacts_dir / "sboms") + self.provenance_dir = ensure_directory(self.artifacts_dir / "provenance") + self.signature_dir = ensure_directory(self.artifacts_dir / "signatures") + self.metadata_dir = ensure_directory(self.artifacts_dir / "metadata") + self.debug_dir = ensure_directory(self.output_dir / "debug") + self.debug_store_dir = ensure_directory(self.debug_dir / ".build-id") + self.cli_config = config.get("cli") + self.cli_output_dir = ensure_directory(self.output_dir / "cli") if self.cli_config else None + self.temp_dir = pathlib.Path(tempfile.mkdtemp(prefix="stellaops-release-")) + self.skip_signing = skip_signing + self.tlog_upload = tlog_upload + self.cosign_key_ref = cosign_key_ref or os.environ.get("COSIGN_KEY_REF") + self.cosign_identity_token = cosign_identity_token or os.environ.get("COSIGN_IDENTITY_TOKEN") + password = cosign_password if cosign_password is not None else os.environ.get("COSIGN_PASSWORD", "") + self.cosign_env = { + "COSIGN_PASSWORD": password, + "COSIGN_EXPERIMENTAL": "1", + "COSIGN_ALLOW_HTTP_REGISTRY": os.environ.get("COSIGN_ALLOW_HTTP_REGISTRY", "1"), + "COSIGN_DOCKER_MEDIA_TYPES": os.environ.get("COSIGN_DOCKER_MEDIA_TYPES", "1"), + } + # Cache resolved objcopy binaries keyed by machine identifier to avoid repeated lookups. 
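+        # Resolution order (see _resolve_objcopy_tool): the STELLAOPS_OBJCOPY_AARCH64 /
+        # STELLAOPS_OBJCOPY_AMD64 / STELLAOPS_OBJCOPY_DEFAULT overrides are tried first,
+        # then cross, llvm and native objcopy binaries found on PATH.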
+ self._objcopy_cache: Dict[str, Optional[str]] = {} + self._missing_symbol_platforms: Dict[str, int] = {} + + # ---------------- + # Build steps + # ---------------- + def run(self) -> Dict[str, Any]: + components_result = [] + if self.dry_run: + print("⚠️ Dry-run enabled; commands will be skipped") + self._prime_buildx_plugin() + for component in self.config.get("components", []): + result = self._build_component(component) + components_result.append(result) + helm_meta = self._package_helm() + compose_meta = self._digest_compose_files() + debug_meta = self._collect_debug_store(components_result) + cli_meta = self._build_cli_artifacts() + manifest = self._compose_manifest(components_result, helm_meta, compose_meta, debug_meta, cli_meta) + return manifest + + def _prime_buildx_plugin(self) -> None: + plugin_cfg = self.config.get("buildxPlugin") + if not plugin_cfg: + return + project = plugin_cfg.get("project") + if not project: + return + out_dir = ensure_directory(self.temp_dir / "buildx") + if not self.dry_run: + run([ + "dotnet", + "publish", + project, + "-c", + "Release", + "-o", + str(out_dir), + ]) + cas_dir = ensure_directory(self.temp_dir / "cas") + run([ + "dotnet", + str(out_dir / "StellaOps.Scanner.Sbomer.BuildXPlugin.dll"), + "handshake", + "--manifest", + str(out_dir), + "--cas", + str(cas_dir), + ]) + + def _component_tags(self, repo: str) -> List[str]: + base = f"{self.registry}/{repo}" + tags = [f"{base}:{self.version}"] + if self.channel: + tags.append(f"{base}:{self.channel}") + return tags + + def _component_ref(self, repo: str, digest: str) -> str: + return f"{self.registry}/{repo}@{digest}" + + def _relative_path(self, path: pathlib.Path) -> str: + try: + return str(path.relative_to(self.output_dir.parent)) + except ValueError: + return str(path) + + def _build_component(self, component: Mapping[str, Any]) -> Mapping[str, Any]: + name = component["name"] + repo = component.get("repository", name) + kind = component.get("kind", "dotnet-service") + dockerfile = component.get("dockerfile") + if not dockerfile: + raise ValueError(f"Component {name} missing dockerfile") + context = component.get("context", ".") + iid_file = self.temp_dir / f"{name}.iid" + metadata_file = self.metadata_dir / f"{name}.metadata.json" + + build_args = { + "VERSION": self.version, + "CHANNEL": self.channel, + "GIT_SHA": self.git_sha, + "SOURCE_DATE_EPOCH": self.source_date_epoch, + } + docker_cfg = self.config.get("docker", {}) + if kind == "dotnet-service": + build_args.update({ + "PROJECT": component["project"], + "ENTRYPOINT_DLL": component["entrypoint"], + "SDK_IMAGE": docker_cfg.get("sdkImage", "mcr.microsoft.com/dotnet/nightly/sdk:10.0"), + "RUNTIME_IMAGE": docker_cfg.get("runtimeImage", "gcr.io/distroless/dotnet/aspnet:latest"), + }) + elif kind == "angular-ui": + build_args.update({ + "NODE_IMAGE": docker_cfg.get("nodeImage", "node:20.14.0-bookworm"), + "NGINX_IMAGE": docker_cfg.get("nginxImage", "nginx:1.27-alpine"), + }) + else: + raise ValueError(f"Unsupported component kind {kind}") + + tags = self._component_tags(repo) + build_cmd = [ + "docker", + "buildx", + "build", + "--file", + dockerfile, + "--metadata-file", + str(metadata_file), + "--iidfile", + str(iid_file), + "--progress", + "plain", + "--platform", + ",".join(self.platforms), + ] + for key, value in build_args.items(): + build_cmd.extend(["--build-arg", f"{key}={value}"]) + for tag in tags: + build_cmd.extend(["--tag", tag]) + build_cmd.extend([ + "--attest", + "type=sbom", + "--attest", + "type=provenance,mode=max", 
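+            # The --attest flags request BuildKit-generated SBOM and SLSA provenance
+            # attestations; _generate_sbom/_attach_provenance below add cosign-signed
+            # copies that are recorded in the release manifest.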
+ ]) + if self.push: + build_cmd.append("--push") + else: + build_cmd.append("--load") + build_cmd.append(context) + + if not self.dry_run: + run(build_cmd, cwd=self.repo_root) + + digest = iid_file.read_text(encoding="utf-8").strip() if iid_file.exists() else "" + image_ref = self._component_ref(repo, digest) if digest else "" + + bundle_info = self._sign_image(name, image_ref, tags) + sbom_info = self._generate_sbom(name, image_ref) + provenance_info = self._attach_provenance(name, image_ref) + + component_entry = OrderedDict() + component_entry["name"] = name + if digest: + component_entry["image"] = image_ref + component_entry["tags"] = tags + if sbom_info: + component_entry["sbom"] = sbom_info + if provenance_info: + component_entry["provenance"] = provenance_info + if bundle_info: + component_entry["signature"] = bundle_info + if metadata_file.exists(): + metadata_rel = ( + str(metadata_file.relative_to(self.output_dir.parent)) + if metadata_file.is_relative_to(self.output_dir.parent) + else str(metadata_file) + ) + component_entry["metadata"] = OrderedDict(( + ("path", metadata_rel), + ("sha256", compute_sha256(metadata_file)), + )) + return component_entry + + def _sign_image(self, name: str, image_ref: str, tags: Sequence[str]) -> Optional[Mapping[str, Any]]: + if self.skip_signing: + return None + if not image_ref: + return None + if not (self.cosign_key_ref or self.cosign_identity_token): + raise ValueError("Signing requested but no cosign key or identity token provided. Use --skip-signing to bypass.") + signature_path = self.signature_dir / f"{name}.signature" + cmd = ["cosign", "sign", "--yes"] + if self.cosign_key_ref: + cmd.extend(["--key", self.cosign_key_ref]) + if self.cosign_identity_token: + cmd.extend(["--identity-token", self.cosign_identity_token]) + if not self.tlog_upload: + cmd.append("--tlog-upload=false") + cmd.append("--allow-http-registry") + cmd.append(image_ref) + if self.dry_run: + return None + run(cmd, env=self.cosign_env) + signature_data = run([ + "cosign", + "download", + "signature", + "--allow-http-registry", + image_ref, + ]) + signature_path.write_text(signature_data, encoding="utf-8") + signature_sha = compute_sha256(signature_path) + signature_ref = run([ + "cosign", + "triangulate", + "--allow-http-registry", + image_ref, + ]).strip() + return OrderedDict( + ( + ("signature", OrderedDict(( + ("path", str(signature_path.relative_to(self.output_dir.parent)) if signature_path.is_relative_to(self.output_dir.parent) else str(signature_path)), + ("sha256", signature_sha), + ("ref", signature_ref), + ("tlogUploaded", self.tlog_upload), + ))), + ) + ) + + def _generate_sbom(self, name: str, image_ref: str) -> Optional[Mapping[str, Any]]: + if not image_ref or self.dry_run: + return None + sbom_path = self.sboms_dir / f"{name}.cyclonedx.json" + run([ + "docker", + "sbom", + image_ref, + "--format", + "cyclonedx-json", + "--output", + str(sbom_path), + ]) + entry = OrderedDict(( + ("path", str(sbom_path.relative_to(self.output_dir.parent)) if sbom_path.is_relative_to(self.output_dir.parent) else str(sbom_path)), + ("sha256", compute_sha256(sbom_path)), + )) + if self.skip_signing: + return entry + attach_cmd = [ + "cosign", + "attach", + "sbom", + "--sbom", + str(sbom_path), + "--type", + "cyclonedx", + ] + if self.cosign_key_ref: + attach_cmd.extend(["--key", self.cosign_key_ref]) + attach_cmd.append("--allow-http-registry") + attach_cmd.append(image_ref) + run(attach_cmd, env=self.cosign_env) + reference = run(["cosign", "triangulate", "--type", "sbom", 
"--allow-http-registry", image_ref]).strip() + entry["ref"] = reference + return entry + + def _attach_provenance(self, name: str, image_ref: str) -> Optional[Mapping[str, Any]]: + if not image_ref or self.dry_run: + return None + predicate = OrderedDict() + predicate["buildDefinition"] = OrderedDict( + ( + ("buildType", "https://git.stella-ops.org/stellaops/release"), + ("externalParameters", OrderedDict(( + ("component", name), + ("version", self.version), + ("channel", self.channel), + ))), + ) + ) + predicate["runDetails"] = OrderedDict( + ( + ("builder", OrderedDict((("id", "https://github.com/actions"),))), + ("metadata", OrderedDict((("finishedOn", self.release_date),))), + ) + ) + predicate_path = self.provenance_dir / f"{name}.provenance.json" + with predicate_path.open("w", encoding="utf-8") as handle: + json.dump(predicate, handle, indent=2, sort_keys=True) + handle.write("\n") + entry = OrderedDict(( + ("path", str(predicate_path.relative_to(self.output_dir.parent)) if predicate_path.is_relative_to(self.output_dir.parent) else str(predicate_path)), + ("sha256", compute_sha256(predicate_path)), + )) + if self.skip_signing: + return entry + cmd = [ + "cosign", + "attest", + "--predicate", + str(predicate_path), + "--type", + "https://slsa.dev/provenance/v1", + ] + if self.cosign_key_ref: + cmd.extend(["--key", self.cosign_key_ref]) + if not self.tlog_upload: + cmd.append("--tlog-upload=false") + cmd.append("--allow-http-registry") + cmd.append(image_ref) + run(cmd, env=self.cosign_env) + ref = run([ + "cosign", + "triangulate", + "--type", + "https://slsa.dev/provenance/v1", + "--allow-http-registry", + image_ref, + ]).strip() + entry["ref"] = ref + return entry + + def _collect_debug_store(self, components: Sequence[Mapping[str, Any]]) -> Optional[Mapping[str, Any]]: + if self.dry_run: + return None + debug_records: Dict[Tuple[str, str], OrderedDict[str, Any]] = {} + for component in components: + image_ref = component.get("image") + if not image_ref: + continue + name = component.get("name", "unknown") + entries = self._extract_debug_entries(name, image_ref) + for entry in entries: + key = (entry["platform"], entry["buildId"]) + existing = debug_records.get(key) + if existing is None: + record = OrderedDict(( + ("buildId", entry["buildId"]), + ("platform", entry["platform"]), + ("debugPath", entry["debugPath"]), + ("sha256", entry["sha256"]), + ("size", entry["size"]), + ("components", [entry["component"]]), + ("images", [entry["image"]]), + ("sources", list(entry["sources"])), + )) + debug_records[key] = record + else: + if entry["sha256"] != existing["sha256"]: + raise RuntimeError( + f"Build-id {entry['buildId']} for platform {entry['platform']} produced conflicting hashes" + ) + if entry["component"] not in existing["components"]: + existing["components"].append(entry["component"]) + if entry["image"] not in existing["images"]: + existing["images"].append(entry["image"]) + for source in entry["sources"]: + if source not in existing["sources"]: + existing["sources"].append(source) + if not debug_records: + sys.stderr.write( + "[error] release build produced no debug artefacts; enable symbol extraction so out/release/debug is populated (DEVOPS-REL-17-004).\n" + ) + # Remove empty directories before failing + with contextlib.suppress(FileNotFoundError, OSError): + if not any(self.debug_store_dir.iterdir()): + self.debug_store_dir.rmdir() + with contextlib.suppress(FileNotFoundError, OSError): + if not any(self.debug_dir.iterdir()): + self.debug_dir.rmdir() + raise 
RuntimeError( + "Debug store collection produced no build-id artefacts (DEVOPS-REL-17-004)." + ) + entries = [] + for record in debug_records.values(): + entry = OrderedDict(( + ("buildId", record["buildId"]), + ("platform", record["platform"]), + ("debugPath", record["debugPath"]), + ("sha256", record["sha256"]), + ("size", record["size"]), + ("components", sorted(record["components"])), + ("images", sorted(record["images"])), + ("sources", sorted(record["sources"])), + )) + entries.append(entry) + entries.sort(key=lambda item: (item["platform"], item["buildId"])) + manifest_path = self.debug_dir / "debug-manifest.json" + platform_counts: Dict[str, int] = {} + for entry in entries: + platform_counts[entry["platform"]] = platform_counts.get(entry["platform"], 0) + 1 + missing_platforms = [ + platform + for platform in self._missing_symbol_platforms + if platform_counts.get(platform, 0) == 0 + ] + if missing_platforms: + raise RuntimeError( + "Debug extraction skipped all binaries for platforms without objcopy support: " + + ", ".join(sorted(missing_platforms)) + ) + manifest_data = OrderedDict(( + ("generatedAt", self.release_date), + ("version", self.version), + ("channel", self.channel), + ("artifacts", entries), + )) + with manifest_path.open("w", encoding="utf-8") as handle: + json.dump(manifest_data, handle, indent=2) + handle.write("\n") + manifest_sha = compute_sha256(manifest_path) + sha_path = manifest_path.with_suffix(manifest_path.suffix + ".sha256") + sha_path.write_text(f"{manifest_sha} {manifest_path.name}\n", encoding="utf-8") + manifest_rel = manifest_path.relative_to(self.output_dir).as_posix() + store_rel = self.debug_store_dir.relative_to(self.output_dir).as_posix() + platforms = sorted({entry["platform"] for entry in entries}) + return OrderedDict(( + ("manifest", manifest_rel), + ("sha256", manifest_sha), + ("entries", len(entries)), + ("platforms", platforms), + ("directory", store_rel), + )) + + # ---------------- + # CLI packaging + # ---------------- + def _build_cli_artifacts(self) -> List[Mapping[str, Any]]: + if not self.cli_config or self.dry_run: + return [] + project_rel = self.cli_config.get("project") + if not project_rel: + return [] + project_path = (self.repo_root / project_rel).resolve() + if not project_path.exists(): + raise FileNotFoundError(f"CLI project not found at {project_path}") + runtimes: Sequence[str] = self.cli_config.get("runtimes", []) + if not runtimes: + runtimes = ("linux-x64",) + package_prefix = self.cli_config.get("packagePrefix", "stella") + ensure_directory(self.cli_output_dir or (self.output_dir / "cli")) + + cli_entries: List[Mapping[str, Any]] = [] + for runtime in runtimes: + entry = self._build_cli_for_runtime(project_path, runtime, package_prefix) + cli_entries.append(entry) + return cli_entries + + def _build_cli_for_runtime( + self, + project_path: pathlib.Path, + runtime: str, + package_prefix: str, + ) -> Mapping[str, Any]: + publish_dir = ensure_directory(self.temp_dir / f"cli-publish-{runtime}") + publish_cmd = [ + "dotnet", + "publish", + str(project_path), + "--configuration", + "Release", + "--runtime", + runtime, + "--self-contained", + "true", + "/p:PublishSingleFile=true", + "/p:IncludeNativeLibrariesForSelfExtract=true", + "/p:EnableCompressionInSingleFile=true", + "/p:InvariantGlobalization=true", + "--output", + str(publish_dir), + ] + run(publish_cmd, cwd=self.repo_root) + + original_name = "StellaOps.Cli" + if runtime.startswith("win"): + source = publish_dir / f"{original_name}.exe" + target = publish_dir / 
"stella.exe" + else: + source = publish_dir / original_name + target = publish_dir / "stella" + if source.exists(): + if target.exists(): + target.unlink() + source.rename(target) + if not runtime.startswith("win"): + target.chmod(target.stat().st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH) + + package_dir = self.cli_output_dir or (self.output_dir / "cli") + ensure_directory(package_dir) + archive_name = f"{package_prefix}-{self.version}-{runtime}" + if runtime.startswith("win"): + package_path = package_dir / f"{archive_name}.zip" + self._archive_zip(publish_dir, package_path) + else: + package_path = package_dir / f"{archive_name}.tar.gz" + self._archive_tar(publish_dir, package_path) + + digest = compute_sha256(package_path) + sha_path = package_path.with_suffix(package_path.suffix + ".sha256") + sha_path.write_text(f"{digest} {package_path.name}\n", encoding="utf-8") + + archive_info = OrderedDict(( + ("path", self._relative_path(package_path)), + ("sha256", digest), + )) + signature_info = self._sign_file(package_path) + if signature_info: + archive_info["signature"] = signature_info + + sbom_info = self._generate_cli_sbom(runtime, publish_dir) + + entry = OrderedDict(( + ("runtime", runtime), + ("archive", archive_info), + )) + if sbom_info: + entry["sbom"] = sbom_info + return entry + + def _archive_tar(self, source_dir: pathlib.Path, archive_path: pathlib.Path) -> None: + with tarfile.open(archive_path, "w:gz") as tar: + for item in sorted(source_dir.rglob("*")): + arcname = item.relative_to(source_dir) + tar.add(item, arcname=arcname) + + def _archive_zip(self, source_dir: pathlib.Path, archive_path: pathlib.Path) -> None: + with zipfile.ZipFile(archive_path, "w", compression=zipfile.ZIP_DEFLATED) as zipf: + for item in sorted(source_dir.rglob("*")): + if item.is_dir(): + continue + arcname = item.relative_to(source_dir).as_posix() + zip_info = zipfile.ZipInfo(arcname) + zip_info.external_attr = (item.stat().st_mode & 0xFFFF) << 16 + with item.open("rb") as handle: + zipf.writestr(zip_info, handle.read()) + + def _generate_cli_sbom(self, runtime: str, publish_dir: pathlib.Path) -> Optional[Mapping[str, Any]]: + if self.dry_run: + return None + sbom_dir = ensure_directory(self.sboms_dir / "cli") + sbom_path = sbom_dir / f"cli-{runtime}.cyclonedx.json" + run([ + "syft", + f"dir:{publish_dir}", + "--output", + f"cyclonedx-json={sbom_path}", + ]) + entry = OrderedDict(( + ("path", self._relative_path(sbom_path)), + ("sha256", compute_sha256(sbom_path)), + )) + signature_info = self._sign_file(sbom_path) + if signature_info: + entry["signature"] = signature_info + return entry + + def _sign_file(self, path: pathlib.Path) -> Optional[Mapping[str, Any]]: + if self.skip_signing: + return None + if not (self.cosign_key_ref or self.cosign_identity_token): + raise ValueError( + "Signing requested but no cosign key or identity token provided. Use --skip-signing to bypass." 
+ ) + signature_path = path.with_suffix(path.suffix + ".sig") + sha_path = path.with_suffix(path.suffix + ".sha256") + digest = compute_sha256(path) + sha_path.write_text(f"{digest} {path.name}\n", encoding="utf-8") + cmd = ["cosign", "sign-blob", "--yes", str(path)] + if self.cosign_key_ref: + cmd.extend(["--key", self.cosign_key_ref]) + if self.cosign_identity_token: + cmd.extend(["--identity-token", self.cosign_identity_token]) + if not self.tlog_upload: + cmd.append("--tlog-upload=false") + signature_data = run(cmd, env=self.cosign_env).strip() + signature_path.write_text(signature_data + "\n", encoding="utf-8") + return OrderedDict(( + ("path", self._relative_path(signature_path)), + ("sha256", compute_sha256(signature_path)), + ("tlogUploaded", self.tlog_upload), + )) + + def _extract_debug_entries(self, component_name: str, image_ref: str) -> List[OrderedDict[str, Any]]: + if self.dry_run: + return [] + entries: List[OrderedDict[str, Any]] = [] + platforms = self.platforms if self.push else [None] + for platform in platforms: + platform_label = platform or (self.platforms[0] if self.platforms else "linux/amd64") + if self.push: + pull_cmd = ["docker", "pull"] + if platform: + pull_cmd.extend(["--platform", platform]) + pull_cmd.append(image_ref) + run(pull_cmd) + create_cmd = ["docker", "create"] + if platform: + create_cmd.extend(["--platform", platform]) + create_cmd.append(image_ref) + container_id = run(create_cmd).strip() + export_path = self.temp_dir / f"{container_id}.tar" + try: + run(["docker", "export", container_id, "-o", str(export_path)], capture=False) + finally: + run(["docker", "rm", container_id], capture=False) + rootfs_dir = ensure_directory(self.temp_dir / f"{component_name}-{platform_label}-{uuid.uuid4().hex}") + try: + with tarfile.open(export_path, "r:*") as tar: + self._safe_extract_tar(tar, rootfs_dir) + finally: + export_path.unlink(missing_ok=True) + try: + for file_path in rootfs_dir.rglob("*"): + if not file_path.is_file() or file_path.is_symlink(): + continue + if not self._is_elf(file_path): + continue + build_id, machine = self._read_build_id_and_machine(file_path) + if not build_id: + continue + debug_file = self._debug_file_for_build_id(build_id) + if not debug_file.exists(): + debug_file.parent.mkdir(parents=True, exist_ok=True) + temp_debug = self.temp_dir / f"{build_id}.debug" + with contextlib.suppress(FileNotFoundError): + temp_debug.unlink() + objcopy_tool = self._resolve_objcopy_tool(machine) + if not objcopy_tool: + self._emit_objcopy_warning(machine, platform_label, file_path) + with contextlib.suppress(FileNotFoundError): + temp_debug.unlink() + continue + try: + run([objcopy_tool, "--only-keep-debug", str(file_path), str(temp_debug)], capture=False) + except CommandError: + with contextlib.suppress(FileNotFoundError): + temp_debug.unlink() + continue + debug_file.parent.mkdir(parents=True, exist_ok=True) + shutil.move(str(temp_debug), str(debug_file)) + sha = compute_sha256(debug_file) + rel_debug = debug_file.relative_to(self.output_dir).as_posix() + source_rel = file_path.relative_to(rootfs_dir).as_posix() + entry = OrderedDict(( + ("component", component_name), + ("image", image_ref), + ("platform", platform_label), + ("buildId", build_id), + ("debugPath", rel_debug), + ("sha256", sha), + ("size", debug_file.stat().st_size), + ("sources", [source_rel]), + )) + entries.append(entry) + finally: + shutil.rmtree(rootfs_dir, ignore_errors=True) + return entries + + def _debug_file_for_build_id(self, build_id: str) -> pathlib.Path: + 
normalized = build_id.lower() + prefix = normalized[:2] + remainder = normalized[2:] + return self.debug_store_dir / prefix / f"{remainder}.debug" + + @staticmethod + def _safe_extract_tar(tar: tarfile.TarFile, dest: pathlib.Path) -> None: + dest_root = dest.resolve() + members = tar.getmembers() + for member in members: + member_path = (dest / member.name).resolve() + if not str(member_path).startswith(str(dest_root)): + raise RuntimeError(f"Refusing to extract '{member.name}' outside of destination directory") + tar.extractall(dest) + + @staticmethod + def _is_elf(path: pathlib.Path) -> bool: + try: + with path.open("rb") as handle: + return handle.read(4) == b"\x7fELF" + except OSError: + return False + + def _read_build_id_and_machine(self, path: pathlib.Path) -> Tuple[Optional[str], Optional[str]]: + try: + header_output = run(["readelf", "-nh", str(path)]) + except CommandError: + return None, None + build_id: Optional[str] = None + machine: Optional[str] = None + for line in header_output.splitlines(): + stripped = line.strip() + if stripped.startswith("Build ID:"): + build_id = stripped.split("Build ID:", 1)[1].strip().lower() + elif stripped.startswith("Machine:"): + machine = stripped.split("Machine:", 1)[1].strip() + return build_id, machine + + def _resolve_objcopy_tool(self, machine: Optional[str]) -> Optional[str]: + key = (machine or "generic").lower() + if key in self._objcopy_cache: + return self._objcopy_cache[key] + + env_override = None + if machine and "aarch64" in machine.lower(): + env_override = os.environ.get("STELLAOPS_OBJCOPY_AARCH64") + candidates = [ + env_override, + "aarch64-linux-gnu-objcopy", + "llvm-objcopy", + "objcopy", + ] + elif machine and any(token in machine.lower() for token in ("x86-64", "amd", "x86_64")): + env_override = os.environ.get("STELLAOPS_OBJCOPY_AMD64") + candidates = [ + env_override, + "objcopy", + "llvm-objcopy", + ] + else: + env_override = os.environ.get("STELLAOPS_OBJCOPY_DEFAULT") + candidates = [ + env_override, + "objcopy", + "llvm-objcopy", + ] + + for candidate in candidates: + if not candidate: + continue + tool = shutil.which(candidate) + if tool: + self._objcopy_cache[key] = tool + return tool + self._objcopy_cache[key] = None + return None + + def _emit_objcopy_warning(self, machine: Optional[str], platform: str, file_path: pathlib.Path) -> None: + machine_label = machine or "unknown-machine" + count = self._missing_symbol_platforms.get(platform, 0) + self._missing_symbol_platforms[platform] = count + 1 + if count == 0: + sys.stderr.write( + f"[warn] no objcopy tool available for {machine_label}; skipping debug extraction for {file_path}.\n" + ) + + # ---------------- + # Helm + compose + # ---------------- + def _package_helm(self) -> Optional[Mapping[str, Any]]: + helm_cfg = self.config.get("helm") + if not helm_cfg: + return None + chart_path = helm_cfg.get("chartPath") + if not chart_path: + return None + chart_dir = self.repo_root / chart_path + output_dir = ensure_directory(self.output_dir / "helm") + archive_path = output_dir / f"stellaops-{self.version}.tgz" + if not self.dry_run: + cmd = [ + "helm", + "package", + str(chart_dir), + "--destination", + str(output_dir), + "--version", + self.version, + "--app-version", + self.version, + ] + run(cmd) + packaged = next(output_dir.glob("*.tgz"), None) + if packaged and packaged != archive_path: + packaged.rename(archive_path) + digest = compute_sha256(archive_path) if archive_path.exists() else None + if archive_path.exists() and 
archive_path.is_relative_to(self.output_dir):
+            manifest_path = str(archive_path.relative_to(self.output_dir))
+        elif archive_path.exists() and archive_path.is_relative_to(self.output_dir.parent):
+            manifest_path = str(archive_path.relative_to(self.output_dir.parent))
+        else:
+            manifest_path = f"helm/{archive_path.name}"
+        return OrderedDict((
+            ("name", "stellaops"),
+            ("version", self.version),
+            ("path", manifest_path),
+            ("sha256", digest),
+        ))
+
+    def _digest_compose_files(self) -> List[Mapping[str, Any]]:
+        compose_cfg = self.config.get("compose", {})
+        files = compose_cfg.get("files", [])
+        entries: List[Mapping[str, Any]] = []
+        for rel_path in files:
+            src = self.repo_root / rel_path
+            if not src.exists():
+                continue
+            digest = compute_sha256(src)
+            entries.append(OrderedDict((
+                ("name", pathlib.Path(rel_path).name),
+                ("path", rel_path),
+                ("sha256", digest),
+            )))
+        return entries
+
+    # ----------------
+    # Manifest assembly
+    # ----------------
+    def _compose_manifest(
+        self,
+        components: List[Mapping[str, Any]],
+        helm_meta: Optional[Mapping[str, Any]],
+        compose_meta: List[Mapping[str, Any]],
+        debug_meta: Optional[Mapping[str, Any]],
+        cli_meta: Sequence[Mapping[str, Any]],
+    ) -> Dict[str, Any]:
+        manifest = OrderedDict()
+        manifest["release"] = OrderedDict((
+            ("version", self.version),
+            ("channel", self.channel),
+            ("date", self.release_date),
+            ("calendar", self.calendar),
+        ))
+        manifest["components"] = components
+        if helm_meta:
+            manifest["charts"] = [helm_meta]
+        if compose_meta:
+            manifest["compose"] = compose_meta
+        if debug_meta:
+            manifest["debugStore"] = debug_meta
+        if cli_meta:
+            manifest["cli"] = list(cli_meta)
+        return manifest
+
+
+def parse_args(argv: Optional[Sequence[str]] = None) -> argparse.Namespace:
+    parser = argparse.ArgumentParser(description="Build StellaOps release artefacts deterministically")
+    parser.add_argument("--config", type=pathlib.Path, default=DEFAULT_CONFIG, help="Path to release config JSON")
+    parser.add_argument("--version", required=True, help="Release version string (e.g. 2025.10.0-edge)")
+    parser.add_argument("--channel", required=True, help="Release channel (edge|stable|lts)")
+    parser.add_argument("--calendar", help="Calendar tag (YYYY.MM); defaults derived from version")
+    parser.add_argument("--git-sha", default=os.environ.get("GIT_COMMIT", "unknown"), help="Git revision to embed")
+    parser.add_argument("--output", type=pathlib.Path, default=REPO_ROOT / "out/release", help="Output directory for artefacts")
+    parser.add_argument("--no-push", action="store_true", help="Do not push images (use docker load)")
+    parser.add_argument("--dry-run", action="store_true", help="Print steps without executing commands")
+    parser.add_argument("--registry", help="Override registry root (e.g. localhost:5000/stellaops)")
+    parser.add_argument("--platform", dest="platforms", action="append", metavar="PLATFORM", help="Override build platforms (repeatable)")
+    parser.add_argument("--skip-signing", action="store_true", help="Skip cosign signing/attestation steps")
+    parser.add_argument("--cosign-key", dest="cosign_key", help="Override COSIGN_KEY_REF value")
+    parser.add_argument("--cosign-password", dest="cosign_password", help="Password for cosign key")
+    parser.add_argument("--cosign-identity-token", dest="cosign_identity_token", help="Identity token for keyless cosign flows")
+    parser.add_argument("--no-transparency", action="store_true", help="Disable Rekor transparency log upload during signing")
+    return parser.parse_args(argv)
+
+
+def write_manifest(manifest: Mapping[str, Any], output_dir: pathlib.Path) -> pathlib.Path:
+    # Copy manifest to avoid mutating input when computing checksum
+    base_manifest = OrderedDict(manifest)
+    yaml_without_checksum = dump_yaml(base_manifest)
+    digest = hashlib.sha256(yaml_without_checksum.encode("utf-8")).hexdigest()
+    manifest_with_checksum = OrderedDict(base_manifest)
+    manifest_with_checksum["checksums"] = OrderedDict((("sha256", digest),))
+    final_yaml = dump_yaml(manifest_with_checksum)
+    output_path = output_dir / "release.yaml"
+    with output_path.open("w", encoding="utf-8") as handle:
+        handle.write(final_yaml)
+    sha_path = output_path.with_name(output_path.name + ".sha256")
+    yaml_file_digest = compute_sha256(output_path)
+    sha_path.write_text(f"{yaml_file_digest} {output_path.name}\n", encoding="utf-8")
+
+    json_text = json.dumps(manifest_with_checksum, indent=2)
+    json_path = output_dir / "release.json"
+    with json_path.open("w", encoding="utf-8") as handle:
+        handle.write(json_text)
+        handle.write("\n")
+    json_digest = compute_sha256(json_path)
+    json_sha_path = json_path.with_name(json_path.name + ".sha256")
+    json_sha_path.write_text(f"{json_digest} {json_path.name}\n", encoding="utf-8")
+    return output_path
+
+
+def main(argv: Optional[Sequence[str]] = None) -> int:
+    args = parse_args(argv)
+    config = load_json_config(args.config)
+    release_date = utc_now_iso()
+    calendar = sanitize_calendar(args.version, args.calendar)
+    builder = ReleaseBuilder(
+        repo_root=REPO_ROOT,
+        config=config,
+        version=args.version,
+        channel=args.channel,
+        calendar=calendar,
+        release_date=release_date,
+        git_sha=args.git_sha,
+        output_dir=args.output,
+        push=not args.no_push,
+        dry_run=args.dry_run,
+        registry_override=args.registry,
+        platforms_override=args.platforms,
+        skip_signing=args.skip_signing,
+        cosign_key_ref=args.cosign_key,
+        cosign_password=args.cosign_password,
+        cosign_identity_token=args.cosign_identity_token,
+        tlog_upload=not args.no_transparency,
+    )
+    manifest = builder.run()
+    manifest_path = write_manifest(manifest, builder.output_dir)
+    print(f"✅ Release manifest written to {manifest_path}")
+    return 0
+
+
+if __name__ == "__main__":
+    raise SystemExit(main())
diff --git a/ops/devops/release/components.json b/ops/devops/release/components.json
index ad4c1f37..3993c9b0 100644
--- a/ops/devops/release/components.json
+++ b/ops/devops/release/components.json
@@ -15,7 +15,7 @@
       "kind": "dotnet-service",
       "context": ".",
       "dockerfile": "ops/devops/release/docker/Dockerfile.dotnet-service",
-      "project": "src/StellaOps.Authority/StellaOps.Authority/StellaOps.Authority.csproj",
+      "project": "src/Authority/StellaOps.Authority/StellaOps.Authority/StellaOps.Authority.csproj",
       "entrypoint": "StellaOps.Authority.dll"
     },
     {
@@ -24,7 +24,7 @@
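
For orientation, a usage sketch of the build_release entry point defined above. The flag values are placeholders; --dry-run and --skip-signing keep docker, cosign, and helm out of the loop, though main() still writes release.yaml/release.json under --output.

# Illustrative driver for build_release.main(); values are placeholders.
from build_release import main

exit_code = main([
    "--version", "2025.10.0-edge",
    "--channel", "edge",
    "--registry", "localhost:5000/stellaops",  # mirrors the --registry help text above
    "--no-push",
    "--skip-signing",
    "--dry-run",
])
print("build_release exited with", exit_code)
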
"kind": "dotnet-service", "context": ".", "dockerfile": "ops/devops/release/docker/Dockerfile.dotnet-service", - "project": "src/StellaOps.Signer/StellaOps.Signer.WebService/StellaOps.Signer.WebService.csproj", + "project": "src/Signer/StellaOps.Signer/StellaOps.Signer.WebService/StellaOps.Signer.WebService.csproj", "entrypoint": "StellaOps.Signer.WebService.dll" }, { @@ -33,7 +33,7 @@ "kind": "dotnet-service", "context": ".", "dockerfile": "ops/devops/release/docker/Dockerfile.dotnet-service", - "project": "src/StellaOps.Attestor/StellaOps.Attestor.WebService/StellaOps.Attestor.WebService.csproj", + "project": "src/Attestor/StellaOps.Attestor/StellaOps.Attestor.WebService/StellaOps.Attestor.WebService.csproj", "entrypoint": "StellaOps.Attestor.WebService.dll" }, { @@ -42,7 +42,7 @@ "kind": "dotnet-service", "context": ".", "dockerfile": "ops/devops/release/docker/Dockerfile.dotnet-service", - "project": "src/StellaOps.Scanner.WebService/StellaOps.Scanner.WebService.csproj", + "project": "src/Scanner/StellaOps.Scanner.WebService/StellaOps.Scanner.WebService.csproj", "entrypoint": "StellaOps.Scanner.WebService.dll" }, { @@ -51,7 +51,7 @@ "kind": "dotnet-service", "context": ".", "dockerfile": "ops/devops/release/docker/Dockerfile.dotnet-service", - "project": "src/StellaOps.Scanner.Worker/StellaOps.Scanner.Worker.csproj", + "project": "src/Scanner/StellaOps.Scanner.Worker/StellaOps.Scanner.Worker.csproj", "entrypoint": "StellaOps.Scanner.Worker.dll" }, { @@ -60,7 +60,7 @@ "kind": "dotnet-service", "context": ".", "dockerfile": "ops/devops/release/docker/Dockerfile.dotnet-service", - "project": "src/StellaOps.Concelier.WebService/StellaOps.Concelier.WebService.csproj", + "project": "src/Concelier/StellaOps.Concelier.WebService/StellaOps.Concelier.WebService.csproj", "entrypoint": "StellaOps.Concelier.WebService.dll" }, { @@ -69,7 +69,7 @@ "kind": "dotnet-service", "context": ".", "dockerfile": "ops/devops/release/docker/Dockerfile.dotnet-service", - "project": "src/StellaOps.Excititor.WebService/StellaOps.Excititor.WebService.csproj", + "project": "src/Excititor/StellaOps.Excititor.WebService/StellaOps.Excititor.WebService.csproj", "entrypoint": "StellaOps.Excititor.WebService.dll" }, { @@ -81,7 +81,7 @@ } ], "cli": { - "project": "src/StellaOps.Cli/StellaOps.Cli.csproj", + "project": "src/Cli/StellaOps.Cli/StellaOps.Cli.csproj", "runtimes": [ "linux-x64", "linux-arm64", @@ -104,6 +104,6 @@ ] }, "buildxPlugin": { - "project": "src/StellaOps.Scanner.Sbomer.BuildXPlugin/StellaOps.Scanner.Sbomer.BuildXPlugin.csproj" + "project": "src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/StellaOps.Scanner.Sbomer.BuildXPlugin.csproj" } } diff --git a/ops/devops/release/docker/Dockerfile.angular-ui b/ops/devops/release/docker/Dockerfile.angular-ui index 080c590f..11c1523f 100644 --- a/ops/devops/release/docker/Dockerfile.angular-ui +++ b/ops/devops/release/docker/Dockerfile.angular-ui @@ -11,9 +11,9 @@ FROM ${NODE_IMAGE} AS build WORKDIR /workspace ENV CI=1 \ SOURCE_DATE_EPOCH=${SOURCE_DATE_EPOCH} -COPY src/StellaOps.Web/package.json src/StellaOps.Web/package-lock.json ./ +COPY src/Web/StellaOps.Web/package.json src/Web/StellaOps.Web/package-lock.json ./ RUN npm ci --prefer-offline --no-audit --no-fund -COPY src/StellaOps.Web/ ./ +COPY src/Web/StellaOps.Web/ ./ RUN npm run build -- --configuration=production FROM ${NGINX_IMAGE} AS runtime diff --git a/ops/devops/release/docker/Dockerfile.dotnet-service b/ops/devops/release/docker/Dockerfile.dotnet-service index 19c5fc57..9587fb89 100644 --- 
a/ops/devops/release/docker/Dockerfile.dotnet-service +++ b/ops/devops/release/docker/Dockerfile.dotnet-service @@ -1,52 +1,52 @@ -# syntax=docker/dockerfile:1.7-labs - -ARG SDK_IMAGE=mcr.microsoft.com/dotnet/nightly/sdk:10.0 -ARG RUNTIME_IMAGE=gcr.io/distroless/dotnet/aspnet:latest - -ARG PROJECT -ARG ENTRYPOINT_DLL -ARG VERSION=0.0.0 -ARG CHANNEL=dev -ARG GIT_SHA=0000000 -ARG SOURCE_DATE_EPOCH=0 - -FROM ${SDK_IMAGE} AS build -ARG PROJECT -ARG GIT_SHA -ARG SOURCE_DATE_EPOCH -WORKDIR /src -ENV DOTNET_CLI_TELEMETRY_OPTOUT=1 \ - DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1 \ - NUGET_XMLDOC_MODE=skip \ - SOURCE_DATE_EPOCH=${SOURCE_DATE_EPOCH} -COPY . . -RUN --mount=type=cache,target=/root/.nuget/packages \ - dotnet restore "${PROJECT}" -RUN --mount=type=cache,target=/root/.nuget/packages \ - dotnet publish "${PROJECT}" \ - -c Release \ - -o /app/publish \ - /p:UseAppHost=false \ - /p:ContinuousIntegrationBuild=true \ - /p:SourceRevisionId=${GIT_SHA} \ - /p:Deterministic=true \ - /p:TreatWarningsAsErrors=true - -FROM ${RUNTIME_IMAGE} AS runtime -WORKDIR /app -ARG ENTRYPOINT_DLL -ARG VERSION -ARG CHANNEL -ARG GIT_SHA -ENV DOTNET_EnableDiagnostics=0 \ - ASPNETCORE_URLS=http://0.0.0.0:8080 -COPY --from=build /app/publish/ ./ -RUN set -eu; \ - printf '#!/usr/bin/env sh\nset -e\nexec dotnet %s "$@"\n' "${ENTRYPOINT_DLL}" > /entrypoint.sh; \ - chmod +x /entrypoint.sh -EXPOSE 8080 -LABEL org.opencontainers.image.version="${VERSION}" \ - org.opencontainers.image.revision="${GIT_SHA}" \ - org.opencontainers.image.source="https://git.stella-ops.org/stella-ops/feedser" \ - org.stellaops.release.channel="${CHANNEL}" -ENTRYPOINT ["/entrypoint.sh"] +# syntax=docker/dockerfile:1.7-labs + +ARG SDK_IMAGE=mcr.microsoft.com/dotnet/nightly/sdk:10.0 +ARG RUNTIME_IMAGE=gcr.io/distroless/dotnet/aspnet:latest + +ARG PROJECT +ARG ENTRYPOINT_DLL +ARG VERSION=0.0.0 +ARG CHANNEL=dev +ARG GIT_SHA=0000000 +ARG SOURCE_DATE_EPOCH=0 + +FROM ${SDK_IMAGE} AS build +ARG PROJECT +ARG GIT_SHA +ARG SOURCE_DATE_EPOCH +WORKDIR /src +ENV DOTNET_CLI_TELEMETRY_OPTOUT=1 \ + DOTNET_SKIP_FIRST_TIME_EXPERIENCE=1 \ + NUGET_XMLDOC_MODE=skip \ + SOURCE_DATE_EPOCH=${SOURCE_DATE_EPOCH} +COPY . . 
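
As an aside, a hedged Python sketch (not part of this patch) of how a caller might assemble the build arguments this Dockerfile declares — PROJECT, ENTRYPOINT_DLL, VERSION, CHANNEL, GIT_SHA, SOURCE_DATE_EPOCH — for a manual buildx run. The helper name and the choice of deriving SOURCE_DATE_EPOCH from the commit timestamp are assumptions.

# Hypothetical helper: build the docker buildx command line for one service,
# wiring the ARGs declared in Dockerfile.dotnet-service above.
import subprocess
from typing import List

def buildx_command(project: str, entrypoint_dll: str, version: str, channel: str) -> List[str]:
    git_sha = subprocess.run(
        ["git", "rev-parse", "--short", "HEAD"], capture_output=True, text=True, check=True
    ).stdout.strip()
    # Commit timestamp keeps rebuilds of the same commit reproducible (assumption).
    epoch = subprocess.run(
        ["git", "log", "-1", "--pretty=%ct"], capture_output=True, text=True, check=True
    ).stdout.strip()
    return [
        "docker", "buildx", "build",
        "--file", "ops/devops/release/docker/Dockerfile.dotnet-service",
        "--build-arg", f"PROJECT={project}",
        "--build-arg", f"ENTRYPOINT_DLL={entrypoint_dll}",
        "--build-arg", f"VERSION={version}",
        "--build-arg", f"CHANNEL={channel}",
        "--build-arg", f"GIT_SHA={git_sha}",
        "--build-arg", f"SOURCE_DATE_EPOCH={epoch}",
        "--load",
        ".",
    ]

# e.g. buildx_command("src/Authority/StellaOps.Authority/StellaOps.Authority/StellaOps.Authority.csproj",
#                     "StellaOps.Authority.dll", "2025.10.0-edge", "edge")
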
+RUN --mount=type=cache,target=/root/.nuget/packages \ + dotnet restore "${PROJECT}" +RUN --mount=type=cache,target=/root/.nuget/packages \ + dotnet publish "${PROJECT}" \ + -c Release \ + -o /app/publish \ + /p:UseAppHost=false \ + /p:ContinuousIntegrationBuild=true \ + /p:SourceRevisionId=${GIT_SHA} \ + /p:Deterministic=true \ + /p:TreatWarningsAsErrors=true + +FROM ${RUNTIME_IMAGE} AS runtime +WORKDIR /app +ARG ENTRYPOINT_DLL +ARG VERSION +ARG CHANNEL +ARG GIT_SHA +ENV DOTNET_EnableDiagnostics=0 \ + ASPNETCORE_URLS=http://0.0.0.0:8080 +COPY --from=build /app/publish/ ./ +RUN set -eu; \ + printf '#!/usr/bin/env sh\nset -e\nexec dotnet %s "$@"\n' "${ENTRYPOINT_DLL}" > /entrypoint.sh; \ + chmod +x /entrypoint.sh +EXPOSE 8080 +LABEL org.opencontainers.image.version="${VERSION}" \ + org.opencontainers.image.revision="${GIT_SHA}" \ + org.opencontainers.image.source="https://git.stella-ops.org/stella-ops/feedser" \ + org.stellaops.release.channel="${CHANNEL}" +ENTRYPOINT ["/entrypoint.sh"] diff --git a/ops/devops/release/docker/nginx-default.conf b/ops/devops/release/docker/nginx-default.conf index 14d6f071..7b75ce62 100644 --- a/ops/devops/release/docker/nginx-default.conf +++ b/ops/devops/release/docker/nginx-default.conf @@ -1,22 +1,22 @@ -server { - listen 8080; - listen [::]:8080; - server_name _; - - root /usr/share/nginx/html; - index index.html; - - location / { - try_files $uri $uri/ /index.html; - } - - location ~* \.(?:js|css|svg|png|jpg|jpeg|gif|ico|woff2?)$ { - add_header Cache-Control "public, max-age=2592000"; - } - - location = /healthz { - access_log off; - add_header Content-Type text/plain; - return 200 'ok'; - } -} +server { + listen 8080; + listen [::]:8080; + server_name _; + + root /usr/share/nginx/html; + index index.html; + + location / { + try_files $uri $uri/ /index.html; + } + + location ~* \.(?:js|css|svg|png|jpg|jpeg|gif|ico|woff2?)$ { + add_header Cache-Control "public, max-age=2592000"; + } + + location = /healthz { + access_log off; + add_header Content-Type text/plain; + return 200 'ok'; + } +} diff --git a/ops/devops/release/test_verify_release.py b/ops/devops/release/test_verify_release.py index 884a2c02..286425e6 100644 --- a/ops/devops/release/test_verify_release.py +++ b/ops/devops/release/test_verify_release.py @@ -1,232 +1,232 @@ -from __future__ import annotations - -import json -import tempfile -import unittest -from collections import OrderedDict -from pathlib import Path -import sys - -sys.path.append(str(Path(__file__).resolve().parent)) - -from build_release import write_manifest # type: ignore import-not-found -from verify_release import VerificationError, compute_sha256, verify_release - - -class VerifyReleaseTests(unittest.TestCase): - def setUp(self) -> None: - self._temp = tempfile.TemporaryDirectory() - self.base_path = Path(self._temp.name) - self.out_dir = self.base_path / "out" - self.release_dir = self.out_dir / "release" - self.release_dir.mkdir(parents=True, exist_ok=True) - - def tearDown(self) -> None: - self._temp.cleanup() - - def _relative_to_out(self, path: Path) -> str: - return path.relative_to(self.out_dir).as_posix() - - def _write_json(self, path: Path, payload: dict[str, object]) -> None: - path.parent.mkdir(parents=True, exist_ok=True) - with path.open("w", encoding="utf-8") as handle: - json.dump(payload, handle, indent=2) - handle.write("\n") - - def _create_sample_release(self) -> None: - sbom_path = self.release_dir / "artifacts/sboms/sample.cyclonedx.json" - sbom_path.parent.mkdir(parents=True, exist_ok=True) - 
sbom_path.write_text('{"bomFormat":"CycloneDX","specVersion":"1.5"}\n', encoding="utf-8") - sbom_sha = compute_sha256(sbom_path) - - provenance_path = self.release_dir / "artifacts/provenance/sample.provenance.json" - self._write_json( - provenance_path, - { - "buildDefinition": {"buildType": "https://example/build", "externalParameters": {}}, - "runDetails": {"builder": {"id": "https://example/ci"}}, - }, - ) - provenance_sha = compute_sha256(provenance_path) - - signature_path = self.release_dir / "artifacts/signatures/sample.signature" - signature_path.parent.mkdir(parents=True, exist_ok=True) - signature_path.write_text("signature-data\n", encoding="utf-8") - signature_sha = compute_sha256(signature_path) - - metadata_path = self.release_dir / "artifacts/metadata/sample.metadata.json" - self._write_json(metadata_path, {"digest": "sha256:1234"}) - metadata_sha = compute_sha256(metadata_path) - - chart_path = self.release_dir / "helm/stellaops-1.0.0.tgz" - chart_path.parent.mkdir(parents=True, exist_ok=True) - chart_path.write_bytes(b"helm-chart-data") - chart_sha = compute_sha256(chart_path) - - compose_path = self.release_dir.parent / "deploy/compose/docker-compose.dev.yaml" - compose_path.parent.mkdir(parents=True, exist_ok=True) - compose_path.write_text("services: {}\n", encoding="utf-8") - compose_sha = compute_sha256(compose_path) - - debug_file = self.release_dir / "debug/.build-id/ab/cdef.debug" - debug_file.parent.mkdir(parents=True, exist_ok=True) - debug_file.write_bytes(b"\x7fELFDEBUGDATA") - debug_sha = compute_sha256(debug_file) - - debug_manifest_path = self.release_dir / "debug/debug-manifest.json" - debug_manifest = OrderedDict( - ( - ("generatedAt", "2025-10-26T00:00:00Z"), - ("version", "1.0.0"), - ("channel", "edge"), - ( - "artifacts", - [ - OrderedDict( - ( - ("buildId", "abcdef1234"), - ("platform", "linux/amd64"), - ("debugPath", "debug/.build-id/ab/cdef.debug"), - ("sha256", debug_sha), - ("size", debug_file.stat().st_size), - ("components", ["sample"]), - ("images", ["registry.example/sample@sha256:feedface"]), - ("sources", ["app/sample.dll"]), - ) - ) - ], - ), - ) - ) - self._write_json(debug_manifest_path, debug_manifest) - debug_manifest_sha = compute_sha256(debug_manifest_path) - (debug_manifest_path.with_suffix(debug_manifest_path.suffix + ".sha256")).write_text( - f"{debug_manifest_sha} {debug_manifest_path.name}\n", encoding="utf-8" - ) - - manifest = OrderedDict( - ( - ( - "release", - OrderedDict( - ( - ("version", "1.0.0"), - ("channel", "edge"), - ("date", "2025-10-26T00:00:00Z"), - ("calendar", "2025.10"), - ) - ), - ), - ( - "components", - [ - OrderedDict( - ( - ("name", "sample"), - ("image", "registry.example/sample@sha256:feedface"), - ("tags", ["registry.example/sample:1.0.0"]), - ( - "sbom", - OrderedDict( - ( - ("path", self._relative_to_out(sbom_path)), - ("sha256", sbom_sha), - ) - ), - ), - ( - "provenance", - OrderedDict( - ( - ("path", self._relative_to_out(provenance_path)), - ("sha256", provenance_sha), - ) - ), - ), - ( - "signature", - OrderedDict( - ( - ("path", self._relative_to_out(signature_path)), - ("sha256", signature_sha), - ("ref", "sigstore://example"), - ("tlogUploaded", True), - ) - ), - ), - ( - "metadata", - OrderedDict( - ( - ("path", self._relative_to_out(metadata_path)), - ("sha256", metadata_sha), - ) - ), - ), - ) - ) - ], - ), - ( - "charts", - [ - OrderedDict( - ( - ("name", "stellaops"), - ("version", "1.0.0"), - ("path", self._relative_to_out(chart_path)), - ("sha256", chart_sha), - ) - ) - ], - ), - ( - 
"compose", - [ - OrderedDict( - ( - ("name", "docker-compose.dev.yaml"), - ("path", compose_path.relative_to(self.out_dir).as_posix()), - ("sha256", compose_sha), - ) - ) - ], - ), - ( - "debugStore", - OrderedDict( - ( - ("manifest", "debug/debug-manifest.json"), - ("sha256", debug_manifest_sha), - ("entries", 1), - ("platforms", ["linux/amd64"]), - ("directory", "debug/.build-id"), - ) - ), - ), - ) - ) - write_manifest(manifest, self.release_dir) - - def test_verify_release_success(self) -> None: - self._create_sample_release() - # Should not raise - verify_release(self.release_dir) - - def test_verify_release_detects_sha_mismatch(self) -> None: - self._create_sample_release() - tampered = self.release_dir / "artifacts/sboms/sample.cyclonedx.json" - tampered.write_text("tampered\n", encoding="utf-8") - with self.assertRaises(VerificationError): - verify_release(self.release_dir) - - def test_verify_release_detects_missing_debug_file(self) -> None: - self._create_sample_release() - debug_file = self.release_dir / "debug/.build-id/ab/cdef.debug" - debug_file.unlink() - with self.assertRaises(VerificationError): - verify_release(self.release_dir) - - -if __name__ == "__main__": - unittest.main() +from __future__ import annotations + +import json +import tempfile +import unittest +from collections import OrderedDict +from pathlib import Path +import sys + +sys.path.append(str(Path(__file__).resolve().parent)) + +from build_release import write_manifest # type: ignore import-not-found +from verify_release import VerificationError, compute_sha256, verify_release + + +class VerifyReleaseTests(unittest.TestCase): + def setUp(self) -> None: + self._temp = tempfile.TemporaryDirectory() + self.base_path = Path(self._temp.name) + self.out_dir = self.base_path / "out" + self.release_dir = self.out_dir / "release" + self.release_dir.mkdir(parents=True, exist_ok=True) + + def tearDown(self) -> None: + self._temp.cleanup() + + def _relative_to_out(self, path: Path) -> str: + return path.relative_to(self.out_dir).as_posix() + + def _write_json(self, path: Path, payload: dict[str, object]) -> None: + path.parent.mkdir(parents=True, exist_ok=True) + with path.open("w", encoding="utf-8") as handle: + json.dump(payload, handle, indent=2) + handle.write("\n") + + def _create_sample_release(self) -> None: + sbom_path = self.release_dir / "artifacts/sboms/sample.cyclonedx.json" + sbom_path.parent.mkdir(parents=True, exist_ok=True) + sbom_path.write_text('{"bomFormat":"CycloneDX","specVersion":"1.5"}\n', encoding="utf-8") + sbom_sha = compute_sha256(sbom_path) + + provenance_path = self.release_dir / "artifacts/provenance/sample.provenance.json" + self._write_json( + provenance_path, + { + "buildDefinition": {"buildType": "https://example/build", "externalParameters": {}}, + "runDetails": {"builder": {"id": "https://example/ci"}}, + }, + ) + provenance_sha = compute_sha256(provenance_path) + + signature_path = self.release_dir / "artifacts/signatures/sample.signature" + signature_path.parent.mkdir(parents=True, exist_ok=True) + signature_path.write_text("signature-data\n", encoding="utf-8") + signature_sha = compute_sha256(signature_path) + + metadata_path = self.release_dir / "artifacts/metadata/sample.metadata.json" + self._write_json(metadata_path, {"digest": "sha256:1234"}) + metadata_sha = compute_sha256(metadata_path) + + chart_path = self.release_dir / "helm/stellaops-1.0.0.tgz" + chart_path.parent.mkdir(parents=True, exist_ok=True) + chart_path.write_bytes(b"helm-chart-data") + chart_sha = 
compute_sha256(chart_path) + + compose_path = self.release_dir.parent / "deploy/compose/docker-compose.dev.yaml" + compose_path.parent.mkdir(parents=True, exist_ok=True) + compose_path.write_text("services: {}\n", encoding="utf-8") + compose_sha = compute_sha256(compose_path) + + debug_file = self.release_dir / "debug/.build-id/ab/cdef.debug" + debug_file.parent.mkdir(parents=True, exist_ok=True) + debug_file.write_bytes(b"\x7fELFDEBUGDATA") + debug_sha = compute_sha256(debug_file) + + debug_manifest_path = self.release_dir / "debug/debug-manifest.json" + debug_manifest = OrderedDict( + ( + ("generatedAt", "2025-10-26T00:00:00Z"), + ("version", "1.0.0"), + ("channel", "edge"), + ( + "artifacts", + [ + OrderedDict( + ( + ("buildId", "abcdef1234"), + ("platform", "linux/amd64"), + ("debugPath", "debug/.build-id/ab/cdef.debug"), + ("sha256", debug_sha), + ("size", debug_file.stat().st_size), + ("components", ["sample"]), + ("images", ["registry.example/sample@sha256:feedface"]), + ("sources", ["app/sample.dll"]), + ) + ) + ], + ), + ) + ) + self._write_json(debug_manifest_path, debug_manifest) + debug_manifest_sha = compute_sha256(debug_manifest_path) + (debug_manifest_path.with_suffix(debug_manifest_path.suffix + ".sha256")).write_text( + f"{debug_manifest_sha} {debug_manifest_path.name}\n", encoding="utf-8" + ) + + manifest = OrderedDict( + ( + ( + "release", + OrderedDict( + ( + ("version", "1.0.0"), + ("channel", "edge"), + ("date", "2025-10-26T00:00:00Z"), + ("calendar", "2025.10"), + ) + ), + ), + ( + "components", + [ + OrderedDict( + ( + ("name", "sample"), + ("image", "registry.example/sample@sha256:feedface"), + ("tags", ["registry.example/sample:1.0.0"]), + ( + "sbom", + OrderedDict( + ( + ("path", self._relative_to_out(sbom_path)), + ("sha256", sbom_sha), + ) + ), + ), + ( + "provenance", + OrderedDict( + ( + ("path", self._relative_to_out(provenance_path)), + ("sha256", provenance_sha), + ) + ), + ), + ( + "signature", + OrderedDict( + ( + ("path", self._relative_to_out(signature_path)), + ("sha256", signature_sha), + ("ref", "sigstore://example"), + ("tlogUploaded", True), + ) + ), + ), + ( + "metadata", + OrderedDict( + ( + ("path", self._relative_to_out(metadata_path)), + ("sha256", metadata_sha), + ) + ), + ), + ) + ) + ], + ), + ( + "charts", + [ + OrderedDict( + ( + ("name", "stellaops"), + ("version", "1.0.0"), + ("path", self._relative_to_out(chart_path)), + ("sha256", chart_sha), + ) + ) + ], + ), + ( + "compose", + [ + OrderedDict( + ( + ("name", "docker-compose.dev.yaml"), + ("path", compose_path.relative_to(self.out_dir).as_posix()), + ("sha256", compose_sha), + ) + ) + ], + ), + ( + "debugStore", + OrderedDict( + ( + ("manifest", "debug/debug-manifest.json"), + ("sha256", debug_manifest_sha), + ("entries", 1), + ("platforms", ["linux/amd64"]), + ("directory", "debug/.build-id"), + ) + ), + ), + ) + ) + write_manifest(manifest, self.release_dir) + + def test_verify_release_success(self) -> None: + self._create_sample_release() + # Should not raise + verify_release(self.release_dir) + + def test_verify_release_detects_sha_mismatch(self) -> None: + self._create_sample_release() + tampered = self.release_dir / "artifacts/sboms/sample.cyclonedx.json" + tampered.write_text("tampered\n", encoding="utf-8") + with self.assertRaises(VerificationError): + verify_release(self.release_dir) + + def test_verify_release_detects_missing_debug_file(self) -> None: + self._create_sample_release() + debug_file = self.release_dir / "debug/.build-id/ab/cdef.debug" + debug_file.unlink() + 
with self.assertRaises(VerificationError): + verify_release(self.release_dir) + + +if __name__ == "__main__": + unittest.main() diff --git a/ops/devops/release/verify_release.py b/ops/devops/release/verify_release.py index 9c1881d2..f722173d 100644 --- a/ops/devops/release/verify_release.py +++ b/ops/devops/release/verify_release.py @@ -1,334 +1,334 @@ -#!/usr/bin/env python3 -"""Verify release artefacts (SBOMs, provenance, signatures, manifest hashes).""" - -from __future__ import annotations - -import argparse -import hashlib -import json -import pathlib -import sys -from collections import OrderedDict -from typing import Any, Mapping, Optional - -from build_release import dump_yaml # type: ignore import-not-found - - -class VerificationError(Exception): - """Raised when release artefacts fail verification.""" - - -def compute_sha256(path: pathlib.Path) -> str: - sha = hashlib.sha256() - with path.open("rb") as handle: - for chunk in iter(lambda: handle.read(1024 * 1024), b""): - sha.update(chunk) - return sha.hexdigest() - - -def parse_sha_file(path: pathlib.Path) -> Optional[str]: - if not path.exists(): - return None - content = path.read_text(encoding="utf-8").strip() - if not content: - return None - return content.split()[0] - - -def resolve_path(path_str: str, release_dir: pathlib.Path) -> pathlib.Path: - candidate = pathlib.Path(path_str.replace("\\", "/")) - if candidate.is_absolute(): - return candidate - - for base in (release_dir, release_dir.parent, release_dir.parent.parent): - resolved = (base / candidate).resolve() - if resolved.exists(): - return resolved - # Fall back to release_dir joined path even if missing to surface in caller. - return (release_dir / candidate).resolve() - - -def load_manifest(release_dir: pathlib.Path) -> OrderedDict[str, Any]: - manifest_path = release_dir / "release.json" - if not manifest_path.exists(): - raise VerificationError(f"Release manifest JSON missing at {manifest_path}") - try: - with manifest_path.open("r", encoding="utf-8") as handle: - return json.load(handle, object_pairs_hook=OrderedDict) - except json.JSONDecodeError as exc: - raise VerificationError(f"Failed to parse {manifest_path}: {exc}") from exc - - -def verify_manifest_hashes( - manifest: Mapping[str, Any], - release_dir: pathlib.Path, - errors: list[str], -) -> None: - yaml_path = release_dir / "release.yaml" - if not yaml_path.exists(): - errors.append(f"Missing release.yaml at {yaml_path}") - return - - recorded_yaml_sha = parse_sha_file(yaml_path.with_name(yaml_path.name + ".sha256")) - actual_yaml_sha = compute_sha256(yaml_path) - if recorded_yaml_sha and recorded_yaml_sha != actual_yaml_sha: - errors.append( - f"release.yaml.sha256 recorded {recorded_yaml_sha} but file hashes to {actual_yaml_sha}" - ) - - json_path = release_dir / "release.json" - recorded_json_sha = parse_sha_file(json_path.with_name(json_path.name + ".sha256")) - actual_json_sha = compute_sha256(json_path) - if recorded_json_sha and recorded_json_sha != actual_json_sha: - errors.append( - f"release.json.sha256 recorded {recorded_json_sha} but file hashes to {actual_json_sha}" - ) - - checksums = manifest.get("checksums") - if isinstance(checksums, Mapping): - recorded_digest = checksums.get("sha256") - base_manifest = OrderedDict(manifest) - base_manifest.pop("checksums", None) - yaml_without_checksums = dump_yaml(base_manifest) - computed_digest = hashlib.sha256(yaml_without_checksums.encode("utf-8")).hexdigest() - if recorded_digest != computed_digest: - errors.append( - "Manifest checksum 
mismatch: " - f"recorded {recorded_digest}, computed {computed_digest}" - ) - - -def verify_artifact_entry( - entry: Mapping[str, Any], - release_dir: pathlib.Path, - label: str, - component_name: str, - errors: list[str], -) -> None: - path_str = entry.get("path") - if not path_str: - errors.append(f"{component_name}: {label} missing 'path' field.") - return - resolved = resolve_path(str(path_str), release_dir) - if not resolved.exists(): - errors.append(f"{component_name}: {label} path does not exist → {resolved}") - return - recorded_sha = entry.get("sha256") - if recorded_sha: - actual_sha = compute_sha256(resolved) - if actual_sha != recorded_sha: - errors.append( - f"{component_name}: {label} SHA mismatch for {resolved} " - f"(recorded {recorded_sha}, computed {actual_sha})" - ) - - -def verify_components(manifest: Mapping[str, Any], release_dir: pathlib.Path, errors: list[str]) -> None: - for component in manifest.get("components", []): - if not isinstance(component, Mapping): - errors.append("Component entry is not a mapping.") - continue - name = str(component.get("name", "")) - for key, label in ( - ("sbom", "SBOM"), - ("provenance", "provenance"), - ("signature", "signature"), - ("metadata", "metadata"), - ): - entry = component.get(key) - if not entry: - continue - if not isinstance(entry, Mapping): - errors.append(f"{name}: {label} entry must be a mapping.") - continue - verify_artifact_entry(entry, release_dir, label, name, errors) - - -def verify_collections(manifest: Mapping[str, Any], release_dir: pathlib.Path, errors: list[str]) -> None: - for collection, label in ( - ("charts", "chart"), - ("compose", "compose file"), - ): - for item in manifest.get(collection, []): - if not isinstance(item, Mapping): - errors.append(f"{collection} entry is not a mapping.") - continue - path_value = item.get("path") - if not path_value: - errors.append(f"{collection} entry missing path.") - continue - resolved = resolve_path(str(path_value), release_dir) - if not resolved.exists(): - errors.append(f"{label} missing file → {resolved}") - continue - recorded_sha = item.get("sha256") - if recorded_sha: - actual_sha = compute_sha256(resolved) - if actual_sha != recorded_sha: - errors.append( - f"{label} SHA mismatch for {resolved} " - f"(recorded {recorded_sha}, computed {actual_sha})" - ) - - -def verify_debug_store(manifest: Mapping[str, Any], release_dir: pathlib.Path, errors: list[str]) -> None: - debug = manifest.get("debugStore") - if not isinstance(debug, Mapping): - return - manifest_path_str = debug.get("manifest") - manifest_data: Optional[Mapping[str, Any]] = None - if manifest_path_str: - manifest_path = resolve_path(str(manifest_path_str), release_dir) - if not manifest_path.exists(): - errors.append(f"Debug manifest missing → {manifest_path}") - else: - recorded_sha = debug.get("sha256") - if recorded_sha: - actual_sha = compute_sha256(manifest_path) - if actual_sha != recorded_sha: - errors.append( - f"Debug manifest SHA mismatch (recorded {recorded_sha}, computed {actual_sha})" - ) - sha_sidecar = manifest_path.with_suffix(manifest_path.suffix + ".sha256") - sidecar_sha = parse_sha_file(sha_sidecar) - if sidecar_sha and recorded_sha and sidecar_sha != recorded_sha: - errors.append( - f"Debug manifest sidecar digest {sidecar_sha} disagrees with recorded {recorded_sha}" - ) - try: - with manifest_path.open("r", encoding="utf-8") as handle: - manifest_data = json.load(handle) - except json.JSONDecodeError as exc: - errors.append(f"Debug manifest JSON invalid: {exc}") - directory 
= debug.get("directory") - if directory: - debug_dir = resolve_path(str(directory), release_dir) - if not debug_dir.exists(): - errors.append(f"Debug directory missing → {debug_dir}") - - if manifest_data: - artifacts = manifest_data.get("artifacts") - if not isinstance(artifacts, list) or not artifacts: - errors.append("Debug manifest contains no artefacts.") - return - - declared_entries = debug.get("entries") - if isinstance(declared_entries, int) and declared_entries != len(artifacts): - errors.append( - f"Debug manifest reports {declared_entries} entries but contains {len(artifacts)} artefacts." - ) - - for artefact in artifacts: - if not isinstance(artefact, Mapping): - errors.append("Debug manifest artefact entry is not a mapping.") - continue - debug_path = artefact.get("debugPath") - artefact_sha = artefact.get("sha256") - if not debug_path or not artefact_sha: - errors.append("Debug manifest artefact missing debugPath or sha256.") - continue - resolved_debug = resolve_path(str(debug_path), release_dir) - if not resolved_debug.exists(): - errors.append(f"Debug artefact missing → {resolved_debug}") - continue - actual_sha = compute_sha256(resolved_debug) - if actual_sha != artefact_sha: - errors.append( - f"Debug artefact SHA mismatch for {resolved_debug} " - f"(recorded {artefact_sha}, computed {actual_sha})" - ) - -def verify_signature(signature: Mapping[str, Any], release_dir: pathlib.Path, label: str, component_name: str, errors: list[str]) -> None: - sig_path_value = signature.get("path") - if not sig_path_value: - errors.append(f"{component_name}: {label} signature missing path.") - return - sig_path = resolve_path(str(sig_path_value), release_dir) - if not sig_path.exists(): - errors.append(f"{component_name}: {label} signature missing → {sig_path}") - return - recorded_sha = signature.get("sha256") - if recorded_sha: - actual_sha = compute_sha256(sig_path) - if actual_sha != recorded_sha: - errors.append( - f"{component_name}: {label} signature SHA mismatch for {sig_path} " - f"(recorded {recorded_sha}, computed {actual_sha})" - ) - - -def verify_cli_entries(manifest: Mapping[str, Any], release_dir: pathlib.Path, errors: list[str]) -> None: - cli_entries = manifest.get("cli") - if not cli_entries: - return - if not isinstance(cli_entries, list): - errors.append("CLI manifest section must be a list.") - return - for entry in cli_entries: - if not isinstance(entry, Mapping): - errors.append("CLI entry must be a mapping.") - continue - runtime = entry.get("runtime", "") - component_name = f"cli[{runtime}]" - archive = entry.get("archive") - if not isinstance(archive, Mapping): - errors.append(f"{component_name}: archive metadata missing or invalid.") - else: - verify_artifact_entry(archive, release_dir, "archive", component_name, errors) - signature = archive.get("signature") - if isinstance(signature, Mapping): - verify_signature(signature, release_dir, "archive", component_name, errors) - elif signature is not None: - errors.append(f"{component_name}: archive signature must be an object.") - sbom = entry.get("sbom") - if sbom: - if not isinstance(sbom, Mapping): - errors.append(f"{component_name}: sbom entry must be a mapping.") - else: - verify_artifact_entry(sbom, release_dir, "sbom", component_name, errors) - signature = sbom.get("signature") - if isinstance(signature, Mapping): - verify_signature(signature, release_dir, "sbom", component_name, errors) - elif signature is not None: - errors.append(f"{component_name}: sbom signature must be an object.") - - -def 
verify_release(release_dir: pathlib.Path) -> None: - if not release_dir.exists(): - raise VerificationError(f"Release directory not found: {release_dir}") - manifest = load_manifest(release_dir) - errors: list[str] = [] - verify_manifest_hashes(manifest, release_dir, errors) - verify_components(manifest, release_dir, errors) - verify_cli_entries(manifest, release_dir, errors) - verify_collections(manifest, release_dir, errors) - verify_debug_store(manifest, release_dir, errors) - if errors: - bullet_list = "\n - ".join(errors) - raise VerificationError(f"Release verification failed:\n - {bullet_list}") - - -def parse_args(argv: list[str] | None = None) -> argparse.Namespace: - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - "--release-dir", - type=pathlib.Path, - default=pathlib.Path("out/release"), - help="Path to the release artefact directory (default: %(default)s)", - ) - return parser.parse_args(argv) - - -def main(argv: list[str] | None = None) -> int: - args = parse_args(argv) - try: - verify_release(args.release_dir.resolve()) - except VerificationError as exc: - print(str(exc), file=sys.stderr) - return 1 - print(f"✅ Release artefacts verified OK in {args.release_dir}") - return 0 - - -if __name__ == "__main__": - raise SystemExit(main()) +#!/usr/bin/env python3 +"""Verify release artefacts (SBOMs, provenance, signatures, manifest hashes).""" + +from __future__ import annotations + +import argparse +import hashlib +import json +import pathlib +import sys +from collections import OrderedDict +from typing import Any, Mapping, Optional + +from build_release import dump_yaml # type: ignore import-not-found + + +class VerificationError(Exception): + """Raised when release artefacts fail verification.""" + + +def compute_sha256(path: pathlib.Path) -> str: + sha = hashlib.sha256() + with path.open("rb") as handle: + for chunk in iter(lambda: handle.read(1024 * 1024), b""): + sha.update(chunk) + return sha.hexdigest() + + +def parse_sha_file(path: pathlib.Path) -> Optional[str]: + if not path.exists(): + return None + content = path.read_text(encoding="utf-8").strip() + if not content: + return None + return content.split()[0] + + +def resolve_path(path_str: str, release_dir: pathlib.Path) -> pathlib.Path: + candidate = pathlib.Path(path_str.replace("\\", "/")) + if candidate.is_absolute(): + return candidate + + for base in (release_dir, release_dir.parent, release_dir.parent.parent): + resolved = (base / candidate).resolve() + if resolved.exists(): + return resolved + # Fall back to release_dir joined path even if missing to surface in caller. 
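
For quick manual spot-checks outside verify_release, a minimal sketch that applies the same compute_sha256/parse_sha_file sidecar convention used just above (the example artefact path is a placeholder).

# Standalone sketch: confirm an artefact matches its "<name>.sha256" sidecar.
import hashlib
import pathlib

def sidecar_matches(artifact: pathlib.Path) -> bool:
    sidecar = artifact.with_name(artifact.name + ".sha256")
    recorded = sidecar.read_text(encoding="utf-8").split()[0]
    sha = hashlib.sha256()
    with artifact.open("rb") as handle:
        for chunk in iter(lambda: handle.read(1024 * 1024), b""):
            sha.update(chunk)
    return sha.hexdigest() == recorded

# e.g. sidecar_matches(pathlib.Path("out/release/release.yaml"))
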
+ return (release_dir / candidate).resolve() + + +def load_manifest(release_dir: pathlib.Path) -> OrderedDict[str, Any]: + manifest_path = release_dir / "release.json" + if not manifest_path.exists(): + raise VerificationError(f"Release manifest JSON missing at {manifest_path}") + try: + with manifest_path.open("r", encoding="utf-8") as handle: + return json.load(handle, object_pairs_hook=OrderedDict) + except json.JSONDecodeError as exc: + raise VerificationError(f"Failed to parse {manifest_path}: {exc}") from exc + + +def verify_manifest_hashes( + manifest: Mapping[str, Any], + release_dir: pathlib.Path, + errors: list[str], +) -> None: + yaml_path = release_dir / "release.yaml" + if not yaml_path.exists(): + errors.append(f"Missing release.yaml at {yaml_path}") + return + + recorded_yaml_sha = parse_sha_file(yaml_path.with_name(yaml_path.name + ".sha256")) + actual_yaml_sha = compute_sha256(yaml_path) + if recorded_yaml_sha and recorded_yaml_sha != actual_yaml_sha: + errors.append( + f"release.yaml.sha256 recorded {recorded_yaml_sha} but file hashes to {actual_yaml_sha}" + ) + + json_path = release_dir / "release.json" + recorded_json_sha = parse_sha_file(json_path.with_name(json_path.name + ".sha256")) + actual_json_sha = compute_sha256(json_path) + if recorded_json_sha and recorded_json_sha != actual_json_sha: + errors.append( + f"release.json.sha256 recorded {recorded_json_sha} but file hashes to {actual_json_sha}" + ) + + checksums = manifest.get("checksums") + if isinstance(checksums, Mapping): + recorded_digest = checksums.get("sha256") + base_manifest = OrderedDict(manifest) + base_manifest.pop("checksums", None) + yaml_without_checksums = dump_yaml(base_manifest) + computed_digest = hashlib.sha256(yaml_without_checksums.encode("utf-8")).hexdigest() + if recorded_digest != computed_digest: + errors.append( + "Manifest checksum mismatch: " + f"recorded {recorded_digest}, computed {computed_digest}" + ) + + +def verify_artifact_entry( + entry: Mapping[str, Any], + release_dir: pathlib.Path, + label: str, + component_name: str, + errors: list[str], +) -> None: + path_str = entry.get("path") + if not path_str: + errors.append(f"{component_name}: {label} missing 'path' field.") + return + resolved = resolve_path(str(path_str), release_dir) + if not resolved.exists(): + errors.append(f"{component_name}: {label} path does not exist → {resolved}") + return + recorded_sha = entry.get("sha256") + if recorded_sha: + actual_sha = compute_sha256(resolved) + if actual_sha != recorded_sha: + errors.append( + f"{component_name}: {label} SHA mismatch for {resolved} " + f"(recorded {recorded_sha}, computed {actual_sha})" + ) + + +def verify_components(manifest: Mapping[str, Any], release_dir: pathlib.Path, errors: list[str]) -> None: + for component in manifest.get("components", []): + if not isinstance(component, Mapping): + errors.append("Component entry is not a mapping.") + continue + name = str(component.get("name", "")) + for key, label in ( + ("sbom", "SBOM"), + ("provenance", "provenance"), + ("signature", "signature"), + ("metadata", "metadata"), + ): + entry = component.get(key) + if not entry: + continue + if not isinstance(entry, Mapping): + errors.append(f"{name}: {label} entry must be a mapping.") + continue + verify_artifact_entry(entry, release_dir, label, name, errors) + + +def verify_collections(manifest: Mapping[str, Any], release_dir: pathlib.Path, errors: list[str]) -> None: + for collection, label in ( + ("charts", "chart"), + ("compose", "compose file"), + ): + for item in 
manifest.get(collection, []): + if not isinstance(item, Mapping): + errors.append(f"{collection} entry is not a mapping.") + continue + path_value = item.get("path") + if not path_value: + errors.append(f"{collection} entry missing path.") + continue + resolved = resolve_path(str(path_value), release_dir) + if not resolved.exists(): + errors.append(f"{label} missing file → {resolved}") + continue + recorded_sha = item.get("sha256") + if recorded_sha: + actual_sha = compute_sha256(resolved) + if actual_sha != recorded_sha: + errors.append( + f"{label} SHA mismatch for {resolved} " + f"(recorded {recorded_sha}, computed {actual_sha})" + ) + + +def verify_debug_store(manifest: Mapping[str, Any], release_dir: pathlib.Path, errors: list[str]) -> None: + debug = manifest.get("debugStore") + if not isinstance(debug, Mapping): + return + manifest_path_str = debug.get("manifest") + manifest_data: Optional[Mapping[str, Any]] = None + if manifest_path_str: + manifest_path = resolve_path(str(manifest_path_str), release_dir) + if not manifest_path.exists(): + errors.append(f"Debug manifest missing → {manifest_path}") + else: + recorded_sha = debug.get("sha256") + if recorded_sha: + actual_sha = compute_sha256(manifest_path) + if actual_sha != recorded_sha: + errors.append( + f"Debug manifest SHA mismatch (recorded {recorded_sha}, computed {actual_sha})" + ) + sha_sidecar = manifest_path.with_suffix(manifest_path.suffix + ".sha256") + sidecar_sha = parse_sha_file(sha_sidecar) + if sidecar_sha and recorded_sha and sidecar_sha != recorded_sha: + errors.append( + f"Debug manifest sidecar digest {sidecar_sha} disagrees with recorded {recorded_sha}" + ) + try: + with manifest_path.open("r", encoding="utf-8") as handle: + manifest_data = json.load(handle) + except json.JSONDecodeError as exc: + errors.append(f"Debug manifest JSON invalid: {exc}") + directory = debug.get("directory") + if directory: + debug_dir = resolve_path(str(directory), release_dir) + if not debug_dir.exists(): + errors.append(f"Debug directory missing → {debug_dir}") + + if manifest_data: + artifacts = manifest_data.get("artifacts") + if not isinstance(artifacts, list) or not artifacts: + errors.append("Debug manifest contains no artefacts.") + return + + declared_entries = debug.get("entries") + if isinstance(declared_entries, int) and declared_entries != len(artifacts): + errors.append( + f"Debug manifest reports {declared_entries} entries but contains {len(artifacts)} artefacts." 
+ ) + + for artefact in artifacts: + if not isinstance(artefact, Mapping): + errors.append("Debug manifest artefact entry is not a mapping.") + continue + debug_path = artefact.get("debugPath") + artefact_sha = artefact.get("sha256") + if not debug_path or not artefact_sha: + errors.append("Debug manifest artefact missing debugPath or sha256.") + continue + resolved_debug = resolve_path(str(debug_path), release_dir) + if not resolved_debug.exists(): + errors.append(f"Debug artefact missing → {resolved_debug}") + continue + actual_sha = compute_sha256(resolved_debug) + if actual_sha != artefact_sha: + errors.append( + f"Debug artefact SHA mismatch for {resolved_debug} " + f"(recorded {artefact_sha}, computed {actual_sha})" + ) + +def verify_signature(signature: Mapping[str, Any], release_dir: pathlib.Path, label: str, component_name: str, errors: list[str]) -> None: + sig_path_value = signature.get("path") + if not sig_path_value: + errors.append(f"{component_name}: {label} signature missing path.") + return + sig_path = resolve_path(str(sig_path_value), release_dir) + if not sig_path.exists(): + errors.append(f"{component_name}: {label} signature missing → {sig_path}") + return + recorded_sha = signature.get("sha256") + if recorded_sha: + actual_sha = compute_sha256(sig_path) + if actual_sha != recorded_sha: + errors.append( + f"{component_name}: {label} signature SHA mismatch for {sig_path} " + f"(recorded {recorded_sha}, computed {actual_sha})" + ) + + +def verify_cli_entries(manifest: Mapping[str, Any], release_dir: pathlib.Path, errors: list[str]) -> None: + cli_entries = manifest.get("cli") + if not cli_entries: + return + if not isinstance(cli_entries, list): + errors.append("CLI manifest section must be a list.") + return + for entry in cli_entries: + if not isinstance(entry, Mapping): + errors.append("CLI entry must be a mapping.") + continue + runtime = entry.get("runtime", "") + component_name = f"cli[{runtime}]" + archive = entry.get("archive") + if not isinstance(archive, Mapping): + errors.append(f"{component_name}: archive metadata missing or invalid.") + else: + verify_artifact_entry(archive, release_dir, "archive", component_name, errors) + signature = archive.get("signature") + if isinstance(signature, Mapping): + verify_signature(signature, release_dir, "archive", component_name, errors) + elif signature is not None: + errors.append(f"{component_name}: archive signature must be an object.") + sbom = entry.get("sbom") + if sbom: + if not isinstance(sbom, Mapping): + errors.append(f"{component_name}: sbom entry must be a mapping.") + else: + verify_artifact_entry(sbom, release_dir, "sbom", component_name, errors) + signature = sbom.get("signature") + if isinstance(signature, Mapping): + verify_signature(signature, release_dir, "sbom", component_name, errors) + elif signature is not None: + errors.append(f"{component_name}: sbom signature must be an object.") + + +def verify_release(release_dir: pathlib.Path) -> None: + if not release_dir.exists(): + raise VerificationError(f"Release directory not found: {release_dir}") + manifest = load_manifest(release_dir) + errors: list[str] = [] + verify_manifest_hashes(manifest, release_dir, errors) + verify_components(manifest, release_dir, errors) + verify_cli_entries(manifest, release_dir, errors) + verify_collections(manifest, release_dir, errors) + verify_debug_store(manifest, release_dir, errors) + if errors: + bullet_list = "\n - ".join(errors) + raise VerificationError(f"Release verification failed:\n - {bullet_list}") + + +def 
parse_args(argv: list[str] | None = None) -> argparse.Namespace: + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument( + "--release-dir", + type=pathlib.Path, + default=pathlib.Path("out/release"), + help="Path to the release artefact directory (default: %(default)s)", + ) + return parser.parse_args(argv) + + +def main(argv: list[str] | None = None) -> int: + args = parse_args(argv) + try: + verify_release(args.release_dir.resolve()) + except VerificationError as exc: + print(str(exc), file=sys.stderr) + return 1 + print(f"✅ Release artefacts verified OK in {args.release_dir}") + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/ops/devops/scripts/check-advisory-raw-duplicates.js b/ops/devops/scripts/check-advisory-raw-duplicates.js index b86cf2e2..41acf4e1 100644 --- a/ops/devops/scripts/check-advisory-raw-duplicates.js +++ b/ops/devops/scripts/check-advisory-raw-duplicates.js @@ -1,77 +1,77 @@ -/** - * Aggregation helper that surfaces advisory_raw duplicate candidates prior to enabling the - * idempotency unique index. Intended for staging/offline snapshots. - * - * Usage: - * mongo concelier ops/devops/scripts/check-advisory-raw-duplicates.js - * - * Environment variables: - * LIMIT - optional cap on number of duplicate groups to print (default 50). - */ -(function () { - function toInt(value, fallback) { - var parsed = parseInt(value, 10); - return Number.isFinite(parsed) && parsed > 0 ? parsed : fallback; - } - - var limit = typeof LIMIT !== "undefined" ? toInt(LIMIT, 50) : 50; - var database = db.getName ? db.getSiblingDB(db.getName()) : db; - if (!database) { - throw new Error("Unable to resolve database handle"); - } - - print(""); - print("== advisory_raw duplicate audit =="); - print("Database: " + database.getName()); - print("Limit : " + limit); - print(""); - - var pipeline = [ - { - $group: { - _id: { - vendor: "$source.vendor", - upstreamId: "$upstream.upstream_id", - contentHash: "$upstream.content_hash", - tenant: "$tenant" - }, - ids: { $addToSet: "$_id" }, - count: { $sum: 1 } - } - }, - { $match: { count: { $gt: 1 } } }, - { - $project: { - _id: 0, - vendor: "$_id.vendor", - upstreamId: "$_id.upstreamId", - contentHash: "$_id.contentHash", - tenant: "$_id.tenant", - count: 1, - ids: 1 - } - }, - { $sort: { count: -1, vendor: 1, upstreamId: 1 } }, - { $limit: limit } - ]; - - var cursor = database.getCollection("advisory_raw").aggregate(pipeline, { allowDiskUse: true }); - var any = false; - while (cursor.hasNext()) { - var doc = cursor.next(); - any = true; - print("---"); - print("vendor : " + doc.vendor); - print("upstream_id : " + doc.upstreamId); - print("tenant : " + doc.tenant); - print("content_hash: " + doc.contentHash); - print("count : " + doc.count); - print("ids : " + doc.ids.join(", ")); - } - - if (!any) { - print("No duplicate advisory_raw documents detected."); - } - - print(""); -})(); +/** + * Aggregation helper that surfaces advisory_raw duplicate candidates prior to enabling the + * idempotency unique index. Intended for staging/offline snapshots. + * + * Usage: + * mongo concelier ops/devops/scripts/check-advisory-raw-duplicates.js + * + * Environment variables: + * LIMIT - optional cap on number of duplicate groups to print (default 50). + */ +(function () { + function toInt(value, fallback) { + var parsed = parseInt(value, 10); + return Number.isFinite(parsed) && parsed > 0 ? parsed : fallback; + } + + var limit = typeof LIMIT !== "undefined" ? 
toInt(LIMIT, 50) : 50; + var database = db.getName ? db.getSiblingDB(db.getName()) : db; + if (!database) { + throw new Error("Unable to resolve database handle"); + } + + print(""); + print("== advisory_raw duplicate audit =="); + print("Database: " + database.getName()); + print("Limit : " + limit); + print(""); + + var pipeline = [ + { + $group: { + _id: { + vendor: "$source.vendor", + upstreamId: "$upstream.upstream_id", + contentHash: "$upstream.content_hash", + tenant: "$tenant" + }, + ids: { $addToSet: "$_id" }, + count: { $sum: 1 } + } + }, + { $match: { count: { $gt: 1 } } }, + { + $project: { + _id: 0, + vendor: "$_id.vendor", + upstreamId: "$_id.upstreamId", + contentHash: "$_id.contentHash", + tenant: "$_id.tenant", + count: 1, + ids: 1 + } + }, + { $sort: { count: -1, vendor: 1, upstreamId: 1 } }, + { $limit: limit } + ]; + + var cursor = database.getCollection("advisory_raw").aggregate(pipeline, { allowDiskUse: true }); + var any = false; + while (cursor.hasNext()) { + var doc = cursor.next(); + any = true; + print("---"); + print("vendor : " + doc.vendor); + print("upstream_id : " + doc.upstreamId); + print("tenant : " + doc.tenant); + print("content_hash: " + doc.contentHash); + print("count : " + doc.count); + print("ids : " + doc.ids.join(", ")); + } + + if (!any) { + print("No duplicate advisory_raw documents detected."); + } + + print(""); +})(); diff --git a/ops/devops/sync-preview-nuget.sh b/ops/devops/sync-preview-nuget.sh index f2487b96..ff8f15ee 100644 --- a/ops/devops/sync-preview-nuget.sh +++ b/ops/devops/sync-preview-nuget.sh @@ -1,71 +1,71 @@ -#!/usr/bin/env bash - -# Sync preview NuGet packages into the local offline feed. -# Reads package metadata from ops/devops/nuget-preview-packages.csv -# and ensures ./local-nuget holds the expected artefacts (with SHA-256 verification). -# Optional 4th CSV column can override the download base (e.g. dotnet-public flat container). - -set -euo pipefail - -repo_root="$(git -C "${BASH_SOURCE%/*}/.." rev-parse --show-toplevel 2>/dev/null || pwd)" -manifest="${repo_root}/ops/devops/nuget-preview-packages.csv" -dest="${repo_root}/local-nuget" -nuget_v2_base="${NUGET_V2_BASE:-https://www.nuget.org/api/v2/package}" - -if [[ ! 
-f "$manifest" ]]; then - echo "Manifest not found: $manifest" >&2 - exit 1 -fi - -mkdir -p "$dest" - -fetch_package() { - local package="$1" - local version="$2" - local expected_sha="$3" - local source_base="$4" - local target="$dest/${package}.${version}.nupkg" - local url - - if [[ -n "$source_base" ]]; then - local package_lower - package_lower="${package,,}" - url="${source_base%/}/${package_lower}/${version}/${package_lower}.${version}.nupkg" - else - url="${nuget_v2_base%/}/${package}/${version}" - fi - - echo "[sync-nuget] Fetching ${package} ${version}" - local tmp - tmp="$(mktemp)" - trap 'rm -f "$tmp"' RETURN - curl -fsSL --retry 3 --retry-delay 1 "$url" -o "$tmp" - local actual_sha - actual_sha="$(sha256sum "$tmp" | awk '{print $1}')" - if [[ "$actual_sha" != "$expected_sha" ]]; then - echo "Checksum mismatch for ${package} ${version}" >&2 - echo " expected: $expected_sha" >&2 - echo " actual: $actual_sha" >&2 - exit 1 - fi - mv "$tmp" "$target" - trap - RETURN -} - -while IFS=',' read -r package version sha source_base; do - [[ -z "$package" || "$package" == \#* ]] && continue - - local_path="$dest/${package}.${version}.nupkg" - if [[ -f "$local_path" ]]; then - current_sha="$(sha256sum "$local_path" | awk '{print $1}')" - if [[ "$current_sha" == "$sha" ]]; then - echo "[sync-nuget] OK ${package} ${version}" - continue - fi - echo "[sync-nuget] SHA mismatch for ${package} ${version}, refreshing" - else - echo "[sync-nuget] Missing ${package} ${version}" - fi - - fetch_package "$package" "$version" "$sha" "${source_base:-}" -done < "$manifest" +#!/usr/bin/env bash + +# Sync preview NuGet packages into the local offline feed. +# Reads package metadata from ops/devops/nuget-preview-packages.csv +# and ensures ./local-nuget holds the expected artefacts (with SHA-256 verification). +# Optional 4th CSV column can override the download base (e.g. dotnet-public flat container). + +set -euo pipefail + +repo_root="$(git -C "${BASH_SOURCE%/*}/.." rev-parse --show-toplevel 2>/dev/null || pwd)" +manifest="${repo_root}/ops/devops/nuget-preview-packages.csv" +dest="${repo_root}/local-nuget" +nuget_v2_base="${NUGET_V2_BASE:-https://www.nuget.org/api/v2/package}" + +if [[ ! 
-f "$manifest" ]]; then + echo "Manifest not found: $manifest" >&2 + exit 1 +fi + +mkdir -p "$dest" + +fetch_package() { + local package="$1" + local version="$2" + local expected_sha="$3" + local source_base="$4" + local target="$dest/${package}.${version}.nupkg" + local url + + if [[ -n "$source_base" ]]; then + local package_lower + package_lower="${package,,}" + url="${source_base%/}/${package_lower}/${version}/${package_lower}.${version}.nupkg" + else + url="${nuget_v2_base%/}/${package}/${version}" + fi + + echo "[sync-nuget] Fetching ${package} ${version}" + local tmp + tmp="$(mktemp)" + trap 'rm -f "$tmp"' RETURN + curl -fsSL --retry 3 --retry-delay 1 "$url" -o "$tmp" + local actual_sha + actual_sha="$(sha256sum "$tmp" | awk '{print $1}')" + if [[ "$actual_sha" != "$expected_sha" ]]; then + echo "Checksum mismatch for ${package} ${version}" >&2 + echo " expected: $expected_sha" >&2 + echo " actual: $actual_sha" >&2 + exit 1 + fi + mv "$tmp" "$target" + trap - RETURN +} + +while IFS=',' read -r package version sha source_base; do + [[ -z "$package" || "$package" == \#* ]] && continue + + local_path="$dest/${package}.${version}.nupkg" + if [[ -f "$local_path" ]]; then + current_sha="$(sha256sum "$local_path" | awk '{print $1}')" + if [[ "$current_sha" == "$sha" ]]; then + echo "[sync-nuget] OK ${package} ${version}" + continue + fi + echo "[sync-nuget] SHA mismatch for ${package} ${version}, refreshing" + else + echo "[sync-nuget] Missing ${package} ${version}" + fi + + fetch_package "$package" "$version" "$sha" "${source_base:-}" +done < "$manifest" diff --git a/ops/devops/telemetry/generate_dev_tls.sh b/ops/devops/telemetry/generate_dev_tls.sh index 348a3516..8742af9b 100644 --- a/ops/devops/telemetry/generate_dev_tls.sh +++ b/ops/devops/telemetry/generate_dev_tls.sh @@ -1,77 +1,77 @@ -#!/usr/bin/env bash - -set -euo pipefail - -SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" -CERT_DIR="${SCRIPT_DIR}/../../deploy/telemetry/certs" - -mkdir -p "${CERT_DIR}" - -CA_KEY="${CERT_DIR}/ca.key" -CA_CRT="${CERT_DIR}/ca.crt" -COL_KEY="${CERT_DIR}/collector.key" -COL_CSR="${CERT_DIR}/collector.csr" -COL_CRT="${CERT_DIR}/collector.crt" -CLIENT_KEY="${CERT_DIR}/client.key" -CLIENT_CSR="${CERT_DIR}/client.csr" -CLIENT_CRT="${CERT_DIR}/client.crt" - -echo "[*] Generating OpenTelemetry dev CA and certificates in ${CERT_DIR}" - -# Root CA -if [[ ! 
-f "${CA_KEY}" ]]; then - openssl genrsa -out "${CA_KEY}" 4096 >/dev/null 2>&1 -fi -openssl req -x509 -new -key "${CA_KEY}" -days 365 -sha256 \ - -out "${CA_CRT}" -subj "/CN=StellaOps Dev Telemetry CA" \ - -config <(cat <<'EOF' -[req] -distinguished_name = req_distinguished_name -prompt = no -[req_distinguished_name] -EOF -) >/dev/null 2>&1 - -# Collector certificate (server + client auth) -openssl req -new -nodes -newkey rsa:4096 \ - -keyout "${COL_KEY}" \ - -out "${COL_CSR}" \ - -subj "/CN=stellaops-otel-collector" >/dev/null 2>&1 - -openssl x509 -req -in "${COL_CSR}" -CA "${CA_CRT}" -CAkey "${CA_KEY}" \ - -CAcreateserial -out "${COL_CRT}" -days 365 -sha256 \ - -extensions v3_req -extfile <(cat <<'EOF' -[v3_req] -subjectAltName = @alt_names -extendedKeyUsage = serverAuth, clientAuth -[alt_names] -DNS.1 = stellaops-otel-collector -DNS.2 = localhost -IP.1 = 127.0.0.1 -EOF -) >/dev/null 2>&1 - -# Client certificate -openssl req -new -nodes -newkey rsa:4096 \ - -keyout "${CLIENT_KEY}" \ - -out "${CLIENT_CSR}" \ - -subj "/CN=stellaops-otel-client" >/dev/null 2>&1 - -openssl x509 -req -in "${CLIENT_CSR}" -CA "${CA_CRT}" -CAkey "${CA_KEY}" \ - -CAcreateserial -out "${CLIENT_CRT}" -days 365 -sha256 \ - -extensions v3_req -extfile <(cat <<'EOF' -[v3_req] -extendedKeyUsage = clientAuth -subjectAltName = @alt_names -[alt_names] -DNS.1 = stellaops-otel-client -DNS.2 = localhost -IP.1 = 127.0.0.1 -EOF -) >/dev/null 2>&1 - -rm -f "${COL_CSR}" "${CLIENT_CSR}" -rm -f "${CERT_DIR}/ca.srl" - -echo "[✓] Certificates ready:" -ls -1 "${CERT_DIR}" +#!/usr/bin/env bash + +set -euo pipefail + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +CERT_DIR="${SCRIPT_DIR}/../../deploy/telemetry/certs" + +mkdir -p "${CERT_DIR}" + +CA_KEY="${CERT_DIR}/ca.key" +CA_CRT="${CERT_DIR}/ca.crt" +COL_KEY="${CERT_DIR}/collector.key" +COL_CSR="${CERT_DIR}/collector.csr" +COL_CRT="${CERT_DIR}/collector.crt" +CLIENT_KEY="${CERT_DIR}/client.key" +CLIENT_CSR="${CERT_DIR}/client.csr" +CLIENT_CRT="${CERT_DIR}/client.crt" + +echo "[*] Generating OpenTelemetry dev CA and certificates in ${CERT_DIR}" + +# Root CA +if [[ ! 
-f "${CA_KEY}" ]]; then + openssl genrsa -out "${CA_KEY}" 4096 >/dev/null 2>&1 +fi +openssl req -x509 -new -key "${CA_KEY}" -days 365 -sha256 \ + -out "${CA_CRT}" -subj "/CN=StellaOps Dev Telemetry CA" \ + -config <(cat <<'EOF' +[req] +distinguished_name = req_distinguished_name +prompt = no +[req_distinguished_name] +EOF +) >/dev/null 2>&1 + +# Collector certificate (server + client auth) +openssl req -new -nodes -newkey rsa:4096 \ + -keyout "${COL_KEY}" \ + -out "${COL_CSR}" \ + -subj "/CN=stellaops-otel-collector" >/dev/null 2>&1 + +openssl x509 -req -in "${COL_CSR}" -CA "${CA_CRT}" -CAkey "${CA_KEY}" \ + -CAcreateserial -out "${COL_CRT}" -days 365 -sha256 \ + -extensions v3_req -extfile <(cat <<'EOF' +[v3_req] +subjectAltName = @alt_names +extendedKeyUsage = serverAuth, clientAuth +[alt_names] +DNS.1 = stellaops-otel-collector +DNS.2 = localhost +IP.1 = 127.0.0.1 +EOF +) >/dev/null 2>&1 + +# Client certificate +openssl req -new -nodes -newkey rsa:4096 \ + -keyout "${CLIENT_KEY}" \ + -out "${CLIENT_CSR}" \ + -subj "/CN=stellaops-otel-client" >/dev/null 2>&1 + +openssl x509 -req -in "${CLIENT_CSR}" -CA "${CA_CRT}" -CAkey "${CA_KEY}" \ + -CAcreateserial -out "${CLIENT_CRT}" -days 365 -sha256 \ + -extensions v3_req -extfile <(cat <<'EOF' +[v3_req] +extendedKeyUsage = clientAuth +subjectAltName = @alt_names +[alt_names] +DNS.1 = stellaops-otel-client +DNS.2 = localhost +IP.1 = 127.0.0.1 +EOF +) >/dev/null 2>&1 + +rm -f "${COL_CSR}" "${CLIENT_CSR}" +rm -f "${CERT_DIR}/ca.srl" + +echo "[✓] Certificates ready:" +ls -1 "${CERT_DIR}" diff --git a/ops/devops/telemetry/package_offline_bundle.py b/ops/devops/telemetry/package_offline_bundle.py index 28ea0908..8c9a6232 100644 --- a/ops/devops/telemetry/package_offline_bundle.py +++ b/ops/devops/telemetry/package_offline_bundle.py @@ -1,136 +1,136 @@ -#!/usr/bin/env python3 -"""Package telemetry collector assets for offline/air-gapped installs. - -Outputs a tarball containing the collector configuration, Compose overlay, -Helm defaults, and operator README. A SHA-256 checksum sidecar is emitted, and -optional Cosign signing can be enabled with --sign. -""" -from __future__ import annotations - -import argparse -import hashlib -import os -import subprocess -import sys -import tarfile -from pathlib import Path -from typing import Iterable - -REPO_ROOT = Path(__file__).resolve().parents[3] -DEFAULT_OUTPUT = REPO_ROOT / "out" / "telemetry" / "telemetry-offline-bundle.tar.gz" -BUNDLE_CONTENTS: tuple[Path, ...] 
= ( - Path("deploy/telemetry/README.md"), - Path("deploy/telemetry/otel-collector-config.yaml"), - Path("deploy/telemetry/storage/README.md"), - Path("deploy/telemetry/storage/prometheus.yaml"), - Path("deploy/telemetry/storage/tempo.yaml"), - Path("deploy/telemetry/storage/loki.yaml"), - Path("deploy/telemetry/storage/tenants/tempo-overrides.yaml"), - Path("deploy/telemetry/storage/tenants/loki-overrides.yaml"), - Path("deploy/helm/stellaops/files/otel-collector-config.yaml"), - Path("deploy/helm/stellaops/values.yaml"), - Path("deploy/helm/stellaops/templates/otel-collector.yaml"), - Path("deploy/compose/docker-compose.telemetry.yaml"), - Path("deploy/compose/docker-compose.telemetry-storage.yaml"), - Path("docs/ops/telemetry-collector.md"), - Path("docs/ops/telemetry-storage.md"), -) - - -def compute_sha256(path: Path) -> str: - sha = hashlib.sha256() - with path.open("rb") as handle: - for chunk in iter(lambda: handle.read(1024 * 1024), b""): - sha.update(chunk) - return sha.hexdigest() - - -def validate_files(paths: Iterable[Path]) -> None: - missing = [str(p) for p in paths if not (REPO_ROOT / p).exists()] - if missing: - raise FileNotFoundError(f"Missing bundle artefacts: {', '.join(missing)}") - - -def create_bundle(output_path: Path) -> Path: - output_path.parent.mkdir(parents=True, exist_ok=True) - with tarfile.open(output_path, "w:gz") as tar: - for rel_path in BUNDLE_CONTENTS: - abs_path = REPO_ROOT / rel_path - tar.add(abs_path, arcname=str(rel_path)) - return output_path - - -def write_checksum(bundle_path: Path) -> Path: - digest = compute_sha256(bundle_path) - sha_path = bundle_path.with_suffix(bundle_path.suffix + ".sha256") - sha_path.write_text(f"{digest} {bundle_path.name}\n", encoding="utf-8") - return sha_path - - -def cosign_sign(bundle_path: Path, key_ref: str | None, identity_token: str | None) -> None: - cmd = ["cosign", "sign-blob", "--yes", str(bundle_path)] - if key_ref: - cmd.extend(["--key", key_ref]) - env = os.environ.copy() - if identity_token: - env["COSIGN_IDENTITY_TOKEN"] = identity_token - try: - subprocess.run(cmd, check=True, env=env) - except FileNotFoundError as exc: - raise RuntimeError("cosign not found on PATH; install cosign or omit --sign") from exc - except subprocess.CalledProcessError as exc: - raise RuntimeError(f"cosign sign-blob failed: {exc}") from exc - - -def parse_args(argv: list[str] | None = None) -> argparse.Namespace: - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - "--output", - type=Path, - default=DEFAULT_OUTPUT, - help=f"Output bundle path (default: {DEFAULT_OUTPUT})", - ) - parser.add_argument( - "--sign", - action="store_true", - help="Sign the bundle using cosign (requires cosign on PATH)", - ) - parser.add_argument( - "--cosign-key", - type=str, - default=os.environ.get("COSIGN_KEY_REF"), - help="Cosign key reference (file:..., azurekms://..., etc.)", - ) - parser.add_argument( - "--identity-token", - type=str, - default=os.environ.get("COSIGN_IDENTITY_TOKEN"), - help="OIDC identity token for keyless signing", - ) - return parser.parse_args(argv) - - -def main(argv: list[str] | None = None) -> int: - args = parse_args(argv) - validate_files(BUNDLE_CONTENTS) - - bundle_path = args.output.resolve() - print(f"[*] Creating telemetry bundle at {bundle_path}") - create_bundle(bundle_path) - sha_path = write_checksum(bundle_path) - print(f"[✓] SHA-256 written to {sha_path}") - - if args.sign: - print("[*] Signing bundle with cosign") - cosign_sign(bundle_path, args.cosign_key, args.identity_token) 
- sig_path = bundle_path.with_suffix(bundle_path.suffix + ".sig") - if sig_path.exists(): - print(f"[✓] Cosign signature written to {sig_path}") - else: - print("[!] Cosign completed but signature file not found (ensure cosign version >= 2.2)") - - return 0 - - -if __name__ == "__main__": - sys.exit(main()) +#!/usr/bin/env python3 +"""Package telemetry collector assets for offline/air-gapped installs. + +Outputs a tarball containing the collector configuration, Compose overlay, +Helm defaults, and operator README. A SHA-256 checksum sidecar is emitted, and +optional Cosign signing can be enabled with --sign. +""" +from __future__ import annotations + +import argparse +import hashlib +import os +import subprocess +import sys +import tarfile +from pathlib import Path +from typing import Iterable + +REPO_ROOT = Path(__file__).resolve().parents[3] +DEFAULT_OUTPUT = REPO_ROOT / "out" / "telemetry" / "telemetry-offline-bundle.tar.gz" +BUNDLE_CONTENTS: tuple[Path, ...] = ( + Path("deploy/telemetry/README.md"), + Path("deploy/telemetry/otel-collector-config.yaml"), + Path("deploy/telemetry/storage/README.md"), + Path("deploy/telemetry/storage/prometheus.yaml"), + Path("deploy/telemetry/storage/tempo.yaml"), + Path("deploy/telemetry/storage/loki.yaml"), + Path("deploy/telemetry/storage/tenants/tempo-overrides.yaml"), + Path("deploy/telemetry/storage/tenants/loki-overrides.yaml"), + Path("deploy/helm/stellaops/files/otel-collector-config.yaml"), + Path("deploy/helm/stellaops/values.yaml"), + Path("deploy/helm/stellaops/templates/otel-collector.yaml"), + Path("deploy/compose/docker-compose.telemetry.yaml"), + Path("deploy/compose/docker-compose.telemetry-storage.yaml"), + Path("docs/ops/telemetry-collector.md"), + Path("docs/ops/telemetry-storage.md"), +) + + +def compute_sha256(path: Path) -> str: + sha = hashlib.sha256() + with path.open("rb") as handle: + for chunk in iter(lambda: handle.read(1024 * 1024), b""): + sha.update(chunk) + return sha.hexdigest() + + +def validate_files(paths: Iterable[Path]) -> None: + missing = [str(p) for p in paths if not (REPO_ROOT / p).exists()] + if missing: + raise FileNotFoundError(f"Missing bundle artefacts: {', '.join(missing)}") + + +def create_bundle(output_path: Path) -> Path: + output_path.parent.mkdir(parents=True, exist_ok=True) + with tarfile.open(output_path, "w:gz") as tar: + for rel_path in BUNDLE_CONTENTS: + abs_path = REPO_ROOT / rel_path + tar.add(abs_path, arcname=str(rel_path)) + return output_path + + +def write_checksum(bundle_path: Path) -> Path: + digest = compute_sha256(bundle_path) + sha_path = bundle_path.with_suffix(bundle_path.suffix + ".sha256") + sha_path.write_text(f"{digest} {bundle_path.name}\n", encoding="utf-8") + return sha_path + + +def cosign_sign(bundle_path: Path, key_ref: str | None, identity_token: str | None) -> None: + cmd = ["cosign", "sign-blob", "--yes", str(bundle_path)] + if key_ref: + cmd.extend(["--key", key_ref]) + env = os.environ.copy() + if identity_token: + env["COSIGN_IDENTITY_TOKEN"] = identity_token + try: + subprocess.run(cmd, check=True, env=env) + except FileNotFoundError as exc: + raise RuntimeError("cosign not found on PATH; install cosign or omit --sign") from exc + except subprocess.CalledProcessError as exc: + raise RuntimeError(f"cosign sign-blob failed: {exc}") from exc + + +def parse_args(argv: list[str] | None = None) -> argparse.Namespace: + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument( + "--output", + type=Path, + default=DEFAULT_OUTPUT, + help=f"Output bundle 
path (default: {DEFAULT_OUTPUT})", + ) + parser.add_argument( + "--sign", + action="store_true", + help="Sign the bundle using cosign (requires cosign on PATH)", + ) + parser.add_argument( + "--cosign-key", + type=str, + default=os.environ.get("COSIGN_KEY_REF"), + help="Cosign key reference (file:..., azurekms://..., etc.)", + ) + parser.add_argument( + "--identity-token", + type=str, + default=os.environ.get("COSIGN_IDENTITY_TOKEN"), + help="OIDC identity token for keyless signing", + ) + return parser.parse_args(argv) + + +def main(argv: list[str] | None = None) -> int: + args = parse_args(argv) + validate_files(BUNDLE_CONTENTS) + + bundle_path = args.output.resolve() + print(f"[*] Creating telemetry bundle at {bundle_path}") + create_bundle(bundle_path) + sha_path = write_checksum(bundle_path) + print(f"[✓] SHA-256 written to {sha_path}") + + if args.sign: + print("[*] Signing bundle with cosign") + cosign_sign(bundle_path, args.cosign_key, args.identity_token) + sig_path = bundle_path.with_suffix(bundle_path.suffix + ".sig") + if sig_path.exists(): + print(f"[✓] Cosign signature written to {sig_path}") + else: + print("[!] Cosign completed but signature file not found (ensure cosign version >= 2.2)") + + return 0 + + +if __name__ == "__main__": + sys.exit(main()) diff --git a/ops/devops/telemetry/smoke_otel_collector.py b/ops/devops/telemetry/smoke_otel_collector.py index 771fa986..33cb01bf 100644 --- a/ops/devops/telemetry/smoke_otel_collector.py +++ b/ops/devops/telemetry/smoke_otel_collector.py @@ -1,197 +1,197 @@ -#!/usr/bin/env python3 -""" -Smoke test for the StellaOps OpenTelemetry Collector deployment. - -The script sends sample traces, metrics, and logs over OTLP/HTTP with mutual TLS -and asserts that the collector accepted the payloads by checking its Prometheus -metrics endpoint. 
-""" - -from __future__ import annotations - -import argparse -import json -import ssl -import sys -import time -import urllib.request -from pathlib import Path - -TRACE_PAYLOAD = { - "resourceSpans": [ - { - "resource": { - "attributes": [ - {"key": "service.name", "value": {"stringValue": "smoke-client"}}, - {"key": "tenant.id", "value": {"stringValue": "dev"}}, - ] - }, - "scopeSpans": [ - { - "scope": {"name": "smoke-test"}, - "spans": [ - { - "traceId": "00000000000000000000000000000001", - "spanId": "0000000000000001", - "name": "smoke-span", - "kind": 1, - "startTimeUnixNano": "1730000000000000000", - "endTimeUnixNano": "1730000000500000000", - "status": {"code": 0}, - } - ], - } - ], - } - ] -} - -METRIC_PAYLOAD = { - "resourceMetrics": [ - { - "resource": { - "attributes": [ - {"key": "service.name", "value": {"stringValue": "smoke-client"}}, - {"key": "tenant.id", "value": {"stringValue": "dev"}}, - ] - }, - "scopeMetrics": [ - { - "scope": {"name": "smoke-test"}, - "metrics": [ - { - "name": "smoke_gauge", - "gauge": { - "dataPoints": [ - { - "asDouble": 1.0, - "timeUnixNano": "1730000001000000000", - "attributes": [ - {"key": "phase", "value": {"stringValue": "ingest"}} - ], - } - ] - }, - } - ], - } - ], - } - ] -} - -LOG_PAYLOAD = { - "resourceLogs": [ - { - "resource": { - "attributes": [ - {"key": "service.name", "value": {"stringValue": "smoke-client"}}, - {"key": "tenant.id", "value": {"stringValue": "dev"}}, - ] - }, - "scopeLogs": [ - { - "scope": {"name": "smoke-test"}, - "logRecords": [ - { - "timeUnixNano": "1730000002000000000", - "severityNumber": 9, - "severityText": "Info", - "body": {"stringValue": "StellaOps collector smoke log"}, - } - ], - } - ], - } - ] -} - - -def _load_context(ca: Path, cert: Path, key: Path) -> ssl.SSLContext: - context = ssl.create_default_context(cafile=str(ca)) - context.check_hostname = False - context.verify_mode = ssl.CERT_REQUIRED - context.load_cert_chain(certfile=str(cert), keyfile=str(key)) - return context - - -def _post_json(url: str, payload: dict, context: ssl.SSLContext) -> None: - data = json.dumps(payload).encode("utf-8") - request = urllib.request.Request( - url, - data=data, - headers={ - "Content-Type": "application/json", - "User-Agent": "stellaops-otel-smoke/1.0", - }, - method="POST", - ) - with urllib.request.urlopen(request, context=context, timeout=10) as response: - if response.status // 100 != 2: - raise RuntimeError(f"{url} returned HTTP {response.status}") - - -def _fetch_metrics(url: str, context: ssl.SSLContext) -> str: - request = urllib.request.Request( - url, - headers={ - "User-Agent": "stellaops-otel-smoke/1.0", - }, - ) - with urllib.request.urlopen(request, context=context, timeout=10) as response: - return response.read().decode("utf-8") - - -def _assert_counter(metrics: str, metric_name: str) -> None: - for line in metrics.splitlines(): - if line.startswith(metric_name): - try: - _, value = line.split(" ") - if float(value) > 0: - return - except ValueError: - continue - raise AssertionError(f"{metric_name} not incremented") - - -def main() -> int: - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument("--host", default="localhost", help="Collector host (default: %(default)s)") - parser.add_argument("--otlp-port", type=int, default=4318, help="OTLP/HTTP port") - parser.add_argument("--metrics-port", type=int, default=9464, help="Prometheus metrics port") - parser.add_argument("--health-port", type=int, default=13133, help="Health check port") - parser.add_argument("--ca", 
type=Path, default=Path("deploy/telemetry/certs/ca.crt"), help="CA certificate path") - parser.add_argument("--cert", type=Path, default=Path("deploy/telemetry/certs/client.crt"), help="Client certificate path") - parser.add_argument("--key", type=Path, default=Path("deploy/telemetry/certs/client.key"), help="Client key path") - args = parser.parse_args() - - for path in (args.ca, args.cert, args.key): - if not path.exists(): - print(f"[!] missing TLS material: {path}", file=sys.stderr) - return 1 - - context = _load_context(args.ca, args.cert, args.key) - - otlp_base = f"https://{args.host}:{args.otlp_port}/v1" - print(f"[*] Sending OTLP traffic to {otlp_base}") - _post_json(f"{otlp_base}/traces", TRACE_PAYLOAD, context) - _post_json(f"{otlp_base}/metrics", METRIC_PAYLOAD, context) - _post_json(f"{otlp_base}/logs", LOG_PAYLOAD, context) - - # Allow Prometheus exporter to update metrics - time.sleep(2) - - metrics_url = f"https://{args.host}:{args.metrics_port}/metrics" - print(f"[*] Fetching collector metrics from {metrics_url}") - metrics = _fetch_metrics(metrics_url, context) - - _assert_counter(metrics, "otelcol_receiver_accepted_spans") - _assert_counter(metrics, "otelcol_receiver_accepted_logs") - _assert_counter(metrics, "otelcol_receiver_accepted_metric_points") - - print("[✓] Collector accepted traces, logs, and metrics.") - return 0 - - -if __name__ == "__main__": - raise SystemExit(main()) +#!/usr/bin/env python3 +""" +Smoke test for the StellaOps OpenTelemetry Collector deployment. + +The script sends sample traces, metrics, and logs over OTLP/HTTP with mutual TLS +and asserts that the collector accepted the payloads by checking its Prometheus +metrics endpoint. +""" + +from __future__ import annotations + +import argparse +import json +import ssl +import sys +import time +import urllib.request +from pathlib import Path + +TRACE_PAYLOAD = { + "resourceSpans": [ + { + "resource": { + "attributes": [ + {"key": "service.name", "value": {"stringValue": "smoke-client"}}, + {"key": "tenant.id", "value": {"stringValue": "dev"}}, + ] + }, + "scopeSpans": [ + { + "scope": {"name": "smoke-test"}, + "spans": [ + { + "traceId": "00000000000000000000000000000001", + "spanId": "0000000000000001", + "name": "smoke-span", + "kind": 1, + "startTimeUnixNano": "1730000000000000000", + "endTimeUnixNano": "1730000000500000000", + "status": {"code": 0}, + } + ], + } + ], + } + ] +} + +METRIC_PAYLOAD = { + "resourceMetrics": [ + { + "resource": { + "attributes": [ + {"key": "service.name", "value": {"stringValue": "smoke-client"}}, + {"key": "tenant.id", "value": {"stringValue": "dev"}}, + ] + }, + "scopeMetrics": [ + { + "scope": {"name": "smoke-test"}, + "metrics": [ + { + "name": "smoke_gauge", + "gauge": { + "dataPoints": [ + { + "asDouble": 1.0, + "timeUnixNano": "1730000001000000000", + "attributes": [ + {"key": "phase", "value": {"stringValue": "ingest"}} + ], + } + ] + }, + } + ], + } + ], + } + ] +} + +LOG_PAYLOAD = { + "resourceLogs": [ + { + "resource": { + "attributes": [ + {"key": "service.name", "value": {"stringValue": "smoke-client"}}, + {"key": "tenant.id", "value": {"stringValue": "dev"}}, + ] + }, + "scopeLogs": [ + { + "scope": {"name": "smoke-test"}, + "logRecords": [ + { + "timeUnixNano": "1730000002000000000", + "severityNumber": 9, + "severityText": "Info", + "body": {"stringValue": "StellaOps collector smoke log"}, + } + ], + } + ], + } + ] +} + + +def _load_context(ca: Path, cert: Path, key: Path) -> ssl.SSLContext: + context = ssl.create_default_context(cafile=str(ca)) + 
context.check_hostname = False + context.verify_mode = ssl.CERT_REQUIRED + context.load_cert_chain(certfile=str(cert), keyfile=str(key)) + return context + + +def _post_json(url: str, payload: dict, context: ssl.SSLContext) -> None: + data = json.dumps(payload).encode("utf-8") + request = urllib.request.Request( + url, + data=data, + headers={ + "Content-Type": "application/json", + "User-Agent": "stellaops-otel-smoke/1.0", + }, + method="POST", + ) + with urllib.request.urlopen(request, context=context, timeout=10) as response: + if response.status // 100 != 2: + raise RuntimeError(f"{url} returned HTTP {response.status}") + + +def _fetch_metrics(url: str, context: ssl.SSLContext) -> str: + request = urllib.request.Request( + url, + headers={ + "User-Agent": "stellaops-otel-smoke/1.0", + }, + ) + with urllib.request.urlopen(request, context=context, timeout=10) as response: + return response.read().decode("utf-8") + + +def _assert_counter(metrics: str, metric_name: str) -> None: + for line in metrics.splitlines(): + if line.startswith(metric_name): + try: + _, value = line.split(" ") + if float(value) > 0: + return + except ValueError: + continue + raise AssertionError(f"{metric_name} not incremented") + + +def main() -> int: + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument("--host", default="localhost", help="Collector host (default: %(default)s)") + parser.add_argument("--otlp-port", type=int, default=4318, help="OTLP/HTTP port") + parser.add_argument("--metrics-port", type=int, default=9464, help="Prometheus metrics port") + parser.add_argument("--health-port", type=int, default=13133, help="Health check port") + parser.add_argument("--ca", type=Path, default=Path("deploy/telemetry/certs/ca.crt"), help="CA certificate path") + parser.add_argument("--cert", type=Path, default=Path("deploy/telemetry/certs/client.crt"), help="Client certificate path") + parser.add_argument("--key", type=Path, default=Path("deploy/telemetry/certs/client.key"), help="Client key path") + args = parser.parse_args() + + for path in (args.ca, args.cert, args.key): + if not path.exists(): + print(f"[!] missing TLS material: {path}", file=sys.stderr) + return 1 + + context = _load_context(args.ca, args.cert, args.key) + + otlp_base = f"https://{args.host}:{args.otlp_port}/v1" + print(f"[*] Sending OTLP traffic to {otlp_base}") + _post_json(f"{otlp_base}/traces", TRACE_PAYLOAD, context) + _post_json(f"{otlp_base}/metrics", METRIC_PAYLOAD, context) + _post_json(f"{otlp_base}/logs", LOG_PAYLOAD, context) + + # Allow Prometheus exporter to update metrics + time.sleep(2) + + metrics_url = f"https://{args.host}:{args.metrics_port}/metrics" + print(f"[*] Fetching collector metrics from {metrics_url}") + metrics = _fetch_metrics(metrics_url, context) + + _assert_counter(metrics, "otelcol_receiver_accepted_spans") + _assert_counter(metrics, "otelcol_receiver_accepted_logs") + _assert_counter(metrics, "otelcol_receiver_accepted_metric_points") + + print("[✓] Collector accepted traces, logs, and metrics.") + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/ops/devops/validate_restore_sources.py b/ops/devops/validate_restore_sources.py index 06bb2bc5..6ce70157 100644 --- a/ops/devops/validate_restore_sources.py +++ b/ops/devops/validate_restore_sources.py @@ -1,183 +1,183 @@ -#!/usr/bin/env python3 - -""" -Validate NuGet source ordering for StellaOps. 
- -Ensures `local-nuget` is the highest priority feed in both NuGet.config and the -Directory.Build.props restore configuration. Fails fast with actionable errors -so CI/offline kit workflows can assert deterministic restore ordering. -""" - -from __future__ import annotations - -import argparse -import subprocess -import sys -import xml.etree.ElementTree as ET -from pathlib import Path - - -REPO_ROOT = Path(__file__).resolve().parents[2] -NUGET_CONFIG = REPO_ROOT / "NuGet.config" -ROOT_PROPS = REPO_ROOT / "Directory.Build.props" -EXPECTED_SOURCE_KEYS = ["local", "dotnet-public", "nuget.org"] - - -class ValidationError(Exception): - """Raised when validation fails.""" - - -def _fail(message: str) -> None: - raise ValidationError(message) - - -def _parse_xml(path: Path) -> ET.ElementTree: - try: - return ET.parse(path) - except FileNotFoundError as exc: - _fail(f"Missing required file: {path}") - except ET.ParseError as exc: - _fail(f"Could not parse XML for {path}: {exc}") - - -def validate_nuget_config() -> None: - tree = _parse_xml(NUGET_CONFIG) - root = tree.getroot() - - package_sources = root.find("packageSources") - if package_sources is None: - _fail("NuGet.config must declare a <packageSources> section.") - - children = list(package_sources) - if not children or children[0].tag != "clear": - _fail("NuGet.config packageSources must begin with a <clear /> element.") - - adds = [child for child in children if child.tag == "add"] - if not adds: - _fail("NuGet.config packageSources must define at least one <add> entry.") - - keys = [add.attrib.get("key") for add in adds] - if keys[: len(EXPECTED_SOURCE_KEYS)] != EXPECTED_SOURCE_KEYS: - formatted = ", ".join(keys) or "" - _fail( - "NuGet.config packageSources must list feeds in the order " - f"{EXPECTED_SOURCE_KEYS}. Found: {formatted}" - ) - - local_value = adds[0].attrib.get("value", "") - if Path(local_value).name != "local-nuget": - _fail( - "NuGet.config local feed should point at the repo-local mirror " - f"'local-nuget', found value '{local_value}'." - ) - - clear = package_sources.find("clear") - if clear is None: - _fail("NuGet.config packageSources must start with <clear /> to avoid inherited feeds.") - - -def validate_directory_build_props() -> None: - tree = _parse_xml(ROOT_PROPS) - root = tree.getroot() - defaults = None - for element in root.findall(".//_StellaOpsDefaultRestoreSources"): - defaults = [fragment.strip() for fragment in element.text.split(";") if fragment.strip()] - break - - if defaults is None: - _fail("Directory.Build.props must define _StellaOpsDefaultRestoreSources.") - - expected_props = [ - "$(StellaOpsLocalNuGetSource)", - "$(StellaOpsDotNetPublicSource)", - "$(StellaOpsNuGetOrgSource)", - ] - if defaults != expected_props: - _fail( - "Directory.Build.props _StellaOpsDefaultRestoreSources must list feeds " - f"in the order {expected_props}. Found: {defaults}" - ) - - restore_nodes = root.findall(".//RestoreSources") - if not restore_nodes: - _fail("Directory.Build.props must override RestoreSources to force deterministic ordering.") - - uses_default_first = any( - node.text - and node.text.strip().startswith("$(_StellaOpsDefaultRestoreSources)") - for node in restore_nodes - ) - if not uses_default_first: - _fail( - "Directory.Build.props RestoreSources override must place " - "$(_StellaOpsDefaultRestoreSources) at the beginning."
- ) - - -def assert_single_nuget_config() -> None: - extra_configs: list[Path] = [] - configs: set[Path] = set() - for glob in ("NuGet.config", "nuget.config"): - try: - result = subprocess.run( - ["rg", "--files", f"-g{glob}"], - check=False, - capture_output=True, - text=True, - cwd=REPO_ROOT, - ) - except FileNotFoundError as exc: - _fail("ripgrep (rg) is required for validation but was not found on PATH.") - if result.returncode not in (0, 1): - _fail( - f"ripgrep failed while searching for {glob}: {result.stderr.strip() or result.returncode}" - ) - for line in result.stdout.splitlines(): - configs.add((REPO_ROOT / line).resolve()) - - configs.discard(NUGET_CONFIG.resolve()) - extra_configs.extend(sorted(configs)) - if extra_configs: - formatted = "\n ".join(str(path.relative_to(REPO_ROOT)) for path in extra_configs) - _fail( - "Unexpected additional NuGet.config files detected. " - "Consolidate feed configuration in the repo root:\n " - f"{formatted}" - ) - - -def parse_args(argv: list[str]) -> argparse.Namespace: - parser = argparse.ArgumentParser( - description="Verify StellaOps NuGet feeds prioritise the local mirror." - ) - parser.add_argument( - "--skip-rg", - action="store_true", - help="Skip ripgrep discovery of extra NuGet.config files (useful for focused runs).", - ) - return parser.parse_args(argv) - - -def main(argv: list[str]) -> int: - args = parse_args(argv) - validations = [ - ("NuGet.config ordering", validate_nuget_config), - ("Directory.Build.props restore override", validate_directory_build_props), - ] - if not args.skip_rg: - validations.append(("single NuGet.config", assert_single_nuget_config)) - - for label, check in validations: - try: - check() - except ValidationError as exc: - sys.stderr.write(f"[FAIL] {label}: {exc}\n") - return 1 - else: - sys.stdout.write(f"[OK] {label}\n") - - return 0 - - -if __name__ == "__main__": - sys.exit(main(sys.argv[1:])) +#!/usr/bin/env python3 + +""" +Validate NuGet source ordering for StellaOps. + +Ensures `local-nuget` is the highest priority feed in both NuGet.config and the +Directory.Build.props restore configuration. Fails fast with actionable errors +so CI/offline kit workflows can assert deterministic restore ordering. 
+""" + +from __future__ import annotations + +import argparse +import subprocess +import sys +import xml.etree.ElementTree as ET +from pathlib import Path + + +REPO_ROOT = Path(__file__).resolve().parents[2] +NUGET_CONFIG = REPO_ROOT / "NuGet.config" +ROOT_PROPS = REPO_ROOT / "Directory.Build.props" +EXPECTED_SOURCE_KEYS = ["local", "dotnet-public", "nuget.org"] + + +class ValidationError(Exception): + """Raised when validation fails.""" + + +def _fail(message: str) -> None: + raise ValidationError(message) + + +def _parse_xml(path: Path) -> ET.ElementTree: + try: + return ET.parse(path) + except FileNotFoundError as exc: + _fail(f"Missing required file: {path}") + except ET.ParseError as exc: + _fail(f"Could not parse XML for {path}: {exc}") + + +def validate_nuget_config() -> None: + tree = _parse_xml(NUGET_CONFIG) + root = tree.getroot() + + package_sources = root.find("packageSources") + if package_sources is None: + _fail("NuGet.config must declare a section.") + + children = list(package_sources) + if not children or children[0].tag != "clear": + _fail("NuGet.config packageSources must begin with a element.") + + adds = [child for child in children if child.tag == "add"] + if not adds: + _fail("NuGet.config packageSources must define at least one entry.") + + keys = [add.attrib.get("key") for add in adds] + if keys[: len(EXPECTED_SOURCE_KEYS)] != EXPECTED_SOURCE_KEYS: + formatted = ", ".join(keys) or "" + _fail( + "NuGet.config packageSources must list feeds in the order " + f"{EXPECTED_SOURCE_KEYS}. Found: {formatted}" + ) + + local_value = adds[0].attrib.get("value", "") + if Path(local_value).name != "local-nuget": + _fail( + "NuGet.config local feed should point at the repo-local mirror " + f"'local-nuget', found value '{local_value}'." + ) + + clear = package_sources.find("clear") + if clear is None: + _fail("NuGet.config packageSources must start with to avoid inherited feeds.") + + +def validate_directory_build_props() -> None: + tree = _parse_xml(ROOT_PROPS) + root = tree.getroot() + defaults = None + for element in root.findall(".//_StellaOpsDefaultRestoreSources"): + defaults = [fragment.strip() for fragment in element.text.split(";") if fragment.strip()] + break + + if defaults is None: + _fail("Directory.Build.props must define _StellaOpsDefaultRestoreSources.") + + expected_props = [ + "$(StellaOpsLocalNuGetSource)", + "$(StellaOpsDotNetPublicSource)", + "$(StellaOpsNuGetOrgSource)", + ] + if defaults != expected_props: + _fail( + "Directory.Build.props _StellaOpsDefaultRestoreSources must list feeds " + f"in the order {expected_props}. Found: {defaults}" + ) + + restore_nodes = root.findall(".//RestoreSources") + if not restore_nodes: + _fail("Directory.Build.props must override RestoreSources to force deterministic ordering.") + + uses_default_first = any( + node.text + and node.text.strip().startswith("$(_StellaOpsDefaultRestoreSources)") + for node in restore_nodes + ) + if not uses_default_first: + _fail( + "Directory.Build.props RestoreSources override must place " + "$(_StellaOpsDefaultRestoreSources) at the beginning." 
+ ) + + +def assert_single_nuget_config() -> None: + extra_configs: list[Path] = [] + configs: set[Path] = set() + for glob in ("NuGet.config", "nuget.config"): + try: + result = subprocess.run( + ["rg", "--files", f"-g{glob}"], + check=False, + capture_output=True, + text=True, + cwd=REPO_ROOT, + ) + except FileNotFoundError as exc: + _fail("ripgrep (rg) is required for validation but was not found on PATH.") + if result.returncode not in (0, 1): + _fail( + f"ripgrep failed while searching for {glob}: {result.stderr.strip() or result.returncode}" + ) + for line in result.stdout.splitlines(): + configs.add((REPO_ROOT / line).resolve()) + + configs.discard(NUGET_CONFIG.resolve()) + extra_configs.extend(sorted(configs)) + if extra_configs: + formatted = "\n ".join(str(path.relative_to(REPO_ROOT)) for path in extra_configs) + _fail( + "Unexpected additional NuGet.config files detected. " + "Consolidate feed configuration in the repo root:\n " + f"{formatted}" + ) + + +def parse_args(argv: list[str]) -> argparse.Namespace: + parser = argparse.ArgumentParser( + description="Verify StellaOps NuGet feeds prioritise the local mirror." + ) + parser.add_argument( + "--skip-rg", + action="store_true", + help="Skip ripgrep discovery of extra NuGet.config files (useful for focused runs).", + ) + return parser.parse_args(argv) + + +def main(argv: list[str]) -> int: + args = parse_args(argv) + validations = [ + ("NuGet.config ordering", validate_nuget_config), + ("Directory.Build.props restore override", validate_directory_build_props), + ] + if not args.skip_rg: + validations.append(("single NuGet.config", assert_single_nuget_config)) + + for label, check in validations: + try: + check() + except ValidationError as exc: + sys.stderr.write(f"[FAIL] {label}: {exc}\n") + return 1 + else: + sys.stdout.write(f"[OK] {label}\n") + + return 0 + + +if __name__ == "__main__": + sys.exit(main(sys.argv[1:])) diff --git a/ops/offline-kit/build_offline_kit.py b/ops/offline-kit/build_offline_kit.py index 8b36945e..d1500934 100644 --- a/ops/offline-kit/build_offline_kit.py +++ b/ops/offline-kit/build_offline_kit.py @@ -1,445 +1,445 @@ -#!/usr/bin/env python3 -"""Package the StellaOps Offline Kit with deterministic artefacts and manifest.""" - -from __future__ import annotations - -import argparse -import datetime as dt -import hashlib -import json -import os -import re -import shutil -import subprocess -import sys -import tarfile -from collections import OrderedDict -from pathlib import Path -from typing import Any, Iterable, Mapping, MutableMapping, Optional - -REPO_ROOT = Path(__file__).resolve().parents[2] -RELEASE_TOOLS_DIR = REPO_ROOT / "ops" / "devops" / "release" -TELEMETRY_TOOLS_DIR = REPO_ROOT / "ops" / "devops" / "telemetry" -TELEMETRY_BUNDLE_PATH = REPO_ROOT / "out" / "telemetry" / "telemetry-offline-bundle.tar.gz" - -if str(RELEASE_TOOLS_DIR) not in sys.path: - sys.path.insert(0, str(RELEASE_TOOLS_DIR)) - -from verify_release import ( # type: ignore import-not-found - load_manifest, - resolve_path, - verify_release, -) - -import mirror_debug_store # type: ignore import-not-found - -DEFAULT_RELEASE_DIR = REPO_ROOT / "out" / "release" -DEFAULT_STAGING_DIR = REPO_ROOT / "out" / "offline-kit" / "staging" -DEFAULT_OUTPUT_DIR = REPO_ROOT / "out" / "offline-kit" / "dist" - -ARTIFACT_TARGETS = { - "sbom": Path("sboms"), - "provenance": Path("attest"), - "signature": Path("signatures"), - "metadata": Path("metadata/docker"), -} - - -class CommandError(RuntimeError): - """Raised when an external command fails.""" - - 
-def run(cmd: Iterable[str], *, cwd: Optional[Path] = None, env: Optional[Mapping[str, str]] = None) -> str: - process_env = dict(os.environ) - if env: - process_env.update(env) - result = subprocess.run( - list(cmd), - cwd=str(cwd) if cwd else None, - env=process_env, - check=False, - capture_output=True, - text=True, - ) - if result.returncode != 0: - raise CommandError( - f"Command failed ({result.returncode}): {' '.join(cmd)}\nSTDOUT:\n{result.stdout}\nSTDERR:\n{result.stderr}" - ) - return result.stdout - - -def compute_sha256(path: Path) -> str: - sha = hashlib.sha256() - with path.open("rb") as handle: - for chunk in iter(lambda: handle.read(1024 * 1024), b""): - sha.update(chunk) - return sha.hexdigest() - - -def utc_now_iso() -> str: - return dt.datetime.now(tz=dt.timezone.utc).replace(microsecond=0).isoformat().replace("+00:00", "Z") - - -def safe_component_name(name: str) -> str: - return re.sub(r"[^A-Za-z0-9_.-]", "-", name.strip().lower()) - - -def clean_directory(path: Path) -> None: - if path.exists(): - shutil.rmtree(path) - path.mkdir(parents=True, exist_ok=True) - - -def run_python_analyzer_smoke() -> None: - script = REPO_ROOT / "ops" / "offline-kit" / "run-python-analyzer-smoke.sh" - run(["bash", str(script)], cwd=REPO_ROOT) - - -def copy_if_exists(source: Path, target: Path) -> None: - if source.is_dir(): - shutil.copytree(source, target, dirs_exist_ok=True) - elif source.is_file(): - target.parent.mkdir(parents=True, exist_ok=True) - shutil.copy2(source, target) - - -def copy_release_manifests(release_dir: Path, staging_dir: Path) -> None: - manifest_dir = staging_dir / "manifest" - manifest_dir.mkdir(parents=True, exist_ok=True) - for name in ("release.yaml", "release.yaml.sha256", "release.json", "release.json.sha256"): - source = release_dir / name - if source.exists(): - shutil.copy2(source, manifest_dir / source.name) - - -def copy_component_artifacts( - manifest: Mapping[str, Any], - release_dir: Path, - staging_dir: Path, -) -> None: - components = manifest.get("components") or [] - for component in sorted(components, key=lambda entry: str(entry.get("name", ""))): - if not isinstance(component, Mapping): - continue - component_name = safe_component_name(str(component.get("name", "component"))) - for key, target_root in ARTIFACT_TARGETS.items(): - entry = component.get(key) - if not entry or not isinstance(entry, Mapping): - continue - path_str = entry.get("path") - if not path_str: - continue - resolved = resolve_path(str(path_str), release_dir) - if not resolved.exists(): - raise FileNotFoundError(f"Component '{component_name}' {key} artefact not found: {resolved}") - target_dir = staging_dir / target_root - target_dir.mkdir(parents=True, exist_ok=True) - target_name = f"{component_name}-{resolved.name}" if resolved.name else component_name - shutil.copy2(resolved, target_dir / target_name) - - -def copy_collections( - manifest: Mapping[str, Any], - release_dir: Path, - staging_dir: Path, -) -> None: - for collection, subdir in (("charts", Path("charts")), ("compose", Path("compose"))): - entries = manifest.get(collection) or [] - for entry in entries: - if not isinstance(entry, Mapping): - continue - path_str = entry.get("path") - if not path_str: - continue - resolved = resolve_path(str(path_str), release_dir) - if not resolved.exists(): - raise FileNotFoundError(f"{collection} artefact not found: {resolved}") - target_dir = staging_dir / subdir - target_dir.mkdir(parents=True, exist_ok=True) - shutil.copy2(resolved, target_dir / resolved.name) - - -def 
copy_debug_store(release_dir: Path, staging_dir: Path) -> None: - mirror_debug_store.main( - [ - "--release-dir", - str(release_dir), - "--offline-kit-dir", - str(staging_dir), - ] - ) - - -def copy_plugins_and_assets(staging_dir: Path) -> None: - copy_if_exists(REPO_ROOT / "plugins" / "scanner", staging_dir / "plugins" / "scanner") - copy_if_exists(REPO_ROOT / "certificates", staging_dir / "certificates") - copy_if_exists(REPO_ROOT / "seed-data", staging_dir / "seed-data") - docs_dir = staging_dir / "docs" - docs_dir.mkdir(parents=True, exist_ok=True) - copy_if_exists(REPO_ROOT / "docs" / "24_OFFLINE_KIT.md", docs_dir / "24_OFFLINE_KIT.md") - copy_if_exists(REPO_ROOT / "docs" / "ops" / "telemetry-collector.md", docs_dir / "telemetry-collector.md") - copy_if_exists(REPO_ROOT / "docs" / "ops" / "telemetry-storage.md", docs_dir / "telemetry-storage.md") - - -def package_telemetry_bundle(staging_dir: Path) -> None: - script = TELEMETRY_TOOLS_DIR / "package_offline_bundle.py" - if not script.exists(): - return - TELEMETRY_BUNDLE_PATH.parent.mkdir(parents=True, exist_ok=True) - run(["python", str(script), "--output", str(TELEMETRY_BUNDLE_PATH)], cwd=REPO_ROOT) - telemetry_dir = staging_dir / "telemetry" - telemetry_dir.mkdir(parents=True, exist_ok=True) - shutil.copy2(TELEMETRY_BUNDLE_PATH, telemetry_dir / TELEMETRY_BUNDLE_PATH.name) - sha_path = TELEMETRY_BUNDLE_PATH.with_suffix(TELEMETRY_BUNDLE_PATH.suffix + ".sha256") - if sha_path.exists(): - shutil.copy2(sha_path, telemetry_dir / sha_path.name) - - -def scan_files(staging_dir: Path, exclude: Optional[set[str]] = None) -> list[OrderedDict[str, Any]]: - entries: list[OrderedDict[str, Any]] = [] - exclude = exclude or set() - for path in sorted(staging_dir.rglob("*")): - if not path.is_file(): - continue - rel = path.relative_to(staging_dir).as_posix() - if rel in exclude: - continue - entries.append( - OrderedDict( - ( - ("name", rel), - ("sha256", compute_sha256(path)), - ("size", path.stat().st_size), - ) - ) - ) - return entries - - -def write_offline_manifest( - staging_dir: Path, - version: str, - channel: str, - release_manifest_sha: Optional[str], -) -> tuple[Path, str]: - manifest_dir = staging_dir / "manifest" - manifest_dir.mkdir(parents=True, exist_ok=True) - offline_manifest_path = manifest_dir / "offline-manifest.json" - files = scan_files(staging_dir, exclude={"manifest/offline-manifest.json", "manifest/offline-manifest.json.sha256"}) - manifest_data = OrderedDict( - ( - ( - "bundle", - OrderedDict( - ( - ("version", version), - ("channel", channel), - ("capturedAt", utc_now_iso()), - ("releaseManifestSha256", release_manifest_sha), - ) - ), - ), - ("artifacts", files), - ) - ) - with offline_manifest_path.open("w", encoding="utf-8") as handle: - json.dump(manifest_data, handle, indent=2) - handle.write("\n") - manifest_sha = compute_sha256(offline_manifest_path) - (offline_manifest_path.with_suffix(".json.sha256")).write_text( - f"{manifest_sha} {offline_manifest_path.name}\n", - encoding="utf-8", - ) - return offline_manifest_path, manifest_sha - - -def tarinfo_filter(tarinfo: tarfile.TarInfo) -> tarfile.TarInfo: - tarinfo.uid = 0 - tarinfo.gid = 0 - tarinfo.uname = "" - tarinfo.gname = "" - tarinfo.mtime = 0 - return tarinfo - - -def create_tarball(staging_dir: Path, output_dir: Path, bundle_name: str) -> Path: - output_dir.mkdir(parents=True, exist_ok=True) - bundle_path = output_dir / f"{bundle_name}.tar.gz" - if bundle_path.exists(): - bundle_path.unlink() - with tarfile.open(bundle_path, "w:gz", compresslevel=9) as tar: 
- for path in sorted(staging_dir.rglob("*")): - if path.is_file(): - arcname = path.relative_to(staging_dir).as_posix() - tar.add(path, arcname=arcname, filter=tarinfo_filter) - return bundle_path - - -def sign_blob( - path: Path, - *, - key_ref: Optional[str], - identity_token: Optional[str], - password: Optional[str], - tlog_upload: bool, -) -> Optional[Path]: - if not key_ref and not identity_token: - return None - cmd = ["cosign", "sign-blob", "--yes", str(path)] - if key_ref: - cmd.extend(["--key", key_ref]) - if identity_token: - cmd.extend(["--identity-token", identity_token]) - if not tlog_upload: - cmd.append("--tlog-upload=false") - env = {"COSIGN_PASSWORD": password or ""} - signature = run(cmd, env=env) - sig_path = path.with_suffix(path.suffix + ".sig") - sig_path.write_text(signature, encoding="utf-8") - return sig_path - - -def build_offline_kit(args: argparse.Namespace) -> MutableMapping[str, Any]: - release_dir = args.release_dir.resolve() - staging_dir = args.staging_dir.resolve() - output_dir = args.output_dir.resolve() - - verify_release(release_dir) - if not args.skip_smoke: - run_python_analyzer_smoke() - clean_directory(staging_dir) - copy_debug_store(release_dir, staging_dir) - - manifest_data = load_manifest(release_dir) - release_manifest_sha = None - checksums = manifest_data.get("checksums") - if isinstance(checksums, Mapping): - release_manifest_sha = checksums.get("sha256") - - copy_release_manifests(release_dir, staging_dir) - copy_component_artifacts(manifest_data, release_dir, staging_dir) - copy_collections(manifest_data, release_dir, staging_dir) - copy_plugins_and_assets(staging_dir) - package_telemetry_bundle(staging_dir) - - offline_manifest_path, offline_manifest_sha = write_offline_manifest( - staging_dir, - args.version, - args.channel, - release_manifest_sha, - ) - bundle_name = f"stella-ops-offline-kit-{args.version}-{args.channel}" - bundle_path = create_tarball(staging_dir, output_dir, bundle_name) - bundle_sha = compute_sha256(bundle_path) - bundle_sha_prefixed = f"sha256:{bundle_sha}" - (bundle_path.with_suffix(".tar.gz.sha256")).write_text( - f"{bundle_sha} {bundle_path.name}\n", - encoding="utf-8", - ) - - signature_paths: dict[str, str] = {} - sig = sign_blob( - bundle_path, - key_ref=args.cosign_key, - identity_token=args.cosign_identity_token, - password=args.cosign_password, - tlog_upload=not args.no_transparency, - ) - if sig: - signature_paths["bundleSignature"] = str(sig) - manifest_sig = sign_blob( - offline_manifest_path, - key_ref=args.cosign_key, - identity_token=args.cosign_identity_token, - password=args.cosign_password, - tlog_upload=not args.no_transparency, - ) - if manifest_sig: - signature_paths["manifestSignature"] = str(manifest_sig) - - metadata = OrderedDict( - ( - ("bundleId", args.bundle_id or f"{args.version}-{args.channel}-{utc_now_iso()}"), - ("bundleName", bundle_path.name), - ("bundleSha256", bundle_sha_prefixed), - ("bundleSize", bundle_path.stat().st_size), - ("manifestName", offline_manifest_path.name), - ("manifestSha256", f"sha256:{offline_manifest_sha}"), - ("manifestSize", offline_manifest_path.stat().st_size), - ("channel", args.channel), - ("version", args.version), - ("capturedAt", utc_now_iso()), - ) - ) - - if sig: - metadata["bundleSignatureName"] = Path(sig).name - if manifest_sig: - metadata["manifestSignatureName"] = Path(manifest_sig).name - - metadata_path = output_dir / f"{bundle_name}.metadata.json" - with metadata_path.open("w", encoding="utf-8") as handle: - json.dump(metadata, handle, 
indent=2) - handle.write("\n") - - return OrderedDict( - ( - ("bundlePath", str(bundle_path)), - ("bundleSha256", bundle_sha), - ("manifestPath", str(offline_manifest_path)), - ("metadataPath", str(metadata_path)), - ("signatures", signature_paths), - ) - ) - - -def parse_args(argv: Optional[list[str]] = None) -> argparse.Namespace: - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument("--version", required=True, help="Bundle version (e.g. 2025.10.0)") - parser.add_argument("--channel", default="edge", help="Release channel (default: %(default)s)") - parser.add_argument("--bundle-id", help="Optional explicit bundle identifier") - parser.add_argument( - "--release-dir", - type=Path, - default=DEFAULT_RELEASE_DIR, - help="Release artefact directory (default: %(default)s)", - ) - parser.add_argument( - "--staging-dir", - type=Path, - default=DEFAULT_STAGING_DIR, - help="Temporary staging directory (default: %(default)s)", - ) - parser.add_argument( - "--output-dir", - type=Path, - default=DEFAULT_OUTPUT_DIR, - help="Destination directory for packaged bundles (default: %(default)s)", - ) - parser.add_argument("--cosign-key", dest="cosign_key", help="Cosign key reference for signing") - parser.add_argument("--cosign-password", dest="cosign_password", help="Cosign key password (if applicable)") - parser.add_argument("--cosign-identity-token", dest="cosign_identity_token", help="Cosign identity token") - parser.add_argument("--no-transparency", action="store_true", help="Disable Rekor transparency log uploads") - parser.add_argument("--skip-smoke", action="store_true", help="Skip analyzer smoke execution (testing only)") - return parser.parse_args(argv) - - -def main(argv: Optional[list[str]] = None) -> int: - args = parse_args(argv) - try: - result = build_offline_kit(args) - except Exception as exc: # pylint: disable=broad-except - print(f"offline-kit packaging failed: {exc}", file=sys.stderr) - return 1 - print("✅ Offline kit packaged") - for key, value in result.items(): - if isinstance(value, dict): - for sub_key, sub_val in value.items(): - print(f" - {key}.{sub_key}: {sub_val}") - else: - print(f" - {key}: {value}") - return 0 - - -if __name__ == "__main__": - raise SystemExit(main()) +#!/usr/bin/env python3 +"""Package the StellaOps Offline Kit with deterministic artefacts and manifest.""" + +from __future__ import annotations + +import argparse +import datetime as dt +import hashlib +import json +import os +import re +import shutil +import subprocess +import sys +import tarfile +from collections import OrderedDict +from pathlib import Path +from typing import Any, Iterable, Mapping, MutableMapping, Optional + +REPO_ROOT = Path(__file__).resolve().parents[2] +RELEASE_TOOLS_DIR = REPO_ROOT / "ops" / "devops" / "release" +TELEMETRY_TOOLS_DIR = REPO_ROOT / "ops" / "devops" / "telemetry" +TELEMETRY_BUNDLE_PATH = REPO_ROOT / "out" / "telemetry" / "telemetry-offline-bundle.tar.gz" + +if str(RELEASE_TOOLS_DIR) not in sys.path: + sys.path.insert(0, str(RELEASE_TOOLS_DIR)) + +from verify_release import ( # type: ignore import-not-found + load_manifest, + resolve_path, + verify_release, +) + +import mirror_debug_store # type: ignore import-not-found + +DEFAULT_RELEASE_DIR = REPO_ROOT / "out" / "release" +DEFAULT_STAGING_DIR = REPO_ROOT / "out" / "offline-kit" / "staging" +DEFAULT_OUTPUT_DIR = REPO_ROOT / "out" / "offline-kit" / "dist" + +ARTIFACT_TARGETS = { + "sbom": Path("sboms"), + "provenance": Path("attest"), + "signature": Path("signatures"), + "metadata": 
Path("metadata/docker"), +} + + +class CommandError(RuntimeError): + """Raised when an external command fails.""" + + +def run(cmd: Iterable[str], *, cwd: Optional[Path] = None, env: Optional[Mapping[str, str]] = None) -> str: + process_env = dict(os.environ) + if env: + process_env.update(env) + result = subprocess.run( + list(cmd), + cwd=str(cwd) if cwd else None, + env=process_env, + check=False, + capture_output=True, + text=True, + ) + if result.returncode != 0: + raise CommandError( + f"Command failed ({result.returncode}): {' '.join(cmd)}\nSTDOUT:\n{result.stdout}\nSTDERR:\n{result.stderr}" + ) + return result.stdout + + +def compute_sha256(path: Path) -> str: + sha = hashlib.sha256() + with path.open("rb") as handle: + for chunk in iter(lambda: handle.read(1024 * 1024), b""): + sha.update(chunk) + return sha.hexdigest() + + +def utc_now_iso() -> str: + return dt.datetime.now(tz=dt.timezone.utc).replace(microsecond=0).isoformat().replace("+00:00", "Z") + + +def safe_component_name(name: str) -> str: + return re.sub(r"[^A-Za-z0-9_.-]", "-", name.strip().lower()) + + +def clean_directory(path: Path) -> None: + if path.exists(): + shutil.rmtree(path) + path.mkdir(parents=True, exist_ok=True) + + +def run_python_analyzer_smoke() -> None: + script = REPO_ROOT / "ops" / "offline-kit" / "run-python-analyzer-smoke.sh" + run(["bash", str(script)], cwd=REPO_ROOT) + + +def copy_if_exists(source: Path, target: Path) -> None: + if source.is_dir(): + shutil.copytree(source, target, dirs_exist_ok=True) + elif source.is_file(): + target.parent.mkdir(parents=True, exist_ok=True) + shutil.copy2(source, target) + + +def copy_release_manifests(release_dir: Path, staging_dir: Path) -> None: + manifest_dir = staging_dir / "manifest" + manifest_dir.mkdir(parents=True, exist_ok=True) + for name in ("release.yaml", "release.yaml.sha256", "release.json", "release.json.sha256"): + source = release_dir / name + if source.exists(): + shutil.copy2(source, manifest_dir / source.name) + + +def copy_component_artifacts( + manifest: Mapping[str, Any], + release_dir: Path, + staging_dir: Path, +) -> None: + components = manifest.get("components") or [] + for component in sorted(components, key=lambda entry: str(entry.get("name", ""))): + if not isinstance(component, Mapping): + continue + component_name = safe_component_name(str(component.get("name", "component"))) + for key, target_root in ARTIFACT_TARGETS.items(): + entry = component.get(key) + if not entry or not isinstance(entry, Mapping): + continue + path_str = entry.get("path") + if not path_str: + continue + resolved = resolve_path(str(path_str), release_dir) + if not resolved.exists(): + raise FileNotFoundError(f"Component '{component_name}' {key} artefact not found: {resolved}") + target_dir = staging_dir / target_root + target_dir.mkdir(parents=True, exist_ok=True) + target_name = f"{component_name}-{resolved.name}" if resolved.name else component_name + shutil.copy2(resolved, target_dir / target_name) + + +def copy_collections( + manifest: Mapping[str, Any], + release_dir: Path, + staging_dir: Path, +) -> None: + for collection, subdir in (("charts", Path("charts")), ("compose", Path("compose"))): + entries = manifest.get(collection) or [] + for entry in entries: + if not isinstance(entry, Mapping): + continue + path_str = entry.get("path") + if not path_str: + continue + resolved = resolve_path(str(path_str), release_dir) + if not resolved.exists(): + raise FileNotFoundError(f"{collection} artefact not found: {resolved}") + target_dir = staging_dir / 
subdir + target_dir.mkdir(parents=True, exist_ok=True) + shutil.copy2(resolved, target_dir / resolved.name) + + +def copy_debug_store(release_dir: Path, staging_dir: Path) -> None: + mirror_debug_store.main( + [ + "--release-dir", + str(release_dir), + "--offline-kit-dir", + str(staging_dir), + ] + ) + + +def copy_plugins_and_assets(staging_dir: Path) -> None: + copy_if_exists(REPO_ROOT / "plugins" / "scanner", staging_dir / "plugins" / "scanner") + copy_if_exists(REPO_ROOT / "certificates", staging_dir / "certificates") + copy_if_exists(REPO_ROOT / "seed-data", staging_dir / "seed-data") + docs_dir = staging_dir / "docs" + docs_dir.mkdir(parents=True, exist_ok=True) + copy_if_exists(REPO_ROOT / "docs" / "24_OFFLINE_KIT.md", docs_dir / "24_OFFLINE_KIT.md") + copy_if_exists(REPO_ROOT / "docs" / "ops" / "telemetry-collector.md", docs_dir / "telemetry-collector.md") + copy_if_exists(REPO_ROOT / "docs" / "ops" / "telemetry-storage.md", docs_dir / "telemetry-storage.md") + + +def package_telemetry_bundle(staging_dir: Path) -> None: + script = TELEMETRY_TOOLS_DIR / "package_offline_bundle.py" + if not script.exists(): + return + TELEMETRY_BUNDLE_PATH.parent.mkdir(parents=True, exist_ok=True) + run(["python", str(script), "--output", str(TELEMETRY_BUNDLE_PATH)], cwd=REPO_ROOT) + telemetry_dir = staging_dir / "telemetry" + telemetry_dir.mkdir(parents=True, exist_ok=True) + shutil.copy2(TELEMETRY_BUNDLE_PATH, telemetry_dir / TELEMETRY_BUNDLE_PATH.name) + sha_path = TELEMETRY_BUNDLE_PATH.with_suffix(TELEMETRY_BUNDLE_PATH.suffix + ".sha256") + if sha_path.exists(): + shutil.copy2(sha_path, telemetry_dir / sha_path.name) + + +def scan_files(staging_dir: Path, exclude: Optional[set[str]] = None) -> list[OrderedDict[str, Any]]: + entries: list[OrderedDict[str, Any]] = [] + exclude = exclude or set() + for path in sorted(staging_dir.rglob("*")): + if not path.is_file(): + continue + rel = path.relative_to(staging_dir).as_posix() + if rel in exclude: + continue + entries.append( + OrderedDict( + ( + ("name", rel), + ("sha256", compute_sha256(path)), + ("size", path.stat().st_size), + ) + ) + ) + return entries + + +def write_offline_manifest( + staging_dir: Path, + version: str, + channel: str, + release_manifest_sha: Optional[str], +) -> tuple[Path, str]: + manifest_dir = staging_dir / "manifest" + manifest_dir.mkdir(parents=True, exist_ok=True) + offline_manifest_path = manifest_dir / "offline-manifest.json" + files = scan_files(staging_dir, exclude={"manifest/offline-manifest.json", "manifest/offline-manifest.json.sha256"}) + manifest_data = OrderedDict( + ( + ( + "bundle", + OrderedDict( + ( + ("version", version), + ("channel", channel), + ("capturedAt", utc_now_iso()), + ("releaseManifestSha256", release_manifest_sha), + ) + ), + ), + ("artifacts", files), + ) + ) + with offline_manifest_path.open("w", encoding="utf-8") as handle: + json.dump(manifest_data, handle, indent=2) + handle.write("\n") + manifest_sha = compute_sha256(offline_manifest_path) + (offline_manifest_path.with_suffix(".json.sha256")).write_text( + f"{manifest_sha} {offline_manifest_path.name}\n", + encoding="utf-8", + ) + return offline_manifest_path, manifest_sha + + +def tarinfo_filter(tarinfo: tarfile.TarInfo) -> tarfile.TarInfo: + tarinfo.uid = 0 + tarinfo.gid = 0 + tarinfo.uname = "" + tarinfo.gname = "" + tarinfo.mtime = 0 + return tarinfo + + +def create_tarball(staging_dir: Path, output_dir: Path, bundle_name: str) -> Path: + output_dir.mkdir(parents=True, exist_ok=True) + bundle_path = output_dir / f"{bundle_name}.tar.gz" 
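For downstream consumers, the artefact list emitted by write_offline_manifest above (one entry per file with name, sha256, and size) is enough to re-check a kit after transfer. A minimal verification sketch, assuming it is pointed at the extracted staging root and the manifest layout shown in this patch:

#!/usr/bin/env python3
"""Sketch: re-verify artefacts listed in manifest/offline-manifest.json (illustrative only)."""
import hashlib
import json
import sys
from pathlib import Path


def sha256_of(path: Path) -> str:
    digest = hashlib.sha256()
    with path.open("rb") as handle:
        for chunk in iter(lambda: handle.read(1024 * 1024), b""):
            digest.update(chunk)
    return digest.hexdigest()


def verify(staging_root: Path) -> int:
    manifest = json.loads(
        (staging_root / "manifest" / "offline-manifest.json").read_text(encoding="utf-8")
    )
    failures = 0
    for entry in manifest["artifacts"]:
        target = staging_root / entry["name"]
        if not target.is_file():
            print(f"missing: {entry['name']}")
            failures += 1
            continue
        if sha256_of(target) != entry["sha256"] or target.stat().st_size != entry["size"]:
            print(f"mismatch: {entry['name']}")
            failures += 1
    return failures


if __name__ == "__main__":
    root = Path(sys.argv[1]) if len(sys.argv) > 1 else Path(".")
    sys.exit(1 if verify(root) else 0)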
+ if bundle_path.exists(): + bundle_path.unlink() + with tarfile.open(bundle_path, "w:gz", compresslevel=9) as tar: + for path in sorted(staging_dir.rglob("*")): + if path.is_file(): + arcname = path.relative_to(staging_dir).as_posix() + tar.add(path, arcname=arcname, filter=tarinfo_filter) + return bundle_path + + +def sign_blob( + path: Path, + *, + key_ref: Optional[str], + identity_token: Optional[str], + password: Optional[str], + tlog_upload: bool, +) -> Optional[Path]: + if not key_ref and not identity_token: + return None + cmd = ["cosign", "sign-blob", "--yes", str(path)] + if key_ref: + cmd.extend(["--key", key_ref]) + if identity_token: + cmd.extend(["--identity-token", identity_token]) + if not tlog_upload: + cmd.append("--tlog-upload=false") + env = {"COSIGN_PASSWORD": password or ""} + signature = run(cmd, env=env) + sig_path = path.with_suffix(path.suffix + ".sig") + sig_path.write_text(signature, encoding="utf-8") + return sig_path + + +def build_offline_kit(args: argparse.Namespace) -> MutableMapping[str, Any]: + release_dir = args.release_dir.resolve() + staging_dir = args.staging_dir.resolve() + output_dir = args.output_dir.resolve() + + verify_release(release_dir) + if not args.skip_smoke: + run_python_analyzer_smoke() + clean_directory(staging_dir) + copy_debug_store(release_dir, staging_dir) + + manifest_data = load_manifest(release_dir) + release_manifest_sha = None + checksums = manifest_data.get("checksums") + if isinstance(checksums, Mapping): + release_manifest_sha = checksums.get("sha256") + + copy_release_manifests(release_dir, staging_dir) + copy_component_artifacts(manifest_data, release_dir, staging_dir) + copy_collections(manifest_data, release_dir, staging_dir) + copy_plugins_and_assets(staging_dir) + package_telemetry_bundle(staging_dir) + + offline_manifest_path, offline_manifest_sha = write_offline_manifest( + staging_dir, + args.version, + args.channel, + release_manifest_sha, + ) + bundle_name = f"stella-ops-offline-kit-{args.version}-{args.channel}" + bundle_path = create_tarball(staging_dir, output_dir, bundle_name) + bundle_sha = compute_sha256(bundle_path) + bundle_sha_prefixed = f"sha256:{bundle_sha}" + (bundle_path.with_suffix(".tar.gz.sha256")).write_text( + f"{bundle_sha} {bundle_path.name}\n", + encoding="utf-8", + ) + + signature_paths: dict[str, str] = {} + sig = sign_blob( + bundle_path, + key_ref=args.cosign_key, + identity_token=args.cosign_identity_token, + password=args.cosign_password, + tlog_upload=not args.no_transparency, + ) + if sig: + signature_paths["bundleSignature"] = str(sig) + manifest_sig = sign_blob( + offline_manifest_path, + key_ref=args.cosign_key, + identity_token=args.cosign_identity_token, + password=args.cosign_password, + tlog_upload=not args.no_transparency, + ) + if manifest_sig: + signature_paths["manifestSignature"] = str(manifest_sig) + + metadata = OrderedDict( + ( + ("bundleId", args.bundle_id or f"{args.version}-{args.channel}-{utc_now_iso()}"), + ("bundleName", bundle_path.name), + ("bundleSha256", bundle_sha_prefixed), + ("bundleSize", bundle_path.stat().st_size), + ("manifestName", offline_manifest_path.name), + ("manifestSha256", f"sha256:{offline_manifest_sha}"), + ("manifestSize", offline_manifest_path.stat().st_size), + ("channel", args.channel), + ("version", args.version), + ("capturedAt", utc_now_iso()), + ) + ) + + if sig: + metadata["bundleSignatureName"] = Path(sig).name + if manifest_sig: + metadata["manifestSignatureName"] = Path(manifest_sig).name + + metadata_path = output_dir / 
f"{bundle_name}.metadata.json" + with metadata_path.open("w", encoding="utf-8") as handle: + json.dump(metadata, handle, indent=2) + handle.write("\n") + + return OrderedDict( + ( + ("bundlePath", str(bundle_path)), + ("bundleSha256", bundle_sha), + ("manifestPath", str(offline_manifest_path)), + ("metadataPath", str(metadata_path)), + ("signatures", signature_paths), + ) + ) + + +def parse_args(argv: Optional[list[str]] = None) -> argparse.Namespace: + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument("--version", required=True, help="Bundle version (e.g. 2025.10.0)") + parser.add_argument("--channel", default="edge", help="Release channel (default: %(default)s)") + parser.add_argument("--bundle-id", help="Optional explicit bundle identifier") + parser.add_argument( + "--release-dir", + type=Path, + default=DEFAULT_RELEASE_DIR, + help="Release artefact directory (default: %(default)s)", + ) + parser.add_argument( + "--staging-dir", + type=Path, + default=DEFAULT_STAGING_DIR, + help="Temporary staging directory (default: %(default)s)", + ) + parser.add_argument( + "--output-dir", + type=Path, + default=DEFAULT_OUTPUT_DIR, + help="Destination directory for packaged bundles (default: %(default)s)", + ) + parser.add_argument("--cosign-key", dest="cosign_key", help="Cosign key reference for signing") + parser.add_argument("--cosign-password", dest="cosign_password", help="Cosign key password (if applicable)") + parser.add_argument("--cosign-identity-token", dest="cosign_identity_token", help="Cosign identity token") + parser.add_argument("--no-transparency", action="store_true", help="Disable Rekor transparency log uploads") + parser.add_argument("--skip-smoke", action="store_true", help="Skip analyzer smoke execution (testing only)") + return parser.parse_args(argv) + + +def main(argv: Optional[list[str]] = None) -> int: + args = parse_args(argv) + try: + result = build_offline_kit(args) + except Exception as exc: # pylint: disable=broad-except + print(f"offline-kit packaging failed: {exc}", file=sys.stderr) + return 1 + print("✅ Offline kit packaged") + for key, value in result.items(): + if isinstance(value, dict): + for sub_key, sub_val in value.items(): + print(f" - {key}.{sub_key}: {sub_val}") + else: + print(f" - {key}: {value}") + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/ops/offline-kit/mirror_debug_store.py b/ops/offline-kit/mirror_debug_store.py index 334e40d9..c32f3a69 100644 --- a/ops/offline-kit/mirror_debug_store.py +++ b/ops/offline-kit/mirror_debug_store.py @@ -1,221 +1,221 @@ -#!/usr/bin/env python3 -"""Mirror release debug-store artefacts into the Offline Kit staging tree. - -This helper copies the release `debug/` directory (including `.build-id/`, -`debug-manifest.json`, and the `.sha256` companion) into the Offline Kit -output directory and verifies the manifest hashes after the copy. A summary -document is written under `metadata/debug-store.json` so packaging jobs can -surface the available build-ids and validation status. 
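Because main() accepts an explicit argv list, the packager can also be driven from another Python script instead of the shell. A minimal sketch, assuming ops/offline-kit is importable and using illustrative values for the flags defined in parse_args above:

# Sketch: programmatic invocation of the offline-kit packager (illustrative values).
import sys

import build_offline_kit  # assumes ops/offline-kit is on sys.path

exit_code = build_offline_kit.main(
    [
        "--version", "2025.10.0",
        "--channel", "edge",
        "--skip-smoke",       # testing only; skips the analyzer smoke run
        "--no-transparency",  # disable Rekor uploads, e.g. for air-gapped builds
    ]
)
sys.exit(exit_code)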
-""" - -from __future__ import annotations - -import argparse -import datetime as dt -import json -import pathlib -import shutil -import sys -from typing import Iterable, Tuple - -REPO_ROOT = pathlib.Path(__file__).resolve().parents[2] - - -def compute_sha256(path: pathlib.Path) -> str: - import hashlib - - sha = hashlib.sha256() - with path.open("rb") as handle: - for chunk in iter(lambda: handle.read(1024 * 1024), b""): - sha.update(chunk) - return sha.hexdigest() - - -def load_manifest(manifest_path: pathlib.Path) -> dict: - with manifest_path.open("r", encoding="utf-8") as handle: - return json.load(handle) - - -def parse_manifest_sha(sha_path: pathlib.Path) -> str | None: - if not sha_path.exists(): - return None - text = sha_path.read_text(encoding="utf-8").strip() - if not text: - return None - # Allow either "" or " filename" formats. - return text.split()[0] - - -def iter_debug_files(base_dir: pathlib.Path) -> Iterable[pathlib.Path]: - for path in base_dir.rglob("*"): - if path.is_file(): - yield path - - -def copy_debug_store(source_root: pathlib.Path, target_root: pathlib.Path, *, dry_run: bool) -> None: - if dry_run: - print(f"[dry-run] Would copy '{source_root}' -> '{target_root}'") - return - - if target_root.exists(): - shutil.rmtree(target_root) - shutil.copytree(source_root, target_root) - - -def verify_debug_store(manifest: dict, offline_root: pathlib.Path) -> Tuple[int, int]: - """Return (verified_count, total_entries).""" - - artifacts = manifest.get("artifacts", []) - verified = 0 - for entry in artifacts: - debug_path = entry.get("debugPath") - expected_sha = entry.get("sha256") - expected_size = entry.get("size") - - if not debug_path or not expected_sha: - continue - - relative = pathlib.PurePosixPath(debug_path) - resolved = (offline_root.parent / relative).resolve() - - if not resolved.exists(): - raise FileNotFoundError(f"Debug artefact missing after mirror: {relative}") - - actual_sha = compute_sha256(resolved) - if actual_sha != expected_sha: - raise ValueError( - f"Digest mismatch for {relative}: expected {expected_sha}, found {actual_sha}" - ) - - if expected_size is not None: - actual_size = resolved.stat().st_size - if actual_size != expected_size: - raise ValueError( - f"Size mismatch for {relative}: expected {expected_size}, found {actual_size}" - ) - - verified += 1 - - return verified, len(artifacts) - - -def summarize_store(manifest: dict, manifest_sha: str | None, offline_root: pathlib.Path, summary_path: pathlib.Path) -> None: - debug_files = [ - path - for path in iter_debug_files(offline_root) - if path.suffix == ".debug" - ] - - total_size = sum(path.stat().st_size for path in debug_files) - build_ids = sorted( - {entry.get("buildId") for entry in manifest.get("artifacts", []) if entry.get("buildId")} - ) - - summary = { - "generatedAt": dt.datetime.now(tz=dt.timezone.utc) - .replace(microsecond=0) - .isoformat() - .replace("+00:00", "Z"), - "manifestGeneratedAt": manifest.get("generatedAt"), - "manifestSha256": manifest_sha, - "platforms": manifest.get("platforms") - or sorted({entry.get("platform") for entry in manifest.get("artifacts", []) if entry.get("platform")}), - "artifactCount": len(manifest.get("artifacts", [])), - "buildIds": { - "total": len(build_ids), - "samples": build_ids[:10], - }, - "debugFiles": { - "count": len(debug_files), - "totalSizeBytes": total_size, - }, - } - - summary_path.parent.mkdir(parents=True, exist_ok=True) - with summary_path.open("w", encoding="utf-8") as handle: - json.dump(summary, handle, indent=2) - 
handle.write("\n") - - -def resolve_release_debug_dir(base: pathlib.Path) -> pathlib.Path: - debug_dir = base / "debug" - if debug_dir.exists(): - return debug_dir - - # Allow specifying the channel directory directly (e.g. out/release/stable) - if base.name == "debug": - return base - - raise FileNotFoundError(f"Debug directory not found under '{base}'") - - -def parse_args(argv: list[str] | None = None) -> argparse.Namespace: - parser = argparse.ArgumentParser(description=__doc__) - parser.add_argument( - "--release-dir", - type=pathlib.Path, - default=REPO_ROOT / "out" / "release", - help="Release output directory containing the debug store (default: %(default)s)", - ) - parser.add_argument( - "--offline-kit-dir", - type=pathlib.Path, - default=REPO_ROOT / "out" / "offline-kit", - help="Offline Kit staging directory (default: %(default)s)", - ) - parser.add_argument( - "--verify-only", - action="store_true", - help="Skip copying and only verify the existing offline kit debug store", - ) - parser.add_argument( - "--dry-run", - action="store_true", - help="Print actions without copying files", - ) - return parser.parse_args(argv) - - -def main(argv: list[str] | None = None) -> int: - args = parse_args(argv) - - try: - source_debug = resolve_release_debug_dir(args.release_dir.resolve()) - except FileNotFoundError as exc: - print(f"error: {exc}", file=sys.stderr) - return 2 - - target_root = (args.offline_kit_dir / "debug").resolve() - - if not args.verify_only: - copy_debug_store(source_debug, target_root, dry_run=args.dry_run) - if args.dry_run: - return 0 - - manifest_path = target_root / "debug-manifest.json" - if not manifest_path.exists(): - print(f"error: offline kit manifest missing at {manifest_path}", file=sys.stderr) - return 3 - - manifest = load_manifest(manifest_path) - manifest_sha_path = manifest_path.with_suffix(manifest_path.suffix + ".sha256") - recorded_sha = parse_manifest_sha(manifest_sha_path) - recomputed_sha = compute_sha256(manifest_path) - if recorded_sha and recorded_sha != recomputed_sha: - print( - f"warning: manifest SHA mismatch (recorded {recorded_sha}, recomputed {recomputed_sha}); updating checksum", - file=sys.stderr, - ) - manifest_sha_path.write_text(f"{recomputed_sha} {manifest_path.name}\n", encoding="utf-8") - - verified, total = verify_debug_store(manifest, target_root) - print(f"✔ verified {verified}/{total} debug artefacts (manifest SHA {recomputed_sha})") - - summary_path = args.offline_kit_dir / "metadata" / "debug-store.json" - summarize_store(manifest, recomputed_sha, target_root, summary_path) - print(f"ℹ summary written to {summary_path}") - return 0 - - -if __name__ == "__main__": - raise SystemExit(main()) +#!/usr/bin/env python3 +"""Mirror release debug-store artefacts into the Offline Kit staging tree. + +This helper copies the release `debug/` directory (including `.build-id/`, +`debug-manifest.json`, and the `.sha256` companion) into the Offline Kit +output directory and verifies the manifest hashes after the copy. A summary +document is written under `metadata/debug-store.json` so packaging jobs can +surface the available build-ids and validation status. 
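The same pattern applies to the debug-store helper described in the module docstring above. A minimal sketch, assuming ops/offline-kit is importable, that re-runs verification against an already populated kit using the flags defined further down in this file (the paths match the script defaults):

# Sketch: verify-only pass over an existing Offline Kit debug store.
import sys

import mirror_debug_store  # assumes ops/offline-kit is on sys.path

exit_code = mirror_debug_store.main(
    [
        "--release-dir", "out/release",
        "--offline-kit-dir", "out/offline-kit",
        "--verify-only",  # skip the copy and only check the existing debug store
    ]
)
sys.exit(exit_code)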
+""" + +from __future__ import annotations + +import argparse +import datetime as dt +import json +import pathlib +import shutil +import sys +from typing import Iterable, Tuple + +REPO_ROOT = pathlib.Path(__file__).resolve().parents[2] + + +def compute_sha256(path: pathlib.Path) -> str: + import hashlib + + sha = hashlib.sha256() + with path.open("rb") as handle: + for chunk in iter(lambda: handle.read(1024 * 1024), b""): + sha.update(chunk) + return sha.hexdigest() + + +def load_manifest(manifest_path: pathlib.Path) -> dict: + with manifest_path.open("r", encoding="utf-8") as handle: + return json.load(handle) + + +def parse_manifest_sha(sha_path: pathlib.Path) -> str | None: + if not sha_path.exists(): + return None + text = sha_path.read_text(encoding="utf-8").strip() + if not text: + return None + # Allow either "" or " filename" formats. + return text.split()[0] + + +def iter_debug_files(base_dir: pathlib.Path) -> Iterable[pathlib.Path]: + for path in base_dir.rglob("*"): + if path.is_file(): + yield path + + +def copy_debug_store(source_root: pathlib.Path, target_root: pathlib.Path, *, dry_run: bool) -> None: + if dry_run: + print(f"[dry-run] Would copy '{source_root}' -> '{target_root}'") + return + + if target_root.exists(): + shutil.rmtree(target_root) + shutil.copytree(source_root, target_root) + + +def verify_debug_store(manifest: dict, offline_root: pathlib.Path) -> Tuple[int, int]: + """Return (verified_count, total_entries).""" + + artifacts = manifest.get("artifacts", []) + verified = 0 + for entry in artifacts: + debug_path = entry.get("debugPath") + expected_sha = entry.get("sha256") + expected_size = entry.get("size") + + if not debug_path or not expected_sha: + continue + + relative = pathlib.PurePosixPath(debug_path) + resolved = (offline_root.parent / relative).resolve() + + if not resolved.exists(): + raise FileNotFoundError(f"Debug artefact missing after mirror: {relative}") + + actual_sha = compute_sha256(resolved) + if actual_sha != expected_sha: + raise ValueError( + f"Digest mismatch for {relative}: expected {expected_sha}, found {actual_sha}" + ) + + if expected_size is not None: + actual_size = resolved.stat().st_size + if actual_size != expected_size: + raise ValueError( + f"Size mismatch for {relative}: expected {expected_size}, found {actual_size}" + ) + + verified += 1 + + return verified, len(artifacts) + + +def summarize_store(manifest: dict, manifest_sha: str | None, offline_root: pathlib.Path, summary_path: pathlib.Path) -> None: + debug_files = [ + path + for path in iter_debug_files(offline_root) + if path.suffix == ".debug" + ] + + total_size = sum(path.stat().st_size for path in debug_files) + build_ids = sorted( + {entry.get("buildId") for entry in manifest.get("artifacts", []) if entry.get("buildId")} + ) + + summary = { + "generatedAt": dt.datetime.now(tz=dt.timezone.utc) + .replace(microsecond=0) + .isoformat() + .replace("+00:00", "Z"), + "manifestGeneratedAt": manifest.get("generatedAt"), + "manifestSha256": manifest_sha, + "platforms": manifest.get("platforms") + or sorted({entry.get("platform") for entry in manifest.get("artifacts", []) if entry.get("platform")}), + "artifactCount": len(manifest.get("artifacts", [])), + "buildIds": { + "total": len(build_ids), + "samples": build_ids[:10], + }, + "debugFiles": { + "count": len(debug_files), + "totalSizeBytes": total_size, + }, + } + + summary_path.parent.mkdir(parents=True, exist_ok=True) + with summary_path.open("w", encoding="utf-8") as handle: + json.dump(summary, handle, indent=2) + 
handle.write("\n") + + +def resolve_release_debug_dir(base: pathlib.Path) -> pathlib.Path: + debug_dir = base / "debug" + if debug_dir.exists(): + return debug_dir + + # Allow specifying the channel directory directly (e.g. out/release/stable) + if base.name == "debug": + return base + + raise FileNotFoundError(f"Debug directory not found under '{base}'") + + +def parse_args(argv: list[str] | None = None) -> argparse.Namespace: + parser = argparse.ArgumentParser(description=__doc__) + parser.add_argument( + "--release-dir", + type=pathlib.Path, + default=REPO_ROOT / "out" / "release", + help="Release output directory containing the debug store (default: %(default)s)", + ) + parser.add_argument( + "--offline-kit-dir", + type=pathlib.Path, + default=REPO_ROOT / "out" / "offline-kit", + help="Offline Kit staging directory (default: %(default)s)", + ) + parser.add_argument( + "--verify-only", + action="store_true", + help="Skip copying and only verify the existing offline kit debug store", + ) + parser.add_argument( + "--dry-run", + action="store_true", + help="Print actions without copying files", + ) + return parser.parse_args(argv) + + +def main(argv: list[str] | None = None) -> int: + args = parse_args(argv) + + try: + source_debug = resolve_release_debug_dir(args.release_dir.resolve()) + except FileNotFoundError as exc: + print(f"error: {exc}", file=sys.stderr) + return 2 + + target_root = (args.offline_kit_dir / "debug").resolve() + + if not args.verify_only: + copy_debug_store(source_debug, target_root, dry_run=args.dry_run) + if args.dry_run: + return 0 + + manifest_path = target_root / "debug-manifest.json" + if not manifest_path.exists(): + print(f"error: offline kit manifest missing at {manifest_path}", file=sys.stderr) + return 3 + + manifest = load_manifest(manifest_path) + manifest_sha_path = manifest_path.with_suffix(manifest_path.suffix + ".sha256") + recorded_sha = parse_manifest_sha(manifest_sha_path) + recomputed_sha = compute_sha256(manifest_path) + if recorded_sha and recorded_sha != recomputed_sha: + print( + f"warning: manifest SHA mismatch (recorded {recorded_sha}, recomputed {recomputed_sha}); updating checksum", + file=sys.stderr, + ) + manifest_sha_path.write_text(f"{recomputed_sha} {manifest_path.name}\n", encoding="utf-8") + + verified, total = verify_debug_store(manifest, target_root) + print(f"✔ verified {verified}/{total} debug artefacts (manifest SHA {recomputed_sha})") + + summary_path = args.offline_kit_dir / "metadata" / "debug-store.json" + summarize_store(manifest, recomputed_sha, target_root, summary_path) + print(f"ℹ summary written to {summary_path}") + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/ops/offline-kit/run-python-analyzer-smoke.sh b/ops/offline-kit/run-python-analyzer-smoke.sh index b4164c93..535b92e9 100644 --- a/ops/offline-kit/run-python-analyzer-smoke.sh +++ b/ops/offline-kit/run-python-analyzer-smoke.sh @@ -2,7 +2,7 @@ set -euo pipefail repo_root="$(git -C "${BASH_SOURCE%/*}/.." 
rev-parse --show-toplevel 2>/dev/null || pwd)" -project_path="${repo_root}/src/StellaOps.Scanner.Analyzers.Lang.Python/StellaOps.Scanner.Analyzers.Lang.Python.csproj" +project_path="${repo_root}/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/StellaOps.Scanner.Analyzers.Lang.Python.csproj" output_dir="${repo_root}/out/analyzers/python" plugin_dir="${repo_root}/plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Python" diff --git a/ops/offline-kit/test_build_offline_kit.py b/ops/offline-kit/test_build_offline_kit.py index 298737c0..9b420da5 100644 --- a/ops/offline-kit/test_build_offline_kit.py +++ b/ops/offline-kit/test_build_offline_kit.py @@ -1,256 +1,256 @@ -from __future__ import annotations - -import json -import tarfile -import tempfile -import unittest -import argparse -import sys -from collections import OrderedDict -from pathlib import Path - -sys.path.append(str(Path(__file__).resolve().parent)) - -from build_release import write_manifest # type: ignore import-not-found - -from build_offline_kit import build_offline_kit, compute_sha256 # type: ignore import-not-found - - -class OfflineKitBuilderTests(unittest.TestCase): - def setUp(self) -> None: - self._temp = tempfile.TemporaryDirectory() - self.base_path = Path(self._temp.name) - self.out_dir = self.base_path / "out" - self.release_dir = self.out_dir / "release" - self.staging_dir = self.base_path / "staging" - self.output_dir = self.base_path / "dist" - self._create_sample_release() - - def tearDown(self) -> None: - self._temp.cleanup() - - def _relative_to_out(self, path: Path) -> str: - return path.relative_to(self.out_dir).as_posix() - - def _write_json(self, path: Path, payload: dict[str, object]) -> None: - path.parent.mkdir(parents=True, exist_ok=True) - with path.open("w", encoding="utf-8") as handle: - json.dump(payload, handle, indent=2) - handle.write("\n") - - def _create_sample_release(self) -> None: - self.release_dir.mkdir(parents=True, exist_ok=True) - - sbom_path = self.release_dir / "artifacts/sboms/sample.cyclonedx.json" - sbom_path.parent.mkdir(parents=True, exist_ok=True) - sbom_path.write_text('{"bomFormat":"CycloneDX","specVersion":"1.5"}\n', encoding="utf-8") - sbom_sha = compute_sha256(sbom_path) - - provenance_path = self.release_dir / "artifacts/provenance/sample.provenance.json" - self._write_json( - provenance_path, - { - "buildDefinition": {"buildType": "https://example/build"}, - "runDetails": {"builder": {"id": "https://example/ci"}}, - }, - ) - provenance_sha = compute_sha256(provenance_path) - - signature_path = self.release_dir / "artifacts/signatures/sample.signature" - signature_path.parent.mkdir(parents=True, exist_ok=True) - signature_path.write_text("signature-data\n", encoding="utf-8") - signature_sha = compute_sha256(signature_path) - - metadata_path = self.release_dir / "artifacts/metadata/sample.metadata.json" - self._write_json(metadata_path, {"digest": "sha256:1234"}) - metadata_sha = compute_sha256(metadata_path) - - chart_path = self.release_dir / "helm/stellaops-1.0.0.tgz" - chart_path.parent.mkdir(parents=True, exist_ok=True) - chart_path.write_bytes(b"helm-chart-data") - chart_sha = compute_sha256(chart_path) - - compose_path = self.release_dir.parent / "deploy/compose/docker-compose.dev.yaml" - compose_path.parent.mkdir(parents=True, exist_ok=True) - compose_path.write_text("services: {}\n", encoding="utf-8") - compose_sha = compute_sha256(compose_path) - - debug_file = self.release_dir / "debug/.build-id/ab/cdef.debug" - 
debug_file.parent.mkdir(parents=True, exist_ok=True) - debug_file.write_bytes(b"\x7fELFDEBUGDATA") - debug_sha = compute_sha256(debug_file) - - debug_manifest_path = self.release_dir / "debug/debug-manifest.json" - debug_manifest = OrderedDict( - ( - ("generatedAt", "2025-10-26T00:00:00Z"), - ("version", "1.0.0"), - ("channel", "edge"), - ( - "artifacts", - [ - OrderedDict( - ( - ("buildId", "abcdef1234"), - ("platform", "linux/amd64"), - ("debugPath", "debug/.build-id/ab/cdef.debug"), - ("sha256", debug_sha), - ("size", debug_file.stat().st_size), - ("components", ["sample"]), - ("images", ["registry.example/sample@sha256:feedface"]), - ("sources", ["app/sample.dll"]), - ) - ) - ], - ), - ) - ) - self._write_json(debug_manifest_path, debug_manifest) - debug_manifest_sha = compute_sha256(debug_manifest_path) - (debug_manifest_path.with_suffix(debug_manifest_path.suffix + ".sha256")).write_text( - f"{debug_manifest_sha} {debug_manifest_path.name}\n", - encoding="utf-8", - ) - - manifest = OrderedDict( - ( - ( - "release", - OrderedDict( - ( - ("version", "1.0.0"), - ("channel", "edge"), - ("date", "2025-10-26T00:00:00Z"), - ("calendar", "2025.10"), - ) - ), - ), - ( - "components", - [ - OrderedDict( - ( - ("name", "sample"), - ("image", "registry.example/sample@sha256:feedface"), - ("tags", ["registry.example/sample:1.0.0"]), - ( - "sbom", - OrderedDict( - ( - ("path", self._relative_to_out(sbom_path)), - ("sha256", sbom_sha), - ) - ), - ), - ( - "provenance", - OrderedDict( - ( - ("path", self._relative_to_out(provenance_path)), - ("sha256", provenance_sha), - ) - ), - ), - ( - "signature", - OrderedDict( - ( - ("path", self._relative_to_out(signature_path)), - ("sha256", signature_sha), - ("ref", "sigstore://example"), - ("tlogUploaded", True), - ) - ), - ), - ( - "metadata", - OrderedDict( - ( - ("path", self._relative_to_out(metadata_path)), - ("sha256", metadata_sha), - ) - ), - ), - ) - ) - ], - ), - ( - "charts", - [ - OrderedDict( - ( - ("name", "stellaops"), - ("version", "1.0.0"), - ("path", self._relative_to_out(chart_path)), - ("sha256", chart_sha), - ) - ) - ], - ), - ( - "compose", - [ - OrderedDict( - ( - ("name", "docker-compose.dev.yaml"), - ("path", compose_path.relative_to(self.out_dir).as_posix()), - ("sha256", compose_sha), - ) - ) - ], - ), - ( - "debugStore", - OrderedDict( - ( - ("manifest", "debug/debug-manifest.json"), - ("sha256", debug_manifest_sha), - ("entries", 1), - ("platforms", ["linux/amd64"]), - ("directory", "debug/.build-id"), - ) - ), - ), - ) - ) - write_manifest(manifest, self.release_dir) - - def test_build_offline_kit(self) -> None: - args = argparse.Namespace( - version="2025.10.0", - channel="edge", - bundle_id="bundle-001", - release_dir=self.release_dir, - staging_dir=self.staging_dir, - output_dir=self.output_dir, - cosign_key=None, - cosign_password=None, - cosign_identity_token=None, - no_transparency=False, - skip_smoke=True, - ) - result = build_offline_kit(args) - bundle_path = Path(result["bundlePath"]) - self.assertTrue(bundle_path.exists()) - offline_manifest = self.output_dir.parent / "staging" / "manifest" / "offline-manifest.json" - self.assertTrue(offline_manifest.exists()) - - with offline_manifest.open("r", encoding="utf-8") as handle: - manifest_data = json.load(handle) - artifacts = manifest_data["artifacts"] - self.assertTrue(any(item["name"].startswith("sboms/") for item in artifacts)) - - metadata_path = Path(result["metadataPath"]) - data = json.loads(metadata_path.read_text(encoding="utf-8")) - 
self.assertTrue(data["bundleSha256"].startswith("sha256:")) - self.assertTrue(data["manifestSha256"].startswith("sha256:")) - - with tarfile.open(bundle_path, "r:gz") as tar: - members = tar.getnames() - self.assertIn("manifest/release.yaml", members) - self.assertTrue(any(name.startswith("sboms/sample-") for name in members)) - - -if __name__ == "__main__": - unittest.main() +from __future__ import annotations + +import json +import tarfile +import tempfile +import unittest +import argparse +import sys +from collections import OrderedDict +from pathlib import Path + +sys.path.append(str(Path(__file__).resolve().parent)) + +from build_release import write_manifest # type: ignore import-not-found + +from build_offline_kit import build_offline_kit, compute_sha256 # type: ignore import-not-found + + +class OfflineKitBuilderTests(unittest.TestCase): + def setUp(self) -> None: + self._temp = tempfile.TemporaryDirectory() + self.base_path = Path(self._temp.name) + self.out_dir = self.base_path / "out" + self.release_dir = self.out_dir / "release" + self.staging_dir = self.base_path / "staging" + self.output_dir = self.base_path / "dist" + self._create_sample_release() + + def tearDown(self) -> None: + self._temp.cleanup() + + def _relative_to_out(self, path: Path) -> str: + return path.relative_to(self.out_dir).as_posix() + + def _write_json(self, path: Path, payload: dict[str, object]) -> None: + path.parent.mkdir(parents=True, exist_ok=True) + with path.open("w", encoding="utf-8") as handle: + json.dump(payload, handle, indent=2) + handle.write("\n") + + def _create_sample_release(self) -> None: + self.release_dir.mkdir(parents=True, exist_ok=True) + + sbom_path = self.release_dir / "artifacts/sboms/sample.cyclonedx.json" + sbom_path.parent.mkdir(parents=True, exist_ok=True) + sbom_path.write_text('{"bomFormat":"CycloneDX","specVersion":"1.5"}\n', encoding="utf-8") + sbom_sha = compute_sha256(sbom_path) + + provenance_path = self.release_dir / "artifacts/provenance/sample.provenance.json" + self._write_json( + provenance_path, + { + "buildDefinition": {"buildType": "https://example/build"}, + "runDetails": {"builder": {"id": "https://example/ci"}}, + }, + ) + provenance_sha = compute_sha256(provenance_path) + + signature_path = self.release_dir / "artifacts/signatures/sample.signature" + signature_path.parent.mkdir(parents=True, exist_ok=True) + signature_path.write_text("signature-data\n", encoding="utf-8") + signature_sha = compute_sha256(signature_path) + + metadata_path = self.release_dir / "artifacts/metadata/sample.metadata.json" + self._write_json(metadata_path, {"digest": "sha256:1234"}) + metadata_sha = compute_sha256(metadata_path) + + chart_path = self.release_dir / "helm/stellaops-1.0.0.tgz" + chart_path.parent.mkdir(parents=True, exist_ok=True) + chart_path.write_bytes(b"helm-chart-data") + chart_sha = compute_sha256(chart_path) + + compose_path = self.release_dir.parent / "deploy/compose/docker-compose.dev.yaml" + compose_path.parent.mkdir(parents=True, exist_ok=True) + compose_path.write_text("services: {}\n", encoding="utf-8") + compose_sha = compute_sha256(compose_path) + + debug_file = self.release_dir / "debug/.build-id/ab/cdef.debug" + debug_file.parent.mkdir(parents=True, exist_ok=True) + debug_file.write_bytes(b"\x7fELFDEBUGDATA") + debug_sha = compute_sha256(debug_file) + + debug_manifest_path = self.release_dir / "debug/debug-manifest.json" + debug_manifest = OrderedDict( + ( + ("generatedAt", "2025-10-26T00:00:00Z"), + ("version", "1.0.0"), + ("channel", "edge"), + ( + 
"artifacts", + [ + OrderedDict( + ( + ("buildId", "abcdef1234"), + ("platform", "linux/amd64"), + ("debugPath", "debug/.build-id/ab/cdef.debug"), + ("sha256", debug_sha), + ("size", debug_file.stat().st_size), + ("components", ["sample"]), + ("images", ["registry.example/sample@sha256:feedface"]), + ("sources", ["app/sample.dll"]), + ) + ) + ], + ), + ) + ) + self._write_json(debug_manifest_path, debug_manifest) + debug_manifest_sha = compute_sha256(debug_manifest_path) + (debug_manifest_path.with_suffix(debug_manifest_path.suffix + ".sha256")).write_text( + f"{debug_manifest_sha} {debug_manifest_path.name}\n", + encoding="utf-8", + ) + + manifest = OrderedDict( + ( + ( + "release", + OrderedDict( + ( + ("version", "1.0.0"), + ("channel", "edge"), + ("date", "2025-10-26T00:00:00Z"), + ("calendar", "2025.10"), + ) + ), + ), + ( + "components", + [ + OrderedDict( + ( + ("name", "sample"), + ("image", "registry.example/sample@sha256:feedface"), + ("tags", ["registry.example/sample:1.0.0"]), + ( + "sbom", + OrderedDict( + ( + ("path", self._relative_to_out(sbom_path)), + ("sha256", sbom_sha), + ) + ), + ), + ( + "provenance", + OrderedDict( + ( + ("path", self._relative_to_out(provenance_path)), + ("sha256", provenance_sha), + ) + ), + ), + ( + "signature", + OrderedDict( + ( + ("path", self._relative_to_out(signature_path)), + ("sha256", signature_sha), + ("ref", "sigstore://example"), + ("tlogUploaded", True), + ) + ), + ), + ( + "metadata", + OrderedDict( + ( + ("path", self._relative_to_out(metadata_path)), + ("sha256", metadata_sha), + ) + ), + ), + ) + ) + ], + ), + ( + "charts", + [ + OrderedDict( + ( + ("name", "stellaops"), + ("version", "1.0.0"), + ("path", self._relative_to_out(chart_path)), + ("sha256", chart_sha), + ) + ) + ], + ), + ( + "compose", + [ + OrderedDict( + ( + ("name", "docker-compose.dev.yaml"), + ("path", compose_path.relative_to(self.out_dir).as_posix()), + ("sha256", compose_sha), + ) + ) + ], + ), + ( + "debugStore", + OrderedDict( + ( + ("manifest", "debug/debug-manifest.json"), + ("sha256", debug_manifest_sha), + ("entries", 1), + ("platforms", ["linux/amd64"]), + ("directory", "debug/.build-id"), + ) + ), + ), + ) + ) + write_manifest(manifest, self.release_dir) + + def test_build_offline_kit(self) -> None: + args = argparse.Namespace( + version="2025.10.0", + channel="edge", + bundle_id="bundle-001", + release_dir=self.release_dir, + staging_dir=self.staging_dir, + output_dir=self.output_dir, + cosign_key=None, + cosign_password=None, + cosign_identity_token=None, + no_transparency=False, + skip_smoke=True, + ) + result = build_offline_kit(args) + bundle_path = Path(result["bundlePath"]) + self.assertTrue(bundle_path.exists()) + offline_manifest = self.output_dir.parent / "staging" / "manifest" / "offline-manifest.json" + self.assertTrue(offline_manifest.exists()) + + with offline_manifest.open("r", encoding="utf-8") as handle: + manifest_data = json.load(handle) + artifacts = manifest_data["artifacts"] + self.assertTrue(any(item["name"].startswith("sboms/") for item in artifacts)) + + metadata_path = Path(result["metadataPath"]) + data = json.loads(metadata_path.read_text(encoding="utf-8")) + self.assertTrue(data["bundleSha256"].startswith("sha256:")) + self.assertTrue(data["manifestSha256"].startswith("sha256:")) + + with tarfile.open(bundle_path, "r:gz") as tar: + members = tar.getnames() + self.assertIn("manifest/release.yaml", members) + self.assertTrue(any(name.startswith("sboms/sample-") for name in members)) + + +if __name__ == "__main__": + 
unittest.main() diff --git a/out/analyzers/python/StellaOps.Auth.Abstractions.xml b/out/analyzers/python/StellaOps.Auth.Abstractions.xml index 5b77e659..d533443e 100644 --- a/out/analyzers/python/StellaOps.Auth.Abstractions.xml +++ b/out/analyzers/python/StellaOps.Auth.Abstractions.xml @@ -1,422 +1,422 @@ - - - - StellaOps.Auth.Abstractions - - - - - Canonical telemetry metadata for the StellaOps Authority stack. - - - - - service.name resource attribute recorded by Authority components. - - - - - service.namespace resource attribute aligning Authority with other StellaOps services. - - - - - Activity source identifier used by Authority instrumentation. - - - - - Meter name used by Authority instrumentation. - - - - - Builds the default set of resource attributes (service name/namespace/version). - - Optional assembly used to resolve the service version. - - - - Resolves the service version string from the provided assembly (defaults to the Authority telemetry assembly). - - - - - Represents an IP network expressed in CIDR notation. - - - - - Initialises a new . - - Canonical network address with host bits zeroed. - Prefix length (0-32 for IPv4, 0-128 for IPv6). - - - - Canonical network address with host bits zeroed. - - - - - Prefix length. - - - - - Attempts to parse the supplied value as CIDR notation or a single IP address. - - Thrown when the input is not recognised. - - - - Attempts to parse the supplied value as CIDR notation or a single IP address. - - - - - Determines whether the provided address belongs to this network. - - - - - - - - Evaluates remote addresses against configured network masks. - - - - - Creates a matcher from raw CIDR strings. - - Sequence of CIDR entries or IP addresses. - Thrown when a value cannot be parsed. - - - - Creates a matcher from already parsed masks. - - Sequence of network masks. - - - - Gets a matcher that allows every address. - - - - - Gets a matcher that denies every address (no masks configured). - - - - - Indicates whether this matcher has no masks configured and does not allow all. - - - - - Returns the configured masks. - - - - - Checks whether the provided address matches any of the configured masks. - - Remote address to test. - true when the address is allowed. - - - - Default authentication constants used by StellaOps resource servers and clients. - - - - - Default authentication scheme for StellaOps bearer tokens. - - - - - Logical authentication type attached to . - - - - - Policy prefix applied to named authorization policies. - - - - - Canonical claim type identifiers used across StellaOps services. - - - - - Subject identifier claim (maps to sub in JWTs). - - - - - StellaOps tenant identifier claim (multi-tenant deployments). - - - - - OAuth2/OIDC client identifier claim (maps to client_id). - - - - - Unique token identifier claim (maps to jti). - - - - - Authentication method reference claim (amr). - - - - - Space separated scope list (scope). - - - - - Individual scope items (scp). - - - - - OAuth2 resource audiences (aud). - - - - - Identity provider hint for downstream services. - - - - - Session identifier claim (sid). - - - - - Fluent helper used to construct instances that follow StellaOps conventions. - - - - - Adds or replaces the canonical subject identifier. - - - - - Adds or replaces the canonical client identifier. - - - - - Adds or replaces the tenant identifier claim. - - - - - Adds or replaces the user display name claim. - - - - - Adds or replaces the identity provider claim. 
- - - - - Adds or replaces the session identifier claim. - - - - - Adds or replaces the token identifier claim. - - - - - Adds or replaces the authentication method reference claim. - - - - - Sets the name claim type appended when building the . - - - - - Sets the role claim type appended when building the . - - - - - Sets the authentication type stamped on the . - - - - - Registers the supplied scopes (normalised to lower-case, deduplicated, sorted). - - - - - Registers the supplied audiences (trimmed, deduplicated, sorted). - - - - - Adds a single audience. - - - - - Adds an arbitrary claim (no deduplication is performed). - - - - - Adds multiple claims (incoming claims are cloned to enforce value trimming). - - - - - Adds an iat (issued at) claim using Unix time seconds. - - - - - Adds an nbf (not before) claim using Unix time seconds. - - - - - Adds an exp (expires) claim using Unix time seconds. - - - - - Returns the normalised scope list (deduplicated + sorted). - - - - - Returns the normalised audience list (deduplicated + sorted). - - - - - Builds the immutable instance based on the registered data. - - - - - Factory helpers for returning RFC 7807 problem responses using StellaOps conventions. - - - - - Produces a 401 problem response indicating authentication is required. - - - - - Produces a 401 problem response for invalid, expired, or revoked tokens. - - - - - Produces a 403 problem response when access is denied. - - - - - Produces a 403 problem response for insufficient scopes. - - - - - Canonical scope names supported by StellaOps services. - - - - - Scope required to trigger Concelier jobs. - - - - - Scope required to manage Concelier merge operations. - - - - - Scope granting administrative access to Authority user management. - - - - - Scope granting administrative access to Authority client registrations. - - - - - Scope granting read-only access to Authority audit logs. - - - - - Synthetic scope representing trusted network bypass. - - - - - Scope granting read-only access to raw advisory ingestion data. - - - - - Scope granting write access for raw advisory ingestion. - - - - - Scope granting read-only access to raw VEX ingestion data. - - - - - Scope granting write access for raw VEX ingestion. - - - - - Scope granting permission to execute aggregation-only contract verification. - - - - - Normalises a scope string (trim/convert to lower case). - - Scope raw value. - Normalised scope or null when the input is blank. - - - - Checks whether the provided scope is registered as a built-in StellaOps scope. - - - - - Returns the full set of built-in scopes. - - - - + + + + StellaOps.Auth.Abstractions + + + + + Canonical telemetry metadata for the StellaOps Authority stack. + + + + + service.name resource attribute recorded by Authority components. + + + + + service.namespace resource attribute aligning Authority with other StellaOps services. + + + + + Activity source identifier used by Authority instrumentation. + + + + + Meter name used by Authority instrumentation. + + + + + Builds the default set of resource attributes (service name/namespace/version). + + Optional assembly used to resolve the service version. + + + + Resolves the service version string from the provided assembly (defaults to the Authority telemetry assembly). + + + + + Represents an IP network expressed in CIDR notation. + + + + + Initialises a new . + + Canonical network address with host bits zeroed. + Prefix length (0-32 for IPv4, 0-128 for IPv6). 
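The NetworkMask/matcher summaries above describe the intended semantics: accept either CIDR notation or a bare address, canonicalise the network address, then test whether a remote address falls inside any configured network. The C# signatures are not reproduced in this XML, so the following Python sketch only illustrates those semantics with the standard ipaddress module; it is not the library's API:

# Illustration only: CIDR-or-single-address parsing and membership checks.
import ipaddress


def parse_mask(value):
    # Accept either CIDR notation or a single IP address, as the summaries describe.
    value = value.strip()
    if "/" in value:
        # strict=False zeroes the host bits, i.e. canonicalises the network address.
        return ipaddress.ip_network(value, strict=False)
    return ipaddress.ip_network(value)  # a bare address becomes a /32 (IPv4) or /128 (IPv6)


def allowed(remote, masks):
    address = ipaddress.ip_address(remote)
    return any(address in parse_mask(mask) for mask in masks)


print(allowed("10.1.2.3", ["10.1.0.0/16"]))    # True
print(allowed("192.0.2.10", ["10.1.0.0/16"]))  # False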
+ + + + Canonical network address with host bits zeroed. + + + + + Prefix length. + + + + + Attempts to parse the supplied value as CIDR notation or a single IP address. + + Thrown when the input is not recognised. + + + + Attempts to parse the supplied value as CIDR notation or a single IP address. + + + + + Determines whether the provided address belongs to this network. + + + + + + + + Evaluates remote addresses against configured network masks. + + + + + Creates a matcher from raw CIDR strings. + + Sequence of CIDR entries or IP addresses. + Thrown when a value cannot be parsed. + + + + Creates a matcher from already parsed masks. + + Sequence of network masks. + + + + Gets a matcher that allows every address. + + + + + Gets a matcher that denies every address (no masks configured). + + + + + Indicates whether this matcher has no masks configured and does not allow all. + + + + + Returns the configured masks. + + + + + Checks whether the provided address matches any of the configured masks. + + Remote address to test. + true when the address is allowed. + + + + Default authentication constants used by StellaOps resource servers and clients. + + + + + Default authentication scheme for StellaOps bearer tokens. + + + + + Logical authentication type attached to . + + + + + Policy prefix applied to named authorization policies. + + + + + Canonical claim type identifiers used across StellaOps services. + + + + + Subject identifier claim (maps to sub in JWTs). + + + + + StellaOps tenant identifier claim (multi-tenant deployments). + + + + + OAuth2/OIDC client identifier claim (maps to client_id). + + + + + Unique token identifier claim (maps to jti). + + + + + Authentication method reference claim (amr). + + + + + Space separated scope list (scope). + + + + + Individual scope items (scp). + + + + + OAuth2 resource audiences (aud). + + + + + Identity provider hint for downstream services. + + + + + Session identifier claim (sid). + + + + + Fluent helper used to construct instances that follow StellaOps conventions. + + + + + Adds or replaces the canonical subject identifier. + + + + + Adds or replaces the canonical client identifier. + + + + + Adds or replaces the tenant identifier claim. + + + + + Adds or replaces the user display name claim. + + + + + Adds or replaces the identity provider claim. + + + + + Adds or replaces the session identifier claim. + + + + + Adds or replaces the token identifier claim. + + + + + Adds or replaces the authentication method reference claim. + + + + + Sets the name claim type appended when building the . + + + + + Sets the role claim type appended when building the . + + + + + Sets the authentication type stamped on the . + + + + + Registers the supplied scopes (normalised to lower-case, deduplicated, sorted). + + + + + Registers the supplied audiences (trimmed, deduplicated, sorted). + + + + + Adds a single audience. + + + + + Adds an arbitrary claim (no deduplication is performed). + + + + + Adds multiple claims (incoming claims are cloned to enforce value trimming). + + + + + Adds an iat (issued at) claim using Unix time seconds. + + + + + Adds an nbf (not before) claim using Unix time seconds. + + + + + Adds an exp (expires) claim using Unix time seconds. + + + + + Returns the normalised scope list (deduplicated + sorted). + + + + + Returns the normalised audience list (deduplicated + sorted). + + + + + Builds the immutable instance based on the registered data. 
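The claims-builder summaries above pin down a normalisation contract: scopes are trimmed, lower-cased, deduplicated, and sorted, while audiences are trimmed, deduplicated, and sorted. A Python sketch of that contract, using placeholder scope and audience values; it is not the C# implementation:

# Illustration only: the normalisation contract described above.
def normalise_scopes(raw_scopes):
    # Trim, lower-case, drop blanks, deduplicate, and return a sorted list.
    cleaned = {scope.strip().lower() for scope in raw_scopes if scope and scope.strip()}
    return sorted(cleaned)


def normalise_audiences(raw_audiences):
    # Trim, drop blanks, deduplicate, and return a sorted list (case is preserved).
    cleaned = {audience.strip() for audience in raw_audiences if audience and audience.strip()}
    return sorted(cleaned)


print(normalise_scopes([" Example.Read ", "example.read", ""]))
# ['example.read']
print(normalise_audiences(["api://example-audience ", "api://example-audience"]))
# ['api://example-audience']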
+ + + + + Factory helpers for returning RFC 7807 problem responses using StellaOps conventions. + + + + + Produces a 401 problem response indicating authentication is required. + + + + + Produces a 401 problem response for invalid, expired, or revoked tokens. + + + + + Produces a 403 problem response when access is denied. + + + + + Produces a 403 problem response for insufficient scopes. + + + + + Canonical scope names supported by StellaOps services. + + + + + Scope required to trigger Concelier jobs. + + + + + Scope required to manage Concelier merge operations. + + + + + Scope granting administrative access to Authority user management. + + + + + Scope granting administrative access to Authority client registrations. + + + + + Scope granting read-only access to Authority audit logs. + + + + + Synthetic scope representing trusted network bypass. + + + + + Scope granting read-only access to raw advisory ingestion data. + + + + + Scope granting write access for raw advisory ingestion. + + + + + Scope granting read-only access to raw VEX ingestion data. + + + + + Scope granting write access for raw VEX ingestion. + + + + + Scope granting permission to execute aggregation-only contract verification. + + + + + Normalises a scope string (trim/convert to lower case). + + Scope raw value. + Normalised scope or null when the input is blank. + + + + Checks whether the provided scope is registered as a built-in StellaOps scope. + + + + + Returns the full set of built-in scopes. + + + + diff --git a/out/analyzers/python/StellaOps.Auth.Client.xml b/out/analyzers/python/StellaOps.Auth.Client.xml index 3a8a3b98..a3b44557 100644 --- a/out/analyzers/python/StellaOps.Auth.Client.xml +++ b/out/analyzers/python/StellaOps.Auth.Client.xml @@ -1,233 +1,233 @@ - - - - StellaOps.Auth.Client - - - - - File-based token cache suitable for CLI/offline usage. - - - - - In-memory token cache suitable for service scenarios. - - - - - Abstraction for caching StellaOps tokens. - - - - - Retrieves a cached token entry, if present. - - - - - Stores or updates a token entry for the specified key. - - - - - Removes the cached entry for the specified key. - - - - - Abstraction for requesting tokens from StellaOps Authority. - - - - - Requests an access token using the resource owner password credentials flow. - - - - - Requests an access token using the client credentials flow. - - - - - Retrieves the cached JWKS document. - - - - - Retrieves a cached token entry. - - - - - Persists a token entry in the cache. - - - - - Removes a cached entry. - - - - - DI helpers for the StellaOps auth client. - - - - - Registers the StellaOps auth client with the provided configuration. - - - - - Registers a file-backed token cache implementation. - - - - - Options controlling the StellaOps authentication client. - - - - - Authority (issuer) base URL. - - - - - OAuth client identifier (optional for password flow). - - - - - OAuth client secret (optional for public clients). - - - - - Default scopes requested for flows that do not explicitly override them. - - - - - Retry delays applied by HTTP retry policy (empty uses defaults). - - - - - Gets or sets a value indicating whether HTTP retry policies are enabled. - - - - - Timeout applied to discovery and token HTTP requests. - - - - - Lifetime of cached discovery metadata. - - - - - Lifetime of cached JWKS metadata. - - - - - Buffer applied when determining cache expiration (default: 30 seconds). 
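The scope helpers summarised above (normalise by trimming and lower-casing, return null for blank input, check membership against the built-in set, expose the full set) can be sketched as below. The scope string literals are placeholders; the actual StellaOps scope names are not reproduced here.

// Sketch of the documented scope-helper behaviour with placeholder scope values.
using System;
using System.Collections.Generic;

public static class StellaOpsScopesSketch
{
    private static readonly HashSet<string> BuiltIn = new(StringComparer.Ordinal)
    {
        "example.jobs.trigger", // placeholder, not a real StellaOps scope
        "example.merge",        // placeholder
        "example.audit.read",   // placeholder
    };

    // Trim and lower-case the input; blank input yields null.
    public static string? Normalize(string? value)
        => string.IsNullOrWhiteSpace(value) ? null : value.Trim().ToLowerInvariant();

    // A scope is "known" only if its normalised form is in the built-in set.
    public static bool IsKnown(string? value)
        => Normalize(value) is { } scope && BuiltIn.Contains(scope);

    public static IReadOnlyCollection<string> All => BuiltIn;
}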
- - - - - Gets or sets a value indicating whether cached discovery/JWKS responses may be served when the Authority is unreachable. - - - - - Additional tolerance window during which stale cache entries remain valid if offline fallback is allowed. - - - - - Parsed Authority URI (populated after validation). - - - - - Normalised scope list (populated after validation). - - - - - Normalised retry delays (populated after validation). - - - - - Validates required values and normalises scope entries. - - - - - Caches Authority discovery metadata. - - - - - Minimal OpenID Connect configuration representation. - - - - - Minimal OpenID Connect configuration representation. - - - - - Caches JWKS documents for Authority. - - - - - Represents a cached token entry. - - - - - Represents a cached token entry. - - - - - Determines whether the token is expired given the provided . - - - - - Creates a copy with scopes normalised. - - - - - Default implementation of . - - - - - Represents an issued token with metadata. - - - - - Represents an issued token with metadata. - - - - - Converts the result to a cache entry. - - - - + + + + StellaOps.Auth.Client + + + + + File-based token cache suitable for CLI/offline usage. + + + + + In-memory token cache suitable for service scenarios. + + + + + Abstraction for caching StellaOps tokens. + + + + + Retrieves a cached token entry, if present. + + + + + Stores or updates a token entry for the specified key. + + + + + Removes the cached entry for the specified key. + + + + + Abstraction for requesting tokens from StellaOps Authority. + + + + + Requests an access token using the resource owner password credentials flow. + + + + + Requests an access token using the client credentials flow. + + + + + Retrieves the cached JWKS document. + + + + + Retrieves a cached token entry. + + + + + Persists a token entry in the cache. + + + + + Removes a cached entry. + + + + + DI helpers for the StellaOps auth client. + + + + + Registers the StellaOps auth client with the provided configuration. + + + + + Registers a file-backed token cache implementation. + + + + + Options controlling the StellaOps authentication client. + + + + + Authority (issuer) base URL. + + + + + OAuth client identifier (optional for password flow). + + + + + OAuth client secret (optional for public clients). + + + + + Default scopes requested for flows that do not explicitly override them. + + + + + Retry delays applied by HTTP retry policy (empty uses defaults). + + + + + Gets or sets a value indicating whether HTTP retry policies are enabled. + + + + + Timeout applied to discovery and token HTTP requests. + + + + + Lifetime of cached discovery metadata. + + + + + Lifetime of cached JWKS metadata. + + + + + Buffer applied when determining cache expiration (default: 30 seconds). + + + + + Gets or sets a value indicating whether cached discovery/JWKS responses may be served when the Authority is unreachable. + + + + + Additional tolerance window during which stale cache entries remain valid if offline fallback is allowed. + + + + + Parsed Authority URI (populated after validation). + + + + + Normalised scope list (populated after validation). + + + + + Normalised retry delays (populated after validation). + + + + + Validates required values and normalises scope entries. + + + + + Caches Authority discovery metadata. + + + + + Minimal OpenID Connect configuration representation. + + + + + Minimal OpenID Connect configuration representation. + + + + + Caches JWKS documents for Authority. 
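The cache-expiry rules documented for the auth client (a buffer, defaulting to 30 seconds, subtracted from the token lifetime, plus an optional stale-tolerance window when offline fallback is allowed and the Authority is unreachable) can be illustrated with a small sketch. The record and method names are hypothetical, not the real cache entry type.

// Sketch of the documented expiration-buffer and offline stale-tolerance behaviour.
using System;

public sealed record CachedTokenEntrySketch(string AccessToken, DateTimeOffset ExpiresAt)
{
    // Expired once "now" passes the expiry minus the configured buffer (default 30 s).
    public bool IsExpired(DateTimeOffset now, TimeSpan? buffer = null)
        => now >= ExpiresAt - (buffer ?? TimeSpan.FromSeconds(30));

    // When offline fallback is allowed, a stale entry may still be served for an
    // additional tolerance window after its nominal expiry.
    public bool IsUsableOffline(DateTimeOffset now, TimeSpan staleTolerance)
        => now < ExpiresAt + staleTolerance;
}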
+ + + + + Represents a cached token entry. + + + + + Represents a cached token entry. + + + + + Determines whether the token is expired given the provided . + + + + + Creates a copy with scopes normalised. + + + + + Default implementation of . + + + + + Represents an issued token with metadata. + + + + + Represents an issued token with metadata. + + + + + Converts the result to a cache entry. + + + + diff --git a/out/analyzers/python/StellaOps.Scanner.Analyzers.Lang.Python.deps.json b/out/analyzers/python/StellaOps.Scanner.Analyzers.Lang.Python.deps.json index 1e6bd61b..d49edca0 100644 --- a/out/analyzers/python/StellaOps.Scanner.Analyzers.Lang.Python.deps.json +++ b/out/analyzers/python/StellaOps.Scanner.Analyzers.Lang.Python.deps.json @@ -1,858 +1,858 @@ -{ - "runtimeTarget": { - "name": ".NETCoreApp,Version=v10.0", - "signature": "" - }, - "compilationOptions": {}, - "targets": { - ".NETCoreApp,Version=v10.0": { - "StellaOps.Scanner.Analyzers.Lang.Python/1.0.0": { - "dependencies": { - "SharpCompress": "0.41.0", - "StellaOps.Scanner.Analyzers.Lang": "1.0.0" - }, - "runtime": { - "StellaOps.Scanner.Analyzers.Lang.Python.dll": {} - } - }, - "Konscious.Security.Cryptography.Argon2/1.3.1": { - "dependencies": { - "Konscious.Security.Cryptography.Blake2": "1.1.1" - }, - "runtime": { - "lib/net8.0/Konscious.Security.Cryptography.Argon2.dll": { - "assemblyVersion": "1.3.1.0", - "fileVersion": "1.3.1.0" - } - } - }, - "Konscious.Security.Cryptography.Blake2/1.1.1": { - "runtime": { - "lib/net8.0/Konscious.Security.Cryptography.Blake2.dll": { - "assemblyVersion": "1.1.1.0", - "fileVersion": "1.1.1.0" - } - } - }, - "Microsoft.Extensions.Configuration/10.0.0-rc.2.25502.107": { - "dependencies": { - "Microsoft.Extensions.Configuration.Abstractions": "10.0.0-rc.2.25502.107", - "Microsoft.Extensions.Primitives": "10.0.0-rc.2.25502.107" - }, - "runtime": { - "lib/net10.0/Microsoft.Extensions.Configuration.dll": { - "assemblyVersion": "10.0.0.0", - "fileVersion": "10.0.25.50307" - } - } - }, - "Microsoft.Extensions.Configuration.Abstractions/10.0.0-rc.2.25502.107": { - "dependencies": { - "Microsoft.Extensions.Primitives": "10.0.0-rc.2.25502.107" - }, - "runtime": { - "lib/net10.0/Microsoft.Extensions.Configuration.Abstractions.dll": { - "assemblyVersion": "10.0.0.0", - "fileVersion": "10.0.25.50307" - } - } - }, - "Microsoft.Extensions.Configuration.Binder/10.0.0-rc.2.25502.107": { - "dependencies": { - "Microsoft.Extensions.Configuration": "10.0.0-rc.2.25502.107", - "Microsoft.Extensions.Configuration.Abstractions": "10.0.0-rc.2.25502.107" - }, - "runtime": { - "lib/net10.0/Microsoft.Extensions.Configuration.Binder.dll": { - "assemblyVersion": "10.0.0.0", - "fileVersion": "10.0.25.50307" - } - } - }, - "Microsoft.Extensions.Configuration.EnvironmentVariables/10.0.0-rc.2.25502.107": { - "dependencies": { - "Microsoft.Extensions.Configuration": "10.0.0-rc.2.25502.107", - "Microsoft.Extensions.Configuration.Abstractions": "10.0.0-rc.2.25502.107" - }, - "runtime": { - "lib/net10.0/Microsoft.Extensions.Configuration.EnvironmentVariables.dll": { - "assemblyVersion": "10.0.0.0", - "fileVersion": "10.0.25.50307" - } - } - }, - "Microsoft.Extensions.Configuration.FileExtensions/10.0.0-rc.2.25502.107": { - "dependencies": { - "Microsoft.Extensions.Configuration": "10.0.0-rc.2.25502.107", - "Microsoft.Extensions.Configuration.Abstractions": "10.0.0-rc.2.25502.107", - "Microsoft.Extensions.FileProviders.Abstractions": "10.0.0-rc.2.25502.107", - "Microsoft.Extensions.FileProviders.Physical": 
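For the token-client and token-cache abstractions summarised above (client-credentials requests, cached entries with expiry, converting a token result to a cache entry), a hedged usage sketch follows. Every interface, record, and method name here is a hypothetical stand-in for the documented abstractions, not the real StellaOps.Auth.Client API.

// Illustrative client-credentials + cache flow under assumed interface shapes.
using System;
using System.Threading;
using System.Threading.Tasks;

public sealed record TokenResultSketch(string AccessToken, DateTimeOffset ExpiresAt);

public interface ITokenClientSketch
{
    Task<TokenResultSketch> RequestClientCredentialsTokenAsync(
        string scope, CancellationToken cancellationToken = default);
}

public interface ITokenCacheSketch
{
    Task<TokenResultSketch?> GetAsync(string key, CancellationToken cancellationToken = default);
    Task SetAsync(string key, TokenResultSketch entry, CancellationToken cancellationToken = default);
}

public static class TokenAcquisitionSketch
{
    // Serve from cache while the (buffered) lifetime holds; otherwise request and re-cache.
    public static async Task<string> GetAccessTokenAsync(
        ITokenClientSketch client,
        ITokenCacheSketch cache,
        string scope,
        CancellationToken cancellationToken = default)
    {
        var buffer = TimeSpan.FromSeconds(30); // mirrors the documented default expiration buffer
        var cached = await cache.GetAsync(scope, cancellationToken);
        if (cached is not null && DateTimeOffset.UtcNow < cached.ExpiresAt - buffer)
        {
            return cached.AccessToken;
        }

        var fresh = await client.RequestClientCredentialsTokenAsync(scope, cancellationToken);
        await cache.SetAsync(scope, fresh, cancellationToken);
        return fresh.AccessToken;
    }
}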
"10.0.0-rc.2.25502.107", - "Microsoft.Extensions.Primitives": "10.0.0-rc.2.25502.107" - }, - "runtime": { - "lib/net10.0/Microsoft.Extensions.Configuration.FileExtensions.dll": { - "assemblyVersion": "10.0.0.0", - "fileVersion": "10.0.25.50307" - } - } - }, - "Microsoft.Extensions.Configuration.Json/10.0.0-rc.2.25502.107": { - "dependencies": { - "Microsoft.Extensions.Configuration": "10.0.0-rc.2.25502.107", - "Microsoft.Extensions.Configuration.Abstractions": "10.0.0-rc.2.25502.107", - "Microsoft.Extensions.Configuration.FileExtensions": "10.0.0-rc.2.25502.107", - "Microsoft.Extensions.FileProviders.Abstractions": "10.0.0-rc.2.25502.107" - }, - "runtime": { - "lib/net10.0/Microsoft.Extensions.Configuration.Json.dll": { - "assemblyVersion": "10.0.0.0", - "fileVersion": "10.0.25.50307" - } - } - }, - "Microsoft.Extensions.DependencyInjection/10.0.0-rc.2.25502.107": { - "dependencies": { - "Microsoft.Extensions.DependencyInjection.Abstractions": "10.0.0-rc.2.25502.107" - }, - "runtime": { - "lib/net10.0/Microsoft.Extensions.DependencyInjection.dll": { - "assemblyVersion": "10.0.0.0", - "fileVersion": "10.0.25.50307" - } - } - }, - "Microsoft.Extensions.DependencyInjection.Abstractions/10.0.0-rc.2.25502.107": { - "runtime": { - "lib/net10.0/Microsoft.Extensions.DependencyInjection.Abstractions.dll": { - "assemblyVersion": "10.0.0.0", - "fileVersion": "10.0.25.50307" - } - } - }, - "Microsoft.Extensions.Diagnostics/10.0.0-rc.2.25502.107": { - "dependencies": { - "Microsoft.Extensions.Configuration": "10.0.0-rc.2.25502.107", - "Microsoft.Extensions.Diagnostics.Abstractions": "10.0.0-rc.2.25502.107", - "Microsoft.Extensions.Options.ConfigurationExtensions": "10.0.0-rc.2.25502.107" - }, - "runtime": { - "lib/net10.0/Microsoft.Extensions.Diagnostics.dll": { - "assemblyVersion": "10.0.0.0", - "fileVersion": "10.0.25.50307" - } - } - }, - "Microsoft.Extensions.Diagnostics.Abstractions/10.0.0-rc.2.25502.107": { - "dependencies": { - "Microsoft.Extensions.DependencyInjection.Abstractions": "10.0.0-rc.2.25502.107", - "Microsoft.Extensions.Options": "10.0.0-rc.2.25502.107" - }, - "runtime": { - "lib/net10.0/Microsoft.Extensions.Diagnostics.Abstractions.dll": { - "assemblyVersion": "10.0.0.0", - "fileVersion": "10.0.25.50307" - } - } - }, - "Microsoft.Extensions.FileProviders.Abstractions/10.0.0-rc.2.25502.107": { - "dependencies": { - "Microsoft.Extensions.Primitives": "10.0.0-rc.2.25502.107" - }, - "runtime": { - "lib/net10.0/Microsoft.Extensions.FileProviders.Abstractions.dll": { - "assemblyVersion": "10.0.0.0", - "fileVersion": "10.0.25.50307" - } - } - }, - "Microsoft.Extensions.FileProviders.Physical/10.0.0-rc.2.25502.107": { - "dependencies": { - "Microsoft.Extensions.FileProviders.Abstractions": "10.0.0-rc.2.25502.107", - "Microsoft.Extensions.FileSystemGlobbing": "10.0.0-rc.2.25502.107", - "Microsoft.Extensions.Primitives": "10.0.0-rc.2.25502.107" - }, - "runtime": { - "lib/net10.0/Microsoft.Extensions.FileProviders.Physical.dll": { - "assemblyVersion": "10.0.0.0", - "fileVersion": "10.0.25.50307" - } - } - }, - "Microsoft.Extensions.FileSystemGlobbing/10.0.0-rc.2.25502.107": { - "runtime": { - "lib/net10.0/Microsoft.Extensions.FileSystemGlobbing.dll": { - "assemblyVersion": "10.0.0.0", - "fileVersion": "10.0.25.50307" - } - } - }, - "Microsoft.Extensions.Http/10.0.0-rc.2.25502.107": { - "dependencies": { - "Microsoft.Extensions.Configuration.Abstractions": "10.0.0-rc.2.25502.107", - "Microsoft.Extensions.DependencyInjection.Abstractions": "10.0.0-rc.2.25502.107", - 
"Microsoft.Extensions.Diagnostics": "10.0.0-rc.2.25502.107", - "Microsoft.Extensions.Logging": "10.0.0-rc.2.25502.107", - "Microsoft.Extensions.Logging.Abstractions": "10.0.0-rc.2.25502.107", - "Microsoft.Extensions.Options": "10.0.0-rc.2.25502.107" - }, - "runtime": { - "lib/net10.0/Microsoft.Extensions.Http.dll": { - "assemblyVersion": "10.0.0.0", - "fileVersion": "10.0.25.50307" - } - } - }, - "Microsoft.Extensions.Http.Polly/10.0.0-rc.2.25502.107": { - "dependencies": { - "Microsoft.Extensions.Http": "10.0.0-rc.2.25502.107", - "Polly": "7.2.4", - "Polly.Extensions.Http": "3.0.0" - }, - "runtime": { - "lib/netstandard2.0/Microsoft.Extensions.Http.Polly.dll": { - "assemblyVersion": "10.0.0.0", - "fileVersion": "10.0.25.50307" - } - } - }, - "Microsoft.Extensions.Logging/10.0.0-rc.2.25502.107": { - "dependencies": { - "Microsoft.Extensions.DependencyInjection": "10.0.0-rc.2.25502.107", - "Microsoft.Extensions.Logging.Abstractions": "10.0.0-rc.2.25502.107", - "Microsoft.Extensions.Options": "10.0.0-rc.2.25502.107" - }, - "runtime": { - "lib/net10.0/Microsoft.Extensions.Logging.dll": { - "assemblyVersion": "10.0.0.0", - "fileVersion": "10.0.25.50307" - } - } - }, - "Microsoft.Extensions.Logging.Abstractions/10.0.0-rc.2.25502.107": { - "dependencies": { - "Microsoft.Extensions.DependencyInjection.Abstractions": "10.0.0-rc.2.25502.107" - }, - "runtime": { - "lib/net10.0/Microsoft.Extensions.Logging.Abstractions.dll": { - "assemblyVersion": "10.0.0.0", - "fileVersion": "10.0.25.50307" - } - } - }, - "Microsoft.Extensions.Options/10.0.0-rc.2.25502.107": { - "dependencies": { - "Microsoft.Extensions.DependencyInjection.Abstractions": "10.0.0-rc.2.25502.107", - "Microsoft.Extensions.Primitives": "10.0.0-rc.2.25502.107" - }, - "runtime": { - "lib/net10.0/Microsoft.Extensions.Options.dll": { - "assemblyVersion": "10.0.0.0", - "fileVersion": "10.0.25.50307" - } - } - }, - "Microsoft.Extensions.Options.ConfigurationExtensions/10.0.0-rc.2.25502.107": { - "dependencies": { - "Microsoft.Extensions.Configuration.Abstractions": "10.0.0-rc.2.25502.107", - "Microsoft.Extensions.Configuration.Binder": "10.0.0-rc.2.25502.107", - "Microsoft.Extensions.DependencyInjection.Abstractions": "10.0.0-rc.2.25502.107", - "Microsoft.Extensions.Options": "10.0.0-rc.2.25502.107", - "Microsoft.Extensions.Primitives": "10.0.0-rc.2.25502.107" - }, - "runtime": { - "lib/net10.0/Microsoft.Extensions.Options.ConfigurationExtensions.dll": { - "assemblyVersion": "10.0.0.0", - "fileVersion": "10.0.25.50307" - } - } - }, - "Microsoft.Extensions.Primitives/10.0.0-rc.2.25502.107": { - "runtime": { - "lib/net10.0/Microsoft.Extensions.Primitives.dll": { - "assemblyVersion": "10.0.0.0", - "fileVersion": "10.0.25.50307" - } - } - }, - "Microsoft.IdentityModel.Abstractions/8.14.0": { - "runtime": { - "lib/net9.0/Microsoft.IdentityModel.Abstractions.dll": { - "assemblyVersion": "8.14.0.0", - "fileVersion": "8.14.0.60815" - } - } - }, - "Microsoft.IdentityModel.JsonWebTokens/7.2.0": { - "dependencies": { - "Microsoft.IdentityModel.Tokens": "8.14.0" - }, - "runtime": { - "lib/net8.0/Microsoft.IdentityModel.JsonWebTokens.dll": { - "assemblyVersion": "7.2.0.0", - "fileVersion": "7.2.0.50110" - } - } - }, - "Microsoft.IdentityModel.Logging/8.14.0": { - "dependencies": { - "Microsoft.IdentityModel.Abstractions": "8.14.0" - }, - "runtime": { - "lib/net9.0/Microsoft.IdentityModel.Logging.dll": { - "assemblyVersion": "8.14.0.0", - "fileVersion": "8.14.0.60815" - } - } - }, - "Microsoft.IdentityModel.Tokens/8.14.0": { - "dependencies": { - 
"Microsoft.Extensions.Logging.Abstractions": "10.0.0-rc.2.25502.107", - "Microsoft.IdentityModel.Logging": "8.14.0" - }, - "runtime": { - "lib/net9.0/Microsoft.IdentityModel.Tokens.dll": { - "assemblyVersion": "8.14.0.0", - "fileVersion": "8.14.0.60815" - } - } - }, - "NetEscapades.Configuration.Yaml/2.1.0": { - "dependencies": { - "Microsoft.Extensions.Configuration": "10.0.0-rc.2.25502.107", - "Microsoft.Extensions.Configuration.FileExtensions": "10.0.0-rc.2.25502.107", - "YamlDotNet": "9.1.0" - }, - "runtime": { - "lib/netstandard2.0/NetEscapades.Configuration.Yaml.dll": { - "assemblyVersion": "2.1.0.0", - "fileVersion": "2.1.0.0" - } - } - }, - "Pipelines.Sockets.Unofficial/2.2.8": { - "runtime": { - "lib/net5.0/Pipelines.Sockets.Unofficial.dll": { - "assemblyVersion": "1.0.0.0", - "fileVersion": "2.2.8.1080" - } - } - }, - "Polly/7.2.4": { - "runtime": { - "lib/netstandard2.0/Polly.dll": { - "assemblyVersion": "7.0.0.0", - "fileVersion": "7.2.4.982" - } - } - }, - "Polly.Extensions.Http/3.0.0": { - "dependencies": { - "Polly": "7.2.4" - }, - "runtime": { - "lib/netstandard2.0/Polly.Extensions.Http.dll": { - "assemblyVersion": "3.0.0.0", - "fileVersion": "3.0.0.0" - } - } - }, - "SharpCompress/0.41.0": { - "dependencies": { - "ZstdSharp.Port": "0.8.6" - }, - "runtime": { - "lib/net8.0/SharpCompress.dll": { - "assemblyVersion": "0.41.0.0", - "fileVersion": "0.41.0.0" - } - } - }, - "StackExchange.Redis/2.8.24": { - "dependencies": { - "Microsoft.Extensions.Logging.Abstractions": "10.0.0-rc.2.25502.107", - "Pipelines.Sockets.Unofficial": "2.2.8" - }, - "runtime": { - "lib/net8.0/StackExchange.Redis.dll": { - "assemblyVersion": "2.0.0.0", - "fileVersion": "2.8.24.3255" - } - } - }, - "System.IdentityModel.Tokens.Jwt/7.2.0": { - "dependencies": { - "Microsoft.IdentityModel.JsonWebTokens": "7.2.0", - "Microsoft.IdentityModel.Tokens": "8.14.0" - }, - "runtime": { - "lib/net8.0/System.IdentityModel.Tokens.Jwt.dll": { - "assemblyVersion": "7.2.0.0", - "fileVersion": "7.2.0.50110" - } - } - }, - "YamlDotNet/9.1.0": { - "runtime": { - "lib/netstandard2.1/YamlDotNet.dll": { - "assemblyVersion": "9.0.0.0", - "fileVersion": "9.1.0.0" - } - } - }, - "ZstdSharp.Port/0.8.6": { - "runtime": { - "lib/net9.0/ZstdSharp.dll": { - "assemblyVersion": "0.8.6.0", - "fileVersion": "0.8.6.0" - } - } - }, - "StellaOps.Auth.Abstractions/1.0.0-preview.1": { - "dependencies": { - "SharpCompress": "0.41.0" - }, - "runtime": { - "StellaOps.Auth.Abstractions.dll": { - "assemblyVersion": "1.0.0.0", - "fileVersion": "1.0.0.0" - } - } - }, - "StellaOps.Auth.Client/1.0.0-preview.1": { - "dependencies": { - "Microsoft.Extensions.Http.Polly": "10.0.0-rc.2.25502.107", - "Microsoft.IdentityModel.Tokens": "8.14.0", - "SharpCompress": "0.41.0", - "StellaOps.Auth.Abstractions": "1.0.0-preview.1", - "StellaOps.Configuration": "1.0.0" - }, - "runtime": { - "StellaOps.Auth.Client.dll": { - "assemblyVersion": "1.0.0.0", - "fileVersion": "1.0.0.0" - } - } - }, - "StellaOps.Auth.Security/1.0.0-preview.1": { - "dependencies": { - "Microsoft.IdentityModel.Tokens": "8.14.0", - "SharpCompress": "0.41.0", - "StackExchange.Redis": "2.8.24", - "System.IdentityModel.Tokens.Jwt": "7.2.0" - }, - "runtime": { - "StellaOps.Auth.Security.dll": { - "assemblyVersion": "1.0.0.0", - "fileVersion": "1.0.0.0" - } - } - }, - "StellaOps.Authority.Plugins.Abstractions/1.0.0": { - "dependencies": { - "Microsoft.Extensions.Configuration.Abstractions": "10.0.0-rc.2.25502.107", - "Microsoft.Extensions.DependencyInjection.Abstractions": "10.0.0-rc.2.25502.107", 
- "Microsoft.Extensions.Logging.Abstractions": "10.0.0-rc.2.25502.107", - "SharpCompress": "0.41.0", - "StellaOps.Auth.Abstractions": "1.0.0-preview.1", - "StellaOps.Cryptography": "1.0.0" - }, - "runtime": { - "StellaOps.Authority.Plugins.Abstractions.dll": { - "assemblyVersion": "1.0.0.0", - "fileVersion": "1.0.0.0" - } - } - }, - "StellaOps.Configuration/1.0.0": { - "dependencies": { - "Microsoft.Extensions.Configuration": "10.0.0-rc.2.25502.107", - "Microsoft.Extensions.Configuration.Binder": "10.0.0-rc.2.25502.107", - "Microsoft.Extensions.Configuration.EnvironmentVariables": "10.0.0-rc.2.25502.107", - "Microsoft.Extensions.Configuration.FileExtensions": "10.0.0-rc.2.25502.107", - "Microsoft.Extensions.Configuration.Json": "10.0.0-rc.2.25502.107", - "NetEscapades.Configuration.Yaml": "2.1.0", - "SharpCompress": "0.41.0", - "StellaOps.Authority.Plugins.Abstractions": "1.0.0", - "StellaOps.Cryptography": "1.0.0" - }, - "runtime": { - "StellaOps.Configuration.dll": { - "assemblyVersion": "1.0.0.0", - "fileVersion": "1.0.0.0" - } - } - }, - "StellaOps.Cryptography/1.0.0": { - "dependencies": { - "Konscious.Security.Cryptography.Argon2": "1.3.1", - "Microsoft.IdentityModel.Tokens": "8.14.0", - "SharpCompress": "0.41.0" - }, - "runtime": { - "StellaOps.Cryptography.dll": { - "assemblyVersion": "1.0.0.0", - "fileVersion": "1.0.0.0" - } - } - }, - "StellaOps.DependencyInjection/1.0.0": { - "dependencies": { - "Microsoft.Extensions.Configuration.Abstractions": "10.0.0-rc.2.25502.107", - "Microsoft.Extensions.DependencyInjection.Abstractions": "10.0.0-rc.2.25502.107", - "SharpCompress": "0.41.0" - }, - "runtime": { - "StellaOps.DependencyInjection.dll": { - "assemblyVersion": "1.0.0.0", - "fileVersion": "1.0.0.0" - } - } - }, - "StellaOps.Plugin/1.0.0": { - "dependencies": { - "Microsoft.Extensions.Configuration.Abstractions": "10.0.0-rc.2.25502.107", - "Microsoft.Extensions.DependencyInjection.Abstractions": "10.0.0-rc.2.25502.107", - "Microsoft.Extensions.Logging.Abstractions": "10.0.0-rc.2.25502.107", - "SharpCompress": "0.41.0", - "StellaOps.DependencyInjection": "1.0.0" - }, - "runtime": { - "StellaOps.Plugin.dll": { - "assemblyVersion": "1.0.0.0", - "fileVersion": "1.0.0.0" - } - } - }, - "StellaOps.Scanner.Analyzers.Lang/1.0.0": { - "dependencies": { - "SharpCompress": "0.41.0", - "StellaOps.Plugin": "1.0.0", - "StellaOps.Scanner.Core": "1.0.0" - }, - "runtime": { - "StellaOps.Scanner.Analyzers.Lang.dll": { - "assemblyVersion": "1.0.0.0", - "fileVersion": "1.0.0.0" - } - } - }, - "StellaOps.Scanner.Core/1.0.0": { - "dependencies": { - "Microsoft.Extensions.Logging.Abstractions": "10.0.0-rc.2.25502.107", - "Microsoft.Extensions.Options": "10.0.0-rc.2.25502.107", - "SharpCompress": "0.41.0", - "StellaOps.Auth.Client": "1.0.0-preview.1", - "StellaOps.Auth.Security": "1.0.0-preview.1" - }, - "runtime": { - "StellaOps.Scanner.Core.dll": { - "assemblyVersion": "1.0.0.0", - "fileVersion": "1.0.0.0" - } - } - } - } - }, - "libraries": { - "StellaOps.Scanner.Analyzers.Lang.Python/1.0.0": { - "type": "project", - "serviceable": false, - "sha512": "" - }, - "Konscious.Security.Cryptography.Argon2/1.3.1": { - "type": "package", - "serviceable": true, - "sha512": "sha512-T+OAGwzYYXftahpOxO7J4xA5K6urxwGnWQf3M+Jpi+76Azv/0T3M5SuN+h7/QvXuiqNw3ZEZ5QqVLI5ygDAylw==", - "path": "konscious.security.cryptography.argon2/1.3.1", - "hashPath": "konscious.security.cryptography.argon2.1.3.1.nupkg.sha512" - }, - "Konscious.Security.Cryptography.Blake2/1.1.1": { - "type": "package", - "serviceable": true, - "sha512": 
"sha512-odwOyzj/J/lHJZNwFWJGU/LRecBShupAJ2S8TQqZfhUe9niHzu/voBYK5wuVKsvSpzbfupKQYZguVyIk1sgOkQ==", - "path": "konscious.security.cryptography.blake2/1.1.1", - "hashPath": "konscious.security.cryptography.blake2.1.1.1.nupkg.sha512" - }, - "Microsoft.Extensions.Configuration/10.0.0-rc.2.25502.107": { - "type": "package", - "serviceable": true, - "sha512": "sha512-273Ggibh3DdVrj47ENbUGIirOiqmLTAizpkvOD584Ps6NL/CMXPzesijnJgsjp7Fv/UCp69FKYBaSxZZ3q5R9g==", - "path": "microsoft.extensions.configuration/10.0.0-rc.2.25502.107", - "hashPath": "microsoft.extensions.configuration.10.0.0-rc.2.25502.107.nupkg.sha512" - }, - "Microsoft.Extensions.Configuration.Abstractions/10.0.0-rc.2.25502.107": { - "type": "package", - "serviceable": true, - "sha512": "sha512-H+i/Qy30Rg/K9BcW2Z6DCHPCzwMH3bCwNOjEz31shWTUDK8GeeeMnrKVusprTcRA2Y6yPST+hg2zc3whPEs14Q==", - "path": "microsoft.extensions.configuration.abstractions/10.0.0-rc.2.25502.107", - "hashPath": "microsoft.extensions.configuration.abstractions.10.0.0-rc.2.25502.107.nupkg.sha512" - }, - "Microsoft.Extensions.Configuration.Binder/10.0.0-rc.2.25502.107": { - "type": "package", - "serviceable": true, - "sha512": "sha512-aA6/V6lw1Gueyb1PqhHAl/i/qUUuv+Fusfk4oaMOzzOjspBkYtPpNHCmml/0t1x0/DnZoed+u2WwpP+mSwd8Dg==", - "path": "microsoft.extensions.configuration.binder/10.0.0-rc.2.25502.107", - "hashPath": "microsoft.extensions.configuration.binder.10.0.0-rc.2.25502.107.nupkg.sha512" - }, - "Microsoft.Extensions.Configuration.EnvironmentVariables/10.0.0-rc.2.25502.107": { - "type": "package", - "serviceable": true, - "sha512": "sha512-2SV60IUAWfluZv2YHNZ+nUOljYHGIsy96FpJs+N9/bgKDYs9qr6DdzPeIhiHrz+XvRzbybvcwtTBf5dKrYN4oA==", - "path": "microsoft.extensions.configuration.environmentvariables/10.0.0-rc.2.25502.107", - "hashPath": "microsoft.extensions.configuration.environmentvariables.10.0.0-rc.2.25502.107.nupkg.sha512" - }, - "Microsoft.Extensions.Configuration.FileExtensions/10.0.0-rc.2.25502.107": { - "type": "package", - "serviceable": true, - "sha512": "sha512-5KrgXSTFR8cFLmDXXoT7GLVvDyHNw0Z9xG4doD78Q/HdlAR4jiMzmLLS9GFXrPGopmC6qqEZr2VBJHEu16INcA==", - "path": "microsoft.extensions.configuration.fileextensions/10.0.0-rc.2.25502.107", - "hashPath": "microsoft.extensions.configuration.fileextensions.10.0.0-rc.2.25502.107.nupkg.sha512" - }, - "Microsoft.Extensions.Configuration.Json/10.0.0-rc.2.25502.107": { - "type": "package", - "serviceable": true, - "sha512": "sha512-USwHuFz4BFKoaqSydHWH/d7Mr+fVsAh9S0S9pdsdHro1IixMbqQ9Gpo2sEZf25e3tZSq/ts6XsVmrQWmxmDhYA==", - "path": "microsoft.extensions.configuration.json/10.0.0-rc.2.25502.107", - "hashPath": "microsoft.extensions.configuration.json.10.0.0-rc.2.25502.107.nupkg.sha512" - }, - "Microsoft.Extensions.DependencyInjection/10.0.0-rc.2.25502.107": { - "type": "package", - "serviceable": true, - "sha512": "sha512-mDw80K98jBWCyLFCra51PRv+Ttnjse1lZIzXEFybKby0/ajBFTEeHj/4r/QJexmb8Uun0yaFH1HlFtmHP1YEVA==", - "path": "microsoft.extensions.dependencyinjection/10.0.0-rc.2.25502.107", - "hashPath": "microsoft.extensions.dependencyinjection.10.0.0-rc.2.25502.107.nupkg.sha512" - }, - "Microsoft.Extensions.DependencyInjection.Abstractions/10.0.0-rc.2.25502.107": { - "type": "package", - "serviceable": true, - "sha512": "sha512-8jujunpkNNfTkE9PFHp9/aD6GPKVfNCuz8tUbzOcyU5tQOCoIZId4hwQNVx3Tb8XEWw9BYdh0k5vPpqdCM+UtA==", - "path": "microsoft.extensions.dependencyinjection.abstractions/10.0.0-rc.2.25502.107", - "hashPath": "microsoft.extensions.dependencyinjection.abstractions.10.0.0-rc.2.25502.107.nupkg.sha512" - }, - 
"Microsoft.Extensions.Diagnostics/10.0.0-rc.2.25502.107": { - "type": "package", - "serviceable": true, - "sha512": "sha512-tQfQFXI+ZQcL2RzDarDLx3Amh0WCp1KPGp1ie3y/CMV5hDhEq98WTmcMoXrFY0GkYLEaCQlVi2A6qVLcooG2Ow==", - "path": "microsoft.extensions.diagnostics/10.0.0-rc.2.25502.107", - "hashPath": "microsoft.extensions.diagnostics.10.0.0-rc.2.25502.107.nupkg.sha512" - }, - "Microsoft.Extensions.Diagnostics.Abstractions/10.0.0-rc.2.25502.107": { - "type": "package", - "serviceable": true, - "sha512": "sha512-x6XVv3RiwOlN2unjyX/Zat0gI0HiRoDDdjkwBCwsMftYWpbJu4SiyRwDbrv2zAF8v8nbEEvcWi3/pUxZfaqLQw==", - "path": "microsoft.extensions.diagnostics.abstractions/10.0.0-rc.2.25502.107", - "hashPath": "microsoft.extensions.diagnostics.abstractions.10.0.0-rc.2.25502.107.nupkg.sha512" - }, - "Microsoft.Extensions.FileProviders.Abstractions/10.0.0-rc.2.25502.107": { - "type": "package", - "serviceable": true, - "sha512": "sha512-dOpmW14MkOZIwV6269iXhoMp6alCHBoxqCR4pJ37GLjFaBIyzsIy+Ra8tsGmjHtFvEHKq0JRDIsb1PUkrK+yxw==", - "path": "microsoft.extensions.fileproviders.abstractions/10.0.0-rc.2.25502.107", - "hashPath": "microsoft.extensions.fileproviders.abstractions.10.0.0-rc.2.25502.107.nupkg.sha512" - }, - "Microsoft.Extensions.FileProviders.Physical/10.0.0-rc.2.25502.107": { - "type": "package", - "serviceable": true, - "sha512": "sha512-3+RiR6TEakDL0dCUqR7PjFffyrVMLdx/vAVBiN1mGmwScKYCTePIkYVkWsX85CTKh7R9J4M9C1MHzVdjbKcg3g==", - "path": "microsoft.extensions.fileproviders.physical/10.0.0-rc.2.25502.107", - "hashPath": "microsoft.extensions.fileproviders.physical.10.0.0-rc.2.25502.107.nupkg.sha512" - }, - "Microsoft.Extensions.FileSystemGlobbing/10.0.0-rc.2.25502.107": { - "type": "package", - "serviceable": true, - "sha512": "sha512-XtcPOKB7sMFzj8SxaOglZV3eaqZ1GxUMVZTwaz4pRpBt0S45ghb836uUej4YaI8EzsnUJoqzOIKrTW4CDJMfVw==", - "path": "microsoft.extensions.filesystemglobbing/10.0.0-rc.2.25502.107", - "hashPath": "microsoft.extensions.filesystemglobbing.10.0.0-rc.2.25502.107.nupkg.sha512" - }, - "Microsoft.Extensions.Http/10.0.0-rc.2.25502.107": { - "type": "package", - "serviceable": true, - "sha512": "sha512-d60bvi/NpzkpVlSpxZqOfdjX1hrQgL/byWVc3PryjbmB7zvfLtqQbYifjEWToqtS0Fb1rGnkuVI5JEdOnK1tNQ==", - "path": "microsoft.extensions.http/10.0.0-rc.2.25502.107", - "hashPath": "microsoft.extensions.http.10.0.0-rc.2.25502.107.nupkg.sha512" - }, - "Microsoft.Extensions.Http.Polly/10.0.0-rc.2.25502.107": { - "type": "package", - "serviceable": true, - "sha512": "sha512-aY5vLcrhdXCHsCjYI2lNwfat2vdSuiPs0FFZiy7IM6zcyqdxaefG8J8ezTKkZyiuAtznjVJJT70B660l/WlsxA==", - "path": "microsoft.extensions.http.polly/10.0.0-rc.2.25502.107", - "hashPath": "microsoft.extensions.http.polly.10.0.0-rc.2.25502.107.nupkg.sha512" - }, - "Microsoft.Extensions.Logging/10.0.0-rc.2.25502.107": { - "type": "package", - "serviceable": true, - "sha512": "sha512-q2C5gq86qkTmcYSJJSnw8sgTUyuqENYSOjk/NOYjHnYlKSrK3oI9Rjv1bWFpx2I3Btq9ZBEJb9aMM+IUQ0PvZA==", - "path": "microsoft.extensions.logging/10.0.0-rc.2.25502.107", - "hashPath": "microsoft.extensions.logging.10.0.0-rc.2.25502.107.nupkg.sha512" - }, - "Microsoft.Extensions.Logging.Abstractions/10.0.0-rc.2.25502.107": { - "type": "package", - "serviceable": true, - "sha512": "sha512-SKKKZjyCpBaDQ7yuFjdk6ELnRBRWeZsbnzUfo59Wc4PGhgf92chE3we/QlT6nk6NqlWcUgH/jogM+B/uq/Qdnw==", - "path": "microsoft.extensions.logging.abstractions/10.0.0-rc.2.25502.107", - "hashPath": "microsoft.extensions.logging.abstractions.10.0.0-rc.2.25502.107.nupkg.sha512" - }, - "Microsoft.Extensions.Options/10.0.0-rc.2.25502.107": { - 
"type": "package", - "serviceable": true, - "sha512": "sha512-Ib6BCCjisp7ZUdhtNpSulFO0ODhz/IE4ZZd8OCqQWoRs363BQ0QOZi9KwpqpiEWo51S0kIXWqNicDPGXwpt9pQ==", - "path": "microsoft.extensions.options/10.0.0-rc.2.25502.107", - "hashPath": "microsoft.extensions.options.10.0.0-rc.2.25502.107.nupkg.sha512" - }, - "Microsoft.Extensions.Options.ConfigurationExtensions/10.0.0-rc.2.25502.107": { - "type": "package", - "serviceable": true, - "sha512": "sha512-MFbT8+JKX49YCXEFvlZDzQzI/R3QKzRZlb4dSud+569cMgA9hWbndjWWvOgGASoRcXynGRrBSq1Bw3PeCsB5/Q==", - "path": "microsoft.extensions.options.configurationextensions/10.0.0-rc.2.25502.107", - "hashPath": "microsoft.extensions.options.configurationextensions.10.0.0-rc.2.25502.107.nupkg.sha512" - }, - "Microsoft.Extensions.Primitives/10.0.0-rc.2.25502.107": { - "type": "package", - "serviceable": true, - "sha512": "sha512-9pm2zqqn5u/OsKs2zgkhJEQQeMx9KkVOWPdHrs7Kt5sfpk+eIh/gmpi/mMH/ljS2T/PFsFdCEtm+GS/6l7zoZA==", - "path": "microsoft.extensions.primitives/10.0.0-rc.2.25502.107", - "hashPath": "microsoft.extensions.primitives.10.0.0-rc.2.25502.107.nupkg.sha512" - }, - "Microsoft.IdentityModel.Abstractions/8.14.0": { - "type": "package", - "serviceable": true, - "sha512": "sha512-iwbCpSjD3ehfTwBhtSNEtKPK0ICun6ov7Ibx6ISNA9bfwIyzI2Siwyi9eJFCJBwxowK9xcA1mj+jBWiigeqgcQ==", - "path": "microsoft.identitymodel.abstractions/8.14.0", - "hashPath": "microsoft.identitymodel.abstractions.8.14.0.nupkg.sha512" - }, - "Microsoft.IdentityModel.JsonWebTokens/7.2.0": { - "type": "package", - "serviceable": true, - "sha512": "sha512-zLFA9IBxDWw6Y1nz2PPZyQvF+ZZ4aW1pwgtwusQB39lgxOc2xVqZ8gitsuT1rwyuIbchGOWbax4fsJ8OgGRxSQ==", - "path": "microsoft.identitymodel.jsonwebtokens/7.2.0", - "hashPath": "microsoft.identitymodel.jsonwebtokens.7.2.0.nupkg.sha512" - }, - "Microsoft.IdentityModel.Logging/8.14.0": { - "type": "package", - "serviceable": true, - "sha512": "sha512-eqqnemdW38CKZEHS6diA50BV94QICozDZEvSrsvN3SJXUFwVB9gy+/oz76gldP7nZliA16IglXjXTCTdmU/Ejg==", - "path": "microsoft.identitymodel.logging/8.14.0", - "hashPath": "microsoft.identitymodel.logging.8.14.0.nupkg.sha512" - }, - "Microsoft.IdentityModel.Tokens/8.14.0": { - "type": "package", - "serviceable": true, - "sha512": "sha512-lKIZiBiGd36k02TCdMHp1KlNWisyIvQxcYJvIkz7P4gSQ9zi8dgh6S5Grj8NNG7HWYIPfQymGyoZ6JB5d1Lo1g==", - "path": "microsoft.identitymodel.tokens/8.14.0", - "hashPath": "microsoft.identitymodel.tokens.8.14.0.nupkg.sha512" - }, - "NetEscapades.Configuration.Yaml/2.1.0": { - "type": "package", - "serviceable": true, - "sha512": "sha512-kNTX7kvRvbzBpLd3Vg9iu6t60tTyhVxsruAPgH6kl1GkAZIHLZw9cQysvjUenDU7JEnUgyxQnzfL8627ARDn+g==", - "path": "netescapades.configuration.yaml/2.1.0", - "hashPath": "netescapades.configuration.yaml.2.1.0.nupkg.sha512" - }, - "Pipelines.Sockets.Unofficial/2.2.8": { - "type": "package", - "serviceable": true, - "sha512": "sha512-zG2FApP5zxSx6OcdJQLbZDk2AVlN2BNQD6MorwIfV6gVj0RRxWPEp2LXAxqDGZqeNV1Zp0BNPcNaey/GXmTdvQ==", - "path": "pipelines.sockets.unofficial/2.2.8", - "hashPath": "pipelines.sockets.unofficial.2.2.8.nupkg.sha512" - }, - "Polly/7.2.4": { - "type": "package", - "serviceable": true, - "sha512": "sha512-bw00Ck5sh6ekduDE3mnCo1ohzuad946uslCDEENu3091+6UKnBuKLo4e+yaNcCzXxOZCXWY2gV4a35+K1d4LDA==", - "path": "polly/7.2.4", - "hashPath": "polly.7.2.4.nupkg.sha512" - }, - "Polly.Extensions.Http/3.0.0": { - "type": "package", - "serviceable": true, - "sha512": "sha512-drrG+hB3pYFY7w1c3BD+lSGYvH2oIclH8GRSehgfyP5kjnFnHKQuuBhuHLv+PWyFuaTDyk/vfRpnxOzd11+J8g==", - "path": "polly.extensions.http/3.0.0", - 
"hashPath": "polly.extensions.http.3.0.0.nupkg.sha512" - }, - "SharpCompress/0.41.0": { - "type": "package", - "serviceable": true, - "sha512": "sha512-z04dBVdTIAFTRKi38f0LkajaKA++bR+M8kYCbasXePILD2H+qs7CkLpyiippB24CSbTrWIgpBKm6BenZqkUwvw==", - "path": "sharpcompress/0.41.0", - "hashPath": "sharpcompress.0.41.0.nupkg.sha512" - }, - "StackExchange.Redis/2.8.24": { - "type": "package", - "serviceable": true, - "sha512": "sha512-GWllmsFAtLyhm4C47cOCipGxyEi1NQWTFUHXnJ8hiHOsK/bH3T5eLkWPVW+LRL6jDiB3g3izW3YEHgLuPoJSyA==", - "path": "stackexchange.redis/2.8.24", - "hashPath": "stackexchange.redis.2.8.24.nupkg.sha512" - }, - "System.IdentityModel.Tokens.Jwt/7.2.0": { - "type": "package", - "serviceable": true, - "sha512": "sha512-Z3Fmkrxkp+o51ANMO/PqASRRlEz8dH4mTWwZXMFMXZt2bUGztBiNcIDnwBCElYLYpzpmz4sIqHb6aW8QVLe6YQ==", - "path": "system.identitymodel.tokens.jwt/7.2.0", - "hashPath": "system.identitymodel.tokens.jwt.7.2.0.nupkg.sha512" - }, - "YamlDotNet/9.1.0": { - "type": "package", - "serviceable": true, - "sha512": "sha512-fuvGXU4Ec5HrsmEc+BiFTNPCRf1cGBI2kh/3RzMWgddM2M4ALhbSPoI3X3mhXZUD1qqQd9oSkFAtWjpz8z9eRg==", - "path": "yamldotnet/9.1.0", - "hashPath": "yamldotnet.9.1.0.nupkg.sha512" - }, - "ZstdSharp.Port/0.8.6": { - "type": "package", - "serviceable": true, - "sha512": "sha512-iP4jVLQoQmUjMU88g1WObiNr6YKZGvh4aOXn3yOJsHqZsflwRsxZPcIBvNXgjXO3vQKSLctXGLTpcBPLnWPS8A==", - "path": "zstdsharp.port/0.8.6", - "hashPath": "zstdsharp.port.0.8.6.nupkg.sha512" - }, - "StellaOps.Auth.Abstractions/1.0.0-preview.1": { - "type": "project", - "serviceable": false, - "sha512": "" - }, - "StellaOps.Auth.Client/1.0.0-preview.1": { - "type": "project", - "serviceable": false, - "sha512": "" - }, - "StellaOps.Auth.Security/1.0.0-preview.1": { - "type": "project", - "serviceable": false, - "sha512": "" - }, - "StellaOps.Authority.Plugins.Abstractions/1.0.0": { - "type": "project", - "serviceable": false, - "sha512": "" - }, - "StellaOps.Configuration/1.0.0": { - "type": "project", - "serviceable": false, - "sha512": "" - }, - "StellaOps.Cryptography/1.0.0": { - "type": "project", - "serviceable": false, - "sha512": "" - }, - "StellaOps.DependencyInjection/1.0.0": { - "type": "project", - "serviceable": false, - "sha512": "" - }, - "StellaOps.Plugin/1.0.0": { - "type": "project", - "serviceable": false, - "sha512": "" - }, - "StellaOps.Scanner.Analyzers.Lang/1.0.0": { - "type": "project", - "serviceable": false, - "sha512": "" - }, - "StellaOps.Scanner.Core/1.0.0": { - "type": "project", - "serviceable": false, - "sha512": "" - } - } +{ + "runtimeTarget": { + "name": ".NETCoreApp,Version=v10.0", + "signature": "" + }, + "compilationOptions": {}, + "targets": { + ".NETCoreApp,Version=v10.0": { + "StellaOps.Scanner.Analyzers.Lang.Python/1.0.0": { + "dependencies": { + "SharpCompress": "0.41.0", + "StellaOps.Scanner.Analyzers.Lang": "1.0.0" + }, + "runtime": { + "StellaOps.Scanner.Analyzers.Lang.Python.dll": {} + } + }, + "Konscious.Security.Cryptography.Argon2/1.3.1": { + "dependencies": { + "Konscious.Security.Cryptography.Blake2": "1.1.1" + }, + "runtime": { + "lib/net8.0/Konscious.Security.Cryptography.Argon2.dll": { + "assemblyVersion": "1.3.1.0", + "fileVersion": "1.3.1.0" + } + } + }, + "Konscious.Security.Cryptography.Blake2/1.1.1": { + "runtime": { + "lib/net8.0/Konscious.Security.Cryptography.Blake2.dll": { + "assemblyVersion": "1.1.1.0", + "fileVersion": "1.1.1.0" + } + } + }, + "Microsoft.Extensions.Configuration/10.0.0-rc.2.25502.107": { + "dependencies": { + 
"Microsoft.Extensions.Configuration.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Primitives": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.Configuration.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Configuration.Abstractions/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.Primitives": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.Configuration.Abstractions.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Configuration.Binder/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.Configuration": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Configuration.Abstractions": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.Configuration.Binder.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Configuration.EnvironmentVariables/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.Configuration": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Configuration.Abstractions": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.Configuration.EnvironmentVariables.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Configuration.FileExtensions/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.Configuration": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Configuration.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.FileProviders.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.FileProviders.Physical": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Primitives": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.Configuration.FileExtensions.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Configuration.Json/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.Configuration": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Configuration.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Configuration.FileExtensions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.FileProviders.Abstractions": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.Configuration.Json.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.DependencyInjection/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.DependencyInjection.Abstractions": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.DependencyInjection.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.DependencyInjection.Abstractions/10.0.0-rc.2.25502.107": { + "runtime": { + "lib/net10.0/Microsoft.Extensions.DependencyInjection.Abstractions.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Diagnostics/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.Configuration": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Diagnostics.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Options.ConfigurationExtensions": "10.0.0-rc.2.25502.107" + }, + "runtime": { + 
"lib/net10.0/Microsoft.Extensions.Diagnostics.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Diagnostics.Abstractions/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.DependencyInjection.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Options": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.Diagnostics.Abstractions.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.FileProviders.Abstractions/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.Primitives": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.FileProviders.Abstractions.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.FileProviders.Physical/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.FileProviders.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.FileSystemGlobbing": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Primitives": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.FileProviders.Physical.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.FileSystemGlobbing/10.0.0-rc.2.25502.107": { + "runtime": { + "lib/net10.0/Microsoft.Extensions.FileSystemGlobbing.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Http/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.Configuration.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.DependencyInjection.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Diagnostics": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Logging": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Logging.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Options": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.Http.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Http.Polly/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.Http": "10.0.0-rc.2.25502.107", + "Polly": "7.2.4", + "Polly.Extensions.Http": "3.0.0" + }, + "runtime": { + "lib/netstandard2.0/Microsoft.Extensions.Http.Polly.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Logging/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.DependencyInjection": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Logging.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Options": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.Logging.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Logging.Abstractions/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.DependencyInjection.Abstractions": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.Logging.Abstractions.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Options/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.DependencyInjection.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Primitives": "10.0.0-rc.2.25502.107" + }, + 
"runtime": { + "lib/net10.0/Microsoft.Extensions.Options.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Options.ConfigurationExtensions/10.0.0-rc.2.25502.107": { + "dependencies": { + "Microsoft.Extensions.Configuration.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Configuration.Binder": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.DependencyInjection.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Options": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Primitives": "10.0.0-rc.2.25502.107" + }, + "runtime": { + "lib/net10.0/Microsoft.Extensions.Options.ConfigurationExtensions.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.Extensions.Primitives/10.0.0-rc.2.25502.107": { + "runtime": { + "lib/net10.0/Microsoft.Extensions.Primitives.dll": { + "assemblyVersion": "10.0.0.0", + "fileVersion": "10.0.25.50307" + } + } + }, + "Microsoft.IdentityModel.Abstractions/8.14.0": { + "runtime": { + "lib/net9.0/Microsoft.IdentityModel.Abstractions.dll": { + "assemblyVersion": "8.14.0.0", + "fileVersion": "8.14.0.60815" + } + } + }, + "Microsoft.IdentityModel.JsonWebTokens/7.2.0": { + "dependencies": { + "Microsoft.IdentityModel.Tokens": "8.14.0" + }, + "runtime": { + "lib/net8.0/Microsoft.IdentityModel.JsonWebTokens.dll": { + "assemblyVersion": "7.2.0.0", + "fileVersion": "7.2.0.50110" + } + } + }, + "Microsoft.IdentityModel.Logging/8.14.0": { + "dependencies": { + "Microsoft.IdentityModel.Abstractions": "8.14.0" + }, + "runtime": { + "lib/net9.0/Microsoft.IdentityModel.Logging.dll": { + "assemblyVersion": "8.14.0.0", + "fileVersion": "8.14.0.60815" + } + } + }, + "Microsoft.IdentityModel.Tokens/8.14.0": { + "dependencies": { + "Microsoft.Extensions.Logging.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.IdentityModel.Logging": "8.14.0" + }, + "runtime": { + "lib/net9.0/Microsoft.IdentityModel.Tokens.dll": { + "assemblyVersion": "8.14.0.0", + "fileVersion": "8.14.0.60815" + } + } + }, + "NetEscapades.Configuration.Yaml/2.1.0": { + "dependencies": { + "Microsoft.Extensions.Configuration": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Configuration.FileExtensions": "10.0.0-rc.2.25502.107", + "YamlDotNet": "9.1.0" + }, + "runtime": { + "lib/netstandard2.0/NetEscapades.Configuration.Yaml.dll": { + "assemblyVersion": "2.1.0.0", + "fileVersion": "2.1.0.0" + } + } + }, + "Pipelines.Sockets.Unofficial/2.2.8": { + "runtime": { + "lib/net5.0/Pipelines.Sockets.Unofficial.dll": { + "assemblyVersion": "1.0.0.0", + "fileVersion": "2.2.8.1080" + } + } + }, + "Polly/7.2.4": { + "runtime": { + "lib/netstandard2.0/Polly.dll": { + "assemblyVersion": "7.0.0.0", + "fileVersion": "7.2.4.982" + } + } + }, + "Polly.Extensions.Http/3.0.0": { + "dependencies": { + "Polly": "7.2.4" + }, + "runtime": { + "lib/netstandard2.0/Polly.Extensions.Http.dll": { + "assemblyVersion": "3.0.0.0", + "fileVersion": "3.0.0.0" + } + } + }, + "SharpCompress/0.41.0": { + "dependencies": { + "ZstdSharp.Port": "0.8.6" + }, + "runtime": { + "lib/net8.0/SharpCompress.dll": { + "assemblyVersion": "0.41.0.0", + "fileVersion": "0.41.0.0" + } + } + }, + "StackExchange.Redis/2.8.24": { + "dependencies": { + "Microsoft.Extensions.Logging.Abstractions": "10.0.0-rc.2.25502.107", + "Pipelines.Sockets.Unofficial": "2.2.8" + }, + "runtime": { + "lib/net8.0/StackExchange.Redis.dll": { + "assemblyVersion": "2.0.0.0", + "fileVersion": "2.8.24.3255" + } + } + }, + "System.IdentityModel.Tokens.Jwt/7.2.0": { + 
"dependencies": { + "Microsoft.IdentityModel.JsonWebTokens": "7.2.0", + "Microsoft.IdentityModel.Tokens": "8.14.0" + }, + "runtime": { + "lib/net8.0/System.IdentityModel.Tokens.Jwt.dll": { + "assemblyVersion": "7.2.0.0", + "fileVersion": "7.2.0.50110" + } + } + }, + "YamlDotNet/9.1.0": { + "runtime": { + "lib/netstandard2.1/YamlDotNet.dll": { + "assemblyVersion": "9.0.0.0", + "fileVersion": "9.1.0.0" + } + } + }, + "ZstdSharp.Port/0.8.6": { + "runtime": { + "lib/net9.0/ZstdSharp.dll": { + "assemblyVersion": "0.8.6.0", + "fileVersion": "0.8.6.0" + } + } + }, + "StellaOps.Auth.Abstractions/1.0.0-preview.1": { + "dependencies": { + "SharpCompress": "0.41.0" + }, + "runtime": { + "StellaOps.Auth.Abstractions.dll": { + "assemblyVersion": "1.0.0.0", + "fileVersion": "1.0.0.0" + } + } + }, + "StellaOps.Auth.Client/1.0.0-preview.1": { + "dependencies": { + "Microsoft.Extensions.Http.Polly": "10.0.0-rc.2.25502.107", + "Microsoft.IdentityModel.Tokens": "8.14.0", + "SharpCompress": "0.41.0", + "StellaOps.Auth.Abstractions": "1.0.0-preview.1", + "StellaOps.Configuration": "1.0.0" + }, + "runtime": { + "StellaOps.Auth.Client.dll": { + "assemblyVersion": "1.0.0.0", + "fileVersion": "1.0.0.0" + } + } + }, + "StellaOps.Auth.Security/1.0.0-preview.1": { + "dependencies": { + "Microsoft.IdentityModel.Tokens": "8.14.0", + "SharpCompress": "0.41.0", + "StackExchange.Redis": "2.8.24", + "System.IdentityModel.Tokens.Jwt": "7.2.0" + }, + "runtime": { + "StellaOps.Auth.Security.dll": { + "assemblyVersion": "1.0.0.0", + "fileVersion": "1.0.0.0" + } + } + }, + "StellaOps.Authority.Plugins.Abstractions/1.0.0": { + "dependencies": { + "Microsoft.Extensions.Configuration.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.DependencyInjection.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Logging.Abstractions": "10.0.0-rc.2.25502.107", + "SharpCompress": "0.41.0", + "StellaOps.Auth.Abstractions": "1.0.0-preview.1", + "StellaOps.Cryptography": "1.0.0" + }, + "runtime": { + "StellaOps.Authority.Plugins.Abstractions.dll": { + "assemblyVersion": "1.0.0.0", + "fileVersion": "1.0.0.0" + } + } + }, + "StellaOps.Configuration/1.0.0": { + "dependencies": { + "Microsoft.Extensions.Configuration": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Configuration.Binder": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Configuration.EnvironmentVariables": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Configuration.FileExtensions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Configuration.Json": "10.0.0-rc.2.25502.107", + "NetEscapades.Configuration.Yaml": "2.1.0", + "SharpCompress": "0.41.0", + "StellaOps.Authority.Plugins.Abstractions": "1.0.0", + "StellaOps.Cryptography": "1.0.0" + }, + "runtime": { + "StellaOps.Configuration.dll": { + "assemblyVersion": "1.0.0.0", + "fileVersion": "1.0.0.0" + } + } + }, + "StellaOps.Cryptography/1.0.0": { + "dependencies": { + "Konscious.Security.Cryptography.Argon2": "1.3.1", + "Microsoft.IdentityModel.Tokens": "8.14.0", + "SharpCompress": "0.41.0" + }, + "runtime": { + "StellaOps.Cryptography.dll": { + "assemblyVersion": "1.0.0.0", + "fileVersion": "1.0.0.0" + } + } + }, + "StellaOps.DependencyInjection/1.0.0": { + "dependencies": { + "Microsoft.Extensions.Configuration.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.DependencyInjection.Abstractions": "10.0.0-rc.2.25502.107", + "SharpCompress": "0.41.0" + }, + "runtime": { + "StellaOps.DependencyInjection.dll": { + "assemblyVersion": "1.0.0.0", + "fileVersion": "1.0.0.0" + } + } + }, + 
"StellaOps.Plugin/1.0.0": { + "dependencies": { + "Microsoft.Extensions.Configuration.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.DependencyInjection.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Logging.Abstractions": "10.0.0-rc.2.25502.107", + "SharpCompress": "0.41.0", + "StellaOps.DependencyInjection": "1.0.0" + }, + "runtime": { + "StellaOps.Plugin.dll": { + "assemblyVersion": "1.0.0.0", + "fileVersion": "1.0.0.0" + } + } + }, + "StellaOps.Scanner.Analyzers.Lang/1.0.0": { + "dependencies": { + "SharpCompress": "0.41.0", + "StellaOps.Plugin": "1.0.0", + "StellaOps.Scanner.Core": "1.0.0" + }, + "runtime": { + "StellaOps.Scanner.Analyzers.Lang.dll": { + "assemblyVersion": "1.0.0.0", + "fileVersion": "1.0.0.0" + } + } + }, + "StellaOps.Scanner.Core/1.0.0": { + "dependencies": { + "Microsoft.Extensions.Logging.Abstractions": "10.0.0-rc.2.25502.107", + "Microsoft.Extensions.Options": "10.0.0-rc.2.25502.107", + "SharpCompress": "0.41.0", + "StellaOps.Auth.Client": "1.0.0-preview.1", + "StellaOps.Auth.Security": "1.0.0-preview.1" + }, + "runtime": { + "StellaOps.Scanner.Core.dll": { + "assemblyVersion": "1.0.0.0", + "fileVersion": "1.0.0.0" + } + } + } + } + }, + "libraries": { + "StellaOps.Scanner.Analyzers.Lang.Python/1.0.0": { + "type": "project", + "serviceable": false, + "sha512": "" + }, + "Konscious.Security.Cryptography.Argon2/1.3.1": { + "type": "package", + "serviceable": true, + "sha512": "sha512-T+OAGwzYYXftahpOxO7J4xA5K6urxwGnWQf3M+Jpi+76Azv/0T3M5SuN+h7/QvXuiqNw3ZEZ5QqVLI5ygDAylw==", + "path": "konscious.security.cryptography.argon2/1.3.1", + "hashPath": "konscious.security.cryptography.argon2.1.3.1.nupkg.sha512" + }, + "Konscious.Security.Cryptography.Blake2/1.1.1": { + "type": "package", + "serviceable": true, + "sha512": "sha512-odwOyzj/J/lHJZNwFWJGU/LRecBShupAJ2S8TQqZfhUe9niHzu/voBYK5wuVKsvSpzbfupKQYZguVyIk1sgOkQ==", + "path": "konscious.security.cryptography.blake2/1.1.1", + "hashPath": "konscious.security.cryptography.blake2.1.1.1.nupkg.sha512" + }, + "Microsoft.Extensions.Configuration/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-273Ggibh3DdVrj47ENbUGIirOiqmLTAizpkvOD584Ps6NL/CMXPzesijnJgsjp7Fv/UCp69FKYBaSxZZ3q5R9g==", + "path": "microsoft.extensions.configuration/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.configuration.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Configuration.Abstractions/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-H+i/Qy30Rg/K9BcW2Z6DCHPCzwMH3bCwNOjEz31shWTUDK8GeeeMnrKVusprTcRA2Y6yPST+hg2zc3whPEs14Q==", + "path": "microsoft.extensions.configuration.abstractions/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.configuration.abstractions.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Configuration.Binder/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-aA6/V6lw1Gueyb1PqhHAl/i/qUUuv+Fusfk4oaMOzzOjspBkYtPpNHCmml/0t1x0/DnZoed+u2WwpP+mSwd8Dg==", + "path": "microsoft.extensions.configuration.binder/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.configuration.binder.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Configuration.EnvironmentVariables/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-2SV60IUAWfluZv2YHNZ+nUOljYHGIsy96FpJs+N9/bgKDYs9qr6DdzPeIhiHrz+XvRzbybvcwtTBf5dKrYN4oA==", + "path": 
"microsoft.extensions.configuration.environmentvariables/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.configuration.environmentvariables.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Configuration.FileExtensions/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-5KrgXSTFR8cFLmDXXoT7GLVvDyHNw0Z9xG4doD78Q/HdlAR4jiMzmLLS9GFXrPGopmC6qqEZr2VBJHEu16INcA==", + "path": "microsoft.extensions.configuration.fileextensions/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.configuration.fileextensions.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Configuration.Json/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-USwHuFz4BFKoaqSydHWH/d7Mr+fVsAh9S0S9pdsdHro1IixMbqQ9Gpo2sEZf25e3tZSq/ts6XsVmrQWmxmDhYA==", + "path": "microsoft.extensions.configuration.json/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.configuration.json.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.DependencyInjection/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-mDw80K98jBWCyLFCra51PRv+Ttnjse1lZIzXEFybKby0/ajBFTEeHj/4r/QJexmb8Uun0yaFH1HlFtmHP1YEVA==", + "path": "microsoft.extensions.dependencyinjection/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.dependencyinjection.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.DependencyInjection.Abstractions/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-8jujunpkNNfTkE9PFHp9/aD6GPKVfNCuz8tUbzOcyU5tQOCoIZId4hwQNVx3Tb8XEWw9BYdh0k5vPpqdCM+UtA==", + "path": "microsoft.extensions.dependencyinjection.abstractions/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.dependencyinjection.abstractions.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Diagnostics/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-tQfQFXI+ZQcL2RzDarDLx3Amh0WCp1KPGp1ie3y/CMV5hDhEq98WTmcMoXrFY0GkYLEaCQlVi2A6qVLcooG2Ow==", + "path": "microsoft.extensions.diagnostics/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.diagnostics.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Diagnostics.Abstractions/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-x6XVv3RiwOlN2unjyX/Zat0gI0HiRoDDdjkwBCwsMftYWpbJu4SiyRwDbrv2zAF8v8nbEEvcWi3/pUxZfaqLQw==", + "path": "microsoft.extensions.diagnostics.abstractions/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.diagnostics.abstractions.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.FileProviders.Abstractions/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-dOpmW14MkOZIwV6269iXhoMp6alCHBoxqCR4pJ37GLjFaBIyzsIy+Ra8tsGmjHtFvEHKq0JRDIsb1PUkrK+yxw==", + "path": "microsoft.extensions.fileproviders.abstractions/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.fileproviders.abstractions.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.FileProviders.Physical/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-3+RiR6TEakDL0dCUqR7PjFffyrVMLdx/vAVBiN1mGmwScKYCTePIkYVkWsX85CTKh7R9J4M9C1MHzVdjbKcg3g==", + "path": "microsoft.extensions.fileproviders.physical/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.fileproviders.physical.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.FileSystemGlobbing/10.0.0-rc.2.25502.107": { + "type": "package", + 
"serviceable": true, + "sha512": "sha512-XtcPOKB7sMFzj8SxaOglZV3eaqZ1GxUMVZTwaz4pRpBt0S45ghb836uUej4YaI8EzsnUJoqzOIKrTW4CDJMfVw==", + "path": "microsoft.extensions.filesystemglobbing/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.filesystemglobbing.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Http/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-d60bvi/NpzkpVlSpxZqOfdjX1hrQgL/byWVc3PryjbmB7zvfLtqQbYifjEWToqtS0Fb1rGnkuVI5JEdOnK1tNQ==", + "path": "microsoft.extensions.http/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.http.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Http.Polly/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-aY5vLcrhdXCHsCjYI2lNwfat2vdSuiPs0FFZiy7IM6zcyqdxaefG8J8ezTKkZyiuAtznjVJJT70B660l/WlsxA==", + "path": "microsoft.extensions.http.polly/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.http.polly.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Logging/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-q2C5gq86qkTmcYSJJSnw8sgTUyuqENYSOjk/NOYjHnYlKSrK3oI9Rjv1bWFpx2I3Btq9ZBEJb9aMM+IUQ0PvZA==", + "path": "microsoft.extensions.logging/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.logging.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Logging.Abstractions/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-SKKKZjyCpBaDQ7yuFjdk6ELnRBRWeZsbnzUfo59Wc4PGhgf92chE3we/QlT6nk6NqlWcUgH/jogM+B/uq/Qdnw==", + "path": "microsoft.extensions.logging.abstractions/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.logging.abstractions.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Options/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-Ib6BCCjisp7ZUdhtNpSulFO0ODhz/IE4ZZd8OCqQWoRs363BQ0QOZi9KwpqpiEWo51S0kIXWqNicDPGXwpt9pQ==", + "path": "microsoft.extensions.options/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.options.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Options.ConfigurationExtensions/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-MFbT8+JKX49YCXEFvlZDzQzI/R3QKzRZlb4dSud+569cMgA9hWbndjWWvOgGASoRcXynGRrBSq1Bw3PeCsB5/Q==", + "path": "microsoft.extensions.options.configurationextensions/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.options.configurationextensions.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.Extensions.Primitives/10.0.0-rc.2.25502.107": { + "type": "package", + "serviceable": true, + "sha512": "sha512-9pm2zqqn5u/OsKs2zgkhJEQQeMx9KkVOWPdHrs7Kt5sfpk+eIh/gmpi/mMH/ljS2T/PFsFdCEtm+GS/6l7zoZA==", + "path": "microsoft.extensions.primitives/10.0.0-rc.2.25502.107", + "hashPath": "microsoft.extensions.primitives.10.0.0-rc.2.25502.107.nupkg.sha512" + }, + "Microsoft.IdentityModel.Abstractions/8.14.0": { + "type": "package", + "serviceable": true, + "sha512": "sha512-iwbCpSjD3ehfTwBhtSNEtKPK0ICun6ov7Ibx6ISNA9bfwIyzI2Siwyi9eJFCJBwxowK9xcA1mj+jBWiigeqgcQ==", + "path": "microsoft.identitymodel.abstractions/8.14.0", + "hashPath": "microsoft.identitymodel.abstractions.8.14.0.nupkg.sha512" + }, + "Microsoft.IdentityModel.JsonWebTokens/7.2.0": { + "type": "package", + "serviceable": true, + "sha512": "sha512-zLFA9IBxDWw6Y1nz2PPZyQvF+ZZ4aW1pwgtwusQB39lgxOc2xVqZ8gitsuT1rwyuIbchGOWbax4fsJ8OgGRxSQ==", + "path": 
"microsoft.identitymodel.jsonwebtokens/7.2.0", + "hashPath": "microsoft.identitymodel.jsonwebtokens.7.2.0.nupkg.sha512" + }, + "Microsoft.IdentityModel.Logging/8.14.0": { + "type": "package", + "serviceable": true, + "sha512": "sha512-eqqnemdW38CKZEHS6diA50BV94QICozDZEvSrsvN3SJXUFwVB9gy+/oz76gldP7nZliA16IglXjXTCTdmU/Ejg==", + "path": "microsoft.identitymodel.logging/8.14.0", + "hashPath": "microsoft.identitymodel.logging.8.14.0.nupkg.sha512" + }, + "Microsoft.IdentityModel.Tokens/8.14.0": { + "type": "package", + "serviceable": true, + "sha512": "sha512-lKIZiBiGd36k02TCdMHp1KlNWisyIvQxcYJvIkz7P4gSQ9zi8dgh6S5Grj8NNG7HWYIPfQymGyoZ6JB5d1Lo1g==", + "path": "microsoft.identitymodel.tokens/8.14.0", + "hashPath": "microsoft.identitymodel.tokens.8.14.0.nupkg.sha512" + }, + "NetEscapades.Configuration.Yaml/2.1.0": { + "type": "package", + "serviceable": true, + "sha512": "sha512-kNTX7kvRvbzBpLd3Vg9iu6t60tTyhVxsruAPgH6kl1GkAZIHLZw9cQysvjUenDU7JEnUgyxQnzfL8627ARDn+g==", + "path": "netescapades.configuration.yaml/2.1.0", + "hashPath": "netescapades.configuration.yaml.2.1.0.nupkg.sha512" + }, + "Pipelines.Sockets.Unofficial/2.2.8": { + "type": "package", + "serviceable": true, + "sha512": "sha512-zG2FApP5zxSx6OcdJQLbZDk2AVlN2BNQD6MorwIfV6gVj0RRxWPEp2LXAxqDGZqeNV1Zp0BNPcNaey/GXmTdvQ==", + "path": "pipelines.sockets.unofficial/2.2.8", + "hashPath": "pipelines.sockets.unofficial.2.2.8.nupkg.sha512" + }, + "Polly/7.2.4": { + "type": "package", + "serviceable": true, + "sha512": "sha512-bw00Ck5sh6ekduDE3mnCo1ohzuad946uslCDEENu3091+6UKnBuKLo4e+yaNcCzXxOZCXWY2gV4a35+K1d4LDA==", + "path": "polly/7.2.4", + "hashPath": "polly.7.2.4.nupkg.sha512" + }, + "Polly.Extensions.Http/3.0.0": { + "type": "package", + "serviceable": true, + "sha512": "sha512-drrG+hB3pYFY7w1c3BD+lSGYvH2oIclH8GRSehgfyP5kjnFnHKQuuBhuHLv+PWyFuaTDyk/vfRpnxOzd11+J8g==", + "path": "polly.extensions.http/3.0.0", + "hashPath": "polly.extensions.http.3.0.0.nupkg.sha512" + }, + "SharpCompress/0.41.0": { + "type": "package", + "serviceable": true, + "sha512": "sha512-z04dBVdTIAFTRKi38f0LkajaKA++bR+M8kYCbasXePILD2H+qs7CkLpyiippB24CSbTrWIgpBKm6BenZqkUwvw==", + "path": "sharpcompress/0.41.0", + "hashPath": "sharpcompress.0.41.0.nupkg.sha512" + }, + "StackExchange.Redis/2.8.24": { + "type": "package", + "serviceable": true, + "sha512": "sha512-GWllmsFAtLyhm4C47cOCipGxyEi1NQWTFUHXnJ8hiHOsK/bH3T5eLkWPVW+LRL6jDiB3g3izW3YEHgLuPoJSyA==", + "path": "stackexchange.redis/2.8.24", + "hashPath": "stackexchange.redis.2.8.24.nupkg.sha512" + }, + "System.IdentityModel.Tokens.Jwt/7.2.0": { + "type": "package", + "serviceable": true, + "sha512": "sha512-Z3Fmkrxkp+o51ANMO/PqASRRlEz8dH4mTWwZXMFMXZt2bUGztBiNcIDnwBCElYLYpzpmz4sIqHb6aW8QVLe6YQ==", + "path": "system.identitymodel.tokens.jwt/7.2.0", + "hashPath": "system.identitymodel.tokens.jwt.7.2.0.nupkg.sha512" + }, + "YamlDotNet/9.1.0": { + "type": "package", + "serviceable": true, + "sha512": "sha512-fuvGXU4Ec5HrsmEc+BiFTNPCRf1cGBI2kh/3RzMWgddM2M4ALhbSPoI3X3mhXZUD1qqQd9oSkFAtWjpz8z9eRg==", + "path": "yamldotnet/9.1.0", + "hashPath": "yamldotnet.9.1.0.nupkg.sha512" + }, + "ZstdSharp.Port/0.8.6": { + "type": "package", + "serviceable": true, + "sha512": "sha512-iP4jVLQoQmUjMU88g1WObiNr6YKZGvh4aOXn3yOJsHqZsflwRsxZPcIBvNXgjXO3vQKSLctXGLTpcBPLnWPS8A==", + "path": "zstdsharp.port/0.8.6", + "hashPath": "zstdsharp.port.0.8.6.nupkg.sha512" + }, + "StellaOps.Auth.Abstractions/1.0.0-preview.1": { + "type": "project", + "serviceable": false, + "sha512": "" + }, + "StellaOps.Auth.Client/1.0.0-preview.1": { + "type": "project", + 
"serviceable": false, + "sha512": "" + }, + "StellaOps.Auth.Security/1.0.0-preview.1": { + "type": "project", + "serviceable": false, + "sha512": "" + }, + "StellaOps.Authority.Plugins.Abstractions/1.0.0": { + "type": "project", + "serviceable": false, + "sha512": "" + }, + "StellaOps.Configuration/1.0.0": { + "type": "project", + "serviceable": false, + "sha512": "" + }, + "StellaOps.Cryptography/1.0.0": { + "type": "project", + "serviceable": false, + "sha512": "" + }, + "StellaOps.DependencyInjection/1.0.0": { + "type": "project", + "serviceable": false, + "sha512": "" + }, + "StellaOps.Plugin/1.0.0": { + "type": "project", + "serviceable": false, + "sha512": "" + }, + "StellaOps.Scanner.Analyzers.Lang/1.0.0": { + "type": "project", + "serviceable": false, + "sha512": "" + }, + "StellaOps.Scanner.Core/1.0.0": { + "type": "project", + "serviceable": false, + "sha512": "" + } + } } \ No newline at end of file diff --git a/out/linknotmerge-bench.csv b/out/linknotmerge-bench.csv index 7e141e9a..7070a4e0 100644 --- a/out/linknotmerge-bench.csv +++ b/out/linknotmerge-bench.csv @@ -1,4 +1,4 @@ -scenario,iterations,observations,aliases,linksets,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_throughput_per_sec,min_throughput_per_sec,mean_mongo_throughput_per_sec,min_mongo_throughput_per_sec,max_allocated_mb -lnm_ingest_baseline,5,5000,500,6000,555.1984,823.4957,866.6236,366.2635,188.9349,9877.7916,5769.5175,15338.0851,8405.1257,62.4477 -lnm_ingest_fanout_medium,5,10000,800,14800,785.8909,841.6247,842.8815,453.5087,332.3822,12794.9550,11864.0639,22086.0320,20891.0579,145.8328 -lnm_ingest_fanout_high,5,15000,1200,17400,1299.3458,1367.0934,1369.9430,741.6265,557.7193,11571.0991,10949.3607,20232.5180,19781.6762,238.3450 +scenario,iterations,observations,aliases,linksets,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_throughput_per_sec,min_throughput_per_sec,mean_mongo_throughput_per_sec,min_mongo_throughput_per_sec,max_allocated_mb +lnm_ingest_baseline,5,5000,500,6000,555.1984,823.4957,866.6236,366.2635,188.9349,9877.7916,5769.5175,15338.0851,8405.1257,62.4477 +lnm_ingest_fanout_medium,5,10000,800,14800,785.8909,841.6247,842.8815,453.5087,332.3822,12794.9550,11864.0639,22086.0320,20891.0579,145.8328 +lnm_ingest_fanout_high,5,15000,1200,17400,1299.3458,1367.0934,1369.9430,741.6265,557.7193,11571.0991,10949.3607,20232.5180,19781.6762,238.3450 diff --git a/out/linknotmerge-bench.json b/out/linknotmerge-bench.json index 9ac354d3..cb8871e8 100644 --- a/out/linknotmerge-bench.json +++ b/out/linknotmerge-bench.json @@ -1,84 +1,84 @@ -{ - "schemaVersion": "linknotmerge-bench/1.0", - "capturedAt": "2025-10-26T21:09:17.6345283+00:00", - "scenarios": [ - { - "id": "lnm_ingest_baseline", - "label": "5k observations, 500 aliases", - "iterations": 5, - "observations": 5000, - "aliases": 500, - "linksets": 6000, - "meanTotalMs": 555.1983600000001, - "p95TotalMs": 823.49568, - "maxTotalMs": 866.6236, - "meanInsertMs": 366.2635, - "meanCorrelationMs": 188.93486000000001, - "meanThroughputPerSecond": 9877.791561756272, - "minThroughputPerSecond": 5769.517469868118, - "meanMongoThroughputPerSecond": 15338.085148262326, - "minMongoThroughputPerSecond": 8405.1257146248, - "maxAllocatedMb": 62.44767761230469, - "thresholdMs": 900, - "minThroughputThresholdPerSecond": 5500, - "minMongoThroughputThresholdPerSecond": 8000, - "maxAllocatedThresholdMb": 160, - "regression": { - "limit": 1.15, - "breached": false - } - }, - { - "id": "lnm_ingest_fanout_medium", 
- "label": "10k observations, 800 aliases", - "iterations": 5, - "observations": 10000, - "aliases": 800, - "linksets": 14800, - "meanTotalMs": 785.89092, - "p95TotalMs": 841.6247, - "maxTotalMs": 842.8815, - "meanInsertMs": 453.50868, - "meanCorrelationMs": 332.38224, - "meanThroughputPerSecond": 12794.954951406156, - "minThroughputPerSecond": 11864.063928322072, - "meanMongoThroughputPerSecond": 22086.032034175576, - "minMongoThroughputPerSecond": 20891.057937797712, - "maxAllocatedMb": 145.83282470703125, - "thresholdMs": 1300, - "minThroughputThresholdPerSecond": 8000, - "minMongoThroughputThresholdPerSecond": 13000, - "maxAllocatedThresholdMb": 220, - "regression": { - "limit": 1.15, - "breached": false - } - }, - { - "id": "lnm_ingest_fanout_high", - "label": "15k observations, 1200 aliases", - "iterations": 5, - "observations": 15000, - "aliases": 1200, - "linksets": 17400, - "meanTotalMs": 1299.3458, - "p95TotalMs": 1367.09344, - "maxTotalMs": 1369.943, - "meanInsertMs": 741.62654, - "meanCorrelationMs": 557.71926, - "meanThroughputPerSecond": 11571.099129140825, - "minThroughputPerSecond": 10949.360666830664, - "meanMongoThroughputPerSecond": 20232.5179777937, - "minMongoThroughputPerSecond": 19781.676233305086, - "maxAllocatedMb": 238.34496307373047, - "thresholdMs": 2200, - "minThroughputThresholdPerSecond": 7000, - "minMongoThroughputThresholdPerSecond": 13000, - "maxAllocatedThresholdMb": 300, - "regression": { - "limit": 1.15, - "breached": false - } - } - ] +{ + "schemaVersion": "linknotmerge-bench/1.0", + "capturedAt": "2025-10-26T21:09:17.6345283+00:00", + "scenarios": [ + { + "id": "lnm_ingest_baseline", + "label": "5k observations, 500 aliases", + "iterations": 5, + "observations": 5000, + "aliases": 500, + "linksets": 6000, + "meanTotalMs": 555.1983600000001, + "p95TotalMs": 823.49568, + "maxTotalMs": 866.6236, + "meanInsertMs": 366.2635, + "meanCorrelationMs": 188.93486000000001, + "meanThroughputPerSecond": 9877.791561756272, + "minThroughputPerSecond": 5769.517469868118, + "meanMongoThroughputPerSecond": 15338.085148262326, + "minMongoThroughputPerSecond": 8405.1257146248, + "maxAllocatedMb": 62.44767761230469, + "thresholdMs": 900, + "minThroughputThresholdPerSecond": 5500, + "minMongoThroughputThresholdPerSecond": 8000, + "maxAllocatedThresholdMb": 160, + "regression": { + "limit": 1.15, + "breached": false + } + }, + { + "id": "lnm_ingest_fanout_medium", + "label": "10k observations, 800 aliases", + "iterations": 5, + "observations": 10000, + "aliases": 800, + "linksets": 14800, + "meanTotalMs": 785.89092, + "p95TotalMs": 841.6247, + "maxTotalMs": 842.8815, + "meanInsertMs": 453.50868, + "meanCorrelationMs": 332.38224, + "meanThroughputPerSecond": 12794.954951406156, + "minThroughputPerSecond": 11864.063928322072, + "meanMongoThroughputPerSecond": 22086.032034175576, + "minMongoThroughputPerSecond": 20891.057937797712, + "maxAllocatedMb": 145.83282470703125, + "thresholdMs": 1300, + "minThroughputThresholdPerSecond": 8000, + "minMongoThroughputThresholdPerSecond": 13000, + "maxAllocatedThresholdMb": 220, + "regression": { + "limit": 1.15, + "breached": false + } + }, + { + "id": "lnm_ingest_fanout_high", + "label": "15k observations, 1200 aliases", + "iterations": 5, + "observations": 15000, + "aliases": 1200, + "linksets": 17400, + "meanTotalMs": 1299.3458, + "p95TotalMs": 1367.09344, + "maxTotalMs": 1369.943, + "meanInsertMs": 741.62654, + "meanCorrelationMs": 557.71926, + "meanThroughputPerSecond": 11571.099129140825, + "minThroughputPerSecond": 
10949.360666830664, + "meanMongoThroughputPerSecond": 20232.5179777937, + "minMongoThroughputPerSecond": 19781.676233305086, + "maxAllocatedMb": 238.34496307373047, + "thresholdMs": 2200, + "minThroughputThresholdPerSecond": 7000, + "minMongoThroughputThresholdPerSecond": 13000, + "maxAllocatedThresholdMb": 300, + "regression": { + "limit": 1.15, + "breached": false + } + } + ] } \ No newline at end of file diff --git a/out/linknotmerge-bench.prom b/out/linknotmerge-bench.prom index 2401ba2f..1f40b174 100644 --- a/out/linknotmerge-bench.prom +++ b/out/linknotmerge-bench.prom @@ -1,60 +1,60 @@ -# HELP linknotmerge_bench_total_ms Link-Not-Merge benchmark total duration metrics (milliseconds). -# TYPE linknotmerge_bench_total_ms gauge -# HELP linknotmerge_bench_correlation_ms Link-Not-Merge benchmark correlation duration metrics (milliseconds). -# TYPE linknotmerge_bench_correlation_ms gauge -# HELP linknotmerge_bench_insert_ms Link-Not-Merge benchmark Mongo insert duration metrics (milliseconds). -# TYPE linknotmerge_bench_insert_ms gauge -# HELP linknotmerge_bench_throughput_per_sec Link-Not-Merge benchmark throughput metrics (observations per second). -# TYPE linknotmerge_bench_throughput_per_sec gauge -# HELP linknotmerge_bench_mongo_throughput_per_sec Link-Not-Merge benchmark Mongo throughput metrics (operations per second). -# TYPE linknotmerge_bench_mongo_throughput_per_sec gauge -# HELP linknotmerge_bench_allocated_mb Link-Not-Merge benchmark allocation metrics (megabytes). -# TYPE linknotmerge_bench_allocated_mb gauge -linknotmerge_bench_mean_total_ms{scenario="lnm_ingest_baseline"} 555.19836000000009 -linknotmerge_bench_p95_total_ms{scenario="lnm_ingest_baseline"} 823.49567999999999 -linknotmerge_bench_max_total_ms{scenario="lnm_ingest_baseline"} 866.62360000000001 -linknotmerge_bench_threshold_ms{scenario="lnm_ingest_baseline"} 900 -linknotmerge_bench_mean_correlation_ms{scenario="lnm_ingest_baseline"} 188.93486000000001 -linknotmerge_bench_mean_insert_ms{scenario="lnm_ingest_baseline"} 366.26350000000002 -linknotmerge_bench_mean_throughput_per_sec{scenario="lnm_ingest_baseline"} 9877.7915617562721 -linknotmerge_bench_min_throughput_per_sec{scenario="lnm_ingest_baseline"} 5769.5174698681176 -linknotmerge_bench_throughput_floor_per_sec{scenario="lnm_ingest_baseline"} 5500 -linknotmerge_bench_mean_mongo_throughput_per_sec{scenario="lnm_ingest_baseline"} 15338.085148262326 -linknotmerge_bench_min_mongo_throughput_per_sec{scenario="lnm_ingest_baseline"} 8405.1257146248008 -linknotmerge_bench_mongo_throughput_floor_per_sec{scenario="lnm_ingest_baseline"} 8000 -linknotmerge_bench_max_allocated_mb{scenario="lnm_ingest_baseline"} 62.447677612304688 -linknotmerge_bench_max_allocated_threshold_mb{scenario="lnm_ingest_baseline"} 160 -linknotmerge_bench_regression_limit{scenario="lnm_ingest_baseline"} 1.1499999999999999 -linknotmerge_bench_regression_breached{scenario="lnm_ingest_baseline"} 0 -linknotmerge_bench_mean_total_ms{scenario="lnm_ingest_fanout_medium"} 785.89092000000005 -linknotmerge_bench_p95_total_ms{scenario="lnm_ingest_fanout_medium"} 841.62469999999996 -linknotmerge_bench_max_total_ms{scenario="lnm_ingest_fanout_medium"} 842.88149999999996 -linknotmerge_bench_threshold_ms{scenario="lnm_ingest_fanout_medium"} 1300 -linknotmerge_bench_mean_correlation_ms{scenario="lnm_ingest_fanout_medium"} 332.38224000000002 -linknotmerge_bench_mean_insert_ms{scenario="lnm_ingest_fanout_medium"} 453.50868000000003 -linknotmerge_bench_mean_throughput_per_sec{scenario="lnm_ingest_fanout_medium"} 
12794.954951406156 -linknotmerge_bench_min_throughput_per_sec{scenario="lnm_ingest_fanout_medium"} 11864.063928322072 -linknotmerge_bench_throughput_floor_per_sec{scenario="lnm_ingest_fanout_medium"} 8000 -linknotmerge_bench_mean_mongo_throughput_per_sec{scenario="lnm_ingest_fanout_medium"} 22086.032034175576 -linknotmerge_bench_min_mongo_throughput_per_sec{scenario="lnm_ingest_fanout_medium"} 20891.057937797712 -linknotmerge_bench_mongo_throughput_floor_per_sec{scenario="lnm_ingest_fanout_medium"} 13000 -linknotmerge_bench_max_allocated_mb{scenario="lnm_ingest_fanout_medium"} 145.83282470703125 -linknotmerge_bench_max_allocated_threshold_mb{scenario="lnm_ingest_fanout_medium"} 220 -linknotmerge_bench_regression_limit{scenario="lnm_ingest_fanout_medium"} 1.1499999999999999 -linknotmerge_bench_regression_breached{scenario="lnm_ingest_fanout_medium"} 0 -linknotmerge_bench_mean_total_ms{scenario="lnm_ingest_fanout_high"} 1299.3458000000001 -linknotmerge_bench_p95_total_ms{scenario="lnm_ingest_fanout_high"} 1367.0934400000001 -linknotmerge_bench_max_total_ms{scenario="lnm_ingest_fanout_high"} 1369.943 -linknotmerge_bench_threshold_ms{scenario="lnm_ingest_fanout_high"} 2200 -linknotmerge_bench_mean_correlation_ms{scenario="lnm_ingest_fanout_high"} 557.71925999999996 -linknotmerge_bench_mean_insert_ms{scenario="lnm_ingest_fanout_high"} 741.62653999999998 -linknotmerge_bench_mean_throughput_per_sec{scenario="lnm_ingest_fanout_high"} 11571.099129140825 -linknotmerge_bench_min_throughput_per_sec{scenario="lnm_ingest_fanout_high"} 10949.360666830664 -linknotmerge_bench_throughput_floor_per_sec{scenario="lnm_ingest_fanout_high"} 7000 -linknotmerge_bench_mean_mongo_throughput_per_sec{scenario="lnm_ingest_fanout_high"} 20232.517977793701 -linknotmerge_bench_min_mongo_throughput_per_sec{scenario="lnm_ingest_fanout_high"} 19781.676233305086 -linknotmerge_bench_mongo_throughput_floor_per_sec{scenario="lnm_ingest_fanout_high"} 13000 -linknotmerge_bench_max_allocated_mb{scenario="lnm_ingest_fanout_high"} 238.34496307373047 -linknotmerge_bench_max_allocated_threshold_mb{scenario="lnm_ingest_fanout_high"} 300 -linknotmerge_bench_regression_limit{scenario="lnm_ingest_fanout_high"} 1.1499999999999999 -linknotmerge_bench_regression_breached{scenario="lnm_ingest_fanout_high"} 0 +# HELP linknotmerge_bench_total_ms Link-Not-Merge benchmark total duration metrics (milliseconds). +# TYPE linknotmerge_bench_total_ms gauge +# HELP linknotmerge_bench_correlation_ms Link-Not-Merge benchmark correlation duration metrics (milliseconds). +# TYPE linknotmerge_bench_correlation_ms gauge +# HELP linknotmerge_bench_insert_ms Link-Not-Merge benchmark Mongo insert duration metrics (milliseconds). +# TYPE linknotmerge_bench_insert_ms gauge +# HELP linknotmerge_bench_throughput_per_sec Link-Not-Merge benchmark throughput metrics (observations per second). +# TYPE linknotmerge_bench_throughput_per_sec gauge +# HELP linknotmerge_bench_mongo_throughput_per_sec Link-Not-Merge benchmark Mongo throughput metrics (operations per second). +# TYPE linknotmerge_bench_mongo_throughput_per_sec gauge +# HELP linknotmerge_bench_allocated_mb Link-Not-Merge benchmark allocation metrics (megabytes). 
+# TYPE linknotmerge_bench_allocated_mb gauge +linknotmerge_bench_mean_total_ms{scenario="lnm_ingest_baseline"} 555.19836000000009 +linknotmerge_bench_p95_total_ms{scenario="lnm_ingest_baseline"} 823.49567999999999 +linknotmerge_bench_max_total_ms{scenario="lnm_ingest_baseline"} 866.62360000000001 +linknotmerge_bench_threshold_ms{scenario="lnm_ingest_baseline"} 900 +linknotmerge_bench_mean_correlation_ms{scenario="lnm_ingest_baseline"} 188.93486000000001 +linknotmerge_bench_mean_insert_ms{scenario="lnm_ingest_baseline"} 366.26350000000002 +linknotmerge_bench_mean_throughput_per_sec{scenario="lnm_ingest_baseline"} 9877.7915617562721 +linknotmerge_bench_min_throughput_per_sec{scenario="lnm_ingest_baseline"} 5769.5174698681176 +linknotmerge_bench_throughput_floor_per_sec{scenario="lnm_ingest_baseline"} 5500 +linknotmerge_bench_mean_mongo_throughput_per_sec{scenario="lnm_ingest_baseline"} 15338.085148262326 +linknotmerge_bench_min_mongo_throughput_per_sec{scenario="lnm_ingest_baseline"} 8405.1257146248008 +linknotmerge_bench_mongo_throughput_floor_per_sec{scenario="lnm_ingest_baseline"} 8000 +linknotmerge_bench_max_allocated_mb{scenario="lnm_ingest_baseline"} 62.447677612304688 +linknotmerge_bench_max_allocated_threshold_mb{scenario="lnm_ingest_baseline"} 160 +linknotmerge_bench_regression_limit{scenario="lnm_ingest_baseline"} 1.1499999999999999 +linknotmerge_bench_regression_breached{scenario="lnm_ingest_baseline"} 0 +linknotmerge_bench_mean_total_ms{scenario="lnm_ingest_fanout_medium"} 785.89092000000005 +linknotmerge_bench_p95_total_ms{scenario="lnm_ingest_fanout_medium"} 841.62469999999996 +linknotmerge_bench_max_total_ms{scenario="lnm_ingest_fanout_medium"} 842.88149999999996 +linknotmerge_bench_threshold_ms{scenario="lnm_ingest_fanout_medium"} 1300 +linknotmerge_bench_mean_correlation_ms{scenario="lnm_ingest_fanout_medium"} 332.38224000000002 +linknotmerge_bench_mean_insert_ms{scenario="lnm_ingest_fanout_medium"} 453.50868000000003 +linknotmerge_bench_mean_throughput_per_sec{scenario="lnm_ingest_fanout_medium"} 12794.954951406156 +linknotmerge_bench_min_throughput_per_sec{scenario="lnm_ingest_fanout_medium"} 11864.063928322072 +linknotmerge_bench_throughput_floor_per_sec{scenario="lnm_ingest_fanout_medium"} 8000 +linknotmerge_bench_mean_mongo_throughput_per_sec{scenario="lnm_ingest_fanout_medium"} 22086.032034175576 +linknotmerge_bench_min_mongo_throughput_per_sec{scenario="lnm_ingest_fanout_medium"} 20891.057937797712 +linknotmerge_bench_mongo_throughput_floor_per_sec{scenario="lnm_ingest_fanout_medium"} 13000 +linknotmerge_bench_max_allocated_mb{scenario="lnm_ingest_fanout_medium"} 145.83282470703125 +linknotmerge_bench_max_allocated_threshold_mb{scenario="lnm_ingest_fanout_medium"} 220 +linknotmerge_bench_regression_limit{scenario="lnm_ingest_fanout_medium"} 1.1499999999999999 +linknotmerge_bench_regression_breached{scenario="lnm_ingest_fanout_medium"} 0 +linknotmerge_bench_mean_total_ms{scenario="lnm_ingest_fanout_high"} 1299.3458000000001 +linknotmerge_bench_p95_total_ms{scenario="lnm_ingest_fanout_high"} 1367.0934400000001 +linknotmerge_bench_max_total_ms{scenario="lnm_ingest_fanout_high"} 1369.943 +linknotmerge_bench_threshold_ms{scenario="lnm_ingest_fanout_high"} 2200 +linknotmerge_bench_mean_correlation_ms{scenario="lnm_ingest_fanout_high"} 557.71925999999996 +linknotmerge_bench_mean_insert_ms{scenario="lnm_ingest_fanout_high"} 741.62653999999998 +linknotmerge_bench_mean_throughput_per_sec{scenario="lnm_ingest_fanout_high"} 11571.099129140825 
+linknotmerge_bench_min_throughput_per_sec{scenario="lnm_ingest_fanout_high"} 10949.360666830664 +linknotmerge_bench_throughput_floor_per_sec{scenario="lnm_ingest_fanout_high"} 7000 +linknotmerge_bench_mean_mongo_throughput_per_sec{scenario="lnm_ingest_fanout_high"} 20232.517977793701 +linknotmerge_bench_min_mongo_throughput_per_sec{scenario="lnm_ingest_fanout_high"} 19781.676233305086 +linknotmerge_bench_mongo_throughput_floor_per_sec{scenario="lnm_ingest_fanout_high"} 13000 +linknotmerge_bench_max_allocated_mb{scenario="lnm_ingest_fanout_high"} 238.34496307373047 +linknotmerge_bench_max_allocated_threshold_mb{scenario="lnm_ingest_fanout_high"} 300 +linknotmerge_bench_regression_limit{scenario="lnm_ingest_fanout_high"} 1.1499999999999999 +linknotmerge_bench_regression_breached{scenario="lnm_ingest_fanout_high"} 0 diff --git a/out/linknotmerge-vex-bench.csv b/out/linknotmerge-vex-bench.csv index 34578590..b78eeb39 100644 --- a/out/linknotmerge-vex-bench.csv +++ b/out/linknotmerge-vex-bench.csv @@ -1,4 +1,4 @@ -scenario,iterations,observations,statements,events,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_observation_throughput_per_sec,min_observation_throughput_per_sec,mean_event_throughput_per_sec,min_event_throughput_per_sec,max_allocated_mb -vex_ingest_baseline,5,4000,24000,21326,842.8191,1319.3038,1432.7675,346.7277,496.0915,5349.8940,2791.7998,48942.4901,24653.0556,138.6365 -vex_ingest_medium,5,8000,64000,56720,1525.9929,1706.8900,1748.9056,533.3378,992.6552,5274.5883,4574.2892,57654.9190,48531.7353,326.8638 -vex_ingest_high,5,12000,120000,106910,2988.5094,3422.1728,3438.9364,903.3927,2085.1167,4066.2300,3489.4510,52456.9493,42358.0556,583.9903 +scenario,iterations,observations,statements,events,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_observation_throughput_per_sec,min_observation_throughput_per_sec,mean_event_throughput_per_sec,min_event_throughput_per_sec,max_allocated_mb +vex_ingest_baseline,5,4000,24000,21326,842.8191,1319.3038,1432.7675,346.7277,496.0915,5349.8940,2791.7998,48942.4901,24653.0556,138.6365 +vex_ingest_medium,5,8000,64000,56720,1525.9929,1706.8900,1748.9056,533.3378,992.6552,5274.5883,4574.2892,57654.9190,48531.7353,326.8638 +vex_ingest_high,5,12000,120000,106910,2988.5094,3422.1728,3438.9364,903.3927,2085.1167,4066.2300,3489.4510,52456.9493,42358.0556,583.9903 diff --git a/out/linknotmerge-vex-bench.json b/out/linknotmerge-vex-bench.json index cb140eef..439275b6 100644 --- a/out/linknotmerge-vex-bench.json +++ b/out/linknotmerge-vex-bench.json @@ -1,84 +1,84 @@ -{ - "schemaVersion": "linknotmerge-vex-bench/1.0", - "capturedAt": "2025-10-26T21:29:34.4007212+00:00", - "scenarios": [ - { - "id": "vex_ingest_baseline", - "label": "4k observations, 400 aliases", - "iterations": 5, - "observations": 4000, - "statements": 24000, - "events": 21326, - "meanTotalMs": 842.81914, - "p95TotalMs": 1319.3037799999997, - "maxTotalMs": 1432.7675, - "meanInsertMs": 346.72766, - "meanCorrelationMs": 496.09147999999993, - "meanObservationThroughputPerSecond": 5349.894040882909, - "minObservationThroughputPerSecond": 2791.7997860783416, - "meanEventThroughputPerSecond": 48942.49008943273, - "minEventThroughputPerSecond": 24653.055581276763, - "maxAllocatedMb": 138.63648986816406, - "thresholdMs": 2300, - "minObservationThroughputThresholdPerSecond": 1800, - "minEventThroughputThresholdPerSecond": 2000, - "maxAllocatedThresholdMb": 220, - "regression": { - "limit": 1.15, - "breached": false - } - }, - { - "id": 
"vex_ingest_medium", - "label": "8k observations, 700 aliases", - "iterations": 5, - "observations": 8000, - "statements": 64000, - "events": 56720, - "meanTotalMs": 1525.99294, - "p95TotalMs": 1706.89, - "maxTotalMs": 1748.9056, - "meanInsertMs": 533.3377800000001, - "meanCorrelationMs": 992.6551599999999, - "meanObservationThroughputPerSecond": 5274.588273225903, - "minObservationThroughputPerSecond": 4574.289201201025, - "meanEventThroughputPerSecond": 57654.91903920916, - "minEventThroughputPerSecond": 48531.73532270095, - "maxAllocatedMb": 326.8638000488281, - "thresholdMs": 3200, - "minObservationThroughputThresholdPerSecond": 2200, - "minEventThroughputThresholdPerSecond": 2500, - "maxAllocatedThresholdMb": 400, - "regression": { - "limit": 1.15, - "breached": false - } - }, - { - "id": "vex_ingest_high", - "label": "12k observations, 1100 aliases", - "iterations": 5, - "observations": 12000, - "statements": 120000, - "events": 106910, - "meanTotalMs": 2988.50936, - "p95TotalMs": 3422.1728, - "maxTotalMs": 3438.9364, - "meanInsertMs": 903.3926800000002, - "meanCorrelationMs": 2085.11668, - "meanObservationThroughputPerSecond": 4066.2299506870645, - "minObservationThroughputPerSecond": 3489.450982577055, - "meanEventThroughputPerSecond": 52456.94928323016, - "minEventThroughputPerSecond": 42358.05564361166, - "maxAllocatedMb": 583.9903411865234, - "thresholdMs": 4200, - "minObservationThroughputThresholdPerSecond": 2200, - "minEventThroughputThresholdPerSecond": 2500, - "maxAllocatedThresholdMb": 700, - "regression": { - "limit": 1.15, - "breached": false - } - } - ] +{ + "schemaVersion": "linknotmerge-vex-bench/1.0", + "capturedAt": "2025-10-26T21:29:34.4007212+00:00", + "scenarios": [ + { + "id": "vex_ingest_baseline", + "label": "4k observations, 400 aliases", + "iterations": 5, + "observations": 4000, + "statements": 24000, + "events": 21326, + "meanTotalMs": 842.81914, + "p95TotalMs": 1319.3037799999997, + "maxTotalMs": 1432.7675, + "meanInsertMs": 346.72766, + "meanCorrelationMs": 496.09147999999993, + "meanObservationThroughputPerSecond": 5349.894040882909, + "minObservationThroughputPerSecond": 2791.7997860783416, + "meanEventThroughputPerSecond": 48942.49008943273, + "minEventThroughputPerSecond": 24653.055581276763, + "maxAllocatedMb": 138.63648986816406, + "thresholdMs": 2300, + "minObservationThroughputThresholdPerSecond": 1800, + "minEventThroughputThresholdPerSecond": 2000, + "maxAllocatedThresholdMb": 220, + "regression": { + "limit": 1.15, + "breached": false + } + }, + { + "id": "vex_ingest_medium", + "label": "8k observations, 700 aliases", + "iterations": 5, + "observations": 8000, + "statements": 64000, + "events": 56720, + "meanTotalMs": 1525.99294, + "p95TotalMs": 1706.89, + "maxTotalMs": 1748.9056, + "meanInsertMs": 533.3377800000001, + "meanCorrelationMs": 992.6551599999999, + "meanObservationThroughputPerSecond": 5274.588273225903, + "minObservationThroughputPerSecond": 4574.289201201025, + "meanEventThroughputPerSecond": 57654.91903920916, + "minEventThroughputPerSecond": 48531.73532270095, + "maxAllocatedMb": 326.8638000488281, + "thresholdMs": 3200, + "minObservationThroughputThresholdPerSecond": 2200, + "minEventThroughputThresholdPerSecond": 2500, + "maxAllocatedThresholdMb": 400, + "regression": { + "limit": 1.15, + "breached": false + } + }, + { + "id": "vex_ingest_high", + "label": "12k observations, 1100 aliases", + "iterations": 5, + "observations": 12000, + "statements": 120000, + "events": 106910, + "meanTotalMs": 2988.50936, + "p95TotalMs": 
3422.1728, + "maxTotalMs": 3438.9364, + "meanInsertMs": 903.3926800000002, + "meanCorrelationMs": 2085.11668, + "meanObservationThroughputPerSecond": 4066.2299506870645, + "minObservationThroughputPerSecond": 3489.450982577055, + "meanEventThroughputPerSecond": 52456.94928323016, + "minEventThroughputPerSecond": 42358.05564361166, + "maxAllocatedMb": 583.9903411865234, + "thresholdMs": 4200, + "minObservationThroughputThresholdPerSecond": 2200, + "minEventThroughputThresholdPerSecond": 2500, + "maxAllocatedThresholdMb": 700, + "regression": { + "limit": 1.15, + "breached": false + } + } + ] } \ No newline at end of file diff --git a/out/linknotmerge-vex-bench.prom b/out/linknotmerge-vex-bench.prom index e18eefe3..08cd3d88 100644 --- a/out/linknotmerge-vex-bench.prom +++ b/out/linknotmerge-vex-bench.prom @@ -1,50 +1,50 @@ -# HELP linknotmerge_vex_bench_total_ms Link-Not-Merge VEX benchmark total duration (milliseconds). -# TYPE linknotmerge_vex_bench_total_ms gauge -# HELP linknotmerge_vex_bench_throughput_per_sec Link-Not-Merge VEX benchmark observation throughput (observations per second). -# TYPE linknotmerge_vex_bench_throughput_per_sec gauge -# HELP linknotmerge_vex_bench_event_throughput_per_sec Link-Not-Merge VEX benchmark event throughput (events per second). -# TYPE linknotmerge_vex_bench_event_throughput_per_sec gauge -# HELP linknotmerge_vex_bench_allocated_mb Link-Not-Merge VEX benchmark max allocations (megabytes). -# TYPE linknotmerge_vex_bench_allocated_mb gauge -linknotmerge_vex_bench_mean_total_ms{scenario="vex_ingest_baseline"} 842.81913999999995 -linknotmerge_vex_bench_p95_total_ms{scenario="vex_ingest_baseline"} 1319.3037799999997 -linknotmerge_vex_bench_max_total_ms{scenario="vex_ingest_baseline"} 1432.7674999999999 -linknotmerge_vex_bench_threshold_ms{scenario="vex_ingest_baseline"} 2300 -linknotmerge_vex_bench_mean_observation_throughput_per_sec{scenario="vex_ingest_baseline"} 5349.8940408829094 -linknotmerge_vex_bench_min_observation_throughput_per_sec{scenario="vex_ingest_baseline"} 2791.7997860783416 -linknotmerge_vex_bench_observation_throughput_floor_per_sec{scenario="vex_ingest_baseline"} 1800 -linknotmerge_vex_bench_mean_event_throughput_per_sec{scenario="vex_ingest_baseline"} 48942.490089432729 -linknotmerge_vex_bench_min_event_throughput_per_sec{scenario="vex_ingest_baseline"} 24653.055581276763 -linknotmerge_vex_bench_event_throughput_floor_per_sec{scenario="vex_ingest_baseline"} 2000 -linknotmerge_vex_bench_max_allocated_mb{scenario="vex_ingest_baseline"} 138.63648986816406 -linknotmerge_vex_bench_max_allocated_threshold_mb{scenario="vex_ingest_baseline"} 220 -linknotmerge_vex_bench_regression_limit{scenario="vex_ingest_baseline"} 1.1499999999999999 -linknotmerge_vex_bench_regression_breached{scenario="vex_ingest_baseline"} 0 -linknotmerge_vex_bench_mean_total_ms{scenario="vex_ingest_medium"} 1525.9929400000001 -linknotmerge_vex_bench_p95_total_ms{scenario="vex_ingest_medium"} 1706.8900000000001 -linknotmerge_vex_bench_max_total_ms{scenario="vex_ingest_medium"} 1748.9056 -linknotmerge_vex_bench_threshold_ms{scenario="vex_ingest_medium"} 3200 -linknotmerge_vex_bench_mean_observation_throughput_per_sec{scenario="vex_ingest_medium"} 5274.5882732259033 -linknotmerge_vex_bench_min_observation_throughput_per_sec{scenario="vex_ingest_medium"} 4574.2892012010252 -linknotmerge_vex_bench_observation_throughput_floor_per_sec{scenario="vex_ingest_medium"} 2200 -linknotmerge_vex_bench_mean_event_throughput_per_sec{scenario="vex_ingest_medium"} 57654.919039209162 
-linknotmerge_vex_bench_min_event_throughput_per_sec{scenario="vex_ingest_medium"} 48531.735322700952 -linknotmerge_vex_bench_event_throughput_floor_per_sec{scenario="vex_ingest_medium"} 2500 -linknotmerge_vex_bench_max_allocated_mb{scenario="vex_ingest_medium"} 326.86380004882812 -linknotmerge_vex_bench_max_allocated_threshold_mb{scenario="vex_ingest_medium"} 400 -linknotmerge_vex_bench_regression_limit{scenario="vex_ingest_medium"} 1.1499999999999999 -linknotmerge_vex_bench_regression_breached{scenario="vex_ingest_medium"} 0 -linknotmerge_vex_bench_mean_total_ms{scenario="vex_ingest_high"} 2988.50936 -linknotmerge_vex_bench_p95_total_ms{scenario="vex_ingest_high"} 3422.1727999999998 -linknotmerge_vex_bench_max_total_ms{scenario="vex_ingest_high"} 3438.9364 -linknotmerge_vex_bench_threshold_ms{scenario="vex_ingest_high"} 4200 -linknotmerge_vex_bench_mean_observation_throughput_per_sec{scenario="vex_ingest_high"} 4066.2299506870645 -linknotmerge_vex_bench_min_observation_throughput_per_sec{scenario="vex_ingest_high"} 3489.4509825770551 -linknotmerge_vex_bench_observation_throughput_floor_per_sec{scenario="vex_ingest_high"} 2200 -linknotmerge_vex_bench_mean_event_throughput_per_sec{scenario="vex_ingest_high"} 52456.949283230162 -linknotmerge_vex_bench_min_event_throughput_per_sec{scenario="vex_ingest_high"} 42358.05564361166 -linknotmerge_vex_bench_event_throughput_floor_per_sec{scenario="vex_ingest_high"} 2500 -linknotmerge_vex_bench_max_allocated_mb{scenario="vex_ingest_high"} 583.99034118652344 -linknotmerge_vex_bench_max_allocated_threshold_mb{scenario="vex_ingest_high"} 700 -linknotmerge_vex_bench_regression_limit{scenario="vex_ingest_high"} 1.1499999999999999 -linknotmerge_vex_bench_regression_breached{scenario="vex_ingest_high"} 0 +# HELP linknotmerge_vex_bench_total_ms Link-Not-Merge VEX benchmark total duration (milliseconds). +# TYPE linknotmerge_vex_bench_total_ms gauge +# HELP linknotmerge_vex_bench_throughput_per_sec Link-Not-Merge VEX benchmark observation throughput (observations per second). +# TYPE linknotmerge_vex_bench_throughput_per_sec gauge +# HELP linknotmerge_vex_bench_event_throughput_per_sec Link-Not-Merge VEX benchmark event throughput (events per second). +# TYPE linknotmerge_vex_bench_event_throughput_per_sec gauge +# HELP linknotmerge_vex_bench_allocated_mb Link-Not-Merge VEX benchmark max allocations (megabytes). 
+# TYPE linknotmerge_vex_bench_allocated_mb gauge +linknotmerge_vex_bench_mean_total_ms{scenario="vex_ingest_baseline"} 842.81913999999995 +linknotmerge_vex_bench_p95_total_ms{scenario="vex_ingest_baseline"} 1319.3037799999997 +linknotmerge_vex_bench_max_total_ms{scenario="vex_ingest_baseline"} 1432.7674999999999 +linknotmerge_vex_bench_threshold_ms{scenario="vex_ingest_baseline"} 2300 +linknotmerge_vex_bench_mean_observation_throughput_per_sec{scenario="vex_ingest_baseline"} 5349.8940408829094 +linknotmerge_vex_bench_min_observation_throughput_per_sec{scenario="vex_ingest_baseline"} 2791.7997860783416 +linknotmerge_vex_bench_observation_throughput_floor_per_sec{scenario="vex_ingest_baseline"} 1800 +linknotmerge_vex_bench_mean_event_throughput_per_sec{scenario="vex_ingest_baseline"} 48942.490089432729 +linknotmerge_vex_bench_min_event_throughput_per_sec{scenario="vex_ingest_baseline"} 24653.055581276763 +linknotmerge_vex_bench_event_throughput_floor_per_sec{scenario="vex_ingest_baseline"} 2000 +linknotmerge_vex_bench_max_allocated_mb{scenario="vex_ingest_baseline"} 138.63648986816406 +linknotmerge_vex_bench_max_allocated_threshold_mb{scenario="vex_ingest_baseline"} 220 +linknotmerge_vex_bench_regression_limit{scenario="vex_ingest_baseline"} 1.1499999999999999 +linknotmerge_vex_bench_regression_breached{scenario="vex_ingest_baseline"} 0 +linknotmerge_vex_bench_mean_total_ms{scenario="vex_ingest_medium"} 1525.9929400000001 +linknotmerge_vex_bench_p95_total_ms{scenario="vex_ingest_medium"} 1706.8900000000001 +linknotmerge_vex_bench_max_total_ms{scenario="vex_ingest_medium"} 1748.9056 +linknotmerge_vex_bench_threshold_ms{scenario="vex_ingest_medium"} 3200 +linknotmerge_vex_bench_mean_observation_throughput_per_sec{scenario="vex_ingest_medium"} 5274.5882732259033 +linknotmerge_vex_bench_min_observation_throughput_per_sec{scenario="vex_ingest_medium"} 4574.2892012010252 +linknotmerge_vex_bench_observation_throughput_floor_per_sec{scenario="vex_ingest_medium"} 2200 +linknotmerge_vex_bench_mean_event_throughput_per_sec{scenario="vex_ingest_medium"} 57654.919039209162 +linknotmerge_vex_bench_min_event_throughput_per_sec{scenario="vex_ingest_medium"} 48531.735322700952 +linknotmerge_vex_bench_event_throughput_floor_per_sec{scenario="vex_ingest_medium"} 2500 +linknotmerge_vex_bench_max_allocated_mb{scenario="vex_ingest_medium"} 326.86380004882812 +linknotmerge_vex_bench_max_allocated_threshold_mb{scenario="vex_ingest_medium"} 400 +linknotmerge_vex_bench_regression_limit{scenario="vex_ingest_medium"} 1.1499999999999999 +linknotmerge_vex_bench_regression_breached{scenario="vex_ingest_medium"} 0 +linknotmerge_vex_bench_mean_total_ms{scenario="vex_ingest_high"} 2988.50936 +linknotmerge_vex_bench_p95_total_ms{scenario="vex_ingest_high"} 3422.1727999999998 +linknotmerge_vex_bench_max_total_ms{scenario="vex_ingest_high"} 3438.9364 +linknotmerge_vex_bench_threshold_ms{scenario="vex_ingest_high"} 4200 +linknotmerge_vex_bench_mean_observation_throughput_per_sec{scenario="vex_ingest_high"} 4066.2299506870645 +linknotmerge_vex_bench_min_observation_throughput_per_sec{scenario="vex_ingest_high"} 3489.4509825770551 +linknotmerge_vex_bench_observation_throughput_floor_per_sec{scenario="vex_ingest_high"} 2200 +linknotmerge_vex_bench_mean_event_throughput_per_sec{scenario="vex_ingest_high"} 52456.949283230162 +linknotmerge_vex_bench_min_event_throughput_per_sec{scenario="vex_ingest_high"} 42358.05564361166 +linknotmerge_vex_bench_event_throughput_floor_per_sec{scenario="vex_ingest_high"} 2500 
+linknotmerge_vex_bench_max_allocated_mb{scenario="vex_ingest_high"} 583.99034118652344 +linknotmerge_vex_bench_max_allocated_threshold_mb{scenario="vex_ingest_high"} 700 +linknotmerge_vex_bench_regression_limit{scenario="vex_ingest_high"} 1.1499999999999999 +linknotmerge_vex_bench_regression_breached{scenario="vex_ingest_high"} 0 diff --git a/out/notify-bench.csv b/out/notify-bench.csv index d79c2c45..0030cde3 100644 --- a/out/notify-bench.csv +++ b/out/notify-bench.csv @@ -1,4 +1,4 @@ -scenario,iterations,events,deliveries,mean_ms,p95_ms,max_ms,mean_throughput_per_sec,min_throughput_per_sec,max_allocated_mb -notify_dispatch_density_05,5,5000,20000,3.4150,4.1722,4.3039,6053938.5172,4646948.1168,0.0000 -notify_dispatch_density_20,5,7500,675000,24.2274,25.8517,26.0526,27923335.5855,25909122.3141,0.0000 -notify_dispatch_density_40,5,10000,4000080,138.7387,147.7174,149.1124,28916602.9214,26825938.0172,0.0000 +scenario,iterations,events,deliveries,mean_ms,p95_ms,max_ms,mean_throughput_per_sec,min_throughput_per_sec,max_allocated_mb +notify_dispatch_density_05,5,5000,20000,3.4150,4.1722,4.3039,6053938.5172,4646948.1168,0.0000 +notify_dispatch_density_20,5,7500,675000,24.2274,25.8517,26.0526,27923335.5855,25909122.3141,0.0000 +notify_dispatch_density_40,5,10000,4000080,138.7387,147.7174,149.1124,28916602.9214,26825938.0172,0.0000 diff --git a/out/notify-bench.json b/out/notify-bench.json index 11c7f7a8..8c3e075d 100644 --- a/out/notify-bench.json +++ b/out/notify-bench.json @@ -1,84 +1,84 @@ -{ - "schemaVersion": "notify-dispatch-bench/1.0", - "capturedAt": "2025-10-26T20:28:56.3603045+00:00", - "scenarios": [ - { - "id": "notify_dispatch_density_05", - "label": "50 rules / 5% fanout", - "iterations": 5, - "totalEvents": 5000, - "totalRules": 50, - "actionsPerRule": 2, - "averageMatchesPerEvent": 2, - "minMatchesPerEvent": 2, - "maxMatchesPerEvent": 2, - "averageDeliveriesPerEvent": 4, - "totalDeliveries": 20000, - "meanMs": 3.41498, - "p95Ms": 4.17216, - "maxMs": 4.3039, - "meanThroughputPerSecond": 6053938.51717893, - "minThroughputPerSecond": 4646948.116824276, - "maxAllocatedMb": 0, - "thresholdMs": 400, - "minThroughputThresholdPerSecond": 15000, - "maxAllocatedThresholdMb": 128, - "regression": { - "limit": 1.15, - "breached": false - } - }, - { - "id": "notify_dispatch_density_20", - "label": "150 rules / 20% fanout", - "iterations": 5, - "totalEvents": 7500, - "totalRules": 150, - "actionsPerRule": 3, - "averageMatchesPerEvent": 30, - "minMatchesPerEvent": 30, - "maxMatchesPerEvent": 30, - "averageDeliveriesPerEvent": 90, - "totalDeliveries": 675000, - "meanMs": 24.2274, - "p95Ms": 25.85172, - "maxMs": 26.0526, - "meanThroughputPerSecond": 27923335.585545264, - "minThroughputPerSecond": 25909122.314087655, - "maxAllocatedMb": 0, - "thresholdMs": 650, - "minThroughputThresholdPerSecond": 30000, - "maxAllocatedThresholdMb": 192, - "regression": { - "limit": 1.15, - "breached": false - } - }, - { - "id": "notify_dispatch_density_40", - "label": "300 rules / 40% fanout", - "iterations": 5, - "totalEvents": 10000, - "totalRules": 300, - "actionsPerRule": 4, - "averageMatchesPerEvent": 100.002, - "minMatchesPerEvent": 60, - "maxMatchesPerEvent": 120, - "averageDeliveriesPerEvent": 400.008, - "totalDeliveries": 4000080, - "meanMs": 138.73866, - "p95Ms": 147.71738000000002, - "maxMs": 149.1124, - "meanThroughputPerSecond": 28916602.921385907, - "minThroughputPerSecond": 26825938.017227273, - "maxAllocatedMb": 0, - "thresholdMs": 900, - "minThroughputThresholdPerSecond": 45000, - 
"maxAllocatedThresholdMb": 256, - "regression": { - "limit": 1.15, - "breached": false - } - } - ] +{ + "schemaVersion": "notify-dispatch-bench/1.0", + "capturedAt": "2025-10-26T20:28:56.3603045+00:00", + "scenarios": [ + { + "id": "notify_dispatch_density_05", + "label": "50 rules / 5% fanout", + "iterations": 5, + "totalEvents": 5000, + "totalRules": 50, + "actionsPerRule": 2, + "averageMatchesPerEvent": 2, + "minMatchesPerEvent": 2, + "maxMatchesPerEvent": 2, + "averageDeliveriesPerEvent": 4, + "totalDeliveries": 20000, + "meanMs": 3.41498, + "p95Ms": 4.17216, + "maxMs": 4.3039, + "meanThroughputPerSecond": 6053938.51717893, + "minThroughputPerSecond": 4646948.116824276, + "maxAllocatedMb": 0, + "thresholdMs": 400, + "minThroughputThresholdPerSecond": 15000, + "maxAllocatedThresholdMb": 128, + "regression": { + "limit": 1.15, + "breached": false + } + }, + { + "id": "notify_dispatch_density_20", + "label": "150 rules / 20% fanout", + "iterations": 5, + "totalEvents": 7500, + "totalRules": 150, + "actionsPerRule": 3, + "averageMatchesPerEvent": 30, + "minMatchesPerEvent": 30, + "maxMatchesPerEvent": 30, + "averageDeliveriesPerEvent": 90, + "totalDeliveries": 675000, + "meanMs": 24.2274, + "p95Ms": 25.85172, + "maxMs": 26.0526, + "meanThroughputPerSecond": 27923335.585545264, + "minThroughputPerSecond": 25909122.314087655, + "maxAllocatedMb": 0, + "thresholdMs": 650, + "minThroughputThresholdPerSecond": 30000, + "maxAllocatedThresholdMb": 192, + "regression": { + "limit": 1.15, + "breached": false + } + }, + { + "id": "notify_dispatch_density_40", + "label": "300 rules / 40% fanout", + "iterations": 5, + "totalEvents": 10000, + "totalRules": 300, + "actionsPerRule": 4, + "averageMatchesPerEvent": 100.002, + "minMatchesPerEvent": 60, + "maxMatchesPerEvent": 120, + "averageDeliveriesPerEvent": 400.008, + "totalDeliveries": 4000080, + "meanMs": 138.73866, + "p95Ms": 147.71738000000002, + "maxMs": 149.1124, + "meanThroughputPerSecond": 28916602.921385907, + "minThroughputPerSecond": 26825938.017227273, + "maxAllocatedMb": 0, + "thresholdMs": 900, + "minThroughputThresholdPerSecond": 45000, + "maxAllocatedThresholdMb": 256, + "regression": { + "limit": 1.15, + "breached": false + } + } + ] } \ No newline at end of file diff --git a/out/notify-bench.prom b/out/notify-bench.prom index 27f97f19..e7ebe961 100644 --- a/out/notify-bench.prom +++ b/out/notify-bench.prom @@ -1,39 +1,39 @@ -# HELP notify_dispatch_bench_duration_ms Notify dispatch benchmark duration metrics (milliseconds). -# TYPE notify_dispatch_bench_duration_ms gauge -# HELP notify_dispatch_bench_throughput_per_sec Notify dispatch benchmark throughput metrics (deliveries per second). -# TYPE notify_dispatch_bench_throughput_per_sec gauge -# HELP notify_dispatch_bench_allocation_mb Notify dispatch benchmark allocation metrics (megabytes). 
-# TYPE notify_dispatch_bench_allocation_mb gauge -notify_dispatch_bench_mean_ms{scenario="notify_dispatch_density_05"} 3.4149799999999999 -notify_dispatch_bench_p95_ms{scenario="notify_dispatch_density_05"} 4.1721599999999999 -notify_dispatch_bench_max_ms{scenario="notify_dispatch_density_05"} 4.3038999999999996 -notify_dispatch_bench_threshold_ms{scenario="notify_dispatch_density_05"} 400 -notify_dispatch_bench_mean_throughput_per_sec{scenario="notify_dispatch_density_05"} 6053938.5171789303 -notify_dispatch_bench_min_throughput_per_sec{scenario="notify_dispatch_density_05"} 4646948.1168242758 -notify_dispatch_bench_min_throughput_threshold_per_sec{scenario="notify_dispatch_density_05"} 15000 -notify_dispatch_bench_max_allocated_mb{scenario="notify_dispatch_density_05"} 0 -notify_dispatch_bench_max_allocated_threshold_mb{scenario="notify_dispatch_density_05"} 128 -notify_dispatch_bench_regression_limit{scenario="notify_dispatch_density_05"} 1.1499999999999999 -notify_dispatch_bench_regression_breached{scenario="notify_dispatch_density_05"} 0 -notify_dispatch_bench_mean_ms{scenario="notify_dispatch_density_20"} 24.227399999999999 -notify_dispatch_bench_p95_ms{scenario="notify_dispatch_density_20"} 25.85172 -notify_dispatch_bench_max_ms{scenario="notify_dispatch_density_20"} 26.052600000000002 -notify_dispatch_bench_threshold_ms{scenario="notify_dispatch_density_20"} 650 -notify_dispatch_bench_mean_throughput_per_sec{scenario="notify_dispatch_density_20"} 27923335.585545264 -notify_dispatch_bench_min_throughput_per_sec{scenario="notify_dispatch_density_20"} 25909122.314087655 -notify_dispatch_bench_min_throughput_threshold_per_sec{scenario="notify_dispatch_density_20"} 30000 -notify_dispatch_bench_max_allocated_mb{scenario="notify_dispatch_density_20"} 0 -notify_dispatch_bench_max_allocated_threshold_mb{scenario="notify_dispatch_density_20"} 192 -notify_dispatch_bench_regression_limit{scenario="notify_dispatch_density_20"} 1.1499999999999999 -notify_dispatch_bench_regression_breached{scenario="notify_dispatch_density_20"} 0 -notify_dispatch_bench_mean_ms{scenario="notify_dispatch_density_40"} 138.73866000000001 -notify_dispatch_bench_p95_ms{scenario="notify_dispatch_density_40"} 147.71738000000002 -notify_dispatch_bench_max_ms{scenario="notify_dispatch_density_40"} 149.11240000000001 -notify_dispatch_bench_threshold_ms{scenario="notify_dispatch_density_40"} 900 -notify_dispatch_bench_mean_throughput_per_sec{scenario="notify_dispatch_density_40"} 28916602.921385907 -notify_dispatch_bench_min_throughput_per_sec{scenario="notify_dispatch_density_40"} 26825938.017227273 -notify_dispatch_bench_min_throughput_threshold_per_sec{scenario="notify_dispatch_density_40"} 45000 -notify_dispatch_bench_max_allocated_mb{scenario="notify_dispatch_density_40"} 0 -notify_dispatch_bench_max_allocated_threshold_mb{scenario="notify_dispatch_density_40"} 256 -notify_dispatch_bench_regression_limit{scenario="notify_dispatch_density_40"} 1.1499999999999999 -notify_dispatch_bench_regression_breached{scenario="notify_dispatch_density_40"} 0 +# HELP notify_dispatch_bench_duration_ms Notify dispatch benchmark duration metrics (milliseconds). +# TYPE notify_dispatch_bench_duration_ms gauge +# HELP notify_dispatch_bench_throughput_per_sec Notify dispatch benchmark throughput metrics (deliveries per second). +# TYPE notify_dispatch_bench_throughput_per_sec gauge +# HELP notify_dispatch_bench_allocation_mb Notify dispatch benchmark allocation metrics (megabytes). 
+# TYPE notify_dispatch_bench_allocation_mb gauge +notify_dispatch_bench_mean_ms{scenario="notify_dispatch_density_05"} 3.4149799999999999 +notify_dispatch_bench_p95_ms{scenario="notify_dispatch_density_05"} 4.1721599999999999 +notify_dispatch_bench_max_ms{scenario="notify_dispatch_density_05"} 4.3038999999999996 +notify_dispatch_bench_threshold_ms{scenario="notify_dispatch_density_05"} 400 +notify_dispatch_bench_mean_throughput_per_sec{scenario="notify_dispatch_density_05"} 6053938.5171789303 +notify_dispatch_bench_min_throughput_per_sec{scenario="notify_dispatch_density_05"} 4646948.1168242758 +notify_dispatch_bench_min_throughput_threshold_per_sec{scenario="notify_dispatch_density_05"} 15000 +notify_dispatch_bench_max_allocated_mb{scenario="notify_dispatch_density_05"} 0 +notify_dispatch_bench_max_allocated_threshold_mb{scenario="notify_dispatch_density_05"} 128 +notify_dispatch_bench_regression_limit{scenario="notify_dispatch_density_05"} 1.1499999999999999 +notify_dispatch_bench_regression_breached{scenario="notify_dispatch_density_05"} 0 +notify_dispatch_bench_mean_ms{scenario="notify_dispatch_density_20"} 24.227399999999999 +notify_dispatch_bench_p95_ms{scenario="notify_dispatch_density_20"} 25.85172 +notify_dispatch_bench_max_ms{scenario="notify_dispatch_density_20"} 26.052600000000002 +notify_dispatch_bench_threshold_ms{scenario="notify_dispatch_density_20"} 650 +notify_dispatch_bench_mean_throughput_per_sec{scenario="notify_dispatch_density_20"} 27923335.585545264 +notify_dispatch_bench_min_throughput_per_sec{scenario="notify_dispatch_density_20"} 25909122.314087655 +notify_dispatch_bench_min_throughput_threshold_per_sec{scenario="notify_dispatch_density_20"} 30000 +notify_dispatch_bench_max_allocated_mb{scenario="notify_dispatch_density_20"} 0 +notify_dispatch_bench_max_allocated_threshold_mb{scenario="notify_dispatch_density_20"} 192 +notify_dispatch_bench_regression_limit{scenario="notify_dispatch_density_20"} 1.1499999999999999 +notify_dispatch_bench_regression_breached{scenario="notify_dispatch_density_20"} 0 +notify_dispatch_bench_mean_ms{scenario="notify_dispatch_density_40"} 138.73866000000001 +notify_dispatch_bench_p95_ms{scenario="notify_dispatch_density_40"} 147.71738000000002 +notify_dispatch_bench_max_ms{scenario="notify_dispatch_density_40"} 149.11240000000001 +notify_dispatch_bench_threshold_ms{scenario="notify_dispatch_density_40"} 900 +notify_dispatch_bench_mean_throughput_per_sec{scenario="notify_dispatch_density_40"} 28916602.921385907 +notify_dispatch_bench_min_throughput_per_sec{scenario="notify_dispatch_density_40"} 26825938.017227273 +notify_dispatch_bench_min_throughput_threshold_per_sec{scenario="notify_dispatch_density_40"} 45000 +notify_dispatch_bench_max_allocated_mb{scenario="notify_dispatch_density_40"} 0 +notify_dispatch_bench_max_allocated_threshold_mb{scenario="notify_dispatch_density_40"} 256 +notify_dispatch_bench_regression_limit{scenario="notify_dispatch_density_40"} 1.1499999999999999 +notify_dispatch_bench_regression_breached{scenario="notify_dispatch_density_40"} 0 diff --git a/out/policy-bench.csv b/out/policy-bench.csv index 79cdb0d4..b5be42ff 100644 --- a/out/policy-bench.csv +++ b/out/policy-bench.csv @@ -1,2 +1,2 @@ -scenario,iterations,findings,mean_ms,p95_ms,max_ms,mean_throughput_per_sec,min_throughput_per_sec,max_allocated_mb -policy_eval_baseline,3,1000000,1109.3542,1257.7493,1280.1721,912094.5581,781144.9726,563.6901 
+scenario,iterations,findings,mean_ms,p95_ms,max_ms,mean_throughput_per_sec,min_throughput_per_sec,max_allocated_mb +policy_eval_baseline,3,1000000,1109.3542,1257.7493,1280.1721,912094.5581,781144.9726,563.6901 diff --git a/out/policy-bench.json b/out/policy-bench.json index ddf1fc54..30c8f568 100644 --- a/out/policy-bench.json +++ b/out/policy-bench.json @@ -1,25 +1,25 @@ -{ - "schemaVersion": "policy-bench/1.0", - "capturedAt": "2025-10-26T19:57:27.4363234+00:00", - "scenarios": [ - { - "id": "policy_eval_baseline", - "label": "Policy evaluation (100k components, 1M findings)", - "iterations": 3, - "findingCount": 1000000, - "meanMs": 1109.3542333333335, - "p95Ms": 1257.74929, - "maxMs": 1280.1721, - "meanThroughputPerSecond": 912094.5580512757, - "minThroughputPerSecond": 781144.9726173537, - "maxAllocatedMb": 563.6900634765625, - "thresholdMs": 20000, - "minThroughputThresholdPerSecond": 60000, - "maxAllocatedThresholdMb": 900, - "regression": { - "limit": 1.15, - "breached": false - } - } - ] +{ + "schemaVersion": "policy-bench/1.0", + "capturedAt": "2025-10-26T19:57:27.4363234+00:00", + "scenarios": [ + { + "id": "policy_eval_baseline", + "label": "Policy evaluation (100k components, 1M findings)", + "iterations": 3, + "findingCount": 1000000, + "meanMs": 1109.3542333333335, + "p95Ms": 1257.74929, + "maxMs": 1280.1721, + "meanThroughputPerSecond": 912094.5580512757, + "minThroughputPerSecond": 781144.9726173537, + "maxAllocatedMb": 563.6900634765625, + "thresholdMs": 20000, + "minThroughputThresholdPerSecond": 60000, + "maxAllocatedThresholdMb": 900, + "regression": { + "limit": 1.15, + "breached": false + } + } + ] } \ No newline at end of file diff --git a/out/policy-bench.prom b/out/policy-bench.prom index b9ecd0c0..4ee76ee8 100644 --- a/out/policy-bench.prom +++ b/out/policy-bench.prom @@ -1,17 +1,17 @@ -# HELP policy_engine_bench_duration_ms Policy Engine benchmark duration metrics (milliseconds). -# TYPE policy_engine_bench_duration_ms gauge -# HELP policy_engine_bench_throughput_per_sec Policy Engine benchmark throughput metrics (findings per second). -# TYPE policy_engine_bench_throughput_per_sec gauge -# HELP policy_engine_bench_allocation_mb Policy Engine benchmark allocation metrics (megabytes). -# TYPE policy_engine_bench_allocation_mb gauge -policy_engine_bench_mean_ms{scenario="policy_eval_baseline"} 1109.3542333333335 -policy_engine_bench_p95_ms{scenario="policy_eval_baseline"} 1257.74929 -policy_engine_bench_max_ms{scenario="policy_eval_baseline"} 1280.1721 -policy_engine_bench_threshold_ms{scenario="policy_eval_baseline"} 20000 -policy_engine_bench_mean_throughput_per_sec{scenario="policy_eval_baseline"} 912094.55805127567 -policy_engine_bench_min_throughput_per_sec{scenario="policy_eval_baseline"} 781144.97261735366 -policy_engine_bench_min_throughput_threshold_per_sec{scenario="policy_eval_baseline"} 60000 -policy_engine_bench_max_allocated_mb{scenario="policy_eval_baseline"} 563.6900634765625 -policy_engine_bench_max_allocated_threshold_mb{scenario="policy_eval_baseline"} 900 -policy_engine_bench_regression_limit{scenario="policy_eval_baseline"} 1.1499999999999999 -policy_engine_bench_regression_breached{scenario="policy_eval_baseline"} 0 +# HELP policy_engine_bench_duration_ms Policy Engine benchmark duration metrics (milliseconds). +# TYPE policy_engine_bench_duration_ms gauge +# HELP policy_engine_bench_throughput_per_sec Policy Engine benchmark throughput metrics (findings per second). 
+# TYPE policy_engine_bench_throughput_per_sec gauge +# HELP policy_engine_bench_allocation_mb Policy Engine benchmark allocation metrics (megabytes). +# TYPE policy_engine_bench_allocation_mb gauge +policy_engine_bench_mean_ms{scenario="policy_eval_baseline"} 1109.3542333333335 +policy_engine_bench_p95_ms{scenario="policy_eval_baseline"} 1257.74929 +policy_engine_bench_max_ms{scenario="policy_eval_baseline"} 1280.1721 +policy_engine_bench_threshold_ms{scenario="policy_eval_baseline"} 20000 +policy_engine_bench_mean_throughput_per_sec{scenario="policy_eval_baseline"} 912094.55805127567 +policy_engine_bench_min_throughput_per_sec{scenario="policy_eval_baseline"} 781144.97261735366 +policy_engine_bench_min_throughput_threshold_per_sec{scenario="policy_eval_baseline"} 60000 +policy_engine_bench_max_allocated_mb{scenario="policy_eval_baseline"} 563.6900634765625 +policy_engine_bench_max_allocated_threshold_mb{scenario="policy_eval_baseline"} 900 +policy_engine_bench_regression_limit{scenario="policy_eval_baseline"} 1.1499999999999999 +policy_engine_bench_regression_breached{scenario="policy_eval_baseline"} 0 diff --git a/out/policy-simulations/policy-simulation-summary.json b/out/policy-simulations/policy-simulation-summary.json index 57e092e5..53a2ce58 100644 --- a/out/policy-simulations/policy-simulation-summary.json +++ b/out/policy-simulations/policy-simulation-summary.json @@ -1,32 +1,32 @@ -[ - { - "ScenarioName": "baseline", - "Success": true, - "ChangedCount": 2, - "Failures": [], - "ActualStatuses": { - "library:pkg/openssl@1.1.1w": "Blocked", - "library:pkg/internal-runtime@1.0.0": "Warned" - } - }, - { - "ScenarioName": "internal-only", - "Success": true, - "ChangedCount": 2, - "Failures": [], - "ActualStatuses": { - "library:pkg/internal-app@2.0.0": "RequiresVex", - "library:pkg/kev-component@3.1.4": "RequiresVex" - } - }, - { - "ScenarioName": "serverless", - "Success": true, - "ChangedCount": 2, - "Failures": [], - "ActualStatuses": { - "library:pkg/aws-lambda@1.0.0": "Blocked", - "image:sha256:untrusted-base": "Blocked" - } - } +[ + { + "ScenarioName": "baseline", + "Success": true, + "ChangedCount": 2, + "Failures": [], + "ActualStatuses": { + "library:pkg/openssl@1.1.1w": "Blocked", + "library:pkg/internal-runtime@1.0.0": "Warned" + } + }, + { + "ScenarioName": "internal-only", + "Success": true, + "ChangedCount": 2, + "Failures": [], + "ActualStatuses": { + "library:pkg/internal-app@2.0.0": "RequiresVex", + "library:pkg/kev-component@3.1.4": "RequiresVex" + } + }, + { + "ScenarioName": "serverless", + "Success": true, + "ChangedCount": 2, + "Failures": [], + "ActualStatuses": { + "library:pkg/aws-lambda@1.0.0": "Blocked", + "image:sha256:untrusted-base": "Blocked" + } + } ] \ No newline at end of file diff --git a/out/tmp-cdx/Program.cs b/out/tmp-cdx/Program.cs index cbdb864e..2a5e0526 100644 --- a/out/tmp-cdx/Program.cs +++ b/out/tmp-cdx/Program.cs @@ -1,4 +1,4 @@ -using System; -using CycloneDX.Models; - -Console.WriteLine(string.Join(", ", Enum.GetNames(typeof(Component.Classification)))); +using System; +using CycloneDX.Models; + +Console.WriteLine(string.Join(", ", Enum.GetNames(typeof(Component.Classification)))); diff --git a/plugins/cli/StellaOps.Cli.Plugins.NonCore/manifest.json b/plugins/cli/StellaOps.Cli.Plugins.NonCore/manifest.json index fa42fecc..455bd987 100644 --- a/plugins/cli/StellaOps.Cli.Plugins.NonCore/manifest.json +++ b/plugins/cli/StellaOps.Cli.Plugins.NonCore/manifest.json @@ -1,21 +1,21 @@ -{ - "schemaVersion": "1.0", - "id": 
"stellaops.cli.plugins.noncore", - "displayName": "StellaOps CLI Non-core Verbs", - "version": "0.1.0", - "requiresRestart": true, - "entryPoint": { - "type": "dotnet", - "assembly": "StellaOps.Cli.Plugins.NonCore.dll", - "typeName": "StellaOps.Cli.Plugins.NonCore.NonCoreCliCommandModule" - }, - "capabilities": [ - "cli", - "excititor", - "runtime-policy", - "offline-kit" - ], - "metadata": { - "org.stellaops.restart.required": "true" - } -} +{ + "schemaVersion": "1.0", + "id": "stellaops.cli.plugins.noncore", + "displayName": "StellaOps CLI Non-core Verbs", + "version": "0.1.0", + "requiresRestart": true, + "entryPoint": { + "type": "dotnet", + "assembly": "StellaOps.Cli.Plugins.NonCore.dll", + "typeName": "StellaOps.Cli.Plugins.NonCore.NonCoreCliCommandModule" + }, + "capabilities": [ + "cli", + "excititor", + "runtime-policy", + "offline-kit" + ], + "metadata": { + "org.stellaops.restart.required": "true" + } +} diff --git a/plugins/notify/email/notify-plugin.json b/plugins/notify/email/notify-plugin.json index 56407f5f..097ec83d 100644 --- a/plugins/notify/email/notify-plugin.json +++ b/plugins/notify/email/notify-plugin.json @@ -1,18 +1,18 @@ -{ - "schemaVersion": "1.0", - "id": "stellaops.notify.connector.email", - "displayName": "StellaOps Email Notify Connector", - "version": "0.1.0-alpha", - "requiresRestart": true, - "entryPoint": { - "type": "dotnet", - "assembly": "StellaOps.Notify.Connectors.Email.dll" - }, - "capabilities": [ - "notify-connector", - "email" - ], - "metadata": { - "org.stellaops.notify.channel.type": "email" - } -} +{ + "schemaVersion": "1.0", + "id": "stellaops.notify.connector.email", + "displayName": "StellaOps Email Notify Connector", + "version": "0.1.0-alpha", + "requiresRestart": true, + "entryPoint": { + "type": "dotnet", + "assembly": "StellaOps.Notify.Connectors.Email.dll" + }, + "capabilities": [ + "notify-connector", + "email" + ], + "metadata": { + "org.stellaops.notify.channel.type": "email" + } +} diff --git a/plugins/notify/slack/notify-plugin.json b/plugins/notify/slack/notify-plugin.json index 95fb1dfb..1d7efde1 100644 --- a/plugins/notify/slack/notify-plugin.json +++ b/plugins/notify/slack/notify-plugin.json @@ -1,19 +1,19 @@ -{ - "schemaVersion": "1.0", - "id": "stellaops.notify.connector.slack", - "displayName": "StellaOps Slack Notify Connector", - "version": "0.1.0-alpha", - "requiresRestart": true, - "entryPoint": { - "type": "dotnet", - "assembly": "StellaOps.Notify.Connectors.Slack.dll" - }, - "capabilities": [ - "notify-connector", - "slack" - ], - "metadata": { - "org.stellaops.notify.channel.type": "slack", - "org.stellaops.notify.connector.requiredScopes": "chat:write,chat:write.public" - } -} +{ + "schemaVersion": "1.0", + "id": "stellaops.notify.connector.slack", + "displayName": "StellaOps Slack Notify Connector", + "version": "0.1.0-alpha", + "requiresRestart": true, + "entryPoint": { + "type": "dotnet", + "assembly": "StellaOps.Notify.Connectors.Slack.dll" + }, + "capabilities": [ + "notify-connector", + "slack" + ], + "metadata": { + "org.stellaops.notify.channel.type": "slack", + "org.stellaops.notify.connector.requiredScopes": "chat:write,chat:write.public" + } +} diff --git a/plugins/notify/teams/notify-plugin.json b/plugins/notify/teams/notify-plugin.json index 78239596..756b1298 100644 --- a/plugins/notify/teams/notify-plugin.json +++ b/plugins/notify/teams/notify-plugin.json @@ -1,19 +1,19 @@ -{ - "schemaVersion": "1.0", - "id": "stellaops.notify.connector.teams", - "displayName": "StellaOps Teams Notify Connector", - 
"version": "0.1.0-alpha", - "requiresRestart": true, - "entryPoint": { - "type": "dotnet", - "assembly": "StellaOps.Notify.Connectors.Teams.dll" - }, - "capabilities": [ - "notify-connector", - "teams" - ], - "metadata": { - "org.stellaops.notify.channel.type": "teams", - "org.stellaops.notify.connector.cardVersion": "1.5" - } -} +{ + "schemaVersion": "1.0", + "id": "stellaops.notify.connector.teams", + "displayName": "StellaOps Teams Notify Connector", + "version": "0.1.0-alpha", + "requiresRestart": true, + "entryPoint": { + "type": "dotnet", + "assembly": "StellaOps.Notify.Connectors.Teams.dll" + }, + "capabilities": [ + "notify-connector", + "teams" + ], + "metadata": { + "org.stellaops.notify.channel.type": "teams", + "org.stellaops.notify.connector.cardVersion": "1.5" + } +} diff --git a/plugins/notify/webhook/notify-plugin.json b/plugins/notify/webhook/notify-plugin.json index 32b4ead7..fb17679b 100644 --- a/plugins/notify/webhook/notify-plugin.json +++ b/plugins/notify/webhook/notify-plugin.json @@ -1,18 +1,18 @@ -{ - "schemaVersion": "1.0", - "id": "stellaops.notify.connector.webhook", - "displayName": "StellaOps Webhook Notify Connector", - "version": "0.1.0-alpha", - "requiresRestart": true, - "entryPoint": { - "type": "dotnet", - "assembly": "StellaOps.Notify.Connectors.Webhook.dll" - }, - "capabilities": [ - "notify-connector", - "webhook" - ], - "metadata": { - "org.stellaops.notify.channel.type": "webhook" - } -} +{ + "schemaVersion": "1.0", + "id": "stellaops.notify.connector.webhook", + "displayName": "StellaOps Webhook Notify Connector", + "version": "0.1.0-alpha", + "requiresRestart": true, + "entryPoint": { + "type": "dotnet", + "assembly": "StellaOps.Notify.Connectors.Webhook.dll" + }, + "capabilities": [ + "notify-connector", + "webhook" + ], + "metadata": { + "org.stellaops.notify.channel.type": "webhook" + } +} diff --git a/plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.DotNet/manifest.json b/plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.DotNet/manifest.json index e8c3b8f8..33ba4243 100644 --- a/plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.DotNet/manifest.json +++ b/plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.DotNet/manifest.json @@ -1,23 +1,23 @@ -{ - "schemaVersion": "1.0", - "id": "stellaops.analyzer.lang.dotnet", - "displayName": "StellaOps .NET Analyzer (preview)", - "version": "0.1.0", - "requiresRestart": true, - "entryPoint": { - "type": "dotnet", - "assembly": "StellaOps.Scanner.Analyzers.Lang.DotNet.dll", - "typeName": "StellaOps.Scanner.Analyzers.Lang.DotNet.DotNetAnalyzerPlugin" - }, - "capabilities": [ - "language-analyzer", - "dotnet", - "nuget" - ], - "metadata": { - "org.stellaops.analyzer.language": "dotnet", - "org.stellaops.analyzer.kind": "language", - "org.stellaops.restart.required": "true", - "org.stellaops.analyzer.status": "preview" - } -} +{ + "schemaVersion": "1.0", + "id": "stellaops.analyzer.lang.dotnet", + "displayName": "StellaOps .NET Analyzer (preview)", + "version": "0.1.0", + "requiresRestart": true, + "entryPoint": { + "type": "dotnet", + "assembly": "StellaOps.Scanner.Analyzers.Lang.DotNet.dll", + "typeName": "StellaOps.Scanner.Analyzers.Lang.DotNet.DotNetAnalyzerPlugin" + }, + "capabilities": [ + "language-analyzer", + "dotnet", + "nuget" + ], + "metadata": { + "org.stellaops.analyzer.language": "dotnet", + "org.stellaops.analyzer.kind": "language", + "org.stellaops.restart.required": "true", + "org.stellaops.analyzer.status": "preview" + } +} diff --git 
a/plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Go/manifest.json b/plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Go/manifest.json index d00c941a..3a3998fa 100644 --- a/plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Go/manifest.json +++ b/plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Go/manifest.json @@ -1,23 +1,23 @@ -{ - "schemaVersion": "1.0", - "id": "stellaops.analyzer.lang.go", - "displayName": "StellaOps Go Analyzer (preview)", - "version": "0.1.0", - "requiresRestart": true, - "entryPoint": { - "type": "dotnet", - "assembly": "StellaOps.Scanner.Analyzers.Lang.Go.dll", - "typeName": "StellaOps.Scanner.Analyzers.Lang.Go.GoAnalyzerPlugin" - }, - "capabilities": [ - "language-analyzer", - "golang", - "go" - ], - "metadata": { - "org.stellaops.analyzer.language": "go", - "org.stellaops.analyzer.kind": "language", - "org.stellaops.restart.required": "true", - "org.stellaops.analyzer.status": "preview" - } -} +{ + "schemaVersion": "1.0", + "id": "stellaops.analyzer.lang.go", + "displayName": "StellaOps Go Analyzer (preview)", + "version": "0.1.0", + "requiresRestart": true, + "entryPoint": { + "type": "dotnet", + "assembly": "StellaOps.Scanner.Analyzers.Lang.Go.dll", + "typeName": "StellaOps.Scanner.Analyzers.Lang.Go.GoAnalyzerPlugin" + }, + "capabilities": [ + "language-analyzer", + "golang", + "go" + ], + "metadata": { + "org.stellaops.analyzer.language": "go", + "org.stellaops.analyzer.kind": "language", + "org.stellaops.restart.required": "true", + "org.stellaops.analyzer.status": "preview" + } +} diff --git a/plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Node/manifest.json b/plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Node/manifest.json index 29c9a0d1..a5759e60 100644 --- a/plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Node/manifest.json +++ b/plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Node/manifest.json @@ -1,22 +1,22 @@ -{ - "schemaVersion": "1.0", - "id": "stellaops.analyzer.lang.node", - "displayName": "StellaOps Node.js Analyzer", - "version": "0.1.0", - "requiresRestart": true, - "entryPoint": { - "type": "dotnet", - "assembly": "StellaOps.Scanner.Analyzers.Lang.Node.dll", - "typeName": "StellaOps.Scanner.Analyzers.Lang.Node.NodeAnalyzerPlugin" - }, - "capabilities": [ - "language-analyzer", - "node", - "npm" - ], - "metadata": { - "org.stellaops.analyzer.language": "node", - "org.stellaops.analyzer.kind": "language", - "org.stellaops.restart.required": "true" - } -} +{ + "schemaVersion": "1.0", + "id": "stellaops.analyzer.lang.node", + "displayName": "StellaOps Node.js Analyzer", + "version": "0.1.0", + "requiresRestart": true, + "entryPoint": { + "type": "dotnet", + "assembly": "StellaOps.Scanner.Analyzers.Lang.Node.dll", + "typeName": "StellaOps.Scanner.Analyzers.Lang.Node.NodeAnalyzerPlugin" + }, + "capabilities": [ + "language-analyzer", + "node", + "npm" + ], + "metadata": { + "org.stellaops.analyzer.language": "node", + "org.stellaops.analyzer.kind": "language", + "org.stellaops.restart.required": "true" + } +} diff --git a/plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Python/manifest.json b/plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Python/manifest.json index ada19bcc..36c377ea 100644 --- a/plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Python/manifest.json +++ b/plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Python/manifest.json @@ -1,23 +1,23 @@ -{ - 
"schemaVersion": "1.0", - "id": "stellaops.analyzer.lang.python", - "displayName": "StellaOps Python Analyzer (preview)", - "version": "0.1.0", - "requiresRestart": true, - "entryPoint": { - "type": "dotnet", - "assembly": "StellaOps.Scanner.Analyzers.Lang.Python.dll", - "typeName": "StellaOps.Scanner.Analyzers.Lang.Python.PythonAnalyzerPlugin" - }, - "capabilities": [ - "language-analyzer", - "python", - "pypi" - ], - "metadata": { - "org.stellaops.analyzer.language": "python", - "org.stellaops.analyzer.kind": "language", - "org.stellaops.restart.required": "true", - "org.stellaops.analyzer.status": "preview" - } -} +{ + "schemaVersion": "1.0", + "id": "stellaops.analyzer.lang.python", + "displayName": "StellaOps Python Analyzer (preview)", + "version": "0.1.0", + "requiresRestart": true, + "entryPoint": { + "type": "dotnet", + "assembly": "StellaOps.Scanner.Analyzers.Lang.Python.dll", + "typeName": "StellaOps.Scanner.Analyzers.Lang.Python.PythonAnalyzerPlugin" + }, + "capabilities": [ + "language-analyzer", + "python", + "pypi" + ], + "metadata": { + "org.stellaops.analyzer.language": "python", + "org.stellaops.analyzer.kind": "language", + "org.stellaops.restart.required": "true", + "org.stellaops.analyzer.status": "preview" + } +} diff --git a/plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Rust/manifest.json b/plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Rust/manifest.json index 24775978..744bf8ef 100644 --- a/plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Rust/manifest.json +++ b/plugins/scanner/analyzers/lang/StellaOps.Scanner.Analyzers.Lang.Rust/manifest.json @@ -1,23 +1,23 @@ -{ - "schemaVersion": "1.0", - "id": "stellaops.analyzer.lang.rust", - "displayName": "StellaOps Rust Analyzer (preview)", - "version": "0.1.0", - "requiresRestart": true, - "entryPoint": { - "type": "dotnet", - "assembly": "StellaOps.Scanner.Analyzers.Lang.Rust.dll", - "typeName": "StellaOps.Scanner.Analyzers.Lang.Rust.RustAnalyzerPlugin" - }, - "capabilities": [ - "language-analyzer", - "rust", - "cargo" - ], - "metadata": { - "org.stellaops.analyzer.language": "rust", - "org.stellaops.analyzer.kind": "language", - "org.stellaops.restart.required": "true", - "org.stellaops.analyzer.status": "preview" - } -} +{ + "schemaVersion": "1.0", + "id": "stellaops.analyzer.lang.rust", + "displayName": "StellaOps Rust Analyzer (preview)", + "version": "0.1.0", + "requiresRestart": true, + "entryPoint": { + "type": "dotnet", + "assembly": "StellaOps.Scanner.Analyzers.Lang.Rust.dll", + "typeName": "StellaOps.Scanner.Analyzers.Lang.Rust.RustAnalyzerPlugin" + }, + "capabilities": [ + "language-analyzer", + "rust", + "cargo" + ], + "metadata": { + "org.stellaops.analyzer.language": "rust", + "org.stellaops.analyzer.kind": "language", + "org.stellaops.restart.required": "true", + "org.stellaops.analyzer.status": "preview" + } +} diff --git a/samples/api/scheduler/graph-build-job.json b/samples/api/scheduler/graph-build-job.json index c75184f3..c116c97d 100644 --- a/samples/api/scheduler/graph-build-job.json +++ b/samples/api/scheduler/graph-build-job.json @@ -1,19 +1,19 @@ -{ - "schemaVersion": "scheduler.graph-build-job@1", - "id": "gbj_20251026a", - "tenantId": "tenant-alpha", - "sbomId": "sbom_20251026", - "sbomVersionId": "sbom_ver_20251026", - "sbomDigest": "sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef", - "graphSnapshotId": "graph_snap_20251026", - "status": "running", - "trigger": "sbom-version", - "attempts": 1, - "cartographerJobId": 
"carto_job_42", - "correlationId": "evt_svc_987", - "createdAt": "2025-10-26T12:00:00+00:00", - "startedAt": "2025-10-26T12:00:05+00:00", - "metadata": { - "sbomEventId": "sbom_evt_20251026" - } -} +{ + "schemaVersion": "scheduler.graph-build-job@1", + "id": "gbj_20251026a", + "tenantId": "tenant-alpha", + "sbomId": "sbom_20251026", + "sbomVersionId": "sbom_ver_20251026", + "sbomDigest": "sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef", + "graphSnapshotId": "graph_snap_20251026", + "status": "running", + "trigger": "sbom-version", + "attempts": 1, + "cartographerJobId": "carto_job_42", + "correlationId": "evt_svc_987", + "createdAt": "2025-10-26T12:00:00+00:00", + "startedAt": "2025-10-26T12:00:05+00:00", + "metadata": { + "sbomEventId": "sbom_evt_20251026" + } +} diff --git a/samples/api/scheduler/graph-overlay-job.json b/samples/api/scheduler/graph-overlay-job.json index f6242514..b9241bc1 100644 --- a/samples/api/scheduler/graph-overlay-job.json +++ b/samples/api/scheduler/graph-overlay-job.json @@ -1,21 +1,21 @@ -{ - "schemaVersion": "scheduler.graph-overlay-job@1", - "id": "goj_20251026a", - "tenantId": "tenant-alpha", - "graphSnapshotId": "graph_snap_20251026", - "buildJobId": "gbj_20251026a", - "overlayKind": "policy", - "overlayKey": "policy@2025-10-01", - "subjects": [ - "artifact:service-api", - "artifact:service-worker" - ], - "status": "queued", - "trigger": "policy", - "attempts": 0, - "correlationId": "policy_run_321", - "createdAt": "2025-10-26T12:05:00+00:00", - "metadata": { - "policyRunId": "policy_run_321" - } -} +{ + "schemaVersion": "scheduler.graph-overlay-job@1", + "id": "goj_20251026a", + "tenantId": "tenant-alpha", + "graphSnapshotId": "graph_snap_20251026", + "buildJobId": "gbj_20251026a", + "overlayKind": "policy", + "overlayKey": "policy@2025-10-01", + "subjects": [ + "artifact:service-api", + "artifact:service-worker" + ], + "status": "queued", + "trigger": "policy", + "attempts": 0, + "correlationId": "policy_run_321", + "createdAt": "2025-10-26T12:05:00+00:00", + "metadata": { + "policyRunId": "policy_run_321" + } +} diff --git a/samples/api/scheduler/policy-diff-summary.json b/samples/api/scheduler/policy-diff-summary.json index a8560c1b..550ce23b 100644 --- a/samples/api/scheduler/policy-diff-summary.json +++ b/samples/api/scheduler/policy-diff-summary.json @@ -1,31 +1,31 @@ -{ - "schemaVersion": "scheduler.policy-diff-summary@1", - "added": 12, - "removed": 8, - "unchanged": 657, - "bySeverity": { - "critical": { - "up": 1 - }, - "high": { - "up": 3, - "down": 4 - }, - "medium": { - "up": 2, - "down": 1 - } - }, - "ruleHits": [ - { - "ruleId": "rule-block-critical", - "ruleName": "Block Critical Findings", - "up": 1 - }, - { - "ruleId": "rule-quiet-low", - "ruleName": "Quiet Low Risk", - "down": 2 - } - ] -} +{ + "schemaVersion": "scheduler.policy-diff-summary@1", + "added": 12, + "removed": 8, + "unchanged": 657, + "bySeverity": { + "critical": { + "up": 1 + }, + "high": { + "up": 3, + "down": 4 + }, + "medium": { + "up": 2, + "down": 1 + } + }, + "ruleHits": [ + { + "ruleId": "rule-block-critical", + "ruleName": "Block Critical Findings", + "up": 1 + }, + { + "ruleId": "rule-quiet-low", + "ruleName": "Quiet Low Risk", + "down": 2 + } + ] +} diff --git a/samples/api/scheduler/policy-explain-trace.json b/samples/api/scheduler/policy-explain-trace.json index 834ecd76..b4b0a96e 100644 --- a/samples/api/scheduler/policy-explain-trace.json +++ b/samples/api/scheduler/policy-explain-trace.json @@ -1,83 +1,83 @@ -{ - "schemaVersion": 
"scheduler.policy-explain-trace@1", - "findingId": "finding:sbom:S-42/pkg:npm/lodash@4.17.21", - "policyId": "P-7", - "policyVersion": 4, - "tenantId": "default", - "runId": "run:P-7:2025-10-26:auto", - "evaluatedAt": "2025-10-26T14:06:01+00:00", - "verdict": { - "status": "blocked", - "severity": "critical", - "score": 19.5, - "rationale": "Matches rule-block-critical" - }, - "ruleChain": [ - { - "ruleId": "rule-allow-known", - "ruleName": "Allow Known Vendors", - "action": "allow", - "decision": "skipped", - "condition": "when vendor == \"trusted\"" - }, - { - "ruleId": "rule-block-critical", - "ruleName": "Block Critical Findings", - "action": "block", - "decision": "matched", - "score": 19.5, - "condition": "when severity >= Critical" - } - ], - "evidence": [ - { - "type": "advisory", - "reference": "CVE-2025-12345", - "source": "nvd", - "status": "affected", - "weight": 1, - "justification": "Vendor advisory", - "metadata": {} - }, - { - "type": "vex", - "reference": "vex:ghsa-2025-0001", - "source": "vendor", - "status": "not_affected", - "weight": 0.5, - "justification": "Runtime unreachable", - "metadata": { - "justificationid": "csaf:justification/123" - } - } - ], - "vexImpacts": [ - { - "statementId": "vex:ghsa-2025-0001", - "provider": "vendor", - "status": "not_affected", - "accepted": true, - "justification": "Runtime unreachable", - "confidence": "medium" - } - ], - "history": [ - { - "status": "blocked", - "occurredAt": "2025-10-26T14:06:01+00:00", - "actor": "policy-engine", - "note": "Initial evaluation" - }, - { - "status": "blocked", - "occurredAt": "2025-10-26T14:16:01+00:00", - "actor": "policy-engine", - "note": "Replay verification" - } - ], - "metadata": { - "componentpurl": "pkg:npm/lodash@4.17.21", - "sbomid": "sbom:S-42", - "traceid": "01HE0BJX5S4T9YCN6ZT0" - } -} +{ + "schemaVersion": "scheduler.policy-explain-trace@1", + "findingId": "finding:sbom:S-42/pkg:npm/lodash@4.17.21", + "policyId": "P-7", + "policyVersion": 4, + "tenantId": "default", + "runId": "run:P-7:2025-10-26:auto", + "evaluatedAt": "2025-10-26T14:06:01+00:00", + "verdict": { + "status": "blocked", + "severity": "critical", + "score": 19.5, + "rationale": "Matches rule-block-critical" + }, + "ruleChain": [ + { + "ruleId": "rule-allow-known", + "ruleName": "Allow Known Vendors", + "action": "allow", + "decision": "skipped", + "condition": "when vendor == \"trusted\"" + }, + { + "ruleId": "rule-block-critical", + "ruleName": "Block Critical Findings", + "action": "block", + "decision": "matched", + "score": 19.5, + "condition": "when severity >= Critical" + } + ], + "evidence": [ + { + "type": "advisory", + "reference": "CVE-2025-12345", + "source": "nvd", + "status": "affected", + "weight": 1, + "justification": "Vendor advisory", + "metadata": {} + }, + { + "type": "vex", + "reference": "vex:ghsa-2025-0001", + "source": "vendor", + "status": "not_affected", + "weight": 0.5, + "justification": "Runtime unreachable", + "metadata": { + "justificationid": "csaf:justification/123" + } + } + ], + "vexImpacts": [ + { + "statementId": "vex:ghsa-2025-0001", + "provider": "vendor", + "status": "not_affected", + "accepted": true, + "justification": "Runtime unreachable", + "confidence": "medium" + } + ], + "history": [ + { + "status": "blocked", + "occurredAt": "2025-10-26T14:06:01+00:00", + "actor": "policy-engine", + "note": "Initial evaluation" + }, + { + "status": "blocked", + "occurredAt": "2025-10-26T14:16:01+00:00", + "actor": "policy-engine", + "note": "Replay verification" + } + ], + "metadata": { 
+ "componentpurl": "pkg:npm/lodash@4.17.21", + "sbomid": "sbom:S-42", + "traceid": "01HE0BJX5S4T9YCN6ZT0" + } +} diff --git a/samples/api/scheduler/policy-run-request.json b/samples/api/scheduler/policy-run-request.json index d46c7a63..a903b7f4 100644 --- a/samples/api/scheduler/policy-run-request.json +++ b/samples/api/scheduler/policy-run-request.json @@ -1,29 +1,29 @@ -{ - "schemaVersion": "scheduler.policy-run-request@1", - "tenantId": "default", - "policyId": "P-7", - "policyVersion": 4, - "mode": "incremental", - "priority": "normal", - "runId": "run:P-7:2025-10-26:auto", - "queuedAt": "2025-10-26T14:05:00+00:00", - "requestedBy": "user:cli", - "correlationId": "req-20251026T140500Z", - "metadata": { - "source": "stella policy run", - "trigger": "cli" - }, - "inputs": { - "sbomSet": [ - "sbom:S-318", - "sbom:S-42" - ], - "advisoryCursor": "2025-10-26T13:59:00+00:00", - "vexCursor": "2025-10-26T13:58:30+00:00", - "environment": { - "exposure": "internet", - "sealed": false - }, - "captureExplain": true - } -} +{ + "schemaVersion": "scheduler.policy-run-request@1", + "tenantId": "default", + "policyId": "P-7", + "policyVersion": 4, + "mode": "incremental", + "priority": "normal", + "runId": "run:P-7:2025-10-26:auto", + "queuedAt": "2025-10-26T14:05:00+00:00", + "requestedBy": "user:cli", + "correlationId": "req-20251026T140500Z", + "metadata": { + "source": "stella policy run", + "trigger": "cli" + }, + "inputs": { + "sbomSet": [ + "sbom:S-318", + "sbom:S-42" + ], + "advisoryCursor": "2025-10-26T13:59:00+00:00", + "vexCursor": "2025-10-26T13:58:30+00:00", + "environment": { + "exposure": "internet", + "sealed": false + }, + "captureExplain": true + } +} diff --git a/samples/api/scheduler/policy-run-status.json b/samples/api/scheduler/policy-run-status.json index c5a46631..cf890587 100644 --- a/samples/api/scheduler/policy-run-status.json +++ b/samples/api/scheduler/policy-run-status.json @@ -1,41 +1,41 @@ -{ - "schemaVersion": "scheduler.policy-run-status@1", - "runId": "run:P-7:2025-10-26:auto", - "tenantId": "default", - "policyId": "P-7", - "policyVersion": 4, - "mode": "incremental", - "status": "succeeded", - "priority": "normal", - "queuedAt": "2025-10-26T14:05:00+00:00", - "startedAt": "2025-10-26T14:05:11+00:00", - "finishedAt": "2025-10-26T14:06:01+00:00", - "determinismHash": "sha256:e3c2b2f3b1aa4567890abcdef1234567890abcdef1234567890abcdef123456", - "traceId": "01HE0BJX5S4T9YCN6ZT0", - "explainUri": "blob://policy/P-7/runs/2025-10-26T14-06-01Z.json", - "metadata": { - "orchestrator": "scheduler", - "sbombatchhash": "sha256:abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234" - }, - "stats": { - "components": 1742, - "rulesFired": 68023, - "findingsWritten": 4321, - "vexOverrides": 210, - "quieted": 12, - "durationSeconds": 50.8 - }, - "inputs": { - "sbomSet": [ - "sbom:S-318", - "sbom:S-42" - ], - "advisoryCursor": "2025-10-26T13:59:00+00:00", - "vexCursor": "2025-10-26T13:58:30+00:00", - "environment": { - "exposure": "internet", - "sealed": false - }, - "captureExplain": true - } -} +{ + "schemaVersion": "scheduler.policy-run-status@1", + "runId": "run:P-7:2025-10-26:auto", + "tenantId": "default", + "policyId": "P-7", + "policyVersion": 4, + "mode": "incremental", + "status": "succeeded", + "priority": "normal", + "queuedAt": "2025-10-26T14:05:00+00:00", + "startedAt": "2025-10-26T14:05:11+00:00", + "finishedAt": "2025-10-26T14:06:01+00:00", + "determinismHash": "sha256:e3c2b2f3b1aa4567890abcdef1234567890abcdef1234567890abcdef123456", + "traceId": 
"01HE0BJX5S4T9YCN6ZT0", + "explainUri": "blob://policy/P-7/runs/2025-10-26T14-06-01Z.json", + "metadata": { + "orchestrator": "scheduler", + "sbombatchhash": "sha256:abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234" + }, + "stats": { + "components": 1742, + "rulesFired": 68023, + "findingsWritten": 4321, + "vexOverrides": 210, + "quieted": 12, + "durationSeconds": 50.8 + }, + "inputs": { + "sbomSet": [ + "sbom:S-318", + "sbom:S-42" + ], + "advisoryCursor": "2025-10-26T13:59:00+00:00", + "vexCursor": "2025-10-26T13:58:30+00:00", + "environment": { + "exposure": "internet", + "sealed": false + }, + "captureExplain": true + } +} diff --git a/samples/api/scheduler/run-summary.json b/samples/api/scheduler/run-summary.json index c9734ebd..b1942ec0 100644 --- a/samples/api/scheduler/run-summary.json +++ b/samples/api/scheduler/run-summary.json @@ -1,101 +1,101 @@ -{ - "tenantId": "tenant-alpha", - "scheduleId": "sch_20251018a", - "updatedAt": "2025-10-18T22:10:10Z", - "lastRun": { - "runId": "run_20251018_0001", - "trigger": "feedser", - "state": "completed", - "createdAt": "2025-10-18T22:03:14Z", - "startedAt": "2025-10-18T22:03:20Z", - "finishedAt": "2025-10-18T22:08:45Z", - "stats": { - "candidates": 1280, - "deduped": 910, - "queued": 0, - "completed": 910, - "deltas": 42, - "newCriticals": 7, - "newHigh": 11, - "newMedium": 18, - "newLow": 6 - }, - "error": null - }, - "recent": [ - { - "runId": "run_20251018_0001", - "trigger": "feedser", - "state": "completed", - "createdAt": "2025-10-18T22:03:14Z", - "startedAt": "2025-10-18T22:03:20Z", - "finishedAt": "2025-10-18T22:08:45Z", - "stats": { - "candidates": 1280, - "deduped": 910, - "queued": 0, - "completed": 910, - "deltas": 42, - "newCriticals": 7, - "newHigh": 11, - "newMedium": 18, - "newLow": 6 - }, - "error": null - }, - { - "runId": "run_20251017_0003", - "trigger": "cron", - "state": "error", - "createdAt": "2025-10-17T22:01:02Z", - "startedAt": "2025-10-17T22:01:08Z", - "finishedAt": "2025-10-17T22:04:11Z", - "stats": { - "candidates": 1040, - "deduped": 812, - "queued": 0, - "completed": 640, - "deltas": 18, - "newCriticals": 2, - "newHigh": 4, - "newMedium": 7, - "newLow": 3 - }, - "error": "scanner timeout" - }, - { - "runId": "run_20251016_0007", - "trigger": "manual", - "state": "cancelled", - "createdAt": "2025-10-16T20:00:00Z", - "startedAt": "2025-10-16T20:00:04Z", - "finishedAt": null, - "stats": { - "candidates": 820, - "deduped": 640, - "queued": 0, - "completed": 0, - "deltas": 0, - "newCriticals": 0, - "newHigh": 0, - "newMedium": 0, - "newLow": 0 - }, - "error": null - } - ], - "counters": { - "total": 3, - "planning": 0, - "queued": 0, - "running": 0, - "completed": 1, - "error": 1, - "cancelled": 1, - "totalDeltas": 60, - "totalNewCriticals": 9, - "totalNewHigh": 15, - "totalNewMedium": 25, - "totalNewLow": 9 - } -} +{ + "tenantId": "tenant-alpha", + "scheduleId": "sch_20251018a", + "updatedAt": "2025-10-18T22:10:10Z", + "lastRun": { + "runId": "run_20251018_0001", + "trigger": "feedser", + "state": "completed", + "createdAt": "2025-10-18T22:03:14Z", + "startedAt": "2025-10-18T22:03:20Z", + "finishedAt": "2025-10-18T22:08:45Z", + "stats": { + "candidates": 1280, + "deduped": 910, + "queued": 0, + "completed": 910, + "deltas": 42, + "newCriticals": 7, + "newHigh": 11, + "newMedium": 18, + "newLow": 6 + }, + "error": null + }, + "recent": [ + { + "runId": "run_20251018_0001", + "trigger": "feedser", + "state": "completed", + "createdAt": "2025-10-18T22:03:14Z", + "startedAt": "2025-10-18T22:03:20Z", + 
"finishedAt": "2025-10-18T22:08:45Z", + "stats": { + "candidates": 1280, + "deduped": 910, + "queued": 0, + "completed": 910, + "deltas": 42, + "newCriticals": 7, + "newHigh": 11, + "newMedium": 18, + "newLow": 6 + }, + "error": null + }, + { + "runId": "run_20251017_0003", + "trigger": "cron", + "state": "error", + "createdAt": "2025-10-17T22:01:02Z", + "startedAt": "2025-10-17T22:01:08Z", + "finishedAt": "2025-10-17T22:04:11Z", + "stats": { + "candidates": 1040, + "deduped": 812, + "queued": 0, + "completed": 640, + "deltas": 18, + "newCriticals": 2, + "newHigh": 4, + "newMedium": 7, + "newLow": 3 + }, + "error": "scanner timeout" + }, + { + "runId": "run_20251016_0007", + "trigger": "manual", + "state": "cancelled", + "createdAt": "2025-10-16T20:00:00Z", + "startedAt": "2025-10-16T20:00:04Z", + "finishedAt": null, + "stats": { + "candidates": 820, + "deduped": 640, + "queued": 0, + "completed": 0, + "deltas": 0, + "newCriticals": 0, + "newHigh": 0, + "newMedium": 0, + "newLow": 0 + }, + "error": null + } + ], + "counters": { + "total": 3, + "planning": 0, + "queued": 0, + "running": 0, + "completed": 1, + "error": 1, + "cancelled": 1, + "totalDeltas": 60, + "totalNewCriticals": 9, + "totalNewHigh": 15, + "totalNewMedium": 25, + "totalNewLow": 9 + } +} diff --git a/samples/ci/buildx-demo/README.md b/samples/ci/buildx-demo/README.md index adc99eeb..3b6759f0 100644 --- a/samples/ci/buildx-demo/README.md +++ b/samples/ci/buildx-demo/README.md @@ -1,42 +1,42 @@ -# Buildx SBOM Demo Workflow - -This sample GitHub Actions workflow shows how to run the StellaOps BuildX generator alongside a container build. - -## What it does - -1. Publishes the `StellaOps.Scanner.Sbomer.BuildXPlugin` with the manifest copied beside the binaries. -2. Calls the plug-in `handshake` command to verify the local CAS directory. -3. Builds a tiny Alpine-based image via `docker buildx`. -4. Generates a CycloneDX SBOM from the built image with `docker sbom`. -5. Emits a descriptor + provenance placeholder referencing the freshly generated SBOM with the `descriptor` command. -6. Sends the placeholder to a mock Attestor endpoint and uploads the descriptor, SBOM, and captured request as artefacts. (Swap the mock step with your real Attestor URL + `STELLAOPS_ATTESTOR_TOKEN` secret when ready.) - -## Files - -- `github-actions-buildx-demo.yml` – workflow definition (`workflow_dispatch` + `demo/buildx` branch trigger). -- `Dockerfile` – minimal demo image. -- `github-actions-buildx-demo.yml` now captures a real SBOM via `docker sbom`. - -## Running locally - -```bash -dotnet publish src/StellaOps.Scanner.Sbomer.BuildXPlugin/StellaOps.Scanner.Sbomer.BuildXPlugin.csproj -c Release -o out/buildx - -dotnet out/buildx/StellaOps.Scanner.Sbomer.BuildXPlugin.dll handshake \ - --manifest out/buildx \ - --cas out/cas - -docker buildx build --load -t stellaops/buildx-demo:ci samples/ci/buildx-demo -DIGEST=$(docker image inspect stellaops/buildx-demo:ci --format '{{index .RepoDigests 0}}') - -docker sbom stellaops/buildx-demo:ci --format cyclonedx-json > out/buildx-sbom.cdx.json - -dotnet out/buildx/StellaOps.Scanner.Sbomer.BuildXPlugin.dll descriptor \ - --manifest out/buildx \ - --image "$DIGEST" \ - --sbom out/buildx-sbom.cdx.json \ - --sbom-name buildx-sbom.cdx.json \ - > out/buildx-descriptor.json -``` - -The descriptor JSON contains deterministic annotations and provenance placeholders ready for the Attestor. 
+# Buildx SBOM Demo Workflow + +This sample GitHub Actions workflow shows how to run the StellaOps BuildX generator alongside a container build. + +## What it does + +1. Publishes the `StellaOps.Scanner.Sbomer.BuildXPlugin` with the manifest copied beside the binaries. +2. Calls the plug-in `handshake` command to verify the local CAS directory. +3. Builds a tiny Alpine-based image via `docker buildx`. +4. Generates a CycloneDX SBOM from the built image with `docker sbom`. +5. Emits a descriptor + provenance placeholder referencing the freshly generated SBOM with the `descriptor` command. +6. Sends the placeholder to a mock Attestor endpoint and uploads the descriptor, SBOM, and captured request as artefacts. (Swap the mock step with your real Attestor URL + `STELLAOPS_ATTESTOR_TOKEN` secret when ready.) + +## Files + +- `github-actions-buildx-demo.yml` – workflow definition (`workflow_dispatch` + `demo/buildx` branch trigger). +- `Dockerfile` – minimal demo image. +- `github-actions-buildx-demo.yml` now captures a real SBOM via `docker sbom`. + +## Running locally + +```bash +dotnet publish src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/StellaOps.Scanner.Sbomer.BuildXPlugin.csproj -c Release -o out/buildx + +dotnet out/buildx/StellaOps.Scanner.Sbomer.BuildXPlugin.dll handshake \ + --manifest out/buildx \ + --cas out/cas + +docker buildx build --load -t stellaops/buildx-demo:ci samples/ci/buildx-demo +DIGEST=$(docker image inspect stellaops/buildx-demo:ci --format '{{index .RepoDigests 0}}') + +docker sbom stellaops/buildx-demo:ci --format cyclonedx-json > out/buildx-sbom.cdx.json + +dotnet out/buildx/StellaOps.Scanner.Sbomer.BuildXPlugin.dll descriptor \ + --manifest out/buildx \ + --image "$DIGEST" \ + --sbom out/buildx-sbom.cdx.json \ + --sbom-name buildx-sbom.cdx.json \ + > out/buildx-descriptor.json +``` + +The descriptor JSON contains deterministic annotations and provenance placeholders ready for the Attestor. 
diff --git a/samples/ci/buildx-demo/github-actions-buildx-demo.yml b/samples/ci/buildx-demo/github-actions-buildx-demo.yml index c79a08ab..d434f6f6 100644 --- a/samples/ci/buildx-demo/github-actions-buildx-demo.yml +++ b/samples/ci/buildx-demo/github-actions-buildx-demo.yml @@ -1,85 +1,85 @@ -name: Buildx SBOM Demo -on: - workflow_dispatch: - push: - branches: [ demo/buildx ] - -jobs: - buildx-sbom: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 - - - name: Set up .NET 10 preview - uses: actions/setup-dotnet@v4 - with: - dotnet-version: '10.0.x' - - - name: Publish StellaOps BuildX generator - run: | - dotnet publish src/StellaOps.Scanner.Sbomer.BuildXPlugin/StellaOps.Scanner.Sbomer.BuildXPlugin.csproj \ - -c Release \ - -o out/buildx - - - name: Handshake CAS - run: | - dotnet out/buildx/StellaOps.Scanner.Sbomer.BuildXPlugin.dll handshake \ - --manifest out/buildx \ - --cas out/cas - - - name: Build demo container image - run: | - docker buildx build --load -t stellaops/buildx-demo:ci samples/ci/buildx-demo - - - name: Capture image digest - id: digest - run: | - DIGEST=$(docker image inspect stellaops/buildx-demo:ci --format '{{index .RepoDigests 0}}') - echo "digest=$DIGEST" >> "$GITHUB_OUTPUT" - - - name: Generate SBOM from built image - run: | - mkdir -p out - docker sbom stellaops/buildx-demo:ci --format cyclonedx-json > out/buildx-sbom.cdx.json - - - name: Start mock Attestor - id: attestor - run: | - mkdir -p out - cat <<'PY' > out/mock-attestor.py -import json -import os -from http.server import BaseHTTPRequestHandler, HTTPServer - -class Handler(BaseHTTPRequestHandler): - def do_POST(self): - length = int(self.headers.get('Content-Length') or 0) - body = self.rfile.read(length) - with open(os.path.join('out', 'provenance-request.json'), 'wb') as fp: - fp.write(body) - self.send_response(202) - self.end_headers() - self.wfile.write(b'accepted') - - def log_message(self, format, *args): - return - -if __name__ == '__main__': - server = HTTPServer(('127.0.0.1', 8085), Handler) - try: - server.serve_forever() - except KeyboardInterrupt: - pass - finally: - server.server_close() -PY - touch out/provenance-request.json - python3 out/mock-attestor.py & - echo $! 
> out/mock-attestor.pid - +name: Buildx SBOM Demo +on: + workflow_dispatch: + push: + branches: [ demo/buildx ] + +jobs: + buildx-sbom: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Set up .NET 10 preview + uses: actions/setup-dotnet@v4 + with: + dotnet-version: '10.0.x' + + - name: Publish StellaOps BuildX generator + run: | + dotnet publish src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/StellaOps.Scanner.Sbomer.BuildXPlugin.csproj \ + -c Release \ + -o out/buildx + + - name: Handshake CAS + run: | + dotnet out/buildx/StellaOps.Scanner.Sbomer.BuildXPlugin.dll handshake \ + --manifest out/buildx \ + --cas out/cas + + - name: Build demo container image + run: | + docker buildx build --load -t stellaops/buildx-demo:ci samples/ci/buildx-demo + + - name: Capture image digest + id: digest + run: | + DIGEST=$(docker image inspect stellaops/buildx-demo:ci --format '{{index .RepoDigests 0}}') + echo "digest=$DIGEST" >> "$GITHUB_OUTPUT" + + - name: Generate SBOM from built image + run: | + mkdir -p out + docker sbom stellaops/buildx-demo:ci --format cyclonedx-json > out/buildx-sbom.cdx.json + + - name: Start mock Attestor + id: attestor + run: | + mkdir -p out + cat <<'PY' > out/mock-attestor.py +import json +import os +from http.server import BaseHTTPRequestHandler, HTTPServer + +class Handler(BaseHTTPRequestHandler): + def do_POST(self): + length = int(self.headers.get('Content-Length') or 0) + body = self.rfile.read(length) + with open(os.path.join('out', 'provenance-request.json'), 'wb') as fp: + fp.write(body) + self.send_response(202) + self.end_headers() + self.wfile.write(b'accepted') + + def log_message(self, format, *args): + return + +if __name__ == '__main__': + server = HTTPServer(('127.0.0.1', 8085), Handler) + try: + server.serve_forever() + except KeyboardInterrupt: + pass + finally: + server.server_close() +PY + touch out/provenance-request.json + python3 out/mock-attestor.py & + echo $! > out/mock-attestor.pid + - name: Emit descriptor with provenance placeholder env: IMAGE_DIGEST: ${{ steps.digest.outputs.digest }} @@ -135,19 +135,19 @@ PY if: always() run: | if [ -f out/mock-attestor.pid ]; then - kill $(cat out/mock-attestor.pid) - fi - - - name: Upload artifacts - uses: actions/upload-artifact@v4 - with: - name: stellaops-buildx-demo + kill $(cat out/mock-attestor.pid) + fi + + - name: Upload artifacts + uses: actions/upload-artifact@v4 + with: + name: stellaops-buildx-demo path: | out/buildx-descriptor.json out/buildx-sbom.cdx.json out/provenance-request.json out/buildx-descriptor-repeat.json - - - name: Show descriptor summary - run: | - cat out/buildx-descriptor.json + + - name: Show descriptor summary + run: | + cat out/buildx-descriptor.json diff --git a/samples/policy/README.md b/samples/policy/README.md index fdb6d6b8..e1cbcb7e 100644 --- a/samples/policy/README.md +++ b/samples/policy/README.md @@ -1,25 +1,25 @@ -# Policy Samples - -Curated fixtures used by CI smoke/determinism checks and example documentation. 
- -| Scenario | Policy | Findings | Expected Diff | UI/CLI Diff Fixture | -|----------|--------|----------|---------------|---------------------| -| `baseline` | `docs/examples/policies/baseline.yaml` | `samples/policy/baseline/findings.json` | `samples/policy/baseline/diffs.json` | `samples/policy/simulations/baseline/diff.json` | -| `serverless` | `docs/examples/policies/serverless.yaml` | `samples/policy/serverless/findings.json` | `samples/policy/serverless/diffs.json` | `samples/policy/simulations/serverless/diff.json` | -| `internal-only` | `docs/examples/policies/internal-only.yaml` | `samples/policy/internal-only/findings.json` | `samples/policy/internal-only/diffs.json` | `samples/policy/simulations/internal-only/diff.json` | - -Run the simulation harness locally: - -```bash -dotnet run \ - --project tools/PolicySimulationSmoke/PolicySimulationSmoke.csproj \ - -- \ - --scenario-root samples/policy/simulations \ - --output out/policy-simulations -``` - -Then inspect `out/policy-simulations/policy-simulation-summary.json` for verdict changes. - ---- - -*Last updated: 2025-10-26.* +# Policy Samples + +Curated fixtures used by CI smoke/determinism checks and example documentation. + +| Scenario | Policy | Findings | Expected Diff | UI/CLI Diff Fixture | +|----------|--------|----------|---------------|---------------------| +| `baseline` | `docs/examples/policies/baseline.yaml` | `samples/policy/baseline/findings.json` | `samples/policy/baseline/diffs.json` | `samples/policy/simulations/baseline/diff.json` | +| `serverless` | `docs/examples/policies/serverless.yaml` | `samples/policy/serverless/findings.json` | `samples/policy/serverless/diffs.json` | `samples/policy/simulations/serverless/diff.json` | +| `internal-only` | `docs/examples/policies/internal-only.yaml` | `samples/policy/internal-only/findings.json` | `samples/policy/internal-only/diffs.json` | `samples/policy/simulations/internal-only/diff.json` | + +Run the simulation harness locally: + +```bash +dotnet run \ + --project tools/PolicySimulationSmoke/PolicySimulationSmoke.csproj \ + -- \ + --scenario-root samples/policy/simulations \ + --output out/policy-simulations +``` + +Then inspect `out/policy-simulations/policy-simulation-summary.json` for verdict changes. 
+ +--- + +*Last updated: 2025-10-26.* diff --git a/samples/policy/baseline/diffs.json b/samples/policy/baseline/diffs.json index ddf10cb0..d588a353 100644 --- a/samples/policy/baseline/diffs.json +++ b/samples/policy/baseline/diffs.json @@ -1,12 +1,12 @@ -[ - { - "findingId": "library:pkg/openssl@1.1.1w", - "status": "Blocked", - "rule": "block_critical" - }, - { - "findingId": "library:pkg/internal-runtime@1.0.0", - "status": "Warned", - "rule": "alert_warn_eol_runtime" - } -] +[ + { + "findingId": "library:pkg/openssl@1.1.1w", + "status": "Blocked", + "rule": "block_critical" + }, + { + "findingId": "library:pkg/internal-runtime@1.0.0", + "status": "Warned", + "rule": "alert_warn_eol_runtime" + } +] diff --git a/samples/policy/baseline/findings.json b/samples/policy/baseline/findings.json index 565e7688..b9a008e7 100644 --- a/samples/policy/baseline/findings.json +++ b/samples/policy/baseline/findings.json @@ -1,14 +1,14 @@ -[ - { - "findingId": "library:pkg/openssl@1.1.1w", - "severity": "Critical", - "source": "NVD", - "environment": "internet" - }, - { - "findingId": "library:pkg/internal-runtime@1.0.0", - "severity": "Low", - "source": "NVD", - "tags": ["runtime:eol"] - } -] +[ + { + "findingId": "library:pkg/openssl@1.1.1w", + "severity": "Critical", + "source": "NVD", + "environment": "internet" + }, + { + "findingId": "library:pkg/internal-runtime@1.0.0", + "severity": "Low", + "source": "NVD", + "tags": ["runtime:eol"] + } +] diff --git a/samples/policy/internal-only/diffs.json b/samples/policy/internal-only/diffs.json index 749e20c3..d6b414b6 100644 --- a/samples/policy/internal-only/diffs.json +++ b/samples/policy/internal-only/diffs.json @@ -1,12 +1,12 @@ -[ - { - "findingId": "library:pkg/internal-app@2.0.0", - "status": "RequiresVex", - "rule": "accept_vendor_vex" - }, - { - "findingId": "library:pkg/kev-component@3.1.4", - "status": "RequiresVex", - "rule": "accept_vendor_vex" - } -] +[ + { + "findingId": "library:pkg/internal-app@2.0.0", + "status": "RequiresVex", + "rule": "accept_vendor_vex" + }, + { + "findingId": "library:pkg/kev-component@3.1.4", + "status": "RequiresVex", + "rule": "accept_vendor_vex" + } +] diff --git a/samples/policy/internal-only/findings.json b/samples/policy/internal-only/findings.json index c8cdb5a8..123599a5 100644 --- a/samples/policy/internal-only/findings.json +++ b/samples/policy/internal-only/findings.json @@ -1,15 +1,15 @@ -[ - { - "findingId": "library:pkg/internal-app@2.0.0", - "severity": "Medium", - "source": "GHSA", - "environment": "internal" - }, - { - "findingId": "library:pkg/kev-component@3.1.4", - "severity": "High", - "source": "NVD", - "tags": ["kev"], - "environment": "internal" - } -] +[ + { + "findingId": "library:pkg/internal-app@2.0.0", + "severity": "Medium", + "source": "GHSA", + "environment": "internal" + }, + { + "findingId": "library:pkg/kev-component@3.1.4", + "severity": "High", + "source": "NVD", + "tags": ["kev"], + "environment": "internal" + } +] diff --git a/samples/policy/serverless/diffs.json b/samples/policy/serverless/diffs.json index d956d08f..d5edb9d0 100644 --- a/samples/policy/serverless/diffs.json +++ b/samples/policy/serverless/diffs.json @@ -1,12 +1,12 @@ -[ - { - "findingId": "library:pkg/aws-lambda@1.0.0", - "status": "Blocked", - "rule": "block_any_high" - }, - { - "findingId": "image:sha256:untrusted-base", - "status": "Blocked", - "rule": "forbid_unpinned_base" - } -] +[ + { + "findingId": "library:pkg/aws-lambda@1.0.0", + "status": "Blocked", + "rule": "block_any_high" + }, + { + 
"findingId": "image:sha256:untrusted-base", + "status": "Blocked", + "rule": "forbid_unpinned_base" + } +] diff --git a/samples/policy/serverless/findings.json b/samples/policy/serverless/findings.json index 23a3fdc4..c97b5971 100644 --- a/samples/policy/serverless/findings.json +++ b/samples/policy/serverless/findings.json @@ -1,15 +1,15 @@ -[ - { - "findingId": "library:pkg/aws-lambda@1.0.0", - "severity": "High", - "source": "NVD", - "environment": "serverless" - }, - { - "findingId": "image:sha256:untrusted-base", - "severity": "Medium", - "source": "NVD", - "tags": ["image:latest-tag"], - "environment": "serverless" - } -] +[ + { + "findingId": "library:pkg/aws-lambda@1.0.0", + "severity": "High", + "source": "NVD", + "environment": "serverless" + }, + { + "findingId": "image:sha256:untrusted-base", + "severity": "Medium", + "source": "NVD", + "tags": ["image:latest-tag"], + "environment": "serverless" + } +] diff --git a/samples/policy/simulations/baseline/diff.json b/samples/policy/simulations/baseline/diff.json index 3bc68419..c7c552ad 100644 --- a/samples/policy/simulations/baseline/diff.json +++ b/samples/policy/simulations/baseline/diff.json @@ -1,23 +1,23 @@ -{ - "summary": { - "policy": "baseline", - "policyDigest": "sha256:simulation-baseline", - "changed": 2 - }, - "diffs": [ - { - "findingId": "library:pkg/openssl@1.1.1w", - "baselineStatus": "Pass", - "projectedStatus": "Blocked", - "rule": "block_critical", - "notes": "Critical severity must be remediated before deploy." - }, - { - "findingId": "library:pkg/internal-runtime@1.0.0", - "baselineStatus": "Pass", - "projectedStatus": "Warned", - "rule": "alert_warn_eol_runtime", - "notes": "Runtime marked as EOL; upgrade recommended." - } - ] -} +{ + "summary": { + "policy": "baseline", + "policyDigest": "sha256:simulation-baseline", + "changed": 2 + }, + "diffs": [ + { + "findingId": "library:pkg/openssl@1.1.1w", + "baselineStatus": "Pass", + "projectedStatus": "Blocked", + "rule": "block_critical", + "notes": "Critical severity must be remediated before deploy." + }, + { + "findingId": "library:pkg/internal-runtime@1.0.0", + "baselineStatus": "Pass", + "projectedStatus": "Warned", + "rule": "alert_warn_eol_runtime", + "notes": "Runtime marked as EOL; upgrade recommended." 
+ } + ] +} diff --git a/samples/policy/simulations/baseline/scenario.json b/samples/policy/simulations/baseline/scenario.json index b96342af..7aff55a4 100644 --- a/samples/policy/simulations/baseline/scenario.json +++ b/samples/policy/simulations/baseline/scenario.json @@ -1,21 +1,21 @@ -{ - "name": "baseline", - "policyPath": "docs/examples/policies/baseline.yaml", - "findings": [ - { - "findingId": "library:pkg/openssl@1.1.1w", - "severity": "Critical", - "source": "NVD" - }, - { - "findingId": "library:pkg/internal-runtime@1.0.0", - "severity": "Low", - "source": "NVD", - "tags": ["runtime:eol"] - } - ], - "expectedDiffs": [ - { "findingId": "library:pkg/openssl@1.1.1w", "status": "Blocked" }, - { "findingId": "library:pkg/internal-runtime@1.0.0", "status": "Warned" } - ] -} +{ + "name": "baseline", + "policyPath": "docs/examples/policies/baseline.yaml", + "findings": [ + { + "findingId": "library:pkg/openssl@1.1.1w", + "severity": "Critical", + "source": "NVD" + }, + { + "findingId": "library:pkg/internal-runtime@1.0.0", + "severity": "Low", + "source": "NVD", + "tags": ["runtime:eol"] + } + ], + "expectedDiffs": [ + { "findingId": "library:pkg/openssl@1.1.1w", "status": "Blocked" }, + { "findingId": "library:pkg/internal-runtime@1.0.0", "status": "Warned" } + ] +} diff --git a/samples/policy/simulations/internal-only/diff.json b/samples/policy/simulations/internal-only/diff.json index 4d730801..ed4fd93b 100644 --- a/samples/policy/simulations/internal-only/diff.json +++ b/samples/policy/simulations/internal-only/diff.json @@ -1,23 +1,23 @@ -{ - "summary": { - "policy": "internal-only", - "policyDigest": "sha256:simulation-internal-only", - "changed": 2 - }, - "diffs": [ - { - "findingId": "library:pkg/internal-app@2.0.0", - "baselineStatus": "Pass", - "projectedStatus": "RequiresVex", - "rule": "accept_vendor_vex", - "notes": "Trust vendor VEX statements for internal scope." - }, - { - "findingId": "library:pkg/kev-component@3.1.4", - "baselineStatus": "Pass", - "projectedStatus": "RequiresVex", - "rule": "accept_vendor_vex", - "notes": "Trust vendor VEX statements for internal scope." - } - ] -} +{ + "summary": { + "policy": "internal-only", + "policyDigest": "sha256:simulation-internal-only", + "changed": 2 + }, + "diffs": [ + { + "findingId": "library:pkg/internal-app@2.0.0", + "baselineStatus": "Pass", + "projectedStatus": "RequiresVex", + "rule": "accept_vendor_vex", + "notes": "Trust vendor VEX statements for internal scope." + }, + { + "findingId": "library:pkg/kev-component@3.1.4", + "baselineStatus": "Pass", + "projectedStatus": "RequiresVex", + "rule": "accept_vendor_vex", + "notes": "Trust vendor VEX statements for internal scope." 
+ } + ] +} diff --git a/samples/policy/simulations/internal-only/scenario.json b/samples/policy/simulations/internal-only/scenario.json index 12711308..e53b992f 100644 --- a/samples/policy/simulations/internal-only/scenario.json +++ b/samples/policy/simulations/internal-only/scenario.json @@ -1,23 +1,23 @@ -{ - "name": "internal-only", - "policyPath": "docs/examples/policies/internal-only.yaml", - "findings": [ - { - "findingId": "library:pkg/internal-app@2.0.0", - "severity": "Medium", - "source": "GHSA", - "environment": "internal" - }, - { - "findingId": "library:pkg/kev-component@3.1.4", - "severity": "High", - "source": "NVD", - "tags": ["kev"], - "environment": "internal" - } - ], - "expectedDiffs": [ - { "findingId": "library:pkg/internal-app@2.0.0", "status": "RequiresVex" }, - { "findingId": "library:pkg/kev-component@3.1.4", "status": "RequiresVex" } - ] -} +{ + "name": "internal-only", + "policyPath": "docs/examples/policies/internal-only.yaml", + "findings": [ + { + "findingId": "library:pkg/internal-app@2.0.0", + "severity": "Medium", + "source": "GHSA", + "environment": "internal" + }, + { + "findingId": "library:pkg/kev-component@3.1.4", + "severity": "High", + "source": "NVD", + "tags": ["kev"], + "environment": "internal" + } + ], + "expectedDiffs": [ + { "findingId": "library:pkg/internal-app@2.0.0", "status": "RequiresVex" }, + { "findingId": "library:pkg/kev-component@3.1.4", "status": "RequiresVex" } + ] +} diff --git a/samples/policy/simulations/serverless/diff.json b/samples/policy/simulations/serverless/diff.json index 3ff0de3c..6e9457ea 100644 --- a/samples/policy/simulations/serverless/diff.json +++ b/samples/policy/simulations/serverless/diff.json @@ -1,23 +1,23 @@ -{ - "summary": { - "policy": "serverless", - "policyDigest": "sha256:simulation-serverless", - "changed": 2 - }, - "diffs": [ - { - "findingId": "library:pkg/aws-lambda@1.0.0", - "baselineStatus": "Pass", - "projectedStatus": "Blocked", - "rule": "block_any_high", - "notes": "Serverless workloads block High+ severities." - }, - { - "findingId": "image:sha256:untrusted-base", - "baselineStatus": "Pass", - "projectedStatus": "Blocked", - "rule": "forbid_unpinned_base", - "notes": "Base image must be pinned (no :latest)." - } - ] -} +{ + "summary": { + "policy": "serverless", + "policyDigest": "sha256:simulation-serverless", + "changed": 2 + }, + "diffs": [ + { + "findingId": "library:pkg/aws-lambda@1.0.0", + "baselineStatus": "Pass", + "projectedStatus": "Blocked", + "rule": "block_any_high", + "notes": "Serverless workloads block High+ severities." + }, + { + "findingId": "image:sha256:untrusted-base", + "baselineStatus": "Pass", + "projectedStatus": "Blocked", + "rule": "forbid_unpinned_base", + "notes": "Base image must be pinned (no :latest)." 
+ } + ] +} diff --git a/samples/policy/simulations/serverless/scenario.json b/samples/policy/simulations/serverless/scenario.json index 8c465196..5a573ccb 100644 --- a/samples/policy/simulations/serverless/scenario.json +++ b/samples/policy/simulations/serverless/scenario.json @@ -1,23 +1,23 @@ -{ - "name": "serverless", - "policyPath": "docs/examples/policies/serverless.yaml", - "findings": [ - { - "findingId": "library:pkg/aws-lambda@1.0.0", - "severity": "High", - "source": "NVD", - "environment": "serverless" - }, - { - "findingId": "image:sha256:untrusted-base", - "severity": "Medium", - "source": "NVD", - "tags": ["image:latest-tag"], - "environment": "serverless" - } - ], - "expectedDiffs": [ - { "findingId": "library:pkg/aws-lambda@1.0.0", "status": "Blocked" }, - { "findingId": "image:sha256:untrusted-base", "status": "Blocked" } - ] -} +{ + "name": "serverless", + "policyPath": "docs/examples/policies/serverless.yaml", + "findings": [ + { + "findingId": "library:pkg/aws-lambda@1.0.0", + "severity": "High", + "source": "NVD", + "environment": "serverless" + }, + { + "findingId": "image:sha256:untrusted-base", + "severity": "Medium", + "source": "NVD", + "tags": ["image:latest-tag"], + "environment": "serverless" + } + ], + "expectedDiffs": [ + { "findingId": "library:pkg/aws-lambda@1.0.0", "status": "Blocked" }, + { "findingId": "image:sha256:untrusted-base", "status": "Blocked" } + ] +} diff --git a/samples/runtime/java-demo/README.md b/samples/runtime/java-demo/README.md index a9315813..6b2c3a17 100644 --- a/samples/runtime/java-demo/README.md +++ b/samples/runtime/java-demo/README.md @@ -1,5 +1,5 @@ -# Java Demo Fixture - -Minimal archive tree that exercises the Java language analyzer during microbenchmarks. The `libs/demo.jar` -artefact ships `META-INF/MANIFEST.MF` and `META-INF/maven/com.example/demo/pom.properties` entries so the -analyzer can extract Maven coordinates and manifest metadata without pulling in large third-party jars. +# Java Demo Fixture + +Minimal archive tree that exercises the Java language analyzer during microbenchmarks. The `libs/demo.jar` +artefact ships `META-INF/MANIFEST.MF` and `META-INF/maven/com.example/demo/pom.properties` entries so the +analyzer can extract Maven coordinates and manifest metadata without pulling in large third-party jars. diff --git a/scripts/export-policy-schemas.sh b/scripts/export-policy-schemas.sh index 58dc46eb..13dc1d1a 100644 --- a/scripts/export-policy-schemas.sh +++ b/scripts/export-policy-schemas.sh @@ -1,11 +1,11 @@ -#!/usr/bin/env bash -set -euo pipefail - -REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")"/.. && pwd)" -OUTPUT_DIR="${1:-$REPO_ROOT/docs/schemas}" - -pushd "$REPO_ROOT" > /dev/null - -dotnet run --project tools/PolicySchemaExporter -- "$OUTPUT_DIR" - -popd > /dev/null +#!/usr/bin/env bash +set -euo pipefail + +REPO_ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")"/.. 
&& pwd)" +OUTPUT_DIR="${1:-$REPO_ROOT/docs/schemas}" + +pushd "$REPO_ROOT" > /dev/null + +dotnet run --project tools/PolicySchemaExporter -- "$OUTPUT_DIR" + +popd > /dev/null diff --git a/scripts/rotate-policy-cli-secret.sh b/scripts/rotate-policy-cli-secret.sh index 47d99c7c..7a3dbebb 100644 --- a/scripts/rotate-policy-cli-secret.sh +++ b/scripts/rotate-policy-cli-secret.sh @@ -1,63 +1,63 @@ -#!/usr/bin/env bash -set -euo pipefail - -usage() { - cat <<'EOF' -Usage: rotate-policy-cli-secret.sh [--output ] [--dry-run] - -Generates a new random shared secret suitable for the Authority -`policy-cli` client and optionally writes it to the target file -in `etc/secrets/` with the standard header comment. - -Options: - --output Destination file (default: etc/secrets/policy-cli.secret) - --dry-run Print the generated secret to stdout without writing. - -h, --help Show this help. -EOF -} - -OUTPUT="etc/secrets/policy-cli.secret" -DRY_RUN=0 - -while [[ $# -gt 0 ]]; do - case "$1" in - --output) - OUTPUT="$2" - shift 2 - ;; - --dry-run) - DRY_RUN=1 - shift - ;; - -h|--help) - usage - exit 0 - ;; - *) - echo "Unknown argument: $1" >&2 - usage >&2 - exit 1 - ;; - esac -done - -if ! command -v openssl >/dev/null 2>&1; then - echo "openssl is required to generate secrets" >&2 - exit 1 -fi - -# Generate a 48-byte random secret, base64 encoded without padding. -RAW_SECRET=$(openssl rand -base64 48 | tr -d '\n=') -SECRET="policy-cli-${RAW_SECRET}" - -if [[ "$DRY_RUN" -eq 1 ]]; then - echo "$SECRET" - exit 0 -fi - -cat < "$OUTPUT" -# generated $(date -u +%Y-%m-%dT%H:%M:%SZ) via scripts/rotate-policy-cli-secret.sh -$SECRET -EOF - -echo "Wrote new policy-cli secret to $OUTPUT" +#!/usr/bin/env bash +set -euo pipefail + +usage() { + cat <<'EOF' +Usage: rotate-policy-cli-secret.sh [--output ] [--dry-run] + +Generates a new random shared secret suitable for the Authority +`policy-cli` client and optionally writes it to the target file +in `etc/secrets/` with the standard header comment. + +Options: + --output Destination file (default: etc/secrets/policy-cli.secret) + --dry-run Print the generated secret to stdout without writing. + -h, --help Show this help. +EOF +} + +OUTPUT="etc/secrets/policy-cli.secret" +DRY_RUN=0 + +while [[ $# -gt 0 ]]; do + case "$1" in + --output) + OUTPUT="$2" + shift 2 + ;; + --dry-run) + DRY_RUN=1 + shift + ;; + -h|--help) + usage + exit 0 + ;; + *) + echo "Unknown argument: $1" >&2 + usage >&2 + exit 1 + ;; + esac +done + +if ! command -v openssl >/dev/null 2>&1; then + echo "openssl is required to generate secrets" >&2 + exit 1 +fi + +# Generate a 48-byte random secret, base64 encoded without padding. +RAW_SECRET=$(openssl rand -base64 48 | tr -d '\n=') +SECRET="policy-cli-${RAW_SECRET}" + +if [[ "$DRY_RUN" -eq 1 ]]; then + echo "$SECRET" + exit 0 +fi + +cat < "$OUTPUT" +# generated $(date -u +%Y-%m-%dT%H:%M:%SZ) via scripts/rotate-policy-cli-secret.sh +$SECRET +EOF + +echo "Wrote new policy-cli secret to $OUTPUT" diff --git a/scripts/update-apple-fixtures.ps1 b/scripts/update-apple-fixtures.ps1 index 63cdf4b8..34241e36 100644 --- a/scripts/update-apple-fixtures.ps1 +++ b/scripts/update-apple-fixtures.ps1 @@ -1,19 +1,19 @@ -#!/usr/bin/env pwsh -Set-StrictMode -Version Latest -$ErrorActionPreference = "Stop" - -$rootDir = Split-Path -Parent $PSCommandPath -$rootDir = Join-Path $rootDir ".." 
-$rootDir = Resolve-Path $rootDir - -$env:UPDATE_APPLE_FIXTURES = "1" - -Push-Location $rootDir -try { - $sentinel = Join-Path $rootDir "src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/.update-apple-fixtures" - New-Item -ItemType File -Path $sentinel -Force | Out-Null - dotnet test "src\StellaOps.Concelier.Connector.Vndr.Apple.Tests\StellaOps.Concelier.Connector.Vndr.Apple.Tests.csproj" @Args -} -finally { - Pop-Location -} +#!/usr/bin/env pwsh +Set-StrictMode -Version Latest +$ErrorActionPreference = "Stop" + +$rootDir = Split-Path -Parent $PSCommandPath +$rootDir = Join-Path $rootDir ".." +$rootDir = Resolve-Path $rootDir + +$env:UPDATE_APPLE_FIXTURES = "1" + +Push-Location $rootDir +try { + $sentinel = Join-Path $rootDir "src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/.update-apple-fixtures" + New-Item -ItemType File -Path $sentinel -Force | Out-Null + dotnet test "src\StellaOps.Concelier.Connector.Vndr.Apple.Tests\StellaOps.Concelier.Connector.Vndr.Apple.Tests.csproj" @Args +} +finally { + Pop-Location +} diff --git a/scripts/update-apple-fixtures.sh b/scripts/update-apple-fixtures.sh index 96008c98..0640d0ed 100644 --- a/scripts/update-apple-fixtures.sh +++ b/scripts/update-apple-fixtures.sh @@ -1,14 +1,14 @@ -#!/usr/bin/env bash -set -euo pipefail - -ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)" - -export UPDATE_APPLE_FIXTURES=1 -if [ -n "${WSLENV-}" ]; then - export WSLENV="${WSLENV}:UPDATE_APPLE_FIXTURES/up" -else - export WSLENV="UPDATE_APPLE_FIXTURES/up" -fi - -touch "$ROOT_DIR/src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/.update-apple-fixtures" -( cd "$ROOT_DIR" && dotnet test "src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests.csproj" "$@" ) +#!/usr/bin/env bash +set -euo pipefail + +ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)" + +export UPDATE_APPLE_FIXTURES=1 +if [ -n "${WSLENV-}" ]; then + export WSLENV="${WSLENV}:UPDATE_APPLE_FIXTURES/up" +else + export WSLENV="UPDATE_APPLE_FIXTURES/up" +fi + +touch "$ROOT_DIR/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/.update-apple-fixtures" +( cd "$ROOT_DIR" && dotnet test "src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests.csproj" "$@" ) diff --git a/scripts/update-model-goldens.ps1 b/scripts/update-model-goldens.ps1 index 9bb20554..9c648365 100644 --- a/scripts/update-model-goldens.ps1 +++ b/scripts/update-model-goldens.ps1 @@ -1,9 +1,9 @@ -Param( - [Parameter(ValueFromRemainingArguments = $true)] - [string[]] $RestArgs -) - -$Root = Split-Path -Parent $PSScriptRoot -$env:UPDATE_GOLDENS = "1" - -dotnet test (Join-Path $Root "src/StellaOps.Concelier.Models.Tests/StellaOps.Concelier.Models.Tests.csproj") @RestArgs +Param( + [Parameter(ValueFromRemainingArguments = $true)] + [string[]] $RestArgs +) + +$Root = Split-Path -Parent $PSScriptRoot +$env:UPDATE_GOLDENS = "1" + +dotnet test (Join-Path $Root "src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/StellaOps.Concelier.Models.Tests.csproj") @RestArgs diff --git a/scripts/update-model-goldens.sh b/scripts/update-model-goldens.sh index 5e0d0984..c5d48c62 100644 --- a/scripts/update-model-goldens.sh +++ b/scripts/update-model-goldens.sh @@ -1,8 +1,8 @@ -#!/usr/bin/env bash -set -euo pipefail - -ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." 
&& pwd)" - -export UPDATE_GOLDENS=1 - -dotnet test "$ROOT_DIR/src/StellaOps.Concelier.Models.Tests/StellaOps.Concelier.Models.Tests.csproj" "$@" +#!/usr/bin/env bash +set -euo pipefail + +ROOT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)" + +export UPDATE_GOLDENS=1 + +dotnet test "$ROOT_DIR/src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/StellaOps.Concelier.Models.Tests.csproj" "$@" diff --git a/scripts/verify-notify-plugins.ps1 b/scripts/verify-notify-plugins.ps1 index ff03ae41..047eaae6 100644 --- a/scripts/verify-notify-plugins.ps1 +++ b/scripts/verify-notify-plugins.ps1 @@ -1,57 +1,57 @@ -Set-StrictMode -Version Latest -$ErrorActionPreference = 'Stop' - -$repoRoot = Split-Path -Parent $PSScriptRoot -$pluginsDir = Join-Path $repoRoot 'plugins\notify' - -$assemblies = @{ - slack = 'StellaOps.Notify.Connectors.Slack.dll' - teams = 'StellaOps.Notify.Connectors.Teams.dll' - email = 'StellaOps.Notify.Connectors.Email.dll' - webhook = 'StellaOps.Notify.Connectors.Webhook.dll' -} - -$hasFailures = $false - -foreach ($channel in $assemblies.Keys) { - $dir = Join-Path $pluginsDir $channel - if (-not (Test-Path -LiteralPath $dir -PathType Container)) { - Write-Host "ERROR: Missing plug-in directory '$dir'." - $hasFailures = $true - continue - } - - $manifest = Join-Path $dir 'notify-plugin.json' - $assembly = Join-Path $dir $assemblies[$channel] - $baseName = [System.IO.Path]::GetFileNameWithoutExtension($assemblies[$channel]) - $pdb = Join-Path $dir "$baseName.pdb" - $deps = Join-Path $dir "$baseName.deps.json" - - if (-not (Test-Path -LiteralPath $manifest -PathType Leaf)) { - Write-Host "ERROR: Missing manifest for '$channel' connector ($manifest)." - $hasFailures = $true - } - - if (-not (Test-Path -LiteralPath $assembly -PathType Leaf)) { - Write-Host "ERROR: Missing assembly for '$channel' connector ($assembly)." - $hasFailures = $true - } - - Get-ChildItem -LiteralPath $dir -File | ForEach-Object { - switch ($_.Name) { - 'notify-plugin.json' { return } - { $_.Name -eq $assemblies[$channel] } { return } - { $_.Name -eq "$baseName.pdb" } { return } - { $_.Name -eq "$baseName.deps.json" } { return } - default { - Write-Host "ERROR: Unexpected file '$($_.Name)' in '$dir'." - $hasFailures = $true - } - } - } -} - -if ($hasFailures) { - exit 1 -} -exit 0 +Set-StrictMode -Version Latest +$ErrorActionPreference = 'Stop' + +$repoRoot = Split-Path -Parent $PSScriptRoot +$pluginsDir = Join-Path $repoRoot 'plugins\notify' + +$assemblies = @{ + slack = 'StellaOps.Notify.Connectors.Slack.dll' + teams = 'StellaOps.Notify.Connectors.Teams.dll' + email = 'StellaOps.Notify.Connectors.Email.dll' + webhook = 'StellaOps.Notify.Connectors.Webhook.dll' +} + +$hasFailures = $false + +foreach ($channel in $assemblies.Keys) { + $dir = Join-Path $pluginsDir $channel + if (-not (Test-Path -LiteralPath $dir -PathType Container)) { + Write-Host "ERROR: Missing plug-in directory '$dir'." + $hasFailures = $true + continue + } + + $manifest = Join-Path $dir 'notify-plugin.json' + $assembly = Join-Path $dir $assemblies[$channel] + $baseName = [System.IO.Path]::GetFileNameWithoutExtension($assemblies[$channel]) + $pdb = Join-Path $dir "$baseName.pdb" + $deps = Join-Path $dir "$baseName.deps.json" + + if (-not (Test-Path -LiteralPath $manifest -PathType Leaf)) { + Write-Host "ERROR: Missing manifest for '$channel' connector ($manifest)." + $hasFailures = $true + } + + if (-not (Test-Path -LiteralPath $assembly -PathType Leaf)) { + Write-Host "ERROR: Missing assembly for '$channel' connector ($assembly)." 
+ $hasFailures = $true + } + + Get-ChildItem -LiteralPath $dir -File | ForEach-Object { + switch ($_.Name) { + 'notify-plugin.json' { return } + { $_.Name -eq $assemblies[$channel] } { return } + { $_.Name -eq "$baseName.pdb" } { return } + { $_.Name -eq "$baseName.deps.json" } { return } + default { + Write-Host "ERROR: Unexpected file '$($_.Name)' in '$dir'." + $hasFailures = $true + } + } + } +} + +if ($hasFailures) { + exit 1 +} +exit 0 diff --git a/scripts/verify-notify-plugins.sh b/scripts/verify-notify-plugins.sh index fe998e3c..6d732d86 100644 --- a/scripts/verify-notify-plugins.sh +++ b/scripts/verify-notify-plugins.sh @@ -1,56 +1,56 @@ -#!/usr/bin/env bash -set -euo pipefail - -repo_root="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)" -plugins_dir="${repo_root}/plugins/notify" - -declare -A assemblies=( - [slack]="StellaOps.Notify.Connectors.Slack.dll" - [teams]="StellaOps.Notify.Connectors.Teams.dll" - [email]="StellaOps.Notify.Connectors.Email.dll" - [webhook]="StellaOps.Notify.Connectors.Webhook.dll" -) - -status=0 - -for channel in "${!assemblies[@]}"; do - dir="${plugins_dir}/${channel}" - if [[ ! -d "${dir}" ]]; then - echo "ERROR: Missing plug-in directory '${dir}'." - status=1 - continue - fi - - manifest="${dir}/notify-plugin.json" - assembly="${dir}/${assemblies[$channel]}" - base="${assemblies[$channel]%.dll}" - pdb="${dir}/${base}.pdb" - deps="${dir}/${base}.deps.json" - - if [[ ! -f "${manifest}" ]]; then - echo "ERROR: Missing manifest for '${channel}' connector (${manifest})." - status=1 - fi - - if [[ ! -f "${assembly}" ]]; then - echo "ERROR: Missing assembly for '${channel}' connector (${assembly})." - status=1 - fi - - while IFS= read -r -d '' file; do - name="$(basename "${file}")" - case "${name}" in - "notify-plugin.json" \ - | "${assemblies[$channel]}" \ - | "${base}.pdb" \ - | "${base}.deps.json") - ;; - *) - echo "ERROR: Unexpected file '${name}' in '${dir}'." - status=1 - ;; - esac - done < <(find "${dir}" -maxdepth 1 -type f -print0) -done - -exit "${status}" +#!/usr/bin/env bash +set -euo pipefail + +repo_root="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)" +plugins_dir="${repo_root}/plugins/notify" + +declare -A assemblies=( + [slack]="StellaOps.Notify.Connectors.Slack.dll" + [teams]="StellaOps.Notify.Connectors.Teams.dll" + [email]="StellaOps.Notify.Connectors.Email.dll" + [webhook]="StellaOps.Notify.Connectors.Webhook.dll" +) + +status=0 + +for channel in "${!assemblies[@]}"; do + dir="${plugins_dir}/${channel}" + if [[ ! -d "${dir}" ]]; then + echo "ERROR: Missing plug-in directory '${dir}'." + status=1 + continue + fi + + manifest="${dir}/notify-plugin.json" + assembly="${dir}/${assemblies[$channel]}" + base="${assemblies[$channel]%.dll}" + pdb="${dir}/${base}.pdb" + deps="${dir}/${base}.deps.json" + + if [[ ! -f "${manifest}" ]]; then + echo "ERROR: Missing manifest for '${channel}' connector (${manifest})." + status=1 + fi + + if [[ ! -f "${assembly}" ]]; then + echo "ERROR: Missing assembly for '${channel}' connector (${assembly})." + status=1 + fi + + while IFS= read -r -d '' file; do + name="$(basename "${file}")" + case "${name}" in + "notify-plugin.json" \ + | "${assemblies[$channel]}" \ + | "${base}.pdb" \ + | "${base}.deps.json") + ;; + *) + echo "ERROR: Unexpected file '${name}' in '${dir}'." 
+ status=1 + ;; + esac + done < <(find "${dir}" -maxdepth 1 -type f -print0) +done + +exit "${status}" diff --git a/scripts/verify-policy-scopes.py b/scripts/verify-policy-scopes.py index 94b48538..e69d1606 100644 --- a/scripts/verify-policy-scopes.py +++ b/scripts/verify-policy-scopes.py @@ -1,86 +1,86 @@ -#!/usr/bin/env python3 -"""Ensure Authority policy client configs use the fine-grained scope set.""" - -from __future__ import annotations - -import sys -from pathlib import Path - -EXPECTED_SCOPES = ( - "policy:read", - "policy:author", - "policy:review", - "policy:simulate", - "findings:read", -) - - -def extract_scopes(lines: list[str], start_index: int) -> tuple[str, ...] | None: - for offset in range(1, 12): - if start_index + offset >= len(lines): - break - line = lines[start_index + offset].strip() - if not line: - continue - if line.startswith("scopes:"): - try: - raw = line.split("[", 1)[1].rsplit("]", 1)[0] - except IndexError: - return None - scopes = tuple(scope.strip().strip('"') for scope in raw.split(",")) - scopes = tuple(scope for scope in scopes if scope) - return scopes - return None - - -def validate(path: Path) -> list[str]: - errors: list[str] = [] - try: - text = path.read_text(encoding="utf-8") - except FileNotFoundError: - return [f"{path}: missing file"] - - if "policy:write" in text or "policy:submit" in text: - errors.append(f"{path}: contains legacy policy scope names (policy:write/policy:submit)") - - lines = text.splitlines() - client_indices = [idx for idx, line in enumerate(lines) if 'clientId: "policy-cli"' in line] - if not client_indices: - errors.append(f"{path}: policy-cli client registration not found") - return errors - - for idx in client_indices: - scopes = extract_scopes(lines, idx) - if scopes is None: - errors.append(f"{path}: unable to parse scopes for policy-cli client") - continue - if tuple(sorted(scopes)) != tuple(sorted(EXPECTED_SCOPES)): - errors.append( - f"{path}: unexpected policy-cli scopes {scopes}; expected {EXPECTED_SCOPES}" - ) - - return errors - - -def main(argv: list[str]) -> int: - repo_root = Path(__file__).resolve().parents[1] - targets = [ - repo_root / "etc" / "authority.yaml", - repo_root / "etc" / "authority.yaml.sample", - ] - - failures: list[str] = [] - for target in targets: - failures.extend(validate(target)) - - if failures: - for message in failures: - print(f"error: {message}", file=sys.stderr) - return 1 - - print("policy scope verification passed") - return 0 - - -if __name__ == "__main__": - raise SystemExit(main(sys.argv)) +#!/usr/bin/env python3 +"""Ensure Authority policy client configs use the fine-grained scope set.""" + +from __future__ import annotations + +import sys +from pathlib import Path + +EXPECTED_SCOPES = ( + "policy:read", + "policy:author", + "policy:review", + "policy:simulate", + "findings:read", +) + + +def extract_scopes(lines: list[str], start_index: int) -> tuple[str, ...] 
| None: + for offset in range(1, 12): + if start_index + offset >= len(lines): + break + line = lines[start_index + offset].strip() + if not line: + continue + if line.startswith("scopes:"): + try: + raw = line.split("[", 1)[1].rsplit("]", 1)[0] + except IndexError: + return None + scopes = tuple(scope.strip().strip('"') for scope in raw.split(",")) + scopes = tuple(scope for scope in scopes if scope) + return scopes + return None + + +def validate(path: Path) -> list[str]: + errors: list[str] = [] + try: + text = path.read_text(encoding="utf-8") + except FileNotFoundError: + return [f"{path}: missing file"] + + if "policy:write" in text or "policy:submit" in text: + errors.append(f"{path}: contains legacy policy scope names (policy:write/policy:submit)") + + lines = text.splitlines() + client_indices = [idx for idx, line in enumerate(lines) if 'clientId: "policy-cli"' in line] + if not client_indices: + errors.append(f"{path}: policy-cli client registration not found") + return errors + + for idx in client_indices: + scopes = extract_scopes(lines, idx) + if scopes is None: + errors.append(f"{path}: unable to parse scopes for policy-cli client") + continue + if tuple(sorted(scopes)) != tuple(sorted(EXPECTED_SCOPES)): + errors.append( + f"{path}: unexpected policy-cli scopes {scopes}; expected {EXPECTED_SCOPES}" + ) + + return errors + + +def main(argv: list[str]) -> int: + repo_root = Path(__file__).resolve().parents[1] + targets = [ + repo_root / "etc" / "authority.yaml", + repo_root / "etc" / "authority.yaml.sample", + ] + + failures: list[str] = [] + for target in targets: + failures.extend(validate(target)) + + if failures: + for message in failures: + print(f"error: {message}", file=sys.stderr) + return 1 + + print("policy scope verification passed") + return 0 + + +if __name__ == "__main__": + raise SystemExit(main(sys.argv)) diff --git a/src/StellaOps.AdvisoryAI/AGENTS.md b/src/AdvisoryAI/StellaOps.AdvisoryAI/AGENTS.md similarity index 90% rename from src/StellaOps.AdvisoryAI/AGENTS.md rename to src/AdvisoryAI/StellaOps.AdvisoryAI/AGENTS.md index 309c45dd..ff59c22f 100644 --- a/src/StellaOps.AdvisoryAI/AGENTS.md +++ b/src/AdvisoryAI/StellaOps.AdvisoryAI/AGENTS.md @@ -4,7 +4,7 @@ Deliver the Advisory AI assistant service that synthesizes advisory/VEX evidence, policy context, and SBOM data into summaries, conflict explanations, and remediation hints—always with citations and guardrails. ## Scope -- Service under `src/StellaOps.AdvisoryAI` (retrievers, deterministics, orchestrator, guardrails, inference adapters, REST APIs). +- Service under `src/AdvisoryAI/StellaOps.AdvisoryAI` (retrievers, deterministics, orchestrator, guardrails, inference adapters, REST APIs). - Batch processing for CLI/automation, caching, observability, and integration with Console, CLI, and downstream systems. - Coordination across Conseiller, Excitator, VEX Lens, SBOM Service, Policy Engine, Findings Ledger, Web Gateway, Authority, DevOps, and Docs. 
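The wrapper scripts above now point at the relocated Concelier test projects. A minimal usage sketch, assuming a POSIX shell at the repository root with the dotnet SDK and Python 3 installed (illustrative commands only, not part of the patched files):

# refresh Concelier model goldens through the updated wrapper
./scripts/update-model-goldens.sh

# confirm the Authority policy-cli client still declares the fine-grained scope set
python3 scripts/verify-policy-scopes.py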
diff --git a/src/StellaOps.AdvisoryAI/TASKS.md b/src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md similarity index 99% rename from src/StellaOps.AdvisoryAI/TASKS.md rename to src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md index 63a016d2..075019da 100644 --- a/src/StellaOps.AdvisoryAI/TASKS.md +++ b/src/AdvisoryAI/StellaOps.AdvisoryAI/TASKS.md @@ -1,12 +1,12 @@ -# Advisory AI Task Board — Epic 8 -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| AIAI-31-001 | TODO | Advisory AI Guild | CONCELIER-VULN-29-001, EXCITITOR-VULN-29-001 | Implement structured and vector retrievers for advisories/VEX with paragraph anchors and citation metadata. | Retrievers return deterministic chunks with source IDs/sections; unit tests cover CSAF/OSV/vendor formats. | -| AIAI-31-002 | TODO | Advisory AI Guild, SBOM Service Guild | SBOM-VULN-29-001 | Build SBOM context retriever (purl version timelines, dependency paths, env flags, blast radius estimator). | Retriever returns paths/metrics under SLA; tests cover ecosystems. | -| AIAI-31-003 | TODO | Advisory AI Guild | AIAI-31-001..002 | Implement deterministic toolset (version comparators, range checks, dependency analysis, policy lookup) exposed via orchestrator. | Tools validated with property tests; outputs cached; docs updated. | -| AIAI-31-004 | TODO | Advisory AI Guild | AIAI-31-001..003, AUTH-VULN-29-001 | Build orchestration pipeline for Summary/Conflict/Remediation tasks (prompt templates, tool calls, token budgets, caching). | Pipeline executes tasks deterministically; caches keyed by tuple+policy; integration tests cover tasks. | -| AIAI-31-005 | TODO | Advisory AI Guild, Security Guild | AIAI-31-004 | Implement guardrails (redaction, injection defense, output validation, citation enforcement) and fail-safe handling. | Guardrails block adversarial inputs; output validator enforces schemas; security tests pass. | -| AIAI-31-006 | TODO | Advisory AI Guild | AIAI-31-004..005 | Expose REST API endpoints (`/advisory/ai/*`) with RBAC, rate limits, OpenAPI schemas, and batching support. | Endpoints deployed with schema validation; rate limits enforced; integration tests cover error codes. | -| AIAI-31-007 | TODO | Advisory AI Guild, Observability Guild | AIAI-31-004..006 | Instrument metrics (`advisory_ai_latency`, `guardrail_blocks`, `validation_failures`, `citation_coverage`), logs, and traces; publish dashboards/alerts. | Telemetry live; dashboards approved; alerts configured. | -| AIAI-31-008 | TODO | Advisory AI Guild, DevOps Guild | AIAI-31-006..007 | Package inference on-prem container, remote inference toggle, Helm/Compose manifests, scaling guidance, offline kit instructions. | Deployment docs merged; smoke deploy executed; offline kit updated; feature flags documented. | -| AIAI-31-009 | TODO | Advisory AI Guild, QA Guild | AIAI-31-001..006 | Develop unit/golden/property/perf tests, injection harness, and regression suite; ensure determinism with seeded caches. | Test suite green; golden outputs stored; injection tests pass; perf targets documented. | +# Advisory AI Task Board — Epic 8 +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| AIAI-31-001 | TODO | Advisory AI Guild | CONCELIER-VULN-29-001, EXCITITOR-VULN-29-001 | Implement structured and vector retrievers for advisories/VEX with paragraph anchors and citation metadata. 
| Retrievers return deterministic chunks with source IDs/sections; unit tests cover CSAF/OSV/vendor formats. | +| AIAI-31-002 | TODO | Advisory AI Guild, SBOM Service Guild | SBOM-VULN-29-001 | Build SBOM context retriever (purl version timelines, dependency paths, env flags, blast radius estimator). | Retriever returns paths/metrics under SLA; tests cover ecosystems. | +| AIAI-31-003 | TODO | Advisory AI Guild | AIAI-31-001..002 | Implement deterministic toolset (version comparators, range checks, dependency analysis, policy lookup) exposed via orchestrator. | Tools validated with property tests; outputs cached; docs updated. | +| AIAI-31-004 | TODO | Advisory AI Guild | AIAI-31-001..003, AUTH-VULN-29-001 | Build orchestration pipeline for Summary/Conflict/Remediation tasks (prompt templates, tool calls, token budgets, caching). | Pipeline executes tasks deterministically; caches keyed by tuple+policy; integration tests cover tasks. | +| AIAI-31-005 | TODO | Advisory AI Guild, Security Guild | AIAI-31-004 | Implement guardrails (redaction, injection defense, output validation, citation enforcement) and fail-safe handling. | Guardrails block adversarial inputs; output validator enforces schemas; security tests pass. | +| AIAI-31-006 | TODO | Advisory AI Guild | AIAI-31-004..005 | Expose REST API endpoints (`/advisory/ai/*`) with RBAC, rate limits, OpenAPI schemas, and batching support. | Endpoints deployed with schema validation; rate limits enforced; integration tests cover error codes. | +| AIAI-31-007 | TODO | Advisory AI Guild, Observability Guild | AIAI-31-004..006 | Instrument metrics (`advisory_ai_latency`, `guardrail_blocks`, `validation_failures`, `citation_coverage`), logs, and traces; publish dashboards/alerts. | Telemetry live; dashboards approved; alerts configured. | +| AIAI-31-008 | TODO | Advisory AI Guild, DevOps Guild | AIAI-31-006..007 | Package inference on-prem container, remote inference toggle, Helm/Compose manifests, scaling guidance, offline kit instructions. | Deployment docs merged; smoke deploy executed; offline kit updated; feature flags documented. | +| AIAI-31-009 | TODO | Advisory AI Guild, QA Guild | AIAI-31-001..006 | Develop unit/golden/property/perf tests, injection harness, and regression suite; ensure determinism with seeded caches. | Test suite green; golden outputs stored; injection tests pass; perf targets documented. | diff --git a/src/StellaOps.AirGap.Controller/AGENTS.md b/src/AirGap/StellaOps.AirGap.Controller/AGENTS.md similarity index 98% rename from src/StellaOps.AirGap.Controller/AGENTS.md rename to src/AirGap/StellaOps.AirGap.Controller/AGENTS.md index 8ba4d55e..ef1fee8b 100644 --- a/src/StellaOps.AirGap.Controller/AGENTS.md +++ b/src/AirGap/StellaOps.AirGap.Controller/AGENTS.md @@ -1,16 +1,16 @@ -# StellaOps AirGap Controller Guild Charter - -## Mission -Own the sealing state machine, status APIs, and enforcement hooks that keep StellaOps compliant in sealed air-gapped environments while respecting the imposed rule. - -## Scope -- Persisted air-gap state (`sealed`, policy hash, time anchor metadata) and RBAC enforcement. -- HTTP endpoints for seal/unseal/status and integration with Authority scopes. -- Startup diagnostics that refuse to run when sealing requirements are unmet. -- Coordination with DevOps for Kubernetes/Compose egress policies. -- Telemetry and audit events reflecting sealing actions and violations. - -## Definition of Done -- Deterministic tests for seal/unseal transitions and audit logging. 
-- Integration tests covering RBAC, sealed-mode refusal, and policy hash validation. -- Documentation hooks updated in `/docs/airgap/` for each shipped feature. +# StellaOps AirGap Controller Guild Charter + +## Mission +Own the sealing state machine, status APIs, and enforcement hooks that keep StellaOps compliant in sealed air-gapped environments while respecting the imposed rule. + +## Scope +- Persisted air-gap state (`sealed`, policy hash, time anchor metadata) and RBAC enforcement. +- HTTP endpoints for seal/unseal/status and integration with Authority scopes. +- Startup diagnostics that refuse to run when sealing requirements are unmet. +- Coordination with DevOps for Kubernetes/Compose egress policies. +- Telemetry and audit events reflecting sealing actions and violations. + +## Definition of Done +- Deterministic tests for seal/unseal transitions and audit logging. +- Integration tests covering RBAC, sealed-mode refusal, and policy hash validation. +- Documentation hooks updated in `/docs/airgap/` for each shipped feature. diff --git a/src/StellaOps.AirGap.Controller/TASKS.md b/src/AirGap/StellaOps.AirGap.Controller/TASKS.md similarity index 99% rename from src/StellaOps.AirGap.Controller/TASKS.md rename to src/AirGap/StellaOps.AirGap.Controller/TASKS.md index 4a5f657f..b16a62b6 100644 --- a/src/StellaOps.AirGap.Controller/TASKS.md +++ b/src/AirGap/StellaOps.AirGap.Controller/TASKS.md @@ -1,18 +1,18 @@ -# AirGap Controller Task Board — Epic 16: Air-Gapped Mode - -## Sprint 56 – Sealing Foundations -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| AIRGAP-CTL-56-001 | TODO | AirGap Controller Guild | AUTH-OBS-50-001 | Implement `airgap_state` persistence, seal/unseal state machine, and Authority scope checks (`airgap:seal`, `airgap:status:read`). | State table created with migrations; seal/unseal transitions audited; unit tests cover happy/error paths. | -| AIRGAP-CTL-56-002 | TODO | AirGap Controller Guild, DevOps Guild | AIRGAP-CTL-56-001, DEVOPS-AIRGAP-56-001 | Expose `GET /system/airgap/status`, `POST /system/airgap/seal`, integrate policy hash validation, and return staleness/time anchor placeholders. | APIs documented with OpenAPI; RBAC enforced; integration tests cover unauthorized/sealed states. | - -## Sprint 57 – Enforcement & Diagnostics -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| AIRGAP-CTL-57-001 | TODO | AirGap Controller Guild | AIRGAP-CTL-56-002 | Add startup diagnostics that block application run when sealed flag set but egress policies missing; emit audit + telemetry. | Startup guard tested with simulated failure; telemetry includes `airgap_sealed=true`; docs updated. | -| AIRGAP-CTL-57-002 | TODO | AirGap Controller Guild, Observability Guild | AIRGAP-CTL-56-002, TELEMETRY-OBS-50-001 | Instrument seal/unseal events with trace/log fields and timeline emission (`airgap.sealed`, `airgap.unsealed`). | Timeline events validated; logs include actor/tenant/policy hash; integration test covers duplication suppression. 
| - -## Sprint 58 – Time Anchor & Drift -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| AIRGAP-CTL-58-001 | TODO | AirGap Controller Guild, AirGap Time Guild | AIRGAP-CTL-56-002, AIRGAP-TIME-57-001 | Persist time anchor metadata, compute drift seconds, and surface staleness budgets in status API. | Time anchor stored with bundle ID; drift calculation validated in tests; status API returns staleness metrics. | +# AirGap Controller Task Board — Epic 16: Air-Gapped Mode + +## Sprint 56 – Sealing Foundations +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| AIRGAP-CTL-56-001 | TODO | AirGap Controller Guild | AUTH-OBS-50-001 | Implement `airgap_state` persistence, seal/unseal state machine, and Authority scope checks (`airgap:seal`, `airgap:status:read`). | State table created with migrations; seal/unseal transitions audited; unit tests cover happy/error paths. | +| AIRGAP-CTL-56-002 | TODO | AirGap Controller Guild, DevOps Guild | AIRGAP-CTL-56-001, DEVOPS-AIRGAP-56-001 | Expose `GET /system/airgap/status`, `POST /system/airgap/seal`, integrate policy hash validation, and return staleness/time anchor placeholders. | APIs documented with OpenAPI; RBAC enforced; integration tests cover unauthorized/sealed states. | + +## Sprint 57 – Enforcement & Diagnostics +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| AIRGAP-CTL-57-001 | TODO | AirGap Controller Guild | AIRGAP-CTL-56-002 | Add startup diagnostics that block application run when sealed flag set but egress policies missing; emit audit + telemetry. | Startup guard tested with simulated failure; telemetry includes `airgap_sealed=true`; docs updated. | +| AIRGAP-CTL-57-002 | TODO | AirGap Controller Guild, Observability Guild | AIRGAP-CTL-56-002, TELEMETRY-OBS-50-001 | Instrument seal/unseal events with trace/log fields and timeline emission (`airgap.sealed`, `airgap.unsealed`). | Timeline events validated; logs include actor/tenant/policy hash; integration test covers duplication suppression. | + +## Sprint 58 – Time Anchor & Drift +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| AIRGAP-CTL-58-001 | TODO | AirGap Controller Guild, AirGap Time Guild | AIRGAP-CTL-56-002, AIRGAP-TIME-57-001 | Persist time anchor metadata, compute drift seconds, and surface staleness budgets in status API. | Time anchor stored with bundle ID; drift calculation validated in tests; status API returns staleness metrics. | diff --git a/src/StellaOps.AirGap.Importer/AGENTS.md b/src/AirGap/StellaOps.AirGap.Importer/AGENTS.md similarity index 98% rename from src/StellaOps.AirGap.Importer/AGENTS.md rename to src/AirGap/StellaOps.AirGap.Importer/AGENTS.md index fbe5d555..1935f15c 100644 --- a/src/StellaOps.AirGap.Importer/AGENTS.md +++ b/src/AirGap/StellaOps.AirGap.Importer/AGENTS.md @@ -1,16 +1,16 @@ -# StellaOps AirGap Importer Guild Charter - -## Mission -Deliver offline bundle verification and ingestion tooling for sealed environments, covering DSSE/TUF validation, catalog updates, and audit logging under the imposed rule. - -## Scope -- TUF metadata verification, DSSE signature checks, Merkle root validation. 
-- Import pipelines writing bundle catalogs, object-store layouts, and audit entries. -- CLI + API surfaces for dry-run verification, import, and status queries. -- Integration hooks for Conseiller, Excitator, Policy Engine, and Export Center. -- Negative-case handling (tampering, expired signatures, root rotation) with operator guidance. - -## Definition of Done -- Deterministic fixtures for valid/invalid bundles committed. -- Integration tests prove catalog + object-store updates are idempotent. -- Import audit trail viewable via API and timeline events. +# StellaOps AirGap Importer Guild Charter + +## Mission +Deliver offline bundle verification and ingestion tooling for sealed environments, covering DSSE/TUF validation, catalog updates, and audit logging under the imposed rule. + +## Scope +- TUF metadata verification, DSSE signature checks, Merkle root validation. +- Import pipelines writing bundle catalogs, object-store layouts, and audit entries. +- CLI + API surfaces for dry-run verification, import, and status queries. +- Integration hooks for Conseiller, Excitator, Policy Engine, and Export Center. +- Negative-case handling (tampering, expired signatures, root rotation) with operator guidance. + +## Definition of Done +- Deterministic fixtures for valid/invalid bundles committed. +- Integration tests prove catalog + object-store updates are idempotent. +- Import audit trail viewable via API and timeline events. diff --git a/src/StellaOps.AirGap.Importer/TASKS.md b/src/AirGap/StellaOps.AirGap.Importer/TASKS.md similarity index 99% rename from src/StellaOps.AirGap.Importer/TASKS.md rename to src/AirGap/StellaOps.AirGap.Importer/TASKS.md index 6eaf8f93..b8719377 100644 --- a/src/StellaOps.AirGap.Importer/TASKS.md +++ b/src/AirGap/StellaOps.AirGap.Importer/TASKS.md @@ -1,19 +1,19 @@ -# AirGap Importer Task Board — Epic 16: Air-Gapped Mode - -## Sprint 56 – Verification Primitives -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| AIRGAP-IMP-56-001 | TODO | AirGap Importer Guild | PROV-OBS-53-001 | Implement DSSE verification helpers, TUF metadata parser (`root.json`, `snapshot.json`, `timestamp.json`), and Merkle root calculator. | Verifier returns structured results; unit tests cover valid/invalid signatures and tampering scenarios. | -| AIRGAP-IMP-56-002 | TODO | AirGap Importer Guild, Security Guild | AIRGAP-IMP-56-001 | Introduce root rotation policy validation (dual approval) and signer trust store management. | Rotation policy enforced; tests cover valid rotation and rollback; docs stub updated. | - -## Sprint 57 – Catalog & Storage Writes -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| AIRGAP-IMP-57-001 | TODO | AirGap Importer Guild | AIRGAP-IMP-56-001, DEVOPS-AIRGAP-56-002 | Write `bundle_catalog` and `bundle_items` repositories with RLS + deterministic migrations. | Catalog tables created; integration tests ensure tenant/global scoping; determinism check passes. | -| AIRGAP-IMP-57-002 | TODO | AirGap Importer Guild, DevOps Guild | AIRGAP-IMP-57-001 | Implement object-store loader storing artifacts under tenant/global mirror paths with Zstandard decompression and checksum validation. | Import writes deduplicated objects; checksum mismatches raise errors; storage layout documented. 
| - -## Sprint 58 – Import Workflows -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| AIRGAP-IMP-58-001 | TODO | AirGap Importer Guild, CLI Guild | AIRGAP-IMP-57-002, CLI-AIRGAP-56-001 | Implement API (`POST /airgap/import`, `/airgap/verify`) and CLI commands wiring verification + catalog updates, including diff preview. | CLI/API share validation engine; diff preview surfaces metadata changes; audit entries recorded with trace IDs. | -| AIRGAP-IMP-58-002 | TODO | AirGap Importer Guild, Observability Guild | AIRGAP-IMP-58-001, TELEMETRY-OBS-50-001 | Emit timeline events (`airgap.import.started|completed|failed`) and telemetry metrics (bundle bytes, duration, warnings). | Events/metrics validated in integration tests; docs cross-link to observability dashboards. | +# AirGap Importer Task Board — Epic 16: Air-Gapped Mode + +## Sprint 56 – Verification Primitives +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| AIRGAP-IMP-56-001 | TODO | AirGap Importer Guild | PROV-OBS-53-001 | Implement DSSE verification helpers, TUF metadata parser (`root.json`, `snapshot.json`, `timestamp.json`), and Merkle root calculator. | Verifier returns structured results; unit tests cover valid/invalid signatures and tampering scenarios. | +| AIRGAP-IMP-56-002 | TODO | AirGap Importer Guild, Security Guild | AIRGAP-IMP-56-001 | Introduce root rotation policy validation (dual approval) and signer trust store management. | Rotation policy enforced; tests cover valid rotation and rollback; docs stub updated. | + +## Sprint 57 – Catalog & Storage Writes +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| AIRGAP-IMP-57-001 | TODO | AirGap Importer Guild | AIRGAP-IMP-56-001, DEVOPS-AIRGAP-56-002 | Write `bundle_catalog` and `bundle_items` repositories with RLS + deterministic migrations. | Catalog tables created; integration tests ensure tenant/global scoping; determinism check passes. | +| AIRGAP-IMP-57-002 | TODO | AirGap Importer Guild, DevOps Guild | AIRGAP-IMP-57-001 | Implement object-store loader storing artifacts under tenant/global mirror paths with Zstandard decompression and checksum validation. | Import writes deduplicated objects; checksum mismatches raise errors; storage layout documented. | + +## Sprint 58 – Import Workflows +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| AIRGAP-IMP-58-001 | TODO | AirGap Importer Guild, CLI Guild | AIRGAP-IMP-57-002, CLI-AIRGAP-56-001 | Implement API (`POST /airgap/import`, `/airgap/verify`) and CLI commands wiring verification + catalog updates, including diff preview. | CLI/API share validation engine; diff preview surfaces metadata changes; audit entries recorded with trace IDs. | +| AIRGAP-IMP-58-002 | TODO | AirGap Importer Guild, Observability Guild | AIRGAP-IMP-58-001, TELEMETRY-OBS-50-001 | Emit timeline events (`airgap.import.started|completed|failed`) and telemetry metrics (bundle bytes, duration, warnings). | Events/metrics validated in integration tests; docs cross-link to observability dashboards. 
| diff --git a/src/StellaOps.AirGap.Policy/AGENTS.md b/src/AirGap/StellaOps.AirGap.Policy/AGENTS.md similarity index 98% rename from src/StellaOps.AirGap.Policy/AGENTS.md rename to src/AirGap/StellaOps.AirGap.Policy/AGENTS.md index 8cd703e5..1f5aea43 100644 --- a/src/StellaOps.AirGap.Policy/AGENTS.md +++ b/src/AirGap/StellaOps.AirGap.Policy/AGENTS.md @@ -1,16 +1,16 @@ -# StellaOps AirGap Policy Guild Charter - -## Mission -Provide the shared enforcement layer (`EgressPolicy`, job plan validators, sealed-mode gates) that keeps all services compliant with Air-Gapped Mode requirements. - -## Scope -- `EgressPolicy` facade replacing raw HTTP client usage. -- Static analysis/linting to detect unauthorized network calls. -- Task Runner and orchestrator validators flagging disallowed destinations. -- Shared error contract (`AIRGAP_EGRESS_BLOCKED`) and remediation messages. -- Test harnesses simulating sealed/unsealed execution paths. - -## Definition of Done -- Every service imports the facade; CI fails on direct HTTP client usage. -- Sealed-mode unit tests cover panic/remediation behavior across host types. -- Documentation updated in `/docs/dev/airgap-contracts.md` for adoption patterns. +# StellaOps AirGap Policy Guild Charter + +## Mission +Provide the shared enforcement layer (`EgressPolicy`, job plan validators, sealed-mode gates) that keeps all services compliant with Air-Gapped Mode requirements. + +## Scope +- `EgressPolicy` facade replacing raw HTTP client usage. +- Static analysis/linting to detect unauthorized network calls. +- Task Runner and orchestrator validators flagging disallowed destinations. +- Shared error contract (`AIRGAP_EGRESS_BLOCKED`) and remediation messages. +- Test harnesses simulating sealed/unsealed execution paths. + +## Definition of Done +- Every service imports the facade; CI fails on direct HTTP client usage. +- Sealed-mode unit tests cover panic/remediation behavior across host types. +- Documentation updated in `/docs/dev/airgap-contracts.md` for adoption patterns. diff --git a/src/StellaOps.AirGap.Policy/TASKS.md b/src/AirGap/StellaOps.AirGap.Policy/TASKS.md similarity index 99% rename from src/StellaOps.AirGap.Policy/TASKS.md rename to src/AirGap/StellaOps.AirGap.Policy/TASKS.md index 676ca97c..840b4542 100644 --- a/src/StellaOps.AirGap.Policy/TASKS.md +++ b/src/AirGap/StellaOps.AirGap.Policy/TASKS.md @@ -1,19 +1,19 @@ -# AirGap Policy Task Board — Epic 16: Air-Gapped Mode - -## Sprint 56 – Facade & Contracts -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| AIRGAP-POL-56-001 | TODO | AirGap Policy Guild | TELEMETRY-OBS-50-001 | Implement `StellaOps.AirGap.Policy` package exposing `EgressPolicy` facade with sealed/unsealed branches and remediation-friendly errors. | Facade package builds/tests; integration tests simulate sealed/unsealed; error contract documented. | -| AIRGAP-POL-56-002 | TODO | AirGap Policy Guild, DevEx Guild | AIRGAP-POL-56-001 | Create Roslyn analyzer/code fix warning on raw `HttpClient` usage outside approved wrappers; add CI integration. | Analyzer packaged; CI fails on intentional violation; docs updated for opt-in. 
| - -## Sprint 57 – Service Adoption Wave 1 -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| AIRGAP-POL-57-001 | TODO | AirGap Policy Guild, BE-Base Platform Guild | AIRGAP-POL-56-001 | Update core web services (Web, Exporter, Policy, Findings, Authority) to use `EgressPolicy`; ensure configuration wiring for sealed mode. | Services compile with facade; sealed-mode tests run in CI; configuration docs updated. | -| AIRGAP-POL-57-002 | TODO | AirGap Policy Guild, Task Runner Guild | AIRGAP-POL-56-001, TASKRUN-OBS-50-001 | Implement Task Runner job plan validator rejecting network steps unless marked internal allow-list. | Validator blocks forbidden steps; tests cover allow/deny; error surfaces remediation text. | - -## Sprint 58 – Service Adoption Wave 2 -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| AIRGAP-POL-58-001 | TODO | AirGap Policy Guild, Observability Guild | AIRGAP-POL-57-001 | Ensure Observability exporters only target local endpoints in sealed mode; disable remote sinks with warning. | Exporters respect sealed flag; timeline/log message emitted; docs updated. | -| AIRGAP-POL-58-002 | TODO | AirGap Policy Guild, CLI Guild | AIRGAP-POL-56-001, CLI-OBS-50-001 | Add CLI sealed-mode guard that refuses commands needing egress and surfaces remediation. | CLI returns `AIRGAP_EGRESS_BLOCKED`; tests cover sealed/unsealed flows; help text updated. | +# AirGap Policy Task Board — Epic 16: Air-Gapped Mode + +## Sprint 56 – Facade & Contracts +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| AIRGAP-POL-56-001 | TODO | AirGap Policy Guild | TELEMETRY-OBS-50-001 | Implement `StellaOps.AirGap.Policy` package exposing `EgressPolicy` facade with sealed/unsealed branches and remediation-friendly errors. | Facade package builds/tests; integration tests simulate sealed/unsealed; error contract documented. | +| AIRGAP-POL-56-002 | TODO | AirGap Policy Guild, DevEx Guild | AIRGAP-POL-56-001 | Create Roslyn analyzer/code fix warning on raw `HttpClient` usage outside approved wrappers; add CI integration. | Analyzer packaged; CI fails on intentional violation; docs updated for opt-in. | + +## Sprint 57 – Service Adoption Wave 1 +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| AIRGAP-POL-57-001 | TODO | AirGap Policy Guild, BE-Base Platform Guild | AIRGAP-POL-56-001 | Update core web services (Web, Exporter, Policy, Findings, Authority) to use `EgressPolicy`; ensure configuration wiring for sealed mode. | Services compile with facade; sealed-mode tests run in CI; configuration docs updated. | +| AIRGAP-POL-57-002 | TODO | AirGap Policy Guild, Task Runner Guild | AIRGAP-POL-56-001, TASKRUN-OBS-50-001 | Implement Task Runner job plan validator rejecting network steps unless marked internal allow-list. | Validator blocks forbidden steps; tests cover allow/deny; error surfaces remediation text. 
| + +## Sprint 58 – Service Adoption Wave 2 +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| AIRGAP-POL-58-001 | TODO | AirGap Policy Guild, Observability Guild | AIRGAP-POL-57-001 | Ensure Observability exporters only target local endpoints in sealed mode; disable remote sinks with warning. | Exporters respect sealed flag; timeline/log message emitted; docs updated. | +| AIRGAP-POL-58-002 | TODO | AirGap Policy Guild, CLI Guild | AIRGAP-POL-56-001, CLI-OBS-50-001 | Add CLI sealed-mode guard that refuses commands needing egress and surfaces remediation. | CLI returns `AIRGAP_EGRESS_BLOCKED`; tests cover sealed/unsealed flows; help text updated. | diff --git a/src/StellaOps.AirGap.Time/AGENTS.md b/src/AirGap/StellaOps.AirGap.Time/AGENTS.md similarity index 97% rename from src/StellaOps.AirGap.Time/AGENTS.md rename to src/AirGap/StellaOps.AirGap.Time/AGENTS.md index b42e1eb1..f37ec8e8 100644 --- a/src/StellaOps.AirGap.Time/AGENTS.md +++ b/src/AirGap/StellaOps.AirGap.Time/AGENTS.md @@ -1,15 +1,15 @@ -# StellaOps AirGap Time Guild Charter - -## Mission -Manage trusted time anchors and staleness budgets for sealed environments, ensuring deterministic behavior when external time sources are unavailable. - -## Scope -- Parse signed time tokens from Mirror Bundles and validate signatures. -- Persist `time_anchor` metadata and compute drift/staleness metrics. -- Provide helpers for UI/API staleness badges and job gating. -- Integrate with Notifications to alert on approaching drift thresholds. - -## Definition of Done -- Test vectors for time tokens committed alongside verification code. -- Drift calculations deterministic and configurable per tenant. -- Documentation updates for `/docs/airgap/staleness-and-time.md` with examples. +# StellaOps AirGap Time Guild Charter + +## Mission +Manage trusted time anchors and staleness budgets for sealed environments, ensuring deterministic behavior when external time sources are unavailable. + +## Scope +- Parse signed time tokens from Mirror Bundles and validate signatures. +- Persist `time_anchor` metadata and compute drift/staleness metrics. +- Provide helpers for UI/API staleness badges and job gating. +- Integrate with Notifications to alert on approaching drift thresholds. + +## Definition of Done +- Test vectors for time tokens committed alongside verification code. +- Drift calculations deterministic and configurable per tenant. +- Documentation updates for `/docs/airgap/staleness-and-time.md` with examples. diff --git a/src/StellaOps.AirGap.Time/TASKS.md b/src/AirGap/StellaOps.AirGap.Time/TASKS.md similarity index 99% rename from src/StellaOps.AirGap.Time/TASKS.md rename to src/AirGap/StellaOps.AirGap.Time/TASKS.md index bbcc31c9..934ff6da 100644 --- a/src/StellaOps.AirGap.Time/TASKS.md +++ b/src/AirGap/StellaOps.AirGap.Time/TASKS.md @@ -1,13 +1,13 @@ -# AirGap Time Task Board — Epic 16: Air-Gapped Mode - -## Sprint 57 – Time Anchor Validation -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| AIRGAP-TIME-57-001 | TODO | AirGap Time Guild | PROV-OBS-54-001, AIRGAP-IMP-56-001 | Implement signed time token parser (Roughtime/RFC3161), verify signatures against bundle trust roots, and expose normalized anchor representation. | Parser handles both token formats; tests cover valid/expired/tampered tokens; documentation stubbed. 
| -| AIRGAP-TIME-57-002 | TODO | AirGap Time Guild, Observability Guild | AIRGAP-TIME-57-001 | Add telemetry counters for time anchors (`airgap_time_anchor_age_seconds`) and alerts for approaching thresholds. | Metrics registered; alert templates created; integration test ensures emission on stale anchor. | - -## Sprint 58 – Drift & Staleness Enforcement -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| AIRGAP-TIME-58-001 | TODO | AirGap Time Guild | AIRGAP-TIME-57-001, AIRGAP-CTL-56-002 | Persist drift baseline, compute per-content staleness (advisories, VEX, policy) based on bundle metadata, and surface through controller status API. | Drift/staleness values exposed via API; unit tests cover threshold calculations; docs updated. | -| AIRGAP-TIME-58-002 | TODO | AirGap Time Guild, Notifications Guild | AIRGAP-TIME-58-001, NOTIFY-OBS-51-001 | Emit notifications and timeline events when staleness budgets breached or approaching. | Notifications dispatched with remediation; timeline events recorded; CLI shows warning banner. | +# AirGap Time Task Board — Epic 16: Air-Gapped Mode + +## Sprint 57 – Time Anchor Validation +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| AIRGAP-TIME-57-001 | TODO | AirGap Time Guild | PROV-OBS-54-001, AIRGAP-IMP-56-001 | Implement signed time token parser (Roughtime/RFC3161), verify signatures against bundle trust roots, and expose normalized anchor representation. | Parser handles both token formats; tests cover valid/expired/tampered tokens; documentation stubbed. | +| AIRGAP-TIME-57-002 | TODO | AirGap Time Guild, Observability Guild | AIRGAP-TIME-57-001 | Add telemetry counters for time anchors (`airgap_time_anchor_age_seconds`) and alerts for approaching thresholds. | Metrics registered; alert templates created; integration test ensures emission on stale anchor. | + +## Sprint 58 – Drift & Staleness Enforcement +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| AIRGAP-TIME-58-001 | TODO | AirGap Time Guild | AIRGAP-TIME-57-001, AIRGAP-CTL-56-002 | Persist drift baseline, compute per-content staleness (advisories, VEX, policy) based on bundle metadata, and surface through controller status API. | Drift/staleness values exposed via API; unit tests cover threshold calculations; docs updated. | +| AIRGAP-TIME-58-002 | TODO | AirGap Time Guild, Notifications Guild | AIRGAP-TIME-58-001, NOTIFY-OBS-51-001 | Emit notifications and timeline events when staleness budgets breached or approaching. | Notifications dispatched with remediation; timeline events recorded; CLI shows warning banner. 
| diff --git a/src/Aoc/StellaOps.Aoc.sln b/src/Aoc/StellaOps.Aoc.sln new file mode 100644 index 00000000..a2906621 --- /dev/null +++ b/src/Aoc/StellaOps.Aoc.sln @@ -0,0 +1,56 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.0.31903.59 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "__Libraries", "__Libraries", "{41F15E67-7190-CF23-3BC4-77E87134CADD}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Aoc", "__Libraries\StellaOps.Aoc\StellaOps.Aoc.csproj", "{54CD9E36-B119-4970-B652-826363055F7D}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "__Tests", "__Tests", "{56BCE1BF-7CBA-7CE8-203D-A88051F1D642}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Aoc.Tests", "__Tests\StellaOps.Aoc.Tests\StellaOps.Aoc.Tests.csproj", "{5CF1158D-64F6-4981-85CB-B43453A37329}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|Any CPU = Release|Any CPU + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {54CD9E36-B119-4970-B652-826363055F7D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {54CD9E36-B119-4970-B652-826363055F7D}.Debug|Any CPU.Build.0 = Debug|Any CPU + {54CD9E36-B119-4970-B652-826363055F7D}.Debug|x64.ActiveCfg = Debug|Any CPU + {54CD9E36-B119-4970-B652-826363055F7D}.Debug|x64.Build.0 = Debug|Any CPU + {54CD9E36-B119-4970-B652-826363055F7D}.Debug|x86.ActiveCfg = Debug|Any CPU + {54CD9E36-B119-4970-B652-826363055F7D}.Debug|x86.Build.0 = Debug|Any CPU + {54CD9E36-B119-4970-B652-826363055F7D}.Release|Any CPU.ActiveCfg = Release|Any CPU + {54CD9E36-B119-4970-B652-826363055F7D}.Release|Any CPU.Build.0 = Release|Any CPU + {54CD9E36-B119-4970-B652-826363055F7D}.Release|x64.ActiveCfg = Release|Any CPU + {54CD9E36-B119-4970-B652-826363055F7D}.Release|x64.Build.0 = Release|Any CPU + {54CD9E36-B119-4970-B652-826363055F7D}.Release|x86.ActiveCfg = Release|Any CPU + {54CD9E36-B119-4970-B652-826363055F7D}.Release|x86.Build.0 = Release|Any CPU + {5CF1158D-64F6-4981-85CB-B43453A37329}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {5CF1158D-64F6-4981-85CB-B43453A37329}.Debug|Any CPU.Build.0 = Debug|Any CPU + {5CF1158D-64F6-4981-85CB-B43453A37329}.Debug|x64.ActiveCfg = Debug|Any CPU + {5CF1158D-64F6-4981-85CB-B43453A37329}.Debug|x64.Build.0 = Debug|Any CPU + {5CF1158D-64F6-4981-85CB-B43453A37329}.Debug|x86.ActiveCfg = Debug|Any CPU + {5CF1158D-64F6-4981-85CB-B43453A37329}.Debug|x86.Build.0 = Debug|Any CPU + {5CF1158D-64F6-4981-85CB-B43453A37329}.Release|Any CPU.ActiveCfg = Release|Any CPU + {5CF1158D-64F6-4981-85CB-B43453A37329}.Release|Any CPU.Build.0 = Release|Any CPU + {5CF1158D-64F6-4981-85CB-B43453A37329}.Release|x64.ActiveCfg = Release|Any CPU + {5CF1158D-64F6-4981-85CB-B43453A37329}.Release|x64.Build.0 = Release|Any CPU + {5CF1158D-64F6-4981-85CB-B43453A37329}.Release|x86.ActiveCfg = Release|Any CPU + {5CF1158D-64F6-4981-85CB-B43453A37329}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(NestedProjects) = preSolution + {54CD9E36-B119-4970-B652-826363055F7D} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {5CF1158D-64F6-4981-85CB-B43453A37329} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + EndGlobalSection +EndGlobal 
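The per-module solution added above nests the library and test projects under the __Libraries and __Tests solution folders. A minimal sketch of how an equivalent module solution could be composed with the dotnet CLI, assuming the SDK is on PATH (the commands are illustrative; the committed .sln is the authoritative artifact):

# create the module-scoped solution file
dotnet new sln -n StellaOps.Aoc -o src/Aoc

# attach the library and test projects under their solution folders
dotnet sln src/Aoc/StellaOps.Aoc.sln add src/Aoc/__Libraries/StellaOps.Aoc/StellaOps.Aoc.csproj --solution-folder __Libraries
dotnet sln src/Aoc/StellaOps.Aoc.sln add src/Aoc/__Tests/StellaOps.Aoc.Tests/StellaOps.Aoc.Tests.csproj --solution-folder __Tests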
diff --git a/src/StellaOps.Aoc/AocForbiddenKeys.cs b/src/Aoc/__Libraries/StellaOps.Aoc/AocForbiddenKeys.cs similarity index 96% rename from src/StellaOps.Aoc/AocForbiddenKeys.cs rename to src/Aoc/__Libraries/StellaOps.Aoc/AocForbiddenKeys.cs index 1ad51f67..7b94be8e 100644 --- a/src/StellaOps.Aoc/AocForbiddenKeys.cs +++ b/src/Aoc/__Libraries/StellaOps.Aoc/AocForbiddenKeys.cs @@ -1,25 +1,25 @@ -using System.Collections.Immutable; - -namespace StellaOps.Aoc; - -public static class AocForbiddenKeys -{ - private static readonly ImmutableHashSet ForbiddenTopLevel = new[] - { - "severity", - "cvss", - "cvss_vector", - "effective_status", - "effective_range", - "merged_from", - "consensus_provider", - "reachability", - "asset_criticality", - "risk_score", - }.ToImmutableHashSet(StringComparer.OrdinalIgnoreCase); - - public static bool IsForbiddenTopLevel(string propertyName) => ForbiddenTopLevel.Contains(propertyName); - - public static bool IsDerivedField(string propertyName) - => propertyName.StartsWith("effective_", StringComparison.OrdinalIgnoreCase); -} +using System.Collections.Immutable; + +namespace StellaOps.Aoc; + +public static class AocForbiddenKeys +{ + private static readonly ImmutableHashSet ForbiddenTopLevel = new[] + { + "severity", + "cvss", + "cvss_vector", + "effective_status", + "effective_range", + "merged_from", + "consensus_provider", + "reachability", + "asset_criticality", + "risk_score", + }.ToImmutableHashSet(StringComparer.OrdinalIgnoreCase); + + public static bool IsForbiddenTopLevel(string propertyName) => ForbiddenTopLevel.Contains(propertyName); + + public static bool IsDerivedField(string propertyName) + => propertyName.StartsWith("effective_", StringComparison.OrdinalIgnoreCase); +} diff --git a/src/StellaOps.Aoc/AocGuardException.cs b/src/Aoc/__Libraries/StellaOps.Aoc/AocGuardException.cs similarity index 96% rename from src/StellaOps.Aoc/AocGuardException.cs rename to src/Aoc/__Libraries/StellaOps.Aoc/AocGuardException.cs index 3c0db884..cd3256ef 100644 --- a/src/StellaOps.Aoc/AocGuardException.cs +++ b/src/Aoc/__Libraries/StellaOps.Aoc/AocGuardException.cs @@ -1,17 +1,17 @@ -using System; -using System.Collections.Immutable; - -namespace StellaOps.Aoc; - -public sealed class AocGuardException : Exception -{ - public AocGuardException(AocGuardResult result) - : base("AOC guard validation failed.") - { - Result = result ?? throw new ArgumentNullException(nameof(result)); - } - - public AocGuardResult Result { get; } - - public ImmutableArray Violations => Result.Violations; -} +using System; +using System.Collections.Immutable; + +namespace StellaOps.Aoc; + +public sealed class AocGuardException : Exception +{ + public AocGuardException(AocGuardResult result) + : base("AOC guard validation failed.") + { + Result = result ?? 
throw new ArgumentNullException(nameof(result)); + } + + public AocGuardResult Result { get; } + + public ImmutableArray Violations => Result.Violations; +} diff --git a/src/StellaOps.Aoc/AocGuardExtensions.cs b/src/Aoc/__Libraries/StellaOps.Aoc/AocGuardExtensions.cs similarity index 95% rename from src/StellaOps.Aoc/AocGuardExtensions.cs rename to src/Aoc/__Libraries/StellaOps.Aoc/AocGuardExtensions.cs index f6083c39..2eaaaf15 100644 --- a/src/StellaOps.Aoc/AocGuardExtensions.cs +++ b/src/Aoc/__Libraries/StellaOps.Aoc/AocGuardExtensions.cs @@ -1,22 +1,22 @@ -using System.Text.Json; - -namespace StellaOps.Aoc; - -public static class AocGuardExtensions -{ - public static AocGuardResult ValidateOrThrow(this IAocGuard guard, JsonElement document, AocGuardOptions? options = null) - { - if (guard is null) - { - throw new ArgumentNullException(nameof(guard)); - } - - var result = guard.Validate(document, options); - if (!result.IsValid) - { - throw new AocGuardException(result); - } - - return result; - } -} +using System.Text.Json; + +namespace StellaOps.Aoc; + +public static class AocGuardExtensions +{ + public static AocGuardResult ValidateOrThrow(this IAocGuard guard, JsonElement document, AocGuardOptions? options = null) + { + if (guard is null) + { + throw new ArgumentNullException(nameof(guard)); + } + + var result = guard.Validate(document, options); + if (!result.IsValid) + { + throw new AocGuardException(result); + } + + return result; + } +} diff --git a/src/StellaOps.Aoc/AocGuardOptions.cs b/src/Aoc/__Libraries/StellaOps.Aoc/AocGuardOptions.cs similarity index 96% rename from src/StellaOps.Aoc/AocGuardOptions.cs rename to src/Aoc/__Libraries/StellaOps.Aoc/AocGuardOptions.cs index 27e46af1..e1d754be 100644 --- a/src/StellaOps.Aoc/AocGuardOptions.cs +++ b/src/Aoc/__Libraries/StellaOps.Aoc/AocGuardOptions.cs @@ -1,29 +1,29 @@ -using System.Collections.Immutable; - -namespace StellaOps.Aoc; - -public sealed record AocGuardOptions -{ - private static readonly ImmutableHashSet DefaultRequiredTopLevel = new[] - { - "tenant", - "source", - "upstream", - "content", - "linkset", - }.ToImmutableHashSet(StringComparer.OrdinalIgnoreCase); - - public static AocGuardOptions Default { get; } = new(); - - public ImmutableHashSet RequiredTopLevelFields { get; init; } = DefaultRequiredTopLevel; - - /// - /// When true, signature metadata is required under upstream.signature. - /// - public bool RequireSignatureMetadata { get; init; } = true; - - /// - /// When true, tenant must be a non-empty string. - /// - public bool RequireTenant { get; init; } = true; -} +using System.Collections.Immutable; + +namespace StellaOps.Aoc; + +public sealed record AocGuardOptions +{ + private static readonly ImmutableHashSet DefaultRequiredTopLevel = new[] + { + "tenant", + "source", + "upstream", + "content", + "linkset", + }.ToImmutableHashSet(StringComparer.OrdinalIgnoreCase); + + public static AocGuardOptions Default { get; } = new(); + + public ImmutableHashSet RequiredTopLevelFields { get; init; } = DefaultRequiredTopLevel; + + /// + /// When true, signature metadata is required under upstream.signature. + /// + public bool RequireSignatureMetadata { get; init; } = true; + + /// + /// When true, tenant must be a non-empty string. 
+ /// + public bool RequireTenant { get; init; } = true; +} diff --git a/src/StellaOps.Aoc/AocGuardResult.cs b/src/Aoc/__Libraries/StellaOps.Aoc/AocGuardResult.cs similarity index 97% rename from src/StellaOps.Aoc/AocGuardResult.cs rename to src/Aoc/__Libraries/StellaOps.Aoc/AocGuardResult.cs index 9ea1a3b1..35a1a822 100644 --- a/src/StellaOps.Aoc/AocGuardResult.cs +++ b/src/Aoc/__Libraries/StellaOps.Aoc/AocGuardResult.cs @@ -1,14 +1,14 @@ -using System.Collections.Immutable; - -namespace StellaOps.Aoc; - -public sealed record AocGuardResult(bool IsValid, ImmutableArray Violations) -{ - public static AocGuardResult Success { get; } = new(true, ImmutableArray.Empty); - - public static AocGuardResult FromViolations(IEnumerable violations) - { - var array = violations.ToImmutableArray(); - return array.IsDefaultOrEmpty ? Success : new(false, array); - } -} +using System.Collections.Immutable; + +namespace StellaOps.Aoc; + +public sealed record AocGuardResult(bool IsValid, ImmutableArray Violations) +{ + public static AocGuardResult Success { get; } = new(true, ImmutableArray.Empty); + + public static AocGuardResult FromViolations(IEnumerable violations) + { + var array = violations.ToImmutableArray(); + return array.IsDefaultOrEmpty ? Success : new(false, array); + } +} diff --git a/src/StellaOps.Aoc/AocViolation.cs b/src/Aoc/__Libraries/StellaOps.Aoc/AocViolation.cs similarity index 97% rename from src/StellaOps.Aoc/AocViolation.cs rename to src/Aoc/__Libraries/StellaOps.Aoc/AocViolation.cs index e9337566..6a43e1aa 100644 --- a/src/StellaOps.Aoc/AocViolation.cs +++ b/src/Aoc/__Libraries/StellaOps.Aoc/AocViolation.cs @@ -1,13 +1,13 @@ -using System.Text.Json.Serialization; - -namespace StellaOps.Aoc; - -public sealed record AocViolation( - [property: JsonPropertyName("code")] AocViolationCode Code, - [property: JsonPropertyName("errorCode")] string ErrorCode, - [property: JsonPropertyName("path")] string Path, - [property: JsonPropertyName("message")] string Message) -{ - public static AocViolation Create(AocViolationCode code, string path, string message) - => new(code, code.ToErrorCode(), path, message); -} +using System.Text.Json.Serialization; + +namespace StellaOps.Aoc; + +public sealed record AocViolation( + [property: JsonPropertyName("code")] AocViolationCode Code, + [property: JsonPropertyName("errorCode")] string ErrorCode, + [property: JsonPropertyName("path")] string Path, + [property: JsonPropertyName("message")] string Message) +{ + public static AocViolation Create(AocViolationCode code, string path, string message) + => new(code, code.ToErrorCode(), path, message); +} diff --git a/src/StellaOps.Aoc/AocViolationCode.cs b/src/Aoc/__Libraries/StellaOps.Aoc/AocViolationCode.cs similarity index 96% rename from src/StellaOps.Aoc/AocViolationCode.cs rename to src/Aoc/__Libraries/StellaOps.Aoc/AocViolationCode.cs index 7b31555e..8ba74471 100644 --- a/src/StellaOps.Aoc/AocViolationCode.cs +++ b/src/Aoc/__Libraries/StellaOps.Aoc/AocViolationCode.cs @@ -1,34 +1,34 @@ -namespace StellaOps.Aoc; - -public enum AocViolationCode -{ - None = 0, - ForbiddenField, - MergeAttempt, - IdempotencyViolation, - MissingProvenance, - SignatureInvalid, - DerivedFindingDetected, - UnknownField, - MissingRequiredField, - InvalidTenant, - InvalidSignatureMetadata, -} - -public static class AocViolationCodeExtensions -{ - public static string ToErrorCode(this AocViolationCode code) => code switch - { - AocViolationCode.ForbiddenField => "ERR_AOC_001", - AocViolationCode.MergeAttempt => "ERR_AOC_002", - 
AocViolationCode.IdempotencyViolation => "ERR_AOC_003", - AocViolationCode.MissingProvenance => "ERR_AOC_004", - AocViolationCode.SignatureInvalid => "ERR_AOC_005", - AocViolationCode.DerivedFindingDetected => "ERR_AOC_006", - AocViolationCode.UnknownField => "ERR_AOC_007", - AocViolationCode.MissingRequiredField => "ERR_AOC_004", - AocViolationCode.InvalidTenant => "ERR_AOC_004", - AocViolationCode.InvalidSignatureMetadata => "ERR_AOC_005", - _ => "ERR_AOC_000", - }; -} +namespace StellaOps.Aoc; + +public enum AocViolationCode +{ + None = 0, + ForbiddenField, + MergeAttempt, + IdempotencyViolation, + MissingProvenance, + SignatureInvalid, + DerivedFindingDetected, + UnknownField, + MissingRequiredField, + InvalidTenant, + InvalidSignatureMetadata, +} + +public static class AocViolationCodeExtensions +{ + public static string ToErrorCode(this AocViolationCode code) => code switch + { + AocViolationCode.ForbiddenField => "ERR_AOC_001", + AocViolationCode.MergeAttempt => "ERR_AOC_002", + AocViolationCode.IdempotencyViolation => "ERR_AOC_003", + AocViolationCode.MissingProvenance => "ERR_AOC_004", + AocViolationCode.SignatureInvalid => "ERR_AOC_005", + AocViolationCode.DerivedFindingDetected => "ERR_AOC_006", + AocViolationCode.UnknownField => "ERR_AOC_007", + AocViolationCode.MissingRequiredField => "ERR_AOC_004", + AocViolationCode.InvalidTenant => "ERR_AOC_004", + AocViolationCode.InvalidSignatureMetadata => "ERR_AOC_005", + _ => "ERR_AOC_000", + }; +} diff --git a/src/StellaOps.Aoc/AocWriteGuard.cs b/src/Aoc/__Libraries/StellaOps.Aoc/AocWriteGuard.cs similarity index 97% rename from src/StellaOps.Aoc/AocWriteGuard.cs rename to src/Aoc/__Libraries/StellaOps.Aoc/AocWriteGuard.cs index 861b34d5..1f62c509 100644 --- a/src/StellaOps.Aoc/AocWriteGuard.cs +++ b/src/Aoc/__Libraries/StellaOps.Aoc/AocWriteGuard.cs @@ -1,127 +1,127 @@ -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Text.Json; - -namespace StellaOps.Aoc; - -public interface IAocGuard -{ - AocGuardResult Validate(JsonElement document, AocGuardOptions? options = null); -} - -public sealed class AocWriteGuard : IAocGuard -{ - public AocGuardResult Validate(JsonElement document, AocGuardOptions? 
options = null) - { - options ??= AocGuardOptions.Default; - var violations = ImmutableArray.CreateBuilder(); - var presentTopLevel = new HashSet(StringComparer.OrdinalIgnoreCase); - - foreach (var property in document.EnumerateObject()) - { - presentTopLevel.Add(property.Name); - - if (AocForbiddenKeys.IsForbiddenTopLevel(property.Name)) - { - violations.Add(AocViolation.Create(AocViolationCode.ForbiddenField, $"/{property.Name}", $"Field '{property.Name}' is forbidden in AOC documents.")); - continue; - } - - if (AocForbiddenKeys.IsDerivedField(property.Name)) - { - violations.Add(AocViolation.Create(AocViolationCode.DerivedFindingDetected, $"/{property.Name}", $"Derived field '{property.Name}' must not be written during ingestion.")); - } - } - - foreach (var required in options.RequiredTopLevelFields) - { - if (!document.TryGetProperty(required, out var element) || element.ValueKind is JsonValueKind.Null or JsonValueKind.Undefined) - { - violations.Add(AocViolation.Create(AocViolationCode.MissingRequiredField, $"/{required}", $"Required field '{required}' is missing.")); - continue; - } - - if (options.RequireTenant && string.Equals(required, "tenant", StringComparison.OrdinalIgnoreCase)) - { - if (element.ValueKind != JsonValueKind.String || string.IsNullOrWhiteSpace(element.GetString())) - { - violations.Add(AocViolation.Create(AocViolationCode.InvalidTenant, "/tenant", "Tenant must be a non-empty string.")); - } - } - } - - if (document.TryGetProperty("upstream", out var upstream) && upstream.ValueKind == JsonValueKind.Object) - { - if (!upstream.TryGetProperty("content_hash", out var contentHash) || contentHash.ValueKind != JsonValueKind.String || string.IsNullOrWhiteSpace(contentHash.GetString())) - { - violations.Add(AocViolation.Create(AocViolationCode.MissingProvenance, "/upstream/content_hash", "Upstream content hash is required.")); - } - - if (!upstream.TryGetProperty("signature", out var signature) || signature.ValueKind != JsonValueKind.Object) - { - if (options.RequireSignatureMetadata) - { - violations.Add(AocViolation.Create(AocViolationCode.MissingProvenance, "/upstream/signature", "Signature metadata is required.")); - } - } - else if (options.RequireSignatureMetadata) - { - ValidateSignature(signature, violations); - } - } - else - { - violations.Add(AocViolation.Create(AocViolationCode.MissingRequiredField, "/upstream", "Upstream metadata is required.")); - } - - if (document.TryGetProperty("content", out var content) && content.ValueKind == JsonValueKind.Object) - { - if (!content.TryGetProperty("raw", out var raw) || raw.ValueKind is JsonValueKind.Null or JsonValueKind.Undefined) - { - violations.Add(AocViolation.Create(AocViolationCode.MissingProvenance, "/content/raw", "Raw upstream payload must be preserved.")); - } - } - else - { - violations.Add(AocViolation.Create(AocViolationCode.MissingRequiredField, "/content", "Content metadata is required.")); - } - - if (!document.TryGetProperty("linkset", out var linkset) || linkset.ValueKind != JsonValueKind.Object) - { - violations.Add(AocViolation.Create(AocViolationCode.MissingRequiredField, "/linkset", "Linkset metadata is required.")); - } - - return AocGuardResult.FromViolations(violations); - } - - private static void ValidateSignature(JsonElement signature, ImmutableArray.Builder violations) - { - if (!signature.TryGetProperty("present", out var presentElement) || presentElement.ValueKind is not (JsonValueKind.True or JsonValueKind.False)) - { - 
violations.Add(AocViolation.Create(AocViolationCode.InvalidSignatureMetadata, "/upstream/signature/present", "Signature metadata must include 'present' boolean.")); - return; - } - - var signaturePresent = presentElement.GetBoolean(); - - if (!signaturePresent) - { - return; - } - - if (!signature.TryGetProperty("format", out var formatElement) || formatElement.ValueKind != JsonValueKind.String || string.IsNullOrWhiteSpace(formatElement.GetString())) - { - violations.Add(AocViolation.Create(AocViolationCode.InvalidSignatureMetadata, "/upstream/signature/format", "Signature format is required when signature is present.")); - } - - if (!signature.TryGetProperty("sig", out var sigElement) || sigElement.ValueKind != JsonValueKind.String || string.IsNullOrWhiteSpace(sigElement.GetString())) - { - violations.Add(AocViolation.Create(AocViolationCode.SignatureInvalid, "/upstream/signature/sig", "Signature payload is required when signature is present.")); - } - - if (!signature.TryGetProperty("key_id", out var keyIdElement) || keyIdElement.ValueKind != JsonValueKind.String || string.IsNullOrWhiteSpace(keyIdElement.GetString())) - { - violations.Add(AocViolation.Create(AocViolationCode.InvalidSignatureMetadata, "/upstream/signature/key_id", "Signature key identifier is required when signature is present.")); - } - } -} +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Text.Json; + +namespace StellaOps.Aoc; + +public interface IAocGuard +{ + AocGuardResult Validate(JsonElement document, AocGuardOptions? options = null); +} + +public sealed class AocWriteGuard : IAocGuard +{ + public AocGuardResult Validate(JsonElement document, AocGuardOptions? options = null) + { + options ??= AocGuardOptions.Default; + var violations = ImmutableArray.CreateBuilder(); + var presentTopLevel = new HashSet(StringComparer.OrdinalIgnoreCase); + + foreach (var property in document.EnumerateObject()) + { + presentTopLevel.Add(property.Name); + + if (AocForbiddenKeys.IsForbiddenTopLevel(property.Name)) + { + violations.Add(AocViolation.Create(AocViolationCode.ForbiddenField, $"/{property.Name}", $"Field '{property.Name}' is forbidden in AOC documents.")); + continue; + } + + if (AocForbiddenKeys.IsDerivedField(property.Name)) + { + violations.Add(AocViolation.Create(AocViolationCode.DerivedFindingDetected, $"/{property.Name}", $"Derived field '{property.Name}' must not be written during ingestion.")); + } + } + + foreach (var required in options.RequiredTopLevelFields) + { + if (!document.TryGetProperty(required, out var element) || element.ValueKind is JsonValueKind.Null or JsonValueKind.Undefined) + { + violations.Add(AocViolation.Create(AocViolationCode.MissingRequiredField, $"/{required}", $"Required field '{required}' is missing.")); + continue; + } + + if (options.RequireTenant && string.Equals(required, "tenant", StringComparison.OrdinalIgnoreCase)) + { + if (element.ValueKind != JsonValueKind.String || string.IsNullOrWhiteSpace(element.GetString())) + { + violations.Add(AocViolation.Create(AocViolationCode.InvalidTenant, "/tenant", "Tenant must be a non-empty string.")); + } + } + } + + if (document.TryGetProperty("upstream", out var upstream) && upstream.ValueKind == JsonValueKind.Object) + { + if (!upstream.TryGetProperty("content_hash", out var contentHash) || contentHash.ValueKind != JsonValueKind.String || string.IsNullOrWhiteSpace(contentHash.GetString())) + { + violations.Add(AocViolation.Create(AocViolationCode.MissingProvenance, "/upstream/content_hash", "Upstream 
content hash is required.")); + } + + if (!upstream.TryGetProperty("signature", out var signature) || signature.ValueKind != JsonValueKind.Object) + { + if (options.RequireSignatureMetadata) + { + violations.Add(AocViolation.Create(AocViolationCode.MissingProvenance, "/upstream/signature", "Signature metadata is required.")); + } + } + else if (options.RequireSignatureMetadata) + { + ValidateSignature(signature, violations); + } + } + else + { + violations.Add(AocViolation.Create(AocViolationCode.MissingRequiredField, "/upstream", "Upstream metadata is required.")); + } + + if (document.TryGetProperty("content", out var content) && content.ValueKind == JsonValueKind.Object) + { + if (!content.TryGetProperty("raw", out var raw) || raw.ValueKind is JsonValueKind.Null or JsonValueKind.Undefined) + { + violations.Add(AocViolation.Create(AocViolationCode.MissingProvenance, "/content/raw", "Raw upstream payload must be preserved.")); + } + } + else + { + violations.Add(AocViolation.Create(AocViolationCode.MissingRequiredField, "/content", "Content metadata is required.")); + } + + if (!document.TryGetProperty("linkset", out var linkset) || linkset.ValueKind != JsonValueKind.Object) + { + violations.Add(AocViolation.Create(AocViolationCode.MissingRequiredField, "/linkset", "Linkset metadata is required.")); + } + + return AocGuardResult.FromViolations(violations); + } + + private static void ValidateSignature(JsonElement signature, ImmutableArray.Builder violations) + { + if (!signature.TryGetProperty("present", out var presentElement) || presentElement.ValueKind is not (JsonValueKind.True or JsonValueKind.False)) + { + violations.Add(AocViolation.Create(AocViolationCode.InvalidSignatureMetadata, "/upstream/signature/present", "Signature metadata must include 'present' boolean.")); + return; + } + + var signaturePresent = presentElement.GetBoolean(); + + if (!signaturePresent) + { + return; + } + + if (!signature.TryGetProperty("format", out var formatElement) || formatElement.ValueKind != JsonValueKind.String || string.IsNullOrWhiteSpace(formatElement.GetString())) + { + violations.Add(AocViolation.Create(AocViolationCode.InvalidSignatureMetadata, "/upstream/signature/format", "Signature format is required when signature is present.")); + } + + if (!signature.TryGetProperty("sig", out var sigElement) || sigElement.ValueKind != JsonValueKind.String || string.IsNullOrWhiteSpace(sigElement.GetString())) + { + violations.Add(AocViolation.Create(AocViolationCode.SignatureInvalid, "/upstream/signature/sig", "Signature payload is required when signature is present.")); + } + + if (!signature.TryGetProperty("key_id", out var keyIdElement) || keyIdElement.ValueKind != JsonValueKind.String || string.IsNullOrWhiteSpace(keyIdElement.GetString())) + { + violations.Add(AocViolation.Create(AocViolationCode.InvalidSignatureMetadata, "/upstream/signature/key_id", "Signature key identifier is required when signature is present.")); + } + } +} diff --git a/src/StellaOps.Aoc/ServiceCollectionExtensions.cs b/src/Aoc/__Libraries/StellaOps.Aoc/ServiceCollectionExtensions.cs similarity index 96% rename from src/StellaOps.Aoc/ServiceCollectionExtensions.cs rename to src/Aoc/__Libraries/StellaOps.Aoc/ServiceCollectionExtensions.cs index ac9369d3..2108a18a 100644 --- a/src/StellaOps.Aoc/ServiceCollectionExtensions.cs +++ b/src/Aoc/__Libraries/StellaOps.Aoc/ServiceCollectionExtensions.cs @@ -1,17 +1,17 @@ -using Microsoft.Extensions.DependencyInjection; - -namespace StellaOps.Aoc; - -public static class 
ServiceCollectionExtensions -{ - public static IServiceCollection AddAocGuard(this IServiceCollection services) - { - if (services is null) - { - throw new ArgumentNullException(nameof(services)); - } - - services.AddSingleton(); - return services; - } -} +using Microsoft.Extensions.DependencyInjection; + +namespace StellaOps.Aoc; + +public static class ServiceCollectionExtensions +{ + public static IServiceCollection AddAocGuard(this IServiceCollection services) + { + if (services is null) + { + throw new ArgumentNullException(nameof(services)); + } + + services.AddSingleton(); + return services; + } +} diff --git a/src/StellaOps.Aoc/StellaOps.Aoc.csproj b/src/Aoc/__Libraries/StellaOps.Aoc/StellaOps.Aoc.csproj similarity index 97% rename from src/StellaOps.Aoc/StellaOps.Aoc.csproj rename to src/Aoc/__Libraries/StellaOps.Aoc/StellaOps.Aoc.csproj index febd1315..67e27893 100644 --- a/src/StellaOps.Aoc/StellaOps.Aoc.csproj +++ b/src/Aoc/__Libraries/StellaOps.Aoc/StellaOps.Aoc.csproj @@ -1,12 +1,12 @@ - - - net10.0 - preview - enable - enable - true - - - - - + + + net10.0 + preview + enable + enable + true + + + + + diff --git a/src/StellaOps.Aoc.Tests/AocWriteGuardTests.cs b/src/Aoc/__Tests/StellaOps.Aoc.Tests/AocWriteGuardTests.cs similarity index 96% rename from src/StellaOps.Aoc.Tests/AocWriteGuardTests.cs rename to src/Aoc/__Tests/StellaOps.Aoc.Tests/AocWriteGuardTests.cs index cf4632de..39a753d3 100644 --- a/src/StellaOps.Aoc.Tests/AocWriteGuardTests.cs +++ b/src/Aoc/__Tests/StellaOps.Aoc.Tests/AocWriteGuardTests.cs @@ -1,113 +1,113 @@ -using System.Text.Json; -using StellaOps.Aoc; - -namespace StellaOps.Aoc.Tests; - -public sealed class AocWriteGuardTests -{ - private static readonly AocWriteGuard Guard = new(); - - [Fact] - public void Validate_ReturnsSuccess_ForMinimalValidDocument() - { - using var document = JsonDocument.Parse(""" - { - "tenant": "default", - "source": {"vendor": "osv"}, - "upstream": { - "upstream_id": "GHSA-xxxx", - "content_hash": "sha256:abc", - "signature": { "present": false } - }, - "content": { - "format": "OSV", - "raw": {"id": "GHSA-xxxx"} - }, - "linkset": {} - } - """); - - var result = Guard.Validate(document.RootElement); - - Assert.True(result.IsValid); - Assert.Empty(result.Violations); - } - - [Fact] - public void Validate_FlagsMissingTenant() - { - using var document = JsonDocument.Parse(""" - { - "source": {"vendor": "osv"}, - "upstream": { - "upstream_id": "GHSA-xxxx", - "content_hash": "sha256:abc", - "signature": { "present": false } - }, - "content": { - "format": "OSV", - "raw": {"id": "GHSA-xxxx"} - }, - "linkset": {} - } - """); - - var result = Guard.Validate(document.RootElement); - - Assert.False(result.IsValid); - Assert.Contains(result.Violations, v => v.ErrorCode == "ERR_AOC_004" && v.Path == "/tenant"); - } - - [Fact] - public void Validate_FlagsForbiddenField() - { - using var document = JsonDocument.Parse(""" - { - "tenant": "default", - "severity": "high", - "source": {"vendor": "osv"}, - "upstream": { - "upstream_id": "GHSA-xxxx", - "content_hash": "sha256:abc", - "signature": { "present": false } - }, - "content": { - "format": "OSV", - "raw": {"id": "GHSA-xxxx"} - }, - "linkset": {} - } - """); - - var result = Guard.Validate(document.RootElement); - - Assert.False(result.IsValid); - Assert.Contains(result.Violations, v => v.ErrorCode == "ERR_AOC_001" && v.Path == "/severity"); - } - - [Fact] - public void Validate_FlagsInvalidSignatureMetadata() - { - using var document = JsonDocument.Parse(""" - { - "tenant": "default", - 
"source": {"vendor": "osv"}, - "upstream": { - "upstream_id": "GHSA-xxxx", - "content_hash": "sha256:abc", - "signature": { "present": true, "format": "dsse" } - }, - "content": { - "format": "OSV", - "raw": {"id": "GHSA-xxxx"} - }, - "linkset": {} - } - """); - - var result = Guard.Validate(document.RootElement); - - Assert.False(result.IsValid); - Assert.Contains(result.Violations, v => v.ErrorCode == "ERR_AOC_005" && v.Path.Contains("/sig")); - } -} +using System.Text.Json; +using StellaOps.Aoc; + +namespace StellaOps.Aoc.Tests; + +public sealed class AocWriteGuardTests +{ + private static readonly AocWriteGuard Guard = new(); + + [Fact] + public void Validate_ReturnsSuccess_ForMinimalValidDocument() + { + using var document = JsonDocument.Parse(""" + { + "tenant": "default", + "source": {"vendor": "osv"}, + "upstream": { + "upstream_id": "GHSA-xxxx", + "content_hash": "sha256:abc", + "signature": { "present": false } + }, + "content": { + "format": "OSV", + "raw": {"id": "GHSA-xxxx"} + }, + "linkset": {} + } + """); + + var result = Guard.Validate(document.RootElement); + + Assert.True(result.IsValid); + Assert.Empty(result.Violations); + } + + [Fact] + public void Validate_FlagsMissingTenant() + { + using var document = JsonDocument.Parse(""" + { + "source": {"vendor": "osv"}, + "upstream": { + "upstream_id": "GHSA-xxxx", + "content_hash": "sha256:abc", + "signature": { "present": false } + }, + "content": { + "format": "OSV", + "raw": {"id": "GHSA-xxxx"} + }, + "linkset": {} + } + """); + + var result = Guard.Validate(document.RootElement); + + Assert.False(result.IsValid); + Assert.Contains(result.Violations, v => v.ErrorCode == "ERR_AOC_004" && v.Path == "/tenant"); + } + + [Fact] + public void Validate_FlagsForbiddenField() + { + using var document = JsonDocument.Parse(""" + { + "tenant": "default", + "severity": "high", + "source": {"vendor": "osv"}, + "upstream": { + "upstream_id": "GHSA-xxxx", + "content_hash": "sha256:abc", + "signature": { "present": false } + }, + "content": { + "format": "OSV", + "raw": {"id": "GHSA-xxxx"} + }, + "linkset": {} + } + """); + + var result = Guard.Validate(document.RootElement); + + Assert.False(result.IsValid); + Assert.Contains(result.Violations, v => v.ErrorCode == "ERR_AOC_001" && v.Path == "/severity"); + } + + [Fact] + public void Validate_FlagsInvalidSignatureMetadata() + { + using var document = JsonDocument.Parse(""" + { + "tenant": "default", + "source": {"vendor": "osv"}, + "upstream": { + "upstream_id": "GHSA-xxxx", + "content_hash": "sha256:abc", + "signature": { "present": true, "format": "dsse" } + }, + "content": { + "format": "OSV", + "raw": {"id": "GHSA-xxxx"} + }, + "linkset": {} + } + """); + + var result = Guard.Validate(document.RootElement); + + Assert.False(result.IsValid); + Assert.Contains(result.Violations, v => v.ErrorCode == "ERR_AOC_005" && v.Path.Contains("/sig")); + } +} diff --git a/src/Aoc/__Tests/StellaOps.Aoc.Tests/StellaOps.Aoc.Tests.csproj b/src/Aoc/__Tests/StellaOps.Aoc.Tests/StellaOps.Aoc.Tests.csproj new file mode 100644 index 00000000..cdc9945d --- /dev/null +++ b/src/Aoc/__Tests/StellaOps.Aoc.Tests/StellaOps.Aoc.Tests.csproj @@ -0,0 +1,42 @@ + + + + + net10.0 + preview + enable + enable + true + Exe + false + false + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/StellaOps.Aoc.Tests/UnitTest1.cs b/src/Aoc/__Tests/StellaOps.Aoc.Tests/UnitTest1.cs similarity index 91% rename from src/StellaOps.Aoc.Tests/UnitTest1.cs rename to 
src/Aoc/__Tests/StellaOps.Aoc.Tests/UnitTest1.cs index 515425ec..c2ca5e14 100644 --- a/src/StellaOps.Aoc.Tests/UnitTest1.cs +++ b/src/Aoc/__Tests/StellaOps.Aoc.Tests/UnitTest1.cs @@ -1,10 +1,10 @@ -namespace StellaOps.Aoc.Tests; - -public class UnitTest1 -{ - [Fact] - public void Test1() - { - - } -} +namespace StellaOps.Aoc.Tests; + +public class UnitTest1 +{ + [Fact] + public void Test1() + { + + } +} diff --git a/src/StellaOps.Cli.Tests/xunit.runner.json b/src/Aoc/__Tests/StellaOps.Aoc.Tests/xunit.runner.json similarity index 100% rename from src/StellaOps.Cli.Tests/xunit.runner.json rename to src/Aoc/__Tests/StellaOps.Aoc.Tests/xunit.runner.json diff --git a/src/StellaOps.Api.Governance/AGENTS.md b/src/Api/StellaOps.Api.Governance/AGENTS.md similarity index 97% rename from src/StellaOps.Api.Governance/AGENTS.md rename to src/Api/StellaOps.Api.Governance/AGENTS.md index 0a26f9a4..6b20f66d 100644 --- a/src/StellaOps.Api.Governance/AGENTS.md +++ b/src/Api/StellaOps.Api.Governance/AGENTS.md @@ -1,15 +1,15 @@ -# API Governance Guild Charter - -## Mission -Enforce API contract quality through linting, compatibility checks, version policy automation, and changelog generation. - -## Scope -- Maintain lint rule set, compatibility diff tooling, and CI integration. -- Gate PRs on contract validation, example coverage, and naming conventions. -- Produce automated changelogs and deprecation notices from OAS diffs. -- Coordinate with Notifications Studio for deprecation broadcasts. - -## Definition of Done -- CI gate prevents merging incompatible or non-conforming specs. -- Version bump tooling produces signed changelog artifacts per release. -- Governance documentation kept current in `/docs/contributing/api-contracts.md`. +# API Governance Guild Charter + +## Mission +Enforce API contract quality through linting, compatibility checks, version policy automation, and changelog generation. + +## Scope +- Maintain lint rule set, compatibility diff tooling, and CI integration. +- Gate PRs on contract validation, example coverage, and naming conventions. +- Produce automated changelogs and deprecation notices from OAS diffs. +- Coordinate with Notifications Studio for deprecation broadcasts. + +## Definition of Done +- CI gate prevents merging incompatible or non-conforming specs. +- Version bump tooling produces signed changelog artifacts per release. +- Governance documentation kept current in `/docs/contributing/api-contracts.md`. diff --git a/src/StellaOps.Api.Governance/TASKS.md b/src/Api/StellaOps.Api.Governance/TASKS.md similarity index 92% rename from src/StellaOps.Api.Governance/TASKS.md rename to src/Api/StellaOps.Api.Governance/TASKS.md index 2d1c40d9..20875746 100644 --- a/src/StellaOps.Api.Governance/TASKS.md +++ b/src/Api/StellaOps.Api.Governance/TASKS.md @@ -10,7 +10,7 @@ | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| | APIGOV-62-001 | TODO | API Governance Guild | APIGOV-61-001 | Build compatibility diff tool producing additive/breaking reports comparing prior release. | Diff output consumed in CI; failing on breaking changes unless override provided. | -| APIGOV-62-002 | TODO | API Governance Guild, DevOps Guild | APIGOV-62-001 | Automate changelog generation and publish signed artifacts to `src/StellaOps.Sdk.Release` pipeline. | Changelog pipeline produces markdown + JSON; signatures verified; docs updated. 
| +| APIGOV-62-002 | TODO | API Governance Guild, DevOps Guild | APIGOV-62-001 | Automate changelog generation and publish signed artifacts to `src/Sdk/StellaOps.Sdk.Release` pipeline. | Changelog pipeline produces markdown + JSON; signatures verified; docs updated. | ## Sprint 63 – Deprecation & Notifications | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | diff --git a/src/StellaOps.Api.OpenApi/AGENTS.md b/src/Api/StellaOps.Api.OpenApi/AGENTS.md similarity index 78% rename from src/StellaOps.Api.OpenApi/AGENTS.md rename to src/Api/StellaOps.Api.OpenApi/AGENTS.md index 42f57474..5e696b14 100644 --- a/src/StellaOps.Api.OpenApi/AGENTS.md +++ b/src/Api/StellaOps.Api.OpenApi/AGENTS.md @@ -4,8 +4,8 @@ Maintain OpenAPI 3.1 specifications for every StellaOps service, compose the aggregate spec, and ensure API contract consistency across releases. ## Scope -- Author and review per-service OAS documents in `src/StellaOps.Api.OpenApi//openapi.yaml`. -- Operate the aggregate composer producing `src/StellaOps.Api.OpenApi/stella.yaml`. +- Author and review per-service OAS documents in `src/Api/StellaOps.Api.OpenApi//openapi.yaml`. +- Operate the aggregate composer producing `src/Api/StellaOps.Api.OpenApi/stella.yaml`. - Provide shared components, schema libraries, and example catalogs. - Coordinate with service guilds on contract changes, examples, and versioning. - Own CI validation, linting, and compatibility diff tooling for OAS artifacts. diff --git a/src/StellaOps.Api.OpenApi/TASKS.md b/src/Api/StellaOps.Api.OpenApi/TASKS.md similarity index 99% rename from src/StellaOps.Api.OpenApi/TASKS.md rename to src/Api/StellaOps.Api.OpenApi/TASKS.md index 34207fdc..8dd29168 100644 --- a/src/StellaOps.Api.OpenApi/TASKS.md +++ b/src/Api/StellaOps.Api.OpenApi/TASKS.md @@ -1,19 +1,19 @@ -# API OpenAPI Task Board — Epic 17: SDKs & OpenAPI Docs - -## Sprint 61 – Spec Foundations -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| OAS-61-001 | TODO | API Contracts Guild | — | Scaffold per-service OpenAPI 3.1 files with shared components, info blocks, and initial path stubs. | All services have baseline `openapi.yaml`; shared components library established; lint passes. | -| OAS-61-002 | TODO | API Contracts Guild, DevOps Guild | OAS-61-001 | Implement aggregate composer (`stella.yaml`) resolving `$ref`s and merging shared components; wire into CI. | Aggregate spec builds deterministically; CI artifact published; documentation updated. | - -## Sprint 62 – Examples & Error Envelope -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| OAS-62-001 | TODO | API Contracts Guild, Service Guilds | OAS-61-001 | Populate request/response examples for top 50 endpoints, including standard error envelope. | Examples validated via CI; error envelope consistent across services. | -| OAS-62-002 | TODO | API Contracts Guild | OAS-61-002 | Add custom lint rules enforcing pagination, idempotency headers, naming conventions, and example coverage. | Lint job fails on violations; documentation for rules published. 
| - -## Sprint 63 – Compatibility & Discovery -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| OAS-63-001 | TODO | API Contracts Guild | OAS-61-002 | Implement compatibility diff tooling comparing previous release specs; classify breaking vs additive changes. | Diff tool integrated in CI; PRs flagged on breaking changes. | -| OAS-63-002 | TODO | API Contracts Guild, Gateway Guild | OAS-62-002 | Add `/.well-known/openapi` discovery endpoint schema metadata (extensions, version info). | Discovery endpoints defined in spec; linked to implementation tasks. | +# API OpenAPI Task Board — Epic 17: SDKs & OpenAPI Docs + +## Sprint 61 – Spec Foundations +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| OAS-61-001 | TODO | API Contracts Guild | — | Scaffold per-service OpenAPI 3.1 files with shared components, info blocks, and initial path stubs. | All services have baseline `openapi.yaml`; shared components library established; lint passes. | +| OAS-61-002 | TODO | API Contracts Guild, DevOps Guild | OAS-61-001 | Implement aggregate composer (`stella.yaml`) resolving `$ref`s and merging shared components; wire into CI. | Aggregate spec builds deterministically; CI artifact published; documentation updated. | + +## Sprint 62 – Examples & Error Envelope +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| OAS-62-001 | TODO | API Contracts Guild, Service Guilds | OAS-61-001 | Populate request/response examples for top 50 endpoints, including standard error envelope. | Examples validated via CI; error envelope consistent across services. | +| OAS-62-002 | TODO | API Contracts Guild | OAS-61-002 | Add custom lint rules enforcing pagination, idempotency headers, naming conventions, and example coverage. | Lint job fails on violations; documentation for rules published. | + +## Sprint 63 – Compatibility & Discovery +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| OAS-63-001 | TODO | API Contracts Guild | OAS-61-002 | Implement compatibility diff tooling comparing previous release specs; classify breaking vs additive changes. | Diff tool integrated in CI; PRs flagged on breaking changes. | +| OAS-63-002 | TODO | API Contracts Guild, Gateway Guild | OAS-62-002 | Add `/.well-known/openapi` discovery endpoint schema metadata (extensions, version info). | Discovery endpoints defined in spec; linked to implementation tasks. | diff --git a/src/StellaOps.Api.OpenApi/authority/openapi.yaml b/src/Api/StellaOps.Api.OpenApi/authority/openapi.yaml similarity index 97% rename from src/StellaOps.Api.OpenApi/authority/openapi.yaml rename to src/Api/StellaOps.Api.OpenApi/authority/openapi.yaml index 6557f13b..cd1aa678 100644 --- a/src/StellaOps.Api.OpenApi/authority/openapi.yaml +++ b/src/Api/StellaOps.Api.OpenApi/authority/openapi.yaml @@ -1,689 +1,689 @@ -openapi: 3.1.0 -info: - title: StellaOps Authority Authentication API - summary: Token issuance, introspection, revocation, and key discovery endpoints exposed by the Authority service. - description: | - The Authority service issues OAuth 2.1 access tokens for StellaOps components, enforcing tenant and scope - restrictions configured per client. 
This specification describes the authentication surface only; domain APIs - are documented by their owning services. - version: 0.1.0 -jsonSchemaDialect: https://json-schema.org/draft/2020-12/schema -servers: - - url: https://authority.stellaops.local - description: Example Authority deployment -tags: - - name: Authentication - description: OAuth 2.1 token exchange, introspection, and revocation flows. - - name: Keys - description: JSON Web Key Set discovery. -components: - securitySchemes: - ClientSecretBasic: - type: http - scheme: basic - description: HTTP Basic authentication with `client_id` and `client_secret`. - OAuthPassword: - type: oauth2 - description: Resource owner password exchange for Authority-managed identities. - flows: - password: - tokenUrl: /token - refreshUrl: /token - scopes: - advisory:ingest: Submit advisory ingestion payloads. - advisory:read: Read advisory ingestion data. - aoc:verify: Execute Aggregation-Only Contract verification workflows. - authority.audit.read: Read Authority audit logs. - authority.clients.manage: Manage Authority client registrations. - authority.users.manage: Manage Authority users. - authority:tenants.read: Read the Authority tenant catalog. - concelier.jobs.trigger: Trigger Concelier aggregation jobs. - concelier.merge: Manage Concelier merge operations. - effective:write: Write effective findings (Policy Engine service identity only). - email: Access email claim data. - exceptions:approve: Approve exception workflows. - findings:read: Read effective findings emitted by Policy Engine. - graph:export: Export graph artefacts. - graph:read: Read graph explorer data. - graph:simulate: Run graph what-if simulations. - graph:write: Enqueue or mutate graph build jobs. - offline_access: Request refresh tokens for offline access. - openid: Request OpenID Connect identity tokens. - orch:operate: Execute privileged Orchestrator control actions. - orch:read: Read Orchestrator job state. - policy:author: Author Policy Studio drafts and workspaces. - policy:activate: Activate policy revisions. - policy:approve: Approve or reject policy drafts. - policy:audit: Inspect Policy Studio audit history. - policy:edit: Edit policy definitions. - policy:operate: Operate Policy Studio promotions and runs. - policy:read: Read policy definitions and metadata. - policy:run: Trigger policy executions. - policy:submit: Submit policy drafts for review. - policy:review: Review Policy Studio drafts and leave feedback. - policy:simulate: Execute Policy Studio simulations. - policy:write: Create or update policy drafts. - profile: Access profile claim data. - signals:admin: Administer Signals ingestion and routing settings. - signals:read: Read Signals events and state. - signals:write: Publish Signals events or mutate state. - stellaops.bypass: Bypass trust boundary protections (restricted identities only). - ui.read: Read Console UX resources. - vex:ingest: Submit VEX ingestion payloads. - vex:read: Read VEX ingestion data. - vuln:read: Read vulnerability permalinks and overlays. - authorizationCode: - authorizationUrl: /authorize - tokenUrl: /token - refreshUrl: /token - scopes: - advisory:ingest: Submit advisory ingestion payloads. - advisory:read: Read advisory ingestion data. - aoc:verify: Execute Aggregation-Only Contract verification workflows. - authority.audit.read: Read Authority audit logs. - authority.clients.manage: Manage Authority client registrations. - authority.users.manage: Manage Authority users. 
- authority:tenants.read: Read the Authority tenant catalog. - concelier.jobs.trigger: Trigger Concelier aggregation jobs. - concelier.merge: Manage Concelier merge operations. - effective:write: Write effective findings (Policy Engine service identity only). - email: Access email claim data. - exceptions:approve: Approve exception workflows. - findings:read: Read effective findings emitted by Policy Engine. - graph:export: Export graph artefacts. - graph:read: Read graph explorer data. - graph:simulate: Run graph what-if simulations. - graph:write: Enqueue or mutate graph build jobs. - offline_access: Request refresh tokens for offline access. - openid: Request OpenID Connect identity tokens. - orch:operate: Execute privileged Orchestrator control actions. - orch:read: Read Orchestrator job state. - policy:author: Author Policy Studio drafts and workspaces. - policy:activate: Activate policy revisions. - policy:approve: Approve or reject policy drafts. - policy:audit: Inspect Policy Studio audit history. - policy:edit: Edit policy definitions. - policy:operate: Operate Policy Studio promotions and runs. - policy:read: Read policy definitions and metadata. - policy:run: Trigger policy executions. - policy:submit: Submit policy drafts for review. - policy:review: Review Policy Studio drafts and leave feedback. - policy:simulate: Execute Policy Studio simulations. - policy:write: Create or update policy drafts. - profile: Access profile claim data. - signals:admin: Administer Signals ingestion and routing settings. - signals:read: Read Signals events and state. - signals:write: Publish Signals events or mutate state. - stellaops.bypass: Bypass trust boundary protections (restricted identities only). - ui.read: Read Console UX resources. - vex:ingest: Submit VEX ingestion payloads. - vex:read: Read VEX ingestion data. - vuln:read: Read vulnerability permalinks and overlays. - OAuthClientCredentials: - type: oauth2 - description: Client credential exchange for machine-to-machine identities. - flows: - clientCredentials: - tokenUrl: /token - scopes: - advisory:ingest: Submit advisory ingestion payloads. - advisory:read: Read advisory ingestion data. - aoc:verify: Execute Aggregation-Only Contract verification workflows. - authority.audit.read: Read Authority audit logs. - authority.clients.manage: Manage Authority client registrations. - authority.users.manage: Manage Authority users. - authority:tenants.read: Read the Authority tenant catalog. - concelier.jobs.trigger: Trigger Concelier aggregation jobs. - concelier.merge: Manage Concelier merge operations. - effective:write: Write effective findings (Policy Engine service identity only). - email: Access email claim data. - exceptions:approve: Approve exception workflows. - findings:read: Read effective findings emitted by Policy Engine. - graph:export: Export graph artefacts. - graph:read: Read graph explorer data. - graph:simulate: Run graph what-if simulations. - graph:write: Enqueue or mutate graph build jobs. - offline_access: Request refresh tokens for offline access. - openid: Request OpenID Connect identity tokens. - orch:operate: Execute privileged Orchestrator control actions. - orch:read: Read Orchestrator job state. - policy:author: Author Policy Studio drafts and workspaces. - policy:activate: Activate policy revisions. - policy:approve: Approve or reject policy drafts. - policy:audit: Inspect Policy Studio audit history. - policy:edit: Edit policy definitions. - policy:operate: Operate Policy Studio promotions and runs. 
- policy:read: Read policy definitions and metadata. - policy:run: Trigger policy executions. - policy:submit: Submit policy drafts for review. - policy:review: Review Policy Studio drafts and leave feedback. - policy:simulate: Execute Policy Studio simulations. - policy:write: Create or update policy drafts. - profile: Access profile claim data. - signals:admin: Administer Signals ingestion and routing settings. - signals:read: Read Signals events and state. - signals:write: Publish Signals events or mutate state. - stellaops.bypass: Bypass trust boundary protections (restricted identities only). - ui.read: Read Console UX resources. - vex:ingest: Submit VEX ingestion payloads. - vex:read: Read VEX ingestion data. - vuln:read: Read vulnerability permalinks and overlays. - schemas: - TokenResponse: - type: object - description: OAuth 2.1 bearer token response. - properties: - access_token: - type: string - description: Access token encoded as JWT. - token_type: - type: string - description: Token type indicator. Always `Bearer`. - expires_in: - type: integer - description: Lifetime of the access token, in seconds. - minimum: 1 - refresh_token: - type: string - description: Refresh token issued when the grant allows offline access. - scope: - type: string - description: Space-delimited scopes granted in the response. - id_token: - type: string - description: ID token issued for authorization-code flows. - required: - - access_token - - token_type - - expires_in - OAuthErrorResponse: - type: object - description: RFC 6749 compliant error envelope. - properties: - error: - type: string - description: Machine-readable error code. - error_description: - type: string - description: Human-readable error description. - error_uri: - type: string - format: uri - description: Link to documentation about the error. - required: - - error - PasswordGrantRequest: - type: object - required: - - grant_type - - client_id - - username - - password - properties: - grant_type: - type: string - const: password - client_id: - type: string - description: Registered client identifier. May also be supplied via HTTP Basic auth. - client_secret: - type: string - description: Client secret. Required for confidential clients when not using HTTP Basic auth. - scope: - type: string - description: Space-delimited scopes being requested. - username: - type: string - description: Resource owner username. - password: - type: string - description: Resource owner password. - authority_provider: - type: string - description: Optional identity provider hint. Required when multiple password-capable providers are registered. - description: Form-encoded payload for password grant exchange. - ClientCredentialsGrantRequest: - type: object - required: - - grant_type - - client_id - properties: - grant_type: - type: string - const: client_credentials - client_id: - type: string - description: Registered client identifier. May also be supplied via HTTP Basic auth. - client_secret: - type: string - description: Client secret. Required for confidential clients when not using HTTP Basic auth. - scope: - type: string - description: Space-delimited scopes being requested. - authority_provider: - type: string - description: Optional identity provider hint for plugin-backed clients. - operator_reason: - type: string - description: Required when requesting `orch:operate`; explains the operator action. 
- maxLength: 256 - operator_ticket: - type: string - description: Required when requesting `orch:operate`; tracks the external change ticket or incident. - maxLength: 128 - description: Form-encoded payload for client credentials exchange. - RefreshTokenGrantRequest: - type: object - required: - - grant_type - - refresh_token - properties: - grant_type: - type: string - const: refresh_token - client_id: - type: string - description: Registered client identifier. May also be supplied via HTTP Basic auth. - client_secret: - type: string - description: Client secret. Required for confidential clients when not using HTTP Basic auth. - refresh_token: - type: string - description: Previously issued refresh token. - scope: - type: string - description: Optional scope list to narrow the requested access. - description: Form-encoded payload for refresh token exchange. - RevocationRequest: - type: object - required: - - token - properties: - token: - type: string - description: Token value or token identifier to revoke. - token_type_hint: - type: string - description: Optional token type hint (`access_token` or `refresh_token`). - description: Form-encoded payload for token revocation. - IntrospectionRequest: - type: object - required: - - token - properties: - token: - type: string - description: Token value whose state should be introspected. - token_type_hint: - type: string - description: Optional token type hint (`access_token` or `refresh_token`). - description: Form-encoded payload for token introspection. - IntrospectionResponse: - type: object - description: Active token descriptor compliant with RFC 7662. - properties: - active: - type: boolean - description: Indicates whether the token is currently active. - scope: - type: string - description: Space-delimited list of scopes granted to the token. - client_id: - type: string - description: Client identifier associated with the token. - sub: - type: string - description: Subject identifier when the token represents an end-user. - username: - type: string - description: Preferred username associated with the subject. - token_type: - type: string - description: Type of the token (e.g., `Bearer`). - exp: - type: integer - description: Expiration timestamp (seconds since UNIX epoch). - iat: - type: integer - description: Issued-at timestamp (seconds since UNIX epoch). - nbf: - type: integer - description: Not-before timestamp (seconds since UNIX epoch). - aud: - type: array - description: Audience values associated with the token. - items: - type: string - iss: - type: string - description: Issuer identifier. - jti: - type: string - description: JWT identifier corresponding to the token. - tenant: - type: string - description: Tenant associated with the token, when assigned. - confirmation: - type: object - description: Sender-constrained confirmation data (e.g., mTLS thumbprint, DPoP JWK thumbprint). - required: - - active - JwksDocument: - type: object - description: JSON Web Key Set published by the Authority. - properties: - keys: - type: array - items: - $ref: '#/components/schemas/Jwk' - required: - - keys - Jwk: - type: object - description: Public key material for token signature validation. - properties: - kid: - type: string - description: Key identifier. - kty: - type: string - description: Key type (e.g., `EC`, `RSA`). - use: - type: string - description: Intended key use (`sig`). - alg: - type: string - description: Signing algorithm (e.g., `ES384`). - crv: - type: string - description: Elliptic curve identifier when applicable. 
- x: - type: string - description: X coordinate for EC keys. - y: - type: string - description: Y coordinate for EC keys. - status: - type: string - description: Operational status metadata for the key (e.g., `active`, `retiring`). -paths: - /token: - post: - tags: - - Authentication - summary: Exchange credentials for tokens - description: | - Issues OAuth 2.1 bearer tokens for StellaOps clients. Supports password, client credentials, - authorization-code, device, and refresh token grants. Confidential clients must authenticate using - HTTP Basic auth or `client_secret` form fields. - security: - - ClientSecretBasic: [] - - {} - requestBody: - required: true - content: - application/x-www-form-urlencoded: - schema: - oneOf: - - $ref: '#/components/schemas/PasswordGrantRequest' - - $ref: '#/components/schemas/ClientCredentialsGrantRequest' - - $ref: '#/components/schemas/RefreshTokenGrantRequest' - encoding: - authority_provider: - style: form - explode: false - examples: - passwordGrant: - summary: Password grant for tenant-scoped ingestion bot - value: - grant_type: password - client_id: ingest-cli - client_secret: s3cr3t - username: ingest-bot - password: pa55w0rd! - scope: advisory:ingest vex:ingest - authority_provider: primary-directory - authorizationCode: - summary: Authorization code exchange for Console UI session - value: - grant_type: authorization_code - client_id: console-ui - code: 2Lba1WtwPLfZ2b0Z9uPrsQ - redirect_uri: https://console.stellaops.local/auth/callback - code_verifier: g3ZnL91QJ6i4zO_86oI4CDnZ7gS0bSeK - clientCredentials: - summary: Client credentials exchange for Policy Engine - value: - grant_type: client_credentials - client_id: policy-engine - client_secret: 9c39f602-2f2b-4f29 - scope: effective:write findings:read - operator_reason: Deploying policy change 1234 - operator_ticket: CHG-004211 - refreshToken: - summary: Refresh token rotation for console session - value: - grant_type: refresh_token - client_id: console-ui - refresh_token: 0.rg9pVlsGzXE8Q - responses: - '200': - description: Token exchange succeeded. - content: - application/json: - schema: - $ref: '#/components/schemas/TokenResponse' - examples: - passwordGrant: - summary: Password grant success response - value: - access_token: eyJhbGciOiJFUzM4NCIsInR5cCI6IkpXVCJ9... - token_type: Bearer - expires_in: 3600 - refresh_token: OxGdVtZJ-mk49cFd38uRUw - scope: advisory:ingest vex:ingest - clientCredentials: - summary: Client credentials success response - value: - access_token: eyJhbGciOiJFUzM4NCIsInR5cCI6IkpXVCJ9... - token_type: Bearer - expires_in: 900 - scope: effective:write findings:read - authorizationCode: - summary: Authorization code success response - value: - access_token: eyJhbGciOiJFUzM4NCIsInR5cCI6IkpXVCJ9... - token_type: Bearer - expires_in: 900 - refresh_token: VxKpc9Vj9QjYV6gLrhQHTw - scope: ui.read authority:tenants.read - id_token: eyJhbGciOiJFUzM4NCIsImtpZCI6ImNvbnNvbGUifQ... - '400': - description: Malformed request, unsupported grant type, or invalid credentials. - content: - application/json: - schema: - $ref: '#/components/schemas/OAuthErrorResponse' - examples: - invalidProvider: - summary: Unknown identity provider hint - value: - error: invalid_request - error_description: "Unknown identity provider 'legacy-directory'." - invalidScope: - summary: Scope not permitted for client - value: - error: invalid_scope - error_description: Scope 'effective:write' is not permitted for this client. - '401': - description: Client authentication failed. 
- content: - application/json: - schema: - $ref: '#/components/schemas/OAuthErrorResponse' - examples: - badClientSecret: - summary: Invalid client secret - value: - error: invalid_client - error_description: Client authentication failed. - /revoke: - post: - tags: - - Authentication - summary: Revoke an access or refresh token - security: - - ClientSecretBasic: [] - requestBody: - required: true - content: - application/x-www-form-urlencoded: - schema: - $ref: '#/components/schemas/RevocationRequest' - examples: - revokeRefreshToken: - summary: Revoke refresh token after logout - value: - token: 0.rg9pVlsGzXE8Q - token_type_hint: refresh_token - responses: - '200': - description: Token revoked or already invalid. The response body is intentionally blank. - '400': - description: Malformed request. - content: - application/json: - schema: - $ref: '#/components/schemas/OAuthErrorResponse' - examples: - missingToken: - summary: Token parameter omitted - value: - error: invalid_request - error_description: The revocation request is missing the token parameter. - '401': - description: Client authentication failed. - content: - application/json: - schema: - $ref: '#/components/schemas/OAuthErrorResponse' - examples: - badClientSecret: - summary: Invalid client credentials - value: - error: invalid_client - error_description: Client authentication failed. - /introspect: - post: - tags: - - Authentication - summary: Introspect token state - description: Returns the active status and claims for a given token. Requires a privileged client. - security: - - ClientSecretBasic: [] - requestBody: - required: true - content: - application/x-www-form-urlencoded: - schema: - $ref: '#/components/schemas/IntrospectionRequest' - examples: - introspectToken: - summary: Validate an access token issued to Orchestrator - value: - token: eyJhbGciOiJFUzM4NCIsInR5cCI6IkpXVCJ9... - token_type_hint: access_token - responses: - '200': - description: Token state evaluated. - content: - application/json: - schema: - $ref: '#/components/schemas/IntrospectionResponse' - examples: - activeToken: - summary: Active token response - value: - active: true - scope: orch:operate orch:read - client_id: orch-control - sub: operator-7f12 - username: ops.engineer@tenant.example - token_type: Bearer - exp: 1761628800 - iat: 1761625200 - nbf: 1761625200 - iss: https://authority.stellaops.local - aud: - - https://orch.stellaops.local - jti: 01J8KYRAMG7FWBPRRV5XG20T7S - tenant: tenant-alpha - confirmation: - mtls_thumbprint: 079871b8c9a0f2e6 - inactiveToken: - summary: Revoked token response - value: - active: false - '400': - description: Malformed request. - content: - application/json: - schema: - $ref: '#/components/schemas/OAuthErrorResponse' - examples: - missingToken: - summary: Token missing - value: - error: invalid_request - error_description: token parameter is required. - '401': - description: Client authentication failed or client lacks introspection permission. - content: - application/json: - schema: - $ref: '#/components/schemas/OAuthErrorResponse' - examples: - unauthorizedClient: - summary: Client not allowed to introspect tokens - value: - error: invalid_client - error_description: Client authentication failed. - /jwks: - get: - tags: - - Keys - summary: Retrieve signing keys - description: Returns the JSON Web Key Set used to validate Authority-issued tokens. - responses: - '200': - description: JWKS document. 
- headers: - Cache-Control: - schema: - type: string - description: Standard caching headers apply; keys rotate infrequently. - content: - application/json: - schema: - $ref: '#/components/schemas/JwksDocument' - examples: - ecKeySet: - summary: EC signing keys - value: - keys: - - kid: auth-tokens-es384-202510 - kty: EC - use: sig - alg: ES384 - crv: P-384 - x: 7UchU5R77LtChrJx6uWg9mYjFvV6RIpSgZPDIj7d1q0 - y: v98nHe8a7mGZ9Fn1t4Jp9PTJv1ma35QPmhUrE4pH7H0 - status: active - - kid: auth-tokens-es384-202409 - kty: EC - use: sig - alg: ES384 - crv: P-384 - x: hjdKc0r8jvVHJ7S9mP0y0mU9bqN7v5PxS21SwclTzfc - y: yk6J3pz4TUpymN4mG-6th3dYvJ5N1lQvDK0PLuFv3Pg - status: retiring +openapi: 3.1.0 +info: + title: StellaOps Authority Authentication API + summary: Token issuance, introspection, revocation, and key discovery endpoints exposed by the Authority service. + description: | + The Authority service issues OAuth 2.1 access tokens for StellaOps components, enforcing tenant and scope + restrictions configured per client. This specification describes the authentication surface only; domain APIs + are documented by their owning services. + version: 0.1.0 +jsonSchemaDialect: https://json-schema.org/draft/2020-12/schema +servers: + - url: https://authority.stellaops.local + description: Example Authority deployment +tags: + - name: Authentication + description: OAuth 2.1 token exchange, introspection, and revocation flows. + - name: Keys + description: JSON Web Key Set discovery. +components: + securitySchemes: + ClientSecretBasic: + type: http + scheme: basic + description: HTTP Basic authentication with `client_id` and `client_secret`. + OAuthPassword: + type: oauth2 + description: Resource owner password exchange for Authority-managed identities. + flows: + password: + tokenUrl: /token + refreshUrl: /token + scopes: + advisory:ingest: Submit advisory ingestion payloads. + advisory:read: Read advisory ingestion data. + aoc:verify: Execute Aggregation-Only Contract verification workflows. + authority.audit.read: Read Authority audit logs. + authority.clients.manage: Manage Authority client registrations. + authority.users.manage: Manage Authority users. + authority:tenants.read: Read the Authority tenant catalog. + concelier.jobs.trigger: Trigger Concelier aggregation jobs. + concelier.merge: Manage Concelier merge operations. + effective:write: Write effective findings (Policy Engine service identity only). + email: Access email claim data. + exceptions:approve: Approve exception workflows. + findings:read: Read effective findings emitted by Policy Engine. + graph:export: Export graph artefacts. + graph:read: Read graph explorer data. + graph:simulate: Run graph what-if simulations. + graph:write: Enqueue or mutate graph build jobs. + offline_access: Request refresh tokens for offline access. + openid: Request OpenID Connect identity tokens. + orch:operate: Execute privileged Orchestrator control actions. + orch:read: Read Orchestrator job state. + policy:author: Author Policy Studio drafts and workspaces. + policy:activate: Activate policy revisions. + policy:approve: Approve or reject policy drafts. + policy:audit: Inspect Policy Studio audit history. + policy:edit: Edit policy definitions. + policy:operate: Operate Policy Studio promotions and runs. + policy:read: Read policy definitions and metadata. + policy:run: Trigger policy executions. + policy:submit: Submit policy drafts for review. + policy:review: Review Policy Studio drafts and leave feedback. 
+ policy:simulate: Execute Policy Studio simulations. + policy:write: Create or update policy drafts. + profile: Access profile claim data. + signals:admin: Administer Signals ingestion and routing settings. + signals:read: Read Signals events and state. + signals:write: Publish Signals events or mutate state. + stellaops.bypass: Bypass trust boundary protections (restricted identities only). + ui.read: Read Console UX resources. + vex:ingest: Submit VEX ingestion payloads. + vex:read: Read VEX ingestion data. + vuln:read: Read vulnerability permalinks and overlays. + authorizationCode: + authorizationUrl: /authorize + tokenUrl: /token + refreshUrl: /token + scopes: + advisory:ingest: Submit advisory ingestion payloads. + advisory:read: Read advisory ingestion data. + aoc:verify: Execute Aggregation-Only Contract verification workflows. + authority.audit.read: Read Authority audit logs. + authority.clients.manage: Manage Authority client registrations. + authority.users.manage: Manage Authority users. + authority:tenants.read: Read the Authority tenant catalog. + concelier.jobs.trigger: Trigger Concelier aggregation jobs. + concelier.merge: Manage Concelier merge operations. + effective:write: Write effective findings (Policy Engine service identity only). + email: Access email claim data. + exceptions:approve: Approve exception workflows. + findings:read: Read effective findings emitted by Policy Engine. + graph:export: Export graph artefacts. + graph:read: Read graph explorer data. + graph:simulate: Run graph what-if simulations. + graph:write: Enqueue or mutate graph build jobs. + offline_access: Request refresh tokens for offline access. + openid: Request OpenID Connect identity tokens. + orch:operate: Execute privileged Orchestrator control actions. + orch:read: Read Orchestrator job state. + policy:author: Author Policy Studio drafts and workspaces. + policy:activate: Activate policy revisions. + policy:approve: Approve or reject policy drafts. + policy:audit: Inspect Policy Studio audit history. + policy:edit: Edit policy definitions. + policy:operate: Operate Policy Studio promotions and runs. + policy:read: Read policy definitions and metadata. + policy:run: Trigger policy executions. + policy:submit: Submit policy drafts for review. + policy:review: Review Policy Studio drafts and leave feedback. + policy:simulate: Execute Policy Studio simulations. + policy:write: Create or update policy drafts. + profile: Access profile claim data. + signals:admin: Administer Signals ingestion and routing settings. + signals:read: Read Signals events and state. + signals:write: Publish Signals events or mutate state. + stellaops.bypass: Bypass trust boundary protections (restricted identities only). + ui.read: Read Console UX resources. + vex:ingest: Submit VEX ingestion payloads. + vex:read: Read VEX ingestion data. + vuln:read: Read vulnerability permalinks and overlays. + OAuthClientCredentials: + type: oauth2 + description: Client credential exchange for machine-to-machine identities. + flows: + clientCredentials: + tokenUrl: /token + scopes: + advisory:ingest: Submit advisory ingestion payloads. + advisory:read: Read advisory ingestion data. + aoc:verify: Execute Aggregation-Only Contract verification workflows. + authority.audit.read: Read Authority audit logs. + authority.clients.manage: Manage Authority client registrations. + authority.users.manage: Manage Authority users. + authority:tenants.read: Read the Authority tenant catalog. 
+ concelier.jobs.trigger: Trigger Concelier aggregation jobs. + concelier.merge: Manage Concelier merge operations. + effective:write: Write effective findings (Policy Engine service identity only). + email: Access email claim data. + exceptions:approve: Approve exception workflows. + findings:read: Read effective findings emitted by Policy Engine. + graph:export: Export graph artefacts. + graph:read: Read graph explorer data. + graph:simulate: Run graph what-if simulations. + graph:write: Enqueue or mutate graph build jobs. + offline_access: Request refresh tokens for offline access. + openid: Request OpenID Connect identity tokens. + orch:operate: Execute privileged Orchestrator control actions. + orch:read: Read Orchestrator job state. + policy:author: Author Policy Studio drafts and workspaces. + policy:activate: Activate policy revisions. + policy:approve: Approve or reject policy drafts. + policy:audit: Inspect Policy Studio audit history. + policy:edit: Edit policy definitions. + policy:operate: Operate Policy Studio promotions and runs. + policy:read: Read policy definitions and metadata. + policy:run: Trigger policy executions. + policy:submit: Submit policy drafts for review. + policy:review: Review Policy Studio drafts and leave feedback. + policy:simulate: Execute Policy Studio simulations. + policy:write: Create or update policy drafts. + profile: Access profile claim data. + signals:admin: Administer Signals ingestion and routing settings. + signals:read: Read Signals events and state. + signals:write: Publish Signals events or mutate state. + stellaops.bypass: Bypass trust boundary protections (restricted identities only). + ui.read: Read Console UX resources. + vex:ingest: Submit VEX ingestion payloads. + vex:read: Read VEX ingestion data. + vuln:read: Read vulnerability permalinks and overlays. + schemas: + TokenResponse: + type: object + description: OAuth 2.1 bearer token response. + properties: + access_token: + type: string + description: Access token encoded as JWT. + token_type: + type: string + description: Token type indicator. Always `Bearer`. + expires_in: + type: integer + description: Lifetime of the access token, in seconds. + minimum: 1 + refresh_token: + type: string + description: Refresh token issued when the grant allows offline access. + scope: + type: string + description: Space-delimited scopes granted in the response. + id_token: + type: string + description: ID token issued for authorization-code flows. + required: + - access_token + - token_type + - expires_in + OAuthErrorResponse: + type: object + description: RFC 6749 compliant error envelope. + properties: + error: + type: string + description: Machine-readable error code. + error_description: + type: string + description: Human-readable error description. + error_uri: + type: string + format: uri + description: Link to documentation about the error. + required: + - error + PasswordGrantRequest: + type: object + required: + - grant_type + - client_id + - username + - password + properties: + grant_type: + type: string + const: password + client_id: + type: string + description: Registered client identifier. May also be supplied via HTTP Basic auth. + client_secret: + type: string + description: Client secret. Required for confidential clients when not using HTTP Basic auth. + scope: + type: string + description: Space-delimited scopes being requested. + username: + type: string + description: Resource owner username. + password: + type: string + description: Resource owner password. 
+ authority_provider: + type: string + description: Optional identity provider hint. Required when multiple password-capable providers are registered. + description: Form-encoded payload for password grant exchange. + ClientCredentialsGrantRequest: + type: object + required: + - grant_type + - client_id + properties: + grant_type: + type: string + const: client_credentials + client_id: + type: string + description: Registered client identifier. May also be supplied via HTTP Basic auth. + client_secret: + type: string + description: Client secret. Required for confidential clients when not using HTTP Basic auth. + scope: + type: string + description: Space-delimited scopes being requested. + authority_provider: + type: string + description: Optional identity provider hint for plugin-backed clients. + operator_reason: + type: string + description: Required when requesting `orch:operate`; explains the operator action. + maxLength: 256 + operator_ticket: + type: string + description: Required when requesting `orch:operate`; tracks the external change ticket or incident. + maxLength: 128 + description: Form-encoded payload for client credentials exchange. + RefreshTokenGrantRequest: + type: object + required: + - grant_type + - refresh_token + properties: + grant_type: + type: string + const: refresh_token + client_id: + type: string + description: Registered client identifier. May also be supplied via HTTP Basic auth. + client_secret: + type: string + description: Client secret. Required for confidential clients when not using HTTP Basic auth. + refresh_token: + type: string + description: Previously issued refresh token. + scope: + type: string + description: Optional scope list to narrow the requested access. + description: Form-encoded payload for refresh token exchange. + RevocationRequest: + type: object + required: + - token + properties: + token: + type: string + description: Token value or token identifier to revoke. + token_type_hint: + type: string + description: Optional token type hint (`access_token` or `refresh_token`). + description: Form-encoded payload for token revocation. + IntrospectionRequest: + type: object + required: + - token + properties: + token: + type: string + description: Token value whose state should be introspected. + token_type_hint: + type: string + description: Optional token type hint (`access_token` or `refresh_token`). + description: Form-encoded payload for token introspection. + IntrospectionResponse: + type: object + description: Active token descriptor compliant with RFC 7662. + properties: + active: + type: boolean + description: Indicates whether the token is currently active. + scope: + type: string + description: Space-delimited list of scopes granted to the token. + client_id: + type: string + description: Client identifier associated with the token. + sub: + type: string + description: Subject identifier when the token represents an end-user. + username: + type: string + description: Preferred username associated with the subject. + token_type: + type: string + description: Type of the token (e.g., `Bearer`). + exp: + type: integer + description: Expiration timestamp (seconds since UNIX epoch). + iat: + type: integer + description: Issued-at timestamp (seconds since UNIX epoch). + nbf: + type: integer + description: Not-before timestamp (seconds since UNIX epoch). + aud: + type: array + description: Audience values associated with the token. + items: + type: string + iss: + type: string + description: Issuer identifier. 
+ jti: + type: string + description: JWT identifier corresponding to the token. + tenant: + type: string + description: Tenant associated with the token, when assigned. + confirmation: + type: object + description: Sender-constrained confirmation data (e.g., mTLS thumbprint, DPoP JWK thumbprint). + required: + - active + JwksDocument: + type: object + description: JSON Web Key Set published by the Authority. + properties: + keys: + type: array + items: + $ref: '#/components/schemas/Jwk' + required: + - keys + Jwk: + type: object + description: Public key material for token signature validation. + properties: + kid: + type: string + description: Key identifier. + kty: + type: string + description: Key type (e.g., `EC`, `RSA`). + use: + type: string + description: Intended key use (`sig`). + alg: + type: string + description: Signing algorithm (e.g., `ES384`). + crv: + type: string + description: Elliptic curve identifier when applicable. + x: + type: string + description: X coordinate for EC keys. + y: + type: string + description: Y coordinate for EC keys. + status: + type: string + description: Operational status metadata for the key (e.g., `active`, `retiring`). +paths: + /token: + post: + tags: + - Authentication + summary: Exchange credentials for tokens + description: | + Issues OAuth 2.1 bearer tokens for StellaOps clients. Supports password, client credentials, + authorization-code, device, and refresh token grants. Confidential clients must authenticate using + HTTP Basic auth or `client_secret` form fields. + security: + - ClientSecretBasic: [] + - {} + requestBody: + required: true + content: + application/x-www-form-urlencoded: + schema: + oneOf: + - $ref: '#/components/schemas/PasswordGrantRequest' + - $ref: '#/components/schemas/ClientCredentialsGrantRequest' + - $ref: '#/components/schemas/RefreshTokenGrantRequest' + encoding: + authority_provider: + style: form + explode: false + examples: + passwordGrant: + summary: Password grant for tenant-scoped ingestion bot + value: + grant_type: password + client_id: ingest-cli + client_secret: s3cr3t + username: ingest-bot + password: pa55w0rd! + scope: advisory:ingest vex:ingest + authority_provider: primary-directory + authorizationCode: + summary: Authorization code exchange for Console UI session + value: + grant_type: authorization_code + client_id: console-ui + code: 2Lba1WtwPLfZ2b0Z9uPrsQ + redirect_uri: https://console.stellaops.local/auth/callback + code_verifier: g3ZnL91QJ6i4zO_86oI4CDnZ7gS0bSeK + clientCredentials: + summary: Client credentials exchange for Policy Engine + value: + grant_type: client_credentials + client_id: policy-engine + client_secret: 9c39f602-2f2b-4f29 + scope: effective:write findings:read + operator_reason: Deploying policy change 1234 + operator_ticket: CHG-004211 + refreshToken: + summary: Refresh token rotation for console session + value: + grant_type: refresh_token + client_id: console-ui + refresh_token: 0.rg9pVlsGzXE8Q + responses: + '200': + description: Token exchange succeeded. + content: + application/json: + schema: + $ref: '#/components/schemas/TokenResponse' + examples: + passwordGrant: + summary: Password grant success response + value: + access_token: eyJhbGciOiJFUzM4NCIsInR5cCI6IkpXVCJ9... + token_type: Bearer + expires_in: 3600 + refresh_token: OxGdVtZJ-mk49cFd38uRUw + scope: advisory:ingest vex:ingest + clientCredentials: + summary: Client credentials success response + value: + access_token: eyJhbGciOiJFUzM4NCIsInR5cCI6IkpXVCJ9... 
+ token_type: Bearer + expires_in: 900 + scope: effective:write findings:read + authorizationCode: + summary: Authorization code success response + value: + access_token: eyJhbGciOiJFUzM4NCIsInR5cCI6IkpXVCJ9... + token_type: Bearer + expires_in: 900 + refresh_token: VxKpc9Vj9QjYV6gLrhQHTw + scope: ui.read authority:tenants.read + id_token: eyJhbGciOiJFUzM4NCIsImtpZCI6ImNvbnNvbGUifQ... + '400': + description: Malformed request, unsupported grant type, or invalid credentials. + content: + application/json: + schema: + $ref: '#/components/schemas/OAuthErrorResponse' + examples: + invalidProvider: + summary: Unknown identity provider hint + value: + error: invalid_request + error_description: "Unknown identity provider 'legacy-directory'." + invalidScope: + summary: Scope not permitted for client + value: + error: invalid_scope + error_description: Scope 'effective:write' is not permitted for this client. + '401': + description: Client authentication failed. + content: + application/json: + schema: + $ref: '#/components/schemas/OAuthErrorResponse' + examples: + badClientSecret: + summary: Invalid client secret + value: + error: invalid_client + error_description: Client authentication failed. + /revoke: + post: + tags: + - Authentication + summary: Revoke an access or refresh token + security: + - ClientSecretBasic: [] + requestBody: + required: true + content: + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/RevocationRequest' + examples: + revokeRefreshToken: + summary: Revoke refresh token after logout + value: + token: 0.rg9pVlsGzXE8Q + token_type_hint: refresh_token + responses: + '200': + description: Token revoked or already invalid. The response body is intentionally blank. + '400': + description: Malformed request. + content: + application/json: + schema: + $ref: '#/components/schemas/OAuthErrorResponse' + examples: + missingToken: + summary: Token parameter omitted + value: + error: invalid_request + error_description: The revocation request is missing the token parameter. + '401': + description: Client authentication failed. + content: + application/json: + schema: + $ref: '#/components/schemas/OAuthErrorResponse' + examples: + badClientSecret: + summary: Invalid client credentials + value: + error: invalid_client + error_description: Client authentication failed. + /introspect: + post: + tags: + - Authentication + summary: Introspect token state + description: Returns the active status and claims for a given token. Requires a privileged client. + security: + - ClientSecretBasic: [] + requestBody: + required: true + content: + application/x-www-form-urlencoded: + schema: + $ref: '#/components/schemas/IntrospectionRequest' + examples: + introspectToken: + summary: Validate an access token issued to Orchestrator + value: + token: eyJhbGciOiJFUzM4NCIsInR5cCI6IkpXVCJ9... + token_type_hint: access_token + responses: + '200': + description: Token state evaluated. 
+ content: + application/json: + schema: + $ref: '#/components/schemas/IntrospectionResponse' + examples: + activeToken: + summary: Active token response + value: + active: true + scope: orch:operate orch:read + client_id: orch-control + sub: operator-7f12 + username: ops.engineer@tenant.example + token_type: Bearer + exp: 1761628800 + iat: 1761625200 + nbf: 1761625200 + iss: https://authority.stellaops.local + aud: + - https://orch.stellaops.local + jti: 01J8KYRAMG7FWBPRRV5XG20T7S + tenant: tenant-alpha + confirmation: + mtls_thumbprint: 079871b8c9a0f2e6 + inactiveToken: + summary: Revoked token response + value: + active: false + '400': + description: Malformed request. + content: + application/json: + schema: + $ref: '#/components/schemas/OAuthErrorResponse' + examples: + missingToken: + summary: Token missing + value: + error: invalid_request + error_description: token parameter is required. + '401': + description: Client authentication failed or client lacks introspection permission. + content: + application/json: + schema: + $ref: '#/components/schemas/OAuthErrorResponse' + examples: + unauthorizedClient: + summary: Client not allowed to introspect tokens + value: + error: invalid_client + error_description: Client authentication failed. + /jwks: + get: + tags: + - Keys + summary: Retrieve signing keys + description: Returns the JSON Web Key Set used to validate Authority-issued tokens. + responses: + '200': + description: JWKS document. + headers: + Cache-Control: + schema: + type: string + description: Standard caching headers apply; keys rotate infrequently. + content: + application/json: + schema: + $ref: '#/components/schemas/JwksDocument' + examples: + ecKeySet: + summary: EC signing keys + value: + keys: + - kid: auth-tokens-es384-202510 + kty: EC + use: sig + alg: ES384 + crv: P-384 + x: 7UchU5R77LtChrJx6uWg9mYjFvV6RIpSgZPDIj7d1q0 + y: v98nHe8a7mGZ9Fn1t4Jp9PTJv1ma35QPmhUrE4pH7H0 + status: active + - kid: auth-tokens-es384-202409 + kty: EC + use: sig + alg: ES384 + crv: P-384 + x: hjdKc0r8jvVHJ7S9mP0y0mU9bqN7v5PxS21SwclTzfc + y: yk6J3pz4TUpymN4mG-6th3dYvJ5N1lQvDK0PLuFv3Pg + status: retiring diff --git a/src/StellaOps.Attestor.Envelope/AGENTS.md b/src/Attestor/StellaOps.Attestor.Envelope/AGENTS.md similarity index 98% rename from src/StellaOps.Attestor.Envelope/AGENTS.md rename to src/Attestor/StellaOps.Attestor.Envelope/AGENTS.md index 78139cc3..1a284b71 100644 --- a/src/StellaOps.Attestor.Envelope/AGENTS.md +++ b/src/Attestor/StellaOps.Attestor.Envelope/AGENTS.md @@ -1,15 +1,15 @@ -# Attestation Envelope Guild Charter - -## Mission -Provide deterministic DSSE envelope handling with multi-signature support, canonical serialization, hashing, and integrity safeguards for all Stella attestations. - -## Scope -- DSSE encoding/decoding, canonical JSON handling, and detached payload support. -- Multi-signature verification, key identification, and cryptographic primitives. -- Integration with KMS drivers and transparency log witness utilities. -- Fuzz and property testing for envelope parsing and normalization. - -## Definition of Done -- Envelope APIs produce canonical payloads and support multiple signatures deterministically. -- Verification detects tampering, mismatched subjects, and unsupported algorithms. -- Property and fuzz tests cover canonicalization and signature edge cases. 
+# Attestation Envelope Guild Charter + +## Mission +Provide deterministic DSSE envelope handling with multi-signature support, canonical serialization, hashing, and integrity safeguards for all Stella attestations. + +## Scope +- DSSE encoding/decoding, canonical JSON handling, and detached payload support. +- Multi-signature verification, key identification, and cryptographic primitives. +- Integration with KMS drivers and transparency log witness utilities. +- Fuzz and property testing for envelope parsing and normalization. + +## Definition of Done +- Envelope APIs produce canonical payloads and support multiple signatures deterministically. +- Verification detects tampering, mismatched subjects, and unsupported algorithms. +- Property and fuzz tests cover canonicalization and signature edge cases. diff --git a/src/StellaOps.Attestor.Envelope/TASKS.md b/src/Attestor/StellaOps.Attestor.Envelope/TASKS.md similarity index 99% rename from src/StellaOps.Attestor.Envelope/TASKS.md rename to src/Attestor/StellaOps.Attestor.Envelope/TASKS.md index 7e31de53..2ac75e5c 100644 --- a/src/StellaOps.Attestor.Envelope/TASKS.md +++ b/src/Attestor/StellaOps.Attestor.Envelope/TASKS.md @@ -1,13 +1,13 @@ -# Attestation Envelope Task Board — Epic 19: Attestor Console - -## Sprint 72 – Foundations -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| ATTEST-ENVELOPE-72-001 | TODO | Envelope Guild | — | Implement DSSE canonicalization, JSON normalization, multi-signature structures, and hashing helpers. | Canonicalization deterministic (property tests); hash matches DSSE spec; unit tests green. | -| ATTEST-ENVELOPE-72-002 | TODO | Envelope Guild | ATTEST-ENVELOPE-72-001 | Support compact and expanded JSON output, payload compression, and detached payload references. | API returns both variants; payload compression toggles tested; docs updated. | - -## Sprint 73 – Crypto Integration -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| ATTEST-ENVELOPE-73-001 | TODO | Envelope Guild, KMS Guild | ATTEST-ENVELOPE-72-001 | Implement Ed25519 & ECDSA signature create/verify helpers, key identification (`keyid`) scheme, and error mapping. | Sign/verify tests pass with fixtures; invalid signatures produce deterministic errors. | -| ATTEST-ENVELOPE-73-002 | TODO | Envelope Guild | ATTEST-ENVELOPE-73-001 | Add fuzz tests for envelope parsing, signature verification, and canonical JSON round-trips. | Fuzz suite integrated; coverage metrics recorded; no regressions. | +# Attestation Envelope Task Board — Epic 19: Attestor Console + +## Sprint 72 – Foundations +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| ATTEST-ENVELOPE-72-001 | TODO | Envelope Guild | — | Implement DSSE canonicalization, JSON normalization, multi-signature structures, and hashing helpers. | Canonicalization deterministic (property tests); hash matches DSSE spec; unit tests green. | +| ATTEST-ENVELOPE-72-002 | TODO | Envelope Guild | ATTEST-ENVELOPE-72-001 | Support compact and expanded JSON output, payload compression, and detached payload references. | API returns both variants; payload compression toggles tested; docs updated. 
| + +## Sprint 73 – Crypto Integration +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| ATTEST-ENVELOPE-73-001 | TODO | Envelope Guild, KMS Guild | ATTEST-ENVELOPE-72-001 | Implement Ed25519 & ECDSA signature create/verify helpers, key identification (`keyid`) scheme, and error mapping. | Sign/verify tests pass with fixtures; invalid signatures produce deterministic errors. | +| ATTEST-ENVELOPE-73-002 | TODO | Envelope Guild | ATTEST-ENVELOPE-73-001 | Add fuzz tests for envelope parsing, signature verification, and canonical JSON round-trips. | Fuzz suite integrated; coverage metrics recorded; no regressions. | diff --git a/src/StellaOps.Attestor.Types/AGENTS.md b/src/Attestor/StellaOps.Attestor.Types/AGENTS.md similarity index 97% rename from src/StellaOps.Attestor.Types/AGENTS.md rename to src/Attestor/StellaOps.Attestor.Types/AGENTS.md index b0d74b61..8f1b79af 100644 --- a/src/StellaOps.Attestor.Types/AGENTS.md +++ b/src/Attestor/StellaOps.Attestor.Types/AGENTS.md @@ -1,14 +1,14 @@ -# Attestation Payloads Guild Charter - -## Mission -Define strongly typed, versioned schemas for all attestation payloads and provide validation utilities for generating and verifying evidence. - -## Scope -- JSON Schemas, code generation, and documentation for each attestation type. -- Normalization and validation logic shared across services, CLI, and SDKs. -- Sample payloads and golden fixtures used in contract tests and docs. - -## Definition of Done -- Payload types compiled into Go/TypeScript models with validation helpers. -- Schemas published with semantic versioning and change logs. -- Golden samples maintained with acceptance tests and doc integration. +# Attestation Payloads Guild Charter + +## Mission +Define strongly typed, versioned schemas for all attestation payloads and provide validation utilities for generating and verifying evidence. + +## Scope +- JSON Schemas, code generation, and documentation for each attestation type. +- Normalization and validation logic shared across services, CLI, and SDKs. +- Sample payloads and golden fixtures used in contract tests and docs. + +## Definition of Done +- Payload types compiled into Go/TypeScript models with validation helpers. +- Schemas published with semantic versioning and change logs. +- Golden samples maintained with acceptance tests and doc integration. diff --git a/src/StellaOps.Attestor.Types/TASKS.md b/src/Attestor/StellaOps.Attestor.Types/TASKS.md similarity index 99% rename from src/StellaOps.Attestor.Types/TASKS.md rename to src/Attestor/StellaOps.Attestor.Types/TASKS.md index fbdb8d02..d2eb61f3 100644 --- a/src/StellaOps.Attestor.Types/TASKS.md +++ b/src/Attestor/StellaOps.Attestor.Types/TASKS.md @@ -1,13 +1,13 @@ -# Attestation Payloads Task Board — Epic 19: Attestor Console - -## Sprint 72 – Schema Definition -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| ATTEST-TYPES-72-001 | TODO | Attestation Payloads Guild | — | Draft JSON Schemas for BuildProvenance v1, SBOMAttestation v1, VEXAttestation v1, ScanResults v1, PolicyEvaluation v1, RiskProfileEvidence v1, CustomEvidence v1. | Schemas validated with test fixtures; docs stubbed; versioned under `schemas/`. | -| ATTEST-TYPES-72-002 | TODO | Attestation Payloads Guild | ATTEST-TYPES-72-001 | Generate Go/TS models from schemas with validation helpers and canonical JSON serialization. 
| Code generation integrated; lints pass; unit tests cover round-trips. | - -## Sprint 73 – Fixtures & Docs -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| ATTEST-TYPES-73-001 | TODO | Attestation Payloads Guild | ATTEST-TYPES-72-002 | Create golden payload samples for each type; integrate into tests and documentation. | Golden fixtures stored; tests compare outputs; docs embed examples. | -| ATTEST-TYPES-73-002 | TODO | Attestation Payloads Guild, Docs Guild | ATTEST-TYPES-73-001 | Publish schema reference docs (`/docs/attestor/payloads.md`) with annotated JSON examples. | Doc merged with banner; examples validated by tests. | +# Attestation Payloads Task Board — Epic 19: Attestor Console + +## Sprint 72 – Schema Definition +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| ATTEST-TYPES-72-001 | TODO | Attestation Payloads Guild | — | Draft JSON Schemas for BuildProvenance v1, SBOMAttestation v1, VEXAttestation v1, ScanResults v1, PolicyEvaluation v1, RiskProfileEvidence v1, CustomEvidence v1. | Schemas validated with test fixtures; docs stubbed; versioned under `schemas/`. | +| ATTEST-TYPES-72-002 | TODO | Attestation Payloads Guild | ATTEST-TYPES-72-001 | Generate Go/TS models from schemas with validation helpers and canonical JSON serialization. | Code generation integrated; lints pass; unit tests cover round-trips. | + +## Sprint 73 – Fixtures & Docs +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| ATTEST-TYPES-73-001 | TODO | Attestation Payloads Guild | ATTEST-TYPES-72-002 | Create golden payload samples for each type; integrate into tests and documentation. | Golden fixtures stored; tests compare outputs; docs embed examples. | +| ATTEST-TYPES-73-002 | TODO | Attestation Payloads Guild, Docs Guild | ATTEST-TYPES-73-001 | Publish schema reference docs (`/docs/attestor/payloads.md`) with annotated JSON examples. | Doc merged with banner; examples validated by tests. | diff --git a/src/StellaOps.Attestor.Verify/AGENTS.md b/src/Attestor/StellaOps.Attestor.Verify/AGENTS.md similarity index 98% rename from src/StellaOps.Attestor.Verify/AGENTS.md rename to src/Attestor/StellaOps.Attestor.Verify/AGENTS.md index d7aa85e7..a9270df3 100644 --- a/src/StellaOps.Attestor.Verify/AGENTS.md +++ b/src/Attestor/StellaOps.Attestor.Verify/AGENTS.md @@ -1,14 +1,14 @@ -# Attestation Verification Guild Charter - -## Mission -Implement the verification engine that enforces attestation policies, issuer trust, transparency requirements, and produces audit-ready reports. - -## Scope -- Verification pipeline integrating DSSE validation, issuer/key trust, Policy Studio rules, freshness checks, and transparency proofs. -- Caching and reporting for verification results. -- Error codes and explainability artifacts for UI/CLI consumption. - -## Definition of Done -- Verification passes/fails deterministically with detailed report structures. -- Caching improves performance without sacrificing correctness. -- Policies enforce scope-based rules and waivers, with unit/integration coverage. +# Attestation Verification Guild Charter + +## Mission +Implement the verification engine that enforces attestation policies, issuer trust, transparency requirements, and produces audit-ready reports. 
+ +## Scope +- Verification pipeline integrating DSSE validation, issuer/key trust, Policy Studio rules, freshness checks, and transparency proofs. +- Caching and reporting for verification results. +- Error codes and explainability artifacts for UI/CLI consumption. + +## Definition of Done +- Verification passes/fails deterministically with detailed report structures. +- Caching improves performance without sacrificing correctness. +- Policies enforce scope-based rules and waivers, with unit/integration coverage. diff --git a/src/StellaOps.Attestor.Verify/TASKS.md b/src/Attestor/StellaOps.Attestor.Verify/TASKS.md similarity index 99% rename from src/StellaOps.Attestor.Verify/TASKS.md rename to src/Attestor/StellaOps.Attestor.Verify/TASKS.md index ba882811..f4c3c5cf 100644 --- a/src/StellaOps.Attestor.Verify/TASKS.md +++ b/src/Attestor/StellaOps.Attestor.Verify/TASKS.md @@ -1,13 +1,13 @@ -# Attestation Verification Task Board — Epic 19: Attestor Console - -## Sprint 73 – Policy Integration -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| ATTEST-VERIFY-73-001 | TODO | Verification Guild, Policy Guild | VERPOL-73-001, ATTESTOR-73-002 | Implement verification engine: policy evaluation, issuer trust resolution, freshness, signature count, transparency checks; produce structured reports. | Engine returns report DTOs; policy rules honored; unit tests cover pass/fail scenarios. | -| ATTEST-VERIFY-73-002 | TODO | Verification Guild | ATTEST-VERIFY-73-001 | Add caching layer keyed by `(subject, envelope_id, policy_version)` with TTL and invalidation on new evidence. | Cache reduces repeated verification cost; tests cover cache hits/misses. | - -## Sprint 74 – Explainability & Observability -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| ATTEST-VERIFY-74-001 | TODO | Verification Guild, Observability Guild | ATTEST-VERIFY-73-001 | Emit telemetry (spans/metrics) tagged by subject, issuer, policy, result; integrate with dashboards. | Metrics visible; spans present; SLO thresholds defined. | -| ATTEST-VERIFY-74-002 | TODO | Verification Guild, Docs Guild | ATTEST-VERIFY-73-001 | Document verification report schema and explainability in `/docs/attestor/workflows.md`. | Documentation merged; examples verified via tests. | +# Attestation Verification Task Board — Epic 19: Attestor Console + +## Sprint 73 – Policy Integration +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| ATTEST-VERIFY-73-001 | TODO | Verification Guild, Policy Guild | VERPOL-73-001, ATTESTOR-73-002 | Implement verification engine: policy evaluation, issuer trust resolution, freshness, signature count, transparency checks; produce structured reports. | Engine returns report DTOs; policy rules honored; unit tests cover pass/fail scenarios. | +| ATTEST-VERIFY-73-002 | TODO | Verification Guild | ATTEST-VERIFY-73-001 | Add caching layer keyed by `(subject, envelope_id, policy_version)` with TTL and invalidation on new evidence. | Cache reduces repeated verification cost; tests cover cache hits/misses. 
| + +## Sprint 74 – Explainability & Observability +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| ATTEST-VERIFY-74-001 | TODO | Verification Guild, Observability Guild | ATTEST-VERIFY-73-001 | Emit telemetry (spans/metrics) tagged by subject, issuer, policy, result; integrate with dashboards. | Metrics visible; spans present; SLO thresholds defined. | +| ATTEST-VERIFY-74-002 | TODO | Verification Guild, Docs Guild | ATTEST-VERIFY-73-001 | Document verification report schema and explainability in `/docs/attestor/workflows.md`. | Documentation merged; examples verified via tests. | diff --git a/src/Attestor/StellaOps.Attestor.sln b/src/Attestor/StellaOps.Attestor.sln new file mode 100644 index 00000000..6f62a526 --- /dev/null +++ b/src/Attestor/StellaOps.Attestor.sln @@ -0,0 +1,182 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.0.31903.59 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "StellaOps.Attestor", "StellaOps.Attestor", "{78C966F5-2242-D8EC-ADCA-A1A9C7F723A6}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Attestor.Core", "StellaOps.Attestor\StellaOps.Attestor.Core\StellaOps.Attestor.Core.csproj", "{D44872A3-772A-43D7-B340-61253543F02B}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Attestor.Infrastructure", "StellaOps.Attestor\StellaOps.Attestor.Infrastructure\StellaOps.Attestor.Infrastructure.csproj", "{BFADAB55-9D9D-456F-987B-A4536027BA77}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Attestor.Tests", "StellaOps.Attestor\StellaOps.Attestor.Tests\StellaOps.Attestor.Tests.csproj", "{E2546302-F0CD-43E6-9CD6-D4B5E711454C}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Attestor.WebService", "StellaOps.Attestor\StellaOps.Attestor.WebService\StellaOps.Attestor.WebService.csproj", "{39CCDD3E-5802-4E72-BE0F-25F7172C74E6}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Configuration", "..\__Libraries\StellaOps.Configuration\StellaOps.Configuration.csproj", "{0792B7D7-E298-4639-B3DC-AFAF427810E9}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Authority.Plugins.Abstractions", "..\Authority\StellaOps.Authority\StellaOps.Authority.Plugins.Abstractions\StellaOps.Authority.Plugins.Abstractions.csproj", "{E93D1212-2745-4AD7-AD42-7666952A60C5}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cryptography", "..\__Libraries\StellaOps.Cryptography\StellaOps.Cryptography.csproj", "{9AE76C3A-0712-4DDA-A751-D0E8D59BD7A1}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Abstractions", "..\Authority\StellaOps.Authority\StellaOps.Auth.Abstractions\StellaOps.Auth.Abstractions.csproj", "{ED2AB277-AA70-4593-869A-BB13DA55FD12}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.DependencyInjection", "..\__Libraries\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj", "{6E844D37-2714-496B-8557-8FA2BF1744E8}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Client", "..\Authority\StellaOps.Authority\StellaOps.Auth.Client\StellaOps.Auth.Client.csproj", "{44EB6890-FB96-405B-8CEC-A1EEB38474CE}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.ServerIntegration", 
"..\Authority\StellaOps.Authority\StellaOps.Auth.ServerIntegration\StellaOps.Auth.ServerIntegration.csproj", "{36FBCE51-0429-4F2B-87FD-95B37941001D}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|Any CPU = Release|Any CPU + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {D44872A3-772A-43D7-B340-61253543F02B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D44872A3-772A-43D7-B340-61253543F02B}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D44872A3-772A-43D7-B340-61253543F02B}.Debug|x64.ActiveCfg = Debug|Any CPU + {D44872A3-772A-43D7-B340-61253543F02B}.Debug|x64.Build.0 = Debug|Any CPU + {D44872A3-772A-43D7-B340-61253543F02B}.Debug|x86.ActiveCfg = Debug|Any CPU + {D44872A3-772A-43D7-B340-61253543F02B}.Debug|x86.Build.0 = Debug|Any CPU + {D44872A3-772A-43D7-B340-61253543F02B}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D44872A3-772A-43D7-B340-61253543F02B}.Release|Any CPU.Build.0 = Release|Any CPU + {D44872A3-772A-43D7-B340-61253543F02B}.Release|x64.ActiveCfg = Release|Any CPU + {D44872A3-772A-43D7-B340-61253543F02B}.Release|x64.Build.0 = Release|Any CPU + {D44872A3-772A-43D7-B340-61253543F02B}.Release|x86.ActiveCfg = Release|Any CPU + {D44872A3-772A-43D7-B340-61253543F02B}.Release|x86.Build.0 = Release|Any CPU + {BFADAB55-9D9D-456F-987B-A4536027BA77}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {BFADAB55-9D9D-456F-987B-A4536027BA77}.Debug|Any CPU.Build.0 = Debug|Any CPU + {BFADAB55-9D9D-456F-987B-A4536027BA77}.Debug|x64.ActiveCfg = Debug|Any CPU + {BFADAB55-9D9D-456F-987B-A4536027BA77}.Debug|x64.Build.0 = Debug|Any CPU + {BFADAB55-9D9D-456F-987B-A4536027BA77}.Debug|x86.ActiveCfg = Debug|Any CPU + {BFADAB55-9D9D-456F-987B-A4536027BA77}.Debug|x86.Build.0 = Debug|Any CPU + {BFADAB55-9D9D-456F-987B-A4536027BA77}.Release|Any CPU.ActiveCfg = Release|Any CPU + {BFADAB55-9D9D-456F-987B-A4536027BA77}.Release|Any CPU.Build.0 = Release|Any CPU + {BFADAB55-9D9D-456F-987B-A4536027BA77}.Release|x64.ActiveCfg = Release|Any CPU + {BFADAB55-9D9D-456F-987B-A4536027BA77}.Release|x64.Build.0 = Release|Any CPU + {BFADAB55-9D9D-456F-987B-A4536027BA77}.Release|x86.ActiveCfg = Release|Any CPU + {BFADAB55-9D9D-456F-987B-A4536027BA77}.Release|x86.Build.0 = Release|Any CPU + {E2546302-F0CD-43E6-9CD6-D4B5E711454C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E2546302-F0CD-43E6-9CD6-D4B5E711454C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E2546302-F0CD-43E6-9CD6-D4B5E711454C}.Debug|x64.ActiveCfg = Debug|Any CPU + {E2546302-F0CD-43E6-9CD6-D4B5E711454C}.Debug|x64.Build.0 = Debug|Any CPU + {E2546302-F0CD-43E6-9CD6-D4B5E711454C}.Debug|x86.ActiveCfg = Debug|Any CPU + {E2546302-F0CD-43E6-9CD6-D4B5E711454C}.Debug|x86.Build.0 = Debug|Any CPU + {E2546302-F0CD-43E6-9CD6-D4B5E711454C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E2546302-F0CD-43E6-9CD6-D4B5E711454C}.Release|Any CPU.Build.0 = Release|Any CPU + {E2546302-F0CD-43E6-9CD6-D4B5E711454C}.Release|x64.ActiveCfg = Release|Any CPU + {E2546302-F0CD-43E6-9CD6-D4B5E711454C}.Release|x64.Build.0 = Release|Any CPU + {E2546302-F0CD-43E6-9CD6-D4B5E711454C}.Release|x86.ActiveCfg = Release|Any CPU + {E2546302-F0CD-43E6-9CD6-D4B5E711454C}.Release|x86.Build.0 = Release|Any CPU + {39CCDD3E-5802-4E72-BE0F-25F7172C74E6}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {39CCDD3E-5802-4E72-BE0F-25F7172C74E6}.Debug|Any CPU.Build.0 = Debug|Any CPU + 
{39CCDD3E-5802-4E72-BE0F-25F7172C74E6}.Debug|x64.ActiveCfg = Debug|Any CPU + {39CCDD3E-5802-4E72-BE0F-25F7172C74E6}.Debug|x64.Build.0 = Debug|Any CPU + {39CCDD3E-5802-4E72-BE0F-25F7172C74E6}.Debug|x86.ActiveCfg = Debug|Any CPU + {39CCDD3E-5802-4E72-BE0F-25F7172C74E6}.Debug|x86.Build.0 = Debug|Any CPU + {39CCDD3E-5802-4E72-BE0F-25F7172C74E6}.Release|Any CPU.ActiveCfg = Release|Any CPU + {39CCDD3E-5802-4E72-BE0F-25F7172C74E6}.Release|Any CPU.Build.0 = Release|Any CPU + {39CCDD3E-5802-4E72-BE0F-25F7172C74E6}.Release|x64.ActiveCfg = Release|Any CPU + {39CCDD3E-5802-4E72-BE0F-25F7172C74E6}.Release|x64.Build.0 = Release|Any CPU + {39CCDD3E-5802-4E72-BE0F-25F7172C74E6}.Release|x86.ActiveCfg = Release|Any CPU + {39CCDD3E-5802-4E72-BE0F-25F7172C74E6}.Release|x86.Build.0 = Release|Any CPU + {0792B7D7-E298-4639-B3DC-AFAF427810E9}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {0792B7D7-E298-4639-B3DC-AFAF427810E9}.Debug|Any CPU.Build.0 = Debug|Any CPU + {0792B7D7-E298-4639-B3DC-AFAF427810E9}.Debug|x64.ActiveCfg = Debug|Any CPU + {0792B7D7-E298-4639-B3DC-AFAF427810E9}.Debug|x64.Build.0 = Debug|Any CPU + {0792B7D7-E298-4639-B3DC-AFAF427810E9}.Debug|x86.ActiveCfg = Debug|Any CPU + {0792B7D7-E298-4639-B3DC-AFAF427810E9}.Debug|x86.Build.0 = Debug|Any CPU + {0792B7D7-E298-4639-B3DC-AFAF427810E9}.Release|Any CPU.ActiveCfg = Release|Any CPU + {0792B7D7-E298-4639-B3DC-AFAF427810E9}.Release|Any CPU.Build.0 = Release|Any CPU + {0792B7D7-E298-4639-B3DC-AFAF427810E9}.Release|x64.ActiveCfg = Release|Any CPU + {0792B7D7-E298-4639-B3DC-AFAF427810E9}.Release|x64.Build.0 = Release|Any CPU + {0792B7D7-E298-4639-B3DC-AFAF427810E9}.Release|x86.ActiveCfg = Release|Any CPU + {0792B7D7-E298-4639-B3DC-AFAF427810E9}.Release|x86.Build.0 = Release|Any CPU + {E93D1212-2745-4AD7-AD42-7666952A60C5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E93D1212-2745-4AD7-AD42-7666952A60C5}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E93D1212-2745-4AD7-AD42-7666952A60C5}.Debug|x64.ActiveCfg = Debug|Any CPU + {E93D1212-2745-4AD7-AD42-7666952A60C5}.Debug|x64.Build.0 = Debug|Any CPU + {E93D1212-2745-4AD7-AD42-7666952A60C5}.Debug|x86.ActiveCfg = Debug|Any CPU + {E93D1212-2745-4AD7-AD42-7666952A60C5}.Debug|x86.Build.0 = Debug|Any CPU + {E93D1212-2745-4AD7-AD42-7666952A60C5}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E93D1212-2745-4AD7-AD42-7666952A60C5}.Release|Any CPU.Build.0 = Release|Any CPU + {E93D1212-2745-4AD7-AD42-7666952A60C5}.Release|x64.ActiveCfg = Release|Any CPU + {E93D1212-2745-4AD7-AD42-7666952A60C5}.Release|x64.Build.0 = Release|Any CPU + {E93D1212-2745-4AD7-AD42-7666952A60C5}.Release|x86.ActiveCfg = Release|Any CPU + {E93D1212-2745-4AD7-AD42-7666952A60C5}.Release|x86.Build.0 = Release|Any CPU + {9AE76C3A-0712-4DDA-A751-D0E8D59BD7A1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {9AE76C3A-0712-4DDA-A751-D0E8D59BD7A1}.Debug|Any CPU.Build.0 = Debug|Any CPU + {9AE76C3A-0712-4DDA-A751-D0E8D59BD7A1}.Debug|x64.ActiveCfg = Debug|Any CPU + {9AE76C3A-0712-4DDA-A751-D0E8D59BD7A1}.Debug|x64.Build.0 = Debug|Any CPU + {9AE76C3A-0712-4DDA-A751-D0E8D59BD7A1}.Debug|x86.ActiveCfg = Debug|Any CPU + {9AE76C3A-0712-4DDA-A751-D0E8D59BD7A1}.Debug|x86.Build.0 = Debug|Any CPU + {9AE76C3A-0712-4DDA-A751-D0E8D59BD7A1}.Release|Any CPU.ActiveCfg = Release|Any CPU + {9AE76C3A-0712-4DDA-A751-D0E8D59BD7A1}.Release|Any CPU.Build.0 = Release|Any CPU + {9AE76C3A-0712-4DDA-A751-D0E8D59BD7A1}.Release|x64.ActiveCfg = Release|Any CPU + {9AE76C3A-0712-4DDA-A751-D0E8D59BD7A1}.Release|x64.Build.0 = Release|Any CPU + {9AE76C3A-0712-4DDA-A751-D0E8D59BD7A1}.Release|x86.ActiveCfg = 
Release|Any CPU + {9AE76C3A-0712-4DDA-A751-D0E8D59BD7A1}.Release|x86.Build.0 = Release|Any CPU + {ED2AB277-AA70-4593-869A-BB13DA55FD12}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {ED2AB277-AA70-4593-869A-BB13DA55FD12}.Debug|Any CPU.Build.0 = Debug|Any CPU + {ED2AB277-AA70-4593-869A-BB13DA55FD12}.Debug|x64.ActiveCfg = Debug|Any CPU + {ED2AB277-AA70-4593-869A-BB13DA55FD12}.Debug|x64.Build.0 = Debug|Any CPU + {ED2AB277-AA70-4593-869A-BB13DA55FD12}.Debug|x86.ActiveCfg = Debug|Any CPU + {ED2AB277-AA70-4593-869A-BB13DA55FD12}.Debug|x86.Build.0 = Debug|Any CPU + {ED2AB277-AA70-4593-869A-BB13DA55FD12}.Release|Any CPU.ActiveCfg = Release|Any CPU + {ED2AB277-AA70-4593-869A-BB13DA55FD12}.Release|Any CPU.Build.0 = Release|Any CPU + {ED2AB277-AA70-4593-869A-BB13DA55FD12}.Release|x64.ActiveCfg = Release|Any CPU + {ED2AB277-AA70-4593-869A-BB13DA55FD12}.Release|x64.Build.0 = Release|Any CPU + {ED2AB277-AA70-4593-869A-BB13DA55FD12}.Release|x86.ActiveCfg = Release|Any CPU + {ED2AB277-AA70-4593-869A-BB13DA55FD12}.Release|x86.Build.0 = Release|Any CPU + {6E844D37-2714-496B-8557-8FA2BF1744E8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {6E844D37-2714-496B-8557-8FA2BF1744E8}.Debug|Any CPU.Build.0 = Debug|Any CPU + {6E844D37-2714-496B-8557-8FA2BF1744E8}.Debug|x64.ActiveCfg = Debug|Any CPU + {6E844D37-2714-496B-8557-8FA2BF1744E8}.Debug|x64.Build.0 = Debug|Any CPU + {6E844D37-2714-496B-8557-8FA2BF1744E8}.Debug|x86.ActiveCfg = Debug|Any CPU + {6E844D37-2714-496B-8557-8FA2BF1744E8}.Debug|x86.Build.0 = Debug|Any CPU + {6E844D37-2714-496B-8557-8FA2BF1744E8}.Release|Any CPU.ActiveCfg = Release|Any CPU + {6E844D37-2714-496B-8557-8FA2BF1744E8}.Release|Any CPU.Build.0 = Release|Any CPU + {6E844D37-2714-496B-8557-8FA2BF1744E8}.Release|x64.ActiveCfg = Release|Any CPU + {6E844D37-2714-496B-8557-8FA2BF1744E8}.Release|x64.Build.0 = Release|Any CPU + {6E844D37-2714-496B-8557-8FA2BF1744E8}.Release|x86.ActiveCfg = Release|Any CPU + {6E844D37-2714-496B-8557-8FA2BF1744E8}.Release|x86.Build.0 = Release|Any CPU + {44EB6890-FB96-405B-8CEC-A1EEB38474CE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {44EB6890-FB96-405B-8CEC-A1EEB38474CE}.Debug|Any CPU.Build.0 = Debug|Any CPU + {44EB6890-FB96-405B-8CEC-A1EEB38474CE}.Debug|x64.ActiveCfg = Debug|Any CPU + {44EB6890-FB96-405B-8CEC-A1EEB38474CE}.Debug|x64.Build.0 = Debug|Any CPU + {44EB6890-FB96-405B-8CEC-A1EEB38474CE}.Debug|x86.ActiveCfg = Debug|Any CPU + {44EB6890-FB96-405B-8CEC-A1EEB38474CE}.Debug|x86.Build.0 = Debug|Any CPU + {44EB6890-FB96-405B-8CEC-A1EEB38474CE}.Release|Any CPU.ActiveCfg = Release|Any CPU + {44EB6890-FB96-405B-8CEC-A1EEB38474CE}.Release|Any CPU.Build.0 = Release|Any CPU + {44EB6890-FB96-405B-8CEC-A1EEB38474CE}.Release|x64.ActiveCfg = Release|Any CPU + {44EB6890-FB96-405B-8CEC-A1EEB38474CE}.Release|x64.Build.0 = Release|Any CPU + {44EB6890-FB96-405B-8CEC-A1EEB38474CE}.Release|x86.ActiveCfg = Release|Any CPU + {44EB6890-FB96-405B-8CEC-A1EEB38474CE}.Release|x86.Build.0 = Release|Any CPU + {36FBCE51-0429-4F2B-87FD-95B37941001D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {36FBCE51-0429-4F2B-87FD-95B37941001D}.Debug|Any CPU.Build.0 = Debug|Any CPU + {36FBCE51-0429-4F2B-87FD-95B37941001D}.Debug|x64.ActiveCfg = Debug|Any CPU + {36FBCE51-0429-4F2B-87FD-95B37941001D}.Debug|x64.Build.0 = Debug|Any CPU + {36FBCE51-0429-4F2B-87FD-95B37941001D}.Debug|x86.ActiveCfg = Debug|Any CPU + {36FBCE51-0429-4F2B-87FD-95B37941001D}.Debug|x86.Build.0 = Debug|Any CPU + {36FBCE51-0429-4F2B-87FD-95B37941001D}.Release|Any CPU.ActiveCfg = Release|Any CPU + {36FBCE51-0429-4F2B-87FD-95B37941001D}.Release|Any 
CPU.Build.0 = Release|Any CPU + {36FBCE51-0429-4F2B-87FD-95B37941001D}.Release|x64.ActiveCfg = Release|Any CPU + {36FBCE51-0429-4F2B-87FD-95B37941001D}.Release|x64.Build.0 = Release|Any CPU + {36FBCE51-0429-4F2B-87FD-95B37941001D}.Release|x86.ActiveCfg = Release|Any CPU + {36FBCE51-0429-4F2B-87FD-95B37941001D}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(NestedProjects) = preSolution + {D44872A3-772A-43D7-B340-61253543F02B} = {78C966F5-2242-D8EC-ADCA-A1A9C7F723A6} + {BFADAB55-9D9D-456F-987B-A4536027BA77} = {78C966F5-2242-D8EC-ADCA-A1A9C7F723A6} + {E2546302-F0CD-43E6-9CD6-D4B5E711454C} = {78C966F5-2242-D8EC-ADCA-A1A9C7F723A6} + {39CCDD3E-5802-4E72-BE0F-25F7172C74E6} = {78C966F5-2242-D8EC-ADCA-A1A9C7F723A6} + EndGlobalSection +EndGlobal diff --git a/src/StellaOps.Attestor/AGENTS.md b/src/Attestor/StellaOps.Attestor/AGENTS.md similarity index 78% rename from src/StellaOps.Attestor/AGENTS.md rename to src/Attestor/StellaOps.Attestor/AGENTS.md index 624d10d3..0baead27 100644 --- a/src/StellaOps.Attestor/AGENTS.md +++ b/src/Attestor/StellaOps.Attestor/AGENTS.md @@ -1,24 +1,24 @@ -# Attestor Guild - -## Mission -Operate the StellaOps Attestor service: accept signed DSSE envelopes from the Signer over mTLS, submit them to Rekor v2, persist inclusion proofs, and expose verification APIs for downstream services and operators. - -## Teams On Call -- Team 11 (Attestor API) -- Team 12 (Attestor Observability) — partners on logging, metrics, and alerting - -## Operating Principles -- Enforce mTLS + Authority tokens for every submission; never accept anonymous callers. -- Deterministic hashing, canonical JSON, and idempotent Rekor interactions (`bundleSha256` is the source of truth). -- Persist everything (entries, dedupe, audit) before acknowledging; background jobs must be resumable. -- Structured logs + metrics for each stage (`validate`, `submit`, `proof`, `persist`, `archive`). -- Update `TASKS.md`, architecture docs, and tests whenever behaviour changes. - +# Attestor Guild + +## Mission +Operate the StellaOps Attestor service: accept signed DSSE envelopes from the Signer over mTLS, submit them to Rekor v2, persist inclusion proofs, and expose verification APIs for downstream services and operators. + +## Teams On Call +- Team 11 (Attestor API) +- Team 12 (Attestor Observability) — partners on logging, metrics, and alerting + +## Operating Principles +- Enforce mTLS + Authority tokens for every submission; never accept anonymous callers. +- Deterministic hashing, canonical JSON, and idempotent Rekor interactions (`bundleSha256` is the source of truth). +- Persist everything (entries, dedupe, audit) before acknowledging; background jobs must be resumable. +- Structured logs + metrics for each stage (`validate`, `submit`, `proof`, `persist`, `archive`). +- Update `TASKS.md`, architecture docs, and tests whenever behaviour changes. + ## Key Directories -- `src/StellaOps.Attestor/StellaOps.Attestor.WebService/` — Minimal API host and HTTP surface. -- `src/StellaOps.Attestor/StellaOps.Attestor.Core/` — Domain contracts, submission/verification pipelines. -- `src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/` — Mongo, Redis, Rekor, and archival implementations. -- `src/StellaOps.Attestor/StellaOps.Attestor.Tests/` — Unit and integration tests. +- `src/Attestor/StellaOps.Attestor/StellaOps.Attestor.WebService/` — Minimal API host and HTTP surface. 
+- `src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/` — Domain contracts, submission/verification pipelines.
+- `src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/` — Mongo, Redis, Rekor, and archival implementations.
+- `src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/` — Unit and integration tests.
---
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Audit/AttestorAuditRecord.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Audit/AttestorAuditRecord.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Core/Audit/AttestorAuditRecord.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Audit/AttestorAuditRecord.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Observability/AttestorMetrics.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Observability/AttestorMetrics.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Core/Observability/AttestorMetrics.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Observability/AttestorMetrics.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Options/AttestorOptions.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Options/AttestorOptions.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Core/Options/AttestorOptions.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Options/AttestorOptions.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Rekor/IRekorClient.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Rekor/IRekorClient.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Core/Rekor/IRekorClient.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Rekor/IRekorClient.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Rekor/RekorBackend.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Rekor/RekorBackend.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Core/Rekor/RekorBackend.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Rekor/RekorBackend.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Rekor/RekorProofResponse.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Rekor/RekorProofResponse.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Core/Rekor/RekorProofResponse.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Rekor/RekorProofResponse.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Rekor/RekorSubmissionResponse.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Rekor/RekorSubmissionResponse.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Core/Rekor/RekorSubmissionResponse.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Rekor/RekorSubmissionResponse.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/StellaOps.Attestor.Core.csproj b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/StellaOps.Attestor.Core.csproj
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Core/StellaOps.Attestor.Core.csproj
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/StellaOps.Attestor.Core.csproj
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/AttestorArchiveBundle.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/AttestorArchiveBundle.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/AttestorArchiveBundle.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/AttestorArchiveBundle.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/AttestorEntry.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/AttestorEntry.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/AttestorEntry.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/AttestorEntry.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorArchiveStore.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorArchiveStore.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorArchiveStore.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorArchiveStore.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorAuditSink.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorAuditSink.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorAuditSink.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorAuditSink.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorDedupeStore.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorDedupeStore.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorDedupeStore.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorDedupeStore.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorEntryRepository.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorEntryRepository.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorEntryRepository.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Storage/IAttestorEntryRepository.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionRequest.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionRequest.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionRequest.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionRequest.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionResult.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionResult.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionResult.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionResult.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionValidationResult.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionValidationResult.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionValidationResult.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionValidationResult.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionValidator.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionValidator.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionValidator.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorSubmissionValidator.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorValidationException.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorValidationException.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorValidationException.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/AttestorValidationException.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/IAttestorSubmissionService.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/IAttestorSubmissionService.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/IAttestorSubmissionService.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/IAttestorSubmissionService.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/IDsseCanonicalizer.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/IDsseCanonicalizer.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/IDsseCanonicalizer.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/IDsseCanonicalizer.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/SubmissionContext.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/SubmissionContext.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/SubmissionContext.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Submission/SubmissionContext.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/AttestorVerificationException.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/AttestorVerificationException.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/AttestorVerificationException.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/AttestorVerificationException.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/AttestorVerificationRequest.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/AttestorVerificationRequest.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/AttestorVerificationRequest.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/AttestorVerificationRequest.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/AttestorVerificationResult.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/AttestorVerificationResult.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/AttestorVerificationResult.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/AttestorVerificationResult.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/IAttestorVerificationService.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/IAttestorVerificationService.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/IAttestorVerificationService.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Core/Verification/IAttestorVerificationService.cs
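For reviewers skimming the file moves above: the layout keeps contracts such as `IRekorClient` and the submission/verification services in `StellaOps.Attestor.Core`, while `StellaOps.Attestor.Infrastructure` supplies the Mongo, Redis, Rekor, and archival adapters plus the DI wiring in `ServiceCollectionExtensions.cs`. The sketch below is illustrative only; the type names mirror files renamed in this patch, but every member signature shown is an assumption rather than the repository's actual API.

```csharp
// Hypothetical sketch of the Core/Infrastructure split; signatures are assumed, not copied from the repo.
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;

namespace StellaOps.Attestor.Core.Rekor
{
    // Core: transport-agnostic contract (mirrors StellaOps.Attestor.Core/Rekor/IRekorClient.cs).
    public interface IRekorClient
    {
        Task<string> SubmitAsync(byte[] dsseEnvelope, CancellationToken cancellationToken = default);
    }
}

namespace StellaOps.Attestor.Infrastructure.Rekor
{
    using StellaOps.Attestor.Core.Rekor;

    // Infrastructure: HTTP adapter (mirrors StellaOps.Attestor.Infrastructure/Rekor/HttpRekorClient.cs).
    public sealed class HttpRekorClient : IRekorClient
    {
        private readonly HttpClient httpClient;

        public HttpRekorClient(HttpClient httpClient) => this.httpClient = httpClient;

        public async Task<string> SubmitAsync(byte[] dsseEnvelope, CancellationToken cancellationToken = default)
        {
            using var content = new ByteArrayContent(dsseEnvelope);
            using var response = await httpClient.PostAsync("api/v1/log/entries", content, cancellationToken);
            response.EnsureSuccessStatusCode();
            return await response.Content.ReadAsStringAsync(cancellationToken);
        }
    }

    // Registration helper so the web service depends only on the Core interface.
    public static class ServiceCollectionExtensions
    {
        public static IServiceCollection AddAttestorInfrastructure(this IServiceCollection services)
        {
            services.AddHttpClient<IRekorClient, HttpRekorClient>();
            return services;
        }
    }
}
```

Keeping the contract in Core is what allows something like the in-repo `StubRekorClient` to stand in for `HttpRekorClient` in tests, without pulling in Mongo, Redis, or a live Rekor endpoint.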
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Properties/AssemblyInfo.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Properties/AssemblyInfo.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Properties/AssemblyInfo.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Properties/AssemblyInfo.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Rekor/HttpRekorClient.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Rekor/HttpRekorClient.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Rekor/HttpRekorClient.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Rekor/HttpRekorClient.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Rekor/StubRekorClient.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Rekor/StubRekorClient.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Rekor/StubRekorClient.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Rekor/StubRekorClient.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/ServiceCollectionExtensions.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/ServiceCollectionExtensions.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/ServiceCollectionExtensions.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/ServiceCollectionExtensions.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/StellaOps.Attestor.Infrastructure.csproj b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/StellaOps.Attestor.Infrastructure.csproj
similarity index 98%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/StellaOps.Attestor.Infrastructure.csproj
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/StellaOps.Attestor.Infrastructure.csproj
index ff30e46c..69ea4f38 100644
--- a/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/StellaOps.Attestor.Infrastructure.csproj
+++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/StellaOps.Attestor.Infrastructure.csproj
@@ -1,21 +1,21 @@
-
-
- net10.0
- preview
- enable
- enable
- true
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+
+
+ net10.0
+ preview
+ enable
+ enable
+ true
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/InMemoryAttestorDedupeStore.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/InMemoryAttestorDedupeStore.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/InMemoryAttestorDedupeStore.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/InMemoryAttestorDedupeStore.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/MongoAttestorAuditSink.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/MongoAttestorAuditSink.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/MongoAttestorAuditSink.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/MongoAttestorAuditSink.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/MongoAttestorEntryRepository.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/MongoAttestorEntryRepository.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/MongoAttestorEntryRepository.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/MongoAttestorEntryRepository.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/NullAttestorArchiveStore.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/NullAttestorArchiveStore.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/NullAttestorArchiveStore.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/NullAttestorArchiveStore.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/RedisAttestorDedupeStore.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/RedisAttestorDedupeStore.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/RedisAttestorDedupeStore.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/RedisAttestorDedupeStore.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/S3AttestorArchiveStore.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/S3AttestorArchiveStore.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/S3AttestorArchiveStore.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Storage/S3AttestorArchiveStore.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Submission/AttestorSubmissionService.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Submission/AttestorSubmissionService.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Submission/AttestorSubmissionService.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Submission/AttestorSubmissionService.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Submission/DefaultDsseCanonicalizer.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Submission/DefaultDsseCanonicalizer.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Submission/DefaultDsseCanonicalizer.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Submission/DefaultDsseCanonicalizer.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Verification/AttestorVerificationService.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Verification/AttestorVerificationService.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Verification/AttestorVerificationService.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Infrastructure/Verification/AttestorVerificationService.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorSubmissionServiceTests.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorSubmissionServiceTests.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorSubmissionServiceTests.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorSubmissionServiceTests.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorVerificationServiceTests.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorVerificationServiceTests.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorVerificationServiceTests.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/AttestorVerificationServiceTests.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Tests/HttpRekorClientTests.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/HttpRekorClientTests.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Tests/HttpRekorClientTests.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/HttpRekorClientTests.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Tests/StellaOps.Attestor.Tests.csproj b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/StellaOps.Attestor.Tests.csproj
similarity index 80%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Tests/StellaOps.Attestor.Tests.csproj
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/StellaOps.Attestor.Tests.csproj
index dcc734a2..736aea19 100644
--- a/src/StellaOps.Attestor/StellaOps.Attestor.Tests/StellaOps.Attestor.Tests.csproj
+++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/StellaOps.Attestor.Tests.csproj
@@ -1,3 +1,4 @@
+ net10.0
@@ -19,7 +20,7 @@
-
-
+
+
-
+
\ No newline at end of file
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.Tests/TestDoubles.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/TestDoubles.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.Tests/TestDoubles.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.Tests/TestDoubles.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.WebService/Program.cs b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.WebService/Program.cs
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.WebService/Program.cs
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.WebService/Program.cs
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.WebService/StellaOps.Attestor.WebService.csproj b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.WebService/StellaOps.Attestor.WebService.csproj
similarity index 61%
rename from src/StellaOps.Attestor/StellaOps.Attestor.WebService/StellaOps.Attestor.WebService.csproj
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.WebService/StellaOps.Attestor.WebService.csproj
index a0f87bb7..7e69f3a2 100644
--- a/src/StellaOps.Attestor/StellaOps.Attestor.WebService/StellaOps.Attestor.WebService.csproj
+++ b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.WebService/StellaOps.Attestor.WebService.csproj
@@ -1,3 +1,4 @@
+ net10.0
@@ -20,11 +21,11 @@
-
-
-
-
-
-
+
+
+
+
+
+
-
+
\ No newline at end of file
diff --git a/src/StellaOps.Attestor/StellaOps.Attestor.sln b/src/Attestor/StellaOps.Attestor/StellaOps.Attestor.sln
similarity index 100%
rename from src/StellaOps.Attestor/StellaOps.Attestor.sln
rename to src/Attestor/StellaOps.Attestor/StellaOps.Attestor.sln
diff --git a/src/StellaOps.Attestor/TASKS.md
b/src/Attestor/StellaOps.Attestor/TASKS.md similarity index 100% rename from src/StellaOps.Attestor/TASKS.md rename to src/Attestor/StellaOps.Attestor/TASKS.md diff --git a/src/Authority/StellaOps.Authority.sln b/src/Authority/StellaOps.Authority.sln new file mode 100644 index 00000000..d662f3f9 --- /dev/null +++ b/src/Authority/StellaOps.Authority.sln @@ -0,0 +1,303 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.0.31903.59 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "StellaOps.Authority", "StellaOps.Authority", "{BDB24B64-FE4E-C4BD-9F80-9428F98EDF6F}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Abstractions", "StellaOps.Authority\StellaOps.Auth.Abstractions\StellaOps.Auth.Abstractions.csproj", "{336F7E73-0D75-4308-A20B-E8AB7964D27C}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Abstractions.Tests", "StellaOps.Authority\StellaOps.Auth.Abstractions.Tests\StellaOps.Auth.Abstractions.Tests.csproj", "{CD7D0B36-386B-455D-A14B-E7857C255C42}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Client", "StellaOps.Authority\StellaOps.Auth.Client\StellaOps.Auth.Client.csproj", "{F2CEB8F7-C65B-407E-A11F-B02A39237355}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Configuration", "..\__Libraries\StellaOps.Configuration\StellaOps.Configuration.csproj", "{BF48C3E7-E1E8-4869-973F-22554F146FCE}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Authority.Plugins.Abstractions", "StellaOps.Authority\StellaOps.Authority.Plugins.Abstractions\StellaOps.Authority.Plugins.Abstractions.csproj", "{91C7B100-D04A-4486-8A26-9D55234876D7}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cryptography", "..\__Libraries\StellaOps.Cryptography\StellaOps.Cryptography.csproj", "{00E2F0AF-32EC-4755-81AD-907532F48BBB}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Client.Tests", "StellaOps.Authority\StellaOps.Auth.Client.Tests\StellaOps.Auth.Client.Tests.csproj", "{2346E499-C1F4-46C5-BB03-859FC56881D4}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.ServerIntegration", "StellaOps.Authority\StellaOps.Auth.ServerIntegration\StellaOps.Auth.ServerIntegration.csproj", "{412DAFA7-FDEA-418C-995B-7C7F51D89E00}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.DependencyInjection", "..\__Libraries\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj", "{79CB2323-2370-419A-8B22-A193B3F3CE68}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.ServerIntegration.Tests", "StellaOps.Authority\StellaOps.Auth.ServerIntegration.Tests\StellaOps.Auth.ServerIntegration.Tests.csproj", "{BE1E685F-33D8-47E5-B4FA-BC4DDED255D3}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Authority", "StellaOps.Authority\StellaOps.Authority\StellaOps.Authority.csproj", "{614EDC46-4654-40F7-A779-8F127B8FD956}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Authority.Plugin.Standard", "StellaOps.Authority\StellaOps.Authority.Plugin.Standard\StellaOps.Authority.Plugin.Standard.csproj", "{4B12E120-E39B-44A7-A25E-D3151D5AE914}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Plugin", "..\__Libraries\StellaOps.Plugin\StellaOps.Plugin.csproj", 
"{7F9552C7-7E41-4EA6-9F5E-17E8049C9F10}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Authority.Storage.Mongo", "StellaOps.Authority\StellaOps.Authority.Storage.Mongo\StellaOps.Authority.Storage.Mongo.csproj", "{1FFF91AB-C2D2-4A12-A77B-AB9806116F7A}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cryptography.DependencyInjection", "..\__Libraries\StellaOps.Cryptography.DependencyInjection\StellaOps.Cryptography.DependencyInjection.csproj", "{208FE840-FFDD-43A5-9F64-F1F3C45C51F7}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Security", "..\__Libraries\StellaOps.Auth.Security\StellaOps.Auth.Security.csproj", "{6EE9BB3A-A55F-4FDC-95F1-9304DB341AB1}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Authority.Plugin.Standard.Tests", "StellaOps.Authority\StellaOps.Authority.Plugin.Standard.Tests\StellaOps.Authority.Plugin.Standard.Tests.csproj", "{168986E2-E127-4E03-BE45-4CC306E4E880}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Authority.Plugins.Abstractions.Tests", "StellaOps.Authority\StellaOps.Authority.Plugins.Abstractions.Tests\StellaOps.Authority.Plugins.Abstractions.Tests.csproj", "{A461EFE2-CBB1-4650-9CA0-05CECFAC3AE3}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Authority.Tests", "StellaOps.Authority\StellaOps.Authority.Tests\StellaOps.Authority.Tests.csproj", "{24BBDF59-7B30-4620-8464-BDACB1AEF49D}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|Any CPU = Release|Any CPU + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {336F7E73-0D75-4308-A20B-E8AB7964D27C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {336F7E73-0D75-4308-A20B-E8AB7964D27C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {336F7E73-0D75-4308-A20B-E8AB7964D27C}.Debug|x64.ActiveCfg = Debug|Any CPU + {336F7E73-0D75-4308-A20B-E8AB7964D27C}.Debug|x64.Build.0 = Debug|Any CPU + {336F7E73-0D75-4308-A20B-E8AB7964D27C}.Debug|x86.ActiveCfg = Debug|Any CPU + {336F7E73-0D75-4308-A20B-E8AB7964D27C}.Debug|x86.Build.0 = Debug|Any CPU + {336F7E73-0D75-4308-A20B-E8AB7964D27C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {336F7E73-0D75-4308-A20B-E8AB7964D27C}.Release|Any CPU.Build.0 = Release|Any CPU + {336F7E73-0D75-4308-A20B-E8AB7964D27C}.Release|x64.ActiveCfg = Release|Any CPU + {336F7E73-0D75-4308-A20B-E8AB7964D27C}.Release|x64.Build.0 = Release|Any CPU + {336F7E73-0D75-4308-A20B-E8AB7964D27C}.Release|x86.ActiveCfg = Release|Any CPU + {336F7E73-0D75-4308-A20B-E8AB7964D27C}.Release|x86.Build.0 = Release|Any CPU + {CD7D0B36-386B-455D-A14B-E7857C255C42}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {CD7D0B36-386B-455D-A14B-E7857C255C42}.Debug|Any CPU.Build.0 = Debug|Any CPU + {CD7D0B36-386B-455D-A14B-E7857C255C42}.Debug|x64.ActiveCfg = Debug|Any CPU + {CD7D0B36-386B-455D-A14B-E7857C255C42}.Debug|x64.Build.0 = Debug|Any CPU + {CD7D0B36-386B-455D-A14B-E7857C255C42}.Debug|x86.ActiveCfg = Debug|Any CPU + {CD7D0B36-386B-455D-A14B-E7857C255C42}.Debug|x86.Build.0 = Debug|Any CPU + {CD7D0B36-386B-455D-A14B-E7857C255C42}.Release|Any CPU.ActiveCfg = Release|Any CPU + {CD7D0B36-386B-455D-A14B-E7857C255C42}.Release|Any CPU.Build.0 = Release|Any CPU + {CD7D0B36-386B-455D-A14B-E7857C255C42}.Release|x64.ActiveCfg = Release|Any CPU + 
{CD7D0B36-386B-455D-A14B-E7857C255C42}.Release|x64.Build.0 = Release|Any CPU + {CD7D0B36-386B-455D-A14B-E7857C255C42}.Release|x86.ActiveCfg = Release|Any CPU + {CD7D0B36-386B-455D-A14B-E7857C255C42}.Release|x86.Build.0 = Release|Any CPU + {F2CEB8F7-C65B-407E-A11F-B02A39237355}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {F2CEB8F7-C65B-407E-A11F-B02A39237355}.Debug|Any CPU.Build.0 = Debug|Any CPU + {F2CEB8F7-C65B-407E-A11F-B02A39237355}.Debug|x64.ActiveCfg = Debug|Any CPU + {F2CEB8F7-C65B-407E-A11F-B02A39237355}.Debug|x64.Build.0 = Debug|Any CPU + {F2CEB8F7-C65B-407E-A11F-B02A39237355}.Debug|x86.ActiveCfg = Debug|Any CPU + {F2CEB8F7-C65B-407E-A11F-B02A39237355}.Debug|x86.Build.0 = Debug|Any CPU + {F2CEB8F7-C65B-407E-A11F-B02A39237355}.Release|Any CPU.ActiveCfg = Release|Any CPU + {F2CEB8F7-C65B-407E-A11F-B02A39237355}.Release|Any CPU.Build.0 = Release|Any CPU + {F2CEB8F7-C65B-407E-A11F-B02A39237355}.Release|x64.ActiveCfg = Release|Any CPU + {F2CEB8F7-C65B-407E-A11F-B02A39237355}.Release|x64.Build.0 = Release|Any CPU + {F2CEB8F7-C65B-407E-A11F-B02A39237355}.Release|x86.ActiveCfg = Release|Any CPU + {F2CEB8F7-C65B-407E-A11F-B02A39237355}.Release|x86.Build.0 = Release|Any CPU + {BF48C3E7-E1E8-4869-973F-22554F146FCE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {BF48C3E7-E1E8-4869-973F-22554F146FCE}.Debug|Any CPU.Build.0 = Debug|Any CPU + {BF48C3E7-E1E8-4869-973F-22554F146FCE}.Debug|x64.ActiveCfg = Debug|Any CPU + {BF48C3E7-E1E8-4869-973F-22554F146FCE}.Debug|x64.Build.0 = Debug|Any CPU + {BF48C3E7-E1E8-4869-973F-22554F146FCE}.Debug|x86.ActiveCfg = Debug|Any CPU + {BF48C3E7-E1E8-4869-973F-22554F146FCE}.Debug|x86.Build.0 = Debug|Any CPU + {BF48C3E7-E1E8-4869-973F-22554F146FCE}.Release|Any CPU.ActiveCfg = Release|Any CPU + {BF48C3E7-E1E8-4869-973F-22554F146FCE}.Release|Any CPU.Build.0 = Release|Any CPU + {BF48C3E7-E1E8-4869-973F-22554F146FCE}.Release|x64.ActiveCfg = Release|Any CPU + {BF48C3E7-E1E8-4869-973F-22554F146FCE}.Release|x64.Build.0 = Release|Any CPU + {BF48C3E7-E1E8-4869-973F-22554F146FCE}.Release|x86.ActiveCfg = Release|Any CPU + {BF48C3E7-E1E8-4869-973F-22554F146FCE}.Release|x86.Build.0 = Release|Any CPU + {91C7B100-D04A-4486-8A26-9D55234876D7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {91C7B100-D04A-4486-8A26-9D55234876D7}.Debug|Any CPU.Build.0 = Debug|Any CPU + {91C7B100-D04A-4486-8A26-9D55234876D7}.Debug|x64.ActiveCfg = Debug|Any CPU + {91C7B100-D04A-4486-8A26-9D55234876D7}.Debug|x64.Build.0 = Debug|Any CPU + {91C7B100-D04A-4486-8A26-9D55234876D7}.Debug|x86.ActiveCfg = Debug|Any CPU + {91C7B100-D04A-4486-8A26-9D55234876D7}.Debug|x86.Build.0 = Debug|Any CPU + {91C7B100-D04A-4486-8A26-9D55234876D7}.Release|Any CPU.ActiveCfg = Release|Any CPU + {91C7B100-D04A-4486-8A26-9D55234876D7}.Release|Any CPU.Build.0 = Release|Any CPU + {91C7B100-D04A-4486-8A26-9D55234876D7}.Release|x64.ActiveCfg = Release|Any CPU + {91C7B100-D04A-4486-8A26-9D55234876D7}.Release|x64.Build.0 = Release|Any CPU + {91C7B100-D04A-4486-8A26-9D55234876D7}.Release|x86.ActiveCfg = Release|Any CPU + {91C7B100-D04A-4486-8A26-9D55234876D7}.Release|x86.Build.0 = Release|Any CPU + {00E2F0AF-32EC-4755-81AD-907532F48BBB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {00E2F0AF-32EC-4755-81AD-907532F48BBB}.Debug|Any CPU.Build.0 = Debug|Any CPU + {00E2F0AF-32EC-4755-81AD-907532F48BBB}.Debug|x64.ActiveCfg = Debug|Any CPU + {00E2F0AF-32EC-4755-81AD-907532F48BBB}.Debug|x64.Build.0 = Debug|Any CPU + {00E2F0AF-32EC-4755-81AD-907532F48BBB}.Debug|x86.ActiveCfg = Debug|Any CPU + {00E2F0AF-32EC-4755-81AD-907532F48BBB}.Debug|x86.Build.0 = Debug|Any 
CPU + {00E2F0AF-32EC-4755-81AD-907532F48BBB}.Release|Any CPU.ActiveCfg = Release|Any CPU + {00E2F0AF-32EC-4755-81AD-907532F48BBB}.Release|Any CPU.Build.0 = Release|Any CPU + {00E2F0AF-32EC-4755-81AD-907532F48BBB}.Release|x64.ActiveCfg = Release|Any CPU + {00E2F0AF-32EC-4755-81AD-907532F48BBB}.Release|x64.Build.0 = Release|Any CPU + {00E2F0AF-32EC-4755-81AD-907532F48BBB}.Release|x86.ActiveCfg = Release|Any CPU + {00E2F0AF-32EC-4755-81AD-907532F48BBB}.Release|x86.Build.0 = Release|Any CPU + {2346E499-C1F4-46C5-BB03-859FC56881D4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {2346E499-C1F4-46C5-BB03-859FC56881D4}.Debug|Any CPU.Build.0 = Debug|Any CPU + {2346E499-C1F4-46C5-BB03-859FC56881D4}.Debug|x64.ActiveCfg = Debug|Any CPU + {2346E499-C1F4-46C5-BB03-859FC56881D4}.Debug|x64.Build.0 = Debug|Any CPU + {2346E499-C1F4-46C5-BB03-859FC56881D4}.Debug|x86.ActiveCfg = Debug|Any CPU + {2346E499-C1F4-46C5-BB03-859FC56881D4}.Debug|x86.Build.0 = Debug|Any CPU + {2346E499-C1F4-46C5-BB03-859FC56881D4}.Release|Any CPU.ActiveCfg = Release|Any CPU + {2346E499-C1F4-46C5-BB03-859FC56881D4}.Release|Any CPU.Build.0 = Release|Any CPU + {2346E499-C1F4-46C5-BB03-859FC56881D4}.Release|x64.ActiveCfg = Release|Any CPU + {2346E499-C1F4-46C5-BB03-859FC56881D4}.Release|x64.Build.0 = Release|Any CPU + {2346E499-C1F4-46C5-BB03-859FC56881D4}.Release|x86.ActiveCfg = Release|Any CPU + {2346E499-C1F4-46C5-BB03-859FC56881D4}.Release|x86.Build.0 = Release|Any CPU + {412DAFA7-FDEA-418C-995B-7C7F51D89E00}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {412DAFA7-FDEA-418C-995B-7C7F51D89E00}.Debug|Any CPU.Build.0 = Debug|Any CPU + {412DAFA7-FDEA-418C-995B-7C7F51D89E00}.Debug|x64.ActiveCfg = Debug|Any CPU + {412DAFA7-FDEA-418C-995B-7C7F51D89E00}.Debug|x64.Build.0 = Debug|Any CPU + {412DAFA7-FDEA-418C-995B-7C7F51D89E00}.Debug|x86.ActiveCfg = Debug|Any CPU + {412DAFA7-FDEA-418C-995B-7C7F51D89E00}.Debug|x86.Build.0 = Debug|Any CPU + {412DAFA7-FDEA-418C-995B-7C7F51D89E00}.Release|Any CPU.ActiveCfg = Release|Any CPU + {412DAFA7-FDEA-418C-995B-7C7F51D89E00}.Release|Any CPU.Build.0 = Release|Any CPU + {412DAFA7-FDEA-418C-995B-7C7F51D89E00}.Release|x64.ActiveCfg = Release|Any CPU + {412DAFA7-FDEA-418C-995B-7C7F51D89E00}.Release|x64.Build.0 = Release|Any CPU + {412DAFA7-FDEA-418C-995B-7C7F51D89E00}.Release|x86.ActiveCfg = Release|Any CPU + {412DAFA7-FDEA-418C-995B-7C7F51D89E00}.Release|x86.Build.0 = Release|Any CPU + {79CB2323-2370-419A-8B22-A193B3F3CE68}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {79CB2323-2370-419A-8B22-A193B3F3CE68}.Debug|Any CPU.Build.0 = Debug|Any CPU + {79CB2323-2370-419A-8B22-A193B3F3CE68}.Debug|x64.ActiveCfg = Debug|Any CPU + {79CB2323-2370-419A-8B22-A193B3F3CE68}.Debug|x64.Build.0 = Debug|Any CPU + {79CB2323-2370-419A-8B22-A193B3F3CE68}.Debug|x86.ActiveCfg = Debug|Any CPU + {79CB2323-2370-419A-8B22-A193B3F3CE68}.Debug|x86.Build.0 = Debug|Any CPU + {79CB2323-2370-419A-8B22-A193B3F3CE68}.Release|Any CPU.ActiveCfg = Release|Any CPU + {79CB2323-2370-419A-8B22-A193B3F3CE68}.Release|Any CPU.Build.0 = Release|Any CPU + {79CB2323-2370-419A-8B22-A193B3F3CE68}.Release|x64.ActiveCfg = Release|Any CPU + {79CB2323-2370-419A-8B22-A193B3F3CE68}.Release|x64.Build.0 = Release|Any CPU + {79CB2323-2370-419A-8B22-A193B3F3CE68}.Release|x86.ActiveCfg = Release|Any CPU + {79CB2323-2370-419A-8B22-A193B3F3CE68}.Release|x86.Build.0 = Release|Any CPU + {BE1E685F-33D8-47E5-B4FA-BC4DDED255D3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {BE1E685F-33D8-47E5-B4FA-BC4DDED255D3}.Debug|Any CPU.Build.0 = Debug|Any CPU + 
{BE1E685F-33D8-47E5-B4FA-BC4DDED255D3}.Debug|x64.ActiveCfg = Debug|Any CPU + {BE1E685F-33D8-47E5-B4FA-BC4DDED255D3}.Debug|x64.Build.0 = Debug|Any CPU + {BE1E685F-33D8-47E5-B4FA-BC4DDED255D3}.Debug|x86.ActiveCfg = Debug|Any CPU + {BE1E685F-33D8-47E5-B4FA-BC4DDED255D3}.Debug|x86.Build.0 = Debug|Any CPU + {BE1E685F-33D8-47E5-B4FA-BC4DDED255D3}.Release|Any CPU.ActiveCfg = Release|Any CPU + {BE1E685F-33D8-47E5-B4FA-BC4DDED255D3}.Release|Any CPU.Build.0 = Release|Any CPU + {BE1E685F-33D8-47E5-B4FA-BC4DDED255D3}.Release|x64.ActiveCfg = Release|Any CPU + {BE1E685F-33D8-47E5-B4FA-BC4DDED255D3}.Release|x64.Build.0 = Release|Any CPU + {BE1E685F-33D8-47E5-B4FA-BC4DDED255D3}.Release|x86.ActiveCfg = Release|Any CPU + {BE1E685F-33D8-47E5-B4FA-BC4DDED255D3}.Release|x86.Build.0 = Release|Any CPU + {614EDC46-4654-40F7-A779-8F127B8FD956}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {614EDC46-4654-40F7-A779-8F127B8FD956}.Debug|Any CPU.Build.0 = Debug|Any CPU + {614EDC46-4654-40F7-A779-8F127B8FD956}.Debug|x64.ActiveCfg = Debug|Any CPU + {614EDC46-4654-40F7-A779-8F127B8FD956}.Debug|x64.Build.0 = Debug|Any CPU + {614EDC46-4654-40F7-A779-8F127B8FD956}.Debug|x86.ActiveCfg = Debug|Any CPU + {614EDC46-4654-40F7-A779-8F127B8FD956}.Debug|x86.Build.0 = Debug|Any CPU + {614EDC46-4654-40F7-A779-8F127B8FD956}.Release|Any CPU.ActiveCfg = Release|Any CPU + {614EDC46-4654-40F7-A779-8F127B8FD956}.Release|Any CPU.Build.0 = Release|Any CPU + {614EDC46-4654-40F7-A779-8F127B8FD956}.Release|x64.ActiveCfg = Release|Any CPU + {614EDC46-4654-40F7-A779-8F127B8FD956}.Release|x64.Build.0 = Release|Any CPU + {614EDC46-4654-40F7-A779-8F127B8FD956}.Release|x86.ActiveCfg = Release|Any CPU + {614EDC46-4654-40F7-A779-8F127B8FD956}.Release|x86.Build.0 = Release|Any CPU + {4B12E120-E39B-44A7-A25E-D3151D5AE914}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {4B12E120-E39B-44A7-A25E-D3151D5AE914}.Debug|Any CPU.Build.0 = Debug|Any CPU + {4B12E120-E39B-44A7-A25E-D3151D5AE914}.Debug|x64.ActiveCfg = Debug|Any CPU + {4B12E120-E39B-44A7-A25E-D3151D5AE914}.Debug|x64.Build.0 = Debug|Any CPU + {4B12E120-E39B-44A7-A25E-D3151D5AE914}.Debug|x86.ActiveCfg = Debug|Any CPU + {4B12E120-E39B-44A7-A25E-D3151D5AE914}.Debug|x86.Build.0 = Debug|Any CPU + {4B12E120-E39B-44A7-A25E-D3151D5AE914}.Release|Any CPU.ActiveCfg = Release|Any CPU + {4B12E120-E39B-44A7-A25E-D3151D5AE914}.Release|Any CPU.Build.0 = Release|Any CPU + {4B12E120-E39B-44A7-A25E-D3151D5AE914}.Release|x64.ActiveCfg = Release|Any CPU + {4B12E120-E39B-44A7-A25E-D3151D5AE914}.Release|x64.Build.0 = Release|Any CPU + {4B12E120-E39B-44A7-A25E-D3151D5AE914}.Release|x86.ActiveCfg = Release|Any CPU + {4B12E120-E39B-44A7-A25E-D3151D5AE914}.Release|x86.Build.0 = Release|Any CPU + {7F9552C7-7E41-4EA6-9F5E-17E8049C9F10}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7F9552C7-7E41-4EA6-9F5E-17E8049C9F10}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7F9552C7-7E41-4EA6-9F5E-17E8049C9F10}.Debug|x64.ActiveCfg = Debug|Any CPU + {7F9552C7-7E41-4EA6-9F5E-17E8049C9F10}.Debug|x64.Build.0 = Debug|Any CPU + {7F9552C7-7E41-4EA6-9F5E-17E8049C9F10}.Debug|x86.ActiveCfg = Debug|Any CPU + {7F9552C7-7E41-4EA6-9F5E-17E8049C9F10}.Debug|x86.Build.0 = Debug|Any CPU + {7F9552C7-7E41-4EA6-9F5E-17E8049C9F10}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7F9552C7-7E41-4EA6-9F5E-17E8049C9F10}.Release|Any CPU.Build.0 = Release|Any CPU + {7F9552C7-7E41-4EA6-9F5E-17E8049C9F10}.Release|x64.ActiveCfg = Release|Any CPU + {7F9552C7-7E41-4EA6-9F5E-17E8049C9F10}.Release|x64.Build.0 = Release|Any CPU + {7F9552C7-7E41-4EA6-9F5E-17E8049C9F10}.Release|x86.ActiveCfg = 
Release|Any CPU + {7F9552C7-7E41-4EA6-9F5E-17E8049C9F10}.Release|x86.Build.0 = Release|Any CPU + {1FFF91AB-C2D2-4A12-A77B-AB9806116F7A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {1FFF91AB-C2D2-4A12-A77B-AB9806116F7A}.Debug|Any CPU.Build.0 = Debug|Any CPU + {1FFF91AB-C2D2-4A12-A77B-AB9806116F7A}.Debug|x64.ActiveCfg = Debug|Any CPU + {1FFF91AB-C2D2-4A12-A77B-AB9806116F7A}.Debug|x64.Build.0 = Debug|Any CPU + {1FFF91AB-C2D2-4A12-A77B-AB9806116F7A}.Debug|x86.ActiveCfg = Debug|Any CPU + {1FFF91AB-C2D2-4A12-A77B-AB9806116F7A}.Debug|x86.Build.0 = Debug|Any CPU + {1FFF91AB-C2D2-4A12-A77B-AB9806116F7A}.Release|Any CPU.ActiveCfg = Release|Any CPU + {1FFF91AB-C2D2-4A12-A77B-AB9806116F7A}.Release|Any CPU.Build.0 = Release|Any CPU + {1FFF91AB-C2D2-4A12-A77B-AB9806116F7A}.Release|x64.ActiveCfg = Release|Any CPU + {1FFF91AB-C2D2-4A12-A77B-AB9806116F7A}.Release|x64.Build.0 = Release|Any CPU + {1FFF91AB-C2D2-4A12-A77B-AB9806116F7A}.Release|x86.ActiveCfg = Release|Any CPU + {1FFF91AB-C2D2-4A12-A77B-AB9806116F7A}.Release|x86.Build.0 = Release|Any CPU + {208FE840-FFDD-43A5-9F64-F1F3C45C51F7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {208FE840-FFDD-43A5-9F64-F1F3C45C51F7}.Debug|Any CPU.Build.0 = Debug|Any CPU + {208FE840-FFDD-43A5-9F64-F1F3C45C51F7}.Debug|x64.ActiveCfg = Debug|Any CPU + {208FE840-FFDD-43A5-9F64-F1F3C45C51F7}.Debug|x64.Build.0 = Debug|Any CPU + {208FE840-FFDD-43A5-9F64-F1F3C45C51F7}.Debug|x86.ActiveCfg = Debug|Any CPU + {208FE840-FFDD-43A5-9F64-F1F3C45C51F7}.Debug|x86.Build.0 = Debug|Any CPU + {208FE840-FFDD-43A5-9F64-F1F3C45C51F7}.Release|Any CPU.ActiveCfg = Release|Any CPU + {208FE840-FFDD-43A5-9F64-F1F3C45C51F7}.Release|Any CPU.Build.0 = Release|Any CPU + {208FE840-FFDD-43A5-9F64-F1F3C45C51F7}.Release|x64.ActiveCfg = Release|Any CPU + {208FE840-FFDD-43A5-9F64-F1F3C45C51F7}.Release|x64.Build.0 = Release|Any CPU + {208FE840-FFDD-43A5-9F64-F1F3C45C51F7}.Release|x86.ActiveCfg = Release|Any CPU + {208FE840-FFDD-43A5-9F64-F1F3C45C51F7}.Release|x86.Build.0 = Release|Any CPU + {6EE9BB3A-A55F-4FDC-95F1-9304DB341AB1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {6EE9BB3A-A55F-4FDC-95F1-9304DB341AB1}.Debug|Any CPU.Build.0 = Debug|Any CPU + {6EE9BB3A-A55F-4FDC-95F1-9304DB341AB1}.Debug|x64.ActiveCfg = Debug|Any CPU + {6EE9BB3A-A55F-4FDC-95F1-9304DB341AB1}.Debug|x64.Build.0 = Debug|Any CPU + {6EE9BB3A-A55F-4FDC-95F1-9304DB341AB1}.Debug|x86.ActiveCfg = Debug|Any CPU + {6EE9BB3A-A55F-4FDC-95F1-9304DB341AB1}.Debug|x86.Build.0 = Debug|Any CPU + {6EE9BB3A-A55F-4FDC-95F1-9304DB341AB1}.Release|Any CPU.ActiveCfg = Release|Any CPU + {6EE9BB3A-A55F-4FDC-95F1-9304DB341AB1}.Release|Any CPU.Build.0 = Release|Any CPU + {6EE9BB3A-A55F-4FDC-95F1-9304DB341AB1}.Release|x64.ActiveCfg = Release|Any CPU + {6EE9BB3A-A55F-4FDC-95F1-9304DB341AB1}.Release|x64.Build.0 = Release|Any CPU + {6EE9BB3A-A55F-4FDC-95F1-9304DB341AB1}.Release|x86.ActiveCfg = Release|Any CPU + {6EE9BB3A-A55F-4FDC-95F1-9304DB341AB1}.Release|x86.Build.0 = Release|Any CPU + {168986E2-E127-4E03-BE45-4CC306E4E880}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {168986E2-E127-4E03-BE45-4CC306E4E880}.Debug|Any CPU.Build.0 = Debug|Any CPU + {168986E2-E127-4E03-BE45-4CC306E4E880}.Debug|x64.ActiveCfg = Debug|Any CPU + {168986E2-E127-4E03-BE45-4CC306E4E880}.Debug|x64.Build.0 = Debug|Any CPU + {168986E2-E127-4E03-BE45-4CC306E4E880}.Debug|x86.ActiveCfg = Debug|Any CPU + {168986E2-E127-4E03-BE45-4CC306E4E880}.Debug|x86.Build.0 = Debug|Any CPU + {168986E2-E127-4E03-BE45-4CC306E4E880}.Release|Any CPU.ActiveCfg = Release|Any CPU + {168986E2-E127-4E03-BE45-4CC306E4E880}.Release|Any 
CPU.Build.0 = Release|Any CPU + {168986E2-E127-4E03-BE45-4CC306E4E880}.Release|x64.ActiveCfg = Release|Any CPU + {168986E2-E127-4E03-BE45-4CC306E4E880}.Release|x64.Build.0 = Release|Any CPU + {168986E2-E127-4E03-BE45-4CC306E4E880}.Release|x86.ActiveCfg = Release|Any CPU + {168986E2-E127-4E03-BE45-4CC306E4E880}.Release|x86.Build.0 = Release|Any CPU + {A461EFE2-CBB1-4650-9CA0-05CECFAC3AE3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {A461EFE2-CBB1-4650-9CA0-05CECFAC3AE3}.Debug|Any CPU.Build.0 = Debug|Any CPU + {A461EFE2-CBB1-4650-9CA0-05CECFAC3AE3}.Debug|x64.ActiveCfg = Debug|Any CPU + {A461EFE2-CBB1-4650-9CA0-05CECFAC3AE3}.Debug|x64.Build.0 = Debug|Any CPU + {A461EFE2-CBB1-4650-9CA0-05CECFAC3AE3}.Debug|x86.ActiveCfg = Debug|Any CPU + {A461EFE2-CBB1-4650-9CA0-05CECFAC3AE3}.Debug|x86.Build.0 = Debug|Any CPU + {A461EFE2-CBB1-4650-9CA0-05CECFAC3AE3}.Release|Any CPU.ActiveCfg = Release|Any CPU + {A461EFE2-CBB1-4650-9CA0-05CECFAC3AE3}.Release|Any CPU.Build.0 = Release|Any CPU + {A461EFE2-CBB1-4650-9CA0-05CECFAC3AE3}.Release|x64.ActiveCfg = Release|Any CPU + {A461EFE2-CBB1-4650-9CA0-05CECFAC3AE3}.Release|x64.Build.0 = Release|Any CPU + {A461EFE2-CBB1-4650-9CA0-05CECFAC3AE3}.Release|x86.ActiveCfg = Release|Any CPU + {A461EFE2-CBB1-4650-9CA0-05CECFAC3AE3}.Release|x86.Build.0 = Release|Any CPU + {24BBDF59-7B30-4620-8464-BDACB1AEF49D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {24BBDF59-7B30-4620-8464-BDACB1AEF49D}.Debug|Any CPU.Build.0 = Debug|Any CPU + {24BBDF59-7B30-4620-8464-BDACB1AEF49D}.Debug|x64.ActiveCfg = Debug|Any CPU + {24BBDF59-7B30-4620-8464-BDACB1AEF49D}.Debug|x64.Build.0 = Debug|Any CPU + {24BBDF59-7B30-4620-8464-BDACB1AEF49D}.Debug|x86.ActiveCfg = Debug|Any CPU + {24BBDF59-7B30-4620-8464-BDACB1AEF49D}.Debug|x86.Build.0 = Debug|Any CPU + {24BBDF59-7B30-4620-8464-BDACB1AEF49D}.Release|Any CPU.ActiveCfg = Release|Any CPU + {24BBDF59-7B30-4620-8464-BDACB1AEF49D}.Release|Any CPU.Build.0 = Release|Any CPU + {24BBDF59-7B30-4620-8464-BDACB1AEF49D}.Release|x64.ActiveCfg = Release|Any CPU + {24BBDF59-7B30-4620-8464-BDACB1AEF49D}.Release|x64.Build.0 = Release|Any CPU + {24BBDF59-7B30-4620-8464-BDACB1AEF49D}.Release|x86.ActiveCfg = Release|Any CPU + {24BBDF59-7B30-4620-8464-BDACB1AEF49D}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(NestedProjects) = preSolution + {336F7E73-0D75-4308-A20B-E8AB7964D27C} = {BDB24B64-FE4E-C4BD-9F80-9428F98EDF6F} + {CD7D0B36-386B-455D-A14B-E7857C255C42} = {BDB24B64-FE4E-C4BD-9F80-9428F98EDF6F} + {F2CEB8F7-C65B-407E-A11F-B02A39237355} = {BDB24B64-FE4E-C4BD-9F80-9428F98EDF6F} + {91C7B100-D04A-4486-8A26-9D55234876D7} = {BDB24B64-FE4E-C4BD-9F80-9428F98EDF6F} + {2346E499-C1F4-46C5-BB03-859FC56881D4} = {BDB24B64-FE4E-C4BD-9F80-9428F98EDF6F} + {412DAFA7-FDEA-418C-995B-7C7F51D89E00} = {BDB24B64-FE4E-C4BD-9F80-9428F98EDF6F} + {BE1E685F-33D8-47E5-B4FA-BC4DDED255D3} = {BDB24B64-FE4E-C4BD-9F80-9428F98EDF6F} + {614EDC46-4654-40F7-A779-8F127B8FD956} = {BDB24B64-FE4E-C4BD-9F80-9428F98EDF6F} + {4B12E120-E39B-44A7-A25E-D3151D5AE914} = {BDB24B64-FE4E-C4BD-9F80-9428F98EDF6F} + {1FFF91AB-C2D2-4A12-A77B-AB9806116F7A} = {BDB24B64-FE4E-C4BD-9F80-9428F98EDF6F} + {168986E2-E127-4E03-BE45-4CC306E4E880} = {BDB24B64-FE4E-C4BD-9F80-9428F98EDF6F} + {A461EFE2-CBB1-4650-9CA0-05CECFAC3AE3} = {BDB24B64-FE4E-C4BD-9F80-9428F98EDF6F} + {24BBDF59-7B30-4620-8464-BDACB1AEF49D} = {BDB24B64-FE4E-C4BD-9F80-9428F98EDF6F} + EndGlobalSection +EndGlobal diff --git 
a/src/StellaOps.Authority/AGENTS.md b/src/Authority/StellaOps.Authority/AGENTS.md
similarity index 65%
rename from src/StellaOps.Authority/AGENTS.md
rename to src/Authority/StellaOps.Authority/AGENTS.md
index 63d2f8f6..29c2d12f 100644
--- a/src/StellaOps.Authority/AGENTS.md
+++ b/src/Authority/StellaOps.Authority/AGENTS.md
@@ -14,7 +14,7 @@ Own the StellaOps Authority host service: ASP.NET minimal API, OpenIddict flows,
 - Coordinate with plugin teams before altering plugin-facing contracts.
 
 ## Key Directories
-- `src/StellaOps.Authority/` — host app
-- `src/StellaOps.Authority.Tests/` — integration/unit tests
-- `src/StellaOps.Authority.Storage.Mongo/` — data access helpers
-- `src/StellaOps.Authority.Plugin.Standard/` — default identity provider plugin
+- `src/Authority/StellaOps.Authority/` — host app
+- `src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/` — integration/unit tests
+- `src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/` — data access helpers
+- `src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/` — default identity provider plugin
diff --git a/src/StellaOps.Authority/StellaOps.Auth.Abstractions.Tests/NetworkMaskMatcherTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions.Tests/NetworkMaskMatcherTests.cs
similarity index 100%
rename from src/StellaOps.Authority/StellaOps.Auth.Abstractions.Tests/NetworkMaskMatcherTests.cs
rename to src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions.Tests/NetworkMaskMatcherTests.cs
diff --git a/src/StellaOps.Authority/StellaOps.Auth.Abstractions.Tests/StellaOps.Auth.Abstractions.Tests.csproj b/src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions.Tests/StellaOps.Auth.Abstractions.Tests.csproj
similarity index 100%
rename from src/StellaOps.Authority/StellaOps.Auth.Abstractions.Tests/StellaOps.Auth.Abstractions.Tests.csproj
rename to src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions.Tests/StellaOps.Auth.Abstractions.Tests.csproj
diff --git a/src/StellaOps.Authority/StellaOps.Auth.Abstractions.Tests/StellaOpsPrincipalBuilderTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions.Tests/StellaOpsPrincipalBuilderTests.cs
similarity index 100%
rename from src/StellaOps.Authority/StellaOps.Auth.Abstractions.Tests/StellaOpsPrincipalBuilderTests.cs
rename to src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions.Tests/StellaOpsPrincipalBuilderTests.cs
diff --git a/src/StellaOps.Authority/StellaOps.Auth.Abstractions.Tests/StellaOpsProblemResultFactoryTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions.Tests/StellaOpsProblemResultFactoryTests.cs
similarity index 100%
rename from src/StellaOps.Authority/StellaOps.Auth.Abstractions.Tests/StellaOpsProblemResultFactoryTests.cs
rename to src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions.Tests/StellaOpsProblemResultFactoryTests.cs
diff --git a/src/StellaOps.Authority/StellaOps.Auth.Abstractions.Tests/StellaOpsScopesTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions.Tests/StellaOpsScopesTests.cs
similarity index 97%
rename from src/StellaOps.Authority/StellaOps.Auth.Abstractions.Tests/StellaOpsScopesTests.cs
rename to src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions.Tests/StellaOpsScopesTests.cs
index 93da557d..fd261ff2 100644
--- a/src/StellaOps.Authority/StellaOps.Auth.Abstractions.Tests/StellaOpsScopesTests.cs
+++ b/src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions.Tests/StellaOpsScopesTests.cs
@@ -1,54
+1,54 @@ -using StellaOps.Auth.Abstractions; -using Xunit; - -namespace StellaOps.Auth.Abstractions.Tests; - -public class StellaOpsScopesTests -{ - [Theory] - [InlineData(StellaOpsScopes.AdvisoryRead)] - [InlineData(StellaOpsScopes.AdvisoryIngest)] - [InlineData(StellaOpsScopes.VexRead)] - [InlineData(StellaOpsScopes.VexIngest)] - [InlineData(StellaOpsScopes.AocVerify)] - [InlineData(StellaOpsScopes.SignalsRead)] - [InlineData(StellaOpsScopes.SignalsWrite)] - [InlineData(StellaOpsScopes.SignalsAdmin)] - [InlineData(StellaOpsScopes.PolicyWrite)] - [InlineData(StellaOpsScopes.PolicyAuthor)] - [InlineData(StellaOpsScopes.PolicySubmit)] - [InlineData(StellaOpsScopes.PolicyApprove)] - [InlineData(StellaOpsScopes.PolicyReview)] - [InlineData(StellaOpsScopes.PolicyOperate)] - [InlineData(StellaOpsScopes.PolicyAudit)] - [InlineData(StellaOpsScopes.PolicyRun)] - [InlineData(StellaOpsScopes.PolicySimulate)] - [InlineData(StellaOpsScopes.FindingsRead)] - [InlineData(StellaOpsScopes.EffectiveWrite)] - [InlineData(StellaOpsScopes.GraphRead)] - [InlineData(StellaOpsScopes.VulnRead)] - [InlineData(StellaOpsScopes.GraphWrite)] - [InlineData(StellaOpsScopes.GraphExport)] - [InlineData(StellaOpsScopes.GraphSimulate)] - [InlineData(StellaOpsScopes.OrchRead)] - [InlineData(StellaOpsScopes.OrchOperate)] - [InlineData(StellaOpsScopes.ExportViewer)] - [InlineData(StellaOpsScopes.ExportOperator)] - [InlineData(StellaOpsScopes.ExportAdmin)] - public void All_IncludesNewScopes(string scope) - { - Assert.Contains(scope, StellaOpsScopes.All); - } - - [Theory] - [InlineData("Advisory:Read", StellaOpsScopes.AdvisoryRead)] - [InlineData(" VEX:Ingest ", StellaOpsScopes.VexIngest)] - [InlineData("AOC:VERIFY", StellaOpsScopes.AocVerify)] - [InlineData(" Signals:Write ", StellaOpsScopes.SignalsWrite)] - [InlineData("Policy:Author", StellaOpsScopes.PolicyAuthor)] - [InlineData("Export.Admin", StellaOpsScopes.ExportAdmin)] - public void Normalize_NormalizesToLowerCase(string input, string expected) - { - Assert.Equal(expected, StellaOpsScopes.Normalize(input)); - } -} +using StellaOps.Auth.Abstractions; +using Xunit; + +namespace StellaOps.Auth.Abstractions.Tests; + +public class StellaOpsScopesTests +{ + [Theory] + [InlineData(StellaOpsScopes.AdvisoryRead)] + [InlineData(StellaOpsScopes.AdvisoryIngest)] + [InlineData(StellaOpsScopes.VexRead)] + [InlineData(StellaOpsScopes.VexIngest)] + [InlineData(StellaOpsScopes.AocVerify)] + [InlineData(StellaOpsScopes.SignalsRead)] + [InlineData(StellaOpsScopes.SignalsWrite)] + [InlineData(StellaOpsScopes.SignalsAdmin)] + [InlineData(StellaOpsScopes.PolicyWrite)] + [InlineData(StellaOpsScopes.PolicyAuthor)] + [InlineData(StellaOpsScopes.PolicySubmit)] + [InlineData(StellaOpsScopes.PolicyApprove)] + [InlineData(StellaOpsScopes.PolicyReview)] + [InlineData(StellaOpsScopes.PolicyOperate)] + [InlineData(StellaOpsScopes.PolicyAudit)] + [InlineData(StellaOpsScopes.PolicyRun)] + [InlineData(StellaOpsScopes.PolicySimulate)] + [InlineData(StellaOpsScopes.FindingsRead)] + [InlineData(StellaOpsScopes.EffectiveWrite)] + [InlineData(StellaOpsScopes.GraphRead)] + [InlineData(StellaOpsScopes.VulnRead)] + [InlineData(StellaOpsScopes.GraphWrite)] + [InlineData(StellaOpsScopes.GraphExport)] + [InlineData(StellaOpsScopes.GraphSimulate)] + [InlineData(StellaOpsScopes.OrchRead)] + [InlineData(StellaOpsScopes.OrchOperate)] + [InlineData(StellaOpsScopes.ExportViewer)] + [InlineData(StellaOpsScopes.ExportOperator)] + [InlineData(StellaOpsScopes.ExportAdmin)] + public void All_IncludesNewScopes(string scope) + { 
+ Assert.Contains(scope, StellaOpsScopes.All); + } + + [Theory] + [InlineData("Advisory:Read", StellaOpsScopes.AdvisoryRead)] + [InlineData(" VEX:Ingest ", StellaOpsScopes.VexIngest)] + [InlineData("AOC:VERIFY", StellaOpsScopes.AocVerify)] + [InlineData(" Signals:Write ", StellaOpsScopes.SignalsWrite)] + [InlineData("Policy:Author", StellaOpsScopes.PolicyAuthor)] + [InlineData("Export.Admin", StellaOpsScopes.ExportAdmin)] + public void Normalize_NormalizesToLowerCase(string input, string expected) + { + Assert.Equal(expected, StellaOpsScopes.Normalize(input)); + } +} diff --git a/src/StellaOps.Authority/StellaOps.Auth.Abstractions/AuthorityTelemetry.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/AuthorityTelemetry.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Auth.Abstractions/AuthorityTelemetry.cs rename to src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/AuthorityTelemetry.cs diff --git a/src/StellaOps.Authority/StellaOps.Auth.Abstractions/NetworkMask.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/NetworkMask.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Auth.Abstractions/NetworkMask.cs rename to src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/NetworkMask.cs diff --git a/src/StellaOps.Authority/StellaOps.Auth.Abstractions/NetworkMaskMatcher.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/NetworkMaskMatcher.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Auth.Abstractions/NetworkMaskMatcher.cs rename to src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/NetworkMaskMatcher.cs diff --git a/src/StellaOps.Authority/StellaOps.Auth.Abstractions/README.NuGet.md b/src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/README.NuGet.md similarity index 100% rename from src/StellaOps.Authority/StellaOps.Auth.Abstractions/README.NuGet.md rename to src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/README.NuGet.md diff --git a/src/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOps.Auth.Abstractions.csproj b/src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOps.Auth.Abstractions.csproj similarity index 100% rename from src/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOps.Auth.Abstractions.csproj rename to src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOps.Auth.Abstractions.csproj diff --git a/src/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsAuthenticationDefaults.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsAuthenticationDefaults.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsAuthenticationDefaults.cs rename to src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsAuthenticationDefaults.cs diff --git a/src/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsClaimTypes.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsClaimTypes.cs similarity index 96% rename from src/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsClaimTypes.cs rename to src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsClaimTypes.cs index a3ce9f0e..3fbe42eb 100644 --- a/src/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsClaimTypes.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsClaimTypes.cs @@ -1,62 +1,62 @@ -namespace StellaOps.Auth.Abstractions; - -/// -/// Canonical claim type 
identifiers used across StellaOps services. -/// -public static class StellaOpsClaimTypes -{ - /// - /// Subject identifier claim (maps to sub in JWTs). - /// - public const string Subject = "sub"; - - /// - /// StellaOps tenant identifier claim (multi-tenant deployments). - /// - public const string Tenant = "stellaops:tenant"; - - /// - /// StellaOps project identifier claim (optional project scoping within a tenant). - /// - public const string Project = "stellaops:project"; - - /// - /// OAuth2/OIDC client identifier claim (maps to client_id). - /// - public const string ClientId = "client_id"; - - /// - /// Unique token identifier claim (maps to jti). - /// - public const string TokenId = "jti"; - - /// - /// Authentication method reference claim (amr). - /// - public const string AuthenticationMethod = "amr"; - - /// - /// Space separated scope list (scope). - /// - public const string Scope = "scope"; - - /// - /// Individual scope items (scp). - /// - public const string ScopeItem = "scp"; - - /// - /// OAuth2 resource audiences (aud). - /// - public const string Audience = "aud"; - - /// - /// Identity provider hint for downstream services. - /// - public const string IdentityProvider = "stellaops:idp"; - - /// - /// Session identifier claim (sid). - /// - public const string SessionId = "sid"; -} +namespace StellaOps.Auth.Abstractions; + +/// +/// Canonical claim type identifiers used across StellaOps services. +/// +public static class StellaOpsClaimTypes +{ + /// + /// Subject identifier claim (maps to sub in JWTs). + /// + public const string Subject = "sub"; + + /// + /// StellaOps tenant identifier claim (multi-tenant deployments). + /// + public const string Tenant = "stellaops:tenant"; + + /// + /// StellaOps project identifier claim (optional project scoping within a tenant). + /// + public const string Project = "stellaops:project"; + + /// + /// OAuth2/OIDC client identifier claim (maps to client_id). + /// + public const string ClientId = "client_id"; + + /// + /// Unique token identifier claim (maps to jti). + /// + public const string TokenId = "jti"; + + /// + /// Authentication method reference claim (amr). + /// + public const string AuthenticationMethod = "amr"; + + /// + /// Space separated scope list (scope). + /// + public const string Scope = "scope"; + + /// + /// Individual scope items (scp). + /// + public const string ScopeItem = "scp"; + + /// + /// OAuth2 resource audiences (aud). + /// + public const string Audience = "aud"; + + /// + /// Identity provider hint for downstream services. + /// + public const string IdentityProvider = "stellaops:idp"; + + /// + /// Session identifier claim (sid). 
+ /// + public const string SessionId = "sid"; +} diff --git a/src/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsPrincipalBuilder.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsPrincipalBuilder.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsPrincipalBuilder.cs rename to src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsPrincipalBuilder.cs diff --git a/src/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsProblemResultFactory.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsProblemResultFactory.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsProblemResultFactory.cs rename to src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsProblemResultFactory.cs diff --git a/src/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsScopes.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsScopes.cs similarity index 96% rename from src/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsScopes.cs rename to src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsScopes.cs index 3df551bb..0201d293 100644 --- a/src/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsScopes.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsScopes.cs @@ -1,289 +1,289 @@ -using System; -using System.Collections.Generic; - -namespace StellaOps.Auth.Abstractions; - -/// -/// Canonical scope names supported by StellaOps services. -/// -public static class StellaOpsScopes -{ - /// - /// Scope required to trigger Concelier jobs. - /// - public const string ConcelierJobsTrigger = "concelier.jobs.trigger"; - - /// - /// Scope required to manage Concelier merge operations. - /// - public const string ConcelierMerge = "concelier.merge"; - - /// - /// Scope granting administrative access to Authority user management. - /// - public const string AuthorityUsersManage = "authority.users.manage"; - - /// - /// Scope granting administrative access to Authority client registrations. - /// - public const string AuthorityClientsManage = "authority.clients.manage"; - - /// - /// Scope granting read-only access to Authority audit logs. - /// - public const string AuthorityAuditRead = "authority.audit.read"; - - /// - /// Synthetic scope representing trusted network bypass. - /// - public const string Bypass = "stellaops.bypass"; - - /// - /// Scope granting read-only access to console UX features. - /// - public const string UiRead = "ui.read"; - - /// - /// Scope granting permission to approve exceptions. - /// - public const string ExceptionsApprove = "exceptions:approve"; - - /// - /// Scope granting read-only access to raw advisory ingestion data. - /// - public const string AdvisoryRead = "advisory:read"; - - /// - /// Scope granting write access for raw advisory ingestion. - /// - public const string AdvisoryIngest = "advisory:ingest"; - - /// - /// Scope granting read-only access to raw VEX ingestion data. - /// - public const string VexRead = "vex:read"; - - /// - /// Scope granting write access for raw VEX ingestion. - /// - public const string VexIngest = "vex:ingest"; - - /// - /// Scope granting permission to execute aggregation-only contract verification. - /// - public const string AocVerify = "aoc:verify"; - - /// - /// Scope granting read-only access to reachability signals. 
- /// - public const string SignalsRead = "signals:read"; - - /// - /// Scope granting permission to write reachability signals. - /// - public const string SignalsWrite = "signals:write"; - - /// - /// Scope granting administrative access to reachability signal ingestion. - /// - public const string SignalsAdmin = "signals:admin"; - - /// - /// Scope granting permission to create or edit policy drafts. - /// - public const string PolicyWrite = "policy:write"; - - /// - /// Scope granting permission to author Policy Studio workspaces. - /// - public const string PolicyAuthor = "policy:author"; - - /// - /// Scope granting permission to edit policy configurations. - /// - public const string PolicyEdit = "policy:edit"; - - /// - /// Scope granting read-only access to policy metadata. - /// - public const string PolicyRead = "policy:read"; - - /// - /// Scope granting permission to review Policy Studio drafts. - /// - public const string PolicyReview = "policy:review"; - - /// - /// Scope granting permission to submit drafts for review. - /// - public const string PolicySubmit = "policy:submit"; - - /// - /// Scope granting permission to approve or reject policies. - /// - public const string PolicyApprove = "policy:approve"; - - /// - /// Scope granting permission to operate Policy Studio promotions and runs. - /// - public const string PolicyOperate = "policy:operate"; - - /// - /// Scope granting permission to audit Policy Studio activity. - /// - public const string PolicyAudit = "policy:audit"; - - /// - /// Scope granting permission to trigger policy runs and activation workflows. - /// - public const string PolicyRun = "policy:run"; - - /// - /// Scope granting permission to activate policies. - /// - public const string PolicyActivate = "policy:activate"; - - /// - /// Scope granting read-only access to effective findings materialised by Policy Engine. - /// - public const string FindingsRead = "findings:read"; - - /// - /// Scope granting permission to run Policy Studio simulations. - /// - public const string PolicySimulate = "policy:simulate"; - - /// - /// Scope granted to Policy Engine service identity for writing effective findings. - /// - public const string EffectiveWrite = "effective:write"; - - /// - /// Scope granting read-only access to graph queries and overlays. - /// - public const string GraphRead = "graph:read"; - - /// - /// Scope granting read-only access to Vuln Explorer resources and permalinks. - /// - public const string VulnRead = "vuln:read"; - - /// - /// Scope granting read-only access to export center runs and bundles. - /// - public const string ExportViewer = "export.viewer"; - - /// - /// Scope granting permission to operate export center scheduling and run execution. - /// - public const string ExportOperator = "export.operator"; - - /// - /// Scope granting administrative control over export center retention, encryption keys, and scheduling policies. - /// - public const string ExportAdmin = "export.admin"; - - /// - /// Scope granting permission to enqueue or mutate graph build jobs. - /// - public const string GraphWrite = "graph:write"; - - /// - /// Scope granting permission to export graph artefacts (GraphML/JSONL/etc.). - /// - public const string GraphExport = "graph:export"; - - /// - /// Scope granting permission to trigger what-if simulations on graphs. - /// - public const string GraphSimulate = "graph:simulate"; - - /// - /// Scope granting read-only access to Orchestrator job state and telemetry. 
- /// - public const string OrchRead = "orch:read"; - - /// - /// Scope granting permission to execute Orchestrator control actions. - /// - public const string OrchOperate = "orch:operate"; - - /// - /// Scope granting read-only access to Authority tenant catalog APIs. - /// - public const string AuthorityTenantsRead = "authority:tenants.read"; - - private static readonly HashSet KnownScopes = new(StringComparer.OrdinalIgnoreCase) - { - ConcelierJobsTrigger, - ConcelierMerge, - AuthorityUsersManage, - AuthorityClientsManage, - AuthorityAuditRead, - Bypass, - UiRead, - ExceptionsApprove, - AdvisoryRead, - AdvisoryIngest, - VexRead, - VexIngest, - AocVerify, - SignalsRead, - SignalsWrite, - SignalsAdmin, - PolicyWrite, - PolicyAuthor, - PolicyEdit, - PolicyRead, - PolicyReview, - PolicySubmit, - PolicyApprove, - PolicyOperate, - PolicyAudit, - PolicyRun, - PolicyActivate, - PolicySimulate, - FindingsRead, - EffectiveWrite, - GraphRead, - VulnRead, - ExportViewer, - ExportOperator, - ExportAdmin, - GraphWrite, - GraphExport, - GraphSimulate, - OrchRead, - OrchOperate, - AuthorityTenantsRead - }; - - /// - /// Normalises a scope string (trim/convert to lower case). - /// - /// Scope raw value. - /// Normalised scope or null when the input is blank. - public static string? Normalize(string? scope) - { - if (string.IsNullOrWhiteSpace(scope)) - { - return null; - } - - return scope.Trim().ToLowerInvariant(); - } - - /// - /// Checks whether the provided scope is registered as a built-in StellaOps scope. - /// - public static bool IsKnown(string scope) - { - ArgumentNullException.ThrowIfNull(scope); - return KnownScopes.Contains(scope); - } - - /// - /// Returns the full set of built-in scopes. - /// - public static IReadOnlyCollection All => KnownScopes; -} +using System; +using System.Collections.Generic; + +namespace StellaOps.Auth.Abstractions; + +/// +/// Canonical scope names supported by StellaOps services. +/// +public static class StellaOpsScopes +{ + /// + /// Scope required to trigger Concelier jobs. + /// + public const string ConcelierJobsTrigger = "concelier.jobs.trigger"; + + /// + /// Scope required to manage Concelier merge operations. + /// + public const string ConcelierMerge = "concelier.merge"; + + /// + /// Scope granting administrative access to Authority user management. + /// + public const string AuthorityUsersManage = "authority.users.manage"; + + /// + /// Scope granting administrative access to Authority client registrations. + /// + public const string AuthorityClientsManage = "authority.clients.manage"; + + /// + /// Scope granting read-only access to Authority audit logs. + /// + public const string AuthorityAuditRead = "authority.audit.read"; + + /// + /// Synthetic scope representing trusted network bypass. + /// + public const string Bypass = "stellaops.bypass"; + + /// + /// Scope granting read-only access to console UX features. + /// + public const string UiRead = "ui.read"; + + /// + /// Scope granting permission to approve exceptions. + /// + public const string ExceptionsApprove = "exceptions:approve"; + + /// + /// Scope granting read-only access to raw advisory ingestion data. + /// + public const string AdvisoryRead = "advisory:read"; + + /// + /// Scope granting write access for raw advisory ingestion. + /// + public const string AdvisoryIngest = "advisory:ingest"; + + /// + /// Scope granting read-only access to raw VEX ingestion data. + /// + public const string VexRead = "vex:read"; + + /// + /// Scope granting write access for raw VEX ingestion. 
+ /// + public const string VexIngest = "vex:ingest"; + + /// + /// Scope granting permission to execute aggregation-only contract verification. + /// + public const string AocVerify = "aoc:verify"; + + /// + /// Scope granting read-only access to reachability signals. + /// + public const string SignalsRead = "signals:read"; + + /// + /// Scope granting permission to write reachability signals. + /// + public const string SignalsWrite = "signals:write"; + + /// + /// Scope granting administrative access to reachability signal ingestion. + /// + public const string SignalsAdmin = "signals:admin"; + + /// + /// Scope granting permission to create or edit policy drafts. + /// + public const string PolicyWrite = "policy:write"; + + /// + /// Scope granting permission to author Policy Studio workspaces. + /// + public const string PolicyAuthor = "policy:author"; + + /// + /// Scope granting permission to edit policy configurations. + /// + public const string PolicyEdit = "policy:edit"; + + /// + /// Scope granting read-only access to policy metadata. + /// + public const string PolicyRead = "policy:read"; + + /// + /// Scope granting permission to review Policy Studio drafts. + /// + public const string PolicyReview = "policy:review"; + + /// + /// Scope granting permission to submit drafts for review. + /// + public const string PolicySubmit = "policy:submit"; + + /// + /// Scope granting permission to approve or reject policies. + /// + public const string PolicyApprove = "policy:approve"; + + /// + /// Scope granting permission to operate Policy Studio promotions and runs. + /// + public const string PolicyOperate = "policy:operate"; + + /// + /// Scope granting permission to audit Policy Studio activity. + /// + public const string PolicyAudit = "policy:audit"; + + /// + /// Scope granting permission to trigger policy runs and activation workflows. + /// + public const string PolicyRun = "policy:run"; + + /// + /// Scope granting permission to activate policies. + /// + public const string PolicyActivate = "policy:activate"; + + /// + /// Scope granting read-only access to effective findings materialised by Policy Engine. + /// + public const string FindingsRead = "findings:read"; + + /// + /// Scope granting permission to run Policy Studio simulations. + /// + public const string PolicySimulate = "policy:simulate"; + + /// + /// Scope granted to Policy Engine service identity for writing effective findings. + /// + public const string EffectiveWrite = "effective:write"; + + /// + /// Scope granting read-only access to graph queries and overlays. + /// + public const string GraphRead = "graph:read"; + + /// + /// Scope granting read-only access to Vuln Explorer resources and permalinks. + /// + public const string VulnRead = "vuln:read"; + + /// + /// Scope granting read-only access to export center runs and bundles. + /// + public const string ExportViewer = "export.viewer"; + + /// + /// Scope granting permission to operate export center scheduling and run execution. + /// + public const string ExportOperator = "export.operator"; + + /// + /// Scope granting administrative control over export center retention, encryption keys, and scheduling policies. + /// + public const string ExportAdmin = "export.admin"; + + /// + /// Scope granting permission to enqueue or mutate graph build jobs. + /// + public const string GraphWrite = "graph:write"; + + /// + /// Scope granting permission to export graph artefacts (GraphML/JSONL/etc.). 
+ /// + public const string GraphExport = "graph:export"; + + /// + /// Scope granting permission to trigger what-if simulations on graphs. + /// + public const string GraphSimulate = "graph:simulate"; + + /// + /// Scope granting read-only access to Orchestrator job state and telemetry. + /// + public const string OrchRead = "orch:read"; + + /// + /// Scope granting permission to execute Orchestrator control actions. + /// + public const string OrchOperate = "orch:operate"; + + /// + /// Scope granting read-only access to Authority tenant catalog APIs. + /// + public const string AuthorityTenantsRead = "authority:tenants.read"; + + private static readonly HashSet KnownScopes = new(StringComparer.OrdinalIgnoreCase) + { + ConcelierJobsTrigger, + ConcelierMerge, + AuthorityUsersManage, + AuthorityClientsManage, + AuthorityAuditRead, + Bypass, + UiRead, + ExceptionsApprove, + AdvisoryRead, + AdvisoryIngest, + VexRead, + VexIngest, + AocVerify, + SignalsRead, + SignalsWrite, + SignalsAdmin, + PolicyWrite, + PolicyAuthor, + PolicyEdit, + PolicyRead, + PolicyReview, + PolicySubmit, + PolicyApprove, + PolicyOperate, + PolicyAudit, + PolicyRun, + PolicyActivate, + PolicySimulate, + FindingsRead, + EffectiveWrite, + GraphRead, + VulnRead, + ExportViewer, + ExportOperator, + ExportAdmin, + GraphWrite, + GraphExport, + GraphSimulate, + OrchRead, + OrchOperate, + AuthorityTenantsRead + }; + + /// + /// Normalises a scope string (trim/convert to lower case). + /// + /// Scope raw value. + /// Normalised scope or null when the input is blank. + public static string? Normalize(string? scope) + { + if (string.IsNullOrWhiteSpace(scope)) + { + return null; + } + + return scope.Trim().ToLowerInvariant(); + } + + /// + /// Checks whether the provided scope is registered as a built-in StellaOps scope. + /// + public static bool IsKnown(string scope) + { + ArgumentNullException.ThrowIfNull(scope); + return KnownScopes.Contains(scope); + } + + /// + /// Returns the full set of built-in scopes. + /// + public static IReadOnlyCollection All => KnownScopes; +} diff --git a/src/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsServiceIdentities.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsServiceIdentities.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsServiceIdentities.cs rename to src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsServiceIdentities.cs index d77775c4..2b29d011 100644 --- a/src/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsServiceIdentities.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsServiceIdentities.cs @@ -1,27 +1,27 @@ -namespace StellaOps.Auth.Abstractions; - -/// -/// Canonical identifiers for StellaOps service principals. -/// -public static class StellaOpsServiceIdentities -{ - /// - /// Service identity used by Policy Engine when materialising effective findings. - /// - public const string PolicyEngine = "policy-engine"; - - /// - /// Service identity used by Cartographer when constructing and maintaining graph projections. - /// - public const string Cartographer = "cartographer"; - - /// - /// Service identity used by Vuln Explorer when issuing scoped permalink requests. - /// - public const string VulnExplorer = "vuln-explorer"; - - /// - /// Service identity used by Signals components when managing reachability facts. 
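A small sketch of how the normalisation helpers shown above can sanitise a caller-supplied scope string before authorisation decisions; the raw input value is invented, while Normalize, IsKnown and the scope constants come from this patch.

using System;
using System.Linq;
using StellaOps.Auth.Abstractions;

// Raw value as it might arrive on a token request (illustrative input).
var requested = " Concelier.Jobs.Trigger  ui.read  made-up.scope ";

var granted = requested
    .Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)
    .Select(StellaOpsScopes.Normalize)   // trim + lower-case; null for blank entries
    .OfType<string>()                    // drop the nulls
    .Where(StellaOpsScopes.IsKnown)      // keep only built-in StellaOps scopes
    .Distinct(StringComparer.Ordinal)
    .ToArray();

// granted -> "concelier.jobs.trigger ui.read"; the unknown scope is dropped.
Console.WriteLine(string.Join(' ', granted));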
- /// - public const string Signals = "signals"; -} +namespace StellaOps.Auth.Abstractions; + +/// +/// Canonical identifiers for StellaOps service principals. +/// +public static class StellaOpsServiceIdentities +{ + /// + /// Service identity used by Policy Engine when materialising effective findings. + /// + public const string PolicyEngine = "policy-engine"; + + /// + /// Service identity used by Cartographer when constructing and maintaining graph projections. + /// + public const string Cartographer = "cartographer"; + + /// + /// Service identity used by Vuln Explorer when issuing scoped permalink requests. + /// + public const string VulnExplorer = "vuln-explorer"; + + /// + /// Service identity used by Signals components when managing reachability facts. + /// + public const string Signals = "signals"; +} diff --git a/src/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsTenancyDefaults.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsTenancyDefaults.cs similarity index 96% rename from src/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsTenancyDefaults.cs rename to src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsTenancyDefaults.cs index 2c5544cb..694de6c7 100644 --- a/src/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsTenancyDefaults.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOpsTenancyDefaults.cs @@ -1,12 +1,12 @@ -namespace StellaOps.Auth.Abstractions; - -/// -/// Shared tenancy default values used across StellaOps services. -/// -public static class StellaOpsTenancyDefaults -{ - /// - /// Sentinel value indicating the token is not scoped to a specific project. - /// - public const string AnyProject = "*"; -} +namespace StellaOps.Auth.Abstractions; + +/// +/// Shared tenancy default values used across StellaOps services. +/// +public static class StellaOpsTenancyDefaults +{ + /// + /// Sentinel value indicating the token is not scoped to a specific project. 
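A hedged sketch of how the AnyProject sentinel might be evaluated together with the StellaOpsClaimTypes.Project claim; the rule that a missing project claim behaves like the sentinel is an assumption for illustration, not something this patch states.

using System;
using System.Security.Claims;
using StellaOps.Auth.Abstractions;

// Hypothetical helper; not part of this patch.
internal static class ProjectScoping
{
    public static bool CanAccessProject(ClaimsPrincipal principal, string projectId)
    {
        var project = principal.FindFirst(StellaOpsClaimTypes.Project)?.Value;

        // Assumption: a token without a project claim, or carrying the AnyProject
        // sentinel, is treated as unscoped within its tenant.
        if (string.IsNullOrEmpty(project) || project == StellaOpsTenancyDefaults.AnyProject)
        {
            return true;
        }

        return string.Equals(project, projectId, StringComparison.Ordinal);
    }
}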
+ /// + public const string AnyProject = "*"; +} diff --git a/src/StellaOps.Authority/StellaOps.Auth.Client.Tests/ServiceCollectionExtensionsTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Client.Tests/ServiceCollectionExtensionsTests.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Auth.Client.Tests/ServiceCollectionExtensionsTests.cs rename to src/Authority/StellaOps.Authority/StellaOps.Auth.Client.Tests/ServiceCollectionExtensionsTests.cs diff --git a/src/StellaOps.Authority/StellaOps.Auth.Client.Tests/StellaOps.Auth.Client.Tests.csproj b/src/Authority/StellaOps.Authority/StellaOps.Auth.Client.Tests/StellaOps.Auth.Client.Tests.csproj similarity index 97% rename from src/StellaOps.Authority/StellaOps.Auth.Client.Tests/StellaOps.Auth.Client.Tests.csproj rename to src/Authority/StellaOps.Authority/StellaOps.Auth.Client.Tests/StellaOps.Auth.Client.Tests.csproj index e2e8acdc..566aa612 100644 --- a/src/StellaOps.Authority/StellaOps.Auth.Client.Tests/StellaOps.Auth.Client.Tests.csproj +++ b/src/Authority/StellaOps.Authority/StellaOps.Auth.Client.Tests/StellaOps.Auth.Client.Tests.csproj @@ -1,15 +1,15 @@ - - - net10.0 - enable - enable - - - - - - - - - - + + + net10.0 + enable + enable + + + + + + + + + + diff --git a/src/StellaOps.Authority/StellaOps.Auth.Client.Tests/StellaOpsAuthClientOptionsTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Client.Tests/StellaOpsAuthClientOptionsTests.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Auth.Client.Tests/StellaOpsAuthClientOptionsTests.cs rename to src/Authority/StellaOps.Authority/StellaOps.Auth.Client.Tests/StellaOpsAuthClientOptionsTests.cs diff --git a/src/StellaOps.Authority/StellaOps.Auth.Client.Tests/StellaOpsDiscoveryCacheTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Client.Tests/StellaOpsDiscoveryCacheTests.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Auth.Client.Tests/StellaOpsDiscoveryCacheTests.cs rename to src/Authority/StellaOps.Authority/StellaOps.Auth.Client.Tests/StellaOpsDiscoveryCacheTests.cs diff --git a/src/StellaOps.Authority/StellaOps.Auth.Client.Tests/StellaOpsTokenClientTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Client.Tests/StellaOpsTokenClientTests.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Auth.Client.Tests/StellaOpsTokenClientTests.cs rename to src/Authority/StellaOps.Authority/StellaOps.Auth.Client.Tests/StellaOpsTokenClientTests.cs diff --git a/src/StellaOps.Authority/StellaOps.Auth.Client.Tests/TokenCacheTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Client.Tests/TokenCacheTests.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Auth.Client.Tests/TokenCacheTests.cs rename to src/Authority/StellaOps.Authority/StellaOps.Auth.Client.Tests/TokenCacheTests.cs diff --git a/src/StellaOps.Authority/StellaOps.Auth.Client/FileTokenCache.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Client/FileTokenCache.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Auth.Client/FileTokenCache.cs rename to src/Authority/StellaOps.Authority/StellaOps.Auth.Client/FileTokenCache.cs diff --git a/src/StellaOps.Authority/StellaOps.Auth.Client/IStellaOpsTokenCache.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Client/IStellaOpsTokenCache.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Auth.Client/IStellaOpsTokenCache.cs rename to 
src/Authority/StellaOps.Authority/StellaOps.Auth.Client/IStellaOpsTokenCache.cs diff --git a/src/StellaOps.Authority/StellaOps.Auth.Client/IStellaOpsTokenClient.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Client/IStellaOpsTokenClient.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Auth.Client/IStellaOpsTokenClient.cs rename to src/Authority/StellaOps.Authority/StellaOps.Auth.Client/IStellaOpsTokenClient.cs index 574e53d8..031be95e 100644 --- a/src/StellaOps.Authority/StellaOps.Auth.Client/IStellaOpsTokenClient.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Auth.Client/IStellaOpsTokenClient.cs @@ -1,42 +1,42 @@ -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.IdentityModel.Tokens; - -namespace StellaOps.Auth.Client; - -/// -/// Abstraction for requesting tokens from StellaOps Authority. -/// -public interface IStellaOpsTokenClient -{ - /// - /// Requests an access token using the resource owner password credentials flow. - /// - Task RequestPasswordTokenAsync(string username, string password, string? scope = null, IReadOnlyDictionary? additionalParameters = null, CancellationToken cancellationToken = default); - - /// - /// Requests an access token using the client credentials flow. - /// - Task RequestClientCredentialsTokenAsync(string? scope = null, IReadOnlyDictionary? additionalParameters = null, CancellationToken cancellationToken = default); - - /// - /// Retrieves the cached JWKS document. - /// - Task GetJsonWebKeySetAsync(CancellationToken cancellationToken = default); - - /// - /// Retrieves a cached token entry. - /// - ValueTask GetCachedTokenAsync(string key, CancellationToken cancellationToken = default); - - /// - /// Persists a token entry in the cache. - /// - ValueTask CacheTokenAsync(string key, StellaOpsTokenCacheEntry entry, CancellationToken cancellationToken = default); - - /// - /// Removes a cached entry. - /// - ValueTask ClearCachedTokenAsync(string key, CancellationToken cancellationToken = default); -} +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.IdentityModel.Tokens; + +namespace StellaOps.Auth.Client; + +/// +/// Abstraction for requesting tokens from StellaOps Authority. +/// +public interface IStellaOpsTokenClient +{ + /// + /// Requests an access token using the resource owner password credentials flow. + /// + Task RequestPasswordTokenAsync(string username, string password, string? scope = null, IReadOnlyDictionary? additionalParameters = null, CancellationToken cancellationToken = default); + + /// + /// Requests an access token using the client credentials flow. + /// + Task RequestClientCredentialsTokenAsync(string? scope = null, IReadOnlyDictionary? additionalParameters = null, CancellationToken cancellationToken = default); + + /// + /// Retrieves the cached JWKS document. + /// + Task GetJsonWebKeySetAsync(CancellationToken cancellationToken = default); + + /// + /// Retrieves a cached token entry. + /// + ValueTask GetCachedTokenAsync(string key, CancellationToken cancellationToken = default); + + /// + /// Persists a token entry in the cache. + /// + ValueTask CacheTokenAsync(string key, StellaOpsTokenCacheEntry entry, CancellationToken cancellationToken = default); + + /// + /// Removes a cached entry. 
+ /// + ValueTask ClearCachedTokenAsync(string key, CancellationToken cancellationToken = default); +} diff --git a/src/StellaOps.Authority/StellaOps.Auth.Client/InMemoryTokenCache.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Client/InMemoryTokenCache.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Auth.Client/InMemoryTokenCache.cs rename to src/Authority/StellaOps.Authority/StellaOps.Auth.Client/InMemoryTokenCache.cs diff --git a/src/StellaOps.Authority/StellaOps.Auth.Client/README.NuGet.md b/src/Authority/StellaOps.Authority/StellaOps.Auth.Client/README.NuGet.md similarity index 100% rename from src/StellaOps.Authority/StellaOps.Auth.Client/README.NuGet.md rename to src/Authority/StellaOps.Authority/StellaOps.Auth.Client/README.NuGet.md diff --git a/src/StellaOps.Authority/StellaOps.Auth.Client/ServiceCollectionExtensions.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Client/ServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Auth.Client/ServiceCollectionExtensions.cs rename to src/Authority/StellaOps.Authority/StellaOps.Auth.Client/ServiceCollectionExtensions.cs diff --git a/src/StellaOps.Authority/StellaOps.Auth.Client/StellaOps.Auth.Client.csproj b/src/Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOps.Auth.Client.csproj similarity index 91% rename from src/StellaOps.Authority/StellaOps.Auth.Client/StellaOps.Auth.Client.csproj rename to src/Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOps.Auth.Client.csproj index 9b864848..c6654381 100644 --- a/src/StellaOps.Authority/StellaOps.Auth.Client/StellaOps.Auth.Client.csproj +++ b/src/Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOps.Auth.Client.csproj @@ -1,4 +1,5 @@ - + + net10.0 preview @@ -27,7 +28,7 @@ - + @@ -43,4 +44,4 @@ <_Parameter1>StellaOps.Auth.Client.Tests - + \ No newline at end of file diff --git a/src/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsAuthClientOptions.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsAuthClientOptions.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsAuthClientOptions.cs rename to src/Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsAuthClientOptions.cs diff --git a/src/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsDiscoveryCache.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsDiscoveryCache.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsDiscoveryCache.cs rename to src/Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsDiscoveryCache.cs diff --git a/src/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsJwksCache.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsJwksCache.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsJwksCache.cs rename to src/Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsJwksCache.cs diff --git a/src/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsTokenCacheEntry.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsTokenCacheEntry.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsTokenCacheEntry.cs rename to src/Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsTokenCacheEntry.cs diff --git a/src/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsTokenClient.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsTokenClient.cs 
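Before the implementation diff that follows, a hedged usage sketch for the interface: it assumes IStellaOpsTokenClient is resolved from dependency injection and that the token result exposes an AccessToken member; that member name and the cache key are assumptions, while the method signatures and the scope constant come from this patch.

using System;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Auth.Abstractions;
using StellaOps.Auth.Client;

// Hypothetical caller; StellaOpsTokenResult.AccessToken is an assumed member name.
internal sealed class ConcelierJobTrigger
{
    private readonly IStellaOpsTokenClient tokenClient;

    public ConcelierJobTrigger(IStellaOpsTokenClient tokenClient)
        => this.tokenClient = tokenClient ?? throw new ArgumentNullException(nameof(tokenClient));

    public async Task<string> AcquireTriggerTokenAsync(CancellationToken cancellationToken)
    {
        // Client credentials flow, scoped to the Concelier trigger scope.
        var token = await tokenClient.RequestClientCredentialsTokenAsync(
            scope: StellaOpsScopes.ConcelierJobsTrigger,
            cancellationToken: cancellationToken);

        return token.AccessToken;
    }

    public ValueTask ForgetCachedTokenAsync(CancellationToken cancellationToken)
        // Cache keys are caller-defined strings; "concelier-trigger" is illustrative.
        => tokenClient.ClearCachedTokenAsync("concelier-trigger", cancellationToken);
}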
similarity index 97% rename from src/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsTokenClient.cs rename to src/Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsTokenClient.cs index 958ce3f5..748e770d 100644 --- a/src/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsTokenClient.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsTokenClient.cs @@ -1,236 +1,236 @@ -using System; -using System.Collections.Generic; -using System.Globalization; -using System.Net.Http; -using System.Net.Http.Headers; -using System.Text; -using System.Text.Json; -using System.Text.Json.Serialization; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using Microsoft.IdentityModel.Tokens; - -namespace StellaOps.Auth.Client; - -/// -/// Default implementation of . -/// -public sealed class StellaOpsTokenClient : IStellaOpsTokenClient -{ - private static readonly MediaTypeHeaderValue JsonMediaType = new("application/json"); - - private readonly HttpClient httpClient; - private readonly StellaOpsDiscoveryCache discoveryCache; - private readonly StellaOpsJwksCache jwksCache; - private readonly IOptionsMonitor optionsMonitor; - private readonly IStellaOpsTokenCache tokenCache; - private readonly TimeProvider timeProvider; - private readonly ILogger? logger; - private readonly JsonSerializerOptions serializerOptions = new(JsonSerializerDefaults.Web); - - public StellaOpsTokenClient( - HttpClient httpClient, - StellaOpsDiscoveryCache discoveryCache, - StellaOpsJwksCache jwksCache, - IOptionsMonitor optionsMonitor, - IStellaOpsTokenCache tokenCache, - TimeProvider? timeProvider = null, - ILogger? logger = null) - { - this.httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient)); - this.discoveryCache = discoveryCache ?? throw new ArgumentNullException(nameof(discoveryCache)); - this.jwksCache = jwksCache ?? throw new ArgumentNullException(nameof(jwksCache)); - this.optionsMonitor = optionsMonitor ?? throw new ArgumentNullException(nameof(optionsMonitor)); - this.tokenCache = tokenCache ?? throw new ArgumentNullException(nameof(tokenCache)); - this.timeProvider = timeProvider ?? TimeProvider.System; - this.logger = logger; - } - - public Task RequestPasswordTokenAsync( - string username, - string password, - string? scope = null, - IReadOnlyDictionary? additionalParameters = null, - CancellationToken cancellationToken = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(username); - ArgumentException.ThrowIfNullOrWhiteSpace(password); - - var options = optionsMonitor.CurrentValue; - - var parameters = new Dictionary(StringComparer.Ordinal) - { - ["grant_type"] = "password", - ["username"] = username, - ["password"] = password, - ["client_id"] = options.ClientId - }; - - if (!string.IsNullOrEmpty(options.ClientSecret)) - { - parameters["client_secret"] = options.ClientSecret; - } - - AppendScope(parameters, scope, options); - - if (additionalParameters is not null) - { - foreach (var (key, value) in additionalParameters) - { - if (string.IsNullOrWhiteSpace(key) || value is null) - { - continue; - } - - parameters[key] = value; - } - } - - return RequestTokenAsync(parameters, cancellationToken); - } - - public Task RequestClientCredentialsTokenAsync(string? scope = null, IReadOnlyDictionary? 
additionalParameters = null, CancellationToken cancellationToken = default) - { - var options = optionsMonitor.CurrentValue; - if (string.IsNullOrWhiteSpace(options.ClientId)) - { - throw new InvalidOperationException("Client credentials flow requires ClientId to be configured."); - } - - var parameters = new Dictionary(StringComparer.Ordinal) - { - ["grant_type"] = "client_credentials", - ["client_id"] = options.ClientId - }; - - if (!string.IsNullOrEmpty(options.ClientSecret)) - { - parameters["client_secret"] = options.ClientSecret; - } - - AppendScope(parameters, scope, options); - - if (additionalParameters is not null) - { - foreach (var (key, value) in additionalParameters) - { - if (string.IsNullOrWhiteSpace(key) || value is null) - { - continue; - } - - parameters[key] = value; - } - } - - return RequestTokenAsync(parameters, cancellationToken); - } - - public Task GetJsonWebKeySetAsync(CancellationToken cancellationToken = default) - => jwksCache.GetAsync(cancellationToken); - - public ValueTask GetCachedTokenAsync(string key, CancellationToken cancellationToken = default) - => tokenCache.GetAsync(key, cancellationToken); - - public ValueTask CacheTokenAsync(string key, StellaOpsTokenCacheEntry entry, CancellationToken cancellationToken = default) - => tokenCache.SetAsync(key, entry, cancellationToken); - - public ValueTask ClearCachedTokenAsync(string key, CancellationToken cancellationToken = default) - => tokenCache.RemoveAsync(key, cancellationToken); - - private async Task RequestTokenAsync(Dictionary parameters, CancellationToken cancellationToken) - { - var options = optionsMonitor.CurrentValue; - var configuration = await discoveryCache.GetAsync(cancellationToken).ConfigureAwait(false); - - using var request = new HttpRequestMessage(HttpMethod.Post, configuration.TokenEndpoint) - { - Content = new FormUrlEncodedContent(parameters) - }; - request.Headers.Accept.TryParseAdd(JsonMediaType.ToString()); - - using var response = await httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); - - var payload = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - - if (!response.IsSuccessStatusCode) - { - logger?.LogWarning("Token request failed with status {StatusCode}: {Payload}", response.StatusCode, payload); - throw new InvalidOperationException($"Token request failed with status {(int)response.StatusCode}."); - } - - var document = JsonSerializer.Deserialize(payload, serializerOptions); - if (document is null || string.IsNullOrWhiteSpace(document.AccessToken)) - { - throw new InvalidOperationException("Token response did not contain an access_token."); - } - - var expiresIn = document.ExpiresIn ?? 3600; - var expiresAt = timeProvider.GetUtcNow() + TimeSpan.FromSeconds(expiresIn); - var normalizedScopes = ParseScopes(document.Scope ?? parameters.GetValueOrDefault("scope")); - - var result = new StellaOpsTokenResult( - document.AccessToken, - document.TokenType ?? "Bearer", - expiresAt, - normalizedScopes, - document.RefreshToken, - document.IdToken, - payload); - - logger?.LogDebug("Token issued; expires at {ExpiresAt}.", expiresAt); - - return result; - } - - private static void AppendScope(IDictionary parameters, string? 
scope, StellaOpsAuthClientOptions options) - { - var resolvedScope = scope; - if (string.IsNullOrWhiteSpace(resolvedScope) && options.NormalizedScopes.Count > 0) - { - resolvedScope = string.Join(' ', options.NormalizedScopes); - } - - if (!string.IsNullOrWhiteSpace(resolvedScope)) - { - parameters["scope"] = resolvedScope; - } - } - - private static string[] ParseScopes(string? scope) - { - if (string.IsNullOrWhiteSpace(scope)) - { - return Array.Empty(); - } - - var parts = scope.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); - if (parts.Length == 0) - { - return Array.Empty(); - } - - var unique = new HashSet(parts.Length, StringComparer.Ordinal); - foreach (var part in parts) - { - unique.Add(part); - } - - var result = new string[unique.Count]; - unique.CopyTo(result); - Array.Sort(result, StringComparer.Ordinal); - return result; - } - - private sealed record TokenResponseDocument( - [property: JsonPropertyName("access_token")] string? AccessToken, - [property: JsonPropertyName("refresh_token")] string? RefreshToken, - [property: JsonPropertyName("id_token")] string? IdToken, - [property: JsonPropertyName("token_type")] string? TokenType, - [property: JsonPropertyName("expires_in")] int? ExpiresIn, - [property: JsonPropertyName("scope")] string? Scope, - [property: JsonPropertyName("error")] string? Error, - [property: JsonPropertyName("error_description")] string? ErrorDescription); -} +using System; +using System.Collections.Generic; +using System.Globalization; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Text; +using System.Text.Json; +using System.Text.Json.Serialization; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using Microsoft.IdentityModel.Tokens; + +namespace StellaOps.Auth.Client; + +/// +/// Default implementation of . +/// +public sealed class StellaOpsTokenClient : IStellaOpsTokenClient +{ + private static readonly MediaTypeHeaderValue JsonMediaType = new("application/json"); + + private readonly HttpClient httpClient; + private readonly StellaOpsDiscoveryCache discoveryCache; + private readonly StellaOpsJwksCache jwksCache; + private readonly IOptionsMonitor optionsMonitor; + private readonly IStellaOpsTokenCache tokenCache; + private readonly TimeProvider timeProvider; + private readonly ILogger? logger; + private readonly JsonSerializerOptions serializerOptions = new(JsonSerializerDefaults.Web); + + public StellaOpsTokenClient( + HttpClient httpClient, + StellaOpsDiscoveryCache discoveryCache, + StellaOpsJwksCache jwksCache, + IOptionsMonitor optionsMonitor, + IStellaOpsTokenCache tokenCache, + TimeProvider? timeProvider = null, + ILogger? logger = null) + { + this.httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient)); + this.discoveryCache = discoveryCache ?? throw new ArgumentNullException(nameof(discoveryCache)); + this.jwksCache = jwksCache ?? throw new ArgumentNullException(nameof(jwksCache)); + this.optionsMonitor = optionsMonitor ?? throw new ArgumentNullException(nameof(optionsMonitor)); + this.tokenCache = tokenCache ?? throw new ArgumentNullException(nameof(tokenCache)); + this.timeProvider = timeProvider ?? TimeProvider.System; + this.logger = logger; + } + + public Task RequestPasswordTokenAsync( + string username, + string password, + string? scope = null, + IReadOnlyDictionary? 
additionalParameters = null, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(username); + ArgumentException.ThrowIfNullOrWhiteSpace(password); + + var options = optionsMonitor.CurrentValue; + + var parameters = new Dictionary(StringComparer.Ordinal) + { + ["grant_type"] = "password", + ["username"] = username, + ["password"] = password, + ["client_id"] = options.ClientId + }; + + if (!string.IsNullOrEmpty(options.ClientSecret)) + { + parameters["client_secret"] = options.ClientSecret; + } + + AppendScope(parameters, scope, options); + + if (additionalParameters is not null) + { + foreach (var (key, value) in additionalParameters) + { + if (string.IsNullOrWhiteSpace(key) || value is null) + { + continue; + } + + parameters[key] = value; + } + } + + return RequestTokenAsync(parameters, cancellationToken); + } + + public Task RequestClientCredentialsTokenAsync(string? scope = null, IReadOnlyDictionary? additionalParameters = null, CancellationToken cancellationToken = default) + { + var options = optionsMonitor.CurrentValue; + if (string.IsNullOrWhiteSpace(options.ClientId)) + { + throw new InvalidOperationException("Client credentials flow requires ClientId to be configured."); + } + + var parameters = new Dictionary(StringComparer.Ordinal) + { + ["grant_type"] = "client_credentials", + ["client_id"] = options.ClientId + }; + + if (!string.IsNullOrEmpty(options.ClientSecret)) + { + parameters["client_secret"] = options.ClientSecret; + } + + AppendScope(parameters, scope, options); + + if (additionalParameters is not null) + { + foreach (var (key, value) in additionalParameters) + { + if (string.IsNullOrWhiteSpace(key) || value is null) + { + continue; + } + + parameters[key] = value; + } + } + + return RequestTokenAsync(parameters, cancellationToken); + } + + public Task GetJsonWebKeySetAsync(CancellationToken cancellationToken = default) + => jwksCache.GetAsync(cancellationToken); + + public ValueTask GetCachedTokenAsync(string key, CancellationToken cancellationToken = default) + => tokenCache.GetAsync(key, cancellationToken); + + public ValueTask CacheTokenAsync(string key, StellaOpsTokenCacheEntry entry, CancellationToken cancellationToken = default) + => tokenCache.SetAsync(key, entry, cancellationToken); + + public ValueTask ClearCachedTokenAsync(string key, CancellationToken cancellationToken = default) + => tokenCache.RemoveAsync(key, cancellationToken); + + private async Task RequestTokenAsync(Dictionary parameters, CancellationToken cancellationToken) + { + var options = optionsMonitor.CurrentValue; + var configuration = await discoveryCache.GetAsync(cancellationToken).ConfigureAwait(false); + + using var request = new HttpRequestMessage(HttpMethod.Post, configuration.TokenEndpoint) + { + Content = new FormUrlEncodedContent(parameters) + }; + request.Headers.Accept.TryParseAdd(JsonMediaType.ToString()); + + using var response = await httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); + + var payload = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + + if (!response.IsSuccessStatusCode) + { + logger?.LogWarning("Token request failed with status {StatusCode}: {Payload}", response.StatusCode, payload); + throw new InvalidOperationException($"Token request failed with status {(int)response.StatusCode}."); + } + + var document = JsonSerializer.Deserialize(payload, serializerOptions); + if (document is null || string.IsNullOrWhiteSpace(document.AccessToken)) + { + throw new 
InvalidOperationException("Token response did not contain an access_token."); + } + + var expiresIn = document.ExpiresIn ?? 3600; + var expiresAt = timeProvider.GetUtcNow() + TimeSpan.FromSeconds(expiresIn); + var normalizedScopes = ParseScopes(document.Scope ?? parameters.GetValueOrDefault("scope")); + + var result = new StellaOpsTokenResult( + document.AccessToken, + document.TokenType ?? "Bearer", + expiresAt, + normalizedScopes, + document.RefreshToken, + document.IdToken, + payload); + + logger?.LogDebug("Token issued; expires at {ExpiresAt}.", expiresAt); + + return result; + } + + private static void AppendScope(IDictionary parameters, string? scope, StellaOpsAuthClientOptions options) + { + var resolvedScope = scope; + if (string.IsNullOrWhiteSpace(resolvedScope) && options.NormalizedScopes.Count > 0) + { + resolvedScope = string.Join(' ', options.NormalizedScopes); + } + + if (!string.IsNullOrWhiteSpace(resolvedScope)) + { + parameters["scope"] = resolvedScope; + } + } + + private static string[] ParseScopes(string? scope) + { + if (string.IsNullOrWhiteSpace(scope)) + { + return Array.Empty(); + } + + var parts = scope.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); + if (parts.Length == 0) + { + return Array.Empty(); + } + + var unique = new HashSet(parts.Length, StringComparer.Ordinal); + foreach (var part in parts) + { + unique.Add(part); + } + + var result = new string[unique.Count]; + unique.CopyTo(result); + Array.Sort(result, StringComparer.Ordinal); + return result; + } + + private sealed record TokenResponseDocument( + [property: JsonPropertyName("access_token")] string? AccessToken, + [property: JsonPropertyName("refresh_token")] string? RefreshToken, + [property: JsonPropertyName("id_token")] string? IdToken, + [property: JsonPropertyName("token_type")] string? TokenType, + [property: JsonPropertyName("expires_in")] int? ExpiresIn, + [property: JsonPropertyName("scope")] string? Scope, + [property: JsonPropertyName("error")] string? Error, + [property: JsonPropertyName("error_description")] string? 
ErrorDescription); +} diff --git a/src/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsTokenResult.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsTokenResult.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsTokenResult.cs rename to src/Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOpsTokenResult.cs diff --git a/src/StellaOps.Authority/StellaOps.Auth.ServerIntegration.Tests/ServiceCollectionExtensionsTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration.Tests/ServiceCollectionExtensionsTests.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Auth.ServerIntegration.Tests/ServiceCollectionExtensionsTests.cs rename to src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration.Tests/ServiceCollectionExtensionsTests.cs diff --git a/src/StellaOps.Authority/StellaOps.Auth.ServerIntegration.Tests/StellaOps.Auth.ServerIntegration.Tests.csproj b/src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration.Tests/StellaOps.Auth.ServerIntegration.Tests.csproj similarity index 100% rename from src/StellaOps.Authority/StellaOps.Auth.ServerIntegration.Tests/StellaOps.Auth.ServerIntegration.Tests.csproj rename to src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration.Tests/StellaOps.Auth.ServerIntegration.Tests.csproj diff --git a/src/StellaOps.Authority/StellaOps.Auth.ServerIntegration.Tests/StellaOpsResourceServerOptionsTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration.Tests/StellaOpsResourceServerOptionsTests.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Auth.ServerIntegration.Tests/StellaOpsResourceServerOptionsTests.cs rename to src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration.Tests/StellaOpsResourceServerOptionsTests.cs index 09c4c8a1..eac5f988 100644 --- a/src/StellaOps.Authority/StellaOps.Auth.ServerIntegration.Tests/StellaOpsResourceServerOptionsTests.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration.Tests/StellaOpsResourceServerOptionsTests.cs @@ -1,55 +1,55 @@ -using System; -using System.Net; -using StellaOps.Auth.ServerIntegration; -using Xunit; - -namespace StellaOps.Auth.ServerIntegration.Tests; - -public class StellaOpsResourceServerOptionsTests -{ - [Fact] - public void Validate_NormalisesCollections() - { - var options = new StellaOpsResourceServerOptions - { - Authority = "https://authority.stella-ops.test", - BackchannelTimeout = TimeSpan.FromSeconds(10), - TokenClockSkew = TimeSpan.FromSeconds(30) - }; - - options.Audiences.Add(" api://concelier "); - options.Audiences.Add("api://concelier"); - options.Audiences.Add("api://concelier-admin"); - - options.RequiredScopes.Add(" Concelier.Jobs.Trigger "); - options.RequiredScopes.Add("concelier.jobs.trigger"); - options.RequiredScopes.Add("AUTHORITY.USERS.MANAGE"); - - options.RequiredTenants.Add(" Tenant-Alpha "); - options.RequiredTenants.Add("tenant-alpha"); - options.RequiredTenants.Add("Tenant-Beta"); - - options.BypassNetworks.Add("127.0.0.1/32"); - options.BypassNetworks.Add(" 127.0.0.1/32 "); - options.BypassNetworks.Add("::1/128"); - - options.Validate(); - - Assert.Equal(new Uri("https://authority.stella-ops.test"), options.AuthorityUri); - Assert.Equal(new[] { "api://concelier", "api://concelier-admin" }, options.Audiences); - Assert.Equal(new[] { "authority.users.manage", "concelier.jobs.trigger" }, options.NormalizedScopes); - Assert.Equal(new[] { "tenant-alpha", "tenant-beta" }, 
options.NormalizedTenants); - Assert.True(options.BypassMatcher.IsAllowed(IPAddress.Parse("127.0.0.1"))); - Assert.True(options.BypassMatcher.IsAllowed(IPAddress.IPv6Loopback)); - } - - [Fact] - public void Validate_Throws_When_AuthorityMissing() - { - var options = new StellaOpsResourceServerOptions(); - - var exception = Assert.Throws(() => options.Validate()); - - Assert.Contains("Authority", exception.Message, StringComparison.OrdinalIgnoreCase); - } -} +using System; +using System.Net; +using StellaOps.Auth.ServerIntegration; +using Xunit; + +namespace StellaOps.Auth.ServerIntegration.Tests; + +public class StellaOpsResourceServerOptionsTests +{ + [Fact] + public void Validate_NormalisesCollections() + { + var options = new StellaOpsResourceServerOptions + { + Authority = "https://authority.stella-ops.test", + BackchannelTimeout = TimeSpan.FromSeconds(10), + TokenClockSkew = TimeSpan.FromSeconds(30) + }; + + options.Audiences.Add(" api://concelier "); + options.Audiences.Add("api://concelier"); + options.Audiences.Add("api://concelier-admin"); + + options.RequiredScopes.Add(" Concelier.Jobs.Trigger "); + options.RequiredScopes.Add("concelier.jobs.trigger"); + options.RequiredScopes.Add("AUTHORITY.USERS.MANAGE"); + + options.RequiredTenants.Add(" Tenant-Alpha "); + options.RequiredTenants.Add("tenant-alpha"); + options.RequiredTenants.Add("Tenant-Beta"); + + options.BypassNetworks.Add("127.0.0.1/32"); + options.BypassNetworks.Add(" 127.0.0.1/32 "); + options.BypassNetworks.Add("::1/128"); + + options.Validate(); + + Assert.Equal(new Uri("https://authority.stella-ops.test"), options.AuthorityUri); + Assert.Equal(new[] { "api://concelier", "api://concelier-admin" }, options.Audiences); + Assert.Equal(new[] { "authority.users.manage", "concelier.jobs.trigger" }, options.NormalizedScopes); + Assert.Equal(new[] { "tenant-alpha", "tenant-beta" }, options.NormalizedTenants); + Assert.True(options.BypassMatcher.IsAllowed(IPAddress.Parse("127.0.0.1"))); + Assert.True(options.BypassMatcher.IsAllowed(IPAddress.IPv6Loopback)); + } + + [Fact] + public void Validate_Throws_When_AuthorityMissing() + { + var options = new StellaOpsResourceServerOptions(); + + var exception = Assert.Throws(() => options.Validate()); + + Assert.Contains("Authority", exception.Message, StringComparison.OrdinalIgnoreCase); + } +} diff --git a/src/StellaOps.Authority/StellaOps.Auth.ServerIntegration.Tests/StellaOpsScopeAuthorizationHandlerTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration.Tests/StellaOpsScopeAuthorizationHandlerTests.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Auth.ServerIntegration.Tests/StellaOpsScopeAuthorizationHandlerTests.cs rename to src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration.Tests/StellaOpsScopeAuthorizationHandlerTests.cs index 76339c8e..bcefb986 100644 --- a/src/StellaOps.Authority/StellaOps.Auth.ServerIntegration.Tests/StellaOpsScopeAuthorizationHandlerTests.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration.Tests/StellaOpsScopeAuthorizationHandlerTests.cs @@ -1,199 +1,199 @@ -using System; -using System.Net; -using System.Security.Claims; -using System.Threading.Tasks; -using Microsoft.AspNetCore.Authorization; -using Microsoft.AspNetCore.Http; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.Extensions.Options; -using StellaOps.Auth.Abstractions; -using StellaOps.Auth.ServerIntegration; -using Xunit; - -namespace StellaOps.Auth.ServerIntegration.Tests; - -public class 
StellaOpsScopeAuthorizationHandlerTests -{ - [Fact] - public async Task HandleRequirement_Succeeds_WhenScopePresent() - { - var optionsMonitor = CreateOptionsMonitor(options => - { - options.Authority = "https://authority.example"; - options.RequiredTenants.Add("tenant-alpha"); - options.Validate(); - }); - - var (handler, accessor) = CreateHandler(optionsMonitor, remoteAddress: IPAddress.Parse("10.0.0.1")); - var requirement = new StellaOpsScopeRequirement(new[] { StellaOpsScopes.ConcelierJobsTrigger }); - var principal = new StellaOpsPrincipalBuilder() - .WithSubject("user-1") - .WithTenant("tenant-alpha") - .WithScopes(new[] { StellaOpsScopes.ConcelierJobsTrigger }) - .Build(); - - var context = new AuthorizationHandlerContext(new[] { requirement }, principal, accessor.HttpContext); - - await handler.HandleAsync(context); - - Assert.True(context.HasSucceeded); - } - - [Fact] - [Fact] - public async Task HandleRequirement_Fails_WhenTenantMismatch() - { - var optionsMonitor = CreateOptionsMonitor(options => - { - options.Authority = "https://authority.example"; - options.RequiredTenants.Add("tenant-alpha"); - options.Validate(); - }); - - var (handler, accessor) = CreateHandler(optionsMonitor, remoteAddress: IPAddress.Parse("10.0.0.1")); - var requirement = new StellaOpsScopeRequirement(new[] { StellaOpsScopes.ConcelierJobsTrigger }); - var principal = new StellaOpsPrincipalBuilder() - .WithSubject("user-1") - .WithTenant("tenant-beta") - .WithScopes(new[] { StellaOpsScopes.ConcelierJobsTrigger }) - .Build(); - - var context = new AuthorizationHandlerContext(new[] { requirement }, principal, accessor.HttpContext); - - await handler.HandleAsync(context); - - Assert.False(context.HasSucceeded); - } - - public async Task HandleRequirement_Succeeds_WhenBypassNetworkMatches() - { - var optionsMonitor = CreateOptionsMonitor(options => - { - options.Authority = "https://authority.example"; - options.BypassNetworks.Add("127.0.0.1/32"); - options.Validate(); - }); - - var (handler, accessor) = CreateHandler(optionsMonitor, remoteAddress: IPAddress.Parse("127.0.0.1")); - var requirement = new StellaOpsScopeRequirement(new[] { StellaOpsScopes.ConcelierJobsTrigger }); - var principal = new ClaimsPrincipal(new ClaimsIdentity()); - var context = new AuthorizationHandlerContext(new[] { requirement }, principal, accessor.HttpContext); - - await handler.HandleAsync(context); - - Assert.True(context.HasSucceeded); - } - - [Fact] - public async Task HandleRequirement_Fails_WhenScopeMissingAndNoBypass() - { - var optionsMonitor = CreateOptionsMonitor(options => - { - options.Authority = "https://authority.example"; - options.Validate(); - }); - - var (handler, accessor) = CreateHandler(optionsMonitor, remoteAddress: IPAddress.Parse("203.0.113.10")); - var requirement = new StellaOpsScopeRequirement(new[] { StellaOpsScopes.ConcelierJobsTrigger }); - var principal = new ClaimsPrincipal(new ClaimsIdentity()); - var context = new AuthorizationHandlerContext(new[] { requirement }, principal, accessor.HttpContext); - - await handler.HandleAsync(context); - - Assert.False(context.HasSucceeded); - } - - [Fact] - public async Task HandleRequirement_Fails_WhenDefaultScopeMissing() - { - var optionsMonitor = CreateOptionsMonitor(options => - { - options.Authority = "https://authority.example"; - options.RequiredScopes.Add(StellaOpsScopes.PolicyRun); - options.Validate(); - }); - - var (handler, accessor) = CreateHandler(optionsMonitor, remoteAddress: IPAddress.Parse("198.51.100.5")); - var requirement = new 
StellaOpsScopeRequirement(new[] { StellaOpsScopes.ConcelierJobsTrigger }); - var principal = new StellaOpsPrincipalBuilder() - .WithSubject("user-tenant") - .WithScopes(new[] { StellaOpsScopes.ConcelierJobsTrigger }) - .Build(); - - var context = new AuthorizationHandlerContext(new[] { requirement }, principal, accessor.HttpContext); - - await handler.HandleAsync(context); - - Assert.False(context.HasSucceeded); - } - - [Fact] - public async Task HandleRequirement_Succeeds_WhenDefaultScopePresent() - { - var optionsMonitor = CreateOptionsMonitor(options => - { - options.Authority = "https://authority.example"; - options.RequiredScopes.Add(StellaOpsScopes.PolicyRun); - options.Validate(); - }); - - var (handler, accessor) = CreateHandler(optionsMonitor, remoteAddress: IPAddress.Parse("198.51.100.5")); - var requirement = new StellaOpsScopeRequirement(new[] { StellaOpsScopes.ConcelierJobsTrigger }); - var principal = new StellaOpsPrincipalBuilder() - .WithSubject("user-tenant") - .WithScopes(new[] { StellaOpsScopes.ConcelierJobsTrigger, StellaOpsScopes.PolicyRun }) - .Build(); - - var context = new AuthorizationHandlerContext(new[] { requirement }, principal, accessor.HttpContext); - - await handler.HandleAsync(context); - - Assert.True(context.HasSucceeded); - } - - private static (StellaOpsScopeAuthorizationHandler Handler, IHttpContextAccessor Accessor) CreateHandler(IOptionsMonitor optionsMonitor, IPAddress remoteAddress) - { - var accessor = new HttpContextAccessor(); - var httpContext = new DefaultHttpContext(); - httpContext.Connection.RemoteIpAddress = remoteAddress; - accessor.HttpContext = httpContext; - - var bypassEvaluator = new StellaOpsBypassEvaluator(optionsMonitor, NullLogger.Instance); - - var handler = new StellaOpsScopeAuthorizationHandler( - accessor, - bypassEvaluator, - optionsMonitor, - NullLogger.Instance); - return (handler, accessor); - } - - private static IOptionsMonitor CreateOptionsMonitor(Action configure) - => new TestOptionsMonitor(configure); - - private sealed class TestOptionsMonitor : IOptionsMonitor - where TOptions : class, new() - { - private readonly TOptions value; - - public TestOptionsMonitor(Action configure) - { - value = new TOptions(); - configure(value); - } - - public TOptions CurrentValue => value; - - public TOptions Get(string? 
name) => value; - - public IDisposable OnChange(Action listener) => NullDisposable.Instance; - - private sealed class NullDisposable : IDisposable - { - public static NullDisposable Instance { get; } = new(); - public void Dispose() - { - } - } - } -} +using System; +using System.Net; +using System.Security.Claims; +using System.Threading.Tasks; +using Microsoft.AspNetCore.Authorization; +using Microsoft.AspNetCore.Http; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using StellaOps.Auth.Abstractions; +using StellaOps.Auth.ServerIntegration; +using Xunit; + +namespace StellaOps.Auth.ServerIntegration.Tests; + +public class StellaOpsScopeAuthorizationHandlerTests +{ + [Fact] + public async Task HandleRequirement_Succeeds_WhenScopePresent() + { + var optionsMonitor = CreateOptionsMonitor(options => + { + options.Authority = "https://authority.example"; + options.RequiredTenants.Add("tenant-alpha"); + options.Validate(); + }); + + var (handler, accessor) = CreateHandler(optionsMonitor, remoteAddress: IPAddress.Parse("10.0.0.1")); + var requirement = new StellaOpsScopeRequirement(new[] { StellaOpsScopes.ConcelierJobsTrigger }); + var principal = new StellaOpsPrincipalBuilder() + .WithSubject("user-1") + .WithTenant("tenant-alpha") + .WithScopes(new[] { StellaOpsScopes.ConcelierJobsTrigger }) + .Build(); + + var context = new AuthorizationHandlerContext(new[] { requirement }, principal, accessor.HttpContext); + + await handler.HandleAsync(context); + + Assert.True(context.HasSucceeded); + } + + [Fact] + [Fact] + public async Task HandleRequirement_Fails_WhenTenantMismatch() + { + var optionsMonitor = CreateOptionsMonitor(options => + { + options.Authority = "https://authority.example"; + options.RequiredTenants.Add("tenant-alpha"); + options.Validate(); + }); + + var (handler, accessor) = CreateHandler(optionsMonitor, remoteAddress: IPAddress.Parse("10.0.0.1")); + var requirement = new StellaOpsScopeRequirement(new[] { StellaOpsScopes.ConcelierJobsTrigger }); + var principal = new StellaOpsPrincipalBuilder() + .WithSubject("user-1") + .WithTenant("tenant-beta") + .WithScopes(new[] { StellaOpsScopes.ConcelierJobsTrigger }) + .Build(); + + var context = new AuthorizationHandlerContext(new[] { requirement }, principal, accessor.HttpContext); + + await handler.HandleAsync(context); + + Assert.False(context.HasSucceeded); + } + + public async Task HandleRequirement_Succeeds_WhenBypassNetworkMatches() + { + var optionsMonitor = CreateOptionsMonitor(options => + { + options.Authority = "https://authority.example"; + options.BypassNetworks.Add("127.0.0.1/32"); + options.Validate(); + }); + + var (handler, accessor) = CreateHandler(optionsMonitor, remoteAddress: IPAddress.Parse("127.0.0.1")); + var requirement = new StellaOpsScopeRequirement(new[] { StellaOpsScopes.ConcelierJobsTrigger }); + var principal = new ClaimsPrincipal(new ClaimsIdentity()); + var context = new AuthorizationHandlerContext(new[] { requirement }, principal, accessor.HttpContext); + + await handler.HandleAsync(context); + + Assert.True(context.HasSucceeded); + } + + [Fact] + public async Task HandleRequirement_Fails_WhenScopeMissingAndNoBypass() + { + var optionsMonitor = CreateOptionsMonitor(options => + { + options.Authority = "https://authority.example"; + options.Validate(); + }); + + var (handler, accessor) = CreateHandler(optionsMonitor, remoteAddress: IPAddress.Parse("203.0.113.10")); + var requirement = new StellaOpsScopeRequirement(new[] { StellaOpsScopes.ConcelierJobsTrigger }); + 
var principal = new ClaimsPrincipal(new ClaimsIdentity()); + var context = new AuthorizationHandlerContext(new[] { requirement }, principal, accessor.HttpContext); + + await handler.HandleAsync(context); + + Assert.False(context.HasSucceeded); + } + + [Fact] + public async Task HandleRequirement_Fails_WhenDefaultScopeMissing() + { + var optionsMonitor = CreateOptionsMonitor(options => + { + options.Authority = "https://authority.example"; + options.RequiredScopes.Add(StellaOpsScopes.PolicyRun); + options.Validate(); + }); + + var (handler, accessor) = CreateHandler(optionsMonitor, remoteAddress: IPAddress.Parse("198.51.100.5")); + var requirement = new StellaOpsScopeRequirement(new[] { StellaOpsScopes.ConcelierJobsTrigger }); + var principal = new StellaOpsPrincipalBuilder() + .WithSubject("user-tenant") + .WithScopes(new[] { StellaOpsScopes.ConcelierJobsTrigger }) + .Build(); + + var context = new AuthorizationHandlerContext(new[] { requirement }, principal, accessor.HttpContext); + + await handler.HandleAsync(context); + + Assert.False(context.HasSucceeded); + } + + [Fact] + public async Task HandleRequirement_Succeeds_WhenDefaultScopePresent() + { + var optionsMonitor = CreateOptionsMonitor(options => + { + options.Authority = "https://authority.example"; + options.RequiredScopes.Add(StellaOpsScopes.PolicyRun); + options.Validate(); + }); + + var (handler, accessor) = CreateHandler(optionsMonitor, remoteAddress: IPAddress.Parse("198.51.100.5")); + var requirement = new StellaOpsScopeRequirement(new[] { StellaOpsScopes.ConcelierJobsTrigger }); + var principal = new StellaOpsPrincipalBuilder() + .WithSubject("user-tenant") + .WithScopes(new[] { StellaOpsScopes.ConcelierJobsTrigger, StellaOpsScopes.PolicyRun }) + .Build(); + + var context = new AuthorizationHandlerContext(new[] { requirement }, principal, accessor.HttpContext); + + await handler.HandleAsync(context); + + Assert.True(context.HasSucceeded); + } + + private static (StellaOpsScopeAuthorizationHandler Handler, IHttpContextAccessor Accessor) CreateHandler(IOptionsMonitor optionsMonitor, IPAddress remoteAddress) + { + var accessor = new HttpContextAccessor(); + var httpContext = new DefaultHttpContext(); + httpContext.Connection.RemoteIpAddress = remoteAddress; + accessor.HttpContext = httpContext; + + var bypassEvaluator = new StellaOpsBypassEvaluator(optionsMonitor, NullLogger.Instance); + + var handler = new StellaOpsScopeAuthorizationHandler( + accessor, + bypassEvaluator, + optionsMonitor, + NullLogger.Instance); + return (handler, accessor); + } + + private static IOptionsMonitor CreateOptionsMonitor(Action configure) + => new TestOptionsMonitor(configure); + + private sealed class TestOptionsMonitor : IOptionsMonitor + where TOptions : class, new() + { + private readonly TOptions value; + + public TestOptionsMonitor(Action configure) + { + value = new TOptions(); + configure(value); + } + + public TOptions CurrentValue => value; + + public TOptions Get(string? 
name) => value; + + public IDisposable OnChange(Action listener) => NullDisposable.Instance; + + private sealed class NullDisposable : IDisposable + { + public static NullDisposable Instance { get; } = new(); + public void Dispose() + { + } + } + } +} diff --git a/src/StellaOps.Authority/StellaOps.Auth.ServerIntegration/README.NuGet.md b/src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/README.NuGet.md similarity index 100% rename from src/StellaOps.Authority/StellaOps.Auth.ServerIntegration/README.NuGet.md rename to src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/README.NuGet.md diff --git a/src/StellaOps.Authority/StellaOps.Auth.ServerIntegration/ServiceCollectionExtensions.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/ServiceCollectionExtensions.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Auth.ServerIntegration/ServiceCollectionExtensions.cs rename to src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/ServiceCollectionExtensions.cs index 01b3f717..b850d72f 100644 --- a/src/StellaOps.Authority/StellaOps.Auth.ServerIntegration/ServiceCollectionExtensions.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/ServiceCollectionExtensions.cs @@ -1,92 +1,92 @@ -using System; -using System.Security.Claims; -using Microsoft.AspNetCore.Authentication.JwtBearer; -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.DependencyInjection.Extensions; -using Microsoft.Extensions.Options; -using Microsoft.IdentityModel.Tokens; -using StellaOps.Auth.Abstractions; - -namespace StellaOps.Auth.ServerIntegration; - -/// -/// Dependency injection helpers for configuring StellaOps resource server authentication. -/// -public static class ServiceCollectionExtensions -{ - /// - /// Registers JWT bearer authentication and related authorisation helpers using the provided configuration section. - /// - /// The service collection. - /// Application configuration. - /// - /// Optional configuration section path. Defaults to Authority:ResourceServer. Provide null to skip binding. - /// - /// Optional callback allowing additional mutation of . - public static IServiceCollection AddStellaOpsResourceServerAuthentication( - this IServiceCollection services, - IConfiguration configuration, - string? configurationSection = "Authority:ResourceServer", - Action? 
configure = null) - { - ArgumentNullException.ThrowIfNull(services); - ArgumentNullException.ThrowIfNull(configuration); - - services.AddHttpContextAccessor(); - services.AddAuthorization(); - services.AddStellaOpsScopeHandler(); - services.TryAddSingleton(); - services.TryAddSingleton(_ => TimeProvider.System); - services.AddHttpClient(StellaOpsAuthorityConfigurationManager.HttpClientName); - services.AddSingleton(); - - var optionsBuilder = services.AddOptions(); - if (!string.IsNullOrWhiteSpace(configurationSection)) - { - optionsBuilder.Bind(configuration.GetSection(configurationSection)); - } - - if (configure is not null) - { - optionsBuilder.Configure(configure); - } - - optionsBuilder.PostConfigure(static options => options.Validate()); - - var authenticationBuilder = services.AddAuthentication(options => - { - options.DefaultAuthenticateScheme ??= StellaOpsAuthenticationDefaults.AuthenticationScheme; - options.DefaultChallengeScheme ??= StellaOpsAuthenticationDefaults.AuthenticationScheme; - }); - - authenticationBuilder.AddJwtBearer(StellaOpsAuthenticationDefaults.AuthenticationScheme); - - services.AddOptions(StellaOpsAuthenticationDefaults.AuthenticationScheme) - .Configure>((jwt, provider, monitor) => - { - var resourceOptions = monitor.CurrentValue; - - jwt.Authority = resourceOptions.AuthorityUri.ToString(); - if (!string.IsNullOrWhiteSpace(resourceOptions.MetadataAddress)) - { - jwt.MetadataAddress = resourceOptions.MetadataAddress; - } - jwt.RequireHttpsMetadata = resourceOptions.RequireHttpsMetadata; - jwt.BackchannelTimeout = resourceOptions.BackchannelTimeout; - jwt.MapInboundClaims = false; - jwt.SaveToken = false; - - jwt.TokenValidationParameters ??= new TokenValidationParameters(); - jwt.TokenValidationParameters.ValidIssuer = resourceOptions.AuthorityUri.ToString(); - jwt.TokenValidationParameters.ValidateAudience = resourceOptions.Audiences.Count > 0; - jwt.TokenValidationParameters.ValidAudiences = resourceOptions.Audiences; - jwt.TokenValidationParameters.ClockSkew = resourceOptions.TokenClockSkew; - jwt.TokenValidationParameters.NameClaimType = ClaimTypes.Name; - jwt.TokenValidationParameters.RoleClaimType = ClaimTypes.Role; - jwt.ConfigurationManager = provider.GetRequiredService(); - }); - - return services; - } -} +using System; +using System.Security.Claims; +using Microsoft.AspNetCore.Authentication.JwtBearer; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Microsoft.Extensions.Options; +using Microsoft.IdentityModel.Tokens; +using StellaOps.Auth.Abstractions; + +namespace StellaOps.Auth.ServerIntegration; + +/// +/// Dependency injection helpers for configuring StellaOps resource server authentication. +/// +public static class ServiceCollectionExtensions +{ + /// + /// Registers JWT bearer authentication and related authorisation helpers using the provided configuration section. + /// + /// The service collection. + /// Application configuration. + /// + /// Optional configuration section path. Defaults to Authority:ResourceServer. Provide null to skip binding. + /// + /// Optional callback allowing additional mutation of . + public static IServiceCollection AddStellaOpsResourceServerAuthentication( + this IServiceCollection services, + IConfiguration configuration, + string? configurationSection = "Authority:ResourceServer", + Action? 
configure = null) + { + ArgumentNullException.ThrowIfNull(services); + ArgumentNullException.ThrowIfNull(configuration); + + services.AddHttpContextAccessor(); + services.AddAuthorization(); + services.AddStellaOpsScopeHandler(); + services.TryAddSingleton(); + services.TryAddSingleton(_ => TimeProvider.System); + services.AddHttpClient(StellaOpsAuthorityConfigurationManager.HttpClientName); + services.AddSingleton(); + + var optionsBuilder = services.AddOptions(); + if (!string.IsNullOrWhiteSpace(configurationSection)) + { + optionsBuilder.Bind(configuration.GetSection(configurationSection)); + } + + if (configure is not null) + { + optionsBuilder.Configure(configure); + } + + optionsBuilder.PostConfigure(static options => options.Validate()); + + var authenticationBuilder = services.AddAuthentication(options => + { + options.DefaultAuthenticateScheme ??= StellaOpsAuthenticationDefaults.AuthenticationScheme; + options.DefaultChallengeScheme ??= StellaOpsAuthenticationDefaults.AuthenticationScheme; + }); + + authenticationBuilder.AddJwtBearer(StellaOpsAuthenticationDefaults.AuthenticationScheme); + + services.AddOptions(StellaOpsAuthenticationDefaults.AuthenticationScheme) + .Configure>((jwt, provider, monitor) => + { + var resourceOptions = monitor.CurrentValue; + + jwt.Authority = resourceOptions.AuthorityUri.ToString(); + if (!string.IsNullOrWhiteSpace(resourceOptions.MetadataAddress)) + { + jwt.MetadataAddress = resourceOptions.MetadataAddress; + } + jwt.RequireHttpsMetadata = resourceOptions.RequireHttpsMetadata; + jwt.BackchannelTimeout = resourceOptions.BackchannelTimeout; + jwt.MapInboundClaims = false; + jwt.SaveToken = false; + + jwt.TokenValidationParameters ??= new TokenValidationParameters(); + jwt.TokenValidationParameters.ValidIssuer = resourceOptions.AuthorityUri.ToString(); + jwt.TokenValidationParameters.ValidateAudience = resourceOptions.Audiences.Count > 0; + jwt.TokenValidationParameters.ValidAudiences = resourceOptions.Audiences; + jwt.TokenValidationParameters.ClockSkew = resourceOptions.TokenClockSkew; + jwt.TokenValidationParameters.NameClaimType = ClaimTypes.Name; + jwt.TokenValidationParameters.RoleClaimType = ClaimTypes.Role; + jwt.ConfigurationManager = provider.GetRequiredService(); + }); + + return services; + } +} diff --git a/src/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOps.Auth.ServerIntegration.csproj b/src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOps.Auth.ServerIntegration.csproj similarity index 86% rename from src/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOps.Auth.ServerIntegration.csproj rename to src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOps.Auth.ServerIntegration.csproj index 33cf5438..a700ef21 100644 --- a/src/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOps.Auth.ServerIntegration.csproj +++ b/src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOps.Auth.ServerIntegration.csproj @@ -1,4 +1,5 @@ - + + net10.0 preview @@ -27,8 +28,8 @@ - - + + @@ -43,4 +44,4 @@ <_Parameter1>StellaOps.Auth.ServerIntegration.Tests - + \ No newline at end of file diff --git a/src/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOpsAuthorityConfigurationManager.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOpsAuthorityConfigurationManager.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOpsAuthorityConfigurationManager.cs rename to 
src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOpsAuthorityConfigurationManager.cs index dd0f65ba..d39c2942 100644 --- a/src/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOpsAuthorityConfigurationManager.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOpsAuthorityConfigurationManager.cs @@ -1,116 +1,116 @@ -using System; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using Microsoft.IdentityModel.Protocols; -using Microsoft.IdentityModel.Protocols.OpenIdConnect; -using Microsoft.IdentityModel.Tokens; - -namespace StellaOps.Auth.ServerIntegration; - -/// -/// Cached configuration manager for StellaOps Authority metadata and JWKS. -/// -internal sealed class StellaOpsAuthorityConfigurationManager : IConfigurationManager -{ - internal const string HttpClientName = "StellaOps.Auth.ServerIntegration.Metadata"; - - private readonly IHttpClientFactory httpClientFactory; - private readonly IOptionsMonitor optionsMonitor; - private readonly TimeProvider timeProvider; - private readonly ILogger logger; - private readonly SemaphoreSlim refreshLock = new(1, 1); - - private OpenIdConnectConfiguration? cachedConfiguration; - private DateTimeOffset cacheExpiresAt; - - public StellaOpsAuthorityConfigurationManager( - IHttpClientFactory httpClientFactory, - IOptionsMonitor optionsMonitor, - TimeProvider timeProvider, - ILogger logger) - { - this.httpClientFactory = httpClientFactory ?? throw new ArgumentNullException(nameof(httpClientFactory)); - this.optionsMonitor = optionsMonitor ?? throw new ArgumentNullException(nameof(optionsMonitor)); - this.timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); - this.logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); - } - - public async Task GetConfigurationAsync(CancellationToken cancellationToken) - { - var now = timeProvider.GetUtcNow(); - var current = Volatile.Read(ref cachedConfiguration); - if (current is not null && now < cacheExpiresAt) - { - return current; - } - - await refreshLock.WaitAsync(cancellationToken).ConfigureAwait(false); - try - { - if (cachedConfiguration is not null && now < cacheExpiresAt) - { - return cachedConfiguration; - } - - var options = optionsMonitor.CurrentValue; - var metadataAddress = ResolveMetadataAddress(options); - var httpClient = httpClientFactory.CreateClient(HttpClientName); - httpClient.Timeout = options.BackchannelTimeout; - - var retriever = new HttpDocumentRetriever(httpClient) - { - RequireHttps = options.RequireHttpsMetadata - }; - - logger.LogDebug("Fetching OpenID Connect configuration from {MetadataAddress}.", metadataAddress); - - var configuration = await OpenIdConnectConfigurationRetriever.GetAsync(metadataAddress, retriever, cancellationToken).ConfigureAwait(false); - configuration.Issuer ??= options.AuthorityUri.ToString(); - - if (!string.IsNullOrWhiteSpace(configuration.JwksUri)) - { - logger.LogDebug("Fetching JWKS from {JwksUri}.", configuration.JwksUri); - var jwksDocument = await retriever.GetDocumentAsync(configuration.JwksUri, cancellationToken).ConfigureAwait(false); - var jsonWebKeySet = new JsonWebKeySet(jwksDocument); - configuration.SigningKeys.Clear(); - foreach (JsonWebKey key in jsonWebKeySet.Keys) - { - configuration.SigningKeys.Add(key); - } - } - - cachedConfiguration = configuration; - cacheExpiresAt = now + options.MetadataCacheLifetime; - return configuration; - } - finally - { - refreshLock.Release(); - } - } - - public void RequestRefresh() - { - Volatile.Write(ref cachedConfiguration, null); - cacheExpiresAt = DateTimeOffset.MinValue; - } - - private static string ResolveMetadataAddress(StellaOpsResourceServerOptions options) - { - if (!string.IsNullOrWhiteSpace(options.MetadataAddress)) - { - return options.MetadataAddress; - } - - var authority = options.AuthorityUri; - if (!authority.AbsoluteUri.EndsWith("/", StringComparison.Ordinal)) - { - authority = new Uri(authority.AbsoluteUri + "/", UriKind.Absolute); - } - - return new Uri(authority, ".well-known/openid-configuration").AbsoluteUri; - } -} +using System; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using Microsoft.IdentityModel.Protocols; +using Microsoft.IdentityModel.Protocols.OpenIdConnect; +using Microsoft.IdentityModel.Tokens; + +namespace StellaOps.Auth.ServerIntegration; + +/// +/// Cached configuration manager for StellaOps Authority metadata and JWKS. +/// +internal sealed class StellaOpsAuthorityConfigurationManager : IConfigurationManager +{ + internal const string HttpClientName = "StellaOps.Auth.ServerIntegration.Metadata"; + + private readonly IHttpClientFactory httpClientFactory; + private readonly IOptionsMonitor optionsMonitor; + private readonly TimeProvider timeProvider; + private readonly ILogger logger; + private readonly SemaphoreSlim refreshLock = new(1, 1); + + private OpenIdConnectConfiguration? cachedConfiguration; + private DateTimeOffset cacheExpiresAt; + + public StellaOpsAuthorityConfigurationManager( + IHttpClientFactory httpClientFactory, + IOptionsMonitor optionsMonitor, + TimeProvider timeProvider, + ILogger logger) + { + this.httpClientFactory = httpClientFactory ?? 
throw new ArgumentNullException(nameof(httpClientFactory)); + this.optionsMonitor = optionsMonitor ?? throw new ArgumentNullException(nameof(optionsMonitor)); + this.timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public async Task GetConfigurationAsync(CancellationToken cancellationToken) + { + var now = timeProvider.GetUtcNow(); + var current = Volatile.Read(ref cachedConfiguration); + if (current is not null && now < cacheExpiresAt) + { + return current; + } + + await refreshLock.WaitAsync(cancellationToken).ConfigureAwait(false); + try + { + if (cachedConfiguration is not null && now < cacheExpiresAt) + { + return cachedConfiguration; + } + + var options = optionsMonitor.CurrentValue; + var metadataAddress = ResolveMetadataAddress(options); + var httpClient = httpClientFactory.CreateClient(HttpClientName); + httpClient.Timeout = options.BackchannelTimeout; + + var retriever = new HttpDocumentRetriever(httpClient) + { + RequireHttps = options.RequireHttpsMetadata + }; + + logger.LogDebug("Fetching OpenID Connect configuration from {MetadataAddress}.", metadataAddress); + + var configuration = await OpenIdConnectConfigurationRetriever.GetAsync(metadataAddress, retriever, cancellationToken).ConfigureAwait(false); + configuration.Issuer ??= options.AuthorityUri.ToString(); + + if (!string.IsNullOrWhiteSpace(configuration.JwksUri)) + { + logger.LogDebug("Fetching JWKS from {JwksUri}.", configuration.JwksUri); + var jwksDocument = await retriever.GetDocumentAsync(configuration.JwksUri, cancellationToken).ConfigureAwait(false); + var jsonWebKeySet = new JsonWebKeySet(jwksDocument); + configuration.SigningKeys.Clear(); + foreach (JsonWebKey key in jsonWebKeySet.Keys) + { + configuration.SigningKeys.Add(key); + } + } + + cachedConfiguration = configuration; + cacheExpiresAt = now + options.MetadataCacheLifetime; + return configuration; + } + finally + { + refreshLock.Release(); + } + } + + public void RequestRefresh() + { + Volatile.Write(ref cachedConfiguration, null); + cacheExpiresAt = DateTimeOffset.MinValue; + } + + private static string ResolveMetadataAddress(StellaOpsResourceServerOptions options) + { + if (!string.IsNullOrWhiteSpace(options.MetadataAddress)) + { + return options.MetadataAddress; + } + + var authority = options.AuthorityUri; + if (!authority.AbsoluteUri.EndsWith("/", StringComparison.Ordinal)) + { + authority = new Uri(authority.AbsoluteUri + "/", UriKind.Absolute); + } + + return new Uri(authority, ".well-known/openid-configuration").AbsoluteUri; + } +} diff --git a/src/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOpsAuthorizationPolicyBuilderExtensions.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOpsAuthorizationPolicyBuilderExtensions.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOpsAuthorizationPolicyBuilderExtensions.cs rename to src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOpsAuthorizationPolicyBuilderExtensions.cs diff --git a/src/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOpsBypassEvaluator.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOpsBypassEvaluator.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOpsBypassEvaluator.cs rename to 
src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOpsBypassEvaluator.cs diff --git a/src/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOpsResourceServerOptions.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOpsResourceServerOptions.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOpsResourceServerOptions.cs rename to src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOpsResourceServerOptions.cs index 697566c1..eca6e374 100644 --- a/src/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOpsResourceServerOptions.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOpsResourceServerOptions.cs @@ -1,178 +1,178 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using StellaOps.Auth.Abstractions; - -namespace StellaOps.Auth.ServerIntegration; - -/// -/// Options controlling StellaOps resource server authentication. -/// -public sealed class StellaOpsResourceServerOptions -{ - private readonly List audiences = new(); - private readonly List requiredScopes = new(); - private readonly List requiredTenants = new(); - private readonly List bypassNetworks = new(); - - /// - /// Gets or sets the Authority (issuer) URL that exposes OpenID discovery. - /// - public string Authority { get; set; } = string.Empty; - - /// - /// Optional explicit OpenID Connect metadata address. - /// - public string? MetadataAddress { get; set; } - - /// - /// Audiences accepted by the resource server (validated against the aud claim). - /// - public IList Audiences => audiences; - - /// - /// Scopes enforced by default authorisation policies. - /// - public IList RequiredScopes => requiredScopes; - - /// - /// Tenants permitted to access the resource server (empty list disables tenant checks). - /// - public IList RequiredTenants => requiredTenants; - - /// - /// Networks permitted to bypass authentication (used for trusted on-host automation). - /// - public IList BypassNetworks => bypassNetworks; - - /// - /// Whether HTTPS metadata is required when communicating with Authority. - /// - public bool RequireHttpsMetadata { get; set; } = true; - - /// - /// Back-channel timeout when fetching metadata/JWKS. - /// - public TimeSpan BackchannelTimeout { get; set; } = TimeSpan.FromSeconds(30); - - /// - /// Clock skew tolerated when validating tokens. - /// - public TimeSpan TokenClockSkew { get; set; } = TimeSpan.FromSeconds(60); - - /// - /// Lifetime for cached discovery/JWKS metadata before forcing a refresh. - /// - public TimeSpan MetadataCacheLifetime { get; set; } = TimeSpan.FromMinutes(5); - - /// - /// Gets the canonical Authority URI (populated during validation). - /// - public Uri AuthorityUri { get; private set; } = null!; - - /// - /// Gets the normalised scope list (populated during validation). - /// - public IReadOnlyList NormalizedScopes { get; private set; } = Array.Empty(); - - /// - /// Gets the normalised tenant list (populated during validation). - /// - public IReadOnlyList NormalizedTenants { get; private set; } = Array.Empty(); - - /// - /// Gets the network matcher used for bypass checks (populated during validation). - /// - public NetworkMaskMatcher BypassMatcher { get; private set; } = NetworkMaskMatcher.DenyAll; - - /// - /// Validates provided configuration and normalises collections. 
- /// - public void Validate() - { - if (string.IsNullOrWhiteSpace(Authority)) - { - throw new InvalidOperationException("Resource server authentication requires an Authority URL."); - } - - if (!Uri.TryCreate(Authority.Trim(), UriKind.Absolute, out var authorityUri)) - { - throw new InvalidOperationException("Resource server Authority URL must be an absolute URI."); - } - - if (RequireHttpsMetadata && - !authorityUri.IsLoopback && - !string.Equals(authorityUri.Scheme, Uri.UriSchemeHttps, StringComparison.OrdinalIgnoreCase)) - { - throw new InvalidOperationException("Resource server Authority URL must use HTTPS when HTTPS metadata is required."); - } - - if (BackchannelTimeout <= TimeSpan.Zero) - { - throw new InvalidOperationException("Resource server back-channel timeout must be greater than zero."); - } - - if (TokenClockSkew < TimeSpan.Zero || TokenClockSkew > TimeSpan.FromMinutes(5)) - { - throw new InvalidOperationException("Resource server token clock skew must be between 0 seconds and 5 minutes."); - } - - if (MetadataCacheLifetime <= TimeSpan.Zero || MetadataCacheLifetime > TimeSpan.FromHours(24)) - { - throw new InvalidOperationException("Resource server metadata cache lifetime must be greater than zero and less than or equal to 24 hours."); - } - - AuthorityUri = authorityUri; - - NormalizeList(audiences, toLower: false); - NormalizeList(requiredScopes, toLower: true); - NormalizeList(requiredTenants, toLower: true); - NormalizeList(bypassNetworks, toLower: false); - - NormalizedScopes = requiredScopes.Count == 0 - ? Array.Empty() - : requiredScopes.OrderBy(static scope => scope, StringComparer.Ordinal).ToArray(); - - NormalizedTenants = requiredTenants.Count == 0 - ? Array.Empty() - : requiredTenants.OrderBy(static tenant => tenant, StringComparer.Ordinal).ToArray(); - - BypassMatcher = bypassNetworks.Count == 0 - ? NetworkMaskMatcher.DenyAll - : new NetworkMaskMatcher(bypassNetworks); - } - - private static void NormalizeList(IList values, bool toLower) - { - if (values.Count == 0) - { - return; - } - - var seen = new HashSet(StringComparer.OrdinalIgnoreCase); - - for (var index = values.Count - 1; index >= 0; index--) - { - var value = values[index]; - if (string.IsNullOrWhiteSpace(value)) - { - values.RemoveAt(index); - continue; - } - - var trimmed = value.Trim(); - if (toLower) - { - trimmed = trimmed.ToLowerInvariant(); - } - - if (!seen.Add(trimmed)) - { - values.RemoveAt(index); - continue; - } - - values[index] = trimmed; - } - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using StellaOps.Auth.Abstractions; + +namespace StellaOps.Auth.ServerIntegration; + +/// +/// Options controlling StellaOps resource server authentication. +/// +public sealed class StellaOpsResourceServerOptions +{ + private readonly List audiences = new(); + private readonly List requiredScopes = new(); + private readonly List requiredTenants = new(); + private readonly List bypassNetworks = new(); + + /// + /// Gets or sets the Authority (issuer) URL that exposes OpenID discovery. + /// + public string Authority { get; set; } = string.Empty; + + /// + /// Optional explicit OpenID Connect metadata address. + /// + public string? MetadataAddress { get; set; } + + /// + /// Audiences accepted by the resource server (validated against the aud claim). + /// + public IList Audiences => audiences; + + /// + /// Scopes enforced by default authorisation policies. 
+ /// + public IList RequiredScopes => requiredScopes; + + /// + /// Tenants permitted to access the resource server (empty list disables tenant checks). + /// + public IList RequiredTenants => requiredTenants; + + /// + /// Networks permitted to bypass authentication (used for trusted on-host automation). + /// + public IList BypassNetworks => bypassNetworks; + + /// + /// Whether HTTPS metadata is required when communicating with Authority. + /// + public bool RequireHttpsMetadata { get; set; } = true; + + /// + /// Back-channel timeout when fetching metadata/JWKS. + /// + public TimeSpan BackchannelTimeout { get; set; } = TimeSpan.FromSeconds(30); + + /// + /// Clock skew tolerated when validating tokens. + /// + public TimeSpan TokenClockSkew { get; set; } = TimeSpan.FromSeconds(60); + + /// + /// Lifetime for cached discovery/JWKS metadata before forcing a refresh. + /// + public TimeSpan MetadataCacheLifetime { get; set; } = TimeSpan.FromMinutes(5); + + /// + /// Gets the canonical Authority URI (populated during validation). + /// + public Uri AuthorityUri { get; private set; } = null!; + + /// + /// Gets the normalised scope list (populated during validation). + /// + public IReadOnlyList NormalizedScopes { get; private set; } = Array.Empty(); + + /// + /// Gets the normalised tenant list (populated during validation). + /// + public IReadOnlyList NormalizedTenants { get; private set; } = Array.Empty(); + + /// + /// Gets the network matcher used for bypass checks (populated during validation). + /// + public NetworkMaskMatcher BypassMatcher { get; private set; } = NetworkMaskMatcher.DenyAll; + + /// + /// Validates provided configuration and normalises collections. + /// + public void Validate() + { + if (string.IsNullOrWhiteSpace(Authority)) + { + throw new InvalidOperationException("Resource server authentication requires an Authority URL."); + } + + if (!Uri.TryCreate(Authority.Trim(), UriKind.Absolute, out var authorityUri)) + { + throw new InvalidOperationException("Resource server Authority URL must be an absolute URI."); + } + + if (RequireHttpsMetadata && + !authorityUri.IsLoopback && + !string.Equals(authorityUri.Scheme, Uri.UriSchemeHttps, StringComparison.OrdinalIgnoreCase)) + { + throw new InvalidOperationException("Resource server Authority URL must use HTTPS when HTTPS metadata is required."); + } + + if (BackchannelTimeout <= TimeSpan.Zero) + { + throw new InvalidOperationException("Resource server back-channel timeout must be greater than zero."); + } + + if (TokenClockSkew < TimeSpan.Zero || TokenClockSkew > TimeSpan.FromMinutes(5)) + { + throw new InvalidOperationException("Resource server token clock skew must be between 0 seconds and 5 minutes."); + } + + if (MetadataCacheLifetime <= TimeSpan.Zero || MetadataCacheLifetime > TimeSpan.FromHours(24)) + { + throw new InvalidOperationException("Resource server metadata cache lifetime must be greater than zero and less than or equal to 24 hours."); + } + + AuthorityUri = authorityUri; + + NormalizeList(audiences, toLower: false); + NormalizeList(requiredScopes, toLower: true); + NormalizeList(requiredTenants, toLower: true); + NormalizeList(bypassNetworks, toLower: false); + + NormalizedScopes = requiredScopes.Count == 0 + ? Array.Empty() + : requiredScopes.OrderBy(static scope => scope, StringComparer.Ordinal).ToArray(); + + NormalizedTenants = requiredTenants.Count == 0 + ? 
Array.Empty() + : requiredTenants.OrderBy(static tenant => tenant, StringComparer.Ordinal).ToArray(); + + BypassMatcher = bypassNetworks.Count == 0 + ? NetworkMaskMatcher.DenyAll + : new NetworkMaskMatcher(bypassNetworks); + } + + private static void NormalizeList(IList values, bool toLower) + { + if (values.Count == 0) + { + return; + } + + var seen = new HashSet(StringComparer.OrdinalIgnoreCase); + + for (var index = values.Count - 1; index >= 0; index--) + { + var value = values[index]; + if (string.IsNullOrWhiteSpace(value)) + { + values.RemoveAt(index); + continue; + } + + var trimmed = value.Trim(); + if (toLower) + { + trimmed = trimmed.ToLowerInvariant(); + } + + if (!seen.Add(trimmed)) + { + values.RemoveAt(index); + continue; + } + + values[index] = trimmed; + } + } +} diff --git a/src/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOpsScopeAuthorizationHandler.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOpsScopeAuthorizationHandler.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOpsScopeAuthorizationHandler.cs rename to src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOpsScopeAuthorizationHandler.cs index 21152b22..8f713c99 100644 --- a/src/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOpsScopeAuthorizationHandler.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOpsScopeAuthorizationHandler.cs @@ -1,202 +1,202 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Security.Claims; -using System.Threading.Tasks; -using Microsoft.AspNetCore.Authorization; -using Microsoft.AspNetCore.Http; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Auth.Abstractions; - -namespace StellaOps.Auth.ServerIntegration; - -/// -/// Handles evaluation. -/// -internal sealed class StellaOpsScopeAuthorizationHandler : AuthorizationHandler -{ - private readonly IHttpContextAccessor httpContextAccessor; - private readonly StellaOpsBypassEvaluator bypassEvaluator; - private readonly IOptionsMonitor optionsMonitor; - private readonly ILogger logger; - - public StellaOpsScopeAuthorizationHandler( - IHttpContextAccessor httpContextAccessor, - StellaOpsBypassEvaluator bypassEvaluator, - IOptionsMonitor optionsMonitor, - ILogger logger) - { - this.httpContextAccessor = httpContextAccessor; - this.bypassEvaluator = bypassEvaluator; - this.optionsMonitor = optionsMonitor ?? throw new ArgumentNullException(nameof(optionsMonitor)); - this.logger = logger; - } - - protected override Task HandleRequirementAsync( - AuthorizationHandlerContext context, - StellaOpsScopeRequirement requirement) - { - var resourceOptions = optionsMonitor.CurrentValue; - var httpContext = httpContextAccessor.HttpContext; - var combinedScopes = CombineRequiredScopes(resourceOptions.NormalizedScopes, requirement.RequiredScopes); - HashSet? userScopes = null; - - if (context.User?.Identity?.IsAuthenticated == true) - { - userScopes = ExtractScopes(context.User); - - foreach (var scope in combinedScopes) - { - if (!userScopes.Contains(scope)) - { - continue; - } - - if (TenantAllowed(context.User, resourceOptions, out var normalizedTenant)) - { - context.Succeed(requirement); - return Task.CompletedTask; - } - - if (logger.IsEnabled(LogLevel.Debug)) - { - var allowedTenants = resourceOptions.NormalizedTenants.Count == 0 - ? 
"(none)" - : string.Join(", ", resourceOptions.NormalizedTenants); - - logger.LogDebug( - "Tenant requirement not satisfied. RequiredTenants={RequiredTenants}; PrincipalTenant={PrincipalTenant}; Remote={Remote}", - allowedTenants, - normalizedTenant ?? "(none)", - httpContext?.Connection.RemoteIpAddress); - } - - // tenant mismatch cannot be resolved by checking additional scopes for this principal - break; - } - } - - if (httpContext is not null && bypassEvaluator.ShouldBypass(httpContext, combinedScopes)) - { - context.Succeed(requirement); - return Task.CompletedTask; - } - - if (logger.IsEnabled(LogLevel.Debug)) - { - var required = string.Join(", ", combinedScopes); - var principalScopes = userScopes is null || userScopes.Count == 0 - ? "(none)" - : string.Join(", ", userScopes); - var tenantValue = context.User?.FindFirstValue(StellaOpsClaimTypes.Tenant) ?? "(none)"; - - logger.LogDebug( - "Scope requirement not satisfied. Required={RequiredScopes}; PrincipalScopes={PrincipalScopes}; Tenant={Tenant}; Remote={Remote}", - required, - principalScopes, - tenantValue, - httpContext?.Connection.RemoteIpAddress); - } - - return Task.CompletedTask; - } - - private static bool TenantAllowed(ClaimsPrincipal principal, StellaOpsResourceServerOptions options, out string? normalizedTenant) - { - normalizedTenant = null; - - if (options.NormalizedTenants.Count == 0) - { - return true; - } - - var rawTenant = principal.FindFirstValue(StellaOpsClaimTypes.Tenant); - if (string.IsNullOrWhiteSpace(rawTenant)) - { - return false; - } - - normalizedTenant = rawTenant.Trim().ToLowerInvariant(); - - foreach (var allowed in options.NormalizedTenants) - { - if (string.Equals(allowed, normalizedTenant, StringComparison.Ordinal)) - { - return true; - } - } - - return false; - } - - private static HashSet ExtractScopes(ClaimsPrincipal principal) - { - var scopes = new HashSet(StringComparer.Ordinal); - - foreach (var claim in principal.FindAll(StellaOpsClaimTypes.ScopeItem)) - { - if (string.IsNullOrWhiteSpace(claim.Value)) - { - continue; - } - - scopes.Add(claim.Value); - } - - foreach (var claim in principal.FindAll(StellaOpsClaimTypes.Scope)) - { - if (string.IsNullOrWhiteSpace(claim.Value)) - { - continue; - } - - var parts = claim.Value.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); - - foreach (var part in parts) - { - var normalized = StellaOpsScopes.Normalize(part); - if (normalized is not null) - { - scopes.Add(normalized); - } - } - } - - return scopes; - } - - private static IReadOnlyList CombineRequiredScopes( - IReadOnlyList defaultScopes, - IReadOnlyCollection requirementScopes) - { - if ((defaultScopes is null || defaultScopes.Count == 0) && (requirementScopes is null || requirementScopes.Count == 0)) - { - return Array.Empty(); - } - - if (defaultScopes is null || defaultScopes.Count == 0) - { - return requirementScopes is string[] requirementArray - ? requirementArray - : requirementScopes.ToArray(); - } - - var combined = new HashSet(defaultScopes, StringComparer.Ordinal); - - if (requirementScopes is not null) - { - foreach (var scope in requirementScopes) - { - if (!string.IsNullOrWhiteSpace(scope)) - { - combined.Add(scope); - } - } - } - - return combined.Count == defaultScopes.Count && requirementScopes is null - ? 
defaultScopes - : combined.OrderBy(static scope => scope, StringComparer.Ordinal).ToArray(); - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Security.Claims; +using System.Threading.Tasks; +using Microsoft.AspNetCore.Authorization; +using Microsoft.AspNetCore.Http; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Auth.Abstractions; + +namespace StellaOps.Auth.ServerIntegration; + +/// +/// Handles evaluation. +/// +internal sealed class StellaOpsScopeAuthorizationHandler : AuthorizationHandler +{ + private readonly IHttpContextAccessor httpContextAccessor; + private readonly StellaOpsBypassEvaluator bypassEvaluator; + private readonly IOptionsMonitor optionsMonitor; + private readonly ILogger logger; + + public StellaOpsScopeAuthorizationHandler( + IHttpContextAccessor httpContextAccessor, + StellaOpsBypassEvaluator bypassEvaluator, + IOptionsMonitor optionsMonitor, + ILogger logger) + { + this.httpContextAccessor = httpContextAccessor; + this.bypassEvaluator = bypassEvaluator; + this.optionsMonitor = optionsMonitor ?? throw new ArgumentNullException(nameof(optionsMonitor)); + this.logger = logger; + } + + protected override Task HandleRequirementAsync( + AuthorizationHandlerContext context, + StellaOpsScopeRequirement requirement) + { + var resourceOptions = optionsMonitor.CurrentValue; + var httpContext = httpContextAccessor.HttpContext; + var combinedScopes = CombineRequiredScopes(resourceOptions.NormalizedScopes, requirement.RequiredScopes); + HashSet? userScopes = null; + + if (context.User?.Identity?.IsAuthenticated == true) + { + userScopes = ExtractScopes(context.User); + + foreach (var scope in combinedScopes) + { + if (!userScopes.Contains(scope)) + { + continue; + } + + if (TenantAllowed(context.User, resourceOptions, out var normalizedTenant)) + { + context.Succeed(requirement); + return Task.CompletedTask; + } + + if (logger.IsEnabled(LogLevel.Debug)) + { + var allowedTenants = resourceOptions.NormalizedTenants.Count == 0 + ? "(none)" + : string.Join(", ", resourceOptions.NormalizedTenants); + + logger.LogDebug( + "Tenant requirement not satisfied. RequiredTenants={RequiredTenants}; PrincipalTenant={PrincipalTenant}; Remote={Remote}", + allowedTenants, + normalizedTenant ?? "(none)", + httpContext?.Connection.RemoteIpAddress); + } + + // tenant mismatch cannot be resolved by checking additional scopes for this principal + break; + } + } + + if (httpContext is not null && bypassEvaluator.ShouldBypass(httpContext, combinedScopes)) + { + context.Succeed(requirement); + return Task.CompletedTask; + } + + if (logger.IsEnabled(LogLevel.Debug)) + { + var required = string.Join(", ", combinedScopes); + var principalScopes = userScopes is null || userScopes.Count == 0 + ? "(none)" + : string.Join(", ", userScopes); + var tenantValue = context.User?.FindFirstValue(StellaOpsClaimTypes.Tenant) ?? "(none)"; + + logger.LogDebug( + "Scope requirement not satisfied. Required={RequiredScopes}; PrincipalScopes={PrincipalScopes}; Tenant={Tenant}; Remote={Remote}", + required, + principalScopes, + tenantValue, + httpContext?.Connection.RemoteIpAddress); + } + + return Task.CompletedTask; + } + + private static bool TenantAllowed(ClaimsPrincipal principal, StellaOpsResourceServerOptions options, out string? 
normalizedTenant) + { + normalizedTenant = null; + + if (options.NormalizedTenants.Count == 0) + { + return true; + } + + var rawTenant = principal.FindFirstValue(StellaOpsClaimTypes.Tenant); + if (string.IsNullOrWhiteSpace(rawTenant)) + { + return false; + } + + normalizedTenant = rawTenant.Trim().ToLowerInvariant(); + + foreach (var allowed in options.NormalizedTenants) + { + if (string.Equals(allowed, normalizedTenant, StringComparison.Ordinal)) + { + return true; + } + } + + return false; + } + + private static HashSet ExtractScopes(ClaimsPrincipal principal) + { + var scopes = new HashSet(StringComparer.Ordinal); + + foreach (var claim in principal.FindAll(StellaOpsClaimTypes.ScopeItem)) + { + if (string.IsNullOrWhiteSpace(claim.Value)) + { + continue; + } + + scopes.Add(claim.Value); + } + + foreach (var claim in principal.FindAll(StellaOpsClaimTypes.Scope)) + { + if (string.IsNullOrWhiteSpace(claim.Value)) + { + continue; + } + + var parts = claim.Value.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); + + foreach (var part in parts) + { + var normalized = StellaOpsScopes.Normalize(part); + if (normalized is not null) + { + scopes.Add(normalized); + } + } + } + + return scopes; + } + + private static IReadOnlyList CombineRequiredScopes( + IReadOnlyList defaultScopes, + IReadOnlyCollection requirementScopes) + { + if ((defaultScopes is null || defaultScopes.Count == 0) && (requirementScopes is null || requirementScopes.Count == 0)) + { + return Array.Empty(); + } + + if (defaultScopes is null || defaultScopes.Count == 0) + { + return requirementScopes is string[] requirementArray + ? requirementArray + : requirementScopes.ToArray(); + } + + var combined = new HashSet(defaultScopes, StringComparer.Ordinal); + + if (requirementScopes is not null) + { + foreach (var scope in requirementScopes) + { + if (!string.IsNullOrWhiteSpace(scope)) + { + combined.Add(scope); + } + } + } + + return combined.Count == defaultScopes.Count && requirementScopes is null + ? 
defaultScopes + : combined.OrderBy(static scope => scope, StringComparer.Ordinal).ToArray(); + } +} diff --git a/src/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOpsScopeRequirement.cs b/src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOpsScopeRequirement.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOpsScopeRequirement.cs rename to src/Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOpsScopeRequirement.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/Security/CryptoPasswordHasherTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/Security/CryptoPasswordHasherTests.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/Security/CryptoPasswordHasherTests.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/Security/CryptoPasswordHasherTests.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardClientProvisioningStoreTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardClientProvisioningStoreTests.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardClientProvisioningStoreTests.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardClientProvisioningStoreTests.cs index b2898c66..4610e4a1 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardClientProvisioningStoreTests.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardClientProvisioningStoreTests.cs @@ -1,185 +1,185 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; -using MongoDB.Driver; -using StellaOps.Authority.Plugins.Abstractions; -using StellaOps.Authority.Plugin.Standard.Storage; -using StellaOps.Authority.Storage.Mongo.Documents; -using StellaOps.Authority.Storage.Mongo.Stores; -using Xunit; - -namespace StellaOps.Authority.Plugin.Standard.Tests; - -public class StandardClientProvisioningStoreTests -{ - [Fact] - public async Task CreateOrUpdateAsync_HashesSecretAndPersistsDocument() - { - var store = new TrackingClientStore(); - var revocations = new TrackingRevocationStore(); - var provisioning = new StandardClientProvisioningStore("standard", store, revocations, TimeProvider.System); - - var registration = new AuthorityClientRegistration( - clientId: "bootstrap-client", - confidential: true, - displayName: "Bootstrap", - clientSecret: "SuperSecret1!", - allowedGrantTypes: new[] { "client_credentials" }, - allowedScopes: new[] { "scopeA" }); - - var result = await provisioning.CreateOrUpdateAsync(registration, CancellationToken.None); - - Assert.True(result.Succeeded); - Assert.True(store.Documents.TryGetValue("bootstrap-client", out var document)); - Assert.NotNull(document); - Assert.Equal(AuthoritySecretHasher.ComputeHash("SuperSecret1!"), document!.SecretHash); - Assert.Equal("standard", document.Plugin); - - var descriptor = await provisioning.FindByClientIdAsync("bootstrap-client", CancellationToken.None); - Assert.NotNull(descriptor); - Assert.Equal("bootstrap-client", descriptor!.ClientId); - Assert.True(descriptor.Confidential); - Assert.Contains("client_credentials", descriptor.AllowedGrantTypes); - Assert.Contains("scopea", 
descriptor.AllowedScopes); - } - - [Fact] - [Fact] - public async Task CreateOrUpdateAsync_NormalisesTenant() - { - var store = new TrackingClientStore(); - var revocations = new TrackingRevocationStore(); - var provisioning = new StandardClientProvisioningStore("standard", store, revocations, TimeProvider.System); - - var registration = new AuthorityClientRegistration( - clientId: "tenant-client", - confidential: false, - displayName: "Tenant Client", - clientSecret: null, - allowedGrantTypes: new[] { "client_credentials" }, - allowedScopes: new[] { "scopeA" }, - tenant: " Tenant-Alpha " ); - - await provisioning.CreateOrUpdateAsync(registration, CancellationToken.None); - - Assert.True(store.Documents.TryGetValue("tenant-client", out var document)); - Assert.NotNull(document); - Assert.Equal("tenant-alpha", document!.Properties[AuthorityClientMetadataKeys.Tenant]); - - var descriptor = await provisioning.FindByClientIdAsync("tenant-client", CancellationToken.None); - Assert.NotNull(descriptor); - Assert.Equal("tenant-alpha", descriptor!.Tenant); - } - - - public async Task CreateOrUpdateAsync_StoresAudiences() - { - var store = new TrackingClientStore(); - var revocations = new TrackingRevocationStore(); - var provisioning = new StandardClientProvisioningStore("standard", store, revocations, TimeProvider.System); - - var registration = new AuthorityClientRegistration( - clientId: "signer", - confidential: false, - displayName: "Signer", - clientSecret: null, - allowedGrantTypes: new[] { "client_credentials" }, - allowedScopes: new[] { "signer.sign" }, - allowedAudiences: new[] { "attestor", "signer" }); - - var result = await provisioning.CreateOrUpdateAsync(registration, CancellationToken.None); - - Assert.True(result.Succeeded); - Assert.True(store.Documents.TryGetValue("signer", out var document)); - Assert.NotNull(document); - Assert.Equal("attestor signer", document!.Properties[AuthorityClientMetadataKeys.Audiences]); - - var descriptor = await provisioning.FindByClientIdAsync("signer", CancellationToken.None); - Assert.NotNull(descriptor); - Assert.Equal(new[] { "attestor", "signer" }, descriptor!.AllowedAudiences.OrderBy(value => value, StringComparer.Ordinal)); - } - - [Fact] - public async Task CreateOrUpdateAsync_MapsCertificateBindings() - { - var store = new TrackingClientStore(); - var revocations = new TrackingRevocationStore(); - var provisioning = new StandardClientProvisioningStore("standard", store, revocations, TimeProvider.System); - - var bindingRegistration = new AuthorityClientCertificateBindingRegistration( - thumbprint: "aa:bb:cc:dd", - serialNumber: "01ff", - subject: "CN=mtls-client", - issuer: "CN=test-ca", - subjectAlternativeNames: new[] { "client.mtls.test", "spiffe://client" }, - notBefore: DateTimeOffset.UtcNow.AddMinutes(-5), - notAfter: DateTimeOffset.UtcNow.AddHours(1), - label: "primary"); - - var registration = new AuthorityClientRegistration( - clientId: "mtls-client", - confidential: true, - displayName: "MTLS Client", - clientSecret: "secret", - allowedGrantTypes: new[] { "client_credentials" }, - allowedScopes: new[] { "signer.sign" }, - allowedAudiences: new[] { "signer" }, - certificateBindings: new[] { bindingRegistration }); - - await provisioning.CreateOrUpdateAsync(registration, CancellationToken.None); - - Assert.True(store.Documents.TryGetValue("mtls-client", out var document)); - Assert.NotNull(document); - var binding = Assert.Single(document!.CertificateBindings); - Assert.Equal("AABBCCDD", binding.Thumbprint); - Assert.Equal("01ff", 
binding.SerialNumber); - Assert.Equal("CN=mtls-client", binding.Subject); - Assert.Equal("CN=test-ca", binding.Issuer); - Assert.Equal(new[] { "client.mtls.test", "spiffe://client" }, binding.SubjectAlternativeNames); - Assert.Equal(bindingRegistration.NotBefore, binding.NotBefore); - Assert.Equal(bindingRegistration.NotAfter, binding.NotAfter); - Assert.Equal("primary", binding.Label); - } - - private sealed class TrackingClientStore : IAuthorityClientStore - { - public Dictionary Documents { get; } = new(StringComparer.OrdinalIgnoreCase); - - public ValueTask FindByClientIdAsync(string clientId, CancellationToken cancellationToken, IClientSessionHandle? session = null) - { - Documents.TryGetValue(clientId, out var document); - return ValueTask.FromResult(document); - } - - public ValueTask UpsertAsync(AuthorityClientDocument document, CancellationToken cancellationToken, IClientSessionHandle? session = null) - { - Documents[document.ClientId] = document; - return ValueTask.CompletedTask; - } - - public ValueTask DeleteByClientIdAsync(string clientId, CancellationToken cancellationToken, IClientSessionHandle? session = null) - { - var removed = Documents.Remove(clientId); - return ValueTask.FromResult(removed); - } - } - - private sealed class TrackingRevocationStore : IAuthorityRevocationStore - { - public List Upserts { get; } = new(); - - public ValueTask UpsertAsync(AuthorityRevocationDocument document, CancellationToken cancellationToken, IClientSessionHandle? session = null) - { - Upserts.Add(document); - return ValueTask.CompletedTask; - } - - public ValueTask RemoveAsync(string category, string revocationId, CancellationToken cancellationToken, IClientSessionHandle? session = null) - => ValueTask.FromResult(true); - - public ValueTask> GetActiveAsync(DateTimeOffset asOf, CancellationToken cancellationToken, IClientSessionHandle? 
session = null) - => ValueTask.FromResult>(Array.Empty()); - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using MongoDB.Driver; +using StellaOps.Authority.Plugins.Abstractions; +using StellaOps.Authority.Plugin.Standard.Storage; +using StellaOps.Authority.Storage.Mongo.Documents; +using StellaOps.Authority.Storage.Mongo.Stores; +using Xunit; + +namespace StellaOps.Authority.Plugin.Standard.Tests; + +public class StandardClientProvisioningStoreTests +{ + [Fact] + public async Task CreateOrUpdateAsync_HashesSecretAndPersistsDocument() + { + var store = new TrackingClientStore(); + var revocations = new TrackingRevocationStore(); + var provisioning = new StandardClientProvisioningStore("standard", store, revocations, TimeProvider.System); + + var registration = new AuthorityClientRegistration( + clientId: "bootstrap-client", + confidential: true, + displayName: "Bootstrap", + clientSecret: "SuperSecret1!", + allowedGrantTypes: new[] { "client_credentials" }, + allowedScopes: new[] { "scopeA" }); + + var result = await provisioning.CreateOrUpdateAsync(registration, CancellationToken.None); + + Assert.True(result.Succeeded); + Assert.True(store.Documents.TryGetValue("bootstrap-client", out var document)); + Assert.NotNull(document); + Assert.Equal(AuthoritySecretHasher.ComputeHash("SuperSecret1!"), document!.SecretHash); + Assert.Equal("standard", document.Plugin); + + var descriptor = await provisioning.FindByClientIdAsync("bootstrap-client", CancellationToken.None); + Assert.NotNull(descriptor); + Assert.Equal("bootstrap-client", descriptor!.ClientId); + Assert.True(descriptor.Confidential); + Assert.Contains("client_credentials", descriptor.AllowedGrantTypes); + Assert.Contains("scopea", descriptor.AllowedScopes); + } + + [Fact] + public async Task CreateOrUpdateAsync_NormalisesTenant() + { + var store = new TrackingClientStore(); + var revocations = new TrackingRevocationStore(); + var provisioning = new StandardClientProvisioningStore("standard", store, revocations, TimeProvider.System); + + var registration = new AuthorityClientRegistration( + clientId: "tenant-client", + confidential: false, + displayName: "Tenant Client", + clientSecret: null, + allowedGrantTypes: new[] { "client_credentials" }, + allowedScopes: new[] { "scopeA" }, + tenant: " Tenant-Alpha " ); + + await provisioning.CreateOrUpdateAsync(registration, CancellationToken.None); + + Assert.True(store.Documents.TryGetValue("tenant-client", out var document)); + Assert.NotNull(document); + Assert.Equal("tenant-alpha", document!.Properties[AuthorityClientMetadataKeys.Tenant]); + + var descriptor = await provisioning.FindByClientIdAsync("tenant-client", CancellationToken.None); + Assert.NotNull(descriptor); + Assert.Equal("tenant-alpha", descriptor!.Tenant); + } + + [Fact] + public async Task CreateOrUpdateAsync_StoresAudiences() + { + var store = new TrackingClientStore(); + var revocations = new TrackingRevocationStore(); + var provisioning = new StandardClientProvisioningStore("standard", store, revocations, TimeProvider.System); + + var registration = new AuthorityClientRegistration( + clientId: "signer", + confidential: false, + displayName: "Signer", + clientSecret: null, + allowedGrantTypes: new[] { "client_credentials" }, + allowedScopes: new[] { "signer.sign" }, + allowedAudiences: new[] { "attestor", "signer" }); + + var result = await provisioning.CreateOrUpdateAsync(registration, CancellationToken.None); + + 
Assert.True(result.Succeeded); + Assert.True(store.Documents.TryGetValue("signer", out var document)); + Assert.NotNull(document); + Assert.Equal("attestor signer", document!.Properties[AuthorityClientMetadataKeys.Audiences]); + + var descriptor = await provisioning.FindByClientIdAsync("signer", CancellationToken.None); + Assert.NotNull(descriptor); + Assert.Equal(new[] { "attestor", "signer" }, descriptor!.AllowedAudiences.OrderBy(value => value, StringComparer.Ordinal)); + } + + [Fact] + public async Task CreateOrUpdateAsync_MapsCertificateBindings() + { + var store = new TrackingClientStore(); + var revocations = new TrackingRevocationStore(); + var provisioning = new StandardClientProvisioningStore("standard", store, revocations, TimeProvider.System); + + var bindingRegistration = new AuthorityClientCertificateBindingRegistration( + thumbprint: "aa:bb:cc:dd", + serialNumber: "01ff", + subject: "CN=mtls-client", + issuer: "CN=test-ca", + subjectAlternativeNames: new[] { "client.mtls.test", "spiffe://client" }, + notBefore: DateTimeOffset.UtcNow.AddMinutes(-5), + notAfter: DateTimeOffset.UtcNow.AddHours(1), + label: "primary"); + + var registration = new AuthorityClientRegistration( + clientId: "mtls-client", + confidential: true, + displayName: "MTLS Client", + clientSecret: "secret", + allowedGrantTypes: new[] { "client_credentials" }, + allowedScopes: new[] { "signer.sign" }, + allowedAudiences: new[] { "signer" }, + certificateBindings: new[] { bindingRegistration }); + + await provisioning.CreateOrUpdateAsync(registration, CancellationToken.None); + + Assert.True(store.Documents.TryGetValue("mtls-client", out var document)); + Assert.NotNull(document); + var binding = Assert.Single(document!.CertificateBindings); + Assert.Equal("AABBCCDD", binding.Thumbprint); + Assert.Equal("01ff", binding.SerialNumber); + Assert.Equal("CN=mtls-client", binding.Subject); + Assert.Equal("CN=test-ca", binding.Issuer); + Assert.Equal(new[] { "client.mtls.test", "spiffe://client" }, binding.SubjectAlternativeNames); + Assert.Equal(bindingRegistration.NotBefore, binding.NotBefore); + Assert.Equal(bindingRegistration.NotAfter, binding.NotAfter); + Assert.Equal("primary", binding.Label); + } + + private sealed class TrackingClientStore : IAuthorityClientStore + { + public Dictionary Documents { get; } = new(StringComparer.OrdinalIgnoreCase); + + public ValueTask FindByClientIdAsync(string clientId, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + Documents.TryGetValue(clientId, out var document); + return ValueTask.FromResult(document); + } + + public ValueTask UpsertAsync(AuthorityClientDocument document, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + Documents[document.ClientId] = document; + return ValueTask.CompletedTask; + } + + public ValueTask DeleteByClientIdAsync(string clientId, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + var removed = Documents.Remove(clientId); + return ValueTask.FromResult(removed); + } + } + + private sealed class TrackingRevocationStore : IAuthorityRevocationStore + { + public List Upserts { get; } = new(); + + public ValueTask UpsertAsync(AuthorityRevocationDocument document, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + Upserts.Add(document); + return ValueTask.CompletedTask; + } + + public ValueTask RemoveAsync(string category, string revocationId, CancellationToken cancellationToken, IClientSessionHandle? 
session = null) + => ValueTask.FromResult(true); + + public ValueTask> GetActiveAsync(DateTimeOffset asOf, CancellationToken cancellationToken, IClientSessionHandle? session = null) + => ValueTask.FromResult>(Array.Empty()); + } +} diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardPluginOptionsTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardPluginOptionsTests.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardPluginOptionsTests.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardPluginOptionsTests.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardPluginRegistrarTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardPluginRegistrarTests.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardPluginRegistrarTests.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardPluginRegistrarTests.cs index 1b365ddf..e30c0e12 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardPluginRegistrarTests.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardPluginRegistrarTests.cs @@ -1,354 +1,354 @@ -using System; -using System.Collections.Generic; -using System.IO; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Hosting; -using Microsoft.Extensions.Options; -using Mongo2Go; -using MongoDB.Driver; -using StellaOps.Authority.Plugins.Abstractions; -using StellaOps.Authority.Plugin.Standard; -using StellaOps.Authority.Plugin.Standard.Bootstrap; -using StellaOps.Authority.Plugin.Standard.Storage; -using StellaOps.Authority.Storage.Mongo.Documents; -using StellaOps.Authority.Storage.Mongo.Stores; - -namespace StellaOps.Authority.Plugin.Standard.Tests; - -public class StandardPluginRegistrarTests -{ - [Fact] - public async Task Register_ConfiguresIdentityProviderAndSeedsBootstrapUser() - { - using var runner = MongoDbRunner.Start(singleNodeReplSet: true); - var client = new MongoClient(runner.ConnectionString); - var database = client.GetDatabase("registrar-tests"); - - var configuration = new ConfigurationBuilder() - .AddInMemoryCollection(new Dictionary - { - ["passwordPolicy:minimumLength"] = "8", - ["passwordPolicy:requireDigit"] = "false", - ["passwordPolicy:requireSymbol"] = "false", - ["lockout:enabled"] = "false", - ["passwordHashing:memorySizeInKib"] = "8192", - ["passwordHashing:iterations"] = "2", - ["passwordHashing:parallelism"] = "1", - ["bootstrapUser:username"] = "bootstrap", - ["bootstrapUser:password"] = "Bootstrap1!", - ["bootstrapUser:requirePasswordReset"] = "true" - }) - .Build(); - - var manifest = new AuthorityPluginManifest( - "standard", - "standard", - true, - typeof(StandardPluginRegistrar).Assembly.GetName().Name, - typeof(StandardPluginRegistrar).Assembly.Location, - new[] { AuthorityPluginCapabilities.Password, AuthorityPluginCapabilities.Bootstrap, AuthorityPluginCapabilities.ClientProvisioning }, - new Dictionary(), - "standard.yaml"); - - var pluginContext = new AuthorityPluginContext(manifest, configuration); - var services = new ServiceCollection(); - services.AddLogging(); - 
services.AddSingleton(database); - services.AddSingleton(new InMemoryClientStore()); - services.AddSingleton(new StubRevocationStore()); - services.AddSingleton(TimeProvider.System); - services.AddSingleton(new StubRevocationStore()); - services.AddSingleton(TimeProvider.System); - services.AddSingleton(new StubRevocationStore()); - services.AddSingleton(TimeProvider.System); - services.AddSingleton(new StubRevocationStore()); - services.AddSingleton(TimeProvider.System); - services.AddSingleton(new StubRevocationStore()); - services.AddSingleton(TimeProvider.System); - services.AddSingleton(TimeProvider.System); - services.AddSingleton(TimeProvider.System); - services.AddSingleton(new StubRevocationStore()); - services.AddSingleton(new StubRevocationStore()); - services.AddSingleton(new StubRevocationStore()); - - var registrar = new StandardPluginRegistrar(); - registrar.Register(new AuthorityPluginRegistrationContext(services, pluginContext, configuration)); - - using var provider = services.BuildServiceProvider(); - var hostedServices = provider.GetServices(); - foreach (var hosted in hostedServices) - { - if (hosted is StandardPluginBootstrapper bootstrapper) - { - await bootstrapper.StartAsync(CancellationToken.None); - } - } - - using var scope = provider.CreateScope(); - var plugin = scope.ServiceProvider.GetRequiredService(); - Assert.Equal("standard", plugin.Type); - Assert.True(plugin.Capabilities.SupportsPassword); - Assert.False(plugin.Capabilities.SupportsMfa); - Assert.True(plugin.Capabilities.SupportsClientProvisioning); - - var verification = await plugin.Credentials.VerifyPasswordAsync("bootstrap", "Bootstrap1!", CancellationToken.None); - Assert.True(verification.Succeeded); - Assert.True(verification.User?.RequiresPasswordReset); - } - - [Fact] - public void Register_LogsWarning_WhenPasswordPolicyWeaker() - { - using var runner = MongoDbRunner.Start(singleNodeReplSet: true); - var client = new MongoClient(runner.ConnectionString); - var database = client.GetDatabase("registrar-password-policy"); - - var configuration = new ConfigurationBuilder() - .AddInMemoryCollection(new Dictionary - { - ["passwordPolicy:minimumLength"] = "6", - ["passwordPolicy:requireUppercase"] = "false", - ["passwordPolicy:requireLowercase"] = "false", - ["passwordPolicy:requireDigit"] = "false", - ["passwordPolicy:requireSymbol"] = "false" - }) - .Build(); - - var manifest = new AuthorityPluginManifest( - "standard", - "standard", - true, - typeof(StandardPluginRegistrar).Assembly.GetName().Name, - typeof(StandardPluginRegistrar).Assembly.Location, - new[] { AuthorityPluginCapabilities.Password }, - new Dictionary(), - "standard.yaml"); - - var pluginContext = new AuthorityPluginContext(manifest, configuration); - var services = new ServiceCollection(); - var loggerProvider = new CapturingLoggerProvider(); - services.AddLogging(builder => builder.AddProvider(loggerProvider)); - services.AddSingleton(database); - services.AddSingleton(new InMemoryClientStore()); - - var registrar = new StandardPluginRegistrar(); - registrar.Register(new AuthorityPluginRegistrationContext(services, pluginContext, configuration)); - - using var provider = services.BuildServiceProvider(); - using var scope = provider.CreateScope(); - _ = scope.ServiceProvider.GetRequiredService(); - - Assert.Contains(loggerProvider.Entries, entry => - entry.Level == LogLevel.Warning && - entry.Category.Contains(typeof(StandardPluginRegistrar).FullName!, StringComparison.Ordinal) && - entry.Message.Contains("weaker password policy", 
StringComparison.OrdinalIgnoreCase)); - } - - [Fact] - public void Register_ForcesPasswordCapability_WhenManifestMissing() - { - using var runner = MongoDbRunner.Start(singleNodeReplSet: true); - var client = new MongoClient(runner.ConnectionString); - var database = client.GetDatabase("registrar-capabilities"); - - var configuration = new ConfigurationBuilder().Build(); - var manifest = new AuthorityPluginManifest( - "standard", - "standard", - true, - typeof(StandardPluginRegistrar).Assembly.GetName().Name, - typeof(StandardPluginRegistrar).Assembly.Location, - Array.Empty(), - new Dictionary(), - "standard.yaml"); - - var pluginContext = new AuthorityPluginContext(manifest, configuration); - var services = new ServiceCollection(); - services.AddLogging(); - services.AddSingleton(database); - services.AddSingleton(new InMemoryClientStore()); - services.AddSingleton(new StubRevocationStore()); - services.AddSingleton(TimeProvider.System); - - var registrar = new StandardPluginRegistrar(); - registrar.Register(new AuthorityPluginRegistrationContext(services, pluginContext, configuration)); - - using var provider = services.BuildServiceProvider(); - using var scope = provider.CreateScope(); - var plugin = scope.ServiceProvider.GetRequiredService(); - - Assert.True(plugin.Capabilities.SupportsPassword); - } - - [Fact] - public void Register_Throws_WhenBootstrapConfigurationIncomplete() - { - using var runner = MongoDbRunner.Start(singleNodeReplSet: true); - var client = new MongoClient(runner.ConnectionString); - var database = client.GetDatabase("registrar-bootstrap-validation"); - - var configuration = new ConfigurationBuilder() - .AddInMemoryCollection(new Dictionary - { - ["bootstrapUser:username"] = "bootstrap" - }) - .Build(); - - var manifest = new AuthorityPluginManifest( - "standard", - "standard", - true, - typeof(StandardPluginRegistrar).Assembly.GetName().Name, - typeof(StandardPluginRegistrar).Assembly.Location, - new[] { AuthorityPluginCapabilities.Password }, - new Dictionary(), - "standard.yaml"); - - var pluginContext = new AuthorityPluginContext(manifest, configuration); - var services = new ServiceCollection(); - services.AddLogging(); - services.AddSingleton(database); - services.AddSingleton(new InMemoryClientStore()); - - var registrar = new StandardPluginRegistrar(); - registrar.Register(new AuthorityPluginRegistrationContext(services, pluginContext, configuration)); - - using var provider = services.BuildServiceProvider(); - using var scope = provider.CreateScope(); - Assert.Throws(() => scope.ServiceProvider.GetRequiredService()); - } - - [Fact] - public void Register_NormalizesTokenSigningKeyDirectory() - { - using var runner = MongoDbRunner.Start(singleNodeReplSet: true); - var client = new MongoClient(runner.ConnectionString); - var database = client.GetDatabase("registrar-token-signing"); - - var configuration = new ConfigurationBuilder() - .AddInMemoryCollection(new Dictionary - { - ["tokenSigning:keyDirectory"] = "../keys" - }) - .Build(); - - var configDir = Path.Combine(Path.GetTempPath(), "stellaops-standard-plugin", Guid.NewGuid().ToString("N")); - Directory.CreateDirectory(configDir); - - try - { - var configPath = Path.Combine(configDir, "standard.yaml"); - var manifest = new AuthorityPluginManifest( - "standard", - "standard", - true, - typeof(StandardPluginRegistrar).Assembly.GetName().Name, - typeof(StandardPluginRegistrar).Assembly.Location, - new[] { AuthorityPluginCapabilities.Password }, - new Dictionary(), - configPath); - - var pluginContext = 
new AuthorityPluginContext(manifest, configuration); - var services = new ServiceCollection(); - services.AddLogging(); - services.AddSingleton(database); - services.AddSingleton(new InMemoryClientStore()); - - var registrar = new StandardPluginRegistrar(); - registrar.Register(new AuthorityPluginRegistrationContext(services, pluginContext, configuration)); - - using var provider = services.BuildServiceProvider(); - var optionsMonitor = provider.GetRequiredService>(); - var options = optionsMonitor.Get("standard"); - - var expected = Path.GetFullPath(Path.Combine(configDir, "../keys")); - Assert.Equal(expected, options.TokenSigning.KeyDirectory); - } - finally - { - if (Directory.Exists(configDir)) - { - Directory.Delete(configDir, recursive: true); - } - } - } -} - -internal sealed record CapturedLogEntry(string Category, LogLevel Level, string Message); - -internal sealed class CapturingLoggerProvider : ILoggerProvider -{ - public List Entries { get; } = new(); - - public ILogger CreateLogger(string categoryName) => new CapturingLogger(categoryName, Entries); - - public void Dispose() - { - } - - private sealed class CapturingLogger : ILogger - { - private readonly string category; - private readonly List entries; - - public CapturingLogger(string category, List entries) - { - this.category = category; - this.entries = entries; - } - - public IDisposable BeginScope(TState state) where TState : notnull => NullScope.Instance; - - public bool IsEnabled(LogLevel logLevel) => true; - - public void Log(LogLevel logLevel, EventId eventId, TState state, Exception? exception, Func formatter) - { - entries.Add(new CapturedLogEntry(category, logLevel, formatter(state, exception))); - } - - private sealed class NullScope : IDisposable - { - public static readonly NullScope Instance = new(); - - public void Dispose() - { - } - } - } -} - -internal sealed class StubRevocationStore : IAuthorityRevocationStore -{ - public ValueTask UpsertAsync(AuthorityRevocationDocument document, CancellationToken cancellationToken, IClientSessionHandle? session = null) - => ValueTask.CompletedTask; - - public ValueTask RemoveAsync(string category, string revocationId, CancellationToken cancellationToken, IClientSessionHandle? session = null) - => ValueTask.FromResult(false); - - public ValueTask> GetActiveAsync(DateTimeOffset asOf, CancellationToken cancellationToken, IClientSessionHandle? session = null) - => ValueTask.FromResult>(Array.Empty()); -} - -internal sealed class InMemoryClientStore : IAuthorityClientStore -{ - private readonly Dictionary clients = new(StringComparer.OrdinalIgnoreCase); - - public ValueTask FindByClientIdAsync(string clientId, CancellationToken cancellationToken, IClientSessionHandle? session = null) - { - clients.TryGetValue(clientId, out var document); - return ValueTask.FromResult(document); - } - - public ValueTask UpsertAsync(AuthorityClientDocument document, CancellationToken cancellationToken, IClientSessionHandle? session = null) - { - clients[document.ClientId] = document; - return ValueTask.CompletedTask; - } - - public ValueTask DeleteByClientIdAsync(string clientId, CancellationToken cancellationToken, IClientSessionHandle? 
session = null) - => ValueTask.FromResult(clients.Remove(clientId)); -} +using System; +using System.Collections.Generic; +using System.IO; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Options; +using Mongo2Go; +using MongoDB.Driver; +using StellaOps.Authority.Plugins.Abstractions; +using StellaOps.Authority.Plugin.Standard; +using StellaOps.Authority.Plugin.Standard.Bootstrap; +using StellaOps.Authority.Plugin.Standard.Storage; +using StellaOps.Authority.Storage.Mongo.Documents; +using StellaOps.Authority.Storage.Mongo.Stores; + +namespace StellaOps.Authority.Plugin.Standard.Tests; + +public class StandardPluginRegistrarTests +{ + [Fact] + public async Task Register_ConfiguresIdentityProviderAndSeedsBootstrapUser() + { + using var runner = MongoDbRunner.Start(singleNodeReplSet: true); + var client = new MongoClient(runner.ConnectionString); + var database = client.GetDatabase("registrar-tests"); + + var configuration = new ConfigurationBuilder() + .AddInMemoryCollection(new Dictionary + { + ["passwordPolicy:minimumLength"] = "8", + ["passwordPolicy:requireDigit"] = "false", + ["passwordPolicy:requireSymbol"] = "false", + ["lockout:enabled"] = "false", + ["passwordHashing:memorySizeInKib"] = "8192", + ["passwordHashing:iterations"] = "2", + ["passwordHashing:parallelism"] = "1", + ["bootstrapUser:username"] = "bootstrap", + ["bootstrapUser:password"] = "Bootstrap1!", + ["bootstrapUser:requirePasswordReset"] = "true" + }) + .Build(); + + var manifest = new AuthorityPluginManifest( + "standard", + "standard", + true, + typeof(StandardPluginRegistrar).Assembly.GetName().Name, + typeof(StandardPluginRegistrar).Assembly.Location, + new[] { AuthorityPluginCapabilities.Password, AuthorityPluginCapabilities.Bootstrap, AuthorityPluginCapabilities.ClientProvisioning }, + new Dictionary(), + "standard.yaml"); + + var pluginContext = new AuthorityPluginContext(manifest, configuration); + var services = new ServiceCollection(); + services.AddLogging(); + services.AddSingleton(database); + services.AddSingleton(new InMemoryClientStore()); + services.AddSingleton(new StubRevocationStore()); + services.AddSingleton(TimeProvider.System); + services.AddSingleton(new StubRevocationStore()); + services.AddSingleton(TimeProvider.System); + services.AddSingleton(new StubRevocationStore()); + services.AddSingleton(TimeProvider.System); + services.AddSingleton(new StubRevocationStore()); + services.AddSingleton(TimeProvider.System); + services.AddSingleton(new StubRevocationStore()); + services.AddSingleton(TimeProvider.System); + services.AddSingleton(TimeProvider.System); + services.AddSingleton(TimeProvider.System); + services.AddSingleton(new StubRevocationStore()); + services.AddSingleton(new StubRevocationStore()); + services.AddSingleton(new StubRevocationStore()); + + var registrar = new StandardPluginRegistrar(); + registrar.Register(new AuthorityPluginRegistrationContext(services, pluginContext, configuration)); + + using var provider = services.BuildServiceProvider(); + var hostedServices = provider.GetServices(); + foreach (var hosted in hostedServices) + { + if (hosted is StandardPluginBootstrapper bootstrapper) + { + await bootstrapper.StartAsync(CancellationToken.None); + } + } + + using var scope = provider.CreateScope(); + var plugin = scope.ServiceProvider.GetRequiredService(); + 
Assert.Equal("standard", plugin.Type); + Assert.True(plugin.Capabilities.SupportsPassword); + Assert.False(plugin.Capabilities.SupportsMfa); + Assert.True(plugin.Capabilities.SupportsClientProvisioning); + + var verification = await plugin.Credentials.VerifyPasswordAsync("bootstrap", "Bootstrap1!", CancellationToken.None); + Assert.True(verification.Succeeded); + Assert.True(verification.User?.RequiresPasswordReset); + } + + [Fact] + public void Register_LogsWarning_WhenPasswordPolicyWeaker() + { + using var runner = MongoDbRunner.Start(singleNodeReplSet: true); + var client = new MongoClient(runner.ConnectionString); + var database = client.GetDatabase("registrar-password-policy"); + + var configuration = new ConfigurationBuilder() + .AddInMemoryCollection(new Dictionary + { + ["passwordPolicy:minimumLength"] = "6", + ["passwordPolicy:requireUppercase"] = "false", + ["passwordPolicy:requireLowercase"] = "false", + ["passwordPolicy:requireDigit"] = "false", + ["passwordPolicy:requireSymbol"] = "false" + }) + .Build(); + + var manifest = new AuthorityPluginManifest( + "standard", + "standard", + true, + typeof(StandardPluginRegistrar).Assembly.GetName().Name, + typeof(StandardPluginRegistrar).Assembly.Location, + new[] { AuthorityPluginCapabilities.Password }, + new Dictionary(), + "standard.yaml"); + + var pluginContext = new AuthorityPluginContext(manifest, configuration); + var services = new ServiceCollection(); + var loggerProvider = new CapturingLoggerProvider(); + services.AddLogging(builder => builder.AddProvider(loggerProvider)); + services.AddSingleton(database); + services.AddSingleton(new InMemoryClientStore()); + + var registrar = new StandardPluginRegistrar(); + registrar.Register(new AuthorityPluginRegistrationContext(services, pluginContext, configuration)); + + using var provider = services.BuildServiceProvider(); + using var scope = provider.CreateScope(); + _ = scope.ServiceProvider.GetRequiredService(); + + Assert.Contains(loggerProvider.Entries, entry => + entry.Level == LogLevel.Warning && + entry.Category.Contains(typeof(StandardPluginRegistrar).FullName!, StringComparison.Ordinal) && + entry.Message.Contains("weaker password policy", StringComparison.OrdinalIgnoreCase)); + } + + [Fact] + public void Register_ForcesPasswordCapability_WhenManifestMissing() + { + using var runner = MongoDbRunner.Start(singleNodeReplSet: true); + var client = new MongoClient(runner.ConnectionString); + var database = client.GetDatabase("registrar-capabilities"); + + var configuration = new ConfigurationBuilder().Build(); + var manifest = new AuthorityPluginManifest( + "standard", + "standard", + true, + typeof(StandardPluginRegistrar).Assembly.GetName().Name, + typeof(StandardPluginRegistrar).Assembly.Location, + Array.Empty(), + new Dictionary(), + "standard.yaml"); + + var pluginContext = new AuthorityPluginContext(manifest, configuration); + var services = new ServiceCollection(); + services.AddLogging(); + services.AddSingleton(database); + services.AddSingleton(new InMemoryClientStore()); + services.AddSingleton(new StubRevocationStore()); + services.AddSingleton(TimeProvider.System); + + var registrar = new StandardPluginRegistrar(); + registrar.Register(new AuthorityPluginRegistrationContext(services, pluginContext, configuration)); + + using var provider = services.BuildServiceProvider(); + using var scope = provider.CreateScope(); + var plugin = scope.ServiceProvider.GetRequiredService(); + + Assert.True(plugin.Capabilities.SupportsPassword); + } + + [Fact] + public void 
Register_Throws_WhenBootstrapConfigurationIncomplete() + { + using var runner = MongoDbRunner.Start(singleNodeReplSet: true); + var client = new MongoClient(runner.ConnectionString); + var database = client.GetDatabase("registrar-bootstrap-validation"); + + var configuration = new ConfigurationBuilder() + .AddInMemoryCollection(new Dictionary + { + ["bootstrapUser:username"] = "bootstrap" + }) + .Build(); + + var manifest = new AuthorityPluginManifest( + "standard", + "standard", + true, + typeof(StandardPluginRegistrar).Assembly.GetName().Name, + typeof(StandardPluginRegistrar).Assembly.Location, + new[] { AuthorityPluginCapabilities.Password }, + new Dictionary(), + "standard.yaml"); + + var pluginContext = new AuthorityPluginContext(manifest, configuration); + var services = new ServiceCollection(); + services.AddLogging(); + services.AddSingleton(database); + services.AddSingleton(new InMemoryClientStore()); + + var registrar = new StandardPluginRegistrar(); + registrar.Register(new AuthorityPluginRegistrationContext(services, pluginContext, configuration)); + + using var provider = services.BuildServiceProvider(); + using var scope = provider.CreateScope(); + Assert.Throws(() => scope.ServiceProvider.GetRequiredService()); + } + + [Fact] + public void Register_NormalizesTokenSigningKeyDirectory() + { + using var runner = MongoDbRunner.Start(singleNodeReplSet: true); + var client = new MongoClient(runner.ConnectionString); + var database = client.GetDatabase("registrar-token-signing"); + + var configuration = new ConfigurationBuilder() + .AddInMemoryCollection(new Dictionary + { + ["tokenSigning:keyDirectory"] = "../keys" + }) + .Build(); + + var configDir = Path.Combine(Path.GetTempPath(), "stellaops-standard-plugin", Guid.NewGuid().ToString("N")); + Directory.CreateDirectory(configDir); + + try + { + var configPath = Path.Combine(configDir, "standard.yaml"); + var manifest = new AuthorityPluginManifest( + "standard", + "standard", + true, + typeof(StandardPluginRegistrar).Assembly.GetName().Name, + typeof(StandardPluginRegistrar).Assembly.Location, + new[] { AuthorityPluginCapabilities.Password }, + new Dictionary(), + configPath); + + var pluginContext = new AuthorityPluginContext(manifest, configuration); + var services = new ServiceCollection(); + services.AddLogging(); + services.AddSingleton(database); + services.AddSingleton(new InMemoryClientStore()); + + var registrar = new StandardPluginRegistrar(); + registrar.Register(new AuthorityPluginRegistrationContext(services, pluginContext, configuration)); + + using var provider = services.BuildServiceProvider(); + var optionsMonitor = provider.GetRequiredService>(); + var options = optionsMonitor.Get("standard"); + + var expected = Path.GetFullPath(Path.Combine(configDir, "../keys")); + Assert.Equal(expected, options.TokenSigning.KeyDirectory); + } + finally + { + if (Directory.Exists(configDir)) + { + Directory.Delete(configDir, recursive: true); + } + } + } +} + +internal sealed record CapturedLogEntry(string Category, LogLevel Level, string Message); + +internal sealed class CapturingLoggerProvider : ILoggerProvider +{ + public List Entries { get; } = new(); + + public ILogger CreateLogger(string categoryName) => new CapturingLogger(categoryName, Entries); + + public void Dispose() + { + } + + private sealed class CapturingLogger : ILogger + { + private readonly string category; + private readonly List entries; + + public CapturingLogger(string category, List entries) + { + this.category = category; + this.entries = entries; + 
} + + public IDisposable BeginScope(TState state) where TState : notnull => NullScope.Instance; + + public bool IsEnabled(LogLevel logLevel) => true; + + public void Log(LogLevel logLevel, EventId eventId, TState state, Exception? exception, Func formatter) + { + entries.Add(new CapturedLogEntry(category, logLevel, formatter(state, exception))); + } + + private sealed class NullScope : IDisposable + { + public static readonly NullScope Instance = new(); + + public void Dispose() + { + } + } + } +} + +internal sealed class StubRevocationStore : IAuthorityRevocationStore +{ + public ValueTask UpsertAsync(AuthorityRevocationDocument document, CancellationToken cancellationToken, IClientSessionHandle? session = null) + => ValueTask.CompletedTask; + + public ValueTask RemoveAsync(string category, string revocationId, CancellationToken cancellationToken, IClientSessionHandle? session = null) + => ValueTask.FromResult(false); + + public ValueTask> GetActiveAsync(DateTimeOffset asOf, CancellationToken cancellationToken, IClientSessionHandle? session = null) + => ValueTask.FromResult>(Array.Empty()); +} + +internal sealed class InMemoryClientStore : IAuthorityClientStore +{ + private readonly Dictionary clients = new(StringComparer.OrdinalIgnoreCase); + + public ValueTask FindByClientIdAsync(string clientId, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + clients.TryGetValue(clientId, out var document); + return ValueTask.FromResult(document); + } + + public ValueTask UpsertAsync(AuthorityClientDocument document, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + clients[document.ClientId] = document; + return ValueTask.CompletedTask; + } + + public ValueTask DeleteByClientIdAsync(string clientId, CancellationToken cancellationToken, IClientSessionHandle? 
session = null) + => ValueTask.FromResult(clients.Remove(clientId)); +} diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardUserCredentialStoreTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardUserCredentialStoreTests.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardUserCredentialStoreTests.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StandardUserCredentialStoreTests.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StellaOps.Authority.Plugin.Standard.Tests.csproj b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StellaOps.Authority.Plugin.Standard.Tests.csproj similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StellaOps.Authority.Plugin.Standard.Tests.csproj rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StellaOps.Authority.Plugin.Standard.Tests.csproj diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/AGENTS.md b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/AGENTS.md similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/AGENTS.md rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/AGENTS.md diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/Bootstrap/StandardPluginBootstrapper.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/Bootstrap/StandardPluginBootstrapper.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/Bootstrap/StandardPluginBootstrapper.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/Bootstrap/StandardPluginBootstrapper.cs index 54d8607a..d6c6d1ad 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/Bootstrap/StandardPluginBootstrapper.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/Bootstrap/StandardPluginBootstrapper.cs @@ -1,44 +1,44 @@ -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Hosting; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Authority.Plugin.Standard.Storage; - -namespace StellaOps.Authority.Plugin.Standard.Bootstrap; - -internal sealed class StandardPluginBootstrapper : IHostedService -{ - private readonly string pluginName; - private readonly IServiceScopeFactory scopeFactory; - private readonly ILogger logger; - - public StandardPluginBootstrapper( - string pluginName, - IServiceScopeFactory scopeFactory, - ILogger logger) - { - this.pluginName = pluginName; - this.scopeFactory = scopeFactory; - this.logger = logger; - } - - public async Task StartAsync(CancellationToken cancellationToken) - { - using var scope = scopeFactory.CreateScope(); - var optionsMonitor = scope.ServiceProvider.GetRequiredService>(); - var credentialStore = scope.ServiceProvider.GetRequiredService(); - - var options = optionsMonitor.Get(pluginName); - if (options.BootstrapUser is null || !options.BootstrapUser.IsConfigured) - { - return; - } - - logger.LogInformation("Standard Authority plugin '{PluginName}' ensuring bootstrap user.", pluginName); - await credentialStore.EnsureBootstrapUserAsync(options.BootstrapUser, 
cancellationToken).ConfigureAwait(false); - } - - public Task StopAsync(CancellationToken cancellationToken) => Task.CompletedTask; -} +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Authority.Plugin.Standard.Storage; + +namespace StellaOps.Authority.Plugin.Standard.Bootstrap; + +internal sealed class StandardPluginBootstrapper : IHostedService +{ + private readonly string pluginName; + private readonly IServiceScopeFactory scopeFactory; + private readonly ILogger logger; + + public StandardPluginBootstrapper( + string pluginName, + IServiceScopeFactory scopeFactory, + ILogger logger) + { + this.pluginName = pluginName; + this.scopeFactory = scopeFactory; + this.logger = logger; + } + + public async Task StartAsync(CancellationToken cancellationToken) + { + using var scope = scopeFactory.CreateScope(); + var optionsMonitor = scope.ServiceProvider.GetRequiredService>(); + var credentialStore = scope.ServiceProvider.GetRequiredService(); + + var options = optionsMonitor.Get(pluginName); + if (options.BootstrapUser is null || !options.BootstrapUser.IsConfigured) + { + return; + } + + logger.LogInformation("Standard Authority plugin '{PluginName}' ensuring bootstrap user.", pluginName); + await credentialStore.EnsureBootstrapUserAsync(options.BootstrapUser, cancellationToken).ConfigureAwait(false); + } + + public Task StopAsync(CancellationToken cancellationToken) => Task.CompletedTask; +} diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/Properties/AssemblyInfo.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/Properties/AssemblyInfo.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/Properties/AssemblyInfo.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/Properties/AssemblyInfo.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/Security/IPasswordHasher.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/Security/IPasswordHasher.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/Security/IPasswordHasher.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/Security/IPasswordHasher.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StandardClaimsEnricher.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StandardClaimsEnricher.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StandardClaimsEnricher.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StandardClaimsEnricher.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StandardIdentityProviderPlugin.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StandardIdentityProviderPlugin.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StandardIdentityProviderPlugin.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StandardIdentityProviderPlugin.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StandardPluginOptions.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StandardPluginOptions.cs similarity index 100% rename 
from src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StandardPluginOptions.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StandardPluginOptions.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StandardPluginRegistrar.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StandardPluginRegistrar.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StandardPluginRegistrar.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StandardPluginRegistrar.cs index eeb9a7c0..ddf7948e 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StandardPluginRegistrar.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StandardPluginRegistrar.cs @@ -1,112 +1,112 @@ -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.DependencyInjection.Extensions; -using Microsoft.Extensions.Hosting; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using MongoDB.Driver; -using StellaOps.Authority.Plugins.Abstractions; -using StellaOps.Authority.Plugin.Standard.Bootstrap; -using StellaOps.Authority.Plugin.Standard.Security; -using StellaOps.Authority.Plugin.Standard.Storage; -using StellaOps.Authority.Storage.Mongo.Stores; -using StellaOps.Cryptography; -using StellaOps.Cryptography.DependencyInjection; - -namespace StellaOps.Authority.Plugin.Standard; - -internal sealed class StandardPluginRegistrar : IAuthorityPluginRegistrar -{ - public string PluginType => "standard"; - - public void Register(AuthorityPluginRegistrationContext context) - { - if (context is null) - { - throw new ArgumentNullException(nameof(context)); - } - - var pluginName = context.Plugin.Manifest.Name; - - context.Services.AddSingleton(); - context.Services.AddSingleton(sp => sp.GetRequiredService()); - - context.Services.AddStellaOpsCrypto(); - - var configPath = context.Plugin.Manifest.ConfigPath; - - context.Services.AddOptions(pluginName) - .Bind(context.Plugin.Configuration) - .PostConfigure(options => - { - options.Normalize(configPath); - options.Validate(pluginName); - }) - .ValidateOnStart(); - - context.Services.AddScoped(sp => - { - var database = sp.GetRequiredService(); - var optionsMonitor = sp.GetRequiredService>(); - var pluginOptions = optionsMonitor.Get(pluginName); - var cryptoProvider = sp.GetRequiredService(); - var passwordHasher = new CryptoPasswordHasher(pluginOptions, cryptoProvider); - var loggerFactory = sp.GetRequiredService(); - var registrarLogger = loggerFactory.CreateLogger(); - - var baselinePolicy = new PasswordPolicyOptions(); - if (pluginOptions.PasswordPolicy.IsWeakerThan(baselinePolicy)) - { - registrarLogger.LogWarning( - "Standard plugin '{Plugin}' configured a weaker password policy (minLength={Length}, requireUpper={Upper}, requireLower={Lower}, requireDigit={Digit}, requireSymbol={Symbol}) than the baseline (minLength={BaseLength}, requireUpper={BaseUpper}, requireLower={BaseLower}, requireDigit={BaseDigit}, requireSymbol={BaseSymbol}).", - pluginName, - pluginOptions.PasswordPolicy.MinimumLength, - pluginOptions.PasswordPolicy.RequireUppercase, - pluginOptions.PasswordPolicy.RequireLowercase, - pluginOptions.PasswordPolicy.RequireDigit, - pluginOptions.PasswordPolicy.RequireSymbol, - baselinePolicy.MinimumLength, - baselinePolicy.RequireUppercase, - baselinePolicy.RequireLowercase, - baselinePolicy.RequireDigit, - baselinePolicy.RequireSymbol); - } - 
- return new StandardUserCredentialStore( - pluginName, - database, - pluginOptions, - passwordHasher, - loggerFactory.CreateLogger()); - }); - - context.Services.AddScoped(sp => - { - var clientStore = sp.GetRequiredService(); - var revocationStore = sp.GetRequiredService(); - var timeProvider = sp.GetRequiredService(); - return new StandardClientProvisioningStore(pluginName, clientStore, revocationStore, timeProvider); - }); - - context.Services.AddScoped(sp => - { - var store = sp.GetRequiredService(); - var clientProvisioningStore = sp.GetRequiredService(); - var loggerFactory = sp.GetRequiredService(); - return new StandardIdentityProviderPlugin( - context.Plugin, - store, - clientProvisioningStore, - sp.GetRequiredService(), - loggerFactory.CreateLogger()); - }); - - context.Services.AddScoped(sp => - sp.GetRequiredService()); - - context.Services.AddSingleton(sp => - new StandardPluginBootstrapper( - pluginName, - sp.GetRequiredService(), - sp.GetRequiredService>())); - } -} +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using MongoDB.Driver; +using StellaOps.Authority.Plugins.Abstractions; +using StellaOps.Authority.Plugin.Standard.Bootstrap; +using StellaOps.Authority.Plugin.Standard.Security; +using StellaOps.Authority.Plugin.Standard.Storage; +using StellaOps.Authority.Storage.Mongo.Stores; +using StellaOps.Cryptography; +using StellaOps.Cryptography.DependencyInjection; + +namespace StellaOps.Authority.Plugin.Standard; + +internal sealed class StandardPluginRegistrar : IAuthorityPluginRegistrar +{ + public string PluginType => "standard"; + + public void Register(AuthorityPluginRegistrationContext context) + { + if (context is null) + { + throw new ArgumentNullException(nameof(context)); + } + + var pluginName = context.Plugin.Manifest.Name; + + context.Services.AddSingleton(); + context.Services.AddSingleton(sp => sp.GetRequiredService()); + + context.Services.AddStellaOpsCrypto(); + + var configPath = context.Plugin.Manifest.ConfigPath; + + context.Services.AddOptions(pluginName) + .Bind(context.Plugin.Configuration) + .PostConfigure(options => + { + options.Normalize(configPath); + options.Validate(pluginName); + }) + .ValidateOnStart(); + + context.Services.AddScoped(sp => + { + var database = sp.GetRequiredService(); + var optionsMonitor = sp.GetRequiredService>(); + var pluginOptions = optionsMonitor.Get(pluginName); + var cryptoProvider = sp.GetRequiredService(); + var passwordHasher = new CryptoPasswordHasher(pluginOptions, cryptoProvider); + var loggerFactory = sp.GetRequiredService(); + var registrarLogger = loggerFactory.CreateLogger(); + + var baselinePolicy = new PasswordPolicyOptions(); + if (pluginOptions.PasswordPolicy.IsWeakerThan(baselinePolicy)) + { + registrarLogger.LogWarning( + "Standard plugin '{Plugin}' configured a weaker password policy (minLength={Length}, requireUpper={Upper}, requireLower={Lower}, requireDigit={Digit}, requireSymbol={Symbol}) than the baseline (minLength={BaseLength}, requireUpper={BaseUpper}, requireLower={BaseLower}, requireDigit={BaseDigit}, requireSymbol={BaseSymbol}).", + pluginName, + pluginOptions.PasswordPolicy.MinimumLength, + pluginOptions.PasswordPolicy.RequireUppercase, + pluginOptions.PasswordPolicy.RequireLowercase, + pluginOptions.PasswordPolicy.RequireDigit, + pluginOptions.PasswordPolicy.RequireSymbol, + baselinePolicy.MinimumLength, + 
baselinePolicy.RequireUppercase, + baselinePolicy.RequireLowercase, + baselinePolicy.RequireDigit, + baselinePolicy.RequireSymbol); + } + + return new StandardUserCredentialStore( + pluginName, + database, + pluginOptions, + passwordHasher, + loggerFactory.CreateLogger()); + }); + + context.Services.AddScoped(sp => + { + var clientStore = sp.GetRequiredService(); + var revocationStore = sp.GetRequiredService(); + var timeProvider = sp.GetRequiredService(); + return new StandardClientProvisioningStore(pluginName, clientStore, revocationStore, timeProvider); + }); + + context.Services.AddScoped(sp => + { + var store = sp.GetRequiredService(); + var clientProvisioningStore = sp.GetRequiredService(); + var loggerFactory = sp.GetRequiredService(); + return new StandardIdentityProviderPlugin( + context.Plugin, + store, + clientProvisioningStore, + sp.GetRequiredService(), + loggerFactory.CreateLogger()); + }); + + context.Services.AddScoped(sp => + sp.GetRequiredService()); + + context.Services.AddSingleton(sp => + new StandardPluginBootstrapper( + pluginName, + sp.GetRequiredService(), + sp.GetRequiredService>())); + } +} diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StellaOps.Authority.Plugin.Standard.csproj b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StellaOps.Authority.Plugin.Standard.csproj similarity index 71% rename from src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StellaOps.Authority.Plugin.Standard.csproj rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StellaOps.Authority.Plugin.Standard.csproj index dba9ce07..2b50f947 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StellaOps.Authority.Plugin.Standard.csproj +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/StellaOps.Authority.Plugin.Standard.csproj @@ -1,4 +1,5 @@ - + + net10.0 preview @@ -16,9 +17,9 @@ - + - - + + - + \ No newline at end of file diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/Storage/StandardClientProvisioningStore.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/Storage/StandardClientProvisioningStore.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/Storage/StandardClientProvisioningStore.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/Storage/StandardClientProvisioningStore.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/Storage/StandardUserCredentialStore.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/Storage/StandardUserCredentialStore.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/Storage/StandardUserCredentialStore.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/Storage/StandardUserCredentialStore.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/Storage/StandardUserDocument.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/Storage/StandardUserDocument.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/Storage/StandardUserDocument.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/Storage/StandardUserDocument.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/TASKS.md 
b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/TASKS.md similarity index 99% rename from src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/TASKS.md rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/TASKS.md index 4b7fb53b..20b9b35f 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/TASKS.md +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard/TASKS.md @@ -1,20 +1,20 @@ -# Team 8 / Plugin Standard Backlog (UTC 2025-10-10) - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| PLG6.DOC | DONE (2025-10-11) | BE-Auth Plugin, Docs Guild | PLG1–PLG5 | Final polish + diagrams for plugin developer guide (AUTHPLUG-DOCS-01-001). | Docs team delivers copy-edit + exported diagrams; PR merged. | -| SEC1.PLG | DONE (2025-10-11) | Security Guild, BE-Auth Plugin | SEC1.A (StellaOps.Cryptography) | Swap Standard plugin hashing to Argon2id via `StellaOps.Cryptography` abstractions; keep PBKDF2 verification for legacy. | ✅ `StandardUserCredentialStore` uses `ICryptoProvider` to hash/check; ✅ Transparent rehash on success; ✅ Unit tests cover tamper + legacy rehash. | -| SEC1.OPT | DONE (2025-10-11) | Security Guild | SEC1.PLG | Expose password hashing knobs in `StandardPluginOptions` (`memoryKiB`, `iterations`, `parallelism`, `algorithm`) with validation. | ✅ Options bound from YAML; ✅ Invalid configs throw; ✅ Docs include tuning guidance. | -| SEC2.PLG | BLOCKED (2025-10-21) | Security Guild, Storage Guild | SEC2.A (audit contract) | Emit audit events from password verification outcomes and persist via `IAuthorityLoginAttemptStore`.
⛔ Waiting on AUTH-DPOP-11-001 / AUTH-MTLS-11-002 / PLUGIN-DI-08-001 to stabilise Authority auth surfaces before final verification + publish. | ✅ Serilog events enriched with subject/client/IP/outcome; ✅ Mongo records written per attempt; ✅ Tests assert success/lockout/failure cases. | -| SEC3.PLG | BLOCKED (2025-10-21) | Security Guild, BE-Auth Plugin | CORE8, SEC3.A (rate limiter) | Ensure lockout responses and rate-limit metadata flow through plugin logs/events (include retry-after).
⛔ Pending AUTH-DPOP-11-001 / AUTH-MTLS-11-002 / PLUGIN-DI-08-001 so limiter telemetry contract matches final authority surface. | ✅ Audit record includes retry-after; ✅ Tests confirm lockout + limiter interplay. | -| SEC4.PLG | DONE (2025-10-12) | Security Guild | SEC4.A (revocation schema) | Provide plugin hooks so revoked users/clients write reasons for revocation bundle export. | ✅ Revocation exporter consumes plugin data; ✅ Tests cover revoked user/client output. | -| SEC5.PLG | BLOCKED (2025-10-21) | Security Guild | SEC5.A (threat model) | Address plugin-specific mitigations (bootstrap user handling, password policy docs) in threat model backlog.
⛔ Final documentation depends on AUTH-DPOP-11-001 / AUTH-MTLS-11-002 / PLUGIN-DI-08-001 outcomes. | ✅ Threat model lists plugin attack surfaces; ✅ Mitigation items filed. | -| PLG4-6.CAPABILITIES | BLOCKED (2025-10-12) | BE-Auth Plugin, Docs Guild | PLG1–PLG3 | Finalise capability metadata exposure, config validation, and developer guide updates; remaining action is Docs polish/diagram export. | ✅ Capability metadata + validation merged; ✅ Plugin guide updated with final copy & diagrams; ✅ Release notes mention new toggles.
⛔ Blocked awaiting Authority rate-limiter stream (CORE8/SEC3) to resume so doc updates reflect final limiter behaviour. | -| PLG7.RFC | REVIEW | BE-Auth Plugin, Security Guild | PLG4 | Socialize LDAP plugin RFC (`docs/rfcs/authority-plugin-ldap.md`) and capture guild feedback. | ✅ Guild review sign-off recorded; ✅ Follow-up issues filed in module boards. | -| PLG6.DIAGRAM | TODO | Docs Guild | PLG6.DOC | Export final sequence/component diagrams for the developer guide and add offline-friendly assets under `docs/assets/authority`. | ✅ Mermaid sources committed; ✅ Rendered SVG/PNG linked from Section 2 + Section 9; ✅ Docs build preview shared with Plugin + Docs guilds. | - -> Update statuses to DOING/DONE/BLOCKED as you make progress. Always run `dotnet test` for touched projects before marking DONE. - -> Remark (2025-10-13, PLG6.DOC/PLG6.DIAGRAM): Security Guild delivered `docs/security/rate-limits.md`; Docs team can lift Section 3 (tuning table + alerts) into the developer guide diagrams when rendering assets. - -> Check-in (2025-10-19): Wave 0A dependencies (AUTH-DPOP-11-001, AUTH-MTLS-11-002, PLUGIN-DI-08-001) still open, so SEC2/SEC3/SEC5 remain in progress without new scope until upstream limiter updates land. +# Team 8 / Plugin Standard Backlog (UTC 2025-10-10) + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| PLG6.DOC | DONE (2025-10-11) | BE-Auth Plugin, Docs Guild | PLG1–PLG5 | Final polish + diagrams for plugin developer guide (AUTHPLUG-DOCS-01-001). | Docs team delivers copy-edit + exported diagrams; PR merged. | +| SEC1.PLG | DONE (2025-10-11) | Security Guild, BE-Auth Plugin | SEC1.A (StellaOps.Cryptography) | Swap Standard plugin hashing to Argon2id via `StellaOps.Cryptography` abstractions; keep PBKDF2 verification for legacy. | ✅ `StandardUserCredentialStore` uses `ICryptoProvider` to hash/check; ✅ Transparent rehash on success; ✅ Unit tests cover tamper + legacy rehash. | +| SEC1.OPT | DONE (2025-10-11) | Security Guild | SEC1.PLG | Expose password hashing knobs in `StandardPluginOptions` (`memoryKiB`, `iterations`, `parallelism`, `algorithm`) with validation. | ✅ Options bound from YAML; ✅ Invalid configs throw; ✅ Docs include tuning guidance. | +| SEC2.PLG | BLOCKED (2025-10-21) | Security Guild, Storage Guild | SEC2.A (audit contract) | Emit audit events from password verification outcomes and persist via `IAuthorityLoginAttemptStore`.
⛔ Waiting on AUTH-DPOP-11-001 / AUTH-MTLS-11-002 / PLUGIN-DI-08-001 to stabilise Authority auth surfaces before final verification + publish. | ✅ Serilog events enriched with subject/client/IP/outcome; ✅ Mongo records written per attempt; ✅ Tests assert success/lockout/failure cases. | +| SEC3.PLG | BLOCKED (2025-10-21) | Security Guild, BE-Auth Plugin | CORE8, SEC3.A (rate limiter) | Ensure lockout responses and rate-limit metadata flow through plugin logs/events (include retry-after).
⛔ Pending AUTH-DPOP-11-001 / AUTH-MTLS-11-002 / PLUGIN-DI-08-001 so limiter telemetry contract matches final authority surface. | ✅ Audit record includes retry-after; ✅ Tests confirm lockout + limiter interplay. | +| SEC4.PLG | DONE (2025-10-12) | Security Guild | SEC4.A (revocation schema) | Provide plugin hooks so revoked users/clients write reasons for revocation bundle export. | ✅ Revocation exporter consumes plugin data; ✅ Tests cover revoked user/client output. | +| SEC5.PLG | BLOCKED (2025-10-21) | Security Guild | SEC5.A (threat model) | Address plugin-specific mitigations (bootstrap user handling, password policy docs) in threat model backlog.
⛔ Final documentation depends on AUTH-DPOP-11-001 / AUTH-MTLS-11-002 / PLUGIN-DI-08-001 outcomes. | ✅ Threat model lists plugin attack surfaces; ✅ Mitigation items filed. | +| PLG4-6.CAPABILITIES | BLOCKED (2025-10-12) | BE-Auth Plugin, Docs Guild | PLG1–PLG3 | Finalise capability metadata exposure, config validation, and developer guide updates; remaining action is Docs polish/diagram export. | ✅ Capability metadata + validation merged; ✅ Plugin guide updated with final copy & diagrams; ✅ Release notes mention new toggles.
⛔ Blocked awaiting Authority rate-limiter stream (CORE8/SEC3) to resume so doc updates reflect final limiter behaviour. | +| PLG7.RFC | REVIEW | BE-Auth Plugin, Security Guild | PLG4 | Socialize LDAP plugin RFC (`docs/rfcs/authority-plugin-ldap.md`) and capture guild feedback. | ✅ Guild review sign-off recorded; ✅ Follow-up issues filed in module boards. | +| PLG6.DIAGRAM | TODO | Docs Guild | PLG6.DOC | Export final sequence/component diagrams for the developer guide and add offline-friendly assets under `docs/assets/authority`. | ✅ Mermaid sources committed; ✅ Rendered SVG/PNG linked from Section 2 + Section 9; ✅ Docs build preview shared with Plugin + Docs guilds. | + +> Update statuses to DOING/DONE/BLOCKED as you make progress. Always run `dotnet test` for touched projects before marking DONE. + +> Remark (2025-10-13, PLG6.DOC/PLG6.DIAGRAM): Security Guild delivered `docs/security/rate-limits.md`; Docs team can lift Section 3 (tuning table + alerts) into the developer guide diagrams when rendering assets. + +> Check-in (2025-10-19): Wave 0A dependencies (AUTH-DPOP-11-001, AUTH-MTLS-11-002, PLUGIN-DI-08-001) still open, so SEC2/SEC3/SEC5 remain in progress without new scope until upstream limiter updates land. diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/AuthorityClientRegistrationTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/AuthorityClientRegistrationTests.cs similarity index 96% rename from src/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/AuthorityClientRegistrationTests.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/AuthorityClientRegistrationTests.cs index 7dbd9d9d..5bd09810 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/AuthorityClientRegistrationTests.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/AuthorityClientRegistrationTests.cs @@ -1,32 +1,32 @@ -using System; -using StellaOps.Authority.Plugins.Abstractions; - -namespace StellaOps.Authority.Plugins.Abstractions.Tests; - -public class AuthorityClientRegistrationTests -{ - [Fact] - public void Constructor_Throws_WhenClientIdMissing() - { - Assert.Throws(() => new AuthorityClientRegistration(string.Empty, false, null, null)); - } - - [Fact] - public void Constructor_RequiresSecret_ForConfidentialClients() - { - Assert.Throws(() => new AuthorityClientRegistration("cli", true, null, null)); - } - - [Fact] - public void WithClientSecret_ReturnsCopy() - { - var registration = new AuthorityClientRegistration("cli", false, null, null, tenant: "Tenant-Alpha"); - - var updated = registration.WithClientSecret("secret"); - - Assert.Equal("cli", updated.ClientId); - Assert.Equal("secret", updated.ClientSecret); - Assert.False(updated.Confidential); - Assert.Equal("tenant-alpha", updated.Tenant); - } -} +using System; +using StellaOps.Authority.Plugins.Abstractions; + +namespace StellaOps.Authority.Plugins.Abstractions.Tests; + +public class AuthorityClientRegistrationTests +{ + [Fact] + public void Constructor_Throws_WhenClientIdMissing() + { + Assert.Throws(() => new AuthorityClientRegistration(string.Empty, false, null, null)); + } + + [Fact] + public void Constructor_RequiresSecret_ForConfidentialClients() + { + Assert.Throws(() => new AuthorityClientRegistration("cli", true, null, null)); + } + + [Fact] + public void WithClientSecret_ReturnsCopy() + { + var registration = new 
AuthorityClientRegistration("cli", false, null, null, tenant: "Tenant-Alpha"); + + var updated = registration.WithClientSecret("secret"); + + Assert.Equal("cli", updated.ClientId); + Assert.Equal("secret", updated.ClientSecret); + Assert.False(updated.Confidential); + Assert.Equal("tenant-alpha", updated.Tenant); + } +} diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/AuthorityCredentialVerificationResultTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/AuthorityCredentialVerificationResultTests.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/AuthorityCredentialVerificationResultTests.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/AuthorityCredentialVerificationResultTests.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/AuthorityIdentityProviderCapabilitiesTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/AuthorityIdentityProviderCapabilitiesTests.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/AuthorityIdentityProviderCapabilitiesTests.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/AuthorityIdentityProviderCapabilitiesTests.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/AuthorityPluginHealthResultTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/AuthorityPluginHealthResultTests.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/AuthorityPluginHealthResultTests.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/AuthorityPluginHealthResultTests.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/AuthorityPluginOperationResultTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/AuthorityPluginOperationResultTests.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/AuthorityPluginOperationResultTests.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/AuthorityPluginOperationResultTests.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/AuthorityUserDescriptorTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/AuthorityUserDescriptorTests.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/AuthorityUserDescriptorTests.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/AuthorityUserDescriptorTests.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/AuthorityUserRegistrationTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/AuthorityUserRegistrationTests.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/AuthorityUserRegistrationTests.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/AuthorityUserRegistrationTests.cs diff --git 
a/src/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/StellaOps.Authority.Plugins.Abstractions.Tests.csproj b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/StellaOps.Authority.Plugins.Abstractions.Tests.csproj similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/StellaOps.Authority.Plugins.Abstractions.Tests.csproj rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions.Tests/StellaOps.Authority.Plugins.Abstractions.Tests.csproj diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/AuthorityClientMetadataKeys.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/AuthorityClientMetadataKeys.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/AuthorityClientMetadataKeys.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/AuthorityClientMetadataKeys.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/AuthorityPluginContracts.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/AuthorityPluginContracts.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/AuthorityPluginContracts.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/AuthorityPluginContracts.cs index 6ad7a6ef..d343c99e 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/AuthorityPluginContracts.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/AuthorityPluginContracts.cs @@ -1,211 +1,211 @@ -using System; -using System.Collections.Generic; -using System.Diagnostics.CodeAnalysis; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.DependencyInjection; - -namespace StellaOps.Authority.Plugins.Abstractions; - -/// -/// Well-known Authority plugin capability identifiers. -/// -public static class AuthorityPluginCapabilities -{ - public const string Password = "password"; - public const string Bootstrap = "bootstrap"; - public const string Mfa = "mfa"; - public const string ClientProvisioning = "clientProvisioning"; -} - -/// -/// Immutable description of an Authority plugin loaded from configuration. -/// -/// Logical name derived from configuration key. -/// Plugin type identifier (used for capability routing). -/// Whether the plugin is enabled. -/// Assembly name without extension. -/// Explicit assembly path override. -/// Capability hints exposed by the plugin. -/// Additional metadata forwarded to plugin implementations. -/// Absolute path to the plugin configuration manifest. -public sealed record AuthorityPluginManifest( - string Name, - string Type, - bool Enabled, - string? AssemblyName, - string? AssemblyPath, - IReadOnlyList Capabilities, - IReadOnlyDictionary Metadata, - string ConfigPath) -{ - /// - /// Determines whether the manifest declares the specified capability. - /// - /// Capability identifier to check. 
- public bool HasCapability(string capability) - { - if (string.IsNullOrWhiteSpace(capability)) - { - return false; - } - - foreach (var entry in Capabilities) - { - if (string.Equals(entry, capability, StringComparison.OrdinalIgnoreCase)) - { - return true; - } - } - - return false; - } -} - -/// -/// Runtime context combining plugin manifest metadata and its bound configuration. -/// -/// Manifest describing the plugin. -/// Root configuration built from the plugin YAML manifest. -public sealed record AuthorityPluginContext( - AuthorityPluginManifest Manifest, - IConfiguration Configuration); - -/// -/// Registry exposing the set of Authority plugins loaded at runtime. -/// -public interface IAuthorityPluginRegistry -{ - IReadOnlyCollection Plugins { get; } - - bool TryGet(string name, [NotNullWhen(true)] out AuthorityPluginContext? context); - - AuthorityPluginContext GetRequired(string name) - { - if (TryGet(name, out var context)) - { - return context; - } - - throw new KeyNotFoundException($"Authority plugin '{name}' is not registered."); - } -} - -/// -/// Registry exposing loaded identity provider plugins and their capabilities. -/// -public interface IAuthorityIdentityProviderRegistry -{ - /// - /// Gets metadata for all registered identity provider plugins. - /// - IReadOnlyCollection Providers { get; } - - /// - /// Gets metadata for identity providers that advertise password support. - /// - IReadOnlyCollection PasswordProviders { get; } - - /// - /// Gets metadata for identity providers that advertise multi-factor authentication support. - /// - IReadOnlyCollection MfaProviders { get; } - - /// - /// Gets metadata for identity providers that advertise client provisioning support. - /// - IReadOnlyCollection ClientProvisioningProviders { get; } - - /// - /// Aggregate capability flags across all registered providers. - /// - AuthorityIdentityProviderCapabilities AggregateCapabilities { get; } - - /// - /// Attempts to resolve identity provider metadata by name. - /// - bool TryGet(string name, [NotNullWhen(true)] out AuthorityIdentityProviderMetadata? metadata); - - /// - /// Resolves identity provider metadata by name or throws when not found. - /// - AuthorityIdentityProviderMetadata GetRequired(string name) - { - if (TryGet(name, out var metadata)) - { - return metadata; - } - - throw new KeyNotFoundException($"Identity provider plugin '{name}' is not registered."); - } - - /// - /// Acquires a scoped handle to the specified identity provider. - /// - /// Logical provider name. - /// Cancellation token. - /// Handle managing the provider instance lifetime. - ValueTask AcquireAsync(string name, CancellationToken cancellationToken); -} - -/// -/// Immutable metadata describing a registered identity provider. -/// -/// Logical provider name from the manifest. -/// Provider type identifier. -/// Capability flags advertised by the provider. -public sealed record AuthorityIdentityProviderMetadata( - string Name, - string Type, - AuthorityIdentityProviderCapabilities Capabilities); - -/// -/// Represents a scoped identity provider instance and manages its disposal. -/// -public sealed class AuthorityIdentityProviderHandle : IAsyncDisposable, IDisposable -{ - private readonly AsyncServiceScope scope; - private bool disposed; - - public AuthorityIdentityProviderHandle(AsyncServiceScope scope, AuthorityIdentityProviderMetadata metadata, IIdentityProviderPlugin provider) - { - this.scope = scope; - Metadata = metadata ?? 
throw new ArgumentNullException(nameof(metadata)); - Provider = provider ?? throw new ArgumentNullException(nameof(provider)); - } - - /// - /// Gets the metadata associated with the provider instance. - /// - public AuthorityIdentityProviderMetadata Metadata { get; } - - /// - /// Gets the active provider instance. - /// - public IIdentityProviderPlugin Provider { get; } - - /// - public void Dispose() - { - if (disposed) - { - return; - } - - disposed = true; - scope.Dispose(); - } - - /// - public async ValueTask DisposeAsync() - { - if (disposed) - { - return; - } - - disposed = true; - await scope.DisposeAsync().ConfigureAwait(false); - } -} +using System; +using System.Collections.Generic; +using System.Diagnostics.CodeAnalysis; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; + +namespace StellaOps.Authority.Plugins.Abstractions; + +/// +/// Well-known Authority plugin capability identifiers. +/// +public static class AuthorityPluginCapabilities +{ + public const string Password = "password"; + public const string Bootstrap = "bootstrap"; + public const string Mfa = "mfa"; + public const string ClientProvisioning = "clientProvisioning"; +} + +/// +/// Immutable description of an Authority plugin loaded from configuration. +/// +/// Logical name derived from configuration key. +/// Plugin type identifier (used for capability routing). +/// Whether the plugin is enabled. +/// Assembly name without extension. +/// Explicit assembly path override. +/// Capability hints exposed by the plugin. +/// Additional metadata forwarded to plugin implementations. +/// Absolute path to the plugin configuration manifest. +public sealed record AuthorityPluginManifest( + string Name, + string Type, + bool Enabled, + string? AssemblyName, + string? AssemblyPath, + IReadOnlyList Capabilities, + IReadOnlyDictionary Metadata, + string ConfigPath) +{ + /// + /// Determines whether the manifest declares the specified capability. + /// + /// Capability identifier to check. + public bool HasCapability(string capability) + { + if (string.IsNullOrWhiteSpace(capability)) + { + return false; + } + + foreach (var entry in Capabilities) + { + if (string.Equals(entry, capability, StringComparison.OrdinalIgnoreCase)) + { + return true; + } + } + + return false; + } +} + +/// +/// Runtime context combining plugin manifest metadata and its bound configuration. +/// +/// Manifest describing the plugin. +/// Root configuration built from the plugin YAML manifest. +public sealed record AuthorityPluginContext( + AuthorityPluginManifest Manifest, + IConfiguration Configuration); + +/// +/// Registry exposing the set of Authority plugins loaded at runtime. +/// +public interface IAuthorityPluginRegistry +{ + IReadOnlyCollection Plugins { get; } + + bool TryGet(string name, [NotNullWhen(true)] out AuthorityPluginContext? context); + + AuthorityPluginContext GetRequired(string name) + { + if (TryGet(name, out var context)) + { + return context; + } + + throw new KeyNotFoundException($"Authority plugin '{name}' is not registered."); + } +} + +/// +/// Registry exposing loaded identity provider plugins and their capabilities. +/// +public interface IAuthorityIdentityProviderRegistry +{ + /// + /// Gets metadata for all registered identity provider plugins. + /// + IReadOnlyCollection Providers { get; } + + /// + /// Gets metadata for identity providers that advertise password support. 
+ /// + IReadOnlyCollection PasswordProviders { get; } + + /// + /// Gets metadata for identity providers that advertise multi-factor authentication support. + /// + IReadOnlyCollection MfaProviders { get; } + + /// + /// Gets metadata for identity providers that advertise client provisioning support. + /// + IReadOnlyCollection ClientProvisioningProviders { get; } + + /// + /// Aggregate capability flags across all registered providers. + /// + AuthorityIdentityProviderCapabilities AggregateCapabilities { get; } + + /// + /// Attempts to resolve identity provider metadata by name. + /// + bool TryGet(string name, [NotNullWhen(true)] out AuthorityIdentityProviderMetadata? metadata); + + /// + /// Resolves identity provider metadata by name or throws when not found. + /// + AuthorityIdentityProviderMetadata GetRequired(string name) + { + if (TryGet(name, out var metadata)) + { + return metadata; + } + + throw new KeyNotFoundException($"Identity provider plugin '{name}' is not registered."); + } + + /// + /// Acquires a scoped handle to the specified identity provider. + /// + /// Logical provider name. + /// Cancellation token. + /// Handle managing the provider instance lifetime. + ValueTask AcquireAsync(string name, CancellationToken cancellationToken); +} + +/// +/// Immutable metadata describing a registered identity provider. +/// +/// Logical provider name from the manifest. +/// Provider type identifier. +/// Capability flags advertised by the provider. +public sealed record AuthorityIdentityProviderMetadata( + string Name, + string Type, + AuthorityIdentityProviderCapabilities Capabilities); + +/// +/// Represents a scoped identity provider instance and manages its disposal. +/// +public sealed class AuthorityIdentityProviderHandle : IAsyncDisposable, IDisposable +{ + private readonly AsyncServiceScope scope; + private bool disposed; + + public AuthorityIdentityProviderHandle(AsyncServiceScope scope, AuthorityIdentityProviderMetadata metadata, IIdentityProviderPlugin provider) + { + this.scope = scope; + Metadata = metadata ?? throw new ArgumentNullException(nameof(metadata)); + Provider = provider ?? throw new ArgumentNullException(nameof(provider)); + } + + /// + /// Gets the metadata associated with the provider instance. + /// + public AuthorityIdentityProviderMetadata Metadata { get; } + + /// + /// Gets the active provider instance. 
+ /// + public IIdentityProviderPlugin Provider { get; } + + /// + public void Dispose() + { + if (disposed) + { + return; + } + + disposed = true; + scope.Dispose(); + } + + /// + public async ValueTask DisposeAsync() + { + if (disposed) + { + return; + } + + disposed = true; + await scope.DisposeAsync().ConfigureAwait(false); + } +} diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/AuthorityPluginRegistrationContext.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/AuthorityPluginRegistrationContext.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/AuthorityPluginRegistrationContext.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/AuthorityPluginRegistrationContext.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/AuthoritySecretHasher.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/AuthoritySecretHasher.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/AuthoritySecretHasher.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/AuthoritySecretHasher.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/IdentityProviderContracts.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/IdentityProviderContracts.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/IdentityProviderContracts.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/IdentityProviderContracts.cs index 0aff6c78..e8392398 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/IdentityProviderContracts.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/IdentityProviderContracts.cs @@ -1,897 +1,897 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Security.Claims; -using System.Threading; -using System.Threading.Tasks; -using StellaOps.Cryptography.Audit; -using StellaOps.Auth.Abstractions; - -namespace StellaOps.Authority.Plugins.Abstractions; - -/// -/// Describes feature support advertised by an identity provider plugin. -/// -public sealed record AuthorityIdentityProviderCapabilities( - bool SupportsPassword, - bool SupportsMfa, - bool SupportsClientProvisioning) -{ - /// - /// Builds capabilities metadata from a list of capability identifiers. - /// - public static AuthorityIdentityProviderCapabilities FromCapabilities(IEnumerable capabilities) - { - if (capabilities is null) - { - return new AuthorityIdentityProviderCapabilities(false, false, false); - } - - var seen = new HashSet(StringComparer.OrdinalIgnoreCase); - foreach (var entry in capabilities) - { - if (string.IsNullOrWhiteSpace(entry)) - { - continue; - } - - seen.Add(entry.Trim()); - } - - return new AuthorityIdentityProviderCapabilities( - SupportsPassword: seen.Contains(AuthorityPluginCapabilities.Password), - SupportsMfa: seen.Contains(AuthorityPluginCapabilities.Mfa), - SupportsClientProvisioning: seen.Contains(AuthorityPluginCapabilities.ClientProvisioning)); - } -} - -/// -/// Represents a loaded Authority identity provider plugin instance. -/// -public interface IIdentityProviderPlugin -{ - /// - /// Gets the logical name of the plugin instance (matches the manifest key). 
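A minimal sketch of consuming `IAuthorityIdentityProviderRegistry` and the scoped `AuthorityIdentityProviderHandle` defined above. The patch text strips generic type arguments, so the return type of `AcquireAsync` is assumed to be `ValueTask<AuthorityIdentityProviderHandle>`; the helper name is illustrative.

```csharp
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Authority.Plugins.Abstractions;

internal static class ProviderCapabilityProbe
{
    // Assumption: AcquireAsync returns ValueTask<AuthorityIdentityProviderHandle>.
    public static async Task<bool> SupportsPasswordAsync(
        IAuthorityIdentityProviderRegistry registry,
        string providerName,
        CancellationToken cancellationToken)
    {
        // The handle owns the provider's service scope; await using disposes it deterministically.
        await using AuthorityIdentityProviderHandle handle =
            await registry.AcquireAsync(providerName, cancellationToken);

        return handle.Provider.Capabilities.SupportsPassword;
    }
}
```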
- /// - string Name { get; } - - /// - /// Gets the plugin type identifier (e.g. standard, ldap). - /// - string Type { get; } - - /// - /// Gets the plugin context comprising the manifest and bound configuration. - /// - AuthorityPluginContext Context { get; } - - /// - /// Gets the credential store responsible for authenticator validation and user provisioning. - /// - IUserCredentialStore Credentials { get; } - - /// - /// Gets the claims enricher applied to issued principals. - /// - IClaimsEnricher ClaimsEnricher { get; } - - /// - /// Gets the optional client provisioning store exposed by the plugin. - /// - IClientProvisioningStore? ClientProvisioning { get; } - - /// - /// Gets the capability metadata advertised by the plugin. - /// - AuthorityIdentityProviderCapabilities Capabilities { get; } - - /// - /// Evaluates the health of the plugin and backing data stores. - /// - /// Token used to cancel the operation. - /// Health result describing the plugin status. - ValueTask CheckHealthAsync(CancellationToken cancellationToken); -} - -/// -/// Supplies operations for validating credentials and managing user records. -/// -public interface IUserCredentialStore -{ - /// - /// Verifies the supplied username/password combination. - /// - ValueTask VerifyPasswordAsync( - string username, - string password, - CancellationToken cancellationToken); - - /// - /// Creates or updates a user record based on the supplied registration data. - /// - ValueTask> UpsertUserAsync( - AuthorityUserRegistration registration, - CancellationToken cancellationToken); - - /// - /// Attempts to resolve a user descriptor by its canonical subject identifier. - /// - ValueTask FindBySubjectAsync( - string subjectId, - CancellationToken cancellationToken); -} - -/// -/// Enriches issued principals with additional claims based on plugin-specific rules. -/// -public interface IClaimsEnricher -{ - /// - /// Adds or adjusts claims on the provided identity. - /// - ValueTask EnrichAsync( - ClaimsIdentity identity, - AuthorityClaimsEnrichmentContext context, - CancellationToken cancellationToken); -} - -/// -/// Manages client (machine-to-machine) provisioning for Authority. -/// -public interface IClientProvisioningStore -{ - /// - /// Creates or updates a client registration. - /// - ValueTask> CreateOrUpdateAsync( - AuthorityClientRegistration registration, - CancellationToken cancellationToken); - - /// - /// Attempts to resolve a client descriptor by its identifier. - /// - ValueTask FindByClientIdAsync( - string clientId, - CancellationToken cancellationToken); - - /// - /// Removes a client registration. - /// - ValueTask DeleteAsync( - string clientId, - CancellationToken cancellationToken); -} - -/// -/// Represents the health state of a plugin or backing store. -/// -public enum AuthorityPluginHealthStatus -{ - /// - /// Plugin is healthy and operational. - /// - Healthy, - - /// - /// Plugin is degraded but still usable (e.g. transient connectivity issues). - /// - Degraded, - - /// - /// Plugin is unavailable and cannot service requests. - /// - Unavailable -} - -/// -/// Result of a plugin health probe. -/// -public sealed record AuthorityPluginHealthResult -{ - private AuthorityPluginHealthResult( - AuthorityPluginHealthStatus status, - string? message, - IReadOnlyDictionary details) - { - Status = status; - Message = message; - Details = details; - } - - /// - /// Gets the overall status of the plugin. 
- /// - public AuthorityPluginHealthStatus Status { get; } - - /// - /// Gets an optional human-readable status description. - /// - public string? Message { get; } - - /// - /// Gets optional structured details for diagnostics. - /// - public IReadOnlyDictionary Details { get; } - - /// - /// Creates a healthy result. - /// - public static AuthorityPluginHealthResult Healthy( - string? message = null, - IReadOnlyDictionary? details = null) - => new(AuthorityPluginHealthStatus.Healthy, message, details ?? EmptyDetails); - - /// - /// Creates a degraded result. - /// - public static AuthorityPluginHealthResult Degraded( - string? message = null, - IReadOnlyDictionary? details = null) - => new(AuthorityPluginHealthStatus.Degraded, message, details ?? EmptyDetails); - - /// - /// Creates an unavailable result. - /// - public static AuthorityPluginHealthResult Unavailable( - string? message = null, - IReadOnlyDictionary? details = null) - => new(AuthorityPluginHealthStatus.Unavailable, message, details ?? EmptyDetails); - - private static readonly IReadOnlyDictionary EmptyDetails = - new Dictionary(StringComparer.OrdinalIgnoreCase); -} - -/// -/// Describes a canonical Authority user surfaced by a plugin. -/// -public sealed record AuthorityUserDescriptor -{ - /// - /// Initialises a new user descriptor. - /// - public AuthorityUserDescriptor( - string subjectId, - string username, - string? displayName, - bool requiresPasswordReset, - IReadOnlyCollection? roles = null, - IReadOnlyDictionary? attributes = null) - { - SubjectId = ValidateRequired(subjectId, nameof(subjectId)); - Username = ValidateRequired(username, nameof(username)); - DisplayName = displayName; - RequiresPasswordReset = requiresPasswordReset; - Roles = roles is null ? Array.Empty() : roles.ToArray(); - Attributes = attributes is null - ? new Dictionary(StringComparer.OrdinalIgnoreCase) - : new Dictionary(attributes, StringComparer.OrdinalIgnoreCase); - } - - /// - /// Stable subject identifier for token issuance. - /// - public string SubjectId { get; } - - /// - /// Canonical username (case-normalised). - /// - public string Username { get; } - - /// - /// Optional human-friendly display name. - /// - public string? DisplayName { get; } - - /// - /// Indicates whether the user must reset their password. - /// - public bool RequiresPasswordReset { get; } - - /// - /// Collection of role identifiers associated with the user. - /// - public IReadOnlyCollection Roles { get; } - - /// - /// Arbitrary plugin-defined attributes (used by claims enricher). - /// - public IReadOnlyDictionary Attributes { get; } - - private static string ValidateRequired(string value, string paramName) - => string.IsNullOrWhiteSpace(value) - ? throw new ArgumentException("Value cannot be null or whitespace.", paramName) - : value; -} - -/// -/// Outcome of a credential verification attempt. -/// -public sealed record AuthorityCredentialVerificationResult -{ - private AuthorityCredentialVerificationResult( - bool succeeded, - AuthorityUserDescriptor? user, - AuthorityCredentialFailureCode? failureCode, - string? message, - TimeSpan? retryAfter, - IReadOnlyList auditProperties) - { - Succeeded = succeeded; - User = user; - FailureCode = failureCode; - Message = message; - RetryAfter = retryAfter; - AuditProperties = auditProperties ?? Array.Empty(); - } - - /// - /// Indicates whether the verification succeeded. - /// - public bool Succeeded { get; } - - /// - /// Resolved user descriptor when successful. - /// - public AuthorityUserDescriptor? 
User { get; } - - /// - /// Failure classification when unsuccessful. - /// - public AuthorityCredentialFailureCode? FailureCode { get; } - - /// - /// Optional message describing the outcome. - /// - public string? Message { get; } - - /// - /// Optional suggested retry interval (e.g. for lockouts). - /// - public TimeSpan? RetryAfter { get; } - - /// - /// Additional audit properties emitted by the credential store. - /// - public IReadOnlyList AuditProperties { get; } - - /// - /// Builds a successful verification result. - /// - public static AuthorityCredentialVerificationResult Success( - AuthorityUserDescriptor user, - string? message = null, - IReadOnlyList? auditProperties = null) - => new(true, user ?? throw new ArgumentNullException(nameof(user)), null, message, null, auditProperties ?? Array.Empty()); - - /// - /// Builds a failed verification result. - /// - public static AuthorityCredentialVerificationResult Failure( - AuthorityCredentialFailureCode failureCode, - string? message = null, - TimeSpan? retryAfter = null, - IReadOnlyList? auditProperties = null) - => new(false, null, failureCode, message, retryAfter, auditProperties ?? Array.Empty()); -} - -/// -/// Classifies credential verification failures. -/// -public enum AuthorityCredentialFailureCode -{ - /// - /// Username/password combination is invalid. - /// - InvalidCredentials, - - /// - /// Account is locked out (retry after a specified duration). - /// - LockedOut, - - /// - /// Password has expired and must be reset. - /// - PasswordExpired, - - /// - /// User must reset password before proceeding. - /// - RequiresPasswordReset, - - /// - /// Additional multi-factor authentication is required. - /// - RequiresMfa, - - /// - /// Unexpected failure occurred (see message for details). - /// - UnknownError -} - -/// -/// Represents a user provisioning request. -/// -public sealed record AuthorityUserRegistration -{ - /// - /// Initialises a new registration. - /// - public AuthorityUserRegistration( - string username, - string? password, - string? displayName, - string? email, - bool requirePasswordReset, - IReadOnlyCollection? roles = null, - IReadOnlyDictionary? attributes = null) - { - Username = ValidateRequired(username, nameof(username)); - Password = password; - DisplayName = displayName; - Email = email; - RequirePasswordReset = requirePasswordReset; - Roles = roles is null ? Array.Empty() : roles.ToArray(); - Attributes = attributes is null - ? new Dictionary(StringComparer.OrdinalIgnoreCase) - : new Dictionary(attributes, StringComparer.OrdinalIgnoreCase); - } - - /// - /// Canonical username (unique). - /// - public string Username { get; } - - /// - /// Optional raw password (hashed by plugin). - /// - public string? Password { get; init; } - - /// - /// Optional human-friendly display name. - /// - public string? DisplayName { get; } - - /// - /// Optional contact email. - /// - public string? Email { get; } - - /// - /// Indicates whether the user must reset their password at next login. - /// - public bool RequirePasswordReset { get; } - - /// - /// Associated roles. - /// - public IReadOnlyCollection Roles { get; } - - /// - /// Plugin-defined attributes. - /// - public IReadOnlyDictionary Attributes { get; } - - /// - /// Creates a copy with the provided password while preserving other fields. - /// - public AuthorityUserRegistration WithPassword(string? 
password) - => new(Username, password, DisplayName, Email, RequirePasswordReset, Roles, Attributes); - - private static string ValidateRequired(string value, string paramName) - => string.IsNullOrWhiteSpace(value) - ? throw new ArgumentException("Value cannot be null or whitespace.", paramName) - : value; -} - -/// -/// Generic operation result utilised by plugins. -/// -public sealed record AuthorityPluginOperationResult -{ - private AuthorityPluginOperationResult(bool succeeded, string? errorCode, string? message) - { - Succeeded = succeeded; - ErrorCode = errorCode; - Message = message; - } - - /// - /// Indicates whether the operation succeeded. - /// - public bool Succeeded { get; } - - /// - /// Machine-readable error code (populated on failure). - /// - public string? ErrorCode { get; } - - /// - /// Optional human-readable message. - /// - public string? Message { get; } - - /// - /// Returns a successful result. - /// - public static AuthorityPluginOperationResult Success(string? message = null) - => new(true, null, message); - - /// - /// Returns a failed result with the supplied error code. - /// - public static AuthorityPluginOperationResult Failure(string errorCode, string? message = null) - => new(false, ValidateErrorCode(errorCode), message); - - internal static string ValidateErrorCode(string errorCode) - => string.IsNullOrWhiteSpace(errorCode) - ? throw new ArgumentException("Error code is required for failures.", nameof(errorCode)) - : errorCode; -} - -/// -/// Generic operation result that returns a value. -/// -public sealed record AuthorityPluginOperationResult -{ - private AuthorityPluginOperationResult( - bool succeeded, - TValue? value, - string? errorCode, - string? message) - { - Succeeded = succeeded; - Value = value; - ErrorCode = errorCode; - Message = message; - } - - /// - /// Indicates whether the operation succeeded. - /// - public bool Succeeded { get; } - - /// - /// Returned value when successful. - /// - public TValue? Value { get; } - - /// - /// Machine-readable error code (on failure). - /// - public string? ErrorCode { get; } - - /// - /// Optional human-readable message. - /// - public string? Message { get; } - - /// - /// Returns a successful result with the provided value. - /// - public static AuthorityPluginOperationResult Success(TValue value, string? message = null) - => new(true, value, null, message); - - /// - /// Returns a successful result without a value (defaults to default). - /// - public static AuthorityPluginOperationResult Success(string? message = null) - => new(true, default, null, message); - - /// - /// Returns a failed result with the supplied error code. - /// - public static AuthorityPluginOperationResult Failure(string errorCode, string? message = null) - => new(false, default, AuthorityPluginOperationResult.ValidateErrorCode(errorCode), message); -} - -/// -/// Context supplied to claims enrichment routines. -/// -public sealed class AuthorityClaimsEnrichmentContext -{ - private readonly Dictionary items; - - /// - /// Initialises a new context instance. - /// - public AuthorityClaimsEnrichmentContext( - AuthorityPluginContext plugin, - AuthorityUserDescriptor? user, - AuthorityClientDescriptor? client) - { - Plugin = plugin ?? throw new ArgumentNullException(nameof(plugin)); - User = user; - Client = client; - items = new Dictionary(StringComparer.OrdinalIgnoreCase); - } - - /// - /// Gets the plugin context associated with the principal. 
- /// - public AuthorityPluginContext Plugin { get; } - - /// - /// Gets the user descriptor when available. - /// - public AuthorityUserDescriptor? User { get; } - - /// - /// Gets the client descriptor when available. - /// - public AuthorityClientDescriptor? Client { get; } - - /// - /// Extensible bag for plugin-specific data passed between enrichment stages. - /// - public IDictionary Items => items; -} - -/// -/// Represents a registered OAuth/OpenID client. -/// -public sealed record AuthorityClientDescriptor -{ - public AuthorityClientDescriptor( - string clientId, - string? displayName, - bool confidential, - IReadOnlyCollection? allowedGrantTypes = null, - IReadOnlyCollection? allowedScopes = null, - IReadOnlyCollection? allowedAudiences = null, - IReadOnlyCollection? redirectUris = null, - IReadOnlyCollection? postLogoutRedirectUris = null, - IReadOnlyDictionary? properties = null) - { - ClientId = ValidateRequired(clientId, nameof(clientId)); - DisplayName = displayName; - Confidential = confidential; - AllowedGrantTypes = Normalize(allowedGrantTypes); - AllowedScopes = NormalizeScopes(allowedScopes); - AllowedAudiences = Normalize(allowedAudiences); - RedirectUris = redirectUris is null ? Array.Empty() : redirectUris.ToArray(); - PostLogoutRedirectUris = postLogoutRedirectUris is null ? Array.Empty() : postLogoutRedirectUris.ToArray(); - var propertyBag = properties is null - ? new Dictionary(StringComparer.OrdinalIgnoreCase) - : new Dictionary(properties, StringComparer.OrdinalIgnoreCase); - Tenant = propertyBag.TryGetValue(AuthorityClientMetadataKeys.Tenant, out var tenantValue) - ? AuthorityClientRegistration.NormalizeTenantValue(tenantValue) - : null; - var normalizedProject = propertyBag.TryGetValue(AuthorityClientMetadataKeys.Project, out var projectValue) - ? AuthorityClientRegistration.NormalizeProjectValue(projectValue) - : null; - Project = normalizedProject ?? StellaOpsTenancyDefaults.AnyProject; - propertyBag[AuthorityClientMetadataKeys.Project] = Project; - Properties = propertyBag; - } - - public string ClientId { get; } - public string? DisplayName { get; } - public bool Confidential { get; } - public IReadOnlyCollection AllowedGrantTypes { get; } - public IReadOnlyCollection AllowedScopes { get; } - public IReadOnlyCollection AllowedAudiences { get; } - public IReadOnlyCollection RedirectUris { get; } - public IReadOnlyCollection PostLogoutRedirectUris { get; } - public string? Tenant { get; } - public string? Project { get; } - public IReadOnlyDictionary Properties { get; } - - private static IReadOnlyCollection Normalize(IReadOnlyCollection? values) - => values is null || values.Count == 0 - ? Array.Empty() - : values - .Where(value => !string.IsNullOrWhiteSpace(value)) - .Select(value => value.Trim()) - .Distinct(StringComparer.Ordinal) - .ToArray(); - - private static IReadOnlyCollection NormalizeScopes(IReadOnlyCollection? values) - { - if (values is null || values.Count == 0) - { - return Array.Empty(); - } - - var unique = new HashSet(StringComparer.Ordinal); - - foreach (var value in values) - { - var normalized = StellaOpsScopes.Normalize(value); - if (normalized is null) - { - continue; - } - - unique.Add(normalized); - } - - if (unique.Count == 0) - { - return Array.Empty(); - } - - return unique.OrderBy(static scope => scope, StringComparer.Ordinal).ToArray(); - } - - private static string ValidateRequired(string value, string paramName) - => string.IsNullOrWhiteSpace(value) - ? 
throw new ArgumentException("Value cannot be null or whitespace.", paramName) - : value; -} - -public sealed record AuthorityClientCertificateBindingRegistration -{ - public AuthorityClientCertificateBindingRegistration( - string thumbprint, - string? serialNumber = null, - string? subject = null, - string? issuer = null, - IReadOnlyCollection? subjectAlternativeNames = null, - DateTimeOffset? notBefore = null, - DateTimeOffset? notAfter = null, - string? label = null) - { - Thumbprint = NormalizeThumbprint(thumbprint); - SerialNumber = Normalize(serialNumber); - Subject = Normalize(subject); - Issuer = Normalize(issuer); - SubjectAlternativeNames = subjectAlternativeNames is null || subjectAlternativeNames.Count == 0 - ? Array.Empty() - : subjectAlternativeNames - .Where(value => !string.IsNullOrWhiteSpace(value)) - .Select(value => value.Trim()) - .Distinct(StringComparer.OrdinalIgnoreCase) - .ToArray(); - NotBefore = notBefore; - NotAfter = notAfter; - Label = Normalize(label); - } - - public string Thumbprint { get; } - public string? SerialNumber { get; } - public string? Subject { get; } - public string? Issuer { get; } - public IReadOnlyCollection SubjectAlternativeNames { get; } - public DateTimeOffset? NotBefore { get; } - public DateTimeOffset? NotAfter { get; } - public string? Label { get; } - - private static string NormalizeThumbprint(string value) - { - if (string.IsNullOrWhiteSpace(value)) - { - throw new ArgumentException("Thumbprint is required.", nameof(value)); - } - - return value - .Replace(":", string.Empty, StringComparison.Ordinal) - .Replace(" ", string.Empty, StringComparison.Ordinal) - .ToUpperInvariant(); - } - - private static string? Normalize(string? value) - => string.IsNullOrWhiteSpace(value) ? null : value.Trim(); -} - -public sealed record AuthorityClientRegistration -{ - public AuthorityClientRegistration( - string clientId, - bool confidential, - string? displayName, - string? clientSecret, - IReadOnlyCollection? allowedGrantTypes = null, - IReadOnlyCollection? allowedScopes = null, - IReadOnlyCollection? allowedAudiences = null, - IReadOnlyCollection? redirectUris = null, - IReadOnlyCollection? postLogoutRedirectUris = null, - string? tenant = null, - string? project = null, - IReadOnlyDictionary? properties = null, - IReadOnlyCollection? certificateBindings = null) - { - ClientId = ValidateRequired(clientId, nameof(clientId)); - Confidential = confidential; - DisplayName = displayName; - ClientSecret = confidential - ? ValidateRequired(clientSecret ?? string.Empty, nameof(clientSecret)) - : clientSecret; - AllowedGrantTypes = Normalize(allowedGrantTypes); - AllowedScopes = NormalizeScopes(allowedScopes); - AllowedAudiences = Normalize(allowedAudiences); - RedirectUris = redirectUris is null ? Array.Empty() : redirectUris.ToArray(); - PostLogoutRedirectUris = postLogoutRedirectUris is null ? Array.Empty() : postLogoutRedirectUris.ToArray(); - Tenant = NormalizeTenantValue(tenant); - var propertyBag = properties is null - ? new Dictionary(StringComparer.OrdinalIgnoreCase) - : new Dictionary(properties, StringComparer.OrdinalIgnoreCase); - var normalizedProject = NormalizeProjectValue(project ?? (propertyBag.TryGetValue(AuthorityClientMetadataKeys.Project, out var projectValue) ? projectValue : null)); - Project = normalizedProject ?? StellaOpsTenancyDefaults.AnyProject; - propertyBag[AuthorityClientMetadataKeys.Project] = Project; - Properties = propertyBag; - CertificateBindings = certificateBindings is null - ? 
Array.Empty() - : certificateBindings.ToArray(); - } - - public string ClientId { get; } - public bool Confidential { get; } - public string? DisplayName { get; } - public string? ClientSecret { get; init; } - public IReadOnlyCollection AllowedGrantTypes { get; } - public IReadOnlyCollection AllowedScopes { get; } - public IReadOnlyCollection AllowedAudiences { get; } - public IReadOnlyCollection RedirectUris { get; } - public IReadOnlyCollection PostLogoutRedirectUris { get; } - public string? Tenant { get; } - public string? Project { get; } - public IReadOnlyDictionary Properties { get; } - public IReadOnlyCollection CertificateBindings { get; } - - public AuthorityClientRegistration WithClientSecret(string? clientSecret) - => new(ClientId, Confidential, DisplayName, clientSecret, AllowedGrantTypes, AllowedScopes, AllowedAudiences, RedirectUris, PostLogoutRedirectUris, Tenant, Project, Properties, CertificateBindings); - - private static IReadOnlyCollection Normalize(IReadOnlyCollection? values) - => values is null || values.Count == 0 - ? Array.Empty() - : values - .Where(value => !string.IsNullOrWhiteSpace(value)) - .Select(value => value.Trim()) - .Distinct(StringComparer.Ordinal) - .ToArray(); - - private static IReadOnlyCollection NormalizeScopes(IReadOnlyCollection? values) - { - if (values is null || values.Count == 0) - { - return Array.Empty(); - } - - var unique = new HashSet(StringComparer.Ordinal); - - foreach (var value in values) - { - var normalized = StellaOpsScopes.Normalize(value); - if (normalized is null) - { - continue; - } - - unique.Add(normalized); - } - - if (unique.Count == 0) - { - return Array.Empty(); - } - - return unique.OrderBy(static scope => scope, StringComparer.Ordinal).ToArray(); - } - - internal static string? NormalizeTenantValue(string? value) - { - if (string.IsNullOrWhiteSpace(value)) - { - return null; - } - - return value.Trim().ToLowerInvariant(); - } - - internal static string? NormalizeProjectValue(string? value) - { - if (string.IsNullOrWhiteSpace(value)) - { - return null; - } - - return value.Trim().ToLowerInvariant(); - } - - private static string ValidateRequired(string value, string paramName) - => string.IsNullOrWhiteSpace(value) - ? throw new ArgumentException("Value cannot be null or whitespace.", paramName) - : value; -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Security.Claims; +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Cryptography.Audit; +using StellaOps.Auth.Abstractions; + +namespace StellaOps.Authority.Plugins.Abstractions; + +/// +/// Describes feature support advertised by an identity provider plugin. +/// +public sealed record AuthorityIdentityProviderCapabilities( + bool SupportsPassword, + bool SupportsMfa, + bool SupportsClientProvisioning) +{ + /// + /// Builds capabilities metadata from a list of capability identifiers. 
+ /// + public static AuthorityIdentityProviderCapabilities FromCapabilities(IEnumerable capabilities) + { + if (capabilities is null) + { + return new AuthorityIdentityProviderCapabilities(false, false, false); + } + + var seen = new HashSet(StringComparer.OrdinalIgnoreCase); + foreach (var entry in capabilities) + { + if (string.IsNullOrWhiteSpace(entry)) + { + continue; + } + + seen.Add(entry.Trim()); + } + + return new AuthorityIdentityProviderCapabilities( + SupportsPassword: seen.Contains(AuthorityPluginCapabilities.Password), + SupportsMfa: seen.Contains(AuthorityPluginCapabilities.Mfa), + SupportsClientProvisioning: seen.Contains(AuthorityPluginCapabilities.ClientProvisioning)); + } +} + +/// +/// Represents a loaded Authority identity provider plugin instance. +/// +public interface IIdentityProviderPlugin +{ + /// + /// Gets the logical name of the plugin instance (matches the manifest key). + /// + string Name { get; } + + /// + /// Gets the plugin type identifier (e.g. standard, ldap). + /// + string Type { get; } + + /// + /// Gets the plugin context comprising the manifest and bound configuration. + /// + AuthorityPluginContext Context { get; } + + /// + /// Gets the credential store responsible for authenticator validation and user provisioning. + /// + IUserCredentialStore Credentials { get; } + + /// + /// Gets the claims enricher applied to issued principals. + /// + IClaimsEnricher ClaimsEnricher { get; } + + /// + /// Gets the optional client provisioning store exposed by the plugin. + /// + IClientProvisioningStore? ClientProvisioning { get; } + + /// + /// Gets the capability metadata advertised by the plugin. + /// + AuthorityIdentityProviderCapabilities Capabilities { get; } + + /// + /// Evaluates the health of the plugin and backing data stores. + /// + /// Token used to cancel the operation. + /// Health result describing the plugin status. + ValueTask CheckHealthAsync(CancellationToken cancellationToken); +} + +/// +/// Supplies operations for validating credentials and managing user records. +/// +public interface IUserCredentialStore +{ + /// + /// Verifies the supplied username/password combination. + /// + ValueTask VerifyPasswordAsync( + string username, + string password, + CancellationToken cancellationToken); + + /// + /// Creates or updates a user record based on the supplied registration data. + /// + ValueTask> UpsertUserAsync( + AuthorityUserRegistration registration, + CancellationToken cancellationToken); + + /// + /// Attempts to resolve a user descriptor by its canonical subject identifier. + /// + ValueTask FindBySubjectAsync( + string subjectId, + CancellationToken cancellationToken); +} + +/// +/// Enriches issued principals with additional claims based on plugin-specific rules. +/// +public interface IClaimsEnricher +{ + /// + /// Adds or adjusts claims on the provided identity. + /// + ValueTask EnrichAsync( + ClaimsIdentity identity, + AuthorityClaimsEnrichmentContext context, + CancellationToken cancellationToken); +} + +/// +/// Manages client (machine-to-machine) provisioning for Authority. +/// +public interface IClientProvisioningStore +{ + /// + /// Creates or updates a client registration. + /// + ValueTask> CreateOrUpdateAsync( + AuthorityClientRegistration registration, + CancellationToken cancellationToken); + + /// + /// Attempts to resolve a client descriptor by its identifier. + /// + ValueTask FindByClientIdAsync( + string clientId, + CancellationToken cancellationToken); + + /// + /// Removes a client registration. 
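A short sketch of `AuthorityIdentityProviderCapabilities.FromCapabilities`, assuming the stripped parameter type is `IEnumerable<string>`. Capability matching is case-insensitive and whitespace-trimmed, per the implementation above.

```csharp
using StellaOps.Authority.Plugins.Abstractions;

// Assumption: FromCapabilities takes IEnumerable<string>.
var capabilities = AuthorityIdentityProviderCapabilities.FromCapabilities(
    new[] { AuthorityPluginCapabilities.Password, AuthorityPluginCapabilities.ClientProvisioning });

// capabilities.SupportsPassword            == true
// capabilities.SupportsMfa                 == false
// capabilities.SupportsClientProvisioning  == true
```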
+ /// + ValueTask DeleteAsync( + string clientId, + CancellationToken cancellationToken); +} + +/// +/// Represents the health state of a plugin or backing store. +/// +public enum AuthorityPluginHealthStatus +{ + /// + /// Plugin is healthy and operational. + /// + Healthy, + + /// + /// Plugin is degraded but still usable (e.g. transient connectivity issues). + /// + Degraded, + + /// + /// Plugin is unavailable and cannot service requests. + /// + Unavailable +} + +/// +/// Result of a plugin health probe. +/// +public sealed record AuthorityPluginHealthResult +{ + private AuthorityPluginHealthResult( + AuthorityPluginHealthStatus status, + string? message, + IReadOnlyDictionary details) + { + Status = status; + Message = message; + Details = details; + } + + /// + /// Gets the overall status of the plugin. + /// + public AuthorityPluginHealthStatus Status { get; } + + /// + /// Gets an optional human-readable status description. + /// + public string? Message { get; } + + /// + /// Gets optional structured details for diagnostics. + /// + public IReadOnlyDictionary Details { get; } + + /// + /// Creates a healthy result. + /// + public static AuthorityPluginHealthResult Healthy( + string? message = null, + IReadOnlyDictionary? details = null) + => new(AuthorityPluginHealthStatus.Healthy, message, details ?? EmptyDetails); + + /// + /// Creates a degraded result. + /// + public static AuthorityPluginHealthResult Degraded( + string? message = null, + IReadOnlyDictionary? details = null) + => new(AuthorityPluginHealthStatus.Degraded, message, details ?? EmptyDetails); + + /// + /// Creates an unavailable result. + /// + public static AuthorityPluginHealthResult Unavailable( + string? message = null, + IReadOnlyDictionary? details = null) + => new(AuthorityPluginHealthStatus.Unavailable, message, details ?? EmptyDetails); + + private static readonly IReadOnlyDictionary EmptyDetails = + new Dictionary(StringComparer.OrdinalIgnoreCase); +} + +/// +/// Describes a canonical Authority user surfaced by a plugin. +/// +public sealed record AuthorityUserDescriptor +{ + /// + /// Initialises a new user descriptor. + /// + public AuthorityUserDescriptor( + string subjectId, + string username, + string? displayName, + bool requiresPasswordReset, + IReadOnlyCollection? roles = null, + IReadOnlyDictionary? attributes = null) + { + SubjectId = ValidateRequired(subjectId, nameof(subjectId)); + Username = ValidateRequired(username, nameof(username)); + DisplayName = displayName; + RequiresPasswordReset = requiresPasswordReset; + Roles = roles is null ? Array.Empty() : roles.ToArray(); + Attributes = attributes is null + ? new Dictionary(StringComparer.OrdinalIgnoreCase) + : new Dictionary(attributes, StringComparer.OrdinalIgnoreCase); + } + + /// + /// Stable subject identifier for token issuance. + /// + public string SubjectId { get; } + + /// + /// Canonical username (case-normalised). + /// + public string Username { get; } + + /// + /// Optional human-friendly display name. + /// + public string? DisplayName { get; } + + /// + /// Indicates whether the user must reset their password. + /// + public bool RequiresPasswordReset { get; } + + /// + /// Collection of role identifiers associated with the user. + /// + public IReadOnlyCollection Roles { get; } + + /// + /// Arbitrary plugin-defined attributes (used by claims enricher). + /// + public IReadOnlyDictionary Attributes { get; } + + private static string ValidateRequired(string value, string paramName) + => string.IsNullOrWhiteSpace(value) + ? 
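A sketch of a plugin health probe built on the `AuthorityPluginHealthResult` factories above. The dictionary type parameters are stripped in the patch text and are assumed here to be `<string, string>`; the detail keys and message are illustrative.

```csharp
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Authority.Plugins.Abstractions;

internal static class HealthProbeSample
{
    public static ValueTask<AuthorityPluginHealthResult> CheckHealthAsync(CancellationToken cancellationToken)
    {
        // Assumption: details are IReadOnlyDictionary<string, string>.
        var details = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
        {
            ["store"] = "ldap",        // illustrative detail keys/values
            ["latencyMs"] = "250",
        };

        // Degraded: usable but impaired; Unavailable would signal the plugin cannot serve requests.
        return ValueTask.FromResult(
            AuthorityPluginHealthResult.Degraded("Directory latency above threshold.", details));
    }
}
```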
throw new ArgumentException("Value cannot be null or whitespace.", paramName) + : value; +} + +/// +/// Outcome of a credential verification attempt. +/// +public sealed record AuthorityCredentialVerificationResult +{ + private AuthorityCredentialVerificationResult( + bool succeeded, + AuthorityUserDescriptor? user, + AuthorityCredentialFailureCode? failureCode, + string? message, + TimeSpan? retryAfter, + IReadOnlyList auditProperties) + { + Succeeded = succeeded; + User = user; + FailureCode = failureCode; + Message = message; + RetryAfter = retryAfter; + AuditProperties = auditProperties ?? Array.Empty(); + } + + /// + /// Indicates whether the verification succeeded. + /// + public bool Succeeded { get; } + + /// + /// Resolved user descriptor when successful. + /// + public AuthorityUserDescriptor? User { get; } + + /// + /// Failure classification when unsuccessful. + /// + public AuthorityCredentialFailureCode? FailureCode { get; } + + /// + /// Optional message describing the outcome. + /// + public string? Message { get; } + + /// + /// Optional suggested retry interval (e.g. for lockouts). + /// + public TimeSpan? RetryAfter { get; } + + /// + /// Additional audit properties emitted by the credential store. + /// + public IReadOnlyList AuditProperties { get; } + + /// + /// Builds a successful verification result. + /// + public static AuthorityCredentialVerificationResult Success( + AuthorityUserDescriptor user, + string? message = null, + IReadOnlyList? auditProperties = null) + => new(true, user ?? throw new ArgumentNullException(nameof(user)), null, message, null, auditProperties ?? Array.Empty()); + + /// + /// Builds a failed verification result. + /// + public static AuthorityCredentialVerificationResult Failure( + AuthorityCredentialFailureCode failureCode, + string? message = null, + TimeSpan? retryAfter = null, + IReadOnlyList? auditProperties = null) + => new(false, null, failureCode, message, retryAfter, auditProperties ?? Array.Empty()); +} + +/// +/// Classifies credential verification failures. +/// +public enum AuthorityCredentialFailureCode +{ + /// + /// Username/password combination is invalid. + /// + InvalidCredentials, + + /// + /// Account is locked out (retry after a specified duration). + /// + LockedOut, + + /// + /// Password has expired and must be reset. + /// + PasswordExpired, + + /// + /// User must reset password before proceeding. + /// + RequiresPasswordReset, + + /// + /// Additional multi-factor authentication is required. + /// + RequiresMfa, + + /// + /// Unexpected failure occurred (see message for details). + /// + UnknownError +} + +/// +/// Represents a user provisioning request. +/// +public sealed record AuthorityUserRegistration +{ + /// + /// Initialises a new registration. + /// + public AuthorityUserRegistration( + string username, + string? password, + string? displayName, + string? email, + bool requirePasswordReset, + IReadOnlyCollection? roles = null, + IReadOnlyDictionary? attributes = null) + { + Username = ValidateRequired(username, nameof(username)); + Password = password; + DisplayName = displayName; + Email = email; + RequirePasswordReset = requirePasswordReset; + Roles = roles is null ? Array.Empty() : roles.ToArray(); + Attributes = attributes is null + ? new Dictionary(StringComparer.OrdinalIgnoreCase) + : new Dictionary(attributes, StringComparer.OrdinalIgnoreCase); + } + + /// + /// Canonical username (unique). + /// + public string Username { get; } + + /// + /// Optional raw password (hashed by plugin). 
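A minimal sketch of a credential check returning the `AuthorityCredentialVerificationResult` factories above. The return type parameter is stripped in the patch text and assumed to be `ValueTask<AuthorityCredentialVerificationResult>`; the in-memory table and plaintext comparison are purely illustrative, not how a real store would verify secrets.

```csharp
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Authority.Plugins.Abstractions;

internal sealed class InMemoryCredentialStore
{
    // Illustrative only: a real store would verify a password hash, not plaintext.
    private readonly Dictionary<string, (string Password, AuthorityUserDescriptor User)> users =
        new(StringComparer.OrdinalIgnoreCase);

    public ValueTask<AuthorityCredentialVerificationResult> VerifyPasswordAsync(
        string username,
        string password,
        CancellationToken cancellationToken)
    {
        if (!users.TryGetValue(username, out var entry) ||
            !string.Equals(entry.Password, password, StringComparison.Ordinal))
        {
            // Failure carries a machine-readable code plus an optional human-readable message.
            return ValueTask.FromResult(AuthorityCredentialVerificationResult.Failure(
                AuthorityCredentialFailureCode.InvalidCredentials,
                "Invalid username or password."));
        }

        return ValueTask.FromResult(AuthorityCredentialVerificationResult.Success(entry.User));
    }
}
```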
+ /// + public string? Password { get; init; } + + /// + /// Optional human-friendly display name. + /// + public string? DisplayName { get; } + + /// + /// Optional contact email. + /// + public string? Email { get; } + + /// + /// Indicates whether the user must reset their password at next login. + /// + public bool RequirePasswordReset { get; } + + /// + /// Associated roles. + /// + public IReadOnlyCollection Roles { get; } + + /// + /// Plugin-defined attributes. + /// + public IReadOnlyDictionary Attributes { get; } + + /// + /// Creates a copy with the provided password while preserving other fields. + /// + public AuthorityUserRegistration WithPassword(string? password) + => new(Username, password, DisplayName, Email, RequirePasswordReset, Roles, Attributes); + + private static string ValidateRequired(string value, string paramName) + => string.IsNullOrWhiteSpace(value) + ? throw new ArgumentException("Value cannot be null or whitespace.", paramName) + : value; +} + +/// +/// Generic operation result utilised by plugins. +/// +public sealed record AuthorityPluginOperationResult +{ + private AuthorityPluginOperationResult(bool succeeded, string? errorCode, string? message) + { + Succeeded = succeeded; + ErrorCode = errorCode; + Message = message; + } + + /// + /// Indicates whether the operation succeeded. + /// + public bool Succeeded { get; } + + /// + /// Machine-readable error code (populated on failure). + /// + public string? ErrorCode { get; } + + /// + /// Optional human-readable message. + /// + public string? Message { get; } + + /// + /// Returns a successful result. + /// + public static AuthorityPluginOperationResult Success(string? message = null) + => new(true, null, message); + + /// + /// Returns a failed result with the supplied error code. + /// + public static AuthorityPluginOperationResult Failure(string errorCode, string? message = null) + => new(false, ValidateErrorCode(errorCode), message); + + internal static string ValidateErrorCode(string errorCode) + => string.IsNullOrWhiteSpace(errorCode) + ? throw new ArgumentException("Error code is required for failures.", nameof(errorCode)) + : errorCode; +} + +/// +/// Generic operation result that returns a value. +/// +public sealed record AuthorityPluginOperationResult +{ + private AuthorityPluginOperationResult( + bool succeeded, + TValue? value, + string? errorCode, + string? message) + { + Succeeded = succeeded; + Value = value; + ErrorCode = errorCode; + Message = message; + } + + /// + /// Indicates whether the operation succeeded. + /// + public bool Succeeded { get; } + + /// + /// Returned value when successful. + /// + public TValue? Value { get; } + + /// + /// Machine-readable error code (on failure). + /// + public string? ErrorCode { get; } + + /// + /// Optional human-readable message. + /// + public string? Message { get; } + + /// + /// Returns a successful result with the provided value. + /// + public static AuthorityPluginOperationResult Success(TValue value, string? message = null) + => new(true, value, null, message); + + /// + /// Returns a successful result without a value (defaults to default). + /// + public static AuthorityPluginOperationResult Success(string? message = null) + => new(true, default, null, message); + + /// + /// Returns a failed result with the supplied error code. + /// + public static AuthorityPluginOperationResult Failure(string errorCode, string? 
message = null) + => new(false, default, AuthorityPluginOperationResult.ValidateErrorCode(errorCode), message); +} + +/// +/// Context supplied to claims enrichment routines. +/// +public sealed class AuthorityClaimsEnrichmentContext +{ + private readonly Dictionary items; + + /// + /// Initialises a new context instance. + /// + public AuthorityClaimsEnrichmentContext( + AuthorityPluginContext plugin, + AuthorityUserDescriptor? user, + AuthorityClientDescriptor? client) + { + Plugin = plugin ?? throw new ArgumentNullException(nameof(plugin)); + User = user; + Client = client; + items = new Dictionary(StringComparer.OrdinalIgnoreCase); + } + + /// + /// Gets the plugin context associated with the principal. + /// + public AuthorityPluginContext Plugin { get; } + + /// + /// Gets the user descriptor when available. + /// + public AuthorityUserDescriptor? User { get; } + + /// + /// Gets the client descriptor when available. + /// + public AuthorityClientDescriptor? Client { get; } + + /// + /// Extensible bag for plugin-specific data passed between enrichment stages. + /// + public IDictionary Items => items; +} + +/// +/// Represents a registered OAuth/OpenID client. +/// +public sealed record AuthorityClientDescriptor +{ + public AuthorityClientDescriptor( + string clientId, + string? displayName, + bool confidential, + IReadOnlyCollection? allowedGrantTypes = null, + IReadOnlyCollection? allowedScopes = null, + IReadOnlyCollection? allowedAudiences = null, + IReadOnlyCollection? redirectUris = null, + IReadOnlyCollection? postLogoutRedirectUris = null, + IReadOnlyDictionary? properties = null) + { + ClientId = ValidateRequired(clientId, nameof(clientId)); + DisplayName = displayName; + Confidential = confidential; + AllowedGrantTypes = Normalize(allowedGrantTypes); + AllowedScopes = NormalizeScopes(allowedScopes); + AllowedAudiences = Normalize(allowedAudiences); + RedirectUris = redirectUris is null ? Array.Empty() : redirectUris.ToArray(); + PostLogoutRedirectUris = postLogoutRedirectUris is null ? Array.Empty() : postLogoutRedirectUris.ToArray(); + var propertyBag = properties is null + ? new Dictionary(StringComparer.OrdinalIgnoreCase) + : new Dictionary(properties, StringComparer.OrdinalIgnoreCase); + Tenant = propertyBag.TryGetValue(AuthorityClientMetadataKeys.Tenant, out var tenantValue) + ? AuthorityClientRegistration.NormalizeTenantValue(tenantValue) + : null; + var normalizedProject = propertyBag.TryGetValue(AuthorityClientMetadataKeys.Project, out var projectValue) + ? AuthorityClientRegistration.NormalizeProjectValue(projectValue) + : null; + Project = normalizedProject ?? StellaOpsTenancyDefaults.AnyProject; + propertyBag[AuthorityClientMetadataKeys.Project] = Project; + Properties = propertyBag; + } + + public string ClientId { get; } + public string? DisplayName { get; } + public bool Confidential { get; } + public IReadOnlyCollection AllowedGrantTypes { get; } + public IReadOnlyCollection AllowedScopes { get; } + public IReadOnlyCollection AllowedAudiences { get; } + public IReadOnlyCollection RedirectUris { get; } + public IReadOnlyCollection PostLogoutRedirectUris { get; } + public string? Tenant { get; } + public string? Project { get; } + public IReadOnlyDictionary Properties { get; } + + private static IReadOnlyCollection Normalize(IReadOnlyCollection? values) + => values is null || values.Count == 0 + ? 
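A sketch of a claims enricher consuming `AuthorityClaimsEnrichmentContext`. The `"department"` attribute key is illustrative, the user attribute dictionary is assumed to carry string values (type parameters are stripped in the patch text), and `EnrichAsync` is assumed to return a non-generic `ValueTask`.

```csharp
using System.Security.Claims;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Authority.Plugins.Abstractions;

internal sealed class DepartmentClaimsEnricher
{
    public ValueTask EnrichAsync(
        ClaimsIdentity identity,
        AuthorityClaimsEnrichmentContext context,
        CancellationToken cancellationToken)
    {
        // Copy a plugin-defined user attribute onto the issued identity when present.
        if (context.User is { } user &&
            user.Attributes.TryGetValue("department", out var department) &&
            !string.IsNullOrWhiteSpace(department))
        {
            identity.AddClaim(new Claim("department", department));
        }

        return ValueTask.CompletedTask;
    }
}
```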
Array.Empty() + : values + .Where(value => !string.IsNullOrWhiteSpace(value)) + .Select(value => value.Trim()) + .Distinct(StringComparer.Ordinal) + .ToArray(); + + private static IReadOnlyCollection NormalizeScopes(IReadOnlyCollection? values) + { + if (values is null || values.Count == 0) + { + return Array.Empty(); + } + + var unique = new HashSet(StringComparer.Ordinal); + + foreach (var value in values) + { + var normalized = StellaOpsScopes.Normalize(value); + if (normalized is null) + { + continue; + } + + unique.Add(normalized); + } + + if (unique.Count == 0) + { + return Array.Empty(); + } + + return unique.OrderBy(static scope => scope, StringComparer.Ordinal).ToArray(); + } + + private static string ValidateRequired(string value, string paramName) + => string.IsNullOrWhiteSpace(value) + ? throw new ArgumentException("Value cannot be null or whitespace.", paramName) + : value; +} + +public sealed record AuthorityClientCertificateBindingRegistration +{ + public AuthorityClientCertificateBindingRegistration( + string thumbprint, + string? serialNumber = null, + string? subject = null, + string? issuer = null, + IReadOnlyCollection? subjectAlternativeNames = null, + DateTimeOffset? notBefore = null, + DateTimeOffset? notAfter = null, + string? label = null) + { + Thumbprint = NormalizeThumbprint(thumbprint); + SerialNumber = Normalize(serialNumber); + Subject = Normalize(subject); + Issuer = Normalize(issuer); + SubjectAlternativeNames = subjectAlternativeNames is null || subjectAlternativeNames.Count == 0 + ? Array.Empty() + : subjectAlternativeNames + .Where(value => !string.IsNullOrWhiteSpace(value)) + .Select(value => value.Trim()) + .Distinct(StringComparer.OrdinalIgnoreCase) + .ToArray(); + NotBefore = notBefore; + NotAfter = notAfter; + Label = Normalize(label); + } + + public string Thumbprint { get; } + public string? SerialNumber { get; } + public string? Subject { get; } + public string? Issuer { get; } + public IReadOnlyCollection SubjectAlternativeNames { get; } + public DateTimeOffset? NotBefore { get; } + public DateTimeOffset? NotAfter { get; } + public string? Label { get; } + + private static string NormalizeThumbprint(string value) + { + if (string.IsNullOrWhiteSpace(value)) + { + throw new ArgumentException("Thumbprint is required.", nameof(value)); + } + + return value + .Replace(":", string.Empty, StringComparison.Ordinal) + .Replace(" ", string.Empty, StringComparison.Ordinal) + .ToUpperInvariant(); + } + + private static string? Normalize(string? value) + => string.IsNullOrWhiteSpace(value) ? null : value.Trim(); +} + +public sealed record AuthorityClientRegistration +{ + public AuthorityClientRegistration( + string clientId, + bool confidential, + string? displayName, + string? clientSecret, + IReadOnlyCollection? allowedGrantTypes = null, + IReadOnlyCollection? allowedScopes = null, + IReadOnlyCollection? allowedAudiences = null, + IReadOnlyCollection? redirectUris = null, + IReadOnlyCollection? postLogoutRedirectUris = null, + string? tenant = null, + string? project = null, + IReadOnlyDictionary? properties = null, + IReadOnlyCollection? certificateBindings = null) + { + ClientId = ValidateRequired(clientId, nameof(clientId)); + Confidential = confidential; + DisplayName = displayName; + ClientSecret = confidential + ? ValidateRequired(clientSecret ?? 
string.Empty, nameof(clientSecret)) + : clientSecret; + AllowedGrantTypes = Normalize(allowedGrantTypes); + AllowedScopes = NormalizeScopes(allowedScopes); + AllowedAudiences = Normalize(allowedAudiences); + RedirectUris = redirectUris is null ? Array.Empty() : redirectUris.ToArray(); + PostLogoutRedirectUris = postLogoutRedirectUris is null ? Array.Empty() : postLogoutRedirectUris.ToArray(); + Tenant = NormalizeTenantValue(tenant); + var propertyBag = properties is null + ? new Dictionary(StringComparer.OrdinalIgnoreCase) + : new Dictionary(properties, StringComparer.OrdinalIgnoreCase); + var normalizedProject = NormalizeProjectValue(project ?? (propertyBag.TryGetValue(AuthorityClientMetadataKeys.Project, out var projectValue) ? projectValue : null)); + Project = normalizedProject ?? StellaOpsTenancyDefaults.AnyProject; + propertyBag[AuthorityClientMetadataKeys.Project] = Project; + Properties = propertyBag; + CertificateBindings = certificateBindings is null + ? Array.Empty() + : certificateBindings.ToArray(); + } + + public string ClientId { get; } + public bool Confidential { get; } + public string? DisplayName { get; } + public string? ClientSecret { get; init; } + public IReadOnlyCollection AllowedGrantTypes { get; } + public IReadOnlyCollection AllowedScopes { get; } + public IReadOnlyCollection AllowedAudiences { get; } + public IReadOnlyCollection RedirectUris { get; } + public IReadOnlyCollection PostLogoutRedirectUris { get; } + public string? Tenant { get; } + public string? Project { get; } + public IReadOnlyDictionary Properties { get; } + public IReadOnlyCollection CertificateBindings { get; } + + public AuthorityClientRegistration WithClientSecret(string? clientSecret) + => new(ClientId, Confidential, DisplayName, clientSecret, AllowedGrantTypes, AllowedScopes, AllowedAudiences, RedirectUris, PostLogoutRedirectUris, Tenant, Project, Properties, CertificateBindings); + + private static IReadOnlyCollection Normalize(IReadOnlyCollection? values) + => values is null || values.Count == 0 + ? Array.Empty() + : values + .Where(value => !string.IsNullOrWhiteSpace(value)) + .Select(value => value.Trim()) + .Distinct(StringComparer.Ordinal) + .ToArray(); + + private static IReadOnlyCollection NormalizeScopes(IReadOnlyCollection? values) + { + if (values is null || values.Count == 0) + { + return Array.Empty(); + } + + var unique = new HashSet(StringComparer.Ordinal); + + foreach (var value in values) + { + var normalized = StellaOpsScopes.Normalize(value); + if (normalized is null) + { + continue; + } + + unique.Add(normalized); + } + + if (unique.Count == 0) + { + return Array.Empty(); + } + + return unique.OrderBy(static scope => scope, StringComparer.Ordinal).ToArray(); + } + + internal static string? NormalizeTenantValue(string? value) + { + if (string.IsNullOrWhiteSpace(value)) + { + return null; + } + + return value.Trim().ToLowerInvariant(); + } + + internal static string? NormalizeProjectValue(string? value) + { + if (string.IsNullOrWhiteSpace(value)) + { + return null; + } + + return value.Trim().ToLowerInvariant(); + } + + private static string ValidateRequired(string value, string paramName) + => string.IsNullOrWhiteSpace(value) + ? 
throw new ArgumentException("Value cannot be null or whitespace.", paramName) + : value; +} diff --git a/src/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/StellaOps.Authority.Plugins.Abstractions.csproj b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/StellaOps.Authority.Plugins.Abstractions.csproj similarity index 86% rename from src/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/StellaOps.Authority.Plugins.Abstractions.csproj rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/StellaOps.Authority.Plugins.Abstractions.csproj index 35ded50f..b521678a 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/StellaOps.Authority.Plugins.Abstractions.csproj +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/StellaOps.Authority.Plugins.Abstractions.csproj @@ -1,4 +1,5 @@ - + + net10.0 preview @@ -19,7 +20,7 @@ - + - + \ No newline at end of file diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/AuthorityMongoDefaults.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/AuthorityMongoDefaults.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/AuthorityMongoDefaults.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/AuthorityMongoDefaults.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Class1.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Class1.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Class1.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Class1.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityBootstrapInviteDocument.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityBootstrapInviteDocument.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityBootstrapInviteDocument.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityBootstrapInviteDocument.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityClientCertificateBinding.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityClientCertificateBinding.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityClientCertificateBinding.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityClientCertificateBinding.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityClientDocument.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityClientDocument.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityClientDocument.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityClientDocument.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityLoginAttemptDocument.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityLoginAttemptDocument.cs similarity index 96% rename from 
src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityLoginAttemptDocument.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityLoginAttemptDocument.cs index e9fc79e8..5dcc088d 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityLoginAttemptDocument.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityLoginAttemptDocument.cs @@ -1,82 +1,82 @@ -using MongoDB.Bson; -using MongoDB.Bson.Serialization.Attributes; - -namespace StellaOps.Authority.Storage.Mongo.Documents; - -/// -/// Represents a recorded login attempt for audit and lockout purposes. -/// -[BsonIgnoreExtraElements] -public sealed class AuthorityLoginAttemptDocument -{ - [BsonId] - [BsonRepresentation(BsonType.ObjectId)] - public string Id { get; set; } = ObjectId.GenerateNewId().ToString(); - - [BsonElement("eventType")] - public string EventType { get; set; } = "authority.unknown"; - - [BsonElement("outcome")] - public string Outcome { get; set; } = "unknown"; - - [BsonElement("correlationId")] - [BsonIgnoreIfNull] - public string? CorrelationId { get; set; } - - [BsonElement("subjectId")] - [BsonIgnoreIfNull] - public string? SubjectId { get; set; } - - [BsonElement("username")] - [BsonIgnoreIfNull] - public string? Username { get; set; } - - [BsonElement("clientId")] - [BsonIgnoreIfNull] - public string? ClientId { get; set; } - - [BsonElement("tenant")] - [BsonIgnoreIfNull] - public string? Tenant { get; set; } - - [BsonElement("plugin")] - [BsonIgnoreIfNull] - public string? Plugin { get; set; } - - [BsonElement("successful")] - public bool Successful { get; set; } - - [BsonElement("scopes")] - public List Scopes { get; set; } = new(); - - [BsonElement("reason")] - [BsonIgnoreIfNull] - public string? Reason { get; set; } - - [BsonElement("remoteAddress")] - [BsonIgnoreIfNull] - public string? RemoteAddress { get; set; } - - [BsonElement("properties")] - public List Properties { get; set; } = new(); - - [BsonElement("occurredAt")] - public DateTimeOffset OccurredAt { get; set; } = DateTimeOffset.UtcNow; -} - -/// -/// Represents an additional classified property captured for an authority login attempt. -/// -[BsonIgnoreExtraElements] -public sealed class AuthorityLoginAttemptPropertyDocument -{ - [BsonElement("name")] - public string Name { get; set; } = string.Empty; - - [BsonElement("value")] - [BsonIgnoreIfNull] - public string? Value { get; set; } - - [BsonElement("classification")] - public string Classification { get; set; } = "none"; -} +using MongoDB.Bson; +using MongoDB.Bson.Serialization.Attributes; + +namespace StellaOps.Authority.Storage.Mongo.Documents; + +/// +/// Represents a recorded login attempt for audit and lockout purposes. +/// +[BsonIgnoreExtraElements] +public sealed class AuthorityLoginAttemptDocument +{ + [BsonId] + [BsonRepresentation(BsonType.ObjectId)] + public string Id { get; set; } = ObjectId.GenerateNewId().ToString(); + + [BsonElement("eventType")] + public string EventType { get; set; } = "authority.unknown"; + + [BsonElement("outcome")] + public string Outcome { get; set; } = "unknown"; + + [BsonElement("correlationId")] + [BsonIgnoreIfNull] + public string? CorrelationId { get; set; } + + [BsonElement("subjectId")] + [BsonIgnoreIfNull] + public string? SubjectId { get; set; } + + [BsonElement("username")] + [BsonIgnoreIfNull] + public string? Username { get; set; } + + [BsonElement("clientId")] + [BsonIgnoreIfNull] + public string? 
ClientId { get; set; } + + [BsonElement("tenant")] + [BsonIgnoreIfNull] + public string? Tenant { get; set; } + + [BsonElement("plugin")] + [BsonIgnoreIfNull] + public string? Plugin { get; set; } + + [BsonElement("successful")] + public bool Successful { get; set; } + + [BsonElement("scopes")] + public List Scopes { get; set; } = new(); + + [BsonElement("reason")] + [BsonIgnoreIfNull] + public string? Reason { get; set; } + + [BsonElement("remoteAddress")] + [BsonIgnoreIfNull] + public string? RemoteAddress { get; set; } + + [BsonElement("properties")] + public List Properties { get; set; } = new(); + + [BsonElement("occurredAt")] + public DateTimeOffset OccurredAt { get; set; } = DateTimeOffset.UtcNow; +} + +/// +/// Represents an additional classified property captured for an authority login attempt. +/// +[BsonIgnoreExtraElements] +public sealed class AuthorityLoginAttemptPropertyDocument +{ + [BsonElement("name")] + public string Name { get; set; } = string.Empty; + + [BsonElement("value")] + [BsonIgnoreIfNull] + public string? Value { get; set; } + + [BsonElement("classification")] + public string Classification { get; set; } = "none"; +} diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityRevocationDocument.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityRevocationDocument.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityRevocationDocument.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityRevocationDocument.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityRevocationExportStateDocument.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityRevocationExportStateDocument.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityRevocationExportStateDocument.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityRevocationExportStateDocument.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityScopeDocument.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityScopeDocument.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityScopeDocument.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityScopeDocument.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityTokenDocument.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityTokenDocument.cs similarity index 96% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityTokenDocument.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityTokenDocument.cs index f667ca47..a815fe39 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityTokenDocument.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityTokenDocument.cs @@ -1,92 +1,92 @@ -using System; -using System.Collections.Generic; -using MongoDB.Bson; -using MongoDB.Bson.Serialization.Attributes; - -namespace StellaOps.Authority.Storage.Mongo.Documents; - -/// -/// Represents an OAuth token issued by 
Authority. -/// -[BsonIgnoreExtraElements] -public sealed class AuthorityTokenDocument -{ - [BsonId] - [BsonRepresentation(BsonType.ObjectId)] - public string Id { get; set; } = ObjectId.GenerateNewId().ToString(); - - [BsonElement("tokenId")] - public string TokenId { get; set; } = Guid.NewGuid().ToString("N"); - - [BsonElement("type")] - public string Type { get; set; } = string.Empty; - - [BsonElement("subjectId")] - [BsonIgnoreIfNull] - public string? SubjectId { get; set; } - - [BsonElement("clientId")] - [BsonIgnoreIfNull] - public string? ClientId { get; set; } - - [BsonElement("scope")] - public List Scope { get; set; } = new(); - - [BsonElement("referenceId")] - [BsonIgnoreIfNull] - public string? ReferenceId { get; set; } - - [BsonElement("status")] - public string Status { get; set; } = "valid"; - - [BsonElement("payload")] - [BsonIgnoreIfNull] - public string? Payload { get; set; } - - [BsonElement("createdAt")] - public DateTimeOffset CreatedAt { get; set; } = DateTimeOffset.UtcNow; - - [BsonElement("expiresAt")] - [BsonIgnoreIfNull] - public DateTimeOffset? ExpiresAt { get; set; } - - [BsonElement("revokedAt")] - [BsonIgnoreIfNull] - public DateTimeOffset? RevokedAt { get; set; } - - [BsonElement("revokedReason")] - [BsonIgnoreIfNull] - public string? RevokedReason { get; set; } - - [BsonElement("revokedReasonDescription")] - [BsonIgnoreIfNull] - public string? RevokedReasonDescription { get; set; } - - [BsonElement("senderConstraint")] - [BsonIgnoreIfNull] - public string? SenderConstraint { get; set; } - - [BsonElement("senderKeyThumbprint")] - [BsonIgnoreIfNull] - public string? SenderKeyThumbprint { get; set; } - - [BsonElement("senderNonce")] - [BsonIgnoreIfNull] - public string? SenderNonce { get; set; } - - [BsonElement("tenant")] - [BsonIgnoreIfNull] - public string? Tenant { get; set; } - - [BsonElement("project")] - [BsonIgnoreIfNull] - public string? Project { get; set; } - - [BsonElement("devices")] - [BsonIgnoreIfNull] - public List? Devices { get; set; } - - [BsonElement("revokedMetadata")] - [BsonIgnoreIfNull] - public Dictionary? RevokedMetadata { get; set; } -} +using System; +using System.Collections.Generic; +using MongoDB.Bson; +using MongoDB.Bson.Serialization.Attributes; + +namespace StellaOps.Authority.Storage.Mongo.Documents; + +/// +/// Represents an OAuth token issued by Authority. +/// +[BsonIgnoreExtraElements] +public sealed class AuthorityTokenDocument +{ + [BsonId] + [BsonRepresentation(BsonType.ObjectId)] + public string Id { get; set; } = ObjectId.GenerateNewId().ToString(); + + [BsonElement("tokenId")] + public string TokenId { get; set; } = Guid.NewGuid().ToString("N"); + + [BsonElement("type")] + public string Type { get; set; } = string.Empty; + + [BsonElement("subjectId")] + [BsonIgnoreIfNull] + public string? SubjectId { get; set; } + + [BsonElement("clientId")] + [BsonIgnoreIfNull] + public string? ClientId { get; set; } + + [BsonElement("scope")] + public List Scope { get; set; } = new(); + + [BsonElement("referenceId")] + [BsonIgnoreIfNull] + public string? ReferenceId { get; set; } + + [BsonElement("status")] + public string Status { get; set; } = "valid"; + + [BsonElement("payload")] + [BsonIgnoreIfNull] + public string? Payload { get; set; } + + [BsonElement("createdAt")] + public DateTimeOffset CreatedAt { get; set; } = DateTimeOffset.UtcNow; + + [BsonElement("expiresAt")] + [BsonIgnoreIfNull] + public DateTimeOffset? ExpiresAt { get; set; } + + [BsonElement("revokedAt")] + [BsonIgnoreIfNull] + public DateTimeOffset? 
RevokedAt { get; set; } + + [BsonElement("revokedReason")] + [BsonIgnoreIfNull] + public string? RevokedReason { get; set; } + + [BsonElement("revokedReasonDescription")] + [BsonIgnoreIfNull] + public string? RevokedReasonDescription { get; set; } + + [BsonElement("senderConstraint")] + [BsonIgnoreIfNull] + public string? SenderConstraint { get; set; } + + [BsonElement("senderKeyThumbprint")] + [BsonIgnoreIfNull] + public string? SenderKeyThumbprint { get; set; } + + [BsonElement("senderNonce")] + [BsonIgnoreIfNull] + public string? SenderNonce { get; set; } + + [BsonElement("tenant")] + [BsonIgnoreIfNull] + public string? Tenant { get; set; } + + [BsonElement("project")] + [BsonIgnoreIfNull] + public string? Project { get; set; } + + [BsonElement("devices")] + [BsonIgnoreIfNull] + public List? Devices { get; set; } + + [BsonElement("revokedMetadata")] + [BsonIgnoreIfNull] + public Dictionary? RevokedMetadata { get; set; } +} diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityUserDocument.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityUserDocument.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityUserDocument.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Documents/AuthorityUserDocument.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Extensions/ServiceCollectionExtensions.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Extensions/ServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Extensions/ServiceCollectionExtensions.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Extensions/ServiceCollectionExtensions.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityBootstrapInviteCollectionInitializer.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityBootstrapInviteCollectionInitializer.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityBootstrapInviteCollectionInitializer.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityBootstrapInviteCollectionInitializer.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityClientCollectionInitializer.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityClientCollectionInitializer.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityClientCollectionInitializer.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityClientCollectionInitializer.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityLoginAttemptCollectionInitializer.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityLoginAttemptCollectionInitializer.cs similarity index 98% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityLoginAttemptCollectionInitializer.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityLoginAttemptCollectionInitializer.cs index 1a392025..9973657c 100644 
--- a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityLoginAttemptCollectionInitializer.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityLoginAttemptCollectionInitializer.cs @@ -1,35 +1,35 @@ -using MongoDB.Driver; -using StellaOps.Authority.Storage.Mongo.Documents; - -namespace StellaOps.Authority.Storage.Mongo.Initialization; - -internal sealed class AuthorityLoginAttemptCollectionInitializer : IAuthorityCollectionInitializer -{ - public async ValueTask EnsureIndexesAsync(IMongoDatabase database, CancellationToken cancellationToken) - { - var collection = database.GetCollection(AuthorityMongoDefaults.Collections.LoginAttempts); - - var indexModels = new[] - { - new CreateIndexModel( - Builders.IndexKeys - .Ascending(a => a.SubjectId) - .Descending(a => a.OccurredAt), - new CreateIndexOptions { Name = "login_attempt_subject_time" }), - new CreateIndexModel( - Builders.IndexKeys.Descending(a => a.OccurredAt), - new CreateIndexOptions { Name = "login_attempt_time" }), - new CreateIndexModel( - Builders.IndexKeys - .Ascending(a => a.CorrelationId), - new CreateIndexOptions { Name = "login_attempt_correlation", Sparse = true }), - new CreateIndexModel( - Builders.IndexKeys - .Ascending(a => a.Tenant) - .Descending(a => a.OccurredAt), - new CreateIndexOptions { Name = "login_attempt_tenant_time", Sparse = true }) - }; - - await collection.Indexes.CreateManyAsync(indexModels, cancellationToken).ConfigureAwait(false); - } -} +using MongoDB.Driver; +using StellaOps.Authority.Storage.Mongo.Documents; + +namespace StellaOps.Authority.Storage.Mongo.Initialization; + +internal sealed class AuthorityLoginAttemptCollectionInitializer : IAuthorityCollectionInitializer +{ + public async ValueTask EnsureIndexesAsync(IMongoDatabase database, CancellationToken cancellationToken) + { + var collection = database.GetCollection(AuthorityMongoDefaults.Collections.LoginAttempts); + + var indexModels = new[] + { + new CreateIndexModel( + Builders.IndexKeys + .Ascending(a => a.SubjectId) + .Descending(a => a.OccurredAt), + new CreateIndexOptions { Name = "login_attempt_subject_time" }), + new CreateIndexModel( + Builders.IndexKeys.Descending(a => a.OccurredAt), + new CreateIndexOptions { Name = "login_attempt_time" }), + new CreateIndexModel( + Builders.IndexKeys + .Ascending(a => a.CorrelationId), + new CreateIndexOptions { Name = "login_attempt_correlation", Sparse = true }), + new CreateIndexModel( + Builders.IndexKeys + .Ascending(a => a.Tenant) + .Descending(a => a.OccurredAt), + new CreateIndexOptions { Name = "login_attempt_tenant_time", Sparse = true }) + }; + + await collection.Indexes.CreateManyAsync(indexModels, cancellationToken).ConfigureAwait(false); + } +} diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityMongoInitializer.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityMongoInitializer.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityMongoInitializer.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityMongoInitializer.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityRevocationCollectionInitializer.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityRevocationCollectionInitializer.cs similarity index 100% rename 
from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityRevocationCollectionInitializer.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityRevocationCollectionInitializer.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityScopeCollectionInitializer.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityScopeCollectionInitializer.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityScopeCollectionInitializer.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityScopeCollectionInitializer.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityTokenCollectionInitializer.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityTokenCollectionInitializer.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityTokenCollectionInitializer.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityTokenCollectionInitializer.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityUserCollectionInitializer.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityUserCollectionInitializer.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityUserCollectionInitializer.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/AuthorityUserCollectionInitializer.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/IAuthorityCollectionInitializer.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/IAuthorityCollectionInitializer.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/IAuthorityCollectionInitializer.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Initialization/IAuthorityCollectionInitializer.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Migrations/AuthorityMongoMigrationRunner.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Migrations/AuthorityMongoMigrationRunner.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Migrations/AuthorityMongoMigrationRunner.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Migrations/AuthorityMongoMigrationRunner.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Migrations/EnsureAuthorityCollectionsMigration.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Migrations/EnsureAuthorityCollectionsMigration.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Migrations/EnsureAuthorityCollectionsMigration.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Migrations/EnsureAuthorityCollectionsMigration.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Migrations/IAuthorityMongoMigration.cs 
b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Migrations/IAuthorityMongoMigration.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Migrations/IAuthorityMongoMigration.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Migrations/IAuthorityMongoMigration.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Options/AuthorityMongoOptions.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Options/AuthorityMongoOptions.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Options/AuthorityMongoOptions.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Options/AuthorityMongoOptions.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Sessions/AuthorityMongoSessionAccessor.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Sessions/AuthorityMongoSessionAccessor.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Sessions/AuthorityMongoSessionAccessor.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Sessions/AuthorityMongoSessionAccessor.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/StellaOps.Authority.Storage.Mongo.csproj b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/StellaOps.Authority.Storage.Mongo.csproj similarity index 78% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/StellaOps.Authority.Storage.Mongo.csproj rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/StellaOps.Authority.Storage.Mongo.csproj index f06e4c35..f4b64078 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/StellaOps.Authority.Storage.Mongo.csproj +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/StellaOps.Authority.Storage.Mongo.csproj @@ -1,4 +1,5 @@ - + + net10.0 preview @@ -13,6 +14,6 @@ - + - + \ No newline at end of file diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityBootstrapInviteStore.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityBootstrapInviteStore.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityBootstrapInviteStore.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityBootstrapInviteStore.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityClientStore.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityClientStore.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityClientStore.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityClientStore.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityLoginAttemptStore.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityLoginAttemptStore.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityLoginAttemptStore.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityLoginAttemptStore.cs diff --git 
a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityRevocationExportStateStore.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityRevocationExportStateStore.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityRevocationExportStateStore.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityRevocationExportStateStore.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityRevocationStore.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityRevocationStore.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityRevocationStore.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityRevocationStore.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityScopeStore.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityScopeStore.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityScopeStore.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityScopeStore.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityTokenStore.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityTokenStore.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityTokenStore.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityTokenStore.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityUserStore.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityUserStore.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityUserStore.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/AuthorityUserStore.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityBootstrapInviteStore.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityBootstrapInviteStore.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityBootstrapInviteStore.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityBootstrapInviteStore.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityClientStore.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityClientStore.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityClientStore.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityClientStore.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityLoginAttemptStore.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityLoginAttemptStore.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityLoginAttemptStore.cs rename to 
src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityLoginAttemptStore.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityRevocationExportStateStore.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityRevocationExportStateStore.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityRevocationExportStateStore.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityRevocationExportStateStore.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityRevocationStore.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityRevocationStore.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityRevocationStore.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityRevocationStore.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityScopeStore.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityScopeStore.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityScopeStore.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityScopeStore.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityTokenStore.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityTokenStore.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityTokenStore.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityTokenStore.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityUserStore.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityUserStore.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityUserStore.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo/Stores/IAuthorityUserStore.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Tests/Bootstrap/BootstrapInviteCleanupServiceTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Bootstrap/BootstrapInviteCleanupServiceTests.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Tests/Bootstrap/BootstrapInviteCleanupServiceTests.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Bootstrap/BootstrapInviteCleanupServiceTests.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Tests/Console/ConsoleEndpointsTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Console/ConsoleEndpointsTests.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Authority.Tests/Console/ConsoleEndpointsTests.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Console/ConsoleEndpointsTests.cs index d5a2b683..a3edbc31 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Tests/Console/ConsoleEndpointsTests.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Console/ConsoleEndpointsTests.cs @@ -1,339 +1,339 @@ -using System.Net; -using System.Net.Http.Headers; -using 
System.Security.Claims; -using System.Text.Encodings.Web; -using System.Text.Json; -using Microsoft.AspNetCore.Authentication; -using Microsoft.AspNetCore.Builder; -using Microsoft.AspNetCore.Hosting; -using Microsoft.AspNetCore.TestHost; -using Microsoft.AspNetCore.Hosting.Server; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Hosting; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using Microsoft.Extensions.Time.Testing; -using OpenIddict.Abstractions; -using StellaOps.Auth.Abstractions; -using StellaOps.Auth.ServerIntegration; -using StellaOps.Authority.Console; -using StellaOps.Authority.Tenants; -using StellaOps.Cryptography.Audit; -using Xunit; - -namespace StellaOps.Authority.Tests.Console; - -public sealed class ConsoleEndpointsTests -{ - [Fact] - public async Task Tenants_ReturnsTenant_WhenHeaderMatchesClaim() - { - var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-10-31T12:00:00Z")); - var sink = new RecordingAuthEventSink(); - await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty(), Array.Empty())); - - var accessor = app.Services.GetRequiredService(); - accessor.Principal = CreatePrincipal( - tenant: "tenant-default", - scopes: new[] { StellaOpsScopes.UiRead, StellaOpsScopes.AuthorityTenantsRead }, - expiresAt: timeProvider.GetUtcNow().AddMinutes(5)); - - var client = app.CreateTestClient(); - client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthenticationDefaults.AuthenticationScheme); - client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default"); - - var response = await client.GetAsync("/console/tenants"); - Assert.Equal(HttpStatusCode.OK, response.StatusCode); - - var payload = await response.Content.ReadAsStringAsync(); - using var json = JsonDocument.Parse(payload); - var tenants = json.RootElement.GetProperty("tenants"); - Assert.Equal(1, tenants.GetArrayLength()); - Assert.Equal("tenant-default", tenants[0].GetProperty("id").GetString()); - - var audit = Assert.Single(sink.Events); - Assert.Equal("authority.console.tenants.read", audit.EventType); - Assert.Equal(AuthEventOutcome.Success, audit.Outcome); - Assert.Contains("tenant.resolved", audit.Properties.Select(property => property.Name)); - } - - [Fact] - public async Task Tenants_ReturnsBadRequest_WhenHeaderMissing() - { - var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-10-31T12:00:00Z")); - var sink = new RecordingAuthEventSink(); - await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty(), Array.Empty())); - - var accessor = app.Services.GetRequiredService(); - accessor.Principal = CreatePrincipal( - tenant: "tenant-default", - scopes: new[] { StellaOpsScopes.UiRead, StellaOpsScopes.AuthorityTenantsRead }, - expiresAt: timeProvider.GetUtcNow().AddMinutes(5)); - - var client = app.CreateTestClient(); - client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthenticationDefaults.AuthenticationScheme); - - var response = await client.GetAsync("/console/tenants"); - Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode); - Assert.Empty(sink.Events); - } - - [Fact] - public async Task Tenants_ReturnsForbid_WhenHeaderDoesNotMatchClaim() - { - var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-10-31T12:00:00Z")); - var sink = new 
RecordingAuthEventSink(); - await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty(), Array.Empty())); - - var accessor = app.Services.GetRequiredService(); - accessor.Principal = CreatePrincipal( - tenant: "tenant-default", - scopes: new[] { StellaOpsScopes.UiRead, StellaOpsScopes.AuthorityTenantsRead }, - expiresAt: timeProvider.GetUtcNow().AddMinutes(5)); - - var client = app.CreateTestClient(); - client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthenticationDefaults.AuthenticationScheme); - client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "other-tenant"); - - var response = await client.GetAsync("/console/tenants"); - Assert.Equal(HttpStatusCode.Forbidden, response.StatusCode); - Assert.Empty(sink.Events); - } - - [Fact] - public async Task Profile_ReturnsProfileMetadata() - { - var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-10-31T12:00:00Z")); - var sink = new RecordingAuthEventSink(); - await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty(), Array.Empty())); - - var principal = CreatePrincipal( - tenant: "tenant-default", - scopes: new[] { StellaOpsScopes.UiRead, StellaOpsScopes.AuthorityTenantsRead }, - expiresAt: timeProvider.GetUtcNow().AddMinutes(5), - issuedAt: timeProvider.GetUtcNow().AddMinutes(-1), - authenticationTime: timeProvider.GetUtcNow().AddMinutes(-1), - subject: "user-123", - username: "console-user", - displayName: "Console User"); - - var accessor = app.Services.GetRequiredService(); - accessor.Principal = principal; - - var client = app.CreateTestClient(); - client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthenticationDefaults.AuthenticationScheme); - client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default"); - - var response = await client.GetAsync("/console/profile"); - Assert.Equal(HttpStatusCode.OK, response.StatusCode); - - var payload = await response.Content.ReadAsStringAsync(); - using var json = JsonDocument.Parse(payload); - Assert.Equal("user-123", json.RootElement.GetProperty("subjectId").GetString()); - Assert.Equal("console-user", json.RootElement.GetProperty("username").GetString()); - Assert.Equal("tenant-default", json.RootElement.GetProperty("tenant").GetString()); - - var audit = Assert.Single(sink.Events); - Assert.Equal("authority.console.profile.read", audit.EventType); - Assert.Equal(AuthEventOutcome.Success, audit.Outcome); - } - - [Fact] - public async Task TokenIntrospect_FlagsInactive_WhenExpired() - { - var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-10-31T12:00:00Z")); - var sink = new RecordingAuthEventSink(); - await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty(), Array.Empty())); - - var principal = CreatePrincipal( - tenant: "tenant-default", - scopes: new[] { StellaOpsScopes.UiRead, StellaOpsScopes.AuthorityTenantsRead }, - expiresAt: timeProvider.GetUtcNow().AddMinutes(-1), - issuedAt: timeProvider.GetUtcNow().AddMinutes(-10), - tokenId: "token-abc"); - - var accessor = app.Services.GetRequiredService(); - accessor.Principal = principal; - - var client = app.CreateTestClient(); - client.DefaultRequestHeaders.Authorization = new 
AuthenticationHeaderValue(TestAuthenticationDefaults.AuthenticationScheme); - client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default"); - - var response = await client.PostAsync("/console/token/introspect", content: null); - Assert.Equal(HttpStatusCode.OK, response.StatusCode); - - var payload = await response.Content.ReadAsStringAsync(); - using var json = JsonDocument.Parse(payload); - Assert.False(json.RootElement.GetProperty("active").GetBoolean()); - Assert.Equal("token-abc", json.RootElement.GetProperty("tokenId").GetString()); - - var audit = Assert.Single(sink.Events); - Assert.Equal("authority.console.token.introspect", audit.EventType); - Assert.Equal(AuthEventOutcome.Success, audit.Outcome); - } - - private static ClaimsPrincipal CreatePrincipal( - string tenant, - IReadOnlyCollection scopes, - DateTimeOffset expiresAt, - DateTimeOffset? issuedAt = null, - DateTimeOffset? authenticationTime = null, - string? subject = null, - string? username = null, - string? displayName = null, - string? tokenId = null) - { - var claims = new List - { - new(StellaOpsClaimTypes.Tenant, tenant), - new(StellaOpsClaimTypes.Scope, string.Join(' ', scopes)), - new("exp", expiresAt.ToUnixTimeSeconds().ToString()), - new(OpenIddictConstants.Claims.Audience, "console") - }; - - if (!string.IsNullOrWhiteSpace(subject)) - { - claims.Add(new Claim(StellaOpsClaimTypes.Subject, subject)); - } - - if (!string.IsNullOrWhiteSpace(username)) - { - claims.Add(new Claim(OpenIddictConstants.Claims.PreferredUsername, username)); - } - - if (!string.IsNullOrWhiteSpace(displayName)) - { - claims.Add(new Claim(OpenIddictConstants.Claims.Name, displayName)); - } - - if (issuedAt is not null) - { - claims.Add(new Claim("iat", issuedAt.Value.ToUnixTimeSeconds().ToString())); - } - - if (authenticationTime is not null) - { - claims.Add(new Claim("auth_time", authenticationTime.Value.ToUnixTimeSeconds().ToString())); - } - - if (!string.IsNullOrWhiteSpace(tokenId)) - { - claims.Add(new Claim(StellaOpsClaimTypes.TokenId, tokenId)); - } - - var identity = new ClaimsIdentity(claims, TestAuthenticationDefaults.AuthenticationScheme); - return new ClaimsPrincipal(identity); - } - - private static async Task CreateApplicationAsync( - FakeTimeProvider timeProvider, - RecordingAuthEventSink sink, - params AuthorityTenantView[] tenants) - { - var builder = WebApplication.CreateBuilder(new WebApplicationOptions - { - EnvironmentName = Environments.Development - }); - builder.WebHost.UseTestServer(); - - builder.Services.AddSingleton(timeProvider); - builder.Services.AddSingleton(sink); - builder.Services.AddSingleton(new FakeTenantCatalog(tenants)); - builder.Services.AddSingleton(); - builder.Services.AddHttpContextAccessor(); - builder.Services.AddSingleton(); - - var authBuilder = builder.Services.AddAuthentication(options => - { - options.DefaultAuthenticateScheme = TestAuthenticationDefaults.AuthenticationScheme; - options.DefaultChallengeScheme = TestAuthenticationDefaults.AuthenticationScheme; - }); - - authBuilder.AddScheme(TestAuthenticationDefaults.AuthenticationScheme, static _ => { }); - authBuilder.AddScheme(StellaOpsAuthenticationDefaults.AuthenticationScheme, static _ => { }); - - builder.Services.AddAuthorization(); - builder.Services.AddStellaOpsScopeHandler(); - - builder.Services.AddOptions() - .Configure(options => - { - options.Authority = "https://authority.integration.test"; - }) - .PostConfigure(static options => options.Validate()); - - var app = builder.Build(); - 
app.UseAuthentication(); - app.UseAuthorization(); - app.MapConsoleEndpoints(); - - await app.StartAsync().ConfigureAwait(false); - return app; - } - - private sealed class FakeTenantCatalog : IAuthorityTenantCatalog - { - private readonly IReadOnlyList tenants; - - public FakeTenantCatalog(IEnumerable tenants) - { - this.tenants = tenants.ToArray(); - } - - public IReadOnlyList GetTenants() => tenants; - } - - private sealed class RecordingAuthEventSink : IAuthEventSink - { - public List Events { get; } = new(); - - public ValueTask WriteAsync(AuthEventRecord record, CancellationToken cancellationToken) - { - Events.Add(record); - return ValueTask.CompletedTask; - } - } - - private sealed class TestPrincipalAccessor - { - public ClaimsPrincipal? Principal { get; set; } - } - - private sealed class TestAuthenticationHandler : AuthenticationHandler - { - public TestAuthenticationHandler( - IOptionsMonitor options, - ILoggerFactory logger, - UrlEncoder encoder, - ISystemClock clock) - : base(options, logger, encoder, clock) - { - } - - protected override Task HandleAuthenticateAsync() - { - var accessor = Context.RequestServices.GetRequiredService(); - if (accessor.Principal is null) - { - return Task.FromResult(AuthenticateResult.Fail("No principal configured.")); - } - - var ticket = new AuthenticationTicket(accessor.Principal, Scheme.Name); - return Task.FromResult(AuthenticateResult.Success(ticket)); - } - } -} - -internal static class HostTestClientExtensions -{ - public static HttpClient CreateTestClient(this WebApplication app) - { - var server = app.Services.GetRequiredService() as TestServer - ?? throw new InvalidOperationException("TestServer is not available. Ensure UseTestServer() is configured."); - return server.CreateClient(); - } -} -internal static class TestAuthenticationDefaults -{ - public const string AuthenticationScheme = "AuthorityConsoleTests"; -} +using System.Net; +using System.Net.Http.Headers; +using System.Security.Claims; +using System.Text.Encodings.Web; +using System.Text.Json; +using Microsoft.AspNetCore.Authentication; +using Microsoft.AspNetCore.Builder; +using Microsoft.AspNetCore.Hosting; +using Microsoft.AspNetCore.TestHost; +using Microsoft.AspNetCore.Hosting.Server; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using Microsoft.Extensions.Time.Testing; +using OpenIddict.Abstractions; +using StellaOps.Auth.Abstractions; +using StellaOps.Auth.ServerIntegration; +using StellaOps.Authority.Console; +using StellaOps.Authority.Tenants; +using StellaOps.Cryptography.Audit; +using Xunit; + +namespace StellaOps.Authority.Tests.Console; + +public sealed class ConsoleEndpointsTests +{ + [Fact] + public async Task Tenants_ReturnsTenant_WhenHeaderMatchesClaim() + { + var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-10-31T12:00:00Z")); + var sink = new RecordingAuthEventSink(); + await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty(), Array.Empty())); + + var accessor = app.Services.GetRequiredService(); + accessor.Principal = CreatePrincipal( + tenant: "tenant-default", + scopes: new[] { StellaOpsScopes.UiRead, StellaOpsScopes.AuthorityTenantsRead }, + expiresAt: timeProvider.GetUtcNow().AddMinutes(5)); + + var client = app.CreateTestClient(); + client.DefaultRequestHeaders.Authorization = new 
AuthenticationHeaderValue(TestAuthenticationDefaults.AuthenticationScheme); + client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default"); + + var response = await client.GetAsync("/console/tenants"); + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + + var payload = await response.Content.ReadAsStringAsync(); + using var json = JsonDocument.Parse(payload); + var tenants = json.RootElement.GetProperty("tenants"); + Assert.Equal(1, tenants.GetArrayLength()); + Assert.Equal("tenant-default", tenants[0].GetProperty("id").GetString()); + + var audit = Assert.Single(sink.Events); + Assert.Equal("authority.console.tenants.read", audit.EventType); + Assert.Equal(AuthEventOutcome.Success, audit.Outcome); + Assert.Contains("tenant.resolved", audit.Properties.Select(property => property.Name)); + } + + [Fact] + public async Task Tenants_ReturnsBadRequest_WhenHeaderMissing() + { + var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-10-31T12:00:00Z")); + var sink = new RecordingAuthEventSink(); + await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty(), Array.Empty())); + + var accessor = app.Services.GetRequiredService(); + accessor.Principal = CreatePrincipal( + tenant: "tenant-default", + scopes: new[] { StellaOpsScopes.UiRead, StellaOpsScopes.AuthorityTenantsRead }, + expiresAt: timeProvider.GetUtcNow().AddMinutes(5)); + + var client = app.CreateTestClient(); + client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthenticationDefaults.AuthenticationScheme); + + var response = await client.GetAsync("/console/tenants"); + Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode); + Assert.Empty(sink.Events); + } + + [Fact] + public async Task Tenants_ReturnsForbid_WhenHeaderDoesNotMatchClaim() + { + var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-10-31T12:00:00Z")); + var sink = new RecordingAuthEventSink(); + await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty(), Array.Empty())); + + var accessor = app.Services.GetRequiredService(); + accessor.Principal = CreatePrincipal( + tenant: "tenant-default", + scopes: new[] { StellaOpsScopes.UiRead, StellaOpsScopes.AuthorityTenantsRead }, + expiresAt: timeProvider.GetUtcNow().AddMinutes(5)); + + var client = app.CreateTestClient(); + client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthenticationDefaults.AuthenticationScheme); + client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "other-tenant"); + + var response = await client.GetAsync("/console/tenants"); + Assert.Equal(HttpStatusCode.Forbidden, response.StatusCode); + Assert.Empty(sink.Events); + } + + [Fact] + public async Task Profile_ReturnsProfileMetadata() + { + var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-10-31T12:00:00Z")); + var sink = new RecordingAuthEventSink(); + await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty(), Array.Empty())); + + var principal = CreatePrincipal( + tenant: "tenant-default", + scopes: new[] { StellaOpsScopes.UiRead, StellaOpsScopes.AuthorityTenantsRead }, + expiresAt: timeProvider.GetUtcNow().AddMinutes(5), + issuedAt: timeProvider.GetUtcNow().AddMinutes(-1), + authenticationTime: 
timeProvider.GetUtcNow().AddMinutes(-1), + subject: "user-123", + username: "console-user", + displayName: "Console User"); + + var accessor = app.Services.GetRequiredService(); + accessor.Principal = principal; + + var client = app.CreateTestClient(); + client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthenticationDefaults.AuthenticationScheme); + client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default"); + + var response = await client.GetAsync("/console/profile"); + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + + var payload = await response.Content.ReadAsStringAsync(); + using var json = JsonDocument.Parse(payload); + Assert.Equal("user-123", json.RootElement.GetProperty("subjectId").GetString()); + Assert.Equal("console-user", json.RootElement.GetProperty("username").GetString()); + Assert.Equal("tenant-default", json.RootElement.GetProperty("tenant").GetString()); + + var audit = Assert.Single(sink.Events); + Assert.Equal("authority.console.profile.read", audit.EventType); + Assert.Equal(AuthEventOutcome.Success, audit.Outcome); + } + + [Fact] + public async Task TokenIntrospect_FlagsInactive_WhenExpired() + { + var timeProvider = new FakeTimeProvider(DateTimeOffset.Parse("2025-10-31T12:00:00Z")); + var sink = new RecordingAuthEventSink(); + await using var app = await CreateApplicationAsync(timeProvider, sink, new AuthorityTenantView("tenant-default", "Default", "active", "shared", Array.Empty(), Array.Empty())); + + var principal = CreatePrincipal( + tenant: "tenant-default", + scopes: new[] { StellaOpsScopes.UiRead, StellaOpsScopes.AuthorityTenantsRead }, + expiresAt: timeProvider.GetUtcNow().AddMinutes(-1), + issuedAt: timeProvider.GetUtcNow().AddMinutes(-10), + tokenId: "token-abc"); + + var accessor = app.Services.GetRequiredService(); + accessor.Principal = principal; + + var client = app.CreateTestClient(); + client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue(TestAuthenticationDefaults.AuthenticationScheme); + client.DefaultRequestHeaders.Add(AuthorityHttpHeaders.Tenant, "tenant-default"); + + var response = await client.PostAsync("/console/token/introspect", content: null); + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + + var payload = await response.Content.ReadAsStringAsync(); + using var json = JsonDocument.Parse(payload); + Assert.False(json.RootElement.GetProperty("active").GetBoolean()); + Assert.Equal("token-abc", json.RootElement.GetProperty("tokenId").GetString()); + + var audit = Assert.Single(sink.Events); + Assert.Equal("authority.console.token.introspect", audit.EventType); + Assert.Equal(AuthEventOutcome.Success, audit.Outcome); + } + + private static ClaimsPrincipal CreatePrincipal( + string tenant, + IReadOnlyCollection scopes, + DateTimeOffset expiresAt, + DateTimeOffset? issuedAt = null, + DateTimeOffset? authenticationTime = null, + string? subject = null, + string? username = null, + string? displayName = null, + string? 
tokenId = null) + { + var claims = new List + { + new(StellaOpsClaimTypes.Tenant, tenant), + new(StellaOpsClaimTypes.Scope, string.Join(' ', scopes)), + new("exp", expiresAt.ToUnixTimeSeconds().ToString()), + new(OpenIddictConstants.Claims.Audience, "console") + }; + + if (!string.IsNullOrWhiteSpace(subject)) + { + claims.Add(new Claim(StellaOpsClaimTypes.Subject, subject)); + } + + if (!string.IsNullOrWhiteSpace(username)) + { + claims.Add(new Claim(OpenIddictConstants.Claims.PreferredUsername, username)); + } + + if (!string.IsNullOrWhiteSpace(displayName)) + { + claims.Add(new Claim(OpenIddictConstants.Claims.Name, displayName)); + } + + if (issuedAt is not null) + { + claims.Add(new Claim("iat", issuedAt.Value.ToUnixTimeSeconds().ToString())); + } + + if (authenticationTime is not null) + { + claims.Add(new Claim("auth_time", authenticationTime.Value.ToUnixTimeSeconds().ToString())); + } + + if (!string.IsNullOrWhiteSpace(tokenId)) + { + claims.Add(new Claim(StellaOpsClaimTypes.TokenId, tokenId)); + } + + var identity = new ClaimsIdentity(claims, TestAuthenticationDefaults.AuthenticationScheme); + return new ClaimsPrincipal(identity); + } + + private static async Task CreateApplicationAsync( + FakeTimeProvider timeProvider, + RecordingAuthEventSink sink, + params AuthorityTenantView[] tenants) + { + var builder = WebApplication.CreateBuilder(new WebApplicationOptions + { + EnvironmentName = Environments.Development + }); + builder.WebHost.UseTestServer(); + + builder.Services.AddSingleton(timeProvider); + builder.Services.AddSingleton(sink); + builder.Services.AddSingleton(new FakeTenantCatalog(tenants)); + builder.Services.AddSingleton(); + builder.Services.AddHttpContextAccessor(); + builder.Services.AddSingleton(); + + var authBuilder = builder.Services.AddAuthentication(options => + { + options.DefaultAuthenticateScheme = TestAuthenticationDefaults.AuthenticationScheme; + options.DefaultChallengeScheme = TestAuthenticationDefaults.AuthenticationScheme; + }); + + authBuilder.AddScheme(TestAuthenticationDefaults.AuthenticationScheme, static _ => { }); + authBuilder.AddScheme(StellaOpsAuthenticationDefaults.AuthenticationScheme, static _ => { }); + + builder.Services.AddAuthorization(); + builder.Services.AddStellaOpsScopeHandler(); + + builder.Services.AddOptions() + .Configure(options => + { + options.Authority = "https://authority.integration.test"; + }) + .PostConfigure(static options => options.Validate()); + + var app = builder.Build(); + app.UseAuthentication(); + app.UseAuthorization(); + app.MapConsoleEndpoints(); + + await app.StartAsync().ConfigureAwait(false); + return app; + } + + private sealed class FakeTenantCatalog : IAuthorityTenantCatalog + { + private readonly IReadOnlyList tenants; + + public FakeTenantCatalog(IEnumerable tenants) + { + this.tenants = tenants.ToArray(); + } + + public IReadOnlyList GetTenants() => tenants; + } + + private sealed class RecordingAuthEventSink : IAuthEventSink + { + public List Events { get; } = new(); + + public ValueTask WriteAsync(AuthEventRecord record, CancellationToken cancellationToken) + { + Events.Add(record); + return ValueTask.CompletedTask; + } + } + + private sealed class TestPrincipalAccessor + { + public ClaimsPrincipal? 
Principal { get; set; } + } + + private sealed class TestAuthenticationHandler : AuthenticationHandler + { + public TestAuthenticationHandler( + IOptionsMonitor options, + ILoggerFactory logger, + UrlEncoder encoder, + ISystemClock clock) + : base(options, logger, encoder, clock) + { + } + + protected override Task HandleAuthenticateAsync() + { + var accessor = Context.RequestServices.GetRequiredService(); + if (accessor.Principal is null) + { + return Task.FromResult(AuthenticateResult.Fail("No principal configured.")); + } + + var ticket = new AuthenticationTicket(accessor.Principal, Scheme.Name); + return Task.FromResult(AuthenticateResult.Success(ticket)); + } + } +} + +internal static class HostTestClientExtensions +{ + public static HttpClient CreateTestClient(this WebApplication app) + { + var server = app.Services.GetRequiredService() as TestServer + ?? throw new InvalidOperationException("TestServer is not available. Ensure UseTestServer() is configured."); + return server.CreateClient(); + } +} +internal static class TestAuthenticationDefaults +{ + public const string AuthenticationScheme = "AuthorityConsoleTests"; +} diff --git a/src/StellaOps.Authority/StellaOps.Authority.Tests/Identity/AuthorityIdentityProviderRegistryTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Identity/AuthorityIdentityProviderRegistryTests.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Authority.Tests/Identity/AuthorityIdentityProviderRegistryTests.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Identity/AuthorityIdentityProviderRegistryTests.cs index 3f0a4034..92e0fd0a 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Tests/Identity/AuthorityIdentityProviderRegistryTests.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Identity/AuthorityIdentityProviderRegistryTests.cs @@ -1,210 +1,210 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging.Abstractions; -using StellaOps.Authority.Plugins.Abstractions; -using Xunit; - -namespace StellaOps.Authority.Tests.Identity; - -public class AuthorityIdentityProviderRegistryTests -{ - [Fact] - public async Task RegistryIndexesProvidersAndAggregatesCapabilities() - { - var providers = new[] - { - CreateProvider("standard", type: "standard", supportsPassword: true, supportsMfa: false, supportsClientProvisioning: false), - CreateProvider("sso", type: "saml", supportsPassword: false, supportsMfa: true, supportsClientProvisioning: true) - }; - - using var serviceProvider = BuildServiceProvider(providers); - var registry = new AuthorityIdentityProviderRegistry(serviceProvider, NullLogger.Instance); - - Assert.Equal(2, registry.Providers.Count); - Assert.True(registry.TryGet("standard", out var standard)); - Assert.Equal("standard", standard!.Name); - Assert.Single(registry.PasswordProviders); - Assert.Single(registry.MfaProviders); - Assert.Single(registry.ClientProvisioningProviders); - Assert.True(registry.AggregateCapabilities.SupportsPassword); - Assert.True(registry.AggregateCapabilities.SupportsMfa); - Assert.True(registry.AggregateCapabilities.SupportsClientProvisioning); - - await using var handle = await registry.AcquireAsync("standard", default); - Assert.Same(providers[0], handle.Provider); - } - - [Fact] - public async Task RegistryIgnoresDuplicateNames() - { - var 
duplicate = CreateProvider("standard", "ldap", supportsPassword: true, supportsMfa: false, supportsClientProvisioning: false); - var providers = new[] - { - CreateProvider("standard", type: "standard", supportsPassword: true, supportsMfa: false, supportsClientProvisioning: false), - duplicate - }; - - using var serviceProvider = BuildServiceProvider(providers); - var registry = new AuthorityIdentityProviderRegistry(serviceProvider, NullLogger.Instance); - - Assert.Single(registry.Providers); - Assert.Equal("standard", registry.Providers.First().Name); - Assert.True(registry.TryGet("standard", out var provider)); - await using var handle = await registry.AcquireAsync("standard", default); - Assert.Same(providers[0], handle.Provider); - Assert.Equal("standard", provider!.Name); - } - - [Fact] - public async Task AcquireAsync_ReturnsScopedProviderInstances() - { - var configuration = new ConfigurationBuilder().Build(); - var manifest = new AuthorityPluginManifest( - "scoped", - "scoped", - true, - AssemblyName: null, - AssemblyPath: null, - Capabilities: new[] { AuthorityPluginCapabilities.Password }, - Metadata: new Dictionary(StringComparer.OrdinalIgnoreCase), - ConfigPath: string.Empty); - - var context = new AuthorityPluginContext(manifest, configuration); - - var services = new ServiceCollection(); - services.AddScoped(_ => new ScopedIdentityProviderPlugin(context)); - - using var serviceProvider = services.BuildServiceProvider(); - var registry = new AuthorityIdentityProviderRegistry(serviceProvider, NullLogger.Instance); - - await using var first = await registry.AcquireAsync("scoped", default); - await using var second = await registry.AcquireAsync("scoped", default); - - var firstPlugin = Assert.IsType(first.Provider); - var secondPlugin = Assert.IsType(second.Provider); - Assert.NotEqual(firstPlugin.InstanceId, secondPlugin.InstanceId); - } - - private static ServiceProvider BuildServiceProvider(IEnumerable providers) - { - var services = new ServiceCollection(); - foreach (var provider in providers) - { - services.AddSingleton(provider); - } - - return services.BuildServiceProvider(); - } - - private static IIdentityProviderPlugin CreateProvider( - string name, - string type, - bool supportsPassword, - bool supportsMfa, - bool supportsClientProvisioning) - { - var manifest = new AuthorityPluginManifest( - name, - type, - true, - AssemblyName: null, - AssemblyPath: null, - Capabilities: BuildCapabilities(supportsPassword, supportsMfa, supportsClientProvisioning), - Metadata: new Dictionary(StringComparer.OrdinalIgnoreCase), - ConfigPath: string.Empty); - - var context = new AuthorityPluginContext(manifest, new ConfigurationBuilder().Build()); - return new TestIdentityProviderPlugin(context, supportsPassword, supportsMfa, supportsClientProvisioning); - } - - private static IReadOnlyList BuildCapabilities(bool password, bool mfa, bool clientProvisioning) - { - var capabilities = new List(); - if (password) - { - capabilities.Add(AuthorityPluginCapabilities.Password); - } - - if (mfa) - { - capabilities.Add(AuthorityPluginCapabilities.Mfa); - } - - if (clientProvisioning) - { - capabilities.Add(AuthorityPluginCapabilities.ClientProvisioning); - } - - return capabilities; - } - - private sealed class TestIdentityProviderPlugin : IIdentityProviderPlugin - { - public TestIdentityProviderPlugin( - AuthorityPluginContext context, - bool supportsPassword, - bool supportsMfa, - bool supportsClientProvisioning) - { - Context = context; - Capabilities = new 
AuthorityIdentityProviderCapabilities( - SupportsPassword: supportsPassword, - SupportsMfa: supportsMfa, - SupportsClientProvisioning: supportsClientProvisioning); - } - - public string Name => Context.Manifest.Name; - - public string Type => Context.Manifest.Type; - - public AuthorityPluginContext Context { get; } - - public IUserCredentialStore Credentials => throw new NotImplementedException(); - - public IClaimsEnricher ClaimsEnricher => throw new NotImplementedException(); - - public IClientProvisioningStore? ClientProvisioning => null; - - public AuthorityIdentityProviderCapabilities Capabilities { get; } - - public ValueTask CheckHealthAsync(CancellationToken cancellationToken) - => ValueTask.FromResult(AuthorityPluginHealthResult.Healthy()); - } - - private sealed class ScopedIdentityProviderPlugin : IIdentityProviderPlugin - { - public ScopedIdentityProviderPlugin(AuthorityPluginContext context) - { - Context = context; - InstanceId = Guid.NewGuid(); - Capabilities = new AuthorityIdentityProviderCapabilities( - SupportsPassword: true, - SupportsMfa: false, - SupportsClientProvisioning: false); - } - - public Guid InstanceId { get; } - - public string Name => Context.Manifest.Name; - - public string Type => Context.Manifest.Type; - - public AuthorityPluginContext Context { get; } - - public IUserCredentialStore Credentials => throw new NotImplementedException(); - - public IClaimsEnricher ClaimsEnricher => throw new NotImplementedException(); - - public IClientProvisioningStore? ClientProvisioning => null; - - public AuthorityIdentityProviderCapabilities Capabilities { get; } - - public ValueTask CheckHealthAsync(CancellationToken cancellationToken) - => ValueTask.FromResult(AuthorityPluginHealthResult.Healthy()); - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging.Abstractions; +using StellaOps.Authority.Plugins.Abstractions; +using Xunit; + +namespace StellaOps.Authority.Tests.Identity; + +public class AuthorityIdentityProviderRegistryTests +{ + [Fact] + public async Task RegistryIndexesProvidersAndAggregatesCapabilities() + { + var providers = new[] + { + CreateProvider("standard", type: "standard", supportsPassword: true, supportsMfa: false, supportsClientProvisioning: false), + CreateProvider("sso", type: "saml", supportsPassword: false, supportsMfa: true, supportsClientProvisioning: true) + }; + + using var serviceProvider = BuildServiceProvider(providers); + var registry = new AuthorityIdentityProviderRegistry(serviceProvider, NullLogger.Instance); + + Assert.Equal(2, registry.Providers.Count); + Assert.True(registry.TryGet("standard", out var standard)); + Assert.Equal("standard", standard!.Name); + Assert.Single(registry.PasswordProviders); + Assert.Single(registry.MfaProviders); + Assert.Single(registry.ClientProvisioningProviders); + Assert.True(registry.AggregateCapabilities.SupportsPassword); + Assert.True(registry.AggregateCapabilities.SupportsMfa); + Assert.True(registry.AggregateCapabilities.SupportsClientProvisioning); + + await using var handle = await registry.AcquireAsync("standard", default); + Assert.Same(providers[0], handle.Provider); + } + + [Fact] + public async Task RegistryIgnoresDuplicateNames() + { + var duplicate = CreateProvider("standard", "ldap", supportsPassword: true, supportsMfa: false, supportsClientProvisioning: false); + var providers = 
new[] + { + CreateProvider("standard", type: "standard", supportsPassword: true, supportsMfa: false, supportsClientProvisioning: false), + duplicate + }; + + using var serviceProvider = BuildServiceProvider(providers); + var registry = new AuthorityIdentityProviderRegistry(serviceProvider, NullLogger.Instance); + + Assert.Single(registry.Providers); + Assert.Equal("standard", registry.Providers.First().Name); + Assert.True(registry.TryGet("standard", out var provider)); + await using var handle = await registry.AcquireAsync("standard", default); + Assert.Same(providers[0], handle.Provider); + Assert.Equal("standard", provider!.Name); + } + + [Fact] + public async Task AcquireAsync_ReturnsScopedProviderInstances() + { + var configuration = new ConfigurationBuilder().Build(); + var manifest = new AuthorityPluginManifest( + "scoped", + "scoped", + true, + AssemblyName: null, + AssemblyPath: null, + Capabilities: new[] { AuthorityPluginCapabilities.Password }, + Metadata: new Dictionary(StringComparer.OrdinalIgnoreCase), + ConfigPath: string.Empty); + + var context = new AuthorityPluginContext(manifest, configuration); + + var services = new ServiceCollection(); + services.AddScoped(_ => new ScopedIdentityProviderPlugin(context)); + + using var serviceProvider = services.BuildServiceProvider(); + var registry = new AuthorityIdentityProviderRegistry(serviceProvider, NullLogger.Instance); + + await using var first = await registry.AcquireAsync("scoped", default); + await using var second = await registry.AcquireAsync("scoped", default); + + var firstPlugin = Assert.IsType(first.Provider); + var secondPlugin = Assert.IsType(second.Provider); + Assert.NotEqual(firstPlugin.InstanceId, secondPlugin.InstanceId); + } + + private static ServiceProvider BuildServiceProvider(IEnumerable providers) + { + var services = new ServiceCollection(); + foreach (var provider in providers) + { + services.AddSingleton(provider); + } + + return services.BuildServiceProvider(); + } + + private static IIdentityProviderPlugin CreateProvider( + string name, + string type, + bool supportsPassword, + bool supportsMfa, + bool supportsClientProvisioning) + { + var manifest = new AuthorityPluginManifest( + name, + type, + true, + AssemblyName: null, + AssemblyPath: null, + Capabilities: BuildCapabilities(supportsPassword, supportsMfa, supportsClientProvisioning), + Metadata: new Dictionary(StringComparer.OrdinalIgnoreCase), + ConfigPath: string.Empty); + + var context = new AuthorityPluginContext(manifest, new ConfigurationBuilder().Build()); + return new TestIdentityProviderPlugin(context, supportsPassword, supportsMfa, supportsClientProvisioning); + } + + private static IReadOnlyList BuildCapabilities(bool password, bool mfa, bool clientProvisioning) + { + var capabilities = new List(); + if (password) + { + capabilities.Add(AuthorityPluginCapabilities.Password); + } + + if (mfa) + { + capabilities.Add(AuthorityPluginCapabilities.Mfa); + } + + if (clientProvisioning) + { + capabilities.Add(AuthorityPluginCapabilities.ClientProvisioning); + } + + return capabilities; + } + + private sealed class TestIdentityProviderPlugin : IIdentityProviderPlugin + { + public TestIdentityProviderPlugin( + AuthorityPluginContext context, + bool supportsPassword, + bool supportsMfa, + bool supportsClientProvisioning) + { + Context = context; + Capabilities = new AuthorityIdentityProviderCapabilities( + SupportsPassword: supportsPassword, + SupportsMfa: supportsMfa, + SupportsClientProvisioning: supportsClientProvisioning); + } + + public 
string Name => Context.Manifest.Name; + + public string Type => Context.Manifest.Type; + + public AuthorityPluginContext Context { get; } + + public IUserCredentialStore Credentials => throw new NotImplementedException(); + + public IClaimsEnricher ClaimsEnricher => throw new NotImplementedException(); + + public IClientProvisioningStore? ClientProvisioning => null; + + public AuthorityIdentityProviderCapabilities Capabilities { get; } + + public ValueTask CheckHealthAsync(CancellationToken cancellationToken) + => ValueTask.FromResult(AuthorityPluginHealthResult.Healthy()); + } + + private sealed class ScopedIdentityProviderPlugin : IIdentityProviderPlugin + { + public ScopedIdentityProviderPlugin(AuthorityPluginContext context) + { + Context = context; + InstanceId = Guid.NewGuid(); + Capabilities = new AuthorityIdentityProviderCapabilities( + SupportsPassword: true, + SupportsMfa: false, + SupportsClientProvisioning: false); + } + + public Guid InstanceId { get; } + + public string Name => Context.Manifest.Name; + + public string Type => Context.Manifest.Type; + + public AuthorityPluginContext Context { get; } + + public IUserCredentialStore Credentials => throw new NotImplementedException(); + + public IClaimsEnricher ClaimsEnricher => throw new NotImplementedException(); + + public IClientProvisioningStore? ClientProvisioning => null; + + public AuthorityIdentityProviderCapabilities Capabilities { get; } + + public ValueTask CheckHealthAsync(CancellationToken cancellationToken) + => ValueTask.FromResult(AuthorityPluginHealthResult.Healthy()); + } +} diff --git a/src/StellaOps.Authority/StellaOps.Authority.Tests/Identity/AuthorityIdentityProviderSelectorTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Identity/AuthorityIdentityProviderSelectorTests.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Authority.Tests/Identity/AuthorityIdentityProviderSelectorTests.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Identity/AuthorityIdentityProviderSelectorTests.cs index 2c15d020..17cbe893 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Tests/Identity/AuthorityIdentityProviderSelectorTests.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Identity/AuthorityIdentityProviderSelectorTests.cs @@ -1,125 +1,125 @@ -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.DependencyInjection; -using OpenIddict.Abstractions; -using StellaOps.Authority.OpenIddict; -using StellaOps.Authority.Plugins.Abstractions; -using Xunit; - -namespace StellaOps.Authority.Tests.Identity; - -public class AuthorityIdentityProviderSelectorTests -{ - [Fact] - public void ResolvePasswordProvider_UsesSingleProviderWhenNoParameter() - { - var registry = CreateRegistry(passwordProviders: new[] { CreateProvider("standard", supportsPassword: true) }); - var request = new OpenIddictRequest(); - - var result = AuthorityIdentityProviderSelector.ResolvePasswordProvider(request, registry); - - Assert.True(result.Succeeded); - Assert.Equal("standard", result.Provider!.Name); - } - - [Fact] - public void ResolvePasswordProvider_FailsWhenNoProviders() - { - var registry = CreateRegistry(passwordProviders: Array.Empty()); - var request = new OpenIddictRequest(); - - var result = AuthorityIdentityProviderSelector.ResolvePasswordProvider(request, registry); - - Assert.False(result.Succeeded); - Assert.Equal(OpenIddictConstants.Errors.UnsupportedGrantType, result.Error); - } - - [Fact] - public void 
ResolvePasswordProvider_RequiresParameterWhenMultipleProviders() - { - var registry = CreateRegistry(passwordProviders: new[] - { - CreateProvider("standard", supportsPassword: true), - CreateProvider("ldap", supportsPassword: true) - }); - var request = new OpenIddictRequest(); - - var result = AuthorityIdentityProviderSelector.ResolvePasswordProvider(request, registry); - - Assert.False(result.Succeeded); - Assert.Equal(OpenIddictConstants.Errors.InvalidRequest, result.Error); - } - - [Fact] - public void ResolvePasswordProvider_HonoursProviderParameter() - { - var registry = CreateRegistry(passwordProviders: new[] - { - CreateProvider("standard", supportsPassword: true), - CreateProvider("ldap", supportsPassword: true) - }); - var request = new OpenIddictRequest(); - request.SetParameter(AuthorityOpenIddictConstants.ProviderParameterName, "ldap"); - - var result = AuthorityIdentityProviderSelector.ResolvePasswordProvider(request, registry); - - Assert.True(result.Succeeded); - Assert.Equal("ldap", result.Provider!.Name); - } - - private static AuthorityIdentityProviderRegistry CreateRegistry(IEnumerable passwordProviders) - { - var services = new ServiceCollection(); - foreach (var provider in passwordProviders) - { - services.AddSingleton(provider); - } - - var serviceProvider = services.BuildServiceProvider(); - return new AuthorityIdentityProviderRegistry(serviceProvider, Microsoft.Extensions.Logging.Abstractions.NullLogger.Instance); - } - - private static IIdentityProviderPlugin CreateProvider(string name, bool supportsPassword) - { - var manifest = new AuthorityPluginManifest( - name, - "standard", - true, - AssemblyName: null, - AssemblyPath: null, - Capabilities: supportsPassword ? new[] { AuthorityPluginCapabilities.Password } : Array.Empty(), - Metadata: new Dictionary(StringComparer.OrdinalIgnoreCase), - ConfigPath: string.Empty); - - var context = new AuthorityPluginContext(manifest, new ConfigurationBuilder().Build()); - return new SelectorTestProvider(context, supportsPassword); - } - - private sealed class SelectorTestProvider : IIdentityProviderPlugin - { - public SelectorTestProvider(AuthorityPluginContext context, bool supportsPassword) - { - Context = context; - Capabilities = new AuthorityIdentityProviderCapabilities( - SupportsPassword: supportsPassword, - SupportsMfa: false, - SupportsClientProvisioning: false); - } - - public string Name => Context.Manifest.Name; - - public string Type => Context.Manifest.Type; - - public AuthorityPluginContext Context { get; } - - public IUserCredentialStore Credentials => throw new NotImplementedException(); - - public IClaimsEnricher ClaimsEnricher => throw new NotImplementedException(); - - public IClientProvisioningStore? 
ClientProvisioning => null; - - public AuthorityIdentityProviderCapabilities Capabilities { get; } - - public ValueTask CheckHealthAsync(CancellationToken cancellationToken) - => ValueTask.FromResult(AuthorityPluginHealthResult.Healthy()); - } -} +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using OpenIddict.Abstractions; +using StellaOps.Authority.OpenIddict; +using StellaOps.Authority.Plugins.Abstractions; +using Xunit; + +namespace StellaOps.Authority.Tests.Identity; + +public class AuthorityIdentityProviderSelectorTests +{ + [Fact] + public void ResolvePasswordProvider_UsesSingleProviderWhenNoParameter() + { + var registry = CreateRegistry(passwordProviders: new[] { CreateProvider("standard", supportsPassword: true) }); + var request = new OpenIddictRequest(); + + var result = AuthorityIdentityProviderSelector.ResolvePasswordProvider(request, registry); + + Assert.True(result.Succeeded); + Assert.Equal("standard", result.Provider!.Name); + } + + [Fact] + public void ResolvePasswordProvider_FailsWhenNoProviders() + { + var registry = CreateRegistry(passwordProviders: Array.Empty()); + var request = new OpenIddictRequest(); + + var result = AuthorityIdentityProviderSelector.ResolvePasswordProvider(request, registry); + + Assert.False(result.Succeeded); + Assert.Equal(OpenIddictConstants.Errors.UnsupportedGrantType, result.Error); + } + + [Fact] + public void ResolvePasswordProvider_RequiresParameterWhenMultipleProviders() + { + var registry = CreateRegistry(passwordProviders: new[] + { + CreateProvider("standard", supportsPassword: true), + CreateProvider("ldap", supportsPassword: true) + }); + var request = new OpenIddictRequest(); + + var result = AuthorityIdentityProviderSelector.ResolvePasswordProvider(request, registry); + + Assert.False(result.Succeeded); + Assert.Equal(OpenIddictConstants.Errors.InvalidRequest, result.Error); + } + + [Fact] + public void ResolvePasswordProvider_HonoursProviderParameter() + { + var registry = CreateRegistry(passwordProviders: new[] + { + CreateProvider("standard", supportsPassword: true), + CreateProvider("ldap", supportsPassword: true) + }); + var request = new OpenIddictRequest(); + request.SetParameter(AuthorityOpenIddictConstants.ProviderParameterName, "ldap"); + + var result = AuthorityIdentityProviderSelector.ResolvePasswordProvider(request, registry); + + Assert.True(result.Succeeded); + Assert.Equal("ldap", result.Provider!.Name); + } + + private static AuthorityIdentityProviderRegistry CreateRegistry(IEnumerable passwordProviders) + { + var services = new ServiceCollection(); + foreach (var provider in passwordProviders) + { + services.AddSingleton(provider); + } + + var serviceProvider = services.BuildServiceProvider(); + return new AuthorityIdentityProviderRegistry(serviceProvider, Microsoft.Extensions.Logging.Abstractions.NullLogger.Instance); + } + + private static IIdentityProviderPlugin CreateProvider(string name, bool supportsPassword) + { + var manifest = new AuthorityPluginManifest( + name, + "standard", + true, + AssemblyName: null, + AssemblyPath: null, + Capabilities: supportsPassword ? 
new[] { AuthorityPluginCapabilities.Password } : Array.Empty(), + Metadata: new Dictionary(StringComparer.OrdinalIgnoreCase), + ConfigPath: string.Empty); + + var context = new AuthorityPluginContext(manifest, new ConfigurationBuilder().Build()); + return new SelectorTestProvider(context, supportsPassword); + } + + private sealed class SelectorTestProvider : IIdentityProviderPlugin + { + public SelectorTestProvider(AuthorityPluginContext context, bool supportsPassword) + { + Context = context; + Capabilities = new AuthorityIdentityProviderCapabilities( + SupportsPassword: supportsPassword, + SupportsMfa: false, + SupportsClientProvisioning: false); + } + + public string Name => Context.Manifest.Name; + + public string Type => Context.Manifest.Type; + + public AuthorityPluginContext Context { get; } + + public IUserCredentialStore Credentials => throw new NotImplementedException(); + + public IClaimsEnricher ClaimsEnricher => throw new NotImplementedException(); + + public IClientProvisioningStore? ClientProvisioning => null; + + public AuthorityIdentityProviderCapabilities Capabilities { get; } + + public ValueTask CheckHealthAsync(CancellationToken cancellationToken) + => ValueTask.FromResult(AuthorityPluginHealthResult.Healthy()); + } +} diff --git a/src/StellaOps.Authority/StellaOps.Authority.Tests/Infrastructure/AuthorityWebApplicationFactory.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Infrastructure/AuthorityWebApplicationFactory.cs similarity index 96% rename from src/StellaOps.Authority/StellaOps.Authority.Tests/Infrastructure/AuthorityWebApplicationFactory.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Infrastructure/AuthorityWebApplicationFactory.cs index c3e3f9e1..6a12c92d 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Tests/Infrastructure/AuthorityWebApplicationFactory.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Infrastructure/AuthorityWebApplicationFactory.cs @@ -1,48 +1,48 @@ -using System; -using System.Collections.Generic; -using System.Threading.Tasks; -using Microsoft.AspNetCore.Mvc.Testing; -using Microsoft.Extensions.Hosting; -using Microsoft.AspNetCore.Hosting; -using Microsoft.Extensions.Configuration; -using Mongo2Go; -using Xunit; - -namespace StellaOps.Authority.Tests.Infrastructure; - -public sealed class AuthorityWebApplicationFactory : WebApplicationFactory, IAsyncLifetime -{ - private readonly MongoDbRunner mongoRunner; - - public AuthorityWebApplicationFactory() - { - mongoRunner = MongoDbRunner.Start(singleNodeReplSet: true); - } - - public string ConnectionString => mongoRunner.ConnectionString; - - protected override void ConfigureWebHost(IWebHostBuilder builder) - { - builder.UseEnvironment("Development"); - builder.ConfigureAppConfiguration((_, configuration) => - { - var settings = new Dictionary - { - ["Authority:Issuer"] = "https://authority.test", - ["Authority:Storage:ConnectionString"] = mongoRunner.ConnectionString, - ["Authority:Storage:DatabaseName"] = "authority-tests", - ["Authority:Signing:Enabled"] = "false" - }; - - configuration.AddInMemoryCollection(settings); - }); - } - - public Task InitializeAsync() => Task.CompletedTask; - - public Task DisposeAsync() - { - mongoRunner.Dispose(); - return Task.CompletedTask; - } -} +using System; +using System.Collections.Generic; +using System.Threading.Tasks; +using Microsoft.AspNetCore.Mvc.Testing; +using Microsoft.Extensions.Hosting; +using Microsoft.AspNetCore.Hosting; +using Microsoft.Extensions.Configuration; 
+using Mongo2Go; +using Xunit; + +namespace StellaOps.Authority.Tests.Infrastructure; + +public sealed class AuthorityWebApplicationFactory : WebApplicationFactory, IAsyncLifetime +{ + private readonly MongoDbRunner mongoRunner; + + public AuthorityWebApplicationFactory() + { + mongoRunner = MongoDbRunner.Start(singleNodeReplSet: true); + } + + public string ConnectionString => mongoRunner.ConnectionString; + + protected override void ConfigureWebHost(IWebHostBuilder builder) + { + builder.UseEnvironment("Development"); + builder.ConfigureAppConfiguration((_, configuration) => + { + var settings = new Dictionary + { + ["Authority:Issuer"] = "https://authority.test", + ["Authority:Storage:ConnectionString"] = mongoRunner.ConnectionString, + ["Authority:Storage:DatabaseName"] = "authority-tests", + ["Authority:Signing:Enabled"] = "false" + }; + + configuration.AddInMemoryCollection(settings); + }); + } + + public Task InitializeAsync() => Task.CompletedTask; + + public Task DisposeAsync() + { + mongoRunner.Dispose(); + return Task.CompletedTask; + } +} diff --git a/src/StellaOps.Authority/StellaOps.Authority.Tests/OpenApi/OpenApiDiscoveryEndpointTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/OpenApi/OpenApiDiscoveryEndpointTests.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Authority.Tests/OpenApi/OpenApiDiscoveryEndpointTests.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/OpenApi/OpenApiDiscoveryEndpointTests.cs index bbb8401d..09fe1716 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Tests/OpenApi/OpenApiDiscoveryEndpointTests.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/OpenApi/OpenApiDiscoveryEndpointTests.cs @@ -1,90 +1,90 @@ -using System.Collections.Generic; -using System.Linq; -using System.Net; -using System.Net.Http.Headers; -using System.Text.Json; -using Microsoft.AspNetCore.Hosting; -using Microsoft.AspNetCore.Mvc.Testing; -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.DependencyInjection; -using StellaOps.Authority.Tests.Infrastructure; -using StellaOps.Configuration; -using Xunit; - -namespace StellaOps.Authority.Tests.OpenApi; - -public sealed class OpenApiDiscoveryEndpointTests : IClassFixture -{ - private readonly AuthorityWebApplicationFactory factory; - - public OpenApiDiscoveryEndpointTests(AuthorityWebApplicationFactory factory) - { - this.factory = factory; - } - - [Fact] - public async Task ReturnsJsonSpecificationByDefault() - { - using var client = factory.CreateClient(); - - using var response = await client.GetAsync("/.well-known/openapi").ConfigureAwait(false); - - Assert.Equal(HttpStatusCode.OK, response.StatusCode); - Assert.NotNull(response.Headers.ETag); - Assert.Equal("public, max-age=300", response.Headers.CacheControl?.ToString()); - - var contentType = response.Content.Headers.ContentType?.ToString(); - Assert.Equal("application/openapi+json; charset=utf-8", contentType); - - var payload = await response.Content.ReadAsStringAsync().ConfigureAwait(false); - using var document = JsonDocument.Parse(payload); - Assert.Equal("3.1.0", document.RootElement.GetProperty("openapi").GetString()); - - var info = document.RootElement.GetProperty("info"); - Assert.Equal("authority", info.GetProperty("x-stella-service").GetString()); - Assert.True(info.TryGetProperty("x-stella-grant-types", out var grantsNode)); - Assert.Contains("authorization_code", grantsNode.EnumerateArray().Select(element => element.GetString())); - - var grantsHeader = 
Assert.Single(response.Headers.GetValues("X-StellaOps-OAuth-Grants")); - Assert.Contains("authorization_code", grantsHeader.Split(' ', StringSplitOptions.RemoveEmptyEntries)); - - var scopesHeader = Assert.Single(response.Headers.GetValues("X-StellaOps-OAuth-Scopes")); - Assert.Contains("policy:read", scopesHeader.Split(' ', StringSplitOptions.RemoveEmptyEntries)); - } - - [Fact] - public async Task ReturnsYamlWhenRequested() - { - using var client = factory.CreateClient(); - using var request = new HttpRequestMessage(HttpMethod.Get, "/.well-known/openapi"); - request.Headers.Accept.Add(new MediaTypeWithQualityHeaderValue("application/openapi+yaml")); - - using var response = await client.SendAsync(request).ConfigureAwait(false); - - Assert.Equal(HttpStatusCode.OK, response.StatusCode); - Assert.Equal("application/openapi+yaml; charset=utf-8", response.Content.Headers.ContentType?.ToString()); - - var payload = await response.Content.ReadAsStringAsync().ConfigureAwait(false); - Assert.StartsWith("openapi: 3.1.0", payload.TrimStart(), StringComparison.Ordinal); - } - - [Fact] - public async Task ReturnsNotModifiedWhenEtagMatches() - { - using var client = factory.CreateClient(); - - using var initialResponse = await client.GetAsync("/.well-known/openapi").ConfigureAwait(false); - var etag = initialResponse.Headers.ETag; - Assert.NotNull(etag); - - using var conditionalRequest = new HttpRequestMessage(HttpMethod.Get, "/.well-known/openapi"); - conditionalRequest.Headers.IfNoneMatch.Add(etag!); - - using var conditionalResponse = await client.SendAsync(conditionalRequest).ConfigureAwait(false); - - Assert.Equal(HttpStatusCode.NotModified, conditionalResponse.StatusCode); - Assert.Equal(etag!.Tag, conditionalResponse.Headers.ETag?.Tag); - Assert.Equal("public, max-age=300", conditionalResponse.Headers.CacheControl?.ToString()); - Assert.True(conditionalResponse.Content.Headers.ContentLength == 0 || conditionalResponse.Content.Headers.ContentLength is null); - } -} +using System.Collections.Generic; +using System.Linq; +using System.Net; +using System.Net.Http.Headers; +using System.Text.Json; +using Microsoft.AspNetCore.Hosting; +using Microsoft.AspNetCore.Mvc.Testing; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using StellaOps.Authority.Tests.Infrastructure; +using StellaOps.Configuration; +using Xunit; + +namespace StellaOps.Authority.Tests.OpenApi; + +public sealed class OpenApiDiscoveryEndpointTests : IClassFixture +{ + private readonly AuthorityWebApplicationFactory factory; + + public OpenApiDiscoveryEndpointTests(AuthorityWebApplicationFactory factory) + { + this.factory = factory; + } + + [Fact] + public async Task ReturnsJsonSpecificationByDefault() + { + using var client = factory.CreateClient(); + + using var response = await client.GetAsync("/.well-known/openapi").ConfigureAwait(false); + + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + Assert.NotNull(response.Headers.ETag); + Assert.Equal("public, max-age=300", response.Headers.CacheControl?.ToString()); + + var contentType = response.Content.Headers.ContentType?.ToString(); + Assert.Equal("application/openapi+json; charset=utf-8", contentType); + + var payload = await response.Content.ReadAsStringAsync().ConfigureAwait(false); + using var document = JsonDocument.Parse(payload); + Assert.Equal("3.1.0", document.RootElement.GetProperty("openapi").GetString()); + + var info = document.RootElement.GetProperty("info"); + Assert.Equal("authority", 
info.GetProperty("x-stella-service").GetString()); + Assert.True(info.TryGetProperty("x-stella-grant-types", out var grantsNode)); + Assert.Contains("authorization_code", grantsNode.EnumerateArray().Select(element => element.GetString())); + + var grantsHeader = Assert.Single(response.Headers.GetValues("X-StellaOps-OAuth-Grants")); + Assert.Contains("authorization_code", grantsHeader.Split(' ', StringSplitOptions.RemoveEmptyEntries)); + + var scopesHeader = Assert.Single(response.Headers.GetValues("X-StellaOps-OAuth-Scopes")); + Assert.Contains("policy:read", scopesHeader.Split(' ', StringSplitOptions.RemoveEmptyEntries)); + } + + [Fact] + public async Task ReturnsYamlWhenRequested() + { + using var client = factory.CreateClient(); + using var request = new HttpRequestMessage(HttpMethod.Get, "/.well-known/openapi"); + request.Headers.Accept.Add(new MediaTypeWithQualityHeaderValue("application/openapi+yaml")); + + using var response = await client.SendAsync(request).ConfigureAwait(false); + + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + Assert.Equal("application/openapi+yaml; charset=utf-8", response.Content.Headers.ContentType?.ToString()); + + var payload = await response.Content.ReadAsStringAsync().ConfigureAwait(false); + Assert.StartsWith("openapi: 3.1.0", payload.TrimStart(), StringComparison.Ordinal); + } + + [Fact] + public async Task ReturnsNotModifiedWhenEtagMatches() + { + using var client = factory.CreateClient(); + + using var initialResponse = await client.GetAsync("/.well-known/openapi").ConfigureAwait(false); + var etag = initialResponse.Headers.ETag; + Assert.NotNull(etag); + + using var conditionalRequest = new HttpRequestMessage(HttpMethod.Get, "/.well-known/openapi"); + conditionalRequest.Headers.IfNoneMatch.Add(etag!); + + using var conditionalResponse = await client.SendAsync(conditionalRequest).ConfigureAwait(false); + + Assert.Equal(HttpStatusCode.NotModified, conditionalResponse.StatusCode); + Assert.Equal(etag!.Tag, conditionalResponse.Headers.ETag?.Tag); + Assert.Equal("public, max-age=300", conditionalResponse.Headers.CacheControl?.ToString()); + Assert.True(conditionalResponse.Content.Headers.ContentLength == 0 || conditionalResponse.Content.Headers.ContentLength is null); + } +} diff --git a/src/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/ClientCredentialsAndTokenHandlersTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/ClientCredentialsAndTokenHandlersTests.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/ClientCredentialsAndTokenHandlersTests.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/ClientCredentialsAndTokenHandlersTests.cs index 3d91dca9..087f069a 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/ClientCredentialsAndTokenHandlersTests.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/ClientCredentialsAndTokenHandlersTests.cs @@ -1,2675 +1,2675 @@ -using System; -using System.Collections.Generic; -using System.Diagnostics; -using System.IdentityModel.Tokens.Jwt; -using System.Security.Claims; -using System.Security.Cryptography; -using System.Security.Cryptography.X509Certificates; -using System.Text.Json; -using System.Linq; -using Microsoft.AspNetCore.Http; -using Microsoft.AspNetCore.Http.Extensions; -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.Extensions.DependencyInjection; -using 
Microsoft.Extensions.Options; -using Microsoft.Extensions.Primitives; -using Microsoft.IdentityModel.Tokens; -using StellaOps.Configuration; -using StellaOps.Authority.Security; -using StellaOps.Auth.Security.Dpop; -using OpenIddict.Abstractions; -using OpenIddict.Extensions; -using OpenIddict.Server; -using OpenIddict.Server.AspNetCore; -using StellaOps.Auth.Abstractions; -using StellaOps.Authority.OpenIddict; -using StellaOps.Authority.OpenIddict.Handlers; -using StellaOps.Authority.Plugins.Abstractions; -using StellaOps.Authority.Storage.Mongo.Documents; -using StellaOps.Authority.Storage.Mongo.Sessions; -using StellaOps.Authority.Storage.Mongo.Stores; -using StellaOps.Authority.RateLimiting; -using StellaOps.Cryptography.Audit; -using Xunit; -using MongoDB.Bson; -using MongoDB.Driver; -using static StellaOps.Authority.Tests.OpenIddict.TestHelpers; - -namespace StellaOps.Authority.Tests.OpenIddict; - -public class ClientCredentialsHandlersTests -{ - private static readonly ActivitySource TestActivitySource = new("StellaOps.Authority.Tests"); - - [Fact] - public async Task ValidateClientCredentials_Rejects_WhenScopeNotAllowed() - { - var clientDocument = CreateClient( - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "jobs:read"); - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var options = TestHelpers.CreateAuthorityOptions(); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - options, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:write"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.True(context.IsRejected); - Assert.Equal(OpenIddictConstants.Errors.InvalidScope, context.Error); - Assert.Equal("Scope 'jobs:write' is not allowed for this client.", context.ErrorDescription); - } - - [Fact] - public async Task ValidateClientCredentials_Allows_WhenConfigurationMatches() - { - var clientDocument = CreateClient( - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "jobs:read jobs:trigger"); - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var options = TestHelpers.CreateAuthorityOptions(); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - options, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); - Assert.False(context.Transaction.Properties.ContainsKey(AuthorityOpenIddictConstants.ClientTenantProperty)); - Assert.Same(clientDocument, context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTransactionProperty]); - - var grantedScopes = 
Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); - Assert.Equal(new[] { "jobs:read" }, grantedScopes); - Assert.Equal(clientDocument.Plugin, context.Transaction.Properties[AuthorityOpenIddictConstants.ClientProviderTransactionProperty]); - } - - [Fact] - public async Task ValidateClientCredentials_Allows_NewIngestionScopes() - { - var clientDocument = CreateClient( - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "advisory:ingest advisory:read", - tenant: "tenant-alpha"); - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var options = TestHelpers.CreateAuthorityOptions(); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - options, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "advisory:ingest"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); - var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); - Assert.Equal(new[] { "advisory:ingest" }, grantedScopes); - } - - [Fact] - public async Task ValidateClientCredentials_RejectsAdvisoryReadWithoutAocVerify() - { - var clientDocument = CreateClient( - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "advisory:read aoc:verify", - tenant: "tenant-alpha"); - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var options = TestHelpers.CreateAuthorityOptions(); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - options, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "advisory:read"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.True(context.IsRejected); - Assert.Equal(OpenIddictConstants.Errors.InvalidScope, context.Error); - Assert.Equal("Scope 'aoc:verify' is required when requesting advisory/vex read scopes.", context.ErrorDescription); - Assert.Equal(StellaOpsScopes.AocVerify, context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty]); - } - - [Fact] - public async Task ValidateClientCredentials_RejectsSignalsScopeWithoutAocVerify() - { - var clientDocument = CreateClient( - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "signals:read signals:write signals:admin aoc:verify", - tenant: "tenant-alpha"); - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var options = TestHelpers.CreateAuthorityOptions(); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new 
TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - options, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "signals:write"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.True(context.IsRejected); - Assert.Equal(OpenIddictConstants.Errors.InvalidScope, context.Error); - Assert.Equal("Scope 'aoc:verify' is required when requesting signals scopes.", context.ErrorDescription); - Assert.Equal(StellaOpsScopes.AocVerify, context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty]); - } - - [Fact] - public async Task ValidateClientCredentials_RejectsPolicyAuthorWithoutTenant() - { - var clientDocument = CreateClient( - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "policy:author"); - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var options = TestHelpers.CreateAuthorityOptions(); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - options, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "policy:author"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.True(context.IsRejected); - Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); - Assert.Equal("Policy Studio scopes require a tenant assignment.", context.ErrorDescription); - Assert.Equal(StellaOpsScopes.PolicyAuthor, context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty]); - } - - [Fact] - public async Task ValidateClientCredentials_AllowsPolicyAuthorWithTenant() - { - var clientDocument = CreateClient( - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "policy:author", - tenant: "tenant-alpha"); - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var options = TestHelpers.CreateAuthorityOptions(); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - options, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "policy:author"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); - var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); - Assert.Equal(new[] { "policy:author" }, grantedScopes); - } - - [Fact] - public async Task ValidateClientCredentials_AllowsAdvisoryReadWithAocVerify() - { - var clientDocument = CreateClient( - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "advisory:read aoc:verify", - tenant: 
"tenant-alpha"); - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var options = TestHelpers.CreateAuthorityOptions(); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - options, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "advisory:read aoc:verify"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); - var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); - Assert.Equal(new[] { "advisory:read", "aoc:verify" }, grantedScopes); - } - - [Fact] - public async Task ValidateClientCredentials_RejectsAocVerifyWithoutTenant() - { - var clientDocument = CreateClient( - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "aoc:verify"); - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var options = TestHelpers.CreateAuthorityOptions(); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - options, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "aoc:verify"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await handler.HandleAsync(context); - - Assert.True(context.IsRejected); - Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); - Assert.Equal("Scope 'aoc:verify' requires a tenant assignment.", context.ErrorDescription); - Assert.Equal(StellaOpsScopes.AocVerify, context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty]); - } - - [Fact] - public async Task ValidateClientCredentials_RejectsEffectiveWrite_WhenServiceIdentityMissing() - { - var clientDocument = CreateClient( - clientId: "policy-engine", - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "effective:write findings:read policy:run", - tenant: "tenant-default"); - - var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); - var options = TestHelpers.CreateAuthorityOptions(); - var handler = new ValidateClientCredentialsHandler( - new TestClientStore(clientDocument), - registry, - TestActivitySource, - new TestAuthEventSink(), - new TestRateLimiterMetadataAccessor(), - TimeProvider.System, - new NoopCertificateValidator(), - new HttpContextAccessor(), - options, - NullLogger.Instance); - - Assert.True(clientDocument.Properties.ContainsKey(AuthorityClientMetadataKeys.Tenant)); - Assert.Equal("tenant-default", clientDocument.Properties[AuthorityClientMetadataKeys.Tenant]); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "effective:write"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await 
-        await handler.HandleAsync(context);
-
-        Assert.True(context.IsRejected);
-        Assert.Equal(OpenIddictConstants.Errors.UnauthorizedClient, context.Error);
-        Assert.Equal("Scope 'effective:write' is reserved for the Policy Engine service identity.", context.ErrorDescription);
-    }
-
-    [Fact]
-    public async Task ValidateClientCredentials_RejectsEffectiveWrite_WhenTenantMissing()
-    {
-        var clientDocument = CreateClient(
-            clientId: "policy-engine",
-            secret: "s3cr3t!",
-            allowedGrantTypes: "client_credentials",
-            allowedScopes: "effective:write findings:read policy:run");
-        clientDocument.Properties[AuthorityClientMetadataKeys.ServiceIdentity] = StellaOpsServiceIdentities.PolicyEngine;
-
-        var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
-        var options = TestHelpers.CreateAuthorityOptions();
-        var handler = new ValidateClientCredentialsHandler(
-            new TestClientStore(clientDocument),
-            registry,
-            TestActivitySource,
-            new TestAuthEventSink(),
-            new TestRateLimiterMetadataAccessor(),
-            TimeProvider.System,
-            new NoopCertificateValidator(),
-            new HttpContextAccessor(),
-            options,
-            NullLogger.Instance);
-
-        var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "effective:write");
-        var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction);
-
-        await handler.HandleAsync(context);
-
-        Assert.True(context.IsRejected);
-        Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error);
-        Assert.Equal("Policy Engine service identity requires a tenant assignment.", context.ErrorDescription);
-    }
-
-    [Fact]
-    public async Task ValidateClientCredentials_AllowsEffectiveWrite_ForPolicyEngineServiceIdentity()
-    {
-        var clientDocument = CreateClient(
-            clientId: "policy-engine",
-            secret: "s3cr3t!",
-            allowedGrantTypes: "client_credentials",
-            allowedScopes: "effective:write findings:read policy:run",
-            tenant: "tenant-default");
-        clientDocument.Properties[AuthorityClientMetadataKeys.ServiceIdentity] = StellaOpsServiceIdentities.PolicyEngine;
-
-        var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
-        var options = TestHelpers.CreateAuthorityOptions();
-        var handler = new ValidateClientCredentialsHandler(
-            new TestClientStore(clientDocument),
-            registry,
-            TestActivitySource,
-            new TestAuthEventSink(),
-            new TestRateLimiterMetadataAccessor(),
-            TimeProvider.System,
-            new NoopCertificateValidator(),
-            new HttpContextAccessor(),
-            options,
-            NullLogger.Instance);
-
-        var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "effective:write");
-        var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction);
-
-        await handler.HandleAsync(context);
-
-        Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}");
-        var grantedScopes = Assert.IsType<string[]>(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]);
-        Assert.Equal(new[] { "effective:write" }, grantedScopes);
-
-        var tenant = Assert.IsType<string>(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTenantProperty]);
-        Assert.Equal("tenant-default", tenant);
-    }
-
-    [Fact]
-    public async Task ValidateClientCredentials_RejectsOrchOperate_WhenTenantMissing()
-    {
-        var clientDocument = CreateClient(
-            clientId: "orch-operator",
-            secret: "s3cr3t!",
-            allowedGrantTypes: "client_credentials",
-            allowedScopes: "orch:read orch:operate");
-
-        var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
-        var options = TestHelpers.CreateAuthorityOptions();
-        var handler = new ValidateClientCredentialsHandler(
-            new TestClientStore(clientDocument),
-            registry,
-            TestActivitySource,
-            new TestAuthEventSink(),
-            new TestRateLimiterMetadataAccessor(),
-            TimeProvider.System,
-            new NoopCertificateValidator(),
-            new HttpContextAccessor(),
-            options,
-            NullLogger.Instance);
-
-        var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "orch:operate");
-        transaction.Request?.SetParameter(AuthorityOpenIddictConstants.OperatorReasonParameterName, "resume source after maintenance");
-        transaction.Request?.SetParameter(AuthorityOpenIddictConstants.OperatorTicketParameterName, "INC-2045");
-        var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction);
-
-        await handler.HandleAsync(context);
-
-        Assert.True(context.IsRejected);
-        Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error);
-        Assert.Equal("Orchestrator scopes require a tenant assignment.", context.ErrorDescription);
-    }
-
-    [Fact]
-    public async Task ValidateClientCredentials_RejectsOrchOperate_WhenReasonMissing()
-    {
-        var clientDocument = CreateClient(
-            clientId: "orch-operator",
-            secret: "s3cr3t!",
-            allowedGrantTypes: "client_credentials",
-            allowedScopes: "orch:read orch:operate",
-            tenant: "tenant-default");
-
-        var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
-        var options = TestHelpers.CreateAuthorityOptions();
-        var handler = new ValidateClientCredentialsHandler(
-            new TestClientStore(clientDocument),
-            registry,
-            TestActivitySource,
-            new TestAuthEventSink(),
-            new TestRateLimiterMetadataAccessor(),
-            TimeProvider.System,
-            new NoopCertificateValidator(),
-            new HttpContextAccessor(),
-            options,
-            NullLogger.Instance);
-
-        var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "orch:operate");
-        transaction.Request?.SetParameter(AuthorityOpenIddictConstants.OperatorTicketParameterName, "INC-2045");
-        var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction);
-
-        await handler.HandleAsync(context);
-
-        Assert.True(context.IsRejected);
-        Assert.Equal(OpenIddictConstants.Errors.InvalidRequest, context.Error);
-        Assert.Equal("Operator actions require 'operator_reason'.", context.ErrorDescription);
-    }
-
-    [Fact]
-    public async Task ValidateClientCredentials_RejectsOrchOperate_WhenTicketMissing()
-    {
-        var clientDocument = CreateClient(
-            clientId: "orch-operator",
-            secret: "s3cr3t!",
-            allowedGrantTypes: "client_credentials",
-            allowedScopes: "orch:read orch:operate",
-            tenant: "tenant-default");
-
-        var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
-        var options = TestHelpers.CreateAuthorityOptions();
-        var handler = new ValidateClientCredentialsHandler(
-            new TestClientStore(clientDocument),
-            registry,
-            TestActivitySource,
-            new TestAuthEventSink(),
-            new TestRateLimiterMetadataAccessor(),
-            TimeProvider.System,
-            new NoopCertificateValidator(),
-            new HttpContextAccessor(),
-            options,
-            NullLogger.Instance);
-
-        var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "orch:operate");
-        transaction.Request?.SetParameter(AuthorityOpenIddictConstants.OperatorReasonParameterName, "resume source after maintenance");
-        var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction);
-
-        await handler.HandleAsync(context);
-
-        Assert.True(context.IsRejected);
-        Assert.Equal(OpenIddictConstants.Errors.InvalidRequest, context.Error);
-        Assert.Equal("Operator actions require 'operator_ticket'.", context.ErrorDescription);
-    }
-
-    [Fact]
-    public async Task ValidateClientCredentials_AllowsOrchOperate_WithReasonAndTicket()
-    {
-        var clientDocument = CreateClient(
-            clientId: "orch-operator",
-            secret: "s3cr3t!",
-            allowedGrantTypes: "client_credentials",
-            allowedScopes: "orch:read orch:operate",
-            tenant: "tenant-default");
-
-        var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
-        var options = TestHelpers.CreateAuthorityOptions();
-        var handler = new ValidateClientCredentialsHandler(
-            new TestClientStore(clientDocument),
-            registry,
-            TestActivitySource,
-            new TestAuthEventSink(),
-            new TestRateLimiterMetadataAccessor(),
-            TimeProvider.System,
-            new NoopCertificateValidator(),
-            new HttpContextAccessor(),
-            options,
-            NullLogger.Instance);
-
-        var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "orch:operate");
-        transaction.Request?.SetParameter(AuthorityOpenIddictConstants.OperatorReasonParameterName, "resume source after maintenance");
-        transaction.Request?.SetParameter(AuthorityOpenIddictConstants.OperatorTicketParameterName, "INC-2045");
-        var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction);
-
-        await handler.HandleAsync(context);
-
-        Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}");
-        var grantedScopes = Assert.IsType<string[]>(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]);
-        Assert.Equal(new[] { "orch:operate" }, grantedScopes);
-        var tenant = Assert.IsType<string>(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTenantProperty]);
-        Assert.Equal("tenant-default", tenant);
-        var reason = Assert.IsType<string>(context.Transaction.Properties[AuthorityOpenIddictConstants.OperatorReasonProperty]);
-        Assert.Equal("resume source after maintenance", reason);
-        var ticket = Assert.IsType<string>(context.Transaction.Properties[AuthorityOpenIddictConstants.OperatorTicketProperty]);
-        Assert.Equal("INC-2045", ticket);
-    }
-
-    [Fact]
-    public async Task ValidateClientCredentials_RejectsExportViewer_WhenTenantMissing()
-    {
-        var clientDocument = CreateClient(
-            clientId: "export-viewer",
-            secret: "s3cr3t!",
-            allowedGrantTypes: "client_credentials",
-            allowedScopes: "export.viewer");
-
-        var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
-        var handler = new ValidateClientCredentialsHandler(
-            new TestClientStore(clientDocument),
-            registry,
-            TestActivitySource,
-            new TestAuthEventSink(),
-            new TestRateLimiterMetadataAccessor(),
-            TimeProvider.System,
-            new NoopCertificateValidator(),
-            new HttpContextAccessor(),
-            TestHelpers.CreateAuthorityOptions(),
-            NullLogger.Instance);
-
-        var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "export.viewer");
-        var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction);
-
-        await handler.HandleAsync(context);
-
-        Assert.True(context.IsRejected);
-        Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error);
-        Assert.Equal(StellaOpsScopes.ExportViewer, context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty]);
-    }
-
-    [Fact]
-    public async Task ValidateClientCredentials_AllowsExportViewer_WithTenant()
-    {
-        var clientDocument = CreateClient(
-            clientId: "export-viewer",
-            secret: "s3cr3t!",
-            allowedGrantTypes: "client_credentials",
-            allowedScopes: "export.viewer",
-            tenant: "tenant-default");
-
-        var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
-        var handler = new ValidateClientCredentialsHandler(
-            new TestClientStore(clientDocument),
-            registry,
-            TestActivitySource,
-            new TestAuthEventSink(),
-            new TestRateLimiterMetadataAccessor(),
-            TimeProvider.System,
-            new NoopCertificateValidator(),
-            new HttpContextAccessor(),
-            TestHelpers.CreateAuthorityOptions(),
-            NullLogger.Instance);
-
-        var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "export.viewer");
-        var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction);
-
-        await handler.HandleAsync(context);
-
-        Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}");
-        var grantedScopes = Assert.IsType<string[]>(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]);
-        Assert.Equal(new[] { "export.viewer" }, grantedScopes);
-        var tenant = Assert.IsType<string>(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTenantProperty]);
-        Assert.Equal("tenant-default", tenant);
-    }
-
-    [Fact]
-    public async Task ValidateClientCredentials_RejectsExportAdmin_WhenReasonMissing()
-    {
-        var clientDocument = CreateClient(
-            clientId: "export-admin",
-            secret: "s3cr3t!",
-            allowedGrantTypes: "client_credentials",
-            allowedScopes: "export.admin",
-            tenant: "tenant-default");
-
-        var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
-        var handler = new ValidateClientCredentialsHandler(
-            new TestClientStore(clientDocument),
-            registry,
-            TestActivitySource,
-            new TestAuthEventSink(),
-            new TestRateLimiterMetadataAccessor(),
-            TimeProvider.System,
-            new NoopCertificateValidator(),
-            new HttpContextAccessor(),
-            TestHelpers.CreateAuthorityOptions(),
-            NullLogger.Instance);
-
-        var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "export.admin");
-        transaction.Request?.SetParameter(AuthorityOpenIddictConstants.ExportAdminTicketParameterName, "INC-9001");
-        var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction);
-
-        await handler.HandleAsync(context);
-
-        Assert.True(context.IsRejected);
-        Assert.Equal(OpenIddictConstants.Errors.InvalidRequest, context.Error);
-        Assert.Equal("Export admin actions require 'export_reason'.", context.ErrorDescription);
-    }
-
-    [Fact]
-    public async Task ValidateClientCredentials_RejectsExportAdmin_WhenTicketMissing()
-    {
-        var clientDocument = CreateClient(
-            clientId: "export-admin",
-            secret: "s3cr3t!",
-            allowedGrantTypes: "client_credentials",
-            allowedScopes: "export.admin",
-            tenant: "tenant-default");
-
-        var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
-        var handler = new ValidateClientCredentialsHandler(
-            new TestClientStore(clientDocument),
-            registry,
-            TestActivitySource,
-            new TestAuthEventSink(),
-            new TestRateLimiterMetadataAccessor(),
-            TimeProvider.System,
-            new NoopCertificateValidator(),
-            new HttpContextAccessor(),
-            TestHelpers.CreateAuthorityOptions(),
-            NullLogger.Instance);
-
-        var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "export.admin");
-        transaction.Request?.SetParameter(AuthorityOpenIddictConstants.ExportAdminReasonParameterName, "Rotate encryption keys after incident postmortem");
-        var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction);
-
-        await handler.HandleAsync(context);
-
-        Assert.True(context.IsRejected);
-        Assert.Equal(OpenIddictConstants.Errors.InvalidRequest, context.Error);
-        Assert.Equal("Export admin actions require 'export_ticket'.", context.ErrorDescription);
-    }
-
-    [Fact]
-    public async Task ValidateClientCredentials_AllowsExportAdmin_WithReasonAndTicket()
-    {
-        var clientDocument = CreateClient(
-            clientId: "export-admin",
-            secret: "s3cr3t!",
-            allowedGrantTypes: "client_credentials",
-            allowedScopes: "export.admin",
-            tenant: "tenant-default");
-
-        var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
-        var handler = new ValidateClientCredentialsHandler(
-            new TestClientStore(clientDocument),
-            registry,
-            TestActivitySource,
-            new TestAuthEventSink(),
-            new TestRateLimiterMetadataAccessor(),
-            TimeProvider.System,
-            new NoopCertificateValidator(),
-            new HttpContextAccessor(),
-            TestHelpers.CreateAuthorityOptions(),
-            NullLogger.Instance);
-
-        var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "export.admin");
-        transaction.Request?.SetParameter(AuthorityOpenIddictConstants.ExportAdminReasonParameterName, "Rotate encryption keys after incident postmortem");
-        transaction.Request?.SetParameter(AuthorityOpenIddictConstants.ExportAdminTicketParameterName, "INC-9001");
-        var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction);
-
-        await handler.HandleAsync(context);
-
-        Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}");
-        var grantedScopes = Assert.IsType<string[]>(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]);
-        Assert.Equal(new[] { "export.admin" }, grantedScopes);
-        var tenant = Assert.IsType<string>(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTenantProperty]);
-        Assert.Equal("tenant-default", tenant);
-        var reason = Assert.IsType<string>(context.Transaction.Properties[AuthorityOpenIddictConstants.ExportAdminReasonProperty]);
-        Assert.Equal("Rotate encryption keys after incident postmortem", reason);
-        var ticket = Assert.IsType<string>(context.Transaction.Properties[AuthorityOpenIddictConstants.ExportAdminTicketProperty]);
-        Assert.Equal("INC-9001", ticket);
-    }
-
-    [Fact]
-    public async Task ValidateClientCredentials_RejectsGraphWrite_WhenServiceIdentityMissing()
-    {
-        var clientDocument = CreateClient(
-            clientId: "cartographer-service",
-            secret: "s3cr3t!",
-            allowedGrantTypes: "client_credentials",
-            allowedScopes: "graph:write graph:read",
-            tenant: "tenant-default");
-
-        var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
-        var options = TestHelpers.CreateAuthorityOptions();
-        var handler = new ValidateClientCredentialsHandler(
-            new TestClientStore(clientDocument),
-            registry,
-            TestActivitySource,
-            new TestAuthEventSink(),
-            new TestRateLimiterMetadataAccessor(),
-            TimeProvider.System,
-            new NoopCertificateValidator(),
-            new HttpContextAccessor(),
-            options,
-            NullLogger.Instance);
-
-        var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "graph:write");
-        var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction);
-
-        await handler.HandleAsync(context);
-
-        Assert.True(context.IsRejected);
-        Assert.Equal(OpenIddictConstants.Errors.UnauthorizedClient, context.Error);
-        Assert.Equal("Scope 'graph:write' is reserved for the Cartographer service identity.", context.ErrorDescription);
-    }
-
-    [Fact]
-    public async Task ValidateClientCredentials_RejectsGraphWrite_WhenServiceIdentityMismatch()
-    {
-        var clientDocument = CreateClient(
-            clientId: "cartographer-service",
-            secret: "s3cr3t!",
-            allowedGrantTypes: "client_credentials",
-            allowedScopes: "graph:write graph:read",
-            tenant: "tenant-default");
-        clientDocument.Properties[AuthorityClientMetadataKeys.ServiceIdentity] = StellaOpsServiceIdentities.PolicyEngine;
-
-        var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
-        var options = TestHelpers.CreateAuthorityOptions();
-        var handler = new ValidateClientCredentialsHandler(
-            new TestClientStore(clientDocument),
-            registry,
-            TestActivitySource,
-            new TestAuthEventSink(),
-            new TestRateLimiterMetadataAccessor(),
-            TimeProvider.System,
-            new NoopCertificateValidator(),
-            new HttpContextAccessor(),
-            options,
-            NullLogger.Instance);
-
-        var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "graph:write");
-        var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction);
-
-        await handler.HandleAsync(context);
-
-        Assert.True(context.IsRejected);
-        Assert.Equal(OpenIddictConstants.Errors.UnauthorizedClient, context.Error);
-        Assert.Equal("Scope 'graph:write' is reserved for the Cartographer service identity.", context.ErrorDescription);
-    }
-
-    [Fact]
-    public async Task ValidateClientCredentials_RejectsGraphScopes_WhenTenantMissing()
-    {
-        var clientDocument = CreateClient(
-            clientId: "graph-api",
-            secret: "s3cr3t!",
-            allowedGrantTypes: "client_credentials",
-            allowedScopes: "graph:read graph:export");
-
-        var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
-        var options = TestHelpers.CreateAuthorityOptions();
-        var handler = new ValidateClientCredentialsHandler(
-            new TestClientStore(clientDocument),
-            registry,
-            TestActivitySource,
-            new TestAuthEventSink(),
-            new TestRateLimiterMetadataAccessor(),
-            TimeProvider.System,
-            new NoopCertificateValidator(),
-            new HttpContextAccessor(),
-            options,
-            NullLogger.Instance);
-
-        var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "graph:read");
-        var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction);
-
-        await handler.HandleAsync(context);
-
-        Assert.True(context.IsRejected);
-        Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error);
-        Assert.Equal("Graph scopes require a tenant assignment.", context.ErrorDescription);
-    }
-
-    [Fact]
-    public async Task ValidateClientCredentials_AllowsGraphRead_WithTenant()
-    {
-        var clientDocument = CreateClient(
-            clientId: "graph-api",
-            secret: "s3cr3t!",
-            allowedGrantTypes: "client_credentials",
-            allowedScopes: "graph:read graph:export",
-            tenant: "tenant-default");
-
-        var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
-        var options = TestHelpers.CreateAuthorityOptions();
-        var handler = new ValidateClientCredentialsHandler(
-            new TestClientStore(clientDocument),
-            registry,
-            TestActivitySource,
-            new TestAuthEventSink(),
-            new TestRateLimiterMetadataAccessor(),
-            TimeProvider.System,
-            new NoopCertificateValidator(),
-            new HttpContextAccessor(),
-            options,
-            NullLogger.Instance);
-
-        var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "graph:read");
-        var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction);
-
-        await handler.HandleAsync(context);
-
-        Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}");
-        var grantedScopes = Assert.IsType<string[]>(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]);
-        Assert.Equal(new[] { "graph:read" }, grantedScopes);
-        var tenant = Assert.IsType<string>(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTenantProperty]);
-        Assert.Equal("tenant-default", tenant);
-    }
-
-    [Fact]
-    public async Task ValidateClientCredentials_RejectsOrchRead_WhenTenantMissing()
-    {
-        var clientDocument = CreateClient(
-            clientId: "orch-dashboard",
-            secret: "s3cr3t!",
-            allowedGrantTypes: "client_credentials",
-            allowedScopes: "orch:read");
-
-        var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
-        var options = TestHelpers.CreateAuthorityOptions();
-        var handler = new ValidateClientCredentialsHandler(
-            new TestClientStore(clientDocument),
-            registry,
-            TestActivitySource,
-            new TestAuthEventSink(),
-            new TestRateLimiterMetadataAccessor(),
-            TimeProvider.System,
-            new NoopCertificateValidator(),
-            new HttpContextAccessor(),
-            options,
-            NullLogger.Instance);
-
-        var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "orch:read");
-        var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction);
-
-        await handler.HandleAsync(context);
-
-        Assert.True(context.IsRejected);
-        Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error);
-        Assert.Equal("Orchestrator scopes require a tenant assignment.", context.ErrorDescription);
-    }
-
-    [Fact]
-    public async Task ValidateClientCredentials_AllowsOrchRead_WithTenant()
-    {
-        var clientDocument = CreateClient(
-            clientId: "orch-dashboard",
-            secret: "s3cr3t!",
-            allowedGrantTypes: "client_credentials",
-            allowedScopes: "orch:read",
-            tenant: "tenant-default");
-
-        var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
-        var options = TestHelpers.CreateAuthorityOptions();
-        var handler = new ValidateClientCredentialsHandler(
-            new TestClientStore(clientDocument),
-            registry,
-            TestActivitySource,
-            new TestAuthEventSink(),
-            new TestRateLimiterMetadataAccessor(),
-            TimeProvider.System,
-            new NoopCertificateValidator(),
-            new HttpContextAccessor(),
-            options,
-            NullLogger.Instance);
-
-        var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "orch:read");
-        var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction);
-
-        await handler.HandleAsync(context);
-
-        Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}");
-        var grantedScopes = Assert.IsType<string[]>(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]);
-        Assert.Equal(new[] { "orch:read" }, grantedScopes);
-        var tenant = Assert.IsType<string>(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTenantProperty]);
-        Assert.Equal("tenant-default", tenant);
-    }
-
-    [Fact]
-    public async Task ValidateClientCredentials_RejectsAdvisoryScopes_WhenTenantMissing()
-    {
-        var clientDocument = CreateClient(
-            clientId: "concelier-ingestor",
-            secret: "s3cr3t!",
-            allowedGrantTypes: "client_credentials",
-            allowedScopes: "advisory:ingest advisory:read");
-
-        var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
-        var options = TestHelpers.CreateAuthorityOptions();
-        var handler = new ValidateClientCredentialsHandler(
-            new TestClientStore(clientDocument),
-            registry,
-            TestActivitySource,
-            new TestAuthEventSink(),
-            new TestRateLimiterMetadataAccessor(),
-            TimeProvider.System,
-            new NoopCertificateValidator(),
-            new HttpContextAccessor(),
-            options,
-            NullLogger.Instance);
-
-        var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "advisory:ingest");
-        var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction);
-
-        await handler.HandleAsync(context);
-
-        Assert.True(context.IsRejected);
-        Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error);
-        Assert.Equal("Advisory scopes require a tenant assignment.", context.ErrorDescription);
-    }
-
-    [Fact]
-    public async Task ValidateClientCredentials_RejectsVexScopes_WhenTenantMissing()
-    {
-        var clientDocument = CreateClient(
-            clientId: "excitor-ingestor",
-            secret: "s3cr3t!",
-            allowedGrantTypes: "client_credentials",
-            allowedScopes: "vex:ingest vex:read");
-
-        var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
-        var options = TestHelpers.CreateAuthorityOptions();
-        var handler = new ValidateClientCredentialsHandler(
-            new TestClientStore(clientDocument),
-            registry,
-            TestActivitySource,
-            new TestAuthEventSink(),
-            new TestRateLimiterMetadataAccessor(),
-            TimeProvider.System,
-            new NoopCertificateValidator(),
-            new HttpContextAccessor(),
-            options,
-            NullLogger.Instance);
-
-        var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "vex:read");
-        var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction);
-
-        await handler.HandleAsync(context);
-
-        Assert.True(context.IsRejected);
-        Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error);
-        Assert.Equal("VEX scopes require a tenant assignment.", context.ErrorDescription);
-    }
-
-    [Fact]
-    public async Task ValidateClientCredentials_AllowsAdvisoryScopes_WithTenant()
-    {
-        var clientDocument = CreateClient(
-            clientId: "concelier-ingestor",
-            secret: "s3cr3t!",
-            allowedGrantTypes: "client_credentials",
-            allowedScopes: "advisory:ingest advisory:read aoc:verify",
-            tenant: "tenant-default");
-
-        var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
-        var options = TestHelpers.CreateAuthorityOptions();
-        var handler = new ValidateClientCredentialsHandler(
-            new TestClientStore(clientDocument),
-            registry,
-            TestActivitySource,
-            new TestAuthEventSink(),
-            new TestRateLimiterMetadataAccessor(),
-            TimeProvider.System,
-            new NoopCertificateValidator(),
-            new HttpContextAccessor(),
-            options,
-            NullLogger.Instance);
-
-        var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "advisory:read aoc:verify");
-        var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction);
-
-        await handler.HandleAsync(context);
-
-        Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}");
-        var grantedScopes = Assert.IsType<string[]>(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]);
-        Assert.Equal(new[] { "advisory:read", "aoc:verify" }, grantedScopes);
-    }
-
-    [Fact]
-    public async Task ValidateClientCredentials_AllowsGraphWrite_ForCartographerServiceIdentity()
-    {
-        var clientDocument = CreateClient(
-            clientId: "cartographer-service",
-            secret: "s3cr3t!",
-            allowedGrantTypes: "client_credentials",
-            allowedScopes: "graph:write graph:read",
-            tenant: "tenant-default");
-        clientDocument.Properties[AuthorityClientMetadataKeys.ServiceIdentity] = StellaOpsServiceIdentities.Cartographer;
-
-        var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
-        var options = TestHelpers.CreateAuthorityOptions();
-        var handler = new ValidateClientCredentialsHandler(
-            new TestClientStore(clientDocument),
-            registry,
-            TestActivitySource,
-            new TestAuthEventSink(),
-            new TestRateLimiterMetadataAccessor(),
-            TimeProvider.System,
-            new NoopCertificateValidator(),
-            new HttpContextAccessor(),
-            options,
-            NullLogger.Instance);
-
-        var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "graph:write");
-        var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction);
-
-        await handler.HandleAsync(context);
-
-        Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}");
-        var grantedScopes = Assert.IsType<string[]>(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]);
-        Assert.Equal(new[] { "graph:write" }, grantedScopes);
-        var tenant = Assert.IsType<string>(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTenantProperty]);
-        Assert.Equal("tenant-default", tenant);
-    }
-
-    [Fact]
-    public async Task ValidateClientCredentials_EmitsTamperAuditEvent_WhenUnexpectedParametersPresent()
-    {
-        var clientDocument = CreateClient(
-            secret: "s3cr3t!",
-            allowedGrantTypes: "client_credentials",
-            allowedScopes: "jobs:read");
-
-        var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
-        var sink = new TestAuthEventSink();
-        var options = TestHelpers.CreateAuthorityOptions();
-        var handler = new ValidateClientCredentialsHandler(
-            new TestClientStore(clientDocument),
-            registry,
-            TestActivitySource,
-            sink,
-            new TestRateLimiterMetadataAccessor(),
-            TimeProvider.System,
-            new NoopCertificateValidator(),
-            new HttpContextAccessor(),
-            options,
-            NullLogger.Instance);
-
-        var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read");
-        transaction.Request?.SetParameter("unexpected_param", "value");
-
-        await handler.HandleAsync(new OpenIddictServerEvents.ValidateTokenRequestContext(transaction));
-
-        var tamperEvent = Assert.Single(sink.Events, record => record.EventType == "authority.token.tamper");
-        Assert.Contains(tamperEvent.Properties, property =>
-            string.Equals(property.Name, "request.unexpected_parameter", StringComparison.OrdinalIgnoreCase) &&
-            string.Equals(property.Value.Value, "unexpected_param", StringComparison.OrdinalIgnoreCase));
-    }
-
-    [Fact]
-    public async Task ValidateDpopProof_AllowsSenderConstrainedClient()
-    {
-        var options = TestHelpers.CreateAuthorityOptions(opts =>
-        {
-            opts.Security.SenderConstraints.Dpop.Enabled = true;
-            opts.Security.SenderConstraints.Dpop.Nonce.Enabled = false;
-        });
-
-        var clientDocument = CreateClient(
-            secret: "s3cr3t!",
-            allowedGrantTypes: "client_credentials",
"jobs:read"); - clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Dpop; - clientDocument.Properties[AuthorityClientMetadataKeys.SenderConstraint] = AuthoritySenderConstraintKinds.Dpop; - - using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256); - var securityKey = new ECDsaSecurityKey(ecdsa) - { - KeyId = Guid.NewGuid().ToString("N") - }; - var jwk = JsonWebKeyConverter.ConvertFromECDsaSecurityKey(securityKey); - var expectedThumbprint = ConvertThumbprintToString(jwk.ComputeJwkThumbprint()); - - var clientStore = new TestClientStore(clientDocument); - var auditSink = new TestAuthEventSink(); - var rateMetadata = new TestRateLimiterMetadataAccessor(); - - var dpopValidator = new DpopProofValidator( - Options.Create(new DpopValidationOptions()), - new InMemoryDpopReplayCache(TimeProvider.System), - TimeProvider.System, - NullLogger.Instance); - - var nonceStore = new InMemoryDpopNonceStore(TimeProvider.System, NullLogger.Instance); - - var dpopHandler = new ValidateDpopProofHandler( - options, - clientStore, - dpopValidator, - nonceStore, - rateMetadata, - auditSink, - TimeProvider.System, - TestActivitySource, - NullLogger.Instance); - - var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read"); - transaction.Options = new OpenIddictServerOptions(); - - var httpContext = new DefaultHttpContext(); - httpContext.Request.Method = "POST"; - httpContext.Request.Scheme = "https"; - httpContext.Request.Host = new HostString("authority.test"); - httpContext.Request.Path = "/token"; - - var now = TimeProvider.System.GetUtcNow(); - var proof = TestHelpers.CreateDpopProof(securityKey, httpContext.Request.Method, httpContext.Request.GetDisplayUrl(), now.ToUnixTimeSeconds()); - httpContext.Request.Headers["DPoP"] = proof; - - transaction.Properties[typeof(HttpContext).FullName!] 
-        transaction.Properties[typeof(HttpContext).FullName!] = httpContext;
-
-        var validateContext = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction);
-        await dpopHandler.HandleAsync(validateContext);
-
-        Assert.False(validateContext.IsRejected);
-
-        var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
-        var validateHandler = new ValidateClientCredentialsHandler(
-            clientStore,
-            registry,
-            TestActivitySource,
-            auditSink,
-            rateMetadata,
-            TimeProvider.System,
-            new NoopCertificateValidator(),
-            new HttpContextAccessor(),
-            options,
-            NullLogger.Instance);
-
-        await validateHandler.HandleAsync(validateContext);
-        Assert.False(validateContext.IsRejected);
-
-        var tokenStore = new TestTokenStore();
-        var sessionAccessor = new NullMongoSessionAccessor();
-        var handleHandler = new HandleClientCredentialsHandler(
-            registry,
-            tokenStore,
-            sessionAccessor,
-            rateMetadata,
-            TimeProvider.System,
-            TestActivitySource,
-            NullLogger.Instance);
-
-        var handleContext = new OpenIddictServerEvents.HandleTokenRequestContext(transaction);
-        await handleHandler.HandleAsync(handleContext);
-        Assert.True(handleContext.IsRequestHandled);
-
-        var persistHandler = new PersistTokensHandler(
-            tokenStore,
-            sessionAccessor,
-            TimeProvider.System,
-            TestActivitySource,
-            NullLogger.Instance);
-
-        var signInContext = new OpenIddictServerEvents.ProcessSignInContext(transaction)
-        {
-            Principal = handleContext.Principal,
-            AccessTokenPrincipal = handleContext.Principal
-        };
-
-        await persistHandler.HandleAsync(signInContext);
-
-        var confirmationClaim = handleContext.Principal?.GetClaim(AuthorityOpenIddictConstants.ConfirmationClaimType);
-        Assert.False(string.IsNullOrWhiteSpace(confirmationClaim));
-
-        using (var confirmationJson = JsonDocument.Parse(confirmationClaim!))
-        {
-            Assert.Equal(expectedThumbprint, confirmationJson.RootElement.GetProperty("jkt").GetString());
-        }
-
-        Assert.NotNull(tokenStore.Inserted);
-        Assert.Equal(AuthoritySenderConstraintKinds.Dpop, tokenStore.Inserted!.SenderConstraint);
-        Assert.Equal(expectedThumbprint, tokenStore.Inserted!.SenderKeyThumbprint);
-    }
-
-    [Fact]
-    public async Task ValidateDpopProof_IssuesNonceChallenge_WhenNonceMissing()
-    {
-        var options = new StellaOpsAuthorityOptions
-        {
-            Issuer = new Uri("https://authority.test")
-        };
-        options.Security.SenderConstraints.Dpop.Enabled = true;
-        options.Security.SenderConstraints.Dpop.Nonce.Enabled = true;
-        options.Security.SenderConstraints.Dpop.Nonce.RequiredAudiences.Clear();
-        options.Security.SenderConstraints.Dpop.Nonce.RequiredAudiences.Add("signer");
-        options.Signing.ActiveKeyId = "test-key";
-        options.Signing.KeyPath = "/tmp/test-key.pem";
-        options.Storage.ConnectionString = "mongodb://localhost/test";
-        Assert.Contains("signer", options.Security.SenderConstraints.Dpop.Nonce.RequiredAudiences);
-
-        var clientDocument = CreateClient(
-            secret: "s3cr3t!",
-            allowedGrantTypes: "client_credentials",
-            allowedScopes: "jobs:read",
-            allowedAudiences: "signer");
-        clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Dpop;
-        clientDocument.Properties[AuthorityClientMetadataKeys.SenderConstraint] = AuthoritySenderConstraintKinds.Dpop;
-
-        using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
-        var securityKey = new ECDsaSecurityKey(ecdsa)
-        {
-            KeyId = Guid.NewGuid().ToString("N")
-        };
-
-        var clientStore = new TestClientStore(clientDocument);
-        var auditSink = new TestAuthEventSink();
-        var rateMetadata = new TestRateLimiterMetadataAccessor();
-
-        var dpopValidator = new DpopProofValidator(
-            Options.Create(new DpopValidationOptions()),
-            new InMemoryDpopReplayCache(TimeProvider.System),
-            TimeProvider.System,
-            NullLogger.Instance);
-
-        var nonceStore = new InMemoryDpopNonceStore(TimeProvider.System, NullLogger.Instance);
-
-        var dpopHandler = new ValidateDpopProofHandler(
-            options,
-            clientStore,
-            dpopValidator,
-            nonceStore,
-            rateMetadata,
-            auditSink,
-            TimeProvider.System,
-            TestActivitySource,
-            NullLogger.Instance);
-
-        var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read");
-        transaction.Options = new OpenIddictServerOptions();
-
-        var httpContext = new DefaultHttpContext();
-        httpContext.Request.Method = "POST";
-        httpContext.Request.Scheme = "https";
-        httpContext.Request.Host = new HostString("authority.test");
-        httpContext.Request.Path = "/token";
-
-        var now = TimeProvider.System.GetUtcNow();
-        var proof = TestHelpers.CreateDpopProof(securityKey, httpContext.Request.Method, httpContext.Request.GetDisplayUrl(), now.ToUnixTimeSeconds());
-        httpContext.Request.Headers["DPoP"] = proof;
-
-        transaction.Properties[typeof(HttpContext).FullName!] = httpContext;
-
-        var validateContext = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction);
-        await dpopHandler.HandleAsync(validateContext);
-
-        Assert.True(validateContext.IsRejected);
-        Assert.Equal(OpenIddictConstants.Errors.InvalidClient, validateContext.Error);
-        var authenticateHeader = Assert.Single(httpContext.Response.Headers.Select(header => header)
-            .Where(header => string.Equals(header.Key, "WWW-Authenticate", StringComparison.OrdinalIgnoreCase))).Value;
-        Assert.Contains("use_dpop_nonce", authenticateHeader.ToString());
-        Assert.True(httpContext.Response.Headers.TryGetValue("DPoP-Nonce", out var nonceValues));
-        Assert.False(StringValues.IsNullOrEmpty(nonceValues));
-        Assert.Contains(auditSink.Events, record => record.EventType == "authority.dpop.proof.challenge");
-    }
-
-    [Fact]
-    public async Task ValidateClientCredentials_AllowsMtlsClient_WithValidCertificate()
-    {
-        var options = new StellaOpsAuthorityOptions
-        {
-            Issuer = new Uri("https://authority.test")
-        };
-        options.Security.SenderConstraints.Mtls.Enabled = true;
-        options.Security.SenderConstraints.Mtls.RequireChainValidation = false;
-        options.Security.SenderConstraints.Mtls.AllowedSanTypes.Clear();
-        options.Signing.ActiveKeyId = "test-key";
-        options.Signing.KeyPath = "/tmp/test-key.pem";
-        options.Storage.ConnectionString = "mongodb://localhost/test";
-
-        var clientDocument = CreateClient(
-            secret: "s3cr3t!",
-            allowedGrantTypes: "client_credentials",
-            allowedScopes: "jobs:read");
-        clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Mtls;
-        clientDocument.Properties[AuthorityClientMetadataKeys.SenderConstraint] = AuthoritySenderConstraintKinds.Mtls;
-
-        using var rsa = RSA.Create(2048);
-        var certificateRequest = new CertificateRequest("CN=mtls-client", rsa, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1);
-        using var certificate = certificateRequest.CreateSelfSigned(DateTimeOffset.UtcNow.AddMinutes(-5), DateTimeOffset.UtcNow.AddHours(1));
-        var hexThumbprint = Convert.ToHexString(certificate.GetCertHash(HashAlgorithmName.SHA256));
-        clientDocument.CertificateBindings.Add(new AuthorityClientCertificateBinding
-        {
-            Thumbprint = hexThumbprint
-        });
-
-        var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
-        var auditSink = new TestAuthEventSink();
-        var metadataAccessor = new TestRateLimiterMetadataAccessor();
-        var httpContextAccessor = new HttpContextAccessor { HttpContext = new DefaultHttpContext() };
-        httpContextAccessor.HttpContext!.Connection.ClientCertificate = certificate;
-
-        var validator = new AuthorityClientCertificateValidator(options, TimeProvider.System, NullLogger.Instance);
-
-        var handler = new ValidateClientCredentialsHandler(
-            new TestClientStore(clientDocument),
-            registry,
-            TestActivitySource,
-            auditSink,
-            metadataAccessor,
-            TimeProvider.System,
-            validator,
-            httpContextAccessor,
-            options,
-            NullLogger.Instance);
-
-        var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read");
-        var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction);
-
-        await handler.HandleAsync(context);
-
-        Assert.False(context.IsRejected, context.ErrorDescription ?? context.Error);
-        Assert.Equal(AuthoritySenderConstraintKinds.Mtls, context.Transaction.Properties[AuthorityOpenIddictConstants.SenderConstraintProperty]);
-
-        var expectedBase64 = Base64UrlEncoder.Encode(certificate.GetCertHash(HashAlgorithmName.SHA256));
-        Assert.Equal(expectedBase64, context.Transaction.Properties[AuthorityOpenIddictConstants.MtlsCertificateThumbprintProperty]);
-    }
-
-    [Fact]
-    public async Task ValidateClientCredentials_RejectsMtlsClient_WhenCertificateMissing()
-    {
-        var options = new StellaOpsAuthorityOptions
-        {
-            Issuer = new Uri("https://authority.test")
-        };
-        options.Security.SenderConstraints.Mtls.Enabled = true;
-        options.Signing.ActiveKeyId = "test-key";
-        options.Signing.KeyPath = "/tmp/test-key.pem";
-        options.Storage.ConnectionString = "mongodb://localhost/test";
-
-        var clientDocument = CreateClient(
-            secret: "s3cr3t!",
-            allowedGrantTypes: "client_credentials",
-            allowedScopes: "jobs:read");
-        clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Mtls;
-        clientDocument.Properties[AuthorityClientMetadataKeys.SenderConstraint] = AuthoritySenderConstraintKinds.Mtls;
-
-        var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
-        var httpContextAccessor = new HttpContextAccessor { HttpContext = new DefaultHttpContext() };
-        var validator = new AuthorityClientCertificateValidator(options, TimeProvider.System, NullLogger.Instance);
-
-        var handler = new ValidateClientCredentialsHandler(
-            new TestClientStore(clientDocument),
-            registry,
-            TestActivitySource,
-            new TestAuthEventSink(),
-            new TestRateLimiterMetadataAccessor(),
-            TimeProvider.System,
-            validator,
-            httpContextAccessor,
-            options,
-            NullLogger.Instance);
-
-        var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read");
-        var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction);
-
-        await handler.HandleAsync(context);
-
-        Assert.True(context.IsRejected);
-        Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error);
-    }
-
-    [Fact]
-    public async Task ValidateClientCredentials_Rejects_WhenAudienceRequiresMtlsButClientConfiguredForDpop()
-    {
-        var options = TestHelpers.CreateAuthorityOptions(opts =>
-        {
-            opts.Security.SenderConstraints.Mtls.Enabled = true;
-            opts.Security.SenderConstraints.Mtls.EnforceForAudiences.Clear();
-            opts.Security.SenderConstraints.Mtls.EnforceForAudiences.Add("signer");
-        });
-
-        var clientDocument = CreateClient(
-            secret: "s3cr3t!",
-            allowedGrantTypes: "client_credentials",
-            allowedScopes: "jobs:read",
-            allowedAudiences: "signer");
-        clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Dpop;
-        clientDocument.Properties[AuthorityClientMetadataKeys.SenderConstraint] = AuthoritySenderConstraintKinds.Dpop;
-
-        var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
-        var handler = new ValidateClientCredentialsHandler(
-            new TestClientStore(clientDocument),
-            registry,
-            TestActivitySource,
-            new TestAuthEventSink(),
-            new TestRateLimiterMetadataAccessor(),
-            TimeProvider.System,
-            new NoopCertificateValidator(),
-            new HttpContextAccessor(),
-            options,
-            NullLogger.Instance);
-
-        var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read");
-        var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction);
-
-        await handler.HandleAsync(context);
-
-        Assert.True(context.IsRejected);
-        Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error);
-        Assert.Equal("Requested audiences require mutual TLS sender constraint.", context.ErrorDescription);
-    }
-
-    [Fact]
-    public async Task ValidateClientCredentials_RequiresMtlsWhenAudienceMatchesEnforcement()
-    {
-        var options = TestHelpers.CreateAuthorityOptions(opts =>
-        {
-            opts.Security.SenderConstraints.Mtls.Enabled = true;
-            opts.Security.SenderConstraints.Mtls.EnforceForAudiences.Clear();
-            opts.Security.SenderConstraints.Mtls.EnforceForAudiences.Add("signer");
-        });
-
-        var clientDocument = CreateClient(
-            secret: "s3cr3t!",
-            allowedGrantTypes: "client_credentials",
-            allowedScopes: "jobs:read",
-            allowedAudiences: "signer");
-        clientDocument.CertificateBindings.Add(new AuthorityClientCertificateBinding
-        {
-            Thumbprint = "DEADBEEF"
-        });
-
-        var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument));
-        var certificateValidator = new RecordingCertificateValidator();
-        var httpContextAccessor = new HttpContextAccessor { HttpContext = new DefaultHttpContext() };
-
-        var handler = new ValidateClientCredentialsHandler(
-            new TestClientStore(clientDocument),
-            registry,
-            TestActivitySource,
-            new TestAuthEventSink(),
-            new TestRateLimiterMetadataAccessor(),
-            TimeProvider.System,
-            certificateValidator,
-            httpContextAccessor,
-            options,
-            NullLogger.Instance);
-
-        var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read");
-        var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction);
-
-        await handler.HandleAsync(context);
-
-        Assert.True(context.IsRejected);
-        Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error);
-        Assert.Equal("client_certificate_required", context.ErrorDescription);
-        Assert.True(certificateValidator.Invoked);
-    }
-
-    [Fact]
-    public async Task HandleClientCredentials_PersistsTokenAndEnrichesClaims()
-    {
-        var clientDocument = CreateClient(
-            secret: null,
-            clientType: "public",
-            allowedGrantTypes: "client_credentials",
-            allowedScopes: "jobs:trigger",
-            allowedAudiences: "signer",
-            tenant: "Tenant-Alpha");
-
-        var descriptor = CreateDescriptor(clientDocument);
-        var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: descriptor);
-        var tokenStore = new TestTokenStore();
-        var sessionAccessor = new NullMongoSessionAccessor();
-        var authSink = new TestAuthEventSink();
-        var metadataAccessor = new TestRateLimiterMetadataAccessor();
-        var options = TestHelpers.CreateAuthorityOptions();
-        var validateHandler = new ValidateClientCredentialsHandler(
-            new TestClientStore(clientDocument),
-            registry,
-            TestActivitySource,
-            authSink,
-            metadataAccessor,
-            TimeProvider.System,
-            new NoopCertificateValidator(),
-            new HttpContextAccessor(),
-            options,
-            NullLogger.Instance);
-
-        var transaction = CreateTokenTransaction(clientDocument.ClientId, secret: null, scope: "jobs:trigger");
-        transaction.Options.AccessTokenLifetime = TimeSpan.FromMinutes(30);
-
-        var validateContext = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction);
-        await validateHandler.HandleAsync(validateContext);
-        Assert.False(validateContext.IsRejected);
-
-        var handler = new HandleClientCredentialsHandler(
-            registry,
-            tokenStore,
-            sessionAccessor,
-            metadataAccessor,
-            TimeProvider.System,
-            TestActivitySource,
-            NullLogger.Instance);
-        var persistHandler = new PersistTokensHandler(tokenStore, sessionAccessor, TimeProvider.System, TestActivitySource, NullLogger.Instance);
-
-        var context = new OpenIddictServerEvents.HandleTokenRequestContext(transaction);
-
-        await handler.HandleAsync(context);
-
-        Assert.True(context.IsRequestHandled);
-        Assert.NotNull(context.Principal);
-        Assert.Contains("signer", context.Principal!.GetAudiences());
-
-        Assert.Contains(authSink.Events, record => record.EventType == "authority.client_credentials.grant" && record.Outcome == AuthEventOutcome.Success);
-
-        var identityProviderClaim = context.Principal?.GetClaim(StellaOpsClaimTypes.IdentityProvider);
-        Assert.Equal(clientDocument.Plugin, identityProviderClaim);
-
-        var principal = context.Principal ?? throw new InvalidOperationException("Principal missing");
-        Assert.Equal("tenant-alpha", principal.FindFirstValue(StellaOpsClaimTypes.Tenant));
-        var tokenId = principal.GetClaim(OpenIddictConstants.Claims.JwtId);
-        Assert.False(string.IsNullOrWhiteSpace(tokenId));
-
-        var signInContext = new OpenIddictServerEvents.ProcessSignInContext(transaction)
-        {
-            Principal = principal,
-            AccessTokenPrincipal = principal
-        };
-
-        await persistHandler.HandleAsync(signInContext);
-
-        var persisted = Assert.IsType<AuthorityTokenDocument>(tokenStore.Inserted);
-        Assert.Equal(tokenId, persisted.TokenId);
-        Assert.Equal(clientDocument.ClientId, persisted.ClientId);
-        Assert.Equal("valid", persisted.Status);
-        Assert.Equal("tenant-alpha", persisted.Tenant);
-        Assert.Equal(new[] { "jobs:trigger" }, persisted.Scope);
-    }
-}
-
-public class TokenValidationHandlersTests
-{
-    private static readonly ActivitySource TestActivitySource = new("StellaOps.Authority.Tests.TokenValidation");
-
-    [Fact]
-    public async Task ValidateAccessTokenHandler_Rejects_WhenTokenRevoked()
-    {
-        var tokenStore = new TestTokenStore();
-        tokenStore.Inserted = new AuthorityTokenDocument
-        {
-            TokenId = "token-1",
-            Status = "revoked",
-            ClientId = "concelier"
-        };
-
-        var metadataAccessor = new TestRateLimiterMetadataAccessor();
-        var auditSink = new TestAuthEventSink();
-        var sessionAccessor = new NullMongoSessionAccessor();
-        var handler = new ValidateAccessTokenHandler(
-            tokenStore,
-            sessionAccessor,
-            new TestClientStore(CreateClient()),
-            CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(CreateClient())),
-            metadataAccessor,
-            auditSink,
-            TimeProvider.System,
-            TestActivitySource,
-            NullLogger.Instance);
-
-        var transaction = new OpenIddictServerTransaction
-        {
-            Options = new OpenIddictServerOptions(),
-            EndpointType = OpenIddictServerEndpointType.Token,
-            Request = new OpenIddictRequest()
-        };
-
-        var principal = CreatePrincipal("concelier", "token-1", "standard");
-        var context = new OpenIddictServerEvents.ValidateTokenContext(transaction)
-        {
-            Principal = principal,
-            TokenId = "token-1"
-        };
-
-        await handler.HandleAsync(context);
-
-        Assert.True(context.IsRejected);
-        Assert.Equal(OpenIddictConstants.Errors.InvalidToken, context.Error);
-    }
-
-    [Fact]
-    public async Task ValidateAccessTokenHandler_AddsTenantClaim_FromTokenDocument()
-    {
-        var clientDocument = CreateClient(tenant: "tenant-alpha");
-        var tokenStore = new TestTokenStore
-        {
-            Inserted = new AuthorityTokenDocument
-            {
-                TokenId = "token-tenant",
-                Status = "valid",
-                ClientId = clientDocument.ClientId,
-                Tenant = "tenant-alpha"
-            }
-        };
-
-        var metadataAccessor = new TestRateLimiterMetadataAccessor();
-        var auditSink = new TestAuthEventSink();
-        var sessionAccessor = new NullMongoSessionAccessor();
-        var handler = new ValidateAccessTokenHandler(
-            tokenStore,
-            sessionAccessor,
-            new TestClientStore(clientDocument),
-            CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)),
-            metadataAccessor,
-            auditSink,
-            TimeProvider.System,
-            TestActivitySource,
-            NullLogger.Instance);
-
-        var transaction = new OpenIddictServerTransaction
-        {
-            Options = new OpenIddictServerOptions(),
-            EndpointType = OpenIddictServerEndpointType.Token,
-            Request = new OpenIddictRequest()
-        };
-
-        var principal = CreatePrincipal(clientDocument.ClientId, "token-tenant", clientDocument.Plugin);
-        var context = new OpenIddictServerEvents.ValidateTokenContext(transaction)
-        {
-            Principal = principal,
-            TokenId = "token-tenant"
-        };
-
-        await handler.HandleAsync(context);
-
-        Assert.False(context.IsRejected);
-        Assert.Equal("tenant-alpha", principal.FindFirstValue(StellaOpsClaimTypes.Tenant));
-        Assert.Equal("tenant-alpha", metadataAccessor.GetMetadata()?.Tenant);
-        Assert.Equal(StellaOpsTenancyDefaults.AnyProject, principal.FindFirstValue(StellaOpsClaimTypes.Project));
-        Assert.Equal(StellaOpsTenancyDefaults.AnyProject, metadataAccessor.GetMetadata()?.Project);
-        Assert.Equal(StellaOpsTenancyDefaults.AnyProject, principal.FindFirstValue(StellaOpsClaimTypes.Project));
-        Assert.Equal(StellaOpsTenancyDefaults.AnyProject, metadataAccessor.GetMetadata()?.Project);
-    }
-
-    [Fact]
-    public async Task ValidateAccessTokenHandler_Rejects_WhenTenantDiffersFromToken()
-    {
-        var clientDocument = CreateClient(tenant: "tenant-alpha");
-        var tokenStore = new TestTokenStore
-        {
-            Inserted = new AuthorityTokenDocument
-            {
-                TokenId = "token-tenant",
-                Status = "valid",
-                ClientId = clientDocument.ClientId,
-                Tenant = "tenant-alpha"
-            }
-        };
-
-        var metadataAccessor = new TestRateLimiterMetadataAccessor();
-        var auditSink = new TestAuthEventSink();
-        var sessionAccessor = new NullMongoSessionAccessor();
-        var handler = new ValidateAccessTokenHandler(
-            tokenStore,
-            sessionAccessor,
-            new TestClientStore(clientDocument),
-            CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)),
-            metadataAccessor,
-            auditSink,
-            TimeProvider.System,
-            TestActivitySource,
-            NullLogger.Instance);
-
-        var transaction = new OpenIddictServerTransaction
-        {
-            Options = new OpenIddictServerOptions(),
-            EndpointType = OpenIddictServerEndpointType.Token,
-            Request = new OpenIddictRequest()
-        };
-
-        var principal = CreatePrincipal(clientDocument.ClientId, "token-tenant", clientDocument.Plugin);
-        principal.Identities.First().AddClaim(new Claim(StellaOpsClaimTypes.Tenant, "tenant-beta"));
-        var context = new OpenIddictServerEvents.ValidateTokenContext(transaction)
-        {
-            Principal = principal,
-            TokenId = "token-tenant"
-        };
-
-        await handler.HandleAsync(context);
-
-        Assert.True(context.IsRejected);
-        Assert.Equal(OpenIddictConstants.Errors.InvalidToken, context.Error);
-        Assert.Equal("The token tenant does not match the issued tenant.", context.ErrorDescription);
-    }
-
-    [Fact]
-    public async Task ValidateAccessTokenHandler_AssignsTenant_FromClientWhenTokenMissing()
-    {
-        var clientDocument = CreateClient(tenant: "tenant-alpha");
-        var tokenStore = new TestTokenStore
-        {
-            Inserted = new AuthorityTokenDocument
-            {
-                TokenId = "token-tenant",
-                Status = "valid",
-                ClientId = clientDocument.ClientId
-            }
-        };
-
-        var metadataAccessor = new TestRateLimiterMetadataAccessor();
-        var auditSink = new TestAuthEventSink();
-        var sessionAccessor = new NullMongoSessionAccessor();
-        var handler = new ValidateAccessTokenHandler(
-            tokenStore,
-            sessionAccessor,
-            new TestClientStore(clientDocument),
-            CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)),
-            metadataAccessor,
-            auditSink,
-            TimeProvider.System,
-            TestActivitySource,
-            NullLogger.Instance);
-
-        var transaction = new OpenIddictServerTransaction
-        {
-            Options = new OpenIddictServerOptions(),
-            EndpointType = OpenIddictServerEndpointType.Token,
-            Request = new OpenIddictRequest()
-        };
-
-        var principal = CreatePrincipal(clientDocument.ClientId, "token-tenant", clientDocument.Plugin);
-        var context = new OpenIddictServerEvents.ValidateTokenContext(transaction)
-        {
-            Principal = principal,
-            TokenId = "token-tenant"
-        };
-
-        await handler.HandleAsync(context);
-
-        Assert.False(context.IsRejected);
-        Assert.Equal("tenant-alpha", principal.FindFirstValue(StellaOpsClaimTypes.Tenant));
-        Assert.Equal("tenant-alpha", metadataAccessor.GetMetadata()?.Tenant);
-    }
-
-    [Fact]
-    public async Task ValidateAccessTokenHandler_Rejects_WhenClientTenantDiffers()
-    {
-        var clientDocument = CreateClient(tenant: "tenant-beta");
-        var tokenStore = new TestTokenStore
-        {
-            Inserted = new AuthorityTokenDocument
-            {
-                TokenId = "token-tenant",
-                Status = "valid",
-                ClientId = clientDocument.ClientId
-            }
-        };
-
-        var metadataAccessor = new TestRateLimiterMetadataAccessor();
-        var auditSink = new TestAuthEventSink();
-        var sessionAccessor = new NullMongoSessionAccessor();
-        var handler = new ValidateAccessTokenHandler(
-            tokenStore,
-            sessionAccessor,
-            new TestClientStore(clientDocument),
-            CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)),
-            metadataAccessor,
-            auditSink,
-            TimeProvider.System,
-            TestActivitySource,
-            NullLogger.Instance);
-
-        var transaction = new OpenIddictServerTransaction
-        {
-            Options = new OpenIddictServerOptions(),
-            EndpointType = OpenIddictServerEndpointType.Token,
-            Request = new OpenIddictRequest()
-        };
-
-        var principal = CreatePrincipal(clientDocument.ClientId, "token-tenant", clientDocument.Plugin);
-        principal.Identities.First().AddClaim(new Claim(StellaOpsClaimTypes.Tenant, "tenant-alpha"));
-        var context = new OpenIddictServerEvents.ValidateTokenContext(transaction)
-        {
-            Principal = principal,
-            TokenId = "token-tenant"
-        };
-
-        await handler.HandleAsync(context);
-
-        Assert.True(context.IsRejected);
-        Assert.Equal(OpenIddictConstants.Errors.InvalidToken, context.Error);
-        Assert.Equal("The token tenant does not match the registered client tenant.", context.ErrorDescription);
-    }
-
-    [Fact]
-    public async Task ValidateAccessTokenHandler_EnrichesClaims_WhenProviderAvailable()
-    {
-        var clientDocument = CreateClient();
AuthorityUserDescriptor("user-1", "alice", displayName: "Alice", requiresPasswordReset: false); - - var plugin = CreatePlugin( - name: "standard", - supportsClientProvisioning: true, - descriptor: CreateDescriptor(clientDocument), - user: userDescriptor); - - var registry = CreateRegistryFromPlugins(plugin); - - var metadataAccessorSuccess = new TestRateLimiterMetadataAccessor(); - var auditSinkSuccess = new TestAuthEventSink(); - var sessionAccessor = new NullMongoSessionAccessor(); - var handler = new ValidateAccessTokenHandler( - new TestTokenStore(), - sessionAccessor, - new TestClientStore(clientDocument), - registry, - metadataAccessorSuccess, - auditSinkSuccess, - TimeProvider.System, - TestActivitySource, - NullLogger.Instance); - - var transaction = new OpenIddictServerTransaction - { - Options = new OpenIddictServerOptions(), - EndpointType = OpenIddictServerEndpointType.Token, - Request = new OpenIddictRequest() - }; - - var principal = CreatePrincipal(clientDocument.ClientId, "token-123", plugin.Name, subject: userDescriptor.SubjectId); - var context = new OpenIddictServerEvents.ValidateTokenContext(transaction) - { - Principal = principal - }; - - await handler.HandleAsync(context); - - Assert.False(context.IsRejected); - Assert.Contains(principal.Claims, claim => claim.Type == "enriched" && claim.Value == "true"); - } - - [Fact] - public async Task ValidateAccessTokenHandler_AddsConfirmationClaim_ForMtlsToken() - { - var tokenDocument = new AuthorityTokenDocument - { - TokenId = "token-mtls", - Status = "valid", - ClientId = "mtls-client", - SenderConstraint = AuthoritySenderConstraintKinds.Mtls, - SenderKeyThumbprint = "thumb-print" - }; - - var tokenStore = new TestTokenStore - { - Inserted = tokenDocument - }; - - var clientDocument = CreateClient(); - var registry = CreateRegistry(withClientProvisioning: false, clientDescriptor: null); - var metadataAccessor = new TestRateLimiterMetadataAccessor(); - var auditSink = new TestAuthEventSink(); - var sessionAccessor = new NullMongoSessionAccessor(); - var handler = new ValidateAccessTokenHandler( - tokenStore, - sessionAccessor, - new TestClientStore(clientDocument), - registry, - metadataAccessor, - auditSink, - TimeProvider.System, - TestActivitySource, - NullLogger.Instance); - - var transaction = new OpenIddictServerTransaction - { - Options = new OpenIddictServerOptions(), - EndpointType = OpenIddictServerEndpointType.Introspection, - Request = new OpenIddictRequest() - }; - - var principal = CreatePrincipal(clientDocument.ClientId, tokenDocument.TokenId, clientDocument.Plugin); - var context = new OpenIddictServerEvents.ValidateTokenContext(transaction) - { - Principal = principal, - TokenId = tokenDocument.TokenId - }; - - await handler.HandleAsync(context); - - Assert.False(context.IsRejected); - var confirmation = context.Principal?.GetClaim(AuthorityOpenIddictConstants.ConfirmationClaimType); - Assert.False(string.IsNullOrWhiteSpace(confirmation)); - using var json = JsonDocument.Parse(confirmation!); - Assert.Equal(tokenDocument.SenderKeyThumbprint, json.RootElement.GetProperty("x5t#S256").GetString()); - } - - [Fact] - public async Task ValidateAccessTokenHandler_EmitsReplayAudit_WhenStoreDetectsSuspectedReplay() - { - var tokenStore = new TestTokenStore(); - tokenStore.Inserted = new AuthorityTokenDocument - { - TokenId = "token-replay", - Status = "valid", - ClientId = "agent", - Devices = new List - { - new BsonDocument - { - { "remoteAddress", "10.0.0.1" }, - { "userAgent", "agent/1.0" }, - { "firstSeen", 
BsonDateTime.Create(DateTimeOffset.UtcNow.AddMinutes(-15)) }, - { "lastSeen", BsonDateTime.Create(DateTimeOffset.UtcNow.AddMinutes(-5)) }, - { "useCount", 2 } - } - } - }; - - tokenStore.UsageCallback = (remote, agent) => new TokenUsageUpdateResult(TokenUsageUpdateStatus.SuspectedReplay, remote, agent); - - var metadataAccessor = new TestRateLimiterMetadataAccessor(); - var metadata = metadataAccessor.GetMetadata(); - if (metadata is not null) - { - metadata.RemoteIp = "203.0.113.7"; - metadata.UserAgent = "agent/2.0"; - } - - var clientDocument = CreateClient(); - clientDocument.ClientId = "agent"; - var auditSink = new TestAuthEventSink(); - var registry = CreateRegistry(withClientProvisioning: false, clientDescriptor: null); - var sessionAccessorReplay = new NullMongoSessionAccessor(); - var handler = new ValidateAccessTokenHandler( - tokenStore, - sessionAccessorReplay, - new TestClientStore(clientDocument), - registry, - metadataAccessor, - auditSink, - TimeProvider.System, - TestActivitySource, - NullLogger.Instance); - - var transaction = new OpenIddictServerTransaction - { - Options = new OpenIddictServerOptions(), - EndpointType = OpenIddictServerEndpointType.Introspection, - Request = new OpenIddictRequest() - }; - - var principal = CreatePrincipal("agent", "token-replay", "standard"); - var context = new OpenIddictServerEvents.ValidateTokenContext(transaction) - { - Principal = principal, - TokenId = "token-replay" - }; - - await handler.HandleAsync(context); - - Assert.False(context.IsRejected); - var replayEvent = Assert.Single(auditSink.Events, record => record.EventType == "authority.token.replay.suspected"); - Assert.Equal(AuthEventOutcome.Error, replayEvent.Outcome); - Assert.NotNull(replayEvent.Network); - Assert.Equal("203.0.113.7", replayEvent.Network?.RemoteAddress.Value); - Assert.Contains(replayEvent.Properties, property => property.Name == "token.devices.total"); - } -} - -public class AuthorityClientCertificateValidatorTests -{ - [Fact] - public async Task ValidateAsync_Rejects_WhenSanTypeNotAllowed() - { - var options = new StellaOpsAuthorityOptions - { - Issuer = new Uri("https://authority.test") - }; - options.Security.SenderConstraints.Mtls.Enabled = true; - options.Security.SenderConstraints.Mtls.RequireChainValidation = false; - options.Security.SenderConstraints.Mtls.AllowedSanTypes.Clear(); - options.Security.SenderConstraints.Mtls.AllowedSanTypes.Add("uri"); - options.Signing.ActiveKeyId = "test-key"; - options.Signing.KeyPath = "/tmp/test-key.pem"; - options.Storage.ConnectionString = "mongodb://localhost/test"; - - using var rsa = RSA.Create(2048); - var request = new CertificateRequest("CN=mtls-client", rsa, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1); - var sanBuilder = new SubjectAlternativeNameBuilder(); - sanBuilder.AddDnsName("client.mtls.test"); - request.CertificateExtensions.Add(sanBuilder.Build()); - using var certificate = request.CreateSelfSigned(DateTimeOffset.UtcNow.AddMinutes(-5), DateTimeOffset.UtcNow.AddMinutes(5)); - - var clientDocument = CreateClient(); - clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Mtls; - clientDocument.CertificateBindings.Add(new AuthorityClientCertificateBinding - { - Thumbprint = Convert.ToHexString(certificate.GetCertHash(HashAlgorithmName.SHA256)) - }); - - var httpContext = new DefaultHttpContext(); - httpContext.Connection.ClientCertificate = certificate; - - var validator = new AuthorityClientCertificateValidator(options, TimeProvider.System, NullLogger.Instance); - var result = 
await validator.ValidateAsync(httpContext, clientDocument, CancellationToken.None); - - Assert.False(result.Succeeded); - Assert.Equal("certificate_san_type", result.Error); - } - - [Fact] - public async Task ValidateAsync_AllowsBindingWithinRotationGrace() - { - var options = new StellaOpsAuthorityOptions - { - Issuer = new Uri("https://authority.test") - }; - options.Security.SenderConstraints.Mtls.Enabled = true; - options.Security.SenderConstraints.Mtls.RequireChainValidation = false; - options.Security.SenderConstraints.Mtls.RotationGrace = TimeSpan.FromMinutes(5); - options.Signing.ActiveKeyId = "test-key"; - options.Signing.KeyPath = "/tmp/test-key.pem"; - options.Storage.ConnectionString = "mongodb://localhost/test"; - - using var rsa = RSA.Create(2048); - var request = new CertificateRequest("CN=mtls-client", rsa, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1); - var sanBuilder = new SubjectAlternativeNameBuilder(); - sanBuilder.AddDnsName("client.mtls.test"); - request.CertificateExtensions.Add(sanBuilder.Build()); - using var certificate = request.CreateSelfSigned(DateTimeOffset.UtcNow.AddMinutes(-5), DateTimeOffset.UtcNow.AddMinutes(10)); - - var thumbprint = Convert.ToHexString(certificate.GetCertHash(HashAlgorithmName.SHA256)); - - var clientDocument = CreateClient(); - clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Mtls; - clientDocument.CertificateBindings.Add(new AuthorityClientCertificateBinding - { - Thumbprint = thumbprint, - NotBefore = TimeProvider.System.GetUtcNow().AddMinutes(2) - }); - - var httpContext = new DefaultHttpContext(); - httpContext.Connection.ClientCertificate = certificate; - - var validator = new AuthorityClientCertificateValidator(options, TimeProvider.System, NullLogger.Instance); - var result = await validator.ValidateAsync(httpContext, clientDocument, CancellationToken.None); - - Assert.True(result.Succeeded); - Assert.Equal(thumbprint, result.HexThumbprint); - } - - [Fact] - public async Task ValidateAsync_Rejects_WhenBindingSubjectMismatch() - { - var options = new StellaOpsAuthorityOptions - { - Issuer = new Uri("https://authority.test") - }; - options.Security.SenderConstraints.Mtls.Enabled = true; - options.Security.SenderConstraints.Mtls.RequireChainValidation = false; - options.Signing.ActiveKeyId = "test-key"; - options.Signing.KeyPath = "/tmp/test-key.pem"; - options.Storage.ConnectionString = "mongodb://localhost/test"; - - using var rsa = RSA.Create(2048); - var request = new CertificateRequest("CN=mtls-client", rsa, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1); - var sanBuilder = new SubjectAlternativeNameBuilder(); - sanBuilder.AddDnsName("client.mtls.test"); - request.CertificateExtensions.Add(sanBuilder.Build()); - using var certificate = request.CreateSelfSigned(DateTimeOffset.UtcNow.AddMinutes(-5), DateTimeOffset.UtcNow.AddMinutes(5)); - - var clientDocument = CreateClient(); - clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Mtls; - clientDocument.CertificateBindings.Add(new AuthorityClientCertificateBinding - { - Thumbprint = Convert.ToHexString(certificate.GetCertHash(HashAlgorithmName.SHA256)), - Subject = "CN=different-client" - }); - - var httpContext = new DefaultHttpContext(); - httpContext.Connection.ClientCertificate = certificate; - - var validator = new AuthorityClientCertificateValidator(options, TimeProvider.System, NullLogger.Instance); - var result = await validator.ValidateAsync(httpContext, clientDocument, CancellationToken.None); - - 
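        // Sketch only, not part of the original test: a binding that would satisfy the three
        // checks exercised by AuthorityClientCertificateValidatorTests above - a matching
        // SHA-256 thumbprint, a matching subject, and a SAN list that contains the
        // certificate's DNS SAN. Values are taken from the self-signed test certificate built above.
        var matchingBinding = new AuthorityClientCertificateBinding
        {
            Thumbprint = Convert.ToHexString(certificate.GetCertHash(HashAlgorithmName.SHA256)),
            Subject = "CN=mtls-client",
            SubjectAlternativeNames = new List<string> { "client.mtls.test" }
        };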
Assert.False(result.Succeeded); - Assert.Equal("certificate_binding_subject_mismatch", result.Error); - } - - [Fact] - public async Task ValidateAsync_Rejects_WhenBindingSansMissing() - { - var options = new StellaOpsAuthorityOptions - { - Issuer = new Uri("https://authority.test") - }; - options.Security.SenderConstraints.Mtls.Enabled = true; - options.Security.SenderConstraints.Mtls.RequireChainValidation = false; - options.Signing.ActiveKeyId = "test-key"; - options.Signing.KeyPath = "/tmp/test-key.pem"; - options.Storage.ConnectionString = "mongodb://localhost/test"; - - using var rsa = RSA.Create(2048); - var request = new CertificateRequest("CN=mtls-client", rsa, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1); - var sanBuilder = new SubjectAlternativeNameBuilder(); - sanBuilder.AddDnsName("client.mtls.test"); - request.CertificateExtensions.Add(sanBuilder.Build()); - using var certificate = request.CreateSelfSigned(DateTimeOffset.UtcNow.AddMinutes(-5), DateTimeOffset.UtcNow.AddMinutes(5)); - - var clientDocument = CreateClient(); - clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Mtls; - clientDocument.CertificateBindings.Add(new AuthorityClientCertificateBinding - { - Thumbprint = Convert.ToHexString(certificate.GetCertHash(HashAlgorithmName.SHA256)), - SubjectAlternativeNames = new List { "spiffe://client" } - }); - - var httpContext = new DefaultHttpContext(); - httpContext.Connection.ClientCertificate = certificate; - - var validator = new AuthorityClientCertificateValidator(options, TimeProvider.System, NullLogger.Instance); - var result = await validator.ValidateAsync(httpContext, clientDocument, CancellationToken.None); - - Assert.False(result.Succeeded); - Assert.Equal("certificate_binding_san_mismatch", result.Error); - } -} - -internal sealed class TestClientStore : IAuthorityClientStore -{ - private readonly Dictionary clients = new(StringComparer.OrdinalIgnoreCase); - - public TestClientStore(params AuthorityClientDocument[] documents) - { - foreach (var document in documents) - { - clients[document.ClientId] = document; - } - } - - public ValueTask FindByClientIdAsync(string clientId, CancellationToken cancellationToken, IClientSessionHandle? session = null) - { - clients.TryGetValue(clientId, out var document); - return ValueTask.FromResult(document); - } - - public ValueTask UpsertAsync(AuthorityClientDocument document, CancellationToken cancellationToken, IClientSessionHandle? session = null) - { - clients[document.ClientId] = document; - return ValueTask.CompletedTask; - } - - public ValueTask DeleteByClientIdAsync(string clientId, CancellationToken cancellationToken, IClientSessionHandle? session = null) - => ValueTask.FromResult(clients.Remove(clientId)); -} - -internal sealed class TestTokenStore : IAuthorityTokenStore -{ - public AuthorityTokenDocument? Inserted { get; set; } - - public Func? UsageCallback { get; set; } - - public ValueTask InsertAsync(AuthorityTokenDocument document, CancellationToken cancellationToken, IClientSessionHandle? session = null) - { - Inserted = document; - return ValueTask.CompletedTask; - } - - public ValueTask FindByTokenIdAsync(string tokenId, CancellationToken cancellationToken, IClientSessionHandle? session = null) - => ValueTask.FromResult(Inserted is not null && string.Equals(Inserted.TokenId, tokenId, StringComparison.OrdinalIgnoreCase) ? Inserted : null); - - public ValueTask FindByReferenceIdAsync(string referenceId, CancellationToken cancellationToken, IClientSessionHandle? 
session = null) - => ValueTask.FromResult(null); - - public ValueTask UpdateStatusAsync(string tokenId, string status, DateTimeOffset? revokedAt, string? reason, string? reasonDescription, IReadOnlyDictionary? metadata, CancellationToken cancellationToken, IClientSessionHandle? session = null) - => ValueTask.CompletedTask; - - public ValueTask DeleteExpiredAsync(DateTimeOffset threshold, CancellationToken cancellationToken, IClientSessionHandle? session = null) - => ValueTask.FromResult(0L); - - public ValueTask RecordUsageAsync(string tokenId, string? remoteAddress, string? userAgent, DateTimeOffset observedAt, CancellationToken cancellationToken, IClientSessionHandle? session = null) - => ValueTask.FromResult(UsageCallback?.Invoke(remoteAddress, userAgent) ?? new TokenUsageUpdateResult(TokenUsageUpdateStatus.Recorded, remoteAddress, userAgent)); - - public ValueTask> ListRevokedAsync(DateTimeOffset? issuedAfter, CancellationToken cancellationToken, IClientSessionHandle? session = null) - => ValueTask.FromResult>(Array.Empty()); -} - -internal sealed class TestClaimsEnricher : IClaimsEnricher -{ - public ValueTask EnrichAsync(ClaimsIdentity identity, AuthorityClaimsEnrichmentContext context, CancellationToken cancellationToken) - { - if (!identity.HasClaim(c => c.Type == "enriched")) - { - identity.AddClaim(new Claim("enriched", "true")); - } - - return ValueTask.CompletedTask; - } -} - -internal sealed class TestUserCredentialStore : IUserCredentialStore -{ - private readonly AuthorityUserDescriptor? user; - - public TestUserCredentialStore(AuthorityUserDescriptor? user) - { - this.user = user; - } - - public ValueTask VerifyPasswordAsync(string username, string password, CancellationToken cancellationToken) - => ValueTask.FromResult(AuthorityCredentialVerificationResult.Failure(AuthorityCredentialFailureCode.InvalidCredentials)); - - public ValueTask> UpsertUserAsync(AuthorityUserRegistration registration, CancellationToken cancellationToken) - => ValueTask.FromResult(AuthorityPluginOperationResult.Failure("unsupported", "not implemented")); - - public ValueTask FindBySubjectAsync(string subjectId, CancellationToken cancellationToken) - => ValueTask.FromResult(user); -} - -internal sealed class TestClientProvisioningStore : IClientProvisioningStore -{ - private readonly AuthorityClientDescriptor? descriptor; - - public TestClientProvisioningStore(AuthorityClientDescriptor? descriptor) - { - this.descriptor = descriptor; - } - - public ValueTask> CreateOrUpdateAsync(AuthorityClientRegistration registration, CancellationToken cancellationToken) - => ValueTask.FromResult(AuthorityPluginOperationResult.Failure("unsupported", "not implemented")); - - public ValueTask FindByClientIdAsync(string clientId, CancellationToken cancellationToken) - => ValueTask.FromResult(descriptor); - - public ValueTask DeleteAsync(string clientId, CancellationToken cancellationToken) - => ValueTask.FromResult(AuthorityPluginOperationResult.Success()); -} - -internal sealed class TestIdentityProviderPlugin : IIdentityProviderPlugin -{ - public TestIdentityProviderPlugin( - AuthorityPluginContext context, - IUserCredentialStore credentialStore, - IClaimsEnricher claimsEnricher, - IClientProvisioningStore? 
clientProvisioning,
-        AuthorityIdentityProviderCapabilities capabilities)
-    {
-        Context = context;
-        Credentials = credentialStore;
-        ClaimsEnricher = claimsEnricher;
-        ClientProvisioning = clientProvisioning;
-        Capabilities = capabilities;
-    }
-
-    public string Name => Context.Manifest.Name;
-
-    public string Type => Context.Manifest.Type;
-
-    public AuthorityPluginContext Context { get; }
-
-    public IUserCredentialStore Credentials { get; }
-
-    public IClaimsEnricher ClaimsEnricher { get; }
-
-    public IClientProvisioningStore? ClientProvisioning { get; }
-
-    public AuthorityIdentityProviderCapabilities Capabilities { get; }
-
-    public ValueTask<AuthorityPluginHealthResult> CheckHealthAsync(CancellationToken cancellationToken)
-        => ValueTask.FromResult(AuthorityPluginHealthResult.Healthy());
-}
-
-internal sealed class TestAuthEventSink : IAuthEventSink
-{
-    public List<AuthEventRecord> Events { get; } = new();
-
-    public ValueTask WriteAsync(AuthEventRecord record, CancellationToken cancellationToken)
-    {
-        Events.Add(record);
-        return ValueTask.CompletedTask;
-    }
-}
-
-internal sealed class TestRateLimiterMetadataAccessor : IAuthorityRateLimiterMetadataAccessor
-{
-    private readonly AuthorityRateLimiterMetadata metadata = new();
-
-    public AuthorityRateLimiterMetadata? GetMetadata() => metadata;
-
-    public void SetClientId(string? clientId) => metadata.ClientId = clientId;
-
-    public void SetSubjectId(string? subjectId) => metadata.SubjectId = subjectId;
-
-    public void SetTenant(string? tenant)
-    {
-        metadata.Tenant = string.IsNullOrWhiteSpace(tenant) ? null : tenant.Trim().ToLowerInvariant();
-        metadata.SetTag("authority.tenant", metadata.Tenant);
-    }
-
-    public void SetProject(string? project)
-    {
-        metadata.Project = string.IsNullOrWhiteSpace(project) ? null : project.Trim().ToLowerInvariant();
-        metadata.SetTag("authority.project", metadata.Project);
-    }
-
-    public void SetTag(string name, string?
value) => metadata.SetTag(name, value); -} - -internal sealed class NoopCertificateValidator : IAuthorityClientCertificateValidator -{ - public ValueTask ValidateAsync(HttpContext httpContext, AuthorityClientDocument client, CancellationToken cancellationToken) - { - var binding = new AuthorityClientCertificateBinding - { - Thumbprint = "stub" - }; - - return ValueTask.FromResult(AuthorityClientCertificateValidationResult.Success("stub", "stub", binding)); - } -} - -internal sealed class RecordingCertificateValidator : IAuthorityClientCertificateValidator -{ - public bool Invoked { get; private set; } - - public ValueTask ValidateAsync(HttpContext httpContext, AuthorityClientDocument client, CancellationToken cancellationToken) - { - Invoked = true; - - if (httpContext.Connection.ClientCertificate is null) - { - return ValueTask.FromResult(AuthorityClientCertificateValidationResult.Failure("client_certificate_required")); - } - - AuthorityClientCertificateBinding binding; - if (client.CertificateBindings.Count > 0) - { - binding = client.CertificateBindings[0]; - } - else - { - binding = new AuthorityClientCertificateBinding { Thumbprint = "stub" }; - } - - return ValueTask.FromResult(AuthorityClientCertificateValidationResult.Success("stub", binding.Thumbprint, binding)); - } -} - -internal sealed class NullMongoSessionAccessor : IAuthorityMongoSessionAccessor -{ - public ValueTask GetSessionAsync(CancellationToken cancellationToken = default) - => ValueTask.FromResult(null!); - - public ValueTask DisposeAsync() => ValueTask.CompletedTask; -} - -internal static class TestHelpers -{ - public static StellaOpsAuthorityOptions CreateAuthorityOptions(Action? configure = null) - { - var options = new StellaOpsAuthorityOptions - { - Issuer = new Uri("https://authority.test") - }; - - options.Signing.ActiveKeyId = "test-key"; - options.Signing.KeyPath = "/tmp/test-key.pem"; - options.Storage.ConnectionString = "mongodb://localhost/test"; - - configure?.Invoke(options); - return options; - } - - public static AuthorityClientDocument CreateClient( - string clientId = "concelier", - string? secret = "s3cr3t!", - string clientType = "confidential", - string allowedGrantTypes = "client_credentials", - string allowedScopes = "jobs:read", - string allowedAudiences = "", - string? tenant = null) - { - var document = new AuthorityClientDocument - { - ClientId = clientId, - ClientType = clientType, - SecretHash = secret is null ? null : AuthoritySecretHasher.ComputeHash(secret), - Plugin = "standard", - Properties = new Dictionary(StringComparer.OrdinalIgnoreCase) - { - [AuthorityClientMetadataKeys.AllowedGrantTypes] = allowedGrantTypes, - [AuthorityClientMetadataKeys.AllowedScopes] = allowedScopes - } - }; - - if (!string.IsNullOrWhiteSpace(allowedAudiences)) - { - document.Properties[AuthorityClientMetadataKeys.Audiences] = allowedAudiences; - } - - var normalizedTenant = NormalizeTenant(tenant); - if (normalizedTenant is not null) - { - document.Properties[AuthorityClientMetadataKeys.Tenant] = normalizedTenant; - } - - return document; - } - - private static string? NormalizeTenant(string? value) - => string.IsNullOrWhiteSpace(value) ? null : value.Trim().ToLowerInvariant(); - - public static AuthorityClientDescriptor CreateDescriptor(AuthorityClientDocument document) - { - var allowedGrantTypes = document.Properties.TryGetValue(AuthorityClientMetadataKeys.AllowedGrantTypes, out var grants) ? 
grants?.Split(' ', StringSplitOptions.RemoveEmptyEntries) : Array.Empty(); - var allowedScopes = document.Properties.TryGetValue(AuthorityClientMetadataKeys.AllowedScopes, out var scopes) ? scopes?.Split(' ', StringSplitOptions.RemoveEmptyEntries) : Array.Empty(); - var allowedAudiences = document.Properties.TryGetValue(AuthorityClientMetadataKeys.Audiences, out var audiences) ? audiences?.Split(' ', StringSplitOptions.RemoveEmptyEntries) : Array.Empty(); - - return new AuthorityClientDescriptor( - document.ClientId, - document.DisplayName, - confidential: string.Equals(document.ClientType, "confidential", StringComparison.OrdinalIgnoreCase), - allowedGrantTypes, - allowedScopes, - allowedAudiences, - redirectUris: Array.Empty(), - postLogoutRedirectUris: Array.Empty(), - properties: document.Properties); - } - - public static AuthorityIdentityProviderRegistry CreateRegistry(bool withClientProvisioning, AuthorityClientDescriptor? clientDescriptor) - { - var plugin = CreatePlugin( - name: "standard", - supportsClientProvisioning: withClientProvisioning, - descriptor: clientDescriptor, - user: null); - - return CreateRegistryFromPlugins(plugin); - } - - public static TestIdentityProviderPlugin CreatePlugin( - string name, - bool supportsClientProvisioning, - AuthorityClientDescriptor? descriptor, - AuthorityUserDescriptor? user) - { - var capabilities = supportsClientProvisioning - ? new[] { AuthorityPluginCapabilities.ClientProvisioning } - : Array.Empty(); - - var manifest = new AuthorityPluginManifest( - name, - "standard", - true, - null, - null, - capabilities, - new Dictionary(StringComparer.OrdinalIgnoreCase), - $"{name}.yaml"); - - var context = new AuthorityPluginContext(manifest, new ConfigurationBuilder().Build()); - - return new TestIdentityProviderPlugin( - context, - new TestUserCredentialStore(user), - new TestClaimsEnricher(), - supportsClientProvisioning ? new TestClientProvisioningStore(descriptor) : null, - new AuthorityIdentityProviderCapabilities( - SupportsPassword: true, - SupportsMfa: false, - SupportsClientProvisioning: supportsClientProvisioning)); - } - - public static AuthorityIdentityProviderRegistry CreateRegistryFromPlugins(params IIdentityProviderPlugin[] plugins) - { - var services = new ServiceCollection(); - services.AddLogging(); - foreach (var plugin in plugins) - { - services.AddSingleton(plugin); - } - - var provider = services.BuildServiceProvider(); - return new AuthorityIdentityProviderRegistry(provider, NullLogger.Instance); - } - - public static OpenIddictServerTransaction CreateTokenTransaction(string clientId, string? secret, string? scope) - { - var request = new OpenIddictRequest - { - GrantType = OpenIddictConstants.GrantTypes.ClientCredentials, - ClientId = clientId, - ClientSecret = secret - }; - - if (!string.IsNullOrWhiteSpace(scope)) - { - request.Scope = scope; - } - - return new OpenIddictServerTransaction - { - EndpointType = OpenIddictServerEndpointType.Token, - Options = new OpenIddictServerOptions(), - Request = request - }; - } - - public static string ConvertThumbprintToString(object thumbprint) - => thumbprint switch - { - string value => value, - byte[] bytes => Base64UrlEncoder.Encode(bytes), - _ => throw new InvalidOperationException("Unsupported thumbprint representation.") - }; - - public static string CreateDpopProof(ECDsaSecurityKey key, string method, string url, long issuedAt, string? nonce = null) - { - var jwk = JsonWebKeyConverter.ConvertFromECDsaSecurityKey(key); - jwk.KeyId ??= key.KeyId ?? 
Guid.NewGuid().ToString("N"); - - var signingCredentials = new SigningCredentials(key, SecurityAlgorithms.EcdsaSha256); - var header = new JwtHeader(signingCredentials) - { - ["typ"] = "dpop+jwt", - ["jwk"] = new Dictionary - { - ["kty"] = jwk.Kty, - ["crv"] = jwk.Crv, - ["x"] = jwk.X, - ["y"] = jwk.Y, - ["kid"] = jwk.Kid ?? jwk.KeyId - } - }; - - var payload = new JwtPayload - { - ["htm"] = method.ToUpperInvariant(), - ["htu"] = url, - ["iat"] = issuedAt, - ["jti"] = Guid.NewGuid().ToString("N") - }; - - if (!string.IsNullOrWhiteSpace(nonce)) - { - payload["nonce"] = nonce; - } - - var token = new JwtSecurityToken(header, payload); - return new JwtSecurityTokenHandler().WriteToken(token); - } - - public static X509Certificate2 CreateTestCertificate(string subjectName) - { - using var rsa = RSA.Create(2048); - var request = new CertificateRequest(subjectName, rsa, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1); - return request.CreateSelfSigned(DateTimeOffset.UtcNow.AddMinutes(-5), DateTimeOffset.UtcNow.AddHours(1)); - } - - public static ClaimsPrincipal CreatePrincipal(string clientId, string tokenId, string provider, string? subject = null) - { - var identity = new ClaimsIdentity(OpenIddictServerAspNetCoreDefaults.AuthenticationScheme); - identity.AddClaim(new Claim(OpenIddictConstants.Claims.ClientId, clientId)); - identity.AddClaim(new Claim(OpenIddictConstants.Claims.JwtId, tokenId)); - identity.AddClaim(new Claim(StellaOpsClaimTypes.IdentityProvider, provider)); - identity.AddClaim(new Claim(StellaOpsClaimTypes.Project, StellaOpsTenancyDefaults.AnyProject)); - - if (!string.IsNullOrWhiteSpace(subject)) - { - identity.AddClaim(new Claim(OpenIddictConstants.Claims.Subject, subject)); - } - - return new ClaimsPrincipal(identity); - } -} +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.IdentityModel.Tokens.Jwt; +using System.Security.Claims; +using System.Security.Cryptography; +using System.Security.Cryptography.X509Certificates; +using System.Text.Json; +using System.Linq; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Http.Extensions; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Options; +using Microsoft.Extensions.Primitives; +using Microsoft.IdentityModel.Tokens; +using StellaOps.Configuration; +using StellaOps.Authority.Security; +using StellaOps.Auth.Security.Dpop; +using OpenIddict.Abstractions; +using OpenIddict.Extensions; +using OpenIddict.Server; +using OpenIddict.Server.AspNetCore; +using StellaOps.Auth.Abstractions; +using StellaOps.Authority.OpenIddict; +using StellaOps.Authority.OpenIddict.Handlers; +using StellaOps.Authority.Plugins.Abstractions; +using StellaOps.Authority.Storage.Mongo.Documents; +using StellaOps.Authority.Storage.Mongo.Sessions; +using StellaOps.Authority.Storage.Mongo.Stores; +using StellaOps.Authority.RateLimiting; +using StellaOps.Cryptography.Audit; +using Xunit; +using MongoDB.Bson; +using MongoDB.Driver; +using static StellaOps.Authority.Tests.OpenIddict.TestHelpers; + +namespace StellaOps.Authority.Tests.OpenIddict; + +public class ClientCredentialsHandlersTests +{ + private static readonly ActivitySource TestActivitySource = new("StellaOps.Authority.Tests"); + + [Fact] + public async Task ValidateClientCredentials_Rejects_WhenScopeNotAllowed() + { + var clientDocument = CreateClient( + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + 
allowedScopes: "jobs:read"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:write"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidScope, context.Error); + Assert.Equal("Scope 'jobs:write' is not allowed for this client.", context.ErrorDescription); + } + + [Fact] + public async Task ValidateClientCredentials_Allows_WhenConfigurationMatches() + { + var clientDocument = CreateClient( + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "jobs:read jobs:trigger"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); + Assert.False(context.Transaction.Properties.ContainsKey(AuthorityOpenIddictConstants.ClientTenantProperty)); + Assert.Same(clientDocument, context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTransactionProperty]); + + var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); + Assert.Equal(new[] { "jobs:read" }, grantedScopes); + Assert.Equal(clientDocument.Plugin, context.Transaction.Properties[AuthorityOpenIddictConstants.ClientProviderTransactionProperty]); + } + + [Fact] + public async Task ValidateClientCredentials_Allows_NewIngestionScopes() + { + var clientDocument = CreateClient( + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "advisory:ingest advisory:read", + tenant: "tenant-alpha"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "advisory:ingest"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + 
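        // Illustrative sketch, not the handler's actual implementation: the behaviour these two
        // tests pin down is that requested scopes are checked against the client's AllowedScopes
        // metadata, and only the requested, permitted scopes end up on the transaction.
        var allowedForClient = new HashSet<string>(new[] { "jobs:read", "jobs:trigger" }, StringComparer.Ordinal);
        var granted = new[] { "jobs:read" }.Where(allowedForClient.Contains).ToArray();
        // granted == ["jobs:read"], matching the ClientGrantedScopesProperty assertion below.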
Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); + var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); + Assert.Equal(new[] { "advisory:ingest" }, grantedScopes); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsAdvisoryReadWithoutAocVerify() + { + var clientDocument = CreateClient( + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "advisory:read aoc:verify", + tenant: "tenant-alpha"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "advisory:read"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidScope, context.Error); + Assert.Equal("Scope 'aoc:verify' is required when requesting advisory/vex read scopes.", context.ErrorDescription); + Assert.Equal(StellaOpsScopes.AocVerify, context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty]); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsSignalsScopeWithoutAocVerify() + { + var clientDocument = CreateClient( + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "signals:read signals:write signals:admin aoc:verify", + tenant: "tenant-alpha"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "signals:write"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidScope, context.Error); + Assert.Equal("Scope 'aoc:verify' is required when requesting signals scopes.", context.ErrorDescription); + Assert.Equal(StellaOpsScopes.AocVerify, context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty]); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsPolicyAuthorWithoutTenant() + { + var clientDocument = CreateClient( + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "policy:author"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new 
TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "policy:author"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); + Assert.Equal("Policy Studio scopes require a tenant assignment.", context.ErrorDescription); + Assert.Equal(StellaOpsScopes.PolicyAuthor, context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty]); + } + + [Fact] + public async Task ValidateClientCredentials_AllowsPolicyAuthorWithTenant() + { + var clientDocument = CreateClient( + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "policy:author", + tenant: "tenant-alpha"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "policy:author"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); + var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); + Assert.Equal(new[] { "policy:author" }, grantedScopes); + } + + [Fact] + public async Task ValidateClientCredentials_AllowsAdvisoryReadWithAocVerify() + { + var clientDocument = CreateClient( + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "advisory:read aoc:verify", + tenant: "tenant-alpha"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "advisory:read aoc:verify"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); + var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); + Assert.Equal(new[] { "advisory:read", "aoc:verify" }, grantedScopes); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsAocVerifyWithoutTenant() + { + var clientDocument = CreateClient( + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "aoc:verify"); + + var registry 
= CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "aoc:verify"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); + Assert.Equal("Scope 'aoc:verify' requires a tenant assignment.", context.ErrorDescription); + Assert.Equal(StellaOpsScopes.AocVerify, context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty]); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsEffectiveWrite_WhenServiceIdentityMissing() + { + var clientDocument = CreateClient( + clientId: "policy-engine", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "effective:write findings:read policy:run", + tenant: "tenant-default"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + Assert.True(clientDocument.Properties.ContainsKey(AuthorityClientMetadataKeys.Tenant)); + Assert.Equal("tenant-default", clientDocument.Properties[AuthorityClientMetadataKeys.Tenant]); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "effective:write"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.UnauthorizedClient, context.Error); + Assert.Equal("Scope 'effective:write' is reserved for the Policy Engine service identity.", context.ErrorDescription); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsEffectiveWrite_WhenTenantMissing() + { + var clientDocument = CreateClient( + clientId: "policy-engine", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "effective:write findings:read policy:run"); + clientDocument.Properties[AuthorityClientMetadataKeys.ServiceIdentity] = StellaOpsServiceIdentities.PolicyEngine; + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "effective:write"); + var context = new 
OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); + Assert.Equal("Policy Engine service identity requires a tenant assignment.", context.ErrorDescription); + } + + [Fact] + public async Task ValidateClientCredentials_AllowsEffectiveWrite_ForPolicyEngineServiceIdentity() + { + var clientDocument = CreateClient( + clientId: "policy-engine", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "effective:write findings:read policy:run", + tenant: "tenant-default"); + clientDocument.Properties[AuthorityClientMetadataKeys.ServiceIdentity] = StellaOpsServiceIdentities.PolicyEngine; + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "effective:write"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); + var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); + Assert.Equal(new[] { "effective:write" }, grantedScopes); + + var tenant = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTenantProperty]); + Assert.Equal("tenant-default", tenant); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsOrchOperate_WhenTenantMissing() + { + var clientDocument = CreateClient( + clientId: "orch-operator", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "orch:read orch:operate"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "orch:operate"); + transaction.Request?.SetParameter(AuthorityOpenIddictConstants.OperatorReasonParameterName, "resume source after maintenance"); + transaction.Request?.SetParameter(AuthorityOpenIddictConstants.OperatorTicketParameterName, "INC-2045"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); + Assert.Equal("Orchestrator scopes require a tenant assignment.", context.ErrorDescription); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsOrchOperate_WhenReasonMissing() + { + var clientDocument = CreateClient( + clientId: "orch-operator", + secret: 
"s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "orch:read orch:operate", + tenant: "tenant-default"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "orch:operate"); + transaction.Request?.SetParameter(AuthorityOpenIddictConstants.OperatorTicketParameterName, "INC-2045"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidRequest, context.Error); + Assert.Equal("Operator actions require 'operator_reason'.", context.ErrorDescription); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsOrchOperate_WhenTicketMissing() + { + var clientDocument = CreateClient( + clientId: "orch-operator", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "orch:read orch:operate", + tenant: "tenant-default"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "orch:operate"); + transaction.Request?.SetParameter(AuthorityOpenIddictConstants.OperatorReasonParameterName, "resume source after maintenance"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidRequest, context.Error); + Assert.Equal("Operator actions require 'operator_ticket'.", context.ErrorDescription); + } + + [Fact] + public async Task ValidateClientCredentials_AllowsOrchOperate_WithReasonAndTicket() + { + var clientDocument = CreateClient( + clientId: "orch-operator", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "orch:read orch:operate", + tenant: "tenant-default"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "orch:operate"); + transaction.Request?.SetParameter(AuthorityOpenIddictConstants.OperatorReasonParameterName, "resume source after maintenance"); + 
transaction.Request?.SetParameter(AuthorityOpenIddictConstants.OperatorTicketParameterName, "INC-2045"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); + var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); + Assert.Equal(new[] { "orch:operate" }, grantedScopes); + var tenant = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTenantProperty]); + Assert.Equal("tenant-default", tenant); + var reason = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.OperatorReasonProperty]); + Assert.Equal("resume source after maintenance", reason); + var ticket = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.OperatorTicketProperty]); + Assert.Equal("INC-2045", ticket); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsExportViewer_WhenTenantMissing() + { + var clientDocument = CreateClient( + clientId: "export-viewer", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "export.viewer"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + TestHelpers.CreateAuthorityOptions(), + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "export.viewer"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); + Assert.Equal("Export scopes require a tenant assignment.", context.ErrorDescription); + Assert.Equal(StellaOpsScopes.ExportViewer, context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty]); + } + + [Fact] + public async Task ValidateClientCredentials_AllowsExportViewer_WithTenant() + { + var clientDocument = CreateClient( + clientId: "export-viewer", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "export.viewer", + tenant: "tenant-default"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + TestHelpers.CreateAuthorityOptions(), + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "export.viewer"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); + var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); + Assert.Equal(new[] { "export.viewer" }, grantedScopes); + 
var tenant = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTenantProperty]); + Assert.Equal("tenant-default", tenant); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsExportAdmin_WhenReasonMissing() + { + var clientDocument = CreateClient( + clientId: "export-admin", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "export.admin", + tenant: "tenant-default"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + TestHelpers.CreateAuthorityOptions(), + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "export.admin"); + transaction.Request?.SetParameter(AuthorityOpenIddictConstants.ExportAdminTicketParameterName, "INC-9001"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidRequest, context.Error); + Assert.Equal("Export admin actions require 'export_reason'.", context.ErrorDescription); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsExportAdmin_WhenTicketMissing() + { + var clientDocument = CreateClient( + clientId: "export-admin", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "export.admin", + tenant: "tenant-default"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + TestHelpers.CreateAuthorityOptions(), + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "export.admin"); + transaction.Request?.SetParameter(AuthorityOpenIddictConstants.ExportAdminReasonParameterName, "Rotate encryption keys after incident postmortem"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidRequest, context.Error); + Assert.Equal("Export admin actions require 'export_ticket'.", context.ErrorDescription); + } + + [Fact] + public async Task ValidateClientCredentials_AllowsExportAdmin_WithReasonAndTicket() + { + var clientDocument = CreateClient( + clientId: "export-admin", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "export.admin", + tenant: "tenant-default"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + TestHelpers.CreateAuthorityOptions(), + NullLogger.Instance); + + 
var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "export.admin"); + transaction.Request?.SetParameter(AuthorityOpenIddictConstants.ExportAdminReasonParameterName, "Rotate encryption keys after incident postmortem"); + transaction.Request?.SetParameter(AuthorityOpenIddictConstants.ExportAdminTicketParameterName, "INC-9001"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); + var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); + Assert.Equal(new[] { "export.admin" }, grantedScopes); + var tenant = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTenantProperty]); + Assert.Equal("tenant-default", tenant); + var reason = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ExportAdminReasonProperty]); + Assert.Equal("Rotate encryption keys after incident postmortem", reason); + var ticket = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ExportAdminTicketProperty]); + Assert.Equal("INC-9001", ticket); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsGraphWrite_WhenServiceIdentityMissing() + { + var clientDocument = CreateClient( + clientId: "cartographer-service", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "graph:write graph:read", + tenant: "tenant-default"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "graph:write"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.UnauthorizedClient, context.Error); + Assert.Equal("Scope 'graph:write' is reserved for the Cartographer service identity.", context.ErrorDescription); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsGraphWrite_WhenServiceIdentityMismatch() + { + var clientDocument = CreateClient( + clientId: "cartographer-service", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "graph:write graph:read", + tenant: "tenant-default"); + clientDocument.Properties[AuthorityClientMetadataKeys.ServiceIdentity] = StellaOpsServiceIdentities.PolicyEngine; + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: 
"graph:write"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.UnauthorizedClient, context.Error); + Assert.Equal("Scope 'graph:write' is reserved for the Cartographer service identity.", context.ErrorDescription); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsGraphScopes_WhenTenantMissing() + { + var clientDocument = CreateClient( + clientId: "graph-api", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "graph:read graph:export"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "graph:read"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); + Assert.Equal("Graph scopes require a tenant assignment.", context.ErrorDescription); + } + + [Fact] + public async Task ValidateClientCredentials_AllowsGraphRead_WithTenant() + { + var clientDocument = CreateClient( + clientId: "graph-api", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "graph:read graph:export", + tenant: "tenant-default"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "graph:read"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); + var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); + Assert.Equal(new[] { "graph:read" }, grantedScopes); + var tenant = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTenantProperty]); + Assert.Equal("tenant-default", tenant); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsOrchRead_WhenTenantMissing() + { + var clientDocument = CreateClient( + clientId: "orch-dashboard", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "orch:read"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + 
TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "orch:read"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); + Assert.Equal("Orchestrator scopes require a tenant assignment.", context.ErrorDescription); + } + + [Fact] + public async Task ValidateClientCredentials_AllowsOrchRead_WithTenant() + { + var clientDocument = CreateClient( + clientId: "orch-dashboard", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "orch:read", + tenant: "tenant-default"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "orch:read"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); + var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); + Assert.Equal(new[] { "orch:read" }, grantedScopes); + var tenant = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTenantProperty]); + Assert.Equal("tenant-default", tenant); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsAdvisoryScopes_WhenTenantMissing() + { + var clientDocument = CreateClient( + clientId: "concelier-ingestor", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "advisory:ingest advisory:read"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "advisory:ingest"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); + Assert.Equal("Advisory scopes require a tenant assignment.", context.ErrorDescription); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsVexScopes_WhenTenantMissing() + { + var clientDocument = CreateClient( + clientId: "excitor-ingestor", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "vex:ingest 
vex:read"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "vex:read"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); + Assert.Equal("VEX scopes require a tenant assignment.", context.ErrorDescription); + } + + [Fact] + public async Task ValidateClientCredentials_AllowsAdvisoryScopes_WithTenant() + { + var clientDocument = CreateClient( + clientId: "concelier-ingestor", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "advisory:ingest advisory:read aoc:verify", + tenant: "tenant-default"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "advisory:read aoc:verify"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); + var grantedScopes = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); + Assert.Equal(new[] { "advisory:read", "aoc:verify" }, grantedScopes); + } + + [Fact] + public async Task ValidateClientCredentials_AllowsGraphWrite_ForCartographerServiceIdentity() + { + var clientDocument = CreateClient( + clientId: "cartographer-service", + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "graph:write graph:read", + tenant: "tenant-default"); + clientDocument.Properties[AuthorityClientMetadataKeys.ServiceIdentity] = StellaOpsServiceIdentities.Cartographer; + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "graph:write"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); + var grantedScopes = 
Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty]); + Assert.Equal(new[] { "graph:write" }, grantedScopes); + var tenant = Assert.IsType(context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTenantProperty]); + Assert.Equal("tenant-default", tenant); + } + + [Fact] + public async Task ValidateClientCredentials_EmitsTamperAuditEvent_WhenUnexpectedParametersPresent() + { + var clientDocument = CreateClient( + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "jobs:read"); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var sink = new TestAuthEventSink(); + var options = TestHelpers.CreateAuthorityOptions(); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + sink, + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read"); + transaction.Request?.SetParameter("unexpected_param", "value"); + + await handler.HandleAsync(new OpenIddictServerEvents.ValidateTokenRequestContext(transaction)); + + var tamperEvent = Assert.Single(sink.Events, record => record.EventType == "authority.token.tamper"); + Assert.Contains(tamperEvent.Properties, property => + string.Equals(property.Name, "request.unexpected_parameter", StringComparison.OrdinalIgnoreCase) && + string.Equals(property.Value.Value, "unexpected_param", StringComparison.OrdinalIgnoreCase)); + } + + [Fact] + public async Task ValidateDpopProof_AllowsSenderConstrainedClient() + { + var options = TestHelpers.CreateAuthorityOptions(opts => + { + opts.Security.SenderConstraints.Dpop.Enabled = true; + opts.Security.SenderConstraints.Dpop.Nonce.Enabled = false; + }); + + var clientDocument = CreateClient( + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "jobs:read"); + clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Dpop; + clientDocument.Properties[AuthorityClientMetadataKeys.SenderConstraint] = AuthoritySenderConstraintKinds.Dpop; + + using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256); + var securityKey = new ECDsaSecurityKey(ecdsa) + { + KeyId = Guid.NewGuid().ToString("N") + }; + var jwk = JsonWebKeyConverter.ConvertFromECDsaSecurityKey(securityKey); + var expectedThumbprint = ConvertThumbprintToString(jwk.ComputeJwkThumbprint()); + + var clientStore = new TestClientStore(clientDocument); + var auditSink = new TestAuthEventSink(); + var rateMetadata = new TestRateLimiterMetadataAccessor(); + + var dpopValidator = new DpopProofValidator( + Options.Create(new DpopValidationOptions()), + new InMemoryDpopReplayCache(TimeProvider.System), + TimeProvider.System, + NullLogger.Instance); + + var nonceStore = new InMemoryDpopNonceStore(TimeProvider.System, NullLogger.Instance); + + var dpopHandler = new ValidateDpopProofHandler( + options, + clientStore, + dpopValidator, + nonceStore, + rateMetadata, + auditSink, + TimeProvider.System, + TestActivitySource, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read"); + transaction.Options = new OpenIddictServerOptions(); + + var httpContext = new DefaultHttpContext(); + httpContext.Request.Method = "POST"; + httpContext.Request.Scheme = "https"; + 
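// Hypothetical sketch of TestHelpers.CreateDpopProof (defined outside this hunk), assuming
// Microsoft.IdentityModel.JsonWebTokens is referenced; the real helper may differ. A DPoP
// proof is a short-lived JWT typed "dpop+jwt" that carries the public JWK in its header and
// binds htm/htu/iat/jti claims to the request; its JWK thumbprint is what later surfaces as
// the token's cnf {"jkt": "..."} value asserted further down.
static string CreateDpopProofSketch(ECDsaSecurityKey key, string httpMethod, string httpUrl, long issuedAt)
{
    var jwk = JsonWebKeyConverter.ConvertFromECDsaSecurityKey(key);
    var descriptor = new SecurityTokenDescriptor
    {
        TokenType = "dpop+jwt",
        SigningCredentials = new SigningCredentials(key, SecurityAlgorithms.EcdsaSha256),
        AdditionalHeaderClaims = new Dictionary<string, object>
        {
            ["jwk"] = new Dictionary<string, string> { ["kty"] = jwk.Kty, ["crv"] = jwk.Crv, ["x"] = jwk.X, ["y"] = jwk.Y }
        },
        Claims = new Dictionary<string, object>
        {
            ["htm"] = httpMethod,
            ["htu"] = httpUrl,
            ["iat"] = issuedAt,
            ["jti"] = Guid.NewGuid().ToString("N")
        }
    };
    return new JsonWebTokenHandler().CreateToken(descriptor);
}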
httpContext.Request.Host = new HostString("authority.test"); + httpContext.Request.Path = "/token"; + + var now = TimeProvider.System.GetUtcNow(); + var proof = TestHelpers.CreateDpopProof(securityKey, httpContext.Request.Method, httpContext.Request.GetDisplayUrl(), now.ToUnixTimeSeconds()); + httpContext.Request.Headers["DPoP"] = proof; + + transaction.Properties[typeof(HttpContext).FullName!] = httpContext; + + var validateContext = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + await dpopHandler.HandleAsync(validateContext); + + Assert.False(validateContext.IsRejected); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var validateHandler = new ValidateClientCredentialsHandler( + clientStore, + registry, + TestActivitySource, + auditSink, + rateMetadata, + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + await validateHandler.HandleAsync(validateContext); + Assert.False(validateContext.IsRejected); + + var tokenStore = new TestTokenStore(); + var sessionAccessor = new NullMongoSessionAccessor(); + var handleHandler = new HandleClientCredentialsHandler( + registry, + tokenStore, + sessionAccessor, + rateMetadata, + TimeProvider.System, + TestActivitySource, + NullLogger.Instance); + + var handleContext = new OpenIddictServerEvents.HandleTokenRequestContext(transaction); + await handleHandler.HandleAsync(handleContext); + Assert.True(handleContext.IsRequestHandled); + + var persistHandler = new PersistTokensHandler( + tokenStore, + sessionAccessor, + TimeProvider.System, + TestActivitySource, + NullLogger.Instance); + + var signInContext = new OpenIddictServerEvents.ProcessSignInContext(transaction) + { + Principal = handleContext.Principal, + AccessTokenPrincipal = handleContext.Principal + }; + + await persistHandler.HandleAsync(signInContext); + + var confirmationClaim = handleContext.Principal?.GetClaim(AuthorityOpenIddictConstants.ConfirmationClaimType); + Assert.False(string.IsNullOrWhiteSpace(confirmationClaim)); + + using (var confirmationJson = JsonDocument.Parse(confirmationClaim!)) + { + Assert.Equal(expectedThumbprint, confirmationJson.RootElement.GetProperty("jkt").GetString()); + } + + Assert.NotNull(tokenStore.Inserted); + Assert.Equal(AuthoritySenderConstraintKinds.Dpop, tokenStore.Inserted!.SenderConstraint); + Assert.Equal(expectedThumbprint, tokenStore.Inserted!.SenderKeyThumbprint); + } + + [Fact] + public async Task ValidateDpopProof_IssuesNonceChallenge_WhenNonceMissing() + { + var options = new StellaOpsAuthorityOptions + { + Issuer = new Uri("https://authority.test") + }; + options.Security.SenderConstraints.Dpop.Enabled = true; + options.Security.SenderConstraints.Dpop.Nonce.Enabled = true; + options.Security.SenderConstraints.Dpop.Nonce.RequiredAudiences.Clear(); + options.Security.SenderConstraints.Dpop.Nonce.RequiredAudiences.Add("signer"); + options.Signing.ActiveKeyId = "test-key"; + options.Signing.KeyPath = "/tmp/test-key.pem"; + options.Storage.ConnectionString = "mongodb://localhost/test"; + Assert.Contains("signer", options.Security.SenderConstraints.Dpop.Nonce.RequiredAudiences); + + var clientDocument = CreateClient( + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "jobs:read", + allowedAudiences: "signer"); + clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Dpop; + clientDocument.Properties[AuthorityClientMetadataKeys.SenderConstraint] = 
AuthoritySenderConstraintKinds.Dpop; + + using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256); + var securityKey = new ECDsaSecurityKey(ecdsa) + { + KeyId = Guid.NewGuid().ToString("N") + }; + + var clientStore = new TestClientStore(clientDocument); + var auditSink = new TestAuthEventSink(); + var rateMetadata = new TestRateLimiterMetadataAccessor(); + + var dpopValidator = new DpopProofValidator( + Options.Create(new DpopValidationOptions()), + new InMemoryDpopReplayCache(TimeProvider.System), + TimeProvider.System, + NullLogger.Instance); + + var nonceStore = new InMemoryDpopNonceStore(TimeProvider.System, NullLogger.Instance); + + var dpopHandler = new ValidateDpopProofHandler( + options, + clientStore, + dpopValidator, + nonceStore, + rateMetadata, + auditSink, + TimeProvider.System, + TestActivitySource, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read"); + transaction.Options = new OpenIddictServerOptions(); + + var httpContext = new DefaultHttpContext(); + httpContext.Request.Method = "POST"; + httpContext.Request.Scheme = "https"; + httpContext.Request.Host = new HostString("authority.test"); + httpContext.Request.Path = "/token"; + + var now = TimeProvider.System.GetUtcNow(); + var proof = TestHelpers.CreateDpopProof(securityKey, httpContext.Request.Method, httpContext.Request.GetDisplayUrl(), now.ToUnixTimeSeconds()); + httpContext.Request.Headers["DPoP"] = proof; + + transaction.Properties[typeof(HttpContext).FullName!] = httpContext; + + var validateContext = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + await dpopHandler.HandleAsync(validateContext); + + Assert.True(validateContext.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidClient, validateContext.Error); + var authenticateHeader = Assert.Single(httpContext.Response.Headers.Select(header => header) + .Where(header => string.Equals(header.Key, "WWW-Authenticate", StringComparison.OrdinalIgnoreCase))).Value; + Assert.Contains("use_dpop_nonce", authenticateHeader.ToString()); + Assert.True(httpContext.Response.Headers.TryGetValue("DPoP-Nonce", out var nonceValues)); + Assert.False(StringValues.IsNullOrEmpty(nonceValues)); + Assert.Contains(auditSink.Events, record => record.EventType == "authority.dpop.proof.challenge"); + } + + [Fact] + public async Task ValidateClientCredentials_AllowsMtlsClient_WithValidCertificate() + { + var options = new StellaOpsAuthorityOptions + { + Issuer = new Uri("https://authority.test") + }; + options.Security.SenderConstraints.Mtls.Enabled = true; + options.Security.SenderConstraints.Mtls.RequireChainValidation = false; + options.Security.SenderConstraints.Mtls.AllowedSanTypes.Clear(); + options.Signing.ActiveKeyId = "test-key"; + options.Signing.KeyPath = "/tmp/test-key.pem"; + options.Storage.ConnectionString = "mongodb://localhost/test"; + + var clientDocument = CreateClient( + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "jobs:read"); + clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Mtls; + clientDocument.Properties[AuthorityClientMetadataKeys.SenderConstraint] = AuthoritySenderConstraintKinds.Mtls; + + using var rsa = RSA.Create(2048); + var certificateRequest = new CertificateRequest("CN=mtls-client", rsa, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1); + using var certificate = certificateRequest.CreateSelfSigned(DateTimeOffset.UtcNow.AddMinutes(-5), DateTimeOffset.UtcNow.AddHours(1)); + var hexThumbprint = 
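// The certificate binding below is keyed by the hex-encoded SHA-256 certificate hash, whereas
// the handler records the same hash base64url-encoded in MtlsCertificateThumbprintProperty;
// that base64url form is the value the expectedBase64 assertion checks a few lines down and
// is likely what ends up as the mTLS token's cnf "x5t#S256" entry.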
Convert.ToHexString(certificate.GetCertHash(HashAlgorithmName.SHA256)); + clientDocument.CertificateBindings.Add(new AuthorityClientCertificateBinding + { + Thumbprint = hexThumbprint + }); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var auditSink = new TestAuthEventSink(); + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var httpContextAccessor = new HttpContextAccessor { HttpContext = new DefaultHttpContext() }; + httpContextAccessor.HttpContext!.Connection.ClientCertificate = certificate; + + var validator = new AuthorityClientCertificateValidator(options, TimeProvider.System, NullLogger.Instance); + + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + auditSink, + metadataAccessor, + TimeProvider.System, + validator, + httpContextAccessor, + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.False(context.IsRejected, context.ErrorDescription ?? context.Error); + Assert.Equal(AuthoritySenderConstraintKinds.Mtls, context.Transaction.Properties[AuthorityOpenIddictConstants.SenderConstraintProperty]); + + var expectedBase64 = Base64UrlEncoder.Encode(certificate.GetCertHash(HashAlgorithmName.SHA256)); + Assert.Equal(expectedBase64, context.Transaction.Properties[AuthorityOpenIddictConstants.MtlsCertificateThumbprintProperty]); + } + + [Fact] + public async Task ValidateClientCredentials_RejectsMtlsClient_WhenCertificateMissing() + { + var options = new StellaOpsAuthorityOptions + { + Issuer = new Uri("https://authority.test") + }; + options.Security.SenderConstraints.Mtls.Enabled = true; + options.Signing.ActiveKeyId = "test-key"; + options.Signing.KeyPath = "/tmp/test-key.pem"; + options.Storage.ConnectionString = "mongodb://localhost/test"; + + var clientDocument = CreateClient( + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "jobs:read"); + clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Mtls; + clientDocument.Properties[AuthorityClientMetadataKeys.SenderConstraint] = AuthoritySenderConstraintKinds.Mtls; + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var httpContextAccessor = new HttpContextAccessor { HttpContext = new DefaultHttpContext() }; + var validator = new AuthorityClientCertificateValidator(options, TimeProvider.System, NullLogger.Instance); + + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + validator, + httpContextAccessor, + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); + } + + [Fact] + public async Task ValidateClientCredentials_Rejects_WhenAudienceRequiresMtlsButClientConfiguredForDpop() + { + var options = TestHelpers.CreateAuthorityOptions(opts => + { + 
opts.Security.SenderConstraints.Mtls.Enabled = true; + opts.Security.SenderConstraints.Mtls.EnforceForAudiences.Clear(); + opts.Security.SenderConstraints.Mtls.EnforceForAudiences.Add("signer"); + }); + + var clientDocument = CreateClient( + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "jobs:read", + allowedAudiences: "signer"); + clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Dpop; + clientDocument.Properties[AuthorityClientMetadataKeys.SenderConstraint] = AuthoritySenderConstraintKinds.Dpop; + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); + Assert.Equal("Requested audiences require mutual TLS sender constraint.", context.ErrorDescription); + } + + [Fact] + public async Task ValidateClientCredentials_RequiresMtlsWhenAudienceMatchesEnforcement() + { + var options = TestHelpers.CreateAuthorityOptions(opts => + { + opts.Security.SenderConstraints.Mtls.Enabled = true; + opts.Security.SenderConstraints.Mtls.EnforceForAudiences.Clear(); + opts.Security.SenderConstraints.Mtls.EnforceForAudiences.Add("signer"); + }); + + var clientDocument = CreateClient( + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "jobs:read", + allowedAudiences: "signer"); + clientDocument.CertificateBindings.Add(new AuthorityClientCertificateBinding + { + Thumbprint = "DEADBEEF" + }); + + var registry = CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)); + var certificateValidator = new RecordingCertificateValidator(); + var httpContextAccessor = new HttpContextAccessor { HttpContext = new DefaultHttpContext() }; + + var handler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + new TestAuthEventSink(), + new TestRateLimiterMetadataAccessor(), + TimeProvider.System, + certificateValidator, + httpContextAccessor, + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:read"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); + Assert.Equal("client_certificate_required", context.ErrorDescription); + Assert.True(certificateValidator.Invoked); + } + + [Fact] + public async Task HandleClientCredentials_PersistsTokenAndEnrichesClaims() + { + var clientDocument = CreateClient( + secret: null, + clientType: "public", + allowedGrantTypes: "client_credentials", + allowedScopes: "jobs:trigger", + allowedAudiences: "signer", + tenant: "Tenant-Alpha"); + + var descriptor = CreateDescriptor(clientDocument); + var registry = CreateRegistry(withClientProvisioning: true, 
clientDescriptor: descriptor); + var tokenStore = new TestTokenStore(); + var sessionAccessor = new NullMongoSessionAccessor(); + var authSink = new TestAuthEventSink(); + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var options = TestHelpers.CreateAuthorityOptions(); + var validateHandler = new ValidateClientCredentialsHandler( + new TestClientStore(clientDocument), + registry, + TestActivitySource, + authSink, + metadataAccessor, + TimeProvider.System, + new NoopCertificateValidator(), + new HttpContextAccessor(), + options, + NullLogger.Instance); + + var transaction = CreateTokenTransaction(clientDocument.ClientId, secret: null, scope: "jobs:trigger"); + transaction.Options.AccessTokenLifetime = TimeSpan.FromMinutes(30); + + var validateContext = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + await validateHandler.HandleAsync(validateContext); + Assert.False(validateContext.IsRejected); + + var handler = new HandleClientCredentialsHandler( + registry, + tokenStore, + sessionAccessor, + metadataAccessor, + TimeProvider.System, + TestActivitySource, + NullLogger.Instance); + var persistHandler = new PersistTokensHandler(tokenStore, sessionAccessor, TimeProvider.System, TestActivitySource, NullLogger.Instance); + + var context = new OpenIddictServerEvents.HandleTokenRequestContext(transaction); + + await handler.HandleAsync(context); + + Assert.True(context.IsRequestHandled); + Assert.NotNull(context.Principal); + Assert.Contains("signer", context.Principal!.GetAudiences()); + + Assert.Contains(authSink.Events, record => record.EventType == "authority.client_credentials.grant" && record.Outcome == AuthEventOutcome.Success); + + var identityProviderClaim = context.Principal?.GetClaim(StellaOpsClaimTypes.IdentityProvider); + Assert.Equal(clientDocument.Plugin, identityProviderClaim); + + var principal = context.Principal ?? 
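// The client above was registered with tenant "Tenant-Alpha"; the assertions that follow
// expect the lower-cased "tenant-alpha", i.e. tenant values are normalised before they are
// stamped onto claims and the persisted token document.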
throw new InvalidOperationException("Principal missing"); + Assert.Equal("tenant-alpha", principal.FindFirstValue(StellaOpsClaimTypes.Tenant)); + var tokenId = principal.GetClaim(OpenIddictConstants.Claims.JwtId); + Assert.False(string.IsNullOrWhiteSpace(tokenId)); + + var signInContext = new OpenIddictServerEvents.ProcessSignInContext(transaction) + { + Principal = principal, + AccessTokenPrincipal = principal + }; + + await persistHandler.HandleAsync(signInContext); + + var persisted = Assert.IsType(tokenStore.Inserted); + Assert.Equal(tokenId, persisted.TokenId); + Assert.Equal(clientDocument.ClientId, persisted.ClientId); + Assert.Equal("valid", persisted.Status); + Assert.Equal("tenant-alpha", persisted.Tenant); + Assert.Equal(new[] { "jobs:trigger" }, persisted.Scope); + } +} + +public class TokenValidationHandlersTests +{ + private static readonly ActivitySource TestActivitySource = new("StellaOps.Authority.Tests.TokenValidation"); + + [Fact] + public async Task ValidateAccessTokenHandler_Rejects_WhenTokenRevoked() + { + var tokenStore = new TestTokenStore(); + tokenStore.Inserted = new AuthorityTokenDocument + { + TokenId = "token-1", + Status = "revoked", + ClientId = "concelier" + }; + + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var auditSink = new TestAuthEventSink(); + var sessionAccessor = new NullMongoSessionAccessor(); + var handler = new ValidateAccessTokenHandler( + tokenStore, + sessionAccessor, + new TestClientStore(CreateClient()), + CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(CreateClient())), + metadataAccessor, + auditSink, + TimeProvider.System, + TestActivitySource, + NullLogger.Instance); + + var transaction = new OpenIddictServerTransaction + { + Options = new OpenIddictServerOptions(), + EndpointType = OpenIddictServerEndpointType.Token, + Request = new OpenIddictRequest() + }; + + var principal = CreatePrincipal("concelier", "token-1", "standard"); + var context = new OpenIddictServerEvents.ValidateTokenContext(transaction) + { + Principal = principal, + TokenId = "token-1" + }; + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidToken, context.Error); + } + + [Fact] + public async Task ValidateAccessTokenHandler_AddsTenantClaim_FromTokenDocument() + { + var clientDocument = CreateClient(tenant: "tenant-alpha"); + var tokenStore = new TestTokenStore + { + Inserted = new AuthorityTokenDocument + { + TokenId = "token-tenant", + Status = "valid", + ClientId = clientDocument.ClientId, + Tenant = "tenant-alpha" + } + }; + + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var auditSink = new TestAuthEventSink(); + var sessionAccessor = new NullMongoSessionAccessor(); + var handler = new ValidateAccessTokenHandler( + tokenStore, + sessionAccessor, + new TestClientStore(clientDocument), + CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)), + metadataAccessor, + auditSink, + TimeProvider.System, + TestActivitySource, + NullLogger.Instance); + + var transaction = new OpenIddictServerTransaction + { + Options = new OpenIddictServerOptions(), + EndpointType = OpenIddictServerEndpointType.Token, + Request = new OpenIddictRequest() + }; + + var principal = CreatePrincipal(clientDocument.ClientId, "token-tenant", clientDocument.Plugin); + var context = new OpenIddictServerEvents.ValidateTokenContext(transaction) + { + Principal = principal, + TokenId = "token-tenant" + }; + + await 
handler.HandleAsync(context); + + Assert.False(context.IsRejected); + Assert.Equal("tenant-alpha", principal.FindFirstValue(StellaOpsClaimTypes.Tenant)); + Assert.Equal("tenant-alpha", metadataAccessor.GetMetadata()?.Tenant); + Assert.Equal(StellaOpsTenancyDefaults.AnyProject, principal.FindFirstValue(StellaOpsClaimTypes.Project)); + Assert.Equal(StellaOpsTenancyDefaults.AnyProject, metadataAccessor.GetMetadata()?.Project); + Assert.Equal(StellaOpsTenancyDefaults.AnyProject, principal.FindFirstValue(StellaOpsClaimTypes.Project)); + Assert.Equal(StellaOpsTenancyDefaults.AnyProject, metadataAccessor.GetMetadata()?.Project); + } + + [Fact] + public async Task ValidateAccessTokenHandler_Rejects_WhenTenantDiffersFromToken() + { + var clientDocument = CreateClient(tenant: "tenant-alpha"); + var tokenStore = new TestTokenStore + { + Inserted = new AuthorityTokenDocument + { + TokenId = "token-tenant", + Status = "valid", + ClientId = clientDocument.ClientId, + Tenant = "tenant-alpha" + } + }; + + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var auditSink = new TestAuthEventSink(); + var sessionAccessor = new NullMongoSessionAccessor(); + var handler = new ValidateAccessTokenHandler( + tokenStore, + sessionAccessor, + new TestClientStore(clientDocument), + CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)), + metadataAccessor, + auditSink, + TimeProvider.System, + TestActivitySource, + NullLogger.Instance); + + var transaction = new OpenIddictServerTransaction + { + Options = new OpenIddictServerOptions(), + EndpointType = OpenIddictServerEndpointType.Token, + Request = new OpenIddictRequest() + }; + + var principal = CreatePrincipal(clientDocument.ClientId, "token-tenant", clientDocument.Plugin); + principal.Identities.First().AddClaim(new Claim(StellaOpsClaimTypes.Tenant, "tenant-beta")); + var context = new OpenIddictServerEvents.ValidateTokenContext(transaction) + { + Principal = principal, + TokenId = "token-tenant" + }; + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidToken, context.Error); + Assert.Equal("The token tenant does not match the issued tenant.", context.ErrorDescription); + } + + [Fact] + public async Task ValidateAccessTokenHandler_AssignsTenant_FromClientWhenTokenMissing() + { + var clientDocument = CreateClient(tenant: "tenant-alpha"); + var tokenStore = new TestTokenStore + { + Inserted = new AuthorityTokenDocument + { + TokenId = "token-tenant", + Status = "valid", + ClientId = clientDocument.ClientId + } + }; + + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var auditSink = new TestAuthEventSink(); + var sessionAccessor = new NullMongoSessionAccessor(); + var handler = new ValidateAccessTokenHandler( + tokenStore, + sessionAccessor, + new TestClientStore(clientDocument), + CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)), + metadataAccessor, + auditSink, + TimeProvider.System, + TestActivitySource, + NullLogger.Instance); + + var transaction = new OpenIddictServerTransaction + { + Options = new OpenIddictServerOptions(), + EndpointType = OpenIddictServerEndpointType.Token, + Request = new OpenIddictRequest() + }; + + var principal = CreatePrincipal(clientDocument.ClientId, "token-tenant", clientDocument.Plugin); + var context = new OpenIddictServerEvents.ValidateTokenContext(transaction) + { + Principal = principal, + TokenId = "token-tenant" + }; + + await 
handler.HandleAsync(context); + + Assert.False(context.IsRejected); + Assert.Equal("tenant-alpha", principal.FindFirstValue(StellaOpsClaimTypes.Tenant)); + Assert.Equal("tenant-alpha", metadataAccessor.GetMetadata()?.Tenant); + } + + [Fact] + public async Task ValidateAccessTokenHandler_Rejects_WhenClientTenantDiffers() + { + var clientDocument = CreateClient(tenant: "tenant-beta"); + var tokenStore = new TestTokenStore + { + Inserted = new AuthorityTokenDocument + { + TokenId = "token-tenant", + Status = "valid", + ClientId = clientDocument.ClientId + } + }; + + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var auditSink = new TestAuthEventSink(); + var sessionAccessor = new NullMongoSessionAccessor(); + var handler = new ValidateAccessTokenHandler( + tokenStore, + sessionAccessor, + new TestClientStore(clientDocument), + CreateRegistry(withClientProvisioning: true, clientDescriptor: CreateDescriptor(clientDocument)), + metadataAccessor, + auditSink, + TimeProvider.System, + TestActivitySource, + NullLogger.Instance); + + var transaction = new OpenIddictServerTransaction + { + Options = new OpenIddictServerOptions(), + EndpointType = OpenIddictServerEndpointType.Token, + Request = new OpenIddictRequest() + }; + + var principal = CreatePrincipal(clientDocument.ClientId, "token-tenant", clientDocument.Plugin); + principal.Identities.First().AddClaim(new Claim(StellaOpsClaimTypes.Tenant, "tenant-alpha")); + var context = new OpenIddictServerEvents.ValidateTokenContext(transaction) + { + Principal = principal, + TokenId = "token-tenant" + }; + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidToken, context.Error); + Assert.Equal("The token tenant does not match the registered client tenant.", context.ErrorDescription); + } + + [Fact] + public async Task ValidateAccessTokenHandler_EnrichesClaims_WhenProviderAvailable() + { + var clientDocument = CreateClient(); + var userDescriptor = new AuthorityUserDescriptor("user-1", "alice", displayName: "Alice", requiresPasswordReset: false); + + var plugin = CreatePlugin( + name: "standard", + supportsClientProvisioning: true, + descriptor: CreateDescriptor(clientDocument), + user: userDescriptor); + + var registry = CreateRegistryFromPlugins(plugin); + + var metadataAccessorSuccess = new TestRateLimiterMetadataAccessor(); + var auditSinkSuccess = new TestAuthEventSink(); + var sessionAccessor = new NullMongoSessionAccessor(); + var handler = new ValidateAccessTokenHandler( + new TestTokenStore(), + sessionAccessor, + new TestClientStore(clientDocument), + registry, + metadataAccessorSuccess, + auditSinkSuccess, + TimeProvider.System, + TestActivitySource, + NullLogger.Instance); + + var transaction = new OpenIddictServerTransaction + { + Options = new OpenIddictServerOptions(), + EndpointType = OpenIddictServerEndpointType.Token, + Request = new OpenIddictRequest() + }; + + var principal = CreatePrincipal(clientDocument.ClientId, "token-123", plugin.Name, subject: userDescriptor.SubjectId); + var context = new OpenIddictServerEvents.ValidateTokenContext(transaction) + { + Principal = principal + }; + + await handler.HandleAsync(context); + + Assert.False(context.IsRejected); + Assert.Contains(principal.Claims, claim => claim.Type == "enriched" && claim.Value == "true"); + } + + [Fact] + public async Task ValidateAccessTokenHandler_AddsConfirmationClaim_ForMtlsToken() + { + var tokenDocument = new AuthorityTokenDocument + { + TokenId = "token-mtls", + Status = 
"valid", + ClientId = "mtls-client", + SenderConstraint = AuthoritySenderConstraintKinds.Mtls, + SenderKeyThumbprint = "thumb-print" + }; + + var tokenStore = new TestTokenStore + { + Inserted = tokenDocument + }; + + var clientDocument = CreateClient(); + var registry = CreateRegistry(withClientProvisioning: false, clientDescriptor: null); + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var auditSink = new TestAuthEventSink(); + var sessionAccessor = new NullMongoSessionAccessor(); + var handler = new ValidateAccessTokenHandler( + tokenStore, + sessionAccessor, + new TestClientStore(clientDocument), + registry, + metadataAccessor, + auditSink, + TimeProvider.System, + TestActivitySource, + NullLogger.Instance); + + var transaction = new OpenIddictServerTransaction + { + Options = new OpenIddictServerOptions(), + EndpointType = OpenIddictServerEndpointType.Introspection, + Request = new OpenIddictRequest() + }; + + var principal = CreatePrincipal(clientDocument.ClientId, tokenDocument.TokenId, clientDocument.Plugin); + var context = new OpenIddictServerEvents.ValidateTokenContext(transaction) + { + Principal = principal, + TokenId = tokenDocument.TokenId + }; + + await handler.HandleAsync(context); + + Assert.False(context.IsRejected); + var confirmation = context.Principal?.GetClaim(AuthorityOpenIddictConstants.ConfirmationClaimType); + Assert.False(string.IsNullOrWhiteSpace(confirmation)); + using var json = JsonDocument.Parse(confirmation!); + Assert.Equal(tokenDocument.SenderKeyThumbprint, json.RootElement.GetProperty("x5t#S256").GetString()); + } + + [Fact] + public async Task ValidateAccessTokenHandler_EmitsReplayAudit_WhenStoreDetectsSuspectedReplay() + { + var tokenStore = new TestTokenStore(); + tokenStore.Inserted = new AuthorityTokenDocument + { + TokenId = "token-replay", + Status = "valid", + ClientId = "agent", + Devices = new List + { + new BsonDocument + { + { "remoteAddress", "10.0.0.1" }, + { "userAgent", "agent/1.0" }, + { "firstSeen", BsonDateTime.Create(DateTimeOffset.UtcNow.AddMinutes(-15)) }, + { "lastSeen", BsonDateTime.Create(DateTimeOffset.UtcNow.AddMinutes(-5)) }, + { "useCount", 2 } + } + } + }; + + tokenStore.UsageCallback = (remote, agent) => new TokenUsageUpdateResult(TokenUsageUpdateStatus.SuspectedReplay, remote, agent); + + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var metadata = metadataAccessor.GetMetadata(); + if (metadata is not null) + { + metadata.RemoteIp = "203.0.113.7"; + metadata.UserAgent = "agent/2.0"; + } + + var clientDocument = CreateClient(); + clientDocument.ClientId = "agent"; + var auditSink = new TestAuthEventSink(); + var registry = CreateRegistry(withClientProvisioning: false, clientDescriptor: null); + var sessionAccessorReplay = new NullMongoSessionAccessor(); + var handler = new ValidateAccessTokenHandler( + tokenStore, + sessionAccessorReplay, + new TestClientStore(clientDocument), + registry, + metadataAccessor, + auditSink, + TimeProvider.System, + TestActivitySource, + NullLogger.Instance); + + var transaction = new OpenIddictServerTransaction + { + Options = new OpenIddictServerOptions(), + EndpointType = OpenIddictServerEndpointType.Introspection, + Request = new OpenIddictRequest() + }; + + var principal = CreatePrincipal("agent", "token-replay", "standard"); + var context = new OpenIddictServerEvents.ValidateTokenContext(transaction) + { + Principal = principal, + TokenId = "token-replay" + }; + + await handler.HandleAsync(context); + + Assert.False(context.IsRejected); + var 
replayEvent = Assert.Single(auditSink.Events, record => record.EventType == "authority.token.replay.suspected"); + Assert.Equal(AuthEventOutcome.Error, replayEvent.Outcome); + Assert.NotNull(replayEvent.Network); + Assert.Equal("203.0.113.7", replayEvent.Network?.RemoteAddress.Value); + Assert.Contains(replayEvent.Properties, property => property.Name == "token.devices.total"); + } +} + +public class AuthorityClientCertificateValidatorTests +{ + [Fact] + public async Task ValidateAsync_Rejects_WhenSanTypeNotAllowed() + { + var options = new StellaOpsAuthorityOptions + { + Issuer = new Uri("https://authority.test") + }; + options.Security.SenderConstraints.Mtls.Enabled = true; + options.Security.SenderConstraints.Mtls.RequireChainValidation = false; + options.Security.SenderConstraints.Mtls.AllowedSanTypes.Clear(); + options.Security.SenderConstraints.Mtls.AllowedSanTypes.Add("uri"); + options.Signing.ActiveKeyId = "test-key"; + options.Signing.KeyPath = "/tmp/test-key.pem"; + options.Storage.ConnectionString = "mongodb://localhost/test"; + + using var rsa = RSA.Create(2048); + var request = new CertificateRequest("CN=mtls-client", rsa, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1); + var sanBuilder = new SubjectAlternativeNameBuilder(); + sanBuilder.AddDnsName("client.mtls.test"); + request.CertificateExtensions.Add(sanBuilder.Build()); + using var certificate = request.CreateSelfSigned(DateTimeOffset.UtcNow.AddMinutes(-5), DateTimeOffset.UtcNow.AddMinutes(5)); + + var clientDocument = CreateClient(); + clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Mtls; + clientDocument.CertificateBindings.Add(new AuthorityClientCertificateBinding + { + Thumbprint = Convert.ToHexString(certificate.GetCertHash(HashAlgorithmName.SHA256)) + }); + + var httpContext = new DefaultHttpContext(); + httpContext.Connection.ClientCertificate = certificate; + + var validator = new AuthorityClientCertificateValidator(options, TimeProvider.System, NullLogger.Instance); + var result = await validator.ValidateAsync(httpContext, clientDocument, CancellationToken.None); + + Assert.False(result.Succeeded); + Assert.Equal("certificate_san_type", result.Error); + } + + [Fact] + public async Task ValidateAsync_AllowsBindingWithinRotationGrace() + { + var options = new StellaOpsAuthorityOptions + { + Issuer = new Uri("https://authority.test") + }; + options.Security.SenderConstraints.Mtls.Enabled = true; + options.Security.SenderConstraints.Mtls.RequireChainValidation = false; + options.Security.SenderConstraints.Mtls.RotationGrace = TimeSpan.FromMinutes(5); + options.Signing.ActiveKeyId = "test-key"; + options.Signing.KeyPath = "/tmp/test-key.pem"; + options.Storage.ConnectionString = "mongodb://localhost/test"; + + using var rsa = RSA.Create(2048); + var request = new CertificateRequest("CN=mtls-client", rsa, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1); + var sanBuilder = new SubjectAlternativeNameBuilder(); + sanBuilder.AddDnsName("client.mtls.test"); + request.CertificateExtensions.Add(sanBuilder.Build()); + using var certificate = request.CreateSelfSigned(DateTimeOffset.UtcNow.AddMinutes(-5), DateTimeOffset.UtcNow.AddMinutes(10)); + + var thumbprint = Convert.ToHexString(certificate.GetCertHash(HashAlgorithmName.SHA256)); + + var clientDocument = CreateClient(); + clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Mtls; + clientDocument.CertificateBindings.Add(new AuthorityClientCertificateBinding + { + Thumbprint = thumbprint, + NotBefore = 
TimeProvider.System.GetUtcNow().AddMinutes(2) + }); + + var httpContext = new DefaultHttpContext(); + httpContext.Connection.ClientCertificate = certificate; + + var validator = new AuthorityClientCertificateValidator(options, TimeProvider.System, NullLogger.Instance); + var result = await validator.ValidateAsync(httpContext, clientDocument, CancellationToken.None); + + Assert.True(result.Succeeded); + Assert.Equal(thumbprint, result.HexThumbprint); + } + + [Fact] + public async Task ValidateAsync_Rejects_WhenBindingSubjectMismatch() + { + var options = new StellaOpsAuthorityOptions + { + Issuer = new Uri("https://authority.test") + }; + options.Security.SenderConstraints.Mtls.Enabled = true; + options.Security.SenderConstraints.Mtls.RequireChainValidation = false; + options.Signing.ActiveKeyId = "test-key"; + options.Signing.KeyPath = "/tmp/test-key.pem"; + options.Storage.ConnectionString = "mongodb://localhost/test"; + + using var rsa = RSA.Create(2048); + var request = new CertificateRequest("CN=mtls-client", rsa, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1); + var sanBuilder = new SubjectAlternativeNameBuilder(); + sanBuilder.AddDnsName("client.mtls.test"); + request.CertificateExtensions.Add(sanBuilder.Build()); + using var certificate = request.CreateSelfSigned(DateTimeOffset.UtcNow.AddMinutes(-5), DateTimeOffset.UtcNow.AddMinutes(5)); + + var clientDocument = CreateClient(); + clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Mtls; + clientDocument.CertificateBindings.Add(new AuthorityClientCertificateBinding + { + Thumbprint = Convert.ToHexString(certificate.GetCertHash(HashAlgorithmName.SHA256)), + Subject = "CN=different-client" + }); + + var httpContext = new DefaultHttpContext(); + httpContext.Connection.ClientCertificate = certificate; + + var validator = new AuthorityClientCertificateValidator(options, TimeProvider.System, NullLogger.Instance); + var result = await validator.ValidateAsync(httpContext, clientDocument, CancellationToken.None); + + Assert.False(result.Succeeded); + Assert.Equal("certificate_binding_subject_mismatch", result.Error); + } + + [Fact] + public async Task ValidateAsync_Rejects_WhenBindingSansMissing() + { + var options = new StellaOpsAuthorityOptions + { + Issuer = new Uri("https://authority.test") + }; + options.Security.SenderConstraints.Mtls.Enabled = true; + options.Security.SenderConstraints.Mtls.RequireChainValidation = false; + options.Signing.ActiveKeyId = "test-key"; + options.Signing.KeyPath = "/tmp/test-key.pem"; + options.Storage.ConnectionString = "mongodb://localhost/test"; + + using var rsa = RSA.Create(2048); + var request = new CertificateRequest("CN=mtls-client", rsa, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1); + var sanBuilder = new SubjectAlternativeNameBuilder(); + sanBuilder.AddDnsName("client.mtls.test"); + request.CertificateExtensions.Add(sanBuilder.Build()); + using var certificate = request.CreateSelfSigned(DateTimeOffset.UtcNow.AddMinutes(-5), DateTimeOffset.UtcNow.AddMinutes(5)); + + var clientDocument = CreateClient(); + clientDocument.SenderConstraint = AuthoritySenderConstraintKinds.Mtls; + clientDocument.CertificateBindings.Add(new AuthorityClientCertificateBinding + { + Thumbprint = Convert.ToHexString(certificate.GetCertHash(HashAlgorithmName.SHA256)), + SubjectAlternativeNames = new List { "spiffe://client" } + }); + + var httpContext = new DefaultHttpContext(); + httpContext.Connection.ClientCertificate = certificate; + + var validator = new 
AuthorityClientCertificateValidator(options, TimeProvider.System, NullLogger.Instance); + var result = await validator.ValidateAsync(httpContext, clientDocument, CancellationToken.None); + + Assert.False(result.Succeeded); + Assert.Equal("certificate_binding_san_mismatch", result.Error); + } +} + +internal sealed class TestClientStore : IAuthorityClientStore +{ + private readonly Dictionary clients = new(StringComparer.OrdinalIgnoreCase); + + public TestClientStore(params AuthorityClientDocument[] documents) + { + foreach (var document in documents) + { + clients[document.ClientId] = document; + } + } + + public ValueTask FindByClientIdAsync(string clientId, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + clients.TryGetValue(clientId, out var document); + return ValueTask.FromResult(document); + } + + public ValueTask UpsertAsync(AuthorityClientDocument document, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + clients[document.ClientId] = document; + return ValueTask.CompletedTask; + } + + public ValueTask DeleteByClientIdAsync(string clientId, CancellationToken cancellationToken, IClientSessionHandle? session = null) + => ValueTask.FromResult(clients.Remove(clientId)); +} + +internal sealed class TestTokenStore : IAuthorityTokenStore +{ + public AuthorityTokenDocument? Inserted { get; set; } + + public Func? UsageCallback { get; set; } + + public ValueTask InsertAsync(AuthorityTokenDocument document, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + Inserted = document; + return ValueTask.CompletedTask; + } + + public ValueTask FindByTokenIdAsync(string tokenId, CancellationToken cancellationToken, IClientSessionHandle? session = null) + => ValueTask.FromResult(Inserted is not null && string.Equals(Inserted.TokenId, tokenId, StringComparison.OrdinalIgnoreCase) ? Inserted : null); + + public ValueTask FindByReferenceIdAsync(string referenceId, CancellationToken cancellationToken, IClientSessionHandle? session = null) + => ValueTask.FromResult(null); + + public ValueTask UpdateStatusAsync(string tokenId, string status, DateTimeOffset? revokedAt, string? reason, string? reasonDescription, IReadOnlyDictionary? metadata, CancellationToken cancellationToken, IClientSessionHandle? session = null) + => ValueTask.CompletedTask; + + public ValueTask DeleteExpiredAsync(DateTimeOffset threshold, CancellationToken cancellationToken, IClientSessionHandle? session = null) + => ValueTask.FromResult(0L); + + public ValueTask RecordUsageAsync(string tokenId, string? remoteAddress, string? userAgent, DateTimeOffset observedAt, CancellationToken cancellationToken, IClientSessionHandle? session = null) + => ValueTask.FromResult(UsageCallback?.Invoke(remoteAddress, userAgent) ?? new TokenUsageUpdateResult(TokenUsageUpdateStatus.Recorded, remoteAddress, userAgent)); + + public ValueTask> ListRevokedAsync(DateTimeOffset? issuedAfter, CancellationToken cancellationToken, IClientSessionHandle? 
session = null) + => ValueTask.FromResult>(Array.Empty()); +} + +internal sealed class TestClaimsEnricher : IClaimsEnricher +{ + public ValueTask EnrichAsync(ClaimsIdentity identity, AuthorityClaimsEnrichmentContext context, CancellationToken cancellationToken) + { + if (!identity.HasClaim(c => c.Type == "enriched")) + { + identity.AddClaim(new Claim("enriched", "true")); + } + + return ValueTask.CompletedTask; + } +} + +internal sealed class TestUserCredentialStore : IUserCredentialStore +{ + private readonly AuthorityUserDescriptor? user; + + public TestUserCredentialStore(AuthorityUserDescriptor? user) + { + this.user = user; + } + + public ValueTask VerifyPasswordAsync(string username, string password, CancellationToken cancellationToken) + => ValueTask.FromResult(AuthorityCredentialVerificationResult.Failure(AuthorityCredentialFailureCode.InvalidCredentials)); + + public ValueTask> UpsertUserAsync(AuthorityUserRegistration registration, CancellationToken cancellationToken) + => ValueTask.FromResult(AuthorityPluginOperationResult.Failure("unsupported", "not implemented")); + + public ValueTask FindBySubjectAsync(string subjectId, CancellationToken cancellationToken) + => ValueTask.FromResult(user); +} + +internal sealed class TestClientProvisioningStore : IClientProvisioningStore +{ + private readonly AuthorityClientDescriptor? descriptor; + + public TestClientProvisioningStore(AuthorityClientDescriptor? descriptor) + { + this.descriptor = descriptor; + } + + public ValueTask> CreateOrUpdateAsync(AuthorityClientRegistration registration, CancellationToken cancellationToken) + => ValueTask.FromResult(AuthorityPluginOperationResult.Failure("unsupported", "not implemented")); + + public ValueTask FindByClientIdAsync(string clientId, CancellationToken cancellationToken) + => ValueTask.FromResult(descriptor); + + public ValueTask DeleteAsync(string clientId, CancellationToken cancellationToken) + => ValueTask.FromResult(AuthorityPluginOperationResult.Success()); +} + +internal sealed class TestIdentityProviderPlugin : IIdentityProviderPlugin +{ + public TestIdentityProviderPlugin( + AuthorityPluginContext context, + IUserCredentialStore credentialStore, + IClaimsEnricher claimsEnricher, + IClientProvisioningStore? clientProvisioning, + AuthorityIdentityProviderCapabilities capabilities) + { + Context = context; + Credentials = credentialStore; + ClaimsEnricher = claimsEnricher; + ClientProvisioning = clientProvisioning; + Capabilities = capabilities; + } + + public string Name => Context.Manifest.Name; + + public string Type => Context.Manifest.Type; + + public AuthorityPluginContext Context { get; } + + public IUserCredentialStore Credentials { get; } + + public IClaimsEnricher ClaimsEnricher { get; } + + public IClientProvisioningStore? ClientProvisioning { get; } + + public AuthorityIdentityProviderCapabilities Capabilities { get; } + + public ValueTask CheckHealthAsync(CancellationToken cancellationToken) + => ValueTask.FromResult(AuthorityPluginHealthResult.Healthy()); +} + +internal sealed class TestAuthEventSink : IAuthEventSink +{ + public List Events { get; } = new(); + + public ValueTask WriteAsync(AuthEventRecord record, CancellationToken cancellationToken) + { + Events.Add(record); + return ValueTask.CompletedTask; + } +} + +internal sealed class TestRateLimiterMetadataAccessor : IAuthorityRateLimiterMetadataAccessor +{ + private readonly AuthorityRateLimiterMetadata metadata = new(); + + public AuthorityRateLimiterMetadata? 
GetMetadata() => metadata; + + public void SetClientId(string? clientId) => metadata.ClientId = clientId; + + public void SetSubjectId(string? subjectId) => metadata.SubjectId = subjectId; + + public void SetTenant(string? tenant) + { + metadata.Tenant = string.IsNullOrWhiteSpace(tenant) ? null : tenant.Trim().ToLowerInvariant(); + metadata.SetTag("authority.tenant", metadata.Tenant); + } + + public void SetProject(string? project) + { + metadata.Project = string.IsNullOrWhiteSpace(project) ? null : project.Trim().ToLowerInvariant(); + metadata.SetTag("authority.project", metadata.Project); + } + + public void SetTag(string name, string? value) => metadata.SetTag(name, value); +} + +internal sealed class NoopCertificateValidator : IAuthorityClientCertificateValidator +{ + public ValueTask ValidateAsync(HttpContext httpContext, AuthorityClientDocument client, CancellationToken cancellationToken) + { + var binding = new AuthorityClientCertificateBinding + { + Thumbprint = "stub" + }; + + return ValueTask.FromResult(AuthorityClientCertificateValidationResult.Success("stub", "stub", binding)); + } +} + +internal sealed class RecordingCertificateValidator : IAuthorityClientCertificateValidator +{ + public bool Invoked { get; private set; } + + public ValueTask ValidateAsync(HttpContext httpContext, AuthorityClientDocument client, CancellationToken cancellationToken) + { + Invoked = true; + + if (httpContext.Connection.ClientCertificate is null) + { + return ValueTask.FromResult(AuthorityClientCertificateValidationResult.Failure("client_certificate_required")); + } + + AuthorityClientCertificateBinding binding; + if (client.CertificateBindings.Count > 0) + { + binding = client.CertificateBindings[0]; + } + else + { + binding = new AuthorityClientCertificateBinding { Thumbprint = "stub" }; + } + + return ValueTask.FromResult(AuthorityClientCertificateValidationResult.Success("stub", binding.Thumbprint, binding)); + } +} + +internal sealed class NullMongoSessionAccessor : IAuthorityMongoSessionAccessor +{ + public ValueTask GetSessionAsync(CancellationToken cancellationToken = default) + => ValueTask.FromResult(null!); + + public ValueTask DisposeAsync() => ValueTask.CompletedTask; +} + +internal static class TestHelpers +{ + public static StellaOpsAuthorityOptions CreateAuthorityOptions(Action? configure = null) + { + var options = new StellaOpsAuthorityOptions + { + Issuer = new Uri("https://authority.test") + }; + + options.Signing.ActiveKeyId = "test-key"; + options.Signing.KeyPath = "/tmp/test-key.pem"; + options.Storage.ConnectionString = "mongodb://localhost/test"; + + configure?.Invoke(options); + return options; + } + + public static AuthorityClientDocument CreateClient( + string clientId = "concelier", + string? secret = "s3cr3t!", + string clientType = "confidential", + string allowedGrantTypes = "client_credentials", + string allowedScopes = "jobs:read", + string allowedAudiences = "", + string? tenant = null) + { + var document = new AuthorityClientDocument + { + ClientId = clientId, + ClientType = clientType, + SecretHash = secret is null ? 
null : AuthoritySecretHasher.ComputeHash(secret), + Plugin = "standard", + Properties = new Dictionary(StringComparer.OrdinalIgnoreCase) + { + [AuthorityClientMetadataKeys.AllowedGrantTypes] = allowedGrantTypes, + [AuthorityClientMetadataKeys.AllowedScopes] = allowedScopes + } + }; + + if (!string.IsNullOrWhiteSpace(allowedAudiences)) + { + document.Properties[AuthorityClientMetadataKeys.Audiences] = allowedAudiences; + } + + var normalizedTenant = NormalizeTenant(tenant); + if (normalizedTenant is not null) + { + document.Properties[AuthorityClientMetadataKeys.Tenant] = normalizedTenant; + } + + return document; + } + + private static string? NormalizeTenant(string? value) + => string.IsNullOrWhiteSpace(value) ? null : value.Trim().ToLowerInvariant(); + + public static AuthorityClientDescriptor CreateDescriptor(AuthorityClientDocument document) + { + var allowedGrantTypes = document.Properties.TryGetValue(AuthorityClientMetadataKeys.AllowedGrantTypes, out var grants) ? grants?.Split(' ', StringSplitOptions.RemoveEmptyEntries) : Array.Empty(); + var allowedScopes = document.Properties.TryGetValue(AuthorityClientMetadataKeys.AllowedScopes, out var scopes) ? scopes?.Split(' ', StringSplitOptions.RemoveEmptyEntries) : Array.Empty(); + var allowedAudiences = document.Properties.TryGetValue(AuthorityClientMetadataKeys.Audiences, out var audiences) ? audiences?.Split(' ', StringSplitOptions.RemoveEmptyEntries) : Array.Empty(); + + return new AuthorityClientDescriptor( + document.ClientId, + document.DisplayName, + confidential: string.Equals(document.ClientType, "confidential", StringComparison.OrdinalIgnoreCase), + allowedGrantTypes, + allowedScopes, + allowedAudiences, + redirectUris: Array.Empty(), + postLogoutRedirectUris: Array.Empty(), + properties: document.Properties); + } + + public static AuthorityIdentityProviderRegistry CreateRegistry(bool withClientProvisioning, AuthorityClientDescriptor? clientDescriptor) + { + var plugin = CreatePlugin( + name: "standard", + supportsClientProvisioning: withClientProvisioning, + descriptor: clientDescriptor, + user: null); + + return CreateRegistryFromPlugins(plugin); + } + + public static TestIdentityProviderPlugin CreatePlugin( + string name, + bool supportsClientProvisioning, + AuthorityClientDescriptor? descriptor, + AuthorityUserDescriptor? user) + { + var capabilities = supportsClientProvisioning + ? new[] { AuthorityPluginCapabilities.ClientProvisioning } + : Array.Empty(); + + var manifest = new AuthorityPluginManifest( + name, + "standard", + true, + null, + null, + capabilities, + new Dictionary(StringComparer.OrdinalIgnoreCase), + $"{name}.yaml"); + + var context = new AuthorityPluginContext(manifest, new ConfigurationBuilder().Build()); + + return new TestIdentityProviderPlugin( + context, + new TestUserCredentialStore(user), + new TestClaimsEnricher(), + supportsClientProvisioning ? 
new TestClientProvisioningStore(descriptor) : null, + new AuthorityIdentityProviderCapabilities( + SupportsPassword: true, + SupportsMfa: false, + SupportsClientProvisioning: supportsClientProvisioning)); + } + + public static AuthorityIdentityProviderRegistry CreateRegistryFromPlugins(params IIdentityProviderPlugin[] plugins) + { + var services = new ServiceCollection(); + services.AddLogging(); + foreach (var plugin in plugins) + { + services.AddSingleton(plugin); + } + + var provider = services.BuildServiceProvider(); + return new AuthorityIdentityProviderRegistry(provider, NullLogger.Instance); + } + + public static OpenIddictServerTransaction CreateTokenTransaction(string clientId, string? secret, string? scope) + { + var request = new OpenIddictRequest + { + GrantType = OpenIddictConstants.GrantTypes.ClientCredentials, + ClientId = clientId, + ClientSecret = secret + }; + + if (!string.IsNullOrWhiteSpace(scope)) + { + request.Scope = scope; + } + + return new OpenIddictServerTransaction + { + EndpointType = OpenIddictServerEndpointType.Token, + Options = new OpenIddictServerOptions(), + Request = request + }; + } + + public static string ConvertThumbprintToString(object thumbprint) + => thumbprint switch + { + string value => value, + byte[] bytes => Base64UrlEncoder.Encode(bytes), + _ => throw new InvalidOperationException("Unsupported thumbprint representation.") + }; + + public static string CreateDpopProof(ECDsaSecurityKey key, string method, string url, long issuedAt, string? nonce = null) + { + var jwk = JsonWebKeyConverter.ConvertFromECDsaSecurityKey(key); + jwk.KeyId ??= key.KeyId ?? Guid.NewGuid().ToString("N"); + + var signingCredentials = new SigningCredentials(key, SecurityAlgorithms.EcdsaSha256); + var header = new JwtHeader(signingCredentials) + { + ["typ"] = "dpop+jwt", + ["jwk"] = new Dictionary + { + ["kty"] = jwk.Kty, + ["crv"] = jwk.Crv, + ["x"] = jwk.X, + ["y"] = jwk.Y, + ["kid"] = jwk.Kid ?? jwk.KeyId + } + }; + + var payload = new JwtPayload + { + ["htm"] = method.ToUpperInvariant(), + ["htu"] = url, + ["iat"] = issuedAt, + ["jti"] = Guid.NewGuid().ToString("N") + }; + + if (!string.IsNullOrWhiteSpace(nonce)) + { + payload["nonce"] = nonce; + } + + var token = new JwtSecurityToken(header, payload); + return new JwtSecurityTokenHandler().WriteToken(token); + } + + public static X509Certificate2 CreateTestCertificate(string subjectName) + { + using var rsa = RSA.Create(2048); + var request = new CertificateRequest(subjectName, rsa, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1); + return request.CreateSelfSigned(DateTimeOffset.UtcNow.AddMinutes(-5), DateTimeOffset.UtcNow.AddHours(1)); + } + + public static ClaimsPrincipal CreatePrincipal(string clientId, string tokenId, string provider, string? 
subject = null) + { + var identity = new ClaimsIdentity(OpenIddictServerAspNetCoreDefaults.AuthenticationScheme); + identity.AddClaim(new Claim(OpenIddictConstants.Claims.ClientId, clientId)); + identity.AddClaim(new Claim(OpenIddictConstants.Claims.JwtId, tokenId)); + identity.AddClaim(new Claim(StellaOpsClaimTypes.IdentityProvider, provider)); + identity.AddClaim(new Claim(StellaOpsClaimTypes.Project, StellaOpsTenancyDefaults.AnyProject)); + + if (!string.IsNullOrWhiteSpace(subject)) + { + identity.AddClaim(new Claim(OpenIddictConstants.Claims.Subject, subject)); + } + + return new ClaimsPrincipal(identity); + } +} diff --git a/src/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/PasswordGrantHandlersTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/PasswordGrantHandlersTests.cs similarity index 98% rename from src/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/PasswordGrantHandlersTests.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/PasswordGrantHandlersTests.cs index 88bcc42f..79e8c509 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/PasswordGrantHandlersTests.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/PasswordGrantHandlersTests.cs @@ -1,516 +1,516 @@ -using System; -using System.Collections.Generic; -using System.Diagnostics; -using System.Globalization; -using System.Security.Claims; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.Extensions.Options; -using MongoDB.Driver; -using OpenIddict.Abstractions; -using OpenIddict.Server; -using OpenIddict.Server.AspNetCore; -using StellaOps.Authority.OpenIddict; -using StellaOps.Authority.OpenIddict.Handlers; -using StellaOps.Authority.Plugins.Abstractions; -using StellaOps.Authority.RateLimiting; -using StellaOps.Authority.Storage.Mongo.Documents; -using StellaOps.Authority.Storage.Mongo.Stores; -using StellaOps.Cryptography.Audit; -using StellaOps.Configuration; -using StellaOps.Auth.Abstractions; -using Xunit; - -namespace StellaOps.Authority.Tests.OpenIddict; - -public class PasswordGrantHandlersTests -{ - private static readonly ActivitySource TestActivitySource = new("StellaOps.Authority.Tests"); - - [Fact] - public async Task HandlePasswordGrant_EmitsSuccessAuditEvent() - { - var sink = new TestAuthEventSink(); - var metadataAccessor = new TestRateLimiterMetadataAccessor(); - var registry = CreateRegistry(new SuccessCredentialStore()); - var clientStore = new StubClientStore(CreateClientDocument()); - var authorityOptions = CreateAuthorityOptions(); - var optionsAccessor = Options.Create(authorityOptions); - var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, optionsAccessor, NullLogger.Instance); - var handle = new HandlePasswordGrantHandler(registry, clientStore, TestActivitySource, sink, metadataAccessor, TimeProvider.System, optionsAccessor, NullLogger.Instance); - - var transaction = CreatePasswordTransaction("alice", "Password1!"); - - await validate.HandleAsync(new OpenIddictServerEvents.ValidateTokenRequestContext(transaction)); - await handle.HandleAsync(new OpenIddictServerEvents.HandleTokenRequestContext(transaction)); - - var successEvent = Assert.Single(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.Success); - 
Assert.Equal("tenant-alpha", successEvent.Tenant.Value); - - var metadata = metadataAccessor.GetMetadata(); - Assert.Equal("tenant-alpha", metadata?.Tenant); - } - - [Fact] - public async Task HandlePasswordGrant_EmitsFailureAuditEvent() - { - var sink = new TestAuthEventSink(); - var metadataAccessor = new TestRateLimiterMetadataAccessor(); - var registry = CreateRegistry(new FailureCredentialStore()); - var clientStore = new StubClientStore(CreateClientDocument()); - var authorityOptions = CreateAuthorityOptions(); - var optionsAccessor = Options.Create(authorityOptions); - var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, optionsAccessor, NullLogger.Instance); - var handle = new HandlePasswordGrantHandler(registry, clientStore, TestActivitySource, sink, metadataAccessor, TimeProvider.System, optionsAccessor, NullLogger.Instance); - - var transaction = CreatePasswordTransaction("alice", "BadPassword!"); - - await validate.HandleAsync(new OpenIddictServerEvents.ValidateTokenRequestContext(transaction)); - await handle.HandleAsync(new OpenIddictServerEvents.HandleTokenRequestContext(transaction)); - - Assert.Contains(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.Failure); - } - - [Fact] - public async Task ValidatePasswordGrant_RejectsAdvisoryReadWithoutAocVerify() - { - var sink = new TestAuthEventSink(); - var metadataAccessor = new TestRateLimiterMetadataAccessor(); - var registry = CreateRegistry(new SuccessCredentialStore()); - var clientStore = new StubClientStore(CreateClientDocument("advisory:read aoc:verify")); - var authorityOptions = CreateAuthorityOptions(); - var optionsAccessor = Options.Create(authorityOptions); - var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, optionsAccessor, NullLogger.Instance); - - var transaction = CreatePasswordTransaction("alice", "Password1!", "advisory:read"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await validate.HandleAsync(context); - - Assert.True(context.IsRejected); - Assert.Equal(OpenIddictConstants.Errors.InvalidScope, context.Error); - Assert.Equal("Scope 'aoc:verify' is required when requesting advisory/vex read scopes.", context.ErrorDescription); - Assert.Equal(StellaOpsScopes.AocVerify, context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty]); - Assert.Contains(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.Failure); - } - - [Fact] - public async Task ValidatePasswordGrant_RejectsSignalsScopeWithoutAocVerify() - { - var sink = new TestAuthEventSink(); - var metadataAccessor = new TestRateLimiterMetadataAccessor(); - var registry = CreateRegistry(new SuccessCredentialStore()); - var clientStore = new StubClientStore(CreateClientDocument("signals:write signals:read signals:admin aoc:verify")); - var authorityOptions = CreateAuthorityOptions(); - var optionsAccessor = Options.Create(authorityOptions); - var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, optionsAccessor, NullLogger.Instance); - - var transaction = CreatePasswordTransaction("alice", "Password1!", "signals:write"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await 
validate.HandleAsync(context); - - Assert.True(context.IsRejected); - Assert.Equal(OpenIddictConstants.Errors.InvalidScope, context.Error); - Assert.Equal("Scope 'aoc:verify' is required when requesting signals scopes.", context.ErrorDescription); - Assert.Equal(StellaOpsScopes.AocVerify, context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty]); - Assert.Contains(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.Failure); - } - - [Fact] - public async Task ValidatePasswordGrant_RejectsPolicyAuthorWithoutTenant() - { - var sink = new TestAuthEventSink(); - var metadataAccessor = new TestRateLimiterMetadataAccessor(); - var registry = CreateRegistry(new SuccessCredentialStore()); - var clientDocument = CreateClientDocument("policy:author"); - clientDocument.Properties.Remove(AuthorityClientMetadataKeys.Tenant); - var clientStore = new StubClientStore(clientDocument); - var authorityOptions = CreateAuthorityOptions(); - var optionsAccessor = Options.Create(authorityOptions); - var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, optionsAccessor, NullLogger.Instance); - - var transaction = CreatePasswordTransaction("alice", "Password1!", "policy:author"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await validate.HandleAsync(context); - - Assert.True(context.IsRejected); - Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); - Assert.Equal("Policy Studio scopes require a tenant assignment.", context.ErrorDescription); - Assert.Equal(StellaOpsScopes.PolicyAuthor, context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty]); - Assert.Contains(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.Failure); - } - - [Fact] - public async Task ValidatePasswordGrant_AllowsPolicyAuthor() - { - var sink = new TestAuthEventSink(); - var metadataAccessor = new TestRateLimiterMetadataAccessor(); - var registry = CreateRegistry(new SuccessCredentialStore()); - var clientStore = new StubClientStore(CreateClientDocument("policy:author")); - var authorityOptions = CreateAuthorityOptions(); - var optionsAccessor = Options.Create(authorityOptions); - var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, optionsAccessor, NullLogger.Instance); - - var transaction = CreatePasswordTransaction("alice", "Password1!", "policy:author"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await validate.HandleAsync(context); - - Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); - Assert.Contains(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.Success); - } - - [Fact] - public async Task HandlePasswordGrant_EmitsLockoutAuditEvent() - { - var sink = new TestAuthEventSink(); - var metadataAccessor = new TestRateLimiterMetadataAccessor(); - var registry = CreateRegistry(new LockoutCredentialStore()); - var clientStore = new StubClientStore(CreateClientDocument()); - var authorityOptions = CreateAuthorityOptions(); - var optionsAccessor = Options.Create(authorityOptions); - var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, 
TimeProvider.System, optionsAccessor, NullLogger.Instance); - var handle = new HandlePasswordGrantHandler(registry, clientStore, TestActivitySource, sink, metadataAccessor, TimeProvider.System, optionsAccessor, NullLogger.Instance); - - var transaction = CreatePasswordTransaction("alice", "Locked!"); - - await validate.HandleAsync(new OpenIddictServerEvents.ValidateTokenRequestContext(transaction)); - await handle.HandleAsync(new OpenIddictServerEvents.HandleTokenRequestContext(transaction)); - - Assert.Contains(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.LockedOut); - } - - [Fact] - public async Task ValidatePasswordGrant_EmitsTamperAuditEvent_WhenUnexpectedParametersPresent() - { - var sink = new TestAuthEventSink(); - var metadataAccessor = new TestRateLimiterMetadataAccessor(); - var registry = CreateRegistry(new SuccessCredentialStore()); - var clientStore = new StubClientStore(CreateClientDocument()); - var authorityOptions = CreateAuthorityOptions(); - var optionsAccessor = Options.Create(authorityOptions); - var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, optionsAccessor, NullLogger.Instance); - - var transaction = CreatePasswordTransaction("alice", "Password1!"); - transaction.Request?.SetParameter("unexpected_param", "value"); - - await validate.HandleAsync(new OpenIddictServerEvents.ValidateTokenRequestContext(transaction)); - - var tamperEvent = Assert.Single(sink.Events, record => record.EventType == "authority.token.tamper"); - Assert.Equal(AuthEventOutcome.Failure, tamperEvent.Outcome); - Assert.Contains(tamperEvent.Properties, property => - string.Equals(property.Name, "request.unexpected_parameter", StringComparison.OrdinalIgnoreCase) && - string.Equals(property.Value.Value, "unexpected_param", StringComparison.OrdinalIgnoreCase)); - } - - [Fact] - public async Task ValidatePasswordGrant_RejectsExceptionsApprove_WhenMfaRequiredAndProviderLacksSupport() - { - var sink = new TestAuthEventSink(); - var metadataAccessor = new TestRateLimiterMetadataAccessor(); - var registry = CreateRegistry(new SuccessCredentialStore(), supportsMfa: false); - var clientStore = new StubClientStore(CreateClientDocument("exceptions:approve")); - var authorityOptions = CreateAuthorityOptions(opts => - { - opts.Exceptions.RoutingTemplates.Add(new AuthorityExceptionRoutingTemplateOptions - { - Id = "secops", - AuthorityRouteId = "approvals/secops", - RequireMfa = true - }); - }); - var optionsAccessor = Options.Create(authorityOptions); - var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, optionsAccessor, NullLogger.Instance); - - var transaction = CreatePasswordTransaction("alice", "Password1!", "exceptions:approve"); - var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - - await validate.HandleAsync(context); - - Assert.True(context.IsRejected); - Assert.Equal(OpenIddictConstants.Errors.InvalidScope, context.Error); - Assert.Equal("Exception approval scope requires an MFA-capable identity provider.", context.ErrorDescription); - Assert.Contains(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.Failure); - } - - [Fact] - public async Task HandlePasswordGrant_AllowsExceptionsApprove_WhenMfaSupported() - { - var sink = new TestAuthEventSink(); - var metadataAccessor = new 
TestRateLimiterMetadataAccessor(); - var registry = CreateRegistry(new SuccessCredentialStore(), supportsMfa: true); - var clientStore = new StubClientStore(CreateClientDocument("exceptions:approve")); - var authorityOptions = CreateAuthorityOptions(opts => - { - opts.Exceptions.RoutingTemplates.Add(new AuthorityExceptionRoutingTemplateOptions - { - Id = "secops", - AuthorityRouteId = "approvals/secops", - RequireMfa = true - }); - }); - var optionsAccessor = Options.Create(authorityOptions); - var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, optionsAccessor, NullLogger.Instance); - var handle = new HandlePasswordGrantHandler(registry, clientStore, TestActivitySource, sink, metadataAccessor, TimeProvider.System, optionsAccessor, NullLogger.Instance); - - var transaction = CreatePasswordTransaction("alice", "Password1!", "exceptions:approve"); - var validateContext = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - await validate.HandleAsync(validateContext); - Assert.False(validateContext.IsRejected); - - var handleContext = new OpenIddictServerEvents.HandleTokenRequestContext(transaction); - await handle.HandleAsync(handleContext); - - Assert.False(handleContext.IsRejected); - Assert.NotNull(handleContext.Principal); - Assert.Contains(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.Success); - } - - private static AuthorityIdentityProviderRegistry CreateRegistry(IUserCredentialStore store, bool supportsMfa = false) - { - var plugin = new StubIdentityProviderPlugin("stub", store, supportsMfa); - - var services = new ServiceCollection(); - services.AddLogging(); - services.AddSingleton(plugin); - var provider = services.BuildServiceProvider(); - - return new AuthorityIdentityProviderRegistry(provider, NullLogger.Instance); - } - - private static OpenIddictServerTransaction CreatePasswordTransaction(string username, string password, string scope = "jobs:trigger") - { - var request = new OpenIddictRequest - { - GrantType = OpenIddictConstants.GrantTypes.Password, - Username = username, - Password = password, - ClientId = "cli-app", - Scope = scope - }; - - return new OpenIddictServerTransaction - { - EndpointType = OpenIddictServerEndpointType.Token, - Options = new OpenIddictServerOptions(), - Request = request - }; - } - - private static StellaOpsAuthorityOptions CreateAuthorityOptions(Action? 
configure = null) - { - var options = new StellaOpsAuthorityOptions - { - Issuer = new Uri("https://authority.test") - }; - options.Signing.ActiveKeyId = "test-key"; - options.Signing.KeyPath = "/tmp/test-key.pem"; - options.Storage.ConnectionString = "mongodb://localhost:27017/authority"; - - configure?.Invoke(options); - return options; - } - - private static AuthorityClientDocument CreateClientDocument(string allowedScopes = "jobs:trigger") - { - var document = new AuthorityClientDocument - { - ClientId = "cli-app", - ClientType = "public" - }; - - document.Properties[AuthorityClientMetadataKeys.AllowedGrantTypes] = "password"; - document.Properties[AuthorityClientMetadataKeys.AllowedScopes] = allowedScopes; - document.Properties[AuthorityClientMetadataKeys.Tenant] = "tenant-alpha"; - - return document; - } - - private sealed class StubIdentityProviderPlugin : IIdentityProviderPlugin - { - public StubIdentityProviderPlugin(string name, IUserCredentialStore store, bool supportsMfa) - { - Name = name; - Type = "stub"; - var capabilities = supportsMfa - ? new[] { AuthorityPluginCapabilities.Password, AuthorityPluginCapabilities.Mfa } - : new[] { AuthorityPluginCapabilities.Password }; - var manifest = new AuthorityPluginManifest( - Name: name, - Type: "stub", - Enabled: true, - AssemblyName: null, - AssemblyPath: null, - Capabilities: capabilities, - Metadata: new Dictionary(StringComparer.OrdinalIgnoreCase), - ConfigPath: $"{name}.yaml"); - Context = new AuthorityPluginContext(manifest, new ConfigurationBuilder().Build()); - Credentials = store; - ClaimsEnricher = new NoopClaimsEnricher(); - Capabilities = new AuthorityIdentityProviderCapabilities(SupportsPassword: true, SupportsMfa: supportsMfa, SupportsClientProvisioning: false); - } - - public string Name { get; } - public string Type { get; } - public AuthorityPluginContext Context { get; } - public IUserCredentialStore Credentials { get; } - public IClaimsEnricher ClaimsEnricher { get; } - public IClientProvisioningStore? 
ClientProvisioning => null; - public AuthorityIdentityProviderCapabilities Capabilities { get; } - - public ValueTask CheckHealthAsync(CancellationToken cancellationToken) - => ValueTask.FromResult(AuthorityPluginHealthResult.Healthy()); - } - - private sealed class NoopClaimsEnricher : IClaimsEnricher - { - public ValueTask EnrichAsync(ClaimsIdentity identity, AuthorityClaimsEnrichmentContext context, CancellationToken cancellationToken) - => ValueTask.CompletedTask; - } - - private sealed class SuccessCredentialStore : IUserCredentialStore - { - public ValueTask VerifyPasswordAsync(string username, string password, CancellationToken cancellationToken) - { - var descriptor = new AuthorityUserDescriptor("subject", username, "User", requiresPasswordReset: false); - return ValueTask.FromResult(AuthorityCredentialVerificationResult.Success(descriptor)); - } - - public ValueTask> UpsertUserAsync(AuthorityUserRegistration registration, CancellationToken cancellationToken) - => throw new NotImplementedException(); - - public ValueTask FindBySubjectAsync(string subjectId, CancellationToken cancellationToken) - => ValueTask.FromResult(null); - } - - private sealed class FailureCredentialStore : IUserCredentialStore - { - public ValueTask VerifyPasswordAsync(string username, string password, CancellationToken cancellationToken) - => ValueTask.FromResult(AuthorityCredentialVerificationResult.Failure(AuthorityCredentialFailureCode.InvalidCredentials, "Invalid username or password.")); - - public ValueTask> UpsertUserAsync(AuthorityUserRegistration registration, CancellationToken cancellationToken) - => throw new NotImplementedException(); - - public ValueTask FindBySubjectAsync(string subjectId, CancellationToken cancellationToken) - => ValueTask.FromResult(null); - } - - private sealed class LockoutCredentialStore : IUserCredentialStore - { - public ValueTask VerifyPasswordAsync(string username, string password, CancellationToken cancellationToken) - { - var retry = TimeSpan.FromMinutes(5); - var properties = new[] - { - new AuthEventProperty - { - Name = "plugin.lockout_until", - Value = ClassifiedString.Public(timeProvider.GetUtcNow().Add(retry).ToString("O", CultureInfo.InvariantCulture)) - } - }; - - return ValueTask.FromResult(AuthorityCredentialVerificationResult.Failure( - AuthorityCredentialFailureCode.LockedOut, - "Account locked.", - retry, - properties)); - } - - public ValueTask> UpsertUserAsync(AuthorityUserRegistration registration, CancellationToken cancellationToken) - => throw new NotImplementedException(); - - public ValueTask FindBySubjectAsync(string subjectId, CancellationToken cancellationToken) - => ValueTask.FromResult(null); - } - - private sealed class StubClientStore : IAuthorityClientStore - { - private readonly AuthorityClientDocument document; - - public StubClientStore(AuthorityClientDocument document) - { - this.document = document; - } - - public Task> ListAsync(CancellationToken cancellationToken) - => Task.FromResult>(new[] { document }); - - public Task FindAsync(string id, CancellationToken cancellationToken) - => Task.FromResult(id == document.Id ? document : null); - - public Task FindByClientIdAsync(string clientId, CancellationToken cancellationToken) - => Task.FromResult(string.Equals(clientId, document.ClientId, StringComparison.Ordinal) ? 
document : null); - - public Task InsertAsync(AuthorityClientDocument document, CancellationToken cancellationToken) - => throw new NotImplementedException(); - - public Task UpdateAsync(string id, UpdateDefinition update, CancellationToken cancellationToken) - => throw new NotImplementedException(); - - public Task DeleteAsync(string id, CancellationToken cancellationToken) - => throw new NotImplementedException(); - - public Task ExistsAsync(string id, CancellationToken cancellationToken) - => throw new NotImplementedException(); - } - - private sealed class TestAuthEventSink : IAuthEventSink - { - public List Events { get; } = new(); - - public Task WriteAsync(AuthEventRecord record, CancellationToken cancellationToken) - { - Events.Add(record); - return Task.CompletedTask; - } - } - - private sealed class TestRateLimiterMetadataAccessor : IAuthorityRateLimiterMetadataAccessor - { - private AuthorityRateLimiterMetadata? metadata; - - public AuthorityRateLimiterMetadata? GetMetadata() => metadata; - - public void SetClientId(string? clientId) - { - metadata ??= new AuthorityRateLimiterMetadata(); - metadata.ClientId = clientId; - } - - public void SetTenant(string? tenant) - { - metadata ??= new AuthorityRateLimiterMetadata(); - metadata.Tenant = tenant; - } - - public void SetProject(string? project) - { - metadata ??= new AuthorityRateLimiterMetadata(); - metadata.Project = project; - } - - public void Clear() - { - metadata = null; - } - } - - private sealed class SuccessCredentialStore : IUserCredentialStore - { - public ValueTask VerifyPasswordAsync(string username, string password, CancellationToken cancellationToken) - { - var descriptor = new AuthorityUserDescriptor("subject", username, "User", requiresPasswordReset: false); - return ValueTask.FromResult(AuthorityCredentialVerificationResult.Success(descriptor)); - } - - public ValueTask> UpsertUserAsync(AuthorityUserRegistration registration, CancellationToken cancellationToken) - => throw new NotImplementedException(); - - public ValueTask FindBySubjectAsync(string subjectId, CancellationToken cancellationToken) - => ValueTask.FromResult(null); - } -} +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Globalization; +using System.Security.Claims; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using MongoDB.Driver; +using OpenIddict.Abstractions; +using OpenIddict.Server; +using OpenIddict.Server.AspNetCore; +using StellaOps.Authority.OpenIddict; +using StellaOps.Authority.OpenIddict.Handlers; +using StellaOps.Authority.Plugins.Abstractions; +using StellaOps.Authority.RateLimiting; +using StellaOps.Authority.Storage.Mongo.Documents; +using StellaOps.Authority.Storage.Mongo.Stores; +using StellaOps.Cryptography.Audit; +using StellaOps.Configuration; +using StellaOps.Auth.Abstractions; +using Xunit; + +namespace StellaOps.Authority.Tests.OpenIddict; + +public class PasswordGrantHandlersTests +{ + private static readonly ActivitySource TestActivitySource = new("StellaOps.Authority.Tests"); + + [Fact] + public async Task HandlePasswordGrant_EmitsSuccessAuditEvent() + { + var sink = new TestAuthEventSink(); + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var registry = CreateRegistry(new SuccessCredentialStore()); + var clientStore = new StubClientStore(CreateClientDocument()); + var authorityOptions = CreateAuthorityOptions(); + var 
optionsAccessor = Options.Create(authorityOptions); + var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, optionsAccessor, NullLogger.Instance); + var handle = new HandlePasswordGrantHandler(registry, clientStore, TestActivitySource, sink, metadataAccessor, TimeProvider.System, optionsAccessor, NullLogger.Instance); + + var transaction = CreatePasswordTransaction("alice", "Password1!"); + + await validate.HandleAsync(new OpenIddictServerEvents.ValidateTokenRequestContext(transaction)); + await handle.HandleAsync(new OpenIddictServerEvents.HandleTokenRequestContext(transaction)); + + var successEvent = Assert.Single(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.Success); + Assert.Equal("tenant-alpha", successEvent.Tenant.Value); + + var metadata = metadataAccessor.GetMetadata(); + Assert.Equal("tenant-alpha", metadata?.Tenant); + } + + [Fact] + public async Task HandlePasswordGrant_EmitsFailureAuditEvent() + { + var sink = new TestAuthEventSink(); + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var registry = CreateRegistry(new FailureCredentialStore()); + var clientStore = new StubClientStore(CreateClientDocument()); + var authorityOptions = CreateAuthorityOptions(); + var optionsAccessor = Options.Create(authorityOptions); + var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, optionsAccessor, NullLogger.Instance); + var handle = new HandlePasswordGrantHandler(registry, clientStore, TestActivitySource, sink, metadataAccessor, TimeProvider.System, optionsAccessor, NullLogger.Instance); + + var transaction = CreatePasswordTransaction("alice", "BadPassword!"); + + await validate.HandleAsync(new OpenIddictServerEvents.ValidateTokenRequestContext(transaction)); + await handle.HandleAsync(new OpenIddictServerEvents.HandleTokenRequestContext(transaction)); + + Assert.Contains(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.Failure); + } + + [Fact] + public async Task ValidatePasswordGrant_RejectsAdvisoryReadWithoutAocVerify() + { + var sink = new TestAuthEventSink(); + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var registry = CreateRegistry(new SuccessCredentialStore()); + var clientStore = new StubClientStore(CreateClientDocument("advisory:read aoc:verify")); + var authorityOptions = CreateAuthorityOptions(); + var optionsAccessor = Options.Create(authorityOptions); + var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, optionsAccessor, NullLogger.Instance); + + var transaction = CreatePasswordTransaction("alice", "Password1!", "advisory:read"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await validate.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidScope, context.Error); + Assert.Equal("Scope 'aoc:verify' is required when requesting advisory/vex read scopes.", context.ErrorDescription); + Assert.Equal(StellaOpsScopes.AocVerify, context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty]); + Assert.Contains(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.Failure); + } + + [Fact] + public async Task 
ValidatePasswordGrant_RejectsSignalsScopeWithoutAocVerify() + { + var sink = new TestAuthEventSink(); + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var registry = CreateRegistry(new SuccessCredentialStore()); + var clientStore = new StubClientStore(CreateClientDocument("signals:write signals:read signals:admin aoc:verify")); + var authorityOptions = CreateAuthorityOptions(); + var optionsAccessor = Options.Create(authorityOptions); + var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, optionsAccessor, NullLogger.Instance); + + var transaction = CreatePasswordTransaction("alice", "Password1!", "signals:write"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await validate.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidScope, context.Error); + Assert.Equal("Scope 'aoc:verify' is required when requesting signals scopes.", context.ErrorDescription); + Assert.Equal(StellaOpsScopes.AocVerify, context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty]); + Assert.Contains(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.Failure); + } + + [Fact] + public async Task ValidatePasswordGrant_RejectsPolicyAuthorWithoutTenant() + { + var sink = new TestAuthEventSink(); + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var registry = CreateRegistry(new SuccessCredentialStore()); + var clientDocument = CreateClientDocument("policy:author"); + clientDocument.Properties.Remove(AuthorityClientMetadataKeys.Tenant); + var clientStore = new StubClientStore(clientDocument); + var authorityOptions = CreateAuthorityOptions(); + var optionsAccessor = Options.Create(authorityOptions); + var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, optionsAccessor, NullLogger.Instance); + + var transaction = CreatePasswordTransaction("alice", "Password1!", "policy:author"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await validate.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidClient, context.Error); + Assert.Equal("Policy Studio scopes require a tenant assignment.", context.ErrorDescription); + Assert.Equal(StellaOpsScopes.PolicyAuthor, context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty]); + Assert.Contains(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.Failure); + } + + [Fact] + public async Task ValidatePasswordGrant_AllowsPolicyAuthor() + { + var sink = new TestAuthEventSink(); + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var registry = CreateRegistry(new SuccessCredentialStore()); + var clientStore = new StubClientStore(CreateClientDocument("policy:author")); + var authorityOptions = CreateAuthorityOptions(); + var optionsAccessor = Options.Create(authorityOptions); + var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, optionsAccessor, NullLogger.Instance); + + var transaction = CreatePasswordTransaction("alice", "Password1!", "policy:author"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + 
await validate.HandleAsync(context); + + Assert.False(context.IsRejected, $"Rejected: {context.Error} - {context.ErrorDescription}"); + Assert.Contains(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.Success); + } + + [Fact] + public async Task HandlePasswordGrant_EmitsLockoutAuditEvent() + { + var sink = new TestAuthEventSink(); + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var registry = CreateRegistry(new LockoutCredentialStore()); + var clientStore = new StubClientStore(CreateClientDocument()); + var authorityOptions = CreateAuthorityOptions(); + var optionsAccessor = Options.Create(authorityOptions); + var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, optionsAccessor, NullLogger.Instance); + var handle = new HandlePasswordGrantHandler(registry, clientStore, TestActivitySource, sink, metadataAccessor, TimeProvider.System, optionsAccessor, NullLogger.Instance); + + var transaction = CreatePasswordTransaction("alice", "Locked!"); + + await validate.HandleAsync(new OpenIddictServerEvents.ValidateTokenRequestContext(transaction)); + await handle.HandleAsync(new OpenIddictServerEvents.HandleTokenRequestContext(transaction)); + + Assert.Contains(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.LockedOut); + } + + [Fact] + public async Task ValidatePasswordGrant_EmitsTamperAuditEvent_WhenUnexpectedParametersPresent() + { + var sink = new TestAuthEventSink(); + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var registry = CreateRegistry(new SuccessCredentialStore()); + var clientStore = new StubClientStore(CreateClientDocument()); + var authorityOptions = CreateAuthorityOptions(); + var optionsAccessor = Options.Create(authorityOptions); + var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, optionsAccessor, NullLogger.Instance); + + var transaction = CreatePasswordTransaction("alice", "Password1!"); + transaction.Request?.SetParameter("unexpected_param", "value"); + + await validate.HandleAsync(new OpenIddictServerEvents.ValidateTokenRequestContext(transaction)); + + var tamperEvent = Assert.Single(sink.Events, record => record.EventType == "authority.token.tamper"); + Assert.Equal(AuthEventOutcome.Failure, tamperEvent.Outcome); + Assert.Contains(tamperEvent.Properties, property => + string.Equals(property.Name, "request.unexpected_parameter", StringComparison.OrdinalIgnoreCase) && + string.Equals(property.Value.Value, "unexpected_param", StringComparison.OrdinalIgnoreCase)); + } + + [Fact] + public async Task ValidatePasswordGrant_RejectsExceptionsApprove_WhenMfaRequiredAndProviderLacksSupport() + { + var sink = new TestAuthEventSink(); + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var registry = CreateRegistry(new SuccessCredentialStore(), supportsMfa: false); + var clientStore = new StubClientStore(CreateClientDocument("exceptions:approve")); + var authorityOptions = CreateAuthorityOptions(opts => + { + opts.Exceptions.RoutingTemplates.Add(new AuthorityExceptionRoutingTemplateOptions + { + Id = "secops", + AuthorityRouteId = "approvals/secops", + RequireMfa = true + }); + }); + var optionsAccessor = Options.Create(authorityOptions); + var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, 
TimeProvider.System, optionsAccessor, NullLogger.Instance); + + var transaction = CreatePasswordTransaction("alice", "Password1!", "exceptions:approve"); + var context = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + + await validate.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidScope, context.Error); + Assert.Equal("Exception approval scope requires an MFA-capable identity provider.", context.ErrorDescription); + Assert.Contains(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.Failure); + } + + [Fact] + public async Task HandlePasswordGrant_AllowsExceptionsApprove_WhenMfaSupported() + { + var sink = new TestAuthEventSink(); + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var registry = CreateRegistry(new SuccessCredentialStore(), supportsMfa: true); + var clientStore = new StubClientStore(CreateClientDocument("exceptions:approve")); + var authorityOptions = CreateAuthorityOptions(opts => + { + opts.Exceptions.RoutingTemplates.Add(new AuthorityExceptionRoutingTemplateOptions + { + Id = "secops", + AuthorityRouteId = "approvals/secops", + RequireMfa = true + }); + }); + var optionsAccessor = Options.Create(authorityOptions); + var validate = new ValidatePasswordGrantHandler(registry, TestActivitySource, sink, metadataAccessor, clientStore, TimeProvider.System, optionsAccessor, NullLogger.Instance); + var handle = new HandlePasswordGrantHandler(registry, clientStore, TestActivitySource, sink, metadataAccessor, TimeProvider.System, optionsAccessor, NullLogger.Instance); + + var transaction = CreatePasswordTransaction("alice", "Password1!", "exceptions:approve"); + var validateContext = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + await validate.HandleAsync(validateContext); + Assert.False(validateContext.IsRejected); + + var handleContext = new OpenIddictServerEvents.HandleTokenRequestContext(transaction); + await handle.HandleAsync(handleContext); + + Assert.False(handleContext.IsRejected); + Assert.NotNull(handleContext.Principal); + Assert.Contains(sink.Events, record => record.EventType == "authority.password.grant" && record.Outcome == AuthEventOutcome.Success); + } + + private static AuthorityIdentityProviderRegistry CreateRegistry(IUserCredentialStore store, bool supportsMfa = false) + { + var plugin = new StubIdentityProviderPlugin("stub", store, supportsMfa); + + var services = new ServiceCollection(); + services.AddLogging(); + services.AddSingleton(plugin); + var provider = services.BuildServiceProvider(); + + return new AuthorityIdentityProviderRegistry(provider, NullLogger.Instance); + } + + private static OpenIddictServerTransaction CreatePasswordTransaction(string username, string password, string scope = "jobs:trigger") + { + var request = new OpenIddictRequest + { + GrantType = OpenIddictConstants.GrantTypes.Password, + Username = username, + Password = password, + ClientId = "cli-app", + Scope = scope + }; + + return new OpenIddictServerTransaction + { + EndpointType = OpenIddictServerEndpointType.Token, + Options = new OpenIddictServerOptions(), + Request = request + }; + } + + private static StellaOpsAuthorityOptions CreateAuthorityOptions(Action? 
configure = null) + { + var options = new StellaOpsAuthorityOptions + { + Issuer = new Uri("https://authority.test") + }; + options.Signing.ActiveKeyId = "test-key"; + options.Signing.KeyPath = "/tmp/test-key.pem"; + options.Storage.ConnectionString = "mongodb://localhost:27017/authority"; + + configure?.Invoke(options); + return options; + } + + private static AuthorityClientDocument CreateClientDocument(string allowedScopes = "jobs:trigger") + { + var document = new AuthorityClientDocument + { + ClientId = "cli-app", + ClientType = "public" + }; + + document.Properties[AuthorityClientMetadataKeys.AllowedGrantTypes] = "password"; + document.Properties[AuthorityClientMetadataKeys.AllowedScopes] = allowedScopes; + document.Properties[AuthorityClientMetadataKeys.Tenant] = "tenant-alpha"; + + return document; + } + + private sealed class StubIdentityProviderPlugin : IIdentityProviderPlugin + { + public StubIdentityProviderPlugin(string name, IUserCredentialStore store, bool supportsMfa) + { + Name = name; + Type = "stub"; + var capabilities = supportsMfa + ? new[] { AuthorityPluginCapabilities.Password, AuthorityPluginCapabilities.Mfa } + : new[] { AuthorityPluginCapabilities.Password }; + var manifest = new AuthorityPluginManifest( + Name: name, + Type: "stub", + Enabled: true, + AssemblyName: null, + AssemblyPath: null, + Capabilities: capabilities, + Metadata: new Dictionary(StringComparer.OrdinalIgnoreCase), + ConfigPath: $"{name}.yaml"); + Context = new AuthorityPluginContext(manifest, new ConfigurationBuilder().Build()); + Credentials = store; + ClaimsEnricher = new NoopClaimsEnricher(); + Capabilities = new AuthorityIdentityProviderCapabilities(SupportsPassword: true, SupportsMfa: supportsMfa, SupportsClientProvisioning: false); + } + + public string Name { get; } + public string Type { get; } + public AuthorityPluginContext Context { get; } + public IUserCredentialStore Credentials { get; } + public IClaimsEnricher ClaimsEnricher { get; } + public IClientProvisioningStore? 
ClientProvisioning => null; + public AuthorityIdentityProviderCapabilities Capabilities { get; } + + public ValueTask CheckHealthAsync(CancellationToken cancellationToken) + => ValueTask.FromResult(AuthorityPluginHealthResult.Healthy()); + } + + private sealed class NoopClaimsEnricher : IClaimsEnricher + { + public ValueTask EnrichAsync(ClaimsIdentity identity, AuthorityClaimsEnrichmentContext context, CancellationToken cancellationToken) + => ValueTask.CompletedTask; + } + + private sealed class SuccessCredentialStore : IUserCredentialStore + { + public ValueTask VerifyPasswordAsync(string username, string password, CancellationToken cancellationToken) + { + var descriptor = new AuthorityUserDescriptor("subject", username, "User", requiresPasswordReset: false); + return ValueTask.FromResult(AuthorityCredentialVerificationResult.Success(descriptor)); + } + + public ValueTask> UpsertUserAsync(AuthorityUserRegistration registration, CancellationToken cancellationToken) + => throw new NotImplementedException(); + + public ValueTask FindBySubjectAsync(string subjectId, CancellationToken cancellationToken) + => ValueTask.FromResult(null); + } + + private sealed class FailureCredentialStore : IUserCredentialStore + { + public ValueTask VerifyPasswordAsync(string username, string password, CancellationToken cancellationToken) + => ValueTask.FromResult(AuthorityCredentialVerificationResult.Failure(AuthorityCredentialFailureCode.InvalidCredentials, "Invalid username or password.")); + + public ValueTask> UpsertUserAsync(AuthorityUserRegistration registration, CancellationToken cancellationToken) + => throw new NotImplementedException(); + + public ValueTask FindBySubjectAsync(string subjectId, CancellationToken cancellationToken) + => ValueTask.FromResult(null); + } + + private sealed class LockoutCredentialStore : IUserCredentialStore + { + public ValueTask VerifyPasswordAsync(string username, string password, CancellationToken cancellationToken) + { + var retry = TimeSpan.FromMinutes(5); + var properties = new[] + { + new AuthEventProperty + { + Name = "plugin.lockout_until", + Value = ClassifiedString.Public(TimeProvider.System.GetUtcNow().Add(retry).ToString("O", CultureInfo.InvariantCulture)) + } + }; + + return ValueTask.FromResult(AuthorityCredentialVerificationResult.Failure( + AuthorityCredentialFailureCode.LockedOut, + "Account locked.", + retry, + properties)); + } + + public ValueTask> UpsertUserAsync(AuthorityUserRegistration registration, CancellationToken cancellationToken) + => throw new NotImplementedException(); + + public ValueTask FindBySubjectAsync(string subjectId, CancellationToken cancellationToken) + => ValueTask.FromResult(null); + } + + private sealed class StubClientStore : IAuthorityClientStore + { + private readonly AuthorityClientDocument document; + + public StubClientStore(AuthorityClientDocument document) + { + this.document = document; + } + + public Task> ListAsync(CancellationToken cancellationToken) + => Task.FromResult>(new[] { document }); + + public Task FindAsync(string id, CancellationToken cancellationToken) + => Task.FromResult(id == document.Id ? document : null); + + public Task FindByClientIdAsync(string clientId, CancellationToken cancellationToken) + => Task.FromResult(string.Equals(clientId, document.ClientId, StringComparison.Ordinal) ?
document : null); + + public Task InsertAsync(AuthorityClientDocument document, CancellationToken cancellationToken) + => throw new NotImplementedException(); + + public Task UpdateAsync(string id, UpdateDefinition update, CancellationToken cancellationToken) + => throw new NotImplementedException(); + + public Task DeleteAsync(string id, CancellationToken cancellationToken) + => throw new NotImplementedException(); + + public Task ExistsAsync(string id, CancellationToken cancellationToken) + => throw new NotImplementedException(); + } + + private sealed class TestAuthEventSink : IAuthEventSink + { + public List Events { get; } = new(); + + public Task WriteAsync(AuthEventRecord record, CancellationToken cancellationToken) + { + Events.Add(record); + return Task.CompletedTask; + } + } + + private sealed class TestRateLimiterMetadataAccessor : IAuthorityRateLimiterMetadataAccessor + { + private AuthorityRateLimiterMetadata? metadata; + + public AuthorityRateLimiterMetadata? GetMetadata() => metadata; + + public void SetClientId(string? clientId) + { + metadata ??= new AuthorityRateLimiterMetadata(); + metadata.ClientId = clientId; + } + + public void SetTenant(string? tenant) + { + metadata ??= new AuthorityRateLimiterMetadata(); + metadata.Tenant = tenant; + } + + public void SetProject(string? project) + { + metadata ??= new AuthorityRateLimiterMetadata(); + metadata.Project = project; + } + + public void Clear() + { + metadata = null; + } + } +} diff --git a/src/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/TokenPersistenceIntegrationTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/TokenPersistenceIntegrationTests.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/TokenPersistenceIntegrationTests.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/TokenPersistenceIntegrationTests.cs index c2af0209..8025b1f0 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/TokenPersistenceIntegrationTests.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/OpenIddict/TokenPersistenceIntegrationTests.cs @@ -1,396 +1,396 @@ -using System.Collections.Generic; -using System.Diagnostics; -using System.Security.Claims; -using Microsoft.AspNetCore.Http; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.Extensions.Time.Testing; -using MongoDB.Driver; -using MongoDB.Bson; -using OpenIddict.Abstractions; -using OpenIddict.Server; -using StellaOps.Authority; -using StellaOps.Authority.OpenIddict; -using StellaOps.Authority.OpenIddict.Handlers; -using StellaOps.Authority.Plugins.Abstractions; -using StellaOps.Authority.Storage.Mongo; -using StellaOps.Authority.Storage.Mongo.Documents; -using
StellaOps.Authority.Storage.Mongo.Extensions; -using StellaOps.Authority.Storage.Mongo.Initialization; -using StellaOps.Authority.Storage.Mongo.Sessions; -using StellaOps.Auth.Abstractions; -using StellaOps.Authority.Storage.Mongo.Stores; -using StellaOps.Concelier.Testing; -using StellaOps.Authority.RateLimiting; -using StellaOps.Authority.Security; -using StellaOps.Cryptography.Audit; -using Xunit; - -namespace StellaOps.Authority.Tests.OpenIddict; - -[Collection("mongo-fixture")] -public sealed class TokenPersistenceIntegrationTests -{ - private static readonly ActivitySource TestActivitySource = new("StellaOps.Authority.Tests.Persistence"); - private readonly MongoIntegrationFixture fixture; - - public TokenPersistenceIntegrationTests(MongoIntegrationFixture fixture) - => this.fixture = fixture ?? throw new ArgumentNullException(nameof(fixture)); - - [Fact] - public async Task HandleClientCredentials_PersistsTokenInMongo() - { - await ResetCollectionsAsync(); - - var issuedAt = new DateTimeOffset(2025, 10, 10, 12, 0, 0, TimeSpan.Zero); - var clock = new FakeTimeProvider(issuedAt); - - await using var provider = await BuildMongoProviderAsync(clock); - - var clientStore = provider.GetRequiredService(); - var tokenStore = provider.GetRequiredService(); - - var clientDocument = TestHelpers.CreateClient( - secret: "s3cr3t!", - allowedGrantTypes: "client_credentials", - allowedScopes: "jobs:trigger jobs:read", - tenant: "tenant-alpha"); - - await clientStore.UpsertAsync(clientDocument, CancellationToken.None); - - var registry = TestHelpers.CreateRegistry( - withClientProvisioning: true, - clientDescriptor: TestHelpers.CreateDescriptor(clientDocument)); - - var authSink = new TestAuthEventSink(); - var metadataAccessor = new TestRateLimiterMetadataAccessor(); - await using var scope = provider.CreateAsyncScope(); - var sessionAccessor = scope.ServiceProvider.GetRequiredService(); - var options = TestHelpers.CreateAuthorityOptions(); - var validateHandler = new ValidateClientCredentialsHandler(clientStore, registry, TestActivitySource, authSink, metadataAccessor, clock, new NoopCertificateValidator(), new HttpContextAccessor(), options, NullLogger.Instance); - var handleHandler = new HandleClientCredentialsHandler(registry, tokenStore, sessionAccessor, metadataAccessor, clock, TestActivitySource, NullLogger.Instance); - var persistHandler = new PersistTokensHandler(tokenStore, sessionAccessor, clock, TestActivitySource, NullLogger.Instance); - - var transaction = TestHelpers.CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:trigger"); - transaction.Options.AccessTokenLifetime = TimeSpan.FromMinutes(15); - - var validateContext = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); - await validateHandler.HandleAsync(validateContext); - Assert.False(validateContext.IsRejected); - - var handleContext = new OpenIddictServerEvents.HandleTokenRequestContext(transaction); - await handleHandler.HandleAsync(handleContext); - - Assert.True(handleContext.IsRequestHandled); - var principal = Assert.IsType(handleContext.Principal); - var tokenId = principal.GetClaim(OpenIddictConstants.Claims.JwtId); - Assert.False(string.IsNullOrWhiteSpace(tokenId)); - Assert.Equal("tenant-alpha", metadataAccessor.GetMetadata()?.Tenant); - Assert.Equal(StellaOpsTenancyDefaults.AnyProject, metadataAccessor.GetMetadata()?.Project); - - var signInContext = new OpenIddictServerEvents.ProcessSignInContext(transaction) - { - Principal = principal, - AccessTokenPrincipal = principal - }; - 
- await persistHandler.HandleAsync(signInContext); - - var stored = await tokenStore.FindByTokenIdAsync(tokenId!, CancellationToken.None); - Assert.NotNull(stored); - Assert.Equal(clientDocument.ClientId, stored!.ClientId); - Assert.Equal(OpenIddictConstants.TokenTypeHints.AccessToken, stored.Type); - Assert.Equal("valid", stored.Status); - Assert.Equal(issuedAt, stored.CreatedAt); - Assert.Equal(issuedAt.AddMinutes(15), stored.ExpiresAt); - Assert.Equal(new[] { "jobs:trigger" }, stored.Scope); - Assert.Equal("tenant-alpha", stored.Tenant); - Assert.Equal(StellaOpsTenancyDefaults.AnyProject, stored.Project); - } - - [Fact] - public async Task ValidateAccessTokenHandler_RejectsRevokedRefreshTokenPersistedInMongo() - { - await ResetCollectionsAsync(); - - var now = new DateTimeOffset(2025, 10, 10, 14, 0, 0, TimeSpan.Zero); - var clock = new FakeTimeProvider(now); - - await using var provider = await BuildMongoProviderAsync(clock); - - var clientStore = provider.GetRequiredService(); - var tokenStore = provider.GetRequiredService(); - - var clientDocument = TestHelpers.CreateClient( - secret: null, - clientType: "public", - allowedGrantTypes: "password refresh_token", - allowedScopes: "openid profile jobs:read", - tenant: "tenant-alpha"); - - await clientStore.UpsertAsync(clientDocument, CancellationToken.None); - - var descriptor = TestHelpers.CreateDescriptor(clientDocument); - var userDescriptor = new AuthorityUserDescriptor("subject-1", "alice", displayName: "Alice", requiresPasswordReset: false); - - var plugin = TestHelpers.CreatePlugin( - name: clientDocument.Plugin ?? "standard", - supportsClientProvisioning: true, - descriptor, - userDescriptor); - - var registry = TestHelpers.CreateRegistryFromPlugins(plugin); - - const string revokedTokenId = "refresh-token-1"; - var refreshToken = new AuthorityTokenDocument - { - TokenId = revokedTokenId, - Type = OpenIddictConstants.TokenTypeHints.RefreshToken, - SubjectId = userDescriptor.SubjectId, - ClientId = clientDocument.ClientId, - Scope = new List { "openid", "profile" }, - Status = "valid", - CreatedAt = now.AddMinutes(-5), - ExpiresAt = now.AddHours(4), - ReferenceId = "refresh-reference-1" - }; - - await tokenStore.InsertAsync(refreshToken, CancellationToken.None); - - var revokedAt = now.AddMinutes(1); - await tokenStore.UpdateStatusAsync(revokedTokenId, "revoked", revokedAt, "manual", null, null, CancellationToken.None); - - var metadataAccessor = new TestRateLimiterMetadataAccessor(); - var auditSink = new TestAuthEventSink(); - await using var scope = provider.CreateAsyncScope(); - var sessionAccessor = scope.ServiceProvider.GetRequiredService(); - var handler = new ValidateAccessTokenHandler( - tokenStore, - sessionAccessor, - clientStore, - registry, - metadataAccessor, - auditSink, - clock, - TestActivitySource, - NullLogger.Instance); - - var transaction = new OpenIddictServerTransaction - { - EndpointType = OpenIddictServerEndpointType.Token, - Options = new OpenIddictServerOptions(), - Request = new OpenIddictRequest - { - GrantType = OpenIddictConstants.GrantTypes.RefreshToken - } - }; - - var principal = TestHelpers.CreatePrincipal( - clientDocument.ClientId, - revokedTokenId, - plugin.Name, - userDescriptor.SubjectId); - - var context = new OpenIddictServerEvents.ValidateTokenContext(transaction) - { - Principal = principal, - TokenId = revokedTokenId - }; - - await handler.HandleAsync(context); - - Assert.True(context.IsRejected); - Assert.Equal(OpenIddictConstants.Errors.InvalidToken, context.Error); - - var stored = 
await tokenStore.FindByTokenIdAsync(revokedTokenId, CancellationToken.None); - Assert.NotNull(stored); - Assert.Equal("revoked", stored!.Status); - Assert.Equal(revokedAt, stored.RevokedAt); - Assert.Equal("manual", stored.RevokedReason); - } - - [Fact] - public async Task RecordUsageAsync_FlagsSuspectedReplay_OnNewDeviceFingerprint() - { - await ResetCollectionsAsync(); - - var issuedAt = new DateTimeOffset(2025, 10, 14, 8, 0, 0, TimeSpan.Zero); - var clock = new FakeTimeProvider(issuedAt); - - await using var provider = await BuildMongoProviderAsync(clock); - - var tokenStore = provider.GetRequiredService(); - - var tokenDocument = new AuthorityTokenDocument - { - TokenId = "token-replay", - Type = OpenIddictConstants.TokenTypeHints.AccessToken, - ClientId = "client-1", - Status = "valid", - CreatedAt = issuedAt, - Devices = new List - { - new BsonDocument - { - { "remoteAddress", "10.0.0.1" }, - { "userAgent", "agent/1.0" }, - { "firstSeen", BsonDateTime.Create(issuedAt.AddMinutes(-10).UtcDateTime) }, - { "lastSeen", BsonDateTime.Create(issuedAt.AddMinutes(-5).UtcDateTime) }, - { "useCount", 2 } - } - } - }; - - await tokenStore.InsertAsync(tokenDocument, CancellationToken.None); - - var result = await tokenStore.RecordUsageAsync( - "token-replay", - remoteAddress: "10.0.0.2", - userAgent: "agent/2.0", - observedAt: clock.GetUtcNow(), - CancellationToken.None); - - Assert.Equal(TokenUsageUpdateStatus.SuspectedReplay, result.Status); - - var stored = await tokenStore.FindByTokenIdAsync("token-replay", CancellationToken.None); - Assert.NotNull(stored); - Assert.Equal(2, stored!.Devices?.Count); - Assert.Contains(stored.Devices!, doc => - { - var remote = doc.TryGetValue("remoteAddress", out var ra) && ra.IsString ? ra.AsString : null; - var agentValue = doc.TryGetValue("userAgent", out var ua) && ua.IsString ? ua.AsString : null; - return remote == "10.0.0.2" && agentValue == "agent/2.0"; - }); - } - - [Fact] - public async Task MongoSessions_ProvideReadYourWriteAfterPrimaryElection() - { - await ResetCollectionsAsync(); - - var clock = new FakeTimeProvider(DateTimeOffset.UtcNow); - await using var provider = await BuildMongoProviderAsync(clock); - - var tokenStore = provider.GetRequiredService(); - - await using var scope = provider.CreateAsyncScope(); - var sessionAccessor = scope.ServiceProvider.GetRequiredService(); - var session = await sessionAccessor.GetSessionAsync(CancellationToken.None); - - var tokenId = $"election-token-{Guid.NewGuid():N}"; - var document = new AuthorityTokenDocument - { - TokenId = tokenId, - Type = OpenIddictConstants.TokenTypeHints.AccessToken, - SubjectId = "session-subject", - ClientId = "session-client", - Scope = new List { "jobs:read" }, - Status = "valid", - CreatedAt = clock.GetUtcNow(), - ExpiresAt = clock.GetUtcNow().AddMinutes(30) - }; - - await tokenStore.InsertAsync(document, CancellationToken.None, session); - - await StepDownPrimaryAsync(fixture.Client, CancellationToken.None); - - AuthorityTokenDocument? 
fetched = null; - for (var attempt = 0; attempt < 5; attempt++) - { - try - { - fetched = await tokenStore.FindByTokenIdAsync(tokenId, CancellationToken.None, session); - if (fetched is not null) - { - break; - } - } - catch (MongoException) - { - await Task.Delay(250); - } - } - - Assert.NotNull(fetched); - Assert.Equal(tokenId, fetched!.TokenId); - } - - private static async Task StepDownPrimaryAsync(IMongoClient client, CancellationToken cancellationToken) - { - var admin = client.GetDatabase("admin"); - try - { - var command = new BsonDocument - { - { "replSetStepDown", 5 }, - { "force", true } - }; - - await admin.RunCommandAsync(command, cancellationToken: cancellationToken); - } - catch (MongoCommandException) - { - // Expected when the current primary steps down. - } - catch (MongoConnectionException) - { - // Connection may drop during election; ignore and continue. - } - - await WaitForPrimaryAsync(admin, cancellationToken); - } - - private static async Task WaitForPrimaryAsync(IMongoDatabase adminDatabase, CancellationToken cancellationToken) - { - for (var attempt = 0; attempt < 40; attempt++) - { - cancellationToken.ThrowIfCancellationRequested(); - try - { - var status = await adminDatabase.RunCommandAsync(new BsonDocument { { "replSetGetStatus", 1 } }, cancellationToken: cancellationToken); - if (status.TryGetValue("myState", out var state) && state.ToInt32() == 1) - { - return; - } - } - catch (MongoCommandException) - { - // Ignore intermediate states and retry. - } - - await Task.Delay(250, cancellationToken); - } - - throw new TimeoutException("Replica set primary election did not complete in time."); - } - - private async Task ResetCollectionsAsync() - { - var tokens = fixture.Database.GetCollection(AuthorityMongoDefaults.Collections.Tokens); - await tokens.DeleteManyAsync(Builders.Filter.Empty); - - var clients = fixture.Database.GetCollection(AuthorityMongoDefaults.Collections.Clients); - await clients.DeleteManyAsync(Builders.Filter.Empty); - } - - private async Task BuildMongoProviderAsync(FakeTimeProvider clock) - { - var services = new ServiceCollection(); - services.AddSingleton(clock); - services.AddLogging(); - services.AddAuthorityMongoStorage(options => - { - options.ConnectionString = fixture.Runner.ConnectionString; - options.DatabaseName = fixture.Database.DatabaseNamespace.DatabaseName; - options.CommandTimeout = TimeSpan.FromSeconds(5); - }); - - var provider = services.BuildServiceProvider(); - - var initializer = provider.GetRequiredService(); - var database = provider.GetRequiredService(); - await initializer.InitialiseAsync(database, CancellationToken.None); - - return provider; - } -} +using System.Collections.Generic; +using System.Diagnostics; +using System.Security.Claims; +using Microsoft.AspNetCore.Http; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Time.Testing; +using MongoDB.Driver; +using MongoDB.Bson; +using OpenIddict.Abstractions; +using OpenIddict.Server; +using StellaOps.Authority; +using StellaOps.Authority.OpenIddict; +using StellaOps.Authority.OpenIddict.Handlers; +using StellaOps.Authority.Plugins.Abstractions; +using StellaOps.Authority.Storage.Mongo; +using StellaOps.Authority.Storage.Mongo.Documents; +using StellaOps.Authority.Storage.Mongo.Extensions; +using StellaOps.Authority.Storage.Mongo.Initialization; +using StellaOps.Authority.Storage.Mongo.Sessions; +using StellaOps.Auth.Abstractions; +using StellaOps.Authority.Storage.Mongo.Stores; +using 
StellaOps.Concelier.Testing; +using StellaOps.Authority.RateLimiting; +using StellaOps.Authority.Security; +using StellaOps.Cryptography.Audit; +using Xunit; + +namespace StellaOps.Authority.Tests.OpenIddict; + +[Collection("mongo-fixture")] +public sealed class TokenPersistenceIntegrationTests +{ + private static readonly ActivitySource TestActivitySource = new("StellaOps.Authority.Tests.Persistence"); + private readonly MongoIntegrationFixture fixture; + + public TokenPersistenceIntegrationTests(MongoIntegrationFixture fixture) + => this.fixture = fixture ?? throw new ArgumentNullException(nameof(fixture)); + + [Fact] + public async Task HandleClientCredentials_PersistsTokenInMongo() + { + await ResetCollectionsAsync(); + + var issuedAt = new DateTimeOffset(2025, 10, 10, 12, 0, 0, TimeSpan.Zero); + var clock = new FakeTimeProvider(issuedAt); + + await using var provider = await BuildMongoProviderAsync(clock); + + var clientStore = provider.GetRequiredService(); + var tokenStore = provider.GetRequiredService(); + + var clientDocument = TestHelpers.CreateClient( + secret: "s3cr3t!", + allowedGrantTypes: "client_credentials", + allowedScopes: "jobs:trigger jobs:read", + tenant: "tenant-alpha"); + + await clientStore.UpsertAsync(clientDocument, CancellationToken.None); + + var registry = TestHelpers.CreateRegistry( + withClientProvisioning: true, + clientDescriptor: TestHelpers.CreateDescriptor(clientDocument)); + + var authSink = new TestAuthEventSink(); + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + await using var scope = provider.CreateAsyncScope(); + var sessionAccessor = scope.ServiceProvider.GetRequiredService(); + var options = TestHelpers.CreateAuthorityOptions(); + var validateHandler = new ValidateClientCredentialsHandler(clientStore, registry, TestActivitySource, authSink, metadataAccessor, clock, new NoopCertificateValidator(), new HttpContextAccessor(), options, NullLogger.Instance); + var handleHandler = new HandleClientCredentialsHandler(registry, tokenStore, sessionAccessor, metadataAccessor, clock, TestActivitySource, NullLogger.Instance); + var persistHandler = new PersistTokensHandler(tokenStore, sessionAccessor, clock, TestActivitySource, NullLogger.Instance); + + var transaction = TestHelpers.CreateTokenTransaction(clientDocument.ClientId, "s3cr3t!", scope: "jobs:trigger"); + transaction.Options.AccessTokenLifetime = TimeSpan.FromMinutes(15); + + var validateContext = new OpenIddictServerEvents.ValidateTokenRequestContext(transaction); + await validateHandler.HandleAsync(validateContext); + Assert.False(validateContext.IsRejected); + + var handleContext = new OpenIddictServerEvents.HandleTokenRequestContext(transaction); + await handleHandler.HandleAsync(handleContext); + + Assert.True(handleContext.IsRequestHandled); + var principal = Assert.IsType(handleContext.Principal); + var tokenId = principal.GetClaim(OpenIddictConstants.Claims.JwtId); + Assert.False(string.IsNullOrWhiteSpace(tokenId)); + Assert.Equal("tenant-alpha", metadataAccessor.GetMetadata()?.Tenant); + Assert.Equal(StellaOpsTenancyDefaults.AnyProject, metadataAccessor.GetMetadata()?.Project); + + var signInContext = new OpenIddictServerEvents.ProcessSignInContext(transaction) + { + Principal = principal, + AccessTokenPrincipal = principal + }; + + await persistHandler.HandleAsync(signInContext); + + var stored = await tokenStore.FindByTokenIdAsync(tokenId!, CancellationToken.None); + Assert.NotNull(stored); + Assert.Equal(clientDocument.ClientId, stored!.ClientId); + 
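// The persisted token document should mirror the issued access token: client, type hint, status, the 15-minute lifetime, the requested scope, and the tenant/project metadata asserted below. +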
Assert.Equal(OpenIddictConstants.TokenTypeHints.AccessToken, stored.Type); + Assert.Equal("valid", stored.Status); + Assert.Equal(issuedAt, stored.CreatedAt); + Assert.Equal(issuedAt.AddMinutes(15), stored.ExpiresAt); + Assert.Equal(new[] { "jobs:trigger" }, stored.Scope); + Assert.Equal("tenant-alpha", stored.Tenant); + Assert.Equal(StellaOpsTenancyDefaults.AnyProject, stored.Project); + } + + [Fact] + public async Task ValidateAccessTokenHandler_RejectsRevokedRefreshTokenPersistedInMongo() + { + await ResetCollectionsAsync(); + + var now = new DateTimeOffset(2025, 10, 10, 14, 0, 0, TimeSpan.Zero); + var clock = new FakeTimeProvider(now); + + await using var provider = await BuildMongoProviderAsync(clock); + + var clientStore = provider.GetRequiredService(); + var tokenStore = provider.GetRequiredService(); + + var clientDocument = TestHelpers.CreateClient( + secret: null, + clientType: "public", + allowedGrantTypes: "password refresh_token", + allowedScopes: "openid profile jobs:read", + tenant: "tenant-alpha"); + + await clientStore.UpsertAsync(clientDocument, CancellationToken.None); + + var descriptor = TestHelpers.CreateDescriptor(clientDocument); + var userDescriptor = new AuthorityUserDescriptor("subject-1", "alice", displayName: "Alice", requiresPasswordReset: false); + + var plugin = TestHelpers.CreatePlugin( + name: clientDocument.Plugin ?? "standard", + supportsClientProvisioning: true, + descriptor, + userDescriptor); + + var registry = TestHelpers.CreateRegistryFromPlugins(plugin); + + const string revokedTokenId = "refresh-token-1"; + var refreshToken = new AuthorityTokenDocument + { + TokenId = revokedTokenId, + Type = OpenIddictConstants.TokenTypeHints.RefreshToken, + SubjectId = userDescriptor.SubjectId, + ClientId = clientDocument.ClientId, + Scope = new List { "openid", "profile" }, + Status = "valid", + CreatedAt = now.AddMinutes(-5), + ExpiresAt = now.AddHours(4), + ReferenceId = "refresh-reference-1" + }; + + await tokenStore.InsertAsync(refreshToken, CancellationToken.None); + + var revokedAt = now.AddMinutes(1); + await tokenStore.UpdateStatusAsync(revokedTokenId, "revoked", revokedAt, "manual", null, null, CancellationToken.None); + + var metadataAccessor = new TestRateLimiterMetadataAccessor(); + var auditSink = new TestAuthEventSink(); + await using var scope = provider.CreateAsyncScope(); + var sessionAccessor = scope.ServiceProvider.GetRequiredService(); + var handler = new ValidateAccessTokenHandler( + tokenStore, + sessionAccessor, + clientStore, + registry, + metadataAccessor, + auditSink, + clock, + TestActivitySource, + NullLogger.Instance); + + var transaction = new OpenIddictServerTransaction + { + EndpointType = OpenIddictServerEndpointType.Token, + Options = new OpenIddictServerOptions(), + Request = new OpenIddictRequest + { + GrantType = OpenIddictConstants.GrantTypes.RefreshToken + } + }; + + var principal = TestHelpers.CreatePrincipal( + clientDocument.ClientId, + revokedTokenId, + plugin.Name, + userDescriptor.SubjectId); + + var context = new OpenIddictServerEvents.ValidateTokenContext(transaction) + { + Principal = principal, + TokenId = revokedTokenId + }; + + await handler.HandleAsync(context); + + Assert.True(context.IsRejected); + Assert.Equal(OpenIddictConstants.Errors.InvalidToken, context.Error); + + var stored = await tokenStore.FindByTokenIdAsync(revokedTokenId, CancellationToken.None); + Assert.NotNull(stored); + Assert.Equal("revoked", stored!.Status); + Assert.Equal(revokedAt, stored.RevokedAt); + Assert.Equal("manual", 
stored.RevokedReason); + } + + [Fact] + public async Task RecordUsageAsync_FlagsSuspectedReplay_OnNewDeviceFingerprint() + { + await ResetCollectionsAsync(); + + var issuedAt = new DateTimeOffset(2025, 10, 14, 8, 0, 0, TimeSpan.Zero); + var clock = new FakeTimeProvider(issuedAt); + + await using var provider = await BuildMongoProviderAsync(clock); + + var tokenStore = provider.GetRequiredService(); + + var tokenDocument = new AuthorityTokenDocument + { + TokenId = "token-replay", + Type = OpenIddictConstants.TokenTypeHints.AccessToken, + ClientId = "client-1", + Status = "valid", + CreatedAt = issuedAt, + Devices = new List + { + new BsonDocument + { + { "remoteAddress", "10.0.0.1" }, + { "userAgent", "agent/1.0" }, + { "firstSeen", BsonDateTime.Create(issuedAt.AddMinutes(-10).UtcDateTime) }, + { "lastSeen", BsonDateTime.Create(issuedAt.AddMinutes(-5).UtcDateTime) }, + { "useCount", 2 } + } + } + }; + + await tokenStore.InsertAsync(tokenDocument, CancellationToken.None); + + var result = await tokenStore.RecordUsageAsync( + "token-replay", + remoteAddress: "10.0.0.2", + userAgent: "agent/2.0", + observedAt: clock.GetUtcNow(), + CancellationToken.None); + + Assert.Equal(TokenUsageUpdateStatus.SuspectedReplay, result.Status); + + var stored = await tokenStore.FindByTokenIdAsync("token-replay", CancellationToken.None); + Assert.NotNull(stored); + Assert.Equal(2, stored!.Devices?.Count); + Assert.Contains(stored.Devices!, doc => + { + var remote = doc.TryGetValue("remoteAddress", out var ra) && ra.IsString ? ra.AsString : null; + var agentValue = doc.TryGetValue("userAgent", out var ua) && ua.IsString ? ua.AsString : null; + return remote == "10.0.0.2" && agentValue == "agent/2.0"; + }); + } + + [Fact] + public async Task MongoSessions_ProvideReadYourWriteAfterPrimaryElection() + { + await ResetCollectionsAsync(); + + var clock = new FakeTimeProvider(DateTimeOffset.UtcNow); + await using var provider = await BuildMongoProviderAsync(clock); + + var tokenStore = provider.GetRequiredService(); + + await using var scope = provider.CreateAsyncScope(); + var sessionAccessor = scope.ServiceProvider.GetRequiredService(); + var session = await sessionAccessor.GetSessionAsync(CancellationToken.None); + + var tokenId = $"election-token-{Guid.NewGuid():N}"; + var document = new AuthorityTokenDocument + { + TokenId = tokenId, + Type = OpenIddictConstants.TokenTypeHints.AccessToken, + SubjectId = "session-subject", + ClientId = "session-client", + Scope = new List { "jobs:read" }, + Status = "valid", + CreatedAt = clock.GetUtcNow(), + ExpiresAt = clock.GetUtcNow().AddMinutes(30) + }; + + await tokenStore.InsertAsync(document, CancellationToken.None, session); + + await StepDownPrimaryAsync(fixture.Client, CancellationToken.None); + + AuthorityTokenDocument? 
fetched = null; + for (var attempt = 0; attempt < 5; attempt++) + { + try + { + fetched = await tokenStore.FindByTokenIdAsync(tokenId, CancellationToken.None, session); + if (fetched is not null) + { + break; + } + } + catch (MongoException) + { + await Task.Delay(250); + } + } + + Assert.NotNull(fetched); + Assert.Equal(tokenId, fetched!.TokenId); + } + + private static async Task StepDownPrimaryAsync(IMongoClient client, CancellationToken cancellationToken) + { + var admin = client.GetDatabase("admin"); + try + { + var command = new BsonDocument + { + { "replSetStepDown", 5 }, + { "force", true } + }; + + await admin.RunCommandAsync(command, cancellationToken: cancellationToken); + } + catch (MongoCommandException) + { + // Expected when the current primary steps down. + } + catch (MongoConnectionException) + { + // Connection may drop during election; ignore and continue. + } + + await WaitForPrimaryAsync(admin, cancellationToken); + } + + private static async Task WaitForPrimaryAsync(IMongoDatabase adminDatabase, CancellationToken cancellationToken) + { + for (var attempt = 0; attempt < 40; attempt++) + { + cancellationToken.ThrowIfCancellationRequested(); + try + { + var status = await adminDatabase.RunCommandAsync(new BsonDocument { { "replSetGetStatus", 1 } }, cancellationToken: cancellationToken); + if (status.TryGetValue("myState", out var state) && state.ToInt32() == 1) + { + return; + } + } + catch (MongoCommandException) + { + // Ignore intermediate states and retry. + } + + await Task.Delay(250, cancellationToken); + } + + throw new TimeoutException("Replica set primary election did not complete in time."); + } + + private async Task ResetCollectionsAsync() + { + var tokens = fixture.Database.GetCollection(AuthorityMongoDefaults.Collections.Tokens); + await tokens.DeleteManyAsync(Builders.Filter.Empty); + + var clients = fixture.Database.GetCollection(AuthorityMongoDefaults.Collections.Clients); + await clients.DeleteManyAsync(Builders.Filter.Empty); + } + + private async Task BuildMongoProviderAsync(FakeTimeProvider clock) + { + var services = new ServiceCollection(); + services.AddSingleton(clock); + services.AddLogging(); + services.AddAuthorityMongoStorage(options => + { + options.ConnectionString = fixture.Runner.ConnectionString; + options.DatabaseName = fixture.Database.DatabaseNamespace.DatabaseName; + options.CommandTimeout = TimeSpan.FromSeconds(5); + }); + + var provider = services.BuildServiceProvider(); + + var initializer = provider.GetRequiredService(); + var database = provider.GetRequiredService(); + await initializer.InitialiseAsync(database, CancellationToken.None); + + return provider; + } +} diff --git a/src/StellaOps.Authority/StellaOps.Authority.Tests/Permalinks/VulnPermalinkServiceTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Permalinks/VulnPermalinkServiceTests.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Authority.Tests/Permalinks/VulnPermalinkServiceTests.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Permalinks/VulnPermalinkServiceTests.cs index da59557c..0a8cb197 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Tests/Permalinks/VulnPermalinkServiceTests.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Permalinks/VulnPermalinkServiceTests.cs @@ -1,151 +1,151 @@ -using System; -using System.IO; -using System.Linq; -using System.Text; -using System.Text.Json; -using System.Threading; -using Microsoft.Extensions.DependencyInjection; -using 
Microsoft.Extensions.DependencyInjection.Extensions; -using Microsoft.Extensions.FileProviders; -using Microsoft.Extensions.Hosting; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using Microsoft.Extensions.Time.Testing; -using Microsoft.IdentityModel.Tokens; -using StellaOps.Authority.Permalinks; -using StellaOps.Authority.Signing; -using StellaOps.Configuration; -using StellaOps.Cryptography; -using StellaOps.Cryptography.DependencyInjection; -using StellaOps.Auth.Abstractions; -using Xunit; - -namespace StellaOps.Authority.Tests.Permalinks; - -public sealed class VulnPermalinkServiceTests -{ - [Fact] - public async Task CreateAsync_IssuesSignedTokenWithExpectedClaims() - { - var tempDir = Directory.CreateTempSubdirectory("authority-permalink-tests").FullName; - var keyRelative = "permalink.pem"; - try - { - CreateEcPrivateKey(Path.Combine(tempDir, keyRelative)); - - var options = new StellaOpsAuthorityOptions - { - Issuer = new Uri("https://authority.test"), - Storage = { ConnectionString = "mongodb://localhost/test" }, - Signing = - { - Enabled = true, - ActiveKeyId = "permalink-key", - KeyPath = keyRelative, - Algorithm = SignatureAlgorithms.Es256, - KeySource = "file", - Provider = "default" - } - }; - - var fakeTime = new FakeTimeProvider(DateTimeOffset.Parse("2025-10-26T12:00:00Z")); - - using var provider = BuildProvider(tempDir, options, fakeTime); - // Ensure signing keys are loaded - provider.GetRequiredService(); - - var service = provider.GetRequiredService(); - var state = JsonDocument.Parse("{\"vulnId\":\"CVE-2025-1234\"}").RootElement; - var request = new VulnPermalinkRequest( - Tenant: "tenant-a", - ResourceKind: "vulnerability", - State: state, - ExpiresInSeconds: null, - Environment: "prod"); - - var expectedNow = fakeTime.GetUtcNow(); - - var response = await service.CreateAsync(request, default); - - Assert.NotNull(response.Token); - Assert.Equal(expectedNow, response.IssuedAt); - Assert.Equal(expectedNow.AddHours(24), response.ExpiresAt); - Assert.Contains(StellaOpsScopes.VulnRead, response.Scopes); - - var parts = response.Token.Split('.'); - Assert.Equal(3, parts.Length); - - var payloadBytes = Base64UrlEncoder.DecodeBytes(parts[1]); - using var payloadDocument = JsonDocument.Parse(payloadBytes); - var payload = payloadDocument.RootElement; - - Assert.Equal("vulnerability", payload.GetProperty("type").GetString()); - Assert.Equal("tenant-a", payload.GetProperty("tenant").GetString()); - Assert.Equal("prod", payload.GetProperty("environment").GetString()); - Assert.Equal(expectedNow.ToUnixTimeSeconds(), payload.GetProperty("iat").GetInt64()); - Assert.Equal(expectedNow.ToUnixTimeSeconds(), payload.GetProperty("nbf").GetInt64()); - Assert.Equal(expectedNow.AddHours(24).ToUnixTimeSeconds(), payload.GetProperty("exp").GetInt64()); - - var scopes = payload.GetProperty("scopes").EnumerateArray().Select(element => element.GetString()).ToArray(); - Assert.Contains(StellaOpsScopes.VulnRead, scopes); - - var resource = payload.GetProperty("resource"); - Assert.Equal("vulnerability", resource.GetProperty("kind").GetString()); - Assert.Equal("CVE-2025-1234", resource.GetProperty("state").GetProperty("vulnId").GetString()); - } - finally - { - try - { - Directory.Delete(tempDir, recursive: true); - } - catch - { - // ignore cleanup failures - } - } - } - - private static ServiceProvider BuildProvider(string basePath, StellaOpsAuthorityOptions options, TimeProvider timeProvider) - { - var services = new ServiceCollection(); - 
services.AddLogging(builder => builder.SetMinimumLevel(LogLevel.Debug)); - services.AddSingleton(new TestHostEnvironment(basePath)); - services.AddSingleton(options); - services.AddSingleton>(Options.Create(options)); - services.AddSingleton(timeProvider); - services.AddStellaOpsCrypto(); - services.TryAddEnumerable(ServiceDescriptor.Singleton()); - services.AddSingleton(); - services.AddSingleton(); - - return services.BuildServiceProvider(); - } - - private static void CreateEcPrivateKey(string path) - { - Directory.CreateDirectory(Path.GetDirectoryName(path)!); - using var ecdsa = System.Security.Cryptography.ECDsa.Create(System.Security.Cryptography.ECCurve.NamedCurves.nistP256); - var pem = ecdsa.ExportECPrivateKeyPem(); - File.WriteAllText(path, pem); - } - - private sealed class TestHostEnvironment : IHostEnvironment - { - public TestHostEnvironment(string contentRoot) - { - ContentRootPath = contentRoot; - ContentRootFileProvider = new PhysicalFileProvider(contentRoot); - EnvironmentName = Environments.Development; - ApplicationName = "StellaOps.Authority.Tests"; - } - - public string EnvironmentName { get; set; } - - public string ApplicationName { get; set; } - - public string ContentRootPath { get; set; } - - public IFileProvider ContentRootFileProvider { get; set; } - } -} +using System; +using System.IO; +using System.Linq; +using System.Text; +using System.Text.Json; +using System.Threading; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Microsoft.Extensions.FileProviders; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using Microsoft.Extensions.Time.Testing; +using Microsoft.IdentityModel.Tokens; +using StellaOps.Authority.Permalinks; +using StellaOps.Authority.Signing; +using StellaOps.Configuration; +using StellaOps.Cryptography; +using StellaOps.Cryptography.DependencyInjection; +using StellaOps.Auth.Abstractions; +using Xunit; + +namespace StellaOps.Authority.Tests.Permalinks; + +public sealed class VulnPermalinkServiceTests +{ + [Fact] + public async Task CreateAsync_IssuesSignedTokenWithExpectedClaims() + { + var tempDir = Directory.CreateTempSubdirectory("authority-permalink-tests").FullName; + var keyRelative = "permalink.pem"; + try + { + CreateEcPrivateKey(Path.Combine(tempDir, keyRelative)); + + var options = new StellaOpsAuthorityOptions + { + Issuer = new Uri("https://authority.test"), + Storage = { ConnectionString = "mongodb://localhost/test" }, + Signing = + { + Enabled = true, + ActiveKeyId = "permalink-key", + KeyPath = keyRelative, + Algorithm = SignatureAlgorithms.Es256, + KeySource = "file", + Provider = "default" + } + }; + + var fakeTime = new FakeTimeProvider(DateTimeOffset.Parse("2025-10-26T12:00:00Z")); + + using var provider = BuildProvider(tempDir, options, fakeTime); + // Ensure signing keys are loaded + provider.GetRequiredService(); + + var service = provider.GetRequiredService(); + var state = JsonDocument.Parse("{\"vulnId\":\"CVE-2025-1234\"}").RootElement; + var request = new VulnPermalinkRequest( + Tenant: "tenant-a", + ResourceKind: "vulnerability", + State: state, + ExpiresInSeconds: null, + Environment: "prod"); + + var expectedNow = fakeTime.GetUtcNow(); + + var response = await service.CreateAsync(request, default); + + Assert.NotNull(response.Token); + Assert.Equal(expectedNow, response.IssuedAt); + Assert.Equal(expectedNow.AddHours(24), response.ExpiresAt); + Assert.Contains(StellaOpsScopes.VulnRead, 
response.Scopes); + + var parts = response.Token.Split('.'); + Assert.Equal(3, parts.Length); + + var payloadBytes = Base64UrlEncoder.DecodeBytes(parts[1]); + using var payloadDocument = JsonDocument.Parse(payloadBytes); + var payload = payloadDocument.RootElement; + + Assert.Equal("vulnerability", payload.GetProperty("type").GetString()); + Assert.Equal("tenant-a", payload.GetProperty("tenant").GetString()); + Assert.Equal("prod", payload.GetProperty("environment").GetString()); + Assert.Equal(expectedNow.ToUnixTimeSeconds(), payload.GetProperty("iat").GetInt64()); + Assert.Equal(expectedNow.ToUnixTimeSeconds(), payload.GetProperty("nbf").GetInt64()); + Assert.Equal(expectedNow.AddHours(24).ToUnixTimeSeconds(), payload.GetProperty("exp").GetInt64()); + + var scopes = payload.GetProperty("scopes").EnumerateArray().Select(element => element.GetString()).ToArray(); + Assert.Contains(StellaOpsScopes.VulnRead, scopes); + + var resource = payload.GetProperty("resource"); + Assert.Equal("vulnerability", resource.GetProperty("kind").GetString()); + Assert.Equal("CVE-2025-1234", resource.GetProperty("state").GetProperty("vulnId").GetString()); + } + finally + { + try + { + Directory.Delete(tempDir, recursive: true); + } + catch + { + // ignore cleanup failures + } + } + } + + private static ServiceProvider BuildProvider(string basePath, StellaOpsAuthorityOptions options, TimeProvider timeProvider) + { + var services = new ServiceCollection(); + services.AddLogging(builder => builder.SetMinimumLevel(LogLevel.Debug)); + services.AddSingleton(new TestHostEnvironment(basePath)); + services.AddSingleton(options); + services.AddSingleton>(Options.Create(options)); + services.AddSingleton(timeProvider); + services.AddStellaOpsCrypto(); + services.TryAddEnumerable(ServiceDescriptor.Singleton()); + services.AddSingleton(); + services.AddSingleton(); + + return services.BuildServiceProvider(); + } + + private static void CreateEcPrivateKey(string path) + { + Directory.CreateDirectory(Path.GetDirectoryName(path)!); + using var ecdsa = System.Security.Cryptography.ECDsa.Create(System.Security.Cryptography.ECCurve.NamedCurves.nistP256); + var pem = ecdsa.ExportECPrivateKeyPem(); + File.WriteAllText(path, pem); + } + + private sealed class TestHostEnvironment : IHostEnvironment + { + public TestHostEnvironment(string contentRoot) + { + ContentRootPath = contentRoot; + ContentRootFileProvider = new PhysicalFileProvider(contentRoot); + EnvironmentName = Environments.Development; + ApplicationName = "StellaOps.Authority.Tests"; + } + + public string EnvironmentName { get; set; } + + public string ApplicationName { get; set; } + + public string ContentRootPath { get; set; } + + public IFileProvider ContentRootFileProvider { get; set; } + } +} diff --git a/src/StellaOps.Authority/StellaOps.Authority.Tests/Plugins/AuthorityPluginLoaderTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Plugins/AuthorityPluginLoaderTests.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Authority.Tests/Plugins/AuthorityPluginLoaderTests.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Plugins/AuthorityPluginLoaderTests.cs index 4fd137ae..bcdcd351 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Tests/Plugins/AuthorityPluginLoaderTests.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Plugins/AuthorityPluginLoaderTests.cs @@ -1,193 +1,193 @@ -using System; -using System.Collections.Generic; -using Microsoft.Extensions.Configuration; -using 
Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using StellaOps.Authority.Plugins; -using StellaOps.Authority.Plugins.Abstractions; - -namespace StellaOps.Authority.Tests.Plugins; - -public class AuthorityPluginLoaderTests -{ - [Fact] - public void RegisterPlugins_ReturnsEmptySummary_WhenNoPluginsConfigured() - { - var services = new ServiceCollection(); - var configuration = new ConfigurationBuilder().Build(); - - var summary = AuthorityPluginLoader.RegisterPluginsCore( - services, - configuration, - Array.Empty(), - Array.Empty(), - Array.Empty(), - NullLogger.Instance); - - Assert.Empty(summary.RegisteredPlugins); - Assert.Empty(summary.Failures); - Assert.Empty(summary.MissingOrderedPlugins); - } - - [Fact] - public void RegisterPlugins_RecordsFailure_WhenAssemblyMissing() - { - var services = new ServiceCollection(); - var hostConfiguration = new ConfigurationBuilder().Build(); - - var manifest = new AuthorityPluginManifest( - "standard", - "standard", - true, - "StellaOps.Authority.Plugin.Standard", - null, - Array.Empty(), - new Dictionary(), - "standard.yaml"); - - var contexts = new[] - { - new AuthorityPluginContext(manifest, hostConfiguration) - }; - - var summary = AuthorityPluginLoader.RegisterPluginsCore( - services, - hostConfiguration, - contexts, - Array.Empty(), - Array.Empty(), - NullLogger.Instance); - - var failure = Assert.Single(summary.Failures); - Assert.Equal("standard", failure.PluginName); - Assert.Contains("Assembly", failure.Reason, StringComparison.OrdinalIgnoreCase); - } - - [Fact] - public void RegisterPlugins_RegistersEnabledPlugin_WhenRegistrarAvailable() - { - var services = new ServiceCollection(); - services.AddLogging(); - var hostConfiguration = new ConfigurationBuilder().Build(); - - var manifest = new AuthorityPluginManifest( - "test", - TestAuthorityPluginRegistrar.PluginTypeIdentifier, - true, - typeof(TestAuthorityPluginRegistrar).Assembly.GetName().Name, - typeof(TestAuthorityPluginRegistrar).Assembly.Location, - Array.Empty(), - new Dictionary(), - "test.yaml"); - - var pluginContext = new AuthorityPluginContext(manifest, hostConfiguration); - var descriptor = new AuthorityPluginLoader.LoadedPluginDescriptor( - typeof(TestAuthorityPluginRegistrar).Assembly, - typeof(TestAuthorityPluginRegistrar).Assembly.Location); - - var summary = AuthorityPluginLoader.RegisterPluginsCore( - services, - hostConfiguration, - new[] { pluginContext }, - new[] { descriptor }, - Array.Empty(), - NullLogger.Instance); - - Assert.Contains("test", summary.RegisteredPlugins); - Assert.Empty(summary.Failures); - - var provider = services.BuildServiceProvider(); - Assert.NotNull(provider.GetRequiredService()); - } - - [Fact] - public void RegisterPlugins_ActivatesRegistrarUsingDependencyInjection() - { - var services = new ServiceCollection(); - services.AddLogging(); - services.AddSingleton(TimeProvider.System); - - var hostConfiguration = new ConfigurationBuilder().Build(); - - var manifest = new AuthorityPluginManifest( - "di-test", - DiAuthorityPluginRegistrar.PluginTypeIdentifier, - true, - typeof(DiAuthorityPluginRegistrar).Assembly.GetName().Name, - typeof(DiAuthorityPluginRegistrar).Assembly.Location, - Array.Empty(), - new Dictionary(), - "di-test.yaml"); - - var pluginContext = new AuthorityPluginContext(manifest, hostConfiguration); - var descriptor = new AuthorityPluginLoader.LoadedPluginDescriptor( - typeof(DiAuthorityPluginRegistrar).Assembly, - 
typeof(DiAuthorityPluginRegistrar).Assembly.Location); - - var summary = AuthorityPluginLoader.RegisterPluginsCore( - services, - hostConfiguration, - new[] { pluginContext }, - new[] { descriptor }, - Array.Empty(), - NullLogger.Instance); - - Assert.Contains("di-test", summary.RegisteredPlugins); - - var provider = services.BuildServiceProvider(); - var dependent = provider.GetRequiredService(); - Assert.True(dependent.LoggerWasResolved); - Assert.True(dependent.TimeProviderResolved); - } - - private sealed class TestAuthorityPluginRegistrar : IAuthorityPluginRegistrar - { - public const string PluginTypeIdentifier = "test-plugin"; - - public string PluginType => PluginTypeIdentifier; - - public void Register(AuthorityPluginRegistrationContext context) - { - context.Services.AddSingleton(); - } - } - - private sealed class TestMarkerService - { - } - - private sealed class DiAuthorityPluginRegistrar : IAuthorityPluginRegistrar - { - public const string PluginTypeIdentifier = "test-plugin-di"; - - private readonly ILogger logger; - private readonly TimeProvider timeProvider; - - public DiAuthorityPluginRegistrar(ILogger logger, TimeProvider timeProvider) - { - this.logger = logger; - this.timeProvider = timeProvider; - } - - public string PluginType => PluginTypeIdentifier; - - public void Register(AuthorityPluginRegistrationContext context) - { - context.Services.AddSingleton(new DependentService(logger != null, timeProvider != null)); - } - } - - private sealed class DependentService - { - public DependentService(bool loggerResolved, bool timeProviderResolved) - { - LoggerWasResolved = loggerResolved; - TimeProviderResolved = timeProviderResolved; - } - - public bool LoggerWasResolved { get; } - - public bool TimeProviderResolved { get; } - } -} +using System; +using System.Collections.Generic; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using StellaOps.Authority.Plugins; +using StellaOps.Authority.Plugins.Abstractions; + +namespace StellaOps.Authority.Tests.Plugins; + +public class AuthorityPluginLoaderTests +{ + [Fact] + public void RegisterPlugins_ReturnsEmptySummary_WhenNoPluginsConfigured() + { + var services = new ServiceCollection(); + var configuration = new ConfigurationBuilder().Build(); + + var summary = AuthorityPluginLoader.RegisterPluginsCore( + services, + configuration, + Array.Empty(), + Array.Empty(), + Array.Empty(), + NullLogger.Instance); + + Assert.Empty(summary.RegisteredPlugins); + Assert.Empty(summary.Failures); + Assert.Empty(summary.MissingOrderedPlugins); + } + + [Fact] + public void RegisterPlugins_RecordsFailure_WhenAssemblyMissing() + { + var services = new ServiceCollection(); + var hostConfiguration = new ConfigurationBuilder().Build(); + + var manifest = new AuthorityPluginManifest( + "standard", + "standard", + true, + "StellaOps.Authority.Plugin.Standard", + null, + Array.Empty(), + new Dictionary(), + "standard.yaml"); + + var contexts = new[] + { + new AuthorityPluginContext(manifest, hostConfiguration) + }; + + var summary = AuthorityPluginLoader.RegisterPluginsCore( + services, + hostConfiguration, + contexts, + Array.Empty(), + Array.Empty(), + NullLogger.Instance); + + var failure = Assert.Single(summary.Failures); + Assert.Equal("standard", failure.PluginName); + Assert.Contains("Assembly", failure.Reason, StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public void 
RegisterPlugins_RegistersEnabledPlugin_WhenRegistrarAvailable() + { + var services = new ServiceCollection(); + services.AddLogging(); + var hostConfiguration = new ConfigurationBuilder().Build(); + + var manifest = new AuthorityPluginManifest( + "test", + TestAuthorityPluginRegistrar.PluginTypeIdentifier, + true, + typeof(TestAuthorityPluginRegistrar).Assembly.GetName().Name, + typeof(TestAuthorityPluginRegistrar).Assembly.Location, + Array.Empty(), + new Dictionary(), + "test.yaml"); + + var pluginContext = new AuthorityPluginContext(manifest, hostConfiguration); + var descriptor = new AuthorityPluginLoader.LoadedPluginDescriptor( + typeof(TestAuthorityPluginRegistrar).Assembly, + typeof(TestAuthorityPluginRegistrar).Assembly.Location); + + var summary = AuthorityPluginLoader.RegisterPluginsCore( + services, + hostConfiguration, + new[] { pluginContext }, + new[] { descriptor }, + Array.Empty(), + NullLogger.Instance); + + Assert.Contains("test", summary.RegisteredPlugins); + Assert.Empty(summary.Failures); + + var provider = services.BuildServiceProvider(); + Assert.NotNull(provider.GetRequiredService()); + } + + [Fact] + public void RegisterPlugins_ActivatesRegistrarUsingDependencyInjection() + { + var services = new ServiceCollection(); + services.AddLogging(); + services.AddSingleton(TimeProvider.System); + + var hostConfiguration = new ConfigurationBuilder().Build(); + + var manifest = new AuthorityPluginManifest( + "di-test", + DiAuthorityPluginRegistrar.PluginTypeIdentifier, + true, + typeof(DiAuthorityPluginRegistrar).Assembly.GetName().Name, + typeof(DiAuthorityPluginRegistrar).Assembly.Location, + Array.Empty(), + new Dictionary(), + "di-test.yaml"); + + var pluginContext = new AuthorityPluginContext(manifest, hostConfiguration); + var descriptor = new AuthorityPluginLoader.LoadedPluginDescriptor( + typeof(DiAuthorityPluginRegistrar).Assembly, + typeof(DiAuthorityPluginRegistrar).Assembly.Location); + + var summary = AuthorityPluginLoader.RegisterPluginsCore( + services, + hostConfiguration, + new[] { pluginContext }, + new[] { descriptor }, + Array.Empty(), + NullLogger.Instance); + + Assert.Contains("di-test", summary.RegisteredPlugins); + + var provider = services.BuildServiceProvider(); + var dependent = provider.GetRequiredService(); + Assert.True(dependent.LoggerWasResolved); + Assert.True(dependent.TimeProviderResolved); + } + + private sealed class TestAuthorityPluginRegistrar : IAuthorityPluginRegistrar + { + public const string PluginTypeIdentifier = "test-plugin"; + + public string PluginType => PluginTypeIdentifier; + + public void Register(AuthorityPluginRegistrationContext context) + { + context.Services.AddSingleton(); + } + } + + private sealed class TestMarkerService + { + } + + private sealed class DiAuthorityPluginRegistrar : IAuthorityPluginRegistrar + { + public const string PluginTypeIdentifier = "test-plugin-di"; + + private readonly ILogger logger; + private readonly TimeProvider timeProvider; + + public DiAuthorityPluginRegistrar(ILogger logger, TimeProvider timeProvider) + { + this.logger = logger; + this.timeProvider = timeProvider; + } + + public string PluginType => PluginTypeIdentifier; + + public void Register(AuthorityPluginRegistrationContext context) + { + context.Services.AddSingleton(new DependentService(logger != null, timeProvider != null)); + } + } + + private sealed class DependentService + { + public DependentService(bool loggerResolved, bool timeProviderResolved) + { + LoggerWasResolved = loggerResolved; + TimeProviderResolved = 
timeProviderResolved; + } + + public bool LoggerWasResolved { get; } + + public bool TimeProviderResolved { get; } + } +} diff --git a/src/StellaOps.Authority/StellaOps.Authority.Tests/RateLimiting/AuthorityRateLimiterIntegrationTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/RateLimiting/AuthorityRateLimiterIntegrationTests.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Tests/RateLimiting/AuthorityRateLimiterIntegrationTests.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/RateLimiting/AuthorityRateLimiterIntegrationTests.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Tests/RateLimiting/AuthorityRateLimiterMetadataAccessorTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/RateLimiting/AuthorityRateLimiterMetadataAccessorTests.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Authority.Tests/RateLimiting/AuthorityRateLimiterMetadataAccessorTests.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/RateLimiting/AuthorityRateLimiterMetadataAccessorTests.cs index 2b884b6f..d23e8ca0 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Tests/RateLimiting/AuthorityRateLimiterMetadataAccessorTests.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/RateLimiting/AuthorityRateLimiterMetadataAccessorTests.cs @@ -1,36 +1,36 @@ -using Microsoft.AspNetCore.Http; -using StellaOps.Authority.RateLimiting; -using Xunit; - -namespace StellaOps.Authority.Tests.RateLimiting; - -public class AuthorityRateLimiterMetadataAccessorTests -{ - [Fact] - public void SetClientId_UpdatesFeatureMetadata() - { - var context = new DefaultHttpContext(); - var feature = new AuthorityRateLimiterFeature(new AuthorityRateLimiterMetadata()); - context.Features.Set(feature); - - var accessor = new AuthorityRateLimiterMetadataAccessor(new HttpContextAccessor { HttpContext = context }); - - accessor.SetClientId("client-123"); - accessor.SetTag("custom", "tag"); - accessor.SetSubjectId("subject-1"); - accessor.SetTenant("Tenant-Alpha"); - accessor.SetProject("Project-Beta"); - - var metadata = accessor.GetMetadata(); - Assert.NotNull(metadata); - Assert.Equal("client-123", metadata!.ClientId); - Assert.Equal("subject-1", metadata.SubjectId); - Assert.Equal("client-123", metadata.Tags["authority.client_id"]); - Assert.Equal("subject-1", metadata.Tags["authority.subject_id"]); - Assert.Equal("tenant-alpha", metadata.Tenant); - Assert.Equal("tenant-alpha", metadata.Tags["authority.tenant"]); - Assert.Equal("project-beta", metadata.Project); - Assert.Equal("project-beta", metadata.Tags["authority.project"]); - Assert.Equal("tag", metadata.Tags["custom"]); - } -} +using Microsoft.AspNetCore.Http; +using StellaOps.Authority.RateLimiting; +using Xunit; + +namespace StellaOps.Authority.Tests.RateLimiting; + +public class AuthorityRateLimiterMetadataAccessorTests +{ + [Fact] + public void SetClientId_UpdatesFeatureMetadata() + { + var context = new DefaultHttpContext(); + var feature = new AuthorityRateLimiterFeature(new AuthorityRateLimiterMetadata()); + context.Features.Set(feature); + + var accessor = new AuthorityRateLimiterMetadataAccessor(new HttpContextAccessor { HttpContext = context }); + + accessor.SetClientId("client-123"); + accessor.SetTag("custom", "tag"); + accessor.SetSubjectId("subject-1"); + accessor.SetTenant("Tenant-Alpha"); + accessor.SetProject("Project-Beta"); + + var metadata = accessor.GetMetadata(); + Assert.NotNull(metadata); + 
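// Identifiers set on the accessor should be mirrored into the rate-limiter tag dictionary, with tenant and project values normalised to lower case. +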
Assert.Equal("client-123", metadata!.ClientId); + Assert.Equal("subject-1", metadata.SubjectId); + Assert.Equal("client-123", metadata.Tags["authority.client_id"]); + Assert.Equal("subject-1", metadata.Tags["authority.subject_id"]); + Assert.Equal("tenant-alpha", metadata.Tenant); + Assert.Equal("tenant-alpha", metadata.Tags["authority.tenant"]); + Assert.Equal("project-beta", metadata.Project); + Assert.Equal("project-beta", metadata.Tags["authority.project"]); + Assert.Equal("tag", metadata.Tags["custom"]); + } +} diff --git a/src/StellaOps.Authority/StellaOps.Authority.Tests/RateLimiting/AuthorityRateLimiterMetadataMiddlewareTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/RateLimiting/AuthorityRateLimiterMetadataMiddlewareTests.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Tests/RateLimiting/AuthorityRateLimiterMetadataMiddlewareTests.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/RateLimiting/AuthorityRateLimiterMetadataMiddlewareTests.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Tests/RateLimiting/AuthorityRateLimiterTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/RateLimiting/AuthorityRateLimiterTests.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Tests/RateLimiting/AuthorityRateLimiterTests.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/RateLimiting/AuthorityRateLimiterTests.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Tests/Signing/AuthoritySigningKeyManagerTests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Signing/AuthoritySigningKeyManagerTests.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.Tests/Signing/AuthoritySigningKeyManagerTests.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/Signing/AuthoritySigningKeyManagerTests.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority.Tests/StellaOps.Authority.Tests.csproj b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/StellaOps.Authority.Tests.csproj similarity index 73% rename from src/StellaOps.Authority/StellaOps.Authority.Tests/StellaOps.Authority.Tests.csproj rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/StellaOps.Authority.Tests.csproj index bd00628f..a83d6b60 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Tests/StellaOps.Authority.Tests.csproj +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/StellaOps.Authority.Tests.csproj @@ -1,16 +1,17 @@ - - - net10.0 - enable - enable - false - - - - - - - - - - + + + + net10.0 + enable + enable + false + + + + + + + + + + \ No newline at end of file diff --git a/src/StellaOps.Authority/StellaOps.Authority.Tests/TestEnvironment.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/TestEnvironment.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Authority.Tests/TestEnvironment.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/TestEnvironment.cs index 69d4b0d1..d4119693 100644 --- a/src/StellaOps.Authority/StellaOps.Authority.Tests/TestEnvironment.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/TestEnvironment.cs @@ -1,13 +1,13 @@ -using System; -using System.Runtime.CompilerServices; - -internal static class TestEnvironment -{ - [ModuleInitializer] - public static void Initialize() - { - Environment.SetEnvironmentVariable("STELLAOPS_AUTHORITY_ISSUER", "https://authority.test"); 
- Environment.SetEnvironmentVariable("STELLAOPS_AUTHORITY_STORAGE__CONNECTIONSTRING", "mongodb://localhost/authority"); - Environment.SetEnvironmentVariable("STELLAOPS_AUTHORITY_SIGNING__ENABLED", "false"); - } -} +using System; +using System.Runtime.CompilerServices; + +internal static class TestEnvironment +{ + [ModuleInitializer] + public static void Initialize() + { + Environment.SetEnvironmentVariable("STELLAOPS_AUTHORITY_ISSUER", "https://authority.test"); + Environment.SetEnvironmentVariable("STELLAOPS_AUTHORITY_STORAGE__CONNECTIONSTRING", "mongodb://localhost/authority"); + Environment.SetEnvironmentVariable("STELLAOPS_AUTHORITY_SIGNING__ENABLED", "false"); + } +} diff --git a/src/StellaOps.Authority/StellaOps.Authority.sln b/src/Authority/StellaOps.Authority/StellaOps.Authority.sln similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority.sln rename to src/Authority/StellaOps.Authority/StellaOps.Authority.sln diff --git a/src/StellaOps.Authority/StellaOps.Authority/Audit/AuthorityAuditSink.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Audit/AuthorityAuditSink.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Authority/Audit/AuthorityAuditSink.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/Audit/AuthorityAuditSink.cs index 3b7451f9..18d3fc3c 100644 --- a/src/StellaOps.Authority/StellaOps.Authority/Audit/AuthorityAuditSink.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/Audit/AuthorityAuditSink.cs @@ -1,237 +1,237 @@ -using System; -using System.Collections.Generic; -using System.Globalization; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using StellaOps.Authority.Storage.Mongo.Documents; -using StellaOps.Authority.Storage.Mongo.Stores; -using StellaOps.Cryptography.Audit; - -namespace StellaOps.Authority.Audit; - -internal sealed class AuthorityAuditSink : IAuthEventSink -{ - private static readonly StringComparer OrdinalComparer = StringComparer.Ordinal; - - private readonly IAuthorityLoginAttemptStore loginAttemptStore; - private readonly ILogger logger; - - public AuthorityAuditSink( - IAuthorityLoginAttemptStore loginAttemptStore, - ILogger logger) - { - this.loginAttemptStore = loginAttemptStore ?? throw new ArgumentNullException(nameof(loginAttemptStore)); - this.logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); - } - - public async ValueTask WriteAsync(AuthEventRecord record, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(record); - - var logState = BuildLogScope(record); - using (logger.BeginScope(logState)) - { - logger.LogInformation( - "Authority audit event {EventType} emitted with outcome {Outcome}.", - record.EventType, - NormalizeOutcome(record.Outcome)); - } - - var document = MapToDocument(record); - await loginAttemptStore.InsertAsync(document, cancellationToken).ConfigureAwait(false); - } - - private static AuthorityLoginAttemptDocument MapToDocument(AuthEventRecord record) - { - var document = new AuthorityLoginAttemptDocument - { - EventType = record.EventType, - Outcome = NormalizeOutcome(record.Outcome), - CorrelationId = Normalize(record.CorrelationId), - SubjectId = record.Subject?.SubjectId.Value, - Username = record.Subject?.Username.Value, - ClientId = record.Client?.ClientId.Value, - Plugin = record.Client?.Provider.Value, - Successful = record.Outcome == AuthEventOutcome.Success, - Reason = Normalize(record.Reason), - RemoteAddress = record.Network?.RemoteAddress.Value ?? record.Network?.ForwardedFor.Value, - OccurredAt = record.OccurredAt - }; - - if (record.Tenant.HasValue) - { - document.Tenant = record.Tenant.Value; - } - - if (record.Scopes is { Count: > 0 }) - { - document.Scopes = record.Scopes - .Where(static scope => !string.IsNullOrWhiteSpace(scope)) - .Select(static scope => scope.Trim()) - .Where(static scope => scope.Length > 0) - .Distinct(OrdinalComparer) - .OrderBy(static scope => scope, OrdinalComparer) - .ToList(); - } - - var properties = new List(); - - if (record.Subject is { } subject) - { - AddProperty(properties, "subject.display_name", subject.DisplayName); - AddProperty(properties, "subject.realm", subject.Realm); - - if (subject.Attributes is { Count: > 0 }) - { - foreach (var attribute in subject.Attributes) - { - AddProperty(properties, $"subject.attr.{attribute.Name}", attribute.Value); - } - } - } - - if (record.Client is { } client) - { - AddProperty(properties, "client.name", client.Name); - } - - if (record.Network is { } network) - { - AddProperty(properties, "network.remote", network.RemoteAddress); - AddProperty(properties, "network.forwarded_for", network.ForwardedFor); - AddProperty(properties, "network.user_agent", network.UserAgent); - } - - if (record.Properties is { Count: > 0 }) - { - foreach (var property in record.Properties) - { - AddProperty(properties, property.Name, property.Value); - } - } - - if (properties.Count > 0) - { - document.Properties = properties; - } - - return document; - } - - private static IReadOnlyCollection> BuildLogScope(AuthEventRecord record) - { - var entries = new List> - { - new("audit.event_type", record.EventType), - new("audit.outcome", NormalizeOutcome(record.Outcome)), - new("audit.timestamp", record.OccurredAt.ToString("O", CultureInfo.InvariantCulture)) - }; - - AddValue(entries, "audit.correlation_id", Normalize(record.CorrelationId)); - AddValue(entries, "audit.reason", Normalize(record.Reason)); - - if (record.Subject is { } subject) - { - AddClassified(entries, "audit.subject.id", subject.SubjectId); - AddClassified(entries, "audit.subject.username", subject.Username); - AddClassified(entries, "audit.subject.display_name", subject.DisplayName); - AddClassified(entries, "audit.subject.realm", subject.Realm); - } - - if (record.Client is { } client) - { - AddClassified(entries, "audit.client.id", client.ClientId); 
- AddClassified(entries, "audit.client.name", client.Name); - AddClassified(entries, "audit.client.provider", client.Provider); - } - - AddClassified(entries, "audit.tenant", record.Tenant); - - if (record.Network is { } network) - { - AddClassified(entries, "audit.network.remote", network.RemoteAddress); - AddClassified(entries, "audit.network.forwarded_for", network.ForwardedFor); - AddClassified(entries, "audit.network.user_agent", network.UserAgent); - } - - if (record.Scopes is { Count: > 0 }) - { - entries.Add(new KeyValuePair( - "audit.scopes", - record.Scopes.Where(static scope => !string.IsNullOrWhiteSpace(scope)).ToArray())); - } - - if (record.Properties is { Count: > 0 }) - { - foreach (var property in record.Properties) - { - AddClassified(entries, $"audit.property.{property.Name}", property.Value); - } - } - - return entries; - } - - private static void AddProperty(ICollection properties, string name, ClassifiedString value) - { - if (!value.HasValue || string.IsNullOrWhiteSpace(name)) - { - return; - } - - properties.Add(new AuthorityLoginAttemptPropertyDocument - { - Name = name, - Value = value.Value, - Classification = NormalizeClassification(value.Classification) - }); - } - - private static void AddValue(ICollection> entries, string key, string? value) - { - if (string.IsNullOrWhiteSpace(key) || string.IsNullOrWhiteSpace(value)) - { - return; - } - - entries.Add(new KeyValuePair(key, value)); - } - - private static void AddClassified(ICollection> entries, string key, ClassifiedString value) - { - if (!value.HasValue || string.IsNullOrWhiteSpace(key)) - { - return; - } - - entries.Add(new KeyValuePair(key, new - { - value.Value, - classification = NormalizeClassification(value.Classification) - })); - } - - private static string NormalizeOutcome(AuthEventOutcome outcome) - => outcome switch - { - AuthEventOutcome.Success => "success", - AuthEventOutcome.Failure => "failure", - AuthEventOutcome.LockedOut => "locked_out", - AuthEventOutcome.RateLimited => "rate_limited", - AuthEventOutcome.Error => "error", - _ => "unknown" - }; - - private static string NormalizeClassification(AuthEventDataClassification classification) - => classification switch - { - AuthEventDataClassification.Personal => "personal", - AuthEventDataClassification.Sensitive => "sensitive", - _ => "none" - }; - - private static string? Normalize(string? value) - => string.IsNullOrWhiteSpace(value) ? null : value.Trim(); -} +using System; +using System.Collections.Generic; +using System.Globalization; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using StellaOps.Authority.Storage.Mongo.Documents; +using StellaOps.Authority.Storage.Mongo.Stores; +using StellaOps.Cryptography.Audit; + +namespace StellaOps.Authority.Audit; + +internal sealed class AuthorityAuditSink : IAuthEventSink +{ + private static readonly StringComparer OrdinalComparer = StringComparer.Ordinal; + + private readonly IAuthorityLoginAttemptStore loginAttemptStore; + private readonly ILogger logger; + + public AuthorityAuditSink( + IAuthorityLoginAttemptStore loginAttemptStore, + ILogger logger) + { + this.loginAttemptStore = loginAttemptStore ?? throw new ArgumentNullException(nameof(loginAttemptStore)); + this.logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + } + + public async ValueTask WriteAsync(AuthEventRecord record, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(record); + + var logState = BuildLogScope(record); + using (logger.BeginScope(logState)) + { + logger.LogInformation( + "Authority audit event {EventType} emitted with outcome {Outcome}.", + record.EventType, + NormalizeOutcome(record.Outcome)); + } + + var document = MapToDocument(record); + await loginAttemptStore.InsertAsync(document, cancellationToken).ConfigureAwait(false); + } + + private static AuthorityLoginAttemptDocument MapToDocument(AuthEventRecord record) + { + var document = new AuthorityLoginAttemptDocument + { + EventType = record.EventType, + Outcome = NormalizeOutcome(record.Outcome), + CorrelationId = Normalize(record.CorrelationId), + SubjectId = record.Subject?.SubjectId.Value, + Username = record.Subject?.Username.Value, + ClientId = record.Client?.ClientId.Value, + Plugin = record.Client?.Provider.Value, + Successful = record.Outcome == AuthEventOutcome.Success, + Reason = Normalize(record.Reason), + RemoteAddress = record.Network?.RemoteAddress.Value ?? record.Network?.ForwardedFor.Value, + OccurredAt = record.OccurredAt + }; + + if (record.Tenant.HasValue) + { + document.Tenant = record.Tenant.Value; + } + + if (record.Scopes is { Count: > 0 }) + { + document.Scopes = record.Scopes + .Where(static scope => !string.IsNullOrWhiteSpace(scope)) + .Select(static scope => scope.Trim()) + .Where(static scope => scope.Length > 0) + .Distinct(OrdinalComparer) + .OrderBy(static scope => scope, OrdinalComparer) + .ToList(); + } + + var properties = new List(); + + if (record.Subject is { } subject) + { + AddProperty(properties, "subject.display_name", subject.DisplayName); + AddProperty(properties, "subject.realm", subject.Realm); + + if (subject.Attributes is { Count: > 0 }) + { + foreach (var attribute in subject.Attributes) + { + AddProperty(properties, $"subject.attr.{attribute.Name}", attribute.Value); + } + } + } + + if (record.Client is { } client) + { + AddProperty(properties, "client.name", client.Name); + } + + if (record.Network is { } network) + { + AddProperty(properties, "network.remote", network.RemoteAddress); + AddProperty(properties, "network.forwarded_for", network.ForwardedFor); + AddProperty(properties, "network.user_agent", network.UserAgent); + } + + if (record.Properties is { Count: > 0 }) + { + foreach (var property in record.Properties) + { + AddProperty(properties, property.Name, property.Value); + } + } + + if (properties.Count > 0) + { + document.Properties = properties; + } + + return document; + } + + private static IReadOnlyCollection> BuildLogScope(AuthEventRecord record) + { + var entries = new List> + { + new("audit.event_type", record.EventType), + new("audit.outcome", NormalizeOutcome(record.Outcome)), + new("audit.timestamp", record.OccurredAt.ToString("O", CultureInfo.InvariantCulture)) + }; + + AddValue(entries, "audit.correlation_id", Normalize(record.CorrelationId)); + AddValue(entries, "audit.reason", Normalize(record.Reason)); + + if (record.Subject is { } subject) + { + AddClassified(entries, "audit.subject.id", subject.SubjectId); + AddClassified(entries, "audit.subject.username", subject.Username); + AddClassified(entries, "audit.subject.display_name", subject.DisplayName); + AddClassified(entries, "audit.subject.realm", subject.Realm); + } + + if (record.Client is { } client) + { + AddClassified(entries, "audit.client.id", client.ClientId); 
+ AddClassified(entries, "audit.client.name", client.Name); + AddClassified(entries, "audit.client.provider", client.Provider); + } + + AddClassified(entries, "audit.tenant", record.Tenant); + + if (record.Network is { } network) + { + AddClassified(entries, "audit.network.remote", network.RemoteAddress); + AddClassified(entries, "audit.network.forwarded_for", network.ForwardedFor); + AddClassified(entries, "audit.network.user_agent", network.UserAgent); + } + + if (record.Scopes is { Count: > 0 }) + { + entries.Add(new KeyValuePair( + "audit.scopes", + record.Scopes.Where(static scope => !string.IsNullOrWhiteSpace(scope)).ToArray())); + } + + if (record.Properties is { Count: > 0 }) + { + foreach (var property in record.Properties) + { + AddClassified(entries, $"audit.property.{property.Name}", property.Value); + } + } + + return entries; + } + + private static void AddProperty(ICollection properties, string name, ClassifiedString value) + { + if (!value.HasValue || string.IsNullOrWhiteSpace(name)) + { + return; + } + + properties.Add(new AuthorityLoginAttemptPropertyDocument + { + Name = name, + Value = value.Value, + Classification = NormalizeClassification(value.Classification) + }); + } + + private static void AddValue(ICollection> entries, string key, string? value) + { + if (string.IsNullOrWhiteSpace(key) || string.IsNullOrWhiteSpace(value)) + { + return; + } + + entries.Add(new KeyValuePair(key, value)); + } + + private static void AddClassified(ICollection> entries, string key, ClassifiedString value) + { + if (!value.HasValue || string.IsNullOrWhiteSpace(key)) + { + return; + } + + entries.Add(new KeyValuePair(key, new + { + value.Value, + classification = NormalizeClassification(value.Classification) + })); + } + + private static string NormalizeOutcome(AuthEventOutcome outcome) + => outcome switch + { + AuthEventOutcome.Success => "success", + AuthEventOutcome.Failure => "failure", + AuthEventOutcome.LockedOut => "locked_out", + AuthEventOutcome.RateLimited => "rate_limited", + AuthEventOutcome.Error => "error", + _ => "unknown" + }; + + private static string NormalizeClassification(AuthEventDataClassification classification) + => classification switch + { + AuthEventDataClassification.Personal => "personal", + AuthEventDataClassification.Sensitive => "sensitive", + _ => "none" + }; + + private static string? Normalize(string? value) + => string.IsNullOrWhiteSpace(value) ? 
null : value.Trim(); +} diff --git a/src/StellaOps.Authority/StellaOps.Authority/AuthorityHttpHeaders.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/AuthorityHttpHeaders.cs similarity index 96% rename from src/StellaOps.Authority/StellaOps.Authority/AuthorityHttpHeaders.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/AuthorityHttpHeaders.cs index bf39edad..8ba06946 100644 --- a/src/StellaOps.Authority/StellaOps.Authority/AuthorityHttpHeaders.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/AuthorityHttpHeaders.cs @@ -1,7 +1,7 @@ -namespace StellaOps.Authority; - -internal static class AuthorityHttpHeaders -{ - public const string Tenant = "X-StellaOps-Tenant"; - public const string Project = "X-StellaOps-Project"; -} +namespace StellaOps.Authority; + +internal static class AuthorityHttpHeaders +{ + public const string Tenant = "X-StellaOps-Tenant"; + public const string Project = "X-StellaOps-Project"; +} diff --git a/src/StellaOps.Authority/StellaOps.Authority/AuthorityIdentityProviderRegistry.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/AuthorityIdentityProviderRegistry.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Authority/AuthorityIdentityProviderRegistry.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/AuthorityIdentityProviderRegistry.cs index 2c9b4a06..5dca8615 100644 --- a/src/StellaOps.Authority/StellaOps.Authority/AuthorityIdentityProviderRegistry.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/AuthorityIdentityProviderRegistry.cs @@ -1,146 +1,146 @@ -using System.Collections.ObjectModel; -using System.Diagnostics.CodeAnalysis; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging; -using StellaOps.Authority.Plugins.Abstractions; - -namespace StellaOps.Authority; - -internal sealed class AuthorityIdentityProviderRegistry : IAuthorityIdentityProviderRegistry -{ - private readonly IServiceProvider serviceProvider; - private readonly IReadOnlyDictionary providersByName; - private readonly ReadOnlyCollection providers; - private readonly ReadOnlyCollection passwordProviders; - private readonly ReadOnlyCollection mfaProviders; - private readonly ReadOnlyCollection clientProvisioningProviders; - - public AuthorityIdentityProviderRegistry( - IServiceProvider serviceProvider, - ILogger logger) - { - this.serviceProvider = serviceProvider ?? throw new ArgumentNullException(nameof(serviceProvider)); - logger = logger ?? throw new ArgumentNullException(nameof(logger)); - - using var scope = serviceProvider.CreateScope(); - var providerInstances = scope.ServiceProvider.GetServices(); - - var orderedProviders = providerInstances? - .Where(static p => p is not null) - .OrderBy(static p => p.Name, StringComparer.OrdinalIgnoreCase) - .ToList() ?? 
new List(); - - var uniqueProviders = new List(orderedProviders.Count); - var password = new List(); - var mfa = new List(); - var clientProvisioning = new List(); - - var dictionary = new Dictionary(StringComparer.OrdinalIgnoreCase); - - foreach (var provider in orderedProviders) - { - if (string.IsNullOrWhiteSpace(provider.Name)) - { - logger.LogWarning( - "Identity provider plugin of type '{PluginType}' was registered with an empty name and will be ignored.", - provider.Type); - continue; - } - - var metadata = new AuthorityIdentityProviderMetadata(provider.Name, provider.Type, provider.Capabilities); - - if (!dictionary.TryAdd(provider.Name, metadata)) - { - logger.LogWarning( - "Duplicate identity provider name '{PluginName}' detected; ignoring additional registration for type '{PluginType}'.", - provider.Name, - provider.Type); - continue; - } - - uniqueProviders.Add(metadata); - - if (metadata.Capabilities.SupportsPassword) - { - password.Add(metadata); - } - - if (metadata.Capabilities.SupportsMfa) - { - mfa.Add(metadata); - } - - if (metadata.Capabilities.SupportsClientProvisioning) - { - clientProvisioning.Add(metadata); - } - } - - providersByName = dictionary; - providers = new ReadOnlyCollection(uniqueProviders); - passwordProviders = new ReadOnlyCollection(password); - mfaProviders = new ReadOnlyCollection(mfa); - clientProvisioningProviders = new ReadOnlyCollection(clientProvisioning); - - AggregateCapabilities = new AuthorityIdentityProviderCapabilities( - SupportsPassword: passwordProviders.Count > 0, - SupportsMfa: mfaProviders.Count > 0, - SupportsClientProvisioning: clientProvisioningProviders.Count > 0); - } - - public IReadOnlyCollection Providers => providers; - - public IReadOnlyCollection PasswordProviders => passwordProviders; - - public IReadOnlyCollection MfaProviders => mfaProviders; - - public IReadOnlyCollection ClientProvisioningProviders => clientProvisioningProviders; - - public AuthorityIdentityProviderCapabilities AggregateCapabilities { get; } - - public bool TryGet(string name, [NotNullWhen(true)] out AuthorityIdentityProviderMetadata? 
metadata) - { - if (string.IsNullOrWhiteSpace(name)) - { - metadata = null; - return false; - } - - return providersByName.TryGetValue(name, out metadata); - } - - public async ValueTask AcquireAsync(string name, CancellationToken cancellationToken) - { - if (!providersByName.TryGetValue(name, out var metadata)) - { - throw new KeyNotFoundException($"Identity provider plugin '{name}' is not registered."); - } - - cancellationToken.ThrowIfCancellationRequested(); - - var scope = serviceProvider.CreateAsyncScope(); - try - { - var provider = scope.ServiceProvider - .GetServices() - .FirstOrDefault(p => string.Equals(p.Name, metadata.Name, StringComparison.OrdinalIgnoreCase)); - - if (provider is null) - { - await scope.DisposeAsync().ConfigureAwait(false); - throw new InvalidOperationException($"Identity provider plugin '{metadata.Name}' could not be resolved."); - } - - cancellationToken.ThrowIfCancellationRequested(); - return new AuthorityIdentityProviderHandle(scope, metadata, provider); - } - catch - { - await scope.DisposeAsync().ConfigureAwait(false); - throw; - } - } -} +using System.Collections.ObjectModel; +using System.Diagnostics.CodeAnalysis; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using StellaOps.Authority.Plugins.Abstractions; + +namespace StellaOps.Authority; + +internal sealed class AuthorityIdentityProviderRegistry : IAuthorityIdentityProviderRegistry +{ + private readonly IServiceProvider serviceProvider; + private readonly IReadOnlyDictionary providersByName; + private readonly ReadOnlyCollection providers; + private readonly ReadOnlyCollection passwordProviders; + private readonly ReadOnlyCollection mfaProviders; + private readonly ReadOnlyCollection clientProvisioningProviders; + + public AuthorityIdentityProviderRegistry( + IServiceProvider serviceProvider, + ILogger logger) + { + this.serviceProvider = serviceProvider ?? throw new ArgumentNullException(nameof(serviceProvider)); + logger = logger ?? throw new ArgumentNullException(nameof(logger)); + + using var scope = serviceProvider.CreateScope(); + var providerInstances = scope.ServiceProvider.GetServices(); + + var orderedProviders = providerInstances? + .Where(static p => p is not null) + .OrderBy(static p => p.Name, StringComparer.OrdinalIgnoreCase) + .ToList() ?? 
new List(); + + var uniqueProviders = new List(orderedProviders.Count); + var password = new List(); + var mfa = new List(); + var clientProvisioning = new List(); + + var dictionary = new Dictionary(StringComparer.OrdinalIgnoreCase); + + foreach (var provider in orderedProviders) + { + if (string.IsNullOrWhiteSpace(provider.Name)) + { + logger.LogWarning( + "Identity provider plugin of type '{PluginType}' was registered with an empty name and will be ignored.", + provider.Type); + continue; + } + + var metadata = new AuthorityIdentityProviderMetadata(provider.Name, provider.Type, provider.Capabilities); + + if (!dictionary.TryAdd(provider.Name, metadata)) + { + logger.LogWarning( + "Duplicate identity provider name '{PluginName}' detected; ignoring additional registration for type '{PluginType}'.", + provider.Name, + provider.Type); + continue; + } + + uniqueProviders.Add(metadata); + + if (metadata.Capabilities.SupportsPassword) + { + password.Add(metadata); + } + + if (metadata.Capabilities.SupportsMfa) + { + mfa.Add(metadata); + } + + if (metadata.Capabilities.SupportsClientProvisioning) + { + clientProvisioning.Add(metadata); + } + } + + providersByName = dictionary; + providers = new ReadOnlyCollection(uniqueProviders); + passwordProviders = new ReadOnlyCollection(password); + mfaProviders = new ReadOnlyCollection(mfa); + clientProvisioningProviders = new ReadOnlyCollection(clientProvisioning); + + AggregateCapabilities = new AuthorityIdentityProviderCapabilities( + SupportsPassword: passwordProviders.Count > 0, + SupportsMfa: mfaProviders.Count > 0, + SupportsClientProvisioning: clientProvisioningProviders.Count > 0); + } + + public IReadOnlyCollection Providers => providers; + + public IReadOnlyCollection PasswordProviders => passwordProviders; + + public IReadOnlyCollection MfaProviders => mfaProviders; + + public IReadOnlyCollection ClientProvisioningProviders => clientProvisioningProviders; + + public AuthorityIdentityProviderCapabilities AggregateCapabilities { get; } + + public bool TryGet(string name, [NotNullWhen(true)] out AuthorityIdentityProviderMetadata? 
metadata) + { + if (string.IsNullOrWhiteSpace(name)) + { + metadata = null; + return false; + } + + return providersByName.TryGetValue(name, out metadata); + } + + public async ValueTask AcquireAsync(string name, CancellationToken cancellationToken) + { + if (!providersByName.TryGetValue(name, out var metadata)) + { + throw new KeyNotFoundException($"Identity provider plugin '{name}' is not registered."); + } + + cancellationToken.ThrowIfCancellationRequested(); + + var scope = serviceProvider.CreateAsyncScope(); + try + { + var provider = scope.ServiceProvider + .GetServices() + .FirstOrDefault(p => string.Equals(p.Name, metadata.Name, StringComparison.OrdinalIgnoreCase)); + + if (provider is null) + { + await scope.DisposeAsync().ConfigureAwait(false); + throw new InvalidOperationException($"Identity provider plugin '{metadata.Name}' could not be resolved."); + } + + cancellationToken.ThrowIfCancellationRequested(); + return new AuthorityIdentityProviderHandle(scope, metadata, provider); + } + catch + { + await scope.DisposeAsync().ConfigureAwait(false); + throw; + } + } +} diff --git a/src/StellaOps.Authority/StellaOps.Authority/AuthorityPluginRegistry.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/AuthorityPluginRegistry.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority/AuthorityPluginRegistry.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/AuthorityPluginRegistry.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority/AuthorityRateLimiter.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/AuthorityRateLimiter.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority/AuthorityRateLimiter.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/AuthorityRateLimiter.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority/AuthorityTelemetryConfiguration.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/AuthorityTelemetryConfiguration.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority/AuthorityTelemetryConfiguration.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/AuthorityTelemetryConfiguration.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority/Bootstrap/BootstrapApiKeyFilter.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Bootstrap/BootstrapApiKeyFilter.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority/Bootstrap/BootstrapApiKeyFilter.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/Bootstrap/BootstrapApiKeyFilter.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority/Bootstrap/BootstrapInviteCleanupService.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Bootstrap/BootstrapInviteCleanupService.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority/Bootstrap/BootstrapInviteCleanupService.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/Bootstrap/BootstrapInviteCleanupService.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority/Bootstrap/BootstrapRequests.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Bootstrap/BootstrapRequests.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority/Bootstrap/BootstrapRequests.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/Bootstrap/BootstrapRequests.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority/Console/ConsoleEndpointExtensions.cs 
b/src/Authority/StellaOps.Authority/StellaOps.Authority/Console/ConsoleEndpointExtensions.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Authority/Console/ConsoleEndpointExtensions.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/Console/ConsoleEndpointExtensions.cs index 4ead891c..dab43f90 100644 --- a/src/StellaOps.Authority/StellaOps.Authority/Console/ConsoleEndpointExtensions.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/Console/ConsoleEndpointExtensions.cs @@ -1,547 +1,547 @@ -using System.Collections.Generic; -using System.Diagnostics; -using System.Globalization; -using System.Net; -using System.Security.Claims; -using System.Linq; -using Microsoft.AspNetCore.Builder; -using Microsoft.AspNetCore.Http; -using Microsoft.Extensions.Primitives; -using OpenIddict.Abstractions; -using StellaOps.Auth.Abstractions; -using StellaOps.Auth.ServerIntegration; -using StellaOps.Cryptography.Audit; -using StellaOps.Authority.Tenants; - -namespace StellaOps.Authority.Console; - -internal static class ConsoleEndpointExtensions -{ - public static void MapConsoleEndpoints(this WebApplication app) - { - ArgumentNullException.ThrowIfNull(app); - - var group = app.MapGroup("/console") - .RequireAuthorization() - .WithTags("Console"); - - group.AddEndpointFilter(new TenantHeaderFilter()); - - group.MapGet("/tenants", GetTenants) - .RequireAuthorization(policy => policy.RequireStellaOpsScopes(StellaOpsScopes.AuthorityTenantsRead)) - .WithName("ConsoleTenants") - .WithSummary("List the tenant metadata for the authenticated principal."); - - group.MapGet("/profile", GetProfile) - .RequireAuthorization(policy => policy.RequireStellaOpsScopes(StellaOpsScopes.UiRead)) - .WithName("ConsoleProfile") - .WithSummary("Return the authenticated principal profile metadata."); - - group.MapPost("/token/introspect", IntrospectToken) - .RequireAuthorization(policy => policy.RequireStellaOpsScopes(StellaOpsScopes.UiRead)) - .WithName("ConsoleTokenIntrospect") - .WithSummary("Introspect the current access token and return expiry, scope, and tenant metadata."); - } - - private static async Task GetTenants( - HttpContext httpContext, - IAuthorityTenantCatalog tenantCatalog, - IAuthEventSink auditSink, - TimeProvider timeProvider, - CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(httpContext); - ArgumentNullException.ThrowIfNull(tenantCatalog); - ArgumentNullException.ThrowIfNull(auditSink); - ArgumentNullException.ThrowIfNull(timeProvider); - - var normalizedTenant = TenantHeaderFilter.GetTenant(httpContext); - if (string.IsNullOrWhiteSpace(normalizedTenant)) - { - await WriteAuditAsync( - httpContext, - auditSink, - timeProvider, - "authority.console.tenants.read", - AuthEventOutcome.Failure, - "tenant_header_missing", - BuildProperties(("tenant.header", null)), - cancellationToken).ConfigureAwait(false); - - return Results.BadRequest(new { error = "tenant_header_missing", message = $"Header '{AuthorityHttpHeaders.Tenant}' is required." 
}); - } - - var tenants = tenantCatalog.GetTenants(); - var selected = tenants.FirstOrDefault(tenant => - string.Equals(tenant.Id, normalizedTenant, StringComparison.Ordinal)); - - if (selected is null) - { - await WriteAuditAsync( - httpContext, - auditSink, - timeProvider, - "authority.console.tenants.read", - AuthEventOutcome.Failure, - "tenant_not_configured", - BuildProperties(("tenant.requested", normalizedTenant)), - cancellationToken).ConfigureAwait(false); - - return Results.NotFound(new { error = "tenant_not_configured", message = $"Tenant '{normalizedTenant}' is not configured." }); - } - - await WriteAuditAsync( - httpContext, - auditSink, - timeProvider, - "authority.console.tenants.read", - AuthEventOutcome.Success, - null, - BuildProperties(("tenant.resolved", selected.Id)), - cancellationToken).ConfigureAwait(false); - - var response = new TenantCatalogResponse(new[] { selected }); - return Results.Ok(response); - } - - private static async Task GetProfile( - HttpContext httpContext, - TimeProvider timeProvider, - IAuthEventSink auditSink, - CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(httpContext); - ArgumentNullException.ThrowIfNull(timeProvider); - ArgumentNullException.ThrowIfNull(auditSink); - - var principal = httpContext.User; - if (principal?.Identity?.IsAuthenticated is not true) - { - return Results.Unauthorized(); - } - - var profile = BuildProfile(principal, timeProvider); - await WriteAuditAsync( - httpContext, - auditSink, - timeProvider, - "authority.console.profile.read", - AuthEventOutcome.Success, - null, - BuildProperties(("tenant.resolved", profile.Tenant)), - cancellationToken).ConfigureAwait(false); - - return Results.Ok(profile); - } - - private static async Task IntrospectToken( - HttpContext httpContext, - TimeProvider timeProvider, - IAuthEventSink auditSink, - CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(httpContext); - ArgumentNullException.ThrowIfNull(timeProvider); - ArgumentNullException.ThrowIfNull(auditSink); - - var principal = httpContext.User; - if (principal?.Identity?.IsAuthenticated is not true) - { - return Results.Unauthorized(); - } - - var introspection = BuildTokenIntrospection(principal, timeProvider); - - await WriteAuditAsync( - httpContext, - auditSink, - timeProvider, - "authority.console.token.introspect", - AuthEventOutcome.Success, - null, - BuildProperties( - ("token.active", introspection.Active ? "true" : "false"), - ("token.expires_at", FormatInstant(introspection.ExpiresAt)), - ("tenant.resolved", introspection.Tenant)), - cancellationToken).ConfigureAwait(false); - - return Results.Ok(introspection); - } - - private static ConsoleProfileResponse BuildProfile(ClaimsPrincipal principal, TimeProvider timeProvider) - { - var tenant = Normalize(principal.FindFirstValue(StellaOpsClaimTypes.Tenant)) ?? 
string.Empty; - var subject = Normalize(principal.FindFirstValue(StellaOpsClaimTypes.Subject)); - var username = Normalize(principal.FindFirstValue(OpenIddictConstants.Claims.PreferredUsername)); - var displayName = Normalize(principal.FindFirstValue(OpenIddictConstants.Claims.Name)); - var sessionId = Normalize(principal.FindFirstValue(StellaOpsClaimTypes.SessionId)); - var audiences = ExtractAudiences(principal); - var authenticationMethods = ExtractAuthenticationMethods(principal); - var roles = ExtractRoles(principal); - var scopes = ExtractScopes(principal); - - var issuedAt = ExtractInstant(principal, OpenIddictConstants.Claims.IssuedAt, "iat"); - var authTime = ExtractInstant(principal, OpenIddictConstants.Claims.AuthenticationTime, "auth_time"); - var expiresAt = ExtractInstant(principal, OpenIddictConstants.Claims.ExpiresAt, "exp"); - var now = timeProvider.GetUtcNow(); - var freshAuth = DetermineFreshAuth(principal, now); - - return new ConsoleProfileResponse( - SubjectId: subject, - Username: username, - DisplayName: displayName, - Tenant: tenant, - SessionId: sessionId, - Roles: roles, - Scopes: scopes, - Audiences: audiences, - AuthenticationMethods: authenticationMethods, - IssuedAt: issuedAt, - AuthenticationTime: authTime, - ExpiresAt: expiresAt, - FreshAuth: freshAuth); - } - - private static ConsoleTokenIntrospectionResponse BuildTokenIntrospection(ClaimsPrincipal principal, TimeProvider timeProvider) - { - var now = timeProvider.GetUtcNow(); - var expiresAt = ExtractInstant(principal, OpenIddictConstants.Claims.ExpiresAt, "exp"); - var issuedAt = ExtractInstant(principal, OpenIddictConstants.Claims.IssuedAt, "iat"); - var authTime = ExtractInstant(principal, OpenIddictConstants.Claims.AuthenticationTime, "auth_time"); - var scopes = ExtractScopes(principal); - var audiences = ExtractAudiences(principal); - var tenant = Normalize(principal.FindFirstValue(StellaOpsClaimTypes.Tenant)) ?? string.Empty; - var subject = Normalize(principal.FindFirstValue(StellaOpsClaimTypes.Subject)); - var tokenId = Normalize(principal.FindFirstValue(StellaOpsClaimTypes.TokenId)); - var clientId = Normalize(principal.FindFirstValue(StellaOpsClaimTypes.ClientId)); - var active = expiresAt is null || expiresAt > now; - var freshAuth = DetermineFreshAuth(principal, now); - - return new ConsoleTokenIntrospectionResponse( - Active: active, - Tenant: tenant, - Subject: subject, - ClientId: clientId, - TokenId: tokenId, - Scopes: scopes, - Audiences: audiences, - IssuedAt: issuedAt, - AuthenticationTime: authTime, - ExpiresAt: expiresAt, - FreshAuth: freshAuth); - } - - private static bool DetermineFreshAuth(ClaimsPrincipal principal, DateTimeOffset now) - { - var flag = principal.FindFirst("stellaops:fresh_auth") ?? 
principal.FindFirst("fresh_auth"); - if (flag is not null && bool.TryParse(flag.Value, out var freshFlag)) - { - if (freshFlag) - { - return true; - } - } - - var authTime = ExtractInstant(principal, OpenIddictConstants.Claims.AuthenticationTime, "auth_time"); - if (authTime is null) - { - return false; - } - - var ttlClaim = principal.FindFirst("stellaops:fresh_auth_ttl"); - if (ttlClaim is not null && TimeSpan.TryParse(ttlClaim.Value, CultureInfo.InvariantCulture, out var ttl)) - { - return authTime.Value.Add(ttl) > now; - } - - const int defaultFreshAuthWindowSeconds = 300; - return authTime.Value.AddSeconds(defaultFreshAuthWindowSeconds) > now; - } - - private static IReadOnlyList ExtractRoles(ClaimsPrincipal principal) - { - var roles = principal.FindAll(OpenIddictConstants.Claims.Role) - .Select(static claim => Normalize(claim.Value)) - .Where(static value => value is not null) - .Select(static value => value!) - .Distinct(StringComparer.Ordinal) - .OrderBy(static value => value, StringComparer.Ordinal) - .ToArray(); - - return roles.Length == 0 ? Array.Empty() : roles; - } - - private static IReadOnlyList ExtractScopes(ClaimsPrincipal principal) - { - var set = new HashSet(StringComparer.Ordinal); - - foreach (var claim in principal.FindAll(StellaOpsClaimTypes.ScopeItem)) - { - var normalized = Normalize(claim.Value); - if (normalized is not null) - { - set.Add(normalized); - } - } - - foreach (var claim in principal.FindAll(StellaOpsClaimTypes.Scope)) - { - if (string.IsNullOrWhiteSpace(claim.Value)) - { - continue; - } - - var parts = claim.Value.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); - foreach (var part in parts) - { - var normalized = StellaOpsScopes.Normalize(part); - if (normalized is not null) - { - set.Add(normalized); - } - } - } - - if (set.Count == 0) - { - return Array.Empty(); - } - - return set.OrderBy(static value => value, StringComparer.Ordinal).ToArray(); - } - - private static IReadOnlyList ExtractAudiences(ClaimsPrincipal principal) - { - var audiences = new HashSet(StringComparer.Ordinal); - foreach (var claim in principal.FindAll(StellaOpsClaimTypes.Audience)) - { - if (string.IsNullOrWhiteSpace(claim.Value)) - { - continue; - } - - var parts = claim.Value.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); - foreach (var part in parts) - { - audiences.Add(part); - } - } - - if (audiences.Count == 0) - { - return Array.Empty(); - } - - return audiences.OrderBy(static value => value, StringComparer.Ordinal).ToArray(); - } - - private static IReadOnlyList ExtractAuthenticationMethods(ClaimsPrincipal principal) - { - var methods = principal.FindAll(StellaOpsClaimTypes.AuthenticationMethod) - .Select(static claim => Normalize(claim.Value)) - .Where(static value => value is not null) - .Select(static value => value!) - .Distinct(StringComparer.Ordinal) - .OrderBy(static value => value, StringComparer.Ordinal) - .ToArray(); - - return methods.Length == 0 ? Array.Empty() : methods; - } - - private static DateTimeOffset? ExtractInstant(ClaimsPrincipal principal, string primaryClaim, string fallbackClaim) - { - var claim = principal.FindFirst(primaryClaim) ?? 
principal.FindFirst(fallbackClaim); - if (claim is null || string.IsNullOrWhiteSpace(claim.Value)) - { - return null; - } - - if (long.TryParse(claim.Value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var epoch)) - { - return DateTimeOffset.FromUnixTimeSeconds(epoch); - } - - if (DateTimeOffset.TryParse(claim.Value, CultureInfo.InvariantCulture, DateTimeStyles.AdjustToUniversal, out var parsed)) - { - return parsed; - } - - return null; - } - - private static async Task WriteAuditAsync( - HttpContext httpContext, - IAuthEventSink auditSink, - TimeProvider timeProvider, - string eventType, - AuthEventOutcome outcome, - string? reason, - IReadOnlyList properties, - CancellationToken cancellationToken) - { - var correlationId = Activity.Current?.TraceId.ToString() ?? httpContext.TraceIdentifier; - - var tenant = Normalize(httpContext.User.FindFirstValue(StellaOpsClaimTypes.Tenant)); - var subjectId = Normalize(httpContext.User.FindFirstValue(StellaOpsClaimTypes.Subject)); - var username = Normalize(httpContext.User.FindFirstValue(OpenIddictConstants.Claims.PreferredUsername)); - var displayName = Normalize(httpContext.User.FindFirstValue(OpenIddictConstants.Claims.Name)); - var identityProvider = Normalize(httpContext.User.FindFirstValue(StellaOpsClaimTypes.IdentityProvider)); - var email = Normalize(httpContext.User.FindFirstValue(OpenIddictConstants.Claims.Email)); - - var subjectProperties = new List(); - if (!string.IsNullOrWhiteSpace(email)) - { - subjectProperties.Add(new AuthEventProperty - { - Name = "subject.email", - Value = ClassifiedString.Personal(email) - }); - } - - var subject = subjectId is null && username is null && displayName is null && identityProvider is null && subjectProperties.Count == 0 - ? null - : new AuthEventSubject - { - SubjectId = ClassifiedString.Personal(subjectId), - Username = ClassifiedString.Personal(username), - DisplayName = ClassifiedString.Personal(displayName), - Realm = ClassifiedString.Public(identityProvider), - Attributes = subjectProperties - }; - - var clientId = Normalize(httpContext.User.FindFirstValue(StellaOpsClaimTypes.ClientId)); - var client = string.IsNullOrWhiteSpace(clientId) - ? null - : new AuthEventClient - { - ClientId = ClassifiedString.Personal(clientId), - Name = ClassifiedString.Empty, - Provider = ClassifiedString.Empty - }; - - var network = BuildNetwork(httpContext); - var scopes = ExtractScopes(httpContext.User); - - var record = new AuthEventRecord - { - EventType = eventType, - OccurredAt = timeProvider.GetUtcNow(), - CorrelationId = correlationId, - Outcome = outcome, - Reason = reason, - Subject = subject, - Client = client, - Tenant = ClassifiedString.Public(tenant), - Scopes = scopes, - Network = network, - Properties = properties - }; - - await auditSink.WriteAsync(record, cancellationToken).ConfigureAwait(false); - } - - private static AuthEventNetwork? BuildNetwork(HttpContext httpContext) - { - var remote = httpContext.Connection.RemoteIpAddress; - var remoteAddress = remote is null || Equals(remote, IPAddress.IPv6None) || Equals(remote, IPAddress.None) - ? 
null - : remote.ToString(); - - var forwarded = Normalize(httpContext.Request.Headers[XForwardedForHeader]); - var userAgent = Normalize(httpContext.Request.Headers.UserAgent.ToString()); - - if (string.IsNullOrWhiteSpace(remoteAddress) && - string.IsNullOrWhiteSpace(forwarded) && - string.IsNullOrWhiteSpace(userAgent)) - { - return null; - } - - return new AuthEventNetwork - { - RemoteAddress = ClassifiedString.Personal(remoteAddress), - ForwardedFor = ClassifiedString.Personal(forwarded), - UserAgent = ClassifiedString.Personal(userAgent) - }; - } - - private static IReadOnlyList BuildProperties(params (string Name, string? Value)[] entries) - { - if (entries.Length == 0) - { - return Array.Empty(); - } - - var list = new List(entries.Length); - foreach (var (name, value) in entries) - { - if (string.IsNullOrWhiteSpace(name)) - { - continue; - } - - list.Add(new AuthEventProperty - { - Name = name, - Value = string.IsNullOrWhiteSpace(value) - ? ClassifiedString.Empty - : ClassifiedString.Public(value) - }); - } - - return list.Count == 0 ? Array.Empty() : list; - } - - private static string? Normalize(StringValues values) - { - var value = values.ToString(); - return Normalize(value); - } - - private static string? Normalize(string? input) - { - if (string.IsNullOrWhiteSpace(input)) - { - return null; - } - - return input.Trim(); - } - - private static string? FormatInstant(DateTimeOffset? instant) - { - return instant?.ToString("O", CultureInfo.InvariantCulture); - } - - private const string XForwardedForHeader = "X-Forwarded-For"; -} - -internal sealed record TenantCatalogResponse(IReadOnlyList Tenants); - -internal sealed record ConsoleProfileResponse( - string? SubjectId, - string? Username, - string? DisplayName, - string Tenant, - string? SessionId, - IReadOnlyList Roles, - IReadOnlyList Scopes, - IReadOnlyList Audiences, - IReadOnlyList AuthenticationMethods, - DateTimeOffset? IssuedAt, - DateTimeOffset? AuthenticationTime, - DateTimeOffset? ExpiresAt, - bool FreshAuth); - -internal sealed record ConsoleTokenIntrospectionResponse( - bool Active, - string Tenant, - string? Subject, - string? ClientId, - string? TokenId, - IReadOnlyList Scopes, - IReadOnlyList Audiences, - DateTimeOffset? IssuedAt, - DateTimeOffset? AuthenticationTime, - DateTimeOffset? 
ExpiresAt, - bool FreshAuth); +using System.Collections.Generic; +using System.Diagnostics; +using System.Globalization; +using System.Net; +using System.Security.Claims; +using System.Linq; +using Microsoft.AspNetCore.Builder; +using Microsoft.AspNetCore.Http; +using Microsoft.Extensions.Primitives; +using OpenIddict.Abstractions; +using StellaOps.Auth.Abstractions; +using StellaOps.Auth.ServerIntegration; +using StellaOps.Cryptography.Audit; +using StellaOps.Authority.Tenants; + +namespace StellaOps.Authority.Console; + +internal static class ConsoleEndpointExtensions +{ + public static void MapConsoleEndpoints(this WebApplication app) + { + ArgumentNullException.ThrowIfNull(app); + + var group = app.MapGroup("/console") + .RequireAuthorization() + .WithTags("Console"); + + group.AddEndpointFilter(new TenantHeaderFilter()); + + group.MapGet("/tenants", GetTenants) + .RequireAuthorization(policy => policy.RequireStellaOpsScopes(StellaOpsScopes.AuthorityTenantsRead)) + .WithName("ConsoleTenants") + .WithSummary("List the tenant metadata for the authenticated principal."); + + group.MapGet("/profile", GetProfile) + .RequireAuthorization(policy => policy.RequireStellaOpsScopes(StellaOpsScopes.UiRead)) + .WithName("ConsoleProfile") + .WithSummary("Return the authenticated principal profile metadata."); + + group.MapPost("/token/introspect", IntrospectToken) + .RequireAuthorization(policy => policy.RequireStellaOpsScopes(StellaOpsScopes.UiRead)) + .WithName("ConsoleTokenIntrospect") + .WithSummary("Introspect the current access token and return expiry, scope, and tenant metadata."); + } + + private static async Task GetTenants( + HttpContext httpContext, + IAuthorityTenantCatalog tenantCatalog, + IAuthEventSink auditSink, + TimeProvider timeProvider, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(httpContext); + ArgumentNullException.ThrowIfNull(tenantCatalog); + ArgumentNullException.ThrowIfNull(auditSink); + ArgumentNullException.ThrowIfNull(timeProvider); + + var normalizedTenant = TenantHeaderFilter.GetTenant(httpContext); + if (string.IsNullOrWhiteSpace(normalizedTenant)) + { + await WriteAuditAsync( + httpContext, + auditSink, + timeProvider, + "authority.console.tenants.read", + AuthEventOutcome.Failure, + "tenant_header_missing", + BuildProperties(("tenant.header", null)), + cancellationToken).ConfigureAwait(false); + + return Results.BadRequest(new { error = "tenant_header_missing", message = $"Header '{AuthorityHttpHeaders.Tenant}' is required." }); + } + + var tenants = tenantCatalog.GetTenants(); + var selected = tenants.FirstOrDefault(tenant => + string.Equals(tenant.Id, normalizedTenant, StringComparison.Ordinal)); + + if (selected is null) + { + await WriteAuditAsync( + httpContext, + auditSink, + timeProvider, + "authority.console.tenants.read", + AuthEventOutcome.Failure, + "tenant_not_configured", + BuildProperties(("tenant.requested", normalizedTenant)), + cancellationToken).ConfigureAwait(false); + + return Results.NotFound(new { error = "tenant_not_configured", message = $"Tenant '{normalizedTenant}' is not configured." 
}); + } + + await WriteAuditAsync( + httpContext, + auditSink, + timeProvider, + "authority.console.tenants.read", + AuthEventOutcome.Success, + null, + BuildProperties(("tenant.resolved", selected.Id)), + cancellationToken).ConfigureAwait(false); + + var response = new TenantCatalogResponse(new[] { selected }); + return Results.Ok(response); + } + + private static async Task GetProfile( + HttpContext httpContext, + TimeProvider timeProvider, + IAuthEventSink auditSink, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(httpContext); + ArgumentNullException.ThrowIfNull(timeProvider); + ArgumentNullException.ThrowIfNull(auditSink); + + var principal = httpContext.User; + if (principal?.Identity?.IsAuthenticated is not true) + { + return Results.Unauthorized(); + } + + var profile = BuildProfile(principal, timeProvider); + await WriteAuditAsync( + httpContext, + auditSink, + timeProvider, + "authority.console.profile.read", + AuthEventOutcome.Success, + null, + BuildProperties(("tenant.resolved", profile.Tenant)), + cancellationToken).ConfigureAwait(false); + + return Results.Ok(profile); + } + + private static async Task IntrospectToken( + HttpContext httpContext, + TimeProvider timeProvider, + IAuthEventSink auditSink, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(httpContext); + ArgumentNullException.ThrowIfNull(timeProvider); + ArgumentNullException.ThrowIfNull(auditSink); + + var principal = httpContext.User; + if (principal?.Identity?.IsAuthenticated is not true) + { + return Results.Unauthorized(); + } + + var introspection = BuildTokenIntrospection(principal, timeProvider); + + await WriteAuditAsync( + httpContext, + auditSink, + timeProvider, + "authority.console.token.introspect", + AuthEventOutcome.Success, + null, + BuildProperties( + ("token.active", introspection.Active ? "true" : "false"), + ("token.expires_at", FormatInstant(introspection.ExpiresAt)), + ("tenant.resolved", introspection.Tenant)), + cancellationToken).ConfigureAwait(false); + + return Results.Ok(introspection); + } + + private static ConsoleProfileResponse BuildProfile(ClaimsPrincipal principal, TimeProvider timeProvider) + { + var tenant = Normalize(principal.FindFirstValue(StellaOpsClaimTypes.Tenant)) ?? 
string.Empty; + var subject = Normalize(principal.FindFirstValue(StellaOpsClaimTypes.Subject)); + var username = Normalize(principal.FindFirstValue(OpenIddictConstants.Claims.PreferredUsername)); + var displayName = Normalize(principal.FindFirstValue(OpenIddictConstants.Claims.Name)); + var sessionId = Normalize(principal.FindFirstValue(StellaOpsClaimTypes.SessionId)); + var audiences = ExtractAudiences(principal); + var authenticationMethods = ExtractAuthenticationMethods(principal); + var roles = ExtractRoles(principal); + var scopes = ExtractScopes(principal); + + var issuedAt = ExtractInstant(principal, OpenIddictConstants.Claims.IssuedAt, "iat"); + var authTime = ExtractInstant(principal, OpenIddictConstants.Claims.AuthenticationTime, "auth_time"); + var expiresAt = ExtractInstant(principal, OpenIddictConstants.Claims.ExpiresAt, "exp"); + var now = timeProvider.GetUtcNow(); + var freshAuth = DetermineFreshAuth(principal, now); + + return new ConsoleProfileResponse( + SubjectId: subject, + Username: username, + DisplayName: displayName, + Tenant: tenant, + SessionId: sessionId, + Roles: roles, + Scopes: scopes, + Audiences: audiences, + AuthenticationMethods: authenticationMethods, + IssuedAt: issuedAt, + AuthenticationTime: authTime, + ExpiresAt: expiresAt, + FreshAuth: freshAuth); + } + + private static ConsoleTokenIntrospectionResponse BuildTokenIntrospection(ClaimsPrincipal principal, TimeProvider timeProvider) + { + var now = timeProvider.GetUtcNow(); + var expiresAt = ExtractInstant(principal, OpenIddictConstants.Claims.ExpiresAt, "exp"); + var issuedAt = ExtractInstant(principal, OpenIddictConstants.Claims.IssuedAt, "iat"); + var authTime = ExtractInstant(principal, OpenIddictConstants.Claims.AuthenticationTime, "auth_time"); + var scopes = ExtractScopes(principal); + var audiences = ExtractAudiences(principal); + var tenant = Normalize(principal.FindFirstValue(StellaOpsClaimTypes.Tenant)) ?? string.Empty; + var subject = Normalize(principal.FindFirstValue(StellaOpsClaimTypes.Subject)); + var tokenId = Normalize(principal.FindFirstValue(StellaOpsClaimTypes.TokenId)); + var clientId = Normalize(principal.FindFirstValue(StellaOpsClaimTypes.ClientId)); + var active = expiresAt is null || expiresAt > now; + var freshAuth = DetermineFreshAuth(principal, now); + + return new ConsoleTokenIntrospectionResponse( + Active: active, + Tenant: tenant, + Subject: subject, + ClientId: clientId, + TokenId: tokenId, + Scopes: scopes, + Audiences: audiences, + IssuedAt: issuedAt, + AuthenticationTime: authTime, + ExpiresAt: expiresAt, + FreshAuth: freshAuth); + } + + private static bool DetermineFreshAuth(ClaimsPrincipal principal, DateTimeOffset now) + { + var flag = principal.FindFirst("stellaops:fresh_auth") ?? 
principal.FindFirst("fresh_auth"); + if (flag is not null && bool.TryParse(flag.Value, out var freshFlag)) + { + if (freshFlag) + { + return true; + } + } + + var authTime = ExtractInstant(principal, OpenIddictConstants.Claims.AuthenticationTime, "auth_time"); + if (authTime is null) + { + return false; + } + + var ttlClaim = principal.FindFirst("stellaops:fresh_auth_ttl"); + if (ttlClaim is not null && TimeSpan.TryParse(ttlClaim.Value, CultureInfo.InvariantCulture, out var ttl)) + { + return authTime.Value.Add(ttl) > now; + } + + const int defaultFreshAuthWindowSeconds = 300; + return authTime.Value.AddSeconds(defaultFreshAuthWindowSeconds) > now; + } + + private static IReadOnlyList ExtractRoles(ClaimsPrincipal principal) + { + var roles = principal.FindAll(OpenIddictConstants.Claims.Role) + .Select(static claim => Normalize(claim.Value)) + .Where(static value => value is not null) + .Select(static value => value!) + .Distinct(StringComparer.Ordinal) + .OrderBy(static value => value, StringComparer.Ordinal) + .ToArray(); + + return roles.Length == 0 ? Array.Empty() : roles; + } + + private static IReadOnlyList ExtractScopes(ClaimsPrincipal principal) + { + var set = new HashSet(StringComparer.Ordinal); + + foreach (var claim in principal.FindAll(StellaOpsClaimTypes.ScopeItem)) + { + var normalized = Normalize(claim.Value); + if (normalized is not null) + { + set.Add(normalized); + } + } + + foreach (var claim in principal.FindAll(StellaOpsClaimTypes.Scope)) + { + if (string.IsNullOrWhiteSpace(claim.Value)) + { + continue; + } + + var parts = claim.Value.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); + foreach (var part in parts) + { + var normalized = StellaOpsScopes.Normalize(part); + if (normalized is not null) + { + set.Add(normalized); + } + } + } + + if (set.Count == 0) + { + return Array.Empty(); + } + + return set.OrderBy(static value => value, StringComparer.Ordinal).ToArray(); + } + + private static IReadOnlyList ExtractAudiences(ClaimsPrincipal principal) + { + var audiences = new HashSet(StringComparer.Ordinal); + foreach (var claim in principal.FindAll(StellaOpsClaimTypes.Audience)) + { + if (string.IsNullOrWhiteSpace(claim.Value)) + { + continue; + } + + var parts = claim.Value.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); + foreach (var part in parts) + { + audiences.Add(part); + } + } + + if (audiences.Count == 0) + { + return Array.Empty(); + } + + return audiences.OrderBy(static value => value, StringComparer.Ordinal).ToArray(); + } + + private static IReadOnlyList ExtractAuthenticationMethods(ClaimsPrincipal principal) + { + var methods = principal.FindAll(StellaOpsClaimTypes.AuthenticationMethod) + .Select(static claim => Normalize(claim.Value)) + .Where(static value => value is not null) + .Select(static value => value!) + .Distinct(StringComparer.Ordinal) + .OrderBy(static value => value, StringComparer.Ordinal) + .ToArray(); + + return methods.Length == 0 ? Array.Empty() : methods; + } + + private static DateTimeOffset? ExtractInstant(ClaimsPrincipal principal, string primaryClaim, string fallbackClaim) + { + var claim = principal.FindFirst(primaryClaim) ?? 
+        if (claim is null || string.IsNullOrWhiteSpace(claim.Value))
+        {
+            return null;
+        }
+
+        if (long.TryParse(claim.Value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var epoch))
+        {
+            return DateTimeOffset.FromUnixTimeSeconds(epoch);
+        }
+
+        if (DateTimeOffset.TryParse(claim.Value, CultureInfo.InvariantCulture, DateTimeStyles.AdjustToUniversal, out var parsed))
+        {
+            return parsed;
+        }
+
+        return null;
+    }
+
+    private static async Task WriteAuditAsync(
+        HttpContext httpContext,
+        IAuthEventSink auditSink,
+        TimeProvider timeProvider,
+        string eventType,
+        AuthEventOutcome outcome,
+        string? reason,
+        IReadOnlyList<AuthEventProperty> properties,
+        CancellationToken cancellationToken)
+    {
+        var correlationId = Activity.Current?.TraceId.ToString() ?? httpContext.TraceIdentifier;
+
+        var tenant = Normalize(httpContext.User.FindFirstValue(StellaOpsClaimTypes.Tenant));
+        var subjectId = Normalize(httpContext.User.FindFirstValue(StellaOpsClaimTypes.Subject));
+        var username = Normalize(httpContext.User.FindFirstValue(OpenIddictConstants.Claims.PreferredUsername));
+        var displayName = Normalize(httpContext.User.FindFirstValue(OpenIddictConstants.Claims.Name));
+        var identityProvider = Normalize(httpContext.User.FindFirstValue(StellaOpsClaimTypes.IdentityProvider));
+        var email = Normalize(httpContext.User.FindFirstValue(OpenIddictConstants.Claims.Email));
+
+        var subjectProperties = new List<AuthEventProperty>();
+        if (!string.IsNullOrWhiteSpace(email))
+        {
+            subjectProperties.Add(new AuthEventProperty
+            {
+                Name = "subject.email",
+                Value = ClassifiedString.Personal(email)
+            });
+        }
+
+        var subject = subjectId is null && username is null && displayName is null && identityProvider is null && subjectProperties.Count == 0
+            ? null
+            : new AuthEventSubject
+            {
+                SubjectId = ClassifiedString.Personal(subjectId),
+                Username = ClassifiedString.Personal(username),
+                DisplayName = ClassifiedString.Personal(displayName),
+                Realm = ClassifiedString.Public(identityProvider),
+                Attributes = subjectProperties
+            };
+
+        var clientId = Normalize(httpContext.User.FindFirstValue(StellaOpsClaimTypes.ClientId));
+        var client = string.IsNullOrWhiteSpace(clientId)
+            ? null
+            : new AuthEventClient
+            {
+                ClientId = ClassifiedString.Personal(clientId),
+                Name = ClassifiedString.Empty,
+                Provider = ClassifiedString.Empty
+            };
+
+        var network = BuildNetwork(httpContext);
+        var scopes = ExtractScopes(httpContext.User);
+
+        var record = new AuthEventRecord
+        {
+            EventType = eventType,
+            OccurredAt = timeProvider.GetUtcNow(),
+            CorrelationId = correlationId,
+            Outcome = outcome,
+            Reason = reason,
+            Subject = subject,
+            Client = client,
+            Tenant = ClassifiedString.Public(tenant),
+            Scopes = scopes,
+            Network = network,
+            Properties = properties
+        };
+
+        await auditSink.WriteAsync(record, cancellationToken).ConfigureAwait(false);
+    }
+
+    private static AuthEventNetwork? BuildNetwork(HttpContext httpContext)
+    {
+        var remote = httpContext.Connection.RemoteIpAddress;
+        var remoteAddress = remote is null || Equals(remote, IPAddress.IPv6None) || Equals(remote, IPAddress.None)
+            ? null
+            : remote.ToString();
+
+        var forwarded = Normalize(httpContext.Request.Headers[XForwardedForHeader]);
+        var userAgent = Normalize(httpContext.Request.Headers.UserAgent.ToString());
+
+        if (string.IsNullOrWhiteSpace(remoteAddress) &&
+            string.IsNullOrWhiteSpace(forwarded) &&
+            string.IsNullOrWhiteSpace(userAgent))
+        {
+            return null;
+        }
+
+        return new AuthEventNetwork
+        {
+            RemoteAddress = ClassifiedString.Personal(remoteAddress),
+            ForwardedFor = ClassifiedString.Personal(forwarded),
+            UserAgent = ClassifiedString.Personal(userAgent)
+        };
+    }
+
+    private static IReadOnlyList<AuthEventProperty> BuildProperties(params (string Name, string? Value)[] entries)
+    {
+        if (entries.Length == 0)
+        {
+            return Array.Empty<AuthEventProperty>();
+        }
+
+        var list = new List<AuthEventProperty>(entries.Length);
+        foreach (var (name, value) in entries)
+        {
+            if (string.IsNullOrWhiteSpace(name))
+            {
+                continue;
+            }
+
+            list.Add(new AuthEventProperty
+            {
+                Name = name,
+                Value = string.IsNullOrWhiteSpace(value)
+                    ? ClassifiedString.Empty
+                    : ClassifiedString.Public(value)
+            });
+        }
+
+        return list.Count == 0 ? Array.Empty<AuthEventProperty>() : list;
+    }
+
+    private static string? Normalize(StringValues values)
+    {
+        var value = values.ToString();
+        return Normalize(value);
+    }
+
+    private static string? Normalize(string? input)
+    {
+        if (string.IsNullOrWhiteSpace(input))
+        {
+            return null;
+        }
+
+        return input.Trim();
+    }
+
+    private static string? FormatInstant(DateTimeOffset? instant)
+    {
+        return instant?.ToString("O", CultureInfo.InvariantCulture);
+    }
+
+    private const string XForwardedForHeader = "X-Forwarded-For";
+}
+
+internal sealed record TenantCatalogResponse(IReadOnlyList Tenants);
+
+internal sealed record ConsoleProfileResponse(
+    string? SubjectId,
+    string? Username,
+    string? DisplayName,
+    string Tenant,
+    string? SessionId,
+    IReadOnlyList<string> Roles,
+    IReadOnlyList<string> Scopes,
+    IReadOnlyList<string> Audiences,
+    IReadOnlyList<string> AuthenticationMethods,
+    DateTimeOffset? IssuedAt,
+    DateTimeOffset? AuthenticationTime,
+    DateTimeOffset? ExpiresAt,
+    bool FreshAuth);
+
+internal sealed record ConsoleTokenIntrospectionResponse(
+    bool Active,
+    string Tenant,
+    string? Subject,
+    string? ClientId,
+    string? TokenId,
+    IReadOnlyList<string> Scopes,
+    IReadOnlyList<string> Audiences,
+    DateTimeOffset? IssuedAt,
+    DateTimeOffset? AuthenticationTime,
+    DateTimeOffset?
ExpiresAt, + bool FreshAuth); diff --git a/src/StellaOps.Authority/StellaOps.Authority/Console/TenantHeaderFilter.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Console/TenantHeaderFilter.cs similarity index 96% rename from src/StellaOps.Authority/StellaOps.Authority/Console/TenantHeaderFilter.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/Console/TenantHeaderFilter.cs index c137f3f3..e138063b 100644 --- a/src/StellaOps.Authority/StellaOps.Authority/Console/TenantHeaderFilter.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/Console/TenantHeaderFilter.cs @@ -1,75 +1,75 @@ -using System.Security.Claims; -using Microsoft.AspNetCore.Http; -using Microsoft.Extensions.Primitives; -using StellaOps.Auth.Abstractions; - -namespace StellaOps.Authority.Console; - -internal sealed class TenantHeaderFilter : IEndpointFilter -{ - private const string TenantItemKey = "__authority-console-tenant"; - - public ValueTask InvokeAsync(EndpointFilterInvocationContext context, EndpointFilterDelegate next) - { - ArgumentNullException.ThrowIfNull(context); - ArgumentNullException.ThrowIfNull(next); - - var httpContext = context.HttpContext; - var principal = httpContext.User; - - if (principal?.Identity?.IsAuthenticated is not true) - { - return ValueTask.FromResult(Results.Unauthorized()); - } - - var tenantHeader = httpContext.Request.Headers[AuthorityHttpHeaders.Tenant]; - if (IsMissing(tenantHeader)) - { - return ValueTask.FromResult(Results.BadRequest(new - { - error = "tenant_header_missing", - message = $"Header '{AuthorityHttpHeaders.Tenant}' is required." - })); - } - - var normalizedHeader = tenantHeader.ToString().Trim().ToLowerInvariant(); - var claimTenant = principal.FindFirstValue(StellaOpsClaimTypes.Tenant); - - if (string.IsNullOrWhiteSpace(claimTenant)) - { - return ValueTask.FromResult(Results.Forbid()); - } - - var normalizedClaim = claimTenant.Trim().ToLowerInvariant(); - if (!string.Equals(normalizedClaim, normalizedHeader, StringComparison.Ordinal)) - { - return ValueTask.FromResult(Results.Forbid()); - } - - httpContext.Items[TenantItemKey] = normalizedHeader; - return next(context); - } - - internal static string? 
GetTenant(HttpContext httpContext) - { - ArgumentNullException.ThrowIfNull(httpContext); - - if (httpContext.Items.TryGetValue(TenantItemKey, out var value) && value is string tenant && !string.IsNullOrWhiteSpace(tenant)) - { - return tenant; - } - - return null; - } - - private static bool IsMissing(StringValues values) - { - if (StringValues.IsNullOrEmpty(values)) - { - return true; - } - - var value = values.ToString(); - return string.IsNullOrWhiteSpace(value); - } -} +using System.Security.Claims; +using Microsoft.AspNetCore.Http; +using Microsoft.Extensions.Primitives; +using StellaOps.Auth.Abstractions; + +namespace StellaOps.Authority.Console; + +internal sealed class TenantHeaderFilter : IEndpointFilter +{ + private const string TenantItemKey = "__authority-console-tenant"; + + public ValueTask InvokeAsync(EndpointFilterInvocationContext context, EndpointFilterDelegate next) + { + ArgumentNullException.ThrowIfNull(context); + ArgumentNullException.ThrowIfNull(next); + + var httpContext = context.HttpContext; + var principal = httpContext.User; + + if (principal?.Identity?.IsAuthenticated is not true) + { + return ValueTask.FromResult(Results.Unauthorized()); + } + + var tenantHeader = httpContext.Request.Headers[AuthorityHttpHeaders.Tenant]; + if (IsMissing(tenantHeader)) + { + return ValueTask.FromResult(Results.BadRequest(new + { + error = "tenant_header_missing", + message = $"Header '{AuthorityHttpHeaders.Tenant}' is required." + })); + } + + var normalizedHeader = tenantHeader.ToString().Trim().ToLowerInvariant(); + var claimTenant = principal.FindFirstValue(StellaOpsClaimTypes.Tenant); + + if (string.IsNullOrWhiteSpace(claimTenant)) + { + return ValueTask.FromResult(Results.Forbid()); + } + + var normalizedClaim = claimTenant.Trim().ToLowerInvariant(); + if (!string.Equals(normalizedClaim, normalizedHeader, StringComparison.Ordinal)) + { + return ValueTask.FromResult(Results.Forbid()); + } + + httpContext.Items[TenantItemKey] = normalizedHeader; + return next(context); + } + + internal static string? 
GetTenant(HttpContext httpContext) + { + ArgumentNullException.ThrowIfNull(httpContext); + + if (httpContext.Items.TryGetValue(TenantItemKey, out var value) && value is string tenant && !string.IsNullOrWhiteSpace(tenant)) + { + return tenant; + } + + return null; + } + + private static bool IsMissing(StringValues values) + { + if (StringValues.IsNullOrEmpty(values)) + { + return true; + } + + var value = values.ToString(); + return string.IsNullOrWhiteSpace(value); + } +} diff --git a/src/StellaOps.Authority/StellaOps.Authority/OpenApi/AuthorityOpenApiDocumentProvider.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenApi/AuthorityOpenApiDocumentProvider.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Authority/OpenApi/AuthorityOpenApiDocumentProvider.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/OpenApi/AuthorityOpenApiDocumentProvider.cs index 20f2f541..fd375d39 100644 --- a/src/StellaOps.Authority/StellaOps.Authority/OpenApi/AuthorityOpenApiDocumentProvider.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenApi/AuthorityOpenApiDocumentProvider.cs @@ -1,314 +1,314 @@ -using System.Collections.Generic; -using System.IO; -using System.Globalization; -using System.Linq; -using System.Reflection; -using System.Security.Cryptography; -using System.Text; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Hosting; -using Microsoft.Extensions.Logging; -using YamlDotNet.Core; -using YamlDotNet.RepresentationModel; -using YamlDotNet.Serialization; - -namespace StellaOps.Authority.OpenApi; - -internal sealed class AuthorityOpenApiDocumentProvider -{ - private readonly string specificationPath; - private readonly ILogger logger; - private readonly SemaphoreSlim refreshLock = new(1, 1); - private OpenApiDocumentSnapshot? 
cached; - - public AuthorityOpenApiDocumentProvider(IWebHostEnvironment environment, ILogger logger) - { - ArgumentNullException.ThrowIfNull(environment); - ArgumentNullException.ThrowIfNull(logger); - - specificationPath = Path.Combine(environment.ContentRootPath, "OpenApi", "authority.yaml"); - this.logger = logger; - } - - public async ValueTask GetDocumentAsync(CancellationToken cancellationToken) - { - var lastWriteUtc = GetLastWriteTimeUtc(); - var current = cached; - if (current is not null && current.LastWriteUtc == lastWriteUtc) - { - return current; - } - - await refreshLock.WaitAsync(cancellationToken).ConfigureAwait(false); - try - { - current = cached; - lastWriteUtc = GetLastWriteTimeUtc(); - if (current is not null && current.LastWriteUtc == lastWriteUtc) - { - return current; - } - - var snapshot = LoadSnapshot(lastWriteUtc); - cached = snapshot; - return snapshot; - } - finally - { - refreshLock.Release(); - } - } - - private DateTime GetLastWriteTimeUtc() - { - var file = new FileInfo(specificationPath); - if (!file.Exists) - { - throw new FileNotFoundException($"Authority OpenAPI specification was not found at '{specificationPath}'.", specificationPath); - } - - return file.LastWriteTimeUtc; - } - - private OpenApiDocumentSnapshot LoadSnapshot(DateTime lastWriteUtc) - { - string yamlText; - try - { - yamlText = File.ReadAllText(specificationPath); - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to read Authority OpenAPI specification from {Path}.", specificationPath); - throw; - } - - var yamlStream = new YamlStream(); - using (var reader = new StringReader(yamlText)) - { - yamlStream.Load(reader); - } - - if (yamlStream.Documents.Count == 0 || yamlStream.Documents[0].RootNode is not YamlMappingNode rootNode) - { - throw new InvalidOperationException("Authority OpenAPI specification does not contain a valid root mapping node."); - } - - var (grants, scopes) = CollectGrantsAndScopes(rootNode); - - if (!TryGetMapping(rootNode, "info", out var infoNode)) - { - infoNode = new YamlMappingNode(); - rootNode.Children[new YamlScalarNode("info")] = infoNode; - } - - var serviceName = "StellaOps.Authority"; - var buildVersion = ResolveBuildVersion(); - ApplyInfoMetadata(infoNode, serviceName, buildVersion, grants, scopes); - - var apiVersion = TryGetScalar(infoNode, "version", out var version) - ? 
version - : "0.0.0"; - - var updatedYaml = WriteYaml(yamlStream); - var json = ConvertYamlToJson(updatedYaml); - var etag = CreateStrongEtag(json); - - return new OpenApiDocumentSnapshot( - serviceName, - apiVersion, - buildVersion, - json, - updatedYaml, - etag, - lastWriteUtc, - grants, - scopes); - } - - private static (IReadOnlyList Grants, IReadOnlyList Scopes) CollectGrantsAndScopes(YamlMappingNode root) - { - if (!TryGetMapping(root, "components", out var components) || - !TryGetMapping(components, "securitySchemes", out var securitySchemes)) - { - return (Array.Empty(), Array.Empty()); - } - - var grants = new SortedSet(StringComparer.Ordinal); - var scopes = new SortedSet(StringComparer.Ordinal); - - foreach (var scheme in securitySchemes.Children.Values.OfType()) - { - if (!TryGetMapping(scheme, "flows", out var flows)) - { - continue; - } - - foreach (var flowEntry in flows.Children) - { - if (flowEntry.Key is not YamlScalarNode flowNameNode || flowEntry.Value is not YamlMappingNode flowMapping) - { - continue; - } - - var grant = NormalizeGrantName(flowNameNode.Value); - if (grant is not null) - { - grants.Add(grant); - } - - if (TryGetMapping(flowMapping, "scopes", out var scopesMapping)) - { - foreach (var scope in scopesMapping.Children.Keys.OfType()) - { - if (!string.IsNullOrWhiteSpace(scope.Value)) - { - scopes.Add(scope.Value); - } - } - } - - if (flowMapping.Children.TryGetValue(new YamlScalarNode("refreshUrl"), out var refreshNode) && - refreshNode is YamlScalarNode refreshScalar && !string.IsNullOrWhiteSpace(refreshScalar.Value)) - { - grants.Add("refresh_token"); - } - } - } - - return ( - grants.Count == 0 ? Array.Empty() : grants.ToArray(), - scopes.Count == 0 ? Array.Empty() : scopes.ToArray()); - } - - private static string? NormalizeGrantName(string? 
flowName) - => flowName switch - { - null or "" => null, - "authorizationCode" => "authorization_code", - "clientCredentials" => "client_credentials", - "password" => "password", - "implicit" => "implicit", - "deviceCode" => "device_code", - _ => flowName - }; - - private static void ApplyInfoMetadata( - YamlMappingNode infoNode, - string serviceName, - string buildVersion, - IReadOnlyList grants, - IReadOnlyList scopes) - { - infoNode.Children[new YamlScalarNode("x-stella-service")] = new YamlScalarNode(serviceName); - infoNode.Children[new YamlScalarNode("x-stella-build-version")] = new YamlScalarNode(buildVersion); - infoNode.Children[new YamlScalarNode("x-stella-grant-types")] = CreateSequence(grants); - infoNode.Children[new YamlScalarNode("x-stella-scopes")] = CreateSequence(scopes); - } - - private static YamlSequenceNode CreateSequence(IEnumerable values) - { - var sequence = new YamlSequenceNode(); - foreach (var value in values) - { - sequence.Add(new YamlScalarNode(value)); - } - - return sequence; - } - - private static bool TryGetMapping(YamlMappingNode node, string key, out YamlMappingNode mapping) - { - foreach (var entry in node.Children) - { - if (entry.Key is YamlScalarNode scalar && string.Equals(scalar.Value, key, StringComparison.Ordinal)) - { - if (entry.Value is YamlMappingNode mappingNode) - { - mapping = mappingNode; - return true; - } - - break; - } - } - - mapping = null!; - return false; - } - - private static bool TryGetScalar(YamlMappingNode node, string key, out string value) - { - foreach (var entry in node.Children) - { - if (entry.Key is YamlScalarNode scalar && string.Equals(scalar.Value, key, StringComparison.Ordinal)) - { - if (entry.Value is YamlScalarNode valueNode) - { - value = valueNode.Value ?? string.Empty; - return true; - } - - break; - } - } - - value = string.Empty; - return false; - } - - private static string WriteYaml(YamlStream yamlStream) - { - using var writer = new StringWriter(CultureInfo.InvariantCulture); - yamlStream.Save(writer, assignAnchors: false); - return writer.ToString(); - } - - private static string ConvertYamlToJson(string yaml) - { - var deserializer = new DeserializerBuilder().Build(); - var yamlObject = deserializer.Deserialize(new StringReader(yaml)); - - var serializer = new SerializerBuilder() - .JsonCompatible() - .Build(); - - var json = serializer.Serialize(yamlObject); - return string.IsNullOrWhiteSpace(json) ? "{}" : json.Trim(); - } - - private static string CreateStrongEtag(string jsonRepresentation) - { - var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(jsonRepresentation)); - var hash = Convert.ToHexString(bytes).ToLowerInvariant(); - return $"\"{hash}\""; - } - - private static string ResolveBuildVersion() - { - var assembly = typeof(AuthorityOpenApiDocumentProvider).Assembly; - var informational = assembly - .GetCustomAttribute()? - .InformationalVersion; - - if (!string.IsNullOrWhiteSpace(informational)) - { - return informational!; - } - - var version = assembly.GetName().Version; - return version?.ToString() ?? 
"unknown"; - } -} - -internal sealed record OpenApiDocumentSnapshot( - string ServiceName, - string ApiVersion, - string BuildVersion, - string Json, - string Yaml, - string ETag, - DateTime LastWriteUtc, - IReadOnlyList GrantTypes, - IReadOnlyList Scopes); +using System.Collections.Generic; +using System.IO; +using System.Globalization; +using System.Linq; +using System.Reflection; +using System.Security.Cryptography; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using YamlDotNet.Core; +using YamlDotNet.RepresentationModel; +using YamlDotNet.Serialization; + +namespace StellaOps.Authority.OpenApi; + +internal sealed class AuthorityOpenApiDocumentProvider +{ + private readonly string specificationPath; + private readonly ILogger logger; + private readonly SemaphoreSlim refreshLock = new(1, 1); + private OpenApiDocumentSnapshot? cached; + + public AuthorityOpenApiDocumentProvider(IWebHostEnvironment environment, ILogger logger) + { + ArgumentNullException.ThrowIfNull(environment); + ArgumentNullException.ThrowIfNull(logger); + + specificationPath = Path.Combine(environment.ContentRootPath, "OpenApi", "authority.yaml"); + this.logger = logger; + } + + public async ValueTask GetDocumentAsync(CancellationToken cancellationToken) + { + var lastWriteUtc = GetLastWriteTimeUtc(); + var current = cached; + if (current is not null && current.LastWriteUtc == lastWriteUtc) + { + return current; + } + + await refreshLock.WaitAsync(cancellationToken).ConfigureAwait(false); + try + { + current = cached; + lastWriteUtc = GetLastWriteTimeUtc(); + if (current is not null && current.LastWriteUtc == lastWriteUtc) + { + return current; + } + + var snapshot = LoadSnapshot(lastWriteUtc); + cached = snapshot; + return snapshot; + } + finally + { + refreshLock.Release(); + } + } + + private DateTime GetLastWriteTimeUtc() + { + var file = new FileInfo(specificationPath); + if (!file.Exists) + { + throw new FileNotFoundException($"Authority OpenAPI specification was not found at '{specificationPath}'.", specificationPath); + } + + return file.LastWriteTimeUtc; + } + + private OpenApiDocumentSnapshot LoadSnapshot(DateTime lastWriteUtc) + { + string yamlText; + try + { + yamlText = File.ReadAllText(specificationPath); + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to read Authority OpenAPI specification from {Path}.", specificationPath); + throw; + } + + var yamlStream = new YamlStream(); + using (var reader = new StringReader(yamlText)) + { + yamlStream.Load(reader); + } + + if (yamlStream.Documents.Count == 0 || yamlStream.Documents[0].RootNode is not YamlMappingNode rootNode) + { + throw new InvalidOperationException("Authority OpenAPI specification does not contain a valid root mapping node."); + } + + var (grants, scopes) = CollectGrantsAndScopes(rootNode); + + if (!TryGetMapping(rootNode, "info", out var infoNode)) + { + infoNode = new YamlMappingNode(); + rootNode.Children[new YamlScalarNode("info")] = infoNode; + } + + var serviceName = "StellaOps.Authority"; + var buildVersion = ResolveBuildVersion(); + ApplyInfoMetadata(infoNode, serviceName, buildVersion, grants, scopes); + + var apiVersion = TryGetScalar(infoNode, "version", out var version) + ? 
version + : "0.0.0"; + + var updatedYaml = WriteYaml(yamlStream); + var json = ConvertYamlToJson(updatedYaml); + var etag = CreateStrongEtag(json); + + return new OpenApiDocumentSnapshot( + serviceName, + apiVersion, + buildVersion, + json, + updatedYaml, + etag, + lastWriteUtc, + grants, + scopes); + } + + private static (IReadOnlyList Grants, IReadOnlyList Scopes) CollectGrantsAndScopes(YamlMappingNode root) + { + if (!TryGetMapping(root, "components", out var components) || + !TryGetMapping(components, "securitySchemes", out var securitySchemes)) + { + return (Array.Empty(), Array.Empty()); + } + + var grants = new SortedSet(StringComparer.Ordinal); + var scopes = new SortedSet(StringComparer.Ordinal); + + foreach (var scheme in securitySchemes.Children.Values.OfType()) + { + if (!TryGetMapping(scheme, "flows", out var flows)) + { + continue; + } + + foreach (var flowEntry in flows.Children) + { + if (flowEntry.Key is not YamlScalarNode flowNameNode || flowEntry.Value is not YamlMappingNode flowMapping) + { + continue; + } + + var grant = NormalizeGrantName(flowNameNode.Value); + if (grant is not null) + { + grants.Add(grant); + } + + if (TryGetMapping(flowMapping, "scopes", out var scopesMapping)) + { + foreach (var scope in scopesMapping.Children.Keys.OfType()) + { + if (!string.IsNullOrWhiteSpace(scope.Value)) + { + scopes.Add(scope.Value); + } + } + } + + if (flowMapping.Children.TryGetValue(new YamlScalarNode("refreshUrl"), out var refreshNode) && + refreshNode is YamlScalarNode refreshScalar && !string.IsNullOrWhiteSpace(refreshScalar.Value)) + { + grants.Add("refresh_token"); + } + } + } + + return ( + grants.Count == 0 ? Array.Empty() : grants.ToArray(), + scopes.Count == 0 ? Array.Empty() : scopes.ToArray()); + } + + private static string? NormalizeGrantName(string? 
flowName) + => flowName switch + { + null or "" => null, + "authorizationCode" => "authorization_code", + "clientCredentials" => "client_credentials", + "password" => "password", + "implicit" => "implicit", + "deviceCode" => "device_code", + _ => flowName + }; + + private static void ApplyInfoMetadata( + YamlMappingNode infoNode, + string serviceName, + string buildVersion, + IReadOnlyList grants, + IReadOnlyList scopes) + { + infoNode.Children[new YamlScalarNode("x-stella-service")] = new YamlScalarNode(serviceName); + infoNode.Children[new YamlScalarNode("x-stella-build-version")] = new YamlScalarNode(buildVersion); + infoNode.Children[new YamlScalarNode("x-stella-grant-types")] = CreateSequence(grants); + infoNode.Children[new YamlScalarNode("x-stella-scopes")] = CreateSequence(scopes); + } + + private static YamlSequenceNode CreateSequence(IEnumerable values) + { + var sequence = new YamlSequenceNode(); + foreach (var value in values) + { + sequence.Add(new YamlScalarNode(value)); + } + + return sequence; + } + + private static bool TryGetMapping(YamlMappingNode node, string key, out YamlMappingNode mapping) + { + foreach (var entry in node.Children) + { + if (entry.Key is YamlScalarNode scalar && string.Equals(scalar.Value, key, StringComparison.Ordinal)) + { + if (entry.Value is YamlMappingNode mappingNode) + { + mapping = mappingNode; + return true; + } + + break; + } + } + + mapping = null!; + return false; + } + + private static bool TryGetScalar(YamlMappingNode node, string key, out string value) + { + foreach (var entry in node.Children) + { + if (entry.Key is YamlScalarNode scalar && string.Equals(scalar.Value, key, StringComparison.Ordinal)) + { + if (entry.Value is YamlScalarNode valueNode) + { + value = valueNode.Value ?? string.Empty; + return true; + } + + break; + } + } + + value = string.Empty; + return false; + } + + private static string WriteYaml(YamlStream yamlStream) + { + using var writer = new StringWriter(CultureInfo.InvariantCulture); + yamlStream.Save(writer, assignAnchors: false); + return writer.ToString(); + } + + private static string ConvertYamlToJson(string yaml) + { + var deserializer = new DeserializerBuilder().Build(); + var yamlObject = deserializer.Deserialize(new StringReader(yaml)); + + var serializer = new SerializerBuilder() + .JsonCompatible() + .Build(); + + var json = serializer.Serialize(yamlObject); + return string.IsNullOrWhiteSpace(json) ? "{}" : json.Trim(); + } + + private static string CreateStrongEtag(string jsonRepresentation) + { + var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(jsonRepresentation)); + var hash = Convert.ToHexString(bytes).ToLowerInvariant(); + return $"\"{hash}\""; + } + + private static string ResolveBuildVersion() + { + var assembly = typeof(AuthorityOpenApiDocumentProvider).Assembly; + var informational = assembly + .GetCustomAttribute()? + .InformationalVersion; + + if (!string.IsNullOrWhiteSpace(informational)) + { + return informational!; + } + + var version = assembly.GetName().Version; + return version?.ToString() ?? 
"unknown"; + } +} + +internal sealed record OpenApiDocumentSnapshot( + string ServiceName, + string ApiVersion, + string BuildVersion, + string Json, + string Yaml, + string ETag, + DateTime LastWriteUtc, + IReadOnlyList GrantTypes, + IReadOnlyList Scopes); diff --git a/src/StellaOps.Authority/StellaOps.Authority/OpenApi/OpenApiDiscoveryEndpointExtensions.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenApi/OpenApiDiscoveryEndpointExtensions.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Authority/OpenApi/OpenApiDiscoveryEndpointExtensions.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/OpenApi/OpenApiDiscoveryEndpointExtensions.cs index 39ab4014..fb493656 100644 --- a/src/StellaOps.Authority/StellaOps.Authority/OpenApi/OpenApiDiscoveryEndpointExtensions.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenApi/OpenApiDiscoveryEndpointExtensions.cs @@ -1,141 +1,141 @@ -using System.Collections.Generic; -using System.Globalization; -using System.Linq; -using Microsoft.AspNetCore.Builder; -using Microsoft.AspNetCore.Http; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Primitives; -using Microsoft.Net.Http.Headers; - -namespace StellaOps.Authority.OpenApi; - -internal static class OpenApiDiscoveryEndpointExtensions -{ - private const string JsonMediaType = "application/openapi+json"; - private const string YamlMediaType = "application/openapi+yaml"; - private static readonly string[] AdditionalYamlMediaTypes = { "application/yaml", "text/yaml" }; - private static readonly string[] AdditionalJsonMediaTypes = { "application/json" }; - - public static IEndpointConventionBuilder MapAuthorityOpenApiDiscovery(this IEndpointRouteBuilder endpoints) - { - ArgumentNullException.ThrowIfNull(endpoints); - - var builder = endpoints.MapGet("/.well-known/openapi", async (HttpContext context, AuthorityOpenApiDocumentProvider provider, CancellationToken cancellationToken) => - { - var snapshot = await provider.GetDocumentAsync(cancellationToken).ConfigureAwait(false); - - var preferYaml = ShouldReturnYaml(context.Request.GetTypedHeaders().Accept); - var payload = preferYaml ? snapshot.Yaml : snapshot.Json; - var mediaType = preferYaml ? YamlMediaType : JsonMediaType; - - ApplyMetadataHeaders(context.Response, snapshot); - - if (MatchesEtag(context.Request.Headers[HeaderNames.IfNoneMatch], snapshot.ETag)) - { - context.Response.StatusCode = StatusCodes.Status304NotModified; - return; - } - - context.Response.StatusCode = StatusCodes.Status200OK; - context.Response.ContentType = mediaType; - await context.Response.WriteAsync(payload, cancellationToken).ConfigureAwait(false); - }); - - return builder.WithName("AuthorityOpenApiDiscovery"); - } - - private static bool ShouldReturnYaml(IList? accept) - { - if (accept is null || accept.Count == 0) - { - return false; - } - - var ordered = accept - .OrderByDescending(value => value.Quality ?? 
1.0) - .ThenByDescending(value => value.MediaType.HasValue && IsYaml(value.MediaType.Value)); - - foreach (var value in ordered) - { - if (!value.MediaType.HasValue) - { - continue; - } - - var mediaType = value.MediaType.Value; - if (IsYaml(mediaType)) - { - return true; - } - - if (IsJson(mediaType) || mediaType.Equals("*/*", StringComparison.Ordinal)) - { - return false; - } - } - - return false; - } - - private static bool IsYaml(string mediaType) - => mediaType.Equals(YamlMediaType, StringComparison.OrdinalIgnoreCase) - || AdditionalYamlMediaTypes.Any(candidate => candidate.Equals(mediaType, StringComparison.OrdinalIgnoreCase)); - - private static bool IsJson(string mediaType) - => mediaType.Equals(JsonMediaType, StringComparison.OrdinalIgnoreCase) - || AdditionalJsonMediaTypes.Any(candidate => candidate.Equals(mediaType, StringComparison.OrdinalIgnoreCase)); - - private static void ApplyMetadataHeaders(HttpResponse response, OpenApiDocumentSnapshot snapshot) - { - response.Headers[HeaderNames.ETag] = snapshot.ETag; - response.Headers[HeaderNames.LastModified] = snapshot.LastWriteUtc.ToString("R", CultureInfo.InvariantCulture); - response.Headers[HeaderNames.CacheControl] = "public, max-age=300"; - response.Headers[HeaderNames.Vary] = "Accept"; - response.Headers["X-StellaOps-Service"] = snapshot.ServiceName; - response.Headers["X-StellaOps-Api-Version"] = snapshot.ApiVersion; - response.Headers["X-StellaOps-Build-Version"] = snapshot.BuildVersion; - - if (snapshot.GrantTypes.Count > 0) - { - response.Headers["X-StellaOps-OAuth-Grants"] = string.Join(' ', snapshot.GrantTypes); - } - - if (snapshot.Scopes.Count > 0) - { - response.Headers["X-StellaOps-OAuth-Scopes"] = string.Join(' ', snapshot.Scopes); - } - } - - private static bool MatchesEtag(StringValues etagValues, string currentEtag) - { - if (etagValues.Count == 0) - { - return false; - } - - foreach (var value in etagValues) - { - if (string.IsNullOrWhiteSpace(value)) - { - continue; - } - - var tokens = value.Split(','); - foreach (var token in tokens) - { - var trimmed = token.Trim(); - if (trimmed.Length == 0) - { - continue; - } - - if (trimmed.Equals("*", StringComparison.Ordinal) || trimmed.Equals(currentEtag, StringComparison.Ordinal)) - { - return true; - } - } - } - - return false; - } -} +using System.Collections.Generic; +using System.Globalization; +using System.Linq; +using Microsoft.AspNetCore.Builder; +using Microsoft.AspNetCore.Http; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Primitives; +using Microsoft.Net.Http.Headers; + +namespace StellaOps.Authority.OpenApi; + +internal static class OpenApiDiscoveryEndpointExtensions +{ + private const string JsonMediaType = "application/openapi+json"; + private const string YamlMediaType = "application/openapi+yaml"; + private static readonly string[] AdditionalYamlMediaTypes = { "application/yaml", "text/yaml" }; + private static readonly string[] AdditionalJsonMediaTypes = { "application/json" }; + + public static IEndpointConventionBuilder MapAuthorityOpenApiDiscovery(this IEndpointRouteBuilder endpoints) + { + ArgumentNullException.ThrowIfNull(endpoints); + + var builder = endpoints.MapGet("/.well-known/openapi", async (HttpContext context, AuthorityOpenApiDocumentProvider provider, CancellationToken cancellationToken) => + { + var snapshot = await provider.GetDocumentAsync(cancellationToken).ConfigureAwait(false); + + var preferYaml = ShouldReturnYaml(context.Request.GetTypedHeaders().Accept); + var payload = preferYaml ? 
snapshot.Yaml : snapshot.Json; + var mediaType = preferYaml ? YamlMediaType : JsonMediaType; + + ApplyMetadataHeaders(context.Response, snapshot); + + if (MatchesEtag(context.Request.Headers[HeaderNames.IfNoneMatch], snapshot.ETag)) + { + context.Response.StatusCode = StatusCodes.Status304NotModified; + return; + } + + context.Response.StatusCode = StatusCodes.Status200OK; + context.Response.ContentType = mediaType; + await context.Response.WriteAsync(payload, cancellationToken).ConfigureAwait(false); + }); + + return builder.WithName("AuthorityOpenApiDiscovery"); + } + + private static bool ShouldReturnYaml(IList? accept) + { + if (accept is null || accept.Count == 0) + { + return false; + } + + var ordered = accept + .OrderByDescending(value => value.Quality ?? 1.0) + .ThenByDescending(value => value.MediaType.HasValue && IsYaml(value.MediaType.Value)); + + foreach (var value in ordered) + { + if (!value.MediaType.HasValue) + { + continue; + } + + var mediaType = value.MediaType.Value; + if (IsYaml(mediaType)) + { + return true; + } + + if (IsJson(mediaType) || mediaType.Equals("*/*", StringComparison.Ordinal)) + { + return false; + } + } + + return false; + } + + private static bool IsYaml(string mediaType) + => mediaType.Equals(YamlMediaType, StringComparison.OrdinalIgnoreCase) + || AdditionalYamlMediaTypes.Any(candidate => candidate.Equals(mediaType, StringComparison.OrdinalIgnoreCase)); + + private static bool IsJson(string mediaType) + => mediaType.Equals(JsonMediaType, StringComparison.OrdinalIgnoreCase) + || AdditionalJsonMediaTypes.Any(candidate => candidate.Equals(mediaType, StringComparison.OrdinalIgnoreCase)); + + private static void ApplyMetadataHeaders(HttpResponse response, OpenApiDocumentSnapshot snapshot) + { + response.Headers[HeaderNames.ETag] = snapshot.ETag; + response.Headers[HeaderNames.LastModified] = snapshot.LastWriteUtc.ToString("R", CultureInfo.InvariantCulture); + response.Headers[HeaderNames.CacheControl] = "public, max-age=300"; + response.Headers[HeaderNames.Vary] = "Accept"; + response.Headers["X-StellaOps-Service"] = snapshot.ServiceName; + response.Headers["X-StellaOps-Api-Version"] = snapshot.ApiVersion; + response.Headers["X-StellaOps-Build-Version"] = snapshot.BuildVersion; + + if (snapshot.GrantTypes.Count > 0) + { + response.Headers["X-StellaOps-OAuth-Grants"] = string.Join(' ', snapshot.GrantTypes); + } + + if (snapshot.Scopes.Count > 0) + { + response.Headers["X-StellaOps-OAuth-Scopes"] = string.Join(' ', snapshot.Scopes); + } + } + + private static bool MatchesEtag(StringValues etagValues, string currentEtag) + { + if (etagValues.Count == 0) + { + return false; + } + + foreach (var value in etagValues) + { + if (string.IsNullOrWhiteSpace(value)) + { + continue; + } + + var tokens = value.Split(','); + foreach (var token in tokens) + { + var trimmed = token.Trim(); + if (trimmed.Length == 0) + { + continue; + } + + if (trimmed.Equals("*", StringComparison.Ordinal) || trimmed.Equals(currentEtag, StringComparison.Ordinal)) + { + return true; + } + } + } + + return false; + } +} diff --git a/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/AuthorityIdentityProviderSelector.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/AuthorityIdentityProviderSelector.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Authority/OpenIddict/AuthorityIdentityProviderSelector.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/AuthorityIdentityProviderSelector.cs index fb5d40f3..6f36a7cd 100644 
--- a/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/AuthorityIdentityProviderSelector.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/AuthorityIdentityProviderSelector.cs @@ -1,64 +1,64 @@ -using System.Linq; -using OpenIddict.Abstractions; -using StellaOps.Authority.Plugins.Abstractions; - -namespace StellaOps.Authority.OpenIddict; - -internal static class AuthorityIdentityProviderSelector -{ - public static ProviderSelectionResult ResolvePasswordProvider(OpenIddictRequest request, IAuthorityIdentityProviderRegistry registry) - { - ArgumentNullException.ThrowIfNull(request); - ArgumentNullException.ThrowIfNull(registry); - - if (registry.PasswordProviders.Count == 0) - { - return ProviderSelectionResult.Failure( - OpenIddictConstants.Errors.UnsupportedGrantType, - "Password grants are not enabled because no identity providers support password authentication."); - } - - var providerName = request.GetParameter(AuthorityOpenIddictConstants.ProviderParameterName)?.Value?.ToString(); - if (string.IsNullOrWhiteSpace(providerName)) - { - if (registry.PasswordProviders.Count == 1) - { - var provider = registry.PasswordProviders.First(); - return ProviderSelectionResult.Success(provider); - } - - return ProviderSelectionResult.Failure( - OpenIddictConstants.Errors.InvalidRequest, - "identity_provider parameter is required when multiple password-capable providers are registered."); - } - - if (!registry.TryGet(providerName!, out var selected)) - { - return ProviderSelectionResult.Failure( - OpenIddictConstants.Errors.InvalidRequest, - $"Unknown identity provider '{providerName}'."); - } - - if (!selected.Capabilities.SupportsPassword) - { - return ProviderSelectionResult.Failure( - OpenIddictConstants.Errors.InvalidRequest, - $"Identity provider '{providerName}' does not support password authentication."); - } - - return ProviderSelectionResult.Success(selected); - } - - internal sealed record ProviderSelectionResult( - bool Succeeded, - AuthorityIdentityProviderMetadata? Provider, - string? Error, - string? 
Description) - { - public static ProviderSelectionResult Success(AuthorityIdentityProviderMetadata provider) - => new(true, provider, null, null); - - public static ProviderSelectionResult Failure(string error, string description) - => new(false, null, error, description); - } -} +using System.Linq; +using OpenIddict.Abstractions; +using StellaOps.Authority.Plugins.Abstractions; + +namespace StellaOps.Authority.OpenIddict; + +internal static class AuthorityIdentityProviderSelector +{ + public static ProviderSelectionResult ResolvePasswordProvider(OpenIddictRequest request, IAuthorityIdentityProviderRegistry registry) + { + ArgumentNullException.ThrowIfNull(request); + ArgumentNullException.ThrowIfNull(registry); + + if (registry.PasswordProviders.Count == 0) + { + return ProviderSelectionResult.Failure( + OpenIddictConstants.Errors.UnsupportedGrantType, + "Password grants are not enabled because no identity providers support password authentication."); + } + + var providerName = request.GetParameter(AuthorityOpenIddictConstants.ProviderParameterName)?.Value?.ToString(); + if (string.IsNullOrWhiteSpace(providerName)) + { + if (registry.PasswordProviders.Count == 1) + { + var provider = registry.PasswordProviders.First(); + return ProviderSelectionResult.Success(provider); + } + + return ProviderSelectionResult.Failure( + OpenIddictConstants.Errors.InvalidRequest, + "identity_provider parameter is required when multiple password-capable providers are registered."); + } + + if (!registry.TryGet(providerName!, out var selected)) + { + return ProviderSelectionResult.Failure( + OpenIddictConstants.Errors.InvalidRequest, + $"Unknown identity provider '{providerName}'."); + } + + if (!selected.Capabilities.SupportsPassword) + { + return ProviderSelectionResult.Failure( + OpenIddictConstants.Errors.InvalidRequest, + $"Identity provider '{providerName}' does not support password authentication."); + } + + return ProviderSelectionResult.Success(selected); + } + + internal sealed record ProviderSelectionResult( + bool Succeeded, + AuthorityIdentityProviderMetadata? Provider, + string? Error, + string? 
Description) + { + public static ProviderSelectionResult Success(AuthorityIdentityProviderMetadata provider) + => new(true, provider, null, null); + + public static ProviderSelectionResult Failure(string error, string description) + => new(false, null, error, description); + } +} diff --git a/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/AuthorityOpenIddictConstants.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/AuthorityOpenIddictConstants.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority/OpenIddict/AuthorityOpenIddictConstants.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/AuthorityOpenIddictConstants.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/ClientCredentialsAuditHelper.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/ClientCredentialsAuditHelper.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/ClientCredentialsAuditHelper.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/ClientCredentialsAuditHelper.cs index 5aed5650..e44519c1 100644 --- a/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/ClientCredentialsAuditHelper.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/ClientCredentialsAuditHelper.cs @@ -1,269 +1,269 @@ -using System; -using System.Collections.Generic; -using System.Diagnostics; -using System.Globalization; -using System.Linq; -using OpenIddict.Abstractions; -using OpenIddict.Server; -using StellaOps.Authority.RateLimiting; -using StellaOps.Cryptography.Audit; -using StellaOps.Auth.Abstractions; - -namespace StellaOps.Authority.OpenIddict.Handlers; - -internal static class ClientCredentialsAuditHelper -{ - internal static string EnsureCorrelationId(OpenIddictServerTransaction transaction) - { - ArgumentNullException.ThrowIfNull(transaction); - - if (transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.AuditCorrelationProperty, out var value) && - value is string existing && - !string.IsNullOrWhiteSpace(existing)) - { - return existing; - } - - var correlation = Activity.Current?.TraceId.ToString() ?? - Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture); - - transaction.Properties[AuthorityOpenIddictConstants.AuditCorrelationProperty] = correlation; - return correlation; - } - - internal static AuthEventRecord CreateRecord( - TimeProvider timeProvider, - OpenIddictServerTransaction transaction, - AuthorityRateLimiterMetadata? metadata, - string? clientSecret, - AuthEventOutcome outcome, - string? reason, - string? clientId, - string? providerName, - string? tenant, - string? project, - bool? confidential, - IReadOnlyList requestedScopes, - IReadOnlyList grantedScopes, - string? invalidScope, - IEnumerable? extraProperties = null, - string? eventType = null) - { - ArgumentNullException.ThrowIfNull(timeProvider); - ArgumentNullException.ThrowIfNull(transaction); - - var correlationId = EnsureCorrelationId(transaction); - var client = BuildClient(clientId, providerName); - var network = BuildNetwork(metadata); - var normalizedGranted = NormalizeScopes(grantedScopes); - var properties = BuildProperties(confidential, requestedScopes, invalidScope, extraProperties); - var normalizedTenant = NormalizeTenant(tenant); - var normalizedProject = NormalizeProject(project); - - return new AuthEventRecord - { - EventType = string.IsNullOrWhiteSpace(eventType) ? 
"authority.client_credentials.grant" : eventType, - OccurredAt = timeProvider.GetUtcNow(), - CorrelationId = correlationId, - Outcome = outcome, - Reason = Normalize(reason), - Subject = null, - Client = client, - Scopes = normalizedGranted, - Network = network, - Tenant = ClassifiedString.Public(normalizedTenant), - Project = ClassifiedString.Public(normalizedProject), - Properties = properties - }; - } - - internal static AuthEventRecord CreateTamperRecord( - TimeProvider timeProvider, - OpenIddictServerTransaction transaction, - AuthorityRateLimiterMetadata? metadata, - string? clientId, - string? providerName, - string? tenant, - string? project, - bool? confidential, - IEnumerable unexpectedParameters) - { - var properties = new List - { - new() - { - Name = "request.tampered", - Value = ClassifiedString.Public("true") - } - }; - - if (confidential.HasValue) - { - properties.Add(new AuthEventProperty - { - Name = "client.confidential", - Value = ClassifiedString.Public(confidential.Value ? "true" : "false") - }); - } - - if (unexpectedParameters is not null) - { - foreach (var parameter in unexpectedParameters) - { - if (string.IsNullOrWhiteSpace(parameter)) - { - continue; - } - - properties.Add(new AuthEventProperty - { - Name = "request.unexpected_parameter", - Value = ClassifiedString.Public(parameter) - }); - } - } - - var reason = unexpectedParameters is null - ? "Unexpected parameters supplied to client credentials request." - : $"Unexpected parameters supplied to client credentials request: {string.Join(", ", unexpectedParameters)}."; - - return CreateRecord( - timeProvider, - transaction, - metadata, - clientSecret: null, - outcome: AuthEventOutcome.Failure, - reason: reason, - clientId: clientId, - providerName: providerName, - tenant: tenant, - project: project, - confidential: confidential, - requestedScopes: Array.Empty(), - grantedScopes: Array.Empty(), - invalidScope: null, - extraProperties: properties, - eventType: "authority.token.tamper"); - } - - private static AuthEventClient? BuildClient(string? clientId, string? providerName) - { - if (string.IsNullOrWhiteSpace(clientId) && string.IsNullOrWhiteSpace(providerName)) - { - return null; - } - - return new AuthEventClient - { - ClientId = ClassifiedString.Personal(Normalize(clientId)), - Name = ClassifiedString.Empty, - Provider = ClassifiedString.Public(Normalize(providerName)) - }; - } - - private static AuthEventNetwork? BuildNetwork(AuthorityRateLimiterMetadata? metadata) - { - var remote = Normalize(metadata?.RemoteIp); - var forwarded = Normalize(metadata?.ForwardedFor); - var userAgent = Normalize(metadata?.UserAgent); - - if (string.IsNullOrWhiteSpace(remote) && string.IsNullOrWhiteSpace(forwarded) && string.IsNullOrWhiteSpace(userAgent)) - { - return null; - } - - return new AuthEventNetwork - { - RemoteAddress = ClassifiedString.Personal(remote), - ForwardedFor = ClassifiedString.Personal(forwarded), - UserAgent = ClassifiedString.Personal(userAgent) - }; - } - - private static IReadOnlyList BuildProperties( - bool? confidential, - IReadOnlyList requestedScopes, - string? invalidScope, - IEnumerable? extraProperties) - { - var properties = new List(); - - if (confidential.HasValue) - { - properties.Add(new AuthEventProperty - { - Name = "client.confidential", - Value = ClassifiedString.Public(confidential.Value ? 
"true" : "false") - }); - } - - var normalizedRequested = NormalizeScopes(requestedScopes); - if (normalizedRequested is { Count: > 0 }) - { - foreach (var scope in normalizedRequested) - { - if (string.IsNullOrWhiteSpace(scope)) - { - continue; - } - - properties.Add(new AuthEventProperty - { - Name = "scope.requested", - Value = ClassifiedString.Public(scope) - }); - } - } - - if (!string.IsNullOrWhiteSpace(invalidScope)) - { - properties.Add(new AuthEventProperty - { - Name = "scope.invalid", - Value = ClassifiedString.Public(invalidScope) - }); - } - - if (extraProperties is not null) - { - foreach (var property in extraProperties) - { - if (property is null || string.IsNullOrWhiteSpace(property.Name)) - { - continue; - } - - properties.Add(property); - } - } - - return properties.Count == 0 ? Array.Empty() : properties; - } - - private static IReadOnlyList NormalizeScopes(IReadOnlyList? scopes) - { - if (scopes is null || scopes.Count == 0) - { - return Array.Empty(); - } - - var normalized = scopes - .Where(static scope => !string.IsNullOrWhiteSpace(scope)) - .Select(static scope => scope.Trim()) - .Where(static scope => scope.Length > 0) - .Distinct(StringComparer.Ordinal) - .OrderBy(static scope => scope, StringComparer.Ordinal) - .ToArray(); - - return normalized.Length == 0 ? Array.Empty() : normalized; - } - - private static string? Normalize(string? value) - => string.IsNullOrWhiteSpace(value) ? null : value.Trim(); - - private static string? NormalizeTenant(string? value) - => string.IsNullOrWhiteSpace(value) ? null : value.Trim().ToLowerInvariant(); - - private static string NormalizeProject(string? value) - => string.IsNullOrWhiteSpace(value) ? StellaOpsTenancyDefaults.AnyProject : value.Trim().ToLowerInvariant(); -} +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Globalization; +using System.Linq; +using OpenIddict.Abstractions; +using OpenIddict.Server; +using StellaOps.Authority.RateLimiting; +using StellaOps.Cryptography.Audit; +using StellaOps.Auth.Abstractions; + +namespace StellaOps.Authority.OpenIddict.Handlers; + +internal static class ClientCredentialsAuditHelper +{ + internal static string EnsureCorrelationId(OpenIddictServerTransaction transaction) + { + ArgumentNullException.ThrowIfNull(transaction); + + if (transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.AuditCorrelationProperty, out var value) && + value is string existing && + !string.IsNullOrWhiteSpace(existing)) + { + return existing; + } + + var correlation = Activity.Current?.TraceId.ToString() ?? + Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture); + + transaction.Properties[AuthorityOpenIddictConstants.AuditCorrelationProperty] = correlation; + return correlation; + } + + internal static AuthEventRecord CreateRecord( + TimeProvider timeProvider, + OpenIddictServerTransaction transaction, + AuthorityRateLimiterMetadata? metadata, + string? clientSecret, + AuthEventOutcome outcome, + string? reason, + string? clientId, + string? providerName, + string? tenant, + string? project, + bool? confidential, + IReadOnlyList requestedScopes, + IReadOnlyList grantedScopes, + string? invalidScope, + IEnumerable? extraProperties = null, + string? 
eventType = null) + { + ArgumentNullException.ThrowIfNull(timeProvider); + ArgumentNullException.ThrowIfNull(transaction); + + var correlationId = EnsureCorrelationId(transaction); + var client = BuildClient(clientId, providerName); + var network = BuildNetwork(metadata); + var normalizedGranted = NormalizeScopes(grantedScopes); + var properties = BuildProperties(confidential, requestedScopes, invalidScope, extraProperties); + var normalizedTenant = NormalizeTenant(tenant); + var normalizedProject = NormalizeProject(project); + + return new AuthEventRecord + { + EventType = string.IsNullOrWhiteSpace(eventType) ? "authority.client_credentials.grant" : eventType, + OccurredAt = timeProvider.GetUtcNow(), + CorrelationId = correlationId, + Outcome = outcome, + Reason = Normalize(reason), + Subject = null, + Client = client, + Scopes = normalizedGranted, + Network = network, + Tenant = ClassifiedString.Public(normalizedTenant), + Project = ClassifiedString.Public(normalizedProject), + Properties = properties + }; + } + + internal static AuthEventRecord CreateTamperRecord( + TimeProvider timeProvider, + OpenIddictServerTransaction transaction, + AuthorityRateLimiterMetadata? metadata, + string? clientId, + string? providerName, + string? tenant, + string? project, + bool? confidential, + IEnumerable unexpectedParameters) + { + var properties = new List + { + new() + { + Name = "request.tampered", + Value = ClassifiedString.Public("true") + } + }; + + if (confidential.HasValue) + { + properties.Add(new AuthEventProperty + { + Name = "client.confidential", + Value = ClassifiedString.Public(confidential.Value ? "true" : "false") + }); + } + + if (unexpectedParameters is not null) + { + foreach (var parameter in unexpectedParameters) + { + if (string.IsNullOrWhiteSpace(parameter)) + { + continue; + } + + properties.Add(new AuthEventProperty + { + Name = "request.unexpected_parameter", + Value = ClassifiedString.Public(parameter) + }); + } + } + + var reason = unexpectedParameters is null + ? "Unexpected parameters supplied to client credentials request." + : $"Unexpected parameters supplied to client credentials request: {string.Join(", ", unexpectedParameters)}."; + + return CreateRecord( + timeProvider, + transaction, + metadata, + clientSecret: null, + outcome: AuthEventOutcome.Failure, + reason: reason, + clientId: clientId, + providerName: providerName, + tenant: tenant, + project: project, + confidential: confidential, + requestedScopes: Array.Empty(), + grantedScopes: Array.Empty(), + invalidScope: null, + extraProperties: properties, + eventType: "authority.token.tamper"); + } + + private static AuthEventClient? BuildClient(string? clientId, string? providerName) + { + if (string.IsNullOrWhiteSpace(clientId) && string.IsNullOrWhiteSpace(providerName)) + { + return null; + } + + return new AuthEventClient + { + ClientId = ClassifiedString.Personal(Normalize(clientId)), + Name = ClassifiedString.Empty, + Provider = ClassifiedString.Public(Normalize(providerName)) + }; + } + + private static AuthEventNetwork? BuildNetwork(AuthorityRateLimiterMetadata? 
metadata) + { + var remote = Normalize(metadata?.RemoteIp); + var forwarded = Normalize(metadata?.ForwardedFor); + var userAgent = Normalize(metadata?.UserAgent); + + if (string.IsNullOrWhiteSpace(remote) && string.IsNullOrWhiteSpace(forwarded) && string.IsNullOrWhiteSpace(userAgent)) + { + return null; + } + + return new AuthEventNetwork + { + RemoteAddress = ClassifiedString.Personal(remote), + ForwardedFor = ClassifiedString.Personal(forwarded), + UserAgent = ClassifiedString.Personal(userAgent) + }; + } + + private static IReadOnlyList BuildProperties( + bool? confidential, + IReadOnlyList requestedScopes, + string? invalidScope, + IEnumerable? extraProperties) + { + var properties = new List(); + + if (confidential.HasValue) + { + properties.Add(new AuthEventProperty + { + Name = "client.confidential", + Value = ClassifiedString.Public(confidential.Value ? "true" : "false") + }); + } + + var normalizedRequested = NormalizeScopes(requestedScopes); + if (normalizedRequested is { Count: > 0 }) + { + foreach (var scope in normalizedRequested) + { + if (string.IsNullOrWhiteSpace(scope)) + { + continue; + } + + properties.Add(new AuthEventProperty + { + Name = "scope.requested", + Value = ClassifiedString.Public(scope) + }); + } + } + + if (!string.IsNullOrWhiteSpace(invalidScope)) + { + properties.Add(new AuthEventProperty + { + Name = "scope.invalid", + Value = ClassifiedString.Public(invalidScope) + }); + } + + if (extraProperties is not null) + { + foreach (var property in extraProperties) + { + if (property is null || string.IsNullOrWhiteSpace(property.Name)) + { + continue; + } + + properties.Add(property); + } + } + + return properties.Count == 0 ? Array.Empty() : properties; + } + + private static IReadOnlyList NormalizeScopes(IReadOnlyList? scopes) + { + if (scopes is null || scopes.Count == 0) + { + return Array.Empty(); + } + + var normalized = scopes + .Where(static scope => !string.IsNullOrWhiteSpace(scope)) + .Select(static scope => scope.Trim()) + .Where(static scope => scope.Length > 0) + .Distinct(StringComparer.Ordinal) + .OrderBy(static scope => scope, StringComparer.Ordinal) + .ToArray(); + + return normalized.Length == 0 ? Array.Empty() : normalized; + } + + private static string? Normalize(string? value) + => string.IsNullOrWhiteSpace(value) ? null : value.Trim(); + + private static string? NormalizeTenant(string? value) + => string.IsNullOrWhiteSpace(value) ? null : value.Trim().ToLowerInvariant(); + + private static string NormalizeProject(string? value) + => string.IsNullOrWhiteSpace(value) ? 
StellaOpsTenancyDefaults.AnyProject : value.Trim().ToLowerInvariant(); +} diff --git a/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/ClientCredentialsHandlers.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/ClientCredentialsHandlers.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/ClientCredentialsHandlers.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/ClientCredentialsHandlers.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/DpopHandlers.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/DpopHandlers.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/DpopHandlers.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/DpopHandlers.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/PasswordGrantHandlers.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/PasswordGrantHandlers.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/PasswordGrantHandlers.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/PasswordGrantHandlers.cs index ea737677..9ab8fa7b 100644 --- a/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/PasswordGrantHandlers.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/PasswordGrantHandlers.cs @@ -1,876 +1,876 @@ -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Diagnostics; -using System.Globalization; -using System.Linq; -using System.Security.Claims; -using Microsoft.AspNetCore.Http; -using Microsoft.Extensions.Logging; -using OpenIddict.Abstractions; -using OpenIddict.Extensions; -using OpenIddict.Server; -using OpenIddict.Server.AspNetCore; -using StellaOps.Auth.Abstractions; -using StellaOps.Authority.OpenIddict; -using StellaOps.Authority.Plugins.Abstractions; -using StellaOps.Authority.RateLimiting; -using StellaOps.Authority.Storage.Mongo.Documents; -using StellaOps.Authority.Storage.Mongo.Stores; -using StellaOps.Cryptography.Audit; - -namespace StellaOps.Authority.OpenIddict.Handlers; - -internal sealed class ValidatePasswordGrantHandler : IOpenIddictServerHandler -{ - private readonly IAuthorityIdentityProviderRegistry registry; - private readonly ActivitySource activitySource; - private readonly IAuthEventSink auditSink; - private readonly IAuthorityRateLimiterMetadataAccessor metadataAccessor; - private readonly IAuthorityClientStore clientStore; - private readonly TimeProvider timeProvider; - private readonly ILogger logger; - - public ValidatePasswordGrantHandler( - IAuthorityIdentityProviderRegistry registry, - ActivitySource activitySource, - IAuthEventSink auditSink, - IAuthorityRateLimiterMetadataAccessor metadataAccessor, - IAuthorityClientStore clientStore, - TimeProvider timeProvider, - ILogger logger) - { - this.registry = registry ?? throw new ArgumentNullException(nameof(registry)); - this.activitySource = activitySource ?? throw new ArgumentNullException(nameof(activitySource)); - this.auditSink = auditSink ?? throw new ArgumentNullException(nameof(auditSink)); - this.metadataAccessor = metadataAccessor ?? throw new ArgumentNullException(nameof(metadataAccessor)); - this.clientStore = clientStore ?? 
throw new ArgumentNullException(nameof(clientStore)); - this.timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); - this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); - } - - public async ValueTask HandleAsync(OpenIddictServerEvents.ValidateTokenRequestContext context) - { - ArgumentNullException.ThrowIfNull(context); - - if (!context.Request.IsPasswordGrantType()) - { - return; - } - - using var activity = activitySource.StartActivity("authority.token.validate_password_grant", ActivityKind.Internal); - activity?.SetTag("authority.endpoint", "/token"); - activity?.SetTag("authority.grant_type", OpenIddictConstants.GrantTypes.Password); - activity?.SetTag("authority.username", context.Request.Username ?? string.Empty); - - PasswordGrantAuditHelper.EnsureCorrelationId(context.Transaction); - - var metadata = metadataAccessor.GetMetadata(); - var clientId = context.ClientId ?? context.Request.ClientId; - if (!string.IsNullOrWhiteSpace(clientId)) - { - metadataAccessor.SetClientId(clientId); - } - - var requestedScopesInput = context.Request.GetScopes(); - var requestedScopes = requestedScopesInput.IsDefaultOrEmpty ? Array.Empty() : requestedScopesInput.ToArray(); - context.Transaction.Properties[AuthorityOpenIddictConstants.AuditRequestedScopesProperty] = requestedScopes; - - if (string.IsNullOrWhiteSpace(clientId)) - { - var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( - timeProvider, - context.Transaction, - metadata, - AuthEventOutcome.Failure, - "Client identifier is required for password grant.", - clientId: null, - providerName: null, - tenant: null, - user: null, - username: context.Request.Username, - scopes: requestedScopes, - retryAfter: null, - failureCode: AuthorityCredentialFailureCode.InvalidCredentials, - extraProperties: null); - - await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); - - context.Reject(OpenIddictConstants.Errors.InvalidClient, "Client identifier is required."); - logger.LogWarning("Password grant validation failed: missing client_id for {Username}.", context.Request.Username); - return; - } - - var clientDocument = await clientStore.FindByClientIdAsync(clientId, context.CancellationToken).ConfigureAwait(false); - if (clientDocument is null || clientDocument.Disabled) - { - var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( - timeProvider, - context.Transaction, - metadata, - AuthEventOutcome.Failure, - "Client is not permitted for password grant.", - clientId, - providerName: null, - tenant: null, - user: null, - username: context.Request.Username, - scopes: requestedScopes, - retryAfter: null, - failureCode: AuthorityCredentialFailureCode.InvalidCredentials, - extraProperties: null); - - await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); - - context.Reject(OpenIddictConstants.Errors.InvalidClient, "The specified client is not permitted."); - logger.LogWarning("Password grant validation failed: client {ClientId} disabled or missing.", clientId); - return; - } - - context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTransactionProperty] = clientDocument; - context.Transaction.Properties[AuthorityOpenIddictConstants.AuditClientIdProperty] = clientId; - - var tenant = PasswordGrantAuditHelper.NormalizeTenant(clientDocument.Properties.TryGetValue(AuthorityClientMetadataKeys.Tenant, out var tenantValue) ? 
tenantValue : null); - if (!string.IsNullOrWhiteSpace(tenant)) - { - context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTenantProperty] = tenant; - metadataAccessor.SetTenant(tenant); - activity?.SetTag("authority.tenant", tenant); - } - - var allowedGrantTypes = ClientCredentialHandlerHelpers.Split(clientDocument.Properties, AuthorityClientMetadataKeys.AllowedGrantTypes); - if (allowedGrantTypes.Count > 0 && - !allowedGrantTypes.Any(static grant => string.Equals(grant, OpenIddictConstants.GrantTypes.Password, StringComparison.Ordinal))) - { - var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( - timeProvider, - context.Transaction, - metadata, - AuthEventOutcome.Failure, - "Password grant is not permitted for this client.", - clientId, - providerName: null, - tenant, - user: null, - username: context.Request.Username, - scopes: requestedScopes, - retryAfter: null, - failureCode: AuthorityCredentialFailureCode.InvalidCredentials, - extraProperties: null); - - await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); - - context.Reject(OpenIddictConstants.Errors.UnauthorizedClient, "Password grant is not permitted for this client."); - logger.LogWarning("Password grant validation failed for client {ClientId}: grant type not allowed.", clientId); - return; - } - - var allowedScopes = ClientCredentialHandlerHelpers.Split(clientDocument.Properties, AuthorityClientMetadataKeys.AllowedScopes); - var resolvedScopes = ClientCredentialHandlerHelpers.ResolveGrantedScopes(allowedScopes, requestedScopes); - - if (resolvedScopes.InvalidScope is not null) - { - context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty] = resolvedScopes.InvalidScope; - - var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( - timeProvider, - context.Transaction, - metadata, - AuthEventOutcome.Failure, - $"Scope '{resolvedScopes.InvalidScope}' is not permitted for this client.", - clientId, - providerName: null, - tenant, - user: null, - username: context.Request.Username, - scopes: requestedScopes, - retryAfter: null, - failureCode: AuthorityCredentialFailureCode.InvalidCredentials, - extraProperties: null); - - await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); - - context.Reject(OpenIddictConstants.Errors.InvalidScope, $"Scope '{resolvedScopes.InvalidScope}' is not allowed for this client."); - logger.LogWarning("Password grant validation failed for client {ClientId}: scope {Scope} not permitted.", clientId, resolvedScopes.InvalidScope); - return; - } - - context.Transaction.Properties[AuthorityOpenIddictConstants.AuditGrantedScopesProperty] = resolvedScopes.Scopes; - context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty] = resolvedScopes.Scopes; - - var unexpectedParameters = TokenRequestTamperInspector.GetUnexpectedPasswordGrantParameters(context.Request); - if (unexpectedParameters.Count > 0) - { - var providerHint = context.Request.GetParameter(AuthorityOpenIddictConstants.ProviderParameterName)?.Value?.ToString(); - var tamperRecord = PasswordGrantAuditHelper.CreateTamperRecord( - timeProvider, - context.Transaction, - metadata, - clientId, - providerHint, - tenant, - context.Request.Username, - requestedScopes, - unexpectedParameters); - - await auditSink.WriteAsync(tamperRecord, context.CancellationToken).ConfigureAwait(false); - } - - var selection = AuthorityIdentityProviderSelector.ResolvePasswordProvider(context.Request, registry); - if 
(!selection.Succeeded) - { - var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( - timeProvider, - context.Transaction, - metadata, - AuthEventOutcome.Failure, - selection.Description, - clientId, - providerName: null, - tenant, - user: null, - username: context.Request.Username, - scopes: requestedScopes, - retryAfter: null, - failureCode: AuthorityCredentialFailureCode.InvalidCredentials, - extraProperties: null); - - await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); - - context.Reject(selection.Error!, selection.Description); - logger.LogWarning("Password grant validation failed for {Username}: {Reason}.", context.Request.Username, selection.Description); - return; - } - - var selectedProvider = selection.Provider!; - - if (string.IsNullOrWhiteSpace(context.Request.Username) || string.IsNullOrEmpty(context.Request.Password)) - { - var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( - timeProvider, - context.Transaction, - metadata, - AuthEventOutcome.Failure, - "Both username and password must be provided.", - clientId, - providerName: selectedProvider.Name, - tenant, - user: null, - username: context.Request.Username, - scopes: requestedScopes, - retryAfter: null, - failureCode: AuthorityCredentialFailureCode.InvalidCredentials, - extraProperties: null); - - await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); - - context.Reject(OpenIddictConstants.Errors.InvalidRequest, "Both username and password must be provided."); - logger.LogWarning("Password grant validation failed: missing credentials for {Username}.", context.Request.Username); - return; - } - - context.Transaction.Properties[AuthorityOpenIddictConstants.ProviderTransactionProperty] = selectedProvider.Name; - activity?.SetTag("authority.identity_provider", selectedProvider.Name); - logger.LogInformation("Password grant validation succeeded for {Username} using provider {Provider}.", context.Request.Username, selectedProvider.Name); - } -} - -internal sealed class HandlePasswordGrantHandler : IOpenIddictServerHandler -{ - private readonly IAuthorityIdentityProviderRegistry registry; - private readonly IAuthorityClientStore clientStore; - private readonly ActivitySource activitySource; - private readonly IAuthEventSink auditSink; - private readonly IAuthorityRateLimiterMetadataAccessor metadataAccessor; - private readonly TimeProvider timeProvider; - private readonly ILogger logger; - - public HandlePasswordGrantHandler( - IAuthorityIdentityProviderRegistry registry, - IAuthorityClientStore clientStore, - ActivitySource activitySource, - IAuthEventSink auditSink, - IAuthorityRateLimiterMetadataAccessor metadataAccessor, - TimeProvider timeProvider, - ILogger logger) - { - this.registry = registry ?? throw new ArgumentNullException(nameof(registry)); - this.clientStore = clientStore ?? throw new ArgumentNullException(nameof(clientStore)); - this.activitySource = activitySource ?? throw new ArgumentNullException(nameof(activitySource)); - this.auditSink = auditSink ?? throw new ArgumentNullException(nameof(auditSink)); - this.metadataAccessor = metadataAccessor ?? throw new ArgumentNullException(nameof(metadataAccessor)); - this.timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); - this.logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); - } - - public async ValueTask HandleAsync(OpenIddictServerEvents.HandleTokenRequestContext context) - { - ArgumentNullException.ThrowIfNull(context); - - if (!context.Request.IsPasswordGrantType()) - { - return; - } - - using var activity = activitySource.StartActivity("authority.token.handle_password_grant", ActivityKind.Internal); - activity?.SetTag("authority.endpoint", "/token"); - activity?.SetTag("authority.grant_type", OpenIddictConstants.GrantTypes.Password); - activity?.SetTag("authority.username", context.Request.Username ?? string.Empty); - - PasswordGrantAuditHelper.EnsureCorrelationId(context.Transaction); - - var metadata = metadataAccessor.GetMetadata(); - var clientId = context.ClientId ?? context.Request.ClientId; - if (!string.IsNullOrWhiteSpace(clientId)) - { - metadataAccessor.SetClientId(clientId); - } - - var requestedScopesInput = context.Request.GetScopes(); - var requestedScopes = requestedScopesInput.IsDefaultOrEmpty ? Array.Empty() : requestedScopesInput.ToArray(); - var grantedScopes = context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.ClientGrantedScopesProperty, out var grantedValue) && - grantedValue is string[] grantedArray - ? (IReadOnlyList)grantedArray - : requestedScopes; - - AuthorityClientDocument? clientDocument = null; - if (context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.ClientTransactionProperty, out var clientValue) && - clientValue is AuthorityClientDocument storedClient) - { - clientDocument = storedClient; - } - else if (!string.IsNullOrWhiteSpace(clientId)) - { - clientDocument = await clientStore.FindByClientIdAsync(clientId, context.CancellationToken).ConfigureAwait(false); - } - - if (clientDocument is null || clientDocument.Disabled) - { - var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( - timeProvider, - context.Transaction, - metadata, - AuthEventOutcome.Failure, - "Client is not permitted for password grant.", - clientId, - providerName: null, - tenant: null, - user: null, - username: context.Request.Username, - scopes: requestedScopes, - retryAfter: null, - failureCode: AuthorityCredentialFailureCode.InvalidCredentials, - extraProperties: null); - - await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); - - context.Reject(OpenIddictConstants.Errors.InvalidClient, "The specified client is not permitted."); - logger.LogWarning("Password grant handling failed: client {ClientId} disabled or missing.", clientId); - return; - } - - context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTransactionProperty] = clientDocument; - - if (grantedScopes.Count == 0) - { - var allowedScopes = ClientCredentialHandlerHelpers.Split(clientDocument.Properties, AuthorityClientMetadataKeys.AllowedScopes); - var resolvedScopes = ClientCredentialHandlerHelpers.ResolveGrantedScopes(allowedScopes, requestedScopes); - grantedScopes = resolvedScopes.InvalidScope is null ? resolvedScopes.Scopes : Array.Empty(); - context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty] = grantedScopes; - } - - var tenant = PasswordGrantAuditHelper.NormalizeTenant( - clientDocument.Properties.TryGetValue(AuthorityClientMetadataKeys.Tenant, out var tenantValue) ? 
tenantValue : null); - if (!string.IsNullOrWhiteSpace(tenant)) - { - context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTenantProperty] = tenant; - metadataAccessor.SetTenant(tenant); - activity?.SetTag("authority.tenant", tenant); - } - - var providerName = context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.ProviderTransactionProperty, out var value) - ? value as string - : null; - - AuthorityIdentityProviderMetadata? providerMetadata = null; - if (!string.IsNullOrWhiteSpace(providerName)) - { - if (!registry.TryGet(providerName!, out providerMetadata)) - { - var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( - timeProvider, - context.Transaction, - metadata, - AuthEventOutcome.Failure, - "Unable to resolve the requested identity provider.", - clientId, - providerName, - tenant, - user: null, - username: context.Request.Username, - scopes: requestedScopes, - retryAfter: null, - failureCode: AuthorityCredentialFailureCode.UnknownError, - extraProperties: null); - - await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); - - context.Reject(OpenIddictConstants.Errors.ServerError, "Unable to resolve the requested identity provider."); - logger.LogError("Password grant handling failed: provider {Provider} not found for user {Username}.", providerName, context.Request.Username); - return; - } - } - else - { - var selection = AuthorityIdentityProviderSelector.ResolvePasswordProvider(context.Request, registry); - if (!selection.Succeeded) - { - var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( - timeProvider, - context.Transaction, - metadata, - AuthEventOutcome.Failure, - selection.Description, - clientId, - providerName: null, - tenant, - user: null, - username: context.Request.Username, - scopes: requestedScopes, - retryAfter: null, - failureCode: AuthorityCredentialFailureCode.InvalidCredentials, - extraProperties: null); - - await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); - - context.Reject(selection.Error!, selection.Description); - logger.LogWarning("Password grant handling rejected {Username}: {Reason}.", context.Request.Username, selection.Description); - return; - } - - providerMetadata = selection.Provider; - providerName = providerMetadata?.Name; - } - - if (providerMetadata is null) - { - throw new InvalidOperationException("No identity provider metadata resolved for password grant."); - } - - await using var providerHandle = await registry.AcquireAsync(providerMetadata.Name, context.CancellationToken).ConfigureAwait(false); - var provider = providerHandle.Provider; - - var username = context.Request.Username; - var password = context.Request.Password; - if (string.IsNullOrWhiteSpace(username) || string.IsNullOrEmpty(password)) - { - var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( - timeProvider, - context.Transaction, - metadata, - AuthEventOutcome.Failure, - "Both username and password must be provided.", - clientId, - providerMetadata.Name, - tenant, - user: null, - username: username, - scopes: requestedScopes, - retryAfter: null, - failureCode: AuthorityCredentialFailureCode.InvalidCredentials, - extraProperties: null); - - await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); - - context.Reject(OpenIddictConstants.Errors.InvalidRequest, "Both username and password must be provided."); - logger.LogWarning("Password grant handling rejected: missing credentials for {Username}.", username); - return; - } - 
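// --- Editor's note: illustrative sketch, not part of the patch ---
// The tenant attached to these audit records goes through
// PasswordGrantAuditHelper.NormalizeTenant, which trims the value and lower-cases
// it invariantly so tenants compare consistently across records. A minimal
// standalone mirror of that behaviour (BCL only) is sketched here; the tenant
// value in the demo is a placeholder.
using System;

static class TenantNormalizationSketch
{
    // Mirrors NormalizeTenant: whitespace-only input collapses to null,
    // everything else is trimmed and lower-cased with the invariant culture.
    public static string? NormalizeTenant(string? value)
        => string.IsNullOrWhiteSpace(value) ? null : value.Trim().ToLowerInvariant();

    public static void Demo()
    {
        Console.WriteLine(NormalizeTenant("  Acme-Corp ") ?? "<null>"); // prints: acme-corp
        Console.WriteLine(NormalizeTenant("   ") ?? "<null>");          // prints: <null>
    }
}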
- var verification = await provider.Credentials.VerifyPasswordAsync( - username, - password, - context.CancellationToken).ConfigureAwait(false); - - if (!verification.Succeeded || verification.User is null) - { - var outcome = verification.FailureCode == AuthorityCredentialFailureCode.LockedOut - ? AuthEventOutcome.LockedOut - : AuthEventOutcome.Failure; - - var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( - timeProvider, - context.Transaction, - metadata, - outcome, - verification.Message, - clientId, - providerMetadata.Name, - tenant, - verification.User, - username, - scopes: requestedScopes, - retryAfter: verification.RetryAfter, - failureCode: verification.FailureCode, - extraProperties: verification.AuditProperties); - - await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); - - context.Reject( - OpenIddictConstants.Errors.InvalidGrant, - verification.Message ?? "Invalid username or password."); - logger.LogWarning("Password verification failed for {Username}: {Message}.", username, verification.Message); - return; - } - - metadataAccessor.SetSubjectId(verification.User.SubjectId); - - var identity = new ClaimsIdentity( - OpenIddictServerAspNetCoreDefaults.AuthenticationScheme, - OpenIddictConstants.Claims.Name, - OpenIddictConstants.Claims.Role); - - identity.AddClaim(new Claim(OpenIddictConstants.Claims.Subject, verification.User.SubjectId)); - identity.AddClaim(new Claim(OpenIddictConstants.Claims.PreferredUsername, verification.User.Username)); - - if (!string.IsNullOrWhiteSpace(verification.User.DisplayName)) - { - identity.AddClaim(new Claim(OpenIddictConstants.Claims.Name, verification.User.DisplayName!)); - } - - foreach (var role in verification.User.Roles) - { - identity.AddClaim(new Claim(OpenIddictConstants.Claims.Role, role)); - } - - if (!string.IsNullOrWhiteSpace(tenant)) - { - identity.SetClaim(StellaOpsClaimTypes.Tenant, tenant); - } - - identity.SetDestinations(static claim => claim.Type switch - { - OpenIddictConstants.Claims.Subject => new[] { OpenIddictConstants.Destinations.AccessToken, OpenIddictConstants.Destinations.IdentityToken }, - OpenIddictConstants.Claims.Name => new[] { OpenIddictConstants.Destinations.AccessToken, OpenIddictConstants.Destinations.IdentityToken }, - OpenIddictConstants.Claims.PreferredUsername => new[] { OpenIddictConstants.Destinations.AccessToken }, - OpenIddictConstants.Claims.Role => new[] { OpenIddictConstants.Destinations.AccessToken }, - _ => new[] { OpenIddictConstants.Destinations.AccessToken } - }); - - var principal = new ClaimsPrincipal(identity); - principal.SetScopes(grantedScopes); - - var enrichmentContext = new AuthorityClaimsEnrichmentContext(provider.Context, verification.User, null); - await provider.ClaimsEnricher.EnrichAsync(identity, enrichmentContext, context.CancellationToken).ConfigureAwait(false); - - var successRecord = PasswordGrantAuditHelper.CreatePasswordGrantRecord( - timeProvider, - context.Transaction, - metadata, - AuthEventOutcome.Success, - verification.Message, - clientId, - providerMetadata.Name, - tenant, - verification.User, - username, - scopes: grantedScopes, - retryAfter: null, - failureCode: null, - extraProperties: verification.AuditProperties); - - await auditSink.WriteAsync(successRecord, context.CancellationToken).ConfigureAwait(false); - - context.Principal = principal; - context.HandleRequest(); - activity?.SetTag("authority.subject_id", verification.User.SubjectId); - logger.LogInformation("Password grant issued for {Username} with 
subject {SubjectId}.", verification.User.Username, verification.User.SubjectId); - } -} - -internal static class PasswordGrantAuditHelper -{ - internal static string EnsureCorrelationId(OpenIddictServerTransaction transaction) - { - ArgumentNullException.ThrowIfNull(transaction); - - if (transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.AuditCorrelationProperty, out var value) && - value is string existing && !string.IsNullOrWhiteSpace(existing)) - { - return existing; - } - - var correlation = Activity.Current?.TraceId.ToString() ?? - Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture); - - transaction.Properties[AuthorityOpenIddictConstants.AuditCorrelationProperty] = correlation; - return correlation; - } - - internal static AuthEventRecord CreatePasswordGrantRecord( - TimeProvider timeProvider, - OpenIddictServerTransaction transaction, - AuthorityRateLimiterMetadata? metadata, - AuthEventOutcome outcome, - string? reason = null, - string? clientId = null, - string? providerName = null, - string? tenant = null, - AuthorityUserDescriptor? user = null, - string? username = null, - IEnumerable? scopes = null, - TimeSpan? retryAfter = null, - AuthorityCredentialFailureCode? failureCode = null, - IEnumerable? extraProperties = null, - string? eventType = null) - { - ArgumentNullException.ThrowIfNull(timeProvider); - ArgumentNullException.ThrowIfNull(transaction); - - var correlationId = EnsureCorrelationId(transaction); - var normalizedScopes = NormalizeScopes(scopes); - var normalizedTenant = NormalizeTenant(tenant); - var subject = BuildSubject(user, username, providerName); - var client = BuildClient(clientId, providerName); - var network = BuildNetwork(metadata); - var properties = BuildProperties(user, retryAfter, failureCode, extraProperties); - - return new AuthEventRecord - { - EventType = string.IsNullOrWhiteSpace(eventType) ? "authority.password.grant" : eventType, - OccurredAt = timeProvider.GetUtcNow(), - CorrelationId = correlationId, - Outcome = outcome, - Reason = Normalize(reason), - Subject = subject, - Client = client, - Scopes = normalizedScopes, - Network = network, - Tenant = ClassifiedString.Public(normalizedTenant), - Properties = properties - }; - } - - private static AuthEventSubject? BuildSubject(AuthorityUserDescriptor? user, string? username, string? providerName) - { - var attributes = user?.Attributes; - var normalizedUsername = Normalize(username) ?? Normalize(user?.Username); - var subjectId = Normalize(user?.SubjectId); - var displayName = Normalize(user?.DisplayName); - var attributeProperties = BuildSubjectAttributes(attributes); - - if (string.IsNullOrWhiteSpace(subjectId) && - string.IsNullOrWhiteSpace(normalizedUsername) && - string.IsNullOrWhiteSpace(displayName) && - attributeProperties.Count == 0 && - string.IsNullOrWhiteSpace(providerName)) - { - return null; - } - - return new AuthEventSubject - { - SubjectId = ClassifiedString.Personal(subjectId), - Username = ClassifiedString.Personal(normalizedUsername), - DisplayName = ClassifiedString.Personal(displayName), - Realm = ClassifiedString.Public(Normalize(providerName)), - Attributes = attributeProperties - }; - } - - private static IReadOnlyList BuildSubjectAttributes(IReadOnlyDictionary? 
attributes) - { - if (attributes is null || attributes.Count == 0) - { - return Array.Empty(); - } - - var items = new List(attributes.Count); - foreach (var pair in attributes) - { - if (string.IsNullOrWhiteSpace(pair.Key)) - { - continue; - } - - items.Add(new AuthEventProperty - { - Name = pair.Key, - Value = ClassifiedString.Personal(Normalize(pair.Value)) - }); - } - - return items.Count == 0 ? Array.Empty() : items; - } - - private static AuthEventClient? BuildClient(string? clientId, string? providerName) - { - var normalizedClientId = Normalize(clientId); - var provider = Normalize(providerName); - - if (string.IsNullOrWhiteSpace(normalizedClientId) && string.IsNullOrWhiteSpace(provider)) - { - return null; - } - - return new AuthEventClient - { - ClientId = ClassifiedString.Personal(normalizedClientId), - Name = ClassifiedString.Empty, - Provider = ClassifiedString.Public(provider) - }; - } - - private static AuthEventNetwork? BuildNetwork(AuthorityRateLimiterMetadata? metadata) - { - var remote = Normalize(metadata?.RemoteIp); - var forwarded = Normalize(metadata?.ForwardedFor); - var userAgent = Normalize(metadata?.UserAgent); - - if (string.IsNullOrWhiteSpace(remote) && string.IsNullOrWhiteSpace(forwarded) && string.IsNullOrWhiteSpace(userAgent)) - { - return null; - } - - return new AuthEventNetwork - { - RemoteAddress = ClassifiedString.Personal(remote), - ForwardedFor = ClassifiedString.Personal(forwarded), - UserAgent = ClassifiedString.Personal(userAgent) - }; - } - - private static IReadOnlyList BuildProperties( - AuthorityUserDescriptor? user, - TimeSpan? retryAfter, - AuthorityCredentialFailureCode? failureCode, - IEnumerable? extraProperties) - { - var properties = new List(); - - if (failureCode is { } code) - { - properties.Add(new AuthEventProperty - { - Name = "failure.code", - Value = ClassifiedString.Public(code.ToString()) - }); - } - - if (retryAfter is { } retry && retry > TimeSpan.Zero) - { - var seconds = Math.Ceiling(retry.TotalSeconds).ToString(CultureInfo.InvariantCulture); - properties.Add(new AuthEventProperty - { - Name = "policy.retry_after_seconds", - Value = ClassifiedString.Public(seconds) - }); - } - - if (user is not null) - { - properties.Add(new AuthEventProperty - { - Name = "subject.requires_password_reset", - Value = ClassifiedString.Public(user.RequiresPasswordReset ? "true" : "false") - }); - } - - if (extraProperties is not null) - { - foreach (var property in extraProperties) - { - if (property is null || string.IsNullOrWhiteSpace(property.Name)) - { - continue; - } - - properties.Add(property); - } - } - - return properties.Count == 0 ? Array.Empty() : properties; - } - - private static IReadOnlyList NormalizeScopes(IEnumerable? scopes) - { - if (scopes is null) - { - return Array.Empty(); - } - - var normalized = scopes - .Where(static scope => !string.IsNullOrWhiteSpace(scope)) - .Select(static scope => scope.Trim()) - .Where(static scope => scope.Length > 0) - .Distinct(StringComparer.Ordinal) - .OrderBy(static scope => scope, StringComparer.Ordinal) - .ToArray(); - - return normalized.Length == 0 ? Array.Empty() : normalized; - } - - private static string? Normalize(string? value) - => string.IsNullOrWhiteSpace(value) ? null : value.Trim(); - - internal static string? NormalizeTenant(string? value) - => string.IsNullOrWhiteSpace(value) ? 
null : value.Trim().ToLowerInvariant(); - - internal static AuthEventRecord CreateTamperRecord( - TimeProvider timeProvider, - OpenIddictServerTransaction transaction, - AuthorityRateLimiterMetadata? metadata, - string? clientId, - string? providerName, - string? tenant, - string? username, - IEnumerable? scopes, - IEnumerable unexpectedParameters) - { - var properties = new List - { - new() - { - Name = "request.tampered", - Value = ClassifiedString.Public("true") - } - }; - - if (unexpectedParameters is not null) - { - foreach (var parameter in unexpectedParameters) - { - if (string.IsNullOrWhiteSpace(parameter)) - { - continue; - } - - properties.Add(new AuthEventProperty - { - Name = "request.unexpected_parameter", - Value = ClassifiedString.Public(parameter) - }); - } - } - - var reason = unexpectedParameters is null - ? "Unexpected parameters supplied to password grant request." - : $"Unexpected parameters supplied to password grant request: {string.Join(", ", unexpectedParameters)}."; - - return CreatePasswordGrantRecord( - timeProvider, - transaction, - metadata, - AuthEventOutcome.Failure, - reason, - clientId, - providerName, - tenant, - user: null, - username, - scopes, - retryAfter: null, - failureCode: null, - extraProperties: properties, - eventType: "authority.token.tamper"); - } -} +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Diagnostics; +using System.Globalization; +using System.Linq; +using System.Security.Claims; +using Microsoft.AspNetCore.Http; +using Microsoft.Extensions.Logging; +using OpenIddict.Abstractions; +using OpenIddict.Extensions; +using OpenIddict.Server; +using OpenIddict.Server.AspNetCore; +using StellaOps.Auth.Abstractions; +using StellaOps.Authority.OpenIddict; +using StellaOps.Authority.Plugins.Abstractions; +using StellaOps.Authority.RateLimiting; +using StellaOps.Authority.Storage.Mongo.Documents; +using StellaOps.Authority.Storage.Mongo.Stores; +using StellaOps.Cryptography.Audit; + +namespace StellaOps.Authority.OpenIddict.Handlers; + +internal sealed class ValidatePasswordGrantHandler : IOpenIddictServerHandler +{ + private readonly IAuthorityIdentityProviderRegistry registry; + private readonly ActivitySource activitySource; + private readonly IAuthEventSink auditSink; + private readonly IAuthorityRateLimiterMetadataAccessor metadataAccessor; + private readonly IAuthorityClientStore clientStore; + private readonly TimeProvider timeProvider; + private readonly ILogger logger; + + public ValidatePasswordGrantHandler( + IAuthorityIdentityProviderRegistry registry, + ActivitySource activitySource, + IAuthEventSink auditSink, + IAuthorityRateLimiterMetadataAccessor metadataAccessor, + IAuthorityClientStore clientStore, + TimeProvider timeProvider, + ILogger logger) + { + this.registry = registry ?? throw new ArgumentNullException(nameof(registry)); + this.activitySource = activitySource ?? throw new ArgumentNullException(nameof(activitySource)); + this.auditSink = auditSink ?? throw new ArgumentNullException(nameof(auditSink)); + this.metadataAccessor = metadataAccessor ?? throw new ArgumentNullException(nameof(metadataAccessor)); + this.clientStore = clientStore ?? throw new ArgumentNullException(nameof(clientStore)); + this.timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + this.logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + } + + public async ValueTask HandleAsync(OpenIddictServerEvents.ValidateTokenRequestContext context) + { + ArgumentNullException.ThrowIfNull(context); + + if (!context.Request.IsPasswordGrantType()) + { + return; + } + + using var activity = activitySource.StartActivity("authority.token.validate_password_grant", ActivityKind.Internal); + activity?.SetTag("authority.endpoint", "/token"); + activity?.SetTag("authority.grant_type", OpenIddictConstants.GrantTypes.Password); + activity?.SetTag("authority.username", context.Request.Username ?? string.Empty); + + PasswordGrantAuditHelper.EnsureCorrelationId(context.Transaction); + + var metadata = metadataAccessor.GetMetadata(); + var clientId = context.ClientId ?? context.Request.ClientId; + if (!string.IsNullOrWhiteSpace(clientId)) + { + metadataAccessor.SetClientId(clientId); + } + + var requestedScopesInput = context.Request.GetScopes(); + var requestedScopes = requestedScopesInput.IsDefaultOrEmpty ? Array.Empty() : requestedScopesInput.ToArray(); + context.Transaction.Properties[AuthorityOpenIddictConstants.AuditRequestedScopesProperty] = requestedScopes; + + if (string.IsNullOrWhiteSpace(clientId)) + { + var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( + timeProvider, + context.Transaction, + metadata, + AuthEventOutcome.Failure, + "Client identifier is required for password grant.", + clientId: null, + providerName: null, + tenant: null, + user: null, + username: context.Request.Username, + scopes: requestedScopes, + retryAfter: null, + failureCode: AuthorityCredentialFailureCode.InvalidCredentials, + extraProperties: null); + + await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); + + context.Reject(OpenIddictConstants.Errors.InvalidClient, "Client identifier is required."); + logger.LogWarning("Password grant validation failed: missing client_id for {Username}.", context.Request.Username); + return; + } + + var clientDocument = await clientStore.FindByClientIdAsync(clientId, context.CancellationToken).ConfigureAwait(false); + if (clientDocument is null || clientDocument.Disabled) + { + var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( + timeProvider, + context.Transaction, + metadata, + AuthEventOutcome.Failure, + "Client is not permitted for password grant.", + clientId, + providerName: null, + tenant: null, + user: null, + username: context.Request.Username, + scopes: requestedScopes, + retryAfter: null, + failureCode: AuthorityCredentialFailureCode.InvalidCredentials, + extraProperties: null); + + await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); + + context.Reject(OpenIddictConstants.Errors.InvalidClient, "The specified client is not permitted."); + logger.LogWarning("Password grant validation failed: client {ClientId} disabled or missing.", clientId); + return; + } + + context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTransactionProperty] = clientDocument; + context.Transaction.Properties[AuthorityOpenIddictConstants.AuditClientIdProperty] = clientId; + + var tenant = PasswordGrantAuditHelper.NormalizeTenant(clientDocument.Properties.TryGetValue(AuthorityClientMetadataKeys.Tenant, out var tenantValue) ? 
tenantValue : null); + if (!string.IsNullOrWhiteSpace(tenant)) + { + context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTenantProperty] = tenant; + metadataAccessor.SetTenant(tenant); + activity?.SetTag("authority.tenant", tenant); + } + + var allowedGrantTypes = ClientCredentialHandlerHelpers.Split(clientDocument.Properties, AuthorityClientMetadataKeys.AllowedGrantTypes); + if (allowedGrantTypes.Count > 0 && + !allowedGrantTypes.Any(static grant => string.Equals(grant, OpenIddictConstants.GrantTypes.Password, StringComparison.Ordinal))) + { + var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( + timeProvider, + context.Transaction, + metadata, + AuthEventOutcome.Failure, + "Password grant is not permitted for this client.", + clientId, + providerName: null, + tenant, + user: null, + username: context.Request.Username, + scopes: requestedScopes, + retryAfter: null, + failureCode: AuthorityCredentialFailureCode.InvalidCredentials, + extraProperties: null); + + await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); + + context.Reject(OpenIddictConstants.Errors.UnauthorizedClient, "Password grant is not permitted for this client."); + logger.LogWarning("Password grant validation failed for client {ClientId}: grant type not allowed.", clientId); + return; + } + + var allowedScopes = ClientCredentialHandlerHelpers.Split(clientDocument.Properties, AuthorityClientMetadataKeys.AllowedScopes); + var resolvedScopes = ClientCredentialHandlerHelpers.ResolveGrantedScopes(allowedScopes, requestedScopes); + + if (resolvedScopes.InvalidScope is not null) + { + context.Transaction.Properties[AuthorityOpenIddictConstants.AuditInvalidScopeProperty] = resolvedScopes.InvalidScope; + + var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( + timeProvider, + context.Transaction, + metadata, + AuthEventOutcome.Failure, + $"Scope '{resolvedScopes.InvalidScope}' is not permitted for this client.", + clientId, + providerName: null, + tenant, + user: null, + username: context.Request.Username, + scopes: requestedScopes, + retryAfter: null, + failureCode: AuthorityCredentialFailureCode.InvalidCredentials, + extraProperties: null); + + await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); + + context.Reject(OpenIddictConstants.Errors.InvalidScope, $"Scope '{resolvedScopes.InvalidScope}' is not allowed for this client."); + logger.LogWarning("Password grant validation failed for client {ClientId}: scope {Scope} not permitted.", clientId, resolvedScopes.InvalidScope); + return; + } + + context.Transaction.Properties[AuthorityOpenIddictConstants.AuditGrantedScopesProperty] = resolvedScopes.Scopes; + context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty] = resolvedScopes.Scopes; + + var unexpectedParameters = TokenRequestTamperInspector.GetUnexpectedPasswordGrantParameters(context.Request); + if (unexpectedParameters.Count > 0) + { + var providerHint = context.Request.GetParameter(AuthorityOpenIddictConstants.ProviderParameterName)?.Value?.ToString(); + var tamperRecord = PasswordGrantAuditHelper.CreateTamperRecord( + timeProvider, + context.Transaction, + metadata, + clientId, + providerHint, + tenant, + context.Request.Username, + requestedScopes, + unexpectedParameters); + + await auditSink.WriteAsync(tamperRecord, context.CancellationToken).ConfigureAwait(false); + } + + var selection = AuthorityIdentityProviderSelector.ResolvePasswordProvider(context.Request, registry); + if 
(!selection.Succeeded) + { + var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( + timeProvider, + context.Transaction, + metadata, + AuthEventOutcome.Failure, + selection.Description, + clientId, + providerName: null, + tenant, + user: null, + username: context.Request.Username, + scopes: requestedScopes, + retryAfter: null, + failureCode: AuthorityCredentialFailureCode.InvalidCredentials, + extraProperties: null); + + await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); + + context.Reject(selection.Error!, selection.Description); + logger.LogWarning("Password grant validation failed for {Username}: {Reason}.", context.Request.Username, selection.Description); + return; + } + + var selectedProvider = selection.Provider!; + + if (string.IsNullOrWhiteSpace(context.Request.Username) || string.IsNullOrEmpty(context.Request.Password)) + { + var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( + timeProvider, + context.Transaction, + metadata, + AuthEventOutcome.Failure, + "Both username and password must be provided.", + clientId, + providerName: selectedProvider.Name, + tenant, + user: null, + username: context.Request.Username, + scopes: requestedScopes, + retryAfter: null, + failureCode: AuthorityCredentialFailureCode.InvalidCredentials, + extraProperties: null); + + await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); + + context.Reject(OpenIddictConstants.Errors.InvalidRequest, "Both username and password must be provided."); + logger.LogWarning("Password grant validation failed: missing credentials for {Username}.", context.Request.Username); + return; + } + + context.Transaction.Properties[AuthorityOpenIddictConstants.ProviderTransactionProperty] = selectedProvider.Name; + activity?.SetTag("authority.identity_provider", selectedProvider.Name); + logger.LogInformation("Password grant validation succeeded for {Username} using provider {Provider}.", context.Request.Username, selectedProvider.Name); + } +} + +internal sealed class HandlePasswordGrantHandler : IOpenIddictServerHandler +{ + private readonly IAuthorityIdentityProviderRegistry registry; + private readonly IAuthorityClientStore clientStore; + private readonly ActivitySource activitySource; + private readonly IAuthEventSink auditSink; + private readonly IAuthorityRateLimiterMetadataAccessor metadataAccessor; + private readonly TimeProvider timeProvider; + private readonly ILogger logger; + + public HandlePasswordGrantHandler( + IAuthorityIdentityProviderRegistry registry, + IAuthorityClientStore clientStore, + ActivitySource activitySource, + IAuthEventSink auditSink, + IAuthorityRateLimiterMetadataAccessor metadataAccessor, + TimeProvider timeProvider, + ILogger logger) + { + this.registry = registry ?? throw new ArgumentNullException(nameof(registry)); + this.clientStore = clientStore ?? throw new ArgumentNullException(nameof(clientStore)); + this.activitySource = activitySource ?? throw new ArgumentNullException(nameof(activitySource)); + this.auditSink = auditSink ?? throw new ArgumentNullException(nameof(auditSink)); + this.metadataAccessor = metadataAccessor ?? throw new ArgumentNullException(nameof(metadataAccessor)); + this.timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + this.logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + } + + public async ValueTask HandleAsync(OpenIddictServerEvents.HandleTokenRequestContext context) + { + ArgumentNullException.ThrowIfNull(context); + + if (!context.Request.IsPasswordGrantType()) + { + return; + } + + using var activity = activitySource.StartActivity("authority.token.handle_password_grant", ActivityKind.Internal); + activity?.SetTag("authority.endpoint", "/token"); + activity?.SetTag("authority.grant_type", OpenIddictConstants.GrantTypes.Password); + activity?.SetTag("authority.username", context.Request.Username ?? string.Empty); + + PasswordGrantAuditHelper.EnsureCorrelationId(context.Transaction); + + var metadata = metadataAccessor.GetMetadata(); + var clientId = context.ClientId ?? context.Request.ClientId; + if (!string.IsNullOrWhiteSpace(clientId)) + { + metadataAccessor.SetClientId(clientId); + } + + var requestedScopesInput = context.Request.GetScopes(); + var requestedScopes = requestedScopesInput.IsDefaultOrEmpty ? Array.Empty() : requestedScopesInput.ToArray(); + var grantedScopes = context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.ClientGrantedScopesProperty, out var grantedValue) && + grantedValue is string[] grantedArray + ? (IReadOnlyList)grantedArray + : requestedScopes; + + AuthorityClientDocument? clientDocument = null; + if (context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.ClientTransactionProperty, out var clientValue) && + clientValue is AuthorityClientDocument storedClient) + { + clientDocument = storedClient; + } + else if (!string.IsNullOrWhiteSpace(clientId)) + { + clientDocument = await clientStore.FindByClientIdAsync(clientId, context.CancellationToken).ConfigureAwait(false); + } + + if (clientDocument is null || clientDocument.Disabled) + { + var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( + timeProvider, + context.Transaction, + metadata, + AuthEventOutcome.Failure, + "Client is not permitted for password grant.", + clientId, + providerName: null, + tenant: null, + user: null, + username: context.Request.Username, + scopes: requestedScopes, + retryAfter: null, + failureCode: AuthorityCredentialFailureCode.InvalidCredentials, + extraProperties: null); + + await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); + + context.Reject(OpenIddictConstants.Errors.InvalidClient, "The specified client is not permitted."); + logger.LogWarning("Password grant handling failed: client {ClientId} disabled or missing.", clientId); + return; + } + + context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTransactionProperty] = clientDocument; + + if (grantedScopes.Count == 0) + { + var allowedScopes = ClientCredentialHandlerHelpers.Split(clientDocument.Properties, AuthorityClientMetadataKeys.AllowedScopes); + var resolvedScopes = ClientCredentialHandlerHelpers.ResolveGrantedScopes(allowedScopes, requestedScopes); + grantedScopes = resolvedScopes.InvalidScope is null ? resolvedScopes.Scopes : Array.Empty(); + context.Transaction.Properties[AuthorityOpenIddictConstants.ClientGrantedScopesProperty] = grantedScopes; + } + + var tenant = PasswordGrantAuditHelper.NormalizeTenant( + clientDocument.Properties.TryGetValue(AuthorityClientMetadataKeys.Tenant, out var tenantValue) ? 
tenantValue : null); + if (!string.IsNullOrWhiteSpace(tenant)) + { + context.Transaction.Properties[AuthorityOpenIddictConstants.ClientTenantProperty] = tenant; + metadataAccessor.SetTenant(tenant); + activity?.SetTag("authority.tenant", tenant); + } + + var providerName = context.Transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.ProviderTransactionProperty, out var value) + ? value as string + : null; + + AuthorityIdentityProviderMetadata? providerMetadata = null; + if (!string.IsNullOrWhiteSpace(providerName)) + { + if (!registry.TryGet(providerName!, out providerMetadata)) + { + var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( + timeProvider, + context.Transaction, + metadata, + AuthEventOutcome.Failure, + "Unable to resolve the requested identity provider.", + clientId, + providerName, + tenant, + user: null, + username: context.Request.Username, + scopes: requestedScopes, + retryAfter: null, + failureCode: AuthorityCredentialFailureCode.UnknownError, + extraProperties: null); + + await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); + + context.Reject(OpenIddictConstants.Errors.ServerError, "Unable to resolve the requested identity provider."); + logger.LogError("Password grant handling failed: provider {Provider} not found for user {Username}.", providerName, context.Request.Username); + return; + } + } + else + { + var selection = AuthorityIdentityProviderSelector.ResolvePasswordProvider(context.Request, registry); + if (!selection.Succeeded) + { + var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( + timeProvider, + context.Transaction, + metadata, + AuthEventOutcome.Failure, + selection.Description, + clientId, + providerName: null, + tenant, + user: null, + username: context.Request.Username, + scopes: requestedScopes, + retryAfter: null, + failureCode: AuthorityCredentialFailureCode.InvalidCredentials, + extraProperties: null); + + await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); + + context.Reject(selection.Error!, selection.Description); + logger.LogWarning("Password grant handling rejected {Username}: {Reason}.", context.Request.Username, selection.Description); + return; + } + + providerMetadata = selection.Provider; + providerName = providerMetadata?.Name; + } + + if (providerMetadata is null) + { + throw new InvalidOperationException("No identity provider metadata resolved for password grant."); + } + + await using var providerHandle = await registry.AcquireAsync(providerMetadata.Name, context.CancellationToken).ConfigureAwait(false); + var provider = providerHandle.Provider; + + var username = context.Request.Username; + var password = context.Request.Password; + if (string.IsNullOrWhiteSpace(username) || string.IsNullOrEmpty(password)) + { + var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( + timeProvider, + context.Transaction, + metadata, + AuthEventOutcome.Failure, + "Both username and password must be provided.", + clientId, + providerMetadata.Name, + tenant, + user: null, + username: username, + scopes: requestedScopes, + retryAfter: null, + failureCode: AuthorityCredentialFailureCode.InvalidCredentials, + extraProperties: null); + + await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); + + context.Reject(OpenIddictConstants.Errors.InvalidRequest, "Both username and password must be provided."); + logger.LogWarning("Password grant handling rejected: missing credentials for {Username}.", username); + return; + } + 
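// --- Editor's note: illustrative sketch, not part of the patch ---
// Requested and granted scopes reach the audit record through NormalizeScopes,
// which trims entries, drops blanks, removes ordinal duplicates and sorts
// ordinally so the emitted scope list is deterministic. A self-contained mirror
// of that pipeline (BCL + LINQ only) is shown here; the scope names in the demo
// are placeholders.
using System;
using System.Linq;

static class ScopeNormalizationSketch
{
    public static string[] Normalize(string[]? scopes)
        => scopes is null
            ? Array.Empty<string>()
            : scopes
                .Where(static s => !string.IsNullOrWhiteSpace(s))
                .Select(static s => s.Trim())
                .Distinct(StringComparer.Ordinal)
                .OrderBy(static s => s, StringComparer.Ordinal)
                .ToArray();

    public static void Demo()
    {
        // Duplicates and blanks are removed; output order is stable: jobs:read, jobs:write.
        var result = Normalize(new[] { " jobs:write", "jobs:read", "jobs:read", "  " });
        Console.WriteLine(string.Join(", ", result));
    }
}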
+ var verification = await provider.Credentials.VerifyPasswordAsync( + username, + password, + context.CancellationToken).ConfigureAwait(false); + + if (!verification.Succeeded || verification.User is null) + { + var outcome = verification.FailureCode == AuthorityCredentialFailureCode.LockedOut + ? AuthEventOutcome.LockedOut + : AuthEventOutcome.Failure; + + var record = PasswordGrantAuditHelper.CreatePasswordGrantRecord( + timeProvider, + context.Transaction, + metadata, + outcome, + verification.Message, + clientId, + providerMetadata.Name, + tenant, + verification.User, + username, + scopes: requestedScopes, + retryAfter: verification.RetryAfter, + failureCode: verification.FailureCode, + extraProperties: verification.AuditProperties); + + await auditSink.WriteAsync(record, context.CancellationToken).ConfigureAwait(false); + + context.Reject( + OpenIddictConstants.Errors.InvalidGrant, + verification.Message ?? "Invalid username or password."); + logger.LogWarning("Password verification failed for {Username}: {Message}.", username, verification.Message); + return; + } + + metadataAccessor.SetSubjectId(verification.User.SubjectId); + + var identity = new ClaimsIdentity( + OpenIddictServerAspNetCoreDefaults.AuthenticationScheme, + OpenIddictConstants.Claims.Name, + OpenIddictConstants.Claims.Role); + + identity.AddClaim(new Claim(OpenIddictConstants.Claims.Subject, verification.User.SubjectId)); + identity.AddClaim(new Claim(OpenIddictConstants.Claims.PreferredUsername, verification.User.Username)); + + if (!string.IsNullOrWhiteSpace(verification.User.DisplayName)) + { + identity.AddClaim(new Claim(OpenIddictConstants.Claims.Name, verification.User.DisplayName!)); + } + + foreach (var role in verification.User.Roles) + { + identity.AddClaim(new Claim(OpenIddictConstants.Claims.Role, role)); + } + + if (!string.IsNullOrWhiteSpace(tenant)) + { + identity.SetClaim(StellaOpsClaimTypes.Tenant, tenant); + } + + identity.SetDestinations(static claim => claim.Type switch + { + OpenIddictConstants.Claims.Subject => new[] { OpenIddictConstants.Destinations.AccessToken, OpenIddictConstants.Destinations.IdentityToken }, + OpenIddictConstants.Claims.Name => new[] { OpenIddictConstants.Destinations.AccessToken, OpenIddictConstants.Destinations.IdentityToken }, + OpenIddictConstants.Claims.PreferredUsername => new[] { OpenIddictConstants.Destinations.AccessToken }, + OpenIddictConstants.Claims.Role => new[] { OpenIddictConstants.Destinations.AccessToken }, + _ => new[] { OpenIddictConstants.Destinations.AccessToken } + }); + + var principal = new ClaimsPrincipal(identity); + principal.SetScopes(grantedScopes); + + var enrichmentContext = new AuthorityClaimsEnrichmentContext(provider.Context, verification.User, null); + await provider.ClaimsEnricher.EnrichAsync(identity, enrichmentContext, context.CancellationToken).ConfigureAwait(false); + + var successRecord = PasswordGrantAuditHelper.CreatePasswordGrantRecord( + timeProvider, + context.Transaction, + metadata, + AuthEventOutcome.Success, + verification.Message, + clientId, + providerMetadata.Name, + tenant, + verification.User, + username, + scopes: grantedScopes, + retryAfter: null, + failureCode: null, + extraProperties: verification.AuditProperties); + + await auditSink.WriteAsync(successRecord, context.CancellationToken).ConfigureAwait(false); + + context.Principal = principal; + context.HandleRequest(); + activity?.SetTag("authority.subject_id", verification.User.SubjectId); + logger.LogInformation("Password grant issued for {Username} with 
subject {SubjectId}.", verification.User.Username, verification.User.SubjectId); + } +} + +internal static class PasswordGrantAuditHelper +{ + internal static string EnsureCorrelationId(OpenIddictServerTransaction transaction) + { + ArgumentNullException.ThrowIfNull(transaction); + + if (transaction.Properties.TryGetValue(AuthorityOpenIddictConstants.AuditCorrelationProperty, out var value) && + value is string existing && !string.IsNullOrWhiteSpace(existing)) + { + return existing; + } + + var correlation = Activity.Current?.TraceId.ToString() ?? + Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture); + + transaction.Properties[AuthorityOpenIddictConstants.AuditCorrelationProperty] = correlation; + return correlation; + } + + internal static AuthEventRecord CreatePasswordGrantRecord( + TimeProvider timeProvider, + OpenIddictServerTransaction transaction, + AuthorityRateLimiterMetadata? metadata, + AuthEventOutcome outcome, + string? reason = null, + string? clientId = null, + string? providerName = null, + string? tenant = null, + AuthorityUserDescriptor? user = null, + string? username = null, + IEnumerable? scopes = null, + TimeSpan? retryAfter = null, + AuthorityCredentialFailureCode? failureCode = null, + IEnumerable? extraProperties = null, + string? eventType = null) + { + ArgumentNullException.ThrowIfNull(timeProvider); + ArgumentNullException.ThrowIfNull(transaction); + + var correlationId = EnsureCorrelationId(transaction); + var normalizedScopes = NormalizeScopes(scopes); + var normalizedTenant = NormalizeTenant(tenant); + var subject = BuildSubject(user, username, providerName); + var client = BuildClient(clientId, providerName); + var network = BuildNetwork(metadata); + var properties = BuildProperties(user, retryAfter, failureCode, extraProperties); + + return new AuthEventRecord + { + EventType = string.IsNullOrWhiteSpace(eventType) ? "authority.password.grant" : eventType, + OccurredAt = timeProvider.GetUtcNow(), + CorrelationId = correlationId, + Outcome = outcome, + Reason = Normalize(reason), + Subject = subject, + Client = client, + Scopes = normalizedScopes, + Network = network, + Tenant = ClassifiedString.Public(normalizedTenant), + Properties = properties + }; + } + + private static AuthEventSubject? BuildSubject(AuthorityUserDescriptor? user, string? username, string? providerName) + { + var attributes = user?.Attributes; + var normalizedUsername = Normalize(username) ?? Normalize(user?.Username); + var subjectId = Normalize(user?.SubjectId); + var displayName = Normalize(user?.DisplayName); + var attributeProperties = BuildSubjectAttributes(attributes); + + if (string.IsNullOrWhiteSpace(subjectId) && + string.IsNullOrWhiteSpace(normalizedUsername) && + string.IsNullOrWhiteSpace(displayName) && + attributeProperties.Count == 0 && + string.IsNullOrWhiteSpace(providerName)) + { + return null; + } + + return new AuthEventSubject + { + SubjectId = ClassifiedString.Personal(subjectId), + Username = ClassifiedString.Personal(normalizedUsername), + DisplayName = ClassifiedString.Personal(displayName), + Realm = ClassifiedString.Public(Normalize(providerName)), + Attributes = attributeProperties + }; + } + + private static IReadOnlyList BuildSubjectAttributes(IReadOnlyDictionary? 
attributes) + { + if (attributes is null || attributes.Count == 0) + { + return Array.Empty(); + } + + var items = new List(attributes.Count); + foreach (var pair in attributes) + { + if (string.IsNullOrWhiteSpace(pair.Key)) + { + continue; + } + + items.Add(new AuthEventProperty + { + Name = pair.Key, + Value = ClassifiedString.Personal(Normalize(pair.Value)) + }); + } + + return items.Count == 0 ? Array.Empty() : items; + } + + private static AuthEventClient? BuildClient(string? clientId, string? providerName) + { + var normalizedClientId = Normalize(clientId); + var provider = Normalize(providerName); + + if (string.IsNullOrWhiteSpace(normalizedClientId) && string.IsNullOrWhiteSpace(provider)) + { + return null; + } + + return new AuthEventClient + { + ClientId = ClassifiedString.Personal(normalizedClientId), + Name = ClassifiedString.Empty, + Provider = ClassifiedString.Public(provider) + }; + } + + private static AuthEventNetwork? BuildNetwork(AuthorityRateLimiterMetadata? metadata) + { + var remote = Normalize(metadata?.RemoteIp); + var forwarded = Normalize(metadata?.ForwardedFor); + var userAgent = Normalize(metadata?.UserAgent); + + if (string.IsNullOrWhiteSpace(remote) && string.IsNullOrWhiteSpace(forwarded) && string.IsNullOrWhiteSpace(userAgent)) + { + return null; + } + + return new AuthEventNetwork + { + RemoteAddress = ClassifiedString.Personal(remote), + ForwardedFor = ClassifiedString.Personal(forwarded), + UserAgent = ClassifiedString.Personal(userAgent) + }; + } + + private static IReadOnlyList BuildProperties( + AuthorityUserDescriptor? user, + TimeSpan? retryAfter, + AuthorityCredentialFailureCode? failureCode, + IEnumerable? extraProperties) + { + var properties = new List(); + + if (failureCode is { } code) + { + properties.Add(new AuthEventProperty + { + Name = "failure.code", + Value = ClassifiedString.Public(code.ToString()) + }); + } + + if (retryAfter is { } retry && retry > TimeSpan.Zero) + { + var seconds = Math.Ceiling(retry.TotalSeconds).ToString(CultureInfo.InvariantCulture); + properties.Add(new AuthEventProperty + { + Name = "policy.retry_after_seconds", + Value = ClassifiedString.Public(seconds) + }); + } + + if (user is not null) + { + properties.Add(new AuthEventProperty + { + Name = "subject.requires_password_reset", + Value = ClassifiedString.Public(user.RequiresPasswordReset ? "true" : "false") + }); + } + + if (extraProperties is not null) + { + foreach (var property in extraProperties) + { + if (property is null || string.IsNullOrWhiteSpace(property.Name)) + { + continue; + } + + properties.Add(property); + } + } + + return properties.Count == 0 ? Array.Empty() : properties; + } + + private static IReadOnlyList NormalizeScopes(IEnumerable? scopes) + { + if (scopes is null) + { + return Array.Empty(); + } + + var normalized = scopes + .Where(static scope => !string.IsNullOrWhiteSpace(scope)) + .Select(static scope => scope.Trim()) + .Where(static scope => scope.Length > 0) + .Distinct(StringComparer.Ordinal) + .OrderBy(static scope => scope, StringComparer.Ordinal) + .ToArray(); + + return normalized.Length == 0 ? Array.Empty() : normalized; + } + + private static string? Normalize(string? value) + => string.IsNullOrWhiteSpace(value) ? null : value.Trim(); + + internal static string? NormalizeTenant(string? value) + => string.IsNullOrWhiteSpace(value) ? 
null : value.Trim().ToLowerInvariant(); + + internal static AuthEventRecord CreateTamperRecord( + TimeProvider timeProvider, + OpenIddictServerTransaction transaction, + AuthorityRateLimiterMetadata? metadata, + string? clientId, + string? providerName, + string? tenant, + string? username, + IEnumerable? scopes, + IEnumerable unexpectedParameters) + { + var properties = new List + { + new() + { + Name = "request.tampered", + Value = ClassifiedString.Public("true") + } + }; + + if (unexpectedParameters is not null) + { + foreach (var parameter in unexpectedParameters) + { + if (string.IsNullOrWhiteSpace(parameter)) + { + continue; + } + + properties.Add(new AuthEventProperty + { + Name = "request.unexpected_parameter", + Value = ClassifiedString.Public(parameter) + }); + } + } + + var reason = unexpectedParameters is null + ? "Unexpected parameters supplied to password grant request." + : $"Unexpected parameters supplied to password grant request: {string.Join(", ", unexpectedParameters)}."; + + return CreatePasswordGrantRecord( + timeProvider, + transaction, + metadata, + AuthEventOutcome.Failure, + reason, + clientId, + providerName, + tenant, + user: null, + username, + scopes, + retryAfter: null, + failureCode: null, + extraProperties: properties, + eventType: "authority.token.tamper"); + } +} diff --git a/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/RevocationHandlers.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/RevocationHandlers.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/RevocationHandlers.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/RevocationHandlers.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/TokenPersistenceHandlers.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/TokenPersistenceHandlers.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/TokenPersistenceHandlers.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/TokenPersistenceHandlers.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/TokenValidationHandlers.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/TokenValidationHandlers.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/TokenValidationHandlers.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/TokenValidationHandlers.cs index 14940be3..b7c8cf44 100644 --- a/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/TokenValidationHandlers.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/Handlers/TokenValidationHandlers.cs @@ -1,494 +1,494 @@ -using System; -using System.Collections.Generic; -using System.Diagnostics; -using System.Globalization; -using System.Security.Claims; -using System.Text.Json; -using Microsoft.Extensions.Logging; -using OpenIddict.Abstractions; -using OpenIddict.Extensions; -using OpenIddict.Server; -using StellaOps.Auth.Abstractions; -using MongoDB.Driver; -using StellaOps.Authority.OpenIddict; -using StellaOps.Authority.Plugins.Abstractions; -using StellaOps.Authority.RateLimiting; -using StellaOps.Authority.Storage.Mongo.Documents; -using StellaOps.Authority.Storage.Mongo.Sessions; -using StellaOps.Authority.Storage.Mongo.Stores; -using StellaOps.Cryptography.Audit; 
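// --- Editor's illustration (not part of the patch) --------------------------
// A minimal sketch of how the helpers in this patch are meant to be wired
// together inside the password-grant handler defined earlier:
// TokenRequestTamperInspector (shown further down in this patch) reports any
// request parameters outside the common and grant-specific allow-lists,
// PasswordGrantAuditHelper.CreateTamperRecord turns them into an
// "authority.token.tamper" audit event, and the request is rejected before
// credentials are ever checked. Local names (request, metadata,
// requestedScopes, auditSink, timeProvider, clientId, tenant, username)
// mirror the handler code above; treat the exact wiring as an assumption
// for illustration, not the authoritative implementation.
var unexpected = TokenRequestTamperInspector.GetUnexpectedPasswordGrantParameters(request);
if (unexpected.Count > 0)
{
    var tamperRecord = PasswordGrantAuditHelper.CreateTamperRecord(
        timeProvider,
        context.Transaction,
        metadata,
        clientId,
        providerMetadata.Name,
        tenant,
        username,
        scopes: requestedScopes,
        unexpectedParameters: unexpected);

    await auditSink.WriteAsync(tamperRecord, context.CancellationToken).ConfigureAwait(false);

    context.Reject(
        OpenIddictConstants.Errors.InvalidRequest,
        "The token request contains unexpected parameters.");
    return;
}
// ---------------------------------------------------------------------------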
-using StellaOps.Authority.Security; - -namespace StellaOps.Authority.OpenIddict.Handlers; - -internal sealed class ValidateAccessTokenHandler : IOpenIddictServerHandler -{ - private readonly IAuthorityTokenStore tokenStore; - private readonly IAuthorityMongoSessionAccessor sessionAccessor; - private readonly IAuthorityClientStore clientStore; - private readonly IAuthorityIdentityProviderRegistry registry; - private readonly IAuthorityRateLimiterMetadataAccessor metadataAccessor; - private readonly IAuthEventSink auditSink; - private readonly TimeProvider clock; - private readonly ActivitySource activitySource; - private readonly ILogger logger; - - public ValidateAccessTokenHandler( - IAuthorityTokenStore tokenStore, - IAuthorityMongoSessionAccessor sessionAccessor, - IAuthorityClientStore clientStore, - IAuthorityIdentityProviderRegistry registry, - IAuthorityRateLimiterMetadataAccessor metadataAccessor, - IAuthEventSink auditSink, - TimeProvider clock, - ActivitySource activitySource, - ILogger logger) - { - this.tokenStore = tokenStore ?? throw new ArgumentNullException(nameof(tokenStore)); - this.sessionAccessor = sessionAccessor ?? throw new ArgumentNullException(nameof(sessionAccessor)); - this.clientStore = clientStore ?? throw new ArgumentNullException(nameof(clientStore)); - this.registry = registry ?? throw new ArgumentNullException(nameof(registry)); - this.metadataAccessor = metadataAccessor ?? throw new ArgumentNullException(nameof(metadataAccessor)); - this.auditSink = auditSink ?? throw new ArgumentNullException(nameof(auditSink)); - this.clock = clock ?? throw new ArgumentNullException(nameof(clock)); - this.activitySource = activitySource ?? throw new ArgumentNullException(nameof(activitySource)); - this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); - } - - public async ValueTask HandleAsync(OpenIddictServerEvents.ValidateTokenContext context) - { - ArgumentNullException.ThrowIfNull(context); - - if (context.Principal is null) - { - return; - } - - if (context.EndpointType is not (OpenIddictServerEndpointType.Token or OpenIddictServerEndpointType.Introspection)) - { - return; - } - - static string? NormalizeTenant(string? value) - => string.IsNullOrWhiteSpace(value) ? null : value.Trim().ToLowerInvariant(); - - static string NormalizeProject(string? value) - => string.IsNullOrWhiteSpace(value) - ? StellaOpsTenancyDefaults.AnyProject - : value.Trim().ToLowerInvariant(); - - var identity = context.Principal.Identity as ClaimsIdentity; - var principalTenant = NormalizeTenant(context.Principal.GetClaim(StellaOpsClaimTypes.Tenant)); - var principalProject = NormalizeProject(context.Principal.GetClaim(StellaOpsClaimTypes.Project)); - - using var activity = activitySource.StartActivity("authority.token.validate_access", ActivityKind.Internal); - activity?.SetTag("authority.endpoint", context.EndpointType switch - { - OpenIddictServerEndpointType.Token => "/token", - OpenIddictServerEndpointType.Introspection => "/introspect", - _ => context.EndpointType.ToString() - }); - - var tokenId = !string.IsNullOrWhiteSpace(context.TokenId) - ? context.TokenId - : context.Principal.GetClaim(OpenIddictConstants.Claims.JwtId); - - var session = await sessionAccessor.GetSessionAsync(context.CancellationToken).ConfigureAwait(false); - - AuthorityTokenDocument? 
tokenDocument = null; - if (!string.IsNullOrWhiteSpace(tokenId)) - { - tokenDocument = await tokenStore.FindByTokenIdAsync(tokenId, context.CancellationToken, session).ConfigureAwait(false); - if (tokenDocument is not null) - { - if (!string.Equals(tokenDocument.Status, "valid", StringComparison.OrdinalIgnoreCase)) - { - context.Reject(OpenIddictConstants.Errors.InvalidToken, "The token is no longer active."); - logger.LogWarning("Access token {TokenId} rejected: status {Status}.", tokenId, tokenDocument.Status); - return; - } - - if (tokenDocument.ExpiresAt is { } expiresAt && expiresAt <= clock.GetUtcNow()) - { - context.Reject(OpenIddictConstants.Errors.InvalidToken, "The token has expired."); - logger.LogWarning("Access token {TokenId} rejected: expired at {ExpiresAt:o}.", tokenId, expiresAt); - return; - } - - context.Transaction.Properties[AuthorityOpenIddictConstants.TokenTransactionProperty] = tokenDocument; - activity?.SetTag("authority.token_id", tokenDocument.TokenId); - } - } - - if (tokenDocument is not null) - { - EnsureSenderConstraintClaims(context.Principal, tokenDocument); - - var documentTenant = NormalizeTenant(tokenDocument.Tenant); - if (documentTenant is not null) - { - if (principalTenant is null) - { - if (identity is not null) - { - identity.SetClaim(StellaOpsClaimTypes.Tenant, documentTenant); - principalTenant = documentTenant; - } - } - else if (!string.Equals(principalTenant, documentTenant, StringComparison.Ordinal)) - { - context.Reject(OpenIddictConstants.Errors.InvalidToken, "The token tenant does not match the issued tenant."); - logger.LogWarning( - "Access token validation failed: tenant mismatch for token {TokenId}. PrincipalTenant={PrincipalTenant}; DocumentTenant={DocumentTenant}.", - tokenDocument.TokenId, - principalTenant, - documentTenant); - return; - } - - metadataAccessor.SetTenant(documentTenant); - } - - var documentProject = NormalizeProject(tokenDocument.Project); - if (identity is not null) - { - var existingProject = identity.FindFirst(StellaOpsClaimTypes.Project)?.Value; - if (string.IsNullOrWhiteSpace(existingProject)) - { - identity.SetClaim(StellaOpsClaimTypes.Project, documentProject); - principalProject = documentProject; - } - else - { - var normalizedExistingProject = NormalizeProject(existingProject); - if (!string.Equals(normalizedExistingProject, documentProject, StringComparison.Ordinal)) - { - context.Reject(OpenIddictConstants.Errors.InvalidToken, "The token project does not match the issued project."); - logger.LogWarning( - "Access token validation failed: project mismatch for token {TokenId}. PrincipalProject={PrincipalProject}; DocumentProject={DocumentProject}.", - tokenDocument.TokenId, - normalizedExistingProject, - documentProject); - return; - } - - principalProject = normalizedExistingProject; - } - } - else if (!string.Equals(principalProject, documentProject, StringComparison.Ordinal)) - { - context.Reject(OpenIddictConstants.Errors.InvalidToken, "The token project does not match the issued project."); - logger.LogWarning( - "Access token validation failed: project mismatch for token {TokenId}. 
PrincipalProject={PrincipalProject}; DocumentProject={DocumentProject}.", - tokenDocument.TokenId, - principalProject, - documentProject); - return; - } - else - { - principalProject = documentProject; - } - - metadataAccessor.SetProject(documentProject); - } - - if (!context.IsRejected && tokenDocument is not null) - { - await TrackTokenUsageAsync(context, tokenDocument, context.Principal, session).ConfigureAwait(false); - } - - var clientId = context.Principal.GetClaim(OpenIddictConstants.Claims.ClientId); - AuthorityClientDocument? clientDocument = null; - if (!string.IsNullOrWhiteSpace(clientId)) - { - clientDocument = await clientStore.FindByClientIdAsync(clientId, context.CancellationToken, session).ConfigureAwait(false); - if (clientDocument is null || clientDocument.Disabled) - { - context.Reject(OpenIddictConstants.Errors.InvalidClient, "The client associated with the token is not permitted."); - logger.LogWarning("Access token validation failed: client {ClientId} disabled or missing.", clientId); - return; - } - } - - if (clientDocument is not null && - clientDocument.Properties.TryGetValue(AuthorityClientMetadataKeys.Tenant, out var clientTenantRaw)) - { - var clientTenant = NormalizeTenant(clientTenantRaw); - if (clientTenant is not null) - { - if (principalTenant is null) - { - if (identity is not null) - { - identity.SetClaim(StellaOpsClaimTypes.Tenant, clientTenant); - principalTenant = clientTenant; - } - } - else if (!string.Equals(principalTenant, clientTenant, StringComparison.Ordinal)) - { - context.Reject(OpenIddictConstants.Errors.InvalidToken, "The token tenant does not match the registered client tenant."); - logger.LogWarning( - "Access token validation failed: tenant mismatch for client {ClientId}. PrincipalTenant={PrincipalTenant}; ClientTenant={ClientTenant}.", - clientId, - principalTenant, - clientTenant); - return; - } - - metadataAccessor.SetTenant(clientTenant); - } - } - - if (clientDocument is not null && - clientDocument.Properties.TryGetValue(AuthorityClientMetadataKeys.Project, out var clientProjectRaw)) - { - var clientProject = NormalizeProject(clientProjectRaw); - if (!string.Equals(principalProject, clientProject, StringComparison.Ordinal)) - { - if (identity is not null) - { - var existingProject = identity.FindFirst(StellaOpsClaimTypes.Project)?.Value; - if (string.IsNullOrWhiteSpace(existingProject)) - { - identity.SetClaim(StellaOpsClaimTypes.Project, clientProject); - principalProject = clientProject; - } - else - { - var normalizedExistingProject = NormalizeProject(existingProject); - if (!string.Equals(normalizedExistingProject, clientProject, StringComparison.Ordinal)) - { - context.Reject(OpenIddictConstants.Errors.InvalidToken, "The token project does not match the registered client project."); - logger.LogWarning( - "Access token validation failed: project mismatch for client {ClientId}. PrincipalProject={PrincipalProject}; ClientProject={ClientProject}.", - clientId, - normalizedExistingProject, - clientProject); - return; - } - - principalProject = normalizedExistingProject; - } - } - else - { - context.Reject(OpenIddictConstants.Errors.InvalidToken, "The token project does not match the registered client project."); - logger.LogWarning( - "Access token validation failed: project mismatch for client {ClientId}. 
PrincipalProject={PrincipalProject}; ClientProject={ClientProject}.", - clientId, - principalProject, - clientProject); - return; - } - } - - metadataAccessor.SetProject(clientProject); - } - - if (identity is null) - { - return; - } - - if (principalTenant is not null) - { - metadataAccessor.SetTenant(principalTenant); - } - - if (!string.IsNullOrWhiteSpace(principalProject)) - { - metadataAccessor.SetProject(principalProject); - activity?.SetTag("authority.project", principalProject); - } - - var providerName = context.Principal.GetClaim(StellaOpsClaimTypes.IdentityProvider); - if (string.IsNullOrWhiteSpace(providerName)) - { - return; - } - - if (!registry.TryGet(providerName, out var providerMetadata)) - { - context.Reject(OpenIddictConstants.Errors.InvalidToken, "The identity provider associated with the token is unavailable."); - logger.LogWarning("Access token validation failed: provider {Provider} unavailable for subject {Subject}.", providerName, context.Principal.GetClaim(OpenIddictConstants.Claims.Subject)); - return; - } - - await using var providerHandle = await registry.AcquireAsync(providerMetadata.Name, context.CancellationToken).ConfigureAwait(false); - var provider = providerHandle.Provider; - - AuthorityUserDescriptor? user = null; - AuthorityClientDescriptor? client = null; - - var subject = context.Principal.GetClaim(OpenIddictConstants.Claims.Subject); - if (!string.IsNullOrWhiteSpace(subject)) - { - user = await provider.Credentials.FindBySubjectAsync(subject, context.CancellationToken).ConfigureAwait(false); - if (user is null) - { - context.Reject(OpenIddictConstants.Errors.InvalidToken, "The subject referenced by the token no longer exists."); - logger.LogWarning("Access token validation failed: subject {SubjectId} not found.", subject); - return; - } - activity?.SetTag("authority.subject_id", subject); - } - - if (!string.IsNullOrWhiteSpace(clientId) && provider.ClientProvisioning is not null) - { - client = await provider.ClientProvisioning.FindByClientIdAsync(clientId, context.CancellationToken).ConfigureAwait(false); - } - - var enrichmentContext = new AuthorityClaimsEnrichmentContext(provider.Context, user, client); - await provider.ClaimsEnricher.EnrichAsync(identity, enrichmentContext, context.CancellationToken).ConfigureAwait(false); - logger.LogInformation("Access token validated for subject {Subject} and client {ClientId}.", - identity.GetClaim(OpenIddictConstants.Claims.Subject), - identity.GetClaim(OpenIddictConstants.Claims.ClientId)); - } - - private async ValueTask TrackTokenUsageAsync( - OpenIddictServerEvents.ValidateTokenContext context, - AuthorityTokenDocument tokenDocument, - ClaimsPrincipal principal, - IClientSessionHandle session) - { - var metadata = metadataAccessor.GetMetadata(); - var remoteAddress = metadata?.RemoteIp; - var userAgent = metadata?.UserAgent; - - var observedAt = clock.GetUtcNow(); - var result = await tokenStore.RecordUsageAsync(tokenDocument.TokenId, remoteAddress, userAgent, observedAt, context.CancellationToken, session) - .ConfigureAwait(false); - - switch (result.Status) - { - case TokenUsageUpdateStatus.MissingMetadata: - logger.LogDebug("Token usage metadata missing for token {TokenId}; replay detection skipped.", tokenDocument.TokenId); - break; - case TokenUsageUpdateStatus.NotFound: - logger.LogWarning("Token usage recording failed: token {TokenId} not found.", tokenDocument.TokenId); - break; - case TokenUsageUpdateStatus.Recorded: - metadataAccessor.SetTag("authority.token_usage", "recorded"); - break; - 
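// Editor's note (descriptive comment, not part of the patch): the
// SuspectedReplay branch below is the only usage status that produces an
// audit event. RecordUsageAsync reports SuspectedReplay when the token is
// observed from what it considers a new device fingerprint (derived from the
// remote address and user agent captured above); the handler then tags the
// rate-limiter metadata with "suspected_replay" and emits an
// "authority.token.replay.suspected" record with outcome Error via
// EmitReplayAuditAsync. Token validation itself is not rejected here.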
case TokenUsageUpdateStatus.SuspectedReplay: - metadataAccessor.SetTag("authority.token_usage", "suspected_replay"); - await EmitReplayAuditAsync(tokenDocument, principal, metadata, result, observedAt, context.CancellationToken).ConfigureAwait(false); - break; - } - } - - private async ValueTask EmitReplayAuditAsync( - AuthorityTokenDocument tokenDocument, - ClaimsPrincipal principal, - AuthorityRateLimiterMetadata? metadata, - TokenUsageUpdateResult result, - DateTimeOffset observedAt, - CancellationToken cancellationToken) - { - var clientId = principal.GetClaim(OpenIddictConstants.Claims.ClientId); - var subjectId = principal.GetClaim(OpenIddictConstants.Claims.Subject); - var realm = principal.GetClaim(StellaOpsClaimTypes.IdentityProvider); - - var subject = string.IsNullOrWhiteSpace(subjectId) && string.IsNullOrWhiteSpace(realm) - ? null - : new AuthEventSubject - { - SubjectId = ClassifiedString.Personal(subjectId), - Realm = ClassifiedString.Public(string.IsNullOrWhiteSpace(realm) ? null : realm) - }; - - var client = string.IsNullOrWhiteSpace(clientId) - ? null - : new AuthEventClient - { - ClientId = ClassifiedString.Personal(clientId) - }; - - var network = metadata is null && result.RemoteAddress is null && result.UserAgent is null - ? null - : new AuthEventNetwork - { - RemoteAddress = ClassifiedString.Personal(result.RemoteAddress ?? metadata?.RemoteIp), - ForwardedFor = ClassifiedString.Personal(metadata?.ForwardedFor), - UserAgent = ClassifiedString.Personal(result.UserAgent ?? metadata?.UserAgent) - }; - - var previousCount = tokenDocument.Devices?.Count ?? 0; - var properties = new List - { - new() { Name = "token.id", Value = ClassifiedString.Sensitive(tokenDocument.TokenId) }, - new() { Name = "token.type", Value = ClassifiedString.Public(tokenDocument.Type) }, - new() { Name = "token.devices.total", Value = ClassifiedString.Public((previousCount + 1).ToString(CultureInfo.InvariantCulture)) } - }; - - if (!string.IsNullOrWhiteSpace(tokenDocument.ClientId)) - { - properties.Add(new AuthEventProperty - { - Name = "token.client_id", - Value = ClassifiedString.Personal(tokenDocument.ClientId) - }); - } - - logger.LogWarning("Detected suspected token replay for token {TokenId} (client {ClientId}).", tokenDocument.TokenId, clientId ?? ""); - - var record = new AuthEventRecord - { - EventType = "authority.token.replay.suspected", - OccurredAt = observedAt, - CorrelationId = Activity.Current?.TraceId.ToString() ?? Guid.NewGuid().ToString("N"), - Outcome = AuthEventOutcome.Error, - Reason = "Token observed from a new device fingerprint.", - Subject = subject, - Client = client, - Scopes = Array.Empty(), - Network = network, - Properties = properties - }; - - await auditSink.WriteAsync(record, cancellationToken).ConfigureAwait(false); - } - - private static void EnsureSenderConstraintClaims(ClaimsPrincipal? 
principal, AuthorityTokenDocument tokenDocument) - { - if (principal?.Identity is not ClaimsIdentity identity) - { - return; - } - - if (!string.IsNullOrWhiteSpace(tokenDocument.SenderConstraint) && - !identity.HasClaim(claim => claim.Type == AuthorityOpenIddictConstants.SenderConstraintClaimType)) - { - identity.SetClaim(AuthorityOpenIddictConstants.SenderConstraintClaimType, tokenDocument.SenderConstraint); - } - - if (identity.HasClaim(claim => claim.Type == AuthorityOpenIddictConstants.ConfirmationClaimType)) - { - return; - } - - if (string.IsNullOrWhiteSpace(tokenDocument.SenderConstraint) || string.IsNullOrWhiteSpace(tokenDocument.SenderKeyThumbprint)) - { - return; - } - - string confirmation = tokenDocument.SenderConstraint switch - { - AuthoritySenderConstraintKinds.Dpop => JsonSerializer.Serialize(new Dictionary - { - ["jkt"] = tokenDocument.SenderKeyThumbprint - }), - AuthoritySenderConstraintKinds.Mtls => JsonSerializer.Serialize(new Dictionary - { - ["x5t#S256"] = tokenDocument.SenderKeyThumbprint - }), - _ => string.Empty - }; - - if (!string.IsNullOrEmpty(confirmation)) - { - identity.SetClaim(AuthorityOpenIddictConstants.ConfirmationClaimType, confirmation); - } - } -} +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Globalization; +using System.Security.Claims; +using System.Text.Json; +using Microsoft.Extensions.Logging; +using OpenIddict.Abstractions; +using OpenIddict.Extensions; +using OpenIddict.Server; +using StellaOps.Auth.Abstractions; +using MongoDB.Driver; +using StellaOps.Authority.OpenIddict; +using StellaOps.Authority.Plugins.Abstractions; +using StellaOps.Authority.RateLimiting; +using StellaOps.Authority.Storage.Mongo.Documents; +using StellaOps.Authority.Storage.Mongo.Sessions; +using StellaOps.Authority.Storage.Mongo.Stores; +using StellaOps.Cryptography.Audit; +using StellaOps.Authority.Security; + +namespace StellaOps.Authority.OpenIddict.Handlers; + +internal sealed class ValidateAccessTokenHandler : IOpenIddictServerHandler +{ + private readonly IAuthorityTokenStore tokenStore; + private readonly IAuthorityMongoSessionAccessor sessionAccessor; + private readonly IAuthorityClientStore clientStore; + private readonly IAuthorityIdentityProviderRegistry registry; + private readonly IAuthorityRateLimiterMetadataAccessor metadataAccessor; + private readonly IAuthEventSink auditSink; + private readonly TimeProvider clock; + private readonly ActivitySource activitySource; + private readonly ILogger logger; + + public ValidateAccessTokenHandler( + IAuthorityTokenStore tokenStore, + IAuthorityMongoSessionAccessor sessionAccessor, + IAuthorityClientStore clientStore, + IAuthorityIdentityProviderRegistry registry, + IAuthorityRateLimiterMetadataAccessor metadataAccessor, + IAuthEventSink auditSink, + TimeProvider clock, + ActivitySource activitySource, + ILogger logger) + { + this.tokenStore = tokenStore ?? throw new ArgumentNullException(nameof(tokenStore)); + this.sessionAccessor = sessionAccessor ?? throw new ArgumentNullException(nameof(sessionAccessor)); + this.clientStore = clientStore ?? throw new ArgumentNullException(nameof(clientStore)); + this.registry = registry ?? throw new ArgumentNullException(nameof(registry)); + this.metadataAccessor = metadataAccessor ?? throw new ArgumentNullException(nameof(metadataAccessor)); + this.auditSink = auditSink ?? throw new ArgumentNullException(nameof(auditSink)); + this.clock = clock ?? 
throw new ArgumentNullException(nameof(clock)); + this.activitySource = activitySource ?? throw new ArgumentNullException(nameof(activitySource)); + this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public async ValueTask HandleAsync(OpenIddictServerEvents.ValidateTokenContext context) + { + ArgumentNullException.ThrowIfNull(context); + + if (context.Principal is null) + { + return; + } + + if (context.EndpointType is not (OpenIddictServerEndpointType.Token or OpenIddictServerEndpointType.Introspection)) + { + return; + } + + static string? NormalizeTenant(string? value) + => string.IsNullOrWhiteSpace(value) ? null : value.Trim().ToLowerInvariant(); + + static string NormalizeProject(string? value) + => string.IsNullOrWhiteSpace(value) + ? StellaOpsTenancyDefaults.AnyProject + : value.Trim().ToLowerInvariant(); + + var identity = context.Principal.Identity as ClaimsIdentity; + var principalTenant = NormalizeTenant(context.Principal.GetClaim(StellaOpsClaimTypes.Tenant)); + var principalProject = NormalizeProject(context.Principal.GetClaim(StellaOpsClaimTypes.Project)); + + using var activity = activitySource.StartActivity("authority.token.validate_access", ActivityKind.Internal); + activity?.SetTag("authority.endpoint", context.EndpointType switch + { + OpenIddictServerEndpointType.Token => "/token", + OpenIddictServerEndpointType.Introspection => "/introspect", + _ => context.EndpointType.ToString() + }); + + var tokenId = !string.IsNullOrWhiteSpace(context.TokenId) + ? context.TokenId + : context.Principal.GetClaim(OpenIddictConstants.Claims.JwtId); + + var session = await sessionAccessor.GetSessionAsync(context.CancellationToken).ConfigureAwait(false); + + AuthorityTokenDocument? tokenDocument = null; + if (!string.IsNullOrWhiteSpace(tokenId)) + { + tokenDocument = await tokenStore.FindByTokenIdAsync(tokenId, context.CancellationToken, session).ConfigureAwait(false); + if (tokenDocument is not null) + { + if (!string.Equals(tokenDocument.Status, "valid", StringComparison.OrdinalIgnoreCase)) + { + context.Reject(OpenIddictConstants.Errors.InvalidToken, "The token is no longer active."); + logger.LogWarning("Access token {TokenId} rejected: status {Status}.", tokenId, tokenDocument.Status); + return; + } + + if (tokenDocument.ExpiresAt is { } expiresAt && expiresAt <= clock.GetUtcNow()) + { + context.Reject(OpenIddictConstants.Errors.InvalidToken, "The token has expired."); + logger.LogWarning("Access token {TokenId} rejected: expired at {ExpiresAt:o}.", tokenId, expiresAt); + return; + } + + context.Transaction.Properties[AuthorityOpenIddictConstants.TokenTransactionProperty] = tokenDocument; + activity?.SetTag("authority.token_id", tokenDocument.TokenId); + } + } + + if (tokenDocument is not null) + { + EnsureSenderConstraintClaims(context.Principal, tokenDocument); + + var documentTenant = NormalizeTenant(tokenDocument.Tenant); + if (documentTenant is not null) + { + if (principalTenant is null) + { + if (identity is not null) + { + identity.SetClaim(StellaOpsClaimTypes.Tenant, documentTenant); + principalTenant = documentTenant; + } + } + else if (!string.Equals(principalTenant, documentTenant, StringComparison.Ordinal)) + { + context.Reject(OpenIddictConstants.Errors.InvalidToken, "The token tenant does not match the issued tenant."); + logger.LogWarning( + "Access token validation failed: tenant mismatch for token {TokenId}. 
PrincipalTenant={PrincipalTenant}; DocumentTenant={DocumentTenant}.", + tokenDocument.TokenId, + principalTenant, + documentTenant); + return; + } + + metadataAccessor.SetTenant(documentTenant); + } + + var documentProject = NormalizeProject(tokenDocument.Project); + if (identity is not null) + { + var existingProject = identity.FindFirst(StellaOpsClaimTypes.Project)?.Value; + if (string.IsNullOrWhiteSpace(existingProject)) + { + identity.SetClaim(StellaOpsClaimTypes.Project, documentProject); + principalProject = documentProject; + } + else + { + var normalizedExistingProject = NormalizeProject(existingProject); + if (!string.Equals(normalizedExistingProject, documentProject, StringComparison.Ordinal)) + { + context.Reject(OpenIddictConstants.Errors.InvalidToken, "The token project does not match the issued project."); + logger.LogWarning( + "Access token validation failed: project mismatch for token {TokenId}. PrincipalProject={PrincipalProject}; DocumentProject={DocumentProject}.", + tokenDocument.TokenId, + normalizedExistingProject, + documentProject); + return; + } + + principalProject = normalizedExistingProject; + } + } + else if (!string.Equals(principalProject, documentProject, StringComparison.Ordinal)) + { + context.Reject(OpenIddictConstants.Errors.InvalidToken, "The token project does not match the issued project."); + logger.LogWarning( + "Access token validation failed: project mismatch for token {TokenId}. PrincipalProject={PrincipalProject}; DocumentProject={DocumentProject}.", + tokenDocument.TokenId, + principalProject, + documentProject); + return; + } + else + { + principalProject = documentProject; + } + + metadataAccessor.SetProject(documentProject); + } + + if (!context.IsRejected && tokenDocument is not null) + { + await TrackTokenUsageAsync(context, tokenDocument, context.Principal, session).ConfigureAwait(false); + } + + var clientId = context.Principal.GetClaim(OpenIddictConstants.Claims.ClientId); + AuthorityClientDocument? clientDocument = null; + if (!string.IsNullOrWhiteSpace(clientId)) + { + clientDocument = await clientStore.FindByClientIdAsync(clientId, context.CancellationToken, session).ConfigureAwait(false); + if (clientDocument is null || clientDocument.Disabled) + { + context.Reject(OpenIddictConstants.Errors.InvalidClient, "The client associated with the token is not permitted."); + logger.LogWarning("Access token validation failed: client {ClientId} disabled or missing.", clientId); + return; + } + } + + if (clientDocument is not null && + clientDocument.Properties.TryGetValue(AuthorityClientMetadataKeys.Tenant, out var clientTenantRaw)) + { + var clientTenant = NormalizeTenant(clientTenantRaw); + if (clientTenant is not null) + { + if (principalTenant is null) + { + if (identity is not null) + { + identity.SetClaim(StellaOpsClaimTypes.Tenant, clientTenant); + principalTenant = clientTenant; + } + } + else if (!string.Equals(principalTenant, clientTenant, StringComparison.Ordinal)) + { + context.Reject(OpenIddictConstants.Errors.InvalidToken, "The token tenant does not match the registered client tenant."); + logger.LogWarning( + "Access token validation failed: tenant mismatch for client {ClientId}. 
PrincipalTenant={PrincipalTenant}; ClientTenant={ClientTenant}.", + clientId, + principalTenant, + clientTenant); + return; + } + + metadataAccessor.SetTenant(clientTenant); + } + } + + if (clientDocument is not null && + clientDocument.Properties.TryGetValue(AuthorityClientMetadataKeys.Project, out var clientProjectRaw)) + { + var clientProject = NormalizeProject(clientProjectRaw); + if (!string.Equals(principalProject, clientProject, StringComparison.Ordinal)) + { + if (identity is not null) + { + var existingProject = identity.FindFirst(StellaOpsClaimTypes.Project)?.Value; + if (string.IsNullOrWhiteSpace(existingProject)) + { + identity.SetClaim(StellaOpsClaimTypes.Project, clientProject); + principalProject = clientProject; + } + else + { + var normalizedExistingProject = NormalizeProject(existingProject); + if (!string.Equals(normalizedExistingProject, clientProject, StringComparison.Ordinal)) + { + context.Reject(OpenIddictConstants.Errors.InvalidToken, "The token project does not match the registered client project."); + logger.LogWarning( + "Access token validation failed: project mismatch for client {ClientId}. PrincipalProject={PrincipalProject}; ClientProject={ClientProject}.", + clientId, + normalizedExistingProject, + clientProject); + return; + } + + principalProject = normalizedExistingProject; + } + } + else + { + context.Reject(OpenIddictConstants.Errors.InvalidToken, "The token project does not match the registered client project."); + logger.LogWarning( + "Access token validation failed: project mismatch for client {ClientId}. PrincipalProject={PrincipalProject}; ClientProject={ClientProject}.", + clientId, + principalProject, + clientProject); + return; + } + } + + metadataAccessor.SetProject(clientProject); + } + + if (identity is null) + { + return; + } + + if (principalTenant is not null) + { + metadataAccessor.SetTenant(principalTenant); + } + + if (!string.IsNullOrWhiteSpace(principalProject)) + { + metadataAccessor.SetProject(principalProject); + activity?.SetTag("authority.project", principalProject); + } + + var providerName = context.Principal.GetClaim(StellaOpsClaimTypes.IdentityProvider); + if (string.IsNullOrWhiteSpace(providerName)) + { + return; + } + + if (!registry.TryGet(providerName, out var providerMetadata)) + { + context.Reject(OpenIddictConstants.Errors.InvalidToken, "The identity provider associated with the token is unavailable."); + logger.LogWarning("Access token validation failed: provider {Provider} unavailable for subject {Subject}.", providerName, context.Principal.GetClaim(OpenIddictConstants.Claims.Subject)); + return; + } + + await using var providerHandle = await registry.AcquireAsync(providerMetadata.Name, context.CancellationToken).ConfigureAwait(false); + var provider = providerHandle.Provider; + + AuthorityUserDescriptor? user = null; + AuthorityClientDescriptor? 
client = null; + + var subject = context.Principal.GetClaim(OpenIddictConstants.Claims.Subject); + if (!string.IsNullOrWhiteSpace(subject)) + { + user = await provider.Credentials.FindBySubjectAsync(subject, context.CancellationToken).ConfigureAwait(false); + if (user is null) + { + context.Reject(OpenIddictConstants.Errors.InvalidToken, "The subject referenced by the token no longer exists."); + logger.LogWarning("Access token validation failed: subject {SubjectId} not found.", subject); + return; + } + activity?.SetTag("authority.subject_id", subject); + } + + if (!string.IsNullOrWhiteSpace(clientId) && provider.ClientProvisioning is not null) + { + client = await provider.ClientProvisioning.FindByClientIdAsync(clientId, context.CancellationToken).ConfigureAwait(false); + } + + var enrichmentContext = new AuthorityClaimsEnrichmentContext(provider.Context, user, client); + await provider.ClaimsEnricher.EnrichAsync(identity, enrichmentContext, context.CancellationToken).ConfigureAwait(false); + logger.LogInformation("Access token validated for subject {Subject} and client {ClientId}.", + identity.GetClaim(OpenIddictConstants.Claims.Subject), + identity.GetClaim(OpenIddictConstants.Claims.ClientId)); + } + + private async ValueTask TrackTokenUsageAsync( + OpenIddictServerEvents.ValidateTokenContext context, + AuthorityTokenDocument tokenDocument, + ClaimsPrincipal principal, + IClientSessionHandle session) + { + var metadata = metadataAccessor.GetMetadata(); + var remoteAddress = metadata?.RemoteIp; + var userAgent = metadata?.UserAgent; + + var observedAt = clock.GetUtcNow(); + var result = await tokenStore.RecordUsageAsync(tokenDocument.TokenId, remoteAddress, userAgent, observedAt, context.CancellationToken, session) + .ConfigureAwait(false); + + switch (result.Status) + { + case TokenUsageUpdateStatus.MissingMetadata: + logger.LogDebug("Token usage metadata missing for token {TokenId}; replay detection skipped.", tokenDocument.TokenId); + break; + case TokenUsageUpdateStatus.NotFound: + logger.LogWarning("Token usage recording failed: token {TokenId} not found.", tokenDocument.TokenId); + break; + case TokenUsageUpdateStatus.Recorded: + metadataAccessor.SetTag("authority.token_usage", "recorded"); + break; + case TokenUsageUpdateStatus.SuspectedReplay: + metadataAccessor.SetTag("authority.token_usage", "suspected_replay"); + await EmitReplayAuditAsync(tokenDocument, principal, metadata, result, observedAt, context.CancellationToken).ConfigureAwait(false); + break; + } + } + + private async ValueTask EmitReplayAuditAsync( + AuthorityTokenDocument tokenDocument, + ClaimsPrincipal principal, + AuthorityRateLimiterMetadata? metadata, + TokenUsageUpdateResult result, + DateTimeOffset observedAt, + CancellationToken cancellationToken) + { + var clientId = principal.GetClaim(OpenIddictConstants.Claims.ClientId); + var subjectId = principal.GetClaim(OpenIddictConstants.Claims.Subject); + var realm = principal.GetClaim(StellaOpsClaimTypes.IdentityProvider); + + var subject = string.IsNullOrWhiteSpace(subjectId) && string.IsNullOrWhiteSpace(realm) + ? null + : new AuthEventSubject + { + SubjectId = ClassifiedString.Personal(subjectId), + Realm = ClassifiedString.Public(string.IsNullOrWhiteSpace(realm) ? null : realm) + }; + + var client = string.IsNullOrWhiteSpace(clientId) + ? null + : new AuthEventClient + { + ClientId = ClassifiedString.Personal(clientId) + }; + + var network = metadata is null && result.RemoteAddress is null && result.UserAgent is null + ? 
null + : new AuthEventNetwork + { + RemoteAddress = ClassifiedString.Personal(result.RemoteAddress ?? metadata?.RemoteIp), + ForwardedFor = ClassifiedString.Personal(metadata?.ForwardedFor), + UserAgent = ClassifiedString.Personal(result.UserAgent ?? metadata?.UserAgent) + }; + + var previousCount = tokenDocument.Devices?.Count ?? 0; + var properties = new List + { + new() { Name = "token.id", Value = ClassifiedString.Sensitive(tokenDocument.TokenId) }, + new() { Name = "token.type", Value = ClassifiedString.Public(tokenDocument.Type) }, + new() { Name = "token.devices.total", Value = ClassifiedString.Public((previousCount + 1).ToString(CultureInfo.InvariantCulture)) } + }; + + if (!string.IsNullOrWhiteSpace(tokenDocument.ClientId)) + { + properties.Add(new AuthEventProperty + { + Name = "token.client_id", + Value = ClassifiedString.Personal(tokenDocument.ClientId) + }); + } + + logger.LogWarning("Detected suspected token replay for token {TokenId} (client {ClientId}).", tokenDocument.TokenId, clientId ?? ""); + + var record = new AuthEventRecord + { + EventType = "authority.token.replay.suspected", + OccurredAt = observedAt, + CorrelationId = Activity.Current?.TraceId.ToString() ?? Guid.NewGuid().ToString("N"), + Outcome = AuthEventOutcome.Error, + Reason = "Token observed from a new device fingerprint.", + Subject = subject, + Client = client, + Scopes = Array.Empty(), + Network = network, + Properties = properties + }; + + await auditSink.WriteAsync(record, cancellationToken).ConfigureAwait(false); + } + + private static void EnsureSenderConstraintClaims(ClaimsPrincipal? principal, AuthorityTokenDocument tokenDocument) + { + if (principal?.Identity is not ClaimsIdentity identity) + { + return; + } + + if (!string.IsNullOrWhiteSpace(tokenDocument.SenderConstraint) && + !identity.HasClaim(claim => claim.Type == AuthorityOpenIddictConstants.SenderConstraintClaimType)) + { + identity.SetClaim(AuthorityOpenIddictConstants.SenderConstraintClaimType, tokenDocument.SenderConstraint); + } + + if (identity.HasClaim(claim => claim.Type == AuthorityOpenIddictConstants.ConfirmationClaimType)) + { + return; + } + + if (string.IsNullOrWhiteSpace(tokenDocument.SenderConstraint) || string.IsNullOrWhiteSpace(tokenDocument.SenderKeyThumbprint)) + { + return; + } + + string confirmation = tokenDocument.SenderConstraint switch + { + AuthoritySenderConstraintKinds.Dpop => JsonSerializer.Serialize(new Dictionary + { + ["jkt"] = tokenDocument.SenderKeyThumbprint + }), + AuthoritySenderConstraintKinds.Mtls => JsonSerializer.Serialize(new Dictionary + { + ["x5t#S256"] = tokenDocument.SenderKeyThumbprint + }), + _ => string.Empty + }; + + if (!string.IsNullOrEmpty(confirmation)) + { + identity.SetClaim(AuthorityOpenIddictConstants.ConfirmationClaimType, confirmation); + } + } +} diff --git a/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/TokenRequestTamperInspector.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/TokenRequestTamperInspector.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Authority/OpenIddict/TokenRequestTamperInspector.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/TokenRequestTamperInspector.cs index 78b5138c..1e496565 100644 --- a/src/StellaOps.Authority/StellaOps.Authority/OpenIddict/TokenRequestTamperInspector.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/OpenIddict/TokenRequestTamperInspector.cs @@ -1,114 +1,114 @@ -using System.Collections.Generic; -using System.Linq; -using 
OpenIddict.Abstractions; - -namespace StellaOps.Authority.OpenIddict; - -internal static class TokenRequestTamperInspector -{ - private static readonly HashSet CommonParameters = new(StringComparer.OrdinalIgnoreCase) - { - OpenIddictConstants.Parameters.GrantType, - OpenIddictConstants.Parameters.Scope, - OpenIddictConstants.Parameters.Resource, - OpenIddictConstants.Parameters.ClientId, - OpenIddictConstants.Parameters.ClientSecret, - OpenIddictConstants.Parameters.ClientAssertion, - OpenIddictConstants.Parameters.ClientAssertionType, - OpenIddictConstants.Parameters.RefreshToken, - OpenIddictConstants.Parameters.DeviceCode, - OpenIddictConstants.Parameters.Code, - OpenIddictConstants.Parameters.CodeVerifier, - OpenIddictConstants.Parameters.CodeChallenge, - OpenIddictConstants.Parameters.CodeChallengeMethod, - OpenIddictConstants.Parameters.RedirectUri, - OpenIddictConstants.Parameters.Assertion, - OpenIddictConstants.Parameters.Nonce, - OpenIddictConstants.Parameters.Prompt, - OpenIddictConstants.Parameters.MaxAge, - OpenIddictConstants.Parameters.UiLocales, - OpenIddictConstants.Parameters.AcrValues, - OpenIddictConstants.Parameters.LoginHint, - OpenIddictConstants.Parameters.Claims, - OpenIddictConstants.Parameters.Token, - OpenIddictConstants.Parameters.TokenTypeHint, - OpenIddictConstants.Parameters.AccessToken, - OpenIddictConstants.Parameters.IdToken - }; - - private static readonly HashSet PasswordGrantParameters = new(StringComparer.OrdinalIgnoreCase) - { - OpenIddictConstants.Parameters.Username, - OpenIddictConstants.Parameters.Password, - AuthorityOpenIddictConstants.ProviderParameterName - }; - - private static readonly HashSet ClientCredentialsParameters = new(StringComparer.OrdinalIgnoreCase) - { - AuthorityOpenIddictConstants.ProviderParameterName, - AuthorityOpenIddictConstants.OperatorReasonParameterName, - AuthorityOpenIddictConstants.OperatorTicketParameterName - }; - - internal static IReadOnlyList GetUnexpectedPasswordGrantParameters(OpenIddictRequest request) - => DetectUnexpectedParameters(request, PasswordGrantParameters); - - internal static IReadOnlyList GetUnexpectedClientCredentialsParameters(OpenIddictRequest request) - => DetectUnexpectedParameters(request, ClientCredentialsParameters); - - private static IReadOnlyList DetectUnexpectedParameters( - OpenIddictRequest request, - HashSet grantSpecific) - { - if (request is null) - { - return Array.Empty(); - } - - var unexpected = new HashSet(StringComparer.OrdinalIgnoreCase); - - foreach (var pair in request.GetParameters()) - { - var name = pair.Key; - if (string.IsNullOrWhiteSpace(name)) - { - continue; - } - - if (IsAllowed(name, grantSpecific)) - { - continue; - } - - unexpected.Add(name); - } - - return unexpected.Count == 0 - ? 
Array.Empty() - : unexpected - .OrderBy(static value => value, StringComparer.OrdinalIgnoreCase) - .ToArray(); - } - - private static bool IsAllowed(string parameterName, HashSet grantSpecific) - { - if (CommonParameters.Contains(parameterName) || grantSpecific.Contains(parameterName)) - { - return true; - } - - if (parameterName.StartsWith("ext_", StringComparison.OrdinalIgnoreCase) || - parameterName.StartsWith("x-", StringComparison.OrdinalIgnoreCase) || - parameterName.StartsWith("custom_", StringComparison.OrdinalIgnoreCase)) - { - return true; - } - - if (parameterName.Contains(':', StringComparison.Ordinal)) - { - return true; - } - - return false; - } -} +using System.Collections.Generic; +using System.Linq; +using OpenIddict.Abstractions; + +namespace StellaOps.Authority.OpenIddict; + +internal static class TokenRequestTamperInspector +{ + private static readonly HashSet CommonParameters = new(StringComparer.OrdinalIgnoreCase) + { + OpenIddictConstants.Parameters.GrantType, + OpenIddictConstants.Parameters.Scope, + OpenIddictConstants.Parameters.Resource, + OpenIddictConstants.Parameters.ClientId, + OpenIddictConstants.Parameters.ClientSecret, + OpenIddictConstants.Parameters.ClientAssertion, + OpenIddictConstants.Parameters.ClientAssertionType, + OpenIddictConstants.Parameters.RefreshToken, + OpenIddictConstants.Parameters.DeviceCode, + OpenIddictConstants.Parameters.Code, + OpenIddictConstants.Parameters.CodeVerifier, + OpenIddictConstants.Parameters.CodeChallenge, + OpenIddictConstants.Parameters.CodeChallengeMethod, + OpenIddictConstants.Parameters.RedirectUri, + OpenIddictConstants.Parameters.Assertion, + OpenIddictConstants.Parameters.Nonce, + OpenIddictConstants.Parameters.Prompt, + OpenIddictConstants.Parameters.MaxAge, + OpenIddictConstants.Parameters.UiLocales, + OpenIddictConstants.Parameters.AcrValues, + OpenIddictConstants.Parameters.LoginHint, + OpenIddictConstants.Parameters.Claims, + OpenIddictConstants.Parameters.Token, + OpenIddictConstants.Parameters.TokenTypeHint, + OpenIddictConstants.Parameters.AccessToken, + OpenIddictConstants.Parameters.IdToken + }; + + private static readonly HashSet PasswordGrantParameters = new(StringComparer.OrdinalIgnoreCase) + { + OpenIddictConstants.Parameters.Username, + OpenIddictConstants.Parameters.Password, + AuthorityOpenIddictConstants.ProviderParameterName + }; + + private static readonly HashSet ClientCredentialsParameters = new(StringComparer.OrdinalIgnoreCase) + { + AuthorityOpenIddictConstants.ProviderParameterName, + AuthorityOpenIddictConstants.OperatorReasonParameterName, + AuthorityOpenIddictConstants.OperatorTicketParameterName + }; + + internal static IReadOnlyList GetUnexpectedPasswordGrantParameters(OpenIddictRequest request) + => DetectUnexpectedParameters(request, PasswordGrantParameters); + + internal static IReadOnlyList GetUnexpectedClientCredentialsParameters(OpenIddictRequest request) + => DetectUnexpectedParameters(request, ClientCredentialsParameters); + + private static IReadOnlyList DetectUnexpectedParameters( + OpenIddictRequest request, + HashSet grantSpecific) + { + if (request is null) + { + return Array.Empty(); + } + + var unexpected = new HashSet(StringComparer.OrdinalIgnoreCase); + + foreach (var pair in request.GetParameters()) + { + var name = pair.Key; + if (string.IsNullOrWhiteSpace(name)) + { + continue; + } + + if (IsAllowed(name, grantSpecific)) + { + continue; + } + + unexpected.Add(name); + } + + return unexpected.Count == 0 + ? 
Array.Empty() + : unexpected + .OrderBy(static value => value, StringComparer.OrdinalIgnoreCase) + .ToArray(); + } + + private static bool IsAllowed(string parameterName, HashSet grantSpecific) + { + if (CommonParameters.Contains(parameterName) || grantSpecific.Contains(parameterName)) + { + return true; + } + + if (parameterName.StartsWith("ext_", StringComparison.OrdinalIgnoreCase) || + parameterName.StartsWith("x-", StringComparison.OrdinalIgnoreCase) || + parameterName.StartsWith("custom_", StringComparison.OrdinalIgnoreCase)) + { + return true; + } + + if (parameterName.Contains(':', StringComparison.Ordinal)) + { + return true; + } + + return false; + } +} diff --git a/src/StellaOps.Authority/StellaOps.Authority/Permalinks/VulnPermalinkRequest.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Permalinks/VulnPermalinkRequest.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Authority/Permalinks/VulnPermalinkRequest.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/Permalinks/VulnPermalinkRequest.cs index f9b48047..240ef175 100644 --- a/src/StellaOps.Authority/StellaOps.Authority/Permalinks/VulnPermalinkRequest.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/Permalinks/VulnPermalinkRequest.cs @@ -1,11 +1,11 @@ -using System.Text.Json; -using System.Text.Json.Serialization; - -namespace StellaOps.Authority.Permalinks; - -public sealed record VulnPermalinkRequest( - [property: JsonPropertyName("tenant")] string Tenant, - [property: JsonPropertyName("resourceKind")] string ResourceKind, - [property: JsonPropertyName("state")] JsonElement State, - [property: JsonPropertyName("expiresInSeconds")] int? ExpiresInSeconds, - [property: JsonPropertyName("environment")] string? Environment); +using System.Text.Json; +using System.Text.Json.Serialization; + +namespace StellaOps.Authority.Permalinks; + +public sealed record VulnPermalinkRequest( + [property: JsonPropertyName("tenant")] string Tenant, + [property: JsonPropertyName("resourceKind")] string ResourceKind, + [property: JsonPropertyName("state")] JsonElement State, + [property: JsonPropertyName("expiresInSeconds")] int? ExpiresInSeconds, + [property: JsonPropertyName("environment")] string? 
Environment); diff --git a/src/StellaOps.Authority/StellaOps.Authority/Permalinks/VulnPermalinkResponse.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Permalinks/VulnPermalinkResponse.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Authority/Permalinks/VulnPermalinkResponse.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/Permalinks/VulnPermalinkResponse.cs index a678bb20..6e2885fe 100644 --- a/src/StellaOps.Authority/StellaOps.Authority/Permalinks/VulnPermalinkResponse.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/Permalinks/VulnPermalinkResponse.cs @@ -1,11 +1,11 @@ -using System; -using System.Collections.Generic; -using System.Text.Json.Serialization; - -namespace StellaOps.Authority.Permalinks; - -public sealed record VulnPermalinkResponse( - [property: JsonPropertyName("token")] string Token, - [property: JsonPropertyName("issuedAt")] DateTimeOffset IssuedAt, - [property: JsonPropertyName("expiresAt")] DateTimeOffset ExpiresAt, - [property: JsonPropertyName("scopes")] IReadOnlyList Scopes); +using System; +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace StellaOps.Authority.Permalinks; + +public sealed record VulnPermalinkResponse( + [property: JsonPropertyName("token")] string Token, + [property: JsonPropertyName("issuedAt")] DateTimeOffset IssuedAt, + [property: JsonPropertyName("expiresAt")] DateTimeOffset ExpiresAt, + [property: JsonPropertyName("scopes")] IReadOnlyList Scopes); diff --git a/src/StellaOps.Authority/StellaOps.Authority/Permalinks/VulnPermalinkService.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Permalinks/VulnPermalinkService.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Authority/Permalinks/VulnPermalinkService.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/Permalinks/VulnPermalinkService.cs index c1aac64a..fd7ec4fd 100644 --- a/src/StellaOps.Authority/StellaOps.Authority/Permalinks/VulnPermalinkService.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/Permalinks/VulnPermalinkService.cs @@ -1,181 +1,181 @@ -using System; -using System.Collections.Generic; -using System.Text; -using System.Text.Json; -using System.Text.Json.Serialization; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using Microsoft.IdentityModel.Tokens; -using StellaOps.Auth.Abstractions; -using StellaOps.Configuration; -using StellaOps.Cryptography; - -namespace StellaOps.Authority.Permalinks; - -internal sealed class VulnPermalinkService -{ - private static readonly JsonSerializerOptions PayloadSerializerOptions = new(JsonSerializerDefaults.Web) - { - DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, - PropertyNamingPolicy = JsonNamingPolicy.CamelCase - }; - - private static readonly JsonSerializerOptions HeaderSerializerOptions = new(JsonSerializerDefaults.General) - { - PropertyNamingPolicy = null, - DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, - WriteIndented = false - }; - - private static readonly TimeSpan DefaultLifetime = TimeSpan.FromHours(24); - private static readonly TimeSpan MaxLifetime = TimeSpan.FromDays(30); - private const int MaxStateBytes = 8 * 1024; - - private readonly ICryptoProviderRegistry providerRegistry; - private readonly IOptions authorityOptions; - private readonly TimeProvider timeProvider; - private readonly ILogger logger; - - public VulnPermalinkService( - 
ICryptoProviderRegistry providerRegistry, - IOptions authorityOptions, - TimeProvider timeProvider, - ILogger logger) - { - this.providerRegistry = providerRegistry ?? throw new ArgumentNullException(nameof(providerRegistry)); - this.authorityOptions = authorityOptions ?? throw new ArgumentNullException(nameof(authorityOptions)); - this.timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); - this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); - } - - public async Task CreateAsync(VulnPermalinkRequest request, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(request); - - var tenant = request.Tenant?.Trim(); - if (string.IsNullOrWhiteSpace(tenant)) - { - throw new ArgumentException("Tenant is required.", nameof(request)); - } - - var resourceKind = request.ResourceKind?.Trim(); - if (string.IsNullOrWhiteSpace(resourceKind)) - { - throw new ArgumentException("Resource kind is required.", nameof(request)); - } - - var stateJson = request.State.ValueKind == JsonValueKind.Undefined - ? "{}" - : request.State.GetRawText(); - - if (Encoding.UTF8.GetByteCount(stateJson) > MaxStateBytes) - { - throw new ArgumentException("State payload exceeds 8 KB limit.", nameof(request)); - } - - JsonElement stateElement; - using (var stateDocument = JsonDocument.Parse(string.IsNullOrWhiteSpace(stateJson) ? "{}" : stateJson)) - { - stateElement = stateDocument.RootElement.Clone(); - } - - var lifetime = request.ExpiresInSeconds.HasValue - ? TimeSpan.FromSeconds(request.ExpiresInSeconds.Value) - : DefaultLifetime; - - if (lifetime <= TimeSpan.Zero) - { - throw new ArgumentException("Expiration must be positive.", nameof(request)); - } - - if (lifetime > MaxLifetime) - { - lifetime = MaxLifetime; - } - - var signing = authorityOptions.Value.Signing - ?? throw new InvalidOperationException("Authority signing configuration is required to issue permalinks."); - - if (!signing.Enabled) - { - throw new InvalidOperationException("Authority signing is disabled. Enable signing to issue permalinks."); - } - - if (string.IsNullOrWhiteSpace(signing.ActiveKeyId)) - { - throw new InvalidOperationException("Authority signing configuration requires an active key identifier."); - } - - var algorithm = string.IsNullOrWhiteSpace(signing.Algorithm) - ? SignatureAlgorithms.Es256 - : signing.Algorithm.Trim(); - - var issuedAt = timeProvider.GetUtcNow(); - var expiresAt = issuedAt.Add(lifetime); - - var keyReference = new CryptoKeyReference(signing.ActiveKeyId, signing.Provider); - var resolution = providerRegistry.ResolveSigner( - CryptoCapability.Signing, - algorithm, - keyReference, - signing.Provider); - var signer = resolution.Signer; - - var payload = new VulnPermalinkPayload( - Subject: "vuln:permalink", - Audience: "stellaops:vuln-explorer", - Type: resourceKind, - Tenant: tenant, - Environment: string.IsNullOrWhiteSpace(request.Environment) ? 
null : request.Environment.Trim(), - Scopes: new[] { StellaOpsScopes.VulnRead }, - IssuedAt: issuedAt.ToUnixTimeSeconds(), - NotBefore: issuedAt.ToUnixTimeSeconds(), - ExpiresAt: expiresAt.ToUnixTimeSeconds(), - TokenId: Guid.NewGuid().ToString("N"), - Resource: new VulnPermalinkResource(resourceKind, stateElement)); - - var payloadBytes = JsonSerializer.SerializeToUtf8Bytes(payload, PayloadSerializerOptions); - var header = new Dictionary - { - ["alg"] = algorithm, - ["typ"] = "JWT", - ["kid"] = signer.KeyId - }; - - var headerBytes = JsonSerializer.SerializeToUtf8Bytes(header, HeaderSerializerOptions); - var encodedHeader = Base64UrlEncoder.Encode(headerBytes); - var encodedPayload = Base64UrlEncoder.Encode(payloadBytes); - - var signingInput = Encoding.ASCII.GetBytes(string.Concat(encodedHeader, '.', encodedPayload)); - var signatureBytes = await signer.SignAsync(signingInput, cancellationToken).ConfigureAwait(false); - var encodedSignature = Base64UrlEncoder.Encode(signatureBytes); - var token = string.Concat(encodedHeader, '.', encodedPayload, '.', encodedSignature); - - logger.LogDebug("Issued Vuln Explorer permalink for tenant {Tenant} with resource kind {Resource}.", tenant, resourceKind); - - return new VulnPermalinkResponse( - Token: token, - IssuedAt: issuedAt, - ExpiresAt: expiresAt, - Scopes: new[] { StellaOpsScopes.VulnRead }); - } - - private sealed record VulnPermalinkPayload( - [property: JsonPropertyName("sub")] string Subject, - [property: JsonPropertyName("aud")] string Audience, - [property: JsonPropertyName("type")] string Type, - [property: JsonPropertyName("tenant")] string Tenant, - [property: JsonPropertyName("environment")] string? Environment, - [property: JsonPropertyName("scopes")] IReadOnlyList Scopes, - [property: JsonPropertyName("iat")] long IssuedAt, - [property: JsonPropertyName("nbf")] long NotBefore, - [property: JsonPropertyName("exp")] long ExpiresAt, - [property: JsonPropertyName("jti")] string TokenId, - [property: JsonPropertyName("resource")] VulnPermalinkResource Resource); - - private sealed record VulnPermalinkResource( - [property: JsonPropertyName("kind")] string Kind, - [property: JsonPropertyName("state")] JsonElement State); -} +using System; +using System.Collections.Generic; +using System.Text; +using System.Text.Json; +using System.Text.Json.Serialization; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using Microsoft.IdentityModel.Tokens; +using StellaOps.Auth.Abstractions; +using StellaOps.Configuration; +using StellaOps.Cryptography; + +namespace StellaOps.Authority.Permalinks; + +internal sealed class VulnPermalinkService +{ + private static readonly JsonSerializerOptions PayloadSerializerOptions = new(JsonSerializerDefaults.Web) + { + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + PropertyNamingPolicy = JsonNamingPolicy.CamelCase + }; + + private static readonly JsonSerializerOptions HeaderSerializerOptions = new(JsonSerializerDefaults.General) + { + PropertyNamingPolicy = null, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + WriteIndented = false + }; + + private static readonly TimeSpan DefaultLifetime = TimeSpan.FromHours(24); + private static readonly TimeSpan MaxLifetime = TimeSpan.FromDays(30); + private const int MaxStateBytes = 8 * 1024; + + private readonly ICryptoProviderRegistry providerRegistry; + private readonly IOptions authorityOptions; + private readonly TimeProvider timeProvider; + private 
readonly ILogger logger; + + public VulnPermalinkService( + ICryptoProviderRegistry providerRegistry, + IOptions authorityOptions, + TimeProvider timeProvider, + ILogger logger) + { + this.providerRegistry = providerRegistry ?? throw new ArgumentNullException(nameof(providerRegistry)); + this.authorityOptions = authorityOptions ?? throw new ArgumentNullException(nameof(authorityOptions)); + this.timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public async Task CreateAsync(VulnPermalinkRequest request, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(request); + + var tenant = request.Tenant?.Trim(); + if (string.IsNullOrWhiteSpace(tenant)) + { + throw new ArgumentException("Tenant is required.", nameof(request)); + } + + var resourceKind = request.ResourceKind?.Trim(); + if (string.IsNullOrWhiteSpace(resourceKind)) + { + throw new ArgumentException("Resource kind is required.", nameof(request)); + } + + var stateJson = request.State.ValueKind == JsonValueKind.Undefined + ? "{}" + : request.State.GetRawText(); + + if (Encoding.UTF8.GetByteCount(stateJson) > MaxStateBytes) + { + throw new ArgumentException("State payload exceeds 8 KB limit.", nameof(request)); + } + + JsonElement stateElement; + using (var stateDocument = JsonDocument.Parse(string.IsNullOrWhiteSpace(stateJson) ? "{}" : stateJson)) + { + stateElement = stateDocument.RootElement.Clone(); + } + + var lifetime = request.ExpiresInSeconds.HasValue + ? TimeSpan.FromSeconds(request.ExpiresInSeconds.Value) + : DefaultLifetime; + + if (lifetime <= TimeSpan.Zero) + { + throw new ArgumentException("Expiration must be positive.", nameof(request)); + } + + if (lifetime > MaxLifetime) + { + lifetime = MaxLifetime; + } + + var signing = authorityOptions.Value.Signing + ?? throw new InvalidOperationException("Authority signing configuration is required to issue permalinks."); + + if (!signing.Enabled) + { + throw new InvalidOperationException("Authority signing is disabled. Enable signing to issue permalinks."); + } + + if (string.IsNullOrWhiteSpace(signing.ActiveKeyId)) + { + throw new InvalidOperationException("Authority signing configuration requires an active key identifier."); + } + + var algorithm = string.IsNullOrWhiteSpace(signing.Algorithm) + ? SignatureAlgorithms.Es256 + : signing.Algorithm.Trim(); + + var issuedAt = timeProvider.GetUtcNow(); + var expiresAt = issuedAt.Add(lifetime); + + var keyReference = new CryptoKeyReference(signing.ActiveKeyId, signing.Provider); + var resolution = providerRegistry.ResolveSigner( + CryptoCapability.Signing, + algorithm, + keyReference, + signing.Provider); + var signer = resolution.Signer; + + var payload = new VulnPermalinkPayload( + Subject: "vuln:permalink", + Audience: "stellaops:vuln-explorer", + Type: resourceKind, + Tenant: tenant, + Environment: string.IsNullOrWhiteSpace(request.Environment) ? 
null : request.Environment.Trim(), + Scopes: new[] { StellaOpsScopes.VulnRead }, + IssuedAt: issuedAt.ToUnixTimeSeconds(), + NotBefore: issuedAt.ToUnixTimeSeconds(), + ExpiresAt: expiresAt.ToUnixTimeSeconds(), + TokenId: Guid.NewGuid().ToString("N"), + Resource: new VulnPermalinkResource(resourceKind, stateElement)); + + var payloadBytes = JsonSerializer.SerializeToUtf8Bytes(payload, PayloadSerializerOptions); + var header = new Dictionary + { + ["alg"] = algorithm, + ["typ"] = "JWT", + ["kid"] = signer.KeyId + }; + + var headerBytes = JsonSerializer.SerializeToUtf8Bytes(header, HeaderSerializerOptions); + var encodedHeader = Base64UrlEncoder.Encode(headerBytes); + var encodedPayload = Base64UrlEncoder.Encode(payloadBytes); + + var signingInput = Encoding.ASCII.GetBytes(string.Concat(encodedHeader, '.', encodedPayload)); + var signatureBytes = await signer.SignAsync(signingInput, cancellationToken).ConfigureAwait(false); + var encodedSignature = Base64UrlEncoder.Encode(signatureBytes); + var token = string.Concat(encodedHeader, '.', encodedPayload, '.', encodedSignature); + + logger.LogDebug("Issued Vuln Explorer permalink for tenant {Tenant} with resource kind {Resource}.", tenant, resourceKind); + + return new VulnPermalinkResponse( + Token: token, + IssuedAt: issuedAt, + ExpiresAt: expiresAt, + Scopes: new[] { StellaOpsScopes.VulnRead }); + } + + private sealed record VulnPermalinkPayload( + [property: JsonPropertyName("sub")] string Subject, + [property: JsonPropertyName("aud")] string Audience, + [property: JsonPropertyName("type")] string Type, + [property: JsonPropertyName("tenant")] string Tenant, + [property: JsonPropertyName("environment")] string? Environment, + [property: JsonPropertyName("scopes")] IReadOnlyList Scopes, + [property: JsonPropertyName("iat")] long IssuedAt, + [property: JsonPropertyName("nbf")] long NotBefore, + [property: JsonPropertyName("exp")] long ExpiresAt, + [property: JsonPropertyName("jti")] string TokenId, + [property: JsonPropertyName("resource")] VulnPermalinkResource Resource); + + private sealed record VulnPermalinkResource( + [property: JsonPropertyName("kind")] string Kind, + [property: JsonPropertyName("state")] JsonElement State); +} diff --git a/src/StellaOps.Authority/StellaOps.Authority/Plugins/AuthorityPluginLoader.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Plugins/AuthorityPluginLoader.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Authority/Plugins/AuthorityPluginLoader.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/Plugins/AuthorityPluginLoader.cs index 51cabbb4..97fe4cfc 100644 --- a/src/StellaOps.Authority/StellaOps.Authority/Plugins/AuthorityPluginLoader.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/Plugins/AuthorityPluginLoader.cs @@ -1,342 +1,342 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Reflection; -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging; -using StellaOps.Authority.Plugins.Abstractions; -using StellaOps.Plugin.DependencyInjection; -using StellaOps.Plugin.Hosting; - -namespace StellaOps.Authority.Plugins; - -internal static class AuthorityPluginLoader -{ - public static AuthorityPluginRegistrationSummary RegisterPlugins( - IServiceCollection services, - IConfiguration configuration, - PluginHostOptions hostOptions, - IReadOnlyCollection pluginContexts, - ILogger? 
logger) - { - ArgumentNullException.ThrowIfNull(services); - ArgumentNullException.ThrowIfNull(configuration); - ArgumentNullException.ThrowIfNull(hostOptions); - ArgumentNullException.ThrowIfNull(pluginContexts); - - if (pluginContexts.Count == 0) - { - return AuthorityPluginRegistrationSummary.Empty; - } - - var loadResult = PluginHost.LoadPlugins(hostOptions, logger); - var descriptors = loadResult.Plugins - .Select(p => new LoadedPluginDescriptor(p.Assembly, p.AssemblyPath)) - .ToArray(); - - return RegisterPluginsCore( - services, - configuration, - pluginContexts, - descriptors, - loadResult.MissingOrderedPlugins, - logger); - } - - internal static AuthorityPluginRegistrationSummary RegisterPluginsCore( - IServiceCollection services, - IConfiguration configuration, - IReadOnlyCollection pluginContexts, - IReadOnlyCollection loadedAssemblies, - IReadOnlyCollection missingOrdered, - ILogger? logger) - { - var registrarCandidates = DiscoverRegistrars(loadedAssemblies); - var pluginTypeLookup = new Dictionary(StringComparer.OrdinalIgnoreCase); - var registrarTypeLookup = new Dictionary(); - var registered = new List(); - var failures = new List(); - - foreach (var pluginContext in pluginContexts) - { - var manifest = pluginContext.Manifest; - - if (!manifest.Enabled) - { - logger?.LogInformation( - "Skipping disabled Authority plugin '{PluginName}' ({PluginType}).", - manifest.Name, - manifest.Type); - continue; - } - - if (!IsAssemblyLoaded(manifest, loadedAssemblies)) - { - var reason = $"Assembly '{manifest.AssemblyName ?? manifest.AssemblyPath ?? manifest.Type}' was not loaded."; - logger?.LogError( - "Failed to register Authority plugin '{PluginName}': {Reason}", - manifest.Name, - reason); - failures.Add(new AuthorityPluginRegistrationFailure(manifest.Name, reason)); - continue; - } - - var activation = TryResolveActivationForManifest( - services, - manifest.Type, - registrarCandidates, - pluginTypeLookup, - registrarTypeLookup, - logger, - out var registrarType); - - if (activation is null || registrarType is null) - { - var reason = $"No registrar found for plugin type '{manifest.Type}'."; - logger?.LogError( - "Failed to register Authority plugin '{PluginName}': {Reason}", - manifest.Name, - reason); - failures.Add(new AuthorityPluginRegistrationFailure(manifest.Name, reason)); - continue; - } - - try - { - PluginServiceRegistration.RegisterAssemblyMetadata(services, registrarType.Assembly, logger); - - activation.Registrar.Register(new AuthorityPluginRegistrationContext(services, pluginContext, configuration)); - registered.Add(manifest.Name); - - logger?.LogInformation( - "Registered Authority plugin '{PluginName}' ({PluginType}).", - manifest.Name, - manifest.Type); - } - catch (Exception ex) - { - var reason = $"Registration threw {ex.GetType().Name}."; - logger?.LogError( - ex, - "Failed to register Authority plugin '{PluginName}'.", - manifest.Name); - failures.Add(new AuthorityPluginRegistrationFailure(manifest.Name, reason)); - } - finally - { - activation.Dispose(); - } - } - - if (missingOrdered.Count > 0) - { - foreach (var missing in missingOrdered) - { - logger?.LogWarning( - "Configured plugin '{PluginName}' was not found in the plugin directory.", - missing); - } - } - - return new AuthorityPluginRegistrationSummary(registered, failures, missingOrdered); - } - - private static IReadOnlyList DiscoverRegistrars(IReadOnlyCollection loadedAssemblies) - { - var registrars = new List(); - - foreach (var descriptor in loadedAssemblies) - { - foreach (var type in 
GetLoadableTypes(descriptor.Assembly)) - { - if (!typeof(IAuthorityPluginRegistrar).IsAssignableFrom(type) || type.IsAbstract || type.IsInterface) - { - continue; - } - - registrars.Add(type); - } - } - - return registrars; - } - - private static RegistrarActivation? TryResolveActivationForManifest( - IServiceCollection services, - string pluginType, - IReadOnlyList registrarCandidates, - IDictionary pluginTypeLookup, - IDictionary registrarTypeLookup, - ILogger? logger, - out Type? resolvedType) - { - resolvedType = null; - - if (pluginTypeLookup.TryGetValue(pluginType, out var cachedType)) - { - var cachedActivation = CreateRegistrarActivation(services, cachedType, logger); - if (cachedActivation is null) - { - pluginTypeLookup.Remove(pluginType); - registrarTypeLookup.Remove(cachedType); - return null; - } - - resolvedType = cachedType; - return cachedActivation; - } - - foreach (var candidate in registrarCandidates) - { - if (registrarTypeLookup.TryGetValue(candidate, out var knownType)) - { - if (string.IsNullOrWhiteSpace(knownType)) - { - continue; - } - - if (string.Equals(knownType, pluginType, StringComparison.OrdinalIgnoreCase)) - { - var activation = CreateRegistrarActivation(services, candidate, logger); - if (activation is null) - { - registrarTypeLookup.Remove(candidate); - pluginTypeLookup.Remove(knownType); - return null; - } - - resolvedType = candidate; - return activation; - } - - continue; - } - - var attempt = CreateRegistrarActivation(services, candidate, logger); - if (attempt is null) - { - registrarTypeLookup[candidate] = string.Empty; - continue; - } - - var candidateType = attempt.Registrar.PluginType; - if (string.IsNullOrWhiteSpace(candidateType)) - { - logger?.LogWarning( - "Authority plugin registrar '{RegistrarType}' reported an empty plugin type and will be ignored.", - candidate.FullName); - registrarTypeLookup[candidate] = string.Empty; - attempt.Dispose(); - continue; - } - - registrarTypeLookup[candidate] = candidateType; - pluginTypeLookup[candidateType] = candidate; - - if (string.Equals(candidateType, pluginType, StringComparison.OrdinalIgnoreCase)) - { - resolvedType = candidate; - return attempt; - } - - attempt.Dispose(); - } - - return null; - } - - private static RegistrarActivation? CreateRegistrarActivation(IServiceCollection services, Type registrarType, ILogger? logger) - { - ServiceProvider? provider = null; - IServiceScope? 
scope = null; - try - { - provider = services.BuildServiceProvider(new ServiceProviderOptions - { - ValidateScopes = true - }); - - scope = provider.CreateScope(); - var registrar = (IAuthorityPluginRegistrar)ActivatorUtilities.GetServiceOrCreateInstance(scope.ServiceProvider, registrarType); - return new RegistrarActivation(provider, scope, registrar); - } - catch (Exception ex) - { - logger?.LogError( - ex, - "Failed to activate Authority plugin registrar '{RegistrarType}'.", - registrarType.FullName); - - scope?.Dispose(); - provider?.Dispose(); - return null; - } - } - - private sealed class RegistrarActivation : IDisposable - { - private readonly ServiceProvider provider; - private readonly IServiceScope scope; - - public RegistrarActivation(ServiceProvider provider, IServiceScope scope, IAuthorityPluginRegistrar registrar) - { - this.provider = provider; - this.scope = scope; - Registrar = registrar; - } - - public IAuthorityPluginRegistrar Registrar { get; } - - public void Dispose() - { - scope.Dispose(); - provider.Dispose(); - } - } - - private static bool IsAssemblyLoaded( - AuthorityPluginManifest manifest, - IReadOnlyCollection loadedAssemblies) - { - if (!string.IsNullOrWhiteSpace(manifest.AssemblyName) && - loadedAssemblies.Any(descriptor => - string.Equals( - descriptor.Assembly.GetName().Name, - manifest.AssemblyName, - StringComparison.OrdinalIgnoreCase))) - { - return true; - } - - if (!string.IsNullOrWhiteSpace(manifest.AssemblyPath) && - loadedAssemblies.Any(descriptor => - string.Equals( - descriptor.AssemblyPath, - manifest.AssemblyPath, - StringComparison.OrdinalIgnoreCase))) - { - return true; - } - - // As a fallback, assume any loaded assembly whose simple name contains the plugin type is a match. - return loadedAssemblies.Any(descriptor => - descriptor.Assembly.GetName().Name?.Contains(manifest.Type, StringComparison.OrdinalIgnoreCase) == true); - } - - private static IEnumerable GetLoadableTypes(Assembly assembly) - { - try - { - return assembly.GetTypes(); - } - catch (ReflectionTypeLoadException ex) - { - return ex.Types.Where(static type => type is not null)!; - } - } - - internal readonly record struct LoadedPluginDescriptor( - Assembly Assembly, - string AssemblyPath); -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Reflection; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using StellaOps.Authority.Plugins.Abstractions; +using StellaOps.Plugin.DependencyInjection; +using StellaOps.Plugin.Hosting; + +namespace StellaOps.Authority.Plugins; + +internal static class AuthorityPluginLoader +{ + public static AuthorityPluginRegistrationSummary RegisterPlugins( + IServiceCollection services, + IConfiguration configuration, + PluginHostOptions hostOptions, + IReadOnlyCollection pluginContexts, + ILogger? 
logger) + { + ArgumentNullException.ThrowIfNull(services); + ArgumentNullException.ThrowIfNull(configuration); + ArgumentNullException.ThrowIfNull(hostOptions); + ArgumentNullException.ThrowIfNull(pluginContexts); + + if (pluginContexts.Count == 0) + { + return AuthorityPluginRegistrationSummary.Empty; + } + + var loadResult = PluginHost.LoadPlugins(hostOptions, logger); + var descriptors = loadResult.Plugins + .Select(p => new LoadedPluginDescriptor(p.Assembly, p.AssemblyPath)) + .ToArray(); + + return RegisterPluginsCore( + services, + configuration, + pluginContexts, + descriptors, + loadResult.MissingOrderedPlugins, + logger); + } + + internal static AuthorityPluginRegistrationSummary RegisterPluginsCore( + IServiceCollection services, + IConfiguration configuration, + IReadOnlyCollection pluginContexts, + IReadOnlyCollection loadedAssemblies, + IReadOnlyCollection missingOrdered, + ILogger? logger) + { + var registrarCandidates = DiscoverRegistrars(loadedAssemblies); + var pluginTypeLookup = new Dictionary(StringComparer.OrdinalIgnoreCase); + var registrarTypeLookup = new Dictionary(); + var registered = new List(); + var failures = new List(); + + foreach (var pluginContext in pluginContexts) + { + var manifest = pluginContext.Manifest; + + if (!manifest.Enabled) + { + logger?.LogInformation( + "Skipping disabled Authority plugin '{PluginName}' ({PluginType}).", + manifest.Name, + manifest.Type); + continue; + } + + if (!IsAssemblyLoaded(manifest, loadedAssemblies)) + { + var reason = $"Assembly '{manifest.AssemblyName ?? manifest.AssemblyPath ?? manifest.Type}' was not loaded."; + logger?.LogError( + "Failed to register Authority plugin '{PluginName}': {Reason}", + manifest.Name, + reason); + failures.Add(new AuthorityPluginRegistrationFailure(manifest.Name, reason)); + continue; + } + + var activation = TryResolveActivationForManifest( + services, + manifest.Type, + registrarCandidates, + pluginTypeLookup, + registrarTypeLookup, + logger, + out var registrarType); + + if (activation is null || registrarType is null) + { + var reason = $"No registrar found for plugin type '{manifest.Type}'."; + logger?.LogError( + "Failed to register Authority plugin '{PluginName}': {Reason}", + manifest.Name, + reason); + failures.Add(new AuthorityPluginRegistrationFailure(manifest.Name, reason)); + continue; + } + + try + { + PluginServiceRegistration.RegisterAssemblyMetadata(services, registrarType.Assembly, logger); + + activation.Registrar.Register(new AuthorityPluginRegistrationContext(services, pluginContext, configuration)); + registered.Add(manifest.Name); + + logger?.LogInformation( + "Registered Authority plugin '{PluginName}' ({PluginType}).", + manifest.Name, + manifest.Type); + } + catch (Exception ex) + { + var reason = $"Registration threw {ex.GetType().Name}."; + logger?.LogError( + ex, + "Failed to register Authority plugin '{PluginName}'.", + manifest.Name); + failures.Add(new AuthorityPluginRegistrationFailure(manifest.Name, reason)); + } + finally + { + activation.Dispose(); + } + } + + if (missingOrdered.Count > 0) + { + foreach (var missing in missingOrdered) + { + logger?.LogWarning( + "Configured plugin '{PluginName}' was not found in the plugin directory.", + missing); + } + } + + return new AuthorityPluginRegistrationSummary(registered, failures, missingOrdered); + } + + private static IReadOnlyList DiscoverRegistrars(IReadOnlyCollection loadedAssemblies) + { + var registrars = new List(); + + foreach (var descriptor in loadedAssemblies) + { + foreach (var type in 
GetLoadableTypes(descriptor.Assembly)) + { + if (!typeof(IAuthorityPluginRegistrar).IsAssignableFrom(type) || type.IsAbstract || type.IsInterface) + { + continue; + } + + registrars.Add(type); + } + } + + return registrars; + } + + private static RegistrarActivation? TryResolveActivationForManifest( + IServiceCollection services, + string pluginType, + IReadOnlyList registrarCandidates, + IDictionary pluginTypeLookup, + IDictionary registrarTypeLookup, + ILogger? logger, + out Type? resolvedType) + { + resolvedType = null; + + if (pluginTypeLookup.TryGetValue(pluginType, out var cachedType)) + { + var cachedActivation = CreateRegistrarActivation(services, cachedType, logger); + if (cachedActivation is null) + { + pluginTypeLookup.Remove(pluginType); + registrarTypeLookup.Remove(cachedType); + return null; + } + + resolvedType = cachedType; + return cachedActivation; + } + + foreach (var candidate in registrarCandidates) + { + if (registrarTypeLookup.TryGetValue(candidate, out var knownType)) + { + if (string.IsNullOrWhiteSpace(knownType)) + { + continue; + } + + if (string.Equals(knownType, pluginType, StringComparison.OrdinalIgnoreCase)) + { + var activation = CreateRegistrarActivation(services, candidate, logger); + if (activation is null) + { + registrarTypeLookup.Remove(candidate); + pluginTypeLookup.Remove(knownType); + return null; + } + + resolvedType = candidate; + return activation; + } + + continue; + } + + var attempt = CreateRegistrarActivation(services, candidate, logger); + if (attempt is null) + { + registrarTypeLookup[candidate] = string.Empty; + continue; + } + + var candidateType = attempt.Registrar.PluginType; + if (string.IsNullOrWhiteSpace(candidateType)) + { + logger?.LogWarning( + "Authority plugin registrar '{RegistrarType}' reported an empty plugin type and will be ignored.", + candidate.FullName); + registrarTypeLookup[candidate] = string.Empty; + attempt.Dispose(); + continue; + } + + registrarTypeLookup[candidate] = candidateType; + pluginTypeLookup[candidateType] = candidate; + + if (string.Equals(candidateType, pluginType, StringComparison.OrdinalIgnoreCase)) + { + resolvedType = candidate; + return attempt; + } + + attempt.Dispose(); + } + + return null; + } + + private static RegistrarActivation? CreateRegistrarActivation(IServiceCollection services, Type registrarType, ILogger? logger) + { + ServiceProvider? provider = null; + IServiceScope? 
scope = null; + try + { + provider = services.BuildServiceProvider(new ServiceProviderOptions + { + ValidateScopes = true + }); + + scope = provider.CreateScope(); + var registrar = (IAuthorityPluginRegistrar)ActivatorUtilities.GetServiceOrCreateInstance(scope.ServiceProvider, registrarType); + return new RegistrarActivation(provider, scope, registrar); + } + catch (Exception ex) + { + logger?.LogError( + ex, + "Failed to activate Authority plugin registrar '{RegistrarType}'.", + registrarType.FullName); + + scope?.Dispose(); + provider?.Dispose(); + return null; + } + } + + private sealed class RegistrarActivation : IDisposable + { + private readonly ServiceProvider provider; + private readonly IServiceScope scope; + + public RegistrarActivation(ServiceProvider provider, IServiceScope scope, IAuthorityPluginRegistrar registrar) + { + this.provider = provider; + this.scope = scope; + Registrar = registrar; + } + + public IAuthorityPluginRegistrar Registrar { get; } + + public void Dispose() + { + scope.Dispose(); + provider.Dispose(); + } + } + + private static bool IsAssemblyLoaded( + AuthorityPluginManifest manifest, + IReadOnlyCollection loadedAssemblies) + { + if (!string.IsNullOrWhiteSpace(manifest.AssemblyName) && + loadedAssemblies.Any(descriptor => + string.Equals( + descriptor.Assembly.GetName().Name, + manifest.AssemblyName, + StringComparison.OrdinalIgnoreCase))) + { + return true; + } + + if (!string.IsNullOrWhiteSpace(manifest.AssemblyPath) && + loadedAssemblies.Any(descriptor => + string.Equals( + descriptor.AssemblyPath, + manifest.AssemblyPath, + StringComparison.OrdinalIgnoreCase))) + { + return true; + } + + // As a fallback, assume any loaded assembly whose simple name contains the plugin type is a match. + return loadedAssemblies.Any(descriptor => + descriptor.Assembly.GetName().Name?.Contains(manifest.Type, StringComparison.OrdinalIgnoreCase) == true); + } + + private static IEnumerable GetLoadableTypes(Assembly assembly) + { + try + { + return assembly.GetTypes(); + } + catch (ReflectionTypeLoadException ex) + { + return ex.Types.Where(static type => type is not null)!; + } + } + + internal readonly record struct LoadedPluginDescriptor( + Assembly Assembly, + string AssemblyPath); +} diff --git a/src/StellaOps.Authority/StellaOps.Authority/Plugins/AuthorityPluginRegistrationSummary.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Plugins/AuthorityPluginRegistrationSummary.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority/Plugins/AuthorityPluginRegistrationSummary.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/Plugins/AuthorityPluginRegistrationSummary.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority/Program.Partial.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Program.Partial.cs similarity index 91% rename from src/StellaOps.Authority/StellaOps.Authority/Program.Partial.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/Program.Partial.cs index 03bc67ee..3a751f96 100644 --- a/src/StellaOps.Authority/StellaOps.Authority/Program.Partial.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/Program.Partial.cs @@ -1,3 +1,3 @@ -public partial class Program -{ -} +public partial class Program +{ +} diff --git a/src/StellaOps.Authority/StellaOps.Authority/Program.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Program.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Authority/Program.cs rename to 
src/Authority/StellaOps.Authority/StellaOps.Authority/Program.cs index 522a7314..df9f1f66 100644 --- a/src/StellaOps.Authority/StellaOps.Authority/Program.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/Program.cs @@ -1,1335 +1,1335 @@ -using System; -using System.Diagnostics; -using System.Globalization; -using Microsoft.AspNetCore.Builder; -using Microsoft.AspNetCore.Http; -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.DependencyInjection.Extensions; -using Microsoft.Extensions.Hosting; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using Microsoft.AspNetCore.RateLimiting; -using Microsoft.AspNetCore.Server.Kestrel.Https; -using Microsoft.Extensions.Logging.Abstractions; -using OpenIddict.Abstractions; -using OpenIddict.Server; -using OpenIddict.Server.AspNetCore; -using MongoDB.Driver; -using Serilog; -using Serilog.Events; -using StellaOps.Authority; -using StellaOps.Authority.Audit; -using StellaOps.Authority.Plugins.Abstractions; -using StellaOps.Authority.Plugins; -using StellaOps.Authority.Bootstrap; -using StellaOps.Authority.Storage.Mongo.Extensions; -using StellaOps.Authority.Storage.Mongo.Initialization; -using StellaOps.Authority.Storage.Mongo.Stores; -using StellaOps.Authority.RateLimiting; -using StellaOps.Configuration; -using StellaOps.Plugin.DependencyInjection; -using StellaOps.Plugin.Hosting; -using StellaOps.Authority.OpenIddict.Handlers; -using System.Linq; -using StellaOps.Cryptography.Audit; -using StellaOps.Cryptography.DependencyInjection; -using StellaOps.Authority.Permalinks; -using StellaOps.Authority.Revocation; -using StellaOps.Authority.Signing; -using StellaOps.Cryptography; -using StellaOps.Authority.Storage.Mongo.Documents; -using StellaOps.Authority.Security; -using StellaOps.Auth.Abstractions; -using StellaOps.Auth.ServerIntegration; -#if STELLAOPS_AUTH_SECURITY -using StellaOps.Auth.Security.Dpop; -using StackExchange.Redis; -#endif - -var builder = WebApplication.CreateBuilder(args); - -Activity.DefaultIdFormat = ActivityIdFormat.W3C; -Activity.ForceDefaultIdFormat = true; - -AuthorityTelemetryConfiguration.Configure(builder); - -var authorityConfiguration = StellaOpsAuthorityConfiguration.Build(options => -{ - options.BasePath = builder.Environment.ContentRootPath; - options.EnvironmentPrefix = "STELLAOPS_AUTHORITY_"; - options.ConfigureBuilder = configurationBuilder => - { - var contentRoot = builder.Environment.ContentRootPath; - foreach (var relative in new[] - { - "../etc/authority.yaml", - "../etc/authority.local.yaml", - "authority.yaml", - "authority.local.yaml" - }) - { - var path = Path.Combine(contentRoot, relative); - configurationBuilder.AddYamlFile(path, optional: true); - } - }; -}); - -builder.WebHost.ConfigureKestrel(options => -{ - options.ConfigureHttpsDefaults(https => - { - https.ClientCertificateMode = ClientCertificateMode.AllowCertificate; - https.CheckCertificateRevocation = true; - }); -}); - -builder.Configuration.AddConfiguration(authorityConfiguration.Configuration); - -builder.Host.UseSerilog((context, _, loggerConfiguration) => -{ - loggerConfiguration - .ReadFrom.Configuration(context.Configuration) - .Enrich.FromLogContext() - .MinimumLevel.Override("Microsoft.AspNetCore.Hosting.Diagnostics", LogEventLevel.Warning) - .WriteTo.Console(); -}); - -var authorityOptions = authorityConfiguration.Options; -var issuerUri = authorityOptions.Issuer; -if (issuerUri is null) -{ - var issuerValue = 
builder.Configuration["Authority:Issuer"]; - if (string.IsNullOrWhiteSpace(issuerValue)) - { - throw new InvalidOperationException("Authority issuer configuration is required."); - } - - issuerUri = new Uri(issuerValue, UriKind.Absolute); -} - -authorityOptions.Issuer = issuerUri; -builder.Services.AddSingleton(authorityOptions); -builder.Services.AddSingleton>(Options.Create(authorityOptions)); -builder.Services.AddHttpContextAccessor(); -builder.Services.TryAddSingleton(_ => TimeProvider.System); -builder.Services.TryAddSingleton(); -builder.Services.TryAddSingleton(); -builder.Services.AddSingleton(); - -#if STELLAOPS_AUTH_SECURITY -var senderConstraints = authorityOptions.Security.SenderConstraints; - -builder.Services.AddOptions() - .Configure(options => - { - options.ProofLifetime = senderConstraints.Dpop.ProofLifetime; - options.AllowedClockSkew = senderConstraints.Dpop.AllowedClockSkew; - options.ReplayWindow = senderConstraints.Dpop.ReplayWindow; - - options.AllowedAlgorithms.Clear(); - foreach (var algorithm in senderConstraints.Dpop.NormalizedAlgorithms) - { - options.AllowedAlgorithms.Add(algorithm); - } - }) - .PostConfigure(static options => options.Validate()); - -builder.Services.TryAddSingleton(provider => new InMemoryDpopReplayCache(provider.GetService())); -builder.Services.TryAddSingleton(); -if (string.Equals(senderConstraints.Dpop.Nonce.Store, "redis", StringComparison.OrdinalIgnoreCase)) -{ - builder.Services.TryAddSingleton(_ => - ConnectionMultiplexer.Connect(senderConstraints.Dpop.Nonce.RedisConnectionString!)); - - builder.Services.TryAddSingleton(provider => - { - var multiplexer = provider.GetRequiredService(); - var timeProvider = provider.GetService(); - return new RedisDpopNonceStore(multiplexer, timeProvider); - }); -} -else -{ - builder.Services.TryAddSingleton(provider => - { - var timeProvider = provider.GetService(); - var nonceLogger = provider.GetService>(); - return new InMemoryDpopNonceStore(timeProvider, nonceLogger); - }); -} - -builder.Services.AddScoped(); -#endif - -builder.Services.AddRateLimiter(rateLimiterOptions => -{ - AuthorityRateLimiter.Configure(rateLimiterOptions, authorityOptions); -}); - -builder.Services.AddStellaOpsCrypto(); -builder.Services.TryAddEnumerable(ServiceDescriptor.Singleton()); -builder.Services.AddSingleton(); -builder.Services.AddSingleton(); - -AuthorityPluginContext[] pluginContexts = AuthorityPluginConfigurationLoader - .Load(authorityOptions, builder.Environment.ContentRootPath) - .ToArray(); - -builder.Services.AddSingleton>(pluginContexts); -builder.Services.AddSingleton(_ => new AuthorityPluginRegistry(pluginContexts)); - -var pluginHostOptions = BuildPluginHostOptions(authorityOptions, builder.Environment.ContentRootPath); -builder.Services.AddSingleton(pluginHostOptions); -builder.Services.RegisterPluginRoutines(authorityConfiguration.Configuration, pluginHostOptions); - -builder.Services.AddAuthorityMongoStorage(storageOptions => -{ - storageOptions.ConnectionString = authorityOptions.Storage.ConnectionString; - storageOptions.DatabaseName = authorityOptions.Storage.DatabaseName; - storageOptions.CommandTimeout = authorityOptions.Storage.CommandTimeout; -}); - -builder.Services.AddSingleton(); -builder.Services.AddSingleton(); -builder.Services.AddScoped(); -builder.Services.AddScoped(); -builder.Services.AddScoped(); -builder.Services.AddScoped(); -builder.Services.AddScoped(); -builder.Services.AddScoped(); -builder.Services.AddScoped(); -builder.Services.AddSingleton(); -builder.Services.AddSingleton(); 
-builder.Services.AddSingleton(); -builder.Services.AddSingleton(); -builder.Services.AddHostedService(); - -var pluginRegistrationSummary = AuthorityPluginLoader.RegisterPlugins( - builder.Services, - authorityConfiguration.Configuration, - pluginHostOptions, - pluginContexts, - NullLogger.Instance); - -builder.Services.AddSingleton(pluginRegistrationSummary); - -builder.Services.AddRouting(options => options.LowercaseUrls = true); -builder.Services.AddProblemDetails(); -builder.Services.AddAuthentication(); -builder.Services.AddAuthorization(); - -builder.Services.AddOpenIddict() - .AddServer(options => - { - options.SetIssuer(issuerUri); - options.SetTokenEndpointUris("/token"); - options.SetAuthorizationEndpointUris("/authorize"); - options.SetIntrospectionEndpointUris("/introspect"); - options.SetRevocationEndpointUris("/revoke"); - options.SetJsonWebKeySetEndpointUris("/jwks"); - - options.AllowPasswordFlow(); - options.AllowClientCredentialsFlow(); - options.AllowRefreshTokenFlow(); - - options.SetAccessTokenLifetime(authorityOptions.AccessTokenLifetime); - options.SetRefreshTokenLifetime(authorityOptions.RefreshTokenLifetime); - options.SetIdentityTokenLifetime(authorityOptions.IdentityTokenLifetime); - options.SetAuthorizationCodeLifetime(authorityOptions.AuthorizationCodeLifetime); - options.SetDeviceCodeLifetime(authorityOptions.DeviceCodeLifetime); - - options.DisableAccessTokenEncryption(); - options.DisableTokenStorage(); - options.DisableAuthorizationStorage(); - - options.RegisterScopes( - new[] - { - OpenIddictConstants.Scopes.OpenId, - OpenIddictConstants.Scopes.Email, - OpenIddictConstants.Scopes.Profile, - OpenIddictConstants.Scopes.OfflineAccess - } - .Concat(StellaOpsScopes.All) - .Distinct(StringComparer.Ordinal) - .ToArray()); - - options.AddEphemeralEncryptionKey() - .AddEphemeralSigningKey(); - - var aspNetCoreBuilder = options.UseAspNetCore() - .EnableAuthorizationEndpointPassthrough() - .EnableTokenEndpointPassthrough(); - - if (builder.Environment.IsDevelopment()) - { - aspNetCoreBuilder.DisableTransportSecurityRequirement(); - } - -#if STELLAOPS_AUTH_SECURITY - options.AddEventHandler(descriptor => - { - descriptor.UseScopedHandler(); - }); -#endif - - options.AddEventHandler(descriptor => - { - descriptor.UseScopedHandler(); - }); - - options.AddEventHandler(descriptor => - { - descriptor.UseScopedHandler(); - }); - - options.AddEventHandler(descriptor => - { - descriptor.UseScopedHandler(); - }); - - options.AddEventHandler(descriptor => - { - descriptor.UseScopedHandler(); - }); - - options.AddEventHandler(descriptor => - { - descriptor.UseScopedHandler(); - }); - - options.AddEventHandler(descriptor => - { - descriptor.UseScopedHandler(); - }); - - options.AddEventHandler(descriptor => - { - descriptor.UseScopedHandler(); - }); - }); - -builder.Services.Configure(options => -{ - options.DisableSlidingRefreshTokenExpiration = false; - options.DisableRollingRefreshTokens = false; -}); - -var app = builder.Build(); - -var mongoInitializer = app.Services.GetRequiredService(); -var mongoDatabase = app.Services.GetRequiredService(); -await mongoInitializer.InitialiseAsync(mongoDatabase, CancellationToken.None); - -var registrationSummary = app.Services.GetRequiredService(); -if (registrationSummary.RegisteredPlugins.Count > 0) -{ - app.Logger.LogInformation( - "Authority plugins registered: {Plugins}", - string.Join(", ", registrationSummary.RegisteredPlugins)); -} - -foreach (var failure in registrationSummary.Failures) -{ - app.Logger.LogError( - "Authority 
plugin '{PluginName}' failed to register: {Reason}", - failure.PluginName, - failure.Reason); -} - -foreach (var missing in registrationSummary.MissingOrderedPlugins) -{ - app.Logger.LogWarning( - "Configured Authority plugin '{PluginName}' was not discovered during startup.", - missing); -} - -var identityProviderRegistry = app.Services.GetRequiredService(); -if (identityProviderRegistry.Providers.Count == 0) -{ - app.Logger.LogWarning("No identity provider plugins were registered."); -} -else -{ - foreach (var provider in identityProviderRegistry.Providers) - { - var caps = provider.Capabilities; - app.Logger.LogInformation( - "Identity provider plugin '{PluginName}' (type {PluginType}) capabilities: password={Password}, mfa={Mfa}, clientProvisioning={ClientProvisioning}.", - provider.Name, - provider.Type, - caps.SupportsPassword, - caps.SupportsMfa, - caps.SupportsClientProvisioning); - } -} - -if (authorityOptions.Bootstrap.Enabled) -{ - var bootstrapGroup = app.MapGroup("/internal"); - bootstrapGroup.AddEndpointFilter(new BootstrapApiKeyFilter(authorityOptions)); - - bootstrapGroup.MapPost("/users", async ( - HttpContext httpContext, - BootstrapUserRequest request, - IAuthorityIdentityProviderRegistry registry, - IAuthorityBootstrapInviteStore inviteStore, - IAuthEventSink auditSink, - TimeProvider timeProvider, - CancellationToken cancellationToken) => - { - if (request is null) - { - await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, "Request payload is required.", null, null, null, Array.Empty(), null).ConfigureAwait(false); - return Results.BadRequest(new { error = "invalid_request", message = "Request payload is required." }); - } - - var now = timeProvider.GetUtcNow(); - var inviteToken = string.IsNullOrWhiteSpace(request.InviteToken) ? null : request.InviteToken.Trim(); - AuthorityBootstrapInviteDocument? invite = null; - var inviteReserved = false; - - async Task ReleaseInviteAsync(string reason) - { - if (inviteToken is null) - { - return; - } - - if (inviteReserved) - { - await inviteStore.ReleaseAsync(inviteToken, cancellationToken).ConfigureAwait(false); - } - - await WriteInviteAuditAsync("authority.bootstrap.invite.rejected", AuthEventOutcome.Failure, reason, invite, inviteToken).ConfigureAwait(false); - } - - if (inviteToken is not null) - { - var reservation = await inviteStore.TryReserveAsync(inviteToken, BootstrapInviteTypes.User, now, request.Username, cancellationToken).ConfigureAwait(false); - - switch (reservation.Status) - { - case BootstrapInviteReservationStatus.Reserved: - inviteReserved = true; - invite = reservation.Invite; - break; - case BootstrapInviteReservationStatus.Expired: - await WriteInviteAuditAsync("authority.bootstrap.invite.expired", AuthEventOutcome.Failure, "Invite expired before use.", reservation.Invite, inviteToken).ConfigureAwait(false); - return Results.BadRequest(new { error = "invite_expired", message = "Invite has expired." }); - case BootstrapInviteReservationStatus.AlreadyUsed: - await WriteInviteAuditAsync("authority.bootstrap.invite.rejected", AuthEventOutcome.Failure, "Invite token already consumed.", reservation.Invite, inviteToken).ConfigureAwait(false); - return Results.BadRequest(new { error = "invite_used", message = "Invite token has already been used." 
}); - default: - await WriteInviteAuditAsync("authority.bootstrap.invite.rejected", AuthEventOutcome.Failure, "Invite token not found.", reservation.Invite, inviteToken).ConfigureAwait(false); - return Results.BadRequest(new { error = "invalid_invite", message = "Invite token is invalid." }); - } - } - - var providerName = string.IsNullOrWhiteSpace(request.Provider) - ? invite?.Provider ?? authorityOptions.Bootstrap.DefaultIdentityProvider - : request.Provider; - - if (invite is not null && !string.IsNullOrWhiteSpace(invite.Provider) && - !string.Equals(invite.Provider, providerName, StringComparison.OrdinalIgnoreCase)) - { - await ReleaseInviteAsync("Invite provider does not match requested provider."); - return Results.BadRequest(new { error = "invite_provider_mismatch", message = "Invite is limited to a different identity provider." }); - } - - if (string.IsNullOrWhiteSpace(providerName) || !registry.TryGet(providerName!, out var providerMetadata)) - { - await ReleaseInviteAsync("Specified identity provider was not found."); - await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, "Specified identity provider was not found.", null, request.Username, providerName, request.Roles ?? Array.Empty(), inviteToken).ConfigureAwait(false); - return Results.BadRequest(new { error = "invalid_provider", message = "Specified identity provider was not found." }); - } - - if (!providerMetadata.Capabilities.SupportsPassword) - { - await ReleaseInviteAsync("Selected provider does not support password provisioning."); - await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, "Selected provider does not support password provisioning.", null, request.Username, providerMetadata.Name, request.Roles ?? Array.Empty(), inviteToken).ConfigureAwait(false); - return Results.BadRequest(new { error = "unsupported_provider", message = "Selected provider does not support password provisioning." }); - } - - if (string.IsNullOrWhiteSpace(request.Username) || string.IsNullOrEmpty(request.Password)) - { - await ReleaseInviteAsync("Username and password are required."); - await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, "Username and password are required.", null, request.Username, providerMetadata.Name, request.Roles ?? Array.Empty(), inviteToken).ConfigureAwait(false); - return Results.BadRequest(new { error = "invalid_request", message = "Username and password are required." }); - } - - if (invite is not null && !string.IsNullOrWhiteSpace(invite.Target) && - !string.Equals(invite.Target, request.Username, StringComparison.OrdinalIgnoreCase)) - { - await ReleaseInviteAsync("Invite target does not match requested username."); - return Results.BadRequest(new { error = "invite_target_mismatch", message = "Invite target does not match username." }); - } - - var roles = request.Roles is null ? Array.Empty() : request.Roles.ToArray(); - var attributes = request.Attributes is null - ? 
new Dictionary(StringComparer.OrdinalIgnoreCase) - : new Dictionary(request.Attributes, StringComparer.OrdinalIgnoreCase); - - var registration = new AuthorityUserRegistration( - request.Username, - request.Password, - request.DisplayName, - request.Email, - request.RequirePasswordReset, - roles, - attributes); - - await using var providerHandle = await registry.AcquireAsync(providerMetadata.Name, cancellationToken).ConfigureAwait(false); - var provider = providerHandle.Provider; - - try - { - var result = await provider.Credentials.UpsertUserAsync(registration, cancellationToken).ConfigureAwait(false); - - if (!result.Succeeded || result.Value is null) - { - await ReleaseInviteAsync(result.Message ?? "User provisioning failed."); - await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, result.Message ?? "User provisioning failed.", null, request.Username, providerMetadata.Name, roles, inviteToken).ConfigureAwait(false); - return Results.BadRequest(new { error = result.ErrorCode ?? "bootstrap_failed", message = result.Message ?? "User provisioning failed." }); - } - - if (inviteReserved && inviteToken is not null) - { - var consumed = await inviteStore.MarkConsumedAsync(inviteToken, result.Value.SubjectId ?? result.Value.Username, now, cancellationToken).ConfigureAwait(false); - if (consumed) - { - await WriteInviteAuditAsync("authority.bootstrap.invite.consumed", AuthEventOutcome.Success, null, invite, inviteToken).ConfigureAwait(false); - } - } - - await WriteBootstrapUserAuditAsync(AuthEventOutcome.Success, null, result.Value.SubjectId, result.Value.Username, providerMetadata.Name, roles, inviteToken).ConfigureAwait(false); - - return Results.Ok(new - { - provider = providerMetadata.Name, - subjectId = result.Value.SubjectId, - username = result.Value.Username - }); - } - catch - { - if (inviteReserved && inviteToken is not null) - { - await inviteStore.ReleaseAsync(inviteToken, cancellationToken).ConfigureAwait(false); - await WriteInviteAuditAsync("authority.bootstrap.invite.released", AuthEventOutcome.Error, "Invite released due to provisioning failure.", invite, inviteToken).ConfigureAwait(false); - } - - throw; - } - - async Task WriteBootstrapUserAuditAsync(AuthEventOutcome outcome, string? reason, string? subjectId, string? usernameValue, string? providerValue, IReadOnlyCollection rolesValue, string? inviteValue) - { - var correlationId = Activity.Current?.TraceId.ToString() ?? httpContext.TraceIdentifier ?? Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture); - AuthEventNetwork? network = null; - var remoteAddress = httpContext.Connection.RemoteIpAddress?.ToString(); - var userAgent = httpContext.Request.Headers.UserAgent.ToString(); - - if (!string.IsNullOrWhiteSpace(remoteAddress) || !string.IsNullOrWhiteSpace(userAgent)) - { - network = new AuthEventNetwork - { - RemoteAddress = ClassifiedString.Personal(remoteAddress), - UserAgent = ClassifiedString.Personal(string.IsNullOrWhiteSpace(userAgent) ? null : userAgent) - }; - } - - var subject = subjectId is null && string.IsNullOrWhiteSpace(usernameValue) && string.IsNullOrWhiteSpace(providerValue) - ? 
null - : new AuthEventSubject - { - SubjectId = ClassifiedString.Personal(subjectId), - Username = ClassifiedString.Personal(usernameValue), - Realm = ClassifiedString.Public(providerValue) - }; - - var properties = new List(); - if (!string.IsNullOrWhiteSpace(providerValue)) - { - properties.Add(new AuthEventProperty - { - Name = "bootstrap.provider", - Value = ClassifiedString.Public(providerValue) - }); - } - - if (!string.IsNullOrWhiteSpace(inviteValue)) - { - properties.Add(new AuthEventProperty - { - Name = "bootstrap.invite_token", - Value = ClassifiedString.Public(inviteValue) - }); - } - - var scopes = rolesValue is { Count: > 0 } - ? rolesValue.ToArray() - : Array.Empty(); - - var record = new AuthEventRecord - { - EventType = "authority.bootstrap.user", - OccurredAt = timeProvider.GetUtcNow(), - CorrelationId = correlationId, - Outcome = outcome, - Reason = reason, - Subject = subject, - Client = null, - Scopes = scopes, - Network = network, - Properties = properties.Count == 0 ? Array.Empty() : properties - }; - - await auditSink.WriteAsync(record, httpContext.RequestAborted).ConfigureAwait(false); - } - - async Task WriteInviteAuditAsync(string eventType, AuthEventOutcome outcome, string? reason, AuthorityBootstrapInviteDocument? document, string? tokenValue) - { - var record = new AuthEventRecord - { - EventType = eventType, - OccurredAt = timeProvider.GetUtcNow(), - CorrelationId = Activity.Current?.TraceId.ToString() ?? httpContext.TraceIdentifier ?? Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture), - Outcome = outcome, - Reason = reason, - Subject = null, - Client = null, - Scopes = Array.Empty(), - Network = null, - Properties = BuildInviteProperties(document, tokenValue) - }; - - await auditSink.WriteAsync(record, httpContext.RequestAborted).ConfigureAwait(false); - } - - static AuthEventProperty[] BuildInviteProperties(AuthorityBootstrapInviteDocument? document, string? token) - { - var properties = new List(); - if (!string.IsNullOrWhiteSpace(token)) - { - properties.Add(new AuthEventProperty - { - Name = "invite.token", - Value = ClassifiedString.Public(token) - }); - } - - if (document is not null) - { - if (!string.IsNullOrWhiteSpace(document.Type)) - { - properties.Add(new AuthEventProperty - { - Name = "invite.type", - Value = ClassifiedString.Public(document.Type) - }); - } - - if (!string.IsNullOrWhiteSpace(document.Provider)) - { - properties.Add(new AuthEventProperty - { - Name = "invite.provider", - Value = ClassifiedString.Public(document.Provider) - }); - } - - if (!string.IsNullOrWhiteSpace(document.Target)) - { - properties.Add(new AuthEventProperty - { - Name = "invite.target", - Value = ClassifiedString.Public(document.Target) - }); - } - - properties.Add(new AuthEventProperty - { - Name = "invite.expires_at", - Value = ClassifiedString.Public(document.ExpiresAt.ToString("O", CultureInfo.InvariantCulture)) - }); - } - - return properties.Count == 0 ? 
Array.Empty() : properties.ToArray(); - } - }); - - bootstrapGroup.MapPost("/clients", async ( - HttpContext httpContext, - BootstrapClientRequest request, - IAuthorityIdentityProviderRegistry registry, - IAuthorityBootstrapInviteStore inviteStore, - IAuthEventSink auditSink, - TimeProvider timeProvider, - CancellationToken cancellationToken) => - { - if (request is null) - { - await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Request payload is required.", null, null, null, Array.Empty(), null, null).ConfigureAwait(false); - return Results.BadRequest(new { error = "invalid_request", message = "Request payload is required." }); - } - - var now = timeProvider.GetUtcNow(); - var inviteToken = string.IsNullOrWhiteSpace(request.InviteToken) ? null : request.InviteToken.Trim(); - AuthorityBootstrapInviteDocument? invite = null; - var inviteReserved = false; - - async Task ReleaseInviteAsync(string reason) - { - if (inviteToken is null) - { - return; - } - - if (inviteReserved) - { - await inviteStore.ReleaseAsync(inviteToken, cancellationToken).ConfigureAwait(false); - } - - await WriteInviteAuditAsync("authority.bootstrap.invite.rejected", AuthEventOutcome.Failure, reason, invite, inviteToken).ConfigureAwait(false); - } - - if (inviteToken is not null) - { - var reservation = await inviteStore.TryReserveAsync(inviteToken, BootstrapInviteTypes.Client, now, request.ClientId, cancellationToken).ConfigureAwait(false); - switch (reservation.Status) - { - case BootstrapInviteReservationStatus.Reserved: - inviteReserved = true; - invite = reservation.Invite; - break; - case BootstrapInviteReservationStatus.Expired: - await WriteInviteAuditAsync("authority.bootstrap.invite.expired", AuthEventOutcome.Failure, "Invite expired before use.", reservation.Invite, inviteToken).ConfigureAwait(false); - return Results.BadRequest(new { error = "invite_expired", message = "Invite has expired." }); - case BootstrapInviteReservationStatus.AlreadyUsed: - await WriteInviteAuditAsync("authority.bootstrap.invite.rejected", AuthEventOutcome.Failure, "Invite token already consumed.", reservation.Invite, inviteToken).ConfigureAwait(false); - return Results.BadRequest(new { error = "invite_used", message = "Invite token has already been used." }); - default: - await WriteInviteAuditAsync("authority.bootstrap.invite.rejected", AuthEventOutcome.Failure, "Invite token is invalid.", reservation.Invite, inviteToken).ConfigureAwait(false); - return Results.BadRequest(new { error = "invalid_invite", message = "Invite token is invalid." }); - } - } - - var providerName = string.IsNullOrWhiteSpace(request.Provider) - ? invite?.Provider ?? authorityOptions.Bootstrap.DefaultIdentityProvider - : request.Provider; - - if (invite is not null && !string.IsNullOrWhiteSpace(invite.Provider) && - !string.Equals(invite.Provider, providerName, StringComparison.OrdinalIgnoreCase)) - { - await ReleaseInviteAsync("Invite provider does not match requested provider."); - return Results.BadRequest(new { error = "invite_provider_mismatch", message = "Invite is limited to a different identity provider." }); - } - - if (string.IsNullOrWhiteSpace(providerName) || !registry.TryGet(providerName!, out var providerMetadata)) - { - await ReleaseInviteAsync("Specified identity provider was not found."); - await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Specified identity provider was not found.", request.ClientId, null, providerName, request.AllowedScopes ?? 
Array.Empty(), request?.Confidential, inviteToken).ConfigureAwait(false); - return Results.BadRequest(new { error = "invalid_provider", message = "Specified identity provider was not found." }); - } - - if (!providerMetadata.Capabilities.SupportsClientProvisioning) - { - await ReleaseInviteAsync("Selected provider does not support client provisioning."); - await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Selected provider does not support client provisioning.", request.ClientId, null, providerMetadata.Name, request.AllowedScopes ?? Array.Empty(), request.Confidential, inviteToken).ConfigureAwait(false); - return Results.BadRequest(new { error = "unsupported_provider", message = "Selected provider does not support client provisioning." }); - } - - await using var providerHandle = await registry.AcquireAsync(providerMetadata.Name, cancellationToken).ConfigureAwait(false); - var provider = providerHandle.Provider; - - if (provider.ClientProvisioning is null) - { - await ReleaseInviteAsync("Selected provider does not support client provisioning."); - await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Selected provider does not support client provisioning.", request.ClientId, null, providerMetadata.Name, request.AllowedScopes ?? Array.Empty(), request.Confidential, inviteToken).ConfigureAwait(false); - return Results.BadRequest(new { error = "unsupported_provider", message = "Selected provider does not support client provisioning." }); - } - - if (string.IsNullOrWhiteSpace(request.ClientId)) - { - await ReleaseInviteAsync("ClientId is required."); - await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "ClientId is required.", null, null, providerMetadata.Name, request.AllowedScopes ?? Array.Empty(), request.Confidential, inviteToken).ConfigureAwait(false); - return Results.BadRequest(new { error = "invalid_request", message = "ClientId is required." }); - } - - if (invite is not null && !string.IsNullOrWhiteSpace(invite.Target) && - !string.Equals(invite.Target, request.ClientId, StringComparison.OrdinalIgnoreCase)) - { - await ReleaseInviteAsync("Invite target does not match requested client id."); - return Results.BadRequest(new { error = "invite_target_mismatch", message = "Invite target does not match client id." }); - } - - if (request.Confidential && string.IsNullOrWhiteSpace(request.ClientSecret)) - { - await ReleaseInviteAsync("Confidential clients require a client secret."); - await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Confidential clients require a client secret.", request.ClientId, null, providerMetadata.Name, request.AllowedScopes ?? Array.Empty(), request.Confidential, inviteToken).ConfigureAwait(false); - return Results.BadRequest(new { error = "invalid_request", message = "Confidential clients require a client secret." }); - } - - if (!TryParseUris(request.RedirectUris, out var redirectUris, out var redirectError)) - { - var errorMessage = redirectError ?? "Redirect URI validation failed."; - await ReleaseInviteAsync(errorMessage); - await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, errorMessage, request.ClientId, null, providerMetadata.Name, request.AllowedScopes ?? Array.Empty(), request.Confidential, inviteToken).ConfigureAwait(false); - return Results.BadRequest(new { error = "invalid_request", message = errorMessage }); - } - - if (!TryParseUris(request.PostLogoutRedirectUris, out var postLogoutUris, out var postLogoutError)) - { - var errorMessage = postLogoutError ?? 
"Post-logout redirect URI validation failed."; - await ReleaseInviteAsync(errorMessage); - await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, errorMessage, request.ClientId, null, providerMetadata.Name, request.AllowedScopes ?? Array.Empty(), request.Confidential, inviteToken).ConfigureAwait(false); - return Results.BadRequest(new { error = "invalid_request", message = errorMessage }); - } - - var properties = request.Properties is null - ? new Dictionary(StringComparer.OrdinalIgnoreCase) - : new Dictionary(request.Properties, StringComparer.OrdinalIgnoreCase); - - IReadOnlyCollection? certificateBindings = null; - if (request.CertificateBindings is not null) - { - var bindingRegistrations = new List(request.CertificateBindings.Count); - foreach (var binding in request.CertificateBindings) - { - if (binding is null || string.IsNullOrWhiteSpace(binding.Thumbprint)) - { - await ReleaseInviteAsync("Certificate binding thumbprint is required."); - await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Certificate binding thumbprint is required.", request.ClientId, null, providerMetadata.Name, request.AllowedScopes ?? Array.Empty(), request.Confidential, inviteToken).ConfigureAwait(false); - return Results.BadRequest(new { error = "invalid_request", message = "Certificate binding thumbprint is required." }); - } - - bindingRegistrations.Add(new AuthorityClientCertificateBindingRegistration( - binding.Thumbprint, - binding.SerialNumber, - binding.Subject, - binding.Issuer, - binding.SubjectAlternativeNames, - binding.NotBefore, - binding.NotAfter, - binding.Label)); - } - - certificateBindings = bindingRegistrations; - } - - var requestedTenant = properties.TryGetValue(AuthorityClientMetadataKeys.Tenant, out var tenantMetadata) - ? ClientCredentialHandlerHelpers.NormalizeTenant(tenantMetadata) - : null; - if (!string.IsNullOrWhiteSpace(requestedTenant)) - { - properties[AuthorityClientMetadataKeys.Tenant] = requestedTenant; - } - - var requestedProject = properties.TryGetValue(AuthorityClientMetadataKeys.Project, out var projectMetadata) - ? ClientCredentialHandlerHelpers.NormalizeProject(projectMetadata) - : null; - requestedProject ??= StellaOpsTenancyDefaults.AnyProject; - properties[AuthorityClientMetadataKeys.Project] = requestedProject; - - var registration = new AuthorityClientRegistration( - request.ClientId, - request.Confidential, - request.DisplayName, - request.ClientSecret, - request.AllowedGrantTypes ?? Array.Empty(), - request.AllowedScopes ?? Array.Empty(), - request.AllowedAudiences ?? Array.Empty(), - redirectUris, - postLogoutUris, - requestedTenant, - requestedProject, - properties, - certificateBindings); - - var result = await provider.ClientProvisioning.CreateOrUpdateAsync(registration, cancellationToken).ConfigureAwait(false); - - if (!result.Succeeded || result.Value is null) - { - await ReleaseInviteAsync(result.Message ?? "Client provisioning failed."); - await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, result.Message ?? "Client provisioning failed.", request.ClientId, result.Value?.ClientId, providerMetadata.Name, request.AllowedScopes ?? Array.Empty(), request.Confidential, inviteToken).ConfigureAwait(false); - return Results.BadRequest(new { error = result.ErrorCode ?? "bootstrap_failed", message = result.Message ?? "Client provisioning failed." 
}); - } - - if (inviteReserved && inviteToken is not null) - { - var consumed = await inviteStore.MarkConsumedAsync(inviteToken, result.Value.ClientId, now, cancellationToken).ConfigureAwait(false); - if (consumed) - { - await WriteInviteAuditAsync("authority.bootstrap.invite.consumed", AuthEventOutcome.Success, null, invite, inviteToken).ConfigureAwait(false); - } - } - - await WriteBootstrapClientAuditAsync(AuthEventOutcome.Success, null, request.ClientId, result.Value.ClientId, providerMetadata.Name, request.AllowedScopes ?? Array.Empty(), request.Confidential, inviteToken).ConfigureAwait(false); - - return Results.Ok(new - { - provider = providerMetadata.Name, - clientId = result.Value.ClientId, - confidential = result.Value.Confidential - }); - - async Task WriteBootstrapClientAuditAsync(AuthEventOutcome outcome, string? reason, string? requestedClientId, string? assignedClientId, string? providerValue, IReadOnlyCollection scopes, bool? confidentialFlag, string? inviteValue) - { - var correlationId = Activity.Current?.TraceId.ToString() ?? httpContext.TraceIdentifier ?? Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture); - AuthEventNetwork? network = null; - var remoteAddress = httpContext.Connection.RemoteIpAddress?.ToString(); - var userAgent = httpContext.Request.Headers.UserAgent.ToString(); - - if (!string.IsNullOrWhiteSpace(remoteAddress) || !string.IsNullOrWhiteSpace(userAgent)) - { - network = new AuthEventNetwork - { - RemoteAddress = ClassifiedString.Personal(remoteAddress), - UserAgent = ClassifiedString.Personal(string.IsNullOrWhiteSpace(userAgent) ? null : userAgent) - }; - } - - var clientIdValue = assignedClientId ?? requestedClientId; - var client = clientIdValue is null && string.IsNullOrWhiteSpace(providerValue) - ? null - : new AuthEventClient - { - ClientId = ClassifiedString.Personal(clientIdValue), - Name = ClassifiedString.Empty, - Provider = ClassifiedString.Public(providerValue) - }; - - var properties = new List(); - if (!string.IsNullOrWhiteSpace(requestedClientId) && !string.Equals(requestedClientId, assignedClientId, StringComparison.Ordinal)) - { - properties.Add(new AuthEventProperty - { - Name = "bootstrap.requested_client_id", - Value = ClassifiedString.Public(requestedClientId) - }); - } - - if (confidentialFlag == true) - { - properties.Add(new AuthEventProperty - { - Name = "bootstrap.confidential", - Value = ClassifiedString.Public("true") - }); - } - - if (!string.IsNullOrWhiteSpace(inviteValue)) - { - properties.Add(new AuthEventProperty - { - Name = "bootstrap.invite_token", - Value = ClassifiedString.Public(inviteValue) - }); - } - - var record = new AuthEventRecord - { - EventType = "authority.bootstrap.client", - OccurredAt = timeProvider.GetUtcNow(), - CorrelationId = correlationId, - Outcome = outcome, - Reason = reason, - Subject = null, - Client = client, - Scopes = scopes is { Count: > 0 } ? scopes.ToArray() : Array.Empty(), - Network = network, - Properties = properties.Count == 0 ? Array.Empty() : properties.ToArray() - }; - - await auditSink.WriteAsync(record, httpContext.RequestAborted).ConfigureAwait(false); - } - - async Task WriteInviteAuditAsync(string eventType, AuthEventOutcome outcome, string? reason, AuthorityBootstrapInviteDocument? document, string? tokenValue) - { - var record = new AuthEventRecord - { - EventType = eventType, - OccurredAt = timeProvider.GetUtcNow(), - CorrelationId = Activity.Current?.TraceId.ToString() ?? httpContext.TraceIdentifier ?? 
Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture), - Outcome = outcome, - Reason = reason, - Subject = null, - Client = null, - Scopes = Array.Empty(), - Network = null, - Properties = BuildInviteProperties(document, tokenValue) - }; - - await auditSink.WriteAsync(record, httpContext.RequestAborted).ConfigureAwait(false); - } - - static AuthEventProperty[] BuildInviteProperties(AuthorityBootstrapInviteDocument? document, string? token) - { - var properties = new List(); - if (!string.IsNullOrWhiteSpace(token)) - { - properties.Add(new AuthEventProperty - { - Name = "invite.token", - Value = ClassifiedString.Public(token) - }); - } - - if (document is not null) - { - if (!string.IsNullOrWhiteSpace(document.Type)) - { - properties.Add(new AuthEventProperty - { - Name = "invite.type", - Value = ClassifiedString.Public(document.Type) - }); - } - - if (!string.IsNullOrWhiteSpace(document.Provider)) - { - properties.Add(new AuthEventProperty - { - Name = "invite.provider", - Value = ClassifiedString.Public(document.Provider) - }); - } - - if (!string.IsNullOrWhiteSpace(document.Target)) - { - properties.Add(new AuthEventProperty - { - Name = "invite.target", - Value = ClassifiedString.Public(document.Target) - }); - } - - properties.Add(new AuthEventProperty - { - Name = "invite.expires_at", - Value = ClassifiedString.Public(document.ExpiresAt.ToString("O", CultureInfo.InvariantCulture)) - }); - } - - return properties.Count == 0 ? Array.Empty() : properties.ToArray(); - } - }); - bootstrapGroup.MapPost("/invites", async ( - HttpContext httpContext, - BootstrapInviteRequest request, - IAuthorityBootstrapInviteStore inviteStore, - IAuthEventSink auditSink, - TimeProvider timeProvider, - CancellationToken cancellationToken) => - { - if (request is null) - { - return Results.BadRequest(new { error = "invalid_request", message = "Request payload is required." }); - } - - if (string.IsNullOrWhiteSpace(request.Type) || - ( !string.Equals(request.Type, BootstrapInviteTypes.User, StringComparison.OrdinalIgnoreCase) && - !string.Equals(request.Type, BootstrapInviteTypes.Client, StringComparison.OrdinalIgnoreCase))) - { - return Results.BadRequest(new { error = "invalid_request", message = "Invite type must be 'user' or 'client'." }); - } - - var now = timeProvider.GetUtcNow(); - var expiresAt = request.ExpiresAt ?? now.AddDays(2); - if (expiresAt <= now) - { - return Results.BadRequest(new { error = "invalid_request", message = "ExpiresAt must be in the future." }); - } - - var token = string.IsNullOrWhiteSpace(request.Token) ? Guid.NewGuid().ToString("N") : request.Token.Trim(); - - var document = new AuthorityBootstrapInviteDocument - { - Token = token, - Type = request.Type.ToLowerInvariant(), - Provider = string.IsNullOrWhiteSpace(request.Provider) ? null : request.Provider.Trim(), - Target = string.IsNullOrWhiteSpace(request.Target) ? null : request.Target.Trim(), - IssuedAt = now, - IssuedBy = string.IsNullOrWhiteSpace(request.IssuedBy) ? httpContext.User?.Identity?.Name : request.IssuedBy, - ExpiresAt = expiresAt, - Metadata = request.Metadata is null ? 
null : new Dictionary(request.Metadata, StringComparer.OrdinalIgnoreCase) - }; - - await inviteStore.CreateAsync(document, cancellationToken).ConfigureAwait(false); - await WriteInviteAuditAsync("authority.bootstrap.invite.created", AuthEventOutcome.Success, null, document).ConfigureAwait(false); - - return Results.Ok(new - { - document.Token, - document.Type, - document.Provider, - document.Target, - document.ExpiresAt - }); - - async Task WriteInviteAuditAsync(string eventType, AuthEventOutcome outcome, string? reason, AuthorityBootstrapInviteDocument invite) - { - var record = new AuthEventRecord - { - EventType = eventType, - OccurredAt = timeProvider.GetUtcNow(), - CorrelationId = Activity.Current?.TraceId.ToString() ?? httpContext.TraceIdentifier ?? Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture), - Outcome = outcome, - Reason = reason, - Subject = null, - Client = null, - Scopes = Array.Empty(), - Network = null, - Properties = BuildInviteProperties(invite) - }; - - await auditSink.WriteAsync(record, httpContext.RequestAborted).ConfigureAwait(false); - } - - static AuthEventProperty[] BuildInviteProperties(AuthorityBootstrapInviteDocument invite) - { - var properties = new List - { - new() { Name = "invite.token", Value = ClassifiedString.Public(invite.Token) }, - new() { Name = "invite.type", Value = ClassifiedString.Public(invite.Type) }, - new() { Name = "invite.expires_at", Value = ClassifiedString.Public(invite.ExpiresAt.ToString("O", CultureInfo.InvariantCulture)) } - }; - - if (!string.IsNullOrWhiteSpace(invite.Provider)) - { - properties.Add(new AuthEventProperty { Name = "invite.provider", Value = ClassifiedString.Public(invite.Provider) }); - } - - if (!string.IsNullOrWhiteSpace(invite.Target)) - { - properties.Add(new AuthEventProperty { Name = "invite.target", Value = ClassifiedString.Public(invite.Target) }); - } - - if (!string.IsNullOrWhiteSpace(invite.IssuedBy)) - { - properties.Add(new AuthEventProperty { Name = "invite.issued_by", Value = ClassifiedString.Public(invite.IssuedBy) }); - } - - return properties.ToArray(); - } - }); - - bootstrapGroup.MapGet("/revocations/export", async ( - AuthorityRevocationExportService exportService, - CancellationToken cancellationToken) => - { - var package = await exportService.ExportAsync(cancellationToken).ConfigureAwait(false); - var build = package.Bundle; - - var response = new RevocationExportResponse - { - SchemaVersion = build.Bundle.SchemaVersion, - BundleId = build.Bundle.BundleId ?? build.Sha256, - Sequence = build.Sequence, - IssuedAt = build.IssuedAt, - SigningKeyId = package.Signature.KeyId, - Bundle = new RevocationExportPayload - { - Data = Convert.ToBase64String(build.CanonicalJson) - }, - Signature = new RevocationExportSignature - { - Algorithm = package.Signature.Algorithm, - KeyId = package.Signature.KeyId, - Provider = package.Signature.Provider, - Value = package.Signature.Value - }, - Digest = new RevocationExportDigest - { - Value = build.Sha256 - } - }; - - return Results.Ok(response); - }); - - bootstrapGroup.MapPost("/signing/rotate", ( - SigningRotationRequest? request, - AuthoritySigningKeyManager signingManager, - ILogger signingLogger) => - { - if (request is null) - { - signingLogger.LogWarning("Signing rotation request payload missing."); - return Results.BadRequest(new { error = "invalid_request", message = "Request payload is required." }); - } - - try - { - var result = signingManager.Rotate(request); - signingLogger.LogInformation("Signing key rotation completed. 
Active key {KeyId}.", result.ActiveKeyId); - - return Results.Ok(new - { - activeKeyId = result.ActiveKeyId, - provider = result.ActiveProvider, - source = result.ActiveSource, - location = result.ActiveLocation, - previousKeyId = result.PreviousKeyId, - retiredKeyIds = result.RetiredKeyIds - }); - } - catch (InvalidOperationException ex) - { - signingLogger.LogWarning(ex, "Signing rotation failed due to invalid input."); - return Results.BadRequest(new { error = "rotation_failed", message = ex.Message }); - } - catch (Exception ex) - { - signingLogger.LogError(ex, "Unexpected failure rotating signing key."); - return Results.Problem("Failed to rotate signing key."); - } - }); -} - -app.UseSerilogRequestLogging(options => -{ - options.EnrichDiagnosticContext = (diagnosticContext, httpContext) => - { - diagnosticContext.Set("TraceId", Activity.Current?.TraceId.ToString()); - diagnosticContext.Set("UserAgent", httpContext.Request.Headers.UserAgent.ToString()); - }; -}); - -app.UseExceptionHandler(static errorApp => -{ - errorApp.Run(async context => - { - context.Response.ContentType = "application/problem+json"; - var problem = Results.Problem( - statusCode: StatusCodes.Status500InternalServerError, - title: "Unhandled server error", - detail: "Unexpected failure while processing the request."); - - await problem.ExecuteAsync(context); - }); -}); - -app.UseRouting(); -app.UseAuthorityRateLimiterContext(); -app.UseRateLimiter(); -app.UseAuthentication(); -app.UseAuthorization(); - -app.MapGet("/health", async (IAuthorityIdentityProviderRegistry registry, CancellationToken cancellationToken) => - { - var pluginHealth = new List(); - foreach (var providerMetadata in registry.Providers) - { - await using var handle = await registry.AcquireAsync(providerMetadata.Name, cancellationToken).ConfigureAwait(false); - var health = await handle.Provider.CheckHealthAsync(cancellationToken).ConfigureAwait(false); - pluginHealth.Add(new - { - provider = providerMetadata.Name, - status = health.Status.ToString().ToLowerInvariant(), - message = health.Message - }); - } - - return Results.Ok(new - { - status = "healthy", - identityProviders = pluginHealth - }); - }) - .WithName("HealthCheck"); - -app.MapGet("/ready", (IAuthorityIdentityProviderRegistry registry) => - Results.Ok(new - { - status = registry.Providers.Count > 0 ? "ready" : "degraded", - identityProviders = registry.Providers.Select(p => p.Name).ToArray() - })) - .WithName("ReadinessCheck"); - -app.MapPost("/permalinks/vuln", async ( - VulnPermalinkRequest request, - VulnPermalinkService service, - CancellationToken cancellationToken) => -{ - try - { - var response = await service.CreateAsync(request, cancellationToken).ConfigureAwait(false); - return Results.Ok(response); - } - catch (ArgumentException ex) - { - return Results.BadRequest(new { error = "invalid_request", message = ex.Message }); - } - catch (InvalidOperationException ex) - { - return Results.Problem(ex.Message); - } -}) - .RequireAuthorization(policy => policy.RequireStellaOpsScopes(StellaOpsScopes.VulnRead)) - .WithName("CreateVulnPermalink"); - -app.MapGet("/jwks", (AuthorityJwksService jwksService) => Results.Ok(jwksService.Build())) - .WithName("JsonWebKeySet"); - -// Ensure signing key manager initialises key material on startup. 
-app.Services.GetRequiredService(); - -app.Run(); - -static PluginHostOptions BuildPluginHostOptions(StellaOpsAuthorityOptions options, string basePath) -{ - var pluginDirectory = options.PluginDirectories.FirstOrDefault(); - var hostOptions = new PluginHostOptions - { - BaseDirectory = basePath, - PluginsDirectory = string.IsNullOrWhiteSpace(pluginDirectory) - ? "StellaOps.Authority.PluginBinaries" - : pluginDirectory, - PrimaryPrefix = "StellaOps.Authority" - }; - - if (!hostOptions.SearchPatterns.Any(pattern => string.Equals(pattern, "StellaOps.Authority.Plugin.*.dll", StringComparison.OrdinalIgnoreCase))) - { - hostOptions.SearchPatterns.Add("StellaOps.Authority.Plugin.*.dll"); - } - - foreach (var pair in options.Plugins.Descriptors.OrderBy(static p => p.Key, StringComparer.OrdinalIgnoreCase)) - { - var descriptor = pair.Value; - if (descriptor.Enabled && !string.IsNullOrWhiteSpace(descriptor.AssemblyName)) - { - hostOptions.PluginOrder.Add(descriptor.AssemblyName!); - } - } - - return hostOptions; -} - -static bool TryParseUris(IReadOnlyCollection? values, out IReadOnlyCollection uris, out string? error) -{ - error = null; - - if (values is null || values.Count == 0) - { - uris = Array.Empty(); - return true; - } - - var parsed = new List(values.Count); - foreach (var entry in values) - { - if (string.IsNullOrWhiteSpace(entry) || !Uri.TryCreate(entry, UriKind.Absolute, out var uri)) - { - uris = Array.Empty(); - error = $"Invalid URI value '{entry}'."; - return false; - } - - parsed.Add(uri); - } - - uris = parsed; - return true; -} +using System; +using System.Diagnostics; +using System.Globalization; +using Microsoft.AspNetCore.Builder; +using Microsoft.AspNetCore.Http; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using Microsoft.AspNetCore.RateLimiting; +using Microsoft.AspNetCore.Server.Kestrel.Https; +using Microsoft.Extensions.Logging.Abstractions; +using OpenIddict.Abstractions; +using OpenIddict.Server; +using OpenIddict.Server.AspNetCore; +using MongoDB.Driver; +using Serilog; +using Serilog.Events; +using StellaOps.Authority; +using StellaOps.Authority.Audit; +using StellaOps.Authority.Plugins.Abstractions; +using StellaOps.Authority.Plugins; +using StellaOps.Authority.Bootstrap; +using StellaOps.Authority.Storage.Mongo.Extensions; +using StellaOps.Authority.Storage.Mongo.Initialization; +using StellaOps.Authority.Storage.Mongo.Stores; +using StellaOps.Authority.RateLimiting; +using StellaOps.Configuration; +using StellaOps.Plugin.DependencyInjection; +using StellaOps.Plugin.Hosting; +using StellaOps.Authority.OpenIddict.Handlers; +using System.Linq; +using StellaOps.Cryptography.Audit; +using StellaOps.Cryptography.DependencyInjection; +using StellaOps.Authority.Permalinks; +using StellaOps.Authority.Revocation; +using StellaOps.Authority.Signing; +using StellaOps.Cryptography; +using StellaOps.Authority.Storage.Mongo.Documents; +using StellaOps.Authority.Security; +using StellaOps.Auth.Abstractions; +using StellaOps.Auth.ServerIntegration; +#if STELLAOPS_AUTH_SECURITY +using StellaOps.Auth.Security.Dpop; +using StackExchange.Redis; +#endif + +var builder = WebApplication.CreateBuilder(args); + +Activity.DefaultIdFormat = ActivityIdFormat.W3C; +Activity.ForceDefaultIdFormat = true; + +AuthorityTelemetryConfiguration.Configure(builder); + +var 
authorityConfiguration = StellaOpsAuthorityConfiguration.Build(options => +{ + options.BasePath = builder.Environment.ContentRootPath; + options.EnvironmentPrefix = "STELLAOPS_AUTHORITY_"; + options.ConfigureBuilder = configurationBuilder => + { + var contentRoot = builder.Environment.ContentRootPath; + foreach (var relative in new[] + { + "../etc/authority.yaml", + "../etc/authority.local.yaml", + "authority.yaml", + "authority.local.yaml" + }) + { + var path = Path.Combine(contentRoot, relative); + configurationBuilder.AddYamlFile(path, optional: true); + } + }; +}); + +builder.WebHost.ConfigureKestrel(options => +{ + options.ConfigureHttpsDefaults(https => + { + https.ClientCertificateMode = ClientCertificateMode.AllowCertificate; + https.CheckCertificateRevocation = true; + }); +}); + +builder.Configuration.AddConfiguration(authorityConfiguration.Configuration); + +builder.Host.UseSerilog((context, _, loggerConfiguration) => +{ + loggerConfiguration + .ReadFrom.Configuration(context.Configuration) + .Enrich.FromLogContext() + .MinimumLevel.Override("Microsoft.AspNetCore.Hosting.Diagnostics", LogEventLevel.Warning) + .WriteTo.Console(); +}); + +var authorityOptions = authorityConfiguration.Options; +var issuerUri = authorityOptions.Issuer; +if (issuerUri is null) +{ + var issuerValue = builder.Configuration["Authority:Issuer"]; + if (string.IsNullOrWhiteSpace(issuerValue)) + { + throw new InvalidOperationException("Authority issuer configuration is required."); + } + + issuerUri = new Uri(issuerValue, UriKind.Absolute); +} + +authorityOptions.Issuer = issuerUri; +builder.Services.AddSingleton(authorityOptions); +builder.Services.AddSingleton>(Options.Create(authorityOptions)); +builder.Services.AddHttpContextAccessor(); +builder.Services.TryAddSingleton(_ => TimeProvider.System); +builder.Services.TryAddSingleton(); +builder.Services.TryAddSingleton(); +builder.Services.AddSingleton(); + +#if STELLAOPS_AUTH_SECURITY +var senderConstraints = authorityOptions.Security.SenderConstraints; + +builder.Services.AddOptions() + .Configure(options => + { + options.ProofLifetime = senderConstraints.Dpop.ProofLifetime; + options.AllowedClockSkew = senderConstraints.Dpop.AllowedClockSkew; + options.ReplayWindow = senderConstraints.Dpop.ReplayWindow; + + options.AllowedAlgorithms.Clear(); + foreach (var algorithm in senderConstraints.Dpop.NormalizedAlgorithms) + { + options.AllowedAlgorithms.Add(algorithm); + } + }) + .PostConfigure(static options => options.Validate()); + +builder.Services.TryAddSingleton(provider => new InMemoryDpopReplayCache(provider.GetService())); +builder.Services.TryAddSingleton(); +if (string.Equals(senderConstraints.Dpop.Nonce.Store, "redis", StringComparison.OrdinalIgnoreCase)) +{ + builder.Services.TryAddSingleton(_ => + ConnectionMultiplexer.Connect(senderConstraints.Dpop.Nonce.RedisConnectionString!)); + + builder.Services.TryAddSingleton(provider => + { + var multiplexer = provider.GetRequiredService(); + var timeProvider = provider.GetService(); + return new RedisDpopNonceStore(multiplexer, timeProvider); + }); +} +else +{ + builder.Services.TryAddSingleton(provider => + { + var timeProvider = provider.GetService(); + var nonceLogger = provider.GetService>(); + return new InMemoryDpopNonceStore(timeProvider, nonceLogger); + }); +} + +builder.Services.AddScoped(); +#endif + +builder.Services.AddRateLimiter(rateLimiterOptions => +{ + AuthorityRateLimiter.Configure(rateLimiterOptions, authorityOptions); +}); + +builder.Services.AddStellaOpsCrypto(); 
+builder.Services.TryAddEnumerable(ServiceDescriptor.Singleton()); +builder.Services.AddSingleton(); +builder.Services.AddSingleton(); + +AuthorityPluginContext[] pluginContexts = AuthorityPluginConfigurationLoader + .Load(authorityOptions, builder.Environment.ContentRootPath) + .ToArray(); + +builder.Services.AddSingleton>(pluginContexts); +builder.Services.AddSingleton(_ => new AuthorityPluginRegistry(pluginContexts)); + +var pluginHostOptions = BuildPluginHostOptions(authorityOptions, builder.Environment.ContentRootPath); +builder.Services.AddSingleton(pluginHostOptions); +builder.Services.RegisterPluginRoutines(authorityConfiguration.Configuration, pluginHostOptions); + +builder.Services.AddAuthorityMongoStorage(storageOptions => +{ + storageOptions.ConnectionString = authorityOptions.Storage.ConnectionString; + storageOptions.DatabaseName = authorityOptions.Storage.DatabaseName; + storageOptions.CommandTimeout = authorityOptions.Storage.CommandTimeout; +}); + +builder.Services.AddSingleton(); +builder.Services.AddSingleton(); +builder.Services.AddScoped(); +builder.Services.AddScoped(); +builder.Services.AddScoped(); +builder.Services.AddScoped(); +builder.Services.AddScoped(); +builder.Services.AddScoped(); +builder.Services.AddScoped(); +builder.Services.AddSingleton(); +builder.Services.AddSingleton(); +builder.Services.AddSingleton(); +builder.Services.AddSingleton(); +builder.Services.AddHostedService(); + +var pluginRegistrationSummary = AuthorityPluginLoader.RegisterPlugins( + builder.Services, + authorityConfiguration.Configuration, + pluginHostOptions, + pluginContexts, + NullLogger.Instance); + +builder.Services.AddSingleton(pluginRegistrationSummary); + +builder.Services.AddRouting(options => options.LowercaseUrls = true); +builder.Services.AddProblemDetails(); +builder.Services.AddAuthentication(); +builder.Services.AddAuthorization(); + +builder.Services.AddOpenIddict() + .AddServer(options => + { + options.SetIssuer(issuerUri); + options.SetTokenEndpointUris("/token"); + options.SetAuthorizationEndpointUris("/authorize"); + options.SetIntrospectionEndpointUris("/introspect"); + options.SetRevocationEndpointUris("/revoke"); + options.SetJsonWebKeySetEndpointUris("/jwks"); + + options.AllowPasswordFlow(); + options.AllowClientCredentialsFlow(); + options.AllowRefreshTokenFlow(); + + options.SetAccessTokenLifetime(authorityOptions.AccessTokenLifetime); + options.SetRefreshTokenLifetime(authorityOptions.RefreshTokenLifetime); + options.SetIdentityTokenLifetime(authorityOptions.IdentityTokenLifetime); + options.SetAuthorizationCodeLifetime(authorityOptions.AuthorizationCodeLifetime); + options.SetDeviceCodeLifetime(authorityOptions.DeviceCodeLifetime); + + options.DisableAccessTokenEncryption(); + options.DisableTokenStorage(); + options.DisableAuthorizationStorage(); + + options.RegisterScopes( + new[] + { + OpenIddictConstants.Scopes.OpenId, + OpenIddictConstants.Scopes.Email, + OpenIddictConstants.Scopes.Profile, + OpenIddictConstants.Scopes.OfflineAccess + } + .Concat(StellaOpsScopes.All) + .Distinct(StringComparer.Ordinal) + .ToArray()); + + options.AddEphemeralEncryptionKey() + .AddEphemeralSigningKey(); + + var aspNetCoreBuilder = options.UseAspNetCore() + .EnableAuthorizationEndpointPassthrough() + .EnableTokenEndpointPassthrough(); + + if (builder.Environment.IsDevelopment()) + { + aspNetCoreBuilder.DisableTransportSecurityRequirement(); + } + +#if STELLAOPS_AUTH_SECURITY + options.AddEventHandler(descriptor => + { + descriptor.UseScopedHandler(); + }); +#endif + + 
options.AddEventHandler(descriptor => + { + descriptor.UseScopedHandler(); + }); + + options.AddEventHandler(descriptor => + { + descriptor.UseScopedHandler(); + }); + + options.AddEventHandler(descriptor => + { + descriptor.UseScopedHandler(); + }); + + options.AddEventHandler(descriptor => + { + descriptor.UseScopedHandler(); + }); + + options.AddEventHandler(descriptor => + { + descriptor.UseScopedHandler(); + }); + + options.AddEventHandler(descriptor => + { + descriptor.UseScopedHandler(); + }); + + options.AddEventHandler(descriptor => + { + descriptor.UseScopedHandler(); + }); + }); + +builder.Services.Configure(options => +{ + options.DisableSlidingRefreshTokenExpiration = false; + options.DisableRollingRefreshTokens = false; +}); + +var app = builder.Build(); + +var mongoInitializer = app.Services.GetRequiredService(); +var mongoDatabase = app.Services.GetRequiredService(); +await mongoInitializer.InitialiseAsync(mongoDatabase, CancellationToken.None); + +var registrationSummary = app.Services.GetRequiredService(); +if (registrationSummary.RegisteredPlugins.Count > 0) +{ + app.Logger.LogInformation( + "Authority plugins registered: {Plugins}", + string.Join(", ", registrationSummary.RegisteredPlugins)); +} + +foreach (var failure in registrationSummary.Failures) +{ + app.Logger.LogError( + "Authority plugin '{PluginName}' failed to register: {Reason}", + failure.PluginName, + failure.Reason); +} + +foreach (var missing in registrationSummary.MissingOrderedPlugins) +{ + app.Logger.LogWarning( + "Configured Authority plugin '{PluginName}' was not discovered during startup.", + missing); +} + +var identityProviderRegistry = app.Services.GetRequiredService(); +if (identityProviderRegistry.Providers.Count == 0) +{ + app.Logger.LogWarning("No identity provider plugins were registered."); +} +else +{ + foreach (var provider in identityProviderRegistry.Providers) + { + var caps = provider.Capabilities; + app.Logger.LogInformation( + "Identity provider plugin '{PluginName}' (type {PluginType}) capabilities: password={Password}, mfa={Mfa}, clientProvisioning={ClientProvisioning}.", + provider.Name, + provider.Type, + caps.SupportsPassword, + caps.SupportsMfa, + caps.SupportsClientProvisioning); + } +} + +if (authorityOptions.Bootstrap.Enabled) +{ + var bootstrapGroup = app.MapGroup("/internal"); + bootstrapGroup.AddEndpointFilter(new BootstrapApiKeyFilter(authorityOptions)); + + bootstrapGroup.MapPost("/users", async ( + HttpContext httpContext, + BootstrapUserRequest request, + IAuthorityIdentityProviderRegistry registry, + IAuthorityBootstrapInviteStore inviteStore, + IAuthEventSink auditSink, + TimeProvider timeProvider, + CancellationToken cancellationToken) => + { + if (request is null) + { + await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, "Request payload is required.", null, null, null, Array.Empty(), null).ConfigureAwait(false); + return Results.BadRequest(new { error = "invalid_request", message = "Request payload is required." }); + } + + var now = timeProvider.GetUtcNow(); + var inviteToken = string.IsNullOrWhiteSpace(request.InviteToken) ? null : request.InviteToken.Trim(); + AuthorityBootstrapInviteDocument? 
invite = null; + var inviteReserved = false; + + async Task ReleaseInviteAsync(string reason) + { + if (inviteToken is null) + { + return; + } + + if (inviteReserved) + { + await inviteStore.ReleaseAsync(inviteToken, cancellationToken).ConfigureAwait(false); + } + + await WriteInviteAuditAsync("authority.bootstrap.invite.rejected", AuthEventOutcome.Failure, reason, invite, inviteToken).ConfigureAwait(false); + } + + if (inviteToken is not null) + { + var reservation = await inviteStore.TryReserveAsync(inviteToken, BootstrapInviteTypes.User, now, request.Username, cancellationToken).ConfigureAwait(false); + + switch (reservation.Status) + { + case BootstrapInviteReservationStatus.Reserved: + inviteReserved = true; + invite = reservation.Invite; + break; + case BootstrapInviteReservationStatus.Expired: + await WriteInviteAuditAsync("authority.bootstrap.invite.expired", AuthEventOutcome.Failure, "Invite expired before use.", reservation.Invite, inviteToken).ConfigureAwait(false); + return Results.BadRequest(new { error = "invite_expired", message = "Invite has expired." }); + case BootstrapInviteReservationStatus.AlreadyUsed: + await WriteInviteAuditAsync("authority.bootstrap.invite.rejected", AuthEventOutcome.Failure, "Invite token already consumed.", reservation.Invite, inviteToken).ConfigureAwait(false); + return Results.BadRequest(new { error = "invite_used", message = "Invite token has already been used." }); + default: + await WriteInviteAuditAsync("authority.bootstrap.invite.rejected", AuthEventOutcome.Failure, "Invite token not found.", reservation.Invite, inviteToken).ConfigureAwait(false); + return Results.BadRequest(new { error = "invalid_invite", message = "Invite token is invalid." }); + } + } + + var providerName = string.IsNullOrWhiteSpace(request.Provider) + ? invite?.Provider ?? authorityOptions.Bootstrap.DefaultIdentityProvider + : request.Provider; + + if (invite is not null && !string.IsNullOrWhiteSpace(invite.Provider) && + !string.Equals(invite.Provider, providerName, StringComparison.OrdinalIgnoreCase)) + { + await ReleaseInviteAsync("Invite provider does not match requested provider."); + return Results.BadRequest(new { error = "invite_provider_mismatch", message = "Invite is limited to a different identity provider." }); + } + + if (string.IsNullOrWhiteSpace(providerName) || !registry.TryGet(providerName!, out var providerMetadata)) + { + await ReleaseInviteAsync("Specified identity provider was not found."); + await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, "Specified identity provider was not found.", null, request.Username, providerName, request.Roles ?? Array.Empty(), inviteToken).ConfigureAwait(false); + return Results.BadRequest(new { error = "invalid_provider", message = "Specified identity provider was not found." }); + } + + if (!providerMetadata.Capabilities.SupportsPassword) + { + await ReleaseInviteAsync("Selected provider does not support password provisioning."); + await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, "Selected provider does not support password provisioning.", null, request.Username, providerMetadata.Name, request.Roles ?? Array.Empty(), inviteToken).ConfigureAwait(false); + return Results.BadRequest(new { error = "unsupported_provider", message = "Selected provider does not support password provisioning." 
}); + } + + if (string.IsNullOrWhiteSpace(request.Username) || string.IsNullOrEmpty(request.Password)) + { + await ReleaseInviteAsync("Username and password are required."); + await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, "Username and password are required.", null, request.Username, providerMetadata.Name, request.Roles ?? Array.Empty(), inviteToken).ConfigureAwait(false); + return Results.BadRequest(new { error = "invalid_request", message = "Username and password are required." }); + } + + if (invite is not null && !string.IsNullOrWhiteSpace(invite.Target) && + !string.Equals(invite.Target, request.Username, StringComparison.OrdinalIgnoreCase)) + { + await ReleaseInviteAsync("Invite target does not match requested username."); + return Results.BadRequest(new { error = "invite_target_mismatch", message = "Invite target does not match username." }); + } + + var roles = request.Roles is null ? Array.Empty() : request.Roles.ToArray(); + var attributes = request.Attributes is null + ? new Dictionary(StringComparer.OrdinalIgnoreCase) + : new Dictionary(request.Attributes, StringComparer.OrdinalIgnoreCase); + + var registration = new AuthorityUserRegistration( + request.Username, + request.Password, + request.DisplayName, + request.Email, + request.RequirePasswordReset, + roles, + attributes); + + await using var providerHandle = await registry.AcquireAsync(providerMetadata.Name, cancellationToken).ConfigureAwait(false); + var provider = providerHandle.Provider; + + try + { + var result = await provider.Credentials.UpsertUserAsync(registration, cancellationToken).ConfigureAwait(false); + + if (!result.Succeeded || result.Value is null) + { + await ReleaseInviteAsync(result.Message ?? "User provisioning failed."); + await WriteBootstrapUserAuditAsync(AuthEventOutcome.Failure, result.Message ?? "User provisioning failed.", null, request.Username, providerMetadata.Name, roles, inviteToken).ConfigureAwait(false); + return Results.BadRequest(new { error = result.ErrorCode ?? "bootstrap_failed", message = result.Message ?? "User provisioning failed." }); + } + + if (inviteReserved && inviteToken is not null) + { + var consumed = await inviteStore.MarkConsumedAsync(inviteToken, result.Value.SubjectId ?? result.Value.Username, now, cancellationToken).ConfigureAwait(false); + if (consumed) + { + await WriteInviteAuditAsync("authority.bootstrap.invite.consumed", AuthEventOutcome.Success, null, invite, inviteToken).ConfigureAwait(false); + } + } + + await WriteBootstrapUserAuditAsync(AuthEventOutcome.Success, null, result.Value.SubjectId, result.Value.Username, providerMetadata.Name, roles, inviteToken).ConfigureAwait(false); + + return Results.Ok(new + { + provider = providerMetadata.Name, + subjectId = result.Value.SubjectId, + username = result.Value.Username + }); + } + catch + { + if (inviteReserved && inviteToken is not null) + { + await inviteStore.ReleaseAsync(inviteToken, cancellationToken).ConfigureAwait(false); + await WriteInviteAuditAsync("authority.bootstrap.invite.released", AuthEventOutcome.Error, "Invite released due to provisioning failure.", invite, inviteToken).ConfigureAwait(false); + } + + throw; + } + + async Task WriteBootstrapUserAuditAsync(AuthEventOutcome outcome, string? reason, string? subjectId, string? usernameValue, string? providerValue, IReadOnlyCollection rolesValue, string? inviteValue) + { + var correlationId = Activity.Current?.TraceId.ToString() ?? httpContext.TraceIdentifier ?? 
Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture); + AuthEventNetwork? network = null; + var remoteAddress = httpContext.Connection.RemoteIpAddress?.ToString(); + var userAgent = httpContext.Request.Headers.UserAgent.ToString(); + + if (!string.IsNullOrWhiteSpace(remoteAddress) || !string.IsNullOrWhiteSpace(userAgent)) + { + network = new AuthEventNetwork + { + RemoteAddress = ClassifiedString.Personal(remoteAddress), + UserAgent = ClassifiedString.Personal(string.IsNullOrWhiteSpace(userAgent) ? null : userAgent) + }; + } + + var subject = subjectId is null && string.IsNullOrWhiteSpace(usernameValue) && string.IsNullOrWhiteSpace(providerValue) + ? null + : new AuthEventSubject + { + SubjectId = ClassifiedString.Personal(subjectId), + Username = ClassifiedString.Personal(usernameValue), + Realm = ClassifiedString.Public(providerValue) + }; + + var properties = new List(); + if (!string.IsNullOrWhiteSpace(providerValue)) + { + properties.Add(new AuthEventProperty + { + Name = "bootstrap.provider", + Value = ClassifiedString.Public(providerValue) + }); + } + + if (!string.IsNullOrWhiteSpace(inviteValue)) + { + properties.Add(new AuthEventProperty + { + Name = "bootstrap.invite_token", + Value = ClassifiedString.Public(inviteValue) + }); + } + + var scopes = rolesValue is { Count: > 0 } + ? rolesValue.ToArray() + : Array.Empty(); + + var record = new AuthEventRecord + { + EventType = "authority.bootstrap.user", + OccurredAt = timeProvider.GetUtcNow(), + CorrelationId = correlationId, + Outcome = outcome, + Reason = reason, + Subject = subject, + Client = null, + Scopes = scopes, + Network = network, + Properties = properties.Count == 0 ? Array.Empty() : properties + }; + + await auditSink.WriteAsync(record, httpContext.RequestAborted).ConfigureAwait(false); + } + + async Task WriteInviteAuditAsync(string eventType, AuthEventOutcome outcome, string? reason, AuthorityBootstrapInviteDocument? document, string? tokenValue) + { + var record = new AuthEventRecord + { + EventType = eventType, + OccurredAt = timeProvider.GetUtcNow(), + CorrelationId = Activity.Current?.TraceId.ToString() ?? httpContext.TraceIdentifier ?? Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture), + Outcome = outcome, + Reason = reason, + Subject = null, + Client = null, + Scopes = Array.Empty(), + Network = null, + Properties = BuildInviteProperties(document, tokenValue) + }; + + await auditSink.WriteAsync(record, httpContext.RequestAborted).ConfigureAwait(false); + } + + static AuthEventProperty[] BuildInviteProperties(AuthorityBootstrapInviteDocument? document, string? 
token) + { + var properties = new List(); + if (!string.IsNullOrWhiteSpace(token)) + { + properties.Add(new AuthEventProperty + { + Name = "invite.token", + Value = ClassifiedString.Public(token) + }); + } + + if (document is not null) + { + if (!string.IsNullOrWhiteSpace(document.Type)) + { + properties.Add(new AuthEventProperty + { + Name = "invite.type", + Value = ClassifiedString.Public(document.Type) + }); + } + + if (!string.IsNullOrWhiteSpace(document.Provider)) + { + properties.Add(new AuthEventProperty + { + Name = "invite.provider", + Value = ClassifiedString.Public(document.Provider) + }); + } + + if (!string.IsNullOrWhiteSpace(document.Target)) + { + properties.Add(new AuthEventProperty + { + Name = "invite.target", + Value = ClassifiedString.Public(document.Target) + }); + } + + properties.Add(new AuthEventProperty + { + Name = "invite.expires_at", + Value = ClassifiedString.Public(document.ExpiresAt.ToString("O", CultureInfo.InvariantCulture)) + }); + } + + return properties.Count == 0 ? Array.Empty() : properties.ToArray(); + } + }); + + bootstrapGroup.MapPost("/clients", async ( + HttpContext httpContext, + BootstrapClientRequest request, + IAuthorityIdentityProviderRegistry registry, + IAuthorityBootstrapInviteStore inviteStore, + IAuthEventSink auditSink, + TimeProvider timeProvider, + CancellationToken cancellationToken) => + { + if (request is null) + { + await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Request payload is required.", null, null, null, Array.Empty(), null, null).ConfigureAwait(false); + return Results.BadRequest(new { error = "invalid_request", message = "Request payload is required." }); + } + + var now = timeProvider.GetUtcNow(); + var inviteToken = string.IsNullOrWhiteSpace(request.InviteToken) ? null : request.InviteToken.Trim(); + AuthorityBootstrapInviteDocument? invite = null; + var inviteReserved = false; + + async Task ReleaseInviteAsync(string reason) + { + if (inviteToken is null) + { + return; + } + + if (inviteReserved) + { + await inviteStore.ReleaseAsync(inviteToken, cancellationToken).ConfigureAwait(false); + } + + await WriteInviteAuditAsync("authority.bootstrap.invite.rejected", AuthEventOutcome.Failure, reason, invite, inviteToken).ConfigureAwait(false); + } + + if (inviteToken is not null) + { + var reservation = await inviteStore.TryReserveAsync(inviteToken, BootstrapInviteTypes.Client, now, request.ClientId, cancellationToken).ConfigureAwait(false); + switch (reservation.Status) + { + case BootstrapInviteReservationStatus.Reserved: + inviteReserved = true; + invite = reservation.Invite; + break; + case BootstrapInviteReservationStatus.Expired: + await WriteInviteAuditAsync("authority.bootstrap.invite.expired", AuthEventOutcome.Failure, "Invite expired before use.", reservation.Invite, inviteToken).ConfigureAwait(false); + return Results.BadRequest(new { error = "invite_expired", message = "Invite has expired." }); + case BootstrapInviteReservationStatus.AlreadyUsed: + await WriteInviteAuditAsync("authority.bootstrap.invite.rejected", AuthEventOutcome.Failure, "Invite token already consumed.", reservation.Invite, inviteToken).ConfigureAwait(false); + return Results.BadRequest(new { error = "invite_used", message = "Invite token has already been used." 
}); + default: + await WriteInviteAuditAsync("authority.bootstrap.invite.rejected", AuthEventOutcome.Failure, "Invite token is invalid.", reservation.Invite, inviteToken).ConfigureAwait(false); + return Results.BadRequest(new { error = "invalid_invite", message = "Invite token is invalid." }); + } + } + + var providerName = string.IsNullOrWhiteSpace(request.Provider) + ? invite?.Provider ?? authorityOptions.Bootstrap.DefaultIdentityProvider + : request.Provider; + + if (invite is not null && !string.IsNullOrWhiteSpace(invite.Provider) && + !string.Equals(invite.Provider, providerName, StringComparison.OrdinalIgnoreCase)) + { + await ReleaseInviteAsync("Invite provider does not match requested provider."); + return Results.BadRequest(new { error = "invite_provider_mismatch", message = "Invite is limited to a different identity provider." }); + } + + if (string.IsNullOrWhiteSpace(providerName) || !registry.TryGet(providerName!, out var providerMetadata)) + { + await ReleaseInviteAsync("Specified identity provider was not found."); + await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Specified identity provider was not found.", request.ClientId, null, providerName, request.AllowedScopes ?? Array.Empty(), request?.Confidential, inviteToken).ConfigureAwait(false); + return Results.BadRequest(new { error = "invalid_provider", message = "Specified identity provider was not found." }); + } + + if (!providerMetadata.Capabilities.SupportsClientProvisioning) + { + await ReleaseInviteAsync("Selected provider does not support client provisioning."); + await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Selected provider does not support client provisioning.", request.ClientId, null, providerMetadata.Name, request.AllowedScopes ?? Array.Empty(), request.Confidential, inviteToken).ConfigureAwait(false); + return Results.BadRequest(new { error = "unsupported_provider", message = "Selected provider does not support client provisioning." }); + } + + await using var providerHandle = await registry.AcquireAsync(providerMetadata.Name, cancellationToken).ConfigureAwait(false); + var provider = providerHandle.Provider; + + if (provider.ClientProvisioning is null) + { + await ReleaseInviteAsync("Selected provider does not support client provisioning."); + await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Selected provider does not support client provisioning.", request.ClientId, null, providerMetadata.Name, request.AllowedScopes ?? Array.Empty(), request.Confidential, inviteToken).ConfigureAwait(false); + return Results.BadRequest(new { error = "unsupported_provider", message = "Selected provider does not support client provisioning." }); + } + + if (string.IsNullOrWhiteSpace(request.ClientId)) + { + await ReleaseInviteAsync("ClientId is required."); + await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "ClientId is required.", null, null, providerMetadata.Name, request.AllowedScopes ?? Array.Empty(), request.Confidential, inviteToken).ConfigureAwait(false); + return Results.BadRequest(new { error = "invalid_request", message = "ClientId is required." }); + } + + if (invite is not null && !string.IsNullOrWhiteSpace(invite.Target) && + !string.Equals(invite.Target, request.ClientId, StringComparison.OrdinalIgnoreCase)) + { + await ReleaseInviteAsync("Invite target does not match requested client id."); + return Results.BadRequest(new { error = "invite_target_mismatch", message = "Invite target does not match client id." 
}); + } + + if (request.Confidential && string.IsNullOrWhiteSpace(request.ClientSecret)) + { + await ReleaseInviteAsync("Confidential clients require a client secret."); + await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Confidential clients require a client secret.", request.ClientId, null, providerMetadata.Name, request.AllowedScopes ?? Array.Empty(), request.Confidential, inviteToken).ConfigureAwait(false); + return Results.BadRequest(new { error = "invalid_request", message = "Confidential clients require a client secret." }); + } + + if (!TryParseUris(request.RedirectUris, out var redirectUris, out var redirectError)) + { + var errorMessage = redirectError ?? "Redirect URI validation failed."; + await ReleaseInviteAsync(errorMessage); + await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, errorMessage, request.ClientId, null, providerMetadata.Name, request.AllowedScopes ?? Array.Empty(), request.Confidential, inviteToken).ConfigureAwait(false); + return Results.BadRequest(new { error = "invalid_request", message = errorMessage }); + } + + if (!TryParseUris(request.PostLogoutRedirectUris, out var postLogoutUris, out var postLogoutError)) + { + var errorMessage = postLogoutError ?? "Post-logout redirect URI validation failed."; + await ReleaseInviteAsync(errorMessage); + await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, errorMessage, request.ClientId, null, providerMetadata.Name, request.AllowedScopes ?? Array.Empty(), request.Confidential, inviteToken).ConfigureAwait(false); + return Results.BadRequest(new { error = "invalid_request", message = errorMessage }); + } + + var properties = request.Properties is null + ? new Dictionary(StringComparer.OrdinalIgnoreCase) + : new Dictionary(request.Properties, StringComparer.OrdinalIgnoreCase); + + IReadOnlyCollection? certificateBindings = null; + if (request.CertificateBindings is not null) + { + var bindingRegistrations = new List(request.CertificateBindings.Count); + foreach (var binding in request.CertificateBindings) + { + if (binding is null || string.IsNullOrWhiteSpace(binding.Thumbprint)) + { + await ReleaseInviteAsync("Certificate binding thumbprint is required."); + await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, "Certificate binding thumbprint is required.", request.ClientId, null, providerMetadata.Name, request.AllowedScopes ?? Array.Empty(), request.Confidential, inviteToken).ConfigureAwait(false); + return Results.BadRequest(new { error = "invalid_request", message = "Certificate binding thumbprint is required." }); + } + + bindingRegistrations.Add(new AuthorityClientCertificateBindingRegistration( + binding.Thumbprint, + binding.SerialNumber, + binding.Subject, + binding.Issuer, + binding.SubjectAlternativeNames, + binding.NotBefore, + binding.NotAfter, + binding.Label)); + } + + certificateBindings = bindingRegistrations; + } + + var requestedTenant = properties.TryGetValue(AuthorityClientMetadataKeys.Tenant, out var tenantMetadata) + ? ClientCredentialHandlerHelpers.NormalizeTenant(tenantMetadata) + : null; + if (!string.IsNullOrWhiteSpace(requestedTenant)) + { + properties[AuthorityClientMetadataKeys.Tenant] = requestedTenant; + } + + var requestedProject = properties.TryGetValue(AuthorityClientMetadataKeys.Project, out var projectMetadata) + ? 
ClientCredentialHandlerHelpers.NormalizeProject(projectMetadata) + : null; + requestedProject ??= StellaOpsTenancyDefaults.AnyProject; + properties[AuthorityClientMetadataKeys.Project] = requestedProject; + + var registration = new AuthorityClientRegistration( + request.ClientId, + request.Confidential, + request.DisplayName, + request.ClientSecret, + request.AllowedGrantTypes ?? Array.Empty(), + request.AllowedScopes ?? Array.Empty(), + request.AllowedAudiences ?? Array.Empty(), + redirectUris, + postLogoutUris, + requestedTenant, + requestedProject, + properties, + certificateBindings); + + var result = await provider.ClientProvisioning.CreateOrUpdateAsync(registration, cancellationToken).ConfigureAwait(false); + + if (!result.Succeeded || result.Value is null) + { + await ReleaseInviteAsync(result.Message ?? "Client provisioning failed."); + await WriteBootstrapClientAuditAsync(AuthEventOutcome.Failure, result.Message ?? "Client provisioning failed.", request.ClientId, result.Value?.ClientId, providerMetadata.Name, request.AllowedScopes ?? Array.Empty(), request.Confidential, inviteToken).ConfigureAwait(false); + return Results.BadRequest(new { error = result.ErrorCode ?? "bootstrap_failed", message = result.Message ?? "Client provisioning failed." }); + } + + if (inviteReserved && inviteToken is not null) + { + var consumed = await inviteStore.MarkConsumedAsync(inviteToken, result.Value.ClientId, now, cancellationToken).ConfigureAwait(false); + if (consumed) + { + await WriteInviteAuditAsync("authority.bootstrap.invite.consumed", AuthEventOutcome.Success, null, invite, inviteToken).ConfigureAwait(false); + } + } + + await WriteBootstrapClientAuditAsync(AuthEventOutcome.Success, null, request.ClientId, result.Value.ClientId, providerMetadata.Name, request.AllowedScopes ?? Array.Empty(), request.Confidential, inviteToken).ConfigureAwait(false); + + return Results.Ok(new + { + provider = providerMetadata.Name, + clientId = result.Value.ClientId, + confidential = result.Value.Confidential + }); + + async Task WriteBootstrapClientAuditAsync(AuthEventOutcome outcome, string? reason, string? requestedClientId, string? assignedClientId, string? providerValue, IReadOnlyCollection scopes, bool? confidentialFlag, string? inviteValue) + { + var correlationId = Activity.Current?.TraceId.ToString() ?? httpContext.TraceIdentifier ?? Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture); + AuthEventNetwork? network = null; + var remoteAddress = httpContext.Connection.RemoteIpAddress?.ToString(); + var userAgent = httpContext.Request.Headers.UserAgent.ToString(); + + if (!string.IsNullOrWhiteSpace(remoteAddress) || !string.IsNullOrWhiteSpace(userAgent)) + { + network = new AuthEventNetwork + { + RemoteAddress = ClassifiedString.Personal(remoteAddress), + UserAgent = ClassifiedString.Personal(string.IsNullOrWhiteSpace(userAgent) ? null : userAgent) + }; + } + + var clientIdValue = assignedClientId ?? requestedClientId; + var client = clientIdValue is null && string.IsNullOrWhiteSpace(providerValue) + ? 
null + : new AuthEventClient + { + ClientId = ClassifiedString.Personal(clientIdValue), + Name = ClassifiedString.Empty, + Provider = ClassifiedString.Public(providerValue) + }; + + var properties = new List(); + if (!string.IsNullOrWhiteSpace(requestedClientId) && !string.Equals(requestedClientId, assignedClientId, StringComparison.Ordinal)) + { + properties.Add(new AuthEventProperty + { + Name = "bootstrap.requested_client_id", + Value = ClassifiedString.Public(requestedClientId) + }); + } + + if (confidentialFlag == true) + { + properties.Add(new AuthEventProperty + { + Name = "bootstrap.confidential", + Value = ClassifiedString.Public("true") + }); + } + + if (!string.IsNullOrWhiteSpace(inviteValue)) + { + properties.Add(new AuthEventProperty + { + Name = "bootstrap.invite_token", + Value = ClassifiedString.Public(inviteValue) + }); + } + + var record = new AuthEventRecord + { + EventType = "authority.bootstrap.client", + OccurredAt = timeProvider.GetUtcNow(), + CorrelationId = correlationId, + Outcome = outcome, + Reason = reason, + Subject = null, + Client = client, + Scopes = scopes is { Count: > 0 } ? scopes.ToArray() : Array.Empty(), + Network = network, + Properties = properties.Count == 0 ? Array.Empty() : properties.ToArray() + }; + + await auditSink.WriteAsync(record, httpContext.RequestAborted).ConfigureAwait(false); + } + + async Task WriteInviteAuditAsync(string eventType, AuthEventOutcome outcome, string? reason, AuthorityBootstrapInviteDocument? document, string? tokenValue) + { + var record = new AuthEventRecord + { + EventType = eventType, + OccurredAt = timeProvider.GetUtcNow(), + CorrelationId = Activity.Current?.TraceId.ToString() ?? httpContext.TraceIdentifier ?? Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture), + Outcome = outcome, + Reason = reason, + Subject = null, + Client = null, + Scopes = Array.Empty(), + Network = null, + Properties = BuildInviteProperties(document, tokenValue) + }; + + await auditSink.WriteAsync(record, httpContext.RequestAborted).ConfigureAwait(false); + } + + static AuthEventProperty[] BuildInviteProperties(AuthorityBootstrapInviteDocument? document, string? token) + { + var properties = new List(); + if (!string.IsNullOrWhiteSpace(token)) + { + properties.Add(new AuthEventProperty + { + Name = "invite.token", + Value = ClassifiedString.Public(token) + }); + } + + if (document is not null) + { + if (!string.IsNullOrWhiteSpace(document.Type)) + { + properties.Add(new AuthEventProperty + { + Name = "invite.type", + Value = ClassifiedString.Public(document.Type) + }); + } + + if (!string.IsNullOrWhiteSpace(document.Provider)) + { + properties.Add(new AuthEventProperty + { + Name = "invite.provider", + Value = ClassifiedString.Public(document.Provider) + }); + } + + if (!string.IsNullOrWhiteSpace(document.Target)) + { + properties.Add(new AuthEventProperty + { + Name = "invite.target", + Value = ClassifiedString.Public(document.Target) + }); + } + + properties.Add(new AuthEventProperty + { + Name = "invite.expires_at", + Value = ClassifiedString.Public(document.ExpiresAt.ToString("O", CultureInfo.InvariantCulture)) + }); + } + + return properties.Count == 0 ? 
Array.Empty() : properties.ToArray(); + } + }); + bootstrapGroup.MapPost("/invites", async ( + HttpContext httpContext, + BootstrapInviteRequest request, + IAuthorityBootstrapInviteStore inviteStore, + IAuthEventSink auditSink, + TimeProvider timeProvider, + CancellationToken cancellationToken) => + { + if (request is null) + { + return Results.BadRequest(new { error = "invalid_request", message = "Request payload is required." }); + } + + if (string.IsNullOrWhiteSpace(request.Type) || + ( !string.Equals(request.Type, BootstrapInviteTypes.User, StringComparison.OrdinalIgnoreCase) && + !string.Equals(request.Type, BootstrapInviteTypes.Client, StringComparison.OrdinalIgnoreCase))) + { + return Results.BadRequest(new { error = "invalid_request", message = "Invite type must be 'user' or 'client'." }); + } + + var now = timeProvider.GetUtcNow(); + var expiresAt = request.ExpiresAt ?? now.AddDays(2); + if (expiresAt <= now) + { + return Results.BadRequest(new { error = "invalid_request", message = "ExpiresAt must be in the future." }); + } + + var token = string.IsNullOrWhiteSpace(request.Token) ? Guid.NewGuid().ToString("N") : request.Token.Trim(); + + var document = new AuthorityBootstrapInviteDocument + { + Token = token, + Type = request.Type.ToLowerInvariant(), + Provider = string.IsNullOrWhiteSpace(request.Provider) ? null : request.Provider.Trim(), + Target = string.IsNullOrWhiteSpace(request.Target) ? null : request.Target.Trim(), + IssuedAt = now, + IssuedBy = string.IsNullOrWhiteSpace(request.IssuedBy) ? httpContext.User?.Identity?.Name : request.IssuedBy, + ExpiresAt = expiresAt, + Metadata = request.Metadata is null ? null : new Dictionary(request.Metadata, StringComparer.OrdinalIgnoreCase) + }; + + await inviteStore.CreateAsync(document, cancellationToken).ConfigureAwait(false); + await WriteInviteAuditAsync("authority.bootstrap.invite.created", AuthEventOutcome.Success, null, document).ConfigureAwait(false); + + return Results.Ok(new + { + document.Token, + document.Type, + document.Provider, + document.Target, + document.ExpiresAt + }); + + async Task WriteInviteAuditAsync(string eventType, AuthEventOutcome outcome, string? reason, AuthorityBootstrapInviteDocument invite) + { + var record = new AuthEventRecord + { + EventType = eventType, + OccurredAt = timeProvider.GetUtcNow(), + CorrelationId = Activity.Current?.TraceId.ToString() ?? httpContext.TraceIdentifier ?? 
Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture), + Outcome = outcome, + Reason = reason, + Subject = null, + Client = null, + Scopes = Array.Empty<string>(), + Network = null, + Properties = BuildInviteProperties(invite) + }; + + await auditSink.WriteAsync(record, httpContext.RequestAborted).ConfigureAwait(false); + } + + static AuthEventProperty[] BuildInviteProperties(AuthorityBootstrapInviteDocument invite) + { + var properties = new List<AuthEventProperty> + { + new() { Name = "invite.token", Value = ClassifiedString.Public(invite.Token) }, + new() { Name = "invite.type", Value = ClassifiedString.Public(invite.Type) }, + new() { Name = "invite.expires_at", Value = ClassifiedString.Public(invite.ExpiresAt.ToString("O", CultureInfo.InvariantCulture)) } + }; + + if (!string.IsNullOrWhiteSpace(invite.Provider)) + { + properties.Add(new AuthEventProperty { Name = "invite.provider", Value = ClassifiedString.Public(invite.Provider) }); + } + + if (!string.IsNullOrWhiteSpace(invite.Target)) + { + properties.Add(new AuthEventProperty { Name = "invite.target", Value = ClassifiedString.Public(invite.Target) }); + } + + if (!string.IsNullOrWhiteSpace(invite.IssuedBy)) + { + properties.Add(new AuthEventProperty { Name = "invite.issued_by", Value = ClassifiedString.Public(invite.IssuedBy) }); + } + + return properties.ToArray(); + } + }); + + bootstrapGroup.MapGet("/revocations/export", async ( + AuthorityRevocationExportService exportService, + CancellationToken cancellationToken) => + { + var package = await exportService.ExportAsync(cancellationToken).ConfigureAwait(false); + var build = package.Bundle; + + var response = new RevocationExportResponse + { + SchemaVersion = build.Bundle.SchemaVersion, + BundleId = build.Bundle.BundleId ?? build.Sha256, + Sequence = build.Sequence, + IssuedAt = build.IssuedAt, + SigningKeyId = package.Signature.KeyId, + Bundle = new RevocationExportPayload + { + Data = Convert.ToBase64String(build.CanonicalJson) + }, + Signature = new RevocationExportSignature + { + Algorithm = package.Signature.Algorithm, + KeyId = package.Signature.KeyId, + Provider = package.Signature.Provider, + Value = package.Signature.Value + }, + Digest = new RevocationExportDigest + { + Value = build.Sha256 + } + }; + + return Results.Ok(response); + }); + + bootstrapGroup.MapPost("/signing/rotate", ( + SigningRotationRequest? request, + AuthoritySigningKeyManager signingManager, + ILogger signingLogger) => + { + if (request is null) + { + signingLogger.LogWarning("Signing rotation request payload missing."); + return Results.BadRequest(new { error = "invalid_request", message = "Request payload is required." }); + } + + try + { + var result = signingManager.Rotate(request); + signingLogger.LogInformation("Signing key rotation completed. 
Active key {KeyId}.", result.ActiveKeyId); + + return Results.Ok(new + { + activeKeyId = result.ActiveKeyId, + provider = result.ActiveProvider, + source = result.ActiveSource, + location = result.ActiveLocation, + previousKeyId = result.PreviousKeyId, + retiredKeyIds = result.RetiredKeyIds + }); + } + catch (InvalidOperationException ex) + { + signingLogger.LogWarning(ex, "Signing rotation failed due to invalid input."); + return Results.BadRequest(new { error = "rotation_failed", message = ex.Message }); + } + catch (Exception ex) + { + signingLogger.LogError(ex, "Unexpected failure rotating signing key."); + return Results.Problem("Failed to rotate signing key."); + } + }); +} + +app.UseSerilogRequestLogging(options => +{ + options.EnrichDiagnosticContext = (diagnosticContext, httpContext) => + { + diagnosticContext.Set("TraceId", Activity.Current?.TraceId.ToString()); + diagnosticContext.Set("UserAgent", httpContext.Request.Headers.UserAgent.ToString()); + }; +}); + +app.UseExceptionHandler(static errorApp => +{ + errorApp.Run(async context => + { + context.Response.ContentType = "application/problem+json"; + var problem = Results.Problem( + statusCode: StatusCodes.Status500InternalServerError, + title: "Unhandled server error", + detail: "Unexpected failure while processing the request."); + + await problem.ExecuteAsync(context); + }); +}); + +app.UseRouting(); +app.UseAuthorityRateLimiterContext(); +app.UseRateLimiter(); +app.UseAuthentication(); +app.UseAuthorization(); + +app.MapGet("/health", async (IAuthorityIdentityProviderRegistry registry, CancellationToken cancellationToken) => + { + var pluginHealth = new List(); + foreach (var providerMetadata in registry.Providers) + { + await using var handle = await registry.AcquireAsync(providerMetadata.Name, cancellationToken).ConfigureAwait(false); + var health = await handle.Provider.CheckHealthAsync(cancellationToken).ConfigureAwait(false); + pluginHealth.Add(new + { + provider = providerMetadata.Name, + status = health.Status.ToString().ToLowerInvariant(), + message = health.Message + }); + } + + return Results.Ok(new + { + status = "healthy", + identityProviders = pluginHealth + }); + }) + .WithName("HealthCheck"); + +app.MapGet("/ready", (IAuthorityIdentityProviderRegistry registry) => + Results.Ok(new + { + status = registry.Providers.Count > 0 ? "ready" : "degraded", + identityProviders = registry.Providers.Select(p => p.Name).ToArray() + })) + .WithName("ReadinessCheck"); + +app.MapPost("/permalinks/vuln", async ( + VulnPermalinkRequest request, + VulnPermalinkService service, + CancellationToken cancellationToken) => +{ + try + { + var response = await service.CreateAsync(request, cancellationToken).ConfigureAwait(false); + return Results.Ok(response); + } + catch (ArgumentException ex) + { + return Results.BadRequest(new { error = "invalid_request", message = ex.Message }); + } + catch (InvalidOperationException ex) + { + return Results.Problem(ex.Message); + } +}) + .RequireAuthorization(policy => policy.RequireStellaOpsScopes(StellaOpsScopes.VulnRead)) + .WithName("CreateVulnPermalink"); + +app.MapGet("/jwks", (AuthorityJwksService jwksService) => Results.Ok(jwksService.Build())) + .WithName("JsonWebKeySet"); + +// Ensure signing key manager initialises key material on startup. 
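The hunk above completes the public surface of the relocated Authority host: `/health` walks every registered identity-provider plug-in and reports its status, `/ready` answers `degraded` when no providers are loaded, and `/jwks` publishes the key set built by `AuthorityJwksService`. As a rough illustration only, the sketch below shows how a deployment script might smoke-test those three routes; the class name, base address, and optional bearer token are assumptions for the example and are not part of this patch.

```csharp
// Hypothetical smoke check against a running Authority instance.
// Only the routes (/health, /ready, /jwks) come from the endpoints mapped above;
// everything else (names, base address, token handling) is illustrative.
using System;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Threading.Tasks;

internal static class AuthoritySmokeCheck
{
    public static async Task RunAsync(Uri baseAddress, string? bearerToken = null)
    {
        using var client = new HttpClient { BaseAddress = baseAddress };
        if (!string.IsNullOrWhiteSpace(bearerToken))
        {
            client.DefaultRequestHeaders.Authorization =
                new AuthenticationHeaderValue("Bearer", bearerToken);
        }

        // /health enumerates identity-provider plug-ins and their health status.
        var health = await client.GetStringAsync("/health").ConfigureAwait(false);
        Console.WriteLine($"health: {health}");

        // /ready flips to "degraded" when no identity providers are registered.
        var ready = await client.GetStringAsync("/ready").ConfigureAwait(false);
        Console.WriteLine($"ready: {ready}");

        // /jwks exposes the signing keys published by AuthorityJwksService.
        var jwks = await client.GetStringAsync("/jwks").ConfigureAwait(false);
        Console.WriteLine($"jwks: {jwks}");
    }
}
```

`GetStringAsync` is used so a non-2xx response surfaces as `HttpRequestException`, which keeps the sketch short; a real probe would more likely parse the JSON and inspect the `status` field shown in the handlers above.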
+app.Services.GetRequiredService<AuthoritySigningKeyManager>(); + +app.Run(); + +static PluginHostOptions BuildPluginHostOptions(StellaOpsAuthorityOptions options, string basePath) +{ + var pluginDirectory = options.PluginDirectories.FirstOrDefault(); + var hostOptions = new PluginHostOptions + { + BaseDirectory = basePath, + PluginsDirectory = string.IsNullOrWhiteSpace(pluginDirectory) + ? "StellaOps.Authority.PluginBinaries" + : pluginDirectory, + PrimaryPrefix = "StellaOps.Authority" + }; + + if (!hostOptions.SearchPatterns.Any(pattern => string.Equals(pattern, "StellaOps.Authority.Plugin.*.dll", StringComparison.OrdinalIgnoreCase))) + { + hostOptions.SearchPatterns.Add("StellaOps.Authority.Plugin.*.dll"); + } + + foreach (var pair in options.Plugins.Descriptors.OrderBy(static p => p.Key, StringComparer.OrdinalIgnoreCase)) + { + var descriptor = pair.Value; + if (descriptor.Enabled && !string.IsNullOrWhiteSpace(descriptor.AssemblyName)) + { + hostOptions.PluginOrder.Add(descriptor.AssemblyName!); + } + } + + return hostOptions; +} + +static bool TryParseUris(IReadOnlyCollection<string>? values, out IReadOnlyCollection<Uri> uris, out string? error) +{ + error = null; + + if (values is null || values.Count == 0) + { + uris = Array.Empty<Uri>(); + return true; + } + + var parsed = new List<Uri>(values.Count); + foreach (var entry in values) + { + if (string.IsNullOrWhiteSpace(entry) || !Uri.TryCreate(entry, UriKind.Absolute, out var uri)) + { + uris = Array.Empty<Uri>(); + error = $"Invalid URI value '{entry}'."; + return false; + } + + parsed.Add(uri); + } + + uris = parsed; + return true; +} diff --git a/src/StellaOps.Authority/StellaOps.Authority/Properties/AssemblyInfo.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Properties/AssemblyInfo.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority/Properties/AssemblyInfo.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/Properties/AssemblyInfo.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority/Properties/launchSettings.json b/src/Authority/StellaOps.Authority/StellaOps.Authority/Properties/launchSettings.json similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority/Properties/launchSettings.json rename to src/Authority/StellaOps.Authority/StellaOps.Authority/Properties/launchSettings.json diff --git a/src/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimiterFeature.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimiterFeature.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimiterFeature.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimiterFeature.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimiterMetadata.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimiterMetadata.cs similarity index 96% rename from src/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimiterMetadata.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimiterMetadata.cs index 33f5cfa7..af78bffe 100644 --- a/src/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimiterMetadata.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimiterMetadata.cs @@ -1,80 +1,80 @@ -using System.Collections.Generic; -using StellaOps.Auth.Abstractions; - -namespace StellaOps.Authority.RateLimiting; - -/// -/// 
Metadata captured for the current request to assist rate limiter partitioning and diagnostics. -/// -internal sealed class AuthorityRateLimiterMetadata -{ - private static readonly StringComparer OrdinalIgnoreCase = StringComparer.OrdinalIgnoreCase; - - private readonly Dictionary tags = new(OrdinalIgnoreCase); - - /// - /// Canonical endpoint associated with the request (e.g. "/token"). - /// - public string? Endpoint { get; set; } - - /// - /// Remote IP address observed for the request (post proxy resolution where available). - /// - public string? RemoteIp { get; set; } - - /// - /// Forwarded IP address extracted from proxy headers (if present). - /// - public string? ForwardedFor { get; set; } - - /// - /// OAuth client identifier associated with the request, when available. - /// - public string? ClientId { get; set; } - - /// - /// Subject identifier (user) associated with the request, when available. - /// - public string? SubjectId { get; set; } - - /// - /// Tenant identifier associated with the request, when available. - /// - public string? Tenant { get; set; } - - /// - /// Project identifier associated with the request, when available. - /// - public string? Project { get; set; } = StellaOpsTenancyDefaults.AnyProject; - - /// - /// Additional metadata tags that can be attached by later handlers. - /// - public IReadOnlyDictionary Tags => tags; - - /// - /// User agent string associated with the request, if captured. - /// - public string? UserAgent { get; set; } - - /// - /// Adds or updates an arbitrary metadata tag for downstream consumers. - /// - /// The tag name. - /// The tag value (removed when null/whitespace). - public void SetTag(string name, string? value) - { - if (string.IsNullOrWhiteSpace(name)) - { - return; - } - - if (string.IsNullOrWhiteSpace(value)) - { - tags.Remove(name); - return; - } - - tags[name] = value; - } -} +using System.Collections.Generic; +using StellaOps.Auth.Abstractions; + +namespace StellaOps.Authority.RateLimiting; + +/// +/// Metadata captured for the current request to assist rate limiter partitioning and diagnostics. +/// +internal sealed class AuthorityRateLimiterMetadata +{ + private static readonly StringComparer OrdinalIgnoreCase = StringComparer.OrdinalIgnoreCase; + + private readonly Dictionary tags = new(OrdinalIgnoreCase); + + /// + /// Canonical endpoint associated with the request (e.g. "/token"). + /// + public string? Endpoint { get; set; } + + /// + /// Remote IP address observed for the request (post proxy resolution where available). + /// + public string? RemoteIp { get; set; } + + /// + /// Forwarded IP address extracted from proxy headers (if present). + /// + public string? ForwardedFor { get; set; } + + /// + /// OAuth client identifier associated with the request, when available. + /// + public string? ClientId { get; set; } + + /// + /// Subject identifier (user) associated with the request, when available. + /// + public string? SubjectId { get; set; } + + /// + /// Tenant identifier associated with the request, when available. + /// + public string? Tenant { get; set; } + + /// + /// Project identifier associated with the request, when available. + /// + public string? Project { get; set; } = StellaOpsTenancyDefaults.AnyProject; + + /// + /// Additional metadata tags that can be attached by later handlers. + /// + public IReadOnlyDictionary Tags => tags; + + /// + /// User agent string associated with the request, if captured. + /// + public string? 
UserAgent { get; set; } + + /// + /// Adds or updates an arbitrary metadata tag for downstream consumers. + /// + /// The tag name. + /// The tag value (removed when null/whitespace). + public void SetTag(string name, string? value) + { + if (string.IsNullOrWhiteSpace(name)) + { + return; + } + + if (string.IsNullOrWhiteSpace(value)) + { + tags.Remove(name); + return; + } + + tags[name] = value; + } +} diff --git a/src/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimiterMetadataAccessor.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimiterMetadataAccessor.cs similarity index 96% rename from src/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimiterMetadataAccessor.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimiterMetadataAccessor.cs index c1784aa1..29cf8eaa 100644 --- a/src/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimiterMetadataAccessor.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimiterMetadataAccessor.cs @@ -1,129 +1,129 @@ -using System; -using Microsoft.AspNetCore.Http; -using StellaOps.Auth.Abstractions; - -namespace StellaOps.Authority.RateLimiting; - -/// -/// Provides access to the rate limiter metadata for the current HTTP request. -/// -internal interface IAuthorityRateLimiterMetadataAccessor -{ - /// - /// Retrieves the metadata for the current request, if available. - /// - /// The metadata instance or null when no HTTP context is present. - AuthorityRateLimiterMetadata? GetMetadata(); - - /// - /// Updates the client identifier associated with the current request. - /// - void SetClientId(string? clientId); - - /// - /// Updates the subject identifier associated with the current request. - /// - void SetSubjectId(string? subjectId); - - /// - /// Updates the tenant identifier associated with the current request. - /// - void SetTenant(string? tenant); - - /// - /// Updates the project identifier associated with the current request. - /// - void SetProject(string? project); - - /// - /// Adds or removes a metadata tag for the current request. - /// - void SetTag(string name, string? value); -} - -internal sealed class AuthorityRateLimiterMetadataAccessor : IAuthorityRateLimiterMetadataAccessor -{ - private readonly IHttpContextAccessor httpContextAccessor; - - public AuthorityRateLimiterMetadataAccessor(IHttpContextAccessor httpContextAccessor) - { - this.httpContextAccessor = httpContextAccessor ?? throw new ArgumentNullException(nameof(httpContextAccessor)); - } - - public AuthorityRateLimiterMetadata? GetMetadata() - { - return TryGetMetadata(); - } - - public void SetClientId(string? clientId) - { - var metadata = TryGetMetadata(); - if (metadata is not null) - { - metadata.ClientId = Normalize(clientId); - metadata.SetTag("authority.client_id", metadata.ClientId); - } - } - - public void SetSubjectId(string? subjectId) - { - var metadata = TryGetMetadata(); - if (metadata is not null) - { - metadata.SubjectId = Normalize(subjectId); - metadata.SetTag("authority.subject_id", metadata.SubjectId); - } - } - - public void SetTenant(string? tenant) - { - var metadata = TryGetMetadata(); - if (metadata is not null) - { - metadata.Tenant = NormalizeTenant(tenant); - metadata.SetTag("authority.tenant", metadata.Tenant); - } - } - - public void SetProject(string? 
project) - { - var metadata = TryGetMetadata(); - if (metadata is not null) - { - metadata.Project = NormalizeProject(project); - metadata.SetTag("authority.project", metadata.Project); - } - } - - public void SetTag(string name, string? value) - { - var metadata = TryGetMetadata(); - metadata?.SetTag(name, value); - } - - private AuthorityRateLimiterMetadata? TryGetMetadata() - { - var context = httpContextAccessor.HttpContext; - return context?.Features.Get()?.Metadata; - } - - private static string? Normalize(string? value) - { - return string.IsNullOrWhiteSpace(value) ? null : value; - } - - private static string? NormalizeTenant(string? value) - { - return string.IsNullOrWhiteSpace(value) ? null : value.Trim().ToLowerInvariant(); - } - - private static string? NormalizeProject(string? value) - { - if (string.IsNullOrWhiteSpace(value)) - { - return StellaOpsTenancyDefaults.AnyProject; - } - - return value.Trim().ToLowerInvariant(); - } -} +using System; +using Microsoft.AspNetCore.Http; +using StellaOps.Auth.Abstractions; + +namespace StellaOps.Authority.RateLimiting; + +/// +/// Provides access to the rate limiter metadata for the current HTTP request. +/// +internal interface IAuthorityRateLimiterMetadataAccessor +{ + /// + /// Retrieves the metadata for the current request, if available. + /// + /// The metadata instance or null when no HTTP context is present. + AuthorityRateLimiterMetadata? GetMetadata(); + + /// + /// Updates the client identifier associated with the current request. + /// + void SetClientId(string? clientId); + + /// + /// Updates the subject identifier associated with the current request. + /// + void SetSubjectId(string? subjectId); + + /// + /// Updates the tenant identifier associated with the current request. + /// + void SetTenant(string? tenant); + + /// + /// Updates the project identifier associated with the current request. + /// + void SetProject(string? project); + + /// + /// Adds or removes a metadata tag for the current request. + /// + void SetTag(string name, string? value); +} + +internal sealed class AuthorityRateLimiterMetadataAccessor : IAuthorityRateLimiterMetadataAccessor +{ + private readonly IHttpContextAccessor httpContextAccessor; + + public AuthorityRateLimiterMetadataAccessor(IHttpContextAccessor httpContextAccessor) + { + this.httpContextAccessor = httpContextAccessor ?? throw new ArgumentNullException(nameof(httpContextAccessor)); + } + + public AuthorityRateLimiterMetadata? GetMetadata() + { + return TryGetMetadata(); + } + + public void SetClientId(string? clientId) + { + var metadata = TryGetMetadata(); + if (metadata is not null) + { + metadata.ClientId = Normalize(clientId); + metadata.SetTag("authority.client_id", metadata.ClientId); + } + } + + public void SetSubjectId(string? subjectId) + { + var metadata = TryGetMetadata(); + if (metadata is not null) + { + metadata.SubjectId = Normalize(subjectId); + metadata.SetTag("authority.subject_id", metadata.SubjectId); + } + } + + public void SetTenant(string? tenant) + { + var metadata = TryGetMetadata(); + if (metadata is not null) + { + metadata.Tenant = NormalizeTenant(tenant); + metadata.SetTag("authority.tenant", metadata.Tenant); + } + } + + public void SetProject(string? project) + { + var metadata = TryGetMetadata(); + if (metadata is not null) + { + metadata.Project = NormalizeProject(project); + metadata.SetTag("authority.project", metadata.Project); + } + } + + public void SetTag(string name, string? 
value) + { + var metadata = TryGetMetadata(); + metadata?.SetTag(name, value); + } + + private AuthorityRateLimiterMetadata? TryGetMetadata() + { + var context = httpContextAccessor.HttpContext; + return context?.Features.Get()?.Metadata; + } + + private static string? Normalize(string? value) + { + return string.IsNullOrWhiteSpace(value) ? null : value; + } + + private static string? NormalizeTenant(string? value) + { + return string.IsNullOrWhiteSpace(value) ? null : value.Trim().ToLowerInvariant(); + } + + private static string? NormalizeProject(string? value) + { + if (string.IsNullOrWhiteSpace(value)) + { + return StellaOpsTenancyDefaults.AnyProject; + } + + return value.Trim().ToLowerInvariant(); + } +} diff --git a/src/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimiterMetadataMiddleware.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimiterMetadataMiddleware.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimiterMetadataMiddleware.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimiterMetadataMiddleware.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimiterPartitionKeyResolver.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimiterPartitionKeyResolver.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimiterPartitionKeyResolver.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimiterPartitionKeyResolver.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimitingApplicationBuilderExtensions.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimitingApplicationBuilderExtensions.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimitingApplicationBuilderExtensions.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/RateLimiting/AuthorityRateLimitingApplicationBuilderExtensions.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority/Revocation/AuthorityRevocationExportService.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Revocation/AuthorityRevocationExportService.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority/Revocation/AuthorityRevocationExportService.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/Revocation/AuthorityRevocationExportService.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationBundleBuildResult.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationBundleBuildResult.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationBundleBuildResult.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationBundleBuildResult.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationBundleBuilder.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationBundleBuilder.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationBundleBuilder.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationBundleBuilder.cs diff --git 
a/src/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationBundleModel.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationBundleModel.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationBundleModel.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationBundleModel.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationBundleSignature.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationBundleSignature.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationBundleSignature.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationBundleSignature.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationBundleSigner.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationBundleSigner.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationBundleSigner.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationBundleSigner.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationEntryModel.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationEntryModel.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationEntryModel.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationEntryModel.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationExportPackage.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationExportPackage.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationExportPackage.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationExportPackage.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationExportResponse.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationExportResponse.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationExportResponse.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/Revocation/RevocationExportResponse.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority/Security/AuthorityClientCertificateValidationResult.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Security/AuthorityClientCertificateValidationResult.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority/Security/AuthorityClientCertificateValidationResult.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/Security/AuthorityClientCertificateValidationResult.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority/Security/AuthorityClientCertificateValidator.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Security/AuthorityClientCertificateValidator.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority/Security/AuthorityClientCertificateValidator.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/Security/AuthorityClientCertificateValidator.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority/Security/AuthoritySenderConstraintKinds.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Security/AuthoritySenderConstraintKinds.cs similarity 
index 100% rename from src/StellaOps.Authority/StellaOps.Authority/Security/AuthoritySenderConstraintKinds.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/Security/AuthoritySenderConstraintKinds.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority/Security/IAuthorityClientCertificateValidator.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Security/IAuthorityClientCertificateValidator.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority/Security/IAuthorityClientCertificateValidator.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/Security/IAuthorityClientCertificateValidator.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority/Signing/AuthorityJwksService.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Signing/AuthorityJwksService.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority/Signing/AuthorityJwksService.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/Signing/AuthorityJwksService.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority/Signing/AuthoritySigningKeyManager.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Signing/AuthoritySigningKeyManager.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority/Signing/AuthoritySigningKeyManager.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/Signing/AuthoritySigningKeyManager.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority/Signing/AuthoritySigningKeyRequest.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Signing/AuthoritySigningKeyRequest.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority/Signing/AuthoritySigningKeyRequest.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/Signing/AuthoritySigningKeyRequest.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority/Signing/AuthoritySigningKeyStatus.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Signing/AuthoritySigningKeyStatus.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority/Signing/AuthoritySigningKeyStatus.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/Signing/AuthoritySigningKeyStatus.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority/Signing/FileAuthoritySigningKeySource.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Signing/FileAuthoritySigningKeySource.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority/Signing/FileAuthoritySigningKeySource.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/Signing/FileAuthoritySigningKeySource.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority/Signing/IAuthoritySigningKeySource.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Signing/IAuthoritySigningKeySource.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority/Signing/IAuthoritySigningKeySource.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/Signing/IAuthoritySigningKeySource.cs diff --git a/src/StellaOps.Authority/StellaOps.Authority/Signing/SigningRotationRequest.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Signing/SigningRotationRequest.cs similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority/Signing/SigningRotationRequest.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/Signing/SigningRotationRequest.cs diff --git 
a/src/StellaOps.Authority/StellaOps.Authority/StellaOps.Authority.csproj b/src/Authority/StellaOps.Authority/StellaOps.Authority/StellaOps.Authority.csproj similarity index 80% rename from src/StellaOps.Authority/StellaOps.Authority/StellaOps.Authority.csproj rename to src/Authority/StellaOps.Authority/StellaOps.Authority/StellaOps.Authority.csproj index 80070ef9..c6eb2550 100644 --- a/src/StellaOps.Authority/StellaOps.Authority/StellaOps.Authority.csproj +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/StellaOps.Authority.csproj @@ -1,18 +1,19 @@ - - - net10.0 - preview - enable - enable - true - $(DefineConstants);STELLAOPS_AUTH_SECURITY - - - - - - - + + + + net10.0 + preview + enable + enable + true + $(DefineConstants);STELLAOPS_AUTH_SECURITY + + + + + + + @@ -24,15 +25,15 @@ - - - - - + + + + + PreserveNewest - + \ No newline at end of file diff --git a/src/StellaOps.Authority/StellaOps.Authority/Tenants/AuthorityTenantCatalog.cs b/src/Authority/StellaOps.Authority/StellaOps.Authority/Tenants/AuthorityTenantCatalog.cs similarity index 97% rename from src/StellaOps.Authority/StellaOps.Authority/Tenants/AuthorityTenantCatalog.cs rename to src/Authority/StellaOps.Authority/StellaOps.Authority/Tenants/AuthorityTenantCatalog.cs index 14cb6b17..30896224 100644 --- a/src/StellaOps.Authority/StellaOps.Authority/Tenants/AuthorityTenantCatalog.cs +++ b/src/Authority/StellaOps.Authority/StellaOps.Authority/Tenants/AuthorityTenantCatalog.cs @@ -1,43 +1,43 @@ -using StellaOps.Configuration; - -namespace StellaOps.Authority.Tenants; - -public interface IAuthorityTenantCatalog -{ - IReadOnlyList GetTenants(); -} - -public sealed class AuthorityTenantCatalog : IAuthorityTenantCatalog -{ - private readonly IReadOnlyList tenants; - - public AuthorityTenantCatalog(StellaOpsAuthorityOptions authorityOptions) - { - if (authorityOptions is null) - { - throw new ArgumentNullException(nameof(authorityOptions)); - } - - tenants = authorityOptions.Tenants.Count == 0 - ? Array.Empty() - : authorityOptions.Tenants - .Select(t => new AuthorityTenantView( - t.Id, - string.IsNullOrWhiteSpace(t.DisplayName) ? t.Id : t.DisplayName, - string.IsNullOrWhiteSpace(t.Status) ? "active" : t.Status, - string.IsNullOrWhiteSpace(t.IsolationMode) ? "shared" : t.IsolationMode, - t.DefaultRoles.Count == 0 ? Array.Empty() : t.DefaultRoles.ToArray(), - t.Projects.Count == 0 ? Array.Empty() : t.Projects.ToArray())) - .ToArray(); - } - - public IReadOnlyList GetTenants() => tenants; -} - -public sealed record AuthorityTenantView( - string Id, - string DisplayName, - string Status, - string IsolationMode, - IReadOnlyList DefaultRoles, - IReadOnlyList Projects); +using StellaOps.Configuration; + +namespace StellaOps.Authority.Tenants; + +public interface IAuthorityTenantCatalog +{ + IReadOnlyList GetTenants(); +} + +public sealed class AuthorityTenantCatalog : IAuthorityTenantCatalog +{ + private readonly IReadOnlyList tenants; + + public AuthorityTenantCatalog(StellaOpsAuthorityOptions authorityOptions) + { + if (authorityOptions is null) + { + throw new ArgumentNullException(nameof(authorityOptions)); + } + + tenants = authorityOptions.Tenants.Count == 0 + ? Array.Empty() + : authorityOptions.Tenants + .Select(t => new AuthorityTenantView( + t.Id, + string.IsNullOrWhiteSpace(t.DisplayName) ? t.Id : t.DisplayName, + string.IsNullOrWhiteSpace(t.Status) ? "active" : t.Status, + string.IsNullOrWhiteSpace(t.IsolationMode) ? "shared" : t.IsolationMode, + t.DefaultRoles.Count == 0 ? 
Array.Empty() : t.DefaultRoles.ToArray(), + t.Projects.Count == 0 ? Array.Empty() : t.Projects.ToArray())) + .ToArray(); + } + + public IReadOnlyList GetTenants() => tenants; +} + +public sealed record AuthorityTenantView( + string Id, + string DisplayName, + string Status, + string IsolationMode, + IReadOnlyList DefaultRoles, + IReadOnlyList Projects); diff --git a/src/StellaOps.Authority/StellaOps.Authority/appsettings.Development.json b/src/Authority/StellaOps.Authority/StellaOps.Authority/appsettings.Development.json similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority/appsettings.Development.json rename to src/Authority/StellaOps.Authority/StellaOps.Authority/appsettings.Development.json diff --git a/src/StellaOps.Authority/StellaOps.Authority/appsettings.json b/src/Authority/StellaOps.Authority/StellaOps.Authority/appsettings.json similarity index 100% rename from src/StellaOps.Authority/StellaOps.Authority/appsettings.json rename to src/Authority/StellaOps.Authority/StellaOps.Authority/appsettings.json diff --git a/src/StellaOps.Authority/TASKS.md b/src/Authority/StellaOps.Authority/TASKS.md similarity index 98% rename from src/StellaOps.Authority/TASKS.md rename to src/Authority/StellaOps.Authority/TASKS.md index 8194d200..5aac58bc 100644 --- a/src/StellaOps.Authority/TASKS.md +++ b/src/Authority/StellaOps.Authority/TASKS.md @@ -170,8 +170,8 @@ | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| | AUTH-OAS-61-001 | DONE (2025-10-28) | Authority Core & Security Guild, API Contracts Guild | OAS-61-001 | Document Authority authentication/token endpoints in OAS with scopes, examples, and error envelopes. | Spec complete with security schemes; lint passes. | -> 2025-10-28: Auth OpenAPI authored at `src/StellaOps.Api.OpenApi/authority/openapi.yaml` covering `/token`, `/introspect`, `/revoke`, `/jwks`, scope catalog, and error envelopes; parsed via PyYAML sanity check and referenced in Epic 17 docs. +> 2025-10-28: Auth OpenAPI authored at `src/Api/StellaOps.Api.OpenApi/authority/openapi.yaml` covering `/token`, `/introspect`, `/revoke`, `/jwks`, scope catalog, and error envelopes; parsed via PyYAML sanity check and referenced in Epic 17 docs. | AUTH-OAS-61-002 | DONE (2025-10-28) | Authority Core & Security Guild | AUTH-OAS-61-001 | Implement `/.well-known/openapi` with scope metadata, supported grant types, and build version. | Endpoint deployed; contract tests cover discovery. | -> 2025-10-28: Added `/.well-known/openapi` endpoint wiring cached spec metadata, YAML/JSON negotiation, HTTP cache headers, and tests verifying ETag + Accept handling. Authority spec (`src/StellaOps.Api.OpenApi/authority/openapi.yaml`) now includes grant/scope extensions. +> 2025-10-28: Added `/.well-known/openapi` endpoint wiring cached spec metadata, YAML/JSON negotiation, HTTP cache headers, and tests verifying ETag + Accept handling. Authority spec (`src/Api/StellaOps.Api.OpenApi/authority/openapi.yaml`) now includes grant/scope extensions. | AUTH-OAS-62-001 | TODO | Authority Core & Security Guild, SDK Generator Guild | AUTH-OAS-61-001, SDKGEN-63-001 | Provide SDK helpers for OAuth2/PAT flows, tenancy override header; add integration tests. | SDKs expose auth helpers; tests cover token issuance; docs updated. | | AUTH-OAS-63-001 | TODO | Authority Core & Security Guild, API Governance Guild | APIGOV-63-001 | Emit deprecation headers and notifications for legacy auth endpoints. 
| Headers emitted; notifications verified; migration guide published. | diff --git a/src/Bench/StellaOps.Bench.sln b/src/Bench/StellaOps.Bench.sln new file mode 100644 index 00000000..d1f90214 --- /dev/null +++ b/src/Bench/StellaOps.Bench.sln @@ -0,0 +1,412 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.0.31903.59 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "StellaOps.Bench", "StellaOps.Bench", "{1553F566-661E-A2F5-811B-F74BF45C44CC}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "LinkNotMerge", "LinkNotMerge", "{69949CE0-F59D-CF46-D9C1-E95AB6BB2E4D}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Bench.LinkNotMerge", "StellaOps.Bench\LinkNotMerge\StellaOps.Bench.LinkNotMerge\StellaOps.Bench.LinkNotMerge.csproj", "{D9111701-26D8-4264-9EEA-6447BE21359B}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Bench.LinkNotMerge.Tests", "StellaOps.Bench\LinkNotMerge\StellaOps.Bench.LinkNotMerge.Tests\StellaOps.Bench.LinkNotMerge.Tests.csproj", "{DA2C6C19-DE17-4774-9FF4-DD09006DC07D}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "LinkNotMerge.Vex", "LinkNotMerge.Vex", "{4C3B55EE-3F9B-9266-8221-1CC629B4666E}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Bench.LinkNotMerge.Vex", "StellaOps.Bench\LinkNotMerge.Vex\StellaOps.Bench.LinkNotMerge.Vex\StellaOps.Bench.LinkNotMerge.Vex.csproj", "{EDD78A51-769D-4BEF-954C-F216D4B6A588}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Bench.LinkNotMerge.Vex.Tests", "StellaOps.Bench\LinkNotMerge.Vex\StellaOps.Bench.LinkNotMerge.Vex.Tests\StellaOps.Bench.LinkNotMerge.Vex.Tests.csproj", "{E2A981C4-E682-4988-AE0A-AFEF708B394A}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Notify", "Notify", "{2A739D1E-B671-CFCB-8E07-CB70CFCF6480}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Bench.Notify", "StellaOps.Bench\Notify\StellaOps.Bench.Notify\StellaOps.Bench.Notify.csproj", "{57AE6DC2-209F-4B09-B4DF-B6A9CC559605}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Notify.Models", "..\Notify\__Libraries\StellaOps.Notify.Models\StellaOps.Notify.Models.csproj", "{E7F8F028-FD58-4E88-A01A-ED462D9AE154}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Bench.Notify.Tests", "StellaOps.Bench\Notify\StellaOps.Bench.Notify.Tests\StellaOps.Bench.Notify.Tests.csproj", "{1D83E3BC-5753-4D32-84BC-7D6F4BD7B800}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "PolicyEngine", "PolicyEngine", "{CBDF819E-923F-A07F-78D9-D599DD28197E}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Bench.PolicyEngine", "StellaOps.Bench\PolicyEngine\StellaOps.Bench.PolicyEngine\StellaOps.Bench.PolicyEngine.csproj", "{01D6C08E-A4CD-4B11-9021-E6756C3BB850}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Policy", "..\Policy\__Libraries\StellaOps.Policy\StellaOps.Policy.csproj", "{80FD3504-F8B2-44F3-8141-33F2570EB405}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "Scanner.Analyzers", "Scanner.Analyzers", "{697EB1FA-E633-9F7D-F6B7-BDABA06A15F7}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Bench.ScannerAnalyzers", 
"StellaOps.Bench\Scanner.Analyzers\StellaOps.Bench.ScannerAnalyzers\StellaOps.Bench.ScannerAnalyzers.csproj", "{7B734C11-9112-49CE-907F-A36B8D71C409}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang", "..\Scanner\__Libraries\StellaOps.Scanner.Analyzers.Lang\StellaOps.Scanner.Analyzers.Lang.csproj", "{515F0CFB-E9B5-4199-BECF-CCF98DE8B29D}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Core", "..\Scanner\__Libraries\StellaOps.Scanner.Core\StellaOps.Scanner.Core.csproj", "{82C007C4-8589-4592-AF79-636F16CEEA3E}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Client", "..\Authority\StellaOps.Authority\StellaOps.Auth.Client\StellaOps.Auth.Client.csproj", "{6595ACF8-0109-4717-9D65-EB5B646AAC33}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Abstractions", "..\Authority\StellaOps.Authority\StellaOps.Auth.Abstractions\StellaOps.Auth.Abstractions.csproj", "{93618630-2606-423B-9F43-84A5198E1FFB}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Configuration", "..\__Libraries\StellaOps.Configuration\StellaOps.Configuration.csproj", "{4E2D2CC1-C671-4067-80DB-348B94E7B1BB}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Authority.Plugins.Abstractions", "..\Authority\StellaOps.Authority\StellaOps.Authority.Plugins.Abstractions\StellaOps.Authority.Plugins.Abstractions.csproj", "{CAF75FA7-B552-4ED6-AE28-31542C8D3DF2}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cryptography", "..\__Libraries\StellaOps.Cryptography\StellaOps.Cryptography.csproj", "{5734241F-0E96-4BC7-9D02-C49006DBCEAB}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Security", "..\__Libraries\StellaOps.Auth.Security\StellaOps.Auth.Security.csproj", "{4BD97E56-6D2A-4280-B006-6D4F9590F254}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Plugin", "..\__Libraries\StellaOps.Plugin\StellaOps.Plugin.csproj", "{6A1616E0-F2F0-4BC6-8FF0-6B1139CFEF07}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.DependencyInjection", "..\__Libraries\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj", "{02B6B193-C762-4A23-B09E-83FC97C3DBAA}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Go", "..\Scanner\__Libraries\StellaOps.Scanner.Analyzers.Lang.Go\StellaOps.Scanner.Analyzers.Lang.Go.csproj", "{DF2EBD7F-BDA8-4E1E-AF38-28374D57CB2F}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Node", "..\Scanner\__Libraries\StellaOps.Scanner.Analyzers.Lang.Node\StellaOps.Scanner.Analyzers.Lang.Node.csproj", "{454B823F-3521-491D-B8AF-11D821929D38}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Java", "..\Scanner\__Libraries\StellaOps.Scanner.Analyzers.Lang.Java\StellaOps.Scanner.Analyzers.Lang.Java.csproj", "{8579FCCC-2E97-4681-AB26-E93357D1F26B}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.DotNet", "..\Scanner\__Libraries\StellaOps.Scanner.Analyzers.Lang.DotNet\StellaOps.Scanner.Analyzers.Lang.DotNet.csproj", "{6D0186CB-461C-4DD6-9305-23F5AF5F41A2}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Python", 
"..\Scanner\__Libraries\StellaOps.Scanner.Analyzers.Lang.Python\StellaOps.Scanner.Analyzers.Lang.Python.csproj", "{9594C6EC-25BA-45BF-A91F-B97D6D2126CD}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Bench.ScannerAnalyzers.Tests", "StellaOps.Bench\Scanner.Analyzers\StellaOps.Bench.ScannerAnalyzers.Tests\StellaOps.Bench.ScannerAnalyzers.Tests.csproj", "{7B774DFD-2A52-4E34-90AE-2479DD820B78}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|Any CPU = Release|Any CPU + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {D9111701-26D8-4264-9EEA-6447BE21359B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D9111701-26D8-4264-9EEA-6447BE21359B}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D9111701-26D8-4264-9EEA-6447BE21359B}.Debug|x64.ActiveCfg = Debug|Any CPU + {D9111701-26D8-4264-9EEA-6447BE21359B}.Debug|x64.Build.0 = Debug|Any CPU + {D9111701-26D8-4264-9EEA-6447BE21359B}.Debug|x86.ActiveCfg = Debug|Any CPU + {D9111701-26D8-4264-9EEA-6447BE21359B}.Debug|x86.Build.0 = Debug|Any CPU + {D9111701-26D8-4264-9EEA-6447BE21359B}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D9111701-26D8-4264-9EEA-6447BE21359B}.Release|Any CPU.Build.0 = Release|Any CPU + {D9111701-26D8-4264-9EEA-6447BE21359B}.Release|x64.ActiveCfg = Release|Any CPU + {D9111701-26D8-4264-9EEA-6447BE21359B}.Release|x64.Build.0 = Release|Any CPU + {D9111701-26D8-4264-9EEA-6447BE21359B}.Release|x86.ActiveCfg = Release|Any CPU + {D9111701-26D8-4264-9EEA-6447BE21359B}.Release|x86.Build.0 = Release|Any CPU + {DA2C6C19-DE17-4774-9FF4-DD09006DC07D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {DA2C6C19-DE17-4774-9FF4-DD09006DC07D}.Debug|Any CPU.Build.0 = Debug|Any CPU + {DA2C6C19-DE17-4774-9FF4-DD09006DC07D}.Debug|x64.ActiveCfg = Debug|Any CPU + {DA2C6C19-DE17-4774-9FF4-DD09006DC07D}.Debug|x64.Build.0 = Debug|Any CPU + {DA2C6C19-DE17-4774-9FF4-DD09006DC07D}.Debug|x86.ActiveCfg = Debug|Any CPU + {DA2C6C19-DE17-4774-9FF4-DD09006DC07D}.Debug|x86.Build.0 = Debug|Any CPU + {DA2C6C19-DE17-4774-9FF4-DD09006DC07D}.Release|Any CPU.ActiveCfg = Release|Any CPU + {DA2C6C19-DE17-4774-9FF4-DD09006DC07D}.Release|Any CPU.Build.0 = Release|Any CPU + {DA2C6C19-DE17-4774-9FF4-DD09006DC07D}.Release|x64.ActiveCfg = Release|Any CPU + {DA2C6C19-DE17-4774-9FF4-DD09006DC07D}.Release|x64.Build.0 = Release|Any CPU + {DA2C6C19-DE17-4774-9FF4-DD09006DC07D}.Release|x86.ActiveCfg = Release|Any CPU + {DA2C6C19-DE17-4774-9FF4-DD09006DC07D}.Release|x86.Build.0 = Release|Any CPU + {EDD78A51-769D-4BEF-954C-F216D4B6A588}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {EDD78A51-769D-4BEF-954C-F216D4B6A588}.Debug|Any CPU.Build.0 = Debug|Any CPU + {EDD78A51-769D-4BEF-954C-F216D4B6A588}.Debug|x64.ActiveCfg = Debug|Any CPU + {EDD78A51-769D-4BEF-954C-F216D4B6A588}.Debug|x64.Build.0 = Debug|Any CPU + {EDD78A51-769D-4BEF-954C-F216D4B6A588}.Debug|x86.ActiveCfg = Debug|Any CPU + {EDD78A51-769D-4BEF-954C-F216D4B6A588}.Debug|x86.Build.0 = Debug|Any CPU + {EDD78A51-769D-4BEF-954C-F216D4B6A588}.Release|Any CPU.ActiveCfg = Release|Any CPU + {EDD78A51-769D-4BEF-954C-F216D4B6A588}.Release|Any CPU.Build.0 = Release|Any CPU + {EDD78A51-769D-4BEF-954C-F216D4B6A588}.Release|x64.ActiveCfg = Release|Any CPU + {EDD78A51-769D-4BEF-954C-F216D4B6A588}.Release|x64.Build.0 = Release|Any CPU + {EDD78A51-769D-4BEF-954C-F216D4B6A588}.Release|x86.ActiveCfg = Release|Any CPU + 
{EDD78A51-769D-4BEF-954C-F216D4B6A588}.Release|x86.Build.0 = Release|Any CPU + {E2A981C4-E682-4988-AE0A-AFEF708B394A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E2A981C4-E682-4988-AE0A-AFEF708B394A}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E2A981C4-E682-4988-AE0A-AFEF708B394A}.Debug|x64.ActiveCfg = Debug|Any CPU + {E2A981C4-E682-4988-AE0A-AFEF708B394A}.Debug|x64.Build.0 = Debug|Any CPU + {E2A981C4-E682-4988-AE0A-AFEF708B394A}.Debug|x86.ActiveCfg = Debug|Any CPU + {E2A981C4-E682-4988-AE0A-AFEF708B394A}.Debug|x86.Build.0 = Debug|Any CPU + {E2A981C4-E682-4988-AE0A-AFEF708B394A}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E2A981C4-E682-4988-AE0A-AFEF708B394A}.Release|Any CPU.Build.0 = Release|Any CPU + {E2A981C4-E682-4988-AE0A-AFEF708B394A}.Release|x64.ActiveCfg = Release|Any CPU + {E2A981C4-E682-4988-AE0A-AFEF708B394A}.Release|x64.Build.0 = Release|Any CPU + {E2A981C4-E682-4988-AE0A-AFEF708B394A}.Release|x86.ActiveCfg = Release|Any CPU + {E2A981C4-E682-4988-AE0A-AFEF708B394A}.Release|x86.Build.0 = Release|Any CPU + {57AE6DC2-209F-4B09-B4DF-B6A9CC559605}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {57AE6DC2-209F-4B09-B4DF-B6A9CC559605}.Debug|Any CPU.Build.0 = Debug|Any CPU + {57AE6DC2-209F-4B09-B4DF-B6A9CC559605}.Debug|x64.ActiveCfg = Debug|Any CPU + {57AE6DC2-209F-4B09-B4DF-B6A9CC559605}.Debug|x64.Build.0 = Debug|Any CPU + {57AE6DC2-209F-4B09-B4DF-B6A9CC559605}.Debug|x86.ActiveCfg = Debug|Any CPU + {57AE6DC2-209F-4B09-B4DF-B6A9CC559605}.Debug|x86.Build.0 = Debug|Any CPU + {57AE6DC2-209F-4B09-B4DF-B6A9CC559605}.Release|Any CPU.ActiveCfg = Release|Any CPU + {57AE6DC2-209F-4B09-B4DF-B6A9CC559605}.Release|Any CPU.Build.0 = Release|Any CPU + {57AE6DC2-209F-4B09-B4DF-B6A9CC559605}.Release|x64.ActiveCfg = Release|Any CPU + {57AE6DC2-209F-4B09-B4DF-B6A9CC559605}.Release|x64.Build.0 = Release|Any CPU + {57AE6DC2-209F-4B09-B4DF-B6A9CC559605}.Release|x86.ActiveCfg = Release|Any CPU + {57AE6DC2-209F-4B09-B4DF-B6A9CC559605}.Release|x86.Build.0 = Release|Any CPU + {E7F8F028-FD58-4E88-A01A-ED462D9AE154}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E7F8F028-FD58-4E88-A01A-ED462D9AE154}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E7F8F028-FD58-4E88-A01A-ED462D9AE154}.Debug|x64.ActiveCfg = Debug|Any CPU + {E7F8F028-FD58-4E88-A01A-ED462D9AE154}.Debug|x64.Build.0 = Debug|Any CPU + {E7F8F028-FD58-4E88-A01A-ED462D9AE154}.Debug|x86.ActiveCfg = Debug|Any CPU + {E7F8F028-FD58-4E88-A01A-ED462D9AE154}.Debug|x86.Build.0 = Debug|Any CPU + {E7F8F028-FD58-4E88-A01A-ED462D9AE154}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E7F8F028-FD58-4E88-A01A-ED462D9AE154}.Release|Any CPU.Build.0 = Release|Any CPU + {E7F8F028-FD58-4E88-A01A-ED462D9AE154}.Release|x64.ActiveCfg = Release|Any CPU + {E7F8F028-FD58-4E88-A01A-ED462D9AE154}.Release|x64.Build.0 = Release|Any CPU + {E7F8F028-FD58-4E88-A01A-ED462D9AE154}.Release|x86.ActiveCfg = Release|Any CPU + {E7F8F028-FD58-4E88-A01A-ED462D9AE154}.Release|x86.Build.0 = Release|Any CPU + {1D83E3BC-5753-4D32-84BC-7D6F4BD7B800}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {1D83E3BC-5753-4D32-84BC-7D6F4BD7B800}.Debug|Any CPU.Build.0 = Debug|Any CPU + {1D83E3BC-5753-4D32-84BC-7D6F4BD7B800}.Debug|x64.ActiveCfg = Debug|Any CPU + {1D83E3BC-5753-4D32-84BC-7D6F4BD7B800}.Debug|x64.Build.0 = Debug|Any CPU + {1D83E3BC-5753-4D32-84BC-7D6F4BD7B800}.Debug|x86.ActiveCfg = Debug|Any CPU + {1D83E3BC-5753-4D32-84BC-7D6F4BD7B800}.Debug|x86.Build.0 = Debug|Any CPU + {1D83E3BC-5753-4D32-84BC-7D6F4BD7B800}.Release|Any CPU.ActiveCfg = Release|Any CPU + {1D83E3BC-5753-4D32-84BC-7D6F4BD7B800}.Release|Any CPU.Build.0 = 
Release|Any CPU + {1D83E3BC-5753-4D32-84BC-7D6F4BD7B800}.Release|x64.ActiveCfg = Release|Any CPU + {1D83E3BC-5753-4D32-84BC-7D6F4BD7B800}.Release|x64.Build.0 = Release|Any CPU + {1D83E3BC-5753-4D32-84BC-7D6F4BD7B800}.Release|x86.ActiveCfg = Release|Any CPU + {1D83E3BC-5753-4D32-84BC-7D6F4BD7B800}.Release|x86.Build.0 = Release|Any CPU + {01D6C08E-A4CD-4B11-9021-E6756C3BB850}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {01D6C08E-A4CD-4B11-9021-E6756C3BB850}.Debug|Any CPU.Build.0 = Debug|Any CPU + {01D6C08E-A4CD-4B11-9021-E6756C3BB850}.Debug|x64.ActiveCfg = Debug|Any CPU + {01D6C08E-A4CD-4B11-9021-E6756C3BB850}.Debug|x64.Build.0 = Debug|Any CPU + {01D6C08E-A4CD-4B11-9021-E6756C3BB850}.Debug|x86.ActiveCfg = Debug|Any CPU + {01D6C08E-A4CD-4B11-9021-E6756C3BB850}.Debug|x86.Build.0 = Debug|Any CPU + {01D6C08E-A4CD-4B11-9021-E6756C3BB850}.Release|Any CPU.ActiveCfg = Release|Any CPU + {01D6C08E-A4CD-4B11-9021-E6756C3BB850}.Release|Any CPU.Build.0 = Release|Any CPU + {01D6C08E-A4CD-4B11-9021-E6756C3BB850}.Release|x64.ActiveCfg = Release|Any CPU + {01D6C08E-A4CD-4B11-9021-E6756C3BB850}.Release|x64.Build.0 = Release|Any CPU + {01D6C08E-A4CD-4B11-9021-E6756C3BB850}.Release|x86.ActiveCfg = Release|Any CPU + {01D6C08E-A4CD-4B11-9021-E6756C3BB850}.Release|x86.Build.0 = Release|Any CPU + {80FD3504-F8B2-44F3-8141-33F2570EB405}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {80FD3504-F8B2-44F3-8141-33F2570EB405}.Debug|Any CPU.Build.0 = Debug|Any CPU + {80FD3504-F8B2-44F3-8141-33F2570EB405}.Debug|x64.ActiveCfg = Debug|Any CPU + {80FD3504-F8B2-44F3-8141-33F2570EB405}.Debug|x64.Build.0 = Debug|Any CPU + {80FD3504-F8B2-44F3-8141-33F2570EB405}.Debug|x86.ActiveCfg = Debug|Any CPU + {80FD3504-F8B2-44F3-8141-33F2570EB405}.Debug|x86.Build.0 = Debug|Any CPU + {80FD3504-F8B2-44F3-8141-33F2570EB405}.Release|Any CPU.ActiveCfg = Release|Any CPU + {80FD3504-F8B2-44F3-8141-33F2570EB405}.Release|Any CPU.Build.0 = Release|Any CPU + {80FD3504-F8B2-44F3-8141-33F2570EB405}.Release|x64.ActiveCfg = Release|Any CPU + {80FD3504-F8B2-44F3-8141-33F2570EB405}.Release|x64.Build.0 = Release|Any CPU + {80FD3504-F8B2-44F3-8141-33F2570EB405}.Release|x86.ActiveCfg = Release|Any CPU + {80FD3504-F8B2-44F3-8141-33F2570EB405}.Release|x86.Build.0 = Release|Any CPU + {7B734C11-9112-49CE-907F-A36B8D71C409}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7B734C11-9112-49CE-907F-A36B8D71C409}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7B734C11-9112-49CE-907F-A36B8D71C409}.Debug|x64.ActiveCfg = Debug|Any CPU + {7B734C11-9112-49CE-907F-A36B8D71C409}.Debug|x64.Build.0 = Debug|Any CPU + {7B734C11-9112-49CE-907F-A36B8D71C409}.Debug|x86.ActiveCfg = Debug|Any CPU + {7B734C11-9112-49CE-907F-A36B8D71C409}.Debug|x86.Build.0 = Debug|Any CPU + {7B734C11-9112-49CE-907F-A36B8D71C409}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7B734C11-9112-49CE-907F-A36B8D71C409}.Release|Any CPU.Build.0 = Release|Any CPU + {7B734C11-9112-49CE-907F-A36B8D71C409}.Release|x64.ActiveCfg = Release|Any CPU + {7B734C11-9112-49CE-907F-A36B8D71C409}.Release|x64.Build.0 = Release|Any CPU + {7B734C11-9112-49CE-907F-A36B8D71C409}.Release|x86.ActiveCfg = Release|Any CPU + {7B734C11-9112-49CE-907F-A36B8D71C409}.Release|x86.Build.0 = Release|Any CPU + {515F0CFB-E9B5-4199-BECF-CCF98DE8B29D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {515F0CFB-E9B5-4199-BECF-CCF98DE8B29D}.Debug|Any CPU.Build.0 = Debug|Any CPU + {515F0CFB-E9B5-4199-BECF-CCF98DE8B29D}.Debug|x64.ActiveCfg = Debug|Any CPU + {515F0CFB-E9B5-4199-BECF-CCF98DE8B29D}.Debug|x64.Build.0 = Debug|Any CPU + 
{515F0CFB-E9B5-4199-BECF-CCF98DE8B29D}.Debug|x86.ActiveCfg = Debug|Any CPU + {515F0CFB-E9B5-4199-BECF-CCF98DE8B29D}.Debug|x86.Build.0 = Debug|Any CPU + {515F0CFB-E9B5-4199-BECF-CCF98DE8B29D}.Release|Any CPU.ActiveCfg = Release|Any CPU + {515F0CFB-E9B5-4199-BECF-CCF98DE8B29D}.Release|Any CPU.Build.0 = Release|Any CPU + {515F0CFB-E9B5-4199-BECF-CCF98DE8B29D}.Release|x64.ActiveCfg = Release|Any CPU + {515F0CFB-E9B5-4199-BECF-CCF98DE8B29D}.Release|x64.Build.0 = Release|Any CPU + {515F0CFB-E9B5-4199-BECF-CCF98DE8B29D}.Release|x86.ActiveCfg = Release|Any CPU + {515F0CFB-E9B5-4199-BECF-CCF98DE8B29D}.Release|x86.Build.0 = Release|Any CPU + {82C007C4-8589-4592-AF79-636F16CEEA3E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {82C007C4-8589-4592-AF79-636F16CEEA3E}.Debug|Any CPU.Build.0 = Debug|Any CPU + {82C007C4-8589-4592-AF79-636F16CEEA3E}.Debug|x64.ActiveCfg = Debug|Any CPU + {82C007C4-8589-4592-AF79-636F16CEEA3E}.Debug|x64.Build.0 = Debug|Any CPU + {82C007C4-8589-4592-AF79-636F16CEEA3E}.Debug|x86.ActiveCfg = Debug|Any CPU + {82C007C4-8589-4592-AF79-636F16CEEA3E}.Debug|x86.Build.0 = Debug|Any CPU + {82C007C4-8589-4592-AF79-636F16CEEA3E}.Release|Any CPU.ActiveCfg = Release|Any CPU + {82C007C4-8589-4592-AF79-636F16CEEA3E}.Release|Any CPU.Build.0 = Release|Any CPU + {82C007C4-8589-4592-AF79-636F16CEEA3E}.Release|x64.ActiveCfg = Release|Any CPU + {82C007C4-8589-4592-AF79-636F16CEEA3E}.Release|x64.Build.0 = Release|Any CPU + {82C007C4-8589-4592-AF79-636F16CEEA3E}.Release|x86.ActiveCfg = Release|Any CPU + {82C007C4-8589-4592-AF79-636F16CEEA3E}.Release|x86.Build.0 = Release|Any CPU + {6595ACF8-0109-4717-9D65-EB5B646AAC33}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {6595ACF8-0109-4717-9D65-EB5B646AAC33}.Debug|Any CPU.Build.0 = Debug|Any CPU + {6595ACF8-0109-4717-9D65-EB5B646AAC33}.Debug|x64.ActiveCfg = Debug|Any CPU + {6595ACF8-0109-4717-9D65-EB5B646AAC33}.Debug|x64.Build.0 = Debug|Any CPU + {6595ACF8-0109-4717-9D65-EB5B646AAC33}.Debug|x86.ActiveCfg = Debug|Any CPU + {6595ACF8-0109-4717-9D65-EB5B646AAC33}.Debug|x86.Build.0 = Debug|Any CPU + {6595ACF8-0109-4717-9D65-EB5B646AAC33}.Release|Any CPU.ActiveCfg = Release|Any CPU + {6595ACF8-0109-4717-9D65-EB5B646AAC33}.Release|Any CPU.Build.0 = Release|Any CPU + {6595ACF8-0109-4717-9D65-EB5B646AAC33}.Release|x64.ActiveCfg = Release|Any CPU + {6595ACF8-0109-4717-9D65-EB5B646AAC33}.Release|x64.Build.0 = Release|Any CPU + {6595ACF8-0109-4717-9D65-EB5B646AAC33}.Release|x86.ActiveCfg = Release|Any CPU + {6595ACF8-0109-4717-9D65-EB5B646AAC33}.Release|x86.Build.0 = Release|Any CPU + {93618630-2606-423B-9F43-84A5198E1FFB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {93618630-2606-423B-9F43-84A5198E1FFB}.Debug|Any CPU.Build.0 = Debug|Any CPU + {93618630-2606-423B-9F43-84A5198E1FFB}.Debug|x64.ActiveCfg = Debug|Any CPU + {93618630-2606-423B-9F43-84A5198E1FFB}.Debug|x64.Build.0 = Debug|Any CPU + {93618630-2606-423B-9F43-84A5198E1FFB}.Debug|x86.ActiveCfg = Debug|Any CPU + {93618630-2606-423B-9F43-84A5198E1FFB}.Debug|x86.Build.0 = Debug|Any CPU + {93618630-2606-423B-9F43-84A5198E1FFB}.Release|Any CPU.ActiveCfg = Release|Any CPU + {93618630-2606-423B-9F43-84A5198E1FFB}.Release|Any CPU.Build.0 = Release|Any CPU + {93618630-2606-423B-9F43-84A5198E1FFB}.Release|x64.ActiveCfg = Release|Any CPU + {93618630-2606-423B-9F43-84A5198E1FFB}.Release|x64.Build.0 = Release|Any CPU + {93618630-2606-423B-9F43-84A5198E1FFB}.Release|x86.ActiveCfg = Release|Any CPU + {93618630-2606-423B-9F43-84A5198E1FFB}.Release|x86.Build.0 = Release|Any CPU + {4E2D2CC1-C671-4067-80DB-348B94E7B1BB}.Debug|Any 
CPU.ActiveCfg = Debug|Any CPU + {4E2D2CC1-C671-4067-80DB-348B94E7B1BB}.Debug|Any CPU.Build.0 = Debug|Any CPU + {4E2D2CC1-C671-4067-80DB-348B94E7B1BB}.Debug|x64.ActiveCfg = Debug|Any CPU + {4E2D2CC1-C671-4067-80DB-348B94E7B1BB}.Debug|x64.Build.0 = Debug|Any CPU + {4E2D2CC1-C671-4067-80DB-348B94E7B1BB}.Debug|x86.ActiveCfg = Debug|Any CPU + {4E2D2CC1-C671-4067-80DB-348B94E7B1BB}.Debug|x86.Build.0 = Debug|Any CPU + {4E2D2CC1-C671-4067-80DB-348B94E7B1BB}.Release|Any CPU.ActiveCfg = Release|Any CPU + {4E2D2CC1-C671-4067-80DB-348B94E7B1BB}.Release|Any CPU.Build.0 = Release|Any CPU + {4E2D2CC1-C671-4067-80DB-348B94E7B1BB}.Release|x64.ActiveCfg = Release|Any CPU + {4E2D2CC1-C671-4067-80DB-348B94E7B1BB}.Release|x64.Build.0 = Release|Any CPU + {4E2D2CC1-C671-4067-80DB-348B94E7B1BB}.Release|x86.ActiveCfg = Release|Any CPU + {4E2D2CC1-C671-4067-80DB-348B94E7B1BB}.Release|x86.Build.0 = Release|Any CPU + {CAF75FA7-B552-4ED6-AE28-31542C8D3DF2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {CAF75FA7-B552-4ED6-AE28-31542C8D3DF2}.Debug|Any CPU.Build.0 = Debug|Any CPU + {CAF75FA7-B552-4ED6-AE28-31542C8D3DF2}.Debug|x64.ActiveCfg = Debug|Any CPU + {CAF75FA7-B552-4ED6-AE28-31542C8D3DF2}.Debug|x64.Build.0 = Debug|Any CPU + {CAF75FA7-B552-4ED6-AE28-31542C8D3DF2}.Debug|x86.ActiveCfg = Debug|Any CPU + {CAF75FA7-B552-4ED6-AE28-31542C8D3DF2}.Debug|x86.Build.0 = Debug|Any CPU + {CAF75FA7-B552-4ED6-AE28-31542C8D3DF2}.Release|Any CPU.ActiveCfg = Release|Any CPU + {CAF75FA7-B552-4ED6-AE28-31542C8D3DF2}.Release|Any CPU.Build.0 = Release|Any CPU + {CAF75FA7-B552-4ED6-AE28-31542C8D3DF2}.Release|x64.ActiveCfg = Release|Any CPU + {CAF75FA7-B552-4ED6-AE28-31542C8D3DF2}.Release|x64.Build.0 = Release|Any CPU + {CAF75FA7-B552-4ED6-AE28-31542C8D3DF2}.Release|x86.ActiveCfg = Release|Any CPU + {CAF75FA7-B552-4ED6-AE28-31542C8D3DF2}.Release|x86.Build.0 = Release|Any CPU + {5734241F-0E96-4BC7-9D02-C49006DBCEAB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {5734241F-0E96-4BC7-9D02-C49006DBCEAB}.Debug|Any CPU.Build.0 = Debug|Any CPU + {5734241F-0E96-4BC7-9D02-C49006DBCEAB}.Debug|x64.ActiveCfg = Debug|Any CPU + {5734241F-0E96-4BC7-9D02-C49006DBCEAB}.Debug|x64.Build.0 = Debug|Any CPU + {5734241F-0E96-4BC7-9D02-C49006DBCEAB}.Debug|x86.ActiveCfg = Debug|Any CPU + {5734241F-0E96-4BC7-9D02-C49006DBCEAB}.Debug|x86.Build.0 = Debug|Any CPU + {5734241F-0E96-4BC7-9D02-C49006DBCEAB}.Release|Any CPU.ActiveCfg = Release|Any CPU + {5734241F-0E96-4BC7-9D02-C49006DBCEAB}.Release|Any CPU.Build.0 = Release|Any CPU + {5734241F-0E96-4BC7-9D02-C49006DBCEAB}.Release|x64.ActiveCfg = Release|Any CPU + {5734241F-0E96-4BC7-9D02-C49006DBCEAB}.Release|x64.Build.0 = Release|Any CPU + {5734241F-0E96-4BC7-9D02-C49006DBCEAB}.Release|x86.ActiveCfg = Release|Any CPU + {5734241F-0E96-4BC7-9D02-C49006DBCEAB}.Release|x86.Build.0 = Release|Any CPU + {4BD97E56-6D2A-4280-B006-6D4F9590F254}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {4BD97E56-6D2A-4280-B006-6D4F9590F254}.Debug|Any CPU.Build.0 = Debug|Any CPU + {4BD97E56-6D2A-4280-B006-6D4F9590F254}.Debug|x64.ActiveCfg = Debug|Any CPU + {4BD97E56-6D2A-4280-B006-6D4F9590F254}.Debug|x64.Build.0 = Debug|Any CPU + {4BD97E56-6D2A-4280-B006-6D4F9590F254}.Debug|x86.ActiveCfg = Debug|Any CPU + {4BD97E56-6D2A-4280-B006-6D4F9590F254}.Debug|x86.Build.0 = Debug|Any CPU + {4BD97E56-6D2A-4280-B006-6D4F9590F254}.Release|Any CPU.ActiveCfg = Release|Any CPU + {4BD97E56-6D2A-4280-B006-6D4F9590F254}.Release|Any CPU.Build.0 = Release|Any CPU + {4BD97E56-6D2A-4280-B006-6D4F9590F254}.Release|x64.ActiveCfg = Release|Any CPU + 
{4BD97E56-6D2A-4280-B006-6D4F9590F254}.Release|x64.Build.0 = Release|Any CPU + {4BD97E56-6D2A-4280-B006-6D4F9590F254}.Release|x86.ActiveCfg = Release|Any CPU + {4BD97E56-6D2A-4280-B006-6D4F9590F254}.Release|x86.Build.0 = Release|Any CPU + {6A1616E0-F2F0-4BC6-8FF0-6B1139CFEF07}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {6A1616E0-F2F0-4BC6-8FF0-6B1139CFEF07}.Debug|Any CPU.Build.0 = Debug|Any CPU + {6A1616E0-F2F0-4BC6-8FF0-6B1139CFEF07}.Debug|x64.ActiveCfg = Debug|Any CPU + {6A1616E0-F2F0-4BC6-8FF0-6B1139CFEF07}.Debug|x64.Build.0 = Debug|Any CPU + {6A1616E0-F2F0-4BC6-8FF0-6B1139CFEF07}.Debug|x86.ActiveCfg = Debug|Any CPU + {6A1616E0-F2F0-4BC6-8FF0-6B1139CFEF07}.Debug|x86.Build.0 = Debug|Any CPU + {6A1616E0-F2F0-4BC6-8FF0-6B1139CFEF07}.Release|Any CPU.ActiveCfg = Release|Any CPU + {6A1616E0-F2F0-4BC6-8FF0-6B1139CFEF07}.Release|Any CPU.Build.0 = Release|Any CPU + {6A1616E0-F2F0-4BC6-8FF0-6B1139CFEF07}.Release|x64.ActiveCfg = Release|Any CPU + {6A1616E0-F2F0-4BC6-8FF0-6B1139CFEF07}.Release|x64.Build.0 = Release|Any CPU + {6A1616E0-F2F0-4BC6-8FF0-6B1139CFEF07}.Release|x86.ActiveCfg = Release|Any CPU + {6A1616E0-F2F0-4BC6-8FF0-6B1139CFEF07}.Release|x86.Build.0 = Release|Any CPU + {02B6B193-C762-4A23-B09E-83FC97C3DBAA}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {02B6B193-C762-4A23-B09E-83FC97C3DBAA}.Debug|Any CPU.Build.0 = Debug|Any CPU + {02B6B193-C762-4A23-B09E-83FC97C3DBAA}.Debug|x64.ActiveCfg = Debug|Any CPU + {02B6B193-C762-4A23-B09E-83FC97C3DBAA}.Debug|x64.Build.0 = Debug|Any CPU + {02B6B193-C762-4A23-B09E-83FC97C3DBAA}.Debug|x86.ActiveCfg = Debug|Any CPU + {02B6B193-C762-4A23-B09E-83FC97C3DBAA}.Debug|x86.Build.0 = Debug|Any CPU + {02B6B193-C762-4A23-B09E-83FC97C3DBAA}.Release|Any CPU.ActiveCfg = Release|Any CPU + {02B6B193-C762-4A23-B09E-83FC97C3DBAA}.Release|Any CPU.Build.0 = Release|Any CPU + {02B6B193-C762-4A23-B09E-83FC97C3DBAA}.Release|x64.ActiveCfg = Release|Any CPU + {02B6B193-C762-4A23-B09E-83FC97C3DBAA}.Release|x64.Build.0 = Release|Any CPU + {02B6B193-C762-4A23-B09E-83FC97C3DBAA}.Release|x86.ActiveCfg = Release|Any CPU + {02B6B193-C762-4A23-B09E-83FC97C3DBAA}.Release|x86.Build.0 = Release|Any CPU + {DF2EBD7F-BDA8-4E1E-AF38-28374D57CB2F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {DF2EBD7F-BDA8-4E1E-AF38-28374D57CB2F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {DF2EBD7F-BDA8-4E1E-AF38-28374D57CB2F}.Debug|x64.ActiveCfg = Debug|Any CPU + {DF2EBD7F-BDA8-4E1E-AF38-28374D57CB2F}.Debug|x64.Build.0 = Debug|Any CPU + {DF2EBD7F-BDA8-4E1E-AF38-28374D57CB2F}.Debug|x86.ActiveCfg = Debug|Any CPU + {DF2EBD7F-BDA8-4E1E-AF38-28374D57CB2F}.Debug|x86.Build.0 = Debug|Any CPU + {DF2EBD7F-BDA8-4E1E-AF38-28374D57CB2F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {DF2EBD7F-BDA8-4E1E-AF38-28374D57CB2F}.Release|Any CPU.Build.0 = Release|Any CPU + {DF2EBD7F-BDA8-4E1E-AF38-28374D57CB2F}.Release|x64.ActiveCfg = Release|Any CPU + {DF2EBD7F-BDA8-4E1E-AF38-28374D57CB2F}.Release|x64.Build.0 = Release|Any CPU + {DF2EBD7F-BDA8-4E1E-AF38-28374D57CB2F}.Release|x86.ActiveCfg = Release|Any CPU + {DF2EBD7F-BDA8-4E1E-AF38-28374D57CB2F}.Release|x86.Build.0 = Release|Any CPU + {454B823F-3521-491D-B8AF-11D821929D38}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {454B823F-3521-491D-B8AF-11D821929D38}.Debug|Any CPU.Build.0 = Debug|Any CPU + {454B823F-3521-491D-B8AF-11D821929D38}.Debug|x64.ActiveCfg = Debug|Any CPU + {454B823F-3521-491D-B8AF-11D821929D38}.Debug|x64.Build.0 = Debug|Any CPU + {454B823F-3521-491D-B8AF-11D821929D38}.Debug|x86.ActiveCfg = Debug|Any CPU + {454B823F-3521-491D-B8AF-11D821929D38}.Debug|x86.Build.0 = Debug|Any 
CPU + {454B823F-3521-491D-B8AF-11D821929D38}.Release|Any CPU.ActiveCfg = Release|Any CPU + {454B823F-3521-491D-B8AF-11D821929D38}.Release|Any CPU.Build.0 = Release|Any CPU + {454B823F-3521-491D-B8AF-11D821929D38}.Release|x64.ActiveCfg = Release|Any CPU + {454B823F-3521-491D-B8AF-11D821929D38}.Release|x64.Build.0 = Release|Any CPU + {454B823F-3521-491D-B8AF-11D821929D38}.Release|x86.ActiveCfg = Release|Any CPU + {454B823F-3521-491D-B8AF-11D821929D38}.Release|x86.Build.0 = Release|Any CPU + {8579FCCC-2E97-4681-AB26-E93357D1F26B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {8579FCCC-2E97-4681-AB26-E93357D1F26B}.Debug|Any CPU.Build.0 = Debug|Any CPU + {8579FCCC-2E97-4681-AB26-E93357D1F26B}.Debug|x64.ActiveCfg = Debug|Any CPU + {8579FCCC-2E97-4681-AB26-E93357D1F26B}.Debug|x64.Build.0 = Debug|Any CPU + {8579FCCC-2E97-4681-AB26-E93357D1F26B}.Debug|x86.ActiveCfg = Debug|Any CPU + {8579FCCC-2E97-4681-AB26-E93357D1F26B}.Debug|x86.Build.0 = Debug|Any CPU + {8579FCCC-2E97-4681-AB26-E93357D1F26B}.Release|Any CPU.ActiveCfg = Release|Any CPU + {8579FCCC-2E97-4681-AB26-E93357D1F26B}.Release|Any CPU.Build.0 = Release|Any CPU + {8579FCCC-2E97-4681-AB26-E93357D1F26B}.Release|x64.ActiveCfg = Release|Any CPU + {8579FCCC-2E97-4681-AB26-E93357D1F26B}.Release|x64.Build.0 = Release|Any CPU + {8579FCCC-2E97-4681-AB26-E93357D1F26B}.Release|x86.ActiveCfg = Release|Any CPU + {8579FCCC-2E97-4681-AB26-E93357D1F26B}.Release|x86.Build.0 = Release|Any CPU + {6D0186CB-461C-4DD6-9305-23F5AF5F41A2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {6D0186CB-461C-4DD6-9305-23F5AF5F41A2}.Debug|Any CPU.Build.0 = Debug|Any CPU + {6D0186CB-461C-4DD6-9305-23F5AF5F41A2}.Debug|x64.ActiveCfg = Debug|Any CPU + {6D0186CB-461C-4DD6-9305-23F5AF5F41A2}.Debug|x64.Build.0 = Debug|Any CPU + {6D0186CB-461C-4DD6-9305-23F5AF5F41A2}.Debug|x86.ActiveCfg = Debug|Any CPU + {6D0186CB-461C-4DD6-9305-23F5AF5F41A2}.Debug|x86.Build.0 = Debug|Any CPU + {6D0186CB-461C-4DD6-9305-23F5AF5F41A2}.Release|Any CPU.ActiveCfg = Release|Any CPU + {6D0186CB-461C-4DD6-9305-23F5AF5F41A2}.Release|Any CPU.Build.0 = Release|Any CPU + {6D0186CB-461C-4DD6-9305-23F5AF5F41A2}.Release|x64.ActiveCfg = Release|Any CPU + {6D0186CB-461C-4DD6-9305-23F5AF5F41A2}.Release|x64.Build.0 = Release|Any CPU + {6D0186CB-461C-4DD6-9305-23F5AF5F41A2}.Release|x86.ActiveCfg = Release|Any CPU + {6D0186CB-461C-4DD6-9305-23F5AF5F41A2}.Release|x86.Build.0 = Release|Any CPU + {9594C6EC-25BA-45BF-A91F-B97D6D2126CD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {9594C6EC-25BA-45BF-A91F-B97D6D2126CD}.Debug|Any CPU.Build.0 = Debug|Any CPU + {9594C6EC-25BA-45BF-A91F-B97D6D2126CD}.Debug|x64.ActiveCfg = Debug|Any CPU + {9594C6EC-25BA-45BF-A91F-B97D6D2126CD}.Debug|x64.Build.0 = Debug|Any CPU + {9594C6EC-25BA-45BF-A91F-B97D6D2126CD}.Debug|x86.ActiveCfg = Debug|Any CPU + {9594C6EC-25BA-45BF-A91F-B97D6D2126CD}.Debug|x86.Build.0 = Debug|Any CPU + {9594C6EC-25BA-45BF-A91F-B97D6D2126CD}.Release|Any CPU.ActiveCfg = Release|Any CPU + {9594C6EC-25BA-45BF-A91F-B97D6D2126CD}.Release|Any CPU.Build.0 = Release|Any CPU + {9594C6EC-25BA-45BF-A91F-B97D6D2126CD}.Release|x64.ActiveCfg = Release|Any CPU + {9594C6EC-25BA-45BF-A91F-B97D6D2126CD}.Release|x64.Build.0 = Release|Any CPU + {9594C6EC-25BA-45BF-A91F-B97D6D2126CD}.Release|x86.ActiveCfg = Release|Any CPU + {9594C6EC-25BA-45BF-A91F-B97D6D2126CD}.Release|x86.Build.0 = Release|Any CPU + {7B774DFD-2A52-4E34-90AE-2479DD820B78}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7B774DFD-2A52-4E34-90AE-2479DD820B78}.Debug|Any CPU.Build.0 = Debug|Any CPU + 
{7B774DFD-2A52-4E34-90AE-2479DD820B78}.Debug|x64.ActiveCfg = Debug|Any CPU + {7B774DFD-2A52-4E34-90AE-2479DD820B78}.Debug|x64.Build.0 = Debug|Any CPU + {7B774DFD-2A52-4E34-90AE-2479DD820B78}.Debug|x86.ActiveCfg = Debug|Any CPU + {7B774DFD-2A52-4E34-90AE-2479DD820B78}.Debug|x86.Build.0 = Debug|Any CPU + {7B774DFD-2A52-4E34-90AE-2479DD820B78}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7B774DFD-2A52-4E34-90AE-2479DD820B78}.Release|Any CPU.Build.0 = Release|Any CPU + {7B774DFD-2A52-4E34-90AE-2479DD820B78}.Release|x64.ActiveCfg = Release|Any CPU + {7B774DFD-2A52-4E34-90AE-2479DD820B78}.Release|x64.Build.0 = Release|Any CPU + {7B774DFD-2A52-4E34-90AE-2479DD820B78}.Release|x86.ActiveCfg = Release|Any CPU + {7B774DFD-2A52-4E34-90AE-2479DD820B78}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(NestedProjects) = preSolution + {69949CE0-F59D-CF46-D9C1-E95AB6BB2E4D} = {1553F566-661E-A2F5-811B-F74BF45C44CC} + {D9111701-26D8-4264-9EEA-6447BE21359B} = {69949CE0-F59D-CF46-D9C1-E95AB6BB2E4D} + {DA2C6C19-DE17-4774-9FF4-DD09006DC07D} = {69949CE0-F59D-CF46-D9C1-E95AB6BB2E4D} + {4C3B55EE-3F9B-9266-8221-1CC629B4666E} = {1553F566-661E-A2F5-811B-F74BF45C44CC} + {EDD78A51-769D-4BEF-954C-F216D4B6A588} = {4C3B55EE-3F9B-9266-8221-1CC629B4666E} + {E2A981C4-E682-4988-AE0A-AFEF708B394A} = {4C3B55EE-3F9B-9266-8221-1CC629B4666E} + {2A739D1E-B671-CFCB-8E07-CB70CFCF6480} = {1553F566-661E-A2F5-811B-F74BF45C44CC} + {57AE6DC2-209F-4B09-B4DF-B6A9CC559605} = {2A739D1E-B671-CFCB-8E07-CB70CFCF6480} + {1D83E3BC-5753-4D32-84BC-7D6F4BD7B800} = {2A739D1E-B671-CFCB-8E07-CB70CFCF6480} + {CBDF819E-923F-A07F-78D9-D599DD28197E} = {1553F566-661E-A2F5-811B-F74BF45C44CC} + {01D6C08E-A4CD-4B11-9021-E6756C3BB850} = {CBDF819E-923F-A07F-78D9-D599DD28197E} + {697EB1FA-E633-9F7D-F6B7-BDABA06A15F7} = {1553F566-661E-A2F5-811B-F74BF45C44CC} + {7B734C11-9112-49CE-907F-A36B8D71C409} = {697EB1FA-E633-9F7D-F6B7-BDABA06A15F7} + {7B774DFD-2A52-4E34-90AE-2479DD820B78} = {697EB1FA-E633-9F7D-F6B7-BDABA06A15F7} + EndGlobalSection +EndGlobal diff --git a/src/StellaOps.Bench/LinkNotMerge.Vex/README.md b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/README.md similarity index 87% rename from src/StellaOps.Bench/LinkNotMerge.Vex/README.md rename to src/Bench/StellaOps.Bench/LinkNotMerge.Vex/README.md index 74efc234..e2e1c1c0 100644 --- a/src/StellaOps.Bench/LinkNotMerge.Vex/README.md +++ b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/README.md @@ -16,7 +16,7 @@ Measures synthetic VEX observation ingest and event emission throughput for the ```bash dotnet run \ - --project src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.csproj \ + --project src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.csproj \ -- \ --csv out/linknotmerge-vex-bench.csv \ --json out/linknotmerge-vex-bench.json \ diff --git a/src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.Tests/BaselineLoaderTests.cs b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.Tests/BaselineLoaderTests.cs similarity index 97% rename from src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.Tests/BaselineLoaderTests.cs rename to src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.Tests/BaselineLoaderTests.cs index 7c00f17f..fc2859de 100644 --- 
a/src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.Tests/BaselineLoaderTests.cs +++ b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.Tests/BaselineLoaderTests.cs @@ -1,37 +1,37 @@ -using System.IO; -using System.Threading; -using System.Threading.Tasks; -using StellaOps.Bench.LinkNotMerge.Vex.Baseline; -using Xunit; - -namespace StellaOps.Bench.LinkNotMerge.Vex.Tests; - -public sealed class BaselineLoaderTests -{ - [Fact] - public async Task LoadAsync_ReadsEntries() - { - var path = Path.GetTempFileName(); - try - { - await File.WriteAllTextAsync( - path, - "scenario,iterations,observations,statements,events,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_observation_throughput_per_sec,min_observation_throughput_per_sec,mean_event_throughput_per_sec,min_event_throughput_per_sec,max_allocated_mb\n" + - "vex_ingest_baseline,5,4000,24000,12000,620.5,700.1,820.9,320.5,300.0,9800.0,9100.0,4200.0,3900.0,150.0\n"); - - var baseline = await BaselineLoader.LoadAsync(path, CancellationToken.None); - var entry = Assert.Single(baseline); - - Assert.Equal("vex_ingest_baseline", entry.Key); - Assert.Equal(4000, entry.Value.Observations); - Assert.Equal(24000, entry.Value.Statements); - Assert.Equal(12000, entry.Value.Events); - Assert.Equal(700.1, entry.Value.P95TotalMs); - Assert.Equal(3900.0, entry.Value.MinEventThroughputPerSecond); - } - finally - { - File.Delete(path); - } - } -} +using System.IO; +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Bench.LinkNotMerge.Vex.Baseline; +using Xunit; + +namespace StellaOps.Bench.LinkNotMerge.Vex.Tests; + +public sealed class BaselineLoaderTests +{ + [Fact] + public async Task LoadAsync_ReadsEntries() + { + var path = Path.GetTempFileName(); + try + { + await File.WriteAllTextAsync( + path, + "scenario,iterations,observations,statements,events,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_observation_throughput_per_sec,min_observation_throughput_per_sec,mean_event_throughput_per_sec,min_event_throughput_per_sec,max_allocated_mb\n" + + "vex_ingest_baseline,5,4000,24000,12000,620.5,700.1,820.9,320.5,300.0,9800.0,9100.0,4200.0,3900.0,150.0\n"); + + var baseline = await BaselineLoader.LoadAsync(path, CancellationToken.None); + var entry = Assert.Single(baseline); + + Assert.Equal("vex_ingest_baseline", entry.Key); + Assert.Equal(4000, entry.Value.Observations); + Assert.Equal(24000, entry.Value.Statements); + Assert.Equal(12000, entry.Value.Events); + Assert.Equal(700.1, entry.Value.P95TotalMs); + Assert.Equal(3900.0, entry.Value.MinEventThroughputPerSecond); + } + finally + { + File.Delete(path); + } + } +} diff --git a/src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.Tests/BenchmarkScenarioReportTests.cs b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.Tests/BenchmarkScenarioReportTests.cs similarity index 97% rename from src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.Tests/BenchmarkScenarioReportTests.cs rename to src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.Tests/BenchmarkScenarioReportTests.cs index 2f4064ef..a8f1769a 100644 --- a/src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.Tests/BenchmarkScenarioReportTests.cs +++ b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.Tests/BenchmarkScenarioReportTests.cs @@ -1,83 +1,83 @@ -using 
StellaOps.Bench.LinkNotMerge.Vex.Baseline; -using StellaOps.Bench.LinkNotMerge.Vex.Reporting; -using Xunit; - -namespace StellaOps.Bench.LinkNotMerge.Vex.Tests; - -public sealed class BenchmarkScenarioReportTests -{ - [Fact] - public void RegressionDetection_FlagsBreaches() - { - var result = new VexScenarioResult( - Id: "scenario", - Label: "Scenario", - Iterations: 3, - ObservationCount: 1000, - AliasGroups: 100, - StatementCount: 6000, - EventCount: 3200, - TotalStatistics: new DurationStatistics(600, 700, 750), - InsertStatistics: new DurationStatistics(320, 360, 380), - CorrelationStatistics: new DurationStatistics(280, 320, 340), - ObservationThroughputStatistics: new ThroughputStatistics(8000, 7000), - EventThroughputStatistics: new ThroughputStatistics(3500, 3200), - AllocationStatistics: new AllocationStatistics(180), - ThresholdMs: null, - MinObservationThroughputPerSecond: null, - MinEventThroughputPerSecond: null, - MaxAllocatedThresholdMb: null); - - var baseline = new BaselineEntry( - ScenarioId: "scenario", - Iterations: 3, - Observations: 1000, - Statements: 6000, - Events: 3200, - MeanTotalMs: 520, - P95TotalMs: 560, - MaxTotalMs: 580, - MeanInsertMs: 250, - MeanCorrelationMs: 260, - MeanObservationThroughputPerSecond: 9000, - MinObservationThroughputPerSecond: 8500, - MeanEventThroughputPerSecond: 4200, - MinEventThroughputPerSecond: 3800, - MaxAllocatedMb: 140); - - var report = new BenchmarkScenarioReport(result, baseline, regressionLimit: 1.1); - - Assert.True(report.DurationRegressionBreached); - Assert.True(report.ObservationThroughputRegressionBreached); - Assert.True(report.EventThroughputRegressionBreached); - Assert.Contains(report.BuildRegressionFailureMessages(), message => message.Contains("event throughput")); - } - - [Fact] - public void RegressionDetection_NoBaseline_NoBreaches() - { - var result = new VexScenarioResult( - Id: "scenario", - Label: "Scenario", - Iterations: 3, - ObservationCount: 1000, - AliasGroups: 100, - StatementCount: 6000, - EventCount: 3200, - TotalStatistics: new DurationStatistics(480, 520, 540), - InsertStatistics: new DurationStatistics(260, 280, 300), - CorrelationStatistics: new DurationStatistics(220, 240, 260), - ObservationThroughputStatistics: new ThroughputStatistics(9000, 8800), - EventThroughputStatistics: new ThroughputStatistics(4200, 4100), - AllocationStatistics: new AllocationStatistics(150), - ThresholdMs: null, - MinObservationThroughputPerSecond: null, - MinEventThroughputPerSecond: null, - MaxAllocatedThresholdMb: null); - - var report = new BenchmarkScenarioReport(result, baseline: null, regressionLimit: null); - - Assert.False(report.RegressionBreached); - Assert.Empty(report.BuildRegressionFailureMessages()); - } -} +using StellaOps.Bench.LinkNotMerge.Vex.Baseline; +using StellaOps.Bench.LinkNotMerge.Vex.Reporting; +using Xunit; + +namespace StellaOps.Bench.LinkNotMerge.Vex.Tests; + +public sealed class BenchmarkScenarioReportTests +{ + [Fact] + public void RegressionDetection_FlagsBreaches() + { + var result = new VexScenarioResult( + Id: "scenario", + Label: "Scenario", + Iterations: 3, + ObservationCount: 1000, + AliasGroups: 100, + StatementCount: 6000, + EventCount: 3200, + TotalStatistics: new DurationStatistics(600, 700, 750), + InsertStatistics: new DurationStatistics(320, 360, 380), + CorrelationStatistics: new DurationStatistics(280, 320, 340), + ObservationThroughputStatistics: new ThroughputStatistics(8000, 7000), + EventThroughputStatistics: new ThroughputStatistics(3500, 3200), + 
AllocationStatistics: new AllocationStatistics(180), + ThresholdMs: null, + MinObservationThroughputPerSecond: null, + MinEventThroughputPerSecond: null, + MaxAllocatedThresholdMb: null); + + var baseline = new BaselineEntry( + ScenarioId: "scenario", + Iterations: 3, + Observations: 1000, + Statements: 6000, + Events: 3200, + MeanTotalMs: 520, + P95TotalMs: 560, + MaxTotalMs: 580, + MeanInsertMs: 250, + MeanCorrelationMs: 260, + MeanObservationThroughputPerSecond: 9000, + MinObservationThroughputPerSecond: 8500, + MeanEventThroughputPerSecond: 4200, + MinEventThroughputPerSecond: 3800, + MaxAllocatedMb: 140); + + var report = new BenchmarkScenarioReport(result, baseline, regressionLimit: 1.1); + + Assert.True(report.DurationRegressionBreached); + Assert.True(report.ObservationThroughputRegressionBreached); + Assert.True(report.EventThroughputRegressionBreached); + Assert.Contains(report.BuildRegressionFailureMessages(), message => message.Contains("event throughput")); + } + + [Fact] + public void RegressionDetection_NoBaseline_NoBreaches() + { + var result = new VexScenarioResult( + Id: "scenario", + Label: "Scenario", + Iterations: 3, + ObservationCount: 1000, + AliasGroups: 100, + StatementCount: 6000, + EventCount: 3200, + TotalStatistics: new DurationStatistics(480, 520, 540), + InsertStatistics: new DurationStatistics(260, 280, 300), + CorrelationStatistics: new DurationStatistics(220, 240, 260), + ObservationThroughputStatistics: new ThroughputStatistics(9000, 8800), + EventThroughputStatistics: new ThroughputStatistics(4200, 4100), + AllocationStatistics: new AllocationStatistics(150), + ThresholdMs: null, + MinObservationThroughputPerSecond: null, + MinEventThroughputPerSecond: null, + MaxAllocatedThresholdMb: null); + + var report = new BenchmarkScenarioReport(result, baseline: null, regressionLimit: null); + + Assert.False(report.RegressionBreached); + Assert.Empty(report.BuildRegressionFailureMessages()); + } +} diff --git a/src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.Tests/StellaOps.Bench.LinkNotMerge.Vex.Tests.csproj b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.Tests/StellaOps.Bench.LinkNotMerge.Vex.Tests.csproj similarity index 97% rename from src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.Tests/StellaOps.Bench.LinkNotMerge.Vex.Tests.csproj rename to src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.Tests/StellaOps.Bench.LinkNotMerge.Vex.Tests.csproj index f734d08a..e9c8e765 100644 --- a/src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.Tests/StellaOps.Bench.LinkNotMerge.Vex.Tests.csproj +++ b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.Tests/StellaOps.Bench.LinkNotMerge.Vex.Tests.csproj @@ -1,28 +1,28 @@ - - - net10.0 - enable - enable - preview - true - false - - - - - - - runtime; build; native; contentfiles; analyzers; buildtransitive - all - - - runtime; build; native; contentfiles; analyzers; buildtransitive - all - - - - - - - - + + + net10.0 + enable + enable + preview + true + false + + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + diff --git a/src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.Tests/VexScenarioRunnerTests.cs b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.Tests/VexScenarioRunnerTests.cs similarity index 96% rename 
from src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.Tests/VexScenarioRunnerTests.cs rename to src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.Tests/VexScenarioRunnerTests.cs index 6858203f..7ea2a3a5 100644 --- a/src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.Tests/VexScenarioRunnerTests.cs +++ b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.Tests/VexScenarioRunnerTests.cs @@ -1,34 +1,34 @@ -using System.Linq; -using System.Threading; -using Xunit; - -namespace StellaOps.Bench.LinkNotMerge.Vex.Tests; - -public sealed class VexScenarioRunnerTests -{ - [Fact] - public void Execute_ComputesEvents() - { - var config = new VexScenarioConfig - { - Id = "unit", - Observations = 600, - AliasGroups = 120, - StatementsPerObservation = 5, - ProductsPerObservation = 3, - Tenants = 2, - BatchSize = 120, - Seed = 12345, - }; - - var runner = new VexScenarioRunner(config); - var result = runner.Execute(2, CancellationToken.None); - - Assert.Equal(600, result.ObservationCount); - Assert.True(result.StatementCount > 0); - Assert.True(result.EventCount > 0); - Assert.All(result.TotalDurationsMs, duration => Assert.True(duration > 0)); - Assert.All(result.EventThroughputsPerSecond, throughput => Assert.True(throughput > 0)); - Assert.Equal(result.AggregationResult.EventCount, result.EventCount); - } -} +using System.Linq; +using System.Threading; +using Xunit; + +namespace StellaOps.Bench.LinkNotMerge.Vex.Tests; + +public sealed class VexScenarioRunnerTests +{ + [Fact] + public void Execute_ComputesEvents() + { + var config = new VexScenarioConfig + { + Id = "unit", + Observations = 600, + AliasGroups = 120, + StatementsPerObservation = 5, + ProductsPerObservation = 3, + Tenants = 2, + BatchSize = 120, + Seed = 12345, + }; + + var runner = new VexScenarioRunner(config); + var result = runner.Execute(2, CancellationToken.None); + + Assert.Equal(600, result.ObservationCount); + Assert.True(result.StatementCount > 0); + Assert.True(result.EventCount > 0); + Assert.All(result.TotalDurationsMs, duration => Assert.True(duration > 0)); + Assert.All(result.EventThroughputsPerSecond, throughput => Assert.True(throughput > 0)); + Assert.Equal(result.AggregationResult.EventCount, result.EventCount); + } +} diff --git a/src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Baseline/BaselineEntry.cs b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Baseline/BaselineEntry.cs similarity index 96% rename from src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Baseline/BaselineEntry.cs rename to src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Baseline/BaselineEntry.cs index 5c53af34..826fff96 100644 --- a/src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Baseline/BaselineEntry.cs +++ b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Baseline/BaselineEntry.cs @@ -1,18 +1,18 @@ -namespace StellaOps.Bench.LinkNotMerge.Vex.Baseline; - -internal sealed record BaselineEntry( - string ScenarioId, - int Iterations, - int Observations, - int Statements, - int Events, - double MeanTotalMs, - double P95TotalMs, - double MaxTotalMs, - double MeanInsertMs, - double MeanCorrelationMs, - double MeanObservationThroughputPerSecond, - double MinObservationThroughputPerSecond, - double MeanEventThroughputPerSecond, - double MinEventThroughputPerSecond, - double MaxAllocatedMb); +namespace 
StellaOps.Bench.LinkNotMerge.Vex.Baseline; + +internal sealed record BaselineEntry( + string ScenarioId, + int Iterations, + int Observations, + int Statements, + int Events, + double MeanTotalMs, + double P95TotalMs, + double MaxTotalMs, + double MeanInsertMs, + double MeanCorrelationMs, + double MeanObservationThroughputPerSecond, + double MinObservationThroughputPerSecond, + double MeanEventThroughputPerSecond, + double MinEventThroughputPerSecond, + double MaxAllocatedMb); diff --git a/src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Baseline/BaselineLoader.cs b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Baseline/BaselineLoader.cs similarity index 97% rename from src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Baseline/BaselineLoader.cs rename to src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Baseline/BaselineLoader.cs index a541384a..b7577084 100644 --- a/src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Baseline/BaselineLoader.cs +++ b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Baseline/BaselineLoader.cs @@ -1,87 +1,87 @@ -using System.Globalization; - -namespace StellaOps.Bench.LinkNotMerge.Vex.Baseline; - -internal static class BaselineLoader -{ - public static async Task> LoadAsync(string path, CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(path); - - var resolved = Path.GetFullPath(path); - if (!File.Exists(resolved)) - { - return new Dictionary(StringComparer.OrdinalIgnoreCase); - } - - var result = new Dictionary(StringComparer.OrdinalIgnoreCase); - - await using var stream = new FileStream(resolved, FileMode.Open, FileAccess.Read, FileShare.Read); - using var reader = new StreamReader(stream); - - var lineNumber = 0; - while (true) - { - cancellationToken.ThrowIfCancellationRequested(); - - var line = await reader.ReadLineAsync().ConfigureAwait(false); - if (line is null) - { - break; - } - - lineNumber++; - if (lineNumber == 1 || string.IsNullOrWhiteSpace(line)) - { - continue; - } - - var parts = line.Split(',', StringSplitOptions.TrimEntries); - if (parts.Length < 15) - { - throw new InvalidOperationException($"Baseline '{resolved}' line {lineNumber} is invalid (expected 15 columns, found {parts.Length})."); - } - - var entry = new BaselineEntry( - ScenarioId: parts[0], - Iterations: ParseInt(parts[1], resolved, lineNumber), - Observations: ParseInt(parts[2], resolved, lineNumber), - Statements: ParseInt(parts[3], resolved, lineNumber), - Events: ParseInt(parts[4], resolved, lineNumber), - MeanTotalMs: ParseDouble(parts[5], resolved, lineNumber), - P95TotalMs: ParseDouble(parts[6], resolved, lineNumber), - MaxTotalMs: ParseDouble(parts[7], resolved, lineNumber), - MeanInsertMs: ParseDouble(parts[8], resolved, lineNumber), - MeanCorrelationMs: ParseDouble(parts[9], resolved, lineNumber), - MeanObservationThroughputPerSecond: ParseDouble(parts[10], resolved, lineNumber), - MinObservationThroughputPerSecond: ParseDouble(parts[11], resolved, lineNumber), - MeanEventThroughputPerSecond: ParseDouble(parts[12], resolved, lineNumber), - MinEventThroughputPerSecond: ParseDouble(parts[13], resolved, lineNumber), - MaxAllocatedMb: ParseDouble(parts[14], resolved, lineNumber)); - - result[entry.ScenarioId] = entry; - } - - return result; - } - - private static int ParseInt(string value, string file, int line) - { - if (int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out 
var parsed)) - { - return parsed; - } - - throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid integer '{value}'."); - } - - private static double ParseDouble(string value, string file, int line) - { - if (double.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out var parsed)) - { - return parsed; - } - - throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid number '{value}'."); - } -} +using System.Globalization; + +namespace StellaOps.Bench.LinkNotMerge.Vex.Baseline; + +internal static class BaselineLoader +{ + public static async Task> LoadAsync(string path, CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(path); + + var resolved = Path.GetFullPath(path); + if (!File.Exists(resolved)) + { + return new Dictionary(StringComparer.OrdinalIgnoreCase); + } + + var result = new Dictionary(StringComparer.OrdinalIgnoreCase); + + await using var stream = new FileStream(resolved, FileMode.Open, FileAccess.Read, FileShare.Read); + using var reader = new StreamReader(stream); + + var lineNumber = 0; + while (true) + { + cancellationToken.ThrowIfCancellationRequested(); + + var line = await reader.ReadLineAsync().ConfigureAwait(false); + if (line is null) + { + break; + } + + lineNumber++; + if (lineNumber == 1 || string.IsNullOrWhiteSpace(line)) + { + continue; + } + + var parts = line.Split(',', StringSplitOptions.TrimEntries); + if (parts.Length < 15) + { + throw new InvalidOperationException($"Baseline '{resolved}' line {lineNumber} is invalid (expected 15 columns, found {parts.Length})."); + } + + var entry = new BaselineEntry( + ScenarioId: parts[0], + Iterations: ParseInt(parts[1], resolved, lineNumber), + Observations: ParseInt(parts[2], resolved, lineNumber), + Statements: ParseInt(parts[3], resolved, lineNumber), + Events: ParseInt(parts[4], resolved, lineNumber), + MeanTotalMs: ParseDouble(parts[5], resolved, lineNumber), + P95TotalMs: ParseDouble(parts[6], resolved, lineNumber), + MaxTotalMs: ParseDouble(parts[7], resolved, lineNumber), + MeanInsertMs: ParseDouble(parts[8], resolved, lineNumber), + MeanCorrelationMs: ParseDouble(parts[9], resolved, lineNumber), + MeanObservationThroughputPerSecond: ParseDouble(parts[10], resolved, lineNumber), + MinObservationThroughputPerSecond: ParseDouble(parts[11], resolved, lineNumber), + MeanEventThroughputPerSecond: ParseDouble(parts[12], resolved, lineNumber), + MinEventThroughputPerSecond: ParseDouble(parts[13], resolved, lineNumber), + MaxAllocatedMb: ParseDouble(parts[14], resolved, lineNumber)); + + result[entry.ScenarioId] = entry; + } + + return result; + } + + private static int ParseInt(string value, string file, int line) + { + if (int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsed)) + { + return parsed; + } + + throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid integer '{value}'."); + } + + private static double ParseDouble(string value, string file, int line) + { + if (double.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out var parsed)) + { + return parsed; + } + + throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid number '{value}'."); + } +} diff --git a/src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Program.cs b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Program.cs similarity index 97% rename from 
src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Program.cs rename to src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Program.cs index e70d359a..ca2f3e7b 100644 --- a/src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Program.cs +++ b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Program.cs @@ -1,376 +1,376 @@ -using System.Globalization; -using StellaOps.Bench.LinkNotMerge.Vex.Baseline; -using StellaOps.Bench.LinkNotMerge.Vex.Reporting; - -namespace StellaOps.Bench.LinkNotMerge.Vex; - -internal static class Program -{ - public static async Task Main(string[] args) - { - try - { - var options = ProgramOptions.Parse(args); - var config = await VexBenchmarkConfig.LoadAsync(options.ConfigPath).ConfigureAwait(false); - var baseline = await BaselineLoader.LoadAsync(options.BaselinePath, CancellationToken.None).ConfigureAwait(false); - - var results = new List(); - var reports = new List(); - var failures = new List(); - - foreach (var scenario in config.Scenarios) - { - var iterations = scenario.ResolveIterations(config.Iterations); - var runner = new VexScenarioRunner(scenario); - var execution = runner.Execute(iterations, CancellationToken.None); - - var totalStats = DurationStatistics.From(execution.TotalDurationsMs); - var insertStats = DurationStatistics.From(execution.InsertDurationsMs); - var correlationStats = DurationStatistics.From(execution.CorrelationDurationsMs); - var allocationStats = AllocationStatistics.From(execution.AllocatedMb); - var observationThroughputStats = ThroughputStatistics.From(execution.ObservationThroughputsPerSecond); - var eventThroughputStats = ThroughputStatistics.From(execution.EventThroughputsPerSecond); - - var thresholdMs = scenario.ThresholdMs ?? options.ThresholdMs ?? config.ThresholdMs; - var observationFloor = scenario.MinThroughputPerSecond ?? options.MinThroughputPerSecond ?? config.MinThroughputPerSecond; - var eventFloor = scenario.MinEventThroughputPerSecond ?? options.MinEventThroughputPerSecond ?? config.MinEventThroughputPerSecond; - var allocationLimit = scenario.MaxAllocatedMb ?? options.MaxAllocatedMb ?? 
config.MaxAllocatedMb; - - var result = new VexScenarioResult( - scenario.ScenarioId, - scenario.DisplayLabel, - iterations, - execution.ObservationCount, - execution.AliasGroups, - execution.StatementCount, - execution.EventCount, - totalStats, - insertStats, - correlationStats, - observationThroughputStats, - eventThroughputStats, - allocationStats, - thresholdMs, - observationFloor, - eventFloor, - allocationLimit); - - results.Add(result); - - if (thresholdMs is { } threshold && result.TotalStatistics.MaxMs > threshold) - { - failures.Add($"{result.Id} exceeded total latency threshold: {result.TotalStatistics.MaxMs:F2} ms > {threshold:F2} ms"); - } - - if (observationFloor is { } obsFloor && result.ObservationThroughputStatistics.MinPerSecond < obsFloor) - { - failures.Add($"{result.Id} fell below observation throughput floor: {result.ObservationThroughputStatistics.MinPerSecond:N0} obs/s < {obsFloor:N0} obs/s"); - } - - if (eventFloor is { } evtFloor && result.EventThroughputStatistics.MinPerSecond < evtFloor) - { - failures.Add($"{result.Id} fell below event throughput floor: {result.EventThroughputStatistics.MinPerSecond:N0} events/s < {evtFloor:N0} events/s"); - } - - if (allocationLimit is { } limit && result.AllocationStatistics.MaxAllocatedMb > limit) - { - failures.Add($"{result.Id} exceeded allocation budget: {result.AllocationStatistics.MaxAllocatedMb:F2} MB > {limit:F2} MB"); - } - - baseline.TryGetValue(result.Id, out var baselineEntry); - var report = new BenchmarkScenarioReport(result, baselineEntry, options.RegressionLimit); - reports.Add(report); - failures.AddRange(report.BuildRegressionFailureMessages()); - } - - TablePrinter.Print(results); - - if (!string.IsNullOrWhiteSpace(options.CsvOutPath)) - { - CsvWriter.Write(options.CsvOutPath!, results); - } - - if (!string.IsNullOrWhiteSpace(options.JsonOutPath)) - { - var metadata = new BenchmarkJsonMetadata( - SchemaVersion: "linknotmerge-vex-bench/1.0", - CapturedAtUtc: (options.CapturedAtUtc ?? DateTimeOffset.UtcNow).ToUniversalTime(), - Commit: options.Commit, - Environment: options.Environment); - - await BenchmarkJsonWriter.WriteAsync(options.JsonOutPath!, metadata, reports, CancellationToken.None).ConfigureAwait(false); - } - - if (!string.IsNullOrWhiteSpace(options.PrometheusOutPath)) - { - PrometheusWriter.Write(options.PrometheusOutPath!, reports); - } - - if (failures.Count > 0) - { - Console.Error.WriteLine(); - Console.Error.WriteLine("Benchmark failures detected:"); - foreach (var failure in failures.Distinct()) - { - Console.Error.WriteLine($" - {failure}"); - } - - return 1; - } - - return 0; - } - catch (Exception ex) - { - Console.Error.WriteLine($"linknotmerge-vex-bench error: {ex.Message}"); - return 1; - } - } - - private sealed record ProgramOptions( - string ConfigPath, - int? Iterations, - double? ThresholdMs, - double? MinThroughputPerSecond, - double? MinEventThroughputPerSecond, - double? MaxAllocatedMb, - string? CsvOutPath, - string? JsonOutPath, - string? PrometheusOutPath, - string BaselinePath, - DateTimeOffset? CapturedAtUtc, - string? Commit, - string? Environment, - double? RegressionLimit) - { - public static ProgramOptions Parse(string[] args) - { - var configPath = DefaultConfigPath(); - var baselinePath = DefaultBaselinePath(); - - int? iterations = null; - double? thresholdMs = null; - double? minThroughput = null; - double? minEventThroughput = null; - double? maxAllocated = null; - string? csvOut = null; - string? jsonOut = null; - string? promOut = null; - DateTimeOffset? 
capturedAt = null; - string? commit = null; - string? environment = null; - double? regressionLimit = null; - - for (var index = 0; index < args.Length; index++) - { - var current = args[index]; - switch (current) - { - case "--config": - EnsureNext(args, index); - configPath = Path.GetFullPath(args[++index]); - break; - case "--iterations": - EnsureNext(args, index); - iterations = int.Parse(args[++index], CultureInfo.InvariantCulture); - break; - case "--threshold-ms": - EnsureNext(args, index); - thresholdMs = double.Parse(args[++index], CultureInfo.InvariantCulture); - break; - case "--min-throughput": - EnsureNext(args, index); - minThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture); - break; - case "--min-event-throughput": - EnsureNext(args, index); - minEventThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture); - break; - case "--max-allocated-mb": - EnsureNext(args, index); - maxAllocated = double.Parse(args[++index], CultureInfo.InvariantCulture); - break; - case "--csv": - EnsureNext(args, index); - csvOut = args[++index]; - break; - case "--json": - EnsureNext(args, index); - jsonOut = args[++index]; - break; - case "--prometheus": - EnsureNext(args, index); - promOut = args[++index]; - break; - case "--baseline": - EnsureNext(args, index); - baselinePath = Path.GetFullPath(args[++index]); - break; - case "--captured-at": - EnsureNext(args, index); - capturedAt = DateTimeOffset.Parse(args[++index], CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal); - break; - case "--commit": - EnsureNext(args, index); - commit = args[++index]; - break; - case "--environment": - EnsureNext(args, index); - environment = args[++index]; - break; - case "--regression-limit": - EnsureNext(args, index); - regressionLimit = double.Parse(args[++index], CultureInfo.InvariantCulture); - break; - case "--help": - case "-h": - PrintUsage(); - System.Environment.Exit(0); - break; - default: - throw new ArgumentException($"Unknown argument '{current}'."); - } - } - - return new ProgramOptions( - configPath, - iterations, - thresholdMs, - minThroughput, - minEventThroughput, - maxAllocated, - csvOut, - jsonOut, - promOut, - baselinePath, - capturedAt, - commit, - environment, - regressionLimit); - } - - private static string DefaultConfigPath() - { - var binaryDir = AppContext.BaseDirectory; - var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", "..")); - var benchRoot = Path.GetFullPath(Path.Combine(projectDir, "..")); - return Path.Combine(benchRoot, "config.json"); - } - - private static string DefaultBaselinePath() - { - var binaryDir = AppContext.BaseDirectory; - var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", "..")); - var benchRoot = Path.GetFullPath(Path.Combine(projectDir, "..")); - return Path.Combine(benchRoot, "baseline.csv"); - } - - private static void EnsureNext(string[] args, int index) - { - if (index + 1 >= args.Length) - { - throw new ArgumentException("Missing value for argument."); - } - } - - private static void PrintUsage() - { - Console.WriteLine("Usage: linknotmerge-vex-bench [options]"); - Console.WriteLine(); - Console.WriteLine("Options:"); - Console.WriteLine(" --config Path to benchmark configuration JSON."); - Console.WriteLine(" --iterations Override iteration count."); - Console.WriteLine(" --threshold-ms Global latency threshold in milliseconds."); - Console.WriteLine(" --min-throughput Observation throughput floor (observations/second)."); - Console.WriteLine(" --min-event-throughput 
Event emission throughput floor (events/second)."); - Console.WriteLine(" --max-allocated-mb Global allocation ceiling (MB)."); - Console.WriteLine(" --csv Write CSV results to path."); - Console.WriteLine(" --json Write JSON results to path."); - Console.WriteLine(" --prometheus Write Prometheus exposition metrics to path."); - Console.WriteLine(" --baseline Baseline CSV path."); - Console.WriteLine(" --captured-at Timestamp to embed in JSON metadata."); - Console.WriteLine(" --commit Commit identifier for metadata."); - Console.WriteLine(" --environment Environment label for metadata."); - Console.WriteLine(" --regression-limit Regression multiplier (default 1.15)."); - } - } -} - -internal static class TablePrinter -{ - public static void Print(IEnumerable results) - { - Console.WriteLine("Scenario | Observations | Statements | Events | Total(ms) | Correl(ms) | Insert(ms) | Obs k/s | Evnt k/s | Alloc(MB)"); - Console.WriteLine("---------------------------- | ------------- | ---------- | ------- | ---------- | ---------- | ----------- | ------- | -------- | --------"); - foreach (var row in results) - { - Console.WriteLine(string.Join(" | ", new[] - { - row.IdColumn, - row.ObservationsColumn, - row.StatementColumn, - row.EventColumn, - row.TotalMeanColumn, - row.CorrelationMeanColumn, - row.InsertMeanColumn, - row.ObservationThroughputColumn, - row.EventThroughputColumn, - row.AllocatedColumn, - })); - } - } -} - -internal static class CsvWriter -{ - public static void Write(string path, IEnumerable results) - { - ArgumentException.ThrowIfNullOrWhiteSpace(path); - ArgumentNullException.ThrowIfNull(results); - - var resolved = Path.GetFullPath(path); - var directory = Path.GetDirectoryName(resolved); - if (!string.IsNullOrEmpty(directory)) - { - Directory.CreateDirectory(directory); - } - - using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None); - using var writer = new StreamWriter(stream); - writer.WriteLine("scenario,iterations,observations,statements,events,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_observation_throughput_per_sec,min_observation_throughput_per_sec,mean_event_throughput_per_sec,min_event_throughput_per_sec,max_allocated_mb"); - - foreach (var result in results) - { - writer.Write(result.Id); - writer.Write(','); - writer.Write(result.Iterations.ToString(CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(result.ObservationCount.ToString(CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(result.StatementCount.ToString(CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(result.EventCount.ToString(CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(result.TotalStatistics.MeanMs.ToString("F4", CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(result.TotalStatistics.P95Ms.ToString("F4", CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(result.TotalStatistics.MaxMs.ToString("F4", CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(result.InsertStatistics.MeanMs.ToString("F4", CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(result.CorrelationStatistics.MeanMs.ToString("F4", CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(result.ObservationThroughputStatistics.MeanPerSecond.ToString("F4", CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(result.ObservationThroughputStatistics.MinPerSecond.ToString("F4", 
CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(result.EventThroughputStatistics.MeanPerSecond.ToString("F4", CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(result.EventThroughputStatistics.MinPerSecond.ToString("F4", CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(result.AllocationStatistics.MaxAllocatedMb.ToString("F4", CultureInfo.InvariantCulture)); - writer.WriteLine(); - } - } -} +using System.Globalization; +using StellaOps.Bench.LinkNotMerge.Vex.Baseline; +using StellaOps.Bench.LinkNotMerge.Vex.Reporting; + +namespace StellaOps.Bench.LinkNotMerge.Vex; + +internal static class Program +{ + public static async Task Main(string[] args) + { + try + { + var options = ProgramOptions.Parse(args); + var config = await VexBenchmarkConfig.LoadAsync(options.ConfigPath).ConfigureAwait(false); + var baseline = await BaselineLoader.LoadAsync(options.BaselinePath, CancellationToken.None).ConfigureAwait(false); + + var results = new List(); + var reports = new List(); + var failures = new List(); + + foreach (var scenario in config.Scenarios) + { + var iterations = scenario.ResolveIterations(config.Iterations); + var runner = new VexScenarioRunner(scenario); + var execution = runner.Execute(iterations, CancellationToken.None); + + var totalStats = DurationStatistics.From(execution.TotalDurationsMs); + var insertStats = DurationStatistics.From(execution.InsertDurationsMs); + var correlationStats = DurationStatistics.From(execution.CorrelationDurationsMs); + var allocationStats = AllocationStatistics.From(execution.AllocatedMb); + var observationThroughputStats = ThroughputStatistics.From(execution.ObservationThroughputsPerSecond); + var eventThroughputStats = ThroughputStatistics.From(execution.EventThroughputsPerSecond); + + var thresholdMs = scenario.ThresholdMs ?? options.ThresholdMs ?? config.ThresholdMs; + var observationFloor = scenario.MinThroughputPerSecond ?? options.MinThroughputPerSecond ?? config.MinThroughputPerSecond; + var eventFloor = scenario.MinEventThroughputPerSecond ?? options.MinEventThroughputPerSecond ?? config.MinEventThroughputPerSecond; + var allocationLimit = scenario.MaxAllocatedMb ?? options.MaxAllocatedMb ?? 
config.MaxAllocatedMb; + + var result = new VexScenarioResult( + scenario.ScenarioId, + scenario.DisplayLabel, + iterations, + execution.ObservationCount, + execution.AliasGroups, + execution.StatementCount, + execution.EventCount, + totalStats, + insertStats, + correlationStats, + observationThroughputStats, + eventThroughputStats, + allocationStats, + thresholdMs, + observationFloor, + eventFloor, + allocationLimit); + + results.Add(result); + + if (thresholdMs is { } threshold && result.TotalStatistics.MaxMs > threshold) + { + failures.Add($"{result.Id} exceeded total latency threshold: {result.TotalStatistics.MaxMs:F2} ms > {threshold:F2} ms"); + } + + if (observationFloor is { } obsFloor && result.ObservationThroughputStatistics.MinPerSecond < obsFloor) + { + failures.Add($"{result.Id} fell below observation throughput floor: {result.ObservationThroughputStatistics.MinPerSecond:N0} obs/s < {obsFloor:N0} obs/s"); + } + + if (eventFloor is { } evtFloor && result.EventThroughputStatistics.MinPerSecond < evtFloor) + { + failures.Add($"{result.Id} fell below event throughput floor: {result.EventThroughputStatistics.MinPerSecond:N0} events/s < {evtFloor:N0} events/s"); + } + + if (allocationLimit is { } limit && result.AllocationStatistics.MaxAllocatedMb > limit) + { + failures.Add($"{result.Id} exceeded allocation budget: {result.AllocationStatistics.MaxAllocatedMb:F2} MB > {limit:F2} MB"); + } + + baseline.TryGetValue(result.Id, out var baselineEntry); + var report = new BenchmarkScenarioReport(result, baselineEntry, options.RegressionLimit); + reports.Add(report); + failures.AddRange(report.BuildRegressionFailureMessages()); + } + + TablePrinter.Print(results); + + if (!string.IsNullOrWhiteSpace(options.CsvOutPath)) + { + CsvWriter.Write(options.CsvOutPath!, results); + } + + if (!string.IsNullOrWhiteSpace(options.JsonOutPath)) + { + var metadata = new BenchmarkJsonMetadata( + SchemaVersion: "linknotmerge-vex-bench/1.0", + CapturedAtUtc: (options.CapturedAtUtc ?? DateTimeOffset.UtcNow).ToUniversalTime(), + Commit: options.Commit, + Environment: options.Environment); + + await BenchmarkJsonWriter.WriteAsync(options.JsonOutPath!, metadata, reports, CancellationToken.None).ConfigureAwait(false); + } + + if (!string.IsNullOrWhiteSpace(options.PrometheusOutPath)) + { + PrometheusWriter.Write(options.PrometheusOutPath!, reports); + } + + if (failures.Count > 0) + { + Console.Error.WriteLine(); + Console.Error.WriteLine("Benchmark failures detected:"); + foreach (var failure in failures.Distinct()) + { + Console.Error.WriteLine($" - {failure}"); + } + + return 1; + } + + return 0; + } + catch (Exception ex) + { + Console.Error.WriteLine($"linknotmerge-vex-bench error: {ex.Message}"); + return 1; + } + } + + private sealed record ProgramOptions( + string ConfigPath, + int? Iterations, + double? ThresholdMs, + double? MinThroughputPerSecond, + double? MinEventThroughputPerSecond, + double? MaxAllocatedMb, + string? CsvOutPath, + string? JsonOutPath, + string? PrometheusOutPath, + string BaselinePath, + DateTimeOffset? CapturedAtUtc, + string? Commit, + string? Environment, + double? RegressionLimit) + { + public static ProgramOptions Parse(string[] args) + { + var configPath = DefaultConfigPath(); + var baselinePath = DefaultBaselinePath(); + + int? iterations = null; + double? thresholdMs = null; + double? minThroughput = null; + double? minEventThroughput = null; + double? maxAllocated = null; + string? csvOut = null; + string? jsonOut = null; + string? promOut = null; + DateTimeOffset? 
capturedAt = null; + string? commit = null; + string? environment = null; + double? regressionLimit = null; + + for (var index = 0; index < args.Length; index++) + { + var current = args[index]; + switch (current) + { + case "--config": + EnsureNext(args, index); + configPath = Path.GetFullPath(args[++index]); + break; + case "--iterations": + EnsureNext(args, index); + iterations = int.Parse(args[++index], CultureInfo.InvariantCulture); + break; + case "--threshold-ms": + EnsureNext(args, index); + thresholdMs = double.Parse(args[++index], CultureInfo.InvariantCulture); + break; + case "--min-throughput": + EnsureNext(args, index); + minThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture); + break; + case "--min-event-throughput": + EnsureNext(args, index); + minEventThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture); + break; + case "--max-allocated-mb": + EnsureNext(args, index); + maxAllocated = double.Parse(args[++index], CultureInfo.InvariantCulture); + break; + case "--csv": + EnsureNext(args, index); + csvOut = args[++index]; + break; + case "--json": + EnsureNext(args, index); + jsonOut = args[++index]; + break; + case "--prometheus": + EnsureNext(args, index); + promOut = args[++index]; + break; + case "--baseline": + EnsureNext(args, index); + baselinePath = Path.GetFullPath(args[++index]); + break; + case "--captured-at": + EnsureNext(args, index); + capturedAt = DateTimeOffset.Parse(args[++index], CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal); + break; + case "--commit": + EnsureNext(args, index); + commit = args[++index]; + break; + case "--environment": + EnsureNext(args, index); + environment = args[++index]; + break; + case "--regression-limit": + EnsureNext(args, index); + regressionLimit = double.Parse(args[++index], CultureInfo.InvariantCulture); + break; + case "--help": + case "-h": + PrintUsage(); + System.Environment.Exit(0); + break; + default: + throw new ArgumentException($"Unknown argument '{current}'."); + } + } + + return new ProgramOptions( + configPath, + iterations, + thresholdMs, + minThroughput, + minEventThroughput, + maxAllocated, + csvOut, + jsonOut, + promOut, + baselinePath, + capturedAt, + commit, + environment, + regressionLimit); + } + + private static string DefaultConfigPath() + { + var binaryDir = AppContext.BaseDirectory; + var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", "..")); + var benchRoot = Path.GetFullPath(Path.Combine(projectDir, "..")); + return Path.Combine(benchRoot, "config.json"); + } + + private static string DefaultBaselinePath() + { + var binaryDir = AppContext.BaseDirectory; + var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", "..")); + var benchRoot = Path.GetFullPath(Path.Combine(projectDir, "..")); + return Path.Combine(benchRoot, "baseline.csv"); + } + + private static void EnsureNext(string[] args, int index) + { + if (index + 1 >= args.Length) + { + throw new ArgumentException("Missing value for argument."); + } + } + + private static void PrintUsage() + { + Console.WriteLine("Usage: linknotmerge-vex-bench [options]"); + Console.WriteLine(); + Console.WriteLine("Options:"); + Console.WriteLine(" --config Path to benchmark configuration JSON."); + Console.WriteLine(" --iterations Override iteration count."); + Console.WriteLine(" --threshold-ms Global latency threshold in milliseconds."); + Console.WriteLine(" --min-throughput Observation throughput floor (observations/second)."); + Console.WriteLine(" --min-event-throughput 
Event emission throughput floor (events/second).");
+            Console.WriteLine(" --max-allocated-mb Global allocation ceiling (MB).");
+            Console.WriteLine(" --csv Write CSV results to path.");
+            Console.WriteLine(" --json Write JSON results to path.");
+            Console.WriteLine(" --prometheus Write Prometheus exposition metrics to path.");
+            Console.WriteLine(" --baseline Baseline CSV path.");
+            Console.WriteLine(" --captured-at Timestamp to embed in JSON metadata.");
+            Console.WriteLine(" --commit Commit identifier for metadata.");
+            Console.WriteLine(" --environment Environment label for metadata.");
+            Console.WriteLine(" --regression-limit Regression multiplier (default 1.15).");
+        }
+    }
+}
+
+internal static class TablePrinter
+{
+    public static void Print(IEnumerable<VexScenarioResult> results)
+    {
+        Console.WriteLine("Scenario | Observations | Statements | Events | Total(ms) | Correl(ms) | Insert(ms) | Obs k/s | Evnt k/s | Alloc(MB)");
+        Console.WriteLine("---------------------------- | ------------- | ---------- | ------- | ---------- | ---------- | ----------- | ------- | -------- | --------");
+        foreach (var row in results)
+        {
+            Console.WriteLine(string.Join(" | ", new[]
+            {
+                row.IdColumn,
+                row.ObservationsColumn,
+                row.StatementColumn,
+                row.EventColumn,
+                row.TotalMeanColumn,
+                row.CorrelationMeanColumn,
+                row.InsertMeanColumn,
+                row.ObservationThroughputColumn,
+                row.EventThroughputColumn,
+                row.AllocatedColumn,
+            }));
+        }
+    }
+}
+
+internal static class CsvWriter
+{
+    public static void Write(string path, IEnumerable<VexScenarioResult> results)
+    {
+        ArgumentException.ThrowIfNullOrWhiteSpace(path);
+        ArgumentNullException.ThrowIfNull(results);
+
+        var resolved = Path.GetFullPath(path);
+        var directory = Path.GetDirectoryName(resolved);
+        if (!string.IsNullOrEmpty(directory))
+        {
+            Directory.CreateDirectory(directory);
+        }
+
+        using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None);
+        using var writer = new StreamWriter(stream);
+        writer.WriteLine("scenario,iterations,observations,statements,events,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_observation_throughput_per_sec,min_observation_throughput_per_sec,mean_event_throughput_per_sec,min_event_throughput_per_sec,max_allocated_mb");
+
+        foreach (var result in results)
+        {
+            writer.Write(result.Id);
+            writer.Write(',');
+            writer.Write(result.Iterations.ToString(CultureInfo.InvariantCulture));
+            writer.Write(',');
+            writer.Write(result.ObservationCount.ToString(CultureInfo.InvariantCulture));
+            writer.Write(',');
+            writer.Write(result.StatementCount.ToString(CultureInfo.InvariantCulture));
+            writer.Write(',');
+            writer.Write(result.EventCount.ToString(CultureInfo.InvariantCulture));
+            writer.Write(',');
+            writer.Write(result.TotalStatistics.MeanMs.ToString("F4", CultureInfo.InvariantCulture));
+            writer.Write(',');
+            writer.Write(result.TotalStatistics.P95Ms.ToString("F4", CultureInfo.InvariantCulture));
+            writer.Write(',');
+            writer.Write(result.TotalStatistics.MaxMs.ToString("F4", CultureInfo.InvariantCulture));
+            writer.Write(',');
+            writer.Write(result.InsertStatistics.MeanMs.ToString("F4", CultureInfo.InvariantCulture));
+            writer.Write(',');
+            writer.Write(result.CorrelationStatistics.MeanMs.ToString("F4", CultureInfo.InvariantCulture));
+            writer.Write(',');
+            writer.Write(result.ObservationThroughputStatistics.MeanPerSecond.ToString("F4", CultureInfo.InvariantCulture));
+            writer.Write(',');
+            writer.Write(result.ObservationThroughputStatistics.MinPerSecond.ToString("F4",
CultureInfo.InvariantCulture)); + writer.Write(','); + writer.Write(result.EventThroughputStatistics.MeanPerSecond.ToString("F4", CultureInfo.InvariantCulture)); + writer.Write(','); + writer.Write(result.EventThroughputStatistics.MinPerSecond.ToString("F4", CultureInfo.InvariantCulture)); + writer.Write(','); + writer.Write(result.AllocationStatistics.MaxAllocatedMb.ToString("F4", CultureInfo.InvariantCulture)); + writer.WriteLine(); + } + } +} diff --git a/src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Properties/AssemblyInfo.cs b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Properties/AssemblyInfo.cs similarity index 97% rename from src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Properties/AssemblyInfo.cs rename to src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Properties/AssemblyInfo.cs index cb24d2c2..b3ec28cf 100644 --- a/src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Properties/AssemblyInfo.cs +++ b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Properties/AssemblyInfo.cs @@ -1,3 +1,3 @@ -using System.Runtime.CompilerServices; - -[assembly: InternalsVisibleTo("StellaOps.Bench.LinkNotMerge.Vex.Tests")] +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("StellaOps.Bench.LinkNotMerge.Vex.Tests")] diff --git a/src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Reporting/BenchmarkJsonWriter.cs b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Reporting/BenchmarkJsonWriter.cs similarity index 97% rename from src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Reporting/BenchmarkJsonWriter.cs rename to src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Reporting/BenchmarkJsonWriter.cs index 89d04b43..e5ca313b 100644 --- a/src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Reporting/BenchmarkJsonWriter.cs +++ b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Reporting/BenchmarkJsonWriter.cs @@ -1,151 +1,151 @@ -using System.Text.Json; -using System.Text.Json.Serialization; - -namespace StellaOps.Bench.LinkNotMerge.Vex.Reporting; - -internal static class BenchmarkJsonWriter -{ - private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) - { - WriteIndented = true, - DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, - }; - - public static async Task WriteAsync( - string path, - BenchmarkJsonMetadata metadata, - IReadOnlyList reports, - CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(path); - ArgumentNullException.ThrowIfNull(metadata); - ArgumentNullException.ThrowIfNull(reports); - - var resolved = Path.GetFullPath(path); - var directory = Path.GetDirectoryName(resolved); - if (!string.IsNullOrEmpty(directory)) - { - Directory.CreateDirectory(directory); - } - - var document = new BenchmarkJsonDocument( - metadata.SchemaVersion, - metadata.CapturedAtUtc, - metadata.Commit, - metadata.Environment, - reports.Select(CreateScenario).ToArray()); - - await using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None); - await JsonSerializer.SerializeAsync(stream, document, SerializerOptions, cancellationToken).ConfigureAwait(false); - await stream.FlushAsync(cancellationToken).ConfigureAwait(false); - } - - private static BenchmarkJsonScenario 
CreateScenario(BenchmarkScenarioReport report) - { - var baseline = report.Baseline; - return new BenchmarkJsonScenario( - report.Result.Id, - report.Result.Label, - report.Result.Iterations, - report.Result.ObservationCount, - report.Result.StatementCount, - report.Result.EventCount, - report.Result.TotalStatistics.MeanMs, - report.Result.TotalStatistics.P95Ms, - report.Result.TotalStatistics.MaxMs, - report.Result.InsertStatistics.MeanMs, - report.Result.CorrelationStatistics.MeanMs, - report.Result.ObservationThroughputStatistics.MeanPerSecond, - report.Result.ObservationThroughputStatistics.MinPerSecond, - report.Result.EventThroughputStatistics.MeanPerSecond, - report.Result.EventThroughputStatistics.MinPerSecond, - report.Result.AllocationStatistics.MaxAllocatedMb, - report.Result.ThresholdMs, - report.Result.MinObservationThroughputPerSecond, - report.Result.MinEventThroughputPerSecond, - report.Result.MaxAllocatedThresholdMb, - baseline is null - ? null - : new BenchmarkJsonScenarioBaseline( - baseline.Iterations, - baseline.Observations, - baseline.Statements, - baseline.Events, - baseline.MeanTotalMs, - baseline.P95TotalMs, - baseline.MaxTotalMs, - baseline.MeanInsertMs, - baseline.MeanCorrelationMs, - baseline.MeanObservationThroughputPerSecond, - baseline.MinObservationThroughputPerSecond, - baseline.MeanEventThroughputPerSecond, - baseline.MinEventThroughputPerSecond, - baseline.MaxAllocatedMb), - new BenchmarkJsonScenarioRegression( - report.DurationRegressionRatio, - report.ObservationThroughputRegressionRatio, - report.EventThroughputRegressionRatio, - report.RegressionLimit, - report.RegressionBreached)); - } - - private sealed record BenchmarkJsonDocument( - string SchemaVersion, - DateTimeOffset CapturedAt, - string? Commit, - string? Environment, - IReadOnlyList Scenarios); - - private sealed record BenchmarkJsonScenario( - string Id, - string Label, - int Iterations, - int Observations, - int Statements, - int Events, - double MeanTotalMs, - double P95TotalMs, - double MaxTotalMs, - double MeanInsertMs, - double MeanCorrelationMs, - double MeanObservationThroughputPerSecond, - double MinObservationThroughputPerSecond, - double MeanEventThroughputPerSecond, - double MinEventThroughputPerSecond, - double MaxAllocatedMb, - double? ThresholdMs, - double? MinObservationThroughputThresholdPerSecond, - double? MinEventThroughputThresholdPerSecond, - double? MaxAllocatedThresholdMb, - BenchmarkJsonScenarioBaseline? Baseline, - BenchmarkJsonScenarioRegression Regression); - - private sealed record BenchmarkJsonScenarioBaseline( - int Iterations, - int Observations, - int Statements, - int Events, - double MeanTotalMs, - double P95TotalMs, - double MaxTotalMs, - double MeanInsertMs, - double MeanCorrelationMs, - double MeanObservationThroughputPerSecond, - double MinObservationThroughputPerSecond, - double MeanEventThroughputPerSecond, - double MinEventThroughputPerSecond, - double MaxAllocatedMb); - - private sealed record BenchmarkJsonScenarioRegression( - double? DurationRatio, - double? ObservationThroughputRatio, - double? EventThroughputRatio, - double Limit, - bool Breached); -} - -internal sealed record BenchmarkJsonMetadata( - string SchemaVersion, - DateTimeOffset CapturedAtUtc, - string? Commit, - string? 
Environment); +using System.Text.Json; +using System.Text.Json.Serialization; + +namespace StellaOps.Bench.LinkNotMerge.Vex.Reporting; + +internal static class BenchmarkJsonWriter +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + WriteIndented = true, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + }; + + public static async Task WriteAsync( + string path, + BenchmarkJsonMetadata metadata, + IReadOnlyList reports, + CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(path); + ArgumentNullException.ThrowIfNull(metadata); + ArgumentNullException.ThrowIfNull(reports); + + var resolved = Path.GetFullPath(path); + var directory = Path.GetDirectoryName(resolved); + if (!string.IsNullOrEmpty(directory)) + { + Directory.CreateDirectory(directory); + } + + var document = new BenchmarkJsonDocument( + metadata.SchemaVersion, + metadata.CapturedAtUtc, + metadata.Commit, + metadata.Environment, + reports.Select(CreateScenario).ToArray()); + + await using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None); + await JsonSerializer.SerializeAsync(stream, document, SerializerOptions, cancellationToken).ConfigureAwait(false); + await stream.FlushAsync(cancellationToken).ConfigureAwait(false); + } + + private static BenchmarkJsonScenario CreateScenario(BenchmarkScenarioReport report) + { + var baseline = report.Baseline; + return new BenchmarkJsonScenario( + report.Result.Id, + report.Result.Label, + report.Result.Iterations, + report.Result.ObservationCount, + report.Result.StatementCount, + report.Result.EventCount, + report.Result.TotalStatistics.MeanMs, + report.Result.TotalStatistics.P95Ms, + report.Result.TotalStatistics.MaxMs, + report.Result.InsertStatistics.MeanMs, + report.Result.CorrelationStatistics.MeanMs, + report.Result.ObservationThroughputStatistics.MeanPerSecond, + report.Result.ObservationThroughputStatistics.MinPerSecond, + report.Result.EventThroughputStatistics.MeanPerSecond, + report.Result.EventThroughputStatistics.MinPerSecond, + report.Result.AllocationStatistics.MaxAllocatedMb, + report.Result.ThresholdMs, + report.Result.MinObservationThroughputPerSecond, + report.Result.MinEventThroughputPerSecond, + report.Result.MaxAllocatedThresholdMb, + baseline is null + ? null + : new BenchmarkJsonScenarioBaseline( + baseline.Iterations, + baseline.Observations, + baseline.Statements, + baseline.Events, + baseline.MeanTotalMs, + baseline.P95TotalMs, + baseline.MaxTotalMs, + baseline.MeanInsertMs, + baseline.MeanCorrelationMs, + baseline.MeanObservationThroughputPerSecond, + baseline.MinObservationThroughputPerSecond, + baseline.MeanEventThroughputPerSecond, + baseline.MinEventThroughputPerSecond, + baseline.MaxAllocatedMb), + new BenchmarkJsonScenarioRegression( + report.DurationRegressionRatio, + report.ObservationThroughputRegressionRatio, + report.EventThroughputRegressionRatio, + report.RegressionLimit, + report.RegressionBreached)); + } + + private sealed record BenchmarkJsonDocument( + string SchemaVersion, + DateTimeOffset CapturedAt, + string? Commit, + string? 
Environment,
+    IReadOnlyList<BenchmarkJsonScenario> Scenarios);
+
+    private sealed record BenchmarkJsonScenario(
+        string Id,
+        string Label,
+        int Iterations,
+        int Observations,
+        int Statements,
+        int Events,
+        double MeanTotalMs,
+        double P95TotalMs,
+        double MaxTotalMs,
+        double MeanInsertMs,
+        double MeanCorrelationMs,
+        double MeanObservationThroughputPerSecond,
+        double MinObservationThroughputPerSecond,
+        double MeanEventThroughputPerSecond,
+        double MinEventThroughputPerSecond,
+        double MaxAllocatedMb,
+        double? ThresholdMs,
+        double? MinObservationThroughputThresholdPerSecond,
+        double? MinEventThroughputThresholdPerSecond,
+        double? MaxAllocatedThresholdMb,
+        BenchmarkJsonScenarioBaseline? Baseline,
+        BenchmarkJsonScenarioRegression Regression);
+
+    private sealed record BenchmarkJsonScenarioBaseline(
+        int Iterations,
+        int Observations,
+        int Statements,
+        int Events,
+        double MeanTotalMs,
+        double P95TotalMs,
+        double MaxTotalMs,
+        double MeanInsertMs,
+        double MeanCorrelationMs,
+        double MeanObservationThroughputPerSecond,
+        double MinObservationThroughputPerSecond,
+        double MeanEventThroughputPerSecond,
+        double MinEventThroughputPerSecond,
+        double MaxAllocatedMb);
+
+    private sealed record BenchmarkJsonScenarioRegression(
+        double? DurationRatio,
+        double? ObservationThroughputRatio,
+        double? EventThroughputRatio,
+        double Limit,
+        bool Breached);
+}
+
+internal sealed record BenchmarkJsonMetadata(
+    string SchemaVersion,
+    DateTimeOffset CapturedAtUtc,
+    string? Commit,
+    string? Environment);
diff --git a/src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Reporting/BenchmarkScenarioReport.cs b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Reporting/BenchmarkScenarioReport.cs
similarity index 97%
rename from src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Reporting/BenchmarkScenarioReport.cs
rename to src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Reporting/BenchmarkScenarioReport.cs
index cc933b4e..1be7aa40 100644
--- a/src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Reporting/BenchmarkScenarioReport.cs
+++ b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Reporting/BenchmarkScenarioReport.cs
@@ -1,89 +1,89 @@
-using StellaOps.Bench.LinkNotMerge.Vex.Baseline;
-
-namespace StellaOps.Bench.LinkNotMerge.Vex.Reporting;
-
-internal sealed class BenchmarkScenarioReport
-{
-    private const double DefaultRegressionLimit = 1.15d;
-
-    public BenchmarkScenarioReport(VexScenarioResult result, BaselineEntry? baseline, double? regressionLimit = null)
-    {
-        Result = result ?? throw new ArgumentNullException(nameof(result));
-        Baseline = baseline;
-        RegressionLimit = regressionLimit is { } limit && limit > 0 ? limit : DefaultRegressionLimit;
-        DurationRegressionRatio = CalculateRatio(result.TotalStatistics.MaxMs, baseline?.MaxTotalMs);
-        ObservationThroughputRegressionRatio = CalculateInverseRatio(result.ObservationThroughputStatistics.MinPerSecond, baseline?.MinObservationThroughputPerSecond);
-        EventThroughputRegressionRatio = CalculateInverseRatio(result.EventThroughputStatistics.MinPerSecond, baseline?.MinEventThroughputPerSecond);
-    }
-
-    public VexScenarioResult Result { get; }
-
-    public BaselineEntry? Baseline { get; }
-
-    public double RegressionLimit { get; }
-
-    public double? DurationRegressionRatio { get; }
-
-    public double? ObservationThroughputRegressionRatio { get; }
-
-    public double?
EventThroughputRegressionRatio { get; } - - public bool DurationRegressionBreached => DurationRegressionRatio is { } ratio && ratio >= RegressionLimit; - - public bool ObservationThroughputRegressionBreached => ObservationThroughputRegressionRatio is { } ratio && ratio >= RegressionLimit; - - public bool EventThroughputRegressionBreached => EventThroughputRegressionRatio is { } ratio && ratio >= RegressionLimit; - - public bool RegressionBreached => DurationRegressionBreached || ObservationThroughputRegressionBreached || EventThroughputRegressionBreached; - - public IEnumerable BuildRegressionFailureMessages() - { - if (Baseline is null) - { - yield break; - } - - if (DurationRegressionBreached && DurationRegressionRatio is { } durationRatio) - { - var delta = (durationRatio - 1d) * 100d; - yield return $"{Result.Id} exceeded max duration budget: {Result.TotalStatistics.MaxMs:F2} ms vs baseline {Baseline.MaxTotalMs:F2} ms (+{delta:F1}%)."; - } - - if (ObservationThroughputRegressionBreached && ObservationThroughputRegressionRatio is { } obsRatio) - { - var delta = (obsRatio - 1d) * 100d; - yield return $"{Result.Id} observation throughput regressed: min {Result.ObservationThroughputStatistics.MinPerSecond:N0} obs/s vs baseline {Baseline.MinObservationThroughputPerSecond:N0} obs/s (-{delta:F1}%)."; - } - - if (EventThroughputRegressionBreached && EventThroughputRegressionRatio is { } evtRatio) - { - var delta = (evtRatio - 1d) * 100d; - yield return $"{Result.Id} event throughput regressed: min {Result.EventThroughputStatistics.MinPerSecond:N0} events/s vs baseline {Baseline.MinEventThroughputPerSecond:N0} events/s (-{delta:F1}%)."; - } - } - - private static double? CalculateRatio(double current, double? baseline) - { - if (!baseline.HasValue || baseline.Value <= 0d) - { - return null; - } - - return current / baseline.Value; - } - - private static double? CalculateInverseRatio(double current, double? baseline) - { - if (!baseline.HasValue || baseline.Value <= 0d) - { - return null; - } - - if (current <= 0d) - { - return double.PositiveInfinity; - } - - return baseline.Value / current; - } -} +using StellaOps.Bench.LinkNotMerge.Vex.Baseline; + +namespace StellaOps.Bench.LinkNotMerge.Vex.Reporting; + +internal sealed class BenchmarkScenarioReport +{ + private const double DefaultRegressionLimit = 1.15d; + + public BenchmarkScenarioReport(VexScenarioResult result, BaselineEntry? baseline, double? regressionLimit = null) + { + Result = result ?? throw new ArgumentNullException(nameof(result)); + Baseline = baseline; + RegressionLimit = regressionLimit is { } limit && limit > 0 ? limit : DefaultRegressionLimit; + DurationRegressionRatio = CalculateRatio(result.TotalStatistics.MaxMs, baseline?.MaxTotalMs); + ObservationThroughputRegressionRatio = CalculateInverseRatio(result.ObservationThroughputStatistics.MinPerSecond, baseline?.MinObservationThroughputPerSecond); + EventThroughputRegressionRatio = CalculateInverseRatio(result.EventThroughputStatistics.MinPerSecond, baseline?.MinEventThroughputPerSecond); + } + + public VexScenarioResult Result { get; } + + public BaselineEntry? Baseline { get; } + + public double RegressionLimit { get; } + + public double? DurationRegressionRatio { get; } + + public double? ObservationThroughputRegressionRatio { get; } + + public double? 
EventThroughputRegressionRatio { get; } + + public bool DurationRegressionBreached => DurationRegressionRatio is { } ratio && ratio >= RegressionLimit; + + public bool ObservationThroughputRegressionBreached => ObservationThroughputRegressionRatio is { } ratio && ratio >= RegressionLimit; + + public bool EventThroughputRegressionBreached => EventThroughputRegressionRatio is { } ratio && ratio >= RegressionLimit; + + public bool RegressionBreached => DurationRegressionBreached || ObservationThroughputRegressionBreached || EventThroughputRegressionBreached; + + public IEnumerable BuildRegressionFailureMessages() + { + if (Baseline is null) + { + yield break; + } + + if (DurationRegressionBreached && DurationRegressionRatio is { } durationRatio) + { + var delta = (durationRatio - 1d) * 100d; + yield return $"{Result.Id} exceeded max duration budget: {Result.TotalStatistics.MaxMs:F2} ms vs baseline {Baseline.MaxTotalMs:F2} ms (+{delta:F1}%)."; + } + + if (ObservationThroughputRegressionBreached && ObservationThroughputRegressionRatio is { } obsRatio) + { + var delta = (obsRatio - 1d) * 100d; + yield return $"{Result.Id} observation throughput regressed: min {Result.ObservationThroughputStatistics.MinPerSecond:N0} obs/s vs baseline {Baseline.MinObservationThroughputPerSecond:N0} obs/s (-{delta:F1}%)."; + } + + if (EventThroughputRegressionBreached && EventThroughputRegressionRatio is { } evtRatio) + { + var delta = (evtRatio - 1d) * 100d; + yield return $"{Result.Id} event throughput regressed: min {Result.EventThroughputStatistics.MinPerSecond:N0} events/s vs baseline {Baseline.MinEventThroughputPerSecond:N0} events/s (-{delta:F1}%)."; + } + } + + private static double? CalculateRatio(double current, double? baseline) + { + if (!baseline.HasValue || baseline.Value <= 0d) + { + return null; + } + + return current / baseline.Value; + } + + private static double? CalculateInverseRatio(double current, double? 
baseline) + { + if (!baseline.HasValue || baseline.Value <= 0d) + { + return null; + } + + if (current <= 0d) + { + return double.PositiveInfinity; + } + + return baseline.Value / current; + } +} diff --git a/src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Reporting/PrometheusWriter.cs b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Reporting/PrometheusWriter.cs similarity index 98% rename from src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Reporting/PrometheusWriter.cs rename to src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Reporting/PrometheusWriter.cs index c8c2adfb..bcc60f66 100644 --- a/src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Reporting/PrometheusWriter.cs +++ b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Reporting/PrometheusWriter.cs @@ -1,94 +1,94 @@ -using System.Globalization; -using System.Text; - -namespace StellaOps.Bench.LinkNotMerge.Vex.Reporting; - -internal static class PrometheusWriter -{ - public static void Write(string path, IReadOnlyList reports) - { - ArgumentException.ThrowIfNullOrWhiteSpace(path); - ArgumentNullException.ThrowIfNull(reports); - - var resolved = Path.GetFullPath(path); - var directory = Path.GetDirectoryName(resolved); - if (!string.IsNullOrEmpty(directory)) - { - Directory.CreateDirectory(directory); - } - - var builder = new StringBuilder(); - builder.AppendLine("# HELP linknotmerge_vex_bench_total_ms Link-Not-Merge VEX benchmark total duration (milliseconds)."); - builder.AppendLine("# TYPE linknotmerge_vex_bench_total_ms gauge"); - builder.AppendLine("# HELP linknotmerge_vex_bench_throughput_per_sec Link-Not-Merge VEX benchmark observation throughput (observations per second)."); - builder.AppendLine("# TYPE linknotmerge_vex_bench_throughput_per_sec gauge"); - builder.AppendLine("# HELP linknotmerge_vex_bench_event_throughput_per_sec Link-Not-Merge VEX benchmark event throughput (events per second)."); - builder.AppendLine("# TYPE linknotmerge_vex_bench_event_throughput_per_sec gauge"); - builder.AppendLine("# HELP linknotmerge_vex_bench_allocated_mb Link-Not-Merge VEX benchmark max allocations (megabytes)."); - builder.AppendLine("# TYPE linknotmerge_vex_bench_allocated_mb gauge"); - - foreach (var report in reports) - { - var scenario = Escape(report.Result.Id); - AppendMetric(builder, "linknotmerge_vex_bench_mean_total_ms", scenario, report.Result.TotalStatistics.MeanMs); - AppendMetric(builder, "linknotmerge_vex_bench_p95_total_ms", scenario, report.Result.TotalStatistics.P95Ms); - AppendMetric(builder, "linknotmerge_vex_bench_max_total_ms", scenario, report.Result.TotalStatistics.MaxMs); - AppendMetric(builder, "linknotmerge_vex_bench_threshold_ms", scenario, report.Result.ThresholdMs); - - AppendMetric(builder, "linknotmerge_vex_bench_mean_observation_throughput_per_sec", scenario, report.Result.ObservationThroughputStatistics.MeanPerSecond); - AppendMetric(builder, "linknotmerge_vex_bench_min_observation_throughput_per_sec", scenario, report.Result.ObservationThroughputStatistics.MinPerSecond); - AppendMetric(builder, "linknotmerge_vex_bench_observation_throughput_floor_per_sec", scenario, report.Result.MinObservationThroughputPerSecond); - - AppendMetric(builder, "linknotmerge_vex_bench_mean_event_throughput_per_sec", scenario, report.Result.EventThroughputStatistics.MeanPerSecond); - AppendMetric(builder, "linknotmerge_vex_bench_min_event_throughput_per_sec", scenario, 
report.Result.EventThroughputStatistics.MinPerSecond); - AppendMetric(builder, "linknotmerge_vex_bench_event_throughput_floor_per_sec", scenario, report.Result.MinEventThroughputPerSecond); - - AppendMetric(builder, "linknotmerge_vex_bench_max_allocated_mb", scenario, report.Result.AllocationStatistics.MaxAllocatedMb); - AppendMetric(builder, "linknotmerge_vex_bench_max_allocated_threshold_mb", scenario, report.Result.MaxAllocatedThresholdMb); - - if (report.Baseline is { } baseline) - { - AppendMetric(builder, "linknotmerge_vex_bench_baseline_max_total_ms", scenario, baseline.MaxTotalMs); - AppendMetric(builder, "linknotmerge_vex_bench_baseline_min_observation_throughput_per_sec", scenario, baseline.MinObservationThroughputPerSecond); - AppendMetric(builder, "linknotmerge_vex_bench_baseline_min_event_throughput_per_sec", scenario, baseline.MinEventThroughputPerSecond); - } - - if (report.DurationRegressionRatio is { } durationRatio) - { - AppendMetric(builder, "linknotmerge_vex_bench_duration_regression_ratio", scenario, durationRatio); - } - - if (report.ObservationThroughputRegressionRatio is { } obsRatio) - { - AppendMetric(builder, "linknotmerge_vex_bench_observation_regression_ratio", scenario, obsRatio); - } - - if (report.EventThroughputRegressionRatio is { } evtRatio) - { - AppendMetric(builder, "linknotmerge_vex_bench_event_regression_ratio", scenario, evtRatio); - } - - AppendMetric(builder, "linknotmerge_vex_bench_regression_limit", scenario, report.RegressionLimit); - AppendMetric(builder, "linknotmerge_vex_bench_regression_breached", scenario, report.RegressionBreached ? 1 : 0); - } - - File.WriteAllText(resolved, builder.ToString(), Encoding.UTF8); - } - - private static void AppendMetric(StringBuilder builder, string metric, string scenario, double? 
value) - { - if (!value.HasValue) - { - return; - } - - builder.Append(metric); - builder.Append("{scenario=\""); - builder.Append(scenario); - builder.Append("\"} "); - builder.AppendLine(value.Value.ToString("G17", CultureInfo.InvariantCulture)); - } - - private static string Escape(string value) => - value.Replace("\\", "\\\\", StringComparison.Ordinal).Replace("\"", "\\\"", StringComparison.Ordinal); -} +using System.Globalization; +using System.Text; + +namespace StellaOps.Bench.LinkNotMerge.Vex.Reporting; + +internal static class PrometheusWriter +{ + public static void Write(string path, IReadOnlyList reports) + { + ArgumentException.ThrowIfNullOrWhiteSpace(path); + ArgumentNullException.ThrowIfNull(reports); + + var resolved = Path.GetFullPath(path); + var directory = Path.GetDirectoryName(resolved); + if (!string.IsNullOrEmpty(directory)) + { + Directory.CreateDirectory(directory); + } + + var builder = new StringBuilder(); + builder.AppendLine("# HELP linknotmerge_vex_bench_total_ms Link-Not-Merge VEX benchmark total duration (milliseconds)."); + builder.AppendLine("# TYPE linknotmerge_vex_bench_total_ms gauge"); + builder.AppendLine("# HELP linknotmerge_vex_bench_throughput_per_sec Link-Not-Merge VEX benchmark observation throughput (observations per second)."); + builder.AppendLine("# TYPE linknotmerge_vex_bench_throughput_per_sec gauge"); + builder.AppendLine("# HELP linknotmerge_vex_bench_event_throughput_per_sec Link-Not-Merge VEX benchmark event throughput (events per second)."); + builder.AppendLine("# TYPE linknotmerge_vex_bench_event_throughput_per_sec gauge"); + builder.AppendLine("# HELP linknotmerge_vex_bench_allocated_mb Link-Not-Merge VEX benchmark max allocations (megabytes)."); + builder.AppendLine("# TYPE linknotmerge_vex_bench_allocated_mb gauge"); + + foreach (var report in reports) + { + var scenario = Escape(report.Result.Id); + AppendMetric(builder, "linknotmerge_vex_bench_mean_total_ms", scenario, report.Result.TotalStatistics.MeanMs); + AppendMetric(builder, "linknotmerge_vex_bench_p95_total_ms", scenario, report.Result.TotalStatistics.P95Ms); + AppendMetric(builder, "linknotmerge_vex_bench_max_total_ms", scenario, report.Result.TotalStatistics.MaxMs); + AppendMetric(builder, "linknotmerge_vex_bench_threshold_ms", scenario, report.Result.ThresholdMs); + + AppendMetric(builder, "linknotmerge_vex_bench_mean_observation_throughput_per_sec", scenario, report.Result.ObservationThroughputStatistics.MeanPerSecond); + AppendMetric(builder, "linknotmerge_vex_bench_min_observation_throughput_per_sec", scenario, report.Result.ObservationThroughputStatistics.MinPerSecond); + AppendMetric(builder, "linknotmerge_vex_bench_observation_throughput_floor_per_sec", scenario, report.Result.MinObservationThroughputPerSecond); + + AppendMetric(builder, "linknotmerge_vex_bench_mean_event_throughput_per_sec", scenario, report.Result.EventThroughputStatistics.MeanPerSecond); + AppendMetric(builder, "linknotmerge_vex_bench_min_event_throughput_per_sec", scenario, report.Result.EventThroughputStatistics.MinPerSecond); + AppendMetric(builder, "linknotmerge_vex_bench_event_throughput_floor_per_sec", scenario, report.Result.MinEventThroughputPerSecond); + + AppendMetric(builder, "linknotmerge_vex_bench_max_allocated_mb", scenario, report.Result.AllocationStatistics.MaxAllocatedMb); + AppendMetric(builder, "linknotmerge_vex_bench_max_allocated_threshold_mb", scenario, report.Result.MaxAllocatedThresholdMb); + + if (report.Baseline is { } baseline) + { + AppendMetric(builder, 
"linknotmerge_vex_bench_baseline_max_total_ms", scenario, baseline.MaxTotalMs); + AppendMetric(builder, "linknotmerge_vex_bench_baseline_min_observation_throughput_per_sec", scenario, baseline.MinObservationThroughputPerSecond); + AppendMetric(builder, "linknotmerge_vex_bench_baseline_min_event_throughput_per_sec", scenario, baseline.MinEventThroughputPerSecond); + } + + if (report.DurationRegressionRatio is { } durationRatio) + { + AppendMetric(builder, "linknotmerge_vex_bench_duration_regression_ratio", scenario, durationRatio); + } + + if (report.ObservationThroughputRegressionRatio is { } obsRatio) + { + AppendMetric(builder, "linknotmerge_vex_bench_observation_regression_ratio", scenario, obsRatio); + } + + if (report.EventThroughputRegressionRatio is { } evtRatio) + { + AppendMetric(builder, "linknotmerge_vex_bench_event_regression_ratio", scenario, evtRatio); + } + + AppendMetric(builder, "linknotmerge_vex_bench_regression_limit", scenario, report.RegressionLimit); + AppendMetric(builder, "linknotmerge_vex_bench_regression_breached", scenario, report.RegressionBreached ? 1 : 0); + } + + File.WriteAllText(resolved, builder.ToString(), Encoding.UTF8); + } + + private static void AppendMetric(StringBuilder builder, string metric, string scenario, double? value) + { + if (!value.HasValue) + { + return; + } + + builder.Append(metric); + builder.Append("{scenario=\""); + builder.Append(scenario); + builder.Append("\"} "); + builder.AppendLine(value.Value.ToString("G17", CultureInfo.InvariantCulture)); + } + + private static string Escape(string value) => + value.Replace("\\", "\\\\", StringComparison.Ordinal).Replace("\"", "\\\"", StringComparison.Ordinal); +} diff --git a/src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Statistics.cs b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Statistics.cs similarity index 96% rename from src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Statistics.cs rename to src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Statistics.cs index a9277c97..98ab4df9 100644 --- a/src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Statistics.cs +++ b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/Statistics.cs @@ -1,84 +1,84 @@ -namespace StellaOps.Bench.LinkNotMerge.Vex; - -internal readonly record struct DurationStatistics(double MeanMs, double P95Ms, double MaxMs) -{ - public static DurationStatistics From(IReadOnlyList values) - { - if (values.Count == 0) - { - return new DurationStatistics(0, 0, 0); - } - - var sorted = values.ToArray(); - Array.Sort(sorted); - - var total = 0d; - foreach (var value in values) - { - total += value; - } - - var mean = total / values.Count; - var p95 = Percentile(sorted, 95); - var max = sorted[^1]; - - return new DurationStatistics(mean, p95, max); - } - - private static double Percentile(IReadOnlyList sorted, double percentile) - { - if (sorted.Count == 0) - { - return 0; - } - - var rank = (percentile / 100d) * (sorted.Count - 1); - var lower = (int)Math.Floor(rank); - var upper = (int)Math.Ceiling(rank); - var weight = rank - lower; - - if (upper >= sorted.Count) - { - return sorted[lower]; - } - - return sorted[lower] + weight * (sorted[upper] - sorted[lower]); - } -} - -internal readonly record struct ThroughputStatistics(double MeanPerSecond, double MinPerSecond) -{ - public static ThroughputStatistics From(IReadOnlyList values) - { - if (values.Count == 0) - { - return new 
ThroughputStatistics(0, 0); - } - - var total = 0d; - var min = double.MaxValue; - - foreach (var value in values) - { - total += value; - min = Math.Min(min, value); - } - - var mean = total / values.Count; - return new ThroughputStatistics(mean, min); - } -} - -internal readonly record struct AllocationStatistics(double MaxAllocatedMb) -{ - public static AllocationStatistics From(IReadOnlyList values) - { - var max = 0d; - foreach (var value in values) - { - max = Math.Max(max, value); - } - - return new AllocationStatistics(max); - } -} +namespace StellaOps.Bench.LinkNotMerge.Vex; + +internal readonly record struct DurationStatistics(double MeanMs, double P95Ms, double MaxMs) +{ + public static DurationStatistics From(IReadOnlyList values) + { + if (values.Count == 0) + { + return new DurationStatistics(0, 0, 0); + } + + var sorted = values.ToArray(); + Array.Sort(sorted); + + var total = 0d; + foreach (var value in values) + { + total += value; + } + + var mean = total / values.Count; + var p95 = Percentile(sorted, 95); + var max = sorted[^1]; + + return new DurationStatistics(mean, p95, max); + } + + private static double Percentile(IReadOnlyList sorted, double percentile) + { + if (sorted.Count == 0) + { + return 0; + } + + var rank = (percentile / 100d) * (sorted.Count - 1); + var lower = (int)Math.Floor(rank); + var upper = (int)Math.Ceiling(rank); + var weight = rank - lower; + + if (upper >= sorted.Count) + { + return sorted[lower]; + } + + return sorted[lower] + weight * (sorted[upper] - sorted[lower]); + } +} + +internal readonly record struct ThroughputStatistics(double MeanPerSecond, double MinPerSecond) +{ + public static ThroughputStatistics From(IReadOnlyList values) + { + if (values.Count == 0) + { + return new ThroughputStatistics(0, 0); + } + + var total = 0d; + var min = double.MaxValue; + + foreach (var value in values) + { + total += value; + min = Math.Min(min, value); + } + + var mean = total / values.Count; + return new ThroughputStatistics(mean, min); + } +} + +internal readonly record struct AllocationStatistics(double MaxAllocatedMb) +{ + public static AllocationStatistics From(IReadOnlyList values) + { + var max = 0d; + foreach (var value in values) + { + max = Math.Max(max, value); + } + + return new AllocationStatistics(max); + } +} diff --git a/src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.csproj b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.csproj similarity index 97% rename from src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.csproj rename to src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.csproj index 7167d07e..a57ee461 100644 --- a/src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.csproj +++ b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex.csproj @@ -1,16 +1,16 @@ - - - Exe - net10.0 - enable - enable - preview - true - - - - - - - - + + + Exe + net10.0 + enable + enable + preview + true + + + + + + + + diff --git a/src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexLinksetAggregator.cs b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexLinksetAggregator.cs similarity index 97% rename from 
src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexLinksetAggregator.cs rename to src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexLinksetAggregator.cs index d2349087..721cdff1 100644 --- a/src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexLinksetAggregator.cs +++ b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexLinksetAggregator.cs @@ -1,166 +1,166 @@ -using MongoDB.Bson; - -namespace StellaOps.Bench.LinkNotMerge.Vex; - -internal sealed class VexLinksetAggregator -{ - public VexAggregationResult Correlate(IEnumerable documents) - { - ArgumentNullException.ThrowIfNull(documents); - - var groups = new Dictionary(StringComparer.Ordinal); - var statementsSeen = 0; - - foreach (var document in documents) - { - var tenant = document.GetValue("tenant", "unknown").AsString; - var linksetValue = document.GetValue("linkset", new BsonDocument()); - var linkset = linksetValue.IsBsonDocument ? linksetValue.AsBsonDocument : new BsonDocument(); - var aliases = linkset.GetValue("aliases", new BsonArray()).AsBsonArray; - - var statementsValue = document.GetValue("statements", new BsonArray()); - var statements = statementsValue.IsBsonArray ? statementsValue.AsBsonArray : new BsonArray(); - - foreach (var statementValue in statements) - { - if (!statementValue.IsBsonDocument) - { - continue; - } - - statementsSeen++; - - var statement = statementValue.AsBsonDocument; - var status = statement.GetValue("status", "unknown").AsString; - var justification = statement.GetValue("justification", BsonNull.Value); - var lastUpdated = statement.GetValue("last_updated", BsonNull.Value); - var productValue = statement.GetValue("product", new BsonDocument()); - var product = productValue.IsBsonDocument ? productValue.AsBsonDocument : new BsonDocument(); - var productKey = product.GetValue("purl", "unknown").AsString; - - foreach (var aliasValue in aliases) - { - if (!aliasValue.IsString) - { - continue; - } - - var alias = aliasValue.AsString; - var key = string.Create(alias.Length + tenant.Length + productKey.Length + 2, (tenant, alias, productKey), static (span, data) => - { - var (tenantValue, aliasValue, productValue) = data; - var offset = 0; - tenantValue.AsSpan().CopyTo(span); - offset += tenantValue.Length; - span[offset++] = '|'; - aliasValue.AsSpan().CopyTo(span[offset..]); - offset += aliasValue.Length; - span[offset++] = '|'; - productValue.AsSpan().CopyTo(span[offset..]); - }); - - if (!groups.TryGetValue(key, out var accumulator)) - { - accumulator = new VexAccumulator(tenant, alias, productKey); - groups[key] = accumulator; - } - - accumulator.AddStatement(status, justification, lastUpdated); - } - } - } - - var eventDocuments = new List(groups.Count); - foreach (var accumulator in groups.Values) - { - if (accumulator.ShouldEmitEvent) - { - eventDocuments.Add(accumulator.ToEvent()); - } - } - - return new VexAggregationResult( - LinksetCount: groups.Count, - StatementCount: statementsSeen, - EventCount: eventDocuments.Count, - EventDocuments: eventDocuments); - } - - private sealed class VexAccumulator - { - private readonly Dictionary _statusCounts = new(StringComparer.Ordinal); - private readonly HashSet _justifications = new(StringComparer.Ordinal); - private readonly string _tenant; - private readonly string _alias; - private readonly string _product; - private DateTime? 
_latest; - - public VexAccumulator(string tenant, string alias, string product) - { - _tenant = tenant; - _alias = alias; - _product = product; - } - - public void AddStatement(string status, BsonValue justification, BsonValue updatedAt) - { - if (!_statusCounts.TryAdd(status, 1)) - { - _statusCounts[status]++; - } - - if (justification.IsString) - { - _justifications.Add(justification.AsString); - } - - if (updatedAt.IsValidDateTime) - { - var value = updatedAt.ToUniversalTime(); - if (!_latest.HasValue || value > _latest) - { - _latest = value; - } - } - } - - public bool ShouldEmitEvent - { - get - { - if (_statusCounts.TryGetValue("affected", out var affected) && affected > 0) - { - return true; - } - - if (_statusCounts.TryGetValue("under_investigation", out var investigating) && investigating > 0) - { - return true; - } - - return false; - } - } - - public BsonDocument ToEvent() - { - var payload = new BsonDocument - { - ["tenant"] = _tenant, - ["alias"] = _alias, - ["product"] = _product, - ["statuses"] = new BsonDocument(_statusCounts.Select(kvp => new BsonElement(kvp.Key, kvp.Value))), - ["justifications"] = new BsonArray(_justifications.Select(justification => justification)), - ["last_updated"] = _latest.HasValue ? _latest.Value : (BsonValue)BsonNull.Value, - }; - - return payload; - } - } -} - -internal sealed record VexAggregationResult( - int LinksetCount, - int StatementCount, - int EventCount, - IReadOnlyList EventDocuments); +using MongoDB.Bson; + +namespace StellaOps.Bench.LinkNotMerge.Vex; + +internal sealed class VexLinksetAggregator +{ + public VexAggregationResult Correlate(IEnumerable documents) + { + ArgumentNullException.ThrowIfNull(documents); + + var groups = new Dictionary(StringComparer.Ordinal); + var statementsSeen = 0; + + foreach (var document in documents) + { + var tenant = document.GetValue("tenant", "unknown").AsString; + var linksetValue = document.GetValue("linkset", new BsonDocument()); + var linkset = linksetValue.IsBsonDocument ? linksetValue.AsBsonDocument : new BsonDocument(); + var aliases = linkset.GetValue("aliases", new BsonArray()).AsBsonArray; + + var statementsValue = document.GetValue("statements", new BsonArray()); + var statements = statementsValue.IsBsonArray ? statementsValue.AsBsonArray : new BsonArray(); + + foreach (var statementValue in statements) + { + if (!statementValue.IsBsonDocument) + { + continue; + } + + statementsSeen++; + + var statement = statementValue.AsBsonDocument; + var status = statement.GetValue("status", "unknown").AsString; + var justification = statement.GetValue("justification", BsonNull.Value); + var lastUpdated = statement.GetValue("last_updated", BsonNull.Value); + var productValue = statement.GetValue("product", new BsonDocument()); + var product = productValue.IsBsonDocument ? 
productValue.AsBsonDocument : new BsonDocument(); + var productKey = product.GetValue("purl", "unknown").AsString; + + foreach (var aliasValue in aliases) + { + if (!aliasValue.IsString) + { + continue; + } + + var alias = aliasValue.AsString; + var key = string.Create(alias.Length + tenant.Length + productKey.Length + 2, (tenant, alias, productKey), static (span, data) => + { + var (tenantValue, aliasValue, productValue) = data; + var offset = 0; + tenantValue.AsSpan().CopyTo(span); + offset += tenantValue.Length; + span[offset++] = '|'; + aliasValue.AsSpan().CopyTo(span[offset..]); + offset += aliasValue.Length; + span[offset++] = '|'; + productValue.AsSpan().CopyTo(span[offset..]); + }); + + if (!groups.TryGetValue(key, out var accumulator)) + { + accumulator = new VexAccumulator(tenant, alias, productKey); + groups[key] = accumulator; + } + + accumulator.AddStatement(status, justification, lastUpdated); + } + } + } + + var eventDocuments = new List(groups.Count); + foreach (var accumulator in groups.Values) + { + if (accumulator.ShouldEmitEvent) + { + eventDocuments.Add(accumulator.ToEvent()); + } + } + + return new VexAggregationResult( + LinksetCount: groups.Count, + StatementCount: statementsSeen, + EventCount: eventDocuments.Count, + EventDocuments: eventDocuments); + } + + private sealed class VexAccumulator + { + private readonly Dictionary _statusCounts = new(StringComparer.Ordinal); + private readonly HashSet _justifications = new(StringComparer.Ordinal); + private readonly string _tenant; + private readonly string _alias; + private readonly string _product; + private DateTime? _latest; + + public VexAccumulator(string tenant, string alias, string product) + { + _tenant = tenant; + _alias = alias; + _product = product; + } + + public void AddStatement(string status, BsonValue justification, BsonValue updatedAt) + { + if (!_statusCounts.TryAdd(status, 1)) + { + _statusCounts[status]++; + } + + if (justification.IsString) + { + _justifications.Add(justification.AsString); + } + + if (updatedAt.IsValidDateTime) + { + var value = updatedAt.ToUniversalTime(); + if (!_latest.HasValue || value > _latest) + { + _latest = value; + } + } + } + + public bool ShouldEmitEvent + { + get + { + if (_statusCounts.TryGetValue("affected", out var affected) && affected > 0) + { + return true; + } + + if (_statusCounts.TryGetValue("under_investigation", out var investigating) && investigating > 0) + { + return true; + } + + return false; + } + } + + public BsonDocument ToEvent() + { + var payload = new BsonDocument + { + ["tenant"] = _tenant, + ["alias"] = _alias, + ["product"] = _product, + ["statuses"] = new BsonDocument(_statusCounts.Select(kvp => new BsonElement(kvp.Key, kvp.Value))), + ["justifications"] = new BsonArray(_justifications.Select(justification => justification)), + ["last_updated"] = _latest.HasValue ? 
_latest.Value : (BsonValue)BsonNull.Value, + }; + + return payload; + } + } +} + +internal sealed record VexAggregationResult( + int LinksetCount, + int StatementCount, + int EventCount, + IReadOnlyList EventDocuments); diff --git a/src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexObservationGenerator.cs b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexObservationGenerator.cs similarity index 97% rename from src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexObservationGenerator.cs rename to src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexObservationGenerator.cs index 27bd9678..0ae6f5fa 100644 --- a/src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexObservationGenerator.cs +++ b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexObservationGenerator.cs @@ -1,252 +1,252 @@ -using System.Collections.Immutable; -using System.Security.Cryptography; -using MongoDB.Bson; - -namespace StellaOps.Bench.LinkNotMerge.Vex; - -internal static class VexObservationGenerator -{ - private static readonly ImmutableArray StatusPool = ImmutableArray.Create( - "affected", - "not_affected", - "under_investigation"); - - private static readonly ImmutableArray JustificationPool = ImmutableArray.Create( - "exploitation_mitigated", - "component_not_present", - "vulnerable_code_not_present", - "vulnerable_code_not_in_execute_path"); - - public static IReadOnlyList Generate(VexScenarioConfig config) - { - ArgumentNullException.ThrowIfNull(config); - - var observationCount = config.ResolveObservationCount(); - var aliasGroups = config.ResolveAliasGroups(); - var statementsPerObservation = config.ResolveStatementsPerObservation(); - var tenantCount = config.ResolveTenantCount(); - var productsPerObservation = config.ResolveProductsPerObservation(); - var seed = config.ResolveSeed(); - - var seeds = new VexObservationSeed[observationCount]; - var random = new Random(seed); - var baseTime = new DateTimeOffset(2025, 10, 1, 0, 0, 0, TimeSpan.Zero); - - for (var index = 0; index < observationCount; index++) - { - var tenantIndex = index % tenantCount; - var tenant = $"tenant-{tenantIndex:D2}"; - var group = index % aliasGroups; - var revision = index / aliasGroups; - var vulnerabilityAlias = $"CVE-2025-{group:D4}"; - var upstreamId = $"VEX-{group:D4}-{revision:D3}"; - var observationId = $"{tenant}:vex:{group:D5}:{revision:D6}"; - - var fetchedAt = baseTime.AddMinutes(revision); - var receivedAt = fetchedAt.AddSeconds(2); - var documentVersion = fetchedAt.AddSeconds(15).ToString("O"); - - var products = CreateProducts(group, revision, productsPerObservation); - var statements = CreateStatements(vulnerabilityAlias, products, statementsPerObservation, random, fetchedAt); - var rawPayload = CreateRawPayload(upstreamId, vulnerabilityAlias, statements); - var contentHash = ComputeContentHash(rawPayload, tenant, group, revision); - - var aliases = ImmutableArray.Create(vulnerabilityAlias, $"GHSA-{group:D4}-{revision % 26 + 'a'}{revision % 26 + 'a'}"); - var references = ImmutableArray.Create( - new VexReference("advisory", $"https://vendor.example/advisories/{vulnerabilityAlias.ToLowerInvariant()}"), - new VexReference("fix", $"https://vendor.example/patch/{vulnerabilityAlias.ToLowerInvariant()}")); - - seeds[index] = new VexObservationSeed( - ObservationId: observationId, - Tenant: tenant, - Vendor: "excititor-bench", - Stream: "simulated", - Api: 
$"https://bench.stella/vex/{group:D4}/{revision:D3}", - CollectorVersion: "1.0.0-bench", - UpstreamId: upstreamId, - DocumentVersion: documentVersion, - FetchedAt: fetchedAt, - ReceivedAt: receivedAt, - ContentHash: contentHash, - VulnerabilityAlias: vulnerabilityAlias, - Aliases: aliases, - Products: products, - Statements: statements, - References: references, - ContentFormat: "CycloneDX-VEX", - SpecVersion: "1.4", - RawPayload: rawPayload); - } - - return seeds; - } - - private static ImmutableArray CreateProducts(int group, int revision, int count) - { - var builder = ImmutableArray.CreateBuilder(count); - for (var index = 0; index < count; index++) - { - var purl = $"pkg:generic/stella/product-{group:D4}-{index}@{1 + revision % 5}.{index + 1}.{revision % 9}"; - builder.Add(new VexProduct(purl, $"component-{group % 30:D2}", $"namespace-{group % 10:D2}")); - } - - return builder.MoveToImmutable(); - } - - private static ImmutableArray CreateStatements( - string vulnerabilityAlias, - ImmutableArray products, - int statementsPerObservation, - Random random, - DateTimeOffset baseTime) - { - var builder = ImmutableArray.CreateBuilder(statementsPerObservation); - for (var index = 0; index < statementsPerObservation; index++) - { - var statusIndex = random.Next(StatusPool.Length); - var status = StatusPool[statusIndex]; - var justification = JustificationPool[random.Next(JustificationPool.Length)]; - var product = products[index % products.Length]; - var statementId = $"stmt-{vulnerabilityAlias}-{index:D2}"; - - var document = new BsonDocument - { - ["statement_id"] = statementId, - ["vulnerability_alias"] = vulnerabilityAlias, - ["product"] = new BsonDocument - { - ["purl"] = product.Purl, - ["component"] = product.Component, - ["namespace"] = product.Namespace, - }, - ["status"] = status, - ["justification"] = justification, - ["impact"] = status == "affected" ? 
"high" : "none", - ["last_updated"] = baseTime.AddMinutes(index).UtcDateTime, - }; - - builder.Add(document); - } - - return builder.MoveToImmutable(); - } - - private static BsonDocument CreateRawPayload(string upstreamId, string vulnerabilityAlias, ImmutableArray statements) - { - var doc = new BsonDocument - { - ["documentId"] = upstreamId, - ["title"] = $"Simulated VEX report {upstreamId}", - ["summary"] = $"Synthetic VEX payload for {vulnerabilityAlias}.", - ["statements"] = new BsonArray(statements), - }; - - return doc; - } - - private static string ComputeContentHash(BsonDocument rawPayload, string tenant, int group, int revision) - { - using var sha256 = SHA256.Create(); - var seed = $"{tenant}|{group}|{revision}"; - var rawBytes = rawPayload.ToBson(); - var seedBytes = System.Text.Encoding.UTF8.GetBytes(seed); - var combined = new byte[rawBytes.Length + seedBytes.Length]; - Buffer.BlockCopy(rawBytes, 0, combined, 0, rawBytes.Length); - Buffer.BlockCopy(seedBytes, 0, combined, rawBytes.Length, seedBytes.Length); - var hash = sha256.ComputeHash(combined); - return $"sha256:{Convert.ToHexString(hash)}"; - } -} - -internal sealed record VexObservationSeed( - string ObservationId, - string Tenant, - string Vendor, - string Stream, - string Api, - string CollectorVersion, - string UpstreamId, - string DocumentVersion, - DateTimeOffset FetchedAt, - DateTimeOffset ReceivedAt, - string ContentHash, - string VulnerabilityAlias, - ImmutableArray Aliases, - ImmutableArray Products, - ImmutableArray Statements, - ImmutableArray References, - string ContentFormat, - string SpecVersion, - BsonDocument RawPayload) -{ - public BsonDocument ToBsonDocument() - { - var aliases = new BsonArray(Aliases.Select(alias => alias)); - var statements = new BsonArray(Statements); - var productsArray = new BsonArray(Products.Select(product => new BsonDocument - { - ["purl"] = product.Purl, - ["component"] = product.Component, - ["namespace"] = product.Namespace, - })); - var references = new BsonArray(References.Select(reference => new BsonDocument - { - ["type"] = reference.Type, - ["url"] = reference.Url, - })); - - var document = new BsonDocument - { - ["_id"] = ObservationId, - ["tenant"] = Tenant, - ["source"] = new BsonDocument - { - ["vendor"] = Vendor, - ["stream"] = Stream, - ["api"] = Api, - ["collector_version"] = CollectorVersion, - }, - ["upstream"] = new BsonDocument - { - ["upstream_id"] = UpstreamId, - ["document_version"] = DocumentVersion, - ["fetched_at"] = FetchedAt.UtcDateTime, - ["received_at"] = ReceivedAt.UtcDateTime, - ["content_hash"] = ContentHash, - ["signature"] = new BsonDocument - { - ["present"] = false, - ["format"] = BsonNull.Value, - ["key_id"] = BsonNull.Value, - ["signature"] = BsonNull.Value, - }, - }, - ["content"] = new BsonDocument - { - ["format"] = ContentFormat, - ["spec_version"] = SpecVersion, - ["raw"] = RawPayload, - }, - ["identifiers"] = new BsonDocument - { - ["aliases"] = aliases, - ["primary"] = VulnerabilityAlias, - }, - ["statements"] = statements, - ["linkset"] = new BsonDocument - { - ["aliases"] = aliases, - ["products"] = productsArray, - ["references"] = references, - ["reconciled_from"] = new BsonArray { "/statements" }, - }, - ["supersedes"] = BsonNull.Value, - }; - - return document; - } -} - -internal sealed record VexProduct(string Purl, string Component, string Namespace); - -internal sealed record VexReference(string Type, string Url); +using System.Collections.Immutable; +using System.Security.Cryptography; +using MongoDB.Bson; + +namespace 
StellaOps.Bench.LinkNotMerge.Vex;
+
+internal static class VexObservationGenerator
+{
+    private static readonly ImmutableArray<string> StatusPool = ImmutableArray.Create(
+        "affected",
+        "not_affected",
+        "under_investigation");
+
+    private static readonly ImmutableArray<string> JustificationPool = ImmutableArray.Create(
+        "exploitation_mitigated",
+        "component_not_present",
+        "vulnerable_code_not_present",
+        "vulnerable_code_not_in_execute_path");
+
+    public static IReadOnlyList<VexObservationSeed> Generate(VexScenarioConfig config)
+    {
+        ArgumentNullException.ThrowIfNull(config);
+
+        var observationCount = config.ResolveObservationCount();
+        var aliasGroups = config.ResolveAliasGroups();
+        var statementsPerObservation = config.ResolveStatementsPerObservation();
+        var tenantCount = config.ResolveTenantCount();
+        var productsPerObservation = config.ResolveProductsPerObservation();
+        var seed = config.ResolveSeed();
+
+        var seeds = new VexObservationSeed[observationCount];
+        var random = new Random(seed);
+        var baseTime = new DateTimeOffset(2025, 10, 1, 0, 0, 0, TimeSpan.Zero);
+
+        for (var index = 0; index < observationCount; index++)
+        {
+            var tenantIndex = index % tenantCount;
+            var tenant = $"tenant-{tenantIndex:D2}";
+            var group = index % aliasGroups;
+            var revision = index / aliasGroups;
+            var vulnerabilityAlias = $"CVE-2025-{group:D4}";
+            var upstreamId = $"VEX-{group:D4}-{revision:D3}";
+            var observationId = $"{tenant}:vex:{group:D5}:{revision:D6}";
+
+            var fetchedAt = baseTime.AddMinutes(revision);
+            var receivedAt = fetchedAt.AddSeconds(2);
+            var documentVersion = fetchedAt.AddSeconds(15).ToString("O");
+
+            var products = CreateProducts(group, revision, productsPerObservation);
+            var statements = CreateStatements(vulnerabilityAlias, products, statementsPerObservation, random, fetchedAt);
+            var rawPayload = CreateRawPayload(upstreamId, vulnerabilityAlias, statements);
+            var contentHash = ComputeContentHash(rawPayload, tenant, group, revision);
+
+            var aliases = ImmutableArray.Create(vulnerabilityAlias, $"GHSA-{group:D4}-{revision % 26 + 'a'}{revision % 26 + 'a'}");
+            var references = ImmutableArray.Create(
+                new VexReference("advisory", $"https://vendor.example/advisories/{vulnerabilityAlias.ToLowerInvariant()}"),
+                new VexReference("fix", $"https://vendor.example/patch/{vulnerabilityAlias.ToLowerInvariant()}"));
+
+            seeds[index] = new VexObservationSeed(
+                ObservationId: observationId,
+                Tenant: tenant,
+                Vendor: "excititor-bench",
+                Stream: "simulated",
+                Api: $"https://bench.stella/vex/{group:D4}/{revision:D3}",
+                CollectorVersion: "1.0.0-bench",
+                UpstreamId: upstreamId,
+                DocumentVersion: documentVersion,
+                FetchedAt: fetchedAt,
+                ReceivedAt: receivedAt,
+                ContentHash: contentHash,
+                VulnerabilityAlias: vulnerabilityAlias,
+                Aliases: aliases,
+                Products: products,
+                Statements: statements,
+                References: references,
+                ContentFormat: "CycloneDX-VEX",
+                SpecVersion: "1.4",
+                RawPayload: rawPayload);
+        }
+
+        return seeds;
+    }
+
+    private static ImmutableArray<VexProduct> CreateProducts(int group, int revision, int count)
+    {
+        var builder = ImmutableArray.CreateBuilder<VexProduct>(count);
+        for (var index = 0; index < count; index++)
+        {
+            var purl = $"pkg:generic/stella/product-{group:D4}-{index}@{1 + revision % 5}.{index + 1}.{revision % 9}";
+            builder.Add(new VexProduct(purl, $"component-{group % 30:D2}", $"namespace-{group % 10:D2}"));
+        }
+
+        return builder.MoveToImmutable();
+    }
+
+    private static ImmutableArray<BsonDocument> CreateStatements(
+        string vulnerabilityAlias,
+        ImmutableArray<VexProduct> products,
+        int statementsPerObservation,
+        Random random,
+        DateTimeOffset baseTime)
+    {
+        var builder = ImmutableArray.CreateBuilder<BsonDocument>(statementsPerObservation);
+        for (var index = 0; index < statementsPerObservation; index++)
+        {
+            var statusIndex = random.Next(StatusPool.Length);
+            var status = StatusPool[statusIndex];
+            var justification = JustificationPool[random.Next(JustificationPool.Length)];
+            var product = products[index % products.Length];
+            var statementId = $"stmt-{vulnerabilityAlias}-{index:D2}";
+
+            var document = new BsonDocument
+            {
+                ["statement_id"] = statementId,
+                ["vulnerability_alias"] = vulnerabilityAlias,
+                ["product"] = new BsonDocument
+                {
+                    ["purl"] = product.Purl,
+                    ["component"] = product.Component,
+                    ["namespace"] = product.Namespace,
+                },
+                ["status"] = status,
+                ["justification"] = justification,
+                ["impact"] = status == "affected" ? "high" : "none",
+                ["last_updated"] = baseTime.AddMinutes(index).UtcDateTime,
+            };
+
+            builder.Add(document);
+        }
+
+        return builder.MoveToImmutable();
+    }
+
+    private static BsonDocument CreateRawPayload(string upstreamId, string vulnerabilityAlias, ImmutableArray<BsonDocument> statements)
+    {
+        var doc = new BsonDocument
+        {
+            ["documentId"] = upstreamId,
+            ["title"] = $"Simulated VEX report {upstreamId}",
+            ["summary"] = $"Synthetic VEX payload for {vulnerabilityAlias}.",
+            ["statements"] = new BsonArray(statements),
+        };
+
+        return doc;
+    }
+
+    private static string ComputeContentHash(BsonDocument rawPayload, string tenant, int group, int revision)
+    {
+        using var sha256 = SHA256.Create();
+        var seed = $"{tenant}|{group}|{revision}";
+        var rawBytes = rawPayload.ToBson();
+        var seedBytes = System.Text.Encoding.UTF8.GetBytes(seed);
+        var combined = new byte[rawBytes.Length + seedBytes.Length];
+        Buffer.BlockCopy(rawBytes, 0, combined, 0, rawBytes.Length);
+        Buffer.BlockCopy(seedBytes, 0, combined, rawBytes.Length, seedBytes.Length);
+        var hash = sha256.ComputeHash(combined);
+        return $"sha256:{Convert.ToHexString(hash)}";
+    }
+}
+
+internal sealed record VexObservationSeed(
+    string ObservationId,
+    string Tenant,
+    string Vendor,
+    string Stream,
+    string Api,
+    string CollectorVersion,
+    string UpstreamId,
+    string DocumentVersion,
+    DateTimeOffset FetchedAt,
+    DateTimeOffset ReceivedAt,
+    string ContentHash,
+    string VulnerabilityAlias,
+    ImmutableArray<string> Aliases,
+    ImmutableArray<VexProduct> Products,
+    ImmutableArray<BsonDocument> Statements,
+    ImmutableArray<VexReference> References,
+    string ContentFormat,
+    string SpecVersion,
+    BsonDocument RawPayload)
+{
+    public BsonDocument ToBsonDocument()
+    {
+        var aliases = new BsonArray(Aliases.Select(alias => alias));
+        var statements = new BsonArray(Statements);
+        var productsArray = new BsonArray(Products.Select(product => new BsonDocument
+        {
+            ["purl"] = product.Purl,
+            ["component"] = product.Component,
+            ["namespace"] = product.Namespace,
+        }));
+        var references = new BsonArray(References.Select(reference => new BsonDocument
+        {
+            ["type"] = reference.Type,
+            ["url"] = reference.Url,
+        }));
+
+        var document = new BsonDocument
+        {
+            ["_id"] = ObservationId,
+            ["tenant"] = Tenant,
+            ["source"] = new BsonDocument
+            {
+                ["vendor"] = Vendor,
+                ["stream"] = Stream,
+                ["api"] = Api,
+                ["collector_version"] = CollectorVersion,
+            },
+            ["upstream"] = new BsonDocument
+            {
+                ["upstream_id"] = UpstreamId,
+                ["document_version"] = DocumentVersion,
+                ["fetched_at"] = FetchedAt.UtcDateTime,
+                ["received_at"] = ReceivedAt.UtcDateTime,
+                ["content_hash"] = ContentHash,
+                ["signature"] = new BsonDocument
+                {
+                    ["present"] = false,
+                    ["format"] = BsonNull.Value,
+                    ["key_id"] = BsonNull.Value,
+
["signature"] = BsonNull.Value, + }, + }, + ["content"] = new BsonDocument + { + ["format"] = ContentFormat, + ["spec_version"] = SpecVersion, + ["raw"] = RawPayload, + }, + ["identifiers"] = new BsonDocument + { + ["aliases"] = aliases, + ["primary"] = VulnerabilityAlias, + }, + ["statements"] = statements, + ["linkset"] = new BsonDocument + { + ["aliases"] = aliases, + ["products"] = productsArray, + ["references"] = references, + ["reconciled_from"] = new BsonArray { "/statements" }, + }, + ["supersedes"] = BsonNull.Value, + }; + + return document; + } +} + +internal sealed record VexProduct(string Purl, string Component, string Namespace); + +internal sealed record VexReference(string Type, string Url); diff --git a/src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexScenarioConfig.cs b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexScenarioConfig.cs similarity index 97% rename from src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexScenarioConfig.cs rename to src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexScenarioConfig.cs index 9feeb997..210f1fd6 100644 --- a/src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexScenarioConfig.cs +++ b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexScenarioConfig.cs @@ -1,183 +1,183 @@ -using System.Text.Json; -using System.Text.Json.Serialization; - -namespace StellaOps.Bench.LinkNotMerge.Vex; - -internal sealed record VexBenchmarkConfig( - double? ThresholdMs, - double? MinThroughputPerSecond, - double? MinEventThroughputPerSecond, - double? MaxAllocatedMb, - int? Iterations, - IReadOnlyList Scenarios) -{ - public static async Task LoadAsync(string path) - { - ArgumentException.ThrowIfNullOrWhiteSpace(path); - - var resolved = Path.GetFullPath(path); - if (!File.Exists(resolved)) - { - throw new FileNotFoundException($"Benchmark configuration '{resolved}' was not found.", resolved); - } - - await using var stream = File.OpenRead(resolved); - var model = await JsonSerializer.DeserializeAsync( - stream, - new JsonSerializerOptions(JsonSerializerDefaults.Web) - { - PropertyNameCaseInsensitive = true, - ReadCommentHandling = JsonCommentHandling.Skip, - AllowTrailingCommas = true, - }).ConfigureAwait(false); - - if (model is null) - { - throw new InvalidOperationException($"Benchmark configuration '{resolved}' could not be parsed."); - } - - if (model.Scenarios.Count == 0) - { - throw new InvalidOperationException($"Benchmark configuration '{resolved}' does not contain any scenarios."); - } - - foreach (var scenario in model.Scenarios) - { - scenario.Validate(); - } - - return new VexBenchmarkConfig( - model.ThresholdMs, - model.MinThroughputPerSecond, - model.MinEventThroughputPerSecond, - model.MaxAllocatedMb, - model.Iterations, - model.Scenarios); - } - - private sealed class VexBenchmarkConfigModel - { - [JsonPropertyName("thresholdMs")] - public double? ThresholdMs { get; init; } - - [JsonPropertyName("minThroughputPerSecond")] - public double? MinThroughputPerSecond { get; init; } - - [JsonPropertyName("minEventThroughputPerSecond")] - public double? MinEventThroughputPerSecond { get; init; } - - [JsonPropertyName("maxAllocatedMb")] - public double? MaxAllocatedMb { get; init; } - - [JsonPropertyName("iterations")] - public int? 
Iterations { get; init; } - - [JsonPropertyName("scenarios")] - public List Scenarios { get; init; } = new(); - } -} - -internal sealed class VexScenarioConfig -{ - private const int DefaultObservationCount = 4_000; - private const int DefaultAliasGroups = 400; - private const int DefaultStatementsPerObservation = 6; - private const int DefaultProductsPerObservation = 3; - private const int DefaultTenants = 3; - private const int DefaultBatchSize = 250; - private const int DefaultSeed = 520_025; - - [JsonPropertyName("id")] - public string? Id { get; init; } - - [JsonPropertyName("label")] - public string? Label { get; init; } - - [JsonPropertyName("observations")] - public int? Observations { get; init; } - - [JsonPropertyName("aliasGroups")] - public int? AliasGroups { get; init; } - - [JsonPropertyName("statementsPerObservation")] - public int? StatementsPerObservation { get; init; } - - [JsonPropertyName("productsPerObservation")] - public int? ProductsPerObservation { get; init; } - - [JsonPropertyName("tenants")] - public int? Tenants { get; init; } - - [JsonPropertyName("batchSize")] - public int? BatchSize { get; init; } - - [JsonPropertyName("seed")] - public int? Seed { get; init; } - - [JsonPropertyName("iterations")] - public int? Iterations { get; init; } - - [JsonPropertyName("thresholdMs")] - public double? ThresholdMs { get; init; } - - [JsonPropertyName("minThroughputPerSecond")] - public double? MinThroughputPerSecond { get; init; } - - [JsonPropertyName("minEventThroughputPerSecond")] - public double? MinEventThroughputPerSecond { get; init; } - - [JsonPropertyName("maxAllocatedMb")] - public double? MaxAllocatedMb { get; init; } - - public string ScenarioId => string.IsNullOrWhiteSpace(Id) ? "vex" : Id!.Trim(); - - public string DisplayLabel => string.IsNullOrWhiteSpace(Label) ? ScenarioId : Label!.Trim(); - - public int ResolveObservationCount() => Observations is > 0 ? Observations.Value : DefaultObservationCount; - - public int ResolveAliasGroups() => AliasGroups is > 0 ? AliasGroups.Value : DefaultAliasGroups; - - public int ResolveStatementsPerObservation() => StatementsPerObservation is > 0 ? StatementsPerObservation.Value : DefaultStatementsPerObservation; - - public int ResolveProductsPerObservation() => ProductsPerObservation is > 0 ? ProductsPerObservation.Value : DefaultProductsPerObservation; - - public int ResolveTenantCount() => Tenants is > 0 ? Tenants.Value : DefaultTenants; - - public int ResolveBatchSize() => BatchSize is > 0 ? BatchSize.Value : DefaultBatchSize; - - public int ResolveSeed() => Seed is > 0 ? Seed.Value : DefaultSeed; - - public int ResolveIterations(int? defaultIterations) - { - var iterations = Iterations ?? defaultIterations ?? 
3; - if (iterations <= 0) - { - throw new InvalidOperationException($"Scenario '{ScenarioId}' requires iterations > 0."); - } - - return iterations; - } - - public void Validate() - { - if (ResolveObservationCount() <= 0) - { - throw new InvalidOperationException($"Scenario '{ScenarioId}' requires observations > 0."); - } - - if (ResolveAliasGroups() <= 0) - { - throw new InvalidOperationException($"Scenario '{ScenarioId}' requires aliasGroups > 0."); - } - - if (ResolveStatementsPerObservation() <= 0) - { - throw new InvalidOperationException($"Scenario '{ScenarioId}' requires statementsPerObservation > 0."); - } - - if (ResolveProductsPerObservation() <= 0) - { - throw new InvalidOperationException($"Scenario '{ScenarioId}' requires productsPerObservation > 0."); - } - } -} +using System.Text.Json; +using System.Text.Json.Serialization; + +namespace StellaOps.Bench.LinkNotMerge.Vex; + +internal sealed record VexBenchmarkConfig( + double? ThresholdMs, + double? MinThroughputPerSecond, + double? MinEventThroughputPerSecond, + double? MaxAllocatedMb, + int? Iterations, + IReadOnlyList Scenarios) +{ + public static async Task LoadAsync(string path) + { + ArgumentException.ThrowIfNullOrWhiteSpace(path); + + var resolved = Path.GetFullPath(path); + if (!File.Exists(resolved)) + { + throw new FileNotFoundException($"Benchmark configuration '{resolved}' was not found.", resolved); + } + + await using var stream = File.OpenRead(resolved); + var model = await JsonSerializer.DeserializeAsync( + stream, + new JsonSerializerOptions(JsonSerializerDefaults.Web) + { + PropertyNameCaseInsensitive = true, + ReadCommentHandling = JsonCommentHandling.Skip, + AllowTrailingCommas = true, + }).ConfigureAwait(false); + + if (model is null) + { + throw new InvalidOperationException($"Benchmark configuration '{resolved}' could not be parsed."); + } + + if (model.Scenarios.Count == 0) + { + throw new InvalidOperationException($"Benchmark configuration '{resolved}' does not contain any scenarios."); + } + + foreach (var scenario in model.Scenarios) + { + scenario.Validate(); + } + + return new VexBenchmarkConfig( + model.ThresholdMs, + model.MinThroughputPerSecond, + model.MinEventThroughputPerSecond, + model.MaxAllocatedMb, + model.Iterations, + model.Scenarios); + } + + private sealed class VexBenchmarkConfigModel + { + [JsonPropertyName("thresholdMs")] + public double? ThresholdMs { get; init; } + + [JsonPropertyName("minThroughputPerSecond")] + public double? MinThroughputPerSecond { get; init; } + + [JsonPropertyName("minEventThroughputPerSecond")] + public double? MinEventThroughputPerSecond { get; init; } + + [JsonPropertyName("maxAllocatedMb")] + public double? MaxAllocatedMb { get; init; } + + [JsonPropertyName("iterations")] + public int? Iterations { get; init; } + + [JsonPropertyName("scenarios")] + public List Scenarios { get; init; } = new(); + } +} + +internal sealed class VexScenarioConfig +{ + private const int DefaultObservationCount = 4_000; + private const int DefaultAliasGroups = 400; + private const int DefaultStatementsPerObservation = 6; + private const int DefaultProductsPerObservation = 3; + private const int DefaultTenants = 3; + private const int DefaultBatchSize = 250; + private const int DefaultSeed = 520_025; + + [JsonPropertyName("id")] + public string? Id { get; init; } + + [JsonPropertyName("label")] + public string? Label { get; init; } + + [JsonPropertyName("observations")] + public int? Observations { get; init; } + + [JsonPropertyName("aliasGroups")] + public int? 
AliasGroups { get; init; } + + [JsonPropertyName("statementsPerObservation")] + public int? StatementsPerObservation { get; init; } + + [JsonPropertyName("productsPerObservation")] + public int? ProductsPerObservation { get; init; } + + [JsonPropertyName("tenants")] + public int? Tenants { get; init; } + + [JsonPropertyName("batchSize")] + public int? BatchSize { get; init; } + + [JsonPropertyName("seed")] + public int? Seed { get; init; } + + [JsonPropertyName("iterations")] + public int? Iterations { get; init; } + + [JsonPropertyName("thresholdMs")] + public double? ThresholdMs { get; init; } + + [JsonPropertyName("minThroughputPerSecond")] + public double? MinThroughputPerSecond { get; init; } + + [JsonPropertyName("minEventThroughputPerSecond")] + public double? MinEventThroughputPerSecond { get; init; } + + [JsonPropertyName("maxAllocatedMb")] + public double? MaxAllocatedMb { get; init; } + + public string ScenarioId => string.IsNullOrWhiteSpace(Id) ? "vex" : Id!.Trim(); + + public string DisplayLabel => string.IsNullOrWhiteSpace(Label) ? ScenarioId : Label!.Trim(); + + public int ResolveObservationCount() => Observations is > 0 ? Observations.Value : DefaultObservationCount; + + public int ResolveAliasGroups() => AliasGroups is > 0 ? AliasGroups.Value : DefaultAliasGroups; + + public int ResolveStatementsPerObservation() => StatementsPerObservation is > 0 ? StatementsPerObservation.Value : DefaultStatementsPerObservation; + + public int ResolveProductsPerObservation() => ProductsPerObservation is > 0 ? ProductsPerObservation.Value : DefaultProductsPerObservation; + + public int ResolveTenantCount() => Tenants is > 0 ? Tenants.Value : DefaultTenants; + + public int ResolveBatchSize() => BatchSize is > 0 ? BatchSize.Value : DefaultBatchSize; + + public int ResolveSeed() => Seed is > 0 ? Seed.Value : DefaultSeed; + + public int ResolveIterations(int? defaultIterations) + { + var iterations = Iterations ?? defaultIterations ?? 
3; + if (iterations <= 0) + { + throw new InvalidOperationException($"Scenario '{ScenarioId}' requires iterations > 0."); + } + + return iterations; + } + + public void Validate() + { + if (ResolveObservationCount() <= 0) + { + throw new InvalidOperationException($"Scenario '{ScenarioId}' requires observations > 0."); + } + + if (ResolveAliasGroups() <= 0) + { + throw new InvalidOperationException($"Scenario '{ScenarioId}' requires aliasGroups > 0."); + } + + if (ResolveStatementsPerObservation() <= 0) + { + throw new InvalidOperationException($"Scenario '{ScenarioId}' requires statementsPerObservation > 0."); + } + + if (ResolveProductsPerObservation() <= 0) + { + throw new InvalidOperationException($"Scenario '{ScenarioId}' requires productsPerObservation > 0."); + } + } +} diff --git a/src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexScenarioExecutionResult.cs b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexScenarioExecutionResult.cs similarity index 97% rename from src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexScenarioExecutionResult.cs rename to src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexScenarioExecutionResult.cs index f0cd962b..b1691264 100644 --- a/src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexScenarioExecutionResult.cs +++ b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexScenarioExecutionResult.cs @@ -1,14 +1,14 @@ -namespace StellaOps.Bench.LinkNotMerge.Vex; - -internal sealed record VexScenarioExecutionResult( - IReadOnlyList TotalDurationsMs, - IReadOnlyList InsertDurationsMs, - IReadOnlyList CorrelationDurationsMs, - IReadOnlyList AllocatedMb, - IReadOnlyList ObservationThroughputsPerSecond, - IReadOnlyList EventThroughputsPerSecond, - int ObservationCount, - int AliasGroups, - int StatementCount, - int EventCount, - VexAggregationResult AggregationResult); +namespace StellaOps.Bench.LinkNotMerge.Vex; + +internal sealed record VexScenarioExecutionResult( + IReadOnlyList TotalDurationsMs, + IReadOnlyList InsertDurationsMs, + IReadOnlyList CorrelationDurationsMs, + IReadOnlyList AllocatedMb, + IReadOnlyList ObservationThroughputsPerSecond, + IReadOnlyList EventThroughputsPerSecond, + int ObservationCount, + int AliasGroups, + int StatementCount, + int EventCount, + VexAggregationResult AggregationResult); diff --git a/src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexScenarioResult.cs b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexScenarioResult.cs similarity index 97% rename from src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexScenarioResult.cs rename to src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexScenarioResult.cs index f69e3143..06e3e60f 100644 --- a/src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexScenarioResult.cs +++ b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexScenarioResult.cs @@ -1,43 +1,43 @@ -using System.Globalization; - -namespace StellaOps.Bench.LinkNotMerge.Vex; - -internal sealed record VexScenarioResult( - string Id, - string Label, - int Iterations, - int ObservationCount, - int AliasGroups, - int StatementCount, - int EventCount, - DurationStatistics TotalStatistics, - DurationStatistics InsertStatistics, - DurationStatistics CorrelationStatistics, - ThroughputStatistics ObservationThroughputStatistics, - 
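
A small sketch of how the file-level and scenario-level knobs compose once `VexBenchmarkConfig.LoadAsync` has parsed `config.json` (illustrative, not part of this patch; it assumes the caller lives in the bench assembly, that `Scenarios` is typed as `VexScenarioConfig`, and the path is a placeholder):

```csharp
using System;
using System.Threading.Tasks;

namespace StellaOps.Bench.LinkNotMerge.Vex;

internal static class ConfigFallbackExample
{
    public static async Task PrintPlanAsync()
    {
        // Placeholder path; the real file sits next to the bench project.
        var config = await VexBenchmarkConfig.LoadAsync("config.json");

        foreach (var scenario in config.Scenarios)
        {
            // Scenario-level "iterations" wins, then the file-level value, then the
            // built-in fallback of 3; the other knobs fall back to the defaults baked
            // into VexScenarioConfig (4,000 observations, 400 alias groups, and so on).
            var iterations = scenario.ResolveIterations(config.Iterations);

            Console.WriteLine(
                $"{scenario.ScenarioId}: {iterations} iterations, " +
                $"{scenario.ResolveObservationCount()} observations, " +
                $"{scenario.ResolveTenantCount()} tenants, batch {scenario.ResolveBatchSize()}");
        }
    }
}
```
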
ThroughputStatistics EventThroughputStatistics, - AllocationStatistics AllocationStatistics, - double? ThresholdMs, - double? MinObservationThroughputPerSecond, - double? MinEventThroughputPerSecond, - double? MaxAllocatedThresholdMb) -{ - public string IdColumn => Id.Length <= 28 ? Id.PadRight(28) : Id[..28]; - - public string ObservationsColumn => ObservationCount.ToString("N0", CultureInfo.InvariantCulture).PadLeft(12); - - public string StatementColumn => StatementCount.ToString("N0", CultureInfo.InvariantCulture).PadLeft(10); - - public string EventColumn => EventCount.ToString("N0", CultureInfo.InvariantCulture).PadLeft(8); - - public string TotalMeanColumn => TotalStatistics.MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10); - - public string CorrelationMeanColumn => CorrelationStatistics.MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10); - - public string InsertMeanColumn => InsertStatistics.MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10); - - public string ObservationThroughputColumn => (ObservationThroughputStatistics.MinPerSecond / 1_000d).ToString("F2", CultureInfo.InvariantCulture).PadLeft(11); - - public string EventThroughputColumn => (EventThroughputStatistics.MinPerSecond / 1_000d).ToString("F2", CultureInfo.InvariantCulture).PadLeft(11); - - public string AllocatedColumn => AllocationStatistics.MaxAllocatedMb.ToString("F2", CultureInfo.InvariantCulture).PadLeft(9); -} +using System.Globalization; + +namespace StellaOps.Bench.LinkNotMerge.Vex; + +internal sealed record VexScenarioResult( + string Id, + string Label, + int Iterations, + int ObservationCount, + int AliasGroups, + int StatementCount, + int EventCount, + DurationStatistics TotalStatistics, + DurationStatistics InsertStatistics, + DurationStatistics CorrelationStatistics, + ThroughputStatistics ObservationThroughputStatistics, + ThroughputStatistics EventThroughputStatistics, + AllocationStatistics AllocationStatistics, + double? ThresholdMs, + double? MinObservationThroughputPerSecond, + double? MinEventThroughputPerSecond, + double? MaxAllocatedThresholdMb) +{ + public string IdColumn => Id.Length <= 28 ? 
Id.PadRight(28) : Id[..28]; + + public string ObservationsColumn => ObservationCount.ToString("N0", CultureInfo.InvariantCulture).PadLeft(12); + + public string StatementColumn => StatementCount.ToString("N0", CultureInfo.InvariantCulture).PadLeft(10); + + public string EventColumn => EventCount.ToString("N0", CultureInfo.InvariantCulture).PadLeft(8); + + public string TotalMeanColumn => TotalStatistics.MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10); + + public string CorrelationMeanColumn => CorrelationStatistics.MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10); + + public string InsertMeanColumn => InsertStatistics.MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10); + + public string ObservationThroughputColumn => (ObservationThroughputStatistics.MinPerSecond / 1_000d).ToString("F2", CultureInfo.InvariantCulture).PadLeft(11); + + public string EventThroughputColumn => (EventThroughputStatistics.MinPerSecond / 1_000d).ToString("F2", CultureInfo.InvariantCulture).PadLeft(11); + + public string AllocatedColumn => AllocationStatistics.MaxAllocatedMb.ToString("F2", CultureInfo.InvariantCulture).PadLeft(9); +} diff --git a/src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexScenarioRunner.cs b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexScenarioRunner.cs similarity index 97% rename from src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexScenarioRunner.cs rename to src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexScenarioRunner.cs index 50bf21f2..fd88881e 100644 --- a/src/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexScenarioRunner.cs +++ b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/StellaOps.Bench.LinkNotMerge.Vex/VexScenarioRunner.cs @@ -1,138 +1,138 @@ -using System.Diagnostics; -using EphemeralMongo; -using MongoDB.Bson; -using MongoDB.Driver; - -namespace StellaOps.Bench.LinkNotMerge.Vex; - -internal sealed class VexScenarioRunner -{ - private readonly VexScenarioConfig _config; - private readonly IReadOnlyList _seeds; - - public VexScenarioRunner(VexScenarioConfig config) - { - _config = config ?? 
throw new ArgumentNullException(nameof(config)); - _seeds = VexObservationGenerator.Generate(config); - } - - public VexScenarioExecutionResult Execute(int iterations, CancellationToken cancellationToken) - { - if (iterations <= 0) - { - throw new ArgumentOutOfRangeException(nameof(iterations), iterations, "Iterations must be positive."); - } - - var totalDurations = new double[iterations]; - var insertDurations = new double[iterations]; - var correlationDurations = new double[iterations]; - var allocated = new double[iterations]; - var observationThroughputs = new double[iterations]; - var eventThroughputs = new double[iterations]; - VexAggregationResult lastAggregation = new(0, 0, 0, Array.Empty()); - - for (var iteration = 0; iteration < iterations; iteration++) - { - cancellationToken.ThrowIfCancellationRequested(); - - using var runner = MongoRunner.Run(new MongoRunnerOptions - { - UseSingleNodeReplicaSet = false, - }); - - var client = new MongoClient(runner.ConnectionString); - var database = client.GetDatabase("linknotmerge_vex_bench"); - var collection = database.GetCollection("vex_observations"); - - CreateIndexes(collection, cancellationToken); - - var beforeAllocated = GC.GetTotalAllocatedBytes(); - - var insertStopwatch = Stopwatch.StartNew(); - InsertObservations(collection, _seeds, _config.ResolveBatchSize(), cancellationToken); - insertStopwatch.Stop(); - - var correlationStopwatch = Stopwatch.StartNew(); - var documents = collection - .Find(FilterDefinition.Empty) - .Project(Builders.Projection - .Include("tenant") - .Include("statements") - .Include("linkset")) - .ToList(cancellationToken); - - var aggregator = new VexLinksetAggregator(); - lastAggregation = aggregator.Correlate(documents); - correlationStopwatch.Stop(); - - var totalElapsed = insertStopwatch.Elapsed + correlationStopwatch.Elapsed; - var afterAllocated = GC.GetTotalAllocatedBytes(); - - totalDurations[iteration] = totalElapsed.TotalMilliseconds; - insertDurations[iteration] = insertStopwatch.Elapsed.TotalMilliseconds; - correlationDurations[iteration] = correlationStopwatch.Elapsed.TotalMilliseconds; - allocated[iteration] = Math.Max(0, afterAllocated - beforeAllocated) / (1024d * 1024d); - - var totalSeconds = Math.Max(totalElapsed.TotalSeconds, 0.0001d); - observationThroughputs[iteration] = _seeds.Count / totalSeconds; - - var eventSeconds = Math.Max(correlationStopwatch.Elapsed.TotalSeconds, 0.0001d); - var eventCount = Math.Max(lastAggregation.EventCount, 1); - eventThroughputs[iteration] = eventCount / eventSeconds; - } - - return new VexScenarioExecutionResult( - totalDurations, - insertDurations, - correlationDurations, - allocated, - observationThroughputs, - eventThroughputs, - ObservationCount: _seeds.Count, - AliasGroups: _config.ResolveAliasGroups(), - StatementCount: lastAggregation.StatementCount, - EventCount: lastAggregation.EventCount, - AggregationResult: lastAggregation); - } - - private static void InsertObservations( - IMongoCollection collection, - IReadOnlyList seeds, - int batchSize, - CancellationToken cancellationToken) - { - for (var offset = 0; offset < seeds.Count; offset += batchSize) - { - cancellationToken.ThrowIfCancellationRequested(); - - var remaining = Math.Min(batchSize, seeds.Count - offset); - var batch = new List(remaining); - for (var index = 0; index < remaining; index++) - { - batch.Add(seeds[offset + index].ToBsonDocument()); - } - - collection.InsertMany(batch, new InsertManyOptions - { - IsOrdered = false, - BypassDocumentValidation = true, - }, 
cancellationToken); - } - } - - private static void CreateIndexes(IMongoCollection collection, CancellationToken cancellationToken) - { - var indexKeys = Builders.IndexKeys - .Ascending("tenant") - .Ascending("linkset.aliases"); - - try - { - collection.Indexes.CreateOne(new CreateIndexModel(indexKeys), cancellationToken: cancellationToken); - } - catch - { - // non-fatal - } - } -} +using System.Diagnostics; +using EphemeralMongo; +using MongoDB.Bson; +using MongoDB.Driver; + +namespace StellaOps.Bench.LinkNotMerge.Vex; + +internal sealed class VexScenarioRunner +{ + private readonly VexScenarioConfig _config; + private readonly IReadOnlyList _seeds; + + public VexScenarioRunner(VexScenarioConfig config) + { + _config = config ?? throw new ArgumentNullException(nameof(config)); + _seeds = VexObservationGenerator.Generate(config); + } + + public VexScenarioExecutionResult Execute(int iterations, CancellationToken cancellationToken) + { + if (iterations <= 0) + { + throw new ArgumentOutOfRangeException(nameof(iterations), iterations, "Iterations must be positive."); + } + + var totalDurations = new double[iterations]; + var insertDurations = new double[iterations]; + var correlationDurations = new double[iterations]; + var allocated = new double[iterations]; + var observationThroughputs = new double[iterations]; + var eventThroughputs = new double[iterations]; + VexAggregationResult lastAggregation = new(0, 0, 0, Array.Empty()); + + for (var iteration = 0; iteration < iterations; iteration++) + { + cancellationToken.ThrowIfCancellationRequested(); + + using var runner = MongoRunner.Run(new MongoRunnerOptions + { + UseSingleNodeReplicaSet = false, + }); + + var client = new MongoClient(runner.ConnectionString); + var database = client.GetDatabase("linknotmerge_vex_bench"); + var collection = database.GetCollection("vex_observations"); + + CreateIndexes(collection, cancellationToken); + + var beforeAllocated = GC.GetTotalAllocatedBytes(); + + var insertStopwatch = Stopwatch.StartNew(); + InsertObservations(collection, _seeds, _config.ResolveBatchSize(), cancellationToken); + insertStopwatch.Stop(); + + var correlationStopwatch = Stopwatch.StartNew(); + var documents = collection + .Find(FilterDefinition.Empty) + .Project(Builders.Projection + .Include("tenant") + .Include("statements") + .Include("linkset")) + .ToList(cancellationToken); + + var aggregator = new VexLinksetAggregator(); + lastAggregation = aggregator.Correlate(documents); + correlationStopwatch.Stop(); + + var totalElapsed = insertStopwatch.Elapsed + correlationStopwatch.Elapsed; + var afterAllocated = GC.GetTotalAllocatedBytes(); + + totalDurations[iteration] = totalElapsed.TotalMilliseconds; + insertDurations[iteration] = insertStopwatch.Elapsed.TotalMilliseconds; + correlationDurations[iteration] = correlationStopwatch.Elapsed.TotalMilliseconds; + allocated[iteration] = Math.Max(0, afterAllocated - beforeAllocated) / (1024d * 1024d); + + var totalSeconds = Math.Max(totalElapsed.TotalSeconds, 0.0001d); + observationThroughputs[iteration] = _seeds.Count / totalSeconds; + + var eventSeconds = Math.Max(correlationStopwatch.Elapsed.TotalSeconds, 0.0001d); + var eventCount = Math.Max(lastAggregation.EventCount, 1); + eventThroughputs[iteration] = eventCount / eventSeconds; + } + + return new VexScenarioExecutionResult( + totalDurations, + insertDurations, + correlationDurations, + allocated, + observationThroughputs, + eventThroughputs, + ObservationCount: _seeds.Count, + AliasGroups: _config.ResolveAliasGroups(), + 
StatementCount: lastAggregation.StatementCount, + EventCount: lastAggregation.EventCount, + AggregationResult: lastAggregation); + } + + private static void InsertObservations( + IMongoCollection collection, + IReadOnlyList seeds, + int batchSize, + CancellationToken cancellationToken) + { + for (var offset = 0; offset < seeds.Count; offset += batchSize) + { + cancellationToken.ThrowIfCancellationRequested(); + + var remaining = Math.Min(batchSize, seeds.Count - offset); + var batch = new List(remaining); + for (var index = 0; index < remaining; index++) + { + batch.Add(seeds[offset + index].ToBsonDocument()); + } + + collection.InsertMany(batch, new InsertManyOptions + { + IsOrdered = false, + BypassDocumentValidation = true, + }, cancellationToken); + } + } + + private static void CreateIndexes(IMongoCollection collection, CancellationToken cancellationToken) + { + var indexKeys = Builders.IndexKeys + .Ascending("tenant") + .Ascending("linkset.aliases"); + + try + { + collection.Indexes.CreateOne(new CreateIndexModel(indexKeys), cancellationToken: cancellationToken); + } + catch + { + // non-fatal + } + } +} diff --git a/src/StellaOps.Bench/LinkNotMerge.Vex/baseline.csv b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/baseline.csv similarity index 99% rename from src/StellaOps.Bench/LinkNotMerge.Vex/baseline.csv rename to src/Bench/StellaOps.Bench/LinkNotMerge.Vex/baseline.csv index 34578590..b78eeb39 100644 --- a/src/StellaOps.Bench/LinkNotMerge.Vex/baseline.csv +++ b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/baseline.csv @@ -1,4 +1,4 @@ -scenario,iterations,observations,statements,events,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_observation_throughput_per_sec,min_observation_throughput_per_sec,mean_event_throughput_per_sec,min_event_throughput_per_sec,max_allocated_mb -vex_ingest_baseline,5,4000,24000,21326,842.8191,1319.3038,1432.7675,346.7277,496.0915,5349.8940,2791.7998,48942.4901,24653.0556,138.6365 -vex_ingest_medium,5,8000,64000,56720,1525.9929,1706.8900,1748.9056,533.3378,992.6552,5274.5883,4574.2892,57654.9190,48531.7353,326.8638 -vex_ingest_high,5,12000,120000,106910,2988.5094,3422.1728,3438.9364,903.3927,2085.1167,4066.2300,3489.4510,52456.9493,42358.0556,583.9903 +scenario,iterations,observations,statements,events,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_observation_throughput_per_sec,min_observation_throughput_per_sec,mean_event_throughput_per_sec,min_event_throughput_per_sec,max_allocated_mb +vex_ingest_baseline,5,4000,24000,21326,842.8191,1319.3038,1432.7675,346.7277,496.0915,5349.8940,2791.7998,48942.4901,24653.0556,138.6365 +vex_ingest_medium,5,8000,64000,56720,1525.9929,1706.8900,1748.9056,533.3378,992.6552,5274.5883,4574.2892,57654.9190,48531.7353,326.8638 +vex_ingest_high,5,12000,120000,106910,2988.5094,3422.1728,3438.9364,903.3927,2085.1167,4066.2300,3489.4510,52456.9493,42358.0556,583.9903 diff --git a/src/StellaOps.Bench/LinkNotMerge.Vex/config.json b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/config.json similarity index 96% rename from src/StellaOps.Bench/LinkNotMerge.Vex/config.json rename to src/Bench/StellaOps.Bench/LinkNotMerge.Vex/config.json index e20cbd7f..2d2fde68 100644 --- a/src/StellaOps.Bench/LinkNotMerge.Vex/config.json +++ b/src/Bench/StellaOps.Bench/LinkNotMerge.Vex/config.json @@ -1,54 +1,54 @@ -{ - "thresholdMs": 4200, - "minThroughputPerSecond": 1800, - "minEventThroughputPerSecond": 2000, - "maxAllocatedMb": 800, - "iterations": 5, - "scenarios": [ - { - "id": 
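
For orientation, a hedged sketch of driving `VexScenarioRunner` directly rather than through the CLI (this is not the project's actual `Program.cs`; it assumes the bench internals are visible to the caller, that EphemeralMongo can start a local `mongod`, and that the result's list members carry `double` elements as their LinkNotMerge counterparts do):

```csharp
using System;
using System.Linq;
using System.Threading;

namespace StellaOps.Bench.LinkNotMerge.Vex;

internal static class RunnerSmokeExample
{
    public static void Run()
    {
        var scenario = new VexScenarioConfig
        {
            Id = "local-smoke",
            Observations = 1_000,
            AliasGroups = 100,
            Tenants = 2,
            Seed = 42,
        };

        // Each iteration spins up a throwaway EphemeralMongo instance, inserts the
        // generated observations in batches, then runs the linkset correlation pass.
        var runner = new VexScenarioRunner(scenario);
        var execution = runner.Execute(iterations: 3, CancellationToken.None);

        Console.WriteLine(
            $"{scenario.ScenarioId}: mean {execution.TotalDurationsMs.Average():F2} ms total, " +
            $"min {execution.ObservationThroughputsPerSecond.Min():N0} obs/s, " +
            $"{execution.EventCount:N0} events");
    }
}
```
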
"vex_ingest_baseline", - "label": "4k observations, 400 aliases", - "observations": 4000, - "aliasGroups": 400, - "statementsPerObservation": 6, - "productsPerObservation": 3, - "tenants": 3, - "batchSize": 200, - "seed": 420020, - "thresholdMs": 2300, - "minThroughputPerSecond": 1800, - "minEventThroughputPerSecond": 2000, - "maxAllocatedMb": 220 - }, - { - "id": "vex_ingest_medium", - "label": "8k observations, 700 aliases", - "observations": 8000, - "aliasGroups": 700, - "statementsPerObservation": 8, - "productsPerObservation": 4, - "tenants": 5, - "batchSize": 300, - "seed": 520020, - "thresholdMs": 3200, - "minThroughputPerSecond": 2200, - "minEventThroughputPerSecond": 2500, - "maxAllocatedMb": 400 - }, - { - "id": "vex_ingest_high", - "label": "12k observations, 1100 aliases", - "observations": 12000, - "aliasGroups": 1100, - "statementsPerObservation": 10, - "productsPerObservation": 5, - "tenants": 7, - "batchSize": 400, - "seed": 620020, - "thresholdMs": 4200, - "minThroughputPerSecond": 2200, - "minEventThroughputPerSecond": 2500, - "maxAllocatedMb": 700 - } - ] -} +{ + "thresholdMs": 4200, + "minThroughputPerSecond": 1800, + "minEventThroughputPerSecond": 2000, + "maxAllocatedMb": 800, + "iterations": 5, + "scenarios": [ + { + "id": "vex_ingest_baseline", + "label": "4k observations, 400 aliases", + "observations": 4000, + "aliasGroups": 400, + "statementsPerObservation": 6, + "productsPerObservation": 3, + "tenants": 3, + "batchSize": 200, + "seed": 420020, + "thresholdMs": 2300, + "minThroughputPerSecond": 1800, + "minEventThroughputPerSecond": 2000, + "maxAllocatedMb": 220 + }, + { + "id": "vex_ingest_medium", + "label": "8k observations, 700 aliases", + "observations": 8000, + "aliasGroups": 700, + "statementsPerObservation": 8, + "productsPerObservation": 4, + "tenants": 5, + "batchSize": 300, + "seed": 520020, + "thresholdMs": 3200, + "minThroughputPerSecond": 2200, + "minEventThroughputPerSecond": 2500, + "maxAllocatedMb": 400 + }, + { + "id": "vex_ingest_high", + "label": "12k observations, 1100 aliases", + "observations": 12000, + "aliasGroups": 1100, + "statementsPerObservation": 10, + "productsPerObservation": 5, + "tenants": 7, + "batchSize": 400, + "seed": 620020, + "thresholdMs": 4200, + "minThroughputPerSecond": 2200, + "minEventThroughputPerSecond": 2500, + "maxAllocatedMb": 700 + } + ] +} diff --git a/src/StellaOps.Bench/LinkNotMerge/README.md b/src/Bench/StellaOps.Bench/LinkNotMerge/README.md similarity index 88% rename from src/StellaOps.Bench/LinkNotMerge/README.md rename to src/Bench/StellaOps.Bench/LinkNotMerge/README.md index e2e5dfc5..e6696fc2 100644 --- a/src/StellaOps.Bench/LinkNotMerge/README.md +++ b/src/Bench/StellaOps.Bench/LinkNotMerge/README.md @@ -16,7 +16,7 @@ Synthetic workload that measures advisory observation ingestion and linkset corr ```bash dotnet run \ - --project src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/StellaOps.Bench.LinkNotMerge.csproj \ + --project src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/StellaOps.Bench.LinkNotMerge.csproj \ -- \ --csv out/linknotmerge-bench.csv \ --json out/linknotmerge-bench.json \ diff --git a/src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge.Tests/BaselineLoaderTests.cs b/src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge.Tests/BaselineLoaderTests.cs similarity index 97% rename from src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge.Tests/BaselineLoaderTests.cs rename to 
src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge.Tests/BaselineLoaderTests.cs index eec70e2b..af74be3f 100644 --- a/src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge.Tests/BaselineLoaderTests.cs +++ b/src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge.Tests/BaselineLoaderTests.cs @@ -1,38 +1,38 @@ -using System.IO; -using System.Threading; -using System.Threading.Tasks; -using StellaOps.Bench.LinkNotMerge.Baseline; -using Xunit; - -namespace StellaOps.Bench.LinkNotMerge.Tests; - -public sealed class BaselineLoaderTests -{ - [Fact] - public async Task LoadAsync_ReadsEntries() - { - var path = Path.GetTempFileName(); - try - { - await File.WriteAllTextAsync( - path, - "scenario,iterations,observations,aliases,linksets,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_throughput_per_sec,min_throughput_per_sec,mean_mongo_throughput_per_sec,min_mongo_throughput_per_sec,max_allocated_mb\n" + - "lnm_ingest_baseline,5,5000,500,450,320.5,340.1,360.9,120.2,210.3,15000.0,13500.0,18000.0,16500.0,96.5\n"); - - var baseline = await BaselineLoader.LoadAsync(path, CancellationToken.None); - var entry = Assert.Single(baseline); - - Assert.Equal("lnm_ingest_baseline", entry.Key); - Assert.Equal(5, entry.Value.Iterations); - Assert.Equal(5000, entry.Value.Observations); - Assert.Equal(500, entry.Value.Aliases); - Assert.Equal(360.9, entry.Value.MaxTotalMs); - Assert.Equal(16500.0, entry.Value.MinMongoThroughputPerSecond); - Assert.Equal(96.5, entry.Value.MaxAllocatedMb); - } - finally - { - File.Delete(path); - } - } -} +using System.IO; +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Bench.LinkNotMerge.Baseline; +using Xunit; + +namespace StellaOps.Bench.LinkNotMerge.Tests; + +public sealed class BaselineLoaderTests +{ + [Fact] + public async Task LoadAsync_ReadsEntries() + { + var path = Path.GetTempFileName(); + try + { + await File.WriteAllTextAsync( + path, + "scenario,iterations,observations,aliases,linksets,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_throughput_per_sec,min_throughput_per_sec,mean_mongo_throughput_per_sec,min_mongo_throughput_per_sec,max_allocated_mb\n" + + "lnm_ingest_baseline,5,5000,500,450,320.5,340.1,360.9,120.2,210.3,15000.0,13500.0,18000.0,16500.0,96.5\n"); + + var baseline = await BaselineLoader.LoadAsync(path, CancellationToken.None); + var entry = Assert.Single(baseline); + + Assert.Equal("lnm_ingest_baseline", entry.Key); + Assert.Equal(5, entry.Value.Iterations); + Assert.Equal(5000, entry.Value.Observations); + Assert.Equal(500, entry.Value.Aliases); + Assert.Equal(360.9, entry.Value.MaxTotalMs); + Assert.Equal(16500.0, entry.Value.MinMongoThroughputPerSecond); + Assert.Equal(96.5, entry.Value.MaxAllocatedMb); + } + finally + { + File.Delete(path); + } + } +} diff --git a/src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge.Tests/BenchmarkScenarioReportTests.cs b/src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge.Tests/BenchmarkScenarioReportTests.cs similarity index 97% rename from src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge.Tests/BenchmarkScenarioReportTests.cs rename to src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge.Tests/BenchmarkScenarioReportTests.cs index 3f3312fa..9eccd141 100644 --- a/src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge.Tests/BenchmarkScenarioReportTests.cs +++ 
b/src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge.Tests/BenchmarkScenarioReportTests.cs @@ -1,81 +1,81 @@ -using StellaOps.Bench.LinkNotMerge.Baseline; -using StellaOps.Bench.LinkNotMerge.Reporting; -using Xunit; - -namespace StellaOps.Bench.LinkNotMerge.Tests; - -public sealed class BenchmarkScenarioReportTests -{ - [Fact] - public void RegressionDetection_FlagsBreaches() - { - var result = new ScenarioResult( - Id: "scenario", - Label: "Scenario", - Iterations: 3, - ObservationCount: 1000, - AliasGroups: 100, - LinksetCount: 90, - TotalStatistics: new DurationStatistics(200, 240, 260), - InsertStatistics: new DurationStatistics(80, 90, 100), - CorrelationStatistics: new DurationStatistics(120, 150, 170), - TotalThroughputStatistics: new ThroughputStatistics(8000, 7000), - InsertThroughputStatistics: new ThroughputStatistics(9000, 8000), - AllocationStatistics: new AllocationStatistics(120), - ThresholdMs: null, - MinThroughputThresholdPerSecond: null, - MinMongoThroughputThresholdPerSecond: null, - MaxAllocatedThresholdMb: null); - - var baseline = new BaselineEntry( - ScenarioId: "scenario", - Iterations: 3, - Observations: 1000, - Aliases: 100, - Linksets: 90, - MeanTotalMs: 150, - P95TotalMs: 170, - MaxTotalMs: 180, - MeanInsertMs: 60, - MeanCorrelationMs: 90, - MeanThroughputPerSecond: 9000, - MinThroughputPerSecond: 8500, - MeanMongoThroughputPerSecond: 10000, - MinMongoThroughputPerSecond: 9500, - MaxAllocatedMb: 100); - - var report = new BenchmarkScenarioReport(result, baseline, regressionLimit: 1.1); - - Assert.True(report.DurationRegressionBreached); - Assert.True(report.ThroughputRegressionBreached); - Assert.True(report.MongoThroughputRegressionBreached); - Assert.Contains(report.BuildRegressionFailureMessages(), message => message.Contains("max duration")); - } - - [Fact] - public void RegressionDetection_NoBaseline_NoBreaches() - { - var result = new ScenarioResult( - Id: "scenario", - Label: "Scenario", - Iterations: 3, - ObservationCount: 1000, - AliasGroups: 100, - LinksetCount: 90, - TotalStatistics: new DurationStatistics(200, 220, 230), - InsertStatistics: new DurationStatistics(90, 100, 110), - CorrelationStatistics: new DurationStatistics(110, 120, 130), - TotalThroughputStatistics: new ThroughputStatistics(8000, 7900), - InsertThroughputStatistics: new ThroughputStatistics(9000, 8900), - AllocationStatistics: new AllocationStatistics(64), - ThresholdMs: null, - MinThroughputThresholdPerSecond: null, - MinMongoThroughputThresholdPerSecond: null, - MaxAllocatedThresholdMb: null); - - var report = new BenchmarkScenarioReport(result, baseline: null, regressionLimit: null); - - Assert.False(report.RegressionBreached); - Assert.Empty(report.BuildRegressionFailureMessages()); - } -} +using StellaOps.Bench.LinkNotMerge.Baseline; +using StellaOps.Bench.LinkNotMerge.Reporting; +using Xunit; + +namespace StellaOps.Bench.LinkNotMerge.Tests; + +public sealed class BenchmarkScenarioReportTests +{ + [Fact] + public void RegressionDetection_FlagsBreaches() + { + var result = new ScenarioResult( + Id: "scenario", + Label: "Scenario", + Iterations: 3, + ObservationCount: 1000, + AliasGroups: 100, + LinksetCount: 90, + TotalStatistics: new DurationStatistics(200, 240, 260), + InsertStatistics: new DurationStatistics(80, 90, 100), + CorrelationStatistics: new DurationStatistics(120, 150, 170), + TotalThroughputStatistics: new ThroughputStatistics(8000, 7000), + InsertThroughputStatistics: new ThroughputStatistics(9000, 8000), + AllocationStatistics: new 
AllocationStatistics(120), + ThresholdMs: null, + MinThroughputThresholdPerSecond: null, + MinMongoThroughputThresholdPerSecond: null, + MaxAllocatedThresholdMb: null); + + var baseline = new BaselineEntry( + ScenarioId: "scenario", + Iterations: 3, + Observations: 1000, + Aliases: 100, + Linksets: 90, + MeanTotalMs: 150, + P95TotalMs: 170, + MaxTotalMs: 180, + MeanInsertMs: 60, + MeanCorrelationMs: 90, + MeanThroughputPerSecond: 9000, + MinThroughputPerSecond: 8500, + MeanMongoThroughputPerSecond: 10000, + MinMongoThroughputPerSecond: 9500, + MaxAllocatedMb: 100); + + var report = new BenchmarkScenarioReport(result, baseline, regressionLimit: 1.1); + + Assert.True(report.DurationRegressionBreached); + Assert.True(report.ThroughputRegressionBreached); + Assert.True(report.MongoThroughputRegressionBreached); + Assert.Contains(report.BuildRegressionFailureMessages(), message => message.Contains("max duration")); + } + + [Fact] + public void RegressionDetection_NoBaseline_NoBreaches() + { + var result = new ScenarioResult( + Id: "scenario", + Label: "Scenario", + Iterations: 3, + ObservationCount: 1000, + AliasGroups: 100, + LinksetCount: 90, + TotalStatistics: new DurationStatistics(200, 220, 230), + InsertStatistics: new DurationStatistics(90, 100, 110), + CorrelationStatistics: new DurationStatistics(110, 120, 130), + TotalThroughputStatistics: new ThroughputStatistics(8000, 7900), + InsertThroughputStatistics: new ThroughputStatistics(9000, 8900), + AllocationStatistics: new AllocationStatistics(64), + ThresholdMs: null, + MinThroughputThresholdPerSecond: null, + MinMongoThroughputThresholdPerSecond: null, + MaxAllocatedThresholdMb: null); + + var report = new BenchmarkScenarioReport(result, baseline: null, regressionLimit: null); + + Assert.False(report.RegressionBreached); + Assert.Empty(report.BuildRegressionFailureMessages()); + } +} diff --git a/src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge.Tests/LinkNotMergeScenarioRunnerTests.cs b/src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge.Tests/LinkNotMergeScenarioRunnerTests.cs similarity index 97% rename from src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge.Tests/LinkNotMergeScenarioRunnerTests.cs rename to src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge.Tests/LinkNotMergeScenarioRunnerTests.cs index e98fcb15..3cdf7997 100644 --- a/src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge.Tests/LinkNotMergeScenarioRunnerTests.cs +++ b/src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge.Tests/LinkNotMergeScenarioRunnerTests.cs @@ -1,38 +1,38 @@ -using System.Linq; -using System.Threading; -using StellaOps.Bench.LinkNotMerge.Baseline; -using Xunit; - -namespace StellaOps.Bench.LinkNotMerge.Tests; - -public sealed class LinkNotMergeScenarioRunnerTests -{ - [Fact] - public void Execute_BuildsDeterministicAggregation() - { - var config = new LinkNotMergeScenarioConfig - { - Id = "unit", - Observations = 120, - AliasGroups = 24, - PurlsPerObservation = 3, - CpesPerObservation = 2, - ReferencesPerObservation = 2, - Tenants = 3, - BatchSize = 40, - Seed = 1337, - }; - - var runner = new LinkNotMergeScenarioRunner(config); - var result = runner.Execute(iterations: 2, CancellationToken.None); - - Assert.Equal(120, result.ObservationCount); - Assert.Equal(24, result.AliasGroups); - Assert.True(result.TotalDurationsMs.All(value => value > 0)); - Assert.True(result.InsertThroughputsPerSecond.All(value => value > 0)); - 
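
The report tests above exercise the regression surface (`RegressionBreached`, `BuildRegressionFailureMessages`); below is a minimal sketch of a CI-style gate built on just those members (an illustrative wrapper, not the bench's real entry point, and it assumes it compiles inside the bench assembly so the internal report type is reachable):

```csharp
using System;
using System.Collections.Generic;
using StellaOps.Bench.LinkNotMerge.Reporting;

internal static class RegressionGateExample
{
    // Returns a non-zero exit code when any scenario breached its baseline,
    // mirroring how the tests above drive BenchmarkScenarioReport.
    public static int Evaluate(IReadOnlyList<BenchmarkScenarioReport> reports)
    {
        var failed = false;

        foreach (var report in reports)
        {
            foreach (var message in report.BuildRegressionFailureMessages())
            {
                Console.Error.WriteLine(message);
            }

            failed |= report.RegressionBreached;
        }

        return failed ? 1 : 0;
    }
}
```
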
Assert.True(result.TotalThroughputsPerSecond.All(value => value > 0)); - Assert.True(result.AllocatedMb.All(value => value >= 0)); - Assert.Equal(result.AggregationResult.LinksetCount, result.LinksetCount); - Assert.Equal(result.AggregationResult.ObservationCount, result.ObservationCount); - } -} +using System.Linq; +using System.Threading; +using StellaOps.Bench.LinkNotMerge.Baseline; +using Xunit; + +namespace StellaOps.Bench.LinkNotMerge.Tests; + +public sealed class LinkNotMergeScenarioRunnerTests +{ + [Fact] + public void Execute_BuildsDeterministicAggregation() + { + var config = new LinkNotMergeScenarioConfig + { + Id = "unit", + Observations = 120, + AliasGroups = 24, + PurlsPerObservation = 3, + CpesPerObservation = 2, + ReferencesPerObservation = 2, + Tenants = 3, + BatchSize = 40, + Seed = 1337, + }; + + var runner = new LinkNotMergeScenarioRunner(config); + var result = runner.Execute(iterations: 2, CancellationToken.None); + + Assert.Equal(120, result.ObservationCount); + Assert.Equal(24, result.AliasGroups); + Assert.True(result.TotalDurationsMs.All(value => value > 0)); + Assert.True(result.InsertThroughputsPerSecond.All(value => value > 0)); + Assert.True(result.TotalThroughputsPerSecond.All(value => value > 0)); + Assert.True(result.AllocatedMb.All(value => value >= 0)); + Assert.Equal(result.AggregationResult.LinksetCount, result.LinksetCount); + Assert.Equal(result.AggregationResult.ObservationCount, result.ObservationCount); + } +} diff --git a/src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge.Tests/StellaOps.Bench.LinkNotMerge.Tests.csproj b/src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge.Tests/StellaOps.Bench.LinkNotMerge.Tests.csproj similarity index 97% rename from src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge.Tests/StellaOps.Bench.LinkNotMerge.Tests.csproj rename to src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge.Tests/StellaOps.Bench.LinkNotMerge.Tests.csproj index 3a6c039d..3548f6d5 100644 --- a/src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge.Tests/StellaOps.Bench.LinkNotMerge.Tests.csproj +++ b/src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge.Tests/StellaOps.Bench.LinkNotMerge.Tests.csproj @@ -1,28 +1,28 @@ - - - net10.0 - enable - enable - preview - true - false - - - - - - - runtime; build; native; contentfiles; analyzers; buildtransitive - all - - - runtime; build; native; contentfiles; analyzers; buildtransitive - all - - - - - - - - + + + net10.0 + enable + enable + preview + true + false + + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + + diff --git a/src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Baseline/BaselineEntry.cs b/src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Baseline/BaselineEntry.cs similarity index 96% rename from src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Baseline/BaselineEntry.cs rename to src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Baseline/BaselineEntry.cs index a5b3bfc2..a31503e6 100644 --- a/src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Baseline/BaselineEntry.cs +++ b/src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Baseline/BaselineEntry.cs @@ -1,18 +1,18 @@ -namespace StellaOps.Bench.LinkNotMerge.Baseline; - -internal sealed record BaselineEntry( - string ScenarioId, - int Iterations, - int 
Observations, - int Aliases, - int Linksets, - double MeanTotalMs, - double P95TotalMs, - double MaxTotalMs, - double MeanInsertMs, - double MeanCorrelationMs, - double MeanThroughputPerSecond, - double MinThroughputPerSecond, - double MeanMongoThroughputPerSecond, - double MinMongoThroughputPerSecond, - double MaxAllocatedMb); +namespace StellaOps.Bench.LinkNotMerge.Baseline; + +internal sealed record BaselineEntry( + string ScenarioId, + int Iterations, + int Observations, + int Aliases, + int Linksets, + double MeanTotalMs, + double P95TotalMs, + double MaxTotalMs, + double MeanInsertMs, + double MeanCorrelationMs, + double MeanThroughputPerSecond, + double MinThroughputPerSecond, + double MeanMongoThroughputPerSecond, + double MinMongoThroughputPerSecond, + double MaxAllocatedMb); diff --git a/src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Baseline/BaselineLoader.cs b/src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Baseline/BaselineLoader.cs similarity index 97% rename from src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Baseline/BaselineLoader.cs rename to src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Baseline/BaselineLoader.cs index c7f67b68..a574ec06 100644 --- a/src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Baseline/BaselineLoader.cs +++ b/src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Baseline/BaselineLoader.cs @@ -1,87 +1,87 @@ -using System.Globalization; - -namespace StellaOps.Bench.LinkNotMerge.Baseline; - -internal static class BaselineLoader -{ - public static async Task> LoadAsync(string path, CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(path); - - var resolved = Path.GetFullPath(path); - if (!File.Exists(resolved)) - { - return new Dictionary(StringComparer.OrdinalIgnoreCase); - } - - var result = new Dictionary(StringComparer.OrdinalIgnoreCase); - - await using var stream = new FileStream(resolved, FileMode.Open, FileAccess.Read, FileShare.Read); - using var reader = new StreamReader(stream); - - var lineNumber = 0; - while (true) - { - cancellationToken.ThrowIfCancellationRequested(); - - var line = await reader.ReadLineAsync().ConfigureAwait(false); - if (line is null) - { - break; - } - - lineNumber++; - if (lineNumber == 1 || string.IsNullOrWhiteSpace(line)) - { - continue; - } - - var parts = line.Split(',', StringSplitOptions.TrimEntries); - if (parts.Length < 15) - { - throw new InvalidOperationException($"Baseline '{resolved}' line {lineNumber} is invalid (expected 15 columns, found {parts.Length})."); - } - - var entry = new BaselineEntry( - ScenarioId: parts[0], - Iterations: ParseInt(parts[1], resolved, lineNumber), - Observations: ParseInt(parts[2], resolved, lineNumber), - Aliases: ParseInt(parts[3], resolved, lineNumber), - Linksets: ParseInt(parts[4], resolved, lineNumber), - MeanTotalMs: ParseDouble(parts[5], resolved, lineNumber), - P95TotalMs: ParseDouble(parts[6], resolved, lineNumber), - MaxTotalMs: ParseDouble(parts[7], resolved, lineNumber), - MeanInsertMs: ParseDouble(parts[8], resolved, lineNumber), - MeanCorrelationMs: ParseDouble(parts[9], resolved, lineNumber), - MeanThroughputPerSecond: ParseDouble(parts[10], resolved, lineNumber), - MinThroughputPerSecond: ParseDouble(parts[11], resolved, lineNumber), - MeanMongoThroughputPerSecond: ParseDouble(parts[12], resolved, lineNumber), - MinMongoThroughputPerSecond: ParseDouble(parts[13], resolved, lineNumber), - MaxAllocatedMb: ParseDouble(parts[14], 
resolved, lineNumber)); - - result[entry.ScenarioId] = entry; - } - - return result; - } - - private static int ParseInt(string value, string file, int line) - { - if (int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var result)) - { - return result; - } - - throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid integer '{value}'."); - } - - private static double ParseDouble(string value, string file, int line) - { - if (double.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out var result)) - { - return result; - } - - throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid number '{value}'."); - } -} +using System.Globalization; + +namespace StellaOps.Bench.LinkNotMerge.Baseline; + +internal static class BaselineLoader +{ + public static async Task> LoadAsync(string path, CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(path); + + var resolved = Path.GetFullPath(path); + if (!File.Exists(resolved)) + { + return new Dictionary(StringComparer.OrdinalIgnoreCase); + } + + var result = new Dictionary(StringComparer.OrdinalIgnoreCase); + + await using var stream = new FileStream(resolved, FileMode.Open, FileAccess.Read, FileShare.Read); + using var reader = new StreamReader(stream); + + var lineNumber = 0; + while (true) + { + cancellationToken.ThrowIfCancellationRequested(); + + var line = await reader.ReadLineAsync().ConfigureAwait(false); + if (line is null) + { + break; + } + + lineNumber++; + if (lineNumber == 1 || string.IsNullOrWhiteSpace(line)) + { + continue; + } + + var parts = line.Split(',', StringSplitOptions.TrimEntries); + if (parts.Length < 15) + { + throw new InvalidOperationException($"Baseline '{resolved}' line {lineNumber} is invalid (expected 15 columns, found {parts.Length})."); + } + + var entry = new BaselineEntry( + ScenarioId: parts[0], + Iterations: ParseInt(parts[1], resolved, lineNumber), + Observations: ParseInt(parts[2], resolved, lineNumber), + Aliases: ParseInt(parts[3], resolved, lineNumber), + Linksets: ParseInt(parts[4], resolved, lineNumber), + MeanTotalMs: ParseDouble(parts[5], resolved, lineNumber), + P95TotalMs: ParseDouble(parts[6], resolved, lineNumber), + MaxTotalMs: ParseDouble(parts[7], resolved, lineNumber), + MeanInsertMs: ParseDouble(parts[8], resolved, lineNumber), + MeanCorrelationMs: ParseDouble(parts[9], resolved, lineNumber), + MeanThroughputPerSecond: ParseDouble(parts[10], resolved, lineNumber), + MinThroughputPerSecond: ParseDouble(parts[11], resolved, lineNumber), + MeanMongoThroughputPerSecond: ParseDouble(parts[12], resolved, lineNumber), + MinMongoThroughputPerSecond: ParseDouble(parts[13], resolved, lineNumber), + MaxAllocatedMb: ParseDouble(parts[14], resolved, lineNumber)); + + result[entry.ScenarioId] = entry; + } + + return result; + } + + private static int ParseInt(string value, string file, int line) + { + if (int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var result)) + { + return result; + } + + throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid integer '{value}'."); + } + + private static double ParseDouble(string value, string file, int line) + { + if (double.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out var result)) + { + return result; + } + + throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid number '{value}'."); + } +} diff --git 
a/src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/BenchmarkConfig.cs b/src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/BenchmarkConfig.cs similarity index 97% rename from src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/BenchmarkConfig.cs rename to src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/BenchmarkConfig.cs index 2aebc423..0eff448a 100644 --- a/src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/BenchmarkConfig.cs +++ b/src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/BenchmarkConfig.cs @@ -1,210 +1,210 @@ -using System.Text.Json; -using System.Text.Json.Serialization; - -namespace StellaOps.Bench.LinkNotMerge; - -internal sealed record BenchmarkConfig( - double? ThresholdMs, - double? MinThroughputPerSecond, - double? MinMongoThroughputPerSecond, - double? MaxAllocatedMb, - int? Iterations, - IReadOnlyList Scenarios) -{ - public static async Task LoadAsync(string path) - { - ArgumentException.ThrowIfNullOrWhiteSpace(path); - - var resolved = Path.GetFullPath(path); - if (!File.Exists(resolved)) - { - throw new FileNotFoundException($"Benchmark configuration '{resolved}' was not found.", resolved); - } - - await using var stream = File.OpenRead(resolved); - var model = await JsonSerializer.DeserializeAsync( - stream, - new JsonSerializerOptions(JsonSerializerDefaults.Web) - { - PropertyNameCaseInsensitive = true, - ReadCommentHandling = JsonCommentHandling.Skip, - AllowTrailingCommas = true, - }).ConfigureAwait(false); - - if (model is null) - { - throw new InvalidOperationException($"Benchmark configuration '{resolved}' could not be parsed."); - } - - if (model.Scenarios.Count == 0) - { - throw new InvalidOperationException($"Benchmark configuration '{resolved}' does not contain any scenarios."); - } - - foreach (var scenario in model.Scenarios) - { - scenario.Validate(); - } - - return new BenchmarkConfig( - model.ThresholdMs, - model.MinThroughputPerSecond, - model.MinMongoThroughputPerSecond, - model.MaxAllocatedMb, - model.Iterations, - model.Scenarios); - } - - private sealed class BenchmarkConfigModel - { - [JsonPropertyName("thresholdMs")] - public double? ThresholdMs { get; init; } - - [JsonPropertyName("minThroughputPerSecond")] - public double? MinThroughputPerSecond { get; init; } - - [JsonPropertyName("minMongoThroughputPerSecond")] - public double? MinMongoThroughputPerSecond { get; init; } - - [JsonPropertyName("maxAllocatedMb")] - public double? MaxAllocatedMb { get; init; } - - [JsonPropertyName("iterations")] - public int? Iterations { get; init; } - - [JsonPropertyName("scenarios")] - public List Scenarios { get; init; } = new(); - } -} - -internal sealed class LinkNotMergeScenarioConfig -{ - private const int DefaultObservationCount = 5_000; - private const int DefaultAliasGroups = 500; - private const int DefaultPurlsPerObservation = 4; - private const int DefaultCpesPerObservation = 2; - private const int DefaultReferencesPerObservation = 3; - private const int DefaultTenants = 4; - private const int DefaultBatchSize = 500; - private const int DefaultSeed = 42_022; - - [JsonPropertyName("id")] - public string? Id { get; init; } - - [JsonPropertyName("label")] - public string? Label { get; init; } - - [JsonPropertyName("observations")] - public int? Observations { get; init; } - - [JsonPropertyName("aliasGroups")] - public int? AliasGroups { get; init; } - - [JsonPropertyName("purlsPerObservation")] - public int? 
PurlsPerObservation { get; init; } - - [JsonPropertyName("cpesPerObservation")] - public int? CpesPerObservation { get; init; } - - [JsonPropertyName("referencesPerObservation")] - public int? ReferencesPerObservation { get; init; } - - [JsonPropertyName("tenants")] - public int? Tenants { get; init; } - - [JsonPropertyName("batchSize")] - public int? BatchSize { get; init; } - - [JsonPropertyName("seed")] - public int? Seed { get; init; } - - [JsonPropertyName("iterations")] - public int? Iterations { get; init; } - - [JsonPropertyName("thresholdMs")] - public double? ThresholdMs { get; init; } - - [JsonPropertyName("minThroughputPerSecond")] - public double? MinThroughputPerSecond { get; init; } - - [JsonPropertyName("minMongoThroughputPerSecond")] - public double? MinMongoThroughputPerSecond { get; init; } - - [JsonPropertyName("maxAllocatedMb")] - public double? MaxAllocatedMb { get; init; } - - public string ScenarioId => string.IsNullOrWhiteSpace(Id) ? "linknotmerge" : Id!.Trim(); - - public string DisplayLabel => string.IsNullOrWhiteSpace(Label) ? ScenarioId : Label!.Trim(); - - public int ResolveObservationCount() => Observations.HasValue && Observations.Value > 0 - ? Observations.Value - : DefaultObservationCount; - - public int ResolveAliasGroups() => AliasGroups.HasValue && AliasGroups.Value > 0 - ? AliasGroups.Value - : DefaultAliasGroups; - - public int ResolvePurlsPerObservation() => PurlsPerObservation.HasValue && PurlsPerObservation.Value > 0 - ? PurlsPerObservation.Value - : DefaultPurlsPerObservation; - - public int ResolveCpesPerObservation() => CpesPerObservation.HasValue && CpesPerObservation.Value >= 0 - ? CpesPerObservation.Value - : DefaultCpesPerObservation; - - public int ResolveReferencesPerObservation() => ReferencesPerObservation.HasValue && ReferencesPerObservation.Value >= 0 - ? ReferencesPerObservation.Value - : DefaultReferencesPerObservation; - - public int ResolveTenantCount() => Tenants.HasValue && Tenants.Value > 0 - ? Tenants.Value - : DefaultTenants; - - public int ResolveBatchSize() => BatchSize.HasValue && BatchSize.Value > 0 - ? BatchSize.Value - : DefaultBatchSize; - - public int ResolveSeed() => Seed.HasValue && Seed.Value > 0 - ? Seed.Value - : DefaultSeed; - - public int ResolveIterations(int? defaultIterations) - { - var iterations = Iterations ?? defaultIterations ?? 3; - if (iterations <= 0) - { - throw new InvalidOperationException($"Scenario '{ScenarioId}' requires iterations > 0."); - } - - return iterations; - } - - public void Validate() - { - if (ResolveObservationCount() <= 0) - { - throw new InvalidOperationException($"Scenario '{ScenarioId}' requires observations > 0."); - } - - if (ResolveAliasGroups() <= 0) - { - throw new InvalidOperationException($"Scenario '{ScenarioId}' requires aliasGroups > 0."); - } - - if (ResolvePurlsPerObservation() <= 0) - { - throw new InvalidOperationException($"Scenario '{ScenarioId}' requires purlsPerObservation > 0."); - } - - if (ResolveTenantCount() <= 0) - { - throw new InvalidOperationException($"Scenario '{ScenarioId}' requires tenants > 0."); - } - - if (ResolveBatchSize() > ResolveObservationCount()) - { - throw new InvalidOperationException($"Scenario '{ScenarioId}' batchSize cannot exceed observations."); - } - } -} +using System.Text.Json; +using System.Text.Json.Serialization; + +namespace StellaOps.Bench.LinkNotMerge; + +internal sealed record BenchmarkConfig( + double? ThresholdMs, + double? MinThroughputPerSecond, + double? MinMongoThroughputPerSecond, + double? MaxAllocatedMb, + int? 
Iterations, + IReadOnlyList Scenarios) +{ + public static async Task LoadAsync(string path) + { + ArgumentException.ThrowIfNullOrWhiteSpace(path); + + var resolved = Path.GetFullPath(path); + if (!File.Exists(resolved)) + { + throw new FileNotFoundException($"Benchmark configuration '{resolved}' was not found.", resolved); + } + + await using var stream = File.OpenRead(resolved); + var model = await JsonSerializer.DeserializeAsync( + stream, + new JsonSerializerOptions(JsonSerializerDefaults.Web) + { + PropertyNameCaseInsensitive = true, + ReadCommentHandling = JsonCommentHandling.Skip, + AllowTrailingCommas = true, + }).ConfigureAwait(false); + + if (model is null) + { + throw new InvalidOperationException($"Benchmark configuration '{resolved}' could not be parsed."); + } + + if (model.Scenarios.Count == 0) + { + throw new InvalidOperationException($"Benchmark configuration '{resolved}' does not contain any scenarios."); + } + + foreach (var scenario in model.Scenarios) + { + scenario.Validate(); + } + + return new BenchmarkConfig( + model.ThresholdMs, + model.MinThroughputPerSecond, + model.MinMongoThroughputPerSecond, + model.MaxAllocatedMb, + model.Iterations, + model.Scenarios); + } + + private sealed class BenchmarkConfigModel + { + [JsonPropertyName("thresholdMs")] + public double? ThresholdMs { get; init; } + + [JsonPropertyName("minThroughputPerSecond")] + public double? MinThroughputPerSecond { get; init; } + + [JsonPropertyName("minMongoThroughputPerSecond")] + public double? MinMongoThroughputPerSecond { get; init; } + + [JsonPropertyName("maxAllocatedMb")] + public double? MaxAllocatedMb { get; init; } + + [JsonPropertyName("iterations")] + public int? Iterations { get; init; } + + [JsonPropertyName("scenarios")] + public List Scenarios { get; init; } = new(); + } +} + +internal sealed class LinkNotMergeScenarioConfig +{ + private const int DefaultObservationCount = 5_000; + private const int DefaultAliasGroups = 500; + private const int DefaultPurlsPerObservation = 4; + private const int DefaultCpesPerObservation = 2; + private const int DefaultReferencesPerObservation = 3; + private const int DefaultTenants = 4; + private const int DefaultBatchSize = 500; + private const int DefaultSeed = 42_022; + + [JsonPropertyName("id")] + public string? Id { get; init; } + + [JsonPropertyName("label")] + public string? Label { get; init; } + + [JsonPropertyName("observations")] + public int? Observations { get; init; } + + [JsonPropertyName("aliasGroups")] + public int? AliasGroups { get; init; } + + [JsonPropertyName("purlsPerObservation")] + public int? PurlsPerObservation { get; init; } + + [JsonPropertyName("cpesPerObservation")] + public int? CpesPerObservation { get; init; } + + [JsonPropertyName("referencesPerObservation")] + public int? ReferencesPerObservation { get; init; } + + [JsonPropertyName("tenants")] + public int? Tenants { get; init; } + + [JsonPropertyName("batchSize")] + public int? BatchSize { get; init; } + + [JsonPropertyName("seed")] + public int? Seed { get; init; } + + [JsonPropertyName("iterations")] + public int? Iterations { get; init; } + + [JsonPropertyName("thresholdMs")] + public double? ThresholdMs { get; init; } + + [JsonPropertyName("minThroughputPerSecond")] + public double? MinThroughputPerSecond { get; init; } + + [JsonPropertyName("minMongoThroughputPerSecond")] + public double? MinMongoThroughputPerSecond { get; init; } + + [JsonPropertyName("maxAllocatedMb")] + public double? 
MaxAllocatedMb { get; init; } + + public string ScenarioId => string.IsNullOrWhiteSpace(Id) ? "linknotmerge" : Id!.Trim(); + + public string DisplayLabel => string.IsNullOrWhiteSpace(Label) ? ScenarioId : Label!.Trim(); + + public int ResolveObservationCount() => Observations.HasValue && Observations.Value > 0 + ? Observations.Value + : DefaultObservationCount; + + public int ResolveAliasGroups() => AliasGroups.HasValue && AliasGroups.Value > 0 + ? AliasGroups.Value + : DefaultAliasGroups; + + public int ResolvePurlsPerObservation() => PurlsPerObservation.HasValue && PurlsPerObservation.Value > 0 + ? PurlsPerObservation.Value + : DefaultPurlsPerObservation; + + public int ResolveCpesPerObservation() => CpesPerObservation.HasValue && CpesPerObservation.Value >= 0 + ? CpesPerObservation.Value + : DefaultCpesPerObservation; + + public int ResolveReferencesPerObservation() => ReferencesPerObservation.HasValue && ReferencesPerObservation.Value >= 0 + ? ReferencesPerObservation.Value + : DefaultReferencesPerObservation; + + public int ResolveTenantCount() => Tenants.HasValue && Tenants.Value > 0 + ? Tenants.Value + : DefaultTenants; + + public int ResolveBatchSize() => BatchSize.HasValue && BatchSize.Value > 0 + ? BatchSize.Value + : DefaultBatchSize; + + public int ResolveSeed() => Seed.HasValue && Seed.Value > 0 + ? Seed.Value + : DefaultSeed; + + public int ResolveIterations(int? defaultIterations) + { + var iterations = Iterations ?? defaultIterations ?? 3; + if (iterations <= 0) + { + throw new InvalidOperationException($"Scenario '{ScenarioId}' requires iterations > 0."); + } + + return iterations; + } + + public void Validate() + { + if (ResolveObservationCount() <= 0) + { + throw new InvalidOperationException($"Scenario '{ScenarioId}' requires observations > 0."); + } + + if (ResolveAliasGroups() <= 0) + { + throw new InvalidOperationException($"Scenario '{ScenarioId}' requires aliasGroups > 0."); + } + + if (ResolvePurlsPerObservation() <= 0) + { + throw new InvalidOperationException($"Scenario '{ScenarioId}' requires purlsPerObservation > 0."); + } + + if (ResolveTenantCount() <= 0) + { + throw new InvalidOperationException($"Scenario '{ScenarioId}' requires tenants > 0."); + } + + if (ResolveBatchSize() > ResolveObservationCount()) + { + throw new InvalidOperationException($"Scenario '{ScenarioId}' batchSize cannot exceed observations."); + } + } +} diff --git a/src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/LinkNotMergeScenarioRunner.cs b/src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/LinkNotMergeScenarioRunner.cs similarity index 97% rename from src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/LinkNotMergeScenarioRunner.cs rename to src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/LinkNotMergeScenarioRunner.cs index 93a48cdd..7e64fa69 100644 --- a/src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/LinkNotMergeScenarioRunner.cs +++ b/src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/LinkNotMergeScenarioRunner.cs @@ -1,135 +1,135 @@ -using System.Diagnostics; -using EphemeralMongo; -using MongoDB.Bson; -using MongoDB.Driver; - -namespace StellaOps.Bench.LinkNotMerge; - -internal sealed class LinkNotMergeScenarioRunner -{ - private readonly LinkNotMergeScenarioConfig _config; - private readonly IReadOnlyList _seeds; - - public LinkNotMergeScenarioRunner(LinkNotMergeScenarioConfig config) - { - _config = config ?? 
throw new ArgumentNullException(nameof(config)); - _seeds = ObservationGenerator.Generate(config); - } - - public ScenarioExecutionResult Execute(int iterations, CancellationToken cancellationToken) - { - if (iterations <= 0) - { - throw new ArgumentOutOfRangeException(nameof(iterations), iterations, "Iterations must be positive."); - } - - var totalDurations = new double[iterations]; - var insertDurations = new double[iterations]; - var correlationDurations = new double[iterations]; - var allocated = new double[iterations]; - var totalThroughputs = new double[iterations]; - var insertThroughputs = new double[iterations]; - LinksetAggregationResult lastAggregation = new(0, 0, 0, 0, 0); - - for (var iteration = 0; iteration < iterations; iteration++) - { - cancellationToken.ThrowIfCancellationRequested(); - - using var runner = MongoRunner.Run(new MongoRunnerOptions - { - UseSingleNodeReplicaSet = false, - }); - - var client = new MongoClient(runner.ConnectionString); - var database = client.GetDatabase("linknotmerge_bench"); - var collection = database.GetCollection("advisory_observations"); - - CreateIndexes(collection, cancellationToken); - - var beforeAllocated = GC.GetTotalAllocatedBytes(); - var insertStopwatch = Stopwatch.StartNew(); - InsertObservations(collection, _seeds, _config.ResolveBatchSize(), cancellationToken); - insertStopwatch.Stop(); - - var correlationStopwatch = Stopwatch.StartNew(); - var documents = collection - .Find(FilterDefinition.Empty) - .Project(Builders.Projection - .Include("tenant") - .Include("linkset")) - .ToList(cancellationToken); - - var correlator = new LinksetAggregator(); - lastAggregation = correlator.Correlate(documents); - correlationStopwatch.Stop(); - - var totalElapsed = insertStopwatch.Elapsed + correlationStopwatch.Elapsed; - var afterAllocated = GC.GetTotalAllocatedBytes(); - - totalDurations[iteration] = totalElapsed.TotalMilliseconds; - insertDurations[iteration] = insertStopwatch.Elapsed.TotalMilliseconds; - correlationDurations[iteration] = correlationStopwatch.Elapsed.TotalMilliseconds; - allocated[iteration] = Math.Max(0, afterAllocated - beforeAllocated) / (1024d * 1024d); - - var totalSeconds = Math.Max(totalElapsed.TotalSeconds, 0.0001d); - totalThroughputs[iteration] = _seeds.Count / totalSeconds; - - var insertSeconds = Math.Max(insertStopwatch.Elapsed.TotalSeconds, 0.0001d); - insertThroughputs[iteration] = _seeds.Count / insertSeconds; - } - - return new ScenarioExecutionResult( - totalDurations, - insertDurations, - correlationDurations, - allocated, - totalThroughputs, - insertThroughputs, - ObservationCount: _seeds.Count, - AliasGroups: _config.ResolveAliasGroups(), - LinksetCount: lastAggregation.LinksetCount, - TenantCount: _config.ResolveTenantCount(), - AggregationResult: lastAggregation); - } - - private static void InsertObservations( - IMongoCollection collection, - IReadOnlyList seeds, - int batchSize, - CancellationToken cancellationToken) - { - for (var offset = 0; offset < seeds.Count; offset += batchSize) - { - cancellationToken.ThrowIfCancellationRequested(); - - var remaining = Math.Min(batchSize, seeds.Count - offset); - var batch = new List(remaining); - for (var index = 0; index < remaining; index++) - { - batch.Add(seeds[offset + index].ToBsonDocument()); - } - - collection.InsertMany(batch, new InsertManyOptions - { - IsOrdered = false, - BypassDocumentValidation = true, - }, cancellationToken); - } - } - - private static void CreateIndexes(IMongoCollection collection, CancellationToken cancellationToken) 
- { - var indexKeys = Builders.IndexKeys - .Ascending("tenant") - .Ascending("identifiers.aliases"); - - try - { - collection.Indexes.CreateOne(new CreateIndexModel(indexKeys), cancellationToken: cancellationToken); - } - catch - { - // Index creation failures should not abort the benchmark; they may occur when running multiple iterations concurrently. - } - } -} +using System.Diagnostics; +using EphemeralMongo; +using MongoDB.Bson; +using MongoDB.Driver; + +namespace StellaOps.Bench.LinkNotMerge; + +internal sealed class LinkNotMergeScenarioRunner +{ + private readonly LinkNotMergeScenarioConfig _config; + private readonly IReadOnlyList _seeds; + + public LinkNotMergeScenarioRunner(LinkNotMergeScenarioConfig config) + { + _config = config ?? throw new ArgumentNullException(nameof(config)); + _seeds = ObservationGenerator.Generate(config); + } + + public ScenarioExecutionResult Execute(int iterations, CancellationToken cancellationToken) + { + if (iterations <= 0) + { + throw new ArgumentOutOfRangeException(nameof(iterations), iterations, "Iterations must be positive."); + } + + var totalDurations = new double[iterations]; + var insertDurations = new double[iterations]; + var correlationDurations = new double[iterations]; + var allocated = new double[iterations]; + var totalThroughputs = new double[iterations]; + var insertThroughputs = new double[iterations]; + LinksetAggregationResult lastAggregation = new(0, 0, 0, 0, 0); + + for (var iteration = 0; iteration < iterations; iteration++) + { + cancellationToken.ThrowIfCancellationRequested(); + + using var runner = MongoRunner.Run(new MongoRunnerOptions + { + UseSingleNodeReplicaSet = false, + }); + + var client = new MongoClient(runner.ConnectionString); + var database = client.GetDatabase("linknotmerge_bench"); + var collection = database.GetCollection("advisory_observations"); + + CreateIndexes(collection, cancellationToken); + + var beforeAllocated = GC.GetTotalAllocatedBytes(); + var insertStopwatch = Stopwatch.StartNew(); + InsertObservations(collection, _seeds, _config.ResolveBatchSize(), cancellationToken); + insertStopwatch.Stop(); + + var correlationStopwatch = Stopwatch.StartNew(); + var documents = collection + .Find(FilterDefinition.Empty) + .Project(Builders.Projection + .Include("tenant") + .Include("linkset")) + .ToList(cancellationToken); + + var correlator = new LinksetAggregator(); + lastAggregation = correlator.Correlate(documents); + correlationStopwatch.Stop(); + + var totalElapsed = insertStopwatch.Elapsed + correlationStopwatch.Elapsed; + var afterAllocated = GC.GetTotalAllocatedBytes(); + + totalDurations[iteration] = totalElapsed.TotalMilliseconds; + insertDurations[iteration] = insertStopwatch.Elapsed.TotalMilliseconds; + correlationDurations[iteration] = correlationStopwatch.Elapsed.TotalMilliseconds; + allocated[iteration] = Math.Max(0, afterAllocated - beforeAllocated) / (1024d * 1024d); + + var totalSeconds = Math.Max(totalElapsed.TotalSeconds, 0.0001d); + totalThroughputs[iteration] = _seeds.Count / totalSeconds; + + var insertSeconds = Math.Max(insertStopwatch.Elapsed.TotalSeconds, 0.0001d); + insertThroughputs[iteration] = _seeds.Count / insertSeconds; + } + + return new ScenarioExecutionResult( + totalDurations, + insertDurations, + correlationDurations, + allocated, + totalThroughputs, + insertThroughputs, + ObservationCount: _seeds.Count, + AliasGroups: _config.ResolveAliasGroups(), + LinksetCount: lastAggregation.LinksetCount, + TenantCount: _config.ResolveTenantCount(), + AggregationResult: 
lastAggregation); + } + + private static void InsertObservations( + IMongoCollection collection, + IReadOnlyList seeds, + int batchSize, + CancellationToken cancellationToken) + { + for (var offset = 0; offset < seeds.Count; offset += batchSize) + { + cancellationToken.ThrowIfCancellationRequested(); + + var remaining = Math.Min(batchSize, seeds.Count - offset); + var batch = new List(remaining); + for (var index = 0; index < remaining; index++) + { + batch.Add(seeds[offset + index].ToBsonDocument()); + } + + collection.InsertMany(batch, new InsertManyOptions + { + IsOrdered = false, + BypassDocumentValidation = true, + }, cancellationToken); + } + } + + private static void CreateIndexes(IMongoCollection collection, CancellationToken cancellationToken) + { + var indexKeys = Builders.IndexKeys + .Ascending("tenant") + .Ascending("identifiers.aliases"); + + try + { + collection.Indexes.CreateOne(new CreateIndexModel(indexKeys), cancellationToken: cancellationToken); + } + catch + { + // Index creation failures should not abort the benchmark; they may occur when running multiple iterations concurrently. + } + } +} diff --git a/src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/LinksetAggregator.cs b/src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/LinksetAggregator.cs similarity index 96% rename from src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/LinksetAggregator.cs rename to src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/LinksetAggregator.cs index 1c1910ce..aa5e8875 100644 --- a/src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/LinksetAggregator.cs +++ b/src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/LinksetAggregator.cs @@ -1,140 +1,140 @@ -using MongoDB.Bson; - -namespace StellaOps.Bench.LinkNotMerge; - -internal sealed class LinksetAggregator -{ - public LinksetAggregationResult Correlate(IEnumerable documents) - { - ArgumentNullException.ThrowIfNull(documents); - - var groups = new Dictionary(StringComparer.Ordinal); - var totalObservations = 0; - - foreach (var document in documents) - { - totalObservations++; - - var tenant = document.GetValue("tenant", "unknown").AsString; - var linkset = document.GetValue("linkset", new BsonDocument()).AsBsonDocument; - var aliases = linkset.GetValue("aliases", new BsonArray()).AsBsonArray; - var purls = linkset.GetValue("purls", new BsonArray()).AsBsonArray; - var cpes = linkset.GetValue("cpes", new BsonArray()).AsBsonArray; - var references = linkset.GetValue("references", new BsonArray()).AsBsonArray; - - foreach (var aliasValue in aliases) - { - if (!aliasValue.IsString) - { - continue; - } - - var alias = aliasValue.AsString; - var key = string.Create(alias.Length + tenant.Length + 1, (tenant, alias), static (span, data) => - { - var (tenantValue, aliasValue) = data; - tenantValue.AsSpan().CopyTo(span); - span[tenantValue.Length] = '|'; - aliasValue.AsSpan().CopyTo(span[(tenantValue.Length + 1)..]); - }); - - if (!groups.TryGetValue(key, out var accumulator)) - { - accumulator = new LinksetAccumulator(tenant, alias); - groups[key] = accumulator; - } - - accumulator.AddPurls(purls); - accumulator.AddCpes(cpes); - accumulator.AddReferences(references); - } - } - - var totalReferences = 0; - var totalPurls = 0; - var totalCpes = 0; - - foreach (var accumulator in groups.Values) - { - totalReferences += accumulator.ReferenceCount; - totalPurls += accumulator.PurlCount; - totalCpes += accumulator.CpeCount; - } - - return new LinksetAggregationResult( - 
LinksetCount: groups.Count, - ObservationCount: totalObservations, - TotalPurls: totalPurls, - TotalCpes: totalCpes, - TotalReferences: totalReferences); - } - - private sealed class LinksetAccumulator - { - private readonly HashSet _purls = new(StringComparer.Ordinal); - private readonly HashSet _cpes = new(StringComparer.Ordinal); - private readonly HashSet _references = new(StringComparer.Ordinal); - - public LinksetAccumulator(string tenant, string alias) - { - Tenant = tenant; - Alias = alias; - } - - public string Tenant { get; } - - public string Alias { get; } - - public int PurlCount => _purls.Count; - - public int CpeCount => _cpes.Count; - - public int ReferenceCount => _references.Count; - - public void AddPurls(BsonArray array) - { - foreach (var item in array) - { - if (item.IsString) - { - _purls.Add(item.AsString); - } - } - } - - public void AddCpes(BsonArray array) - { - foreach (var item in array) - { - if (item.IsString) - { - _cpes.Add(item.AsString); - } - } - } - - public void AddReferences(BsonArray array) - { - foreach (var item in array) - { - if (!item.IsBsonDocument) - { - continue; - } - - var document = item.AsBsonDocument; - if (document.TryGetValue("url", out var urlValue) && urlValue.IsString) - { - _references.Add(urlValue.AsString); - } - } - } - } -} - -internal sealed record LinksetAggregationResult( - int LinksetCount, - int ObservationCount, - int TotalPurls, - int TotalCpes, - int TotalReferences); +using MongoDB.Bson; + +namespace StellaOps.Bench.LinkNotMerge; + +internal sealed class LinksetAggregator +{ + public LinksetAggregationResult Correlate(IEnumerable documents) + { + ArgumentNullException.ThrowIfNull(documents); + + var groups = new Dictionary(StringComparer.Ordinal); + var totalObservations = 0; + + foreach (var document in documents) + { + totalObservations++; + + var tenant = document.GetValue("tenant", "unknown").AsString; + var linkset = document.GetValue("linkset", new BsonDocument()).AsBsonDocument; + var aliases = linkset.GetValue("aliases", new BsonArray()).AsBsonArray; + var purls = linkset.GetValue("purls", new BsonArray()).AsBsonArray; + var cpes = linkset.GetValue("cpes", new BsonArray()).AsBsonArray; + var references = linkset.GetValue("references", new BsonArray()).AsBsonArray; + + foreach (var aliasValue in aliases) + { + if (!aliasValue.IsString) + { + continue; + } + + var alias = aliasValue.AsString; + var key = string.Create(alias.Length + tenant.Length + 1, (tenant, alias), static (span, data) => + { + var (tenantValue, aliasValue) = data; + tenantValue.AsSpan().CopyTo(span); + span[tenantValue.Length] = '|'; + aliasValue.AsSpan().CopyTo(span[(tenantValue.Length + 1)..]); + }); + + if (!groups.TryGetValue(key, out var accumulator)) + { + accumulator = new LinksetAccumulator(tenant, alias); + groups[key] = accumulator; + } + + accumulator.AddPurls(purls); + accumulator.AddCpes(cpes); + accumulator.AddReferences(references); + } + } + + var totalReferences = 0; + var totalPurls = 0; + var totalCpes = 0; + + foreach (var accumulator in groups.Values) + { + totalReferences += accumulator.ReferenceCount; + totalPurls += accumulator.PurlCount; + totalCpes += accumulator.CpeCount; + } + + return new LinksetAggregationResult( + LinksetCount: groups.Count, + ObservationCount: totalObservations, + TotalPurls: totalPurls, + TotalCpes: totalCpes, + TotalReferences: totalReferences); + } + + private sealed class LinksetAccumulator + { + private readonly HashSet _purls = new(StringComparer.Ordinal); + private readonly HashSet _cpes 
= new(StringComparer.Ordinal); + private readonly HashSet _references = new(StringComparer.Ordinal); + + public LinksetAccumulator(string tenant, string alias) + { + Tenant = tenant; + Alias = alias; + } + + public string Tenant { get; } + + public string Alias { get; } + + public int PurlCount => _purls.Count; + + public int CpeCount => _cpes.Count; + + public int ReferenceCount => _references.Count; + + public void AddPurls(BsonArray array) + { + foreach (var item in array) + { + if (item.IsString) + { + _purls.Add(item.AsString); + } + } + } + + public void AddCpes(BsonArray array) + { + foreach (var item in array) + { + if (item.IsString) + { + _cpes.Add(item.AsString); + } + } + } + + public void AddReferences(BsonArray array) + { + foreach (var item in array) + { + if (!item.IsBsonDocument) + { + continue; + } + + var document = item.AsBsonDocument; + if (document.TryGetValue("url", out var urlValue) && urlValue.IsString) + { + _references.Add(urlValue.AsString); + } + } + } + } +} + +internal sealed record LinksetAggregationResult( + int LinksetCount, + int ObservationCount, + int TotalPurls, + int TotalCpes, + int TotalReferences); diff --git a/src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/ObservationData.cs b/src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/ObservationData.cs similarity index 97% rename from src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/ObservationData.cs rename to src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/ObservationData.cs index 8f2e544b..c8f49f18 100644 --- a/src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/ObservationData.cs +++ b/src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/ObservationData.cs @@ -1,270 +1,270 @@ -using System.Collections.Immutable; -using System.Security.Cryptography; -using MongoDB.Bson; - -namespace StellaOps.Bench.LinkNotMerge; - -internal static class ObservationGenerator -{ - public static IReadOnlyList Generate(LinkNotMergeScenarioConfig config) - { - ArgumentNullException.ThrowIfNull(config); - - var observationCount = config.ResolveObservationCount(); - var aliasGroups = config.ResolveAliasGroups(); - var purlsPerObservation = config.ResolvePurlsPerObservation(); - var cpesPerObservation = config.ResolveCpesPerObservation(); - var referencesPerObservation = config.ResolveReferencesPerObservation(); - var tenantCount = config.ResolveTenantCount(); - var seed = config.ResolveSeed(); - - var seeds = new ObservationSeed[observationCount]; - var random = new Random(seed); - var baseTime = new DateTimeOffset(2025, 10, 1, 0, 0, 0, TimeSpan.Zero); - - for (var index = 0; index < observationCount; index++) - { - var tenantIndex = index % tenantCount; - var tenant = $"tenant-{tenantIndex:D2}"; - var group = index % aliasGroups; - var revision = index / aliasGroups; - var primaryAlias = $"CVE-2025-{group:D4}"; - var vendorAlias = $"VENDOR-{group:D4}"; - var thirdAlias = $"GHSA-{group:D4}-{(revision % 26 + 'a')}{(revision % 26 + 'a')}"; - var aliases = ImmutableArray.Create(primaryAlias, vendorAlias, thirdAlias); - - var observationId = $"{tenant}:advisory:{group:D5}:{revision:D6}"; - var upstreamId = primaryAlias; - var documentVersion = baseTime.AddMinutes(revision).ToString("O"); - var fetchedAt = baseTime.AddSeconds(index % 1_800); - var receivedAt = fetchedAt.AddSeconds(1); - - var purls = CreatePurls(group, revision, purlsPerObservation); - var cpes = CreateCpes(group, revision, cpesPerObservation); - var references = 
CreateReferences(primaryAlias, referencesPerObservation); - - var rawPayload = CreateRawPayload(primaryAlias, vendorAlias, purls, cpes, references); - var contentHash = ComputeContentHash(rawPayload, tenant, group, revision); - - seeds[index] = new ObservationSeed( - ObservationId: observationId, - Tenant: tenant, - Vendor: "concelier-bench", - Stream: "simulated", - Api: $"https://bench.stella/{group:D4}/{revision:D2}", - CollectorVersion: "1.0.0-bench", - UpstreamId: upstreamId, - DocumentVersion: documentVersion, - FetchedAt: fetchedAt, - ReceivedAt: receivedAt, - ContentHash: contentHash, - Aliases: aliases, - Purls: purls, - Cpes: cpes, - References: references, - ContentFormat: "CSAF", - SpecVersion: "2.0", - RawPayload: rawPayload); - } - - return seeds; - } - - private static ImmutableArray CreatePurls(int group, int revision, int count) - { - if (count <= 0) - { - return ImmutableArray.Empty; - } - - var builder = ImmutableArray.CreateBuilder(count); - for (var index = 0; index < count; index++) - { - var version = $"{revision % 9 + 1}.{index + 1}.{group % 10}"; - builder.Add($"pkg:generic/stella/sample-{group:D4}-{index}@{version}"); - } - - return builder.MoveToImmutable(); - } - - private static ImmutableArray CreateCpes(int group, int revision, int count) - { - if (count <= 0) - { - return ImmutableArray.Empty; - } - - var builder = ImmutableArray.CreateBuilder(count); - for (var index = 0; index < count; index++) - { - var component = $"benchtool{group % 50:D2}"; - var version = $"{revision % 5}.{index}"; - builder.Add($"cpe:2.3:a:stellaops:{component}:{version}:*:*:*:*:*:*:*"); - } - - return builder.MoveToImmutable(); - } - - private static ImmutableArray CreateReferences(string primaryAlias, int count) - { - if (count <= 0) - { - return ImmutableArray.Empty; - } - - var builder = ImmutableArray.CreateBuilder(count); - for (var index = 0; index < count; index++) - { - builder.Add(new ObservationReference( - Type: index % 2 == 0 ? 
"advisory" : "patch", - Url: $"https://vendor.example/{primaryAlias.ToLowerInvariant()}/ref/{index:D2}")); - } - - return builder.MoveToImmutable(); - } - - private static BsonDocument CreateRawPayload( - string primaryAlias, - string vendorAlias, - IReadOnlyCollection purls, - IReadOnlyCollection cpes, - IReadOnlyCollection references) - { - var document = new BsonDocument - { - ["id"] = primaryAlias, - ["vendorId"] = vendorAlias, - ["title"] = $"Simulated advisory {primaryAlias}", - ["summary"] = "Synthetic payload produced by Link-Not-Merge benchmark.", - ["metrics"] = new BsonArray - { - new BsonDocument - { - ["kind"] = "cvss:v3.1", - ["score"] = 7.5, - ["vector"] = "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:N/A:N", - }, - }, - }; - - if (purls.Count > 0) - { - document["purls"] = new BsonArray(purls); - } - - if (cpes.Count > 0) - { - document["cpes"] = new BsonArray(cpes); - } - - if (references.Count > 0) - { - document["references"] = new BsonArray(references.Select(reference => new BsonDocument - { - ["type"] = reference.Type, - ["url"] = reference.Url, - })); - } - - return document; - } - - private static string ComputeContentHash(BsonDocument rawPayload, string tenant, int group, int revision) - { - using var sha256 = SHA256.Create(); - var seed = $"{tenant}|{group}|{revision}"; - var rawBytes = rawPayload.ToBson(); - var seedBytes = System.Text.Encoding.UTF8.GetBytes(seed); - var combined = new byte[rawBytes.Length + seedBytes.Length]; - Buffer.BlockCopy(rawBytes, 0, combined, 0, rawBytes.Length); - Buffer.BlockCopy(seedBytes, 0, combined, rawBytes.Length, seedBytes.Length); - var hash = sha256.ComputeHash(combined); - return $"sha256:{Convert.ToHexString(hash)}"; - } -} - -internal sealed record ObservationSeed( - string ObservationId, - string Tenant, - string Vendor, - string Stream, - string Api, - string CollectorVersion, - string UpstreamId, - string DocumentVersion, - DateTimeOffset FetchedAt, - DateTimeOffset ReceivedAt, - string ContentHash, - ImmutableArray Aliases, - ImmutableArray Purls, - ImmutableArray Cpes, - ImmutableArray References, - string ContentFormat, - string SpecVersion, - BsonDocument RawPayload) -{ - public BsonDocument ToBsonDocument() - { - var aliases = new BsonArray(Aliases.Select(alias => alias)); - var purls = new BsonArray(Purls.Select(purl => purl)); - var cpes = new BsonArray(Cpes.Select(cpe => cpe)); - var references = new BsonArray(References.Select(reference => new BsonDocument - { - ["type"] = reference.Type, - ["url"] = reference.Url, - })); - - var document = new BsonDocument - { - ["_id"] = ObservationId, - ["tenant"] = Tenant, - ["source"] = new BsonDocument - { - ["vendor"] = Vendor, - ["stream"] = Stream, - ["api"] = Api, - ["collector_version"] = CollectorVersion, - }, - ["upstream"] = new BsonDocument - { - ["upstream_id"] = UpstreamId, - ["document_version"] = DocumentVersion, - ["fetched_at"] = FetchedAt.UtcDateTime, - ["received_at"] = ReceivedAt.UtcDateTime, - ["content_hash"] = ContentHash, - ["signature"] = new BsonDocument - { - ["present"] = false, - ["format"] = BsonNull.Value, - ["key_id"] = BsonNull.Value, - ["signature"] = BsonNull.Value, - }, - }, - ["content"] = new BsonDocument - { - ["format"] = ContentFormat, - ["spec_version"] = SpecVersion, - ["raw"] = RawPayload, - }, - ["identifiers"] = new BsonDocument - { - ["aliases"] = aliases, - ["primary"] = UpstreamId, - ["cve"] = Aliases.FirstOrDefault(alias => alias.StartsWith("CVE-", StringComparison.Ordinal)) ?? 
UpstreamId, - }, - ["linkset"] = new BsonDocument - { - ["aliases"] = aliases, - ["purls"] = purls, - ["cpes"] = cpes, - ["references"] = references, - ["reconciled_from"] = new BsonArray { "/content/product_tree" }, - }, - ["supersedes"] = BsonNull.Value, - }; - - return document; - } -} - -internal sealed record ObservationReference(string Type, string Url); +using System.Collections.Immutable; +using System.Security.Cryptography; +using MongoDB.Bson; + +namespace StellaOps.Bench.LinkNotMerge; + +internal static class ObservationGenerator +{ + public static IReadOnlyList Generate(LinkNotMergeScenarioConfig config) + { + ArgumentNullException.ThrowIfNull(config); + + var observationCount = config.ResolveObservationCount(); + var aliasGroups = config.ResolveAliasGroups(); + var purlsPerObservation = config.ResolvePurlsPerObservation(); + var cpesPerObservation = config.ResolveCpesPerObservation(); + var referencesPerObservation = config.ResolveReferencesPerObservation(); + var tenantCount = config.ResolveTenantCount(); + var seed = config.ResolveSeed(); + + var seeds = new ObservationSeed[observationCount]; + var random = new Random(seed); + var baseTime = new DateTimeOffset(2025, 10, 1, 0, 0, 0, TimeSpan.Zero); + + for (var index = 0; index < observationCount; index++) + { + var tenantIndex = index % tenantCount; + var tenant = $"tenant-{tenantIndex:D2}"; + var group = index % aliasGroups; + var revision = index / aliasGroups; + var primaryAlias = $"CVE-2025-{group:D4}"; + var vendorAlias = $"VENDOR-{group:D4}"; + var thirdAlias = $"GHSA-{group:D4}-{(revision % 26 + 'a')}{(revision % 26 + 'a')}"; + var aliases = ImmutableArray.Create(primaryAlias, vendorAlias, thirdAlias); + + var observationId = $"{tenant}:advisory:{group:D5}:{revision:D6}"; + var upstreamId = primaryAlias; + var documentVersion = baseTime.AddMinutes(revision).ToString("O"); + var fetchedAt = baseTime.AddSeconds(index % 1_800); + var receivedAt = fetchedAt.AddSeconds(1); + + var purls = CreatePurls(group, revision, purlsPerObservation); + var cpes = CreateCpes(group, revision, cpesPerObservation); + var references = CreateReferences(primaryAlias, referencesPerObservation); + + var rawPayload = CreateRawPayload(primaryAlias, vendorAlias, purls, cpes, references); + var contentHash = ComputeContentHash(rawPayload, tenant, group, revision); + + seeds[index] = new ObservationSeed( + ObservationId: observationId, + Tenant: tenant, + Vendor: "concelier-bench", + Stream: "simulated", + Api: $"https://bench.stella/{group:D4}/{revision:D2}", + CollectorVersion: "1.0.0-bench", + UpstreamId: upstreamId, + DocumentVersion: documentVersion, + FetchedAt: fetchedAt, + ReceivedAt: receivedAt, + ContentHash: contentHash, + Aliases: aliases, + Purls: purls, + Cpes: cpes, + References: references, + ContentFormat: "CSAF", + SpecVersion: "2.0", + RawPayload: rawPayload); + } + + return seeds; + } + + private static ImmutableArray CreatePurls(int group, int revision, int count) + { + if (count <= 0) + { + return ImmutableArray.Empty; + } + + var builder = ImmutableArray.CreateBuilder(count); + for (var index = 0; index < count; index++) + { + var version = $"{revision % 9 + 1}.{index + 1}.{group % 10}"; + builder.Add($"pkg:generic/stella/sample-{group:D4}-{index}@{version}"); + } + + return builder.MoveToImmutable(); + } + + private static ImmutableArray CreateCpes(int group, int revision, int count) + { + if (count <= 0) + { + return ImmutableArray.Empty; + } + + var builder = ImmutableArray.CreateBuilder(count); + for (var index = 0; index 
< count; index++) + { + var component = $"benchtool{group % 50:D2}"; + var version = $"{revision % 5}.{index}"; + builder.Add($"cpe:2.3:a:stellaops:{component}:{version}:*:*:*:*:*:*:*"); + } + + return builder.MoveToImmutable(); + } + + private static ImmutableArray CreateReferences(string primaryAlias, int count) + { + if (count <= 0) + { + return ImmutableArray.Empty; + } + + var builder = ImmutableArray.CreateBuilder(count); + for (var index = 0; index < count; index++) + { + builder.Add(new ObservationReference( + Type: index % 2 == 0 ? "advisory" : "patch", + Url: $"https://vendor.example/{primaryAlias.ToLowerInvariant()}/ref/{index:D2}")); + } + + return builder.MoveToImmutable(); + } + + private static BsonDocument CreateRawPayload( + string primaryAlias, + string vendorAlias, + IReadOnlyCollection purls, + IReadOnlyCollection cpes, + IReadOnlyCollection references) + { + var document = new BsonDocument + { + ["id"] = primaryAlias, + ["vendorId"] = vendorAlias, + ["title"] = $"Simulated advisory {primaryAlias}", + ["summary"] = "Synthetic payload produced by Link-Not-Merge benchmark.", + ["metrics"] = new BsonArray + { + new BsonDocument + { + ["kind"] = "cvss:v3.1", + ["score"] = 7.5, + ["vector"] = "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:N/A:N", + }, + }, + }; + + if (purls.Count > 0) + { + document["purls"] = new BsonArray(purls); + } + + if (cpes.Count > 0) + { + document["cpes"] = new BsonArray(cpes); + } + + if (references.Count > 0) + { + document["references"] = new BsonArray(references.Select(reference => new BsonDocument + { + ["type"] = reference.Type, + ["url"] = reference.Url, + })); + } + + return document; + } + + private static string ComputeContentHash(BsonDocument rawPayload, string tenant, int group, int revision) + { + using var sha256 = SHA256.Create(); + var seed = $"{tenant}|{group}|{revision}"; + var rawBytes = rawPayload.ToBson(); + var seedBytes = System.Text.Encoding.UTF8.GetBytes(seed); + var combined = new byte[rawBytes.Length + seedBytes.Length]; + Buffer.BlockCopy(rawBytes, 0, combined, 0, rawBytes.Length); + Buffer.BlockCopy(seedBytes, 0, combined, rawBytes.Length, seedBytes.Length); + var hash = sha256.ComputeHash(combined); + return $"sha256:{Convert.ToHexString(hash)}"; + } +} + +internal sealed record ObservationSeed( + string ObservationId, + string Tenant, + string Vendor, + string Stream, + string Api, + string CollectorVersion, + string UpstreamId, + string DocumentVersion, + DateTimeOffset FetchedAt, + DateTimeOffset ReceivedAt, + string ContentHash, + ImmutableArray Aliases, + ImmutableArray Purls, + ImmutableArray Cpes, + ImmutableArray References, + string ContentFormat, + string SpecVersion, + BsonDocument RawPayload) +{ + public BsonDocument ToBsonDocument() + { + var aliases = new BsonArray(Aliases.Select(alias => alias)); + var purls = new BsonArray(Purls.Select(purl => purl)); + var cpes = new BsonArray(Cpes.Select(cpe => cpe)); + var references = new BsonArray(References.Select(reference => new BsonDocument + { + ["type"] = reference.Type, + ["url"] = reference.Url, + })); + + var document = new BsonDocument + { + ["_id"] = ObservationId, + ["tenant"] = Tenant, + ["source"] = new BsonDocument + { + ["vendor"] = Vendor, + ["stream"] = Stream, + ["api"] = Api, + ["collector_version"] = CollectorVersion, + }, + ["upstream"] = new BsonDocument + { + ["upstream_id"] = UpstreamId, + ["document_version"] = DocumentVersion, + ["fetched_at"] = FetchedAt.UtcDateTime, + ["received_at"] = ReceivedAt.UtcDateTime, + ["content_hash"] = ContentHash, 
+ ["signature"] = new BsonDocument + { + ["present"] = false, + ["format"] = BsonNull.Value, + ["key_id"] = BsonNull.Value, + ["signature"] = BsonNull.Value, + }, + }, + ["content"] = new BsonDocument + { + ["format"] = ContentFormat, + ["spec_version"] = SpecVersion, + ["raw"] = RawPayload, + }, + ["identifiers"] = new BsonDocument + { + ["aliases"] = aliases, + ["primary"] = UpstreamId, + ["cve"] = Aliases.FirstOrDefault(alias => alias.StartsWith("CVE-", StringComparison.Ordinal)) ?? UpstreamId, + }, + ["linkset"] = new BsonDocument + { + ["aliases"] = aliases, + ["purls"] = purls, + ["cpes"] = cpes, + ["references"] = references, + ["reconciled_from"] = new BsonArray { "/content/product_tree" }, + }, + ["supersedes"] = BsonNull.Value, + }; + + return document; + } +} + +internal sealed record ObservationReference(string Type, string Url); diff --git a/src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Program.cs b/src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Program.cs similarity index 97% rename from src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Program.cs rename to src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Program.cs index 68407b03..7bc7821c 100644 --- a/src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Program.cs +++ b/src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Program.cs @@ -1,375 +1,375 @@ -using System.Globalization; -using StellaOps.Bench.LinkNotMerge.Baseline; -using StellaOps.Bench.LinkNotMerge.Reporting; - -namespace StellaOps.Bench.LinkNotMerge; - -internal static class Program -{ - public static async Task Main(string[] args) - { - try - { - var options = ProgramOptions.Parse(args); - var config = await BenchmarkConfig.LoadAsync(options.ConfigPath).ConfigureAwait(false); - var baseline = await BaselineLoader.LoadAsync(options.BaselinePath, CancellationToken.None).ConfigureAwait(false); - - var results = new List(); - var reports = new List(); - var failures = new List(); - - foreach (var scenario in config.Scenarios) - { - var iterations = scenario.ResolveIterations(config.Iterations); - var runner = new LinkNotMergeScenarioRunner(scenario); - var execution = runner.Execute(iterations, CancellationToken.None); - - var totalStats = DurationStatistics.From(execution.TotalDurationsMs); - var insertStats = DurationStatistics.From(execution.InsertDurationsMs); - var correlationStats = DurationStatistics.From(execution.CorrelationDurationsMs); - var allocationStats = AllocationStatistics.From(execution.AllocatedMb); - var throughputStats = ThroughputStatistics.From(execution.TotalThroughputsPerSecond); - var mongoThroughputStats = ThroughputStatistics.From(execution.InsertThroughputsPerSecond); - - var thresholdMs = scenario.ThresholdMs ?? options.ThresholdMs ?? config.ThresholdMs; - var throughputFloor = scenario.MinThroughputPerSecond ?? options.MinThroughputPerSecond ?? config.MinThroughputPerSecond; - var mongoThroughputFloor = scenario.MinMongoThroughputPerSecond ?? options.MinMongoThroughputPerSecond ?? config.MinMongoThroughputPerSecond; - var allocationLimit = scenario.MaxAllocatedMb ?? options.MaxAllocatedMb ?? 
config.MaxAllocatedMb; - - var result = new ScenarioResult( - scenario.ScenarioId, - scenario.DisplayLabel, - iterations, - execution.ObservationCount, - execution.AliasGroups, - execution.LinksetCount, - totalStats, - insertStats, - correlationStats, - throughputStats, - mongoThroughputStats, - allocationStats, - thresholdMs, - throughputFloor, - mongoThroughputFloor, - allocationLimit); - - results.Add(result); - - if (thresholdMs is { } threshold && result.TotalStatistics.MaxMs > threshold) - { - failures.Add($"{result.Id} exceeded total latency threshold: {result.TotalStatistics.MaxMs:F2} ms > {threshold:F2} ms"); - } - - if (throughputFloor is { } floor && result.TotalThroughputStatistics.MinPerSecond < floor) - { - failures.Add($"{result.Id} fell below throughput floor: {result.TotalThroughputStatistics.MinPerSecond:N0} obs/s < {floor:N0} obs/s"); - } - - if (mongoThroughputFloor is { } mongoFloor && result.InsertThroughputStatistics.MinPerSecond < mongoFloor) - { - failures.Add($"{result.Id} fell below Mongo throughput floor: {result.InsertThroughputStatistics.MinPerSecond:N0} ops/s < {mongoFloor:N0} ops/s"); - } - - if (allocationLimit is { } limit && result.AllocationStatistics.MaxAllocatedMb > limit) - { - failures.Add($"{result.Id} exceeded allocation budget: {result.AllocationStatistics.MaxAllocatedMb:F2} MB > {limit:F2} MB"); - } - - baseline.TryGetValue(result.Id, out var baselineEntry); - var report = new BenchmarkScenarioReport(result, baselineEntry, options.RegressionLimit); - reports.Add(report); - failures.AddRange(report.BuildRegressionFailureMessages()); - } - - TablePrinter.Print(results); - - if (!string.IsNullOrWhiteSpace(options.CsvOutPath)) - { - CsvWriter.Write(options.CsvOutPath!, results); - } - - if (!string.IsNullOrWhiteSpace(options.JsonOutPath)) - { - var metadata = new BenchmarkJsonMetadata( - SchemaVersion: "linknotmerge-bench/1.0", - CapturedAtUtc: (options.CapturedAtUtc ?? DateTimeOffset.UtcNow).ToUniversalTime(), - Commit: options.Commit, - Environment: options.Environment); - - await BenchmarkJsonWriter.WriteAsync(options.JsonOutPath!, metadata, reports, CancellationToken.None).ConfigureAwait(false); - } - - if (!string.IsNullOrWhiteSpace(options.PrometheusOutPath)) - { - PrometheusWriter.Write(options.PrometheusOutPath!, reports); - } - - if (failures.Count > 0) - { - Console.Error.WriteLine(); - Console.Error.WriteLine("Benchmark failures detected:"); - foreach (var failure in failures.Distinct()) - { - Console.Error.WriteLine($" - {failure}"); - } - - return 1; - } - - return 0; - } - catch (Exception ex) - { - Console.Error.WriteLine($"linknotmerge-bench error: {ex.Message}"); - return 1; - } - } - - private sealed record ProgramOptions( - string ConfigPath, - int? Iterations, - double? ThresholdMs, - double? MinThroughputPerSecond, - double? MinMongoThroughputPerSecond, - double? MaxAllocatedMb, - string? CsvOutPath, - string? JsonOutPath, - string? PrometheusOutPath, - string BaselinePath, - DateTimeOffset? CapturedAtUtc, - string? Commit, - string? Environment, - double? RegressionLimit) - { - public static ProgramOptions Parse(string[] args) - { - var configPath = DefaultConfigPath(); - var baselinePath = DefaultBaselinePath(); - - int? iterations = null; - double? thresholdMs = null; - double? minThroughput = null; - double? minMongoThroughput = null; - double? maxAllocated = null; - string? csvOut = null; - string? jsonOut = null; - string? promOut = null; - DateTimeOffset? capturedAt = null; - string? commit = null; - string? 
environment = null; - double? regressionLimit = null; - - for (var index = 0; index < args.Length; index++) - { - var current = args[index]; - switch (current) - { - case "--config": - EnsureNext(args, index); - configPath = Path.GetFullPath(args[++index]); - break; - case "--iterations": - EnsureNext(args, index); - iterations = int.Parse(args[++index], CultureInfo.InvariantCulture); - break; - case "--threshold-ms": - EnsureNext(args, index); - thresholdMs = double.Parse(args[++index], CultureInfo.InvariantCulture); - break; - case "--min-throughput": - EnsureNext(args, index); - minThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture); - break; - case "--min-mongo-throughput": - EnsureNext(args, index); - minMongoThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture); - break; - case "--max-allocated-mb": - EnsureNext(args, index); - maxAllocated = double.Parse(args[++index], CultureInfo.InvariantCulture); - break; - case "--csv": - EnsureNext(args, index); - csvOut = args[++index]; - break; - case "--json": - EnsureNext(args, index); - jsonOut = args[++index]; - break; - case "--prometheus": - EnsureNext(args, index); - promOut = args[++index]; - break; - case "--baseline": - EnsureNext(args, index); - baselinePath = Path.GetFullPath(args[++index]); - break; - case "--captured-at": - EnsureNext(args, index); - capturedAt = DateTimeOffset.Parse(args[++index], CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal); - break; - case "--commit": - EnsureNext(args, index); - commit = args[++index]; - break; - case "--environment": - EnsureNext(args, index); - environment = args[++index]; - break; - case "--regression-limit": - EnsureNext(args, index); - regressionLimit = double.Parse(args[++index], CultureInfo.InvariantCulture); - break; - case "--help": - case "-h": - PrintUsage(); - System.Environment.Exit(0); - break; - default: - throw new ArgumentException($"Unknown argument '{current}'."); - } - } - - return new ProgramOptions( - configPath, - iterations, - thresholdMs, - minThroughput, - minMongoThroughput, - maxAllocated, - csvOut, - jsonOut, - promOut, - baselinePath, - capturedAt, - commit, - environment, - regressionLimit); - } - - private static string DefaultConfigPath() - { - var binaryDir = AppContext.BaseDirectory; - var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", "..")); - var benchRoot = Path.GetFullPath(Path.Combine(projectDir, "..")); - return Path.Combine(benchRoot, "config.json"); - } - - private static string DefaultBaselinePath() - { - var binaryDir = AppContext.BaseDirectory; - var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", "..")); - var benchRoot = Path.GetFullPath(Path.Combine(projectDir, "..")); - return Path.Combine(benchRoot, "baseline.csv"); - } - - private static void EnsureNext(string[] args, int index) - { - if (index + 1 >= args.Length) - { - throw new ArgumentException("Missing value for argument."); - } - } - - private static void PrintUsage() - { - Console.WriteLine("Usage: linknotmerge-bench [options]"); - Console.WriteLine(); - Console.WriteLine("Options:"); - Console.WriteLine(" --config Path to benchmark configuration JSON."); - Console.WriteLine(" --iterations Override iteration count."); - Console.WriteLine(" --threshold-ms Global latency threshold in milliseconds."); - Console.WriteLine(" --min-throughput Global throughput floor (observations/second)."); - Console.WriteLine(" --min-mongo-throughput Mongo insert throughput floor (ops/second)."); - 
Console.WriteLine(" --max-allocated-mb Global allocation ceiling (MB)."); - Console.WriteLine(" --csv Write CSV results to path."); - Console.WriteLine(" --json Write JSON results to path."); - Console.WriteLine(" --prometheus Write Prometheus exposition metrics to path."); - Console.WriteLine(" --baseline Baseline CSV path."); - Console.WriteLine(" --captured-at Timestamp to embed in JSON metadata."); - Console.WriteLine(" --commit Commit identifier for metadata."); - Console.WriteLine(" --environment Environment label for metadata."); - Console.WriteLine(" --regression-limit Regression multiplier (default 1.15)."); - } - } -} - -internal static class TablePrinter -{ - public static void Print(IEnumerable results) - { - Console.WriteLine("Scenario | Observations | Aliases | Linksets | Total(ms) | Correl(ms) | Insert(ms) | Min k/s | Mongo k/s | Alloc(MB)"); - Console.WriteLine("---------------------------- | ------------- | ------- | -------- | ---------- | ---------- | ----------- | -------- | --------- | --------"); - foreach (var row in results) - { - Console.WriteLine(string.Join(" | ", new[] - { - row.IdColumn, - row.ObservationsColumn, - row.AliasColumn, - row.LinksetColumn, - row.TotalMeanColumn, - row.CorrelationMeanColumn, - row.InsertMeanColumn, - row.ThroughputColumn, - row.MongoThroughputColumn, - row.AllocatedColumn, - })); - } - } -} - -internal static class CsvWriter -{ - public static void Write(string path, IEnumerable results) - { - ArgumentException.ThrowIfNullOrWhiteSpace(path); - ArgumentNullException.ThrowIfNull(results); - - var resolved = Path.GetFullPath(path); - var directory = Path.GetDirectoryName(resolved); - if (!string.IsNullOrEmpty(directory)) - { - Directory.CreateDirectory(directory); - } - - using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None); - using var writer = new StreamWriter(stream); - writer.WriteLine("scenario,iterations,observations,aliases,linksets,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_throughput_per_sec,min_throughput_per_sec,mean_mongo_throughput_per_sec,min_mongo_throughput_per_sec,max_allocated_mb"); - - foreach (var result in results) - { - writer.Write(result.Id); - writer.Write(','); - writer.Write(result.Iterations.ToString(CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(result.ObservationCount.ToString(CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(result.AliasGroups.ToString(CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(result.LinksetCount.ToString(CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(result.TotalStatistics.MeanMs.ToString("F4", CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(result.TotalStatistics.P95Ms.ToString("F4", CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(result.TotalStatistics.MaxMs.ToString("F4", CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(result.InsertStatistics.MeanMs.ToString("F4", CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(result.CorrelationStatistics.MeanMs.ToString("F4", CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(result.TotalThroughputStatistics.MeanPerSecond.ToString("F4", CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(result.TotalThroughputStatistics.MinPerSecond.ToString("F4", CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(result.InsertThroughputStatistics.MeanPerSecond.ToString("F4", 
CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(result.InsertThroughputStatistics.MinPerSecond.ToString("F4", CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(result.AllocationStatistics.MaxAllocatedMb.ToString("F4", CultureInfo.InvariantCulture)); - writer.WriteLine(); - } - } -} +using System.Globalization; +using StellaOps.Bench.LinkNotMerge.Baseline; +using StellaOps.Bench.LinkNotMerge.Reporting; + +namespace StellaOps.Bench.LinkNotMerge; + +internal static class Program +{ + public static async Task Main(string[] args) + { + try + { + var options = ProgramOptions.Parse(args); + var config = await BenchmarkConfig.LoadAsync(options.ConfigPath).ConfigureAwait(false); + var baseline = await BaselineLoader.LoadAsync(options.BaselinePath, CancellationToken.None).ConfigureAwait(false); + + var results = new List(); + var reports = new List(); + var failures = new List(); + + foreach (var scenario in config.Scenarios) + { + var iterations = scenario.ResolveIterations(config.Iterations); + var runner = new LinkNotMergeScenarioRunner(scenario); + var execution = runner.Execute(iterations, CancellationToken.None); + + var totalStats = DurationStatistics.From(execution.TotalDurationsMs); + var insertStats = DurationStatistics.From(execution.InsertDurationsMs); + var correlationStats = DurationStatistics.From(execution.CorrelationDurationsMs); + var allocationStats = AllocationStatistics.From(execution.AllocatedMb); + var throughputStats = ThroughputStatistics.From(execution.TotalThroughputsPerSecond); + var mongoThroughputStats = ThroughputStatistics.From(execution.InsertThroughputsPerSecond); + + var thresholdMs = scenario.ThresholdMs ?? options.ThresholdMs ?? config.ThresholdMs; + var throughputFloor = scenario.MinThroughputPerSecond ?? options.MinThroughputPerSecond ?? config.MinThroughputPerSecond; + var mongoThroughputFloor = scenario.MinMongoThroughputPerSecond ?? options.MinMongoThroughputPerSecond ?? config.MinMongoThroughputPerSecond; + var allocationLimit = scenario.MaxAllocatedMb ?? options.MaxAllocatedMb ?? 
config.MaxAllocatedMb; + + var result = new ScenarioResult( + scenario.ScenarioId, + scenario.DisplayLabel, + iterations, + execution.ObservationCount, + execution.AliasGroups, + execution.LinksetCount, + totalStats, + insertStats, + correlationStats, + throughputStats, + mongoThroughputStats, + allocationStats, + thresholdMs, + throughputFloor, + mongoThroughputFloor, + allocationLimit); + + results.Add(result); + + if (thresholdMs is { } threshold && result.TotalStatistics.MaxMs > threshold) + { + failures.Add($"{result.Id} exceeded total latency threshold: {result.TotalStatistics.MaxMs:F2} ms > {threshold:F2} ms"); + } + + if (throughputFloor is { } floor && result.TotalThroughputStatistics.MinPerSecond < floor) + { + failures.Add($"{result.Id} fell below throughput floor: {result.TotalThroughputStatistics.MinPerSecond:N0} obs/s < {floor:N0} obs/s"); + } + + if (mongoThroughputFloor is { } mongoFloor && result.InsertThroughputStatistics.MinPerSecond < mongoFloor) + { + failures.Add($"{result.Id} fell below Mongo throughput floor: {result.InsertThroughputStatistics.MinPerSecond:N0} ops/s < {mongoFloor:N0} ops/s"); + } + + if (allocationLimit is { } limit && result.AllocationStatistics.MaxAllocatedMb > limit) + { + failures.Add($"{result.Id} exceeded allocation budget: {result.AllocationStatistics.MaxAllocatedMb:F2} MB > {limit:F2} MB"); + } + + baseline.TryGetValue(result.Id, out var baselineEntry); + var report = new BenchmarkScenarioReport(result, baselineEntry, options.RegressionLimit); + reports.Add(report); + failures.AddRange(report.BuildRegressionFailureMessages()); + } + + TablePrinter.Print(results); + + if (!string.IsNullOrWhiteSpace(options.CsvOutPath)) + { + CsvWriter.Write(options.CsvOutPath!, results); + } + + if (!string.IsNullOrWhiteSpace(options.JsonOutPath)) + { + var metadata = new BenchmarkJsonMetadata( + SchemaVersion: "linknotmerge-bench/1.0", + CapturedAtUtc: (options.CapturedAtUtc ?? DateTimeOffset.UtcNow).ToUniversalTime(), + Commit: options.Commit, + Environment: options.Environment); + + await BenchmarkJsonWriter.WriteAsync(options.JsonOutPath!, metadata, reports, CancellationToken.None).ConfigureAwait(false); + } + + if (!string.IsNullOrWhiteSpace(options.PrometheusOutPath)) + { + PrometheusWriter.Write(options.PrometheusOutPath!, reports); + } + + if (failures.Count > 0) + { + Console.Error.WriteLine(); + Console.Error.WriteLine("Benchmark failures detected:"); + foreach (var failure in failures.Distinct()) + { + Console.Error.WriteLine($" - {failure}"); + } + + return 1; + } + + return 0; + } + catch (Exception ex) + { + Console.Error.WriteLine($"linknotmerge-bench error: {ex.Message}"); + return 1; + } + } + + private sealed record ProgramOptions( + string ConfigPath, + int? Iterations, + double? ThresholdMs, + double? MinThroughputPerSecond, + double? MinMongoThroughputPerSecond, + double? MaxAllocatedMb, + string? CsvOutPath, + string? JsonOutPath, + string? PrometheusOutPath, + string BaselinePath, + DateTimeOffset? CapturedAtUtc, + string? Commit, + string? Environment, + double? RegressionLimit) + { + public static ProgramOptions Parse(string[] args) + { + var configPath = DefaultConfigPath(); + var baselinePath = DefaultBaselinePath(); + + int? iterations = null; + double? thresholdMs = null; + double? minThroughput = null; + double? minMongoThroughput = null; + double? maxAllocated = null; + string? csvOut = null; + string? jsonOut = null; + string? promOut = null; + DateTimeOffset? capturedAt = null; + string? commit = null; + string? 
environment = null; + double? regressionLimit = null; + + for (var index = 0; index < args.Length; index++) + { + var current = args[index]; + switch (current) + { + case "--config": + EnsureNext(args, index); + configPath = Path.GetFullPath(args[++index]); + break; + case "--iterations": + EnsureNext(args, index); + iterations = int.Parse(args[++index], CultureInfo.InvariantCulture); + break; + case "--threshold-ms": + EnsureNext(args, index); + thresholdMs = double.Parse(args[++index], CultureInfo.InvariantCulture); + break; + case "--min-throughput": + EnsureNext(args, index); + minThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture); + break; + case "--min-mongo-throughput": + EnsureNext(args, index); + minMongoThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture); + break; + case "--max-allocated-mb": + EnsureNext(args, index); + maxAllocated = double.Parse(args[++index], CultureInfo.InvariantCulture); + break; + case "--csv": + EnsureNext(args, index); + csvOut = args[++index]; + break; + case "--json": + EnsureNext(args, index); + jsonOut = args[++index]; + break; + case "--prometheus": + EnsureNext(args, index); + promOut = args[++index]; + break; + case "--baseline": + EnsureNext(args, index); + baselinePath = Path.GetFullPath(args[++index]); + break; + case "--captured-at": + EnsureNext(args, index); + capturedAt = DateTimeOffset.Parse(args[++index], CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal); + break; + case "--commit": + EnsureNext(args, index); + commit = args[++index]; + break; + case "--environment": + EnsureNext(args, index); + environment = args[++index]; + break; + case "--regression-limit": + EnsureNext(args, index); + regressionLimit = double.Parse(args[++index], CultureInfo.InvariantCulture); + break; + case "--help": + case "-h": + PrintUsage(); + System.Environment.Exit(0); + break; + default: + throw new ArgumentException($"Unknown argument '{current}'."); + } + } + + return new ProgramOptions( + configPath, + iterations, + thresholdMs, + minThroughput, + minMongoThroughput, + maxAllocated, + csvOut, + jsonOut, + promOut, + baselinePath, + capturedAt, + commit, + environment, + regressionLimit); + } + + private static string DefaultConfigPath() + { + var binaryDir = AppContext.BaseDirectory; + var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", "..")); + var benchRoot = Path.GetFullPath(Path.Combine(projectDir, "..")); + return Path.Combine(benchRoot, "config.json"); + } + + private static string DefaultBaselinePath() + { + var binaryDir = AppContext.BaseDirectory; + var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", "..")); + var benchRoot = Path.GetFullPath(Path.Combine(projectDir, "..")); + return Path.Combine(benchRoot, "baseline.csv"); + } + + private static void EnsureNext(string[] args, int index) + { + if (index + 1 >= args.Length) + { + throw new ArgumentException("Missing value for argument."); + } + } + + private static void PrintUsage() + { + Console.WriteLine("Usage: linknotmerge-bench [options]"); + Console.WriteLine(); + Console.WriteLine("Options:"); + Console.WriteLine(" --config Path to benchmark configuration JSON."); + Console.WriteLine(" --iterations Override iteration count."); + Console.WriteLine(" --threshold-ms Global latency threshold in milliseconds."); + Console.WriteLine(" --min-throughput Global throughput floor (observations/second)."); + Console.WriteLine(" --min-mongo-throughput Mongo insert throughput floor (ops/second)."); + 
Console.WriteLine(" --max-allocated-mb Global allocation ceiling (MB)."); + Console.WriteLine(" --csv Write CSV results to path."); + Console.WriteLine(" --json Write JSON results to path."); + Console.WriteLine(" --prometheus Write Prometheus exposition metrics to path."); + Console.WriteLine(" --baseline Baseline CSV path."); + Console.WriteLine(" --captured-at Timestamp to embed in JSON metadata."); + Console.WriteLine(" --commit Commit identifier for metadata."); + Console.WriteLine(" --environment Environment label for metadata."); + Console.WriteLine(" --regression-limit Regression multiplier (default 1.15)."); + } + } +} + +internal static class TablePrinter +{ + public static void Print(IEnumerable results) + { + Console.WriteLine("Scenario | Observations | Aliases | Linksets | Total(ms) | Correl(ms) | Insert(ms) | Min k/s | Mongo k/s | Alloc(MB)"); + Console.WriteLine("---------------------------- | ------------- | ------- | -------- | ---------- | ---------- | ----------- | -------- | --------- | --------"); + foreach (var row in results) + { + Console.WriteLine(string.Join(" | ", new[] + { + row.IdColumn, + row.ObservationsColumn, + row.AliasColumn, + row.LinksetColumn, + row.TotalMeanColumn, + row.CorrelationMeanColumn, + row.InsertMeanColumn, + row.ThroughputColumn, + row.MongoThroughputColumn, + row.AllocatedColumn, + })); + } + } +} + +internal static class CsvWriter +{ + public static void Write(string path, IEnumerable results) + { + ArgumentException.ThrowIfNullOrWhiteSpace(path); + ArgumentNullException.ThrowIfNull(results); + + var resolved = Path.GetFullPath(path); + var directory = Path.GetDirectoryName(resolved); + if (!string.IsNullOrEmpty(directory)) + { + Directory.CreateDirectory(directory); + } + + using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None); + using var writer = new StreamWriter(stream); + writer.WriteLine("scenario,iterations,observations,aliases,linksets,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_throughput_per_sec,min_throughput_per_sec,mean_mongo_throughput_per_sec,min_mongo_throughput_per_sec,max_allocated_mb"); + + foreach (var result in results) + { + writer.Write(result.Id); + writer.Write(','); + writer.Write(result.Iterations.ToString(CultureInfo.InvariantCulture)); + writer.Write(','); + writer.Write(result.ObservationCount.ToString(CultureInfo.InvariantCulture)); + writer.Write(','); + writer.Write(result.AliasGroups.ToString(CultureInfo.InvariantCulture)); + writer.Write(','); + writer.Write(result.LinksetCount.ToString(CultureInfo.InvariantCulture)); + writer.Write(','); + writer.Write(result.TotalStatistics.MeanMs.ToString("F4", CultureInfo.InvariantCulture)); + writer.Write(','); + writer.Write(result.TotalStatistics.P95Ms.ToString("F4", CultureInfo.InvariantCulture)); + writer.Write(','); + writer.Write(result.TotalStatistics.MaxMs.ToString("F4", CultureInfo.InvariantCulture)); + writer.Write(','); + writer.Write(result.InsertStatistics.MeanMs.ToString("F4", CultureInfo.InvariantCulture)); + writer.Write(','); + writer.Write(result.CorrelationStatistics.MeanMs.ToString("F4", CultureInfo.InvariantCulture)); + writer.Write(','); + writer.Write(result.TotalThroughputStatistics.MeanPerSecond.ToString("F4", CultureInfo.InvariantCulture)); + writer.Write(','); + writer.Write(result.TotalThroughputStatistics.MinPerSecond.ToString("F4", CultureInfo.InvariantCulture)); + writer.Write(','); + writer.Write(result.InsertThroughputStatistics.MeanPerSecond.ToString("F4", 
CultureInfo.InvariantCulture)); + writer.Write(','); + writer.Write(result.InsertThroughputStatistics.MinPerSecond.ToString("F4", CultureInfo.InvariantCulture)); + writer.Write(','); + writer.Write(result.AllocationStatistics.MaxAllocatedMb.ToString("F4", CultureInfo.InvariantCulture)); + writer.WriteLine(); + } + } +} diff --git a/src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Properties/AssemblyInfo.cs b/src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Properties/AssemblyInfo.cs similarity index 97% rename from src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Properties/AssemblyInfo.cs rename to src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Properties/AssemblyInfo.cs index 66f83687..7e3eb1ff 100644 --- a/src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Properties/AssemblyInfo.cs +++ b/src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Properties/AssemblyInfo.cs @@ -1,3 +1,3 @@ -using System.Runtime.CompilerServices; - -[assembly: InternalsVisibleTo("StellaOps.Bench.LinkNotMerge.Tests")] +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("StellaOps.Bench.LinkNotMerge.Tests")] diff --git a/src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Reporting/BenchmarkJsonWriter.cs b/src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Reporting/BenchmarkJsonWriter.cs similarity index 97% rename from src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Reporting/BenchmarkJsonWriter.cs rename to src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Reporting/BenchmarkJsonWriter.cs index f1cf6ea7..2bc6b246 100644 --- a/src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Reporting/BenchmarkJsonWriter.cs +++ b/src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Reporting/BenchmarkJsonWriter.cs @@ -1,151 +1,151 @@ -using System.Text.Json; -using System.Text.Json.Serialization; - -namespace StellaOps.Bench.LinkNotMerge.Reporting; - -internal static class BenchmarkJsonWriter -{ - private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) - { - WriteIndented = true, - DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, - }; - - public static async Task WriteAsync( - string path, - BenchmarkJsonMetadata metadata, - IReadOnlyList reports, - CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(path); - ArgumentNullException.ThrowIfNull(metadata); - ArgumentNullException.ThrowIfNull(reports); - - var resolved = Path.GetFullPath(path); - var directory = Path.GetDirectoryName(resolved); - if (!string.IsNullOrEmpty(directory)) - { - Directory.CreateDirectory(directory); - } - - var document = new BenchmarkJsonDocument( - metadata.SchemaVersion, - metadata.CapturedAtUtc, - metadata.Commit, - metadata.Environment, - reports.Select(CreateScenario).ToArray()); - - await using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None); - await JsonSerializer.SerializeAsync(stream, document, SerializerOptions, cancellationToken).ConfigureAwait(false); - await stream.FlushAsync(cancellationToken).ConfigureAwait(false); - } - - private static BenchmarkJsonScenario CreateScenario(BenchmarkScenarioReport report) - { - var baseline = report.Baseline; - return new BenchmarkJsonScenario( - report.Result.Id, - report.Result.Label, - report.Result.Iterations, - report.Result.ObservationCount, - report.Result.AliasGroups, - 
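Illustrative sketch, not part of the patch: how the `CsvWriter` shown above pairs with the baseline file further down in this diff. It assumes the snippet compiles inside the renamed `StellaOps.Bench.LinkNotMerge` project and that `CsvWriter.Write` takes an `IEnumerable<ScenarioResult>`, matching the way `Program.Main` calls it.

```csharp
// Illustrative sketch (not in the patch). Assumes it compiles inside the renamed
// StellaOps.Bench.LinkNotMerge project, where CsvWriter and ScenarioResult live.
using System.Collections.Generic;
using StellaOps.Bench.LinkNotMerge;

internal static class BaselineRefreshSketch
{
    // Emits the same column layout as baseline.csv later in this patch, so the
    // output of a trusted run could be copied over
    // src/Bench/StellaOps.Bench/LinkNotMerge/baseline.csv if the baseline is refreshed.
    public static void Refresh(IReadOnlyList<ScenarioResult> results) =>
        CsvWriter.Write("out/linknotmerge-baseline.csv", results);
}
```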
report.Result.LinksetCount, - report.Result.TotalStatistics.MeanMs, - report.Result.TotalStatistics.P95Ms, - report.Result.TotalStatistics.MaxMs, - report.Result.InsertStatistics.MeanMs, - report.Result.CorrelationStatistics.MeanMs, - report.Result.TotalThroughputStatistics.MeanPerSecond, - report.Result.TotalThroughputStatistics.MinPerSecond, - report.Result.InsertThroughputStatistics.MeanPerSecond, - report.Result.InsertThroughputStatistics.MinPerSecond, - report.Result.AllocationStatistics.MaxAllocatedMb, - report.Result.ThresholdMs, - report.Result.MinThroughputThresholdPerSecond, - report.Result.MinMongoThroughputThresholdPerSecond, - report.Result.MaxAllocatedThresholdMb, - baseline is null - ? null - : new BenchmarkJsonScenarioBaseline( - baseline.Iterations, - baseline.Observations, - baseline.Aliases, - baseline.Linksets, - baseline.MeanTotalMs, - baseline.P95TotalMs, - baseline.MaxTotalMs, - baseline.MeanInsertMs, - baseline.MeanCorrelationMs, - baseline.MeanThroughputPerSecond, - baseline.MinThroughputPerSecond, - baseline.MeanMongoThroughputPerSecond, - baseline.MinMongoThroughputPerSecond, - baseline.MaxAllocatedMb), - new BenchmarkJsonScenarioRegression( - report.DurationRegressionRatio, - report.ThroughputRegressionRatio, - report.MongoThroughputRegressionRatio, - report.RegressionLimit, - report.RegressionBreached)); - } - - private sealed record BenchmarkJsonDocument( - string SchemaVersion, - DateTimeOffset CapturedAt, - string? Commit, - string? Environment, - IReadOnlyList Scenarios); - - private sealed record BenchmarkJsonScenario( - string Id, - string Label, - int Iterations, - int Observations, - int Aliases, - int Linksets, - double MeanTotalMs, - double P95TotalMs, - double MaxTotalMs, - double MeanInsertMs, - double MeanCorrelationMs, - double MeanThroughputPerSecond, - double MinThroughputPerSecond, - double MeanMongoThroughputPerSecond, - double MinMongoThroughputPerSecond, - double MaxAllocatedMb, - double? ThresholdMs, - double? MinThroughputThresholdPerSecond, - double? MinMongoThroughputThresholdPerSecond, - double? MaxAllocatedThresholdMb, - BenchmarkJsonScenarioBaseline? Baseline, - BenchmarkJsonScenarioRegression Regression); - - private sealed record BenchmarkJsonScenarioBaseline( - int Iterations, - int Observations, - int Aliases, - int Linksets, - double MeanTotalMs, - double P95TotalMs, - double MaxTotalMs, - double MeanInsertMs, - double MeanCorrelationMs, - double MeanThroughputPerSecond, - double MinThroughputPerSecond, - double MeanMongoThroughputPerSecond, - double MinMongoThroughputPerSecond, - double MaxAllocatedMb); - - private sealed record BenchmarkJsonScenarioRegression( - double? DurationRatio, - double? ThroughputRatio, - double? MongoThroughputRatio, - double Limit, - bool Breached); -} - -internal sealed record BenchmarkJsonMetadata( - string SchemaVersion, - DateTimeOffset CapturedAtUtc, - string? Commit, - string? 
Environment); +using System.Text.Json; +using System.Text.Json.Serialization; + +namespace StellaOps.Bench.LinkNotMerge.Reporting; + +internal static class BenchmarkJsonWriter +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + WriteIndented = true, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + }; + + public static async Task WriteAsync( + string path, + BenchmarkJsonMetadata metadata, + IReadOnlyList reports, + CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(path); + ArgumentNullException.ThrowIfNull(metadata); + ArgumentNullException.ThrowIfNull(reports); + + var resolved = Path.GetFullPath(path); + var directory = Path.GetDirectoryName(resolved); + if (!string.IsNullOrEmpty(directory)) + { + Directory.CreateDirectory(directory); + } + + var document = new BenchmarkJsonDocument( + metadata.SchemaVersion, + metadata.CapturedAtUtc, + metadata.Commit, + metadata.Environment, + reports.Select(CreateScenario).ToArray()); + + await using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None); + await JsonSerializer.SerializeAsync(stream, document, SerializerOptions, cancellationToken).ConfigureAwait(false); + await stream.FlushAsync(cancellationToken).ConfigureAwait(false); + } + + private static BenchmarkJsonScenario CreateScenario(BenchmarkScenarioReport report) + { + var baseline = report.Baseline; + return new BenchmarkJsonScenario( + report.Result.Id, + report.Result.Label, + report.Result.Iterations, + report.Result.ObservationCount, + report.Result.AliasGroups, + report.Result.LinksetCount, + report.Result.TotalStatistics.MeanMs, + report.Result.TotalStatistics.P95Ms, + report.Result.TotalStatistics.MaxMs, + report.Result.InsertStatistics.MeanMs, + report.Result.CorrelationStatistics.MeanMs, + report.Result.TotalThroughputStatistics.MeanPerSecond, + report.Result.TotalThroughputStatistics.MinPerSecond, + report.Result.InsertThroughputStatistics.MeanPerSecond, + report.Result.InsertThroughputStatistics.MinPerSecond, + report.Result.AllocationStatistics.MaxAllocatedMb, + report.Result.ThresholdMs, + report.Result.MinThroughputThresholdPerSecond, + report.Result.MinMongoThroughputThresholdPerSecond, + report.Result.MaxAllocatedThresholdMb, + baseline is null + ? null + : new BenchmarkJsonScenarioBaseline( + baseline.Iterations, + baseline.Observations, + baseline.Aliases, + baseline.Linksets, + baseline.MeanTotalMs, + baseline.P95TotalMs, + baseline.MaxTotalMs, + baseline.MeanInsertMs, + baseline.MeanCorrelationMs, + baseline.MeanThroughputPerSecond, + baseline.MinThroughputPerSecond, + baseline.MeanMongoThroughputPerSecond, + baseline.MinMongoThroughputPerSecond, + baseline.MaxAllocatedMb), + new BenchmarkJsonScenarioRegression( + report.DurationRegressionRatio, + report.ThroughputRegressionRatio, + report.MongoThroughputRegressionRatio, + report.RegressionLimit, + report.RegressionBreached)); + } + + private sealed record BenchmarkJsonDocument( + string SchemaVersion, + DateTimeOffset CapturedAt, + string? Commit, + string? 
Environment, + IReadOnlyList Scenarios); + + private sealed record BenchmarkJsonScenario( + string Id, + string Label, + int Iterations, + int Observations, + int Aliases, + int Linksets, + double MeanTotalMs, + double P95TotalMs, + double MaxTotalMs, + double MeanInsertMs, + double MeanCorrelationMs, + double MeanThroughputPerSecond, + double MinThroughputPerSecond, + double MeanMongoThroughputPerSecond, + double MinMongoThroughputPerSecond, + double MaxAllocatedMb, + double? ThresholdMs, + double? MinThroughputThresholdPerSecond, + double? MinMongoThroughputThresholdPerSecond, + double? MaxAllocatedThresholdMb, + BenchmarkJsonScenarioBaseline? Baseline, + BenchmarkJsonScenarioRegression Regression); + + private sealed record BenchmarkJsonScenarioBaseline( + int Iterations, + int Observations, + int Aliases, + int Linksets, + double MeanTotalMs, + double P95TotalMs, + double MaxTotalMs, + double MeanInsertMs, + double MeanCorrelationMs, + double MeanThroughputPerSecond, + double MinThroughputPerSecond, + double MeanMongoThroughputPerSecond, + double MinMongoThroughputPerSecond, + double MaxAllocatedMb); + + private sealed record BenchmarkJsonScenarioRegression( + double? DurationRatio, + double? ThroughputRatio, + double? MongoThroughputRatio, + double Limit, + bool Breached); +} + +internal sealed record BenchmarkJsonMetadata( + string SchemaVersion, + DateTimeOffset CapturedAtUtc, + string? Commit, + string? Environment); diff --git a/src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Reporting/BenchmarkScenarioReport.cs b/src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Reporting/BenchmarkScenarioReport.cs similarity index 97% rename from src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Reporting/BenchmarkScenarioReport.cs rename to src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Reporting/BenchmarkScenarioReport.cs index 9da927a2..01356073 100644 --- a/src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Reporting/BenchmarkScenarioReport.cs +++ b/src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Reporting/BenchmarkScenarioReport.cs @@ -1,89 +1,89 @@ -using StellaOps.Bench.LinkNotMerge.Baseline; - -namespace StellaOps.Bench.LinkNotMerge.Reporting; - -internal sealed class BenchmarkScenarioReport -{ - private const double DefaultRegressionLimit = 1.15d; - - public BenchmarkScenarioReport(ScenarioResult result, BaselineEntry? baseline, double? regressionLimit = null) - { - Result = result ?? throw new ArgumentNullException(nameof(result)); - Baseline = baseline; - RegressionLimit = regressionLimit is { } limit && limit > 0 ? limit : DefaultRegressionLimit; - DurationRegressionRatio = CalculateRatio(result.TotalStatistics.MaxMs, baseline?.MaxTotalMs); - ThroughputRegressionRatio = CalculateInverseRatio(result.TotalThroughputStatistics.MinPerSecond, baseline?.MinThroughputPerSecond); - MongoThroughputRegressionRatio = CalculateInverseRatio(result.InsertThroughputStatistics.MinPerSecond, baseline?.MinMongoThroughputPerSecond); - } - - public ScenarioResult Result { get; } - - public BaselineEntry? Baseline { get; } - - public double RegressionLimit { get; } - - public double? DurationRegressionRatio { get; } - - public double? ThroughputRegressionRatio { get; } - - public double? 
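Illustrative usage sketch for the JSON writer above, not part of the patch. It assumes the report list parameter of `WriteAsync` is `IReadOnlyList<BenchmarkScenarioReport>` and that the list is the one `Program.Main` builds per scenario; the output path is hypothetical.

```csharp
// Illustrative sketch (not in the patch). Assumes it compiles next to the Reporting
// types shown above inside the bench project.
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Bench.LinkNotMerge.Reporting;

internal static class JsonExportSketch
{
    public static Task ExportAsync(IReadOnlyList<BenchmarkScenarioReport> reports) =>
        BenchmarkJsonWriter.WriteAsync(
            "out/linknotmerge-bench.json",              // hypothetical output path
            new BenchmarkJsonMetadata(
                SchemaVersion: "linknotmerge-bench/1.0", // same literal Program.Main embeds
                CapturedAtUtc: DateTimeOffset.UtcNow,
                Commit: null,
                Environment: null),
            reports,
            CancellationToken.None);
}
```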
MongoThroughputRegressionRatio { get; } - - public bool DurationRegressionBreached => DurationRegressionRatio is { } ratio && ratio >= RegressionLimit; - - public bool ThroughputRegressionBreached => ThroughputRegressionRatio is { } ratio && ratio >= RegressionLimit; - - public bool MongoThroughputRegressionBreached => MongoThroughputRegressionRatio is { } ratio && ratio >= RegressionLimit; - - public bool RegressionBreached => DurationRegressionBreached || ThroughputRegressionBreached || MongoThroughputRegressionBreached; - - public IEnumerable BuildRegressionFailureMessages() - { - if (Baseline is null) - { - yield break; - } - - if (DurationRegressionBreached && DurationRegressionRatio is { } durationRatio) - { - var delta = (durationRatio - 1d) * 100d; - yield return $"{Result.Id} exceeded max duration budget: {Result.TotalStatistics.MaxMs:F2} ms vs baseline {Baseline.MaxTotalMs:F2} ms (+{delta:F1}%)."; - } - - if (ThroughputRegressionBreached && ThroughputRegressionRatio is { } throughputRatio) - { - var delta = (throughputRatio - 1d) * 100d; - yield return $"{Result.Id} throughput regressed: min {Result.TotalThroughputStatistics.MinPerSecond:N0} obs/s vs baseline {Baseline.MinThroughputPerSecond:N0} obs/s (-{delta:F1}%)."; - } - - if (MongoThroughputRegressionBreached && MongoThroughputRegressionRatio is { } mongoRatio) - { - var delta = (mongoRatio - 1d) * 100d; - yield return $"{Result.Id} Mongo throughput regressed: min {Result.InsertThroughputStatistics.MinPerSecond:N0} ops/s vs baseline {Baseline.MinMongoThroughputPerSecond:N0} ops/s (-{delta:F1}%)."; - } - } - - private static double? CalculateRatio(double current, double? baseline) - { - if (!baseline.HasValue || baseline.Value <= 0d) - { - return null; - } - - return current / baseline.Value; - } - - private static double? CalculateInverseRatio(double current, double? baseline) - { - if (!baseline.HasValue || baseline.Value <= 0d) - { - return null; - } - - if (current <= 0d) - { - return double.PositiveInfinity; - } - - return baseline.Value / current; - } -} +using StellaOps.Bench.LinkNotMerge.Baseline; + +namespace StellaOps.Bench.LinkNotMerge.Reporting; + +internal sealed class BenchmarkScenarioReport +{ + private const double DefaultRegressionLimit = 1.15d; + + public BenchmarkScenarioReport(ScenarioResult result, BaselineEntry? baseline, double? regressionLimit = null) + { + Result = result ?? throw new ArgumentNullException(nameof(result)); + Baseline = baseline; + RegressionLimit = regressionLimit is { } limit && limit > 0 ? limit : DefaultRegressionLimit; + DurationRegressionRatio = CalculateRatio(result.TotalStatistics.MaxMs, baseline?.MaxTotalMs); + ThroughputRegressionRatio = CalculateInverseRatio(result.TotalThroughputStatistics.MinPerSecond, baseline?.MinThroughputPerSecond); + MongoThroughputRegressionRatio = CalculateInverseRatio(result.InsertThroughputStatistics.MinPerSecond, baseline?.MinMongoThroughputPerSecond); + } + + public ScenarioResult Result { get; } + + public BaselineEntry? Baseline { get; } + + public double RegressionLimit { get; } + + public double? DurationRegressionRatio { get; } + + public double? ThroughputRegressionRatio { get; } + + public double? 
MongoThroughputRegressionRatio { get; } + + public bool DurationRegressionBreached => DurationRegressionRatio is { } ratio && ratio >= RegressionLimit; + + public bool ThroughputRegressionBreached => ThroughputRegressionRatio is { } ratio && ratio >= RegressionLimit; + + public bool MongoThroughputRegressionBreached => MongoThroughputRegressionRatio is { } ratio && ratio >= RegressionLimit; + + public bool RegressionBreached => DurationRegressionBreached || ThroughputRegressionBreached || MongoThroughputRegressionBreached; + + public IEnumerable BuildRegressionFailureMessages() + { + if (Baseline is null) + { + yield break; + } + + if (DurationRegressionBreached && DurationRegressionRatio is { } durationRatio) + { + var delta = (durationRatio - 1d) * 100d; + yield return $"{Result.Id} exceeded max duration budget: {Result.TotalStatistics.MaxMs:F2} ms vs baseline {Baseline.MaxTotalMs:F2} ms (+{delta:F1}%)."; + } + + if (ThroughputRegressionBreached && ThroughputRegressionRatio is { } throughputRatio) + { + var delta = (throughputRatio - 1d) * 100d; + yield return $"{Result.Id} throughput regressed: min {Result.TotalThroughputStatistics.MinPerSecond:N0} obs/s vs baseline {Baseline.MinThroughputPerSecond:N0} obs/s (-{delta:F1}%)."; + } + + if (MongoThroughputRegressionBreached && MongoThroughputRegressionRatio is { } mongoRatio) + { + var delta = (mongoRatio - 1d) * 100d; + yield return $"{Result.Id} Mongo throughput regressed: min {Result.InsertThroughputStatistics.MinPerSecond:N0} ops/s vs baseline {Baseline.MinMongoThroughputPerSecond:N0} ops/s (-{delta:F1}%)."; + } + } + + private static double? CalculateRatio(double current, double? baseline) + { + if (!baseline.HasValue || baseline.Value <= 0d) + { + return null; + } + + return current / baseline.Value; + } + + private static double? CalculateInverseRatio(double current, double? 
baseline) + { + if (!baseline.HasValue || baseline.Value <= 0d) + { + return null; + } + + if (current <= 0d) + { + return double.PositiveInfinity; + } + + return baseline.Value / current; + } +} diff --git a/src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Reporting/PrometheusWriter.cs b/src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Reporting/PrometheusWriter.cs similarity index 98% rename from src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Reporting/PrometheusWriter.cs rename to src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Reporting/PrometheusWriter.cs index 93a1c571..5324b0f0 100644 --- a/src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Reporting/PrometheusWriter.cs +++ b/src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/Reporting/PrometheusWriter.cs @@ -1,101 +1,101 @@ -using System.Globalization; -using System.Text; - -namespace StellaOps.Bench.LinkNotMerge.Reporting; - -internal static class PrometheusWriter -{ - public static void Write(string path, IReadOnlyList reports) - { - ArgumentException.ThrowIfNullOrWhiteSpace(path); - ArgumentNullException.ThrowIfNull(reports); - - var resolved = Path.GetFullPath(path); - var directory = Path.GetDirectoryName(resolved); - if (!string.IsNullOrEmpty(directory)) - { - Directory.CreateDirectory(directory); - } - - var builder = new StringBuilder(); - builder.AppendLine("# HELP linknotmerge_bench_total_ms Link-Not-Merge benchmark total duration metrics (milliseconds)."); - builder.AppendLine("# TYPE linknotmerge_bench_total_ms gauge"); - builder.AppendLine("# HELP linknotmerge_bench_correlation_ms Link-Not-Merge benchmark correlation duration metrics (milliseconds)."); - builder.AppendLine("# TYPE linknotmerge_bench_correlation_ms gauge"); - builder.AppendLine("# HELP linknotmerge_bench_insert_ms Link-Not-Merge benchmark Mongo insert duration metrics (milliseconds)."); - builder.AppendLine("# TYPE linknotmerge_bench_insert_ms gauge"); - builder.AppendLine("# HELP linknotmerge_bench_throughput_per_sec Link-Not-Merge benchmark throughput metrics (observations per second)."); - builder.AppendLine("# TYPE linknotmerge_bench_throughput_per_sec gauge"); - builder.AppendLine("# HELP linknotmerge_bench_mongo_throughput_per_sec Link-Not-Merge benchmark Mongo throughput metrics (operations per second)."); - builder.AppendLine("# TYPE linknotmerge_bench_mongo_throughput_per_sec gauge"); - builder.AppendLine("# HELP linknotmerge_bench_allocated_mb Link-Not-Merge benchmark allocation metrics (megabytes)."); - builder.AppendLine("# TYPE linknotmerge_bench_allocated_mb gauge"); - - foreach (var report in reports) - { - var scenario = Escape(report.Result.Id); - AppendMetric(builder, "linknotmerge_bench_mean_total_ms", scenario, report.Result.TotalStatistics.MeanMs); - AppendMetric(builder, "linknotmerge_bench_p95_total_ms", scenario, report.Result.TotalStatistics.P95Ms); - AppendMetric(builder, "linknotmerge_bench_max_total_ms", scenario, report.Result.TotalStatistics.MaxMs); - AppendMetric(builder, "linknotmerge_bench_threshold_ms", scenario, report.Result.ThresholdMs); - - AppendMetric(builder, "linknotmerge_bench_mean_correlation_ms", scenario, report.Result.CorrelationStatistics.MeanMs); - AppendMetric(builder, "linknotmerge_bench_mean_insert_ms", scenario, report.Result.InsertStatistics.MeanMs); - - AppendMetric(builder, "linknotmerge_bench_mean_throughput_per_sec", scenario, report.Result.TotalThroughputStatistics.MeanPerSecond); - 
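A small worked example of the ratio math in `BenchmarkScenarioReport` just above. The baseline figures come from the `lnm_ingest_baseline` row of baseline.csv later in this patch; the "current run" numbers are made up to show one breach and one pass against the default 1.15 limit.

```csharp
// Worked example of CalculateRatio / CalculateInverseRatio (illustrative numbers).
using System;

const double regressionLimit = 1.15;           // DefaultRegressionLimit

// Duration: current max / baseline max — higher is worse.
double durationRatio = 1050.0 / 866.6236;      // ≈ 1.21 → >= 1.15, breach

// Throughput: baseline min / current min — inverted so a slower run still
// yields a ratio above 1.
double throughputRatio = 5769.5175 / 5200.0;   // ≈ 1.11 → below the limit, no breach

Console.WriteLine($"duration breach:   {durationRatio >= regressionLimit}");
Console.WriteLine($"throughput breach: {throughputRatio >= regressionLimit}");
```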
AppendMetric(builder, "linknotmerge_bench_min_throughput_per_sec", scenario, report.Result.TotalThroughputStatistics.MinPerSecond); - AppendMetric(builder, "linknotmerge_bench_throughput_floor_per_sec", scenario, report.Result.MinThroughputThresholdPerSecond); - - AppendMetric(builder, "linknotmerge_bench_mean_mongo_throughput_per_sec", scenario, report.Result.InsertThroughputStatistics.MeanPerSecond); - AppendMetric(builder, "linknotmerge_bench_min_mongo_throughput_per_sec", scenario, report.Result.InsertThroughputStatistics.MinPerSecond); - AppendMetric(builder, "linknotmerge_bench_mongo_throughput_floor_per_sec", scenario, report.Result.MinMongoThroughputThresholdPerSecond); - - AppendMetric(builder, "linknotmerge_bench_max_allocated_mb", scenario, report.Result.AllocationStatistics.MaxAllocatedMb); - AppendMetric(builder, "linknotmerge_bench_max_allocated_threshold_mb", scenario, report.Result.MaxAllocatedThresholdMb); - - if (report.Baseline is { } baseline) - { - AppendMetric(builder, "linknotmerge_bench_baseline_max_total_ms", scenario, baseline.MaxTotalMs); - AppendMetric(builder, "linknotmerge_bench_baseline_min_throughput_per_sec", scenario, baseline.MinThroughputPerSecond); - AppendMetric(builder, "linknotmerge_bench_baseline_min_mongo_throughput_per_sec", scenario, baseline.MinMongoThroughputPerSecond); - } - - if (report.DurationRegressionRatio is { } durationRatio) - { - AppendMetric(builder, "linknotmerge_bench_duration_regression_ratio", scenario, durationRatio); - } - - if (report.ThroughputRegressionRatio is { } throughputRatio) - { - AppendMetric(builder, "linknotmerge_bench_throughput_regression_ratio", scenario, throughputRatio); - } - - if (report.MongoThroughputRegressionRatio is { } mongoRatio) - { - AppendMetric(builder, "linknotmerge_bench_mongo_throughput_regression_ratio", scenario, mongoRatio); - } - - AppendMetric(builder, "linknotmerge_bench_regression_limit", scenario, report.RegressionLimit); - AppendMetric(builder, "linknotmerge_bench_regression_breached", scenario, report.RegressionBreached ? 1 : 0); - } - - File.WriteAllText(resolved, builder.ToString(), Encoding.UTF8); - } - - private static void AppendMetric(StringBuilder builder, string metric, string scenario, double? 
value) - { - if (!value.HasValue) - { - return; - } - - builder.Append(metric); - builder.Append("{scenario=\""); - builder.Append(scenario); - builder.Append("\"} "); - builder.AppendLine(value.Value.ToString("G17", CultureInfo.InvariantCulture)); - } - - private static string Escape(string value) => - value.Replace("\\", "\\\\", StringComparison.Ordinal).Replace("\"", "\\\"", StringComparison.Ordinal); -} +using System.Globalization; +using System.Text; + +namespace StellaOps.Bench.LinkNotMerge.Reporting; + +internal static class PrometheusWriter +{ + public static void Write(string path, IReadOnlyList reports) + { + ArgumentException.ThrowIfNullOrWhiteSpace(path); + ArgumentNullException.ThrowIfNull(reports); + + var resolved = Path.GetFullPath(path); + var directory = Path.GetDirectoryName(resolved); + if (!string.IsNullOrEmpty(directory)) + { + Directory.CreateDirectory(directory); + } + + var builder = new StringBuilder(); + builder.AppendLine("# HELP linknotmerge_bench_total_ms Link-Not-Merge benchmark total duration metrics (milliseconds)."); + builder.AppendLine("# TYPE linknotmerge_bench_total_ms gauge"); + builder.AppendLine("# HELP linknotmerge_bench_correlation_ms Link-Not-Merge benchmark correlation duration metrics (milliseconds)."); + builder.AppendLine("# TYPE linknotmerge_bench_correlation_ms gauge"); + builder.AppendLine("# HELP linknotmerge_bench_insert_ms Link-Not-Merge benchmark Mongo insert duration metrics (milliseconds)."); + builder.AppendLine("# TYPE linknotmerge_bench_insert_ms gauge"); + builder.AppendLine("# HELP linknotmerge_bench_throughput_per_sec Link-Not-Merge benchmark throughput metrics (observations per second)."); + builder.AppendLine("# TYPE linknotmerge_bench_throughput_per_sec gauge"); + builder.AppendLine("# HELP linknotmerge_bench_mongo_throughput_per_sec Link-Not-Merge benchmark Mongo throughput metrics (operations per second)."); + builder.AppendLine("# TYPE linknotmerge_bench_mongo_throughput_per_sec gauge"); + builder.AppendLine("# HELP linknotmerge_bench_allocated_mb Link-Not-Merge benchmark allocation metrics (megabytes)."); + builder.AppendLine("# TYPE linknotmerge_bench_allocated_mb gauge"); + + foreach (var report in reports) + { + var scenario = Escape(report.Result.Id); + AppendMetric(builder, "linknotmerge_bench_mean_total_ms", scenario, report.Result.TotalStatistics.MeanMs); + AppendMetric(builder, "linknotmerge_bench_p95_total_ms", scenario, report.Result.TotalStatistics.P95Ms); + AppendMetric(builder, "linknotmerge_bench_max_total_ms", scenario, report.Result.TotalStatistics.MaxMs); + AppendMetric(builder, "linknotmerge_bench_threshold_ms", scenario, report.Result.ThresholdMs); + + AppendMetric(builder, "linknotmerge_bench_mean_correlation_ms", scenario, report.Result.CorrelationStatistics.MeanMs); + AppendMetric(builder, "linknotmerge_bench_mean_insert_ms", scenario, report.Result.InsertStatistics.MeanMs); + + AppendMetric(builder, "linknotmerge_bench_mean_throughput_per_sec", scenario, report.Result.TotalThroughputStatistics.MeanPerSecond); + AppendMetric(builder, "linknotmerge_bench_min_throughput_per_sec", scenario, report.Result.TotalThroughputStatistics.MinPerSecond); + AppendMetric(builder, "linknotmerge_bench_throughput_floor_per_sec", scenario, report.Result.MinThroughputThresholdPerSecond); + + AppendMetric(builder, "linknotmerge_bench_mean_mongo_throughput_per_sec", scenario, report.Result.InsertThroughputStatistics.MeanPerSecond); + AppendMetric(builder, "linknotmerge_bench_min_mongo_throughput_per_sec", scenario, 
report.Result.InsertThroughputStatistics.MinPerSecond); + AppendMetric(builder, "linknotmerge_bench_mongo_throughput_floor_per_sec", scenario, report.Result.MinMongoThroughputThresholdPerSecond); + + AppendMetric(builder, "linknotmerge_bench_max_allocated_mb", scenario, report.Result.AllocationStatistics.MaxAllocatedMb); + AppendMetric(builder, "linknotmerge_bench_max_allocated_threshold_mb", scenario, report.Result.MaxAllocatedThresholdMb); + + if (report.Baseline is { } baseline) + { + AppendMetric(builder, "linknotmerge_bench_baseline_max_total_ms", scenario, baseline.MaxTotalMs); + AppendMetric(builder, "linknotmerge_bench_baseline_min_throughput_per_sec", scenario, baseline.MinThroughputPerSecond); + AppendMetric(builder, "linknotmerge_bench_baseline_min_mongo_throughput_per_sec", scenario, baseline.MinMongoThroughputPerSecond); + } + + if (report.DurationRegressionRatio is { } durationRatio) + { + AppendMetric(builder, "linknotmerge_bench_duration_regression_ratio", scenario, durationRatio); + } + + if (report.ThroughputRegressionRatio is { } throughputRatio) + { + AppendMetric(builder, "linknotmerge_bench_throughput_regression_ratio", scenario, throughputRatio); + } + + if (report.MongoThroughputRegressionRatio is { } mongoRatio) + { + AppendMetric(builder, "linknotmerge_bench_mongo_throughput_regression_ratio", scenario, mongoRatio); + } + + AppendMetric(builder, "linknotmerge_bench_regression_limit", scenario, report.RegressionLimit); + AppendMetric(builder, "linknotmerge_bench_regression_breached", scenario, report.RegressionBreached ? 1 : 0); + } + + File.WriteAllText(resolved, builder.ToString(), Encoding.UTF8); + } + + private static void AppendMetric(StringBuilder builder, string metric, string scenario, double? value) + { + if (!value.HasValue) + { + return; + } + + builder.Append(metric); + builder.Append("{scenario=\""); + builder.Append(scenario); + builder.Append("\"} "); + builder.AppendLine(value.Value.ToString("G17", CultureInfo.InvariantCulture)); + } + + private static string Escape(string value) => + value.Replace("\\", "\\\\", StringComparison.Ordinal).Replace("\"", "\\\"", StringComparison.Ordinal); +} diff --git a/src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/ScenarioExecutionResult.cs b/src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/ScenarioExecutionResult.cs similarity index 97% rename from src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/ScenarioExecutionResult.cs rename to src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/ScenarioExecutionResult.cs index e5c1e9af..9740f773 100644 --- a/src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/ScenarioExecutionResult.cs +++ b/src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/ScenarioExecutionResult.cs @@ -1,14 +1,14 @@ -namespace StellaOps.Bench.LinkNotMerge; - -internal sealed record ScenarioExecutionResult( - IReadOnlyList TotalDurationsMs, - IReadOnlyList InsertDurationsMs, - IReadOnlyList CorrelationDurationsMs, - IReadOnlyList AllocatedMb, - IReadOnlyList TotalThroughputsPerSecond, - IReadOnlyList InsertThroughputsPerSecond, - int ObservationCount, - int AliasGroups, - int LinksetCount, - int TenantCount, - LinksetAggregationResult AggregationResult); +namespace StellaOps.Bench.LinkNotMerge; + +internal sealed record ScenarioExecutionResult( + IReadOnlyList TotalDurationsMs, + IReadOnlyList InsertDurationsMs, + IReadOnlyList CorrelationDurationsMs, + IReadOnlyList AllocatedMb, + IReadOnlyList 
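For reference, a sketch of the exposition text `AppendMetric` above produces: gauge name, a single `scenario` label, then the value rendered with the round-trip `"G17"` format. The scenario id and value are taken from the baseline data in this patch; the snippet is illustrative, not part of the writer.

```csharp
// Reconstructs one exposition line the same way AppendMetric builds it.
using System;
using System.Globalization;
using System.Text;

var builder = new StringBuilder();
builder.Append("linknotmerge_bench_max_total_ms");
builder.Append("{scenario=\"lnm_ingest_baseline\"} ");
builder.AppendLine(866.6236.ToString("G17", CultureInfo.InvariantCulture));

Console.Write(builder);
// Prints something like:
// linknotmerge_bench_max_total_ms{scenario="lnm_ingest_baseline"} 866.62360000000001
```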
TotalThroughputsPerSecond, + IReadOnlyList InsertThroughputsPerSecond, + int ObservationCount, + int AliasGroups, + int LinksetCount, + int TenantCount, + LinksetAggregationResult AggregationResult); diff --git a/src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/ScenarioResult.cs b/src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/ScenarioResult.cs similarity index 97% rename from src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/ScenarioResult.cs rename to src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/ScenarioResult.cs index 65ec9ffc..513de642 100644 --- a/src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/ScenarioResult.cs +++ b/src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/ScenarioResult.cs @@ -1,42 +1,42 @@ -using System.Globalization; - -namespace StellaOps.Bench.LinkNotMerge; - -internal sealed record ScenarioResult( - string Id, - string Label, - int Iterations, - int ObservationCount, - int AliasGroups, - int LinksetCount, - DurationStatistics TotalStatistics, - DurationStatistics InsertStatistics, - DurationStatistics CorrelationStatistics, - ThroughputStatistics TotalThroughputStatistics, - ThroughputStatistics InsertThroughputStatistics, - AllocationStatistics AllocationStatistics, - double? ThresholdMs, - double? MinThroughputThresholdPerSecond, - double? MinMongoThroughputThresholdPerSecond, - double? MaxAllocatedThresholdMb) -{ - public string IdColumn => Id.Length <= 28 ? Id.PadRight(28) : Id[..28]; - - public string ObservationsColumn => ObservationCount.ToString("N0", CultureInfo.InvariantCulture).PadLeft(12); - - public string AliasColumn => AliasGroups.ToString("N0", CultureInfo.InvariantCulture).PadLeft(8); - - public string LinksetColumn => LinksetCount.ToString("N0", CultureInfo.InvariantCulture).PadLeft(9); - - public string TotalMeanColumn => TotalStatistics.MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10); - - public string CorrelationMeanColumn => CorrelationStatistics.MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10); - - public string InsertMeanColumn => InsertStatistics.MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10); - - public string ThroughputColumn => (TotalThroughputStatistics.MinPerSecond / 1_000d).ToString("F2", CultureInfo.InvariantCulture).PadLeft(11); - - public string MongoThroughputColumn => (InsertThroughputStatistics.MinPerSecond / 1_000d).ToString("F2", CultureInfo.InvariantCulture).PadLeft(11); - - public string AllocatedColumn => AllocationStatistics.MaxAllocatedMb.ToString("F2", CultureInfo.InvariantCulture).PadLeft(9); -} +using System.Globalization; + +namespace StellaOps.Bench.LinkNotMerge; + +internal sealed record ScenarioResult( + string Id, + string Label, + int Iterations, + int ObservationCount, + int AliasGroups, + int LinksetCount, + DurationStatistics TotalStatistics, + DurationStatistics InsertStatistics, + DurationStatistics CorrelationStatistics, + ThroughputStatistics TotalThroughputStatistics, + ThroughputStatistics InsertThroughputStatistics, + AllocationStatistics AllocationStatistics, + double? ThresholdMs, + double? MinThroughputThresholdPerSecond, + double? MinMongoThroughputThresholdPerSecond, + double? MaxAllocatedThresholdMb) +{ + public string IdColumn => Id.Length <= 28 ? 
Id.PadRight(28) : Id[..28]; + + public string ObservationsColumn => ObservationCount.ToString("N0", CultureInfo.InvariantCulture).PadLeft(12); + + public string AliasColumn => AliasGroups.ToString("N0", CultureInfo.InvariantCulture).PadLeft(8); + + public string LinksetColumn => LinksetCount.ToString("N0", CultureInfo.InvariantCulture).PadLeft(9); + + public string TotalMeanColumn => TotalStatistics.MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10); + + public string CorrelationMeanColumn => CorrelationStatistics.MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10); + + public string InsertMeanColumn => InsertStatistics.MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10); + + public string ThroughputColumn => (TotalThroughputStatistics.MinPerSecond / 1_000d).ToString("F2", CultureInfo.InvariantCulture).PadLeft(11); + + public string MongoThroughputColumn => (InsertThroughputStatistics.MinPerSecond / 1_000d).ToString("F2", CultureInfo.InvariantCulture).PadLeft(11); + + public string AllocatedColumn => AllocationStatistics.MaxAllocatedMb.ToString("F2", CultureInfo.InvariantCulture).PadLeft(9); +} diff --git a/src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/ScenarioStatistics.cs b/src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/ScenarioStatistics.cs similarity index 96% rename from src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/ScenarioStatistics.cs rename to src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/ScenarioStatistics.cs index f9cd565f..ac024430 100644 --- a/src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/ScenarioStatistics.cs +++ b/src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/ScenarioStatistics.cs @@ -1,84 +1,84 @@ -namespace StellaOps.Bench.LinkNotMerge; - -internal readonly record struct DurationStatistics(double MeanMs, double P95Ms, double MaxMs) -{ - public static DurationStatistics From(IReadOnlyList values) - { - if (values.Count == 0) - { - return new DurationStatistics(0, 0, 0); - } - - var sorted = values.ToArray(); - Array.Sort(sorted); - - var total = 0d; - foreach (var value in values) - { - total += value; - } - - var mean = total / values.Count; - var p95 = Percentile(sorted, 95); - var max = sorted[^1]; - - return new DurationStatistics(mean, p95, max); - } - - private static double Percentile(IReadOnlyList sorted, double percentile) - { - if (sorted.Count == 0) - { - return 0; - } - - var rank = (percentile / 100d) * (sorted.Count - 1); - var lower = (int)Math.Floor(rank); - var upper = (int)Math.Ceiling(rank); - var weight = rank - lower; - - if (upper >= sorted.Count) - { - return sorted[lower]; - } - - return sorted[lower] + weight * (sorted[upper] - sorted[lower]); - } -} - -internal readonly record struct ThroughputStatistics(double MeanPerSecond, double MinPerSecond) -{ - public static ThroughputStatistics From(IReadOnlyList values) - { - if (values.Count == 0) - { - return new ThroughputStatistics(0, 0); - } - - var total = 0d; - var min = double.MaxValue; - - foreach (var value in values) - { - total += value; - min = Math.Min(min, value); - } - - var mean = total / values.Count; - return new ThroughputStatistics(mean, min); - } -} - -internal readonly record struct AllocationStatistics(double MaxAllocatedMb) -{ - public static AllocationStatistics From(IReadOnlyList values) - { - var max = 0d; - foreach (var value in values) - { - max = Math.Max(max, value); - } - - return new AllocationStatistics(max); - } -} 
+namespace StellaOps.Bench.LinkNotMerge; + +internal readonly record struct DurationStatistics(double MeanMs, double P95Ms, double MaxMs) +{ + public static DurationStatistics From(IReadOnlyList values) + { + if (values.Count == 0) + { + return new DurationStatistics(0, 0, 0); + } + + var sorted = values.ToArray(); + Array.Sort(sorted); + + var total = 0d; + foreach (var value in values) + { + total += value; + } + + var mean = total / values.Count; + var p95 = Percentile(sorted, 95); + var max = sorted[^1]; + + return new DurationStatistics(mean, p95, max); + } + + private static double Percentile(IReadOnlyList sorted, double percentile) + { + if (sorted.Count == 0) + { + return 0; + } + + var rank = (percentile / 100d) * (sorted.Count - 1); + var lower = (int)Math.Floor(rank); + var upper = (int)Math.Ceiling(rank); + var weight = rank - lower; + + if (upper >= sorted.Count) + { + return sorted[lower]; + } + + return sorted[lower] + weight * (sorted[upper] - sorted[lower]); + } +} + +internal readonly record struct ThroughputStatistics(double MeanPerSecond, double MinPerSecond) +{ + public static ThroughputStatistics From(IReadOnlyList values) + { + if (values.Count == 0) + { + return new ThroughputStatistics(0, 0); + } + + var total = 0d; + var min = double.MaxValue; + + foreach (var value in values) + { + total += value; + min = Math.Min(min, value); + } + + var mean = total / values.Count; + return new ThroughputStatistics(mean, min); + } +} + +internal readonly record struct AllocationStatistics(double MaxAllocatedMb) +{ + public static AllocationStatistics From(IReadOnlyList values) + { + var max = 0d; + foreach (var value in values) + { + max = Math.Max(max, value); + } + + return new AllocationStatistics(max); + } +} diff --git a/src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/StellaOps.Bench.LinkNotMerge.csproj b/src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/StellaOps.Bench.LinkNotMerge.csproj similarity index 97% rename from src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/StellaOps.Bench.LinkNotMerge.csproj rename to src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/StellaOps.Bench.LinkNotMerge.csproj index 7167d07e..a57ee461 100644 --- a/src/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/StellaOps.Bench.LinkNotMerge.csproj +++ b/src/Bench/StellaOps.Bench/LinkNotMerge/StellaOps.Bench.LinkNotMerge/StellaOps.Bench.LinkNotMerge.csproj @@ -1,16 +1,16 @@ - - - Exe - net10.0 - enable - enable - preview - true - - - - - - - - + + + Exe + net10.0 + enable + enable + preview + true + + + + + + + + diff --git a/src/StellaOps.Bench/LinkNotMerge/baseline.csv b/src/Bench/StellaOps.Bench/LinkNotMerge/baseline.csv similarity index 99% rename from src/StellaOps.Bench/LinkNotMerge/baseline.csv rename to src/Bench/StellaOps.Bench/LinkNotMerge/baseline.csv index 7e141e9a..7070a4e0 100644 --- a/src/StellaOps.Bench/LinkNotMerge/baseline.csv +++ b/src/Bench/StellaOps.Bench/LinkNotMerge/baseline.csv @@ -1,4 +1,4 @@ -scenario,iterations,observations,aliases,linksets,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_throughput_per_sec,min_throughput_per_sec,mean_mongo_throughput_per_sec,min_mongo_throughput_per_sec,max_allocated_mb -lnm_ingest_baseline,5,5000,500,6000,555.1984,823.4957,866.6236,366.2635,188.9349,9877.7916,5769.5175,15338.0851,8405.1257,62.4477 
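A worked example of the linear interpolation in `DurationStatistics.Percentile` above; the sample durations are hypothetical.

```csharp
// p95 interpolation as implemented above, on five hypothetical samples.
using System;

double[] sorted = { 100, 120, 150, 400, 900 };   // already sorted, Count = 5

double rank = (95 / 100d) * (sorted.Length - 1); // 0.95 * 4 = 3.8
int lower = (int)Math.Floor(rank);               // 3
int upper = (int)Math.Ceiling(rank);             // 4
double weight = rank - lower;                    // 0.8

double p95 = sorted[lower] + weight * (sorted[upper] - sorted[lower]);
Console.WriteLine(p95);                          // 400 + 0.8 * (900 - 400) = 800
```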
-lnm_ingest_fanout_medium,5,10000,800,14800,785.8909,841.6247,842.8815,453.5087,332.3822,12794.9550,11864.0639,22086.0320,20891.0579,145.8328 -lnm_ingest_fanout_high,5,15000,1200,17400,1299.3458,1367.0934,1369.9430,741.6265,557.7193,11571.0991,10949.3607,20232.5180,19781.6762,238.3450 +scenario,iterations,observations,aliases,linksets,mean_total_ms,p95_total_ms,max_total_ms,mean_insert_ms,mean_correlation_ms,mean_throughput_per_sec,min_throughput_per_sec,mean_mongo_throughput_per_sec,min_mongo_throughput_per_sec,max_allocated_mb +lnm_ingest_baseline,5,5000,500,6000,555.1984,823.4957,866.6236,366.2635,188.9349,9877.7916,5769.5175,15338.0851,8405.1257,62.4477 +lnm_ingest_fanout_medium,5,10000,800,14800,785.8909,841.6247,842.8815,453.5087,332.3822,12794.9550,11864.0639,22086.0320,20891.0579,145.8328 +lnm_ingest_fanout_high,5,15000,1200,17400,1299.3458,1367.0934,1369.9430,741.6265,557.7193,11571.0991,10949.3607,20232.5180,19781.6762,238.3450 diff --git a/src/StellaOps.Bench/LinkNotMerge/config.json b/src/Bench/StellaOps.Bench/LinkNotMerge/config.json similarity index 96% rename from src/StellaOps.Bench/LinkNotMerge/config.json rename to src/Bench/StellaOps.Bench/LinkNotMerge/config.json index e3bdbf76..0d255cbd 100644 --- a/src/StellaOps.Bench/LinkNotMerge/config.json +++ b/src/Bench/StellaOps.Bench/LinkNotMerge/config.json @@ -1,57 +1,57 @@ -{ - "thresholdMs": 2000, - "minThroughputPerSecond": 7000, - "minMongoThroughputPerSecond": 12000, - "maxAllocatedMb": 600, - "iterations": 5, - "scenarios": [ - { - "id": "lnm_ingest_baseline", - "label": "5k observations, 500 aliases", - "observations": 5000, - "aliasGroups": 500, - "purlsPerObservation": 4, - "cpesPerObservation": 2, - "referencesPerObservation": 3, - "tenants": 4, - "batchSize": 250, - "seed": 42022, - "thresholdMs": 900, - "minThroughputPerSecond": 5500, - "minMongoThroughputPerSecond": 8000, - "maxAllocatedMb": 160 - }, - { - "id": "lnm_ingest_fanout_medium", - "label": "10k observations, 800 aliases", - "observations": 10000, - "aliasGroups": 800, - "purlsPerObservation": 6, - "cpesPerObservation": 3, - "referencesPerObservation": 4, - "tenants": 6, - "batchSize": 400, - "seed": 52022, - "thresholdMs": 1300, - "minThroughputPerSecond": 8000, - "minMongoThroughputPerSecond": 13000, - "maxAllocatedMb": 220 - }, - { - "id": "lnm_ingest_fanout_high", - "label": "15k observations, 1200 aliases", - "observations": 15000, - "aliasGroups": 1200, - "purlsPerObservation": 8, - "cpesPerObservation": 4, - "referencesPerObservation": 5, - "tenants": 8, - "batchSize": 500, - "seed": 62022, - "thresholdMs": 2200, - "minThroughputPerSecond": 7000, - "minMongoThroughputPerSecond": 13000, - "maxAllocatedMb": 300 - } - ] -} +{ + "thresholdMs": 2000, + "minThroughputPerSecond": 7000, + "minMongoThroughputPerSecond": 12000, + "maxAllocatedMb": 600, + "iterations": 5, + "scenarios": [ + { + "id": "lnm_ingest_baseline", + "label": "5k observations, 500 aliases", + "observations": 5000, + "aliasGroups": 500, + "purlsPerObservation": 4, + "cpesPerObservation": 2, + "referencesPerObservation": 3, + "tenants": 4, + "batchSize": 250, + "seed": 42022, + "thresholdMs": 900, + "minThroughputPerSecond": 5500, + "minMongoThroughputPerSecond": 8000, + "maxAllocatedMb": 160 + }, + { + "id": "lnm_ingest_fanout_medium", + "label": "10k observations, 800 aliases", + "observations": 10000, + "aliasGroups": 800, + "purlsPerObservation": 6, + "cpesPerObservation": 3, + "referencesPerObservation": 4, + "tenants": 6, + "batchSize": 400, + "seed": 52022, + "thresholdMs": 
1300, + "minThroughputPerSecond": 8000, + "minMongoThroughputPerSecond": 13000, + "maxAllocatedMb": 220 + }, + { + "id": "lnm_ingest_fanout_high", + "label": "15k observations, 1200 aliases", + "observations": 15000, + "aliasGroups": 1200, + "purlsPerObservation": 8, + "cpesPerObservation": 4, + "referencesPerObservation": 5, + "tenants": 8, + "batchSize": 500, + "seed": 62022, + "thresholdMs": 2200, + "minThroughputPerSecond": 7000, + "minMongoThroughputPerSecond": 13000, + "maxAllocatedMb": 300 + } + ] +} diff --git a/src/StellaOps.Bench/Notify/README.md b/src/Bench/StellaOps.Bench/Notify/README.md similarity index 89% rename from src/StellaOps.Bench/Notify/README.md rename to src/Bench/StellaOps.Bench/Notify/README.md index 22d83f52..219e1e24 100644 --- a/src/StellaOps.Bench/Notify/README.md +++ b/src/Bench/StellaOps.Bench/Notify/README.md @@ -15,7 +15,7 @@ Synthetic workload measuring rule evaluation and channel dispatch fan-out under ```bash dotnet run \ - --project src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/StellaOps.Bench.Notify.csproj \ + --project src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/StellaOps.Bench.Notify.csproj \ -- \ --csv out/notify-bench.csv \ --json out/notify-bench.json \ diff --git a/src/StellaOps.Bench/Notify/StellaOps.Bench.Notify.Tests/BaselineLoaderTests.cs b/src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify.Tests/BaselineLoaderTests.cs similarity index 97% rename from src/StellaOps.Bench/Notify/StellaOps.Bench.Notify.Tests/BaselineLoaderTests.cs rename to src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify.Tests/BaselineLoaderTests.cs index 4bf83801..3fe15792 100644 --- a/src/StellaOps.Bench/Notify/StellaOps.Bench.Notify.Tests/BaselineLoaderTests.cs +++ b/src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify.Tests/BaselineLoaderTests.cs @@ -1,38 +1,38 @@ -using System.IO; -using System.Threading; -using System.Threading.Tasks; -using StellaOps.Bench.Notify.Baseline; -using Xunit; - -namespace StellaOps.Bench.Notify.Tests; - -public sealed class BaselineLoaderTests -{ - [Fact] - public async Task LoadAsync_ReadsBaselineEntries() - { - var path = Path.GetTempFileName(); - try - { - await File.WriteAllTextAsync( - path, - "scenario,iterations,events,deliveries,mean_ms,p95_ms,max_ms,mean_throughput_per_sec,min_throughput_per_sec,max_allocated_mb\n" + - "notify_dispatch_density_05,5,5000,25000,120.5,150.1,199.9,42000.5,39000.2,85.7\n"); - - var entries = await BaselineLoader.LoadAsync(path, CancellationToken.None); - var entry = Assert.Single(entries); - - Assert.Equal("notify_dispatch_density_05", entry.Key); - Assert.Equal(5, entry.Value.Iterations); - Assert.Equal(5000, entry.Value.EventCount); - Assert.Equal(25000, entry.Value.DeliveryCount); - Assert.Equal(120.5, entry.Value.MeanMs); - Assert.Equal(39000.2, entry.Value.MinThroughputPerSecond); - Assert.Equal(85.7, entry.Value.MaxAllocatedMb); - } - finally - { - File.Delete(path); - } - } -} +using System.IO; +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Bench.Notify.Baseline; +using Xunit; + +namespace StellaOps.Bench.Notify.Tests; + +public sealed class BaselineLoaderTests +{ + [Fact] + public async Task LoadAsync_ReadsBaselineEntries() + { + var path = Path.GetTempFileName(); + try + { + await File.WriteAllTextAsync( + path, + "scenario,iterations,events,deliveries,mean_ms,p95_ms,max_ms,mean_throughput_per_sec,min_throughput_per_sec,max_allocated_mb\n" + + "notify_dispatch_density_05,5,5000,25000,120.5,150.1,199.9,42000.5,39000.2,85.7\n"); + + var 
entries = await BaselineLoader.LoadAsync(path, CancellationToken.None); + var entry = Assert.Single(entries); + + Assert.Equal("notify_dispatch_density_05", entry.Key); + Assert.Equal(5, entry.Value.Iterations); + Assert.Equal(5000, entry.Value.EventCount); + Assert.Equal(25000, entry.Value.DeliveryCount); + Assert.Equal(120.5, entry.Value.MeanMs); + Assert.Equal(39000.2, entry.Value.MinThroughputPerSecond); + Assert.Equal(85.7, entry.Value.MaxAllocatedMb); + } + finally + { + File.Delete(path); + } + } +} diff --git a/src/StellaOps.Bench/Notify/StellaOps.Bench.Notify.Tests/BenchmarkScenarioReportTests.cs b/src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify.Tests/BenchmarkScenarioReportTests.cs similarity index 97% rename from src/StellaOps.Bench/Notify/StellaOps.Bench.Notify.Tests/BenchmarkScenarioReportTests.cs rename to src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify.Tests/BenchmarkScenarioReportTests.cs index 5ac43b76..bb304c82 100644 --- a/src/StellaOps.Bench/Notify/StellaOps.Bench.Notify.Tests/BenchmarkScenarioReportTests.cs +++ b/src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify.Tests/BenchmarkScenarioReportTests.cs @@ -1,85 +1,85 @@ -using System.Linq; -using StellaOps.Bench.Notify.Baseline; -using StellaOps.Bench.Notify.Reporting; -using Xunit; - -namespace StellaOps.Bench.Notify.Tests; - -public sealed class BenchmarkScenarioReportTests -{ - [Fact] - public void RegressionDetection_FlagsLatencies() - { - var result = new ScenarioResult( - Id: "scenario", - Label: "Scenario", - Iterations: 3, - TotalEvents: 1000, - TotalRules: 100, - ActionsPerRule: 2, - AverageMatchesPerEvent: 10, - MinMatchesPerEvent: 8, - MaxMatchesPerEvent: 12, - AverageDeliveriesPerEvent: 20, - TotalDeliveries: 20000, - MeanMs: 200, - P95Ms: 250, - MaxMs: 300, - MeanThroughputPerSecond: 50000, - MinThroughputPerSecond: 40000, - MaxAllocatedMb: 100, - ThresholdMs: null, - MinThroughputThresholdPerSecond: null, - MaxAllocatedThresholdMb: null); - - var baseline = new BaselineEntry( - ScenarioId: "scenario", - Iterations: 3, - EventCount: 1000, - DeliveryCount: 20000, - MeanMs: 150, - P95Ms: 180, - MaxMs: 200, - MeanThroughputPerSecond: 60000, - MinThroughputPerSecond: 50000, - MaxAllocatedMb: 90); - - var report = new BenchmarkScenarioReport(result, baseline, regressionLimit: 1.1); - - Assert.True(report.DurationRegressionBreached); - Assert.True(report.ThroughputRegressionBreached); - Assert.Contains(report.BuildRegressionFailureMessages(), message => message.Contains("max duration")); - } - - [Fact] - public void RegressionDetection_NoBaseline_NoBreaches() - { - var result = new ScenarioResult( - Id: "scenario", - Label: "Scenario", - Iterations: 3, - TotalEvents: 1000, - TotalRules: 100, - ActionsPerRule: 2, - AverageMatchesPerEvent: 10, - MinMatchesPerEvent: 8, - MaxMatchesPerEvent: 12, - AverageDeliveriesPerEvent: 20, - TotalDeliveries: 20000, - MeanMs: 200, - P95Ms: 250, - MaxMs: 300, - MeanThroughputPerSecond: 50000, - MinThroughputPerSecond: 40000, - MaxAllocatedMb: 100, - ThresholdMs: null, - MinThroughputThresholdPerSecond: null, - MaxAllocatedThresholdMb: null); - - var report = new BenchmarkScenarioReport(result, baseline: null, regressionLimit: null); - - Assert.False(report.DurationRegressionBreached); - Assert.False(report.ThroughputRegressionBreached); - Assert.Empty(report.BuildRegressionFailureMessages()); - } -} +using System.Linq; +using StellaOps.Bench.Notify.Baseline; +using StellaOps.Bench.Notify.Reporting; +using Xunit; + +namespace StellaOps.Bench.Notify.Tests; + 
+public sealed class BenchmarkScenarioReportTests +{ + [Fact] + public void RegressionDetection_FlagsLatencies() + { + var result = new ScenarioResult( + Id: "scenario", + Label: "Scenario", + Iterations: 3, + TotalEvents: 1000, + TotalRules: 100, + ActionsPerRule: 2, + AverageMatchesPerEvent: 10, + MinMatchesPerEvent: 8, + MaxMatchesPerEvent: 12, + AverageDeliveriesPerEvent: 20, + TotalDeliveries: 20000, + MeanMs: 200, + P95Ms: 250, + MaxMs: 300, + MeanThroughputPerSecond: 50000, + MinThroughputPerSecond: 40000, + MaxAllocatedMb: 100, + ThresholdMs: null, + MinThroughputThresholdPerSecond: null, + MaxAllocatedThresholdMb: null); + + var baseline = new BaselineEntry( + ScenarioId: "scenario", + Iterations: 3, + EventCount: 1000, + DeliveryCount: 20000, + MeanMs: 150, + P95Ms: 180, + MaxMs: 200, + MeanThroughputPerSecond: 60000, + MinThroughputPerSecond: 50000, + MaxAllocatedMb: 90); + + var report = new BenchmarkScenarioReport(result, baseline, regressionLimit: 1.1); + + Assert.True(report.DurationRegressionBreached); + Assert.True(report.ThroughputRegressionBreached); + Assert.Contains(report.BuildRegressionFailureMessages(), message => message.Contains("max duration")); + } + + [Fact] + public void RegressionDetection_NoBaseline_NoBreaches() + { + var result = new ScenarioResult( + Id: "scenario", + Label: "Scenario", + Iterations: 3, + TotalEvents: 1000, + TotalRules: 100, + ActionsPerRule: 2, + AverageMatchesPerEvent: 10, + MinMatchesPerEvent: 8, + MaxMatchesPerEvent: 12, + AverageDeliveriesPerEvent: 20, + TotalDeliveries: 20000, + MeanMs: 200, + P95Ms: 250, + MaxMs: 300, + MeanThroughputPerSecond: 50000, + MinThroughputPerSecond: 40000, + MaxAllocatedMb: 100, + ThresholdMs: null, + MinThroughputThresholdPerSecond: null, + MaxAllocatedThresholdMb: null); + + var report = new BenchmarkScenarioReport(result, baseline: null, regressionLimit: null); + + Assert.False(report.DurationRegressionBreached); + Assert.False(report.ThroughputRegressionBreached); + Assert.Empty(report.BuildRegressionFailureMessages()); + } +} diff --git a/src/StellaOps.Bench/Notify/StellaOps.Bench.Notify.Tests/NotifyScenarioRunnerTests.cs b/src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify.Tests/NotifyScenarioRunnerTests.cs similarity index 97% rename from src/StellaOps.Bench/Notify/StellaOps.Bench.Notify.Tests/NotifyScenarioRunnerTests.cs rename to src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify.Tests/NotifyScenarioRunnerTests.cs index 2df58760..41d5c0ea 100644 --- a/src/StellaOps.Bench/Notify/StellaOps.Bench.Notify.Tests/NotifyScenarioRunnerTests.cs +++ b/src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify.Tests/NotifyScenarioRunnerTests.cs @@ -1,33 +1,33 @@ -using System.Threading; -using Xunit; - -namespace StellaOps.Bench.Notify.Tests; - -public sealed class NotifyScenarioRunnerTests -{ - [Fact] - public void Execute_ComputesDeterministicMetrics() - { - var config = new NotifyScenarioConfig - { - Id = "unit_test", - EventCount = 500, - RuleCount = 40, - ActionsPerRule = 3, - MatchRate = 0.25, - TenantCount = 4, - ChannelCount = 16 - }; - - var runner = new NotifyScenarioRunner(config); - var result = runner.Execute(2, CancellationToken.None); - - Assert.Equal(config.ResolveEventCount(), result.TotalEvents); - Assert.Equal(config.ResolveRuleCount(), result.TotalRules); - Assert.Equal(config.ResolveActionsPerRule(), result.ActionsPerRule); - Assert.True(result.TotalMatches > 0); - Assert.Equal(result.TotalMatches * result.ActionsPerRule, result.TotalDeliveries); - Assert.Equal(2, 
result.Durations.Count); - Assert.All(result.Durations, value => Assert.True(value > 0)); - } -} +using System.Threading; +using Xunit; + +namespace StellaOps.Bench.Notify.Tests; + +public sealed class NotifyScenarioRunnerTests +{ + [Fact] + public void Execute_ComputesDeterministicMetrics() + { + var config = new NotifyScenarioConfig + { + Id = "unit_test", + EventCount = 500, + RuleCount = 40, + ActionsPerRule = 3, + MatchRate = 0.25, + TenantCount = 4, + ChannelCount = 16 + }; + + var runner = new NotifyScenarioRunner(config); + var result = runner.Execute(2, CancellationToken.None); + + Assert.Equal(config.ResolveEventCount(), result.TotalEvents); + Assert.Equal(config.ResolveRuleCount(), result.TotalRules); + Assert.Equal(config.ResolveActionsPerRule(), result.ActionsPerRule); + Assert.True(result.TotalMatches > 0); + Assert.Equal(result.TotalMatches * result.ActionsPerRule, result.TotalDeliveries); + Assert.Equal(2, result.Durations.Count); + Assert.All(result.Durations, value => Assert.True(value > 0)); + } +} diff --git a/src/StellaOps.Bench/Notify/StellaOps.Bench.Notify.Tests/PrometheusWriterTests.cs b/src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify.Tests/PrometheusWriterTests.cs similarity index 96% rename from src/StellaOps.Bench/Notify/StellaOps.Bench.Notify.Tests/PrometheusWriterTests.cs rename to src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify.Tests/PrometheusWriterTests.cs index 309f85e3..91c62203 100644 --- a/src/StellaOps.Bench/Notify/StellaOps.Bench.Notify.Tests/PrometheusWriterTests.cs +++ b/src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify.Tests/PrometheusWriterTests.cs @@ -1,64 +1,64 @@ -using System.IO; -using StellaOps.Bench.Notify.Baseline; -using StellaOps.Bench.Notify.Reporting; -using Xunit; - -namespace StellaOps.Bench.Notify.Tests; - -public sealed class PrometheusWriterTests -{ - [Fact] - public void Write_EmitsScenarioMetrics() - { - var result = new ScenarioResult( - Id: "scenario", - Label: "Scenario", - Iterations: 3, - TotalEvents: 1000, - TotalRules: 100, - ActionsPerRule: 2, - AverageMatchesPerEvent: 10, - MinMatchesPerEvent: 8, - MaxMatchesPerEvent: 12, - AverageDeliveriesPerEvent: 20, - TotalDeliveries: 20000, - MeanMs: 200, - P95Ms: 250, - MaxMs: 300, - MeanThroughputPerSecond: 50000, - MinThroughputPerSecond: 40000, - MaxAllocatedMb: 100, - ThresholdMs: 900, - MinThroughputThresholdPerSecond: 35000, - MaxAllocatedThresholdMb: 150); - - var baseline = new BaselineEntry( - ScenarioId: "scenario", - Iterations: 3, - EventCount: 1000, - DeliveryCount: 20000, - MeanMs: 180, - P95Ms: 210, - MaxMs: 240, - MeanThroughputPerSecond: 52000, - MinThroughputPerSecond: 41000, - MaxAllocatedMb: 95); - - var report = new BenchmarkScenarioReport(result, baseline); - - var path = Path.GetTempFileName(); - try - { - PrometheusWriter.Write(path, new[] { report }); - var content = File.ReadAllText(path); - - Assert.Contains("notify_dispatch_bench_mean_ms", content); - Assert.Contains("scenario\"} 200", content); - Assert.Contains("notify_dispatch_bench_baseline_mean_ms", content); - } - finally - { - File.Delete(path); - } - } -} +using System.IO; +using StellaOps.Bench.Notify.Baseline; +using StellaOps.Bench.Notify.Reporting; +using Xunit; + +namespace StellaOps.Bench.Notify.Tests; + +public sealed class PrometheusWriterTests +{ + [Fact] + public void Write_EmitsScenarioMetrics() + { + var result = new ScenarioResult( + Id: "scenario", + Label: "Scenario", + Iterations: 3, + TotalEvents: 1000, + TotalRules: 100, + ActionsPerRule: 2, + 
AverageMatchesPerEvent: 10, + MinMatchesPerEvent: 8, + MaxMatchesPerEvent: 12, + AverageDeliveriesPerEvent: 20, + TotalDeliveries: 20000, + MeanMs: 200, + P95Ms: 250, + MaxMs: 300, + MeanThroughputPerSecond: 50000, + MinThroughputPerSecond: 40000, + MaxAllocatedMb: 100, + ThresholdMs: 900, + MinThroughputThresholdPerSecond: 35000, + MaxAllocatedThresholdMb: 150); + + var baseline = new BaselineEntry( + ScenarioId: "scenario", + Iterations: 3, + EventCount: 1000, + DeliveryCount: 20000, + MeanMs: 180, + P95Ms: 210, + MaxMs: 240, + MeanThroughputPerSecond: 52000, + MinThroughputPerSecond: 41000, + MaxAllocatedMb: 95); + + var report = new BenchmarkScenarioReport(result, baseline); + + var path = Path.GetTempFileName(); + try + { + PrometheusWriter.Write(path, new[] { report }); + var content = File.ReadAllText(path); + + Assert.Contains("notify_dispatch_bench_mean_ms", content); + Assert.Contains("scenario\"} 200", content); + Assert.Contains("notify_dispatch_bench_baseline_mean_ms", content); + } + finally + { + File.Delete(path); + } + } +} diff --git a/src/StellaOps.Bench/Notify/StellaOps.Bench.Notify.Tests/StellaOps.Bench.Notify.Tests.csproj b/src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify.Tests/StellaOps.Bench.Notify.Tests.csproj similarity index 97% rename from src/StellaOps.Bench/Notify/StellaOps.Bench.Notify.Tests/StellaOps.Bench.Notify.Tests.csproj rename to src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify.Tests/StellaOps.Bench.Notify.Tests.csproj index f7540b8e..83d07395 100644 --- a/src/StellaOps.Bench/Notify/StellaOps.Bench.Notify.Tests/StellaOps.Bench.Notify.Tests.csproj +++ b/src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify.Tests/StellaOps.Bench.Notify.Tests.csproj @@ -1,27 +1,27 @@ - - - net10.0 - enable - enable - preview - true - false - - - - - - - runtime; build; native; contentfiles; analyzers; buildtransitive - all - - - runtime; build; native; contentfiles; analyzers; buildtransitive - all - - - - - - - + + + net10.0 + enable + enable + preview + true + false + + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + diff --git a/src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Baseline/BaselineEntry.cs b/src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Baseline/BaselineEntry.cs similarity index 96% rename from src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Baseline/BaselineEntry.cs rename to src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Baseline/BaselineEntry.cs index c03e8b49..b770c914 100644 --- a/src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Baseline/BaselineEntry.cs +++ b/src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Baseline/BaselineEntry.cs @@ -1,13 +1,13 @@ -namespace StellaOps.Bench.Notify.Baseline; - -internal sealed record BaselineEntry( - string ScenarioId, - int Iterations, - int EventCount, - int DeliveryCount, - double MeanMs, - double P95Ms, - double MaxMs, - double MeanThroughputPerSecond, - double MinThroughputPerSecond, - double MaxAllocatedMb); +namespace StellaOps.Bench.Notify.Baseline; + +internal sealed record BaselineEntry( + string ScenarioId, + int Iterations, + int EventCount, + int DeliveryCount, + double MeanMs, + double P95Ms, + double MaxMs, + double MeanThroughputPerSecond, + double MinThroughputPerSecond, + double MaxAllocatedMb); diff --git a/src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Baseline/BaselineLoader.cs 
b/src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Baseline/BaselineLoader.cs similarity index 97% rename from src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Baseline/BaselineLoader.cs rename to src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Baseline/BaselineLoader.cs index 792880da..5b70d029 100644 --- a/src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Baseline/BaselineLoader.cs +++ b/src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Baseline/BaselineLoader.cs @@ -1,87 +1,87 @@ -using System; -using System.Collections.Generic; -using System.Globalization; -using System.IO; -using System.Threading; -using System.Threading.Tasks; - -namespace StellaOps.Bench.Notify.Baseline; - -internal static class BaselineLoader -{ - public static async Task> LoadAsync(string path, CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(path); - - var resolved = Path.GetFullPath(path); - if (!File.Exists(resolved)) - { - return new Dictionary(StringComparer.OrdinalIgnoreCase); - } - - var results = new Dictionary(StringComparer.OrdinalIgnoreCase); - - await using var stream = new FileStream(resolved, FileMode.Open, FileAccess.Read, FileShare.Read); - using var reader = new StreamReader(stream); - - var lineNumber = 0; - while (true) - { - cancellationToken.ThrowIfCancellationRequested(); - - var line = await reader.ReadLineAsync().ConfigureAwait(false); - if (line is null) - { - break; - } - - lineNumber++; - if (lineNumber == 1 || string.IsNullOrWhiteSpace(line)) - { - continue; - } - - var parts = line.Split(',', StringSplitOptions.TrimEntries); - if (parts.Length < 10) - { - throw new InvalidOperationException($"Baseline '{resolved}' line {lineNumber} is invalid (expected 10 columns, found {parts.Length})."); - } - - var entry = new BaselineEntry( - ScenarioId: parts[0], - Iterations: ParseInt(parts[1], resolved, lineNumber), - EventCount: ParseInt(parts[2], resolved, lineNumber), - DeliveryCount: ParseInt(parts[3], resolved, lineNumber), - MeanMs: ParseDouble(parts[4], resolved, lineNumber), - P95Ms: ParseDouble(parts[5], resolved, lineNumber), - MaxMs: ParseDouble(parts[6], resolved, lineNumber), - MeanThroughputPerSecond: ParseDouble(parts[7], resolved, lineNumber), - MinThroughputPerSecond: ParseDouble(parts[8], resolved, lineNumber), - MaxAllocatedMb: ParseDouble(parts[9], resolved, lineNumber)); - - results[entry.ScenarioId] = entry; - } - - return results; - } - - private static int ParseInt(string value, string file, int line) - { - if (int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsed)) - { - return parsed; - } - - throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid integer '{value}'."); - } - - private static double ParseDouble(string value, string file, int line) - { - if (double.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out var parsed)) - { - return parsed; - } - - throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid number '{value}'."); - } -} +using System; +using System.Collections.Generic; +using System.Globalization; +using System.IO; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Bench.Notify.Baseline; + +internal static class BaselineLoader +{ + public static async Task> LoadAsync(string path, CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(path); + + var resolved = Path.GetFullPath(path); + if (!File.Exists(resolved)) + { + return 
new Dictionary(StringComparer.OrdinalIgnoreCase); + } + + var results = new Dictionary(StringComparer.OrdinalIgnoreCase); + + await using var stream = new FileStream(resolved, FileMode.Open, FileAccess.Read, FileShare.Read); + using var reader = new StreamReader(stream); + + var lineNumber = 0; + while (true) + { + cancellationToken.ThrowIfCancellationRequested(); + + var line = await reader.ReadLineAsync().ConfigureAwait(false); + if (line is null) + { + break; + } + + lineNumber++; + if (lineNumber == 1 || string.IsNullOrWhiteSpace(line)) + { + continue; + } + + var parts = line.Split(',', StringSplitOptions.TrimEntries); + if (parts.Length < 10) + { + throw new InvalidOperationException($"Baseline '{resolved}' line {lineNumber} is invalid (expected 10 columns, found {parts.Length})."); + } + + var entry = new BaselineEntry( + ScenarioId: parts[0], + Iterations: ParseInt(parts[1], resolved, lineNumber), + EventCount: ParseInt(parts[2], resolved, lineNumber), + DeliveryCount: ParseInt(parts[3], resolved, lineNumber), + MeanMs: ParseDouble(parts[4], resolved, lineNumber), + P95Ms: ParseDouble(parts[5], resolved, lineNumber), + MaxMs: ParseDouble(parts[6], resolved, lineNumber), + MeanThroughputPerSecond: ParseDouble(parts[7], resolved, lineNumber), + MinThroughputPerSecond: ParseDouble(parts[8], resolved, lineNumber), + MaxAllocatedMb: ParseDouble(parts[9], resolved, lineNumber)); + + results[entry.ScenarioId] = entry; + } + + return results; + } + + private static int ParseInt(string value, string file, int line) + { + if (int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsed)) + { + return parsed; + } + + throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid integer '{value}'."); + } + + private static double ParseDouble(string value, string file, int line) + { + if (double.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out var parsed)) + { + return parsed; + } + + throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid number '{value}'."); + } +} diff --git a/src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/BenchmarkConfig.cs b/src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/BenchmarkConfig.cs similarity index 96% rename from src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/BenchmarkConfig.cs rename to src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/BenchmarkConfig.cs index 49b53a31..bc30b42e 100644 --- a/src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/BenchmarkConfig.cs +++ b/src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/BenchmarkConfig.cs @@ -1,220 +1,220 @@ -using System.Security.Cryptography; -using System.Text; -using System.Text.Json; -using System.Text.Json.Serialization; - -namespace StellaOps.Bench.Notify; - -internal sealed record BenchmarkConfig( - double? ThresholdMs, - double? MinThroughputPerSecond, - double? MaxAllocatedMb, - int? 
Iterations, - IReadOnlyList Scenarios) -{ - public static async Task LoadAsync(string path) - { - ArgumentException.ThrowIfNullOrWhiteSpace(path); - - var resolved = Path.GetFullPath(path); - if (!File.Exists(resolved)) - { - throw new FileNotFoundException($"Benchmark configuration '{resolved}' was not found.", resolved); - } - - await using var stream = File.OpenRead(resolved); - var model = await JsonSerializer.DeserializeAsync( - stream, - new JsonSerializerOptions(JsonSerializerDefaults.Web) - { - PropertyNameCaseInsensitive = true, - ReadCommentHandling = JsonCommentHandling.Skip, - AllowTrailingCommas = true - }).ConfigureAwait(false); - - if (model is null) - { - throw new InvalidOperationException($"Benchmark configuration '{resolved}' could not be parsed."); - } - - if (model.Scenarios.Count == 0) - { - throw new InvalidOperationException($"Benchmark configuration '{resolved}' does not contain any scenarios."); - } - - foreach (var scenario in model.Scenarios) - { - scenario.Validate(); - } - - return new BenchmarkConfig( - model.ThresholdMs, - model.MinThroughputPerSecond, - model.MaxAllocatedMb, - model.Iterations, - model.Scenarios); - } - - private sealed class BenchmarkConfigModel - { - [JsonPropertyName("thresholdMs")] - public double? ThresholdMs { get; init; } - - [JsonPropertyName("minThroughputPerSecond")] - public double? MinThroughputPerSecond { get; init; } - - [JsonPropertyName("maxAllocatedMb")] - public double? MaxAllocatedMb { get; init; } - - [JsonPropertyName("iterations")] - public int? Iterations { get; init; } - - [JsonPropertyName("scenarios")] - public List Scenarios { get; init; } = new(); - } -} - -internal sealed class NotifyScenarioConfig -{ - private const int DefaultEventCount = 10_000; - private const int DefaultRuleCount = 200; - private const int DefaultActionsPerRule = 3; - private const double DefaultMatchRate = 0.25d; - private const int DefaultTenantCount = 4; - private const int DefaultChannelCount = 8; - private const int BaseSeed = 2025_10_26; - - [JsonPropertyName("id")] - public string? Id { get; init; } - - [JsonPropertyName("label")] - public string? Label { get; init; } - - [JsonPropertyName("eventCount")] - public int EventCount { get; init; } = DefaultEventCount; - - [JsonPropertyName("ruleCount")] - public int RuleCount { get; init; } = DefaultRuleCount; - - [JsonPropertyName("actionsPerRule")] - public int ActionsPerRule { get; init; } = DefaultActionsPerRule; - - [JsonPropertyName("matchRate")] - public double? MatchRate { get; init; } - - [JsonPropertyName("tenantCount")] - public int? TenantCount { get; init; } - - [JsonPropertyName("channelCount")] - public int? ChannelCount { get; init; } - - [JsonPropertyName("seed")] - public int? Seed { get; init; } - - [JsonPropertyName("thresholdMs")] - public double? ThresholdMs { get; init; } - - [JsonPropertyName("minThroughputPerSecond")] - public double? MinThroughputPerSecond { get; init; } - - [JsonPropertyName("maxAllocatedMb")] - public double? MaxAllocatedMb { get; init; } - - [JsonPropertyName("iterations")] - public int? Iterations { get; init; } - - public string ScenarioId => string.IsNullOrWhiteSpace(Id) ? "notify_dispatch" : Id!.Trim(); - - public string DisplayLabel => string.IsNullOrWhiteSpace(Label) ? 
ScenarioId : Label!.Trim(); - - public int ResolveEventCount() - { - if (EventCount <= 0) - { - throw new InvalidOperationException($"Scenario '{ScenarioId}' requires eventCount > 0."); - } - - return EventCount; - } - - public int ResolveRuleCount() - { - if (RuleCount <= 0) - { - throw new InvalidOperationException($"Scenario '{ScenarioId}' requires ruleCount > 0."); - } - - return RuleCount; - } - - public int ResolveActionsPerRule() - { - if (ActionsPerRule <= 0) - { - throw new InvalidOperationException($"Scenario '{ScenarioId}' requires actionsPerRule > 0."); - } - - return ActionsPerRule; - } - - public double ResolveMatchRate() - { - var rate = MatchRate ?? DefaultMatchRate; - if (!double.IsFinite(rate) || rate <= 0d || rate > 1d) - { - throw new InvalidOperationException($"Scenario '{ScenarioId}' requires matchRate within (0, 1]."); - } - - return rate; - } - - public int ResolveTenantCount() - { - var tenants = TenantCount ?? DefaultTenantCount; - if (tenants <= 0) - { - throw new InvalidOperationException($"Scenario '{ScenarioId}' requires tenantCount > 0."); - } - - return tenants; - } - - public int ResolveChannelCount() - { - var channels = ChannelCount ?? DefaultChannelCount; - if (channels <= 0) - { - throw new InvalidOperationException($"Scenario '{ScenarioId}' requires channelCount > 0."); - } - - return channels; - } - - public int ResolveSeed() - { - if (Seed is { } explicitSeed && explicitSeed > 0) - { - return explicitSeed; - } - - var material = Encoding.UTF8.GetBytes($"stellaops-notify-bench::{ScenarioId}"); - var hash = SHA256.HashData(material); - var derived = BitConverter.ToInt32(hash, 0) & int.MaxValue; - if (derived == 0) - { - derived = BaseSeed; - } - - return derived; - } - - public void Validate() - { - ResolveEventCount(); - ResolveRuleCount(); - ResolveActionsPerRule(); - ResolveMatchRate(); - ResolveTenantCount(); - ResolveChannelCount(); - } -} +using System.Security.Cryptography; +using System.Text; +using System.Text.Json; +using System.Text.Json.Serialization; + +namespace StellaOps.Bench.Notify; + +internal sealed record BenchmarkConfig( + double? ThresholdMs, + double? MinThroughputPerSecond, + double? MaxAllocatedMb, + int? Iterations, + IReadOnlyList Scenarios) +{ + public static async Task LoadAsync(string path) + { + ArgumentException.ThrowIfNullOrWhiteSpace(path); + + var resolved = Path.GetFullPath(path); + if (!File.Exists(resolved)) + { + throw new FileNotFoundException($"Benchmark configuration '{resolved}' was not found.", resolved); + } + + await using var stream = File.OpenRead(resolved); + var model = await JsonSerializer.DeserializeAsync( + stream, + new JsonSerializerOptions(JsonSerializerDefaults.Web) + { + PropertyNameCaseInsensitive = true, + ReadCommentHandling = JsonCommentHandling.Skip, + AllowTrailingCommas = true + }).ConfigureAwait(false); + + if (model is null) + { + throw new InvalidOperationException($"Benchmark configuration '{resolved}' could not be parsed."); + } + + if (model.Scenarios.Count == 0) + { + throw new InvalidOperationException($"Benchmark configuration '{resolved}' does not contain any scenarios."); + } + + foreach (var scenario in model.Scenarios) + { + scenario.Validate(); + } + + return new BenchmarkConfig( + model.ThresholdMs, + model.MinThroughputPerSecond, + model.MaxAllocatedMb, + model.Iterations, + model.Scenarios); + } + + private sealed class BenchmarkConfigModel + { + [JsonPropertyName("thresholdMs")] + public double? 
ThresholdMs { get; init; } + + [JsonPropertyName("minThroughputPerSecond")] + public double? MinThroughputPerSecond { get; init; } + + [JsonPropertyName("maxAllocatedMb")] + public double? MaxAllocatedMb { get; init; } + + [JsonPropertyName("iterations")] + public int? Iterations { get; init; } + + [JsonPropertyName("scenarios")] + public List Scenarios { get; init; } = new(); + } +} + +internal sealed class NotifyScenarioConfig +{ + private const int DefaultEventCount = 10_000; + private const int DefaultRuleCount = 200; + private const int DefaultActionsPerRule = 3; + private const double DefaultMatchRate = 0.25d; + private const int DefaultTenantCount = 4; + private const int DefaultChannelCount = 8; + private const int BaseSeed = 2025_10_26; + + [JsonPropertyName("id")] + public string? Id { get; init; } + + [JsonPropertyName("label")] + public string? Label { get; init; } + + [JsonPropertyName("eventCount")] + public int EventCount { get; init; } = DefaultEventCount; + + [JsonPropertyName("ruleCount")] + public int RuleCount { get; init; } = DefaultRuleCount; + + [JsonPropertyName("actionsPerRule")] + public int ActionsPerRule { get; init; } = DefaultActionsPerRule; + + [JsonPropertyName("matchRate")] + public double? MatchRate { get; init; } + + [JsonPropertyName("tenantCount")] + public int? TenantCount { get; init; } + + [JsonPropertyName("channelCount")] + public int? ChannelCount { get; init; } + + [JsonPropertyName("seed")] + public int? Seed { get; init; } + + [JsonPropertyName("thresholdMs")] + public double? ThresholdMs { get; init; } + + [JsonPropertyName("minThroughputPerSecond")] + public double? MinThroughputPerSecond { get; init; } + + [JsonPropertyName("maxAllocatedMb")] + public double? MaxAllocatedMb { get; init; } + + [JsonPropertyName("iterations")] + public int? Iterations { get; init; } + + public string ScenarioId => string.IsNullOrWhiteSpace(Id) ? "notify_dispatch" : Id!.Trim(); + + public string DisplayLabel => string.IsNullOrWhiteSpace(Label) ? ScenarioId : Label!.Trim(); + + public int ResolveEventCount() + { + if (EventCount <= 0) + { + throw new InvalidOperationException($"Scenario '{ScenarioId}' requires eventCount > 0."); + } + + return EventCount; + } + + public int ResolveRuleCount() + { + if (RuleCount <= 0) + { + throw new InvalidOperationException($"Scenario '{ScenarioId}' requires ruleCount > 0."); + } + + return RuleCount; + } + + public int ResolveActionsPerRule() + { + if (ActionsPerRule <= 0) + { + throw new InvalidOperationException($"Scenario '{ScenarioId}' requires actionsPerRule > 0."); + } + + return ActionsPerRule; + } + + public double ResolveMatchRate() + { + var rate = MatchRate ?? DefaultMatchRate; + if (!double.IsFinite(rate) || rate <= 0d || rate > 1d) + { + throw new InvalidOperationException($"Scenario '{ScenarioId}' requires matchRate within (0, 1]."); + } + + return rate; + } + + public int ResolveTenantCount() + { + var tenants = TenantCount ?? DefaultTenantCount; + if (tenants <= 0) + { + throw new InvalidOperationException($"Scenario '{ScenarioId}' requires tenantCount > 0."); + } + + return tenants; + } + + public int ResolveChannelCount() + { + var channels = ChannelCount ?? 
DefaultChannelCount; + if (channels <= 0) + { + throw new InvalidOperationException($"Scenario '{ScenarioId}' requires channelCount > 0."); + } + + return channels; + } + + public int ResolveSeed() + { + if (Seed is { } explicitSeed && explicitSeed > 0) + { + return explicitSeed; + } + + var material = Encoding.UTF8.GetBytes($"stellaops-notify-bench::{ScenarioId}"); + var hash = SHA256.HashData(material); + var derived = BitConverter.ToInt32(hash, 0) & int.MaxValue; + if (derived == 0) + { + derived = BaseSeed; + } + + return derived; + } + + public void Validate() + { + ResolveEventCount(); + ResolveRuleCount(); + ResolveActionsPerRule(); + ResolveMatchRate(); + ResolveTenantCount(); + ResolveChannelCount(); + } +} diff --git a/src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/DispatchAccumulator.cs b/src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/DispatchAccumulator.cs similarity index 95% rename from src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/DispatchAccumulator.cs rename to src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/DispatchAccumulator.cs index 9080ff00..92c14940 100644 --- a/src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/DispatchAccumulator.cs +++ b/src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/DispatchAccumulator.cs @@ -1,26 +1,26 @@ -using System; - -namespace StellaOps.Bench.Notify; - -internal sealed class DispatchAccumulator -{ - private long _value = 17; - - public void Add(int ruleHash, int actionHash, int eventHash) - { - unchecked - { - _value = (_value * 31) ^ ruleHash; - _value = (_value * 31) ^ actionHash; - _value = (_value * 31) ^ eventHash; - } - } - - public void AssertConsumed() - { - if (_value == 17) - { - throw new InvalidOperationException("Dispatch accumulator did not receive any values."); - } - } -} +using System; + +namespace StellaOps.Bench.Notify; + +internal sealed class DispatchAccumulator +{ + private long _value = 17; + + public void Add(int ruleHash, int actionHash, int eventHash) + { + unchecked + { + _value = (_value * 31) ^ ruleHash; + _value = (_value * 31) ^ actionHash; + _value = (_value * 31) ^ eventHash; + } + } + + public void AssertConsumed() + { + if (_value == 17) + { + throw new InvalidOperationException("Dispatch accumulator did not receive any values."); + } + } +} diff --git a/src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/NotifyScenarioRunner.cs b/src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/NotifyScenarioRunner.cs similarity index 97% rename from src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/NotifyScenarioRunner.cs rename to src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/NotifyScenarioRunner.cs index 2b4739b5..be4b84ea 100644 --- a/src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/NotifyScenarioRunner.cs +++ b/src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/NotifyScenarioRunner.cs @@ -1,386 +1,386 @@ -using System.Collections.Generic; -using System.Diagnostics; -using System.Linq; -using System.Text; -using StellaOps.Notify.Models; - -namespace StellaOps.Bench.Notify; - -internal sealed class NotifyScenarioRunner -{ - private static readonly DateTimeOffset BaseTimestamp = new(2025, 10, 26, 0, 0, 0, TimeSpan.Zero); - private const string EventKind = NotifyEventKinds.ScannerReportReady; - - private readonly NotifyScenarioConfig _config; - private readonly EventDescriptor[] _events; - private readonly RuleDescriptor[][] _rulesByTenant; - private readonly int _totalEvents; - private readonly int _ruleCount; - private readonly int _actionsPerRule; - private 
readonly int _totalMatches; - private readonly int _totalDeliveries; - private readonly double _averageMatchesPerEvent; - private readonly double _averageDeliveriesPerEvent; - private readonly int _minMatchesPerEvent; - private readonly int _maxMatchesPerEvent; - - public NotifyScenarioRunner(NotifyScenarioConfig config) - { - _config = config ?? throw new ArgumentNullException(nameof(config)); - - var eventCount = config.ResolveEventCount(); - var ruleCount = config.ResolveRuleCount(); - var actionsPerRule = config.ResolveActionsPerRule(); - var matchRate = config.ResolveMatchRate(); - var tenantCount = config.ResolveTenantCount(); - var channelCount = config.ResolveChannelCount(); - var seed = config.ResolveSeed(); - - if (tenantCount > ruleCount) - { - tenantCount = Math.Max(1, ruleCount); - } - - _totalEvents = eventCount; - _ruleCount = ruleCount; - _actionsPerRule = actionsPerRule; - - var tenants = BuildTenants(tenantCount); - var channels = BuildChannels(channelCount); - var random = new Random(seed); - - var targetMatchesPerEvent = Math.Max(1, (int)Math.Round(ruleCount * matchRate)); - targetMatchesPerEvent = Math.Min(targetMatchesPerEvent, ruleCount); - - var ruleDescriptors = new List(ruleCount); - var groups = new List(); - - var ruleIndex = 0; - var groupIndex = 0; - var channelCursor = 0; - - while (ruleIndex < ruleCount) - { - var groupSize = Math.Min(targetMatchesPerEvent, ruleCount - ruleIndex); - var tenantIndex = groupIndex % tenantCount; - var tenantId = tenants[tenantIndex]; - - var namespaceValue = $"svc-{tenantIndex:D2}-{groupIndex:D3}"; - var repositoryValue = $"registry.local/{tenantId}/service-{groupIndex:D3}"; - var digestValue = GenerateDigest(random, groupIndex); - - var rules = new RuleDescriptor[groupSize]; - for (var local = 0; local < groupSize && ruleIndex < ruleCount; local++, ruleIndex++) - { - var ruleId = $"rule-{groupIndex:D3}-{local:D3}"; - var actions = new ActionDescriptor[actionsPerRule]; - - for (var actionIndex = 0; actionIndex < actionsPerRule; actionIndex++) - { - var channel = channels[channelCursor % channelCount]; - channelCursor++; - - var actionId = $"{ruleId}-act-{actionIndex:D2}"; - actions[actionIndex] = new ActionDescriptor( - actionId, - channel, - StableHash($"{actionId}|{channel}")); - } - - rules[local] = new RuleDescriptor( - ruleId, - StableHash(ruleId), - tenantIndex, - namespaceValue, - repositoryValue, - digestValue, - actions); - - ruleDescriptors.Add(rules[local]); - } - - groups.Add(new RuleGroup(tenantIndex, namespaceValue, repositoryValue, digestValue, rules)); - groupIndex++; - } - - _rulesByTenant = BuildRulesByTenant(tenantCount, ruleDescriptors); - - var events = new EventDescriptor[eventCount]; - long totalMatches = 0; - var minMatches = int.MaxValue; - var maxMatches = 0; - - for (var eventIndex = 0; eventIndex < eventCount; eventIndex++) - { - var group = groups[eventIndex % groups.Count]; - var matchingRules = group.Rules.Length; - - totalMatches += matchingRules; - if (matchingRules < minMatches) - { - minMatches = matchingRules; - } - - if (matchingRules > maxMatches) - { - maxMatches = matchingRules; - } - - var eventId = GenerateEventId(random, group.TenantIndex, eventIndex); - var timestamp = BaseTimestamp.AddMilliseconds(eventIndex * 10d); - - // Materialize NotifyEvent to reflect production payload shape. 
- _ = NotifyEvent.Create( - eventId, - EventKind, - tenants[group.TenantIndex], - timestamp, - payload: null, - scope: NotifyEventScope.Create( - @namespace: group.Namespace, - repo: group.Repository, - digest: group.Digest)); - - events[eventIndex] = new EventDescriptor( - group.TenantIndex, - EventKind, - group.Namespace, - group.Repository, - group.Digest, - ComputeEventHash(eventId)); - } - - _events = events; - _totalMatches = checked((int)totalMatches); - _totalDeliveries = checked(_totalMatches * actionsPerRule); - _averageMatchesPerEvent = totalMatches / (double)eventCount; - _averageDeliveriesPerEvent = _averageMatchesPerEvent * actionsPerRule; - _minMatchesPerEvent = minMatches; - _maxMatchesPerEvent = maxMatches; - } - - public ScenarioExecutionResult Execute(int iterations, CancellationToken cancellationToken) - { - if (iterations <= 0) - { - throw new ArgumentOutOfRangeException(nameof(iterations), iterations, "Iterations must be positive."); - } - - var durations = new double[iterations]; - var throughputs = new double[iterations]; - var allocations = new double[iterations]; - - for (var index = 0; index < iterations; index++) - { - cancellationToken.ThrowIfCancellationRequested(); - - var beforeAllocated = GC.GetTotalAllocatedBytes(); - var stopwatch = Stopwatch.StartNew(); - - var accumulator = new DispatchAccumulator(); - var observedMatches = 0; - var observedDeliveries = 0; - - foreach (ref readonly var @event in _events.AsSpan()) - { - var tenantRules = _rulesByTenant[@event.TenantIndex]; - foreach (var rule in tenantRules) - { - if (!Matches(rule, @event)) - { - continue; - } - - observedMatches++; - - var actions = rule.Actions; - for (var actionIndex = 0; actionIndex < actions.Length; actionIndex++) - { - observedDeliveries++; - accumulator.Add(rule.RuleHash, actions[actionIndex].Hash, @event.EventHash); - } - } - } - - stopwatch.Stop(); - - if (observedMatches != _totalMatches) - { - throw new InvalidOperationException($"Scenario '{_config.ScenarioId}' expected {_totalMatches} matches but observed {observedMatches}."); - } - - if (observedDeliveries != _totalDeliveries) - { - throw new InvalidOperationException($"Scenario '{_config.ScenarioId}' expected {_totalDeliveries} deliveries but observed {observedDeliveries}."); - } - - accumulator.AssertConsumed(); - - var elapsedMs = stopwatch.Elapsed.TotalMilliseconds; - if (elapsedMs <= 0d) - { - elapsedMs = 0.0001d; - } - - var afterAllocated = GC.GetTotalAllocatedBytes(); - - durations[index] = elapsedMs; - throughputs[index] = observedDeliveries / Math.Max(stopwatch.Elapsed.TotalSeconds, 0.0001d); - allocations[index] = Math.Max(0, afterAllocated - beforeAllocated) / (1024d * 1024d); - } - - return new ScenarioExecutionResult( - durations, - throughputs, - allocations, - _totalEvents, - _ruleCount, - _actionsPerRule, - _averageMatchesPerEvent, - _minMatchesPerEvent, - _maxMatchesPerEvent, - _averageDeliveriesPerEvent, - _totalMatches, - _totalDeliveries); - } - - private static bool Matches(in RuleDescriptor rule, in EventDescriptor @event) - { - if (!string.Equals(@event.Kind, EventKind, StringComparison.Ordinal)) - { - return false; - } - - if (!string.Equals(rule.Namespace, @event.Namespace, StringComparison.Ordinal)) - { - return false; - } - - if (!string.Equals(rule.Repository, @event.Repository, StringComparison.Ordinal)) - { - return false; - } - - if (!string.Equals(rule.Digest, @event.Digest, StringComparison.Ordinal)) - { - return false; - } - - return true; - } - - private static int ComputeEventHash(Guid 
eventId) - { - var bytes = eventId.ToByteArray(); - var value = BitConverter.ToInt32(bytes, 0); - return value & int.MaxValue; - } - - private static string GenerateDigest(Random random, int groupIndex) - { - var buffer = new byte[16]; - random.NextBytes(buffer); - - var hex = Convert.ToHexString(buffer).ToLowerInvariant(); - return $"sha256:{hex}{groupIndex:D3}"; - } - - private static Guid GenerateEventId(Random random, int tenantIndex, int eventIndex) - { - Span buffer = stackalloc byte[16]; - random.NextBytes(buffer); - buffer[^1] = (byte)(tenantIndex & 0xFF); - buffer[^2] = (byte)(eventIndex & 0xFF); - return new Guid(buffer); - } - - private static RuleDescriptor[][] BuildRulesByTenant(int tenantCount, List rules) - { - var result = new RuleDescriptor[tenantCount][]; - for (var tenantIndex = 0; tenantIndex < tenantCount; tenantIndex++) - { - result[tenantIndex] = rules - .Where(rule => rule.TenantIndex == tenantIndex) - .ToArray(); - } - - return result; - } - - private static string[] BuildTenants(int tenantCount) - { - var tenants = new string[tenantCount]; - for (var index = 0; index < tenantCount; index++) - { - tenants[index] = $"tenant-{index:D2}"; - } - - return tenants; - } - - private static string[] BuildChannels(int channelCount) - { - var channels = new string[channelCount]; - for (var index = 0; index < channelCount; index++) - { - var kind = (index % 4) switch - { - 0 => "slack", - 1 => "teams", - 2 => "email", - _ => "webhook" - }; - - channels[index] = $"{kind}:channel-{index:D2}"; - } - - return channels; - } - - private static int StableHash(string value) - { - unchecked - { - const int offset = unchecked((int)2166136261); - const int prime = 16777619; - - var hash = offset; - foreach (var ch in value.AsSpan()) - { - hash ^= ch; - hash *= prime; - } - - return hash & int.MaxValue; - } - } - - private readonly record struct RuleDescriptor( - string RuleId, - int RuleHash, - int TenantIndex, - string Namespace, - string Repository, - string Digest, - ActionDescriptor[] Actions); - - private readonly record struct ActionDescriptor( - string ActionId, - string Channel, - int Hash); - - private readonly record struct RuleGroup( - int TenantIndex, - string Namespace, - string Repository, - string Digest, - RuleDescriptor[] Rules); - - private readonly record struct EventDescriptor( - int TenantIndex, - string Kind, - string Namespace, - string Repository, - string Digest, - int EventHash); -} +using System.Collections.Generic; +using System.Diagnostics; +using System.Linq; +using System.Text; +using StellaOps.Notify.Models; + +namespace StellaOps.Bench.Notify; + +internal sealed class NotifyScenarioRunner +{ + private static readonly DateTimeOffset BaseTimestamp = new(2025, 10, 26, 0, 0, 0, TimeSpan.Zero); + private const string EventKind = NotifyEventKinds.ScannerReportReady; + + private readonly NotifyScenarioConfig _config; + private readonly EventDescriptor[] _events; + private readonly RuleDescriptor[][] _rulesByTenant; + private readonly int _totalEvents; + private readonly int _ruleCount; + private readonly int _actionsPerRule; + private readonly int _totalMatches; + private readonly int _totalDeliveries; + private readonly double _averageMatchesPerEvent; + private readonly double _averageDeliveriesPerEvent; + private readonly int _minMatchesPerEvent; + private readonly int _maxMatchesPerEvent; + + public NotifyScenarioRunner(NotifyScenarioConfig config) + { + _config = config ?? 
throw new ArgumentNullException(nameof(config)); + + var eventCount = config.ResolveEventCount(); + var ruleCount = config.ResolveRuleCount(); + var actionsPerRule = config.ResolveActionsPerRule(); + var matchRate = config.ResolveMatchRate(); + var tenantCount = config.ResolveTenantCount(); + var channelCount = config.ResolveChannelCount(); + var seed = config.ResolveSeed(); + + if (tenantCount > ruleCount) + { + tenantCount = Math.Max(1, ruleCount); + } + + _totalEvents = eventCount; + _ruleCount = ruleCount; + _actionsPerRule = actionsPerRule; + + var tenants = BuildTenants(tenantCount); + var channels = BuildChannels(channelCount); + var random = new Random(seed); + + var targetMatchesPerEvent = Math.Max(1, (int)Math.Round(ruleCount * matchRate)); + targetMatchesPerEvent = Math.Min(targetMatchesPerEvent, ruleCount); + + var ruleDescriptors = new List(ruleCount); + var groups = new List(); + + var ruleIndex = 0; + var groupIndex = 0; + var channelCursor = 0; + + while (ruleIndex < ruleCount) + { + var groupSize = Math.Min(targetMatchesPerEvent, ruleCount - ruleIndex); + var tenantIndex = groupIndex % tenantCount; + var tenantId = tenants[tenantIndex]; + + var namespaceValue = $"svc-{tenantIndex:D2}-{groupIndex:D3}"; + var repositoryValue = $"registry.local/{tenantId}/service-{groupIndex:D3}"; + var digestValue = GenerateDigest(random, groupIndex); + + var rules = new RuleDescriptor[groupSize]; + for (var local = 0; local < groupSize && ruleIndex < ruleCount; local++, ruleIndex++) + { + var ruleId = $"rule-{groupIndex:D3}-{local:D3}"; + var actions = new ActionDescriptor[actionsPerRule]; + + for (var actionIndex = 0; actionIndex < actionsPerRule; actionIndex++) + { + var channel = channels[channelCursor % channelCount]; + channelCursor++; + + var actionId = $"{ruleId}-act-{actionIndex:D2}"; + actions[actionIndex] = new ActionDescriptor( + actionId, + channel, + StableHash($"{actionId}|{channel}")); + } + + rules[local] = new RuleDescriptor( + ruleId, + StableHash(ruleId), + tenantIndex, + namespaceValue, + repositoryValue, + digestValue, + actions); + + ruleDescriptors.Add(rules[local]); + } + + groups.Add(new RuleGroup(tenantIndex, namespaceValue, repositoryValue, digestValue, rules)); + groupIndex++; + } + + _rulesByTenant = BuildRulesByTenant(tenantCount, ruleDescriptors); + + var events = new EventDescriptor[eventCount]; + long totalMatches = 0; + var minMatches = int.MaxValue; + var maxMatches = 0; + + for (var eventIndex = 0; eventIndex < eventCount; eventIndex++) + { + var group = groups[eventIndex % groups.Count]; + var matchingRules = group.Rules.Length; + + totalMatches += matchingRules; + if (matchingRules < minMatches) + { + minMatches = matchingRules; + } + + if (matchingRules > maxMatches) + { + maxMatches = matchingRules; + } + + var eventId = GenerateEventId(random, group.TenantIndex, eventIndex); + var timestamp = BaseTimestamp.AddMilliseconds(eventIndex * 10d); + + // Materialize NotifyEvent to reflect production payload shape. 
+ _ = NotifyEvent.Create( + eventId, + EventKind, + tenants[group.TenantIndex], + timestamp, + payload: null, + scope: NotifyEventScope.Create( + @namespace: group.Namespace, + repo: group.Repository, + digest: group.Digest)); + + events[eventIndex] = new EventDescriptor( + group.TenantIndex, + EventKind, + group.Namespace, + group.Repository, + group.Digest, + ComputeEventHash(eventId)); + } + + _events = events; + _totalMatches = checked((int)totalMatches); + _totalDeliveries = checked(_totalMatches * actionsPerRule); + _averageMatchesPerEvent = totalMatches / (double)eventCount; + _averageDeliveriesPerEvent = _averageMatchesPerEvent * actionsPerRule; + _minMatchesPerEvent = minMatches; + _maxMatchesPerEvent = maxMatches; + } + + public ScenarioExecutionResult Execute(int iterations, CancellationToken cancellationToken) + { + if (iterations <= 0) + { + throw new ArgumentOutOfRangeException(nameof(iterations), iterations, "Iterations must be positive."); + } + + var durations = new double[iterations]; + var throughputs = new double[iterations]; + var allocations = new double[iterations]; + + for (var index = 0; index < iterations; index++) + { + cancellationToken.ThrowIfCancellationRequested(); + + var beforeAllocated = GC.GetTotalAllocatedBytes(); + var stopwatch = Stopwatch.StartNew(); + + var accumulator = new DispatchAccumulator(); + var observedMatches = 0; + var observedDeliveries = 0; + + foreach (ref readonly var @event in _events.AsSpan()) + { + var tenantRules = _rulesByTenant[@event.TenantIndex]; + foreach (var rule in tenantRules) + { + if (!Matches(rule, @event)) + { + continue; + } + + observedMatches++; + + var actions = rule.Actions; + for (var actionIndex = 0; actionIndex < actions.Length; actionIndex++) + { + observedDeliveries++; + accumulator.Add(rule.RuleHash, actions[actionIndex].Hash, @event.EventHash); + } + } + } + + stopwatch.Stop(); + + if (observedMatches != _totalMatches) + { + throw new InvalidOperationException($"Scenario '{_config.ScenarioId}' expected {_totalMatches} matches but observed {observedMatches}."); + } + + if (observedDeliveries != _totalDeliveries) + { + throw new InvalidOperationException($"Scenario '{_config.ScenarioId}' expected {_totalDeliveries} deliveries but observed {observedDeliveries}."); + } + + accumulator.AssertConsumed(); + + var elapsedMs = stopwatch.Elapsed.TotalMilliseconds; + if (elapsedMs <= 0d) + { + elapsedMs = 0.0001d; + } + + var afterAllocated = GC.GetTotalAllocatedBytes(); + + durations[index] = elapsedMs; + throughputs[index] = observedDeliveries / Math.Max(stopwatch.Elapsed.TotalSeconds, 0.0001d); + allocations[index] = Math.Max(0, afterAllocated - beforeAllocated) / (1024d * 1024d); + } + + return new ScenarioExecutionResult( + durations, + throughputs, + allocations, + _totalEvents, + _ruleCount, + _actionsPerRule, + _averageMatchesPerEvent, + _minMatchesPerEvent, + _maxMatchesPerEvent, + _averageDeliveriesPerEvent, + _totalMatches, + _totalDeliveries); + } + + private static bool Matches(in RuleDescriptor rule, in EventDescriptor @event) + { + if (!string.Equals(@event.Kind, EventKind, StringComparison.Ordinal)) + { + return false; + } + + if (!string.Equals(rule.Namespace, @event.Namespace, StringComparison.Ordinal)) + { + return false; + } + + if (!string.Equals(rule.Repository, @event.Repository, StringComparison.Ordinal)) + { + return false; + } + + if (!string.Equals(rule.Digest, @event.Digest, StringComparison.Ordinal)) + { + return false; + } + + return true; + } + + private static int ComputeEventHash(Guid 
eventId) + { + var bytes = eventId.ToByteArray(); + var value = BitConverter.ToInt32(bytes, 0); + return value & int.MaxValue; + } + + private static string GenerateDigest(Random random, int groupIndex) + { + var buffer = new byte[16]; + random.NextBytes(buffer); + + var hex = Convert.ToHexString(buffer).ToLowerInvariant(); + return $"sha256:{hex}{groupIndex:D3}"; + } + + private static Guid GenerateEventId(Random random, int tenantIndex, int eventIndex) + { + Span buffer = stackalloc byte[16]; + random.NextBytes(buffer); + buffer[^1] = (byte)(tenantIndex & 0xFF); + buffer[^2] = (byte)(eventIndex & 0xFF); + return new Guid(buffer); + } + + private static RuleDescriptor[][] BuildRulesByTenant(int tenantCount, List rules) + { + var result = new RuleDescriptor[tenantCount][]; + for (var tenantIndex = 0; tenantIndex < tenantCount; tenantIndex++) + { + result[tenantIndex] = rules + .Where(rule => rule.TenantIndex == tenantIndex) + .ToArray(); + } + + return result; + } + + private static string[] BuildTenants(int tenantCount) + { + var tenants = new string[tenantCount]; + for (var index = 0; index < tenantCount; index++) + { + tenants[index] = $"tenant-{index:D2}"; + } + + return tenants; + } + + private static string[] BuildChannels(int channelCount) + { + var channels = new string[channelCount]; + for (var index = 0; index < channelCount; index++) + { + var kind = (index % 4) switch + { + 0 => "slack", + 1 => "teams", + 2 => "email", + _ => "webhook" + }; + + channels[index] = $"{kind}:channel-{index:D2}"; + } + + return channels; + } + + private static int StableHash(string value) + { + unchecked + { + const int offset = unchecked((int)2166136261); + const int prime = 16777619; + + var hash = offset; + foreach (var ch in value.AsSpan()) + { + hash ^= ch; + hash *= prime; + } + + return hash & int.MaxValue; + } + } + + private readonly record struct RuleDescriptor( + string RuleId, + int RuleHash, + int TenantIndex, + string Namespace, + string Repository, + string Digest, + ActionDescriptor[] Actions); + + private readonly record struct ActionDescriptor( + string ActionId, + string Channel, + int Hash); + + private readonly record struct RuleGroup( + int TenantIndex, + string Namespace, + string Repository, + string Digest, + RuleDescriptor[] Rules); + + private readonly record struct EventDescriptor( + int TenantIndex, + string Kind, + string Namespace, + string Repository, + string Digest, + int EventHash); +} diff --git a/src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Program.cs b/src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Program.cs similarity index 97% rename from src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Program.cs rename to src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Program.cs index 5264b3f3..a50bd76a 100644 --- a/src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Program.cs +++ b/src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Program.cs @@ -1,364 +1,364 @@ -using System; -using System.Collections.Generic; -using System.IO; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; -using System.Globalization; -using StellaOps.Bench.Notify.Baseline; -using StellaOps.Bench.Notify.Reporting; - -namespace StellaOps.Bench.Notify; - -internal static class Program -{ - public static async Task Main(string[] args) - { - try - { - var options = ProgramOptions.Parse(args); - var config = await BenchmarkConfig.LoadAsync(options.ConfigPath).ConfigureAwait(false); - - var baseline = await BaselineLoader.LoadAsync(options.BaselinePath, 
CancellationToken.None).ConfigureAwait(false); - - var results = new List(); - var reports = new List(); - var failures = new List(); - - foreach (var scenario in config.Scenarios) - { - var iterations = options.Iterations - ?? scenario.Iterations - ?? config.Iterations - ?? 5; - - var runner = new NotifyScenarioRunner(scenario); - var execution = runner.Execute(iterations, CancellationToken.None); - - var durationStats = DurationStatistics.From(execution.Durations); - var throughputStats = ThroughputStatistics.From(execution.Throughputs); - var allocationStats = AllocationStatistics.From(execution.AllocatedMb); - - var scenarioThreshold = scenario.ThresholdMs ?? options.ThresholdMs ?? config.ThresholdMs; - var scenarioThroughputFloor = scenario.MinThroughputPerSecond ?? options.MinThroughputPerSecond ?? config.MinThroughputPerSecond; - var scenarioAllocationLimit = scenario.MaxAllocatedMb ?? options.MaxAllocatedMb ?? config.MaxAllocatedMb; - - var result = new ScenarioResult( - scenario.ScenarioId, - scenario.DisplayLabel, - iterations, - execution.TotalEvents, - execution.TotalRules, - execution.ActionsPerRule, - execution.AverageMatchesPerEvent, - execution.MinMatchesPerEvent, - execution.MaxMatchesPerEvent, - execution.AverageDeliveriesPerEvent, - execution.TotalDeliveries, - durationStats.MeanMs, - durationStats.P95Ms, - durationStats.MaxMs, - throughputStats.MeanPerSecond, - throughputStats.MinPerSecond, - allocationStats.MaxAllocatedMb, - scenarioThreshold, - scenarioThroughputFloor, - scenarioAllocationLimit); - - results.Add(result); - - if (scenarioThreshold is { } threshold && result.MaxMs > threshold) - { - failures.Add($"{result.Id} exceeded latency threshold: {result.MaxMs:F2} ms > {threshold:F2} ms"); - } - - if (scenarioThroughputFloor is { } floor && result.MinThroughputPerSecond < floor) - { - failures.Add($"{result.Id} fell below throughput floor: {result.MinThroughputPerSecond:N0} deliveries/s < {floor:N0} deliveries/s"); - } - - if (scenarioAllocationLimit is { } limit && result.MaxAllocatedMb > limit) - { - failures.Add($"{result.Id} exceeded allocation budget: {result.MaxAllocatedMb:F2} MB > {limit:F2} MB"); - } - - baseline.TryGetValue(result.Id, out var baselineEntry); - var report = new BenchmarkScenarioReport(result, baselineEntry, options.RegressionLimit); - reports.Add(report); - failures.AddRange(report.BuildRegressionFailureMessages()); - } - - TablePrinter.Print(results); - - if (!string.IsNullOrWhiteSpace(options.CsvOutPath)) - { - CsvWriter.Write(options.CsvOutPath!, results); - } - - if (!string.IsNullOrWhiteSpace(options.JsonOutPath)) - { - var metadata = new BenchmarkJsonMetadata( - SchemaVersion: "notify-dispatch-bench/1.0", - CapturedAtUtc: (options.CapturedAtUtc ?? 
DateTimeOffset.UtcNow).ToUniversalTime(), - Commit: options.Commit, - Environment: options.Environment); - - await BenchmarkJsonWriter.WriteAsync( - options.JsonOutPath!, - metadata, - reports, - CancellationToken.None).ConfigureAwait(false); - } - - if (!string.IsNullOrWhiteSpace(options.PrometheusOutPath)) - { - PrometheusWriter.Write(options.PrometheusOutPath!, reports); - } - - if (failures.Count > 0) - { - Console.Error.WriteLine(); - Console.Error.WriteLine("Benchmark failures detected:"); - foreach (var failure in failures.Distinct()) - { - Console.Error.WriteLine($" - {failure}"); - } - - return 1; - } - - return 0; - } - catch (Exception ex) - { - Console.Error.WriteLine($"notify-bench error: {ex.Message}"); - return 1; - } - } - - private sealed record ProgramOptions( - string ConfigPath, - int? Iterations, - double? ThresholdMs, - double? MinThroughputPerSecond, - double? MaxAllocatedMb, - string? CsvOutPath, - string? JsonOutPath, - string? PrometheusOutPath, - string BaselinePath, - DateTimeOffset? CapturedAtUtc, - string? Commit, - string? Environment, - double? RegressionLimit) - { - public static ProgramOptions Parse(string[] args) - { - var configPath = DefaultConfigPath(); - var baselinePath = DefaultBaselinePath(); - - int? iterations = null; - double? thresholdMs = null; - double? minThroughput = null; - double? maxAllocated = null; - string? csvOut = null; - string? jsonOut = null; - string? promOut = null; - DateTimeOffset? capturedAt = null; - string? commit = null; - string? environment = null; - double? regressionLimit = null; - - for (var index = 0; index < args.Length; index++) - { - var current = args[index]; - switch (current) - { - case "--config": - EnsureNext(args, index); - configPath = Path.GetFullPath(args[++index]); - break; - case "--iterations": - EnsureNext(args, index); - iterations = int.Parse(args[++index], CultureInfo.InvariantCulture); - break; - case "--threshold-ms": - EnsureNext(args, index); - thresholdMs = double.Parse(args[++index], CultureInfo.InvariantCulture); - break; - case "--min-throughput": - EnsureNext(args, index); - minThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture); - break; - case "--max-allocated-mb": - EnsureNext(args, index); - maxAllocated = double.Parse(args[++index], CultureInfo.InvariantCulture); - break; - case "--csv": - EnsureNext(args, index); - csvOut = args[++index]; - break; - case "--json": - EnsureNext(args, index); - jsonOut = args[++index]; - break; - case "--prometheus": - EnsureNext(args, index); - promOut = args[++index]; - break; - case "--baseline": - EnsureNext(args, index); - baselinePath = Path.GetFullPath(args[++index]); - break; - case "--captured-at": - EnsureNext(args, index); - capturedAt = DateTimeOffset.Parse(args[++index], CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal); - break; - case "--commit": - EnsureNext(args, index); - commit = args[++index]; - break; - case "--environment": - EnsureNext(args, index); - environment = args[++index]; - break; - case "--regression-limit": - EnsureNext(args, index); - regressionLimit = double.Parse(args[++index], CultureInfo.InvariantCulture); - break; - case "--help": - case "-h": - PrintUsage(); - System.Environment.Exit(0); - break; - default: - throw new ArgumentException($"Unknown argument '{current}'."); - } - } - - return new ProgramOptions( - configPath, - iterations, - thresholdMs, - minThroughput, - maxAllocated, - csvOut, - jsonOut, - promOut, - baselinePath, - capturedAt, - commit, - environment, - 
regressionLimit); - } - - private static string DefaultConfigPath() - { - var binaryDir = AppContext.BaseDirectory; - var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", "..")); - var benchRoot = Path.GetFullPath(Path.Combine(projectDir, "..")); - return Path.Combine(benchRoot, "config.json"); - } - - private static string DefaultBaselinePath() - { - var binaryDir = AppContext.BaseDirectory; - var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", "..")); - var benchRoot = Path.GetFullPath(Path.Combine(projectDir, "..")); - return Path.Combine(benchRoot, "baseline.csv"); - } - - private static void EnsureNext(string[] args, int index) - { - if (index + 1 >= args.Length) - { - throw new ArgumentException("Missing value for argument."); - } - } - - private static void PrintUsage() - { - Console.WriteLine("Usage: notify-bench [options]"); - Console.WriteLine(); - Console.WriteLine("Options:"); - Console.WriteLine(" --config Path to benchmark configuration JSON."); - Console.WriteLine(" --iterations Override iteration count."); - Console.WriteLine(" --threshold-ms Global latency threshold in milliseconds."); - Console.WriteLine(" --min-throughput Global throughput floor (deliveries/second)."); - Console.WriteLine(" --max-allocated-mb Global allocation ceiling (MB)."); - Console.WriteLine(" --csv Write CSV results to path."); - Console.WriteLine(" --json Write JSON results to path."); - Console.WriteLine(" --prometheus Write Prometheus exposition metrics to path."); - Console.WriteLine(" --baseline Baseline CSV path."); - Console.WriteLine(" --captured-at Timestamp to embed in JSON metadata."); - Console.WriteLine(" --commit Commit identifier for metadata."); - Console.WriteLine(" --environment Environment label for metadata."); - Console.WriteLine(" --regression-limit Regression multiplier (default 1.15)."); - } - } -} - -internal static class TablePrinter -{ - public static void Print(IEnumerable results) - { - Console.WriteLine("Scenario | Events | Rules | Match/Evt | Deliver/Evt | Mean(ms) | P95(ms) | Max(ms) | Min k/s | Alloc(MB)"); - Console.WriteLine("---------------------------- | ------------| -------- | --------- | ----------- | ---------- | ---------- | ---------- | -------- | --------"); - foreach (var row in results) - { - Console.WriteLine(string.Join(" | ", new[] - { - row.IdColumn, - row.EventsColumn, - row.RulesColumn, - row.MatchesColumn, - row.DeliveriesColumn, - row.MeanColumn, - row.P95Column, - row.MaxColumn, - row.MinThroughputColumn, - row.AllocatedColumn - })); - } - } -} - -internal static class CsvWriter -{ - public static void Write(string path, IEnumerable results) - { - var resolvedPath = Path.GetFullPath(path); - var directory = Path.GetDirectoryName(resolvedPath); - if (!string.IsNullOrEmpty(directory)) - { - Directory.CreateDirectory(directory); - } - - using var stream = new FileStream(resolvedPath, FileMode.Create, FileAccess.Write, FileShare.None); - using var writer = new StreamWriter(stream); - writer.WriteLine("scenario,iterations,events,deliveries,mean_ms,p95_ms,max_ms,mean_throughput_per_sec,min_throughput_per_sec,max_allocated_mb"); - - foreach (var row in results) - { - writer.Write(row.Id); - writer.Write(','); - writer.Write(row.Iterations.ToString(CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(row.TotalEvents.ToString(CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(row.TotalDeliveries.ToString(CultureInfo.InvariantCulture)); - writer.Write(','); - 
writer.Write(row.MeanMs.ToString("F4", CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(row.P95Ms.ToString("F4", CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(row.MaxMs.ToString("F4", CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(row.MeanThroughputPerSecond.ToString("F4", CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(row.MinThroughputPerSecond.ToString("F4", CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(row.MaxAllocatedMb.ToString("F4", CultureInfo.InvariantCulture)); - writer.WriteLine(); - } - } -} +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using System.Globalization; +using StellaOps.Bench.Notify.Baseline; +using StellaOps.Bench.Notify.Reporting; + +namespace StellaOps.Bench.Notify; + +internal static class Program +{ + public static async Task Main(string[] args) + { + try + { + var options = ProgramOptions.Parse(args); + var config = await BenchmarkConfig.LoadAsync(options.ConfigPath).ConfigureAwait(false); + + var baseline = await BaselineLoader.LoadAsync(options.BaselinePath, CancellationToken.None).ConfigureAwait(false); + + var results = new List(); + var reports = new List(); + var failures = new List(); + + foreach (var scenario in config.Scenarios) + { + var iterations = options.Iterations + ?? scenario.Iterations + ?? config.Iterations + ?? 5; + + var runner = new NotifyScenarioRunner(scenario); + var execution = runner.Execute(iterations, CancellationToken.None); + + var durationStats = DurationStatistics.From(execution.Durations); + var throughputStats = ThroughputStatistics.From(execution.Throughputs); + var allocationStats = AllocationStatistics.From(execution.AllocatedMb); + + var scenarioThreshold = scenario.ThresholdMs ?? options.ThresholdMs ?? config.ThresholdMs; + var scenarioThroughputFloor = scenario.MinThroughputPerSecond ?? options.MinThroughputPerSecond ?? config.MinThroughputPerSecond; + var scenarioAllocationLimit = scenario.MaxAllocatedMb ?? options.MaxAllocatedMb ?? 
config.MaxAllocatedMb; + + var result = new ScenarioResult( + scenario.ScenarioId, + scenario.DisplayLabel, + iterations, + execution.TotalEvents, + execution.TotalRules, + execution.ActionsPerRule, + execution.AverageMatchesPerEvent, + execution.MinMatchesPerEvent, + execution.MaxMatchesPerEvent, + execution.AverageDeliveriesPerEvent, + execution.TotalDeliveries, + durationStats.MeanMs, + durationStats.P95Ms, + durationStats.MaxMs, + throughputStats.MeanPerSecond, + throughputStats.MinPerSecond, + allocationStats.MaxAllocatedMb, + scenarioThreshold, + scenarioThroughputFloor, + scenarioAllocationLimit); + + results.Add(result); + + if (scenarioThreshold is { } threshold && result.MaxMs > threshold) + { + failures.Add($"{result.Id} exceeded latency threshold: {result.MaxMs:F2} ms > {threshold:F2} ms"); + } + + if (scenarioThroughputFloor is { } floor && result.MinThroughputPerSecond < floor) + { + failures.Add($"{result.Id} fell below throughput floor: {result.MinThroughputPerSecond:N0} deliveries/s < {floor:N0} deliveries/s"); + } + + if (scenarioAllocationLimit is { } limit && result.MaxAllocatedMb > limit) + { + failures.Add($"{result.Id} exceeded allocation budget: {result.MaxAllocatedMb:F2} MB > {limit:F2} MB"); + } + + baseline.TryGetValue(result.Id, out var baselineEntry); + var report = new BenchmarkScenarioReport(result, baselineEntry, options.RegressionLimit); + reports.Add(report); + failures.AddRange(report.BuildRegressionFailureMessages()); + } + + TablePrinter.Print(results); + + if (!string.IsNullOrWhiteSpace(options.CsvOutPath)) + { + CsvWriter.Write(options.CsvOutPath!, results); + } + + if (!string.IsNullOrWhiteSpace(options.JsonOutPath)) + { + var metadata = new BenchmarkJsonMetadata( + SchemaVersion: "notify-dispatch-bench/1.0", + CapturedAtUtc: (options.CapturedAtUtc ?? DateTimeOffset.UtcNow).ToUniversalTime(), + Commit: options.Commit, + Environment: options.Environment); + + await BenchmarkJsonWriter.WriteAsync( + options.JsonOutPath!, + metadata, + reports, + CancellationToken.None).ConfigureAwait(false); + } + + if (!string.IsNullOrWhiteSpace(options.PrometheusOutPath)) + { + PrometheusWriter.Write(options.PrometheusOutPath!, reports); + } + + if (failures.Count > 0) + { + Console.Error.WriteLine(); + Console.Error.WriteLine("Benchmark failures detected:"); + foreach (var failure in failures.Distinct()) + { + Console.Error.WriteLine($" - {failure}"); + } + + return 1; + } + + return 0; + } + catch (Exception ex) + { + Console.Error.WriteLine($"notify-bench error: {ex.Message}"); + return 1; + } + } + + private sealed record ProgramOptions( + string ConfigPath, + int? Iterations, + double? ThresholdMs, + double? MinThroughputPerSecond, + double? MaxAllocatedMb, + string? CsvOutPath, + string? JsonOutPath, + string? PrometheusOutPath, + string BaselinePath, + DateTimeOffset? CapturedAtUtc, + string? Commit, + string? Environment, + double? RegressionLimit) + { + public static ProgramOptions Parse(string[] args) + { + var configPath = DefaultConfigPath(); + var baselinePath = DefaultBaselinePath(); + + int? iterations = null; + double? thresholdMs = null; + double? minThroughput = null; + double? maxAllocated = null; + string? csvOut = null; + string? jsonOut = null; + string? promOut = null; + DateTimeOffset? capturedAt = null; + string? commit = null; + string? environment = null; + double? 
regressionLimit = null; + + for (var index = 0; index < args.Length; index++) + { + var current = args[index]; + switch (current) + { + case "--config": + EnsureNext(args, index); + configPath = Path.GetFullPath(args[++index]); + break; + case "--iterations": + EnsureNext(args, index); + iterations = int.Parse(args[++index], CultureInfo.InvariantCulture); + break; + case "--threshold-ms": + EnsureNext(args, index); + thresholdMs = double.Parse(args[++index], CultureInfo.InvariantCulture); + break; + case "--min-throughput": + EnsureNext(args, index); + minThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture); + break; + case "--max-allocated-mb": + EnsureNext(args, index); + maxAllocated = double.Parse(args[++index], CultureInfo.InvariantCulture); + break; + case "--csv": + EnsureNext(args, index); + csvOut = args[++index]; + break; + case "--json": + EnsureNext(args, index); + jsonOut = args[++index]; + break; + case "--prometheus": + EnsureNext(args, index); + promOut = args[++index]; + break; + case "--baseline": + EnsureNext(args, index); + baselinePath = Path.GetFullPath(args[++index]); + break; + case "--captured-at": + EnsureNext(args, index); + capturedAt = DateTimeOffset.Parse(args[++index], CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal); + break; + case "--commit": + EnsureNext(args, index); + commit = args[++index]; + break; + case "--environment": + EnsureNext(args, index); + environment = args[++index]; + break; + case "--regression-limit": + EnsureNext(args, index); + regressionLimit = double.Parse(args[++index], CultureInfo.InvariantCulture); + break; + case "--help": + case "-h": + PrintUsage(); + System.Environment.Exit(0); + break; + default: + throw new ArgumentException($"Unknown argument '{current}'."); + } + } + + return new ProgramOptions( + configPath, + iterations, + thresholdMs, + minThroughput, + maxAllocated, + csvOut, + jsonOut, + promOut, + baselinePath, + capturedAt, + commit, + environment, + regressionLimit); + } + + private static string DefaultConfigPath() + { + var binaryDir = AppContext.BaseDirectory; + var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", "..")); + var benchRoot = Path.GetFullPath(Path.Combine(projectDir, "..")); + return Path.Combine(benchRoot, "config.json"); + } + + private static string DefaultBaselinePath() + { + var binaryDir = AppContext.BaseDirectory; + var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", "..")); + var benchRoot = Path.GetFullPath(Path.Combine(projectDir, "..")); + return Path.Combine(benchRoot, "baseline.csv"); + } + + private static void EnsureNext(string[] args, int index) + { + if (index + 1 >= args.Length) + { + throw new ArgumentException("Missing value for argument."); + } + } + + private static void PrintUsage() + { + Console.WriteLine("Usage: notify-bench [options]"); + Console.WriteLine(); + Console.WriteLine("Options:"); + Console.WriteLine(" --config Path to benchmark configuration JSON."); + Console.WriteLine(" --iterations Override iteration count."); + Console.WriteLine(" --threshold-ms Global latency threshold in milliseconds."); + Console.WriteLine(" --min-throughput Global throughput floor (deliveries/second)."); + Console.WriteLine(" --max-allocated-mb Global allocation ceiling (MB)."); + Console.WriteLine(" --csv Write CSV results to path."); + Console.WriteLine(" --json Write JSON results to path."); + Console.WriteLine(" --prometheus Write Prometheus exposition metrics to path."); + Console.WriteLine(" --baseline Baseline CSV 
path."); + Console.WriteLine(" --captured-at Timestamp to embed in JSON metadata."); + Console.WriteLine(" --commit Commit identifier for metadata."); + Console.WriteLine(" --environment Environment label for metadata."); + Console.WriteLine(" --regression-limit Regression multiplier (default 1.15)."); + } + } +} + +internal static class TablePrinter +{ + public static void Print(IEnumerable results) + { + Console.WriteLine("Scenario | Events | Rules | Match/Evt | Deliver/Evt | Mean(ms) | P95(ms) | Max(ms) | Min k/s | Alloc(MB)"); + Console.WriteLine("---------------------------- | ------------| -------- | --------- | ----------- | ---------- | ---------- | ---------- | -------- | --------"); + foreach (var row in results) + { + Console.WriteLine(string.Join(" | ", new[] + { + row.IdColumn, + row.EventsColumn, + row.RulesColumn, + row.MatchesColumn, + row.DeliveriesColumn, + row.MeanColumn, + row.P95Column, + row.MaxColumn, + row.MinThroughputColumn, + row.AllocatedColumn + })); + } + } +} + +internal static class CsvWriter +{ + public static void Write(string path, IEnumerable results) + { + var resolvedPath = Path.GetFullPath(path); + var directory = Path.GetDirectoryName(resolvedPath); + if (!string.IsNullOrEmpty(directory)) + { + Directory.CreateDirectory(directory); + } + + using var stream = new FileStream(resolvedPath, FileMode.Create, FileAccess.Write, FileShare.None); + using var writer = new StreamWriter(stream); + writer.WriteLine("scenario,iterations,events,deliveries,mean_ms,p95_ms,max_ms,mean_throughput_per_sec,min_throughput_per_sec,max_allocated_mb"); + + foreach (var row in results) + { + writer.Write(row.Id); + writer.Write(','); + writer.Write(row.Iterations.ToString(CultureInfo.InvariantCulture)); + writer.Write(','); + writer.Write(row.TotalEvents.ToString(CultureInfo.InvariantCulture)); + writer.Write(','); + writer.Write(row.TotalDeliveries.ToString(CultureInfo.InvariantCulture)); + writer.Write(','); + writer.Write(row.MeanMs.ToString("F4", CultureInfo.InvariantCulture)); + writer.Write(','); + writer.Write(row.P95Ms.ToString("F4", CultureInfo.InvariantCulture)); + writer.Write(','); + writer.Write(row.MaxMs.ToString("F4", CultureInfo.InvariantCulture)); + writer.Write(','); + writer.Write(row.MeanThroughputPerSecond.ToString("F4", CultureInfo.InvariantCulture)); + writer.Write(','); + writer.Write(row.MinThroughputPerSecond.ToString("F4", CultureInfo.InvariantCulture)); + writer.Write(','); + writer.Write(row.MaxAllocatedMb.ToString("F4", CultureInfo.InvariantCulture)); + writer.WriteLine(); + } + } +} diff --git a/src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Properties/AssemblyInfo.cs b/src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Properties/AssemblyInfo.cs similarity index 97% rename from src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Properties/AssemblyInfo.cs rename to src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Properties/AssemblyInfo.cs index 99c056c4..cd964498 100644 --- a/src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Properties/AssemblyInfo.cs +++ b/src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Properties/AssemblyInfo.cs @@ -1,3 +1,3 @@ -using System.Runtime.CompilerServices; - -[assembly: InternalsVisibleTo("StellaOps.Bench.Notify.Tests")] +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("StellaOps.Bench.Notify.Tests")] diff --git a/src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Reporting/BenchmarkJsonWriter.cs 
b/src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Reporting/BenchmarkJsonWriter.cs similarity index 97% rename from src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Reporting/BenchmarkJsonWriter.cs rename to src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Reporting/BenchmarkJsonWriter.cs index b8ea781a..384cea4f 100644 --- a/src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Reporting/BenchmarkJsonWriter.cs +++ b/src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Reporting/BenchmarkJsonWriter.cs @@ -1,147 +1,147 @@ -using System; -using System.Collections.Generic; -using System.IO; -using System.Linq; -using System.Text.Json; -using System.Text.Json.Serialization; -using System.Threading; -using System.Threading.Tasks; -using StellaOps.Bench.Notify.Baseline; - -namespace StellaOps.Bench.Notify.Reporting; - -internal static class BenchmarkJsonWriter -{ - private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) - { - WriteIndented = true, - DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull - }; - - public static async Task WriteAsync( - string path, - BenchmarkJsonMetadata metadata, - IReadOnlyList reports, - CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(path); - ArgumentNullException.ThrowIfNull(metadata); - ArgumentNullException.ThrowIfNull(reports); - - var resolved = Path.GetFullPath(path); - var directory = Path.GetDirectoryName(resolved); - if (!string.IsNullOrEmpty(directory)) - { - Directory.CreateDirectory(directory); - } - - var document = new BenchmarkJsonDocument( - metadata.SchemaVersion, - metadata.CapturedAtUtc, - metadata.Commit, - metadata.Environment, - reports.Select(CreateScenario).ToArray()); - - await using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None); - await JsonSerializer.SerializeAsync(stream, document, SerializerOptions, cancellationToken).ConfigureAwait(false); - await stream.FlushAsync(cancellationToken).ConfigureAwait(false); - } - - private static BenchmarkJsonScenario CreateScenario(BenchmarkScenarioReport report) - { - var baseline = report.Baseline; - - return new BenchmarkJsonScenario( - report.Result.Id, - report.Result.Label, - report.Result.Iterations, - report.Result.TotalEvents, - report.Result.TotalRules, - report.Result.ActionsPerRule, - report.Result.AverageMatchesPerEvent, - report.Result.MinMatchesPerEvent, - report.Result.MaxMatchesPerEvent, - report.Result.AverageDeliveriesPerEvent, - report.Result.TotalDeliveries, - report.Result.MeanMs, - report.Result.P95Ms, - report.Result.MaxMs, - report.Result.MeanThroughputPerSecond, - report.Result.MinThroughputPerSecond, - report.Result.MaxAllocatedMb, - report.Result.ThresholdMs, - report.Result.MinThroughputThresholdPerSecond, - report.Result.MaxAllocatedThresholdMb, - baseline is null - ? null - : new BenchmarkJsonScenarioBaseline( - baseline.Iterations, - baseline.EventCount, - baseline.DeliveryCount, - baseline.MeanMs, - baseline.P95Ms, - baseline.MaxMs, - baseline.MeanThroughputPerSecond, - baseline.MinThroughputPerSecond, - baseline.MaxAllocatedMb), - new BenchmarkJsonScenarioRegression( - report.DurationRegressionRatio, - report.ThroughputRegressionRatio, - report.RegressionLimit, - report.RegressionBreached)); - } - - private sealed record BenchmarkJsonDocument( - string SchemaVersion, - DateTimeOffset CapturedAt, - string? Commit, - string? 
Environment, - IReadOnlyList Scenarios); - - private sealed record BenchmarkJsonScenario( - string Id, - string Label, - int Iterations, - int TotalEvents, - int TotalRules, - int ActionsPerRule, - double AverageMatchesPerEvent, - int MinMatchesPerEvent, - int MaxMatchesPerEvent, - double AverageDeliveriesPerEvent, - int TotalDeliveries, - double MeanMs, - double P95Ms, - double MaxMs, - double MeanThroughputPerSecond, - double MinThroughputPerSecond, - double MaxAllocatedMb, - double? ThresholdMs, - double? MinThroughputThresholdPerSecond, - double? MaxAllocatedThresholdMb, - BenchmarkJsonScenarioBaseline? Baseline, - BenchmarkJsonScenarioRegression Regression); - - private sealed record BenchmarkJsonScenarioBaseline( - int Iterations, - int EventCount, - int DeliveryCount, - double MeanMs, - double P95Ms, - double MaxMs, - double MeanThroughputPerSecond, - double MinThroughputPerSecond, - double MaxAllocatedMb); - - private sealed record BenchmarkJsonScenarioRegression( - double? DurationRatio, - double? ThroughputRatio, - double Limit, - bool Breached); -} - -internal sealed record BenchmarkJsonMetadata( - string SchemaVersion, - DateTimeOffset CapturedAtUtc, - string? Commit, - string? Environment); +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text.Json; +using System.Text.Json.Serialization; +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Bench.Notify.Baseline; + +namespace StellaOps.Bench.Notify.Reporting; + +internal static class BenchmarkJsonWriter +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + WriteIndented = true, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull + }; + + public static async Task WriteAsync( + string path, + BenchmarkJsonMetadata metadata, + IReadOnlyList reports, + CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(path); + ArgumentNullException.ThrowIfNull(metadata); + ArgumentNullException.ThrowIfNull(reports); + + var resolved = Path.GetFullPath(path); + var directory = Path.GetDirectoryName(resolved); + if (!string.IsNullOrEmpty(directory)) + { + Directory.CreateDirectory(directory); + } + + var document = new BenchmarkJsonDocument( + metadata.SchemaVersion, + metadata.CapturedAtUtc, + metadata.Commit, + metadata.Environment, + reports.Select(CreateScenario).ToArray()); + + await using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None); + await JsonSerializer.SerializeAsync(stream, document, SerializerOptions, cancellationToken).ConfigureAwait(false); + await stream.FlushAsync(cancellationToken).ConfigureAwait(false); + } + + private static BenchmarkJsonScenario CreateScenario(BenchmarkScenarioReport report) + { + var baseline = report.Baseline; + + return new BenchmarkJsonScenario( + report.Result.Id, + report.Result.Label, + report.Result.Iterations, + report.Result.TotalEvents, + report.Result.TotalRules, + report.Result.ActionsPerRule, + report.Result.AverageMatchesPerEvent, + report.Result.MinMatchesPerEvent, + report.Result.MaxMatchesPerEvent, + report.Result.AverageDeliveriesPerEvent, + report.Result.TotalDeliveries, + report.Result.MeanMs, + report.Result.P95Ms, + report.Result.MaxMs, + report.Result.MeanThroughputPerSecond, + report.Result.MinThroughputPerSecond, + report.Result.MaxAllocatedMb, + report.Result.ThresholdMs, + report.Result.MinThroughputThresholdPerSecond, + report.Result.MaxAllocatedThresholdMb, + baseline is 
null + ? null + : new BenchmarkJsonScenarioBaseline( + baseline.Iterations, + baseline.EventCount, + baseline.DeliveryCount, + baseline.MeanMs, + baseline.P95Ms, + baseline.MaxMs, + baseline.MeanThroughputPerSecond, + baseline.MinThroughputPerSecond, + baseline.MaxAllocatedMb), + new BenchmarkJsonScenarioRegression( + report.DurationRegressionRatio, + report.ThroughputRegressionRatio, + report.RegressionLimit, + report.RegressionBreached)); + } + + private sealed record BenchmarkJsonDocument( + string SchemaVersion, + DateTimeOffset CapturedAt, + string? Commit, + string? Environment, + IReadOnlyList Scenarios); + + private sealed record BenchmarkJsonScenario( + string Id, + string Label, + int Iterations, + int TotalEvents, + int TotalRules, + int ActionsPerRule, + double AverageMatchesPerEvent, + int MinMatchesPerEvent, + int MaxMatchesPerEvent, + double AverageDeliveriesPerEvent, + int TotalDeliveries, + double MeanMs, + double P95Ms, + double MaxMs, + double MeanThroughputPerSecond, + double MinThroughputPerSecond, + double MaxAllocatedMb, + double? ThresholdMs, + double? MinThroughputThresholdPerSecond, + double? MaxAllocatedThresholdMb, + BenchmarkJsonScenarioBaseline? Baseline, + BenchmarkJsonScenarioRegression Regression); + + private sealed record BenchmarkJsonScenarioBaseline( + int Iterations, + int EventCount, + int DeliveryCount, + double MeanMs, + double P95Ms, + double MaxMs, + double MeanThroughputPerSecond, + double MinThroughputPerSecond, + double MaxAllocatedMb); + + private sealed record BenchmarkJsonScenarioRegression( + double? DurationRatio, + double? ThroughputRatio, + double Limit, + bool Breached); +} + +internal sealed record BenchmarkJsonMetadata( + string SchemaVersion, + DateTimeOffset CapturedAtUtc, + string? Commit, + string? Environment); diff --git a/src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Reporting/BenchmarkScenarioReport.cs b/src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Reporting/BenchmarkScenarioReport.cs similarity index 97% rename from src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Reporting/BenchmarkScenarioReport.cs rename to src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Reporting/BenchmarkScenarioReport.cs index f0a62903..16d2e85b 100644 --- a/src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Reporting/BenchmarkScenarioReport.cs +++ b/src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Reporting/BenchmarkScenarioReport.cs @@ -1,84 +1,84 @@ -using System; -using System.Collections.Generic; -using StellaOps.Bench.Notify.Baseline; - -namespace StellaOps.Bench.Notify.Reporting; - -internal sealed class BenchmarkScenarioReport -{ - private const double DefaultRegressionLimit = 1.15d; - - public BenchmarkScenarioReport(ScenarioResult result, BaselineEntry? baseline, double? regressionLimit = null) - { - Result = result ?? throw new ArgumentNullException(nameof(result)); - Baseline = baseline; - RegressionLimit = regressionLimit is { } limit && limit > 0 ? limit : DefaultRegressionLimit; - DurationRegressionRatio = CalculateDurationRatio(result.MaxMs, baseline?.MaxMs); - ThroughputRegressionRatio = CalculateThroughputRatio(result.MinThroughputPerSecond, baseline?.MinThroughputPerSecond); - } - - public ScenarioResult Result { get; } - - public BaselineEntry? Baseline { get; } - - public double RegressionLimit { get; } - - public double? DurationRegressionRatio { get; } - - public double? 
ThroughputRegressionRatio { get; } - - public bool DurationRegressionBreached => - DurationRegressionRatio is { } ratio && - ratio >= RegressionLimit; - - public bool ThroughputRegressionBreached => - ThroughputRegressionRatio is { } ratio && - ratio >= RegressionLimit; - - public bool RegressionBreached => DurationRegressionBreached || ThroughputRegressionBreached; - - public IEnumerable BuildRegressionFailureMessages() - { - if (Baseline is null) - { - yield break; - } - - if (DurationRegressionBreached && DurationRegressionRatio is { } durationRatio) - { - var delta = (durationRatio - 1d) * 100d; - yield return $"{Result.Id} exceeded max duration budget: {Result.MaxMs:F2} ms vs baseline {Baseline.MaxMs:F2} ms (+{delta:F1}%)."; - } - - if (ThroughputRegressionBreached && ThroughputRegressionRatio is { } throughputRatio) - { - var delta = (throughputRatio - 1d) * 100d; - yield return $"{Result.Id} throughput regressed: min {Result.MinThroughputPerSecond:N0} /s vs baseline {Baseline.MinThroughputPerSecond:N0} /s (-{delta:F1}%)."; - } - } - - private static double? CalculateDurationRatio(double current, double? baseline) - { - if (!baseline.HasValue || baseline.Value <= 0d) - { - return null; - } - - return current / baseline.Value; - } - - private static double? CalculateThroughputRatio(double current, double? baseline) - { - if (!baseline.HasValue || baseline.Value <= 0d) - { - return null; - } - - if (current <= 0d) - { - return double.PositiveInfinity; - } - - return baseline.Value / current; - } -} +using System; +using System.Collections.Generic; +using StellaOps.Bench.Notify.Baseline; + +namespace StellaOps.Bench.Notify.Reporting; + +internal sealed class BenchmarkScenarioReport +{ + private const double DefaultRegressionLimit = 1.15d; + + public BenchmarkScenarioReport(ScenarioResult result, BaselineEntry? baseline, double? regressionLimit = null) + { + Result = result ?? throw new ArgumentNullException(nameof(result)); + Baseline = baseline; + RegressionLimit = regressionLimit is { } limit && limit > 0 ? limit : DefaultRegressionLimit; + DurationRegressionRatio = CalculateDurationRatio(result.MaxMs, baseline?.MaxMs); + ThroughputRegressionRatio = CalculateThroughputRatio(result.MinThroughputPerSecond, baseline?.MinThroughputPerSecond); + } + + public ScenarioResult Result { get; } + + public BaselineEntry? Baseline { get; } + + public double RegressionLimit { get; } + + public double? DurationRegressionRatio { get; } + + public double? 
ThroughputRegressionRatio { get; } + + public bool DurationRegressionBreached => + DurationRegressionRatio is { } ratio && + ratio >= RegressionLimit; + + public bool ThroughputRegressionBreached => + ThroughputRegressionRatio is { } ratio && + ratio >= RegressionLimit; + + public bool RegressionBreached => DurationRegressionBreached || ThroughputRegressionBreached; + + public IEnumerable BuildRegressionFailureMessages() + { + if (Baseline is null) + { + yield break; + } + + if (DurationRegressionBreached && DurationRegressionRatio is { } durationRatio) + { + var delta = (durationRatio - 1d) * 100d; + yield return $"{Result.Id} exceeded max duration budget: {Result.MaxMs:F2} ms vs baseline {Baseline.MaxMs:F2} ms (+{delta:F1}%)."; + } + + if (ThroughputRegressionBreached && ThroughputRegressionRatio is { } throughputRatio) + { + var delta = (throughputRatio - 1d) * 100d; + yield return $"{Result.Id} throughput regressed: min {Result.MinThroughputPerSecond:N0} /s vs baseline {Baseline.MinThroughputPerSecond:N0} /s (-{delta:F1}%)."; + } + } + + private static double? CalculateDurationRatio(double current, double? baseline) + { + if (!baseline.HasValue || baseline.Value <= 0d) + { + return null; + } + + return current / baseline.Value; + } + + private static double? CalculateThroughputRatio(double current, double? baseline) + { + if (!baseline.HasValue || baseline.Value <= 0d) + { + return null; + } + + if (current <= 0d) + { + return double.PositiveInfinity; + } + + return baseline.Value / current; + } +} diff --git a/src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Reporting/PrometheusWriter.cs b/src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Reporting/PrometheusWriter.cs similarity index 98% rename from src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Reporting/PrometheusWriter.cs rename to src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Reporting/PrometheusWriter.cs index d7c8f785..3ac772ee 100644 --- a/src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Reporting/PrometheusWriter.cs +++ b/src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/Reporting/PrometheusWriter.cs @@ -1,86 +1,86 @@ -using System; -using System.Collections.Generic; -using System.Globalization; -using System.IO; -using System.Text; - -namespace StellaOps.Bench.Notify.Reporting; - -internal static class PrometheusWriter -{ - public static void Write(string path, IReadOnlyList reports) - { - ArgumentException.ThrowIfNullOrWhiteSpace(path); - ArgumentNullException.ThrowIfNull(reports); - - var resolved = Path.GetFullPath(path); - var directory = Path.GetDirectoryName(resolved); - if (!string.IsNullOrEmpty(directory)) - { - Directory.CreateDirectory(directory); - } - - var builder = new StringBuilder(); - builder.AppendLine("# HELP notify_dispatch_bench_duration_ms Notify dispatch benchmark duration metrics (milliseconds)."); - builder.AppendLine("# TYPE notify_dispatch_bench_duration_ms gauge"); - builder.AppendLine("# HELP notify_dispatch_bench_throughput_per_sec Notify dispatch benchmark throughput metrics (deliveries per second)."); - builder.AppendLine("# TYPE notify_dispatch_bench_throughput_per_sec gauge"); - builder.AppendLine("# HELP notify_dispatch_bench_allocation_mb Notify dispatch benchmark allocation metrics (megabytes)."); - builder.AppendLine("# TYPE notify_dispatch_bench_allocation_mb gauge"); - - foreach (var report in reports) - { - var scenarioLabel = Escape(report.Result.Id); - AppendMetric(builder, "notify_dispatch_bench_mean_ms", scenarioLabel, report.Result.MeanMs); - 
AppendMetric(builder, "notify_dispatch_bench_p95_ms", scenarioLabel, report.Result.P95Ms); - AppendMetric(builder, "notify_dispatch_bench_max_ms", scenarioLabel, report.Result.MaxMs); - AppendMetric(builder, "notify_dispatch_bench_threshold_ms", scenarioLabel, report.Result.ThresholdMs); - - AppendMetric(builder, "notify_dispatch_bench_mean_throughput_per_sec", scenarioLabel, report.Result.MeanThroughputPerSecond); - AppendMetric(builder, "notify_dispatch_bench_min_throughput_per_sec", scenarioLabel, report.Result.MinThroughputPerSecond); - AppendMetric(builder, "notify_dispatch_bench_min_throughput_threshold_per_sec", scenarioLabel, report.Result.MinThroughputThresholdPerSecond); - - AppendMetric(builder, "notify_dispatch_bench_max_allocated_mb", scenarioLabel, report.Result.MaxAllocatedMb); - AppendMetric(builder, "notify_dispatch_bench_max_allocated_threshold_mb", scenarioLabel, report.Result.MaxAllocatedThresholdMb); - - if (report.Baseline is { } baseline) - { - AppendMetric(builder, "notify_dispatch_bench_baseline_max_ms", scenarioLabel, baseline.MaxMs); - AppendMetric(builder, "notify_dispatch_bench_baseline_mean_ms", scenarioLabel, baseline.MeanMs); - AppendMetric(builder, "notify_dispatch_bench_baseline_min_throughput_per_sec", scenarioLabel, baseline.MinThroughputPerSecond); - } - - if (report.DurationRegressionRatio is { } durationRatio) - { - AppendMetric(builder, "notify_dispatch_bench_duration_regression_ratio", scenarioLabel, durationRatio); - } - - if (report.ThroughputRegressionRatio is { } throughputRatio) - { - AppendMetric(builder, "notify_dispatch_bench_throughput_regression_ratio", scenarioLabel, throughputRatio); - } - - AppendMetric(builder, "notify_dispatch_bench_regression_limit", scenarioLabel, report.RegressionLimit); - AppendMetric(builder, "notify_dispatch_bench_regression_breached", scenarioLabel, report.RegressionBreached ? 1 : 0); - } - - File.WriteAllText(resolved, builder.ToString(), Encoding.UTF8); - } - - private static void AppendMetric(StringBuilder builder, string metric, string scenario, double? 
value) - { - if (!value.HasValue) - { - return; - } - - builder.Append(metric); - builder.Append("{scenario=\""); - builder.Append(scenario); - builder.Append("\"} "); - builder.AppendLine(value.Value.ToString("G17", CultureInfo.InvariantCulture)); - } - - private static string Escape(string value) => - value.Replace("\\", "\\\\", StringComparison.Ordinal).Replace("\"", "\\\"", StringComparison.Ordinal); -} +using System; +using System.Collections.Generic; +using System.Globalization; +using System.IO; +using System.Text; + +namespace StellaOps.Bench.Notify.Reporting; + +internal static class PrometheusWriter +{ + public static void Write(string path, IReadOnlyList reports) + { + ArgumentException.ThrowIfNullOrWhiteSpace(path); + ArgumentNullException.ThrowIfNull(reports); + + var resolved = Path.GetFullPath(path); + var directory = Path.GetDirectoryName(resolved); + if (!string.IsNullOrEmpty(directory)) + { + Directory.CreateDirectory(directory); + } + + var builder = new StringBuilder(); + builder.AppendLine("# HELP notify_dispatch_bench_duration_ms Notify dispatch benchmark duration metrics (milliseconds)."); + builder.AppendLine("# TYPE notify_dispatch_bench_duration_ms gauge"); + builder.AppendLine("# HELP notify_dispatch_bench_throughput_per_sec Notify dispatch benchmark throughput metrics (deliveries per second)."); + builder.AppendLine("# TYPE notify_dispatch_bench_throughput_per_sec gauge"); + builder.AppendLine("# HELP notify_dispatch_bench_allocation_mb Notify dispatch benchmark allocation metrics (megabytes)."); + builder.AppendLine("# TYPE notify_dispatch_bench_allocation_mb gauge"); + + foreach (var report in reports) + { + var scenarioLabel = Escape(report.Result.Id); + AppendMetric(builder, "notify_dispatch_bench_mean_ms", scenarioLabel, report.Result.MeanMs); + AppendMetric(builder, "notify_dispatch_bench_p95_ms", scenarioLabel, report.Result.P95Ms); + AppendMetric(builder, "notify_dispatch_bench_max_ms", scenarioLabel, report.Result.MaxMs); + AppendMetric(builder, "notify_dispatch_bench_threshold_ms", scenarioLabel, report.Result.ThresholdMs); + + AppendMetric(builder, "notify_dispatch_bench_mean_throughput_per_sec", scenarioLabel, report.Result.MeanThroughputPerSecond); + AppendMetric(builder, "notify_dispatch_bench_min_throughput_per_sec", scenarioLabel, report.Result.MinThroughputPerSecond); + AppendMetric(builder, "notify_dispatch_bench_min_throughput_threshold_per_sec", scenarioLabel, report.Result.MinThroughputThresholdPerSecond); + + AppendMetric(builder, "notify_dispatch_bench_max_allocated_mb", scenarioLabel, report.Result.MaxAllocatedMb); + AppendMetric(builder, "notify_dispatch_bench_max_allocated_threshold_mb", scenarioLabel, report.Result.MaxAllocatedThresholdMb); + + if (report.Baseline is { } baseline) + { + AppendMetric(builder, "notify_dispatch_bench_baseline_max_ms", scenarioLabel, baseline.MaxMs); + AppendMetric(builder, "notify_dispatch_bench_baseline_mean_ms", scenarioLabel, baseline.MeanMs); + AppendMetric(builder, "notify_dispatch_bench_baseline_min_throughput_per_sec", scenarioLabel, baseline.MinThroughputPerSecond); + } + + if (report.DurationRegressionRatio is { } durationRatio) + { + AppendMetric(builder, "notify_dispatch_bench_duration_regression_ratio", scenarioLabel, durationRatio); + } + + if (report.ThroughputRegressionRatio is { } throughputRatio) + { + AppendMetric(builder, "notify_dispatch_bench_throughput_regression_ratio", scenarioLabel, throughputRatio); + } + + AppendMetric(builder, "notify_dispatch_bench_regression_limit", 
scenarioLabel, report.RegressionLimit); + AppendMetric(builder, "notify_dispatch_bench_regression_breached", scenarioLabel, report.RegressionBreached ? 1 : 0); + } + + File.WriteAllText(resolved, builder.ToString(), Encoding.UTF8); + } + + private static void AppendMetric(StringBuilder builder, string metric, string scenario, double? value) + { + if (!value.HasValue) + { + return; + } + + builder.Append(metric); + builder.Append("{scenario=\""); + builder.Append(scenario); + builder.Append("\"} "); + builder.AppendLine(value.Value.ToString("G17", CultureInfo.InvariantCulture)); + } + + private static string Escape(string value) => + value.Replace("\\", "\\\\", StringComparison.Ordinal).Replace("\"", "\\\"", StringComparison.Ordinal); +} diff --git a/src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/ScenarioExecutionResult.cs b/src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/ScenarioExecutionResult.cs similarity index 96% rename from src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/ScenarioExecutionResult.cs rename to src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/ScenarioExecutionResult.cs index f205ec88..2787b779 100644 --- a/src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/ScenarioExecutionResult.cs +++ b/src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/ScenarioExecutionResult.cs @@ -1,17 +1,17 @@ -using System.Collections.Generic; - -namespace StellaOps.Bench.Notify; - -internal sealed record ScenarioExecutionResult( - IReadOnlyList Durations, - IReadOnlyList Throughputs, - IReadOnlyList AllocatedMb, - int TotalEvents, - int TotalRules, - int ActionsPerRule, - double AverageMatchesPerEvent, - int MinMatchesPerEvent, - int MaxMatchesPerEvent, - double AverageDeliveriesPerEvent, - int TotalMatches, - int TotalDeliveries); +using System.Collections.Generic; + +namespace StellaOps.Bench.Notify; + +internal sealed record ScenarioExecutionResult( + IReadOnlyList Durations, + IReadOnlyList Throughputs, + IReadOnlyList AllocatedMb, + int TotalEvents, + int TotalRules, + int ActionsPerRule, + double AverageMatchesPerEvent, + int MinMatchesPerEvent, + int MaxMatchesPerEvent, + double AverageDeliveriesPerEvent, + int TotalMatches, + int TotalDeliveries); diff --git a/src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/ScenarioResult.cs b/src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/ScenarioResult.cs similarity index 97% rename from src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/ScenarioResult.cs rename to src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/ScenarioResult.cs index 8574ed8d..ceb33422 100644 --- a/src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/ScenarioResult.cs +++ b/src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/ScenarioResult.cs @@ -1,46 +1,46 @@ -using System.Globalization; - -namespace StellaOps.Bench.Notify; - -internal sealed record ScenarioResult( - string Id, - string Label, - int Iterations, - int TotalEvents, - int TotalRules, - int ActionsPerRule, - double AverageMatchesPerEvent, - int MinMatchesPerEvent, - int MaxMatchesPerEvent, - double AverageDeliveriesPerEvent, - int TotalDeliveries, - double MeanMs, - double P95Ms, - double MaxMs, - double MeanThroughputPerSecond, - double MinThroughputPerSecond, - double MaxAllocatedMb, - double? ThresholdMs, - double? MinThroughputThresholdPerSecond, - double? MaxAllocatedThresholdMb) -{ - public string IdColumn => Id.Length <= 28 ? 
Id.PadRight(28) : Id[..28]; - - public string EventsColumn => TotalEvents.ToString("N0", CultureInfo.InvariantCulture).PadLeft(12); - - public string RulesColumn => TotalRules.ToString("N0", CultureInfo.InvariantCulture).PadLeft(9); - - public string MatchesColumn => AverageMatchesPerEvent.ToString("F1", CultureInfo.InvariantCulture).PadLeft(8); - - public string DeliveriesColumn => AverageDeliveriesPerEvent.ToString("F1", CultureInfo.InvariantCulture).PadLeft(10); - - public string MeanColumn => MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10); - - public string P95Column => P95Ms.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10); - - public string MaxColumn => MaxMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10); - - public string MinThroughputColumn => (MinThroughputPerSecond / 1_000d).ToString("F2", CultureInfo.InvariantCulture).PadLeft(11); - - public string AllocatedColumn => MaxAllocatedMb.ToString("F2", CultureInfo.InvariantCulture).PadLeft(9); -} +using System.Globalization; + +namespace StellaOps.Bench.Notify; + +internal sealed record ScenarioResult( + string Id, + string Label, + int Iterations, + int TotalEvents, + int TotalRules, + int ActionsPerRule, + double AverageMatchesPerEvent, + int MinMatchesPerEvent, + int MaxMatchesPerEvent, + double AverageDeliveriesPerEvent, + int TotalDeliveries, + double MeanMs, + double P95Ms, + double MaxMs, + double MeanThroughputPerSecond, + double MinThroughputPerSecond, + double MaxAllocatedMb, + double? ThresholdMs, + double? MinThroughputThresholdPerSecond, + double? MaxAllocatedThresholdMb) +{ + public string IdColumn => Id.Length <= 28 ? Id.PadRight(28) : Id[..28]; + + public string EventsColumn => TotalEvents.ToString("N0", CultureInfo.InvariantCulture).PadLeft(12); + + public string RulesColumn => TotalRules.ToString("N0", CultureInfo.InvariantCulture).PadLeft(9); + + public string MatchesColumn => AverageMatchesPerEvent.ToString("F1", CultureInfo.InvariantCulture).PadLeft(8); + + public string DeliveriesColumn => AverageDeliveriesPerEvent.ToString("F1", CultureInfo.InvariantCulture).PadLeft(10); + + public string MeanColumn => MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10); + + public string P95Column => P95Ms.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10); + + public string MaxColumn => MaxMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10); + + public string MinThroughputColumn => (MinThroughputPerSecond / 1_000d).ToString("F2", CultureInfo.InvariantCulture).PadLeft(11); + + public string AllocatedColumn => MaxAllocatedMb.ToString("F2", CultureInfo.InvariantCulture).PadLeft(9); +} diff --git a/src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/ScenarioStatistics.cs b/src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/ScenarioStatistics.cs similarity index 96% rename from src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/ScenarioStatistics.cs rename to src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/ScenarioStatistics.cs index b15ed639..b8bd2d93 100644 --- a/src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/ScenarioStatistics.cs +++ b/src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/ScenarioStatistics.cs @@ -1,87 +1,87 @@ -using System; -using System.Collections.Generic; - -namespace StellaOps.Bench.Notify; - -internal readonly record struct DurationStatistics(double MeanMs, double P95Ms, double MaxMs) -{ - public static DurationStatistics From(IReadOnlyList durations) - { - if (durations.Count == 0) - { - return new DurationStatistics(0, 0, 0); - 
} - - var sorted = durations.ToArray(); - Array.Sort(sorted); - - var total = 0d; - foreach (var value in durations) - { - total += value; - } - - var mean = total / durations.Count; - var p95 = Percentile(sorted, 95); - var max = sorted[^1]; - - return new DurationStatistics(mean, p95, max); - } - - private static double Percentile(IReadOnlyList sorted, double percentile) - { - if (sorted.Count == 0) - { - return 0; - } - - var rank = (percentile / 100d) * (sorted.Count - 1); - var lower = (int)Math.Floor(rank); - var upper = (int)Math.Ceiling(rank); - var weight = rank - lower; - - if (upper >= sorted.Count) - { - return sorted[lower]; - } - - return sorted[lower] + weight * (sorted[upper] - sorted[lower]); - } -} - -internal readonly record struct ThroughputStatistics(double MeanPerSecond, double MinPerSecond) -{ - public static ThroughputStatistics From(IReadOnlyList values) - { - if (values.Count == 0) - { - return new ThroughputStatistics(0, 0); - } - - var total = 0d; - var min = double.MaxValue; - - foreach (var value in values) - { - total += value; - min = Math.Min(min, value); - } - - var mean = total / values.Count; - return new ThroughputStatistics(mean, min); - } -} - -internal readonly record struct AllocationStatistics(double MaxAllocatedMb) -{ - public static AllocationStatistics From(IReadOnlyList values) - { - var max = 0d; - foreach (var value in values) - { - max = Math.Max(max, value); - } - - return new AllocationStatistics(max); - } -} +using System; +using System.Collections.Generic; + +namespace StellaOps.Bench.Notify; + +internal readonly record struct DurationStatistics(double MeanMs, double P95Ms, double MaxMs) +{ + public static DurationStatistics From(IReadOnlyList durations) + { + if (durations.Count == 0) + { + return new DurationStatistics(0, 0, 0); + } + + var sorted = durations.ToArray(); + Array.Sort(sorted); + + var total = 0d; + foreach (var value in durations) + { + total += value; + } + + var mean = total / durations.Count; + var p95 = Percentile(sorted, 95); + var max = sorted[^1]; + + return new DurationStatistics(mean, p95, max); + } + + private static double Percentile(IReadOnlyList sorted, double percentile) + { + if (sorted.Count == 0) + { + return 0; + } + + var rank = (percentile / 100d) * (sorted.Count - 1); + var lower = (int)Math.Floor(rank); + var upper = (int)Math.Ceiling(rank); + var weight = rank - lower; + + if (upper >= sorted.Count) + { + return sorted[lower]; + } + + return sorted[lower] + weight * (sorted[upper] - sorted[lower]); + } +} + +internal readonly record struct ThroughputStatistics(double MeanPerSecond, double MinPerSecond) +{ + public static ThroughputStatistics From(IReadOnlyList values) + { + if (values.Count == 0) + { + return new ThroughputStatistics(0, 0); + } + + var total = 0d; + var min = double.MaxValue; + + foreach (var value in values) + { + total += value; + min = Math.Min(min, value); + } + + var mean = total / values.Count; + return new ThroughputStatistics(mean, min); + } +} + +internal readonly record struct AllocationStatistics(double MaxAllocatedMb) +{ + public static AllocationStatistics From(IReadOnlyList values) + { + var max = 0d; + foreach (var value in values) + { + max = Math.Max(max, value); + } + + return new AllocationStatistics(max); + } +} diff --git a/src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/StellaOps.Bench.Notify.csproj b/src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/StellaOps.Bench.Notify.csproj similarity index 67% rename from 
src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/StellaOps.Bench.Notify.csproj rename to src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/StellaOps.Bench.Notify.csproj index 55c3f3a2..c34516a3 100644 --- a/src/StellaOps.Bench/Notify/StellaOps.Bench.Notify/StellaOps.Bench.Notify.csproj +++ b/src/Bench/StellaOps.Bench/Notify/StellaOps.Bench.Notify/StellaOps.Bench.Notify.csproj @@ -1,3 +1,4 @@ + Exe @@ -9,6 +10,6 @@ - + - + \ No newline at end of file diff --git a/src/StellaOps.Bench/Notify/baseline.csv b/src/Bench/StellaOps.Bench/Notify/baseline.csv similarity index 99% rename from src/StellaOps.Bench/Notify/baseline.csv rename to src/Bench/StellaOps.Bench/Notify/baseline.csv index d79c2c45..0030cde3 100644 --- a/src/StellaOps.Bench/Notify/baseline.csv +++ b/src/Bench/StellaOps.Bench/Notify/baseline.csv @@ -1,4 +1,4 @@ -scenario,iterations,events,deliveries,mean_ms,p95_ms,max_ms,mean_throughput_per_sec,min_throughput_per_sec,max_allocated_mb -notify_dispatch_density_05,5,5000,20000,3.4150,4.1722,4.3039,6053938.5172,4646948.1168,0.0000 -notify_dispatch_density_20,5,7500,675000,24.2274,25.8517,26.0526,27923335.5855,25909122.3141,0.0000 -notify_dispatch_density_40,5,10000,4000080,138.7387,147.7174,149.1124,28916602.9214,26825938.0172,0.0000 +scenario,iterations,events,deliveries,mean_ms,p95_ms,max_ms,mean_throughput_per_sec,min_throughput_per_sec,max_allocated_mb +notify_dispatch_density_05,5,5000,20000,3.4150,4.1722,4.3039,6053938.5172,4646948.1168,0.0000 +notify_dispatch_density_20,5,7500,675000,24.2274,25.8517,26.0526,27923335.5855,25909122.3141,0.0000 +notify_dispatch_density_40,5,10000,4000080,138.7387,147.7174,149.1124,28916602.9214,26825938.0172,0.0000 diff --git a/src/StellaOps.Bench/Notify/config.json b/src/Bench/StellaOps.Bench/Notify/config.json similarity index 96% rename from src/StellaOps.Bench/Notify/config.json rename to src/Bench/StellaOps.Bench/Notify/config.json index ecd9c5b5..30f2593e 100644 --- a/src/StellaOps.Bench/Notify/config.json +++ b/src/Bench/StellaOps.Bench/Notify/config.json @@ -1,47 +1,47 @@ -{ - "thresholdMs": 1200, - "minThroughputPerSecond": 10000, - "maxAllocatedMb": 512, - "iterations": 5, - "scenarios": [ - { - "id": "notify_dispatch_density_05", - "label": "50 rules / 5% fanout", - "eventCount": 5000, - "ruleCount": 50, - "actionsPerRule": 2, - "matchRate": 0.05, - "tenantCount": 4, - "channelCount": 12, - "thresholdMs": 400, - "minThroughputPerSecond": 15000, - "maxAllocatedMb": 128 - }, - { - "id": "notify_dispatch_density_20", - "label": "150 rules / 20% fanout", - "eventCount": 7500, - "ruleCount": 150, - "actionsPerRule": 3, - "matchRate": 0.2, - "tenantCount": 6, - "channelCount": 24, - "thresholdMs": 650, - "minThroughputPerSecond": 30000, - "maxAllocatedMb": 192 - }, - { - "id": "notify_dispatch_density_40", - "label": "300 rules / 40% fanout", - "eventCount": 10000, - "ruleCount": 300, - "actionsPerRule": 4, - "matchRate": 0.4, - "tenantCount": 8, - "channelCount": 32, - "thresholdMs": 900, - "minThroughputPerSecond": 45000, - "maxAllocatedMb": 256 - } - ] -} +{ + "thresholdMs": 1200, + "minThroughputPerSecond": 10000, + "maxAllocatedMb": 512, + "iterations": 5, + "scenarios": [ + { + "id": "notify_dispatch_density_05", + "label": "50 rules / 5% fanout", + "eventCount": 5000, + "ruleCount": 50, + "actionsPerRule": 2, + "matchRate": 0.05, + "tenantCount": 4, + "channelCount": 12, + "thresholdMs": 400, + "minThroughputPerSecond": 15000, + "maxAllocatedMb": 128 + }, + { + "id": "notify_dispatch_density_20", + "label": "150 rules / 20% 
fanout", + "eventCount": 7500, + "ruleCount": 150, + "actionsPerRule": 3, + "matchRate": 0.2, + "tenantCount": 6, + "channelCount": 24, + "thresholdMs": 650, + "minThroughputPerSecond": 30000, + "maxAllocatedMb": 192 + }, + { + "id": "notify_dispatch_density_40", + "label": "300 rules / 40% fanout", + "eventCount": 10000, + "ruleCount": 300, + "actionsPerRule": 4, + "matchRate": 0.4, + "tenantCount": 8, + "channelCount": 32, + "thresholdMs": 900, + "minThroughputPerSecond": 45000, + "maxAllocatedMb": 256 + } + ] +} diff --git a/src/StellaOps.Bench/PolicyEngine/README.md b/src/Bench/StellaOps.Bench/PolicyEngine/README.md similarity index 87% rename from src/StellaOps.Bench/PolicyEngine/README.md rename to src/Bench/StellaOps.Bench/PolicyEngine/README.md index 9dac29d2..bd09e91f 100644 --- a/src/StellaOps.Bench/PolicyEngine/README.md +++ b/src/Bench/StellaOps.Bench/PolicyEngine/README.md @@ -14,7 +14,7 @@ Synthetic workload that measures end-to-end policy evaluation throughput against ```bash dotnet run \ - --project src/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/StellaOps.Bench.PolicyEngine.csproj \ + --project src/Bench/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/StellaOps.Bench.PolicyEngine.csproj \ -- \ --csv out/policy-bench.csv \ --json out/policy-bench.json \ diff --git a/src/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Baseline/BaselineEntry.cs b/src/Bench/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Baseline/BaselineEntry.cs similarity index 96% rename from src/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Baseline/BaselineEntry.cs rename to src/Bench/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Baseline/BaselineEntry.cs index 8308f42a..71929937 100644 --- a/src/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Baseline/BaselineEntry.cs +++ b/src/Bench/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Baseline/BaselineEntry.cs @@ -1,12 +1,12 @@ -namespace StellaOps.Bench.PolicyEngine.Baseline; - -internal sealed record BaselineEntry( - string ScenarioId, - int Iterations, - int FindingCount, - double MeanMs, - double P95Ms, - double MaxMs, - double MeanThroughputPerSecond, - double MinThroughputPerSecond, - double MaxAllocatedMb); +namespace StellaOps.Bench.PolicyEngine.Baseline; + +internal sealed record BaselineEntry( + string ScenarioId, + int Iterations, + int FindingCount, + double MeanMs, + double P95Ms, + double MaxMs, + double MeanThroughputPerSecond, + double MinThroughputPerSecond, + double MaxAllocatedMb); diff --git a/src/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Baseline/BaselineLoader.cs b/src/Bench/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Baseline/BaselineLoader.cs similarity index 97% rename from src/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Baseline/BaselineLoader.cs rename to src/Bench/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Baseline/BaselineLoader.cs index d5bd4a99..6ab36a0a 100644 --- a/src/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Baseline/BaselineLoader.cs +++ b/src/Bench/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Baseline/BaselineLoader.cs @@ -1,86 +1,86 @@ -using System.Globalization; - -namespace StellaOps.Bench.PolicyEngine.Baseline; - -internal static class BaselineLoader -{ - public static async Task> LoadAsync(string path, CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(path); - - var resolved = Path.GetFullPath(path); - if 
(!File.Exists(resolved)) - { - return new Dictionary(StringComparer.OrdinalIgnoreCase); - } - - var result = new Dictionary(StringComparer.OrdinalIgnoreCase); - - await using var stream = new FileStream(resolved, FileMode.Open, FileAccess.Read, FileShare.Read); - using var reader = new StreamReader(stream); - - var lineNumber = 0; - while (true) - { - cancellationToken.ThrowIfCancellationRequested(); - - var line = await reader.ReadLineAsync().ConfigureAwait(false); - if (line is null) - { - break; - } - - lineNumber++; - if (lineNumber == 1) - { - continue; // header - } - - if (string.IsNullOrWhiteSpace(line)) - { - continue; - } - - var parts = line.Split(',', StringSplitOptions.TrimEntries); - if (parts.Length < 9) - { - throw new InvalidOperationException($"Baseline '{resolved}' line {lineNumber} is invalid (expected 9 columns, found {parts.Length})."); - } - - var entry = new BaselineEntry( - ScenarioId: parts[0], - Iterations: ParseInt(parts[1], resolved, lineNumber), - FindingCount: ParseInt(parts[2], resolved, lineNumber), - MeanMs: ParseDouble(parts[3], resolved, lineNumber), - P95Ms: ParseDouble(parts[4], resolved, lineNumber), - MaxMs: ParseDouble(parts[5], resolved, lineNumber), - MeanThroughputPerSecond: ParseDouble(parts[6], resolved, lineNumber), - MinThroughputPerSecond: ParseDouble(parts[7], resolved, lineNumber), - MaxAllocatedMb: ParseDouble(parts[8], resolved, lineNumber)); - - result[entry.ScenarioId] = entry; - } - - return result; - } - - private static int ParseInt(string value, string file, int line) - { - if (int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var result)) - { - return result; - } - - throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid integer '{value}'."); - } - - private static double ParseDouble(string value, string file, int line) - { - if (double.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out var result)) - { - return result; - } - - throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid number '{value}'."); - } -} +using System.Globalization; + +namespace StellaOps.Bench.PolicyEngine.Baseline; + +internal static class BaselineLoader +{ + public static async Task> LoadAsync(string path, CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(path); + + var resolved = Path.GetFullPath(path); + if (!File.Exists(resolved)) + { + return new Dictionary(StringComparer.OrdinalIgnoreCase); + } + + var result = new Dictionary(StringComparer.OrdinalIgnoreCase); + + await using var stream = new FileStream(resolved, FileMode.Open, FileAccess.Read, FileShare.Read); + using var reader = new StreamReader(stream); + + var lineNumber = 0; + while (true) + { + cancellationToken.ThrowIfCancellationRequested(); + + var line = await reader.ReadLineAsync().ConfigureAwait(false); + if (line is null) + { + break; + } + + lineNumber++; + if (lineNumber == 1) + { + continue; // header + } + + if (string.IsNullOrWhiteSpace(line)) + { + continue; + } + + var parts = line.Split(',', StringSplitOptions.TrimEntries); + if (parts.Length < 9) + { + throw new InvalidOperationException($"Baseline '{resolved}' line {lineNumber} is invalid (expected 9 columns, found {parts.Length})."); + } + + var entry = new BaselineEntry( + ScenarioId: parts[0], + Iterations: ParseInt(parts[1], resolved, lineNumber), + FindingCount: ParseInt(parts[2], resolved, lineNumber), + MeanMs: ParseDouble(parts[3], resolved, lineNumber), + P95Ms: 
ParseDouble(parts[4], resolved, lineNumber), + MaxMs: ParseDouble(parts[5], resolved, lineNumber), + MeanThroughputPerSecond: ParseDouble(parts[6], resolved, lineNumber), + MinThroughputPerSecond: ParseDouble(parts[7], resolved, lineNumber), + MaxAllocatedMb: ParseDouble(parts[8], resolved, lineNumber)); + + result[entry.ScenarioId] = entry; + } + + return result; + } + + private static int ParseInt(string value, string file, int line) + { + if (int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var result)) + { + return result; + } + + throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid integer '{value}'."); + } + + private static double ParseDouble(string value, string file, int line) + { + if (double.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out var result)) + { + return result; + } + + throw new InvalidOperationException($"Baseline '{file}' line {line} contains an invalid number '{value}'."); + } +} diff --git a/src/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/BenchmarkConfig.cs b/src/Bench/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/BenchmarkConfig.cs similarity index 96% rename from src/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/BenchmarkConfig.cs rename to src/Bench/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/BenchmarkConfig.cs index a325b784..a1f9fd6c 100644 --- a/src/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/BenchmarkConfig.cs +++ b/src/Bench/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/BenchmarkConfig.cs @@ -1,155 +1,155 @@ -using System.Text.Json; -using System.Text.Json.Serialization; - -namespace StellaOps.Bench.PolicyEngine; - -internal sealed record BenchmarkConfig( - double? ThresholdMs, - double? MinThroughputPerSecond, - double? MaxAllocatedMb, - int? Iterations, - IReadOnlyList Scenarios) -{ - public static async Task LoadAsync(string path) - { - ArgumentException.ThrowIfNullOrWhiteSpace(path); - - var resolved = Path.GetFullPath(path); - if (!File.Exists(resolved)) - { - throw new FileNotFoundException($"Benchmark configuration '{resolved}' was not found.", resolved); - } - - await using var stream = File.OpenRead(resolved); - var model = await JsonSerializer.DeserializeAsync( - stream, - new JsonSerializerOptions(JsonSerializerDefaults.Web) - { - PropertyNameCaseInsensitive = true, - ReadCommentHandling = JsonCommentHandling.Skip, - AllowTrailingCommas = true - }).ConfigureAwait(false); - - if (model is null) - { - throw new InvalidOperationException($"Benchmark configuration '{resolved}' could not be parsed."); - } - - if (model.Scenarios.Count == 0) - { - throw new InvalidOperationException($"Benchmark configuration '{resolved}' does not contain any scenarios."); - } - - foreach (var scenario in model.Scenarios) - { - scenario.Validate(); - } - - return new BenchmarkConfig( - model.ThresholdMs, - model.MinThroughputPerSecond, - model.MaxAllocatedMb, - model.Iterations, - model.Scenarios); - } - - private sealed class BenchmarkConfigModel - { - [JsonPropertyName("thresholdMs")] - public double? ThresholdMs { get; init; } - - [JsonPropertyName("minThroughputPerSecond")] - public double? MinThroughputPerSecond { get; init; } - - [JsonPropertyName("maxAllocatedMb")] - public double? MaxAllocatedMb { get; init; } - - [JsonPropertyName("iterations")] - public int? 
Iterations { get; init; } - - [JsonPropertyName("scenarios")] - public List Scenarios { get; init; } = new(); - } -} - -internal sealed class PolicyScenarioConfig -{ - private const int DefaultComponentCount = 100_000; - private const int DefaultAdvisoriesPerComponent = 10; - - [JsonPropertyName("id")] - public string? Id { get; init; } - - [JsonPropertyName("label")] - public string? Label { get; init; } - - [JsonPropertyName("policyPath")] - public string PolicyPath { get; init; } = "docs/examples/policies/baseline.yaml"; - - [JsonPropertyName("scoringConfig")] - public string? ScoringConfigPath { get; init; } - - [JsonPropertyName("componentCount")] - public int ComponentCount { get; init; } = DefaultComponentCount; - - [JsonPropertyName("advisoriesPerComponent")] - public int AdvisoriesPerComponent { get; init; } = DefaultAdvisoriesPerComponent; - - [JsonPropertyName("totalFindings")] - public int? TotalFindings { get; init; } - - [JsonPropertyName("seed")] - public int? Seed { get; init; } - - [JsonPropertyName("thresholdMs")] - public double? ThresholdMs { get; init; } - - [JsonPropertyName("minThroughputPerSecond")] - public double? MinThroughputPerSecond { get; init; } - - [JsonPropertyName("maxAllocatedMb")] - public double? MaxAllocatedMb { get; init; } - - public string ScenarioId => string.IsNullOrWhiteSpace(Id) ? "policy_eval" : Id.Trim(); - - public int ResolveFindingCount() - { - if (TotalFindings is { } findings) - { - if (findings <= 0) - { - throw new InvalidOperationException($"Scenario '{ScenarioId}' requires totalFindings > 0."); - } - - return findings; - } - - if (ComponentCount <= 0) - { - throw new InvalidOperationException($"Scenario '{ScenarioId}' requires componentCount > 0."); - } - - if (AdvisoriesPerComponent <= 0) - { - throw new InvalidOperationException($"Scenario '{ScenarioId}' requires advisoriesPerComponent > 0."); - } - - checked - { - var total = ComponentCount * AdvisoriesPerComponent; - return total; - } - } - - public int ResolveSeed() => Seed ?? 2025_10_26; - - public void Validate() - { - if (string.IsNullOrWhiteSpace(PolicyPath)) - { - throw new InvalidOperationException($"Scenario '{ScenarioId}' requires a policyPath."); - } - - ResolveFindingCount(); - } -} +using System.Text.Json; +using System.Text.Json.Serialization; + +namespace StellaOps.Bench.PolicyEngine; + +internal sealed record BenchmarkConfig( + double? ThresholdMs, + double? MinThroughputPerSecond, + double? MaxAllocatedMb, + int? 
Iterations, + IReadOnlyList Scenarios) +{ + public static async Task LoadAsync(string path) + { + ArgumentException.ThrowIfNullOrWhiteSpace(path); + + var resolved = Path.GetFullPath(path); + if (!File.Exists(resolved)) + { + throw new FileNotFoundException($"Benchmark configuration '{resolved}' was not found.", resolved); + } + + await using var stream = File.OpenRead(resolved); + var model = await JsonSerializer.DeserializeAsync( + stream, + new JsonSerializerOptions(JsonSerializerDefaults.Web) + { + PropertyNameCaseInsensitive = true, + ReadCommentHandling = JsonCommentHandling.Skip, + AllowTrailingCommas = true + }).ConfigureAwait(false); + + if (model is null) + { + throw new InvalidOperationException($"Benchmark configuration '{resolved}' could not be parsed."); + } + + if (model.Scenarios.Count == 0) + { + throw new InvalidOperationException($"Benchmark configuration '{resolved}' does not contain any scenarios."); + } + + foreach (var scenario in model.Scenarios) + { + scenario.Validate(); + } + + return new BenchmarkConfig( + model.ThresholdMs, + model.MinThroughputPerSecond, + model.MaxAllocatedMb, + model.Iterations, + model.Scenarios); + } + + private sealed class BenchmarkConfigModel + { + [JsonPropertyName("thresholdMs")] + public double? ThresholdMs { get; init; } + + [JsonPropertyName("minThroughputPerSecond")] + public double? MinThroughputPerSecond { get; init; } + + [JsonPropertyName("maxAllocatedMb")] + public double? MaxAllocatedMb { get; init; } + + [JsonPropertyName("iterations")] + public int? Iterations { get; init; } + + [JsonPropertyName("scenarios")] + public List Scenarios { get; init; } = new(); + } +} + +internal sealed class PolicyScenarioConfig +{ + private const int DefaultComponentCount = 100_000; + private const int DefaultAdvisoriesPerComponent = 10; + + [JsonPropertyName("id")] + public string? Id { get; init; } + + [JsonPropertyName("label")] + public string? Label { get; init; } + + [JsonPropertyName("policyPath")] + public string PolicyPath { get; init; } = "docs/examples/policies/baseline.yaml"; + + [JsonPropertyName("scoringConfig")] + public string? ScoringConfigPath { get; init; } + + [JsonPropertyName("componentCount")] + public int ComponentCount { get; init; } = DefaultComponentCount; + + [JsonPropertyName("advisoriesPerComponent")] + public int AdvisoriesPerComponent { get; init; } = DefaultAdvisoriesPerComponent; + + [JsonPropertyName("totalFindings")] + public int? TotalFindings { get; init; } + + [JsonPropertyName("seed")] + public int? Seed { get; init; } + + [JsonPropertyName("thresholdMs")] + public double? ThresholdMs { get; init; } + + [JsonPropertyName("minThroughputPerSecond")] + public double? MinThroughputPerSecond { get; init; } + + [JsonPropertyName("maxAllocatedMb")] + public double? MaxAllocatedMb { get; init; } + + public string ScenarioId => string.IsNullOrWhiteSpace(Id) ? 
"policy_eval" : Id.Trim(); + + public int ResolveFindingCount() + { + if (TotalFindings is { } findings) + { + if (findings <= 0) + { + throw new InvalidOperationException($"Scenario '{ScenarioId}' requires totalFindings > 0."); + } + + return findings; + } + + if (ComponentCount <= 0) + { + throw new InvalidOperationException($"Scenario '{ScenarioId}' requires componentCount > 0."); + } + + if (AdvisoriesPerComponent <= 0) + { + throw new InvalidOperationException($"Scenario '{ScenarioId}' requires advisoriesPerComponent > 0."); + } + + checked + { + var total = ComponentCount * AdvisoriesPerComponent; + return total; + } + } + + public int ResolveSeed() => Seed ?? 2025_10_26; + + public void Validate() + { + if (string.IsNullOrWhiteSpace(PolicyPath)) + { + throw new InvalidOperationException($"Scenario '{ScenarioId}' requires a policyPath."); + } + + ResolveFindingCount(); + } +} diff --git a/src/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/PathUtilities.cs b/src/Bench/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/PathUtilities.cs similarity index 96% rename from src/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/PathUtilities.cs rename to src/Bench/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/PathUtilities.cs index c16b67af..334e9878 100644 --- a/src/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/PathUtilities.cs +++ b/src/Bench/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/PathUtilities.cs @@ -1,15 +1,15 @@ -namespace StellaOps.Bench.PolicyEngine; - -internal static class PathUtilities -{ - public static bool IsWithinRoot(string root, string candidate) - { - var relative = Path.GetRelativePath(root, candidate); - if (string.IsNullOrEmpty(relative) || relative == ".") - { - return true; - } - - return !relative.StartsWith("..", StringComparison.Ordinal) && !Path.IsPathRooted(relative); - } -} +namespace StellaOps.Bench.PolicyEngine; + +internal static class PathUtilities +{ + public static bool IsWithinRoot(string root, string candidate) + { + var relative = Path.GetRelativePath(root, candidate); + if (string.IsNullOrEmpty(relative) || relative == ".") + { + return true; + } + + return !relative.StartsWith("..", StringComparison.Ordinal) && !Path.IsPathRooted(relative); + } +} diff --git a/src/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/PolicyScenarioRunner.cs b/src/Bench/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/PolicyScenarioRunner.cs similarity index 97% rename from src/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/PolicyScenarioRunner.cs rename to src/Bench/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/PolicyScenarioRunner.cs index 16afe6a6..24968795 100644 --- a/src/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/PolicyScenarioRunner.cs +++ b/src/Bench/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/PolicyScenarioRunner.cs @@ -1,249 +1,249 @@ -using System.Collections.Immutable; -using System.Diagnostics; -using System.Globalization; -using System.Linq; -using StellaOps.Policy; - -namespace StellaOps.Bench.PolicyEngine; - -internal sealed class PolicyScenarioRunner -{ - private readonly PolicyScenarioConfig _config; - private readonly PolicyDocument _document; - private readonly PolicyScoringConfig _scoringConfig; - private readonly PolicyFinding[] _findings; - - public PolicyScenarioRunner(PolicyScenarioConfig config, string repoRoot) - { - _config = config ?? 
throw new ArgumentNullException(nameof(config)); - ArgumentException.ThrowIfNullOrWhiteSpace(repoRoot); - - var policyPath = ResolvePathWithinRoot(repoRoot, config.PolicyPath); - var policyContent = File.ReadAllText(policyPath); - var policyFormat = PolicySchema.DetectFormat(policyPath); - var binding = PolicyBinder.Bind(policyContent, policyFormat); - if (!binding.Success) - { - var issues = string.Join(", ", binding.Issues.Select(issue => issue.Code)); - throw new InvalidOperationException($"Policy '{config.PolicyPath}' failed validation: {issues}."); - } - - _document = binding.Document; - - _scoringConfig = LoadScoringConfig(repoRoot, config.ScoringConfigPath); - _findings = SyntheticFindingGenerator.Create(config, repoRoot); - } - - public ScenarioExecutionResult Execute(int iterations, CancellationToken cancellationToken) - { - if (iterations <= 0) - { - throw new ArgumentOutOfRangeException(nameof(iterations), iterations, "Iterations must be positive."); - } - - var durations = new double[iterations]; - var throughputs = new double[iterations]; - var allocations = new double[iterations]; - var hashingAccumulator = new EvaluationAccumulator(); - - for (var index = 0; index < iterations; index++) - { - cancellationToken.ThrowIfCancellationRequested(); - - var beforeAllocated = GC.GetTotalAllocatedBytes(); - var stopwatch = Stopwatch.StartNew(); - - hashingAccumulator.Reset(); - foreach (var finding in _findings) - { - var verdict = PolicyEvaluation.EvaluateFinding(_document, _scoringConfig, finding); - hashingAccumulator.Add(verdict); - } - - stopwatch.Stop(); - - var afterAllocated = GC.GetTotalAllocatedBytes(); - var elapsedMs = stopwatch.Elapsed.TotalMilliseconds; - if (elapsedMs <= 0) - { - elapsedMs = 0.0001; - } - - durations[index] = elapsedMs; - throughputs[index] = _findings.Length / stopwatch.Elapsed.TotalSeconds; - allocations[index] = Math.Max(0, afterAllocated - beforeAllocated) / (1024d * 1024d); - - hashingAccumulator.AssertConsumed(); - } - - return new ScenarioExecutionResult( - durations, - throughputs, - allocations, - _findings.Length); - } - - private static PolicyScoringConfig LoadScoringConfig(string repoRoot, string? scoringPath) - { - if (string.IsNullOrWhiteSpace(scoringPath)) - { - return PolicyScoringConfig.Default; - } - - var resolved = ResolvePathWithinRoot(repoRoot, scoringPath); - var format = PolicySchema.DetectFormat(resolved); - var content = File.ReadAllText(resolved); - var binding = PolicyScoringConfigBinder.Bind(content, format); - if (!binding.Success || binding.Config is null) - { - var issues = binding.Issues.Length == 0 - ? 
"unknown" - : string.Join(", ", binding.Issues.Select(issue => issue.Code)); - throw new InvalidOperationException($"Scoring configuration '{scoringPath}' failed validation: {issues}."); - } - - return binding.Config; - } - - private static string ResolvePathWithinRoot(string repoRoot, string relativePath) - { - ArgumentException.ThrowIfNullOrWhiteSpace(repoRoot); - ArgumentException.ThrowIfNullOrWhiteSpace(relativePath); - - var combined = Path.GetFullPath(Path.Combine(repoRoot, relativePath)); - if (!PathUtilities.IsWithinRoot(repoRoot, combined)) - { - throw new InvalidOperationException($"Path '{relativePath}' escapes repository root '{repoRoot}'."); - } - - if (!File.Exists(combined)) - { - throw new FileNotFoundException($"Path '{relativePath}' resolved to '{combined}' but does not exist.", combined); - } - - return combined; - } -} - -internal sealed record ScenarioExecutionResult( - IReadOnlyList Durations, - IReadOnlyList Throughputs, - IReadOnlyList AllocatedMb, - int FindingCount); - -internal static class SyntheticFindingGenerator -{ - private static readonly ImmutableArray Environments = ImmutableArray.Create("prod", "staging", "dev"); - private static readonly ImmutableArray Sources = ImmutableArray.Create("concelier", "excitor", "sbom"); - private static readonly ImmutableArray Vendors = ImmutableArray.Create("acme", "contoso", "globex", "initech", "umbrella"); - private static readonly ImmutableArray Licenses = ImmutableArray.Create("MIT", "Apache-2.0", "GPL-3.0", "BSD-3-Clause", "Proprietary"); - private static readonly ImmutableArray Repositories = ImmutableArray.Create("acme/service-api", "acme/web", "acme/worker", "acme/mobile", "acme/cli"); - private static readonly ImmutableArray Images = ImmutableArray.Create("registry.local/worker:2025.10", "registry.local/api:2025.10", "registry.local/cli:2025.10"); - private static readonly ImmutableArray TagPool = ImmutableArray.Create("kev", "runtime", "reachable", "public", "third-party", "critical-path"); - private static readonly ImmutableArray> TagSets = BuildTagSets(); - private static readonly PolicySeverity[] SeverityPool = - { - PolicySeverity.Critical, - PolicySeverity.High, - PolicySeverity.Medium, - PolicySeverity.Low, - PolicySeverity.Informational - }; - - public static PolicyFinding[] Create(PolicyScenarioConfig config, string repoRoot) - { - var totalFindings = config.ResolveFindingCount(); - if (totalFindings <= 0) - { - return Array.Empty(); - } - - var seed = config.ResolveSeed(); - var random = new Random(seed); - var findings = new PolicyFinding[totalFindings]; - var tagsBuffer = new List(3); - - var componentCount = Math.Max(1, config.ComponentCount); - - for (var index = 0; index < totalFindings; index++) - { - var componentIndex = index % componentCount; - var findingId = $"F-{componentIndex:D5}-{index:D6}"; - var severity = SeverityPool[random.Next(SeverityPool.Length)]; - var environment = Environments[componentIndex % Environments.Length]; - var source = Sources[random.Next(Sources.Length)]; - var vendor = Vendors[random.Next(Vendors.Length)]; - var license = Licenses[random.Next(Licenses.Length)]; - var repository = Repositories[componentIndex % Repositories.Length]; - var image = Images[(componentIndex + index) % Images.Length]; - var packageName = $"pkg{componentIndex % 1000}"; - var purl = $"pkg:generic/{packageName}@{1 + (index % 20)}.{1 + (componentIndex % 10)}.{index % 5}"; - var cve = index % 7 == 0 ? 
$"CVE-2025-{1000 + index % 9000:D4}" : null; - var layerDigest = $"sha256:{Convert.ToHexString(Guid.NewGuid().ToByteArray())[..32].ToLowerInvariant()}"; - - var tags = TagSets[random.Next(TagSets.Length)]; - - findings[index] = PolicyFinding.Create( - findingId, - severity, - environment: environment, - source: source, - vendor: vendor, - license: license, - image: image, - repository: repository, - package: packageName, - purl: purl, - cve: cve, - path: $"/app/{packageName}/{index % 50}.so", - layerDigest: layerDigest, - tags: tags); - } - - return findings; - } - - private static ImmutableArray> BuildTagSets() - { - var builder = ImmutableArray.CreateBuilder>(); - builder.Add(ImmutableArray.Empty); - builder.Add(ImmutableArray.Create("kev")); - builder.Add(ImmutableArray.Create("runtime")); - builder.Add(ImmutableArray.Create("reachable")); - builder.Add(ImmutableArray.Create("third-party")); - builder.Add(ImmutableArray.Create("kev", "runtime")); - builder.Add(ImmutableArray.Create("kev", "third-party")); - builder.Add(ImmutableArray.Create("runtime", "public")); - builder.Add(ImmutableArray.Create("reachable", "critical-path")); - return builder.ToImmutable(); - } -} - -internal sealed class EvaluationAccumulator -{ - private double _scoreAccumulator; - private int _quietCount; - - public void Reset() - { - _scoreAccumulator = 0; - _quietCount = 0; - } - - public void Add(PolicyVerdict verdict) - { - _scoreAccumulator += verdict.Score; - if (verdict.Quiet) - { - _quietCount++; - } - } - - public void AssertConsumed() - { - if (_scoreAccumulator == 0 && _quietCount == 0) - { - throw new InvalidOperationException("Evaluation accumulator detected zero work; dataset may be empty."); - } - } -} +using System.Collections.Immutable; +using System.Diagnostics; +using System.Globalization; +using System.Linq; +using StellaOps.Policy; + +namespace StellaOps.Bench.PolicyEngine; + +internal sealed class PolicyScenarioRunner +{ + private readonly PolicyScenarioConfig _config; + private readonly PolicyDocument _document; + private readonly PolicyScoringConfig _scoringConfig; + private readonly PolicyFinding[] _findings; + + public PolicyScenarioRunner(PolicyScenarioConfig config, string repoRoot) + { + _config = config ?? 
throw new ArgumentNullException(nameof(config)); + ArgumentException.ThrowIfNullOrWhiteSpace(repoRoot); + + var policyPath = ResolvePathWithinRoot(repoRoot, config.PolicyPath); + var policyContent = File.ReadAllText(policyPath); + var policyFormat = PolicySchema.DetectFormat(policyPath); + var binding = PolicyBinder.Bind(policyContent, policyFormat); + if (!binding.Success) + { + var issues = string.Join(", ", binding.Issues.Select(issue => issue.Code)); + throw new InvalidOperationException($"Policy '{config.PolicyPath}' failed validation: {issues}."); + } + + _document = binding.Document; + + _scoringConfig = LoadScoringConfig(repoRoot, config.ScoringConfigPath); + _findings = SyntheticFindingGenerator.Create(config, repoRoot); + } + + public ScenarioExecutionResult Execute(int iterations, CancellationToken cancellationToken) + { + if (iterations <= 0) + { + throw new ArgumentOutOfRangeException(nameof(iterations), iterations, "Iterations must be positive."); + } + + var durations = new double[iterations]; + var throughputs = new double[iterations]; + var allocations = new double[iterations]; + var hashingAccumulator = new EvaluationAccumulator(); + + for (var index = 0; index < iterations; index++) + { + cancellationToken.ThrowIfCancellationRequested(); + + var beforeAllocated = GC.GetTotalAllocatedBytes(); + var stopwatch = Stopwatch.StartNew(); + + hashingAccumulator.Reset(); + foreach (var finding in _findings) + { + var verdict = PolicyEvaluation.EvaluateFinding(_document, _scoringConfig, finding); + hashingAccumulator.Add(verdict); + } + + stopwatch.Stop(); + + var afterAllocated = GC.GetTotalAllocatedBytes(); + var elapsedMs = stopwatch.Elapsed.TotalMilliseconds; + if (elapsedMs <= 0) + { + elapsedMs = 0.0001; + } + + durations[index] = elapsedMs; + throughputs[index] = _findings.Length / stopwatch.Elapsed.TotalSeconds; + allocations[index] = Math.Max(0, afterAllocated - beforeAllocated) / (1024d * 1024d); + + hashingAccumulator.AssertConsumed(); + } + + return new ScenarioExecutionResult( + durations, + throughputs, + allocations, + _findings.Length); + } + + private static PolicyScoringConfig LoadScoringConfig(string repoRoot, string? scoringPath) + { + if (string.IsNullOrWhiteSpace(scoringPath)) + { + return PolicyScoringConfig.Default; + } + + var resolved = ResolvePathWithinRoot(repoRoot, scoringPath); + var format = PolicySchema.DetectFormat(resolved); + var content = File.ReadAllText(resolved); + var binding = PolicyScoringConfigBinder.Bind(content, format); + if (!binding.Success || binding.Config is null) + { + var issues = binding.Issues.Length == 0 + ? 
"unknown" + : string.Join(", ", binding.Issues.Select(issue => issue.Code)); + throw new InvalidOperationException($"Scoring configuration '{scoringPath}' failed validation: {issues}."); + } + + return binding.Config; + } + + private static string ResolvePathWithinRoot(string repoRoot, string relativePath) + { + ArgumentException.ThrowIfNullOrWhiteSpace(repoRoot); + ArgumentException.ThrowIfNullOrWhiteSpace(relativePath); + + var combined = Path.GetFullPath(Path.Combine(repoRoot, relativePath)); + if (!PathUtilities.IsWithinRoot(repoRoot, combined)) + { + throw new InvalidOperationException($"Path '{relativePath}' escapes repository root '{repoRoot}'."); + } + + if (!File.Exists(combined)) + { + throw new FileNotFoundException($"Path '{relativePath}' resolved to '{combined}' but does not exist.", combined); + } + + return combined; + } +} + +internal sealed record ScenarioExecutionResult( + IReadOnlyList Durations, + IReadOnlyList Throughputs, + IReadOnlyList AllocatedMb, + int FindingCount); + +internal static class SyntheticFindingGenerator +{ + private static readonly ImmutableArray Environments = ImmutableArray.Create("prod", "staging", "dev"); + private static readonly ImmutableArray Sources = ImmutableArray.Create("concelier", "excitor", "sbom"); + private static readonly ImmutableArray Vendors = ImmutableArray.Create("acme", "contoso", "globex", "initech", "umbrella"); + private static readonly ImmutableArray Licenses = ImmutableArray.Create("MIT", "Apache-2.0", "GPL-3.0", "BSD-3-Clause", "Proprietary"); + private static readonly ImmutableArray Repositories = ImmutableArray.Create("acme/service-api", "acme/web", "acme/worker", "acme/mobile", "acme/cli"); + private static readonly ImmutableArray Images = ImmutableArray.Create("registry.local/worker:2025.10", "registry.local/api:2025.10", "registry.local/cli:2025.10"); + private static readonly ImmutableArray TagPool = ImmutableArray.Create("kev", "runtime", "reachable", "public", "third-party", "critical-path"); + private static readonly ImmutableArray> TagSets = BuildTagSets(); + private static readonly PolicySeverity[] SeverityPool = + { + PolicySeverity.Critical, + PolicySeverity.High, + PolicySeverity.Medium, + PolicySeverity.Low, + PolicySeverity.Informational + }; + + public static PolicyFinding[] Create(PolicyScenarioConfig config, string repoRoot) + { + var totalFindings = config.ResolveFindingCount(); + if (totalFindings <= 0) + { + return Array.Empty(); + } + + var seed = config.ResolveSeed(); + var random = new Random(seed); + var findings = new PolicyFinding[totalFindings]; + var tagsBuffer = new List(3); + + var componentCount = Math.Max(1, config.ComponentCount); + + for (var index = 0; index < totalFindings; index++) + { + var componentIndex = index % componentCount; + var findingId = $"F-{componentIndex:D5}-{index:D6}"; + var severity = SeverityPool[random.Next(SeverityPool.Length)]; + var environment = Environments[componentIndex % Environments.Length]; + var source = Sources[random.Next(Sources.Length)]; + var vendor = Vendors[random.Next(Vendors.Length)]; + var license = Licenses[random.Next(Licenses.Length)]; + var repository = Repositories[componentIndex % Repositories.Length]; + var image = Images[(componentIndex + index) % Images.Length]; + var packageName = $"pkg{componentIndex % 1000}"; + var purl = $"pkg:generic/{packageName}@{1 + (index % 20)}.{1 + (componentIndex % 10)}.{index % 5}"; + var cve = index % 7 == 0 ? 
$"CVE-2025-{1000 + index % 9000:D4}" : null; + var layerDigest = $"sha256:{Convert.ToHexString(Guid.NewGuid().ToByteArray())[..32].ToLowerInvariant()}"; + + var tags = TagSets[random.Next(TagSets.Length)]; + + findings[index] = PolicyFinding.Create( + findingId, + severity, + environment: environment, + source: source, + vendor: vendor, + license: license, + image: image, + repository: repository, + package: packageName, + purl: purl, + cve: cve, + path: $"/app/{packageName}/{index % 50}.so", + layerDigest: layerDigest, + tags: tags); + } + + return findings; + } + + private static ImmutableArray> BuildTagSets() + { + var builder = ImmutableArray.CreateBuilder>(); + builder.Add(ImmutableArray.Empty); + builder.Add(ImmutableArray.Create("kev")); + builder.Add(ImmutableArray.Create("runtime")); + builder.Add(ImmutableArray.Create("reachable")); + builder.Add(ImmutableArray.Create("third-party")); + builder.Add(ImmutableArray.Create("kev", "runtime")); + builder.Add(ImmutableArray.Create("kev", "third-party")); + builder.Add(ImmutableArray.Create("runtime", "public")); + builder.Add(ImmutableArray.Create("reachable", "critical-path")); + return builder.ToImmutable(); + } +} + +internal sealed class EvaluationAccumulator +{ + private double _scoreAccumulator; + private int _quietCount; + + public void Reset() + { + _scoreAccumulator = 0; + _quietCount = 0; + } + + public void Add(PolicyVerdict verdict) + { + _scoreAccumulator += verdict.Score; + if (verdict.Quiet) + { + _quietCount++; + } + } + + public void AssertConsumed() + { + if (_scoreAccumulator == 0 && _quietCount == 0) + { + throw new InvalidOperationException("Evaluation accumulator detected zero work; dataset may be empty."); + } + } +} diff --git a/src/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Program.cs b/src/Bench/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Program.cs similarity index 97% rename from src/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Program.cs rename to src/Bench/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Program.cs index 6ca61d97..ee99b433 100644 --- a/src/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Program.cs +++ b/src/Bench/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Program.cs @@ -1,373 +1,373 @@ -using System.Globalization; -using System.Linq; -using StellaOps.Bench.PolicyEngine.Baseline; -using StellaOps.Bench.PolicyEngine.Reporting; - -namespace StellaOps.Bench.PolicyEngine; - -internal static class Program -{ - public static async Task Main(string[] args) - { - try - { - var options = ProgramOptions.Parse(args); - var config = await BenchmarkConfig.LoadAsync(options.ConfigPath).ConfigureAwait(false); - var iterations = options.Iterations ?? config.Iterations ?? 3; - var repoRoot = ResolveRepoRoot(options.RepoRoot, options.ConfigPath); - var thresholdMs = options.ThresholdMs ?? config.ThresholdMs; - var throughputFloor = options.MinThroughputPerSecond ?? config.MinThroughputPerSecond; - var allocationLimit = options.MaxAllocatedMb ?? config.MaxAllocatedMb; - var regressionLimit = options.RegressionLimit; - var capturedAt = (options.CapturedAtUtc ?? 
DateTimeOffset.UtcNow).ToUniversalTime(); - - var baseline = await BaselineLoader.LoadAsync(options.BaselinePath, CancellationToken.None).ConfigureAwait(false); - - var results = new List(); - var reports = new List(); - var failures = new List(); - - foreach (var scenario in config.Scenarios) - { - var runner = new PolicyScenarioRunner(scenario, repoRoot); - var execution = runner.Execute(iterations, CancellationToken.None); - - var durationStats = DurationStatistics.From(execution.Durations); - var throughputStats = ThroughputStatistics.From(execution.Throughputs); - var allocationStats = AllocationStatistics.From(execution.AllocatedMb); - - var scenarioThreshold = scenario.ThresholdMs ?? thresholdMs; - var scenarioThroughputFloor = scenario.MinThroughputPerSecond ?? throughputFloor; - var scenarioAllocationLimit = scenario.MaxAllocatedMb ?? allocationLimit; - - var result = new ScenarioResult( - scenario.ScenarioId, - scenario.Label ?? scenario.ScenarioId, - iterations, - execution.FindingCount, - durationStats.MeanMs, - durationStats.P95Ms, - durationStats.MaxMs, - throughputStats.MeanPerSecond, - throughputStats.MinPerSecond, - allocationStats.MaxAllocatedMb, - scenarioThreshold, - scenarioThroughputFloor, - scenarioAllocationLimit); - - results.Add(result); - - if (scenarioThreshold is { } threshold && result.MaxMs > threshold) - { - failures.Add($"{result.Id} exceeded latency threshold: {result.MaxMs:F2} ms > {threshold:F2} ms"); - } - - if (scenarioThroughputFloor is { } floor && result.MinThroughputPerSecond < floor) - { - failures.Add($"{result.Id} fell below throughput floor: {result.MinThroughputPerSecond:N0} findings/s < {floor:N0} findings/s"); - } - - if (scenarioAllocationLimit is { } limit && result.MaxAllocatedMb > limit) - { - failures.Add($"{result.Id} exceeded allocation budget: {result.MaxAllocatedMb:F2} MB > {limit:F2} MB"); - } - - baseline.TryGetValue(result.Id, out var baselineEntry); - var report = new BenchmarkScenarioReport(result, baselineEntry, regressionLimit); - reports.Add(report); - failures.AddRange(report.BuildRegressionFailureMessages()); - } - - TablePrinter.Print(results); - - if (!string.IsNullOrWhiteSpace(options.CsvOutPath)) - { - CsvWriter.Write(options.CsvOutPath!, results); - } - - if (!string.IsNullOrWhiteSpace(options.JsonOutPath)) - { - var metadata = new BenchmarkJsonMetadata( - SchemaVersion: "policy-bench/1.0", - CapturedAtUtc: capturedAt, - Commit: options.Commit, - Environment: options.Environment); - - await BenchmarkJsonWriter.WriteAsync( - options.JsonOutPath!, - metadata, - reports, - CancellationToken.None).ConfigureAwait(false); - } - - if (!string.IsNullOrWhiteSpace(options.PrometheusOutPath)) - { - PrometheusWriter.Write(options.PrometheusOutPath!, reports); - } - - if (failures.Count > 0) - { - Console.Error.WriteLine(); - Console.Error.WriteLine("Benchmark failures detected:"); - foreach (var failure in failures.Distinct()) - { - Console.Error.WriteLine($" - {failure}"); - } - - return 1; - } - - return 0; - } - catch (Exception ex) - { - Console.Error.WriteLine($"policy-bench error: {ex.Message}"); - return 1; - } - } - - private static string ResolveRepoRoot(string? 
overridePath, string configPath) - { - if (!string.IsNullOrWhiteSpace(overridePath)) - { - return Path.GetFullPath(overridePath); - } - - var configDirectory = Path.GetDirectoryName(configPath); - if (string.IsNullOrWhiteSpace(configDirectory)) - { - return Directory.GetCurrentDirectory(); - } - - return Path.GetFullPath(Path.Combine(configDirectory, "..", "..", "..")); - } - - private sealed record ProgramOptions( - string ConfigPath, - int? Iterations, - double? ThresholdMs, - double? MinThroughputPerSecond, - double? MaxAllocatedMb, - string? CsvOutPath, - string? JsonOutPath, - string? PrometheusOutPath, - string? RepoRoot, - string BaselinePath, - DateTimeOffset? CapturedAtUtc, - string? Commit, - string? Environment, - double? RegressionLimit) - { - public static ProgramOptions Parse(string[] args) - { - var configPath = DefaultConfigPath(); - var baselinePath = DefaultBaselinePath(); - - int? iterations = null; - double? thresholdMs = null; - double? minThroughput = null; - double? maxAllocated = null; - string? csvOut = null; - string? jsonOut = null; - string? promOut = null; - string? repoRoot = null; - DateTimeOffset? capturedAt = null; - string? commit = null; - string? environment = null; - double? regressionLimit = null; - - for (var index = 0; index < args.Length; index++) - { - var current = args[index]; - switch (current) - { - case "--config": - EnsureNext(args, index); - configPath = Path.GetFullPath(args[++index]); - break; - case "--iterations": - EnsureNext(args, index); - iterations = int.Parse(args[++index], CultureInfo.InvariantCulture); - break; - case "--threshold-ms": - EnsureNext(args, index); - thresholdMs = double.Parse(args[++index], CultureInfo.InvariantCulture); - break; - case "--min-throughput": - EnsureNext(args, index); - minThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture); - break; - case "--max-allocated-mb": - EnsureNext(args, index); - maxAllocated = double.Parse(args[++index], CultureInfo.InvariantCulture); - break; - case "--csv": - EnsureNext(args, index); - csvOut = args[++index]; - break; - case "--json": - EnsureNext(args, index); - jsonOut = args[++index]; - break; - case "--prometheus": - EnsureNext(args, index); - promOut = args[++index]; - break; - case "--repo-root": - EnsureNext(args, index); - repoRoot = args[++index]; - break; - case "--baseline": - EnsureNext(args, index); - baselinePath = Path.GetFullPath(args[++index]); - break; - case "--captured-at": - EnsureNext(args, index); - capturedAt = DateTimeOffset.Parse(args[++index], CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal); - break; - case "--commit": - EnsureNext(args, index); - commit = args[++index]; - break; - case "--environment": - EnsureNext(args, index); - environment = args[++index]; - break; - case "--regression-limit": - EnsureNext(args, index); - regressionLimit = double.Parse(args[++index], CultureInfo.InvariantCulture); - break; - case "--help": - case "-h": - PrintUsage(); - System.Environment.Exit(0); - break; - default: - throw new ArgumentException($"Unknown argument '{current}'."); - } - } - - return new ProgramOptions( - configPath, - iterations, - thresholdMs, - minThroughput, - maxAllocated, - csvOut, - jsonOut, - promOut, - repoRoot, - baselinePath, - capturedAt, - commit, - environment, - regressionLimit); - } - - private static string DefaultConfigPath() - { - var binaryDir = AppContext.BaseDirectory; - var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", "..")); - var benchRoot = 
Path.GetFullPath(Path.Combine(projectDir, "..")); - return Path.Combine(benchRoot, "config.json"); - } - - private static string DefaultBaselinePath() - { - var binaryDir = AppContext.BaseDirectory; - var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", "..")); - var benchRoot = Path.GetFullPath(Path.Combine(projectDir, "..")); - return Path.Combine(benchRoot, "baseline.csv"); - } - - private static void EnsureNext(string[] args, int index) - { - if (index + 1 >= args.Length) - { - throw new ArgumentException("Missing value for argument."); - } - } - - private static void PrintUsage() - { - Console.WriteLine("Usage: policy-bench [options]"); - Console.WriteLine(); - Console.WriteLine("Options:"); - Console.WriteLine(" --config Path to benchmark configuration JSON."); - Console.WriteLine(" --iterations Override iteration count."); - Console.WriteLine(" --threshold-ms Global latency threshold in milliseconds."); - Console.WriteLine(" --min-throughput Global throughput floor (findings/second)."); - Console.WriteLine(" --max-allocated-mb Global allocation ceiling (MB)."); - Console.WriteLine(" --csv Write CSV results to path."); - Console.WriteLine(" --json Write JSON results to path."); - Console.WriteLine(" --prometheus Write Prometheus exposition metrics to path."); - Console.WriteLine(" --repo-root Repository root override."); - Console.WriteLine(" --baseline Baseline CSV path."); - Console.WriteLine(" --captured-at Timestamp to embed in JSON metadata."); - Console.WriteLine(" --commit Commit identifier for metadata."); - Console.WriteLine(" --environment Environment label for metadata."); - Console.WriteLine(" --regression-limit Regression multiplier (default 1.15)."); - } - } -} - -internal static class TablePrinter -{ - public static void Print(IEnumerable results) - { - Console.WriteLine("Scenario | Findings | Mean(ms) | P95(ms) | Max(ms) | Min k/s | Alloc(MB)"); - Console.WriteLine("---------------------------- | ----------- | ---------- | ---------- | ---------- | -------- | --------"); - foreach (var row in results) - { - Console.WriteLine(string.Join(" | ", new[] - { - row.IdColumn, - row.FindingsColumn, - row.MeanColumn, - row.P95Column, - row.MaxColumn, - row.MinThroughputColumn, - row.AllocatedColumn - })); - } - } -} - -internal static class CsvWriter -{ - public static void Write(string path, IEnumerable results) - { - var resolvedPath = Path.GetFullPath(path); - var directory = Path.GetDirectoryName(resolvedPath); - if (!string.IsNullOrEmpty(directory)) - { - Directory.CreateDirectory(directory); - } - - using var stream = new FileStream(resolvedPath, FileMode.Create, FileAccess.Write, FileShare.None); - using var writer = new StreamWriter(stream); - writer.WriteLine("scenario,iterations,findings,mean_ms,p95_ms,max_ms,mean_throughput_per_sec,min_throughput_per_sec,max_allocated_mb"); - - foreach (var row in results) - { - writer.Write(row.Id); - writer.Write(','); - writer.Write(row.Iterations.ToString(CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(row.FindingCount.ToString(CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(row.MeanMs.ToString("F4", CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(row.P95Ms.ToString("F4", CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(row.MaxMs.ToString("F4", CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(row.MeanThroughputPerSecond.ToString("F4", CultureInfo.InvariantCulture)); - writer.Write(','); - 
writer.Write(row.MinThroughputPerSecond.ToString("F4", CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(row.MaxAllocatedMb.ToString("F4", CultureInfo.InvariantCulture)); - writer.WriteLine(); - } - } -} +using System.Globalization; +using System.Linq; +using StellaOps.Bench.PolicyEngine.Baseline; +using StellaOps.Bench.PolicyEngine.Reporting; + +namespace StellaOps.Bench.PolicyEngine; + +internal static class Program +{ + public static async Task Main(string[] args) + { + try + { + var options = ProgramOptions.Parse(args); + var config = await BenchmarkConfig.LoadAsync(options.ConfigPath).ConfigureAwait(false); + var iterations = options.Iterations ?? config.Iterations ?? 3; + var repoRoot = ResolveRepoRoot(options.RepoRoot, options.ConfigPath); + var thresholdMs = options.ThresholdMs ?? config.ThresholdMs; + var throughputFloor = options.MinThroughputPerSecond ?? config.MinThroughputPerSecond; + var allocationLimit = options.MaxAllocatedMb ?? config.MaxAllocatedMb; + var regressionLimit = options.RegressionLimit; + var capturedAt = (options.CapturedAtUtc ?? DateTimeOffset.UtcNow).ToUniversalTime(); + + var baseline = await BaselineLoader.LoadAsync(options.BaselinePath, CancellationToken.None).ConfigureAwait(false); + + var results = new List(); + var reports = new List(); + var failures = new List(); + + foreach (var scenario in config.Scenarios) + { + var runner = new PolicyScenarioRunner(scenario, repoRoot); + var execution = runner.Execute(iterations, CancellationToken.None); + + var durationStats = DurationStatistics.From(execution.Durations); + var throughputStats = ThroughputStatistics.From(execution.Throughputs); + var allocationStats = AllocationStatistics.From(execution.AllocatedMb); + + var scenarioThreshold = scenario.ThresholdMs ?? thresholdMs; + var scenarioThroughputFloor = scenario.MinThroughputPerSecond ?? throughputFloor; + var scenarioAllocationLimit = scenario.MaxAllocatedMb ?? allocationLimit; + + var result = new ScenarioResult( + scenario.ScenarioId, + scenario.Label ?? 
scenario.ScenarioId, + iterations, + execution.FindingCount, + durationStats.MeanMs, + durationStats.P95Ms, + durationStats.MaxMs, + throughputStats.MeanPerSecond, + throughputStats.MinPerSecond, + allocationStats.MaxAllocatedMb, + scenarioThreshold, + scenarioThroughputFloor, + scenarioAllocationLimit); + + results.Add(result); + + if (scenarioThreshold is { } threshold && result.MaxMs > threshold) + { + failures.Add($"{result.Id} exceeded latency threshold: {result.MaxMs:F2} ms > {threshold:F2} ms"); + } + + if (scenarioThroughputFloor is { } floor && result.MinThroughputPerSecond < floor) + { + failures.Add($"{result.Id} fell below throughput floor: {result.MinThroughputPerSecond:N0} findings/s < {floor:N0} findings/s"); + } + + if (scenarioAllocationLimit is { } limit && result.MaxAllocatedMb > limit) + { + failures.Add($"{result.Id} exceeded allocation budget: {result.MaxAllocatedMb:F2} MB > {limit:F2} MB"); + } + + baseline.TryGetValue(result.Id, out var baselineEntry); + var report = new BenchmarkScenarioReport(result, baselineEntry, regressionLimit); + reports.Add(report); + failures.AddRange(report.BuildRegressionFailureMessages()); + } + + TablePrinter.Print(results); + + if (!string.IsNullOrWhiteSpace(options.CsvOutPath)) + { + CsvWriter.Write(options.CsvOutPath!, results); + } + + if (!string.IsNullOrWhiteSpace(options.JsonOutPath)) + { + var metadata = new BenchmarkJsonMetadata( + SchemaVersion: "policy-bench/1.0", + CapturedAtUtc: capturedAt, + Commit: options.Commit, + Environment: options.Environment); + + await BenchmarkJsonWriter.WriteAsync( + options.JsonOutPath!, + metadata, + reports, + CancellationToken.None).ConfigureAwait(false); + } + + if (!string.IsNullOrWhiteSpace(options.PrometheusOutPath)) + { + PrometheusWriter.Write(options.PrometheusOutPath!, reports); + } + + if (failures.Count > 0) + { + Console.Error.WriteLine(); + Console.Error.WriteLine("Benchmark failures detected:"); + foreach (var failure in failures.Distinct()) + { + Console.Error.WriteLine($" - {failure}"); + } + + return 1; + } + + return 0; + } + catch (Exception ex) + { + Console.Error.WriteLine($"policy-bench error: {ex.Message}"); + return 1; + } + } + + private static string ResolveRepoRoot(string? overridePath, string configPath) + { + if (!string.IsNullOrWhiteSpace(overridePath)) + { + return Path.GetFullPath(overridePath); + } + + var configDirectory = Path.GetDirectoryName(configPath); + if (string.IsNullOrWhiteSpace(configDirectory)) + { + return Directory.GetCurrentDirectory(); + } + + return Path.GetFullPath(Path.Combine(configDirectory, "..", "..", "..")); + } + + private sealed record ProgramOptions( + string ConfigPath, + int? Iterations, + double? ThresholdMs, + double? MinThroughputPerSecond, + double? MaxAllocatedMb, + string? CsvOutPath, + string? JsonOutPath, + string? PrometheusOutPath, + string? RepoRoot, + string BaselinePath, + DateTimeOffset? CapturedAtUtc, + string? Commit, + string? Environment, + double? RegressionLimit) + { + public static ProgramOptions Parse(string[] args) + { + var configPath = DefaultConfigPath(); + var baselinePath = DefaultBaselinePath(); + + int? iterations = null; + double? thresholdMs = null; + double? minThroughput = null; + double? maxAllocated = null; + string? csvOut = null; + string? jsonOut = null; + string? promOut = null; + string? repoRoot = null; + DateTimeOffset? capturedAt = null; + string? commit = null; + string? environment = null; + double? 
regressionLimit = null; + + for (var index = 0; index < args.Length; index++) + { + var current = args[index]; + switch (current) + { + case "--config": + EnsureNext(args, index); + configPath = Path.GetFullPath(args[++index]); + break; + case "--iterations": + EnsureNext(args, index); + iterations = int.Parse(args[++index], CultureInfo.InvariantCulture); + break; + case "--threshold-ms": + EnsureNext(args, index); + thresholdMs = double.Parse(args[++index], CultureInfo.InvariantCulture); + break; + case "--min-throughput": + EnsureNext(args, index); + minThroughput = double.Parse(args[++index], CultureInfo.InvariantCulture); + break; + case "--max-allocated-mb": + EnsureNext(args, index); + maxAllocated = double.Parse(args[++index], CultureInfo.InvariantCulture); + break; + case "--csv": + EnsureNext(args, index); + csvOut = args[++index]; + break; + case "--json": + EnsureNext(args, index); + jsonOut = args[++index]; + break; + case "--prometheus": + EnsureNext(args, index); + promOut = args[++index]; + break; + case "--repo-root": + EnsureNext(args, index); + repoRoot = args[++index]; + break; + case "--baseline": + EnsureNext(args, index); + baselinePath = Path.GetFullPath(args[++index]); + break; + case "--captured-at": + EnsureNext(args, index); + capturedAt = DateTimeOffset.Parse(args[++index], CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal); + break; + case "--commit": + EnsureNext(args, index); + commit = args[++index]; + break; + case "--environment": + EnsureNext(args, index); + environment = args[++index]; + break; + case "--regression-limit": + EnsureNext(args, index); + regressionLimit = double.Parse(args[++index], CultureInfo.InvariantCulture); + break; + case "--help": + case "-h": + PrintUsage(); + System.Environment.Exit(0); + break; + default: + throw new ArgumentException($"Unknown argument '{current}'."); + } + } + + return new ProgramOptions( + configPath, + iterations, + thresholdMs, + minThroughput, + maxAllocated, + csvOut, + jsonOut, + promOut, + repoRoot, + baselinePath, + capturedAt, + commit, + environment, + regressionLimit); + } + + private static string DefaultConfigPath() + { + var binaryDir = AppContext.BaseDirectory; + var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", "..")); + var benchRoot = Path.GetFullPath(Path.Combine(projectDir, "..")); + return Path.Combine(benchRoot, "config.json"); + } + + private static string DefaultBaselinePath() + { + var binaryDir = AppContext.BaseDirectory; + var projectDir = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", "..")); + var benchRoot = Path.GetFullPath(Path.Combine(projectDir, "..")); + return Path.Combine(benchRoot, "baseline.csv"); + } + + private static void EnsureNext(string[] args, int index) + { + if (index + 1 >= args.Length) + { + throw new ArgumentException("Missing value for argument."); + } + } + + private static void PrintUsage() + { + Console.WriteLine("Usage: policy-bench [options]"); + Console.WriteLine(); + Console.WriteLine("Options:"); + Console.WriteLine(" --config Path to benchmark configuration JSON."); + Console.WriteLine(" --iterations Override iteration count."); + Console.WriteLine(" --threshold-ms Global latency threshold in milliseconds."); + Console.WriteLine(" --min-throughput Global throughput floor (findings/second)."); + Console.WriteLine(" --max-allocated-mb Global allocation ceiling (MB)."); + Console.WriteLine(" --csv Write CSV results to path."); + Console.WriteLine(" --json Write JSON results to path."); + Console.WriteLine(" 
--prometheus Write Prometheus exposition metrics to path."); + Console.WriteLine(" --repo-root Repository root override."); + Console.WriteLine(" --baseline Baseline CSV path."); + Console.WriteLine(" --captured-at Timestamp to embed in JSON metadata."); + Console.WriteLine(" --commit Commit identifier for metadata."); + Console.WriteLine(" --environment Environment label for metadata."); + Console.WriteLine(" --regression-limit Regression multiplier (default 1.15)."); + } + } +} + +internal static class TablePrinter +{ + public static void Print(IEnumerable results) + { + Console.WriteLine("Scenario | Findings | Mean(ms) | P95(ms) | Max(ms) | Min k/s | Alloc(MB)"); + Console.WriteLine("---------------------------- | ----------- | ---------- | ---------- | ---------- | -------- | --------"); + foreach (var row in results) + { + Console.WriteLine(string.Join(" | ", new[] + { + row.IdColumn, + row.FindingsColumn, + row.MeanColumn, + row.P95Column, + row.MaxColumn, + row.MinThroughputColumn, + row.AllocatedColumn + })); + } + } +} + +internal static class CsvWriter +{ + public static void Write(string path, IEnumerable results) + { + var resolvedPath = Path.GetFullPath(path); + var directory = Path.GetDirectoryName(resolvedPath); + if (!string.IsNullOrEmpty(directory)) + { + Directory.CreateDirectory(directory); + } + + using var stream = new FileStream(resolvedPath, FileMode.Create, FileAccess.Write, FileShare.None); + using var writer = new StreamWriter(stream); + writer.WriteLine("scenario,iterations,findings,mean_ms,p95_ms,max_ms,mean_throughput_per_sec,min_throughput_per_sec,max_allocated_mb"); + + foreach (var row in results) + { + writer.Write(row.Id); + writer.Write(','); + writer.Write(row.Iterations.ToString(CultureInfo.InvariantCulture)); + writer.Write(','); + writer.Write(row.FindingCount.ToString(CultureInfo.InvariantCulture)); + writer.Write(','); + writer.Write(row.MeanMs.ToString("F4", CultureInfo.InvariantCulture)); + writer.Write(','); + writer.Write(row.P95Ms.ToString("F4", CultureInfo.InvariantCulture)); + writer.Write(','); + writer.Write(row.MaxMs.ToString("F4", CultureInfo.InvariantCulture)); + writer.Write(','); + writer.Write(row.MeanThroughputPerSecond.ToString("F4", CultureInfo.InvariantCulture)); + writer.Write(','); + writer.Write(row.MinThroughputPerSecond.ToString("F4", CultureInfo.InvariantCulture)); + writer.Write(','); + writer.Write(row.MaxAllocatedMb.ToString("F4", CultureInfo.InvariantCulture)); + writer.WriteLine(); + } + } +} diff --git a/src/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Reporting/BenchmarkJsonWriter.cs b/src/Bench/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Reporting/BenchmarkJsonWriter.cs similarity index 97% rename from src/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Reporting/BenchmarkJsonWriter.cs rename to src/Bench/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Reporting/BenchmarkJsonWriter.cs index 16cf6299..7dafccc8 100644 --- a/src/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Reporting/BenchmarkJsonWriter.cs +++ b/src/Bench/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Reporting/BenchmarkJsonWriter.cs @@ -1,125 +1,125 @@ -using System.Linq; -using System.Text.Json; -using System.Text.Json.Serialization; -using StellaOps.Bench.PolicyEngine.Baseline; - -namespace StellaOps.Bench.PolicyEngine.Reporting; - -internal static class BenchmarkJsonWriter -{ - private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) - { - 
WriteIndented = true, - DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull - }; - - public static async Task WriteAsync( - string path, - BenchmarkJsonMetadata metadata, - IReadOnlyList reports, - CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(path); - ArgumentNullException.ThrowIfNull(metadata); - ArgumentNullException.ThrowIfNull(reports); - - var resolved = Path.GetFullPath(path); - var directory = Path.GetDirectoryName(resolved); - if (!string.IsNullOrEmpty(directory)) - { - Directory.CreateDirectory(directory); - } - - var document = new BenchmarkJsonDocument( - metadata.SchemaVersion, - metadata.CapturedAtUtc, - metadata.Commit, - metadata.Environment, - reports.Select(CreateScenario).ToArray()); - - await using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None); - await JsonSerializer.SerializeAsync(stream, document, SerializerOptions, cancellationToken).ConfigureAwait(false); - await stream.FlushAsync(cancellationToken).ConfigureAwait(false); - } - - private static BenchmarkJsonScenario CreateScenario(BenchmarkScenarioReport report) - { - var baseline = report.Baseline; - return new BenchmarkJsonScenario( - report.Result.Id, - report.Result.Label, - report.Result.Iterations, - report.Result.FindingCount, - report.Result.MeanMs, - report.Result.P95Ms, - report.Result.MaxMs, - report.Result.MeanThroughputPerSecond, - report.Result.MinThroughputPerSecond, - report.Result.MaxAllocatedMb, - report.Result.ThresholdMs, - report.Result.MinThroughputThresholdPerSecond, - report.Result.MaxAllocatedThresholdMb, - baseline is null - ? null - : new BenchmarkJsonScenarioBaseline( - baseline.Iterations, - baseline.FindingCount, - baseline.MeanMs, - baseline.P95Ms, - baseline.MaxMs, - baseline.MeanThroughputPerSecond, - baseline.MinThroughputPerSecond, - baseline.MaxAllocatedMb), - new BenchmarkJsonScenarioRegression( - report.DurationRegressionRatio, - report.ThroughputRegressionRatio, - report.RegressionLimit, - report.RegressionBreached)); - } - - private sealed record BenchmarkJsonDocument( - string SchemaVersion, - DateTimeOffset CapturedAt, - string? Commit, - string? Environment, - IReadOnlyList Scenarios); - - private sealed record BenchmarkJsonScenario( - string Id, - string Label, - int Iterations, - int FindingCount, - double MeanMs, - double P95Ms, - double MaxMs, - double MeanThroughputPerSecond, - double MinThroughputPerSecond, - double MaxAllocatedMb, - double? ThresholdMs, - double? MinThroughputThresholdPerSecond, - double? MaxAllocatedThresholdMb, - BenchmarkJsonScenarioBaseline? Baseline, - BenchmarkJsonScenarioRegression Regression); - - private sealed record BenchmarkJsonScenarioBaseline( - int Iterations, - int FindingCount, - double MeanMs, - double P95Ms, - double MaxMs, - double MeanThroughputPerSecond, - double MinThroughputPerSecond, - double MaxAllocatedMb); - - private sealed record BenchmarkJsonScenarioRegression( - double? DurationRatio, - double? ThroughputRatio, - double Limit, - bool Breached); -} - -internal sealed record BenchmarkJsonMetadata( - string SchemaVersion, - DateTimeOffset CapturedAtUtc, - string? Commit, - string? 
Environment); +using System.Linq; +using System.Text.Json; +using System.Text.Json.Serialization; +using StellaOps.Bench.PolicyEngine.Baseline; + +namespace StellaOps.Bench.PolicyEngine.Reporting; + +internal static class BenchmarkJsonWriter +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + WriteIndented = true, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull + }; + + public static async Task WriteAsync( + string path, + BenchmarkJsonMetadata metadata, + IReadOnlyList reports, + CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(path); + ArgumentNullException.ThrowIfNull(metadata); + ArgumentNullException.ThrowIfNull(reports); + + var resolved = Path.GetFullPath(path); + var directory = Path.GetDirectoryName(resolved); + if (!string.IsNullOrEmpty(directory)) + { + Directory.CreateDirectory(directory); + } + + var document = new BenchmarkJsonDocument( + metadata.SchemaVersion, + metadata.CapturedAtUtc, + metadata.Commit, + metadata.Environment, + reports.Select(CreateScenario).ToArray()); + + await using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None); + await JsonSerializer.SerializeAsync(stream, document, SerializerOptions, cancellationToken).ConfigureAwait(false); + await stream.FlushAsync(cancellationToken).ConfigureAwait(false); + } + + private static BenchmarkJsonScenario CreateScenario(BenchmarkScenarioReport report) + { + var baseline = report.Baseline; + return new BenchmarkJsonScenario( + report.Result.Id, + report.Result.Label, + report.Result.Iterations, + report.Result.FindingCount, + report.Result.MeanMs, + report.Result.P95Ms, + report.Result.MaxMs, + report.Result.MeanThroughputPerSecond, + report.Result.MinThroughputPerSecond, + report.Result.MaxAllocatedMb, + report.Result.ThresholdMs, + report.Result.MinThroughputThresholdPerSecond, + report.Result.MaxAllocatedThresholdMb, + baseline is null + ? null + : new BenchmarkJsonScenarioBaseline( + baseline.Iterations, + baseline.FindingCount, + baseline.MeanMs, + baseline.P95Ms, + baseline.MaxMs, + baseline.MeanThroughputPerSecond, + baseline.MinThroughputPerSecond, + baseline.MaxAllocatedMb), + new BenchmarkJsonScenarioRegression( + report.DurationRegressionRatio, + report.ThroughputRegressionRatio, + report.RegressionLimit, + report.RegressionBreached)); + } + + private sealed record BenchmarkJsonDocument( + string SchemaVersion, + DateTimeOffset CapturedAt, + string? Commit, + string? Environment, + IReadOnlyList Scenarios); + + private sealed record BenchmarkJsonScenario( + string Id, + string Label, + int Iterations, + int FindingCount, + double MeanMs, + double P95Ms, + double MaxMs, + double MeanThroughputPerSecond, + double MinThroughputPerSecond, + double MaxAllocatedMb, + double? ThresholdMs, + double? MinThroughputThresholdPerSecond, + double? MaxAllocatedThresholdMb, + BenchmarkJsonScenarioBaseline? Baseline, + BenchmarkJsonScenarioRegression Regression); + + private sealed record BenchmarkJsonScenarioBaseline( + int Iterations, + int FindingCount, + double MeanMs, + double P95Ms, + double MaxMs, + double MeanThroughputPerSecond, + double MinThroughputPerSecond, + double MaxAllocatedMb); + + private sealed record BenchmarkJsonScenarioRegression( + double? DurationRatio, + double? ThroughputRatio, + double Limit, + bool Breached); +} + +internal sealed record BenchmarkJsonMetadata( + string SchemaVersion, + DateTimeOffset CapturedAtUtc, + string? Commit, + string? 
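A hedged usage sketch for the JSON writer above. The output path and metadata values are illustrative, and the snippet assumes it lives in the same assembly as the harness because the types are internal; the call shape follows the WriteAsync signature in this file.

```csharp
using System;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Bench.PolicyEngine.Reporting;

// Illustrative only: the reports list is built elsewhere in the harness from
// ScenarioResult plus optional BaselineEntry values.
internal static class JsonExportExample
{
    public static async Task ExportAsync(
        IReadOnlyList<BenchmarkScenarioReport> reports,
        CancellationToken cancellationToken)
    {
        var metadata = new BenchmarkJsonMetadata(
            SchemaVersion: "1.0",
            CapturedAtUtc: DateTimeOffset.UtcNow,
            Commit: "deadbeef",        // e.g. the output of `git rev-parse HEAD`
            Environment: "ci");

        // JsonSerializerDefaults.Web + WriteIndented produce indented camelCase
        // JSON ("schemaVersion", "capturedAt", "scenarios", ...), and null
        // thresholds are dropped by JsonIgnoreCondition.WhenWritingNull.
        await BenchmarkJsonWriter.WriteAsync(
            "out/bench/policy-engine/latest.json",
            metadata,
            reports,
            cancellationToken);
    }
}
```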
Environment); diff --git a/src/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Reporting/BenchmarkScenarioReport.cs b/src/Bench/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Reporting/BenchmarkScenarioReport.cs similarity index 97% rename from src/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Reporting/BenchmarkScenarioReport.cs rename to src/Bench/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Reporting/BenchmarkScenarioReport.cs index 5a705a71..a41b89ac 100644 --- a/src/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Reporting/BenchmarkScenarioReport.cs +++ b/src/Bench/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Reporting/BenchmarkScenarioReport.cs @@ -1,82 +1,82 @@ -using StellaOps.Bench.PolicyEngine.Baseline; - -namespace StellaOps.Bench.PolicyEngine.Reporting; - -internal sealed class BenchmarkScenarioReport -{ - private const double DefaultRegressionLimit = 1.15d; - - public BenchmarkScenarioReport(ScenarioResult result, BaselineEntry? baseline, double? regressionLimit = null) - { - Result = result ?? throw new ArgumentNullException(nameof(result)); - Baseline = baseline; - RegressionLimit = regressionLimit is { } limit && limit > 0 ? limit : DefaultRegressionLimit; - DurationRegressionRatio = CalculateDurationRatio(result.MaxMs, baseline?.MaxMs); - ThroughputRegressionRatio = CalculateThroughputRatio(result.MinThroughputPerSecond, baseline?.MinThroughputPerSecond); - } - - public ScenarioResult Result { get; } - - public BaselineEntry? Baseline { get; } - - public double RegressionLimit { get; } - - public double? DurationRegressionRatio { get; } - - public double? ThroughputRegressionRatio { get; } - - public bool DurationRegressionBreached => - DurationRegressionRatio is { } ratio && - ratio >= RegressionLimit; - - public bool ThroughputRegressionBreached => - ThroughputRegressionRatio is { } ratio && - ratio >= RegressionLimit; - - public bool RegressionBreached => DurationRegressionBreached || ThroughputRegressionBreached; - - public IEnumerable BuildRegressionFailureMessages() - { - if (Baseline is null) - { - yield break; - } - - if (DurationRegressionBreached && DurationRegressionRatio is { } durationRatio) - { - var delta = (durationRatio - 1d) * 100d; - yield return $"{Result.Id} exceeded max duration budget: {Result.MaxMs:F2} ms vs baseline {Baseline.MaxMs:F2} ms (+{delta:F1}%)."; - } - - if (ThroughputRegressionBreached && ThroughputRegressionRatio is { } throughputRatio) - { - var delta = (throughputRatio - 1d) * 100d; - yield return $"{Result.Id} throughput regressed: min {Result.MinThroughputPerSecond:N0} /s vs baseline {Baseline.MinThroughputPerSecond:N0} /s (-{delta:F1}%)."; - } - } - - private static double? CalculateDurationRatio(double current, double? baseline) - { - if (!baseline.HasValue || baseline.Value <= 0d) - { - return null; - } - - return current / baseline.Value; - } - - private static double? CalculateThroughputRatio(double current, double? baseline) - { - if (!baseline.HasValue || baseline.Value <= 0d) - { - return null; - } - - if (current <= 0d) - { - return double.PositiveInfinity; - } - - return baseline.Value / current; - } -} +using StellaOps.Bench.PolicyEngine.Baseline; + +namespace StellaOps.Bench.PolicyEngine.Reporting; + +internal sealed class BenchmarkScenarioReport +{ + private const double DefaultRegressionLimit = 1.15d; + + public BenchmarkScenarioReport(ScenarioResult result, BaselineEntry? baseline, double? regressionLimit = null) + { + Result = result ?? 
throw new ArgumentNullException(nameof(result)); + Baseline = baseline; + RegressionLimit = regressionLimit is { } limit && limit > 0 ? limit : DefaultRegressionLimit; + DurationRegressionRatio = CalculateDurationRatio(result.MaxMs, baseline?.MaxMs); + ThroughputRegressionRatio = CalculateThroughputRatio(result.MinThroughputPerSecond, baseline?.MinThroughputPerSecond); + } + + public ScenarioResult Result { get; } + + public BaselineEntry? Baseline { get; } + + public double RegressionLimit { get; } + + public double? DurationRegressionRatio { get; } + + public double? ThroughputRegressionRatio { get; } + + public bool DurationRegressionBreached => + DurationRegressionRatio is { } ratio && + ratio >= RegressionLimit; + + public bool ThroughputRegressionBreached => + ThroughputRegressionRatio is { } ratio && + ratio >= RegressionLimit; + + public bool RegressionBreached => DurationRegressionBreached || ThroughputRegressionBreached; + + public IEnumerable BuildRegressionFailureMessages() + { + if (Baseline is null) + { + yield break; + } + + if (DurationRegressionBreached && DurationRegressionRatio is { } durationRatio) + { + var delta = (durationRatio - 1d) * 100d; + yield return $"{Result.Id} exceeded max duration budget: {Result.MaxMs:F2} ms vs baseline {Baseline.MaxMs:F2} ms (+{delta:F1}%)."; + } + + if (ThroughputRegressionBreached && ThroughputRegressionRatio is { } throughputRatio) + { + var delta = (throughputRatio - 1d) * 100d; + yield return $"{Result.Id} throughput regressed: min {Result.MinThroughputPerSecond:N0} /s vs baseline {Baseline.MinThroughputPerSecond:N0} /s (-{delta:F1}%)."; + } + } + + private static double? CalculateDurationRatio(double current, double? baseline) + { + if (!baseline.HasValue || baseline.Value <= 0d) + { + return null; + } + + return current / baseline.Value; + } + + private static double? CalculateThroughputRatio(double current, double? 
baseline) + { + if (!baseline.HasValue || baseline.Value <= 0d) + { + return null; + } + + if (current <= 0d) + { + return double.PositiveInfinity; + } + + return baseline.Value / current; + } +} diff --git a/src/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Reporting/PrometheusWriter.cs b/src/Bench/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Reporting/PrometheusWriter.cs similarity index 98% rename from src/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Reporting/PrometheusWriter.cs rename to src/Bench/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Reporting/PrometheusWriter.cs index ea6f445a..e906ca41 100644 --- a/src/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Reporting/PrometheusWriter.cs +++ b/src/Bench/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/Reporting/PrometheusWriter.cs @@ -1,83 +1,83 @@ -using System.Globalization; -using System.Text; - -namespace StellaOps.Bench.PolicyEngine.Reporting; - -internal static class PrometheusWriter -{ - public static void Write(string path, IReadOnlyList reports) - { - ArgumentException.ThrowIfNullOrWhiteSpace(path); - ArgumentNullException.ThrowIfNull(reports); - - var resolved = Path.GetFullPath(path); - var directory = Path.GetDirectoryName(resolved); - if (!string.IsNullOrEmpty(directory)) - { - Directory.CreateDirectory(directory); - } - - var builder = new StringBuilder(); - builder.AppendLine("# HELP policy_engine_bench_duration_ms Policy Engine benchmark duration metrics (milliseconds)."); - builder.AppendLine("# TYPE policy_engine_bench_duration_ms gauge"); - builder.AppendLine("# HELP policy_engine_bench_throughput_per_sec Policy Engine benchmark throughput metrics (findings per second)."); - builder.AppendLine("# TYPE policy_engine_bench_throughput_per_sec gauge"); - builder.AppendLine("# HELP policy_engine_bench_allocation_mb Policy Engine benchmark allocation metrics (megabytes)."); - builder.AppendLine("# TYPE policy_engine_bench_allocation_mb gauge"); - - foreach (var report in reports) - { - var scenarioLabel = Escape(report.Result.Id); - AppendMetric(builder, "policy_engine_bench_mean_ms", scenarioLabel, report.Result.MeanMs); - AppendMetric(builder, "policy_engine_bench_p95_ms", scenarioLabel, report.Result.P95Ms); - AppendMetric(builder, "policy_engine_bench_max_ms", scenarioLabel, report.Result.MaxMs); - AppendMetric(builder, "policy_engine_bench_threshold_ms", scenarioLabel, report.Result.ThresholdMs); - - AppendMetric(builder, "policy_engine_bench_mean_throughput_per_sec", scenarioLabel, report.Result.MeanThroughputPerSecond); - AppendMetric(builder, "policy_engine_bench_min_throughput_per_sec", scenarioLabel, report.Result.MinThroughputPerSecond); - AppendMetric(builder, "policy_engine_bench_min_throughput_threshold_per_sec", scenarioLabel, report.Result.MinThroughputThresholdPerSecond); - - AppendMetric(builder, "policy_engine_bench_max_allocated_mb", scenarioLabel, report.Result.MaxAllocatedMb); - AppendMetric(builder, "policy_engine_bench_max_allocated_threshold_mb", scenarioLabel, report.Result.MaxAllocatedThresholdMb); - - if (report.Baseline is { } baseline) - { - AppendMetric(builder, "policy_engine_bench_baseline_max_ms", scenarioLabel, baseline.MaxMs); - AppendMetric(builder, "policy_engine_bench_baseline_mean_ms", scenarioLabel, baseline.MeanMs); - AppendMetric(builder, "policy_engine_bench_baseline_min_throughput_per_sec", scenarioLabel, baseline.MinThroughputPerSecond); - } - - if (report.DurationRegressionRatio is { } durationRatio) - { - 
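The regression arithmetic in the BenchmarkScenarioReport shown above can be followed with a small worked example. The duration ratio divides the observed max by the baseline max, while the throughput ratio is inverted (baseline min over observed min) so that slower throughput also yields a ratio above 1; either ratio at or above the limit (1.15 by default) flags a breach. The numbers below are illustrative, with the baseline values borrowed from the baseline.csv row that appears later in this diff.

```csharp
using System;

// Same math as CalculateDurationRatio / CalculateThroughputRatio above,
// spelled out with concrete numbers.
const double RegressionLimit = 1.15;

double currentMaxMs = 1400.0, baselineMaxMs = 1280.1721;
double durationRatio = currentMaxMs / baselineMaxMs;                    // ≈ 1.09 → within limit

double currentMinThroughput = 650_000.0, baselineMinThroughput = 781_144.9726;
double throughputRatio = baselineMinThroughput / currentMinThroughput;  // ≈ 1.20 → breach

bool breached = durationRatio >= RegressionLimit || throughputRatio >= RegressionLimit;
Console.WriteLine($"duration x{durationRatio:F2}, throughput x{throughputRatio:F2}, breached={breached}");
// → duration x1.09, throughput x1.20, breached=True
```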
AppendMetric(builder, "policy_engine_bench_duration_regression_ratio", scenarioLabel, durationRatio); - } - - if (report.ThroughputRegressionRatio is { } throughputRatio) - { - AppendMetric(builder, "policy_engine_bench_throughput_regression_ratio", scenarioLabel, throughputRatio); - } - - AppendMetric(builder, "policy_engine_bench_regression_limit", scenarioLabel, report.RegressionLimit); - AppendMetric(builder, "policy_engine_bench_regression_breached", scenarioLabel, report.RegressionBreached ? 1 : 0); - } - - File.WriteAllText(resolved, builder.ToString(), Encoding.UTF8); - } - - private static void AppendMetric(StringBuilder builder, string metric, string scenario, double? value) - { - if (!value.HasValue) - { - return; - } - - builder.Append(metric); - builder.Append("{scenario=\""); - builder.Append(scenario); - builder.Append("\"} "); - builder.AppendLine(value.Value.ToString("G17", CultureInfo.InvariantCulture)); - } - - private static string Escape(string value) => - value.Replace("\\", "\\\\", StringComparison.Ordinal).Replace("\"", "\\\"", StringComparison.Ordinal); -} +using System.Globalization; +using System.Text; + +namespace StellaOps.Bench.PolicyEngine.Reporting; + +internal static class PrometheusWriter +{ + public static void Write(string path, IReadOnlyList reports) + { + ArgumentException.ThrowIfNullOrWhiteSpace(path); + ArgumentNullException.ThrowIfNull(reports); + + var resolved = Path.GetFullPath(path); + var directory = Path.GetDirectoryName(resolved); + if (!string.IsNullOrEmpty(directory)) + { + Directory.CreateDirectory(directory); + } + + var builder = new StringBuilder(); + builder.AppendLine("# HELP policy_engine_bench_duration_ms Policy Engine benchmark duration metrics (milliseconds)."); + builder.AppendLine("# TYPE policy_engine_bench_duration_ms gauge"); + builder.AppendLine("# HELP policy_engine_bench_throughput_per_sec Policy Engine benchmark throughput metrics (findings per second)."); + builder.AppendLine("# TYPE policy_engine_bench_throughput_per_sec gauge"); + builder.AppendLine("# HELP policy_engine_bench_allocation_mb Policy Engine benchmark allocation metrics (megabytes)."); + builder.AppendLine("# TYPE policy_engine_bench_allocation_mb gauge"); + + foreach (var report in reports) + { + var scenarioLabel = Escape(report.Result.Id); + AppendMetric(builder, "policy_engine_bench_mean_ms", scenarioLabel, report.Result.MeanMs); + AppendMetric(builder, "policy_engine_bench_p95_ms", scenarioLabel, report.Result.P95Ms); + AppendMetric(builder, "policy_engine_bench_max_ms", scenarioLabel, report.Result.MaxMs); + AppendMetric(builder, "policy_engine_bench_threshold_ms", scenarioLabel, report.Result.ThresholdMs); + + AppendMetric(builder, "policy_engine_bench_mean_throughput_per_sec", scenarioLabel, report.Result.MeanThroughputPerSecond); + AppendMetric(builder, "policy_engine_bench_min_throughput_per_sec", scenarioLabel, report.Result.MinThroughputPerSecond); + AppendMetric(builder, "policy_engine_bench_min_throughput_threshold_per_sec", scenarioLabel, report.Result.MinThroughputThresholdPerSecond); + + AppendMetric(builder, "policy_engine_bench_max_allocated_mb", scenarioLabel, report.Result.MaxAllocatedMb); + AppendMetric(builder, "policy_engine_bench_max_allocated_threshold_mb", scenarioLabel, report.Result.MaxAllocatedThresholdMb); + + if (report.Baseline is { } baseline) + { + AppendMetric(builder, "policy_engine_bench_baseline_max_ms", scenarioLabel, baseline.MaxMs); + AppendMetric(builder, "policy_engine_bench_baseline_mean_ms", scenarioLabel, 
baseline.MeanMs); + AppendMetric(builder, "policy_engine_bench_baseline_min_throughput_per_sec", scenarioLabel, baseline.MinThroughputPerSecond); + } + + if (report.DurationRegressionRatio is { } durationRatio) + { + AppendMetric(builder, "policy_engine_bench_duration_regression_ratio", scenarioLabel, durationRatio); + } + + if (report.ThroughputRegressionRatio is { } throughputRatio) + { + AppendMetric(builder, "policy_engine_bench_throughput_regression_ratio", scenarioLabel, throughputRatio); + } + + AppendMetric(builder, "policy_engine_bench_regression_limit", scenarioLabel, report.RegressionLimit); + AppendMetric(builder, "policy_engine_bench_regression_breached", scenarioLabel, report.RegressionBreached ? 1 : 0); + } + + File.WriteAllText(resolved, builder.ToString(), Encoding.UTF8); + } + + private static void AppendMetric(StringBuilder builder, string metric, string scenario, double? value) + { + if (!value.HasValue) + { + return; + } + + builder.Append(metric); + builder.Append("{scenario=\""); + builder.Append(scenario); + builder.Append("\"} "); + builder.AppendLine(value.Value.ToString("G17", CultureInfo.InvariantCulture)); + } + + private static string Escape(string value) => + value.Replace("\\", "\\\\", StringComparison.Ordinal).Replace("\"", "\\\"", StringComparison.Ordinal); +} diff --git a/src/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/ScenarioResult.cs b/src/Bench/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/ScenarioResult.cs similarity index 96% rename from src/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/ScenarioResult.cs rename to src/Bench/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/ScenarioResult.cs index 8486133c..bdc6a8f2 100644 --- a/src/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/ScenarioResult.cs +++ b/src/Bench/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/ScenarioResult.cs @@ -1,110 +1,110 @@ -using System.Globalization; - -namespace StellaOps.Bench.PolicyEngine; - -internal sealed record ScenarioResult( - string Id, - string Label, - int Iterations, - int FindingCount, - double MeanMs, - double P95Ms, - double MaxMs, - double MeanThroughputPerSecond, - double MinThroughputPerSecond, - double MaxAllocatedMb, - double? ThresholdMs, - double? MinThroughputThresholdPerSecond, - double? MaxAllocatedThresholdMb) -{ - public string IdColumn => Id.Length <= 28 ? 
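For orientation, a minimal sketch of the exposition line format the PrometheusWriter above emits: one gauge sample per metric with the scenario id as the only label, the label value escaped for backslashes and quotes, and the number rendered with "G17" so it round-trips exactly. The metric and scenario names come from this file; the value is illustrative.

```csharp
using System;
using System.Globalization;

// Same escaping order as PrometheusWriter.Escape above: backslash first, then quote.
static string MetricLine(string metric, string scenario, double value)
{
    var label = scenario
        .Replace("\\", "\\\\", StringComparison.Ordinal)
        .Replace("\"", "\\\"", StringComparison.Ordinal);
    return $"{metric}{{scenario=\"{label}\"}} {value.ToString("G17", CultureInfo.InvariantCulture)}";
}

Console.WriteLine(MetricLine("policy_engine_bench_max_ms", "policy_eval_baseline", 1280.1721));
// e.g. policy_engine_bench_max_ms{scenario="policy_eval_baseline"} 1280.1721
// (G17 may append extra round-trip digits; the point is lossless precision.)
```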
Id.PadRight(28) : Id[..28]; - public string FindingsColumn => FindingCount.ToString("N0", CultureInfo.InvariantCulture).PadLeft(12); - public string MeanColumn => MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10); - public string P95Column => P95Ms.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10); - public string MaxColumn => MaxMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10); - public string MinThroughputColumn => (MinThroughputPerSecond / 1_000d).ToString("F2", CultureInfo.InvariantCulture).PadLeft(11); - public string AllocatedColumn => MaxAllocatedMb.ToString("F2", CultureInfo.InvariantCulture).PadLeft(9); -} - -internal readonly record struct DurationStatistics(double MeanMs, double P95Ms, double MaxMs) -{ - public static DurationStatistics From(IReadOnlyList durations) - { - if (durations.Count == 0) - { - return new DurationStatistics(0, 0, 0); - } - - var sorted = durations.ToArray(); - Array.Sort(sorted); - - var total = 0d; - foreach (var value in durations) - { - total += value; - } - - var mean = total / durations.Count; - var p95 = Percentile(sorted, 95); - var max = sorted[^1]; - - return new DurationStatistics(mean, p95, max); - } - - private static double Percentile(IReadOnlyList sorted, double percentile) - { - if (sorted.Count == 0) - { - return 0; - } - - var rank = (percentile / 100d) * (sorted.Count - 1); - var lower = (int)Math.Floor(rank); - var upper = (int)Math.Ceiling(rank); - var weight = rank - lower; - - if (upper >= sorted.Count) - { - return sorted[lower]; - } - - return sorted[lower] + weight * (sorted[upper] - sorted[lower]); - } -} - -internal readonly record struct ThroughputStatistics(double MeanPerSecond, double MinPerSecond) -{ - public static ThroughputStatistics From(IReadOnlyList values) - { - if (values.Count == 0) - { - return new ThroughputStatistics(0, 0); - } - - var total = 0d; - var min = double.MaxValue; - - foreach (var value in values) - { - total += value; - min = Math.Min(min, value); - } - - var mean = total / values.Count; - return new ThroughputStatistics(mean, min); - } -} - -internal readonly record struct AllocationStatistics(double MaxAllocatedMb) -{ - public static AllocationStatistics From(IReadOnlyList values) - { - var max = 0d; - foreach (var value in values) - { - max = Math.Max(max, value); - } - - return new AllocationStatistics(max); - } -} +using System.Globalization; + +namespace StellaOps.Bench.PolicyEngine; + +internal sealed record ScenarioResult( + string Id, + string Label, + int Iterations, + int FindingCount, + double MeanMs, + double P95Ms, + double MaxMs, + double MeanThroughputPerSecond, + double MinThroughputPerSecond, + double MaxAllocatedMb, + double? ThresholdMs, + double? MinThroughputThresholdPerSecond, + double? MaxAllocatedThresholdMb) +{ + public string IdColumn => Id.Length <= 28 ? 
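The Percentile helper in DurationStatistics (visible above, and repeated on the renamed side just below) interpolates linearly between the two nearest order statistics. A worked example with assumed sample durations:

```csharp
using System;

// Assumed samples (ms), already sorted as Percentile expects.
double[] sorted = { 10.0, 12.0, 14.0, 20.0, 40.0 };

// rank = p/100 * (n - 1) = 0.95 * 4 = 3.8 → interpolate between index 3 and 4.
double percentile = 95;
double rank = percentile / 100d * (sorted.Length - 1);
int lower = (int)Math.Floor(rank);      // 3
int upper = (int)Math.Ceiling(rank);    // 4
double weight = rank - lower;           // 0.8

double p95 = sorted[lower] + weight * (sorted[upper] - sorted[lower]);
Console.WriteLine(p95);                 // 36: 20 + 0.8 * (40 - 20)
```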
Id.PadRight(28) : Id[..28]; + public string FindingsColumn => FindingCount.ToString("N0", CultureInfo.InvariantCulture).PadLeft(12); + public string MeanColumn => MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10); + public string P95Column => P95Ms.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10); + public string MaxColumn => MaxMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10); + public string MinThroughputColumn => (MinThroughputPerSecond / 1_000d).ToString("F2", CultureInfo.InvariantCulture).PadLeft(11); + public string AllocatedColumn => MaxAllocatedMb.ToString("F2", CultureInfo.InvariantCulture).PadLeft(9); +} + +internal readonly record struct DurationStatistics(double MeanMs, double P95Ms, double MaxMs) +{ + public static DurationStatistics From(IReadOnlyList durations) + { + if (durations.Count == 0) + { + return new DurationStatistics(0, 0, 0); + } + + var sorted = durations.ToArray(); + Array.Sort(sorted); + + var total = 0d; + foreach (var value in durations) + { + total += value; + } + + var mean = total / durations.Count; + var p95 = Percentile(sorted, 95); + var max = sorted[^1]; + + return new DurationStatistics(mean, p95, max); + } + + private static double Percentile(IReadOnlyList sorted, double percentile) + { + if (sorted.Count == 0) + { + return 0; + } + + var rank = (percentile / 100d) * (sorted.Count - 1); + var lower = (int)Math.Floor(rank); + var upper = (int)Math.Ceiling(rank); + var weight = rank - lower; + + if (upper >= sorted.Count) + { + return sorted[lower]; + } + + return sorted[lower] + weight * (sorted[upper] - sorted[lower]); + } +} + +internal readonly record struct ThroughputStatistics(double MeanPerSecond, double MinPerSecond) +{ + public static ThroughputStatistics From(IReadOnlyList values) + { + if (values.Count == 0) + { + return new ThroughputStatistics(0, 0); + } + + var total = 0d; + var min = double.MaxValue; + + foreach (var value in values) + { + total += value; + min = Math.Min(min, value); + } + + var mean = total / values.Count; + return new ThroughputStatistics(mean, min); + } +} + +internal readonly record struct AllocationStatistics(double MaxAllocatedMb) +{ + public static AllocationStatistics From(IReadOnlyList values) + { + var max = 0d; + foreach (var value in values) + { + max = Math.Max(max, value); + } + + return new AllocationStatistics(max); + } +} diff --git a/src/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/StellaOps.Bench.PolicyEngine.csproj b/src/Bench/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/StellaOps.Bench.PolicyEngine.csproj similarity index 69% rename from src/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/StellaOps.Bench.PolicyEngine.csproj rename to src/Bench/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/StellaOps.Bench.PolicyEngine.csproj index dbe05703..3ca71c14 100644 --- a/src/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/StellaOps.Bench.PolicyEngine.csproj +++ b/src/Bench/StellaOps.Bench/PolicyEngine/StellaOps.Bench.PolicyEngine/StellaOps.Bench.PolicyEngine.csproj @@ -1,3 +1,4 @@ + Exe @@ -9,6 +10,6 @@ - + - + \ No newline at end of file diff --git a/src/StellaOps.Bench/PolicyEngine/baseline.csv b/src/Bench/StellaOps.Bench/PolicyEngine/baseline.csv similarity index 99% rename from src/StellaOps.Bench/PolicyEngine/baseline.csv rename to src/Bench/StellaOps.Bench/PolicyEngine/baseline.csv index 79cdb0d4..b5be42ff 100644 --- a/src/StellaOps.Bench/PolicyEngine/baseline.csv +++ 
b/src/Bench/StellaOps.Bench/PolicyEngine/baseline.csv @@ -1,2 +1,2 @@ -scenario,iterations,findings,mean_ms,p95_ms,max_ms,mean_throughput_per_sec,min_throughput_per_sec,max_allocated_mb -policy_eval_baseline,3,1000000,1109.3542,1257.7493,1280.1721,912094.5581,781144.9726,563.6901 +scenario,iterations,findings,mean_ms,p95_ms,max_ms,mean_throughput_per_sec,min_throughput_per_sec,max_allocated_mb +policy_eval_baseline,3,1000000,1109.3542,1257.7493,1280.1721,912094.5581,781144.9726,563.6901 diff --git a/src/StellaOps.Bench/PolicyEngine/config.json b/src/Bench/StellaOps.Bench/PolicyEngine/config.json similarity index 96% rename from src/StellaOps.Bench/PolicyEngine/config.json rename to src/Bench/StellaOps.Bench/PolicyEngine/config.json index e5242fdb..648ae309 100644 --- a/src/StellaOps.Bench/PolicyEngine/config.json +++ b/src/Bench/StellaOps.Bench/PolicyEngine/config.json @@ -1,19 +1,19 @@ -{ - "iterations": 3, - "thresholdMs": 20000, - "minThroughputPerSecond": 60000, - "maxAllocatedMb": 900, - "scenarios": [ - { - "id": "policy_eval_baseline", - "label": "Policy evaluation (100k components, 1M findings)", - "policyPath": "docs/examples/policies/baseline.yaml", - "componentCount": 100000, - "advisoriesPerComponent": 10, - "seed": 20251026, - "thresholdMs": 20000, - "minThroughputPerSecond": 60000, - "maxAllocatedMb": 900 - } - ] -} +{ + "iterations": 3, + "thresholdMs": 20000, + "minThroughputPerSecond": 60000, + "maxAllocatedMb": 900, + "scenarios": [ + { + "id": "policy_eval_baseline", + "label": "Policy evaluation (100k components, 1M findings)", + "policyPath": "docs/examples/policies/baseline.yaml", + "componentCount": 100000, + "advisoriesPerComponent": 10, + "seed": 20251026, + "thresholdMs": 20000, + "minThroughputPerSecond": 60000, + "maxAllocatedMb": 900 + } + ] +} diff --git a/src/StellaOps.Bench/Scanner.Analyzers/README.md b/src/Bench/StellaOps.Bench/Scanner.Analyzers/README.md similarity index 94% rename from src/StellaOps.Bench/Scanner.Analyzers/README.md rename to src/Bench/StellaOps.Bench/Scanner.Analyzers/README.md index 340b23fd..26021639 100644 --- a/src/StellaOps.Bench/Scanner.Analyzers/README.md +++ b/src/Bench/StellaOps.Bench/Scanner.Analyzers/README.md @@ -16,10 +16,10 @@ The bench harness exercises the language analyzers against representative filesy ```bash dotnet run \ - --project src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/StellaOps.Bench.ScannerAnalyzers.csproj \ + --project src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/StellaOps.Bench.ScannerAnalyzers.csproj \ -- \ --repo-root . 
\ - --out src/StellaOps.Bench/Scanner.Analyzers/baseline.csv \ + --out src/Bench/StellaOps.Bench/Scanner.Analyzers/baseline.csv \ --json out/bench/scanner-analyzers/latest.json \ --prom out/bench/scanner-analyzers/latest.prom \ --commit "$(git rev-parse HEAD)" diff --git a/src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers.Tests/BaselineLoaderTests.cs b/src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers.Tests/BaselineLoaderTests.cs similarity index 97% rename from src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers.Tests/BaselineLoaderTests.cs rename to src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers.Tests/BaselineLoaderTests.cs index b479eb9a..81935dfe 100644 --- a/src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers.Tests/BaselineLoaderTests.cs +++ b/src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers.Tests/BaselineLoaderTests.cs @@ -1,37 +1,37 @@ -using System.Text; -using StellaOps.Bench.ScannerAnalyzers.Baseline; -using Xunit; - -namespace StellaOps.Bench.ScannerAnalyzers.Tests; - -public sealed class BaselineLoaderTests -{ - [Fact] - public async Task LoadAsync_ReadsCsvIntoDictionary() - { - var csv = """ - scenario,iterations,sample_count,mean_ms,p95_ms,max_ms - node_monorepo_walk,5,4,9.4303,36.1354,45.0012 - python_site_packages_walk,5,10,12.1000,18.2000,26.3000 - """; - - var path = await WriteTempFileAsync(csv); - - var result = await BaselineLoader.LoadAsync(path, CancellationToken.None); - - Assert.Equal(2, result.Count); - var entry = Assert.Contains("node_monorepo_walk", result); - Assert.Equal(5, entry.Iterations); - Assert.Equal(4, entry.SampleCount); - Assert.Equal(9.4303, entry.MeanMs, 4); - Assert.Equal(36.1354, entry.P95Ms, 4); - Assert.Equal(45.0012, entry.MaxMs, 4); - } - - private static async Task WriteTempFileAsync(string content) - { - var path = Path.Combine(Path.GetTempPath(), $"baseline-{Guid.NewGuid():N}.csv"); - await File.WriteAllTextAsync(path, content, Encoding.UTF8); - return path; - } -} +using System.Text; +using StellaOps.Bench.ScannerAnalyzers.Baseline; +using Xunit; + +namespace StellaOps.Bench.ScannerAnalyzers.Tests; + +public sealed class BaselineLoaderTests +{ + [Fact] + public async Task LoadAsync_ReadsCsvIntoDictionary() + { + var csv = """ + scenario,iterations,sample_count,mean_ms,p95_ms,max_ms + node_monorepo_walk,5,4,9.4303,36.1354,45.0012 + python_site_packages_walk,5,10,12.1000,18.2000,26.3000 + """; + + var path = await WriteTempFileAsync(csv); + + var result = await BaselineLoader.LoadAsync(path, CancellationToken.None); + + Assert.Equal(2, result.Count); + var entry = Assert.Contains("node_monorepo_walk", result); + Assert.Equal(5, entry.Iterations); + Assert.Equal(4, entry.SampleCount); + Assert.Equal(9.4303, entry.MeanMs, 4); + Assert.Equal(36.1354, entry.P95Ms, 4); + Assert.Equal(45.0012, entry.MaxMs, 4); + } + + private static async Task WriteTempFileAsync(string content) + { + var path = Path.Combine(Path.GetTempPath(), $"baseline-{Guid.NewGuid():N}.csv"); + await File.WriteAllTextAsync(path, content, Encoding.UTF8); + return path; + } +} diff --git a/src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers.Tests/BenchmarkJsonWriterTests.cs b/src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers.Tests/BenchmarkJsonWriterTests.cs similarity index 97% rename from 
src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers.Tests/BenchmarkJsonWriterTests.cs rename to src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers.Tests/BenchmarkJsonWriterTests.cs index 6fd4c713..7290a45a 100644 --- a/src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers.Tests/BenchmarkJsonWriterTests.cs +++ b/src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers.Tests/BenchmarkJsonWriterTests.cs @@ -1,41 +1,41 @@ -using System.Text.Json; -using StellaOps.Bench.ScannerAnalyzers; -using StellaOps.Bench.ScannerAnalyzers.Baseline; -using StellaOps.Bench.ScannerAnalyzers.Reporting; -using Xunit; - -namespace StellaOps.Bench.ScannerAnalyzers.Tests; - -public sealed class BenchmarkJsonWriterTests -{ - [Fact] - public async Task WriteAsync_EmitsMetadataAndScenarioDetails() - { - var metadata = new BenchmarkJsonMetadata("1.0", DateTimeOffset.Parse("2025-10-23T12:00:00Z"), "abc123", "ci"); - var result = new ScenarioResult( - "scenario", - "Scenario", - SampleCount: 5, - MeanMs: 10, - P95Ms: 12, - MaxMs: 20, - Iterations: 5, - ThresholdMs: 5000); - var baseline = new BaselineEntry("scenario", 5, 5, 9, 11, 10); - var report = new BenchmarkScenarioReport(result, baseline, 1.2); - - var path = Path.Combine(Path.GetTempPath(), $"bench-{Guid.NewGuid():N}.json"); - await BenchmarkJsonWriter.WriteAsync(path, metadata, new[] { report }, CancellationToken.None); - - using var document = JsonDocument.Parse(await File.ReadAllTextAsync(path)); - var root = document.RootElement; - - Assert.Equal("1.0", root.GetProperty("schemaVersion").GetString()); - Assert.Equal("abc123", root.GetProperty("commit").GetString()); - var scenario = root.GetProperty("scenarios")[0]; - Assert.Equal("scenario", scenario.GetProperty("id").GetString()); - Assert.Equal(20, scenario.GetProperty("maxMs").GetDouble()); - Assert.Equal(10, scenario.GetProperty("baseline").GetProperty("maxMs").GetDouble()); - Assert.True(scenario.GetProperty("regression").GetProperty("breached").GetBoolean()); - } -} +using System.Text.Json; +using StellaOps.Bench.ScannerAnalyzers; +using StellaOps.Bench.ScannerAnalyzers.Baseline; +using StellaOps.Bench.ScannerAnalyzers.Reporting; +using Xunit; + +namespace StellaOps.Bench.ScannerAnalyzers.Tests; + +public sealed class BenchmarkJsonWriterTests +{ + [Fact] + public async Task WriteAsync_EmitsMetadataAndScenarioDetails() + { + var metadata = new BenchmarkJsonMetadata("1.0", DateTimeOffset.Parse("2025-10-23T12:00:00Z"), "abc123", "ci"); + var result = new ScenarioResult( + "scenario", + "Scenario", + SampleCount: 5, + MeanMs: 10, + P95Ms: 12, + MaxMs: 20, + Iterations: 5, + ThresholdMs: 5000); + var baseline = new BaselineEntry("scenario", 5, 5, 9, 11, 10); + var report = new BenchmarkScenarioReport(result, baseline, 1.2); + + var path = Path.Combine(Path.GetTempPath(), $"bench-{Guid.NewGuid():N}.json"); + await BenchmarkJsonWriter.WriteAsync(path, metadata, new[] { report }, CancellationToken.None); + + using var document = JsonDocument.Parse(await File.ReadAllTextAsync(path)); + var root = document.RootElement; + + Assert.Equal("1.0", root.GetProperty("schemaVersion").GetString()); + Assert.Equal("abc123", root.GetProperty("commit").GetString()); + var scenario = root.GetProperty("scenarios")[0]; + Assert.Equal("scenario", scenario.GetProperty("id").GetString()); + Assert.Equal(20, scenario.GetProperty("maxMs").GetDouble()); + Assert.Equal(10, scenario.GetProperty("baseline").GetProperty("maxMs").GetDouble()); + 
Assert.True(scenario.GetProperty("regression").GetProperty("breached").GetBoolean()); + } +} diff --git a/src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers.Tests/BenchmarkScenarioReportTests.cs b/src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers.Tests/BenchmarkScenarioReportTests.cs similarity index 96% rename from src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers.Tests/BenchmarkScenarioReportTests.cs rename to src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers.Tests/BenchmarkScenarioReportTests.cs index e0da853a..0bf3dedf 100644 --- a/src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers.Tests/BenchmarkScenarioReportTests.cs +++ b/src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers.Tests/BenchmarkScenarioReportTests.cs @@ -1,58 +1,58 @@ -using StellaOps.Bench.ScannerAnalyzers; -using StellaOps.Bench.ScannerAnalyzers.Baseline; -using StellaOps.Bench.ScannerAnalyzers.Reporting; -using Xunit; - -namespace StellaOps.Bench.ScannerAnalyzers.Tests; - -public sealed class BenchmarkScenarioReportTests -{ - [Fact] - public void RegressionRatio_ComputedWhenBaselinePresent() - { - var result = new ScenarioResult( - "scenario", - "Scenario", - SampleCount: 5, - MeanMs: 10, - P95Ms: 12, - MaxMs: 20, - Iterations: 5, - ThresholdMs: 5000); - - var baseline = new BaselineEntry( - "scenario", - Iterations: 5, - SampleCount: 5, - MeanMs: 8, - P95Ms: 11, - MaxMs: 15); - - var report = new BenchmarkScenarioReport(result, baseline, regressionLimit: 1.2); - - Assert.True(report.MaxRegressionRatio.HasValue); - Assert.Equal(20d / 15d, report.MaxRegressionRatio.Value, 6); - Assert.True(report.RegressionBreached); - Assert.Contains("+33.3%", report.BuildRegressionFailureMessage()); - } - - [Fact] - public void RegressionRatio_NullWhenBaselineMissing() - { - var result = new ScenarioResult( - "scenario", - "Scenario", - SampleCount: 5, - MeanMs: 10, - P95Ms: 12, - MaxMs: 20, - Iterations: 5, - ThresholdMs: 5000); - - var report = new BenchmarkScenarioReport(result, baseline: null, regressionLimit: 1.2); - - Assert.Null(report.MaxRegressionRatio); - Assert.False(report.RegressionBreached); - Assert.Null(report.BuildRegressionFailureMessage()); - } -} +using StellaOps.Bench.ScannerAnalyzers; +using StellaOps.Bench.ScannerAnalyzers.Baseline; +using StellaOps.Bench.ScannerAnalyzers.Reporting; +using Xunit; + +namespace StellaOps.Bench.ScannerAnalyzers.Tests; + +public sealed class BenchmarkScenarioReportTests +{ + [Fact] + public void RegressionRatio_ComputedWhenBaselinePresent() + { + var result = new ScenarioResult( + "scenario", + "Scenario", + SampleCount: 5, + MeanMs: 10, + P95Ms: 12, + MaxMs: 20, + Iterations: 5, + ThresholdMs: 5000); + + var baseline = new BaselineEntry( + "scenario", + Iterations: 5, + SampleCount: 5, + MeanMs: 8, + P95Ms: 11, + MaxMs: 15); + + var report = new BenchmarkScenarioReport(result, baseline, regressionLimit: 1.2); + + Assert.True(report.MaxRegressionRatio.HasValue); + Assert.Equal(20d / 15d, report.MaxRegressionRatio.Value, 6); + Assert.True(report.RegressionBreached); + Assert.Contains("+33.3%", report.BuildRegressionFailureMessage()); + } + + [Fact] + public void RegressionRatio_NullWhenBaselineMissing() + { + var result = new ScenarioResult( + "scenario", + "Scenario", + SampleCount: 5, + MeanMs: 10, + P95Ms: 12, + MaxMs: 20, + Iterations: 5, + ThresholdMs: 5000); + + var report = new BenchmarkScenarioReport(result, baseline: null, 
regressionLimit: 1.2); + + Assert.Null(report.MaxRegressionRatio); + Assert.False(report.RegressionBreached); + Assert.Null(report.BuildRegressionFailureMessage()); + } +} diff --git a/src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers.Tests/PrometheusWriterTests.cs b/src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers.Tests/PrometheusWriterTests.cs similarity index 97% rename from src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers.Tests/PrometheusWriterTests.cs rename to src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers.Tests/PrometheusWriterTests.cs index 0e1dfd64..8b803399 100644 --- a/src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers.Tests/PrometheusWriterTests.cs +++ b/src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers.Tests/PrometheusWriterTests.cs @@ -1,32 +1,32 @@ -using StellaOps.Bench.ScannerAnalyzers; -using StellaOps.Bench.ScannerAnalyzers.Baseline; -using StellaOps.Bench.ScannerAnalyzers.Reporting; -using Xunit; - -namespace StellaOps.Bench.ScannerAnalyzers.Tests; - -public sealed class PrometheusWriterTests -{ - [Fact] - public void Write_EmitsMetricsForScenario() - { - var result = new ScenarioResult( - "scenario_a", - "Scenario A", - SampleCount: 5, - MeanMs: 10, - P95Ms: 12, - MaxMs: 20, - Iterations: 5, - ThresholdMs: 5000); - var baseline = new BaselineEntry("scenario_a", 5, 5, 9, 11, 18); - var report = new BenchmarkScenarioReport(result, baseline, 1.2); - - var path = Path.Combine(Path.GetTempPath(), $"metrics-{Guid.NewGuid():N}.prom"); - PrometheusWriter.Write(path, new[] { report }); - - var contents = File.ReadAllText(path); - Assert.Contains("scanner_analyzer_bench_max_ms{scenario=\"scenario_a\"} 20", contents); - Assert.Contains("scanner_analyzer_bench_regression_ratio{scenario=\"scenario_a\"}", contents); - } -} +using StellaOps.Bench.ScannerAnalyzers; +using StellaOps.Bench.ScannerAnalyzers.Baseline; +using StellaOps.Bench.ScannerAnalyzers.Reporting; +using Xunit; + +namespace StellaOps.Bench.ScannerAnalyzers.Tests; + +public sealed class PrometheusWriterTests +{ + [Fact] + public void Write_EmitsMetricsForScenario() + { + var result = new ScenarioResult( + "scenario_a", + "Scenario A", + SampleCount: 5, + MeanMs: 10, + P95Ms: 12, + MaxMs: 20, + Iterations: 5, + ThresholdMs: 5000); + var baseline = new BaselineEntry("scenario_a", 5, 5, 9, 11, 18); + var report = new BenchmarkScenarioReport(result, baseline, 1.2); + + var path = Path.Combine(Path.GetTempPath(), $"metrics-{Guid.NewGuid():N}.prom"); + PrometheusWriter.Write(path, new[] { report }); + + var contents = File.ReadAllText(path); + Assert.Contains("scanner_analyzer_bench_max_ms{scenario=\"scenario_a\"} 20", contents); + Assert.Contains("scanner_analyzer_bench_regression_ratio{scenario=\"scenario_a\"}", contents); + } +} diff --git a/src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers.Tests/StellaOps.Bench.ScannerAnalyzers.Tests.csproj b/src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers.Tests/StellaOps.Bench.ScannerAnalyzers.Tests.csproj similarity index 97% rename from src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers.Tests/StellaOps.Bench.ScannerAnalyzers.Tests.csproj rename to src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers.Tests/StellaOps.Bench.ScannerAnalyzers.Tests.csproj index e2ccb99b..d41c8df5 100644 --- 
a/src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers.Tests/StellaOps.Bench.ScannerAnalyzers.Tests.csproj +++ b/src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers.Tests/StellaOps.Bench.ScannerAnalyzers.Tests.csproj @@ -1,26 +1,26 @@ - - - net10.0 - enable - enable - preview - true - - - - - - - runtime; build; native; contentfiles; analyzers; buildtransitive - all - - - runtime; build; native; contentfiles; analyzers; buildtransitive - all - - - - - - - + + + net10.0 + enable + enable + preview + true + + + + + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + runtime; build; native; contentfiles; analyzers; buildtransitive + all + + + + + + + diff --git a/src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Baseline/BaselineEntry.cs b/src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Baseline/BaselineEntry.cs similarity index 95% rename from src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Baseline/BaselineEntry.cs rename to src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Baseline/BaselineEntry.cs index 37e69949..90066821 100644 --- a/src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Baseline/BaselineEntry.cs +++ b/src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Baseline/BaselineEntry.cs @@ -1,9 +1,9 @@ -namespace StellaOps.Bench.ScannerAnalyzers.Baseline; - -internal sealed record BaselineEntry( - string ScenarioId, - int Iterations, - int SampleCount, - double MeanMs, - double P95Ms, - double MaxMs); +namespace StellaOps.Bench.ScannerAnalyzers.Baseline; + +internal sealed record BaselineEntry( + string ScenarioId, + int Iterations, + int SampleCount, + double MeanMs, + double P95Ms, + double MaxMs); diff --git a/src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Baseline/BaselineLoader.cs b/src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Baseline/BaselineLoader.cs similarity index 97% rename from src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Baseline/BaselineLoader.cs rename to src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Baseline/BaselineLoader.cs index db39b57e..0462bde1 100644 --- a/src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Baseline/BaselineLoader.cs +++ b/src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Baseline/BaselineLoader.cs @@ -1,88 +1,88 @@ -using System.Globalization; - -namespace StellaOps.Bench.ScannerAnalyzers.Baseline; - -internal static class BaselineLoader -{ - public static async Task> LoadAsync(string path, CancellationToken cancellationToken) - { - if (string.IsNullOrWhiteSpace(path)) - { - throw new ArgumentException("Baseline path must be provided.", nameof(path)); - } - - var resolved = Path.GetFullPath(path); - if (!File.Exists(resolved)) - { - throw new FileNotFoundException($"Baseline file not found at {resolved}", resolved); - } - - var result = new Dictionary(StringComparer.OrdinalIgnoreCase); - - await using var stream = new FileStream(resolved, FileMode.Open, FileAccess.Read, FileShare.Read); - using var reader = new StreamReader(stream); - string? 
line; - var isFirst = true; - - while ((line = await reader.ReadLineAsync().ConfigureAwait(false)) is not null) - { - cancellationToken.ThrowIfCancellationRequested(); - if (string.IsNullOrWhiteSpace(line)) - { - continue; - } - - if (isFirst) - { - isFirst = false; - if (line.StartsWith("scenario,", StringComparison.OrdinalIgnoreCase)) - { - continue; - } - } - - var entry = ParseLine(line); - result[entry.ScenarioId] = entry; - } - - return result; - } - - private static BaselineEntry ParseLine(string line) - { - var parts = line.Split(',', StringSplitOptions.TrimEntries); - if (parts.Length < 6) - { - throw new InvalidDataException($"Baseline CSV row malformed: '{line}'"); - } - - var scenarioId = parts[0]; - var iterations = ParseInt(parts[1], nameof(BaselineEntry.Iterations)); - var sampleCount = ParseInt(parts[2], nameof(BaselineEntry.SampleCount)); - var meanMs = ParseDouble(parts[3], nameof(BaselineEntry.MeanMs)); - var p95Ms = ParseDouble(parts[4], nameof(BaselineEntry.P95Ms)); - var maxMs = ParseDouble(parts[5], nameof(BaselineEntry.MaxMs)); - - return new BaselineEntry(scenarioId, iterations, sampleCount, meanMs, p95Ms, maxMs); - } - - private static int ParseInt(string value, string field) - { - if (!int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsed)) - { - throw new InvalidDataException($"Failed to parse integer {field} from '{value}'."); - } - - return parsed; - } - - private static double ParseDouble(string value, string field) - { - if (!double.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out var parsed)) - { - throw new InvalidDataException($"Failed to parse double {field} from '{value}'."); - } - - return parsed; - } -} +using System.Globalization; + +namespace StellaOps.Bench.ScannerAnalyzers.Baseline; + +internal static class BaselineLoader +{ + public static async Task> LoadAsync(string path, CancellationToken cancellationToken) + { + if (string.IsNullOrWhiteSpace(path)) + { + throw new ArgumentException("Baseline path must be provided.", nameof(path)); + } + + var resolved = Path.GetFullPath(path); + if (!File.Exists(resolved)) + { + throw new FileNotFoundException($"Baseline file not found at {resolved}", resolved); + } + + var result = new Dictionary(StringComparer.OrdinalIgnoreCase); + + await using var stream = new FileStream(resolved, FileMode.Open, FileAccess.Read, FileShare.Read); + using var reader = new StreamReader(stream); + string? 
line; + var isFirst = true; + + while ((line = await reader.ReadLineAsync().ConfigureAwait(false)) is not null) + { + cancellationToken.ThrowIfCancellationRequested(); + if (string.IsNullOrWhiteSpace(line)) + { + continue; + } + + if (isFirst) + { + isFirst = false; + if (line.StartsWith("scenario,", StringComparison.OrdinalIgnoreCase)) + { + continue; + } + } + + var entry = ParseLine(line); + result[entry.ScenarioId] = entry; + } + + return result; + } + + private static BaselineEntry ParseLine(string line) + { + var parts = line.Split(',', StringSplitOptions.TrimEntries); + if (parts.Length < 6) + { + throw new InvalidDataException($"Baseline CSV row malformed: '{line}'"); + } + + var scenarioId = parts[0]; + var iterations = ParseInt(parts[1], nameof(BaselineEntry.Iterations)); + var sampleCount = ParseInt(parts[2], nameof(BaselineEntry.SampleCount)); + var meanMs = ParseDouble(parts[3], nameof(BaselineEntry.MeanMs)); + var p95Ms = ParseDouble(parts[4], nameof(BaselineEntry.P95Ms)); + var maxMs = ParseDouble(parts[5], nameof(BaselineEntry.MaxMs)); + + return new BaselineEntry(scenarioId, iterations, sampleCount, meanMs, p95Ms, maxMs); + } + + private static int ParseInt(string value, string field) + { + if (!int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsed)) + { + throw new InvalidDataException($"Failed to parse integer {field} from '{value}'."); + } + + return parsed; + } + + private static double ParseDouble(string value, string field) + { + if (!double.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out var parsed)) + { + throw new InvalidDataException($"Failed to parse double {field} from '{value}'."); + } + + return parsed; + } +} diff --git a/src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/BenchmarkConfig.cs b/src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/BenchmarkConfig.cs similarity index 96% rename from src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/BenchmarkConfig.cs rename to src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/BenchmarkConfig.cs index 48184508..15b66c6d 100644 --- a/src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/BenchmarkConfig.cs +++ b/src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/BenchmarkConfig.cs @@ -1,104 +1,104 @@ -using System.Text.Json; -using System.Text.Json.Serialization; - -namespace StellaOps.Bench.ScannerAnalyzers; - -internal sealed record BenchmarkConfig -{ - [JsonPropertyName("iterations")] - public int? Iterations { get; init; } - - [JsonPropertyName("thresholdMs")] - public double? 
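A hedged usage sketch for the BaselineLoader above. The baseline path mirrors the renamed location used elsewhere in this harness, the scenario id comes from the committed baseline, and the snippet assumes it runs inside the bench project since the types are internal.

```csharp
using System;
using System.Threading;
using StellaOps.Bench.ScannerAnalyzers.Baseline;

// Illustrative only: the loader skips the header row and blank lines, and
// scenario ids are compared case-insensitively.
var baseline = await BaselineLoader.LoadAsync(
    "src/Bench/StellaOps.Bench/Scanner.Analyzers/baseline.csv",
    CancellationToken.None);

if (baseline.TryGetValue("node_monorepo_walk", out var entry))
{
    Console.WriteLine($"baseline max {entry.MaxMs:F2} ms over {entry.SampleCount} samples");
}
```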
ThresholdMs { get; init; } - - [JsonPropertyName("scenarios")] - public List Scenarios { get; init; } = new(); - - public static async Task LoadAsync(string path) - { - if (string.IsNullOrWhiteSpace(path)) - { - throw new ArgumentException("Config path is required.", nameof(path)); - } - - await using var stream = File.OpenRead(path); - var config = await JsonSerializer.DeserializeAsync(stream, SerializerOptions).ConfigureAwait(false); - if (config is null) - { - throw new InvalidOperationException($"Failed to parse benchmark config '{path}'."); - } - - if (config.Scenarios.Count == 0) - { - throw new InvalidOperationException("config.scenarios must declare at least one scenario."); - } - - foreach (var scenario in config.Scenarios) - { - scenario.Validate(); - } - - return config; - } - - private static JsonSerializerOptions SerializerOptions => new() - { - PropertyNameCaseInsensitive = true, - ReadCommentHandling = JsonCommentHandling.Skip, - AllowTrailingCommas = true, - }; -} - -internal sealed record BenchmarkScenarioConfig -{ - [JsonPropertyName("id")] - public string? Id { get; init; } - - [JsonPropertyName("label")] - public string? Label { get; init; } - - [JsonPropertyName("root")] - public string? Root { get; init; } - - [JsonPropertyName("analyzers")] - public List? Analyzers { get; init; } - - [JsonPropertyName("matcher")] - public string? Matcher { get; init; } - - [JsonPropertyName("parser")] - public string? Parser { get; init; } - - [JsonPropertyName("thresholdMs")] - public double? ThresholdMs { get; init; } - - public bool HasAnalyzers => Analyzers is { Count: > 0 }; - - public void Validate() - { - if (string.IsNullOrWhiteSpace(Id)) - { - throw new InvalidOperationException("scenario.id is required."); - } - - if (string.IsNullOrWhiteSpace(Root)) - { - throw new InvalidOperationException($"Scenario '{Id}' must specify a root path."); - } - - if (HasAnalyzers) - { - return; - } - - if (string.IsNullOrWhiteSpace(Parser)) - { - throw new InvalidOperationException($"Scenario '{Id}' must specify parser or analyzers."); - } - - if (string.IsNullOrWhiteSpace(Matcher)) - { - throw new InvalidOperationException($"Scenario '{Id}' must specify matcher when parser is used."); - } - } -} +using System.Text.Json; +using System.Text.Json.Serialization; + +namespace StellaOps.Bench.ScannerAnalyzers; + +internal sealed record BenchmarkConfig +{ + [JsonPropertyName("iterations")] + public int? Iterations { get; init; } + + [JsonPropertyName("thresholdMs")] + public double? 
ThresholdMs { get; init; } + + [JsonPropertyName("scenarios")] + public List Scenarios { get; init; } = new(); + + public static async Task LoadAsync(string path) + { + if (string.IsNullOrWhiteSpace(path)) + { + throw new ArgumentException("Config path is required.", nameof(path)); + } + + await using var stream = File.OpenRead(path); + var config = await JsonSerializer.DeserializeAsync(stream, SerializerOptions).ConfigureAwait(false); + if (config is null) + { + throw new InvalidOperationException($"Failed to parse benchmark config '{path}'."); + } + + if (config.Scenarios.Count == 0) + { + throw new InvalidOperationException("config.scenarios must declare at least one scenario."); + } + + foreach (var scenario in config.Scenarios) + { + scenario.Validate(); + } + + return config; + } + + private static JsonSerializerOptions SerializerOptions => new() + { + PropertyNameCaseInsensitive = true, + ReadCommentHandling = JsonCommentHandling.Skip, + AllowTrailingCommas = true, + }; +} + +internal sealed record BenchmarkScenarioConfig +{ + [JsonPropertyName("id")] + public string? Id { get; init; } + + [JsonPropertyName("label")] + public string? Label { get; init; } + + [JsonPropertyName("root")] + public string? Root { get; init; } + + [JsonPropertyName("analyzers")] + public List? Analyzers { get; init; } + + [JsonPropertyName("matcher")] + public string? Matcher { get; init; } + + [JsonPropertyName("parser")] + public string? Parser { get; init; } + + [JsonPropertyName("thresholdMs")] + public double? ThresholdMs { get; init; } + + public bool HasAnalyzers => Analyzers is { Count: > 0 }; + + public void Validate() + { + if (string.IsNullOrWhiteSpace(Id)) + { + throw new InvalidOperationException("scenario.id is required."); + } + + if (string.IsNullOrWhiteSpace(Root)) + { + throw new InvalidOperationException($"Scenario '{Id}' must specify a root path."); + } + + if (HasAnalyzers) + { + return; + } + + if (string.IsNullOrWhiteSpace(Parser)) + { + throw new InvalidOperationException($"Scenario '{Id}' must specify parser or analyzers."); + } + + if (string.IsNullOrWhiteSpace(Matcher)) + { + throw new InvalidOperationException($"Scenario '{Id}' must specify matcher when parser is used."); + } + } +} diff --git a/src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Program.cs b/src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Program.cs similarity index 97% rename from src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Program.cs rename to src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Program.cs index 0100de0b..fcffc2e8 100644 --- a/src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Program.cs +++ b/src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Program.cs @@ -1,393 +1,393 @@ -using System.Globalization; -using StellaOps.Bench.ScannerAnalyzers.Baseline; -using StellaOps.Bench.ScannerAnalyzers.Reporting; -using StellaOps.Bench.ScannerAnalyzers.Scenarios; - -namespace StellaOps.Bench.ScannerAnalyzers; - -internal static class Program -{ - public static async Task Main(string[] args) - { - try - { - var options = ProgramOptions.Parse(args); - var config = await BenchmarkConfig.LoadAsync(options.ConfigPath).ConfigureAwait(false); - - var iterations = options.Iterations ?? config.Iterations ?? 5; - var thresholdMs = options.ThresholdMs ?? config.ThresholdMs ?? 
5000; - var repoRoot = ResolveRepoRoot(options.RepoRoot, options.ConfigPath); - var regressionLimit = options.RegressionLimit ?? 1.2d; - var capturedAt = (options.CapturedAtUtc ?? DateTimeOffset.UtcNow).ToUniversalTime(); - - var baseline = await LoadBaselineDictionaryAsync(options.BaselinePath, CancellationToken.None).ConfigureAwait(false); - - var results = new List(); - var reports = new List(); - var failures = new List(); - - foreach (var scenario in config.Scenarios) - { - var runner = ScenarioRunnerFactory.Create(scenario); - var scenarioRoot = ResolveScenarioRoot(repoRoot, scenario.Root!); - - var execution = await runner.ExecuteAsync(scenarioRoot, iterations, CancellationToken.None).ConfigureAwait(false); - var stats = ScenarioStatistics.FromDurations(execution.Durations); - var scenarioThreshold = scenario.ThresholdMs ?? thresholdMs; - - var result = new ScenarioResult( - scenario.Id!, - scenario.Label ?? scenario.Id!, - execution.SampleCount, - stats.MeanMs, - stats.P95Ms, - stats.MaxMs, - iterations, - scenarioThreshold); - - results.Add(result); - - if (stats.MaxMs > scenarioThreshold) - { - failures.Add($"{scenario.Id} exceeded threshold: {stats.MaxMs:F2} ms > {scenarioThreshold:F2} ms"); - } - - baseline.TryGetValue(result.Id, out var baselineEntry); - var report = new BenchmarkScenarioReport(result, baselineEntry, regressionLimit); - if (report.BuildRegressionFailureMessage() is { } regressionFailure) - { - failures.Add(regressionFailure); - } - - reports.Add(report); - } - - TablePrinter.Print(results); - - if (!string.IsNullOrWhiteSpace(options.CsvOutPath)) - { - CsvWriter.Write(options.CsvOutPath!, results); - } - - if (!string.IsNullOrWhiteSpace(options.JsonOutPath)) - { - var metadata = new BenchmarkJsonMetadata( - "1.0", - capturedAt, - options.Commit, - options.Environment); - - await BenchmarkJsonWriter.WriteAsync(options.JsonOutPath!, metadata, reports, CancellationToken.None).ConfigureAwait(false); - } - - if (!string.IsNullOrWhiteSpace(options.PrometheusOutPath)) - { - PrometheusWriter.Write(options.PrometheusOutPath!, reports); - } - - if (failures.Count > 0) - { - Console.Error.WriteLine(); - Console.Error.WriteLine("Performance threshold exceeded:"); - foreach (var failure in failures) - { - Console.Error.WriteLine($" - {failure}"); - } - - return 1; - } - - return 0; - } - catch (Exception ex) - { - Console.Error.WriteLine(ex.Message); - return 1; - } - } - - private static async Task> LoadBaselineDictionaryAsync(string? baselinePath, CancellationToken cancellationToken) - { - if (string.IsNullOrWhiteSpace(baselinePath)) - { - return new Dictionary(StringComparer.OrdinalIgnoreCase); - } - - var resolved = Path.GetFullPath(baselinePath); - if (!File.Exists(resolved)) - { - return new Dictionary(StringComparer.OrdinalIgnoreCase); - } - - return await BaselineLoader.LoadAsync(resolved, cancellationToken).ConfigureAwait(false); - } - - private static string ResolveRepoRoot(string? 
overridePath, string configPath) - { - if (!string.IsNullOrWhiteSpace(overridePath)) - { - return Path.GetFullPath(overridePath); - } - - var configDirectory = Path.GetDirectoryName(configPath); - if (string.IsNullOrWhiteSpace(configDirectory)) - { - return Directory.GetCurrentDirectory(); - } - - return Path.GetFullPath(Path.Combine(configDirectory, "..", "..")); - } - - private static string ResolveScenarioRoot(string repoRoot, string relativeRoot) - { - if (string.IsNullOrWhiteSpace(relativeRoot)) - { - throw new InvalidOperationException("Scenario root is required."); - } - - var combined = Path.GetFullPath(Path.Combine(repoRoot, relativeRoot)); - if (!PathUtilities.IsWithinRoot(repoRoot, combined)) - { - throw new InvalidOperationException($"Scenario root '{relativeRoot}' escapes repository root '{repoRoot}'."); - } - - if (!Directory.Exists(combined)) - { - throw new DirectoryNotFoundException($"Scenario root '{combined}' does not exist."); - } - - return combined; - } - - private sealed record ProgramOptions( - string ConfigPath, - int? Iterations, - double? ThresholdMs, - string? CsvOutPath, - string? JsonOutPath, - string? PrometheusOutPath, - string? RepoRoot, - string? BaselinePath, - DateTimeOffset? CapturedAtUtc, - string? Commit, - string? Environment, - double? RegressionLimit) - { - public static ProgramOptions Parse(string[] args) - { - var configPath = DefaultConfigPath(); - var baselinePath = DefaultBaselinePath(); - int? iterations = null; - double? thresholdMs = null; - string? csvOut = null; - string? jsonOut = null; - string? promOut = null; - string? repoRoot = null; - DateTimeOffset? capturedAt = null; - string? commit = null; - string? environment = null; - double? regressionLimit = null; - - for (var index = 0; index < args.Length; index++) - { - var current = args[index]; - switch (current) - { - case "--config": - EnsureNext(args, index); - configPath = Path.GetFullPath(args[++index]); - break; - case "--iterations": - EnsureNext(args, index); - iterations = int.Parse(args[++index], CultureInfo.InvariantCulture); - break; - case "--threshold-ms": - EnsureNext(args, index); - thresholdMs = double.Parse(args[++index], CultureInfo.InvariantCulture); - break; - case "--out": - case "--csv": - EnsureNext(args, index); - csvOut = args[++index]; - break; - case "--json": - EnsureNext(args, index); - jsonOut = args[++index]; - break; - case "--prom": - case "--prometheus": - EnsureNext(args, index); - promOut = args[++index]; - break; - case "--baseline": - EnsureNext(args, index); - baselinePath = args[++index]; - break; - case "--repo-root": - case "--samples": - EnsureNext(args, index); - repoRoot = args[++index]; - break; - case "--captured-at": - EnsureNext(args, index); - capturedAt = DateTimeOffset.Parse(args[++index], CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal); - break; - case "--commit": - EnsureNext(args, index); - commit = args[++index]; - break; - case "--environment": - EnsureNext(args, index); - environment = args[++index]; - break; - case "--regression-limit": - EnsureNext(args, index); - regressionLimit = double.Parse(args[++index], CultureInfo.InvariantCulture); - break; - default: - throw new ArgumentException($"Unknown argument: {current}", nameof(args)); - } - } - - return new ProgramOptions(configPath, iterations, thresholdMs, csvOut, jsonOut, promOut, repoRoot, baselinePath, capturedAt, commit, environment, regressionLimit); - } - - private static string DefaultConfigPath() - { - var binaryDir 
= AppContext.BaseDirectory; - var projectRoot = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", "..")); - var configDirectory = Path.GetFullPath(Path.Combine(projectRoot, "..")); - return Path.Combine(configDirectory, "config.json"); - } - - private static string? DefaultBaselinePath() - { - var binaryDir = AppContext.BaseDirectory; - var projectRoot = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", "..")); - var benchRoot = Path.GetFullPath(Path.Combine(projectRoot, "..")); - var baselinePath = Path.Combine(benchRoot, "baseline.csv"); - return File.Exists(baselinePath) ? baselinePath : baselinePath; - } - - private static void EnsureNext(string[] args, int index) - { - if (index + 1 >= args.Length) - { - throw new ArgumentException("Missing value for argument.", nameof(args)); - } - } - } - - private sealed record ScenarioStatistics(double MeanMs, double P95Ms, double MaxMs) - { - public static ScenarioStatistics FromDurations(IReadOnlyList durations) - { - if (durations.Count == 0) - { - return new ScenarioStatistics(0, 0, 0); - } - - var sorted = durations.ToArray(); - Array.Sort(sorted); - - var total = 0d; - foreach (var value in durations) - { - total += value; - } - - var mean = total / durations.Count; - var p95 = Percentile(sorted, 95); - var max = sorted[^1]; - - return new ScenarioStatistics(mean, p95, max); - } - - private static double Percentile(IReadOnlyList sorted, double percentile) - { - if (sorted.Count == 0) - { - return 0; - } - - var rank = (percentile / 100d) * (sorted.Count - 1); - var lower = (int)Math.Floor(rank); - var upper = (int)Math.Ceiling(rank); - var weight = rank - lower; - - if (upper >= sorted.Count) - { - return sorted[lower]; - } - - return sorted[lower] + weight * (sorted[upper] - sorted[lower]); - } - } - - private static class TablePrinter - { - public static void Print(IEnumerable results) - { - Console.WriteLine("Scenario | Count | Mean(ms) | P95(ms) | Max(ms)"); - Console.WriteLine("---------------------------- | ----- | --------- | --------- | ----------"); - foreach (var row in results) - { - Console.WriteLine(string.Join(" | ", new[] - { - row.IdColumn, - row.SampleCountColumn, - row.MeanColumn, - row.P95Column, - row.MaxColumn - })); - } - } - } - - private static class CsvWriter - { - public static void Write(string path, IEnumerable results) - { - var resolvedPath = Path.GetFullPath(path); - var directory = Path.GetDirectoryName(resolvedPath); - if (!string.IsNullOrEmpty(directory)) - { - Directory.CreateDirectory(directory); - } - - using var stream = new FileStream(resolvedPath, FileMode.Create, FileAccess.Write, FileShare.None); - using var writer = new StreamWriter(stream); - writer.WriteLine("scenario,iterations,sample_count,mean_ms,p95_ms,max_ms"); - - foreach (var row in results) - { - writer.Write(row.Id); - writer.Write(','); - writer.Write(row.Iterations.ToString(CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(row.SampleCount.ToString(CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(row.MeanMs.ToString("F4", CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(row.P95Ms.ToString("F4", CultureInfo.InvariantCulture)); - writer.Write(','); - writer.Write(row.MaxMs.ToString("F4", CultureInfo.InvariantCulture)); - writer.WriteLine(); - } - } - } - - internal static class PathUtilities - { - public static bool IsWithinRoot(string root, string candidate) - { - var relative = Path.GetRelativePath(root, candidate); - if (string.IsNullOrEmpty(relative) || relative == ".") - { 
- return true; - } - - return !relative.StartsWith("..", StringComparison.Ordinal) && !Path.IsPathRooted(relative); - } - } -} +using System.Globalization; +using StellaOps.Bench.ScannerAnalyzers.Baseline; +using StellaOps.Bench.ScannerAnalyzers.Reporting; +using StellaOps.Bench.ScannerAnalyzers.Scenarios; + +namespace StellaOps.Bench.ScannerAnalyzers; + +internal static class Program +{ + public static async Task Main(string[] args) + { + try + { + var options = ProgramOptions.Parse(args); + var config = await BenchmarkConfig.LoadAsync(options.ConfigPath).ConfigureAwait(false); + + var iterations = options.Iterations ?? config.Iterations ?? 5; + var thresholdMs = options.ThresholdMs ?? config.ThresholdMs ?? 5000; + var repoRoot = ResolveRepoRoot(options.RepoRoot, options.ConfigPath); + var regressionLimit = options.RegressionLimit ?? 1.2d; + var capturedAt = (options.CapturedAtUtc ?? DateTimeOffset.UtcNow).ToUniversalTime(); + + var baseline = await LoadBaselineDictionaryAsync(options.BaselinePath, CancellationToken.None).ConfigureAwait(false); + + var results = new List(); + var reports = new List(); + var failures = new List(); + + foreach (var scenario in config.Scenarios) + { + var runner = ScenarioRunnerFactory.Create(scenario); + var scenarioRoot = ResolveScenarioRoot(repoRoot, scenario.Root!); + + var execution = await runner.ExecuteAsync(scenarioRoot, iterations, CancellationToken.None).ConfigureAwait(false); + var stats = ScenarioStatistics.FromDurations(execution.Durations); + var scenarioThreshold = scenario.ThresholdMs ?? thresholdMs; + + var result = new ScenarioResult( + scenario.Id!, + scenario.Label ?? scenario.Id!, + execution.SampleCount, + stats.MeanMs, + stats.P95Ms, + stats.MaxMs, + iterations, + scenarioThreshold); + + results.Add(result); + + if (stats.MaxMs > scenarioThreshold) + { + failures.Add($"{scenario.Id} exceeded threshold: {stats.MaxMs:F2} ms > {scenarioThreshold:F2} ms"); + } + + baseline.TryGetValue(result.Id, out var baselineEntry); + var report = new BenchmarkScenarioReport(result, baselineEntry, regressionLimit); + if (report.BuildRegressionFailureMessage() is { } regressionFailure) + { + failures.Add(regressionFailure); + } + + reports.Add(report); + } + + TablePrinter.Print(results); + + if (!string.IsNullOrWhiteSpace(options.CsvOutPath)) + { + CsvWriter.Write(options.CsvOutPath!, results); + } + + if (!string.IsNullOrWhiteSpace(options.JsonOutPath)) + { + var metadata = new BenchmarkJsonMetadata( + "1.0", + capturedAt, + options.Commit, + options.Environment); + + await BenchmarkJsonWriter.WriteAsync(options.JsonOutPath!, metadata, reports, CancellationToken.None).ConfigureAwait(false); + } + + if (!string.IsNullOrWhiteSpace(options.PrometheusOutPath)) + { + PrometheusWriter.Write(options.PrometheusOutPath!, reports); + } + + if (failures.Count > 0) + { + Console.Error.WriteLine(); + Console.Error.WriteLine("Performance threshold exceeded:"); + foreach (var failure in failures) + { + Console.Error.WriteLine($" - {failure}"); + } + + return 1; + } + + return 0; + } + catch (Exception ex) + { + Console.Error.WriteLine(ex.Message); + return 1; + } + } + + private static async Task> LoadBaselineDictionaryAsync(string? 
baselinePath, CancellationToken cancellationToken) + { + if (string.IsNullOrWhiteSpace(baselinePath)) + { + return new Dictionary(StringComparer.OrdinalIgnoreCase); + } + + var resolved = Path.GetFullPath(baselinePath); + if (!File.Exists(resolved)) + { + return new Dictionary(StringComparer.OrdinalIgnoreCase); + } + + return await BaselineLoader.LoadAsync(resolved, cancellationToken).ConfigureAwait(false); + } + + private static string ResolveRepoRoot(string? overridePath, string configPath) + { + if (!string.IsNullOrWhiteSpace(overridePath)) + { + return Path.GetFullPath(overridePath); + } + + var configDirectory = Path.GetDirectoryName(configPath); + if (string.IsNullOrWhiteSpace(configDirectory)) + { + return Directory.GetCurrentDirectory(); + } + + return Path.GetFullPath(Path.Combine(configDirectory, "..", "..")); + } + + private static string ResolveScenarioRoot(string repoRoot, string relativeRoot) + { + if (string.IsNullOrWhiteSpace(relativeRoot)) + { + throw new InvalidOperationException("Scenario root is required."); + } + + var combined = Path.GetFullPath(Path.Combine(repoRoot, relativeRoot)); + if (!PathUtilities.IsWithinRoot(repoRoot, combined)) + { + throw new InvalidOperationException($"Scenario root '{relativeRoot}' escapes repository root '{repoRoot}'."); + } + + if (!Directory.Exists(combined)) + { + throw new DirectoryNotFoundException($"Scenario root '{combined}' does not exist."); + } + + return combined; + } + + private sealed record ProgramOptions( + string ConfigPath, + int? Iterations, + double? ThresholdMs, + string? CsvOutPath, + string? JsonOutPath, + string? PrometheusOutPath, + string? RepoRoot, + string? BaselinePath, + DateTimeOffset? CapturedAtUtc, + string? Commit, + string? Environment, + double? RegressionLimit) + { + public static ProgramOptions Parse(string[] args) + { + var configPath = DefaultConfigPath(); + var baselinePath = DefaultBaselinePath(); + int? iterations = null; + double? thresholdMs = null; + string? csvOut = null; + string? jsonOut = null; + string? promOut = null; + string? repoRoot = null; + DateTimeOffset? capturedAt = null; + string? commit = null; + string? environment = null; + double? 
regressionLimit = null; + + for (var index = 0; index < args.Length; index++) + { + var current = args[index]; + switch (current) + { + case "--config": + EnsureNext(args, index); + configPath = Path.GetFullPath(args[++index]); + break; + case "--iterations": + EnsureNext(args, index); + iterations = int.Parse(args[++index], CultureInfo.InvariantCulture); + break; + case "--threshold-ms": + EnsureNext(args, index); + thresholdMs = double.Parse(args[++index], CultureInfo.InvariantCulture); + break; + case "--out": + case "--csv": + EnsureNext(args, index); + csvOut = args[++index]; + break; + case "--json": + EnsureNext(args, index); + jsonOut = args[++index]; + break; + case "--prom": + case "--prometheus": + EnsureNext(args, index); + promOut = args[++index]; + break; + case "--baseline": + EnsureNext(args, index); + baselinePath = args[++index]; + break; + case "--repo-root": + case "--samples": + EnsureNext(args, index); + repoRoot = args[++index]; + break; + case "--captured-at": + EnsureNext(args, index); + capturedAt = DateTimeOffset.Parse(args[++index], CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal); + break; + case "--commit": + EnsureNext(args, index); + commit = args[++index]; + break; + case "--environment": + EnsureNext(args, index); + environment = args[++index]; + break; + case "--regression-limit": + EnsureNext(args, index); + regressionLimit = double.Parse(args[++index], CultureInfo.InvariantCulture); + break; + default: + throw new ArgumentException($"Unknown argument: {current}", nameof(args)); + } + } + + return new ProgramOptions(configPath, iterations, thresholdMs, csvOut, jsonOut, promOut, repoRoot, baselinePath, capturedAt, commit, environment, regressionLimit); + } + + private static string DefaultConfigPath() + { + var binaryDir = AppContext.BaseDirectory; + var projectRoot = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", "..")); + var configDirectory = Path.GetFullPath(Path.Combine(projectRoot, "..")); + return Path.Combine(configDirectory, "config.json"); + } + + private static string? DefaultBaselinePath() + { + var binaryDir = AppContext.BaseDirectory; + var projectRoot = Path.GetFullPath(Path.Combine(binaryDir, "..", "..", "..")); + var benchRoot = Path.GetFullPath(Path.Combine(projectRoot, "..")); + var baselinePath = Path.Combine(benchRoot, "baseline.csv"); + return File.Exists(baselinePath) ? 
baselinePath : baselinePath; + } + + private static void EnsureNext(string[] args, int index) + { + if (index + 1 >= args.Length) + { + throw new ArgumentException("Missing value for argument.", nameof(args)); + } + } + } + + private sealed record ScenarioStatistics(double MeanMs, double P95Ms, double MaxMs) + { + public static ScenarioStatistics FromDurations(IReadOnlyList durations) + { + if (durations.Count == 0) + { + return new ScenarioStatistics(0, 0, 0); + } + + var sorted = durations.ToArray(); + Array.Sort(sorted); + + var total = 0d; + foreach (var value in durations) + { + total += value; + } + + var mean = total / durations.Count; + var p95 = Percentile(sorted, 95); + var max = sorted[^1]; + + return new ScenarioStatistics(mean, p95, max); + } + + private static double Percentile(IReadOnlyList sorted, double percentile) + { + if (sorted.Count == 0) + { + return 0; + } + + var rank = (percentile / 100d) * (sorted.Count - 1); + var lower = (int)Math.Floor(rank); + var upper = (int)Math.Ceiling(rank); + var weight = rank - lower; + + if (upper >= sorted.Count) + { + return sorted[lower]; + } + + return sorted[lower] + weight * (sorted[upper] - sorted[lower]); + } + } + + private static class TablePrinter + { + public static void Print(IEnumerable results) + { + Console.WriteLine("Scenario | Count | Mean(ms) | P95(ms) | Max(ms)"); + Console.WriteLine("---------------------------- | ----- | --------- | --------- | ----------"); + foreach (var row in results) + { + Console.WriteLine(string.Join(" | ", new[] + { + row.IdColumn, + row.SampleCountColumn, + row.MeanColumn, + row.P95Column, + row.MaxColumn + })); + } + } + } + + private static class CsvWriter + { + public static void Write(string path, IEnumerable results) + { + var resolvedPath = Path.GetFullPath(path); + var directory = Path.GetDirectoryName(resolvedPath); + if (!string.IsNullOrEmpty(directory)) + { + Directory.CreateDirectory(directory); + } + + using var stream = new FileStream(resolvedPath, FileMode.Create, FileAccess.Write, FileShare.None); + using var writer = new StreamWriter(stream); + writer.WriteLine("scenario,iterations,sample_count,mean_ms,p95_ms,max_ms"); + + foreach (var row in results) + { + writer.Write(row.Id); + writer.Write(','); + writer.Write(row.Iterations.ToString(CultureInfo.InvariantCulture)); + writer.Write(','); + writer.Write(row.SampleCount.ToString(CultureInfo.InvariantCulture)); + writer.Write(','); + writer.Write(row.MeanMs.ToString("F4", CultureInfo.InvariantCulture)); + writer.Write(','); + writer.Write(row.P95Ms.ToString("F4", CultureInfo.InvariantCulture)); + writer.Write(','); + writer.Write(row.MaxMs.ToString("F4", CultureInfo.InvariantCulture)); + writer.WriteLine(); + } + } + } + + internal static class PathUtilities + { + public static bool IsWithinRoot(string root, string candidate) + { + var relative = Path.GetRelativePath(root, candidate); + if (string.IsNullOrEmpty(relative) || relative == ".") + { + return true; + } + + return !relative.StartsWith("..", StringComparison.Ordinal) && !Path.IsPathRooted(relative); + } + } +} diff --git a/src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Reporting/BenchmarkJsonWriter.cs b/src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Reporting/BenchmarkJsonWriter.cs similarity index 97% rename from src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Reporting/BenchmarkJsonWriter.cs rename to 
src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Reporting/BenchmarkJsonWriter.cs index 183415b6..e6be774e 100644 --- a/src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Reporting/BenchmarkJsonWriter.cs +++ b/src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Reporting/BenchmarkJsonWriter.cs @@ -1,108 +1,108 @@ -using System.Text.Json; -using System.Text.Json.Serialization; -using StellaOps.Bench.ScannerAnalyzers.Baseline; - -namespace StellaOps.Bench.ScannerAnalyzers.Reporting; - -internal static class BenchmarkJsonWriter -{ - private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) - { - WriteIndented = true, - DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull - }; - - public static async Task WriteAsync( - string path, - BenchmarkJsonMetadata metadata, - IReadOnlyList reports, - CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(path); - ArgumentNullException.ThrowIfNull(metadata); - ArgumentNullException.ThrowIfNull(reports); - - var resolved = Path.GetFullPath(path); - var directory = Path.GetDirectoryName(resolved); - if (!string.IsNullOrEmpty(directory)) - { - Directory.CreateDirectory(directory); - } - - var document = new BenchmarkJsonDocument( - metadata.SchemaVersion, - metadata.CapturedAtUtc, - metadata.Commit, - metadata.Environment, - reports.Select(CreateScenario).ToArray()); - - await using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None); - await JsonSerializer.SerializeAsync(stream, document, SerializerOptions, cancellationToken).ConfigureAwait(false); - await stream.FlushAsync(cancellationToken).ConfigureAwait(false); - } - - private static BenchmarkJsonScenario CreateScenario(BenchmarkScenarioReport report) - { - var baseline = report.Baseline; - return new BenchmarkJsonScenario( - report.Result.Id, - report.Result.Label, - report.Result.Iterations, - report.Result.SampleCount, - report.Result.MeanMs, - report.Result.P95Ms, - report.Result.MaxMs, - report.Result.ThresholdMs, - baseline is null - ? null - : new BenchmarkJsonScenarioBaseline( - baseline.Iterations, - baseline.SampleCount, - baseline.MeanMs, - baseline.P95Ms, - baseline.MaxMs), - new BenchmarkJsonScenarioRegression( - report.MaxRegressionRatio, - report.MeanRegressionRatio, - report.RegressionLimit, - report.RegressionBreached)); - } - - private sealed record BenchmarkJsonDocument( - string SchemaVersion, - DateTimeOffset CapturedAt, - string? Commit, - string? Environment, - IReadOnlyList Scenarios); - - private sealed record BenchmarkJsonScenario( - string Id, - string Label, - int Iterations, - int SampleCount, - double MeanMs, - double P95Ms, - double MaxMs, - double ThresholdMs, - BenchmarkJsonScenarioBaseline? Baseline, - BenchmarkJsonScenarioRegression Regression); - - private sealed record BenchmarkJsonScenarioBaseline( - int Iterations, - int SampleCount, - double MeanMs, - double P95Ms, - double MaxMs); - - private sealed record BenchmarkJsonScenarioRegression( - double? MaxRatio, - double? MeanRatio, - double Limit, - bool Breached); -} - -internal sealed record BenchmarkJsonMetadata( - string SchemaVersion, - DateTimeOffset CapturedAtUtc, - string? Commit, - string? 
Environment); +using System.Text.Json; +using System.Text.Json.Serialization; +using StellaOps.Bench.ScannerAnalyzers.Baseline; + +namespace StellaOps.Bench.ScannerAnalyzers.Reporting; + +internal static class BenchmarkJsonWriter +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + WriteIndented = true, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull + }; + + public static async Task WriteAsync( + string path, + BenchmarkJsonMetadata metadata, + IReadOnlyList reports, + CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(path); + ArgumentNullException.ThrowIfNull(metadata); + ArgumentNullException.ThrowIfNull(reports); + + var resolved = Path.GetFullPath(path); + var directory = Path.GetDirectoryName(resolved); + if (!string.IsNullOrEmpty(directory)) + { + Directory.CreateDirectory(directory); + } + + var document = new BenchmarkJsonDocument( + metadata.SchemaVersion, + metadata.CapturedAtUtc, + metadata.Commit, + metadata.Environment, + reports.Select(CreateScenario).ToArray()); + + await using var stream = new FileStream(resolved, FileMode.Create, FileAccess.Write, FileShare.None); + await JsonSerializer.SerializeAsync(stream, document, SerializerOptions, cancellationToken).ConfigureAwait(false); + await stream.FlushAsync(cancellationToken).ConfigureAwait(false); + } + + private static BenchmarkJsonScenario CreateScenario(BenchmarkScenarioReport report) + { + var baseline = report.Baseline; + return new BenchmarkJsonScenario( + report.Result.Id, + report.Result.Label, + report.Result.Iterations, + report.Result.SampleCount, + report.Result.MeanMs, + report.Result.P95Ms, + report.Result.MaxMs, + report.Result.ThresholdMs, + baseline is null + ? null + : new BenchmarkJsonScenarioBaseline( + baseline.Iterations, + baseline.SampleCount, + baseline.MeanMs, + baseline.P95Ms, + baseline.MaxMs), + new BenchmarkJsonScenarioRegression( + report.MaxRegressionRatio, + report.MeanRegressionRatio, + report.RegressionLimit, + report.RegressionBreached)); + } + + private sealed record BenchmarkJsonDocument( + string SchemaVersion, + DateTimeOffset CapturedAt, + string? Commit, + string? Environment, + IReadOnlyList Scenarios); + + private sealed record BenchmarkJsonScenario( + string Id, + string Label, + int Iterations, + int SampleCount, + double MeanMs, + double P95Ms, + double MaxMs, + double ThresholdMs, + BenchmarkJsonScenarioBaseline? Baseline, + BenchmarkJsonScenarioRegression Regression); + + private sealed record BenchmarkJsonScenarioBaseline( + int Iterations, + int SampleCount, + double MeanMs, + double P95Ms, + double MaxMs); + + private sealed record BenchmarkJsonScenarioRegression( + double? MaxRatio, + double? MeanRatio, + double Limit, + bool Breached); +} + +internal sealed record BenchmarkJsonMetadata( + string SchemaVersion, + DateTimeOffset CapturedAtUtc, + string? Commit, + string? 
Environment); diff --git a/src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Reporting/BenchmarkScenarioReport.cs b/src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Reporting/BenchmarkScenarioReport.cs similarity index 96% rename from src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Reporting/BenchmarkScenarioReport.cs rename to src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Reporting/BenchmarkScenarioReport.cs index 55ab4ba4..d0c1a978 100644 --- a/src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Reporting/BenchmarkScenarioReport.cs +++ b/src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Reporting/BenchmarkScenarioReport.cs @@ -1,55 +1,55 @@ -using StellaOps.Bench.ScannerAnalyzers.Baseline; - -namespace StellaOps.Bench.ScannerAnalyzers.Reporting; - -internal sealed class BenchmarkScenarioReport -{ - private const double RegressionLimitDefault = 1.2d; - - public BenchmarkScenarioReport(ScenarioResult result, BaselineEntry? baseline, double? regressionLimit = null) - { - Result = result ?? throw new ArgumentNullException(nameof(result)); - Baseline = baseline; - RegressionLimit = regressionLimit is { } limit && limit > 0 ? limit : RegressionLimitDefault; - MaxRegressionRatio = CalculateRatio(result.MaxMs, baseline?.MaxMs); - MeanRegressionRatio = CalculateRatio(result.MeanMs, baseline?.MeanMs); - } - - public ScenarioResult Result { get; } - - public BaselineEntry? Baseline { get; } - - public double RegressionLimit { get; } - - public double? MaxRegressionRatio { get; } - - public double? MeanRegressionRatio { get; } - - public bool RegressionBreached => MaxRegressionRatio.HasValue && MaxRegressionRatio.Value >= RegressionLimit; - - public string? BuildRegressionFailureMessage() - { - if (!RegressionBreached || MaxRegressionRatio is null) - { - return null; - } - - var percentage = (MaxRegressionRatio.Value - 1d) * 100d; - return $"{Result.Id} exceeded regression budget: max {Result.MaxMs:F2} ms vs baseline {Baseline!.MaxMs:F2} ms (+{percentage:F1}%)"; - } - - private static double? CalculateRatio(double current, double? baseline) - { - if (!baseline.HasValue) - { - return null; - } - - if (baseline.Value <= 0d) - { - return null; - } - - return current / baseline.Value; - } -} +using StellaOps.Bench.ScannerAnalyzers.Baseline; + +namespace StellaOps.Bench.ScannerAnalyzers.Reporting; + +internal sealed class BenchmarkScenarioReport +{ + private const double RegressionLimitDefault = 1.2d; + + public BenchmarkScenarioReport(ScenarioResult result, BaselineEntry? baseline, double? regressionLimit = null) + { + Result = result ?? throw new ArgumentNullException(nameof(result)); + Baseline = baseline; + RegressionLimit = regressionLimit is { } limit && limit > 0 ? limit : RegressionLimitDefault; + MaxRegressionRatio = CalculateRatio(result.MaxMs, baseline?.MaxMs); + MeanRegressionRatio = CalculateRatio(result.MeanMs, baseline?.MeanMs); + } + + public ScenarioResult Result { get; } + + public BaselineEntry? Baseline { get; } + + public double RegressionLimit { get; } + + public double? MaxRegressionRatio { get; } + + public double? MeanRegressionRatio { get; } + + public bool RegressionBreached => MaxRegressionRatio.HasValue && MaxRegressionRatio.Value >= RegressionLimit; + + public string? 
BuildRegressionFailureMessage() + { + if (!RegressionBreached || MaxRegressionRatio is null) + { + return null; + } + + var percentage = (MaxRegressionRatio.Value - 1d) * 100d; + return $"{Result.Id} exceeded regression budget: max {Result.MaxMs:F2} ms vs baseline {Baseline!.MaxMs:F2} ms (+{percentage:F1}%)"; + } + + private static double? CalculateRatio(double current, double? baseline) + { + if (!baseline.HasValue) + { + return null; + } + + if (baseline.Value <= 0d) + { + return null; + } + + return current / baseline.Value; + } +} diff --git a/src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Reporting/PrometheusWriter.cs b/src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Reporting/PrometheusWriter.cs similarity index 97% rename from src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Reporting/PrometheusWriter.cs rename to src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Reporting/PrometheusWriter.cs index 03697ff5..3073a9c0 100644 --- a/src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Reporting/PrometheusWriter.cs +++ b/src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/Reporting/PrometheusWriter.cs @@ -1,59 +1,59 @@ -using System.Globalization; -using System.Text; - -namespace StellaOps.Bench.ScannerAnalyzers.Reporting; - -internal static class PrometheusWriter -{ - public static void Write(string path, IReadOnlyList reports) - { - ArgumentException.ThrowIfNullOrWhiteSpace(path); - ArgumentNullException.ThrowIfNull(reports); - - var resolved = Path.GetFullPath(path); - var directory = Path.GetDirectoryName(resolved); - if (!string.IsNullOrEmpty(directory)) - { - Directory.CreateDirectory(directory); - } - - var builder = new StringBuilder(); - builder.AppendLine("# HELP scanner_analyzer_bench_duration_ms Analyzer benchmark duration metrics in milliseconds."); - builder.AppendLine("# TYPE scanner_analyzer_bench_duration_ms gauge"); - - foreach (var report in reports) - { - var scenarioLabel = Escape(report.Result.Id); - AppendMetric(builder, "scanner_analyzer_bench_mean_ms", scenarioLabel, report.Result.MeanMs); - AppendMetric(builder, "scanner_analyzer_bench_p95_ms", scenarioLabel, report.Result.P95Ms); - AppendMetric(builder, "scanner_analyzer_bench_max_ms", scenarioLabel, report.Result.MaxMs); - AppendMetric(builder, "scanner_analyzer_bench_threshold_ms", scenarioLabel, report.Result.ThresholdMs); - - if (report.Baseline is { } baseline) - { - AppendMetric(builder, "scanner_analyzer_bench_baseline_max_ms", scenarioLabel, baseline.MaxMs); - AppendMetric(builder, "scanner_analyzer_bench_baseline_mean_ms", scenarioLabel, baseline.MeanMs); - } - - if (report.MaxRegressionRatio is { } ratio) - { - AppendMetric(builder, "scanner_analyzer_bench_regression_ratio", scenarioLabel, ratio); - AppendMetric(builder, "scanner_analyzer_bench_regression_limit", scenarioLabel, report.RegressionLimit); - AppendMetric(builder, "scanner_analyzer_bench_regression_breached", scenarioLabel, report.RegressionBreached ? 
1 : 0); - } - } - - File.WriteAllText(resolved, builder.ToString(), Encoding.UTF8); - } - - private static void AppendMetric(StringBuilder builder, string metric, string scenarioLabel, double value) - { - builder.Append(metric); - builder.Append("{scenario=\""); - builder.Append(scenarioLabel); - builder.Append("\"} "); - builder.AppendLine(value.ToString("G17", CultureInfo.InvariantCulture)); - } - - private static string Escape(string value) => value.Replace("\\", "\\\\", StringComparison.Ordinal).Replace("\"", "\\\"", StringComparison.Ordinal); -} +using System.Globalization; +using System.Text; + +namespace StellaOps.Bench.ScannerAnalyzers.Reporting; + +internal static class PrometheusWriter +{ + public static void Write(string path, IReadOnlyList reports) + { + ArgumentException.ThrowIfNullOrWhiteSpace(path); + ArgumentNullException.ThrowIfNull(reports); + + var resolved = Path.GetFullPath(path); + var directory = Path.GetDirectoryName(resolved); + if (!string.IsNullOrEmpty(directory)) + { + Directory.CreateDirectory(directory); + } + + var builder = new StringBuilder(); + builder.AppendLine("# HELP scanner_analyzer_bench_duration_ms Analyzer benchmark duration metrics in milliseconds."); + builder.AppendLine("# TYPE scanner_analyzer_bench_duration_ms gauge"); + + foreach (var report in reports) + { + var scenarioLabel = Escape(report.Result.Id); + AppendMetric(builder, "scanner_analyzer_bench_mean_ms", scenarioLabel, report.Result.MeanMs); + AppendMetric(builder, "scanner_analyzer_bench_p95_ms", scenarioLabel, report.Result.P95Ms); + AppendMetric(builder, "scanner_analyzer_bench_max_ms", scenarioLabel, report.Result.MaxMs); + AppendMetric(builder, "scanner_analyzer_bench_threshold_ms", scenarioLabel, report.Result.ThresholdMs); + + if (report.Baseline is { } baseline) + { + AppendMetric(builder, "scanner_analyzer_bench_baseline_max_ms", scenarioLabel, baseline.MaxMs); + AppendMetric(builder, "scanner_analyzer_bench_baseline_mean_ms", scenarioLabel, baseline.MeanMs); + } + + if (report.MaxRegressionRatio is { } ratio) + { + AppendMetric(builder, "scanner_analyzer_bench_regression_ratio", scenarioLabel, ratio); + AppendMetric(builder, "scanner_analyzer_bench_regression_limit", scenarioLabel, report.RegressionLimit); + AppendMetric(builder, "scanner_analyzer_bench_regression_breached", scenarioLabel, report.RegressionBreached ? 
1 : 0); + } + } + + File.WriteAllText(resolved, builder.ToString(), Encoding.UTF8); + } + + private static void AppendMetric(StringBuilder builder, string metric, string scenarioLabel, double value) + { + builder.Append(metric); + builder.Append("{scenario=\""); + builder.Append(scenarioLabel); + builder.Append("\"} "); + builder.AppendLine(value.ToString("G17", CultureInfo.InvariantCulture)); + } + + private static string Escape(string value) => value.Replace("\\", "\\\\", StringComparison.Ordinal).Replace("\"", "\\\"", StringComparison.Ordinal); +} diff --git a/src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/ScenarioResult.cs b/src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/ScenarioResult.cs similarity index 96% rename from src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/ScenarioResult.cs rename to src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/ScenarioResult.cs index 4632bb6c..015fe5d0 100644 --- a/src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/ScenarioResult.cs +++ b/src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/ScenarioResult.cs @@ -1,24 +1,24 @@ -using System.Globalization; - -namespace StellaOps.Bench.ScannerAnalyzers; - -internal sealed record ScenarioResult( - string Id, - string Label, - int SampleCount, - double MeanMs, - double P95Ms, - double MaxMs, - int Iterations, - double ThresholdMs) -{ - public string IdColumn => Id.Length <= 28 ? Id.PadRight(28) : Id[..28]; - - public string SampleCountColumn => SampleCount.ToString(CultureInfo.InvariantCulture).PadLeft(5); - - public string MeanColumn => MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(9); - - public string P95Column => P95Ms.ToString("F2", CultureInfo.InvariantCulture).PadLeft(9); - - public string MaxColumn => MaxMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10); -} +using System.Globalization; + +namespace StellaOps.Bench.ScannerAnalyzers; + +internal sealed record ScenarioResult( + string Id, + string Label, + int SampleCount, + double MeanMs, + double P95Ms, + double MaxMs, + int Iterations, + double ThresholdMs) +{ + public string IdColumn => Id.Length <= 28 ? 
Id.PadRight(28) : Id[..28]; + + public string SampleCountColumn => SampleCount.ToString(CultureInfo.InvariantCulture).PadLeft(5); + + public string MeanColumn => MeanMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(9); + + public string P95Column => P95Ms.ToString("F2", CultureInfo.InvariantCulture).PadLeft(9); + + public string MaxColumn => MaxMs.ToString("F2", CultureInfo.InvariantCulture).PadLeft(10); +} diff --git a/src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/ScenarioRunners.cs b/src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/ScenarioRunners.cs similarity index 97% rename from src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/ScenarioRunners.cs rename to src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/ScenarioRunners.cs index bbbb4a3b..1f59f4ba 100644 --- a/src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/ScenarioRunners.cs +++ b/src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/ScenarioRunners.cs @@ -1,285 +1,285 @@ -using System.Diagnostics; -using System.Text; -using System.Linq; -using System.Text.Json; -using System.Text.RegularExpressions; -using StellaOps.Scanner.Analyzers.Lang; -using StellaOps.Scanner.Analyzers.Lang.Go; -using StellaOps.Scanner.Analyzers.Lang.Java; -using StellaOps.Scanner.Analyzers.Lang.Node; -using StellaOps.Scanner.Analyzers.Lang.DotNet; -using StellaOps.Scanner.Analyzers.Lang.Python; - -namespace StellaOps.Bench.ScannerAnalyzers.Scenarios; - -internal interface IScenarioRunner -{ - Task ExecuteAsync(string rootPath, int iterations, CancellationToken cancellationToken); -} - -internal sealed record ScenarioExecutionResult(double[] Durations, int SampleCount); - -internal static class ScenarioRunnerFactory -{ - public static IScenarioRunner Create(BenchmarkScenarioConfig scenario) - { - if (scenario.HasAnalyzers) - { - return new LanguageAnalyzerScenarioRunner(scenario.Analyzers!); - } - - if (string.IsNullOrWhiteSpace(scenario.Parser) || string.IsNullOrWhiteSpace(scenario.Matcher)) - { - throw new InvalidOperationException($"Scenario '{scenario.Id}' missing parser or matcher configuration."); - } - - return new MetadataWalkScenarioRunner(scenario.Parser, scenario.Matcher); - } -} - -internal sealed class LanguageAnalyzerScenarioRunner : IScenarioRunner -{ - private readonly IReadOnlyList> _analyzerFactories; - - public LanguageAnalyzerScenarioRunner(IEnumerable analyzerIds) - { - if (analyzerIds is null) - { - throw new ArgumentNullException(nameof(analyzerIds)); - } - - _analyzerFactories = analyzerIds - .Where(static id => !string.IsNullOrWhiteSpace(id)) - .Select(CreateFactory) - .ToArray(); - - if (_analyzerFactories.Count == 0) - { - throw new InvalidOperationException("At least one analyzer id must be provided."); - } - } - - public async Task ExecuteAsync(string rootPath, int iterations, CancellationToken cancellationToken) - { - if (iterations <= 0) - { - throw new ArgumentOutOfRangeException(nameof(iterations), iterations, "Iterations must be positive."); - } - - var analyzers = _analyzerFactories.Select(factory => factory()).ToArray(); - var engine = new LanguageAnalyzerEngine(analyzers); - var durations = new double[iterations]; - var componentCount = -1; - - for (var i = 0; i < iterations; i++) - { - cancellationToken.ThrowIfCancellationRequested(); - - var context = new LanguageAnalyzerContext(rootPath, TimeProvider.System); - var stopwatch = Stopwatch.StartNew(); - var result = await 
engine.AnalyzeAsync(context, cancellationToken).ConfigureAwait(false); - stopwatch.Stop(); - - durations[i] = stopwatch.Elapsed.TotalMilliseconds; - - var currentCount = result.Components.Count; - if (componentCount < 0) - { - componentCount = currentCount; - } - else if (componentCount != currentCount) - { - throw new InvalidOperationException($"Analyzer output count changed between iterations ({componentCount} vs {currentCount})."); - } - } - - if (componentCount < 0) - { - componentCount = 0; - } - - return new ScenarioExecutionResult(durations, componentCount); - } - - private static Func CreateFactory(string analyzerId) - { - var id = analyzerId.Trim().ToLowerInvariant(); - return id switch - { - "java" => static () => new JavaLanguageAnalyzer(), - "go" => static () => new GoLanguageAnalyzer(), - "node" => static () => new NodeLanguageAnalyzer(), - "dotnet" => static () => new DotNetLanguageAnalyzer(), - "python" => static () => new PythonLanguageAnalyzer(), - _ => throw new InvalidOperationException($"Unsupported analyzer '{analyzerId}'."), - }; - } -} - -internal sealed class MetadataWalkScenarioRunner : IScenarioRunner -{ - private readonly Regex _matcher; - private readonly string _parserKind; - - public MetadataWalkScenarioRunner(string parserKind, string globPattern) - { - _parserKind = parserKind?.Trim().ToLowerInvariant() ?? throw new ArgumentNullException(nameof(parserKind)); - _matcher = GlobToRegex(globPattern ?? throw new ArgumentNullException(nameof(globPattern))); - } - - public async Task ExecuteAsync(string rootPath, int iterations, CancellationToken cancellationToken) - { - if (iterations <= 0) - { - throw new ArgumentOutOfRangeException(nameof(iterations), iterations, "Iterations must be positive."); - } - - var durations = new double[iterations]; - var sampleCount = -1; - - for (var i = 0; i < iterations; i++) - { - cancellationToken.ThrowIfCancellationRequested(); - - var stopwatch = Stopwatch.StartNew(); - var files = EnumerateMatchingFiles(rootPath); - if (files.Count == 0) - { - throw new InvalidOperationException($"Parser '{_parserKind}' matched zero files under '{rootPath}'."); - } - - foreach (var file in files) - { - cancellationToken.ThrowIfCancellationRequested(); - await ParseAsync(file).ConfigureAwait(false); - } - - stopwatch.Stop(); - durations[i] = stopwatch.Elapsed.TotalMilliseconds; - - if (sampleCount < 0) - { - sampleCount = files.Count; - } - else if (sampleCount != files.Count) - { - throw new InvalidOperationException($"File count changed between iterations ({sampleCount} vs {files.Count})."); - } - } - - if (sampleCount < 0) - { - sampleCount = 0; - } - - return new ScenarioExecutionResult(durations, sampleCount); - } - - private async ValueTask ParseAsync(string filePath) - { - switch (_parserKind) - { - case "node": - { - using var stream = File.OpenRead(filePath); - using var document = await JsonDocument.ParseAsync(stream).ConfigureAwait(false); - - if (!document.RootElement.TryGetProperty("name", out var name) || name.ValueKind != JsonValueKind.String) - { - throw new InvalidOperationException($"package.json '{filePath}' missing name."); - } - - if (!document.RootElement.TryGetProperty("version", out var version) || version.ValueKind != JsonValueKind.String) - { - throw new InvalidOperationException($"package.json '{filePath}' missing version."); - } - } - break; - case "python": - { - var (name, version) = await ParsePythonMetadataAsync(filePath).ConfigureAwait(false); - if (string.IsNullOrEmpty(name) || string.IsNullOrEmpty(version)) - 
{ - throw new InvalidOperationException($"METADATA '{filePath}' missing Name/Version."); - } - } - break; - default: - throw new InvalidOperationException($"Unknown parser '{_parserKind}'."); - } - } - - private static async Task<(string? Name, string? Version)> ParsePythonMetadataAsync(string filePath) - { - using var stream = new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.Read | FileShare.Delete); - using var reader = new StreamReader(stream); - - string? name = null; - string? version = null; - - while (await reader.ReadLineAsync().ConfigureAwait(false) is { } line) - { - if (line.StartsWith("Name:", StringComparison.OrdinalIgnoreCase)) - { - name ??= line[5..].Trim(); - } - else if (line.StartsWith("Version:", StringComparison.OrdinalIgnoreCase)) - { - version ??= line[8..].Trim(); - } - - if (!string.IsNullOrEmpty(name) && !string.IsNullOrEmpty(version)) - { - break; - } - } - - return (name, version); - } - - private IReadOnlyList EnumerateMatchingFiles(string rootPath) - { - var files = new List(); - var stack = new Stack(); - stack.Push(rootPath); - - while (stack.Count > 0) - { - var current = stack.Pop(); - foreach (var directory in Directory.EnumerateDirectories(current)) - { - stack.Push(directory); - } - - foreach (var file in Directory.EnumerateFiles(current)) - { - var relative = Path.GetRelativePath(rootPath, file).Replace('\\', '/'); - if (_matcher.IsMatch(relative)) - { - files.Add(file); - } - } - } - - return files; - } - - private static Regex GlobToRegex(string pattern) - { - if (string.IsNullOrWhiteSpace(pattern)) - { - throw new ArgumentException("Glob pattern is required.", nameof(pattern)); - } - - var normalized = pattern.Replace("\\", "/"); - normalized = normalized.Replace("**", "\u0001"); - normalized = normalized.Replace("*", "\u0002"); - - var escaped = Regex.Escape(normalized); - escaped = escaped.Replace("\u0001/", "(?:.*/)?", StringComparison.Ordinal); - escaped = escaped.Replace("\u0001", ".*", StringComparison.Ordinal); - escaped = escaped.Replace("\u0002", "[^/]*", StringComparison.Ordinal); - - return new Regex("^" + escaped + "$", RegexOptions.Compiled | RegexOptions.CultureInvariant); - } -} +using System.Diagnostics; +using System.Text; +using System.Linq; +using System.Text.Json; +using System.Text.RegularExpressions; +using StellaOps.Scanner.Analyzers.Lang; +using StellaOps.Scanner.Analyzers.Lang.Go; +using StellaOps.Scanner.Analyzers.Lang.Java; +using StellaOps.Scanner.Analyzers.Lang.Node; +using StellaOps.Scanner.Analyzers.Lang.DotNet; +using StellaOps.Scanner.Analyzers.Lang.Python; + +namespace StellaOps.Bench.ScannerAnalyzers.Scenarios; + +internal interface IScenarioRunner +{ + Task ExecuteAsync(string rootPath, int iterations, CancellationToken cancellationToken); +} + +internal sealed record ScenarioExecutionResult(double[] Durations, int SampleCount); + +internal static class ScenarioRunnerFactory +{ + public static IScenarioRunner Create(BenchmarkScenarioConfig scenario) + { + if (scenario.HasAnalyzers) + { + return new LanguageAnalyzerScenarioRunner(scenario.Analyzers!); + } + + if (string.IsNullOrWhiteSpace(scenario.Parser) || string.IsNullOrWhiteSpace(scenario.Matcher)) + { + throw new InvalidOperationException($"Scenario '{scenario.Id}' missing parser or matcher configuration."); + } + + return new MetadataWalkScenarioRunner(scenario.Parser, scenario.Matcher); + } +} + +internal sealed class LanguageAnalyzerScenarioRunner : IScenarioRunner +{ + private readonly IReadOnlyList> _analyzerFactories; + + public 
LanguageAnalyzerScenarioRunner(IEnumerable analyzerIds) + { + if (analyzerIds is null) + { + throw new ArgumentNullException(nameof(analyzerIds)); + } + + _analyzerFactories = analyzerIds + .Where(static id => !string.IsNullOrWhiteSpace(id)) + .Select(CreateFactory) + .ToArray(); + + if (_analyzerFactories.Count == 0) + { + throw new InvalidOperationException("At least one analyzer id must be provided."); + } + } + + public async Task ExecuteAsync(string rootPath, int iterations, CancellationToken cancellationToken) + { + if (iterations <= 0) + { + throw new ArgumentOutOfRangeException(nameof(iterations), iterations, "Iterations must be positive."); + } + + var analyzers = _analyzerFactories.Select(factory => factory()).ToArray(); + var engine = new LanguageAnalyzerEngine(analyzers); + var durations = new double[iterations]; + var componentCount = -1; + + for (var i = 0; i < iterations; i++) + { + cancellationToken.ThrowIfCancellationRequested(); + + var context = new LanguageAnalyzerContext(rootPath, TimeProvider.System); + var stopwatch = Stopwatch.StartNew(); + var result = await engine.AnalyzeAsync(context, cancellationToken).ConfigureAwait(false); + stopwatch.Stop(); + + durations[i] = stopwatch.Elapsed.TotalMilliseconds; + + var currentCount = result.Components.Count; + if (componentCount < 0) + { + componentCount = currentCount; + } + else if (componentCount != currentCount) + { + throw new InvalidOperationException($"Analyzer output count changed between iterations ({componentCount} vs {currentCount})."); + } + } + + if (componentCount < 0) + { + componentCount = 0; + } + + return new ScenarioExecutionResult(durations, componentCount); + } + + private static Func CreateFactory(string analyzerId) + { + var id = analyzerId.Trim().ToLowerInvariant(); + return id switch + { + "java" => static () => new JavaLanguageAnalyzer(), + "go" => static () => new GoLanguageAnalyzer(), + "node" => static () => new NodeLanguageAnalyzer(), + "dotnet" => static () => new DotNetLanguageAnalyzer(), + "python" => static () => new PythonLanguageAnalyzer(), + _ => throw new InvalidOperationException($"Unsupported analyzer '{analyzerId}'."), + }; + } +} + +internal sealed class MetadataWalkScenarioRunner : IScenarioRunner +{ + private readonly Regex _matcher; + private readonly string _parserKind; + + public MetadataWalkScenarioRunner(string parserKind, string globPattern) + { + _parserKind = parserKind?.Trim().ToLowerInvariant() ?? throw new ArgumentNullException(nameof(parserKind)); + _matcher = GlobToRegex(globPattern ?? 
throw new ArgumentNullException(nameof(globPattern))); + } + + public async Task ExecuteAsync(string rootPath, int iterations, CancellationToken cancellationToken) + { + if (iterations <= 0) + { + throw new ArgumentOutOfRangeException(nameof(iterations), iterations, "Iterations must be positive."); + } + + var durations = new double[iterations]; + var sampleCount = -1; + + for (var i = 0; i < iterations; i++) + { + cancellationToken.ThrowIfCancellationRequested(); + + var stopwatch = Stopwatch.StartNew(); + var files = EnumerateMatchingFiles(rootPath); + if (files.Count == 0) + { + throw new InvalidOperationException($"Parser '{_parserKind}' matched zero files under '{rootPath}'."); + } + + foreach (var file in files) + { + cancellationToken.ThrowIfCancellationRequested(); + await ParseAsync(file).ConfigureAwait(false); + } + + stopwatch.Stop(); + durations[i] = stopwatch.Elapsed.TotalMilliseconds; + + if (sampleCount < 0) + { + sampleCount = files.Count; + } + else if (sampleCount != files.Count) + { + throw new InvalidOperationException($"File count changed between iterations ({sampleCount} vs {files.Count})."); + } + } + + if (sampleCount < 0) + { + sampleCount = 0; + } + + return new ScenarioExecutionResult(durations, sampleCount); + } + + private async ValueTask ParseAsync(string filePath) + { + switch (_parserKind) + { + case "node": + { + using var stream = File.OpenRead(filePath); + using var document = await JsonDocument.ParseAsync(stream).ConfigureAwait(false); + + if (!document.RootElement.TryGetProperty("name", out var name) || name.ValueKind != JsonValueKind.String) + { + throw new InvalidOperationException($"package.json '{filePath}' missing name."); + } + + if (!document.RootElement.TryGetProperty("version", out var version) || version.ValueKind != JsonValueKind.String) + { + throw new InvalidOperationException($"package.json '{filePath}' missing version."); + } + } + break; + case "python": + { + var (name, version) = await ParsePythonMetadataAsync(filePath).ConfigureAwait(false); + if (string.IsNullOrEmpty(name) || string.IsNullOrEmpty(version)) + { + throw new InvalidOperationException($"METADATA '{filePath}' missing Name/Version."); + } + } + break; + default: + throw new InvalidOperationException($"Unknown parser '{_parserKind}'."); + } + } + + private static async Task<(string? Name, string? Version)> ParsePythonMetadataAsync(string filePath) + { + using var stream = new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.Read | FileShare.Delete); + using var reader = new StreamReader(stream); + + string? name = null; + string? 
version = null; + + while (await reader.ReadLineAsync().ConfigureAwait(false) is { } line) + { + if (line.StartsWith("Name:", StringComparison.OrdinalIgnoreCase)) + { + name ??= line[5..].Trim(); + } + else if (line.StartsWith("Version:", StringComparison.OrdinalIgnoreCase)) + { + version ??= line[8..].Trim(); + } + + if (!string.IsNullOrEmpty(name) && !string.IsNullOrEmpty(version)) + { + break; + } + } + + return (name, version); + } + + private IReadOnlyList EnumerateMatchingFiles(string rootPath) + { + var files = new List(); + var stack = new Stack(); + stack.Push(rootPath); + + while (stack.Count > 0) + { + var current = stack.Pop(); + foreach (var directory in Directory.EnumerateDirectories(current)) + { + stack.Push(directory); + } + + foreach (var file in Directory.EnumerateFiles(current)) + { + var relative = Path.GetRelativePath(rootPath, file).Replace('\\', '/'); + if (_matcher.IsMatch(relative)) + { + files.Add(file); + } + } + } + + return files; + } + + private static Regex GlobToRegex(string pattern) + { + if (string.IsNullOrWhiteSpace(pattern)) + { + throw new ArgumentException("Glob pattern is required.", nameof(pattern)); + } + + var normalized = pattern.Replace("\\", "/"); + normalized = normalized.Replace("**", "\u0001"); + normalized = normalized.Replace("*", "\u0002"); + + var escaped = Regex.Escape(normalized); + escaped = escaped.Replace("\u0001/", "(?:.*/)?", StringComparison.Ordinal); + escaped = escaped.Replace("\u0001", ".*", StringComparison.Ordinal); + escaped = escaped.Replace("\u0002", "[^/]*", StringComparison.Ordinal); + + return new Regex("^" + escaped + "$", RegexOptions.Compiled | RegexOptions.CultureInvariant); + } +} diff --git a/src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/StellaOps.Bench.ScannerAnalyzers.csproj b/src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/StellaOps.Bench.ScannerAnalyzers.csproj new file mode 100644 index 00000000..e6f053d1 --- /dev/null +++ b/src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/StellaOps.Bench.ScannerAnalyzers.csproj @@ -0,0 +1,24 @@ + + + + Exe + net10.0 + enable + enable + preview + true + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/StellaOps.Bench/Scanner.Analyzers/baseline.csv b/src/Bench/StellaOps.Bench/Scanner.Analyzers/baseline.csv similarity index 98% rename from src/StellaOps.Bench/Scanner.Analyzers/baseline.csv rename to src/Bench/StellaOps.Bench/Scanner.Analyzers/baseline.csv index 2611f6de..a75ee90e 100644 --- a/src/StellaOps.Bench/Scanner.Analyzers/baseline.csv +++ b/src/Bench/StellaOps.Bench/Scanner.Analyzers/baseline.csv @@ -1,7 +1,7 @@ -scenario,iterations,sample_count,mean_ms,p95_ms,max_ms -node_monorepo_walk,5,4,6.0975,21.7421,26.8537 -java_demo_archive,5,1,6.2007,23.4837,29.1143 -go_buildinfo_fixture,5,2,6.1949,22.6851,27.9196 -dotnet_multirid_fixture,5,2,11.4884,37.7460,46.4850 -python_site_packages_scan,5,3,5.6420,18.2943,22.3739 -python_pip_cache_fixture,5,1,5.8598,13.2855,15.6256 +scenario,iterations,sample_count,mean_ms,p95_ms,max_ms +node_monorepo_walk,5,4,6.0975,21.7421,26.8537 +java_demo_archive,5,1,6.2007,23.4837,29.1143 +go_buildinfo_fixture,5,2,6.1949,22.6851,27.9196 +dotnet_multirid_fixture,5,2,11.4884,37.7460,46.4850 +python_site_packages_scan,5,3,5.6420,18.2943,22.3739 +python_pip_cache_fixture,5,1,5.8598,13.2855,15.6256 diff --git a/src/StellaOps.Bench/Scanner.Analyzers/config.json b/src/Bench/StellaOps.Bench/Scanner.Analyzers/config.json similarity index 77% 
rename from src/StellaOps.Bench/Scanner.Analyzers/config.json rename to src/Bench/StellaOps.Bench/Scanner.Analyzers/config.json index 2b49a58d..456c3246 100644 --- a/src/StellaOps.Bench/Scanner.Analyzers/config.json +++ b/src/Bench/StellaOps.Bench/Scanner.Analyzers/config.json @@ -21,7 +21,7 @@ { "id": "go_buildinfo_fixture", "label": "Go analyzer on build-info binary", - "root": "src/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Fixtures/lang/go/basic", + "root": "src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Fixtures/lang/go/basic", "analyzers": [ "go" ] @@ -29,7 +29,7 @@ { "id": "dotnet_multirid_fixture", "label": ".NET analyzer on multi-RID fixture", - "root": "src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi", + "root": "src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi", "analyzers": [ "dotnet" ] @@ -45,7 +45,7 @@ { "id": "python_pip_cache_fixture", "label": "Python analyzer verifying RECORD hashes", - "root": "src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache", + "root": "src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache", "analyzers": [ "python" ] diff --git a/src/StellaOps.Bench/Scanner.Analyzers/lang/README.md b/src/Bench/StellaOps.Bench/Scanner.Analyzers/lang/README.md similarity index 84% rename from src/StellaOps.Bench/Scanner.Analyzers/lang/README.md rename to src/Bench/StellaOps.Bench/Scanner.Analyzers/lang/README.md index 3ffaba33..72230516 100644 --- a/src/StellaOps.Bench/Scanner.Analyzers/lang/README.md +++ b/src/Bench/StellaOps.Bench/Scanner.Analyzers/lang/README.md @@ -16,13 +16,13 @@ Results should be committed as deterministic CSV/JSON outputs with accompanying - Scenario `go_buildinfo_fixture` captures our Go analyzer running against the basic build-info fixture. The Oct 23 baseline (`baseline.csv`) shows a mean duration of **35.03 ms** (p95 136.55 ms, max 170.16 ms) over 5 iterations on the current rig; earlier Oct 21 measurement recorded **4.02 ms** mean when the analyzer was profiled on the warm perf runner. - Comparative run against Syft v1.29.1 on the same fixture (captured 2025-10-21) reported a mean of **5.18 ms** (p95 18.64 ms, max 23.51 ms); raw measurements live in `go/syft-comparison-20251021.csv`. - Bench command (from repo root):\ - `dotnet run --project src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/StellaOps.Bench.ScannerAnalyzers.csproj -- --config src/StellaOps.Bench/Scanner.Analyzers/config.json --out src/StellaOps.Bench/Scanner.Analyzers/baseline.csv` + `dotnet run --project src/Bench/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/StellaOps.Bench.ScannerAnalyzers.csproj -- --config src/Bench/StellaOps.Bench/Scanner.Analyzers/config.json --out src/Bench/StellaOps.Bench/Scanner.Analyzers/baseline.csv` ## Sprint LA4 — .NET Analyzer Benchmark Notes (2025-10-23) - Scenario `dotnet_multirid_fixture` exercises the .NET analyzer against the multi-RID test fixture that merges two applications and four runtime identifiers. Latest baseline run (Release build, 5 iterations) records a mean duration of **29.19 ms** (p95 106.62 ms, max 132.30 ms) with a stable component count of 2. - Syft v1.29.1 scanning the same fixture (`syft scan dir:…`) averaged **1 546 ms** (p95 ≈2 100 ms, max ≈2 100 ms) while also reporting duplicate packages; raw numbers captured in `dotnet/syft-comparison-20251023.csv`. 
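The `mean_ms`/`p95_ms`/`max_ms` columns in these baselines are rolled up from the per-iteration durations that the harness records in `ScenarioExecutionResult`. The roll-up code is not part of this hunk; the sketch below is illustrative only, assumes a linearly interpolated 95th percentile, and uses a made-up helper name (`BenchStats.Summarize`).

```csharp
using System;
using System.Collections.Generic;
using System.Linq;

// Illustrative sketch only: one plausible way to derive the baseline.csv summary
// columns from per-iteration durations. The interpolation choice is an assumption.
internal static class BenchStats
{
    public static (double MeanMs, double P95Ms, double MaxMs) Summarize(IReadOnlyList<double> durationsMs)
    {
        if (durationsMs is null || durationsMs.Count == 0)
        {
            throw new ArgumentException("At least one duration is required.", nameof(durationsMs));
        }

        var sorted = durationsMs.OrderBy(value => value).ToArray();
        var mean = sorted.Average();

        // Linearly interpolated 95th percentile over the sorted samples.
        var rank = 0.95 * (sorted.Length - 1);
        var lower = (int)Math.Floor(rank);
        var upper = Math.Min(lower + 1, sorted.Length - 1);
        var p95 = sorted[lower] + (rank - lower) * (sorted[upper] - sorted[lower]);

        return (mean, p95, sorted[^1]);
    }
}
```

For the five-iteration baselines this interpolates between the two slowest samples, which is consistent with `p95_ms` sitting below `max_ms` in `baseline.csv`.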
-- The new scenario is declared in `src/StellaOps.Bench/Scanner.Analyzers/config.json`; rerun the bench command above after rebuilding analyzers to refresh baselines and comparison data. +- The new scenario is declared in `src/Bench/StellaOps.Bench/Scanner.Analyzers/config.json`; rerun the bench command above after rebuilding analyzers to refresh baselines and comparison data. ## Sprint LA2 — Python Analyzer Benchmark Notes (2025-10-23) diff --git a/src/StellaOps.Bench/Scanner.Analyzers/lang/dotnet/syft-comparison-20251023.csv b/src/Bench/StellaOps.Bench/Scanner.Analyzers/lang/dotnet/syft-comparison-20251023.csv similarity index 98% rename from src/StellaOps.Bench/Scanner.Analyzers/lang/dotnet/syft-comparison-20251023.csv rename to src/Bench/StellaOps.Bench/Scanner.Analyzers/lang/dotnet/syft-comparison-20251023.csv index 014278a4..fd2406f4 100644 --- a/src/StellaOps.Bench/Scanner.Analyzers/lang/dotnet/syft-comparison-20251023.csv +++ b/src/Bench/StellaOps.Bench/Scanner.Analyzers/lang/dotnet/syft-comparison-20251023.csv @@ -1,2 +1,2 @@ -scenario,iterations,sample_count,mean_ms,p95_ms,max_ms -syft_dotnet_multirid_fixture,5,2,1546.1609,2099.6870,2099.6870 +scenario,iterations,sample_count,mean_ms,p95_ms,max_ms +syft_dotnet_multirid_fixture,5,2,1546.1609,2099.6870,2099.6870 diff --git a/src/StellaOps.Bench/Scanner.Analyzers/lang/go/syft-comparison-20251021.csv b/src/Bench/StellaOps.Bench/Scanner.Analyzers/lang/go/syft-comparison-20251021.csv similarity index 98% rename from src/StellaOps.Bench/Scanner.Analyzers/lang/go/syft-comparison-20251021.csv rename to src/Bench/StellaOps.Bench/Scanner.Analyzers/lang/go/syft-comparison-20251021.csv index 62bb4b2d..19a2e1c5 100644 --- a/src/StellaOps.Bench/Scanner.Analyzers/lang/go/syft-comparison-20251021.csv +++ b/src/Bench/StellaOps.Bench/Scanner.Analyzers/lang/go/syft-comparison-20251021.csv @@ -1,2 +1,2 @@ -scenario,iterations,sample_count,mean_ms,p95_ms,max_ms -syft_go_buildinfo_fixture,5,2,5.1840,18.6375,23.5120 +scenario,iterations,sample_count,mean_ms,p95_ms,max_ms +syft_go_buildinfo_fixture,5,2,5.1840,18.6375,23.5120 diff --git a/src/StellaOps.Bench/Scanner.Analyzers/lang/python/hash-throughput-20251023.csv b/src/Bench/StellaOps.Bench/Scanner.Analyzers/lang/python/hash-throughput-20251023.csv similarity index 98% rename from src/StellaOps.Bench/Scanner.Analyzers/lang/python/hash-throughput-20251023.csv rename to src/Bench/StellaOps.Bench/Scanner.Analyzers/lang/python/hash-throughput-20251023.csv index fd965df4..e846e0b8 100644 --- a/src/StellaOps.Bench/Scanner.Analyzers/lang/python/hash-throughput-20251023.csv +++ b/src/Bench/StellaOps.Bench/Scanner.Analyzers/lang/python/hash-throughput-20251023.csv @@ -1,3 +1,3 @@ -scenario,iterations,sample_count,mean_ms,p95_ms,max_ms -python_site_packages_scan,5,3,5.6420,18.2943,22.3739 -python_pip_cache_fixture,5,1,5.8598,13.2855,15.6256 +scenario,iterations,sample_count,mean_ms,p95_ms,max_ms +python_site_packages_scan,5,3,5.6420,18.2943,22.3739 +python_pip_cache_fixture,5,1,5.8598,13.2855,15.6256 diff --git a/src/StellaOps.Bench/TASKS.md b/src/Bench/StellaOps.Bench/TASKS.md similarity index 98% rename from src/StellaOps.Bench/TASKS.md rename to src/Bench/StellaOps.Bench/TASKS.md index c25d043c..85109a35 100644 --- a/src/StellaOps.Bench/TASKS.md +++ b/src/Bench/StellaOps.Bench/TASKS.md @@ -2,7 +2,7 @@ | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| -| BENCH-SCANNER-10-001 | DONE | Bench Guild, Scanner Team | 
SCANNER-ANALYZERS-LANG-10-303 | Analyzer microbench harness (node_modules, site-packages) + baseline CSV. | Harness committed under `src/StellaOps.Bench/Scanner.Analyzers`; baseline CSV recorded; CI job publishes results. | +| BENCH-SCANNER-10-001 | DONE | Bench Guild, Scanner Team | SCANNER-ANALYZERS-LANG-10-303 | Analyzer microbench harness (node_modules, site-packages) + baseline CSV. | Harness committed under `src/Bench/StellaOps.Bench/Scanner.Analyzers`; baseline CSV recorded; CI job publishes results. | | BENCH-SCANNER-10-002 | DONE (2025-10-21) | Bench Guild, Language Analyzer Guild | SCANNER-ANALYZERS-LANG-10-301..309 | Wire real language analyzers into bench harness & refresh baselines post-implementation. | Harness executes analyzer assemblies end-to-end; updated baseline committed; CI trend doc linked. | | BENCH-IMPACT-16-001 | TODO | Bench Guild, Scheduler Team | SCHED-IMPACT-16-301 | ImpactIndex throughput bench (resolve 10k productKeys) + RAM profile. | Benchmark script ready; baseline metrics recorded; alert thresholds defined. | | BENCH-NOTIFY-15-001 | DONE (2025-10-26) | Bench Guild, Notify Team | NOTIFY-ENGINE-15-301 | Notify dispatch throughput bench (vary rule density) with results CSV. | Bench executed; results stored; regression alert configured. | diff --git a/src/Cartographer/StellaOps.Cartographer.sln b/src/Cartographer/StellaOps.Cartographer.sln new file mode 100644 index 00000000..af71f487 --- /dev/null +++ b/src/Cartographer/StellaOps.Cartographer.sln @@ -0,0 +1,179 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.0.31903.59 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cartographer", "StellaOps.Cartographer\StellaOps.Cartographer.csproj", "{BD5B8D1C-C3C2-4ED5-B917-E5318CA3EF20}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Configuration", "..\__Libraries\StellaOps.Configuration\StellaOps.Configuration.csproj", "{A324A97D-60A2-4A5C-B882-11E08019EB80}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Authority.Plugins.Abstractions", "..\Authority\StellaOps.Authority\StellaOps.Authority.Plugins.Abstractions\StellaOps.Authority.Plugins.Abstractions.csproj", "{90295E53-CAE8-4A4D-9B6E-7F58583836B4}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cryptography", "..\__Libraries\StellaOps.Cryptography\StellaOps.Cryptography.csproj", "{8559B69A-794A-4F22-A78C-1ED0B38D6B20}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Abstractions", "..\Authority\StellaOps.Authority\StellaOps.Auth.Abstractions\StellaOps.Auth.Abstractions.csproj", "{6E0F66B6-228D-41EE-B7FF-CC9D9AF19345}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.DependencyInjection", "..\__Libraries\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj", "{8C0747BF-4F65-4238-863F-36D1E2E87355}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Policy.Engine", "..\Policy\StellaOps.Policy.Engine\StellaOps.Policy.Engine.csproj", "{288F9D27-634E-45EC-8F89-4EAC68175113}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Policy", "..\Policy\__Libraries\StellaOps.Policy\StellaOps.Policy.csproj", "{2117B457-836C-4F74-A8EB-B5F910B54524}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Client", 
"..\Authority\StellaOps.Authority\StellaOps.Auth.Client\StellaOps.Auth.Client.csproj", "{762B2F00-9917-4D77-8DF4-ECD8651A4C13}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.ServerIntegration", "..\Authority\StellaOps.Authority\StellaOps.Auth.ServerIntegration\StellaOps.Auth.ServerIntegration.csproj", "{772D954B-0C2A-4377-B66F-329484EEB19F}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "__Tests", "__Tests", "{56BCE1BF-7CBA-7CE8-203D-A88051F1D642}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cartographer.Tests", "__Tests\StellaOps.Cartographer.Tests\StellaOps.Cartographer.Tests.csproj", "{0AF757AA-BD1E-49A2-A7E9-C3F78DD09176}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|Any CPU = Release|Any CPU + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {BD5B8D1C-C3C2-4ED5-B917-E5318CA3EF20}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {BD5B8D1C-C3C2-4ED5-B917-E5318CA3EF20}.Debug|Any CPU.Build.0 = Debug|Any CPU + {BD5B8D1C-C3C2-4ED5-B917-E5318CA3EF20}.Debug|x64.ActiveCfg = Debug|Any CPU + {BD5B8D1C-C3C2-4ED5-B917-E5318CA3EF20}.Debug|x64.Build.0 = Debug|Any CPU + {BD5B8D1C-C3C2-4ED5-B917-E5318CA3EF20}.Debug|x86.ActiveCfg = Debug|Any CPU + {BD5B8D1C-C3C2-4ED5-B917-E5318CA3EF20}.Debug|x86.Build.0 = Debug|Any CPU + {BD5B8D1C-C3C2-4ED5-B917-E5318CA3EF20}.Release|Any CPU.ActiveCfg = Release|Any CPU + {BD5B8D1C-C3C2-4ED5-B917-E5318CA3EF20}.Release|Any CPU.Build.0 = Release|Any CPU + {BD5B8D1C-C3C2-4ED5-B917-E5318CA3EF20}.Release|x64.ActiveCfg = Release|Any CPU + {BD5B8D1C-C3C2-4ED5-B917-E5318CA3EF20}.Release|x64.Build.0 = Release|Any CPU + {BD5B8D1C-C3C2-4ED5-B917-E5318CA3EF20}.Release|x86.ActiveCfg = Release|Any CPU + {BD5B8D1C-C3C2-4ED5-B917-E5318CA3EF20}.Release|x86.Build.0 = Release|Any CPU + {A324A97D-60A2-4A5C-B882-11E08019EB80}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {A324A97D-60A2-4A5C-B882-11E08019EB80}.Debug|Any CPU.Build.0 = Debug|Any CPU + {A324A97D-60A2-4A5C-B882-11E08019EB80}.Debug|x64.ActiveCfg = Debug|Any CPU + {A324A97D-60A2-4A5C-B882-11E08019EB80}.Debug|x64.Build.0 = Debug|Any CPU + {A324A97D-60A2-4A5C-B882-11E08019EB80}.Debug|x86.ActiveCfg = Debug|Any CPU + {A324A97D-60A2-4A5C-B882-11E08019EB80}.Debug|x86.Build.0 = Debug|Any CPU + {A324A97D-60A2-4A5C-B882-11E08019EB80}.Release|Any CPU.ActiveCfg = Release|Any CPU + {A324A97D-60A2-4A5C-B882-11E08019EB80}.Release|Any CPU.Build.0 = Release|Any CPU + {A324A97D-60A2-4A5C-B882-11E08019EB80}.Release|x64.ActiveCfg = Release|Any CPU + {A324A97D-60A2-4A5C-B882-11E08019EB80}.Release|x64.Build.0 = Release|Any CPU + {A324A97D-60A2-4A5C-B882-11E08019EB80}.Release|x86.ActiveCfg = Release|Any CPU + {A324A97D-60A2-4A5C-B882-11E08019EB80}.Release|x86.Build.0 = Release|Any CPU + {90295E53-CAE8-4A4D-9B6E-7F58583836B4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {90295E53-CAE8-4A4D-9B6E-7F58583836B4}.Debug|Any CPU.Build.0 = Debug|Any CPU + {90295E53-CAE8-4A4D-9B6E-7F58583836B4}.Debug|x64.ActiveCfg = Debug|Any CPU + {90295E53-CAE8-4A4D-9B6E-7F58583836B4}.Debug|x64.Build.0 = Debug|Any CPU + {90295E53-CAE8-4A4D-9B6E-7F58583836B4}.Debug|x86.ActiveCfg = Debug|Any CPU + {90295E53-CAE8-4A4D-9B6E-7F58583836B4}.Debug|x86.Build.0 = Debug|Any CPU + {90295E53-CAE8-4A4D-9B6E-7F58583836B4}.Release|Any CPU.ActiveCfg = Release|Any CPU + 
{90295E53-CAE8-4A4D-9B6E-7F58583836B4}.Release|Any CPU.Build.0 = Release|Any CPU + {90295E53-CAE8-4A4D-9B6E-7F58583836B4}.Release|x64.ActiveCfg = Release|Any CPU + {90295E53-CAE8-4A4D-9B6E-7F58583836B4}.Release|x64.Build.0 = Release|Any CPU + {90295E53-CAE8-4A4D-9B6E-7F58583836B4}.Release|x86.ActiveCfg = Release|Any CPU + {90295E53-CAE8-4A4D-9B6E-7F58583836B4}.Release|x86.Build.0 = Release|Any CPU + {8559B69A-794A-4F22-A78C-1ED0B38D6B20}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {8559B69A-794A-4F22-A78C-1ED0B38D6B20}.Debug|Any CPU.Build.0 = Debug|Any CPU + {8559B69A-794A-4F22-A78C-1ED0B38D6B20}.Debug|x64.ActiveCfg = Debug|Any CPU + {8559B69A-794A-4F22-A78C-1ED0B38D6B20}.Debug|x64.Build.0 = Debug|Any CPU + {8559B69A-794A-4F22-A78C-1ED0B38D6B20}.Debug|x86.ActiveCfg = Debug|Any CPU + {8559B69A-794A-4F22-A78C-1ED0B38D6B20}.Debug|x86.Build.0 = Debug|Any CPU + {8559B69A-794A-4F22-A78C-1ED0B38D6B20}.Release|Any CPU.ActiveCfg = Release|Any CPU + {8559B69A-794A-4F22-A78C-1ED0B38D6B20}.Release|Any CPU.Build.0 = Release|Any CPU + {8559B69A-794A-4F22-A78C-1ED0B38D6B20}.Release|x64.ActiveCfg = Release|Any CPU + {8559B69A-794A-4F22-A78C-1ED0B38D6B20}.Release|x64.Build.0 = Release|Any CPU + {8559B69A-794A-4F22-A78C-1ED0B38D6B20}.Release|x86.ActiveCfg = Release|Any CPU + {8559B69A-794A-4F22-A78C-1ED0B38D6B20}.Release|x86.Build.0 = Release|Any CPU + {6E0F66B6-228D-41EE-B7FF-CC9D9AF19345}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {6E0F66B6-228D-41EE-B7FF-CC9D9AF19345}.Debug|Any CPU.Build.0 = Debug|Any CPU + {6E0F66B6-228D-41EE-B7FF-CC9D9AF19345}.Debug|x64.ActiveCfg = Debug|Any CPU + {6E0F66B6-228D-41EE-B7FF-CC9D9AF19345}.Debug|x64.Build.0 = Debug|Any CPU + {6E0F66B6-228D-41EE-B7FF-CC9D9AF19345}.Debug|x86.ActiveCfg = Debug|Any CPU + {6E0F66B6-228D-41EE-B7FF-CC9D9AF19345}.Debug|x86.Build.0 = Debug|Any CPU + {6E0F66B6-228D-41EE-B7FF-CC9D9AF19345}.Release|Any CPU.ActiveCfg = Release|Any CPU + {6E0F66B6-228D-41EE-B7FF-CC9D9AF19345}.Release|Any CPU.Build.0 = Release|Any CPU + {6E0F66B6-228D-41EE-B7FF-CC9D9AF19345}.Release|x64.ActiveCfg = Release|Any CPU + {6E0F66B6-228D-41EE-B7FF-CC9D9AF19345}.Release|x64.Build.0 = Release|Any CPU + {6E0F66B6-228D-41EE-B7FF-CC9D9AF19345}.Release|x86.ActiveCfg = Release|Any CPU + {6E0F66B6-228D-41EE-B7FF-CC9D9AF19345}.Release|x86.Build.0 = Release|Any CPU + {8C0747BF-4F65-4238-863F-36D1E2E87355}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {8C0747BF-4F65-4238-863F-36D1E2E87355}.Debug|Any CPU.Build.0 = Debug|Any CPU + {8C0747BF-4F65-4238-863F-36D1E2E87355}.Debug|x64.ActiveCfg = Debug|Any CPU + {8C0747BF-4F65-4238-863F-36D1E2E87355}.Debug|x64.Build.0 = Debug|Any CPU + {8C0747BF-4F65-4238-863F-36D1E2E87355}.Debug|x86.ActiveCfg = Debug|Any CPU + {8C0747BF-4F65-4238-863F-36D1E2E87355}.Debug|x86.Build.0 = Debug|Any CPU + {8C0747BF-4F65-4238-863F-36D1E2E87355}.Release|Any CPU.ActiveCfg = Release|Any CPU + {8C0747BF-4F65-4238-863F-36D1E2E87355}.Release|Any CPU.Build.0 = Release|Any CPU + {8C0747BF-4F65-4238-863F-36D1E2E87355}.Release|x64.ActiveCfg = Release|Any CPU + {8C0747BF-4F65-4238-863F-36D1E2E87355}.Release|x64.Build.0 = Release|Any CPU + {8C0747BF-4F65-4238-863F-36D1E2E87355}.Release|x86.ActiveCfg = Release|Any CPU + {8C0747BF-4F65-4238-863F-36D1E2E87355}.Release|x86.Build.0 = Release|Any CPU + {288F9D27-634E-45EC-8F89-4EAC68175113}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {288F9D27-634E-45EC-8F89-4EAC68175113}.Debug|Any CPU.Build.0 = Debug|Any CPU + {288F9D27-634E-45EC-8F89-4EAC68175113}.Debug|x64.ActiveCfg = Debug|Any CPU + {288F9D27-634E-45EC-8F89-4EAC68175113}.Debug|x64.Build.0 = 
Debug|Any CPU + {288F9D27-634E-45EC-8F89-4EAC68175113}.Debug|x86.ActiveCfg = Debug|Any CPU + {288F9D27-634E-45EC-8F89-4EAC68175113}.Debug|x86.Build.0 = Debug|Any CPU + {288F9D27-634E-45EC-8F89-4EAC68175113}.Release|Any CPU.ActiveCfg = Release|Any CPU + {288F9D27-634E-45EC-8F89-4EAC68175113}.Release|Any CPU.Build.0 = Release|Any CPU + {288F9D27-634E-45EC-8F89-4EAC68175113}.Release|x64.ActiveCfg = Release|Any CPU + {288F9D27-634E-45EC-8F89-4EAC68175113}.Release|x64.Build.0 = Release|Any CPU + {288F9D27-634E-45EC-8F89-4EAC68175113}.Release|x86.ActiveCfg = Release|Any CPU + {288F9D27-634E-45EC-8F89-4EAC68175113}.Release|x86.Build.0 = Release|Any CPU + {2117B457-836C-4F74-A8EB-B5F910B54524}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {2117B457-836C-4F74-A8EB-B5F910B54524}.Debug|Any CPU.Build.0 = Debug|Any CPU + {2117B457-836C-4F74-A8EB-B5F910B54524}.Debug|x64.ActiveCfg = Debug|Any CPU + {2117B457-836C-4F74-A8EB-B5F910B54524}.Debug|x64.Build.0 = Debug|Any CPU + {2117B457-836C-4F74-A8EB-B5F910B54524}.Debug|x86.ActiveCfg = Debug|Any CPU + {2117B457-836C-4F74-A8EB-B5F910B54524}.Debug|x86.Build.0 = Debug|Any CPU + {2117B457-836C-4F74-A8EB-B5F910B54524}.Release|Any CPU.ActiveCfg = Release|Any CPU + {2117B457-836C-4F74-A8EB-B5F910B54524}.Release|Any CPU.Build.0 = Release|Any CPU + {2117B457-836C-4F74-A8EB-B5F910B54524}.Release|x64.ActiveCfg = Release|Any CPU + {2117B457-836C-4F74-A8EB-B5F910B54524}.Release|x64.Build.0 = Release|Any CPU + {2117B457-836C-4F74-A8EB-B5F910B54524}.Release|x86.ActiveCfg = Release|Any CPU + {2117B457-836C-4F74-A8EB-B5F910B54524}.Release|x86.Build.0 = Release|Any CPU + {762B2F00-9917-4D77-8DF4-ECD8651A4C13}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {762B2F00-9917-4D77-8DF4-ECD8651A4C13}.Debug|Any CPU.Build.0 = Debug|Any CPU + {762B2F00-9917-4D77-8DF4-ECD8651A4C13}.Debug|x64.ActiveCfg = Debug|Any CPU + {762B2F00-9917-4D77-8DF4-ECD8651A4C13}.Debug|x64.Build.0 = Debug|Any CPU + {762B2F00-9917-4D77-8DF4-ECD8651A4C13}.Debug|x86.ActiveCfg = Debug|Any CPU + {762B2F00-9917-4D77-8DF4-ECD8651A4C13}.Debug|x86.Build.0 = Debug|Any CPU + {762B2F00-9917-4D77-8DF4-ECD8651A4C13}.Release|Any CPU.ActiveCfg = Release|Any CPU + {762B2F00-9917-4D77-8DF4-ECD8651A4C13}.Release|Any CPU.Build.0 = Release|Any CPU + {762B2F00-9917-4D77-8DF4-ECD8651A4C13}.Release|x64.ActiveCfg = Release|Any CPU + {762B2F00-9917-4D77-8DF4-ECD8651A4C13}.Release|x64.Build.0 = Release|Any CPU + {762B2F00-9917-4D77-8DF4-ECD8651A4C13}.Release|x86.ActiveCfg = Release|Any CPU + {762B2F00-9917-4D77-8DF4-ECD8651A4C13}.Release|x86.Build.0 = Release|Any CPU + {772D954B-0C2A-4377-B66F-329484EEB19F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {772D954B-0C2A-4377-B66F-329484EEB19F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {772D954B-0C2A-4377-B66F-329484EEB19F}.Debug|x64.ActiveCfg = Debug|Any CPU + {772D954B-0C2A-4377-B66F-329484EEB19F}.Debug|x64.Build.0 = Debug|Any CPU + {772D954B-0C2A-4377-B66F-329484EEB19F}.Debug|x86.ActiveCfg = Debug|Any CPU + {772D954B-0C2A-4377-B66F-329484EEB19F}.Debug|x86.Build.0 = Debug|Any CPU + {772D954B-0C2A-4377-B66F-329484EEB19F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {772D954B-0C2A-4377-B66F-329484EEB19F}.Release|Any CPU.Build.0 = Release|Any CPU + {772D954B-0C2A-4377-B66F-329484EEB19F}.Release|x64.ActiveCfg = Release|Any CPU + {772D954B-0C2A-4377-B66F-329484EEB19F}.Release|x64.Build.0 = Release|Any CPU + {772D954B-0C2A-4377-B66F-329484EEB19F}.Release|x86.ActiveCfg = Release|Any CPU + {772D954B-0C2A-4377-B66F-329484EEB19F}.Release|x86.Build.0 = Release|Any CPU + 
{0AF757AA-BD1E-49A2-A7E9-C3F78DD09176}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {0AF757AA-BD1E-49A2-A7E9-C3F78DD09176}.Debug|Any CPU.Build.0 = Debug|Any CPU + {0AF757AA-BD1E-49A2-A7E9-C3F78DD09176}.Debug|x64.ActiveCfg = Debug|Any CPU + {0AF757AA-BD1E-49A2-A7E9-C3F78DD09176}.Debug|x64.Build.0 = Debug|Any CPU + {0AF757AA-BD1E-49A2-A7E9-C3F78DD09176}.Debug|x86.ActiveCfg = Debug|Any CPU + {0AF757AA-BD1E-49A2-A7E9-C3F78DD09176}.Debug|x86.Build.0 = Debug|Any CPU + {0AF757AA-BD1E-49A2-A7E9-C3F78DD09176}.Release|Any CPU.ActiveCfg = Release|Any CPU + {0AF757AA-BD1E-49A2-A7E9-C3F78DD09176}.Release|Any CPU.Build.0 = Release|Any CPU + {0AF757AA-BD1E-49A2-A7E9-C3F78DD09176}.Release|x64.ActiveCfg = Release|Any CPU + {0AF757AA-BD1E-49A2-A7E9-C3F78DD09176}.Release|x64.Build.0 = Release|Any CPU + {0AF757AA-BD1E-49A2-A7E9-C3F78DD09176}.Release|x86.ActiveCfg = Release|Any CPU + {0AF757AA-BD1E-49A2-A7E9-C3F78DD09176}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(NestedProjects) = preSolution + {0AF757AA-BD1E-49A2-A7E9-C3F78DD09176} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + EndGlobalSection +EndGlobal diff --git a/src/StellaOps.Cartographer/AGENTS.md b/src/Cartographer/StellaOps.Cartographer/AGENTS.md similarity index 93% rename from src/StellaOps.Cartographer/AGENTS.md rename to src/Cartographer/StellaOps.Cartographer/AGENTS.md index 1c3cc373..b27b2019 100644 --- a/src/StellaOps.Cartographer/AGENTS.md +++ b/src/Cartographer/StellaOps.Cartographer/AGENTS.md @@ -1,18 +1,18 @@ -# StellaOps.Cartographer — Agent Charter - -## Mission -Build and operate the Cartographer service that materializes immutable SBOM property graphs, precomputes layout tiles, and hydrates policy/VEX overlays so other services (API, UI, CLI) can navigate and reason about dependency relationships with context. - -## Responsibilities -- Ingest normalized SBOM projections (CycloneDX/SPDX) and generate versioned graph snapshots with tenant-aware storage. -- Maintain overlay workers that merge Policy Engine effective findings and VEX metadata onto graph nodes/edges, including path relevance computation. -- Serve graph APIs for viewport tiles, paths, filters, exports, simulation overlays, and diffing. -- Coordinate with Policy Engine, Scheduler, Conseiller, Excitator, and Authority to keep overlays current, respect RBAC, and uphold determinism guarantees. -- Deliver observability (metrics/traces/logs) and performance benchmarks for large graphs (≥50k nodes). - -## Expectations -- Keep builds deterministic; snapshots are write-once and content-addressed. -- Tenancy and scope enforcement must match Authority policies (`graph:*`, `sbom:read`, `findings:read`). -- Update `TASKS.md`, `SPRINTS.md` when status changes. -- Provide fixtures and documentation so UI/CLI teams can simulate graphs offline. -- Authority integration derives scope names from `StellaOps.Auth.Abstractions.StellaOpsScopes`; avoid hard-coded `graph:*` literals. +# StellaOps.Cartographer — Agent Charter + +## Mission +Build and operate the Cartographer service that materializes immutable SBOM property graphs, precomputes layout tiles, and hydrates policy/VEX overlays so other services (API, UI, CLI) can navigate and reason about dependency relationships with context. + +## Responsibilities +- Ingest normalized SBOM projections (CycloneDX/SPDX) and generate versioned graph snapshots with tenant-aware storage. 
+- Maintain overlay workers that merge Policy Engine effective findings and VEX metadata onto graph nodes/edges, including path relevance computation. +- Serve graph APIs for viewport tiles, paths, filters, exports, simulation overlays, and diffing. +- Coordinate with Policy Engine, Scheduler, Conseiller, Excitator, and Authority to keep overlays current, respect RBAC, and uphold determinism guarantees. +- Deliver observability (metrics/traces/logs) and performance benchmarks for large graphs (≥50k nodes). + +## Expectations +- Keep builds deterministic; snapshots are write-once and content-addressed. +- Tenancy and scope enforcement must match Authority policies (`graph:*`, `sbom:read`, `findings:read`). +- Update `TASKS.md`, `../../docs/implplan/SPRINTS.md` when status changes. +- Provide fixtures and documentation so UI/CLI teams can simulate graphs offline. +- Authority integration derives scope names from `StellaOps.Auth.Abstractions.StellaOpsScopes`; avoid hard-coded `graph:*` literals. diff --git a/src/StellaOps.Cartographer/Options/CartographerAuthorityOptions.cs b/src/Cartographer/StellaOps.Cartographer/Options/CartographerAuthorityOptions.cs similarity index 97% rename from src/StellaOps.Cartographer/Options/CartographerAuthorityOptions.cs rename to src/Cartographer/StellaOps.Cartographer/Options/CartographerAuthorityOptions.cs index c0bbdc92..0e42c1df 100644 --- a/src/StellaOps.Cartographer/Options/CartographerAuthorityOptions.cs +++ b/src/Cartographer/StellaOps.Cartographer/Options/CartographerAuthorityOptions.cs @@ -1,101 +1,101 @@ -using System; -using System.Collections.Generic; - -namespace StellaOps.Cartographer.Options; - -/// -/// Configuration controlling Authority-backed authentication for the Cartographer service. -/// -public sealed class CartographerAuthorityOptions -{ - /// - /// Enables Authority-backed authentication for Cartographer endpoints. - /// - public bool Enabled { get; set; } - - /// - /// Allows anonymous access when Authority integration is enabled (development only). - /// - public bool AllowAnonymousFallback { get; set; } - - /// - /// Authority issuer URL exposed via OpenID discovery. - /// - public string Issuer { get; set; } = string.Empty; - - /// - /// Whether HTTPS metadata is required when fetching Authority discovery documents. - /// - public bool RequireHttpsMetadata { get; set; } = true; - - /// - /// Optional explicit metadata endpoint for Authority discovery. - /// - public string? MetadataAddress { get; set; } - - /// - /// Timeout (seconds) applied to Authority back-channel HTTP calls. - /// - public int BackchannelTimeoutSeconds { get; set; } = 30; - - /// - /// Allowed token clock skew (seconds) when validating Authority-issued tokens. - /// - public int TokenClockSkewSeconds { get; set; } = 60; - - /// - /// Accepted audiences for Cartographer access tokens. - /// - public IList Audiences { get; } = new List(); - - /// - /// Scopes required for Cartographer operations. - /// - public IList RequiredScopes { get; } = new List(); - - /// - /// Tenants permitted to access Cartographer resources. - /// - public IList RequiredTenants { get; } = new List(); - - /// - /// Networks allowed to bypass authentication enforcement. - /// - public IList BypassNetworks { get; } = new List(); - - /// - /// Validates configured values and throws on failure. 
- /// - public void Validate() - { - if (!Enabled) - { - return; - } - - if (string.IsNullOrWhiteSpace(Issuer)) - { - throw new InvalidOperationException("Cartographer Authority issuer must be configured when Authority integration is enabled."); - } - - if (!Uri.TryCreate(Issuer.Trim(), UriKind.Absolute, out var issuerUri)) - { - throw new InvalidOperationException("Cartographer Authority issuer must be an absolute URI."); - } - - if (RequireHttpsMetadata && !issuerUri.IsLoopback && !string.Equals(issuerUri.Scheme, Uri.UriSchemeHttps, StringComparison.OrdinalIgnoreCase)) - { - throw new InvalidOperationException("Cartographer Authority issuer must use HTTPS unless running on loopback."); - } - - if (BackchannelTimeoutSeconds <= 0) - { - throw new InvalidOperationException("Cartographer Authority back-channel timeout must be greater than zero seconds."); - } - - if (TokenClockSkewSeconds < 0 || TokenClockSkewSeconds > 300) - { - throw new InvalidOperationException("Cartographer Authority token clock skew must be between 0 and 300 seconds."); - } - } -} +using System; +using System.Collections.Generic; + +namespace StellaOps.Cartographer.Options; + +/// +/// Configuration controlling Authority-backed authentication for the Cartographer service. +/// +public sealed class CartographerAuthorityOptions +{ + /// + /// Enables Authority-backed authentication for Cartographer endpoints. + /// + public bool Enabled { get; set; } + + /// + /// Allows anonymous access when Authority integration is enabled (development only). + /// + public bool AllowAnonymousFallback { get; set; } + + /// + /// Authority issuer URL exposed via OpenID discovery. + /// + public string Issuer { get; set; } = string.Empty; + + /// + /// Whether HTTPS metadata is required when fetching Authority discovery documents. + /// + public bool RequireHttpsMetadata { get; set; } = true; + + /// + /// Optional explicit metadata endpoint for Authority discovery. + /// + public string? MetadataAddress { get; set; } + + /// + /// Timeout (seconds) applied to Authority back-channel HTTP calls. + /// + public int BackchannelTimeoutSeconds { get; set; } = 30; + + /// + /// Allowed token clock skew (seconds) when validating Authority-issued tokens. + /// + public int TokenClockSkewSeconds { get; set; } = 60; + + /// + /// Accepted audiences for Cartographer access tokens. + /// + public IList Audiences { get; } = new List(); + + /// + /// Scopes required for Cartographer operations. + /// + public IList RequiredScopes { get; } = new List(); + + /// + /// Tenants permitted to access Cartographer resources. + /// + public IList RequiredTenants { get; } = new List(); + + /// + /// Networks allowed to bypass authentication enforcement. + /// + public IList BypassNetworks { get; } = new List(); + + /// + /// Validates configured values and throws on failure. 
+ /// + public void Validate() + { + if (!Enabled) + { + return; + } + + if (string.IsNullOrWhiteSpace(Issuer)) + { + throw new InvalidOperationException("Cartographer Authority issuer must be configured when Authority integration is enabled."); + } + + if (!Uri.TryCreate(Issuer.Trim(), UriKind.Absolute, out var issuerUri)) + { + throw new InvalidOperationException("Cartographer Authority issuer must be an absolute URI."); + } + + if (RequireHttpsMetadata && !issuerUri.IsLoopback && !string.Equals(issuerUri.Scheme, Uri.UriSchemeHttps, StringComparison.OrdinalIgnoreCase)) + { + throw new InvalidOperationException("Cartographer Authority issuer must use HTTPS unless running on loopback."); + } + + if (BackchannelTimeoutSeconds <= 0) + { + throw new InvalidOperationException("Cartographer Authority back-channel timeout must be greater than zero seconds."); + } + + if (TokenClockSkewSeconds < 0 || TokenClockSkewSeconds > 300) + { + throw new InvalidOperationException("Cartographer Authority token clock skew must be between 0 and 300 seconds."); + } + } +} diff --git a/src/StellaOps.Cartographer/Options/CartographerAuthorityOptionsConfigurator.cs b/src/Cartographer/StellaOps.Cartographer/Options/CartographerAuthorityOptionsConfigurator.cs similarity index 96% rename from src/StellaOps.Cartographer/Options/CartographerAuthorityOptionsConfigurator.cs rename to src/Cartographer/StellaOps.Cartographer/Options/CartographerAuthorityOptionsConfigurator.cs index f716ac6e..be83a84e 100644 --- a/src/StellaOps.Cartographer/Options/CartographerAuthorityOptionsConfigurator.cs +++ b/src/Cartographer/StellaOps.Cartographer/Options/CartographerAuthorityOptionsConfigurator.cs @@ -1,37 +1,37 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using StellaOps.Auth.Abstractions; - -namespace StellaOps.Cartographer.Options; - -/// -/// Applies Cartographer-specific defaults to . -/// -internal static class CartographerAuthorityOptionsConfigurator -{ - /// - /// Ensures required scopes are present and duplicates are removed case-insensitively. - /// - /// Target options. - public static void ApplyDefaults(CartographerAuthorityOptions options) - { - ArgumentNullException.ThrowIfNull(options); - - EnsureScope(options.RequiredScopes, StellaOpsScopes.GraphRead); - EnsureScope(options.RequiredScopes, StellaOpsScopes.GraphWrite); - } - - private static void EnsureScope(ICollection scopes, string scope) - { - ArgumentNullException.ThrowIfNull(scopes); - ArgumentException.ThrowIfNullOrEmpty(scope); - - if (scopes.Any(existing => string.Equals(existing, scope, StringComparison.OrdinalIgnoreCase))) - { - return; - } - - scopes.Add(scope); - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using StellaOps.Auth.Abstractions; + +namespace StellaOps.Cartographer.Options; + +/// +/// Applies Cartographer-specific defaults to . +/// +internal static class CartographerAuthorityOptionsConfigurator +{ + /// + /// Ensures required scopes are present and duplicates are removed case-insensitively. + /// + /// Target options. 
+ public static void ApplyDefaults(CartographerAuthorityOptions options) + { + ArgumentNullException.ThrowIfNull(options); + + EnsureScope(options.RequiredScopes, StellaOpsScopes.GraphRead); + EnsureScope(options.RequiredScopes, StellaOpsScopes.GraphWrite); + } + + private static void EnsureScope(ICollection scopes, string scope) + { + ArgumentNullException.ThrowIfNull(scopes); + ArgumentException.ThrowIfNullOrEmpty(scope); + + if (scopes.Any(existing => string.Equals(existing, scope, StringComparison.OrdinalIgnoreCase))) + { + return; + } + + scopes.Add(scope); + } +} diff --git a/src/StellaOps.Cartographer/Program.cs b/src/Cartographer/StellaOps.Cartographer/Program.cs similarity index 97% rename from src/StellaOps.Cartographer/Program.cs rename to src/Cartographer/StellaOps.Cartographer/Program.cs index a64704fd..bd8185bc 100644 --- a/src/StellaOps.Cartographer/Program.cs +++ b/src/Cartographer/StellaOps.Cartographer/Program.cs @@ -1,39 +1,39 @@ -using StellaOps.Cartographer.Options; - -var builder = WebApplication.CreateBuilder(args); - -builder.Configuration - .AddJsonFile("appsettings.json", optional: true, reloadOnChange: true) - .AddEnvironmentVariables("CARTOGRAPHER_"); - -builder.Services.AddOptions(); -builder.Services.AddLogging(); - -var authoritySection = builder.Configuration.GetSection("Cartographer:Authority"); -var authorityOptions = new CartographerAuthorityOptions(); -authoritySection.Bind(authorityOptions); -CartographerAuthorityOptionsConfigurator.ApplyDefaults(authorityOptions); -authorityOptions.Validate(); - -builder.Services.AddSingleton(authorityOptions); -builder.Services.AddOptions() - .Bind(authoritySection) - .PostConfigure(CartographerAuthorityOptionsConfigurator.ApplyDefaults); - -// TODO: register Cartographer graph builders, overlay workers, and Authority client once implementations land. - -var app = builder.Build(); - -if (!authorityOptions.Enabled) -{ - app.Logger.LogWarning("Cartographer Authority authentication is disabled; enable it before production deployments."); -} -else if (authorityOptions.AllowAnonymousFallback) -{ - app.Logger.LogWarning("Cartographer Authority allows anonymous fallback; disable fallback before production rollout."); -} - -app.MapGet("/healthz", () => Results.Ok(new { status = "ok" })); -app.MapGet("/readyz", () => Results.Ok(new { status = "warming" })); - -app.Run(); +using StellaOps.Cartographer.Options; + +var builder = WebApplication.CreateBuilder(args); + +builder.Configuration + .AddJsonFile("appsettings.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables("CARTOGRAPHER_"); + +builder.Services.AddOptions(); +builder.Services.AddLogging(); + +var authoritySection = builder.Configuration.GetSection("Cartographer:Authority"); +var authorityOptions = new CartographerAuthorityOptions(); +authoritySection.Bind(authorityOptions); +CartographerAuthorityOptionsConfigurator.ApplyDefaults(authorityOptions); +authorityOptions.Validate(); + +builder.Services.AddSingleton(authorityOptions); +builder.Services.AddOptions() + .Bind(authoritySection) + .PostConfigure(CartographerAuthorityOptionsConfigurator.ApplyDefaults); + +// TODO: register Cartographer graph builders, overlay workers, and Authority client once implementations land. 
+ +var app = builder.Build(); + +if (!authorityOptions.Enabled) +{ + app.Logger.LogWarning("Cartographer Authority authentication is disabled; enable it before production deployments."); +} +else if (authorityOptions.AllowAnonymousFallback) +{ + app.Logger.LogWarning("Cartographer Authority allows anonymous fallback; disable fallback before production rollout."); +} + +app.MapGet("/healthz", () => Results.Ok(new { status = "ok" })); +app.MapGet("/readyz", () => Results.Ok(new { status = "warming" })); + +app.Run(); diff --git a/src/StellaOps.Cartographer/Properties/AssemblyInfo.cs b/src/Cartographer/StellaOps.Cartographer/Properties/AssemblyInfo.cs similarity index 97% rename from src/StellaOps.Cartographer/Properties/AssemblyInfo.cs rename to src/Cartographer/StellaOps.Cartographer/Properties/AssemblyInfo.cs index 5ae6a884..a9c3b335 100644 --- a/src/StellaOps.Cartographer/Properties/AssemblyInfo.cs +++ b/src/Cartographer/StellaOps.Cartographer/Properties/AssemblyInfo.cs @@ -1,3 +1,3 @@ -using System.Runtime.CompilerServices; - -[assembly: InternalsVisibleTo("StellaOps.Cartographer.Tests")] +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("StellaOps.Cartographer.Tests")] diff --git a/src/Cartographer/StellaOps.Cartographer/StellaOps.Cartographer.csproj b/src/Cartographer/StellaOps.Cartographer/StellaOps.Cartographer.csproj new file mode 100644 index 00000000..cf8633fa --- /dev/null +++ b/src/Cartographer/StellaOps.Cartographer/StellaOps.Cartographer.csproj @@ -0,0 +1,18 @@ + + + + net10.0 + enable + enable + preview + true + InProcess + + + + + + + + + \ No newline at end of file diff --git a/src/StellaOps.Cartographer/TASKS.md b/src/Cartographer/StellaOps.Cartographer/TASKS.md similarity index 99% rename from src/StellaOps.Cartographer/TASKS.md rename to src/Cartographer/StellaOps.Cartographer/TASKS.md index b7e55a8f..3e3f0301 100644 --- a/src/StellaOps.Cartographer/TASKS.md +++ b/src/Cartographer/StellaOps.Cartographer/TASKS.md @@ -1,6 +1,6 @@ -# Cartographer Task Board — Epic 3: Graph Explorer v1 -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| CARTO-GRAPH-21-010 | DONE (2025-10-27) | Cartographer Guild | AUTH-GRAPH-21-001 | Replace hard-coded `graph:*` scope strings in Cartographer services/clients with `StellaOpsScopes` constants; document new dependency. | All scope checks reference `StellaOpsScopes`; documentation updated; unit tests adjusted if needed. | - -> 2025-10-26 — Note: awaiting Cartographer service bootstrap. Keep this task open until Cartographer routes exist so we can swap to `StellaOpsScopes` immediately. +# Cartographer Task Board — Epic 3: Graph Explorer v1 +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| CARTO-GRAPH-21-010 | DONE (2025-10-27) | Cartographer Guild | AUTH-GRAPH-21-001 | Replace hard-coded `graph:*` scope strings in Cartographer services/clients with `StellaOpsScopes` constants; document new dependency. | All scope checks reference `StellaOpsScopes`; documentation updated; unit tests adjusted if needed. | + +> 2025-10-26 — Note: awaiting Cartographer service bootstrap. Keep this task open until Cartographer routes exist so we can swap to `StellaOpsScopes` immediately. 
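For reference, a minimal sketch of how the `Cartographer:Authority` section binds onto `CartographerAuthorityOptions`, mirroring the wiring in `Program.cs` above. All values are placeholders, and because the configurator is `internal`, this flow is only visible outside `Program.cs` to `StellaOps.Cartographer.Tests` via `InternalsVisibleTo`.

```csharp
using System.Collections.Generic;
using Microsoft.Extensions.Configuration;
using StellaOps.Cartographer.Options;

// Placeholder settings for illustration only; real deployments bind the same keys
// from appsettings.json or CARTOGRAPHER_-prefixed environment variables.
var settings = new Dictionary<string, string?>
{
    ["Cartographer:Authority:Enabled"] = "true",
    ["Cartographer:Authority:Issuer"] = "https://authority.example.internal", // hypothetical issuer
    ["Cartographer:Authority:RequireHttpsMetadata"] = "true",
    ["Cartographer:Authority:Audiences:0"] = "cartographer",                  // hypothetical audience
    ["Cartographer:Authority:RequiredTenants:0"] = "tenant-default",          // hypothetical tenant
};

var configuration = new ConfigurationBuilder()
    .AddInMemoryCollection(settings)
    .Build();

var options = new CartographerAuthorityOptions();
configuration.GetSection("Cartographer:Authority").Bind(options);

// ApplyDefaults injects the graph read/write scopes defined by StellaOpsScopes,
// so they never need to be spelled out in configuration.
CartographerAuthorityOptionsConfigurator.ApplyDefaults(options);

// Throws InvalidOperationException when the issuer, timeout, or clock skew is invalid.
options.Validate();
```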
diff --git a/src/StellaOps.Cartographer.Tests/Options/CartographerAuthorityOptionsConfiguratorTests.cs b/src/Cartographer/__Tests/StellaOps.Cartographer.Tests/Options/CartographerAuthorityOptionsConfiguratorTests.cs similarity index 96% rename from src/StellaOps.Cartographer.Tests/Options/CartographerAuthorityOptionsConfiguratorTests.cs rename to src/Cartographer/__Tests/StellaOps.Cartographer.Tests/Options/CartographerAuthorityOptionsConfiguratorTests.cs index f9fc2af5..64cf0e58 100644 --- a/src/StellaOps.Cartographer.Tests/Options/CartographerAuthorityOptionsConfiguratorTests.cs +++ b/src/Cartographer/__Tests/StellaOps.Cartographer.Tests/Options/CartographerAuthorityOptionsConfiguratorTests.cs @@ -1,51 +1,51 @@ -using StellaOps.Auth.Abstractions; -using StellaOps.Cartographer.Options; -using Xunit; - -namespace StellaOps.Cartographer.Tests.Options; - -public class CartographerAuthorityOptionsConfiguratorTests -{ - [Fact] - public void ApplyDefaults_AddsGraphScopes() - { - var options = new CartographerAuthorityOptions(); - - CartographerAuthorityOptionsConfigurator.ApplyDefaults(options); - - Assert.Contains(StellaOpsScopes.GraphRead, options.RequiredScopes); - Assert.Contains(StellaOpsScopes.GraphWrite, options.RequiredScopes); - } - - [Fact] - public void ApplyDefaults_DoesNotDuplicateScopes() - { - var options = new CartographerAuthorityOptions(); - options.RequiredScopes.Add("GRAPH:READ"); - options.RequiredScopes.Add(StellaOpsScopes.GraphWrite); - - CartographerAuthorityOptionsConfigurator.ApplyDefaults(options); - - Assert.Equal(2, options.RequiredScopes.Count); - } - - [Fact] - public void Validate_AllowsDisabledConfiguration() - { - var options = new CartographerAuthorityOptions(); - - options.Validate(); // should not throw when disabled - } - - [Fact] - public void Validate_ThrowsForInvalidIssuer() - { - var options = new CartographerAuthorityOptions - { - Enabled = true, - Issuer = "invalid" - }; - - Assert.Throws(() => options.Validate()); - } -} +using StellaOps.Auth.Abstractions; +using StellaOps.Cartographer.Options; +using Xunit; + +namespace StellaOps.Cartographer.Tests.Options; + +public class CartographerAuthorityOptionsConfiguratorTests +{ + [Fact] + public void ApplyDefaults_AddsGraphScopes() + { + var options = new CartographerAuthorityOptions(); + + CartographerAuthorityOptionsConfigurator.ApplyDefaults(options); + + Assert.Contains(StellaOpsScopes.GraphRead, options.RequiredScopes); + Assert.Contains(StellaOpsScopes.GraphWrite, options.RequiredScopes); + } + + [Fact] + public void ApplyDefaults_DoesNotDuplicateScopes() + { + var options = new CartographerAuthorityOptions(); + options.RequiredScopes.Add("GRAPH:READ"); + options.RequiredScopes.Add(StellaOpsScopes.GraphWrite); + + CartographerAuthorityOptionsConfigurator.ApplyDefaults(options); + + Assert.Equal(2, options.RequiredScopes.Count); + } + + [Fact] + public void Validate_AllowsDisabledConfiguration() + { + var options = new CartographerAuthorityOptions(); + + options.Validate(); // should not throw when disabled + } + + [Fact] + public void Validate_ThrowsForInvalidIssuer() + { + var options = new CartographerAuthorityOptions + { + Enabled = true, + Issuer = "invalid" + }; + + Assert.Throws(() => options.Validate()); + } +} diff --git a/src/StellaOps.Cartographer.Tests/StellaOps.Cartographer.Tests.csproj b/src/Cartographer/__Tests/StellaOps.Cartographer.Tests/StellaOps.Cartographer.Tests.csproj similarity index 79% rename from src/StellaOps.Cartographer.Tests/StellaOps.Cartographer.Tests.csproj rename to 
src/Cartographer/__Tests/StellaOps.Cartographer.Tests/StellaOps.Cartographer.Tests.csproj index eb0e3081..37fdb8d1 100644 --- a/src/StellaOps.Cartographer.Tests/StellaOps.Cartographer.Tests.csproj +++ b/src/Cartographer/__Tests/StellaOps.Cartographer.Tests/StellaOps.Cartographer.Tests.csproj @@ -1,3 +1,4 @@ + net10.0 @@ -12,6 +13,6 @@ - + - + \ No newline at end of file diff --git a/src/Cli/StellaOps.Cli.sln b/src/Cli/StellaOps.Cli.sln new file mode 100644 index 00000000..9e61151f --- /dev/null +++ b/src/Cli/StellaOps.Cli.sln @@ -0,0 +1,169 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.0.31903.59 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "__Libraries", "__Libraries", "{41F15E67-7190-CF23-3BC4-77E87134CADD}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cli", "StellaOps.Cli\StellaOps.Cli.csproj", "{9258A5D3-2567-4BBA-8F0B-D018E431B7F8}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Configuration", "..\__Libraries\StellaOps.Configuration\StellaOps.Configuration.csproj", "{2846557F-1917-4A55-9EDB-EB28398D22EB}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Authority.Plugins.Abstractions", "..\Authority\StellaOps.Authority\StellaOps.Authority.Plugins.Abstractions\StellaOps.Authority.Plugins.Abstractions.csproj", "{16D7BF0B-AEFE-4D3D-AE3F-88F96CD483AB}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cryptography", "..\__Libraries\StellaOps.Cryptography\StellaOps.Cryptography.csproj", "{39C8D95B-08FB-486A-9A0B-1559D70E8689}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Abstractions", "..\Authority\StellaOps.Authority\StellaOps.Auth.Abstractions\StellaOps.Auth.Abstractions.csproj", "{D42AC6A1-BB0E-48AD-A609-5672B6B888A2}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Client", "..\Authority\StellaOps.Authority\StellaOps.Auth.Client\StellaOps.Auth.Client.csproj", "{77853EC3-FED1-490B-B680-E9A1BDDC0D7C}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Plugin", "..\__Libraries\StellaOps.Plugin\StellaOps.Plugin.csproj", "{376B4717-AD51-4775-9B25-2C573F1E6215}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.DependencyInjection", "..\__Libraries\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj", "{429E5D21-7ABE-4A19-B3C3-BBEF97337ADA}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cli.Plugins.NonCore", "__Libraries\StellaOps.Cli.Plugins.NonCore\StellaOps.Cli.Plugins.NonCore.csproj", "{30E528B3-0EB1-4A89-8130-F69D3C0F1962}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "__Tests", "__Tests", "{56BCE1BF-7CBA-7CE8-203D-A88051F1D642}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cli.Tests", "__Tests\StellaOps.Cli.Tests\StellaOps.Cli.Tests.csproj", "{B434D60B-8A05-44EC-ADA6-07C9E2CB1D92}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|Any CPU = Release|Any CPU + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {9258A5D3-2567-4BBA-8F0B-D018E431B7F8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + 
{9258A5D3-2567-4BBA-8F0B-D018E431B7F8}.Debug|Any CPU.Build.0 = Debug|Any CPU + {9258A5D3-2567-4BBA-8F0B-D018E431B7F8}.Debug|x64.ActiveCfg = Debug|Any CPU + {9258A5D3-2567-4BBA-8F0B-D018E431B7F8}.Debug|x64.Build.0 = Debug|Any CPU + {9258A5D3-2567-4BBA-8F0B-D018E431B7F8}.Debug|x86.ActiveCfg = Debug|Any CPU + {9258A5D3-2567-4BBA-8F0B-D018E431B7F8}.Debug|x86.Build.0 = Debug|Any CPU + {9258A5D3-2567-4BBA-8F0B-D018E431B7F8}.Release|Any CPU.ActiveCfg = Release|Any CPU + {9258A5D3-2567-4BBA-8F0B-D018E431B7F8}.Release|Any CPU.Build.0 = Release|Any CPU + {9258A5D3-2567-4BBA-8F0B-D018E431B7F8}.Release|x64.ActiveCfg = Release|Any CPU + {9258A5D3-2567-4BBA-8F0B-D018E431B7F8}.Release|x64.Build.0 = Release|Any CPU + {9258A5D3-2567-4BBA-8F0B-D018E431B7F8}.Release|x86.ActiveCfg = Release|Any CPU + {9258A5D3-2567-4BBA-8F0B-D018E431B7F8}.Release|x86.Build.0 = Release|Any CPU + {2846557F-1917-4A55-9EDB-EB28398D22EB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {2846557F-1917-4A55-9EDB-EB28398D22EB}.Debug|Any CPU.Build.0 = Debug|Any CPU + {2846557F-1917-4A55-9EDB-EB28398D22EB}.Debug|x64.ActiveCfg = Debug|Any CPU + {2846557F-1917-4A55-9EDB-EB28398D22EB}.Debug|x64.Build.0 = Debug|Any CPU + {2846557F-1917-4A55-9EDB-EB28398D22EB}.Debug|x86.ActiveCfg = Debug|Any CPU + {2846557F-1917-4A55-9EDB-EB28398D22EB}.Debug|x86.Build.0 = Debug|Any CPU + {2846557F-1917-4A55-9EDB-EB28398D22EB}.Release|Any CPU.ActiveCfg = Release|Any CPU + {2846557F-1917-4A55-9EDB-EB28398D22EB}.Release|Any CPU.Build.0 = Release|Any CPU + {2846557F-1917-4A55-9EDB-EB28398D22EB}.Release|x64.ActiveCfg = Release|Any CPU + {2846557F-1917-4A55-9EDB-EB28398D22EB}.Release|x64.Build.0 = Release|Any CPU + {2846557F-1917-4A55-9EDB-EB28398D22EB}.Release|x86.ActiveCfg = Release|Any CPU + {2846557F-1917-4A55-9EDB-EB28398D22EB}.Release|x86.Build.0 = Release|Any CPU + {16D7BF0B-AEFE-4D3D-AE3F-88F96CD483AB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {16D7BF0B-AEFE-4D3D-AE3F-88F96CD483AB}.Debug|Any CPU.Build.0 = Debug|Any CPU + {16D7BF0B-AEFE-4D3D-AE3F-88F96CD483AB}.Debug|x64.ActiveCfg = Debug|Any CPU + {16D7BF0B-AEFE-4D3D-AE3F-88F96CD483AB}.Debug|x64.Build.0 = Debug|Any CPU + {16D7BF0B-AEFE-4D3D-AE3F-88F96CD483AB}.Debug|x86.ActiveCfg = Debug|Any CPU + {16D7BF0B-AEFE-4D3D-AE3F-88F96CD483AB}.Debug|x86.Build.0 = Debug|Any CPU + {16D7BF0B-AEFE-4D3D-AE3F-88F96CD483AB}.Release|Any CPU.ActiveCfg = Release|Any CPU + {16D7BF0B-AEFE-4D3D-AE3F-88F96CD483AB}.Release|Any CPU.Build.0 = Release|Any CPU + {16D7BF0B-AEFE-4D3D-AE3F-88F96CD483AB}.Release|x64.ActiveCfg = Release|Any CPU + {16D7BF0B-AEFE-4D3D-AE3F-88F96CD483AB}.Release|x64.Build.0 = Release|Any CPU + {16D7BF0B-AEFE-4D3D-AE3F-88F96CD483AB}.Release|x86.ActiveCfg = Release|Any CPU + {16D7BF0B-AEFE-4D3D-AE3F-88F96CD483AB}.Release|x86.Build.0 = Release|Any CPU + {39C8D95B-08FB-486A-9A0B-1559D70E8689}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {39C8D95B-08FB-486A-9A0B-1559D70E8689}.Debug|Any CPU.Build.0 = Debug|Any CPU + {39C8D95B-08FB-486A-9A0B-1559D70E8689}.Debug|x64.ActiveCfg = Debug|Any CPU + {39C8D95B-08FB-486A-9A0B-1559D70E8689}.Debug|x64.Build.0 = Debug|Any CPU + {39C8D95B-08FB-486A-9A0B-1559D70E8689}.Debug|x86.ActiveCfg = Debug|Any CPU + {39C8D95B-08FB-486A-9A0B-1559D70E8689}.Debug|x86.Build.0 = Debug|Any CPU + {39C8D95B-08FB-486A-9A0B-1559D70E8689}.Release|Any CPU.ActiveCfg = Release|Any CPU + {39C8D95B-08FB-486A-9A0B-1559D70E8689}.Release|Any CPU.Build.0 = Release|Any CPU + {39C8D95B-08FB-486A-9A0B-1559D70E8689}.Release|x64.ActiveCfg = Release|Any CPU + {39C8D95B-08FB-486A-9A0B-1559D70E8689}.Release|x64.Build.0 = 
Release|Any CPU + {39C8D95B-08FB-486A-9A0B-1559D70E8689}.Release|x86.ActiveCfg = Release|Any CPU + {39C8D95B-08FB-486A-9A0B-1559D70E8689}.Release|x86.Build.0 = Release|Any CPU + {D42AC6A1-BB0E-48AD-A609-5672B6B888A2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D42AC6A1-BB0E-48AD-A609-5672B6B888A2}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D42AC6A1-BB0E-48AD-A609-5672B6B888A2}.Debug|x64.ActiveCfg = Debug|Any CPU + {D42AC6A1-BB0E-48AD-A609-5672B6B888A2}.Debug|x64.Build.0 = Debug|Any CPU + {D42AC6A1-BB0E-48AD-A609-5672B6B888A2}.Debug|x86.ActiveCfg = Debug|Any CPU + {D42AC6A1-BB0E-48AD-A609-5672B6B888A2}.Debug|x86.Build.0 = Debug|Any CPU + {D42AC6A1-BB0E-48AD-A609-5672B6B888A2}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D42AC6A1-BB0E-48AD-A609-5672B6B888A2}.Release|Any CPU.Build.0 = Release|Any CPU + {D42AC6A1-BB0E-48AD-A609-5672B6B888A2}.Release|x64.ActiveCfg = Release|Any CPU + {D42AC6A1-BB0E-48AD-A609-5672B6B888A2}.Release|x64.Build.0 = Release|Any CPU + {D42AC6A1-BB0E-48AD-A609-5672B6B888A2}.Release|x86.ActiveCfg = Release|Any CPU + {D42AC6A1-BB0E-48AD-A609-5672B6B888A2}.Release|x86.Build.0 = Release|Any CPU + {77853EC3-FED1-490B-B680-E9A1BDDC0D7C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {77853EC3-FED1-490B-B680-E9A1BDDC0D7C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {77853EC3-FED1-490B-B680-E9A1BDDC0D7C}.Debug|x64.ActiveCfg = Debug|Any CPU + {77853EC3-FED1-490B-B680-E9A1BDDC0D7C}.Debug|x64.Build.0 = Debug|Any CPU + {77853EC3-FED1-490B-B680-E9A1BDDC0D7C}.Debug|x86.ActiveCfg = Debug|Any CPU + {77853EC3-FED1-490B-B680-E9A1BDDC0D7C}.Debug|x86.Build.0 = Debug|Any CPU + {77853EC3-FED1-490B-B680-E9A1BDDC0D7C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {77853EC3-FED1-490B-B680-E9A1BDDC0D7C}.Release|Any CPU.Build.0 = Release|Any CPU + {77853EC3-FED1-490B-B680-E9A1BDDC0D7C}.Release|x64.ActiveCfg = Release|Any CPU + {77853EC3-FED1-490B-B680-E9A1BDDC0D7C}.Release|x64.Build.0 = Release|Any CPU + {77853EC3-FED1-490B-B680-E9A1BDDC0D7C}.Release|x86.ActiveCfg = Release|Any CPU + {77853EC3-FED1-490B-B680-E9A1BDDC0D7C}.Release|x86.Build.0 = Release|Any CPU + {376B4717-AD51-4775-9B25-2C573F1E6215}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {376B4717-AD51-4775-9B25-2C573F1E6215}.Debug|Any CPU.Build.0 = Debug|Any CPU + {376B4717-AD51-4775-9B25-2C573F1E6215}.Debug|x64.ActiveCfg = Debug|Any CPU + {376B4717-AD51-4775-9B25-2C573F1E6215}.Debug|x64.Build.0 = Debug|Any CPU + {376B4717-AD51-4775-9B25-2C573F1E6215}.Debug|x86.ActiveCfg = Debug|Any CPU + {376B4717-AD51-4775-9B25-2C573F1E6215}.Debug|x86.Build.0 = Debug|Any CPU + {376B4717-AD51-4775-9B25-2C573F1E6215}.Release|Any CPU.ActiveCfg = Release|Any CPU + {376B4717-AD51-4775-9B25-2C573F1E6215}.Release|Any CPU.Build.0 = Release|Any CPU + {376B4717-AD51-4775-9B25-2C573F1E6215}.Release|x64.ActiveCfg = Release|Any CPU + {376B4717-AD51-4775-9B25-2C573F1E6215}.Release|x64.Build.0 = Release|Any CPU + {376B4717-AD51-4775-9B25-2C573F1E6215}.Release|x86.ActiveCfg = Release|Any CPU + {376B4717-AD51-4775-9B25-2C573F1E6215}.Release|x86.Build.0 = Release|Any CPU + {429E5D21-7ABE-4A19-B3C3-BBEF97337ADA}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {429E5D21-7ABE-4A19-B3C3-BBEF97337ADA}.Debug|Any CPU.Build.0 = Debug|Any CPU + {429E5D21-7ABE-4A19-B3C3-BBEF97337ADA}.Debug|x64.ActiveCfg = Debug|Any CPU + {429E5D21-7ABE-4A19-B3C3-BBEF97337ADA}.Debug|x64.Build.0 = Debug|Any CPU + {429E5D21-7ABE-4A19-B3C3-BBEF97337ADA}.Debug|x86.ActiveCfg = Debug|Any CPU + {429E5D21-7ABE-4A19-B3C3-BBEF97337ADA}.Debug|x86.Build.0 = Debug|Any CPU + {429E5D21-7ABE-4A19-B3C3-BBEF97337ADA}.Release|Any 
CPU.ActiveCfg = Release|Any CPU + {429E5D21-7ABE-4A19-B3C3-BBEF97337ADA}.Release|Any CPU.Build.0 = Release|Any CPU + {429E5D21-7ABE-4A19-B3C3-BBEF97337ADA}.Release|x64.ActiveCfg = Release|Any CPU + {429E5D21-7ABE-4A19-B3C3-BBEF97337ADA}.Release|x64.Build.0 = Release|Any CPU + {429E5D21-7ABE-4A19-B3C3-BBEF97337ADA}.Release|x86.ActiveCfg = Release|Any CPU + {429E5D21-7ABE-4A19-B3C3-BBEF97337ADA}.Release|x86.Build.0 = Release|Any CPU + {30E528B3-0EB1-4A89-8130-F69D3C0F1962}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {30E528B3-0EB1-4A89-8130-F69D3C0F1962}.Debug|Any CPU.Build.0 = Debug|Any CPU + {30E528B3-0EB1-4A89-8130-F69D3C0F1962}.Debug|x64.ActiveCfg = Debug|Any CPU + {30E528B3-0EB1-4A89-8130-F69D3C0F1962}.Debug|x64.Build.0 = Debug|Any CPU + {30E528B3-0EB1-4A89-8130-F69D3C0F1962}.Debug|x86.ActiveCfg = Debug|Any CPU + {30E528B3-0EB1-4A89-8130-F69D3C0F1962}.Debug|x86.Build.0 = Debug|Any CPU + {30E528B3-0EB1-4A89-8130-F69D3C0F1962}.Release|Any CPU.ActiveCfg = Release|Any CPU + {30E528B3-0EB1-4A89-8130-F69D3C0F1962}.Release|Any CPU.Build.0 = Release|Any CPU + {30E528B3-0EB1-4A89-8130-F69D3C0F1962}.Release|x64.ActiveCfg = Release|Any CPU + {30E528B3-0EB1-4A89-8130-F69D3C0F1962}.Release|x64.Build.0 = Release|Any CPU + {30E528B3-0EB1-4A89-8130-F69D3C0F1962}.Release|x86.ActiveCfg = Release|Any CPU + {30E528B3-0EB1-4A89-8130-F69D3C0F1962}.Release|x86.Build.0 = Release|Any CPU + {B434D60B-8A05-44EC-ADA6-07C9E2CB1D92}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B434D60B-8A05-44EC-ADA6-07C9E2CB1D92}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B434D60B-8A05-44EC-ADA6-07C9E2CB1D92}.Debug|x64.ActiveCfg = Debug|Any CPU + {B434D60B-8A05-44EC-ADA6-07C9E2CB1D92}.Debug|x64.Build.0 = Debug|Any CPU + {B434D60B-8A05-44EC-ADA6-07C9E2CB1D92}.Debug|x86.ActiveCfg = Debug|Any CPU + {B434D60B-8A05-44EC-ADA6-07C9E2CB1D92}.Debug|x86.Build.0 = Debug|Any CPU + {B434D60B-8A05-44EC-ADA6-07C9E2CB1D92}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B434D60B-8A05-44EC-ADA6-07C9E2CB1D92}.Release|Any CPU.Build.0 = Release|Any CPU + {B434D60B-8A05-44EC-ADA6-07C9E2CB1D92}.Release|x64.ActiveCfg = Release|Any CPU + {B434D60B-8A05-44EC-ADA6-07C9E2CB1D92}.Release|x64.Build.0 = Release|Any CPU + {B434D60B-8A05-44EC-ADA6-07C9E2CB1D92}.Release|x86.ActiveCfg = Release|Any CPU + {B434D60B-8A05-44EC-ADA6-07C9E2CB1D92}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(NestedProjects) = preSolution + {9258A5D3-2567-4BBA-8F0B-D018E431B7F8} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {30E528B3-0EB1-4A89-8130-F69D3C0F1962} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {B434D60B-8A05-44EC-ADA6-07C9E2CB1D92} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + EndGlobalSection +EndGlobal diff --git a/src/StellaOps.Cli/AGENTS.md b/src/Cli/StellaOps.Cli/AGENTS.md similarity index 95% rename from src/StellaOps.Cli/AGENTS.md rename to src/Cli/StellaOps.Cli/AGENTS.md index e073aeb6..4e504f0a 100644 --- a/src/StellaOps.Cli/AGENTS.md +++ b/src/Cli/StellaOps.Cli/AGENTS.md @@ -1,28 +1,28 @@ -# StellaOps.Cli — Agent Brief - -## Mission -- Deliver an offline-capable command-line interface that drives StellaOps back-end operations: scanner distribution, scan execution, result uploads, and Concelier database lifecycle calls (init/resume/export). -- Honour StellaOps principles of determinism, observability, and offline-first behaviour while providing a polished operator experience. 
-
-## Role Charter
-| Role | Mandate | Collaboration |
-| --- | --- | --- |
-| **DevEx/CLI** | Own CLI UX, command routing, and configuration model. Ensure commands work with empty/default config and document overrides. | Coordinate with Backend/WebService for API contracts and with Docs for operator workflows. |
-| **Ops Integrator** | Maintain integration paths for shell/dotnet/docker tooling. Validate that air-gapped runners can bootstrap required binaries. | Work with Concelier/Agent teams to mirror packaging and signing requirements. |
-| **QA** | Provide command-level fixtures, golden outputs, and regression coverage (unit & smoke). Ensure commands respect cancellation and deterministic logging. | Partner with QA guild for shared harnesses and test data. |
-
-## Working Agreements
-- Configuration is centralised in `StellaOps.Configuration`; always consume the bootstrapper instead of hand rolling builders. Env vars (`API_KEY`, `STELLAOPS_BACKEND_URL`, `StellaOps:*`) override JSON/YAML and default to empty values.
-- Command verbs (`scanner`, `scan`, `db`, `config`) are wired through System.CommandLine 2.0; keep handlers composable, cancellation-aware, and unit-testable.
-- `scanner download` must verify digests/signatures, install containers locally (docker load), and log artefact metadata.
-- `scan run` must execute the container against a directory, materialise artefacts in `ResultsDirectory`, and auto-upload them on success; `scan upload` is the manual retry path.
-- Emit structured console logs (single line, UTC timestamps) and honour offline-first expectations—no hidden network calls.
-- Mirror repository guidance: stay within `src/StellaOps.Cli` unless collaborating via documented handshakes.
-- Update `TASKS.md` as states change (TODO → DOING → DONE/BLOCKED) and record added tests/fixtures alongside implementation notes.
-
-## Reference Materials
-- `docs/ARCHITECTURE_CONCELIER.md` for database operations surface area.
-- Backend OpenAPI/contract docs (once available) for job triggers and scanner endpoints.
+# StellaOps.Cli — Agent Brief
+
+## Mission
+- Deliver an offline-capable command-line interface that drives StellaOps back-end operations: scanner distribution, scan execution, result uploads, and Concelier database lifecycle calls (init/resume/export).
+- Honour StellaOps principles of determinism, observability, and offline-first behaviour while providing a polished operator experience.
+
+## Role Charter
+| Role | Mandate | Collaboration |
+| --- | --- | --- |
+| **DevEx/CLI** | Own CLI UX, command routing, and configuration model. Ensure commands work with empty/default config and document overrides. | Coordinate with Backend/WebService for API contracts and with Docs for operator workflows. |
+| **Ops Integrator** | Maintain integration paths for shell/dotnet/docker tooling. Validate that air-gapped runners can bootstrap required binaries. | Work with Concelier/Agent teams to mirror packaging and signing requirements. |
+| **QA** | Provide command-level fixtures, golden outputs, and regression coverage (unit & smoke). Ensure commands respect cancellation and deterministic logging. | Partner with QA guild for shared harnesses and test data. |
+
+## Working Agreements
+- Configuration is centralised in `StellaOps.Configuration`; always consume the bootstrapper instead of hand rolling builders. Env vars (`API_KEY`, `STELLAOPS_BACKEND_URL`, `StellaOps:*`) override JSON/YAML and default to empty values.
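+  A minimal sketch of the intended layering (assuming the standard `Microsoft.Extensions.Configuration` sources; the file path and key names below are illustrative, and the `StellaOps.Configuration` bootstrapper remains the source of truth):
+
+  ```csharp
+  // Illustrative sketch only; real code consumes the StellaOps.Configuration bootstrapper.
+  // Shows the documented precedence: file-based settings first, environment variables last (they win).
+  using Microsoft.Extensions.Configuration;
+
+  var configuration = new ConfigurationBuilder()
+      .AddJsonFile("appsettings.json", optional: true)   // JSON/YAML layer (path illustrative)
+      .AddEnvironmentVariables()                          // API_KEY, STELLAOPS_BACKEND_URL, StellaOps__* overrides
+      .Build();
+
+  // Missing keys fall back to empty values rather than throwing, per the agreement above.
+  var backendUrl = configuration["StellaOps:Backend:Url"] ?? string.Empty;
+  ```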
+- Command verbs (`scanner`, `scan`, `db`, `config`) are wired through System.CommandLine 2.0; keep handlers composable, cancellation-aware, and unit-testable.
+- `scanner download` must verify digests/signatures, install containers locally (docker load), and log artefact metadata.
+- `scan run` must execute the container against a directory, materialise artefacts in `ResultsDirectory`, and auto-upload them on success; `scan upload` is the manual retry path.
+- Emit structured console logs (single line, UTC timestamps) and honour offline-first expectations—no hidden network calls.
+- Mirror repository guidance: stay within `src/Cli/StellaOps.Cli` unless collaborating via documented handshakes.
+- Update `TASKS.md` as states change (TODO → DOING → DONE/BLOCKED) and record added tests/fixtures alongside implementation notes.
+
+## Reference Materials
+- `docs/ARCHITECTURE_CONCELIER.md` for database operations surface area.
+- Backend OpenAPI/contract docs (once available) for job triggers and scanner endpoints.
 - Existing module AGENTS/TASKS files for style and coordination cues.
 - `docs/09_API_CLI_REFERENCE.md` (section 3) for the user-facing synopsis of the CLI verbs and flags.
diff --git a/src/StellaOps.Cli/Commands/CommandFactory.cs b/src/Cli/StellaOps.Cli/Commands/CommandFactory.cs
similarity index 100%
rename from src/StellaOps.Cli/Commands/CommandFactory.cs
rename to src/Cli/StellaOps.Cli/Commands/CommandFactory.cs
diff --git a/src/StellaOps.Cli/Commands/CommandHandlers.cs b/src/Cli/StellaOps.Cli/Commands/CommandHandlers.cs
similarity index 97%
rename from src/StellaOps.Cli/Commands/CommandHandlers.cs
rename to src/Cli/StellaOps.Cli/Commands/CommandHandlers.cs
index fe884193..2a2bf549 100644
--- a/src/StellaOps.Cli/Commands/CommandHandlers.cs
+++ b/src/Cli/StellaOps.Cli/Commands/CommandHandlers.cs
@@ -1,5638 +1,5638 @@
-using System;
-using System.Buffers;
-using System.Collections.Generic;
-using System.Collections.ObjectModel;
-using System.Diagnostics;
-using System.Globalization;
-using System.IO;
-using System.IO.Compression;
-using System.Linq;
-using System.Net;
-using System.Net.Http;
-using System.Net.Http.Headers;
-using System.Security.Cryptography;
-using System.Text.Json;
-using System.Text.Json.Serialization;
-using System.Text;
-using System.Threading;
-using System.Threading.Tasks;
-using Microsoft.Extensions.DependencyInjection;
-using Microsoft.Extensions.Logging;
-using Spectre.Console;
-using StellaOps.Auth.Client;
-using StellaOps.Cli.Configuration;
-using StellaOps.Cli.Prompts;
-using StellaOps.Cli.Services;
-using StellaOps.Cli.Services.Models;
-using StellaOps.Cli.Telemetry;
-using StellaOps.Cryptography;
-
-namespace StellaOps.Cli.Commands;
-
-internal static class CommandHandlers
-{
-    public static async Task HandleScannerDownloadAsync(
-        IServiceProvider services,
-        string channel,
-        string? output,
-        bool overwrite,
-        bool install,
-        bool verbose,
-        CancellationToken cancellationToken)
-    {
-        await using var scope = services.CreateAsyncScope();
-        var client = scope.ServiceProvider.GetRequiredService();
-        var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("scanner-download");
-        var verbosity = scope.ServiceProvider.GetRequiredService();
-        var previousLevel = verbosity.MinimumLevel;
-        verbosity.MinimumLevel = verbose ?
LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.scanner.download", ActivityKind.Client); - activity?.SetTag("stellaops.cli.command", "scanner download"); - activity?.SetTag("stellaops.cli.channel", channel); - using var duration = CliMetrics.MeasureCommandDuration("scanner download"); - - try - { - var result = await client.DownloadScannerAsync(channel, output ?? string.Empty, overwrite, verbose, cancellationToken).ConfigureAwait(false); - - if (result.FromCache) - { - logger.LogInformation("Using cached scanner at {Path}.", result.Path); - } - else - { - logger.LogInformation("Scanner downloaded to {Path} ({Size} bytes).", result.Path, result.SizeBytes); - } - - CliMetrics.RecordScannerDownload(channel, result.FromCache); - - if (install) - { - var installer = scope.ServiceProvider.GetRequiredService(); - await installer.InstallAsync(result.Path, verbose, cancellationToken).ConfigureAwait(false); - CliMetrics.RecordScannerInstall(channel); - } - - Environment.ExitCode = 0; - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to download scanner bundle."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandleScannerRunAsync( - IServiceProvider services, - string runner, - string entry, - string targetDirectory, - IReadOnlyList arguments, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var executor = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("scanner-run"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.scan.run", ActivityKind.Internal); - activity?.SetTag("stellaops.cli.command", "scan run"); - activity?.SetTag("stellaops.cli.runner", runner); - activity?.SetTag("stellaops.cli.entry", entry); - activity?.SetTag("stellaops.cli.target", targetDirectory); - using var duration = CliMetrics.MeasureCommandDuration("scan run"); - - try - { - var options = scope.ServiceProvider.GetRequiredService(); - var resultsDirectory = options.ResultsDirectory; - - var executionResult = await executor.RunAsync( - runner, - entry, - targetDirectory, - resultsDirectory, - arguments, - verbose, - cancellationToken).ConfigureAwait(false); - - Environment.ExitCode = executionResult.ExitCode; - CliMetrics.RecordScanRun(runner, executionResult.ExitCode); - - if (executionResult.ExitCode == 0) - { - var backend = scope.ServiceProvider.GetRequiredService(); - logger.LogInformation("Uploading scan artefact {Path}...", executionResult.ResultsPath); - await backend.UploadScanResultsAsync(executionResult.ResultsPath, cancellationToken).ConfigureAwait(false); - logger.LogInformation("Scan artefact uploaded."); - activity?.SetTag("stellaops.cli.results", executionResult.ResultsPath); - } - else - { - logger.LogWarning("Skipping automatic upload because scan exited with code {Code}.", executionResult.ExitCode); - } - - logger.LogInformation("Run metadata written to {Path}.", executionResult.RunMetadataPath); - activity?.SetTag("stellaops.cli.run_metadata", executionResult.RunMetadataPath); - } - catch (Exception ex) - { - logger.LogError(ex, "Scanner execution failed."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandleScanUploadAsync( - IServiceProvider services, - string file, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("scanner-upload"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.scan.upload", ActivityKind.Client); - activity?.SetTag("stellaops.cli.command", "scan upload"); - activity?.SetTag("stellaops.cli.file", file); - using var duration = CliMetrics.MeasureCommandDuration("scan upload"); - - try - { - var path = Path.GetFullPath(file); - await client.UploadScanResultsAsync(path, cancellationToken).ConfigureAwait(false); - logger.LogInformation("Scan results uploaded successfully."); - Environment.ExitCode = 0; - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to upload scan results."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandleSourcesIngestAsync( - IServiceProvider services, - bool dryRun, - string source, - string input, - string? tenantOverride, - string format, - bool disableColor, - string? 
output, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("sources-ingest"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; - - using var activity = CliActivitySource.Instance.StartActivity("cli.sources.ingest.dry_run", ActivityKind.Client); - var statusMetric = "unknown"; - using var duration = CliMetrics.MeasureCommandDuration("sources ingest dry-run"); - - try - { - if (!dryRun) - { - statusMetric = "unsupported"; - logger.LogError("Only --dry-run mode is supported for 'stella sources ingest' at this time."); - Environment.ExitCode = 1; - return; - } - - source = source?.Trim() ?? string.Empty; - if (string.IsNullOrWhiteSpace(source)) - { - throw new InvalidOperationException("Source identifier must be provided."); - } - - var formatNormalized = string.IsNullOrWhiteSpace(format) - ? "table" - : format.Trim().ToLowerInvariant(); - - if (formatNormalized is not ("table" or "json")) - { - throw new InvalidOperationException("Format must be either 'table' or 'json'."); - } - - var tenant = ResolveTenant(tenantOverride); - if (string.IsNullOrWhiteSpace(tenant)) - { - throw new InvalidOperationException("Tenant must be provided via --tenant or STELLA_TENANT."); - } - - var payload = await LoadIngestInputAsync(input, cancellationToken).ConfigureAwait(false); - - logger.LogInformation("Executing ingestion dry-run for source {Source} using input {Input}.", source, payload.Name); - - activity?.SetTag("stellaops.cli.command", "sources ingest dry-run"); - activity?.SetTag("stellaops.cli.source", source); - activity?.SetTag("stellaops.cli.tenant", tenant); - activity?.SetTag("stellaops.cli.format", formatNormalized); - activity?.SetTag("stellaops.cli.input_kind", payload.Kind); - - var request = new AocIngestDryRunRequest - { - Tenant = tenant, - Source = source, - Document = new AocIngestDryRunDocument - { - Name = payload.Name, - Content = payload.Content, - ContentType = payload.ContentType, - ContentEncoding = payload.ContentEncoding - } - }; - - var response = await client.ExecuteAocIngestDryRunAsync(request, cancellationToken).ConfigureAwait(false); - activity?.SetTag("stellaops.cli.status", response.Status ?? "unknown"); - - if (!string.IsNullOrWhiteSpace(output)) - { - var reportPath = await WriteJsonReportAsync(response, output, cancellationToken).ConfigureAwait(false); - logger.LogInformation("Dry-run report written to {Path}.", reportPath); - } - - if (formatNormalized == "json") - { - var json = JsonSerializer.Serialize(response, new JsonSerializerOptions - { - WriteIndented = true - }); - Console.WriteLine(json); - } - else - { - RenderDryRunTable(response, !disableColor); - } - - var exitCode = DetermineDryRunExitCode(response); - Environment.ExitCode = exitCode; - statusMetric = exitCode == 0 ? "ok" : "violation"; - activity?.SetTag("stellaops.cli.exit_code", exitCode); - } - catch (Exception ex) - { - statusMetric = "transport_error"; - logger.LogError(ex, "Dry-run ingestion failed."); - Environment.ExitCode = 70; - } - finally - { - verbosity.MinimumLevel = previousLevel; - CliMetrics.RecordSourcesDryRun(statusMetric); - } - } - - public static async Task HandleAocVerifyAsync( - IServiceProvider services, - string? sinceOption, - int? limitOption, - string? 
sourcesOption, - string? codesOption, - string format, - string? exportPath, - string? tenantOverride, - bool disableColor, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("aoc-verify"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; - - using var activity = CliActivitySource.Instance.StartActivity("cli.aoc.verify", ActivityKind.Client); - using var duration = CliMetrics.MeasureCommandDuration("aoc verify"); - var outcome = "unknown"; - - try - { - var tenant = ResolveTenant(tenantOverride); - if (string.IsNullOrWhiteSpace(tenant)) - { - throw new InvalidOperationException("Tenant must be provided via --tenant or STELLA_TENANT."); - } - - var normalizedFormat = string.IsNullOrWhiteSpace(format) - ? "table" - : format.Trim().ToLowerInvariant(); - - if (normalizedFormat is not ("table" or "json")) - { - throw new InvalidOperationException("Format must be either 'table' or 'json'."); - } - - var since = DetermineVerificationSince(sinceOption); - var sinceIso = since.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture); - var limit = NormalizeLimit(limitOption); - var sources = ParseCommaSeparatedList(sourcesOption); - var codes = ParseCommaSeparatedList(codesOption); - - var normalizedSources = sources.Count == 0 - ? Array.Empty() - : sources.Select(item => item.ToLowerInvariant()).ToArray(); - - var normalizedCodes = codes.Count == 0 - ? Array.Empty() - : codes.Select(item => item.ToUpperInvariant()).ToArray(); - - activity?.SetTag("stellaops.cli.command", "aoc verify"); - activity?.SetTag("stellaops.cli.tenant", tenant); - activity?.SetTag("stellaops.cli.since", sinceIso); - activity?.SetTag("stellaops.cli.limit", limit); - activity?.SetTag("stellaops.cli.format", normalizedFormat); - if (normalizedSources.Length > 0) - { - activity?.SetTag("stellaops.cli.sources", string.Join(",", normalizedSources)); - } - - if (normalizedCodes.Length > 0) - { - activity?.SetTag("stellaops.cli.codes", string.Join(",", normalizedCodes)); - } - - var request = new AocVerifyRequest - { - Tenant = tenant, - Since = sinceIso, - Limit = limit, - Sources = normalizedSources.Length == 0 ? null : normalizedSources, - Codes = normalizedCodes.Length == 0 ? 
null : normalizedCodes - }; - - var response = await client.ExecuteAocVerifyAsync(request, cancellationToken).ConfigureAwait(false); - - if (!string.IsNullOrWhiteSpace(exportPath)) - { - var reportPath = await WriteJsonReportAsync(response, exportPath, cancellationToken).ConfigureAwait(false); - logger.LogInformation("Verification report written to {Path}.", reportPath); - } - - if (normalizedFormat == "json") - { - var json = JsonSerializer.Serialize(response, new JsonSerializerOptions - { - WriteIndented = true - }); - Console.WriteLine(json); - } - else - { - RenderAocVerifyTable(response, !disableColor, limit); - } - - var exitCode = DetermineVerifyExitCode(response); - Environment.ExitCode = exitCode; - activity?.SetTag("stellaops.cli.exit_code", exitCode); - outcome = exitCode switch - { - 0 => "ok", - >= 11 and <= 17 => "violations", - 18 => "truncated", - _ => "unknown" - }; - } - catch (InvalidOperationException ex) - { - outcome = "usage_error"; - logger.LogError(ex, "Verification failed: {Message}", ex.Message); - Console.Error.WriteLine(ex.Message); - Environment.ExitCode = 71; - activity?.SetStatus(ActivityStatusCode.Error, ex.Message); - } - catch (Exception ex) - { - outcome = "transport_error"; - logger.LogError(ex, "Verification request failed."); - Console.Error.WriteLine(ex.Message); - Environment.ExitCode = 70; - activity?.SetStatus(ActivityStatusCode.Error, ex.Message); - } - finally - { - verbosity.MinimumLevel = previousLevel; - CliMetrics.RecordAocVerify(outcome); - } - } - - public static async Task HandleConnectorJobAsync( - IServiceProvider services, - string source, - string stage, - string? mode, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("db-connector"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.db.fetch", ActivityKind.Client); - activity?.SetTag("stellaops.cli.command", "db fetch"); - activity?.SetTag("stellaops.cli.source", source); - activity?.SetTag("stellaops.cli.stage", stage); - if (!string.IsNullOrWhiteSpace(mode)) - { - activity?.SetTag("stellaops.cli.mode", mode); - } - using var duration = CliMetrics.MeasureCommandDuration("db fetch"); - - try - { - var jobKind = $"source:{source}:{stage}"; - var parameters = new Dictionary(StringComparer.Ordinal); - if (!string.IsNullOrWhiteSpace(mode)) - { - parameters["mode"] = mode; - } - - await TriggerJobAsync(client, logger, jobKind, parameters, cancellationToken).ConfigureAwait(false); - } - catch (Exception ex) - { - logger.LogError(ex, "Connector job failed."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandleMergeJobAsync( - IServiceProvider services, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("db-merge"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.db.merge", ActivityKind.Client); - activity?.SetTag("stellaops.cli.command", "db merge"); - using var duration = CliMetrics.MeasureCommandDuration("db merge"); - - try - { - await TriggerJobAsync(client, logger, "merge:reconcile", new Dictionary(StringComparer.Ordinal), cancellationToken).ConfigureAwait(false); - } - catch (Exception ex) - { - logger.LogError(ex, "Merge job failed."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandleExportJobAsync( - IServiceProvider services, - string format, - bool delta, - bool? publishFull, - bool? publishDelta, - bool? includeFull, - bool? includeDelta, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("db-export"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.db.export", ActivityKind.Client); - activity?.SetTag("stellaops.cli.command", "db export"); - activity?.SetTag("stellaops.cli.format", format); - activity?.SetTag("stellaops.cli.delta", delta); - using var duration = CliMetrics.MeasureCommandDuration("db export"); - activity?.SetTag("stellaops.cli.publish_full", publishFull); - activity?.SetTag("stellaops.cli.publish_delta", publishDelta); - activity?.SetTag("stellaops.cli.include_full", includeFull); - activity?.SetTag("stellaops.cli.include_delta", includeDelta); - - try - { - var jobKind = format switch - { - "trivy-db" or "trivy" => "export:trivy-db", - _ => "export:json" - }; - - var isTrivy = jobKind == "export:trivy-db"; - if (isTrivy - && !publishFull.HasValue - && !publishDelta.HasValue - && !includeFull.HasValue - && !includeDelta.HasValue - && AnsiConsole.Profile.Capabilities.Interactive) - { - var overrides = TrivyDbExportPrompt.PromptOverrides(); - publishFull = overrides.publishFull; - publishDelta = overrides.publishDelta; - includeFull = overrides.includeFull; - includeDelta = overrides.includeDelta; - } - - var parameters = new Dictionary(StringComparer.Ordinal) - { - ["delta"] = delta - }; - if (publishFull.HasValue) - { - parameters["publishFull"] = publishFull.Value; - } - if (publishDelta.HasValue) - { - parameters["publishDelta"] = publishDelta.Value; - } - if (includeFull.HasValue) - { - parameters["includeFull"] = includeFull.Value; - } - if (includeDelta.HasValue) - { - parameters["includeDelta"] = includeDelta.Value; - } - - await TriggerJobAsync(client, logger, jobKind, parameters, cancellationToken).ConfigureAwait(false); - } - catch (Exception ex) - { - logger.LogError(ex, "Export job failed."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static Task HandleExcititorInitAsync( - IServiceProvider services, - IReadOnlyList providers, - bool resume, - bool verbose, - CancellationToken cancellationToken) - { - var normalizedProviders = NormalizeProviders(providers); - var payload = new Dictionary(StringComparer.Ordinal); - if (normalizedProviders.Count > 0) - { - payload["providers"] = normalizedProviders; - } - if (resume) - { - payload["resume"] = true; - } - - return 
ExecuteExcititorCommandAsync( - services, - commandName: "excititor init", - verbose, - new Dictionary - { - ["providers"] = normalizedProviders.Count, - ["resume"] = resume - }, - client => client.ExecuteExcititorOperationAsync("init", HttpMethod.Post, RemoveNullValues(payload), cancellationToken), - cancellationToken); - } - - public static Task HandleExcititorPullAsync( - IServiceProvider services, - IReadOnlyList providers, - DateTimeOffset? since, - TimeSpan? window, - bool force, - bool verbose, - CancellationToken cancellationToken) - { - var normalizedProviders = NormalizeProviders(providers); - var payload = new Dictionary(StringComparer.Ordinal); - if (normalizedProviders.Count > 0) - { - payload["providers"] = normalizedProviders; - } - if (since.HasValue) - { - payload["since"] = since.Value.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture); - } - if (window.HasValue) - { - payload["window"] = window.Value.ToString("c", CultureInfo.InvariantCulture); - } - if (force) - { - payload["force"] = true; - } - - return ExecuteExcititorCommandAsync( - services, - commandName: "excititor pull", - verbose, - new Dictionary - { - ["providers"] = normalizedProviders.Count, - ["force"] = force, - ["since"] = since?.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture), - ["window"] = window?.ToString("c", CultureInfo.InvariantCulture) - }, - client => client.ExecuteExcititorOperationAsync("ingest/run", HttpMethod.Post, RemoveNullValues(payload), cancellationToken), - cancellationToken); - } - - public static Task HandleExcititorResumeAsync( - IServiceProvider services, - IReadOnlyList providers, - string? checkpoint, - bool verbose, - CancellationToken cancellationToken) - { - var normalizedProviders = NormalizeProviders(providers); - var payload = new Dictionary(StringComparer.Ordinal); - if (normalizedProviders.Count > 0) - { - payload["providers"] = normalizedProviders; - } - if (!string.IsNullOrWhiteSpace(checkpoint)) - { - payload["checkpoint"] = checkpoint.Trim(); - } - - return ExecuteExcititorCommandAsync( - services, - commandName: "excititor resume", - verbose, - new Dictionary - { - ["providers"] = normalizedProviders.Count, - ["checkpoint"] = checkpoint - }, - client => client.ExecuteExcititorOperationAsync("ingest/resume", HttpMethod.Post, RemoveNullValues(payload), cancellationToken), - cancellationToken); - } - - public static async Task HandleExcititorListProvidersAsync( - IServiceProvider services, - bool includeDisabled, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("excititor-list-providers"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.excititor.list-providers", ActivityKind.Client); - activity?.SetTag("stellaops.cli.command", "excititor list-providers"); - activity?.SetTag("stellaops.cli.include_disabled", includeDisabled); - using var duration = CliMetrics.MeasureCommandDuration("excititor list-providers"); - - try - { - var providers = await client.GetExcititorProvidersAsync(includeDisabled, cancellationToken).ConfigureAwait(false); - Environment.ExitCode = 0; - logger.LogInformation("Providers returned: {Count}", providers.Count); - - if (providers.Count > 0) - { - if (AnsiConsole.Profile.Capabilities.Interactive) - { - var table = new Table().Border(TableBorder.Rounded).AddColumns("Provider", "Kind", "Trust", "Enabled", "Last Ingested"); - foreach (var provider in providers) - { - table.AddRow( - provider.Id, - provider.Kind, - string.IsNullOrWhiteSpace(provider.TrustTier) ? "-" : provider.TrustTier, - provider.Enabled ? "yes" : "no", - provider.LastIngestedAt?.ToString("yyyy-MM-dd HH:mm:ss 'UTC'", CultureInfo.InvariantCulture) ?? "unknown"); - } - - AnsiConsole.Write(table); - } - else - { - foreach (var provider in providers) - { - logger.LogInformation("{ProviderId} [{Kind}] Enabled={Enabled} Trust={Trust} LastIngested={LastIngested}", - provider.Id, - provider.Kind, - provider.Enabled ? "yes" : "no", - string.IsNullOrWhiteSpace(provider.TrustTier) ? "-" : provider.TrustTier, - provider.LastIngestedAt?.ToString("O", CultureInfo.InvariantCulture) ?? "unknown"); - } - } - } - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to list Excititor providers."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandleExcititorExportAsync( - IServiceProvider services, - string format, - bool delta, - string? scope, - DateTimeOffset? since, - string? provider, - string? outputPath, - bool verbose, - CancellationToken cancellationToken) - { - await using var scopeHandle = services.CreateAsyncScope(); - var client = scopeHandle.ServiceProvider.GetRequiredService(); - var logger = scopeHandle.ServiceProvider.GetRequiredService().CreateLogger("excititor-export"); - var options = scopeHandle.ServiceProvider.GetRequiredService(); - var verbosity = scopeHandle.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.excititor.export", ActivityKind.Client); - activity?.SetTag("stellaops.cli.command", "excititor export"); - activity?.SetTag("stellaops.cli.format", format); - activity?.SetTag("stellaops.cli.delta", delta); - if (!string.IsNullOrWhiteSpace(scope)) - { - activity?.SetTag("stellaops.cli.scope", scope); - } - if (since.HasValue) - { - activity?.SetTag("stellaops.cli.since", since.Value.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture)); - } - if (!string.IsNullOrWhiteSpace(provider)) - { - activity?.SetTag("stellaops.cli.provider", provider); - } - if (!string.IsNullOrWhiteSpace(outputPath)) - { - activity?.SetTag("stellaops.cli.output", outputPath); - } - using var duration = CliMetrics.MeasureCommandDuration("excititor export"); - - try - { - var payload = new Dictionary(StringComparer.Ordinal) - { - ["format"] = string.IsNullOrWhiteSpace(format) ? 
"openvex" : format.Trim(), - ["delta"] = delta - }; - - if (!string.IsNullOrWhiteSpace(scope)) - { - payload["scope"] = scope.Trim(); - } - if (since.HasValue) - { - payload["since"] = since.Value.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture); - } - if (!string.IsNullOrWhiteSpace(provider)) - { - payload["provider"] = provider.Trim(); - } - - var result = await client.ExecuteExcititorOperationAsync( - "export", - HttpMethod.Post, - RemoveNullValues(payload), - cancellationToken).ConfigureAwait(false); - - if (!result.Success) - { - logger.LogError(string.IsNullOrWhiteSpace(result.Message) ? "Excititor export failed." : result.Message); - Environment.ExitCode = 1; - return; - } - - Environment.ExitCode = 0; - - var manifest = TryParseExportManifest(result.Payload); - if (!string.IsNullOrWhiteSpace(result.Message) - && (manifest is null || !string.Equals(result.Message, "ok", StringComparison.OrdinalIgnoreCase))) - { - logger.LogInformation(result.Message); - } - - if (manifest is not null) - { - activity?.SetTag("stellaops.cli.export_id", manifest.ExportId); - if (!string.IsNullOrWhiteSpace(manifest.Format)) - { - activity?.SetTag("stellaops.cli.export_format", manifest.Format); - } - if (manifest.FromCache.HasValue) - { - activity?.SetTag("stellaops.cli.export_cached", manifest.FromCache.Value); - } - if (manifest.SizeBytes.HasValue) - { - activity?.SetTag("stellaops.cli.export_size", manifest.SizeBytes.Value); - } - - if (manifest.FromCache == true) - { - logger.LogInformation("Reusing cached export {ExportId} ({Format}).", manifest.ExportId, manifest.Format ?? "unknown"); - } - else - { - logger.LogInformation("Export ready: {ExportId} ({Format}).", manifest.ExportId, manifest.Format ?? "unknown"); - } - - if (manifest.CreatedAt.HasValue) - { - logger.LogInformation("Created at {CreatedAt}.", manifest.CreatedAt.Value.ToString("u", CultureInfo.InvariantCulture)); - } - - if (!string.IsNullOrWhiteSpace(manifest.Digest)) - { - var digestDisplay = BuildDigestDisplay(manifest.Algorithm, manifest.Digest); - if (manifest.SizeBytes.HasValue) - { - logger.LogInformation("Digest {Digest} ({Size}).", digestDisplay, FormatSize(manifest.SizeBytes.Value)); - } - else - { - logger.LogInformation("Digest {Digest}.", digestDisplay); - } - } - - if (!string.IsNullOrWhiteSpace(manifest.RekorLocation)) - { - if (!string.IsNullOrWhiteSpace(manifest.RekorIndex)) - { - logger.LogInformation("Rekor entry: {Location} (index {Index}).", manifest.RekorLocation, manifest.RekorIndex); - } - else - { - logger.LogInformation("Rekor entry: {Location}.", manifest.RekorLocation); - } - } - - if (!string.IsNullOrWhiteSpace(manifest.RekorInclusionUrl) - && !string.Equals(manifest.RekorInclusionUrl, manifest.RekorLocation, StringComparison.OrdinalIgnoreCase)) - { - logger.LogInformation("Rekor inclusion proof: {Url}.", manifest.RekorInclusionUrl); - } - - if (!string.IsNullOrWhiteSpace(outputPath)) - { - var resolvedPath = ResolveExportOutputPath(outputPath!, manifest); - var download = await client.DownloadExcititorExportAsync( - manifest.ExportId, - resolvedPath, - manifest.Algorithm, - manifest.Digest, - cancellationToken).ConfigureAwait(false); - - activity?.SetTag("stellaops.cli.export_path", download.Path); - - if (download.FromCache) - { - logger.LogInformation("Export already cached at {Path} ({Size}).", download.Path, FormatSize(download.SizeBytes)); - } - else - { - logger.LogInformation("Export saved to {Path} ({Size}).", download.Path, FormatSize(download.SizeBytes)); - } - } - else if 
(!string.IsNullOrWhiteSpace(result.Location)) - { - var downloadUrl = ResolveLocationUrl(options, result.Location); - if (!string.IsNullOrWhiteSpace(downloadUrl)) - { - logger.LogInformation("Download URL: {Url}", downloadUrl); - } - else - { - logger.LogInformation("Download location: {Location}", result.Location); - } - } - } - else - { - if (!string.IsNullOrWhiteSpace(result.Location)) - { - var downloadUrl = ResolveLocationUrl(options, result.Location); - if (!string.IsNullOrWhiteSpace(downloadUrl)) - { - logger.LogInformation("Download URL: {Url}", downloadUrl); - } - else - { - logger.LogInformation("Location: {Location}", result.Location); - } - } - else if (string.IsNullOrWhiteSpace(result.Message)) - { - logger.LogInformation("Export request accepted."); - } - } - } - catch (Exception ex) - { - logger.LogError(ex, "Excititor export failed."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static Task HandleExcititorBackfillStatementsAsync( - IServiceProvider services, - DateTimeOffset? retrievedSince, - bool force, - int batchSize, - int? maxDocuments, - bool verbose, - CancellationToken cancellationToken) - { - if (batchSize <= 0) - { - throw new ArgumentOutOfRangeException(nameof(batchSize), "Batch size must be greater than zero."); - } - - if (maxDocuments.HasValue && maxDocuments.Value <= 0) - { - throw new ArgumentOutOfRangeException(nameof(maxDocuments), "Max documents must be greater than zero when specified."); - } - - var payload = new Dictionary(StringComparer.Ordinal) - { - ["force"] = force, - ["batchSize"] = batchSize, - ["maxDocuments"] = maxDocuments - }; - - if (retrievedSince.HasValue) - { - payload["retrievedSince"] = retrievedSince.Value.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture); - } - - var activityTags = new Dictionary(StringComparer.Ordinal) - { - ["stellaops.cli.force"] = force, - ["stellaops.cli.batch_size"] = batchSize, - ["stellaops.cli.max_documents"] = maxDocuments - }; - - if (retrievedSince.HasValue) - { - activityTags["stellaops.cli.retrieved_since"] = retrievedSince.Value.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture); - } - - return ExecuteExcititorCommandAsync( - services, - commandName: "excititor backfill-statements", - verbose, - activityTags, - client => client.ExecuteExcititorOperationAsync( - "admin/backfill-statements", - HttpMethod.Post, - RemoveNullValues(payload), - cancellationToken), - cancellationToken); - } - - public static Task HandleExcititorVerifyAsync( - IServiceProvider services, - string? exportId, - string? digest, - string? 
attestationPath, - bool verbose, - CancellationToken cancellationToken) - { - if (string.IsNullOrWhiteSpace(exportId) && string.IsNullOrWhiteSpace(digest) && string.IsNullOrWhiteSpace(attestationPath)) - { - var logger = services.GetRequiredService().CreateLogger("excititor-verify"); - logger.LogError("At least one of --export-id, --digest, or --attestation must be provided."); - Environment.ExitCode = 1; - return Task.CompletedTask; - } - - var payload = new Dictionary(StringComparer.Ordinal); - if (!string.IsNullOrWhiteSpace(exportId)) - { - payload["exportId"] = exportId.Trim(); - } - if (!string.IsNullOrWhiteSpace(digest)) - { - payload["digest"] = digest.Trim(); - } - if (!string.IsNullOrWhiteSpace(attestationPath)) - { - var fullPath = Path.GetFullPath(attestationPath); - if (!File.Exists(fullPath)) - { - var logger = services.GetRequiredService().CreateLogger("excititor-verify"); - logger.LogError("Attestation file not found at {Path}.", fullPath); - Environment.ExitCode = 1; - return Task.CompletedTask; - } - - var bytes = File.ReadAllBytes(fullPath); - payload["attestation"] = new Dictionary(StringComparer.Ordinal) - { - ["fileName"] = Path.GetFileName(fullPath), - ["base64"] = Convert.ToBase64String(bytes) - }; - } - - return ExecuteExcititorCommandAsync( - services, - commandName: "excititor verify", - verbose, - new Dictionary - { - ["export_id"] = exportId, - ["digest"] = digest, - ["attestation_path"] = attestationPath - }, - client => client.ExecuteExcititorOperationAsync("verify", HttpMethod.Post, RemoveNullValues(payload), cancellationToken), - cancellationToken); - } - - public static Task HandleExcititorReconcileAsync( - IServiceProvider services, - IReadOnlyList providers, - TimeSpan? maxAge, - bool verbose, - CancellationToken cancellationToken) - { - var normalizedProviders = NormalizeProviders(providers); - var payload = new Dictionary(StringComparer.Ordinal); - if (normalizedProviders.Count > 0) - { - payload["providers"] = normalizedProviders; - } - if (maxAge.HasValue) - { - payload["maxAge"] = maxAge.Value.ToString("c", CultureInfo.InvariantCulture); - } - - return ExecuteExcititorCommandAsync( - services, - commandName: "excititor reconcile", - verbose, - new Dictionary - { - ["providers"] = normalizedProviders.Count, - ["max_age"] = maxAge?.ToString("c", CultureInfo.InvariantCulture) - }, - client => client.ExecuteExcititorOperationAsync("reconcile", HttpMethod.Post, RemoveNullValues(payload), cancellationToken), - cancellationToken); - } - - public static async Task HandleRuntimePolicyTestAsync( - IServiceProvider services, - string? namespaceValue, - IReadOnlyList imageArguments, - string? filePath, - IReadOnlyList labelArguments, - bool outputJson, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("runtime-policy-test"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.runtime.policy.test", ActivityKind.Client); - activity?.SetTag("stellaops.cli.command", "runtime policy test"); - if (!string.IsNullOrWhiteSpace(namespaceValue)) - { - activity?.SetTag("stellaops.cli.namespace", namespaceValue); - } - using var duration = CliMetrics.MeasureCommandDuration("runtime policy test"); - - try - { - IReadOnlyList images; - try - { - images = await GatherImageDigestsAsync(imageArguments, filePath, cancellationToken).ConfigureAwait(false); - } - catch (Exception ex) when (ex is IOException or UnauthorizedAccessException or ArgumentException or FileNotFoundException) - { - logger.LogError(ex, "Failed to gather image digests: {Message}", ex.Message); - Environment.ExitCode = 9; - return; - } - - if (images.Count == 0) - { - logger.LogError("No image digests provided. Use --image, --file, or pipe digests via stdin."); - Environment.ExitCode = 9; - return; - } - - IReadOnlyDictionary labels; - try - { - labels = ParseLabelSelectors(labelArguments); - } - catch (ArgumentException ex) - { - logger.LogError(ex.Message); - Environment.ExitCode = 9; - return; - } - - activity?.SetTag("stellaops.cli.images", images.Count); - activity?.SetTag("stellaops.cli.labels", labels.Count); - - var request = new RuntimePolicyEvaluationRequest(namespaceValue, labels, images); - var result = await client.EvaluateRuntimePolicyAsync(request, cancellationToken).ConfigureAwait(false); - - activity?.SetTag("stellaops.cli.ttl_seconds", result.TtlSeconds); - Environment.ExitCode = 0; - - if (outputJson) - { - var json = BuildRuntimePolicyJson(result, images); - Console.WriteLine(json); - return; - } - - if (result.ExpiresAtUtc.HasValue) - { - logger.LogInformation("Decision TTL: {TtlSeconds}s (expires {ExpiresAt})", result.TtlSeconds, result.ExpiresAtUtc.Value.ToString("u", CultureInfo.InvariantCulture)); - } - else - { - logger.LogInformation("Decision TTL: {TtlSeconds}s", result.TtlSeconds); - } - - if (!string.IsNullOrWhiteSpace(result.PolicyRevision)) - { - logger.LogInformation("Policy revision: {Revision}", result.PolicyRevision); - } - - DisplayRuntimePolicyResults(logger, result, images); - } - catch (Exception ex) - { - logger.LogError(ex, "Runtime policy evaluation failed."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandleAuthLoginAsync( - IServiceProvider services, - StellaOpsCliOptions options, - bool verbose, - bool force, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("auth-login"); - Environment.ExitCode = 0; - - if (string.IsNullOrWhiteSpace(options.Authority?.Url)) - { - logger.LogError("Authority URL is not configured. Set STELLAOPS_AUTHORITY_URL or update your configuration."); - Environment.ExitCode = 1; - return; - } - - var tokenClient = scope.ServiceProvider.GetService(); - if (tokenClient is null) - { - logger.LogError("Authority client is not available. 
Ensure AddStellaOpsAuthClient is registered in Program.cs."); - Environment.ExitCode = 1; - return; - } - - var cacheKey = AuthorityTokenUtilities.BuildCacheKey(options); - if (string.IsNullOrWhiteSpace(cacheKey)) - { - logger.LogError("Authority configuration is incomplete; unable to determine cache key."); - Environment.ExitCode = 1; - return; - } - - try - { - if (force) - { - await tokenClient.ClearCachedTokenAsync(cacheKey, cancellationToken).ConfigureAwait(false); - } - - var scopeName = AuthorityTokenUtilities.ResolveScope(options); - StellaOpsTokenResult token; - - if (!string.IsNullOrWhiteSpace(options.Authority.Username)) - { - if (string.IsNullOrWhiteSpace(options.Authority.Password)) - { - logger.LogError("Authority password must be provided when username is configured."); - Environment.ExitCode = 1; - return; - } - - token = await tokenClient.RequestPasswordTokenAsync( - options.Authority.Username, - options.Authority.Password!, - scopeName, - null, - cancellationToken).ConfigureAwait(false); - } - else - { - token = await tokenClient.RequestClientCredentialsTokenAsync(scopeName, null, cancellationToken).ConfigureAwait(false); - } - - await tokenClient.CacheTokenAsync(cacheKey, token.ToCacheEntry(), cancellationToken).ConfigureAwait(false); - - if (verbose) - { - logger.LogInformation("Authenticated with {Authority} (scopes: {Scopes}).", options.Authority.Url, string.Join(", ", token.Scopes)); - } - - logger.LogInformation("Login successful. Access token expires at {Expires}.", token.ExpiresAtUtc.ToString("u")); - } - catch (Exception ex) - { - logger.LogError(ex, "Authentication failed: {Message}", ex.Message); - Environment.ExitCode = 1; - } - } - - public static async Task HandleAuthLogoutAsync( - IServiceProvider services, - StellaOpsCliOptions options, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("auth-logout"); - Environment.ExitCode = 0; - - var tokenClient = scope.ServiceProvider.GetService(); - if (tokenClient is null) - { - logger.LogInformation("No authority client registered; nothing to remove."); - return; - } - - var cacheKey = AuthorityTokenUtilities.BuildCacheKey(options); - if (string.IsNullOrWhiteSpace(cacheKey)) - { - logger.LogInformation("Authority configuration missing; no cached tokens to remove."); - return; - } - - await tokenClient.ClearCachedTokenAsync(cacheKey, cancellationToken).ConfigureAwait(false); - if (verbose) - { - logger.LogInformation("Cleared cached token for {Authority}.", options.Authority?.Url ?? "authority"); - } - } - - public static async Task HandleAuthStatusAsync( - IServiceProvider services, - StellaOpsCliOptions options, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("auth-status"); - Environment.ExitCode = 0; - - if (string.IsNullOrWhiteSpace(options.Authority?.Url)) - { - logger.LogInformation("Authority URL not configured. 
Set STELLAOPS_AUTHORITY_URL and run 'auth login'."); - Environment.ExitCode = 1; - return; - } - - var tokenClient = scope.ServiceProvider.GetService(); - if (tokenClient is null) - { - logger.LogInformation("Authority client not registered; no cached tokens available."); - Environment.ExitCode = 1; - return; - } - - var cacheKey = AuthorityTokenUtilities.BuildCacheKey(options); - if (string.IsNullOrWhiteSpace(cacheKey)) - { - logger.LogInformation("Authority configuration incomplete; no cached tokens available."); - Environment.ExitCode = 1; - return; - } - - var entry = await tokenClient.GetCachedTokenAsync(cacheKey, cancellationToken).ConfigureAwait(false); - if (entry is null) - { - logger.LogInformation("No cached token for {Authority}. Run 'auth login' to authenticate.", options.Authority.Url); - Environment.ExitCode = 1; - return; - } - - logger.LogInformation("Cached token for {Authority} expires at {Expires}.", options.Authority.Url, entry.ExpiresAtUtc.ToString("u")); - if (verbose) - { - logger.LogInformation("Scopes: {Scopes}", string.Join(", ", entry.Scopes)); - } - } - - public static async Task HandleAuthWhoAmIAsync( - IServiceProvider services, - StellaOpsCliOptions options, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("auth-whoami"); - Environment.ExitCode = 0; - - if (string.IsNullOrWhiteSpace(options.Authority?.Url)) - { - logger.LogInformation("Authority URL not configured. Set STELLAOPS_AUTHORITY_URL and run 'auth login'."); - Environment.ExitCode = 1; - return; - } - - var tokenClient = scope.ServiceProvider.GetService(); - if (tokenClient is null) - { - logger.LogInformation("Authority client not registered; no cached tokens available."); - Environment.ExitCode = 1; - return; - } - - var cacheKey = AuthorityTokenUtilities.BuildCacheKey(options); - if (string.IsNullOrWhiteSpace(cacheKey)) - { - logger.LogInformation("Authority configuration incomplete; no cached tokens available."); - Environment.ExitCode = 1; - return; - } - - var entry = await tokenClient.GetCachedTokenAsync(cacheKey, cancellationToken).ConfigureAwait(false); - if (entry is null) - { - logger.LogInformation("No cached token for {Authority}. Run 'auth login' to authenticate.", options.Authority.Url); - Environment.ExitCode = 1; - return; - } - - var grantType = string.IsNullOrWhiteSpace(options.Authority.Username) ? 
"client_credentials" : "password"; - var now = DateTimeOffset.UtcNow; - var remaining = entry.ExpiresAtUtc - now; - if (remaining < TimeSpan.Zero) - { - remaining = TimeSpan.Zero; - } - - logger.LogInformation("Authority: {Authority}", options.Authority.Url); - logger.LogInformation("Grant type: {GrantType}", grantType); - logger.LogInformation("Token type: {TokenType}", entry.TokenType); - logger.LogInformation("Expires: {Expires} ({Remaining})", entry.ExpiresAtUtc.ToString("u"), FormatDuration(remaining)); - - if (entry.Scopes.Count > 0) - { - logger.LogInformation("Scopes: {Scopes}", string.Join(", ", entry.Scopes)); - } - - if (TryExtractJwtClaims(entry.AccessToken, out var claims, out var issuedAt, out var notBefore)) - { - if (claims.TryGetValue("sub", out var subject) && !string.IsNullOrWhiteSpace(subject)) - { - logger.LogInformation("Subject: {Subject}", subject); - } - - if (claims.TryGetValue("client_id", out var clientId) && !string.IsNullOrWhiteSpace(clientId)) - { - logger.LogInformation("Client ID (token): {ClientId}", clientId); - } - - if (claims.TryGetValue("aud", out var audience) && !string.IsNullOrWhiteSpace(audience)) - { - logger.LogInformation("Audience: {Audience}", audience); - } - - if (claims.TryGetValue("iss", out var issuer) && !string.IsNullOrWhiteSpace(issuer)) - { - logger.LogInformation("Issuer: {Issuer}", issuer); - } - - if (issuedAt is not null) - { - logger.LogInformation("Issued at: {IssuedAt}", issuedAt.Value.ToString("u")); - } - - if (notBefore is not null) - { - logger.LogInformation("Not before: {NotBefore}", notBefore.Value.ToString("u")); - } - - var extraClaims = CollectAdditionalClaims(claims); - if (extraClaims.Count > 0 && verbose) - { - logger.LogInformation("Additional claims: {Claims}", string.Join(", ", extraClaims)); - } - } - else - { - logger.LogInformation("Access token appears opaque; claims are unavailable."); - } - } - - public static async Task HandleAuthRevokeExportAsync( - IServiceProvider services, - StellaOpsCliOptions options, - string? outputDirectory, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("auth-revoke-export"); - Environment.ExitCode = 0; - - try - { - var client = scope.ServiceProvider.GetRequiredService(); - var result = await client.ExportAsync(verbose, cancellationToken).ConfigureAwait(false); - - var directory = string.IsNullOrWhiteSpace(outputDirectory) - ? Directory.GetCurrentDirectory() - : Path.GetFullPath(outputDirectory); - - Directory.CreateDirectory(directory); - - var bundlePath = Path.Combine(directory, "revocation-bundle.json"); - var signaturePath = Path.Combine(directory, "revocation-bundle.json.jws"); - var digestPath = Path.Combine(directory, "revocation-bundle.json.sha256"); - - await File.WriteAllBytesAsync(bundlePath, result.BundleBytes, cancellationToken).ConfigureAwait(false); - await File.WriteAllTextAsync(signaturePath, result.Signature, cancellationToken).ConfigureAwait(false); - await File.WriteAllTextAsync(digestPath, $"sha256:{result.Digest}", cancellationToken).ConfigureAwait(false); - - var computedDigest = Convert.ToHexString(SHA256.HashData(result.BundleBytes)).ToLowerInvariant(); - if (!string.Equals(computedDigest, result.Digest, StringComparison.OrdinalIgnoreCase)) - { - logger.LogError("Digest mismatch. 
Expected {Expected} but computed {Actual}.", result.Digest, computedDigest); - Environment.ExitCode = 1; - return; - } - - logger.LogInformation( - "Revocation bundle exported to {Directory} (sequence {Sequence}, issued {Issued:u}, signing key {KeyId}, provider {Provider}).", - directory, - result.Sequence, - result.IssuedAt, - string.IsNullOrWhiteSpace(result.SigningKeyId) ? "" : result.SigningKeyId, - string.IsNullOrWhiteSpace(result.SigningProvider) ? "default" : result.SigningProvider); - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to export revocation bundle."); - Environment.ExitCode = 1; - } - } - - public static async Task HandleAuthRevokeVerifyAsync( - string bundlePath, - string signaturePath, - string keyPath, - bool verbose, - CancellationToken cancellationToken) - { - var loggerFactory = LoggerFactory.Create(builder => builder.AddSimpleConsole(options => - { - options.SingleLine = true; - options.TimestampFormat = "HH:mm:ss "; - })); - var logger = loggerFactory.CreateLogger("auth-revoke-verify"); - Environment.ExitCode = 0; - - try - { - if (string.IsNullOrWhiteSpace(bundlePath) || string.IsNullOrWhiteSpace(signaturePath) || string.IsNullOrWhiteSpace(keyPath)) - { - logger.LogError("Arguments --bundle, --signature, and --key are required."); - Environment.ExitCode = 1; - return; - } - - var bundleBytes = await File.ReadAllBytesAsync(bundlePath, cancellationToken).ConfigureAwait(false); - var signatureContent = (await File.ReadAllTextAsync(signaturePath, cancellationToken).ConfigureAwait(false)).Trim(); - var keyPem = await File.ReadAllTextAsync(keyPath, cancellationToken).ConfigureAwait(false); - - var digest = Convert.ToHexString(SHA256.HashData(bundleBytes)).ToLowerInvariant(); - logger.LogInformation("Bundle digest sha256:{Digest}", digest); - - if (!TryParseDetachedJws(signatureContent, out var encodedHeader, out var encodedSignature)) - { - logger.LogError("Signature is not in detached JWS format."); - Environment.ExitCode = 1; - return; - } - - var headerJson = Encoding.UTF8.GetString(Base64UrlDecode(encodedHeader)); - using var headerDocument = JsonDocument.Parse(headerJson); - var header = headerDocument.RootElement; - - if (!header.TryGetProperty("b64", out var b64Element) || b64Element.GetBoolean()) - { - logger.LogError("Detached JWS header must include '\"b64\": false'."); - Environment.ExitCode = 1; - return; - } - - var algorithm = header.TryGetProperty("alg", out var algElement) ? algElement.GetString() : SignatureAlgorithms.Es256; - if (string.IsNullOrWhiteSpace(algorithm)) - { - algorithm = SignatureAlgorithms.Es256; - } - - var providerHint = header.TryGetProperty("provider", out var providerElement) - ? providerElement.GetString() - : null; - - var keyId = header.TryGetProperty("kid", out var kidElement) ? 
kidElement.GetString() : null; - if (string.IsNullOrWhiteSpace(keyId)) - { - keyId = Path.GetFileNameWithoutExtension(keyPath); - logger.LogWarning("JWS header missing 'kid'; using fallback key id {KeyId}.", keyId); - } - - CryptoSigningKey signingKey; - try - { - signingKey = CreateVerificationSigningKey(keyId!, algorithm!, providerHint, keyPem, keyPath); - } - catch (Exception ex) when (ex is InvalidOperationException or CryptographicException) - { - logger.LogError(ex, "Failed to load verification key material."); - Environment.ExitCode = 1; - return; - } - - var providers = new List - { - new DefaultCryptoProvider() - }; - -#if STELLAOPS_CRYPTO_SODIUM - providers.Add(new LibsodiumCryptoProvider()); -#endif - - foreach (var provider in providers) - { - if (provider.Supports(CryptoCapability.Verification, algorithm!)) - { - provider.UpsertSigningKey(signingKey); - } - } - - var preferredOrder = !string.IsNullOrWhiteSpace(providerHint) - ? new[] { providerHint! } - : Array.Empty(); - var registry = new CryptoProviderRegistry(providers, preferredOrder); - CryptoSignerResolution resolution; - try - { - resolution = registry.ResolveSigner( - CryptoCapability.Verification, - algorithm!, - signingKey.Reference, - providerHint); - } - catch (Exception ex) - { - logger.LogError(ex, "No crypto provider available for verification (algorithm {Algorithm}).", algorithm); - Environment.ExitCode = 1; - return; - } - - var signingInputLength = encodedHeader.Length + 1 + bundleBytes.Length; - var buffer = ArrayPool.Shared.Rent(signingInputLength); - try - { - var headerBytes = Encoding.ASCII.GetBytes(encodedHeader); - Buffer.BlockCopy(headerBytes, 0, buffer, 0, headerBytes.Length); - buffer[headerBytes.Length] = (byte)'.'; - Buffer.BlockCopy(bundleBytes, 0, buffer, headerBytes.Length + 1, bundleBytes.Length); - - var signatureBytes = Base64UrlDecode(encodedSignature); - var verified = await resolution.Signer.VerifyAsync( - new ReadOnlyMemory(buffer, 0, signingInputLength), - signatureBytes, - cancellationToken).ConfigureAwait(false); - - if (!verified) - { - logger.LogError("Signature verification failed."); - Environment.ExitCode = 1; - return; - } - } - finally - { - ArrayPool.Shared.Return(buffer); - } - - if (!string.IsNullOrWhiteSpace(providerHint) && !string.Equals(providerHint, resolution.ProviderName, StringComparison.OrdinalIgnoreCase)) - { - logger.LogWarning( - "Preferred provider '{Preferred}' unavailable; verification used '{Provider}'.", - providerHint, - resolution.ProviderName); - } - - logger.LogInformation( - "Signature verified using algorithm {Algorithm} via provider {Provider} (kid {KeyId}).", - algorithm, - resolution.ProviderName, - signingKey.Reference.KeyId); - - if (verbose) - { - logger.LogInformation("JWS header: {Header}", headerJson); - } - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to verify revocation bundle."); - Environment.ExitCode = 1; - } - finally - { - loggerFactory.Dispose(); - } - } - - public static async Task HandleVulnObservationsAsync( - IServiceProvider services, - string tenant, - IReadOnlyList observationIds, - IReadOnlyList aliases, - IReadOnlyList purls, - IReadOnlyList cpes, - int? limit, - string? 
cursor, - bool emitJson, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("vuln-observations"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.vuln.observations", ActivityKind.Client); - activity?.SetTag("stellaops.cli.command", "vuln observations"); - activity?.SetTag("stellaops.cli.tenant", tenant); - using var duration = CliMetrics.MeasureCommandDuration("vuln observations"); - - try - { - tenant = tenant?.Trim().ToLowerInvariant() ?? string.Empty; - if (string.IsNullOrWhiteSpace(tenant)) - { - throw new InvalidOperationException("Tenant must be provided."); - } - - var query = new AdvisoryObservationsQuery( - tenant, - NormalizeSet(observationIds, toLower: false), - NormalizeSet(aliases, toLower: true), - NormalizeSet(purls, toLower: false), - NormalizeSet(cpes, toLower: false), - limit, - cursor); - - var response = await client.GetObservationsAsync(query, cancellationToken).ConfigureAwait(false); - - if (emitJson) - { - var json = JsonSerializer.Serialize(response, new JsonSerializerOptions - { - WriteIndented = true - }); - Console.WriteLine(json); - Environment.ExitCode = 0; - return; - } - - RenderObservationTable(response); - if (!emitJson && response.HasMore && !string.IsNullOrWhiteSpace(response.NextCursor)) - { - var escapedCursor = Markup.Escape(response.NextCursor); - AnsiConsole.MarkupLine($"[yellow]More observations available. Continue with[/] [cyan]--cursor[/] [grey]{escapedCursor}[/]"); - } - Environment.ExitCode = 0; - } - catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) - { - logger.LogWarning("Operation cancelled by user."); - Environment.ExitCode = 130; - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to fetch observations from Concelier."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - - static IReadOnlyList NormalizeSet(IReadOnlyList values, bool toLower) - { - if (values is null || values.Count == 0) - { - return Array.Empty(); - } - - var set = new HashSet(StringComparer.Ordinal); - foreach (var raw in values) - { - if (string.IsNullOrWhiteSpace(raw)) - { - continue; - } - - var normalized = raw.Trim(); - if (toLower) - { - normalized = normalized.ToLowerInvariant(); - } - - set.Add(normalized); - } - - return set.Count == 0 ? Array.Empty() : set.ToArray(); - } - - static void RenderObservationTable(AdvisoryObservationsResponse response) - { - var observations = response.Observations ?? Array.Empty(); - if (observations.Count == 0) - { - AnsiConsole.MarkupLine("[yellow]No observations matched the provided filters.[/]"); - return; - } - - var table = new Table() - .Centered() - .Border(TableBorder.Rounded); - - table.AddColumn("Observation"); - table.AddColumn("Source"); - table.AddColumn("Upstream Id"); - table.AddColumn("Aliases"); - table.AddColumn("PURLs"); - table.AddColumn("CPEs"); - table.AddColumn("Created (UTC)"); - - foreach (var observation in observations) - { - var sourceVendor = observation.Source?.Vendor ?? "(unknown)"; - var upstreamId = observation.Upstream?.UpstreamId ?? 
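// Minimal sketch of the NormalizeSet helper above: trim each value, optionally
// lower-case it (aliases), and de-duplicate with ordinal comparison so the query
// sent to Concelier is deterministic.
using System;
using System.Collections.Generic;
using System.Linq;

static class FilterNormalization
{
    public static IReadOnlyList<string> Normalize(IEnumerable<string?>? values, bool toLower)
    {
        var set = new HashSet<string>(StringComparer.Ordinal);
        foreach (var raw in values ?? Enumerable.Empty<string?>())
        {
            if (string.IsNullOrWhiteSpace(raw))
            {
                continue;
            }

            var normalized = raw.Trim();
            set.Add(toLower ? normalized.ToLowerInvariant() : normalized);
        }

        return set.Count == 0 ? Array.Empty<string>() : set.ToArray();
    }
}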
"(unknown)"; - var aliasesText = FormatList(observation.Linkset?.Aliases); - var purlsText = FormatList(observation.Linkset?.Purls); - var cpesText = FormatList(observation.Linkset?.Cpes); - - table.AddRow( - Markup.Escape(observation.ObservationId), - Markup.Escape(sourceVendor), - Markup.Escape(upstreamId), - Markup.Escape(aliasesText), - Markup.Escape(purlsText), - Markup.Escape(cpesText), - observation.CreatedAt.ToUniversalTime().ToString("u", CultureInfo.InvariantCulture)); - } - - AnsiConsole.Write(table); - AnsiConsole.MarkupLine( - "[green]{0}[/] observation(s). Aliases: [green]{1}[/], PURLs: [green]{2}[/], CPEs: [green]{3}[/].", - observations.Count, - response.Linkset?.Aliases?.Count ?? 0, - response.Linkset?.Purls?.Count ?? 0, - response.Linkset?.Cpes?.Count ?? 0); - } - - static string FormatList(IReadOnlyList? values) - { - if (values is null || values.Count == 0) - { - return "(none)"; - } - - const int MaxItems = 3; - if (values.Count <= MaxItems) - { - return string.Join(", ", values); - } - - var preview = values.Take(MaxItems); - return $"{string.Join(", ", preview)} (+{values.Count - MaxItems})"; - } - } - - public static async Task HandleOfflineKitPullAsync( - IServiceProvider services, - string? bundleId, - string? destinationDirectory, - bool overwrite, - bool resume, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var options = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("offline-kit-pull"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.offline.kit.pull", ActivityKind.Client); - activity?.SetTag("stellaops.cli.bundle_id", string.IsNullOrWhiteSpace(bundleId) ? "latest" : bundleId); - using var duration = CliMetrics.MeasureCommandDuration("offline kit pull"); - - try - { - var targetDirectory = string.IsNullOrWhiteSpace(destinationDirectory) - ? options.Offline?.KitsDirectory ?? Path.Combine(Environment.CurrentDirectory, "offline-kits") - : destinationDirectory; - - targetDirectory = Path.GetFullPath(targetDirectory); - Directory.CreateDirectory(targetDirectory); - - var result = await client.DownloadOfflineKitAsync(bundleId, targetDirectory, overwrite, resume, cancellationToken).ConfigureAwait(false); - - logger.LogInformation( - "Bundle {BundleId} stored at {Path} (captured {Captured:u}, sha256:{Digest}).", - result.Descriptor.BundleId, - result.BundlePath, - result.Descriptor.CapturedAt, - result.Descriptor.BundleSha256); - - logger.LogInformation("Manifest saved to {Manifest}.", result.ManifestPath); - - if (!string.IsNullOrWhiteSpace(result.MetadataPath)) - { - logger.LogDebug("Metadata recorded at {Metadata}.", result.MetadataPath); - } - - if (result.BundleSignaturePath is not null) - { - logger.LogInformation("Bundle signature saved to {Signature}.", result.BundleSignaturePath); - } - - if (result.ManifestSignaturePath is not null) - { - logger.LogInformation("Manifest signature saved to {Signature}.", result.ManifestSignaturePath); - } - - CliMetrics.RecordOfflineKitDownload(result.Descriptor.Kind ?? 
"unknown", result.FromCache); - activity?.SetTag("stellaops.cli.bundle_cache", result.FromCache); - Environment.ExitCode = 0; - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to download offline kit bundle."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandlePolicyFindingsListAsync( - IServiceProvider services, - string policyId, - string[] sbomFilters, - string[] statusFilters, - string[] severityFilters, - string? since, - string? cursor, - int? page, - int? pageSize, - string? format, - string? outputPath, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("policy-findings-ls"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.policy.findings.list", ActivityKind.Client); - using var duration = CliMetrics.MeasureCommandDuration("policy findings list"); - - try - { - if (string.IsNullOrWhiteSpace(policyId)) - { - throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); - } - - if (page.HasValue && page.Value < 1) - { - throw new ArgumentException("--page must be greater than or equal to 1.", nameof(page)); - } - - if (pageSize.HasValue && (pageSize.Value < 1 || pageSize.Value > 500)) - { - throw new ArgumentException("--page-size must be between 1 and 500.", nameof(pageSize)); - } - - var normalizedPolicyId = policyId.Trim(); - var sboms = NormalizePolicyFilterValues(sbomFilters); - var statuses = NormalizePolicyFilterValues(statusFilters, toLower: true); - var severities = NormalizePolicyFilterValues(severityFilters); - var sinceValue = ParsePolicySince(since); - var cursorValue = string.IsNullOrWhiteSpace(cursor) ? 
null : cursor.Trim(); - - var query = new PolicyFindingsQuery( - normalizedPolicyId, - sboms, - statuses, - severities, - cursorValue, - page, - pageSize, - sinceValue); - - activity?.SetTag("stellaops.cli.policy_id", normalizedPolicyId); - if (sboms.Count > 0) - { - activity?.SetTag("stellaops.cli.findings.sbom_filters", string.Join(",", sboms)); - } - - if (statuses.Count > 0) - { - activity?.SetTag("stellaops.cli.findings.status_filters", string.Join(",", statuses)); - } - - if (severities.Count > 0) - { - activity?.SetTag("stellaops.cli.findings.severity_filters", string.Join(",", severities)); - } - - if (!string.IsNullOrWhiteSpace(cursorValue)) - { - activity?.SetTag("stellaops.cli.findings.cursor", cursorValue); - } - - if (page.HasValue) - { - activity?.SetTag("stellaops.cli.findings.page", page.Value); - } - - if (pageSize.HasValue) - { - activity?.SetTag("stellaops.cli.findings.page_size", pageSize.Value); - } - - if (sinceValue.HasValue) - { - activity?.SetTag("stellaops.cli.findings.since", sinceValue.Value.ToString("o", CultureInfo.InvariantCulture)); - } - - var result = await client.GetPolicyFindingsAsync(query, cancellationToken).ConfigureAwait(false); - activity?.SetTag("stellaops.cli.findings.count", result.Items.Count); - if (!string.IsNullOrWhiteSpace(result.NextCursor)) - { - activity?.SetTag("stellaops.cli.findings.next_cursor", result.NextCursor); - } - - var payload = BuildPolicyFindingsPayload(normalizedPolicyId, query, result); - - if (!string.IsNullOrWhiteSpace(outputPath)) - { - await WriteJsonPayloadAsync(outputPath!, payload, cancellationToken).ConfigureAwait(false); - logger.LogInformation("Results written to {Path}.", Path.GetFullPath(outputPath!)); - } - - var outputFormat = DeterminePolicyFindingsFormat(format, outputPath); - if (outputFormat == PolicyFindingsOutputFormat.Json) - { - var json = JsonSerializer.Serialize(payload, SimulationJsonOptions); - Console.WriteLine(json); - } - else - { - RenderPolicyFindingsTable(logger, result); - } - - CliMetrics.RecordPolicyFindingsList(result.Items.Count == 0 ? "empty" : "ok"); - Environment.ExitCode = 0; - } - catch (ArgumentException ex) - { - logger.LogError(ex.Message); - CliMetrics.RecordPolicyFindingsList("error"); - Environment.ExitCode = 64; - } - catch (PolicyApiException ex) - { - HandlePolicyFindingsFailure(ex, logger, CliMetrics.RecordPolicyFindingsList); - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to list policy findings."); - CliMetrics.RecordPolicyFindingsList("error"); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandlePolicyFindingsGetAsync( - IServiceProvider services, - string policyId, - string findingId, - string? format, - string? outputPath, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("policy-findings-get"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? 
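// Sketch of the output-format heuristic the findings commands appear to share with
// the policy simulate command later in this file: an explicit --format wins,
// otherwise JSON is chosen when an output file is given or stdout is redirected,
// and a table is used for interactive sessions. Assumed behaviour, not verified
// against DeterminePolicyFindingsFormat itself.
using System;

enum CliOutputFormat { Table, Json }

static class OutputFormatHeuristic
{
    public static CliOutputFormat Resolve(string? format, string? outputPath)
    {
        if (!string.IsNullOrWhiteSpace(format))
        {
            return format.Trim().ToLowerInvariant() switch
            {
                "table" => CliOutputFormat.Table,
                "json" => CliOutputFormat.Json,
                _ => throw new ArgumentException("Invalid format. Use 'table' or 'json'.")
            };
        }

        return !string.IsNullOrWhiteSpace(outputPath) || Console.IsOutputRedirected
            ? CliOutputFormat.Json
            : CliOutputFormat.Table;
    }
}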
LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.policy.findings.get", ActivityKind.Client); - using var duration = CliMetrics.MeasureCommandDuration("policy findings get"); - - try - { - if (string.IsNullOrWhiteSpace(policyId)) - { - throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); - } - - if (string.IsNullOrWhiteSpace(findingId)) - { - throw new ArgumentException("Finding identifier must be provided.", nameof(findingId)); - } - - var normalizedPolicyId = policyId.Trim(); - var normalizedFindingId = findingId.Trim(); - activity?.SetTag("stellaops.cli.policy_id", normalizedPolicyId); - activity?.SetTag("stellaops.cli.finding_id", normalizedFindingId); - - var result = await client.GetPolicyFindingAsync(normalizedPolicyId, normalizedFindingId, cancellationToken).ConfigureAwait(false); - var payload = BuildPolicyFindingPayload(normalizedPolicyId, result); - - if (!string.IsNullOrWhiteSpace(outputPath)) - { - await WriteJsonPayloadAsync(outputPath!, payload, cancellationToken).ConfigureAwait(false); - logger.LogInformation("Finding written to {Path}.", Path.GetFullPath(outputPath!)); - } - - var outputFormat = DeterminePolicyFindingsFormat(format, outputPath); - if (outputFormat == PolicyFindingsOutputFormat.Json) - { - Console.WriteLine(JsonSerializer.Serialize(payload, SimulationJsonOptions)); - } - else - { - RenderPolicyFindingDetails(logger, result); - } - - var outcome = string.IsNullOrWhiteSpace(result.Status) ? "unknown" : result.Status.ToLowerInvariant(); - CliMetrics.RecordPolicyFindingsGet(outcome); - Environment.ExitCode = 0; - } - catch (ArgumentException ex) - { - logger.LogError(ex.Message); - CliMetrics.RecordPolicyFindingsGet("error"); - Environment.ExitCode = 64; - } - catch (PolicyApiException ex) - { - HandlePolicyFindingsFailure(ex, logger, CliMetrics.RecordPolicyFindingsGet); - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to retrieve policy finding."); - CliMetrics.RecordPolicyFindingsGet("error"); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandlePolicyFindingsExplainAsync( - IServiceProvider services, - string policyId, - string findingId, - string? mode, - string? format, - string? outputPath, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("policy-findings-explain"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.policy.findings.explain", ActivityKind.Client); - using var duration = CliMetrics.MeasureCommandDuration("policy findings explain"); - - try - { - if (string.IsNullOrWhiteSpace(policyId)) - { - throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); - } - - if (string.IsNullOrWhiteSpace(findingId)) - { - throw new ArgumentException("Finding identifier must be provided.", nameof(findingId)); - } - - var normalizedPolicyId = policyId.Trim(); - var normalizedFindingId = findingId.Trim(); - var normalizedMode = NormalizeExplainMode(mode); - - activity?.SetTag("stellaops.cli.policy_id", normalizedPolicyId); - activity?.SetTag("stellaops.cli.finding_id", normalizedFindingId); - if (!string.IsNullOrWhiteSpace(normalizedMode)) - { - activity?.SetTag("stellaops.cli.findings.mode", normalizedMode); - } - - var result = await client.GetPolicyFindingExplainAsync(normalizedPolicyId, normalizedFindingId, normalizedMode, cancellationToken).ConfigureAwait(false); - activity?.SetTag("stellaops.cli.findings.step_count", result.Steps.Count); - - var payload = BuildPolicyFindingExplainPayload(normalizedPolicyId, normalizedFindingId, normalizedMode, result); - - if (!string.IsNullOrWhiteSpace(outputPath)) - { - await WriteJsonPayloadAsync(outputPath!, payload, cancellationToken).ConfigureAwait(false); - logger.LogInformation("Explain trace written to {Path}.", Path.GetFullPath(outputPath!)); - } - - var outputFormat = DeterminePolicyFindingsFormat(format, outputPath); - if (outputFormat == PolicyFindingsOutputFormat.Json) - { - Console.WriteLine(JsonSerializer.Serialize(payload, SimulationJsonOptions)); - } - else - { - RenderPolicyFindingExplain(logger, result); - } - - CliMetrics.RecordPolicyFindingsExplain(result.Steps.Count == 0 ? "empty" : "ok"); - Environment.ExitCode = 0; - } - catch (ArgumentException ex) - { - logger.LogError(ex.Message); - CliMetrics.RecordPolicyFindingsExplain("error"); - Environment.ExitCode = 64; - } - catch (PolicyApiException ex) - { - HandlePolicyFindingsFailure(ex, logger, CliMetrics.RecordPolicyFindingsExplain); - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to fetch policy explain trace."); - CliMetrics.RecordPolicyFindingsExplain("error"); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandlePolicyActivateAsync( - IServiceProvider services, - string policyId, - int version, - string? note, - bool runNow, - string? scheduledAt, - string? priority, - bool rollback, - string? incidentId, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("policy-activate"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.policy.activate", ActivityKind.Client); - activity?.SetTag("stellaops.cli.command", "policy activate"); - using var duration = CliMetrics.MeasureCommandDuration("policy activate"); - - try - { - if (string.IsNullOrWhiteSpace(policyId)) - { - throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); - } - - if (version <= 0) - { - throw new ArgumentOutOfRangeException(nameof(version), "Version must be greater than zero."); - } - - var normalizedPolicyId = policyId.Trim(); - DateTimeOffset? scheduled = null; - if (!string.IsNullOrWhiteSpace(scheduledAt)) - { - if (!DateTimeOffset.TryParse(scheduledAt, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, out var parsed)) - { - throw new ArgumentException("Scheduled timestamp must be a valid ISO-8601 value.", nameof(scheduledAt)); - } - - scheduled = parsed; - } - - var request = new PolicyActivationRequest( - runNow, - scheduled, - NormalizePolicyPriority(priority), - rollback, - string.IsNullOrWhiteSpace(incidentId) ? null : incidentId.Trim(), - string.IsNullOrWhiteSpace(note) ? null : note.Trim()); - - activity?.SetTag("stellaops.cli.policy_id", normalizedPolicyId); - activity?.SetTag("stellaops.cli.policy_version", version); - if (request.RunNow) - { - activity?.SetTag("stellaops.cli.policy_run_now", true); - } - - if (request.ScheduledAt.HasValue) - { - activity?.SetTag("stellaops.cli.policy_scheduled_at", request.ScheduledAt.Value.ToString("o", CultureInfo.InvariantCulture)); - } - - if (!string.IsNullOrWhiteSpace(request.Priority)) - { - activity?.SetTag("stellaops.cli.policy_priority", request.Priority); - } - - if (request.Rollback) - { - activity?.SetTag("stellaops.cli.policy_rollback", true); - } - - var result = await client.ActivatePolicyRevisionAsync(normalizedPolicyId, version, request, cancellationToken).ConfigureAwait(false); - - var outcome = NormalizePolicyActivationOutcome(result.Status); - CliMetrics.RecordPolicyActivation(outcome); - RenderPolicyActivationResult(result, request); - - var exitCode = DeterminePolicyActivationExitCode(outcome); - Environment.ExitCode = exitCode; - - if (exitCode == 0) - { - logger.LogInformation("Policy {PolicyId} v{Version} activation status: {Status}.", result.Revision.PolicyId, result.Revision.Version, outcome); - } - else - { - logger.LogWarning("Policy {PolicyId} v{Version} requires additional approval (status: {Status}).", result.Revision.PolicyId, result.Revision.Version, outcome); - } - } - catch (ArgumentException ex) - { - logger.LogError(ex.Message); - CliMetrics.RecordPolicyActivation("error"); - Environment.ExitCode = 64; - } - catch (PolicyApiException ex) - { - HandlePolicyActivationFailure(ex, logger); - } - catch (Exception ex) - { - logger.LogError(ex, "Policy activation failed."); - CliMetrics.RecordPolicyActivation("error"); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandlePolicySimulateAsync( - IServiceProvider services, - string policyId, - int? baseVersion, - int? candidateVersion, - IReadOnlyList sbomArguments, - IReadOnlyList environmentArguments, - string? format, - string? 
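// Sketch of the --scheduled-at handling above: the ISO-8601 input is parsed with
// AssumeUniversal | AdjustToUniversal so the activation timestamp is unambiguous UTC.
using System;
using System.Globalization;

static class ScheduleParsing
{
    public static DateTimeOffset? Parse(string? scheduledAt)
    {
        if (string.IsNullOrWhiteSpace(scheduledAt))
        {
            return null;
        }

        if (!DateTimeOffset.TryParse(
                scheduledAt,
                CultureInfo.InvariantCulture,
                DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal,
                out var parsed))
        {
            throw new ArgumentException("Scheduled timestamp must be a valid ISO-8601 value.");
        }

        return parsed;
    }
}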
outputPath, - bool explain, - bool failOnDiff, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("policy-simulate"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.policy.simulate", ActivityKind.Client); - activity?.SetTag("stellaops.cli.command", "policy simulate"); - activity?.SetTag("stellaops.cli.policy_id", policyId); - if (baseVersion.HasValue) - { - activity?.SetTag("stellaops.cli.base_version", baseVersion.Value); - } - if (candidateVersion.HasValue) - { - activity?.SetTag("stellaops.cli.candidate_version", candidateVersion.Value); - } - using var duration = CliMetrics.MeasureCommandDuration("policy simulate"); - - try - { - if (string.IsNullOrWhiteSpace(policyId)) - { - throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); - } - - var normalizedPolicyId = policyId.Trim(); - var sbomSet = NormalizePolicySbomSet(sbomArguments); - var environment = ParsePolicyEnvironment(environmentArguments); - - var input = new PolicySimulationInput( - baseVersion, - candidateVersion, - sbomSet, - environment, - explain); - - var result = await client.SimulatePolicyAsync(normalizedPolicyId, input, cancellationToken).ConfigureAwait(false); - - activity?.SetTag("stellaops.cli.diff_added", result.Diff.Added); - activity?.SetTag("stellaops.cli.diff_removed", result.Diff.Removed); - if (result.Diff.BySeverity.Count > 0) - { - activity?.SetTag("stellaops.cli.severity_buckets", result.Diff.BySeverity.Count); - } - - var outputFormat = DeterminePolicySimulationFormat(format, outputPath); - var payload = BuildPolicySimulationPayload(normalizedPolicyId, baseVersion, candidateVersion, sbomSet, environment, result); - - if (!string.IsNullOrWhiteSpace(outputPath)) - { - await WriteSimulationOutputAsync(outputPath!, payload, cancellationToken).ConfigureAwait(false); - logger.LogInformation("Simulation results written to {Path}.", Path.GetFullPath(outputPath!)); - } - - RenderPolicySimulationResult(logger, payload, result, outputFormat); - - var exitCode = DetermineSimulationExitCode(result, failOnDiff); - Environment.ExitCode = exitCode; - - var outcome = exitCode == 20 - ? "diff_blocked" - : (result.Diff.Added + result.Diff.Removed) > 0 ? "diff" : "clean"; - CliMetrics.RecordPolicySimulation(outcome); - - if (exitCode == 20) - { - logger.LogWarning("Differences detected; exiting with code 20 due to --fail-on-diff."); - } - - if (!string.IsNullOrWhiteSpace(result.ExplainUri)) - { - activity?.SetTag("stellaops.cli.explain_uri", result.ExplainUri); - } - } - catch (ArgumentException ex) - { - logger.LogError(ex.Message); - CliMetrics.RecordPolicySimulation("error"); - Environment.ExitCode = 64; - } - catch (PolicyApiException ex) - { - HandlePolicySimulationFailure(ex, logger); - } - catch (Exception ex) - { - logger.LogError(ex, "Policy simulation failed."); - CliMetrics.RecordPolicySimulation("error"); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandleOfflineKitImportAsync( - IServiceProvider services, - string bundlePath, - string? manifestPath, - string? bundleSignaturePath, - string? 
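// Sketch of the simulation exit-code rule suggested by the logging above: exit 20
// only when --fail-on-diff is set and the diff is non-empty, otherwise 0. The
// DiffCounts record is an illustrative stand-in for the real diff model.
static class SimulationExit
{
    public readonly record struct DiffCounts(int Added, int Removed);

    public static int Determine(DiffCounts diff, bool failOnDiff)
        => failOnDiff && (diff.Added + diff.Removed) > 0 ? 20 : 0;
}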
manifestSignaturePath, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var options = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("offline-kit-import"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.offline.kit.import", ActivityKind.Client); - using var duration = CliMetrics.MeasureCommandDuration("offline kit import"); - - try - { - if (string.IsNullOrWhiteSpace(bundlePath)) - { - logger.LogError("Bundle path is required."); - Environment.ExitCode = 1; - return; - } - - bundlePath = Path.GetFullPath(bundlePath); - if (!File.Exists(bundlePath)) - { - logger.LogError("Bundle file {Path} not found.", bundlePath); - Environment.ExitCode = 1; - return; - } - - var metadata = await LoadOfflineKitMetadataAsync(bundlePath, cancellationToken).ConfigureAwait(false); - if (metadata is not null) - { - manifestPath ??= metadata.ManifestPath; - bundleSignaturePath ??= metadata.BundleSignaturePath; - manifestSignaturePath ??= metadata.ManifestSignaturePath; - } - - manifestPath = NormalizeFilePath(manifestPath); - bundleSignaturePath = NormalizeFilePath(bundleSignaturePath); - manifestSignaturePath = NormalizeFilePath(manifestSignaturePath); - - if (manifestPath is null) - { - manifestPath = TryInferManifestPath(bundlePath); - if (manifestPath is not null) - { - logger.LogDebug("Using inferred manifest path {Path}.", manifestPath); - } - } - - if (manifestPath is not null && !File.Exists(manifestPath)) - { - logger.LogError("Manifest file {Path} not found.", manifestPath); - Environment.ExitCode = 1; - return; - } - - if (bundleSignaturePath is not null && !File.Exists(bundleSignaturePath)) - { - logger.LogWarning("Bundle signature {Path} not found; skipping.", bundleSignaturePath); - bundleSignaturePath = null; - } - - if (manifestSignaturePath is not null && !File.Exists(manifestSignaturePath)) - { - logger.LogWarning("Manifest signature {Path} not found; skipping.", manifestSignaturePath); - manifestSignaturePath = null; - } - - if (metadata is not null) - { - var computedBundleDigest = await ComputeSha256Async(bundlePath, cancellationToken).ConfigureAwait(false); - if (!DigestsEqual(computedBundleDigest, metadata.BundleSha256)) - { - logger.LogError("Bundle digest mismatch. Expected sha256:{Expected} but computed sha256:{Actual}.", metadata.BundleSha256, computedBundleDigest); - Environment.ExitCode = 1; - return; - } - - if (manifestPath is not null) - { - var computedManifestDigest = await ComputeSha256Async(manifestPath, cancellationToken).ConfigureAwait(false); - if (!DigestsEqual(computedManifestDigest, metadata.ManifestSha256)) - { - logger.LogError("Manifest digest mismatch. 
Expected sha256:{Expected} but computed sha256:{Actual}.", metadata.ManifestSha256, computedManifestDigest); - Environment.ExitCode = 1; - return; - } - } - } - - var request = new OfflineKitImportRequest( - bundlePath, - manifestPath, - bundleSignaturePath, - manifestSignaturePath, - metadata?.BundleId, - metadata?.BundleSha256, - metadata?.BundleSize, - metadata?.CapturedAt, - metadata?.Channel, - metadata?.Kind, - metadata?.IsDelta, - metadata?.BaseBundleId, - metadata?.ManifestSha256, - metadata?.ManifestSize); - - var result = await client.ImportOfflineKitAsync(request, cancellationToken).ConfigureAwait(false); - CliMetrics.RecordOfflineKitImport(result.Status); - - logger.LogInformation( - "Import {ImportId} submitted at {Submitted:u} with status {Status}.", - string.IsNullOrWhiteSpace(result.ImportId) ? "" : result.ImportId, - result.SubmittedAt, - string.IsNullOrWhiteSpace(result.Status) ? "queued" : result.Status); - - if (!string.IsNullOrWhiteSpace(result.Message)) - { - logger.LogInformation(result.Message); - } - - Environment.ExitCode = 0; - } - catch (Exception ex) - { - logger.LogError(ex, "Offline kit import failed."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - public static async Task HandleOfflineKitStatusAsync( - IServiceProvider services, - bool asJson, - bool verbose, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("offline-kit-status"); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity("cli.offline.kit.status", ActivityKind.Client); - using var duration = CliMetrics.MeasureCommandDuration("offline kit status"); - - try - { - var status = await client.GetOfflineKitStatusAsync(cancellationToken).ConfigureAwait(false); - - if (asJson) - { - var payload = new - { - bundleId = status.BundleId, - channel = status.Channel, - kind = status.Kind, - isDelta = status.IsDelta, - baseBundleId = status.BaseBundleId, - capturedAt = status.CapturedAt, - importedAt = status.ImportedAt, - sha256 = status.BundleSha256, - sizeBytes = status.BundleSize, - components = status.Components.Select(component => new - { - component.Name, - component.Version, - component.Digest, - component.CapturedAt, - component.SizeBytes - }) - }; - - var json = JsonSerializer.Serialize(payload, new JsonSerializerOptions(JsonSerializerDefaults.Web) { WriteIndented = true }); - Console.WriteLine(json); - } - else - { - if (string.IsNullOrWhiteSpace(status.BundleId)) - { - logger.LogInformation("No offline kit bundle has been imported yet."); - } - else - { - logger.LogInformation( - "Current bundle {BundleId} ({Kind}) captured {Captured:u}, imported {Imported:u}, sha256:{Digest}, size {Size}.", - status.BundleId, - status.Kind ?? "unknown", - status.CapturedAt ?? default, - status.ImportedAt ?? default, - status.BundleSha256 ?? "", - status.BundleSize.HasValue ? 
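// Sketch of the import-time digest check above: hash the file with SHA-256, strip
// an optional "sha256:" prefix from the expected value, and compare the lower-case
// hex strings. A missing expected digest is treated as "nothing to verify".
using System;
using System.IO;
using System.Security.Cryptography;
using System.Threading;
using System.Threading.Tasks;

static class DigestCheck
{
    public static async Task<bool> MatchesAsync(string path, string? expected, CancellationToken cancellationToken)
    {
        if (string.IsNullOrWhiteSpace(expected))
        {
            return true;
        }

        await using var stream = File.OpenRead(path);
        var computed = Convert.ToHexString(await SHA256.HashDataAsync(stream, cancellationToken)).ToLowerInvariant();

        var normalized = expected.Trim();
        if (normalized.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase))
        {
            normalized = normalized["sha256:".Length..];
        }

        return string.Equals(computed, normalized, StringComparison.OrdinalIgnoreCase);
    }
}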
status.BundleSize.Value.ToString("N0", CultureInfo.InvariantCulture) : ""); - } - - if (status.Components.Count > 0) - { - var table = new Table().AddColumns("Component", "Version", "Digest", "Captured", "Size (bytes)"); - foreach (var component in status.Components) - { - table.AddRow( - component.Name, - string.IsNullOrWhiteSpace(component.Version) ? "-" : component.Version!, - string.IsNullOrWhiteSpace(component.Digest) ? "-" : $"sha256:{component.Digest}", - component.CapturedAt?.ToString("u", CultureInfo.InvariantCulture) ?? "-", - component.SizeBytes.HasValue ? component.SizeBytes.Value.ToString("N0", CultureInfo.InvariantCulture) : "-"); - } - - AnsiConsole.Write(table); - } - } - - Environment.ExitCode = 0; - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to read offline kit status."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - private static async Task LoadOfflineKitMetadataAsync(string bundlePath, CancellationToken cancellationToken) - { - var metadataPath = bundlePath + ".metadata.json"; - if (!File.Exists(metadataPath)) - { - return null; - } - - try - { - await using var stream = File.OpenRead(metadataPath); - return await JsonSerializer.DeserializeAsync(stream, cancellationToken: cancellationToken).ConfigureAwait(false); - } - catch - { - return null; - } - } - - private static string? NormalizeFilePath(string? path) - { - if (string.IsNullOrWhiteSpace(path)) - { - return null; - } - - return Path.GetFullPath(path); - } - - private static string? TryInferManifestPath(string bundlePath) - { - var directory = Path.GetDirectoryName(bundlePath); - if (string.IsNullOrWhiteSpace(directory)) - { - return null; - } - - var baseName = Path.GetFileName(bundlePath); - if (string.IsNullOrWhiteSpace(baseName)) - { - return null; - } - - baseName = Path.GetFileNameWithoutExtension(baseName); - if (baseName.EndsWith(".tar", StringComparison.OrdinalIgnoreCase)) - { - baseName = Path.GetFileNameWithoutExtension(baseName); - } - - var candidates = new[] - { - Path.Combine(directory, $"offline-manifest-{baseName}.json"), - Path.Combine(directory, "offline-manifest.json") - }; - - foreach (var candidate in candidates) - { - if (File.Exists(candidate)) - { - return Path.GetFullPath(candidate); - } - } - - return Directory.EnumerateFiles(directory, "offline-manifest*.json").FirstOrDefault(); - } - - private static bool DigestsEqual(string computed, string? 
expected) - { - if (string.IsNullOrWhiteSpace(expected)) - { - return true; - } - - return string.Equals(NormalizeDigest(computed), NormalizeDigest(expected), StringComparison.OrdinalIgnoreCase); - } - - private static string NormalizeDigest(string digest) - { - var value = digest.Trim(); - if (value.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase)) - { - value = value.Substring("sha256:".Length); - } - - return value.ToLowerInvariant(); - } - - private static async Task ComputeSha256Async(string path, CancellationToken cancellationToken) - { - await using var stream = File.OpenRead(path); - var hash = await SHA256.HashDataAsync(stream, cancellationToken).ConfigureAwait(false); - return Convert.ToHexString(hash).ToLowerInvariant(); - } - - private static bool TryParseDetachedJws(string value, out string encodedHeader, out string encodedSignature) - { - encodedHeader = string.Empty; - encodedSignature = string.Empty; - - if (string.IsNullOrWhiteSpace(value)) - { - return false; - } - - var parts = value.Split('.'); - if (parts.Length != 3) - { - return false; - } - - encodedHeader = parts[0]; - encodedSignature = parts[2]; - return parts[1].Length == 0; - } - - private static byte[] Base64UrlDecode(string value) - { - var normalized = value.Replace('-', '+').Replace('_', '/'); - var padding = normalized.Length % 4; - if (padding == 2) - { - normalized += "=="; - } - else if (padding == 3) - { - normalized += "="; - } - else if (padding == 1) - { - throw new FormatException("Invalid Base64Url value."); - } - - return Convert.FromBase64String(normalized); - } - - private static CryptoSigningKey CreateVerificationSigningKey( - string keyId, - string algorithm, - string? providerHint, - string keyPem, - string keyPath) - { - if (string.IsNullOrWhiteSpace(keyPem)) - { - throw new InvalidOperationException("Verification key PEM content is empty."); - } - - using var ecdsa = ECDsa.Create(); - ecdsa.ImportFromPem(keyPem); - - var parameters = ecdsa.ExportParameters(includePrivateParameters: false); - if (parameters.D is null || parameters.D.Length == 0) - { - parameters.D = new byte[] { 0x01 }; - } - - var metadata = new Dictionary(StringComparer.OrdinalIgnoreCase) - { - ["source"] = Path.GetFullPath(keyPath), - ["verificationOnly"] = "true" - }; - - return new CryptoSigningKey( - new CryptoKeyReference(keyId, providerHint), - algorithm, - in parameters, - DateTimeOffset.UtcNow, - metadata: metadata); - } - - private static string FormatDuration(TimeSpan duration) - { - if (duration <= TimeSpan.Zero) - { - return "expired"; - } - - if (duration.TotalDays >= 1) - { - var days = (int)duration.TotalDays; - var hours = duration.Hours; - return hours > 0 - ? FormattableString.Invariant($"{days}d {hours}h") - : FormattableString.Invariant($"{days}d"); - } - - if (duration.TotalHours >= 1) - { - return FormattableString.Invariant($"{(int)duration.TotalHours}h {duration.Minutes}m"); - } - - if (duration.TotalMinutes >= 1) - { - return FormattableString.Invariant($"{(int)duration.TotalMinutes}m {duration.Seconds}s"); - } - - return FormattableString.Invariant($"{duration.Seconds}s"); - } - - private static bool TryExtractJwtClaims( - string accessToken, - out Dictionary claims, - out DateTimeOffset? issuedAt, - out DateTimeOffset? 
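// Sketch of the detached-JWS shape check above: a detached signature still has
// three dot-separated segments, but the middle (payload) segment must be empty.
static class DetachedJwsShape
{
    public static bool TryParse(string? value, out string header, out string signature)
    {
        header = signature = string.Empty;
        var parts = (value ?? string.Empty).Split('.');
        if (parts.Length != 3 || parts[1].Length != 0)
        {
            return false;
        }

        header = parts[0];
        signature = parts[2];
        return true;
    }
}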
notBefore) - { - claims = new Dictionary(StringComparer.OrdinalIgnoreCase); - issuedAt = null; - notBefore = null; - - if (string.IsNullOrWhiteSpace(accessToken)) - { - return false; - } - - var parts = accessToken.Split('.'); - if (parts.Length < 2) - { - return false; - } - - if (!TryDecodeBase64Url(parts[1], out var payloadBytes)) - { - return false; - } - - try - { - using var document = JsonDocument.Parse(payloadBytes); - foreach (var property in document.RootElement.EnumerateObject()) - { - var value = FormatJsonValue(property.Value); - claims[property.Name] = value; - - if (issuedAt is null && property.NameEquals("iat") && TryParseUnixSeconds(property.Value, out var parsedIat)) - { - issuedAt = parsedIat; - } - - if (notBefore is null && property.NameEquals("nbf") && TryParseUnixSeconds(property.Value, out var parsedNbf)) - { - notBefore = parsedNbf; - } - } - - return true; - } - catch (JsonException) - { - claims.Clear(); - issuedAt = null; - notBefore = null; - return false; - } - } - - private static bool TryDecodeBase64Url(string value, out byte[] bytes) - { - bytes = Array.Empty(); - - if (string.IsNullOrWhiteSpace(value)) - { - return false; - } - - var normalized = value.Replace('-', '+').Replace('_', '/'); - var padding = normalized.Length % 4; - if (padding is 2 or 3) - { - normalized = normalized.PadRight(normalized.Length + (4 - padding), '='); - } - else if (padding == 1) - { - return false; - } - - try - { - bytes = Convert.FromBase64String(normalized); - return true; - } - catch (FormatException) - { - return false; - } - } - - private static string FormatJsonValue(JsonElement element) - { - return element.ValueKind switch - { - JsonValueKind.String => element.GetString() ?? string.Empty, - JsonValueKind.Number => element.TryGetInt64(out var longValue) - ? 
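// Minimal Base64Url decoding sketch matching the helpers above: restore the
// standard alphabet, pad to a multiple of four, and reject impossible lengths.
using System;

static class Base64Url
{
    public static byte[] Decode(string value)
    {
        var normalized = value.Replace('-', '+').Replace('_', '/');
        switch (normalized.Length % 4)
        {
            case 2: normalized += "=="; break;
            case 3: normalized += "="; break;
            case 1: throw new FormatException("Invalid Base64Url value.");
        }

        return Convert.FromBase64String(normalized);
    }
}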
longValue.ToString(CultureInfo.InvariantCulture) - : element.GetDouble().ToString(CultureInfo.InvariantCulture), - JsonValueKind.True => "true", - JsonValueKind.False => "false", - JsonValueKind.Null => "null", - JsonValueKind.Array => FormatArray(element), - JsonValueKind.Object => element.GetRawText(), - _ => element.GetRawText() - }; - } - - private static string FormatArray(JsonElement array) - { - var values = new List(); - foreach (var item in array.EnumerateArray()) - { - values.Add(FormatJsonValue(item)); - } - - return string.Join(", ", values); - } - - private static bool TryParseUnixSeconds(JsonElement element, out DateTimeOffset value) - { - value = default; - - if (element.ValueKind == JsonValueKind.Number) - { - if (element.TryGetInt64(out var seconds)) - { - value = DateTimeOffset.FromUnixTimeSeconds(seconds); - return true; - } - - if (element.TryGetDouble(out var doubleValue)) - { - value = DateTimeOffset.FromUnixTimeSeconds((long)doubleValue); - return true; - } - } - - if (element.ValueKind == JsonValueKind.String) - { - var text = element.GetString(); - if (!string.IsNullOrWhiteSpace(text) && long.TryParse(text, NumberStyles.Integer, CultureInfo.InvariantCulture, out var seconds)) - { - value = DateTimeOffset.FromUnixTimeSeconds(seconds); - return true; - } - } - - return false; - } - - private static List CollectAdditionalClaims(Dictionary claims) - { - var result = new List(); - foreach (var pair in claims) - { - if (CommonClaimNames.Contains(pair.Key)) - { - continue; - } - - result.Add(FormattableString.Invariant($"{pair.Key}={pair.Value}")); - } - - result.Sort(StringComparer.OrdinalIgnoreCase); - return result; - } - - private static readonly HashSet CommonClaimNames = new(StringComparer.OrdinalIgnoreCase) - { - "aud", - "client_id", - "exp", - "iat", - "iss", - "nbf", - "scope", - "scopes", - "sub", - "token_type", - "jti" - }; - - private static async Task ExecuteExcititorCommandAsync( - IServiceProvider services, - string commandName, - bool verbose, - IDictionary? activityTags, - Func> operation, - CancellationToken cancellationToken) - { - await using var scope = services.CreateAsyncScope(); - var client = scope.ServiceProvider.GetRequiredService(); - var logger = scope.ServiceProvider.GetRequiredService().CreateLogger(commandName.Replace(' ', '-')); - var verbosity = scope.ServiceProvider.GetRequiredService(); - var previousLevel = verbosity.MinimumLevel; - verbosity.MinimumLevel = verbose ? 
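// Sketch of the claim inspection above: the JWT payload is the second dot-separated
// segment, Base64Url-decoded and parsed as JSON; "iat" and "nbf" are unix seconds.
// Uses the Base64Url helper sketched earlier; method names are illustrative.
using System;
using System.Text.Json;

static class JwtPeek
{
    public static DateTimeOffset? IssuedAt(string accessToken)
    {
        var parts = accessToken.Split('.');
        if (parts.Length < 2)
        {
            return null;
        }

        using var document = JsonDocument.Parse(Base64Url.Decode(parts[1]));
        return document.RootElement.TryGetProperty("iat", out var iat) && iat.TryGetInt64(out var seconds)
            ? DateTimeOffset.FromUnixTimeSeconds(seconds)
            : (DateTimeOffset?)null;
    }
}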
LogLevel.Debug : LogLevel.Information; - using var activity = CliActivitySource.Instance.StartActivity($"cli.{commandName.Replace(' ', '.')}" , ActivityKind.Client); - activity?.SetTag("stellaops.cli.command", commandName); - if (activityTags is not null) - { - foreach (var tag in activityTags) - { - activity?.SetTag(tag.Key, tag.Value); - } - } - using var duration = CliMetrics.MeasureCommandDuration(commandName); - - try - { - var result = await operation(client).ConfigureAwait(false); - if (result.Success) - { - if (!string.IsNullOrWhiteSpace(result.Message)) - { - logger.LogInformation(result.Message); - } - else - { - logger.LogInformation("Operation completed successfully."); - } - - if (!string.IsNullOrWhiteSpace(result.Location)) - { - logger.LogInformation("Location: {Location}", result.Location); - } - - if (result.Payload is JsonElement payload && payload.ValueKind is not JsonValueKind.Undefined and not JsonValueKind.Null) - { - logger.LogDebug("Response payload: {Payload}", payload.ToString()); - } - - Environment.ExitCode = 0; - } - else - { - logger.LogError(string.IsNullOrWhiteSpace(result.Message) ? "Operation failed." : result.Message); - Environment.ExitCode = 1; - } - } - catch (Exception ex) - { - logger.LogError(ex, "Excititor operation failed."); - Environment.ExitCode = 1; - } - finally - { - verbosity.MinimumLevel = previousLevel; - } - } - - private static async Task> GatherImageDigestsAsync( - IReadOnlyList inline, - string? filePath, - CancellationToken cancellationToken) - { - var results = new List(); - var seen = new HashSet(StringComparer.Ordinal); - - void AddCandidates(string? candidate) - { - foreach (var image in SplitImageCandidates(candidate)) - { - if (seen.Add(image)) - { - results.Add(image); - } - } - } - - if (inline is not null) - { - foreach (var entry in inline) - { - AddCandidates(entry); - } - } - - if (!string.IsNullOrWhiteSpace(filePath)) - { - var path = Path.GetFullPath(filePath); - if (!File.Exists(path)) - { - throw new FileNotFoundException("Input file not found.", path); - } - - foreach (var line in File.ReadLines(path)) - { - cancellationToken.ThrowIfCancellationRequested(); - AddCandidates(line); - } - } - - if (Console.IsInputRedirected) - { - while (!cancellationToken.IsCancellationRequested) - { - var line = await Console.In.ReadLineAsync().ConfigureAwait(false); - if (line is null) - { - break; - } - - AddCandidates(line); - } - } - - return new ReadOnlyCollection(results); - } - - private static IEnumerable SplitImageCandidates(string? 
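// Sketch of the image-list parsing above: strip '#' comments, split on commas and
// whitespace, and de-duplicate while preserving first-seen order.
using System;
using System.Collections.Generic;

static class ImageListParsing
{
    public static IReadOnlyList<string> Parse(IEnumerable<string?> lines)
    {
        var seen = new HashSet<string>(StringComparer.Ordinal);
        var result = new List<string>();

        foreach (var raw in lines)
        {
            var candidate = raw?.Trim() ?? string.Empty;
            var commentIndex = candidate.IndexOf('#');
            if (commentIndex >= 0)
            {
                candidate = candidate[..commentIndex].Trim();
            }

            if (candidate.Length == 0)
            {
                continue;
            }

            foreach (var token in candidate.Split(new[] { ',', ' ', '\t' }, StringSplitOptions.RemoveEmptyEntries))
            {
                if (seen.Add(token))
                {
                    result.Add(token);
                }
            }
        }

        return result;
    }
}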
raw) - { - if (string.IsNullOrWhiteSpace(raw)) - { - yield break; - } - - var candidate = raw.Trim(); - var commentIndex = candidate.IndexOf('#'); - if (commentIndex >= 0) - { - candidate = candidate[..commentIndex].Trim(); - } - - if (candidate.Length == 0) - { - yield break; - } - - var tokens = candidate.Split(new[] { ',', ' ', '\t' }, StringSplitOptions.RemoveEmptyEntries); - foreach (var token in tokens) - { - var trimmed = token.Trim(); - if (trimmed.Length > 0) - { - yield return trimmed; - } - } - } - - private static IReadOnlyDictionary ParseLabelSelectors(IReadOnlyList labelArguments) - { - if (labelArguments is null || labelArguments.Count == 0) - { - return EmptyLabelSelectors; - } - - var labels = new Dictionary(StringComparer.OrdinalIgnoreCase); - foreach (var raw in labelArguments) - { - if (string.IsNullOrWhiteSpace(raw)) - { - continue; - } - - var trimmed = raw.Trim(); - var delimiter = trimmed.IndexOf('='); - if (delimiter <= 0 || delimiter == trimmed.Length - 1) - { - throw new ArgumentException($"Invalid label '{raw}'. Expected key=value format."); - } - - var key = trimmed[..delimiter].Trim(); - var value = trimmed[(delimiter + 1)..].Trim(); - if (key.Length == 0) - { - throw new ArgumentException($"Invalid label '{raw}'. Label key cannot be empty."); - } - - labels[key] = value; - } - - return labels.Count == 0 ? EmptyLabelSelectors : new ReadOnlyDictionary(labels); - } - - private sealed record ExcititorExportManifestSummary( - string ExportId, - string? Format, - string? Algorithm, - string? Digest, - long? SizeBytes, - bool? FromCache, - DateTimeOffset? CreatedAt, - string? RekorLocation, - string? RekorIndex, - string? RekorInclusionUrl); - - private static ExcititorExportManifestSummary? TryParseExportManifest(JsonElement? payload) - { - if (payload is null || payload.Value.ValueKind is JsonValueKind.Undefined or JsonValueKind.Null) - { - return null; - } - - var element = payload.Value; - var exportId = GetStringProperty(element, "exportId"); - if (string.IsNullOrWhiteSpace(exportId)) - { - return null; - } - - var format = GetStringProperty(element, "format"); - var algorithm = default(string?); - var digest = default(string?); - - if (TryGetPropertyCaseInsensitive(element, "artifact", out var artifact) && artifact.ValueKind == JsonValueKind.Object) - { - algorithm = GetStringProperty(artifact, "algorithm"); - digest = GetStringProperty(artifact, "digest"); - } - - var sizeBytes = GetInt64Property(element, "sizeBytes"); - var fromCache = GetBooleanProperty(element, "fromCache"); - var createdAt = GetDateTimeOffsetProperty(element, "createdAt"); - - string? rekorLocation = null; - string? rekorIndex = null; - string? 
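// Sketch of the key=value label parsing above: the first '=' splits key from value,
// empty keys or missing values are rejected, and a later duplicate key overwrites
// an earlier one.
using System;
using System.Collections.Generic;

static class LabelParsing
{
    public static IReadOnlyDictionary<string, string> Parse(IEnumerable<string?> arguments)
    {
        var labels = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);

        foreach (var raw in arguments)
        {
            if (string.IsNullOrWhiteSpace(raw))
            {
                continue;
            }

            var trimmed = raw.Trim();
            var delimiter = trimmed.IndexOf('=');
            if (delimiter <= 0 || delimiter == trimmed.Length - 1)
            {
                throw new ArgumentException($"Invalid label '{raw}'. Expected key=value format.");
            }

            labels[trimmed[..delimiter].Trim()] = trimmed[(delimiter + 1)..].Trim();
        }

        return labels;
    }
}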
rekorInclusion = null; - - if (TryGetPropertyCaseInsensitive(element, "attestation", out var attestation) && attestation.ValueKind == JsonValueKind.Object) - { - if (TryGetPropertyCaseInsensitive(attestation, "rekor", out var rekor) && rekor.ValueKind == JsonValueKind.Object) - { - rekorLocation = GetStringProperty(rekor, "location"); - rekorIndex = GetStringProperty(rekor, "logIndex"); - var inclusion = GetStringProperty(rekor, "inclusionProofUri"); - if (!string.IsNullOrWhiteSpace(inclusion)) - { - rekorInclusion = inclusion; - } - } - } - - return new ExcititorExportManifestSummary( - exportId.Trim(), - format, - algorithm, - digest, - sizeBytes, - fromCache, - createdAt, - rekorLocation, - rekorIndex, - rekorInclusion); - } - - private static bool TryGetPropertyCaseInsensitive(JsonElement element, string propertyName, out JsonElement property) - { - if (element.ValueKind == JsonValueKind.Object && element.TryGetProperty(propertyName, out property)) - { - return true; - } - - if (element.ValueKind == JsonValueKind.Object) - { - foreach (var candidate in element.EnumerateObject()) - { - if (string.Equals(candidate.Name, propertyName, StringComparison.OrdinalIgnoreCase)) - { - property = candidate.Value; - return true; - } - } - } - - property = default; - return false; - } - - private static string? GetStringProperty(JsonElement element, string propertyName) - { - if (TryGetPropertyCaseInsensitive(element, propertyName, out var property)) - { - return property.ValueKind switch - { - JsonValueKind.String => property.GetString(), - JsonValueKind.Number => property.ToString(), - _ => null - }; - } - - return null; - } - - private static bool? GetBooleanProperty(JsonElement element, string propertyName) - { - if (TryGetPropertyCaseInsensitive(element, propertyName, out var property)) - { - return property.ValueKind switch - { - JsonValueKind.True => true, - JsonValueKind.False => false, - JsonValueKind.String when bool.TryParse(property.GetString(), out var parsed) => parsed, - _ => null - }; - } - - return null; - } - - private static long? GetInt64Property(JsonElement element, string propertyName) - { - if (TryGetPropertyCaseInsensitive(element, propertyName, out var property)) - { - if (property.ValueKind == JsonValueKind.Number && property.TryGetInt64(out var value)) - { - return value; - } - - if (property.ValueKind == JsonValueKind.String - && long.TryParse(property.GetString(), NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsed)) - { - return parsed; - } - } - - return null; - } - - private static DateTimeOffset? GetDateTimeOffsetProperty(JsonElement element, string propertyName) - { - if (TryGetPropertyCaseInsensitive(element, propertyName, out var property) - && property.ValueKind == JsonValueKind.String - && DateTimeOffset.TryParse(property.GetString(), CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal, out var value)) - { - return value.ToUniversalTime(); - } - - return null; - } - - private static string BuildDigestDisplay(string? 
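// Sketch of the case-insensitive JSON lookup above: try the exact property name
// first, then fall back to scanning the object with OrdinalIgnoreCase comparison.
using System;
using System.Text.Json;

static class JsonLookup
{
    public static bool TryGet(JsonElement element, string name, out JsonElement value)
    {
        value = default;
        if (element.ValueKind != JsonValueKind.Object)
        {
            return false;
        }

        if (element.TryGetProperty(name, out value))
        {
            return true;
        }

        foreach (var property in element.EnumerateObject())
        {
            if (string.Equals(property.Name, name, StringComparison.OrdinalIgnoreCase))
            {
                value = property.Value;
                return true;
            }
        }

        return false;
    }
}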
algorithm, string digest) - { - if (string.IsNullOrWhiteSpace(digest)) - { - return string.Empty; - } - - if (digest.Contains(':', StringComparison.Ordinal)) - { - return digest; - } - - if (string.IsNullOrWhiteSpace(algorithm) || algorithm.Equals("sha256", StringComparison.OrdinalIgnoreCase)) - { - return $"sha256:{digest}"; - } - - return $"{algorithm}:{digest}"; - } - - private static string FormatSize(long sizeBytes) - { - if (sizeBytes < 0) - { - return $"{sizeBytes} bytes"; - } - - string[] units = { "bytes", "KB", "MB", "GB", "TB" }; - double size = sizeBytes; - var unit = 0; - - while (size >= 1024 && unit < units.Length - 1) - { - size /= 1024; - unit++; - } - - return unit == 0 ? $"{sizeBytes} bytes" : $"{size:0.##} {units[unit]}"; - } - - private static string ResolveExportOutputPath(string outputPath, ExcititorExportManifestSummary manifest) - { - if (string.IsNullOrWhiteSpace(outputPath)) - { - throw new ArgumentException("Output path must be provided.", nameof(outputPath)); - } - - var fullPath = Path.GetFullPath(outputPath); - if (Directory.Exists(fullPath) - || outputPath.EndsWith(Path.DirectorySeparatorChar.ToString(), StringComparison.Ordinal) - || outputPath.EndsWith(Path.AltDirectorySeparatorChar.ToString(), StringComparison.Ordinal)) - { - return Path.Combine(fullPath, BuildExportFileName(manifest)); - } - - var directory = Path.GetDirectoryName(fullPath); - if (!string.IsNullOrEmpty(directory) && !Directory.Exists(directory)) - { - Directory.CreateDirectory(directory); - } - - return fullPath; - } - - private static string BuildExportFileName(ExcititorExportManifestSummary manifest) - { - var token = !string.IsNullOrWhiteSpace(manifest.Digest) - ? manifest.Digest! - : manifest.ExportId; - - token = SanitizeToken(token); - if (token.Length > 40) - { - token = token[..40]; - } - - var extension = DetermineExportExtension(manifest.Format); - return $"stellaops-excititor-{token}{extension}"; - } - - private static string DetermineExportExtension(string? format) - { - if (string.IsNullOrWhiteSpace(format)) - { - return ".bin"; - } - - return format switch - { - not null when format.Equals("jsonl", StringComparison.OrdinalIgnoreCase) => ".jsonl", - not null when format.Equals("json", StringComparison.OrdinalIgnoreCase) => ".json", - not null when format.Equals("openvex", StringComparison.OrdinalIgnoreCase) => ".json", - not null when format.Equals("csaf", StringComparison.OrdinalIgnoreCase) => ".json", - _ => ".bin" - }; - } - - private static string SanitizeToken(string token) - { - var builder = new StringBuilder(token.Length); - foreach (var ch in token) - { - if (char.IsLetterOrDigit(ch)) - { - builder.Append(char.ToLowerInvariant(ch)); - } - } - - if (builder.Length == 0) - { - builder.Append("export"); - } - - return builder.ToString(); - } - - private static string? 
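// Sketch of the size formatting above: divide by 1024 until the value fits the next
// unit, keeping plain bytes for small (or negative) inputs.
static class SizeFormatting
{
    public static string Format(long sizeBytes)
    {
        if (sizeBytes < 1024)
        {
            return $"{sizeBytes} bytes";
        }

        string[] units = { "bytes", "KB", "MB", "GB", "TB" };
        double size = sizeBytes;
        var unit = 0;
        while (size >= 1024 && unit < units.Length - 1)
        {
            size /= 1024;
            unit++;
        }

        return $"{size:0.##} {units[unit]}";
    }
}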
ResolveLocationUrl(StellaOpsCliOptions options, string location) - { - if (string.IsNullOrWhiteSpace(location)) - { - return null; - } - - if (Uri.TryCreate(location, UriKind.Absolute, out var absolute)) - { - return absolute.ToString(); - } - - if (!string.IsNullOrWhiteSpace(options?.BackendUrl) && Uri.TryCreate(options.BackendUrl, UriKind.Absolute, out var baseUri)) - { - if (!location.StartsWith("/", StringComparison.Ordinal)) - { - location = "/" + location; - } - - return new Uri(baseUri, location).ToString(); - } - - return location; - } - - private static string BuildRuntimePolicyJson(RuntimePolicyEvaluationResult result, IReadOnlyList requestedImages) - { - var orderedImages = BuildImageOrder(requestedImages, result.Decisions.Keys); - var results = new Dictionary(StringComparer.Ordinal); - - foreach (var image in orderedImages) - { - if (result.Decisions.TryGetValue(image, out var decision)) - { - results[image] = BuildDecisionMap(decision); - } - } - - var options = new JsonSerializerOptions(JsonSerializerDefaults.Web) - { - WriteIndented = true, - DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull - }; - - var payload = new Dictionary(StringComparer.Ordinal) - { - ["ttlSeconds"] = result.TtlSeconds, - ["expiresAtUtc"] = result.ExpiresAtUtc?.ToString("O", CultureInfo.InvariantCulture), - ["policyRevision"] = result.PolicyRevision, - ["results"] = results - }; - - return JsonSerializer.Serialize(payload, options); - } - - private static IDictionary BuildDecisionMap(RuntimePolicyImageDecision decision) - { - var map = new Dictionary(StringComparer.Ordinal) - { - ["policyVerdict"] = decision.PolicyVerdict, - ["signed"] = decision.Signed, - ["hasSbomReferrers"] = decision.HasSbomReferrers - }; - - if (decision.Reasons.Count > 0) - { - map["reasons"] = decision.Reasons; - } - - if (decision.Rekor is not null) - { - var rekorMap = new Dictionary(StringComparer.Ordinal); - if (!string.IsNullOrWhiteSpace(decision.Rekor.Uuid)) - { - rekorMap["uuid"] = decision.Rekor.Uuid; - } - - if (!string.IsNullOrWhiteSpace(decision.Rekor.Url)) - { - rekorMap["url"] = decision.Rekor.Url; - } - - if (decision.Rekor.Verified.HasValue) - { - rekorMap["verified"] = decision.Rekor.Verified; - } - - if (rekorMap.Count > 0) - { - map["rekor"] = rekorMap; - } - } - - foreach (var kvp in decision.AdditionalProperties) - { - map[kvp.Key] = kvp.Value; - } - - return map; - } - - private static void DisplayRuntimePolicyResults(ILogger logger, RuntimePolicyEvaluationResult result, IReadOnlyList requestedImages) - { - var orderedImages = BuildImageOrder(requestedImages, result.Decisions.Keys); - var summary = new Dictionary(StringComparer.OrdinalIgnoreCase); - - if (AnsiConsole.Profile.Capabilities.Interactive) - { - var table = new Table().Border(TableBorder.Rounded) - .AddColumns("Image", "Verdict", "Signed", "SBOM Ref", "Quieted", "Confidence", "Reasons", "Attestation"); - - foreach (var image in orderedImages) - { - if (result.Decisions.TryGetValue(image, out var decision)) - { - table.AddRow( - image, - decision.PolicyVerdict, - FormatBoolean(decision.Signed), - FormatBoolean(decision.HasSbomReferrers), - FormatQuietedDisplay(decision.AdditionalProperties), - FormatConfidenceDisplay(decision.AdditionalProperties), - decision.Reasons.Count > 0 ? string.Join(Environment.NewLine, decision.Reasons) : "-", - FormatAttestation(decision.Rekor)); - - summary[decision.PolicyVerdict] = summary.TryGetValue(decision.PolicyVerdict, out var count) ? 
count + 1 : 1; - - if (decision.AdditionalProperties.Count > 0) - { - var metadata = string.Join(", ", decision.AdditionalProperties.Select(kvp => $"{kvp.Key}={FormatAdditionalValue(kvp.Value)}")); - logger.LogDebug("Metadata for {Image}: {Metadata}", image, metadata); - } - } - else - { - table.AddRow(image, "", "-", "-", "-", "-", "-", "-"); - } - } - - AnsiConsole.Write(table); - } - else - { - foreach (var image in orderedImages) - { - if (result.Decisions.TryGetValue(image, out var decision)) - { - var reasons = decision.Reasons.Count > 0 ? string.Join(", ", decision.Reasons) : "none"; - logger.LogInformation( - "{Image} -> verdict={Verdict} signed={Signed} sbomRef={Sbom} quieted={Quieted} confidence={Confidence} attestation={Attestation} reasons={Reasons}", - image, - decision.PolicyVerdict, - FormatBoolean(decision.Signed), - FormatBoolean(decision.HasSbomReferrers), - FormatQuietedDisplay(decision.AdditionalProperties), - FormatConfidenceDisplay(decision.AdditionalProperties), - FormatAttestation(decision.Rekor), - reasons); - - summary[decision.PolicyVerdict] = summary.TryGetValue(decision.PolicyVerdict, out var count) ? count + 1 : 1; - - if (decision.AdditionalProperties.Count > 0) - { - var metadata = string.Join(", ", decision.AdditionalProperties.Select(kvp => $"{kvp.Key}={FormatAdditionalValue(kvp.Value)}")); - logger.LogDebug("Metadata for {Image}: {Metadata}", image, metadata); - } - } - else - { - logger.LogWarning("{Image} -> no decision returned by backend.", image); - } - } - } - - if (summary.Count > 0) - { - var summaryText = string.Join(", ", summary.Select(kvp => $"{kvp.Key}:{kvp.Value}")); - logger.LogInformation("Verdict summary: {Summary}", summaryText); - } - } - - private static IReadOnlyList BuildImageOrder(IReadOnlyList requestedImages, IEnumerable actual) - { - var order = new List(); - var seen = new HashSet(StringComparer.Ordinal); - - if (requestedImages is not null) - { - foreach (var image in requestedImages) - { - if (!string.IsNullOrWhiteSpace(image)) - { - var trimmed = image.Trim(); - if (seen.Add(trimmed)) - { - order.Add(trimmed); - } - } - } - } - - foreach (var image in actual) - { - if (!string.IsNullOrWhiteSpace(image)) - { - var trimmed = image.Trim(); - if (seen.Add(trimmed)) - { - order.Add(trimmed); - } - } - } - - return new ReadOnlyCollection(order); - } - - private static string FormatBoolean(bool? value) - => value is null ? "unknown" : value.Value ? "yes" : "no"; - - private static string FormatQuietedDisplay(IReadOnlyDictionary metadata) - { - var quieted = GetMetadataBoolean(metadata, "quieted", "quiet"); - var quietedBy = GetMetadataString(metadata, "quietedBy", "quietedReason"); - - if (quieted is true) - { - return string.IsNullOrWhiteSpace(quietedBy) ? "yes" : $"yes ({quietedBy})"; - } - - if (quieted is false) - { - return "no"; - } - - return string.IsNullOrWhiteSpace(quietedBy) ? "-" : $"? 
({quietedBy})"; - } - - private static string FormatConfidenceDisplay(IReadOnlyDictionary metadata) - { - var confidence = GetMetadataDouble(metadata, "confidence"); - var confidenceBand = GetMetadataString(metadata, "confidenceBand", "confidenceTier"); - - if (confidence.HasValue && !string.IsNullOrWhiteSpace(confidenceBand)) - { - return string.Format(CultureInfo.InvariantCulture, "{0:0.###} ({1})", confidence.Value, confidenceBand); - } - - if (confidence.HasValue) - { - return confidence.Value.ToString("0.###", CultureInfo.InvariantCulture); - } - - if (!string.IsNullOrWhiteSpace(confidenceBand)) - { - return confidenceBand!; - } - - return "-"; - } - - private static string FormatAttestation(RuntimePolicyRekorReference? rekor) - { - if (rekor is null) - { - return "-"; - } - - var uuid = string.IsNullOrWhiteSpace(rekor.Uuid) ? null : rekor.Uuid; - var url = string.IsNullOrWhiteSpace(rekor.Url) ? null : rekor.Url; - var verified = rekor.Verified; - - var core = uuid ?? url; - if (!string.IsNullOrEmpty(core)) - { - if (verified.HasValue) - { - var suffix = verified.Value ? " (verified)" : " (unverified)"; - return core + suffix; - } - - return core!; - } - - if (verified.HasValue) - { - return verified.Value ? "verified" : "unverified"; - } - - return "-"; - } - - private static bool? GetMetadataBoolean(IReadOnlyDictionary metadata, params string[] keys) - { - foreach (var key in keys) - { - if (metadata.TryGetValue(key, out var value) && value is not null) - { - switch (value) - { - case bool b: - return b; - case string s when bool.TryParse(s, out var parsed): - return parsed; - } - } - } - - return null; - } - - private static string? GetMetadataString(IReadOnlyDictionary metadata, params string[] keys) - { - foreach (var key in keys) - { - if (metadata.TryGetValue(key, out var value) && value is not null) - { - if (value is string s) - { - return string.IsNullOrWhiteSpace(s) ? null : s; - } - } - } - - return null; - } - - private static double? GetMetadataDouble(IReadOnlyDictionary metadata, params string[] keys) - { - foreach (var key in keys) - { - if (metadata.TryGetValue(key, out var value) && value is not null) - { - switch (value) - { - case double d: - return d; - case float f: - return f; - case decimal m: - return (double)m; - case long l: - return l; - case int i: - return i; - case string s when double.TryParse(s, NumberStyles.Float | NumberStyles.AllowThousands, CultureInfo.InvariantCulture, out var parsed): - return parsed; - } - } - } - - return null; - } - - private static PolicySimulationOutputFormat DeterminePolicySimulationFormat(string? value, string? outputPath) - { - if (!string.IsNullOrWhiteSpace(value)) - { - return value.Trim().ToLowerInvariant() switch - { - "table" => PolicySimulationOutputFormat.Table, - "json" => PolicySimulationOutputFormat.Json, - _ => throw new ArgumentException("Invalid format. Use 'table' or 'json'.") - }; - } - - if (!string.IsNullOrWhiteSpace(outputPath) || Console.IsOutputRedirected) - { - return PolicySimulationOutputFormat.Json; - } - - return PolicySimulationOutputFormat.Table; - } - - private static object BuildPolicySimulationPayload( - string policyId, - int? baseVersion, - int? candidateVersion, - IReadOnlyList sbomSet, - IReadOnlyDictionary environment, - PolicySimulationResult result) - => new - { - policyId, - baseVersion, - candidateVersion, - sbomSet = sbomSet.Count == 0 ? Array.Empty() : sbomSet, - environment = environment.Count == 0 ? 
null : environment, - diff = result.Diff, - explainUri = result.ExplainUri - }; - - private static void RenderPolicySimulationResult( - ILogger logger, - object payload, - PolicySimulationResult result, - PolicySimulationOutputFormat format) - { - if (format == PolicySimulationOutputFormat.Json) - { - var json = JsonSerializer.Serialize(payload, SimulationJsonOptions); - Console.WriteLine(json); - return; - } - - logger.LogInformation( - "Policy diff summary — Added: {Added}, Removed: {Removed}, Unchanged: {Unchanged}.", - result.Diff.Added, - result.Diff.Removed, - result.Diff.Unchanged); - - if (result.Diff.BySeverity.Count > 0) - { - if (AnsiConsole.Profile.Capabilities.Interactive) - { - var table = new Table().AddColumns("Severity", "Up", "Down"); - foreach (var entry in result.Diff.BySeverity.OrderBy(kvp => kvp.Key, StringComparer.Ordinal)) - { - table.AddRow( - entry.Key, - FormatDelta(entry.Value.Up), - FormatDelta(entry.Value.Down)); - } - - AnsiConsole.Write(table); - } - else - { - foreach (var entry in result.Diff.BySeverity.OrderBy(kvp => kvp.Key, StringComparer.Ordinal)) - { - logger.LogInformation("Severity {Severity}: up={Up}, down={Down}", entry.Key, entry.Value.Up ?? 0, entry.Value.Down ?? 0); - } - } - } - - if (result.Diff.RuleHits.Count > 0) - { - if (AnsiConsole.Profile.Capabilities.Interactive) - { - var table = new Table().AddColumns("Rule", "Up", "Down"); - foreach (var hit in result.Diff.RuleHits) - { - table.AddRow( - string.IsNullOrWhiteSpace(hit.RuleName) ? hit.RuleId : $"{hit.RuleName} ({hit.RuleId})", - FormatDelta(hit.Up), - FormatDelta(hit.Down)); - } - - AnsiConsole.Write(table); - } - else - { - foreach (var hit in result.Diff.RuleHits) - { - logger.LogInformation("Rule {RuleId}: up={Up}, down={Down}", hit.RuleId, hit.Up ?? 0, hit.Down ?? 0); - } - } - } - - if (!string.IsNullOrWhiteSpace(result.ExplainUri)) - { - logger.LogInformation("Explain trace available at {ExplainUri}.", result.ExplainUri); - } - } - - private static IReadOnlyList NormalizePolicySbomSet(IReadOnlyList arguments) - { - if (arguments is null || arguments.Count == 0) - { - return EmptyPolicySbomSet; - } - - var set = new SortedSet(StringComparer.Ordinal); - foreach (var raw in arguments) - { - if (string.IsNullOrWhiteSpace(raw)) - { - continue; - } - - var trimmed = raw.Trim(); - if (trimmed.Length > 0) - { - set.Add(trimmed); - } - } - - if (set.Count == 0) - { - return EmptyPolicySbomSet; - } - - var list = set.ToList(); - return new ReadOnlyCollection(list); - } - - private static IReadOnlyDictionary ParsePolicyEnvironment(IReadOnlyList arguments) - { - if (arguments is null || arguments.Count == 0) - { - return EmptyPolicyEnvironment; - } - - var env = new SortedDictionary(StringComparer.Ordinal); - foreach (var raw in arguments) - { - if (string.IsNullOrWhiteSpace(raw)) - { - continue; - } - - var trimmed = raw.Trim(); - var separator = trimmed.IndexOf('='); - if (separator <= 0 || separator == trimmed.Length - 1) - { - throw new ArgumentException($"Invalid environment assignment '{raw}'. Expected key=value."); - } - - var key = trimmed[..separator].Trim().ToLowerInvariant(); - if (string.IsNullOrWhiteSpace(key)) - { - throw new ArgumentException($"Invalid environment assignment '{raw}'. Expected key=value."); - } - - var valueToken = trimmed[(separator + 1)..].Trim(); - env[key] = ParsePolicyEnvironmentValue(valueToken); - } - - return env.Count == 0 ? EmptyPolicyEnvironment : new ReadOnlyDictionary(env); - } - - private static object? 
ParsePolicyEnvironmentValue(string token) - { - if (string.IsNullOrWhiteSpace(token)) - { - return string.Empty; - } - - var value = token; - if ((value.Length >= 2 && value.StartsWith("\"", StringComparison.Ordinal) && value.EndsWith("\"", StringComparison.Ordinal)) || - (value.Length >= 2 && value.StartsWith("'", StringComparison.Ordinal) && value.EndsWith("'", StringComparison.Ordinal))) - { - value = value[1..^1]; - } - - if (string.Equals(value, "null", StringComparison.OrdinalIgnoreCase)) - { - return null; - } - - if (bool.TryParse(value, out var boolResult)) - { - return boolResult; - } - - if (long.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var longResult)) - { - return longResult; - } - - if (double.TryParse(value, NumberStyles.Float | NumberStyles.AllowThousands, CultureInfo.InvariantCulture, out var doubleResult)) - { - return doubleResult; - } - - return value; - } - - private static Task WriteSimulationOutputAsync(string outputPath, object payload, CancellationToken cancellationToken) - => WriteJsonPayloadAsync(outputPath, payload, cancellationToken); - - private static async Task WriteJsonPayloadAsync(string outputPath, object payload, CancellationToken cancellationToken) - { - var fullPath = Path.GetFullPath(outputPath); - var directory = Path.GetDirectoryName(fullPath); - if (!string.IsNullOrWhiteSpace(directory)) - { - Directory.CreateDirectory(directory); - } - - var json = JsonSerializer.Serialize(payload, SimulationJsonOptions); - await File.WriteAllTextAsync(fullPath, json + Environment.NewLine, cancellationToken).ConfigureAwait(false); - } - - private static int DetermineSimulationExitCode(PolicySimulationResult result, bool failOnDiff) - { - if (!failOnDiff) - { - return 0; - } - - return (result.Diff.Added + result.Diff.Removed) > 0 ? 
20 : 0; - } - - private static void HandlePolicySimulationFailure(PolicyApiException exception, ILogger logger) - { - var exitCode = exception.ErrorCode switch - { - "ERR_POL_001" => 10, - "ERR_POL_002" or "ERR_POL_005" => 12, - "ERR_POL_003" => 21, - "ERR_POL_004" => 22, - "ERR_POL_006" => 23, - _ when exception.StatusCode == HttpStatusCode.Forbidden || exception.StatusCode == HttpStatusCode.Unauthorized => 12, - _ => 1 - }; - - if (string.IsNullOrWhiteSpace(exception.ErrorCode)) - { - logger.LogError("Policy simulation failed ({StatusCode}): {Message}", (int)exception.StatusCode, exception.Message); - } - else - { - logger.LogError("Policy simulation failed ({StatusCode} {Code}): {Message}", (int)exception.StatusCode, exception.ErrorCode, exception.Message); - } - - CliMetrics.RecordPolicySimulation("error"); - Environment.ExitCode = exitCode; - } - - private static void HandlePolicyActivationFailure(PolicyApiException exception, ILogger logger) - { - var exitCode = exception.ErrorCode switch - { - "ERR_POL_002" => 70, - "ERR_POL_003" => 71, - "ERR_POL_004" => 72, - _ when exception.StatusCode == HttpStatusCode.Forbidden || exception.StatusCode == HttpStatusCode.Unauthorized => 12, - _ => 1 - }; - - if (string.IsNullOrWhiteSpace(exception.ErrorCode)) - { - logger.LogError("Policy activation failed ({StatusCode}): {Message}", (int)exception.StatusCode, exception.Message); - } - else - { - logger.LogError("Policy activation failed ({StatusCode} {Code}): {Message}", (int)exception.StatusCode, exception.ErrorCode, exception.Message); - } - - CliMetrics.RecordPolicyActivation("error"); - Environment.ExitCode = exitCode; - } - - private static IReadOnlyList NormalizePolicyFilterValues(string[] values, bool toLower = false) - { - if (values is null || values.Length == 0) - { - return Array.Empty(); - } - - var set = new HashSet(StringComparer.OrdinalIgnoreCase); - var list = new List(); - foreach (var raw in values) - { - var candidate = raw?.Trim(); - if (string.IsNullOrWhiteSpace(candidate)) - { - continue; - } - - var normalized = toLower ? candidate.ToLowerInvariant() : candidate; - if (set.Add(normalized)) - { - list.Add(normalized); - } - } - - return list.Count == 0 ? Array.Empty() : list; - } - - private static string? NormalizePolicyPriority(string? priority) - { - if (string.IsNullOrWhiteSpace(priority)) - { - return null; - } - - var normalized = priority.Trim(); - return string.IsNullOrWhiteSpace(normalized) ? null : normalized.ToLowerInvariant(); - } - - private static string NormalizePolicyActivationOutcome(string status) - { - if (string.IsNullOrWhiteSpace(status)) - { - return "unknown"; - } - - return status.Trim().ToLowerInvariant(); - } - - private static int DeterminePolicyActivationExitCode(string outcome) - => string.Equals(outcome, "pending_second_approval", StringComparison.Ordinal) ? 75 : 0; - - private static void RenderPolicyActivationResult(PolicyActivationResult result, PolicyActivationRequest request) - { - if (AnsiConsole.Profile.Capabilities.Interactive) - { - var summary = new Table().Expand(); - summary.Border(TableBorder.Rounded); - summary.AddColumn(new TableColumn("[grey]Field[/]").LeftAligned()); - summary.AddColumn(new TableColumn("[grey]Value[/]").LeftAligned()); - summary.AddRow("Policy", Markup.Escape($"{result.Revision.PolicyId} v{result.Revision.Version}")); - summary.AddRow("Status", FormatActivationStatus(result.Status)); - summary.AddRow("Requires 2 approvals", result.Revision.RequiresTwoPersonApproval ? 
"[yellow]yes[/]" : "[green]no[/]"); - summary.AddRow("Created (UTC)", Markup.Escape(FormatUpdatedAt(result.Revision.CreatedAt))); - summary.AddRow("Activated (UTC)", result.Revision.ActivatedAt.HasValue - ? Markup.Escape(FormatUpdatedAt(result.Revision.ActivatedAt.Value)) - : "[grey](not yet active)[/]"); - - if (request.RunNow) - { - summary.AddRow("Run", "[green]immediate[/]"); - } - else if (request.ScheduledAt.HasValue) - { - summary.AddRow("Scheduled at", Markup.Escape(FormatUpdatedAt(request.ScheduledAt.Value))); - } - - if (!string.IsNullOrWhiteSpace(request.Priority)) - { - summary.AddRow("Priority", Markup.Escape(request.Priority!)); - } - - if (request.Rollback) - { - summary.AddRow("Rollback", "[yellow]yes[/]"); - } - - if (!string.IsNullOrWhiteSpace(request.IncidentId)) - { - summary.AddRow("Incident", Markup.Escape(request.IncidentId!)); - } - - if (!string.IsNullOrWhiteSpace(request.Comment)) - { - summary.AddRow("Note", Markup.Escape(request.Comment!)); - } - - AnsiConsole.Write(summary); - - if (result.Revision.Approvals.Count > 0) - { - var approvalTable = new Table().Title("[grey]Approvals[/]"); - approvalTable.Border(TableBorder.Minimal); - approvalTable.AddColumn(new TableColumn("Actor").LeftAligned()); - approvalTable.AddColumn(new TableColumn("Approved (UTC)").LeftAligned()); - approvalTable.AddColumn(new TableColumn("Comment").LeftAligned()); - - foreach (var approval in result.Revision.Approvals) - { - var comment = string.IsNullOrWhiteSpace(approval.Comment) ? "-" : approval.Comment!; - approvalTable.AddRow( - Markup.Escape(approval.ActorId), - Markup.Escape(FormatUpdatedAt(approval.ApprovedAt)), - Markup.Escape(comment)); - } - - AnsiConsole.Write(approvalTable); - } - else - { - AnsiConsole.MarkupLine("[grey]No activation approvals recorded yet.[/]"); - } - } - else - { - Console.WriteLine(FormattableString.Invariant($"Policy: {result.Revision.PolicyId} v{result.Revision.Version}")); - Console.WriteLine(FormattableString.Invariant($"Status: {NormalizePolicyActivationOutcome(result.Status)}")); - Console.WriteLine(FormattableString.Invariant($"Requires 2 approvals: {(result.Revision.RequiresTwoPersonApproval ? "yes" : "no")}")); - Console.WriteLine(FormattableString.Invariant($"Created (UTC): {FormatUpdatedAt(result.Revision.CreatedAt)}")); - Console.WriteLine(FormattableString.Invariant($"Activated (UTC): {(result.Revision.ActivatedAt.HasValue ? FormatUpdatedAt(result.Revision.ActivatedAt.Value) : "(not yet active)")}")); - - if (request.RunNow) - { - Console.WriteLine("Run: immediate"); - } - else if (request.ScheduledAt.HasValue) - { - Console.WriteLine(FormattableString.Invariant($"Scheduled at: {FormatUpdatedAt(request.ScheduledAt.Value)}")); - } - - if (!string.IsNullOrWhiteSpace(request.Priority)) - { - Console.WriteLine(FormattableString.Invariant($"Priority: {request.Priority}")); - } - - if (request.Rollback) - { - Console.WriteLine("Rollback: yes"); - } - - if (!string.IsNullOrWhiteSpace(request.IncidentId)) - { - Console.WriteLine(FormattableString.Invariant($"Incident: {request.IncidentId}")); - } - - if (!string.IsNullOrWhiteSpace(request.Comment)) - { - Console.WriteLine(FormattableString.Invariant($"Note: {request.Comment}")); - } - - if (result.Revision.Approvals.Count == 0) - { - Console.WriteLine("Approvals: none"); - } - else - { - foreach (var approval in result.Revision.Approvals) - { - var comment = string.IsNullOrWhiteSpace(approval.Comment) ? 
"-" : approval.Comment; - Console.WriteLine(FormattableString.Invariant($"Approval: {approval.ActorId} at {FormatUpdatedAt(approval.ApprovedAt)} ({comment})")); - } - } - } - } - - private static string FormatActivationStatus(string status) - { - var normalized = NormalizePolicyActivationOutcome(status); - return normalized switch - { - "activated" => "[green]activated[/]", - "already_active" => "[yellow]already_active[/]", - "pending_second_approval" => "[yellow]pending_second_approval[/]", - _ => "[red]" + Markup.Escape(string.IsNullOrWhiteSpace(status) ? "unknown" : status) + "[/]" - }; - } - - private static DateTimeOffset? ParsePolicySince(string? value) - { - if (string.IsNullOrWhiteSpace(value)) - { - return null; - } - - if (DateTimeOffset.TryParse( - value.Trim(), - CultureInfo.InvariantCulture, - DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, - out var parsed)) - { - return parsed.ToUniversalTime(); - } - - throw new ArgumentException("Invalid --since value. Use an ISO-8601 timestamp."); - } - - private static string? NormalizeExplainMode(string? mode) - => string.IsNullOrWhiteSpace(mode) ? null : mode.Trim().ToLowerInvariant(); - - private static PolicyFindingsOutputFormat DeterminePolicyFindingsFormat(string? value, string? outputPath) - { - if (!string.IsNullOrWhiteSpace(value)) - { - return value.Trim().ToLowerInvariant() switch - { - "table" => PolicyFindingsOutputFormat.Table, - "json" => PolicyFindingsOutputFormat.Json, - _ => throw new ArgumentException("Invalid format. Use 'table' or 'json'.") - }; - } - - if (!string.IsNullOrWhiteSpace(outputPath) || Console.IsOutputRedirected) - { - return PolicyFindingsOutputFormat.Json; - } - - return PolicyFindingsOutputFormat.Table; - } - - private static object BuildPolicyFindingsPayload( - string policyId, - PolicyFindingsQuery query, - PolicyFindingsPage page) - => new - { - policyId, - filters = new - { - sbom = query.SbomIds, - status = query.Statuses, - severity = query.Severities, - cursor = query.Cursor, - page = query.Page, - pageSize = query.PageSize, - since = query.Since?.ToUniversalTime().ToString("o", CultureInfo.InvariantCulture) - }, - items = page.Items.Select(item => new - { - findingId = item.FindingId, - status = item.Status, - severity = new - { - normalized = item.Severity.Normalized, - score = item.Severity.Score - }, - sbomId = item.SbomId, - advisoryIds = item.AdvisoryIds, - vex = item.Vex is null ? null : new - { - winningStatementId = item.Vex.WinningStatementId, - source = item.Vex.Source, - status = item.Vex.Status - }, - policyVersion = item.PolicyVersion, - updatedAt = item.UpdatedAt == DateTimeOffset.MinValue ? null : item.UpdatedAt.ToUniversalTime().ToString("o", CultureInfo.InvariantCulture), - runId = item.RunId - }), - nextCursor = page.NextCursor, - totalCount = page.TotalCount - }; - - private static object BuildPolicyFindingPayload(string policyId, PolicyFindingDocument finding) - => new - { - policyId, - finding = new - { - findingId = finding.FindingId, - status = finding.Status, - severity = new - { - normalized = finding.Severity.Normalized, - score = finding.Severity.Score - }, - sbomId = finding.SbomId, - advisoryIds = finding.AdvisoryIds, - vex = finding.Vex is null ? null : new - { - winningStatementId = finding.Vex.WinningStatementId, - source = finding.Vex.Source, - status = finding.Vex.Status - }, - policyVersion = finding.PolicyVersion, - updatedAt = finding.UpdatedAt == DateTimeOffset.MinValue ? 
null : finding.UpdatedAt.ToUniversalTime().ToString("o", CultureInfo.InvariantCulture), - runId = finding.RunId - } - }; - - private static object BuildPolicyFindingExplainPayload( - string policyId, - string findingId, - string? mode, - PolicyFindingExplainResult explain) - => new - { - policyId, - findingId, - mode, - explain = new - { - policyVersion = explain.PolicyVersion, - steps = explain.Steps.Select(step => new - { - rule = step.Rule, - status = step.Status, - action = step.Action, - score = step.Score, - inputs = step.Inputs, - evidence = step.Evidence - }), - sealedHints = explain.SealedHints.Select(hint => hint.Message) - } - }; - - private static void RenderPolicyFindingsTable(ILogger logger, PolicyFindingsPage page) - { - var items = page.Items; - if (items.Count == 0) - { - if (AnsiConsole.Profile.Capabilities.Interactive) - { - AnsiConsole.MarkupLine("[yellow]No findings matched the provided filters.[/]"); - } - else - { - logger.LogWarning("No findings matched the provided filters."); - } - return; - } - - if (AnsiConsole.Profile.Capabilities.Interactive) - { - var table = new Table() - .Border(TableBorder.Rounded) - .Centered(); - - table.AddColumn("Finding"); - table.AddColumn("Status"); - table.AddColumn("Severity"); - table.AddColumn("Score"); - table.AddColumn("SBOM"); - table.AddColumn("Advisories"); - table.AddColumn("Updated (UTC)"); - - foreach (var item in items) - { - table.AddRow( - Markup.Escape(item.FindingId), - Markup.Escape(item.Status), - Markup.Escape(item.Severity.Normalized), - Markup.Escape(FormatScore(item.Severity.Score)), - Markup.Escape(item.SbomId), - Markup.Escape(FormatListPreview(item.AdvisoryIds)), - Markup.Escape(FormatUpdatedAt(item.UpdatedAt))); - } - - AnsiConsole.Write(table); - } - else - { - foreach (var item in items) - { - logger.LogInformation( - "{Finding} — Status {Status}, Severity {Severity} ({Score}), SBOM {Sbom}, Updated {Updated}", - item.FindingId, - item.Status, - item.Severity.Normalized, - item.Severity.Score?.ToString("0.00", CultureInfo.InvariantCulture) ?? "n/a", - item.SbomId, - FormatUpdatedAt(item.UpdatedAt)); - } - } - - logger.LogInformation("{Count} finding(s).", items.Count); - - if (page.TotalCount.HasValue) - { - logger.LogInformation("Total available: {Total}", page.TotalCount.Value); - } - - if (!string.IsNullOrWhiteSpace(page.NextCursor)) - { - logger.LogInformation("Next cursor: {Cursor}", page.NextCursor); - } - } - - private static void RenderPolicyFindingDetails(ILogger logger, PolicyFindingDocument finding) - { - if (AnsiConsole.Profile.Capabilities.Interactive) - { - var table = new Table() - .Border(TableBorder.Rounded) - .AddColumn("Field") - .AddColumn("Value"); - - table.AddRow("Finding", Markup.Escape(finding.FindingId)); - table.AddRow("Status", Markup.Escape(finding.Status)); - table.AddRow("Severity", Markup.Escape(FormatSeverity(finding.Severity))); - table.AddRow("SBOM", Markup.Escape(finding.SbomId)); - table.AddRow("Policy Version", Markup.Escape(finding.PolicyVersion.ToString(CultureInfo.InvariantCulture))); - table.AddRow("Updated (UTC)", Markup.Escape(FormatUpdatedAt(finding.UpdatedAt))); - table.AddRow("Run Id", Markup.Escape(string.IsNullOrWhiteSpace(finding.RunId) ? 
"(none)" : finding.RunId)); - table.AddRow("Advisories", Markup.Escape(FormatListPreview(finding.AdvisoryIds))); - table.AddRow("VEX", Markup.Escape(FormatVexMetadata(finding.Vex))); - - AnsiConsole.Write(table); - } - else - { - logger.LogInformation("Finding {Finding}", finding.FindingId); - logger.LogInformation(" Status: {Status}", finding.Status); - logger.LogInformation(" Severity: {Severity}", FormatSeverity(finding.Severity)); - logger.LogInformation(" SBOM: {Sbom}", finding.SbomId); - logger.LogInformation(" Policy version: {Version}", finding.PolicyVersion); - logger.LogInformation(" Updated (UTC): {Updated}", FormatUpdatedAt(finding.UpdatedAt)); - if (!string.IsNullOrWhiteSpace(finding.RunId)) - { - logger.LogInformation(" Run Id: {Run}", finding.RunId); - } - if (finding.AdvisoryIds.Count > 0) - { - logger.LogInformation(" Advisories: {Advisories}", string.Join(", ", finding.AdvisoryIds)); - } - if (!string.IsNullOrWhiteSpace(FormatVexMetadata(finding.Vex))) - { - logger.LogInformation(" VEX: {Vex}", FormatVexMetadata(finding.Vex)); - } - } - } - - private static void RenderPolicyFindingExplain(ILogger logger, PolicyFindingExplainResult explain) - { - if (explain.Steps.Count == 0) - { - if (AnsiConsole.Profile.Capabilities.Interactive) - { - AnsiConsole.MarkupLine("[yellow]No explain steps were returned.[/]"); - } - else - { - logger.LogWarning("No explain steps were returned."); - } - } - else if (AnsiConsole.Profile.Capabilities.Interactive) - { - var table = new Table() - .Border(TableBorder.Rounded) - .AddColumn("Rule") - .AddColumn("Status") - .AddColumn("Action") - .AddColumn("Score") - .AddColumn("Inputs") - .AddColumn("Evidence"); - - foreach (var step in explain.Steps) - { - table.AddRow( - Markup.Escape(step.Rule), - Markup.Escape(step.Status ?? "(n/a)"), - Markup.Escape(step.Action ?? "(n/a)"), - Markup.Escape(step.Score.HasValue ? step.Score.Value.ToString("0.00", CultureInfo.InvariantCulture) : "-"), - Markup.Escape(FormatKeyValuePairs(step.Inputs)), - Markup.Escape(FormatKeyValuePairs(step.Evidence))); - } - - AnsiConsole.Write(table); - } - else - { - logger.LogInformation("{Count} explain step(s).", explain.Steps.Count); - foreach (var step in explain.Steps) - { - logger.LogInformation( - "Rule {Rule} — Status {Status}, Action {Action}, Score {Score}, Inputs {Inputs}", - step.Rule, - step.Status ?? "n/a", - step.Action ?? "n/a", - step.Score?.ToString("0.00", CultureInfo.InvariantCulture) ?? 
"n/a", - FormatKeyValuePairs(step.Inputs)); - - if (step.Evidence is not null && step.Evidence.Count > 0) - { - logger.LogInformation(" Evidence: {Evidence}", FormatKeyValuePairs(step.Evidence)); - } - } - } - - if (explain.SealedHints.Count > 0) - { - if (AnsiConsole.Profile.Capabilities.Interactive) - { - AnsiConsole.MarkupLine("[grey]Hints:[/]"); - foreach (var hint in explain.SealedHints) - { - AnsiConsole.MarkupLine($" • {Markup.Escape(hint.Message)}"); - } - } - else - { - foreach (var hint in explain.SealedHints) - { - logger.LogInformation("Hint: {Hint}", hint.Message); - } - } - } - } - - private static string FormatSeverity(PolicyFindingSeverity severity) - { - if (severity.Score.HasValue) - { - return FormattableString.Invariant($"{severity.Normalized} ({severity.Score.Value:0.00})"); - } - - return severity.Normalized; - } - - private static string FormatListPreview(IReadOnlyList values) - { - if (values is null || values.Count == 0) - { - return "(none)"; - } - - const int MaxItems = 3; - if (values.Count <= MaxItems) - { - return string.Join(", ", values); - } - - var preview = string.Join(", ", values.Take(MaxItems)); - return FormattableString.Invariant($"{preview} (+{values.Count - MaxItems})"); - } - - private static string FormatUpdatedAt(DateTimeOffset timestamp) - { - if (timestamp == DateTimeOffset.MinValue) - { - return "(unknown)"; - } - - return timestamp.ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ss'Z'", CultureInfo.InvariantCulture); - } - - private static string FormatScore(double? score) - => score.HasValue ? score.Value.ToString("0.00", CultureInfo.InvariantCulture) : "-"; - - private static string FormatKeyValuePairs(IReadOnlyDictionary? values) - { - if (values is null || values.Count == 0) - { - return "(none)"; - } - - return string.Join(", ", values.Select(pair => $"{pair.Key}={pair.Value}")); - } - - private static string FormatVexMetadata(PolicyFindingVexMetadata? value) - { - if (value is null) - { - return "(none)"; - } - - var parts = new List(3); - if (!string.IsNullOrWhiteSpace(value.WinningStatementId)) - { - parts.Add($"winning={value.WinningStatementId}"); - } - - if (!string.IsNullOrWhiteSpace(value.Source)) - { - parts.Add($"source={value.Source}"); - } - - if (!string.IsNullOrWhiteSpace(value.Status)) - { - parts.Add($"status={value.Status}"); - } - - return parts.Count == 0 ? "(none)" : string.Join(", ", parts); - } - - private static void HandlePolicyFindingsFailure(PolicyApiException exception, ILogger logger, Action recordMetric) - { - var exitCode = exception.StatusCode switch - { - HttpStatusCode.Unauthorized or HttpStatusCode.Forbidden => 12, - HttpStatusCode.NotFound => 1, - _ => 1 - }; - - if (string.IsNullOrWhiteSpace(exception.ErrorCode)) - { - logger.LogError("Policy API request failed ({StatusCode}): {Message}", (int)exception.StatusCode, exception.Message); - } - else - { - logger.LogError("Policy API request failed ({StatusCode} {Code}): {Message}", (int)exception.StatusCode, exception.ErrorCode, exception.Message); - } - - recordMetric("error"); - Environment.ExitCode = exitCode; - } - - private static string FormatDelta(int? value) - => value.HasValue ? 
value.Value.ToString("N0", CultureInfo.InvariantCulture) : "-"; - - private static readonly JsonSerializerOptions SimulationJsonOptions = - new(JsonSerializerDefaults.Web) { WriteIndented = true }; - - private static readonly IReadOnlyDictionary EmptyPolicyEnvironment = - new ReadOnlyDictionary(new Dictionary(0, StringComparer.Ordinal)); - - private static readonly IReadOnlyList EmptyPolicySbomSet = - new ReadOnlyCollection(Array.Empty()); - - private static readonly IReadOnlyDictionary EmptyLabelSelectors = - new ReadOnlyDictionary(new Dictionary(0, StringComparer.OrdinalIgnoreCase)); - - private enum PolicySimulationOutputFormat - { - Table, - Json - } - - private enum PolicyFindingsOutputFormat - { - Table, - Json - } - - - private static string FormatAdditionalValue(object? value) - { - return value switch - { - null => "null", - bool b => b ? "true" : "false", - double d => d.ToString("G17", CultureInfo.InvariantCulture), - float f => f.ToString("G9", CultureInfo.InvariantCulture), - IFormattable formattable => formattable.ToString(null, CultureInfo.InvariantCulture), - _ => value.ToString() ?? string.Empty - }; - } - - - private static IReadOnlyList NormalizeProviders(IReadOnlyList providers) - { - if (providers is null || providers.Count == 0) - { - return Array.Empty(); - } - - var list = new List(); - foreach (var provider in providers) - { - if (!string.IsNullOrWhiteSpace(provider)) - { - list.Add(provider.Trim()); - } - } - - return list.Count == 0 ? Array.Empty() : list; - } - - private static string ResolveTenant(string? tenantOption) - { - if (!string.IsNullOrWhiteSpace(tenantOption)) - { - return tenantOption.Trim(); - } - - var fromEnvironment = Environment.GetEnvironmentVariable("STELLA_TENANT"); - return string.IsNullOrWhiteSpace(fromEnvironment) ? string.Empty : fromEnvironment.Trim(); - } - - private static async Task LoadIngestInputAsync(string input, CancellationToken cancellationToken) - { - if (Uri.TryCreate(input, UriKind.Absolute, out var uri) && - (uri.Scheme.Equals(Uri.UriSchemeHttp, StringComparison.OrdinalIgnoreCase) || - uri.Scheme.Equals(Uri.UriSchemeHttps, StringComparison.OrdinalIgnoreCase))) - { - return await LoadIngestInputFromHttpAsync(uri, cancellationToken).ConfigureAwait(false); - } - - return await LoadIngestInputFromFileAsync(input, cancellationToken).ConfigureAwait(false); - } - - private static async Task LoadIngestInputFromHttpAsync(Uri uri, CancellationToken cancellationToken) - { - using var handler = new HttpClientHandler { AutomaticDecompression = DecompressionMethods.All }; - using var httpClient = new HttpClient(handler); - using var response = await httpClient.GetAsync(uri, cancellationToken).ConfigureAwait(false); - - if (!response.IsSuccessStatusCode) - { - throw new InvalidOperationException($"Failed to download document from {uri} (HTTP {(int)response.StatusCode})."); - } - - var contentType = response.Content.Headers.ContentType?.MediaType ?? "application/json"; - var contentEncoding = response.Content.Headers.ContentEncoding is { Count: > 0 } - ? 
string.Join(",", response.Content.Headers.ContentEncoding) - : null; - - var bytes = await response.Content.ReadAsByteArrayAsync(cancellationToken).ConfigureAwait(false); - var normalized = NormalizeDocument(bytes, contentType, contentEncoding); - - return new IngestInputPayload( - "uri", - uri.ToString(), - normalized.Content, - normalized.ContentType, - normalized.ContentEncoding); - } - - private static async Task LoadIngestInputFromFileAsync(string path, CancellationToken cancellationToken) - { - var fullPath = Path.GetFullPath(path); - if (!File.Exists(fullPath)) - { - throw new FileNotFoundException("Input document not found.", fullPath); - } - - var bytes = await File.ReadAllBytesAsync(fullPath, cancellationToken).ConfigureAwait(false); - var normalized = NormalizeDocument(bytes, GuessContentTypeFromExtension(fullPath), null); - - return new IngestInputPayload( - "file", - Path.GetFileName(fullPath), - normalized.Content, - normalized.ContentType, - normalized.ContentEncoding); - } - - private static DocumentNormalizationResult NormalizeDocument(byte[] bytes, string? contentType, string? encodingHint) - { - if (bytes is null || bytes.Length == 0) - { - throw new InvalidOperationException("Input document is empty."); - } - - var working = bytes; - var encodings = new List(); - if (!string.IsNullOrWhiteSpace(encodingHint)) - { - encodings.Add(encodingHint); - } - - if (IsGzip(working)) - { - working = DecompressGzip(working); - encodings.Add("gzip"); - } - - var text = DecodeText(working); - var trimmed = text.TrimStart(); - - if (!string.IsNullOrWhiteSpace(trimmed) && trimmed[0] != '{' && trimmed[0] != '[') - { - if (TryDecodeBase64(text, out var decodedBytes)) - { - working = decodedBytes; - encodings.Add("base64"); - - if (IsGzip(working)) - { - working = DecompressGzip(working); - encodings.Add("gzip"); - } - - text = DecodeText(working); - } - } - - text = text.Trim(); - if (string.IsNullOrWhiteSpace(text)) - { - throw new InvalidOperationException("Input document contained no data after decoding."); - } - - var encodingLabel = encodings.Count == 0 ? null : string.Join("+", encodings); - var finalContentType = string.IsNullOrWhiteSpace(contentType) ? "application/json" : contentType; - - return new DocumentNormalizationResult(text, finalContentType, encodingLabel); - } - - private static string GuessContentTypeFromExtension(string path) - { - var extension = Path.GetExtension(path); - if (string.IsNullOrWhiteSpace(extension)) - { - return "application/json"; - } - - return extension.ToLowerInvariant() switch - { - ".json" or ".csaf" => "application/json", - ".xml" => "application/xml", - _ => "application/json" - }; - } - - private static DateTimeOffset DetermineVerificationSince(string? sinceOption) - { - if (string.IsNullOrWhiteSpace(sinceOption)) - { - return DateTimeOffset.UtcNow.AddHours(-24); - } - - var trimmed = sinceOption.Trim(); - - if (DateTimeOffset.TryParse( - trimmed, - CultureInfo.InvariantCulture, - DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, - out var parsedTimestamp)) - { - return parsedTimestamp.ToUniversalTime(); - } - - if (TryParseRelativeDuration(trimmed, out var duration)) - { - return DateTimeOffset.UtcNow.Subtract(duration); - } - - throw new InvalidOperationException("Invalid --since value. Use ISO-8601 timestamp or duration (e.g. 
24h, 7d)."); - } - - private static bool TryParseRelativeDuration(string value, out TimeSpan duration) - { - duration = TimeSpan.Zero; - if (string.IsNullOrWhiteSpace(value)) - { - return false; - } - - var normalized = value.Trim().ToLowerInvariant(); - if (normalized.Length < 2) - { - return false; - } - - var suffix = normalized[^1]; - var magnitudeText = normalized[..^1]; - - double multiplier = suffix switch - { - 's' => 1, - 'm' => 60, - 'h' => 3600, - 'd' => 86400, - 'w' => 604800, - _ => 0 - }; - - if (multiplier == 0) - { - return false; - } - - if (!double.TryParse(magnitudeText, NumberStyles.Float, CultureInfo.InvariantCulture, out var magnitude)) - { - return false; - } - - if (double.IsNaN(magnitude) || double.IsInfinity(magnitude) || magnitude <= 0) - { - return false; - } - - var seconds = magnitude * multiplier; - if (double.IsNaN(seconds) || double.IsInfinity(seconds) || seconds <= 0) - { - return false; - } - - duration = TimeSpan.FromSeconds(seconds); - return true; - } - - private static int NormalizeLimit(int? limitOption) - { - if (!limitOption.HasValue) - { - return 20; - } - - if (limitOption.Value < 0) - { - throw new InvalidOperationException("Limit cannot be negative."); - } - - return limitOption.Value; - } - - private static IReadOnlyList ParseCommaSeparatedList(string? raw) - { - if (string.IsNullOrWhiteSpace(raw)) - { - return Array.Empty(); - } - - var tokens = raw - .Split(',', StringSplitOptions.RemoveEmptyEntries) - .Select(token => token.Trim()) - .Where(token => token.Length > 0) - .Distinct(StringComparer.OrdinalIgnoreCase) - .ToArray(); - - return tokens.Length == 0 ? Array.Empty() : tokens; - } - - private static string FormatWindowRange(AocVerifyWindow? window) - { - if (window is null) - { - return "(unspecified)"; - } - - var fromText = window.From?.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture) ?? "(unknown)"; - var toText = window.To?.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture) ?? "(unknown)"; - return $"{fromText} -> {toText}"; - } - - private static string FormatCheckedCounts(AocVerifyChecked? checkedCounts) - { - if (checkedCounts is null) - { - return "(unspecified)"; - } - - return $"advisories: {checkedCounts.Advisories.ToString("N0", CultureInfo.InvariantCulture)}, vex: {checkedCounts.Vex.ToString("N0", CultureInfo.InvariantCulture)}"; - } - - private static string DetermineVerifyStatus(AocVerifyResponse? response) - { - if (response is null) - { - return "unknown"; - } - - if (response.Truncated == true && (response.Violations is null || response.Violations.Count == 0)) - { - return "truncated"; - } - - var total = response.Violations?.Sum(violation => Math.Max(0, violation?.Count ?? 0)) ?? 0; - return total > 0 ? "violations" : "ok"; - } - - private static string FormatBoolean(bool value, bool useColor) - { - var text = value ? "yes" : "no"; - if (!useColor) - { - return text; - } - - return value - ? $"[yellow]{text}[/]" - : $"[green]{text}[/]"; - } - - private static string FormatVerifyStatus(string? status, bool useColor) - { - var normalized = string.IsNullOrWhiteSpace(status) ? "unknown" : status.Trim(); - var escaped = Markup.Escape(normalized); - if (!useColor) - { - return escaped; - } - - return normalized switch - { - "ok" => $"[green]{escaped}[/]", - "violations" => $"[red]{escaped}[/]", - "truncated" => $"[yellow]{escaped}[/]", - _ => $"[grey]{escaped}[/]" - }; - } - - private static string FormatViolationExample(AocVerifyViolationExample? 
example) - { - if (example is null) - { - return "(n/a)"; - } - - var parts = new List(); - if (!string.IsNullOrWhiteSpace(example.Source)) - { - parts.Add(example.Source.Trim()); - } - - if (!string.IsNullOrWhiteSpace(example.DocumentId)) - { - parts.Add(example.DocumentId.Trim()); - } - - var label = parts.Count == 0 ? "(n/a)" : string.Join(" | ", parts); - if (!string.IsNullOrWhiteSpace(example.ContentHash)) - { - label = $"{label} [{example.ContentHash.Trim()}]"; - } - - return label; - } - - private static void RenderAocVerifyTable(AocVerifyResponse response, bool useColor, int limit) - { - var summary = new Table().Border(TableBorder.Rounded); - summary.AddColumn("Field"); - summary.AddColumn("Value"); - - summary.AddRow("Tenant", Markup.Escape(string.IsNullOrWhiteSpace(response?.Tenant) ? "(unknown)" : response.Tenant!)); - summary.AddRow("Window", Markup.Escape(FormatWindowRange(response?.Window))); - summary.AddRow("Checked", Markup.Escape(FormatCheckedCounts(response?.Checked))); - - summary.AddRow("Limit", Markup.Escape(limit <= 0 ? "unbounded" : limit.ToString(CultureInfo.InvariantCulture))); - summary.AddRow("Status", FormatVerifyStatus(DetermineVerifyStatus(response), useColor)); - - if (response?.Metrics?.IngestionWriteTotal is int writes) - { - summary.AddRow("Ingestion Writes", Markup.Escape(writes.ToString("N0", CultureInfo.InvariantCulture))); - } - - if (response?.Metrics?.AocViolationTotal is int totalViolations) - { - summary.AddRow("Violations (total)", Markup.Escape(totalViolations.ToString("N0", CultureInfo.InvariantCulture))); - } - else - { - var computedViolations = response?.Violations?.Sum(violation => Math.Max(0, violation?.Count ?? 0)) ?? 0; - summary.AddRow("Violations (total)", Markup.Escape(computedViolations.ToString("N0", CultureInfo.InvariantCulture))); - } - - summary.AddRow("Truncated", FormatBoolean(response?.Truncated == true, useColor)); - - AnsiConsole.Write(summary); - - if (response?.Violations is null || response.Violations.Count == 0) - { - var message = response?.Truncated == true - ? "No violations reported, but results were truncated. Increase --limit to review full output." - : "No AOC violations detected in the requested window."; - - if (useColor) - { - var color = response?.Truncated == true ? "yellow" : "green"; - AnsiConsole.MarkupLine($"[{color}]{Markup.Escape(message)}[/]"); - } - else - { - Console.WriteLine(message); - } - - return; - } - - var violationTable = new Table().Border(TableBorder.Rounded); - violationTable.AddColumn("Code"); - violationTable.AddColumn("Count"); - violationTable.AddColumn("Sample Document"); - violationTable.AddColumn("Path"); - - foreach (var violation in response.Violations) - { - var codeDisplay = FormatViolationCode(violation.Code, useColor); - var countDisplay = violation.Count.ToString("N0", CultureInfo.InvariantCulture); - var example = violation.Examples?.FirstOrDefault(); - var documentDisplay = Markup.Escape(FormatViolationExample(example)); - var pathDisplay = example is null || string.IsNullOrWhiteSpace(example.Path) - ? 
"(none)" - : example.Path!; - - violationTable.AddRow(codeDisplay, countDisplay, documentDisplay, Markup.Escape(pathDisplay)); - } - - AnsiConsole.Write(violationTable); -} - - private static int DetermineVerifyExitCode(AocVerifyResponse response) - { - ArgumentNullException.ThrowIfNull(response); - - if (response.Violations is not null && response.Violations.Count > 0) - { - var exitCodes = new List(); - foreach (var violation in response.Violations) - { - if (string.IsNullOrWhiteSpace(violation.Code)) - { - continue; - } - - if (AocViolationExitCodeMap.TryGetValue(violation.Code, out var mapped)) - { - exitCodes.Add(mapped); - } - } - - if (exitCodes.Count > 0) - { - return exitCodes.Min(); - } - - return response.Truncated == true ? 18 : 17; - } - - if (response.Truncated == true) - { - return 18; - } - - return 0; - } - - private static async Task WriteJsonReportAsync(T payload, string destination, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(payload); - - if (string.IsNullOrWhiteSpace(destination)) - { - throw new InvalidOperationException("Output path must be provided."); - } - - var outputPath = Path.GetFullPath(destination); - var directory = Path.GetDirectoryName(outputPath); - if (!string.IsNullOrWhiteSpace(directory)) - { - Directory.CreateDirectory(directory); - } - - var json = JsonSerializer.Serialize(payload, new JsonSerializerOptions - { - WriteIndented = true - }); - - await File.WriteAllTextAsync(outputPath, json, cancellationToken).ConfigureAwait(false); - return outputPath; - } - - private static void RenderDryRunTable(AocIngestDryRunResponse response, bool useColor) - { - var summary = new Table().Border(TableBorder.Rounded); - summary.AddColumn("Field"); - summary.AddColumn("Value"); - - summary.AddRow("Source", Markup.Escape(response?.Source ?? "(unknown)")); - summary.AddRow("Tenant", Markup.Escape(response?.Tenant ?? "(unknown)")); - summary.AddRow("Guard Version", Markup.Escape(response?.GuardVersion ?? "(unknown)")); - summary.AddRow("Status", FormatStatusMarkup(response?.Status, useColor)); - - var violationCount = response?.Violations?.Count ?? 0; - summary.AddRow("Violations", violationCount.ToString(CultureInfo.InvariantCulture)); - - if (!string.IsNullOrWhiteSpace(response?.Document?.ContentHash)) - { - summary.AddRow("Content Hash", Markup.Escape(response.Document.ContentHash!)); - } - - if (!string.IsNullOrWhiteSpace(response?.Document?.Supersedes)) - { - summary.AddRow("Supersedes", Markup.Escape(response.Document.Supersedes!)); - } - - if (!string.IsNullOrWhiteSpace(response?.Document?.Provenance?.Signature?.Format)) - { - var signature = response.Document.Provenance.Signature; - var summaryText = signature!.Present - ? signature.Format ?? "present" - : "missing"; - summary.AddRow("Signature", Markup.Escape(summaryText)); - } - - AnsiConsole.Write(summary); - - if (violationCount == 0) - { - if (useColor) - { - AnsiConsole.MarkupLine("[green]No AOC violations detected.[/]"); - } - else - { - Console.WriteLine("No AOC violations detected."); - } - - return; - } - - var violationTable = new Table().Border(TableBorder.Rounded); - violationTable.AddColumn("Code"); - violationTable.AddColumn("Path"); - violationTable.AddColumn("Message"); - - foreach (var violation in response!.Violations!) - { - var codeDisplay = FormatViolationCode(violation.Code, useColor); - var pathDisplay = string.IsNullOrWhiteSpace(violation.Path) ? "(root)" : violation.Path!; - var messageDisplay = string.IsNullOrWhiteSpace(violation.Message) ? 
"(unspecified)" : violation.Message!; - violationTable.AddRow(codeDisplay, Markup.Escape(pathDisplay), Markup.Escape(messageDisplay)); - } - - AnsiConsole.Write(violationTable); - } - - private static int DetermineDryRunExitCode(AocIngestDryRunResponse response) - { - if (response?.Violations is null || response.Violations.Count == 0) - { - return 0; - } - - var exitCodes = new List(); - foreach (var violation in response.Violations) - { - if (string.IsNullOrWhiteSpace(violation.Code)) - { - continue; - } - - if (AocViolationExitCodeMap.TryGetValue(violation.Code, out var mapped)) - { - exitCodes.Add(mapped); - } - } - - if (exitCodes.Count == 0) - { - return 17; - } - - return exitCodes.Min(); - } - - private static string FormatStatusMarkup(string? status, bool useColor) - { - var normalized = string.IsNullOrWhiteSpace(status) ? "unknown" : status.Trim(); - if (!useColor) - { - return Markup.Escape(normalized); - } - - return normalized.Equals("ok", StringComparison.OrdinalIgnoreCase) - ? $"[green]{Markup.Escape(normalized)}[/]" - : $"[red]{Markup.Escape(normalized)}[/]"; - } - - private static string FormatViolationCode(string code, bool useColor) - { - var sanitized = string.IsNullOrWhiteSpace(code) ? "(unknown)" : code.Trim(); - if (!useColor) - { - return Markup.Escape(sanitized); - } - - return $"[red]{Markup.Escape(sanitized)}[/]"; - } - - private static bool IsGzip(ReadOnlySpan data) - { - return data.Length >= 2 && data[0] == 0x1F && data[1] == 0x8B; - } - - private static byte[] DecompressGzip(byte[] payload) - { - using var input = new MemoryStream(payload); - using var gzip = new GZipStream(input, CompressionMode.Decompress); - using var output = new MemoryStream(); - gzip.CopyTo(output); - return output.ToArray(); - } - - private static string DecodeText(byte[] payload) - { - var encoding = DetectEncoding(payload); - return encoding.GetString(payload); - } - - private static Encoding DetectEncoding(ReadOnlySpan data) - { - if (data.Length >= 4) - { - if (data[0] == 0x00 && data[1] == 0x00 && data[2] == 0xFE && data[3] == 0xFF) - { - return new UTF32Encoding(bigEndian: true, byteOrderMark: true); - } - - if (data[0] == 0xFF && data[1] == 0xFE && data[2] == 0x00 && data[3] == 0x00) - { - return new UTF32Encoding(bigEndian: false, byteOrderMark: true); - } - } - - if (data.Length >= 2) - { - if (data[0] == 0xFE && data[1] == 0xFF) - { - return Encoding.BigEndianUnicode; - } - - if (data[0] == 0xFF && data[1] == 0xFE) - { - return Encoding.Unicode; - } - } - - if (data.Length >= 3 && data[0] == 0xEF && data[1] == 0xBB && data[2] == 0xBF) - { - return Encoding.UTF8; - } - - return Encoding.UTF8; - } - - private static bool TryDecodeBase64(string text, out byte[] decoded) - { - decoded = Array.Empty(); - if (string.IsNullOrWhiteSpace(text)) - { - return false; - } - - var builder = new StringBuilder(text.Length); - foreach (var ch in text) - { - if (!char.IsWhiteSpace(ch)) - { - builder.Append(ch); - } - } - - var candidate = builder.ToString(); - if (candidate.Length < 8 || candidate.Length % 4 != 0) - { - return false; - } - - for (var i = 0; i < candidate.Length; i++) - { - var c = candidate[i]; - if (!(char.IsLetterOrDigit(c) || c is '+' or '/' or '=')) - { - return false; - } - } - - try - { - decoded = Convert.FromBase64String(candidate); - return true; - } - catch (FormatException) - { - return false; - } - } - - private sealed record IngestInputPayload(string Kind, string Name, string Content, string ContentType, string? 
ContentEncoding); - - private sealed record DocumentNormalizationResult(string Content, string ContentType, string? ContentEncoding); - - private static readonly IReadOnlyDictionary AocViolationExitCodeMap = new Dictionary(StringComparer.OrdinalIgnoreCase) - { - ["ERR_AOC_001"] = 11, - ["ERR_AOC_002"] = 12, - ["ERR_AOC_003"] = 13, - ["ERR_AOC_004"] = 14, - ["ERR_AOC_005"] = 15, - ["ERR_AOC_006"] = 16, - ["ERR_AOC_007"] = 17 - }; - - private static IDictionary RemoveNullValues(Dictionary source) - { - foreach (var key in source.Where(kvp => kvp.Value is null).Select(kvp => kvp.Key).ToList()) - { - source.Remove(key); - } - - return source; - } - - private static async Task TriggerJobAsync( - IBackendOperationsClient client, - ILogger logger, - string jobKind, - IDictionary parameters, - CancellationToken cancellationToken) - { - JobTriggerResult result = await client.TriggerJobAsync(jobKind, parameters, cancellationToken).ConfigureAwait(false); - if (result.Success) - { - if (!string.IsNullOrWhiteSpace(result.Location)) - { - logger.LogInformation("Job accepted. Track status at {Location}.", result.Location); - } - else if (result.Run is not null) - { - logger.LogInformation("Job accepted. RunId: {RunId} Status: {Status}", result.Run.RunId, result.Run.Status); - } - else - { - logger.LogInformation("Job accepted."); - } - - Environment.ExitCode = 0; - } - else - { - logger.LogError("Job '{JobKind}' failed: {Message}", jobKind, result.Message); - Environment.ExitCode = 1; - } - } -} +using System; +using System.Buffers; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Diagnostics; +using System.Globalization; +using System.IO; +using System.IO.Compression; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Security.Cryptography; +using System.Text.Json; +using System.Text.Json.Serialization; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Spectre.Console; +using StellaOps.Auth.Client; +using StellaOps.Cli.Configuration; +using StellaOps.Cli.Prompts; +using StellaOps.Cli.Services; +using StellaOps.Cli.Services.Models; +using StellaOps.Cli.Telemetry; +using StellaOps.Cryptography; + +namespace StellaOps.Cli.Commands; + +internal static class CommandHandlers +{ + public static async Task HandleScannerDownloadAsync( + IServiceProvider services, + string channel, + string? output, + bool overwrite, + bool install, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("scanner-download"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.scanner.download", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "scanner download"); + activity?.SetTag("stellaops.cli.channel", channel); + using var duration = CliMetrics.MeasureCommandDuration("scanner download"); + + try + { + var result = await client.DownloadScannerAsync(channel, output ?? 
string.Empty, overwrite, verbose, cancellationToken).ConfigureAwait(false);
+
+            if (result.FromCache)
+            {
+                logger.LogInformation("Using cached scanner at {Path}.", result.Path);
+            }
+            else
+            {
+                logger.LogInformation("Scanner downloaded to {Path} ({Size} bytes).", result.Path, result.SizeBytes);
+            }
+
+            CliMetrics.RecordScannerDownload(channel, result.FromCache);
+
+            if (install)
+            {
+                var installer = scope.ServiceProvider.GetRequiredService();
+                await installer.InstallAsync(result.Path, verbose, cancellationToken).ConfigureAwait(false);
+                CliMetrics.RecordScannerInstall(channel);
+            }
+
+            Environment.ExitCode = 0;
+        }
+        catch (Exception ex)
+        {
+            logger.LogError(ex, "Failed to download scanner bundle.");
+            Environment.ExitCode = 1;
+        }
+        finally
+        {
+            verbosity.MinimumLevel = previousLevel;
+        }
+    }
+
+    public static async Task HandleScannerRunAsync(
+        IServiceProvider services,
+        string runner,
+        string entry,
+        string targetDirectory,
+        IReadOnlyList arguments,
+        bool verbose,
+        CancellationToken cancellationToken)
+    {
+        await using var scope = services.CreateAsyncScope();
+        var executor = scope.ServiceProvider.GetRequiredService();
+        var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("scanner-run");
+        var verbosity = scope.ServiceProvider.GetRequiredService();
+        var previousLevel = verbosity.MinimumLevel;
+        verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
+        using var activity = CliActivitySource.Instance.StartActivity("cli.scan.run", ActivityKind.Internal);
+        activity?.SetTag("stellaops.cli.command", "scan run");
+        activity?.SetTag("stellaops.cli.runner", runner);
+        activity?.SetTag("stellaops.cli.entry", entry);
+        activity?.SetTag("stellaops.cli.target", targetDirectory);
+        using var duration = CliMetrics.MeasureCommandDuration("scan run");
+
+        try
+        {
+            var options = scope.ServiceProvider.GetRequiredService();
+            var resultsDirectory = options.ResultsDirectory;
+
+            var executionResult = await executor.RunAsync(
+                runner,
+                entry,
+                targetDirectory,
+                resultsDirectory,
+                arguments,
+                verbose,
+                cancellationToken).ConfigureAwait(false);
+
+            Environment.ExitCode = executionResult.ExitCode;
+            CliMetrics.RecordScanRun(runner, executionResult.ExitCode);
+
+            if (executionResult.ExitCode == 0)
+            {
+                var backend = scope.ServiceProvider.GetRequiredService();
+                logger.LogInformation("Uploading scan artefact {Path}...", executionResult.ResultsPath);
+                await backend.UploadScanResultsAsync(executionResult.ResultsPath, cancellationToken).ConfigureAwait(false);
+                logger.LogInformation("Scan artefact uploaded.");
+                activity?.SetTag("stellaops.cli.results", executionResult.ResultsPath);
+            }
+            else
+            {
+                logger.LogWarning("Skipping automatic upload because scan exited with code {Code}.", executionResult.ExitCode);
+            }
+
+            logger.LogInformation("Run metadata written to {Path}.", executionResult.RunMetadataPath);
+            activity?.SetTag("stellaops.cli.run_metadata", executionResult.RunMetadataPath);
+        }
+        catch (Exception ex)
+        {
+            logger.LogError(ex, "Scanner execution failed.");
+            Environment.ExitCode = 1;
+        }
+        finally
+        {
+            verbosity.MinimumLevel = previousLevel;
+        }
+    }
+
+    public static async Task HandleScanUploadAsync(
+        IServiceProvider services,
+        string file,
+        bool verbose,
+        CancellationToken cancellationToken)
+    {
+        await using var scope = services.CreateAsyncScope();
+        var client = scope.ServiceProvider.GetRequiredService();
+        var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("scanner-upload");
+        var verbosity = scope.ServiceProvider.GetRequiredService();
+        var previousLevel = verbosity.MinimumLevel;
+        verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
+        using var activity = CliActivitySource.Instance.StartActivity("cli.scan.upload", ActivityKind.Client);
+        activity?.SetTag("stellaops.cli.command", "scan upload");
+        activity?.SetTag("stellaops.cli.file", file);
+        using var duration = CliMetrics.MeasureCommandDuration("scan upload");
+
+        try
+        {
+            var path = Path.GetFullPath(file);
+            await client.UploadScanResultsAsync(path, cancellationToken).ConfigureAwait(false);
+            logger.LogInformation("Scan results uploaded successfully.");
+            Environment.ExitCode = 0;
+        }
+        catch (Exception ex)
+        {
+            logger.LogError(ex, "Failed to upload scan results.");
+            Environment.ExitCode = 1;
+        }
+        finally
+        {
+            verbosity.MinimumLevel = previousLevel;
+        }
+    }
+
+    public static async Task HandleSourcesIngestAsync(
+        IServiceProvider services,
+        bool dryRun,
+        string source,
+        string input,
+        string? tenantOverride,
+        string format,
+        bool disableColor,
+        string? output,
+        bool verbose,
+        CancellationToken cancellationToken)
+    {
+        await using var scope = services.CreateAsyncScope();
+        var client = scope.ServiceProvider.GetRequiredService();
+        var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("sources-ingest");
+        var verbosity = scope.ServiceProvider.GetRequiredService();
+        var previousLevel = verbosity.MinimumLevel;
+        verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
+
+        using var activity = CliActivitySource.Instance.StartActivity("cli.sources.ingest.dry_run", ActivityKind.Client);
+        var statusMetric = "unknown";
+        using var duration = CliMetrics.MeasureCommandDuration("sources ingest dry-run");
+
+        try
+        {
+            if (!dryRun)
+            {
+                statusMetric = "unsupported";
+                logger.LogError("Only --dry-run mode is supported for 'stella sources ingest' at this time.");
+                Environment.ExitCode = 1;
+                return;
+            }
+
+            source = source?.Trim() ?? string.Empty;
+            if (string.IsNullOrWhiteSpace(source))
+            {
+                throw new InvalidOperationException("Source identifier must be provided.");
+            }
+
+            var formatNormalized = string.IsNullOrWhiteSpace(format)
+                ? "table"
+                : format.Trim().ToLowerInvariant();
+
+            if (formatNormalized is not ("table" or "json"))
+            {
+                throw new InvalidOperationException("Format must be either 'table' or 'json'.");
+            }
+
+            var tenant = ResolveTenant(tenantOverride);
+            if (string.IsNullOrWhiteSpace(tenant))
+            {
+                throw new InvalidOperationException("Tenant must be provided via --tenant or STELLA_TENANT.");
+            }
+
+            var payload = await LoadIngestInputAsync(input, cancellationToken).ConfigureAwait(false);
+
+            logger.LogInformation("Executing ingestion dry-run for source {Source} using input {Input}.", source, payload.Name);
+
+            activity?.SetTag("stellaops.cli.command", "sources ingest dry-run");
+            activity?.SetTag("stellaops.cli.source", source);
+            activity?.SetTag("stellaops.cli.tenant", tenant);
+            activity?.SetTag("stellaops.cli.format", formatNormalized);
+            activity?.SetTag("stellaops.cli.input_kind", payload.Kind);
+
+            var request = new AocIngestDryRunRequest
+            {
+                Tenant = tenant,
+                Source = source,
+                Document = new AocIngestDryRunDocument
+                {
+                    Name = payload.Name,
+                    Content = payload.Content,
+                    ContentType = payload.ContentType,
+                    ContentEncoding = payload.ContentEncoding
+                }
+            };
+
+            var response = await client.ExecuteAocIngestDryRunAsync(request, cancellationToken).ConfigureAwait(false);
+            activity?.SetTag("stellaops.cli.status", response.Status ?? "unknown");
+
+            if (!string.IsNullOrWhiteSpace(output))
+            {
+                var reportPath = await WriteJsonReportAsync(response, output, cancellationToken).ConfigureAwait(false);
+                logger.LogInformation("Dry-run report written to {Path}.", reportPath);
+            }
+
+            if (formatNormalized == "json")
+            {
+                var json = JsonSerializer.Serialize(response, new JsonSerializerOptions
+                {
+                    WriteIndented = true
+                });
+                Console.WriteLine(json);
+            }
+            else
+            {
+                RenderDryRunTable(response, !disableColor);
+            }
+
+            var exitCode = DetermineDryRunExitCode(response);
+            Environment.ExitCode = exitCode;
+            statusMetric = exitCode == 0 ? "ok" : "violation";
+            activity?.SetTag("stellaops.cli.exit_code", exitCode);
+        }
+        catch (Exception ex)
+        {
+            statusMetric = "transport_error";
+            logger.LogError(ex, "Dry-run ingestion failed.");
+            Environment.ExitCode = 70;
+        }
+        finally
+        {
+            verbosity.MinimumLevel = previousLevel;
+            CliMetrics.RecordSourcesDryRun(statusMetric);
+        }
+    }
+
+    public static async Task HandleAocVerifyAsync(
+        IServiceProvider services,
+        string? sinceOption,
+        int? limitOption,
+        string? sourcesOption,
+        string? codesOption,
+        string format,
+        string? exportPath,
+        string? tenantOverride,
+        bool disableColor,
+        bool verbose,
+        CancellationToken cancellationToken)
+    {
+        await using var scope = services.CreateAsyncScope();
+        var client = scope.ServiceProvider.GetRequiredService();
+        var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("aoc-verify");
+        var verbosity = scope.ServiceProvider.GetRequiredService();
+        var previousLevel = verbosity.MinimumLevel;
+        verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information;
+
+        using var activity = CliActivitySource.Instance.StartActivity("cli.aoc.verify", ActivityKind.Client);
+        using var duration = CliMetrics.MeasureCommandDuration("aoc verify");
+        var outcome = "unknown";
+
+        try
+        {
+            var tenant = ResolveTenant(tenantOverride);
+            if (string.IsNullOrWhiteSpace(tenant))
+            {
+                throw new InvalidOperationException("Tenant must be provided via --tenant or STELLA_TENANT.");
+            }
+
+            var normalizedFormat = string.IsNullOrWhiteSpace(format)
+                ? "table"
+                : format.Trim().ToLowerInvariant();
+
+            if (normalizedFormat is not ("table" or "json"))
+            {
+                throw new InvalidOperationException("Format must be either 'table' or 'json'.");
+            }
+
+            var since = DetermineVerificationSince(sinceOption);
+            var sinceIso = since.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture);
+            var limit = NormalizeLimit(limitOption);
+            var sources = ParseCommaSeparatedList(sourcesOption);
+            var codes = ParseCommaSeparatedList(codesOption);
+
+            var normalizedSources = sources.Count == 0
+                ? Array.Empty()
+                : sources.Select(item => item.ToLowerInvariant()).ToArray();
+
+            var normalizedCodes = codes.Count == 0
+                ? Array.Empty()
+                : codes.Select(item => item.ToUpperInvariant()).ToArray();
+
+            activity?.SetTag("stellaops.cli.command", "aoc verify");
+            activity?.SetTag("stellaops.cli.tenant", tenant);
+            activity?.SetTag("stellaops.cli.since", sinceIso);
+            activity?.SetTag("stellaops.cli.limit", limit);
+            activity?.SetTag("stellaops.cli.format", normalizedFormat);
+            if (normalizedSources.Length > 0)
+            {
+                activity?.SetTag("stellaops.cli.sources", string.Join(",", normalizedSources));
+            }
+
+            if (normalizedCodes.Length > 0)
+            {
+                activity?.SetTag("stellaops.cli.codes", string.Join(",", normalizedCodes));
+            }
+
+            var request = new AocVerifyRequest
+            {
+                Tenant = tenant,
+                Since = sinceIso,
+                Limit = limit,
+                Sources = normalizedSources.Length == 0 ?
null : normalizedSources, + Codes = normalizedCodes.Length == 0 ? null : normalizedCodes + }; + + var response = await client.ExecuteAocVerifyAsync(request, cancellationToken).ConfigureAwait(false); + + if (!string.IsNullOrWhiteSpace(exportPath)) + { + var reportPath = await WriteJsonReportAsync(response, exportPath, cancellationToken).ConfigureAwait(false); + logger.LogInformation("Verification report written to {Path}.", reportPath); + } + + if (normalizedFormat == "json") + { + var json = JsonSerializer.Serialize(response, new JsonSerializerOptions + { + WriteIndented = true + }); + Console.WriteLine(json); + } + else + { + RenderAocVerifyTable(response, !disableColor, limit); + } + + var exitCode = DetermineVerifyExitCode(response); + Environment.ExitCode = exitCode; + activity?.SetTag("stellaops.cli.exit_code", exitCode); + outcome = exitCode switch + { + 0 => "ok", + >= 11 and <= 17 => "violations", + 18 => "truncated", + _ => "unknown" + }; + } + catch (InvalidOperationException ex) + { + outcome = "usage_error"; + logger.LogError(ex, "Verification failed: {Message}", ex.Message); + Console.Error.WriteLine(ex.Message); + Environment.ExitCode = 71; + activity?.SetStatus(ActivityStatusCode.Error, ex.Message); + } + catch (Exception ex) + { + outcome = "transport_error"; + logger.LogError(ex, "Verification request failed."); + Console.Error.WriteLine(ex.Message); + Environment.ExitCode = 70; + activity?.SetStatus(ActivityStatusCode.Error, ex.Message); + } + finally + { + verbosity.MinimumLevel = previousLevel; + CliMetrics.RecordAocVerify(outcome); + } + } + + public static async Task HandleConnectorJobAsync( + IServiceProvider services, + string source, + string stage, + string? mode, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("db-connector"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.db.fetch", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "db fetch"); + activity?.SetTag("stellaops.cli.source", source); + activity?.SetTag("stellaops.cli.stage", stage); + if (!string.IsNullOrWhiteSpace(mode)) + { + activity?.SetTag("stellaops.cli.mode", mode); + } + using var duration = CliMetrics.MeasureCommandDuration("db fetch"); + + try + { + var jobKind = $"source:{source}:{stage}"; + var parameters = new Dictionary(StringComparer.Ordinal); + if (!string.IsNullOrWhiteSpace(mode)) + { + parameters["mode"] = mode; + } + + await TriggerJobAsync(client, logger, jobKind, parameters, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) + { + logger.LogError(ex, "Connector job failed."); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static async Task HandleMergeJobAsync( + IServiceProvider services, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("db-merge"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.db.merge", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "db merge"); + using var duration = CliMetrics.MeasureCommandDuration("db merge"); + + try + { + await TriggerJobAsync(client, logger, "merge:reconcile", new Dictionary(StringComparer.Ordinal), cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) + { + logger.LogError(ex, "Merge job failed."); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static async Task HandleExportJobAsync( + IServiceProvider services, + string format, + bool delta, + bool? publishFull, + bool? publishDelta, + bool? includeFull, + bool? includeDelta, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("db-export"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.db.export", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "db export"); + activity?.SetTag("stellaops.cli.format", format); + activity?.SetTag("stellaops.cli.delta", delta); + using var duration = CliMetrics.MeasureCommandDuration("db export"); + activity?.SetTag("stellaops.cli.publish_full", publishFull); + activity?.SetTag("stellaops.cli.publish_delta", publishDelta); + activity?.SetTag("stellaops.cli.include_full", includeFull); + activity?.SetTag("stellaops.cli.include_delta", includeDelta); + + try + { + var jobKind = format switch + { + "trivy-db" or "trivy" => "export:trivy-db", + _ => "export:json" + }; + + var isTrivy = jobKind == "export:trivy-db"; + if (isTrivy + && !publishFull.HasValue + && !publishDelta.HasValue + && !includeFull.HasValue + && !includeDelta.HasValue + && AnsiConsole.Profile.Capabilities.Interactive) + { + var overrides = TrivyDbExportPrompt.PromptOverrides(); + publishFull = overrides.publishFull; + publishDelta = overrides.publishDelta; + includeFull = overrides.includeFull; + includeDelta = overrides.includeDelta; + } + + var parameters = new Dictionary(StringComparer.Ordinal) + { + ["delta"] = delta + }; + if (publishFull.HasValue) + { + parameters["publishFull"] = publishFull.Value; + } + if (publishDelta.HasValue) + { + parameters["publishDelta"] = publishDelta.Value; + } + if (includeFull.HasValue) + { + parameters["includeFull"] = includeFull.Value; + } + if (includeDelta.HasValue) + { + parameters["includeDelta"] = includeDelta.Value; + } + + await TriggerJobAsync(client, logger, jobKind, parameters, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) + { + logger.LogError(ex, "Export job failed."); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static Task HandleExcititorInitAsync( + IServiceProvider services, + IReadOnlyList providers, + bool resume, + bool verbose, + CancellationToken cancellationToken) + { + var normalizedProviders = NormalizeProviders(providers); + var payload = new Dictionary(StringComparer.Ordinal); + if (normalizedProviders.Count > 0) + { + payload["providers"] = normalizedProviders; + } + if (resume) + { + payload["resume"] = true; + } + + return 
ExecuteExcititorCommandAsync( + services, + commandName: "excititor init", + verbose, + new Dictionary + { + ["providers"] = normalizedProviders.Count, + ["resume"] = resume + }, + client => client.ExecuteExcititorOperationAsync("init", HttpMethod.Post, RemoveNullValues(payload), cancellationToken), + cancellationToken); + } + + public static Task HandleExcititorPullAsync( + IServiceProvider services, + IReadOnlyList providers, + DateTimeOffset? since, + TimeSpan? window, + bool force, + bool verbose, + CancellationToken cancellationToken) + { + var normalizedProviders = NormalizeProviders(providers); + var payload = new Dictionary(StringComparer.Ordinal); + if (normalizedProviders.Count > 0) + { + payload["providers"] = normalizedProviders; + } + if (since.HasValue) + { + payload["since"] = since.Value.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture); + } + if (window.HasValue) + { + payload["window"] = window.Value.ToString("c", CultureInfo.InvariantCulture); + } + if (force) + { + payload["force"] = true; + } + + return ExecuteExcititorCommandAsync( + services, + commandName: "excititor pull", + verbose, + new Dictionary + { + ["providers"] = normalizedProviders.Count, + ["force"] = force, + ["since"] = since?.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture), + ["window"] = window?.ToString("c", CultureInfo.InvariantCulture) + }, + client => client.ExecuteExcititorOperationAsync("ingest/run", HttpMethod.Post, RemoveNullValues(payload), cancellationToken), + cancellationToken); + } + + public static Task HandleExcititorResumeAsync( + IServiceProvider services, + IReadOnlyList providers, + string? checkpoint, + bool verbose, + CancellationToken cancellationToken) + { + var normalizedProviders = NormalizeProviders(providers); + var payload = new Dictionary(StringComparer.Ordinal); + if (normalizedProviders.Count > 0) + { + payload["providers"] = normalizedProviders; + } + if (!string.IsNullOrWhiteSpace(checkpoint)) + { + payload["checkpoint"] = checkpoint.Trim(); + } + + return ExecuteExcititorCommandAsync( + services, + commandName: "excititor resume", + verbose, + new Dictionary + { + ["providers"] = normalizedProviders.Count, + ["checkpoint"] = checkpoint + }, + client => client.ExecuteExcititorOperationAsync("ingest/resume", HttpMethod.Post, RemoveNullValues(payload), cancellationToken), + cancellationToken); + } + + public static async Task HandleExcititorListProvidersAsync( + IServiceProvider services, + bool includeDisabled, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("excititor-list-providers"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.excititor.list-providers", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "excititor list-providers"); + activity?.SetTag("stellaops.cli.include_disabled", includeDisabled); + using var duration = CliMetrics.MeasureCommandDuration("excititor list-providers"); + + try + { + var providers = await client.GetExcititorProvidersAsync(includeDisabled, cancellationToken).ConfigureAwait(false); + Environment.ExitCode = 0; + logger.LogInformation("Providers returned: {Count}", providers.Count); + + if (providers.Count > 0) + { + if (AnsiConsole.Profile.Capabilities.Interactive) + { + var table = new Table().Border(TableBorder.Rounded).AddColumns("Provider", "Kind", "Trust", "Enabled", "Last Ingested"); + foreach (var provider in providers) + { + table.AddRow( + provider.Id, + provider.Kind, + string.IsNullOrWhiteSpace(provider.TrustTier) ? "-" : provider.TrustTier, + provider.Enabled ? "yes" : "no", + provider.LastIngestedAt?.ToString("yyyy-MM-dd HH:mm:ss 'UTC'", CultureInfo.InvariantCulture) ?? "unknown"); + } + + AnsiConsole.Write(table); + } + else + { + foreach (var provider in providers) + { + logger.LogInformation("{ProviderId} [{Kind}] Enabled={Enabled} Trust={Trust} LastIngested={LastIngested}", + provider.Id, + provider.Kind, + provider.Enabled ? "yes" : "no", + string.IsNullOrWhiteSpace(provider.TrustTier) ? "-" : provider.TrustTier, + provider.LastIngestedAt?.ToString("O", CultureInfo.InvariantCulture) ?? "unknown"); + } + } + } + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to list Excititor providers."); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static async Task HandleExcititorExportAsync( + IServiceProvider services, + string format, + bool delta, + string? scope, + DateTimeOffset? since, + string? provider, + string? outputPath, + bool verbose, + CancellationToken cancellationToken) + { + await using var scopeHandle = services.CreateAsyncScope(); + var client = scopeHandle.ServiceProvider.GetRequiredService(); + var logger = scopeHandle.ServiceProvider.GetRequiredService().CreateLogger("excititor-export"); + var options = scopeHandle.ServiceProvider.GetRequiredService(); + var verbosity = scopeHandle.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.excititor.export", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "excititor export"); + activity?.SetTag("stellaops.cli.format", format); + activity?.SetTag("stellaops.cli.delta", delta); + if (!string.IsNullOrWhiteSpace(scope)) + { + activity?.SetTag("stellaops.cli.scope", scope); + } + if (since.HasValue) + { + activity?.SetTag("stellaops.cli.since", since.Value.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture)); + } + if (!string.IsNullOrWhiteSpace(provider)) + { + activity?.SetTag("stellaops.cli.provider", provider); + } + if (!string.IsNullOrWhiteSpace(outputPath)) + { + activity?.SetTag("stellaops.cli.output", outputPath); + } + using var duration = CliMetrics.MeasureCommandDuration("excititor export"); + + try + { + var payload = new Dictionary(StringComparer.Ordinal) + { + ["format"] = string.IsNullOrWhiteSpace(format) ? 
"openvex" : format.Trim(), + ["delta"] = delta + }; + + if (!string.IsNullOrWhiteSpace(scope)) + { + payload["scope"] = scope.Trim(); + } + if (since.HasValue) + { + payload["since"] = since.Value.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture); + } + if (!string.IsNullOrWhiteSpace(provider)) + { + payload["provider"] = provider.Trim(); + } + + var result = await client.ExecuteExcititorOperationAsync( + "export", + HttpMethod.Post, + RemoveNullValues(payload), + cancellationToken).ConfigureAwait(false); + + if (!result.Success) + { + logger.LogError(string.IsNullOrWhiteSpace(result.Message) ? "Excititor export failed." : result.Message); + Environment.ExitCode = 1; + return; + } + + Environment.ExitCode = 0; + + var manifest = TryParseExportManifest(result.Payload); + if (!string.IsNullOrWhiteSpace(result.Message) + && (manifest is null || !string.Equals(result.Message, "ok", StringComparison.OrdinalIgnoreCase))) + { + logger.LogInformation(result.Message); + } + + if (manifest is not null) + { + activity?.SetTag("stellaops.cli.export_id", manifest.ExportId); + if (!string.IsNullOrWhiteSpace(manifest.Format)) + { + activity?.SetTag("stellaops.cli.export_format", manifest.Format); + } + if (manifest.FromCache.HasValue) + { + activity?.SetTag("stellaops.cli.export_cached", manifest.FromCache.Value); + } + if (manifest.SizeBytes.HasValue) + { + activity?.SetTag("stellaops.cli.export_size", manifest.SizeBytes.Value); + } + + if (manifest.FromCache == true) + { + logger.LogInformation("Reusing cached export {ExportId} ({Format}).", manifest.ExportId, manifest.Format ?? "unknown"); + } + else + { + logger.LogInformation("Export ready: {ExportId} ({Format}).", manifest.ExportId, manifest.Format ?? "unknown"); + } + + if (manifest.CreatedAt.HasValue) + { + logger.LogInformation("Created at {CreatedAt}.", manifest.CreatedAt.Value.ToString("u", CultureInfo.InvariantCulture)); + } + + if (!string.IsNullOrWhiteSpace(manifest.Digest)) + { + var digestDisplay = BuildDigestDisplay(manifest.Algorithm, manifest.Digest); + if (manifest.SizeBytes.HasValue) + { + logger.LogInformation("Digest {Digest} ({Size}).", digestDisplay, FormatSize(manifest.SizeBytes.Value)); + } + else + { + logger.LogInformation("Digest {Digest}.", digestDisplay); + } + } + + if (!string.IsNullOrWhiteSpace(manifest.RekorLocation)) + { + if (!string.IsNullOrWhiteSpace(manifest.RekorIndex)) + { + logger.LogInformation("Rekor entry: {Location} (index {Index}).", manifest.RekorLocation, manifest.RekorIndex); + } + else + { + logger.LogInformation("Rekor entry: {Location}.", manifest.RekorLocation); + } + } + + if (!string.IsNullOrWhiteSpace(manifest.RekorInclusionUrl) + && !string.Equals(manifest.RekorInclusionUrl, manifest.RekorLocation, StringComparison.OrdinalIgnoreCase)) + { + logger.LogInformation("Rekor inclusion proof: {Url}.", manifest.RekorInclusionUrl); + } + + if (!string.IsNullOrWhiteSpace(outputPath)) + { + var resolvedPath = ResolveExportOutputPath(outputPath!, manifest); + var download = await client.DownloadExcititorExportAsync( + manifest.ExportId, + resolvedPath, + manifest.Algorithm, + manifest.Digest, + cancellationToken).ConfigureAwait(false); + + activity?.SetTag("stellaops.cli.export_path", download.Path); + + if (download.FromCache) + { + logger.LogInformation("Export already cached at {Path} ({Size}).", download.Path, FormatSize(download.SizeBytes)); + } + else + { + logger.LogInformation("Export saved to {Path} ({Size}).", download.Path, FormatSize(download.SizeBytes)); + } + } + else if 
(!string.IsNullOrWhiteSpace(result.Location)) + { + var downloadUrl = ResolveLocationUrl(options, result.Location); + if (!string.IsNullOrWhiteSpace(downloadUrl)) + { + logger.LogInformation("Download URL: {Url}", downloadUrl); + } + else + { + logger.LogInformation("Download location: {Location}", result.Location); + } + } + } + else + { + if (!string.IsNullOrWhiteSpace(result.Location)) + { + var downloadUrl = ResolveLocationUrl(options, result.Location); + if (!string.IsNullOrWhiteSpace(downloadUrl)) + { + logger.LogInformation("Download URL: {Url}", downloadUrl); + } + else + { + logger.LogInformation("Location: {Location}", result.Location); + } + } + else if (string.IsNullOrWhiteSpace(result.Message)) + { + logger.LogInformation("Export request accepted."); + } + } + } + catch (Exception ex) + { + logger.LogError(ex, "Excititor export failed."); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static Task HandleExcititorBackfillStatementsAsync( + IServiceProvider services, + DateTimeOffset? retrievedSince, + bool force, + int batchSize, + int? maxDocuments, + bool verbose, + CancellationToken cancellationToken) + { + if (batchSize <= 0) + { + throw new ArgumentOutOfRangeException(nameof(batchSize), "Batch size must be greater than zero."); + } + + if (maxDocuments.HasValue && maxDocuments.Value <= 0) + { + throw new ArgumentOutOfRangeException(nameof(maxDocuments), "Max documents must be greater than zero when specified."); + } + + var payload = new Dictionary(StringComparer.Ordinal) + { + ["force"] = force, + ["batchSize"] = batchSize, + ["maxDocuments"] = maxDocuments + }; + + if (retrievedSince.HasValue) + { + payload["retrievedSince"] = retrievedSince.Value.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture); + } + + var activityTags = new Dictionary(StringComparer.Ordinal) + { + ["stellaops.cli.force"] = force, + ["stellaops.cli.batch_size"] = batchSize, + ["stellaops.cli.max_documents"] = maxDocuments + }; + + if (retrievedSince.HasValue) + { + activityTags["stellaops.cli.retrieved_since"] = retrievedSince.Value.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture); + } + + return ExecuteExcititorCommandAsync( + services, + commandName: "excititor backfill-statements", + verbose, + activityTags, + client => client.ExecuteExcititorOperationAsync( + "admin/backfill-statements", + HttpMethod.Post, + RemoveNullValues(payload), + cancellationToken), + cancellationToken); + } + + public static Task HandleExcititorVerifyAsync( + IServiceProvider services, + string? exportId, + string? digest, + string? 
attestationPath, + bool verbose, + CancellationToken cancellationToken) + { + if (string.IsNullOrWhiteSpace(exportId) && string.IsNullOrWhiteSpace(digest) && string.IsNullOrWhiteSpace(attestationPath)) + { + var logger = services.GetRequiredService().CreateLogger("excititor-verify"); + logger.LogError("At least one of --export-id, --digest, or --attestation must be provided."); + Environment.ExitCode = 1; + return Task.CompletedTask; + } + + var payload = new Dictionary(StringComparer.Ordinal); + if (!string.IsNullOrWhiteSpace(exportId)) + { + payload["exportId"] = exportId.Trim(); + } + if (!string.IsNullOrWhiteSpace(digest)) + { + payload["digest"] = digest.Trim(); + } + if (!string.IsNullOrWhiteSpace(attestationPath)) + { + var fullPath = Path.GetFullPath(attestationPath); + if (!File.Exists(fullPath)) + { + var logger = services.GetRequiredService().CreateLogger("excititor-verify"); + logger.LogError("Attestation file not found at {Path}.", fullPath); + Environment.ExitCode = 1; + return Task.CompletedTask; + } + + var bytes = File.ReadAllBytes(fullPath); + payload["attestation"] = new Dictionary(StringComparer.Ordinal) + { + ["fileName"] = Path.GetFileName(fullPath), + ["base64"] = Convert.ToBase64String(bytes) + }; + } + + return ExecuteExcititorCommandAsync( + services, + commandName: "excititor verify", + verbose, + new Dictionary + { + ["export_id"] = exportId, + ["digest"] = digest, + ["attestation_path"] = attestationPath + }, + client => client.ExecuteExcititorOperationAsync("verify", HttpMethod.Post, RemoveNullValues(payload), cancellationToken), + cancellationToken); + } + + public static Task HandleExcititorReconcileAsync( + IServiceProvider services, + IReadOnlyList providers, + TimeSpan? maxAge, + bool verbose, + CancellationToken cancellationToken) + { + var normalizedProviders = NormalizeProviders(providers); + var payload = new Dictionary(StringComparer.Ordinal); + if (normalizedProviders.Count > 0) + { + payload["providers"] = normalizedProviders; + } + if (maxAge.HasValue) + { + payload["maxAge"] = maxAge.Value.ToString("c", CultureInfo.InvariantCulture); + } + + return ExecuteExcititorCommandAsync( + services, + commandName: "excititor reconcile", + verbose, + new Dictionary + { + ["providers"] = normalizedProviders.Count, + ["max_age"] = maxAge?.ToString("c", CultureInfo.InvariantCulture) + }, + client => client.ExecuteExcititorOperationAsync("reconcile", HttpMethod.Post, RemoveNullValues(payload), cancellationToken), + cancellationToken); + } + + public static async Task HandleRuntimePolicyTestAsync( + IServiceProvider services, + string? namespaceValue, + IReadOnlyList imageArguments, + string? filePath, + IReadOnlyList labelArguments, + bool outputJson, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("runtime-policy-test"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.runtime.policy.test", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "runtime policy test"); + if (!string.IsNullOrWhiteSpace(namespaceValue)) + { + activity?.SetTag("stellaops.cli.namespace", namespaceValue); + } + using var duration = CliMetrics.MeasureCommandDuration("runtime policy test"); + + try + { + IReadOnlyList images; + try + { + images = await GatherImageDigestsAsync(imageArguments, filePath, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) when (ex is IOException or UnauthorizedAccessException or ArgumentException or FileNotFoundException) + { + logger.LogError(ex, "Failed to gather image digests: {Message}", ex.Message); + Environment.ExitCode = 9; + return; + } + + if (images.Count == 0) + { + logger.LogError("No image digests provided. Use --image, --file, or pipe digests via stdin."); + Environment.ExitCode = 9; + return; + } + + IReadOnlyDictionary labels; + try + { + labels = ParseLabelSelectors(labelArguments); + } + catch (ArgumentException ex) + { + logger.LogError(ex.Message); + Environment.ExitCode = 9; + return; + } + + activity?.SetTag("stellaops.cli.images", images.Count); + activity?.SetTag("stellaops.cli.labels", labels.Count); + + var request = new RuntimePolicyEvaluationRequest(namespaceValue, labels, images); + var result = await client.EvaluateRuntimePolicyAsync(request, cancellationToken).ConfigureAwait(false); + + activity?.SetTag("stellaops.cli.ttl_seconds", result.TtlSeconds); + Environment.ExitCode = 0; + + if (outputJson) + { + var json = BuildRuntimePolicyJson(result, images); + Console.WriteLine(json); + return; + } + + if (result.ExpiresAtUtc.HasValue) + { + logger.LogInformation("Decision TTL: {TtlSeconds}s (expires {ExpiresAt})", result.TtlSeconds, result.ExpiresAtUtc.Value.ToString("u", CultureInfo.InvariantCulture)); + } + else + { + logger.LogInformation("Decision TTL: {TtlSeconds}s", result.TtlSeconds); + } + + if (!string.IsNullOrWhiteSpace(result.PolicyRevision)) + { + logger.LogInformation("Policy revision: {Revision}", result.PolicyRevision); + } + + DisplayRuntimePolicyResults(logger, result, images); + } + catch (Exception ex) + { + logger.LogError(ex, "Runtime policy evaluation failed."); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static async Task HandleAuthLoginAsync( + IServiceProvider services, + StellaOpsCliOptions options, + bool verbose, + bool force, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("auth-login"); + Environment.ExitCode = 0; + + if (string.IsNullOrWhiteSpace(options.Authority?.Url)) + { + logger.LogError("Authority URL is not configured. Set STELLAOPS_AUTHORITY_URL or update your configuration."); + Environment.ExitCode = 1; + return; + } + + var tokenClient = scope.ServiceProvider.GetService(); + if (tokenClient is null) + { + logger.LogError("Authority client is not available. 
Ensure AddStellaOpsAuthClient is registered in Program.cs."); + Environment.ExitCode = 1; + return; + } + + var cacheKey = AuthorityTokenUtilities.BuildCacheKey(options); + if (string.IsNullOrWhiteSpace(cacheKey)) + { + logger.LogError("Authority configuration is incomplete; unable to determine cache key."); + Environment.ExitCode = 1; + return; + } + + try + { + if (force) + { + await tokenClient.ClearCachedTokenAsync(cacheKey, cancellationToken).ConfigureAwait(false); + } + + var scopeName = AuthorityTokenUtilities.ResolveScope(options); + StellaOpsTokenResult token; + + if (!string.IsNullOrWhiteSpace(options.Authority.Username)) + { + if (string.IsNullOrWhiteSpace(options.Authority.Password)) + { + logger.LogError("Authority password must be provided when username is configured."); + Environment.ExitCode = 1; + return; + } + + token = await tokenClient.RequestPasswordTokenAsync( + options.Authority.Username, + options.Authority.Password!, + scopeName, + null, + cancellationToken).ConfigureAwait(false); + } + else + { + token = await tokenClient.RequestClientCredentialsTokenAsync(scopeName, null, cancellationToken).ConfigureAwait(false); + } + + await tokenClient.CacheTokenAsync(cacheKey, token.ToCacheEntry(), cancellationToken).ConfigureAwait(false); + + if (verbose) + { + logger.LogInformation("Authenticated with {Authority} (scopes: {Scopes}).", options.Authority.Url, string.Join(", ", token.Scopes)); + } + + logger.LogInformation("Login successful. Access token expires at {Expires}.", token.ExpiresAtUtc.ToString("u")); + } + catch (Exception ex) + { + logger.LogError(ex, "Authentication failed: {Message}", ex.Message); + Environment.ExitCode = 1; + } + } + + public static async Task HandleAuthLogoutAsync( + IServiceProvider services, + StellaOpsCliOptions options, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("auth-logout"); + Environment.ExitCode = 0; + + var tokenClient = scope.ServiceProvider.GetService(); + if (tokenClient is null) + { + logger.LogInformation("No authority client registered; nothing to remove."); + return; + } + + var cacheKey = AuthorityTokenUtilities.BuildCacheKey(options); + if (string.IsNullOrWhiteSpace(cacheKey)) + { + logger.LogInformation("Authority configuration missing; no cached tokens to remove."); + return; + } + + await tokenClient.ClearCachedTokenAsync(cacheKey, cancellationToken).ConfigureAwait(false); + if (verbose) + { + logger.LogInformation("Cleared cached token for {Authority}.", options.Authority?.Url ?? "authority"); + } + } + + public static async Task HandleAuthStatusAsync( + IServiceProvider services, + StellaOpsCliOptions options, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("auth-status"); + Environment.ExitCode = 0; + + if (string.IsNullOrWhiteSpace(options.Authority?.Url)) + { + logger.LogInformation("Authority URL not configured. 
Set STELLAOPS_AUTHORITY_URL and run 'auth login'."); + Environment.ExitCode = 1; + return; + } + + var tokenClient = scope.ServiceProvider.GetService(); + if (tokenClient is null) + { + logger.LogInformation("Authority client not registered; no cached tokens available."); + Environment.ExitCode = 1; + return; + } + + var cacheKey = AuthorityTokenUtilities.BuildCacheKey(options); + if (string.IsNullOrWhiteSpace(cacheKey)) + { + logger.LogInformation("Authority configuration incomplete; no cached tokens available."); + Environment.ExitCode = 1; + return; + } + + var entry = await tokenClient.GetCachedTokenAsync(cacheKey, cancellationToken).ConfigureAwait(false); + if (entry is null) + { + logger.LogInformation("No cached token for {Authority}. Run 'auth login' to authenticate.", options.Authority.Url); + Environment.ExitCode = 1; + return; + } + + logger.LogInformation("Cached token for {Authority} expires at {Expires}.", options.Authority.Url, entry.ExpiresAtUtc.ToString("u")); + if (verbose) + { + logger.LogInformation("Scopes: {Scopes}", string.Join(", ", entry.Scopes)); + } + } + + public static async Task HandleAuthWhoAmIAsync( + IServiceProvider services, + StellaOpsCliOptions options, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("auth-whoami"); + Environment.ExitCode = 0; + + if (string.IsNullOrWhiteSpace(options.Authority?.Url)) + { + logger.LogInformation("Authority URL not configured. Set STELLAOPS_AUTHORITY_URL and run 'auth login'."); + Environment.ExitCode = 1; + return; + } + + var tokenClient = scope.ServiceProvider.GetService(); + if (tokenClient is null) + { + logger.LogInformation("Authority client not registered; no cached tokens available."); + Environment.ExitCode = 1; + return; + } + + var cacheKey = AuthorityTokenUtilities.BuildCacheKey(options); + if (string.IsNullOrWhiteSpace(cacheKey)) + { + logger.LogInformation("Authority configuration incomplete; no cached tokens available."); + Environment.ExitCode = 1; + return; + } + + var entry = await tokenClient.GetCachedTokenAsync(cacheKey, cancellationToken).ConfigureAwait(false); + if (entry is null) + { + logger.LogInformation("No cached token for {Authority}. Run 'auth login' to authenticate.", options.Authority.Url); + Environment.ExitCode = 1; + return; + } + + var grantType = string.IsNullOrWhiteSpace(options.Authority.Username) ? 
"client_credentials" : "password"; + var now = DateTimeOffset.UtcNow; + var remaining = entry.ExpiresAtUtc - now; + if (remaining < TimeSpan.Zero) + { + remaining = TimeSpan.Zero; + } + + logger.LogInformation("Authority: {Authority}", options.Authority.Url); + logger.LogInformation("Grant type: {GrantType}", grantType); + logger.LogInformation("Token type: {TokenType}", entry.TokenType); + logger.LogInformation("Expires: {Expires} ({Remaining})", entry.ExpiresAtUtc.ToString("u"), FormatDuration(remaining)); + + if (entry.Scopes.Count > 0) + { + logger.LogInformation("Scopes: {Scopes}", string.Join(", ", entry.Scopes)); + } + + if (TryExtractJwtClaims(entry.AccessToken, out var claims, out var issuedAt, out var notBefore)) + { + if (claims.TryGetValue("sub", out var subject) && !string.IsNullOrWhiteSpace(subject)) + { + logger.LogInformation("Subject: {Subject}", subject); + } + + if (claims.TryGetValue("client_id", out var clientId) && !string.IsNullOrWhiteSpace(clientId)) + { + logger.LogInformation("Client ID (token): {ClientId}", clientId); + } + + if (claims.TryGetValue("aud", out var audience) && !string.IsNullOrWhiteSpace(audience)) + { + logger.LogInformation("Audience: {Audience}", audience); + } + + if (claims.TryGetValue("iss", out var issuer) && !string.IsNullOrWhiteSpace(issuer)) + { + logger.LogInformation("Issuer: {Issuer}", issuer); + } + + if (issuedAt is not null) + { + logger.LogInformation("Issued at: {IssuedAt}", issuedAt.Value.ToString("u")); + } + + if (notBefore is not null) + { + logger.LogInformation("Not before: {NotBefore}", notBefore.Value.ToString("u")); + } + + var extraClaims = CollectAdditionalClaims(claims); + if (extraClaims.Count > 0 && verbose) + { + logger.LogInformation("Additional claims: {Claims}", string.Join(", ", extraClaims)); + } + } + else + { + logger.LogInformation("Access token appears opaque; claims are unavailable."); + } + } + + public static async Task HandleAuthRevokeExportAsync( + IServiceProvider services, + StellaOpsCliOptions options, + string? outputDirectory, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("auth-revoke-export"); + Environment.ExitCode = 0; + + try + { + var client = scope.ServiceProvider.GetRequiredService(); + var result = await client.ExportAsync(verbose, cancellationToken).ConfigureAwait(false); + + var directory = string.IsNullOrWhiteSpace(outputDirectory) + ? Directory.GetCurrentDirectory() + : Path.GetFullPath(outputDirectory); + + Directory.CreateDirectory(directory); + + var bundlePath = Path.Combine(directory, "revocation-bundle.json"); + var signaturePath = Path.Combine(directory, "revocation-bundle.json.jws"); + var digestPath = Path.Combine(directory, "revocation-bundle.json.sha256"); + + await File.WriteAllBytesAsync(bundlePath, result.BundleBytes, cancellationToken).ConfigureAwait(false); + await File.WriteAllTextAsync(signaturePath, result.Signature, cancellationToken).ConfigureAwait(false); + await File.WriteAllTextAsync(digestPath, $"sha256:{result.Digest}", cancellationToken).ConfigureAwait(false); + + var computedDigest = Convert.ToHexString(SHA256.HashData(result.BundleBytes)).ToLowerInvariant(); + if (!string.Equals(computedDigest, result.Digest, StringComparison.OrdinalIgnoreCase)) + { + logger.LogError("Digest mismatch. 
Expected {Expected} but computed {Actual}.", result.Digest, computedDigest);
+                Environment.ExitCode = 1;
+                return;
+            }
+
+            logger.LogInformation(
+                "Revocation bundle exported to {Directory} (sequence {Sequence}, issued {Issued:u}, signing key {KeyId}, provider {Provider}).",
+                directory,
+                result.Sequence,
+                result.IssuedAt,
+                string.IsNullOrWhiteSpace(result.SigningKeyId) ? "" : result.SigningKeyId,
+                string.IsNullOrWhiteSpace(result.SigningProvider) ? "default" : result.SigningProvider);
+        }
+        catch (Exception ex)
+        {
+            logger.LogError(ex, "Failed to export revocation bundle.");
+            Environment.ExitCode = 1;
+        }
+    }
+
+    public static async Task HandleAuthRevokeVerifyAsync(
+        string bundlePath,
+        string signaturePath,
+        string keyPath,
+        bool verbose,
+        CancellationToken cancellationToken)
+    {
+        var loggerFactory = LoggerFactory.Create(builder => builder.AddSimpleConsole(options =>
+        {
+            options.SingleLine = true;
+            options.TimestampFormat = "HH:mm:ss ";
+        }));
+        var logger = loggerFactory.CreateLogger("auth-revoke-verify");
+        Environment.ExitCode = 0;
+
+        try
+        {
+            if (string.IsNullOrWhiteSpace(bundlePath) || string.IsNullOrWhiteSpace(signaturePath) || string.IsNullOrWhiteSpace(keyPath))
+            {
+                logger.LogError("Arguments --bundle, --signature, and --key are required.");
+                Environment.ExitCode = 1;
+                return;
+            }
+
+            var bundleBytes = await File.ReadAllBytesAsync(bundlePath, cancellationToken).ConfigureAwait(false);
+            var signatureContent = (await File.ReadAllTextAsync(signaturePath, cancellationToken).ConfigureAwait(false)).Trim();
+            var keyPem = await File.ReadAllTextAsync(keyPath, cancellationToken).ConfigureAwait(false);
+
+            var digest = Convert.ToHexString(SHA256.HashData(bundleBytes)).ToLowerInvariant();
+            logger.LogInformation("Bundle digest sha256:{Digest}", digest);
+
+            if (!TryParseDetachedJws(signatureContent, out var encodedHeader, out var encodedSignature))
+            {
+                logger.LogError("Signature is not in detached JWS format.");
+                Environment.ExitCode = 1;
+                return;
+            }
+
+            var headerJson = Encoding.UTF8.GetString(Base64UrlDecode(encodedHeader));
+            using var headerDocument = JsonDocument.Parse(headerJson);
+            var header = headerDocument.RootElement;
+
+            if (!header.TryGetProperty("b64", out var b64Element) || b64Element.GetBoolean())
+            {
+                logger.LogError("Detached JWS header must include '\"b64\": false'.");
+                Environment.ExitCode = 1;
+                return;
+            }
+
+            var algorithm = header.TryGetProperty("alg", out var algElement) ? algElement.GetString() : SignatureAlgorithms.Es256;
+            if (string.IsNullOrWhiteSpace(algorithm))
+            {
+                algorithm = SignatureAlgorithms.Es256;
+            }
+
+            var providerHint = header.TryGetProperty("provider", out var providerElement)
+                ? providerElement.GetString()
+                : null;
+
+            var keyId = header.TryGetProperty("kid", out var kidElement) ? kidElement.GetString() : null;
+            if (string.IsNullOrWhiteSpace(keyId))
+            {
+                keyId = Path.GetFileNameWithoutExtension(keyPath);
+                logger.LogWarning("JWS header missing 'kid'; using fallback key id {KeyId}.", keyId);
+            }
+
+            CryptoSigningKey signingKey;
+            try
+            {
+                signingKey = CreateVerificationSigningKey(keyId!, algorithm!, providerHint, keyPem, keyPath);
+            }
+            catch (Exception ex) when (ex is InvalidOperationException or CryptographicException)
+            {
+                logger.LogError(ex, "Failed to load verification key material.");
+                Environment.ExitCode = 1;
+                return;
+            }
+
+            var providers = new List
+            {
+                new DefaultCryptoProvider()
+            };
+
+#if STELLAOPS_CRYPTO_SODIUM
+            providers.Add(new LibsodiumCryptoProvider());
+#endif
+
+            foreach (var provider in providers)
+            {
+                if (provider.Supports(CryptoCapability.Verification, algorithm!))
+                {
+                    provider.UpsertSigningKey(signingKey);
+                }
+            }
+
+            var preferredOrder = !string.IsNullOrWhiteSpace(providerHint)
+                ? new[] { providerHint! }
+                : Array.Empty();
+            var registry = new CryptoProviderRegistry(providers, preferredOrder);
+            CryptoSignerResolution resolution;
+            try
+            {
+                resolution = registry.ResolveSigner(
+                    CryptoCapability.Verification,
+                    algorithm!,
+                    signingKey.Reference,
+                    providerHint);
+            }
+            catch (Exception ex)
+            {
+                logger.LogError(ex, "No crypto provider available for verification (algorithm {Algorithm}).", algorithm);
+                Environment.ExitCode = 1;
+                return;
+            }
+
+            var signingInputLength = encodedHeader.Length + 1 + bundleBytes.Length;
+            var buffer = ArrayPool.Shared.Rent(signingInputLength);
+            try
+            {
+                var headerBytes = Encoding.ASCII.GetBytes(encodedHeader);
+                Buffer.BlockCopy(headerBytes, 0, buffer, 0, headerBytes.Length);
+                buffer[headerBytes.Length] = (byte)'.';
+                Buffer.BlockCopy(bundleBytes, 0, buffer, headerBytes.Length + 1, bundleBytes.Length);
+
+                var signatureBytes = Base64UrlDecode(encodedSignature);
+                var verified = await resolution.Signer.VerifyAsync(
+                    new ReadOnlyMemory(buffer, 0, signingInputLength),
+                    signatureBytes,
+                    cancellationToken).ConfigureAwait(false);
+
+                if (!verified)
+                {
+                    logger.LogError("Signature verification failed.");
+                    Environment.ExitCode = 1;
+                    return;
+                }
+            }
+            finally
+            {
+                ArrayPool.Shared.Return(buffer);
+            }
+
+            if (!string.IsNullOrWhiteSpace(providerHint) && !string.Equals(providerHint, resolution.ProviderName, StringComparison.OrdinalIgnoreCase))
+            {
+                logger.LogWarning(
+                    "Preferred provider '{Preferred}' unavailable; verification used '{Provider}'.",
+                    providerHint,
+                    resolution.ProviderName);
+            }
+
+            logger.LogInformation(
+                "Signature verified using algorithm {Algorithm} via provider {Provider} (kid {KeyId}).",
+                algorithm,
+                resolution.ProviderName,
+                signingKey.Reference.KeyId);
+
+            if (verbose)
+            {
+                logger.LogInformation("JWS header: {Header}", headerJson);
+            }
+        }
+        catch (Exception ex)
+        {
+            logger.LogError(ex, "Failed to verify revocation bundle.");
+            Environment.ExitCode = 1;
+        }
+        finally
+        {
+            loggerFactory.Dispose();
+        }
+    }
+
+    public static async Task HandleVulnObservationsAsync(
+        IServiceProvider services,
+        string tenant,
+        IReadOnlyList observationIds,
+        IReadOnlyList aliases,
+        IReadOnlyList purls,
+        IReadOnlyList cpes,
+        int? limit,
+        string?
cursor, + bool emitJson, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("vuln-observations"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.vuln.observations", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "vuln observations"); + activity?.SetTag("stellaops.cli.tenant", tenant); + using var duration = CliMetrics.MeasureCommandDuration("vuln observations"); + + try + { + tenant = tenant?.Trim().ToLowerInvariant() ?? string.Empty; + if (string.IsNullOrWhiteSpace(tenant)) + { + throw new InvalidOperationException("Tenant must be provided."); + } + + var query = new AdvisoryObservationsQuery( + tenant, + NormalizeSet(observationIds, toLower: false), + NormalizeSet(aliases, toLower: true), + NormalizeSet(purls, toLower: false), + NormalizeSet(cpes, toLower: false), + limit, + cursor); + + var response = await client.GetObservationsAsync(query, cancellationToken).ConfigureAwait(false); + + if (emitJson) + { + var json = JsonSerializer.Serialize(response, new JsonSerializerOptions + { + WriteIndented = true + }); + Console.WriteLine(json); + Environment.ExitCode = 0; + return; + } + + RenderObservationTable(response); + if (!emitJson && response.HasMore && !string.IsNullOrWhiteSpace(response.NextCursor)) + { + var escapedCursor = Markup.Escape(response.NextCursor); + AnsiConsole.MarkupLine($"[yellow]More observations available. Continue with[/] [cyan]--cursor[/] [grey]{escapedCursor}[/]"); + } + Environment.ExitCode = 0; + } + catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) + { + logger.LogWarning("Operation cancelled by user."); + Environment.ExitCode = 130; + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to fetch observations from Concelier."); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + + static IReadOnlyList NormalizeSet(IReadOnlyList values, bool toLower) + { + if (values is null || values.Count == 0) + { + return Array.Empty(); + } + + var set = new HashSet(StringComparer.Ordinal); + foreach (var raw in values) + { + if (string.IsNullOrWhiteSpace(raw)) + { + continue; + } + + var normalized = raw.Trim(); + if (toLower) + { + normalized = normalized.ToLowerInvariant(); + } + + set.Add(normalized); + } + + return set.Count == 0 ? Array.Empty() : set.ToArray(); + } + + static void RenderObservationTable(AdvisoryObservationsResponse response) + { + var observations = response.Observations ?? Array.Empty(); + if (observations.Count == 0) + { + AnsiConsole.MarkupLine("[yellow]No observations matched the provided filters.[/]"); + return; + } + + var table = new Table() + .Centered() + .Border(TableBorder.Rounded); + + table.AddColumn("Observation"); + table.AddColumn("Source"); + table.AddColumn("Upstream Id"); + table.AddColumn("Aliases"); + table.AddColumn("PURLs"); + table.AddColumn("CPEs"); + table.AddColumn("Created (UTC)"); + + foreach (var observation in observations) + { + var sourceVendor = observation.Source?.Vendor ?? "(unknown)"; + var upstreamId = observation.Upstream?.UpstreamId ?? 
"(unknown)"; + var aliasesText = FormatList(observation.Linkset?.Aliases); + var purlsText = FormatList(observation.Linkset?.Purls); + var cpesText = FormatList(observation.Linkset?.Cpes); + + table.AddRow( + Markup.Escape(observation.ObservationId), + Markup.Escape(sourceVendor), + Markup.Escape(upstreamId), + Markup.Escape(aliasesText), + Markup.Escape(purlsText), + Markup.Escape(cpesText), + observation.CreatedAt.ToUniversalTime().ToString("u", CultureInfo.InvariantCulture)); + } + + AnsiConsole.Write(table); + AnsiConsole.MarkupLine( + "[green]{0}[/] observation(s). Aliases: [green]{1}[/], PURLs: [green]{2}[/], CPEs: [green]{3}[/].", + observations.Count, + response.Linkset?.Aliases?.Count ?? 0, + response.Linkset?.Purls?.Count ?? 0, + response.Linkset?.Cpes?.Count ?? 0); + } + + static string FormatList(IReadOnlyList? values) + { + if (values is null || values.Count == 0) + { + return "(none)"; + } + + const int MaxItems = 3; + if (values.Count <= MaxItems) + { + return string.Join(", ", values); + } + + var preview = values.Take(MaxItems); + return $"{string.Join(", ", preview)} (+{values.Count - MaxItems})"; + } + } + + public static async Task HandleOfflineKitPullAsync( + IServiceProvider services, + string? bundleId, + string? destinationDirectory, + bool overwrite, + bool resume, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var options = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("offline-kit-pull"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.offline.kit.pull", ActivityKind.Client); + activity?.SetTag("stellaops.cli.bundle_id", string.IsNullOrWhiteSpace(bundleId) ? "latest" : bundleId); + using var duration = CliMetrics.MeasureCommandDuration("offline kit pull"); + + try + { + var targetDirectory = string.IsNullOrWhiteSpace(destinationDirectory) + ? options.Offline?.KitsDirectory ?? Path.Combine(Environment.CurrentDirectory, "offline-kits") + : destinationDirectory; + + targetDirectory = Path.GetFullPath(targetDirectory); + Directory.CreateDirectory(targetDirectory); + + var result = await client.DownloadOfflineKitAsync(bundleId, targetDirectory, overwrite, resume, cancellationToken).ConfigureAwait(false); + + logger.LogInformation( + "Bundle {BundleId} stored at {Path} (captured {Captured:u}, sha256:{Digest}).", + result.Descriptor.BundleId, + result.BundlePath, + result.Descriptor.CapturedAt, + result.Descriptor.BundleSha256); + + logger.LogInformation("Manifest saved to {Manifest}.", result.ManifestPath); + + if (!string.IsNullOrWhiteSpace(result.MetadataPath)) + { + logger.LogDebug("Metadata recorded at {Metadata}.", result.MetadataPath); + } + + if (result.BundleSignaturePath is not null) + { + logger.LogInformation("Bundle signature saved to {Signature}.", result.BundleSignaturePath); + } + + if (result.ManifestSignaturePath is not null) + { + logger.LogInformation("Manifest signature saved to {Signature}.", result.ManifestSignaturePath); + } + + CliMetrics.RecordOfflineKitDownload(result.Descriptor.Kind ?? 
"unknown", result.FromCache); + activity?.SetTag("stellaops.cli.bundle_cache", result.FromCache); + Environment.ExitCode = 0; + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to download offline kit bundle."); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static async Task HandlePolicyFindingsListAsync( + IServiceProvider services, + string policyId, + string[] sbomFilters, + string[] statusFilters, + string[] severityFilters, + string? since, + string? cursor, + int? page, + int? pageSize, + string? format, + string? outputPath, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("policy-findings-ls"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.policy.findings.list", ActivityKind.Client); + using var duration = CliMetrics.MeasureCommandDuration("policy findings list"); + + try + { + if (string.IsNullOrWhiteSpace(policyId)) + { + throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); + } + + if (page.HasValue && page.Value < 1) + { + throw new ArgumentException("--page must be greater than or equal to 1.", nameof(page)); + } + + if (pageSize.HasValue && (pageSize.Value < 1 || pageSize.Value > 500)) + { + throw new ArgumentException("--page-size must be between 1 and 500.", nameof(pageSize)); + } + + var normalizedPolicyId = policyId.Trim(); + var sboms = NormalizePolicyFilterValues(sbomFilters); + var statuses = NormalizePolicyFilterValues(statusFilters, toLower: true); + var severities = NormalizePolicyFilterValues(severityFilters); + var sinceValue = ParsePolicySince(since); + var cursorValue = string.IsNullOrWhiteSpace(cursor) ? 
null : cursor.Trim(); + + var query = new PolicyFindingsQuery( + normalizedPolicyId, + sboms, + statuses, + severities, + cursorValue, + page, + pageSize, + sinceValue); + + activity?.SetTag("stellaops.cli.policy_id", normalizedPolicyId); + if (sboms.Count > 0) + { + activity?.SetTag("stellaops.cli.findings.sbom_filters", string.Join(",", sboms)); + } + + if (statuses.Count > 0) + { + activity?.SetTag("stellaops.cli.findings.status_filters", string.Join(",", statuses)); + } + + if (severities.Count > 0) + { + activity?.SetTag("stellaops.cli.findings.severity_filters", string.Join(",", severities)); + } + + if (!string.IsNullOrWhiteSpace(cursorValue)) + { + activity?.SetTag("stellaops.cli.findings.cursor", cursorValue); + } + + if (page.HasValue) + { + activity?.SetTag("stellaops.cli.findings.page", page.Value); + } + + if (pageSize.HasValue) + { + activity?.SetTag("stellaops.cli.findings.page_size", pageSize.Value); + } + + if (sinceValue.HasValue) + { + activity?.SetTag("stellaops.cli.findings.since", sinceValue.Value.ToString("o", CultureInfo.InvariantCulture)); + } + + var result = await client.GetPolicyFindingsAsync(query, cancellationToken).ConfigureAwait(false); + activity?.SetTag("stellaops.cli.findings.count", result.Items.Count); + if (!string.IsNullOrWhiteSpace(result.NextCursor)) + { + activity?.SetTag("stellaops.cli.findings.next_cursor", result.NextCursor); + } + + var payload = BuildPolicyFindingsPayload(normalizedPolicyId, query, result); + + if (!string.IsNullOrWhiteSpace(outputPath)) + { + await WriteJsonPayloadAsync(outputPath!, payload, cancellationToken).ConfigureAwait(false); + logger.LogInformation("Results written to {Path}.", Path.GetFullPath(outputPath!)); + } + + var outputFormat = DeterminePolicyFindingsFormat(format, outputPath); + if (outputFormat == PolicyFindingsOutputFormat.Json) + { + var json = JsonSerializer.Serialize(payload, SimulationJsonOptions); + Console.WriteLine(json); + } + else + { + RenderPolicyFindingsTable(logger, result); + } + + CliMetrics.RecordPolicyFindingsList(result.Items.Count == 0 ? "empty" : "ok"); + Environment.ExitCode = 0; + } + catch (ArgumentException ex) + { + logger.LogError(ex.Message); + CliMetrics.RecordPolicyFindingsList("error"); + Environment.ExitCode = 64; + } + catch (PolicyApiException ex) + { + HandlePolicyFindingsFailure(ex, logger, CliMetrics.RecordPolicyFindingsList); + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to list policy findings."); + CliMetrics.RecordPolicyFindingsList("error"); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static async Task HandlePolicyFindingsGetAsync( + IServiceProvider services, + string policyId, + string findingId, + string? format, + string? outputPath, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("policy-findings-get"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.policy.findings.get", ActivityKind.Client); + using var duration = CliMetrics.MeasureCommandDuration("policy findings get"); + + try + { + if (string.IsNullOrWhiteSpace(policyId)) + { + throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); + } + + if (string.IsNullOrWhiteSpace(findingId)) + { + throw new ArgumentException("Finding identifier must be provided.", nameof(findingId)); + } + + var normalizedPolicyId = policyId.Trim(); + var normalizedFindingId = findingId.Trim(); + activity?.SetTag("stellaops.cli.policy_id", normalizedPolicyId); + activity?.SetTag("stellaops.cli.finding_id", normalizedFindingId); + + var result = await client.GetPolicyFindingAsync(normalizedPolicyId, normalizedFindingId, cancellationToken).ConfigureAwait(false); + var payload = BuildPolicyFindingPayload(normalizedPolicyId, result); + + if (!string.IsNullOrWhiteSpace(outputPath)) + { + await WriteJsonPayloadAsync(outputPath!, payload, cancellationToken).ConfigureAwait(false); + logger.LogInformation("Finding written to {Path}.", Path.GetFullPath(outputPath!)); + } + + var outputFormat = DeterminePolicyFindingsFormat(format, outputPath); + if (outputFormat == PolicyFindingsOutputFormat.Json) + { + Console.WriteLine(JsonSerializer.Serialize(payload, SimulationJsonOptions)); + } + else + { + RenderPolicyFindingDetails(logger, result); + } + + var outcome = string.IsNullOrWhiteSpace(result.Status) ? "unknown" : result.Status.ToLowerInvariant(); + CliMetrics.RecordPolicyFindingsGet(outcome); + Environment.ExitCode = 0; + } + catch (ArgumentException ex) + { + logger.LogError(ex.Message); + CliMetrics.RecordPolicyFindingsGet("error"); + Environment.ExitCode = 64; + } + catch (PolicyApiException ex) + { + HandlePolicyFindingsFailure(ex, logger, CliMetrics.RecordPolicyFindingsGet); + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to retrieve policy finding."); + CliMetrics.RecordPolicyFindingsGet("error"); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static async Task HandlePolicyFindingsExplainAsync( + IServiceProvider services, + string policyId, + string findingId, + string? mode, + string? format, + string? outputPath, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("policy-findings-explain"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.policy.findings.explain", ActivityKind.Client); + using var duration = CliMetrics.MeasureCommandDuration("policy findings explain"); + + try + { + if (string.IsNullOrWhiteSpace(policyId)) + { + throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); + } + + if (string.IsNullOrWhiteSpace(findingId)) + { + throw new ArgumentException("Finding identifier must be provided.", nameof(findingId)); + } + + var normalizedPolicyId = policyId.Trim(); + var normalizedFindingId = findingId.Trim(); + var normalizedMode = NormalizeExplainMode(mode); + + activity?.SetTag("stellaops.cli.policy_id", normalizedPolicyId); + activity?.SetTag("stellaops.cli.finding_id", normalizedFindingId); + if (!string.IsNullOrWhiteSpace(normalizedMode)) + { + activity?.SetTag("stellaops.cli.findings.mode", normalizedMode); + } + + var result = await client.GetPolicyFindingExplainAsync(normalizedPolicyId, normalizedFindingId, normalizedMode, cancellationToken).ConfigureAwait(false); + activity?.SetTag("stellaops.cli.findings.step_count", result.Steps.Count); + + var payload = BuildPolicyFindingExplainPayload(normalizedPolicyId, normalizedFindingId, normalizedMode, result); + + if (!string.IsNullOrWhiteSpace(outputPath)) + { + await WriteJsonPayloadAsync(outputPath!, payload, cancellationToken).ConfigureAwait(false); + logger.LogInformation("Explain trace written to {Path}.", Path.GetFullPath(outputPath!)); + } + + var outputFormat = DeterminePolicyFindingsFormat(format, outputPath); + if (outputFormat == PolicyFindingsOutputFormat.Json) + { + Console.WriteLine(JsonSerializer.Serialize(payload, SimulationJsonOptions)); + } + else + { + RenderPolicyFindingExplain(logger, result); + } + + CliMetrics.RecordPolicyFindingsExplain(result.Steps.Count == 0 ? "empty" : "ok"); + Environment.ExitCode = 0; + } + catch (ArgumentException ex) + { + logger.LogError(ex.Message); + CliMetrics.RecordPolicyFindingsExplain("error"); + Environment.ExitCode = 64; + } + catch (PolicyApiException ex) + { + HandlePolicyFindingsFailure(ex, logger, CliMetrics.RecordPolicyFindingsExplain); + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to fetch policy explain trace."); + CliMetrics.RecordPolicyFindingsExplain("error"); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static async Task HandlePolicyActivateAsync( + IServiceProvider services, + string policyId, + int version, + string? note, + bool runNow, + string? scheduledAt, + string? priority, + bool rollback, + string? incidentId, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("policy-activate"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.policy.activate", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "policy activate"); + using var duration = CliMetrics.MeasureCommandDuration("policy activate"); + + try + { + if (string.IsNullOrWhiteSpace(policyId)) + { + throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); + } + + if (version <= 0) + { + throw new ArgumentOutOfRangeException(nameof(version), "Version must be greater than zero."); + } + + var normalizedPolicyId = policyId.Trim(); + DateTimeOffset? scheduled = null; + if (!string.IsNullOrWhiteSpace(scheduledAt)) + { + if (!DateTimeOffset.TryParse(scheduledAt, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, out var parsed)) + { + throw new ArgumentException("Scheduled timestamp must be a valid ISO-8601 value.", nameof(scheduledAt)); + } + + scheduled = parsed; + } + + var request = new PolicyActivationRequest( + runNow, + scheduled, + NormalizePolicyPriority(priority), + rollback, + string.IsNullOrWhiteSpace(incidentId) ? null : incidentId.Trim(), + string.IsNullOrWhiteSpace(note) ? null : note.Trim()); + + activity?.SetTag("stellaops.cli.policy_id", normalizedPolicyId); + activity?.SetTag("stellaops.cli.policy_version", version); + if (request.RunNow) + { + activity?.SetTag("stellaops.cli.policy_run_now", true); + } + + if (request.ScheduledAt.HasValue) + { + activity?.SetTag("stellaops.cli.policy_scheduled_at", request.ScheduledAt.Value.ToString("o", CultureInfo.InvariantCulture)); + } + + if (!string.IsNullOrWhiteSpace(request.Priority)) + { + activity?.SetTag("stellaops.cli.policy_priority", request.Priority); + } + + if (request.Rollback) + { + activity?.SetTag("stellaops.cli.policy_rollback", true); + } + + var result = await client.ActivatePolicyRevisionAsync(normalizedPolicyId, version, request, cancellationToken).ConfigureAwait(false); + + var outcome = NormalizePolicyActivationOutcome(result.Status); + CliMetrics.RecordPolicyActivation(outcome); + RenderPolicyActivationResult(result, request); + + var exitCode = DeterminePolicyActivationExitCode(outcome); + Environment.ExitCode = exitCode; + + if (exitCode == 0) + { + logger.LogInformation("Policy {PolicyId} v{Version} activation status: {Status}.", result.Revision.PolicyId, result.Revision.Version, outcome); + } + else + { + logger.LogWarning("Policy {PolicyId} v{Version} requires additional approval (status: {Status}).", result.Revision.PolicyId, result.Revision.Version, outcome); + } + } + catch (ArgumentException ex) + { + logger.LogError(ex.Message); + CliMetrics.RecordPolicyActivation("error"); + Environment.ExitCode = 64; + } + catch (PolicyApiException ex) + { + HandlePolicyActivationFailure(ex, logger); + } + catch (Exception ex) + { + logger.LogError(ex, "Policy activation failed."); + CliMetrics.RecordPolicyActivation("error"); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static async Task HandlePolicySimulateAsync( + IServiceProvider services, + string policyId, + int? baseVersion, + int? candidateVersion, + IReadOnlyList sbomArguments, + IReadOnlyList environmentArguments, + string? format, + string? 
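// --- Editor's sketch (illustrative, not part of the patch) ----------------------------
// How the --scheduled-at value is normalised to UTC in the activation handler above:
// AssumeUniversal treats offset-less input as UTC, and AdjustToUniversal converts
// offset-bearing input to UTC, so "2025-10-28T15:00:00+02:00" and "2025-10-28T13:00:00"
// both yield 13:00 UTC.
using System;
using System.Globalization;

static class ScheduleParseSketch
{
    public static DateTimeOffset ParseScheduledAt(string value)
    {
        if (!DateTimeOffset.TryParse(
                value,
                CultureInfo.InvariantCulture,
                DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal,
                out var parsed))
        {
            throw new ArgumentException("Scheduled timestamp must be a valid ISO-8601 value.", nameof(value));
        }

        return parsed; // already UTC because of AdjustToUniversal
    }
}
// ----------------------------------------------------------------------------------------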
outputPath, + bool explain, + bool failOnDiff, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("policy-simulate"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.policy.simulate", ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", "policy simulate"); + activity?.SetTag("stellaops.cli.policy_id", policyId); + if (baseVersion.HasValue) + { + activity?.SetTag("stellaops.cli.base_version", baseVersion.Value); + } + if (candidateVersion.HasValue) + { + activity?.SetTag("stellaops.cli.candidate_version", candidateVersion.Value); + } + using var duration = CliMetrics.MeasureCommandDuration("policy simulate"); + + try + { + if (string.IsNullOrWhiteSpace(policyId)) + { + throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); + } + + var normalizedPolicyId = policyId.Trim(); + var sbomSet = NormalizePolicySbomSet(sbomArguments); + var environment = ParsePolicyEnvironment(environmentArguments); + + var input = new PolicySimulationInput( + baseVersion, + candidateVersion, + sbomSet, + environment, + explain); + + var result = await client.SimulatePolicyAsync(normalizedPolicyId, input, cancellationToken).ConfigureAwait(false); + + activity?.SetTag("stellaops.cli.diff_added", result.Diff.Added); + activity?.SetTag("stellaops.cli.diff_removed", result.Diff.Removed); + if (result.Diff.BySeverity.Count > 0) + { + activity?.SetTag("stellaops.cli.severity_buckets", result.Diff.BySeverity.Count); + } + + var outputFormat = DeterminePolicySimulationFormat(format, outputPath); + var payload = BuildPolicySimulationPayload(normalizedPolicyId, baseVersion, candidateVersion, sbomSet, environment, result); + + if (!string.IsNullOrWhiteSpace(outputPath)) + { + await WriteSimulationOutputAsync(outputPath!, payload, cancellationToken).ConfigureAwait(false); + logger.LogInformation("Simulation results written to {Path}.", Path.GetFullPath(outputPath!)); + } + + RenderPolicySimulationResult(logger, payload, result, outputFormat); + + var exitCode = DetermineSimulationExitCode(result, failOnDiff); + Environment.ExitCode = exitCode; + + var outcome = exitCode == 20 + ? "diff_blocked" + : (result.Diff.Added + result.Diff.Removed) > 0 ? "diff" : "clean"; + CliMetrics.RecordPolicySimulation(outcome); + + if (exitCode == 20) + { + logger.LogWarning("Differences detected; exiting with code 20 due to --fail-on-diff."); + } + + if (!string.IsNullOrWhiteSpace(result.ExplainUri)) + { + activity?.SetTag("stellaops.cli.explain_uri", result.ExplainUri); + } + } + catch (ArgumentException ex) + { + logger.LogError(ex.Message); + CliMetrics.RecordPolicySimulation("error"); + Environment.ExitCode = 64; + } + catch (PolicyApiException ex) + { + HandlePolicySimulationFailure(ex, logger); + } + catch (Exception ex) + { + logger.LogError(ex, "Policy simulation failed."); + CliMetrics.RecordPolicySimulation("error"); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static async Task HandleOfflineKitImportAsync( + IServiceProvider services, + string bundlePath, + string? manifestPath, + string? bundleSignaturePath, + string? 
manifestSignaturePath, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var options = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("offline-kit-import"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.offline.kit.import", ActivityKind.Client); + using var duration = CliMetrics.MeasureCommandDuration("offline kit import"); + + try + { + if (string.IsNullOrWhiteSpace(bundlePath)) + { + logger.LogError("Bundle path is required."); + Environment.ExitCode = 1; + return; + } + + bundlePath = Path.GetFullPath(bundlePath); + if (!File.Exists(bundlePath)) + { + logger.LogError("Bundle file {Path} not found.", bundlePath); + Environment.ExitCode = 1; + return; + } + + var metadata = await LoadOfflineKitMetadataAsync(bundlePath, cancellationToken).ConfigureAwait(false); + if (metadata is not null) + { + manifestPath ??= metadata.ManifestPath; + bundleSignaturePath ??= metadata.BundleSignaturePath; + manifestSignaturePath ??= metadata.ManifestSignaturePath; + } + + manifestPath = NormalizeFilePath(manifestPath); + bundleSignaturePath = NormalizeFilePath(bundleSignaturePath); + manifestSignaturePath = NormalizeFilePath(manifestSignaturePath); + + if (manifestPath is null) + { + manifestPath = TryInferManifestPath(bundlePath); + if (manifestPath is not null) + { + logger.LogDebug("Using inferred manifest path {Path}.", manifestPath); + } + } + + if (manifestPath is not null && !File.Exists(manifestPath)) + { + logger.LogError("Manifest file {Path} not found.", manifestPath); + Environment.ExitCode = 1; + return; + } + + if (bundleSignaturePath is not null && !File.Exists(bundleSignaturePath)) + { + logger.LogWarning("Bundle signature {Path} not found; skipping.", bundleSignaturePath); + bundleSignaturePath = null; + } + + if (manifestSignaturePath is not null && !File.Exists(manifestSignaturePath)) + { + logger.LogWarning("Manifest signature {Path} not found; skipping.", manifestSignaturePath); + manifestSignaturePath = null; + } + + if (metadata is not null) + { + var computedBundleDigest = await ComputeSha256Async(bundlePath, cancellationToken).ConfigureAwait(false); + if (!DigestsEqual(computedBundleDigest, metadata.BundleSha256)) + { + logger.LogError("Bundle digest mismatch. Expected sha256:{Expected} but computed sha256:{Actual}.", metadata.BundleSha256, computedBundleDigest); + Environment.ExitCode = 1; + return; + } + + if (manifestPath is not null) + { + var computedManifestDigest = await ComputeSha256Async(manifestPath, cancellationToken).ConfigureAwait(false); + if (!DigestsEqual(computedManifestDigest, metadata.ManifestSha256)) + { + logger.LogError("Manifest digest mismatch. 
Expected sha256:{Expected} but computed sha256:{Actual}.", metadata.ManifestSha256, computedManifestDigest); + Environment.ExitCode = 1; + return; + } + } + } + + var request = new OfflineKitImportRequest( + bundlePath, + manifestPath, + bundleSignaturePath, + manifestSignaturePath, + metadata?.BundleId, + metadata?.BundleSha256, + metadata?.BundleSize, + metadata?.CapturedAt, + metadata?.Channel, + metadata?.Kind, + metadata?.IsDelta, + metadata?.BaseBundleId, + metadata?.ManifestSha256, + metadata?.ManifestSize); + + var result = await client.ImportOfflineKitAsync(request, cancellationToken).ConfigureAwait(false); + CliMetrics.RecordOfflineKitImport(result.Status); + + logger.LogInformation( + "Import {ImportId} submitted at {Submitted:u} with status {Status}.", + string.IsNullOrWhiteSpace(result.ImportId) ? "" : result.ImportId, + result.SubmittedAt, + string.IsNullOrWhiteSpace(result.Status) ? "queued" : result.Status); + + if (!string.IsNullOrWhiteSpace(result.Message)) + { + logger.LogInformation(result.Message); + } + + Environment.ExitCode = 0; + } + catch (Exception ex) + { + logger.LogError(ex, "Offline kit import failed."); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + public static async Task HandleOfflineKitStatusAsync( + IServiceProvider services, + bool asJson, + bool verbose, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger("offline-kit-status"); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity("cli.offline.kit.status", ActivityKind.Client); + using var duration = CliMetrics.MeasureCommandDuration("offline kit status"); + + try + { + var status = await client.GetOfflineKitStatusAsync(cancellationToken).ConfigureAwait(false); + + if (asJson) + { + var payload = new + { + bundleId = status.BundleId, + channel = status.Channel, + kind = status.Kind, + isDelta = status.IsDelta, + baseBundleId = status.BaseBundleId, + capturedAt = status.CapturedAt, + importedAt = status.ImportedAt, + sha256 = status.BundleSha256, + sizeBytes = status.BundleSize, + components = status.Components.Select(component => new + { + component.Name, + component.Version, + component.Digest, + component.CapturedAt, + component.SizeBytes + }) + }; + + var json = JsonSerializer.Serialize(payload, new JsonSerializerOptions(JsonSerializerDefaults.Web) { WriteIndented = true }); + Console.WriteLine(json); + } + else + { + if (string.IsNullOrWhiteSpace(status.BundleId)) + { + logger.LogInformation("No offline kit bundle has been imported yet."); + } + else + { + logger.LogInformation( + "Current bundle {BundleId} ({Kind}) captured {Captured:u}, imported {Imported:u}, sha256:{Digest}, size {Size}.", + status.BundleId, + status.Kind ?? "unknown", + status.CapturedAt ?? default, + status.ImportedAt ?? default, + status.BundleSha256 ?? "", + status.BundleSize.HasValue ? 
status.BundleSize.Value.ToString("N0", CultureInfo.InvariantCulture) : ""); + } + + if (status.Components.Count > 0) + { + var table = new Table().AddColumns("Component", "Version", "Digest", "Captured", "Size (bytes)"); + foreach (var component in status.Components) + { + table.AddRow( + component.Name, + string.IsNullOrWhiteSpace(component.Version) ? "-" : component.Version!, + string.IsNullOrWhiteSpace(component.Digest) ? "-" : $"sha256:{component.Digest}", + component.CapturedAt?.ToString("u", CultureInfo.InvariantCulture) ?? "-", + component.SizeBytes.HasValue ? component.SizeBytes.Value.ToString("N0", CultureInfo.InvariantCulture) : "-"); + } + + AnsiConsole.Write(table); + } + } + + Environment.ExitCode = 0; + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to read offline kit status."); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + private static async Task LoadOfflineKitMetadataAsync(string bundlePath, CancellationToken cancellationToken) + { + var metadataPath = bundlePath + ".metadata.json"; + if (!File.Exists(metadataPath)) + { + return null; + } + + try + { + await using var stream = File.OpenRead(metadataPath); + return await JsonSerializer.DeserializeAsync(stream, cancellationToken: cancellationToken).ConfigureAwait(false); + } + catch + { + return null; + } + } + + private static string? NormalizeFilePath(string? path) + { + if (string.IsNullOrWhiteSpace(path)) + { + return null; + } + + return Path.GetFullPath(path); + } + + private static string? TryInferManifestPath(string bundlePath) + { + var directory = Path.GetDirectoryName(bundlePath); + if (string.IsNullOrWhiteSpace(directory)) + { + return null; + } + + var baseName = Path.GetFileName(bundlePath); + if (string.IsNullOrWhiteSpace(baseName)) + { + return null; + } + + baseName = Path.GetFileNameWithoutExtension(baseName); + if (baseName.EndsWith(".tar", StringComparison.OrdinalIgnoreCase)) + { + baseName = Path.GetFileNameWithoutExtension(baseName); + } + + var candidates = new[] + { + Path.Combine(directory, $"offline-manifest-{baseName}.json"), + Path.Combine(directory, "offline-manifest.json") + }; + + foreach (var candidate in candidates) + { + if (File.Exists(candidate)) + { + return Path.GetFullPath(candidate); + } + } + + return Directory.EnumerateFiles(directory, "offline-manifest*.json").FirstOrDefault(); + } + + private static bool DigestsEqual(string computed, string? 
expected) + { + if (string.IsNullOrWhiteSpace(expected)) + { + return true; + } + + return string.Equals(NormalizeDigest(computed), NormalizeDigest(expected), StringComparison.OrdinalIgnoreCase); + } + + private static string NormalizeDigest(string digest) + { + var value = digest.Trim(); + if (value.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase)) + { + value = value.Substring("sha256:".Length); + } + + return value.ToLowerInvariant(); + } + + private static async Task ComputeSha256Async(string path, CancellationToken cancellationToken) + { + await using var stream = File.OpenRead(path); + var hash = await SHA256.HashDataAsync(stream, cancellationToken).ConfigureAwait(false); + return Convert.ToHexString(hash).ToLowerInvariant(); + } + + private static bool TryParseDetachedJws(string value, out string encodedHeader, out string encodedSignature) + { + encodedHeader = string.Empty; + encodedSignature = string.Empty; + + if (string.IsNullOrWhiteSpace(value)) + { + return false; + } + + var parts = value.Split('.'); + if (parts.Length != 3) + { + return false; + } + + encodedHeader = parts[0]; + encodedSignature = parts[2]; + return parts[1].Length == 0; + } + + private static byte[] Base64UrlDecode(string value) + { + var normalized = value.Replace('-', '+').Replace('_', '/'); + var padding = normalized.Length % 4; + if (padding == 2) + { + normalized += "=="; + } + else if (padding == 3) + { + normalized += "="; + } + else if (padding == 1) + { + throw new FormatException("Invalid Base64Url value."); + } + + return Convert.FromBase64String(normalized); + } + + private static CryptoSigningKey CreateVerificationSigningKey( + string keyId, + string algorithm, + string? providerHint, + string keyPem, + string keyPath) + { + if (string.IsNullOrWhiteSpace(keyPem)) + { + throw new InvalidOperationException("Verification key PEM content is empty."); + } + + using var ecdsa = ECDsa.Create(); + ecdsa.ImportFromPem(keyPem); + + var parameters = ecdsa.ExportParameters(includePrivateParameters: false); + if (parameters.D is null || parameters.D.Length == 0) + { + parameters.D = new byte[] { 0x01 }; + } + + var metadata = new Dictionary(StringComparer.OrdinalIgnoreCase) + { + ["source"] = Path.GetFullPath(keyPath), + ["verificationOnly"] = "true" + }; + + return new CryptoSigningKey( + new CryptoKeyReference(keyId, providerHint), + algorithm, + in parameters, + DateTimeOffset.UtcNow, + metadata: metadata); + } + + private static string FormatDuration(TimeSpan duration) + { + if (duration <= TimeSpan.Zero) + { + return "expired"; + } + + if (duration.TotalDays >= 1) + { + var days = (int)duration.TotalDays; + var hours = duration.Hours; + return hours > 0 + ? FormattableString.Invariant($"{days}d {hours}h") + : FormattableString.Invariant($"{days}d"); + } + + if (duration.TotalHours >= 1) + { + return FormattableString.Invariant($"{(int)duration.TotalHours}h {duration.Minutes}m"); + } + + if (duration.TotalMinutes >= 1) + { + return FormattableString.Invariant($"{(int)duration.TotalMinutes}m {duration.Seconds}s"); + } + + return FormattableString.Invariant($"{duration.Seconds}s"); + } + + private static bool TryExtractJwtClaims( + string accessToken, + out Dictionary claims, + out DateTimeOffset? issuedAt, + out DateTimeOffset? 
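// --- Editor's sketch (illustrative, not part of the patch) ----------------------------
// Digest comparison as used for the offline-kit bundle/manifest checks above: both sides
// are normalised by stripping an optional "sha256:" prefix and lower-casing before an
// ordinal-ignore-case compare. SHA256.HashDataAsync requires .NET 7+ (the patch already
// relies on it).
using System;
using System.IO;
using System.Security.Cryptography;
using System.Threading;
using System.Threading.Tasks;

static class DigestSketch
{
    public static async Task<bool> MatchesAsync(string path, string expected, CancellationToken ct = default)
    {
        await using var stream = File.OpenRead(path);
        var computed = Convert.ToHexString(await SHA256.HashDataAsync(stream, ct)).ToLowerInvariant();
        return string.Equals(Normalize(computed), Normalize(expected), StringComparison.OrdinalIgnoreCase);
    }

    private static string Normalize(string digest)
    {
        var value = digest.Trim();
        return value.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase)
            ? value["sha256:".Length..].ToLowerInvariant()
            : value.ToLowerInvariant();
    }
}
// ----------------------------------------------------------------------------------------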
notBefore) + { + claims = new Dictionary(StringComparer.OrdinalIgnoreCase); + issuedAt = null; + notBefore = null; + + if (string.IsNullOrWhiteSpace(accessToken)) + { + return false; + } + + var parts = accessToken.Split('.'); + if (parts.Length < 2) + { + return false; + } + + if (!TryDecodeBase64Url(parts[1], out var payloadBytes)) + { + return false; + } + + try + { + using var document = JsonDocument.Parse(payloadBytes); + foreach (var property in document.RootElement.EnumerateObject()) + { + var value = FormatJsonValue(property.Value); + claims[property.Name] = value; + + if (issuedAt is null && property.NameEquals("iat") && TryParseUnixSeconds(property.Value, out var parsedIat)) + { + issuedAt = parsedIat; + } + + if (notBefore is null && property.NameEquals("nbf") && TryParseUnixSeconds(property.Value, out var parsedNbf)) + { + notBefore = parsedNbf; + } + } + + return true; + } + catch (JsonException) + { + claims.Clear(); + issuedAt = null; + notBefore = null; + return false; + } + } + + private static bool TryDecodeBase64Url(string value, out byte[] bytes) + { + bytes = Array.Empty(); + + if (string.IsNullOrWhiteSpace(value)) + { + return false; + } + + var normalized = value.Replace('-', '+').Replace('_', '/'); + var padding = normalized.Length % 4; + if (padding is 2 or 3) + { + normalized = normalized.PadRight(normalized.Length + (4 - padding), '='); + } + else if (padding == 1) + { + return false; + } + + try + { + bytes = Convert.FromBase64String(normalized); + return true; + } + catch (FormatException) + { + return false; + } + } + + private static string FormatJsonValue(JsonElement element) + { + return element.ValueKind switch + { + JsonValueKind.String => element.GetString() ?? string.Empty, + JsonValueKind.Number => element.TryGetInt64(out var longValue) + ? 
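// --- Editor's sketch (illustrative, not part of the patch) ----------------------------
// The claim helpers above read iat/nbf straight out of an access token without validating
// its signature: the payload is the second dot-separated segment, Base64Url-encoded JSON,
// and the timestamps are Unix seconds. A minimal sketch of that decode path:
using System;
using System.Text.Json;

static class JwtClaimSketch
{
    public static DateTimeOffset? ReadIssuedAt(string accessToken)
    {
        var parts = accessToken.Split('.');
        if (parts.Length < 2)
        {
            return null;
        }

        // Base64Url -> Base64: swap back the URL-safe alphabet and restore '=' padding.
        var normalized = parts[1].Replace('-', '+').Replace('_', '/');
        normalized = normalized.PadRight(normalized.Length + (4 - normalized.Length % 4) % 4, '=');

        try
        {
            using var document = JsonDocument.Parse(Convert.FromBase64String(normalized));
            return document.RootElement.TryGetProperty("iat", out var iat) && iat.TryGetInt64(out var seconds)
                ? DateTimeOffset.FromUnixTimeSeconds(seconds)
                : null;
        }
        catch (Exception ex) when (ex is FormatException or JsonException)
        {
            return null;
        }
    }
}
// ----------------------------------------------------------------------------------------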
longValue.ToString(CultureInfo.InvariantCulture) + : element.GetDouble().ToString(CultureInfo.InvariantCulture), + JsonValueKind.True => "true", + JsonValueKind.False => "false", + JsonValueKind.Null => "null", + JsonValueKind.Array => FormatArray(element), + JsonValueKind.Object => element.GetRawText(), + _ => element.GetRawText() + }; + } + + private static string FormatArray(JsonElement array) + { + var values = new List(); + foreach (var item in array.EnumerateArray()) + { + values.Add(FormatJsonValue(item)); + } + + return string.Join(", ", values); + } + + private static bool TryParseUnixSeconds(JsonElement element, out DateTimeOffset value) + { + value = default; + + if (element.ValueKind == JsonValueKind.Number) + { + if (element.TryGetInt64(out var seconds)) + { + value = DateTimeOffset.FromUnixTimeSeconds(seconds); + return true; + } + + if (element.TryGetDouble(out var doubleValue)) + { + value = DateTimeOffset.FromUnixTimeSeconds((long)doubleValue); + return true; + } + } + + if (element.ValueKind == JsonValueKind.String) + { + var text = element.GetString(); + if (!string.IsNullOrWhiteSpace(text) && long.TryParse(text, NumberStyles.Integer, CultureInfo.InvariantCulture, out var seconds)) + { + value = DateTimeOffset.FromUnixTimeSeconds(seconds); + return true; + } + } + + return false; + } + + private static List CollectAdditionalClaims(Dictionary claims) + { + var result = new List(); + foreach (var pair in claims) + { + if (CommonClaimNames.Contains(pair.Key)) + { + continue; + } + + result.Add(FormattableString.Invariant($"{pair.Key}={pair.Value}")); + } + + result.Sort(StringComparer.OrdinalIgnoreCase); + return result; + } + + private static readonly HashSet CommonClaimNames = new(StringComparer.OrdinalIgnoreCase) + { + "aud", + "client_id", + "exp", + "iat", + "iss", + "nbf", + "scope", + "scopes", + "sub", + "token_type", + "jti" + }; + + private static async Task ExecuteExcititorCommandAsync( + IServiceProvider services, + string commandName, + bool verbose, + IDictionary? activityTags, + Func> operation, + CancellationToken cancellationToken) + { + await using var scope = services.CreateAsyncScope(); + var client = scope.ServiceProvider.GetRequiredService(); + var logger = scope.ServiceProvider.GetRequiredService().CreateLogger(commandName.Replace(' ', '-')); + var verbosity = scope.ServiceProvider.GetRequiredService(); + var previousLevel = verbosity.MinimumLevel; + verbosity.MinimumLevel = verbose ? 
LogLevel.Debug : LogLevel.Information; + using var activity = CliActivitySource.Instance.StartActivity($"cli.{commandName.Replace(' ', '.')}" , ActivityKind.Client); + activity?.SetTag("stellaops.cli.command", commandName); + if (activityTags is not null) + { + foreach (var tag in activityTags) + { + activity?.SetTag(tag.Key, tag.Value); + } + } + using var duration = CliMetrics.MeasureCommandDuration(commandName); + + try + { + var result = await operation(client).ConfigureAwait(false); + if (result.Success) + { + if (!string.IsNullOrWhiteSpace(result.Message)) + { + logger.LogInformation(result.Message); + } + else + { + logger.LogInformation("Operation completed successfully."); + } + + if (!string.IsNullOrWhiteSpace(result.Location)) + { + logger.LogInformation("Location: {Location}", result.Location); + } + + if (result.Payload is JsonElement payload && payload.ValueKind is not JsonValueKind.Undefined and not JsonValueKind.Null) + { + logger.LogDebug("Response payload: {Payload}", payload.ToString()); + } + + Environment.ExitCode = 0; + } + else + { + logger.LogError(string.IsNullOrWhiteSpace(result.Message) ? "Operation failed." : result.Message); + Environment.ExitCode = 1; + } + } + catch (Exception ex) + { + logger.LogError(ex, "Excititor operation failed."); + Environment.ExitCode = 1; + } + finally + { + verbosity.MinimumLevel = previousLevel; + } + } + + private static async Task> GatherImageDigestsAsync( + IReadOnlyList inline, + string? filePath, + CancellationToken cancellationToken) + { + var results = new List(); + var seen = new HashSet(StringComparer.Ordinal); + + void AddCandidates(string? candidate) + { + foreach (var image in SplitImageCandidates(candidate)) + { + if (seen.Add(image)) + { + results.Add(image); + } + } + } + + if (inline is not null) + { + foreach (var entry in inline) + { + AddCandidates(entry); + } + } + + if (!string.IsNullOrWhiteSpace(filePath)) + { + var path = Path.GetFullPath(filePath); + if (!File.Exists(path)) + { + throw new FileNotFoundException("Input file not found.", path); + } + + foreach (var line in File.ReadLines(path)) + { + cancellationToken.ThrowIfCancellationRequested(); + AddCandidates(line); + } + } + + if (Console.IsInputRedirected) + { + while (!cancellationToken.IsCancellationRequested) + { + var line = await Console.In.ReadLineAsync().ConfigureAwait(false); + if (line is null) + { + break; + } + + AddCandidates(line); + } + } + + return new ReadOnlyCollection(results); + } + + private static IEnumerable SplitImageCandidates(string? 
raw) + { + if (string.IsNullOrWhiteSpace(raw)) + { + yield break; + } + + var candidate = raw.Trim(); + var commentIndex = candidate.IndexOf('#'); + if (commentIndex >= 0) + { + candidate = candidate[..commentIndex].Trim(); + } + + if (candidate.Length == 0) + { + yield break; + } + + var tokens = candidate.Split(new[] { ',', ' ', '\t' }, StringSplitOptions.RemoveEmptyEntries); + foreach (var token in tokens) + { + var trimmed = token.Trim(); + if (trimmed.Length > 0) + { + yield return trimmed; + } + } + } + + private static IReadOnlyDictionary ParseLabelSelectors(IReadOnlyList labelArguments) + { + if (labelArguments is null || labelArguments.Count == 0) + { + return EmptyLabelSelectors; + } + + var labels = new Dictionary(StringComparer.OrdinalIgnoreCase); + foreach (var raw in labelArguments) + { + if (string.IsNullOrWhiteSpace(raw)) + { + continue; + } + + var trimmed = raw.Trim(); + var delimiter = trimmed.IndexOf('='); + if (delimiter <= 0 || delimiter == trimmed.Length - 1) + { + throw new ArgumentException($"Invalid label '{raw}'. Expected key=value format."); + } + + var key = trimmed[..delimiter].Trim(); + var value = trimmed[(delimiter + 1)..].Trim(); + if (key.Length == 0) + { + throw new ArgumentException($"Invalid label '{raw}'. Label key cannot be empty."); + } + + labels[key] = value; + } + + return labels.Count == 0 ? EmptyLabelSelectors : new ReadOnlyDictionary(labels); + } + + private sealed record ExcititorExportManifestSummary( + string ExportId, + string? Format, + string? Algorithm, + string? Digest, + long? SizeBytes, + bool? FromCache, + DateTimeOffset? CreatedAt, + string? RekorLocation, + string? RekorIndex, + string? RekorInclusionUrl); + + private static ExcititorExportManifestSummary? TryParseExportManifest(JsonElement? payload) + { + if (payload is null || payload.Value.ValueKind is JsonValueKind.Undefined or JsonValueKind.Null) + { + return null; + } + + var element = payload.Value; + var exportId = GetStringProperty(element, "exportId"); + if (string.IsNullOrWhiteSpace(exportId)) + { + return null; + } + + var format = GetStringProperty(element, "format"); + var algorithm = default(string?); + var digest = default(string?); + + if (TryGetPropertyCaseInsensitive(element, "artifact", out var artifact) && artifact.ValueKind == JsonValueKind.Object) + { + algorithm = GetStringProperty(artifact, "algorithm"); + digest = GetStringProperty(artifact, "digest"); + } + + var sizeBytes = GetInt64Property(element, "sizeBytes"); + var fromCache = GetBooleanProperty(element, "fromCache"); + var createdAt = GetDateTimeOffsetProperty(element, "createdAt"); + + string? rekorLocation = null; + string? rekorIndex = null; + string? 
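// --- Editor's sketch (illustrative, not part of the patch) ----------------------------
// How image arguments and --file lines are normalised by the helpers above: '#' starts a
// comment, commas/whitespace separate entries, and duplicates are dropped while keeping
// first-seen order.
using System;
using System.Collections.Generic;

static class ImageListSketch
{
    public static IReadOnlyList<string> Parse(IEnumerable<string> lines)
    {
        var seen = new HashSet<string>(StringComparer.Ordinal);
        var results = new List<string>();

        foreach (var raw in lines)
        {
            var candidate = raw;
            var comment = candidate.IndexOf('#');
            if (comment >= 0)
            {
                candidate = candidate[..comment];
            }

            foreach (var token in candidate.Split(new[] { ',', ' ', '\t' }, StringSplitOptions.RemoveEmptyEntries))
            {
                var trimmed = token.Trim();
                if (trimmed.Length > 0 && seen.Add(trimmed))
                {
                    results.Add(trimmed);
                }
            }
        }

        return results;
    }
}
// e.g. Parse(new[] { "alpine:3.19, nginx:1.25 # mirrors", "alpine:3.19" })
//      -> ["alpine:3.19", "nginx:1.25"]
// ----------------------------------------------------------------------------------------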
rekorInclusion = null; + + if (TryGetPropertyCaseInsensitive(element, "attestation", out var attestation) && attestation.ValueKind == JsonValueKind.Object) + { + if (TryGetPropertyCaseInsensitive(attestation, "rekor", out var rekor) && rekor.ValueKind == JsonValueKind.Object) + { + rekorLocation = GetStringProperty(rekor, "location"); + rekorIndex = GetStringProperty(rekor, "logIndex"); + var inclusion = GetStringProperty(rekor, "inclusionProofUri"); + if (!string.IsNullOrWhiteSpace(inclusion)) + { + rekorInclusion = inclusion; + } + } + } + + return new ExcititorExportManifestSummary( + exportId.Trim(), + format, + algorithm, + digest, + sizeBytes, + fromCache, + createdAt, + rekorLocation, + rekorIndex, + rekorInclusion); + } + + private static bool TryGetPropertyCaseInsensitive(JsonElement element, string propertyName, out JsonElement property) + { + if (element.ValueKind == JsonValueKind.Object && element.TryGetProperty(propertyName, out property)) + { + return true; + } + + if (element.ValueKind == JsonValueKind.Object) + { + foreach (var candidate in element.EnumerateObject()) + { + if (string.Equals(candidate.Name, propertyName, StringComparison.OrdinalIgnoreCase)) + { + property = candidate.Value; + return true; + } + } + } + + property = default; + return false; + } + + private static string? GetStringProperty(JsonElement element, string propertyName) + { + if (TryGetPropertyCaseInsensitive(element, propertyName, out var property)) + { + return property.ValueKind switch + { + JsonValueKind.String => property.GetString(), + JsonValueKind.Number => property.ToString(), + _ => null + }; + } + + return null; + } + + private static bool? GetBooleanProperty(JsonElement element, string propertyName) + { + if (TryGetPropertyCaseInsensitive(element, propertyName, out var property)) + { + return property.ValueKind switch + { + JsonValueKind.True => true, + JsonValueKind.False => false, + JsonValueKind.String when bool.TryParse(property.GetString(), out var parsed) => parsed, + _ => null + }; + } + + return null; + } + + private static long? GetInt64Property(JsonElement element, string propertyName) + { + if (TryGetPropertyCaseInsensitive(element, propertyName, out var property)) + { + if (property.ValueKind == JsonValueKind.Number && property.TryGetInt64(out var value)) + { + return value; + } + + if (property.ValueKind == JsonValueKind.String + && long.TryParse(property.GetString(), NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsed)) + { + return parsed; + } + } + + return null; + } + + private static DateTimeOffset? GetDateTimeOffsetProperty(JsonElement element, string propertyName) + { + if (TryGetPropertyCaseInsensitive(element, propertyName, out var property) + && property.ValueKind == JsonValueKind.String + && DateTimeOffset.TryParse(property.GetString(), CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal, out var value)) + { + return value.ToUniversalTime(); + } + + return null; + } + + private static string BuildDigestDisplay(string? 
algorithm, string digest) + { + if (string.IsNullOrWhiteSpace(digest)) + { + return string.Empty; + } + + if (digest.Contains(':', StringComparison.Ordinal)) + { + return digest; + } + + if (string.IsNullOrWhiteSpace(algorithm) || algorithm.Equals("sha256", StringComparison.OrdinalIgnoreCase)) + { + return $"sha256:{digest}"; + } + + return $"{algorithm}:{digest}"; + } + + private static string FormatSize(long sizeBytes) + { + if (sizeBytes < 0) + { + return $"{sizeBytes} bytes"; + } + + string[] units = { "bytes", "KB", "MB", "GB", "TB" }; + double size = sizeBytes; + var unit = 0; + + while (size >= 1024 && unit < units.Length - 1) + { + size /= 1024; + unit++; + } + + return unit == 0 ? $"{sizeBytes} bytes" : $"{size:0.##} {units[unit]}"; + } + + private static string ResolveExportOutputPath(string outputPath, ExcititorExportManifestSummary manifest) + { + if (string.IsNullOrWhiteSpace(outputPath)) + { + throw new ArgumentException("Output path must be provided.", nameof(outputPath)); + } + + var fullPath = Path.GetFullPath(outputPath); + if (Directory.Exists(fullPath) + || outputPath.EndsWith(Path.DirectorySeparatorChar.ToString(), StringComparison.Ordinal) + || outputPath.EndsWith(Path.AltDirectorySeparatorChar.ToString(), StringComparison.Ordinal)) + { + return Path.Combine(fullPath, BuildExportFileName(manifest)); + } + + var directory = Path.GetDirectoryName(fullPath); + if (!string.IsNullOrEmpty(directory) && !Directory.Exists(directory)) + { + Directory.CreateDirectory(directory); + } + + return fullPath; + } + + private static string BuildExportFileName(ExcititorExportManifestSummary manifest) + { + var token = !string.IsNullOrWhiteSpace(manifest.Digest) + ? manifest.Digest! + : manifest.ExportId; + + token = SanitizeToken(token); + if (token.Length > 40) + { + token = token[..40]; + } + + var extension = DetermineExportExtension(manifest.Format); + return $"stellaops-excititor-{token}{extension}"; + } + + private static string DetermineExportExtension(string? format) + { + if (string.IsNullOrWhiteSpace(format)) + { + return ".bin"; + } + + return format switch + { + not null when format.Equals("jsonl", StringComparison.OrdinalIgnoreCase) => ".jsonl", + not null when format.Equals("json", StringComparison.OrdinalIgnoreCase) => ".json", + not null when format.Equals("openvex", StringComparison.OrdinalIgnoreCase) => ".json", + not null when format.Equals("csaf", StringComparison.OrdinalIgnoreCase) => ".json", + _ => ".bin" + }; + } + + private static string SanitizeToken(string token) + { + var builder = new StringBuilder(token.Length); + foreach (var ch in token) + { + if (char.IsLetterOrDigit(ch)) + { + builder.Append(char.ToLowerInvariant(ch)); + } + } + + if (builder.Length == 0) + { + builder.Append("export"); + } + + return builder.ToString(); + } + + private static string? 
ResolveLocationUrl(StellaOpsCliOptions options, string location) + { + if (string.IsNullOrWhiteSpace(location)) + { + return null; + } + + if (Uri.TryCreate(location, UriKind.Absolute, out var absolute)) + { + return absolute.ToString(); + } + + if (!string.IsNullOrWhiteSpace(options?.BackendUrl) && Uri.TryCreate(options.BackendUrl, UriKind.Absolute, out var baseUri)) + { + if (!location.StartsWith("/", StringComparison.Ordinal)) + { + location = "/" + location; + } + + return new Uri(baseUri, location).ToString(); + } + + return location; + } + + private static string BuildRuntimePolicyJson(RuntimePolicyEvaluationResult result, IReadOnlyList requestedImages) + { + var orderedImages = BuildImageOrder(requestedImages, result.Decisions.Keys); + var results = new Dictionary(StringComparer.Ordinal); + + foreach (var image in orderedImages) + { + if (result.Decisions.TryGetValue(image, out var decision)) + { + results[image] = BuildDecisionMap(decision); + } + } + + var options = new JsonSerializerOptions(JsonSerializerDefaults.Web) + { + WriteIndented = true, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull + }; + + var payload = new Dictionary(StringComparer.Ordinal) + { + ["ttlSeconds"] = result.TtlSeconds, + ["expiresAtUtc"] = result.ExpiresAtUtc?.ToString("O", CultureInfo.InvariantCulture), + ["policyRevision"] = result.PolicyRevision, + ["results"] = results + }; + + return JsonSerializer.Serialize(payload, options); + } + + private static IDictionary BuildDecisionMap(RuntimePolicyImageDecision decision) + { + var map = new Dictionary(StringComparer.Ordinal) + { + ["policyVerdict"] = decision.PolicyVerdict, + ["signed"] = decision.Signed, + ["hasSbomReferrers"] = decision.HasSbomReferrers + }; + + if (decision.Reasons.Count > 0) + { + map["reasons"] = decision.Reasons; + } + + if (decision.Rekor is not null) + { + var rekorMap = new Dictionary(StringComparer.Ordinal); + if (!string.IsNullOrWhiteSpace(decision.Rekor.Uuid)) + { + rekorMap["uuid"] = decision.Rekor.Uuid; + } + + if (!string.IsNullOrWhiteSpace(decision.Rekor.Url)) + { + rekorMap["url"] = decision.Rekor.Url; + } + + if (decision.Rekor.Verified.HasValue) + { + rekorMap["verified"] = decision.Rekor.Verified; + } + + if (rekorMap.Count > 0) + { + map["rekor"] = rekorMap; + } + } + + foreach (var kvp in decision.AdditionalProperties) + { + map[kvp.Key] = kvp.Value; + } + + return map; + } + + private static void DisplayRuntimePolicyResults(ILogger logger, RuntimePolicyEvaluationResult result, IReadOnlyList requestedImages) + { + var orderedImages = BuildImageOrder(requestedImages, result.Decisions.Keys); + var summary = new Dictionary(StringComparer.OrdinalIgnoreCase); + + if (AnsiConsole.Profile.Capabilities.Interactive) + { + var table = new Table().Border(TableBorder.Rounded) + .AddColumns("Image", "Verdict", "Signed", "SBOM Ref", "Quieted", "Confidence", "Reasons", "Attestation"); + + foreach (var image in orderedImages) + { + if (result.Decisions.TryGetValue(image, out var decision)) + { + table.AddRow( + image, + decision.PolicyVerdict, + FormatBoolean(decision.Signed), + FormatBoolean(decision.HasSbomReferrers), + FormatQuietedDisplay(decision.AdditionalProperties), + FormatConfidenceDisplay(decision.AdditionalProperties), + decision.Reasons.Count > 0 ? string.Join(Environment.NewLine, decision.Reasons) : "-", + FormatAttestation(decision.Rekor)); + + summary[decision.PolicyVerdict] = summary.TryGetValue(decision.PolicyVerdict, out var count) ? 
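// --- Editor's sketch (illustrative, not part of the patch) ----------------------------
// ResolveLocationUrl above keeps absolute URLs as-is and otherwise joins the relative
// location onto the configured backend URL. The backend address below is a placeholder.
using System;

static class LocationSketch
{
    public static string Resolve(string? backendUrl, string location)
    {
        if (Uri.TryCreate(location, UriKind.Absolute, out var absolute))
        {
            return absolute.ToString();
        }

        if (!string.IsNullOrWhiteSpace(backendUrl) && Uri.TryCreate(backendUrl, UriKind.Absolute, out var baseUri))
        {
            var relative = location.StartsWith("/", StringComparison.Ordinal) ? location : "/" + location;
            return new Uri(baseUri, relative).ToString();
        }

        return location;
    }
}
// e.g. Resolve("https://backend.example", "excititor/exports/123")
//      -> "https://backend.example/excititor/exports/123"
// ----------------------------------------------------------------------------------------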
count + 1 : 1; + + if (decision.AdditionalProperties.Count > 0) + { + var metadata = string.Join(", ", decision.AdditionalProperties.Select(kvp => $"{kvp.Key}={FormatAdditionalValue(kvp.Value)}")); + logger.LogDebug("Metadata for {Image}: {Metadata}", image, metadata); + } + } + else + { + table.AddRow(image, "", "-", "-", "-", "-", "-", "-"); + } + } + + AnsiConsole.Write(table); + } + else + { + foreach (var image in orderedImages) + { + if (result.Decisions.TryGetValue(image, out var decision)) + { + var reasons = decision.Reasons.Count > 0 ? string.Join(", ", decision.Reasons) : "none"; + logger.LogInformation( + "{Image} -> verdict={Verdict} signed={Signed} sbomRef={Sbom} quieted={Quieted} confidence={Confidence} attestation={Attestation} reasons={Reasons}", + image, + decision.PolicyVerdict, + FormatBoolean(decision.Signed), + FormatBoolean(decision.HasSbomReferrers), + FormatQuietedDisplay(decision.AdditionalProperties), + FormatConfidenceDisplay(decision.AdditionalProperties), + FormatAttestation(decision.Rekor), + reasons); + + summary[decision.PolicyVerdict] = summary.TryGetValue(decision.PolicyVerdict, out var count) ? count + 1 : 1; + + if (decision.AdditionalProperties.Count > 0) + { + var metadata = string.Join(", ", decision.AdditionalProperties.Select(kvp => $"{kvp.Key}={FormatAdditionalValue(kvp.Value)}")); + logger.LogDebug("Metadata for {Image}: {Metadata}", image, metadata); + } + } + else + { + logger.LogWarning("{Image} -> no decision returned by backend.", image); + } + } + } + + if (summary.Count > 0) + { + var summaryText = string.Join(", ", summary.Select(kvp => $"{kvp.Key}:{kvp.Value}")); + logger.LogInformation("Verdict summary: {Summary}", summaryText); + } + } + + private static IReadOnlyList BuildImageOrder(IReadOnlyList requestedImages, IEnumerable actual) + { + var order = new List(); + var seen = new HashSet(StringComparer.Ordinal); + + if (requestedImages is not null) + { + foreach (var image in requestedImages) + { + if (!string.IsNullOrWhiteSpace(image)) + { + var trimmed = image.Trim(); + if (seen.Add(trimmed)) + { + order.Add(trimmed); + } + } + } + } + + foreach (var image in actual) + { + if (!string.IsNullOrWhiteSpace(image)) + { + var trimmed = image.Trim(); + if (seen.Add(trimmed)) + { + order.Add(trimmed); + } + } + } + + return new ReadOnlyCollection(order); + } + + private static string FormatBoolean(bool? value) + => value is null ? "unknown" : value.Value ? "yes" : "no"; + + private static string FormatQuietedDisplay(IReadOnlyDictionary metadata) + { + var quieted = GetMetadataBoolean(metadata, "quieted", "quiet"); + var quietedBy = GetMetadataString(metadata, "quietedBy", "quietedReason"); + + if (quieted is true) + { + return string.IsNullOrWhiteSpace(quietedBy) ? "yes" : $"yes ({quietedBy})"; + } + + if (quieted is false) + { + return "no"; + } + + return string.IsNullOrWhiteSpace(quietedBy) ? "-" : $"? 
({quietedBy})"; + } + + private static string FormatConfidenceDisplay(IReadOnlyDictionary metadata) + { + var confidence = GetMetadataDouble(metadata, "confidence"); + var confidenceBand = GetMetadataString(metadata, "confidenceBand", "confidenceTier"); + + if (confidence.HasValue && !string.IsNullOrWhiteSpace(confidenceBand)) + { + return string.Format(CultureInfo.InvariantCulture, "{0:0.###} ({1})", confidence.Value, confidenceBand); + } + + if (confidence.HasValue) + { + return confidence.Value.ToString("0.###", CultureInfo.InvariantCulture); + } + + if (!string.IsNullOrWhiteSpace(confidenceBand)) + { + return confidenceBand!; + } + + return "-"; + } + + private static string FormatAttestation(RuntimePolicyRekorReference? rekor) + { + if (rekor is null) + { + return "-"; + } + + var uuid = string.IsNullOrWhiteSpace(rekor.Uuid) ? null : rekor.Uuid; + var url = string.IsNullOrWhiteSpace(rekor.Url) ? null : rekor.Url; + var verified = rekor.Verified; + + var core = uuid ?? url; + if (!string.IsNullOrEmpty(core)) + { + if (verified.HasValue) + { + var suffix = verified.Value ? " (verified)" : " (unverified)"; + return core + suffix; + } + + return core!; + } + + if (verified.HasValue) + { + return verified.Value ? "verified" : "unverified"; + } + + return "-"; + } + + private static bool? GetMetadataBoolean(IReadOnlyDictionary metadata, params string[] keys) + { + foreach (var key in keys) + { + if (metadata.TryGetValue(key, out var value) && value is not null) + { + switch (value) + { + case bool b: + return b; + case string s when bool.TryParse(s, out var parsed): + return parsed; + } + } + } + + return null; + } + + private static string? GetMetadataString(IReadOnlyDictionary metadata, params string[] keys) + { + foreach (var key in keys) + { + if (metadata.TryGetValue(key, out var value) && value is not null) + { + if (value is string s) + { + return string.IsNullOrWhiteSpace(s) ? null : s; + } + } + } + + return null; + } + + private static double? GetMetadataDouble(IReadOnlyDictionary metadata, params string[] keys) + { + foreach (var key in keys) + { + if (metadata.TryGetValue(key, out var value) && value is not null) + { + switch (value) + { + case double d: + return d; + case float f: + return f; + case decimal m: + return (double)m; + case long l: + return l; + case int i: + return i; + case string s when double.TryParse(s, NumberStyles.Float | NumberStyles.AllowThousands, CultureInfo.InvariantCulture, out var parsed): + return parsed; + } + } + } + + return null; + } + + private static PolicySimulationOutputFormat DeterminePolicySimulationFormat(string? value, string? outputPath) + { + if (!string.IsNullOrWhiteSpace(value)) + { + return value.Trim().ToLowerInvariant() switch + { + "table" => PolicySimulationOutputFormat.Table, + "json" => PolicySimulationOutputFormat.Json, + _ => throw new ArgumentException("Invalid format. Use 'table' or 'json'.") + }; + } + + if (!string.IsNullOrWhiteSpace(outputPath) || Console.IsOutputRedirected) + { + return PolicySimulationOutputFormat.Json; + } + + return PolicySimulationOutputFormat.Table; + } + + private static object BuildPolicySimulationPayload( + string policyId, + int? baseVersion, + int? candidateVersion, + IReadOnlyList sbomSet, + IReadOnlyDictionary environment, + PolicySimulationResult result) + => new + { + policyId, + baseVersion, + candidateVersion, + sbomSet = sbomSet.Count == 0 ? Array.Empty() : sbomSet, + environment = environment.Count == 0 ? 
null : environment, + diff = result.Diff, + explainUri = result.ExplainUri + }; + + private static void RenderPolicySimulationResult( + ILogger logger, + object payload, + PolicySimulationResult result, + PolicySimulationOutputFormat format) + { + if (format == PolicySimulationOutputFormat.Json) + { + var json = JsonSerializer.Serialize(payload, SimulationJsonOptions); + Console.WriteLine(json); + return; + } + + logger.LogInformation( + "Policy diff summary — Added: {Added}, Removed: {Removed}, Unchanged: {Unchanged}.", + result.Diff.Added, + result.Diff.Removed, + result.Diff.Unchanged); + + if (result.Diff.BySeverity.Count > 0) + { + if (AnsiConsole.Profile.Capabilities.Interactive) + { + var table = new Table().AddColumns("Severity", "Up", "Down"); + foreach (var entry in result.Diff.BySeverity.OrderBy(kvp => kvp.Key, StringComparer.Ordinal)) + { + table.AddRow( + entry.Key, + FormatDelta(entry.Value.Up), + FormatDelta(entry.Value.Down)); + } + + AnsiConsole.Write(table); + } + else + { + foreach (var entry in result.Diff.BySeverity.OrderBy(kvp => kvp.Key, StringComparer.Ordinal)) + { + logger.LogInformation("Severity {Severity}: up={Up}, down={Down}", entry.Key, entry.Value.Up ?? 0, entry.Value.Down ?? 0); + } + } + } + + if (result.Diff.RuleHits.Count > 0) + { + if (AnsiConsole.Profile.Capabilities.Interactive) + { + var table = new Table().AddColumns("Rule", "Up", "Down"); + foreach (var hit in result.Diff.RuleHits) + { + table.AddRow( + string.IsNullOrWhiteSpace(hit.RuleName) ? hit.RuleId : $"{hit.RuleName} ({hit.RuleId})", + FormatDelta(hit.Up), + FormatDelta(hit.Down)); + } + + AnsiConsole.Write(table); + } + else + { + foreach (var hit in result.Diff.RuleHits) + { + logger.LogInformation("Rule {RuleId}: up={Up}, down={Down}", hit.RuleId, hit.Up ?? 0, hit.Down ?? 0); + } + } + } + + if (!string.IsNullOrWhiteSpace(result.ExplainUri)) + { + logger.LogInformation("Explain trace available at {ExplainUri}.", result.ExplainUri); + } + } + + private static IReadOnlyList NormalizePolicySbomSet(IReadOnlyList arguments) + { + if (arguments is null || arguments.Count == 0) + { + return EmptyPolicySbomSet; + } + + var set = new SortedSet(StringComparer.Ordinal); + foreach (var raw in arguments) + { + if (string.IsNullOrWhiteSpace(raw)) + { + continue; + } + + var trimmed = raw.Trim(); + if (trimmed.Length > 0) + { + set.Add(trimmed); + } + } + + if (set.Count == 0) + { + return EmptyPolicySbomSet; + } + + var list = set.ToList(); + return new ReadOnlyCollection(list); + } + + private static IReadOnlyDictionary ParsePolicyEnvironment(IReadOnlyList arguments) + { + if (arguments is null || arguments.Count == 0) + { + return EmptyPolicyEnvironment; + } + + var env = new SortedDictionary(StringComparer.Ordinal); + foreach (var raw in arguments) + { + if (string.IsNullOrWhiteSpace(raw)) + { + continue; + } + + var trimmed = raw.Trim(); + var separator = trimmed.IndexOf('='); + if (separator <= 0 || separator == trimmed.Length - 1) + { + throw new ArgumentException($"Invalid environment assignment '{raw}'. Expected key=value."); + } + + var key = trimmed[..separator].Trim().ToLowerInvariant(); + if (string.IsNullOrWhiteSpace(key)) + { + throw new ArgumentException($"Invalid environment assignment '{raw}'. Expected key=value."); + } + + var valueToken = trimmed[(separator + 1)..].Trim(); + env[key] = ParsePolicyEnvironmentValue(valueToken); + } + + return env.Count == 0 ? EmptyPolicyEnvironment : new ReadOnlyDictionary(env); + } + + private static object? 
ParsePolicyEnvironmentValue(string token) + { + if (string.IsNullOrWhiteSpace(token)) + { + return string.Empty; + } + + var value = token; + if ((value.Length >= 2 && value.StartsWith("\"", StringComparison.Ordinal) && value.EndsWith("\"", StringComparison.Ordinal)) || + (value.Length >= 2 && value.StartsWith("'", StringComparison.Ordinal) && value.EndsWith("'", StringComparison.Ordinal))) + { + value = value[1..^1]; + } + + if (string.Equals(value, "null", StringComparison.OrdinalIgnoreCase)) + { + return null; + } + + if (bool.TryParse(value, out var boolResult)) + { + return boolResult; + } + + if (long.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var longResult)) + { + return longResult; + } + + if (double.TryParse(value, NumberStyles.Float | NumberStyles.AllowThousands, CultureInfo.InvariantCulture, out var doubleResult)) + { + return doubleResult; + } + + return value; + } + + private static Task WriteSimulationOutputAsync(string outputPath, object payload, CancellationToken cancellationToken) + => WriteJsonPayloadAsync(outputPath, payload, cancellationToken); + + private static async Task WriteJsonPayloadAsync(string outputPath, object payload, CancellationToken cancellationToken) + { + var fullPath = Path.GetFullPath(outputPath); + var directory = Path.GetDirectoryName(fullPath); + if (!string.IsNullOrWhiteSpace(directory)) + { + Directory.CreateDirectory(directory); + } + + var json = JsonSerializer.Serialize(payload, SimulationJsonOptions); + await File.WriteAllTextAsync(fullPath, json + Environment.NewLine, cancellationToken).ConfigureAwait(false); + } + + private static int DetermineSimulationExitCode(PolicySimulationResult result, bool failOnDiff) + { + if (!failOnDiff) + { + return 0; + } + + return (result.Diff.Added + result.Diff.Removed) > 0 ? 
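// --- Editor's sketch (illustrative, not part of the patch) ----------------------------
// Expected coercions from the --env value parser above, expressed as checks: surrounding
// quotes are stripped, then "null", booleans, integers and doubles are tried before
// falling back to the raw string. The Coerce helper is a condensed re-statement of that
// chain, written only to make the demo self-contained.
using System;
using System.Diagnostics;
using System.Globalization;

static class EnvCoercionSketch
{
    public static void Demo()
    {
        Debug.Assert(Coerce("'internal'") is string s && s == "internal"); // quotes stripped
        Debug.Assert(Coerce("true") is bool b && b);
        Debug.Assert(Coerce("30") is long l && l == 30);
        Debug.Assert(Coerce("0.75") is double d && d == 0.75);
        Debug.Assert(Coerce("null") is null);
    }

    private static object? Coerce(string value)
    {
        if (value.Length >= 2 && ((value[0] == '\'' && value[^1] == '\'') || (value[0] == '"' && value[^1] == '"')))
        {
            value = value[1..^1];
        }

        if (string.Equals(value, "null", StringComparison.OrdinalIgnoreCase)) return null;
        if (bool.TryParse(value, out var boolResult)) return boolResult;
        if (long.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var longResult)) return longResult;
        if (double.TryParse(value, NumberStyles.Float | NumberStyles.AllowThousands, CultureInfo.InvariantCulture, out var doubleResult)) return doubleResult;
        return value;
    }
}
// ----------------------------------------------------------------------------------------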
20 : 0; + } + + private static void HandlePolicySimulationFailure(PolicyApiException exception, ILogger logger) + { + var exitCode = exception.ErrorCode switch + { + "ERR_POL_001" => 10, + "ERR_POL_002" or "ERR_POL_005" => 12, + "ERR_POL_003" => 21, + "ERR_POL_004" => 22, + "ERR_POL_006" => 23, + _ when exception.StatusCode == HttpStatusCode.Forbidden || exception.StatusCode == HttpStatusCode.Unauthorized => 12, + _ => 1 + }; + + if (string.IsNullOrWhiteSpace(exception.ErrorCode)) + { + logger.LogError("Policy simulation failed ({StatusCode}): {Message}", (int)exception.StatusCode, exception.Message); + } + else + { + logger.LogError("Policy simulation failed ({StatusCode} {Code}): {Message}", (int)exception.StatusCode, exception.ErrorCode, exception.Message); + } + + CliMetrics.RecordPolicySimulation("error"); + Environment.ExitCode = exitCode; + } + + private static void HandlePolicyActivationFailure(PolicyApiException exception, ILogger logger) + { + var exitCode = exception.ErrorCode switch + { + "ERR_POL_002" => 70, + "ERR_POL_003" => 71, + "ERR_POL_004" => 72, + _ when exception.StatusCode == HttpStatusCode.Forbidden || exception.StatusCode == HttpStatusCode.Unauthorized => 12, + _ => 1 + }; + + if (string.IsNullOrWhiteSpace(exception.ErrorCode)) + { + logger.LogError("Policy activation failed ({StatusCode}): {Message}", (int)exception.StatusCode, exception.Message); + } + else + { + logger.LogError("Policy activation failed ({StatusCode} {Code}): {Message}", (int)exception.StatusCode, exception.ErrorCode, exception.Message); + } + + CliMetrics.RecordPolicyActivation("error"); + Environment.ExitCode = exitCode; + } + + private static IReadOnlyList NormalizePolicyFilterValues(string[] values, bool toLower = false) + { + if (values is null || values.Length == 0) + { + return Array.Empty(); + } + + var set = new HashSet(StringComparer.OrdinalIgnoreCase); + var list = new List(); + foreach (var raw in values) + { + var candidate = raw?.Trim(); + if (string.IsNullOrWhiteSpace(candidate)) + { + continue; + } + + var normalized = toLower ? candidate.ToLowerInvariant() : candidate; + if (set.Add(normalized)) + { + list.Add(normalized); + } + } + + return list.Count == 0 ? Array.Empty() : list; + } + + private static string? NormalizePolicyPriority(string? priority) + { + if (string.IsNullOrWhiteSpace(priority)) + { + return null; + } + + var normalized = priority.Trim(); + return string.IsNullOrWhiteSpace(normalized) ? null : normalized.ToLowerInvariant(); + } + + private static string NormalizePolicyActivationOutcome(string status) + { + if (string.IsNullOrWhiteSpace(status)) + { + return "unknown"; + } + + return status.Trim().ToLowerInvariant(); + } + + private static int DeterminePolicyActivationExitCode(string outcome) + => string.Equals(outcome, "pending_second_approval", StringComparison.Ordinal) ? 75 : 0; + + private static void RenderPolicyActivationResult(PolicyActivationResult result, PolicyActivationRequest request) + { + if (AnsiConsole.Profile.Capabilities.Interactive) + { + var summary = new Table().Expand(); + summary.Border(TableBorder.Rounded); + summary.AddColumn(new TableColumn("[grey]Field[/]").LeftAligned()); + summary.AddColumn(new TableColumn("[grey]Value[/]").LeftAligned()); + summary.AddRow("Policy", Markup.Escape($"{result.Revision.PolicyId} v{result.Revision.Version}")); + summary.AddRow("Status", FormatActivationStatus(result.Status)); + summary.AddRow("Requires 2 approvals", result.Revision.RequiresTwoPersonApproval ? 
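// --- Editor's note (derived from the handlers in this patch; not exhaustive) ------------
// Exit codes wired up above:
//    0  success (including a clean simulation)
//    1  unexpected failure
//   10  simulate: ERR_POL_001
//   12  ERR_POL_002 / ERR_POL_005 on simulate, or a 401/403 from the Policy API
//   20  simulate: --fail-on-diff set and the diff reported added/removed findings
//   21  simulate: ERR_POL_003          70  activate: ERR_POL_002
//   22  simulate: ERR_POL_004          71  activate: ERR_POL_003
//   23  simulate: ERR_POL_006          72  activate: ERR_POL_004
//   64  invalid command-line arguments (ArgumentException)
//   75  activation left in pending_second_approval
// ----------------------------------------------------------------------------------------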
"[yellow]yes[/]" : "[green]no[/]"); + summary.AddRow("Created (UTC)", Markup.Escape(FormatUpdatedAt(result.Revision.CreatedAt))); + summary.AddRow("Activated (UTC)", result.Revision.ActivatedAt.HasValue + ? Markup.Escape(FormatUpdatedAt(result.Revision.ActivatedAt.Value)) + : "[grey](not yet active)[/]"); + + if (request.RunNow) + { + summary.AddRow("Run", "[green]immediate[/]"); + } + else if (request.ScheduledAt.HasValue) + { + summary.AddRow("Scheduled at", Markup.Escape(FormatUpdatedAt(request.ScheduledAt.Value))); + } + + if (!string.IsNullOrWhiteSpace(request.Priority)) + { + summary.AddRow("Priority", Markup.Escape(request.Priority!)); + } + + if (request.Rollback) + { + summary.AddRow("Rollback", "[yellow]yes[/]"); + } + + if (!string.IsNullOrWhiteSpace(request.IncidentId)) + { + summary.AddRow("Incident", Markup.Escape(request.IncidentId!)); + } + + if (!string.IsNullOrWhiteSpace(request.Comment)) + { + summary.AddRow("Note", Markup.Escape(request.Comment!)); + } + + AnsiConsole.Write(summary); + + if (result.Revision.Approvals.Count > 0) + { + var approvalTable = new Table().Title("[grey]Approvals[/]"); + approvalTable.Border(TableBorder.Minimal); + approvalTable.AddColumn(new TableColumn("Actor").LeftAligned()); + approvalTable.AddColumn(new TableColumn("Approved (UTC)").LeftAligned()); + approvalTable.AddColumn(new TableColumn("Comment").LeftAligned()); + + foreach (var approval in result.Revision.Approvals) + { + var comment = string.IsNullOrWhiteSpace(approval.Comment) ? "-" : approval.Comment!; + approvalTable.AddRow( + Markup.Escape(approval.ActorId), + Markup.Escape(FormatUpdatedAt(approval.ApprovedAt)), + Markup.Escape(comment)); + } + + AnsiConsole.Write(approvalTable); + } + else + { + AnsiConsole.MarkupLine("[grey]No activation approvals recorded yet.[/]"); + } + } + else + { + Console.WriteLine(FormattableString.Invariant($"Policy: {result.Revision.PolicyId} v{result.Revision.Version}")); + Console.WriteLine(FormattableString.Invariant($"Status: {NormalizePolicyActivationOutcome(result.Status)}")); + Console.WriteLine(FormattableString.Invariant($"Requires 2 approvals: {(result.Revision.RequiresTwoPersonApproval ? "yes" : "no")}")); + Console.WriteLine(FormattableString.Invariant($"Created (UTC): {FormatUpdatedAt(result.Revision.CreatedAt)}")); + Console.WriteLine(FormattableString.Invariant($"Activated (UTC): {(result.Revision.ActivatedAt.HasValue ? FormatUpdatedAt(result.Revision.ActivatedAt.Value) : "(not yet active)")}")); + + if (request.RunNow) + { + Console.WriteLine("Run: immediate"); + } + else if (request.ScheduledAt.HasValue) + { + Console.WriteLine(FormattableString.Invariant($"Scheduled at: {FormatUpdatedAt(request.ScheduledAt.Value)}")); + } + + if (!string.IsNullOrWhiteSpace(request.Priority)) + { + Console.WriteLine(FormattableString.Invariant($"Priority: {request.Priority}")); + } + + if (request.Rollback) + { + Console.WriteLine("Rollback: yes"); + } + + if (!string.IsNullOrWhiteSpace(request.IncidentId)) + { + Console.WriteLine(FormattableString.Invariant($"Incident: {request.IncidentId}")); + } + + if (!string.IsNullOrWhiteSpace(request.Comment)) + { + Console.WriteLine(FormattableString.Invariant($"Note: {request.Comment}")); + } + + if (result.Revision.Approvals.Count == 0) + { + Console.WriteLine("Approvals: none"); + } + else + { + foreach (var approval in result.Revision.Approvals) + { + var comment = string.IsNullOrWhiteSpace(approval.Comment) ? 
"-" : approval.Comment; + Console.WriteLine(FormattableString.Invariant($"Approval: {approval.ActorId} at {FormatUpdatedAt(approval.ApprovedAt)} ({comment})")); + } + } + } + } + + private static string FormatActivationStatus(string status) + { + var normalized = NormalizePolicyActivationOutcome(status); + return normalized switch + { + "activated" => "[green]activated[/]", + "already_active" => "[yellow]already_active[/]", + "pending_second_approval" => "[yellow]pending_second_approval[/]", + _ => "[red]" + Markup.Escape(string.IsNullOrWhiteSpace(status) ? "unknown" : status) + "[/]" + }; + } + + private static DateTimeOffset? ParsePolicySince(string? value) + { + if (string.IsNullOrWhiteSpace(value)) + { + return null; + } + + if (DateTimeOffset.TryParse( + value.Trim(), + CultureInfo.InvariantCulture, + DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, + out var parsed)) + { + return parsed.ToUniversalTime(); + } + + throw new ArgumentException("Invalid --since value. Use an ISO-8601 timestamp."); + } + + private static string? NormalizeExplainMode(string? mode) + => string.IsNullOrWhiteSpace(mode) ? null : mode.Trim().ToLowerInvariant(); + + private static PolicyFindingsOutputFormat DeterminePolicyFindingsFormat(string? value, string? outputPath) + { + if (!string.IsNullOrWhiteSpace(value)) + { + return value.Trim().ToLowerInvariant() switch + { + "table" => PolicyFindingsOutputFormat.Table, + "json" => PolicyFindingsOutputFormat.Json, + _ => throw new ArgumentException("Invalid format. Use 'table' or 'json'.") + }; + } + + if (!string.IsNullOrWhiteSpace(outputPath) || Console.IsOutputRedirected) + { + return PolicyFindingsOutputFormat.Json; + } + + return PolicyFindingsOutputFormat.Table; + } + + private static object BuildPolicyFindingsPayload( + string policyId, + PolicyFindingsQuery query, + PolicyFindingsPage page) + => new + { + policyId, + filters = new + { + sbom = query.SbomIds, + status = query.Statuses, + severity = query.Severities, + cursor = query.Cursor, + page = query.Page, + pageSize = query.PageSize, + since = query.Since?.ToUniversalTime().ToString("o", CultureInfo.InvariantCulture) + }, + items = page.Items.Select(item => new + { + findingId = item.FindingId, + status = item.Status, + severity = new + { + normalized = item.Severity.Normalized, + score = item.Severity.Score + }, + sbomId = item.SbomId, + advisoryIds = item.AdvisoryIds, + vex = item.Vex is null ? null : new + { + winningStatementId = item.Vex.WinningStatementId, + source = item.Vex.Source, + status = item.Vex.Status + }, + policyVersion = item.PolicyVersion, + updatedAt = item.UpdatedAt == DateTimeOffset.MinValue ? null : item.UpdatedAt.ToUniversalTime().ToString("o", CultureInfo.InvariantCulture), + runId = item.RunId + }), + nextCursor = page.NextCursor, + totalCount = page.TotalCount + }; + + private static object BuildPolicyFindingPayload(string policyId, PolicyFindingDocument finding) + => new + { + policyId, + finding = new + { + findingId = finding.FindingId, + status = finding.Status, + severity = new + { + normalized = finding.Severity.Normalized, + score = finding.Severity.Score + }, + sbomId = finding.SbomId, + advisoryIds = finding.AdvisoryIds, + vex = finding.Vex is null ? null : new + { + winningStatementId = finding.Vex.WinningStatementId, + source = finding.Vex.Source, + status = finding.Vex.Status + }, + policyVersion = finding.PolicyVersion, + updatedAt = finding.UpdatedAt == DateTimeOffset.MinValue ? 
null : finding.UpdatedAt.ToUniversalTime().ToString("o", CultureInfo.InvariantCulture), + runId = finding.RunId + } + }; + + private static object BuildPolicyFindingExplainPayload( + string policyId, + string findingId, + string? mode, + PolicyFindingExplainResult explain) + => new + { + policyId, + findingId, + mode, + explain = new + { + policyVersion = explain.PolicyVersion, + steps = explain.Steps.Select(step => new + { + rule = step.Rule, + status = step.Status, + action = step.Action, + score = step.Score, + inputs = step.Inputs, + evidence = step.Evidence + }), + sealedHints = explain.SealedHints.Select(hint => hint.Message) + } + }; + + private static void RenderPolicyFindingsTable(ILogger logger, PolicyFindingsPage page) + { + var items = page.Items; + if (items.Count == 0) + { + if (AnsiConsole.Profile.Capabilities.Interactive) + { + AnsiConsole.MarkupLine("[yellow]No findings matched the provided filters.[/]"); + } + else + { + logger.LogWarning("No findings matched the provided filters."); + } + return; + } + + if (AnsiConsole.Profile.Capabilities.Interactive) + { + var table = new Table() + .Border(TableBorder.Rounded) + .Centered(); + + table.AddColumn("Finding"); + table.AddColumn("Status"); + table.AddColumn("Severity"); + table.AddColumn("Score"); + table.AddColumn("SBOM"); + table.AddColumn("Advisories"); + table.AddColumn("Updated (UTC)"); + + foreach (var item in items) + { + table.AddRow( + Markup.Escape(item.FindingId), + Markup.Escape(item.Status), + Markup.Escape(item.Severity.Normalized), + Markup.Escape(FormatScore(item.Severity.Score)), + Markup.Escape(item.SbomId), + Markup.Escape(FormatListPreview(item.AdvisoryIds)), + Markup.Escape(FormatUpdatedAt(item.UpdatedAt))); + } + + AnsiConsole.Write(table); + } + else + { + foreach (var item in items) + { + logger.LogInformation( + "{Finding} — Status {Status}, Severity {Severity} ({Score}), SBOM {Sbom}, Updated {Updated}", + item.FindingId, + item.Status, + item.Severity.Normalized, + item.Severity.Score?.ToString("0.00", CultureInfo.InvariantCulture) ?? "n/a", + item.SbomId, + FormatUpdatedAt(item.UpdatedAt)); + } + } + + logger.LogInformation("{Count} finding(s).", items.Count); + + if (page.TotalCount.HasValue) + { + logger.LogInformation("Total available: {Total}", page.TotalCount.Value); + } + + if (!string.IsNullOrWhiteSpace(page.NextCursor)) + { + logger.LogInformation("Next cursor: {Cursor}", page.NextCursor); + } + } + + private static void RenderPolicyFindingDetails(ILogger logger, PolicyFindingDocument finding) + { + if (AnsiConsole.Profile.Capabilities.Interactive) + { + var table = new Table() + .Border(TableBorder.Rounded) + .AddColumn("Field") + .AddColumn("Value"); + + table.AddRow("Finding", Markup.Escape(finding.FindingId)); + table.AddRow("Status", Markup.Escape(finding.Status)); + table.AddRow("Severity", Markup.Escape(FormatSeverity(finding.Severity))); + table.AddRow("SBOM", Markup.Escape(finding.SbomId)); + table.AddRow("Policy Version", Markup.Escape(finding.PolicyVersion.ToString(CultureInfo.InvariantCulture))); + table.AddRow("Updated (UTC)", Markup.Escape(FormatUpdatedAt(finding.UpdatedAt))); + table.AddRow("Run Id", Markup.Escape(string.IsNullOrWhiteSpace(finding.RunId) ? 
"(none)" : finding.RunId)); + table.AddRow("Advisories", Markup.Escape(FormatListPreview(finding.AdvisoryIds))); + table.AddRow("VEX", Markup.Escape(FormatVexMetadata(finding.Vex))); + + AnsiConsole.Write(table); + } + else + { + logger.LogInformation("Finding {Finding}", finding.FindingId); + logger.LogInformation(" Status: {Status}", finding.Status); + logger.LogInformation(" Severity: {Severity}", FormatSeverity(finding.Severity)); + logger.LogInformation(" SBOM: {Sbom}", finding.SbomId); + logger.LogInformation(" Policy version: {Version}", finding.PolicyVersion); + logger.LogInformation(" Updated (UTC): {Updated}", FormatUpdatedAt(finding.UpdatedAt)); + if (!string.IsNullOrWhiteSpace(finding.RunId)) + { + logger.LogInformation(" Run Id: {Run}", finding.RunId); + } + if (finding.AdvisoryIds.Count > 0) + { + logger.LogInformation(" Advisories: {Advisories}", string.Join(", ", finding.AdvisoryIds)); + } + if (!string.IsNullOrWhiteSpace(FormatVexMetadata(finding.Vex))) + { + logger.LogInformation(" VEX: {Vex}", FormatVexMetadata(finding.Vex)); + } + } + } + + private static void RenderPolicyFindingExplain(ILogger logger, PolicyFindingExplainResult explain) + { + if (explain.Steps.Count == 0) + { + if (AnsiConsole.Profile.Capabilities.Interactive) + { + AnsiConsole.MarkupLine("[yellow]No explain steps were returned.[/]"); + } + else + { + logger.LogWarning("No explain steps were returned."); + } + } + else if (AnsiConsole.Profile.Capabilities.Interactive) + { + var table = new Table() + .Border(TableBorder.Rounded) + .AddColumn("Rule") + .AddColumn("Status") + .AddColumn("Action") + .AddColumn("Score") + .AddColumn("Inputs") + .AddColumn("Evidence"); + + foreach (var step in explain.Steps) + { + table.AddRow( + Markup.Escape(step.Rule), + Markup.Escape(step.Status ?? "(n/a)"), + Markup.Escape(step.Action ?? "(n/a)"), + Markup.Escape(step.Score.HasValue ? step.Score.Value.ToString("0.00", CultureInfo.InvariantCulture) : "-"), + Markup.Escape(FormatKeyValuePairs(step.Inputs)), + Markup.Escape(FormatKeyValuePairs(step.Evidence))); + } + + AnsiConsole.Write(table); + } + else + { + logger.LogInformation("{Count} explain step(s).", explain.Steps.Count); + foreach (var step in explain.Steps) + { + logger.LogInformation( + "Rule {Rule} — Status {Status}, Action {Action}, Score {Score}, Inputs {Inputs}", + step.Rule, + step.Status ?? "n/a", + step.Action ?? "n/a", + step.Score?.ToString("0.00", CultureInfo.InvariantCulture) ?? 
"n/a", + FormatKeyValuePairs(step.Inputs)); + + if (step.Evidence is not null && step.Evidence.Count > 0) + { + logger.LogInformation(" Evidence: {Evidence}", FormatKeyValuePairs(step.Evidence)); + } + } + } + + if (explain.SealedHints.Count > 0) + { + if (AnsiConsole.Profile.Capabilities.Interactive) + { + AnsiConsole.MarkupLine("[grey]Hints:[/]"); + foreach (var hint in explain.SealedHints) + { + AnsiConsole.MarkupLine($" • {Markup.Escape(hint.Message)}"); + } + } + else + { + foreach (var hint in explain.SealedHints) + { + logger.LogInformation("Hint: {Hint}", hint.Message); + } + } + } + } + + private static string FormatSeverity(PolicyFindingSeverity severity) + { + if (severity.Score.HasValue) + { + return FormattableString.Invariant($"{severity.Normalized} ({severity.Score.Value:0.00})"); + } + + return severity.Normalized; + } + + private static string FormatListPreview(IReadOnlyList values) + { + if (values is null || values.Count == 0) + { + return "(none)"; + } + + const int MaxItems = 3; + if (values.Count <= MaxItems) + { + return string.Join(", ", values); + } + + var preview = string.Join(", ", values.Take(MaxItems)); + return FormattableString.Invariant($"{preview} (+{values.Count - MaxItems})"); + } + + private static string FormatUpdatedAt(DateTimeOffset timestamp) + { + if (timestamp == DateTimeOffset.MinValue) + { + return "(unknown)"; + } + + return timestamp.ToUniversalTime().ToString("yyyy-MM-ddTHH:mm:ss'Z'", CultureInfo.InvariantCulture); + } + + private static string FormatScore(double? score) + => score.HasValue ? score.Value.ToString("0.00", CultureInfo.InvariantCulture) : "-"; + + private static string FormatKeyValuePairs(IReadOnlyDictionary? values) + { + if (values is null || values.Count == 0) + { + return "(none)"; + } + + return string.Join(", ", values.Select(pair => $"{pair.Key}={pair.Value}")); + } + + private static string FormatVexMetadata(PolicyFindingVexMetadata? value) + { + if (value is null) + { + return "(none)"; + } + + var parts = new List(3); + if (!string.IsNullOrWhiteSpace(value.WinningStatementId)) + { + parts.Add($"winning={value.WinningStatementId}"); + } + + if (!string.IsNullOrWhiteSpace(value.Source)) + { + parts.Add($"source={value.Source}"); + } + + if (!string.IsNullOrWhiteSpace(value.Status)) + { + parts.Add($"status={value.Status}"); + } + + return parts.Count == 0 ? "(none)" : string.Join(", ", parts); + } + + private static void HandlePolicyFindingsFailure(PolicyApiException exception, ILogger logger, Action recordMetric) + { + var exitCode = exception.StatusCode switch + { + HttpStatusCode.Unauthorized or HttpStatusCode.Forbidden => 12, + HttpStatusCode.NotFound => 1, + _ => 1 + }; + + if (string.IsNullOrWhiteSpace(exception.ErrorCode)) + { + logger.LogError("Policy API request failed ({StatusCode}): {Message}", (int)exception.StatusCode, exception.Message); + } + else + { + logger.LogError("Policy API request failed ({StatusCode} {Code}): {Message}", (int)exception.StatusCode, exception.ErrorCode, exception.Message); + } + + recordMetric("error"); + Environment.ExitCode = exitCode; + } + + private static string FormatDelta(int? value) + => value.HasValue ? 
value.Value.ToString("N0", CultureInfo.InvariantCulture) : "-"; + + private static readonly JsonSerializerOptions SimulationJsonOptions = + new(JsonSerializerDefaults.Web) { WriteIndented = true }; + + private static readonly IReadOnlyDictionary EmptyPolicyEnvironment = + new ReadOnlyDictionary(new Dictionary(0, StringComparer.Ordinal)); + + private static readonly IReadOnlyList EmptyPolicySbomSet = + new ReadOnlyCollection(Array.Empty()); + + private static readonly IReadOnlyDictionary EmptyLabelSelectors = + new ReadOnlyDictionary(new Dictionary(0, StringComparer.OrdinalIgnoreCase)); + + private enum PolicySimulationOutputFormat + { + Table, + Json + } + + private enum PolicyFindingsOutputFormat + { + Table, + Json + } + + + private static string FormatAdditionalValue(object? value) + { + return value switch + { + null => "null", + bool b => b ? "true" : "false", + double d => d.ToString("G17", CultureInfo.InvariantCulture), + float f => f.ToString("G9", CultureInfo.InvariantCulture), + IFormattable formattable => formattable.ToString(null, CultureInfo.InvariantCulture), + _ => value.ToString() ?? string.Empty + }; + } + + + private static IReadOnlyList NormalizeProviders(IReadOnlyList providers) + { + if (providers is null || providers.Count == 0) + { + return Array.Empty(); + } + + var list = new List(); + foreach (var provider in providers) + { + if (!string.IsNullOrWhiteSpace(provider)) + { + list.Add(provider.Trim()); + } + } + + return list.Count == 0 ? Array.Empty() : list; + } + + private static string ResolveTenant(string? tenantOption) + { + if (!string.IsNullOrWhiteSpace(tenantOption)) + { + return tenantOption.Trim(); + } + + var fromEnvironment = Environment.GetEnvironmentVariable("STELLA_TENANT"); + return string.IsNullOrWhiteSpace(fromEnvironment) ? string.Empty : fromEnvironment.Trim(); + } + + private static async Task LoadIngestInputAsync(string input, CancellationToken cancellationToken) + { + if (Uri.TryCreate(input, UriKind.Absolute, out var uri) && + (uri.Scheme.Equals(Uri.UriSchemeHttp, StringComparison.OrdinalIgnoreCase) || + uri.Scheme.Equals(Uri.UriSchemeHttps, StringComparison.OrdinalIgnoreCase))) + { + return await LoadIngestInputFromHttpAsync(uri, cancellationToken).ConfigureAwait(false); + } + + return await LoadIngestInputFromFileAsync(input, cancellationToken).ConfigureAwait(false); + } + + private static async Task LoadIngestInputFromHttpAsync(Uri uri, CancellationToken cancellationToken) + { + using var handler = new HttpClientHandler { AutomaticDecompression = DecompressionMethods.All }; + using var httpClient = new HttpClient(handler); + using var response = await httpClient.GetAsync(uri, cancellationToken).ConfigureAwait(false); + + if (!response.IsSuccessStatusCode) + { + throw new InvalidOperationException($"Failed to download document from {uri} (HTTP {(int)response.StatusCode})."); + } + + var contentType = response.Content.Headers.ContentType?.MediaType ?? "application/json"; + var contentEncoding = response.Content.Headers.ContentEncoding is { Count: > 0 } + ? 
string.Join(",", response.Content.Headers.ContentEncoding) + : null; + + var bytes = await response.Content.ReadAsByteArrayAsync(cancellationToken).ConfigureAwait(false); + var normalized = NormalizeDocument(bytes, contentType, contentEncoding); + + return new IngestInputPayload( + "uri", + uri.ToString(), + normalized.Content, + normalized.ContentType, + normalized.ContentEncoding); + } + + private static async Task LoadIngestInputFromFileAsync(string path, CancellationToken cancellationToken) + { + var fullPath = Path.GetFullPath(path); + if (!File.Exists(fullPath)) + { + throw new FileNotFoundException("Input document not found.", fullPath); + } + + var bytes = await File.ReadAllBytesAsync(fullPath, cancellationToken).ConfigureAwait(false); + var normalized = NormalizeDocument(bytes, GuessContentTypeFromExtension(fullPath), null); + + return new IngestInputPayload( + "file", + Path.GetFileName(fullPath), + normalized.Content, + normalized.ContentType, + normalized.ContentEncoding); + } + + private static DocumentNormalizationResult NormalizeDocument(byte[] bytes, string? contentType, string? encodingHint) + { + if (bytes is null || bytes.Length == 0) + { + throw new InvalidOperationException("Input document is empty."); + } + + var working = bytes; + var encodings = new List(); + if (!string.IsNullOrWhiteSpace(encodingHint)) + { + encodings.Add(encodingHint); + } + + if (IsGzip(working)) + { + working = DecompressGzip(working); + encodings.Add("gzip"); + } + + var text = DecodeText(working); + var trimmed = text.TrimStart(); + + if (!string.IsNullOrWhiteSpace(trimmed) && trimmed[0] != '{' && trimmed[0] != '[') + { + if (TryDecodeBase64(text, out var decodedBytes)) + { + working = decodedBytes; + encodings.Add("base64"); + + if (IsGzip(working)) + { + working = DecompressGzip(working); + encodings.Add("gzip"); + } + + text = DecodeText(working); + } + } + + text = text.Trim(); + if (string.IsNullOrWhiteSpace(text)) + { + throw new InvalidOperationException("Input document contained no data after decoding."); + } + + var encodingLabel = encodings.Count == 0 ? null : string.Join("+", encodings); + var finalContentType = string.IsNullOrWhiteSpace(contentType) ? "application/json" : contentType; + + return new DocumentNormalizationResult(text, finalContentType, encodingLabel); + } + + private static string GuessContentTypeFromExtension(string path) + { + var extension = Path.GetExtension(path); + if (string.IsNullOrWhiteSpace(extension)) + { + return "application/json"; + } + + return extension.ToLowerInvariant() switch + { + ".json" or ".csaf" => "application/json", + ".xml" => "application/xml", + _ => "application/json" + }; + } + + private static DateTimeOffset DetermineVerificationSince(string? sinceOption) + { + if (string.IsNullOrWhiteSpace(sinceOption)) + { + return DateTimeOffset.UtcNow.AddHours(-24); + } + + var trimmed = sinceOption.Trim(); + + if (DateTimeOffset.TryParse( + trimmed, + CultureInfo.InvariantCulture, + DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, + out var parsedTimestamp)) + { + return parsedTimestamp.ToUniversalTime(); + } + + if (TryParseRelativeDuration(trimmed, out var duration)) + { + return DateTimeOffset.UtcNow.Subtract(duration); + } + + throw new InvalidOperationException("Invalid --since value. Use ISO-8601 timestamp or duration (e.g. 
24h, 7d).");
+    }
+
+    private static bool TryParseRelativeDuration(string value, out TimeSpan duration)
+    {
+        duration = TimeSpan.Zero;
+        if (string.IsNullOrWhiteSpace(value))
+        {
+            return false;
+        }
+
+        var normalized = value.Trim().ToLowerInvariant();
+        if (normalized.Length < 2)
+        {
+            return false;
+        }
+
+        var suffix = normalized[^1];
+        var magnitudeText = normalized[..^1];
+
+        double multiplier = suffix switch
+        {
+            's' => 1,
+            'm' => 60,
+            'h' => 3600,
+            'd' => 86400,
+            'w' => 604800,
+            _ => 0
+        };
+
+        if (multiplier == 0)
+        {
+            return false;
+        }
+
+        if (!double.TryParse(magnitudeText, NumberStyles.Float, CultureInfo.InvariantCulture, out var magnitude))
+        {
+            return false;
+        }
+
+        if (double.IsNaN(magnitude) || double.IsInfinity(magnitude) || magnitude <= 0)
+        {
+            return false;
+        }
+
+        var seconds = magnitude * multiplier;
+        if (double.IsNaN(seconds) || double.IsInfinity(seconds) || seconds <= 0)
+        {
+            return false;
+        }
+
+        duration = TimeSpan.FromSeconds(seconds);
+        return true;
+    }
+
+    private static int NormalizeLimit(int? limitOption)
+    {
+        if (!limitOption.HasValue)
+        {
+            return 20;
+        }
+
+        if (limitOption.Value < 0)
+        {
+            throw new InvalidOperationException("Limit cannot be negative.");
+        }
+
+        return limitOption.Value;
+    }
+
+    private static IReadOnlyList<string> ParseCommaSeparatedList(string? raw)
+    {
+        if (string.IsNullOrWhiteSpace(raw))
+        {
+            return Array.Empty<string>();
+        }
+
+        var tokens = raw
+            .Split(',', StringSplitOptions.RemoveEmptyEntries)
+            .Select(token => token.Trim())
+            .Where(token => token.Length > 0)
+            .Distinct(StringComparer.OrdinalIgnoreCase)
+            .ToArray();
+
+        return tokens.Length == 0 ? Array.Empty<string>() : tokens;
+    }
+
+    private static string FormatWindowRange(AocVerifyWindow? window)
+    {
+        if (window is null)
+        {
+            return "(unspecified)";
+        }
+
+        var fromText = window.From?.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture) ?? "(unknown)";
+        var toText = window.To?.ToUniversalTime().ToString("O", CultureInfo.InvariantCulture) ?? "(unknown)";
+        return $"{fromText} -> {toText}";
+    }
+
+    private static string FormatCheckedCounts(AocVerifyChecked? checkedCounts)
+    {
+        if (checkedCounts is null)
+        {
+            return "(unspecified)";
+        }
+
+        return $"advisories: {checkedCounts.Advisories.ToString("N0", CultureInfo.InvariantCulture)}, vex: {checkedCounts.Vex.ToString("N0", CultureInfo.InvariantCulture)}";
+    }
+
+    private static string DetermineVerifyStatus(AocVerifyResponse? response)
+    {
+        if (response is null)
+        {
+            return "unknown";
+        }
+
+        if (response.Truncated == true && (response.Violations is null || response.Violations.Count == 0))
+        {
+            return "truncated";
+        }
+
+        var total = response.Violations?.Sum(violation => Math.Max(0, violation?.Count ?? 0)) ?? 0;
+        return total > 0 ? "violations" : "ok";
+    }
+
+    private static string FormatBoolean(bool value, bool useColor)
+    {
+        var text = value ? "yes" : "no";
+        if (!useColor)
+        {
+            return text;
+        }
+
+        return value
+            ? $"[yellow]{text}[/]"
+            : $"[green]{text}[/]";
+    }
+
+    private static string FormatVerifyStatus(string? status, bool useColor)
+    {
+        var normalized = string.IsNullOrWhiteSpace(status) ? "unknown" : status.Trim();
+        var escaped = Markup.Escape(normalized);
+        if (!useColor)
+        {
+            return escaped;
+        }
+
+        return normalized switch
+        {
+            "ok" => $"[green]{escaped}[/]",
+            "violations" => $"[red]{escaped}[/]",
+            "truncated" => $"[yellow]{escaped}[/]",
+            _ => $"[grey]{escaped}[/]"
+        };
+    }
+
+    private static string FormatViolationExample(AocVerifyViolationExample?
example) + { + if (example is null) + { + return "(n/a)"; + } + + var parts = new List(); + if (!string.IsNullOrWhiteSpace(example.Source)) + { + parts.Add(example.Source.Trim()); + } + + if (!string.IsNullOrWhiteSpace(example.DocumentId)) + { + parts.Add(example.DocumentId.Trim()); + } + + var label = parts.Count == 0 ? "(n/a)" : string.Join(" | ", parts); + if (!string.IsNullOrWhiteSpace(example.ContentHash)) + { + label = $"{label} [{example.ContentHash.Trim()}]"; + } + + return label; + } + + private static void RenderAocVerifyTable(AocVerifyResponse response, bool useColor, int limit) + { + var summary = new Table().Border(TableBorder.Rounded); + summary.AddColumn("Field"); + summary.AddColumn("Value"); + + summary.AddRow("Tenant", Markup.Escape(string.IsNullOrWhiteSpace(response?.Tenant) ? "(unknown)" : response.Tenant!)); + summary.AddRow("Window", Markup.Escape(FormatWindowRange(response?.Window))); + summary.AddRow("Checked", Markup.Escape(FormatCheckedCounts(response?.Checked))); + + summary.AddRow("Limit", Markup.Escape(limit <= 0 ? "unbounded" : limit.ToString(CultureInfo.InvariantCulture))); + summary.AddRow("Status", FormatVerifyStatus(DetermineVerifyStatus(response), useColor)); + + if (response?.Metrics?.IngestionWriteTotal is int writes) + { + summary.AddRow("Ingestion Writes", Markup.Escape(writes.ToString("N0", CultureInfo.InvariantCulture))); + } + + if (response?.Metrics?.AocViolationTotal is int totalViolations) + { + summary.AddRow("Violations (total)", Markup.Escape(totalViolations.ToString("N0", CultureInfo.InvariantCulture))); + } + else + { + var computedViolations = response?.Violations?.Sum(violation => Math.Max(0, violation?.Count ?? 0)) ?? 0; + summary.AddRow("Violations (total)", Markup.Escape(computedViolations.ToString("N0", CultureInfo.InvariantCulture))); + } + + summary.AddRow("Truncated", FormatBoolean(response?.Truncated == true, useColor)); + + AnsiConsole.Write(summary); + + if (response?.Violations is null || response.Violations.Count == 0) + { + var message = response?.Truncated == true + ? "No violations reported, but results were truncated. Increase --limit to review full output." + : "No AOC violations detected in the requested window."; + + if (useColor) + { + var color = response?.Truncated == true ? "yellow" : "green"; + AnsiConsole.MarkupLine($"[{color}]{Markup.Escape(message)}[/]"); + } + else + { + Console.WriteLine(message); + } + + return; + } + + var violationTable = new Table().Border(TableBorder.Rounded); + violationTable.AddColumn("Code"); + violationTable.AddColumn("Count"); + violationTable.AddColumn("Sample Document"); + violationTable.AddColumn("Path"); + + foreach (var violation in response.Violations) + { + var codeDisplay = FormatViolationCode(violation.Code, useColor); + var countDisplay = violation.Count.ToString("N0", CultureInfo.InvariantCulture); + var example = violation.Examples?.FirstOrDefault(); + var documentDisplay = Markup.Escape(FormatViolationExample(example)); + var pathDisplay = example is null || string.IsNullOrWhiteSpace(example.Path) + ? 
"(none)" + : example.Path!; + + violationTable.AddRow(codeDisplay, countDisplay, documentDisplay, Markup.Escape(pathDisplay)); + } + + AnsiConsole.Write(violationTable); +} + + private static int DetermineVerifyExitCode(AocVerifyResponse response) + { + ArgumentNullException.ThrowIfNull(response); + + if (response.Violations is not null && response.Violations.Count > 0) + { + var exitCodes = new List(); + foreach (var violation in response.Violations) + { + if (string.IsNullOrWhiteSpace(violation.Code)) + { + continue; + } + + if (AocViolationExitCodeMap.TryGetValue(violation.Code, out var mapped)) + { + exitCodes.Add(mapped); + } + } + + if (exitCodes.Count > 0) + { + return exitCodes.Min(); + } + + return response.Truncated == true ? 18 : 17; + } + + if (response.Truncated == true) + { + return 18; + } + + return 0; + } + + private static async Task WriteJsonReportAsync(T payload, string destination, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(payload); + + if (string.IsNullOrWhiteSpace(destination)) + { + throw new InvalidOperationException("Output path must be provided."); + } + + var outputPath = Path.GetFullPath(destination); + var directory = Path.GetDirectoryName(outputPath); + if (!string.IsNullOrWhiteSpace(directory)) + { + Directory.CreateDirectory(directory); + } + + var json = JsonSerializer.Serialize(payload, new JsonSerializerOptions + { + WriteIndented = true + }); + + await File.WriteAllTextAsync(outputPath, json, cancellationToken).ConfigureAwait(false); + return outputPath; + } + + private static void RenderDryRunTable(AocIngestDryRunResponse response, bool useColor) + { + var summary = new Table().Border(TableBorder.Rounded); + summary.AddColumn("Field"); + summary.AddColumn("Value"); + + summary.AddRow("Source", Markup.Escape(response?.Source ?? "(unknown)")); + summary.AddRow("Tenant", Markup.Escape(response?.Tenant ?? "(unknown)")); + summary.AddRow("Guard Version", Markup.Escape(response?.GuardVersion ?? "(unknown)")); + summary.AddRow("Status", FormatStatusMarkup(response?.Status, useColor)); + + var violationCount = response?.Violations?.Count ?? 0; + summary.AddRow("Violations", violationCount.ToString(CultureInfo.InvariantCulture)); + + if (!string.IsNullOrWhiteSpace(response?.Document?.ContentHash)) + { + summary.AddRow("Content Hash", Markup.Escape(response.Document.ContentHash!)); + } + + if (!string.IsNullOrWhiteSpace(response?.Document?.Supersedes)) + { + summary.AddRow("Supersedes", Markup.Escape(response.Document.Supersedes!)); + } + + if (!string.IsNullOrWhiteSpace(response?.Document?.Provenance?.Signature?.Format)) + { + var signature = response.Document.Provenance.Signature; + var summaryText = signature!.Present + ? signature.Format ?? "present" + : "missing"; + summary.AddRow("Signature", Markup.Escape(summaryText)); + } + + AnsiConsole.Write(summary); + + if (violationCount == 0) + { + if (useColor) + { + AnsiConsole.MarkupLine("[green]No AOC violations detected.[/]"); + } + else + { + Console.WriteLine("No AOC violations detected."); + } + + return; + } + + var violationTable = new Table().Border(TableBorder.Rounded); + violationTable.AddColumn("Code"); + violationTable.AddColumn("Path"); + violationTable.AddColumn("Message"); + + foreach (var violation in response!.Violations!) + { + var codeDisplay = FormatViolationCode(violation.Code, useColor); + var pathDisplay = string.IsNullOrWhiteSpace(violation.Path) ? "(root)" : violation.Path!; + var messageDisplay = string.IsNullOrWhiteSpace(violation.Message) ? 
"(unspecified)" : violation.Message!; + violationTable.AddRow(codeDisplay, Markup.Escape(pathDisplay), Markup.Escape(messageDisplay)); + } + + AnsiConsole.Write(violationTable); + } + + private static int DetermineDryRunExitCode(AocIngestDryRunResponse response) + { + if (response?.Violations is null || response.Violations.Count == 0) + { + return 0; + } + + var exitCodes = new List(); + foreach (var violation in response.Violations) + { + if (string.IsNullOrWhiteSpace(violation.Code)) + { + continue; + } + + if (AocViolationExitCodeMap.TryGetValue(violation.Code, out var mapped)) + { + exitCodes.Add(mapped); + } + } + + if (exitCodes.Count == 0) + { + return 17; + } + + return exitCodes.Min(); + } + + private static string FormatStatusMarkup(string? status, bool useColor) + { + var normalized = string.IsNullOrWhiteSpace(status) ? "unknown" : status.Trim(); + if (!useColor) + { + return Markup.Escape(normalized); + } + + return normalized.Equals("ok", StringComparison.OrdinalIgnoreCase) + ? $"[green]{Markup.Escape(normalized)}[/]" + : $"[red]{Markup.Escape(normalized)}[/]"; + } + + private static string FormatViolationCode(string code, bool useColor) + { + var sanitized = string.IsNullOrWhiteSpace(code) ? "(unknown)" : code.Trim(); + if (!useColor) + { + return Markup.Escape(sanitized); + } + + return $"[red]{Markup.Escape(sanitized)}[/]"; + } + + private static bool IsGzip(ReadOnlySpan data) + { + return data.Length >= 2 && data[0] == 0x1F && data[1] == 0x8B; + } + + private static byte[] DecompressGzip(byte[] payload) + { + using var input = new MemoryStream(payload); + using var gzip = new GZipStream(input, CompressionMode.Decompress); + using var output = new MemoryStream(); + gzip.CopyTo(output); + return output.ToArray(); + } + + private static string DecodeText(byte[] payload) + { + var encoding = DetectEncoding(payload); + return encoding.GetString(payload); + } + + private static Encoding DetectEncoding(ReadOnlySpan data) + { + if (data.Length >= 4) + { + if (data[0] == 0x00 && data[1] == 0x00 && data[2] == 0xFE && data[3] == 0xFF) + { + return new UTF32Encoding(bigEndian: true, byteOrderMark: true); + } + + if (data[0] == 0xFF && data[1] == 0xFE && data[2] == 0x00 && data[3] == 0x00) + { + return new UTF32Encoding(bigEndian: false, byteOrderMark: true); + } + } + + if (data.Length >= 2) + { + if (data[0] == 0xFE && data[1] == 0xFF) + { + return Encoding.BigEndianUnicode; + } + + if (data[0] == 0xFF && data[1] == 0xFE) + { + return Encoding.Unicode; + } + } + + if (data.Length >= 3 && data[0] == 0xEF && data[1] == 0xBB && data[2] == 0xBF) + { + return Encoding.UTF8; + } + + return Encoding.UTF8; + } + + private static bool TryDecodeBase64(string text, out byte[] decoded) + { + decoded = Array.Empty(); + if (string.IsNullOrWhiteSpace(text)) + { + return false; + } + + var builder = new StringBuilder(text.Length); + foreach (var ch in text) + { + if (!char.IsWhiteSpace(ch)) + { + builder.Append(ch); + } + } + + var candidate = builder.ToString(); + if (candidate.Length < 8 || candidate.Length % 4 != 0) + { + return false; + } + + for (var i = 0; i < candidate.Length; i++) + { + var c = candidate[i]; + if (!(char.IsLetterOrDigit(c) || c is '+' or '/' or '=')) + { + return false; + } + } + + try + { + decoded = Convert.FromBase64String(candidate); + return true; + } + catch (FormatException) + { + return false; + } + } + + private sealed record IngestInputPayload(string Kind, string Name, string Content, string ContentType, string? 
ContentEncoding); + + private sealed record DocumentNormalizationResult(string Content, string ContentType, string? ContentEncoding); + + private static readonly IReadOnlyDictionary AocViolationExitCodeMap = new Dictionary(StringComparer.OrdinalIgnoreCase) + { + ["ERR_AOC_001"] = 11, + ["ERR_AOC_002"] = 12, + ["ERR_AOC_003"] = 13, + ["ERR_AOC_004"] = 14, + ["ERR_AOC_005"] = 15, + ["ERR_AOC_006"] = 16, + ["ERR_AOC_007"] = 17 + }; + + private static IDictionary RemoveNullValues(Dictionary source) + { + foreach (var key in source.Where(kvp => kvp.Value is null).Select(kvp => kvp.Key).ToList()) + { + source.Remove(key); + } + + return source; + } + + private static async Task TriggerJobAsync( + IBackendOperationsClient client, + ILogger logger, + string jobKind, + IDictionary parameters, + CancellationToken cancellationToken) + { + JobTriggerResult result = await client.TriggerJobAsync(jobKind, parameters, cancellationToken).ConfigureAwait(false); + if (result.Success) + { + if (!string.IsNullOrWhiteSpace(result.Location)) + { + logger.LogInformation("Job accepted. Track status at {Location}.", result.Location); + } + else if (result.Run is not null) + { + logger.LogInformation("Job accepted. RunId: {RunId} Status: {Status}", result.Run.RunId, result.Run.Status); + } + else + { + logger.LogInformation("Job accepted."); + } + + Environment.ExitCode = 0; + } + else + { + logger.LogError("Job '{JobKind}' failed: {Message}", jobKind, result.Message); + Environment.ExitCode = 1; + } + } +} diff --git a/src/StellaOps.Cli/Configuration/AuthorityTokenUtilities.cs b/src/Cli/StellaOps.Cli/Configuration/AuthorityTokenUtilities.cs similarity index 96% rename from src/StellaOps.Cli/Configuration/AuthorityTokenUtilities.cs rename to src/Cli/StellaOps.Cli/Configuration/AuthorityTokenUtilities.cs index fa9a3854..c9c5cf79 100644 --- a/src/StellaOps.Cli/Configuration/AuthorityTokenUtilities.cs +++ b/src/Cli/StellaOps.Cli/Configuration/AuthorityTokenUtilities.cs @@ -1,58 +1,58 @@ -using System; -using System.Security.Cryptography; -using System.Text; -using StellaOps.Auth.Abstractions; - -namespace StellaOps.Cli.Configuration; - -internal static class AuthorityTokenUtilities -{ - public static string ResolveScope(StellaOpsCliOptions options) - { - ArgumentNullException.ThrowIfNull(options); - - var scope = options.Authority?.Scope; - return string.IsNullOrWhiteSpace(scope) - ? StellaOpsScopes.ConcelierJobsTrigger - : scope.Trim(); - } - - public static string BuildCacheKey(StellaOpsCliOptions options) - { - ArgumentNullException.ThrowIfNull(options); - - if (options.Authority is null) - { - return string.Empty; - } - - var scope = ResolveScope(options); - var credential = !string.IsNullOrWhiteSpace(options.Authority.Username) - ? 
$"user:{options.Authority.Username}" - : $"client:{options.Authority.ClientId}"; - - var cacheKey = $"{options.Authority.Url}|{credential}|{scope}"; - - if (!string.IsNullOrWhiteSpace(scope) && scope.Contains("orch:operate", StringComparison.OrdinalIgnoreCase)) - { - var reasonHash = HashOperatorMetadata(options.Authority.OperatorReason); - var ticketHash = HashOperatorMetadata(options.Authority.OperatorTicket); - cacheKey = $"{cacheKey}|op_reason:{reasonHash}|op_ticket:{ticketHash}"; - } - - return cacheKey; - } - - private static string HashOperatorMetadata(string value) - { - if (string.IsNullOrWhiteSpace(value)) - { - return "none"; - } - - var trimmed = value.Trim(); - var bytes = Encoding.UTF8.GetBytes(trimmed); - var hash = SHA256.HashData(bytes); - return Convert.ToHexString(hash).ToLowerInvariant(); - } -} +using System; +using System.Security.Cryptography; +using System.Text; +using StellaOps.Auth.Abstractions; + +namespace StellaOps.Cli.Configuration; + +internal static class AuthorityTokenUtilities +{ + public static string ResolveScope(StellaOpsCliOptions options) + { + ArgumentNullException.ThrowIfNull(options); + + var scope = options.Authority?.Scope; + return string.IsNullOrWhiteSpace(scope) + ? StellaOpsScopes.ConcelierJobsTrigger + : scope.Trim(); + } + + public static string BuildCacheKey(StellaOpsCliOptions options) + { + ArgumentNullException.ThrowIfNull(options); + + if (options.Authority is null) + { + return string.Empty; + } + + var scope = ResolveScope(options); + var credential = !string.IsNullOrWhiteSpace(options.Authority.Username) + ? $"user:{options.Authority.Username}" + : $"client:{options.Authority.ClientId}"; + + var cacheKey = $"{options.Authority.Url}|{credential}|{scope}"; + + if (!string.IsNullOrWhiteSpace(scope) && scope.Contains("orch:operate", StringComparison.OrdinalIgnoreCase)) + { + var reasonHash = HashOperatorMetadata(options.Authority.OperatorReason); + var ticketHash = HashOperatorMetadata(options.Authority.OperatorTicket); + cacheKey = $"{cacheKey}|op_reason:{reasonHash}|op_ticket:{ticketHash}"; + } + + return cacheKey; + } + + private static string HashOperatorMetadata(string value) + { + if (string.IsNullOrWhiteSpace(value)) + { + return "none"; + } + + var trimmed = value.Trim(); + var bytes = Encoding.UTF8.GetBytes(trimmed); + var hash = SHA256.HashData(bytes); + return Convert.ToHexString(hash).ToLowerInvariant(); + } +} diff --git a/src/StellaOps.Cli/Configuration/CliBootstrapper.cs b/src/Cli/StellaOps.Cli/Configuration/CliBootstrapper.cs similarity index 97% rename from src/StellaOps.Cli/Configuration/CliBootstrapper.cs rename to src/Cli/StellaOps.Cli/Configuration/CliBootstrapper.cs index 3e27f926..3dd83547 100644 --- a/src/StellaOps.Cli/Configuration/CliBootstrapper.cs +++ b/src/Cli/StellaOps.Cli/Configuration/CliBootstrapper.cs @@ -1,418 +1,418 @@ -using System; -using System.Collections.Generic; -using System.Globalization; -using System.IO; -using System.Linq; -using Microsoft.Extensions.Configuration; -using StellaOps.Configuration; -using StellaOps.Auth.Abstractions; - -namespace StellaOps.Cli.Configuration; - -public static class CliBootstrapper -{ - public static (StellaOpsCliOptions Options, IConfigurationRoot Configuration) Build(string[] args) - { - var bootstrap = StellaOpsConfigurationBootstrapper.Build(options => - { - options.BindingSection = "StellaOps"; - options.ConfigureBuilder = builder => - { - if (args.Length > 0) - { - builder.AddCommandLine(args); - } - }; - options.PostBind = (cliOptions, configuration) => 
- { - cliOptions.ApiKey = ResolveWithFallback(cliOptions.ApiKey, configuration, "API_KEY", "StellaOps:ApiKey", "ApiKey"); - cliOptions.BackendUrl = ResolveWithFallback(cliOptions.BackendUrl, configuration, "STELLAOPS_BACKEND_URL", "StellaOps:BackendUrl", "BackendUrl"); - cliOptions.ConcelierUrl = ResolveWithFallback(cliOptions.ConcelierUrl, configuration, "STELLAOPS_CONCELIER_URL", "StellaOps:ConcelierUrl", "ConcelierUrl"); - cliOptions.ScannerSignaturePublicKeyPath = ResolveWithFallback(cliOptions.ScannerSignaturePublicKeyPath, configuration, "SCANNER_PUBLIC_KEY", "STELLAOPS_SCANNER_PUBLIC_KEY", "StellaOps:ScannerSignaturePublicKeyPath", "ScannerSignaturePublicKeyPath"); - - cliOptions.ApiKey = cliOptions.ApiKey?.Trim() ?? string.Empty; - cliOptions.BackendUrl = cliOptions.BackendUrl?.Trim() ?? string.Empty; - cliOptions.ConcelierUrl = cliOptions.ConcelierUrl?.Trim() ?? string.Empty; - cliOptions.ScannerSignaturePublicKeyPath = cliOptions.ScannerSignaturePublicKeyPath?.Trim() ?? string.Empty; - - var attemptsRaw = ResolveWithFallback( - string.Empty, - configuration, - "SCANNER_DOWNLOAD_ATTEMPTS", - "STELLAOPS_SCANNER_DOWNLOAD_ATTEMPTS", - "StellaOps:ScannerDownloadAttempts", - "ScannerDownloadAttempts"); - - if (string.IsNullOrWhiteSpace(attemptsRaw)) - { - attemptsRaw = cliOptions.ScannerDownloadAttempts.ToString(CultureInfo.InvariantCulture); - } - - if (int.TryParse(attemptsRaw, NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsedAttempts) && parsedAttempts > 0) - { - cliOptions.ScannerDownloadAttempts = parsedAttempts; - } - - if (cliOptions.ScannerDownloadAttempts <= 0) - { - cliOptions.ScannerDownloadAttempts = 3; - } - - cliOptions.Authority ??= new StellaOpsCliAuthorityOptions(); - var authority = cliOptions.Authority; - - authority.Url = ResolveWithFallback( - authority.Url, - configuration, - "STELLAOPS_AUTHORITY_URL", - "StellaOps:Authority:Url", - "Authority:Url", - "Authority:Issuer"); - - authority.ClientId = ResolveWithFallback( - authority.ClientId, - configuration, - "STELLAOPS_AUTHORITY_CLIENT_ID", - "StellaOps:Authority:ClientId", - "Authority:ClientId"); - - authority.ClientSecret = ResolveWithFallback( - authority.ClientSecret ?? string.Empty, - configuration, - "STELLAOPS_AUTHORITY_CLIENT_SECRET", - "StellaOps:Authority:ClientSecret", - "Authority:ClientSecret"); - - authority.Username = ResolveWithFallback( - authority.Username, - configuration, - "STELLAOPS_AUTHORITY_USERNAME", - "StellaOps:Authority:Username", - "Authority:Username"); - - authority.Password = ResolveWithFallback( - authority.Password ?? string.Empty, - configuration, - "STELLAOPS_AUTHORITY_PASSWORD", - "StellaOps:Authority:Password", - "Authority:Password"); - - authority.Scope = ResolveWithFallback( - authority.Scope, - configuration, - "STELLAOPS_AUTHORITY_SCOPE", - "StellaOps:Authority:Scope", - "Authority:Scope"); - - authority.OperatorReason = ResolveWithFallback( - authority.OperatorReason, - configuration, - "STELLAOPS_ORCH_REASON", - "StellaOps:Authority:OperatorReason", - "Authority:OperatorReason"); - - authority.OperatorTicket = ResolveWithFallback( - authority.OperatorTicket, - configuration, - "STELLAOPS_ORCH_TICKET", - "StellaOps:Authority:OperatorTicket", - "Authority:OperatorTicket"); - - authority.TokenCacheDirectory = ResolveWithFallback( - authority.TokenCacheDirectory, - configuration, - "STELLAOPS_AUTHORITY_TOKEN_CACHE_DIR", - "StellaOps:Authority:TokenCacheDirectory", - "Authority:TokenCacheDirectory"); - - authority.Url = authority.Url?.Trim() ?? 
string.Empty; - authority.ClientId = authority.ClientId?.Trim() ?? string.Empty; - authority.ClientSecret = string.IsNullOrWhiteSpace(authority.ClientSecret) ? null : authority.ClientSecret.Trim(); - authority.Username = authority.Username?.Trim() ?? string.Empty; - authority.Password = string.IsNullOrWhiteSpace(authority.Password) ? null : authority.Password.Trim(); - authority.Scope = string.IsNullOrWhiteSpace(authority.Scope) ? StellaOpsScopes.ConcelierJobsTrigger : authority.Scope.Trim(); - authority.OperatorReason = authority.OperatorReason?.Trim() ?? string.Empty; - authority.OperatorTicket = authority.OperatorTicket?.Trim() ?? string.Empty; - - authority.Resilience ??= new StellaOpsCliAuthorityResilienceOptions(); - authority.Resilience.RetryDelays ??= new List(); - var resilience = authority.Resilience; - - if (!resilience.EnableRetries.HasValue) - { - var raw = ResolveWithFallback( - string.Empty, - configuration, - "STELLAOPS_AUTHORITY_ENABLE_RETRIES", - "StellaOps:Authority:Resilience:EnableRetries", - "StellaOps:Authority:EnableRetries", - "Authority:Resilience:EnableRetries", - "Authority:EnableRetries"); - - if (TryParseBoolean(raw, out var parsed)) - { - resilience.EnableRetries = parsed; - } - } - - var retryDelaysRaw = ResolveWithFallback( - string.Empty, - configuration, - "STELLAOPS_AUTHORITY_RETRY_DELAYS", - "StellaOps:Authority:Resilience:RetryDelays", - "StellaOps:Authority:RetryDelays", - "Authority:Resilience:RetryDelays", - "Authority:RetryDelays"); - - if (!string.IsNullOrWhiteSpace(retryDelaysRaw)) - { - resilience.RetryDelays.Clear(); - foreach (var delay in ParseRetryDelays(retryDelaysRaw)) - { - if (delay > TimeSpan.Zero) - { - resilience.RetryDelays.Add(delay); - } - } - } - - if (!resilience.AllowOfflineCacheFallback.HasValue) - { - var raw = ResolveWithFallback( - string.Empty, - configuration, - "STELLAOPS_AUTHORITY_ALLOW_OFFLINE_CACHE_FALLBACK", - "StellaOps:Authority:Resilience:AllowOfflineCacheFallback", - "StellaOps:Authority:AllowOfflineCacheFallback", - "Authority:Resilience:AllowOfflineCacheFallback", - "Authority:AllowOfflineCacheFallback"); - - if (TryParseBoolean(raw, out var parsed)) - { - resilience.AllowOfflineCacheFallback = parsed; - } - } - - if (!resilience.OfflineCacheTolerance.HasValue) - { - var raw = ResolveWithFallback( - string.Empty, - configuration, - "STELLAOPS_AUTHORITY_OFFLINE_CACHE_TOLERANCE", - "StellaOps:Authority:Resilience:OfflineCacheTolerance", - "StellaOps:Authority:OfflineCacheTolerance", - "Authority:Resilience:OfflineCacheTolerance", - "Authority:OfflineCacheTolerance"); - - if (TimeSpan.TryParse(raw, CultureInfo.InvariantCulture, out var tolerance) && tolerance >= TimeSpan.Zero) - { - resilience.OfflineCacheTolerance = tolerance; - } - } - - var defaultTokenCache = GetDefaultTokenCacheDirectory(); - if (string.IsNullOrWhiteSpace(authority.TokenCacheDirectory)) - { - authority.TokenCacheDirectory = defaultTokenCache; - } - else - { - authority.TokenCacheDirectory = Path.GetFullPath(authority.TokenCacheDirectory); - } - - cliOptions.Offline ??= new StellaOpsCliOfflineOptions(); - var offline = cliOptions.Offline; - - var kitsDirectory = ResolveWithFallback( - string.Empty, - configuration, - "STELLAOPS_OFFLINE_KITS_DIRECTORY", - "STELLAOPS_OFFLINE_KITS_DIR", - "StellaOps:Offline:KitsDirectory", - "StellaOps:Offline:KitDirectory", - "Offline:KitsDirectory", - "Offline:KitDirectory"); - - if (string.IsNullOrWhiteSpace(kitsDirectory)) - { - kitsDirectory = offline.KitsDirectory ?? 
"offline-kits"; - } - - offline.KitsDirectory = Path.GetFullPath(kitsDirectory); - if (!Directory.Exists(offline.KitsDirectory)) - { - Directory.CreateDirectory(offline.KitsDirectory); - } - - var mirror = ResolveWithFallback( - string.Empty, - configuration, - "STELLAOPS_OFFLINE_MIRROR_URL", - "StellaOps:Offline:KitMirror", - "Offline:KitMirror", - "Offline:MirrorUrl"); - - offline.MirrorUrl = string.IsNullOrWhiteSpace(mirror) ? null : mirror.Trim(); - - cliOptions.Plugins ??= new StellaOpsCliPluginOptions(); - var pluginOptions = cliOptions.Plugins; - - pluginOptions.BaseDirectory = ResolveWithFallback( - pluginOptions.BaseDirectory, - configuration, - "STELLAOPS_CLI_PLUGIN_BASE_DIRECTORY", - "StellaOps:Plugins:BaseDirectory", - "Plugins:BaseDirectory"); - - pluginOptions.BaseDirectory = (pluginOptions.BaseDirectory ?? string.Empty).Trim(); - - if (string.IsNullOrWhiteSpace(pluginOptions.BaseDirectory)) - { - pluginOptions.BaseDirectory = AppContext.BaseDirectory; - } - - pluginOptions.BaseDirectory = Path.GetFullPath(pluginOptions.BaseDirectory); - - pluginOptions.Directory = ResolveWithFallback( - pluginOptions.Directory, - configuration, - "STELLAOPS_CLI_PLUGIN_DIRECTORY", - "StellaOps:Plugins:Directory", - "Plugins:Directory"); - - pluginOptions.Directory = (pluginOptions.Directory ?? string.Empty).Trim(); - - if (string.IsNullOrWhiteSpace(pluginOptions.Directory)) - { - pluginOptions.Directory = Path.Combine("plugins", "cli"); - } - - if (!Path.IsPathRooted(pluginOptions.Directory)) - { - pluginOptions.Directory = Path.GetFullPath(Path.Combine(pluginOptions.BaseDirectory, pluginOptions.Directory)); - } - else - { - pluginOptions.Directory = Path.GetFullPath(pluginOptions.Directory); - } - - pluginOptions.ManifestSearchPattern = ResolveWithFallback( - pluginOptions.ManifestSearchPattern, - configuration, - "STELLAOPS_CLI_PLUGIN_MANIFEST_PATTERN", - "StellaOps:Plugins:ManifestSearchPattern", - "Plugins:ManifestSearchPattern"); - - pluginOptions.ManifestSearchPattern = (pluginOptions.ManifestSearchPattern ?? 
string.Empty).Trim(); - - if (string.IsNullOrWhiteSpace(pluginOptions.ManifestSearchPattern)) - { - pluginOptions.ManifestSearchPattern = "*.manifest.json"; - } - - if (pluginOptions.SearchPatterns is null || pluginOptions.SearchPatterns.Count == 0) - { - pluginOptions.SearchPatterns = new List { "StellaOps.Cli.Plugin.*.dll" }; - } - else - { - pluginOptions.SearchPatterns = pluginOptions.SearchPatterns - .Where(pattern => !string.IsNullOrWhiteSpace(pattern)) - .Select(pattern => pattern.Trim()) - .Distinct(StringComparer.OrdinalIgnoreCase) - .ToList(); - - if (pluginOptions.SearchPatterns.Count == 0) - { - pluginOptions.SearchPatterns.Add("StellaOps.Cli.Plugin.*.dll"); - } - } - - if (pluginOptions.PluginOrder is null) - { - pluginOptions.PluginOrder = new List(); - } - else - { - pluginOptions.PluginOrder = pluginOptions.PluginOrder - .Where(name => !string.IsNullOrWhiteSpace(name)) - .Select(name => name.Trim()) - .Distinct(StringComparer.OrdinalIgnoreCase) - .ToList(); - } - }; - }); - - return (bootstrap.Options, bootstrap.Configuration); - } - - private static string ResolveWithFallback(string currentValue, IConfiguration configuration, params string[] keys) - { - if (!string.IsNullOrWhiteSpace(currentValue)) - { - return currentValue; - } - - foreach (var key in keys) - { - var value = configuration[key]; - if (!string.IsNullOrWhiteSpace(value)) - { - return value; - } - } - - return string.Empty; - } - - private static bool TryParseBoolean(string value, out bool parsed) - { - if (string.IsNullOrWhiteSpace(value)) - { - parsed = default; - return false; - } - - if (bool.TryParse(value, out parsed)) - { - return true; - } - - if (int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var numeric)) - { - parsed = numeric != 0; - return true; - } - - parsed = default; - return false; - } - - private static IEnumerable ParseRetryDelays(string raw) - { - if (string.IsNullOrWhiteSpace(raw)) - { - yield break; - } - - var separators = new[] { ',', ';', ' ' }; - foreach (var token in raw.Split(separators, StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)) - { - if (TimeSpan.TryParse(token, CultureInfo.InvariantCulture, out var delay) && delay > TimeSpan.Zero) - { - yield return delay; - } - } - } - - private static string GetDefaultTokenCacheDirectory() - { - var home = Environment.GetFolderPath(Environment.SpecialFolder.UserProfile); - if (string.IsNullOrWhiteSpace(home)) - { - home = AppContext.BaseDirectory; - } - - return Path.GetFullPath(Path.Combine(home, ".stellaops", "tokens")); - } -} +using System; +using System.Collections.Generic; +using System.Globalization; +using System.IO; +using System.Linq; +using Microsoft.Extensions.Configuration; +using StellaOps.Configuration; +using StellaOps.Auth.Abstractions; + +namespace StellaOps.Cli.Configuration; + +public static class CliBootstrapper +{ + public static (StellaOpsCliOptions Options, IConfigurationRoot Configuration) Build(string[] args) + { + var bootstrap = StellaOpsConfigurationBootstrapper.Build(options => + { + options.BindingSection = "StellaOps"; + options.ConfigureBuilder = builder => + { + if (args.Length > 0) + { + builder.AddCommandLine(args); + } + }; + options.PostBind = (cliOptions, configuration) => + { + cliOptions.ApiKey = ResolveWithFallback(cliOptions.ApiKey, configuration, "API_KEY", "StellaOps:ApiKey", "ApiKey"); + cliOptions.BackendUrl = ResolveWithFallback(cliOptions.BackendUrl, configuration, "STELLAOPS_BACKEND_URL", "StellaOps:BackendUrl", "BackendUrl"); + 
cliOptions.ConcelierUrl = ResolveWithFallback(cliOptions.ConcelierUrl, configuration, "STELLAOPS_CONCELIER_URL", "StellaOps:ConcelierUrl", "ConcelierUrl");
+                cliOptions.ScannerSignaturePublicKeyPath = ResolveWithFallback(cliOptions.ScannerSignaturePublicKeyPath, configuration, "SCANNER_PUBLIC_KEY", "STELLAOPS_SCANNER_PUBLIC_KEY", "StellaOps:ScannerSignaturePublicKeyPath", "ScannerSignaturePublicKeyPath");
+
+                cliOptions.ApiKey = cliOptions.ApiKey?.Trim() ?? string.Empty;
+                cliOptions.BackendUrl = cliOptions.BackendUrl?.Trim() ?? string.Empty;
+                cliOptions.ConcelierUrl = cliOptions.ConcelierUrl?.Trim() ?? string.Empty;
+                cliOptions.ScannerSignaturePublicKeyPath = cliOptions.ScannerSignaturePublicKeyPath?.Trim() ?? string.Empty;
+
+                var attemptsRaw = ResolveWithFallback(
+                    string.Empty,
+                    configuration,
+                    "SCANNER_DOWNLOAD_ATTEMPTS",
+                    "STELLAOPS_SCANNER_DOWNLOAD_ATTEMPTS",
+                    "StellaOps:ScannerDownloadAttempts",
+                    "ScannerDownloadAttempts");
+
+                if (string.IsNullOrWhiteSpace(attemptsRaw))
+                {
+                    attemptsRaw = cliOptions.ScannerDownloadAttempts.ToString(CultureInfo.InvariantCulture);
+                }
+
+                if (int.TryParse(attemptsRaw, NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsedAttempts) && parsedAttempts > 0)
+                {
+                    cliOptions.ScannerDownloadAttempts = parsedAttempts;
+                }
+
+                if (cliOptions.ScannerDownloadAttempts <= 0)
+                {
+                    cliOptions.ScannerDownloadAttempts = 3;
+                }
+
+                cliOptions.Authority ??= new StellaOpsCliAuthorityOptions();
+                var authority = cliOptions.Authority;
+
+                authority.Url = ResolveWithFallback(
+                    authority.Url,
+                    configuration,
+                    "STELLAOPS_AUTHORITY_URL",
+                    "StellaOps:Authority:Url",
+                    "Authority:Url",
+                    "Authority:Issuer");
+
+                authority.ClientId = ResolveWithFallback(
+                    authority.ClientId,
+                    configuration,
+                    "STELLAOPS_AUTHORITY_CLIENT_ID",
+                    "StellaOps:Authority:ClientId",
+                    "Authority:ClientId");
+
+                authority.ClientSecret = ResolveWithFallback(
+                    authority.ClientSecret ?? string.Empty,
+                    configuration,
+                    "STELLAOPS_AUTHORITY_CLIENT_SECRET",
+                    "StellaOps:Authority:ClientSecret",
+                    "Authority:ClientSecret");
+
+                authority.Username = ResolveWithFallback(
+                    authority.Username,
+                    configuration,
+                    "STELLAOPS_AUTHORITY_USERNAME",
+                    "StellaOps:Authority:Username",
+                    "Authority:Username");
+
+                authority.Password = ResolveWithFallback(
+                    authority.Password ?? string.Empty,
+                    configuration,
+                    "STELLAOPS_AUTHORITY_PASSWORD",
+                    "StellaOps:Authority:Password",
+                    "Authority:Password");
+
+                authority.Scope = ResolveWithFallback(
+                    authority.Scope,
+                    configuration,
+                    "STELLAOPS_AUTHORITY_SCOPE",
+                    "StellaOps:Authority:Scope",
+                    "Authority:Scope");
+
+                authority.OperatorReason = ResolveWithFallback(
+                    authority.OperatorReason,
+                    configuration,
+                    "STELLAOPS_ORCH_REASON",
+                    "StellaOps:Authority:OperatorReason",
+                    "Authority:OperatorReason");
+
+                authority.OperatorTicket = ResolveWithFallback(
+                    authority.OperatorTicket,
+                    configuration,
+                    "STELLAOPS_ORCH_TICKET",
+                    "StellaOps:Authority:OperatorTicket",
+                    "Authority:OperatorTicket");
+
+                authority.TokenCacheDirectory = ResolveWithFallback(
+                    authority.TokenCacheDirectory,
+                    configuration,
+                    "STELLAOPS_AUTHORITY_TOKEN_CACHE_DIR",
+                    "StellaOps:Authority:TokenCacheDirectory",
+                    "Authority:TokenCacheDirectory");
+
+                authority.Url = authority.Url?.Trim() ?? string.Empty;
+                authority.ClientId = authority.ClientId?.Trim() ?? string.Empty;
+                authority.ClientSecret = string.IsNullOrWhiteSpace(authority.ClientSecret) ? null : authority.ClientSecret.Trim();
+                authority.Username = authority.Username?.Trim() ?? string.Empty;
+                authority.Password = string.IsNullOrWhiteSpace(authority.Password) ? null : authority.Password.Trim();
+                authority.Scope = string.IsNullOrWhiteSpace(authority.Scope) ? StellaOpsScopes.ConcelierJobsTrigger : authority.Scope.Trim();
+                authority.OperatorReason = authority.OperatorReason?.Trim() ?? string.Empty;
+                authority.OperatorTicket = authority.OperatorTicket?.Trim() ?? string.Empty;
+
+                authority.Resilience ??= new StellaOpsCliAuthorityResilienceOptions();
+                authority.Resilience.RetryDelays ??= new List<TimeSpan>();
+                var resilience = authority.Resilience;
+
+                if (!resilience.EnableRetries.HasValue)
+                {
+                    var raw = ResolveWithFallback(
+                        string.Empty,
+                        configuration,
+                        "STELLAOPS_AUTHORITY_ENABLE_RETRIES",
+                        "StellaOps:Authority:Resilience:EnableRetries",
+                        "StellaOps:Authority:EnableRetries",
+                        "Authority:Resilience:EnableRetries",
+                        "Authority:EnableRetries");
+
+                    if (TryParseBoolean(raw, out var parsed))
+                    {
+                        resilience.EnableRetries = parsed;
+                    }
+                }
+
+                var retryDelaysRaw = ResolveWithFallback(
+                    string.Empty,
+                    configuration,
+                    "STELLAOPS_AUTHORITY_RETRY_DELAYS",
+                    "StellaOps:Authority:Resilience:RetryDelays",
+                    "StellaOps:Authority:RetryDelays",
+                    "Authority:Resilience:RetryDelays",
+                    "Authority:RetryDelays");
+
+                if (!string.IsNullOrWhiteSpace(retryDelaysRaw))
+                {
+                    resilience.RetryDelays.Clear();
+                    foreach (var delay in ParseRetryDelays(retryDelaysRaw))
+                    {
+                        if (delay > TimeSpan.Zero)
+                        {
+                            resilience.RetryDelays.Add(delay);
+                        }
+                    }
+                }
+
+                if (!resilience.AllowOfflineCacheFallback.HasValue)
+                {
+                    var raw = ResolveWithFallback(
+                        string.Empty,
+                        configuration,
+                        "STELLAOPS_AUTHORITY_ALLOW_OFFLINE_CACHE_FALLBACK",
+                        "StellaOps:Authority:Resilience:AllowOfflineCacheFallback",
+                        "StellaOps:Authority:AllowOfflineCacheFallback",
+                        "Authority:Resilience:AllowOfflineCacheFallback",
+                        "Authority:AllowOfflineCacheFallback");
+
+                    if (TryParseBoolean(raw, out var parsed))
+                    {
+                        resilience.AllowOfflineCacheFallback = parsed;
+                    }
+                }
+
+                if (!resilience.OfflineCacheTolerance.HasValue)
+                {
+                    var raw = ResolveWithFallback(
+                        string.Empty,
+                        configuration,
+                        "STELLAOPS_AUTHORITY_OFFLINE_CACHE_TOLERANCE",
+                        "StellaOps:Authority:Resilience:OfflineCacheTolerance",
+                        "StellaOps:Authority:OfflineCacheTolerance",
+                        "Authority:Resilience:OfflineCacheTolerance",
+                        "Authority:OfflineCacheTolerance");
+
+                    if (TimeSpan.TryParse(raw, CultureInfo.InvariantCulture, out var tolerance) && tolerance >= TimeSpan.Zero)
+                    {
+                        resilience.OfflineCacheTolerance = tolerance;
+                    }
+                }
+
+                var defaultTokenCache = GetDefaultTokenCacheDirectory();
+                if (string.IsNullOrWhiteSpace(authority.TokenCacheDirectory))
+                {
+                    authority.TokenCacheDirectory = defaultTokenCache;
+                }
+                else
+                {
+                    authority.TokenCacheDirectory = Path.GetFullPath(authority.TokenCacheDirectory);
+                }
+
+                cliOptions.Offline ??= new StellaOpsCliOfflineOptions();
+                var offline = cliOptions.Offline;
+
+                var kitsDirectory = ResolveWithFallback(
+                    string.Empty,
+                    configuration,
+                    "STELLAOPS_OFFLINE_KITS_DIRECTORY",
+                    "STELLAOPS_OFFLINE_KITS_DIR",
+                    "StellaOps:Offline:KitsDirectory",
+                    "StellaOps:Offline:KitDirectory",
+                    "Offline:KitsDirectory",
+                    "Offline:KitDirectory");
+
+                if (string.IsNullOrWhiteSpace(kitsDirectory))
+                {
+                    kitsDirectory = offline.KitsDirectory ?? "offline-kits";
+                }
+
+                offline.KitsDirectory = Path.GetFullPath(kitsDirectory);
+                if (!Directory.Exists(offline.KitsDirectory))
+                {
+                    Directory.CreateDirectory(offline.KitsDirectory);
+                }
+
+                var mirror = ResolveWithFallback(
+                    string.Empty,
+                    configuration,
+                    "STELLAOPS_OFFLINE_MIRROR_URL",
+                    "StellaOps:Offline:KitMirror",
+                    "Offline:KitMirror",
+                    "Offline:MirrorUrl");
+
+                offline.MirrorUrl = string.IsNullOrWhiteSpace(mirror) ? null : mirror.Trim();
+
+                cliOptions.Plugins ??= new StellaOpsCliPluginOptions();
+                var pluginOptions = cliOptions.Plugins;
+
+                pluginOptions.BaseDirectory = ResolveWithFallback(
+                    pluginOptions.BaseDirectory,
+                    configuration,
+                    "STELLAOPS_CLI_PLUGIN_BASE_DIRECTORY",
+                    "StellaOps:Plugins:BaseDirectory",
+                    "Plugins:BaseDirectory");
+
+                pluginOptions.BaseDirectory = (pluginOptions.BaseDirectory ?? string.Empty).Trim();
+
+                if (string.IsNullOrWhiteSpace(pluginOptions.BaseDirectory))
+                {
+                    pluginOptions.BaseDirectory = AppContext.BaseDirectory;
+                }
+
+                pluginOptions.BaseDirectory = Path.GetFullPath(pluginOptions.BaseDirectory);
+
+                pluginOptions.Directory = ResolveWithFallback(
+                    pluginOptions.Directory,
+                    configuration,
+                    "STELLAOPS_CLI_PLUGIN_DIRECTORY",
+                    "StellaOps:Plugins:Directory",
+                    "Plugins:Directory");
+
+                pluginOptions.Directory = (pluginOptions.Directory ?? string.Empty).Trim();
+
+                if (string.IsNullOrWhiteSpace(pluginOptions.Directory))
+                {
+                    pluginOptions.Directory = Path.Combine("plugins", "cli");
+                }
+
+                if (!Path.IsPathRooted(pluginOptions.Directory))
+                {
+                    pluginOptions.Directory = Path.GetFullPath(Path.Combine(pluginOptions.BaseDirectory, pluginOptions.Directory));
+                }
+                else
+                {
+                    pluginOptions.Directory = Path.GetFullPath(pluginOptions.Directory);
+                }
+
+                pluginOptions.ManifestSearchPattern = ResolveWithFallback(
+                    pluginOptions.ManifestSearchPattern,
+                    configuration,
+                    "STELLAOPS_CLI_PLUGIN_MANIFEST_PATTERN",
+                    "StellaOps:Plugins:ManifestSearchPattern",
+                    "Plugins:ManifestSearchPattern");
+
+                pluginOptions.ManifestSearchPattern = (pluginOptions.ManifestSearchPattern ?? string.Empty).Trim();
+
+                if (string.IsNullOrWhiteSpace(pluginOptions.ManifestSearchPattern))
+                {
+                    pluginOptions.ManifestSearchPattern = "*.manifest.json";
+                }
+
+                if (pluginOptions.SearchPatterns is null || pluginOptions.SearchPatterns.Count == 0)
+                {
+                    pluginOptions.SearchPatterns = new List<string> { "StellaOps.Cli.Plugin.*.dll" };
+                }
+                else
+                {
+                    pluginOptions.SearchPatterns = pluginOptions.SearchPatterns
+                        .Where(pattern => !string.IsNullOrWhiteSpace(pattern))
+                        .Select(pattern => pattern.Trim())
+                        .Distinct(StringComparer.OrdinalIgnoreCase)
+                        .ToList();
+
+                    if (pluginOptions.SearchPatterns.Count == 0)
+                    {
+                        pluginOptions.SearchPatterns.Add("StellaOps.Cli.Plugin.*.dll");
+                    }
+                }
+
+                if (pluginOptions.PluginOrder is null)
+                {
+                    pluginOptions.PluginOrder = new List<string>();
+                }
+                else
+                {
+                    pluginOptions.PluginOrder = pluginOptions.PluginOrder
+                        .Where(name => !string.IsNullOrWhiteSpace(name))
+                        .Select(name => name.Trim())
+                        .Distinct(StringComparer.OrdinalIgnoreCase)
+                        .ToList();
+                }
+            };
+        });
+
+        return (bootstrap.Options, bootstrap.Configuration);
+    }
+
+    private static string ResolveWithFallback(string currentValue, IConfiguration configuration, params string[] keys)
+    {
+        if (!string.IsNullOrWhiteSpace(currentValue))
+        {
+            return currentValue;
+        }
+
+        foreach (var key in keys)
+        {
+            var value = configuration[key];
+            if (!string.IsNullOrWhiteSpace(value))
+            {
+                return value;
+            }
+        }
+
+        return string.Empty;
+    }
+
+    private static bool TryParseBoolean(string value, out bool parsed)
+    {
+        if (string.IsNullOrWhiteSpace(value))
+        {
+            parsed = default;
+            return false;
+        }
+
+        if (bool.TryParse(value, out parsed))
+        {
+            return true;
+        }
+
+        if (int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var numeric))
+        {
+            parsed = numeric != 0;
+            return true;
+        }
+
+        parsed = default;
+        return false;
+    }
+
+    private static IEnumerable<TimeSpan> ParseRetryDelays(string raw)
+    {
+        if (string.IsNullOrWhiteSpace(raw))
+        {
+            yield break;
+        }
+
+        var separators = new[] { ',', ';', ' ' };
+        foreach (var token in raw.Split(separators, StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries))
+        {
+            if (TimeSpan.TryParse(token, CultureInfo.InvariantCulture, out var delay) && delay > TimeSpan.Zero)
+            {
+                yield return delay;
+            }
+        }
+    }
+
+    private static string GetDefaultTokenCacheDirectory()
+    {
+        var home = Environment.GetFolderPath(Environment.SpecialFolder.UserProfile);
+        if (string.IsNullOrWhiteSpace(home))
+        {
+            home = AppContext.BaseDirectory;
+        }
+
+        return Path.GetFullPath(Path.Combine(home, ".stellaops", "tokens"));
+    }
+}
diff --git a/src/StellaOps.Cli/Configuration/StellaOpsCliOptions.cs b/src/Cli/StellaOps.Cli/Configuration/StellaOpsCliOptions.cs
similarity index 96%
rename from src/StellaOps.Cli/Configuration/StellaOpsCliOptions.cs
rename to src/Cli/StellaOps.Cli/Configuration/StellaOpsCliOptions.cs
index 1bc57060..589ee81a 100644
--- a/src/StellaOps.Cli/Configuration/StellaOpsCliOptions.cs
+++ b/src/Cli/StellaOps.Cli/Configuration/StellaOpsCliOptions.cs
@@ -1,87 +1,87 @@
-using System;
-using System.Collections.Generic;
-using StellaOps.Auth.Abstractions;
-using System.IO;
-
-namespace StellaOps.Cli.Configuration;
-
-public sealed class StellaOpsCliOptions
-{
-    public string ApiKey { get; set; } = string.Empty;
-
-    public string BackendUrl { get; set; } = string.Empty;
-
-    public string ConcelierUrl { get; set; } = string.Empty;
-
-    public string ScannerCacheDirectory { get; set; } = "scanners";
-
-    public string ResultsDirectory { get; set; } = "results";
-
-    public
string DefaultRunner { get; set; } = "docker"; - - public string ScannerSignaturePublicKeyPath { get; set; } = string.Empty; - - public int ScannerDownloadAttempts { get; set; } = 3; - - public int ScanUploadAttempts { get; set; } = 3; - - public StellaOpsCliAuthorityOptions Authority { get; set; } = new(); - - public StellaOpsCliOfflineOptions Offline { get; set; } = new(); - - public StellaOpsCliPluginOptions Plugins { get; set; } = new(); -} - -public sealed class StellaOpsCliAuthorityOptions -{ - public string Url { get; set; } = string.Empty; - - public string ClientId { get; set; } = string.Empty; - - public string? ClientSecret { get; set; } - - public string Username { get; set; } = string.Empty; - - public string? Password { get; set; } - - public string Scope { get; set; } = StellaOpsScopes.ConcelierJobsTrigger; - - public string OperatorReason { get; set; } = string.Empty; - - public string OperatorTicket { get; set; } = string.Empty; - - public string TokenCacheDirectory { get; set; } = string.Empty; - - public StellaOpsCliAuthorityResilienceOptions Resilience { get; set; } = new(); -} - -public sealed class StellaOpsCliAuthorityResilienceOptions -{ - public bool? EnableRetries { get; set; } - - public IList RetryDelays { get; set; } = new List(); - - public bool? AllowOfflineCacheFallback { get; set; } - - public TimeSpan? OfflineCacheTolerance { get; set; } -} - -public sealed class StellaOpsCliOfflineOptions -{ - public string KitsDirectory { get; set; } = "offline-kits"; - - public string? MirrorUrl { get; set; } -} - -public sealed class StellaOpsCliPluginOptions -{ - public string BaseDirectory { get; set; } = string.Empty; - - public string Directory { get; set; } = "plugins/cli"; - - public IList SearchPatterns { get; set; } = new List(); - - public IList PluginOrder { get; set; } = new List(); - - public string ManifestSearchPattern { get; set; } = "*.manifest.json"; -} +using System; +using System.Collections.Generic; +using StellaOps.Auth.Abstractions; +using System.IO; + +namespace StellaOps.Cli.Configuration; + +public sealed class StellaOpsCliOptions +{ + public string ApiKey { get; set; } = string.Empty; + + public string BackendUrl { get; set; } = string.Empty; + + public string ConcelierUrl { get; set; } = string.Empty; + + public string ScannerCacheDirectory { get; set; } = "scanners"; + + public string ResultsDirectory { get; set; } = "results"; + + public string DefaultRunner { get; set; } = "docker"; + + public string ScannerSignaturePublicKeyPath { get; set; } = string.Empty; + + public int ScannerDownloadAttempts { get; set; } = 3; + + public int ScanUploadAttempts { get; set; } = 3; + + public StellaOpsCliAuthorityOptions Authority { get; set; } = new(); + + public StellaOpsCliOfflineOptions Offline { get; set; } = new(); + + public StellaOpsCliPluginOptions Plugins { get; set; } = new(); +} + +public sealed class StellaOpsCliAuthorityOptions +{ + public string Url { get; set; } = string.Empty; + + public string ClientId { get; set; } = string.Empty; + + public string? ClientSecret { get; set; } + + public string Username { get; set; } = string.Empty; + + public string? 
Password { get; set; } + + public string Scope { get; set; } = StellaOpsScopes.ConcelierJobsTrigger; + + public string OperatorReason { get; set; } = string.Empty; + + public string OperatorTicket { get; set; } = string.Empty; + + public string TokenCacheDirectory { get; set; } = string.Empty; + + public StellaOpsCliAuthorityResilienceOptions Resilience { get; set; } = new(); +} + +public sealed class StellaOpsCliAuthorityResilienceOptions +{ + public bool? EnableRetries { get; set; } + + public IList RetryDelays { get; set; } = new List(); + + public bool? AllowOfflineCacheFallback { get; set; } + + public TimeSpan? OfflineCacheTolerance { get; set; } +} + +public sealed class StellaOpsCliOfflineOptions +{ + public string KitsDirectory { get; set; } = "offline-kits"; + + public string? MirrorUrl { get; set; } +} + +public sealed class StellaOpsCliPluginOptions +{ + public string BaseDirectory { get; set; } = string.Empty; + + public string Directory { get; set; } = "plugins/cli"; + + public IList SearchPatterns { get; set; } = new List(); + + public IList PluginOrder { get; set; } = new List(); + + public string ManifestSearchPattern { get; set; } = "*.manifest.json"; +} diff --git a/src/StellaOps.Cli/Plugins/CliCommandModuleLoader.cs b/src/Cli/StellaOps.Cli/Plugins/CliCommandModuleLoader.cs similarity index 97% rename from src/StellaOps.Cli/Plugins/CliCommandModuleLoader.cs rename to src/Cli/StellaOps.Cli/Plugins/CliCommandModuleLoader.cs index c62c85a1..d0c37619 100644 --- a/src/StellaOps.Cli/Plugins/CliCommandModuleLoader.cs +++ b/src/Cli/StellaOps.Cli/Plugins/CliCommandModuleLoader.cs @@ -1,278 +1,278 @@ -using System; -using System.Collections.Generic; -using System.CommandLine; -using System.IO; -using System.Linq; -using System.Reflection; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging; -using StellaOps.Cli.Configuration; -using StellaOps.Plugin.Hosting; - -namespace StellaOps.Cli.Plugins; - -internal sealed class CliCommandModuleLoader -{ - private readonly IServiceProvider _services; - private readonly StellaOpsCliOptions _options; - private readonly ILogger _logger; - private readonly RestartOnlyCliPluginGuard _guard = new(); - - private IReadOnlyList _modules = Array.Empty(); - private bool _loaded; - - public CliCommandModuleLoader( - IServiceProvider services, - StellaOpsCliOptions options, - ILogger logger) - { - _services = services ?? throw new ArgumentNullException(nameof(services)); - _options = options ?? throw new ArgumentNullException(nameof(options)); - _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - } - - public IReadOnlyList LoadModules() - { - if (_loaded) - { - return _modules; - } - - var pluginOptions = _options.Plugins ?? new StellaOpsCliPluginOptions(); - - var baseDirectory = ResolveBaseDirectory(pluginOptions); - var pluginsDirectory = ResolvePluginsDirectory(pluginOptions, baseDirectory); - var searchPatterns = ResolveSearchPatterns(pluginOptions); - var manifestPattern = string.IsNullOrWhiteSpace(pluginOptions.ManifestSearchPattern) - ? 
"*.manifest.json" - : pluginOptions.ManifestSearchPattern; - - _logger.LogDebug("Loading CLI plug-ins from '{Directory}' (base: '{Base}').", pluginsDirectory, baseDirectory); - - var manifestLoader = new CliPluginManifestLoader(pluginsDirectory, manifestPattern); - IReadOnlyList manifests; - try - { - manifests = manifestLoader.LoadAsync(CancellationToken.None).GetAwaiter().GetResult(); - } - catch (Exception ex) - { - _logger.LogError(ex, "Failed to enumerate CLI plug-in manifests from '{Directory}'.", pluginsDirectory); - manifests = Array.Empty(); - } - - if (manifests.Count == 0) - { - _logger.LogInformation("No CLI plug-in manifests discovered under '{Directory}'.", pluginsDirectory); - _loaded = true; - _guard.Seal(); - _modules = Array.Empty(); - return _modules; - } - - var hostOptions = new PluginHostOptions - { - BaseDirectory = baseDirectory, - PluginsDirectory = pluginsDirectory, - EnsureDirectoryExists = false, - RecursiveSearch = true, - PrimaryPrefix = "StellaOps.Cli" - }; - - foreach (var pattern in searchPatterns) - { - hostOptions.SearchPatterns.Add(pattern); - } - - foreach (var ordered in pluginOptions.PluginOrder ?? Array.Empty()) - { - if (!string.IsNullOrWhiteSpace(ordered)) - { - hostOptions.PluginOrder.Add(ordered); - } - } - - var loadResult = PluginHost.LoadPlugins(hostOptions, _logger); - - var assemblies = loadResult.Plugins.ToDictionary( - descriptor => Normalize(descriptor.AssemblyPath), - descriptor => descriptor.Assembly, - StringComparer.OrdinalIgnoreCase); - - var modules = new List(manifests.Count); - - foreach (var manifest in manifests) - { - try - { - var assemblyPath = ResolveAssemblyPath(manifest); - _guard.EnsureRegistrationAllowed(assemblyPath); - - if (!assemblies.TryGetValue(assemblyPath, out var assembly)) - { - if (!File.Exists(assemblyPath)) - { - throw new FileNotFoundException($"Plug-in assembly '{assemblyPath}' referenced by manifest '{manifest.Id}' was not found."); - } - - assembly = Assembly.LoadFrom(assemblyPath); - assemblies[assemblyPath] = assembly; - } - - var module = CreateModule(assembly, manifest); - if (module is null) - { - continue; - } - - modules.Add(module); - _logger.LogInformation("Registered CLI plug-in '{PluginId}' ({PluginName}) from '{AssemblyPath}'.", manifest.Id, module.Name, assemblyPath); - } - catch (Exception ex) - { - _logger.LogError(ex, "Failed to register CLI plug-in '{PluginId}'.", manifest.Id); - } - } - - _modules = modules; - _loaded = true; - _guard.Seal(); - return _modules; - } - - public void RegisterModules(RootCommand root, Option verboseOption, CancellationToken cancellationToken) - { - if (root is null) - { - throw new ArgumentNullException(nameof(root)); - } - if (verboseOption is null) - { - throw new ArgumentNullException(nameof(verboseOption)); - } - - var modules = LoadModules(); - if (modules.Count == 0) - { - return; - } - - foreach (var module in modules) - { - if (!module.IsAvailable(_services)) - { - _logger.LogDebug("CLI plug-in '{Name}' reported unavailable; skipping registration.", module.Name); - continue; - } - - try - { - module.RegisterCommands(root, _services, _options, verboseOption, cancellationToken); - _logger.LogInformation("CLI plug-in '{Name}' commands registered.", module.Name); - } - catch (Exception ex) - { - _logger.LogError(ex, "CLI plug-in '{Name}' failed to register commands.", module.Name); - } - } - } - - private static string ResolveAssemblyPath(CliPluginManifest manifest) - { - if (manifest.EntryPoint is null) - { - throw new 
InvalidOperationException($"Manifest '{manifest.SourcePath}' does not define an entry point."); - } - - var assemblyPath = manifest.EntryPoint.Assembly; - if (string.IsNullOrWhiteSpace(assemblyPath)) - { - throw new InvalidOperationException($"Manifest '{manifest.SourcePath}' specifies an empty assembly path."); - } - - if (!Path.IsPathRooted(assemblyPath)) - { - if (string.IsNullOrWhiteSpace(manifest.SourceDirectory)) - { - throw new InvalidOperationException($"Manifest '{manifest.SourcePath}' cannot resolve relative assembly path without source directory metadata."); - } - - assemblyPath = Path.Combine(manifest.SourceDirectory, assemblyPath); - } - - return Normalize(assemblyPath); - } - - private ICliCommandModule? CreateModule(Assembly assembly, CliPluginManifest manifest) - { - if (manifest.EntryPoint is null) - { - return null; - } - - var type = assembly.GetType(manifest.EntryPoint.TypeName, throwOnError: true); - if (type is null) - { - throw new InvalidOperationException($"Plug-in type '{manifest.EntryPoint.TypeName}' could not be loaded from assembly '{assembly.FullName}'."); - } - - var module = ActivatorUtilities.CreateInstance(_services, type) as ICliCommandModule; - if (module is null) - { - throw new InvalidOperationException($"Plug-in type '{manifest.EntryPoint.TypeName}' does not implement {nameof(ICliCommandModule)}."); - } - - return module; - } - - private static string ResolveBaseDirectory(StellaOpsCliPluginOptions options) - { - var baseDirectory = options.BaseDirectory; - if (string.IsNullOrWhiteSpace(baseDirectory)) - { - baseDirectory = AppContext.BaseDirectory; - } - - return Path.GetFullPath(baseDirectory); - } - - private static string ResolvePluginsDirectory(StellaOpsCliPluginOptions options, string baseDirectory) - { - var directory = options.Directory; - if (string.IsNullOrWhiteSpace(directory)) - { - directory = Path.Combine("plugins", "cli"); - } - - directory = directory.Trim(); - - if (!Path.IsPathRooted(directory)) - { - directory = Path.Combine(baseDirectory, directory); - } - - return Path.GetFullPath(directory); - } - - private static IReadOnlyList ResolveSearchPatterns(StellaOpsCliPluginOptions options) - { - if (options.SearchPatterns is null || options.SearchPatterns.Count == 0) - { - return new[] { "StellaOps.Cli.Plugin.*.dll" }; - } - - return options.SearchPatterns - .Where(pattern => !string.IsNullOrWhiteSpace(pattern)) - .Select(pattern => pattern.Trim()) - .Distinct(StringComparer.OrdinalIgnoreCase) - .ToArray(); - } - - private static string Normalize(string path) - { - var full = Path.GetFullPath(path); - return full.TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar); - } -} +using System; +using System.Collections.Generic; +using System.CommandLine; +using System.IO; +using System.Linq; +using System.Reflection; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using StellaOps.Cli.Configuration; +using StellaOps.Plugin.Hosting; + +namespace StellaOps.Cli.Plugins; + +internal sealed class CliCommandModuleLoader +{ + private readonly IServiceProvider _services; + private readonly StellaOpsCliOptions _options; + private readonly ILogger _logger; + private readonly RestartOnlyCliPluginGuard _guard = new(); + + private IReadOnlyList _modules = Array.Empty(); + private bool _loaded; + + public CliCommandModuleLoader( + IServiceProvider services, + StellaOpsCliOptions options, + ILogger logger) + { + _services = services ?? 
throw new ArgumentNullException(nameof(services)); + _options = options ?? throw new ArgumentNullException(nameof(options)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public IReadOnlyList LoadModules() + { + if (_loaded) + { + return _modules; + } + + var pluginOptions = _options.Plugins ?? new StellaOpsCliPluginOptions(); + + var baseDirectory = ResolveBaseDirectory(pluginOptions); + var pluginsDirectory = ResolvePluginsDirectory(pluginOptions, baseDirectory); + var searchPatterns = ResolveSearchPatterns(pluginOptions); + var manifestPattern = string.IsNullOrWhiteSpace(pluginOptions.ManifestSearchPattern) + ? "*.manifest.json" + : pluginOptions.ManifestSearchPattern; + + _logger.LogDebug("Loading CLI plug-ins from '{Directory}' (base: '{Base}').", pluginsDirectory, baseDirectory); + + var manifestLoader = new CliPluginManifestLoader(pluginsDirectory, manifestPattern); + IReadOnlyList manifests; + try + { + manifests = manifestLoader.LoadAsync(CancellationToken.None).GetAwaiter().GetResult(); + } + catch (Exception ex) + { + _logger.LogError(ex, "Failed to enumerate CLI plug-in manifests from '{Directory}'.", pluginsDirectory); + manifests = Array.Empty(); + } + + if (manifests.Count == 0) + { + _logger.LogInformation("No CLI plug-in manifests discovered under '{Directory}'.", pluginsDirectory); + _loaded = true; + _guard.Seal(); + _modules = Array.Empty(); + return _modules; + } + + var hostOptions = new PluginHostOptions + { + BaseDirectory = baseDirectory, + PluginsDirectory = pluginsDirectory, + EnsureDirectoryExists = false, + RecursiveSearch = true, + PrimaryPrefix = "StellaOps.Cli" + }; + + foreach (var pattern in searchPatterns) + { + hostOptions.SearchPatterns.Add(pattern); + } + + foreach (var ordered in pluginOptions.PluginOrder ?? 
Array.Empty()) + { + if (!string.IsNullOrWhiteSpace(ordered)) + { + hostOptions.PluginOrder.Add(ordered); + } + } + + var loadResult = PluginHost.LoadPlugins(hostOptions, _logger); + + var assemblies = loadResult.Plugins.ToDictionary( + descriptor => Normalize(descriptor.AssemblyPath), + descriptor => descriptor.Assembly, + StringComparer.OrdinalIgnoreCase); + + var modules = new List(manifests.Count); + + foreach (var manifest in manifests) + { + try + { + var assemblyPath = ResolveAssemblyPath(manifest); + _guard.EnsureRegistrationAllowed(assemblyPath); + + if (!assemblies.TryGetValue(assemblyPath, out var assembly)) + { + if (!File.Exists(assemblyPath)) + { + throw new FileNotFoundException($"Plug-in assembly '{assemblyPath}' referenced by manifest '{manifest.Id}' was not found."); + } + + assembly = Assembly.LoadFrom(assemblyPath); + assemblies[assemblyPath] = assembly; + } + + var module = CreateModule(assembly, manifest); + if (module is null) + { + continue; + } + + modules.Add(module); + _logger.LogInformation("Registered CLI plug-in '{PluginId}' ({PluginName}) from '{AssemblyPath}'.", manifest.Id, module.Name, assemblyPath); + } + catch (Exception ex) + { + _logger.LogError(ex, "Failed to register CLI plug-in '{PluginId}'.", manifest.Id); + } + } + + _modules = modules; + _loaded = true; + _guard.Seal(); + return _modules; + } + + public void RegisterModules(RootCommand root, Option verboseOption, CancellationToken cancellationToken) + { + if (root is null) + { + throw new ArgumentNullException(nameof(root)); + } + if (verboseOption is null) + { + throw new ArgumentNullException(nameof(verboseOption)); + } + + var modules = LoadModules(); + if (modules.Count == 0) + { + return; + } + + foreach (var module in modules) + { + if (!module.IsAvailable(_services)) + { + _logger.LogDebug("CLI plug-in '{Name}' reported unavailable; skipping registration.", module.Name); + continue; + } + + try + { + module.RegisterCommands(root, _services, _options, verboseOption, cancellationToken); + _logger.LogInformation("CLI plug-in '{Name}' commands registered.", module.Name); + } + catch (Exception ex) + { + _logger.LogError(ex, "CLI plug-in '{Name}' failed to register commands.", module.Name); + } + } + } + + private static string ResolveAssemblyPath(CliPluginManifest manifest) + { + if (manifest.EntryPoint is null) + { + throw new InvalidOperationException($"Manifest '{manifest.SourcePath}' does not define an entry point."); + } + + var assemblyPath = manifest.EntryPoint.Assembly; + if (string.IsNullOrWhiteSpace(assemblyPath)) + { + throw new InvalidOperationException($"Manifest '{manifest.SourcePath}' specifies an empty assembly path."); + } + + if (!Path.IsPathRooted(assemblyPath)) + { + if (string.IsNullOrWhiteSpace(manifest.SourceDirectory)) + { + throw new InvalidOperationException($"Manifest '{manifest.SourcePath}' cannot resolve relative assembly path without source directory metadata."); + } + + assemblyPath = Path.Combine(manifest.SourceDirectory, assemblyPath); + } + + return Normalize(assemblyPath); + } + + private ICliCommandModule? 
CreateModule(Assembly assembly, CliPluginManifest manifest) + { + if (manifest.EntryPoint is null) + { + return null; + } + + var type = assembly.GetType(manifest.EntryPoint.TypeName, throwOnError: true); + if (type is null) + { + throw new InvalidOperationException($"Plug-in type '{manifest.EntryPoint.TypeName}' could not be loaded from assembly '{assembly.FullName}'."); + } + + var module = ActivatorUtilities.CreateInstance(_services, type) as ICliCommandModule; + if (module is null) + { + throw new InvalidOperationException($"Plug-in type '{manifest.EntryPoint.TypeName}' does not implement {nameof(ICliCommandModule)}."); + } + + return module; + } + + private static string ResolveBaseDirectory(StellaOpsCliPluginOptions options) + { + var baseDirectory = options.BaseDirectory; + if (string.IsNullOrWhiteSpace(baseDirectory)) + { + baseDirectory = AppContext.BaseDirectory; + } + + return Path.GetFullPath(baseDirectory); + } + + private static string ResolvePluginsDirectory(StellaOpsCliPluginOptions options, string baseDirectory) + { + var directory = options.Directory; + if (string.IsNullOrWhiteSpace(directory)) + { + directory = Path.Combine("plugins", "cli"); + } + + directory = directory.Trim(); + + if (!Path.IsPathRooted(directory)) + { + directory = Path.Combine(baseDirectory, directory); + } + + return Path.GetFullPath(directory); + } + + private static IReadOnlyList ResolveSearchPatterns(StellaOpsCliPluginOptions options) + { + if (options.SearchPatterns is null || options.SearchPatterns.Count == 0) + { + return new[] { "StellaOps.Cli.Plugin.*.dll" }; + } + + return options.SearchPatterns + .Where(pattern => !string.IsNullOrWhiteSpace(pattern)) + .Select(pattern => pattern.Trim()) + .Distinct(StringComparer.OrdinalIgnoreCase) + .ToArray(); + } + + private static string Normalize(string path) + { + var full = Path.GetFullPath(path); + return full.TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar); + } +} diff --git a/src/StellaOps.Cli/Plugins/CliPluginManifest.cs b/src/Cli/StellaOps.Cli/Plugins/CliPluginManifest.cs similarity index 96% rename from src/StellaOps.Cli/Plugins/CliPluginManifest.cs rename to src/Cli/StellaOps.Cli/Plugins/CliPluginManifest.cs index 1b4479dc..ef6bac3a 100644 --- a/src/StellaOps.Cli/Plugins/CliPluginManifest.cs +++ b/src/Cli/StellaOps.Cli/Plugins/CliPluginManifest.cs @@ -1,39 +1,39 @@ -using System; -using System.Collections.Generic; - -namespace StellaOps.Cli.Plugins; - -public sealed record CliPluginManifest -{ - public const string CurrentSchemaVersion = "1.0"; - - public string SchemaVersion { get; init; } = CurrentSchemaVersion; - - public string Id { get; init; } = string.Empty; - - public string DisplayName { get; init; } = string.Empty; - - public string Version { get; init; } = "0.0.0"; - - public bool RequiresRestart { get; init; } = true; - - public CliPluginEntryPoint? EntryPoint { get; init; } - - public IReadOnlyList Capabilities { get; init; } = Array.Empty(); - - public IReadOnlyDictionary Metadata { get; init; } = - new Dictionary(StringComparer.OrdinalIgnoreCase); - - public string? SourcePath { get; init; } - - public string? 
SourceDirectory { get; init; } -} - -public sealed record CliPluginEntryPoint -{ - public string Type { get; init; } = "dotnet"; - - public string Assembly { get; init; } = string.Empty; - - public string TypeName { get; init; } = string.Empty; -} +using System; +using System.Collections.Generic; + +namespace StellaOps.Cli.Plugins; + +public sealed record CliPluginManifest +{ + public const string CurrentSchemaVersion = "1.0"; + + public string SchemaVersion { get; init; } = CurrentSchemaVersion; + + public string Id { get; init; } = string.Empty; + + public string DisplayName { get; init; } = string.Empty; + + public string Version { get; init; } = "0.0.0"; + + public bool RequiresRestart { get; init; } = true; + + public CliPluginEntryPoint? EntryPoint { get; init; } + + public IReadOnlyList Capabilities { get; init; } = Array.Empty(); + + public IReadOnlyDictionary Metadata { get; init; } = + new Dictionary(StringComparer.OrdinalIgnoreCase); + + public string? SourcePath { get; init; } + + public string? SourceDirectory { get; init; } +} + +public sealed record CliPluginEntryPoint +{ + public string Type { get; init; } = "dotnet"; + + public string Assembly { get; init; } = string.Empty; + + public string TypeName { get; init; } = string.Empty; +} diff --git a/src/StellaOps.Cli/Plugins/CliPluginManifestLoader.cs b/src/Cli/StellaOps.Cli/Plugins/CliPluginManifestLoader.cs similarity index 97% rename from src/StellaOps.Cli/Plugins/CliPluginManifestLoader.cs rename to src/Cli/StellaOps.Cli/Plugins/CliPluginManifestLoader.cs index 0cabc2a5..2a70cb49 100644 --- a/src/StellaOps.Cli/Plugins/CliPluginManifestLoader.cs +++ b/src/Cli/StellaOps.Cli/Plugins/CliPluginManifestLoader.cs @@ -1,150 +1,150 @@ -using System; -using System.Collections.Generic; -using System.IO; -using System.Linq; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; - -namespace StellaOps.Cli.Plugins; - -internal sealed class CliPluginManifestLoader -{ - private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) - { - AllowTrailingCommas = true, - ReadCommentHandling = JsonCommentHandling.Skip, - PropertyNameCaseInsensitive = true - }; - - private readonly string _directory; - private readonly string _searchPattern; - - public CliPluginManifestLoader(string directory, string searchPattern) - { - if (string.IsNullOrWhiteSpace(directory)) - { - throw new ArgumentException("Plug-in manifest directory is required.", nameof(directory)); - } - - if (string.IsNullOrWhiteSpace(searchPattern)) - { - throw new ArgumentException("Manifest search pattern is required.", nameof(searchPattern)); - } - - _directory = Path.GetFullPath(directory); - _searchPattern = searchPattern; - } - - public async Task> LoadAsync(CancellationToken cancellationToken) - { - if (!Directory.Exists(_directory)) - { - return Array.Empty(); - } - - var manifests = new List(); - - foreach (var file in Directory.EnumerateFiles(_directory, _searchPattern, SearchOption.AllDirectories)) - { - if (IsHidden(file)) - { - continue; - } - - var manifest = await DeserializeAsync(file, cancellationToken).ConfigureAwait(false); - manifests.Add(manifest); - } - - return manifests - .OrderBy(static m => m.Id, StringComparer.OrdinalIgnoreCase) - .ThenBy(static m => m.Version, StringComparer.OrdinalIgnoreCase) - .ToArray(); - } - - private static bool IsHidden(string path) - { - var directory = Path.GetDirectoryName(path); - while (!string.IsNullOrEmpty(directory)) - { - var name = Path.GetFileName(directory); - 
if (name.StartsWith(".", StringComparison.Ordinal)) - { - return true; - } - - directory = Path.GetDirectoryName(directory); - } - - return false; - } - - private static async Task DeserializeAsync(string file, CancellationToken cancellationToken) - { - await using var stream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, FileOptions.Asynchronous); - CliPluginManifest? manifest; - - try - { - manifest = await JsonSerializer.DeserializeAsync(stream, SerializerOptions, cancellationToken) - .ConfigureAwait(false); - } - catch (JsonException ex) - { - throw new InvalidOperationException($"Failed to parse CLI plug-in manifest '{file}'.", ex); - } - - if (manifest is null) - { - throw new InvalidOperationException($"CLI plug-in manifest '{file}' is empty or invalid."); - } - - ValidateManifest(manifest, file); - - var directory = Path.GetDirectoryName(file); - return manifest with - { - SourcePath = file, - SourceDirectory = directory - }; - } - - private static void ValidateManifest(CliPluginManifest manifest, string file) - { - if (!string.Equals(manifest.SchemaVersion, CliPluginManifest.CurrentSchemaVersion, StringComparison.OrdinalIgnoreCase)) - { - throw new InvalidOperationException( - $"Manifest '{file}' uses unsupported schema version '{manifest.SchemaVersion}'. Expected '{CliPluginManifest.CurrentSchemaVersion}'."); - } - - if (string.IsNullOrWhiteSpace(manifest.Id)) - { - throw new InvalidOperationException($"Manifest '{file}' must specify a non-empty 'id'."); - } - - if (manifest.EntryPoint is null) - { - throw new InvalidOperationException($"Manifest '{file}' must specify an 'entryPoint'."); - } - - if (!string.Equals(manifest.EntryPoint.Type, "dotnet", StringComparison.OrdinalIgnoreCase)) - { - throw new InvalidOperationException($"Manifest '{file}' entry point type '{manifest.EntryPoint.Type}' is not supported. 
Expected 'dotnet'."); - } - - if (string.IsNullOrWhiteSpace(manifest.EntryPoint.Assembly)) - { - throw new InvalidOperationException($"Manifest '{file}' must specify an entry point assembly."); - } - - if (string.IsNullOrWhiteSpace(manifest.EntryPoint.TypeName)) - { - throw new InvalidOperationException($"Manifest '{file}' must specify an entry point type."); - } - - if (!manifest.RequiresRestart) - { - throw new InvalidOperationException($"Manifest '{file}' must set 'requiresRestart' to true."); - } - } -} +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Cli.Plugins; + +internal sealed class CliPluginManifestLoader +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + AllowTrailingCommas = true, + ReadCommentHandling = JsonCommentHandling.Skip, + PropertyNameCaseInsensitive = true + }; + + private readonly string _directory; + private readonly string _searchPattern; + + public CliPluginManifestLoader(string directory, string searchPattern) + { + if (string.IsNullOrWhiteSpace(directory)) + { + throw new ArgumentException("Plug-in manifest directory is required.", nameof(directory)); + } + + if (string.IsNullOrWhiteSpace(searchPattern)) + { + throw new ArgumentException("Manifest search pattern is required.", nameof(searchPattern)); + } + + _directory = Path.GetFullPath(directory); + _searchPattern = searchPattern; + } + + public async Task> LoadAsync(CancellationToken cancellationToken) + { + if (!Directory.Exists(_directory)) + { + return Array.Empty(); + } + + var manifests = new List(); + + foreach (var file in Directory.EnumerateFiles(_directory, _searchPattern, SearchOption.AllDirectories)) + { + if (IsHidden(file)) + { + continue; + } + + var manifest = await DeserializeAsync(file, cancellationToken).ConfigureAwait(false); + manifests.Add(manifest); + } + + return manifests + .OrderBy(static m => m.Id, StringComparer.OrdinalIgnoreCase) + .ThenBy(static m => m.Version, StringComparer.OrdinalIgnoreCase) + .ToArray(); + } + + private static bool IsHidden(string path) + { + var directory = Path.GetDirectoryName(path); + while (!string.IsNullOrEmpty(directory)) + { + var name = Path.GetFileName(directory); + if (name.StartsWith(".", StringComparison.Ordinal)) + { + return true; + } + + directory = Path.GetDirectoryName(directory); + } + + return false; + } + + private static async Task DeserializeAsync(string file, CancellationToken cancellationToken) + { + await using var stream = new FileStream(file, FileMode.Open, FileAccess.Read, FileShare.Read, 4096, FileOptions.Asynchronous); + CliPluginManifest? 
manifest; + + try + { + manifest = await JsonSerializer.DeserializeAsync(stream, SerializerOptions, cancellationToken) + .ConfigureAwait(false); + } + catch (JsonException ex) + { + throw new InvalidOperationException($"Failed to parse CLI plug-in manifest '{file}'.", ex); + } + + if (manifest is null) + { + throw new InvalidOperationException($"CLI plug-in manifest '{file}' is empty or invalid."); + } + + ValidateManifest(manifest, file); + + var directory = Path.GetDirectoryName(file); + return manifest with + { + SourcePath = file, + SourceDirectory = directory + }; + } + + private static void ValidateManifest(CliPluginManifest manifest, string file) + { + if (!string.Equals(manifest.SchemaVersion, CliPluginManifest.CurrentSchemaVersion, StringComparison.OrdinalIgnoreCase)) + { + throw new InvalidOperationException( + $"Manifest '{file}' uses unsupported schema version '{manifest.SchemaVersion}'. Expected '{CliPluginManifest.CurrentSchemaVersion}'."); + } + + if (string.IsNullOrWhiteSpace(manifest.Id)) + { + throw new InvalidOperationException($"Manifest '{file}' must specify a non-empty 'id'."); + } + + if (manifest.EntryPoint is null) + { + throw new InvalidOperationException($"Manifest '{file}' must specify an 'entryPoint'."); + } + + if (!string.Equals(manifest.EntryPoint.Type, "dotnet", StringComparison.OrdinalIgnoreCase)) + { + throw new InvalidOperationException($"Manifest '{file}' entry point type '{manifest.EntryPoint.Type}' is not supported. Expected 'dotnet'."); + } + + if (string.IsNullOrWhiteSpace(manifest.EntryPoint.Assembly)) + { + throw new InvalidOperationException($"Manifest '{file}' must specify an entry point assembly."); + } + + if (string.IsNullOrWhiteSpace(manifest.EntryPoint.TypeName)) + { + throw new InvalidOperationException($"Manifest '{file}' must specify an entry point type."); + } + + if (!manifest.RequiresRestart) + { + throw new InvalidOperationException($"Manifest '{file}' must set 'requiresRestart' to true."); + } + } +} diff --git a/src/StellaOps.Cli/Plugins/ICliCommandModule.cs b/src/Cli/StellaOps.Cli/Plugins/ICliCommandModule.cs similarity index 95% rename from src/StellaOps.Cli/Plugins/ICliCommandModule.cs rename to src/Cli/StellaOps.Cli/Plugins/ICliCommandModule.cs index 5c74532c..f04044b4 100644 --- a/src/StellaOps.Cli/Plugins/ICliCommandModule.cs +++ b/src/Cli/StellaOps.Cli/Plugins/ICliCommandModule.cs @@ -1,20 +1,20 @@ -using System; -using System.CommandLine; -using System.Threading; -using StellaOps.Cli.Configuration; - -namespace StellaOps.Cli.Plugins; - -public interface ICliCommandModule -{ - string Name { get; } - - bool IsAvailable(IServiceProvider services); - - void RegisterCommands( - RootCommand root, - IServiceProvider services, - StellaOpsCliOptions options, - Option verboseOption, - CancellationToken cancellationToken); -} +using System; +using System.CommandLine; +using System.Threading; +using StellaOps.Cli.Configuration; + +namespace StellaOps.Cli.Plugins; + +public interface ICliCommandModule +{ + string Name { get; } + + bool IsAvailable(IServiceProvider services); + + void RegisterCommands( + RootCommand root, + IServiceProvider services, + StellaOpsCliOptions options, + Option verboseOption, + CancellationToken cancellationToken); +} diff --git a/src/StellaOps.Cli/Plugins/RestartOnlyCliPluginGuard.cs b/src/Cli/StellaOps.Cli/Plugins/RestartOnlyCliPluginGuard.cs similarity index 96% rename from src/StellaOps.Cli/Plugins/RestartOnlyCliPluginGuard.cs rename to src/Cli/StellaOps.Cli/Plugins/RestartOnlyCliPluginGuard.cs index 
a18b8b40..bc731079 100644 --- a/src/StellaOps.Cli/Plugins/RestartOnlyCliPluginGuard.cs +++ b/src/Cli/StellaOps.Cli/Plugins/RestartOnlyCliPluginGuard.cs @@ -1,41 +1,41 @@ -using System.Collections.Concurrent; -using System.Collections.Generic; -using System.IO; -using System.Linq; -using System.Threading; - -namespace StellaOps.Cli.Plugins; - -internal sealed class RestartOnlyCliPluginGuard -{ - private readonly ConcurrentDictionary _plugins = new(StringComparer.OrdinalIgnoreCase); - private bool _sealed; - - public IReadOnlyCollection KnownPlugins => _plugins.Keys.ToArray(); - - public bool IsSealed => Volatile.Read(ref _sealed); - - public void EnsureRegistrationAllowed(string pluginPath) - { - ArgumentException.ThrowIfNullOrWhiteSpace(pluginPath); - - var normalized = Normalize(pluginPath); - if (IsSealed && !_plugins.ContainsKey(normalized)) - { - throw new InvalidOperationException($"Plug-in '{pluginPath}' cannot be registered after startup. Restart required."); - } - - _plugins.TryAdd(normalized, 0); - } - - public void Seal() - { - Volatile.Write(ref _sealed, true); - } - - private static string Normalize(string path) - { - var full = Path.GetFullPath(path); - return full.TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar); - } -} +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.IO; +using System.Linq; +using System.Threading; + +namespace StellaOps.Cli.Plugins; + +internal sealed class RestartOnlyCliPluginGuard +{ + private readonly ConcurrentDictionary _plugins = new(StringComparer.OrdinalIgnoreCase); + private bool _sealed; + + public IReadOnlyCollection KnownPlugins => _plugins.Keys.ToArray(); + + public bool IsSealed => Volatile.Read(ref _sealed); + + public void EnsureRegistrationAllowed(string pluginPath) + { + ArgumentException.ThrowIfNullOrWhiteSpace(pluginPath); + + var normalized = Normalize(pluginPath); + if (IsSealed && !_plugins.ContainsKey(normalized)) + { + throw new InvalidOperationException($"Plug-in '{pluginPath}' cannot be registered after startup. 
Restart required."); + } + + _plugins.TryAdd(normalized, 0); + } + + public void Seal() + { + Volatile.Write(ref _sealed, true); + } + + private static string Normalize(string path) + { + var full = Path.GetFullPath(path); + return full.TrimEnd(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar); + } +} diff --git a/src/StellaOps.Cli/Program.cs b/src/Cli/StellaOps.Cli/Program.cs similarity index 100% rename from src/StellaOps.Cli/Program.cs rename to src/Cli/StellaOps.Cli/Program.cs diff --git a/src/StellaOps.Cli/Prompts/TrivyDbExportPrompt.cs b/src/Cli/StellaOps.Cli/Prompts/TrivyDbExportPrompt.cs similarity index 100% rename from src/StellaOps.Cli/Prompts/TrivyDbExportPrompt.cs rename to src/Cli/StellaOps.Cli/Prompts/TrivyDbExportPrompt.cs diff --git a/src/StellaOps.Cli/Properties/AssemblyInfo.cs b/src/Cli/StellaOps.Cli/Properties/AssemblyInfo.cs similarity index 100% rename from src/StellaOps.Cli/Properties/AssemblyInfo.cs rename to src/Cli/StellaOps.Cli/Properties/AssemblyInfo.cs diff --git a/src/StellaOps.Cli/Services/AuthorityDiagnosticsReporter.cs b/src/Cli/StellaOps.Cli/Services/AuthorityDiagnosticsReporter.cs similarity index 100% rename from src/StellaOps.Cli/Services/AuthorityDiagnosticsReporter.cs rename to src/Cli/StellaOps.Cli/Services/AuthorityDiagnosticsReporter.cs diff --git a/src/StellaOps.Cli/Services/AuthorityRevocationClient.cs b/src/Cli/StellaOps.Cli/Services/AuthorityRevocationClient.cs similarity index 97% rename from src/StellaOps.Cli/Services/AuthorityRevocationClient.cs rename to src/Cli/StellaOps.Cli/Services/AuthorityRevocationClient.cs index 5ae386ae..07425775 100644 --- a/src/StellaOps.Cli/Services/AuthorityRevocationClient.cs +++ b/src/Cli/StellaOps.Cli/Services/AuthorityRevocationClient.cs @@ -1,223 +1,223 @@ -using System; -using System.Buffers.Text; -using System.Net.Http; -using System.Net.Http.Headers; -using System.Text.Json; -using System.Text.Json.Serialization; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using StellaOps.Auth.Client; -using StellaOps.Cli.Configuration; -using StellaOps.Cli.Services.Models; - -namespace StellaOps.Cli.Services; - -internal sealed class AuthorityRevocationClient : IAuthorityRevocationClient -{ - private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web); - private static readonly TimeSpan TokenRefreshSkew = TimeSpan.FromSeconds(30); - - private readonly HttpClient httpClient; - private readonly StellaOpsCliOptions options; - private readonly ILogger logger; - private readonly IStellaOpsTokenClient? tokenClient; - private readonly object tokenSync = new(); - - private string? cachedAccessToken; - private DateTimeOffset cachedAccessTokenExpiresAt = DateTimeOffset.MinValue; - - public AuthorityRevocationClient( - HttpClient httpClient, - StellaOpsCliOptions options, - ILogger logger, - IStellaOpsTokenClient? tokenClient = null) - { - this.httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient)); - this.options = options ?? throw new ArgumentNullException(nameof(options)); - this.logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); - this.tokenClient = tokenClient; - - if (!string.IsNullOrWhiteSpace(options.Authority?.Url) && httpClient.BaseAddress is null && Uri.TryCreate(options.Authority.Url, UriKind.Absolute, out var authorityUri)) - { - httpClient.BaseAddress = authorityUri; - } - } - - public async Task ExportAsync(bool verbose, CancellationToken cancellationToken) - { - EnsureAuthorityConfigured(); - - using var request = new HttpRequestMessage(HttpMethod.Get, "internal/revocations/export"); - var accessToken = await AcquireAccessTokenAsync(cancellationToken).ConfigureAwait(false); - if (!string.IsNullOrWhiteSpace(accessToken)) - { - request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", accessToken); - } - - using var response = await httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); - if (!response.IsSuccessStatusCode) - { - var body = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - var message = $"Authority export request failed with {(int)response.StatusCode} {response.ReasonPhrase}: {body}"; - throw new InvalidOperationException(message); - } - - var payload = await JsonSerializer.DeserializeAsync( - await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false), - SerializerOptions, - cancellationToken).ConfigureAwait(false); - - if (payload is null) - { - throw new InvalidOperationException("Authority export response payload was empty."); - } - - var bundleBytes = Convert.FromBase64String(payload.Bundle.Data); - var digest = payload.Digest?.Value ?? string.Empty; - - if (verbose) - { - logger.LogInformation( - "Received revocation export sequence {Sequence} (sha256:{Digest}, signing key {KeyId}, provider {Provider}).", - payload.Sequence, - digest, - payload.SigningKeyId ?? "", - string.IsNullOrWhiteSpace(payload.Signature?.Provider) ? "default" : payload.Signature!.Provider); - } - - return new AuthorityRevocationExportResult - { - BundleBytes = bundleBytes, - Signature = payload.Signature?.Value ?? 
string.Empty, - Digest = digest, - Sequence = payload.Sequence, - IssuedAt = payload.IssuedAt, - SigningKeyId = payload.SigningKeyId, - SigningProvider = payload.Signature?.Provider - }; - } - - private async Task AcquireAccessTokenAsync(CancellationToken cancellationToken) - { - if (tokenClient is null) - { - return null; - } - - lock (tokenSync) - { - if (!string.IsNullOrEmpty(cachedAccessToken) && cachedAccessTokenExpiresAt - TokenRefreshSkew > DateTimeOffset.UtcNow) - { - return cachedAccessToken; - } - } - - var scope = AuthorityTokenUtilities.ResolveScope(options); - var token = await RequestAccessTokenAsync(scope, cancellationToken).ConfigureAwait(false); - - lock (tokenSync) - { - cachedAccessToken = token.AccessToken; - cachedAccessTokenExpiresAt = token.ExpiresAtUtc; - return cachedAccessToken; - } - } - - private async Task RequestAccessTokenAsync(string scope, CancellationToken cancellationToken) - { - if (options.Authority is null) - { - throw new InvalidOperationException("Authority credentials are not configured."); - } - - if (!string.IsNullOrWhiteSpace(options.Authority.Username)) - { - if (string.IsNullOrWhiteSpace(options.Authority.Password)) - { - throw new InvalidOperationException("Authority password must be configured or run 'auth login'."); - } - - return await tokenClient!.RequestPasswordTokenAsync( - options.Authority.Username, - options.Authority.Password!, - scope, - null, - cancellationToken).ConfigureAwait(false); - } - - return await tokenClient!.RequestClientCredentialsTokenAsync(scope, null, cancellationToken).ConfigureAwait(false); - } - - private void EnsureAuthorityConfigured() - { - if (string.IsNullOrWhiteSpace(options.Authority?.Url)) - { - throw new InvalidOperationException("Authority URL is not configured. Set STELLAOPS_AUTHORITY_URL or update stellaops.yaml."); - } - - if (httpClient.BaseAddress is null) - { - if (!Uri.TryCreate(options.Authority.Url, UriKind.Absolute, out var baseUri)) - { - throw new InvalidOperationException("Authority URL is invalid."); - } - - httpClient.BaseAddress = baseUri; - } - } - - private sealed class ExportResponseDto - { - [JsonPropertyName("schemaVersion")] - public string SchemaVersion { get; set; } = string.Empty; - - [JsonPropertyName("bundleId")] - public string BundleId { get; set; } = string.Empty; - - [JsonPropertyName("sequence")] - public long Sequence { get; set; } - - [JsonPropertyName("issuedAt")] - public DateTimeOffset IssuedAt { get; set; } - - [JsonPropertyName("signingKeyId")] - public string? SigningKeyId { get; set; } - - [JsonPropertyName("bundle")] - public ExportPayloadDto Bundle { get; set; } = new(); - - [JsonPropertyName("signature")] - public ExportSignatureDto? Signature { get; set; } - - [JsonPropertyName("digest")] - public ExportDigestDto? 
Digest { get; set; } - } - - private sealed class ExportPayloadDto - { - [JsonPropertyName("data")] - public string Data { get; set; } = string.Empty; - } - - private sealed class ExportSignatureDto - { - [JsonPropertyName("algorithm")] - public string Algorithm { get; set; } = string.Empty; - - [JsonPropertyName("keyId")] - public string KeyId { get; set; } = string.Empty; - - [JsonPropertyName("provider")] - public string Provider { get; set; } = string.Empty; - - [JsonPropertyName("value")] - public string Value { get; set; } = string.Empty; - } - - private sealed class ExportDigestDto - { - [JsonPropertyName("value")] - public string Value { get; set; } = string.Empty; - } -} +using System; +using System.Buffers.Text; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Text.Json; +using System.Text.Json.Serialization; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using StellaOps.Auth.Client; +using StellaOps.Cli.Configuration; +using StellaOps.Cli.Services.Models; + +namespace StellaOps.Cli.Services; + +internal sealed class AuthorityRevocationClient : IAuthorityRevocationClient +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web); + private static readonly TimeSpan TokenRefreshSkew = TimeSpan.FromSeconds(30); + + private readonly HttpClient httpClient; + private readonly StellaOpsCliOptions options; + private readonly ILogger logger; + private readonly IStellaOpsTokenClient? tokenClient; + private readonly object tokenSync = new(); + + private string? cachedAccessToken; + private DateTimeOffset cachedAccessTokenExpiresAt = DateTimeOffset.MinValue; + + public AuthorityRevocationClient( + HttpClient httpClient, + StellaOpsCliOptions options, + ILogger logger, + IStellaOpsTokenClient? tokenClient = null) + { + this.httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient)); + this.options = options ?? throw new ArgumentNullException(nameof(options)); + this.logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + this.tokenClient = tokenClient; + + if (!string.IsNullOrWhiteSpace(options.Authority?.Url) && httpClient.BaseAddress is null && Uri.TryCreate(options.Authority.Url, UriKind.Absolute, out var authorityUri)) + { + httpClient.BaseAddress = authorityUri; + } + } + + public async Task ExportAsync(bool verbose, CancellationToken cancellationToken) + { + EnsureAuthorityConfigured(); + + using var request = new HttpRequestMessage(HttpMethod.Get, "internal/revocations/export"); + var accessToken = await AcquireAccessTokenAsync(cancellationToken).ConfigureAwait(false); + if (!string.IsNullOrWhiteSpace(accessToken)) + { + request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", accessToken); + } + + using var response = await httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + var body = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + var message = $"Authority export request failed with {(int)response.StatusCode} {response.ReasonPhrase}: {body}"; + throw new InvalidOperationException(message); + } + + var payload = await JsonSerializer.DeserializeAsync( + await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false), + SerializerOptions, + cancellationToken).ConfigureAwait(false); + + if (payload is null) + { + throw new InvalidOperationException("Authority export response payload was empty."); + } + + var bundleBytes = Convert.FromBase64String(payload.Bundle.Data); + var digest = payload.Digest?.Value ?? string.Empty; + + if (verbose) + { + logger.LogInformation( + "Received revocation export sequence {Sequence} (sha256:{Digest}, signing key {KeyId}, provider {Provider}).", + payload.Sequence, + digest, + payload.SigningKeyId ?? "", + string.IsNullOrWhiteSpace(payload.Signature?.Provider) ? "default" : payload.Signature!.Provider); + } + + return new AuthorityRevocationExportResult + { + BundleBytes = bundleBytes, + Signature = payload.Signature?.Value ?? 
string.Empty, + Digest = digest, + Sequence = payload.Sequence, + IssuedAt = payload.IssuedAt, + SigningKeyId = payload.SigningKeyId, + SigningProvider = payload.Signature?.Provider + }; + } + + private async Task AcquireAccessTokenAsync(CancellationToken cancellationToken) + { + if (tokenClient is null) + { + return null; + } + + lock (tokenSync) + { + if (!string.IsNullOrEmpty(cachedAccessToken) && cachedAccessTokenExpiresAt - TokenRefreshSkew > DateTimeOffset.UtcNow) + { + return cachedAccessToken; + } + } + + var scope = AuthorityTokenUtilities.ResolveScope(options); + var token = await RequestAccessTokenAsync(scope, cancellationToken).ConfigureAwait(false); + + lock (tokenSync) + { + cachedAccessToken = token.AccessToken; + cachedAccessTokenExpiresAt = token.ExpiresAtUtc; + return cachedAccessToken; + } + } + + private async Task RequestAccessTokenAsync(string scope, CancellationToken cancellationToken) + { + if (options.Authority is null) + { + throw new InvalidOperationException("Authority credentials are not configured."); + } + + if (!string.IsNullOrWhiteSpace(options.Authority.Username)) + { + if (string.IsNullOrWhiteSpace(options.Authority.Password)) + { + throw new InvalidOperationException("Authority password must be configured or run 'auth login'."); + } + + return await tokenClient!.RequestPasswordTokenAsync( + options.Authority.Username, + options.Authority.Password!, + scope, + null, + cancellationToken).ConfigureAwait(false); + } + + return await tokenClient!.RequestClientCredentialsTokenAsync(scope, null, cancellationToken).ConfigureAwait(false); + } + + private void EnsureAuthorityConfigured() + { + if (string.IsNullOrWhiteSpace(options.Authority?.Url)) + { + throw new InvalidOperationException("Authority URL is not configured. Set STELLAOPS_AUTHORITY_URL or update stellaops.yaml."); + } + + if (httpClient.BaseAddress is null) + { + if (!Uri.TryCreate(options.Authority.Url, UriKind.Absolute, out var baseUri)) + { + throw new InvalidOperationException("Authority URL is invalid."); + } + + httpClient.BaseAddress = baseUri; + } + } + + private sealed class ExportResponseDto + { + [JsonPropertyName("schemaVersion")] + public string SchemaVersion { get; set; } = string.Empty; + + [JsonPropertyName("bundleId")] + public string BundleId { get; set; } = string.Empty; + + [JsonPropertyName("sequence")] + public long Sequence { get; set; } + + [JsonPropertyName("issuedAt")] + public DateTimeOffset IssuedAt { get; set; } + + [JsonPropertyName("signingKeyId")] + public string? SigningKeyId { get; set; } + + [JsonPropertyName("bundle")] + public ExportPayloadDto Bundle { get; set; } = new(); + + [JsonPropertyName("signature")] + public ExportSignatureDto? Signature { get; set; } + + [JsonPropertyName("digest")] + public ExportDigestDto? 
Digest { get; set; } + } + + private sealed class ExportPayloadDto + { + [JsonPropertyName("data")] + public string Data { get; set; } = string.Empty; + } + + private sealed class ExportSignatureDto + { + [JsonPropertyName("algorithm")] + public string Algorithm { get; set; } = string.Empty; + + [JsonPropertyName("keyId")] + public string KeyId { get; set; } = string.Empty; + + [JsonPropertyName("provider")] + public string Provider { get; set; } = string.Empty; + + [JsonPropertyName("value")] + public string Value { get; set; } = string.Empty; + } + + private sealed class ExportDigestDto + { + [JsonPropertyName("value")] + public string Value { get; set; } = string.Empty; + } +} diff --git a/src/StellaOps.Cli/Services/BackendOperationsClient.cs b/src/Cli/StellaOps.Cli/Services/BackendOperationsClient.cs similarity index 97% rename from src/StellaOps.Cli/Services/BackendOperationsClient.cs rename to src/Cli/StellaOps.Cli/Services/BackendOperationsClient.cs index 6f875f58..6d3a9337 100644 --- a/src/StellaOps.Cli/Services/BackendOperationsClient.cs +++ b/src/Cli/StellaOps.Cli/Services/BackendOperationsClient.cs @@ -1,2486 +1,2486 @@ -using System; -using System.Collections.Generic; -using System.Collections.ObjectModel; -using System.IO; -using System.Net; -using System.Net.Http; -using System.Linq; -using System.Net.Http.Headers; -using System.Net.Http.Json; -using System.Globalization; -using System.Security.Cryptography; -using System.Text; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using StellaOps.Auth.Abstractions; -using StellaOps.Auth.Client; -using StellaOps.Cli.Configuration; -using StellaOps.Cli.Services.Models; -using StellaOps.Cli.Services.Models.Transport; - -namespace StellaOps.Cli.Services; - -internal sealed class BackendOperationsClient : IBackendOperationsClient -{ - private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web); - private static readonly TimeSpan TokenRefreshSkew = TimeSpan.FromSeconds(30); - private static readonly IReadOnlyDictionary EmptyMetadata = - new ReadOnlyDictionary(new Dictionary(0, StringComparer.OrdinalIgnoreCase)); - - private const string OperatorReasonParameterName = "operator_reason"; - private const string OperatorTicketParameterName = "operator_ticket"; - - private readonly HttpClient _httpClient; - private readonly StellaOpsCliOptions _options; - private readonly ILogger _logger; - private readonly IStellaOpsTokenClient? _tokenClient; - private readonly object _tokenSync = new(); - private string? _cachedAccessToken; - private DateTimeOffset _cachedAccessTokenExpiresAt = DateTimeOffset.MinValue; - - public BackendOperationsClient(HttpClient httpClient, StellaOpsCliOptions options, ILogger logger, IStellaOpsTokenClient? tokenClient = null) - { - _httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient)); - _options = options ?? throw new ArgumentNullException(nameof(options)); - _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); - _tokenClient = tokenClient; - - if (!string.IsNullOrWhiteSpace(_options.BackendUrl) && httpClient.BaseAddress is null) - { - if (Uri.TryCreate(_options.BackendUrl, UriKind.Absolute, out var baseUri)) - { - httpClient.BaseAddress = baseUri; - } - } - } - - public async Task DownloadScannerAsync(string channel, string outputPath, bool overwrite, bool verbose, CancellationToken cancellationToken) - { - EnsureBackendConfigured(); - - channel = string.IsNullOrWhiteSpace(channel) ? "stable" : channel.Trim(); - outputPath = ResolveArtifactPath(outputPath, channel); - Directory.CreateDirectory(Path.GetDirectoryName(outputPath)!); - - if (!overwrite && File.Exists(outputPath)) - { - var existing = new FileInfo(outputPath); - _logger.LogInformation("Scanner artifact already cached at {Path} ({Size} bytes).", outputPath, existing.Length); - return new ScannerArtifactResult(outputPath, existing.Length, true); - } - - var attempt = 0; - var maxAttempts = Math.Max(1, _options.ScannerDownloadAttempts); - - while (true) - { - attempt++; - try - { - using var request = CreateRequest(HttpMethod.Get, $"api/scanner/artifacts/{channel}"); - await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); - using var response = await _httpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false); - if (!response.IsSuccessStatusCode) - { - var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException(failure); - } - - return await ProcessScannerResponseAsync(response, outputPath, channel, verbose, cancellationToken).ConfigureAwait(false); - } - catch (Exception ex) when (attempt < maxAttempts) - { - var backoffSeconds = Math.Pow(2, attempt); - _logger.LogWarning(ex, "Scanner download attempt {Attempt}/{MaxAttempts} failed. Retrying in {Delay:F0}s...", attempt, maxAttempts, backoffSeconds); - await Task.Delay(TimeSpan.FromSeconds(backoffSeconds), cancellationToken).ConfigureAwait(false); - } - } - } - - private async Task ProcessScannerResponseAsync(HttpResponseMessage response, string outputPath, string channel, bool verbose, CancellationToken cancellationToken) - { - var tempFile = outputPath + ".tmp"; - await using (var payloadStream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false)) - await using (var fileStream = File.Create(tempFile)) - { - await payloadStream.CopyToAsync(fileStream, cancellationToken).ConfigureAwait(false); - } - - var expectedDigest = ExtractHeaderValue(response.Headers, "X-StellaOps-Digest"); - var signatureHeader = ExtractHeaderValue(response.Headers, "X-StellaOps-Signature"); - - var digestHex = await ValidateDigestAsync(tempFile, expectedDigest, cancellationToken).ConfigureAwait(false); - await ValidateSignatureAsync(signatureHeader, digestHex, verbose, cancellationToken).ConfigureAwait(false); - - if (verbose) - { - var signatureNote = string.IsNullOrWhiteSpace(signatureHeader) ? 
"no signature" : "signature validated"; - _logger.LogDebug("Scanner digest sha256:{Digest} ({SignatureNote}).", digestHex, signatureNote); - } - - if (File.Exists(outputPath)) - { - File.Delete(outputPath); - } - - File.Move(tempFile, outputPath); - - PersistMetadata(outputPath, channel, digestHex, signatureHeader, response); - - var downloaded = new FileInfo(outputPath); - _logger.LogInformation("Scanner downloaded to {Path} ({Size} bytes).", outputPath, downloaded.Length); - - return new ScannerArtifactResult(outputPath, downloaded.Length, false); - } - - public async Task UploadScanResultsAsync(string filePath, CancellationToken cancellationToken) - { - EnsureBackendConfigured(); - - if (!File.Exists(filePath)) - { - throw new FileNotFoundException("Scan result file not found.", filePath); - } - - var maxAttempts = Math.Max(1, _options.ScanUploadAttempts); - var attempt = 0; - - while (true) - { - attempt++; - try - { - using var content = new MultipartFormDataContent(); - await using var fileStream = File.OpenRead(filePath); - var streamContent = new StreamContent(fileStream); - streamContent.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream"); - content.Add(streamContent, "file", Path.GetFileName(filePath)); - - using var request = CreateRequest(HttpMethod.Post, "api/scanner/results"); - await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); - request.Content = content; - - using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); - if (response.IsSuccessStatusCode) - { - _logger.LogInformation("Scan results uploaded from {Path}.", filePath); - return; - } - - var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); - if (attempt >= maxAttempts) - { - throw new InvalidOperationException(failure); - } - - var delay = GetRetryDelay(response, attempt); - _logger.LogWarning( - "Scan upload attempt {Attempt}/{MaxAttempts} failed ({Reason}). Retrying in {Delay:F1}s...", - attempt, - maxAttempts, - failure, - delay.TotalSeconds); - await Task.Delay(delay, cancellationToken).ConfigureAwait(false); - } - catch (Exception ex) when (attempt < maxAttempts) - { - var delay = TimeSpan.FromSeconds(Math.Pow(2, attempt)); - _logger.LogWarning( - ex, - "Scan upload attempt {Attempt}/{MaxAttempts} threw an exception. Retrying in {Delay:F1}s...", - attempt, - maxAttempts, - delay.TotalSeconds); - await Task.Delay(delay, cancellationToken).ConfigureAwait(false); - } - } - } - - public async Task TriggerJobAsync(string jobKind, IDictionary parameters, CancellationToken cancellationToken) - { - EnsureBackendConfigured(); - - if (string.IsNullOrWhiteSpace(jobKind)) - { - throw new ArgumentException("Job kind must be provided.", nameof(jobKind)); - } - - var requestBody = new JobTriggerRequest - { - Trigger = "cli", - Parameters = parameters is null ? new Dictionary(StringComparer.Ordinal) : new Dictionary(parameters, StringComparer.Ordinal) - }; - - var request = CreateRequest(HttpMethod.Post, $"jobs/{jobKind}"); - await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); - request.Content = JsonContent.Create(requestBody, options: SerializerOptions); - - using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); - if (response.StatusCode == HttpStatusCode.Accepted) - { - JobRunResponse? 
run = null; - if (response.Content.Headers.ContentLength is > 0) - { - try - { - run = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); - } - catch (JsonException ex) - { - _logger.LogWarning(ex, "Failed to deserialize job run response for job kind {Kind}.", jobKind); - } - } - - var location = response.Headers.Location?.ToString(); - return new JobTriggerResult(true, "Accepted", location, run); - } - - var failureMessage = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); - return new JobTriggerResult(false, failureMessage, null, null); - } - - public async Task ExecuteExcititorOperationAsync(string route, HttpMethod method, object? payload, CancellationToken cancellationToken) - { - EnsureBackendConfigured(); - - if (string.IsNullOrWhiteSpace(route)) - { - throw new ArgumentException("Route must be provided.", nameof(route)); - } - - var relative = route.TrimStart('/'); - using var request = CreateRequest(method, $"excititor/{relative}"); - - if (payload is not null && method != HttpMethod.Get && method != HttpMethod.Delete) - { - request.Content = JsonContent.Create(payload, options: SerializerOptions); - } - - await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); - using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); - - if (response.IsSuccessStatusCode) - { - var (message, payloadElement) = await ExtractExcititorResponseAsync(response, cancellationToken).ConfigureAwait(false); - var location = response.Headers.Location?.ToString(); - return new ExcititorOperationResult(true, message, location, payloadElement); - } - - var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); - return new ExcititorOperationResult(false, failure, null, null); - } - - public async Task DownloadExcititorExportAsync(string exportId, string destinationPath, string? expectedDigestAlgorithm, string? expectedDigest, CancellationToken cancellationToken) - { - EnsureBackendConfigured(); - - if (string.IsNullOrWhiteSpace(exportId)) - { - throw new ArgumentException("Export id must be provided.", nameof(exportId)); - } - - if (string.IsNullOrWhiteSpace(destinationPath)) - { - throw new ArgumentException("Destination path must be provided.", nameof(destinationPath)); - } - - var fullPath = Path.GetFullPath(destinationPath); - var directory = Path.GetDirectoryName(fullPath); - if (!string.IsNullOrEmpty(directory) && !Directory.Exists(directory)) - { - Directory.CreateDirectory(directory); - } - - var normalizedAlgorithm = string.IsNullOrWhiteSpace(expectedDigestAlgorithm) - ? 
null - : expectedDigestAlgorithm.Trim(); - var normalizedDigest = NormalizeExpectedDigest(expectedDigest); - - if (File.Exists(fullPath) - && string.Equals(normalizedAlgorithm, "sha256", StringComparison.OrdinalIgnoreCase) - && !string.IsNullOrWhiteSpace(normalizedDigest)) - { - var existingDigest = await ComputeSha256Async(fullPath, cancellationToken).ConfigureAwait(false); - if (string.Equals(existingDigest, normalizedDigest, StringComparison.OrdinalIgnoreCase)) - { - var info = new FileInfo(fullPath); - _logger.LogDebug("Export {ExportId} already present at {Path}; digest matches.", exportId, fullPath); - return new ExcititorExportDownloadResult(fullPath, info.Length, true); - } - } - - var encodedId = Uri.EscapeDataString(exportId); - using var request = CreateRequest(HttpMethod.Get, $"excititor/export/{encodedId}/download"); - await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); - - var tempPath = fullPath + ".tmp"; - if (File.Exists(tempPath)) - { - File.Delete(tempPath); - } - - using (var response = await _httpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false)) - { - if (!response.IsSuccessStatusCode) - { - var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException(failure); - } - - await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false); - await using (var fileStream = File.Create(tempPath)) - { - await stream.CopyToAsync(fileStream, cancellationToken).ConfigureAwait(false); - } - } - - if (!string.IsNullOrWhiteSpace(normalizedAlgorithm) && !string.IsNullOrWhiteSpace(normalizedDigest)) - { - if (string.Equals(normalizedAlgorithm, "sha256", StringComparison.OrdinalIgnoreCase)) - { - var computed = await ComputeSha256Async(tempPath, cancellationToken).ConfigureAwait(false); - if (!string.Equals(computed, normalizedDigest, StringComparison.OrdinalIgnoreCase)) - { - File.Delete(tempPath); - throw new InvalidOperationException($"Export digest mismatch. Expected sha256:{normalizedDigest}, computed sha256:{computed}."); - } - } - else - { - _logger.LogWarning("Export digest verification skipped. Unsupported algorithm {Algorithm}.", normalizedAlgorithm); - } - } - - if (File.Exists(fullPath)) - { - File.Delete(fullPath); - } - - File.Move(tempPath, fullPath); - - var downloaded = new FileInfo(fullPath); - return new ExcititorExportDownloadResult(fullPath, downloaded.Length, false); - } - - public async Task EvaluateRuntimePolicyAsync(RuntimePolicyEvaluationRequest request, CancellationToken cancellationToken) - { - EnsureBackendConfigured(); - - if (request is null) - { - throw new ArgumentNullException(nameof(request)); - } - - var images = NormalizeImages(request.Images); - if (images.Count == 0) - { - throw new ArgumentException("At least one image digest must be provided.", nameof(request)); - } - - var payload = new RuntimePolicyEvaluationRequestDocument - { - Namespace = string.IsNullOrWhiteSpace(request.Namespace) ? null : request.Namespace.Trim(), - Images = images - }; - - if (request.Labels.Count > 0) - { - payload.Labels = new Dictionary(StringComparer.Ordinal); - foreach (var label in request.Labels) - { - if (!string.IsNullOrWhiteSpace(label.Key)) - { - payload.Labels[label.Key] = label.Value ?? 
string.Empty; - } - } - } - - using var message = CreateRequest(HttpMethod.Post, "api/scanner/policy/runtime"); - await AuthorizeRequestAsync(message, cancellationToken).ConfigureAwait(false); - message.Content = JsonContent.Create(payload, options: SerializerOptions); - - using var response = await _httpClient.SendAsync(message, cancellationToken).ConfigureAwait(false); - if (!response.IsSuccessStatusCode) - { - var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException(failure); - } - - RuntimePolicyEvaluationResponseDocument? document; - try - { - document = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); - } - catch (JsonException ex) - { - var raw = response.Content is null ? string.Empty : await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException($"Failed to parse runtime policy response. {ex.Message}", ex) - { - Data = { ["payload"] = raw } - }; - } - - if (document is null) - { - throw new InvalidOperationException("Runtime policy response was empty."); - } - - var decisions = new Dictionary(StringComparer.Ordinal); - if (document.Results is not null) - { - foreach (var kvp in document.Results) - { - var image = kvp.Key; - var decision = kvp.Value; - if (string.IsNullOrWhiteSpace(image) || decision is null) - { - continue; - } - - var verdict = string.IsNullOrWhiteSpace(decision.PolicyVerdict) - ? "unknown" - : decision.PolicyVerdict!.Trim(); - - var reasons = ExtractReasons(decision.Reasons); - var metadata = ExtractExtensionMetadata(decision.ExtensionData); - - var hasSbom = decision.HasSbomReferrers ?? decision.HasSbomLegacy; - - RuntimePolicyRekorReference? rekor = null; - if (decision.Rekor is not null && - (!string.IsNullOrWhiteSpace(decision.Rekor.Uuid) || - !string.IsNullOrWhiteSpace(decision.Rekor.Url) || - decision.Rekor.Verified.HasValue)) - { - rekor = new RuntimePolicyRekorReference( - NormalizeOptionalString(decision.Rekor.Uuid), - NormalizeOptionalString(decision.Rekor.Url), - decision.Rekor.Verified); - } - - decisions[image] = new RuntimePolicyImageDecision( - verdict, - decision.Signed, - hasSbom, - reasons, - rekor, - metadata); - } - } - - var decisionsView = new ReadOnlyDictionary(decisions); - - return new RuntimePolicyEvaluationResult( - document.TtlSeconds ?? 0, - document.ExpiresAtUtc?.ToUniversalTime(), - string.IsNullOrWhiteSpace(document.PolicyRevision) ? null : document.PolicyRevision, - decisionsView); - } - - public async Task ActivatePolicyRevisionAsync(string policyId, int version, PolicyActivationRequest request, CancellationToken cancellationToken) - { - EnsureBackendConfigured(); - - if (string.IsNullOrWhiteSpace(policyId)) - { - throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); - } - - if (version <= 0) - { - throw new ArgumentOutOfRangeException(nameof(version), "Version must be greater than zero."); - } - - if (request is null) - { - throw new ArgumentNullException(nameof(request)); - } - - var requestDocument = new PolicyActivationRequestDocument - { - Comment = NormalizeOptionalString(request.Comment), - RunNow = request.RunNow ? true : null, - ScheduledAt = request.ScheduledAt, - Priority = NormalizeOptionalString(request.Priority), - Rollback = request.Rollback ? 
true : null, - IncidentId = NormalizeOptionalString(request.IncidentId) - }; - - var encodedPolicyId = Uri.EscapeDataString(policyId.Trim()); - using var httpRequest = CreateRequest(HttpMethod.Post, $"api/policy/policies/{encodedPolicyId}/versions/{version}:activate"); - await AuthorizeRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false); - httpRequest.Content = JsonContent.Create(requestDocument, options: SerializerOptions); - - using var response = await _httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false); - if (!response.IsSuccessStatusCode) - { - var (message, problem) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false); - var errorCode = ExtractProblemErrorCode(problem); - throw new PolicyApiException(message, response.StatusCode, errorCode); - } - - PolicyActivationResponseDocument? responseDocument; - try - { - responseDocument = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); - } - catch (JsonException ex) - { - var raw = response.Content is null ? string.Empty : await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException($"Failed to parse policy activation response: {ex.Message}", ex) - { - Data = { ["payload"] = raw } - }; - } - - if (responseDocument is null) - { - throw new InvalidOperationException("Policy activation response was empty."); - } - - if (string.IsNullOrWhiteSpace(responseDocument.Status)) - { - throw new InvalidOperationException("Policy activation response missing status."); - } - - if (responseDocument.Revision is null) - { - throw new InvalidOperationException("Policy activation response missing revision."); - } - - return MapPolicyActivation(responseDocument); - } - - public async Task SimulatePolicyAsync(string policyId, PolicySimulationInput input, CancellationToken cancellationToken) - { - EnsureBackendConfigured(); - - if (string.IsNullOrWhiteSpace(policyId)) - { - throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); - } - - if (input is null) - { - throw new ArgumentNullException(nameof(input)); - } - - var requestDocument = new PolicySimulationRequestDocument - { - BaseVersion = input.BaseVersion, - CandidateVersion = input.CandidateVersion, - Explain = input.Explain ? 
true : null - }; - - if (input.SbomSet.Count > 0) - { - requestDocument.SbomSet = input.SbomSet; - } - - if (input.Environment.Count > 0) - { - var environment = new Dictionary(StringComparer.Ordinal); - foreach (var pair in input.Environment) - { - if (string.IsNullOrWhiteSpace(pair.Key)) - { - continue; - } - - environment[pair.Key] = SerializeEnvironmentValue(pair.Value); - } - - if (environment.Count > 0) - { - requestDocument.Env = environment; - } - } - - var encodedPolicyId = Uri.EscapeDataString(policyId); - using var request = CreateRequest(HttpMethod.Post, $"api/policy/policies/{encodedPolicyId}/simulate"); - await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); - request.Content = JsonContent.Create(requestDocument, options: SerializerOptions); - - using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); - if (!response.IsSuccessStatusCode) - { - var (message, problem) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false); - var errorCode = ExtractProblemErrorCode(problem); - throw new PolicyApiException(message, response.StatusCode, errorCode); - } - - if (response.Content is null || response.Content.Headers.ContentLength is 0) - { - throw new InvalidOperationException("Policy simulation response was empty."); - } - - PolicySimulationResponseDocument? document; - try - { - document = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); - } - catch (JsonException ex) - { - var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException($"Failed to parse policy simulation response: {ex.Message}", ex) - { - Data = { ["payload"] = raw } - }; - } - - if (document is null) - { - throw new InvalidOperationException("Policy simulation response was empty."); - } - - if (document.Diff is null) - { - throw new InvalidOperationException("Policy simulation response missing diff summary."); - } - - return MapPolicySimulation(document); - } - - public async Task GetPolicyFindingsAsync(PolicyFindingsQuery query, CancellationToken cancellationToken) - { - if (query is null) - { - throw new ArgumentNullException(nameof(query)); - } - - EnsureBackendConfigured(); - - var policyId = query.PolicyId; - if (string.IsNullOrWhiteSpace(policyId)) - { - throw new ArgumentException("Policy identifier must be provided.", nameof(query)); - } - - var encodedPolicyId = Uri.EscapeDataString(policyId.Trim()); - var relative = $"api/policy/findings/{encodedPolicyId}{BuildPolicyFindingsQueryString(query)}"; - - using var request = CreateRequest(HttpMethod.Get, relative); - await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); - - using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); - if (!response.IsSuccessStatusCode) - { - var (message, problem) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false); - var errorCode = ExtractProblemErrorCode(problem); - throw new PolicyApiException(message, response.StatusCode, errorCode); - } - - PolicyFindingsResponseDocument? 
document; - try - { - document = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); - } - catch (JsonException ex) - { - var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException($"Failed to parse policy findings response: {ex.Message}", ex) - { - Data = { ["payload"] = raw } - }; - } - - if (document is null) - { - throw new InvalidOperationException("Policy findings response was empty."); - } - - return MapPolicyFindings(document); - } - - public async Task GetPolicyFindingAsync(string policyId, string findingId, CancellationToken cancellationToken) - { - EnsureBackendConfigured(); - - if (string.IsNullOrWhiteSpace(policyId)) - { - throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); - } - - if (string.IsNullOrWhiteSpace(findingId)) - { - throw new ArgumentException("Finding identifier must be provided.", nameof(findingId)); - } - - var encodedPolicyId = Uri.EscapeDataString(policyId.Trim()); - var encodedFindingId = Uri.EscapeDataString(findingId.Trim()); - using var request = CreateRequest(HttpMethod.Get, $"api/policy/findings/{encodedPolicyId}/{encodedFindingId}"); - await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); - - using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); - if (!response.IsSuccessStatusCode) - { - var (message, problem) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false); - var errorCode = ExtractProblemErrorCode(problem); - throw new PolicyApiException(message, response.StatusCode, errorCode); - } - - PolicyFindingDocumentDocument? document; - try - { - document = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); - } - catch (JsonException ex) - { - var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException($"Failed to parse policy finding response: {ex.Message}", ex) - { - Data = { ["payload"] = raw } - }; - } - - if (document is null) - { - throw new InvalidOperationException("Policy finding response was empty."); - } - - return MapPolicyFinding(document); - } - - public async Task GetPolicyFindingExplainAsync(string policyId, string findingId, string? mode, CancellationToken cancellationToken) - { - EnsureBackendConfigured(); - - if (string.IsNullOrWhiteSpace(policyId)) - { - throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); - } - - if (string.IsNullOrWhiteSpace(findingId)) - { - throw new ArgumentException("Finding identifier must be provided.", nameof(findingId)); - } - - var encodedPolicyId = Uri.EscapeDataString(policyId.Trim()); - var encodedFindingId = Uri.EscapeDataString(findingId.Trim()); - var query = string.IsNullOrWhiteSpace(mode) ? 
string.Empty : $"?mode={Uri.EscapeDataString(mode.Trim())}"; - - using var request = CreateRequest(HttpMethod.Get, $"api/policy/findings/{encodedPolicyId}/{encodedFindingId}/explain{query}"); - await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); - - using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); - if (!response.IsSuccessStatusCode) - { - var (message, problem) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false); - var errorCode = ExtractProblemErrorCode(problem); - throw new PolicyApiException(message, response.StatusCode, errorCode); - } - - PolicyFindingExplainResponseDocument? document; - try - { - document = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); - } - catch (JsonException ex) - { - var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException($"Failed to parse policy finding explain response: {ex.Message}", ex) - { - Data = { ["payload"] = raw } - }; - } - - if (document is null) - { - throw new InvalidOperationException("Policy finding explain response was empty."); - } - - return MapPolicyFindingExplain(document); - } - - public async Task> GetExcititorProvidersAsync(bool includeDisabled, CancellationToken cancellationToken) - { - EnsureBackendConfigured(); - - var query = includeDisabled ? "?includeDisabled=true" : string.Empty; - using var request = CreateRequest(HttpMethod.Get, $"excititor/providers{query}"); - await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); - using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); - - if (!response.IsSuccessStatusCode) - { - var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException(failure); - } - - if (response.Content is null || response.Content.Headers.ContentLength is 0) - { - return Array.Empty(); - } - - await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false); - if (stream is null || stream.Length == 0) - { - return Array.Empty(); - } - - using var document = await JsonDocument.ParseAsync(stream, cancellationToken: cancellationToken).ConfigureAwait(false); - var root = document.RootElement; - if (root.ValueKind == JsonValueKind.Object && root.TryGetProperty("providers", out var providersProperty)) - { - root = providersProperty; - } - - if (root.ValueKind != JsonValueKind.Array) - { - return Array.Empty(); - } - - var list = new List(); - foreach (var item in root.EnumerateArray()) - { - var id = GetStringProperty(item, "id") ?? string.Empty; - if (string.IsNullOrWhiteSpace(id)) - { - continue; - } - - var kind = GetStringProperty(item, "kind") ?? "unknown"; - var displayName = GetStringProperty(item, "displayName") ?? id; - var trustTier = GetStringProperty(item, "trustTier") ?? string.Empty; - var enabled = GetBooleanProperty(item, "enabled", defaultValue: true); - var lastIngested = GetDateTimeOffsetProperty(item, "lastIngestedAt"); - - list.Add(new ExcititorProviderSummary(id, kind, displayName, trustTier, enabled, lastIngested)); - } - - return list; - } - - public async Task DownloadOfflineKitAsync(string? 
bundleId, string destinationDirectory, bool overwrite, bool resume, CancellationToken cancellationToken) - { - EnsureBackendConfigured(); - - var rootDirectory = ResolveOfflineDirectory(destinationDirectory); - Directory.CreateDirectory(rootDirectory); - - var descriptor = await FetchOfflineKitDescriptorAsync(bundleId, cancellationToken).ConfigureAwait(false); - - var bundlePath = Path.Combine(rootDirectory, descriptor.BundleName); - var metadataPath = bundlePath + ".metadata.json"; - var manifestPath = Path.Combine(rootDirectory, descriptor.ManifestName); - var bundleSignaturePath = descriptor.BundleSignatureName is not null ? Path.Combine(rootDirectory, descriptor.BundleSignatureName) : null; - var manifestSignaturePath = descriptor.ManifestSignatureName is not null ? Path.Combine(rootDirectory, descriptor.ManifestSignatureName) : null; - - var fromCache = false; - if (!overwrite && File.Exists(bundlePath)) - { - var digest = await ComputeSha256Async(bundlePath, cancellationToken).ConfigureAwait(false); - if (string.Equals(digest, descriptor.BundleSha256, StringComparison.OrdinalIgnoreCase)) - { - fromCache = true; - } - else if (resume) - { - var partial = bundlePath + ".partial"; - File.Move(bundlePath, partial, overwrite: true); - } - else - { - File.Delete(bundlePath); - } - } - - if (!fromCache) - { - await DownloadFileWithResumeAsync(descriptor.BundleDownloadUri, bundlePath, descriptor.BundleSha256, descriptor.BundleSize, resume, cancellationToken).ConfigureAwait(false); - } - - await DownloadFileWithResumeAsync(descriptor.ManifestDownloadUri, manifestPath, descriptor.ManifestSha256, descriptor.ManifestSize ?? 0, resume: false, cancellationToken).ConfigureAwait(false); - - if (descriptor.BundleSignatureDownloadUri is not null && bundleSignaturePath is not null) - { - await DownloadAuxiliaryFileAsync(descriptor.BundleSignatureDownloadUri, bundleSignaturePath, cancellationToken).ConfigureAwait(false); - } - - if (descriptor.ManifestSignatureDownloadUri is not null && manifestSignaturePath is not null) - { - await DownloadAuxiliaryFileAsync(descriptor.ManifestSignatureDownloadUri, manifestSignaturePath, cancellationToken).ConfigureAwait(false); - } - - await WriteOfflineKitMetadataAsync(metadataPath, descriptor, bundlePath, manifestPath, bundleSignaturePath, manifestSignaturePath, cancellationToken).ConfigureAwait(false); - - return new OfflineKitDownloadResult( - descriptor, - bundlePath, - manifestPath, - bundleSignaturePath, - manifestSignaturePath, - metadataPath, - fromCache); - } - - public async Task ImportOfflineKitAsync(OfflineKitImportRequest request, CancellationToken cancellationToken) - { - EnsureBackendConfigured(); - - if (request is null) - { - throw new ArgumentNullException(nameof(request)); - } - - var bundlePath = Path.GetFullPath(request.BundlePath); - if (!File.Exists(bundlePath)) - { - throw new FileNotFoundException("Offline kit bundle not found.", bundlePath); - } - - string? manifestPath = null; - if (!string.IsNullOrWhiteSpace(request.ManifestPath)) - { - manifestPath = Path.GetFullPath(request.ManifestPath); - if (!File.Exists(manifestPath)) - { - throw new FileNotFoundException("Offline kit manifest not found.", manifestPath); - } - } - - string? 
bundleSignaturePath = null; - if (!string.IsNullOrWhiteSpace(request.BundleSignaturePath)) - { - bundleSignaturePath = Path.GetFullPath(request.BundleSignaturePath); - if (!File.Exists(bundleSignaturePath)) - { - throw new FileNotFoundException("Offline kit bundle signature not found.", bundleSignaturePath); - } - } - - string? manifestSignaturePath = null; - if (!string.IsNullOrWhiteSpace(request.ManifestSignaturePath)) - { - manifestSignaturePath = Path.GetFullPath(request.ManifestSignaturePath); - if (!File.Exists(manifestSignaturePath)) - { - throw new FileNotFoundException("Offline kit manifest signature not found.", manifestSignaturePath); - } - } - - var bundleSize = request.BundleSize ?? new FileInfo(bundlePath).Length; - var bundleSha = string.IsNullOrWhiteSpace(request.BundleSha256) - ? await ComputeSha256Async(bundlePath, cancellationToken).ConfigureAwait(false) - : NormalizeSha(request.BundleSha256) ?? throw new InvalidOperationException("Bundle digest must not be empty."); - - string? manifestSha = null; - long? manifestSize = null; - if (manifestPath is not null) - { - manifestSize = request.ManifestSize ?? new FileInfo(manifestPath).Length; - manifestSha = string.IsNullOrWhiteSpace(request.ManifestSha256) - ? await ComputeSha256Async(manifestPath, cancellationToken).ConfigureAwait(false) - : NormalizeSha(request.ManifestSha256); - } - - var metadata = new OfflineKitImportMetadataPayload - { - BundleId = request.BundleId, - BundleSha256 = bundleSha, - BundleSize = bundleSize, - CapturedAt = request.CapturedAt, - Channel = request.Channel, - Kind = request.Kind, - IsDelta = request.IsDelta, - BaseBundleId = request.BaseBundleId, - ManifestSha256 = manifestSha, - ManifestSize = manifestSize - }; - - using var message = CreateRequest(HttpMethod.Post, "api/offline-kit/import"); - await AuthorizeRequestAsync(message, cancellationToken).ConfigureAwait(false); - - using var content = new MultipartFormDataContent(); - - var metadataOptions = new JsonSerializerOptions(SerializerOptions) - { - WriteIndented = false - }; - var metadataJson = JsonSerializer.Serialize(metadata, metadataOptions); - var metadataContent = new StringContent(metadataJson, Encoding.UTF8, "application/json"); - content.Add(metadataContent, "metadata"); - - var bundleStream = File.OpenRead(bundlePath); - var bundleContent = new StreamContent(bundleStream); - bundleContent.Headers.ContentType = new MediaTypeHeaderValue("application/gzip"); - content.Add(bundleContent, "bundle", Path.GetFileName(bundlePath)); - - if (manifestPath is not null) - { - var manifestStream = File.OpenRead(manifestPath); - var manifestContent = new StreamContent(manifestStream); - manifestContent.Headers.ContentType = new MediaTypeHeaderValue("application/json"); - content.Add(manifestContent, "manifest", Path.GetFileName(manifestPath)); - } - - if (bundleSignaturePath is not null) - { - var signatureStream = File.OpenRead(bundleSignaturePath); - var signatureContent = new StreamContent(signatureStream); - signatureContent.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream"); - content.Add(signatureContent, "bundleSignature", Path.GetFileName(bundleSignaturePath)); - } - - if (manifestSignaturePath is not null) - { - var manifestSignatureStream = File.OpenRead(manifestSignaturePath); - var manifestSignatureContent = new StreamContent(manifestSignatureStream); - manifestSignatureContent.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream"); - content.Add(manifestSignatureContent, 
"manifestSignature", Path.GetFileName(manifestSignaturePath)); - } - - message.Content = content; - - using var response = await _httpClient.SendAsync(message, cancellationToken).ConfigureAwait(false); - if (!response.IsSuccessStatusCode) - { - var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException(failure); - } - - OfflineKitImportResponseTransport? document; - try - { - document = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); - } - catch (JsonException ex) - { - var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException($"Failed to parse offline kit import response. {ex.Message}", ex) - { - Data = { ["payload"] = raw } - }; - } - - var submittedAt = document?.SubmittedAt ?? DateTimeOffset.UtcNow; - - return new OfflineKitImportResult( - document?.ImportId, - document?.Status, - submittedAt, - document?.Message); - } - - public async Task GetOfflineKitStatusAsync(CancellationToken cancellationToken) - { - EnsureBackendConfigured(); - - using var request = CreateRequest(HttpMethod.Get, "api/offline-kit/status"); - await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); - using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); - - if (!response.IsSuccessStatusCode) - { - var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException(failure); - } - - if (response.Content is null || response.Content.Headers.ContentLength is 0) - { - return new OfflineKitStatus(null, null, null, false, null, null, null, null, null, Array.Empty()); - } - - OfflineKitStatusTransport? document; - try - { - document = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); - } - catch (JsonException ex) - { - var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException($"Failed to parse offline kit status response. {ex.Message}", ex) - { - Data = { ["payload"] = raw } - }; - } - - var current = document?.Current; - var components = MapOfflineComponents(document?.Components); - - if (current is null) - { - return new OfflineKitStatus(null, null, null, false, null, null, null, null, null, components); - } - - return new OfflineKitStatus( - NormalizeOptionalString(current.BundleId), - NormalizeOptionalString(current.Channel), - NormalizeOptionalString(current.Kind), - current.IsDelta ?? 
false, - NormalizeOptionalString(current.BaseBundleId), - current.CapturedAt?.ToUniversalTime(), - current.ImportedAt?.ToUniversalTime(), - NormalizeSha(current.BundleSha256), - current.BundleSize, - components); - } - - public async Task ExecuteAocIngestDryRunAsync(AocIngestDryRunRequest requestBody, CancellationToken cancellationToken) - { - EnsureBackendConfigured(); - ArgumentNullException.ThrowIfNull(requestBody); - - using var request = CreateRequest(HttpMethod.Post, "api/aoc/ingest/dry-run"); - await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); - request.Content = JsonContent.Create(requestBody, options: SerializerOptions); - - using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); - if (!response.IsSuccessStatusCode) - { - var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException(failure); - } - - try - { - var result = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); - return result ?? new AocIngestDryRunResponse(); - } - catch (JsonException ex) - { - var payload = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException($"Failed to parse ingest dry-run response. {ex.Message}", ex) - { - Data = { ["payload"] = payload } - }; - } - } - - public async Task ExecuteAocVerifyAsync(AocVerifyRequest requestBody, CancellationToken cancellationToken) - { - EnsureBackendConfigured(); - ArgumentNullException.ThrowIfNull(requestBody); - - using var request = CreateRequest(HttpMethod.Post, "api/aoc/verify"); - await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); - request.Content = JsonContent.Create(requestBody, options: SerializerOptions); - - using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); - if (!response.IsSuccessStatusCode) - { - var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException(failure); - } - - try - { - var result = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); - return result ?? new AocVerifyResponse(); - } - catch (JsonException ex) - { - var payload = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException($"Failed to parse AOC verification response. {ex.Message}", ex) - { - Data = { ["payload"] = payload } - }; - } - } - - private string ResolveOfflineDirectory(string destinationDirectory) - { - if (!string.IsNullOrWhiteSpace(destinationDirectory)) - { - return Path.GetFullPath(destinationDirectory); - } - - var configured = _options.Offline?.KitsDirectory; - if (!string.IsNullOrWhiteSpace(configured)) - { - return Path.GetFullPath(configured); - } - - return Path.GetFullPath(Path.Combine(Environment.CurrentDirectory, "offline-kits")); - } - - private async Task FetchOfflineKitDescriptorAsync(string? bundleId, CancellationToken cancellationToken) - { - var route = string.IsNullOrWhiteSpace(bundleId) - ? 
"api/offline-kit/bundles/latest" - : $"api/offline-kit/bundles/{Uri.EscapeDataString(bundleId)}"; - - using var request = CreateRequest(HttpMethod.Get, route); - await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); - using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); - - if (!response.IsSuccessStatusCode) - { - var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException(failure); - } - - OfflineKitBundleDescriptorTransport? payload; - try - { - payload = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); - } - catch (JsonException ex) - { - var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException($"Failed to parse offline kit metadata. {ex.Message}", ex) - { - Data = { ["payload"] = raw } - }; - } - - if (payload is null) - { - throw new InvalidOperationException("Offline kit metadata response was empty."); - } - - return MapOfflineKitDescriptor(payload); - } - - private OfflineKitBundleDescriptor MapOfflineKitDescriptor(OfflineKitBundleDescriptorTransport transport) - { - if (transport is null) - { - throw new ArgumentNullException(nameof(transport)); - } - - var bundleName = string.IsNullOrWhiteSpace(transport.BundleName) - ? throw new InvalidOperationException("Offline kit metadata missing bundleName.") - : transport.BundleName!.Trim(); - - var bundleId = string.IsNullOrWhiteSpace(transport.BundleId) ? bundleName : transport.BundleId!.Trim(); - var bundleSha = NormalizeSha(transport.BundleSha256) ?? throw new InvalidOperationException("Offline kit metadata missing bundleSha256."); - - var bundleSize = transport.BundleSize; - if (bundleSize <= 0) - { - throw new InvalidOperationException("Offline kit metadata missing bundle size."); - } - - var manifestName = string.IsNullOrWhiteSpace(transport.ManifestName) ? "offline-manifest.json" : transport.ManifestName!.Trim(); - var manifestSha = NormalizeSha(transport.ManifestSha256) ?? throw new InvalidOperationException("Offline kit metadata missing manifestSha256."); - var capturedAt = transport.CapturedAt?.ToUniversalTime() ?? DateTimeOffset.UtcNow; - - var bundleDownloadUri = ResolveDownloadUri(transport.BundleUrl, transport.BundlePath, bundleName); - var manifestDownloadUri = ResolveDownloadUri(transport.ManifestUrl, transport.ManifestPath, manifestName); - var bundleSignatureUri = ResolveOptionalDownloadUri(transport.BundleSignatureUrl, transport.BundleSignaturePath, transport.BundleSignatureName); - var manifestSignatureUri = ResolveOptionalDownloadUri(transport.ManifestSignatureUrl, transport.ManifestSignaturePath, transport.ManifestSignatureName); - var bundleSignatureName = ResolveArtifactName(transport.BundleSignatureName, bundleSignatureUri); - var manifestSignatureName = ResolveArtifactName(transport.ManifestSignatureName, manifestSignatureUri); - - return new OfflineKitBundleDescriptor( - bundleId, - bundleName, - bundleSha, - bundleSize, - bundleDownloadUri, - manifestName, - manifestSha, - manifestDownloadUri, - capturedAt, - NormalizeOptionalString(transport.Channel), - NormalizeOptionalString(transport.Kind), - transport.IsDelta ?? false, - NormalizeOptionalString(transport.BaseBundleId), - bundleSignatureName, - bundleSignatureUri, - manifestSignatureName, - manifestSignatureUri, - transport.ManifestSize); - } - - private static string? 
ResolveArtifactName(string? explicitName, Uri? uri) - { - if (!string.IsNullOrWhiteSpace(explicitName)) - { - return explicitName.Trim(); - } - - if (uri is not null) - { - var name = Path.GetFileName(uri.LocalPath); - return string.IsNullOrWhiteSpace(name) ? null : name; - } - - return null; - } - - private Uri ResolveDownloadUri(string? absoluteOrRelativeUrl, string? relativePath, string fallbackFileName) - { - if (!string.IsNullOrWhiteSpace(absoluteOrRelativeUrl)) - { - var candidate = new Uri(absoluteOrRelativeUrl, UriKind.RelativeOrAbsolute); - if (candidate.IsAbsoluteUri) - { - return candidate; - } - - if (_httpClient.BaseAddress is not null) - { - return new Uri(_httpClient.BaseAddress, candidate); - } - - return BuildUriFromRelative(candidate.ToString()); - } - - if (!string.IsNullOrWhiteSpace(relativePath)) - { - return BuildUriFromRelative(relativePath); - } - - if (!string.IsNullOrWhiteSpace(fallbackFileName)) - { - return BuildUriFromRelative(fallbackFileName); - } - - throw new InvalidOperationException("Offline kit metadata did not include a download URL."); - } - - private Uri BuildUriFromRelative(string relative) - { - var normalized = relative.TrimStart('/'); - if (!string.IsNullOrWhiteSpace(_options.Offline?.MirrorUrl) && - Uri.TryCreate(_options.Offline.MirrorUrl, UriKind.Absolute, out var mirrorBase)) - { - if (!mirrorBase.AbsoluteUri.EndsWith("/")) - { - mirrorBase = new Uri(mirrorBase.AbsoluteUri + "/"); - } - - return new Uri(mirrorBase, normalized); - } - - if (_httpClient.BaseAddress is not null) - { - return new Uri(_httpClient.BaseAddress, normalized); - } - - throw new InvalidOperationException($"Cannot resolve offline kit URI for '{relative}' because no mirror or backend base address is configured."); - } - - private Uri? ResolveOptionalDownloadUri(string? absoluteOrRelativeUrl, string? relativePath, string? fallbackName) - { - var hasData = !string.IsNullOrWhiteSpace(absoluteOrRelativeUrl) || - !string.IsNullOrWhiteSpace(relativePath) || - !string.IsNullOrWhiteSpace(fallbackName); - - if (!hasData) - { - return null; - } - - try - { - return ResolveDownloadUri(absoluteOrRelativeUrl, relativePath, fallbackName ?? string.Empty); - } - catch - { - return null; - } - } - - private async Task DownloadFileWithResumeAsync(Uri downloadUri, string targetPath, string expectedSha256, long expectedSize, bool resume, CancellationToken cancellationToken) - { - var directory = Path.GetDirectoryName(targetPath); - if (!string.IsNullOrEmpty(directory)) - { - Directory.CreateDirectory(directory); - } - - var partialPath = resume ? 
targetPath + ".partial" : targetPath + ".tmp"; - - if (!resume && File.Exists(targetPath)) - { - File.Delete(targetPath); - } - - if (resume && File.Exists(targetPath)) - { - File.Move(targetPath, partialPath, overwrite: true); - } - - long existingLength = 0; - if (resume && File.Exists(partialPath)) - { - existingLength = new FileInfo(partialPath).Length; - if (expectedSize > 0 && existingLength >= expectedSize) - { - existingLength = expectedSize; - } - } - - while (true) - { - using var request = new HttpRequestMessage(HttpMethod.Get, downloadUri); - if (resume && existingLength > 0 && expectedSize > 0 && existingLength < expectedSize) - { - request.Headers.Range = new RangeHeaderValue(existingLength, null); - } - - using var response = await _httpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false); - - if (resume && existingLength > 0 && expectedSize > 0 && existingLength < expectedSize && response.StatusCode == HttpStatusCode.OK) - { - existingLength = 0; - if (File.Exists(partialPath)) - { - File.Delete(partialPath); - } - - continue; - } - - if (!response.IsSuccessStatusCode && - !(resume && existingLength > 0 && response.StatusCode == HttpStatusCode.PartialContent)) - { - var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException(failure); - } - - var destination = resume ? partialPath : targetPath; - var mode = resume && existingLength > 0 ? FileMode.Append : FileMode.Create; - - await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false); - await using (var file = new FileStream(destination, mode, FileAccess.Write, FileShare.None, 81920, useAsync: true)) - { - await stream.CopyToAsync(file, cancellationToken).ConfigureAwait(false); - } - - break; - } - - if (resume && File.Exists(partialPath)) - { - File.Move(partialPath, targetPath, overwrite: true); - } - - var digest = await ComputeSha256Async(targetPath, cancellationToken).ConfigureAwait(false); - if (!string.Equals(digest, expectedSha256, StringComparison.OrdinalIgnoreCase)) - { - File.Delete(targetPath); - throw new InvalidOperationException($"Digest mismatch for {Path.GetFileName(targetPath)}. Expected {expectedSha256} but computed {digest}."); - } - - if (expectedSize > 0) - { - var actualSize = new FileInfo(targetPath).Length; - if (actualSize != expectedSize) - { - File.Delete(targetPath); - throw new InvalidOperationException($"Size mismatch for {Path.GetFileName(targetPath)}. 
Expected {expectedSize:N0} bytes but downloaded {actualSize:N0} bytes."); - } - } - } - - private async Task DownloadAuxiliaryFileAsync(Uri downloadUri, string targetPath, CancellationToken cancellationToken) - { - var directory = Path.GetDirectoryName(targetPath); - if (!string.IsNullOrEmpty(directory)) - { - Directory.CreateDirectory(directory); - } - - using var request = new HttpRequestMessage(HttpMethod.Get, downloadUri); - using var response = await _httpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false); - - if (!response.IsSuccessStatusCode) - { - var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException(failure); - } - - await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false); - await using var file = new FileStream(targetPath, FileMode.Create, FileAccess.Write, FileShare.None, 81920, useAsync: true); - await stream.CopyToAsync(file, cancellationToken).ConfigureAwait(false); - } - - private static async Task WriteOfflineKitMetadataAsync( - string metadataPath, - OfflineKitBundleDescriptor descriptor, - string bundlePath, - string manifestPath, - string? bundleSignaturePath, - string? manifestSignaturePath, - CancellationToken cancellationToken) - { - var document = new OfflineKitMetadataDocument - { - BundleId = descriptor.BundleId, - BundleName = descriptor.BundleName, - BundleSha256 = descriptor.BundleSha256, - BundleSize = descriptor.BundleSize, - BundlePath = Path.GetFullPath(bundlePath), - CapturedAt = descriptor.CapturedAt, - DownloadedAt = DateTimeOffset.UtcNow, - Channel = descriptor.Channel, - Kind = descriptor.Kind, - IsDelta = descriptor.IsDelta, - BaseBundleId = descriptor.BaseBundleId, - ManifestName = descriptor.ManifestName, - ManifestSha256 = descriptor.ManifestSha256, - ManifestSize = descriptor.ManifestSize, - ManifestPath = Path.GetFullPath(manifestPath), - BundleSignaturePath = bundleSignaturePath is null ? null : Path.GetFullPath(bundleSignaturePath), - ManifestSignaturePath = manifestSignaturePath is null ? null : Path.GetFullPath(manifestSignaturePath) - }; - - var options = new JsonSerializerOptions(SerializerOptions) - { - WriteIndented = true - }; - - var payload = JsonSerializer.Serialize(document, options); - await File.WriteAllTextAsync(metadataPath, payload, cancellationToken).ConfigureAwait(false); - } - - private static IReadOnlyList MapOfflineComponents(List? transports) - { - if (transports is null || transports.Count == 0) - { - return Array.Empty(); - } - - var list = new List(); - foreach (var transport in transports) - { - if (transport is null || string.IsNullOrWhiteSpace(transport.Name)) - { - continue; - } - - list.Add(new OfflineKitComponentStatus( - transport.Name.Trim(), - NormalizeOptionalString(transport.Version), - NormalizeSha(transport.Digest), - transport.CapturedAt?.ToUniversalTime(), - transport.SizeBytes)); - } - - return list.Count == 0 ? Array.Empty() : list; - } - - private static string? NormalizeSha(string? digest) - { - if (string.IsNullOrWhiteSpace(digest)) - { - return null; - } - - var value = digest.Trim(); - if (value.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase)) - { - value = value.Substring("sha256:".Length); - } - - return value.ToLowerInvariant(); - } - - private sealed class OfflineKitImportMetadataPayload - { - public string? 
BundleId { get; set; } - - public string BundleSha256 { get; set; } = string.Empty; - - public long BundleSize { get; set; } - - public DateTimeOffset? CapturedAt { get; set; } - - public string? Channel { get; set; } - - public string? Kind { get; set; } - - public bool? IsDelta { get; set; } - - public string? BaseBundleId { get; set; } - - public string? ManifestSha256 { get; set; } - - public long? ManifestSize { get; set; } - } - - private static List NormalizeImages(IReadOnlyList images) - { - var normalized = new List(); - if (images is null) - { - return normalized; - } - - var seen = new HashSet(StringComparer.Ordinal); - foreach (var entry in images) - { - if (string.IsNullOrWhiteSpace(entry)) - { - continue; - } - - var trimmed = entry.Trim(); - if (seen.Add(trimmed)) - { - normalized.Add(trimmed); - } - } - - return normalized; - } - - private static IReadOnlyList ExtractReasons(List? reasons) - { - if (reasons is null || reasons.Count == 0) - { - return Array.Empty(); - } - - var list = new List(); - foreach (var reason in reasons) - { - if (!string.IsNullOrWhiteSpace(reason)) - { - list.Add(reason.Trim()); - } - } - - return list.Count == 0 ? Array.Empty() : list; - } - - private static IReadOnlyDictionary ExtractExtensionMetadata(Dictionary? extensionData) - { - if (extensionData is null || extensionData.Count == 0) - { - return EmptyMetadata; - } - - var metadata = new Dictionary(StringComparer.OrdinalIgnoreCase); - foreach (var kvp in extensionData) - { - var value = ConvertJsonElementToObject(kvp.Value); - if (value is not null) - { - metadata[kvp.Key] = value; - } - } - - if (metadata.Count == 0) - { - return EmptyMetadata; - } - - return new ReadOnlyDictionary(metadata); - } - - private static object? ConvertJsonElementToObject(JsonElement element) - { - return element.ValueKind switch - { - JsonValueKind.String => element.GetString(), - JsonValueKind.True => true, - JsonValueKind.False => false, - JsonValueKind.Number when element.TryGetInt64(out var integer) => integer, - JsonValueKind.Number when element.TryGetDouble(out var @double) => @double, - JsonValueKind.Null or JsonValueKind.Undefined => null, - _ => element.GetRawText() - }; - } - - private static string? NormalizeOptionalString(string? value) - { - return string.IsNullOrWhiteSpace(value) ? null : value.Trim(); - } - - private HttpRequestMessage CreateRequest(HttpMethod method, string relativeUri) - { - if (!Uri.TryCreate(relativeUri, UriKind.RelativeOrAbsolute, out var requestUri)) - { - throw new InvalidOperationException($"Invalid request URI '{relativeUri}'."); - } - - if (requestUri.IsAbsoluteUri) - { - // Nothing to normalize. - } - else - { - requestUri = new Uri(relativeUri.TrimStart('/'), UriKind.Relative); - } - - return new HttpRequestMessage(method, requestUri); - } - - private async Task AuthorizeRequestAsync(HttpRequestMessage request, CancellationToken cancellationToken) - { - var token = await ResolveAccessTokenAsync(cancellationToken).ConfigureAwait(false); - if (!string.IsNullOrWhiteSpace(token)) - { - request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", token); - } - } - - private IReadOnlyDictionary? ResolveOperatorMetadataIfNeeded(string? 
scope) - { - if (string.IsNullOrWhiteSpace(scope) || !scope.Contains("orch:operate", StringComparison.OrdinalIgnoreCase)) - { - return null; - } - - var reason = _options.Authority.OperatorReason?.Trim(); - var ticket = _options.Authority.OperatorTicket?.Trim(); - - if (string.IsNullOrWhiteSpace(reason) || string.IsNullOrWhiteSpace(ticket)) - { - throw new InvalidOperationException("Authority.OperatorReason and Authority.OperatorTicket must be configured when requesting orch:operate tokens. Set STELLAOPS_ORCH_REASON and STELLAOPS_ORCH_TICKET or the corresponding configuration values."); - } - - return new Dictionary(StringComparer.Ordinal) - { - [OperatorReasonParameterName] = reason, - [OperatorTicketParameterName] = ticket - }; - } - - private async Task ResolveAccessTokenAsync(CancellationToken cancellationToken) - { - if (!string.IsNullOrWhiteSpace(_options.ApiKey)) - { - return _options.ApiKey; - } - - if (_tokenClient is null || string.IsNullOrWhiteSpace(_options.Authority.Url)) - { - return null; - } - - var now = DateTimeOffset.UtcNow; - - lock (_tokenSync) - { - if (!string.IsNullOrEmpty(_cachedAccessToken) && now < _cachedAccessTokenExpiresAt - TokenRefreshSkew) - { - return _cachedAccessToken; - } - } - - var cacheKey = AuthorityTokenUtilities.BuildCacheKey(_options); - var cachedEntry = await _tokenClient.GetCachedTokenAsync(cacheKey, cancellationToken).ConfigureAwait(false); - if (cachedEntry is not null && now < cachedEntry.ExpiresAtUtc - TokenRefreshSkew) - { - lock (_tokenSync) - { - _cachedAccessToken = cachedEntry.AccessToken; - _cachedAccessTokenExpiresAt = cachedEntry.ExpiresAtUtc; - return _cachedAccessToken; - } - } - - var scope = AuthorityTokenUtilities.ResolveScope(_options); - var operatorMetadata = ResolveOperatorMetadataIfNeeded(scope); - - StellaOpsTokenResult token; - if (!string.IsNullOrWhiteSpace(_options.Authority.Username)) - { - if (string.IsNullOrWhiteSpace(_options.Authority.Password)) - { - throw new InvalidOperationException("Authority password must be configured when username is provided."); - } - - token = await _tokenClient.RequestPasswordTokenAsync( - _options.Authority.Username, - _options.Authority.Password!, - scope, - null, - cancellationToken).ConfigureAwait(false); - } - else - { - token = await _tokenClient.RequestClientCredentialsTokenAsync(scope, operatorMetadata, cancellationToken).ConfigureAwait(false); - } - - await _tokenClient.CacheTokenAsync(cacheKey, token.ToCacheEntry(), cancellationToken).ConfigureAwait(false); - - lock (_tokenSync) - { - _cachedAccessToken = token.AccessToken; - _cachedAccessTokenExpiresAt = token.ExpiresAtUtc; - return _cachedAccessToken; - } - } - - private async Task<(string Message, JsonElement? Payload)> ExtractExcititorResponseAsync(HttpResponseMessage response, CancellationToken cancellationToken) - { - if (response.Content is null || response.Content.Headers.ContentLength is 0) - { - return ($"HTTP {(int)response.StatusCode}", null); - } - - try - { - await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false); - if (stream is null || stream.Length == 0) - { - return ($"HTTP {(int)response.StatusCode}", null); - } - - using var document = await JsonDocument.ParseAsync(stream, cancellationToken: cancellationToken).ConfigureAwait(false); - var root = document.RootElement.Clone(); - string? message = null; - if (root.ValueKind == JsonValueKind.Object) - { - message = GetStringProperty(root, "message") ?? 
GetStringProperty(root, "status"); - } - - if (string.IsNullOrWhiteSpace(message)) - { - message = root.ValueKind == JsonValueKind.Object || root.ValueKind == JsonValueKind.Array - ? root.ToString() - : root.GetRawText(); - } - - return (message ?? $"HTTP {(int)response.StatusCode}", root); - } - catch (JsonException) - { - var text = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - return (string.IsNullOrWhiteSpace(text) ? $"HTTP {(int)response.StatusCode}" : text.Trim(), null); - } - } - - private static bool TryGetPropertyCaseInsensitive(JsonElement element, string propertyName, out JsonElement property) - { - if (element.ValueKind == JsonValueKind.Object && element.TryGetProperty(propertyName, out property)) - { - return true; - } - - if (element.ValueKind == JsonValueKind.Object) - { - foreach (var candidate in element.EnumerateObject()) - { - if (string.Equals(candidate.Name, propertyName, StringComparison.OrdinalIgnoreCase)) - { - property = candidate.Value; - return true; - } - } - } - - property = default; - return false; - } - - private static string? GetStringProperty(JsonElement element, string propertyName) - { - if (TryGetPropertyCaseInsensitive(element, propertyName, out var property)) - { - if (property.ValueKind == JsonValueKind.String) - { - return property.GetString(); - } - } - - return null; - } - - private static bool GetBooleanProperty(JsonElement element, string propertyName, bool defaultValue) - { - if (TryGetPropertyCaseInsensitive(element, propertyName, out var property)) - { - return property.ValueKind switch - { - JsonValueKind.True => true, - JsonValueKind.False => false, - JsonValueKind.String when bool.TryParse(property.GetString(), out var parsed) => parsed, - _ => defaultValue - }; - } - - return defaultValue; - } - - private static DateTimeOffset? GetDateTimeOffsetProperty(JsonElement element, string propertyName) - { - if (TryGetPropertyCaseInsensitive(element, propertyName, out var property) && property.ValueKind == JsonValueKind.String) - { - if (DateTimeOffset.TryParse(property.GetString(), CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal, out var parsed)) - { - return parsed.ToUniversalTime(); - } - } - - return null; - } - - private static JsonElement SerializeEnvironmentValue(object? value) - { - if (value is JsonElement element) - { - return element; - } - - return JsonSerializer.SerializeToElement(value, SerializerOptions); - } - - private static string? ExtractProblemErrorCode(ProblemDocument? problem) - { - if (problem?.Extensions is null || problem.Extensions.Count == 0) - { - return null; - } - - if (problem.Extensions.TryGetValue("code", out var value)) - { - switch (value) - { - case string code when !string.IsNullOrWhiteSpace(code): - return code; - case JsonElement element when element.ValueKind == JsonValueKind.String: - var text = element.GetString(); - return string.IsNullOrWhiteSpace(text) ? 
null : text; - } - } - - return null; - } - - private static string BuildPolicyFindingsQueryString(PolicyFindingsQuery query) - { - var parameters = new List(); - - if (query.SbomIds is not null) - { - foreach (var sbom in query.SbomIds) - { - if (!string.IsNullOrWhiteSpace(sbom)) - { - parameters.Add($"sbomId={Uri.EscapeDataString(sbom)}"); - } - } - } - - if (query.Statuses is not null && query.Statuses.Count > 0) - { - var joined = string.Join(",", query.Statuses.Where(s => !string.IsNullOrWhiteSpace(s))); - if (!string.IsNullOrWhiteSpace(joined)) - { - parameters.Add($"status={Uri.EscapeDataString(joined)}"); - } - } - - if (query.Severities is not null && query.Severities.Count > 0) - { - var joined = string.Join(",", query.Severities.Where(s => !string.IsNullOrWhiteSpace(s))); - if (!string.IsNullOrWhiteSpace(joined)) - { - parameters.Add($"severity={Uri.EscapeDataString(joined)}"); - } - } - - if (!string.IsNullOrWhiteSpace(query.Cursor)) - { - parameters.Add($"cursor={Uri.EscapeDataString(query.Cursor)}"); - } - - if (query.Page.HasValue) - { - parameters.Add($"page={query.Page.Value}"); - } - - if (query.PageSize.HasValue) - { - parameters.Add($"pageSize={query.PageSize.Value}"); - } - - if (query.Since.HasValue) - { - var value = query.Since.Value.ToUniversalTime().ToString("o", CultureInfo.InvariantCulture); - parameters.Add($"since={Uri.EscapeDataString(value)}"); - } - - if (parameters.Count == 0) - { - return string.Empty; - } - - return "?" + string.Join("&", parameters); - } - - private static PolicyFindingsPage MapPolicyFindings(PolicyFindingsResponseDocument document) - { - var items = document.Items is null - ? new List(capacity: 0) - : document.Items - .Where(item => item is not null) - .Select(item => MapPolicyFinding(item!)) - .ToList(); - - var nextCursor = string.IsNullOrWhiteSpace(document.NextCursor) ? null : document.NextCursor; - var view = new ReadOnlyCollection(items); - return new PolicyFindingsPage(view, nextCursor, document.TotalCount); - } - - private static PolicyFindingDocument MapPolicyFinding(PolicyFindingDocumentDocument document) - { - var findingId = document.FindingId; - if (string.IsNullOrWhiteSpace(findingId)) - { - throw new InvalidOperationException("Policy finding response missing findingId."); - } - - var status = string.IsNullOrWhiteSpace(document.Status) ? "unknown" : document.Status!; - var severityNormalized = document.Severity?.Normalized; - if (string.IsNullOrWhiteSpace(severityNormalized)) - { - severityNormalized = "unknown"; - } - - var severity = new PolicyFindingSeverity(severityNormalized!, document.Severity?.Score); - - var sbomId = string.IsNullOrWhiteSpace(document.SbomId) ? "(unknown)" : document.SbomId!; - - IReadOnlyList advisoryIds; - if (document.AdvisoryIds is null || document.AdvisoryIds.Count == 0) - { - advisoryIds = Array.Empty(); - } - else - { - advisoryIds = document.AdvisoryIds - .Where(id => !string.IsNullOrWhiteSpace(id)) - .ToArray(); - } - - PolicyFindingVexMetadata? vex = null; - if (document.Vex is not null) - { - if (!string.IsNullOrWhiteSpace(document.Vex.WinningStatementId) - || !string.IsNullOrWhiteSpace(document.Vex.Source) - || !string.IsNullOrWhiteSpace(document.Vex.Status)) - { - vex = new PolicyFindingVexMetadata( - string.IsNullOrWhiteSpace(document.Vex.WinningStatementId) ? null : document.Vex.WinningStatementId, - string.IsNullOrWhiteSpace(document.Vex.Source) ? null : document.Vex.Source, - string.IsNullOrWhiteSpace(document.Vex.Status) ? 
null : document.Vex.Status); - } - } - - var updatedAt = document.UpdatedAt ?? DateTimeOffset.MinValue; - - return new PolicyFindingDocument( - findingId, - status, - severity, - sbomId, - advisoryIds, - vex, - document.PolicyVersion ?? 0, - updatedAt, - string.IsNullOrWhiteSpace(document.RunId) ? null : document.RunId); - } - - private static PolicyFindingExplainResult MapPolicyFindingExplain(PolicyFindingExplainResponseDocument document) - { - var findingId = document.FindingId; - if (string.IsNullOrWhiteSpace(findingId)) - { - throw new InvalidOperationException("Policy finding explain response missing findingId."); - } - - var steps = document.Steps is null - ? new List(capacity: 0) - : document.Steps - .Where(step => step is not null) - .Select(step => MapPolicyFindingExplainStep(step!)) - .ToList(); - - var hints = document.SealedHints is null - ? new List(capacity: 0) - : document.SealedHints - .Where(hint => hint is not null && !string.IsNullOrWhiteSpace(hint!.Message)) - .Select(hint => new PolicyFindingExplainHint(hint!.Message!.Trim())) - .ToList(); - - return new PolicyFindingExplainResult( - findingId, - document.PolicyVersion ?? 0, - new ReadOnlyCollection(steps), - new ReadOnlyCollection(hints)); - } - - private static PolicyFindingExplainStep MapPolicyFindingExplainStep(PolicyFindingExplainStepDocument document) - { - var rule = string.IsNullOrWhiteSpace(document.Rule) ? "(unknown)" : document.Rule!; - var status = string.IsNullOrWhiteSpace(document.Status) ? null : document.Status; - var action = string.IsNullOrWhiteSpace(document.Action) ? null : document.Action; - - IReadOnlyDictionary inputs = document.Inputs is null - ? new ReadOnlyDictionary(new Dictionary(0, StringComparer.Ordinal)) - : new ReadOnlyDictionary(document.Inputs - .Where(kvp => !string.IsNullOrWhiteSpace(kvp.Key)) - .ToDictionary( - kvp => kvp.Key, - kvp => ConvertJsonElementToString(kvp.Value), - StringComparer.Ordinal)); - - IReadOnlyDictionary? evidence = null; - if (document.Evidence is not null && document.Evidence.Count > 0) - { - var evidenceDict = document.Evidence - .Where(kvp => !string.IsNullOrWhiteSpace(kvp.Key)) - .ToDictionary( - kvp => kvp.Key, - kvp => ConvertJsonElementToString(kvp.Value), - StringComparer.Ordinal); - - evidence = new ReadOnlyDictionary(evidenceDict); - } - - return new PolicyFindingExplainStep( - rule, - status, - action, - document.Score, - inputs, - evidence); - } - - private static string ConvertJsonElementToString(JsonElement element) - { - return element.ValueKind switch - { - JsonValueKind.String => element.GetString() ?? string.Empty, - JsonValueKind.Number => element.TryGetInt64(out var longValue) - ? 
longValue.ToString(CultureInfo.InvariantCulture) - : element.GetDouble().ToString(CultureInfo.InvariantCulture), - JsonValueKind.True => "true", - JsonValueKind.False => "false", - JsonValueKind.Null => "null", - JsonValueKind.Array => string.Join(", ", element.EnumerateArray().Select(ConvertJsonElementToString)), - JsonValueKind.Object => element.GetRawText(), - _ => element.GetRawText() - }; - } - - private static PolicyActivationResult MapPolicyActivation(PolicyActivationResponseDocument document) - { - if (document.Revision is null) - { - throw new InvalidOperationException("Policy activation response missing revision data."); - } - - var revisionDocument = document.Revision; - if (string.IsNullOrWhiteSpace(revisionDocument.PackId)) - { - throw new InvalidOperationException("Policy activation revision missing policy identifier."); - } - - if (!revisionDocument.Version.HasValue) - { - throw new InvalidOperationException("Policy activation revision missing version number."); - } - - var approvals = new List(); - if (revisionDocument.Approvals is not null) - { - foreach (var approval in revisionDocument.Approvals) - { - if (approval is null || string.IsNullOrWhiteSpace(approval.ActorId) || !approval.ApprovedAt.HasValue) - { - continue; - } - - approvals.Add(new PolicyActivationApproval( - approval.ActorId, - approval.ApprovedAt.Value.ToUniversalTime(), - NormalizeOptionalString(approval.Comment))); - } - } - - var revision = new PolicyActivationRevision( - revisionDocument.PackId, - revisionDocument.Version.Value, - NormalizeOptionalString(revisionDocument.Status) ?? "unknown", - revisionDocument.RequiresTwoPersonApproval ?? false, - revisionDocument.CreatedAt?.ToUniversalTime() ?? DateTimeOffset.MinValue, - revisionDocument.ActivatedAt?.ToUniversalTime(), - new ReadOnlyCollection(approvals)); - - return new PolicyActivationResult( - NormalizeOptionalString(document.Status) ?? "unknown", - revision); - } - - private static PolicySimulationResult MapPolicySimulation(PolicySimulationResponseDocument document) - { - var diffDocument = document.Diff ?? throw new InvalidOperationException("Policy simulation response missing diff summary."); - - var severity = diffDocument.BySeverity is null - ? new Dictionary(0, StringComparer.Ordinal) - : diffDocument.BySeverity - .Where(kvp => !string.IsNullOrWhiteSpace(kvp.Key) && kvp.Value is not null) - .ToDictionary( - kvp => kvp.Key, - kvp => new PolicySimulationSeverityDelta(kvp.Value!.Up, kvp.Value.Down), - StringComparer.Ordinal); - - var severityView = new ReadOnlyDictionary(severity); - - var ruleHits = diffDocument.RuleHits is null - ? new List() - : diffDocument.RuleHits - .Where(hit => hit is not null) - .Select(hit => new PolicySimulationRuleDelta( - hit!.RuleId ?? string.Empty, - hit.RuleName ?? string.Empty, - hit.Up, - hit.Down)) - .ToList(); - - var ruleHitsView = ruleHits.AsReadOnly(); - - var diff = new PolicySimulationDiff( - string.IsNullOrWhiteSpace(diffDocument.SchemaVersion) ? null : diffDocument.SchemaVersion, - diffDocument.Added ?? 0, - diffDocument.Removed ?? 0, - diffDocument.Unchanged ?? 0, - severityView, - ruleHitsView); - - return new PolicySimulationResult( - diff, - string.IsNullOrWhiteSpace(document.ExplainUri) ? null : document.ExplainUri); - } - - private void EnsureBackendConfigured() - { - if (_httpClient.BaseAddress is null) - { - throw new InvalidOperationException("Backend URL is not configured. 
Provide STELLAOPS_BACKEND_URL or configure appsettings."); - } - } - - private string ResolveArtifactPath(string outputPath, string channel) - { - if (!string.IsNullOrWhiteSpace(outputPath)) - { - return Path.GetFullPath(outputPath); - } - - var directory = string.IsNullOrWhiteSpace(_options.ScannerCacheDirectory) - ? Directory.GetCurrentDirectory() - : Path.GetFullPath(_options.ScannerCacheDirectory); - - Directory.CreateDirectory(directory); - var fileName = $"stellaops-scanner-{channel}.tar.gz"; - return Path.Combine(directory, fileName); - } - - private async Task CreateFailureMessageAsync(HttpResponseMessage response, CancellationToken cancellationToken) - { - var (message, _) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false); - return message; - } - - private async Task<(string Message, ProblemDocument? Problem)> CreateFailureDetailsAsync(HttpResponseMessage response, CancellationToken cancellationToken) - { - var statusCode = (int)response.StatusCode; - var builder = new StringBuilder(); - builder.Append("Backend request failed with status "); - builder.Append(statusCode); - builder.Append(' '); - builder.Append(response.ReasonPhrase ?? "Unknown"); - - ProblemDocument? problem = null; - - if (response.Content is not null && response.Content.Headers.ContentLength is > 0) - { - string? raw = null; - try - { - raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - if (!string.IsNullOrWhiteSpace(raw)) - { - problem = JsonSerializer.Deserialize(raw, SerializerOptions); - } - } - catch (JsonException) - { - problem = null; - } - - if (problem is not null) - { - if (!string.IsNullOrWhiteSpace(problem.Title)) - { - builder.AppendLine().Append(problem.Title); - } - - if (!string.IsNullOrWhiteSpace(problem.Detail)) - { - builder.AppendLine().Append(problem.Detail); - } - } - else if (!string.IsNullOrWhiteSpace(raw)) - { - builder.AppendLine().Append(raw); - } - } - - return (builder.ToString(), problem); - } - - private static string? ExtractHeaderValue(HttpResponseHeaders headers, string name) - { - if (headers.TryGetValues(name, out var values)) - { - return values.FirstOrDefault(); - } - - return null; - } - - private static string? NormalizeExpectedDigest(string? digest) - { - if (string.IsNullOrWhiteSpace(digest)) - { - return null; - } - - var trimmed = digest.Trim(); - return trimmed.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase) - ? trimmed[7..] - : trimmed; - } - - private async Task ValidateDigestAsync(string filePath, string? expectedDigest, CancellationToken cancellationToken) - { - string digestHex; - await using (var stream = File.OpenRead(filePath)) - { - var hash = await SHA256.HashDataAsync(stream, cancellationToken).ConfigureAwait(false); - digestHex = Convert.ToHexString(hash).ToLowerInvariant(); - } - - if (!string.IsNullOrWhiteSpace(expectedDigest)) - { - var normalized = NormalizeDigest(expectedDigest); - if (!normalized.Equals(digestHex, StringComparison.OrdinalIgnoreCase)) - { - File.Delete(filePath); - throw new InvalidOperationException($"Scanner digest mismatch. 
Expected sha256:{normalized}, calculated sha256:{digestHex}."); - } - } - else - { - _logger.LogWarning("Scanner download missing X-StellaOps-Digest header; relying on computed digest only."); - } - - return digestHex; - } - - private static string NormalizeDigest(string digest) - { - if (digest.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase)) - { - return digest[7..]; - } - - return digest; - } - - private static async Task ComputeSha256Async(string filePath, CancellationToken cancellationToken) - { - await using var stream = File.OpenRead(filePath); - var hash = await SHA256.HashDataAsync(stream, cancellationToken).ConfigureAwait(false); - return Convert.ToHexString(hash).ToLowerInvariant(); - } - - private async Task ValidateSignatureAsync(string? signatureHeader, string digestHex, bool verbose, CancellationToken cancellationToken) - { - if (string.IsNullOrWhiteSpace(_options.ScannerSignaturePublicKeyPath)) - { - if (!string.IsNullOrWhiteSpace(signatureHeader)) - { - _logger.LogDebug("Signature header present but no public key configured; skipping validation."); - } - return; - } - - if (string.IsNullOrWhiteSpace(signatureHeader)) - { - throw new InvalidOperationException("Scanner signature missing while a public key is configured."); - } - - var publicKeyPath = Path.GetFullPath(_options.ScannerSignaturePublicKeyPath); - if (!File.Exists(publicKeyPath)) - { - throw new FileNotFoundException("Scanner signature public key not found.", publicKeyPath); - } - - var signatureBytes = Convert.FromBase64String(signatureHeader); - var digestBytes = Convert.FromHexString(digestHex); - - var pem = await File.ReadAllTextAsync(publicKeyPath, cancellationToken).ConfigureAwait(false); - using var rsa = RSA.Create(); - rsa.ImportFromPem(pem); - - var valid = rsa.VerifyHash(digestBytes, signatureBytes, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1); - if (!valid) - { - throw new InvalidOperationException("Scanner signature validation failed."); - } - - if (verbose) - { - _logger.LogDebug("Scanner signature validated using key {KeyPath}.", publicKeyPath); - } - } - - private void PersistMetadata(string outputPath, string channel, string digestHex, string? 
signatureHeader, HttpResponseMessage response) - { - var metadata = new - { - channel, - digest = $"sha256:{digestHex}", - signature = signatureHeader, - downloadedAt = DateTimeOffset.UtcNow, - source = response.RequestMessage?.RequestUri?.ToString(), - sizeBytes = new FileInfo(outputPath).Length, - headers = new - { - etag = response.Headers.ETag?.Tag, - lastModified = response.Content.Headers.LastModified, - contentType = response.Content.Headers.ContentType?.ToString() - } - }; - - var metadataPath = outputPath + ".metadata.json"; - var json = JsonSerializer.Serialize(metadata, new JsonSerializerOptions - { - WriteIndented = true - }); - - File.WriteAllText(metadataPath, json); - } - - private static TimeSpan GetRetryDelay(HttpResponseMessage response, int attempt) - { - if (response.Headers.TryGetValues("Retry-After", out var retryValues)) - { - var value = retryValues.FirstOrDefault(); - if (!string.IsNullOrWhiteSpace(value)) - { - if (int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var seconds) && seconds >= 0) - { - return TimeSpan.FromSeconds(Math.Min(seconds, 300)); - } - - if (DateTimeOffset.TryParse(value, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, out var when)) - { - var delta = when - DateTimeOffset.UtcNow; - if (delta > TimeSpan.Zero) - { - return delta < TimeSpan.FromMinutes(5) ? delta : TimeSpan.FromMinutes(5); - } - } - } - } - - var fallbackSeconds = Math.Min(60, Math.Pow(2, attempt)); - return TimeSpan.FromSeconds(fallbackSeconds); - } -} +using System; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.IO; +using System.Net; +using System.Net.Http; +using System.Linq; +using System.Net.Http.Headers; +using System.Net.Http.Json; +using System.Globalization; +using System.Security.Cryptography; +using System.Text; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using StellaOps.Auth.Abstractions; +using StellaOps.Auth.Client; +using StellaOps.Cli.Configuration; +using StellaOps.Cli.Services.Models; +using StellaOps.Cli.Services.Models.Transport; + +namespace StellaOps.Cli.Services; + +internal sealed class BackendOperationsClient : IBackendOperationsClient +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web); + private static readonly TimeSpan TokenRefreshSkew = TimeSpan.FromSeconds(30); + private static readonly IReadOnlyDictionary EmptyMetadata = + new ReadOnlyDictionary(new Dictionary(0, StringComparer.OrdinalIgnoreCase)); + + private const string OperatorReasonParameterName = "operator_reason"; + private const string OperatorTicketParameterName = "operator_ticket"; + + private readonly HttpClient _httpClient; + private readonly StellaOpsCliOptions _options; + private readonly ILogger _logger; + private readonly IStellaOpsTokenClient? _tokenClient; + private readonly object _tokenSync = new(); + private string? _cachedAccessToken; + private DateTimeOffset _cachedAccessTokenExpiresAt = DateTimeOffset.MinValue; + + public BackendOperationsClient(HttpClient httpClient, StellaOpsCliOptions options, ILogger logger, IStellaOpsTokenClient? tokenClient = null) + { + _httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient)); + _options = options ?? throw new ArgumentNullException(nameof(options)); + _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + _tokenClient = tokenClient; + + if (!string.IsNullOrWhiteSpace(_options.BackendUrl) && httpClient.BaseAddress is null) + { + if (Uri.TryCreate(_options.BackendUrl, UriKind.Absolute, out var baseUri)) + { + httpClient.BaseAddress = baseUri; + } + } + } + + public async Task DownloadScannerAsync(string channel, string outputPath, bool overwrite, bool verbose, CancellationToken cancellationToken) + { + EnsureBackendConfigured(); + + channel = string.IsNullOrWhiteSpace(channel) ? "stable" : channel.Trim(); + outputPath = ResolveArtifactPath(outputPath, channel); + Directory.CreateDirectory(Path.GetDirectoryName(outputPath)!); + + if (!overwrite && File.Exists(outputPath)) + { + var existing = new FileInfo(outputPath); + _logger.LogInformation("Scanner artifact already cached at {Path} ({Size} bytes).", outputPath, existing.Length); + return new ScannerArtifactResult(outputPath, existing.Length, true); + } + + var attempt = 0; + var maxAttempts = Math.Max(1, _options.ScannerDownloadAttempts); + + while (true) + { + attempt++; + try + { + using var request = CreateRequest(HttpMethod.Get, $"api/scanner/artifacts/{channel}"); + await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); + using var response = await _httpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException(failure); + } + + return await ProcessScannerResponseAsync(response, outputPath, channel, verbose, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) when (attempt < maxAttempts) + { + var backoffSeconds = Math.Pow(2, attempt); + _logger.LogWarning(ex, "Scanner download attempt {Attempt}/{MaxAttempts} failed. Retrying in {Delay:F0}s...", attempt, maxAttempts, backoffSeconds); + await Task.Delay(TimeSpan.FromSeconds(backoffSeconds), cancellationToken).ConfigureAwait(false); + } + } + } + + private async Task ProcessScannerResponseAsync(HttpResponseMessage response, string outputPath, string channel, bool verbose, CancellationToken cancellationToken) + { + var tempFile = outputPath + ".tmp"; + await using (var payloadStream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false)) + await using (var fileStream = File.Create(tempFile)) + { + await payloadStream.CopyToAsync(fileStream, cancellationToken).ConfigureAwait(false); + } + + var expectedDigest = ExtractHeaderValue(response.Headers, "X-StellaOps-Digest"); + var signatureHeader = ExtractHeaderValue(response.Headers, "X-StellaOps-Signature"); + + var digestHex = await ValidateDigestAsync(tempFile, expectedDigest, cancellationToken).ConfigureAwait(false); + await ValidateSignatureAsync(signatureHeader, digestHex, verbose, cancellationToken).ConfigureAwait(false); + + if (verbose) + { + var signatureNote = string.IsNullOrWhiteSpace(signatureHeader) ? 
"no signature" : "signature validated"; + _logger.LogDebug("Scanner digest sha256:{Digest} ({SignatureNote}).", digestHex, signatureNote); + } + + if (File.Exists(outputPath)) + { + File.Delete(outputPath); + } + + File.Move(tempFile, outputPath); + + PersistMetadata(outputPath, channel, digestHex, signatureHeader, response); + + var downloaded = new FileInfo(outputPath); + _logger.LogInformation("Scanner downloaded to {Path} ({Size} bytes).", outputPath, downloaded.Length); + + return new ScannerArtifactResult(outputPath, downloaded.Length, false); + } + + public async Task UploadScanResultsAsync(string filePath, CancellationToken cancellationToken) + { + EnsureBackendConfigured(); + + if (!File.Exists(filePath)) + { + throw new FileNotFoundException("Scan result file not found.", filePath); + } + + var maxAttempts = Math.Max(1, _options.ScanUploadAttempts); + var attempt = 0; + + while (true) + { + attempt++; + try + { + using var content = new MultipartFormDataContent(); + await using var fileStream = File.OpenRead(filePath); + var streamContent = new StreamContent(fileStream); + streamContent.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream"); + content.Add(streamContent, "file", Path.GetFileName(filePath)); + + using var request = CreateRequest(HttpMethod.Post, "api/scanner/results"); + await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); + request.Content = content; + + using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); + if (response.IsSuccessStatusCode) + { + _logger.LogInformation("Scan results uploaded from {Path}.", filePath); + return; + } + + var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); + if (attempt >= maxAttempts) + { + throw new InvalidOperationException(failure); + } + + var delay = GetRetryDelay(response, attempt); + _logger.LogWarning( + "Scan upload attempt {Attempt}/{MaxAttempts} failed ({Reason}). Retrying in {Delay:F1}s...", + attempt, + maxAttempts, + failure, + delay.TotalSeconds); + await Task.Delay(delay, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) when (attempt < maxAttempts) + { + var delay = TimeSpan.FromSeconds(Math.Pow(2, attempt)); + _logger.LogWarning( + ex, + "Scan upload attempt {Attempt}/{MaxAttempts} threw an exception. Retrying in {Delay:F1}s...", + attempt, + maxAttempts, + delay.TotalSeconds); + await Task.Delay(delay, cancellationToken).ConfigureAwait(false); + } + } + } + + public async Task TriggerJobAsync(string jobKind, IDictionary parameters, CancellationToken cancellationToken) + { + EnsureBackendConfigured(); + + if (string.IsNullOrWhiteSpace(jobKind)) + { + throw new ArgumentException("Job kind must be provided.", nameof(jobKind)); + } + + var requestBody = new JobTriggerRequest + { + Trigger = "cli", + Parameters = parameters is null ? new Dictionary(StringComparer.Ordinal) : new Dictionary(parameters, StringComparer.Ordinal) + }; + + var request = CreateRequest(HttpMethod.Post, $"jobs/{jobKind}"); + await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); + request.Content = JsonContent.Create(requestBody, options: SerializerOptions); + + using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); + if (response.StatusCode == HttpStatusCode.Accepted) + { + JobRunResponse? 
run = null; + if (response.Content.Headers.ContentLength is > 0) + { + try + { + run = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); + } + catch (JsonException ex) + { + _logger.LogWarning(ex, "Failed to deserialize job run response for job kind {Kind}.", jobKind); + } + } + + var location = response.Headers.Location?.ToString(); + return new JobTriggerResult(true, "Accepted", location, run); + } + + var failureMessage = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); + return new JobTriggerResult(false, failureMessage, null, null); + } + + public async Task ExecuteExcititorOperationAsync(string route, HttpMethod method, object? payload, CancellationToken cancellationToken) + { + EnsureBackendConfigured(); + + if (string.IsNullOrWhiteSpace(route)) + { + throw new ArgumentException("Route must be provided.", nameof(route)); + } + + var relative = route.TrimStart('/'); + using var request = CreateRequest(method, $"excititor/{relative}"); + + if (payload is not null && method != HttpMethod.Get && method != HttpMethod.Delete) + { + request.Content = JsonContent.Create(payload, options: SerializerOptions); + } + + await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); + using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); + + if (response.IsSuccessStatusCode) + { + var (message, payloadElement) = await ExtractExcititorResponseAsync(response, cancellationToken).ConfigureAwait(false); + var location = response.Headers.Location?.ToString(); + return new ExcititorOperationResult(true, message, location, payloadElement); + } + + var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); + return new ExcititorOperationResult(false, failure, null, null); + } + + public async Task DownloadExcititorExportAsync(string exportId, string destinationPath, string? expectedDigestAlgorithm, string? expectedDigest, CancellationToken cancellationToken) + { + EnsureBackendConfigured(); + + if (string.IsNullOrWhiteSpace(exportId)) + { + throw new ArgumentException("Export id must be provided.", nameof(exportId)); + } + + if (string.IsNullOrWhiteSpace(destinationPath)) + { + throw new ArgumentException("Destination path must be provided.", nameof(destinationPath)); + } + + var fullPath = Path.GetFullPath(destinationPath); + var directory = Path.GetDirectoryName(fullPath); + if (!string.IsNullOrEmpty(directory) && !Directory.Exists(directory)) + { + Directory.CreateDirectory(directory); + } + + var normalizedAlgorithm = string.IsNullOrWhiteSpace(expectedDigestAlgorithm) + ? 
null + : expectedDigestAlgorithm.Trim(); + var normalizedDigest = NormalizeExpectedDigest(expectedDigest); + + if (File.Exists(fullPath) + && string.Equals(normalizedAlgorithm, "sha256", StringComparison.OrdinalIgnoreCase) + && !string.IsNullOrWhiteSpace(normalizedDigest)) + { + var existingDigest = await ComputeSha256Async(fullPath, cancellationToken).ConfigureAwait(false); + if (string.Equals(existingDigest, normalizedDigest, StringComparison.OrdinalIgnoreCase)) + { + var info = new FileInfo(fullPath); + _logger.LogDebug("Export {ExportId} already present at {Path}; digest matches.", exportId, fullPath); + return new ExcititorExportDownloadResult(fullPath, info.Length, true); + } + } + + var encodedId = Uri.EscapeDataString(exportId); + using var request = CreateRequest(HttpMethod.Get, $"excititor/export/{encodedId}/download"); + await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); + + var tempPath = fullPath + ".tmp"; + if (File.Exists(tempPath)) + { + File.Delete(tempPath); + } + + using (var response = await _httpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false)) + { + if (!response.IsSuccessStatusCode) + { + var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException(failure); + } + + await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false); + await using (var fileStream = File.Create(tempPath)) + { + await stream.CopyToAsync(fileStream, cancellationToken).ConfigureAwait(false); + } + } + + if (!string.IsNullOrWhiteSpace(normalizedAlgorithm) && !string.IsNullOrWhiteSpace(normalizedDigest)) + { + if (string.Equals(normalizedAlgorithm, "sha256", StringComparison.OrdinalIgnoreCase)) + { + var computed = await ComputeSha256Async(tempPath, cancellationToken).ConfigureAwait(false); + if (!string.Equals(computed, normalizedDigest, StringComparison.OrdinalIgnoreCase)) + { + File.Delete(tempPath); + throw new InvalidOperationException($"Export digest mismatch. Expected sha256:{normalizedDigest}, computed sha256:{computed}."); + } + } + else + { + _logger.LogWarning("Export digest verification skipped. Unsupported algorithm {Algorithm}.", normalizedAlgorithm); + } + } + + if (File.Exists(fullPath)) + { + File.Delete(fullPath); + } + + File.Move(tempPath, fullPath); + + var downloaded = new FileInfo(fullPath); + return new ExcititorExportDownloadResult(fullPath, downloaded.Length, false); + } + + public async Task EvaluateRuntimePolicyAsync(RuntimePolicyEvaluationRequest request, CancellationToken cancellationToken) + { + EnsureBackendConfigured(); + + if (request is null) + { + throw new ArgumentNullException(nameof(request)); + } + + var images = NormalizeImages(request.Images); + if (images.Count == 0) + { + throw new ArgumentException("At least one image digest must be provided.", nameof(request)); + } + + var payload = new RuntimePolicyEvaluationRequestDocument + { + Namespace = string.IsNullOrWhiteSpace(request.Namespace) ? null : request.Namespace.Trim(), + Images = images + }; + + if (request.Labels.Count > 0) + { + payload.Labels = new Dictionary(StringComparer.Ordinal); + foreach (var label in request.Labels) + { + if (!string.IsNullOrWhiteSpace(label.Key)) + { + payload.Labels[label.Key] = label.Value ?? 
string.Empty; + } + } + } + + using var message = CreateRequest(HttpMethod.Post, "api/scanner/policy/runtime"); + await AuthorizeRequestAsync(message, cancellationToken).ConfigureAwait(false); + message.Content = JsonContent.Create(payload, options: SerializerOptions); + + using var response = await _httpClient.SendAsync(message, cancellationToken).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException(failure); + } + + RuntimePolicyEvaluationResponseDocument? document; + try + { + document = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); + } + catch (JsonException ex) + { + var raw = response.Content is null ? string.Empty : await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException($"Failed to parse runtime policy response. {ex.Message}", ex) + { + Data = { ["payload"] = raw } + }; + } + + if (document is null) + { + throw new InvalidOperationException("Runtime policy response was empty."); + } + + var decisions = new Dictionary(StringComparer.Ordinal); + if (document.Results is not null) + { + foreach (var kvp in document.Results) + { + var image = kvp.Key; + var decision = kvp.Value; + if (string.IsNullOrWhiteSpace(image) || decision is null) + { + continue; + } + + var verdict = string.IsNullOrWhiteSpace(decision.PolicyVerdict) + ? "unknown" + : decision.PolicyVerdict!.Trim(); + + var reasons = ExtractReasons(decision.Reasons); + var metadata = ExtractExtensionMetadata(decision.ExtensionData); + + var hasSbom = decision.HasSbomReferrers ?? decision.HasSbomLegacy; + + RuntimePolicyRekorReference? rekor = null; + if (decision.Rekor is not null && + (!string.IsNullOrWhiteSpace(decision.Rekor.Uuid) || + !string.IsNullOrWhiteSpace(decision.Rekor.Url) || + decision.Rekor.Verified.HasValue)) + { + rekor = new RuntimePolicyRekorReference( + NormalizeOptionalString(decision.Rekor.Uuid), + NormalizeOptionalString(decision.Rekor.Url), + decision.Rekor.Verified); + } + + decisions[image] = new RuntimePolicyImageDecision( + verdict, + decision.Signed, + hasSbom, + reasons, + rekor, + metadata); + } + } + + var decisionsView = new ReadOnlyDictionary(decisions); + + return new RuntimePolicyEvaluationResult( + document.TtlSeconds ?? 0, + document.ExpiresAtUtc?.ToUniversalTime(), + string.IsNullOrWhiteSpace(document.PolicyRevision) ? null : document.PolicyRevision, + decisionsView); + } + + public async Task ActivatePolicyRevisionAsync(string policyId, int version, PolicyActivationRequest request, CancellationToken cancellationToken) + { + EnsureBackendConfigured(); + + if (string.IsNullOrWhiteSpace(policyId)) + { + throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); + } + + if (version <= 0) + { + throw new ArgumentOutOfRangeException(nameof(version), "Version must be greater than zero."); + } + + if (request is null) + { + throw new ArgumentNullException(nameof(request)); + } + + var requestDocument = new PolicyActivationRequestDocument + { + Comment = NormalizeOptionalString(request.Comment), + RunNow = request.RunNow ? true : null, + ScheduledAt = request.ScheduledAt, + Priority = NormalizeOptionalString(request.Priority), + Rollback = request.Rollback ? 
true : null, + IncidentId = NormalizeOptionalString(request.IncidentId) + }; + + var encodedPolicyId = Uri.EscapeDataString(policyId.Trim()); + using var httpRequest = CreateRequest(HttpMethod.Post, $"api/policy/policies/{encodedPolicyId}/versions/{version}:activate"); + await AuthorizeRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false); + httpRequest.Content = JsonContent.Create(requestDocument, options: SerializerOptions); + + using var response = await _httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + var (message, problem) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false); + var errorCode = ExtractProblemErrorCode(problem); + throw new PolicyApiException(message, response.StatusCode, errorCode); + } + + PolicyActivationResponseDocument? responseDocument; + try + { + responseDocument = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); + } + catch (JsonException ex) + { + var raw = response.Content is null ? string.Empty : await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException($"Failed to parse policy activation response: {ex.Message}", ex) + { + Data = { ["payload"] = raw } + }; + } + + if (responseDocument is null) + { + throw new InvalidOperationException("Policy activation response was empty."); + } + + if (string.IsNullOrWhiteSpace(responseDocument.Status)) + { + throw new InvalidOperationException("Policy activation response missing status."); + } + + if (responseDocument.Revision is null) + { + throw new InvalidOperationException("Policy activation response missing revision."); + } + + return MapPolicyActivation(responseDocument); + } + + public async Task SimulatePolicyAsync(string policyId, PolicySimulationInput input, CancellationToken cancellationToken) + { + EnsureBackendConfigured(); + + if (string.IsNullOrWhiteSpace(policyId)) + { + throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); + } + + if (input is null) + { + throw new ArgumentNullException(nameof(input)); + } + + var requestDocument = new PolicySimulationRequestDocument + { + BaseVersion = input.BaseVersion, + CandidateVersion = input.CandidateVersion, + Explain = input.Explain ? 
true : null + }; + + if (input.SbomSet.Count > 0) + { + requestDocument.SbomSet = input.SbomSet; + } + + if (input.Environment.Count > 0) + { + var environment = new Dictionary(StringComparer.Ordinal); + foreach (var pair in input.Environment) + { + if (string.IsNullOrWhiteSpace(pair.Key)) + { + continue; + } + + environment[pair.Key] = SerializeEnvironmentValue(pair.Value); + } + + if (environment.Count > 0) + { + requestDocument.Env = environment; + } + } + + var encodedPolicyId = Uri.EscapeDataString(policyId); + using var request = CreateRequest(HttpMethod.Post, $"api/policy/policies/{encodedPolicyId}/simulate"); + await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); + request.Content = JsonContent.Create(requestDocument, options: SerializerOptions); + + using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + var (message, problem) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false); + var errorCode = ExtractProblemErrorCode(problem); + throw new PolicyApiException(message, response.StatusCode, errorCode); + } + + if (response.Content is null || response.Content.Headers.ContentLength is 0) + { + throw new InvalidOperationException("Policy simulation response was empty."); + } + + PolicySimulationResponseDocument? document; + try + { + document = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); + } + catch (JsonException ex) + { + var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException($"Failed to parse policy simulation response: {ex.Message}", ex) + { + Data = { ["payload"] = raw } + }; + } + + if (document is null) + { + throw new InvalidOperationException("Policy simulation response was empty."); + } + + if (document.Diff is null) + { + throw new InvalidOperationException("Policy simulation response missing diff summary."); + } + + return MapPolicySimulation(document); + } + + public async Task GetPolicyFindingsAsync(PolicyFindingsQuery query, CancellationToken cancellationToken) + { + if (query is null) + { + throw new ArgumentNullException(nameof(query)); + } + + EnsureBackendConfigured(); + + var policyId = query.PolicyId; + if (string.IsNullOrWhiteSpace(policyId)) + { + throw new ArgumentException("Policy identifier must be provided.", nameof(query)); + } + + var encodedPolicyId = Uri.EscapeDataString(policyId.Trim()); + var relative = $"api/policy/findings/{encodedPolicyId}{BuildPolicyFindingsQueryString(query)}"; + + using var request = CreateRequest(HttpMethod.Get, relative); + await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); + + using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + var (message, problem) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false); + var errorCode = ExtractProblemErrorCode(problem); + throw new PolicyApiException(message, response.StatusCode, errorCode); + } + + PolicyFindingsResponseDocument? 
document; + try + { + document = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); + } + catch (JsonException ex) + { + var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException($"Failed to parse policy findings response: {ex.Message}", ex) + { + Data = { ["payload"] = raw } + }; + } + + if (document is null) + { + throw new InvalidOperationException("Policy findings response was empty."); + } + + return MapPolicyFindings(document); + } + + public async Task GetPolicyFindingAsync(string policyId, string findingId, CancellationToken cancellationToken) + { + EnsureBackendConfigured(); + + if (string.IsNullOrWhiteSpace(policyId)) + { + throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); + } + + if (string.IsNullOrWhiteSpace(findingId)) + { + throw new ArgumentException("Finding identifier must be provided.", nameof(findingId)); + } + + var encodedPolicyId = Uri.EscapeDataString(policyId.Trim()); + var encodedFindingId = Uri.EscapeDataString(findingId.Trim()); + using var request = CreateRequest(HttpMethod.Get, $"api/policy/findings/{encodedPolicyId}/{encodedFindingId}"); + await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); + + using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + var (message, problem) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false); + var errorCode = ExtractProblemErrorCode(problem); + throw new PolicyApiException(message, response.StatusCode, errorCode); + } + + PolicyFindingDocumentDocument? document; + try + { + document = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); + } + catch (JsonException ex) + { + var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException($"Failed to parse policy finding response: {ex.Message}", ex) + { + Data = { ["payload"] = raw } + }; + } + + if (document is null) + { + throw new InvalidOperationException("Policy finding response was empty."); + } + + return MapPolicyFinding(document); + } + + public async Task GetPolicyFindingExplainAsync(string policyId, string findingId, string? mode, CancellationToken cancellationToken) + { + EnsureBackendConfigured(); + + if (string.IsNullOrWhiteSpace(policyId)) + { + throw new ArgumentException("Policy identifier must be provided.", nameof(policyId)); + } + + if (string.IsNullOrWhiteSpace(findingId)) + { + throw new ArgumentException("Finding identifier must be provided.", nameof(findingId)); + } + + var encodedPolicyId = Uri.EscapeDataString(policyId.Trim()); + var encodedFindingId = Uri.EscapeDataString(findingId.Trim()); + var query = string.IsNullOrWhiteSpace(mode) ? 
string.Empty : $"?mode={Uri.EscapeDataString(mode.Trim())}"; + + using var request = CreateRequest(HttpMethod.Get, $"api/policy/findings/{encodedPolicyId}/{encodedFindingId}/explain{query}"); + await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); + + using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + var (message, problem) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false); + var errorCode = ExtractProblemErrorCode(problem); + throw new PolicyApiException(message, response.StatusCode, errorCode); + } + + PolicyFindingExplainResponseDocument? document; + try + { + document = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); + } + catch (JsonException ex) + { + var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException($"Failed to parse policy finding explain response: {ex.Message}", ex) + { + Data = { ["payload"] = raw } + }; + } + + if (document is null) + { + throw new InvalidOperationException("Policy finding explain response was empty."); + } + + return MapPolicyFindingExplain(document); + } + + public async Task> GetExcititorProvidersAsync(bool includeDisabled, CancellationToken cancellationToken) + { + EnsureBackendConfigured(); + + var query = includeDisabled ? "?includeDisabled=true" : string.Empty; + using var request = CreateRequest(HttpMethod.Get, $"excititor/providers{query}"); + await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); + using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); + + if (!response.IsSuccessStatusCode) + { + var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException(failure); + } + + if (response.Content is null || response.Content.Headers.ContentLength is 0) + { + return Array.Empty(); + } + + await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false); + if (stream is null || stream.Length == 0) + { + return Array.Empty(); + } + + using var document = await JsonDocument.ParseAsync(stream, cancellationToken: cancellationToken).ConfigureAwait(false); + var root = document.RootElement; + if (root.ValueKind == JsonValueKind.Object && root.TryGetProperty("providers", out var providersProperty)) + { + root = providersProperty; + } + + if (root.ValueKind != JsonValueKind.Array) + { + return Array.Empty(); + } + + var list = new List(); + foreach (var item in root.EnumerateArray()) + { + var id = GetStringProperty(item, "id") ?? string.Empty; + if (string.IsNullOrWhiteSpace(id)) + { + continue; + } + + var kind = GetStringProperty(item, "kind") ?? "unknown"; + var displayName = GetStringProperty(item, "displayName") ?? id; + var trustTier = GetStringProperty(item, "trustTier") ?? string.Empty; + var enabled = GetBooleanProperty(item, "enabled", defaultValue: true); + var lastIngested = GetDateTimeOffsetProperty(item, "lastIngestedAt"); + + list.Add(new ExcititorProviderSummary(id, kind, displayName, trustTier, enabled, lastIngested)); + } + + return list; + } + + public async Task DownloadOfflineKitAsync(string? 
bundleId, string destinationDirectory, bool overwrite, bool resume, CancellationToken cancellationToken) + { + EnsureBackendConfigured(); + + var rootDirectory = ResolveOfflineDirectory(destinationDirectory); + Directory.CreateDirectory(rootDirectory); + + var descriptor = await FetchOfflineKitDescriptorAsync(bundleId, cancellationToken).ConfigureAwait(false); + + var bundlePath = Path.Combine(rootDirectory, descriptor.BundleName); + var metadataPath = bundlePath + ".metadata.json"; + var manifestPath = Path.Combine(rootDirectory, descriptor.ManifestName); + var bundleSignaturePath = descriptor.BundleSignatureName is not null ? Path.Combine(rootDirectory, descriptor.BundleSignatureName) : null; + var manifestSignaturePath = descriptor.ManifestSignatureName is not null ? Path.Combine(rootDirectory, descriptor.ManifestSignatureName) : null; + + var fromCache = false; + if (!overwrite && File.Exists(bundlePath)) + { + var digest = await ComputeSha256Async(bundlePath, cancellationToken).ConfigureAwait(false); + if (string.Equals(digest, descriptor.BundleSha256, StringComparison.OrdinalIgnoreCase)) + { + fromCache = true; + } + else if (resume) + { + var partial = bundlePath + ".partial"; + File.Move(bundlePath, partial, overwrite: true); + } + else + { + File.Delete(bundlePath); + } + } + + if (!fromCache) + { + await DownloadFileWithResumeAsync(descriptor.BundleDownloadUri, bundlePath, descriptor.BundleSha256, descriptor.BundleSize, resume, cancellationToken).ConfigureAwait(false); + } + + await DownloadFileWithResumeAsync(descriptor.ManifestDownloadUri, manifestPath, descriptor.ManifestSha256, descriptor.ManifestSize ?? 0, resume: false, cancellationToken).ConfigureAwait(false); + + if (descriptor.BundleSignatureDownloadUri is not null && bundleSignaturePath is not null) + { + await DownloadAuxiliaryFileAsync(descriptor.BundleSignatureDownloadUri, bundleSignaturePath, cancellationToken).ConfigureAwait(false); + } + + if (descriptor.ManifestSignatureDownloadUri is not null && manifestSignaturePath is not null) + { + await DownloadAuxiliaryFileAsync(descriptor.ManifestSignatureDownloadUri, manifestSignaturePath, cancellationToken).ConfigureAwait(false); + } + + await WriteOfflineKitMetadataAsync(metadataPath, descriptor, bundlePath, manifestPath, bundleSignaturePath, manifestSignaturePath, cancellationToken).ConfigureAwait(false); + + return new OfflineKitDownloadResult( + descriptor, + bundlePath, + manifestPath, + bundleSignaturePath, + manifestSignaturePath, + metadataPath, + fromCache); + } + + public async Task ImportOfflineKitAsync(OfflineKitImportRequest request, CancellationToken cancellationToken) + { + EnsureBackendConfigured(); + + if (request is null) + { + throw new ArgumentNullException(nameof(request)); + } + + var bundlePath = Path.GetFullPath(request.BundlePath); + if (!File.Exists(bundlePath)) + { + throw new FileNotFoundException("Offline kit bundle not found.", bundlePath); + } + + string? manifestPath = null; + if (!string.IsNullOrWhiteSpace(request.ManifestPath)) + { + manifestPath = Path.GetFullPath(request.ManifestPath); + if (!File.Exists(manifestPath)) + { + throw new FileNotFoundException("Offline kit manifest not found.", manifestPath); + } + } + + string? 
bundleSignaturePath = null; + if (!string.IsNullOrWhiteSpace(request.BundleSignaturePath)) + { + bundleSignaturePath = Path.GetFullPath(request.BundleSignaturePath); + if (!File.Exists(bundleSignaturePath)) + { + throw new FileNotFoundException("Offline kit bundle signature not found.", bundleSignaturePath); + } + } + + string? manifestSignaturePath = null; + if (!string.IsNullOrWhiteSpace(request.ManifestSignaturePath)) + { + manifestSignaturePath = Path.GetFullPath(request.ManifestSignaturePath); + if (!File.Exists(manifestSignaturePath)) + { + throw new FileNotFoundException("Offline kit manifest signature not found.", manifestSignaturePath); + } + } + + var bundleSize = request.BundleSize ?? new FileInfo(bundlePath).Length; + var bundleSha = string.IsNullOrWhiteSpace(request.BundleSha256) + ? await ComputeSha256Async(bundlePath, cancellationToken).ConfigureAwait(false) + : NormalizeSha(request.BundleSha256) ?? throw new InvalidOperationException("Bundle digest must not be empty."); + + string? manifestSha = null; + long? manifestSize = null; + if (manifestPath is not null) + { + manifestSize = request.ManifestSize ?? new FileInfo(manifestPath).Length; + manifestSha = string.IsNullOrWhiteSpace(request.ManifestSha256) + ? await ComputeSha256Async(manifestPath, cancellationToken).ConfigureAwait(false) + : NormalizeSha(request.ManifestSha256); + } + + var metadata = new OfflineKitImportMetadataPayload + { + BundleId = request.BundleId, + BundleSha256 = bundleSha, + BundleSize = bundleSize, + CapturedAt = request.CapturedAt, + Channel = request.Channel, + Kind = request.Kind, + IsDelta = request.IsDelta, + BaseBundleId = request.BaseBundleId, + ManifestSha256 = manifestSha, + ManifestSize = manifestSize + }; + + using var message = CreateRequest(HttpMethod.Post, "api/offline-kit/import"); + await AuthorizeRequestAsync(message, cancellationToken).ConfigureAwait(false); + + using var content = new MultipartFormDataContent(); + + var metadataOptions = new JsonSerializerOptions(SerializerOptions) + { + WriteIndented = false + }; + var metadataJson = JsonSerializer.Serialize(metadata, metadataOptions); + var metadataContent = new StringContent(metadataJson, Encoding.UTF8, "application/json"); + content.Add(metadataContent, "metadata"); + + var bundleStream = File.OpenRead(bundlePath); + var bundleContent = new StreamContent(bundleStream); + bundleContent.Headers.ContentType = new MediaTypeHeaderValue("application/gzip"); + content.Add(bundleContent, "bundle", Path.GetFileName(bundlePath)); + + if (manifestPath is not null) + { + var manifestStream = File.OpenRead(manifestPath); + var manifestContent = new StreamContent(manifestStream); + manifestContent.Headers.ContentType = new MediaTypeHeaderValue("application/json"); + content.Add(manifestContent, "manifest", Path.GetFileName(manifestPath)); + } + + if (bundleSignaturePath is not null) + { + var signatureStream = File.OpenRead(bundleSignaturePath); + var signatureContent = new StreamContent(signatureStream); + signatureContent.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream"); + content.Add(signatureContent, "bundleSignature", Path.GetFileName(bundleSignaturePath)); + } + + if (manifestSignaturePath is not null) + { + var manifestSignatureStream = File.OpenRead(manifestSignaturePath); + var manifestSignatureContent = new StreamContent(manifestSignatureStream); + manifestSignatureContent.Headers.ContentType = new MediaTypeHeaderValue("application/octet-stream"); + content.Add(manifestSignatureContent, 
"manifestSignature", Path.GetFileName(manifestSignaturePath)); + } + + message.Content = content; + + using var response = await _httpClient.SendAsync(message, cancellationToken).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException(failure); + } + + OfflineKitImportResponseTransport? document; + try + { + document = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); + } + catch (JsonException ex) + { + var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException($"Failed to parse offline kit import response. {ex.Message}", ex) + { + Data = { ["payload"] = raw } + }; + } + + var submittedAt = document?.SubmittedAt ?? DateTimeOffset.UtcNow; + + return new OfflineKitImportResult( + document?.ImportId, + document?.Status, + submittedAt, + document?.Message); + } + + public async Task GetOfflineKitStatusAsync(CancellationToken cancellationToken) + { + EnsureBackendConfigured(); + + using var request = CreateRequest(HttpMethod.Get, "api/offline-kit/status"); + await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); + using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); + + if (!response.IsSuccessStatusCode) + { + var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException(failure); + } + + if (response.Content is null || response.Content.Headers.ContentLength is 0) + { + return new OfflineKitStatus(null, null, null, false, null, null, null, null, null, Array.Empty()); + } + + OfflineKitStatusTransport? document; + try + { + document = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); + } + catch (JsonException ex) + { + var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException($"Failed to parse offline kit status response. {ex.Message}", ex) + { + Data = { ["payload"] = raw } + }; + } + + var current = document?.Current; + var components = MapOfflineComponents(document?.Components); + + if (current is null) + { + return new OfflineKitStatus(null, null, null, false, null, null, null, null, null, components); + } + + return new OfflineKitStatus( + NormalizeOptionalString(current.BundleId), + NormalizeOptionalString(current.Channel), + NormalizeOptionalString(current.Kind), + current.IsDelta ?? 
false, + NormalizeOptionalString(current.BaseBundleId), + current.CapturedAt?.ToUniversalTime(), + current.ImportedAt?.ToUniversalTime(), + NormalizeSha(current.BundleSha256), + current.BundleSize, + components); + } + + public async Task ExecuteAocIngestDryRunAsync(AocIngestDryRunRequest requestBody, CancellationToken cancellationToken) + { + EnsureBackendConfigured(); + ArgumentNullException.ThrowIfNull(requestBody); + + using var request = CreateRequest(HttpMethod.Post, "api/aoc/ingest/dry-run"); + await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); + request.Content = JsonContent.Create(requestBody, options: SerializerOptions); + + using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException(failure); + } + + try + { + var result = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); + return result ?? new AocIngestDryRunResponse(); + } + catch (JsonException ex) + { + var payload = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException($"Failed to parse ingest dry-run response. {ex.Message}", ex) + { + Data = { ["payload"] = payload } + }; + } + } + + public async Task ExecuteAocVerifyAsync(AocVerifyRequest requestBody, CancellationToken cancellationToken) + { + EnsureBackendConfigured(); + ArgumentNullException.ThrowIfNull(requestBody); + + using var request = CreateRequest(HttpMethod.Post, "api/aoc/verify"); + await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); + request.Content = JsonContent.Create(requestBody, options: SerializerOptions); + + using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException(failure); + } + + try + { + var result = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); + return result ?? new AocVerifyResponse(); + } + catch (JsonException ex) + { + var payload = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException($"Failed to parse AOC verification response. {ex.Message}", ex) + { + Data = { ["payload"] = payload } + }; + } + } + + private string ResolveOfflineDirectory(string destinationDirectory) + { + if (!string.IsNullOrWhiteSpace(destinationDirectory)) + { + return Path.GetFullPath(destinationDirectory); + } + + var configured = _options.Offline?.KitsDirectory; + if (!string.IsNullOrWhiteSpace(configured)) + { + return Path.GetFullPath(configured); + } + + return Path.GetFullPath(Path.Combine(Environment.CurrentDirectory, "offline-kits")); + } + + private async Task FetchOfflineKitDescriptorAsync(string? bundleId, CancellationToken cancellationToken) + { + var route = string.IsNullOrWhiteSpace(bundleId) + ? 
"api/offline-kit/bundles/latest" + : $"api/offline-kit/bundles/{Uri.EscapeDataString(bundleId)}"; + + using var request = CreateRequest(HttpMethod.Get, route); + await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); + using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); + + if (!response.IsSuccessStatusCode) + { + var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException(failure); + } + + OfflineKitBundleDescriptorTransport? payload; + try + { + payload = await response.Content.ReadFromJsonAsync(SerializerOptions, cancellationToken).ConfigureAwait(false); + } + catch (JsonException ex) + { + var raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException($"Failed to parse offline kit metadata. {ex.Message}", ex) + { + Data = { ["payload"] = raw } + }; + } + + if (payload is null) + { + throw new InvalidOperationException("Offline kit metadata response was empty."); + } + + return MapOfflineKitDescriptor(payload); + } + + private OfflineKitBundleDescriptor MapOfflineKitDescriptor(OfflineKitBundleDescriptorTransport transport) + { + if (transport is null) + { + throw new ArgumentNullException(nameof(transport)); + } + + var bundleName = string.IsNullOrWhiteSpace(transport.BundleName) + ? throw new InvalidOperationException("Offline kit metadata missing bundleName.") + : transport.BundleName!.Trim(); + + var bundleId = string.IsNullOrWhiteSpace(transport.BundleId) ? bundleName : transport.BundleId!.Trim(); + var bundleSha = NormalizeSha(transport.BundleSha256) ?? throw new InvalidOperationException("Offline kit metadata missing bundleSha256."); + + var bundleSize = transport.BundleSize; + if (bundleSize <= 0) + { + throw new InvalidOperationException("Offline kit metadata missing bundle size."); + } + + var manifestName = string.IsNullOrWhiteSpace(transport.ManifestName) ? "offline-manifest.json" : transport.ManifestName!.Trim(); + var manifestSha = NormalizeSha(transport.ManifestSha256) ?? throw new InvalidOperationException("Offline kit metadata missing manifestSha256."); + var capturedAt = transport.CapturedAt?.ToUniversalTime() ?? DateTimeOffset.UtcNow; + + var bundleDownloadUri = ResolveDownloadUri(transport.BundleUrl, transport.BundlePath, bundleName); + var manifestDownloadUri = ResolveDownloadUri(transport.ManifestUrl, transport.ManifestPath, manifestName); + var bundleSignatureUri = ResolveOptionalDownloadUri(transport.BundleSignatureUrl, transport.BundleSignaturePath, transport.BundleSignatureName); + var manifestSignatureUri = ResolveOptionalDownloadUri(transport.ManifestSignatureUrl, transport.ManifestSignaturePath, transport.ManifestSignatureName); + var bundleSignatureName = ResolveArtifactName(transport.BundleSignatureName, bundleSignatureUri); + var manifestSignatureName = ResolveArtifactName(transport.ManifestSignatureName, manifestSignatureUri); + + return new OfflineKitBundleDescriptor( + bundleId, + bundleName, + bundleSha, + bundleSize, + bundleDownloadUri, + manifestName, + manifestSha, + manifestDownloadUri, + capturedAt, + NormalizeOptionalString(transport.Channel), + NormalizeOptionalString(transport.Kind), + transport.IsDelta ?? false, + NormalizeOptionalString(transport.BaseBundleId), + bundleSignatureName, + bundleSignatureUri, + manifestSignatureName, + manifestSignatureUri, + transport.ManifestSize); + } + + private static string? 
ResolveArtifactName(string? explicitName, Uri? uri) + { + if (!string.IsNullOrWhiteSpace(explicitName)) + { + return explicitName.Trim(); + } + + if (uri is not null) + { + var name = Path.GetFileName(uri.LocalPath); + return string.IsNullOrWhiteSpace(name) ? null : name; + } + + return null; + } + + private Uri ResolveDownloadUri(string? absoluteOrRelativeUrl, string? relativePath, string fallbackFileName) + { + if (!string.IsNullOrWhiteSpace(absoluteOrRelativeUrl)) + { + var candidate = new Uri(absoluteOrRelativeUrl, UriKind.RelativeOrAbsolute); + if (candidate.IsAbsoluteUri) + { + return candidate; + } + + if (_httpClient.BaseAddress is not null) + { + return new Uri(_httpClient.BaseAddress, candidate); + } + + return BuildUriFromRelative(candidate.ToString()); + } + + if (!string.IsNullOrWhiteSpace(relativePath)) + { + return BuildUriFromRelative(relativePath); + } + + if (!string.IsNullOrWhiteSpace(fallbackFileName)) + { + return BuildUriFromRelative(fallbackFileName); + } + + throw new InvalidOperationException("Offline kit metadata did not include a download URL."); + } + + private Uri BuildUriFromRelative(string relative) + { + var normalized = relative.TrimStart('/'); + if (!string.IsNullOrWhiteSpace(_options.Offline?.MirrorUrl) && + Uri.TryCreate(_options.Offline.MirrorUrl, UriKind.Absolute, out var mirrorBase)) + { + if (!mirrorBase.AbsoluteUri.EndsWith("/")) + { + mirrorBase = new Uri(mirrorBase.AbsoluteUri + "/"); + } + + return new Uri(mirrorBase, normalized); + } + + if (_httpClient.BaseAddress is not null) + { + return new Uri(_httpClient.BaseAddress, normalized); + } + + throw new InvalidOperationException($"Cannot resolve offline kit URI for '{relative}' because no mirror or backend base address is configured."); + } + + private Uri? ResolveOptionalDownloadUri(string? absoluteOrRelativeUrl, string? relativePath, string? fallbackName) + { + var hasData = !string.IsNullOrWhiteSpace(absoluteOrRelativeUrl) || + !string.IsNullOrWhiteSpace(relativePath) || + !string.IsNullOrWhiteSpace(fallbackName); + + if (!hasData) + { + return null; + } + + try + { + return ResolveDownloadUri(absoluteOrRelativeUrl, relativePath, fallbackName ?? string.Empty); + } + catch + { + return null; + } + } + + private async Task DownloadFileWithResumeAsync(Uri downloadUri, string targetPath, string expectedSha256, long expectedSize, bool resume, CancellationToken cancellationToken) + { + var directory = Path.GetDirectoryName(targetPath); + if (!string.IsNullOrEmpty(directory)) + { + Directory.CreateDirectory(directory); + } + + var partialPath = resume ? 
targetPath + ".partial" : targetPath + ".tmp"; + + if (!resume && File.Exists(targetPath)) + { + File.Delete(targetPath); + } + + if (resume && File.Exists(targetPath)) + { + File.Move(targetPath, partialPath, overwrite: true); + } + + long existingLength = 0; + if (resume && File.Exists(partialPath)) + { + existingLength = new FileInfo(partialPath).Length; + if (expectedSize > 0 && existingLength >= expectedSize) + { + existingLength = expectedSize; + } + } + + while (true) + { + using var request = new HttpRequestMessage(HttpMethod.Get, downloadUri); + if (resume && existingLength > 0 && expectedSize > 0 && existingLength < expectedSize) + { + request.Headers.Range = new RangeHeaderValue(existingLength, null); + } + + using var response = await _httpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false); + + if (resume && existingLength > 0 && expectedSize > 0 && existingLength < expectedSize && response.StatusCode == HttpStatusCode.OK) + { + existingLength = 0; + if (File.Exists(partialPath)) + { + File.Delete(partialPath); + } + + continue; + } + + if (!response.IsSuccessStatusCode && + !(resume && existingLength > 0 && response.StatusCode == HttpStatusCode.PartialContent)) + { + var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException(failure); + } + + var destination = resume ? partialPath : targetPath; + var mode = resume && existingLength > 0 ? FileMode.Append : FileMode.Create; + + await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false); + await using (var file = new FileStream(destination, mode, FileAccess.Write, FileShare.None, 81920, useAsync: true)) + { + await stream.CopyToAsync(file, cancellationToken).ConfigureAwait(false); + } + + break; + } + + if (resume && File.Exists(partialPath)) + { + File.Move(partialPath, targetPath, overwrite: true); + } + + var digest = await ComputeSha256Async(targetPath, cancellationToken).ConfigureAwait(false); + if (!string.Equals(digest, expectedSha256, StringComparison.OrdinalIgnoreCase)) + { + File.Delete(targetPath); + throw new InvalidOperationException($"Digest mismatch for {Path.GetFileName(targetPath)}. Expected {expectedSha256} but computed {digest}."); + } + + if (expectedSize > 0) + { + var actualSize = new FileInfo(targetPath).Length; + if (actualSize != expectedSize) + { + File.Delete(targetPath); + throw new InvalidOperationException($"Size mismatch for {Path.GetFileName(targetPath)}. 
Expected {expectedSize:N0} bytes but downloaded {actualSize:N0} bytes."); + } + } + } + + private async Task DownloadAuxiliaryFileAsync(Uri downloadUri, string targetPath, CancellationToken cancellationToken) + { + var directory = Path.GetDirectoryName(targetPath); + if (!string.IsNullOrEmpty(directory)) + { + Directory.CreateDirectory(directory); + } + + using var request = new HttpRequestMessage(HttpMethod.Get, downloadUri); + using var response = await _httpClient.SendAsync(request, HttpCompletionOption.ResponseHeadersRead, cancellationToken).ConfigureAwait(false); + + if (!response.IsSuccessStatusCode) + { + var failure = await CreateFailureMessageAsync(response, cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException(failure); + } + + await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false); + await using var file = new FileStream(targetPath, FileMode.Create, FileAccess.Write, FileShare.None, 81920, useAsync: true); + await stream.CopyToAsync(file, cancellationToken).ConfigureAwait(false); + } + + private static async Task WriteOfflineKitMetadataAsync( + string metadataPath, + OfflineKitBundleDescriptor descriptor, + string bundlePath, + string manifestPath, + string? bundleSignaturePath, + string? manifestSignaturePath, + CancellationToken cancellationToken) + { + var document = new OfflineKitMetadataDocument + { + BundleId = descriptor.BundleId, + BundleName = descriptor.BundleName, + BundleSha256 = descriptor.BundleSha256, + BundleSize = descriptor.BundleSize, + BundlePath = Path.GetFullPath(bundlePath), + CapturedAt = descriptor.CapturedAt, + DownloadedAt = DateTimeOffset.UtcNow, + Channel = descriptor.Channel, + Kind = descriptor.Kind, + IsDelta = descriptor.IsDelta, + BaseBundleId = descriptor.BaseBundleId, + ManifestName = descriptor.ManifestName, + ManifestSha256 = descriptor.ManifestSha256, + ManifestSize = descriptor.ManifestSize, + ManifestPath = Path.GetFullPath(manifestPath), + BundleSignaturePath = bundleSignaturePath is null ? null : Path.GetFullPath(bundleSignaturePath), + ManifestSignaturePath = manifestSignaturePath is null ? null : Path.GetFullPath(manifestSignaturePath) + }; + + var options = new JsonSerializerOptions(SerializerOptions) + { + WriteIndented = true + }; + + var payload = JsonSerializer.Serialize(document, options); + await File.WriteAllTextAsync(metadataPath, payload, cancellationToken).ConfigureAwait(false); + } + + private static IReadOnlyList MapOfflineComponents(List? transports) + { + if (transports is null || transports.Count == 0) + { + return Array.Empty(); + } + + var list = new List(); + foreach (var transport in transports) + { + if (transport is null || string.IsNullOrWhiteSpace(transport.Name)) + { + continue; + } + + list.Add(new OfflineKitComponentStatus( + transport.Name.Trim(), + NormalizeOptionalString(transport.Version), + NormalizeSha(transport.Digest), + transport.CapturedAt?.ToUniversalTime(), + transport.SizeBytes)); + } + + return list.Count == 0 ? Array.Empty() : list; + } + + private static string? NormalizeSha(string? digest) + { + if (string.IsNullOrWhiteSpace(digest)) + { + return null; + } + + var value = digest.Trim(); + if (value.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase)) + { + value = value.Substring("sha256:".Length); + } + + return value.ToLowerInvariant(); + } + + private sealed class OfflineKitImportMetadataPayload + { + public string? 
BundleId { get; set; } + + public string BundleSha256 { get; set; } = string.Empty; + + public long BundleSize { get; set; } + + public DateTimeOffset? CapturedAt { get; set; } + + public string? Channel { get; set; } + + public string? Kind { get; set; } + + public bool? IsDelta { get; set; } + + public string? BaseBundleId { get; set; } + + public string? ManifestSha256 { get; set; } + + public long? ManifestSize { get; set; } + } + + private static List NormalizeImages(IReadOnlyList images) + { + var normalized = new List(); + if (images is null) + { + return normalized; + } + + var seen = new HashSet(StringComparer.Ordinal); + foreach (var entry in images) + { + if (string.IsNullOrWhiteSpace(entry)) + { + continue; + } + + var trimmed = entry.Trim(); + if (seen.Add(trimmed)) + { + normalized.Add(trimmed); + } + } + + return normalized; + } + + private static IReadOnlyList ExtractReasons(List? reasons) + { + if (reasons is null || reasons.Count == 0) + { + return Array.Empty(); + } + + var list = new List(); + foreach (var reason in reasons) + { + if (!string.IsNullOrWhiteSpace(reason)) + { + list.Add(reason.Trim()); + } + } + + return list.Count == 0 ? Array.Empty() : list; + } + + private static IReadOnlyDictionary ExtractExtensionMetadata(Dictionary? extensionData) + { + if (extensionData is null || extensionData.Count == 0) + { + return EmptyMetadata; + } + + var metadata = new Dictionary(StringComparer.OrdinalIgnoreCase); + foreach (var kvp in extensionData) + { + var value = ConvertJsonElementToObject(kvp.Value); + if (value is not null) + { + metadata[kvp.Key] = value; + } + } + + if (metadata.Count == 0) + { + return EmptyMetadata; + } + + return new ReadOnlyDictionary(metadata); + } + + private static object? ConvertJsonElementToObject(JsonElement element) + { + return element.ValueKind switch + { + JsonValueKind.String => element.GetString(), + JsonValueKind.True => true, + JsonValueKind.False => false, + JsonValueKind.Number when element.TryGetInt64(out var integer) => integer, + JsonValueKind.Number when element.TryGetDouble(out var @double) => @double, + JsonValueKind.Null or JsonValueKind.Undefined => null, + _ => element.GetRawText() + }; + } + + private static string? NormalizeOptionalString(string? value) + { + return string.IsNullOrWhiteSpace(value) ? null : value.Trim(); + } + + private HttpRequestMessage CreateRequest(HttpMethod method, string relativeUri) + { + if (!Uri.TryCreate(relativeUri, UriKind.RelativeOrAbsolute, out var requestUri)) + { + throw new InvalidOperationException($"Invalid request URI '{relativeUri}'."); + } + + if (requestUri.IsAbsoluteUri) + { + // Nothing to normalize. + } + else + { + requestUri = new Uri(relativeUri.TrimStart('/'), UriKind.Relative); + } + + return new HttpRequestMessage(method, requestUri); + } + + private async Task AuthorizeRequestAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + var token = await ResolveAccessTokenAsync(cancellationToken).ConfigureAwait(false); + if (!string.IsNullOrWhiteSpace(token)) + { + request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", token); + } + } + + private IReadOnlyDictionary? ResolveOperatorMetadataIfNeeded(string? 
scope) + { + if (string.IsNullOrWhiteSpace(scope) || !scope.Contains("orch:operate", StringComparison.OrdinalIgnoreCase)) + { + return null; + } + + var reason = _options.Authority.OperatorReason?.Trim(); + var ticket = _options.Authority.OperatorTicket?.Trim(); + + if (string.IsNullOrWhiteSpace(reason) || string.IsNullOrWhiteSpace(ticket)) + { + throw new InvalidOperationException("Authority.OperatorReason and Authority.OperatorTicket must be configured when requesting orch:operate tokens. Set STELLAOPS_ORCH_REASON and STELLAOPS_ORCH_TICKET or the corresponding configuration values."); + } + + return new Dictionary(StringComparer.Ordinal) + { + [OperatorReasonParameterName] = reason, + [OperatorTicketParameterName] = ticket + }; + } + + private async Task ResolveAccessTokenAsync(CancellationToken cancellationToken) + { + if (!string.IsNullOrWhiteSpace(_options.ApiKey)) + { + return _options.ApiKey; + } + + if (_tokenClient is null || string.IsNullOrWhiteSpace(_options.Authority.Url)) + { + return null; + } + + var now = DateTimeOffset.UtcNow; + + lock (_tokenSync) + { + if (!string.IsNullOrEmpty(_cachedAccessToken) && now < _cachedAccessTokenExpiresAt - TokenRefreshSkew) + { + return _cachedAccessToken; + } + } + + var cacheKey = AuthorityTokenUtilities.BuildCacheKey(_options); + var cachedEntry = await _tokenClient.GetCachedTokenAsync(cacheKey, cancellationToken).ConfigureAwait(false); + if (cachedEntry is not null && now < cachedEntry.ExpiresAtUtc - TokenRefreshSkew) + { + lock (_tokenSync) + { + _cachedAccessToken = cachedEntry.AccessToken; + _cachedAccessTokenExpiresAt = cachedEntry.ExpiresAtUtc; + return _cachedAccessToken; + } + } + + var scope = AuthorityTokenUtilities.ResolveScope(_options); + var operatorMetadata = ResolveOperatorMetadataIfNeeded(scope); + + StellaOpsTokenResult token; + if (!string.IsNullOrWhiteSpace(_options.Authority.Username)) + { + if (string.IsNullOrWhiteSpace(_options.Authority.Password)) + { + throw new InvalidOperationException("Authority password must be configured when username is provided."); + } + + token = await _tokenClient.RequestPasswordTokenAsync( + _options.Authority.Username, + _options.Authority.Password!, + scope, + null, + cancellationToken).ConfigureAwait(false); + } + else + { + token = await _tokenClient.RequestClientCredentialsTokenAsync(scope, operatorMetadata, cancellationToken).ConfigureAwait(false); + } + + await _tokenClient.CacheTokenAsync(cacheKey, token.ToCacheEntry(), cancellationToken).ConfigureAwait(false); + + lock (_tokenSync) + { + _cachedAccessToken = token.AccessToken; + _cachedAccessTokenExpiresAt = token.ExpiresAtUtc; + return _cachedAccessToken; + } + } + + private async Task<(string Message, JsonElement? Payload)> ExtractExcititorResponseAsync(HttpResponseMessage response, CancellationToken cancellationToken) + { + if (response.Content is null || response.Content.Headers.ContentLength is 0) + { + return ($"HTTP {(int)response.StatusCode}", null); + } + + try + { + await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false); + if (stream is null || stream.Length == 0) + { + return ($"HTTP {(int)response.StatusCode}", null); + } + + using var document = await JsonDocument.ParseAsync(stream, cancellationToken: cancellationToken).ConfigureAwait(false); + var root = document.RootElement.Clone(); + string? message = null; + if (root.ValueKind == JsonValueKind.Object) + { + message = GetStringProperty(root, "message") ?? 
GetStringProperty(root, "status"); + } + + if (string.IsNullOrWhiteSpace(message)) + { + message = root.ValueKind == JsonValueKind.Object || root.ValueKind == JsonValueKind.Array + ? root.ToString() + : root.GetRawText(); + } + + return (message ?? $"HTTP {(int)response.StatusCode}", root); + } + catch (JsonException) + { + var text = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + return (string.IsNullOrWhiteSpace(text) ? $"HTTP {(int)response.StatusCode}" : text.Trim(), null); + } + } + + private static bool TryGetPropertyCaseInsensitive(JsonElement element, string propertyName, out JsonElement property) + { + if (element.ValueKind == JsonValueKind.Object && element.TryGetProperty(propertyName, out property)) + { + return true; + } + + if (element.ValueKind == JsonValueKind.Object) + { + foreach (var candidate in element.EnumerateObject()) + { + if (string.Equals(candidate.Name, propertyName, StringComparison.OrdinalIgnoreCase)) + { + property = candidate.Value; + return true; + } + } + } + + property = default; + return false; + } + + private static string? GetStringProperty(JsonElement element, string propertyName) + { + if (TryGetPropertyCaseInsensitive(element, propertyName, out var property)) + { + if (property.ValueKind == JsonValueKind.String) + { + return property.GetString(); + } + } + + return null; + } + + private static bool GetBooleanProperty(JsonElement element, string propertyName, bool defaultValue) + { + if (TryGetPropertyCaseInsensitive(element, propertyName, out var property)) + { + return property.ValueKind switch + { + JsonValueKind.True => true, + JsonValueKind.False => false, + JsonValueKind.String when bool.TryParse(property.GetString(), out var parsed) => parsed, + _ => defaultValue + }; + } + + return defaultValue; + } + + private static DateTimeOffset? GetDateTimeOffsetProperty(JsonElement element, string propertyName) + { + if (TryGetPropertyCaseInsensitive(element, propertyName, out var property) && property.ValueKind == JsonValueKind.String) + { + if (DateTimeOffset.TryParse(property.GetString(), CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal, out var parsed)) + { + return parsed.ToUniversalTime(); + } + } + + return null; + } + + private static JsonElement SerializeEnvironmentValue(object? value) + { + if (value is JsonElement element) + { + return element; + } + + return JsonSerializer.SerializeToElement(value, SerializerOptions); + } + + private static string? ExtractProblemErrorCode(ProblemDocument? problem) + { + if (problem?.Extensions is null || problem.Extensions.Count == 0) + { + return null; + } + + if (problem.Extensions.TryGetValue("code", out var value)) + { + switch (value) + { + case string code when !string.IsNullOrWhiteSpace(code): + return code; + case JsonElement element when element.ValueKind == JsonValueKind.String: + var text = element.GetString(); + return string.IsNullOrWhiteSpace(text) ? 
null : text; + } + } + + return null; + } + + private static string BuildPolicyFindingsQueryString(PolicyFindingsQuery query) + { + var parameters = new List(); + + if (query.SbomIds is not null) + { + foreach (var sbom in query.SbomIds) + { + if (!string.IsNullOrWhiteSpace(sbom)) + { + parameters.Add($"sbomId={Uri.EscapeDataString(sbom)}"); + } + } + } + + if (query.Statuses is not null && query.Statuses.Count > 0) + { + var joined = string.Join(",", query.Statuses.Where(s => !string.IsNullOrWhiteSpace(s))); + if (!string.IsNullOrWhiteSpace(joined)) + { + parameters.Add($"status={Uri.EscapeDataString(joined)}"); + } + } + + if (query.Severities is not null && query.Severities.Count > 0) + { + var joined = string.Join(",", query.Severities.Where(s => !string.IsNullOrWhiteSpace(s))); + if (!string.IsNullOrWhiteSpace(joined)) + { + parameters.Add($"severity={Uri.EscapeDataString(joined)}"); + } + } + + if (!string.IsNullOrWhiteSpace(query.Cursor)) + { + parameters.Add($"cursor={Uri.EscapeDataString(query.Cursor)}"); + } + + if (query.Page.HasValue) + { + parameters.Add($"page={query.Page.Value}"); + } + + if (query.PageSize.HasValue) + { + parameters.Add($"pageSize={query.PageSize.Value}"); + } + + if (query.Since.HasValue) + { + var value = query.Since.Value.ToUniversalTime().ToString("o", CultureInfo.InvariantCulture); + parameters.Add($"since={Uri.EscapeDataString(value)}"); + } + + if (parameters.Count == 0) + { + return string.Empty; + } + + return "?" + string.Join("&", parameters); + } + + private static PolicyFindingsPage MapPolicyFindings(PolicyFindingsResponseDocument document) + { + var items = document.Items is null + ? new List(capacity: 0) + : document.Items + .Where(item => item is not null) + .Select(item => MapPolicyFinding(item!)) + .ToList(); + + var nextCursor = string.IsNullOrWhiteSpace(document.NextCursor) ? null : document.NextCursor; + var view = new ReadOnlyCollection(items); + return new PolicyFindingsPage(view, nextCursor, document.TotalCount); + } + + private static PolicyFindingDocument MapPolicyFinding(PolicyFindingDocumentDocument document) + { + var findingId = document.FindingId; + if (string.IsNullOrWhiteSpace(findingId)) + { + throw new InvalidOperationException("Policy finding response missing findingId."); + } + + var status = string.IsNullOrWhiteSpace(document.Status) ? "unknown" : document.Status!; + var severityNormalized = document.Severity?.Normalized; + if (string.IsNullOrWhiteSpace(severityNormalized)) + { + severityNormalized = "unknown"; + } + + var severity = new PolicyFindingSeverity(severityNormalized!, document.Severity?.Score); + + var sbomId = string.IsNullOrWhiteSpace(document.SbomId) ? "(unknown)" : document.SbomId!; + + IReadOnlyList advisoryIds; + if (document.AdvisoryIds is null || document.AdvisoryIds.Count == 0) + { + advisoryIds = Array.Empty(); + } + else + { + advisoryIds = document.AdvisoryIds + .Where(id => !string.IsNullOrWhiteSpace(id)) + .ToArray(); + } + + PolicyFindingVexMetadata? vex = null; + if (document.Vex is not null) + { + if (!string.IsNullOrWhiteSpace(document.Vex.WinningStatementId) + || !string.IsNullOrWhiteSpace(document.Vex.Source) + || !string.IsNullOrWhiteSpace(document.Vex.Status)) + { + vex = new PolicyFindingVexMetadata( + string.IsNullOrWhiteSpace(document.Vex.WinningStatementId) ? null : document.Vex.WinningStatementId, + string.IsNullOrWhiteSpace(document.Vex.Source) ? null : document.Vex.Source, + string.IsNullOrWhiteSpace(document.Vex.Status) ? 
null : document.Vex.Status); + } + } + + var updatedAt = document.UpdatedAt ?? DateTimeOffset.MinValue; + + return new PolicyFindingDocument( + findingId, + status, + severity, + sbomId, + advisoryIds, + vex, + document.PolicyVersion ?? 0, + updatedAt, + string.IsNullOrWhiteSpace(document.RunId) ? null : document.RunId); + } + + private static PolicyFindingExplainResult MapPolicyFindingExplain(PolicyFindingExplainResponseDocument document) + { + var findingId = document.FindingId; + if (string.IsNullOrWhiteSpace(findingId)) + { + throw new InvalidOperationException("Policy finding explain response missing findingId."); + } + + var steps = document.Steps is null + ? new List(capacity: 0) + : document.Steps + .Where(step => step is not null) + .Select(step => MapPolicyFindingExplainStep(step!)) + .ToList(); + + var hints = document.SealedHints is null + ? new List(capacity: 0) + : document.SealedHints + .Where(hint => hint is not null && !string.IsNullOrWhiteSpace(hint!.Message)) + .Select(hint => new PolicyFindingExplainHint(hint!.Message!.Trim())) + .ToList(); + + return new PolicyFindingExplainResult( + findingId, + document.PolicyVersion ?? 0, + new ReadOnlyCollection(steps), + new ReadOnlyCollection(hints)); + } + + private static PolicyFindingExplainStep MapPolicyFindingExplainStep(PolicyFindingExplainStepDocument document) + { + var rule = string.IsNullOrWhiteSpace(document.Rule) ? "(unknown)" : document.Rule!; + var status = string.IsNullOrWhiteSpace(document.Status) ? null : document.Status; + var action = string.IsNullOrWhiteSpace(document.Action) ? null : document.Action; + + IReadOnlyDictionary inputs = document.Inputs is null + ? new ReadOnlyDictionary(new Dictionary(0, StringComparer.Ordinal)) + : new ReadOnlyDictionary(document.Inputs + .Where(kvp => !string.IsNullOrWhiteSpace(kvp.Key)) + .ToDictionary( + kvp => kvp.Key, + kvp => ConvertJsonElementToString(kvp.Value), + StringComparer.Ordinal)); + + IReadOnlyDictionary? evidence = null; + if (document.Evidence is not null && document.Evidence.Count > 0) + { + var evidenceDict = document.Evidence + .Where(kvp => !string.IsNullOrWhiteSpace(kvp.Key)) + .ToDictionary( + kvp => kvp.Key, + kvp => ConvertJsonElementToString(kvp.Value), + StringComparer.Ordinal); + + evidence = new ReadOnlyDictionary(evidenceDict); + } + + return new PolicyFindingExplainStep( + rule, + status, + action, + document.Score, + inputs, + evidence); + } + + private static string ConvertJsonElementToString(JsonElement element) + { + return element.ValueKind switch + { + JsonValueKind.String => element.GetString() ?? string.Empty, + JsonValueKind.Number => element.TryGetInt64(out var longValue) + ? 
longValue.ToString(CultureInfo.InvariantCulture) + : element.GetDouble().ToString(CultureInfo.InvariantCulture), + JsonValueKind.True => "true", + JsonValueKind.False => "false", + JsonValueKind.Null => "null", + JsonValueKind.Array => string.Join(", ", element.EnumerateArray().Select(ConvertJsonElementToString)), + JsonValueKind.Object => element.GetRawText(), + _ => element.GetRawText() + }; + } + + private static PolicyActivationResult MapPolicyActivation(PolicyActivationResponseDocument document) + { + if (document.Revision is null) + { + throw new InvalidOperationException("Policy activation response missing revision data."); + } + + var revisionDocument = document.Revision; + if (string.IsNullOrWhiteSpace(revisionDocument.PackId)) + { + throw new InvalidOperationException("Policy activation revision missing policy identifier."); + } + + if (!revisionDocument.Version.HasValue) + { + throw new InvalidOperationException("Policy activation revision missing version number."); + } + + var approvals = new List(); + if (revisionDocument.Approvals is not null) + { + foreach (var approval in revisionDocument.Approvals) + { + if (approval is null || string.IsNullOrWhiteSpace(approval.ActorId) || !approval.ApprovedAt.HasValue) + { + continue; + } + + approvals.Add(new PolicyActivationApproval( + approval.ActorId, + approval.ApprovedAt.Value.ToUniversalTime(), + NormalizeOptionalString(approval.Comment))); + } + } + + var revision = new PolicyActivationRevision( + revisionDocument.PackId, + revisionDocument.Version.Value, + NormalizeOptionalString(revisionDocument.Status) ?? "unknown", + revisionDocument.RequiresTwoPersonApproval ?? false, + revisionDocument.CreatedAt?.ToUniversalTime() ?? DateTimeOffset.MinValue, + revisionDocument.ActivatedAt?.ToUniversalTime(), + new ReadOnlyCollection(approvals)); + + return new PolicyActivationResult( + NormalizeOptionalString(document.Status) ?? "unknown", + revision); + } + + private static PolicySimulationResult MapPolicySimulation(PolicySimulationResponseDocument document) + { + var diffDocument = document.Diff ?? throw new InvalidOperationException("Policy simulation response missing diff summary."); + + var severity = diffDocument.BySeverity is null + ? new Dictionary(0, StringComparer.Ordinal) + : diffDocument.BySeverity + .Where(kvp => !string.IsNullOrWhiteSpace(kvp.Key) && kvp.Value is not null) + .ToDictionary( + kvp => kvp.Key, + kvp => new PolicySimulationSeverityDelta(kvp.Value!.Up, kvp.Value.Down), + StringComparer.Ordinal); + + var severityView = new ReadOnlyDictionary(severity); + + var ruleHits = diffDocument.RuleHits is null + ? new List() + : diffDocument.RuleHits + .Where(hit => hit is not null) + .Select(hit => new PolicySimulationRuleDelta( + hit!.RuleId ?? string.Empty, + hit.RuleName ?? string.Empty, + hit.Up, + hit.Down)) + .ToList(); + + var ruleHitsView = ruleHits.AsReadOnly(); + + var diff = new PolicySimulationDiff( + string.IsNullOrWhiteSpace(diffDocument.SchemaVersion) ? null : diffDocument.SchemaVersion, + diffDocument.Added ?? 0, + diffDocument.Removed ?? 0, + diffDocument.Unchanged ?? 0, + severityView, + ruleHitsView); + + return new PolicySimulationResult( + diff, + string.IsNullOrWhiteSpace(document.ExplainUri) ? null : document.ExplainUri); + } + + private void EnsureBackendConfigured() + { + if (_httpClient.BaseAddress is null) + { + throw new InvalidOperationException("Backend URL is not configured. 
Provide STELLAOPS_BACKEND_URL or configure appsettings."); + } + } + + private string ResolveArtifactPath(string outputPath, string channel) + { + if (!string.IsNullOrWhiteSpace(outputPath)) + { + return Path.GetFullPath(outputPath); + } + + var directory = string.IsNullOrWhiteSpace(_options.ScannerCacheDirectory) + ? Directory.GetCurrentDirectory() + : Path.GetFullPath(_options.ScannerCacheDirectory); + + Directory.CreateDirectory(directory); + var fileName = $"stellaops-scanner-{channel}.tar.gz"; + return Path.Combine(directory, fileName); + } + + private async Task CreateFailureMessageAsync(HttpResponseMessage response, CancellationToken cancellationToken) + { + var (message, _) = await CreateFailureDetailsAsync(response, cancellationToken).ConfigureAwait(false); + return message; + } + + private async Task<(string Message, ProblemDocument? Problem)> CreateFailureDetailsAsync(HttpResponseMessage response, CancellationToken cancellationToken) + { + var statusCode = (int)response.StatusCode; + var builder = new StringBuilder(); + builder.Append("Backend request failed with status "); + builder.Append(statusCode); + builder.Append(' '); + builder.Append(response.ReasonPhrase ?? "Unknown"); + + ProblemDocument? problem = null; + + if (response.Content is not null && response.Content.Headers.ContentLength is > 0) + { + string? raw = null; + try + { + raw = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + if (!string.IsNullOrWhiteSpace(raw)) + { + problem = JsonSerializer.Deserialize(raw, SerializerOptions); + } + } + catch (JsonException) + { + problem = null; + } + + if (problem is not null) + { + if (!string.IsNullOrWhiteSpace(problem.Title)) + { + builder.AppendLine().Append(problem.Title); + } + + if (!string.IsNullOrWhiteSpace(problem.Detail)) + { + builder.AppendLine().Append(problem.Detail); + } + } + else if (!string.IsNullOrWhiteSpace(raw)) + { + builder.AppendLine().Append(raw); + } + } + + return (builder.ToString(), problem); + } + + private static string? ExtractHeaderValue(HttpResponseHeaders headers, string name) + { + if (headers.TryGetValues(name, out var values)) + { + return values.FirstOrDefault(); + } + + return null; + } + + private static string? NormalizeExpectedDigest(string? digest) + { + if (string.IsNullOrWhiteSpace(digest)) + { + return null; + } + + var trimmed = digest.Trim(); + return trimmed.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase) + ? trimmed[7..] + : trimmed; + } + + private async Task ValidateDigestAsync(string filePath, string? expectedDigest, CancellationToken cancellationToken) + { + string digestHex; + await using (var stream = File.OpenRead(filePath)) + { + var hash = await SHA256.HashDataAsync(stream, cancellationToken).ConfigureAwait(false); + digestHex = Convert.ToHexString(hash).ToLowerInvariant(); + } + + if (!string.IsNullOrWhiteSpace(expectedDigest)) + { + var normalized = NormalizeDigest(expectedDigest); + if (!normalized.Equals(digestHex, StringComparison.OrdinalIgnoreCase)) + { + File.Delete(filePath); + throw new InvalidOperationException($"Scanner digest mismatch. 
Expected sha256:{normalized}, calculated sha256:{digestHex}.");
+            }
+        }
+        else
+        {
+            _logger.LogWarning("Scanner download missing X-StellaOps-Digest header; relying on computed digest only.");
+        }
+
+        return digestHex;
+    }
+
+    private static string NormalizeDigest(string digest)
+    {
+        if (digest.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase))
+        {
+            return digest[7..];
+        }
+
+        return digest;
+    }
+
+    private static async Task<string> ComputeSha256Async(string filePath, CancellationToken cancellationToken)
+    {
+        await using var stream = File.OpenRead(filePath);
+        var hash = await SHA256.HashDataAsync(stream, cancellationToken).ConfigureAwait(false);
+        return Convert.ToHexString(hash).ToLowerInvariant();
+    }
+
+    private async Task ValidateSignatureAsync(string? signatureHeader, string digestHex, bool verbose, CancellationToken cancellationToken)
+    {
+        if (string.IsNullOrWhiteSpace(_options.ScannerSignaturePublicKeyPath))
+        {
+            if (!string.IsNullOrWhiteSpace(signatureHeader))
+            {
+                _logger.LogDebug("Signature header present but no public key configured; skipping validation.");
+            }
+            return;
+        }
+
+        if (string.IsNullOrWhiteSpace(signatureHeader))
+        {
+            throw new InvalidOperationException("Scanner signature missing while a public key is configured.");
+        }
+
+        var publicKeyPath = Path.GetFullPath(_options.ScannerSignaturePublicKeyPath);
+        if (!File.Exists(publicKeyPath))
+        {
+            throw new FileNotFoundException("Scanner signature public key not found.", publicKeyPath);
+        }
+
+        var signatureBytes = Convert.FromBase64String(signatureHeader);
+        var digestBytes = Convert.FromHexString(digestHex);
+
+        var pem = await File.ReadAllTextAsync(publicKeyPath, cancellationToken).ConfigureAwait(false);
+        using var rsa = RSA.Create();
+        rsa.ImportFromPem(pem);
+
+        var valid = rsa.VerifyHash(digestBytes, signatureBytes, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1);
+        if (!valid)
+        {
+            throw new InvalidOperationException("Scanner signature validation failed.");
+        }
+
+        if (verbose)
+        {
+            _logger.LogDebug("Scanner signature validated using key {KeyPath}.", publicKeyPath);
+        }
+    }
+
+    private void PersistMetadata(string outputPath, string channel, string digestHex, string?
signatureHeader, HttpResponseMessage response) + { + var metadata = new + { + channel, + digest = $"sha256:{digestHex}", + signature = signatureHeader, + downloadedAt = DateTimeOffset.UtcNow, + source = response.RequestMessage?.RequestUri?.ToString(), + sizeBytes = new FileInfo(outputPath).Length, + headers = new + { + etag = response.Headers.ETag?.Tag, + lastModified = response.Content.Headers.LastModified, + contentType = response.Content.Headers.ContentType?.ToString() + } + }; + + var metadataPath = outputPath + ".metadata.json"; + var json = JsonSerializer.Serialize(metadata, new JsonSerializerOptions + { + WriteIndented = true + }); + + File.WriteAllText(metadataPath, json); + } + + private static TimeSpan GetRetryDelay(HttpResponseMessage response, int attempt) + { + if (response.Headers.TryGetValues("Retry-After", out var retryValues)) + { + var value = retryValues.FirstOrDefault(); + if (!string.IsNullOrWhiteSpace(value)) + { + if (int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var seconds) && seconds >= 0) + { + return TimeSpan.FromSeconds(Math.Min(seconds, 300)); + } + + if (DateTimeOffset.TryParse(value, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, out var when)) + { + var delta = when - DateTimeOffset.UtcNow; + if (delta > TimeSpan.Zero) + { + return delta < TimeSpan.FromMinutes(5) ? delta : TimeSpan.FromMinutes(5); + } + } + } + } + + var fallbackSeconds = Math.Min(60, Math.Pow(2, attempt)); + return TimeSpan.FromSeconds(fallbackSeconds); + } +} diff --git a/src/StellaOps.Cli/Services/ConcelierObservationsClient.cs b/src/Cli/StellaOps.Cli/Services/ConcelierObservationsClient.cs similarity index 97% rename from src/StellaOps.Cli/Services/ConcelierObservationsClient.cs rename to src/Cli/StellaOps.Cli/Services/ConcelierObservationsClient.cs index ace3002a..d3862c01 100644 --- a/src/StellaOps.Cli/Services/ConcelierObservationsClient.cs +++ b/src/Cli/StellaOps.Cli/Services/ConcelierObservationsClient.cs @@ -1,250 +1,250 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Net.Http; -using System.Net.Http.Headers; -using System.Text; -using System.Text.Json; -using System.Globalization; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using StellaOps.Auth.Abstractions; -using StellaOps.Auth.Client; -using StellaOps.Cli.Configuration; -using StellaOps.Cli.Services.Models; - -namespace StellaOps.Cli.Services; - -internal sealed class ConcelierObservationsClient : IConcelierObservationsClient -{ - private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web); - private static readonly TimeSpan TokenRefreshSkew = TimeSpan.FromSeconds(30); - - private readonly HttpClient httpClient; - private readonly StellaOpsCliOptions options; - private readonly ILogger logger; - private readonly IStellaOpsTokenClient? tokenClient; - private readonly object tokenSync = new(); - - private string? cachedAccessToken; - private DateTimeOffset cachedAccessTokenExpiresAt = DateTimeOffset.MinValue; - - public ConcelierObservationsClient( - HttpClient httpClient, - StellaOpsCliOptions options, - ILogger logger, - IStellaOpsTokenClient? tokenClient = null) - { - this.httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient)); - this.options = options ?? throw new ArgumentNullException(nameof(options)); - this.logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); - this.tokenClient = tokenClient; - - if (!string.IsNullOrWhiteSpace(options.ConcelierUrl) && httpClient.BaseAddress is null) - { - if (Uri.TryCreate(options.ConcelierUrl, UriKind.Absolute, out var baseUri)) - { - httpClient.BaseAddress = baseUri; - } - } - } - - public async Task GetObservationsAsync( - AdvisoryObservationsQuery query, - CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(query); - - EnsureConfigured(); - - var requestUri = BuildRequestUri(query); - using var request = new HttpRequestMessage(HttpMethod.Get, requestUri); - await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); - - using var response = await httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); - if (!response.IsSuccessStatusCode) - { - var payload = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - logger.LogError( - "Failed to query observations (status {StatusCode}). Response: {Payload}", - (int)response.StatusCode, - string.IsNullOrWhiteSpace(payload) ? "" : payload); - - response.EnsureSuccessStatusCode(); - } - - await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false); - var result = await JsonSerializer - .DeserializeAsync(stream, SerializerOptions, cancellationToken) - .ConfigureAwait(false); - - return result ?? new AdvisoryObservationsResponse(); - } - - private static string BuildRequestUri(AdvisoryObservationsQuery query) - { - var builder = new StringBuilder("/concelier/observations?tenant="); - builder.Append(Uri.EscapeDataString(query.Tenant)); - - AppendValues(builder, "observationId", query.ObservationIds); - AppendValues(builder, "alias", query.Aliases); - AppendValues(builder, "purl", query.Purls); - AppendValues(builder, "cpe", query.Cpes); - - if (query.Limit.HasValue && query.Limit.Value > 0) - { - builder.Append('&'); - builder.Append("limit="); - builder.Append(query.Limit.Value.ToString(CultureInfo.InvariantCulture)); - } - - if (!string.IsNullOrWhiteSpace(query.Cursor)) - { - builder.Append('&'); - builder.Append("cursor="); - builder.Append(Uri.EscapeDataString(query.Cursor)); - } - - return builder.ToString(); - - static void AppendValues(StringBuilder builder, string name, IReadOnlyList values) - { - if (values is null || values.Count == 0) - { - return; - } - - foreach (var value in values) - { - if (string.IsNullOrWhiteSpace(value)) - { - continue; - } - - builder.Append('&'); - builder.Append(name); - builder.Append('='); - builder.Append(Uri.EscapeDataString(value)); - } - } - } - - private void EnsureConfigured() - { - if (!string.IsNullOrWhiteSpace(options.ConcelierUrl)) - { - return; - } - - throw new InvalidOperationException( - "ConcelierUrl is not configured. 
Set StellaOps:ConcelierUrl or STELLAOPS_CONCELIER_URL."); - } - - private async Task AuthorizeRequestAsync(HttpRequestMessage request, CancellationToken cancellationToken) - { - var token = await ResolveAccessTokenAsync(cancellationToken).ConfigureAwait(false); - if (!string.IsNullOrWhiteSpace(token)) - { - request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", token); - } - } - - private async Task ResolveAccessTokenAsync(CancellationToken cancellationToken) - { - if (!string.IsNullOrWhiteSpace(options.ApiKey)) - { - return options.ApiKey; - } - - if (tokenClient is null || string.IsNullOrWhiteSpace(options.Authority.Url)) - { - return null; - } - - var now = DateTimeOffset.UtcNow; - - lock (tokenSync) - { - if (!string.IsNullOrEmpty(cachedAccessToken) && now < cachedAccessTokenExpiresAt - TokenRefreshSkew) - { - return cachedAccessToken; - } - } - - var (scope, cacheKey) = BuildScopeAndCacheKey(options); - var cachedEntry = await tokenClient.GetCachedTokenAsync(cacheKey, cancellationToken).ConfigureAwait(false); - if (cachedEntry is not null && now < cachedEntry.ExpiresAtUtc - TokenRefreshSkew) - { - lock (tokenSync) - { - cachedAccessToken = cachedEntry.AccessToken; - cachedAccessTokenExpiresAt = cachedEntry.ExpiresAtUtc; - return cachedAccessToken; - } - } - - StellaOpsTokenResult token; - if (!string.IsNullOrWhiteSpace(options.Authority.Username)) - { - if (string.IsNullOrWhiteSpace(options.Authority.Password)) - { - throw new InvalidOperationException("Authority password must be configured when username is provided."); - } - - token = await tokenClient.RequestPasswordTokenAsync( - options.Authority.Username, - options.Authority.Password!, - scope, - null, - cancellationToken).ConfigureAwait(false); - } - else - { - token = await tokenClient.RequestClientCredentialsTokenAsync(scope, null, cancellationToken).ConfigureAwait(false); - } - - await tokenClient.CacheTokenAsync(cacheKey, token.ToCacheEntry(), cancellationToken).ConfigureAwait(false); - - lock (tokenSync) - { - cachedAccessToken = token.AccessToken; - cachedAccessTokenExpiresAt = token.ExpiresAtUtc; - return cachedAccessToken; - } - } - - private static (string Scope, string CacheKey) BuildScopeAndCacheKey(StellaOpsCliOptions options) - { - var baseScope = AuthorityTokenUtilities.ResolveScope(options); - var finalScope = EnsureScope(baseScope, StellaOpsScopes.VulnRead); - - var credential = !string.IsNullOrWhiteSpace(options.Authority.Username) - ? 
$"user:{options.Authority.Username}" - : $"client:{options.Authority.ClientId}"; - - var cacheKey = $"{options.Authority.Url}|{credential}|{finalScope}"; - return (finalScope, cacheKey); - } - - private static string EnsureScope(string scopes, string required) - { - if (string.IsNullOrWhiteSpace(scopes)) - { - return required; - } - - var parts = scopes - .Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries) - .Select(static scope => scope.ToLowerInvariant()) - .Distinct(StringComparer.Ordinal) - .ToList(); - - if (!parts.Contains(required, StringComparer.Ordinal)) - { - parts.Add(required); - } - - return string.Join(' ', parts); - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Text; +using System.Text.Json; +using System.Globalization; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using StellaOps.Auth.Abstractions; +using StellaOps.Auth.Client; +using StellaOps.Cli.Configuration; +using StellaOps.Cli.Services.Models; + +namespace StellaOps.Cli.Services; + +internal sealed class ConcelierObservationsClient : IConcelierObservationsClient +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web); + private static readonly TimeSpan TokenRefreshSkew = TimeSpan.FromSeconds(30); + + private readonly HttpClient httpClient; + private readonly StellaOpsCliOptions options; + private readonly ILogger logger; + private readonly IStellaOpsTokenClient? tokenClient; + private readonly object tokenSync = new(); + + private string? cachedAccessToken; + private DateTimeOffset cachedAccessTokenExpiresAt = DateTimeOffset.MinValue; + + public ConcelierObservationsClient( + HttpClient httpClient, + StellaOpsCliOptions options, + ILogger logger, + IStellaOpsTokenClient? tokenClient = null) + { + this.httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient)); + this.options = options ?? throw new ArgumentNullException(nameof(options)); + this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); + this.tokenClient = tokenClient; + + if (!string.IsNullOrWhiteSpace(options.ConcelierUrl) && httpClient.BaseAddress is null) + { + if (Uri.TryCreate(options.ConcelierUrl, UriKind.Absolute, out var baseUri)) + { + httpClient.BaseAddress = baseUri; + } + } + } + + public async Task GetObservationsAsync( + AdvisoryObservationsQuery query, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(query); + + EnsureConfigured(); + + var requestUri = BuildRequestUri(query); + using var request = new HttpRequestMessage(HttpMethod.Get, requestUri); + await AuthorizeRequestAsync(request, cancellationToken).ConfigureAwait(false); + + using var response = await httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + var payload = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + logger.LogError( + "Failed to query observations (status {StatusCode}). Response: {Payload}", + (int)response.StatusCode, + string.IsNullOrWhiteSpace(payload) ? "" : payload); + + response.EnsureSuccessStatusCode(); + } + + await using var stream = await response.Content.ReadAsStreamAsync(cancellationToken).ConfigureAwait(false); + var result = await JsonSerializer + .DeserializeAsync(stream, SerializerOptions, cancellationToken) + .ConfigureAwait(false); + + return result ?? 
new AdvisoryObservationsResponse(); + } + + private static string BuildRequestUri(AdvisoryObservationsQuery query) + { + var builder = new StringBuilder("/concelier/observations?tenant="); + builder.Append(Uri.EscapeDataString(query.Tenant)); + + AppendValues(builder, "observationId", query.ObservationIds); + AppendValues(builder, "alias", query.Aliases); + AppendValues(builder, "purl", query.Purls); + AppendValues(builder, "cpe", query.Cpes); + + if (query.Limit.HasValue && query.Limit.Value > 0) + { + builder.Append('&'); + builder.Append("limit="); + builder.Append(query.Limit.Value.ToString(CultureInfo.InvariantCulture)); + } + + if (!string.IsNullOrWhiteSpace(query.Cursor)) + { + builder.Append('&'); + builder.Append("cursor="); + builder.Append(Uri.EscapeDataString(query.Cursor)); + } + + return builder.ToString(); + + static void AppendValues(StringBuilder builder, string name, IReadOnlyList values) + { + if (values is null || values.Count == 0) + { + return; + } + + foreach (var value in values) + { + if (string.IsNullOrWhiteSpace(value)) + { + continue; + } + + builder.Append('&'); + builder.Append(name); + builder.Append('='); + builder.Append(Uri.EscapeDataString(value)); + } + } + } + + private void EnsureConfigured() + { + if (!string.IsNullOrWhiteSpace(options.ConcelierUrl)) + { + return; + } + + throw new InvalidOperationException( + "ConcelierUrl is not configured. Set StellaOps:ConcelierUrl or STELLAOPS_CONCELIER_URL."); + } + + private async Task AuthorizeRequestAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + var token = await ResolveAccessTokenAsync(cancellationToken).ConfigureAwait(false); + if (!string.IsNullOrWhiteSpace(token)) + { + request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", token); + } + } + + private async Task ResolveAccessTokenAsync(CancellationToken cancellationToken) + { + if (!string.IsNullOrWhiteSpace(options.ApiKey)) + { + return options.ApiKey; + } + + if (tokenClient is null || string.IsNullOrWhiteSpace(options.Authority.Url)) + { + return null; + } + + var now = DateTimeOffset.UtcNow; + + lock (tokenSync) + { + if (!string.IsNullOrEmpty(cachedAccessToken) && now < cachedAccessTokenExpiresAt - TokenRefreshSkew) + { + return cachedAccessToken; + } + } + + var (scope, cacheKey) = BuildScopeAndCacheKey(options); + var cachedEntry = await tokenClient.GetCachedTokenAsync(cacheKey, cancellationToken).ConfigureAwait(false); + if (cachedEntry is not null && now < cachedEntry.ExpiresAtUtc - TokenRefreshSkew) + { + lock (tokenSync) + { + cachedAccessToken = cachedEntry.AccessToken; + cachedAccessTokenExpiresAt = cachedEntry.ExpiresAtUtc; + return cachedAccessToken; + } + } + + StellaOpsTokenResult token; + if (!string.IsNullOrWhiteSpace(options.Authority.Username)) + { + if (string.IsNullOrWhiteSpace(options.Authority.Password)) + { + throw new InvalidOperationException("Authority password must be configured when username is provided."); + } + + token = await tokenClient.RequestPasswordTokenAsync( + options.Authority.Username, + options.Authority.Password!, + scope, + null, + cancellationToken).ConfigureAwait(false); + } + else + { + token = await tokenClient.RequestClientCredentialsTokenAsync(scope, null, cancellationToken).ConfigureAwait(false); + } + + await tokenClient.CacheTokenAsync(cacheKey, token.ToCacheEntry(), cancellationToken).ConfigureAwait(false); + + lock (tokenSync) + { + cachedAccessToken = token.AccessToken; + cachedAccessTokenExpiresAt = token.ExpiresAtUtc; + return cachedAccessToken; + } + 
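+            // The freshly issued token is cached in two places: the token client's cache
+            // (keyed by authority URL, credential, and scope via BuildScopeAndCacheKey) and
+            // the in-memory fields guarded by tokenSync, so subsequent calls inside the
+            // TokenRefreshSkew window avoid another token request.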
}
+    }
+
+    private static (string Scope, string CacheKey) BuildScopeAndCacheKey(StellaOpsCliOptions options)
+    {
+        var baseScope = AuthorityTokenUtilities.ResolveScope(options);
+        var finalScope = EnsureScope(baseScope, StellaOpsScopes.VulnRead);
+
+        var credential = !string.IsNullOrWhiteSpace(options.Authority.Username)
+            ? $"user:{options.Authority.Username}"
+            : $"client:{options.Authority.ClientId}";
+
+        var cacheKey = $"{options.Authority.Url}|{credential}|{finalScope}";
+        return (finalScope, cacheKey);
+    }
+
+    private static string EnsureScope(string scopes, string required)
+    {
+        if (string.IsNullOrWhiteSpace(scopes))
+        {
+            return required;
+        }
+
+        var parts = scopes
+            .Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)
+            .Select(static scope => scope.ToLowerInvariant())
+            .Distinct(StringComparer.Ordinal)
+            .ToList();
+
+        if (!parts.Contains(required, StringComparer.Ordinal))
+        {
+            parts.Add(required);
+        }
+
+        return string.Join(' ', parts);
+    }
+}
diff --git a/src/StellaOps.Cli/Services/IAuthorityRevocationClient.cs b/src/Cli/StellaOps.Cli/Services/IAuthorityRevocationClient.cs
similarity index 100%
rename from src/StellaOps.Cli/Services/IAuthorityRevocationClient.cs
rename to src/Cli/StellaOps.Cli/Services/IAuthorityRevocationClient.cs
diff --git a/src/StellaOps.Cli/Services/IBackendOperationsClient.cs b/src/Cli/StellaOps.Cli/Services/IBackendOperationsClient.cs
similarity index 100%
rename from src/StellaOps.Cli/Services/IBackendOperationsClient.cs
rename to src/Cli/StellaOps.Cli/Services/IBackendOperationsClient.cs
diff --git a/src/StellaOps.Cli/Services/IConcelierObservationsClient.cs b/src/Cli/StellaOps.Cli/Services/IConcelierObservationsClient.cs
similarity index 96%
rename from src/StellaOps.Cli/Services/IConcelierObservationsClient.cs
rename to src/Cli/StellaOps.Cli/Services/IConcelierObservationsClient.cs
index 2e3d8ec8..446685ae 100644
--- a/src/StellaOps.Cli/Services/IConcelierObservationsClient.cs
+++ b/src/Cli/StellaOps.Cli/Services/IConcelierObservationsClient.cs
@@ -1,12 +1,12 @@
-using System.Threading;
-using System.Threading.Tasks;
-using StellaOps.Cli.Services.Models;
-
-namespace StellaOps.Cli.Services;
-
-internal interface IConcelierObservationsClient
-{
-    Task<AdvisoryObservationsResponse> GetObservationsAsync(
-        AdvisoryObservationsQuery query,
-        CancellationToken cancellationToken);
-}
+using System.Threading;
+using System.Threading.Tasks;
+using StellaOps.Cli.Services.Models;
+
+namespace StellaOps.Cli.Services;
+
+internal interface IConcelierObservationsClient
+{
+    Task<AdvisoryObservationsResponse> GetObservationsAsync(
+        AdvisoryObservationsQuery query,
+        CancellationToken cancellationToken);
+}
diff --git a/src/StellaOps.Cli/Services/IScannerExecutor.cs b/src/Cli/StellaOps.Cli/Services/IScannerExecutor.cs
similarity index 100%
rename from src/StellaOps.Cli/Services/IScannerExecutor.cs
rename to src/Cli/StellaOps.Cli/Services/IScannerExecutor.cs
diff --git a/src/StellaOps.Cli/Services/IScannerInstaller.cs b/src/Cli/StellaOps.Cli/Services/IScannerInstaller.cs
similarity index 100%
rename from src/StellaOps.Cli/Services/IScannerInstaller.cs
rename to src/Cli/StellaOps.Cli/Services/IScannerInstaller.cs
diff --git a/src/StellaOps.Cli/Services/Models/AdvisoryObservationsModels.cs b/src/Cli/StellaOps.Cli/Services/Models/AdvisoryObservationsModels.cs
similarity index 96%
rename from src/StellaOps.Cli/Services/Models/AdvisoryObservationsModels.cs
rename to src/Cli/StellaOps.Cli/Services/Models/AdvisoryObservationsModels.cs
index 0af503fc..7ad4bb0c 100644
---
a/src/StellaOps.Cli/Services/Models/AdvisoryObservationsModels.cs +++ b/src/Cli/StellaOps.Cli/Services/Models/AdvisoryObservationsModels.cs @@ -1,117 +1,117 @@ -using System; -using System.Collections.Generic; -using System.Text.Json.Serialization; - -namespace StellaOps.Cli.Services.Models; - -internal sealed record AdvisoryObservationsQuery( - string Tenant, - IReadOnlyList ObservationIds, - IReadOnlyList Aliases, - IReadOnlyList Purls, - IReadOnlyList Cpes, - int? Limit, - string? Cursor); - -internal sealed class AdvisoryObservationsResponse -{ - [JsonPropertyName("observations")] - public IReadOnlyList Observations { get; init; } = - Array.Empty(); - - [JsonPropertyName("linkset")] - public AdvisoryObservationLinksetAggregate Linkset { get; init; } = - new(); - - [JsonPropertyName("nextCursor")] - public string? NextCursor { get; init; } - - [JsonPropertyName("hasMore")] - public bool HasMore { get; init; } -} - -internal sealed class AdvisoryObservationDocument -{ - [JsonPropertyName("observationId")] - public string ObservationId { get; init; } = string.Empty; - - [JsonPropertyName("tenant")] - public string Tenant { get; init; } = string.Empty; - - [JsonPropertyName("source")] - public AdvisoryObservationSource Source { get; init; } = new(); - - [JsonPropertyName("upstream")] - public AdvisoryObservationUpstream Upstream { get; init; } = new(); - - [JsonPropertyName("linkset")] - public AdvisoryObservationLinkset Linkset { get; init; } = new(); - - [JsonPropertyName("createdAt")] - public DateTimeOffset CreatedAt { get; init; } -} - -internal sealed class AdvisoryObservationSource -{ - [JsonPropertyName("vendor")] - public string Vendor { get; init; } = string.Empty; - - [JsonPropertyName("stream")] - public string Stream { get; init; } = string.Empty; - - [JsonPropertyName("api")] - public string Api { get; init; } = string.Empty; - - [JsonPropertyName("collectorVersion")] - public string? CollectorVersion { get; init; } -} - -internal sealed class AdvisoryObservationUpstream -{ - [JsonPropertyName("upstreamId")] - public string UpstreamId { get; init; } = string.Empty; - - [JsonPropertyName("documentVersion")] - public string? 
DocumentVersion { get; init; } -} - -internal sealed class AdvisoryObservationLinkset -{ - [JsonPropertyName("aliases")] - public IReadOnlyList Aliases { get; init; } = Array.Empty(); - - [JsonPropertyName("purls")] - public IReadOnlyList Purls { get; init; } = Array.Empty(); - - [JsonPropertyName("cpes")] - public IReadOnlyList Cpes { get; init; } = Array.Empty(); - - [JsonPropertyName("references")] - public IReadOnlyList References { get; init; } = - Array.Empty(); -} - -internal sealed class AdvisoryObservationReference -{ - [JsonPropertyName("type")] - public string Type { get; init; } = string.Empty; - - [JsonPropertyName("url")] - public string Url { get; init; } = string.Empty; -} - -internal sealed class AdvisoryObservationLinksetAggregate -{ - [JsonPropertyName("aliases")] - public IReadOnlyList Aliases { get; init; } = Array.Empty(); - - [JsonPropertyName("purls")] - public IReadOnlyList Purls { get; init; } = Array.Empty(); - - [JsonPropertyName("cpes")] - public IReadOnlyList Cpes { get; init; } = Array.Empty(); - - [JsonPropertyName("references")] - public IReadOnlyList References { get; init; } = - Array.Empty(); -} +using System; +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace StellaOps.Cli.Services.Models; + +internal sealed record AdvisoryObservationsQuery( + string Tenant, + IReadOnlyList ObservationIds, + IReadOnlyList Aliases, + IReadOnlyList Purls, + IReadOnlyList Cpes, + int? Limit, + string? Cursor); + +internal sealed class AdvisoryObservationsResponse +{ + [JsonPropertyName("observations")] + public IReadOnlyList Observations { get; init; } = + Array.Empty(); + + [JsonPropertyName("linkset")] + public AdvisoryObservationLinksetAggregate Linkset { get; init; } = + new(); + + [JsonPropertyName("nextCursor")] + public string? NextCursor { get; init; } + + [JsonPropertyName("hasMore")] + public bool HasMore { get; init; } +} + +internal sealed class AdvisoryObservationDocument +{ + [JsonPropertyName("observationId")] + public string ObservationId { get; init; } = string.Empty; + + [JsonPropertyName("tenant")] + public string Tenant { get; init; } = string.Empty; + + [JsonPropertyName("source")] + public AdvisoryObservationSource Source { get; init; } = new(); + + [JsonPropertyName("upstream")] + public AdvisoryObservationUpstream Upstream { get; init; } = new(); + + [JsonPropertyName("linkset")] + public AdvisoryObservationLinkset Linkset { get; init; } = new(); + + [JsonPropertyName("createdAt")] + public DateTimeOffset CreatedAt { get; init; } +} + +internal sealed class AdvisoryObservationSource +{ + [JsonPropertyName("vendor")] + public string Vendor { get; init; } = string.Empty; + + [JsonPropertyName("stream")] + public string Stream { get; init; } = string.Empty; + + [JsonPropertyName("api")] + public string Api { get; init; } = string.Empty; + + [JsonPropertyName("collectorVersion")] + public string? CollectorVersion { get; init; } +} + +internal sealed class AdvisoryObservationUpstream +{ + [JsonPropertyName("upstreamId")] + public string UpstreamId { get; init; } = string.Empty; + + [JsonPropertyName("documentVersion")] + public string? 
DocumentVersion { get; init; } +} + +internal sealed class AdvisoryObservationLinkset +{ + [JsonPropertyName("aliases")] + public IReadOnlyList Aliases { get; init; } = Array.Empty(); + + [JsonPropertyName("purls")] + public IReadOnlyList Purls { get; init; } = Array.Empty(); + + [JsonPropertyName("cpes")] + public IReadOnlyList Cpes { get; init; } = Array.Empty(); + + [JsonPropertyName("references")] + public IReadOnlyList References { get; init; } = + Array.Empty(); +} + +internal sealed class AdvisoryObservationReference +{ + [JsonPropertyName("type")] + public string Type { get; init; } = string.Empty; + + [JsonPropertyName("url")] + public string Url { get; init; } = string.Empty; +} + +internal sealed class AdvisoryObservationLinksetAggregate +{ + [JsonPropertyName("aliases")] + public IReadOnlyList Aliases { get; init; } = Array.Empty(); + + [JsonPropertyName("purls")] + public IReadOnlyList Purls { get; init; } = Array.Empty(); + + [JsonPropertyName("cpes")] + public IReadOnlyList Cpes { get; init; } = Array.Empty(); + + [JsonPropertyName("references")] + public IReadOnlyList References { get; init; } = + Array.Empty(); +} diff --git a/src/StellaOps.Cli/Services/Models/AocIngestDryRunModels.cs b/src/Cli/StellaOps.Cli/Services/Models/AocIngestDryRunModels.cs similarity index 96% rename from src/StellaOps.Cli/Services/Models/AocIngestDryRunModels.cs rename to src/Cli/StellaOps.Cli/Services/Models/AocIngestDryRunModels.cs index 55fe3eef..24fbebf9 100644 --- a/src/StellaOps.Cli/Services/Models/AocIngestDryRunModels.cs +++ b/src/Cli/StellaOps.Cli/Services/Models/AocIngestDryRunModels.cs @@ -1,93 +1,93 @@ -using System; -using System.Collections.Generic; -using System.Text.Json.Serialization; - -namespace StellaOps.Cli.Services.Models; - -internal sealed class AocIngestDryRunRequest -{ - [JsonPropertyName("tenant")] - public string Tenant { get; init; } = string.Empty; - - [JsonPropertyName("source")] - public string Source { get; init; } = string.Empty; - - [JsonPropertyName("document")] - public AocIngestDryRunDocument Document { get; init; } = new(); -} - -internal sealed class AocIngestDryRunDocument -{ - [JsonPropertyName("name")] - public string? Name { get; init; } - - [JsonPropertyName("content")] - public string Content { get; init; } = string.Empty; - - [JsonPropertyName("contentType")] - public string ContentType { get; init; } = "application/json"; - - [JsonPropertyName("contentEncoding")] - public string? ContentEncoding { get; init; } -} - -internal sealed class AocIngestDryRunResponse -{ - [JsonPropertyName("source")] - public string? Source { get; init; } - - [JsonPropertyName("tenant")] - public string? Tenant { get; init; } - - [JsonPropertyName("guardVersion")] - public string? GuardVersion { get; init; } - - [JsonPropertyName("status")] - public string? Status { get; init; } - - [JsonPropertyName("document")] - public AocIngestDryRunDocumentResult Document { get; init; } = new(); - - [JsonPropertyName("violations")] - public IReadOnlyList Violations { get; init; } = - Array.Empty(); -} - -internal sealed class AocIngestDryRunDocumentResult -{ - [JsonPropertyName("contentHash")] - public string? ContentHash { get; init; } - - [JsonPropertyName("supersedes")] - public string? 
Supersedes { get; init; } - - [JsonPropertyName("provenance")] - public AocIngestDryRunProvenance Provenance { get; init; } = new(); -} - -internal sealed class AocIngestDryRunProvenance -{ - [JsonPropertyName("signature")] - public AocIngestDryRunSignature Signature { get; init; } = new(); -} - -internal sealed class AocIngestDryRunSignature -{ - [JsonPropertyName("format")] - public string? Format { get; init; } - - [JsonPropertyName("present")] - public bool Present { get; init; } -} - -internal sealed class AocIngestDryRunViolation -{ - [JsonPropertyName("code")] - public string Code { get; init; } = string.Empty; - - [JsonPropertyName("message")] - public string Message { get; init; } = string.Empty; - - [JsonPropertyName("path")] - public string? Path { get; init; } -} +using System; +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace StellaOps.Cli.Services.Models; + +internal sealed class AocIngestDryRunRequest +{ + [JsonPropertyName("tenant")] + public string Tenant { get; init; } = string.Empty; + + [JsonPropertyName("source")] + public string Source { get; init; } = string.Empty; + + [JsonPropertyName("document")] + public AocIngestDryRunDocument Document { get; init; } = new(); +} + +internal sealed class AocIngestDryRunDocument +{ + [JsonPropertyName("name")] + public string? Name { get; init; } + + [JsonPropertyName("content")] + public string Content { get; init; } = string.Empty; + + [JsonPropertyName("contentType")] + public string ContentType { get; init; } = "application/json"; + + [JsonPropertyName("contentEncoding")] + public string? ContentEncoding { get; init; } +} + +internal sealed class AocIngestDryRunResponse +{ + [JsonPropertyName("source")] + public string? Source { get; init; } + + [JsonPropertyName("tenant")] + public string? Tenant { get; init; } + + [JsonPropertyName("guardVersion")] + public string? GuardVersion { get; init; } + + [JsonPropertyName("status")] + public string? Status { get; init; } + + [JsonPropertyName("document")] + public AocIngestDryRunDocumentResult Document { get; init; } = new(); + + [JsonPropertyName("violations")] + public IReadOnlyList Violations { get; init; } = + Array.Empty(); +} + +internal sealed class AocIngestDryRunDocumentResult +{ + [JsonPropertyName("contentHash")] + public string? ContentHash { get; init; } + + [JsonPropertyName("supersedes")] + public string? Supersedes { get; init; } + + [JsonPropertyName("provenance")] + public AocIngestDryRunProvenance Provenance { get; init; } = new(); +} + +internal sealed class AocIngestDryRunProvenance +{ + [JsonPropertyName("signature")] + public AocIngestDryRunSignature Signature { get; init; } = new(); +} + +internal sealed class AocIngestDryRunSignature +{ + [JsonPropertyName("format")] + public string? Format { get; init; } + + [JsonPropertyName("present")] + public bool Present { get; init; } +} + +internal sealed class AocIngestDryRunViolation +{ + [JsonPropertyName("code")] + public string Code { get; init; } = string.Empty; + + [JsonPropertyName("message")] + public string Message { get; init; } = string.Empty; + + [JsonPropertyName("path")] + public string? 
Path { get; init; } +} diff --git a/src/StellaOps.Cli/Services/Models/AocVerifyModels.cs b/src/Cli/StellaOps.Cli/Services/Models/AocVerifyModels.cs similarity index 96% rename from src/StellaOps.Cli/Services/Models/AocVerifyModels.cs rename to src/Cli/StellaOps.Cli/Services/Models/AocVerifyModels.cs index baf28ae6..60fefc3e 100644 --- a/src/StellaOps.Cli/Services/Models/AocVerifyModels.cs +++ b/src/Cli/StellaOps.Cli/Services/Models/AocVerifyModels.cs @@ -1,100 +1,100 @@ -using System; -using System.Collections.Generic; -using System.Text.Json.Serialization; - -namespace StellaOps.Cli.Services.Models; - -internal sealed class AocVerifyRequest -{ - [JsonPropertyName("tenant")] - public string Tenant { get; init; } = string.Empty; - - [JsonPropertyName("since")] - public string? Since { get; init; } - - [JsonPropertyName("limit")] - public int? Limit { get; init; } - - [JsonPropertyName("sources")] - public IReadOnlyList? Sources { get; init; } - - [JsonPropertyName("codes")] - public IReadOnlyList? Codes { get; init; } -} - -internal sealed class AocVerifyResponse -{ - [JsonPropertyName("tenant")] - public string? Tenant { get; init; } - - [JsonPropertyName("window")] - public AocVerifyWindow Window { get; init; } = new(); - - [JsonPropertyName("checked")] - public AocVerifyChecked Checked { get; init; } = new(); - - [JsonPropertyName("violations")] - public IReadOnlyList Violations { get; init; } = - Array.Empty(); - - [JsonPropertyName("metrics")] - public AocVerifyMetrics Metrics { get; init; } = new(); - - [JsonPropertyName("truncated")] - public bool? Truncated { get; init; } -} - -internal sealed class AocVerifyWindow -{ - [JsonPropertyName("from")] - public DateTimeOffset? From { get; init; } - - [JsonPropertyName("to")] - public DateTimeOffset? To { get; init; } -} - -internal sealed class AocVerifyChecked -{ - [JsonPropertyName("advisories")] - public int Advisories { get; init; } - - [JsonPropertyName("vex")] - public int Vex { get; init; } -} - -internal sealed class AocVerifyViolation -{ - [JsonPropertyName("code")] - public string Code { get; init; } = string.Empty; - - [JsonPropertyName("count")] - public int Count { get; init; } - - [JsonPropertyName("examples")] - public IReadOnlyList Examples { get; init; } = - Array.Empty(); -} - -internal sealed class AocVerifyViolationExample -{ - [JsonPropertyName("source")] - public string? Source { get; init; } - - [JsonPropertyName("documentId")] - public string? DocumentId { get; init; } - - [JsonPropertyName("contentHash")] - public string? ContentHash { get; init; } - - [JsonPropertyName("path")] - public string? Path { get; init; } -} - -internal sealed class AocVerifyMetrics -{ - [JsonPropertyName("ingestion_write_total")] - public int? IngestionWriteTotal { get; init; } - - [JsonPropertyName("aoc_violation_total")] - public int? AocViolationTotal { get; init; } -} +using System; +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace StellaOps.Cli.Services.Models; + +internal sealed class AocVerifyRequest +{ + [JsonPropertyName("tenant")] + public string Tenant { get; init; } = string.Empty; + + [JsonPropertyName("since")] + public string? Since { get; init; } + + [JsonPropertyName("limit")] + public int? Limit { get; init; } + + [JsonPropertyName("sources")] + public IReadOnlyList? Sources { get; init; } + + [JsonPropertyName("codes")] + public IReadOnlyList? Codes { get; init; } +} + +internal sealed class AocVerifyResponse +{ + [JsonPropertyName("tenant")] + public string? 
Tenant { get; init; } + + [JsonPropertyName("window")] + public AocVerifyWindow Window { get; init; } = new(); + + [JsonPropertyName("checked")] + public AocVerifyChecked Checked { get; init; } = new(); + + [JsonPropertyName("violations")] + public IReadOnlyList Violations { get; init; } = + Array.Empty(); + + [JsonPropertyName("metrics")] + public AocVerifyMetrics Metrics { get; init; } = new(); + + [JsonPropertyName("truncated")] + public bool? Truncated { get; init; } +} + +internal sealed class AocVerifyWindow +{ + [JsonPropertyName("from")] + public DateTimeOffset? From { get; init; } + + [JsonPropertyName("to")] + public DateTimeOffset? To { get; init; } +} + +internal sealed class AocVerifyChecked +{ + [JsonPropertyName("advisories")] + public int Advisories { get; init; } + + [JsonPropertyName("vex")] + public int Vex { get; init; } +} + +internal sealed class AocVerifyViolation +{ + [JsonPropertyName("code")] + public string Code { get; init; } = string.Empty; + + [JsonPropertyName("count")] + public int Count { get; init; } + + [JsonPropertyName("examples")] + public IReadOnlyList Examples { get; init; } = + Array.Empty(); +} + +internal sealed class AocVerifyViolationExample +{ + [JsonPropertyName("source")] + public string? Source { get; init; } + + [JsonPropertyName("documentId")] + public string? DocumentId { get; init; } + + [JsonPropertyName("contentHash")] + public string? ContentHash { get; init; } + + [JsonPropertyName("path")] + public string? Path { get; init; } +} + +internal sealed class AocVerifyMetrics +{ + [JsonPropertyName("ingestion_write_total")] + public int? IngestionWriteTotal { get; init; } + + [JsonPropertyName("aoc_violation_total")] + public int? AocViolationTotal { get; init; } +} diff --git a/src/StellaOps.Cli/Services/Models/AuthorityRevocationExportResult.cs b/src/Cli/StellaOps.Cli/Services/Models/AuthorityRevocationExportResult.cs similarity index 100% rename from src/StellaOps.Cli/Services/Models/AuthorityRevocationExportResult.cs rename to src/Cli/StellaOps.Cli/Services/Models/AuthorityRevocationExportResult.cs diff --git a/src/StellaOps.Cli/Services/Models/ExcititorExportDownloadResult.cs b/src/Cli/StellaOps.Cli/Services/Models/ExcititorExportDownloadResult.cs similarity index 100% rename from src/StellaOps.Cli/Services/Models/ExcititorExportDownloadResult.cs rename to src/Cli/StellaOps.Cli/Services/Models/ExcititorExportDownloadResult.cs diff --git a/src/StellaOps.Cli/Services/Models/ExcititorOperationResult.cs b/src/Cli/StellaOps.Cli/Services/Models/ExcititorOperationResult.cs similarity index 100% rename from src/StellaOps.Cli/Services/Models/ExcititorOperationResult.cs rename to src/Cli/StellaOps.Cli/Services/Models/ExcititorOperationResult.cs diff --git a/src/StellaOps.Cli/Services/Models/ExcititorProviderSummary.cs b/src/Cli/StellaOps.Cli/Services/Models/ExcititorProviderSummary.cs similarity index 100% rename from src/StellaOps.Cli/Services/Models/ExcititorProviderSummary.cs rename to src/Cli/StellaOps.Cli/Services/Models/ExcititorProviderSummary.cs diff --git a/src/StellaOps.Cli/Services/Models/JobTriggerResult.cs b/src/Cli/StellaOps.Cli/Services/Models/JobTriggerResult.cs similarity index 100% rename from src/StellaOps.Cli/Services/Models/JobTriggerResult.cs rename to src/Cli/StellaOps.Cli/Services/Models/JobTriggerResult.cs diff --git a/src/StellaOps.Cli/Services/Models/OfflineKitModels.cs b/src/Cli/StellaOps.Cli/Services/Models/OfflineKitModels.cs similarity index 96% rename from src/StellaOps.Cli/Services/Models/OfflineKitModels.cs 
rename to src/Cli/StellaOps.Cli/Services/Models/OfflineKitModels.cs index da408685..a19d091d 100644 --- a/src/StellaOps.Cli/Services/Models/OfflineKitModels.cs +++ b/src/Cli/StellaOps.Cli/Services/Models/OfflineKitModels.cs @@ -1,111 +1,111 @@ -using System; -using System.Collections.Generic; - -namespace StellaOps.Cli.Services.Models; - -internal sealed record OfflineKitBundleDescriptor( - string BundleId, - string BundleName, - string BundleSha256, - long BundleSize, - Uri BundleDownloadUri, - string ManifestName, - string ManifestSha256, - Uri ManifestDownloadUri, - DateTimeOffset CapturedAt, - string? Channel, - string? Kind, - bool IsDelta, - string? BaseBundleId, - string? BundleSignatureName, - Uri? BundleSignatureDownloadUri, - string? ManifestSignatureName, - Uri? ManifestSignatureDownloadUri, - long? ManifestSize); - -internal sealed record OfflineKitDownloadResult( - OfflineKitBundleDescriptor Descriptor, - string BundlePath, - string ManifestPath, - string? BundleSignaturePath, - string? ManifestSignaturePath, - string MetadataPath, - bool FromCache); - -internal sealed record OfflineKitImportRequest( - string BundlePath, - string? ManifestPath, - string? BundleSignaturePath, - string? ManifestSignaturePath, - string? BundleId, - string? BundleSha256, - long? BundleSize, - DateTimeOffset? CapturedAt, - string? Channel, - string? Kind, - bool? IsDelta, - string? BaseBundleId, - string? ManifestSha256, - long? ManifestSize); - -internal sealed record OfflineKitImportResult( - string? ImportId, - string? Status, - DateTimeOffset SubmittedAt, - string? Message); - -internal sealed record OfflineKitStatus( - string? BundleId, - string? Channel, - string? Kind, - bool IsDelta, - string? BaseBundleId, - DateTimeOffset? CapturedAt, - DateTimeOffset? ImportedAt, - string? BundleSha256, - long? BundleSize, - IReadOnlyList Components); - -internal sealed record OfflineKitComponentStatus( - string Name, - string? Version, - string? Digest, - DateTimeOffset? CapturedAt, - long? SizeBytes); - -internal sealed record OfflineKitMetadataDocument -{ - public string? BundleId { get; init; } - - public string BundleName { get; init; } = string.Empty; - - public string BundleSha256 { get; init; } = string.Empty; - - public long BundleSize { get; init; } - - public string BundlePath { get; init; } = string.Empty; - - public DateTimeOffset CapturedAt { get; init; } - - public DateTimeOffset DownloadedAt { get; init; } - - public string? Channel { get; init; } - - public string? Kind { get; init; } - - public bool IsDelta { get; init; } - - public string? BaseBundleId { get; init; } - - public string ManifestName { get; init; } = string.Empty; - - public string ManifestSha256 { get; init; } = string.Empty; - - public long? ManifestSize { get; init; } - - public string ManifestPath { get; init; } = string.Empty; - - public string? BundleSignaturePath { get; init; } - - public string? ManifestSignaturePath { get; init; } -} +using System; +using System.Collections.Generic; + +namespace StellaOps.Cli.Services.Models; + +internal sealed record OfflineKitBundleDescriptor( + string BundleId, + string BundleName, + string BundleSha256, + long BundleSize, + Uri BundleDownloadUri, + string ManifestName, + string ManifestSha256, + Uri ManifestDownloadUri, + DateTimeOffset CapturedAt, + string? Channel, + string? Kind, + bool IsDelta, + string? BaseBundleId, + string? BundleSignatureName, + Uri? BundleSignatureDownloadUri, + string? ManifestSignatureName, + Uri? ManifestSignatureDownloadUri, + long? 
ManifestSize); + +internal sealed record OfflineKitDownloadResult( + OfflineKitBundleDescriptor Descriptor, + string BundlePath, + string ManifestPath, + string? BundleSignaturePath, + string? ManifestSignaturePath, + string MetadataPath, + bool FromCache); + +internal sealed record OfflineKitImportRequest( + string BundlePath, + string? ManifestPath, + string? BundleSignaturePath, + string? ManifestSignaturePath, + string? BundleId, + string? BundleSha256, + long? BundleSize, + DateTimeOffset? CapturedAt, + string? Channel, + string? Kind, + bool? IsDelta, + string? BaseBundleId, + string? ManifestSha256, + long? ManifestSize); + +internal sealed record OfflineKitImportResult( + string? ImportId, + string? Status, + DateTimeOffset SubmittedAt, + string? Message); + +internal sealed record OfflineKitStatus( + string? BundleId, + string? Channel, + string? Kind, + bool IsDelta, + string? BaseBundleId, + DateTimeOffset? CapturedAt, + DateTimeOffset? ImportedAt, + string? BundleSha256, + long? BundleSize, + IReadOnlyList Components); + +internal sealed record OfflineKitComponentStatus( + string Name, + string? Version, + string? Digest, + DateTimeOffset? CapturedAt, + long? SizeBytes); + +internal sealed record OfflineKitMetadataDocument +{ + public string? BundleId { get; init; } + + public string BundleName { get; init; } = string.Empty; + + public string BundleSha256 { get; init; } = string.Empty; + + public long BundleSize { get; init; } + + public string BundlePath { get; init; } = string.Empty; + + public DateTimeOffset CapturedAt { get; init; } + + public DateTimeOffset DownloadedAt { get; init; } + + public string? Channel { get; init; } + + public string? Kind { get; init; } + + public bool IsDelta { get; init; } + + public string? BaseBundleId { get; init; } + + public string ManifestName { get; init; } = string.Empty; + + public string ManifestSha256 { get; init; } = string.Empty; + + public long? ManifestSize { get; init; } + + public string ManifestPath { get; init; } = string.Empty; + + public string? BundleSignaturePath { get; init; } + + public string? ManifestSignaturePath { get; init; } +} diff --git a/src/StellaOps.Cli/Services/Models/PolicyActivationModels.cs b/src/Cli/StellaOps.Cli/Services/Models/PolicyActivationModels.cs similarity index 96% rename from src/StellaOps.Cli/Services/Models/PolicyActivationModels.cs rename to src/Cli/StellaOps.Cli/Services/Models/PolicyActivationModels.cs index 4eb954a3..186097d7 100644 --- a/src/StellaOps.Cli/Services/Models/PolicyActivationModels.cs +++ b/src/Cli/StellaOps.Cli/Services/Models/PolicyActivationModels.cs @@ -1,30 +1,30 @@ -using System; -using System.Collections.Generic; - -namespace StellaOps.Cli.Services.Models; - -internal sealed record PolicyActivationRequest( - bool RunNow, - DateTimeOffset? ScheduledAt, - string? Priority, - bool Rollback, - string? IncidentId, - string? Comment); - -internal sealed record PolicyActivationResult( - string Status, - PolicyActivationRevision Revision); - -internal sealed record PolicyActivationRevision( - string PolicyId, - int Version, - string Status, - bool RequiresTwoPersonApproval, - DateTimeOffset CreatedAt, - DateTimeOffset? ActivatedAt, - IReadOnlyList Approvals); - -internal sealed record PolicyActivationApproval( - string ActorId, - DateTimeOffset ApprovedAt, - string? Comment); +using System; +using System.Collections.Generic; + +namespace StellaOps.Cli.Services.Models; + +internal sealed record PolicyActivationRequest( + bool RunNow, + DateTimeOffset? ScheduledAt, + string? 
Priority, + bool Rollback, + string? IncidentId, + string? Comment); + +internal sealed record PolicyActivationResult( + string Status, + PolicyActivationRevision Revision); + +internal sealed record PolicyActivationRevision( + string PolicyId, + int Version, + string Status, + bool RequiresTwoPersonApproval, + DateTimeOffset CreatedAt, + DateTimeOffset? ActivatedAt, + IReadOnlyList Approvals); + +internal sealed record PolicyActivationApproval( + string ActorId, + DateTimeOffset ApprovedAt, + string? Comment); diff --git a/src/StellaOps.Cli/Services/Models/PolicyFindingsModels.cs b/src/Cli/StellaOps.Cli/Services/Models/PolicyFindingsModels.cs similarity index 96% rename from src/StellaOps.Cli/Services/Models/PolicyFindingsModels.cs rename to src/Cli/StellaOps.Cli/Services/Models/PolicyFindingsModels.cs index a75edc0f..627a32ba 100644 --- a/src/StellaOps.Cli/Services/Models/PolicyFindingsModels.cs +++ b/src/Cli/StellaOps.Cli/Services/Models/PolicyFindingsModels.cs @@ -1,50 +1,50 @@ -using System; -using System.Collections.Generic; - -namespace StellaOps.Cli.Services.Models; - -internal sealed record PolicyFindingsQuery( - string PolicyId, - IReadOnlyList SbomIds, - IReadOnlyList Statuses, - IReadOnlyList Severities, - string? Cursor, - int? Page, - int? PageSize, - DateTimeOffset? Since); - -internal sealed record PolicyFindingsPage( - IReadOnlyList Items, - string? NextCursor, - int? TotalCount); - -internal sealed record PolicyFindingDocument( - string FindingId, - string Status, - PolicyFindingSeverity Severity, - string SbomId, - IReadOnlyList AdvisoryIds, - PolicyFindingVexMetadata? Vex, - int PolicyVersion, - DateTimeOffset UpdatedAt, - string? RunId); - -internal sealed record PolicyFindingSeverity(string Normalized, double? Score); - -internal sealed record PolicyFindingVexMetadata(string? WinningStatementId, string? Source, string? Status); - -internal sealed record PolicyFindingExplainResult( - string FindingId, - int PolicyVersion, - IReadOnlyList Steps, - IReadOnlyList SealedHints); - -internal sealed record PolicyFindingExplainStep( - string Rule, - string? Status, - string? Action, - double? Score, - IReadOnlyDictionary Inputs, - IReadOnlyDictionary? Evidence); - -internal sealed record PolicyFindingExplainHint(string Message); +using System; +using System.Collections.Generic; + +namespace StellaOps.Cli.Services.Models; + +internal sealed record PolicyFindingsQuery( + string PolicyId, + IReadOnlyList SbomIds, + IReadOnlyList Statuses, + IReadOnlyList Severities, + string? Cursor, + int? Page, + int? PageSize, + DateTimeOffset? Since); + +internal sealed record PolicyFindingsPage( + IReadOnlyList Items, + string? NextCursor, + int? TotalCount); + +internal sealed record PolicyFindingDocument( + string FindingId, + string Status, + PolicyFindingSeverity Severity, + string SbomId, + IReadOnlyList AdvisoryIds, + PolicyFindingVexMetadata? Vex, + int PolicyVersion, + DateTimeOffset UpdatedAt, + string? RunId); + +internal sealed record PolicyFindingSeverity(string Normalized, double? Score); + +internal sealed record PolicyFindingVexMetadata(string? WinningStatementId, string? Source, string? Status); + +internal sealed record PolicyFindingExplainResult( + string FindingId, + int PolicyVersion, + IReadOnlyList Steps, + IReadOnlyList SealedHints); + +internal sealed record PolicyFindingExplainStep( + string Rule, + string? Status, + string? Action, + double? Score, + IReadOnlyDictionary Inputs, + IReadOnlyDictionary? 
Evidence); + +internal sealed record PolicyFindingExplainHint(string Message); diff --git a/src/StellaOps.Cli/Services/Models/PolicySimulationModels.cs b/src/Cli/StellaOps.Cli/Services/Models/PolicySimulationModels.cs similarity index 96% rename from src/StellaOps.Cli/Services/Models/PolicySimulationModels.cs rename to src/Cli/StellaOps.Cli/Services/Models/PolicySimulationModels.cs index 94f5dc82..25701279 100644 --- a/src/StellaOps.Cli/Services/Models/PolicySimulationModels.cs +++ b/src/Cli/StellaOps.Cli/Services/Models/PolicySimulationModels.cs @@ -1,26 +1,26 @@ -using System.Collections.Generic; - -namespace StellaOps.Cli.Services.Models; - -internal sealed record PolicySimulationInput( - int? BaseVersion, - int? CandidateVersion, - IReadOnlyList SbomSet, - IReadOnlyDictionary Environment, - bool Explain); - -internal sealed record PolicySimulationResult( - PolicySimulationDiff Diff, - string? ExplainUri); - -internal sealed record PolicySimulationDiff( - string? SchemaVersion, - int Added, - int Removed, - int Unchanged, - IReadOnlyDictionary BySeverity, - IReadOnlyList RuleHits); - -internal sealed record PolicySimulationSeverityDelta(int? Up, int? Down); - -internal sealed record PolicySimulationRuleDelta(string RuleId, string RuleName, int? Up, int? Down); +using System.Collections.Generic; + +namespace StellaOps.Cli.Services.Models; + +internal sealed record PolicySimulationInput( + int? BaseVersion, + int? CandidateVersion, + IReadOnlyList SbomSet, + IReadOnlyDictionary Environment, + bool Explain); + +internal sealed record PolicySimulationResult( + PolicySimulationDiff Diff, + string? ExplainUri); + +internal sealed record PolicySimulationDiff( + string? SchemaVersion, + int Added, + int Removed, + int Unchanged, + IReadOnlyDictionary BySeverity, + IReadOnlyList RuleHits); + +internal sealed record PolicySimulationSeverityDelta(int? Up, int? Down); + +internal sealed record PolicySimulationRuleDelta(string RuleId, string RuleName, int? Up, int? 
Down); diff --git a/src/StellaOps.Cli/Services/Models/RuntimePolicyEvaluationModels.cs b/src/Cli/StellaOps.Cli/Services/Models/RuntimePolicyEvaluationModels.cs similarity index 100% rename from src/StellaOps.Cli/Services/Models/RuntimePolicyEvaluationModels.cs rename to src/Cli/StellaOps.Cli/Services/Models/RuntimePolicyEvaluationModels.cs diff --git a/src/StellaOps.Cli/Services/Models/ScannerArtifactResult.cs b/src/Cli/StellaOps.Cli/Services/Models/ScannerArtifactResult.cs similarity index 100% rename from src/StellaOps.Cli/Services/Models/ScannerArtifactResult.cs rename to src/Cli/StellaOps.Cli/Services/Models/ScannerArtifactResult.cs diff --git a/src/StellaOps.Cli/Services/Models/Transport/JobRunResponse.cs b/src/Cli/StellaOps.Cli/Services/Models/Transport/JobRunResponse.cs similarity index 100% rename from src/StellaOps.Cli/Services/Models/Transport/JobRunResponse.cs rename to src/Cli/StellaOps.Cli/Services/Models/Transport/JobRunResponse.cs diff --git a/src/StellaOps.Cli/Services/Models/Transport/JobTriggerRequest.cs b/src/Cli/StellaOps.Cli/Services/Models/Transport/JobTriggerRequest.cs similarity index 100% rename from src/StellaOps.Cli/Services/Models/Transport/JobTriggerRequest.cs rename to src/Cli/StellaOps.Cli/Services/Models/Transport/JobTriggerRequest.cs diff --git a/src/StellaOps.Cli/Services/Models/Transport/OfflineKitTransport.cs b/src/Cli/StellaOps.Cli/Services/Models/Transport/OfflineKitTransport.cs similarity index 95% rename from src/StellaOps.Cli/Services/Models/Transport/OfflineKitTransport.cs rename to src/Cli/StellaOps.Cli/Services/Models/Transport/OfflineKitTransport.cs index d6eac33f..00909390 100644 --- a/src/StellaOps.Cli/Services/Models/Transport/OfflineKitTransport.cs +++ b/src/Cli/StellaOps.Cli/Services/Models/Transport/OfflineKitTransport.cs @@ -1,103 +1,103 @@ -using System; -using System.Collections.Generic; - -namespace StellaOps.Cli.Services.Models.Transport; - -internal sealed class OfflineKitBundleDescriptorTransport -{ - public string? BundleId { get; set; } - - public string? BundleName { get; set; } - - public string? BundleSha256 { get; set; } - - public long BundleSize { get; set; } - - public string? BundleUrl { get; set; } - - public string? BundlePath { get; set; } - - public string? BundleSignatureName { get; set; } - - public string? BundleSignatureUrl { get; set; } - - public string? BundleSignaturePath { get; set; } - - public string? ManifestName { get; set; } - - public string? ManifestSha256 { get; set; } - - public long? ManifestSize { get; set; } - - public string? ManifestUrl { get; set; } - - public string? ManifestPath { get; set; } - - public string? ManifestSignatureName { get; set; } - - public string? ManifestSignatureUrl { get; set; } - - public string? ManifestSignaturePath { get; set; } - - public DateTimeOffset? CapturedAt { get; set; } - - public string? Channel { get; set; } - - public string? Kind { get; set; } - - public bool? IsDelta { get; set; } - - public string? BaseBundleId { get; set; } -} - -internal sealed class OfflineKitStatusBundleTransport -{ - public string? BundleId { get; set; } - - public string? Channel { get; set; } - - public string? Kind { get; set; } - - public bool? IsDelta { get; set; } - - public string? BaseBundleId { get; set; } - - public string? BundleSha256 { get; set; } - - public long? BundleSize { get; set; } - - public DateTimeOffset? CapturedAt { get; set; } - - public DateTimeOffset? 
ImportedAt { get; set; } -} - -internal sealed class OfflineKitStatusTransport -{ - public OfflineKitStatusBundleTransport? Current { get; set; } - - public List? Components { get; set; } -} - -internal sealed class OfflineKitComponentStatusTransport -{ - public string? Name { get; set; } - - public string? Version { get; set; } - - public string? Digest { get; set; } - - public DateTimeOffset? CapturedAt { get; set; } - - public long? SizeBytes { get; set; } -} - -internal sealed class OfflineKitImportResponseTransport -{ - public string? ImportId { get; set; } - - public string? Status { get; set; } - - public DateTimeOffset? SubmittedAt { get; set; } - - public string? Message { get; set; } -} +using System; +using System.Collections.Generic; + +namespace StellaOps.Cli.Services.Models.Transport; + +internal sealed class OfflineKitBundleDescriptorTransport +{ + public string? BundleId { get; set; } + + public string? BundleName { get; set; } + + public string? BundleSha256 { get; set; } + + public long BundleSize { get; set; } + + public string? BundleUrl { get; set; } + + public string? BundlePath { get; set; } + + public string? BundleSignatureName { get; set; } + + public string? BundleSignatureUrl { get; set; } + + public string? BundleSignaturePath { get; set; } + + public string? ManifestName { get; set; } + + public string? ManifestSha256 { get; set; } + + public long? ManifestSize { get; set; } + + public string? ManifestUrl { get; set; } + + public string? ManifestPath { get; set; } + + public string? ManifestSignatureName { get; set; } + + public string? ManifestSignatureUrl { get; set; } + + public string? ManifestSignaturePath { get; set; } + + public DateTimeOffset? CapturedAt { get; set; } + + public string? Channel { get; set; } + + public string? Kind { get; set; } + + public bool? IsDelta { get; set; } + + public string? BaseBundleId { get; set; } +} + +internal sealed class OfflineKitStatusBundleTransport +{ + public string? BundleId { get; set; } + + public string? Channel { get; set; } + + public string? Kind { get; set; } + + public bool? IsDelta { get; set; } + + public string? BaseBundleId { get; set; } + + public string? BundleSha256 { get; set; } + + public long? BundleSize { get; set; } + + public DateTimeOffset? CapturedAt { get; set; } + + public DateTimeOffset? ImportedAt { get; set; } +} + +internal sealed class OfflineKitStatusTransport +{ + public OfflineKitStatusBundleTransport? Current { get; set; } + + public List? Components { get; set; } +} + +internal sealed class OfflineKitComponentStatusTransport +{ + public string? Name { get; set; } + + public string? Version { get; set; } + + public string? Digest { get; set; } + + public DateTimeOffset? CapturedAt { get; set; } + + public long? SizeBytes { get; set; } +} + +internal sealed class OfflineKitImportResponseTransport +{ + public string? ImportId { get; set; } + + public string? Status { get; set; } + + public DateTimeOffset? SubmittedAt { get; set; } + + public string? 
Message { get; set; } +} diff --git a/src/StellaOps.Cli/Services/Models/Transport/PolicyActivationTransport.cs b/src/Cli/StellaOps.Cli/Services/Models/Transport/PolicyActivationTransport.cs similarity index 95% rename from src/StellaOps.Cli/Services/Models/Transport/PolicyActivationTransport.cs rename to src/Cli/StellaOps.Cli/Services/Models/Transport/PolicyActivationTransport.cs index 31ada9a4..230b9ff9 100644 --- a/src/StellaOps.Cli/Services/Models/Transport/PolicyActivationTransport.cs +++ b/src/Cli/StellaOps.Cli/Services/Models/Transport/PolicyActivationTransport.cs @@ -1,52 +1,52 @@ -using System; -using System.Collections.Generic; - -namespace StellaOps.Cli.Services.Models.Transport; - -internal sealed class PolicyActivationRequestDocument -{ - public string? Comment { get; set; } - - public bool? RunNow { get; set; } - - public DateTimeOffset? ScheduledAt { get; set; } - - public string? Priority { get; set; } - - public bool? Rollback { get; set; } - - public string? IncidentId { get; set; } -} - -internal sealed class PolicyActivationResponseDocument -{ - public string? Status { get; set; } - - public PolicyActivationRevisionDocument? Revision { get; set; } -} - -internal sealed class PolicyActivationRevisionDocument -{ - public string? PackId { get; set; } - - public int? Version { get; set; } - - public string? Status { get; set; } - - public bool? RequiresTwoPersonApproval { get; set; } - - public DateTimeOffset? CreatedAt { get; set; } - - public DateTimeOffset? ActivatedAt { get; set; } - - public List? Approvals { get; set; } -} - -internal sealed class PolicyActivationApprovalDocument -{ - public string? ActorId { get; set; } - - public DateTimeOffset? ApprovedAt { get; set; } - - public string? Comment { get; set; } -} +using System; +using System.Collections.Generic; + +namespace StellaOps.Cli.Services.Models.Transport; + +internal sealed class PolicyActivationRequestDocument +{ + public string? Comment { get; set; } + + public bool? RunNow { get; set; } + + public DateTimeOffset? ScheduledAt { get; set; } + + public string? Priority { get; set; } + + public bool? Rollback { get; set; } + + public string? IncidentId { get; set; } +} + +internal sealed class PolicyActivationResponseDocument +{ + public string? Status { get; set; } + + public PolicyActivationRevisionDocument? Revision { get; set; } +} + +internal sealed class PolicyActivationRevisionDocument +{ + public string? PackId { get; set; } + + public int? Version { get; set; } + + public string? Status { get; set; } + + public bool? RequiresTwoPersonApproval { get; set; } + + public DateTimeOffset? CreatedAt { get; set; } + + public DateTimeOffset? ActivatedAt { get; set; } + + public List? Approvals { get; set; } +} + +internal sealed class PolicyActivationApprovalDocument +{ + public string? ActorId { get; set; } + + public DateTimeOffset? ApprovedAt { get; set; } + + public string? 
Comment { get; set; } +} diff --git a/src/StellaOps.Cli/Services/Models/Transport/PolicyFindingsTransport.cs b/src/Cli/StellaOps.Cli/Services/Models/Transport/PolicyFindingsTransport.cs similarity index 95% rename from src/StellaOps.Cli/Services/Models/Transport/PolicyFindingsTransport.cs rename to src/Cli/StellaOps.Cli/Services/Models/Transport/PolicyFindingsTransport.cs index 77d81563..b8961ed4 100644 --- a/src/StellaOps.Cli/Services/Models/Transport/PolicyFindingsTransport.cs +++ b/src/Cli/StellaOps.Cli/Services/Models/Transport/PolicyFindingsTransport.cs @@ -1,82 +1,82 @@ -using System; -using System.Collections.Generic; -using System.Text.Json; - -namespace StellaOps.Cli.Services.Models.Transport; - -internal sealed class PolicyFindingsResponseDocument -{ - public List? Items { get; set; } - - public string? NextCursor { get; set; } - - public int? TotalCount { get; set; } -} - -internal sealed class PolicyFindingDocumentDocument -{ - public string? FindingId { get; set; } - - public string? Status { get; set; } - - public PolicyFindingSeverityDocument? Severity { get; set; } - - public string? SbomId { get; set; } - - public List? AdvisoryIds { get; set; } - - public PolicyFindingVexDocument? Vex { get; set; } - - public int? PolicyVersion { get; set; } - - public DateTimeOffset? UpdatedAt { get; set; } - - public string? RunId { get; set; } -} - -internal sealed class PolicyFindingSeverityDocument -{ - public string? Normalized { get; set; } - - public double? Score { get; set; } -} - -internal sealed class PolicyFindingVexDocument -{ - public string? WinningStatementId { get; set; } - - public string? Source { get; set; } - - public string? Status { get; set; } -} - -internal sealed class PolicyFindingExplainResponseDocument -{ - public string? FindingId { get; set; } - - public int? PolicyVersion { get; set; } - - public List? Steps { get; set; } - - public List? SealedHints { get; set; } -} - -internal sealed class PolicyFindingExplainStepDocument -{ - public string? Rule { get; set; } - - public string? Status { get; set; } - - public string? Action { get; set; } - - public double? Score { get; set; } - - public Dictionary? Inputs { get; set; } - - public Dictionary? Evidence { get; set; } -} - -internal sealed class PolicyFindingExplainHintDocument -{ - public string? Message { get; set; } -} +using System; +using System.Collections.Generic; +using System.Text.Json; + +namespace StellaOps.Cli.Services.Models.Transport; + +internal sealed class PolicyFindingsResponseDocument +{ + public List? Items { get; set; } + + public string? NextCursor { get; set; } + + public int? TotalCount { get; set; } +} + +internal sealed class PolicyFindingDocumentDocument +{ + public string? FindingId { get; set; } + + public string? Status { get; set; } + + public PolicyFindingSeverityDocument? Severity { get; set; } + + public string? SbomId { get; set; } + + public List? AdvisoryIds { get; set; } + + public PolicyFindingVexDocument? Vex { get; set; } + + public int? PolicyVersion { get; set; } + + public DateTimeOffset? UpdatedAt { get; set; } + + public string? RunId { get; set; } +} + +internal sealed class PolicyFindingSeverityDocument +{ + public string? Normalized { get; set; } + + public double? Score { get; set; } +} + +internal sealed class PolicyFindingVexDocument +{ + public string? WinningStatementId { get; set; } + + public string? Source { get; set; } + + public string? Status { get; set; } +} + +internal sealed class PolicyFindingExplainResponseDocument +{ + public string? 
FindingId { get; set; } + + public int? PolicyVersion { get; set; } + + public List? Steps { get; set; } + + public List? SealedHints { get; set; } +} + +internal sealed class PolicyFindingExplainStepDocument +{ + public string? Rule { get; set; } + + public string? Status { get; set; } + + public string? Action { get; set; } + + public double? Score { get; set; } + + public Dictionary? Inputs { get; set; } + + public Dictionary? Evidence { get; set; } +} + +internal sealed class PolicyFindingExplainHintDocument +{ + public string? Message { get; set; } +} diff --git a/src/StellaOps.Cli/Services/Models/Transport/PolicySimulationTransport.cs b/src/Cli/StellaOps.Cli/Services/Models/Transport/PolicySimulationTransport.cs similarity index 95% rename from src/StellaOps.Cli/Services/Models/Transport/PolicySimulationTransport.cs rename to src/Cli/StellaOps.Cli/Services/Models/Transport/PolicySimulationTransport.cs index 6ab7889f..b856b8f0 100644 --- a/src/StellaOps.Cli/Services/Models/Transport/PolicySimulationTransport.cs +++ b/src/Cli/StellaOps.Cli/Services/Models/Transport/PolicySimulationTransport.cs @@ -1,57 +1,57 @@ -using System.Collections.Generic; -using System.Text.Json; - -namespace StellaOps.Cli.Services.Models.Transport; - -internal sealed class PolicySimulationRequestDocument -{ - public int? BaseVersion { get; set; } - - public int? CandidateVersion { get; set; } - - public IReadOnlyList? SbomSet { get; set; } - - public Dictionary? Env { get; set; } - - public bool? Explain { get; set; } -} - -internal sealed class PolicySimulationResponseDocument -{ - public PolicySimulationDiffDocument? Diff { get; set; } - - public string? ExplainUri { get; set; } -} - -internal sealed class PolicySimulationDiffDocument -{ - public string? SchemaVersion { get; set; } - - public int? Added { get; set; } - - public int? Removed { get; set; } - - public int? Unchanged { get; set; } - - public Dictionary? BySeverity { get; set; } - - public List? RuleHits { get; set; } -} - -internal sealed class PolicySimulationSeverityDeltaDocument -{ - public int? Up { get; set; } - - public int? Down { get; set; } -} - -internal sealed class PolicySimulationRuleDeltaDocument -{ - public string? RuleId { get; set; } - - public string? RuleName { get; set; } - - public int? Up { get; set; } - - public int? Down { get; set; } -} +using System.Collections.Generic; +using System.Text.Json; + +namespace StellaOps.Cli.Services.Models.Transport; + +internal sealed class PolicySimulationRequestDocument +{ + public int? BaseVersion { get; set; } + + public int? CandidateVersion { get; set; } + + public IReadOnlyList? SbomSet { get; set; } + + public Dictionary? Env { get; set; } + + public bool? Explain { get; set; } +} + +internal sealed class PolicySimulationResponseDocument +{ + public PolicySimulationDiffDocument? Diff { get; set; } + + public string? ExplainUri { get; set; } +} + +internal sealed class PolicySimulationDiffDocument +{ + public string? SchemaVersion { get; set; } + + public int? Added { get; set; } + + public int? Removed { get; set; } + + public int? Unchanged { get; set; } + + public Dictionary? BySeverity { get; set; } + + public List? RuleHits { get; set; } +} + +internal sealed class PolicySimulationSeverityDeltaDocument +{ + public int? Up { get; set; } + + public int? Down { get; set; } +} + +internal sealed class PolicySimulationRuleDeltaDocument +{ + public string? RuleId { get; set; } + + public string? RuleName { get; set; } + + public int? Up { get; set; } + + public int? 
Down { get; set; } +} diff --git a/src/StellaOps.Cli/Services/Models/Transport/ProblemDocument.cs b/src/Cli/StellaOps.Cli/Services/Models/Transport/ProblemDocument.cs similarity index 100% rename from src/StellaOps.Cli/Services/Models/Transport/ProblemDocument.cs rename to src/Cli/StellaOps.Cli/Services/Models/Transport/ProblemDocument.cs diff --git a/src/StellaOps.Cli/Services/Models/Transport/RuntimePolicyEvaluationTransport.cs b/src/Cli/StellaOps.Cli/Services/Models/Transport/RuntimePolicyEvaluationTransport.cs similarity index 100% rename from src/StellaOps.Cli/Services/Models/Transport/RuntimePolicyEvaluationTransport.cs rename to src/Cli/StellaOps.Cli/Services/Models/Transport/RuntimePolicyEvaluationTransport.cs diff --git a/src/StellaOps.Cli/Services/PolicyApiException.cs b/src/Cli/StellaOps.Cli/Services/PolicyApiException.cs similarity index 96% rename from src/StellaOps.Cli/Services/PolicyApiException.cs rename to src/Cli/StellaOps.Cli/Services/PolicyApiException.cs index bbfa0c43..13b4b6f7 100644 --- a/src/StellaOps.Cli/Services/PolicyApiException.cs +++ b/src/Cli/StellaOps.Cli/Services/PolicyApiException.cs @@ -1,18 +1,18 @@ -using System; -using System.Net; - -namespace StellaOps.Cli.Services; - -internal sealed class PolicyApiException : Exception -{ - public PolicyApiException(string message, HttpStatusCode statusCode, string? errorCode, Exception? innerException = null) - : base(message, innerException) - { - StatusCode = statusCode; - ErrorCode = errorCode; - } - - public HttpStatusCode StatusCode { get; } - - public string? ErrorCode { get; } -} +using System; +using System.Net; + +namespace StellaOps.Cli.Services; + +internal sealed class PolicyApiException : Exception +{ + public PolicyApiException(string message, HttpStatusCode statusCode, string? errorCode, Exception? innerException = null) + : base(message, innerException) + { + StatusCode = statusCode; + ErrorCode = errorCode; + } + + public HttpStatusCode StatusCode { get; } + + public string? 
ErrorCode { get; } +} diff --git a/src/StellaOps.Cli/Services/ScannerExecutionResult.cs b/src/Cli/StellaOps.Cli/Services/ScannerExecutionResult.cs similarity index 100% rename from src/StellaOps.Cli/Services/ScannerExecutionResult.cs rename to src/Cli/StellaOps.Cli/Services/ScannerExecutionResult.cs diff --git a/src/StellaOps.Cli/Services/ScannerExecutor.cs b/src/Cli/StellaOps.Cli/Services/ScannerExecutor.cs similarity index 100% rename from src/StellaOps.Cli/Services/ScannerExecutor.cs rename to src/Cli/StellaOps.Cli/Services/ScannerExecutor.cs diff --git a/src/StellaOps.Cli/Services/ScannerInstaller.cs b/src/Cli/StellaOps.Cli/Services/ScannerInstaller.cs similarity index 100% rename from src/StellaOps.Cli/Services/ScannerInstaller.cs rename to src/Cli/StellaOps.Cli/Services/ScannerInstaller.cs diff --git a/src/StellaOps.Cli/StellaOps.Cli.csproj b/src/Cli/StellaOps.Cli/StellaOps.Cli.csproj similarity index 78% rename from src/StellaOps.Cli/StellaOps.Cli.csproj rename to src/Cli/StellaOps.Cli/StellaOps.Cli.csproj index 94b3629c..bd23bf1e 100644 --- a/src/StellaOps.Cli/StellaOps.Cli.csproj +++ b/src/Cli/StellaOps.Cli/StellaOps.Cli.csproj @@ -1,4 +1,5 @@ - + + Exe @@ -37,10 +38,10 @@ - - - - + + + + - + \ No newline at end of file diff --git a/src/StellaOps.Cli/TASKS.md b/src/Cli/StellaOps.Cli/TASKS.md similarity index 99% rename from src/StellaOps.Cli/TASKS.md rename to src/Cli/StellaOps.Cli/TASKS.md index f6bc5707..17254882 100644 --- a/src/StellaOps.Cli/TASKS.md +++ b/src/Cli/StellaOps.Cli/TASKS.md @@ -20,7 +20,7 @@ |----|--------|----------|------------|-------------|---------------| | CLI-POLICY-20-001 | TODO | DevEx/CLI Guild | WEB-POLICY-20-001 | Add `stella policy new|edit|submit|approve` commands with local editor integration, version pinning, and approval workflow wiring. | Commands round-trip policy drafts with temp files; approval requires correct scopes; unit tests cover happy/error paths. | | CLI-POLICY-20-002 | DONE (2025-10-27) | DevEx/CLI Guild | CLI-POLICY-20-001, WEB-POLICY-20-001, WEB-POLICY-20-002 | Implement `stella policy simulate` with SBOM/env arguments and diff output (table/JSON), handling exit codes for `ERR_POL_*`. | Simulation outputs deterministic diffs; JSON schema documented; tests validate exit codes + piping of env variables. | -> 2025-10-26: Scheduler Models expose canonical run/diff schemas (`src/StellaOps.Scheduler.Models/docs/SCHED-MODELS-20-001-POLICY-RUNS.md`). Schema exporter lives at `scripts/export-policy-schemas.sh`; wire schema validation once DevOps publishes artifacts (see DEVOPS-POLICY-20-004). +> 2025-10-26: Scheduler Models expose canonical run/diff schemas (`src/Scheduler/__Libraries/StellaOps.Scheduler.Models/docs/SCHED-MODELS-20-001-POLICY-RUNS.md`). Schema exporter lives at `scripts/export-policy-schemas.sh`; wire schema validation once DevOps publishes artifacts (see DEVOPS-POLICY-20-004). > 2025-10-27: DevOps pipeline now publishes `policy-schema-exports` artefacts per commit (see `.gitea/workflows/build-test-deploy.yml`); Slack `#policy-engine` alerts trigger on schema diffs. Pull the JSON from the CI artifact instead of committing local copies. > 2025-10-27: CLI command supports table/JSON output, environment parsing, `--fail-on-diff`, and maps `ERR_POL_*` to exit codes; tested in `StellaOps.Cli.Tests` against stubbed backend. 
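> Illustrative invocation shape (only `--fail-on-diff` is confirmed above; the other flag names are hypothetical): `stella policy simulate <policyId> --sbom <sbomId> --env key=value --output json --fail-on-diff` renders the diff as JSON, exits non-zero when findings change, and surfaces `ERR_POL_*` failures as dedicated exit codes.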
| CLI-POLICY-20-003 | DONE (2025-10-30) | DevEx/CLI Guild, Docs Guild | CLI-POLICY-20-002, WEB-POLICY-20-003, DOCS-POLICY-20-006 | Extend `stella findings ls|get` commands for policy-filtered retrieval with pagination, severity filters, and explain output. | Commands stream paginated results; explain view renders rationale entries; docs/help updated; end-to-end tests cover filters. | diff --git a/src/StellaOps.Cli/Telemetry/CliActivitySource.cs b/src/Cli/StellaOps.Cli/Telemetry/CliActivitySource.cs similarity index 100% rename from src/StellaOps.Cli/Telemetry/CliActivitySource.cs rename to src/Cli/StellaOps.Cli/Telemetry/CliActivitySource.cs diff --git a/src/StellaOps.Cli/Telemetry/CliMetrics.cs b/src/Cli/StellaOps.Cli/Telemetry/CliMetrics.cs similarity index 100% rename from src/StellaOps.Cli/Telemetry/CliMetrics.cs rename to src/Cli/StellaOps.Cli/Telemetry/CliMetrics.cs diff --git a/src/StellaOps.Cli/Telemetry/VerbosityState.cs b/src/Cli/StellaOps.Cli/Telemetry/VerbosityState.cs similarity index 100% rename from src/StellaOps.Cli/Telemetry/VerbosityState.cs rename to src/Cli/StellaOps.Cli/Telemetry/VerbosityState.cs diff --git a/src/StellaOps.Cli/appsettings.json b/src/Cli/StellaOps.Cli/appsettings.json similarity index 100% rename from src/StellaOps.Cli/appsettings.json rename to src/Cli/StellaOps.Cli/appsettings.json diff --git a/src/StellaOps.Cli.Plugins.NonCore/NonCoreCliCommandModule.cs b/src/Cli/__Libraries/StellaOps.Cli.Plugins.NonCore/NonCoreCliCommandModule.cs similarity index 97% rename from src/StellaOps.Cli.Plugins.NonCore/NonCoreCliCommandModule.cs rename to src/Cli/__Libraries/StellaOps.Cli.Plugins.NonCore/NonCoreCliCommandModule.cs index aad63a3b..4dae0189 100644 --- a/src/StellaOps.Cli.Plugins.NonCore/NonCoreCliCommandModule.cs +++ b/src/Cli/__Libraries/StellaOps.Cli.Plugins.NonCore/NonCoreCliCommandModule.cs @@ -1,416 +1,416 @@ -using System; -using System.CommandLine; -using System.Threading; -using StellaOps.Cli.Commands; -using StellaOps.Cli.Configuration; -using StellaOps.Cli.Plugins; - -namespace StellaOps.Cli.Plugins.NonCore; - -public sealed class NonCoreCliCommandModule : ICliCommandModule -{ - public string Name => "stellaops.cli.plugins.noncore"; - - public bool IsAvailable(IServiceProvider services) => true; - - public void RegisterCommands( - RootCommand root, - IServiceProvider services, - StellaOpsCliOptions options, - Option verboseOption, - CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(root); - ArgumentNullException.ThrowIfNull(services); - ArgumentNullException.ThrowIfNull(verboseOption); - - root.Add(BuildExcititorCommand(services, verboseOption, cancellationToken)); - root.Add(BuildRuntimeCommand(services, verboseOption, cancellationToken)); - root.Add(BuildOfflineCommand(services, verboseOption, cancellationToken)); - } - - private static Command BuildExcititorCommand(IServiceProvider services, Option verboseOption, CancellationToken cancellationToken) - { - var excititor = new Command("excititor", "Manage Excititor ingest, exports, and reconciliation workflows."); - - var init = new Command("init", "Initialize Excititor ingest state."); - var initProviders = new Option("--provider", new[] { "-p" }) - { - Description = "Optional provider identifier(s) to initialize.", - Arity = ArgumentArity.ZeroOrMore - }; - var resumeOption = new Option("--resume") - { - Description = "Resume ingest from the last persisted checkpoint instead of starting fresh." 
- }; - init.Add(initProviders); - init.Add(resumeOption); - init.SetAction((parseResult, _) => - { - var providers = parseResult.GetValue(initProviders) ?? Array.Empty(); - var resume = parseResult.GetValue(resumeOption); - var verbose = parseResult.GetValue(verboseOption); - return CommandHandlers.HandleExcititorInitAsync(services, providers, resume, verbose, cancellationToken); - }); - - var pull = new Command("pull", "Trigger Excititor ingest for configured providers."); - var pullProviders = new Option("--provider", new[] { "-p" }) - { - Description = "Optional provider identifier(s) to ingest.", - Arity = ArgumentArity.ZeroOrMore - }; - var sinceOption = new Option("--since") - { - Description = "Optional ISO-8601 timestamp to begin the ingest window." - }; - var windowOption = new Option("--window") - { - Description = "Optional window duration (e.g. 24:00:00)." - }; - var forceOption = new Option("--force") - { - Description = "Force ingestion even if the backend reports no pending work." - }; - pull.Add(pullProviders); - pull.Add(sinceOption); - pull.Add(windowOption); - pull.Add(forceOption); - pull.SetAction((parseResult, _) => - { - var providers = parseResult.GetValue(pullProviders) ?? Array.Empty(); - var since = parseResult.GetValue(sinceOption); - var window = parseResult.GetValue(windowOption); - var force = parseResult.GetValue(forceOption); - var verbose = parseResult.GetValue(verboseOption); - return CommandHandlers.HandleExcititorPullAsync(services, providers, since, window, force, verbose, cancellationToken); - }); - - var resume = new Command("resume", "Resume Excititor ingest using a checkpoint token."); - var resumeProviders = new Option("--provider", new[] { "-p" }) - { - Description = "Optional provider identifier(s) to resume.", - Arity = ArgumentArity.ZeroOrMore - }; - var checkpointOption = new Option("--checkpoint") - { - Description = "Optional checkpoint identifier to resume from." - }; - resume.Add(resumeProviders); - resume.Add(checkpointOption); - resume.SetAction((parseResult, _) => - { - var providers = parseResult.GetValue(resumeProviders) ?? Array.Empty(); - var checkpoint = parseResult.GetValue(checkpointOption); - var verbose = parseResult.GetValue(verboseOption); - return CommandHandlers.HandleExcititorResumeAsync(services, providers, checkpoint, verbose, cancellationToken); - }); - - var list = new Command("list-providers", "List Excititor providers and their ingest status."); - var includeDisabledOption = new Option("--include-disabled") - { - Description = "Include disabled providers in the listing." - }; - list.Add(includeDisabledOption); - list.SetAction((parseResult, _) => - { - var includeDisabled = parseResult.GetValue(includeDisabledOption); - var verbose = parseResult.GetValue(verboseOption); - return CommandHandlers.HandleExcititorListProvidersAsync(services, includeDisabled, verbose, cancellationToken); - }); - - var export = new Command("export", "Trigger Excititor export generation."); - var formatOption = new Option("--format") - { - Description = "Export format (e.g. openvex, json)." - }; - var exportDeltaOption = new Option("--delta") - { - Description = "Request a delta export when supported." - }; - var exportScopeOption = new Option("--scope") - { - Description = "Optional policy scope or tenant identifier." - }; - var exportSinceOption = new Option("--since") - { - Description = "Optional ISO-8601 timestamp to restrict export contents." 
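            // Example (illustrative placeholders): `stella excititor export --format openvex --delta --since 2025-10-01T00:00:00Z --output ./excititor-export.json`
            // requests a delta export and downloads the artifact locally when --output is supplied; --format defaults to "openvex" when omitted.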
- }; - var exportProviderOption = new Option("--provider") - { - Description = "Optional provider identifier when requesting targeted exports." - }; - var exportOutputOption = new Option("--output") - { - Description = "Optional path to download the export artifact." - }; - export.Add(formatOption); - export.Add(exportDeltaOption); - export.Add(exportScopeOption); - export.Add(exportSinceOption); - export.Add(exportProviderOption); - export.Add(exportOutputOption); - export.SetAction((parseResult, _) => - { - var format = parseResult.GetValue(formatOption) ?? "openvex"; - var delta = parseResult.GetValue(exportDeltaOption); - var scope = parseResult.GetValue(exportScopeOption); - var since = parseResult.GetValue(exportSinceOption); - var provider = parseResult.GetValue(exportProviderOption); - var output = parseResult.GetValue(exportOutputOption); - var verbose = parseResult.GetValue(verboseOption); - return CommandHandlers.HandleExcititorExportAsync(services, format, delta, scope, since, provider, output, verbose, cancellationToken); - }); - - var backfill = new Command("backfill-statements", "Replay historical raw documents into Excititor statements."); - var backfillRetrievedSinceOption = new Option("--retrieved-since") - { - Description = "Only process raw documents retrieved on or after the provided ISO-8601 timestamp." - }; - var backfillForceOption = new Option("--force") - { - Description = "Reprocess documents even if statements already exist." - }; - var backfillBatchSizeOption = new Option("--batch-size") - { - Description = "Number of raw documents to fetch per batch (default 100)." - }; - var backfillMaxDocumentsOption = new Option("--max-documents") - { - Description = "Optional maximum number of raw documents to process." - }; - backfill.Add(backfillRetrievedSinceOption); - backfill.Add(backfillForceOption); - backfill.Add(backfillBatchSizeOption); - backfill.Add(backfillMaxDocumentsOption); - backfill.SetAction((parseResult, _) => - { - var retrievedSince = parseResult.GetValue(backfillRetrievedSinceOption); - var force = parseResult.GetValue(backfillForceOption); - var batchSize = parseResult.GetValue(backfillBatchSizeOption); - if (batchSize <= 0) - { - batchSize = 100; - } - var maxDocuments = parseResult.GetValue(backfillMaxDocumentsOption); - var verbose = parseResult.GetValue(verboseOption); - return CommandHandlers.HandleExcititorBackfillStatementsAsync( - services, - retrievedSince, - force, - batchSize, - maxDocuments, - verbose, - cancellationToken); - }); - - var verify = new Command("verify", "Verify Excititor exports or attestations."); - var exportIdOption = new Option("--export-id") - { - Description = "Export identifier to verify." - }; - var digestOption = new Option("--digest") - { - Description = "Expected digest for the export or attestation." - }; - var attestationOption = new Option("--attestation") - { - Description = "Path to a local attestation file to verify (base64 content will be uploaded)." 
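// Illustrative sketch only (added commentary, not part of this patch): the export and verify
// options defined here would translate into invocations roughly like the following, assuming the
// root command is `stella` as elsewhere in this change. The values mirror fixtures used by the
// Excititor tests later in this diff and are sample data, not real artifacts:
//
//   stella excititor export --format openvex --output ./export.json
//   stella excititor verify --export-id export-123 --digest sha256:abc --attestation ./attestation.json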
- }; - verify.Add(exportIdOption); - verify.Add(digestOption); - verify.Add(attestationOption); - verify.SetAction((parseResult, _) => - { - var exportId = parseResult.GetValue(exportIdOption); - var digest = parseResult.GetValue(digestOption); - var attestation = parseResult.GetValue(attestationOption); - var verbose = parseResult.GetValue(verboseOption); - return CommandHandlers.HandleExcititorVerifyAsync(services, exportId, digest, attestation, verbose, cancellationToken); - }); - - var reconcile = new Command("reconcile", "Trigger Excititor reconciliation against canonical advisories."); - var reconcileProviders = new Option("--provider", new[] { "-p" }) - { - Description = "Optional provider identifier(s) to reconcile.", - Arity = ArgumentArity.ZeroOrMore - }; - var maxAgeOption = new Option("--max-age") - { - Description = "Optional maximum age window (e.g. 7.00:00:00)." - }; - reconcile.Add(reconcileProviders); - reconcile.Add(maxAgeOption); - reconcile.SetAction((parseResult, _) => - { - var providers = parseResult.GetValue(reconcileProviders) ?? Array.Empty(); - var maxAge = parseResult.GetValue(maxAgeOption); - var verbose = parseResult.GetValue(verboseOption); - return CommandHandlers.HandleExcititorReconcileAsync(services, providers, maxAge, verbose, cancellationToken); - }); - - excititor.Add(init); - excititor.Add(pull); - excititor.Add(resume); - excititor.Add(list); - excititor.Add(export); - excititor.Add(backfill); - excititor.Add(verify); - excititor.Add(reconcile); - return excititor; - } - - private static Command BuildRuntimeCommand(IServiceProvider services, Option verboseOption, CancellationToken cancellationToken) - { - var runtime = new Command("runtime", "Interact with runtime admission policy APIs."); - var policy = new Command("policy", "Runtime policy operations."); - - var test = new Command("test", "Evaluate runtime policy decisions for image digests."); - var namespaceOption = new Option("--namespace", new[] { "--ns" }) - { - Description = "Namespace or logical scope for the evaluation." - }; - - var imageOption = new Option("--image", new[] { "-i", "--images" }) - { - Description = "Image digests to evaluate (repeatable).", - Arity = ArgumentArity.ZeroOrMore - }; - - var fileOption = new Option("--file", new[] { "-f" }) - { - Description = "Path to a file containing image digests (one per line)." - }; - - var labelOption = new Option("--label", new[] { "-l", "--labels" }) - { - Description = "Pod labels in key=value format (repeatable).", - Arity = ArgumentArity.ZeroOrMore - }; - - var jsonOption = new Option("--json") - { - Description = "Emit the raw JSON response." - }; - - test.Add(namespaceOption); - test.Add(imageOption); - test.Add(fileOption); - test.Add(labelOption); - test.Add(jsonOption); - - test.SetAction((parseResult, _) => - { - var nsValue = parseResult.GetValue(namespaceOption); - var images = parseResult.GetValue(imageOption) ?? Array.Empty(); - var file = parseResult.GetValue(fileOption); - var labels = parseResult.GetValue(labelOption) ?? 
Array.Empty(); - var outputJson = parseResult.GetValue(jsonOption); - var verbose = parseResult.GetValue(verboseOption); - - return CommandHandlers.HandleRuntimePolicyTestAsync( - services, - nsValue, - images, - file, - labels, - outputJson, - verbose, - cancellationToken); - }); - - policy.Add(test); - runtime.Add(policy); - return runtime; - } - - private static Command BuildOfflineCommand(IServiceProvider services, Option verboseOption, CancellationToken cancellationToken) - { - var offline = new Command("offline", "Offline kit workflows and utilities."); - - var kit = new Command("kit", "Manage offline kit bundles."); - - var pull = new Command("pull", "Download the latest offline kit bundle."); - var bundleIdOption = new Option("--bundle-id") - { - Description = "Optional bundle identifier. Defaults to the latest available." - }; - var destinationOption = new Option("--destination") - { - Description = "Directory to store downloaded bundles (defaults to the configured offline kits directory)." - }; - var overwriteOption = new Option("--overwrite") - { - Description = "Overwrite existing files even if checksums match." - }; - var noResumeOption = new Option("--no-resume") - { - Description = "Disable resuming partial downloads." - }; - - pull.Add(bundleIdOption); - pull.Add(destinationOption); - pull.Add(overwriteOption); - pull.Add(noResumeOption); - pull.SetAction((parseResult, _) => - { - var bundleId = parseResult.GetValue(bundleIdOption); - var destination = parseResult.GetValue(destinationOption); - var overwrite = parseResult.GetValue(overwriteOption); - var resume = !parseResult.GetValue(noResumeOption); - var verbose = parseResult.GetValue(verboseOption); - return CommandHandlers.HandleOfflineKitPullAsync(services, bundleId, destination, overwrite, resume, verbose, cancellationToken); - }); - - var import = new Command("import", "Upload an offline kit bundle to the backend."); - var bundleArgument = new Argument("bundle") - { - Description = "Path to the offline kit tarball (.tgz)." - }; - var manifestOption = new Option("--manifest") - { - Description = "Offline manifest JSON path (defaults to metadata or sibling file)." - }; - var bundleSignatureOption = new Option("--bundle-signature") - { - Description = "Detached signature for the offline bundle (e.g. .sig)." - }; - var manifestSignatureOption = new Option("--manifest-signature") - { - Description = "Detached signature for the offline manifest (e.g. .jws)." - }; - - import.Add(bundleArgument); - import.Add(manifestOption); - import.Add(bundleSignatureOption); - import.Add(manifestSignatureOption); - import.SetAction((parseResult, _) => - { - var bundlePath = parseResult.GetValue(bundleArgument) ?? string.Empty; - var manifest = parseResult.GetValue(manifestOption); - var bundleSignature = parseResult.GetValue(bundleSignatureOption); - var manifestSignature = parseResult.GetValue(manifestSignatureOption); - var verbose = parseResult.GetValue(verboseOption); - return CommandHandlers.HandleOfflineKitImportAsync(services, bundlePath, manifest, bundleSignature, manifestSignature, verbose, cancellationToken); - }); - - var status = new Command("status", "Display offline kit installation status."); - var jsonOption = new Option("--json") - { - Description = "Emit status as JSON." 
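// Illustrative sketch only (added commentary, not part of this patch): the runtime and offline
// commands built in this module might be driven as follows. The namespace, digests, and label come
// from the unit tests in this diff; the file paths are placeholders:
//
//   stella runtime policy test --namespace prod --image sha256:aaa --image sha256:bbb --label app=frontend --json
//   stella offline kit pull --destination ./offline-kits
//   stella offline kit import ./offline-kit.tgz --manifest ./offline-manifest.json
//   stella offline kit status --json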
- }; - status.Add(jsonOption); - status.SetAction((parseResult, _) => - { - var asJson = parseResult.GetValue(jsonOption); - var verbose = parseResult.GetValue(verboseOption); - return CommandHandlers.HandleOfflineKitStatusAsync(services, asJson, verbose, cancellationToken); - }); - - kit.Add(pull); - kit.Add(import); - kit.Add(status); - - offline.Add(kit); - return offline; - } -} +using System; +using System.CommandLine; +using System.Threading; +using StellaOps.Cli.Commands; +using StellaOps.Cli.Configuration; +using StellaOps.Cli.Plugins; + +namespace StellaOps.Cli.Plugins.NonCore; + +public sealed class NonCoreCliCommandModule : ICliCommandModule +{ + public string Name => "stellaops.cli.plugins.noncore"; + + public bool IsAvailable(IServiceProvider services) => true; + + public void RegisterCommands( + RootCommand root, + IServiceProvider services, + StellaOpsCliOptions options, + Option verboseOption, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(root); + ArgumentNullException.ThrowIfNull(services); + ArgumentNullException.ThrowIfNull(verboseOption); + + root.Add(BuildExcititorCommand(services, verboseOption, cancellationToken)); + root.Add(BuildRuntimeCommand(services, verboseOption, cancellationToken)); + root.Add(BuildOfflineCommand(services, verboseOption, cancellationToken)); + } + + private static Command BuildExcititorCommand(IServiceProvider services, Option verboseOption, CancellationToken cancellationToken) + { + var excititor = new Command("excititor", "Manage Excititor ingest, exports, and reconciliation workflows."); + + var init = new Command("init", "Initialize Excititor ingest state."); + var initProviders = new Option("--provider", new[] { "-p" }) + { + Description = "Optional provider identifier(s) to initialize.", + Arity = ArgumentArity.ZeroOrMore + }; + var resumeOption = new Option("--resume") + { + Description = "Resume ingest from the last persisted checkpoint instead of starting fresh." + }; + init.Add(initProviders); + init.Add(resumeOption); + init.SetAction((parseResult, _) => + { + var providers = parseResult.GetValue(initProviders) ?? Array.Empty(); + var resume = parseResult.GetValue(resumeOption); + var verbose = parseResult.GetValue(verboseOption); + return CommandHandlers.HandleExcititorInitAsync(services, providers, resume, verbose, cancellationToken); + }); + + var pull = new Command("pull", "Trigger Excititor ingest for configured providers."); + var pullProviders = new Option("--provider", new[] { "-p" }) + { + Description = "Optional provider identifier(s) to ingest.", + Arity = ArgumentArity.ZeroOrMore + }; + var sinceOption = new Option("--since") + { + Description = "Optional ISO-8601 timestamp to begin the ingest window." + }; + var windowOption = new Option("--window") + { + Description = "Optional window duration (e.g. 24:00:00)." + }; + var forceOption = new Option("--force") + { + Description = "Force ingestion even if the backend reports no pending work." + }; + pull.Add(pullProviders); + pull.Add(sinceOption); + pull.Add(windowOption); + pull.Add(forceOption); + pull.SetAction((parseResult, _) => + { + var providers = parseResult.GetValue(pullProviders) ?? 
Array.Empty(); + var since = parseResult.GetValue(sinceOption); + var window = parseResult.GetValue(windowOption); + var force = parseResult.GetValue(forceOption); + var verbose = parseResult.GetValue(verboseOption); + return CommandHandlers.HandleExcititorPullAsync(services, providers, since, window, force, verbose, cancellationToken); + }); + + var resume = new Command("resume", "Resume Excititor ingest using a checkpoint token."); + var resumeProviders = new Option("--provider", new[] { "-p" }) + { + Description = "Optional provider identifier(s) to resume.", + Arity = ArgumentArity.ZeroOrMore + }; + var checkpointOption = new Option("--checkpoint") + { + Description = "Optional checkpoint identifier to resume from." + }; + resume.Add(resumeProviders); + resume.Add(checkpointOption); + resume.SetAction((parseResult, _) => + { + var providers = parseResult.GetValue(resumeProviders) ?? Array.Empty(); + var checkpoint = parseResult.GetValue(checkpointOption); + var verbose = parseResult.GetValue(verboseOption); + return CommandHandlers.HandleExcititorResumeAsync(services, providers, checkpoint, verbose, cancellationToken); + }); + + var list = new Command("list-providers", "List Excititor providers and their ingest status."); + var includeDisabledOption = new Option("--include-disabled") + { + Description = "Include disabled providers in the listing." + }; + list.Add(includeDisabledOption); + list.SetAction((parseResult, _) => + { + var includeDisabled = parseResult.GetValue(includeDisabledOption); + var verbose = parseResult.GetValue(verboseOption); + return CommandHandlers.HandleExcititorListProvidersAsync(services, includeDisabled, verbose, cancellationToken); + }); + + var export = new Command("export", "Trigger Excititor export generation."); + var formatOption = new Option("--format") + { + Description = "Export format (e.g. openvex, json)." + }; + var exportDeltaOption = new Option("--delta") + { + Description = "Request a delta export when supported." + }; + var exportScopeOption = new Option("--scope") + { + Description = "Optional policy scope or tenant identifier." + }; + var exportSinceOption = new Option("--since") + { + Description = "Optional ISO-8601 timestamp to restrict export contents." + }; + var exportProviderOption = new Option("--provider") + { + Description = "Optional provider identifier when requesting targeted exports." + }; + var exportOutputOption = new Option("--output") + { + Description = "Optional path to download the export artifact." + }; + export.Add(formatOption); + export.Add(exportDeltaOption); + export.Add(exportScopeOption); + export.Add(exportSinceOption); + export.Add(exportProviderOption); + export.Add(exportOutputOption); + export.SetAction((parseResult, _) => + { + var format = parseResult.GetValue(formatOption) ?? 
"openvex"; + var delta = parseResult.GetValue(exportDeltaOption); + var scope = parseResult.GetValue(exportScopeOption); + var since = parseResult.GetValue(exportSinceOption); + var provider = parseResult.GetValue(exportProviderOption); + var output = parseResult.GetValue(exportOutputOption); + var verbose = parseResult.GetValue(verboseOption); + return CommandHandlers.HandleExcititorExportAsync(services, format, delta, scope, since, provider, output, verbose, cancellationToken); + }); + + var backfill = new Command("backfill-statements", "Replay historical raw documents into Excititor statements."); + var backfillRetrievedSinceOption = new Option("--retrieved-since") + { + Description = "Only process raw documents retrieved on or after the provided ISO-8601 timestamp." + }; + var backfillForceOption = new Option("--force") + { + Description = "Reprocess documents even if statements already exist." + }; + var backfillBatchSizeOption = new Option("--batch-size") + { + Description = "Number of raw documents to fetch per batch (default 100)." + }; + var backfillMaxDocumentsOption = new Option("--max-documents") + { + Description = "Optional maximum number of raw documents to process." + }; + backfill.Add(backfillRetrievedSinceOption); + backfill.Add(backfillForceOption); + backfill.Add(backfillBatchSizeOption); + backfill.Add(backfillMaxDocumentsOption); + backfill.SetAction((parseResult, _) => + { + var retrievedSince = parseResult.GetValue(backfillRetrievedSinceOption); + var force = parseResult.GetValue(backfillForceOption); + var batchSize = parseResult.GetValue(backfillBatchSizeOption); + if (batchSize <= 0) + { + batchSize = 100; + } + var maxDocuments = parseResult.GetValue(backfillMaxDocumentsOption); + var verbose = parseResult.GetValue(verboseOption); + return CommandHandlers.HandleExcititorBackfillStatementsAsync( + services, + retrievedSince, + force, + batchSize, + maxDocuments, + verbose, + cancellationToken); + }); + + var verify = new Command("verify", "Verify Excititor exports or attestations."); + var exportIdOption = new Option("--export-id") + { + Description = "Export identifier to verify." + }; + var digestOption = new Option("--digest") + { + Description = "Expected digest for the export or attestation." + }; + var attestationOption = new Option("--attestation") + { + Description = "Path to a local attestation file to verify (base64 content will be uploaded)." + }; + verify.Add(exportIdOption); + verify.Add(digestOption); + verify.Add(attestationOption); + verify.SetAction((parseResult, _) => + { + var exportId = parseResult.GetValue(exportIdOption); + var digest = parseResult.GetValue(digestOption); + var attestation = parseResult.GetValue(attestationOption); + var verbose = parseResult.GetValue(verboseOption); + return CommandHandlers.HandleExcititorVerifyAsync(services, exportId, digest, attestation, verbose, cancellationToken); + }); + + var reconcile = new Command("reconcile", "Trigger Excititor reconciliation against canonical advisories."); + var reconcileProviders = new Option("--provider", new[] { "-p" }) + { + Description = "Optional provider identifier(s) to reconcile.", + Arity = ArgumentArity.ZeroOrMore + }; + var maxAgeOption = new Option("--max-age") + { + Description = "Optional maximum age window (e.g. 7.00:00:00)." + }; + reconcile.Add(reconcileProviders); + reconcile.Add(maxAgeOption); + reconcile.SetAction((parseResult, _) => + { + var providers = parseResult.GetValue(reconcileProviders) ?? 
Array.Empty(); + var maxAge = parseResult.GetValue(maxAgeOption); + var verbose = parseResult.GetValue(verboseOption); + return CommandHandlers.HandleExcititorReconcileAsync(services, providers, maxAge, verbose, cancellationToken); + }); + + excititor.Add(init); + excititor.Add(pull); + excititor.Add(resume); + excititor.Add(list); + excititor.Add(export); + excititor.Add(backfill); + excititor.Add(verify); + excititor.Add(reconcile); + return excititor; + } + + private static Command BuildRuntimeCommand(IServiceProvider services, Option verboseOption, CancellationToken cancellationToken) + { + var runtime = new Command("runtime", "Interact with runtime admission policy APIs."); + var policy = new Command("policy", "Runtime policy operations."); + + var test = new Command("test", "Evaluate runtime policy decisions for image digests."); + var namespaceOption = new Option("--namespace", new[] { "--ns" }) + { + Description = "Namespace or logical scope for the evaluation." + }; + + var imageOption = new Option("--image", new[] { "-i", "--images" }) + { + Description = "Image digests to evaluate (repeatable).", + Arity = ArgumentArity.ZeroOrMore + }; + + var fileOption = new Option("--file", new[] { "-f" }) + { + Description = "Path to a file containing image digests (one per line)." + }; + + var labelOption = new Option("--label", new[] { "-l", "--labels" }) + { + Description = "Pod labels in key=value format (repeatable).", + Arity = ArgumentArity.ZeroOrMore + }; + + var jsonOption = new Option("--json") + { + Description = "Emit the raw JSON response." + }; + + test.Add(namespaceOption); + test.Add(imageOption); + test.Add(fileOption); + test.Add(labelOption); + test.Add(jsonOption); + + test.SetAction((parseResult, _) => + { + var nsValue = parseResult.GetValue(namespaceOption); + var images = parseResult.GetValue(imageOption) ?? Array.Empty(); + var file = parseResult.GetValue(fileOption); + var labels = parseResult.GetValue(labelOption) ?? Array.Empty(); + var outputJson = parseResult.GetValue(jsonOption); + var verbose = parseResult.GetValue(verboseOption); + + return CommandHandlers.HandleRuntimePolicyTestAsync( + services, + nsValue, + images, + file, + labels, + outputJson, + verbose, + cancellationToken); + }); + + policy.Add(test); + runtime.Add(policy); + return runtime; + } + + private static Command BuildOfflineCommand(IServiceProvider services, Option verboseOption, CancellationToken cancellationToken) + { + var offline = new Command("offline", "Offline kit workflows and utilities."); + + var kit = new Command("kit", "Manage offline kit bundles."); + + var pull = new Command("pull", "Download the latest offline kit bundle."); + var bundleIdOption = new Option("--bundle-id") + { + Description = "Optional bundle identifier. Defaults to the latest available." + }; + var destinationOption = new Option("--destination") + { + Description = "Directory to store downloaded bundles (defaults to the configured offline kits directory)." + }; + var overwriteOption = new Option("--overwrite") + { + Description = "Overwrite existing files even if checksums match." + }; + var noResumeOption = new Option("--no-resume") + { + Description = "Disable resuming partial downloads." 
+ }; + + pull.Add(bundleIdOption); + pull.Add(destinationOption); + pull.Add(overwriteOption); + pull.Add(noResumeOption); + pull.SetAction((parseResult, _) => + { + var bundleId = parseResult.GetValue(bundleIdOption); + var destination = parseResult.GetValue(destinationOption); + var overwrite = parseResult.GetValue(overwriteOption); + var resume = !parseResult.GetValue(noResumeOption); + var verbose = parseResult.GetValue(verboseOption); + return CommandHandlers.HandleOfflineKitPullAsync(services, bundleId, destination, overwrite, resume, verbose, cancellationToken); + }); + + var import = new Command("import", "Upload an offline kit bundle to the backend."); + var bundleArgument = new Argument("bundle") + { + Description = "Path to the offline kit tarball (.tgz)." + }; + var manifestOption = new Option("--manifest") + { + Description = "Offline manifest JSON path (defaults to metadata or sibling file)." + }; + var bundleSignatureOption = new Option("--bundle-signature") + { + Description = "Detached signature for the offline bundle (e.g. .sig)." + }; + var manifestSignatureOption = new Option("--manifest-signature") + { + Description = "Detached signature for the offline manifest (e.g. .jws)." + }; + + import.Add(bundleArgument); + import.Add(manifestOption); + import.Add(bundleSignatureOption); + import.Add(manifestSignatureOption); + import.SetAction((parseResult, _) => + { + var bundlePath = parseResult.GetValue(bundleArgument) ?? string.Empty; + var manifest = parseResult.GetValue(manifestOption); + var bundleSignature = parseResult.GetValue(bundleSignatureOption); + var manifestSignature = parseResult.GetValue(manifestSignatureOption); + var verbose = parseResult.GetValue(verboseOption); + return CommandHandlers.HandleOfflineKitImportAsync(services, bundlePath, manifest, bundleSignature, manifestSignature, verbose, cancellationToken); + }); + + var status = new Command("status", "Display offline kit installation status."); + var jsonOption = new Option("--json") + { + Description = "Emit status as JSON." 
+ }; + status.Add(jsonOption); + status.SetAction((parseResult, _) => + { + var asJson = parseResult.GetValue(jsonOption); + var verbose = parseResult.GetValue(verboseOption); + return CommandHandlers.HandleOfflineKitStatusAsync(services, asJson, verbose, cancellationToken); + }); + + kit.Add(pull); + kit.Add(import); + kit.Add(status); + + offline.Add(kit); + return offline; + } +} diff --git a/src/StellaOps.Cli.Plugins.NonCore/StellaOps.Cli.Plugins.NonCore.csproj b/src/Cli/__Libraries/StellaOps.Cli.Plugins.NonCore/StellaOps.Cli.Plugins.NonCore.csproj similarity index 97% rename from src/StellaOps.Cli.Plugins.NonCore/StellaOps.Cli.Plugins.NonCore.csproj rename to src/Cli/__Libraries/StellaOps.Cli.Plugins.NonCore/StellaOps.Cli.Plugins.NonCore.csproj index bd8c186f..1dc96cda 100644 --- a/src/StellaOps.Cli.Plugins.NonCore/StellaOps.Cli.Plugins.NonCore.csproj +++ b/src/Cli/__Libraries/StellaOps.Cli.Plugins.NonCore/StellaOps.Cli.Plugins.NonCore.csproj @@ -1,22 +1,22 @@ - - - net10.0 - enable - enable - preview - true - $([System.IO.Path]::GetFullPath('$(MSBuildThisFileDirectory)..\\..\\plugins\\cli\\StellaOps.Cli.Plugins.NonCore\\')) - - - - - - - - - - - - + + + net10.0 + enable + enable + preview + true + $([System.IO.Path]::GetFullPath('$(MSBuildThisFileDirectory)..\\..\\plugins\\cli\\StellaOps.Cli.Plugins.NonCore\\')) + + + + + + + + + + + + diff --git a/src/StellaOps.Cli.Tests/Commands/CommandHandlersTests.cs b/src/Cli/__Tests/StellaOps.Cli.Tests/Commands/CommandHandlersTests.cs similarity index 97% rename from src/StellaOps.Cli.Tests/Commands/CommandHandlersTests.cs rename to src/Cli/__Tests/StellaOps.Cli.Tests/Commands/CommandHandlersTests.cs index 28d3c64e..61a64214 100644 --- a/src/StellaOps.Cli.Tests/Commands/CommandHandlersTests.cs +++ b/src/Cli/__Tests/StellaOps.Cli.Tests/Commands/CommandHandlersTests.cs @@ -1,2489 +1,2489 @@ -using System; -using System.Collections.Generic; -using System.Collections.ObjectModel; -using System.IO; -using System.Linq; -using System.Net; -using System.Net.Http; -using System.Security.Cryptography; -using System.Text; -using System.Text.Json; -using System.Text.Json.Serialization; -using System.Threading; -using System.Threading.Tasks; -using System.Globalization; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging; -using Microsoft.IdentityModel.Tokens; -using StellaOps.Auth.Abstractions; -using StellaOps.Auth.Client; -using StellaOps.Cli.Commands; -using StellaOps.Cli.Configuration; -using StellaOps.Cli.Services; -using StellaOps.Cli.Services.Models; -using StellaOps.Cli.Telemetry; -using StellaOps.Cli.Tests.Testing; -using StellaOps.Cryptography; -using Spectre.Console; -using Spectre.Console.Testing; - -namespace StellaOps.Cli.Tests.Commands; - -public sealed class CommandHandlersTests -{ - [Fact] - public async Task HandleExportJobAsync_SetsExitCodeZeroOnSuccess() - { - var original = Environment.ExitCode; - try - { - var backend = new StubBackendClient(new JobTriggerResult(true, "Accepted", "/jobs/export:json/1", null)); - var provider = BuildServiceProvider(backend); - - await CommandHandlers.HandleExportJobAsync( - provider, - format: "json", - delta: false, - publishFull: null, - publishDelta: null, - includeFull: null, - includeDelta: null, - verbose: false, - cancellationToken: CancellationToken.None); - - Assert.Equal(0, Environment.ExitCode); - Assert.Equal("export:json", backend.LastJobKind); - } - finally - { - Environment.ExitCode = original; - } - } - - [Fact] - public async Task 
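// Note on the shared test pattern in this file: each test snapshots Environment.ExitCode before
// invoking a CommandHandlers method, asserts on the value the handler sets, and restores the
// original in a finally block so exit-code side effects never leak between xUnit cases. The same
// save/swap/restore discipline is applied to other process-wide state (AnsiConsole.Console,
// Console.Out) in the interactive-table and JSON-output tests below.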
HandleMergeJobAsync_SetsExitCodeOnFailure() - { - var original = Environment.ExitCode; - try - { - var backend = new StubBackendClient(new JobTriggerResult(false, "Job already running", null, null)); - var provider = BuildServiceProvider(backend); - - await CommandHandlers.HandleMergeJobAsync(provider, verbose: false, CancellationToken.None); - - Assert.Equal(1, Environment.ExitCode); - Assert.Equal("merge:reconcile", backend.LastJobKind); - } - finally - { - Environment.ExitCode = original; - } - } - - [Fact] - public async Task HandleScannerRunAsync_AutomaticallyUploadsResults() - { - using var tempDir = new TempDirectory(); - var resultsFile = Path.Combine(tempDir.Path, "results", "scan.json"); - var backend = new StubBackendClient(new JobTriggerResult(true, "Accepted", null, null)); - var metadataFile = Path.Combine(tempDir.Path, "results", "scan-run.json"); - var executor = new StubExecutor(new ScannerExecutionResult(0, resultsFile, metadataFile)); - var options = new StellaOpsCliOptions - { - ResultsDirectory = Path.Combine(tempDir.Path, "results") - }; - - var provider = BuildServiceProvider(backend, executor, new StubInstaller(), options); - - Directory.CreateDirectory(Path.Combine(tempDir.Path, "target")); - - var original = Environment.ExitCode; - try - { - await CommandHandlers.HandleScannerRunAsync( - provider, - runner: "docker", - entry: "scanner-image", - targetDirectory: Path.Combine(tempDir.Path, "target"), - arguments: Array.Empty(), - verbose: false, - cancellationToken: CancellationToken.None); - - Assert.Equal(0, Environment.ExitCode); - Assert.Equal(resultsFile, backend.LastUploadPath); - Assert.True(File.Exists(metadataFile)); - } - finally - { - Environment.ExitCode = original; - } - } - - [Fact] - public async Task HandleAuthLoginAsync_UsesClientCredentialsFlow() - { - var original = Environment.ExitCode; - using var tempDir = new TempDirectory(); - - try - { - var options = new StellaOpsCliOptions - { - ResultsDirectory = Path.Combine(tempDir.Path, "results"), - Authority = new StellaOpsCliAuthorityOptions - { - Url = "https://authority.example", - ClientId = "cli", - ClientSecret = "secret", - Scope = "concelier.jobs.trigger", - TokenCacheDirectory = tempDir.Path - } - }; - - var tokenClient = new StubTokenClient(); - var provider = BuildServiceProvider(new StubBackendClient(new JobTriggerResult(true, "ok", null, null)), options: options, tokenClient: tokenClient); - - await CommandHandlers.HandleAuthLoginAsync(provider, options, verbose: false, force: false, cancellationToken: CancellationToken.None); - - Assert.Equal(0, Environment.ExitCode); - Assert.Equal(1, tokenClient.ClientCredentialRequests); - Assert.NotNull(tokenClient.CachedEntry); - } - finally - { - Environment.ExitCode = original; - } - } - - [Fact] - public async Task HandleAuthLoginAsync_FailsWhenPasswordMissing() - { - var original = Environment.ExitCode; - using var tempDir = new TempDirectory(); - - try - { - var options = new StellaOpsCliOptions - { - ResultsDirectory = Path.Combine(tempDir.Path, "results"), - Authority = new StellaOpsCliAuthorityOptions - { - Url = "https://authority.example", - ClientId = "cli", - Username = "user", - TokenCacheDirectory = tempDir.Path - } - }; - - var provider = BuildServiceProvider(new StubBackendClient(new JobTriggerResult(true, "ok", null, null)), options: options, tokenClient: new StubTokenClient()); - - await CommandHandlers.HandleAuthLoginAsync(provider, options, verbose: false, force: false, cancellationToken: CancellationToken.None); - - 
Assert.Equal(1, Environment.ExitCode); - } - finally - { - Environment.ExitCode = original; - } - } - - [Fact] - public async Task HandleAuthStatusAsync_ReportsMissingToken() - { - var original = Environment.ExitCode; - using var tempDir = new TempDirectory(); - - try - { - var options = new StellaOpsCliOptions - { - ResultsDirectory = Path.Combine(tempDir.Path, "results"), - Authority = new StellaOpsCliAuthorityOptions - { - Url = "https://authority.example", - ClientId = "cli", - TokenCacheDirectory = tempDir.Path - } - }; - - var provider = BuildServiceProvider(new StubBackendClient(new JobTriggerResult(true, "ok", null, null)), options: options, tokenClient: new StubTokenClient()); - - await CommandHandlers.HandleAuthStatusAsync(provider, options, verbose: false, cancellationToken: CancellationToken.None); - - Assert.Equal(1, Environment.ExitCode); - } - finally - { - Environment.ExitCode = original; - } - } - - [Fact] - public async Task HandleExcititorInitAsync_CallsBackend() - { - var original = Environment.ExitCode; - try - { - var backend = new StubBackendClient(new JobTriggerResult(true, "accepted", null, null)); - var provider = BuildServiceProvider(backend); - - await CommandHandlers.HandleExcititorInitAsync( - provider, - new[] { "redhat" }, - resume: true, - verbose: false, - cancellationToken: CancellationToken.None); - - Assert.Equal(0, Environment.ExitCode); - Assert.Equal("init", backend.LastExcititorRoute); - Assert.Equal(HttpMethod.Post, backend.LastExcititorMethod); - var payload = Assert.IsAssignableFrom>(backend.LastExcititorPayload); - Assert.Equal(true, payload["resume"]); - var providers = Assert.IsAssignableFrom>(payload["providers"]!); - Assert.Contains("redhat", providers, StringComparer.OrdinalIgnoreCase); - } - finally - { - Environment.ExitCode = original; - } - } - - [Fact] - public async Task HandleExcititorListProvidersAsync_WritesOutput() - { - var original = Environment.ExitCode; - try - { - var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)) - { - ProviderSummaries = new[] - { - new ExcititorProviderSummary("redhat", "distro", "Red Hat", "vendor", true, DateTimeOffset.UtcNow) - } - }; - - var provider = BuildServiceProvider(backend); - await CommandHandlers.HandleExcititorListProvidersAsync(provider, includeDisabled: false, verbose: false, cancellationToken: CancellationToken.None); - - Assert.Equal(0, Environment.ExitCode); - } - finally - { - Environment.ExitCode = original; - } - } - - [Fact] - public async Task HandleExcititorVerifyAsync_FailsWithoutArguments() - { - var original = Environment.ExitCode; - try - { - var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)); - var provider = BuildServiceProvider(backend); - - await CommandHandlers.HandleExcititorVerifyAsync(provider, null, null, null, verbose: false, cancellationToken: CancellationToken.None); - - Assert.Equal(1, Environment.ExitCode); - } - finally - { - Environment.ExitCode = original; - } - } - - [Fact] - public async Task HandleExcititorVerifyAsync_AttachesAttestationFile() - { - var original = Environment.ExitCode; - using var tempFile = new TempFile("attestation.json", Encoding.UTF8.GetBytes("{\"ok\":true}")); - try - { - var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)); - var provider = BuildServiceProvider(backend); - - await CommandHandlers.HandleExcititorVerifyAsync( - provider, - exportId: "export-123", - digest: "sha256:abc", - attestationPath: tempFile.Path, - verbose: false, - 
cancellationToken: CancellationToken.None); - - Assert.Equal(0, Environment.ExitCode); - Assert.Equal("verify", backend.LastExcititorRoute); - var payload = Assert.IsAssignableFrom>(backend.LastExcititorPayload); - Assert.Equal("export-123", payload["exportId"]); - Assert.Equal("sha256:abc", payload["digest"]); - var attestation = Assert.IsAssignableFrom>(payload["attestation"]!); - Assert.Equal(Path.GetFileName(tempFile.Path), attestation["fileName"]); - Assert.NotNull(attestation["base64"]); - } - finally - { - Environment.ExitCode = original; - } - } - - [Fact] - public async Task HandleExcititorExportAsync_DownloadsWhenOutputProvided() - { - var original = Environment.ExitCode; - using var tempDir = new TempDirectory(); - - try - { - var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)); - const string manifestJson = """ - { - "exportId": "exports/20251019T101530Z/abcdef1234567890", - "format": "openvex", - "createdAt": "2025-10-19T10:15:30Z", - "artifact": { "algorithm": "sha256", "digest": "abcdef1234567890" }, - "fromCache": false, - "sizeBytes": 2048, - "attestation": { - "rekor": { - "location": "https://rekor.example/api/v1/log/entries/123", - "logIndex": "123" - } - } - } - """; - - backend.ExcititorResult = new ExcititorOperationResult(true, "ok", null, JsonDocument.Parse(manifestJson).RootElement.Clone()); - var provider = BuildServiceProvider(backend); - var outputPath = Path.Combine(tempDir.Path, "export.json"); - - await CommandHandlers.HandleExcititorExportAsync( - provider, - format: "openvex", - delta: false, - scope: null, - since: null, - provider: null, - outputPath: outputPath, - verbose: false, - cancellationToken: CancellationToken.None); - - Assert.Equal(0, Environment.ExitCode); - Assert.Single(backend.ExportDownloads); - var request = backend.ExportDownloads[0]; - Assert.Equal("exports/20251019T101530Z/abcdef1234567890", request.ExportId); - Assert.Equal(Path.GetFullPath(outputPath), request.DestinationPath); - Assert.Equal("sha256", request.Algorithm); - Assert.Equal("abcdef1234567890", request.Digest); - } - finally - { - Environment.ExitCode = original; - } - } - - [Fact] - public async Task HandleVulnObservationsAsync_WritesTableOutput() - { - var originalExit = Environment.ExitCode; - var response = new AdvisoryObservationsResponse - { - Observations = new[] - { - new AdvisoryObservationDocument - { - ObservationId = "tenant-a:ghsa:alpha:1", - Tenant = "tenant-a", - Source = new AdvisoryObservationSource - { - Vendor = "ghsa", - Stream = "advisories", - Api = "https://example.test/api" - }, - Upstream = new AdvisoryObservationUpstream - { - UpstreamId = "GHSA-abcd-efgh" - }, - Linkset = new AdvisoryObservationLinkset - { - Aliases = new[] { "cve-2025-0001" }, - Purls = new[] { "pkg:npm/package-a@1.0.0" }, - Cpes = new[] { "cpe:/a:vendor:product:1.0" } - }, - CreatedAt = new DateTimeOffset(2025, 10, 27, 6, 0, 0, TimeSpan.Zero) - } - }, - Linkset = new AdvisoryObservationLinksetAggregate - { - Aliases = new[] { "cve-2025-0001" }, - Purls = new[] { "pkg:npm/package-a@1.0.0" }, - Cpes = new[] { "cpe:/a:vendor:product:1.0" }, - References = Array.Empty() - } - }; - - var stubClient = new StubConcelierObservationsClient(response); - var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)); - var provider = BuildServiceProvider(backend, concelierClient: stubClient); - - var console = new TestConsole(); - var originalConsole = AnsiConsole.Console; - AnsiConsole.Console = console; - - try - { - await 
CommandHandlers.HandleVulnObservationsAsync( - provider, - tenant: "Tenant-A ", - observationIds: new[] { "tenant-a:ghsa:alpha:1 " }, - aliases: new[] { " CVE-2025-0001 " }, - purls: new[] { " pkg:npm/package-a@1.0.0 " }, - cpes: Array.Empty(), - limit: null, - cursor: null, - emitJson: false, - verbose: false, - cancellationToken: CancellationToken.None); - - Assert.Equal(0, Environment.ExitCode); - } - finally - { - Environment.ExitCode = originalExit; - AnsiConsole.Console = originalConsole; - } - - Assert.NotNull(stubClient.LastQuery); - var query = stubClient.LastQuery!; - Assert.Equal("tenant-a", query.Tenant); - Assert.Contains("cve-2025-0001", query.Aliases); - Assert.Contains("pkg:npm/package-a@1.0.0", query.Purls); - Assert.Null(query.Limit); - Assert.Null(query.Cursor); - - var output = console.Output; - Assert.False(string.IsNullOrWhiteSpace(output)); - } - - [Fact] - public async Task HandleVulnObservationsAsync_WritesJsonOutput() - { - var originalExit = Environment.ExitCode; - var response = new AdvisoryObservationsResponse - { - Observations = new[] - { - new AdvisoryObservationDocument - { - ObservationId = "tenant-a:osv:beta:2", - Tenant = "tenant-a", - Source = new AdvisoryObservationSource - { - Vendor = "osv", - Stream = "osv", - Api = "https://example.test/osv" - }, - Upstream = new AdvisoryObservationUpstream - { - UpstreamId = "OSV-2025-XYZ" - }, - Linkset = new AdvisoryObservationLinkset - { - Aliases = new[] { "cve-2025-0101" }, - Purls = new[] { "pkg:pypi/package-b@2.0.0" }, - Cpes = Array.Empty(), - References = new[] - { - new AdvisoryObservationReference { Type = "advisory", Url = "https://example.test/advisory" } - } - }, - CreatedAt = new DateTimeOffset(2025, 10, 27, 7, 30, 0, TimeSpan.Zero) - } - }, - Linkset = new AdvisoryObservationLinksetAggregate - { - Aliases = new[] { "cve-2025-0101" }, - Purls = new[] { "pkg:pypi/package-b@2.0.0" }, - Cpes = Array.Empty(), - References = new[] - { - new AdvisoryObservationReference { Type = "advisory", Url = "https://example.test/advisory" } - } - } - }; - - var stubClient = new StubConcelierObservationsClient(response); - var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)); - var provider = BuildServiceProvider(backend, concelierClient: stubClient); - - var writer = new StringWriter(); - var originalOut = Console.Out; - Console.SetOut(writer); - - try - { - await CommandHandlers.HandleVulnObservationsAsync( - provider, - tenant: "tenant-a", - observationIds: Array.Empty(), - aliases: Array.Empty(), - purls: Array.Empty(), - cpes: Array.Empty(), - limit: null, - cursor: null, - emitJson: true, - verbose: false, - cancellationToken: CancellationToken.None); - - Assert.Equal(0, Environment.ExitCode); - } - finally - { - Environment.ExitCode = originalExit; - Console.SetOut(originalOut); - } - - var json = writer.ToString(); - using var document = JsonDocument.Parse(json); - var root = document.RootElement; - Assert.True(root.TryGetProperty("observations", out var observations)); - Assert.Equal("tenant-a:osv:beta:2", observations[0].GetProperty("observationId").GetString()); - Assert.Equal("pkg:pypi/package-b@2.0.0", observations[0].GetProperty("linkset").GetProperty("purls")[0].GetString()); - } - - [Fact] - public async Task HandleVulnObservationsAsync_WhenHasMore_PrintsCursorHint() - { - var originalExit = Environment.ExitCode; - var response = new AdvisoryObservationsResponse - { - Observations = new[] - { - new AdvisoryObservationDocument - { - ObservationId = "tenant-a:source:1", - 
Tenant = "tenant-a", - Linkset = new AdvisoryObservationLinkset(), - Source = new AdvisoryObservationSource(), - Upstream = new AdvisoryObservationUpstream(), - CreatedAt = DateTimeOffset.UtcNow - } - }, - Linkset = new AdvisoryObservationLinksetAggregate(), - HasMore = true, - NextCursor = "cursor-token" - }; - - var stubClient = new StubConcelierObservationsClient(response); - var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)); - var provider = BuildServiceProvider(backend, concelierClient: stubClient); - - var console = new TestConsole(); - var originalConsole = AnsiConsole.Console; - AnsiConsole.Console = console; - - try - { - await CommandHandlers.HandleVulnObservationsAsync( - provider, - tenant: "tenant-a", - observationIds: Array.Empty(), - aliases: Array.Empty(), - purls: Array.Empty(), - cpes: Array.Empty(), - limit: 1, - cursor: null, - emitJson: false, - verbose: false, - cancellationToken: CancellationToken.None); - - Assert.Equal(0, Environment.ExitCode); - } - finally - { - Environment.ExitCode = originalExit; - AnsiConsole.Console = originalConsole; - } - - var output = console.Output; - Assert.Contains("--cursor", output, StringComparison.OrdinalIgnoreCase); - Assert.Contains("cursor-token", output, StringComparison.Ordinal); - } - - [Theory] - [InlineData(null)] - [InlineData("default")] - [InlineData("libsodium")] - public async Task HandleAuthRevokeVerifyAsync_VerifiesBundlesUsingProviderRegistry(string? providerHint) - { - var original = Environment.ExitCode; - using var tempDir = new TempDirectory(); - - try - { - var artifacts = await WriteRevocationArtifactsAsync(tempDir, providerHint); - - await CommandHandlers.HandleAuthRevokeVerifyAsync( - artifacts.BundlePath, - artifacts.SignaturePath, - artifacts.KeyPath, - verbose: true, - cancellationToken: CancellationToken.None); - - Assert.Equal(0, Environment.ExitCode); - } - finally - { - Environment.ExitCode = original; - } - } - - [Fact] - public async Task HandleAuthStatusAsync_ReportsCachedToken() - { - var original = Environment.ExitCode; - using var tempDir = new TempDirectory(); - - try - { - var options = new StellaOpsCliOptions - { - ResultsDirectory = Path.Combine(tempDir.Path, "results"), - Authority = new StellaOpsCliAuthorityOptions - { - Url = "https://authority.example", - ClientId = "cli", - TokenCacheDirectory = tempDir.Path - } - }; - - var tokenClient = new StubTokenClient(); - tokenClient.CachedEntry = new StellaOpsTokenCacheEntry( - "token", - "Bearer", - DateTimeOffset.UtcNow.AddMinutes(30), - new[] { StellaOpsScopes.ConcelierJobsTrigger }); - - var provider = BuildServiceProvider(new StubBackendClient(new JobTriggerResult(true, "ok", null, null)), options: options, tokenClient: tokenClient); - - await CommandHandlers.HandleAuthStatusAsync(provider, options, verbose: true, cancellationToken: CancellationToken.None); - - Assert.Equal(0, Environment.ExitCode); - } - finally - { - Environment.ExitCode = original; - } - } - - [Fact] - public async Task HandleAuthWhoAmIAsync_ReturnsErrorWhenTokenMissing() - { - var original = Environment.ExitCode; - using var tempDir = new TempDirectory(); - - try - { - var options = new StellaOpsCliOptions - { - ResultsDirectory = Path.Combine(tempDir.Path, "results"), - Authority = new StellaOpsCliAuthorityOptions - { - Url = "https://authority.example", - ClientId = "cli", - TokenCacheDirectory = tempDir.Path - } - }; - - var provider = BuildServiceProvider(new StubBackendClient(new JobTriggerResult(true, "ok", null, null)), options: 
options, tokenClient: new StubTokenClient()); - - await CommandHandlers.HandleAuthWhoAmIAsync(provider, options, verbose: false, cancellationToken: CancellationToken.None); - - Assert.Equal(1, Environment.ExitCode); - } - finally - { - Environment.ExitCode = original; - } - } - - [Fact] - public async Task HandleAuthWhoAmIAsync_ReportsClaimsForJwtToken() - { - var original = Environment.ExitCode; - using var tempDir = new TempDirectory(); - - try - { - var options = new StellaOpsCliOptions - { - ResultsDirectory = Path.Combine(tempDir.Path, "results"), - Authority = new StellaOpsCliAuthorityOptions - { - Url = "https://authority.example", - ClientId = "cli", - TokenCacheDirectory = tempDir.Path - } - }; - - var tokenClient = new StubTokenClient(); - tokenClient.CachedEntry = new StellaOpsTokenCacheEntry( - CreateUnsignedJwt( - ("sub", "cli-user"), - ("aud", "concelier"), - ("iss", "https://authority.example"), - ("iat", 1_700_000_000), - ("nbf", 1_700_000_000)), - "Bearer", - DateTimeOffset.UtcNow.AddMinutes(30), - new[] { StellaOpsScopes.ConcelierJobsTrigger }); - - var provider = BuildServiceProvider(new StubBackendClient(new JobTriggerResult(true, "ok", null, null)), options: options, tokenClient: tokenClient); - - await CommandHandlers.HandleAuthWhoAmIAsync(provider, options, verbose: true, cancellationToken: CancellationToken.None); - - Assert.Equal(0, Environment.ExitCode); - } - finally - { - Environment.ExitCode = original; - } - } - - [Fact] - public async Task HandleAuthLogoutAsync_ClearsToken() - { - var original = Environment.ExitCode; - using var tempDir = new TempDirectory(); - - try - { - var options = new StellaOpsCliOptions - { - ResultsDirectory = Path.Combine(tempDir.Path, "results"), - Authority = new StellaOpsCliAuthorityOptions - { - Url = "https://authority.example", - ClientId = "cli", - TokenCacheDirectory = tempDir.Path - } - }; - - var tokenClient = new StubTokenClient(); - tokenClient.CachedEntry = new StellaOpsTokenCacheEntry( - "token", - "Bearer", - DateTimeOffset.UtcNow.AddMinutes(5), - new[] { StellaOpsScopes.ConcelierJobsTrigger }); - - var provider = BuildServiceProvider(new StubBackendClient(new JobTriggerResult(true, "ok", null, null)), options: options, tokenClient: tokenClient); - - await CommandHandlers.HandleAuthLogoutAsync(provider, options, verbose: true, cancellationToken: CancellationToken.None); - - Assert.Null(tokenClient.CachedEntry); - Assert.Equal(1, tokenClient.ClearRequests); - Assert.Equal(0, Environment.ExitCode); - } - finally - { - Environment.ExitCode = original; - } - } - - [Fact] - public async Task HandleRuntimePolicyTestAsync_WritesInteractiveTable() - { - var originalExit = Environment.ExitCode; - var originalConsole = AnsiConsole.Console; - - var console = new TestConsole(); - console.Width(120); - console.Interactive(); - console.EmitAnsiSequences(); - - AnsiConsole.Console = console; - - var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)); - - var decisions = new Dictionary(StringComparer.Ordinal) - { - ["sha256:aaa"] = new RuntimePolicyImageDecision( - "allow", - true, - true, - Array.AsReadOnly(new[] { "trusted baseline" }), - new RuntimePolicyRekorReference("uuid-allow", "https://rekor.example/entries/uuid-allow", true), - new ReadOnlyDictionary(new Dictionary(StringComparer.Ordinal) - { - ["source"] = "baseline", - ["quieted"] = false, - ["confidence"] = 0.97, - ["confidenceBand"] = "high" - })), - ["sha256:bbb"] = new RuntimePolicyImageDecision( - "block", - false, - false, - 
Array.AsReadOnly(new[] { "missing attestation" }), - new RuntimePolicyRekorReference("uuid-block", "https://rekor.example/entries/uuid-block", false), - new ReadOnlyDictionary(new Dictionary(StringComparer.Ordinal) - { - ["source"] = "policy", - ["quieted"] = false, - ["confidence"] = 0.12, - ["confidenceBand"] = "low" - })), - ["sha256:ccc"] = new RuntimePolicyImageDecision( - "audit", - true, - false, - Array.AsReadOnly(new[] { "pending sbom sync" }), - new RuntimePolicyRekorReference(null, null, null), - new ReadOnlyDictionary(new Dictionary(StringComparer.Ordinal) - { - ["source"] = "mirror", - ["quieted"] = true, - ["quietedBy"] = "allow-temporary", - ["confidence"] = 0.42, - ["confidenceBand"] = "medium" - })) - }; - - backend.RuntimePolicyResult = new RuntimePolicyEvaluationResult( - 300, - DateTimeOffset.Parse("2025-10-19T12:00:00Z", CultureInfo.InvariantCulture), - "rev-42", - new ReadOnlyDictionary(decisions)); - - var provider = BuildServiceProvider(backend); - - try - { - await CommandHandlers.HandleRuntimePolicyTestAsync( - provider, - namespaceValue: "prod", - imageArguments: new[] { "sha256:aaa", "sha256:bbb" }, - filePath: null, - labelArguments: new[] { "app=frontend" }, - outputJson: false, - verbose: false, - cancellationToken: CancellationToken.None); - - var output = console.Output; - - Assert.Equal(0, Environment.ExitCode); - Assert.Contains("Image", output, StringComparison.Ordinal); - Assert.Contains("Verdict", output, StringComparison.Ordinal); - Assert.Contains("SBOM Ref", output, StringComparison.Ordinal); - Assert.Contains("Quieted", output, StringComparison.Ordinal); - Assert.Contains("Confidence", output, StringComparison.Ordinal); - Assert.Contains("sha256:aaa", output, StringComparison.Ordinal); - Assert.Contains("uuid-allow", output, StringComparison.Ordinal); - Assert.Contains("(verified)", output, StringComparison.Ordinal); - Assert.Contains("0.97 (high)", output, StringComparison.Ordinal); - Assert.Contains("sha256:bbb", output, StringComparison.Ordinal); - Assert.Contains("uuid-block", output, StringComparison.Ordinal); - Assert.Contains("(unverified)", output, StringComparison.Ordinal); - Assert.Contains("sha256:ccc", output, StringComparison.Ordinal); - Assert.Contains("yes", output, StringComparison.Ordinal); - Assert.Contains("allow-temporary", output, StringComparison.Ordinal); - Assert.True( - output.IndexOf("sha256:aaa", StringComparison.Ordinal) < - output.IndexOf("sha256:ccc", StringComparison.Ordinal)); - } - finally - { - Environment.ExitCode = originalExit; - AnsiConsole.Console = originalConsole; - } - } - - [Fact] - public async Task HandleRuntimePolicyTestAsync_WritesDeterministicJson() - { - var originalExit = Environment.ExitCode; - var originalOut = Console.Out; - - var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)); - - var decisions = new Dictionary(StringComparer.Ordinal) - { - ["sha256:json-a"] = new RuntimePolicyImageDecision( - "allow", - true, - true, - Array.AsReadOnly(new[] { "baseline allow" }), - new RuntimePolicyRekorReference("uuid-json-allow", "https://rekor.example/entries/uuid-json-allow", true), - new ReadOnlyDictionary(new Dictionary(StringComparer.Ordinal) - { - ["source"] = "baseline", - ["confidence"] = 0.66 - })), - ["sha256:json-b"] = new RuntimePolicyImageDecision( - "audit", - true, - false, - Array.AsReadOnly(Array.Empty()), - new RuntimePolicyRekorReference(null, null, null), - new ReadOnlyDictionary(new Dictionary(StringComparer.Ordinal) - { - ["source"] = "mirror", - 
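// Note: the dictionary supplied as the final RuntimePolicyImageDecision argument carries
// provenance and quieting metadata. This JSON test asserts that keys such as "source",
// "confidence", "quieted", and "quietedBy" surface as properties on each result object, while the
// interactive-table test above folds "confidence"/"confidenceBand" into the Confidence column and
// "quieted"/"quietedBy" into the Quieted column.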
["quieted"] = true, - ["quietedBy"] = "risk-accepted" - })) - }; - - backend.RuntimePolicyResult = new RuntimePolicyEvaluationResult( - 600, - DateTimeOffset.Parse("2025-10-20T00:00:00Z", CultureInfo.InvariantCulture), - "rev-json-7", - new ReadOnlyDictionary(decisions)); - - var provider = BuildServiceProvider(backend); - - using var writer = new StringWriter(); - Console.SetOut(writer); - - try - { - await CommandHandlers.HandleRuntimePolicyTestAsync( - provider, - namespaceValue: "staging", - imageArguments: new[] { "sha256:json-a", "sha256:json-b" }, - filePath: null, - labelArguments: Array.Empty(), - outputJson: true, - verbose: false, - cancellationToken: CancellationToken.None); - - var output = writer.ToString().Trim(); - - Assert.Equal(0, Environment.ExitCode); - Assert.False(string.IsNullOrWhiteSpace(output)); - - using var document = JsonDocument.Parse(output); - var root = document.RootElement; - - Assert.Equal(600, root.GetProperty("ttlSeconds").GetInt32()); - Assert.Equal("rev-json-7", root.GetProperty("policyRevision").GetString()); - var expiresAt = root.GetProperty("expiresAtUtc").GetString(); - Assert.NotNull(expiresAt); - Assert.Equal( - DateTimeOffset.Parse("2025-10-20T00:00:00Z", CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal), - DateTimeOffset.Parse(expiresAt!, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal)); - - var results = root.GetProperty("results"); - var keys = results.EnumerateObject().Select(p => p.Name).ToArray(); - Assert.Equal(new[] { "sha256:json-a", "sha256:json-b" }, keys); - - var first = results.GetProperty("sha256:json-a"); - Assert.Equal("allow", first.GetProperty("policyVerdict").GetString()); - Assert.True(first.GetProperty("signed").GetBoolean()); - Assert.True(first.GetProperty("hasSbomReferrers").GetBoolean()); - var rekor = first.GetProperty("rekor"); - Assert.Equal("uuid-json-allow", rekor.GetProperty("uuid").GetString()); - Assert.True(rekor.GetProperty("verified").GetBoolean()); - Assert.Equal("baseline", first.GetProperty("source").GetString()); - Assert.Equal(0.66, first.GetProperty("confidence").GetDouble(), 3); - - var second = results.GetProperty("sha256:json-b"); - Assert.Equal("audit", second.GetProperty("policyVerdict").GetString()); - Assert.True(second.GetProperty("signed").GetBoolean()); - Assert.False(second.GetProperty("hasSbomReferrers").GetBoolean()); - Assert.Equal("mirror", second.GetProperty("source").GetString()); - Assert.True(second.GetProperty("quieted").GetBoolean()); - Assert.Equal("risk-accepted", second.GetProperty("quietedBy").GetString()); - Assert.False(second.TryGetProperty("rekor", out _)); - } - finally - { - Console.SetOut(originalOut); - Environment.ExitCode = originalExit; - } - } - - [Fact] - public async Task HandlePolicyFindingsListAsync_WritesInteractiveTable() - { - var originalExit = Environment.ExitCode; - var originalConsole = AnsiConsole.Console; - - var console = new TestConsole(); - console.Interactive(); - console.EmitAnsiSequences(); - console.Width(140); - AnsiConsole.Console = console; - - var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)) - { - FindingsPage = new PolicyFindingsPage( - new[] - { - new PolicyFindingDocument( - "P-7:S-42:pkg:npm/lodash@4.17.21:CVE-2021-23337", - "affected", - new PolicyFindingSeverity("High", 7.5), - "sbom:S-42", - new[] { "CVE-2021-23337", "GHSA-xxxx-yyyy" }, - new PolicyFindingVexMetadata("VendorX-123", "vendor-x", "not_affected"), - 4, - DateTimeOffset.Parse("2025-10-26T14:06:01Z", 
CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal), - "run:P-7:2025-10-26:auto") - }, - "cursor-42", - 10) - }; - var provider = BuildServiceProvider(backend); - - try - { - await CommandHandlers.HandlePolicyFindingsListAsync( - provider, - " P-7 ", - new[] { " sbom:S-42 " }, - new[] { "Affected", "QUIETED" }, - new[] { "High", "Critical" }, - "2025-10-25T00:00:00Z", - " cursor-0 ", - page: 2, - pageSize: 100, - format: "table", - outputPath: null, - verbose: false, - cancellationToken: CancellationToken.None); - - Assert.Equal(0, Environment.ExitCode); - Assert.NotNull(backend.LastFindingsQuery); - var query = backend.LastFindingsQuery!; - Assert.Equal("P-7", query.PolicyId); - Assert.Contains("sbom:S-42", query.SbomIds); - Assert.Contains("affected", query.Statuses); - Assert.Contains("quieted", query.Statuses); - Assert.Contains("High", query.Severities); - Assert.Contains("Critical", query.Severities); - Assert.Equal(2, query.Page); - Assert.Equal(100, query.PageSize); - Assert.Equal("cursor-0", query.Cursor); - Assert.Equal(DateTimeOffset.Parse("2025-10-25T00:00:00Z", CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal), query.Since); - - var output = console.Output; - Assert.Contains("P-7:S-42", output, StringComparison.Ordinal); - Assert.Contains("High", output, StringComparison.Ordinal); - } - finally - { - AnsiConsole.Console = originalConsole; - Environment.ExitCode = originalExit; - } - } - - [Fact] - public async Task HandlePolicyFindingsListAsync_WritesJson() - { - var originalExit = Environment.ExitCode; - var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)) - { - FindingsPage = new PolicyFindingsPage( - new[] - { - new PolicyFindingDocument( - "finding-1", - "quieted", - new PolicyFindingSeverity("Medium", 5.1), - "sbom:S-99", - Array.Empty(), - null, - 3, - DateTimeOffset.MinValue, - null) - }, - null, - null) - }; - var provider = BuildServiceProvider(backend); - using var writer = new StringWriter(); - var originalOut = Console.Out; - Console.SetOut(writer); - - try - { - await CommandHandlers.HandlePolicyFindingsListAsync( - provider, - "P-9", - Array.Empty(), - Array.Empty(), - Array.Empty(), - null, - null, - page: null, - pageSize: null, - format: "json", - outputPath: null, - verbose: false, - cancellationToken: CancellationToken.None); - - Assert.Equal(0, Environment.ExitCode); - using var document = JsonDocument.Parse(writer.ToString()); - var root = document.RootElement; - Assert.Equal("P-9", root.GetProperty("policyId").GetString()); - var items = root.GetProperty("items"); - Assert.Equal(1, items.GetArrayLength()); - var first = items[0]; - Assert.Equal("finding-1", first.GetProperty("findingId").GetString()); - Assert.Equal("quieted", first.GetProperty("status").GetString()); - Assert.Equal("Medium", first.GetProperty("severity").GetProperty("normalized").GetString()); - } - finally - { - Console.SetOut(originalOut); - Environment.ExitCode = originalExit; - } - } - - [Fact] - public async Task HandlePolicyFindingsGetAsync_WritesInteractiveTable() - { - var originalExit = Environment.ExitCode; - var originalConsole = AnsiConsole.Console; - - var console = new TestConsole(); - console.Interactive(); - console.EmitAnsiSequences(); - console.Width(120); - AnsiConsole.Console = console; - - var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)) - { - FindingDocument = new PolicyFindingDocument( - "P-9:S-1:pkg:npm/leftpad@1.0.0:CVE-1111", - "affected", - new PolicyFindingSeverity("Critical", 
9.1), - "sbom:S-1", - new[] { "CVE-1111" }, - new PolicyFindingVexMetadata("VendorY-9", null, "affected"), - 7, - DateTimeOffset.Parse("2025-10-26T12:34:56Z", CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal), - "run:P-9:1234") - }; - var provider = BuildServiceProvider(backend); - - try - { - await CommandHandlers.HandlePolicyFindingsGetAsync( - provider, - "P-9", - "P-9:S-1:pkg:npm/leftpad@1.0.0:CVE-1111", - format: "table", - outputPath: null, - verbose: false, - cancellationToken: CancellationToken.None); - - Assert.Equal(0, Environment.ExitCode); - Assert.Equal(("P-9", "P-9:S-1:pkg:npm/leftpad@1.0.0:CVE-1111"), backend.LastFindingGet); - var output = console.Output; - Assert.Contains("Critical", output); - Assert.Contains("run:P-9:1234", output); - } - finally - { - AnsiConsole.Console = originalConsole; - Environment.ExitCode = originalExit; - } - } - - [Fact] - public async Task HandlePolicyFindingsExplainAsync_WritesInteractiveTable() - { - var originalExit = Environment.ExitCode; - var originalConsole = AnsiConsole.Console; - - var console = new TestConsole(); - console.Interactive(); - console.EmitAnsiSequences(); - console.Width(140); - AnsiConsole.Console = console; - - var steps = new[] - { - new PolicyFindingExplainStep( - "rule-block-critical", - "blocked", - "block", - 9.1, - new ReadOnlyDictionary(new Dictionary - { - ["severity"] = "Critical", - ["sealed"] = "false" - }), - new ReadOnlyDictionary(new Dictionary - { - ["vex"] = "VendorY-9" - })) - }; - var hints = new[] - { - new PolicyFindingExplainHint("Using cached EPSS percentile from bundle 2025-10-20") - }; - - var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)) - { - ExplainResult = new PolicyFindingExplainResult( - "P-9:S-1:pkg:npm/leftpad@1.0.0:CVE-1111", - 7, - new ReadOnlyCollection(steps), - new ReadOnlyCollection(hints)) - }; - var provider = BuildServiceProvider(backend); - - try - { - await CommandHandlers.HandlePolicyFindingsExplainAsync( - provider, - "P-9", - "P-9:S-1:pkg:npm/leftpad@1.0.0:CVE-1111", - mode: "verbose", - format: "table", - outputPath: null, - verbose: false, - cancellationToken: CancellationToken.None); - - Assert.Equal(0, Environment.ExitCode); - Assert.Equal(("P-9", "P-9:S-1:pkg:npm/leftpad@1.0.0:CVE-1111", "verbose"), backend.LastFindingExplain); - var output = console.Output; - Assert.Contains("rule-block-critical", output); - Assert.Contains("EPSS percentile", output); - } - finally - { - AnsiConsole.Console = originalConsole; - Environment.ExitCode = originalExit; - } - } - - [Fact] - public async Task HandlePolicySimulateAsync_WritesInteractiveSummary() - { - var originalExit = Environment.ExitCode; - var originalConsole = AnsiConsole.Console; - - var console = new TestConsole(); - console.Width(120); - console.Interactive(); - console.EmitAnsiSequences(); - AnsiConsole.Console = console; - - var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)); - - var severity = new ReadOnlyDictionary(new Dictionary(StringComparer.Ordinal) - { - ["critical"] = new PolicySimulationSeverityDelta(1, null), - ["high"] = new PolicySimulationSeverityDelta(null, 2) - }); - var ruleHits = new ReadOnlyCollection(new List - { - new("rule-block-critical", "Block Critical", 1, 0), - new("rule-quiet-low", "Quiet Low", null, 2) - }); - - backend.SimulationResult = new PolicySimulationResult( - new PolicySimulationDiff( - "scheduler.policy-diff-summary@1", - 2, - 1, - 10, - severity, - ruleHits), - "blob://policy/P-7/simulation.json"); - - 
var provider = BuildServiceProvider(backend); - - try - { - await CommandHandlers.HandlePolicySimulateAsync( - provider, - policyId: "P-7", - baseVersion: 3, - candidateVersion: 4, - sbomArguments: new[] { "sbom:A", "sbom:B" }, - environmentArguments: new[] { "sealed=false", "exposure=internet" }, - format: "table", - outputPath: null, - explain: true, - failOnDiff: false, - verbose: false, - cancellationToken: CancellationToken.None); - - Assert.Equal(0, Environment.ExitCode); - Assert.NotNull(backend.LastPolicySimulation); - var simulation = backend.LastPolicySimulation!.Value; - Assert.Equal("P-7", simulation.PolicyId); - Assert.Equal(3, simulation.Input.BaseVersion); - Assert.Equal(4, simulation.Input.CandidateVersion); - Assert.True(simulation.Input.Explain); - Assert.Equal(new[] { "sbom:A", "sbom:B" }, simulation.Input.SbomSet); - Assert.True(simulation.Input.Environment.TryGetValue("sealed", out var sealedValue) && sealedValue is bool sealedFlag && sealedFlag == false); - Assert.True(simulation.Input.Environment.TryGetValue("exposure", out var exposureValue) && string.Equals(exposureValue as string, "internet", StringComparison.Ordinal)); - - var output = console.Output; - Assert.Contains("Severity", output, StringComparison.Ordinal); - Assert.Contains("critical", output, StringComparison.OrdinalIgnoreCase); - Assert.Contains("Rule", output, StringComparison.Ordinal); - Assert.Contains("Block Critical", output, StringComparison.Ordinal); - } - finally - { - Environment.ExitCode = originalExit; - AnsiConsole.Console = originalConsole; - } - } - - [Fact] - public async Task HandlePolicySimulateAsync_WritesJsonOutput() - { - var originalExit = Environment.ExitCode; - var originalOut = Console.Out; - - var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)); - backend.SimulationResult = new PolicySimulationResult( - new PolicySimulationDiff( - "scheduler.policy-diff-summary@1", - 0, - 0, - 5, - new ReadOnlyDictionary(new Dictionary(0, StringComparer.Ordinal)), - new ReadOnlyCollection(Array.Empty())), - null); - - var provider = BuildServiceProvider(backend); - - using var writer = new StringWriter(); - Console.SetOut(writer); - - try - { - await CommandHandlers.HandlePolicySimulateAsync( - provider, - policyId: "P-9", - baseVersion: null, - candidateVersion: 5, - sbomArguments: Array.Empty(), - environmentArguments: new[] { "sealed=true", "threshold=0.8" }, - format: "json", - outputPath: null, - explain: false, - failOnDiff: false, - verbose: false, - cancellationToken: CancellationToken.None); - - Assert.Equal(0, Environment.ExitCode); - using var document = JsonDocument.Parse(writer.ToString()); - var root = document.RootElement; - Assert.Equal("P-9", root.GetProperty("policyId").GetString()); - Assert.Equal(5, root.GetProperty("candidateVersion").GetInt32()); - Assert.True(root.TryGetProperty("environment", out var envElement) && envElement.TryGetProperty("sealed", out var sealedElement) && sealedElement.GetBoolean()); - Assert.True(envElement.TryGetProperty("threshold", out var thresholdElement) && Math.Abs(thresholdElement.GetDouble() - 0.8) < 0.0001); - } - finally - { - Console.SetOut(originalOut); - Environment.ExitCode = originalExit; - } - } - - [Fact] - public async Task HandlePolicySimulateAsync_FailOnDiffSetsExitCode20() - { - var originalExit = Environment.ExitCode; - - var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)); - backend.SimulationResult = new PolicySimulationResult( - new PolicySimulationDiff( - null, - 
1, - 0, - 0, - new ReadOnlyDictionary(new Dictionary(0, StringComparer.Ordinal)), - new ReadOnlyCollection(Array.Empty())), - null); - - var provider = BuildServiceProvider(backend); - - try - { - await CommandHandlers.HandlePolicySimulateAsync( - provider, - policyId: "P-11", - baseVersion: null, - candidateVersion: null, - sbomArguments: Array.Empty(), - environmentArguments: Array.Empty(), - format: "json", - outputPath: null, - explain: false, - failOnDiff: true, - verbose: false, - cancellationToken: CancellationToken.None); - - Assert.Equal(20, Environment.ExitCode); - } - finally - { - Environment.ExitCode = originalExit; - } - } - - [Fact] - public async Task HandlePolicySimulateAsync_MapsErrorCodes() - { - var originalExit = Environment.ExitCode; - var originalOut = Console.Out; - - var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)) - { - SimulationException = new PolicyApiException("Missing inputs", HttpStatusCode.BadRequest, "ERR_POL_003") - }; - var provider = BuildServiceProvider(backend); - - using var writer = new StringWriter(); - Console.SetOut(writer); - - try - { - await CommandHandlers.HandlePolicySimulateAsync( - provider, - policyId: "P-12", - baseVersion: null, - candidateVersion: null, - sbomArguments: Array.Empty(), - environmentArguments: Array.Empty(), - format: "json", - outputPath: null, - explain: false, - failOnDiff: false, - verbose: false, - cancellationToken: CancellationToken.None); - - Assert.Equal(21, Environment.ExitCode); - } - finally - { - Console.SetOut(originalOut); - Environment.ExitCode = originalExit; - } - } - - [Fact] - public async Task HandlePolicyActivateAsync_DisplaysInteractiveSummary() - { - var originalExit = Environment.ExitCode; - var originalConsole = AnsiConsole.Console; - - var console = new TestConsole(); - console.Width(120); - console.Interactive(); - console.EmitAnsiSequences(); - AnsiConsole.Console = console; - - var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)); - backend.ActivationResult = new PolicyActivationResult( - "activated", - new PolicyActivationRevision( - "P-7", - 4, - "active", - true, - DateTimeOffset.Parse("2025-10-27T00:00:00Z", CultureInfo.InvariantCulture), - DateTimeOffset.Parse("2025-10-27T01:15:00Z", CultureInfo.InvariantCulture), - new ReadOnlyCollection(new List - { - new("user:alice", DateTimeOffset.Parse("2025-10-27T01:10:00Z", CultureInfo.InvariantCulture), "Primary"), - new("user:bob", DateTimeOffset.Parse("2025-10-27T01:12:00Z", CultureInfo.InvariantCulture), null) - }))); - - var provider = BuildServiceProvider(backend); - - try - { - await CommandHandlers.HandlePolicyActivateAsync( - provider, - policyId: "P-7", - version: 4, - note: "Rolling forward", - runNow: true, - scheduledAt: null, - priority: "high", - rollback: false, - incidentId: "INC-204", - verbose: false, - cancellationToken: CancellationToken.None); - - Assert.Equal(0, Environment.ExitCode); - Assert.NotNull(backend.LastPolicyActivation); - var activation = backend.LastPolicyActivation!.Value; - Assert.Equal("P-7", activation.PolicyId); - Assert.Equal(4, activation.Version); - Assert.True(activation.Request.RunNow); - Assert.Null(activation.Request.ScheduledAt); - Assert.Equal("high", activation.Request.Priority); - Assert.Equal("INC-204", activation.Request.IncidentId); - Assert.Equal("Rolling forward", activation.Request.Comment); - - var output = console.Output; - Assert.Contains("activated", output, StringComparison.OrdinalIgnoreCase); - 
Assert.Contains("user:alice", output, StringComparison.Ordinal); - Assert.Contains("Rolling forward", output, StringComparison.Ordinal); - } - finally - { - Environment.ExitCode = originalExit; - AnsiConsole.Console = originalConsole; - } - } - - [Fact] - public async Task HandlePolicyActivateAsync_PendingSecondApprovalSetsExitCode() - { - var originalExit = Environment.ExitCode; - - var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)); - backend.ActivationResult = new PolicyActivationResult( - "pending_second_approval", - new PolicyActivationRevision( - "P-7", - 4, - "approved", - true, - DateTimeOffset.UtcNow, - null, - new ReadOnlyCollection(new List - { - new("user:alice", DateTimeOffset.UtcNow, "Primary") - }))); - - var provider = BuildServiceProvider(backend); - - try - { - await CommandHandlers.HandlePolicyActivateAsync( - provider, - policyId: "P-7", - version: 4, - note: null, - runNow: false, - scheduledAt: null, - priority: null, - rollback: false, - incidentId: null, - verbose: false, - cancellationToken: CancellationToken.None); - - Assert.Equal(75, Environment.ExitCode); - } - finally - { - Environment.ExitCode = originalExit; - } - } - - [Fact] - public async Task HandlePolicyActivateAsync_MapsErrorCodes() - { - var originalExit = Environment.ExitCode; - - var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)) - { - ActivationException = new PolicyApiException("Revision not approved", HttpStatusCode.BadRequest, "ERR_POL_002") - }; - - var provider = BuildServiceProvider(backend); - - try - { - await CommandHandlers.HandlePolicyActivateAsync( - provider, - policyId: "P-9", - version: 2, - note: null, - runNow: false, - scheduledAt: null, - priority: null, - rollback: false, - incidentId: null, - verbose: false, - cancellationToken: CancellationToken.None); - - Assert.Equal(70, Environment.ExitCode); - } - finally - { - Environment.ExitCode = originalExit; - } - } - - private static async Task WriteRevocationArtifactsAsync(TempDirectory temp, string? providerHint) - { - var (bundleBytes, signature, keyPem) = await BuildRevocationArtifactsAsync(providerHint); - - var bundlePath = Path.Combine(temp.Path, "revocation-bundle.json"); - var signaturePath = Path.Combine(temp.Path, "revocation-bundle.json.jws"); - var keyPath = Path.Combine(temp.Path, "revocation-key.pem"); - - await File.WriteAllBytesAsync(bundlePath, bundleBytes); - await File.WriteAllTextAsync(signaturePath, signature); - await File.WriteAllTextAsync(keyPath, keyPem); - - return new RevocationArtifactPaths(bundlePath, signaturePath, keyPath); - } - - private static async Task<(byte[] Bundle, string Signature, string KeyPem)> BuildRevocationArtifactsAsync(string? 
providerHint) - { - var bundleBytes = Encoding.UTF8.GetBytes("{\"revocations\":[]}"); - - using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256); - var parameters = ecdsa.ExportParameters(includePrivateParameters: true); - - var signingKey = new CryptoSigningKey( - new CryptoKeyReference("revocation-test"), - SignatureAlgorithms.Es256, - privateParameters: in parameters, - createdAt: DateTimeOffset.UtcNow); - - var provider = new DefaultCryptoProvider(); - provider.UpsertSigningKey(signingKey); - var signer = provider.GetSigner(SignatureAlgorithms.Es256, signingKey.Reference); - - var header = new Dictionary - { - ["alg"] = SignatureAlgorithms.Es256, - ["kid"] = signingKey.Reference.KeyId, - ["typ"] = "application/vnd.stellaops.revocation-bundle+jws", - ["b64"] = false, - ["crit"] = new[] { "b64" } - }; - - if (!string.IsNullOrWhiteSpace(providerHint)) - { - header["provider"] = providerHint; - } - - var serializerOptions = new JsonSerializerOptions - { - PropertyNamingPolicy = null, - DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull - }; - - var headerJson = JsonSerializer.Serialize(header, serializerOptions); - var encodedHeader = Base64UrlEncoder.Encode(Encoding.UTF8.GetBytes(headerJson)); - - var signingInput = new byte[encodedHeader.Length + 1 + bundleBytes.Length]; - var headerBytes = Encoding.ASCII.GetBytes(encodedHeader); - Buffer.BlockCopy(headerBytes, 0, signingInput, 0, headerBytes.Length); - signingInput[headerBytes.Length] = (byte)'.'; - Buffer.BlockCopy(bundleBytes, 0, signingInput, headerBytes.Length + 1, bundleBytes.Length); - - var signatureBytes = await signer.SignAsync(signingInput); - var encodedSignature = Base64UrlEncoder.Encode(signatureBytes); - var jws = string.Concat(encodedHeader, "..", encodedSignature); - - var publicKeyBytes = ecdsa.ExportSubjectPublicKeyInfo(); - var keyPem = new string(PemEncoding.Write("PUBLIC KEY", publicKeyBytes)); - - return (bundleBytes, jws, keyPem); - } - - private sealed record RevocationArtifactPaths(string BundlePath, string SignaturePath, string KeyPath); - - [Fact] - public async Task HandleSourcesIngestAsync_NoViolations_WritesJsonReport() - { - var originalExitCode = Environment.ExitCode; - var originalTenant = Environment.GetEnvironmentVariable("STELLA_TENANT"); - using var tempDir = new TempDirectory(); - - var originalConsole = AnsiConsole.Console; - var console = new TestConsole(); - var originalOut = Console.Out; - using var writer = new StringWriter(); - - try - { - Environment.SetEnvironmentVariable("STELLA_TENANT", "tenant-alpha"); - AnsiConsole.Console = console; - Console.SetOut(writer); - - var inputPath = Path.Combine(tempDir.Path, "payload.json"); - await File.WriteAllTextAsync(inputPath, "{ \"id\": 1 }"); - - var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)) - { - DryRunResponse = new AocIngestDryRunResponse - { - Source = "redhat", - Tenant = "tenant-alpha", - Status = "ok", - Document = new AocIngestDryRunDocumentResult - { - ContentHash = "sha256:test" - }, - Violations = Array.Empty() - } - }; - - var provider = BuildServiceProvider(backend); - var outputPath = Path.Combine(tempDir.Path, "dry-run.json"); - - await CommandHandlers.HandleSourcesIngestAsync( - provider, - dryRun: true, - source: "RedHat", - input: inputPath, - tenantOverride: null, - format: "json", - disableColor: true, - output: outputPath, - verbose: false, - cancellationToken: CancellationToken.None); - - Assert.Equal(0, Environment.ExitCode); - Assert.True(File.Exists(outputPath)); - - 
Assert.NotNull(backend.LastDryRunRequest); - var request = backend.LastDryRunRequest!; - Assert.Equal("tenant-alpha", request.Tenant); - Assert.Equal("RedHat", request.Source); - Assert.Equal("payload.json", request.Document.Name); - Assert.Equal("application/json", request.Document.ContentType); - Assert.Null(request.Document.ContentEncoding); - using (var document = JsonDocument.Parse(request.Document.Content)) - { - Assert.Equal(1, document.RootElement.GetProperty("id").GetInt32()); - } - - var consoleJson = writer.ToString(); - Assert.Contains("\"status\": \"ok\"", consoleJson); - } - finally - { - Environment.ExitCode = originalExitCode; - Environment.SetEnvironmentVariable("STELLA_TENANT", originalTenant); - AnsiConsole.Console = originalConsole; - Console.SetOut(originalOut); - } - } - - [Fact] - public async Task HandleSourcesIngestAsync_ViolationMapsExitCode() - { - var originalExitCode = Environment.ExitCode; - var originalTenant = Environment.GetEnvironmentVariable("STELLA_TENANT"); - using var tempDir = new TempDirectory(); - - var originalConsole = AnsiConsole.Console; - var console = new TestConsole(); - - try - { - Environment.SetEnvironmentVariable("STELLA_TENANT", "tenant-beta"); - AnsiConsole.Console = console; - - var inputPath = Path.Combine(tempDir.Path, "payload.json"); - await File.WriteAllTextAsync(inputPath, "{ \"id\": 2 }"); - - var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)) - { - DryRunResponse = new AocIngestDryRunResponse - { - Status = "error", - Violations = new[] - { - new AocIngestDryRunViolation - { - Code = "ERR_AOC_002", - Message = "merge detected", - Path = "/content/derived" - } - } - } - }; - - var provider = BuildServiceProvider(backend); - - await CommandHandlers.HandleSourcesIngestAsync( - provider, - dryRun: true, - source: "osv", - input: inputPath, - tenantOverride: null, - format: "table", - disableColor: true, - output: null, - verbose: false, - cancellationToken: CancellationToken.None); - - Assert.Equal(12, Environment.ExitCode); - var output = console.Output; - Assert.Contains("ERR_AOC_002", output); - Assert.Contains("/content/derived", output); - } - finally - { - Environment.ExitCode = originalExitCode; - Environment.SetEnvironmentVariable("STELLA_TENANT", originalTenant); - AnsiConsole.Console = originalConsole; - } - } - - [Fact] - public async Task HandleSourcesIngestAsync_MissingTenant_ReturnsUsageError() - { - var originalExitCode = Environment.ExitCode; - var originalTenant = Environment.GetEnvironmentVariable("STELLA_TENANT"); - using var tempDir = new TempDirectory(); - - try - { - Environment.SetEnvironmentVariable("STELLA_TENANT", null); - - var inputPath = Path.Combine(tempDir.Path, "payload.json"); - await File.WriteAllTextAsync(inputPath, "{ \"id\": 3 }"); - - var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)); - var provider = BuildServiceProvider(backend); - - await CommandHandlers.HandleSourcesIngestAsync( - provider, - dryRun: true, - source: "osv", - input: inputPath, - tenantOverride: null, - format: "table", - disableColor: true, - output: null, - verbose: false, - cancellationToken: CancellationToken.None); - - Assert.Equal(70, Environment.ExitCode); - } - finally - { - Environment.ExitCode = originalExitCode; - Environment.SetEnvironmentVariable("STELLA_TENANT", originalTenant); - } - } - - [Fact] - public async Task HandleAocVerifyAsync_NoViolations_WritesReportAndReturnsZero() - { - var originalExitCode = Environment.ExitCode; - var originalTenant 
= Environment.GetEnvironmentVariable("STELLA_TENANT"); - using var tempDir = new TempDirectory(); - - var originalConsole = AnsiConsole.Console; - var console = new TestConsole(); - var originalOut = Console.Out; - using var writer = new StringWriter(); - - try - { - AnsiConsole.Console = console; - Console.SetOut(writer); - Environment.SetEnvironmentVariable("STELLA_TENANT", "tenant-a"); - - var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)) - { - VerifyResponse = new AocVerifyResponse - { - Tenant = "tenant-a", - Window = new AocVerifyWindow - { - From = DateTimeOffset.Parse("2025-10-25T12:00:00Z", CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal), - To = DateTimeOffset.Parse("2025-10-26T12:00:00Z", CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal) - }, - Checked = new AocVerifyChecked { Advisories = 4, Vex = 1 }, - Metrics = new AocVerifyMetrics { IngestionWriteTotal = 5, AocViolationTotal = 0 }, - Violations = Array.Empty(), - Truncated = false - } - }; - - var provider = BuildServiceProvider(backend); - var exportPath = Path.Combine(tempDir.Path, "verify.json"); - - await CommandHandlers.HandleAocVerifyAsync( - provider, - sinceOption: "2025-10-25T12:00:00Z", - limitOption: 10, - sourcesOption: "RedHat,Ubuntu", - codesOption: "err_aoc_001", - format: "json", - exportPath: exportPath, - tenantOverride: null, - disableColor: true, - verbose: false, - cancellationToken: CancellationToken.None); - - Assert.Equal(0, Environment.ExitCode); - Assert.True(File.Exists(exportPath)); - - Assert.NotNull(backend.LastVerifyRequest); - Assert.Equal("tenant-a", backend.LastVerifyRequest!.Tenant); - var expectedSince = DateTimeOffset.Parse("2025-10-25T12:00:00Z", CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal); - var actualSince = DateTimeOffset.Parse(backend.LastVerifyRequest.Since!, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal); - Assert.Equal(expectedSince, actualSince); - Assert.Equal(10, backend.LastVerifyRequest.Limit); - Assert.Equal(new[] { "redhat", "ubuntu" }, backend.LastVerifyRequest.Sources); - Assert.Equal(new[] { "ERR_AOC_001" }, backend.LastVerifyRequest.Codes); - - var jsonOutput = writer.ToString(); - Assert.Contains("\"tenant\": \"tenant-a\"", jsonOutput); - Assert.Contains("\"ingestion_write_total\": 5", jsonOutput); - } - finally - { - Environment.ExitCode = originalExitCode; - Environment.SetEnvironmentVariable("STELLA_TENANT", originalTenant); - Console.SetOut(originalOut); - AnsiConsole.Console = originalConsole; - } - } - - [Fact] - public async Task HandleAocVerifyAsync_WithViolations_MapsExitCode() - { - var originalExitCode = Environment.ExitCode; - var originalTenant = Environment.GetEnvironmentVariable("STELLA_TENANT"); - - var originalConsole = AnsiConsole.Console; - var console = new TestConsole(); - - try - { - AnsiConsole.Console = console; - Environment.SetEnvironmentVariable("STELLA_TENANT", "tenant-b"); - - var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)) - { - VerifyResponse = new AocVerifyResponse - { - Violations = new[] - { - new AocVerifyViolation - { - Code = "ERR_AOC_003", - Count = 2, - Examples = new[] - { - new AocVerifyViolationExample - { - Source = "redhat", - DocumentId = "doc-1", - Path = "/content/raw" - } - } - } - } - } - }; - - var provider = BuildServiceProvider(backend); - - var capturedBefore = DateTimeOffset.UtcNow; - - await CommandHandlers.HandleAocVerifyAsync( - provider, - sinceOption: "24h", - limitOption: null, - 
sourcesOption: null, - codesOption: null, - format: "table", - exportPath: null, - tenantOverride: null, - disableColor: true, - verbose: false, - cancellationToken: CancellationToken.None); - - Assert.Equal(13, Environment.ExitCode); - Assert.NotNull(backend.LastVerifyRequest); - Assert.Equal(20, backend.LastVerifyRequest!.Limit); - Assert.Null(backend.LastVerifyRequest.Sources); - Assert.Null(backend.LastVerifyRequest.Codes); - - var parsedSince = DateTimeOffset.Parse(backend.LastVerifyRequest.Since!, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal); - var expectedSince = capturedBefore.AddHours(-24); - Assert.InRange((expectedSince - parsedSince).Duration(), TimeSpan.Zero, TimeSpan.FromSeconds(10)); - - var output = console.Output; - Assert.Contains("ERR_AOC_003", output); - Assert.Contains("doc-1", output); - } - finally - { - Environment.ExitCode = originalExitCode; - Environment.SetEnvironmentVariable("STELLA_TENANT", originalTenant); - AnsiConsole.Console = originalConsole; - } - } - - [Fact] - public async Task HandleAocVerifyAsync_TruncatedWithoutViolations_ReturnsExitCode18() - { - var originalExitCode = Environment.ExitCode; - var originalTenant = Environment.GetEnvironmentVariable("STELLA_TENANT"); - - var originalConsole = AnsiConsole.Console; - var console = new TestConsole(); - - try - { - AnsiConsole.Console = console; - Environment.SetEnvironmentVariable("STELLA_TENANT", "tenant-c"); - - var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)) - { - VerifyResponse = new AocVerifyResponse - { - Violations = Array.Empty(), - Truncated = true - } - }; - - var provider = BuildServiceProvider(backend); - - await CommandHandlers.HandleAocVerifyAsync( - provider, - sinceOption: "2025-01-01T00:00:00Z", - limitOption: 0, - sourcesOption: null, - codesOption: null, - format: "table", - exportPath: null, - tenantOverride: null, - disableColor: true, - verbose: false, - cancellationToken: CancellationToken.None); - - Assert.Equal(18, Environment.ExitCode); - - var output = console.Output; - Assert.Contains("Truncated", output); - } - finally - { - Environment.ExitCode = originalExitCode; - Environment.SetEnvironmentVariable("STELLA_TENANT", originalTenant); - AnsiConsole.Console = originalConsole; - } - } - - [Fact] - public async Task HandleAocVerifyAsync_MissingTenant_ReturnsUsageError() - { - var originalExitCode = Environment.ExitCode; - var originalTenant = Environment.GetEnvironmentVariable("STELLA_TENANT"); - - try - { - Environment.SetEnvironmentVariable("STELLA_TENANT", null); - - var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)); - var provider = BuildServiceProvider(backend); - - await CommandHandlers.HandleAocVerifyAsync( - provider, - sinceOption: "24h", - limitOption: null, - sourcesOption: null, - codesOption: null, - format: "table", - exportPath: null, - tenantOverride: null, - disableColor: true, - verbose: false, - cancellationToken: CancellationToken.None); - - Assert.Equal(71, Environment.ExitCode); - } - finally - { - Environment.ExitCode = originalExitCode; - Environment.SetEnvironmentVariable("STELLA_TENANT", originalTenant); - } - } - - private static IServiceProvider BuildServiceProvider( - IBackendOperationsClient backend, - IScannerExecutor? executor = null, - IScannerInstaller? installer = null, - StellaOpsCliOptions? options = null, - IStellaOpsTokenClient? tokenClient = null, - IConcelierObservationsClient? 
concelierClient = null) - { - var services = new ServiceCollection(); - services.AddSingleton(backend); - services.AddSingleton(_ => LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug))); - services.AddSingleton(new VerbosityState()); - var resolvedOptions = options ?? new StellaOpsCliOptions - { - ResultsDirectory = Path.Combine(Path.GetTempPath(), $"stellaops-cli-results-{Guid.NewGuid():N}") - }; - services.AddSingleton(resolvedOptions); - - var resolvedExecutor = executor ?? CreateDefaultExecutor(); - services.AddSingleton(resolvedExecutor); - services.AddSingleton(installer ?? new StubInstaller()); - - if (tokenClient is not null) - { - services.AddSingleton(tokenClient); - } - - services.AddSingleton( - concelierClient ?? new StubConcelierObservationsClient()); - - return services.BuildServiceProvider(); - } - - private static IScannerExecutor CreateDefaultExecutor() - { - var tempResultsFile = Path.GetTempFileName(); - var tempMetadataFile = Path.Combine( - Path.GetDirectoryName(tempResultsFile)!, - $"{Path.GetFileNameWithoutExtension(tempResultsFile)}-run.json"); - return new StubExecutor(new ScannerExecutionResult(0, tempResultsFile, tempMetadataFile)); - } - - private sealed class StubBackendClient : IBackendOperationsClient - { - private readonly JobTriggerResult _jobResult; - private static readonly RuntimePolicyEvaluationResult DefaultRuntimePolicyResult = - new RuntimePolicyEvaluationResult( - 0, - null, - null, - new ReadOnlyDictionary( - new Dictionary())); - - public StubBackendClient(JobTriggerResult result) - { - _jobResult = result; - } - - public string? LastJobKind { get; private set; } - public string? LastUploadPath { get; private set; } - public string? LastExcititorRoute { get; private set; } - public HttpMethod? LastExcititorMethod { get; private set; } - public object? LastExcititorPayload { get; private set; } - public List<(string ExportId, string DestinationPath, string? Algorithm, string? Digest)> ExportDownloads { get; } = new(); - public ExcititorOperationResult? ExcititorResult { get; set; } = new ExcititorOperationResult(true, "ok", null, null); - public IReadOnlyList ProviderSummaries { get; set; } = Array.Empty(); - public RuntimePolicyEvaluationResult RuntimePolicyResult { get; set; } = DefaultRuntimePolicyResult; - public PolicySimulationResult SimulationResult { get; set; } = new PolicySimulationResult( - new PolicySimulationDiff( - null, - 0, - 0, - 0, - new ReadOnlyDictionary(new Dictionary(0, StringComparer.Ordinal)), - new ReadOnlyCollection(Array.Empty())), - null); - public PolicyApiException? SimulationException { get; set; } - public (string PolicyId, PolicySimulationInput Input)? LastPolicySimulation { get; private set; } - public PolicyActivationResult ActivationResult { get; set; } = new PolicyActivationResult( - "activated", - new PolicyActivationRevision( - "P-0", - 1, - "active", - false, - DateTimeOffset.UtcNow, - DateTimeOffset.UtcNow, - new ReadOnlyCollection(Array.Empty()))); - public PolicyApiException? ActivationException { get; set; } - public (string PolicyId, int Version, PolicyActivationRequest Request)? LastPolicyActivation { get; private set; } - public AocIngestDryRunResponse DryRunResponse { get; set; } = new(); - public Exception? DryRunException { get; set; } - public AocIngestDryRunRequest? LastDryRunRequest { get; private set; } - public AocVerifyResponse VerifyResponse { get; set; } = new(); - public Exception? VerifyException { get; set; } - public AocVerifyRequest? 
LastVerifyRequest { get; private set; } - public PolicyFindingsPage FindingsPage { get; set; } = new PolicyFindingsPage(Array.Empty(), null, null); - public PolicyFindingsQuery? LastFindingsQuery { get; private set; } - public PolicyApiException? FindingsListException { get; set; } - public PolicyFindingDocument FindingDocument { get; set; } = new PolicyFindingDocument( - "finding-default", - "affected", - new PolicyFindingSeverity("High", 7.5), - "sbom:default", - Array.Empty(), - null, - 1, - DateTimeOffset.UtcNow, - null); - public (string PolicyId, string FindingId)? LastFindingGet { get; private set; } - public PolicyApiException? FindingGetException { get; set; } - public PolicyFindingExplainResult ExplainResult { get; set; } = new PolicyFindingExplainResult( - "finding-default", - 1, - new ReadOnlyCollection(Array.Empty()), - new ReadOnlyCollection(Array.Empty())); - public (string PolicyId, string FindingId, string? Mode)? LastFindingExplain { get; private set; } - public PolicyApiException? FindingExplainException { get; set; } - - public Task DownloadScannerAsync(string channel, string outputPath, bool overwrite, bool verbose, CancellationToken cancellationToken) - => throw new NotImplementedException(); - - public Task UploadScanResultsAsync(string filePath, CancellationToken cancellationToken) - { - LastUploadPath = filePath; - return Task.CompletedTask; - } - - public Task TriggerJobAsync(string jobKind, IDictionary parameters, CancellationToken cancellationToken) - { - LastJobKind = jobKind; - return Task.FromResult(_jobResult); - } - - public Task ExecuteExcititorOperationAsync(string route, HttpMethod method, object? payload, CancellationToken cancellationToken) - { - LastExcititorRoute = route; - LastExcititorMethod = method; - LastExcititorPayload = payload; - return Task.FromResult(ExcititorResult ?? new ExcititorOperationResult(true, "ok", null, null)); - } - - public Task DownloadExcititorExportAsync(string exportId, string destinationPath, string? expectedDigestAlgorithm, string? 
expectedDigest, CancellationToken cancellationToken)
-        {
-            var fullPath = Path.GetFullPath(destinationPath);
-            var directory = Path.GetDirectoryName(fullPath);
-            if (!string.IsNullOrEmpty(directory))
-            {
-                Directory.CreateDirectory(directory);
-            }
-
-            File.WriteAllText(fullPath, "{}");
-            var info = new FileInfo(fullPath);
-            ExportDownloads.Add((exportId, fullPath, expectedDigestAlgorithm, expectedDigest));
-            return Task.FromResult(new ExcititorExportDownloadResult(fullPath, info.Length, false));
-        }
-
-        public Task> GetExcititorProvidersAsync(bool includeDisabled, CancellationToken cancellationToken)
-            => Task.FromResult(ProviderSummaries);
-
-        public Task EvaluateRuntimePolicyAsync(RuntimePolicyEvaluationRequest request, CancellationToken cancellationToken)
-            => Task.FromResult(RuntimePolicyResult);
-
-        public Task SimulatePolicyAsync(string policyId, PolicySimulationInput input, CancellationToken cancellationToken)
-        {
-            LastPolicySimulation = (policyId, input);
-            if (SimulationException is not null)
-            {
-                throw SimulationException;
-            }
-
-            return Task.FromResult(SimulationResult);
-        }
-
-        public Task ActivatePolicyRevisionAsync(string policyId, int version, PolicyActivationRequest request, CancellationToken cancellationToken)
-        {
-            LastPolicyActivation = (policyId, version, request);
-            if (ActivationException is not null)
-            {
-                throw ActivationException;
-            }
-
-            return Task.FromResult(ActivationResult);
-        }
-
-        public Task ExecuteAocIngestDryRunAsync(AocIngestDryRunRequest request, CancellationToken cancellationToken)
-        {
-            LastDryRunRequest = request;
-            if (DryRunException is not null)
-            {
-                throw DryRunException;
-            }
-
-            return Task.FromResult(DryRunResponse);
-        }
-
-        public Task ExecuteAocVerifyAsync(AocVerifyRequest request, CancellationToken cancellationToken)
-        {
-            LastVerifyRequest = request;
-            if (VerifyException is not null)
-            {
-                throw VerifyException;
-            }
-
-            return Task.FromResult(VerifyResponse);
-        }
-
-        public Task GetPolicyFindingsAsync(PolicyFindingsQuery query, CancellationToken cancellationToken)
-        {
-            LastFindingsQuery = query;
-            if (FindingsListException is not null)
-            {
-                throw FindingsListException;
-            }
-
-            return Task.FromResult(FindingsPage);
-        }
-
-        public Task GetPolicyFindingAsync(string policyId, string findingId, CancellationToken cancellationToken)
-        {
-            LastFindingGet = (policyId, findingId);
-            if (FindingGetException is not null)
-            {
-                throw FindingGetException;
-            }
-
-            return Task.FromResult(FindingDocument);
-        }
-
-        public Task GetPolicyFindingExplainAsync(string policyId, string findingId, string? mode, CancellationToken cancellationToken)
-        {
-            LastFindingExplain = (policyId, findingId, mode);
-            if (FindingExplainException is not null)
-            {
-                throw FindingExplainException;
-            }
-
-            return Task.FromResult(ExplainResult);
-        }
-
-
-        public Task DownloadOfflineKitAsync(string?
bundleId, string destinationDirectory, bool overwrite, bool resume, CancellationToken cancellationToken) - => throw new NotSupportedException(); - - public Task ImportOfflineKitAsync(OfflineKitImportRequest request, CancellationToken cancellationToken) - => throw new NotSupportedException(); - - public Task GetOfflineKitStatusAsync(CancellationToken cancellationToken) - => throw new NotSupportedException(); - } - - private sealed class StubExecutor : IScannerExecutor - { - private readonly ScannerExecutionResult _result; - - public StubExecutor(ScannerExecutionResult result) - { - _result = result; - } - - public Task RunAsync(string runner, string entry, string targetDirectory, string resultsDirectory, IReadOnlyList arguments, bool verbose, CancellationToken cancellationToken) - { - Directory.CreateDirectory(Path.GetDirectoryName(_result.ResultsPath)!); - if (!File.Exists(_result.ResultsPath)) - { - File.WriteAllText(_result.ResultsPath, "{}"); - } - - Directory.CreateDirectory(Path.GetDirectoryName(_result.RunMetadataPath)!); - if (!File.Exists(_result.RunMetadataPath)) - { - File.WriteAllText(_result.RunMetadataPath, "{}"); - } - - return Task.FromResult(_result); - } - } - - private sealed class StubInstaller : IScannerInstaller - { - public Task InstallAsync(string artifactPath, bool verbose, CancellationToken cancellationToken) - => Task.CompletedTask; - } - - private sealed class StubTokenClient : IStellaOpsTokenClient - { - private readonly StellaOpsTokenResult _token; - - public StubTokenClient() - { - _token = new StellaOpsTokenResult( - "token-123", - "Bearer", - DateTimeOffset.UtcNow.AddMinutes(30), - new[] { StellaOpsScopes.ConcelierJobsTrigger }); - } - - public int ClientCredentialRequests { get; private set; } - public IReadOnlyDictionary? LastAdditionalParameters { get; private set; } - public int PasswordRequests { get; private set; } - public int ClearRequests { get; private set; } - public StellaOpsTokenCacheEntry? CachedEntry { get; set; } - - public ValueTask CacheTokenAsync(string key, StellaOpsTokenCacheEntry entry, CancellationToken cancellationToken = default) - { - CachedEntry = entry; - return ValueTask.CompletedTask; - } - - public ValueTask ClearCachedTokenAsync(string key, CancellationToken cancellationToken = default) - { - ClearRequests++; - CachedEntry = null; - return ValueTask.CompletedTask; - } - - public Task GetJsonWebKeySetAsync(CancellationToken cancellationToken = default) - => Task.FromResult(new JsonWebKeySet("{\"keys\":[]}")); - - public ValueTask GetCachedTokenAsync(string key, CancellationToken cancellationToken = default) - => ValueTask.FromResult(CachedEntry); - - public Task RequestClientCredentialsTokenAsync(string? scope = null, IReadOnlyDictionary? additionalParameters = null, CancellationToken cancellationToken = default) - { - ClientCredentialRequests++; - LastAdditionalParameters = additionalParameters; - return Task.FromResult(_token); - } - - public Task RequestPasswordTokenAsync(string username, string password, string? scope = null, IReadOnlyDictionary? 
additionalParameters = null, CancellationToken cancellationToken = default)
-        {
-            PasswordRequests++;
-            LastAdditionalParameters = additionalParameters;
-            return Task.FromResult(_token);
-        }
-    }
-
-    private static string CreateUnsignedJwt(params (string Key, object Value)[] claims)
-    {
-        var headerJson = "{\"alg\":\"none\",\"typ\":\"JWT\"}";
-        var payload = new Dictionary(StringComparer.Ordinal);
-        foreach (var claim in claims)
-        {
-            payload[claim.Key] = claim.Value;
-        }
-
-        var payloadJson = JsonSerializer.Serialize(payload);
-        return $"{Base64UrlEncode(headerJson)}.{Base64UrlEncode(payloadJson)}.";
-    }
-
-    private static string Base64UrlEncode(string value)
-    {
-        var bytes = Encoding.UTF8.GetBytes(value);
-        return Convert.ToBase64String(bytes)
-            .TrimEnd('=')
-            .Replace('+', '-')
-            .Replace('/', '_');
-    }
-
-    private sealed class StubConcelierObservationsClient : IConcelierObservationsClient
-    {
-        private readonly AdvisoryObservationsResponse _response;
-
-        public StubConcelierObservationsClient(AdvisoryObservationsResponse? response = null)
-        {
-            _response = response ?? new AdvisoryObservationsResponse();
-        }
-
-        public AdvisoryObservationsQuery? LastQuery { get; private set; }
-
-        public Task GetObservationsAsync(
-            AdvisoryObservationsQuery query,
-            CancellationToken cancellationToken)
-        {
-            LastQuery = query;
-            return Task.FromResult(_response);
-        }
-    }
-}
+using System;
+using System.Collections.Generic;
+using System.Collections.ObjectModel;
+using System.IO;
+using System.Linq;
+using System.Net;
+using System.Net.Http;
+using System.Security.Cryptography;
+using System.Text;
+using System.Text.Json;
+using System.Text.Json.Serialization;
+using System.Threading;
+using System.Threading.Tasks;
+using System.Globalization;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.Logging;
+using Microsoft.IdentityModel.Tokens;
+using StellaOps.Auth.Abstractions;
+using StellaOps.Auth.Client;
+using StellaOps.Cli.Commands;
+using StellaOps.Cli.Configuration;
+using StellaOps.Cli.Services;
+using StellaOps.Cli.Services.Models;
+using StellaOps.Cli.Telemetry;
+using StellaOps.Cli.Tests.Testing;
+using StellaOps.Cryptography;
+using Spectre.Console;
+using Spectre.Console.Testing;
+
+namespace StellaOps.Cli.Tests.Commands;
+
+public sealed class CommandHandlersTests
+{
+    [Fact]
+    public async Task HandleExportJobAsync_SetsExitCodeZeroOnSuccess()
+    {
+        var original = Environment.ExitCode;
+        try
+        {
+            var backend = new StubBackendClient(new JobTriggerResult(true, "Accepted", "/jobs/export:json/1", null));
+            var provider = BuildServiceProvider(backend);
+
+            await CommandHandlers.HandleExportJobAsync(
+                provider,
+                format: "json",
+                delta: false,
+                publishFull: null,
+                publishDelta: null,
+                includeFull: null,
+                includeDelta: null,
+                verbose: false,
+                cancellationToken: CancellationToken.None);
+
+            Assert.Equal(0, Environment.ExitCode);
+            Assert.Equal("export:json", backend.LastJobKind);
+        }
+        finally
+        {
+            Environment.ExitCode = original;
+        }
+    }
+
+    [Fact]
+    public async Task HandleMergeJobAsync_SetsExitCodeOnFailure()
+    {
+        var original = Environment.ExitCode;
+        try
+        {
+            var backend = new StubBackendClient(new JobTriggerResult(false, "Job already running", null, null));
+            var provider = BuildServiceProvider(backend);
+
+            await CommandHandlers.HandleMergeJobAsync(provider, verbose: false, CancellationToken.None);
+
+            Assert.Equal(1, Environment.ExitCode);
+            Assert.Equal("merge:reconcile", backend.LastJobKind);
+        }
+        finally
+        {
+            Environment.ExitCode = original;
+        }
+
} + + [Fact] + public async Task HandleScannerRunAsync_AutomaticallyUploadsResults() + { + using var tempDir = new TempDirectory(); + var resultsFile = Path.Combine(tempDir.Path, "results", "scan.json"); + var backend = new StubBackendClient(new JobTriggerResult(true, "Accepted", null, null)); + var metadataFile = Path.Combine(tempDir.Path, "results", "scan-run.json"); + var executor = new StubExecutor(new ScannerExecutionResult(0, resultsFile, metadataFile)); + var options = new StellaOpsCliOptions + { + ResultsDirectory = Path.Combine(tempDir.Path, "results") + }; + + var provider = BuildServiceProvider(backend, executor, new StubInstaller(), options); + + Directory.CreateDirectory(Path.Combine(tempDir.Path, "target")); + + var original = Environment.ExitCode; + try + { + await CommandHandlers.HandleScannerRunAsync( + provider, + runner: "docker", + entry: "scanner-image", + targetDirectory: Path.Combine(tempDir.Path, "target"), + arguments: Array.Empty(), + verbose: false, + cancellationToken: CancellationToken.None); + + Assert.Equal(0, Environment.ExitCode); + Assert.Equal(resultsFile, backend.LastUploadPath); + Assert.True(File.Exists(metadataFile)); + } + finally + { + Environment.ExitCode = original; + } + } + + [Fact] + public async Task HandleAuthLoginAsync_UsesClientCredentialsFlow() + { + var original = Environment.ExitCode; + using var tempDir = new TempDirectory(); + + try + { + var options = new StellaOpsCliOptions + { + ResultsDirectory = Path.Combine(tempDir.Path, "results"), + Authority = new StellaOpsCliAuthorityOptions + { + Url = "https://authority.example", + ClientId = "cli", + ClientSecret = "secret", + Scope = "concelier.jobs.trigger", + TokenCacheDirectory = tempDir.Path + } + }; + + var tokenClient = new StubTokenClient(); + var provider = BuildServiceProvider(new StubBackendClient(new JobTriggerResult(true, "ok", null, null)), options: options, tokenClient: tokenClient); + + await CommandHandlers.HandleAuthLoginAsync(provider, options, verbose: false, force: false, cancellationToken: CancellationToken.None); + + Assert.Equal(0, Environment.ExitCode); + Assert.Equal(1, tokenClient.ClientCredentialRequests); + Assert.NotNull(tokenClient.CachedEntry); + } + finally + { + Environment.ExitCode = original; + } + } + + [Fact] + public async Task HandleAuthLoginAsync_FailsWhenPasswordMissing() + { + var original = Environment.ExitCode; + using var tempDir = new TempDirectory(); + + try + { + var options = new StellaOpsCliOptions + { + ResultsDirectory = Path.Combine(tempDir.Path, "results"), + Authority = new StellaOpsCliAuthorityOptions + { + Url = "https://authority.example", + ClientId = "cli", + Username = "user", + TokenCacheDirectory = tempDir.Path + } + }; + + var provider = BuildServiceProvider(new StubBackendClient(new JobTriggerResult(true, "ok", null, null)), options: options, tokenClient: new StubTokenClient()); + + await CommandHandlers.HandleAuthLoginAsync(provider, options, verbose: false, force: false, cancellationToken: CancellationToken.None); + + Assert.Equal(1, Environment.ExitCode); + } + finally + { + Environment.ExitCode = original; + } + } + + [Fact] + public async Task HandleAuthStatusAsync_ReportsMissingToken() + { + var original = Environment.ExitCode; + using var tempDir = new TempDirectory(); + + try + { + var options = new StellaOpsCliOptions + { + ResultsDirectory = Path.Combine(tempDir.Path, "results"), + Authority = new StellaOpsCliAuthorityOptions + { + Url = "https://authority.example", + ClientId = "cli", + TokenCacheDirectory = 
tempDir.Path + } + }; + + var provider = BuildServiceProvider(new StubBackendClient(new JobTriggerResult(true, "ok", null, null)), options: options, tokenClient: new StubTokenClient()); + + await CommandHandlers.HandleAuthStatusAsync(provider, options, verbose: false, cancellationToken: CancellationToken.None); + + Assert.Equal(1, Environment.ExitCode); + } + finally + { + Environment.ExitCode = original; + } + } + + [Fact] + public async Task HandleExcititorInitAsync_CallsBackend() + { + var original = Environment.ExitCode; + try + { + var backend = new StubBackendClient(new JobTriggerResult(true, "accepted", null, null)); + var provider = BuildServiceProvider(backend); + + await CommandHandlers.HandleExcititorInitAsync( + provider, + new[] { "redhat" }, + resume: true, + verbose: false, + cancellationToken: CancellationToken.None); + + Assert.Equal(0, Environment.ExitCode); + Assert.Equal("init", backend.LastExcititorRoute); + Assert.Equal(HttpMethod.Post, backend.LastExcititorMethod); + var payload = Assert.IsAssignableFrom>(backend.LastExcititorPayload); + Assert.Equal(true, payload["resume"]); + var providers = Assert.IsAssignableFrom>(payload["providers"]!); + Assert.Contains("redhat", providers, StringComparer.OrdinalIgnoreCase); + } + finally + { + Environment.ExitCode = original; + } + } + + [Fact] + public async Task HandleExcititorListProvidersAsync_WritesOutput() + { + var original = Environment.ExitCode; + try + { + var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)) + { + ProviderSummaries = new[] + { + new ExcititorProviderSummary("redhat", "distro", "Red Hat", "vendor", true, DateTimeOffset.UtcNow) + } + }; + + var provider = BuildServiceProvider(backend); + await CommandHandlers.HandleExcititorListProvidersAsync(provider, includeDisabled: false, verbose: false, cancellationToken: CancellationToken.None); + + Assert.Equal(0, Environment.ExitCode); + } + finally + { + Environment.ExitCode = original; + } + } + + [Fact] + public async Task HandleExcititorVerifyAsync_FailsWithoutArguments() + { + var original = Environment.ExitCode; + try + { + var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)); + var provider = BuildServiceProvider(backend); + + await CommandHandlers.HandleExcititorVerifyAsync(provider, null, null, null, verbose: false, cancellationToken: CancellationToken.None); + + Assert.Equal(1, Environment.ExitCode); + } + finally + { + Environment.ExitCode = original; + } + } + + [Fact] + public async Task HandleExcititorVerifyAsync_AttachesAttestationFile() + { + var original = Environment.ExitCode; + using var tempFile = new TempFile("attestation.json", Encoding.UTF8.GetBytes("{\"ok\":true}")); + try + { + var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)); + var provider = BuildServiceProvider(backend); + + await CommandHandlers.HandleExcititorVerifyAsync( + provider, + exportId: "export-123", + digest: "sha256:abc", + attestationPath: tempFile.Path, + verbose: false, + cancellationToken: CancellationToken.None); + + Assert.Equal(0, Environment.ExitCode); + Assert.Equal("verify", backend.LastExcititorRoute); + var payload = Assert.IsAssignableFrom>(backend.LastExcititorPayload); + Assert.Equal("export-123", payload["exportId"]); + Assert.Equal("sha256:abc", payload["digest"]); + var attestation = Assert.IsAssignableFrom>(payload["attestation"]!); + Assert.Equal(Path.GetFileName(tempFile.Path), attestation["fileName"]); + Assert.NotNull(attestation["base64"]); + } + finally + { 
+ Environment.ExitCode = original; + } + } + + [Fact] + public async Task HandleExcititorExportAsync_DownloadsWhenOutputProvided() + { + var original = Environment.ExitCode; + using var tempDir = new TempDirectory(); + + try + { + var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)); + const string manifestJson = """ + { + "exportId": "exports/20251019T101530Z/abcdef1234567890", + "format": "openvex", + "createdAt": "2025-10-19T10:15:30Z", + "artifact": { "algorithm": "sha256", "digest": "abcdef1234567890" }, + "fromCache": false, + "sizeBytes": 2048, + "attestation": { + "rekor": { + "location": "https://rekor.example/api/v1/log/entries/123", + "logIndex": "123" + } + } + } + """; + + backend.ExcititorResult = new ExcititorOperationResult(true, "ok", null, JsonDocument.Parse(manifestJson).RootElement.Clone()); + var provider = BuildServiceProvider(backend); + var outputPath = Path.Combine(tempDir.Path, "export.json"); + + await CommandHandlers.HandleExcititorExportAsync( + provider, + format: "openvex", + delta: false, + scope: null, + since: null, + provider: null, + outputPath: outputPath, + verbose: false, + cancellationToken: CancellationToken.None); + + Assert.Equal(0, Environment.ExitCode); + Assert.Single(backend.ExportDownloads); + var request = backend.ExportDownloads[0]; + Assert.Equal("exports/20251019T101530Z/abcdef1234567890", request.ExportId); + Assert.Equal(Path.GetFullPath(outputPath), request.DestinationPath); + Assert.Equal("sha256", request.Algorithm); + Assert.Equal("abcdef1234567890", request.Digest); + } + finally + { + Environment.ExitCode = original; + } + } + + [Fact] + public async Task HandleVulnObservationsAsync_WritesTableOutput() + { + var originalExit = Environment.ExitCode; + var response = new AdvisoryObservationsResponse + { + Observations = new[] + { + new AdvisoryObservationDocument + { + ObservationId = "tenant-a:ghsa:alpha:1", + Tenant = "tenant-a", + Source = new AdvisoryObservationSource + { + Vendor = "ghsa", + Stream = "advisories", + Api = "https://example.test/api" + }, + Upstream = new AdvisoryObservationUpstream + { + UpstreamId = "GHSA-abcd-efgh" + }, + Linkset = new AdvisoryObservationLinkset + { + Aliases = new[] { "cve-2025-0001" }, + Purls = new[] { "pkg:npm/package-a@1.0.0" }, + Cpes = new[] { "cpe:/a:vendor:product:1.0" } + }, + CreatedAt = new DateTimeOffset(2025, 10, 27, 6, 0, 0, TimeSpan.Zero) + } + }, + Linkset = new AdvisoryObservationLinksetAggregate + { + Aliases = new[] { "cve-2025-0001" }, + Purls = new[] { "pkg:npm/package-a@1.0.0" }, + Cpes = new[] { "cpe:/a:vendor:product:1.0" }, + References = Array.Empty() + } + }; + + var stubClient = new StubConcelierObservationsClient(response); + var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)); + var provider = BuildServiceProvider(backend, concelierClient: stubClient); + + var console = new TestConsole(); + var originalConsole = AnsiConsole.Console; + AnsiConsole.Console = console; + + try + { + await CommandHandlers.HandleVulnObservationsAsync( + provider, + tenant: "Tenant-A ", + observationIds: new[] { "tenant-a:ghsa:alpha:1 " }, + aliases: new[] { " CVE-2025-0001 " }, + purls: new[] { " pkg:npm/package-a@1.0.0 " }, + cpes: Array.Empty(), + limit: null, + cursor: null, + emitJson: false, + verbose: false, + cancellationToken: CancellationToken.None); + + Assert.Equal(0, Environment.ExitCode); + } + finally + { + Environment.ExitCode = originalExit; + AnsiConsole.Console = originalConsole; + } + + 
Assert.NotNull(stubClient.LastQuery); + var query = stubClient.LastQuery!; + Assert.Equal("tenant-a", query.Tenant); + Assert.Contains("cve-2025-0001", query.Aliases); + Assert.Contains("pkg:npm/package-a@1.0.0", query.Purls); + Assert.Null(query.Limit); + Assert.Null(query.Cursor); + + var output = console.Output; + Assert.False(string.IsNullOrWhiteSpace(output)); + } + + [Fact] + public async Task HandleVulnObservationsAsync_WritesJsonOutput() + { + var originalExit = Environment.ExitCode; + var response = new AdvisoryObservationsResponse + { + Observations = new[] + { + new AdvisoryObservationDocument + { + ObservationId = "tenant-a:osv:beta:2", + Tenant = "tenant-a", + Source = new AdvisoryObservationSource + { + Vendor = "osv", + Stream = "osv", + Api = "https://example.test/osv" + }, + Upstream = new AdvisoryObservationUpstream + { + UpstreamId = "OSV-2025-XYZ" + }, + Linkset = new AdvisoryObservationLinkset + { + Aliases = new[] { "cve-2025-0101" }, + Purls = new[] { "pkg:pypi/package-b@2.0.0" }, + Cpes = Array.Empty(), + References = new[] + { + new AdvisoryObservationReference { Type = "advisory", Url = "https://example.test/advisory" } + } + }, + CreatedAt = new DateTimeOffset(2025, 10, 27, 7, 30, 0, TimeSpan.Zero) + } + }, + Linkset = new AdvisoryObservationLinksetAggregate + { + Aliases = new[] { "cve-2025-0101" }, + Purls = new[] { "pkg:pypi/package-b@2.0.0" }, + Cpes = Array.Empty(), + References = new[] + { + new AdvisoryObservationReference { Type = "advisory", Url = "https://example.test/advisory" } + } + } + }; + + var stubClient = new StubConcelierObservationsClient(response); + var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)); + var provider = BuildServiceProvider(backend, concelierClient: stubClient); + + var writer = new StringWriter(); + var originalOut = Console.Out; + Console.SetOut(writer); + + try + { + await CommandHandlers.HandleVulnObservationsAsync( + provider, + tenant: "tenant-a", + observationIds: Array.Empty(), + aliases: Array.Empty(), + purls: Array.Empty(), + cpes: Array.Empty(), + limit: null, + cursor: null, + emitJson: true, + verbose: false, + cancellationToken: CancellationToken.None); + + Assert.Equal(0, Environment.ExitCode); + } + finally + { + Environment.ExitCode = originalExit; + Console.SetOut(originalOut); + } + + var json = writer.ToString(); + using var document = JsonDocument.Parse(json); + var root = document.RootElement; + Assert.True(root.TryGetProperty("observations", out var observations)); + Assert.Equal("tenant-a:osv:beta:2", observations[0].GetProperty("observationId").GetString()); + Assert.Equal("pkg:pypi/package-b@2.0.0", observations[0].GetProperty("linkset").GetProperty("purls")[0].GetString()); + } + + [Fact] + public async Task HandleVulnObservationsAsync_WhenHasMore_PrintsCursorHint() + { + var originalExit = Environment.ExitCode; + var response = new AdvisoryObservationsResponse + { + Observations = new[] + { + new AdvisoryObservationDocument + { + ObservationId = "tenant-a:source:1", + Tenant = "tenant-a", + Linkset = new AdvisoryObservationLinkset(), + Source = new AdvisoryObservationSource(), + Upstream = new AdvisoryObservationUpstream(), + CreatedAt = DateTimeOffset.UtcNow + } + }, + Linkset = new AdvisoryObservationLinksetAggregate(), + HasMore = true, + NextCursor = "cursor-token" + }; + + var stubClient = new StubConcelierObservationsClient(response); + var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)); + var provider = 
BuildServiceProvider(backend, concelierClient: stubClient); + + var console = new TestConsole(); + var originalConsole = AnsiConsole.Console; + AnsiConsole.Console = console; + + try + { + await CommandHandlers.HandleVulnObservationsAsync( + provider, + tenant: "tenant-a", + observationIds: Array.Empty(), + aliases: Array.Empty(), + purls: Array.Empty(), + cpes: Array.Empty(), + limit: 1, + cursor: null, + emitJson: false, + verbose: false, + cancellationToken: CancellationToken.None); + + Assert.Equal(0, Environment.ExitCode); + } + finally + { + Environment.ExitCode = originalExit; + AnsiConsole.Console = originalConsole; + } + + var output = console.Output; + Assert.Contains("--cursor", output, StringComparison.OrdinalIgnoreCase); + Assert.Contains("cursor-token", output, StringComparison.Ordinal); + } + + [Theory] + [InlineData(null)] + [InlineData("default")] + [InlineData("libsodium")] + public async Task HandleAuthRevokeVerifyAsync_VerifiesBundlesUsingProviderRegistry(string? providerHint) + { + var original = Environment.ExitCode; + using var tempDir = new TempDirectory(); + + try + { + var artifacts = await WriteRevocationArtifactsAsync(tempDir, providerHint); + + await CommandHandlers.HandleAuthRevokeVerifyAsync( + artifacts.BundlePath, + artifacts.SignaturePath, + artifacts.KeyPath, + verbose: true, + cancellationToken: CancellationToken.None); + + Assert.Equal(0, Environment.ExitCode); + } + finally + { + Environment.ExitCode = original; + } + } + + [Fact] + public async Task HandleAuthStatusAsync_ReportsCachedToken() + { + var original = Environment.ExitCode; + using var tempDir = new TempDirectory(); + + try + { + var options = new StellaOpsCliOptions + { + ResultsDirectory = Path.Combine(tempDir.Path, "results"), + Authority = new StellaOpsCliAuthorityOptions + { + Url = "https://authority.example", + ClientId = "cli", + TokenCacheDirectory = tempDir.Path + } + }; + + var tokenClient = new StubTokenClient(); + tokenClient.CachedEntry = new StellaOpsTokenCacheEntry( + "token", + "Bearer", + DateTimeOffset.UtcNow.AddMinutes(30), + new[] { StellaOpsScopes.ConcelierJobsTrigger }); + + var provider = BuildServiceProvider(new StubBackendClient(new JobTriggerResult(true, "ok", null, null)), options: options, tokenClient: tokenClient); + + await CommandHandlers.HandleAuthStatusAsync(provider, options, verbose: true, cancellationToken: CancellationToken.None); + + Assert.Equal(0, Environment.ExitCode); + } + finally + { + Environment.ExitCode = original; + } + } + + [Fact] + public async Task HandleAuthWhoAmIAsync_ReturnsErrorWhenTokenMissing() + { + var original = Environment.ExitCode; + using var tempDir = new TempDirectory(); + + try + { + var options = new StellaOpsCliOptions + { + ResultsDirectory = Path.Combine(tempDir.Path, "results"), + Authority = new StellaOpsCliAuthorityOptions + { + Url = "https://authority.example", + ClientId = "cli", + TokenCacheDirectory = tempDir.Path + } + }; + + var provider = BuildServiceProvider(new StubBackendClient(new JobTriggerResult(true, "ok", null, null)), options: options, tokenClient: new StubTokenClient()); + + await CommandHandlers.HandleAuthWhoAmIAsync(provider, options, verbose: false, cancellationToken: CancellationToken.None); + + Assert.Equal(1, Environment.ExitCode); + } + finally + { + Environment.ExitCode = original; + } + } + + [Fact] + public async Task HandleAuthWhoAmIAsync_ReportsClaimsForJwtToken() + { + var original = Environment.ExitCode; + using var tempDir = new TempDirectory(); + + try + { + var options = new 
StellaOpsCliOptions + { + ResultsDirectory = Path.Combine(tempDir.Path, "results"), + Authority = new StellaOpsCliAuthorityOptions + { + Url = "https://authority.example", + ClientId = "cli", + TokenCacheDirectory = tempDir.Path + } + }; + + var tokenClient = new StubTokenClient(); + tokenClient.CachedEntry = new StellaOpsTokenCacheEntry( + CreateUnsignedJwt( + ("sub", "cli-user"), + ("aud", "concelier"), + ("iss", "https://authority.example"), + ("iat", 1_700_000_000), + ("nbf", 1_700_000_000)), + "Bearer", + DateTimeOffset.UtcNow.AddMinutes(30), + new[] { StellaOpsScopes.ConcelierJobsTrigger }); + + var provider = BuildServiceProvider(new StubBackendClient(new JobTriggerResult(true, "ok", null, null)), options: options, tokenClient: tokenClient); + + await CommandHandlers.HandleAuthWhoAmIAsync(provider, options, verbose: true, cancellationToken: CancellationToken.None); + + Assert.Equal(0, Environment.ExitCode); + } + finally + { + Environment.ExitCode = original; + } + } + + [Fact] + public async Task HandleAuthLogoutAsync_ClearsToken() + { + var original = Environment.ExitCode; + using var tempDir = new TempDirectory(); + + try + { + var options = new StellaOpsCliOptions + { + ResultsDirectory = Path.Combine(tempDir.Path, "results"), + Authority = new StellaOpsCliAuthorityOptions + { + Url = "https://authority.example", + ClientId = "cli", + TokenCacheDirectory = tempDir.Path + } + }; + + var tokenClient = new StubTokenClient(); + tokenClient.CachedEntry = new StellaOpsTokenCacheEntry( + "token", + "Bearer", + DateTimeOffset.UtcNow.AddMinutes(5), + new[] { StellaOpsScopes.ConcelierJobsTrigger }); + + var provider = BuildServiceProvider(new StubBackendClient(new JobTriggerResult(true, "ok", null, null)), options: options, tokenClient: tokenClient); + + await CommandHandlers.HandleAuthLogoutAsync(provider, options, verbose: true, cancellationToken: CancellationToken.None); + + Assert.Null(tokenClient.CachedEntry); + Assert.Equal(1, tokenClient.ClearRequests); + Assert.Equal(0, Environment.ExitCode); + } + finally + { + Environment.ExitCode = original; + } + } + + [Fact] + public async Task HandleRuntimePolicyTestAsync_WritesInteractiveTable() + { + var originalExit = Environment.ExitCode; + var originalConsole = AnsiConsole.Console; + + var console = new TestConsole(); + console.Width(120); + console.Interactive(); + console.EmitAnsiSequences(); + + AnsiConsole.Console = console; + + var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)); + + var decisions = new Dictionary(StringComparer.Ordinal) + { + ["sha256:aaa"] = new RuntimePolicyImageDecision( + "allow", + true, + true, + Array.AsReadOnly(new[] { "trusted baseline" }), + new RuntimePolicyRekorReference("uuid-allow", "https://rekor.example/entries/uuid-allow", true), + new ReadOnlyDictionary(new Dictionary(StringComparer.Ordinal) + { + ["source"] = "baseline", + ["quieted"] = false, + ["confidence"] = 0.97, + ["confidenceBand"] = "high" + })), + ["sha256:bbb"] = new RuntimePolicyImageDecision( + "block", + false, + false, + Array.AsReadOnly(new[] { "missing attestation" }), + new RuntimePolicyRekorReference("uuid-block", "https://rekor.example/entries/uuid-block", false), + new ReadOnlyDictionary(new Dictionary(StringComparer.Ordinal) + { + ["source"] = "policy", + ["quieted"] = false, + ["confidence"] = 0.12, + ["confidenceBand"] = "low" + })), + ["sha256:ccc"] = new RuntimePolicyImageDecision( + "audit", + true, + false, + Array.AsReadOnly(new[] { "pending sbom sync" }), + new 
RuntimePolicyRekorReference(null, null, null), + new ReadOnlyDictionary(new Dictionary(StringComparer.Ordinal) + { + ["source"] = "mirror", + ["quieted"] = true, + ["quietedBy"] = "allow-temporary", + ["confidence"] = 0.42, + ["confidenceBand"] = "medium" + })) + }; + + backend.RuntimePolicyResult = new RuntimePolicyEvaluationResult( + 300, + DateTimeOffset.Parse("2025-10-19T12:00:00Z", CultureInfo.InvariantCulture), + "rev-42", + new ReadOnlyDictionary(decisions)); + + var provider = BuildServiceProvider(backend); + + try + { + await CommandHandlers.HandleRuntimePolicyTestAsync( + provider, + namespaceValue: "prod", + imageArguments: new[] { "sha256:aaa", "sha256:bbb" }, + filePath: null, + labelArguments: new[] { "app=frontend" }, + outputJson: false, + verbose: false, + cancellationToken: CancellationToken.None); + + var output = console.Output; + + Assert.Equal(0, Environment.ExitCode); + Assert.Contains("Image", output, StringComparison.Ordinal); + Assert.Contains("Verdict", output, StringComparison.Ordinal); + Assert.Contains("SBOM Ref", output, StringComparison.Ordinal); + Assert.Contains("Quieted", output, StringComparison.Ordinal); + Assert.Contains("Confidence", output, StringComparison.Ordinal); + Assert.Contains("sha256:aaa", output, StringComparison.Ordinal); + Assert.Contains("uuid-allow", output, StringComparison.Ordinal); + Assert.Contains("(verified)", output, StringComparison.Ordinal); + Assert.Contains("0.97 (high)", output, StringComparison.Ordinal); + Assert.Contains("sha256:bbb", output, StringComparison.Ordinal); + Assert.Contains("uuid-block", output, StringComparison.Ordinal); + Assert.Contains("(unverified)", output, StringComparison.Ordinal); + Assert.Contains("sha256:ccc", output, StringComparison.Ordinal); + Assert.Contains("yes", output, StringComparison.Ordinal); + Assert.Contains("allow-temporary", output, StringComparison.Ordinal); + Assert.True( + output.IndexOf("sha256:aaa", StringComparison.Ordinal) < + output.IndexOf("sha256:ccc", StringComparison.Ordinal)); + } + finally + { + Environment.ExitCode = originalExit; + AnsiConsole.Console = originalConsole; + } + } + + [Fact] + public async Task HandleRuntimePolicyTestAsync_WritesDeterministicJson() + { + var originalExit = Environment.ExitCode; + var originalOut = Console.Out; + + var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)); + + var decisions = new Dictionary(StringComparer.Ordinal) + { + ["sha256:json-a"] = new RuntimePolicyImageDecision( + "allow", + true, + true, + Array.AsReadOnly(new[] { "baseline allow" }), + new RuntimePolicyRekorReference("uuid-json-allow", "https://rekor.example/entries/uuid-json-allow", true), + new ReadOnlyDictionary(new Dictionary(StringComparer.Ordinal) + { + ["source"] = "baseline", + ["confidence"] = 0.66 + })), + ["sha256:json-b"] = new RuntimePolicyImageDecision( + "audit", + true, + false, + Array.AsReadOnly(Array.Empty()), + new RuntimePolicyRekorReference(null, null, null), + new ReadOnlyDictionary(new Dictionary(StringComparer.Ordinal) + { + ["source"] = "mirror", + ["quieted"] = true, + ["quietedBy"] = "risk-accepted" + })) + }; + + backend.RuntimePolicyResult = new RuntimePolicyEvaluationResult( + 600, + DateTimeOffset.Parse("2025-10-20T00:00:00Z", CultureInfo.InvariantCulture), + "rev-json-7", + new ReadOnlyDictionary(decisions)); + + var provider = BuildServiceProvider(backend); + + using var writer = new StringWriter(); + Console.SetOut(writer); + + try + { + await CommandHandlers.HandleRuntimePolicyTestAsync( + provider, + 
namespaceValue: "staging", + imageArguments: new[] { "sha256:json-a", "sha256:json-b" }, + filePath: null, + labelArguments: Array.Empty(), + outputJson: true, + verbose: false, + cancellationToken: CancellationToken.None); + + var output = writer.ToString().Trim(); + + Assert.Equal(0, Environment.ExitCode); + Assert.False(string.IsNullOrWhiteSpace(output)); + + using var document = JsonDocument.Parse(output); + var root = document.RootElement; + + Assert.Equal(600, root.GetProperty("ttlSeconds").GetInt32()); + Assert.Equal("rev-json-7", root.GetProperty("policyRevision").GetString()); + var expiresAt = root.GetProperty("expiresAtUtc").GetString(); + Assert.NotNull(expiresAt); + Assert.Equal( + DateTimeOffset.Parse("2025-10-20T00:00:00Z", CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal), + DateTimeOffset.Parse(expiresAt!, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal)); + + var results = root.GetProperty("results"); + var keys = results.EnumerateObject().Select(p => p.Name).ToArray(); + Assert.Equal(new[] { "sha256:json-a", "sha256:json-b" }, keys); + + var first = results.GetProperty("sha256:json-a"); + Assert.Equal("allow", first.GetProperty("policyVerdict").GetString()); + Assert.True(first.GetProperty("signed").GetBoolean()); + Assert.True(first.GetProperty("hasSbomReferrers").GetBoolean()); + var rekor = first.GetProperty("rekor"); + Assert.Equal("uuid-json-allow", rekor.GetProperty("uuid").GetString()); + Assert.True(rekor.GetProperty("verified").GetBoolean()); + Assert.Equal("baseline", first.GetProperty("source").GetString()); + Assert.Equal(0.66, first.GetProperty("confidence").GetDouble(), 3); + + var second = results.GetProperty("sha256:json-b"); + Assert.Equal("audit", second.GetProperty("policyVerdict").GetString()); + Assert.True(second.GetProperty("signed").GetBoolean()); + Assert.False(second.GetProperty("hasSbomReferrers").GetBoolean()); + Assert.Equal("mirror", second.GetProperty("source").GetString()); + Assert.True(second.GetProperty("quieted").GetBoolean()); + Assert.Equal("risk-accepted", second.GetProperty("quietedBy").GetString()); + Assert.False(second.TryGetProperty("rekor", out _)); + } + finally + { + Console.SetOut(originalOut); + Environment.ExitCode = originalExit; + } + } + + [Fact] + public async Task HandlePolicyFindingsListAsync_WritesInteractiveTable() + { + var originalExit = Environment.ExitCode; + var originalConsole = AnsiConsole.Console; + + var console = new TestConsole(); + console.Interactive(); + console.EmitAnsiSequences(); + console.Width(140); + AnsiConsole.Console = console; + + var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)) + { + FindingsPage = new PolicyFindingsPage( + new[] + { + new PolicyFindingDocument( + "P-7:S-42:pkg:npm/lodash@4.17.21:CVE-2021-23337", + "affected", + new PolicyFindingSeverity("High", 7.5), + "sbom:S-42", + new[] { "CVE-2021-23337", "GHSA-xxxx-yyyy" }, + new PolicyFindingVexMetadata("VendorX-123", "vendor-x", "not_affected"), + 4, + DateTimeOffset.Parse("2025-10-26T14:06:01Z", CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal), + "run:P-7:2025-10-26:auto") + }, + "cursor-42", + 10) + }; + var provider = BuildServiceProvider(backend); + + try + { + await CommandHandlers.HandlePolicyFindingsListAsync( + provider, + " P-7 ", + new[] { " sbom:S-42 " }, + new[] { "Affected", "QUIETED" }, + new[] { "High", "Critical" }, + "2025-10-25T00:00:00Z", + " cursor-0 ", + page: 2, + pageSize: 100, + format: "table", + outputPath: null, + verbose: false, + 
cancellationToken: CancellationToken.None); + + Assert.Equal(0, Environment.ExitCode); + Assert.NotNull(backend.LastFindingsQuery); + var query = backend.LastFindingsQuery!; + Assert.Equal("P-7", query.PolicyId); + Assert.Contains("sbom:S-42", query.SbomIds); + Assert.Contains("affected", query.Statuses); + Assert.Contains("quieted", query.Statuses); + Assert.Contains("High", query.Severities); + Assert.Contains("Critical", query.Severities); + Assert.Equal(2, query.Page); + Assert.Equal(100, query.PageSize); + Assert.Equal("cursor-0", query.Cursor); + Assert.Equal(DateTimeOffset.Parse("2025-10-25T00:00:00Z", CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal), query.Since); + + var output = console.Output; + Assert.Contains("P-7:S-42", output, StringComparison.Ordinal); + Assert.Contains("High", output, StringComparison.Ordinal); + } + finally + { + AnsiConsole.Console = originalConsole; + Environment.ExitCode = originalExit; + } + } + + [Fact] + public async Task HandlePolicyFindingsListAsync_WritesJson() + { + var originalExit = Environment.ExitCode; + var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)) + { + FindingsPage = new PolicyFindingsPage( + new[] + { + new PolicyFindingDocument( + "finding-1", + "quieted", + new PolicyFindingSeverity("Medium", 5.1), + "sbom:S-99", + Array.Empty(), + null, + 3, + DateTimeOffset.MinValue, + null) + }, + null, + null) + }; + var provider = BuildServiceProvider(backend); + using var writer = new StringWriter(); + var originalOut = Console.Out; + Console.SetOut(writer); + + try + { + await CommandHandlers.HandlePolicyFindingsListAsync( + provider, + "P-9", + Array.Empty(), + Array.Empty(), + Array.Empty(), + null, + null, + page: null, + pageSize: null, + format: "json", + outputPath: null, + verbose: false, + cancellationToken: CancellationToken.None); + + Assert.Equal(0, Environment.ExitCode); + using var document = JsonDocument.Parse(writer.ToString()); + var root = document.RootElement; + Assert.Equal("P-9", root.GetProperty("policyId").GetString()); + var items = root.GetProperty("items"); + Assert.Equal(1, items.GetArrayLength()); + var first = items[0]; + Assert.Equal("finding-1", first.GetProperty("findingId").GetString()); + Assert.Equal("quieted", first.GetProperty("status").GetString()); + Assert.Equal("Medium", first.GetProperty("severity").GetProperty("normalized").GetString()); + } + finally + { + Console.SetOut(originalOut); + Environment.ExitCode = originalExit; + } + } + + [Fact] + public async Task HandlePolicyFindingsGetAsync_WritesInteractiveTable() + { + var originalExit = Environment.ExitCode; + var originalConsole = AnsiConsole.Console; + + var console = new TestConsole(); + console.Interactive(); + console.EmitAnsiSequences(); + console.Width(120); + AnsiConsole.Console = console; + + var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)) + { + FindingDocument = new PolicyFindingDocument( + "P-9:S-1:pkg:npm/leftpad@1.0.0:CVE-1111", + "affected", + new PolicyFindingSeverity("Critical", 9.1), + "sbom:S-1", + new[] { "CVE-1111" }, + new PolicyFindingVexMetadata("VendorY-9", null, "affected"), + 7, + DateTimeOffset.Parse("2025-10-26T12:34:56Z", CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal), + "run:P-9:1234") + }; + var provider = BuildServiceProvider(backend); + + try + { + await CommandHandlers.HandlePolicyFindingsGetAsync( + provider, + "P-9", + "P-9:S-1:pkg:npm/leftpad@1.0.0:CVE-1111", + format: "table", + outputPath: null, + verbose: false, + 
cancellationToken: CancellationToken.None); + + Assert.Equal(0, Environment.ExitCode); + Assert.Equal(("P-9", "P-9:S-1:pkg:npm/leftpad@1.0.0:CVE-1111"), backend.LastFindingGet); + var output = console.Output; + Assert.Contains("Critical", output); + Assert.Contains("run:P-9:1234", output); + } + finally + { + AnsiConsole.Console = originalConsole; + Environment.ExitCode = originalExit; + } + } + + [Fact] + public async Task HandlePolicyFindingsExplainAsync_WritesInteractiveTable() + { + var originalExit = Environment.ExitCode; + var originalConsole = AnsiConsole.Console; + + var console = new TestConsole(); + console.Interactive(); + console.EmitAnsiSequences(); + console.Width(140); + AnsiConsole.Console = console; + + var steps = new[] + { + new PolicyFindingExplainStep( + "rule-block-critical", + "blocked", + "block", + 9.1, + new ReadOnlyDictionary(new Dictionary + { + ["severity"] = "Critical", + ["sealed"] = "false" + }), + new ReadOnlyDictionary(new Dictionary + { + ["vex"] = "VendorY-9" + })) + }; + var hints = new[] + { + new PolicyFindingExplainHint("Using cached EPSS percentile from bundle 2025-10-20") + }; + + var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)) + { + ExplainResult = new PolicyFindingExplainResult( + "P-9:S-1:pkg:npm/leftpad@1.0.0:CVE-1111", + 7, + new ReadOnlyCollection(steps), + new ReadOnlyCollection(hints)) + }; + var provider = BuildServiceProvider(backend); + + try + { + await CommandHandlers.HandlePolicyFindingsExplainAsync( + provider, + "P-9", + "P-9:S-1:pkg:npm/leftpad@1.0.0:CVE-1111", + mode: "verbose", + format: "table", + outputPath: null, + verbose: false, + cancellationToken: CancellationToken.None); + + Assert.Equal(0, Environment.ExitCode); + Assert.Equal(("P-9", "P-9:S-1:pkg:npm/leftpad@1.0.0:CVE-1111", "verbose"), backend.LastFindingExplain); + var output = console.Output; + Assert.Contains("rule-block-critical", output); + Assert.Contains("EPSS percentile", output); + } + finally + { + AnsiConsole.Console = originalConsole; + Environment.ExitCode = originalExit; + } + } + + [Fact] + public async Task HandlePolicySimulateAsync_WritesInteractiveSummary() + { + var originalExit = Environment.ExitCode; + var originalConsole = AnsiConsole.Console; + + var console = new TestConsole(); + console.Width(120); + console.Interactive(); + console.EmitAnsiSequences(); + AnsiConsole.Console = console; + + var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)); + + var severity = new ReadOnlyDictionary(new Dictionary(StringComparer.Ordinal) + { + ["critical"] = new PolicySimulationSeverityDelta(1, null), + ["high"] = new PolicySimulationSeverityDelta(null, 2) + }); + var ruleHits = new ReadOnlyCollection(new List + { + new("rule-block-critical", "Block Critical", 1, 0), + new("rule-quiet-low", "Quiet Low", null, 2) + }); + + backend.SimulationResult = new PolicySimulationResult( + new PolicySimulationDiff( + "scheduler.policy-diff-summary@1", + 2, + 1, + 10, + severity, + ruleHits), + "blob://policy/P-7/simulation.json"); + + var provider = BuildServiceProvider(backend); + + try + { + await CommandHandlers.HandlePolicySimulateAsync( + provider, + policyId: "P-7", + baseVersion: 3, + candidateVersion: 4, + sbomArguments: new[] { "sbom:A", "sbom:B" }, + environmentArguments: new[] { "sealed=false", "exposure=internet" }, + format: "table", + outputPath: null, + explain: true, + failOnDiff: false, + verbose: false, + cancellationToken: CancellationToken.None); + + Assert.Equal(0, Environment.ExitCode); + 
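            // Verify the simulation request captured by the stub backend before asserting on the rendered table.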
Assert.NotNull(backend.LastPolicySimulation); + var simulation = backend.LastPolicySimulation!.Value; + Assert.Equal("P-7", simulation.PolicyId); + Assert.Equal(3, simulation.Input.BaseVersion); + Assert.Equal(4, simulation.Input.CandidateVersion); + Assert.True(simulation.Input.Explain); + Assert.Equal(new[] { "sbom:A", "sbom:B" }, simulation.Input.SbomSet); + Assert.True(simulation.Input.Environment.TryGetValue("sealed", out var sealedValue) && sealedValue is bool sealedFlag && sealedFlag == false); + Assert.True(simulation.Input.Environment.TryGetValue("exposure", out var exposureValue) && string.Equals(exposureValue as string, "internet", StringComparison.Ordinal)); + + var output = console.Output; + Assert.Contains("Severity", output, StringComparison.Ordinal); + Assert.Contains("critical", output, StringComparison.OrdinalIgnoreCase); + Assert.Contains("Rule", output, StringComparison.Ordinal); + Assert.Contains("Block Critical", output, StringComparison.Ordinal); + } + finally + { + Environment.ExitCode = originalExit; + AnsiConsole.Console = originalConsole; + } + } + + [Fact] + public async Task HandlePolicySimulateAsync_WritesJsonOutput() + { + var originalExit = Environment.ExitCode; + var originalOut = Console.Out; + + var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)); + backend.SimulationResult = new PolicySimulationResult( + new PolicySimulationDiff( + "scheduler.policy-diff-summary@1", + 0, + 0, + 5, + new ReadOnlyDictionary(new Dictionary(0, StringComparer.Ordinal)), + new ReadOnlyCollection(Array.Empty())), + null); + + var provider = BuildServiceProvider(backend); + + using var writer = new StringWriter(); + Console.SetOut(writer); + + try + { + await CommandHandlers.HandlePolicySimulateAsync( + provider, + policyId: "P-9", + baseVersion: null, + candidateVersion: 5, + sbomArguments: Array.Empty(), + environmentArguments: new[] { "sealed=true", "threshold=0.8" }, + format: "json", + outputPath: null, + explain: false, + failOnDiff: false, + verbose: false, + cancellationToken: CancellationToken.None); + + Assert.Equal(0, Environment.ExitCode); + using var document = JsonDocument.Parse(writer.ToString()); + var root = document.RootElement; + Assert.Equal("P-9", root.GetProperty("policyId").GetString()); + Assert.Equal(5, root.GetProperty("candidateVersion").GetInt32()); + Assert.True(root.TryGetProperty("environment", out var envElement) && envElement.TryGetProperty("sealed", out var sealedElement) && sealedElement.GetBoolean()); + Assert.True(envElement.TryGetProperty("threshold", out var thresholdElement) && Math.Abs(thresholdElement.GetDouble() - 0.8) < 0.0001); + } + finally + { + Console.SetOut(originalOut); + Environment.ExitCode = originalExit; + } + } + + [Fact] + public async Task HandlePolicySimulateAsync_FailOnDiffSetsExitCode20() + { + var originalExit = Environment.ExitCode; + + var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)); + backend.SimulationResult = new PolicySimulationResult( + new PolicySimulationDiff( + null, + 1, + 0, + 0, + new ReadOnlyDictionary(new Dictionary(0, StringComparer.Ordinal)), + new ReadOnlyCollection(Array.Empty())), + null); + + var provider = BuildServiceProvider(backend); + + try + { + await CommandHandlers.HandlePolicySimulateAsync( + provider, + policyId: "P-11", + baseVersion: null, + candidateVersion: null, + sbomArguments: Array.Empty(), + environmentArguments: Array.Empty(), + format: "json", + outputPath: null, + explain: false, + failOnDiff: true, + verbose: 
false, + cancellationToken: CancellationToken.None); + + Assert.Equal(20, Environment.ExitCode); + } + finally + { + Environment.ExitCode = originalExit; + } + } + + [Fact] + public async Task HandlePolicySimulateAsync_MapsErrorCodes() + { + var originalExit = Environment.ExitCode; + var originalOut = Console.Out; + + var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)) + { + SimulationException = new PolicyApiException("Missing inputs", HttpStatusCode.BadRequest, "ERR_POL_003") + }; + var provider = BuildServiceProvider(backend); + + using var writer = new StringWriter(); + Console.SetOut(writer); + + try + { + await CommandHandlers.HandlePolicySimulateAsync( + provider, + policyId: "P-12", + baseVersion: null, + candidateVersion: null, + sbomArguments: Array.Empty(), + environmentArguments: Array.Empty(), + format: "json", + outputPath: null, + explain: false, + failOnDiff: false, + verbose: false, + cancellationToken: CancellationToken.None); + + Assert.Equal(21, Environment.ExitCode); + } + finally + { + Console.SetOut(originalOut); + Environment.ExitCode = originalExit; + } + } + + [Fact] + public async Task HandlePolicyActivateAsync_DisplaysInteractiveSummary() + { + var originalExit = Environment.ExitCode; + var originalConsole = AnsiConsole.Console; + + var console = new TestConsole(); + console.Width(120); + console.Interactive(); + console.EmitAnsiSequences(); + AnsiConsole.Console = console; + + var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)); + backend.ActivationResult = new PolicyActivationResult( + "activated", + new PolicyActivationRevision( + "P-7", + 4, + "active", + true, + DateTimeOffset.Parse("2025-10-27T00:00:00Z", CultureInfo.InvariantCulture), + DateTimeOffset.Parse("2025-10-27T01:15:00Z", CultureInfo.InvariantCulture), + new ReadOnlyCollection(new List + { + new("user:alice", DateTimeOffset.Parse("2025-10-27T01:10:00Z", CultureInfo.InvariantCulture), "Primary"), + new("user:bob", DateTimeOffset.Parse("2025-10-27T01:12:00Z", CultureInfo.InvariantCulture), null) + }))); + + var provider = BuildServiceProvider(backend); + + try + { + await CommandHandlers.HandlePolicyActivateAsync( + provider, + policyId: "P-7", + version: 4, + note: "Rolling forward", + runNow: true, + scheduledAt: null, + priority: "high", + rollback: false, + incidentId: "INC-204", + verbose: false, + cancellationToken: CancellationToken.None); + + Assert.Equal(0, Environment.ExitCode); + Assert.NotNull(backend.LastPolicyActivation); + var activation = backend.LastPolicyActivation!.Value; + Assert.Equal("P-7", activation.PolicyId); + Assert.Equal(4, activation.Version); + Assert.True(activation.Request.RunNow); + Assert.Null(activation.Request.ScheduledAt); + Assert.Equal("high", activation.Request.Priority); + Assert.Equal("INC-204", activation.Request.IncidentId); + Assert.Equal("Rolling forward", activation.Request.Comment); + + var output = console.Output; + Assert.Contains("activated", output, StringComparison.OrdinalIgnoreCase); + Assert.Contains("user:alice", output, StringComparison.Ordinal); + Assert.Contains("Rolling forward", output, StringComparison.Ordinal); + } + finally + { + Environment.ExitCode = originalExit; + AnsiConsole.Console = originalConsole; + } + } + + [Fact] + public async Task HandlePolicyActivateAsync_PendingSecondApprovalSetsExitCode() + { + var originalExit = Environment.ExitCode; + + var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)); + backend.ActivationResult = new 
PolicyActivationResult( + "pending_second_approval", + new PolicyActivationRevision( + "P-7", + 4, + "approved", + true, + DateTimeOffset.UtcNow, + null, + new ReadOnlyCollection(new List + { + new("user:alice", DateTimeOffset.UtcNow, "Primary") + }))); + + var provider = BuildServiceProvider(backend); + + try + { + await CommandHandlers.HandlePolicyActivateAsync( + provider, + policyId: "P-7", + version: 4, + note: null, + runNow: false, + scheduledAt: null, + priority: null, + rollback: false, + incidentId: null, + verbose: false, + cancellationToken: CancellationToken.None); + + Assert.Equal(75, Environment.ExitCode); + } + finally + { + Environment.ExitCode = originalExit; + } + } + + [Fact] + public async Task HandlePolicyActivateAsync_MapsErrorCodes() + { + var originalExit = Environment.ExitCode; + + var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)) + { + ActivationException = new PolicyApiException("Revision not approved", HttpStatusCode.BadRequest, "ERR_POL_002") + }; + + var provider = BuildServiceProvider(backend); + + try + { + await CommandHandlers.HandlePolicyActivateAsync( + provider, + policyId: "P-9", + version: 2, + note: null, + runNow: false, + scheduledAt: null, + priority: null, + rollback: false, + incidentId: null, + verbose: false, + cancellationToken: CancellationToken.None); + + Assert.Equal(70, Environment.ExitCode); + } + finally + { + Environment.ExitCode = originalExit; + } + } + + private static async Task WriteRevocationArtifactsAsync(TempDirectory temp, string? providerHint) + { + var (bundleBytes, signature, keyPem) = await BuildRevocationArtifactsAsync(providerHint); + + var bundlePath = Path.Combine(temp.Path, "revocation-bundle.json"); + var signaturePath = Path.Combine(temp.Path, "revocation-bundle.json.jws"); + var keyPath = Path.Combine(temp.Path, "revocation-key.pem"); + + await File.WriteAllBytesAsync(bundlePath, bundleBytes); + await File.WriteAllTextAsync(signaturePath, signature); + await File.WriteAllTextAsync(keyPath, keyPem); + + return new RevocationArtifactPaths(bundlePath, signaturePath, keyPath); + } + + private static async Task<(byte[] Bundle, string Signature, string KeyPem)> BuildRevocationArtifactsAsync(string? 
        providerHint)
+    {
+        var bundleBytes = Encoding.UTF8.GetBytes("{\"revocations\":[]}");
+
+        using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256);
+        var parameters = ecdsa.ExportParameters(includePrivateParameters: true);
+
+        var signingKey = new CryptoSigningKey(
+            new CryptoKeyReference("revocation-test"),
+            SignatureAlgorithms.Es256,
+            privateParameters: in parameters,
+            createdAt: DateTimeOffset.UtcNow);
+
+        var provider = new DefaultCryptoProvider();
+        provider.UpsertSigningKey(signingKey);
+        var signer = provider.GetSigner(SignatureAlgorithms.Es256, signingKey.Reference);
+
+        var header = new Dictionary<string, object>
+        {
+            ["alg"] = SignatureAlgorithms.Es256,
+            ["kid"] = signingKey.Reference.KeyId,
+            ["typ"] = "application/vnd.stellaops.revocation-bundle+jws",
+            ["b64"] = false,
+            ["crit"] = new[] { "b64" }
+        };
+
+        if (!string.IsNullOrWhiteSpace(providerHint))
+        {
+            header["provider"] = providerHint;
+        }
+
+        var serializerOptions = new JsonSerializerOptions
+        {
+            PropertyNamingPolicy = null,
+            DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull
+        };
+
+        var headerJson = JsonSerializer.Serialize(header, serializerOptions);
+        var encodedHeader = Base64UrlEncoder.Encode(Encoding.UTF8.GetBytes(headerJson));
+
+        var signingInput = new byte[encodedHeader.Length + 1 + bundleBytes.Length];
+        var headerBytes = Encoding.ASCII.GetBytes(encodedHeader);
+        Buffer.BlockCopy(headerBytes, 0, signingInput, 0, headerBytes.Length);
+        signingInput[headerBytes.Length] = (byte)'.';
+        Buffer.BlockCopy(bundleBytes, 0, signingInput, headerBytes.Length + 1, bundleBytes.Length);
+
+        var signatureBytes = await signer.SignAsync(signingInput);
+        var encodedSignature = Base64UrlEncoder.Encode(signatureBytes);
+        var jws = string.Concat(encodedHeader, "..", encodedSignature);
+
+        var publicKeyBytes = ecdsa.ExportSubjectPublicKeyInfo();
+        var keyPem = new string(PemEncoding.Write("PUBLIC KEY", publicKeyBytes));
+
+        return (bundleBytes, jws, keyPem);
+    }
+
+    private sealed record RevocationArtifactPaths(string BundlePath, string SignaturePath, string KeyPath);
+
+    [Fact]
+    public async Task HandleSourcesIngestAsync_NoViolations_WritesJsonReport()
+    {
+        var originalExitCode = Environment.ExitCode;
+        var originalTenant = Environment.GetEnvironmentVariable("STELLA_TENANT");
+        using var tempDir = new TempDirectory();
+
+        var originalConsole = AnsiConsole.Console;
+        var console = new TestConsole();
+        var originalOut = Console.Out;
+        using var writer = new StringWriter();
+
+        try
+        {
+            Environment.SetEnvironmentVariable("STELLA_TENANT", "tenant-alpha");
+            AnsiConsole.Console = console;
+            Console.SetOut(writer);
+
+            var inputPath = Path.Combine(tempDir.Path, "payload.json");
+            await File.WriteAllTextAsync(inputPath, "{ \"id\": 1 }");
+
+            var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null))
+            {
+                DryRunResponse = new AocIngestDryRunResponse
+                {
+                    Source = "redhat",
+                    Tenant = "tenant-alpha",
+                    Status = "ok",
+                    Document = new AocIngestDryRunDocumentResult
+                    {
+                        ContentHash = "sha256:test"
+                    },
+                    Violations = Array.Empty<AocIngestDryRunViolation>()
+                }
+            };
+
+            var provider = BuildServiceProvider(backend);
+            var outputPath = Path.Combine(tempDir.Path, "dry-run.json");
+
+            await CommandHandlers.HandleSourcesIngestAsync(
+                provider,
+                dryRun: true,
+                source: "RedHat",
+                input: inputPath,
+                tenantOverride: null,
+                format: "json",
+                disableColor: true,
+                output: outputPath,
+                verbose: false,
+                cancellationToken: CancellationToken.None);
+
+            Assert.Equal(0, Environment.ExitCode);
+            Assert.True(File.Exists(outputPath));
+
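+            // Inspect the dry-run request captured by the stub backend and the JSON echoed to the console.
+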
Assert.NotNull(backend.LastDryRunRequest); + var request = backend.LastDryRunRequest!; + Assert.Equal("tenant-alpha", request.Tenant); + Assert.Equal("RedHat", request.Source); + Assert.Equal("payload.json", request.Document.Name); + Assert.Equal("application/json", request.Document.ContentType); + Assert.Null(request.Document.ContentEncoding); + using (var document = JsonDocument.Parse(request.Document.Content)) + { + Assert.Equal(1, document.RootElement.GetProperty("id").GetInt32()); + } + + var consoleJson = writer.ToString(); + Assert.Contains("\"status\": \"ok\"", consoleJson); + } + finally + { + Environment.ExitCode = originalExitCode; + Environment.SetEnvironmentVariable("STELLA_TENANT", originalTenant); + AnsiConsole.Console = originalConsole; + Console.SetOut(originalOut); + } + } + + [Fact] + public async Task HandleSourcesIngestAsync_ViolationMapsExitCode() + { + var originalExitCode = Environment.ExitCode; + var originalTenant = Environment.GetEnvironmentVariable("STELLA_TENANT"); + using var tempDir = new TempDirectory(); + + var originalConsole = AnsiConsole.Console; + var console = new TestConsole(); + + try + { + Environment.SetEnvironmentVariable("STELLA_TENANT", "tenant-beta"); + AnsiConsole.Console = console; + + var inputPath = Path.Combine(tempDir.Path, "payload.json"); + await File.WriteAllTextAsync(inputPath, "{ \"id\": 2 }"); + + var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)) + { + DryRunResponse = new AocIngestDryRunResponse + { + Status = "error", + Violations = new[] + { + new AocIngestDryRunViolation + { + Code = "ERR_AOC_002", + Message = "merge detected", + Path = "/content/derived" + } + } + } + }; + + var provider = BuildServiceProvider(backend); + + await CommandHandlers.HandleSourcesIngestAsync( + provider, + dryRun: true, + source: "osv", + input: inputPath, + tenantOverride: null, + format: "table", + disableColor: true, + output: null, + verbose: false, + cancellationToken: CancellationToken.None); + + Assert.Equal(12, Environment.ExitCode); + var output = console.Output; + Assert.Contains("ERR_AOC_002", output); + Assert.Contains("/content/derived", output); + } + finally + { + Environment.ExitCode = originalExitCode; + Environment.SetEnvironmentVariable("STELLA_TENANT", originalTenant); + AnsiConsole.Console = originalConsole; + } + } + + [Fact] + public async Task HandleSourcesIngestAsync_MissingTenant_ReturnsUsageError() + { + var originalExitCode = Environment.ExitCode; + var originalTenant = Environment.GetEnvironmentVariable("STELLA_TENANT"); + using var tempDir = new TempDirectory(); + + try + { + Environment.SetEnvironmentVariable("STELLA_TENANT", null); + + var inputPath = Path.Combine(tempDir.Path, "payload.json"); + await File.WriteAllTextAsync(inputPath, "{ \"id\": 3 }"); + + var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)); + var provider = BuildServiceProvider(backend); + + await CommandHandlers.HandleSourcesIngestAsync( + provider, + dryRun: true, + source: "osv", + input: inputPath, + tenantOverride: null, + format: "table", + disableColor: true, + output: null, + verbose: false, + cancellationToken: CancellationToken.None); + + Assert.Equal(70, Environment.ExitCode); + } + finally + { + Environment.ExitCode = originalExitCode; + Environment.SetEnvironmentVariable("STELLA_TENANT", originalTenant); + } + } + + [Fact] + public async Task HandleAocVerifyAsync_NoViolations_WritesReportAndReturnsZero() + { + var originalExitCode = Environment.ExitCode; + var originalTenant 
= Environment.GetEnvironmentVariable("STELLA_TENANT"); + using var tempDir = new TempDirectory(); + + var originalConsole = AnsiConsole.Console; + var console = new TestConsole(); + var originalOut = Console.Out; + using var writer = new StringWriter(); + + try + { + AnsiConsole.Console = console; + Console.SetOut(writer); + Environment.SetEnvironmentVariable("STELLA_TENANT", "tenant-a"); + + var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)) + { + VerifyResponse = new AocVerifyResponse + { + Tenant = "tenant-a", + Window = new AocVerifyWindow + { + From = DateTimeOffset.Parse("2025-10-25T12:00:00Z", CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal), + To = DateTimeOffset.Parse("2025-10-26T12:00:00Z", CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal) + }, + Checked = new AocVerifyChecked { Advisories = 4, Vex = 1 }, + Metrics = new AocVerifyMetrics { IngestionWriteTotal = 5, AocViolationTotal = 0 }, + Violations = Array.Empty(), + Truncated = false + } + }; + + var provider = BuildServiceProvider(backend); + var exportPath = Path.Combine(tempDir.Path, "verify.json"); + + await CommandHandlers.HandleAocVerifyAsync( + provider, + sinceOption: "2025-10-25T12:00:00Z", + limitOption: 10, + sourcesOption: "RedHat,Ubuntu", + codesOption: "err_aoc_001", + format: "json", + exportPath: exportPath, + tenantOverride: null, + disableColor: true, + verbose: false, + cancellationToken: CancellationToken.None); + + Assert.Equal(0, Environment.ExitCode); + Assert.True(File.Exists(exportPath)); + + Assert.NotNull(backend.LastVerifyRequest); + Assert.Equal("tenant-a", backend.LastVerifyRequest!.Tenant); + var expectedSince = DateTimeOffset.Parse("2025-10-25T12:00:00Z", CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal); + var actualSince = DateTimeOffset.Parse(backend.LastVerifyRequest.Since!, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal); + Assert.Equal(expectedSince, actualSince); + Assert.Equal(10, backend.LastVerifyRequest.Limit); + Assert.Equal(new[] { "redhat", "ubuntu" }, backend.LastVerifyRequest.Sources); + Assert.Equal(new[] { "ERR_AOC_001" }, backend.LastVerifyRequest.Codes); + + var jsonOutput = writer.ToString(); + Assert.Contains("\"tenant\": \"tenant-a\"", jsonOutput); + Assert.Contains("\"ingestion_write_total\": 5", jsonOutput); + } + finally + { + Environment.ExitCode = originalExitCode; + Environment.SetEnvironmentVariable("STELLA_TENANT", originalTenant); + Console.SetOut(originalOut); + AnsiConsole.Console = originalConsole; + } + } + + [Fact] + public async Task HandleAocVerifyAsync_WithViolations_MapsExitCode() + { + var originalExitCode = Environment.ExitCode; + var originalTenant = Environment.GetEnvironmentVariable("STELLA_TENANT"); + + var originalConsole = AnsiConsole.Console; + var console = new TestConsole(); + + try + { + AnsiConsole.Console = console; + Environment.SetEnvironmentVariable("STELLA_TENANT", "tenant-b"); + + var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)) + { + VerifyResponse = new AocVerifyResponse + { + Violations = new[] + { + new AocVerifyViolation + { + Code = "ERR_AOC_003", + Count = 2, + Examples = new[] + { + new AocVerifyViolationExample + { + Source = "redhat", + DocumentId = "doc-1", + Path = "/content/raw" + } + } + } + } + } + }; + + var provider = BuildServiceProvider(backend); + + var capturedBefore = DateTimeOffset.UtcNow; + + await CommandHandlers.HandleAocVerifyAsync( + provider, + sinceOption: "24h", + limitOption: null, + 
sourcesOption: null, + codesOption: null, + format: "table", + exportPath: null, + tenantOverride: null, + disableColor: true, + verbose: false, + cancellationToken: CancellationToken.None); + + Assert.Equal(13, Environment.ExitCode); + Assert.NotNull(backend.LastVerifyRequest); + Assert.Equal(20, backend.LastVerifyRequest!.Limit); + Assert.Null(backend.LastVerifyRequest.Sources); + Assert.Null(backend.LastVerifyRequest.Codes); + + var parsedSince = DateTimeOffset.Parse(backend.LastVerifyRequest.Since!, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal); + var expectedSince = capturedBefore.AddHours(-24); + Assert.InRange((expectedSince - parsedSince).Duration(), TimeSpan.Zero, TimeSpan.FromSeconds(10)); + + var output = console.Output; + Assert.Contains("ERR_AOC_003", output); + Assert.Contains("doc-1", output); + } + finally + { + Environment.ExitCode = originalExitCode; + Environment.SetEnvironmentVariable("STELLA_TENANT", originalTenant); + AnsiConsole.Console = originalConsole; + } + } + + [Fact] + public async Task HandleAocVerifyAsync_TruncatedWithoutViolations_ReturnsExitCode18() + { + var originalExitCode = Environment.ExitCode; + var originalTenant = Environment.GetEnvironmentVariable("STELLA_TENANT"); + + var originalConsole = AnsiConsole.Console; + var console = new TestConsole(); + + try + { + AnsiConsole.Console = console; + Environment.SetEnvironmentVariable("STELLA_TENANT", "tenant-c"); + + var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)) + { + VerifyResponse = new AocVerifyResponse + { + Violations = Array.Empty(), + Truncated = true + } + }; + + var provider = BuildServiceProvider(backend); + + await CommandHandlers.HandleAocVerifyAsync( + provider, + sinceOption: "2025-01-01T00:00:00Z", + limitOption: 0, + sourcesOption: null, + codesOption: null, + format: "table", + exportPath: null, + tenantOverride: null, + disableColor: true, + verbose: false, + cancellationToken: CancellationToken.None); + + Assert.Equal(18, Environment.ExitCode); + + var output = console.Output; + Assert.Contains("Truncated", output); + } + finally + { + Environment.ExitCode = originalExitCode; + Environment.SetEnvironmentVariable("STELLA_TENANT", originalTenant); + AnsiConsole.Console = originalConsole; + } + } + + [Fact] + public async Task HandleAocVerifyAsync_MissingTenant_ReturnsUsageError() + { + var originalExitCode = Environment.ExitCode; + var originalTenant = Environment.GetEnvironmentVariable("STELLA_TENANT"); + + try + { + Environment.SetEnvironmentVariable("STELLA_TENANT", null); + + var backend = new StubBackendClient(new JobTriggerResult(true, "ok", null, null)); + var provider = BuildServiceProvider(backend); + + await CommandHandlers.HandleAocVerifyAsync( + provider, + sinceOption: "24h", + limitOption: null, + sourcesOption: null, + codesOption: null, + format: "table", + exportPath: null, + tenantOverride: null, + disableColor: true, + verbose: false, + cancellationToken: CancellationToken.None); + + Assert.Equal(71, Environment.ExitCode); + } + finally + { + Environment.ExitCode = originalExitCode; + Environment.SetEnvironmentVariable("STELLA_TENANT", originalTenant); + } + } + + private static IServiceProvider BuildServiceProvider( + IBackendOperationsClient backend, + IScannerExecutor? executor = null, + IScannerInstaller? installer = null, + StellaOpsCliOptions? options = null, + IStellaOpsTokenClient? tokenClient = null, + IConcelierObservationsClient? 
concelierClient = null) + { + var services = new ServiceCollection(); + services.AddSingleton(backend); + services.AddSingleton(_ => LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug))); + services.AddSingleton(new VerbosityState()); + var resolvedOptions = options ?? new StellaOpsCliOptions + { + ResultsDirectory = Path.Combine(Path.GetTempPath(), $"stellaops-cli-results-{Guid.NewGuid():N}") + }; + services.AddSingleton(resolvedOptions); + + var resolvedExecutor = executor ?? CreateDefaultExecutor(); + services.AddSingleton(resolvedExecutor); + services.AddSingleton(installer ?? new StubInstaller()); + + if (tokenClient is not null) + { + services.AddSingleton(tokenClient); + } + + services.AddSingleton( + concelierClient ?? new StubConcelierObservationsClient()); + + return services.BuildServiceProvider(); + } + + private static IScannerExecutor CreateDefaultExecutor() + { + var tempResultsFile = Path.GetTempFileName(); + var tempMetadataFile = Path.Combine( + Path.GetDirectoryName(tempResultsFile)!, + $"{Path.GetFileNameWithoutExtension(tempResultsFile)}-run.json"); + return new StubExecutor(new ScannerExecutionResult(0, tempResultsFile, tempMetadataFile)); + } + + private sealed class StubBackendClient : IBackendOperationsClient + { + private readonly JobTriggerResult _jobResult; + private static readonly RuntimePolicyEvaluationResult DefaultRuntimePolicyResult = + new RuntimePolicyEvaluationResult( + 0, + null, + null, + new ReadOnlyDictionary( + new Dictionary())); + + public StubBackendClient(JobTriggerResult result) + { + _jobResult = result; + } + + public string? LastJobKind { get; private set; } + public string? LastUploadPath { get; private set; } + public string? LastExcititorRoute { get; private set; } + public HttpMethod? LastExcititorMethod { get; private set; } + public object? LastExcititorPayload { get; private set; } + public List<(string ExportId, string DestinationPath, string? Algorithm, string? Digest)> ExportDownloads { get; } = new(); + public ExcititorOperationResult? ExcititorResult { get; set; } = new ExcititorOperationResult(true, "ok", null, null); + public IReadOnlyList ProviderSummaries { get; set; } = Array.Empty(); + public RuntimePolicyEvaluationResult RuntimePolicyResult { get; set; } = DefaultRuntimePolicyResult; + public PolicySimulationResult SimulationResult { get; set; } = new PolicySimulationResult( + new PolicySimulationDiff( + null, + 0, + 0, + 0, + new ReadOnlyDictionary(new Dictionary(0, StringComparer.Ordinal)), + new ReadOnlyCollection(Array.Empty())), + null); + public PolicyApiException? SimulationException { get; set; } + public (string PolicyId, PolicySimulationInput Input)? LastPolicySimulation { get; private set; } + public PolicyActivationResult ActivationResult { get; set; } = new PolicyActivationResult( + "activated", + new PolicyActivationRevision( + "P-0", + 1, + "active", + false, + DateTimeOffset.UtcNow, + DateTimeOffset.UtcNow, + new ReadOnlyCollection(Array.Empty()))); + public PolicyApiException? ActivationException { get; set; } + public (string PolicyId, int Version, PolicyActivationRequest Request)? LastPolicyActivation { get; private set; } + public AocIngestDryRunResponse DryRunResponse { get; set; } = new(); + public Exception? DryRunException { get; set; } + public AocIngestDryRunRequest? LastDryRunRequest { get; private set; } + public AocVerifyResponse VerifyResponse { get; set; } = new(); + public Exception? VerifyException { get; set; } + public AocVerifyRequest? 
LastVerifyRequest { get; private set; } + public PolicyFindingsPage FindingsPage { get; set; } = new PolicyFindingsPage(Array.Empty(), null, null); + public PolicyFindingsQuery? LastFindingsQuery { get; private set; } + public PolicyApiException? FindingsListException { get; set; } + public PolicyFindingDocument FindingDocument { get; set; } = new PolicyFindingDocument( + "finding-default", + "affected", + new PolicyFindingSeverity("High", 7.5), + "sbom:default", + Array.Empty(), + null, + 1, + DateTimeOffset.UtcNow, + null); + public (string PolicyId, string FindingId)? LastFindingGet { get; private set; } + public PolicyApiException? FindingGetException { get; set; } + public PolicyFindingExplainResult ExplainResult { get; set; } = new PolicyFindingExplainResult( + "finding-default", + 1, + new ReadOnlyCollection(Array.Empty()), + new ReadOnlyCollection(Array.Empty())); + public (string PolicyId, string FindingId, string? Mode)? LastFindingExplain { get; private set; } + public PolicyApiException? FindingExplainException { get; set; } + + public Task DownloadScannerAsync(string channel, string outputPath, bool overwrite, bool verbose, CancellationToken cancellationToken) + => throw new NotImplementedException(); + + public Task UploadScanResultsAsync(string filePath, CancellationToken cancellationToken) + { + LastUploadPath = filePath; + return Task.CompletedTask; + } + + public Task TriggerJobAsync(string jobKind, IDictionary parameters, CancellationToken cancellationToken) + { + LastJobKind = jobKind; + return Task.FromResult(_jobResult); + } + + public Task ExecuteExcititorOperationAsync(string route, HttpMethod method, object? payload, CancellationToken cancellationToken) + { + LastExcititorRoute = route; + LastExcititorMethod = method; + LastExcititorPayload = payload; + return Task.FromResult(ExcititorResult ?? new ExcititorOperationResult(true, "ok", null, null)); + } + + public Task DownloadExcititorExportAsync(string exportId, string destinationPath, string? expectedDigestAlgorithm, string? 
expectedDigest, CancellationToken cancellationToken) + { + var fullPath = Path.GetFullPath(destinationPath); + var directory = Path.GetDirectoryName(fullPath); + if (!string.IsNullOrEmpty(directory)) + { + Directory.CreateDirectory(directory); + } + + File.WriteAllText(fullPath, "{}"); + var info = new FileInfo(fullPath); + ExportDownloads.Add((exportId, fullPath, expectedDigestAlgorithm, expectedDigest)); + return Task.FromResult(new ExcititorExportDownloadResult(fullPath, info.Length, false)); + } + + public Task> GetExcititorProvidersAsync(bool includeDisabled, CancellationToken cancellationToken) + => Task.FromResult(ProviderSummaries); + + public Task EvaluateRuntimePolicyAsync(RuntimePolicyEvaluationRequest request, CancellationToken cancellationToken) + => Task.FromResult(RuntimePolicyResult); + + public Task SimulatePolicyAsync(string policyId, PolicySimulationInput input, CancellationToken cancellationToken) + { + LastPolicySimulation = (policyId, input); + if (SimulationException is not null) + { + throw SimulationException; + } + + return Task.FromResult(SimulationResult); + } + + public Task ActivatePolicyRevisionAsync(string policyId, int version, PolicyActivationRequest request, CancellationToken cancellationToken) + { + LastPolicyActivation = (policyId, version, request); + if (ActivationException is not null) + { + throw ActivationException; + } + + return Task.FromResult(ActivationResult); + } + + public Task ExecuteAocIngestDryRunAsync(AocIngestDryRunRequest request, CancellationToken cancellationToken) + { + LastDryRunRequest = request; + if (DryRunException is not null) + { + throw DryRunException; + } + + return Task.FromResult(DryRunResponse); + } + + public Task ExecuteAocVerifyAsync(AocVerifyRequest request, CancellationToken cancellationToken) + { + LastVerifyRequest = request; + if (VerifyException is not null) + { + throw VerifyException; + } + + return Task.FromResult(VerifyResponse); + } + + public Task GetPolicyFindingsAsync(PolicyFindingsQuery query, CancellationToken cancellationToken) + { + LastFindingsQuery = query; + if (FindingsListException is not null) + { + throw FindingsListException; + } + + return Task.FromResult(FindingsPage); + } + + public Task GetPolicyFindingAsync(string policyId, string findingId, CancellationToken cancellationToken) + { + LastFindingGet = (policyId, findingId); + if (FindingGetException is not null) + { + throw FindingGetException; + } + + return Task.FromResult(FindingDocument); + } + + public Task GetPolicyFindingExplainAsync(string policyId, string findingId, string? mode, CancellationToken cancellationToken) + { + LastFindingExplain = (policyId, findingId, mode); + if (FindingExplainException is not null) + { + throw FindingExplainException; + } + + return Task.FromResult(ExplainResult); + } + + + public Task DownloadOfflineKitAsync(string? 
            bundleId, string destinationDirectory, bool overwrite, bool resume, CancellationToken cancellationToken)
+            => throw new NotSupportedException();
+
+        public Task ImportOfflineKitAsync(OfflineKitImportRequest request, CancellationToken cancellationToken)
+            => throw new NotSupportedException();
+
+        public Task GetOfflineKitStatusAsync(CancellationToken cancellationToken)
+            => throw new NotSupportedException();
+    }
+
+    private sealed class StubExecutor : IScannerExecutor
+    {
+        private readonly ScannerExecutionResult _result;
+
+        public StubExecutor(ScannerExecutionResult result)
+        {
+            _result = result;
+        }
+
+        public Task<ScannerExecutionResult> RunAsync(string runner, string entry, string targetDirectory, string resultsDirectory, IReadOnlyList<string> arguments, bool verbose, CancellationToken cancellationToken)
+        {
+            Directory.CreateDirectory(Path.GetDirectoryName(_result.ResultsPath)!);
+            if (!File.Exists(_result.ResultsPath))
+            {
+                File.WriteAllText(_result.ResultsPath, "{}");
+            }
+
+            Directory.CreateDirectory(Path.GetDirectoryName(_result.RunMetadataPath)!);
+            if (!File.Exists(_result.RunMetadataPath))
+            {
+                File.WriteAllText(_result.RunMetadataPath, "{}");
+            }
+
+            return Task.FromResult(_result);
+        }
+    }
+
+    private sealed class StubInstaller : IScannerInstaller
+    {
+        public Task InstallAsync(string artifactPath, bool verbose, CancellationToken cancellationToken)
+            => Task.CompletedTask;
+    }
+
+    private sealed class StubTokenClient : IStellaOpsTokenClient
+    {
+        private readonly StellaOpsTokenResult _token;
+
+        public StubTokenClient()
+        {
+            _token = new StellaOpsTokenResult(
+                "token-123",
+                "Bearer",
+                DateTimeOffset.UtcNow.AddMinutes(30),
+                new[] { StellaOpsScopes.ConcelierJobsTrigger });
+        }
+
+        public int ClientCredentialRequests { get; private set; }
+        public IReadOnlyDictionary<string, string>? LastAdditionalParameters { get; private set; }
+        public int PasswordRequests { get; private set; }
+        public int ClearRequests { get; private set; }
+        public StellaOpsTokenCacheEntry? CachedEntry { get; set; }
+
+        public ValueTask CacheTokenAsync(string key, StellaOpsTokenCacheEntry entry, CancellationToken cancellationToken = default)
+        {
+            CachedEntry = entry;
+            return ValueTask.CompletedTask;
+        }
+
+        public ValueTask ClearCachedTokenAsync(string key, CancellationToken cancellationToken = default)
+        {
+            ClearRequests++;
+            CachedEntry = null;
+            return ValueTask.CompletedTask;
+        }
+
+        public Task<JsonWebKeySet> GetJsonWebKeySetAsync(CancellationToken cancellationToken = default)
+            => Task.FromResult(new JsonWebKeySet("{\"keys\":[]}"));
+
+        public ValueTask<StellaOpsTokenCacheEntry?> GetCachedTokenAsync(string key, CancellationToken cancellationToken = default)
+            => ValueTask.FromResult(CachedEntry);
+
+        public Task<StellaOpsTokenResult> RequestClientCredentialsTokenAsync(string? scope = null, IReadOnlyDictionary<string, string>? additionalParameters = null, CancellationToken cancellationToken = default)
+        {
+            ClientCredentialRequests++;
+            LastAdditionalParameters = additionalParameters;
+            return Task.FromResult(_token);
+        }
+
+        public Task<StellaOpsTokenResult> RequestPasswordTokenAsync(string username, string password, string? scope = null, IReadOnlyDictionary<string, string>? additionalParameters = null, CancellationToken cancellationToken = default)
+        {
+            PasswordRequests++;
+            LastAdditionalParameters = additionalParameters;
+            return Task.FromResult(_token);
+        }
+    }
+
+    private static string CreateUnsignedJwt(params (string Key, object Value)[] claims)
+    {
+        var headerJson = "{\"alg\":\"none\",\"typ\":\"JWT\"}";
+        var payload = new Dictionary<string, object>(StringComparer.Ordinal);
+        foreach (var claim in claims)
+        {
+            payload[claim.Key] = claim.Value;
+        }
+
+        var payloadJson = JsonSerializer.Serialize(payload);
+        return $"{Base64UrlEncode(headerJson)}.{Base64UrlEncode(payloadJson)}.";
+    }
+
+    private static string Base64UrlEncode(string value)
+    {
+        var bytes = Encoding.UTF8.GetBytes(value);
+        return Convert.ToBase64String(bytes)
+            .TrimEnd('=')
+            .Replace('+', '-')
+            .Replace('/', '_');
+    }
+
+    private sealed class StubConcelierObservationsClient : IConcelierObservationsClient
+    {
+        private readonly AdvisoryObservationsResponse _response;
+
+        public StubConcelierObservationsClient(AdvisoryObservationsResponse? response = null)
+        {
+            _response = response ?? new AdvisoryObservationsResponse();
+        }
+
+        public AdvisoryObservationsQuery? LastQuery { get; private set; }
+
+        public Task<AdvisoryObservationsResponse> GetObservationsAsync(
+            AdvisoryObservationsQuery query,
+            CancellationToken cancellationToken)
+        {
+            LastQuery = query;
+            return Task.FromResult(_response);
+        }
+    }
+}
diff --git a/src/StellaOps.Cli.Tests/Configuration/CliBootstrapperTests.cs b/src/Cli/__Tests/StellaOps.Cli.Tests/Configuration/CliBootstrapperTests.cs
similarity index 100%
rename from src/StellaOps.Cli.Tests/Configuration/CliBootstrapperTests.cs
rename to src/Cli/__Tests/StellaOps.Cli.Tests/Configuration/CliBootstrapperTests.cs
diff --git a/src/StellaOps.Cli.Tests/Plugins/CliCommandModuleLoaderTests.cs b/src/Cli/__Tests/StellaOps.Cli.Tests/Plugins/CliCommandModuleLoaderTests.cs
similarity index 97%
rename from src/StellaOps.Cli.Tests/Plugins/CliCommandModuleLoaderTests.cs
rename to src/Cli/__Tests/StellaOps.Cli.Tests/Plugins/CliCommandModuleLoaderTests.cs
index 390f24e0..9fa44cb2 100644
--- a/src/StellaOps.Cli.Tests/Plugins/CliCommandModuleLoaderTests.cs
+++ b/src/Cli/__Tests/StellaOps.Cli.Tests/Plugins/CliCommandModuleLoaderTests.cs
@@ -1,43 +1,43 @@
-using System;
-using System.CommandLine;
-using System.IO;
-using System.Threading;
-using Microsoft.Extensions.DependencyInjection;
-using Microsoft.Extensions.Logging;
-using Microsoft.Extensions.Logging.Abstractions;
-using StellaOps.Cli.Configuration;
-using StellaOps.Cli.Plugins;
-using Xunit;
-
-namespace StellaOps.Cli.Tests.Plugins;
-
-public sealed class CliCommandModuleLoaderTests
-{
-    [Fact]
-    public void RegisterModules_LoadsNonCoreCommandsFromPlugin()
-    {
-        var options = new StellaOpsCliOptions();
-        var repoRoot = Path.GetFullPath(
-            Path.Combine(AppContext.BaseDirectory, "..", "..", "..", "..", ".."));
-
-        options.Plugins.BaseDirectory = repoRoot;
-        options.Plugins.Directory = "plugins/cli";
-        options.Plugins.ManifestSearchPattern = "manifest.json";
-
-        var services = new ServiceCollection()
-            .AddSingleton(options)
-            .BuildServiceProvider();
-
-        var logger = NullLoggerFactory.Instance.CreateLogger();
-        var loader = new CliCommandModuleLoader(services, options, logger);
-
-        var root = new RootCommand();
-        var verbose = new Option("--verbose");
-
-        loader.RegisterModules(root, verbose, CancellationToken.None);
-
-        Assert.Contains(root.Children, command => string.Equals(command.Name, "excititor", StringComparison.Ordinal));
-        Assert.Contains(root.Children, command =>
string.Equals(command.Name, "runtime", StringComparison.Ordinal)); - Assert.Contains(root.Children, command => string.Equals(command.Name, "offline", StringComparison.Ordinal)); - } -} +using System; +using System.CommandLine; +using System.IO; +using System.Threading; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using StellaOps.Cli.Configuration; +using StellaOps.Cli.Plugins; +using Xunit; + +namespace StellaOps.Cli.Tests.Plugins; + +public sealed class CliCommandModuleLoaderTests +{ + [Fact] + public void RegisterModules_LoadsNonCoreCommandsFromPlugin() + { + var options = new StellaOpsCliOptions(); + var repoRoot = Path.GetFullPath( + Path.Combine(AppContext.BaseDirectory, "..", "..", "..", "..", "..")); + + options.Plugins.BaseDirectory = repoRoot; + options.Plugins.Directory = "plugins/cli"; + options.Plugins.ManifestSearchPattern = "manifest.json"; + + var services = new ServiceCollection() + .AddSingleton(options) + .BuildServiceProvider(); + + var logger = NullLoggerFactory.Instance.CreateLogger(); + var loader = new CliCommandModuleLoader(services, options, logger); + + var root = new RootCommand(); + var verbose = new Option("--verbose"); + + loader.RegisterModules(root, verbose, CancellationToken.None); + + Assert.Contains(root.Children, command => string.Equals(command.Name, "excititor", StringComparison.Ordinal)); + Assert.Contains(root.Children, command => string.Equals(command.Name, "runtime", StringComparison.Ordinal)); + Assert.Contains(root.Children, command => string.Equals(command.Name, "offline", StringComparison.Ordinal)); + } +} diff --git a/src/StellaOps.Cli.Tests/Plugins/RestartOnlyCliPluginGuardTests.cs b/src/Cli/__Tests/StellaOps.Cli.Tests/Plugins/RestartOnlyCliPluginGuardTests.cs similarity index 96% rename from src/StellaOps.Cli.Tests/Plugins/RestartOnlyCliPluginGuardTests.cs rename to src/Cli/__Tests/StellaOps.Cli.Tests/Plugins/RestartOnlyCliPluginGuardTests.cs index 0d2c781f..255a14a7 100644 --- a/src/StellaOps.Cli.Tests/Plugins/RestartOnlyCliPluginGuardTests.cs +++ b/src/Cli/__Tests/StellaOps.Cli.Tests/Plugins/RestartOnlyCliPluginGuardTests.cs @@ -1,29 +1,29 @@ -using StellaOps.Cli.Plugins; -using Xunit; - -namespace StellaOps.Cli.Tests.Plugins; - -public sealed class RestartOnlyCliPluginGuardTests -{ - [Fact] - public void EnsureRegistrationAllowed_AllowsDuringStartup() - { - var guard = new RestartOnlyCliPluginGuard(); - guard.EnsureRegistrationAllowed("./plugins/sample.dll"); - guard.Seal(); - - // Re-registering known plug-ins after sealing should succeed. - guard.EnsureRegistrationAllowed("./plugins/sample.dll"); - Assert.True(guard.IsSealed); - Assert.Single(guard.KnownPlugins); - } - - [Fact] - public void EnsureRegistrationAllowed_ThrowsForUnknownAfterSeal() - { - var guard = new RestartOnlyCliPluginGuard(); - guard.Seal(); - - Assert.Throws(() => guard.EnsureRegistrationAllowed("./plugins/new.dll")); - } -} +using StellaOps.Cli.Plugins; +using Xunit; + +namespace StellaOps.Cli.Tests.Plugins; + +public sealed class RestartOnlyCliPluginGuardTests +{ + [Fact] + public void EnsureRegistrationAllowed_AllowsDuringStartup() + { + var guard = new RestartOnlyCliPluginGuard(); + guard.EnsureRegistrationAllowed("./plugins/sample.dll"); + guard.Seal(); + + // Re-registering known plug-ins after sealing should succeed. 
+ guard.EnsureRegistrationAllowed("./plugins/sample.dll"); + Assert.True(guard.IsSealed); + Assert.Single(guard.KnownPlugins); + } + + [Fact] + public void EnsureRegistrationAllowed_ThrowsForUnknownAfterSeal() + { + var guard = new RestartOnlyCliPluginGuard(); + guard.Seal(); + + Assert.Throws(() => guard.EnsureRegistrationAllowed("./plugins/new.dll")); + } +} diff --git a/src/StellaOps.Cli.Tests/Services/AuthorityDiagnosticsReporterTests.cs b/src/Cli/__Tests/StellaOps.Cli.Tests/Services/AuthorityDiagnosticsReporterTests.cs similarity index 100% rename from src/StellaOps.Cli.Tests/Services/AuthorityDiagnosticsReporterTests.cs rename to src/Cli/__Tests/StellaOps.Cli.Tests/Services/AuthorityDiagnosticsReporterTests.cs diff --git a/src/StellaOps.Cli.Tests/Services/BackendOperationsClientTests.cs b/src/Cli/__Tests/StellaOps.Cli.Tests/Services/BackendOperationsClientTests.cs similarity index 97% rename from src/StellaOps.Cli.Tests/Services/BackendOperationsClientTests.cs rename to src/Cli/__Tests/StellaOps.Cli.Tests/Services/BackendOperationsClientTests.cs index 0112fcdf..84f78dbf 100644 --- a/src/StellaOps.Cli.Tests/Services/BackendOperationsClientTests.cs +++ b/src/Cli/__Tests/StellaOps.Cli.Tests/Services/BackendOperationsClientTests.cs @@ -1,1131 +1,1131 @@ -using System; -using System.Collections.Generic; -using System.Collections.ObjectModel; -using System.Globalization; -using System.IO; -using System.Net; -using System.Net.Http; -using System.Net.Http.Json; -using System.Security.Cryptography; -using System.Text; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.IdentityModel.Tokens; -using StellaOps.Auth.Abstractions; -using StellaOps.Auth.Client; -using StellaOps.Cli.Configuration; -using StellaOps.Cli.Services; -using StellaOps.Cli.Services.Models; -using StellaOps.Cli.Services.Models.Transport; -using StellaOps.Cli.Tests.Testing; -using System.Linq; - -namespace StellaOps.Cli.Tests.Services; - -public sealed class BackendOperationsClientTests -{ - [Fact] - public async Task DownloadScannerAsync_VerifiesDigestAndWritesMetadata() - { - using var temp = new TempDirectory(); - - var contentBytes = Encoding.UTF8.GetBytes("scanner-blob"); - var digestHex = Convert.ToHexString(SHA256.HashData(contentBytes)).ToLowerInvariant(); - - var handler = new StubHttpMessageHandler((request, _) => - { - var response = new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new ByteArrayContent(contentBytes), - RequestMessage = request - }; - - response.Headers.Add("X-StellaOps-Digest", $"sha256:{digestHex}"); - response.Content.Headers.LastModified = DateTimeOffset.UtcNow; - response.Content.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/octet-stream"); - return response; - }); - - var httpClient = new HttpClient(handler) - { - BaseAddress = new Uri("https://concelier.example") - }; - - var options = new StellaOpsCliOptions - { - BackendUrl = "https://concelier.example", - ScannerCacheDirectory = temp.Path, - ScannerDownloadAttempts = 1 - }; - - var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); - var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger()); - - var targetPath = Path.Combine(temp.Path, "scanner.tar.gz"); - var result = await client.DownloadScannerAsync("stable", targetPath, overwrite: false, verbose: true, CancellationToken.None); - - Assert.False(result.FromCache); - 
Assert.True(File.Exists(targetPath)); - - var metadataPath = targetPath + ".metadata.json"; - Assert.True(File.Exists(metadataPath)); - - using var document = JsonDocument.Parse(File.ReadAllText(metadataPath)); - Assert.Equal($"sha256:{digestHex}", document.RootElement.GetProperty("digest").GetString()); - Assert.Equal("stable", document.RootElement.GetProperty("channel").GetString()); - } - - [Fact] - public async Task DownloadScannerAsync_ThrowsOnDigestMismatch() - { - using var temp = new TempDirectory(); - - var contentBytes = Encoding.UTF8.GetBytes("scanner-data"); - var handler = new StubHttpMessageHandler((request, _) => - { - var response = new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new ByteArrayContent(contentBytes), - RequestMessage = request - }; - response.Headers.Add("X-StellaOps-Digest", "sha256:deadbeef"); - return response; - }); - - var httpClient = new HttpClient(handler) - { - BaseAddress = new Uri("https://concelier.example") - }; - - var options = new StellaOpsCliOptions - { - BackendUrl = "https://concelier.example", - ScannerCacheDirectory = temp.Path, - ScannerDownloadAttempts = 1 - }; - - var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); - var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger()); - - var targetPath = Path.Combine(temp.Path, "scanner.tar.gz"); - - await Assert.ThrowsAsync(() => client.DownloadScannerAsync("stable", targetPath, overwrite: true, verbose: false, CancellationToken.None)); - Assert.False(File.Exists(targetPath)); - } - - [Fact] - public async Task DownloadScannerAsync_RetriesOnFailure() - { - using var temp = new TempDirectory(); - - var successBytes = Encoding.UTF8.GetBytes("success"); - var digestHex = Convert.ToHexString(SHA256.HashData(successBytes)).ToLowerInvariant(); - var attempts = 0; - - var handler = new StubHttpMessageHandler( - (request, _) => - { - attempts++; - return new HttpResponseMessage(HttpStatusCode.InternalServerError) - { - RequestMessage = request, - Content = new StringContent("error") - }; - }, - (request, _) => - { - attempts++; - var response = new HttpResponseMessage(HttpStatusCode.OK) - { - RequestMessage = request, - Content = new ByteArrayContent(successBytes) - }; - response.Headers.Add("X-StellaOps-Digest", $"sha256:{digestHex}"); - return response; - }); - - var httpClient = new HttpClient(handler) - { - BaseAddress = new Uri("https://concelier.example") - }; - - var options = new StellaOpsCliOptions - { - BackendUrl = "https://concelier.example", - ScannerCacheDirectory = temp.Path, - ScannerDownloadAttempts = 3 - }; - - var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); - var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger()); - - var targetPath = Path.Combine(temp.Path, "scanner.tar.gz"); - var result = await client.DownloadScannerAsync("stable", targetPath, overwrite: false, verbose: false, CancellationToken.None); - - Assert.Equal(2, attempts); - Assert.False(result.FromCache); - Assert.True(File.Exists(targetPath)); - } - - [Fact] - public async Task UploadScanResultsAsync_RetriesOnRetryAfter() - { - using var temp = new TempDirectory(); - var filePath = Path.Combine(temp.Path, "scan.json"); - await File.WriteAllTextAsync(filePath, "{}"); - - var attempts = 0; - var handler = new StubHttpMessageHandler( - (request, _) => - { - attempts++; - var response = new HttpResponseMessage(HttpStatusCode.TooManyRequests) - { - 
RequestMessage = request, - Content = new StringContent("busy") - }; - response.Headers.Add("Retry-After", "1"); - return response; - }, - (request, _) => - { - attempts++; - return new HttpResponseMessage(HttpStatusCode.OK) - { - RequestMessage = request - }; - }); - - var httpClient = new HttpClient(handler) - { - BaseAddress = new Uri("https://concelier.example") - }; - - var options = new StellaOpsCliOptions - { - BackendUrl = "https://concelier.example", - ScanUploadAttempts = 3 - }; - - var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); - var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger()); - - await client.UploadScanResultsAsync(filePath, CancellationToken.None); - - Assert.Equal(2, attempts); - } - - [Fact] - public async Task UploadScanResultsAsync_ThrowsAfterMaxAttempts() - { - using var temp = new TempDirectory(); - var filePath = Path.Combine(temp.Path, "scan.json"); - await File.WriteAllTextAsync(filePath, "{}"); - - var attempts = 0; - var handler = new StubHttpMessageHandler( - (request, _) => - { - attempts++; - return new HttpResponseMessage(HttpStatusCode.BadGateway) - { - RequestMessage = request, - Content = new StringContent("bad gateway") - }; - }); - - var httpClient = new HttpClient(handler) - { - BaseAddress = new Uri("https://concelier.example") - }; - - var options = new StellaOpsCliOptions - { - BackendUrl = "https://concelier.example", - ScanUploadAttempts = 2 - }; - - var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); - var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger()); - - await Assert.ThrowsAsync(() => client.UploadScanResultsAsync(filePath, CancellationToken.None)); - Assert.Equal(2, attempts); - } - - [Fact] - public async Task TriggerJobAsync_ReturnsAcceptedResult() - { - var handler = new StubHttpMessageHandler((request, _) => - { - var response = new HttpResponseMessage(HttpStatusCode.Accepted) - { - RequestMessage = request, - Content = JsonContent.Create(new JobRunResponse - { - RunId = Guid.NewGuid(), - Status = "queued", - Kind = "export:json", - Trigger = "cli", - CreatedAt = DateTimeOffset.UtcNow - }) - }; - response.Headers.Location = new Uri("/jobs/export:json/runs/123", UriKind.Relative); - return response; - }); - - var httpClient = new HttpClient(handler) - { - BaseAddress = new Uri("https://concelier.example") - }; - - var options = new StellaOpsCliOptions { BackendUrl = "https://concelier.example" }; - var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); - var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger()); - - var result = await client.TriggerJobAsync("export:json", new Dictionary(), CancellationToken.None); - - Assert.True(result.Success); - Assert.Equal("Accepted", result.Message); - Assert.Equal("/jobs/export:json/runs/123", result.Location); - } - - [Fact] - public async Task TriggerJobAsync_ReturnsFailureMessage() - { - var handler = new StubHttpMessageHandler((request, _) => - { - var problem = new - { - title = "Job already running", - detail = "export job active" - }; - - var response = new HttpResponseMessage(HttpStatusCode.Conflict) - { - RequestMessage = request, - Content = JsonContent.Create(problem) - }; - return response; - }); - - var httpClient = new HttpClient(handler) - { - BaseAddress = new Uri("https://concelier.example") - }; - - var options = new StellaOpsCliOptions { 
BackendUrl = "https://concelier.example" }; - var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); - var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger()); - - var result = await client.TriggerJobAsync("export:json", new Dictionary(), CancellationToken.None); - - Assert.False(result.Success); - Assert.Contains("Job already running", result.Message); - } - - [Fact] - public async Task TriggerJobAsync_UsesAuthorityTokenWhenConfigured() - { - using var temp = new TempDirectory(); - - var handler = new StubHttpMessageHandler((request, _) => - { - Assert.NotNull(request.Headers.Authorization); - Assert.Equal("Bearer", request.Headers.Authorization!.Scheme); - Assert.Equal("token-123", request.Headers.Authorization.Parameter); - - return new HttpResponseMessage(HttpStatusCode.Accepted) - { - RequestMessage = request, - Content = JsonContent.Create(new JobRunResponse - { - RunId = Guid.NewGuid(), - Kind = "test", - Status = "Pending", - Trigger = "cli", - CreatedAt = DateTimeOffset.UtcNow - }) - }; - }); - - var httpClient = new HttpClient(handler) - { - BaseAddress = new Uri("https://concelier.example") - }; - - var options = new StellaOpsCliOptions - { - BackendUrl = "https://concelier.example", - Authority = - { - Url = "https://authority.example", - ClientId = "cli", - ClientSecret = "secret", - Scope = "concelier.jobs.trigger", - TokenCacheDirectory = temp.Path - } - }; - - var tokenClient = new StubTokenClient(); - var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); - var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger(), tokenClient); - - var result = await client.TriggerJobAsync("test", new Dictionary(), CancellationToken.None); - - Assert.True(result.Success); - Assert.Equal("Accepted", result.Message); - Assert.True(tokenClient.Requests > 0); - } - - [Fact] - public async Task EvaluateRuntimePolicyAsync_ParsesDecisionPayload() - { - var handler = new StubHttpMessageHandler((request, _) => - { - Assert.Equal(HttpMethod.Post, request.Method); - Assert.Equal("/api/scanner/policy/runtime", request.RequestUri!.AbsolutePath); - - var body = request.Content!.ReadAsStringAsync().GetAwaiter().GetResult(); - using var document = JsonDocument.Parse(body); - var root = document.RootElement; - Assert.Equal("prod", root.GetProperty("namespace").GetString()); - Assert.Equal("payments", root.GetProperty("labels").GetProperty("app").GetString()); - var images = root.GetProperty("images"); - Assert.Equal(2, images.GetArrayLength()); - Assert.Equal("ghcr.io/app@sha256:abc", images[0].GetString()); - Assert.Equal("ghcr.io/api@sha256:def", images[1].GetString()); - - var responseJson = @"{ - ""ttlSeconds"": 120, - ""policyRevision"": ""rev-123"", - ""expiresAtUtc"": ""2025-10-19T12:34:56Z"", - ""results"": { - ""ghcr.io/app@sha256:abc"": { - ""policyVerdict"": ""pass"", - ""signed"": true, - ""hasSbomReferrers"": true, - ""reasons"": [], - ""rekor"": { ""uuid"": ""uuid-1"", ""url"": ""https://rekor.example/uuid-1"", ""verified"": true }, - ""confidence"": 0.87, - ""quieted"": false, - ""metadata"": { ""note"": ""cached"" } - }, - ""ghcr.io/api@sha256:def"": { - ""policyVerdict"": ""fail"", - ""signed"": false, - ""hasSbomReferrers"": false, - ""reasons"": [""unsigned"", ""missing sbom""], - ""quietedBy"": ""manual-override"" - } - } -}"; - - return new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StringContent(responseJson, Encoding.UTF8, 
"application/json"), - RequestMessage = request - }; - }); - - var httpClient = new HttpClient(handler) - { - BaseAddress = new Uri("https://scanner.example/") - }; - - var options = new StellaOpsCliOptions - { - BackendUrl = "https://scanner.example/" - }; - - var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); - var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger()); - - var labels = new ReadOnlyDictionary(new Dictionary { ["app"] = "payments" }); - var imagesList = new ReadOnlyCollection(new List - { - "ghcr.io/app@sha256:abc", - "ghcr.io/app@sha256:abc", - "ghcr.io/api@sha256:def" - }); - var requestModel = new RuntimePolicyEvaluationRequest("prod", labels, imagesList); - - var result = await client.EvaluateRuntimePolicyAsync(requestModel, CancellationToken.None); - - Assert.Equal(120, result.TtlSeconds); - Assert.Equal("rev-123", result.PolicyRevision); - Assert.Equal(DateTimeOffset.Parse("2025-10-19T12:34:56Z", CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal), result.ExpiresAtUtc); - Assert.Equal(2, result.Decisions.Count); - - var primary = result.Decisions["ghcr.io/app@sha256:abc"]; - Assert.Equal("pass", primary.PolicyVerdict); - Assert.True(primary.Signed); - Assert.True(primary.HasSbomReferrers); - Assert.Empty(primary.Reasons); - Assert.NotNull(primary.Rekor); - Assert.Equal("uuid-1", primary.Rekor!.Uuid); - Assert.Equal("https://rekor.example/uuid-1", primary.Rekor.Url); - Assert.True(primary.Rekor.Verified); - Assert.Equal(0.87, Assert.IsType(primary.AdditionalProperties["confidence"]), 3); - Assert.False(Assert.IsType(primary.AdditionalProperties["quieted"])); - var metadataJson = Assert.IsType(primary.AdditionalProperties["metadata"]); - using var metadataDocument = JsonDocument.Parse(metadataJson); - Assert.Equal("cached", metadataDocument.RootElement.GetProperty("note").GetString()); - - var secondary = result.Decisions["ghcr.io/api@sha256:def"]; - Assert.Equal("fail", secondary.PolicyVerdict); - Assert.False(secondary.Signed); - Assert.False(secondary.HasSbomReferrers); - Assert.Collection(secondary.Reasons, - item => Assert.Equal("unsigned", item), - item => Assert.Equal("missing sbom", item)); - Assert.Equal("manual-override", Assert.IsType(secondary.AdditionalProperties["quietedBy"])); - } - - [Fact] - public async Task DownloadOfflineKitAsync_DownloadsBundleAndWritesMetadata() - { - using var temp = new TempDirectory(); - - var bundleBytes = Encoding.UTF8.GetBytes("bundle-data"); - var manifestBytes = Encoding.UTF8.GetBytes("{\"artifacts\":[]}"); - var bundleDigest = Convert.ToHexString(SHA256.HashData(bundleBytes)).ToLowerInvariant(); - var manifestDigest = Convert.ToHexString(SHA256.HashData(manifestBytes)).ToLowerInvariant(); - - var metadataPayload = JsonSerializer.Serialize(new - { - bundleId = "2025-10-20-full", - bundleName = "stella-ops-offline-kit-2025-10-20.tgz", - bundleSha256 = $"sha256:{bundleDigest}", - bundleSize = (long)bundleBytes.Length, - bundleUrl = "https://mirror.example/stella-ops-offline-kit-2025-10-20.tgz", - bundleSignatureName = "stella-ops-offline-kit-2025-10-20.tgz.sig", - bundleSignatureUrl = "https://mirror.example/stella-ops-offline-kit-2025-10-20.tgz.sig", - manifestName = "offline-manifest-2025-10-20.json", - manifestSha256 = $"sha256:{manifestDigest}", - manifestUrl = "https://mirror.example/offline-manifest-2025-10-20.json", - manifestSignatureName = "offline-manifest-2025-10-20.json.jws", - 
manifestSignatureUrl = "https://mirror.example/offline-manifest-2025-10-20.json.jws", - capturedAt = DateTimeOffset.UtcNow - }, new JsonSerializerOptions(JsonSerializerDefaults.Web)); - - var handler = new StubHttpMessageHandler( - (request, _) => - { - Assert.Equal("https://backend.example/api/offline-kit/bundles/latest", request.RequestUri!.ToString()); - return new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StringContent(metadataPayload) - }; - }, - (request, _) => - { - var absolute = request.RequestUri!.AbsoluteUri; - if (absolute.EndsWith(".tgz", StringComparison.OrdinalIgnoreCase)) - { - return new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new ByteArrayContent(bundleBytes) - }; - } - - if (absolute.EndsWith(".json", StringComparison.OrdinalIgnoreCase)) - { - return new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new ByteArrayContent(manifestBytes) - }; - } - - return new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new ByteArrayContent(Array.Empty()) - }; - }); - - var httpClient = new HttpClient(handler) - { - BaseAddress = new Uri("https://backend.example") - }; - - var options = new StellaOpsCliOptions - { - BackendUrl = "https://backend.example", - Offline = new StellaOpsCliOfflineOptions - { - KitsDirectory = temp.Path - } - }; - - var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); - var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger()); - - var result = await client.DownloadOfflineKitAsync(null, temp.Path, overwrite: false, resume: false, CancellationToken.None); - - Assert.False(result.FromCache); - Assert.True(File.Exists(result.BundlePath)); - Assert.True(File.Exists(result.ManifestPath)); - Assert.NotNull(result.BundleSignaturePath); - Assert.NotNull(result.ManifestSignaturePath); - Assert.True(File.Exists(result.MetadataPath)); - - using var metadata = JsonDocument.Parse(File.ReadAllText(result.MetadataPath)); - Assert.Equal("2025-10-20-full", metadata.RootElement.GetProperty("bundleId").GetString()); - Assert.Equal(bundleDigest, metadata.RootElement.GetProperty("bundleSha256").GetString()); - } - - [Fact] - public async Task DownloadOfflineKitAsync_ResumesPartialDownload() - { - using var temp = new TempDirectory(); - - var bundleBytes = Encoding.UTF8.GetBytes("partial-download-data"); - var manifestBytes = Encoding.UTF8.GetBytes("{\"manifest\":true}"); - var bundleDigest = Convert.ToHexString(SHA256.HashData(bundleBytes)).ToLowerInvariant(); - var manifestDigest = Convert.ToHexString(SHA256.HashData(manifestBytes)).ToLowerInvariant(); - - var metadataJson = JsonSerializer.Serialize(new - { - bundleId = "2025-10-21-full", - bundleName = "kit.tgz", - bundleSha256 = bundleDigest, - bundleSize = (long)bundleBytes.Length, - bundleUrl = "https://mirror.example/kit.tgz", - manifestName = "offline-manifest.json", - manifestSha256 = manifestDigest, - manifestUrl = "https://mirror.example/offline-manifest.json", - capturedAt = DateTimeOffset.UtcNow - }, new JsonSerializerOptions(JsonSerializerDefaults.Web)); - - var partialPath = Path.Combine(temp.Path, "kit.tgz.partial"); - await File.WriteAllBytesAsync(partialPath, bundleBytes.AsSpan(0, bundleBytes.Length / 2).ToArray()); - - var handler = new StubHttpMessageHandler( - (request, _) => new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StringContent(metadataJson) - }, - (request, _) => - { - if (request.RequestUri!.AbsoluteUri.EndsWith("kit.tgz", StringComparison.OrdinalIgnoreCase)) - { - 
Assert.NotNull(request.Headers.Range); - Assert.Equal(bundleBytes.Length / 2, request.Headers.Range!.Ranges.Single().From); - return new HttpResponseMessage(HttpStatusCode.PartialContent) - { - Content = new ByteArrayContent(bundleBytes.AsSpan(bundleBytes.Length / 2).ToArray()) - }; - } - - return new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new ByteArrayContent(manifestBytes) - }; - }); - - var httpClient = new HttpClient(handler) - { - BaseAddress = new Uri("https://backend.example") - }; - - var options = new StellaOpsCliOptions - { - BackendUrl = "https://backend.example", - Offline = new StellaOpsCliOfflineOptions - { - KitsDirectory = temp.Path - } - }; - - var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); - var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger()); - - var result = await client.DownloadOfflineKitAsync(null, temp.Path, overwrite: false, resume: true, CancellationToken.None); - - Assert.Equal(bundleDigest, result.Descriptor.BundleSha256); - Assert.Equal(bundleBytes.Length, new FileInfo(result.BundlePath).Length); - } - - [Fact] - public async Task ImportOfflineKitAsync_SendsMultipartPayload() - { - using var temp = new TempDirectory(); - var bundlePath = Path.Combine(temp.Path, "kit.tgz"); - var manifestPath = Path.Combine(temp.Path, "offline-manifest.json"); - - var bundleBytes = Encoding.UTF8.GetBytes("bundle-content"); - var manifestBytes = Encoding.UTF8.GetBytes("{\"manifest\":true}"); - await File.WriteAllBytesAsync(bundlePath, bundleBytes); - await File.WriteAllBytesAsync(manifestPath, manifestBytes); - - var bundleDigest = Convert.ToHexString(SHA256.HashData(bundleBytes)).ToLowerInvariant(); - var manifestDigest = Convert.ToHexString(SHA256.HashData(manifestBytes)).ToLowerInvariant(); - - var metadata = new OfflineKitMetadataDocument - { - BundleId = "2025-10-21-full", - BundleName = "kit.tgz", - BundleSha256 = bundleDigest, - BundleSize = bundleBytes.Length, - BundlePath = bundlePath, - CapturedAt = DateTimeOffset.UtcNow, - DownloadedAt = DateTimeOffset.UtcNow, - Channel = "stable", - Kind = "full", - ManifestName = "offline-manifest.json", - ManifestSha256 = manifestDigest, - ManifestSize = manifestBytes.Length, - ManifestPath = manifestPath, - IsDelta = false, - BaseBundleId = null - }; - - await File.WriteAllTextAsync(bundlePath + ".metadata.json", JsonSerializer.Serialize(metadata, new JsonSerializerOptions(JsonSerializerDefaults.Web) { WriteIndented = true })); - - var recordingHandler = new ImportRecordingHandler(); - var httpClient = new HttpClient(recordingHandler) - { - BaseAddress = new Uri("https://backend.example") - }; - - var options = new StellaOpsCliOptions - { - BackendUrl = "https://backend.example" - }; - - var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); - var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger()); - - var request = new OfflineKitImportRequest( - bundlePath, - manifestPath, - null, - null, - metadata.BundleId, - metadata.BundleSha256, - metadata.BundleSize, - metadata.CapturedAt, - metadata.Channel, - metadata.Kind, - metadata.IsDelta, - metadata.BaseBundleId, - metadata.ManifestSha256, - metadata.ManifestSize); - - var result = await client.ImportOfflineKitAsync(request, CancellationToken.None); - - Assert.Equal("imp-1", result.ImportId); - Assert.NotNull(recordingHandler.MetadataJson); - Assert.NotNull(recordingHandler.BundlePayload); - 
Assert.NotNull(recordingHandler.ManifestPayload); - - using var metadataJson = JsonDocument.Parse(recordingHandler.MetadataJson!); - Assert.Equal(bundleDigest, metadataJson.RootElement.GetProperty("bundleSha256").GetString()); - Assert.Equal(manifestDigest, metadataJson.RootElement.GetProperty("manifestSha256").GetString()); - } - - [Fact] - public async Task GetOfflineKitStatusAsync_ParsesResponse() - { - var captured = DateTimeOffset.UtcNow; - var imported = captured.AddMinutes(5); - - var statusJson = JsonSerializer.Serialize(new - { - current = new - { - bundleId = "2025-10-22-full", - channel = "stable", - kind = "full", - isDelta = false, - baseBundleId = (string?)null, - bundleSha256 = "sha256:abc123", - bundleSize = 42, - capturedAt = captured, - importedAt = imported - }, - components = new[] - { - new - { - name = "concelier-json", - version = "2025-10-22", - digest = "sha256:def456", - capturedAt = captured, - sizeBytes = 1234 - } - } - }, new JsonSerializerOptions(JsonSerializerDefaults.Web)); - - var handler = new StubHttpMessageHandler( - (request, _) => - { - Assert.Equal("https://backend.example/api/offline-kit/status", request.RequestUri!.ToString()); - return new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StringContent(statusJson) - }; - }); - - var httpClient = new HttpClient(handler) - { - BaseAddress = new Uri("https://backend.example") - }; - - var options = new StellaOpsCliOptions - { - BackendUrl = "https://backend.example" - }; - - var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); - var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger()); - - var status = await client.GetOfflineKitStatusAsync(CancellationToken.None); - - Assert.Equal("2025-10-22-full", status.BundleId); - Assert.Equal("stable", status.Channel); - Assert.Equal("full", status.Kind); - Assert.False(status.IsDelta); - Assert.Equal(42, status.BundleSize); - Assert.Single(status.Components); - Assert.Equal("concelier-json", status.Components[0].Name); - } - - private sealed class ImportRecordingHandler : HttpMessageHandler - { - public string? MetadataJson { get; private set; } - public byte[]? BundlePayload { get; private set; } - public byte[]? ManifestPayload { get; private set; } - - protected override async Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) - { - if (request.RequestUri!.AbsoluteUri.EndsWith("/api/offline-kit/import", StringComparison.OrdinalIgnoreCase)) - { - Assert.IsType(request.Content); - foreach (var part in (MultipartFormDataContent)request.Content!) - { - var name = part.Headers.ContentDisposition?.Name?.Trim('"'); - switch (name) - { - case "metadata": - MetadataJson = await part.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - break; - case "bundle": - BundlePayload = await part.ReadAsByteArrayAsync(cancellationToken).ConfigureAwait(false); - break; - case "manifest": - ManifestPayload = await part.ReadAsByteArrayAsync(cancellationToken).ConfigureAwait(false); - break; - } - } - } - - return new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StringContent("{\"importId\":\"imp-1\",\"status\":\"queued\",\"submittedAt\":\"2025-10-21T00:00:00Z\"}") - }; - } - } - - private sealed class StubTokenClient : IStellaOpsTokenClient - { - private readonly StellaOpsTokenResult _tokenResult; - - public int Requests { get; private set; } - - public string? LastScope { get; private set; } - - public IReadOnlyDictionary? 
LastAdditionalParameters { get; private set; } - - public StubTokenClient() - { - _tokenResult = new StellaOpsTokenResult( - "token-123", - "Bearer", - DateTimeOffset.UtcNow.AddMinutes(5), - new[] { StellaOpsScopes.ConcelierJobsTrigger }); - } - - public ValueTask CacheTokenAsync(string key, StellaOpsTokenCacheEntry entry, CancellationToken cancellationToken = default) - => ValueTask.CompletedTask; - - public ValueTask ClearCachedTokenAsync(string key, CancellationToken cancellationToken = default) - => ValueTask.CompletedTask; - - public Task GetJsonWebKeySetAsync(CancellationToken cancellationToken = default) - => Task.FromResult(new JsonWebKeySet("{\"keys\":[]}")); - - public ValueTask GetCachedTokenAsync(string key, CancellationToken cancellationToken = default) - => ValueTask.FromResult(null); - - public Task RequestClientCredentialsTokenAsync(string? scope = null, IReadOnlyDictionary? additionalParameters = null, CancellationToken cancellationToken = default) - { - Requests++; - LastScope = scope; - LastAdditionalParameters = additionalParameters; - return Task.FromResult(_tokenResult); - } - - public Task RequestPasswordTokenAsync(string username, string password, string? scope = null, IReadOnlyDictionary? additionalParameters = null, CancellationToken cancellationToken = default) - { - Requests++; - LastScope = scope; - LastAdditionalParameters = additionalParameters; - return Task.FromResult(_tokenResult); - } - } - - [Fact] - public async Task SimulatePolicyAsync_SendsPayloadAndParsesResponse() - { - string? capturedBody = null; - - var handler = new StubHttpMessageHandler((request, _) => - { - Assert.Equal(HttpMethod.Post, request.Method); - Assert.Equal("https://policy.example/api/policy/policies/P-7/simulate", request.RequestUri!.ToString()); - capturedBody = request.Content!.ReadAsStringAsync().Result; - - var responseDocument = new PolicySimulationResponseDocument - { - Diff = new PolicySimulationDiffDocument - { - SchemaVersion = "scheduler.policy-diff-summary@1", - Added = 2, - Removed = 1, - Unchanged = 10, - BySeverity = new Dictionary - { - ["critical"] = new PolicySimulationSeverityDeltaDocument { Up = 1 }, - ["high"] = new PolicySimulationSeverityDeltaDocument { Down = 1 } - }, - RuleHits = new List - { - new() { RuleId = "rule-block", RuleName = "Block Critical", Up = 1, Down = 0 } - } - }, - ExplainUri = "blob://policy/P-7/simulation.json" - }; - - var json = JsonSerializer.Serialize(responseDocument, new JsonSerializerOptions(JsonSerializerDefaults.Web)); - return new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StringContent(json, Encoding.UTF8, "application/json"), - RequestMessage = request - }; - }); - - var httpClient = new HttpClient(handler) - { - BaseAddress = new Uri("https://policy.example") - }; - - var options = new StellaOpsCliOptions { BackendUrl = "https://policy.example" }; - var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); - var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger()); - - var sbomSet = new ReadOnlyCollection(new List { "sbom:A", "sbom:B" }); - var environment = new ReadOnlyDictionary(new Dictionary(StringComparer.Ordinal) - { - ["sealed"] = false, - ["threshold"] = 0.85 - }); - var input = new PolicySimulationInput(3, 4, sbomSet, environment, true); - - var result = await client.SimulatePolicyAsync("P-7", input, CancellationToken.None); - - Assert.NotNull(capturedBody); - using (var document = JsonDocument.Parse(capturedBody!)) - { - var root 
= document.RootElement; - Assert.Equal(3, root.GetProperty("baseVersion").GetInt32()); - Assert.Equal(4, root.GetProperty("candidateVersion").GetInt32()); - Assert.True(root.TryGetProperty("env", out var envElement) && envElement.GetProperty("sealed").GetBoolean() == false); - Assert.Equal(0.85, envElement.GetProperty("threshold").GetDouble(), 3); - Assert.True(root.GetProperty("explain").GetBoolean()); - var sboms = root.GetProperty("sbomSet"); - Assert.Equal(2, sboms.GetArrayLength()); - Assert.Equal("sbom:A", sboms[0].GetString()); - } - - Assert.Equal("scheduler.policy-diff-summary@1", result.Diff.SchemaVersion); - Assert.Equal(2, result.Diff.Added); - Assert.Equal(1, result.Diff.Removed); - Assert.Equal(10, result.Diff.Unchanged); - Assert.Equal("blob://policy/P-7/simulation.json", result.ExplainUri); - Assert.True(result.Diff.BySeverity.ContainsKey("critical")); - Assert.Single(result.Diff.RuleHits); - Assert.Equal("rule-block", result.Diff.RuleHits[0].RuleId); - } - - [Fact] - public async Task SimulatePolicyAsync_ThrowsPolicyApiExceptionOnError() - { - var handler = new StubHttpMessageHandler((request, _) => - { - var problem = new ProblemDocument - { - Title = "Bad request", - Detail = "Missing SBOM set", - Status = (int)HttpStatusCode.BadRequest, - Extensions = new Dictionary - { - ["code"] = "ERR_POL_003" - } - }; - - var json = JsonSerializer.Serialize(problem, new JsonSerializerOptions(JsonSerializerDefaults.Web)); - return new HttpResponseMessage(HttpStatusCode.BadRequest) - { - Content = new StringContent(json, Encoding.UTF8, "application/json"), - RequestMessage = request - }; - }); - - var httpClient = new HttpClient(handler) - { - BaseAddress = new Uri("https://policy.example") - }; - - var options = new StellaOpsCliOptions { BackendUrl = "https://policy.example" }; - var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); - var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger()); - - var input = new PolicySimulationInput( - null, - null, - new ReadOnlyCollection(Array.Empty()), - new ReadOnlyDictionary(new Dictionary()), - false); - - var exception = await Assert.ThrowsAsync(() => client.SimulatePolicyAsync("P-7", input, CancellationToken.None)); - Assert.Equal(HttpStatusCode.BadRequest, exception.StatusCode); - Assert.Equal("ERR_POL_003", exception.ErrorCode); - Assert.Contains("Bad request", exception.Message); - } - - [Fact] - public async Task ActivatePolicyRevisionAsync_SendsPayloadAndParsesResponse() - { - string? 
capturedBody = null; - - var handler = new StubHttpMessageHandler((request, _) => - { - Assert.Equal(HttpMethod.Post, request.Method); - Assert.Equal("https://policy.example/api/policy/policies/P-7/versions/4:activate", request.RequestUri!.ToString()); - capturedBody = request.Content!.ReadAsStringAsync().Result; - - var responseDocument = new PolicyActivationResponseDocument - { - Status = "activated", - Revision = new PolicyActivationRevisionDocument - { - PackId = "P-7", - Version = 4, - Status = "active", - RequiresTwoPersonApproval = true, - CreatedAt = DateTimeOffset.Parse("2025-10-27T00:00:00Z", CultureInfo.InvariantCulture), - ActivatedAt = DateTimeOffset.Parse("2025-10-27T01:15:00Z", CultureInfo.InvariantCulture), - Approvals = new List - { - new() - { - ActorId = "user:alice", - ApprovedAt = DateTimeOffset.Parse("2025-10-27T01:10:00Z", CultureInfo.InvariantCulture), - Comment = "Primary approval" - } - } - } - }; - - var json = JsonSerializer.Serialize(responseDocument, new JsonSerializerOptions(JsonSerializerDefaults.Web)); - return new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StringContent(json, Encoding.UTF8, "application/json"), - RequestMessage = request - }; - }); - - var httpClient = new HttpClient(handler) - { - BaseAddress = new Uri("https://policy.example") - }; - - var options = new StellaOpsCliOptions { BackendUrl = "https://policy.example" }; - var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); - var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger()); - - var request = new PolicyActivationRequest( - RunNow: true, - ScheduledAt: DateTimeOffset.Parse("2025-10-28T02:00:00Z", CultureInfo.InvariantCulture), - Priority: "high", - Rollback: false, - IncidentId: "INC-204", - Comment: "Production rollout"); - - var result = await client.ActivatePolicyRevisionAsync("P-7", 4, request, CancellationToken.None); - - Assert.NotNull(capturedBody); - using (var document = JsonDocument.Parse(capturedBody!)) - { - var root = document.RootElement; - Assert.Equal("Production rollout", root.GetProperty("comment").GetString()); - Assert.True(root.TryGetProperty("runNow", out var runNowElement) && runNowElement.GetBoolean()); - Assert.Equal("high", root.GetProperty("priority").GetString()); - Assert.Equal("INC-204", root.GetProperty("incidentId").GetString()); - Assert.True(!root.TryGetProperty("rollback", out var rollbackElement) || rollbackElement.ValueKind == JsonValueKind.Null); - } - - Assert.Equal("activated", result.Status); - Assert.Equal("P-7", result.Revision.PolicyId); - Assert.Equal(4, result.Revision.Version); - Assert.True(result.Revision.RequiresTwoPersonApproval); - Assert.Equal("active", result.Revision.Status); - Assert.Single(result.Revision.Approvals); - Assert.Equal("user:alice", result.Revision.Approvals[0].ActorId); - Assert.Equal("Primary approval", result.Revision.Approvals[0].Comment); - } - - [Fact] - public async Task ActivatePolicyRevisionAsync_ThrowsPolicyApiExceptionOnError() - { - var handler = new StubHttpMessageHandler((request, _) => - { - var problem = new ProblemDocument - { - Title = "Not approved", - Detail = "Revision awaiting approval", - Status = (int)HttpStatusCode.BadRequest, - Extensions = new Dictionary - { - ["code"] = "ERR_POL_002" - } - }; - - var json = JsonSerializer.Serialize(problem, new JsonSerializerOptions(JsonSerializerDefaults.Web)); - return new HttpResponseMessage(HttpStatusCode.BadRequest) - { - Content = new StringContent(json, 
Encoding.UTF8, "application/json"), - RequestMessage = request - }; - }); - - var httpClient = new HttpClient(handler) - { - BaseAddress = new Uri("https://policy.example") - }; - - var options = new StellaOpsCliOptions { BackendUrl = "https://policy.example" }; - var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); - var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger()); - - var request = new PolicyActivationRequest(false, null, null, false, null, null); - - var exception = await Assert.ThrowsAsync(() => client.ActivatePolicyRevisionAsync("P-7", 4, request, CancellationToken.None)); - Assert.Equal(HttpStatusCode.BadRequest, exception.StatusCode); - Assert.Equal("ERR_POL_002", exception.ErrorCode); - Assert.Contains("Not approved", exception.Message); - } - -} +using System; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Globalization; +using System.IO; +using System.Net; +using System.Net.Http; +using System.Net.Http.Json; +using System.Security.Cryptography; +using System.Text; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.IdentityModel.Tokens; +using StellaOps.Auth.Abstractions; +using StellaOps.Auth.Client; +using StellaOps.Cli.Configuration; +using StellaOps.Cli.Services; +using StellaOps.Cli.Services.Models; +using StellaOps.Cli.Services.Models.Transport; +using StellaOps.Cli.Tests.Testing; +using System.Linq; + +namespace StellaOps.Cli.Tests.Services; + +public sealed class BackendOperationsClientTests +{ + [Fact] + public async Task DownloadScannerAsync_VerifiesDigestAndWritesMetadata() + { + using var temp = new TempDirectory(); + + var contentBytes = Encoding.UTF8.GetBytes("scanner-blob"); + var digestHex = Convert.ToHexString(SHA256.HashData(contentBytes)).ToLowerInvariant(); + + var handler = new StubHttpMessageHandler((request, _) => + { + var response = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new ByteArrayContent(contentBytes), + RequestMessage = request + }; + + response.Headers.Add("X-StellaOps-Digest", $"sha256:{digestHex}"); + response.Content.Headers.LastModified = DateTimeOffset.UtcNow; + response.Content.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/octet-stream"); + return response; + }); + + var httpClient = new HttpClient(handler) + { + BaseAddress = new Uri("https://concelier.example") + }; + + var options = new StellaOpsCliOptions + { + BackendUrl = "https://concelier.example", + ScannerCacheDirectory = temp.Path, + ScannerDownloadAttempts = 1 + }; + + var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); + var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger()); + + var targetPath = Path.Combine(temp.Path, "scanner.tar.gz"); + var result = await client.DownloadScannerAsync("stable", targetPath, overwrite: false, verbose: true, CancellationToken.None); + + Assert.False(result.FromCache); + Assert.True(File.Exists(targetPath)); + + var metadataPath = targetPath + ".metadata.json"; + Assert.True(File.Exists(metadataPath)); + + using var document = JsonDocument.Parse(File.ReadAllText(metadataPath)); + Assert.Equal($"sha256:{digestHex}", document.RootElement.GetProperty("digest").GetString()); + Assert.Equal("stable", document.RootElement.GetProperty("channel").GetString()); + } + + [Fact] + public async Task 
DownloadScannerAsync_ThrowsOnDigestMismatch() + { + using var temp = new TempDirectory(); + + var contentBytes = Encoding.UTF8.GetBytes("scanner-data"); + var handler = new StubHttpMessageHandler((request, _) => + { + var response = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new ByteArrayContent(contentBytes), + RequestMessage = request + }; + response.Headers.Add("X-StellaOps-Digest", "sha256:deadbeef"); + return response; + }); + + var httpClient = new HttpClient(handler) + { + BaseAddress = new Uri("https://concelier.example") + }; + + var options = new StellaOpsCliOptions + { + BackendUrl = "https://concelier.example", + ScannerCacheDirectory = temp.Path, + ScannerDownloadAttempts = 1 + }; + + var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); + var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger()); + + var targetPath = Path.Combine(temp.Path, "scanner.tar.gz"); + + await Assert.ThrowsAsync(() => client.DownloadScannerAsync("stable", targetPath, overwrite: true, verbose: false, CancellationToken.None)); + Assert.False(File.Exists(targetPath)); + } + + [Fact] + public async Task DownloadScannerAsync_RetriesOnFailure() + { + using var temp = new TempDirectory(); + + var successBytes = Encoding.UTF8.GetBytes("success"); + var digestHex = Convert.ToHexString(SHA256.HashData(successBytes)).ToLowerInvariant(); + var attempts = 0; + + var handler = new StubHttpMessageHandler( + (request, _) => + { + attempts++; + return new HttpResponseMessage(HttpStatusCode.InternalServerError) + { + RequestMessage = request, + Content = new StringContent("error") + }; + }, + (request, _) => + { + attempts++; + var response = new HttpResponseMessage(HttpStatusCode.OK) + { + RequestMessage = request, + Content = new ByteArrayContent(successBytes) + }; + response.Headers.Add("X-StellaOps-Digest", $"sha256:{digestHex}"); + return response; + }); + + var httpClient = new HttpClient(handler) + { + BaseAddress = new Uri("https://concelier.example") + }; + + var options = new StellaOpsCliOptions + { + BackendUrl = "https://concelier.example", + ScannerCacheDirectory = temp.Path, + ScannerDownloadAttempts = 3 + }; + + var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); + var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger()); + + var targetPath = Path.Combine(temp.Path, "scanner.tar.gz"); + var result = await client.DownloadScannerAsync("stable", targetPath, overwrite: false, verbose: false, CancellationToken.None); + + Assert.Equal(2, attempts); + Assert.False(result.FromCache); + Assert.True(File.Exists(targetPath)); + } + + [Fact] + public async Task UploadScanResultsAsync_RetriesOnRetryAfter() + { + using var temp = new TempDirectory(); + var filePath = Path.Combine(temp.Path, "scan.json"); + await File.WriteAllTextAsync(filePath, "{}"); + + var attempts = 0; + var handler = new StubHttpMessageHandler( + (request, _) => + { + attempts++; + var response = new HttpResponseMessage(HttpStatusCode.TooManyRequests) + { + RequestMessage = request, + Content = new StringContent("busy") + }; + response.Headers.Add("Retry-After", "1"); + return response; + }, + (request, _) => + { + attempts++; + return new HttpResponseMessage(HttpStatusCode.OK) + { + RequestMessage = request + }; + }); + + var httpClient = new HttpClient(handler) + { + BaseAddress = new Uri("https://concelier.example") + }; + + var options = new StellaOpsCliOptions + { + BackendUrl 
= "https://concelier.example", + ScanUploadAttempts = 3 + }; + + var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); + var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger()); + + await client.UploadScanResultsAsync(filePath, CancellationToken.None); + + Assert.Equal(2, attempts); + } + + [Fact] + public async Task UploadScanResultsAsync_ThrowsAfterMaxAttempts() + { + using var temp = new TempDirectory(); + var filePath = Path.Combine(temp.Path, "scan.json"); + await File.WriteAllTextAsync(filePath, "{}"); + + var attempts = 0; + var handler = new StubHttpMessageHandler( + (request, _) => + { + attempts++; + return new HttpResponseMessage(HttpStatusCode.BadGateway) + { + RequestMessage = request, + Content = new StringContent("bad gateway") + }; + }); + + var httpClient = new HttpClient(handler) + { + BaseAddress = new Uri("https://concelier.example") + }; + + var options = new StellaOpsCliOptions + { + BackendUrl = "https://concelier.example", + ScanUploadAttempts = 2 + }; + + var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); + var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger()); + + await Assert.ThrowsAsync(() => client.UploadScanResultsAsync(filePath, CancellationToken.None)); + Assert.Equal(2, attempts); + } + + [Fact] + public async Task TriggerJobAsync_ReturnsAcceptedResult() + { + var handler = new StubHttpMessageHandler((request, _) => + { + var response = new HttpResponseMessage(HttpStatusCode.Accepted) + { + RequestMessage = request, + Content = JsonContent.Create(new JobRunResponse + { + RunId = Guid.NewGuid(), + Status = "queued", + Kind = "export:json", + Trigger = "cli", + CreatedAt = DateTimeOffset.UtcNow + }) + }; + response.Headers.Location = new Uri("/jobs/export:json/runs/123", UriKind.Relative); + return response; + }); + + var httpClient = new HttpClient(handler) + { + BaseAddress = new Uri("https://concelier.example") + }; + + var options = new StellaOpsCliOptions { BackendUrl = "https://concelier.example" }; + var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); + var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger()); + + var result = await client.TriggerJobAsync("export:json", new Dictionary(), CancellationToken.None); + + Assert.True(result.Success); + Assert.Equal("Accepted", result.Message); + Assert.Equal("/jobs/export:json/runs/123", result.Location); + } + + [Fact] + public async Task TriggerJobAsync_ReturnsFailureMessage() + { + var handler = new StubHttpMessageHandler((request, _) => + { + var problem = new + { + title = "Job already running", + detail = "export job active" + }; + + var response = new HttpResponseMessage(HttpStatusCode.Conflict) + { + RequestMessage = request, + Content = JsonContent.Create(problem) + }; + return response; + }); + + var httpClient = new HttpClient(handler) + { + BaseAddress = new Uri("https://concelier.example") + }; + + var options = new StellaOpsCliOptions { BackendUrl = "https://concelier.example" }; + var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); + var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger()); + + var result = await client.TriggerJobAsync("export:json", new Dictionary(), CancellationToken.None); + + Assert.False(result.Success); + Assert.Contains("Job already running", result.Message); + } + 
+ [Fact] + public async Task TriggerJobAsync_UsesAuthorityTokenWhenConfigured() + { + using var temp = new TempDirectory(); + + var handler = new StubHttpMessageHandler((request, _) => + { + Assert.NotNull(request.Headers.Authorization); + Assert.Equal("Bearer", request.Headers.Authorization!.Scheme); + Assert.Equal("token-123", request.Headers.Authorization.Parameter); + + return new HttpResponseMessage(HttpStatusCode.Accepted) + { + RequestMessage = request, + Content = JsonContent.Create(new JobRunResponse + { + RunId = Guid.NewGuid(), + Kind = "test", + Status = "Pending", + Trigger = "cli", + CreatedAt = DateTimeOffset.UtcNow + }) + }; + }); + + var httpClient = new HttpClient(handler) + { + BaseAddress = new Uri("https://concelier.example") + }; + + var options = new StellaOpsCliOptions + { + BackendUrl = "https://concelier.example", + Authority = + { + Url = "https://authority.example", + ClientId = "cli", + ClientSecret = "secret", + Scope = "concelier.jobs.trigger", + TokenCacheDirectory = temp.Path + } + }; + + var tokenClient = new StubTokenClient(); + var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); + var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger(), tokenClient); + + var result = await client.TriggerJobAsync("test", new Dictionary(), CancellationToken.None); + + Assert.True(result.Success); + Assert.Equal("Accepted", result.Message); + Assert.True(tokenClient.Requests > 0); + } + + [Fact] + public async Task EvaluateRuntimePolicyAsync_ParsesDecisionPayload() + { + var handler = new StubHttpMessageHandler((request, _) => + { + Assert.Equal(HttpMethod.Post, request.Method); + Assert.Equal("/api/scanner/policy/runtime", request.RequestUri!.AbsolutePath); + + var body = request.Content!.ReadAsStringAsync().GetAwaiter().GetResult(); + using var document = JsonDocument.Parse(body); + var root = document.RootElement; + Assert.Equal("prod", root.GetProperty("namespace").GetString()); + Assert.Equal("payments", root.GetProperty("labels").GetProperty("app").GetString()); + var images = root.GetProperty("images"); + Assert.Equal(2, images.GetArrayLength()); + Assert.Equal("ghcr.io/app@sha256:abc", images[0].GetString()); + Assert.Equal("ghcr.io/api@sha256:def", images[1].GetString()); + + var responseJson = @"{ + ""ttlSeconds"": 120, + ""policyRevision"": ""rev-123"", + ""expiresAtUtc"": ""2025-10-19T12:34:56Z"", + ""results"": { + ""ghcr.io/app@sha256:abc"": { + ""policyVerdict"": ""pass"", + ""signed"": true, + ""hasSbomReferrers"": true, + ""reasons"": [], + ""rekor"": { ""uuid"": ""uuid-1"", ""url"": ""https://rekor.example/uuid-1"", ""verified"": true }, + ""confidence"": 0.87, + ""quieted"": false, + ""metadata"": { ""note"": ""cached"" } + }, + ""ghcr.io/api@sha256:def"": { + ""policyVerdict"": ""fail"", + ""signed"": false, + ""hasSbomReferrers"": false, + ""reasons"": [""unsigned"", ""missing sbom""], + ""quietedBy"": ""manual-override"" + } + } +}"; + + return new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(responseJson, Encoding.UTF8, "application/json"), + RequestMessage = request + }; + }); + + var httpClient = new HttpClient(handler) + { + BaseAddress = new Uri("https://scanner.example/") + }; + + var options = new StellaOpsCliOptions + { + BackendUrl = "https://scanner.example/" + }; + + var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); + var client = new BackendOperationsClient(httpClient, options, 
loggerFactory.CreateLogger()); + + var labels = new ReadOnlyDictionary(new Dictionary { ["app"] = "payments" }); + var imagesList = new ReadOnlyCollection(new List + { + "ghcr.io/app@sha256:abc", + "ghcr.io/app@sha256:abc", + "ghcr.io/api@sha256:def" + }); + var requestModel = new RuntimePolicyEvaluationRequest("prod", labels, imagesList); + + var result = await client.EvaluateRuntimePolicyAsync(requestModel, CancellationToken.None); + + Assert.Equal(120, result.TtlSeconds); + Assert.Equal("rev-123", result.PolicyRevision); + Assert.Equal(DateTimeOffset.Parse("2025-10-19T12:34:56Z", CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal), result.ExpiresAtUtc); + Assert.Equal(2, result.Decisions.Count); + + var primary = result.Decisions["ghcr.io/app@sha256:abc"]; + Assert.Equal("pass", primary.PolicyVerdict); + Assert.True(primary.Signed); + Assert.True(primary.HasSbomReferrers); + Assert.Empty(primary.Reasons); + Assert.NotNull(primary.Rekor); + Assert.Equal("uuid-1", primary.Rekor!.Uuid); + Assert.Equal("https://rekor.example/uuid-1", primary.Rekor.Url); + Assert.True(primary.Rekor.Verified); + Assert.Equal(0.87, Assert.IsType(primary.AdditionalProperties["confidence"]), 3); + Assert.False(Assert.IsType(primary.AdditionalProperties["quieted"])); + var metadataJson = Assert.IsType(primary.AdditionalProperties["metadata"]); + using var metadataDocument = JsonDocument.Parse(metadataJson); + Assert.Equal("cached", metadataDocument.RootElement.GetProperty("note").GetString()); + + var secondary = result.Decisions["ghcr.io/api@sha256:def"]; + Assert.Equal("fail", secondary.PolicyVerdict); + Assert.False(secondary.Signed); + Assert.False(secondary.HasSbomReferrers); + Assert.Collection(secondary.Reasons, + item => Assert.Equal("unsigned", item), + item => Assert.Equal("missing sbom", item)); + Assert.Equal("manual-override", Assert.IsType(secondary.AdditionalProperties["quietedBy"])); + } + + [Fact] + public async Task DownloadOfflineKitAsync_DownloadsBundleAndWritesMetadata() + { + using var temp = new TempDirectory(); + + var bundleBytes = Encoding.UTF8.GetBytes("bundle-data"); + var manifestBytes = Encoding.UTF8.GetBytes("{\"artifacts\":[]}"); + var bundleDigest = Convert.ToHexString(SHA256.HashData(bundleBytes)).ToLowerInvariant(); + var manifestDigest = Convert.ToHexString(SHA256.HashData(manifestBytes)).ToLowerInvariant(); + + var metadataPayload = JsonSerializer.Serialize(new + { + bundleId = "2025-10-20-full", + bundleName = "stella-ops-offline-kit-2025-10-20.tgz", + bundleSha256 = $"sha256:{bundleDigest}", + bundleSize = (long)bundleBytes.Length, + bundleUrl = "https://mirror.example/stella-ops-offline-kit-2025-10-20.tgz", + bundleSignatureName = "stella-ops-offline-kit-2025-10-20.tgz.sig", + bundleSignatureUrl = "https://mirror.example/stella-ops-offline-kit-2025-10-20.tgz.sig", + manifestName = "offline-manifest-2025-10-20.json", + manifestSha256 = $"sha256:{manifestDigest}", + manifestUrl = "https://mirror.example/offline-manifest-2025-10-20.json", + manifestSignatureName = "offline-manifest-2025-10-20.json.jws", + manifestSignatureUrl = "https://mirror.example/offline-manifest-2025-10-20.json.jws", + capturedAt = DateTimeOffset.UtcNow + }, new JsonSerializerOptions(JsonSerializerDefaults.Web)); + + var handler = new StubHttpMessageHandler( + (request, _) => + { + Assert.Equal("https://backend.example/api/offline-kit/bundles/latest", request.RequestUri!.ToString()); + return new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new 
StringContent(metadataPayload) + }; + }, + (request, _) => + { + var absolute = request.RequestUri!.AbsoluteUri; + if (absolute.EndsWith(".tgz", StringComparison.OrdinalIgnoreCase)) + { + return new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new ByteArrayContent(bundleBytes) + }; + } + + if (absolute.EndsWith(".json", StringComparison.OrdinalIgnoreCase)) + { + return new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new ByteArrayContent(manifestBytes) + }; + } + + return new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new ByteArrayContent(Array.Empty()) + }; + }); + + var httpClient = new HttpClient(handler) + { + BaseAddress = new Uri("https://backend.example") + }; + + var options = new StellaOpsCliOptions + { + BackendUrl = "https://backend.example", + Offline = new StellaOpsCliOfflineOptions + { + KitsDirectory = temp.Path + } + }; + + var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); + var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger()); + + var result = await client.DownloadOfflineKitAsync(null, temp.Path, overwrite: false, resume: false, CancellationToken.None); + + Assert.False(result.FromCache); + Assert.True(File.Exists(result.BundlePath)); + Assert.True(File.Exists(result.ManifestPath)); + Assert.NotNull(result.BundleSignaturePath); + Assert.NotNull(result.ManifestSignaturePath); + Assert.True(File.Exists(result.MetadataPath)); + + using var metadata = JsonDocument.Parse(File.ReadAllText(result.MetadataPath)); + Assert.Equal("2025-10-20-full", metadata.RootElement.GetProperty("bundleId").GetString()); + Assert.Equal(bundleDigest, metadata.RootElement.GetProperty("bundleSha256").GetString()); + } + + [Fact] + public async Task DownloadOfflineKitAsync_ResumesPartialDownload() + { + using var temp = new TempDirectory(); + + var bundleBytes = Encoding.UTF8.GetBytes("partial-download-data"); + var manifestBytes = Encoding.UTF8.GetBytes("{\"manifest\":true}"); + var bundleDigest = Convert.ToHexString(SHA256.HashData(bundleBytes)).ToLowerInvariant(); + var manifestDigest = Convert.ToHexString(SHA256.HashData(manifestBytes)).ToLowerInvariant(); + + var metadataJson = JsonSerializer.Serialize(new + { + bundleId = "2025-10-21-full", + bundleName = "kit.tgz", + bundleSha256 = bundleDigest, + bundleSize = (long)bundleBytes.Length, + bundleUrl = "https://mirror.example/kit.tgz", + manifestName = "offline-manifest.json", + manifestSha256 = manifestDigest, + manifestUrl = "https://mirror.example/offline-manifest.json", + capturedAt = DateTimeOffset.UtcNow + }, new JsonSerializerOptions(JsonSerializerDefaults.Web)); + + var partialPath = Path.Combine(temp.Path, "kit.tgz.partial"); + await File.WriteAllBytesAsync(partialPath, bundleBytes.AsSpan(0, bundleBytes.Length / 2).ToArray()); + + var handler = new StubHttpMessageHandler( + (request, _) => new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(metadataJson) + }, + (request, _) => + { + if (request.RequestUri!.AbsoluteUri.EndsWith("kit.tgz", StringComparison.OrdinalIgnoreCase)) + { + Assert.NotNull(request.Headers.Range); + Assert.Equal(bundleBytes.Length / 2, request.Headers.Range!.Ranges.Single().From); + return new HttpResponseMessage(HttpStatusCode.PartialContent) + { + Content = new ByteArrayContent(bundleBytes.AsSpan(bundleBytes.Length / 2).ToArray()) + }; + } + + return new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new ByteArrayContent(manifestBytes) + }; + }); + + var httpClient = new 
HttpClient(handler) + { + BaseAddress = new Uri("https://backend.example") + }; + + var options = new StellaOpsCliOptions + { + BackendUrl = "https://backend.example", + Offline = new StellaOpsCliOfflineOptions + { + KitsDirectory = temp.Path + } + }; + + var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); + var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger()); + + var result = await client.DownloadOfflineKitAsync(null, temp.Path, overwrite: false, resume: true, CancellationToken.None); + + Assert.Equal(bundleDigest, result.Descriptor.BundleSha256); + Assert.Equal(bundleBytes.Length, new FileInfo(result.BundlePath).Length); + } + + [Fact] + public async Task ImportOfflineKitAsync_SendsMultipartPayload() + { + using var temp = new TempDirectory(); + var bundlePath = Path.Combine(temp.Path, "kit.tgz"); + var manifestPath = Path.Combine(temp.Path, "offline-manifest.json"); + + var bundleBytes = Encoding.UTF8.GetBytes("bundle-content"); + var manifestBytes = Encoding.UTF8.GetBytes("{\"manifest\":true}"); + await File.WriteAllBytesAsync(bundlePath, bundleBytes); + await File.WriteAllBytesAsync(manifestPath, manifestBytes); + + var bundleDigest = Convert.ToHexString(SHA256.HashData(bundleBytes)).ToLowerInvariant(); + var manifestDigest = Convert.ToHexString(SHA256.HashData(manifestBytes)).ToLowerInvariant(); + + var metadata = new OfflineKitMetadataDocument + { + BundleId = "2025-10-21-full", + BundleName = "kit.tgz", + BundleSha256 = bundleDigest, + BundleSize = bundleBytes.Length, + BundlePath = bundlePath, + CapturedAt = DateTimeOffset.UtcNow, + DownloadedAt = DateTimeOffset.UtcNow, + Channel = "stable", + Kind = "full", + ManifestName = "offline-manifest.json", + ManifestSha256 = manifestDigest, + ManifestSize = manifestBytes.Length, + ManifestPath = manifestPath, + IsDelta = false, + BaseBundleId = null + }; + + await File.WriteAllTextAsync(bundlePath + ".metadata.json", JsonSerializer.Serialize(metadata, new JsonSerializerOptions(JsonSerializerDefaults.Web) { WriteIndented = true })); + + var recordingHandler = new ImportRecordingHandler(); + var httpClient = new HttpClient(recordingHandler) + { + BaseAddress = new Uri("https://backend.example") + }; + + var options = new StellaOpsCliOptions + { + BackendUrl = "https://backend.example" + }; + + var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); + var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger()); + + var request = new OfflineKitImportRequest( + bundlePath, + manifestPath, + null, + null, + metadata.BundleId, + metadata.BundleSha256, + metadata.BundleSize, + metadata.CapturedAt, + metadata.Channel, + metadata.Kind, + metadata.IsDelta, + metadata.BaseBundleId, + metadata.ManifestSha256, + metadata.ManifestSize); + + var result = await client.ImportOfflineKitAsync(request, CancellationToken.None); + + Assert.Equal("imp-1", result.ImportId); + Assert.NotNull(recordingHandler.MetadataJson); + Assert.NotNull(recordingHandler.BundlePayload); + Assert.NotNull(recordingHandler.ManifestPayload); + + using var metadataJson = JsonDocument.Parse(recordingHandler.MetadataJson!); + Assert.Equal(bundleDigest, metadataJson.RootElement.GetProperty("bundleSha256").GetString()); + Assert.Equal(manifestDigest, metadataJson.RootElement.GetProperty("manifestSha256").GetString()); + } + + [Fact] + public async Task GetOfflineKitStatusAsync_ParsesResponse() + { + var captured = DateTimeOffset.UtcNow; 
+ var imported = captured.AddMinutes(5); + + var statusJson = JsonSerializer.Serialize(new + { + current = new + { + bundleId = "2025-10-22-full", + channel = "stable", + kind = "full", + isDelta = false, + baseBundleId = (string?)null, + bundleSha256 = "sha256:abc123", + bundleSize = 42, + capturedAt = captured, + importedAt = imported + }, + components = new[] + { + new + { + name = "concelier-json", + version = "2025-10-22", + digest = "sha256:def456", + capturedAt = captured, + sizeBytes = 1234 + } + } + }, new JsonSerializerOptions(JsonSerializerDefaults.Web)); + + var handler = new StubHttpMessageHandler( + (request, _) => + { + Assert.Equal("https://backend.example/api/offline-kit/status", request.RequestUri!.ToString()); + return new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(statusJson) + }; + }); + + var httpClient = new HttpClient(handler) + { + BaseAddress = new Uri("https://backend.example") + }; + + var options = new StellaOpsCliOptions + { + BackendUrl = "https://backend.example" + }; + + var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); + var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger()); + + var status = await client.GetOfflineKitStatusAsync(CancellationToken.None); + + Assert.Equal("2025-10-22-full", status.BundleId); + Assert.Equal("stable", status.Channel); + Assert.Equal("full", status.Kind); + Assert.False(status.IsDelta); + Assert.Equal(42, status.BundleSize); + Assert.Single(status.Components); + Assert.Equal("concelier-json", status.Components[0].Name); + } + + private sealed class ImportRecordingHandler : HttpMessageHandler + { + public string? MetadataJson { get; private set; } + public byte[]? BundlePayload { get; private set; } + public byte[]? ManifestPayload { get; private set; } + + protected override async Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + if (request.RequestUri!.AbsoluteUri.EndsWith("/api/offline-kit/import", StringComparison.OrdinalIgnoreCase)) + { + Assert.IsType(request.Content); + foreach (var part in (MultipartFormDataContent)request.Content!) + { + var name = part.Headers.ContentDisposition?.Name?.Trim('"'); + switch (name) + { + case "metadata": + MetadataJson = await part.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + break; + case "bundle": + BundlePayload = await part.ReadAsByteArrayAsync(cancellationToken).ConfigureAwait(false); + break; + case "manifest": + ManifestPayload = await part.ReadAsByteArrayAsync(cancellationToken).ConfigureAwait(false); + break; + } + } + } + + return new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent("{\"importId\":\"imp-1\",\"status\":\"queued\",\"submittedAt\":\"2025-10-21T00:00:00Z\"}") + }; + } + } + + private sealed class StubTokenClient : IStellaOpsTokenClient + { + private readonly StellaOpsTokenResult _tokenResult; + + public int Requests { get; private set; } + + public string? LastScope { get; private set; } + + public IReadOnlyDictionary? 
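/* Sketch only: the ImportRecordingHandler above pulls three multipart parts named "metadata",
   "bundle", and "manifest" from POST /api/offline-kit/import. A payload that satisfies it could
   be assembled as below (hypothetical BuildImportContent helper, illustrative only):

   static MultipartFormDataContent BuildImportContent(string metadataJson, string bundlePath, string manifestPath)
   {
       return new MultipartFormDataContent
       {
           { new StringContent(metadataJson, Encoding.UTF8, "application/json"), "metadata" },
           { new ByteArrayContent(File.ReadAllBytes(bundlePath)), "bundle", Path.GetFileName(bundlePath) },
           { new ByteArrayContent(File.ReadAllBytes(manifestPath)), "manifest", Path.GetFileName(manifestPath) }
       };
   }

   // Usage (assumption): await httpClient.PostAsync("/api/offline-kit/import", BuildImportContent(json, bundle, manifest), ct);
*/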
LastAdditionalParameters { get; private set; } + + public StubTokenClient() + { + _tokenResult = new StellaOpsTokenResult( + "token-123", + "Bearer", + DateTimeOffset.UtcNow.AddMinutes(5), + new[] { StellaOpsScopes.ConcelierJobsTrigger }); + } + + public ValueTask CacheTokenAsync(string key, StellaOpsTokenCacheEntry entry, CancellationToken cancellationToken = default) + => ValueTask.CompletedTask; + + public ValueTask ClearCachedTokenAsync(string key, CancellationToken cancellationToken = default) + => ValueTask.CompletedTask; + + public Task GetJsonWebKeySetAsync(CancellationToken cancellationToken = default) + => Task.FromResult(new JsonWebKeySet("{\"keys\":[]}")); + + public ValueTask GetCachedTokenAsync(string key, CancellationToken cancellationToken = default) + => ValueTask.FromResult(null); + + public Task RequestClientCredentialsTokenAsync(string? scope = null, IReadOnlyDictionary? additionalParameters = null, CancellationToken cancellationToken = default) + { + Requests++; + LastScope = scope; + LastAdditionalParameters = additionalParameters; + return Task.FromResult(_tokenResult); + } + + public Task RequestPasswordTokenAsync(string username, string password, string? scope = null, IReadOnlyDictionary? additionalParameters = null, CancellationToken cancellationToken = default) + { + Requests++; + LastScope = scope; + LastAdditionalParameters = additionalParameters; + return Task.FromResult(_tokenResult); + } + } + + [Fact] + public async Task SimulatePolicyAsync_SendsPayloadAndParsesResponse() + { + string? capturedBody = null; + + var handler = new StubHttpMessageHandler((request, _) => + { + Assert.Equal(HttpMethod.Post, request.Method); + Assert.Equal("https://policy.example/api/policy/policies/P-7/simulate", request.RequestUri!.ToString()); + capturedBody = request.Content!.ReadAsStringAsync().Result; + + var responseDocument = new PolicySimulationResponseDocument + { + Diff = new PolicySimulationDiffDocument + { + SchemaVersion = "scheduler.policy-diff-summary@1", + Added = 2, + Removed = 1, + Unchanged = 10, + BySeverity = new Dictionary + { + ["critical"] = new PolicySimulationSeverityDeltaDocument { Up = 1 }, + ["high"] = new PolicySimulationSeverityDeltaDocument { Down = 1 } + }, + RuleHits = new List + { + new() { RuleId = "rule-block", RuleName = "Block Critical", Up = 1, Down = 0 } + } + }, + ExplainUri = "blob://policy/P-7/simulation.json" + }; + + var json = JsonSerializer.Serialize(responseDocument, new JsonSerializerOptions(JsonSerializerDefaults.Web)); + return new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(json, Encoding.UTF8, "application/json"), + RequestMessage = request + }; + }); + + var httpClient = new HttpClient(handler) + { + BaseAddress = new Uri("https://policy.example") + }; + + var options = new StellaOpsCliOptions { BackendUrl = "https://policy.example" }; + var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); + var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger()); + + var sbomSet = new ReadOnlyCollection(new List { "sbom:A", "sbom:B" }); + var environment = new ReadOnlyDictionary(new Dictionary(StringComparer.Ordinal) + { + ["sealed"] = false, + ["threshold"] = 0.85 + }); + var input = new PolicySimulationInput(3, 4, sbomSet, environment, true); + + var result = await client.SimulatePolicyAsync("P-7", input, CancellationToken.None); + + Assert.NotNull(capturedBody); + using (var document = JsonDocument.Parse(capturedBody!)) + { + var root 
= document.RootElement; + Assert.Equal(3, root.GetProperty("baseVersion").GetInt32()); + Assert.Equal(4, root.GetProperty("candidateVersion").GetInt32()); + Assert.True(root.TryGetProperty("env", out var envElement) && envElement.GetProperty("sealed").GetBoolean() == false); + Assert.Equal(0.85, envElement.GetProperty("threshold").GetDouble(), 3); + Assert.True(root.GetProperty("explain").GetBoolean()); + var sboms = root.GetProperty("sbomSet"); + Assert.Equal(2, sboms.GetArrayLength()); + Assert.Equal("sbom:A", sboms[0].GetString()); + } + + Assert.Equal("scheduler.policy-diff-summary@1", result.Diff.SchemaVersion); + Assert.Equal(2, result.Diff.Added); + Assert.Equal(1, result.Diff.Removed); + Assert.Equal(10, result.Diff.Unchanged); + Assert.Equal("blob://policy/P-7/simulation.json", result.ExplainUri); + Assert.True(result.Diff.BySeverity.ContainsKey("critical")); + Assert.Single(result.Diff.RuleHits); + Assert.Equal("rule-block", result.Diff.RuleHits[0].RuleId); + } + + [Fact] + public async Task SimulatePolicyAsync_ThrowsPolicyApiExceptionOnError() + { + var handler = new StubHttpMessageHandler((request, _) => + { + var problem = new ProblemDocument + { + Title = "Bad request", + Detail = "Missing SBOM set", + Status = (int)HttpStatusCode.BadRequest, + Extensions = new Dictionary + { + ["code"] = "ERR_POL_003" + } + }; + + var json = JsonSerializer.Serialize(problem, new JsonSerializerOptions(JsonSerializerDefaults.Web)); + return new HttpResponseMessage(HttpStatusCode.BadRequest) + { + Content = new StringContent(json, Encoding.UTF8, "application/json"), + RequestMessage = request + }; + }); + + var httpClient = new HttpClient(handler) + { + BaseAddress = new Uri("https://policy.example") + }; + + var options = new StellaOpsCliOptions { BackendUrl = "https://policy.example" }; + var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); + var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger()); + + var input = new PolicySimulationInput( + null, + null, + new ReadOnlyCollection(Array.Empty()), + new ReadOnlyDictionary(new Dictionary()), + false); + + var exception = await Assert.ThrowsAsync(() => client.SimulatePolicyAsync("P-7", input, CancellationToken.None)); + Assert.Equal(HttpStatusCode.BadRequest, exception.StatusCode); + Assert.Equal("ERR_POL_003", exception.ErrorCode); + Assert.Contains("Bad request", exception.Message); + } + + [Fact] + public async Task ActivatePolicyRevisionAsync_SendsPayloadAndParsesResponse() + { + string? 
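/* Sketch only: the error-path tests here expect problem+json responses to surface as
   PolicyApiException with StatusCode and ErrorCode filled in from the payload. One minimal way
   to pull those fields out of the response (hypothetical ReadProblemAsync helper; the real
   client mapping may differ) is:

   static async Task<(string? Title, string? Detail, string? Code)> ReadProblemAsync(HttpResponseMessage response, CancellationToken ct)
   {
       using var document = JsonDocument.Parse(await response.Content.ReadAsStringAsync(ct));
       var root = document.RootElement;
       string? Read(string name) => root.TryGetProperty(name, out var value) && value.ValueKind == JsonValueKind.String
           ? value.GetString()
           : null;
       return (Read("title"), Read("detail"), Read("code"));
   }
*/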
capturedBody = null; + + var handler = new StubHttpMessageHandler((request, _) => + { + Assert.Equal(HttpMethod.Post, request.Method); + Assert.Equal("https://policy.example/api/policy/policies/P-7/versions/4:activate", request.RequestUri!.ToString()); + capturedBody = request.Content!.ReadAsStringAsync().Result; + + var responseDocument = new PolicyActivationResponseDocument + { + Status = "activated", + Revision = new PolicyActivationRevisionDocument + { + PackId = "P-7", + Version = 4, + Status = "active", + RequiresTwoPersonApproval = true, + CreatedAt = DateTimeOffset.Parse("2025-10-27T00:00:00Z", CultureInfo.InvariantCulture), + ActivatedAt = DateTimeOffset.Parse("2025-10-27T01:15:00Z", CultureInfo.InvariantCulture), + Approvals = new List + { + new() + { + ActorId = "user:alice", + ApprovedAt = DateTimeOffset.Parse("2025-10-27T01:10:00Z", CultureInfo.InvariantCulture), + Comment = "Primary approval" + } + } + } + }; + + var json = JsonSerializer.Serialize(responseDocument, new JsonSerializerOptions(JsonSerializerDefaults.Web)); + return new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(json, Encoding.UTF8, "application/json"), + RequestMessage = request + }; + }); + + var httpClient = new HttpClient(handler) + { + BaseAddress = new Uri("https://policy.example") + }; + + var options = new StellaOpsCliOptions { BackendUrl = "https://policy.example" }; + var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); + var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger()); + + var request = new PolicyActivationRequest( + RunNow: true, + ScheduledAt: DateTimeOffset.Parse("2025-10-28T02:00:00Z", CultureInfo.InvariantCulture), + Priority: "high", + Rollback: false, + IncidentId: "INC-204", + Comment: "Production rollout"); + + var result = await client.ActivatePolicyRevisionAsync("P-7", 4, request, CancellationToken.None); + + Assert.NotNull(capturedBody); + using (var document = JsonDocument.Parse(capturedBody!)) + { + var root = document.RootElement; + Assert.Equal("Production rollout", root.GetProperty("comment").GetString()); + Assert.True(root.TryGetProperty("runNow", out var runNowElement) && runNowElement.GetBoolean()); + Assert.Equal("high", root.GetProperty("priority").GetString()); + Assert.Equal("INC-204", root.GetProperty("incidentId").GetString()); + Assert.True(!root.TryGetProperty("rollback", out var rollbackElement) || rollbackElement.ValueKind == JsonValueKind.Null); + } + + Assert.Equal("activated", result.Status); + Assert.Equal("P-7", result.Revision.PolicyId); + Assert.Equal(4, result.Revision.Version); + Assert.True(result.Revision.RequiresTwoPersonApproval); + Assert.Equal("active", result.Revision.Status); + Assert.Single(result.Revision.Approvals); + Assert.Equal("user:alice", result.Revision.Approvals[0].ActorId); + Assert.Equal("Primary approval", result.Revision.Approvals[0].Comment); + } + + [Fact] + public async Task ActivatePolicyRevisionAsync_ThrowsPolicyApiExceptionOnError() + { + var handler = new StubHttpMessageHandler((request, _) => + { + var problem = new ProblemDocument + { + Title = "Not approved", + Detail = "Revision awaiting approval", + Status = (int)HttpStatusCode.BadRequest, + Extensions = new Dictionary + { + ["code"] = "ERR_POL_002" + } + }; + + var json = JsonSerializer.Serialize(problem, new JsonSerializerOptions(JsonSerializerDefaults.Web)); + return new HttpResponseMessage(HttpStatusCode.BadRequest) + { + Content = new StringContent(json, 
Encoding.UTF8, "application/json"), + RequestMessage = request + }; + }); + + var httpClient = new HttpClient(handler) + { + BaseAddress = new Uri("https://policy.example") + }; + + var options = new StellaOpsCliOptions { BackendUrl = "https://policy.example" }; + var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); + var client = new BackendOperationsClient(httpClient, options, loggerFactory.CreateLogger()); + + var request = new PolicyActivationRequest(false, null, null, false, null, null); + + var exception = await Assert.ThrowsAsync(() => client.ActivatePolicyRevisionAsync("P-7", 4, request, CancellationToken.None)); + Assert.Equal(HttpStatusCode.BadRequest, exception.StatusCode); + Assert.Equal("ERR_POL_002", exception.ErrorCode); + Assert.Contains("Not approved", exception.Message); + } + +} diff --git a/src/Cli/__Tests/StellaOps.Cli.Tests/StellaOps.Cli.Tests.csproj b/src/Cli/__Tests/StellaOps.Cli.Tests/StellaOps.Cli.Tests.csproj new file mode 100644 index 00000000..e4a17dfc --- /dev/null +++ b/src/Cli/__Tests/StellaOps.Cli.Tests/StellaOps.Cli.Tests.csproj @@ -0,0 +1,30 @@ + + + + + net10.0 + enable + enable + false + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/StellaOps.Cli.Tests/Testing/TestHelpers.cs b/src/Cli/__Tests/StellaOps.Cli.Tests/Testing/TestHelpers.cs similarity index 100% rename from src/StellaOps.Cli.Tests/Testing/TestHelpers.cs rename to src/Cli/__Tests/StellaOps.Cli.Tests/Testing/TestHelpers.cs diff --git a/src/StellaOps.Cli.Tests/UnitTest1.cs b/src/Cli/__Tests/StellaOps.Cli.Tests/UnitTest1.cs similarity index 100% rename from src/StellaOps.Cli.Tests/UnitTest1.cs rename to src/Cli/__Tests/StellaOps.Cli.Tests/UnitTest1.cs diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/xunit.runner.json b/src/Cli/__Tests/StellaOps.Cli.Tests/xunit.runner.json similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/xunit.runner.json rename to src/Cli/__Tests/StellaOps.Cli.Tests/xunit.runner.json diff --git a/src/StellaOps.Concelier.Tests.Shared/AssemblyInfo.cs b/src/Concelier/StellaOps.Concelier.Tests.Shared/AssemblyInfo.cs similarity index 100% rename from src/StellaOps.Concelier.Tests.Shared/AssemblyInfo.cs rename to src/Concelier/StellaOps.Concelier.Tests.Shared/AssemblyInfo.cs diff --git a/src/StellaOps.Concelier.Tests.Shared/MongoFixtureCollection.cs b/src/Concelier/StellaOps.Concelier.Tests.Shared/MongoFixtureCollection.cs similarity index 100% rename from src/StellaOps.Concelier.Tests.Shared/MongoFixtureCollection.cs rename to src/Concelier/StellaOps.Concelier.Tests.Shared/MongoFixtureCollection.cs diff --git a/src/StellaOps.Concelier.WebService/AGENTS.md b/src/Concelier/StellaOps.Concelier.WebService/AGENTS.md similarity index 100% rename from src/StellaOps.Concelier.WebService/AGENTS.md rename to src/Concelier/StellaOps.Concelier.WebService/AGENTS.md diff --git a/src/StellaOps.Concelier.WebService/Contracts/AdvisoryObservationContracts.cs b/src/Concelier/StellaOps.Concelier.WebService/Contracts/AdvisoryObservationContracts.cs similarity index 97% rename from src/StellaOps.Concelier.WebService/Contracts/AdvisoryObservationContracts.cs rename to src/Concelier/StellaOps.Concelier.WebService/Contracts/AdvisoryObservationContracts.cs index 408e1f70..14545ee7 100644 --- a/src/StellaOps.Concelier.WebService/Contracts/AdvisoryObservationContracts.cs +++ b/src/Concelier/StellaOps.Concelier.WebService/Contracts/AdvisoryObservationContracts.cs @@ -1,16 +1,16 @@ -using 
System.Collections.Immutable; -using StellaOps.Concelier.Models.Observations; - -namespace StellaOps.Concelier.WebService.Contracts; - -public sealed record AdvisoryObservationQueryResponse( - ImmutableArray Observations, - AdvisoryObservationLinksetAggregateResponse Linkset, - string? NextCursor, - bool HasMore); - -public sealed record AdvisoryObservationLinksetAggregateResponse( - ImmutableArray Aliases, - ImmutableArray Purls, - ImmutableArray Cpes, - ImmutableArray References); +using System.Collections.Immutable; +using StellaOps.Concelier.Models.Observations; + +namespace StellaOps.Concelier.WebService.Contracts; + +public sealed record AdvisoryObservationQueryResponse( + ImmutableArray Observations, + AdvisoryObservationLinksetAggregateResponse Linkset, + string? NextCursor, + bool HasMore); + +public sealed record AdvisoryObservationLinksetAggregateResponse( + ImmutableArray Aliases, + ImmutableArray Purls, + ImmutableArray Cpes, + ImmutableArray References); diff --git a/src/StellaOps.Concelier.WebService/Contracts/AdvisoryRawContracts.cs b/src/Concelier/StellaOps.Concelier.WebService/Contracts/AdvisoryRawContracts.cs similarity index 98% rename from src/StellaOps.Concelier.WebService/Contracts/AdvisoryRawContracts.cs rename to src/Concelier/StellaOps.Concelier.WebService/Contracts/AdvisoryRawContracts.cs index af406fb7..4072da96 100644 --- a/src/StellaOps.Concelier.WebService/Contracts/AdvisoryRawContracts.cs +++ b/src/Concelier/StellaOps.Concelier.WebService/Contracts/AdvisoryRawContracts.cs @@ -1,127 +1,127 @@ -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Text.Json; -using System.Text.Json.Serialization; -using StellaOps.Concelier.RawModels; - -namespace StellaOps.Concelier.WebService.Contracts; - -public sealed record AdvisoryIngestRequest( - AdvisorySourceRequest Source, - AdvisoryUpstreamRequest Upstream, - AdvisoryContentRequest Content, - AdvisoryIdentifiersRequest Identifiers, - AdvisoryLinksetRequest? Linkset); - -public sealed record AdvisorySourceRequest( - [property: JsonPropertyName("vendor")] string Vendor, - [property: JsonPropertyName("connector")] string Connector, - [property: JsonPropertyName("version")] string Version, - [property: JsonPropertyName("stream")] string? Stream); - -public sealed record AdvisoryUpstreamRequest( - [property: JsonPropertyName("upstreamId")] string UpstreamId, - [property: JsonPropertyName("documentVersion")] string? DocumentVersion, - [property: JsonPropertyName("retrievedAt")] DateTimeOffset? RetrievedAt, - [property: JsonPropertyName("contentHash")] string ContentHash, - [property: JsonPropertyName("signature")] AdvisorySignatureRequest Signature, - [property: JsonPropertyName("provenance")] IDictionary? Provenance); - -public sealed record AdvisorySignatureRequest( - [property: JsonPropertyName("present")] bool Present, - [property: JsonPropertyName("format")] string? Format, - [property: JsonPropertyName("keyId")] string? KeyId, - [property: JsonPropertyName("sig")] string? Signature, - [property: JsonPropertyName("certificate")] string? Certificate, - [property: JsonPropertyName("digest")] string? Digest); - -public sealed record AdvisoryContentRequest( - [property: JsonPropertyName("format")] string Format, - [property: JsonPropertyName("specVersion")] string? SpecVersion, - [property: JsonPropertyName("raw")] JsonElement Raw, - [property: JsonPropertyName("encoding")] string? 
Encoding); - -public sealed record AdvisoryIdentifiersRequest( - [property: JsonPropertyName("primary")] string Primary, - [property: JsonPropertyName("aliases")] IReadOnlyList? Aliases); - -public sealed record AdvisoryLinksetRequest( - [property: JsonPropertyName("aliases")] IReadOnlyList? Aliases, - [property: JsonPropertyName("purls")] IReadOnlyList? PackageUrls, - [property: JsonPropertyName("cpes")] IReadOnlyList? Cpes, - [property: JsonPropertyName("references")] IReadOnlyList? References, - [property: JsonPropertyName("reconciledFrom")] IReadOnlyList? ReconciledFrom, - [property: JsonPropertyName("notes")] IDictionary? Notes); - -public sealed record AdvisoryLinksetReferenceRequest( - [property: JsonPropertyName("type")] string Type, - [property: JsonPropertyName("url")] string Url, - [property: JsonPropertyName("source")] string? Source); - -public sealed record AdvisoryIngestResponse( - [property: JsonPropertyName("id")] string Id, - [property: JsonPropertyName("inserted")] bool Inserted, - [property: JsonPropertyName("tenant")] string Tenant, - [property: JsonPropertyName("contentHash")] string ContentHash, - [property: JsonPropertyName("supersedes")] string? Supersedes, - [property: JsonPropertyName("ingestedAt")] DateTimeOffset IngestedAt, - [property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt); - -public sealed record AdvisoryRawRecordResponse( - [property: JsonPropertyName("id")] string Id, - [property: JsonPropertyName("tenant")] string Tenant, - [property: JsonPropertyName("ingestedAt")] DateTimeOffset IngestedAt, - [property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt, - [property: JsonPropertyName("document")] AdvisoryRawDocument Document); - -public sealed record AdvisoryRawListResponse( - [property: JsonPropertyName("records")] IReadOnlyList Records, - [property: JsonPropertyName("nextCursor")] string? NextCursor, - [property: JsonPropertyName("hasMore")] bool HasMore); - -public sealed record AdvisoryRawProvenanceResponse( - [property: JsonPropertyName("id")] string Id, - [property: JsonPropertyName("tenant")] string Tenant, - [property: JsonPropertyName("source")] RawSourceMetadata Source, - [property: JsonPropertyName("upstream")] RawUpstreamMetadata Upstream, - [property: JsonPropertyName("supersedes")] string? Supersedes, - [property: JsonPropertyName("ingestedAt")] DateTimeOffset IngestedAt, - [property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt); - -public sealed record AocVerifyRequest( - [property: JsonPropertyName("since")] DateTimeOffset? Since, - [property: JsonPropertyName("until")] DateTimeOffset? Until, - [property: JsonPropertyName("limit")] int? Limit, - [property: JsonPropertyName("sources")] IReadOnlyList? Sources, - [property: JsonPropertyName("codes")] IReadOnlyList? 
Codes); - -public sealed record AocVerifyResponse( - [property: JsonPropertyName("tenant")] string Tenant, - [property: JsonPropertyName("window")] AocVerifyWindow Window, - [property: JsonPropertyName("checked")] AocVerifyChecked Checked, - [property: JsonPropertyName("violations")] IReadOnlyList Violations, - [property: JsonPropertyName("metrics")] AocVerifyMetrics Metrics, - [property: JsonPropertyName("truncated")] bool Truncated); - -public sealed record AocVerifyWindow( - [property: JsonPropertyName("from")] DateTimeOffset From, - [property: JsonPropertyName("to")] DateTimeOffset To); - -public sealed record AocVerifyChecked( - [property: JsonPropertyName("advisories")] int Advisories, - [property: JsonPropertyName("vex")] int Vex); - -public sealed record AocVerifyMetrics( - [property: JsonPropertyName("ingestion_write_total")] int IngestionWriteTotal, - [property: JsonPropertyName("aoc_violation_total")] int AocViolationTotal); - -public sealed record AocVerifyViolation( - [property: JsonPropertyName("code")] string Code, - [property: JsonPropertyName("count")] int Count, - [property: JsonPropertyName("examples")] IReadOnlyList Examples); - -public sealed record AocVerifyViolationExample( - [property: JsonPropertyName("source")] string Source, - [property: JsonPropertyName("documentId")] string DocumentId, - [property: JsonPropertyName("contentHash")] string ContentHash, - [property: JsonPropertyName("path")] string Path); +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Text.Json; +using System.Text.Json.Serialization; +using StellaOps.Concelier.RawModels; + +namespace StellaOps.Concelier.WebService.Contracts; + +public sealed record AdvisoryIngestRequest( + AdvisorySourceRequest Source, + AdvisoryUpstreamRequest Upstream, + AdvisoryContentRequest Content, + AdvisoryIdentifiersRequest Identifiers, + AdvisoryLinksetRequest? Linkset); + +public sealed record AdvisorySourceRequest( + [property: JsonPropertyName("vendor")] string Vendor, + [property: JsonPropertyName("connector")] string Connector, + [property: JsonPropertyName("version")] string Version, + [property: JsonPropertyName("stream")] string? Stream); + +public sealed record AdvisoryUpstreamRequest( + [property: JsonPropertyName("upstreamId")] string UpstreamId, + [property: JsonPropertyName("documentVersion")] string? DocumentVersion, + [property: JsonPropertyName("retrievedAt")] DateTimeOffset? RetrievedAt, + [property: JsonPropertyName("contentHash")] string ContentHash, + [property: JsonPropertyName("signature")] AdvisorySignatureRequest Signature, + [property: JsonPropertyName("provenance")] IDictionary? Provenance); + +public sealed record AdvisorySignatureRequest( + [property: JsonPropertyName("present")] bool Present, + [property: JsonPropertyName("format")] string? Format, + [property: JsonPropertyName("keyId")] string? KeyId, + [property: JsonPropertyName("sig")] string? Signature, + [property: JsonPropertyName("certificate")] string? Certificate, + [property: JsonPropertyName("digest")] string? Digest); + +public sealed record AdvisoryContentRequest( + [property: JsonPropertyName("format")] string Format, + [property: JsonPropertyName("specVersion")] string? SpecVersion, + [property: JsonPropertyName("raw")] JsonElement Raw, + [property: JsonPropertyName("encoding")] string? Encoding); + +public sealed record AdvisoryIdentifiersRequest( + [property: JsonPropertyName("primary")] string Primary, + [property: JsonPropertyName("aliases")] IReadOnlyList? 
Aliases); + +public sealed record AdvisoryLinksetRequest( + [property: JsonPropertyName("aliases")] IReadOnlyList? Aliases, + [property: JsonPropertyName("purls")] IReadOnlyList? PackageUrls, + [property: JsonPropertyName("cpes")] IReadOnlyList? Cpes, + [property: JsonPropertyName("references")] IReadOnlyList? References, + [property: JsonPropertyName("reconciledFrom")] IReadOnlyList? ReconciledFrom, + [property: JsonPropertyName("notes")] IDictionary? Notes); + +public sealed record AdvisoryLinksetReferenceRequest( + [property: JsonPropertyName("type")] string Type, + [property: JsonPropertyName("url")] string Url, + [property: JsonPropertyName("source")] string? Source); + +public sealed record AdvisoryIngestResponse( + [property: JsonPropertyName("id")] string Id, + [property: JsonPropertyName("inserted")] bool Inserted, + [property: JsonPropertyName("tenant")] string Tenant, + [property: JsonPropertyName("contentHash")] string ContentHash, + [property: JsonPropertyName("supersedes")] string? Supersedes, + [property: JsonPropertyName("ingestedAt")] DateTimeOffset IngestedAt, + [property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt); + +public sealed record AdvisoryRawRecordResponse( + [property: JsonPropertyName("id")] string Id, + [property: JsonPropertyName("tenant")] string Tenant, + [property: JsonPropertyName("ingestedAt")] DateTimeOffset IngestedAt, + [property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt, + [property: JsonPropertyName("document")] AdvisoryRawDocument Document); + +public sealed record AdvisoryRawListResponse( + [property: JsonPropertyName("records")] IReadOnlyList Records, + [property: JsonPropertyName("nextCursor")] string? NextCursor, + [property: JsonPropertyName("hasMore")] bool HasMore); + +public sealed record AdvisoryRawProvenanceResponse( + [property: JsonPropertyName("id")] string Id, + [property: JsonPropertyName("tenant")] string Tenant, + [property: JsonPropertyName("source")] RawSourceMetadata Source, + [property: JsonPropertyName("upstream")] RawUpstreamMetadata Upstream, + [property: JsonPropertyName("supersedes")] string? Supersedes, + [property: JsonPropertyName("ingestedAt")] DateTimeOffset IngestedAt, + [property: JsonPropertyName("createdAt")] DateTimeOffset CreatedAt); + +public sealed record AocVerifyRequest( + [property: JsonPropertyName("since")] DateTimeOffset? Since, + [property: JsonPropertyName("until")] DateTimeOffset? Until, + [property: JsonPropertyName("limit")] int? Limit, + [property: JsonPropertyName("sources")] IReadOnlyList? Sources, + [property: JsonPropertyName("codes")] IReadOnlyList? 
Codes); + +public sealed record AocVerifyResponse( + [property: JsonPropertyName("tenant")] string Tenant, + [property: JsonPropertyName("window")] AocVerifyWindow Window, + [property: JsonPropertyName("checked")] AocVerifyChecked Checked, + [property: JsonPropertyName("violations")] IReadOnlyList Violations, + [property: JsonPropertyName("metrics")] AocVerifyMetrics Metrics, + [property: JsonPropertyName("truncated")] bool Truncated); + +public sealed record AocVerifyWindow( + [property: JsonPropertyName("from")] DateTimeOffset From, + [property: JsonPropertyName("to")] DateTimeOffset To); + +public sealed record AocVerifyChecked( + [property: JsonPropertyName("advisories")] int Advisories, + [property: JsonPropertyName("vex")] int Vex); + +public sealed record AocVerifyMetrics( + [property: JsonPropertyName("ingestion_write_total")] int IngestionWriteTotal, + [property: JsonPropertyName("aoc_violation_total")] int AocViolationTotal); + +public sealed record AocVerifyViolation( + [property: JsonPropertyName("code")] string Code, + [property: JsonPropertyName("count")] int Count, + [property: JsonPropertyName("examples")] IReadOnlyList Examples); + +public sealed record AocVerifyViolationExample( + [property: JsonPropertyName("source")] string Source, + [property: JsonPropertyName("documentId")] string DocumentId, + [property: JsonPropertyName("contentHash")] string ContentHash, + [property: JsonPropertyName("path")] string Path); diff --git a/src/StellaOps.Concelier.WebService/Diagnostics/HealthContracts.cs b/src/Concelier/StellaOps.Concelier.WebService/Diagnostics/HealthContracts.cs similarity index 100% rename from src/StellaOps.Concelier.WebService/Diagnostics/HealthContracts.cs rename to src/Concelier/StellaOps.Concelier.WebService/Diagnostics/HealthContracts.cs diff --git a/src/StellaOps.Concelier.WebService/Diagnostics/IngestionMetrics.cs b/src/Concelier/StellaOps.Concelier.WebService/Diagnostics/IngestionMetrics.cs similarity index 97% rename from src/StellaOps.Concelier.WebService/Diagnostics/IngestionMetrics.cs rename to src/Concelier/StellaOps.Concelier.WebService/Diagnostics/IngestionMetrics.cs index 948dced3..7bc219ba 100644 --- a/src/StellaOps.Concelier.WebService/Diagnostics/IngestionMetrics.cs +++ b/src/Concelier/StellaOps.Concelier.WebService/Diagnostics/IngestionMetrics.cs @@ -1,22 +1,22 @@ -using System.Diagnostics.Metrics; - -namespace StellaOps.Concelier.WebService.Diagnostics; - -internal static class IngestionMetrics -{ - internal const string MeterName = "StellaOps.Concelier.WebService.Ingestion"; - - private static readonly Meter Meter = new(MeterName); - - internal static readonly Counter WriteCounter = Meter.CreateCounter( - "ingestion_write_total", - description: "Counts raw advisory ingestion attempts, segmented by tenant, source, and result."); - - internal static readonly Counter ViolationCounter = Meter.CreateCounter( - "aoc_violation_total", - description: "Counts Aggregation-Only Contract violations detected during ingestion."); - - internal static readonly Counter VerificationCounter = Meter.CreateCounter( - "verify_runs_total", - description: "Counts AOC verification runs initiated via the API."); -} +using System.Diagnostics.Metrics; + +namespace StellaOps.Concelier.WebService.Diagnostics; + +internal static class IngestionMetrics +{ + internal const string MeterName = "StellaOps.Concelier.WebService.Ingestion"; + + private static readonly Meter Meter = new(MeterName); + + internal static readonly Counter WriteCounter = Meter.CreateCounter( + 
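/* Sketch only: the counter descriptions here promise segmentation by tenant, source, and
   result. Recording a write with that tag set looks roughly like the call below (tag names are
   illustrative, not the service's canonical dimension names):

   IngestionMetrics.WriteCounter.Add(1,
       new KeyValuePair<string, object?>("tenant", "tenant-a"),
       new KeyValuePair<string, object?>("source", "osv"),
       new KeyValuePair<string, object?>("result", "ok"));
*/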
"ingestion_write_total", + description: "Counts raw advisory ingestion attempts, segmented by tenant, source, and result."); + + internal static readonly Counter ViolationCounter = Meter.CreateCounter( + "aoc_violation_total", + description: "Counts Aggregation-Only Contract violations detected during ingestion."); + + internal static readonly Counter VerificationCounter = Meter.CreateCounter( + "verify_runs_total", + description: "Counts AOC verification runs initiated via the API."); +} diff --git a/src/StellaOps.Concelier.WebService/Diagnostics/JobMetrics.cs b/src/Concelier/StellaOps.Concelier.WebService/Diagnostics/JobMetrics.cs similarity index 100% rename from src/StellaOps.Concelier.WebService/Diagnostics/JobMetrics.cs rename to src/Concelier/StellaOps.Concelier.WebService/Diagnostics/JobMetrics.cs diff --git a/src/StellaOps.Concelier.WebService/Diagnostics/ProblemTypes.cs b/src/Concelier/StellaOps.Concelier.WebService/Diagnostics/ProblemTypes.cs similarity index 100% rename from src/StellaOps.Concelier.WebService/Diagnostics/ProblemTypes.cs rename to src/Concelier/StellaOps.Concelier.WebService/Diagnostics/ProblemTypes.cs diff --git a/src/StellaOps.Concelier.WebService/Diagnostics/ServiceStatus.cs b/src/Concelier/StellaOps.Concelier.WebService/Diagnostics/ServiceStatus.cs similarity index 100% rename from src/StellaOps.Concelier.WebService/Diagnostics/ServiceStatus.cs rename to src/Concelier/StellaOps.Concelier.WebService/Diagnostics/ServiceStatus.cs diff --git a/src/StellaOps.Concelier.WebService/Extensions/AdvisoryRawRequestMapper.cs b/src/Concelier/StellaOps.Concelier.WebService/Extensions/AdvisoryRawRequestMapper.cs similarity index 97% rename from src/StellaOps.Concelier.WebService/Extensions/AdvisoryRawRequestMapper.cs rename to src/Concelier/StellaOps.Concelier.WebService/Extensions/AdvisoryRawRequestMapper.cs index 30222e4f..00b6560b 100644 --- a/src/StellaOps.Concelier.WebService/Extensions/AdvisoryRawRequestMapper.cs +++ b/src/Concelier/StellaOps.Concelier.WebService/Extensions/AdvisoryRawRequestMapper.cs @@ -1,157 +1,157 @@ -using System.Collections.Immutable; -using System.Text.Json; -using StellaOps.Concelier.RawModels; -using StellaOps.Concelier.WebService.Contracts; - -namespace StellaOps.Concelier.WebService.Extensions; - -internal static class AdvisoryRawRequestMapper -{ - internal static AdvisoryRawDocument Map(AdvisoryIngestRequest request, string tenant, TimeProvider timeProvider) - { - ArgumentNullException.ThrowIfNull(request); - ArgumentException.ThrowIfNullOrWhiteSpace(tenant); - ArgumentNullException.ThrowIfNull(timeProvider); - - var sourceRequest = request.Source ?? throw new ArgumentException("source section is required.", nameof(request)); - var upstreamRequest = request.Upstream ?? throw new ArgumentException("upstream section is required.", nameof(request)); - var contentRequest = request.Content ?? throw new ArgumentException("content section is required.", nameof(request)); - var identifiersRequest = request.Identifiers ?? throw new ArgumentException("identifiers section is required.", nameof(request)); - - var source = new RawSourceMetadata( - sourceRequest.Vendor, - sourceRequest.Connector, - sourceRequest.Version, - string.IsNullOrWhiteSpace(sourceRequest.Stream) ? null : sourceRequest.Stream); - - var signatureRequest = upstreamRequest.Signature ?? new AdvisorySignatureRequest(false, null, null, null, null, null); - var signature = new RawSignatureMetadata( - signatureRequest.Present, - string.IsNullOrWhiteSpace(signatureRequest.Format) ? 
null : signatureRequest.Format, - string.IsNullOrWhiteSpace(signatureRequest.KeyId) ? null : signatureRequest.KeyId, - string.IsNullOrWhiteSpace(signatureRequest.Signature) ? null : signatureRequest.Signature, - string.IsNullOrWhiteSpace(signatureRequest.Certificate) ? null : signatureRequest.Certificate, - string.IsNullOrWhiteSpace(signatureRequest.Digest) ? null : signatureRequest.Digest); - - var retrievedAt = upstreamRequest.RetrievedAt ?? timeProvider.GetUtcNow(); - - var upstream = new RawUpstreamMetadata( - upstreamRequest.UpstreamId, - string.IsNullOrWhiteSpace(upstreamRequest.DocumentVersion) ? null : upstreamRequest.DocumentVersion, - retrievedAt, - upstreamRequest.ContentHash, - signature, - NormalizeDictionary(upstreamRequest.Provenance)); - - var rawContent = NormalizeRawContent(contentRequest.Raw); - var content = new RawContent( - contentRequest.Format, - string.IsNullOrWhiteSpace(contentRequest.SpecVersion) ? null : contentRequest.SpecVersion, - rawContent, - string.IsNullOrWhiteSpace(contentRequest.Encoding) ? null : contentRequest.Encoding); - - var aliases = NormalizeStrings(identifiersRequest.Aliases); - if (aliases.IsDefault) - { - aliases = ImmutableArray.Empty; - } - - var identifiers = new RawIdentifiers( - aliases, - identifiersRequest.Primary); - - var linksetRequest = request.Linkset; - var linkset = new RawLinkset - { - Aliases = NormalizeStrings(linksetRequest?.Aliases), - PackageUrls = NormalizeStrings(linksetRequest?.PackageUrls), - Cpes = NormalizeStrings(linksetRequest?.Cpes), - References = NormalizeReferences(linksetRequest?.References), - ReconciledFrom = NormalizeStrings(linksetRequest?.ReconciledFrom), - Notes = NormalizeDictionary(linksetRequest?.Notes) - }; - - return new AdvisoryRawDocument( - tenant.Trim().ToLowerInvariant(), - source, - upstream, - content, - identifiers, - linkset); - } - - internal static ImmutableArray NormalizeStrings(IEnumerable? values) - { - if (values is null) - { - return ImmutableArray.Empty; - } - - var builder = ImmutableArray.CreateBuilder(); - foreach (var value in values) - { - if (string.IsNullOrWhiteSpace(value)) - { - continue; - } - - builder.Add(value.Trim()); - } - - return builder.Count == 0 ? ImmutableArray.Empty : builder.ToImmutable(); - } - - internal static ImmutableDictionary NormalizeDictionary(IDictionary? values) - { - if (values is null || values.Count == 0) - { - return ImmutableDictionary.Empty; - } - - var builder = ImmutableDictionary.CreateBuilder(StringComparer.Ordinal); - foreach (var kv in values) - { - if (string.IsNullOrWhiteSpace(kv.Key)) - { - continue; - } - - builder[kv.Key.Trim()] = kv.Value?.Trim() ?? string.Empty; - } - - return builder.ToImmutable(); - } - - private static ImmutableArray NormalizeReferences(IEnumerable? references) - { - if (references is null) - { - return ImmutableArray.Empty; - } - - var builder = ImmutableArray.CreateBuilder(); - foreach (var reference in references) - { - if (reference is null) - { - continue; - } - - if (string.IsNullOrWhiteSpace(reference.Type) || string.IsNullOrWhiteSpace(reference.Url)) - { - continue; - } - - builder.Add(new RawReference(reference.Type.Trim(), reference.Url.Trim(), string.IsNullOrWhiteSpace(reference.Source) ? null : reference.Source.Trim())); - } - - return builder.Count == 0 ? ImmutableArray.Empty : builder.ToImmutable(); - } - - private static JsonElement NormalizeRawContent(JsonElement element) - { - var json = element.ValueKind == JsonValueKind.Undefined ? 
"{}" : element.GetRawText(); - using var document = JsonDocument.Parse(string.IsNullOrWhiteSpace(json) ? "{}" : json); - return document.RootElement.Clone(); - } -} +using System.Collections.Immutable; +using System.Text.Json; +using StellaOps.Concelier.RawModels; +using StellaOps.Concelier.WebService.Contracts; + +namespace StellaOps.Concelier.WebService.Extensions; + +internal static class AdvisoryRawRequestMapper +{ + internal static AdvisoryRawDocument Map(AdvisoryIngestRequest request, string tenant, TimeProvider timeProvider) + { + ArgumentNullException.ThrowIfNull(request); + ArgumentException.ThrowIfNullOrWhiteSpace(tenant); + ArgumentNullException.ThrowIfNull(timeProvider); + + var sourceRequest = request.Source ?? throw new ArgumentException("source section is required.", nameof(request)); + var upstreamRequest = request.Upstream ?? throw new ArgumentException("upstream section is required.", nameof(request)); + var contentRequest = request.Content ?? throw new ArgumentException("content section is required.", nameof(request)); + var identifiersRequest = request.Identifiers ?? throw new ArgumentException("identifiers section is required.", nameof(request)); + + var source = new RawSourceMetadata( + sourceRequest.Vendor, + sourceRequest.Connector, + sourceRequest.Version, + string.IsNullOrWhiteSpace(sourceRequest.Stream) ? null : sourceRequest.Stream); + + var signatureRequest = upstreamRequest.Signature ?? new AdvisorySignatureRequest(false, null, null, null, null, null); + var signature = new RawSignatureMetadata( + signatureRequest.Present, + string.IsNullOrWhiteSpace(signatureRequest.Format) ? null : signatureRequest.Format, + string.IsNullOrWhiteSpace(signatureRequest.KeyId) ? null : signatureRequest.KeyId, + string.IsNullOrWhiteSpace(signatureRequest.Signature) ? null : signatureRequest.Signature, + string.IsNullOrWhiteSpace(signatureRequest.Certificate) ? null : signatureRequest.Certificate, + string.IsNullOrWhiteSpace(signatureRequest.Digest) ? null : signatureRequest.Digest); + + var retrievedAt = upstreamRequest.RetrievedAt ?? timeProvider.GetUtcNow(); + + var upstream = new RawUpstreamMetadata( + upstreamRequest.UpstreamId, + string.IsNullOrWhiteSpace(upstreamRequest.DocumentVersion) ? null : upstreamRequest.DocumentVersion, + retrievedAt, + upstreamRequest.ContentHash, + signature, + NormalizeDictionary(upstreamRequest.Provenance)); + + var rawContent = NormalizeRawContent(contentRequest.Raw); + var content = new RawContent( + contentRequest.Format, + string.IsNullOrWhiteSpace(contentRequest.SpecVersion) ? null : contentRequest.SpecVersion, + rawContent, + string.IsNullOrWhiteSpace(contentRequest.Encoding) ? 
null : contentRequest.Encoding); + + var aliases = NormalizeStrings(identifiersRequest.Aliases); + if (aliases.IsDefault) + { + aliases = ImmutableArray.Empty; + } + + var identifiers = new RawIdentifiers( + aliases, + identifiersRequest.Primary); + + var linksetRequest = request.Linkset; + var linkset = new RawLinkset + { + Aliases = NormalizeStrings(linksetRequest?.Aliases), + PackageUrls = NormalizeStrings(linksetRequest?.PackageUrls), + Cpes = NormalizeStrings(linksetRequest?.Cpes), + References = NormalizeReferences(linksetRequest?.References), + ReconciledFrom = NormalizeStrings(linksetRequest?.ReconciledFrom), + Notes = NormalizeDictionary(linksetRequest?.Notes) + }; + + return new AdvisoryRawDocument( + tenant.Trim().ToLowerInvariant(), + source, + upstream, + content, + identifiers, + linkset); + } + + internal static ImmutableArray NormalizeStrings(IEnumerable? values) + { + if (values is null) + { + return ImmutableArray.Empty; + } + + var builder = ImmutableArray.CreateBuilder(); + foreach (var value in values) + { + if (string.IsNullOrWhiteSpace(value)) + { + continue; + } + + builder.Add(value.Trim()); + } + + return builder.Count == 0 ? ImmutableArray.Empty : builder.ToImmutable(); + } + + internal static ImmutableDictionary NormalizeDictionary(IDictionary? values) + { + if (values is null || values.Count == 0) + { + return ImmutableDictionary.Empty; + } + + var builder = ImmutableDictionary.CreateBuilder(StringComparer.Ordinal); + foreach (var kv in values) + { + if (string.IsNullOrWhiteSpace(kv.Key)) + { + continue; + } + + builder[kv.Key.Trim()] = kv.Value?.Trim() ?? string.Empty; + } + + return builder.ToImmutable(); + } + + private static ImmutableArray NormalizeReferences(IEnumerable? references) + { + if (references is null) + { + return ImmutableArray.Empty; + } + + var builder = ImmutableArray.CreateBuilder(); + foreach (var reference in references) + { + if (reference is null) + { + continue; + } + + if (string.IsNullOrWhiteSpace(reference.Type) || string.IsNullOrWhiteSpace(reference.Url)) + { + continue; + } + + builder.Add(new RawReference(reference.Type.Trim(), reference.Url.Trim(), string.IsNullOrWhiteSpace(reference.Source) ? null : reference.Source.Trim())); + } + + return builder.Count == 0 ? ImmutableArray.Empty : builder.ToImmutable(); + } + + private static JsonElement NormalizeRawContent(JsonElement element) + { + var json = element.ValueKind == JsonValueKind.Undefined ? "{}" : element.GetRawText(); + using var document = JsonDocument.Parse(string.IsNullOrWhiteSpace(json) ? 
"{}" : json); + return document.RootElement.Clone(); + } +} diff --git a/src/StellaOps.Concelier.WebService/Extensions/ConfigurationExtensions.cs b/src/Concelier/StellaOps.Concelier.WebService/Extensions/ConfigurationExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.WebService/Extensions/ConfigurationExtensions.cs rename to src/Concelier/StellaOps.Concelier.WebService/Extensions/ConfigurationExtensions.cs diff --git a/src/StellaOps.Concelier.WebService/Extensions/JobRegistrationExtensions.cs b/src/Concelier/StellaOps.Concelier.WebService/Extensions/JobRegistrationExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.WebService/Extensions/JobRegistrationExtensions.cs rename to src/Concelier/StellaOps.Concelier.WebService/Extensions/JobRegistrationExtensions.cs diff --git a/src/StellaOps.Concelier.WebService/Extensions/MirrorEndpointExtensions.cs b/src/Concelier/StellaOps.Concelier.WebService/Extensions/MirrorEndpointExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.WebService/Extensions/MirrorEndpointExtensions.cs rename to src/Concelier/StellaOps.Concelier.WebService/Extensions/MirrorEndpointExtensions.cs diff --git a/src/StellaOps.Concelier.WebService/Extensions/TelemetryExtensions.cs b/src/Concelier/StellaOps.Concelier.WebService/Extensions/TelemetryExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.WebService/Extensions/TelemetryExtensions.cs rename to src/Concelier/StellaOps.Concelier.WebService/Extensions/TelemetryExtensions.cs diff --git a/src/StellaOps.Concelier.WebService/Filters/JobAuthorizationAuditFilter.cs b/src/Concelier/StellaOps.Concelier.WebService/Filters/JobAuthorizationAuditFilter.cs similarity index 100% rename from src/StellaOps.Concelier.WebService/Filters/JobAuthorizationAuditFilter.cs rename to src/Concelier/StellaOps.Concelier.WebService/Filters/JobAuthorizationAuditFilter.cs diff --git a/src/StellaOps.Concelier.WebService/Jobs/JobDefinitionResponse.cs b/src/Concelier/StellaOps.Concelier.WebService/Jobs/JobDefinitionResponse.cs similarity index 100% rename from src/StellaOps.Concelier.WebService/Jobs/JobDefinitionResponse.cs rename to src/Concelier/StellaOps.Concelier.WebService/Jobs/JobDefinitionResponse.cs diff --git a/src/StellaOps.Concelier.WebService/Jobs/JobRunResponse.cs b/src/Concelier/StellaOps.Concelier.WebService/Jobs/JobRunResponse.cs similarity index 100% rename from src/StellaOps.Concelier.WebService/Jobs/JobRunResponse.cs rename to src/Concelier/StellaOps.Concelier.WebService/Jobs/JobRunResponse.cs diff --git a/src/StellaOps.Concelier.WebService/Jobs/JobTriggerRequest.cs b/src/Concelier/StellaOps.Concelier.WebService/Jobs/JobTriggerRequest.cs similarity index 100% rename from src/StellaOps.Concelier.WebService/Jobs/JobTriggerRequest.cs rename to src/Concelier/StellaOps.Concelier.WebService/Jobs/JobTriggerRequest.cs diff --git a/src/StellaOps.Concelier.WebService/Options/ConcelierOptions.cs b/src/Concelier/StellaOps.Concelier.WebService/Options/ConcelierOptions.cs similarity index 100% rename from src/StellaOps.Concelier.WebService/Options/ConcelierOptions.cs rename to src/Concelier/StellaOps.Concelier.WebService/Options/ConcelierOptions.cs diff --git a/src/StellaOps.Concelier.WebService/Options/ConcelierOptionsPostConfigure.cs b/src/Concelier/StellaOps.Concelier.WebService/Options/ConcelierOptionsPostConfigure.cs similarity index 100% rename from src/StellaOps.Concelier.WebService/Options/ConcelierOptionsPostConfigure.cs rename to 
src/Concelier/StellaOps.Concelier.WebService/Options/ConcelierOptionsPostConfigure.cs diff --git a/src/StellaOps.Concelier.WebService/Options/ConcelierOptionsValidator.cs b/src/Concelier/StellaOps.Concelier.WebService/Options/ConcelierOptionsValidator.cs similarity index 100% rename from src/StellaOps.Concelier.WebService/Options/ConcelierOptionsValidator.cs rename to src/Concelier/StellaOps.Concelier.WebService/Options/ConcelierOptionsValidator.cs diff --git a/src/StellaOps.Concelier.WebService/Program.cs b/src/Concelier/StellaOps.Concelier.WebService/Program.cs similarity index 100% rename from src/StellaOps.Concelier.WebService/Program.cs rename to src/Concelier/StellaOps.Concelier.WebService/Program.cs diff --git a/src/StellaOps.Concelier.WebService/Properties/launchSettings.json b/src/Concelier/StellaOps.Concelier.WebService/Properties/launchSettings.json similarity index 100% rename from src/StellaOps.Concelier.WebService/Properties/launchSettings.json rename to src/Concelier/StellaOps.Concelier.WebService/Properties/launchSettings.json diff --git a/src/StellaOps.Concelier.WebService/Services/MirrorFileLocator.cs b/src/Concelier/StellaOps.Concelier.WebService/Services/MirrorFileLocator.cs similarity index 100% rename from src/StellaOps.Concelier.WebService/Services/MirrorFileLocator.cs rename to src/Concelier/StellaOps.Concelier.WebService/Services/MirrorFileLocator.cs diff --git a/src/StellaOps.Concelier.WebService/Services/MirrorRateLimiter.cs b/src/Concelier/StellaOps.Concelier.WebService/Services/MirrorRateLimiter.cs similarity index 100% rename from src/StellaOps.Concelier.WebService/Services/MirrorRateLimiter.cs rename to src/Concelier/StellaOps.Concelier.WebService/Services/MirrorRateLimiter.cs diff --git a/src/Concelier/StellaOps.Concelier.WebService/StellaOps.Concelier.WebService.csproj b/src/Concelier/StellaOps.Concelier.WebService/StellaOps.Concelier.WebService.csproj new file mode 100644 index 00000000..6e538da5 --- /dev/null +++ b/src/Concelier/StellaOps.Concelier.WebService/StellaOps.Concelier.WebService.csproj @@ -0,0 +1,38 @@ + + + + net10.0 + preview + enable + enable + true + StellaOps.Concelier.WebService + + + + + + + + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/StellaOps.Concelier.WebService/TASKS.md b/src/Concelier/StellaOps.Concelier.WebService/TASKS.md similarity index 99% rename from src/StellaOps.Concelier.WebService/TASKS.md rename to src/Concelier/StellaOps.Concelier.WebService/TASKS.md index c2754027..1f7d07ad 100644 --- a/src/StellaOps.Concelier.WebService/TASKS.md +++ b/src/Concelier/StellaOps.Concelier.WebService/TASKS.md @@ -1,95 +1,95 @@ -# TASKS — Epic 1: Aggregation-Only Contract -> **AOC Reminder:** service links and exposes raw data only—no precedence, severity, or hint computation inside Concelier APIs. -| ID | Status | Owner(s) | Depends on | Notes | -|---|---|---|---|---| -| CONCELIER-WEB-AOC-19-001 `Raw ingestion endpoints` | DONE (2025-10-28) | Concelier WebService Guild | CONCELIER-CORE-AOC-19-001, CONCELIER-STORE-AOC-19-001 | Implement `POST /ingest/advisory`, `GET /advisories/raw*`, and `POST /aoc/verify` minimal API endpoints. Enforce new Authority scopes, inject tenant claims, and surface `AOCWriteGuard` to repository calls. | -> Docs alignment (2025-10-26): Endpoint expectations + scope requirements detailed in `docs/ingestion/aggregation-only-contract.md` and `docs/security/authority-scopes.md`. 
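A minimal call against the `POST /ingest/advisory` endpoint described in CONCELIER-WEB-AOC-19-001, built from the AdvisoryIngestRequest contracts added earlier in this patch, might look like the sketch below; every value is a placeholder, and the exact route prefix, payload format, and tenant handling are assumptions rather than part of this change.

using var raw = JsonDocument.Parse("{\"id\":\"GHSA-xxxx-xxxx-xxxx\"}");   // placeholder upstream payload
var ingestRequest = new AdvisoryIngestRequest(
    new AdvisorySourceRequest(Vendor: "osv", Connector: "osv", Version: "1.0.0", Stream: null),
    new AdvisoryUpstreamRequest(
        UpstreamId: "GHSA-xxxx-xxxx-xxxx",
        DocumentVersion: null,
        RetrievedAt: DateTimeOffset.UtcNow,
        ContentHash: "sha256:placeholder",
        Signature: new AdvisorySignatureRequest(Present: false, Format: null, KeyId: null, Signature: null, Certificate: null, Digest: null),
        Provenance: null),
    new AdvisoryContentRequest(Format: "osv", SpecVersion: "1.6", Raw: raw.RootElement, Encoding: null),
    new AdvisoryIdentifiersRequest(Primary: "GHSA-xxxx-xxxx-xxxx", Aliases: new[] { "CVE-2025-0000" }),
    Linkset: null);

using var response = await httpClient.PostAsJsonAsync("/ingest/advisory", ingestRequest, cancellationToken);
response.EnsureSuccessStatusCode();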
-> 2025-10-28: Added coverage for pagination, tenancy enforcement, and ingestion/verification metrics; verified guard handling paths end-to-end. -| CONCELIER-WEB-AOC-19-002 `AOC observability` | TODO | Concelier WebService Guild, Observability Guild | CONCELIER-WEB-AOC-19-001 | Emit `ingestion_write_total`, `aoc_violation_total`, latency histograms, and tracing spans (`ingest.fetch/transform/write`, `aoc.guard`). Wire structured logging to include tenant, source vendor, upstream id, and content hash. | -> Docs alignment (2025-10-26): Metrics/traces/log schema in `docs/observability/observability.md`. -| CONCELIER-WEB-AOC-19-003 `Schema/guard unit tests` | TODO | QA Guild | CONCELIER-WEB-AOC-19-001 | Add unit tests covering schema validation failures, forbidden field rejections (`ERR_AOC_001/002/006/007`), idempotent upserts, and supersedes chains using deterministic fixtures. | -> Docs alignment (2025-10-26): Guard rules + error codes documented in AOC reference §5 and CLI guide. -| CONCELIER-WEB-AOC-19-004 `End-to-end ingest verification` | TODO | Concelier WebService Guild, QA Guild | CONCELIER-WEB-AOC-19-003, CONCELIER-CORE-AOC-19-002 | Create integration tests ingesting large advisory batches (cold/warm) validating linkset enrichment, metrics emission, and reproducible outputs. Capture load-test scripts + doc notes for Offline Kit dry runs. | -> Docs alignment (2025-10-26): Offline verification workflow referenced in `docs/deploy/containers.md` §5. - -## Policy Engine v2 - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| CONCELIER-POLICY-20-001 `Policy selection endpoints` | TODO | Concelier WebService Guild | WEB-POLICY-20-001, CONCELIER-CORE-AOC-19-004 | Add batch advisory lookup APIs (`/policy/select/advisories`, `/policy/select/vex`) optimized for PURL/ID lists with pagination, tenant scoping, and explain metadata. | - -## StellaOps Console (Sprint 23) - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| CONCELIER-CONSOLE-23-001 `Advisory aggregation views` | TODO | Concelier WebService Guild, BE-Base Platform Guild | CONCELIER-LNM-21-201, CONCELIER-LNM-21-202 | Expose `/console/advisories` endpoints returning aggregation groups (per linkset) with source chips, severity summaries, and provenance metadata for Console list + dashboard cards. Support filters by source, ecosystem, published/modified window, tenant enforcement. | -| CONCELIER-CONSOLE-23-002 `Dashboard deltas API` | TODO | Concelier WebService Guild | CONCELIER-CONSOLE-23-001, CONCELIER-LNM-21-203 | Provide aggregated advisory delta counts (new, modified, conflicting) for Console dashboard + live status ticker; emit structured events for queue lag metrics. Ensure deterministic counts across repeated queries. | -| CONCELIER-CONSOLE-23-003 `Search fan-out helpers` | TODO | Concelier WebService Guild | CONCELIER-CONSOLE-23-001 | Deliver fast lookup endpoints for CVE/GHSA/purl search (linksets, observations) returning evidence fragments for Console global search; implement caching + scope guards. 
| - -## Graph Explorer v1 - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| - -## Link-Not-Merge v1 - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| CONCELIER-LNM-21-201 `Observation APIs` | TODO | Concelier WebService Guild, BE-Base Platform Guild | CONCELIER-LNM-21-001 | Add REST endpoints for advisory observations (`GET /advisories/observations`) with filters (alias, purl, source), pagination, and tenancy enforcement. | -| CONCELIER-LNM-21-202 `Linkset APIs` | TODO | Concelier WebService Guild | CONCELIER-LNM-21-002, CONCELIER-LNM-21-003 | Implement linkset read/export endpoints (`/advisories/linksets/{id}`, `/advisories/by-purl/{purl}`, `/advisories/linksets/{id}/export`, `/evidence`) with correlation/conflict payloads and `ERR_AGG_*` mapping. | -| CONCELIER-LNM-21-203 `Ingest events` | TODO | Concelier WebService Guild, Platform Events Guild | CONCELIER-LNM-21-005 | Publish NATS/Redis events for new observations/linksets and ensure idempotent consumer contracts; document event schemas. | - -## Graph & Vuln Explorer v1 - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| CONCELIER-GRAPH-24-101 `Advisory summary API` | TODO | Concelier WebService Guild | CONCELIER-GRAPH-24-001 | Expose `/advisories/summary` returning raw linkset/observation metadata for overlay services; no derived severity or fix hints. | -| CONCELIER-GRAPH-28-102 `Evidence batch API` | TODO | Concelier WebService Guild | CONCELIER-LNM-21-201 | Add batch fetch for advisory observations/linksets keyed by component sets to feed Graph overlay tooltips efficiently. | - -## VEX Lens (Sprint 30) - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| CONCELIER-VEXLENS-30-001 `Advisory rationale bridges` | TODO | Concelier WebService Guild, VEX Lens Guild | CONCELIER-VULN-29-001, VEXLENS-30-005 | Guarantee advisory key consistency and cross-links for consensus rationale; Label: VEX-Lens. | - -## Vulnerability Explorer (Sprint 29) - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| CONCELIER-VULN-29-001 `Advisory key canonicalization` | TODO | Concelier WebService Guild, Data Integrity Guild | CONCELIER-LNM-21-001 | Canonicalize (lossless) advisory identifiers (CVE/GHSA/vendor) into `advisory_key`, persist `links[]`, expose raw payload snapshots for Explorer evidence tabs; AOC-compliant: no merge, no derived fields, no suppression. Include migration/backfill scripts. | -| CONCELIER-VULN-29-002 `Evidence retrieval API` | TODO | Concelier WebService Guild | CONCELIER-VULN-29-001, VULN-API-29-003 | Provide `/vuln/evidence/advisories/{advisory_key}` returning raw advisory docs with provenance, filtering by tenant and source. | -| CONCELIER-VULN-29-004 `Observability enhancements` | TODO | Concelier WebService Guild, Observability Guild | CONCELIER-VULN-29-001 | Instrument metrics/logs for advisory normalization (key collisions, withdrawn flags), emit events consumed by Vuln Explorer resolver. | - -## Advisory AI (Sprint 31) - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| CONCELIER-AIAI-31-001 `Paragraph anchors` | TODO | Concelier WebService Guild | CONCELIER-VULN-29-001 | Expose advisory chunk API returning paragraph anchors, section metadata, and token-safe text for Advisory AI retrieval. 
| -| CONCELIER-AIAI-31-002 `Structured fields` | TODO | Concelier WebService Guild | CONCELIER-AIAI-31-001 | Ensure normalized advisories expose workaround/fix/CVSS fields via API; add caching for summary queries. | -| CONCELIER-AIAI-31-003 `Advisory AI telemetry` | TODO | Concelier WebService Guild, Observability Guild | CONCELIER-AIAI-31-001 | Emit metrics/logs for chunk requests, cache hits, and guardrail blocks triggered by advisory payloads. | - -## Observability & Forensics (Epic 15) -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| CONCELIER-WEB-OBS-50-001 `Telemetry adoption` | TODO | Concelier WebService Guild | TELEMETRY-OBS-50-001, CONCELIER-OBS-50-001 | Adopt telemetry core in web service host, ensure ingest + read endpoints emit trace/log fields (`tenant_id`, `route`, `decision_effect`), and add correlation IDs to responses. | -| CONCELIER-WEB-OBS-51-001 `Observability APIs` | TODO | Concelier WebService Guild | CONCELIER-WEB-OBS-50-001, WEB-OBS-51-001 | Surface ingest health metrics, queue depth, and SLO status via `/obs/concelier/health` endpoint for Console widgets, with caching and tenant partitioning. | -| CONCELIER-WEB-OBS-52-001 `Timeline streaming` | TODO | Concelier WebService Guild | CONCELIER-WEB-OBS-50-001, TIMELINE-OBS-52-003 | Provide SSE stream `/obs/concelier/timeline` bridging to Timeline Indexer with paging tokens, guardrails, and audit logging. | -| CONCELIER-WEB-OBS-53-001 `Evidence locker integration` | TODO | Concelier WebService Guild, Evidence Locker Guild | CONCELIER-OBS-53-001, EVID-OBS-53-003 | Add `/evidence/advisories/*` routes invoking evidence locker snapshots, verifying tenant scopes (`evidence:read`), and returning signed manifest metadata. | -| CONCELIER-WEB-OBS-54-001 `Attestation exposure` | TODO | Concelier WebService Guild | CONCELIER-OBS-54-001, PROV-OBS-54-001 | Provide `/attestations/advisories/*` read APIs surfacing DSSE status, verification summary, and provenance chain for Console/CLI. | -| CONCELIER-WEB-OBS-55-001 `Incident mode toggles` | TODO | Concelier WebService Guild, DevOps Guild | CONCELIER-OBS-55-001, WEB-OBS-55-001 | Implement incident mode toggle endpoints, propagate to orchestrator/locker, and document cooldown/backoff semantics. | - -## Air-Gapped Mode (Epic 16) -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| CONCELIER-WEB-AIRGAP-56-001 `Mirror import APIs` | TODO | Concelier WebService Guild | AIRGAP-IMP-58-001, CONCELIER-AIRGAP-56-001 | Extend ingestion endpoints to register mirror bundle sources, expose bundle catalog queries, and block external feed URLs in sealed mode. | -| CONCELIER-WEB-AIRGAP-56-002 `Airgap status surfaces` | TODO | Concelier WebService Guild | CONCELIER-AIRGAP-57-002, AIRGAP-CTL-56-002 | Add staleness metadata and bundle provenance to advisory APIs (`/advisories/observations`, `/advisories/linksets`). | -| CONCELIER-WEB-AIRGAP-57-001 `Error remediation` | TODO | Concelier WebService Guild, AirGap Policy Guild | AIRGAP-POL-56-001 | Map sealed-mode violations to `AIRGAP_EGRESS_BLOCKED` responses with user guidance. | -| CONCELIER-WEB-AIRGAP-58-001 `Import timeline emission` | TODO | Concelier WebService Guild, AirGap Importer Guild | CONCELIER-WEB-AIRGAP-56-001, TIMELINE-OBS-53-001 | Emit timeline events for bundle ingestion operations with bundle ID, scope, and actor metadata. 
| - -## SDKs & OpenAPI (Epic 17) -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| CONCELIER-WEB-OAS-61-001 `/.well-known/openapi` | TODO | Concelier WebService Guild | OAS-61-001 | Implement discovery endpoint emitting Concelier spec with version metadata and ETag. | -| CONCELIER-WEB-OAS-61-002 `Error envelope migration` | TODO | Concelier WebService Guild | APIGOV-61-001 | Ensure all API responses use standardized error envelope; update controllers/tests. | -| CONCELIER-WEB-OAS-62-001 `Examples expansion` | TODO | Concelier WebService Guild | CONCELIER-OAS-61-002 | Add curated examples for advisory observations/linksets/conflicts; integrate into dev portal. | -| CONCELIER-WEB-OAS-63-001 `Deprecation headers` | TODO | Concelier WebService Guild, API Governance Guild | APIGOV-63-001 | Add Sunset/Deprecation headers for retiring endpoints and update documentation/notifications. | +# TASKS — Epic 1: Aggregation-Only Contract +> **AOC Reminder:** service links and exposes raw data only—no precedence, severity, or hint computation inside Concelier APIs. +| ID | Status | Owner(s) | Depends on | Notes | +|---|---|---|---|---| +| CONCELIER-WEB-AOC-19-001 `Raw ingestion endpoints` | DONE (2025-10-28) | Concelier WebService Guild | CONCELIER-CORE-AOC-19-001, CONCELIER-STORE-AOC-19-001 | Implement `POST /ingest/advisory`, `GET /advisories/raw*`, and `POST /aoc/verify` minimal API endpoints. Enforce new Authority scopes, inject tenant claims, and surface `AOCWriteGuard` to repository calls. | +> Docs alignment (2025-10-26): Endpoint expectations + scope requirements detailed in `docs/ingestion/aggregation-only-contract.md` and `docs/security/authority-scopes.md`. +> 2025-10-28: Added coverage for pagination, tenancy enforcement, and ingestion/verification metrics; verified guard handling paths end-to-end. +| CONCELIER-WEB-AOC-19-002 `AOC observability` | TODO | Concelier WebService Guild, Observability Guild | CONCELIER-WEB-AOC-19-001 | Emit `ingestion_write_total`, `aoc_violation_total`, latency histograms, and tracing spans (`ingest.fetch/transform/write`, `aoc.guard`). Wire structured logging to include tenant, source vendor, upstream id, and content hash. | +> Docs alignment (2025-10-26): Metrics/traces/log schema in `docs/observability/observability.md`. +| CONCELIER-WEB-AOC-19-003 `Schema/guard unit tests` | TODO | QA Guild | CONCELIER-WEB-AOC-19-001 | Add unit tests covering schema validation failures, forbidden field rejections (`ERR_AOC_001/002/006/007`), idempotent upserts, and supersedes chains using deterministic fixtures. | +> Docs alignment (2025-10-26): Guard rules + error codes documented in AOC reference §5 and CLI guide. +| CONCELIER-WEB-AOC-19-004 `End-to-end ingest verification` | TODO | Concelier WebService Guild, QA Guild | CONCELIER-WEB-AOC-19-003, CONCELIER-CORE-AOC-19-002 | Create integration tests ingesting large advisory batches (cold/warm) validating linkset enrichment, metrics emission, and reproducible outputs. Capture load-test scripts + doc notes for Offline Kit dry runs. | +> Docs alignment (2025-10-26): Offline verification workflow referenced in `docs/deploy/containers.md` §5. 
+ +## Policy Engine v2 + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| CONCELIER-POLICY-20-001 `Policy selection endpoints` | TODO | Concelier WebService Guild | WEB-POLICY-20-001, CONCELIER-CORE-AOC-19-004 | Add batch advisory lookup APIs (`/policy/select/advisories`, `/policy/select/vex`) optimized for PURL/ID lists with pagination, tenant scoping, and explain metadata. | + +## StellaOps Console (Sprint 23) + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| CONCELIER-CONSOLE-23-001 `Advisory aggregation views` | TODO | Concelier WebService Guild, BE-Base Platform Guild | CONCELIER-LNM-21-201, CONCELIER-LNM-21-202 | Expose `/console/advisories` endpoints returning aggregation groups (per linkset) with source chips, severity summaries, and provenance metadata for Console list + dashboard cards. Support filters by source, ecosystem, published/modified window, tenant enforcement. | +| CONCELIER-CONSOLE-23-002 `Dashboard deltas API` | TODO | Concelier WebService Guild | CONCELIER-CONSOLE-23-001, CONCELIER-LNM-21-203 | Provide aggregated advisory delta counts (new, modified, conflicting) for Console dashboard + live status ticker; emit structured events for queue lag metrics. Ensure deterministic counts across repeated queries. | +| CONCELIER-CONSOLE-23-003 `Search fan-out helpers` | TODO | Concelier WebService Guild | CONCELIER-CONSOLE-23-001 | Deliver fast lookup endpoints for CVE/GHSA/purl search (linksets, observations) returning evidence fragments for Console global search; implement caching + scope guards. | + +## Graph Explorer v1 + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| + +## Link-Not-Merge v1 + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| CONCELIER-LNM-21-201 `Observation APIs` | TODO | Concelier WebService Guild, BE-Base Platform Guild | CONCELIER-LNM-21-001 | Add REST endpoints for advisory observations (`GET /advisories/observations`) with filters (alias, purl, source), pagination, and tenancy enforcement. | +| CONCELIER-LNM-21-202 `Linkset APIs` | TODO | Concelier WebService Guild | CONCELIER-LNM-21-002, CONCELIER-LNM-21-003 | Implement linkset read/export endpoints (`/advisories/linksets/{id}`, `/advisories/by-purl/{purl}`, `/advisories/linksets/{id}/export`, `/evidence`) with correlation/conflict payloads and `ERR_AGG_*` mapping. | +| CONCELIER-LNM-21-203 `Ingest events` | TODO | Concelier WebService Guild, Platform Events Guild | CONCELIER-LNM-21-005 | Publish NATS/Redis events for new observations/linksets and ensure idempotent consumer contracts; document event schemas. | + +## Graph & Vuln Explorer v1 + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| CONCELIER-GRAPH-24-101 `Advisory summary API` | TODO | Concelier WebService Guild | CONCELIER-GRAPH-24-001 | Expose `/advisories/summary` returning raw linkset/observation metadata for overlay services; no derived severity or fix hints. | +| CONCELIER-GRAPH-28-102 `Evidence batch API` | TODO | Concelier WebService Guild | CONCELIER-LNM-21-201 | Add batch fetch for advisory observations/linksets keyed by component sets to feed Graph overlay tooltips efficiently. 
| + +## VEX Lens (Sprint 30) + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| CONCELIER-VEXLENS-30-001 `Advisory rationale bridges` | TODO | Concelier WebService Guild, VEX Lens Guild | CONCELIER-VULN-29-001, VEXLENS-30-005 | Guarantee advisory key consistency and cross-links for consensus rationale; Label: VEX-Lens. | + +## Vulnerability Explorer (Sprint 29) + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| CONCELIER-VULN-29-001 `Advisory key canonicalization` | TODO | Concelier WebService Guild, Data Integrity Guild | CONCELIER-LNM-21-001 | Canonicalize (lossless) advisory identifiers (CVE/GHSA/vendor) into `advisory_key`, persist `links[]`, expose raw payload snapshots for Explorer evidence tabs; AOC-compliant: no merge, no derived fields, no suppression. Include migration/backfill scripts. | +| CONCELIER-VULN-29-002 `Evidence retrieval API` | TODO | Concelier WebService Guild | CONCELIER-VULN-29-001, VULN-API-29-003 | Provide `/vuln/evidence/advisories/{advisory_key}` returning raw advisory docs with provenance, filtering by tenant and source. | +| CONCELIER-VULN-29-004 `Observability enhancements` | TODO | Concelier WebService Guild, Observability Guild | CONCELIER-VULN-29-001 | Instrument metrics/logs for advisory normalization (key collisions, withdrawn flags), emit events consumed by Vuln Explorer resolver. | + +## Advisory AI (Sprint 31) + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| CONCELIER-AIAI-31-001 `Paragraph anchors` | TODO | Concelier WebService Guild | CONCELIER-VULN-29-001 | Expose advisory chunk API returning paragraph anchors, section metadata, and token-safe text for Advisory AI retrieval. | +| CONCELIER-AIAI-31-002 `Structured fields` | TODO | Concelier WebService Guild | CONCELIER-AIAI-31-001 | Ensure normalized advisories expose workaround/fix/CVSS fields via API; add caching for summary queries. | +| CONCELIER-AIAI-31-003 `Advisory AI telemetry` | TODO | Concelier WebService Guild, Observability Guild | CONCELIER-AIAI-31-001 | Emit metrics/logs for chunk requests, cache hits, and guardrail blocks triggered by advisory payloads. | + +## Observability & Forensics (Epic 15) +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| CONCELIER-WEB-OBS-50-001 `Telemetry adoption` | TODO | Concelier WebService Guild | TELEMETRY-OBS-50-001, CONCELIER-OBS-50-001 | Adopt telemetry core in web service host, ensure ingest + read endpoints emit trace/log fields (`tenant_id`, `route`, `decision_effect`), and add correlation IDs to responses. | +| CONCELIER-WEB-OBS-51-001 `Observability APIs` | TODO | Concelier WebService Guild | CONCELIER-WEB-OBS-50-001, WEB-OBS-51-001 | Surface ingest health metrics, queue depth, and SLO status via `/obs/concelier/health` endpoint for Console widgets, with caching and tenant partitioning. | +| CONCELIER-WEB-OBS-52-001 `Timeline streaming` | TODO | Concelier WebService Guild | CONCELIER-WEB-OBS-50-001, TIMELINE-OBS-52-003 | Provide SSE stream `/obs/concelier/timeline` bridging to Timeline Indexer with paging tokens, guardrails, and audit logging. 
| +| CONCELIER-WEB-OBS-53-001 `Evidence locker integration` | TODO | Concelier WebService Guild, Evidence Locker Guild | CONCELIER-OBS-53-001, EVID-OBS-53-003 | Add `/evidence/advisories/*` routes invoking evidence locker snapshots, verifying tenant scopes (`evidence:read`), and returning signed manifest metadata. | +| CONCELIER-WEB-OBS-54-001 `Attestation exposure` | TODO | Concelier WebService Guild | CONCELIER-OBS-54-001, PROV-OBS-54-001 | Provide `/attestations/advisories/*` read APIs surfacing DSSE status, verification summary, and provenance chain for Console/CLI. | +| CONCELIER-WEB-OBS-55-001 `Incident mode toggles` | TODO | Concelier WebService Guild, DevOps Guild | CONCELIER-OBS-55-001, WEB-OBS-55-001 | Implement incident mode toggle endpoints, propagate to orchestrator/locker, and document cooldown/backoff semantics. | + +## Air-Gapped Mode (Epic 16) +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| CONCELIER-WEB-AIRGAP-56-001 `Mirror import APIs` | TODO | Concelier WebService Guild | AIRGAP-IMP-58-001, CONCELIER-AIRGAP-56-001 | Extend ingestion endpoints to register mirror bundle sources, expose bundle catalog queries, and block external feed URLs in sealed mode. | +| CONCELIER-WEB-AIRGAP-56-002 `Airgap status surfaces` | TODO | Concelier WebService Guild | CONCELIER-AIRGAP-57-002, AIRGAP-CTL-56-002 | Add staleness metadata and bundle provenance to advisory APIs (`/advisories/observations`, `/advisories/linksets`). | +| CONCELIER-WEB-AIRGAP-57-001 `Error remediation` | TODO | Concelier WebService Guild, AirGap Policy Guild | AIRGAP-POL-56-001 | Map sealed-mode violations to `AIRGAP_EGRESS_BLOCKED` responses with user guidance. | +| CONCELIER-WEB-AIRGAP-58-001 `Import timeline emission` | TODO | Concelier WebService Guild, AirGap Importer Guild | CONCELIER-WEB-AIRGAP-56-001, TIMELINE-OBS-53-001 | Emit timeline events for bundle ingestion operations with bundle ID, scope, and actor metadata. | + +## SDKs & OpenAPI (Epic 17) +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| CONCELIER-WEB-OAS-61-001 `/.well-known/openapi` | TODO | Concelier WebService Guild | OAS-61-001 | Implement discovery endpoint emitting Concelier spec with version metadata and ETag. | +| CONCELIER-WEB-OAS-61-002 `Error envelope migration` | TODO | Concelier WebService Guild | APIGOV-61-001 | Ensure all API responses use standardized error envelope; update controllers/tests. | +| CONCELIER-WEB-OAS-62-001 `Examples expansion` | TODO | Concelier WebService Guild | CONCELIER-OAS-61-002 | Add curated examples for advisory observations/linksets/conflicts; integrate into dev portal. | +| CONCELIER-WEB-OAS-63-001 `Deprecation headers` | TODO | Concelier WebService Guild, API Governance Guild | APIGOV-63-001 | Add Sunset/Deprecation headers for retiring endpoints and update documentation/notifications. 
| diff --git a/src/Concelier/StellaOps.Concelier.sln b/src/Concelier/StellaOps.Concelier.sln new file mode 100644 index 00000000..6316129c --- /dev/null +++ b/src/Concelier/StellaOps.Concelier.sln @@ -0,0 +1,1336 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.0.31903.59 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.WebService", "StellaOps.Concelier.WebService\StellaOps.Concelier.WebService.csproj", "{FB98E71B-AF9D-4593-8306-F989C2CA2BBE}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "__Libraries", "__Libraries", "{41F15E67-7190-CF23-3BC4-77E87134CADD}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Core", "__Libraries\StellaOps.Concelier.Core\StellaOps.Concelier.Core.csproj", "{D93F34C2-5E5E-4CC7-A573-4376AA525838}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Models", "__Libraries\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj", "{841F3EF5-7EB6-4F76-8A37-0AAFEED0DE94}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.RawModels", "__Libraries\StellaOps.Concelier.RawModels\StellaOps.Concelier.RawModels.csproj", "{EEC52FA0-8E78-4FCB-9454-D697F58B2118}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Normalization", "__Libraries\StellaOps.Concelier.Normalization\StellaOps.Concelier.Normalization.csproj", "{628700D6-97A5-4506-BC78-22E2A76C68E3}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Plugin", "..\__Libraries\StellaOps.Plugin\StellaOps.Plugin.csproj", "{A3E52755-5B68-4A33-9078-893A7FEE7D4B}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.DependencyInjection", "..\__Libraries\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj", "{7B48F422-65E3-464B-B029-0766207035EB}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Aoc", "..\Aoc\__Libraries\StellaOps.Aoc\StellaOps.Aoc.csproj", "{A6802486-A8D3-4623-8D81-04ED23F9D312}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Storage.Mongo", "__Libraries\StellaOps.Concelier.Storage.Mongo\StellaOps.Concelier.Storage.Mongo.csproj", "{C926373D-5ACB-4E62-96D5-264EF4C61BE5}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Common", "__Libraries\StellaOps.Concelier.Connector.Common\StellaOps.Concelier.Connector.Common.csproj", "{2D68125A-0ACD-4015-A8FA-B54284B8A3CB}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Merge", "__Libraries\StellaOps.Concelier.Merge\StellaOps.Concelier.Merge.csproj", "{7760219F-6C19-4B61-9015-73BB02005C0B}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Configuration", "..\__Libraries\StellaOps.Configuration\StellaOps.Configuration.csproj", "{EDD39EE5-8341-4BB0-9C30-D829D97C1E65}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Authority.Plugins.Abstractions", "..\Authority\StellaOps.Authority\StellaOps.Authority.Plugins.Abstractions\StellaOps.Authority.Plugins.Abstractions.csproj", "{80E7B08C-2916-4540-A34B-CB581EEEA202}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cryptography", "..\__Libraries\StellaOps.Cryptography\StellaOps.Cryptography.csproj", "{5B04974C-EC04-446E-83C1-EF9686433586}" 
+EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Abstractions", "..\Authority\StellaOps.Authority\StellaOps.Auth.Abstractions\StellaOps.Auth.Abstractions.csproj", "{1282AA12-C27D-4F85-B534-785FEFF52D5F}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Client", "..\Authority\StellaOps.Authority\StellaOps.Auth.Client\StellaOps.Auth.Client.csproj", "{C4EF36C5-AE69-4781-96A5-FB9CCFBBE6CB}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.ServerIntegration", "..\Authority\StellaOps.Authority\StellaOps.Auth.ServerIntegration\StellaOps.Auth.ServerIntegration.csproj", "{A488C9CC-A6CF-46B9-AAB7-F9284FF191D9}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Acsc", "__Libraries\StellaOps.Concelier.Connector.Acsc\StellaOps.Concelier.Connector.Acsc.csproj", "{F87DFC58-EE3E-4E2F-9E17-E6A6924F2998}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Cccs", "__Libraries\StellaOps.Concelier.Connector.Cccs\StellaOps.Concelier.Connector.Cccs.csproj", "{30056CC9-4D34-4C2E-B60D-6D9B12DF0DF4}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.CertBund", "__Libraries\StellaOps.Concelier.Connector.CertBund\StellaOps.Concelier.Connector.CertBund.csproj", "{77FF9993-C811-4389-8BFE-974B4F0AB7C6}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.CertCc", "__Libraries\StellaOps.Concelier.Connector.CertCc\StellaOps.Concelier.Connector.CertCc.csproj", "{4FB18D5B-8D48-4E50-9608-69890B3420F8}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.CertFr", "__Libraries\StellaOps.Concelier.Connector.CertFr\StellaOps.Concelier.Connector.CertFr.csproj", "{585DA0F6-BD2D-4CD9-8CEC-A0A4C9E3DCFE}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.CertIn", "__Libraries\StellaOps.Concelier.Connector.CertIn\StellaOps.Concelier.Connector.CertIn.csproj", "{BAB3573C-C17E-436E-B3D5-F6C0E0B2825B}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Cve", "__Libraries\StellaOps.Concelier.Connector.Cve\StellaOps.Concelier.Connector.Cve.csproj", "{257E2D7B-EA3D-4B33-9546-7B77DA20A517}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Distro.Debian", "__Libraries\StellaOps.Concelier.Connector.Distro.Debian\StellaOps.Concelier.Connector.Distro.Debian.csproj", "{5C9D617D-86B6-4CAA-9981-A438DBFE8BB6}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Distro.RedHat", "__Libraries\StellaOps.Concelier.Connector.Distro.RedHat\StellaOps.Concelier.Connector.Distro.RedHat.csproj", "{C827F73E-68E2-4F6A-8CEA-0425B2D2D466}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Distro.Suse", "__Libraries\StellaOps.Concelier.Connector.Distro.Suse\StellaOps.Concelier.Connector.Distro.Suse.csproj", "{F67EECB0-7DCC-4643-82F3-E020D72BE762}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Distro.Ubuntu", "__Libraries\StellaOps.Concelier.Connector.Distro.Ubuntu\StellaOps.Concelier.Connector.Distro.Ubuntu.csproj", "{2975AE79-F23D-43D6-B075-6659AB6AE105}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Ghsa", 
"__Libraries\StellaOps.Concelier.Connector.Ghsa\StellaOps.Concelier.Connector.Ghsa.csproj", "{667ACFE7-922E-4958-99D6-DD9D7BE8E744}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Ics.Cisa", "__Libraries\StellaOps.Concelier.Connector.Ics.Cisa\StellaOps.Concelier.Connector.Ics.Cisa.csproj", "{D4824290-3F8A-47BD-A368-F63BE593546B}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Ics.Kaspersky", "__Libraries\StellaOps.Concelier.Connector.Ics.Kaspersky\StellaOps.Concelier.Connector.Ics.Kaspersky.csproj", "{B69F3D80-EACD-4A1B-80A0-0B5D7AD941AF}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Jvn", "__Libraries\StellaOps.Concelier.Connector.Jvn\StellaOps.Concelier.Connector.Jvn.csproj", "{33C98234-DF04-40CE-9459-2736AAB0CF6C}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Kev", "__Libraries\StellaOps.Concelier.Connector.Kev\StellaOps.Concelier.Connector.Kev.csproj", "{A6D364F9-3478-4432-9EE1-F4F3DCF125EA}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Kisa", "__Libraries\StellaOps.Concelier.Connector.Kisa\StellaOps.Concelier.Connector.Kisa.csproj", "{B9EDA23E-5754-48AF-8978-DBCBF75134BF}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Nvd", "__Libraries\StellaOps.Concelier.Connector.Nvd\StellaOps.Concelier.Connector.Nvd.csproj", "{9208F373-EDD1-491D-AEF9-FE280B453CD9}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Osv", "__Libraries\StellaOps.Concelier.Connector.Osv\StellaOps.Concelier.Connector.Osv.csproj", "{EC5DE6F3-D158-4261-A4CD-AB81AE154918}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Ru.Bdu", "__Libraries\StellaOps.Concelier.Connector.Ru.Bdu\StellaOps.Concelier.Connector.Ru.Bdu.csproj", "{40591EC3-23C6-4E74-8280-35153641FF21}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Ru.Nkcki", "__Libraries\StellaOps.Concelier.Connector.Ru.Nkcki\StellaOps.Concelier.Connector.Ru.Nkcki.csproj", "{5646F7F2-FEDF-49D1-9053-F8E1B7892695}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.StellaOpsMirror", "__Libraries\StellaOps.Concelier.Connector.StellaOpsMirror\StellaOps.Concelier.Connector.StellaOpsMirror.csproj", "{3C877F0B-3870-452B-AA70-1F9960A4F062}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Adobe", "__Libraries\StellaOps.Concelier.Connector.Vndr.Adobe\StellaOps.Concelier.Connector.Vndr.Adobe.csproj", "{5525AD40-01DA-46DD-B331-DD032DD3C9C0}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Apple", "__Libraries\StellaOps.Concelier.Connector.Vndr.Apple\StellaOps.Concelier.Connector.Vndr.Apple.csproj", "{D6D4DFB9-7ADC-4D10-9904-6A5AD97FFE77}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Chromium", "__Libraries\StellaOps.Concelier.Connector.Vndr.Chromium\StellaOps.Concelier.Connector.Vndr.Chromium.csproj", "{296A426A-E429-4984-8813-AA7EEE3037D5}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Cisco", "__Libraries\StellaOps.Concelier.Connector.Vndr.Cisco\StellaOps.Concelier.Connector.Vndr.Cisco.csproj", 
"{6F1AB15F-8875-4A62-A878-842D463A3B11}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Msrc", "__Libraries\StellaOps.Concelier.Connector.Vndr.Msrc\StellaOps.Concelier.Connector.Vndr.Msrc.csproj", "{CEFF5CDF-63F2-4EE6-9B95-7DB3DC9474B2}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Oracle", "__Libraries\StellaOps.Concelier.Connector.Vndr.Oracle\StellaOps.Concelier.Connector.Vndr.Oracle.csproj", "{8EFE438F-0513-470C-909B-8A1BD62D0E98}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Vmware", "__Libraries\StellaOps.Concelier.Connector.Vndr.Vmware\StellaOps.Concelier.Connector.Vndr.Vmware.csproj", "{60712A8D-FF22-452C-8AC0-22DB33B38180}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Exporter.Json", "__Libraries\StellaOps.Concelier.Exporter.Json\StellaOps.Concelier.Exporter.Json.csproj", "{4097C3CB-7C39-478B-89C2-4D317625EBBF}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Exporter.TrivyDb", "__Libraries\StellaOps.Concelier.Exporter.TrivyDb\StellaOps.Concelier.Exporter.TrivyDb.csproj", "{935D16AC-8EBD-46E4-8D0E-934F3AE961D4}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Testing", "__Libraries\StellaOps.Concelier.Testing\StellaOps.Concelier.Testing.csproj", "{0BC8276D-D726-4C8B-AB2B-122BE18F1112}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "__Tests", "__Tests", "{56BCE1BF-7CBA-7CE8-203D-A88051F1D642}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Acsc.Tests", "__Tests\StellaOps.Concelier.Connector.Acsc.Tests\StellaOps.Concelier.Connector.Acsc.Tests.csproj", "{654CF4EE-9EC5-464C-AF47-EE37329CD46A}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Cccs.Tests", "__Tests\StellaOps.Concelier.Connector.Cccs.Tests\StellaOps.Concelier.Connector.Cccs.Tests.csproj", "{BEF6FA33-E0EA-4ED2-B209-833D41607132}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.CertBund.Tests", "__Tests\StellaOps.Concelier.Connector.CertBund.Tests\StellaOps.Concelier.Connector.CertBund.Tests.csproj", "{B777945B-92DB-4D24-A795-5C900B6FCB92}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.CertCc.Tests", "__Tests\StellaOps.Concelier.Connector.CertCc.Tests\StellaOps.Concelier.Connector.CertCc.Tests.csproj", "{67B08EB0-7140-49C5-9BFA-DEA1A3A06E6A}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.CertFr.Tests", "__Tests\StellaOps.Concelier.Connector.CertFr.Tests\StellaOps.Concelier.Connector.CertFr.Tests.csproj", "{0EDACFF4-DD7B-4FBB-9774-21B909EA8D84}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.CertIn.Tests", "__Tests\StellaOps.Concelier.Connector.CertIn.Tests\StellaOps.Concelier.Connector.CertIn.Tests.csproj", "{0E1CAB5C-649A-47B6-BFE3-E53B5F63B864}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Common.Tests", "__Tests\StellaOps.Concelier.Connector.Common.Tests\StellaOps.Concelier.Connector.Common.Tests.csproj", "{35DDC22F-F6C4-43A8-9E08-AD5E5CF2354B}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Cve.Tests", 
"__Tests\StellaOps.Concelier.Connector.Cve.Tests\StellaOps.Concelier.Connector.Cve.Tests.csproj", "{F90FCF19-0426-4E62-93DC-835712E5B064}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Distro.Debian.Tests", "__Tests\StellaOps.Concelier.Connector.Distro.Debian.Tests\StellaOps.Concelier.Connector.Distro.Debian.Tests.csproj", "{0E84E05F-53CE-4A6E-95F0-62EF14CBC385}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Distro.RedHat.Tests", "__Tests\StellaOps.Concelier.Connector.Distro.RedHat.Tests\StellaOps.Concelier.Connector.Distro.RedHat.Tests.csproj", "{7B55B3B3-BBD2-406B-AB7C-5FB6E29923E4}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Distro.Suse.Tests", "__Tests\StellaOps.Concelier.Connector.Distro.Suse.Tests\StellaOps.Concelier.Connector.Distro.Suse.Tests.csproj", "{4DD7C512-5624-4C6B-B02A-7EDF58242657}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Distro.Ubuntu.Tests", "__Tests\StellaOps.Concelier.Connector.Distro.Ubuntu.Tests\StellaOps.Concelier.Connector.Distro.Ubuntu.Tests.csproj", "{98AA9471-2498-45BC-A58F-B83F4B9A8B75}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Ghsa.Tests", "__Tests\StellaOps.Concelier.Connector.Ghsa.Tests\StellaOps.Concelier.Connector.Ghsa.Tests.csproj", "{7A3BB8C3-27CE-4FB0-996A-11C7A873AB40}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Ics.Cisa.Tests", "__Tests\StellaOps.Concelier.Connector.Ics.Cisa.Tests\StellaOps.Concelier.Connector.Ics.Cisa.Tests.csproj", "{253BAF8F-0CF8-4D1A-B5AA-F19713099173}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Ics.Kaspersky.Tests", "__Tests\StellaOps.Concelier.Connector.Ics.Kaspersky.Tests\StellaOps.Concelier.Connector.Ics.Kaspersky.Tests.csproj", "{57A423CD-4F40-4BAD-A6FC-93D494FCA51A}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Jvn.Tests", "__Tests\StellaOps.Concelier.Connector.Jvn.Tests\StellaOps.Concelier.Connector.Jvn.Tests.csproj", "{07034F70-3E4F-49BF-A181-75443D3B3361}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Kev.Tests", "__Tests\StellaOps.Concelier.Connector.Kev.Tests\StellaOps.Concelier.Connector.Kev.Tests.csproj", "{E9F3F0B8-BEE8-4FE8-8B56-40129DA1F7B1}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Kisa.Tests", "__Tests\StellaOps.Concelier.Connector.Kisa.Tests\StellaOps.Concelier.Connector.Kisa.Tests.csproj", "{9165A6AB-140D-41BC-91BC-44523D1C9978}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Nvd.Tests", "__Tests\StellaOps.Concelier.Connector.Nvd.Tests\StellaOps.Concelier.Connector.Nvd.Tests.csproj", "{A35677A8-170D-4933-B2C3-314A30920766}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Osv.Tests", "__Tests\StellaOps.Concelier.Connector.Osv.Tests\StellaOps.Concelier.Connector.Osv.Tests.csproj", "{E21AD10F-87B8-4C39-BE45-B6C44CE6D841}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Ru.Bdu.Tests", "__Tests\StellaOps.Concelier.Connector.Ru.Bdu.Tests\StellaOps.Concelier.Connector.Ru.Bdu.Tests.csproj", "{1E93E173-53B1-4441-97E0-C60A3FB029D7}" +EndProject 
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Ru.Nkcki.Tests", "__Tests\StellaOps.Concelier.Connector.Ru.Nkcki.Tests\StellaOps.Concelier.Connector.Ru.Nkcki.Tests.csproj", "{92827ACC-284F-44EB-98A7-94B57BA92D27}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.StellaOpsMirror.Tests", "__Tests\StellaOps.Concelier.Connector.StellaOpsMirror.Tests\StellaOps.Concelier.Connector.StellaOpsMirror.Tests.csproj", "{1B017E4D-6F3E-42D4-9418-DA8D76BA2797}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Adobe.Tests", "__Tests\StellaOps.Concelier.Connector.Vndr.Adobe.Tests\StellaOps.Concelier.Connector.Vndr.Adobe.Tests.csproj", "{45A163F4-2569-40D9-8FF6-854AF95C061E}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Apple.Tests", "__Tests\StellaOps.Concelier.Connector.Vndr.Apple.Tests\StellaOps.Concelier.Connector.Vndr.Apple.Tests.csproj", "{17934A3D-6420-48F2-A528-E32A34F0FE55}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Chromium.Tests", "__Tests\StellaOps.Concelier.Connector.Vndr.Chromium.Tests\StellaOps.Concelier.Connector.Vndr.Chromium.Tests.csproj", "{07DE99DB-F550-4F85-96F1-7EDC6B4CF86D}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Cisco.Tests", "__Tests\StellaOps.Concelier.Connector.Vndr.Cisco.Tests\StellaOps.Concelier.Connector.Vndr.Cisco.Tests.csproj", "{D485A847-B3EA-40D8-A56A-459A02C902F8}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Msrc.Tests", "__Tests\StellaOps.Concelier.Connector.Vndr.Msrc.Tests\StellaOps.Concelier.Connector.Vndr.Msrc.Tests.csproj", "{0B716CE2-810A-4143-8434-4DB111E0F3E9}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Oracle.Tests", "__Tests\StellaOps.Concelier.Connector.Vndr.Oracle.Tests\StellaOps.Concelier.Connector.Vndr.Oracle.Tests.csproj", "{2A75CB97-ACF1-43B2-8509-E8226000D7DC}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Vmware.Tests", "__Tests\StellaOps.Concelier.Connector.Vndr.Vmware.Tests\StellaOps.Concelier.Connector.Vndr.Vmware.Tests.csproj", "{3AEC19B5-44F6-4717-B1A0-3A2F04F42565}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Core.Tests", "__Tests\StellaOps.Concelier.Core.Tests\StellaOps.Concelier.Core.Tests.csproj", "{1BB5AE2D-2F7F-4CE9-B472-A113BF85B691}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Exporter.Json.Tests", "__Tests\StellaOps.Concelier.Exporter.Json.Tests\StellaOps.Concelier.Exporter.Json.Tests.csproj", "{258A6D13-F02C-48C6-8506-2C815EBBD1D5}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Exporter.TrivyDb.Tests", "__Tests\StellaOps.Concelier.Exporter.TrivyDb.Tests\StellaOps.Concelier.Exporter.TrivyDb.Tests.csproj", "{39F8D963-3D76-4BEC-BE7B-4AE9C9664211}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Merge.Tests", "__Tests\StellaOps.Concelier.Merge.Tests\StellaOps.Concelier.Merge.Tests.csproj", "{470793D6-A847-41E3-A15D-8D0DFE7CD9A3}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Models.Tests", "__Tests\StellaOps.Concelier.Models.Tests\StellaOps.Concelier.Models.Tests.csproj", 
"{2EB876DE-E940-4A7E-8E3D-804E2E6314DA}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Normalization.Tests", "__Tests\StellaOps.Concelier.Normalization.Tests\StellaOps.Concelier.Normalization.Tests.csproj", "{C4C2037E-B301-4449-96D6-C6B165752E1A}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.RawModels.Tests", "__Tests\StellaOps.Concelier.RawModels.Tests\StellaOps.Concelier.RawModels.Tests.csproj", "{7B995CBB-3D20-4509-9300-EC012C18C4B4}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Storage.Mongo.Tests", "__Tests\StellaOps.Concelier.Storage.Mongo.Tests\StellaOps.Concelier.Storage.Mongo.Tests.csproj", "{9006A5A2-01D8-4A70-AEA7-B7B1987C4A62}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.WebService.Tests", "__Tests\StellaOps.Concelier.WebService.Tests\StellaOps.Concelier.WebService.Tests.csproj", "{664A2577-6DA1-42DA-A213-3253017FA4BF}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|Any CPU = Release|Any CPU + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {FB98E71B-AF9D-4593-8306-F989C2CA2BBE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {FB98E71B-AF9D-4593-8306-F989C2CA2BBE}.Debug|Any CPU.Build.0 = Debug|Any CPU + {FB98E71B-AF9D-4593-8306-F989C2CA2BBE}.Debug|x64.ActiveCfg = Debug|Any CPU + {FB98E71B-AF9D-4593-8306-F989C2CA2BBE}.Debug|x64.Build.0 = Debug|Any CPU + {FB98E71B-AF9D-4593-8306-F989C2CA2BBE}.Debug|x86.ActiveCfg = Debug|Any CPU + {FB98E71B-AF9D-4593-8306-F989C2CA2BBE}.Debug|x86.Build.0 = Debug|Any CPU + {FB98E71B-AF9D-4593-8306-F989C2CA2BBE}.Release|Any CPU.ActiveCfg = Release|Any CPU + {FB98E71B-AF9D-4593-8306-F989C2CA2BBE}.Release|Any CPU.Build.0 = Release|Any CPU + {FB98E71B-AF9D-4593-8306-F989C2CA2BBE}.Release|x64.ActiveCfg = Release|Any CPU + {FB98E71B-AF9D-4593-8306-F989C2CA2BBE}.Release|x64.Build.0 = Release|Any CPU + {FB98E71B-AF9D-4593-8306-F989C2CA2BBE}.Release|x86.ActiveCfg = Release|Any CPU + {FB98E71B-AF9D-4593-8306-F989C2CA2BBE}.Release|x86.Build.0 = Release|Any CPU + {D93F34C2-5E5E-4CC7-A573-4376AA525838}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D93F34C2-5E5E-4CC7-A573-4376AA525838}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D93F34C2-5E5E-4CC7-A573-4376AA525838}.Debug|x64.ActiveCfg = Debug|Any CPU + {D93F34C2-5E5E-4CC7-A573-4376AA525838}.Debug|x64.Build.0 = Debug|Any CPU + {D93F34C2-5E5E-4CC7-A573-4376AA525838}.Debug|x86.ActiveCfg = Debug|Any CPU + {D93F34C2-5E5E-4CC7-A573-4376AA525838}.Debug|x86.Build.0 = Debug|Any CPU + {D93F34C2-5E5E-4CC7-A573-4376AA525838}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D93F34C2-5E5E-4CC7-A573-4376AA525838}.Release|Any CPU.Build.0 = Release|Any CPU + {D93F34C2-5E5E-4CC7-A573-4376AA525838}.Release|x64.ActiveCfg = Release|Any CPU + {D93F34C2-5E5E-4CC7-A573-4376AA525838}.Release|x64.Build.0 = Release|Any CPU + {D93F34C2-5E5E-4CC7-A573-4376AA525838}.Release|x86.ActiveCfg = Release|Any CPU + {D93F34C2-5E5E-4CC7-A573-4376AA525838}.Release|x86.Build.0 = Release|Any CPU + {841F3EF5-7EB6-4F76-8A37-0AAFEED0DE94}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {841F3EF5-7EB6-4F76-8A37-0AAFEED0DE94}.Debug|Any CPU.Build.0 = Debug|Any CPU + {841F3EF5-7EB6-4F76-8A37-0AAFEED0DE94}.Debug|x64.ActiveCfg = Debug|Any CPU + {841F3EF5-7EB6-4F76-8A37-0AAFEED0DE94}.Debug|x64.Build.0 = Debug|Any 
CPU + {841F3EF5-7EB6-4F76-8A37-0AAFEED0DE94}.Debug|x86.ActiveCfg = Debug|Any CPU + {841F3EF5-7EB6-4F76-8A37-0AAFEED0DE94}.Debug|x86.Build.0 = Debug|Any CPU + {841F3EF5-7EB6-4F76-8A37-0AAFEED0DE94}.Release|Any CPU.ActiveCfg = Release|Any CPU + {841F3EF5-7EB6-4F76-8A37-0AAFEED0DE94}.Release|Any CPU.Build.0 = Release|Any CPU + {841F3EF5-7EB6-4F76-8A37-0AAFEED0DE94}.Release|x64.ActiveCfg = Release|Any CPU + {841F3EF5-7EB6-4F76-8A37-0AAFEED0DE94}.Release|x64.Build.0 = Release|Any CPU + {841F3EF5-7EB6-4F76-8A37-0AAFEED0DE94}.Release|x86.ActiveCfg = Release|Any CPU + {841F3EF5-7EB6-4F76-8A37-0AAFEED0DE94}.Release|x86.Build.0 = Release|Any CPU + {EEC52FA0-8E78-4FCB-9454-D697F58B2118}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {EEC52FA0-8E78-4FCB-9454-D697F58B2118}.Debug|Any CPU.Build.0 = Debug|Any CPU + {EEC52FA0-8E78-4FCB-9454-D697F58B2118}.Debug|x64.ActiveCfg = Debug|Any CPU + {EEC52FA0-8E78-4FCB-9454-D697F58B2118}.Debug|x64.Build.0 = Debug|Any CPU + {EEC52FA0-8E78-4FCB-9454-D697F58B2118}.Debug|x86.ActiveCfg = Debug|Any CPU + {EEC52FA0-8E78-4FCB-9454-D697F58B2118}.Debug|x86.Build.0 = Debug|Any CPU + {EEC52FA0-8E78-4FCB-9454-D697F58B2118}.Release|Any CPU.ActiveCfg = Release|Any CPU + {EEC52FA0-8E78-4FCB-9454-D697F58B2118}.Release|Any CPU.Build.0 = Release|Any CPU + {EEC52FA0-8E78-4FCB-9454-D697F58B2118}.Release|x64.ActiveCfg = Release|Any CPU + {EEC52FA0-8E78-4FCB-9454-D697F58B2118}.Release|x64.Build.0 = Release|Any CPU + {EEC52FA0-8E78-4FCB-9454-D697F58B2118}.Release|x86.ActiveCfg = Release|Any CPU + {EEC52FA0-8E78-4FCB-9454-D697F58B2118}.Release|x86.Build.0 = Release|Any CPU + {628700D6-97A5-4506-BC78-22E2A76C68E3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {628700D6-97A5-4506-BC78-22E2A76C68E3}.Debug|Any CPU.Build.0 = Debug|Any CPU + {628700D6-97A5-4506-BC78-22E2A76C68E3}.Debug|x64.ActiveCfg = Debug|Any CPU + {628700D6-97A5-4506-BC78-22E2A76C68E3}.Debug|x64.Build.0 = Debug|Any CPU + {628700D6-97A5-4506-BC78-22E2A76C68E3}.Debug|x86.ActiveCfg = Debug|Any CPU + {628700D6-97A5-4506-BC78-22E2A76C68E3}.Debug|x86.Build.0 = Debug|Any CPU + {628700D6-97A5-4506-BC78-22E2A76C68E3}.Release|Any CPU.ActiveCfg = Release|Any CPU + {628700D6-97A5-4506-BC78-22E2A76C68E3}.Release|Any CPU.Build.0 = Release|Any CPU + {628700D6-97A5-4506-BC78-22E2A76C68E3}.Release|x64.ActiveCfg = Release|Any CPU + {628700D6-97A5-4506-BC78-22E2A76C68E3}.Release|x64.Build.0 = Release|Any CPU + {628700D6-97A5-4506-BC78-22E2A76C68E3}.Release|x86.ActiveCfg = Release|Any CPU + {628700D6-97A5-4506-BC78-22E2A76C68E3}.Release|x86.Build.0 = Release|Any CPU + {A3E52755-5B68-4A33-9078-893A7FEE7D4B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {A3E52755-5B68-4A33-9078-893A7FEE7D4B}.Debug|Any CPU.Build.0 = Debug|Any CPU + {A3E52755-5B68-4A33-9078-893A7FEE7D4B}.Debug|x64.ActiveCfg = Debug|Any CPU + {A3E52755-5B68-4A33-9078-893A7FEE7D4B}.Debug|x64.Build.0 = Debug|Any CPU + {A3E52755-5B68-4A33-9078-893A7FEE7D4B}.Debug|x86.ActiveCfg = Debug|Any CPU + {A3E52755-5B68-4A33-9078-893A7FEE7D4B}.Debug|x86.Build.0 = Debug|Any CPU + {A3E52755-5B68-4A33-9078-893A7FEE7D4B}.Release|Any CPU.ActiveCfg = Release|Any CPU + {A3E52755-5B68-4A33-9078-893A7FEE7D4B}.Release|Any CPU.Build.0 = Release|Any CPU + {A3E52755-5B68-4A33-9078-893A7FEE7D4B}.Release|x64.ActiveCfg = Release|Any CPU + {A3E52755-5B68-4A33-9078-893A7FEE7D4B}.Release|x64.Build.0 = Release|Any CPU + {A3E52755-5B68-4A33-9078-893A7FEE7D4B}.Release|x86.ActiveCfg = Release|Any CPU + {A3E52755-5B68-4A33-9078-893A7FEE7D4B}.Release|x86.Build.0 = Release|Any CPU + {7B48F422-65E3-464B-B029-0766207035EB}.Debug|Any 
CPU.ActiveCfg = Debug|Any CPU + {7B48F422-65E3-464B-B029-0766207035EB}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7B48F422-65E3-464B-B029-0766207035EB}.Debug|x64.ActiveCfg = Debug|Any CPU + {7B48F422-65E3-464B-B029-0766207035EB}.Debug|x64.Build.0 = Debug|Any CPU + {7B48F422-65E3-464B-B029-0766207035EB}.Debug|x86.ActiveCfg = Debug|Any CPU + {7B48F422-65E3-464B-B029-0766207035EB}.Debug|x86.Build.0 = Debug|Any CPU + {7B48F422-65E3-464B-B029-0766207035EB}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7B48F422-65E3-464B-B029-0766207035EB}.Release|Any CPU.Build.0 = Release|Any CPU + {7B48F422-65E3-464B-B029-0766207035EB}.Release|x64.ActiveCfg = Release|Any CPU + {7B48F422-65E3-464B-B029-0766207035EB}.Release|x64.Build.0 = Release|Any CPU + {7B48F422-65E3-464B-B029-0766207035EB}.Release|x86.ActiveCfg = Release|Any CPU + {7B48F422-65E3-464B-B029-0766207035EB}.Release|x86.Build.0 = Release|Any CPU + {A6802486-A8D3-4623-8D81-04ED23F9D312}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {A6802486-A8D3-4623-8D81-04ED23F9D312}.Debug|Any CPU.Build.0 = Debug|Any CPU + {A6802486-A8D3-4623-8D81-04ED23F9D312}.Debug|x64.ActiveCfg = Debug|Any CPU + {A6802486-A8D3-4623-8D81-04ED23F9D312}.Debug|x64.Build.0 = Debug|Any CPU + {A6802486-A8D3-4623-8D81-04ED23F9D312}.Debug|x86.ActiveCfg = Debug|Any CPU + {A6802486-A8D3-4623-8D81-04ED23F9D312}.Debug|x86.Build.0 = Debug|Any CPU + {A6802486-A8D3-4623-8D81-04ED23F9D312}.Release|Any CPU.ActiveCfg = Release|Any CPU + {A6802486-A8D3-4623-8D81-04ED23F9D312}.Release|Any CPU.Build.0 = Release|Any CPU + {A6802486-A8D3-4623-8D81-04ED23F9D312}.Release|x64.ActiveCfg = Release|Any CPU + {A6802486-A8D3-4623-8D81-04ED23F9D312}.Release|x64.Build.0 = Release|Any CPU + {A6802486-A8D3-4623-8D81-04ED23F9D312}.Release|x86.ActiveCfg = Release|Any CPU + {A6802486-A8D3-4623-8D81-04ED23F9D312}.Release|x86.Build.0 = Release|Any CPU + {C926373D-5ACB-4E62-96D5-264EF4C61BE5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C926373D-5ACB-4E62-96D5-264EF4C61BE5}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C926373D-5ACB-4E62-96D5-264EF4C61BE5}.Debug|x64.ActiveCfg = Debug|Any CPU + {C926373D-5ACB-4E62-96D5-264EF4C61BE5}.Debug|x64.Build.0 = Debug|Any CPU + {C926373D-5ACB-4E62-96D5-264EF4C61BE5}.Debug|x86.ActiveCfg = Debug|Any CPU + {C926373D-5ACB-4E62-96D5-264EF4C61BE5}.Debug|x86.Build.0 = Debug|Any CPU + {C926373D-5ACB-4E62-96D5-264EF4C61BE5}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C926373D-5ACB-4E62-96D5-264EF4C61BE5}.Release|Any CPU.Build.0 = Release|Any CPU + {C926373D-5ACB-4E62-96D5-264EF4C61BE5}.Release|x64.ActiveCfg = Release|Any CPU + {C926373D-5ACB-4E62-96D5-264EF4C61BE5}.Release|x64.Build.0 = Release|Any CPU + {C926373D-5ACB-4E62-96D5-264EF4C61BE5}.Release|x86.ActiveCfg = Release|Any CPU + {C926373D-5ACB-4E62-96D5-264EF4C61BE5}.Release|x86.Build.0 = Release|Any CPU + {2D68125A-0ACD-4015-A8FA-B54284B8A3CB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {2D68125A-0ACD-4015-A8FA-B54284B8A3CB}.Debug|Any CPU.Build.0 = Debug|Any CPU + {2D68125A-0ACD-4015-A8FA-B54284B8A3CB}.Debug|x64.ActiveCfg = Debug|Any CPU + {2D68125A-0ACD-4015-A8FA-B54284B8A3CB}.Debug|x64.Build.0 = Debug|Any CPU + {2D68125A-0ACD-4015-A8FA-B54284B8A3CB}.Debug|x86.ActiveCfg = Debug|Any CPU + {2D68125A-0ACD-4015-A8FA-B54284B8A3CB}.Debug|x86.Build.0 = Debug|Any CPU + {2D68125A-0ACD-4015-A8FA-B54284B8A3CB}.Release|Any CPU.ActiveCfg = Release|Any CPU + {2D68125A-0ACD-4015-A8FA-B54284B8A3CB}.Release|Any CPU.Build.0 = Release|Any CPU + {2D68125A-0ACD-4015-A8FA-B54284B8A3CB}.Release|x64.ActiveCfg = Release|Any CPU + 
+ [ProjectConfigurationPlatforms entries, condensed from a flattened hunk: every project GUID listed below
+  receives the standard solution mappings, i.e. for each configuration (Debug, Release) and platform
+  (Any CPU, x64, x86) both .ActiveCfg and .Build.0 resolve to Debug|Any CPU or Release|Any CPU respectively.
+  The block for the first GUID begins at Release|x64.Build.0 and the block for the last GUID stops at
+  Release|x64.ActiveCfg, where this hunk is cut off.]
+ [Project GUIDs, in order:
+  {2D68125A-0ACD-4015-A8FA-B54284B8A3CB}, {7760219F-6C19-4B61-9015-73BB02005C0B},
+  {EDD39EE5-8341-4BB0-9C30-D829D97C1E65}, {80E7B08C-2916-4540-A34B-CB581EEEA202},
+  {5B04974C-EC04-446E-83C1-EF9686433586}, {1282AA12-C27D-4F85-B534-785FEFF52D5F},
+  {C4EF36C5-AE69-4781-96A5-FB9CCFBBE6CB}, {A488C9CC-A6CF-46B9-AAB7-F9284FF191D9},
+  {F87DFC58-EE3E-4E2F-9E17-E6A6924F2998}, {30056CC9-4D34-4C2E-B60D-6D9B12DF0DF4},
+  {77FF9993-C811-4389-8BFE-974B4F0AB7C6}, {4FB18D5B-8D48-4E50-9608-69890B3420F8},
+  {585DA0F6-BD2D-4CD9-8CEC-A0A4C9E3DCFE}, {BAB3573C-C17E-436E-B3D5-F6C0E0B2825B},
+  {257E2D7B-EA3D-4B33-9546-7B77DA20A517}, {5C9D617D-86B6-4CAA-9981-A438DBFE8BB6},
+  {C827F73E-68E2-4F6A-8CEA-0425B2D2D466}, {F67EECB0-7DCC-4643-82F3-E020D72BE762},
+  {2975AE79-F23D-43D6-B075-6659AB6AE105}, {667ACFE7-922E-4958-99D6-DD9D7BE8E744},
+  {D4824290-3F8A-47BD-A368-F63BE593546B}, {B69F3D80-EACD-4A1B-80A0-0B5D7AD941AF},
+  {33C98234-DF04-40CE-9459-2736AAB0CF6C}, {A6D364F9-3478-4432-9EE1-F4F3DCF125EA},
+  {B9EDA23E-5754-48AF-8978-DBCBF75134BF}, {9208F373-EDD1-491D-AEF9-FE280B453CD9},
+  {EC5DE6F3-D158-4261-A4CD-AB81AE154918}, {40591EC3-23C6-4E74-8280-35153641FF21},
+  {5646F7F2-FEDF-49D1-9053-F8E1B7892695}, {3C877F0B-3870-452B-AA70-1F9960A4F062},
+  {5525AD40-01DA-46DD-B331-DD032DD3C9C0}, {D6D4DFB9-7ADC-4D10-9904-6A5AD97FFE77},
+  {296A426A-E429-4984-8813-AA7EEE3037D5}, {6F1AB15F-8875-4A62-A878-842D463A3B11},
+  {CEFF5CDF-63F2-4EE6-9B95-7DB3DC9474B2}, {8EFE438F-0513-470C-909B-8A1BD62D0E98},
+  {60712A8D-FF22-452C-8AC0-22DB33B38180}, {4097C3CB-7C39-478B-89C2-4D317625EBBF},
+  {935D16AC-8EBD-46E4-8D0E-934F3AE961D4}, {0BC8276D-D726-4C8B-AB2B-122BE18F1112},
+  {654CF4EE-9EC5-464C-AF47-EE37329CD46A}, {BEF6FA33-E0EA-4ED2-B209-833D41607132},
+  {B777945B-92DB-4D24-A795-5C900B6FCB92}, {67B08EB0-7140-49C5-9BFA-DEA1A3A06E6A},
+  {0EDACFF4-DD7B-4FBB-9774-21B909EA8D84}, {0E1CAB5C-649A-47B6-BFE3-E53B5F63B864},
+  {35DDC22F-F6C4-43A8-9E08-AD5E5CF2354B}, {F90FCF19-0426-4E62-93DC-835712E5B064},
+  {0E84E05F-53CE-4A6E-95F0-62EF14CBC385}, {7B55B3B3-BBD2-406B-AB7C-5FB6E29923E4},
+  {4DD7C512-5624-4C6B-B02A-7EDF58242657}, {98AA9471-2498-45BC-A58F-B83F4B9A8B75},
+  {7A3BB8C3-27CE-4FB0-996A-11C7A873AB40}, {253BAF8F-0CF8-4D1A-B5AA-F19713099173},
+  {57A423CD-4F40-4BAD-A6FC-93D494FCA51A}, {07034F70-3E4F-49BF-A181-75443D3B3361},
+  {E9F3F0B8-BEE8-4FE8-8B56-40129DA1F7B1}, {9165A6AB-140D-41BC-91BC-44523D1C9978},
+  {A35677A8-170D-4933-B2C3-314A30920766}, {E21AD10F-87B8-4C39-BE45-B6C44CE6D841},
+  {1E93E173-53B1-4441-97E0-C60A3FB029D7}, {92827ACC-284F-44EB-98A7-94B57BA92D27},
+  {1B017E4D-6F3E-42D4-9418-DA8D76BA2797}, {45A163F4-2569-40D9-8FF6-854AF95C061E},
+  {17934A3D-6420-48F2-A528-E32A34F0FE55}, {07DE99DB-F550-4F85-96F1-7EDC6B4CF86D},
+  {D485A847-B3EA-40D8-A56A-459A02C902F8}, {0B716CE2-810A-4143-8434-4DB111E0F3E9},
+  {2A75CB97-ACF1-43B2-8509-E8226000D7DC}, {3AEC19B5-44F6-4717-B1A0-3A2F04F42565},
+  {1BB5AE2D-2F7F-4CE9-B472-A113BF85B691}, {258A6D13-F02C-48C6-8506-2C815EBBD1D5},
+  {39F8D963-3D76-4BEC-BE7B-4AE9C9664211}, {470793D6-A847-41E3-A15D-8D0DFE7CD9A3},
+  {2EB876DE-E940-4A7E-8E3D-804E2E6314DA}, {C4C2037E-B301-4449-96D6-C6B165752E1A},
+  {7B995CBB-3D20-4509-9300-EC012C18C4B4}, {9006A5A2-01D8-4A70-AEA7-B7B1987C4A62},
+  {664A2577-6DA1-42DA-A213-3253017FA4BF}]
{664A2577-6DA1-42DA-A213-3253017FA4BF}.Release|x64.Build.0 = Release|Any CPU + {664A2577-6DA1-42DA-A213-3253017FA4BF}.Release|x86.ActiveCfg = Release|Any CPU + {664A2577-6DA1-42DA-A213-3253017FA4BF}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(NestedProjects) = preSolution + {D93F34C2-5E5E-4CC7-A573-4376AA525838} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {841F3EF5-7EB6-4F76-8A37-0AAFEED0DE94} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {EEC52FA0-8E78-4FCB-9454-D697F58B2118} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {628700D6-97A5-4506-BC78-22E2A76C68E3} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {C926373D-5ACB-4E62-96D5-264EF4C61BE5} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {2D68125A-0ACD-4015-A8FA-B54284B8A3CB} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {7760219F-6C19-4B61-9015-73BB02005C0B} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {F87DFC58-EE3E-4E2F-9E17-E6A6924F2998} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {30056CC9-4D34-4C2E-B60D-6D9B12DF0DF4} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {77FF9993-C811-4389-8BFE-974B4F0AB7C6} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {4FB18D5B-8D48-4E50-9608-69890B3420F8} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {585DA0F6-BD2D-4CD9-8CEC-A0A4C9E3DCFE} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {BAB3573C-C17E-436E-B3D5-F6C0E0B2825B} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {257E2D7B-EA3D-4B33-9546-7B77DA20A517} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {5C9D617D-86B6-4CAA-9981-A438DBFE8BB6} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {C827F73E-68E2-4F6A-8CEA-0425B2D2D466} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {F67EECB0-7DCC-4643-82F3-E020D72BE762} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {2975AE79-F23D-43D6-B075-6659AB6AE105} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {667ACFE7-922E-4958-99D6-DD9D7BE8E744} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {D4824290-3F8A-47BD-A368-F63BE593546B} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {B69F3D80-EACD-4A1B-80A0-0B5D7AD941AF} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {33C98234-DF04-40CE-9459-2736AAB0CF6C} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {A6D364F9-3478-4432-9EE1-F4F3DCF125EA} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {B9EDA23E-5754-48AF-8978-DBCBF75134BF} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {9208F373-EDD1-491D-AEF9-FE280B453CD9} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {EC5DE6F3-D158-4261-A4CD-AB81AE154918} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {40591EC3-23C6-4E74-8280-35153641FF21} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {5646F7F2-FEDF-49D1-9053-F8E1B7892695} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {3C877F0B-3870-452B-AA70-1F9960A4F062} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {5525AD40-01DA-46DD-B331-DD032DD3C9C0} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {D6D4DFB9-7ADC-4D10-9904-6A5AD97FFE77} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {296A426A-E429-4984-8813-AA7EEE3037D5} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {6F1AB15F-8875-4A62-A878-842D463A3B11} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {CEFF5CDF-63F2-4EE6-9B95-7DB3DC9474B2} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {8EFE438F-0513-470C-909B-8A1BD62D0E98} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {60712A8D-FF22-452C-8AC0-22DB33B38180} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {4097C3CB-7C39-478B-89C2-4D317625EBBF} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {935D16AC-8EBD-46E4-8D0E-934F3AE961D4} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {0BC8276D-D726-4C8B-AB2B-122BE18F1112} 
= {41F15E67-7190-CF23-3BC4-77E87134CADD} + {654CF4EE-9EC5-464C-AF47-EE37329CD46A} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {BEF6FA33-E0EA-4ED2-B209-833D41607132} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {B777945B-92DB-4D24-A795-5C900B6FCB92} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {67B08EB0-7140-49C5-9BFA-DEA1A3A06E6A} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {0EDACFF4-DD7B-4FBB-9774-21B909EA8D84} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {0E1CAB5C-649A-47B6-BFE3-E53B5F63B864} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {35DDC22F-F6C4-43A8-9E08-AD5E5CF2354B} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {F90FCF19-0426-4E62-93DC-835712E5B064} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {0E84E05F-53CE-4A6E-95F0-62EF14CBC385} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {7B55B3B3-BBD2-406B-AB7C-5FB6E29923E4} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {4DD7C512-5624-4C6B-B02A-7EDF58242657} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {98AA9471-2498-45BC-A58F-B83F4B9A8B75} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {7A3BB8C3-27CE-4FB0-996A-11C7A873AB40} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {253BAF8F-0CF8-4D1A-B5AA-F19713099173} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {57A423CD-4F40-4BAD-A6FC-93D494FCA51A} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {07034F70-3E4F-49BF-A181-75443D3B3361} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {E9F3F0B8-BEE8-4FE8-8B56-40129DA1F7B1} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {9165A6AB-140D-41BC-91BC-44523D1C9978} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {A35677A8-170D-4933-B2C3-314A30920766} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {E21AD10F-87B8-4C39-BE45-B6C44CE6D841} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {1E93E173-53B1-4441-97E0-C60A3FB029D7} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {92827ACC-284F-44EB-98A7-94B57BA92D27} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {1B017E4D-6F3E-42D4-9418-DA8D76BA2797} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {45A163F4-2569-40D9-8FF6-854AF95C061E} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {17934A3D-6420-48F2-A528-E32A34F0FE55} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {07DE99DB-F550-4F85-96F1-7EDC6B4CF86D} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {D485A847-B3EA-40D8-A56A-459A02C902F8} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {0B716CE2-810A-4143-8434-4DB111E0F3E9} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {2A75CB97-ACF1-43B2-8509-E8226000D7DC} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {3AEC19B5-44F6-4717-B1A0-3A2F04F42565} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {1BB5AE2D-2F7F-4CE9-B472-A113BF85B691} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {258A6D13-F02C-48C6-8506-2C815EBBD1D5} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {39F8D963-3D76-4BEC-BE7B-4AE9C9664211} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {470793D6-A847-41E3-A15D-8D0DFE7CD9A3} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {2EB876DE-E940-4A7E-8E3D-804E2E6314DA} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {C4C2037E-B301-4449-96D6-C6B165752E1A} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {7B995CBB-3D20-4509-9300-EC012C18C4B4} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {9006A5A2-01D8-4A70-AEA7-B7B1987C4A62} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {664A2577-6DA1-42DA-A213-3253017FA4BF} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + EndGlobalSection +EndGlobal diff --git a/src/StellaOps.Concelier.Connector.Acsc/AGENTS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Acsc/AGENTS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.Acsc/AGENTS.md rename to 
src/Concelier/__Libraries/StellaOps.Concelier.Connector.Acsc/AGENTS.md diff --git a/src/StellaOps.Concelier.Connector.Acsc/AcscConnector.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Acsc/AcscConnector.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Acsc/AcscConnector.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Acsc/AcscConnector.cs diff --git a/src/StellaOps.Concelier.Connector.Acsc/AcscConnectorPlugin.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Acsc/AcscConnectorPlugin.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Acsc/AcscConnectorPlugin.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Acsc/AcscConnectorPlugin.cs diff --git a/src/StellaOps.Concelier.Connector.Acsc/AcscDependencyInjectionRoutine.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Acsc/AcscDependencyInjectionRoutine.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Acsc/AcscDependencyInjectionRoutine.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Acsc/AcscDependencyInjectionRoutine.cs diff --git a/src/StellaOps.Concelier.Connector.Acsc/AcscServiceCollectionExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Acsc/AcscServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Acsc/AcscServiceCollectionExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Acsc/AcscServiceCollectionExtensions.cs diff --git a/src/StellaOps.Concelier.Connector.Acsc/Configuration/AcscFeedOptions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Acsc/Configuration/AcscFeedOptions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Acsc/Configuration/AcscFeedOptions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Acsc/Configuration/AcscFeedOptions.cs diff --git a/src/StellaOps.Concelier.Connector.Acsc/Configuration/AcscOptions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Acsc/Configuration/AcscOptions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Acsc/Configuration/AcscOptions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Acsc/Configuration/AcscOptions.cs diff --git a/src/StellaOps.Concelier.Connector.Acsc/Internal/AcscCursor.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Acsc/Internal/AcscCursor.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Acsc/Internal/AcscCursor.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Acsc/Internal/AcscCursor.cs diff --git a/src/StellaOps.Concelier.Connector.Acsc/Internal/AcscDiagnostics.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Acsc/Internal/AcscDiagnostics.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Acsc/Internal/AcscDiagnostics.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Acsc/Internal/AcscDiagnostics.cs diff --git a/src/StellaOps.Concelier.Connector.Acsc/Internal/AcscDocumentMetadata.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Acsc/Internal/AcscDocumentMetadata.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Acsc/Internal/AcscDocumentMetadata.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Acsc/Internal/AcscDocumentMetadata.cs diff --git a/src/StellaOps.Concelier.Connector.Acsc/Internal/AcscDto.cs 
b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Acsc/Internal/AcscDto.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Acsc/Internal/AcscDto.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Acsc/Internal/AcscDto.cs diff --git a/src/StellaOps.Concelier.Connector.Acsc/Internal/AcscFeedParser.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Acsc/Internal/AcscFeedParser.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Acsc/Internal/AcscFeedParser.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Acsc/Internal/AcscFeedParser.cs diff --git a/src/StellaOps.Concelier.Connector.Acsc/Internal/AcscMapper.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Acsc/Internal/AcscMapper.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Acsc/Internal/AcscMapper.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Acsc/Internal/AcscMapper.cs diff --git a/src/StellaOps.Concelier.Connector.Acsc/Jobs.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Acsc/Jobs.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Acsc/Jobs.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Acsc/Jobs.cs diff --git a/src/StellaOps.Concelier.Connector.Acsc/Properties/AssemblyInfo.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Acsc/Properties/AssemblyInfo.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Acsc/Properties/AssemblyInfo.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Acsc/Properties/AssemblyInfo.cs diff --git a/src/StellaOps.Concelier.Connector.Acsc/README.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Acsc/README.md similarity index 95% rename from src/StellaOps.Concelier.Connector.Acsc/README.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Acsc/README.md index 469b2b06..430cbf8d 100644 --- a/src/StellaOps.Concelier.Connector.Acsc/README.md +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Acsc/README.md @@ -1,68 +1,68 @@ -## StellaOps.Concelier.Connector.Acsc - -Australian Cyber Security Centre (ACSC) connector that ingests RSS/Atom advisories, sanitises embedded HTML, and maps entries into canonical `Advisory` records for Concelier. - -### Configuration -Settings live under `concelier:sources:acsc` (see `AcscOptions`): - -| Setting | Description | Default | -| --- | --- | --- | -| `baseEndpoint` | Base URI for direct ACSC requests (trailing slash required). | `https://www.cyber.gov.au/` | -| `relayEndpoint` | Optional relay host to fall back to when Akamai refuses HTTP/2. | empty | -| `preferRelayByDefault` | Default endpoint preference when no cursor state exists. | `false` | -| `enableRelayFallback` | Allows automatic relay fallback when direct fetch fails. | `true` | -| `forceRelay` | Forces all fetches through the relay (skips direct attempts). | `false` | -| `feeds` | Array of feed descriptors (`slug`, `relativePath`, `enabled`). | alerts/advisories enabled | -| `requestTimeout` | Per-request timeout override. | 45 seconds | -| `failureBackoff` | Backoff window when fetch fails. | 5 minutes | -| `initialBackfill` | Sliding window used to seed published cursors. | 120 days | -| `userAgent` | Outbound `User-Agent` header. | `StellaOps/Concelier (+https://stella-ops.org)` | -| `requestVersion`/`versionPolicy` | HTTP version negotiation knobs. 
| HTTP/2 with downgrade | - -The dependency injection routine registers the connector plus scheduled jobs: - -| Job | Cron | Purpose | -| --- | --- | --- | -| `source:acsc:fetch` | `7,37 * * * *` | Fetch RSS/Atom feeds (direct + relay fallback). | -| `source:acsc:parse` | `12,42 * * * *` | Persist sanitised DTOs (`acsc.feed.v1`). | -| `source:acsc:map` | `17,47 * * * *` | Map DTO entries into canonical advisories. | -| `source:acsc:probe` | `25,55 * * * *` | Verify direct endpoint health and adjust cursor preference. | - -### Metrics -Emitted via `AcscDiagnostics` (`Meter` = `StellaOps.Concelier.Connector.Acsc`): - -| Instrument | Unit | Description | -| --- | --- | --- | -| `acsc.fetch.attempts` | operations | Feed fetch attempts (tags: `feed`, `mode`). | -| `acsc.fetch.success` | operations | Successful fetches. | -| `acsc.fetch.failures` | operations | Failed fetches before retry backoff. | -| `acsc.fetch.unchanged` | operations | 304 Not Modified responses. | -| `acsc.fetch.fallbacks` | operations | Relay fallbacks triggered (`reason` tag). | -| `acsc.cursor.published_updates` | feeds | Published cursor updates per feed slug. | -| `acsc.parse.attempts` | documents | Parse attempts per feed. | -| `acsc.parse.success` | documents | Successful RSS → DTO conversions. | -| `acsc.parse.failures` | documents | Parse failures (tags: `feed`, `reason`). | -| `acsc.map.success` | advisories | Advisories emitted from a mapping pass. | - -### Logging -Key log messages include: -- Fetch successes/failures, HTTP status codes, and relay fallbacks. -- Parse failures with reasons (download, schema, sanitisation). -- Mapping summaries showing advisory counts per document. -- Probe results toggling relay usage. - -Logs include feed slug metadata for troubleshooting parallel ingestion. - -### Tests & fixtures -`StellaOps.Concelier.Connector.Acsc.Tests` exercises the fetch→parse→map pipeline using canned RSS content. Deterministic snapshots live in `Acsc/Fixtures`. To refresh them after intentional behavioural changes: - -```bash -UPDATE_ACSC_FIXTURES=1 dotnet test src/StellaOps.Concelier.Connector.Acsc.Tests/StellaOps.Concelier.Connector.Acsc.Tests.csproj -``` - -Remember to review the generated `.actual.json` files when assertions fail without fixture updates. - -### Operational notes -- Keep the relay endpoint allowlisted for air-gapped deployments; the probe job will automatically switch back to direct fetching when Akamai stabilises. -- Mapping currently emits vendor `affectedPackages` from “Systems/Products affected” fields; expand range primitives once structured version data appears in ACSC feeds. -- The connector is offline-friendly—no outbound calls beyond the configured feeds. +## StellaOps.Concelier.Connector.Acsc + +Australian Cyber Security Centre (ACSC) connector that ingests RSS/Atom advisories, sanitises embedded HTML, and maps entries into canonical `Advisory` records for Concelier. + +### Configuration +Settings live under `concelier:sources:acsc` (see `AcscOptions`): + +| Setting | Description | Default | +| --- | --- | --- | +| `baseEndpoint` | Base URI for direct ACSC requests (trailing slash required). | `https://www.cyber.gov.au/` | +| `relayEndpoint` | Optional relay host to fall back to when Akamai refuses HTTP/2. | empty | +| `preferRelayByDefault` | Default endpoint preference when no cursor state exists. | `false` | +| `enableRelayFallback` | Allows automatic relay fallback when direct fetch fails. 
| `true` | +| `forceRelay` | Forces all fetches through the relay (skips direct attempts). | `false` | +| `feeds` | Array of feed descriptors (`slug`, `relativePath`, `enabled`). | alerts/advisories enabled | +| `requestTimeout` | Per-request timeout override. | 45 seconds | +| `failureBackoff` | Backoff window when fetch fails. | 5 minutes | +| `initialBackfill` | Sliding window used to seed published cursors. | 120 days | +| `userAgent` | Outbound `User-Agent` header. | `StellaOps/Concelier (+https://stella-ops.org)` | +| `requestVersion`/`versionPolicy` | HTTP version negotiation knobs. | HTTP/2 with downgrade | + +The dependency injection routine registers the connector plus scheduled jobs: + +| Job | Cron | Purpose | +| --- | --- | --- | +| `source:acsc:fetch` | `7,37 * * * *` | Fetch RSS/Atom feeds (direct + relay fallback). | +| `source:acsc:parse` | `12,42 * * * *` | Persist sanitised DTOs (`acsc.feed.v1`). | +| `source:acsc:map` | `17,47 * * * *` | Map DTO entries into canonical advisories. | +| `source:acsc:probe` | `25,55 * * * *` | Verify direct endpoint health and adjust cursor preference. | + +### Metrics +Emitted via `AcscDiagnostics` (`Meter` = `StellaOps.Concelier.Connector.Acsc`): + +| Instrument | Unit | Description | +| --- | --- | --- | +| `acsc.fetch.attempts` | operations | Feed fetch attempts (tags: `feed`, `mode`). | +| `acsc.fetch.success` | operations | Successful fetches. | +| `acsc.fetch.failures` | operations | Failed fetches before retry backoff. | +| `acsc.fetch.unchanged` | operations | 304 Not Modified responses. | +| `acsc.fetch.fallbacks` | operations | Relay fallbacks triggered (`reason` tag). | +| `acsc.cursor.published_updates` | feeds | Published cursor updates per feed slug. | +| `acsc.parse.attempts` | documents | Parse attempts per feed. | +| `acsc.parse.success` | documents | Successful RSS → DTO conversions. | +| `acsc.parse.failures` | documents | Parse failures (tags: `feed`, `reason`). | +| `acsc.map.success` | advisories | Advisories emitted from a mapping pass. | + +### Logging +Key log messages include: +- Fetch successes/failures, HTTP status codes, and relay fallbacks. +- Parse failures with reasons (download, schema, sanitisation). +- Mapping summaries showing advisory counts per document. +- Probe results toggling relay usage. + +Logs include feed slug metadata for troubleshooting parallel ingestion. + +### Tests & fixtures +`StellaOps.Concelier.Connector.Acsc.Tests` exercises the fetch→parse→map pipeline using canned RSS content. Deterministic snapshots live in `Acsc/Fixtures`. To refresh them after intentional behavioural changes: + +```bash +UPDATE_ACSC_FIXTURES=1 dotnet test src/Concelier/__Tests/StellaOps.Concelier.Connector.Acsc.Tests/StellaOps.Concelier.Connector.Acsc.Tests.csproj +``` + +Remember to review the generated `.actual.json` files when assertions fail without fixture updates. + +### Operational notes +- Keep the relay endpoint allowlisted for air-gapped deployments; the probe job will automatically switch back to direct fetching when Akamai stabilises. +- Mapping currently emits vendor `affectedPackages` from “Systems/Products affected” fields; expand range primitives once structured version data appears in ACSC feeds. +- The connector is offline-friendly—no outbound calls beyond the configured feeds. 
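For orientation, here is a minimal C# sketch of how the `concelier:sources:acsc` settings tabled in the README above might be bound with `Microsoft.Extensions.Configuration`. The `AcscSettingsSketch` type and the sample key values are illustrative assumptions, not the connector's actual `AcscOptions` shape.

```csharp
// Illustrative sketch only: binds a few of the concelier:sources:acsc keys listed
// in the README table above. AcscSettingsSketch is a stand-in, not the real AcscOptions.
using System;
using System.Collections.Generic;
using Microsoft.Extensions.Configuration;

var configuration = new ConfigurationBuilder()
    .AddInMemoryCollection(new Dictionary<string, string?>
    {
        ["concelier:sources:acsc:baseEndpoint"] = "https://www.cyber.gov.au/",
        ["concelier:sources:acsc:enableRelayFallback"] = "true",
        ["concelier:sources:acsc:requestTimeout"] = "00:00:45",      // 45 seconds
        ["concelier:sources:acsc:initialBackfill"] = "120.00:00:00", // 120 days
    })
    .Build();

var settings = configuration
    .GetSection("concelier:sources:acsc")
    .Get<AcscSettingsSketch>();

Console.WriteLine($"Base endpoint: {settings?.BaseEndpoint}");
Console.WriteLine($"Relay fallback enabled: {settings?.EnableRelayFallback}");

// Stand-in options type; property names mirror the configuration keys above.
public sealed class AcscSettingsSketch
{
    public Uri? BaseEndpoint { get; set; }
    public bool EnableRelayFallback { get; set; } = true;
    public TimeSpan RequestTimeout { get; set; } = TimeSpan.FromSeconds(45);
    public TimeSpan InitialBackfill { get; set; } = TimeSpan.FromDays(120);
}
```

In a real host the section would be registered through the connector's dependency-injection routine rather than bound ad hoc as above.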
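Likewise, a hedged sketch of how counters like those in the README's metrics table could be emitted with `System.Diagnostics.Metrics`. The meter name and instrument names follow the table, but the code is hypothetical and not the actual `AcscDiagnostics` implementation.

```csharp
// Hypothetical sketch of emitting ACSC-style counters; not the real AcscDiagnostics.
using System.Collections.Generic;
using System.Diagnostics.Metrics;

var meter = new Meter("StellaOps.Concelier.Connector.Acsc");

// Instrument names follow the metrics table; tags mirror the documented "feed"/"mode"/"reason" tags.
var fetchAttempts = meter.CreateCounter<long>("acsc.fetch.attempts", unit: "operations");
var fetchFallbacks = meter.CreateCounter<long>("acsc.fetch.fallbacks", unit: "operations");

// Record one direct fetch attempt for the "alerts" feed...
fetchAttempts.Add(1,
    new KeyValuePair<string, object?>("feed", "alerts"),
    new KeyValuePair<string, object?>("mode", "direct"));

// ...and a relay fallback attributed to an assumed failure reason.
fetchFallbacks.Add(1,
    new KeyValuePair<string, object?>("feed", "alerts"),
    new KeyValuePair<string, object?>("reason", "http2-refused"));
```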
diff --git a/src/StellaOps.Concelier.Connector.Acsc/StellaOps.Concelier.Connector.Acsc.csproj b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Acsc/StellaOps.Concelier.Connector.Acsc.csproj similarity index 81% rename from src/StellaOps.Concelier.Connector.Acsc/StellaOps.Concelier.Connector.Acsc.csproj rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Acsc/StellaOps.Concelier.Connector.Acsc.csproj index cfae3203..0c33d873 100644 --- a/src/StellaOps.Concelier.Connector.Acsc/StellaOps.Concelier.Connector.Acsc.csproj +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Acsc/StellaOps.Concelier.Connector.Acsc.csproj @@ -1,18 +1,18 @@ - - - - net10.0 - enable - enable - - - - + + + + + net10.0 + enable + enable + + + + - - + \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Acsc/TASKS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Acsc/TASKS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.Acsc/TASKS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Acsc/TASKS.md diff --git a/src/StellaOps.Concelier.Connector.Cccs/AGENTS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cccs/AGENTS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.Cccs/AGENTS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cccs/AGENTS.md diff --git a/src/StellaOps.Concelier.Connector.Cccs/CccsConnector.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cccs/CccsConnector.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Cccs/CccsConnector.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cccs/CccsConnector.cs diff --git a/src/StellaOps.Concelier.Connector.Cccs/CccsConnectorPlugin.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cccs/CccsConnectorPlugin.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Cccs/CccsConnectorPlugin.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cccs/CccsConnectorPlugin.cs diff --git a/src/StellaOps.Concelier.Connector.Cccs/CccsDependencyInjectionRoutine.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cccs/CccsDependencyInjectionRoutine.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Cccs/CccsDependencyInjectionRoutine.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cccs/CccsDependencyInjectionRoutine.cs diff --git a/src/StellaOps.Concelier.Connector.Cccs/CccsServiceCollectionExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cccs/CccsServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Cccs/CccsServiceCollectionExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cccs/CccsServiceCollectionExtensions.cs diff --git a/src/StellaOps.Concelier.Connector.Cccs/Configuration/CccsOptions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cccs/Configuration/CccsOptions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Cccs/Configuration/CccsOptions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cccs/Configuration/CccsOptions.cs diff --git a/src/StellaOps.Concelier.Connector.Cccs/Internal/CccsAdvisoryDto.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cccs/Internal/CccsAdvisoryDto.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Cccs/Internal/CccsAdvisoryDto.cs rename to 
src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cccs/Internal/CccsAdvisoryDto.cs diff --git a/src/StellaOps.Concelier.Connector.Cccs/Internal/CccsCursor.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cccs/Internal/CccsCursor.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Cccs/Internal/CccsCursor.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cccs/Internal/CccsCursor.cs diff --git a/src/StellaOps.Concelier.Connector.Cccs/Internal/CccsDiagnostics.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cccs/Internal/CccsDiagnostics.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Cccs/Internal/CccsDiagnostics.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cccs/Internal/CccsDiagnostics.cs diff --git a/src/StellaOps.Concelier.Connector.Cccs/Internal/CccsFeedClient.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cccs/Internal/CccsFeedClient.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Cccs/Internal/CccsFeedClient.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cccs/Internal/CccsFeedClient.cs diff --git a/src/StellaOps.Concelier.Connector.Cccs/Internal/CccsFeedModels.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cccs/Internal/CccsFeedModels.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Cccs/Internal/CccsFeedModels.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cccs/Internal/CccsFeedModels.cs diff --git a/src/StellaOps.Concelier.Connector.Cccs/Internal/CccsHtmlParser.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cccs/Internal/CccsHtmlParser.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Cccs/Internal/CccsHtmlParser.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cccs/Internal/CccsHtmlParser.cs diff --git a/src/StellaOps.Concelier.Connector.Cccs/Internal/CccsMapper.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cccs/Internal/CccsMapper.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Cccs/Internal/CccsMapper.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cccs/Internal/CccsMapper.cs diff --git a/src/StellaOps.Concelier.Connector.Cccs/Internal/CccsRawAdvisoryDocument.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cccs/Internal/CccsRawAdvisoryDocument.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Cccs/Internal/CccsRawAdvisoryDocument.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cccs/Internal/CccsRawAdvisoryDocument.cs diff --git a/src/StellaOps.Concelier.Connector.Cccs/Jobs.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cccs/Jobs.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Cccs/Jobs.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cccs/Jobs.cs diff --git a/src/StellaOps.Concelier.Connector.Cccs/Properties/AssemblyInfo.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cccs/Properties/AssemblyInfo.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Cccs/Properties/AssemblyInfo.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cccs/Properties/AssemblyInfo.cs diff --git a/src/StellaOps.Concelier.Connector.Cccs/StellaOps.Concelier.Connector.Cccs.csproj b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cccs/StellaOps.Concelier.Connector.Cccs.csproj similarity index 79% rename 
from src/StellaOps.Concelier.Connector.Cccs/StellaOps.Concelier.Connector.Cccs.csproj rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cccs/StellaOps.Concelier.Connector.Cccs.csproj index 45e65329..5bd20434 100644 --- a/src/StellaOps.Concelier.Connector.Cccs/StellaOps.Concelier.Connector.Cccs.csproj +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cccs/StellaOps.Concelier.Connector.Cccs.csproj @@ -1,16 +1,17 @@ - - - - net10.0 - enable - enable - - + + + + + net10.0 + enable + enable + + - + - + \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Cccs/TASKS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cccs/TASKS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.Cccs/TASKS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cccs/TASKS.md diff --git a/src/StellaOps.Concelier.Connector.CertBund/AGENTS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/AGENTS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.CertBund/AGENTS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/AGENTS.md diff --git a/src/StellaOps.Concelier.Connector.CertBund/CertBundConnector.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/CertBundConnector.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertBund/CertBundConnector.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/CertBundConnector.cs diff --git a/src/StellaOps.Concelier.Connector.CertBund/CertBundConnectorPlugin.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/CertBundConnectorPlugin.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertBund/CertBundConnectorPlugin.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/CertBundConnectorPlugin.cs diff --git a/src/StellaOps.Concelier.Connector.CertBund/CertBundDependencyInjectionRoutine.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/CertBundDependencyInjectionRoutine.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertBund/CertBundDependencyInjectionRoutine.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/CertBundDependencyInjectionRoutine.cs diff --git a/src/StellaOps.Concelier.Connector.CertBund/CertBundServiceCollectionExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/CertBundServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertBund/CertBundServiceCollectionExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/CertBundServiceCollectionExtensions.cs diff --git a/src/StellaOps.Concelier.Connector.CertBund/Configuration/CertBundOptions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/Configuration/CertBundOptions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertBund/Configuration/CertBundOptions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/Configuration/CertBundOptions.cs diff --git a/src/StellaOps.Concelier.Connector.CertBund/Internal/CertBundAdvisoryDto.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/Internal/CertBundAdvisoryDto.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertBund/Internal/CertBundAdvisoryDto.cs rename to 
src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/Internal/CertBundAdvisoryDto.cs diff --git a/src/StellaOps.Concelier.Connector.CertBund/Internal/CertBundCursor.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/Internal/CertBundCursor.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertBund/Internal/CertBundCursor.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/Internal/CertBundCursor.cs diff --git a/src/StellaOps.Concelier.Connector.CertBund/Internal/CertBundDetailParser.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/Internal/CertBundDetailParser.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertBund/Internal/CertBundDetailParser.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/Internal/CertBundDetailParser.cs diff --git a/src/StellaOps.Concelier.Connector.CertBund/Internal/CertBundDetailResponse.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/Internal/CertBundDetailResponse.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertBund/Internal/CertBundDetailResponse.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/Internal/CertBundDetailResponse.cs diff --git a/src/StellaOps.Concelier.Connector.CertBund/Internal/CertBundDiagnostics.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/Internal/CertBundDiagnostics.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertBund/Internal/CertBundDiagnostics.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/Internal/CertBundDiagnostics.cs diff --git a/src/StellaOps.Concelier.Connector.CertBund/Internal/CertBundDocumentMetadata.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/Internal/CertBundDocumentMetadata.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertBund/Internal/CertBundDocumentMetadata.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/Internal/CertBundDocumentMetadata.cs diff --git a/src/StellaOps.Concelier.Connector.CertBund/Internal/CertBundFeedClient.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/Internal/CertBundFeedClient.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertBund/Internal/CertBundFeedClient.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/Internal/CertBundFeedClient.cs diff --git a/src/StellaOps.Concelier.Connector.CertBund/Internal/CertBundFeedItem.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/Internal/CertBundFeedItem.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertBund/Internal/CertBundFeedItem.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/Internal/CertBundFeedItem.cs diff --git a/src/StellaOps.Concelier.Connector.CertBund/Internal/CertBundMapper.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/Internal/CertBundMapper.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertBund/Internal/CertBundMapper.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/Internal/CertBundMapper.cs diff --git a/src/StellaOps.Concelier.Connector.CertBund/Jobs.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/Jobs.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertBund/Jobs.cs rename to 
src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/Jobs.cs diff --git a/src/StellaOps.Concelier.Connector.CertBund/README.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/README.md similarity index 100% rename from src/StellaOps.Concelier.Connector.CertBund/README.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/README.md diff --git a/src/StellaOps.Concelier.Connector.CertBund/StellaOps.Concelier.Connector.CertBund.csproj b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/StellaOps.Concelier.Connector.CertBund.csproj similarity index 75% rename from src/StellaOps.Concelier.Connector.CertBund/StellaOps.Concelier.Connector.CertBund.csproj rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/StellaOps.Concelier.Connector.CertBund.csproj index 6ba1e732..bc57abd1 100644 --- a/src/StellaOps.Concelier.Connector.CertBund/StellaOps.Concelier.Connector.CertBund.csproj +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/StellaOps.Concelier.Connector.CertBund.csproj @@ -1,15 +1,16 @@ - - - - net10.0 - enable - enable - - + + + + + net10.0 + enable + enable + + - + - + \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.CertBund/TASKS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/TASKS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.CertBund/TASKS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertBund/TASKS.md diff --git a/src/StellaOps.Concelier.Connector.CertCc/AGENTS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/AGENTS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.CertCc/AGENTS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/AGENTS.md diff --git a/src/StellaOps.Concelier.Connector.CertCc/CertCcConnector.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/CertCcConnector.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertCc/CertCcConnector.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/CertCcConnector.cs diff --git a/src/StellaOps.Concelier.Connector.CertCc/CertCcConnectorPlugin.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/CertCcConnectorPlugin.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertCc/CertCcConnectorPlugin.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/CertCcConnectorPlugin.cs diff --git a/src/StellaOps.Concelier.Connector.CertCc/CertCcDependencyInjectionRoutine.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/CertCcDependencyInjectionRoutine.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertCc/CertCcDependencyInjectionRoutine.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/CertCcDependencyInjectionRoutine.cs diff --git a/src/StellaOps.Concelier.Connector.CertCc/CertCcServiceCollectionExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/CertCcServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertCc/CertCcServiceCollectionExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/CertCcServiceCollectionExtensions.cs diff --git a/src/StellaOps.Concelier.Connector.CertCc/Configuration/CertCcOptions.cs 
b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/Configuration/CertCcOptions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertCc/Configuration/CertCcOptions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/Configuration/CertCcOptions.cs diff --git a/src/StellaOps.Concelier.Connector.CertCc/FEEDCONN-CERTCC-02-009_PLAN.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/FEEDCONN-CERTCC-02-009_PLAN.md similarity index 88% rename from src/StellaOps.Concelier.Connector.CertCc/FEEDCONN-CERTCC-02-009_PLAN.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/FEEDCONN-CERTCC-02-009_PLAN.md index 99374b57..54d329d5 100644 --- a/src/StellaOps.Concelier.Connector.CertCc/FEEDCONN-CERTCC-02-009_PLAN.md +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/FEEDCONN-CERTCC-02-009_PLAN.md @@ -1,59 +1,59 @@ -# FEEDCONN-CERTCC-02-009 – VINCE Detail & Map Reintegration Plan - -- **Author:** BE-Conn-CERTCC (current on-call) -- **Date:** 2025-10-11 -- **Scope:** Restore VINCE detail parsing and canonical mapping in Concelier without destabilising downstream Merge/Export pipelines. - -## 1. Current State Snapshot (2025-10-11) - -- ✅ Fetch pipeline, VINCE summary planner, and detail queue are live; documents land with `DocumentStatuses.PendingParse`. -- ✅ DTO aggregate (`CertCcNoteDto`) plus mapper emit vendor-centric `normalizedVersions` (`scheme=certcc.vendor`) and provenance aligned with `src/StellaOps.Concelier.Models/PROVENANCE_GUIDELINES.md`. -- ✅ Regression coverage exists for fetch/parse/map flows (`CertCcConnectorSnapshotTests`), but snapshot regeneration is gated on harness refresh (FEEDCONN-CERTCC-02-007) and QA handoff (FEEDCONN-CERTCC-02-008). -- ⚠️ Parse/map jobs are not scheduled; production still operates in fetch-only mode. -- ⚠️ Downstream Merge team is finalising normalized range ingestion per `src/FASTER_MODELING_AND_NORMALIZATION.md`; we must avoid publishing canonical records until they certify compatibility. - -## 2. Required Dependencies & Coordinated Tasks - -| Dependency | Owner(s) | Blocking Condition | Handshake | -|------------|----------|--------------------|-----------| -| FEEDCONN-CERTCC-02-004 (Canonical mapping & range primitives hardening) | BE-Conn-CERTCC + Models | Ensure mapper emits deterministic `normalizedVersions` array and provenance field masks | Daily sync with Models/Merge leads; share fixture diff before each enablement phase | -| FEEDCONN-CERTCC-02-007 (Connector test harness remediation) | BE-Conn-CERTCC, QA | Restore `AddSourceCommon` harness + canned VINCE fixtures so we can shadow-run parse/map | Required before Phase 1 | -| FEEDCONN-CERTCC-02-008 (Snapshot coverage handoff) | QA | Snapshot refresh process green to surface regressions | Required before Phase 2 | -| FEEDCONN-CERTCC-02-010 (Partial-detail graceful degradation) | BE-Conn-CERTCC | Resiliency for missing VINCE endpoints to avoid job wedging after reintegration | Should land before Phase 2 cutover | - -## 3. Phased Rollout Plan - -| Phase | Window (UTC) | Actions | Success Signals | Rollback | -|-------|--------------|---------|-----------------|----------| -| **0 – Pre-flight validation** | 2025-10-11 → 2025-10-12 | • Finish FEEDCONN-CERTCC-02-007 harness fixes and regenerate fixtures.
• Run `dotnet test src/StellaOps.Concelier.Connector.CertCc.Tests` with `UPDATE_CERTCC_FIXTURES=0` to confirm deterministic baselines.
• Generate sample advisory batch (`dotnet test … --filter SnapshotSmoke`) and deliver JSON diff to Merge for schema verification (`normalizedVersions[].scheme == certcc.vendor`, provenance masks populated). | • Harness tests green locally and in CI.
• Merge sign-off that sample advisories conform to `FASTER_MODELING_AND_NORMALIZATION.md`. | N/A (no production enablement yet). | -| **1 – Shadow parse/map in staging** | Target start 2025-10-13 | • Register `source:cert-cc:parse` and `source:cert-cc:map` jobs, but gate them behind new config flag `concelier:sources:cert-cc:enableDetailMapping` (default `false`).
• Deploy (restart required for options rebinding), enable flag, and point connector at staging Mongo with isolated collection (`advisories_certcc_shadow`).
• Run connector for ≥2 cycles; compare advisory counts vs. fetch-only baseline and validate `concelier.range.primitives` metrics include `scheme=certcc.vendor`. | • No uncaught exceptions in staging logs.
• Shadow advisories match expected vendor counts (±5%).
• `certcc.summary.fetch.*` + new `certcc.map.duration.ms` metrics stable. | Disable flag; staging returns to fetch-only. No production impact. | -| **2 – Controlled production enablement** | Target start 2025-10-14 | • Redeploy production with flag enabled, start with job concurrency `1`, and reduce `MaxNotesPerFetch` to 5 for first 24 h.
• Observe metrics dashboards hourly (fetch/map latency, pending queues, Mongo write throughput).
• QA to replay latest snapshots and confirm no deterministic drift.
• Publish advisory sample (top 10 changed docs) to Merge Slack channel for validation. | • Pending parse/mapping queues drain within expected SLA (<30 min).
• No increase in merge dedupe anomalies.
• Mongo writes stay within 10% of baseline. | Toggle flag off, re-run fetch-only. Clear `pendingMappings` via connector cursor reset if stuck. | -| **3 – Full production & cleanup** | Target start 2025-10-15 | • Restore `MaxNotesPerFetch` to configured default (20).
• Remove temporary throttles and leave flag enabled by default.
• Update `README.md` rollout notes; close FEEDCONN-CERTCC-02-009.
• Kick off post-merge audit with Merge to ensure new advisories dedupe with other sources. | • Stable operations for ≥48 h, no degradation alerts.
• Merge confirms conflict resolver behaviour unchanged. | If regression detected, revert to Phase 2 state or disable jobs; retain plan for reuse. | - -## 4. Monitoring & Validation Checklist - -- Dashboards: `certcc.*` meters (plan, summary fetch, detail fetch) plus `concelier.range.primitives` with tag `scheme=certcc.vendor`. -- Logs: ensure Parse/Map jobs emit `correlationId` aligned with fetch events for traceability. -- Data QA: run `tools/dump_advisory` against two VINCE notes (one multi-vendor, one single-vendor) every phase to spot-check normalized versions ordering and provenance. -- Storage: verify Mongo TTL/size for `raw_documents` and `dtos`—detail payload volume increases by ~3× when mapping resumes. - -## 5. Rollback / Contingency Playbook - -1. Disable `concelier:sources:cert-cc:enableDetailMapping` flag (and optionally set `MaxNotesPerFetch=0` for a single cycle) to halt new detail ingestion. -2. Run connector once to update cursor; verify `pendingMappings` drains. -3. If advisories already persisted, coordinate with Merge to soft-delete affected `certcc/*` advisories by advisory key hash (no schema rollback required). -4. Re-run Phase 1 shadow validation before retrying. - -## 6. Communication Cadence - -- Daily check-in with Models/Merge leads (09:30 EDT) to surface normalizedVersions/provenance diffs. -- Post-phase reports in `#concelier-certcc` Slack channel summarising metrics, advisory counts, and outstanding issues. -- Escalate blockers >12 h via Runbook SEV-3 path and annotate `TASKS.md`. - -## 7. Open Questions / Next Actions - -- [ ] Confirm whether Merge requires additional provenance field masks before Phase 2 (waiting on feedback from 2025-10-11 sample). -- [ ] Decide if CSAF endpoint ingestion (optional) should piggyback on Phase 3 or stay deferred. -- [ ] Validate that FEEDCONN-CERTCC-02-010 coverage handles mixed 200/404 VINCE endpoints during partial outages. - -Once Dependencies (Section 2) are cleared and Phase 3 completes, update `src/StellaOps.Concelier.Connector.CertCc/TASKS.md` and close FEEDCONN-CERTCC-02-009. +# FEEDCONN-CERTCC-02-009 – VINCE Detail & Map Reintegration Plan + +- **Author:** BE-Conn-CERTCC (current on-call) +- **Date:** 2025-10-11 +- **Scope:** Restore VINCE detail parsing and canonical mapping in Concelier without destabilising downstream Merge/Export pipelines. + +## 1. Current State Snapshot (2025-10-11) + +- ✅ Fetch pipeline, VINCE summary planner, and detail queue are live; documents land with `DocumentStatuses.PendingParse`. +- ✅ DTO aggregate (`CertCcNoteDto`) plus mapper emit vendor-centric `normalizedVersions` (`scheme=certcc.vendor`) and provenance aligned with `src/Concelier/__Libraries/StellaOps.Concelier.Models/PROVENANCE_GUIDELINES.md`. +- ✅ Regression coverage exists for fetch/parse/map flows (`CertCcConnectorSnapshotTests`), but snapshot regeneration is gated on harness refresh (FEEDCONN-CERTCC-02-007) and QA handoff (FEEDCONN-CERTCC-02-008). +- ⚠️ Parse/map jobs are not scheduled; production still operates in fetch-only mode. +- ⚠️ Downstream Merge team is finalising normalized range ingestion per `src/FASTER_MODELING_AND_NORMALIZATION.md`; we must avoid publishing canonical records until they certify compatibility. + +## 2. 
Required Dependencies & Coordinated Tasks + +| Dependency | Owner(s) | Blocking Condition | Handshake | +|------------|----------|--------------------|-----------| +| FEEDCONN-CERTCC-02-004 (Canonical mapping & range primitives hardening) | BE-Conn-CERTCC + Models | Ensure mapper emits deterministic `normalizedVersions` array and provenance field masks | Daily sync with Models/Merge leads; share fixture diff before each enablement phase | +| FEEDCONN-CERTCC-02-007 (Connector test harness remediation) | BE-Conn-CERTCC, QA | Restore `AddSourceCommon` harness + canned VINCE fixtures so we can shadow-run parse/map | Required before Phase 1 | +| FEEDCONN-CERTCC-02-008 (Snapshot coverage handoff) | QA | Snapshot refresh process green to surface regressions | Required before Phase 2 | +| FEEDCONN-CERTCC-02-010 (Partial-detail graceful degradation) | BE-Conn-CERTCC | Resiliency for missing VINCE endpoints to avoid job wedging after reintegration | Should land before Phase 2 cutover | + +## 3. Phased Rollout Plan + +| Phase | Window (UTC) | Actions | Success Signals | Rollback | +|-------|--------------|---------|-----------------|----------| +| **0 – Pre-flight validation** | 2025-10-11 → 2025-10-12 | • Finish FEEDCONN-CERTCC-02-007 harness fixes and regenerate fixtures.
• Run `dotnet test src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests` with `UPDATE_CERTCC_FIXTURES=0` to confirm deterministic baselines.
• Generate sample advisory batch (`dotnet test … --filter SnapshotSmoke`) and deliver JSON diff to Merge for schema verification (`normalizedVersions[].scheme == certcc.vendor`, provenance masks populated). | • Harness tests green locally and in CI.
• Merge sign-off that sample advisories conform to `FASTER_MODELING_AND_NORMALIZATION.md`. | N/A (no production enablement yet). | +| **1 – Shadow parse/map in staging** | Target start 2025-10-13 | • Register `source:cert-cc:parse` and `source:cert-cc:map` jobs, but gate them behind new config flag `concelier:sources:cert-cc:enableDetailMapping` (default `false`).
• Deploy (restart required for options rebinding), enable flag, and point connector at staging Mongo with isolated collection (`advisories_certcc_shadow`).
• Run connector for ≥2 cycles; compare advisory counts vs. fetch-only baseline and validate `concelier.range.primitives` metrics include `scheme=certcc.vendor`. | • No uncaught exceptions in staging logs.
• Shadow advisories match expected vendor counts (±5%).
`certcc.summary.fetch.*` + new `certcc.map.duration.ms` metrics stable. | Disable the flag; staging returns to fetch-only. No production impact. | +| **2 – Controlled production enablement** | Target start 2025-10-14 | • Redeploy production with the flag enabled, start with job concurrency `1`, and reduce `MaxNotesPerFetch` to 5 for the first 24 h.<br>
• Observe metrics dashboards hourly (fetch/map latency, pending queues, Mongo write throughput).
• QA to replay latest snapshots and confirm no deterministic drift.
• Publish advisory sample (top 10 changed docs) to Merge Slack channel for validation. | • Pending parse/mapping queues drain within expected SLA (<30 min).
• No increase in merge dedupe anomalies.
• Mongo writes stay within 10% of baseline. | Toggle the flag off and re-run fetch-only. Clear `pendingMappings` via a connector cursor reset if stuck. | +| **3 – Full production & cleanup** | Target start 2025-10-15 | • Restore `MaxNotesPerFetch` to the configured default (20).<br>
• Remove temporary throttles and leave the flag enabled by default.<br>
• Update `README.md` rollout notes; close FEEDCONN-CERTCC-02-009.
• Kick off post-merge audit with Merge to ensure new advisories dedupe with other sources. | • Stable operations for ≥48 h, no degradation alerts.
• Merge confirms conflict resolver behaviour unchanged. | If regression detected, revert to Phase 2 state or disable jobs; retain plan for reuse. | + +## 4. Monitoring & Validation Checklist + +- Dashboards: `certcc.*` meters (plan, summary fetch, detail fetch) plus `concelier.range.primitives` with tag `scheme=certcc.vendor`. +- Logs: ensure Parse/Map jobs emit `correlationId` aligned with fetch events for traceability. +- Data QA: run `tools/dump_advisory` against two VINCE notes (one multi-vendor, one single-vendor) every phase to spot-check normalized versions ordering and provenance. +- Storage: verify Mongo TTL/size for `raw_documents` and `dtos`—detail payload volume increases by ~3× when mapping resumes. + +## 5. Rollback / Contingency Playbook + +1. Disable `concelier:sources:cert-cc:enableDetailMapping` flag (and optionally set `MaxNotesPerFetch=0` for a single cycle) to halt new detail ingestion. +2. Run connector once to update cursor; verify `pendingMappings` drains. +3. If advisories already persisted, coordinate with Merge to soft-delete affected `certcc/*` advisories by advisory key hash (no schema rollback required). +4. Re-run Phase 1 shadow validation before retrying. + +## 6. Communication Cadence + +- Daily check-in with Models/Merge leads (09:30 EDT) to surface normalizedVersions/provenance diffs. +- Post-phase reports in `#concelier-certcc` Slack channel summarising metrics, advisory counts, and outstanding issues. +- Escalate blockers >12 h via Runbook SEV-3 path and annotate `TASKS.md`. + +## 7. Open Questions / Next Actions + +- [ ] Confirm whether Merge requires additional provenance field masks before Phase 2 (waiting on feedback from 2025-10-11 sample). +- [ ] Decide if CSAF endpoint ingestion (optional) should piggyback on Phase 3 or stay deferred. +- [ ] Validate that FEEDCONN-CERTCC-02-010 coverage handles mixed 200/404 VINCE endpoints during partial outages. + +Once Dependencies (Section 2) are cleared and Phase 3 completes, update `src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.CertCc/TASKS.md` and close FEEDCONN-CERTCC-02-009. diff --git a/src/StellaOps.Concelier.Connector.CertCc/FEEDCONN-CERTCC-02-012_HANDOFF.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/FEEDCONN-CERTCC-02-012_HANDOFF.md similarity index 58% rename from src/StellaOps.Concelier.Connector.CertCc/FEEDCONN-CERTCC-02-012_HANDOFF.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/FEEDCONN-CERTCC-02-012_HANDOFF.md index c710b360..a452e113 100644 --- a/src/StellaOps.Concelier.Connector.CertCc/FEEDCONN-CERTCC-02-012_HANDOFF.md +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/FEEDCONN-CERTCC-02-012_HANDOFF.md @@ -1,20 +1,20 @@ -# FEEDCONN-CERTCC-02-012 – Schema Sync & Snapshot Regeneration - -## Summary -- Re-ran `StellaOps.Concelier.Connector.CertCc.Tests` with `UPDATE_CERTCC_FIXTURES=1`; fixtures now capture SemVer-style normalized versions (`scheme=certcc.vendor`) and `provenance.decisionReason` values emitted by the mapper. -- Recorded HTTP request ordering is persisted in `certcc-requests.snapshot.json` to keep Merge aware of the deterministic fetch plan. -- Advisories snapshot (`certcc-advisories.snapshot.json`) reflects the dual-write storage changes (normalized versions + provenance) introduced by FEEDMODELS-SCHEMA-* and FEEDSTORAGE-DATA-*. 
- -## Artifacts -- `src/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/certcc-advisories.snapshot.json` -- `src/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/certcc-documents.snapshot.json` -- `src/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/certcc-requests.snapshot.json` -- `src/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/certcc-state.snapshot.json` - -## Validation steps -```bash -dotnet test src/StellaOps.Concelier.Connector.CertCc.Tests -UPDATE_CERTCC_FIXTURES=1 dotnet test src/StellaOps.Concelier.Connector.CertCc.Tests -``` - -The first command verifies deterministic behavior; the second regenerates fixtures if a future schema change occurs. Share the four snapshot files above with Merge for their backfill diff. +# FEEDCONN-CERTCC-02-012 – Schema Sync & Snapshot Regeneration + +## Summary +- Re-ran `StellaOps.Concelier.Connector.CertCc.Tests` with `UPDATE_CERTCC_FIXTURES=1`; fixtures now capture SemVer-style normalized versions (`scheme=certcc.vendor`) and `provenance.decisionReason` values emitted by the mapper. +- Recorded HTTP request ordering is persisted in `certcc-requests.snapshot.json` to keep Merge aware of the deterministic fetch plan. +- Advisories snapshot (`certcc-advisories.snapshot.json`) reflects the dual-write storage changes (normalized versions + provenance) introduced by FEEDMODELS-SCHEMA-* and FEEDSTORAGE-DATA-*. + +## Artifacts +- `src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/certcc-advisories.snapshot.json` +- `src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/certcc-documents.snapshot.json` +- `src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/certcc-requests.snapshot.json` +- `src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/certcc-state.snapshot.json` + +## Validation steps +```bash +dotnet test src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests +UPDATE_CERTCC_FIXTURES=1 dotnet test src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests +``` + +The first command verifies deterministic behavior; the second regenerates fixtures if a future schema change occurs. Share the four snapshot files above with Merge for their backfill diff. 
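
For operators who want to script the verify–regenerate–handoff flow described above, a minimal sketch is shown below. The test project path, fixture filenames, and `UPDATE_CERTCC_FIXTURES` switch come from this note; the wrapper script itself and the `handoff/` staging directory are illustrative assumptions, not part of the repository.

```bash
#!/usr/bin/env bash
# Illustrative handoff helper (assumption, not a repo script): verify determinism,
# regenerate the CERT/CC snapshots, and stage them for the Merge backfill diff.
set -euo pipefail

TEST_PROJECT="src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests"
FIXTURE_DIR="${TEST_PROJECT}/Fixtures"
HANDOFF_DIR="${1:-handoff/certcc-$(date -u +%Y%m%d)}"   # staging directory is an assumption

# 1. Confirm the current fixtures are deterministic before touching them.
dotnet test "${TEST_PROJECT}"

# 2. Regenerate fixtures only when a schema change is expected.
UPDATE_CERTCC_FIXTURES=1 dotnet test "${TEST_PROJECT}"

# 3. Collect the four snapshot files called out in this note.
mkdir -p "${HANDOFF_DIR}"
cp "${FIXTURE_DIR}"/certcc-{advisories,documents,requests,state}.snapshot.json "${HANDOFF_DIR}/"

echo "Snapshots staged in ${HANDOFF_DIR}; attach them (plus test logs) to the Merge handoff."
```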
diff --git a/src/StellaOps.Concelier.Connector.CertCc/Internal/CertCcCursor.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/Internal/CertCcCursor.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertCc/Internal/CertCcCursor.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/Internal/CertCcCursor.cs diff --git a/src/StellaOps.Concelier.Connector.CertCc/Internal/CertCcDiagnostics.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/Internal/CertCcDiagnostics.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertCc/Internal/CertCcDiagnostics.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/Internal/CertCcDiagnostics.cs diff --git a/src/StellaOps.Concelier.Connector.CertCc/Internal/CertCcMapper.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/Internal/CertCcMapper.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertCc/Internal/CertCcMapper.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/Internal/CertCcMapper.cs diff --git a/src/StellaOps.Concelier.Connector.CertCc/Internal/CertCcNoteDto.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/Internal/CertCcNoteDto.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertCc/Internal/CertCcNoteDto.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/Internal/CertCcNoteDto.cs diff --git a/src/StellaOps.Concelier.Connector.CertCc/Internal/CertCcNoteParser.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/Internal/CertCcNoteParser.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertCc/Internal/CertCcNoteParser.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/Internal/CertCcNoteParser.cs diff --git a/src/StellaOps.Concelier.Connector.CertCc/Internal/CertCcSummaryParser.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/Internal/CertCcSummaryParser.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertCc/Internal/CertCcSummaryParser.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/Internal/CertCcSummaryParser.cs diff --git a/src/StellaOps.Concelier.Connector.CertCc/Internal/CertCcSummaryPlan.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/Internal/CertCcSummaryPlan.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertCc/Internal/CertCcSummaryPlan.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/Internal/CertCcSummaryPlan.cs diff --git a/src/StellaOps.Concelier.Connector.CertCc/Internal/CertCcSummaryPlanner.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/Internal/CertCcSummaryPlanner.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertCc/Internal/CertCcSummaryPlanner.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/Internal/CertCcSummaryPlanner.cs diff --git a/src/StellaOps.Concelier.Connector.CertCc/Internal/CertCcVendorStatementParser.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/Internal/CertCcVendorStatementParser.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertCc/Internal/CertCcVendorStatementParser.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/Internal/CertCcVendorStatementParser.cs diff --git a/src/StellaOps.Concelier.Connector.CertCc/Jobs.cs 
b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/Jobs.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertCc/Jobs.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/Jobs.cs diff --git a/src/StellaOps.Concelier.Connector.CertCc/Properties/AssemblyInfo.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/Properties/AssemblyInfo.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertCc/Properties/AssemblyInfo.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/Properties/AssemblyInfo.cs diff --git a/src/StellaOps.Concelier.Connector.CertCc/README.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/README.md similarity index 91% rename from src/StellaOps.Concelier.Connector.CertCc/README.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/README.md index 80b15ca4..8f77df90 100644 --- a/src/StellaOps.Concelier.Connector.CertCc/README.md +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/README.md @@ -1,63 +1,63 @@ -# CERT/CC Vulnerability Notes – Source Research - -## Canonical publication endpoints - -- **Public portal** – `https://www.kb.cert.org/vuls/` lists recently published Vulnerability Notes and exposes a “Subscribe to our feed” link for automation entry points.citeturn0search0 -- **Atom feed** – `https://www.kb.cert.org/vulfeed` returns an Atom 1.0 feed of the same notes (``, `<updated>`, `<summary>` HTML payload). Feed metadata advertises `rel="self"` at `https://kb.cert.org/vuls/atomfeed/`. Use conditional GET headers (`If-Modified-Since`, `If-None-Match`) to avoid refetching unchanged entries.citeturn0search2 - -## VINCE Vulnerability Note API - -The VINCE documentation describes an unauthenticated REST-style API for structured retrieval:citeturn1view0 - -| Endpoint | Payload | Notes | -| --- | --- | --- | -| `GET /vuls/api/{id}/` | Canonical note metadata (title, overview, markdown segments, timestamps, aliases). | Use numeric ID (e.g., `257161`). | -| `GET /vuls/api/{id}/vuls/` | Per-CVE vulnerability records tied to the note. | Includes CVE, description, timestamps. | -| `GET /vuls/api/{id}/vendors/` | Vendor statements per advisory. | Provides status text and optional references. | -| `GET /vuls/api/{id}/vendors/vuls/` | Vendor × vulnerability status matrix. | “known_affected” vs “known_not_affected” semantics. | -| `GET /vuls/api/vuls/cve/{cve}/` | Reverse lookup by CVE. | Returns combined note + vendor context. | -| `GET /vuls/api/{year}/summary/` | Annual summary listing (`count`, `notes[]`). | Year-month variants exist (`/{year}/{month}/summary/`). | -| `GET /vuls/api/{id}/csaf/` | CSAF 2.0 export generated by VINCE. | Useful for downstream CSAF tooling. | - -Operational considerations: - -- API responses are JSON (UTF-8) and publicly accessible; no authentication tokens or cookies are required.citeturn1view0 -- Monthly and annual summary endpoints enable incremental crawling without diffing the Atom feed. -- Expect high-volume notes to expose dozens of vendor records—prepare batching and pagination at the connector layer even though the API returns full arrays today. -- Apply polite backoff: the documentation does not publish explicit rate limits, but the kb.cert.org infrastructure throttles bursts; mirror existing backoff strategy (exponential with jitter) used by other connectors. 
-- Detail fetch tolerates missing optional endpoints (`vendors`, `vendors-vuls`, `vuls`) by logging a warning and continuing with partial data; repeated 4xx responses will not wedge the cursor. - -## Telemetry & monitoring - -The connector exposes an OpenTelemetry meter named `StellaOps.Concelier.Connector.CertCc`. Key instruments include: - -- Planning: `certcc.plan.windows`, `certcc.plan.requests`, and `certcc.plan.window_days`. -- Summary fetch: `certcc.summary.fetch.attempts`, `.success`, `.not_modified`, `.failures`. -- Detail fetch: `certcc.detail.fetch.attempts`, `.success`, `.unchanged`, `.missing`, `.failures` with an `endpoint` dimension (note/vendors/vuls/vendors-vuls). -- Parsing: `certcc.parse.success`, `.failures`, plus histograms for vendor/status/vulnerability counts. -- Mapping: `certcc.map.success`, `.failures`, and histograms `certcc.map.affected.count` / `certcc.map.normalized_versions.count`. - -Structured logs surface correlation IDs across fetch, parse, and map stages. Failures emit warnings for tolerated missing endpoints and errors for retry-worthy conditions so operators can hook them into existing alert policies. - -## Historical data sets - -CERT/CC publishes a Vulnerability Data Archive (JSON exports plus tooling) for deep history or backfills. The archive is hosted on the SEI site with mirrored GitHub repositories containing normalized JSON conversions.citeturn0search3turn0search4 - -## Snapshot regression workflow - -The connector ships deterministic fixtures so QA and Merge teams can replay fetch→parse→map without live calls. Use the following flow when validating changes or refreshing snapshots: - -1. `dotnet test src/StellaOps.Concelier.Connector.CertCc.Tests` – runs the connector snapshot suite against canned VINCE responses. -2. `UPDATE_CERTCC_FIXTURES=1 dotnet test src/StellaOps.Concelier.Connector.CertCc.Tests` – regenerates fixtures under `src/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/*.snapshot.json` and mirrors them in the test output directory (`bin/Debug/net10.0/Source/CertCc/Fixtures`). - - The harness now records every HTTP request; `certcc-requests.snapshot.json` must list summaries/months in canonical order. - - Expect `certcc-advisories.snapshot.json` to include normalized versions (`scheme=certcc.vendor`) and provenance decision reasons. -3. Review diffs and attach `certcc-*.snapshot.json` plus test logs when handing off to Merge. - -Fixtures are sorted and timestamps normalized to UTC ISO‑8601 to preserve determinism across machines. - -## Next steps for the connector - -1. Implement Atom polling for quick detection, with VINCE API lookups for structured details. `CertCcSummaryPlanner` already computes the VINCE year/month summary URIs to fetch per window; wire this into the fetch job and persist the resulting `TimeWindowCursorState`. -2. Persist `updated` timestamps and VINCE `revision` counters to drive resume logic. -3. Capture vendor statements/CSAF exports to populate range primitives once model hooks exist. -4. Evaluate using the data archive for seed fixtures covering legacy notes (pre-2010).*** +# CERT/CC Vulnerability Notes – Source Research + +## Canonical publication endpoints + +- **Public portal** – `https://www.kb.cert.org/vuls/` lists recently published Vulnerability Notes and exposes a “Subscribe to our feed” link for automation entry points.citeturn0search0 +- **Atom feed** – `https://www.kb.cert.org/vulfeed` returns an Atom 1.0 feed of the same notes (`<title>`, `<updated>`, `<summary>` HTML payload). 
Feed metadata advertises `rel="self"` at `https://kb.cert.org/vuls/atomfeed/`. Use conditional GET headers (`If-Modified-Since`, `If-None-Match`) to avoid refetching unchanged entries.citeturn0search2 + +## VINCE Vulnerability Note API + +The VINCE documentation describes an unauthenticated REST-style API for structured retrieval:citeturn1view0 + +| Endpoint | Payload | Notes | +| --- | --- | --- | +| `GET /vuls/api/{id}/` | Canonical note metadata (title, overview, markdown segments, timestamps, aliases). | Use numeric ID (e.g., `257161`). | +| `GET /vuls/api/{id}/vuls/` | Per-CVE vulnerability records tied to the note. | Includes CVE, description, timestamps. | +| `GET /vuls/api/{id}/vendors/` | Vendor statements per advisory. | Provides status text and optional references. | +| `GET /vuls/api/{id}/vendors/vuls/` | Vendor × vulnerability status matrix. | “known_affected” vs “known_not_affected” semantics. | +| `GET /vuls/api/vuls/cve/{cve}/` | Reverse lookup by CVE. | Returns combined note + vendor context. | +| `GET /vuls/api/{year}/summary/` | Annual summary listing (`count`, `notes[]`). | Year-month variants exist (`/{year}/{month}/summary/`). | +| `GET /vuls/api/{id}/csaf/` | CSAF 2.0 export generated by VINCE. | Useful for downstream CSAF tooling. | + +Operational considerations: + +- API responses are JSON (UTF-8) and publicly accessible; no authentication tokens or cookies are required.citeturn1view0 +- Monthly and annual summary endpoints enable incremental crawling without diffing the Atom feed. +- Expect high-volume notes to expose dozens of vendor records—prepare batching and pagination at the connector layer even though the API returns full arrays today. +- Apply polite backoff: the documentation does not publish explicit rate limits, but the kb.cert.org infrastructure throttles bursts; mirror existing backoff strategy (exponential with jitter) used by other connectors. +- Detail fetch tolerates missing optional endpoints (`vendors`, `vendors-vuls`, `vuls`) by logging a warning and continuing with partial data; repeated 4xx responses will not wedge the cursor. + +## Telemetry & monitoring + +The connector exposes an OpenTelemetry meter named `StellaOps.Concelier.Connector.CertCc`. Key instruments include: + +- Planning: `certcc.plan.windows`, `certcc.plan.requests`, and `certcc.plan.window_days`. +- Summary fetch: `certcc.summary.fetch.attempts`, `.success`, `.not_modified`, `.failures`. +- Detail fetch: `certcc.detail.fetch.attempts`, `.success`, `.unchanged`, `.missing`, `.failures` with an `endpoint` dimension (note/vendors/vuls/vendors-vuls). +- Parsing: `certcc.parse.success`, `.failures`, plus histograms for vendor/status/vulnerability counts. +- Mapping: `certcc.map.success`, `.failures`, and histograms `certcc.map.affected.count` / `certcc.map.normalized_versions.count`. + +Structured logs surface correlation IDs across fetch, parse, and map stages. Failures emit warnings for tolerated missing endpoints and errors for retry-worthy conditions so operators can hook them into existing alert policies. + +## Historical data sets + +CERT/CC publishes a Vulnerability Data Archive (JSON exports plus tooling) for deep history or backfills. The archive is hosted on the SEI site with mirrored GitHub repositories containing normalized JSON conversions.citeturn0search3turn0search4 + +## Snapshot regression workflow + +The connector ships deterministic fixtures so QA and Merge teams can replay fetch→parse→map without live calls. 
Use the following flow when validating changes or refreshing snapshots: + +1. `dotnet test src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests` – runs the connector snapshot suite against canned VINCE responses. +2. `UPDATE_CERTCC_FIXTURES=1 dotnet test src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests` – regenerates fixtures under `src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/*.snapshot.json` and mirrors them in the test output directory (`bin/Debug/net10.0/Source/CertCc/Fixtures`). + - The harness now records every HTTP request; `certcc-requests.snapshot.json` must list summaries/months in canonical order. + - Expect `certcc-advisories.snapshot.json` to include normalized versions (`scheme=certcc.vendor`) and provenance decision reasons. +3. Review diffs and attach `certcc-*.snapshot.json` plus test logs when handing off to Merge. + +Fixtures are sorted and timestamps normalized to UTC ISO‑8601 to preserve determinism across machines. + +## Next steps for the connector + +1. Implement Atom polling for quick detection, with VINCE API lookups for structured details. `CertCcSummaryPlanner` already computes the VINCE year/month summary URIs to fetch per window; wire this into the fetch job and persist the resulting `TimeWindowCursorState`. +2. Persist `updated` timestamps and VINCE `revision` counters to drive resume logic. +3. Capture vendor statements/CSAF exports to populate range primitives once model hooks exist. +4. Evaluate using the data archive for seed fixtures covering legacy notes (pre-2010).*** diff --git a/src/StellaOps.Concelier.Connector.CertCc/StellaOps.Concelier.Connector.CertCc.csproj b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/StellaOps.Concelier.Connector.CertCc.csproj similarity index 80% rename from src/StellaOps.Concelier.Connector.CertCc/StellaOps.Concelier.Connector.CertCc.csproj rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/StellaOps.Concelier.Connector.CertCc.csproj index 860f5b2a..83b1af1d 100644 --- a/src/StellaOps.Concelier.Connector.CertCc/StellaOps.Concelier.Connector.CertCc.csproj +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/StellaOps.Concelier.Connector.CertCc.csproj @@ -1,18 +1,19 @@ -<Project Sdk="Microsoft.NET.Sdk"> - - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> </PropertyGroup> <ItemGroup> <PackageReference Include="Markdig" Version="0.31.0" /> - <ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" /> + <ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" /> <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.CertCc/TASKS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/TASKS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.CertCc/TASKS.md rename to 
src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertCc/TASKS.md diff --git a/src/StellaOps.Concelier.Connector.CertFr/AGENTS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertFr/AGENTS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.CertFr/AGENTS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertFr/AGENTS.md diff --git a/src/StellaOps.Concelier.Connector.CertFr/CertFrConnector.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertFr/CertFrConnector.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertFr/CertFrConnector.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertFr/CertFrConnector.cs diff --git a/src/StellaOps.Concelier.Connector.CertFr/CertFrConnectorPlugin.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertFr/CertFrConnectorPlugin.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertFr/CertFrConnectorPlugin.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertFr/CertFrConnectorPlugin.cs diff --git a/src/StellaOps.Concelier.Connector.CertFr/CertFrDependencyInjectionRoutine.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertFr/CertFrDependencyInjectionRoutine.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertFr/CertFrDependencyInjectionRoutine.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertFr/CertFrDependencyInjectionRoutine.cs diff --git a/src/StellaOps.Concelier.Connector.CertFr/CertFrServiceCollectionExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertFr/CertFrServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertFr/CertFrServiceCollectionExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertFr/CertFrServiceCollectionExtensions.cs diff --git a/src/StellaOps.Concelier.Connector.CertFr/Configuration/CertFrOptions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertFr/Configuration/CertFrOptions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertFr/Configuration/CertFrOptions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertFr/Configuration/CertFrOptions.cs diff --git a/src/StellaOps.Concelier.Connector.CertFr/Internal/CertFrCursor.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertFr/Internal/CertFrCursor.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertFr/Internal/CertFrCursor.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertFr/Internal/CertFrCursor.cs diff --git a/src/StellaOps.Concelier.Connector.CertFr/Internal/CertFrDocumentMetadata.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertFr/Internal/CertFrDocumentMetadata.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertFr/Internal/CertFrDocumentMetadata.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertFr/Internal/CertFrDocumentMetadata.cs diff --git a/src/StellaOps.Concelier.Connector.CertFr/Internal/CertFrDto.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertFr/Internal/CertFrDto.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertFr/Internal/CertFrDto.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertFr/Internal/CertFrDto.cs diff --git a/src/StellaOps.Concelier.Connector.CertFr/Internal/CertFrFeedClient.cs 
b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertFr/Internal/CertFrFeedClient.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertFr/Internal/CertFrFeedClient.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertFr/Internal/CertFrFeedClient.cs diff --git a/src/StellaOps.Concelier.Connector.CertFr/Internal/CertFrFeedItem.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertFr/Internal/CertFrFeedItem.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertFr/Internal/CertFrFeedItem.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertFr/Internal/CertFrFeedItem.cs diff --git a/src/StellaOps.Concelier.Connector.CertFr/Internal/CertFrMapper.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertFr/Internal/CertFrMapper.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertFr/Internal/CertFrMapper.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertFr/Internal/CertFrMapper.cs diff --git a/src/StellaOps.Concelier.Connector.CertFr/Internal/CertFrParser.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertFr/Internal/CertFrParser.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertFr/Internal/CertFrParser.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertFr/Internal/CertFrParser.cs diff --git a/src/StellaOps.Concelier.Connector.CertFr/Jobs.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertFr/Jobs.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertFr/Jobs.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertFr/Jobs.cs diff --git a/src/StellaOps.Concelier.Connector.CertFr/StellaOps.Concelier.Connector.CertFr.csproj b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertFr/StellaOps.Concelier.Connector.CertFr.csproj similarity index 79% rename from src/StellaOps.Concelier.Connector.CertFr/StellaOps.Concelier.Connector.CertFr.csproj rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertFr/StellaOps.Concelier.Connector.CertFr.csproj index cf97d924..8407dcfb 100644 --- a/src/StellaOps.Concelier.Connector.CertFr/StellaOps.Concelier.Connector.CertFr.csproj +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertFr/StellaOps.Concelier.Connector.CertFr.csproj @@ -1,13 +1,14 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" /> - <ProjectReference Include="..\StellaOps.Concelier.Connector.Common\StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="..\StellaOps.Concelier.Storage.Mongo\StellaOps.Concelier.Storage.Mongo.csproj" /> - <ProjectReference Include="..\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj" /> - </ItemGroup> -</Project> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" /> + <ProjectReference Include="..\StellaOps.Concelier.Connector.Common\StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference 
Include="..\StellaOps.Concelier.Storage.Mongo\StellaOps.Concelier.Storage.Mongo.csproj" /> + <ProjectReference Include="..\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.CertFr/TASKS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertFr/TASKS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.CertFr/TASKS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertFr/TASKS.md diff --git a/src/StellaOps.Concelier.Connector.CertIn/AGENTS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertIn/AGENTS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.CertIn/AGENTS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertIn/AGENTS.md diff --git a/src/StellaOps.Concelier.Connector.CertIn/CertInConnector.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertIn/CertInConnector.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertIn/CertInConnector.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertIn/CertInConnector.cs diff --git a/src/StellaOps.Concelier.Connector.CertIn/CertInConnectorPlugin.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertIn/CertInConnectorPlugin.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertIn/CertInConnectorPlugin.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertIn/CertInConnectorPlugin.cs diff --git a/src/StellaOps.Concelier.Connector.CertIn/CertInDependencyInjectionRoutine.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertIn/CertInDependencyInjectionRoutine.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertIn/CertInDependencyInjectionRoutine.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertIn/CertInDependencyInjectionRoutine.cs diff --git a/src/StellaOps.Concelier.Connector.CertIn/CertInServiceCollectionExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertIn/CertInServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertIn/CertInServiceCollectionExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertIn/CertInServiceCollectionExtensions.cs diff --git a/src/StellaOps.Concelier.Connector.CertIn/Configuration/CertInOptions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertIn/Configuration/CertInOptions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertIn/Configuration/CertInOptions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertIn/Configuration/CertInOptions.cs diff --git a/src/StellaOps.Concelier.Connector.CertIn/Internal/CertInAdvisoryDto.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertIn/Internal/CertInAdvisoryDto.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertIn/Internal/CertInAdvisoryDto.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertIn/Internal/CertInAdvisoryDto.cs diff --git a/src/StellaOps.Concelier.Connector.CertIn/Internal/CertInClient.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertIn/Internal/CertInClient.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertIn/Internal/CertInClient.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertIn/Internal/CertInClient.cs diff --git 
a/src/StellaOps.Concelier.Connector.CertIn/Internal/CertInCursor.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertIn/Internal/CertInCursor.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertIn/Internal/CertInCursor.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertIn/Internal/CertInCursor.cs diff --git a/src/StellaOps.Concelier.Connector.CertIn/Internal/CertInDetailParser.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertIn/Internal/CertInDetailParser.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertIn/Internal/CertInDetailParser.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertIn/Internal/CertInDetailParser.cs diff --git a/src/StellaOps.Concelier.Connector.CertIn/Internal/CertInListingItem.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertIn/Internal/CertInListingItem.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertIn/Internal/CertInListingItem.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertIn/Internal/CertInListingItem.cs diff --git a/src/StellaOps.Concelier.Connector.CertIn/Jobs.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertIn/Jobs.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertIn/Jobs.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertIn/Jobs.cs diff --git a/src/StellaOps.Concelier.Connector.CertIn/StellaOps.Concelier.Connector.CertIn.csproj b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertIn/StellaOps.Concelier.Connector.CertIn.csproj similarity index 79% rename from src/StellaOps.Concelier.Connector.CertIn/StellaOps.Concelier.Connector.CertIn.csproj rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertIn/StellaOps.Concelier.Connector.CertIn.csproj index 44c74bcc..5bd20434 100644 --- a/src/StellaOps.Concelier.Connector.CertIn/StellaOps.Concelier.Connector.CertIn.csproj +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertIn/StellaOps.Concelier.Connector.CertIn.csproj @@ -1,16 +1,17 @@ -<Project Sdk="Microsoft.NET.Sdk"> - - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - - <ItemGroup> - <ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" /> - - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - </ItemGroup> -</Project> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + + <ItemGroup> + <ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" /> + + <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.CertIn/TASKS.md 
b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertIn/TASKS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.CertIn/TASKS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.CertIn/TASKS.md diff --git a/src/StellaOps.Concelier.Connector.Common/AGENTS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/AGENTS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.Common/AGENTS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/AGENTS.md diff --git a/src/StellaOps.Concelier.Connector.Common/Cursors/PaginationPlanner.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Cursors/PaginationPlanner.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Common/Cursors/PaginationPlanner.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Cursors/PaginationPlanner.cs diff --git a/src/StellaOps.Concelier.Connector.Common/Cursors/TimeWindowCursorOptions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Cursors/TimeWindowCursorOptions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Common/Cursors/TimeWindowCursorOptions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Cursors/TimeWindowCursorOptions.cs diff --git a/src/StellaOps.Concelier.Connector.Common/Cursors/TimeWindowCursorPlanner.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Cursors/TimeWindowCursorPlanner.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Common/Cursors/TimeWindowCursorPlanner.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Cursors/TimeWindowCursorPlanner.cs diff --git a/src/StellaOps.Concelier.Connector.Common/Cursors/TimeWindowCursorState.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Cursors/TimeWindowCursorState.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Common/Cursors/TimeWindowCursorState.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Cursors/TimeWindowCursorState.cs diff --git a/src/StellaOps.Concelier.Connector.Common/DocumentStatuses.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/DocumentStatuses.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Common/DocumentStatuses.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/DocumentStatuses.cs diff --git a/src/StellaOps.Concelier.Connector.Common/Fetch/CryptoJitterSource.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Fetch/CryptoJitterSource.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Common/Fetch/CryptoJitterSource.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Fetch/CryptoJitterSource.cs diff --git a/src/StellaOps.Concelier.Connector.Common/Fetch/IJitterSource.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Fetch/IJitterSource.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Common/Fetch/IJitterSource.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Fetch/IJitterSource.cs diff --git a/src/StellaOps.Concelier.Connector.Common/Fetch/RawDocumentStorage.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Fetch/RawDocumentStorage.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Common/Fetch/RawDocumentStorage.cs rename to 
src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Fetch/RawDocumentStorage.cs diff --git a/src/StellaOps.Concelier.Connector.Common/Fetch/SourceFetchContentResult.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Fetch/SourceFetchContentResult.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Common/Fetch/SourceFetchContentResult.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Fetch/SourceFetchContentResult.cs diff --git a/src/StellaOps.Concelier.Connector.Common/Fetch/SourceFetchRequest.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Fetch/SourceFetchRequest.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Common/Fetch/SourceFetchRequest.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Fetch/SourceFetchRequest.cs diff --git a/src/StellaOps.Concelier.Connector.Common/Fetch/SourceFetchResult.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Fetch/SourceFetchResult.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Common/Fetch/SourceFetchResult.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Fetch/SourceFetchResult.cs diff --git a/src/StellaOps.Concelier.Connector.Common/Fetch/SourceFetchService.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Fetch/SourceFetchService.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Common/Fetch/SourceFetchService.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Fetch/SourceFetchService.cs diff --git a/src/StellaOps.Concelier.Connector.Common/Fetch/SourceRetryPolicy.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Fetch/SourceRetryPolicy.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Common/Fetch/SourceRetryPolicy.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Fetch/SourceRetryPolicy.cs diff --git a/src/StellaOps.Concelier.Connector.Common/Html/HtmlContentSanitizer.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Html/HtmlContentSanitizer.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Common/Html/HtmlContentSanitizer.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Html/HtmlContentSanitizer.cs diff --git a/src/StellaOps.Concelier.Connector.Common/Http/AllowlistedHttpMessageHandler.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Http/AllowlistedHttpMessageHandler.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Common/Http/AllowlistedHttpMessageHandler.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Http/AllowlistedHttpMessageHandler.cs diff --git a/src/StellaOps.Concelier.Connector.Common/Http/ServiceCollectionExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Http/ServiceCollectionExtensions.cs similarity index 97% rename from src/StellaOps.Concelier.Connector.Common/Http/ServiceCollectionExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Http/ServiceCollectionExtensions.cs index 51f4f0da..19ee84ee 100644 --- a/src/StellaOps.Concelier.Connector.Common/Http/ServiceCollectionExtensions.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Http/ServiceCollectionExtensions.cs @@ -1,206 +1,206 @@ -using System.Net; -using System.Net.Http; -using System.Net.Security; -using 
System.Security.Cryptography.X509Certificates; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Options; -using StellaOps.Concelier.Connector.Common.Xml; -using StellaOps.Concelier.Core.Aoc; -using StellaOps.Concelier.Core.Linksets; - -namespace StellaOps.Concelier.Connector.Common.Http; - -public static class ServiceCollectionExtensions -{ - /// <summary> - /// Registers a named HTTP client configured for a source connector with allowlisted hosts and sensible defaults. - /// </summary> - public static IHttpClientBuilder AddSourceHttpClient(this IServiceCollection services, string name, Action<SourceHttpClientOptions> configure) - => services.AddSourceHttpClient(name, (_, options) => configure(options)); - - public static IHttpClientBuilder AddSourceHttpClient(this IServiceCollection services, string name, Action<IServiceProvider, SourceHttpClientOptions> configure) - { - ArgumentNullException.ThrowIfNull(services); - ArgumentException.ThrowIfNullOrEmpty(name); - ArgumentNullException.ThrowIfNull(configure); - - services.AddOptions<SourceHttpClientOptions>(name).Configure<IServiceProvider>((options, sp) => - { - configure(sp, options); - SourceHttpClientConfigurationBinder.Apply(sp, name, options); - }); - - return services - .AddHttpClient(name) - .ConfigureHttpClient((sp, client) => - { - var options = sp.GetRequiredService<IOptionsMonitor<SourceHttpClientOptions>>().Get(name); - - if (options.BaseAddress is not null) - { - client.BaseAddress = options.BaseAddress; - } - - client.Timeout = options.Timeout; - client.DefaultRequestHeaders.UserAgent.Clear(); - client.DefaultRequestHeaders.UserAgent.ParseAdd(options.UserAgent); - client.DefaultRequestVersion = options.RequestVersion; - client.DefaultVersionPolicy = options.VersionPolicy; - - foreach (var header in options.DefaultRequestHeaders) - { - client.DefaultRequestHeaders.TryAddWithoutValidation(header.Key, header.Value); - } - }) - .ConfigurePrimaryHttpMessageHandler((sp) => - { - var options = sp.GetRequiredService<IOptionsMonitor<SourceHttpClientOptions>>().Get(name).Clone(); - var handler = new SocketsHttpHandler - { - AllowAutoRedirect = options.AllowAutoRedirect, - AutomaticDecompression = DecompressionMethods.All, - EnableMultipleHttp2Connections = options.EnableMultipleHttp2Connections, - }; - options.ConfigureHandler?.Invoke(handler); - ApplyProxySettings(handler, options); - - if (options.ServerCertificateCustomValidation is not null) - { - handler.SslOptions.RemoteCertificateValidationCallback = (_, certificate, chain, sslPolicyErrors) => - { - X509Certificate2? certToValidate = certificate as X509Certificate2; - X509Certificate2? disposable = null; - if (certToValidate is null && certificate is not null) - { - disposable = X509CertificateLoader.LoadCertificate(certificate.Export(X509ContentType.Cert)); - certToValidate = disposable; - } - - try - { - return options.ServerCertificateCustomValidation(certToValidate, chain, sslPolicyErrors); - } - finally - { - disposable?.Dispose(); - } - }; - } - else if (options.TrustedRootCertificates.Count > 0 && handler.SslOptions.RemoteCertificateValidationCallback is null) - { - handler.SslOptions.RemoteCertificateValidationCallback = (_, certificate, chain, errors) => - { - if (errors == SslPolicyErrors.None) - { - return true; - } - - if (certificate is null) - { - return false; - } - - X509Certificate2? certToValidate = certificate as X509Certificate2; - X509Certificate2? 
disposable = null; - var trustedRootCopies = new X509Certificate2Collection(); - try - { - if (certToValidate is null) - { - disposable = X509CertificateLoader.LoadCertificate(certificate.Export(X509ContentType.Cert)); - certToValidate = disposable; - } - - foreach (var root in options.TrustedRootCertificates) - { - trustedRootCopies.Add(new X509Certificate2(root.RawData)); - } - - using var customChain = new X509Chain(); - customChain.ChainPolicy.TrustMode = X509ChainTrustMode.CustomRootTrust; - customChain.ChainPolicy.CustomTrustStore.Clear(); - customChain.ChainPolicy.CustomTrustStore.AddRange(trustedRootCopies); - customChain.ChainPolicy.RevocationMode = X509RevocationMode.NoCheck; - customChain.ChainPolicy.VerificationFlags = X509VerificationFlags.NoFlag; - - if (chain is not null) - { - foreach (var element in chain.ChainElements) - { - customChain.ChainPolicy.ExtraStore.Add(element.Certificate); - } - } - - return certToValidate is not null && customChain.Build(certToValidate); - } - finally - { - foreach (X509Certificate2 root in trustedRootCopies) - { - root.Dispose(); - } - - disposable?.Dispose(); - } - }; - } - - return handler; - }) - .AddHttpMessageHandler(sp => - { - var options = sp.GetRequiredService<IOptionsMonitor<SourceHttpClientOptions>>().Get(name).Clone(); - return new AllowlistedHttpMessageHandler(options); - }); - } - - /// <summary> - /// Registers shared helpers used by source connectors. - /// </summary> - public static IServiceCollection AddSourceCommon(this IServiceCollection services) - { - ArgumentNullException.ThrowIfNull(services); - - services.AddSingleton<Json.JsonSchemaValidator>(); - services.AddSingleton<Json.IJsonSchemaValidator>(sp => sp.GetRequiredService<Json.JsonSchemaValidator>()); - services.AddSingleton<XmlSchemaValidator>(); - services.AddSingleton<IXmlSchemaValidator>(sp => sp.GetRequiredService<XmlSchemaValidator>()); - services.AddSingleton<Fetch.IJitterSource, Fetch.CryptoJitterSource>(); - services.AddConcelierAocGuards(); - services.AddConcelierLinksetMappers(); - services.AddSingleton<Fetch.RawDocumentStorage>(); - services.AddSingleton<Fetch.SourceFetchService>(); - - return services; - } - - private static void ApplyProxySettings(SocketsHttpHandler handler, SourceHttpClientOptions options) - { - if (options.ProxyAddress is null) - { - return; - } - - var proxy = new WebProxy(options.ProxyAddress) - { - BypassProxyOnLocal = options.ProxyBypassOnLocal, - UseDefaultCredentials = options.ProxyUseDefaultCredentials, - }; - - if (options.ProxyBypassList.Count > 0) - { - proxy.BypassList = options.ProxyBypassList.ToArray(); - } - - if (!options.ProxyUseDefaultCredentials - && !string.IsNullOrWhiteSpace(options.ProxyUsername)) - { - proxy.Credentials = new NetworkCredential( - options.ProxyUsername, - options.ProxyPassword ?? string.Empty); - } - - handler.Proxy = proxy; - handler.UseProxy = true; - } -} +using System.Net; +using System.Net.Http; +using System.Net.Security; +using System.Security.Cryptography.X509Certificates; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Options; +using StellaOps.Concelier.Connector.Common.Xml; +using StellaOps.Concelier.Core.Aoc; +using StellaOps.Concelier.Core.Linksets; + +namespace StellaOps.Concelier.Connector.Common.Http; + +public static class ServiceCollectionExtensions +{ + /// <summary> + /// Registers a named HTTP client configured for a source connector with allowlisted hosts and sensible defaults. 
+    /// </summary>
+    public static IHttpClientBuilder AddSourceHttpClient(this IServiceCollection services, string name, Action<SourceHttpClientOptions> configure)
+        => services.AddSourceHttpClient(name, (_, options) => configure(options));
+
+    public static IHttpClientBuilder AddSourceHttpClient(this IServiceCollection services, string name, Action<IServiceProvider, SourceHttpClientOptions> configure)
+    {
+        ArgumentNullException.ThrowIfNull(services);
+        ArgumentException.ThrowIfNullOrEmpty(name);
+        ArgumentNullException.ThrowIfNull(configure);
+
+        services.AddOptions<SourceHttpClientOptions>(name).Configure<IServiceProvider>((options, sp) =>
+        {
+            configure(sp, options);
+            SourceHttpClientConfigurationBinder.Apply(sp, name, options);
+        });
+
+        return services
+            .AddHttpClient(name)
+            .ConfigureHttpClient((sp, client) =>
+            {
+                var options = sp.GetRequiredService<IOptionsMonitor<SourceHttpClientOptions>>().Get(name);
+
+                if (options.BaseAddress is not null)
+                {
+                    client.BaseAddress = options.BaseAddress;
+                }
+
+                client.Timeout = options.Timeout;
+                client.DefaultRequestHeaders.UserAgent.Clear();
+                client.DefaultRequestHeaders.UserAgent.ParseAdd(options.UserAgent);
+                client.DefaultRequestVersion = options.RequestVersion;
+                client.DefaultVersionPolicy = options.VersionPolicy;
+
+                foreach (var header in options.DefaultRequestHeaders)
+                {
+                    client.DefaultRequestHeaders.TryAddWithoutValidation(header.Key, header.Value);
+                }
+            })
+            .ConfigurePrimaryHttpMessageHandler((sp) =>
+            {
+                var options = sp.GetRequiredService<IOptionsMonitor<SourceHttpClientOptions>>().Get(name).Clone();
+                var handler = new SocketsHttpHandler
+                {
+                    AllowAutoRedirect = options.AllowAutoRedirect,
+                    AutomaticDecompression = DecompressionMethods.All,
+                    EnableMultipleHttp2Connections = options.EnableMultipleHttp2Connections,
+                };
+                options.ConfigureHandler?.Invoke(handler);
+                ApplyProxySettings(handler, options);
+
+                if (options.ServerCertificateCustomValidation is not null)
+                {
+                    handler.SslOptions.RemoteCertificateValidationCallback = (_, certificate, chain, sslPolicyErrors) =>
+                    {
+                        X509Certificate2? certToValidate = certificate as X509Certificate2;
+                        X509Certificate2? disposable = null;
+                        if (certToValidate is null && certificate is not null)
+                        {
+                            disposable = X509CertificateLoader.LoadCertificate(certificate.Export(X509ContentType.Cert));
+                            certToValidate = disposable;
+                        }
+
+                        try
+                        {
+                            return options.ServerCertificateCustomValidation(certToValidate, chain, sslPolicyErrors);
+                        }
+                        finally
+                        {
+                            disposable?.Dispose();
+                        }
+                    };
+                }
+                else if (options.TrustedRootCertificates.Count > 0 && handler.SslOptions.RemoteCertificateValidationCallback is null)
+                {
+                    handler.SslOptions.RemoteCertificateValidationCallback = (_, certificate, chain, errors) =>
+                    {
+                        if (errors == SslPolicyErrors.None)
+                        {
+                            return true;
+                        }
+
+                        if (certificate is null)
+                        {
+                            return false;
+                        }
+
+                        X509Certificate2? certToValidate = certificate as X509Certificate2;
+                        X509Certificate2? disposable = null;
+                        var trustedRootCopies = new X509Certificate2Collection();
+                        try
+                        {
+                            if (certToValidate is null)
+                            {
+                                disposable = X509CertificateLoader.LoadCertificate(certificate.Export(X509ContentType.Cert));
+                                certToValidate = disposable;
+                            }
+
+                            foreach (var root in options.TrustedRootCertificates)
+                            {
+                                trustedRootCopies.Add(new X509Certificate2(root.RawData));
+                            }
+
+                            using var customChain = new X509Chain();
+                            customChain.ChainPolicy.TrustMode = X509ChainTrustMode.CustomRootTrust;
+                            customChain.ChainPolicy.CustomTrustStore.Clear();
+                            customChain.ChainPolicy.CustomTrustStore.AddRange(trustedRootCopies);
+                            customChain.ChainPolicy.RevocationMode = X509RevocationMode.NoCheck;
+                            customChain.ChainPolicy.VerificationFlags = X509VerificationFlags.NoFlag;
+
+                            if (chain is not null)
+                            {
+                                foreach (var element in chain.ChainElements)
+                                {
+                                    customChain.ChainPolicy.ExtraStore.Add(element.Certificate);
+                                }
+                            }
+
+                            return certToValidate is not null && customChain.Build(certToValidate);
+                        }
+                        finally
+                        {
+                            foreach (X509Certificate2 root in trustedRootCopies)
+                            {
+                                root.Dispose();
+                            }
+
+                            disposable?.Dispose();
+                        }
+                    };
+                }
+
+                return handler;
+            })
+            .AddHttpMessageHandler(sp =>
+            {
+                var options = sp.GetRequiredService<IOptionsMonitor<SourceHttpClientOptions>>().Get(name).Clone();
+                return new AllowlistedHttpMessageHandler(options);
+            });
+    }
+
+    /// <summary>
+    /// Registers shared helpers used by source connectors.
+    /// </summary>
+    public static IServiceCollection AddSourceCommon(this IServiceCollection services)
+    {
+        ArgumentNullException.ThrowIfNull(services);
+
+        services.AddSingleton<Json.JsonSchemaValidator>();
+        services.AddSingleton<Json.IJsonSchemaValidator>(sp => sp.GetRequiredService<Json.JsonSchemaValidator>());
+        services.AddSingleton<XmlSchemaValidator>();
+        services.AddSingleton<IXmlSchemaValidator>(sp => sp.GetRequiredService<XmlSchemaValidator>());
+        services.AddSingleton<Fetch.IJitterSource, Fetch.CryptoJitterSource>();
+        services.AddConcelierAocGuards();
+        services.AddConcelierLinksetMappers();
+        services.AddSingleton<Fetch.RawDocumentStorage>();
+        services.AddSingleton<Fetch.SourceFetchService>();
+
+        return services;
+    }
+
+    private static void ApplyProxySettings(SocketsHttpHandler handler, SourceHttpClientOptions options)
+    {
+        if (options.ProxyAddress is null)
+        {
+            return;
+        }
+
+        var proxy = new WebProxy(options.ProxyAddress)
+        {
+            BypassProxyOnLocal = options.ProxyBypassOnLocal,
+            UseDefaultCredentials = options.ProxyUseDefaultCredentials,
+        };
+
+        if (options.ProxyBypassList.Count > 0)
+        {
+            proxy.BypassList = options.ProxyBypassList.ToArray();
+        }
+
+        if (!options.ProxyUseDefaultCredentials
+            && !string.IsNullOrWhiteSpace(options.ProxyUsername))
+        {
+            proxy.Credentials = new NetworkCredential(
+                options.ProxyUsername,
+                options.ProxyPassword ?? string.Empty);
+        }
+
+        handler.Proxy = proxy;
+        handler.UseProxy = true;
+    }
+}
diff --git a/src/StellaOps.Concelier.Connector.Common/Http/SourceHttpClientConfigurationBinder.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Http/SourceHttpClientConfigurationBinder.cs
similarity index 100%
rename from src/StellaOps.Concelier.Connector.Common/Http/SourceHttpClientConfigurationBinder.cs
rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Http/SourceHttpClientConfigurationBinder.cs
diff --git a/src/StellaOps.Concelier.Connector.Common/Http/SourceHttpClientOptions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Http/SourceHttpClientOptions.cs
similarity index 100%
rename from src/StellaOps.Concelier.Connector.Common/Http/SourceHttpClientOptions.cs
rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Http/SourceHttpClientOptions.cs
diff --git a/src/StellaOps.Concelier.Connector.Common/Json/IJsonSchemaValidator.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Json/IJsonSchemaValidator.cs
similarity index 100%
rename from src/StellaOps.Concelier.Connector.Common/Json/IJsonSchemaValidator.cs
rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Json/IJsonSchemaValidator.cs
diff --git a/src/StellaOps.Concelier.Connector.Common/Json/JsonSchemaValidationError.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Json/JsonSchemaValidationError.cs
similarity index 100%
rename from src/StellaOps.Concelier.Connector.Common/Json/JsonSchemaValidationError.cs
rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Json/JsonSchemaValidationError.cs
diff --git a/src/StellaOps.Concelier.Connector.Common/Json/JsonSchemaValidationException.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Json/JsonSchemaValidationException.cs
similarity index 100%
rename from src/StellaOps.Concelier.Connector.Common/Json/JsonSchemaValidationException.cs
rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Json/JsonSchemaValidationException.cs
diff --git a/src/StellaOps.Concelier.Connector.Common/Json/JsonSchemaValidator.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Json/JsonSchemaValidator.cs
similarity index 100%
rename from src/StellaOps.Concelier.Connector.Common/Json/JsonSchemaValidator.cs
rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Json/JsonSchemaValidator.cs
diff --git a/src/StellaOps.Concelier.Connector.Common/Packages/PackageCoordinateHelper.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Packages/PackageCoordinateHelper.cs
similarity index 100%
rename from src/StellaOps.Concelier.Connector.Common/Packages/PackageCoordinateHelper.cs
rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Packages/PackageCoordinateHelper.cs
diff --git a/src/StellaOps.Concelier.Connector.Common/Pdf/PdfTextExtractor.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Pdf/PdfTextExtractor.cs
similarity index 100%
rename from src/StellaOps.Concelier.Connector.Common/Pdf/PdfTextExtractor.cs
rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Pdf/PdfTextExtractor.cs
diff --git a/src/StellaOps.Concelier.Connector.Common/Properties/AssemblyInfo.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Properties/AssemblyInfo.cs
similarity index 100%
rename from src/StellaOps.Concelier.Connector.Common/Properties/AssemblyInfo.cs
rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Properties/AssemblyInfo.cs diff --git a/src/StellaOps.Concelier.Connector.Common/State/SourceStateSeedModels.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/State/SourceStateSeedModels.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Common/State/SourceStateSeedModels.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/State/SourceStateSeedModels.cs diff --git a/src/StellaOps.Concelier.Connector.Common/State/SourceStateSeedProcessor.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/State/SourceStateSeedProcessor.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Common/State/SourceStateSeedProcessor.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/State/SourceStateSeedProcessor.cs diff --git a/src/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj similarity index 87% rename from src/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj index 27d622a1..68e130c0 100644 --- a/src/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -15,7 +16,7 @@ <ItemGroup> <ProjectReference Include="..\StellaOps.Concelier.Storage.Mongo\StellaOps.Concelier.Storage.Mongo.csproj" /> <ProjectReference Include="..\StellaOps.Concelier.Normalization\StellaOps.Concelier.Normalization.csproj" /> - <ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" /> + <ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" /> <ProjectReference Include="..\StellaOps.Concelier.Core\StellaOps.Concelier.Core.csproj" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Common/TASKS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/TASKS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.Common/TASKS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/TASKS.md diff --git a/src/StellaOps.Concelier.Connector.Common/Telemetry/SourceDiagnostics.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Telemetry/SourceDiagnostics.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Common/Telemetry/SourceDiagnostics.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Telemetry/SourceDiagnostics.cs diff --git a/src/StellaOps.Concelier.Connector.Common/Testing/CannedHttpMessageHandler.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Testing/CannedHttpMessageHandler.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Common/Testing/CannedHttpMessageHandler.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Testing/CannedHttpMessageHandler.cs diff --git a/src/StellaOps.Concelier.Connector.Common/Url/UrlNormalizer.cs 
b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Url/UrlNormalizer.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Common/Url/UrlNormalizer.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Url/UrlNormalizer.cs diff --git a/src/StellaOps.Concelier.Connector.Common/Xml/IXmlSchemaValidator.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Xml/IXmlSchemaValidator.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Common/Xml/IXmlSchemaValidator.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Xml/IXmlSchemaValidator.cs diff --git a/src/StellaOps.Concelier.Connector.Common/Xml/XmlSchemaValidationError.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Xml/XmlSchemaValidationError.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Common/Xml/XmlSchemaValidationError.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Xml/XmlSchemaValidationError.cs diff --git a/src/StellaOps.Concelier.Connector.Common/Xml/XmlSchemaValidationException.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Xml/XmlSchemaValidationException.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Common/Xml/XmlSchemaValidationException.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Xml/XmlSchemaValidationException.cs diff --git a/src/StellaOps.Concelier.Connector.Common/Xml/XmlSchemaValidator.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Xml/XmlSchemaValidator.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Common/Xml/XmlSchemaValidator.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Common/Xml/XmlSchemaValidator.cs diff --git a/src/StellaOps.Concelier.Connector.Cve/AGENTS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cve/AGENTS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.Cve/AGENTS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cve/AGENTS.md diff --git a/src/StellaOps.Concelier.Connector.Cve/Configuration/CveOptions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cve/Configuration/CveOptions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Cve/Configuration/CveOptions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cve/Configuration/CveOptions.cs diff --git a/src/StellaOps.Concelier.Connector.Cve/CveConnector.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cve/CveConnector.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Cve/CveConnector.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cve/CveConnector.cs diff --git a/src/StellaOps.Concelier.Connector.Cve/CveConnectorPlugin.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cve/CveConnectorPlugin.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Cve/CveConnectorPlugin.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cve/CveConnectorPlugin.cs diff --git a/src/StellaOps.Concelier.Connector.Cve/CveDependencyInjectionRoutine.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cve/CveDependencyInjectionRoutine.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Cve/CveDependencyInjectionRoutine.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cve/CveDependencyInjectionRoutine.cs diff --git 
a/src/StellaOps.Concelier.Connector.Cve/CveServiceCollectionExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cve/CveServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Cve/CveServiceCollectionExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cve/CveServiceCollectionExtensions.cs diff --git a/src/StellaOps.Concelier.Connector.Cve/Internal/CveCursor.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cve/Internal/CveCursor.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Cve/Internal/CveCursor.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cve/Internal/CveCursor.cs diff --git a/src/StellaOps.Concelier.Connector.Cve/Internal/CveDiagnostics.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cve/Internal/CveDiagnostics.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Cve/Internal/CveDiagnostics.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cve/Internal/CveDiagnostics.cs diff --git a/src/StellaOps.Concelier.Connector.Cve/Internal/CveListParser.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cve/Internal/CveListParser.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Cve/Internal/CveListParser.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cve/Internal/CveListParser.cs diff --git a/src/StellaOps.Concelier.Connector.Cve/Internal/CveMapper.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cve/Internal/CveMapper.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Cve/Internal/CveMapper.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cve/Internal/CveMapper.cs diff --git a/src/StellaOps.Concelier.Connector.Cve/Internal/CveRecordDto.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cve/Internal/CveRecordDto.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Cve/Internal/CveRecordDto.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cve/Internal/CveRecordDto.cs diff --git a/src/StellaOps.Concelier.Connector.Cve/Internal/CveRecordParser.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cve/Internal/CveRecordParser.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Cve/Internal/CveRecordParser.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cve/Internal/CveRecordParser.cs diff --git a/src/StellaOps.Concelier.Connector.Cve/Jobs.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cve/Jobs.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Cve/Jobs.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cve/Jobs.cs diff --git a/src/StellaOps.Concelier.Connector.Cve/StellaOps.Concelier.Connector.Cve.csproj b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cve/StellaOps.Concelier.Connector.Cve.csproj similarity index 75% rename from src/StellaOps.Concelier.Connector.Cve/StellaOps.Concelier.Connector.Cve.csproj rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cve/StellaOps.Concelier.Connector.Cve.csproj index d97e92d3..bc57abd1 100644 --- a/src/StellaOps.Concelier.Connector.Cve/StellaOps.Concelier.Connector.Cve.csproj +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cve/StellaOps.Concelier.Connector.Cve.csproj @@ -1,16 +1,16 @@ -<Project Sdk="Microsoft.NET.Sdk"> - - <PropertyGroup> - 
<TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - - <ItemGroup> - <ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" /> - - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - </ItemGroup> -</Project> - +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + + <ItemGroup> + <ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" /> + + <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Cve/TASKS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cve/TASKS.md similarity index 94% rename from src/StellaOps.Concelier.Connector.Cve/TASKS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cve/TASKS.md index 94f5041f..5188b09d 100644 --- a/src/StellaOps.Concelier.Connector.Cve/TASKS.md +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Cve/TASKS.md @@ -9,4 +9,4 @@ |Observability & docs|DevEx|Docs|**DONE (2025-10-10)** – Diagnostics meter (`cve.fetch.*`, etc.) wired; options/usage documented via `CveServiceCollectionExtensions`.| |Operator rollout playbook|BE-Conn-CVE, Ops|Docs|**DONE (2025-10-12)** – Refreshed `docs/ops/concelier-cve-kev-operations.md` with credential checklist, smoke book, PromQL guardrails, and linked Grafana pack (`docs/ops/concelier-cve-kev-grafana-dashboard.json`).| |Live smoke & monitoring|QA, BE-Conn-CVE|WebService, Observability|**DONE (2025-10-15)** – Executed connector harness smoke using CVE Services sample window (CVE-2024-0001), confirmed fetch/parse/map telemetry (`cve.fetch.*`, `cve.map.success`) all incremented once, and archived the summary log + Grafana import guidance in `docs/ops/concelier-cve-kev-operations.md` (“Staging smoke 2025-10-15”).| -|FEEDCONN-CVE-02-003 Normalized versions rollout|BE-Conn-CVE|Models `FEEDMODELS-SCHEMA-01-003`, Normalization playbook|**DONE (2025-10-12)** – Confirmed SemVer primitives map to normalized rules with `cve:{cveId}:{identifier}` notes and refreshed snapshots; `dotnet test src/StellaOps.Concelier.Connector.Cve.Tests` passes on net10 preview.| +|FEEDCONN-CVE-02-003 Normalized versions rollout|BE-Conn-CVE|Models `FEEDMODELS-SCHEMA-01-003`, Normalization playbook|**DONE (2025-10-12)** – Confirmed SemVer primitives map to normalized rules with `cve:{cveId}:{identifier}` notes and refreshed snapshots; `dotnet test src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Cve.Tests` passes on net10 preview.| diff --git a/src/StellaOps.Concelier.Connector.Distro.Debian/AssemblyInfo.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Debian/AssemblyInfo.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Debian/AssemblyInfo.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Debian/AssemblyInfo.cs diff --git 
a/src/StellaOps.Concelier.Connector.Distro.Debian/Configuration/DebianOptions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Debian/Configuration/DebianOptions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Debian/Configuration/DebianOptions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Debian/Configuration/DebianOptions.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.Debian/DebianConnector.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Debian/DebianConnector.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Debian/DebianConnector.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Debian/DebianConnector.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.Debian/DebianConnectorPlugin.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Debian/DebianConnectorPlugin.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Debian/DebianConnectorPlugin.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Debian/DebianConnectorPlugin.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.Debian/DebianDependencyInjectionRoutine.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Debian/DebianDependencyInjectionRoutine.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Debian/DebianDependencyInjectionRoutine.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Debian/DebianDependencyInjectionRoutine.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.Debian/DebianServiceCollectionExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Debian/DebianServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Debian/DebianServiceCollectionExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Debian/DebianServiceCollectionExtensions.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianAdvisoryDto.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianAdvisoryDto.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianAdvisoryDto.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianAdvisoryDto.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianCursor.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianCursor.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianCursor.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianCursor.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianDetailMetadata.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianDetailMetadata.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianDetailMetadata.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianDetailMetadata.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianFetchCacheEntry.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianFetchCacheEntry.cs similarity index 100% rename from 
src/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianFetchCacheEntry.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianFetchCacheEntry.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianHtmlParser.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianHtmlParser.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianHtmlParser.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianHtmlParser.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianListEntry.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianListEntry.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianListEntry.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianListEntry.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianListParser.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianListParser.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianListParser.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianListParser.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianMapper.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianMapper.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianMapper.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Debian/Internal/DebianMapper.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.Debian/Jobs.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Debian/Jobs.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Debian/Jobs.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Debian/Jobs.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.Debian/StellaOps.Concelier.Connector.Distro.Debian.csproj b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Debian/StellaOps.Concelier.Connector.Distro.Debian.csproj similarity index 82% rename from src/StellaOps.Concelier.Connector.Distro.Debian/StellaOps.Concelier.Connector.Distro.Debian.csproj rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Debian/StellaOps.Concelier.Connector.Distro.Debian.csproj index 23396f92..27d49ab2 100644 --- a/src/StellaOps.Concelier.Connector.Distro.Debian/StellaOps.Concelier.Connector.Distro.Debian.csproj +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Debian/StellaOps.Concelier.Connector.Distro.Debian.csproj @@ -1,17 +1,18 @@ -<Project Sdk="Microsoft.NET.Sdk"> - - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - - <ItemGroup> - <ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" /> - - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" /> - 
<ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - </ItemGroup> -</Project> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + + <ItemGroup> + <ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" /> + + <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Distro.RedHat/AGENTS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.RedHat/AGENTS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.RedHat/AGENTS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.RedHat/AGENTS.md diff --git a/src/StellaOps.Concelier.Connector.Distro.RedHat/CONFLICT_RESOLVER_NOTES.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.RedHat/CONFLICT_RESOLVER_NOTES.md similarity index 78% rename from src/StellaOps.Concelier.Connector.Distro.RedHat/CONFLICT_RESOLVER_NOTES.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.RedHat/CONFLICT_RESOLVER_NOTES.md index 9407b28e..463aa522 100644 --- a/src/StellaOps.Concelier.Connector.Distro.RedHat/CONFLICT_RESOLVER_NOTES.md +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.RedHat/CONFLICT_RESOLVER_NOTES.md @@ -1,25 +1,25 @@ -# RHSA Fixture Diffs for Conflict Resolver (Sprint 1) - -_Status date: 2025-10-11_ - -The Red Hat connector fixtures were re-baselined after the model helper rollout so that the conflict resolver receives the canonical payload shape expected for range reconciliation. - -## Key schema deltas - -- `affectedPackages[]` now emits the `type` field ahead of the identifier and always carries a `normalizedVersions` array (empty for NEVRA/CPE today) alongside existing `versionRanges`. -- All nested `provenance` objects (package ranges, statuses, advisory-level metadata, references) now serialize in canonical order – `source`, `kind`, `value`, `decisionReason`, `recordedAt`, `fieldMask` – to align with `AdvisoryProvenance` equality used by the conflict resolver. -- `decisionReason` is now present (null) on provenance payloads so future precedence decisions can annotate overrides without another fixture bump. - -## Impact on conflict resolver - -- Range merge logic must accept an optional `normalizedVersions` array even when it is empty; RPM reconciliation continues to rely on NEVRA primitives (`rangeKind: "nevra"`). -- Provenance comparisons should treat the new property ordering and `decisionReason` field as canonical; older snapshots that lacked these fields are obsolete. -- Advisory/reference provenance now matches the structure that merge emits, so deterministic hashing of resolver inputs will remain stable across connectors. 
- -## Updated goldens - -- `src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/rhsa-2025-0001.snapshot.json` -- `src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/rhsa-2025-0002.snapshot.json` -- `src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/rhsa-2025-0003.snapshot.json` - -Keep these notes in sync with any future provenance or normalized-rule updates so the conflict resolver team can reason about fixture-driven regressions. +# RHSA Fixture Diffs for Conflict Resolver (Sprint 1) + +_Status date: 2025-10-11_ + +The Red Hat connector fixtures were re-baselined after the model helper rollout so that the conflict resolver receives the canonical payload shape expected for range reconciliation. + +## Key schema deltas + +- `affectedPackages[]` now emits the `type` field ahead of the identifier and always carries a `normalizedVersions` array (empty for NEVRA/CPE today) alongside existing `versionRanges`. +- All nested `provenance` objects (package ranges, statuses, advisory-level metadata, references) now serialize in canonical order – `source`, `kind`, `value`, `decisionReason`, `recordedAt`, `fieldMask` – to align with `AdvisoryProvenance` equality used by the conflict resolver. +- `decisionReason` is now present (null) on provenance payloads so future precedence decisions can annotate overrides without another fixture bump. + +## Impact on conflict resolver + +- Range merge logic must accept an optional `normalizedVersions` array even when it is empty; RPM reconciliation continues to rely on NEVRA primitives (`rangeKind: "nevra"`). +- Provenance comparisons should treat the new property ordering and `decisionReason` field as canonical; older snapshots that lacked these fields are obsolete. +- Advisory/reference provenance now matches the structure that merge emits, so deterministic hashing of resolver inputs will remain stable across connectors. + +## Updated goldens + +- `src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/rhsa-2025-0001.snapshot.json` +- `src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/rhsa-2025-0002.snapshot.json` +- `src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/rhsa-2025-0003.snapshot.json` + +Keep these notes in sync with any future provenance or normalized-rule updates so the conflict resolver team can reason about fixture-driven regressions. 
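Editorial aside (not part of the patch): the canonical provenance ordering called out in the notes above is easier to check against fixtures with the field list written out. In this minimal sketch only the property names and their order come from the notes; the record name, types, and nullability are assumptions, and the real `AdvisoryProvenance` model may differ in shape.

```csharp
using System;
using System.Collections.Generic;

// Canonical serialization order expected by the re-baselined RHSA fixtures.
public sealed record ProvenanceFixtureShape(
    string Source,                     // 1. source
    string Kind,                       // 2. kind
    string? Value,                     // 3. value
    string? DecisionReason,            // 4. decisionReason (serialized as null today)
    DateTimeOffset RecordedAt,         // 5. recordedAt
    IReadOnlyList<string> FieldMask);  // 6. fieldMask
```

Resolver-side equality and deterministic hashing should treat this ordering, together with the always-present (possibly empty) `normalizedVersions` array on `affectedPackages[]`, as the canonical input shape described in the notes.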
diff --git a/src/StellaOps.Concelier.Connector.Distro.RedHat/Configuration/RedHatOptions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.RedHat/Configuration/RedHatOptions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.RedHat/Configuration/RedHatOptions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.RedHat/Configuration/RedHatOptions.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.RedHat/Internal/Models/RedHatCsafModels.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.RedHat/Internal/Models/RedHatCsafModels.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.RedHat/Internal/Models/RedHatCsafModels.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.RedHat/Internal/Models/RedHatCsafModels.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.RedHat/Internal/RedHatCursor.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.RedHat/Internal/RedHatCursor.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.RedHat/Internal/RedHatCursor.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.RedHat/Internal/RedHatCursor.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.RedHat/Internal/RedHatMapper.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.RedHat/Internal/RedHatMapper.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.RedHat/Internal/RedHatMapper.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.RedHat/Internal/RedHatMapper.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.RedHat/Internal/RedHatSummaryItem.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.RedHat/Internal/RedHatSummaryItem.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.RedHat/Internal/RedHatSummaryItem.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.RedHat/Internal/RedHatSummaryItem.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.RedHat/Jobs.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.RedHat/Jobs.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.RedHat/Jobs.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.RedHat/Jobs.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.RedHat/Properties/AssemblyInfo.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.RedHat/Properties/AssemblyInfo.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.RedHat/Properties/AssemblyInfo.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.RedHat/Properties/AssemblyInfo.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.RedHat/RedHatConnector.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.RedHat/RedHatConnector.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.RedHat/RedHatConnector.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.RedHat/RedHatConnector.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.RedHat/RedHatConnectorPlugin.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.RedHat/RedHatConnectorPlugin.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.RedHat/RedHatConnectorPlugin.cs rename to 
src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.RedHat/RedHatConnectorPlugin.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.RedHat/RedHatDependencyInjectionRoutine.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.RedHat/RedHatDependencyInjectionRoutine.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.RedHat/RedHatDependencyInjectionRoutine.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.RedHat/RedHatDependencyInjectionRoutine.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.RedHat/RedHatServiceCollectionExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.RedHat/RedHatServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.RedHat/RedHatServiceCollectionExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.RedHat/RedHatServiceCollectionExtensions.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.RedHat/StellaOps.Concelier.Connector.Distro.RedHat.csproj b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.RedHat/StellaOps.Concelier.Connector.Distro.RedHat.csproj similarity index 71% rename from src/StellaOps.Concelier.Connector.Distro.RedHat/StellaOps.Concelier.Connector.Distro.RedHat.csproj rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.RedHat/StellaOps.Concelier.Connector.Distro.RedHat.csproj index f04c7094..2440fc2e 100644 --- a/src/StellaOps.Concelier.Connector.Distro.RedHat/StellaOps.Concelier.Connector.Distro.RedHat.csproj +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.RedHat/StellaOps.Concelier.Connector.Distro.RedHat.csproj @@ -1,15 +1,16 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" /> - <ProjectReference Include="../StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj" /> - <ProjectReference Include="..\StellaOps.Concelier.Connector.Common\StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="..\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="..\StellaOps.Concelier.Storage.Mongo\StellaOps.Concelier.Storage.Mongo.csproj" /> - <ProjectReference Include="..\StellaOps.Concelier.Normalization\StellaOps.Concelier.Normalization.csproj" /> - </ItemGroup> -</Project> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" /> + <ProjectReference Include="../../../__Libraries/StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj" /> + <ProjectReference Include="..\StellaOps.Concelier.Connector.Common\StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="..\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="..\StellaOps.Concelier.Storage.Mongo\StellaOps.Concelier.Storage.Mongo.csproj" /> + <ProjectReference Include="..\StellaOps.Concelier.Normalization\StellaOps.Concelier.Normalization.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file 
diff --git a/src/StellaOps.Concelier.Connector.Distro.RedHat/TASKS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.RedHat/TASKS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.RedHat/TASKS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.RedHat/TASKS.md diff --git a/src/StellaOps.Concelier.Connector.Distro.Suse/AssemblyInfo.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Suse/AssemblyInfo.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Suse/AssemblyInfo.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Suse/AssemblyInfo.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.Suse/Configuration/SuseOptions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Suse/Configuration/SuseOptions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Suse/Configuration/SuseOptions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Suse/Configuration/SuseOptions.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.Suse/Internal/SuseAdvisoryDto.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Suse/Internal/SuseAdvisoryDto.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Suse/Internal/SuseAdvisoryDto.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Suse/Internal/SuseAdvisoryDto.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.Suse/Internal/SuseChangeRecord.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Suse/Internal/SuseChangeRecord.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Suse/Internal/SuseChangeRecord.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Suse/Internal/SuseChangeRecord.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.Suse/Internal/SuseChangesParser.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Suse/Internal/SuseChangesParser.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Suse/Internal/SuseChangesParser.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Suse/Internal/SuseChangesParser.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.Suse/Internal/SuseCsafParser.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Suse/Internal/SuseCsafParser.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Suse/Internal/SuseCsafParser.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Suse/Internal/SuseCsafParser.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.Suse/Internal/SuseCursor.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Suse/Internal/SuseCursor.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Suse/Internal/SuseCursor.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Suse/Internal/SuseCursor.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.Suse/Internal/SuseFetchCacheEntry.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Suse/Internal/SuseFetchCacheEntry.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Suse/Internal/SuseFetchCacheEntry.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Suse/Internal/SuseFetchCacheEntry.cs diff --git 
a/src/StellaOps.Concelier.Connector.Distro.Suse/Internal/SuseMapper.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Suse/Internal/SuseMapper.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Suse/Internal/SuseMapper.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Suse/Internal/SuseMapper.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.Suse/Jobs.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Suse/Jobs.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Suse/Jobs.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Suse/Jobs.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.Suse/StellaOps.Concelier.Connector.Distro.Suse.csproj b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Suse/StellaOps.Concelier.Connector.Distro.Suse.csproj similarity index 82% rename from src/StellaOps.Concelier.Connector.Distro.Suse/StellaOps.Concelier.Connector.Distro.Suse.csproj rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Suse/StellaOps.Concelier.Connector.Distro.Suse.csproj index 23396f92..27d49ab2 100644 --- a/src/StellaOps.Concelier.Connector.Distro.Suse/StellaOps.Concelier.Connector.Distro.Suse.csproj +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Suse/StellaOps.Concelier.Connector.Distro.Suse.csproj @@ -1,17 +1,18 @@ -<Project Sdk="Microsoft.NET.Sdk"> - - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - - <ItemGroup> - <ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" /> - - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - </ItemGroup> -</Project> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + + <ItemGroup> + <ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" /> + + <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Distro.Suse/SuseConnector.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Suse/SuseConnector.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Suse/SuseConnector.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Suse/SuseConnector.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.Suse/SuseConnectorPlugin.cs 
b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Suse/SuseConnectorPlugin.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Suse/SuseConnectorPlugin.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Suse/SuseConnectorPlugin.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.Suse/SuseDependencyInjectionRoutine.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Suse/SuseDependencyInjectionRoutine.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Suse/SuseDependencyInjectionRoutine.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Suse/SuseDependencyInjectionRoutine.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.Suse/SuseServiceCollectionExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Suse/SuseServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Suse/SuseServiceCollectionExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Suse/SuseServiceCollectionExtensions.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.Ubuntu/Configuration/UbuntuOptions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Ubuntu/Configuration/UbuntuOptions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Ubuntu/Configuration/UbuntuOptions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Ubuntu/Configuration/UbuntuOptions.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.Ubuntu/Internal/UbuntuCursor.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Ubuntu/Internal/UbuntuCursor.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Ubuntu/Internal/UbuntuCursor.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Ubuntu/Internal/UbuntuCursor.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.Ubuntu/Internal/UbuntuFetchCacheEntry.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Ubuntu/Internal/UbuntuFetchCacheEntry.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Ubuntu/Internal/UbuntuFetchCacheEntry.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Ubuntu/Internal/UbuntuFetchCacheEntry.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.Ubuntu/Internal/UbuntuMapper.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Ubuntu/Internal/UbuntuMapper.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Ubuntu/Internal/UbuntuMapper.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Ubuntu/Internal/UbuntuMapper.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.Ubuntu/Internal/UbuntuNoticeDto.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Ubuntu/Internal/UbuntuNoticeDto.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Ubuntu/Internal/UbuntuNoticeDto.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Ubuntu/Internal/UbuntuNoticeDto.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.Ubuntu/Internal/UbuntuNoticeParser.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Ubuntu/Internal/UbuntuNoticeParser.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Ubuntu/Internal/UbuntuNoticeParser.cs rename to 
src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Ubuntu/Internal/UbuntuNoticeParser.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.Ubuntu/Jobs.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Ubuntu/Jobs.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Ubuntu/Jobs.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Ubuntu/Jobs.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.Ubuntu/StellaOps.Concelier.Connector.Distro.Ubuntu.csproj b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Ubuntu/StellaOps.Concelier.Connector.Distro.Ubuntu.csproj similarity index 82% rename from src/StellaOps.Concelier.Connector.Distro.Ubuntu/StellaOps.Concelier.Connector.Distro.Ubuntu.csproj rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Ubuntu/StellaOps.Concelier.Connector.Distro.Ubuntu.csproj index 23396f92..27d49ab2 100644 --- a/src/StellaOps.Concelier.Connector.Distro.Ubuntu/StellaOps.Concelier.Connector.Distro.Ubuntu.csproj +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Ubuntu/StellaOps.Concelier.Connector.Distro.Ubuntu.csproj @@ -1,17 +1,18 @@ -<Project Sdk="Microsoft.NET.Sdk"> - - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - - <ItemGroup> - <ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" /> - - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - </ItemGroup> -</Project> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + + <ItemGroup> + <ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" /> + + <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Distro.Ubuntu/TASKS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Ubuntu/TASKS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Ubuntu/TASKS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Ubuntu/TASKS.md diff --git a/src/StellaOps.Concelier.Connector.Distro.Ubuntu/UbuntuConnector.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Ubuntu/UbuntuConnector.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Ubuntu/UbuntuConnector.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Ubuntu/UbuntuConnector.cs diff --git 
a/src/StellaOps.Concelier.Connector.Distro.Ubuntu/UbuntuConnectorPlugin.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Ubuntu/UbuntuConnectorPlugin.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Ubuntu/UbuntuConnectorPlugin.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Ubuntu/UbuntuConnectorPlugin.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.Ubuntu/UbuntuDependencyInjectionRoutine.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Ubuntu/UbuntuDependencyInjectionRoutine.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Ubuntu/UbuntuDependencyInjectionRoutine.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Ubuntu/UbuntuDependencyInjectionRoutine.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.Ubuntu/UbuntuServiceCollectionExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Ubuntu/UbuntuServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Ubuntu/UbuntuServiceCollectionExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Distro.Ubuntu/UbuntuServiceCollectionExtensions.cs diff --git a/src/StellaOps.Concelier.Connector.Ghsa/AGENTS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ghsa/AGENTS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.Ghsa/AGENTS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ghsa/AGENTS.md diff --git a/src/StellaOps.Concelier.Connector.Ghsa/Configuration/GhsaOptions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ghsa/Configuration/GhsaOptions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ghsa/Configuration/GhsaOptions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ghsa/Configuration/GhsaOptions.cs diff --git a/src/StellaOps.Concelier.Connector.Ghsa/GhsaConnector.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ghsa/GhsaConnector.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ghsa/GhsaConnector.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ghsa/GhsaConnector.cs diff --git a/src/StellaOps.Concelier.Connector.Ghsa/GhsaConnectorPlugin.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ghsa/GhsaConnectorPlugin.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ghsa/GhsaConnectorPlugin.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ghsa/GhsaConnectorPlugin.cs diff --git a/src/StellaOps.Concelier.Connector.Ghsa/GhsaDependencyInjectionRoutine.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ghsa/GhsaDependencyInjectionRoutine.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ghsa/GhsaDependencyInjectionRoutine.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ghsa/GhsaDependencyInjectionRoutine.cs diff --git a/src/StellaOps.Concelier.Connector.Ghsa/GhsaServiceCollectionExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ghsa/GhsaServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ghsa/GhsaServiceCollectionExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ghsa/GhsaServiceCollectionExtensions.cs diff --git a/src/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaCursor.cs 
b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaCursor.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaCursor.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaCursor.cs diff --git a/src/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaDiagnostics.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaDiagnostics.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaDiagnostics.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaDiagnostics.cs diff --git a/src/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaListParser.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaListParser.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaListParser.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaListParser.cs diff --git a/src/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaMapper.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaMapper.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaMapper.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaMapper.cs diff --git a/src/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaRateLimitParser.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaRateLimitParser.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaRateLimitParser.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaRateLimitParser.cs diff --git a/src/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaRateLimitSnapshot.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaRateLimitSnapshot.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaRateLimitSnapshot.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaRateLimitSnapshot.cs diff --git a/src/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaRecordDto.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaRecordDto.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaRecordDto.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaRecordDto.cs diff --git a/src/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaRecordParser.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaRecordParser.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaRecordParser.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ghsa/Internal/GhsaRecordParser.cs diff --git a/src/StellaOps.Concelier.Connector.Ghsa/Jobs.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ghsa/Jobs.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ghsa/Jobs.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ghsa/Jobs.cs diff --git a/src/StellaOps.Concelier.Connector.Ghsa/Properties/AssemblyInfo.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ghsa/Properties/AssemblyInfo.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ghsa/Properties/AssemblyInfo.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ghsa/Properties/AssemblyInfo.cs diff --git 
a/src/StellaOps.Concelier.Connector.Ghsa/StellaOps.Concelier.Connector.Ghsa.csproj b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ghsa/StellaOps.Concelier.Connector.Ghsa.csproj similarity index 79% rename from src/StellaOps.Concelier.Connector.Ghsa/StellaOps.Concelier.Connector.Ghsa.csproj rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ghsa/StellaOps.Concelier.Connector.Ghsa.csproj index 71801984..488b885c 100644 --- a/src/StellaOps.Concelier.Connector.Ghsa/StellaOps.Concelier.Connector.Ghsa.csproj +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ghsa/StellaOps.Concelier.Connector.Ghsa.csproj @@ -1,17 +1,17 @@ -<Project Sdk="Microsoft.NET.Sdk"> - - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - - <ItemGroup> - <ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" /> - +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + + <ItemGroup> + <ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> <ProjectReference Include="../StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" /> </ItemGroup> -</Project> - +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Ghsa/TASKS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ghsa/TASKS.md similarity index 91% rename from src/StellaOps.Concelier.Connector.Ghsa/TASKS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ghsa/TASKS.md index 03723827..2847d98e 100644 --- a/src/StellaOps.Concelier.Connector.Ghsa/TASKS.md +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ghsa/TASKS.md @@ -9,11 +9,11 @@ |Telemetry & documentation|DevEx|Docs|**DONE (2025-10-10)** – Diagnostics meter (`ghsa.fetch.*`) wired; DI extension documents token/headers and job registrations.| |GitHub quota monitoring & retries|BE-Conn-GHSA, Observability|Source.Common|**DONE (2025-10-12)** – Rate-limit metrics/logs added, retry/backoff handles 403 secondary limits, and ops runbook documents dashboards + mitigation steps.| |Production credential & scheduler rollout|Ops, BE-Conn-GHSA|Docs, WebService|**DONE (2025-10-12)** – Scheduler defaults registered via `JobSchedulerBuilder`, credential provisioning documented (Compose/Helm samples), and staged backfill guidance captured in `docs/ops/concelier-ghsa-operations.md`.| -|FEEDCONN-GHSA-04-002 Conflict regression fixtures|BE-Conn-GHSA, QA|Merge `FEEDMERGE-ENGINE-04-001`|**DONE (2025-10-12)** – Added `conflict-ghsa.canonical.json` + `GhsaConflictFixtureTests`; SemVer ranges and credits align with merge precedence triple and shareable with QA. 
Validation: `dotnet test src/StellaOps.Concelier.Connector.Ghsa.Tests/StellaOps.Concelier.Connector.Ghsa.Tests.csproj --filter GhsaConflictFixtureTests`.| +|FEEDCONN-GHSA-04-002 Conflict regression fixtures|BE-Conn-GHSA, QA|Merge `FEEDMERGE-ENGINE-04-001`|**DONE (2025-10-12)** – Added `conflict-ghsa.canonical.json` + `GhsaConflictFixtureTests`; SemVer ranges and credits align with merge precedence triple and shareable with QA. Validation: `dotnet test src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Ghsa.Tests/StellaOps.Concelier.Connector.Ghsa.Tests.csproj --filter GhsaConflictFixtureTests`.| |FEEDCONN-GHSA-02-004 GHSA credits & ecosystem severity mapping|BE-Conn-GHSA|Models `FEEDMODELS-SCHEMA-01-002`|**DONE (2025-10-11)** – Mapper emits advisory credits with provenance masks, fixtures assert role/contact ordering, and severity normalization remains unchanged.| |FEEDCONN-GHSA-02-007 Credit parity regression fixtures|BE-Conn-GHSA, QA|Source.Nvd, Source.Osv|**DONE (2025-10-12)** – Parity fixtures regenerated via `tools/FixtureUpdater`, normalized SemVer notes verified against GHSA/NVD/OSV snapshots, and the fixtures guide now documents the headroom checks.| |FEEDCONN-GHSA-02-001 Normalized versions rollout|BE-Conn-GHSA|Models `FEEDMODELS-SCHEMA-01-003`, Normalization playbook|**DONE (2025-10-11)** – GHSA mapper now emits SemVer primitives + normalized ranges, fixtures refreshed, connector tests passing; report logged via FEEDMERGE-COORD-02-900.| |FEEDCONN-GHSA-02-005 Quota monitoring hardening|BE-Conn-GHSA, Observability|Source.Common metrics|**DONE (2025-10-12)** – Diagnostics expose headroom histograms/gauges, warning logs dedupe below the configured threshold, and the ops runbook gained alerting and mitigation guidance.| |FEEDCONN-GHSA-02-006 Scheduler rollout integration|BE-Conn-GHSA, Ops|Job scheduler|**DONE (2025-10-12)** – Dependency routine tests assert cron/timeouts, and the runbook highlights cron overrides plus backoff toggles for staged rollouts.| |FEEDCONN-GHSA-04-003 Description/CWE/metric parity rollout|BE-Conn-GHSA|Models, Core|**DONE (2025-10-15)** – Mapper emits advisory description, CWE weaknesses, and canonical CVSS metric id with updated fixtures (`osv-ghsa.osv.json` parity suite) and connector regression covers the new fields. Reported completion to Merge coordination.| -|FEEDCONN-GHSA-04-004 Canonical metric fallback coverage|BE-Conn-GHSA|Models, Merge|**DONE (2025-10-16)** – Ensure canonical metric ids remain populated when GitHub omits CVSS vectors/scores; add fixtures capturing severity-only advisories, document precedence with Merge, and emit analytics to track fallback usage.<br>2025-10-16: Mapper now emits `ghsa:severity/<level>` canonical ids when vectors are missing, diagnostics expose `ghsa.map.canonical_metric_fallbacks`, conflict/mapper fixtures updated, and runbook documents Merge precedence. 
Tests: `dotnet test src/StellaOps.Concelier.Connector.Ghsa.Tests/StellaOps.Concelier.Connector.Ghsa.Tests.csproj`.| +|FEEDCONN-GHSA-04-004 Canonical metric fallback coverage|BE-Conn-GHSA|Models, Merge|**DONE (2025-10-16)** – Ensure canonical metric ids remain populated when GitHub omits CVSS vectors/scores; add fixtures capturing severity-only advisories, document precedence with Merge, and emit analytics to track fallback usage.<br>2025-10-16: Mapper now emits `ghsa:severity/<level>` canonical ids when vectors are missing, diagnostics expose `ghsa.map.canonical_metric_fallbacks`, conflict/mapper fixtures updated, and runbook documents Merge precedence. Tests: `dotnet test src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Ghsa.Tests/StellaOps.Concelier.Connector.Ghsa.Tests.csproj`.| diff --git a/src/StellaOps.Concelier.Connector.Ics.Cisa/AGENTS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Cisa/AGENTS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.Ics.Cisa/AGENTS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Cisa/AGENTS.md diff --git a/src/StellaOps.Concelier.Connector.Ics.Cisa/Configuration/IcsCisaOptions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Cisa/Configuration/IcsCisaOptions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ics.Cisa/Configuration/IcsCisaOptions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Cisa/Configuration/IcsCisaOptions.cs diff --git a/src/StellaOps.Concelier.Connector.Ics.Cisa/HANDOVER.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Cisa/HANDOVER.md similarity index 69% rename from src/StellaOps.Concelier.Connector.Ics.Cisa/HANDOVER.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Cisa/HANDOVER.md index 8b1c3eab..29aa5fb0 100644 --- a/src/StellaOps.Concelier.Connector.Ics.Cisa/HANDOVER.md +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Cisa/HANDOVER.md @@ -1,21 +1,21 @@ -# ICS CISA Connector – Status (2025-10-16) - -## Context -- Proxy plumbing for GovDelivery (`SourceHttpClientOptions.Proxy*`) is implemented and covered by `SourceHttpClientBuilderTests.AddSourceHttpClient_LoadsProxyConfiguration`. -- Detail enrichment now extracts mitigation paragraphs/bullets, merges them with feed data, and emits `mitigation` references plus combined alias sets. -- `BuildAffectedPackages` parses product/version pairs and now persists SemVer exact values for canonical ranges via the advisory store. - -## Current Outcomes -- Feed parser fixtures were refreshed so vendor PDFs stay surfaced as attachments; DTO references continue including canonical links. -- SemVer primitive deserialisation now restores `exactValue` (e.g., `"4.2"` → `"4.2.0"`), keeping connector snapshots deterministic. -- Console debugging noise was removed from connector/parser code. -- Ops runbook documents attachment + SemVer validation steps for dry runs. -- `dotnet test src/StellaOps.Concelier.Connector.Ics.Cisa.Tests/StellaOps.Concelier.Connector.Ics.Cisa.Tests.csproj` passes (2025-10-16). - -## Outstanding Items -- None. Continue monitoring Akamai access decisions and proxy requirements via Ops feedback. 
- -## Verification Checklist -- ✅ `dotnet test src/StellaOps.Concelier.Connector.Ics.Cisa.Tests/StellaOps.Concelier.Connector.Ics.Cisa.Tests.csproj` -- ☐ `dotnet test src/StellaOps.Concelier.Connector.Common.Tests/StellaOps.Concelier.Connector.Common.Tests.csproj` (proxy support) — rerun when Source.Common changes land. -- Keep this summary aligned with `TASKS.md` as further work emerges. +# ICS CISA Connector – Status (2025-10-16) + +## Context +- Proxy plumbing for GovDelivery (`SourceHttpClientOptions.Proxy*`) is implemented and covered by `SourceHttpClientBuilderTests.AddSourceHttpClient_LoadsProxyConfiguration`. +- Detail enrichment now extracts mitigation paragraphs/bullets, merges them with feed data, and emits `mitigation` references plus combined alias sets. +- `BuildAffectedPackages` parses product/version pairs and now persists SemVer exact values for canonical ranges via the advisory store. + +## Current Outcomes +- Feed parser fixtures were refreshed so vendor PDFs stay surfaced as attachments; DTO references continue including canonical links. +- SemVer primitive deserialisation now restores `exactValue` (e.g., `"4.2"` → `"4.2.0"`), keeping connector snapshots deterministic. +- Console debugging noise was removed from connector/parser code. +- Ops runbook documents attachment + SemVer validation steps for dry runs. +- `dotnet test src/Concelier/__Tests/StellaOps.Concelier.Connector.Ics.Cisa.Tests/StellaOps.Concelier.Connector.Ics.Cisa.Tests.csproj` passes (2025-10-16). + +## Outstanding Items +- None. Continue monitoring Akamai access decisions and proxy requirements via Ops feedback. + +## Verification Checklist +- ✅ `dotnet test src/Concelier/__Tests/StellaOps.Concelier.Connector.Ics.Cisa.Tests/StellaOps.Concelier.Connector.Ics.Cisa.Tests.csproj` +- ☐ `dotnet test src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Common.Tests/StellaOps.Concelier.Connector.Common.Tests.csproj` (proxy support) — rerun when Source.Common changes land. +- Keep this summary aligned with `TASKS.md` as further work emerges. 
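The handover above notes that SemVer primitive deserialisation now restores `exactValue` (for example `"4.2"` → `"4.2.0"`). A minimal C# sketch of that padding rule is shown below, under the assumption that missing minor/patch components are filled with zeros; the helper name `NormalizeExactValue` is illustrative only and is not taken from the StellaOps codebase or the connector's actual deserialiser.

```csharp
// Hypothetical illustration: pad a two-part version string ("4.2") to a
// three-part exact SemVer value ("4.2.0"), mirroring the behaviour the
// handover describes. Names here are not from the StellaOps codebase.
using System;

static class SemVerExactValueSketch
{
    public static string? NormalizeExactValue(string? raw)
    {
        if (string.IsNullOrWhiteSpace(raw))
        {
            return null;
        }

        var parts = raw.Trim().Split('.');
        if (parts.Length >= 3)
        {
            // Already has major.minor.patch (or more); keep it as-is.
            return raw.Trim();
        }

        // Pad missing components with zeros: "4" -> "4.0.0", "4.2" -> "4.2.0".
        var padded = new string[3];
        for (var i = 0; i < 3; i++)
        {
            padded[i] = i < parts.Length ? parts[i] : "0";
        }

        return string.Join('.', padded);
    }

    public static void Main()
    {
        Console.WriteLine(NormalizeExactValue("4.2"));   // 4.2.0
        Console.WriteLine(NormalizeExactValue("4"));     // 4.0.0
        Console.WriteLine(NormalizeExactValue("1.2.3")); // 1.2.3
    }
}
```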
diff --git a/src/StellaOps.Concelier.Connector.Ics.Cisa/IcsCisaConnector.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Cisa/IcsCisaConnector.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ics.Cisa/IcsCisaConnector.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Cisa/IcsCisaConnector.cs diff --git a/src/StellaOps.Concelier.Connector.Ics.Cisa/IcsCisaConnectorPlugin.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Cisa/IcsCisaConnectorPlugin.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ics.Cisa/IcsCisaConnectorPlugin.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Cisa/IcsCisaConnectorPlugin.cs diff --git a/src/StellaOps.Concelier.Connector.Ics.Cisa/IcsCisaDependencyInjectionRoutine.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Cisa/IcsCisaDependencyInjectionRoutine.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ics.Cisa/IcsCisaDependencyInjectionRoutine.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Cisa/IcsCisaDependencyInjectionRoutine.cs diff --git a/src/StellaOps.Concelier.Connector.Ics.Cisa/IcsCisaServiceCollectionExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Cisa/IcsCisaServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ics.Cisa/IcsCisaServiceCollectionExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Cisa/IcsCisaServiceCollectionExtensions.cs diff --git a/src/StellaOps.Concelier.Connector.Ics.Cisa/Internal/IcsCisaAdvisoryDto.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Cisa/Internal/IcsCisaAdvisoryDto.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ics.Cisa/Internal/IcsCisaAdvisoryDto.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Cisa/Internal/IcsCisaAdvisoryDto.cs diff --git a/src/StellaOps.Concelier.Connector.Ics.Cisa/Internal/IcsCisaAttachmentDto.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Cisa/Internal/IcsCisaAttachmentDto.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ics.Cisa/Internal/IcsCisaAttachmentDto.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Cisa/Internal/IcsCisaAttachmentDto.cs diff --git a/src/StellaOps.Concelier.Connector.Ics.Cisa/Internal/IcsCisaCursor.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Cisa/Internal/IcsCisaCursor.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ics.Cisa/Internal/IcsCisaCursor.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Cisa/Internal/IcsCisaCursor.cs diff --git a/src/StellaOps.Concelier.Connector.Ics.Cisa/Internal/IcsCisaDiagnostics.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Cisa/Internal/IcsCisaDiagnostics.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ics.Cisa/Internal/IcsCisaDiagnostics.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Cisa/Internal/IcsCisaDiagnostics.cs diff --git a/src/StellaOps.Concelier.Connector.Ics.Cisa/Internal/IcsCisaFeedDto.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Cisa/Internal/IcsCisaFeedDto.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ics.Cisa/Internal/IcsCisaFeedDto.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Cisa/Internal/IcsCisaFeedDto.cs 
diff --git a/src/StellaOps.Concelier.Connector.Ics.Cisa/Internal/IcsCisaFeedParser.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Cisa/Internal/IcsCisaFeedParser.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ics.Cisa/Internal/IcsCisaFeedParser.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Cisa/Internal/IcsCisaFeedParser.cs diff --git a/src/StellaOps.Concelier.Connector.Ics.Cisa/Jobs.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Cisa/Jobs.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ics.Cisa/Jobs.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Cisa/Jobs.cs diff --git a/src/StellaOps.Concelier.Connector.Ics.Cisa/StellaOps.Concelier.Connector.Ics.Cisa.csproj b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Cisa/StellaOps.Concelier.Connector.Ics.Cisa.csproj similarity index 87% rename from src/StellaOps.Concelier.Connector.Ics.Cisa/StellaOps.Concelier.Connector.Ics.Cisa.csproj rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Cisa/StellaOps.Concelier.Connector.Ics.Cisa.csproj index fd85683b..16373e3c 100644 --- a/src/StellaOps.Concelier.Connector.Ics.Cisa/StellaOps.Concelier.Connector.Ics.Cisa.csproj +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Cisa/StellaOps.Concelier.Connector.Ics.Cisa.csproj @@ -1,28 +1,29 @@ -<Project Sdk="Microsoft.NET.Sdk"> - - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - - <ItemGroup> - <ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" /> - - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - </ItemGroup> - - <ItemGroup> - <PackageReference Include="System.ServiceModel.Syndication" Version="8.0.0" /> - </ItemGroup> - - <ItemGroup> - <AssemblyAttribute Include="System.Runtime.CompilerServices.InternalsVisibleTo"> - <_Parameter1>StellaOps.Concelier.Connector.Ics.Cisa.Tests</_Parameter1> - </AssemblyAttribute> - </ItemGroup> - -</Project> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + + <ItemGroup> + <ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" /> + + <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> + </ItemGroup> + + <ItemGroup> + <PackageReference Include="System.ServiceModel.Syndication" Version="8.0.0" /> + </ItemGroup> + + <ItemGroup> + <AssemblyAttribute Include="System.Runtime.CompilerServices.InternalsVisibleTo"> + <_Parameter1>StellaOps.Concelier.Connector.Ics.Cisa.Tests</_Parameter1> + 
</AssemblyAttribute> + </ItemGroup> + +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Ics.Cisa/TASKS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Cisa/TASKS.md similarity index 94% rename from src/StellaOps.Concelier.Connector.Ics.Cisa/TASKS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Cisa/TASKS.md index 11baa8c7..7eb4b9fe 100644 --- a/src/StellaOps.Concelier.Connector.Ics.Cisa/TASKS.md +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Cisa/TASKS.md @@ -1,15 +1,15 @@ -# TASKS -| Task | Owner(s) | Depends on | Notes | -|---|---|---|---| -|FEEDCONN-ICSCISA-02-001 Document CISA ICS feed contract|BE-Conn-ICS-CISA|Research|**DONE (2025-10-11)** – `https://www.cisa.gov/cybersecurity-advisories/ics-advisories.xml` and legacy `/sites/default/files/feeds/...` return Akamai 403 even with browser UA; HTML landing page blocked as well. Logged full headers (x-reference-error, AkamaiGHost) in `docs/concelier-connector-research-20251011.md` and initiated GovDelivery access request.| -|FEEDCONN-ICSCISA-02-002 Fetch pipeline & cursor storage|BE-Conn-ICS-CISA|Source.Common, Storage.Mongo|**DONE (2025-10-16)** – Confirmed proxy knobs + cursor state behave with the refreshed fixtures; ops runbook now captures proxy usage/validation so the fetch stage is production-ready.| -|FEEDCONN-ICSCISA-02-003 DTO/parser implementation|BE-Conn-ICS-CISA|Source.Common|**DONE (2025-10-16)** – Feed parser fixtures updated to retain vendor PDFs as attachments while maintaining reference coverage; console diagnostics removed.| -|FEEDCONN-ICSCISA-02-004 Canonical mapping & range primitives|BE-Conn-ICS-CISA|Models|**DONE (2025-10-16)** – `TryCreateSemVerPrimitive` flow + Mongo deserialiser now persist `exactValue` (`4.2` → `4.2.0`), unblocking canonical snapshots.| -|FEEDCONN-ICSCISA-02-005 Deterministic fixtures/tests|QA|Testing|**DONE (2025-10-16)** – `dotnet test src/StellaOps.Concelier.Connector.Ics.Cisa.Tests/...` passes; fixtures assert attachment handling + SemVer semantics.| -|FEEDCONN-ICSCISA-02-006 Telemetry & documentation|DevEx|Docs|**DONE (2025-10-16)** – Ops guide documents attachment checks, SemVer exact values, and proxy guidance; diagnostics remain unchanged.| -|FEEDCONN-ICSCISA-02-007 Detail document inventory|BE-Conn-ICS-CISA|Research|**DONE (2025-10-16)** – Validated canned detail pages vs feed output so attachment inventories stay aligned; archived expectations noted in `HANDOVER.md`.| -|FEEDCONN-ICSCISA-02-008 Distribution fallback strategy|BE-Conn-ICS-CISA|Research|**DONE (2025-10-11)** – Outlined GovDelivery token request, HTML scrape + email digest fallback, and dependency on Ops for credential workflow; awaiting decision before fetch implementation.| -|FEEDCONN-ICSCISA-02-009 GovDelivery credential onboarding|Ops, BE-Conn-ICS-CISA|Ops|**DONE (2025-10-14)** – GovDelivery onboarding runbook captured in `docs/ops/concelier-icscisa-operations.md`; secret vault path and Offline Kit handling documented.| -|FEEDCONN-ICSCISA-02-010 Mitigation & SemVer polish|BE-Conn-ICS-CISA|02-003, 02-004|**DONE (2025-10-16)** – Attachment + mitigation references now land as expected and SemVer primitives carry exact values; end-to-end suite green (see `HANDOVER.md`).| -|FEEDCONN-ICSCISA-02-011 Docs & telemetry refresh|DevEx|02-006|**DONE (2025-10-16)** – Ops documentation refreshed (attachments, SemVer validation, proxy knobs) and telemetry notes verified.| -|FEEDCONN-ICSCISA-02-012 Normalized version 
decision|BE-Conn-ICS-CISA|Merge coordination (`FEEDMERGE-COORD-02-900`)|**TODO (due 2025-10-23)** – Promote existing `SemVerPrimitive` exact values into `NormalizedVersions` via `.ToNormalizedVersionRule("ics-cisa:{advisoryId}:{product}")`, add regression coverage, and open Models ticket if non-SemVer firmware requires a new scheme.| +# TASKS +| Task | Owner(s) | Depends on | Notes | +|---|---|---|---| +|FEEDCONN-ICSCISA-02-001 Document CISA ICS feed contract|BE-Conn-ICS-CISA|Research|**DONE (2025-10-11)** – `https://www.cisa.gov/cybersecurity-advisories/ics-advisories.xml` and legacy `/sites/default/files/feeds/...` return Akamai 403 even with browser UA; HTML landing page blocked as well. Logged full headers (x-reference-error, AkamaiGHost) in `docs/concelier-connector-research-20251011.md` and initiated GovDelivery access request.| +|FEEDCONN-ICSCISA-02-002 Fetch pipeline & cursor storage|BE-Conn-ICS-CISA|Source.Common, Storage.Mongo|**DONE (2025-10-16)** – Confirmed proxy knobs + cursor state behave with the refreshed fixtures; ops runbook now captures proxy usage/validation so the fetch stage is production-ready.| +|FEEDCONN-ICSCISA-02-003 DTO/parser implementation|BE-Conn-ICS-CISA|Source.Common|**DONE (2025-10-16)** – Feed parser fixtures updated to retain vendor PDFs as attachments while maintaining reference coverage; console diagnostics removed.| +|FEEDCONN-ICSCISA-02-004 Canonical mapping & range primitives|BE-Conn-ICS-CISA|Models|**DONE (2025-10-16)** – `TryCreateSemVerPrimitive` flow + Mongo deserialiser now persist `exactValue` (`4.2` → `4.2.0`), unblocking canonical snapshots.| +|FEEDCONN-ICSCISA-02-005 Deterministic fixtures/tests|QA|Testing|**DONE (2025-10-16)** – `dotnet test src/Concelier/__Tests/StellaOps.Concelier.Connector.Ics.Cisa.Tests/...` passes; fixtures assert attachment handling + SemVer semantics.| +|FEEDCONN-ICSCISA-02-006 Telemetry & documentation|DevEx|Docs|**DONE (2025-10-16)** – Ops guide documents attachment checks, SemVer exact values, and proxy guidance; diagnostics remain unchanged.| +|FEEDCONN-ICSCISA-02-007 Detail document inventory|BE-Conn-ICS-CISA|Research|**DONE (2025-10-16)** – Validated canned detail pages vs feed output so attachment inventories stay aligned; archived expectations noted in `HANDOVER.md`.| +|FEEDCONN-ICSCISA-02-008 Distribution fallback strategy|BE-Conn-ICS-CISA|Research|**DONE (2025-10-11)** – Outlined GovDelivery token request, HTML scrape + email digest fallback, and dependency on Ops for credential workflow; awaiting decision before fetch implementation.| +|FEEDCONN-ICSCISA-02-009 GovDelivery credential onboarding|Ops, BE-Conn-ICS-CISA|Ops|**DONE (2025-10-14)** – GovDelivery onboarding runbook captured in `docs/ops/concelier-icscisa-operations.md`; secret vault path and Offline Kit handling documented.| +|FEEDCONN-ICSCISA-02-010 Mitigation & SemVer polish|BE-Conn-ICS-CISA|02-003, 02-004|**DONE (2025-10-16)** – Attachment + mitigation references now land as expected and SemVer primitives carry exact values; end-to-end suite green (see `HANDOVER.md`).| +|FEEDCONN-ICSCISA-02-011 Docs & telemetry refresh|DevEx|02-006|**DONE (2025-10-16)** – Ops documentation refreshed (attachments, SemVer validation, proxy knobs) and telemetry notes verified.| +|FEEDCONN-ICSCISA-02-012 Normalized version decision|BE-Conn-ICS-CISA|Merge coordination (`FEEDMERGE-COORD-02-900`)|**TODO (due 2025-10-23)** – Promote existing `SemVerPrimitive` exact values into `NormalizedVersions` via `.ToNormalizedVersionRule("ics-cisa:{advisoryId}:{product}")`, add 
regression coverage, and open Models ticket if non-SemVer firmware requires a new scheme.| diff --git a/src/StellaOps.Concelier.Connector.Ics.Kaspersky/AGENTS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Kaspersky/AGENTS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.Ics.Kaspersky/AGENTS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Kaspersky/AGENTS.md diff --git a/src/StellaOps.Concelier.Connector.Ics.Kaspersky/Configuration/KasperskyOptions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Kaspersky/Configuration/KasperskyOptions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ics.Kaspersky/Configuration/KasperskyOptions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Kaspersky/Configuration/KasperskyOptions.cs diff --git a/src/StellaOps.Concelier.Connector.Ics.Kaspersky/Internal/KasperskyAdvisoryDto.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Kaspersky/Internal/KasperskyAdvisoryDto.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ics.Kaspersky/Internal/KasperskyAdvisoryDto.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Kaspersky/Internal/KasperskyAdvisoryDto.cs diff --git a/src/StellaOps.Concelier.Connector.Ics.Kaspersky/Internal/KasperskyAdvisoryParser.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Kaspersky/Internal/KasperskyAdvisoryParser.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ics.Kaspersky/Internal/KasperskyAdvisoryParser.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Kaspersky/Internal/KasperskyAdvisoryParser.cs diff --git a/src/StellaOps.Concelier.Connector.Ics.Kaspersky/Internal/KasperskyCursor.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Kaspersky/Internal/KasperskyCursor.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ics.Kaspersky/Internal/KasperskyCursor.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Kaspersky/Internal/KasperskyCursor.cs diff --git a/src/StellaOps.Concelier.Connector.Ics.Kaspersky/Internal/KasperskyFeedClient.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Kaspersky/Internal/KasperskyFeedClient.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ics.Kaspersky/Internal/KasperskyFeedClient.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Kaspersky/Internal/KasperskyFeedClient.cs diff --git a/src/StellaOps.Concelier.Connector.Ics.Kaspersky/Internal/KasperskyFeedItem.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Kaspersky/Internal/KasperskyFeedItem.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ics.Kaspersky/Internal/KasperskyFeedItem.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Kaspersky/Internal/KasperskyFeedItem.cs diff --git a/src/StellaOps.Concelier.Connector.Ics.Kaspersky/Jobs.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Kaspersky/Jobs.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ics.Kaspersky/Jobs.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Kaspersky/Jobs.cs diff --git a/src/StellaOps.Concelier.Connector.Ics.Kaspersky/KasperskyConnector.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Kaspersky/KasperskyConnector.cs similarity index 100% rename from 
src/StellaOps.Concelier.Connector.Ics.Kaspersky/KasperskyConnector.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Kaspersky/KasperskyConnector.cs diff --git a/src/StellaOps.Concelier.Connector.Ics.Kaspersky/KasperskyConnectorPlugin.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Kaspersky/KasperskyConnectorPlugin.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ics.Kaspersky/KasperskyConnectorPlugin.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Kaspersky/KasperskyConnectorPlugin.cs diff --git a/src/StellaOps.Concelier.Connector.Ics.Kaspersky/KasperskyDependencyInjectionRoutine.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Kaspersky/KasperskyDependencyInjectionRoutine.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ics.Kaspersky/KasperskyDependencyInjectionRoutine.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Kaspersky/KasperskyDependencyInjectionRoutine.cs diff --git a/src/StellaOps.Concelier.Connector.Ics.Kaspersky/KasperskyServiceCollectionExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Kaspersky/KasperskyServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ics.Kaspersky/KasperskyServiceCollectionExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Kaspersky/KasperskyServiceCollectionExtensions.cs diff --git a/src/StellaOps.Concelier.Connector.Ics.Kaspersky/StellaOps.Concelier.Connector.Ics.Kaspersky.csproj b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Kaspersky/StellaOps.Concelier.Connector.Ics.Kaspersky.csproj similarity index 79% rename from src/StellaOps.Concelier.Connector.Ics.Kaspersky/StellaOps.Concelier.Connector.Ics.Kaspersky.csproj rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Kaspersky/StellaOps.Concelier.Connector.Ics.Kaspersky.csproj index 44c74bcc..5bd20434 100644 --- a/src/StellaOps.Concelier.Connector.Ics.Kaspersky/StellaOps.Concelier.Connector.Ics.Kaspersky.csproj +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Kaspersky/StellaOps.Concelier.Connector.Ics.Kaspersky.csproj @@ -1,16 +1,17 @@ -<Project Sdk="Microsoft.NET.Sdk"> - - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - - <ItemGroup> - <ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" /> - - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - </ItemGroup> -</Project> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + + <ItemGroup> + <ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" /> + + <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference 
Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Ics.Kaspersky/TASKS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Kaspersky/TASKS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.Ics.Kaspersky/TASKS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ics.Kaspersky/TASKS.md diff --git a/src/StellaOps.Concelier.Connector.Jvn/AGENTS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/AGENTS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.Jvn/AGENTS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/AGENTS.md diff --git a/src/StellaOps.Concelier.Connector.Jvn/Configuration/JvnOptions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/Configuration/JvnOptions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Jvn/Configuration/JvnOptions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/Configuration/JvnOptions.cs diff --git a/src/StellaOps.Concelier.Connector.Jvn/Internal/JvnAdvisoryMapper.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/Internal/JvnAdvisoryMapper.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Jvn/Internal/JvnAdvisoryMapper.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/Internal/JvnAdvisoryMapper.cs diff --git a/src/StellaOps.Concelier.Connector.Jvn/Internal/JvnConstants.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/Internal/JvnConstants.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Jvn/Internal/JvnConstants.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/Internal/JvnConstants.cs diff --git a/src/StellaOps.Concelier.Connector.Jvn/Internal/JvnCursor.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/Internal/JvnCursor.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Jvn/Internal/JvnCursor.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/Internal/JvnCursor.cs diff --git a/src/StellaOps.Concelier.Connector.Jvn/Internal/JvnDetailDto.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/Internal/JvnDetailDto.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Jvn/Internal/JvnDetailDto.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/Internal/JvnDetailDto.cs diff --git a/src/StellaOps.Concelier.Connector.Jvn/Internal/JvnDetailParser.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/Internal/JvnDetailParser.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Jvn/Internal/JvnDetailParser.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/Internal/JvnDetailParser.cs diff --git a/src/StellaOps.Concelier.Connector.Jvn/Internal/JvnOverviewItem.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/Internal/JvnOverviewItem.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Jvn/Internal/JvnOverviewItem.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/Internal/JvnOverviewItem.cs diff --git a/src/StellaOps.Concelier.Connector.Jvn/Internal/JvnOverviewPage.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/Internal/JvnOverviewPage.cs similarity index 100% rename from 
src/StellaOps.Concelier.Connector.Jvn/Internal/JvnOverviewPage.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/Internal/JvnOverviewPage.cs diff --git a/src/StellaOps.Concelier.Connector.Jvn/Internal/JvnSchemaProvider.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/Internal/JvnSchemaProvider.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Jvn/Internal/JvnSchemaProvider.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/Internal/JvnSchemaProvider.cs diff --git a/src/StellaOps.Concelier.Connector.Jvn/Internal/JvnSchemaValidationException.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/Internal/JvnSchemaValidationException.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Jvn/Internal/JvnSchemaValidationException.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/Internal/JvnSchemaValidationException.cs diff --git a/src/StellaOps.Concelier.Connector.Jvn/Internal/MyJvnClient.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/Internal/MyJvnClient.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Jvn/Internal/MyJvnClient.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/Internal/MyJvnClient.cs diff --git a/src/StellaOps.Concelier.Connector.Jvn/Jobs.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/Jobs.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Jvn/Jobs.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/Jobs.cs diff --git a/src/StellaOps.Concelier.Connector.Jvn/JvnConnector.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/JvnConnector.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Jvn/JvnConnector.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/JvnConnector.cs diff --git a/src/StellaOps.Concelier.Connector.Jvn/JvnConnectorPlugin.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/JvnConnectorPlugin.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Jvn/JvnConnectorPlugin.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/JvnConnectorPlugin.cs diff --git a/src/StellaOps.Concelier.Connector.Jvn/JvnDependencyInjectionRoutine.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/JvnDependencyInjectionRoutine.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Jvn/JvnDependencyInjectionRoutine.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/JvnDependencyInjectionRoutine.cs diff --git a/src/StellaOps.Concelier.Connector.Jvn/JvnServiceCollectionExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/JvnServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Jvn/JvnServiceCollectionExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/JvnServiceCollectionExtensions.cs diff --git a/src/StellaOps.Concelier.Connector.Jvn/Schemas/data_marking.xsd b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/Schemas/data_marking.xsd similarity index 100% rename from src/StellaOps.Concelier.Connector.Jvn/Schemas/data_marking.xsd rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/Schemas/data_marking.xsd diff --git a/src/StellaOps.Concelier.Connector.Jvn/Schemas/jvnrss_3.2.xsd b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/Schemas/jvnrss_3.2.xsd 
similarity index 100% rename from src/StellaOps.Concelier.Connector.Jvn/Schemas/jvnrss_3.2.xsd rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/Schemas/jvnrss_3.2.xsd diff --git a/src/StellaOps.Concelier.Connector.Jvn/Schemas/mod_sec_3.0.xsd b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/Schemas/mod_sec_3.0.xsd similarity index 100% rename from src/StellaOps.Concelier.Connector.Jvn/Schemas/mod_sec_3.0.xsd rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/Schemas/mod_sec_3.0.xsd diff --git a/src/StellaOps.Concelier.Connector.Jvn/Schemas/status_3.3.xsd b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/Schemas/status_3.3.xsd similarity index 100% rename from src/StellaOps.Concelier.Connector.Jvn/Schemas/status_3.3.xsd rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/Schemas/status_3.3.xsd diff --git a/src/StellaOps.Concelier.Connector.Jvn/Schemas/tlp_marking.xsd b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/Schemas/tlp_marking.xsd similarity index 100% rename from src/StellaOps.Concelier.Connector.Jvn/Schemas/tlp_marking.xsd rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/Schemas/tlp_marking.xsd diff --git a/src/StellaOps.Concelier.Connector.Jvn/Schemas/vuldef_3.2.xsd b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/Schemas/vuldef_3.2.xsd similarity index 100% rename from src/StellaOps.Concelier.Connector.Jvn/Schemas/vuldef_3.2.xsd rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/Schemas/vuldef_3.2.xsd diff --git a/src/StellaOps.Concelier.Connector.Jvn/Schemas/xml.xsd b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/Schemas/xml.xsd similarity index 100% rename from src/StellaOps.Concelier.Connector.Jvn/Schemas/xml.xsd rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/Schemas/xml.xsd diff --git a/src/StellaOps.Concelier.Connector.Jvn/StellaOps.Concelier.Connector.Jvn.csproj b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/StellaOps.Concelier.Connector.Jvn.csproj similarity index 83% rename from src/StellaOps.Concelier.Connector.Jvn/StellaOps.Concelier.Connector.Jvn.csproj rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/StellaOps.Concelier.Connector.Jvn.csproj index 6662142d..106db49d 100644 --- a/src/StellaOps.Concelier.Connector.Jvn/StellaOps.Concelier.Connector.Jvn.csproj +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/StellaOps.Concelier.Connector.Jvn.csproj @@ -1,15 +1,16 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - <ItemGroup> - <EmbeddedResource Include="Schemas\*.xsd" /> - <ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" /> - <ProjectReference Include="..\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="..\StellaOps.Concelier.Normalization\StellaOps.Concelier.Normalization.csproj" /> - <ProjectReference Include="..\StellaOps.Concelier.Connector.Common\StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="..\StellaOps.Concelier.Storage.Mongo\StellaOps.Concelier.Storage.Mongo.csproj" /> - </ItemGroup> -</Project> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + 
<Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> + <EmbeddedResource Include="Schemas\*.xsd" /> + <ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" /> + <ProjectReference Include="..\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="..\StellaOps.Concelier.Normalization\StellaOps.Concelier.Normalization.csproj" /> + <ProjectReference Include="..\StellaOps.Concelier.Connector.Common\StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="..\StellaOps.Concelier.Storage.Mongo\StellaOps.Concelier.Storage.Mongo.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Jvn/TASKS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/TASKS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.Jvn/TASKS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Jvn/TASKS.md diff --git a/src/StellaOps.Concelier.Connector.Kev/AGENTS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kev/AGENTS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.Kev/AGENTS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kev/AGENTS.md diff --git a/src/StellaOps.Concelier.Connector.Kev/Configuration/KevOptions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kev/Configuration/KevOptions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Kev/Configuration/KevOptions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kev/Configuration/KevOptions.cs diff --git a/src/StellaOps.Concelier.Connector.Kev/Internal/KevCatalogDto.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kev/Internal/KevCatalogDto.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Kev/Internal/KevCatalogDto.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kev/Internal/KevCatalogDto.cs diff --git a/src/StellaOps.Concelier.Connector.Kev/Internal/KevCursor.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kev/Internal/KevCursor.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Kev/Internal/KevCursor.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kev/Internal/KevCursor.cs diff --git a/src/StellaOps.Concelier.Connector.Kev/Internal/KevDiagnostics.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kev/Internal/KevDiagnostics.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Kev/Internal/KevDiagnostics.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kev/Internal/KevDiagnostics.cs diff --git a/src/StellaOps.Concelier.Connector.Kev/Internal/KevMapper.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kev/Internal/KevMapper.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Kev/Internal/KevMapper.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kev/Internal/KevMapper.cs diff --git a/src/StellaOps.Concelier.Connector.Kev/Internal/KevSchemaProvider.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kev/Internal/KevSchemaProvider.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Kev/Internal/KevSchemaProvider.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kev/Internal/KevSchemaProvider.cs diff --git a/src/StellaOps.Concelier.Connector.Kev/Jobs.cs 
b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kev/Jobs.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Kev/Jobs.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kev/Jobs.cs diff --git a/src/StellaOps.Concelier.Connector.Kev/KevConnector.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kev/KevConnector.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Kev/KevConnector.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kev/KevConnector.cs diff --git a/src/StellaOps.Concelier.Connector.Kev/KevConnectorPlugin.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kev/KevConnectorPlugin.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Kev/KevConnectorPlugin.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kev/KevConnectorPlugin.cs diff --git a/src/StellaOps.Concelier.Connector.Kev/KevDependencyInjectionRoutine.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kev/KevDependencyInjectionRoutine.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Kev/KevDependencyInjectionRoutine.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kev/KevDependencyInjectionRoutine.cs diff --git a/src/StellaOps.Concelier.Connector.Kev/KevServiceCollectionExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kev/KevServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Kev/KevServiceCollectionExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kev/KevServiceCollectionExtensions.cs diff --git a/src/StellaOps.Concelier.Connector.Kev/Schemas/kev-catalog.schema.json b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kev/Schemas/kev-catalog.schema.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Kev/Schemas/kev-catalog.schema.json rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kev/Schemas/kev-catalog.schema.json diff --git a/src/StellaOps.Concelier.Connector.Kev/StellaOps.Concelier.Connector.Kev.csproj b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kev/StellaOps.Concelier.Connector.Kev.csproj similarity index 85% rename from src/StellaOps.Concelier.Connector.Kev/StellaOps.Concelier.Connector.Kev.csproj rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kev/StellaOps.Concelier.Connector.Kev.csproj index c5f1ae2c..c772f91a 100644 --- a/src/StellaOps.Concelier.Connector.Kev/StellaOps.Concelier.Connector.Kev.csproj +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kev/StellaOps.Concelier.Connector.Kev.csproj @@ -1,13 +1,14 @@ -<Project Sdk="Microsoft.NET.Sdk"> - - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> - <ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" /> + <ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" /> <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> @@ -23,5 +24,4 @@ 
<ItemGroup> <EmbeddedResource Include="Schemas\kev-catalog.schema.json" /> </ItemGroup> -</Project> - +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Kev/TASKS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kev/TASKS.md similarity index 88% rename from src/StellaOps.Concelier.Connector.Kev/TASKS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kev/TASKS.md index 98e8361c..91eb7ad5 100644 --- a/src/StellaOps.Concelier.Connector.Kev/TASKS.md +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kev/TASKS.md @@ -1,12 +1,12 @@ -# TASKS -| Task | Owner(s) | Depends on | Notes | -|---|---|---|---| -|Review KEV JSON schema & cadence|BE-Conn-KEV|Research|**DONE** – Feed defaults lock to the public JSON catalog; AGENTS notes call out daily cadence and allowlist requirements.| -|Fetch & cursor implementation|BE-Conn-KEV|Source.Common, Storage.Mongo|**DONE** – SourceFetchService drives ETag/Last-Modified aware fetches with SourceState cursor tracking documents + catalog metadata.| -|DTO/parser implementation|BE-Conn-KEV|Source.Common|**DONE** – `KevCatalogDto`/`KevVulnerabilityDto` deserialize payloads with logging for catalog version/releases before DTO persistence.| -|Canonical mapping & range primitives|BE-Conn-KEV|Models|**DONE** – Mapper produces vendor RangePrimitives (due dates, CWE list, ransomware flag, catalog metadata) and deduplicated references.| -|Deterministic fixtures/tests|QA|Testing|**DONE** – End-to-end fetch→parse→map test with canned catalog + snapshot (`UPDATE_KEV_FIXTURES=1`) guards determinism.| -|Telemetry & docs|DevEx|Docs|**DONE** – Connector emits structured logs + meters for catalog entries/advisories and AGENTS docs cover cadence/allowlist guidance.| -|Schema validation & anomaly surfacing|BE-Conn-KEV, QA|Source.Common|**DONE (2025-10-12)** – Wired `IJsonSchemaValidator` + embedded schema, added failure reasons (`schema`, `download`, `invalidJson`, etc.), anomaly counters (`missingCveId`, `countMismatch`, `nullEntry`), and kept `dotnet test src/StellaOps.Concelier.Connector.Kev.Tests` passing.| -|Metrics export wiring|DevOps, DevEx|Observability|**DONE (2025-10-12)** – Added `kev.fetch.*` counters, parse failure/anomaly tags, refreshed ops runbook + Grafana dashboard (`docs/ops/concelier-cve-kev-grafana-dashboard.json`) with PromQL guidance.| -|FEEDCONN-KEV-02-003 Normalized versions propagation|BE-Conn-KEV|Models `FEEDMODELS-SCHEMA-01-003`, Normalization playbook|**DONE (2025-10-12)** – Validated catalog/date/due normalized rules emission + ordering; fixtures assert rule set and `dotnet test src/StellaOps.Concelier.Connector.Kev.Tests` remains green.| +# TASKS +| Task | Owner(s) | Depends on | Notes | +|---|---|---|---| +|Review KEV JSON schema & cadence|BE-Conn-KEV|Research|**DONE** – Feed defaults lock to the public JSON catalog; AGENTS notes call out daily cadence and allowlist requirements.| +|Fetch & cursor implementation|BE-Conn-KEV|Source.Common, Storage.Mongo|**DONE** – SourceFetchService drives ETag/Last-Modified aware fetches with SourceState cursor tracking documents + catalog metadata.| +|DTO/parser implementation|BE-Conn-KEV|Source.Common|**DONE** – `KevCatalogDto`/`KevVulnerabilityDto` deserialize payloads with logging for catalog version/releases before DTO persistence.| +|Canonical mapping & range primitives|BE-Conn-KEV|Models|**DONE** – Mapper produces vendor RangePrimitives (due dates, CWE list, ransomware flag, catalog metadata) and deduplicated references.| 
+|Deterministic fixtures/tests|QA|Testing|**DONE** – End-to-end fetch→parse→map test with canned catalog + snapshot (`UPDATE_KEV_FIXTURES=1`) guards determinism.| +|Telemetry & docs|DevEx|Docs|**DONE** – Connector emits structured logs + meters for catalog entries/advisories and AGENTS docs cover cadence/allowlist guidance.| +|Schema validation & anomaly surfacing|BE-Conn-KEV, QA|Source.Common|**DONE (2025-10-12)** – Wired `IJsonSchemaValidator` + embedded schema, added failure reasons (`schema`, `download`, `invalidJson`, etc.), anomaly counters (`missingCveId`, `countMismatch`, `nullEntry`), and kept `dotnet test src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Kev.Tests` passing.| +|Metrics export wiring|DevOps, DevEx|Observability|**DONE (2025-10-12)** – Added `kev.fetch.*` counters, parse failure/anomaly tags, refreshed ops runbook + Grafana dashboard (`docs/ops/concelier-cve-kev-grafana-dashboard.json`) with PromQL guidance.| +|FEEDCONN-KEV-02-003 Normalized versions propagation|BE-Conn-KEV|Models `FEEDMODELS-SCHEMA-01-003`, Normalization playbook|**DONE (2025-10-12)** – Validated catalog/date/due normalized rules emission + ordering; fixtures assert rule set and `dotnet test src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Kev.Tests` remains green.| diff --git a/src/StellaOps.Concelier.Connector.Kisa/AGENTS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kisa/AGENTS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.Kisa/AGENTS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kisa/AGENTS.md diff --git a/src/StellaOps.Concelier.Connector.Kisa/Configuration/KisaOptions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kisa/Configuration/KisaOptions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Kisa/Configuration/KisaOptions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kisa/Configuration/KisaOptions.cs diff --git a/src/StellaOps.Concelier.Connector.Kisa/Internal/KisaCursor.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kisa/Internal/KisaCursor.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Kisa/Internal/KisaCursor.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kisa/Internal/KisaCursor.cs diff --git a/src/StellaOps.Concelier.Connector.Kisa/Internal/KisaDetailParser.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kisa/Internal/KisaDetailParser.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Kisa/Internal/KisaDetailParser.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kisa/Internal/KisaDetailParser.cs diff --git a/src/StellaOps.Concelier.Connector.Kisa/Internal/KisaDetailResponse.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kisa/Internal/KisaDetailResponse.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Kisa/Internal/KisaDetailResponse.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kisa/Internal/KisaDetailResponse.cs diff --git a/src/StellaOps.Concelier.Connector.Kisa/Internal/KisaDiagnostics.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kisa/Internal/KisaDiagnostics.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Kisa/Internal/KisaDiagnostics.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kisa/Internal/KisaDiagnostics.cs diff --git 
a/src/StellaOps.Concelier.Connector.Kisa/Internal/KisaDocumentMetadata.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kisa/Internal/KisaDocumentMetadata.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Kisa/Internal/KisaDocumentMetadata.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kisa/Internal/KisaDocumentMetadata.cs diff --git a/src/StellaOps.Concelier.Connector.Kisa/Internal/KisaFeedClient.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kisa/Internal/KisaFeedClient.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Kisa/Internal/KisaFeedClient.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kisa/Internal/KisaFeedClient.cs diff --git a/src/StellaOps.Concelier.Connector.Kisa/Internal/KisaFeedItem.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kisa/Internal/KisaFeedItem.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Kisa/Internal/KisaFeedItem.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kisa/Internal/KisaFeedItem.cs diff --git a/src/StellaOps.Concelier.Connector.Kisa/Internal/KisaMapper.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kisa/Internal/KisaMapper.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Kisa/Internal/KisaMapper.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kisa/Internal/KisaMapper.cs diff --git a/src/StellaOps.Concelier.Connector.Kisa/Jobs.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kisa/Jobs.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Kisa/Jobs.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kisa/Jobs.cs diff --git a/src/StellaOps.Concelier.Connector.Kisa/KisaConnector.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kisa/KisaConnector.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Kisa/KisaConnector.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kisa/KisaConnector.cs diff --git a/src/StellaOps.Concelier.Connector.Kisa/KisaConnectorPlugin.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kisa/KisaConnectorPlugin.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Kisa/KisaConnectorPlugin.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kisa/KisaConnectorPlugin.cs diff --git a/src/StellaOps.Concelier.Connector.Kisa/KisaDependencyInjectionRoutine.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kisa/KisaDependencyInjectionRoutine.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Kisa/KisaDependencyInjectionRoutine.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kisa/KisaDependencyInjectionRoutine.cs diff --git a/src/StellaOps.Concelier.Connector.Kisa/KisaServiceCollectionExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kisa/KisaServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Kisa/KisaServiceCollectionExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kisa/KisaServiceCollectionExtensions.cs diff --git a/src/StellaOps.Concelier.Connector.Kisa/StellaOps.Concelier.Connector.Kisa.csproj b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kisa/StellaOps.Concelier.Connector.Kisa.csproj similarity index 79% rename from src/StellaOps.Concelier.Connector.Kisa/StellaOps.Concelier.Connector.Kisa.csproj rename to 
src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kisa/StellaOps.Concelier.Connector.Kisa.csproj index 48e91447..5bd20434 100644 --- a/src/StellaOps.Concelier.Connector.Kisa/StellaOps.Concelier.Connector.Kisa.csproj +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kisa/StellaOps.Concelier.Connector.Kisa.csproj @@ -1,17 +1,17 @@ -<Project Sdk="Microsoft.NET.Sdk"> - - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - - <ItemGroup> - <ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" /> - +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + + <ItemGroup> + <ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> </ItemGroup> -</Project> - +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Kisa/TASKS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kisa/TASKS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.Kisa/TASKS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Kisa/TASKS.md diff --git a/src/StellaOps.Concelier.Connector.Nvd/AGENTS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Nvd/AGENTS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.Nvd/AGENTS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Nvd/AGENTS.md diff --git a/src/StellaOps.Concelier.Connector.Nvd/Configuration/NvdOptions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Nvd/Configuration/NvdOptions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Nvd/Configuration/NvdOptions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Nvd/Configuration/NvdOptions.cs diff --git a/src/StellaOps.Concelier.Connector.Nvd/Internal/NvdCursor.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Nvd/Internal/NvdCursor.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Nvd/Internal/NvdCursor.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Nvd/Internal/NvdCursor.cs diff --git a/src/StellaOps.Concelier.Connector.Nvd/Internal/NvdDiagnostics.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Nvd/Internal/NvdDiagnostics.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Nvd/Internal/NvdDiagnostics.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Nvd/Internal/NvdDiagnostics.cs diff --git a/src/StellaOps.Concelier.Connector.Nvd/Internal/NvdMapper.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Nvd/Internal/NvdMapper.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Nvd/Internal/NvdMapper.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Nvd/Internal/NvdMapper.cs diff --git a/src/StellaOps.Concelier.Connector.Nvd/Internal/NvdSchemaProvider.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Nvd/Internal/NvdSchemaProvider.cs similarity 
index 100% rename from src/StellaOps.Concelier.Connector.Nvd/Internal/NvdSchemaProvider.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Nvd/Internal/NvdSchemaProvider.cs diff --git a/src/StellaOps.Concelier.Connector.Nvd/NvdConnector.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Nvd/NvdConnector.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Nvd/NvdConnector.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Nvd/NvdConnector.cs diff --git a/src/StellaOps.Concelier.Connector.Nvd/NvdConnectorPlugin.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Nvd/NvdConnectorPlugin.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Nvd/NvdConnectorPlugin.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Nvd/NvdConnectorPlugin.cs diff --git a/src/StellaOps.Concelier.Connector.Nvd/NvdServiceCollectionExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Nvd/NvdServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Nvd/NvdServiceCollectionExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Nvd/NvdServiceCollectionExtensions.cs diff --git a/src/StellaOps.Concelier.Connector.Nvd/Properties/AssemblyInfo.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Nvd/Properties/AssemblyInfo.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Nvd/Properties/AssemblyInfo.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Nvd/Properties/AssemblyInfo.cs diff --git a/src/StellaOps.Concelier.Connector.Nvd/Schemas/nvd-vulnerability.schema.json b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Nvd/Schemas/nvd-vulnerability.schema.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Nvd/Schemas/nvd-vulnerability.schema.json rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Nvd/Schemas/nvd-vulnerability.schema.json diff --git a/src/StellaOps.Concelier.Connector.Nvd/StellaOps.Concelier.Connector.Nvd.csproj b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Nvd/StellaOps.Concelier.Connector.Nvd.csproj similarity index 84% rename from src/StellaOps.Concelier.Connector.Nvd/StellaOps.Concelier.Connector.Nvd.csproj rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Nvd/StellaOps.Concelier.Connector.Nvd.csproj index 797a67a3..bd5923d5 100644 --- a/src/StellaOps.Concelier.Connector.Nvd/StellaOps.Concelier.Connector.Nvd.csproj +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Nvd/StellaOps.Concelier.Connector.Nvd.csproj @@ -1,17 +1,18 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - <ItemGroup> - <EmbeddedResource Include="Schemas\nvd-vulnerability.schema.json" /> - </ItemGroup> - <ItemGroup> - <ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" /> - <ProjectReference Include="..\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="..\StellaOps.Concelier.Connector.Common\StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="..\StellaOps.Concelier.Storage.Mongo\StellaOps.Concelier.Storage.Mongo.csproj" /> - <ProjectReference Include="..\StellaOps.Concelier.Normalization\StellaOps.Concelier.Normalization.csproj" /> - </ItemGroup> -</Project> +<?xml version='1.0' 
encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> + <EmbeddedResource Include="Schemas\nvd-vulnerability.schema.json" /> + </ItemGroup> + <ItemGroup> + <ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" /> + <ProjectReference Include="..\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="..\StellaOps.Concelier.Connector.Common\StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="..\StellaOps.Concelier.Storage.Mongo\StellaOps.Concelier.Storage.Mongo.csproj" /> + <ProjectReference Include="..\StellaOps.Concelier.Normalization\StellaOps.Concelier.Normalization.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Nvd/TASKS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Nvd/TASKS.md similarity index 93% rename from src/StellaOps.Concelier.Connector.Nvd/TASKS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Nvd/TASKS.md index bad8737c..13fefe22 100644 --- a/src/StellaOps.Concelier.Connector.Nvd/TASKS.md +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Nvd/TASKS.md @@ -1,17 +1,17 @@ -# TASKS -| Task | Owner(s) | Depends on | Notes | -|---|---|---|---| -|Fetch job with sliding modified windows|BE-Conn-Nvd|Source.Common|**DONE** – windowed fetch implemented with overlap and raw doc persistence.| -|DTO schema + validation|BE-Conn-Nvd|Source.Common|**DONE** – schema validator enforced before DTO persistence.| +# TASKS +| Task | Owner(s) | Depends on | Notes | +|---|---|---|---| +|Fetch job with sliding modified windows|BE-Conn-Nvd|Source.Common|**DONE** – windowed fetch implemented with overlap and raw doc persistence.| +|DTO schema + validation|BE-Conn-Nvd|Source.Common|**DONE** – schema validator enforced before DTO persistence.| |Mapper to canonical model|BE-Conn-Nvd|Models|**DONE** – `NvdMapper` populates CVSS/CWE/CPE data.<br>2025-10-11 research trail: upcoming normalized rules must serialize as `[{"scheme":"semver","type":"range","min":"<floor>","minInclusive":true,"max":"<ceiling>","maxInclusive":false,"notes":"nvd:CVE-2025-XXXX"}]`; keep notes consistent with CVE IDs for provenance joins.| -|Watermark repo usage|BE-Conn-Nvd|Storage.Mongo|**DONE** – cursor tracks windowStart/windowEnd and updates SourceState.| -|Integration test fixture isolation|QA|Storage.Mongo|**DONE** – connector tests reset Mongo/time fixtures between runs to avoid cross-test bleed.| -|Tests: golden pages + resume|QA|Tests|**DONE** – snapshot and resume coverage added across `NvdConnectorTests`.| +|Watermark repo usage|BE-Conn-Nvd|Storage.Mongo|**DONE** – cursor tracks windowStart/windowEnd and updates SourceState.| +|Integration test fixture isolation|QA|Storage.Mongo|**DONE** – connector tests reset Mongo/time fixtures between runs to avoid cross-test bleed.| +|Tests: golden pages + resume|QA|Tests|**DONE** – snapshot and resume coverage added across `NvdConnectorTests`.| |Observability|BE-Conn-Nvd|Core|**DONE** – `NvdDiagnostics` meter tracks attempts/documents/failures with collector tests.| |Change history snapshotting|BE-Conn-Nvd|Storage.Mongo|DONE – connector now records per-CVE snapshots with top-level diff metadata whenever canonical advisories change.| |Pagination for windows over page limit|BE-Conn-Nvd|Source.Common|**DONE** – 
additional page fetcher honors `startIndex`; covered by multipage tests.| |Schema validation quarantine path|BE-Conn-Nvd|Storage.Mongo|**DONE** – schema failures mark documents failed and metrics assert quarantine.| -|FEEDCONN-NVD-04-002 Conflict regression fixtures|BE-Conn-Nvd, QA|Merge `FEEDMERGE-ENGINE-04-001`|**DONE (2025-10-12)** – Published `conflict-nvd.canonical.json` + mapper test; includes CVSS 3.1 + CWE reference and normalized CPE range feeding the conflict triple. Validation: `dotnet test src/StellaOps.Concelier.Connector.Nvd.Tests/StellaOps.Concelier.Connector.Nvd.Tests.csproj --filter NvdConflictFixtureTests`.| +|FEEDCONN-NVD-04-002 Conflict regression fixtures|BE-Conn-Nvd, QA|Merge `FEEDMERGE-ENGINE-04-001`|**DONE (2025-10-12)** – Published `conflict-nvd.canonical.json` + mapper test; includes CVSS 3.1 + CWE reference and normalized CPE range feeding the conflict triple. Validation: `dotnet test src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Nvd.Tests/StellaOps.Concelier.Connector.Nvd.Tests.csproj --filter NvdConflictFixtureTests`.| |FEEDCONN-NVD-02-004 NVD CVSS & CWE precedence payloads|BE-Conn-Nvd|Models `FEEDMODELS-SCHEMA-01-002`|**DONE (2025-10-11)** – CVSS metrics now carry provenance masks, CWE weaknesses emit normalized references, and fixtures cover the additional precedence data.| |FEEDCONN-NVD-02-005 NVD merge/export parity regression|BE-Conn-Nvd, BE-Merge|Merge `FEEDMERGE-ENGINE-04-003`|**DONE (2025-10-12)** – Canonical merge parity fixtures captured, regression test validates credit/reference union, and exporter snapshot check guarantees parity through JSON exports.| |FEEDCONN-NVD-02-002 Normalized versions rollout|BE-Conn-Nvd|Models `FEEDMODELS-SCHEMA-01-003`, Normalization playbook|**DONE (2025-10-11)** – SemVer primitives + normalized rules emitting for parseable ranges, fixtures/tests refreshed, coordination pinged via FEEDMERGE-COORD-02-900.| diff --git a/src/StellaOps.Concelier.Connector.Osv/AGENTS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Osv/AGENTS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.Osv/AGENTS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Osv/AGENTS.md diff --git a/src/StellaOps.Concelier.Connector.Osv/Configuration/OsvOptions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Osv/Configuration/OsvOptions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Osv/Configuration/OsvOptions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Osv/Configuration/OsvOptions.cs diff --git a/src/StellaOps.Concelier.Connector.Osv/Internal/OsvCursor.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Osv/Internal/OsvCursor.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Osv/Internal/OsvCursor.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Osv/Internal/OsvCursor.cs diff --git a/src/StellaOps.Concelier.Connector.Osv/Internal/OsvDiagnostics.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Osv/Internal/OsvDiagnostics.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Osv/Internal/OsvDiagnostics.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Osv/Internal/OsvDiagnostics.cs diff --git a/src/StellaOps.Concelier.Connector.Osv/Internal/OsvMapper.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Osv/Internal/OsvMapper.cs similarity index 100% rename from 
src/StellaOps.Concelier.Connector.Osv/Internal/OsvMapper.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Osv/Internal/OsvMapper.cs diff --git a/src/StellaOps.Concelier.Connector.Osv/Internal/OsvVulnerabilityDto.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Osv/Internal/OsvVulnerabilityDto.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Osv/Internal/OsvVulnerabilityDto.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Osv/Internal/OsvVulnerabilityDto.cs diff --git a/src/StellaOps.Concelier.Connector.Osv/Jobs.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Osv/Jobs.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Osv/Jobs.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Osv/Jobs.cs diff --git a/src/StellaOps.Concelier.Connector.Osv/OsvConnector.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Osv/OsvConnector.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Osv/OsvConnector.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Osv/OsvConnector.cs diff --git a/src/StellaOps.Concelier.Connector.Osv/OsvConnectorPlugin.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Osv/OsvConnectorPlugin.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Osv/OsvConnectorPlugin.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Osv/OsvConnectorPlugin.cs diff --git a/src/StellaOps.Concelier.Connector.Osv/OsvDependencyInjectionRoutine.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Osv/OsvDependencyInjectionRoutine.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Osv/OsvDependencyInjectionRoutine.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Osv/OsvDependencyInjectionRoutine.cs diff --git a/src/StellaOps.Concelier.Connector.Osv/OsvServiceCollectionExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Osv/OsvServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Osv/OsvServiceCollectionExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Osv/OsvServiceCollectionExtensions.cs diff --git a/src/StellaOps.Concelier.Connector.Osv/Properties/AssemblyInfo.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Osv/Properties/AssemblyInfo.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Osv/Properties/AssemblyInfo.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Osv/Properties/AssemblyInfo.cs diff --git a/src/StellaOps.Concelier.Connector.Osv/StellaOps.Concelier.Connector.Osv.csproj b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Osv/StellaOps.Concelier.Connector.Osv.csproj similarity index 88% rename from src/StellaOps.Concelier.Connector.Osv/StellaOps.Concelier.Connector.Osv.csproj rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Osv/StellaOps.Concelier.Connector.Osv.csproj index dec35b89..bbf1eb75 100644 --- a/src/StellaOps.Concelier.Connector.Osv/StellaOps.Concelier.Connector.Osv.csproj +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Osv/StellaOps.Concelier.Connector.Osv.csproj @@ -1,23 +1,24 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - <ItemGroup> - <ProjectReference 
Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" /> - <ProjectReference Include="..\StellaOps.Concelier.Connector.Common\StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="..\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="..\StellaOps.Concelier.Storage.Mongo\StellaOps.Concelier.Storage.Mongo.csproj" /> - <ProjectReference Include="..\StellaOps.Concelier.Normalization\StellaOps.Concelier.Normalization.csproj" /> - <ProjectReference Include="..\StellaOps.Concelier.Core\StellaOps.Concelier.Core.csproj" /> - </ItemGroup> - <ItemGroup> - <AssemblyAttribute Include="System.Runtime.CompilerServices.InternalsVisibleTo"> - <_Parameter1>StellaOps.Concelier.Tests</_Parameter1> - </AssemblyAttribute> - <AssemblyAttribute Include="System.Runtime.CompilerServices.InternalsVisibleTo"> - <_Parameter1>StellaOps.Concelier.Connector.Osv.Tests</_Parameter1> - </AssemblyAttribute> - </ItemGroup> -</Project> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" /> + <ProjectReference Include="..\StellaOps.Concelier.Connector.Common\StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="..\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="..\StellaOps.Concelier.Storage.Mongo\StellaOps.Concelier.Storage.Mongo.csproj" /> + <ProjectReference Include="..\StellaOps.Concelier.Normalization\StellaOps.Concelier.Normalization.csproj" /> + <ProjectReference Include="..\StellaOps.Concelier.Core\StellaOps.Concelier.Core.csproj" /> + </ItemGroup> + <ItemGroup> + <AssemblyAttribute Include="System.Runtime.CompilerServices.InternalsVisibleTo"> + <_Parameter1>StellaOps.Concelier.Tests</_Parameter1> + </AssemblyAttribute> + <AssemblyAttribute Include="System.Runtime.CompilerServices.InternalsVisibleTo"> + <_Parameter1>StellaOps.Concelier.Connector.Osv.Tests</_Parameter1> + </AssemblyAttribute> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Osv/TASKS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Osv/TASKS.md similarity index 88% rename from src/StellaOps.Concelier.Connector.Osv/TASKS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Osv/TASKS.md index b17f04cb..cdf91137 100644 --- a/src/StellaOps.Concelier.Connector.Osv/TASKS.md +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Osv/TASKS.md @@ -14,7 +14,7 @@ |FEEDCONN-OSV-02-004 OSV references & credits alignment|BE-Conn-OSV|Models `FEEDMODELS-SCHEMA-01-002`|**DONE (2025-10-11)** – Mapper normalizes references with provenance masks, emits advisory credits, and regression fixtures/assertions cover the new fields.| |FEEDCONN-OSV-02-005 Fixture updater workflow|BE-Conn-OSV, QA|Docs|**DONE (2025-10-12)** – Canonical PURL derivation now covers Go + scoped npm advisories without upstream `purl`; legacy invalid npm names still fall back to `ecosystem:name`. 
OSV/GHSA/NVD suites and normalization/storage tests rerun clean.| |FEEDCONN-OSV-02-003 Normalized versions rollout|BE-Conn-OSV|Models `FEEDMODELS-SCHEMA-01-003`, Normalization playbook|**DONE (2025-10-11)** – `OsvMapper` now emits SemVer primitives + normalized rules with `osv:{ecosystem}:{advisoryId}:{identifier}` notes; npm/PyPI/Parity fixtures refreshed; merge coordination pinged (OSV handoff).| -|FEEDCONN-OSV-04-003 Parity fixture refresh|QA, BE-Conn-OSV|Normalized versions rollout, GHSA parity tests|**DONE (2025-10-12)** – Parity fixtures include normalizedVersions notes (`osv:<ecosystem>:<id>:<purl>`); regression math rerun via `dotnet test src/StellaOps.Concelier.Connector.Osv.Tests` and docs flagged for workflow sync.| -|FEEDCONN-OSV-04-002 Conflict regression fixtures|BE-Conn-OSV, QA|Merge `FEEDMERGE-ENGINE-04-001`|**DONE (2025-10-12)** – Added `conflict-osv.canonical.json` + regression asserting SemVer range + CVSS medium severity; dataset matches GHSA/NVD fixtures for merge tests. Validation: `dotnet test src/StellaOps.Concelier.Connector.Osv.Tests/StellaOps.Concelier.Connector.Osv.Tests.csproj --filter OsvConflictFixtureTests`.| +|FEEDCONN-OSV-04-003 Parity fixture refresh|QA, BE-Conn-OSV|Normalized versions rollout, GHSA parity tests|**DONE (2025-10-12)** – Parity fixtures include normalizedVersions notes (`osv:<ecosystem>:<id>:<purl>`); regression math rerun via `dotnet test src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Osv.Tests` and docs flagged for workflow sync.| +|FEEDCONN-OSV-04-002 Conflict regression fixtures|BE-Conn-OSV, QA|Merge `FEEDMERGE-ENGINE-04-001`|**DONE (2025-10-12)** – Added `conflict-osv.canonical.json` + regression asserting SemVer range + CVSS medium severity; dataset matches GHSA/NVD fixtures for merge tests. Validation: `dotnet test src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Osv.Tests/StellaOps.Concelier.Connector.Osv.Tests.csproj --filter OsvConflictFixtureTests`.| |FEEDCONN-OSV-04-004 Description/CWE/metric parity rollout|BE-Conn-OSV|Models, Core|**DONE (2025-10-15)** – OSV mapper writes advisory descriptions, `database_specific.cwe_ids` weaknesses, and canonical CVSS metric id. Parity fixtures (`osv-ghsa.*`, `osv-npm.snapshot.json`, `osv-pypi.snapshot.json`) refreshed and status communicated to Merge coordination.| -|FEEDCONN-OSV-04-005 Canonical metric fallbacks & CWE notes|BE-Conn-OSV|Models, Merge|**DONE (2025-10-16)** – Add fallback logic and metrics for advisories lacking CVSS vectors, enrich CWE provenance notes, and document merge/export expectations; refresh parity fixtures accordingly.<br>2025-10-16: Mapper now emits `osv:severity/<level>` canonical ids for severity-only advisories, weakness provenance carries `database_specific.cwe_ids`, diagnostics expose `osv.map.canonical_metric_fallbacks`, parity fixtures regenerated, and ops notes added in `docs/ops/concelier-osv-operations.md`. 
Tests: `dotnet test src/StellaOps.Concelier.Connector.Osv.Tests/StellaOps.Concelier.Connector.Osv.Tests.csproj`.| +|FEEDCONN-OSV-04-005 Canonical metric fallbacks & CWE notes|BE-Conn-OSV|Models, Merge|**DONE (2025-10-16)** – Add fallback logic and metrics for advisories lacking CVSS vectors, enrich CWE provenance notes, and document merge/export expectations; refresh parity fixtures accordingly.<br>2025-10-16: Mapper now emits `osv:severity/<level>` canonical ids for severity-only advisories, weakness provenance carries `database_specific.cwe_ids`, diagnostics expose `osv.map.canonical_metric_fallbacks`, parity fixtures regenerated, and ops notes added in `docs/ops/concelier-osv-operations.md`. Tests: `dotnet test src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Osv.Tests/StellaOps.Concelier.Connector.Osv.Tests.csproj`.| diff --git a/src/StellaOps.Concelier.Connector.Ru.Bdu/AGENTS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Bdu/AGENTS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Bdu/AGENTS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Bdu/AGENTS.md diff --git a/src/StellaOps.Concelier.Connector.Ru.Bdu/Configuration/RuBduOptions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Bdu/Configuration/RuBduOptions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Bdu/Configuration/RuBduOptions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Bdu/Configuration/RuBduOptions.cs diff --git a/src/StellaOps.Concelier.Connector.Ru.Bdu/Internal/RuBduCursor.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Bdu/Internal/RuBduCursor.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Bdu/Internal/RuBduCursor.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Bdu/Internal/RuBduCursor.cs diff --git a/src/StellaOps.Concelier.Connector.Ru.Bdu/Internal/RuBduDiagnostics.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Bdu/Internal/RuBduDiagnostics.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Bdu/Internal/RuBduDiagnostics.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Bdu/Internal/RuBduDiagnostics.cs diff --git a/src/StellaOps.Concelier.Connector.Ru.Bdu/Internal/RuBduMapper.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Bdu/Internal/RuBduMapper.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Bdu/Internal/RuBduMapper.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Bdu/Internal/RuBduMapper.cs diff --git a/src/StellaOps.Concelier.Connector.Ru.Bdu/Internal/RuBduVulnerabilityDto.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Bdu/Internal/RuBduVulnerabilityDto.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Bdu/Internal/RuBduVulnerabilityDto.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Bdu/Internal/RuBduVulnerabilityDto.cs diff --git a/src/StellaOps.Concelier.Connector.Ru.Bdu/Internal/RuBduXmlParser.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Bdu/Internal/RuBduXmlParser.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Bdu/Internal/RuBduXmlParser.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Bdu/Internal/RuBduXmlParser.cs diff --git a/src/StellaOps.Concelier.Connector.Ru.Bdu/Jobs.cs 
b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Bdu/Jobs.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Bdu/Jobs.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Bdu/Jobs.cs diff --git a/src/StellaOps.Concelier.Connector.Ru.Bdu/Properties/AssemblyInfo.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Bdu/Properties/AssemblyInfo.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Bdu/Properties/AssemblyInfo.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Bdu/Properties/AssemblyInfo.cs diff --git a/src/StellaOps.Concelier.Connector.Ru.Bdu/README.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Bdu/README.md similarity index 92% rename from src/StellaOps.Concelier.Connector.Ru.Bdu/README.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Bdu/README.md index aa378f50..2dafc24a 100644 --- a/src/StellaOps.Concelier.Connector.Ru.Bdu/README.md +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Bdu/README.md @@ -31,10 +31,10 @@ Use these metrics to alert on repeated cache fallbacks, sustained parse failures ## Regression fixtures -Deterministic fixtures live under `src/StellaOps.Concelier.Connector.Ru.Bdu.Tests/Fixtures`. Run +Deterministic fixtures live under `src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests/Fixtures`. Run ```bash -dotnet test src/StellaOps.Concelier.Connector.Ru.Bdu.Tests +dotnet test src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests ``` to execute the RU BDU snapshot suite, and set `UPDATE_BDU_FIXTURES=1` to refresh stored snapshots when ingest logic changes. The harness records the fetch requests, documents, DTOs, advisories, and state cursor to guarantee reproducible pipelines across machines. 
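Editorial aside on the README hunk above: a minimal sketch of the fixture-refresh workflow that hunk describes, assuming the post-move test path `src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests` it references; the env-var name `UPDATE_BDU_FIXTURES` is taken from the same hunk, and the final re-run to confirm determinism is an assumed (not stated) step.

```bash
# Run the RU BDU snapshot suite against the relocated test project
dotnet test src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests

# Regenerate stored snapshots after ingest-logic changes, then re-run without the flag
# to confirm the refreshed fixtures are deterministic
UPDATE_BDU_FIXTURES=1 dotnet test src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests
dotnet test src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests
```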
diff --git a/src/StellaOps.Concelier.Connector.Ru.Bdu/RuBduConnector.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Bdu/RuBduConnector.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Bdu/RuBduConnector.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Bdu/RuBduConnector.cs diff --git a/src/StellaOps.Concelier.Connector.Ru.Bdu/RuBduConnectorPlugin.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Bdu/RuBduConnectorPlugin.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Bdu/RuBduConnectorPlugin.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Bdu/RuBduConnectorPlugin.cs diff --git a/src/StellaOps.Concelier.Connector.Ru.Bdu/RuBduDependencyInjectionRoutine.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Bdu/RuBduDependencyInjectionRoutine.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Bdu/RuBduDependencyInjectionRoutine.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Bdu/RuBduDependencyInjectionRoutine.cs diff --git a/src/StellaOps.Concelier.Connector.Ru.Bdu/RuBduServiceCollectionExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Bdu/RuBduServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Bdu/RuBduServiceCollectionExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Bdu/RuBduServiceCollectionExtensions.cs diff --git a/src/StellaOps.Concelier.Connector.Ru.Bdu/StellaOps.Concelier.Connector.Ru.Bdu.csproj b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Bdu/StellaOps.Concelier.Connector.Ru.Bdu.csproj similarity index 83% rename from src/StellaOps.Concelier.Connector.Ru.Bdu/StellaOps.Concelier.Connector.Ru.Bdu.csproj rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Bdu/StellaOps.Concelier.Connector.Ru.Bdu.csproj index ca36c398..21e91dc4 100644 --- a/src/StellaOps.Concelier.Connector.Ru.Bdu/StellaOps.Concelier.Connector.Ru.Bdu.csproj +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Bdu/StellaOps.Concelier.Connector.Ru.Bdu.csproj @@ -1,18 +1,19 @@ -<Project Sdk="Microsoft.NET.Sdk"> - - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - - <ItemGroup> - <ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - </ItemGroup> - -</Project> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + + <ItemGroup> + <ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" /> + <ProjectReference 
Include="../StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> + </ItemGroup> + +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Ru.Bdu/TASKS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Bdu/TASKS.md similarity index 94% rename from src/StellaOps.Concelier.Connector.Ru.Bdu/TASKS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Bdu/TASKS.md index 846d1b39..f14b7cae 100644 --- a/src/StellaOps.Concelier.Connector.Ru.Bdu/TASKS.md +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Bdu/TASKS.md @@ -1,11 +1,11 @@ -# TASKS -| Task | Owner(s) | Depends on | Notes | -|---|---|---|---| -|FEEDCONN-RUBDU-02-001 Identify BDU data source & schema|BE-Conn-BDU|Research|**DONE (2025-10-11)** – Candidate endpoints (`https://bdu.fstec.ru/component/rsform/form/7-bdu?format=xml`, `...?format=json`) return 403/404 even with `--insecure` because TLS chain requires Russian Trusted Sub CA and WAF expects referer/session headers. Documented request/response samples in `docs/concelier-connector-research-20251011.md`; blocked until trusted root + access strategy from Ops.| -|FEEDCONN-RUBDU-02-002 Fetch pipeline & cursor handling|BE-Conn-BDU|Source.Common, Storage.Mongo|**DONE (2025-10-14)** – Connector streams `vulxml.zip` through cached fetches, persists JSON payloads via `RawDocumentStorage`, and tracks cursor pending sets. Added cache fallback + deterministic SHA logging and state updates tied to `TimeProvider`.| -|FEEDCONN-RUBDU-02-003 DTO/parser implementation|BE-Conn-BDU|Source.Common|**DONE (2025-10-14)** – `RuBduXmlParser` now captures identifiers, source links, CVSS 2/3 metrics, CWE arrays, and environment/software metadata with coverage for multi-entry fixtures.| -|FEEDCONN-RUBDU-02-004 Canonical mapping & range primitives|BE-Conn-BDU|Models|**DONE (2025-10-14)** – `RuBduMapper` emits vendor/ICS packages with normalized `ru-bdu.raw` rules, dual status provenance, alias/reference hydration (CVE, external, source), and CVSS severity normalisation.| -|FEEDCONN-RUBDU-02-005 Deterministic fixtures & regression tests|QA|Testing|**DONE (2025-10-14)** – Added connector harness snapshot suite with canned archive, state/documents/dtos/advisories snapshots under `Fixtures/`, gated by `UPDATE_BDU_FIXTURES`.| -|FEEDCONN-RUBDU-02-006 Telemetry & documentation|DevEx|Docs|**DONE (2025-10-14)** – Introduced `RuBduDiagnostics` meter (fetch/parse/map counters & histograms) and authored connector README covering configuration, trusted roots, telemetry, and offline behaviour.| -|FEEDCONN-RUBDU-02-007 Access & export options assessment|BE-Conn-BDU|Research|**DONE (2025-10-14)** – Documented archive access constraints, offline mirroring expectations, and export packaging in `src/StellaOps.Concelier.Connector.Ru.Bdu/README.md` + flagged Offline Kit bundling requirements.| -|FEEDCONN-RUBDU-02-008 Trusted root onboarding plan|BE-Conn-BDU|Source.Common|**DONE (2025-10-14)** – Validated Russian Trusted Root/Sub CA bundle wiring (`certificates/russian_trusted_bundle.pem`), updated Offline Kit guidance, and surfaced `concelier:httpClients:source.bdu:trustedRootPaths` sample configuration.| +# TASKS 
+| Task | Owner(s) | Depends on | Notes | +|---|---|---|---| +|FEEDCONN-RUBDU-02-001 Identify BDU data source & schema|BE-Conn-BDU|Research|**DONE (2025-10-11)** – Candidate endpoints (`https://bdu.fstec.ru/component/rsform/form/7-bdu?format=xml`, `...?format=json`) return 403/404 even with `--insecure` because TLS chain requires Russian Trusted Sub CA and WAF expects referer/session headers. Documented request/response samples in `docs/concelier-connector-research-20251011.md`; blocked until trusted root + access strategy from Ops.| +|FEEDCONN-RUBDU-02-002 Fetch pipeline & cursor handling|BE-Conn-BDU|Source.Common, Storage.Mongo|**DONE (2025-10-14)** – Connector streams `vulxml.zip` through cached fetches, persists JSON payloads via `RawDocumentStorage`, and tracks cursor pending sets. Added cache fallback + deterministic SHA logging and state updates tied to `TimeProvider`.| +|FEEDCONN-RUBDU-02-003 DTO/parser implementation|BE-Conn-BDU|Source.Common|**DONE (2025-10-14)** – `RuBduXmlParser` now captures identifiers, source links, CVSS 2/3 metrics, CWE arrays, and environment/software metadata with coverage for multi-entry fixtures.| +|FEEDCONN-RUBDU-02-004 Canonical mapping & range primitives|BE-Conn-BDU|Models|**DONE (2025-10-14)** – `RuBduMapper` emits vendor/ICS packages with normalized `ru-bdu.raw` rules, dual status provenance, alias/reference hydration (CVE, external, source), and CVSS severity normalisation.| +|FEEDCONN-RUBDU-02-005 Deterministic fixtures & regression tests|QA|Testing|**DONE (2025-10-14)** – Added connector harness snapshot suite with canned archive, state/documents/dtos/advisories snapshots under `Fixtures/`, gated by `UPDATE_BDU_FIXTURES`.| +|FEEDCONN-RUBDU-02-006 Telemetry & documentation|DevEx|Docs|**DONE (2025-10-14)** – Introduced `RuBduDiagnostics` meter (fetch/parse/map counters & histograms) and authored connector README covering configuration, trusted roots, telemetry, and offline behaviour.| +|FEEDCONN-RUBDU-02-007 Access & export options assessment|BE-Conn-BDU|Research|**DONE (2025-10-14)** – Documented archive access constraints, offline mirroring expectations, and export packaging in `src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Ru.Bdu/README.md` + flagged Offline Kit bundling requirements.| +|FEEDCONN-RUBDU-02-008 Trusted root onboarding plan|BE-Conn-BDU|Source.Common|**DONE (2025-10-14)** – Validated Russian Trusted Root/Sub CA bundle wiring (`certificates/russian_trusted_bundle.pem`), updated Offline Kit guidance, and surfaced `concelier:httpClients:source.bdu:trustedRootPaths` sample configuration.| diff --git a/src/StellaOps.Concelier.Connector.Ru.Nkcki/AGENTS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Nkcki/AGENTS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Nkcki/AGENTS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Nkcki/AGENTS.md diff --git a/src/StellaOps.Concelier.Connector.Ru.Nkcki/Configuration/RuNkckiOptions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Nkcki/Configuration/RuNkckiOptions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Nkcki/Configuration/RuNkckiOptions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Nkcki/Configuration/RuNkckiOptions.cs diff --git a/src/StellaOps.Concelier.Connector.Ru.Nkcki/Internal/RuNkckiCursor.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Nkcki/Internal/RuNkckiCursor.cs similarity index 100% rename from 
src/StellaOps.Concelier.Connector.Ru.Nkcki/Internal/RuNkckiCursor.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Nkcki/Internal/RuNkckiCursor.cs diff --git a/src/StellaOps.Concelier.Connector.Ru.Nkcki/Internal/RuNkckiDiagnostics.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Nkcki/Internal/RuNkckiDiagnostics.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Nkcki/Internal/RuNkckiDiagnostics.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Nkcki/Internal/RuNkckiDiagnostics.cs diff --git a/src/StellaOps.Concelier.Connector.Ru.Nkcki/Internal/RuNkckiJsonParser.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Nkcki/Internal/RuNkckiJsonParser.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Nkcki/Internal/RuNkckiJsonParser.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Nkcki/Internal/RuNkckiJsonParser.cs diff --git a/src/StellaOps.Concelier.Connector.Ru.Nkcki/Internal/RuNkckiMapper.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Nkcki/Internal/RuNkckiMapper.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Nkcki/Internal/RuNkckiMapper.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Nkcki/Internal/RuNkckiMapper.cs diff --git a/src/StellaOps.Concelier.Connector.Ru.Nkcki/Internal/RuNkckiVulnerabilityDto.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Nkcki/Internal/RuNkckiVulnerabilityDto.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Nkcki/Internal/RuNkckiVulnerabilityDto.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Nkcki/Internal/RuNkckiVulnerabilityDto.cs diff --git a/src/StellaOps.Concelier.Connector.Ru.Nkcki/Jobs.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Nkcki/Jobs.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Nkcki/Jobs.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Nkcki/Jobs.cs diff --git a/src/StellaOps.Concelier.Connector.Ru.Nkcki/Properties/AssemblyInfo.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Nkcki/Properties/AssemblyInfo.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Nkcki/Properties/AssemblyInfo.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Nkcki/Properties/AssemblyInfo.cs diff --git a/src/StellaOps.Concelier.Connector.Ru.Nkcki/RuNkckiConnector.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Nkcki/RuNkckiConnector.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Nkcki/RuNkckiConnector.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Nkcki/RuNkckiConnector.cs diff --git a/src/StellaOps.Concelier.Connector.Ru.Nkcki/RuNkckiConnectorPlugin.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Nkcki/RuNkckiConnectorPlugin.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Nkcki/RuNkckiConnectorPlugin.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Nkcki/RuNkckiConnectorPlugin.cs diff --git a/src/StellaOps.Concelier.Connector.Ru.Nkcki/RuNkckiDependencyInjectionRoutine.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Nkcki/RuNkckiDependencyInjectionRoutine.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Nkcki/RuNkckiDependencyInjectionRoutine.cs rename to 
src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Nkcki/RuNkckiDependencyInjectionRoutine.cs diff --git a/src/StellaOps.Concelier.Connector.Ru.Nkcki/RuNkckiServiceCollectionExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Nkcki/RuNkckiServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Nkcki/RuNkckiServiceCollectionExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Nkcki/RuNkckiServiceCollectionExtensions.cs diff --git a/src/StellaOps.Concelier.Connector.Ru.Nkcki/StellaOps.Concelier.Connector.Ru.Nkcki.csproj b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Nkcki/StellaOps.Concelier.Connector.Ru.Nkcki.csproj similarity index 85% rename from src/StellaOps.Concelier.Connector.Ru.Nkcki/StellaOps.Concelier.Connector.Ru.Nkcki.csproj rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Nkcki/StellaOps.Concelier.Connector.Ru.Nkcki.csproj index b10cc4d0..ed67e1a9 100644 --- a/src/StellaOps.Concelier.Connector.Ru.Nkcki/StellaOps.Concelier.Connector.Ru.Nkcki.csproj +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Nkcki/StellaOps.Concelier.Connector.Ru.Nkcki.csproj @@ -1,22 +1,23 @@ -<Project Sdk="Microsoft.NET.Sdk"> - - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - - <ItemGroup> - <PackageReference Include="AngleSharp" Version="1.1.1" /> - </ItemGroup> - - <ItemGroup> - <ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - </ItemGroup> - -</Project> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + + <ItemGroup> + <PackageReference Include="AngleSharp" Version="1.1.1" /> + </ItemGroup> + + <ItemGroup> + <ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> + </ItemGroup> + +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Ru.Nkcki/TASKS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Nkcki/TASKS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Nkcki/TASKS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Ru.Nkcki/TASKS.md diff --git 
a/src/StellaOps.Concelier.Connector.StellaOpsMirror/Client/MirrorManifestClient.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/Client/MirrorManifestClient.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.StellaOpsMirror/Client/MirrorManifestClient.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/Client/MirrorManifestClient.cs diff --git a/src/StellaOps.Concelier.Connector.StellaOpsMirror/Internal/MirrorAdvisoryMapper.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/Internal/MirrorAdvisoryMapper.cs similarity index 97% rename from src/StellaOps.Concelier.Connector.StellaOpsMirror/Internal/MirrorAdvisoryMapper.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/Internal/MirrorAdvisoryMapper.cs index 3b1cce86..2b542225 100644 --- a/src/StellaOps.Concelier.Connector.StellaOpsMirror/Internal/MirrorAdvisoryMapper.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/Internal/MirrorAdvisoryMapper.cs @@ -1,203 +1,203 @@ -using System; -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Linq; -using System.Globalization; -using StellaOps.Concelier.Models; - -namespace StellaOps.Concelier.Connector.StellaOpsMirror.Internal; - -internal static class MirrorAdvisoryMapper -{ - private const string MirrorProvenanceKind = "map"; - - private static readonly string[] TopLevelFieldMask = - { - ProvenanceFieldMasks.Advisory, - ProvenanceFieldMasks.References, - ProvenanceFieldMasks.Credits, - ProvenanceFieldMasks.CvssMetrics, - ProvenanceFieldMasks.Weaknesses, - }; - - public static ImmutableArray<Advisory> Map(MirrorBundleDocument bundle) - { - if (bundle?.Advisories is null || bundle.Advisories.Count == 0) - { - return ImmutableArray<Advisory>.Empty; - } - - var builder = ImmutableArray.CreateBuilder<Advisory>(bundle.Advisories.Count); - var recordedAt = bundle.GeneratedAt.ToUniversalTime(); - var mirrorValue = BuildMirrorValue(bundle, recordedAt); - var topLevelProvenance = new AdvisoryProvenance( - StellaOpsMirrorConnector.Source, - MirrorProvenanceKind, - mirrorValue, - recordedAt, - TopLevelFieldMask); - - foreach (var advisory in bundle.Advisories) - { - if (advisory is null) - { - continue; - } - - var normalized = CanonicalJsonSerializer.Normalize(advisory); - var aliases = EnsureAliasCoverage(normalized); - var provenance = EnsureProvenance(normalized.Provenance, topLevelProvenance); - var packages = EnsurePackageProvenance(normalized.AffectedPackages, mirrorValue, recordedAt); - - var updated = new Advisory( - normalized.AdvisoryKey, - normalized.Title, - normalized.Summary, - normalized.Language, - normalized.Published, - normalized.Modified, - normalized.Severity, - normalized.ExploitKnown, - aliases, - normalized.Credits, - normalized.References, - packages, - normalized.CvssMetrics, - provenance, - normalized.Description, - normalized.Cwes, - normalized.CanonicalMetricId); - - builder.Add(updated); - } - - return builder.ToImmutable(); - } - - private static IEnumerable<string> EnsureAliasCoverage(Advisory advisory) - { - var aliases = new List<string>(advisory.Aliases.Length + 1); - var seen = new HashSet<string>(StringComparer.OrdinalIgnoreCase); - - foreach (var alias in advisory.Aliases) - { - if (seen.Add(alias)) - { - aliases.Add(alias); - } - } - - if (seen.Add(advisory.AdvisoryKey)) - { - aliases.Add(advisory.AdvisoryKey); - } - - return aliases; - } - - private static 
IEnumerable<AdvisoryProvenance> EnsureProvenance( - ImmutableArray<AdvisoryProvenance> existing, - AdvisoryProvenance mirrorProvenance) - { - if (!existing.IsDefaultOrEmpty - && existing.Any(provenance => - string.Equals(provenance.Source, mirrorProvenance.Source, StringComparison.Ordinal) - && string.Equals(provenance.Kind, mirrorProvenance.Kind, StringComparison.Ordinal) - && string.Equals(provenance.Value, mirrorProvenance.Value, StringComparison.Ordinal))) - { - return existing; - } - - return existing.Add(mirrorProvenance); - } - - private static IEnumerable<AffectedPackage> EnsurePackageProvenance( - ImmutableArray<AffectedPackage> packages, - string mirrorValue, - DateTimeOffset recordedAt) - { - if (packages.IsDefaultOrEmpty || packages.Length == 0) - { - return packages; - } - - var results = new List<AffectedPackage>(packages.Length); - - foreach (var package in packages) - { - var value = $"{mirrorValue};package={package.Identifier}"; - if (!package.Provenance.IsDefaultOrEmpty - && package.Provenance.Any(provenance => - string.Equals(provenance.Source, StellaOpsMirrorConnector.Source, StringComparison.Ordinal) - && string.Equals(provenance.Kind, MirrorProvenanceKind, StringComparison.Ordinal) - && string.Equals(provenance.Value, value, StringComparison.Ordinal))) - { - results.Add(package); - continue; - } - - var masks = BuildPackageFieldMask(package); - var packageProvenance = new AdvisoryProvenance( - StellaOpsMirrorConnector.Source, - MirrorProvenanceKind, - value, - recordedAt, - masks); - - var provenance = package.Provenance.Add(packageProvenance); - var updated = new AffectedPackage( - package.Type, - package.Identifier, - package.Platform, - package.VersionRanges, - package.Statuses, - provenance, - package.NormalizedVersions); - - results.Add(updated); - } - - return results; - } - - private static string[] BuildPackageFieldMask(AffectedPackage package) - { - var masks = new HashSet<string>(StringComparer.Ordinal) - { - ProvenanceFieldMasks.AffectedPackages, - }; - - if (!package.VersionRanges.IsDefaultOrEmpty && package.VersionRanges.Length > 0) - { - masks.Add(ProvenanceFieldMasks.VersionRanges); - } - - if (!package.Statuses.IsDefaultOrEmpty && package.Statuses.Length > 0) - { - masks.Add(ProvenanceFieldMasks.PackageStatuses); - } - - if (!package.NormalizedVersions.IsDefaultOrEmpty && package.NormalizedVersions.Length > 0) - { - masks.Add(ProvenanceFieldMasks.NormalizedVersions); - } - - return masks.ToArray(); - } - - private static string BuildMirrorValue(MirrorBundleDocument bundle, DateTimeOffset recordedAt) - { - var segments = new List<string> - { - $"domain={bundle.DomainId}", - }; - - if (!string.IsNullOrWhiteSpace(bundle.TargetRepository)) - { - segments.Add($"repository={bundle.TargetRepository}"); - } - - segments.Add($"generated={recordedAt.ToString("O", CultureInfo.InvariantCulture)}"); - return string.Join(';', segments); - } -} +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Linq; +using System.Globalization; +using StellaOps.Concelier.Models; + +namespace StellaOps.Concelier.Connector.StellaOpsMirror.Internal; + +internal static class MirrorAdvisoryMapper +{ + private const string MirrorProvenanceKind = "map"; + + private static readonly string[] TopLevelFieldMask = + { + ProvenanceFieldMasks.Advisory, + ProvenanceFieldMasks.References, + ProvenanceFieldMasks.Credits, + ProvenanceFieldMasks.CvssMetrics, + ProvenanceFieldMasks.Weaknesses, + }; + + public static ImmutableArray<Advisory> 
Map(MirrorBundleDocument bundle) + { + if (bundle?.Advisories is null || bundle.Advisories.Count == 0) + { + return ImmutableArray<Advisory>.Empty; + } + + var builder = ImmutableArray.CreateBuilder<Advisory>(bundle.Advisories.Count); + var recordedAt = bundle.GeneratedAt.ToUniversalTime(); + var mirrorValue = BuildMirrorValue(bundle, recordedAt); + var topLevelProvenance = new AdvisoryProvenance( + StellaOpsMirrorConnector.Source, + MirrorProvenanceKind, + mirrorValue, + recordedAt, + TopLevelFieldMask); + + foreach (var advisory in bundle.Advisories) + { + if (advisory is null) + { + continue; + } + + var normalized = CanonicalJsonSerializer.Normalize(advisory); + var aliases = EnsureAliasCoverage(normalized); + var provenance = EnsureProvenance(normalized.Provenance, topLevelProvenance); + var packages = EnsurePackageProvenance(normalized.AffectedPackages, mirrorValue, recordedAt); + + var updated = new Advisory( + normalized.AdvisoryKey, + normalized.Title, + normalized.Summary, + normalized.Language, + normalized.Published, + normalized.Modified, + normalized.Severity, + normalized.ExploitKnown, + aliases, + normalized.Credits, + normalized.References, + packages, + normalized.CvssMetrics, + provenance, + normalized.Description, + normalized.Cwes, + normalized.CanonicalMetricId); + + builder.Add(updated); + } + + return builder.ToImmutable(); + } + + private static IEnumerable<string> EnsureAliasCoverage(Advisory advisory) + { + var aliases = new List<string>(advisory.Aliases.Length + 1); + var seen = new HashSet<string>(StringComparer.OrdinalIgnoreCase); + + foreach (var alias in advisory.Aliases) + { + if (seen.Add(alias)) + { + aliases.Add(alias); + } + } + + if (seen.Add(advisory.AdvisoryKey)) + { + aliases.Add(advisory.AdvisoryKey); + } + + return aliases; + } + + private static IEnumerable<AdvisoryProvenance> EnsureProvenance( + ImmutableArray<AdvisoryProvenance> existing, + AdvisoryProvenance mirrorProvenance) + { + if (!existing.IsDefaultOrEmpty + && existing.Any(provenance => + string.Equals(provenance.Source, mirrorProvenance.Source, StringComparison.Ordinal) + && string.Equals(provenance.Kind, mirrorProvenance.Kind, StringComparison.Ordinal) + && string.Equals(provenance.Value, mirrorProvenance.Value, StringComparison.Ordinal))) + { + return existing; + } + + return existing.Add(mirrorProvenance); + } + + private static IEnumerable<AffectedPackage> EnsurePackageProvenance( + ImmutableArray<AffectedPackage> packages, + string mirrorValue, + DateTimeOffset recordedAt) + { + if (packages.IsDefaultOrEmpty || packages.Length == 0) + { + return packages; + } + + var results = new List<AffectedPackage>(packages.Length); + + foreach (var package in packages) + { + var value = $"{mirrorValue};package={package.Identifier}"; + if (!package.Provenance.IsDefaultOrEmpty + && package.Provenance.Any(provenance => + string.Equals(provenance.Source, StellaOpsMirrorConnector.Source, StringComparison.Ordinal) + && string.Equals(provenance.Kind, MirrorProvenanceKind, StringComparison.Ordinal) + && string.Equals(provenance.Value, value, StringComparison.Ordinal))) + { + results.Add(package); + continue; + } + + var masks = BuildPackageFieldMask(package); + var packageProvenance = new AdvisoryProvenance( + StellaOpsMirrorConnector.Source, + MirrorProvenanceKind, + value, + recordedAt, + masks); + + var provenance = package.Provenance.Add(packageProvenance); + var updated = new AffectedPackage( + package.Type, + package.Identifier, + package.Platform, + package.VersionRanges, + package.Statuses, + 
provenance, + package.NormalizedVersions); + + results.Add(updated); + } + + return results; + } + + private static string[] BuildPackageFieldMask(AffectedPackage package) + { + var masks = new HashSet<string>(StringComparer.Ordinal) + { + ProvenanceFieldMasks.AffectedPackages, + }; + + if (!package.VersionRanges.IsDefaultOrEmpty && package.VersionRanges.Length > 0) + { + masks.Add(ProvenanceFieldMasks.VersionRanges); + } + + if (!package.Statuses.IsDefaultOrEmpty && package.Statuses.Length > 0) + { + masks.Add(ProvenanceFieldMasks.PackageStatuses); + } + + if (!package.NormalizedVersions.IsDefaultOrEmpty && package.NormalizedVersions.Length > 0) + { + masks.Add(ProvenanceFieldMasks.NormalizedVersions); + } + + return masks.ToArray(); + } + + private static string BuildMirrorValue(MirrorBundleDocument bundle, DateTimeOffset recordedAt) + { + var segments = new List<string> + { + $"domain={bundle.DomainId}", + }; + + if (!string.IsNullOrWhiteSpace(bundle.TargetRepository)) + { + segments.Add($"repository={bundle.TargetRepository}"); + } + + segments.Add($"generated={recordedAt.ToString("O", CultureInfo.InvariantCulture)}"); + return string.Join(';', segments); + } +} diff --git a/src/StellaOps.Concelier.Connector.StellaOpsMirror/Internal/MirrorBundleDocument.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/Internal/MirrorBundleDocument.cs similarity index 98% rename from src/StellaOps.Concelier.Connector.StellaOpsMirror/Internal/MirrorBundleDocument.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/Internal/MirrorBundleDocument.cs index 33aa553c..db6daaa6 100644 --- a/src/StellaOps.Concelier.Connector.StellaOpsMirror/Internal/MirrorBundleDocument.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/Internal/MirrorBundleDocument.cs @@ -1,14 +1,14 @@ -using System.Text.Json.Serialization; -using StellaOps.Concelier.Models; - -namespace StellaOps.Concelier.Connector.StellaOpsMirror.Internal; - -public sealed record MirrorBundleDocument( - [property: JsonPropertyName("schemaVersion")] int SchemaVersion, - [property: JsonPropertyName("generatedAt")] DateTimeOffset GeneratedAt, - [property: JsonPropertyName("targetRepository")] string? TargetRepository, - [property: JsonPropertyName("domainId")] string DomainId, - [property: JsonPropertyName("displayName")] string DisplayName, - [property: JsonPropertyName("advisoryCount")] int AdvisoryCount, - [property: JsonPropertyName("advisories")] IReadOnlyList<Advisory> Advisories, - [property: JsonPropertyName("sources")] IReadOnlyList<MirrorSourceSummary> Sources); +using System.Text.Json.Serialization; +using StellaOps.Concelier.Models; + +namespace StellaOps.Concelier.Connector.StellaOpsMirror.Internal; + +public sealed record MirrorBundleDocument( + [property: JsonPropertyName("schemaVersion")] int SchemaVersion, + [property: JsonPropertyName("generatedAt")] DateTimeOffset GeneratedAt, + [property: JsonPropertyName("targetRepository")] string? 
TargetRepository, + [property: JsonPropertyName("domainId")] string DomainId, + [property: JsonPropertyName("displayName")] string DisplayName, + [property: JsonPropertyName("advisoryCount")] int AdvisoryCount, + [property: JsonPropertyName("advisories")] IReadOnlyList<Advisory> Advisories, + [property: JsonPropertyName("sources")] IReadOnlyList<MirrorSourceSummary> Sources); diff --git a/src/StellaOps.Concelier.Connector.StellaOpsMirror/Internal/MirrorIndexDocument.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/Internal/MirrorIndexDocument.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.StellaOpsMirror/Internal/MirrorIndexDocument.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/Internal/MirrorIndexDocument.cs diff --git a/src/StellaOps.Concelier.Connector.StellaOpsMirror/Internal/StellaOpsMirrorCursor.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/Internal/StellaOpsMirrorCursor.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.StellaOpsMirror/Internal/StellaOpsMirrorCursor.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/Internal/StellaOpsMirrorCursor.cs diff --git a/src/StellaOps.Concelier.Connector.StellaOpsMirror/Jobs.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/Jobs.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.StellaOpsMirror/Jobs.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/Jobs.cs diff --git a/src/StellaOps.Concelier.Connector.StellaOpsMirror/Properties/AssemblyInfo.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/Properties/AssemblyInfo.cs similarity index 97% rename from src/StellaOps.Concelier.Connector.StellaOpsMirror/Properties/AssemblyInfo.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/Properties/AssemblyInfo.cs index 98e2ebf6..101bd334 100644 --- a/src/StellaOps.Concelier.Connector.StellaOpsMirror/Properties/AssemblyInfo.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/Properties/AssemblyInfo.cs @@ -1,3 +1,3 @@ -using System.Runtime.CompilerServices; - -[assembly: InternalsVisibleTo("StellaOps.Concelier.Connector.StellaOpsMirror.Tests")] +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("StellaOps.Concelier.Connector.StellaOpsMirror.Tests")] diff --git a/src/StellaOps.Concelier.Connector.StellaOpsMirror/Security/MirrorSignatureVerifier.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/Security/MirrorSignatureVerifier.cs similarity index 97% rename from src/StellaOps.Concelier.Connector.StellaOpsMirror/Security/MirrorSignatureVerifier.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/Security/MirrorSignatureVerifier.cs index bb3ddcd7..322f68eb 100644 --- a/src/StellaOps.Concelier.Connector.StellaOpsMirror/Security/MirrorSignatureVerifier.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/Security/MirrorSignatureVerifier.cs @@ -1,273 +1,273 @@ -using System; -using System.IO; -using System.Security.Cryptography; -using System.Text; -using System.Text.Json; -using System.Text.Json.Serialization; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Caching.Memory; -using Microsoft.Extensions.Logging; -using Microsoft.IdentityModel.Tokens; -using StellaOps.Cryptography; - 
-namespace StellaOps.Concelier.Connector.StellaOpsMirror.Security; - -/// <summary> -/// Validates detached JWS signatures emitted by mirror bundles. -/// </summary> -public sealed class MirrorSignatureVerifier -{ - private const string CachePrefix = "stellaops:mirror:public-key:"; - private static readonly JsonSerializerOptions HeaderSerializerOptions = new(JsonSerializerDefaults.Web) - { - PropertyNameCaseInsensitive = true - }; - - private readonly ICryptoProviderRegistry _providerRegistry; - private readonly ILogger<MirrorSignatureVerifier> _logger; - private readonly IMemoryCache? _memoryCache; - - public MirrorSignatureVerifier( - ICryptoProviderRegistry providerRegistry, - ILogger<MirrorSignatureVerifier> logger, - IMemoryCache? memoryCache = null) - { - _providerRegistry = providerRegistry ?? throw new ArgumentNullException(nameof(providerRegistry)); - _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - _memoryCache = memoryCache; - } - - public Task VerifyAsync(ReadOnlyMemory<byte> payload, string signatureValue, CancellationToken cancellationToken) - => VerifyAsync(payload, signatureValue, expectedKeyId: null, expectedProvider: null, fallbackPublicKeyPath: null, cancellationToken); - - public async Task VerifyAsync( - ReadOnlyMemory<byte> payload, - string signatureValue, - string? expectedKeyId, - string? expectedProvider, - string? fallbackPublicKeyPath, - CancellationToken cancellationToken) - { - if (payload.IsEmpty) - { - throw new ArgumentException("Payload must not be empty.", nameof(payload)); - } - - if (string.IsNullOrWhiteSpace(signatureValue)) - { - throw new ArgumentException("Signature value must be provided.", nameof(signatureValue)); - } - - if (!TryParseDetachedJws(signatureValue, out var encodedHeader, out var encodedSignature)) - { - throw new InvalidOperationException("Detached JWS signature is malformed."); - } - - var headerJson = Encoding.UTF8.GetString(Base64UrlEncoder.DecodeBytes(encodedHeader)); - var header = JsonSerializer.Deserialize<MirrorSignatureHeader>(headerJson, HeaderSerializerOptions) - ?? throw new InvalidOperationException("Detached JWS header could not be parsed."); - - if (!header.Critical.Contains("b64", StringComparer.Ordinal)) - { - throw new InvalidOperationException("Detached JWS header is missing required 'b64' critical parameter."); - } - - if (header.Base64Payload) - { - throw new InvalidOperationException("Detached JWS header sets b64=true; expected unencoded payload."); - } - - if (string.IsNullOrWhiteSpace(header.KeyId)) - { - throw new InvalidOperationException("Detached JWS header missing key identifier."); - } - - if (string.IsNullOrWhiteSpace(header.Algorithm)) - { - throw new InvalidOperationException("Detached JWS header missing algorithm identifier."); - } - - if (!string.IsNullOrWhiteSpace(expectedKeyId) && - !string.Equals(header.KeyId, expectedKeyId, StringComparison.OrdinalIgnoreCase)) - { - throw new InvalidOperationException($"Mirror bundle signature key '{header.KeyId}' did not match expected key '{expectedKeyId}'."); - } - - if (!string.IsNullOrWhiteSpace(expectedProvider) && - !string.Equals(header.Provider, expectedProvider, StringComparison.OrdinalIgnoreCase)) - { - throw new InvalidOperationException($"Mirror bundle signature provider '{header.Provider ?? 
"<null>"}' did not match expected provider '{expectedProvider}'."); - } - - var signingInput = BuildSigningInput(encodedHeader, payload.Span); - var signatureBytes = Base64UrlEncoder.DecodeBytes(encodedSignature); - - var keyReference = new CryptoKeyReference(header.KeyId, header.Provider); - CryptoSignerResolution? resolution = null; - bool providerVerified = false; - try - { - resolution = _providerRegistry.ResolveSigner( - CryptoCapability.Verification, - header.Algorithm, - keyReference, - header.Provider); - providerVerified = await resolution.Signer.VerifyAsync(signingInput, signatureBytes, cancellationToken).ConfigureAwait(false); - if (providerVerified) - { - return; - } - - _logger.LogWarning( - "Detached JWS verification failed for key {KeyId} via provider {Provider}.", - header.KeyId, - resolution.ProviderName); - } - catch (Exception ex) when (ex is InvalidOperationException or KeyNotFoundException) - { - _logger.LogWarning(ex, "Unable to resolve signer for mirror signature key {KeyId} via provider {Provider}.", header.KeyId, header.Provider ?? "<null>"); - } - - if (providerVerified) - { - return; - } - - if (!string.IsNullOrWhiteSpace(fallbackPublicKeyPath) && - await TryVerifyWithFallbackAsync(signingInput, signatureBytes, header.Algorithm, fallbackPublicKeyPath!, cancellationToken).ConfigureAwait(false)) - { - _logger.LogDebug( - "Detached JWS verification succeeded for key {KeyId} using fallback public key at {Path}.", - header.KeyId, - fallbackPublicKeyPath); - return; - } - - throw new InvalidOperationException("Detached JWS signature verification failed."); - } - - private static bool TryParseDetachedJws(string value, out string encodedHeader, out string encodedSignature) - { - var parts = value.Split("..", StringSplitOptions.None); - if (parts.Length != 2 || string.IsNullOrEmpty(parts[0]) || string.IsNullOrEmpty(parts[1])) - { - encodedHeader = string.Empty; - encodedSignature = string.Empty; - return false; - } - - encodedHeader = parts[0]; - encodedSignature = parts[1]; - return true; - } - - private static ReadOnlyMemory<byte> BuildSigningInput(string encodedHeader, ReadOnlySpan<byte> payload) - { - var headerBytes = Encoding.ASCII.GetBytes(encodedHeader); - var buffer = new byte[headerBytes.Length + 1 + payload.Length]; - headerBytes.CopyTo(buffer.AsSpan()); - buffer[headerBytes.Length] = (byte)'.'; - payload.CopyTo(buffer.AsSpan(headerBytes.Length + 1)); - return buffer; - } - - private async Task<bool> TryVerifyWithFallbackAsync( - ReadOnlyMemory<byte> signingInput, - ReadOnlyMemory<byte> signature, - string algorithm, - string fallbackPublicKeyPath, - CancellationToken cancellationToken) - { - try - { - cancellationToken.ThrowIfCancellationRequested(); - var parameters = await GetFallbackPublicKeyAsync(fallbackPublicKeyPath, cancellationToken).ConfigureAwait(false); - if (parameters is null) - { - return false; - } - - using var ecdsa = ECDsa.Create(); - ecdsa.ImportParameters(parameters.Value); - var hashAlgorithm = ResolveHashAlgorithm(algorithm); - return ecdsa.VerifyData(signingInput.Span, signature.Span, hashAlgorithm); - } - catch (OperationCanceledException) - { - throw; - } - catch (Exception ex) when (ex is IOException or UnauthorizedAccessException or CryptographicException or ArgumentException) - { - _logger.LogWarning(ex, "Failed to verify mirror signature using fallback public key at {Path}.", fallbackPublicKeyPath); - return false; - } - } - - private Task<ECParameters?> GetFallbackPublicKeyAsync(string path, CancellationToken cancellationToken) - 
{ - cancellationToken.ThrowIfCancellationRequested(); - - if (_memoryCache is null) - { - return Task.FromResult(LoadPublicKey(path)); - } - - if (_memoryCache.TryGetValue<Lazy<ECParameters?>>(CachePrefix + path, out var cached)) - { - return Task.FromResult(cached?.Value); - } - - if (!File.Exists(path)) - { - _logger.LogWarning("Mirror signature fallback public key path {Path} was not found.", path); - return Task.FromResult<ECParameters?>(null); - } - - var lazy = new Lazy<ECParameters?>( - () => LoadPublicKey(path), - LazyThreadSafetyMode.ExecutionAndPublication); - - var options = new MemoryCacheEntryOptions - { - AbsoluteExpirationRelativeToNow = TimeSpan.FromHours(6), - SlidingExpiration = TimeSpan.FromMinutes(30), - }; - - _memoryCache.Set(CachePrefix + path, lazy, options); - return Task.FromResult(lazy.Value); - } - - private ECParameters? LoadPublicKey(string path) - { - try - { - var pem = File.ReadAllText(path); - using var ecdsa = ECDsa.Create(); - ecdsa.ImportFromPem(pem.AsSpan()); - return ecdsa.ExportParameters(includePrivateParameters: false); - } - catch (Exception ex) when (ex is IOException or UnauthorizedAccessException or CryptographicException or ArgumentException) - { - _logger.LogWarning(ex, "Failed to load mirror fallback public key from {Path}.", path); - return null; - } - } - - private static HashAlgorithmName ResolveHashAlgorithm(string algorithmId) - => algorithmId switch - { - { } alg when string.Equals(alg, SignatureAlgorithms.Es256, StringComparison.OrdinalIgnoreCase) => HashAlgorithmName.SHA256, - { } alg when string.Equals(alg, SignatureAlgorithms.Es384, StringComparison.OrdinalIgnoreCase) => HashAlgorithmName.SHA384, - { } alg when string.Equals(alg, SignatureAlgorithms.Es512, StringComparison.OrdinalIgnoreCase) => HashAlgorithmName.SHA512, - _ => throw new InvalidOperationException($"Unsupported mirror signature algorithm '{algorithmId}'."), - }; - - private sealed record MirrorSignatureHeader( - [property: JsonPropertyName("alg")] string Algorithm, - [property: JsonPropertyName("kid")] string KeyId, - [property: JsonPropertyName("provider")] string? Provider, - [property: JsonPropertyName("typ")] string? Type, - [property: JsonPropertyName("b64")] bool Base64Payload, - [property: JsonPropertyName("crit")] string[] Critical); -} +using System; +using System.IO; +using System.Security.Cryptography; +using System.Text; +using System.Text.Json; +using System.Text.Json.Serialization; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Caching.Memory; +using Microsoft.Extensions.Logging; +using Microsoft.IdentityModel.Tokens; +using StellaOps.Cryptography; + +namespace StellaOps.Concelier.Connector.StellaOpsMirror.Security; + +/// <summary> +/// Validates detached JWS signatures emitted by mirror bundles. +/// </summary> +public sealed class MirrorSignatureVerifier +{ + private const string CachePrefix = "stellaops:mirror:public-key:"; + private static readonly JsonSerializerOptions HeaderSerializerOptions = new(JsonSerializerDefaults.Web) + { + PropertyNameCaseInsensitive = true + }; + + private readonly ICryptoProviderRegistry _providerRegistry; + private readonly ILogger<MirrorSignatureVerifier> _logger; + private readonly IMemoryCache? _memoryCache; + + public MirrorSignatureVerifier( + ICryptoProviderRegistry providerRegistry, + ILogger<MirrorSignatureVerifier> logger, + IMemoryCache? memoryCache = null) + { + _providerRegistry = providerRegistry ?? 
throw new ArgumentNullException(nameof(providerRegistry)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _memoryCache = memoryCache; + } + + public Task VerifyAsync(ReadOnlyMemory<byte> payload, string signatureValue, CancellationToken cancellationToken) + => VerifyAsync(payload, signatureValue, expectedKeyId: null, expectedProvider: null, fallbackPublicKeyPath: null, cancellationToken); + + public async Task VerifyAsync( + ReadOnlyMemory<byte> payload, + string signatureValue, + string? expectedKeyId, + string? expectedProvider, + string? fallbackPublicKeyPath, + CancellationToken cancellationToken) + { + if (payload.IsEmpty) + { + throw new ArgumentException("Payload must not be empty.", nameof(payload)); + } + + if (string.IsNullOrWhiteSpace(signatureValue)) + { + throw new ArgumentException("Signature value must be provided.", nameof(signatureValue)); + } + + if (!TryParseDetachedJws(signatureValue, out var encodedHeader, out var encodedSignature)) + { + throw new InvalidOperationException("Detached JWS signature is malformed."); + } + + var headerJson = Encoding.UTF8.GetString(Base64UrlEncoder.DecodeBytes(encodedHeader)); + var header = JsonSerializer.Deserialize<MirrorSignatureHeader>(headerJson, HeaderSerializerOptions) + ?? throw new InvalidOperationException("Detached JWS header could not be parsed."); + + if (!header.Critical.Contains("b64", StringComparer.Ordinal)) + { + throw new InvalidOperationException("Detached JWS header is missing required 'b64' critical parameter."); + } + + if (header.Base64Payload) + { + throw new InvalidOperationException("Detached JWS header sets b64=true; expected unencoded payload."); + } + + if (string.IsNullOrWhiteSpace(header.KeyId)) + { + throw new InvalidOperationException("Detached JWS header missing key identifier."); + } + + if (string.IsNullOrWhiteSpace(header.Algorithm)) + { + throw new InvalidOperationException("Detached JWS header missing algorithm identifier."); + } + + if (!string.IsNullOrWhiteSpace(expectedKeyId) && + !string.Equals(header.KeyId, expectedKeyId, StringComparison.OrdinalIgnoreCase)) + { + throw new InvalidOperationException($"Mirror bundle signature key '{header.KeyId}' did not match expected key '{expectedKeyId}'."); + } + + if (!string.IsNullOrWhiteSpace(expectedProvider) && + !string.Equals(header.Provider, expectedProvider, StringComparison.OrdinalIgnoreCase)) + { + throw new InvalidOperationException($"Mirror bundle signature provider '{header.Provider ?? "<null>"}' did not match expected provider '{expectedProvider}'."); + } + + var signingInput = BuildSigningInput(encodedHeader, payload.Span); + var signatureBytes = Base64UrlEncoder.DecodeBytes(encodedSignature); + + var keyReference = new CryptoKeyReference(header.KeyId, header.Provider); + CryptoSignerResolution? 
resolution = null; + bool providerVerified = false; + try + { + resolution = _providerRegistry.ResolveSigner( + CryptoCapability.Verification, + header.Algorithm, + keyReference, + header.Provider); + providerVerified = await resolution.Signer.VerifyAsync(signingInput, signatureBytes, cancellationToken).ConfigureAwait(false); + if (providerVerified) + { + return; + } + + _logger.LogWarning( + "Detached JWS verification failed for key {KeyId} via provider {Provider}.", + header.KeyId, + resolution.ProviderName); + } + catch (Exception ex) when (ex is InvalidOperationException or KeyNotFoundException) + { + _logger.LogWarning(ex, "Unable to resolve signer for mirror signature key {KeyId} via provider {Provider}.", header.KeyId, header.Provider ?? "<null>"); + } + + if (providerVerified) + { + return; + } + + if (!string.IsNullOrWhiteSpace(fallbackPublicKeyPath) && + await TryVerifyWithFallbackAsync(signingInput, signatureBytes, header.Algorithm, fallbackPublicKeyPath!, cancellationToken).ConfigureAwait(false)) + { + _logger.LogDebug( + "Detached JWS verification succeeded for key {KeyId} using fallback public key at {Path}.", + header.KeyId, + fallbackPublicKeyPath); + return; + } + + throw new InvalidOperationException("Detached JWS signature verification failed."); + } + + private static bool TryParseDetachedJws(string value, out string encodedHeader, out string encodedSignature) + { + var parts = value.Split("..", StringSplitOptions.None); + if (parts.Length != 2 || string.IsNullOrEmpty(parts[0]) || string.IsNullOrEmpty(parts[1])) + { + encodedHeader = string.Empty; + encodedSignature = string.Empty; + return false; + } + + encodedHeader = parts[0]; + encodedSignature = parts[1]; + return true; + } + + private static ReadOnlyMemory<byte> BuildSigningInput(string encodedHeader, ReadOnlySpan<byte> payload) + { + var headerBytes = Encoding.ASCII.GetBytes(encodedHeader); + var buffer = new byte[headerBytes.Length + 1 + payload.Length]; + headerBytes.CopyTo(buffer.AsSpan()); + buffer[headerBytes.Length] = (byte)'.'; + payload.CopyTo(buffer.AsSpan(headerBytes.Length + 1)); + return buffer; + } + + private async Task<bool> TryVerifyWithFallbackAsync( + ReadOnlyMemory<byte> signingInput, + ReadOnlyMemory<byte> signature, + string algorithm, + string fallbackPublicKeyPath, + CancellationToken cancellationToken) + { + try + { + cancellationToken.ThrowIfCancellationRequested(); + var parameters = await GetFallbackPublicKeyAsync(fallbackPublicKeyPath, cancellationToken).ConfigureAwait(false); + if (parameters is null) + { + return false; + } + + using var ecdsa = ECDsa.Create(); + ecdsa.ImportParameters(parameters.Value); + var hashAlgorithm = ResolveHashAlgorithm(algorithm); + return ecdsa.VerifyData(signingInput.Span, signature.Span, hashAlgorithm); + } + catch (OperationCanceledException) + { + throw; + } + catch (Exception ex) when (ex is IOException or UnauthorizedAccessException or CryptographicException or ArgumentException) + { + _logger.LogWarning(ex, "Failed to verify mirror signature using fallback public key at {Path}.", fallbackPublicKeyPath); + return false; + } + } + + private Task<ECParameters?> GetFallbackPublicKeyAsync(string path, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (_memoryCache is null) + { + return Task.FromResult(LoadPublicKey(path)); + } + + if (_memoryCache.TryGetValue<Lazy<ECParameters?>>(CachePrefix + path, out var cached)) + { + return Task.FromResult(cached?.Value); + } + + if (!File.Exists(path)) + { + 
_logger.LogWarning("Mirror signature fallback public key path {Path} was not found.", path); + return Task.FromResult<ECParameters?>(null); + } + + var lazy = new Lazy<ECParameters?>( + () => LoadPublicKey(path), + LazyThreadSafetyMode.ExecutionAndPublication); + + var options = new MemoryCacheEntryOptions + { + AbsoluteExpirationRelativeToNow = TimeSpan.FromHours(6), + SlidingExpiration = TimeSpan.FromMinutes(30), + }; + + _memoryCache.Set(CachePrefix + path, lazy, options); + return Task.FromResult(lazy.Value); + } + + private ECParameters? LoadPublicKey(string path) + { + try + { + var pem = File.ReadAllText(path); + using var ecdsa = ECDsa.Create(); + ecdsa.ImportFromPem(pem.AsSpan()); + return ecdsa.ExportParameters(includePrivateParameters: false); + } + catch (Exception ex) when (ex is IOException or UnauthorizedAccessException or CryptographicException or ArgumentException) + { + _logger.LogWarning(ex, "Failed to load mirror fallback public key from {Path}.", path); + return null; + } + } + + private static HashAlgorithmName ResolveHashAlgorithm(string algorithmId) + => algorithmId switch + { + { } alg when string.Equals(alg, SignatureAlgorithms.Es256, StringComparison.OrdinalIgnoreCase) => HashAlgorithmName.SHA256, + { } alg when string.Equals(alg, SignatureAlgorithms.Es384, StringComparison.OrdinalIgnoreCase) => HashAlgorithmName.SHA384, + { } alg when string.Equals(alg, SignatureAlgorithms.Es512, StringComparison.OrdinalIgnoreCase) => HashAlgorithmName.SHA512, + _ => throw new InvalidOperationException($"Unsupported mirror signature algorithm '{algorithmId}'."), + }; + + private sealed record MirrorSignatureHeader( + [property: JsonPropertyName("alg")] string Algorithm, + [property: JsonPropertyName("kid")] string KeyId, + [property: JsonPropertyName("provider")] string? Provider, + [property: JsonPropertyName("typ")] string? 
Type, + [property: JsonPropertyName("b64")] bool Base64Payload, + [property: JsonPropertyName("crit")] string[] Critical); +} diff --git a/src/StellaOps.Concelier.Connector.StellaOpsMirror/Settings/StellaOpsMirrorConnectorOptions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/Settings/StellaOpsMirrorConnectorOptions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.StellaOpsMirror/Settings/StellaOpsMirrorConnectorOptions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/Settings/StellaOpsMirrorConnectorOptions.cs diff --git a/src/StellaOps.Concelier.Connector.StellaOpsMirror/StellaOps.Concelier.Connector.StellaOpsMirror.csproj b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/StellaOps.Concelier.Connector.StellaOpsMirror.csproj similarity index 68% rename from src/StellaOps.Concelier.Connector.StellaOpsMirror/StellaOps.Concelier.Connector.StellaOpsMirror.csproj rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/StellaOps.Concelier.Connector.StellaOpsMirror.csproj index adf69977..a18fdea7 100644 --- a/src/StellaOps.Concelier.Connector.StellaOpsMirror/StellaOps.Concelier.Connector.StellaOpsMirror.csproj +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/StellaOps.Concelier.Connector.StellaOpsMirror.csproj @@ -1,16 +1,17 @@ -<Project Sdk="Microsoft.NET.Sdk"> - - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - - <ItemGroup> - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" /> - <ProjectReference Include="../StellaOps.Cryptography/StellaOps.Cryptography.csproj" /> - </ItemGroup> -</Project> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + + <ItemGroup> + <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" /> + <ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.StellaOpsMirror/StellaOpsMirrorConnector.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/StellaOpsMirrorConnector.cs similarity index 97% rename from src/StellaOps.Concelier.Connector.StellaOpsMirror/StellaOpsMirrorConnector.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/StellaOpsMirrorConnector.cs index fda33b07..2e896e5f 100644 --- a/src/StellaOps.Concelier.Connector.StellaOpsMirror/StellaOpsMirrorConnector.cs +++ 
b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/StellaOpsMirrorConnector.cs @@ -1,573 +1,573 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Security.Cryptography; -using System.Text; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using MongoDB.Bson; -using StellaOps.Concelier.Connector.Common.Fetch; -using StellaOps.Concelier.Connector.Common; -using StellaOps.Concelier.Connector.StellaOpsMirror.Client; -using StellaOps.Concelier.Connector.StellaOpsMirror.Internal; -using StellaOps.Concelier.Connector.StellaOpsMirror.Security; -using StellaOps.Concelier.Connector.StellaOpsMirror.Settings; -using StellaOps.Concelier.Models; -using StellaOps.Concelier.Storage.Mongo; -using StellaOps.Concelier.Storage.Mongo.Advisories; -using StellaOps.Concelier.Storage.Mongo.Documents; -using StellaOps.Concelier.Storage.Mongo.Dtos; -using StellaOps.Plugin; - -namespace StellaOps.Concelier.Connector.StellaOpsMirror; - -public sealed class StellaOpsMirrorConnector : IFeedConnector -{ - public const string Source = "stellaops-mirror"; - private const string BundleDtoSchemaVersion = "stellaops.mirror.bundle.v1"; - - private readonly MirrorManifestClient _client; - private readonly MirrorSignatureVerifier _signatureVerifier; - private readonly RawDocumentStorage _rawDocumentStorage; - private readonly IDocumentStore _documentStore; - private readonly IDtoStore _dtoStore; - private readonly IAdvisoryStore _advisoryStore; - private readonly ISourceStateRepository _stateRepository; - private readonly TimeProvider _timeProvider; - private readonly ILogger<StellaOpsMirrorConnector> _logger; - private readonly StellaOpsMirrorConnectorOptions _options; - - public StellaOpsMirrorConnector( - MirrorManifestClient client, - MirrorSignatureVerifier signatureVerifier, - RawDocumentStorage rawDocumentStorage, - IDocumentStore documentStore, - IDtoStore dtoStore, - IAdvisoryStore advisoryStore, - ISourceStateRepository stateRepository, - IOptions<StellaOpsMirrorConnectorOptions> options, - TimeProvider? timeProvider, - ILogger<StellaOpsMirrorConnector> logger) - { - _client = client ?? throw new ArgumentNullException(nameof(client)); - _signatureVerifier = signatureVerifier ?? throw new ArgumentNullException(nameof(signatureVerifier)); - _rawDocumentStorage = rawDocumentStorage ?? throw new ArgumentNullException(nameof(rawDocumentStorage)); - _documentStore = documentStore ?? throw new ArgumentNullException(nameof(documentStore)); - _dtoStore = dtoStore ?? throw new ArgumentNullException(nameof(dtoStore)); - _advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore)); - _stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository)); - _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - _timeProvider = timeProvider ?? TimeProvider.System; - _options = (options ?? throw new ArgumentNullException(nameof(options))).Value ?? throw new ArgumentNullException(nameof(options)); - ValidateOptions(_options); - } - - public string SourceName => Source; - - public async Task FetchAsync(IServiceProvider services, CancellationToken cancellationToken) - { - _ = services ?? 
throw new ArgumentNullException(nameof(services)); - - var now = _timeProvider.GetUtcNow(); - var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false); - var pendingDocuments = cursor.PendingDocuments.ToHashSet(); - var pendingMappings = cursor.PendingMappings.ToHashSet(); - - MirrorIndexDocument index; - try - { - index = await _client.GetIndexAsync(_options.IndexPath, cancellationToken).ConfigureAwait(false); - } - catch (Exception ex) - { - await _stateRepository.MarkFailureAsync(Source, now, TimeSpan.FromMinutes(15), ex.Message, cancellationToken).ConfigureAwait(false); - throw; - } - - var domain = index.Domains.FirstOrDefault(entry => - string.Equals(entry.DomainId, _options.DomainId, StringComparison.OrdinalIgnoreCase)); - - if (domain is null) - { - var message = $"Mirror domain '{_options.DomainId}' not present in index."; - await _stateRepository.MarkFailureAsync(Source, now, TimeSpan.FromMinutes(30), message, cancellationToken).ConfigureAwait(false); - throw new InvalidOperationException(message); - } - - var fingerprint = CreateFingerprint(index, domain); - var isNewDigest = !string.Equals(domain.Bundle.Digest, cursor.BundleDigest, StringComparison.OrdinalIgnoreCase); - - if (isNewDigest) - { - pendingDocuments.Clear(); - pendingMappings.Clear(); - } - - if (string.Equals(domain.Bundle.Digest, cursor.BundleDigest, StringComparison.OrdinalIgnoreCase)) - { - _logger.LogInformation("Mirror bundle digest {Digest} unchanged; skipping fetch.", domain.Bundle.Digest); - return; - } - - try - { - await ProcessDomainAsync(index, domain, pendingDocuments, cancellationToken).ConfigureAwait(false); - } - catch (Exception ex) - { - await _stateRepository.MarkFailureAsync(Source, now, TimeSpan.FromMinutes(10), ex.Message, cancellationToken).ConfigureAwait(false); - throw; - } - - var completedFingerprint = isNewDigest ? 
null : cursor.CompletedFingerprint; - var updatedCursor = cursor - .WithPendingDocuments(pendingDocuments) - .WithPendingMappings(pendingMappings) - .WithBundleSnapshot(domain.Bundle.Path, domain.Bundle.Digest, index.GeneratedAt) - .WithCompletedFingerprint(completedFingerprint); - - await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false); - } - - public Task ParseAsync(IServiceProvider services, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(services); - - return ParseInternalAsync(cancellationToken); - } - - public Task MapAsync(IServiceProvider services, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(services); - - return MapInternalAsync(cancellationToken); - } - - private async Task ProcessDomainAsync( - MirrorIndexDocument index, - MirrorIndexDomainEntry domain, - HashSet<Guid> pendingDocuments, - CancellationToken cancellationToken) - { - var manifestBytes = await _client.DownloadAsync(domain.Manifest.Path, cancellationToken).ConfigureAwait(false); - var bundleBytes = await _client.DownloadAsync(domain.Bundle.Path, cancellationToken).ConfigureAwait(false); - - VerifyDigest(domain.Manifest.Digest, manifestBytes, domain.Manifest.Path); - VerifyDigest(domain.Bundle.Digest, bundleBytes, domain.Bundle.Path); - - if (_options.Signature.Enabled) - { - if (domain.Bundle.Signature is null) - { - throw new InvalidOperationException("Mirror bundle did not include a signature descriptor while verification is enabled."); - } - - if (!string.IsNullOrWhiteSpace(_options.Signature.KeyId) && - !string.Equals(domain.Bundle.Signature.KeyId, _options.Signature.KeyId, StringComparison.OrdinalIgnoreCase)) - { - throw new InvalidOperationException($"Mirror bundle signature key '{domain.Bundle.Signature.KeyId}' did not match expected key '{_options.Signature.KeyId}'."); - } - - if (!string.IsNullOrWhiteSpace(_options.Signature.Provider) && - !string.Equals(domain.Bundle.Signature.Provider, _options.Signature.Provider, StringComparison.OrdinalIgnoreCase)) - { - throw new InvalidOperationException($"Mirror bundle signature provider '{domain.Bundle.Signature.Provider ?? 
"<null>"}' did not match expected provider '{_options.Signature.Provider}'."); - } - - var signatureBytes = await _client.DownloadAsync(domain.Bundle.Signature.Path, cancellationToken).ConfigureAwait(false); - var signatureValue = Encoding.UTF8.GetString(signatureBytes).Trim(); - await _signatureVerifier.VerifyAsync( - bundleBytes, - signatureValue, - expectedKeyId: _options.Signature.KeyId, - expectedProvider: _options.Signature.Provider, - fallbackPublicKeyPath: _options.Signature.PublicKeyPath, - cancellationToken).ConfigureAwait(false); - } - else if (domain.Bundle.Signature is not null) - { - _logger.LogInformation("Mirror bundle provided signature descriptor but verification is disabled; skipping verification."); - } - - await StoreAsync(domain, index.GeneratedAt, domain.Manifest, manifestBytes, "application/json", DocumentStatuses.Mapped, addToPending: false, pendingDocuments, cancellationToken).ConfigureAwait(false); - var bundleRecord = await StoreAsync(domain, index.GeneratedAt, domain.Bundle, bundleBytes, "application/json", DocumentStatuses.PendingParse, addToPending: true, pendingDocuments, cancellationToken).ConfigureAwait(false); - - _logger.LogInformation( - "Stored mirror bundle {Uri} as document {DocumentId} with digest {Digest}.", - bundleRecord.Uri, - bundleRecord.Id, - bundleRecord.Sha256); - } - - private async Task<DocumentRecord> StoreAsync( - MirrorIndexDomainEntry domain, - DateTimeOffset generatedAt, - MirrorFileDescriptor descriptor, - byte[] payload, - string contentType, - string status, - bool addToPending, - HashSet<Guid> pendingDocuments, - CancellationToken cancellationToken) - { - var absolute = ResolveAbsolutePath(descriptor.Path); - - var existing = await _documentStore.FindBySourceAndUriAsync(Source, absolute, cancellationToken).ConfigureAwait(false); - if (existing is not null && string.Equals(existing.Sha256, NormalizeDigest(descriptor.Digest), StringComparison.OrdinalIgnoreCase)) - { - if (addToPending) - { - pendingDocuments.Add(existing.Id); - } - - return existing; - } - - var gridFsId = await _rawDocumentStorage.UploadAsync(Source, absolute, payload, contentType, cancellationToken).ConfigureAwait(false); - var now = _timeProvider.GetUtcNow(); - var sha = ComputeSha256(payload); - - var metadata = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase) - { - ["mirror.domainId"] = domain.DomainId, - ["mirror.displayName"] = domain.DisplayName, - ["mirror.path"] = descriptor.Path, - ["mirror.digest"] = NormalizeDigest(descriptor.Digest), - ["mirror.type"] = ReferenceEquals(descriptor, domain.Bundle) ? "bundle" : "manifest", - }; - - var record = new DocumentRecord( - existing?.Id ?? Guid.NewGuid(), - Source, - absolute, - now, - sha, - status, - contentType, - Headers: null, - Metadata: metadata, - Etag: null, - LastModified: generatedAt, - GridFsId: gridFsId, - ExpiresAt: null); - - var upserted = await _documentStore.UpsertAsync(record, cancellationToken).ConfigureAwait(false); - - if (addToPending) - { - pendingDocuments.Add(upserted.Id); - } - - return upserted; - } - - private string ResolveAbsolutePath(string path) - { - var uri = new Uri(_options.BaseAddress, path); - return uri.ToString(); - } - - private async Task<StellaOpsMirrorCursor> GetCursorAsync(CancellationToken cancellationToken) - { - var state = await _stateRepository.TryGetAsync(Source, cancellationToken).ConfigureAwait(false); - return state is null ? 
StellaOpsMirrorCursor.Empty : StellaOpsMirrorCursor.FromBson(state.Cursor); - } - - private async Task UpdateCursorAsync(StellaOpsMirrorCursor cursor, CancellationToken cancellationToken) - { - var document = cursor.ToBsonDocument(); - var now = _timeProvider.GetUtcNow(); - await _stateRepository.UpdateCursorAsync(Source, document, now, cancellationToken).ConfigureAwait(false); - } - - private static void VerifyDigest(string expected, ReadOnlySpan<byte> payload, string path) - { - if (string.IsNullOrWhiteSpace(expected)) - { - return; - } - - if (!expected.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase)) - { - throw new InvalidOperationException($"Unsupported digest '{expected}' for '{path}'."); - } - - var actualHash = SHA256.HashData(payload); - var actual = "sha256:" + Convert.ToHexString(actualHash).ToLowerInvariant(); - if (!string.Equals(actual, expected, StringComparison.OrdinalIgnoreCase)) - { - throw new InvalidOperationException($"Digest mismatch for '{path}'. Expected {expected}, computed {actual}."); - } - } - - private static string ComputeSha256(ReadOnlySpan<byte> payload) - { - var hash = SHA256.HashData(payload); - return Convert.ToHexString(hash).ToLowerInvariant(); - } - - private static string NormalizeDigest(string digest) - { - if (string.IsNullOrWhiteSpace(digest)) - { - return string.Empty; - } - - return digest.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase) - ? digest[7..] - : digest.ToLowerInvariant(); - } - - private static string? CreateFingerprint(MirrorIndexDocument index, MirrorIndexDomainEntry domain) - => CreateFingerprint(domain.Bundle.Digest, index.GeneratedAt); - - private static string? CreateFingerprint(string? digest, DateTimeOffset? generatedAt) - { - var normalizedDigest = NormalizeDigest(digest ?? 
string.Empty); - if (string.IsNullOrWhiteSpace(normalizedDigest) || generatedAt is null) - { - return null; - } - - return FormattableString.Invariant($"{normalizedDigest}:{generatedAt.Value.ToUniversalTime():O}"); - } - - private static void ValidateOptions(StellaOpsMirrorConnectorOptions options) - { - if (options.BaseAddress is null || !options.BaseAddress.IsAbsoluteUri) - { - throw new InvalidOperationException("Mirror connector requires an absolute baseAddress."); - } - - if (string.IsNullOrWhiteSpace(options.DomainId)) - { - throw new InvalidOperationException("Mirror connector requires domainId to be specified."); - } - } - - private async Task ParseInternalAsync(CancellationToken cancellationToken) - { - var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false); - if (cursor.PendingDocuments.Count == 0) - { - return; - } - - var pendingDocuments = cursor.PendingDocuments.ToHashSet(); - var pendingMappings = cursor.PendingMappings.ToHashSet(); - var now = _timeProvider.GetUtcNow(); - var parsed = 0; - var failures = 0; - - foreach (var documentId in cursor.PendingDocuments.ToArray()) - { - cancellationToken.ThrowIfCancellationRequested(); - - var document = await _documentStore.FindAsync(documentId, cancellationToken).ConfigureAwait(false); - if (document is null) - { - pendingDocuments.Remove(documentId); - pendingMappings.Remove(documentId); - failures++; - continue; - } - - if (!document.GridFsId.HasValue) - { - _logger.LogWarning("Mirror bundle document {DocumentId} missing GridFS payload.", documentId); - await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); - pendingDocuments.Remove(documentId); - pendingMappings.Remove(documentId); - failures++; - continue; - } - - byte[] payload; - try - { - payload = await _rawDocumentStorage.DownloadAsync(document.GridFsId.Value, cancellationToken).ConfigureAwait(false); - } - catch (Exception ex) - { - _logger.LogError(ex, "Mirror bundle {DocumentId} failed to download from raw storage.", documentId); - throw; - } - - MirrorBundleDocument? 
bundle; - string json; - try - { - json = Encoding.UTF8.GetString(payload); - bundle = CanonicalJsonSerializer.Deserialize<MirrorBundleDocument>(json); - } - catch (Exception ex) - { - _logger.LogWarning(ex, "Mirror bundle {DocumentId} failed to deserialize.", documentId); - await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); - pendingDocuments.Remove(documentId); - pendingMappings.Remove(documentId); - failures++; - continue; - } - - if (bundle is null || bundle.Advisories is null) - { - _logger.LogWarning("Mirror bundle {DocumentId} produced null payload.", documentId); - await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); - pendingDocuments.Remove(documentId); - pendingMappings.Remove(documentId); - failures++; - continue; - } - - var dtoBson = BsonDocument.Parse(json); - var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, Source, BundleDtoSchemaVersion, dtoBson, now); - await _dtoStore.UpsertAsync(dtoRecord, cancellationToken).ConfigureAwait(false); - await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.PendingMap, cancellationToken).ConfigureAwait(false); - - pendingDocuments.Remove(documentId); - pendingMappings.Add(document.Id); - parsed++; - - _logger.LogDebug( - "Parsed mirror bundle {DocumentId} domain={DomainId} advisories={AdvisoryCount}.", - document.Id, - bundle.DomainId, - bundle.AdvisoryCount); - } - - var updatedCursor = cursor - .WithPendingDocuments(pendingDocuments) - .WithPendingMappings(pendingMappings); - - await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false); - - if (parsed > 0 || failures > 0) - { - _logger.LogInformation( - "Mirror parse completed parsed={Parsed} failures={Failures} pendingDocuments={PendingDocuments} pendingMappings={PendingMappings}.", - parsed, - failures, - pendingDocuments.Count, - pendingMappings.Count); - } - } - - private async Task MapInternalAsync(CancellationToken cancellationToken) - { - var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false); - if (cursor.PendingMappings.Count == 0) - { - return; - } - - var pendingMappings = cursor.PendingMappings.ToHashSet(); - var mapped = 0; - var failures = 0; - var completedFingerprint = cursor.CompletedFingerprint; - - foreach (var documentId in cursor.PendingMappings.ToArray()) - { - cancellationToken.ThrowIfCancellationRequested(); - - var document = await _documentStore.FindAsync(documentId, cancellationToken).ConfigureAwait(false); - if (document is null) - { - pendingMappings.Remove(documentId); - failures++; - continue; - } - - var dtoRecord = await _dtoStore.FindByDocumentIdAsync(documentId, cancellationToken).ConfigureAwait(false); - if (dtoRecord is null) - { - _logger.LogWarning("Mirror document {DocumentId} missing DTO payload.", documentId); - await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); - pendingMappings.Remove(documentId); - failures++; - continue; - } - - MirrorBundleDocument? 
bundle; - try - { - var json = dtoRecord.Payload.ToJson(); - bundle = CanonicalJsonSerializer.Deserialize<MirrorBundleDocument>(json); - } - catch (Exception ex) - { - _logger.LogWarning(ex, "Mirror DTO for document {DocumentId} failed to deserialize.", documentId); - await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); - pendingMappings.Remove(documentId); - failures++; - continue; - } - - if (bundle is null || bundle.Advisories is null) - { - _logger.LogWarning("Mirror bundle DTO {DocumentId} evaluated to null.", documentId); - await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); - pendingMappings.Remove(documentId); - failures++; - continue; - } - - try - { - var advisories = MirrorAdvisoryMapper.Map(bundle); - - foreach (var advisory in advisories) - { - cancellationToken.ThrowIfCancellationRequested(); - await _advisoryStore.UpsertAsync(advisory, cancellationToken).ConfigureAwait(false); - } - - await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Mapped, cancellationToken).ConfigureAwait(false); - pendingMappings.Remove(documentId); - mapped++; - - _logger.LogDebug( - "Mirror map completed for document {DocumentId} domain={DomainId} advisories={AdvisoryCount}.", - document.Id, - bundle.DomainId, - advisories.Length); - } - catch (Exception ex) - { - _logger.LogError(ex, "Mirror mapping failed for document {DocumentId}.", documentId); - await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); - pendingMappings.Remove(documentId); - failures++; - } - } - - if (pendingMappings.Count == 0 && failures == 0) - { - var fingerprint = CreateFingerprint(cursor.BundleDigest, cursor.GeneratedAt); - if (!string.IsNullOrWhiteSpace(fingerprint)) - { - completedFingerprint = fingerprint; - } - } - - var updatedCursor = cursor - .WithPendingMappings(pendingMappings) - .WithCompletedFingerprint(completedFingerprint); - await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false); - - if (mapped > 0 || failures > 0) - { - _logger.LogInformation( - "Mirror map completed mapped={Mapped} failures={Failures} pendingMappings={PendingMappings}.", - mapped, - failures, - pendingMappings.Count); - } - } -} - -file static class UriExtensions -{ - public static Uri Combine(this Uri baseUri, string relative) - => new(baseUri, relative); -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Security.Cryptography; +using System.Text; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using MongoDB.Bson; +using StellaOps.Concelier.Connector.Common.Fetch; +using StellaOps.Concelier.Connector.Common; +using StellaOps.Concelier.Connector.StellaOpsMirror.Client; +using StellaOps.Concelier.Connector.StellaOpsMirror.Internal; +using StellaOps.Concelier.Connector.StellaOpsMirror.Security; +using StellaOps.Concelier.Connector.StellaOpsMirror.Settings; +using StellaOps.Concelier.Models; +using StellaOps.Concelier.Storage.Mongo; +using StellaOps.Concelier.Storage.Mongo.Advisories; +using StellaOps.Concelier.Storage.Mongo.Documents; +using StellaOps.Concelier.Storage.Mongo.Dtos; +using StellaOps.Plugin; + +namespace StellaOps.Concelier.Connector.StellaOpsMirror; + +public sealed class StellaOpsMirrorConnector : IFeedConnector +{ + public const string Source = "stellaops-mirror"; + private const string BundleDtoSchemaVersion = 
"stellaops.mirror.bundle.v1"; + + private readonly MirrorManifestClient _client; + private readonly MirrorSignatureVerifier _signatureVerifier; + private readonly RawDocumentStorage _rawDocumentStorage; + private readonly IDocumentStore _documentStore; + private readonly IDtoStore _dtoStore; + private readonly IAdvisoryStore _advisoryStore; + private readonly ISourceStateRepository _stateRepository; + private readonly TimeProvider _timeProvider; + private readonly ILogger<StellaOpsMirrorConnector> _logger; + private readonly StellaOpsMirrorConnectorOptions _options; + + public StellaOpsMirrorConnector( + MirrorManifestClient client, + MirrorSignatureVerifier signatureVerifier, + RawDocumentStorage rawDocumentStorage, + IDocumentStore documentStore, + IDtoStore dtoStore, + IAdvisoryStore advisoryStore, + ISourceStateRepository stateRepository, + IOptions<StellaOpsMirrorConnectorOptions> options, + TimeProvider? timeProvider, + ILogger<StellaOpsMirrorConnector> logger) + { + _client = client ?? throw new ArgumentNullException(nameof(client)); + _signatureVerifier = signatureVerifier ?? throw new ArgumentNullException(nameof(signatureVerifier)); + _rawDocumentStorage = rawDocumentStorage ?? throw new ArgumentNullException(nameof(rawDocumentStorage)); + _documentStore = documentStore ?? throw new ArgumentNullException(nameof(documentStore)); + _dtoStore = dtoStore ?? throw new ArgumentNullException(nameof(dtoStore)); + _advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore)); + _stateRepository = stateRepository ?? throw new ArgumentNullException(nameof(stateRepository)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _timeProvider = timeProvider ?? TimeProvider.System; + _options = (options ?? throw new ArgumentNullException(nameof(options))).Value ?? throw new ArgumentNullException(nameof(options)); + ValidateOptions(_options); + } + + public string SourceName => Source; + + public async Task FetchAsync(IServiceProvider services, CancellationToken cancellationToken) + { + _ = services ?? 
throw new ArgumentNullException(nameof(services)); + + var now = _timeProvider.GetUtcNow(); + var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false); + var pendingDocuments = cursor.PendingDocuments.ToHashSet(); + var pendingMappings = cursor.PendingMappings.ToHashSet(); + + MirrorIndexDocument index; + try + { + index = await _client.GetIndexAsync(_options.IndexPath, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) + { + await _stateRepository.MarkFailureAsync(Source, now, TimeSpan.FromMinutes(15), ex.Message, cancellationToken).ConfigureAwait(false); + throw; + } + + var domain = index.Domains.FirstOrDefault(entry => + string.Equals(entry.DomainId, _options.DomainId, StringComparison.OrdinalIgnoreCase)); + + if (domain is null) + { + var message = $"Mirror domain '{_options.DomainId}' not present in index."; + await _stateRepository.MarkFailureAsync(Source, now, TimeSpan.FromMinutes(30), message, cancellationToken).ConfigureAwait(false); + throw new InvalidOperationException(message); + } + + var fingerprint = CreateFingerprint(index, domain); + var isNewDigest = !string.Equals(domain.Bundle.Digest, cursor.BundleDigest, StringComparison.OrdinalIgnoreCase); + + if (isNewDigest) + { + pendingDocuments.Clear(); + pendingMappings.Clear(); + } + + if (string.Equals(domain.Bundle.Digest, cursor.BundleDigest, StringComparison.OrdinalIgnoreCase)) + { + _logger.LogInformation("Mirror bundle digest {Digest} unchanged; skipping fetch.", domain.Bundle.Digest); + return; + } + + try + { + await ProcessDomainAsync(index, domain, pendingDocuments, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) + { + await _stateRepository.MarkFailureAsync(Source, now, TimeSpan.FromMinutes(10), ex.Message, cancellationToken).ConfigureAwait(false); + throw; + } + + var completedFingerprint = isNewDigest ? 
null : cursor.CompletedFingerprint; + var updatedCursor = cursor + .WithPendingDocuments(pendingDocuments) + .WithPendingMappings(pendingMappings) + .WithBundleSnapshot(domain.Bundle.Path, domain.Bundle.Digest, index.GeneratedAt) + .WithCompletedFingerprint(completedFingerprint); + + await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false); + } + + public Task ParseAsync(IServiceProvider services, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(services); + + return ParseInternalAsync(cancellationToken); + } + + public Task MapAsync(IServiceProvider services, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(services); + + return MapInternalAsync(cancellationToken); + } + + private async Task ProcessDomainAsync( + MirrorIndexDocument index, + MirrorIndexDomainEntry domain, + HashSet<Guid> pendingDocuments, + CancellationToken cancellationToken) + { + var manifestBytes = await _client.DownloadAsync(domain.Manifest.Path, cancellationToken).ConfigureAwait(false); + var bundleBytes = await _client.DownloadAsync(domain.Bundle.Path, cancellationToken).ConfigureAwait(false); + + VerifyDigest(domain.Manifest.Digest, manifestBytes, domain.Manifest.Path); + VerifyDigest(domain.Bundle.Digest, bundleBytes, domain.Bundle.Path); + + if (_options.Signature.Enabled) + { + if (domain.Bundle.Signature is null) + { + throw new InvalidOperationException("Mirror bundle did not include a signature descriptor while verification is enabled."); + } + + if (!string.IsNullOrWhiteSpace(_options.Signature.KeyId) && + !string.Equals(domain.Bundle.Signature.KeyId, _options.Signature.KeyId, StringComparison.OrdinalIgnoreCase)) + { + throw new InvalidOperationException($"Mirror bundle signature key '{domain.Bundle.Signature.KeyId}' did not match expected key '{_options.Signature.KeyId}'."); + } + + if (!string.IsNullOrWhiteSpace(_options.Signature.Provider) && + !string.Equals(domain.Bundle.Signature.Provider, _options.Signature.Provider, StringComparison.OrdinalIgnoreCase)) + { + throw new InvalidOperationException($"Mirror bundle signature provider '{domain.Bundle.Signature.Provider ?? 
"<null>"}' did not match expected provider '{_options.Signature.Provider}'."); + } + + var signatureBytes = await _client.DownloadAsync(domain.Bundle.Signature.Path, cancellationToken).ConfigureAwait(false); + var signatureValue = Encoding.UTF8.GetString(signatureBytes).Trim(); + await _signatureVerifier.VerifyAsync( + bundleBytes, + signatureValue, + expectedKeyId: _options.Signature.KeyId, + expectedProvider: _options.Signature.Provider, + fallbackPublicKeyPath: _options.Signature.PublicKeyPath, + cancellationToken).ConfigureAwait(false); + } + else if (domain.Bundle.Signature is not null) + { + _logger.LogInformation("Mirror bundle provided signature descriptor but verification is disabled; skipping verification."); + } + + await StoreAsync(domain, index.GeneratedAt, domain.Manifest, manifestBytes, "application/json", DocumentStatuses.Mapped, addToPending: false, pendingDocuments, cancellationToken).ConfigureAwait(false); + var bundleRecord = await StoreAsync(domain, index.GeneratedAt, domain.Bundle, bundleBytes, "application/json", DocumentStatuses.PendingParse, addToPending: true, pendingDocuments, cancellationToken).ConfigureAwait(false); + + _logger.LogInformation( + "Stored mirror bundle {Uri} as document {DocumentId} with digest {Digest}.", + bundleRecord.Uri, + bundleRecord.Id, + bundleRecord.Sha256); + } + + private async Task<DocumentRecord> StoreAsync( + MirrorIndexDomainEntry domain, + DateTimeOffset generatedAt, + MirrorFileDescriptor descriptor, + byte[] payload, + string contentType, + string status, + bool addToPending, + HashSet<Guid> pendingDocuments, + CancellationToken cancellationToken) + { + var absolute = ResolveAbsolutePath(descriptor.Path); + + var existing = await _documentStore.FindBySourceAndUriAsync(Source, absolute, cancellationToken).ConfigureAwait(false); + if (existing is not null && string.Equals(existing.Sha256, NormalizeDigest(descriptor.Digest), StringComparison.OrdinalIgnoreCase)) + { + if (addToPending) + { + pendingDocuments.Add(existing.Id); + } + + return existing; + } + + var gridFsId = await _rawDocumentStorage.UploadAsync(Source, absolute, payload, contentType, cancellationToken).ConfigureAwait(false); + var now = _timeProvider.GetUtcNow(); + var sha = ComputeSha256(payload); + + var metadata = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase) + { + ["mirror.domainId"] = domain.DomainId, + ["mirror.displayName"] = domain.DisplayName, + ["mirror.path"] = descriptor.Path, + ["mirror.digest"] = NormalizeDigest(descriptor.Digest), + ["mirror.type"] = ReferenceEquals(descriptor, domain.Bundle) ? "bundle" : "manifest", + }; + + var record = new DocumentRecord( + existing?.Id ?? Guid.NewGuid(), + Source, + absolute, + now, + sha, + status, + contentType, + Headers: null, + Metadata: metadata, + Etag: null, + LastModified: generatedAt, + GridFsId: gridFsId, + ExpiresAt: null); + + var upserted = await _documentStore.UpsertAsync(record, cancellationToken).ConfigureAwait(false); + + if (addToPending) + { + pendingDocuments.Add(upserted.Id); + } + + return upserted; + } + + private string ResolveAbsolutePath(string path) + { + var uri = new Uri(_options.BaseAddress, path); + return uri.ToString(); + } + + private async Task<StellaOpsMirrorCursor> GetCursorAsync(CancellationToken cancellationToken) + { + var state = await _stateRepository.TryGetAsync(Source, cancellationToken).ConfigureAwait(false); + return state is null ? 
StellaOpsMirrorCursor.Empty : StellaOpsMirrorCursor.FromBson(state.Cursor); + } + + private async Task UpdateCursorAsync(StellaOpsMirrorCursor cursor, CancellationToken cancellationToken) + { + var document = cursor.ToBsonDocument(); + var now = _timeProvider.GetUtcNow(); + await _stateRepository.UpdateCursorAsync(Source, document, now, cancellationToken).ConfigureAwait(false); + } + + private static void VerifyDigest(string expected, ReadOnlySpan<byte> payload, string path) + { + if (string.IsNullOrWhiteSpace(expected)) + { + return; + } + + if (!expected.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase)) + { + throw new InvalidOperationException($"Unsupported digest '{expected}' for '{path}'."); + } + + var actualHash = SHA256.HashData(payload); + var actual = "sha256:" + Convert.ToHexString(actualHash).ToLowerInvariant(); + if (!string.Equals(actual, expected, StringComparison.OrdinalIgnoreCase)) + { + throw new InvalidOperationException($"Digest mismatch for '{path}'. Expected {expected}, computed {actual}."); + } + } + + private static string ComputeSha256(ReadOnlySpan<byte> payload) + { + var hash = SHA256.HashData(payload); + return Convert.ToHexString(hash).ToLowerInvariant(); + } + + private static string NormalizeDigest(string digest) + { + if (string.IsNullOrWhiteSpace(digest)) + { + return string.Empty; + } + + return digest.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase) + ? digest[7..] + : digest.ToLowerInvariant(); + } + + private static string? CreateFingerprint(MirrorIndexDocument index, MirrorIndexDomainEntry domain) + => CreateFingerprint(domain.Bundle.Digest, index.GeneratedAt); + + private static string? CreateFingerprint(string? digest, DateTimeOffset? generatedAt) + { + var normalizedDigest = NormalizeDigest(digest ?? 
string.Empty); + if (string.IsNullOrWhiteSpace(normalizedDigest) || generatedAt is null) + { + return null; + } + + return FormattableString.Invariant($"{normalizedDigest}:{generatedAt.Value.ToUniversalTime():O}"); + } + + private static void ValidateOptions(StellaOpsMirrorConnectorOptions options) + { + if (options.BaseAddress is null || !options.BaseAddress.IsAbsoluteUri) + { + throw new InvalidOperationException("Mirror connector requires an absolute baseAddress."); + } + + if (string.IsNullOrWhiteSpace(options.DomainId)) + { + throw new InvalidOperationException("Mirror connector requires domainId to be specified."); + } + } + + private async Task ParseInternalAsync(CancellationToken cancellationToken) + { + var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false); + if (cursor.PendingDocuments.Count == 0) + { + return; + } + + var pendingDocuments = cursor.PendingDocuments.ToHashSet(); + var pendingMappings = cursor.PendingMappings.ToHashSet(); + var now = _timeProvider.GetUtcNow(); + var parsed = 0; + var failures = 0; + + foreach (var documentId in cursor.PendingDocuments.ToArray()) + { + cancellationToken.ThrowIfCancellationRequested(); + + var document = await _documentStore.FindAsync(documentId, cancellationToken).ConfigureAwait(false); + if (document is null) + { + pendingDocuments.Remove(documentId); + pendingMappings.Remove(documentId); + failures++; + continue; + } + + if (!document.GridFsId.HasValue) + { + _logger.LogWarning("Mirror bundle document {DocumentId} missing GridFS payload.", documentId); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + pendingDocuments.Remove(documentId); + pendingMappings.Remove(documentId); + failures++; + continue; + } + + byte[] payload; + try + { + payload = await _rawDocumentStorage.DownloadAsync(document.GridFsId.Value, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) + { + _logger.LogError(ex, "Mirror bundle {DocumentId} failed to download from raw storage.", documentId); + throw; + } + + MirrorBundleDocument? 
bundle; + string json; + try + { + json = Encoding.UTF8.GetString(payload); + bundle = CanonicalJsonSerializer.Deserialize<MirrorBundleDocument>(json); + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Mirror bundle {DocumentId} failed to deserialize.", documentId); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + pendingDocuments.Remove(documentId); + pendingMappings.Remove(documentId); + failures++; + continue; + } + + if (bundle is null || bundle.Advisories is null) + { + _logger.LogWarning("Mirror bundle {DocumentId} produced null payload.", documentId); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + pendingDocuments.Remove(documentId); + pendingMappings.Remove(documentId); + failures++; + continue; + } + + var dtoBson = BsonDocument.Parse(json); + var dtoRecord = new DtoRecord(Guid.NewGuid(), document.Id, Source, BundleDtoSchemaVersion, dtoBson, now); + await _dtoStore.UpsertAsync(dtoRecord, cancellationToken).ConfigureAwait(false); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.PendingMap, cancellationToken).ConfigureAwait(false); + + pendingDocuments.Remove(documentId); + pendingMappings.Add(document.Id); + parsed++; + + _logger.LogDebug( + "Parsed mirror bundle {DocumentId} domain={DomainId} advisories={AdvisoryCount}.", + document.Id, + bundle.DomainId, + bundle.AdvisoryCount); + } + + var updatedCursor = cursor + .WithPendingDocuments(pendingDocuments) + .WithPendingMappings(pendingMappings); + + await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false); + + if (parsed > 0 || failures > 0) + { + _logger.LogInformation( + "Mirror parse completed parsed={Parsed} failures={Failures} pendingDocuments={PendingDocuments} pendingMappings={PendingMappings}.", + parsed, + failures, + pendingDocuments.Count, + pendingMappings.Count); + } + } + + private async Task MapInternalAsync(CancellationToken cancellationToken) + { + var cursor = await GetCursorAsync(cancellationToken).ConfigureAwait(false); + if (cursor.PendingMappings.Count == 0) + { + return; + } + + var pendingMappings = cursor.PendingMappings.ToHashSet(); + var mapped = 0; + var failures = 0; + var completedFingerprint = cursor.CompletedFingerprint; + + foreach (var documentId in cursor.PendingMappings.ToArray()) + { + cancellationToken.ThrowIfCancellationRequested(); + + var document = await _documentStore.FindAsync(documentId, cancellationToken).ConfigureAwait(false); + if (document is null) + { + pendingMappings.Remove(documentId); + failures++; + continue; + } + + var dtoRecord = await _dtoStore.FindByDocumentIdAsync(documentId, cancellationToken).ConfigureAwait(false); + if (dtoRecord is null) + { + _logger.LogWarning("Mirror document {DocumentId} missing DTO payload.", documentId); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + pendingMappings.Remove(documentId); + failures++; + continue; + } + + MirrorBundleDocument? 
bundle; + try + { + var json = dtoRecord.Payload.ToJson(); + bundle = CanonicalJsonSerializer.Deserialize<MirrorBundleDocument>(json); + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Mirror DTO for document {DocumentId} failed to deserialize.", documentId); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + pendingMappings.Remove(documentId); + failures++; + continue; + } + + if (bundle is null || bundle.Advisories is null) + { + _logger.LogWarning("Mirror bundle DTO {DocumentId} evaluated to null.", documentId); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + pendingMappings.Remove(documentId); + failures++; + continue; + } + + try + { + var advisories = MirrorAdvisoryMapper.Map(bundle); + + foreach (var advisory in advisories) + { + cancellationToken.ThrowIfCancellationRequested(); + await _advisoryStore.UpsertAsync(advisory, cancellationToken).ConfigureAwait(false); + } + + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Mapped, cancellationToken).ConfigureAwait(false); + pendingMappings.Remove(documentId); + mapped++; + + _logger.LogDebug( + "Mirror map completed for document {DocumentId} domain={DomainId} advisories={AdvisoryCount}.", + document.Id, + bundle.DomainId, + advisories.Length); + } + catch (Exception ex) + { + _logger.LogError(ex, "Mirror mapping failed for document {DocumentId}.", documentId); + await _documentStore.UpdateStatusAsync(document.Id, DocumentStatuses.Failed, cancellationToken).ConfigureAwait(false); + pendingMappings.Remove(documentId); + failures++; + } + } + + if (pendingMappings.Count == 0 && failures == 0) + { + var fingerprint = CreateFingerprint(cursor.BundleDigest, cursor.GeneratedAt); + if (!string.IsNullOrWhiteSpace(fingerprint)) + { + completedFingerprint = fingerprint; + } + } + + var updatedCursor = cursor + .WithPendingMappings(pendingMappings) + .WithCompletedFingerprint(completedFingerprint); + await UpdateCursorAsync(updatedCursor, cancellationToken).ConfigureAwait(false); + + if (mapped > 0 || failures > 0) + { + _logger.LogInformation( + "Mirror map completed mapped={Mapped} failures={Failures} pendingMappings={PendingMappings}.", + mapped, + failures, + pendingMappings.Count); + } + } +} + +file static class UriExtensions +{ + public static Uri Combine(this Uri baseUri, string relative) + => new(baseUri, relative); +} diff --git a/src/StellaOps.Concelier.Connector.StellaOpsMirror/StellaOpsMirrorConnectorPlugin.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/StellaOpsMirrorConnectorPlugin.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.StellaOpsMirror/StellaOpsMirrorConnectorPlugin.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/StellaOpsMirrorConnectorPlugin.cs diff --git a/src/StellaOps.Concelier.Connector.StellaOpsMirror/StellaOpsMirrorDependencyInjectionRoutine.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/StellaOpsMirrorDependencyInjectionRoutine.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.StellaOpsMirror/StellaOpsMirrorDependencyInjectionRoutine.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/StellaOpsMirrorDependencyInjectionRoutine.cs diff --git a/src/StellaOps.Concelier.Connector.StellaOpsMirror/TASKS.md 
b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/TASKS.md similarity index 90% rename from src/StellaOps.Concelier.Connector.StellaOpsMirror/TASKS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/TASKS.md index 403c4340..34eb94a6 100644 --- a/src/StellaOps.Concelier.Connector.StellaOpsMirror/TASKS.md +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/TASKS.md @@ -1,7 +1,7 @@ -# StellaOps Mirror Connector Task Board (Sprint 8) - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| FEEDCONN-STELLA-08-001 | DONE (2025-10-20) | BE-Conn-Stella | CONCELIER-EXPORT-08-201 | Implement Concelier mirror fetcher hitting `https://<domain>.stella-ops.org/concelier/exports/index.json`, verify signatures/digests, and persist raw documents with provenance. | Fetch job downloads mirror manifest, verifies digest/signature, stores raw docs with tests covering happy-path + tampered manifest. *(Completed 2025-10-20: detached JWS + digest enforcement, metadata persisted, and regression coverage via `dotnet test src/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/StellaOps.Concelier.Connector.StellaOpsMirror.Tests.csproj`.)* | +# StellaOps Mirror Connector Task Board (Sprint 8) + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| FEEDCONN-STELLA-08-001 | DONE (2025-10-20) | BE-Conn-Stella | CONCELIER-EXPORT-08-201 | Implement Concelier mirror fetcher hitting `https://<domain>.stella-ops.org/concelier/exports/index.json`, verify signatures/digests, and persist raw documents with provenance. | Fetch job downloads mirror manifest, verifies digest/signature, stores raw docs with tests covering happy-path + tampered manifest. *(Completed 2025-10-20: detached JWS + digest enforcement, metadata persisted, and regression coverage via `dotnet test src/Concelier/__Tests/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/StellaOps.Concelier.Connector.StellaOpsMirror.Tests.csproj`.)* | | FEEDCONN-STELLA-08-002 | DONE (2025-10-20) | BE-Conn-Stella | FEEDCONN-STELLA-08-001 | Map mirror payloads into canonical advisory DTOs with provenance referencing mirror domain + original source metadata. | Mapper produces advisories/aliases/affected with mirror provenance; fixtures assert canonical parity with upstream JSON exporters. | | FEEDCONN-STELLA-08-003 | DONE (2025-10-20) | BE-Conn-Stella | FEEDCONN-STELLA-08-002 | Add incremental cursor + resume support (per-export fingerprint) and document configuration for downstream Concelier instances. | Connector resumes from last export, handles deletion/delta cases, docs updated with config sample; integration test covers resume + new export scenario. 
| diff --git a/src/StellaOps.Concelier.Connector.Vndr.Adobe/AGENTS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Adobe/AGENTS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Adobe/AGENTS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Adobe/AGENTS.md diff --git a/src/StellaOps.Concelier.Connector.Vndr.Adobe/AdobeConnector.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Adobe/AdobeConnector.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Adobe/AdobeConnector.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Adobe/AdobeConnector.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Adobe/AdobeConnectorPlugin.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Adobe/AdobeConnectorPlugin.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Adobe/AdobeConnectorPlugin.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Adobe/AdobeConnectorPlugin.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Adobe/AdobeDiagnostics.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Adobe/AdobeDiagnostics.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Adobe/AdobeDiagnostics.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Adobe/AdobeDiagnostics.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Adobe/AdobeServiceCollectionExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Adobe/AdobeServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Adobe/AdobeServiceCollectionExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Adobe/AdobeServiceCollectionExtensions.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Adobe/Configuration/AdobeOptions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Adobe/Configuration/AdobeOptions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Adobe/Configuration/AdobeOptions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Adobe/Configuration/AdobeOptions.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Adobe/Internal/AdobeBulletinDto.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Adobe/Internal/AdobeBulletinDto.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Adobe/Internal/AdobeBulletinDto.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Adobe/Internal/AdobeBulletinDto.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Adobe/Internal/AdobeCursor.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Adobe/Internal/AdobeCursor.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Adobe/Internal/AdobeCursor.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Adobe/Internal/AdobeCursor.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Adobe/Internal/AdobeDetailParser.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Adobe/Internal/AdobeDetailParser.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Adobe/Internal/AdobeDetailParser.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Adobe/Internal/AdobeDetailParser.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Adobe/Internal/AdobeDocumentMetadata.cs 
b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Adobe/Internal/AdobeDocumentMetadata.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Adobe/Internal/AdobeDocumentMetadata.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Adobe/Internal/AdobeDocumentMetadata.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Adobe/Internal/AdobeIndexEntry.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Adobe/Internal/AdobeIndexEntry.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Adobe/Internal/AdobeIndexEntry.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Adobe/Internal/AdobeIndexEntry.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Adobe/Internal/AdobeIndexParser.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Adobe/Internal/AdobeIndexParser.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Adobe/Internal/AdobeIndexParser.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Adobe/Internal/AdobeIndexParser.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Adobe/Internal/AdobeSchemaProvider.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Adobe/Internal/AdobeSchemaProvider.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Adobe/Internal/AdobeSchemaProvider.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Adobe/Internal/AdobeSchemaProvider.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Adobe/Schemas/adobe-bulletin.schema.json b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Adobe/Schemas/adobe-bulletin.schema.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Adobe/Schemas/adobe-bulletin.schema.json rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Adobe/Schemas/adobe-bulletin.schema.json diff --git a/src/StellaOps.Concelier.Connector.Vndr.Adobe/StellaOps.Concelier.Connector.Vndr.Adobe.csproj b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Adobe/StellaOps.Concelier.Connector.Vndr.Adobe.csproj similarity index 83% rename from src/StellaOps.Concelier.Connector.Vndr.Adobe/StellaOps.Concelier.Connector.Vndr.Adobe.csproj rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Adobe/StellaOps.Concelier.Connector.Vndr.Adobe.csproj index bf9c782a..18611889 100644 --- a/src/StellaOps.Concelier.Connector.Vndr.Adobe/StellaOps.Concelier.Connector.Vndr.Adobe.csproj +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Adobe/StellaOps.Concelier.Connector.Vndr.Adobe.csproj @@ -1,25 +1,25 @@ -<Project Sdk="Microsoft.NET.Sdk"> - - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - - <ItemGroup> - <PackageReference Include="AngleSharp" Version="1.1.1" /> - </ItemGroup> - - <ItemGroup> - <EmbeddedResource Include="Schemas\adobe-bulletin.schema.json" /> - </ItemGroup> - - <ItemGroup> - <ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" /> - - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - </ItemGroup> -</Project> - +<?xml version='1.0' 
encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + + <ItemGroup> + <PackageReference Include="AngleSharp" Version="1.1.1" /> + </ItemGroup> + + <ItemGroup> + <EmbeddedResource Include="Schemas\adobe-bulletin.schema.json" /> + </ItemGroup> + + <ItemGroup> + <ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" /> + + <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Vndr.Adobe/TASKS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Adobe/TASKS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Adobe/TASKS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Adobe/TASKS.md diff --git a/src/StellaOps.Concelier.Connector.Vndr.Apple/AGENTS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Apple/AGENTS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Apple/AGENTS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Apple/AGENTS.md diff --git a/src/StellaOps.Concelier.Connector.Vndr.Apple/AppleConnector.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Apple/AppleConnector.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Apple/AppleConnector.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Apple/AppleConnector.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Apple/AppleDependencyInjectionRoutine.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Apple/AppleDependencyInjectionRoutine.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Apple/AppleDependencyInjectionRoutine.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Apple/AppleDependencyInjectionRoutine.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Apple/AppleOptions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Apple/AppleOptions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Apple/AppleOptions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Apple/AppleOptions.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Apple/AppleServiceCollectionExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Apple/AppleServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Apple/AppleServiceCollectionExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Apple/AppleServiceCollectionExtensions.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Apple/Internal/AppleCursor.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Apple/Internal/AppleCursor.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Apple/Internal/AppleCursor.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Apple/Internal/AppleCursor.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Apple/Internal/AppleDetailDto.cs 
b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Apple/Internal/AppleDetailDto.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Apple/Internal/AppleDetailDto.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Apple/Internal/AppleDetailDto.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Apple/Internal/AppleDetailParser.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Apple/Internal/AppleDetailParser.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Apple/Internal/AppleDetailParser.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Apple/Internal/AppleDetailParser.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Apple/Internal/AppleDiagnostics.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Apple/Internal/AppleDiagnostics.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Apple/Internal/AppleDiagnostics.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Apple/Internal/AppleDiagnostics.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Apple/Internal/AppleIndexEntry.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Apple/Internal/AppleIndexEntry.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Apple/Internal/AppleIndexEntry.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Apple/Internal/AppleIndexEntry.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Apple/Internal/AppleMapper.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Apple/Internal/AppleMapper.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Apple/Internal/AppleMapper.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Apple/Internal/AppleMapper.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Apple/Jobs.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Apple/Jobs.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Apple/Jobs.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Apple/Jobs.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Apple/Properties/AssemblyInfo.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Apple/Properties/AssemblyInfo.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Apple/Properties/AssemblyInfo.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Apple/Properties/AssemblyInfo.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Apple/README.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Apple/README.md similarity index 89% rename from src/StellaOps.Concelier.Connector.Vndr.Apple/README.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Apple/README.md index 3aeafca7..507f9547 100644 --- a/src/StellaOps.Concelier.Connector.Vndr.Apple/README.md +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Apple/README.md @@ -1,49 +1,49 @@ -# Apple Security Updates Connector - -## Feed contract - -The Apple Software Lookup Service (`https://gdmf.apple.com/v2/pmv`) publishes JSON payloads describing every public software release Apple has shipped. 
Each `AssetSet` entry exposes: - -- `ProductBuildVersion`, `ProductVersion`, and channel flags (e.g., `RapidSecurityResponse`) -- Timestamps for `PostingDate`, `ExpirationDate`, and `PreInstallDeadline` -- Associated product families/devices (Mac, iPhone, iPad, Apple TV, Apple Watch, VisionOS) -- Metadata for download packages, release notes, and signing assets - -The service supports delta polling by filtering on `PostingDate` and `ReleaseType`; responses are gzip-compressed and require a standard HTTPS client.citeturn3search8 - -Apple’s new security updates landing hub (`https://support.apple.com/100100`) consolidates bulletin detail pages (HT articles). Each update is linked via an `HT` identifier such as `HT214108` and lists: - -- CVE identifiers with Apple’s internal tracking IDs -- Product version/build applicability tables -- Mitigation guidance, acknowledgements, and update packaging notesciteturn1search6 - -Historical advisories redirect to per-platform pages (e.g., macOS, iOS, visionOS). The HTML structure uses `<section data-component="security-update">` blocks with nested tables for affected products. CVE rows include disclosure dates and impact text that we can normalise into canonical `AffectedPackage` entries. - -## Change detection strategy - -1. Poll the Software Lookup Service for updates where `PostingDate` is within the sliding window (`lastModified - tolerance`). Cache `ProductID` + `PostingDate` to avoid duplicate fetches. -2. For each candidate, derive the HT article URL from `DocumentationURL` or by combining the `HT` identifier with the base path (`https://support.apple.com/{locale}/`). Fetch with conditional headers (`If-None-Match`, `If-Modified-Since`). -3. On HTTP `200`, store the raw HTML + metadata (HT id, posting date, product identifiers). On `304`, re-queue existing documents for mapping only. - -Unofficial Apple documentation warns that the Software Lookup Service rate-limits clients after repeated unauthenticated bursts; respect 5 requests/second and honour `Retry-After` headers on `403/429` responses.citeturn3search3 - -## Parsing & mapping notes - -- CVE lists live inside `<ul data-testid="cve-list">` items; each `<li>` contains CVE, impact, and credit text. Parse these into canonical `Alias` + `AffectedPackage` records, using Apple’s component name as the package `name` and the OS build as the range primitive seed. -- Product/version tables have headers for platform (`Platform`, `Version`, `Build`). Map the OS name into our vendor range primitive namespace (`apple.platform`, `apple.build`). -- Rapid Security Response advisories include an `Rapid Security Responses` badge; emit `psirt_flags` with `apple.rapid_security_response = true`. - -## Outstanding questions - -- Some HT pages embed downloadable PDFs for supplemental mitigations. Confirm whether to persist PDF text via the shared `PdfTextExtractor`. -- Vision Pro updates include `deviceFamily` identifiers not yet mapped in `RangePrimitives`. Extend the model with `apple.deviceFamily` once sample fixtures are captured. - -## Fixture maintenance - -Deterministic regression coverage lives in `src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures`. 
When Apple publishes new advisories the fixtures must be refreshed using the provided helper scripts:
-
-- Bash: `./scripts/update-apple-fixtures.sh`
-- PowerShell: `./scripts/update-apple-fixtures.ps1`
-
-Both scripts set `UPDATE_APPLE_FIXTURES=1`, touch a `.update-apple-fixtures` sentinel so test runs inside WSL propagate the flag, fetch the live HT articles referenced in `AppleFixtureManager`, sanitise the HTML, and rewrite the paired `.expected.json` DTO snapshots. Always inspect the resulting diff and re-run `dotnet test src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests.csproj` without the environment variable to ensure deterministic output before committing.
-
+# Apple Security Updates Connector
+
+## Feed contract
+
+The Apple Software Lookup Service (`https://gdmf.apple.com/v2/pmv`) publishes JSON payloads describing every public software release Apple has shipped. Each `AssetSet` entry exposes:
+
+- `ProductBuildVersion`, `ProductVersion`, and channel flags (e.g., `RapidSecurityResponse`)
+- Timestamps for `PostingDate`, `ExpirationDate`, and `PreInstallDeadline`
+- Associated product families/devices (Mac, iPhone, iPad, Apple TV, Apple Watch, VisionOS)
+- Metadata for download packages, release notes, and signing assets
+
+The service supports delta polling by filtering on `PostingDate` and `ReleaseType`; responses are gzip-compressed and require a standard HTTPS client.
+
+Apple’s new security updates landing hub (`https://support.apple.com/100100`) consolidates bulletin detail pages (HT articles). Each update is linked via an `HT` identifier such as `HT214108` and lists:
+
+- CVE identifiers with Apple’s internal tracking IDs
+- Product version/build applicability tables
+- Mitigation guidance, acknowledgements, and update packaging notes
+
+Historical advisories redirect to per-platform pages (e.g., macOS, iOS, visionOS). The HTML structure uses `<section data-component="security-update">` blocks with nested tables for affected products. CVE rows include disclosure dates and impact text that we can normalise into canonical `AffectedPackage` entries.
+
+## Change detection strategy
+
+1. Poll the Software Lookup Service for updates where `PostingDate` is within the sliding window (`lastModified - tolerance`). Cache `ProductID` + `PostingDate` to avoid duplicate fetches.
+2. For each candidate, derive the HT article URL from `DocumentationURL` or by combining the `HT` identifier with the base path (`https://support.apple.com/{locale}/`). Fetch with conditional headers (`If-None-Match`, `If-Modified-Since`); see the sketch after this list.
+3. On HTTP `200`, store the raw HTML + metadata (HT id, posting date, product identifiers). On `304`, re-queue existing documents for mapping only.
+
+Unofficial Apple documentation warns that the Software Lookup Service rate-limits clients after repeated unauthenticated bursts; respect 5 requests/second and honour `Retry-After` headers on `403/429` responses.
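+
+A minimal sketch of that conditional fetch is below. It is illustrative only: `FetchHtArticleAsync` and its wiring are hypothetical (the real connector issues requests through the shared source-fetch plumbing); only the header and status handling mirror the strategy above.
+
+```csharp
+using System;
+using System.Net;
+using System.Net.Http;
+using System.Net.Http.Headers;
+using System.Threading;
+using System.Threading.Tasks;
+
+internal static class AppleConditionalFetchSketch
+{
+    // Hypothetical helper: performs one conditional GET for an HT article.
+    public static async Task<(string? Html, bool NotModified)> FetchHtArticleAsync(
+        HttpClient client,
+        Uri articleUri,
+        string? etag,
+        DateTimeOffset? lastModified,
+        CancellationToken cancellationToken)
+    {
+        using var request = new HttpRequestMessage(HttpMethod.Get, articleUri);
+
+        // Reuse the validators captured on the previous fetch (step 2 above).
+        if (!string.IsNullOrEmpty(etag) && EntityTagHeaderValue.TryParse(etag, out var parsedEtag))
+        {
+            request.Headers.IfNoneMatch.Add(parsedEtag);
+        }
+
+        if (lastModified is not null)
+        {
+            request.Headers.IfModifiedSince = lastModified;
+        }
+
+        using var response = await client.SendAsync(request, cancellationToken).ConfigureAwait(false);
+
+        if (response.StatusCode == HttpStatusCode.NotModified)
+        {
+            // 304: keep the stored document and re-queue it for mapping only (step 3 above).
+            return (null, true);
+        }
+
+        response.EnsureSuccessStatusCode();
+
+        // 200: return the raw HTML so the caller can persist it with HT id, posting date, and product metadata.
+        var html = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
+        return (html, false);
+    }
+}
+```
+
+Callers are still expected to apply the rate-limit guidance above (5 requests/second, `Retry-After` on `403`/`429`) around any such helper.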
+
+## Parsing & mapping notes
+
+- CVE lists live inside `<ul data-testid="cve-list">` items; each `<li>` contains CVE, impact, and credit text. Parse these into canonical `Alias` + `AffectedPackage` records, using Apple’s component name as the package `name` and the OS build as the range primitive seed.
+- Product/version tables have headers for platform (`Platform`, `Version`, `Build`). Map the OS name into our vendor range primitive namespace (`apple.platform`, `apple.build`).
+- Rapid Security Response advisories include a `Rapid Security Responses` badge; emit `psirt_flags` with `apple.rapid_security_response = true`.
+
+## Outstanding questions
+
+- Some HT pages embed downloadable PDFs for supplemental mitigations. Confirm whether to persist PDF text via the shared `PdfTextExtractor`.
+- Vision Pro updates include `deviceFamily` identifiers not yet mapped in `RangePrimitives`. Extend the model with `apple.deviceFamily` once sample fixtures are captured.
+
+## Fixture maintenance
+
+Deterministic regression coverage lives in `src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures`. When Apple publishes new advisories the fixtures must be refreshed using the provided helper scripts:
+
+- Bash: `./scripts/update-apple-fixtures.sh`
+- PowerShell: `./scripts/update-apple-fixtures.ps1`
+
+Both scripts set `UPDATE_APPLE_FIXTURES=1`, touch a `.update-apple-fixtures` sentinel so test runs inside WSL propagate the flag, fetch the live HT articles referenced in `AppleFixtureManager`, sanitise the HTML, and rewrite the paired `.expected.json` DTO snapshots. Always inspect the resulting diff and re-run `dotnet test src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests.csproj` without the environment variable to ensure deterministic output before committing.
+
diff --git a/src/StellaOps.Concelier.Connector.Vndr.Apple/StellaOps.Concelier.Connector.Vndr.Apple.csproj b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Apple/StellaOps.Concelier.Connector.Vndr.Apple.csproj similarity index 81% rename from src/StellaOps.Concelier.Connector.Vndr.Apple/StellaOps.Concelier.Connector.Vndr.Apple.csproj rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Apple/StellaOps.Concelier.Connector.Vndr.Apple.csproj index 6f59477f..2c251c85 100644 --- a/src/StellaOps.Concelier.Connector.Vndr.Apple/StellaOps.Concelier.Connector.Vndr.Apple.csproj +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Apple/StellaOps.Concelier.Connector.Vndr.Apple.csproj @@ -1,18 +1,18 @@ -<Project Sdk="Microsoft.NET.Sdk"> - - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - - <ItemGroup> - <ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" /> - +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + + <ItemGroup> + <ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" /> <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> </ItemGroup> -</Project> - +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Vndr.Apple/TASKS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Apple/TASKS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Apple/TASKS.md rename to
src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Apple/TASKS.md diff --git a/src/StellaOps.Concelier.Connector.Vndr.Apple/VndrAppleConnectorPlugin.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Apple/VndrAppleConnectorPlugin.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Apple/VndrAppleConnectorPlugin.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Apple/VndrAppleConnectorPlugin.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Chromium/AGENTS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Chromium/AGENTS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Chromium/AGENTS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Chromium/AGENTS.md diff --git a/src/StellaOps.Concelier.Connector.Vndr.Chromium/ChromiumConnector.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Chromium/ChromiumConnector.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Chromium/ChromiumConnector.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Chromium/ChromiumConnector.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Chromium/ChromiumConnectorPlugin.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Chromium/ChromiumConnectorPlugin.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Chromium/ChromiumConnectorPlugin.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Chromium/ChromiumConnectorPlugin.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Chromium/ChromiumDiagnostics.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Chromium/ChromiumDiagnostics.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Chromium/ChromiumDiagnostics.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Chromium/ChromiumDiagnostics.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Chromium/ChromiumServiceCollectionExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Chromium/ChromiumServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Chromium/ChromiumServiceCollectionExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Chromium/ChromiumServiceCollectionExtensions.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Chromium/Configuration/ChromiumOptions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Chromium/Configuration/ChromiumOptions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Chromium/Configuration/ChromiumOptions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Chromium/Configuration/ChromiumOptions.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumCursor.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumCursor.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumCursor.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumCursor.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumDocumentMetadata.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumDocumentMetadata.cs similarity index 100% rename from 
src/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumDocumentMetadata.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumDocumentMetadata.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumDto.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumDto.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumDto.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumDto.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumFeedEntry.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumFeedEntry.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumFeedEntry.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumFeedEntry.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumFeedLoader.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumFeedLoader.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumFeedLoader.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumFeedLoader.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumMapper.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumMapper.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumMapper.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumMapper.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumParser.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumParser.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumParser.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumParser.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumSchemaProvider.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumSchemaProvider.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumSchemaProvider.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Chromium/Internal/ChromiumSchemaProvider.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Chromium/Properties/AssemblyInfo.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Chromium/Properties/AssemblyInfo.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Chromium/Properties/AssemblyInfo.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Chromium/Properties/AssemblyInfo.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Chromium/Schemas/chromium-post.schema.json b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Chromium/Schemas/chromium-post.schema.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Chromium/Schemas/chromium-post.schema.json rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Chromium/Schemas/chromium-post.schema.json diff --git 
a/src/StellaOps.Concelier.Connector.Vndr.Chromium/StellaOps.Concelier.Connector.Vndr.Chromium.csproj b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Chromium/StellaOps.Concelier.Connector.Vndr.Chromium.csproj similarity index 87% rename from src/StellaOps.Concelier.Connector.Vndr.Chromium/StellaOps.Concelier.Connector.Vndr.Chromium.csproj rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Chromium/StellaOps.Concelier.Connector.Vndr.Chromium.csproj index e1f0543a..3952bcb0 100644 --- a/src/StellaOps.Concelier.Connector.Vndr.Chromium/StellaOps.Concelier.Connector.Vndr.Chromium.csproj +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Chromium/StellaOps.Concelier.Connector.Vndr.Chromium.csproj @@ -1,32 +1,32 @@ -<Project Sdk="Microsoft.NET.Sdk"> - - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - - <ItemGroup> - <PackageReference Include="AngleSharp" Version="1.1.1" /> - <PackageReference Include="System.ServiceModel.Syndication" Version="8.0.0" /> - </ItemGroup> - - <ItemGroup> - <EmbeddedResource Include="Schemas\chromium-post.schema.json" /> - </ItemGroup> - - <ItemGroup> - <ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" /> - - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - </ItemGroup> - - <ItemGroup> - <AssemblyAttribute Include="System.Runtime.CompilerServices.InternalsVisibleTo"> - <_Parameter1>StellaOps.Concelier.Connector.Vndr.Chromium.Tests</_Parameter1> - </AssemblyAttribute> - </ItemGroup> -</Project> - +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + + <ItemGroup> + <PackageReference Include="AngleSharp" Version="1.1.1" /> + <PackageReference Include="System.ServiceModel.Syndication" Version="8.0.0" /> + </ItemGroup> + + <ItemGroup> + <EmbeddedResource Include="Schemas\chromium-post.schema.json" /> + </ItemGroup> + + <ItemGroup> + <ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" /> + + <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> + </ItemGroup> + + <ItemGroup> + <AssemblyAttribute Include="System.Runtime.CompilerServices.InternalsVisibleTo"> + <_Parameter1>StellaOps.Concelier.Connector.Vndr.Chromium.Tests</_Parameter1> + </AssemblyAttribute> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Vndr.Chromium/TASKS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Chromium/TASKS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Chromium/TASKS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Chromium/TASKS.md diff --git a/src/StellaOps.Concelier.Connector.Vndr.Cisco/AGENTS.md 
b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/AGENTS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Cisco/AGENTS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/AGENTS.md diff --git a/src/StellaOps.Concelier.Connector.Vndr.Cisco/CiscoConnector.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/CiscoConnector.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Cisco/CiscoConnector.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/CiscoConnector.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Cisco/CiscoDependencyInjectionRoutine.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/CiscoDependencyInjectionRoutine.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Cisco/CiscoDependencyInjectionRoutine.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/CiscoDependencyInjectionRoutine.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Cisco/CiscoServiceCollectionExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/CiscoServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Cisco/CiscoServiceCollectionExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/CiscoServiceCollectionExtensions.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Cisco/Configuration/CiscoOptions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/Configuration/CiscoOptions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Cisco/Configuration/CiscoOptions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/Configuration/CiscoOptions.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoAccessTokenProvider.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoAccessTokenProvider.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoAccessTokenProvider.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoAccessTokenProvider.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoAdvisoryDto.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoAdvisoryDto.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoAdvisoryDto.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoAdvisoryDto.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoCsafClient.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoCsafClient.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoCsafClient.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoCsafClient.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoCsafData.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoCsafData.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoCsafData.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoCsafData.cs diff --git 
a/src/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoCsafParser.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoCsafParser.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoCsafParser.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoCsafParser.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoCursor.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoCursor.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoCursor.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoCursor.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoDiagnostics.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoDiagnostics.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoDiagnostics.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoDiagnostics.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoDtoFactory.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoDtoFactory.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoDtoFactory.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoDtoFactory.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoMapper.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoMapper.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoMapper.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoMapper.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoOAuthMessageHandler.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoOAuthMessageHandler.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoOAuthMessageHandler.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoOAuthMessageHandler.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoOpenVulnClient.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoOpenVulnClient.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoOpenVulnClient.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoOpenVulnClient.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoRawAdvisory.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoRawAdvisory.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoRawAdvisory.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/Internal/CiscoRawAdvisory.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Cisco/Jobs.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/Jobs.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Cisco/Jobs.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/Jobs.cs diff --git 
a/src/StellaOps.Concelier.Connector.Vndr.Cisco/StellaOps.Concelier.Connector.Vndr.Cisco.csproj b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/StellaOps.Concelier.Connector.Vndr.Cisco.csproj similarity index 81% rename from src/StellaOps.Concelier.Connector.Vndr.Cisco/StellaOps.Concelier.Connector.Vndr.Cisco.csproj rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/StellaOps.Concelier.Connector.Vndr.Cisco.csproj index 4eea5e86..054d13ab 100644 --- a/src/StellaOps.Concelier.Connector.Vndr.Cisco/StellaOps.Concelier.Connector.Vndr.Cisco.csproj +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/StellaOps.Concelier.Connector.Vndr.Cisco.csproj @@ -1,16 +1,17 @@ -<Project Sdk="Microsoft.NET.Sdk"> - - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> - <ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" /> + <ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" /> <ProjectReference Include="../StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" /> <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Vndr.Cisco/TASKS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/TASKS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Cisco/TASKS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/TASKS.md diff --git a/src/StellaOps.Concelier.Connector.Vndr.Cisco/VndrCiscoConnectorPlugin.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/VndrCiscoConnectorPlugin.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Cisco/VndrCiscoConnectorPlugin.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/VndrCiscoConnectorPlugin.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Msrc/AGENTS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/AGENTS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Msrc/AGENTS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/AGENTS.md diff --git a/src/StellaOps.Concelier.Connector.Vndr.Msrc/Configuration/MsrcOptions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/Configuration/MsrcOptions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Msrc/Configuration/MsrcOptions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/Configuration/MsrcOptions.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcAdvisoryDto.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcAdvisoryDto.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcAdvisoryDto.cs rename to 
src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcAdvisoryDto.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcApiClient.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcApiClient.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcApiClient.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcApiClient.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcCursor.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcCursor.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcCursor.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcCursor.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcDetailDto.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcDetailDto.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcDetailDto.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcDetailDto.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcDetailParser.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcDetailParser.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcDetailParser.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcDetailParser.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcDiagnostics.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcDiagnostics.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcDiagnostics.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcDiagnostics.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcDocumentMetadata.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcDocumentMetadata.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcDocumentMetadata.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcDocumentMetadata.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcMapper.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcMapper.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcMapper.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcMapper.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcSummaryResponse.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcSummaryResponse.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcSummaryResponse.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcSummaryResponse.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcTokenProvider.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcTokenProvider.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcTokenProvider.cs rename to 
src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/Internal/MsrcTokenProvider.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Msrc/Jobs.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/Jobs.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Msrc/Jobs.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/Jobs.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Msrc/MsrcConnector.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/MsrcConnector.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Msrc/MsrcConnector.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/MsrcConnector.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Msrc/MsrcConnectorPlugin.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/MsrcConnectorPlugin.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Msrc/MsrcConnectorPlugin.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/MsrcConnectorPlugin.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Msrc/MsrcDependencyInjectionRoutine.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/MsrcDependencyInjectionRoutine.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Msrc/MsrcDependencyInjectionRoutine.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/MsrcDependencyInjectionRoutine.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Msrc/MsrcServiceCollectionExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/MsrcServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Msrc/MsrcServiceCollectionExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/MsrcServiceCollectionExtensions.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Msrc/README.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/README.md similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Msrc/README.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/README.md diff --git a/src/StellaOps.Concelier.Connector.Vndr.Msrc/StellaOps.Concelier.Connector.Vndr.Msrc.csproj b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/StellaOps.Concelier.Connector.Vndr.Msrc.csproj similarity index 75% rename from src/StellaOps.Concelier.Connector.Vndr.Msrc/StellaOps.Concelier.Connector.Vndr.Msrc.csproj rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/StellaOps.Concelier.Connector.Vndr.Msrc.csproj index d97e92d3..bc57abd1 100644 --- a/src/StellaOps.Concelier.Connector.Vndr.Msrc/StellaOps.Concelier.Connector.Vndr.Msrc.csproj +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/StellaOps.Concelier.Connector.Vndr.Msrc.csproj @@ -1,16 +1,16 @@ -<Project Sdk="Microsoft.NET.Sdk"> - - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - - <ItemGroup> - <ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" /> - - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - </ItemGroup> -</Project> - +<?xml 
version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + + <ItemGroup> + <ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" /> + + <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Vndr.Msrc/TASKS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/TASKS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Msrc/TASKS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/TASKS.md diff --git a/src/StellaOps.Concelier.Connector.Vndr.Oracle/AGENTS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/AGENTS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Oracle/AGENTS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/AGENTS.md diff --git a/src/StellaOps.Concelier.Connector.Vndr.Oracle/Configuration/OracleOptions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/Configuration/OracleOptions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Oracle/Configuration/OracleOptions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/Configuration/OracleOptions.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleAffectedEntry.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleAffectedEntry.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleAffectedEntry.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleAffectedEntry.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleCalendarFetcher.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleCalendarFetcher.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleCalendarFetcher.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleCalendarFetcher.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleCursor.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleCursor.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleCursor.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleCursor.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleDocumentMetadata.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleDocumentMetadata.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleDocumentMetadata.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleDocumentMetadata.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleDto.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleDto.cs similarity index 100% rename from 
src/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleDto.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleDto.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleDtoValidator.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleDtoValidator.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleDtoValidator.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleDtoValidator.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleMapper.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleMapper.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleMapper.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleMapper.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleParser.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleParser.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleParser.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OracleParser.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OraclePatchDocument.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OraclePatchDocument.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OraclePatchDocument.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/Internal/OraclePatchDocument.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Oracle/Jobs.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/Jobs.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Oracle/Jobs.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/Jobs.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Oracle/OracleConnector.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/OracleConnector.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Oracle/OracleConnector.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/OracleConnector.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Oracle/OracleDependencyInjectionRoutine.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/OracleDependencyInjectionRoutine.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Oracle/OracleDependencyInjectionRoutine.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/OracleDependencyInjectionRoutine.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Oracle/OracleServiceCollectionExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/OracleServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Oracle/OracleServiceCollectionExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/OracleServiceCollectionExtensions.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Oracle/Properties/AssemblyInfo.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/Properties/AssemblyInfo.cs similarity index 100% rename from 
src/StellaOps.Concelier.Connector.Vndr.Oracle/Properties/AssemblyInfo.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/Properties/AssemblyInfo.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Oracle/StellaOps.Concelier.Connector.Vndr.Oracle.csproj b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/StellaOps.Concelier.Connector.Vndr.Oracle.csproj similarity index 79% rename from src/StellaOps.Concelier.Connector.Vndr.Oracle/StellaOps.Concelier.Connector.Vndr.Oracle.csproj rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/StellaOps.Concelier.Connector.Vndr.Oracle.csproj index 092f2073..5bd20434 100644 --- a/src/StellaOps.Concelier.Connector.Vndr.Oracle/StellaOps.Concelier.Connector.Vndr.Oracle.csproj +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/StellaOps.Concelier.Connector.Vndr.Oracle.csproj @@ -1,17 +1,17 @@ -<Project Sdk="Microsoft.NET.Sdk"> - - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - - <ItemGroup> - <ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" /> - - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - </ItemGroup> -</Project> - +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + + <ItemGroup> + <ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" /> + + <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Vndr.Oracle/TASKS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/TASKS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Oracle/TASKS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/TASKS.md diff --git a/src/StellaOps.Concelier.Connector.Vndr.Oracle/VndrOracleConnectorPlugin.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/VndrOracleConnectorPlugin.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Oracle/VndrOracleConnectorPlugin.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/VndrOracleConnectorPlugin.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Vmware/AGENTS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Vmware/AGENTS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Vmware/AGENTS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Vmware/AGENTS.md diff --git a/src/StellaOps.Concelier.Connector.Vndr.Vmware/Configuration/VmwareOptions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Vmware/Configuration/VmwareOptions.cs similarity 
index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Vmware/Configuration/VmwareOptions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Vmware/Configuration/VmwareOptions.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Vmware/Internal/VmwareCursor.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Vmware/Internal/VmwareCursor.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Vmware/Internal/VmwareCursor.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Vmware/Internal/VmwareCursor.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Vmware/Internal/VmwareDetailDto.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Vmware/Internal/VmwareDetailDto.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Vmware/Internal/VmwareDetailDto.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Vmware/Internal/VmwareDetailDto.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Vmware/Internal/VmwareFetchCacheEntry.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Vmware/Internal/VmwareFetchCacheEntry.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Vmware/Internal/VmwareFetchCacheEntry.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Vmware/Internal/VmwareFetchCacheEntry.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Vmware/Internal/VmwareIndexItem.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Vmware/Internal/VmwareIndexItem.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Vmware/Internal/VmwareIndexItem.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Vmware/Internal/VmwareIndexItem.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Vmware/Internal/VmwareMapper.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Vmware/Internal/VmwareMapper.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Vmware/Internal/VmwareMapper.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Vmware/Internal/VmwareMapper.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Vmware/Jobs.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Vmware/Jobs.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Vmware/Jobs.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Vmware/Jobs.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Vmware/Properties/AssemblyInfo.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Vmware/Properties/AssemblyInfo.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Vmware/Properties/AssemblyInfo.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Vmware/Properties/AssemblyInfo.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Vmware/StellaOps.Concelier.Connector.Vndr.Vmware.csproj b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Vmware/StellaOps.Concelier.Connector.Vndr.Vmware.csproj similarity index 86% rename from src/StellaOps.Concelier.Connector.Vndr.Vmware/StellaOps.Concelier.Connector.Vndr.Vmware.csproj rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Vmware/StellaOps.Concelier.Connector.Vndr.Vmware.csproj index b2bd7ee1..e3ffc88d 100644 --- a/src/StellaOps.Concelier.Connector.Vndr.Vmware/StellaOps.Concelier.Connector.Vndr.Vmware.csproj +++ 
b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Vmware/StellaOps.Concelier.Connector.Vndr.Vmware.csproj @@ -1,23 +1,23 @@ -<Project Sdk="Microsoft.NET.Sdk"> - - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - - <ItemGroup> - <ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" /> - </ItemGroup> - <ItemGroup> - <AssemblyAttribute Include="System.Runtime.CompilerServices.InternalsVisibleTo"> - <_Parameter1>StellaOps.Concelier.Tests</_Parameter1> - </AssemblyAttribute> - </ItemGroup> -</Project> - +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + + <ItemGroup> + <ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" /> + </ItemGroup> + <ItemGroup> + <AssemblyAttribute Include="System.Runtime.CompilerServices.InternalsVisibleTo"> + <_Parameter1>StellaOps.Concelier.Tests</_Parameter1> + </AssemblyAttribute> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Vndr.Vmware/TASKS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Vmware/TASKS.md similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Vmware/TASKS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Vmware/TASKS.md diff --git a/src/StellaOps.Concelier.Connector.Vndr.Vmware/VmwareConnector.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Vmware/VmwareConnector.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Vmware/VmwareConnector.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Vmware/VmwareConnector.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Vmware/VmwareConnectorPlugin.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Vmware/VmwareConnectorPlugin.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Vmware/VmwareConnectorPlugin.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Vmware/VmwareConnectorPlugin.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Vmware/VmwareDependencyInjectionRoutine.cs 
b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Vmware/VmwareDependencyInjectionRoutine.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Vmware/VmwareDependencyInjectionRoutine.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Vmware/VmwareDependencyInjectionRoutine.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Vmware/VmwareDiagnostics.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Vmware/VmwareDiagnostics.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Vmware/VmwareDiagnostics.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Vmware/VmwareDiagnostics.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Vmware/VmwareServiceCollectionExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Vmware/VmwareServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Vmware/VmwareServiceCollectionExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Connector.Vndr.Vmware/VmwareServiceCollectionExtensions.cs diff --git a/src/StellaOps.Concelier.Core/AGENTS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Core/AGENTS.md similarity index 100% rename from src/StellaOps.Concelier.Core/AGENTS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/AGENTS.md diff --git a/src/StellaOps.Concelier.Core/Aoc/AdvisoryRawWriteGuard.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Aoc/AdvisoryRawWriteGuard.cs similarity index 97% rename from src/StellaOps.Concelier.Core/Aoc/AdvisoryRawWriteGuard.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Aoc/AdvisoryRawWriteGuard.cs index 036a4a2e..7790e42b 100644 --- a/src/StellaOps.Concelier.Core/Aoc/AdvisoryRawWriteGuard.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Aoc/AdvisoryRawWriteGuard.cs @@ -1,35 +1,35 @@ -using System.Text.Json; -using Microsoft.Extensions.Options; -using StellaOps.Aoc; -using StellaOps.Concelier.RawModels; - -namespace StellaOps.Concelier.Core.Aoc; - -/// <summary> -/// Aggregation-Only Contract guard applied to raw advisory documents prior to persistence. -/// </summary> -public sealed class AdvisoryRawWriteGuard : IAdvisoryRawWriteGuard -{ - private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web); - - private readonly IAocGuard _guard; - private readonly AocGuardOptions _options; - - public AdvisoryRawWriteGuard(IAocGuard guard, IOptions<AocGuardOptions>? options = null) - { - _guard = guard ?? throw new ArgumentNullException(nameof(guard)); - _options = options?.Value ?? AocGuardOptions.Default; - } - - public void EnsureValid(AdvisoryRawDocument document) - { - ArgumentNullException.ThrowIfNull(document); - - using var payload = JsonDocument.Parse(JsonSerializer.Serialize(document, SerializerOptions)); - var result = _guard.Validate(payload.RootElement, _options); - if (!result.IsValid) - { - throw new ConcelierAocGuardException(result); - } - } -} +using System.Text.Json; +using Microsoft.Extensions.Options; +using StellaOps.Aoc; +using StellaOps.Concelier.RawModels; + +namespace StellaOps.Concelier.Core.Aoc; + +/// <summary> +/// Aggregation-Only Contract guard applied to raw advisory documents prior to persistence. 
+/// </summary> +public sealed class AdvisoryRawWriteGuard : IAdvisoryRawWriteGuard +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web); + + private readonly IAocGuard _guard; + private readonly AocGuardOptions _options; + + public AdvisoryRawWriteGuard(IAocGuard guard, IOptions<AocGuardOptions>? options = null) + { + _guard = guard ?? throw new ArgumentNullException(nameof(guard)); + _options = options?.Value ?? AocGuardOptions.Default; + } + + public void EnsureValid(AdvisoryRawDocument document) + { + ArgumentNullException.ThrowIfNull(document); + + using var payload = JsonDocument.Parse(JsonSerializer.Serialize(document, SerializerOptions)); + var result = _guard.Validate(payload.RootElement, _options); + if (!result.IsValid) + { + throw new ConcelierAocGuardException(result); + } + } +} diff --git a/src/StellaOps.Concelier.Core/Aoc/AocServiceCollectionExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Aoc/AocServiceCollectionExtensions.cs similarity index 96% rename from src/StellaOps.Concelier.Core/Aoc/AocServiceCollectionExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Aoc/AocServiceCollectionExtensions.cs index 2cc8e555..7d042f70 100644 --- a/src/StellaOps.Concelier.Core/Aoc/AocServiceCollectionExtensions.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Aoc/AocServiceCollectionExtensions.cs @@ -1,40 +1,40 @@ -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.DependencyInjection.Extensions; -using Microsoft.Extensions.Options; -using StellaOps.Aoc; - -namespace StellaOps.Concelier.Core.Aoc; - -public static class AocServiceCollectionExtensions -{ - /// <summary> - /// Registers Aggregation-Only Contract guard services for raw advisory ingestion. - /// </summary> - /// <param name="services">Service collection to configure.</param> - /// <param name="configure">Optional guard configuration.</param> - public static IServiceCollection AddConcelierAocGuards( - this IServiceCollection services, - Action<AocGuardOptions>? configure = null) - { - if (services is null) - { - throw new ArgumentNullException(nameof(services)); - } - - services.AddAocGuard(); - - if (configure is not null) - { - services.Configure(configure); - } - - services.TryAddSingleton<IAdvisoryRawWriteGuard>(sp => - { - var guard = sp.GetRequiredService<IAocGuard>(); - var options = sp.GetService<IOptions<AocGuardOptions>>(); - return new AdvisoryRawWriteGuard(guard, options); - }); - - return services; - } -} +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Microsoft.Extensions.Options; +using StellaOps.Aoc; + +namespace StellaOps.Concelier.Core.Aoc; + +public static class AocServiceCollectionExtensions +{ + /// <summary> + /// Registers Aggregation-Only Contract guard services for raw advisory ingestion. + /// </summary> + /// <param name="services">Service collection to configure.</param> + /// <param name="configure">Optional guard configuration.</param> + public static IServiceCollection AddConcelierAocGuards( + this IServiceCollection services, + Action<AocGuardOptions>? 
configure = null) + { + if (services is null) + { + throw new ArgumentNullException(nameof(services)); + } + + services.AddAocGuard(); + + if (configure is not null) + { + services.Configure(configure); + } + + services.TryAddSingleton<IAdvisoryRawWriteGuard>(sp => + { + var guard = sp.GetRequiredService<IAocGuard>(); + var options = sp.GetService<IOptions<AocGuardOptions>>(); + return new AdvisoryRawWriteGuard(guard, options); + }); + + return services; + } +} diff --git a/src/StellaOps.Concelier.Core/Aoc/ConcelierAocGuardException.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Aoc/ConcelierAocGuardException.cs similarity index 97% rename from src/StellaOps.Concelier.Core/Aoc/ConcelierAocGuardException.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Aoc/ConcelierAocGuardException.cs index 9037f88e..d5a6a674 100644 --- a/src/StellaOps.Concelier.Core/Aoc/ConcelierAocGuardException.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Aoc/ConcelierAocGuardException.cs @@ -1,32 +1,32 @@ -using System.Collections.Immutable; -using StellaOps.Aoc; - -namespace StellaOps.Concelier.Core.Aoc; - -/// <summary> -/// Represents an Aggregation-Only Contract violation produced while validating a raw advisory document. -/// </summary> -public sealed class ConcelierAocGuardException : Exception -{ - public ConcelierAocGuardException(AocGuardResult result) - : base("AOC guard validation failed for the provided raw advisory document.") - { - Result = result ?? throw new ArgumentNullException(nameof(result)); - } - - /// <summary> - /// Guard evaluation result containing the individual violations. - /// </summary> - public AocGuardResult Result { get; } - - /// <summary> - /// Collection of violations returned by the guard. - /// </summary> - public ImmutableArray<AocViolation> Violations => Result.Violations; - - /// <summary> - /// Primary error code (`ERR_AOC_00x`) associated with the guard failure. - /// </summary> - public string PrimaryErrorCode => - Violations.IsDefaultOrEmpty ? "ERR_AOC_000" : Violations[0].ErrorCode; -} +using System.Collections.Immutable; +using StellaOps.Aoc; + +namespace StellaOps.Concelier.Core.Aoc; + +/// <summary> +/// Represents an Aggregation-Only Contract violation produced while validating a raw advisory document. +/// </summary> +public sealed class ConcelierAocGuardException : Exception +{ + public ConcelierAocGuardException(AocGuardResult result) + : base("AOC guard validation failed for the provided raw advisory document.") + { + Result = result ?? throw new ArgumentNullException(nameof(result)); + } + + /// <summary> + /// Guard evaluation result containing the individual violations. + /// </summary> + public AocGuardResult Result { get; } + + /// <summary> + /// Collection of violations returned by the guard. + /// </summary> + public ImmutableArray<AocViolation> Violations => Result.Violations; + + /// <summary> + /// Primary error code (`ERR_AOC_00x`) associated with the guard failure. + /// </summary> + public string PrimaryErrorCode => + Violations.IsDefaultOrEmpty ? 
"ERR_AOC_000" : Violations[0].ErrorCode; +} diff --git a/src/StellaOps.Concelier.Core/Aoc/IAdvisoryRawWriteGuard.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Aoc/IAdvisoryRawWriteGuard.cs similarity index 97% rename from src/StellaOps.Concelier.Core/Aoc/IAdvisoryRawWriteGuard.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Aoc/IAdvisoryRawWriteGuard.cs index 8d814af4..9b532f62 100644 --- a/src/StellaOps.Concelier.Core/Aoc/IAdvisoryRawWriteGuard.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Aoc/IAdvisoryRawWriteGuard.cs @@ -1,16 +1,16 @@ -using StellaOps.Concelier.RawModels; - -namespace StellaOps.Concelier.Core.Aoc; - -/// <summary> -/// Validates raw advisory documents against the Aggregation-Only Contract (AOC) -/// before they are persisted by repositories. -/// </summary> -public interface IAdvisoryRawWriteGuard -{ - /// <summary> - /// Ensures the provided raw advisory document satisfies the AOC guard. Throws when violations are detected. - /// </summary> - /// <param name="document">Raw advisory document to validate.</param> - void EnsureValid(AdvisoryRawDocument document); -} +using StellaOps.Concelier.RawModels; + +namespace StellaOps.Concelier.Core.Aoc; + +/// <summary> +/// Validates raw advisory documents against the Aggregation-Only Contract (AOC) +/// before they are persisted by repositories. +/// </summary> +public interface IAdvisoryRawWriteGuard +{ + /// <summary> + /// Ensures the provided raw advisory document satisfies the AOC guard. Throws when violations are detected. + /// </summary> + /// <param name="document">Raw advisory document to validate.</param> + void EnsureValid(AdvisoryRawDocument document); +} diff --git a/src/StellaOps.Concelier.Core/CanonicalMergeResult.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/CanonicalMergeResult.cs similarity index 100% rename from src/StellaOps.Concelier.Core/CanonicalMergeResult.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/CanonicalMergeResult.cs diff --git a/src/StellaOps.Concelier.Core/CanonicalMerger.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/CanonicalMerger.cs similarity index 100% rename from src/StellaOps.Concelier.Core/CanonicalMerger.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/CanonicalMerger.cs diff --git a/src/StellaOps.Concelier.Core/Events/AdvisoryEventContracts.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Events/AdvisoryEventContracts.cs similarity index 100% rename from src/StellaOps.Concelier.Core/Events/AdvisoryEventContracts.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Events/AdvisoryEventContracts.cs diff --git a/src/StellaOps.Concelier.Core/Events/AdvisoryEventLog.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Events/AdvisoryEventLog.cs similarity index 100% rename from src/StellaOps.Concelier.Core/Events/AdvisoryEventLog.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Events/AdvisoryEventLog.cs diff --git a/src/StellaOps.Concelier.Core/Events/IAdvisoryEventLog.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Events/IAdvisoryEventLog.cs similarity index 100% rename from src/StellaOps.Concelier.Core/Events/IAdvisoryEventLog.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Events/IAdvisoryEventLog.cs diff --git a/src/StellaOps.Concelier.Core/Events/IAdvisoryEventRepository.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Events/IAdvisoryEventRepository.cs similarity index 100% rename from 
src/StellaOps.Concelier.Core/Events/IAdvisoryEventRepository.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Events/IAdvisoryEventRepository.cs diff --git a/src/StellaOps.Concelier.Core/Jobs/IJob.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/IJob.cs similarity index 100% rename from src/StellaOps.Concelier.Core/Jobs/IJob.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/IJob.cs diff --git a/src/StellaOps.Concelier.Core/Jobs/IJobCoordinator.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/IJobCoordinator.cs similarity index 100% rename from src/StellaOps.Concelier.Core/Jobs/IJobCoordinator.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/IJobCoordinator.cs diff --git a/src/StellaOps.Concelier.Core/Jobs/IJobStore.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/IJobStore.cs similarity index 100% rename from src/StellaOps.Concelier.Core/Jobs/IJobStore.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/IJobStore.cs diff --git a/src/StellaOps.Concelier.Core/Jobs/ILeaseStore.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/ILeaseStore.cs similarity index 100% rename from src/StellaOps.Concelier.Core/Jobs/ILeaseStore.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/ILeaseStore.cs diff --git a/src/StellaOps.Concelier.Core/Jobs/JobCoordinator.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/JobCoordinator.cs similarity index 100% rename from src/StellaOps.Concelier.Core/Jobs/JobCoordinator.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/JobCoordinator.cs diff --git a/src/StellaOps.Concelier.Core/Jobs/JobDefinition.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/JobDefinition.cs similarity index 100% rename from src/StellaOps.Concelier.Core/Jobs/JobDefinition.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/JobDefinition.cs diff --git a/src/StellaOps.Concelier.Core/Jobs/JobDiagnostics.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/JobDiagnostics.cs similarity index 100% rename from src/StellaOps.Concelier.Core/Jobs/JobDiagnostics.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/JobDiagnostics.cs diff --git a/src/StellaOps.Concelier.Core/Jobs/JobExecutionContext.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/JobExecutionContext.cs similarity index 100% rename from src/StellaOps.Concelier.Core/Jobs/JobExecutionContext.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/JobExecutionContext.cs diff --git a/src/StellaOps.Concelier.Core/Jobs/JobLease.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/JobLease.cs similarity index 100% rename from src/StellaOps.Concelier.Core/Jobs/JobLease.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/JobLease.cs diff --git a/src/StellaOps.Concelier.Core/Jobs/JobPluginRegistrationExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/JobPluginRegistrationExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.Core/Jobs/JobPluginRegistrationExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/JobPluginRegistrationExtensions.cs diff --git a/src/StellaOps.Concelier.Core/Jobs/JobRunCompletion.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/JobRunCompletion.cs similarity index 100% rename from src/StellaOps.Concelier.Core/Jobs/JobRunCompletion.cs rename to 
src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/JobRunCompletion.cs diff --git a/src/StellaOps.Concelier.Core/Jobs/JobRunCreateRequest.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/JobRunCreateRequest.cs similarity index 100% rename from src/StellaOps.Concelier.Core/Jobs/JobRunCreateRequest.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/JobRunCreateRequest.cs diff --git a/src/StellaOps.Concelier.Core/Jobs/JobRunSnapshot.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/JobRunSnapshot.cs similarity index 100% rename from src/StellaOps.Concelier.Core/Jobs/JobRunSnapshot.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/JobRunSnapshot.cs diff --git a/src/StellaOps.Concelier.Core/Jobs/JobRunStatus.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/JobRunStatus.cs similarity index 100% rename from src/StellaOps.Concelier.Core/Jobs/JobRunStatus.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/JobRunStatus.cs diff --git a/src/StellaOps.Concelier.Core/Jobs/JobSchedulerBuilder.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/JobSchedulerBuilder.cs similarity index 100% rename from src/StellaOps.Concelier.Core/Jobs/JobSchedulerBuilder.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/JobSchedulerBuilder.cs diff --git a/src/StellaOps.Concelier.Core/Jobs/JobSchedulerHostedService.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/JobSchedulerHostedService.cs similarity index 100% rename from src/StellaOps.Concelier.Core/Jobs/JobSchedulerHostedService.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/JobSchedulerHostedService.cs diff --git a/src/StellaOps.Concelier.Core/Jobs/JobSchedulerOptions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/JobSchedulerOptions.cs similarity index 100% rename from src/StellaOps.Concelier.Core/Jobs/JobSchedulerOptions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/JobSchedulerOptions.cs diff --git a/src/StellaOps.Concelier.Core/Jobs/JobTriggerResult.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/JobTriggerResult.cs similarity index 100% rename from src/StellaOps.Concelier.Core/Jobs/JobTriggerResult.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/JobTriggerResult.cs diff --git a/src/StellaOps.Concelier.Core/Jobs/ServiceCollectionExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/ServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.Core/Jobs/ServiceCollectionExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Jobs/ServiceCollectionExtensions.cs diff --git a/src/StellaOps.Concelier.Core/Linksets/AdvisoryLinksetMapper.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Linksets/AdvisoryLinksetMapper.cs similarity index 97% rename from src/StellaOps.Concelier.Core/Linksets/AdvisoryLinksetMapper.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Linksets/AdvisoryLinksetMapper.cs index 7f8661f2..0a7c3cac 100644 --- a/src/StellaOps.Concelier.Core/Linksets/AdvisoryLinksetMapper.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Linksets/AdvisoryLinksetMapper.cs @@ -1,308 +1,308 @@ -using System.Collections.Immutable; -using System.Globalization; -using System.Text.Json; -using StellaOps.Concelier.Models; -using StellaOps.Concelier.RawModels; - -namespace StellaOps.Concelier.Core.Linksets; - -/// <summary> -/// Default implementation of <see 
cref="IAdvisoryLinksetMapper"/> that walks advisory payloads and emits deterministic linkset hints. -/// </summary> -public sealed partial class AdvisoryLinksetMapper : IAdvisoryLinksetMapper -{ - private static readonly HashSet<string> AliasSchemesOfInterest = new(new[] - { - AliasSchemes.Cve, - AliasSchemes.Ghsa, - AliasSchemes.OsV - }, StringComparer.OrdinalIgnoreCase); - - public RawLinkset Map(AdvisoryRawDocument document) - { - ArgumentNullException.ThrowIfNull(document); - - var aliasSet = new HashSet<string>(StringComparer.OrdinalIgnoreCase); - var purlSet = new HashSet<string>(StringComparer.Ordinal); - var cpeSet = new HashSet<string>(StringComparer.Ordinal); - var referenceKeys = new HashSet<ReferenceKey>(ReferenceKeyComparer.Instance); - var references = new List<RawReference>(); - var pointerSet = new HashSet<string>(StringComparer.Ordinal); - - SeedAliases(document.Identifiers, aliasSet, pointerSet); - - if (document.Content.Raw.ValueKind != JsonValueKind.Undefined && - document.Content.Raw.ValueKind != JsonValueKind.Null) - { - Traverse( - document.Content.Raw, - "/content/raw", - aliasSet, - purlSet, - cpeSet, - references, - referenceKeys, - pointerSet); - } - - var aliases = aliasSet - .Select(static alias => alias.ToLowerInvariant()) - .Distinct(StringComparer.Ordinal) - .OrderBy(static alias => alias, StringComparer.Ordinal) - .ToImmutableArray(); - - var purls = purlSet - .OrderBy(static purl => purl, StringComparer.Ordinal) - .ToImmutableArray(); - - var cpes = cpeSet - .OrderBy(static cpe => cpe, StringComparer.Ordinal) - .ToImmutableArray(); - - var referenceArray = references - .OrderBy(static reference => reference.Type, StringComparer.Ordinal) - .ThenBy(static reference => reference.Url, StringComparer.Ordinal) - .ToImmutableArray(); - - var reconciledFrom = pointerSet - .OrderBy(static pointer => pointer, StringComparer.Ordinal) - .ToImmutableArray(); - - return new RawLinkset - { - Aliases = aliases, - PackageUrls = purls, - Cpes = cpes, - References = referenceArray, - ReconciledFrom = reconciledFrom, - Notes = ImmutableDictionary<string, string>.Empty - }; - } - - private static void SeedAliases( - RawIdentifiers identifiers, - HashSet<string> aliasSet, - HashSet<string> pointerSet) - { - if (!identifiers.Aliases.IsDefaultOrEmpty) - { - for (var index = 0; index < identifiers.Aliases.Length; index++) - { - var alias = identifiers.Aliases[index]; - if (TryNormalizeAlias(alias, out var normalized)) - { - var pointer = AppendPointer("/identifiers/aliases", index.ToString(CultureInfo.InvariantCulture)); - pointerSet.Add(pointer); - aliasSet.Add(normalized); - } - } - } - - if (TryNormalizeAlias(identifiers.PrimaryId, out var primaryNormalized)) - { - pointerSet.Add("/identifiers/primary"); - aliasSet.Add(primaryNormalized); - } - } - - private static void Traverse( - JsonElement element, - string pointer, - HashSet<string> aliasSet, - HashSet<string> purlSet, - HashSet<string> cpeSet, - List<RawReference> references, - HashSet<ReferenceKey> referenceKeys, - HashSet<string> pointerSet) - { - switch (element.ValueKind) - { - case JsonValueKind.Object: - if (TryExtractReference(element, pointer, out var reference, out var referencePointer)) - { - pointerSet.Add(referencePointer); - var key = new ReferenceKey(reference.Url, reference.Type, reference.Source); - if (referenceKeys.Add(key)) - { - references.Add(reference); - } - } - - foreach (var property in element.EnumerateObject()) - { - var childPointer = AppendPointer(pointer, property.Name); - Traverse( - 
property.Value, - childPointer, - aliasSet, - purlSet, - cpeSet, - references, - referenceKeys, - pointerSet); - } - break; - - case JsonValueKind.Array: - var index = 0; - foreach (var item in element.EnumerateArray()) - { - var childPointer = AppendPointer(pointer, index.ToString(CultureInfo.InvariantCulture)); - Traverse( - item, - childPointer, - aliasSet, - purlSet, - cpeSet, - references, - referenceKeys, - pointerSet); - index++; - } - break; - - case JsonValueKind.String: - var value = element.GetString(); - if (string.IsNullOrWhiteSpace(value)) - { - break; - } - - var trimmed = value.Trim(); - if (TryNormalizeAlias(trimmed, out var aliasNormalized)) - { - pointerSet.Add(pointer); - aliasSet.Add(aliasNormalized); - } - - if (LinksetNormalization.TryNormalizePackageUrl(trimmed, out var normalizedPurl) && - !string.IsNullOrEmpty(normalizedPurl)) - { - pointerSet.Add(pointer); - purlSet.Add(normalizedPurl); - } - - if (LinksetNormalization.TryNormalizeCpe(trimmed, out var normalizedCpe) && - !string.IsNullOrEmpty(normalizedCpe)) - { - pointerSet.Add(pointer); - cpeSet.Add(normalizedCpe); - } - break; - } - } - - private static bool TryNormalizeAlias(string? candidate, out string normalized) - { - normalized = string.Empty; - if (!LinksetNormalization.TryNormalizeAlias(candidate, out var canonical)) - { - return false; - } - - if (!AliasSchemeRegistry.TryGetScheme(canonical, out var scheme)) - { - return false; - } - - if (!AliasSchemesOfInterest.Contains(scheme)) - { - return false; - } - - normalized = canonical.ToLowerInvariant(); - return true; - } - - private static bool TryExtractReference(JsonElement element, string pointer, out RawReference reference, out string referencePointer) - { - reference = default!; - referencePointer = string.Empty; - - if (!element.TryGetProperty("url", out var urlElement) || urlElement.ValueKind != JsonValueKind.String) - { - return false; - } - - var url = Validation.TrimToNull(urlElement.GetString()); - if (url is null || !Validation.LooksLikeHttpUrl(url)) - { - return false; - } - - string? type = null; - if (element.TryGetProperty("type", out var typeElement) && typeElement.ValueKind == JsonValueKind.String) - { - type = Validation.TrimToNull(typeElement.GetString()); - } - else if (element.TryGetProperty("category", out var categoryElement) && categoryElement.ValueKind == JsonValueKind.String) - { - type = Validation.TrimToNull(categoryElement.GetString()); - } - - var source = element.TryGetProperty("source", out var sourceElement) && sourceElement.ValueKind == JsonValueKind.String - ? Validation.TrimToNull(sourceElement.GetString()) - : null; - - reference = new RawReference( - Type: string.IsNullOrWhiteSpace(type) ? "unspecified" : type!.ToLowerInvariant(), - Url: url, - Source: source); - - referencePointer = AppendPointer(pointer, "url"); - return true; - } - - private static string AppendPointer(string pointer, string token) - { - ArgumentNullException.ThrowIfNull(token); - - static string Encode(string value) - => value.Replace("~", "~0", StringComparison.Ordinal).Replace("/", "~1", StringComparison.Ordinal); - - var encoded = Encode(token); - - if (string.IsNullOrEmpty(pointer)) - { - return "/" + encoded; - } - - if (pointer == "/") - { - return "/" + encoded; - } - - if (pointer.EndsWith("/", StringComparison.Ordinal)) - { - return pointer + encoded; - } - - return pointer + "/" + encoded; - } - - private readonly record struct ReferenceKey(string Url, string Type, string? 
Source); - - private sealed class ReferenceKeyComparer : IEqualityComparer<ReferenceKey> - { - public static readonly ReferenceKeyComparer Instance = new(); - - public bool Equals(ReferenceKey x, ReferenceKey y) - { - return string.Equals(x.Url, y.Url, StringComparison.OrdinalIgnoreCase) - && string.Equals(x.Type, y.Type, StringComparison.OrdinalIgnoreCase) - && string.Equals(x.Source ?? string.Empty, y.Source ?? string.Empty, StringComparison.OrdinalIgnoreCase); - } - - public int GetHashCode(ReferenceKey obj) - { - var hash = StringComparer.OrdinalIgnoreCase.GetHashCode(obj.Url); - hash = (hash * 397) ^ StringComparer.OrdinalIgnoreCase.GetHashCode(obj.Type); - if (!string.IsNullOrEmpty(obj.Source)) - { - hash = (hash * 397) ^ StringComparer.OrdinalIgnoreCase.GetHashCode(obj.Source); - } - - return hash; - } - } -} +using System.Collections.Immutable; +using System.Globalization; +using System.Text.Json; +using StellaOps.Concelier.Models; +using StellaOps.Concelier.RawModels; + +namespace StellaOps.Concelier.Core.Linksets; + +/// <summary> +/// Default implementation of <see cref="IAdvisoryLinksetMapper"/> that walks advisory payloads and emits deterministic linkset hints. +/// </summary> +public sealed partial class AdvisoryLinksetMapper : IAdvisoryLinksetMapper +{ + private static readonly HashSet<string> AliasSchemesOfInterest = new(new[] + { + AliasSchemes.Cve, + AliasSchemes.Ghsa, + AliasSchemes.OsV + }, StringComparer.OrdinalIgnoreCase); + + public RawLinkset Map(AdvisoryRawDocument document) + { + ArgumentNullException.ThrowIfNull(document); + + var aliasSet = new HashSet<string>(StringComparer.OrdinalIgnoreCase); + var purlSet = new HashSet<string>(StringComparer.Ordinal); + var cpeSet = new HashSet<string>(StringComparer.Ordinal); + var referenceKeys = new HashSet<ReferenceKey>(ReferenceKeyComparer.Instance); + var references = new List<RawReference>(); + var pointerSet = new HashSet<string>(StringComparer.Ordinal); + + SeedAliases(document.Identifiers, aliasSet, pointerSet); + + if (document.Content.Raw.ValueKind != JsonValueKind.Undefined && + document.Content.Raw.ValueKind != JsonValueKind.Null) + { + Traverse( + document.Content.Raw, + "/content/raw", + aliasSet, + purlSet, + cpeSet, + references, + referenceKeys, + pointerSet); + } + + var aliases = aliasSet + .Select(static alias => alias.ToLowerInvariant()) + .Distinct(StringComparer.Ordinal) + .OrderBy(static alias => alias, StringComparer.Ordinal) + .ToImmutableArray(); + + var purls = purlSet + .OrderBy(static purl => purl, StringComparer.Ordinal) + .ToImmutableArray(); + + var cpes = cpeSet + .OrderBy(static cpe => cpe, StringComparer.Ordinal) + .ToImmutableArray(); + + var referenceArray = references + .OrderBy(static reference => reference.Type, StringComparer.Ordinal) + .ThenBy(static reference => reference.Url, StringComparer.Ordinal) + .ToImmutableArray(); + + var reconciledFrom = pointerSet + .OrderBy(static pointer => pointer, StringComparer.Ordinal) + .ToImmutableArray(); + + return new RawLinkset + { + Aliases = aliases, + PackageUrls = purls, + Cpes = cpes, + References = referenceArray, + ReconciledFrom = reconciledFrom, + Notes = ImmutableDictionary<string, string>.Empty + }; + } + + private static void SeedAliases( + RawIdentifiers identifiers, + HashSet<string> aliasSet, + HashSet<string> pointerSet) + { + if (!identifiers.Aliases.IsDefaultOrEmpty) + { + for (var index = 0; index < identifiers.Aliases.Length; index++) + { + var alias = identifiers.Aliases[index]; + if (TryNormalizeAlias(alias, out var 
normalized)) + { + var pointer = AppendPointer("/identifiers/aliases", index.ToString(CultureInfo.InvariantCulture)); + pointerSet.Add(pointer); + aliasSet.Add(normalized); + } + } + } + + if (TryNormalizeAlias(identifiers.PrimaryId, out var primaryNormalized)) + { + pointerSet.Add("/identifiers/primary"); + aliasSet.Add(primaryNormalized); + } + } + + private static void Traverse( + JsonElement element, + string pointer, + HashSet<string> aliasSet, + HashSet<string> purlSet, + HashSet<string> cpeSet, + List<RawReference> references, + HashSet<ReferenceKey> referenceKeys, + HashSet<string> pointerSet) + { + switch (element.ValueKind) + { + case JsonValueKind.Object: + if (TryExtractReference(element, pointer, out var reference, out var referencePointer)) + { + pointerSet.Add(referencePointer); + var key = new ReferenceKey(reference.Url, reference.Type, reference.Source); + if (referenceKeys.Add(key)) + { + references.Add(reference); + } + } + + foreach (var property in element.EnumerateObject()) + { + var childPointer = AppendPointer(pointer, property.Name); + Traverse( + property.Value, + childPointer, + aliasSet, + purlSet, + cpeSet, + references, + referenceKeys, + pointerSet); + } + break; + + case JsonValueKind.Array: + var index = 0; + foreach (var item in element.EnumerateArray()) + { + var childPointer = AppendPointer(pointer, index.ToString(CultureInfo.InvariantCulture)); + Traverse( + item, + childPointer, + aliasSet, + purlSet, + cpeSet, + references, + referenceKeys, + pointerSet); + index++; + } + break; + + case JsonValueKind.String: + var value = element.GetString(); + if (string.IsNullOrWhiteSpace(value)) + { + break; + } + + var trimmed = value.Trim(); + if (TryNormalizeAlias(trimmed, out var aliasNormalized)) + { + pointerSet.Add(pointer); + aliasSet.Add(aliasNormalized); + } + + if (LinksetNormalization.TryNormalizePackageUrl(trimmed, out var normalizedPurl) && + !string.IsNullOrEmpty(normalizedPurl)) + { + pointerSet.Add(pointer); + purlSet.Add(normalizedPurl); + } + + if (LinksetNormalization.TryNormalizeCpe(trimmed, out var normalizedCpe) && + !string.IsNullOrEmpty(normalizedCpe)) + { + pointerSet.Add(pointer); + cpeSet.Add(normalizedCpe); + } + break; + } + } + + private static bool TryNormalizeAlias(string? candidate, out string normalized) + { + normalized = string.Empty; + if (!LinksetNormalization.TryNormalizeAlias(candidate, out var canonical)) + { + return false; + } + + if (!AliasSchemeRegistry.TryGetScheme(canonical, out var scheme)) + { + return false; + } + + if (!AliasSchemesOfInterest.Contains(scheme)) + { + return false; + } + + normalized = canonical.ToLowerInvariant(); + return true; + } + + private static bool TryExtractReference(JsonElement element, string pointer, out RawReference reference, out string referencePointer) + { + reference = default!; + referencePointer = string.Empty; + + if (!element.TryGetProperty("url", out var urlElement) || urlElement.ValueKind != JsonValueKind.String) + { + return false; + } + + var url = Validation.TrimToNull(urlElement.GetString()); + if (url is null || !Validation.LooksLikeHttpUrl(url)) + { + return false; + } + + string? 
type = null; + if (element.TryGetProperty("type", out var typeElement) && typeElement.ValueKind == JsonValueKind.String) + { + type = Validation.TrimToNull(typeElement.GetString()); + } + else if (element.TryGetProperty("category", out var categoryElement) && categoryElement.ValueKind == JsonValueKind.String) + { + type = Validation.TrimToNull(categoryElement.GetString()); + } + + var source = element.TryGetProperty("source", out var sourceElement) && sourceElement.ValueKind == JsonValueKind.String + ? Validation.TrimToNull(sourceElement.GetString()) + : null; + + reference = new RawReference( + Type: string.IsNullOrWhiteSpace(type) ? "unspecified" : type!.ToLowerInvariant(), + Url: url, + Source: source); + + referencePointer = AppendPointer(pointer, "url"); + return true; + } + + private static string AppendPointer(string pointer, string token) + { + ArgumentNullException.ThrowIfNull(token); + + static string Encode(string value) + => value.Replace("~", "~0", StringComparison.Ordinal).Replace("/", "~1", StringComparison.Ordinal); + + var encoded = Encode(token); + + if (string.IsNullOrEmpty(pointer)) + { + return "/" + encoded; + } + + if (pointer == "/") + { + return "/" + encoded; + } + + if (pointer.EndsWith("/", StringComparison.Ordinal)) + { + return pointer + encoded; + } + + return pointer + "/" + encoded; + } + + private readonly record struct ReferenceKey(string Url, string Type, string? Source); + + private sealed class ReferenceKeyComparer : IEqualityComparer<ReferenceKey> + { + public static readonly ReferenceKeyComparer Instance = new(); + + public bool Equals(ReferenceKey x, ReferenceKey y) + { + return string.Equals(x.Url, y.Url, StringComparison.OrdinalIgnoreCase) + && string.Equals(x.Type, y.Type, StringComparison.OrdinalIgnoreCase) + && string.Equals(x.Source ?? string.Empty, y.Source ?? string.Empty, StringComparison.OrdinalIgnoreCase); + } + + public int GetHashCode(ReferenceKey obj) + { + var hash = StringComparer.OrdinalIgnoreCase.GetHashCode(obj.Url); + hash = (hash * 397) ^ StringComparer.OrdinalIgnoreCase.GetHashCode(obj.Type); + if (!string.IsNullOrEmpty(obj.Source)) + { + hash = (hash * 397) ^ StringComparer.OrdinalIgnoreCase.GetHashCode(obj.Source); + } + + return hash; + } + } +} diff --git a/src/StellaOps.Concelier.Core/Linksets/AdvisoryObservationFactory.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Linksets/AdvisoryObservationFactory.cs similarity index 97% rename from src/StellaOps.Concelier.Core/Linksets/AdvisoryObservationFactory.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Linksets/AdvisoryObservationFactory.cs index 8393d93e..1e4a5ac0 100644 --- a/src/StellaOps.Concelier.Core/Linksets/AdvisoryObservationFactory.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Linksets/AdvisoryObservationFactory.cs @@ -1,288 +1,288 @@ -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Text.Json; -using System.Text.Json.Nodes; -using StellaOps.Concelier.Models; -using StellaOps.Concelier.Models.Observations; -using StellaOps.Concelier.RawModels; - -namespace StellaOps.Concelier.Core.Linksets; - -/// <summary> -/// Builds <see cref="AdvisoryObservation"/> instances from raw advisory documents, -/// applying deterministic normalization across identifiers, linkset hints, and metadata. -/// </summary> -internal sealed class AdvisoryObservationFactory : IAdvisoryObservationFactory -{ - public AdvisoryObservation Create(AdvisoryRawDocument rawDocument, DateTimeOffset? 
observedAt = null) - { - ArgumentNullException.ThrowIfNull(rawDocument); - - var source = CreateSource(rawDocument.Source, rawDocument.Upstream); - var upstream = CreateUpstream(rawDocument.Upstream); - var content = CreateContent(rawDocument.Content); - var linkset = CreateLinkset(rawDocument.Identifiers, rawDocument.Linkset); - var attributes = CreateAttributes(rawDocument); - - var createdAt = (observedAt ?? rawDocument.Upstream.RetrievedAt).ToUniversalTime(); - - return new AdvisoryObservation( - observationId: BuildObservationId(rawDocument), - tenant: rawDocument.Tenant, - source: source, - upstream: upstream, - content: content, - linkset: linkset, - createdAt: createdAt, - attributes: attributes); - } - - private static AdvisoryObservationSource CreateSource(RawSourceMetadata source, RawUpstreamMetadata upstream) - { - ArgumentNullException.ThrowIfNull(source); - ArgumentNullException.ThrowIfNull(upstream); - - var stream = Validation.TrimToNull(source.Stream) ?? source.Connector; - var api = ResolveApi(source, upstream); - return new AdvisoryObservationSource( - vendor: source.Vendor, - stream: stream, - api: api, - collectorVersion: source.ConnectorVersion); - } - - private static string ResolveApi(RawSourceMetadata source, RawUpstreamMetadata upstream) - { - if (upstream.Provenance is not null) - { - if (upstream.Provenance.TryGetValue("api", out var apiValue) && !string.IsNullOrWhiteSpace(apiValue)) - { - return apiValue.Trim(); - } - - if (upstream.Provenance.TryGetValue("endpoint", out var endpoint) && !string.IsNullOrWhiteSpace(endpoint)) - { - return endpoint.Trim(); - } - } - - return source.Connector; - } - - private static AdvisoryObservationUpstream CreateUpstream(RawUpstreamMetadata upstream) - { - var signature = new AdvisoryObservationSignature( - upstream.Signature.Present, - upstream.Signature.Format, - upstream.Signature.KeyId, - upstream.Signature.Signature); - - var metadata = upstream.Provenance ?? ImmutableDictionary<string, string>.Empty; - - return new AdvisoryObservationUpstream( - upstreamId: upstream.UpstreamId, - documentVersion: upstream.DocumentVersion, - fetchedAt: upstream.RetrievedAt.ToUniversalTime(), - receivedAt: upstream.RetrievedAt.ToUniversalTime(), - contentHash: upstream.ContentHash, - signature: signature, - metadata: metadata); - } - - private static AdvisoryObservationContent CreateContent(RawContent content) - { - var rawNode = ParseJson(content.Raw); - return new AdvisoryObservationContent( - format: content.Format, - specVersion: content.SpecVersion, - raw: rawNode, - metadata: ImmutableDictionary<string, string>.Empty); - } - - private static JsonNode ParseJson(JsonElement element) - { - if (element.ValueKind is JsonValueKind.Undefined or JsonValueKind.Null) - { - return JsonNode.Parse("{}")!; - } - - using var document = JsonDocument.Parse(element.GetRawText()); - return JsonNode.Parse(document.RootElement.GetRawText()) ?? 
JsonNode.Parse("{}")!; - } - - private static AdvisoryObservationLinkset CreateLinkset(RawIdentifiers identifiers, RawLinkset linkset) - { - var aliases = NormalizeAliases(identifiers, linkset); - var purls = NormalizePackageUrls(linkset.PackageUrls); - var cpes = NormalizeCpes(linkset.Cpes); - var references = NormalizeReferences(linkset.References); - - return new AdvisoryObservationLinkset(aliases, purls, cpes, references); - } - - private static IEnumerable<string> NormalizeAliases(RawIdentifiers identifiers, RawLinkset linkset) - { - var aliases = new HashSet<string>(StringComparer.OrdinalIgnoreCase); - - if (LinksetNormalization.TryNormalizeAlias(identifiers.PrimaryId, out var primary)) - { - aliases.Add(primary); - } - - foreach (var alias in identifiers.Aliases) - { - if (LinksetNormalization.TryNormalizeAlias(alias, out var normalized)) - { - aliases.Add(normalized); - } - } - - foreach (var alias in linkset.Aliases) - { - if (LinksetNormalization.TryNormalizeAlias(alias, out var normalized)) - { - aliases.Add(normalized); - } - } - - foreach (var note in linkset.Notes) - { - if (!string.IsNullOrWhiteSpace(note.Value) - && LinksetNormalization.TryNormalizeAlias(note.Value, out var normalized)) - { - aliases.Add(normalized); - } - } - - return aliases - .OrderBy(static value => value, StringComparer.OrdinalIgnoreCase) - .ToImmutableArray(); - } - - private static IEnumerable<string> NormalizePackageUrls(ImmutableArray<string> packageUrls) - { - if (packageUrls.IsDefaultOrEmpty) - { - return ImmutableArray<string>.Empty; - } - - var set = new HashSet<string>(StringComparer.Ordinal); - - foreach (var candidate in packageUrls) - { - if (!LinksetNormalization.TryNormalizePackageUrl(candidate, out var normalized) || string.IsNullOrEmpty(normalized)) - { - continue; - } - - set.Add(normalized); - } - - return set - .OrderBy(static value => value, StringComparer.Ordinal) - .ToImmutableArray(); - } - - private static IEnumerable<string> NormalizeCpes(ImmutableArray<string> cpes) - { - if (cpes.IsDefaultOrEmpty) - { - return ImmutableArray<string>.Empty; - } - - var set = new HashSet<string>(StringComparer.Ordinal); - - foreach (var cpe in cpes) - { - if (!LinksetNormalization.TryNormalizeCpe(cpe, out var normalized) || string.IsNullOrEmpty(normalized)) - { - continue; - } - - set.Add(normalized); - } - - return set - .OrderBy(static value => value, StringComparer.Ordinal) - .ToImmutableArray(); - } - - private static IEnumerable<AdvisoryObservationReference> NormalizeReferences(ImmutableArray<RawReference> references) - { - if (references.IsDefaultOrEmpty) - { - return ImmutableArray<AdvisoryObservationReference>.Empty; - } - - var seen = new HashSet<string>(StringComparer.OrdinalIgnoreCase); - var list = new List<AdvisoryObservationReference>(); - - foreach (var reference in references) - { - var normalized = LinksetNormalization.TryCreateReference(reference.Type, reference.Url); - if (normalized is null) - { - continue; - } - - if (!seen.Add(normalized.Url)) - { - continue; - } - - list.Add(normalized); - } - - return list - .OrderBy(static reference => reference.Type, StringComparer.Ordinal) - .ThenBy(static reference => reference.Url, StringComparer.Ordinal) - .ToImmutableArray(); - } - - private static ImmutableDictionary<string, string> CreateAttributes(AdvisoryRawDocument rawDocument) - { - var builder = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal); - - if (!string.IsNullOrWhiteSpace(rawDocument.Supersedes)) - { - builder["supersedes"] = 
rawDocument.Supersedes.Trim(); - } - - foreach (var note in rawDocument.Linkset.Notes) - { - if (string.IsNullOrWhiteSpace(note.Key) || note.Value is null) - { - continue; - } - - var key = $"linkset.note.{note.Key.Trim()}"; - builder[key] = note.Value; - } - - if (!rawDocument.Linkset.ReconciledFrom.IsDefaultOrEmpty && rawDocument.Linkset.ReconciledFrom.Length > 0) - { - var sources = rawDocument.Linkset.ReconciledFrom - .Where(static value => !string.IsNullOrWhiteSpace(value)) - .Select(static value => value.Trim()) - .ToArray(); - - if (sources.Length > 0) - { - builder["linkset.reconciled_from"] = string.Join(";", sources); - } - } - - return builder.Count == 0 ? ImmutableDictionary<string, string>.Empty : builder.ToImmutable(); - } - - private static string BuildObservationId(AdvisoryRawDocument rawDocument) - { - // Deterministic observation id format: - // {tenant}:{source.vendor}:{upstreamId}:{contentHash} - var tenant = Validation.EnsureNotNullOrWhiteSpace(rawDocument.Tenant, nameof(rawDocument.Tenant)).ToLowerInvariant(); - var vendor = Validation.EnsureNotNullOrWhiteSpace(rawDocument.Source.Vendor, nameof(rawDocument.Source.Vendor)).ToLowerInvariant(); - var upstreamId = Validation.TrimToNull(rawDocument.Upstream.UpstreamId) ?? rawDocument.Content.Raw.ToString(); - var contentHash = Validation.TrimToNull(rawDocument.Upstream.ContentHash) ?? "sha256:unknown"; - return $"{tenant}:{vendor}:{upstreamId}:{contentHash}".ToLowerInvariant(); - } -} +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Text.Json; +using System.Text.Json.Nodes; +using StellaOps.Concelier.Models; +using StellaOps.Concelier.Models.Observations; +using StellaOps.Concelier.RawModels; + +namespace StellaOps.Concelier.Core.Linksets; + +/// <summary> +/// Builds <see cref="AdvisoryObservation"/> instances from raw advisory documents, +/// applying deterministic normalization across identifiers, linkset hints, and metadata. +/// </summary> +internal sealed class AdvisoryObservationFactory : IAdvisoryObservationFactory +{ + public AdvisoryObservation Create(AdvisoryRawDocument rawDocument, DateTimeOffset? observedAt = null) + { + ArgumentNullException.ThrowIfNull(rawDocument); + + var source = CreateSource(rawDocument.Source, rawDocument.Upstream); + var upstream = CreateUpstream(rawDocument.Upstream); + var content = CreateContent(rawDocument.Content); + var linkset = CreateLinkset(rawDocument.Identifiers, rawDocument.Linkset); + var attributes = CreateAttributes(rawDocument); + + var createdAt = (observedAt ?? rawDocument.Upstream.RetrievedAt).ToUniversalTime(); + + return new AdvisoryObservation( + observationId: BuildObservationId(rawDocument), + tenant: rawDocument.Tenant, + source: source, + upstream: upstream, + content: content, + linkset: linkset, + createdAt: createdAt, + attributes: attributes); + } + + private static AdvisoryObservationSource CreateSource(RawSourceMetadata source, RawUpstreamMetadata upstream) + { + ArgumentNullException.ThrowIfNull(source); + ArgumentNullException.ThrowIfNull(upstream); + + var stream = Validation.TrimToNull(source.Stream) ?? 
source.Connector; + var api = ResolveApi(source, upstream); + return new AdvisoryObservationSource( + vendor: source.Vendor, + stream: stream, + api: api, + collectorVersion: source.ConnectorVersion); + } + + private static string ResolveApi(RawSourceMetadata source, RawUpstreamMetadata upstream) + { + if (upstream.Provenance is not null) + { + if (upstream.Provenance.TryGetValue("api", out var apiValue) && !string.IsNullOrWhiteSpace(apiValue)) + { + return apiValue.Trim(); + } + + if (upstream.Provenance.TryGetValue("endpoint", out var endpoint) && !string.IsNullOrWhiteSpace(endpoint)) + { + return endpoint.Trim(); + } + } + + return source.Connector; + } + + private static AdvisoryObservationUpstream CreateUpstream(RawUpstreamMetadata upstream) + { + var signature = new AdvisoryObservationSignature( + upstream.Signature.Present, + upstream.Signature.Format, + upstream.Signature.KeyId, + upstream.Signature.Signature); + + var metadata = upstream.Provenance ?? ImmutableDictionary<string, string>.Empty; + + return new AdvisoryObservationUpstream( + upstreamId: upstream.UpstreamId, + documentVersion: upstream.DocumentVersion, + fetchedAt: upstream.RetrievedAt.ToUniversalTime(), + receivedAt: upstream.RetrievedAt.ToUniversalTime(), + contentHash: upstream.ContentHash, + signature: signature, + metadata: metadata); + } + + private static AdvisoryObservationContent CreateContent(RawContent content) + { + var rawNode = ParseJson(content.Raw); + return new AdvisoryObservationContent( + format: content.Format, + specVersion: content.SpecVersion, + raw: rawNode, + metadata: ImmutableDictionary<string, string>.Empty); + } + + private static JsonNode ParseJson(JsonElement element) + { + if (element.ValueKind is JsonValueKind.Undefined or JsonValueKind.Null) + { + return JsonNode.Parse("{}")!; + } + + using var document = JsonDocument.Parse(element.GetRawText()); + return JsonNode.Parse(document.RootElement.GetRawText()) ?? 
JsonNode.Parse("{}")!; + } + + private static AdvisoryObservationLinkset CreateLinkset(RawIdentifiers identifiers, RawLinkset linkset) + { + var aliases = NormalizeAliases(identifiers, linkset); + var purls = NormalizePackageUrls(linkset.PackageUrls); + var cpes = NormalizeCpes(linkset.Cpes); + var references = NormalizeReferences(linkset.References); + + return new AdvisoryObservationLinkset(aliases, purls, cpes, references); + } + + private static IEnumerable<string> NormalizeAliases(RawIdentifiers identifiers, RawLinkset linkset) + { + var aliases = new HashSet<string>(StringComparer.OrdinalIgnoreCase); + + if (LinksetNormalization.TryNormalizeAlias(identifiers.PrimaryId, out var primary)) + { + aliases.Add(primary); + } + + foreach (var alias in identifiers.Aliases) + { + if (LinksetNormalization.TryNormalizeAlias(alias, out var normalized)) + { + aliases.Add(normalized); + } + } + + foreach (var alias in linkset.Aliases) + { + if (LinksetNormalization.TryNormalizeAlias(alias, out var normalized)) + { + aliases.Add(normalized); + } + } + + foreach (var note in linkset.Notes) + { + if (!string.IsNullOrWhiteSpace(note.Value) + && LinksetNormalization.TryNormalizeAlias(note.Value, out var normalized)) + { + aliases.Add(normalized); + } + } + + return aliases + .OrderBy(static value => value, StringComparer.OrdinalIgnoreCase) + .ToImmutableArray(); + } + + private static IEnumerable<string> NormalizePackageUrls(ImmutableArray<string> packageUrls) + { + if (packageUrls.IsDefaultOrEmpty) + { + return ImmutableArray<string>.Empty; + } + + var set = new HashSet<string>(StringComparer.Ordinal); + + foreach (var candidate in packageUrls) + { + if (!LinksetNormalization.TryNormalizePackageUrl(candidate, out var normalized) || string.IsNullOrEmpty(normalized)) + { + continue; + } + + set.Add(normalized); + } + + return set + .OrderBy(static value => value, StringComparer.Ordinal) + .ToImmutableArray(); + } + + private static IEnumerable<string> NormalizeCpes(ImmutableArray<string> cpes) + { + if (cpes.IsDefaultOrEmpty) + { + return ImmutableArray<string>.Empty; + } + + var set = new HashSet<string>(StringComparer.Ordinal); + + foreach (var cpe in cpes) + { + if (!LinksetNormalization.TryNormalizeCpe(cpe, out var normalized) || string.IsNullOrEmpty(normalized)) + { + continue; + } + + set.Add(normalized); + } + + return set + .OrderBy(static value => value, StringComparer.Ordinal) + .ToImmutableArray(); + } + + private static IEnumerable<AdvisoryObservationReference> NormalizeReferences(ImmutableArray<RawReference> references) + { + if (references.IsDefaultOrEmpty) + { + return ImmutableArray<AdvisoryObservationReference>.Empty; + } + + var seen = new HashSet<string>(StringComparer.OrdinalIgnoreCase); + var list = new List<AdvisoryObservationReference>(); + + foreach (var reference in references) + { + var normalized = LinksetNormalization.TryCreateReference(reference.Type, reference.Url); + if (normalized is null) + { + continue; + } + + if (!seen.Add(normalized.Url)) + { + continue; + } + + list.Add(normalized); + } + + return list + .OrderBy(static reference => reference.Type, StringComparer.Ordinal) + .ThenBy(static reference => reference.Url, StringComparer.Ordinal) + .ToImmutableArray(); + } + + private static ImmutableDictionary<string, string> CreateAttributes(AdvisoryRawDocument rawDocument) + { + var builder = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal); + + if (!string.IsNullOrWhiteSpace(rawDocument.Supersedes)) + { + builder["supersedes"] = 
rawDocument.Supersedes.Trim(); + } + + foreach (var note in rawDocument.Linkset.Notes) + { + if (string.IsNullOrWhiteSpace(note.Key) || note.Value is null) + { + continue; + } + + var key = $"linkset.note.{note.Key.Trim()}"; + builder[key] = note.Value; + } + + if (!rawDocument.Linkset.ReconciledFrom.IsDefaultOrEmpty && rawDocument.Linkset.ReconciledFrom.Length > 0) + { + var sources = rawDocument.Linkset.ReconciledFrom + .Where(static value => !string.IsNullOrWhiteSpace(value)) + .Select(static value => value.Trim()) + .ToArray(); + + if (sources.Length > 0) + { + builder["linkset.reconciled_from"] = string.Join(";", sources); + } + } + + return builder.Count == 0 ? ImmutableDictionary<string, string>.Empty : builder.ToImmutable(); + } + + private static string BuildObservationId(AdvisoryRawDocument rawDocument) + { + // Deterministic observation id format: + // {tenant}:{source.vendor}:{upstreamId}:{contentHash} + var tenant = Validation.EnsureNotNullOrWhiteSpace(rawDocument.Tenant, nameof(rawDocument.Tenant)).ToLowerInvariant(); + var vendor = Validation.EnsureNotNullOrWhiteSpace(rawDocument.Source.Vendor, nameof(rawDocument.Source.Vendor)).ToLowerInvariant(); + var upstreamId = Validation.TrimToNull(rawDocument.Upstream.UpstreamId) ?? rawDocument.Content.Raw.ToString(); + var contentHash = Validation.TrimToNull(rawDocument.Upstream.ContentHash) ?? "sha256:unknown"; + return $"{tenant}:{vendor}:{upstreamId}:{contentHash}".ToLowerInvariant(); + } +} diff --git a/src/StellaOps.Concelier.Core/Linksets/IAdvisoryLinksetMapper.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Linksets/IAdvisoryLinksetMapper.cs similarity index 97% rename from src/StellaOps.Concelier.Core/Linksets/IAdvisoryLinksetMapper.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Linksets/IAdvisoryLinksetMapper.cs index 8b550133..87bd3976 100644 --- a/src/StellaOps.Concelier.Core/Linksets/IAdvisoryLinksetMapper.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Linksets/IAdvisoryLinksetMapper.cs @@ -1,16 +1,16 @@ -using StellaOps.Concelier.RawModels; - -namespace StellaOps.Concelier.Core.Linksets; - -/// <summary> -/// Produces canonical linkset hints for advisory raw documents. -/// </summary> -public interface IAdvisoryLinksetMapper -{ - /// <summary> - /// Extracts deterministic linkset signals (aliases, package coordinates, references) from the provided raw document. - /// </summary> - /// <param name="document">The advisory raw document to analyse.</param> - /// <returns>A normalized <see cref="RawLinkset"/> payload.</returns> - RawLinkset Map(AdvisoryRawDocument document); -} +using StellaOps.Concelier.RawModels; + +namespace StellaOps.Concelier.Core.Linksets; + +/// <summary> +/// Produces canonical linkset hints for advisory raw documents. +/// </summary> +public interface IAdvisoryLinksetMapper +{ + /// <summary> + /// Extracts deterministic linkset signals (aliases, package coordinates, references) from the provided raw document. 
+    /// </summary>
+    /// <param name="document">The advisory raw document to analyse.</param>
+    /// <returns>A normalized <see cref="RawLinkset"/> payload.</returns>
+    RawLinkset Map(AdvisoryRawDocument document);
+}
diff --git a/src/StellaOps.Concelier.Core/Linksets/IAdvisoryObservationFactory.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Linksets/IAdvisoryObservationFactory.cs
similarity index 96%
rename from src/StellaOps.Concelier.Core/Linksets/IAdvisoryObservationFactory.cs
rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Linksets/IAdvisoryObservationFactory.cs
index 70fb4ba5..d770587d 100644
--- a/src/StellaOps.Concelier.Core/Linksets/IAdvisoryObservationFactory.cs
+++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Linksets/IAdvisoryObservationFactory.cs
@@ -1,10 +1,10 @@
-using System;
-using StellaOps.Concelier.Models.Observations;
-using StellaOps.Concelier.RawModels;
-
-namespace StellaOps.Concelier.Core.Linksets;
-
-internal interface IAdvisoryObservationFactory
-{
-    AdvisoryObservation Create(AdvisoryRawDocument rawDocument, DateTimeOffset? observedAt = null);
-}
+using System;
+using StellaOps.Concelier.Models.Observations;
+using StellaOps.Concelier.RawModels;
+
+namespace StellaOps.Concelier.Core.Linksets;
+
+internal interface IAdvisoryObservationFactory
+{
+    AdvisoryObservation Create(AdvisoryRawDocument rawDocument, DateTimeOffset? observedAt = null);
+}
diff --git a/src/StellaOps.Concelier.Core/Linksets/LinksetNormalization.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Linksets/LinksetNormalization.cs
similarity index 96%
rename from src/StellaOps.Concelier.Core/Linksets/LinksetNormalization.cs
rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Linksets/LinksetNormalization.cs
index 16a2828b..bd83b885 100644
--- a/src/StellaOps.Concelier.Core/Linksets/LinksetNormalization.cs
+++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Linksets/LinksetNormalization.cs
@@ -1,95 +1,95 @@
-using System.Collections.Immutable;
-using System.Text;
-using StellaOps.Concelier.Models;
-using StellaOps.Concelier.Models.Observations;
-using StellaOps.Concelier.Normalization.Identifiers;
-using PackageUrl = StellaOps.Concelier.Normalization.Identifiers.PackageUrl;
-
-namespace StellaOps.Concelier.Core.Linksets;
-
-internal static class LinksetNormalization
-{
-    public static bool TryNormalizeAlias(string? value, out string normalized)
-    {
-        if (Validation.TryNormalizeAlias(value, out var alias) && !string.IsNullOrEmpty(alias))
-        {
-            normalized = alias;
-            return true;
-        }
-
-        normalized = string.Empty;
-        return false;
-    }
-
-    public static bool TryNormalizePackageUrl(string? value, out string normalized)
-    {
-        normalized = string.Empty;
-        if (IdentifierNormalizer.TryNormalizePackageUrl(value, out _, out var packageUrl) && packageUrl is PackageUrl parsed)
-        {
-            normalized = CanonicalizePackageUrl(parsed);
-            return true;
-        }
-
-        var trimmed = Validation.TrimToNull(value);
-        if (trimmed is null || !trimmed.StartsWith("pkg:", StringComparison.OrdinalIgnoreCase))
-        {
-            return false;
-        }
-
-        normalized = trimmed;
-        return true;
-    }
-
-    public static bool TryNormalizeCpe(string? value, out string normalized)
-    {
-        normalized = string.Empty;
-        if (IdentifierNormalizer.TryNormalizeCpe(value, out var canonical) && !string.IsNullOrEmpty(canonical))
-        {
-            normalized = canonical;
-            return true;
-        }
-
-        var trimmed = Validation.TrimToNull(value);
-        if (trimmed is null || !trimmed.StartsWith("cpe", StringComparison.OrdinalIgnoreCase))
-        {
-            return false;
-        }
-
-        normalized = trimmed.ToLowerInvariant();
-        return true;
-    }
-
-    public static AdvisoryObservationReference? TryCreateReference(string? type, string? url)
-    {
-        var trimmedUrl = Validation.TrimToNull(url);
-        if (trimmedUrl is null || !Validation.LooksLikeHttpUrl(trimmedUrl))
-        {
-            return null;
-        }
-
-        var normalizedType = Validation.TrimToNull(type) ?? "other";
-        return new AdvisoryObservationReference(normalizedType, trimmedUrl);
-    }
-
-    private static string CanonicalizePackageUrl(PackageUrl packageUrl)
-    {
-        var builder = new StringBuilder("pkg:");
-        builder.Append(packageUrl.Type);
-        builder.Append('/');
-
-        if (!packageUrl.NamespaceSegments.IsDefaultOrEmpty && packageUrl.NamespaceSegments.Length > 0)
-        {
-            builder.Append(string.Join('/', packageUrl.NamespaceSegments));
-            builder.Append('/');
-        }
-
-        builder.Append(packageUrl.Name);
-        if (!string.IsNullOrEmpty(packageUrl.Version))
-        {
-            builder.Append('@');
-            builder.Append(packageUrl.Version);
-        }
-
-        return builder.ToString();
-    }
-}
+using System.Collections.Immutable;
+using System.Text;
+using StellaOps.Concelier.Models;
+using StellaOps.Concelier.Models.Observations;
+using StellaOps.Concelier.Normalization.Identifiers;
+using PackageUrl = StellaOps.Concelier.Normalization.Identifiers.PackageUrl;
+
+namespace StellaOps.Concelier.Core.Linksets;
+
+internal static class LinksetNormalization
+{
+    public static bool TryNormalizeAlias(string? value, out string normalized)
+    {
+        if (Validation.TryNormalizeAlias(value, out var alias) && !string.IsNullOrEmpty(alias))
+        {
+            normalized = alias;
+            return true;
+        }
+
+        normalized = string.Empty;
+        return false;
+    }
+
+    public static bool TryNormalizePackageUrl(string? value, out string normalized)
+    {
+        normalized = string.Empty;
+        if (IdentifierNormalizer.TryNormalizePackageUrl(value, out _, out var packageUrl) && packageUrl is PackageUrl parsed)
+        {
+            normalized = CanonicalizePackageUrl(parsed);
+            return true;
+        }
+
+        var trimmed = Validation.TrimToNull(value);
+        if (trimmed is null || !trimmed.StartsWith("pkg:", StringComparison.OrdinalIgnoreCase))
+        {
+            return false;
+        }
+
+        normalized = trimmed;
+        return true;
+    }
+
+    public static bool TryNormalizeCpe(string? value, out string normalized)
+    {
+        normalized = string.Empty;
+        if (IdentifierNormalizer.TryNormalizeCpe(value, out var canonical) && !string.IsNullOrEmpty(canonical))
+        {
+            normalized = canonical;
+            return true;
+        }
+
+        var trimmed = Validation.TrimToNull(value);
+        if (trimmed is null || !trimmed.StartsWith("cpe", StringComparison.OrdinalIgnoreCase))
+        {
+            return false;
+        }
+
+        normalized = trimmed.ToLowerInvariant();
+        return true;
+    }
+
+    public static AdvisoryObservationReference? TryCreateReference(string? type, string? url)
+    {
+        var trimmedUrl = Validation.TrimToNull(url);
+        if (trimmedUrl is null || !Validation.LooksLikeHttpUrl(trimmedUrl))
+        {
+            return null;
+        }
+
+        var normalizedType = Validation.TrimToNull(type) ?? "other";
+        return new AdvisoryObservationReference(normalizedType, trimmedUrl);
+    }
+
+    private static string CanonicalizePackageUrl(PackageUrl packageUrl)
+    {
+        var builder = new StringBuilder("pkg:");
+        builder.Append(packageUrl.Type);
+        builder.Append('/');
+
+        if (!packageUrl.NamespaceSegments.IsDefaultOrEmpty && packageUrl.NamespaceSegments.Length > 0)
+        {
+            builder.Append(string.Join('/', packageUrl.NamespaceSegments));
+            builder.Append('/');
+        }
+
+        builder.Append(packageUrl.Name);
+        if (!string.IsNullOrEmpty(packageUrl.Version))
+        {
+            builder.Append('@');
+            builder.Append(packageUrl.Version);
+        }
+
+        return builder.ToString();
+    }
+}
diff --git a/src/StellaOps.Concelier.Core/Linksets/LinksetServiceCollectionExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Linksets/LinksetServiceCollectionExtensions.cs
similarity index 97%
rename from src/StellaOps.Concelier.Core/Linksets/LinksetServiceCollectionExtensions.cs
rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Linksets/LinksetServiceCollectionExtensions.cs
index da6efeb0..93c85180 100644
--- a/src/StellaOps.Concelier.Core/Linksets/LinksetServiceCollectionExtensions.cs
+++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Linksets/LinksetServiceCollectionExtensions.cs
@@ -1,19 +1,19 @@
-using Microsoft.Extensions.DependencyInjection;
-using Microsoft.Extensions.DependencyInjection.Extensions;
-
-namespace StellaOps.Concelier.Core.Linksets;
-
-public static class LinksetServiceCollectionExtensions
-{
-    /// <summary>
-    /// Registers advisory linkset mappers used by ingestion pipelines.
-    /// </summary>
-    public static IServiceCollection AddConcelierLinksetMappers(this IServiceCollection services)
-    {
-        ArgumentNullException.ThrowIfNull(services);
-
-        services.TryAddSingleton<IAdvisoryLinksetMapper, AdvisoryLinksetMapper>();
-        services.TryAddSingleton<IAdvisoryObservationFactory, AdvisoryObservationFactory>();
-        return services;
-    }
-}
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.DependencyInjection.Extensions;
+
+namespace StellaOps.Concelier.Core.Linksets;
+
+public static class LinksetServiceCollectionExtensions
+{
+    /// <summary>
+    /// Registers advisory linkset mappers used by ingestion pipelines.
+    /// </summary>
+    public static IServiceCollection AddConcelierLinksetMappers(this IServiceCollection services)
+    {
+        ArgumentNullException.ThrowIfNull(services);
+
+        services.TryAddSingleton<IAdvisoryLinksetMapper, AdvisoryLinksetMapper>();
+        services.TryAddSingleton<IAdvisoryObservationFactory, AdvisoryObservationFactory>();
+        return services;
+    }
+}
diff --git a/src/StellaOps.Concelier.Core/Noise/INoisePriorRepository.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Noise/INoisePriorRepository.cs
similarity index 96%
rename from src/StellaOps.Concelier.Core/Noise/INoisePriorRepository.cs
rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Noise/INoisePriorRepository.cs
index 27b9f9fc..9255cfad 100644
--- a/src/StellaOps.Concelier.Core/Noise/INoisePriorRepository.cs
+++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Noise/INoisePriorRepository.cs
@@ -1,26 +1,26 @@
-using System.Collections.Generic;
-using System.Threading;
-using System.Threading.Tasks;
-
-namespace StellaOps.Concelier.Core.Noise;
-
-/// <summary>
-/// Persistence abstraction for storing and retrieving noise prior summaries.
-/// </summary> -public interface INoisePriorRepository -{ - ValueTask UpsertAsync( - string vulnerabilityKey, - IReadOnlyCollection<NoisePriorSummary> summaries, - CancellationToken cancellationToken); - - ValueTask<IReadOnlyList<NoisePriorSummary>> GetByVulnerabilityAsync( - string vulnerabilityKey, - CancellationToken cancellationToken); - - ValueTask<IReadOnlyList<NoisePriorSummary>> GetByPackageAsync( - string packageType, - string packageIdentifier, - string? platform, - CancellationToken cancellationToken); -} +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Concelier.Core.Noise; + +/// <summary> +/// Persistence abstraction for storing and retrieving noise prior summaries. +/// </summary> +public interface INoisePriorRepository +{ + ValueTask UpsertAsync( + string vulnerabilityKey, + IReadOnlyCollection<NoisePriorSummary> summaries, + CancellationToken cancellationToken); + + ValueTask<IReadOnlyList<NoisePriorSummary>> GetByVulnerabilityAsync( + string vulnerabilityKey, + CancellationToken cancellationToken); + + ValueTask<IReadOnlyList<NoisePriorSummary>> GetByPackageAsync( + string packageType, + string packageIdentifier, + string? platform, + CancellationToken cancellationToken); +} diff --git a/src/StellaOps.Concelier.Core/Noise/INoisePriorService.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Noise/INoisePriorService.cs similarity index 96% rename from src/StellaOps.Concelier.Core/Noise/INoisePriorService.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Noise/INoisePriorService.cs index da45995e..ea110db9 100644 --- a/src/StellaOps.Concelier.Core/Noise/INoisePriorService.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Noise/INoisePriorService.cs @@ -1,25 +1,25 @@ -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; - -namespace StellaOps.Concelier.Core.Noise; - -/// <summary> -/// Computes and serves false-positive priors for canonical advisories. -/// </summary> -public interface INoisePriorService -{ - ValueTask<NoisePriorComputationResult> RecomputeAsync( - NoisePriorComputationRequest request, - CancellationToken cancellationToken); - - ValueTask<IReadOnlyList<NoisePriorSummary>> GetByVulnerabilityAsync( - string vulnerabilityKey, - CancellationToken cancellationToken); - - ValueTask<IReadOnlyList<NoisePriorSummary>> GetByPackageAsync( - string packageType, - string packageIdentifier, - string? platform, - CancellationToken cancellationToken); -} +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Concelier.Core.Noise; + +/// <summary> +/// Computes and serves false-positive priors for canonical advisories. +/// </summary> +public interface INoisePriorService +{ + ValueTask<NoisePriorComputationResult> RecomputeAsync( + NoisePriorComputationRequest request, + CancellationToken cancellationToken); + + ValueTask<IReadOnlyList<NoisePriorSummary>> GetByVulnerabilityAsync( + string vulnerabilityKey, + CancellationToken cancellationToken); + + ValueTask<IReadOnlyList<NoisePriorSummary>> GetByPackageAsync( + string packageType, + string packageIdentifier, + string? 
platform, + CancellationToken cancellationToken); +} diff --git a/src/StellaOps.Concelier.Core/Noise/NoisePriorComputationRequest.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Noise/NoisePriorComputationRequest.cs similarity index 96% rename from src/StellaOps.Concelier.Core/Noise/NoisePriorComputationRequest.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Noise/NoisePriorComputationRequest.cs index 8bf2569e..9591e6a1 100644 --- a/src/StellaOps.Concelier.Core/Noise/NoisePriorComputationRequest.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Noise/NoisePriorComputationRequest.cs @@ -1,10 +1,10 @@ -using System; - -namespace StellaOps.Concelier.Core.Noise; - -/// <summary> -/// Options for recomputing noise priors for a single vulnerability key. -/// </summary> -public sealed record NoisePriorComputationRequest( - string VulnerabilityKey, - DateTimeOffset? AsOf = null); +using System; + +namespace StellaOps.Concelier.Core.Noise; + +/// <summary> +/// Options for recomputing noise priors for a single vulnerability key. +/// </summary> +public sealed record NoisePriorComputationRequest( + string VulnerabilityKey, + DateTimeOffset? AsOf = null); diff --git a/src/StellaOps.Concelier.Core/Noise/NoisePriorComputationResult.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Noise/NoisePriorComputationResult.cs similarity index 96% rename from src/StellaOps.Concelier.Core/Noise/NoisePriorComputationResult.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Noise/NoisePriorComputationResult.cs index 4fffda41..8c2eb0b1 100644 --- a/src/StellaOps.Concelier.Core/Noise/NoisePriorComputationResult.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Noise/NoisePriorComputationResult.cs @@ -1,10 +1,10 @@ -using System.Collections.Immutable; - -namespace StellaOps.Concelier.Core.Noise; - -/// <summary> -/// Results of a recompute operation containing per-package noise prior summaries. -/// </summary> -public sealed record NoisePriorComputationResult( - string VulnerabilityKey, - ImmutableArray<NoisePriorSummary> Summaries); +using System.Collections.Immutable; + +namespace StellaOps.Concelier.Core.Noise; + +/// <summary> +/// Results of a recompute operation containing per-package noise prior summaries. +/// </summary> +public sealed record NoisePriorComputationResult( + string VulnerabilityKey, + ImmutableArray<NoisePriorSummary> Summaries); diff --git a/src/StellaOps.Concelier.Core/Noise/NoisePriorService.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Noise/NoisePriorService.cs similarity index 97% rename from src/StellaOps.Concelier.Core/Noise/NoisePriorService.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Noise/NoisePriorService.cs index 087860c4..cfef0bb5 100644 --- a/src/StellaOps.Concelier.Core/Noise/NoisePriorService.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Noise/NoisePriorService.cs @@ -1,400 +1,400 @@ -using System; -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; -using StellaOps.Concelier.Core.Events; -using StellaOps.Concelier.Models; - -namespace StellaOps.Concelier.Core.Noise; - -/// <summary> -/// Default implementation that derives false-positive priors from advisory statements. 
-/// </summary> -public sealed class NoisePriorService : INoisePriorService -{ - private static readonly HashSet<string> NegativeStatuses = new( - new[] - { - AffectedPackageStatusCatalog.KnownNotAffected, - AffectedPackageStatusCatalog.NotAffected, - AffectedPackageStatusCatalog.NotApplicable, - }, - StringComparer.Ordinal); - - private static readonly HashSet<string> PositiveStatuses = new( - new[] - { - AffectedPackageStatusCatalog.KnownAffected, - AffectedPackageStatusCatalog.Affected, - AffectedPackageStatusCatalog.UnderInvestigation, - AffectedPackageStatusCatalog.Pending, - }, - StringComparer.Ordinal); - - private static readonly HashSet<string> ResolvedStatuses = new( - new[] - { - AffectedPackageStatusCatalog.Fixed, - AffectedPackageStatusCatalog.FirstFixed, - AffectedPackageStatusCatalog.Mitigated, - }, - StringComparer.Ordinal); - - private readonly IAdvisoryEventLog _eventLog; - private readonly INoisePriorRepository _repository; - private readonly TimeProvider _timeProvider; - - public NoisePriorService( - IAdvisoryEventLog eventLog, - INoisePriorRepository repository, - TimeProvider? timeProvider = null) - { - _eventLog = eventLog ?? throw new ArgumentNullException(nameof(eventLog)); - _repository = repository ?? throw new ArgumentNullException(nameof(repository)); - _timeProvider = timeProvider ?? TimeProvider.System; - } - - public async ValueTask<NoisePriorComputationResult> RecomputeAsync( - NoisePriorComputationRequest request, - CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(request); - - var normalizedKey = NormalizeKey(request.VulnerabilityKey, nameof(request.VulnerabilityKey)); - var replay = await _eventLog.ReplayAsync(normalizedKey, request.AsOf, cancellationToken).ConfigureAwait(false); - - var generatedAt = _timeProvider.GetUtcNow(); - var summaries = ComputeSummaries(replay, generatedAt); - - await _repository.UpsertAsync(normalizedKey, summaries, cancellationToken).ConfigureAwait(false); - - return new NoisePriorComputationResult( - normalizedKey, - summaries); - } - - public ValueTask<IReadOnlyList<NoisePriorSummary>> GetByVulnerabilityAsync( - string vulnerabilityKey, - CancellationToken cancellationToken) - { - var normalizedKey = NormalizeKey(vulnerabilityKey, nameof(vulnerabilityKey)); - return _repository.GetByVulnerabilityAsync(normalizedKey, cancellationToken); - } - - public ValueTask<IReadOnlyList<NoisePriorSummary>> GetByPackageAsync( - string packageType, - string packageIdentifier, - string? 
platform, - CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(packageType); - ArgumentException.ThrowIfNullOrWhiteSpace(packageIdentifier); - - var normalizedType = packageType.Trim().ToLowerInvariant(); - var normalizedIdentifier = packageIdentifier.Trim(); - var normalizedPlatform = NormalizePlatform(platform); - - return _repository.GetByPackageAsync( - normalizedType, - normalizedIdentifier, - normalizedPlatform, - cancellationToken); - } - - private ImmutableArray<NoisePriorSummary> ComputeSummaries( - AdvisoryReplay replay, - DateTimeOffset generatedAt) - { - if (replay is null || replay.Statements.IsDefaultOrEmpty) - { - return ImmutableArray<NoisePriorSummary>.Empty; - } - - var accumulators = new Dictionary<PackageKey, NoiseAccumulator>(capacity: replay.Statements.Length); - - foreach (var statement in replay.Statements) - { - if (statement is null) - { - continue; - } - - foreach (var package in statement.Advisory.AffectedPackages) - { - if (package is null || string.IsNullOrWhiteSpace(package.Identifier)) - { - continue; - } - - var platform = NormalizePlatform(package.Platform); - var key = new PackageKey(package.Type, package.Identifier, platform); - - if (!accumulators.TryGetValue(key, out var accumulator)) - { - accumulator = new NoiseAccumulator( - replay.VulnerabilityKey, - package.Type, - package.Identifier, - platform); - accumulators.Add(key, accumulator); - } - - accumulator.Register(statement.AsOf, package); - } - } - - if (accumulators.Count == 0) - { - return ImmutableArray<NoisePriorSummary>.Empty; - } - - var builder = ImmutableArray.CreateBuilder<NoisePriorSummary>(accumulators.Count); - foreach (var accumulator in accumulators.Values - .OrderBy(static a => a.PackageType, StringComparer.Ordinal) - .ThenBy(static a => a.PackageIdentifier, StringComparer.Ordinal) - .ThenBy(static a => a.Platform, StringComparer.Ordinal)) - { - builder.Add(accumulator.ToSummary(generatedAt)); - } - - return builder.ToImmutable(); - } - - private static string NormalizeKey(string value, string parameterName) - { - if (string.IsNullOrWhiteSpace(value)) - { - throw new ArgumentException("Value must be provided.", parameterName); - } - - return value.Trim().ToLowerInvariant(); - } - - private static string? NormalizePlatform(string? value) - => string.IsNullOrWhiteSpace(value) ? null : value.Trim(); - - private sealed record PackageKey( - string PackageType, - string PackageIdentifier, - string? Platform); - - private sealed class NoiseAccumulator - { - private readonly string _vulnerabilityKey; - private readonly HashSet<string> _negativeSources = new(StringComparer.Ordinal); - - public NoiseAccumulator( - string vulnerabilityKey, - string packageType, - string packageIdentifier, - string? platform) - { - _vulnerabilityKey = vulnerabilityKey; - PackageType = packageType; - PackageIdentifier = packageIdentifier; - Platform = platform; - FirstObserved = DateTimeOffset.MaxValue; - LastObserved = DateTimeOffset.MinValue; - } - - public string PackageType { get; } - - public string PackageIdentifier { get; } - - public string? 
Platform { get; } - - public int ObservationCount { get; private set; } - - public int NegativeSignals { get; private set; } - - public int PositiveSignals { get; private set; } - - public int NeutralSignals { get; private set; } - - public int VersionRangeSignals { get; private set; } - - public bool HasMissingStatus { get; private set; } - - public DateTimeOffset FirstObserved { get; private set; } - - public DateTimeOffset LastObserved { get; private set; } - - public int UniqueNegativeSources => _negativeSources.Count; - - public void Register(DateTimeOffset asOf, AffectedPackage package) - { - ObservationCount++; - - var asOfUtc = asOf.ToUniversalTime(); - if (asOfUtc < FirstObserved) - { - FirstObserved = asOfUtc; - } - - if (asOfUtc > LastObserved) - { - LastObserved = asOfUtc; - } - - var statuses = package.Statuses; - if (statuses.IsDefaultOrEmpty || statuses.Length == 0) - { - HasMissingStatus = true; - } - - foreach (var status in statuses) - { - if (NegativeStatuses.Contains(status.Status)) - { - NegativeSignals++; - if (!string.IsNullOrWhiteSpace(status.Provenance.Source)) - { - _negativeSources.Add(status.Provenance.Source); - } - } - else if (PositiveStatuses.Contains(status.Status) || ResolvedStatuses.Contains(status.Status)) - { - PositiveSignals++; - } - else if (string.Equals(status.Status, AffectedPackageStatusCatalog.Unknown, StringComparison.Ordinal)) - { - NeutralSignals++; - } - else - { - NeutralSignals++; - } - } - - if (!package.VersionRanges.IsDefaultOrEmpty && package.VersionRanges.Length > 0) - { - VersionRangeSignals++; - } - } - - public NoisePriorSummary ToSummary(DateTimeOffset generatedAt) - { - var boundedFirst = FirstObserved == DateTimeOffset.MaxValue ? generatedAt : FirstObserved; - var boundedLast = LastObserved == DateTimeOffset.MinValue ? 
generatedAt : LastObserved; - - var probability = ComputeProbability(); - var rules = BuildRules(); - - return new NoisePriorSummary( - _vulnerabilityKey, - PackageType, - PackageIdentifier, - Platform, - probability, - ObservationCount, - NegativeSignals, - PositiveSignals, - NeutralSignals, - VersionRangeSignals, - UniqueNegativeSources, - rules, - boundedFirst, - boundedLast, - generatedAt); - } - - private double ComputeProbability() - { - var positiveSignals = PositiveSignals + VersionRangeSignals; - var denominator = NegativeSignals + positiveSignals; - - double score; - if (denominator == 0) - { - if (HasMissingStatus) - { - score = 0.35; - } - else if (NeutralSignals > 0) - { - score = 0.40; - } - else - { - score = 0.0; - } - } - else - { - score = NegativeSignals / (double)denominator; - - if (NegativeSignals > 0 && positiveSignals == 0) - { - score = Math.Min(1.0, score + 0.20); - } - - if (positiveSignals > 0 && NegativeSignals == 0) - { - score = Math.Max(0.0, score - 0.25); - } - - if (PositiveSignals > NegativeSignals) - { - score = Math.Max(0.0, score - 0.10); - } - - if (UniqueNegativeSources >= 2) - { - score = Math.Min(1.0, score + 0.10); - } - - if (NeutralSignals > 0) - { - var neutralBoost = Math.Min(0.10, NeutralSignals * 0.02); - score = Math.Min(1.0, score + neutralBoost); - } - } - - return Math.Round(Math.Clamp(score, 0.0, 1.0), 4, MidpointRounding.ToZero); - } - - private ImmutableArray<string> BuildRules() - { - var rules = new HashSet<string>(StringComparer.Ordinal); - - if (NegativeSignals > 0 && PositiveSignals == 0 && VersionRangeSignals == 0) - { - rules.Add("all_negative"); - } - - if (UniqueNegativeSources >= 2) - { - rules.Add("multi_source_negative"); - } - - if (PositiveSignals > 0 || VersionRangeSignals > 0) - { - rules.Add("positive_evidence"); - } - - if (NegativeSignals > 0 && (PositiveSignals > 0 || VersionRangeSignals > 0)) - { - rules.Add("conflicting_signals"); - } - - if (ObservationCount < 3) - { - rules.Add("sparse_observations"); - } - - if (HasMissingStatus) - { - rules.Add("missing_status"); - } - - if (NeutralSignals > 0 && NegativeSignals == 0 && PositiveSignals == 0 && VersionRangeSignals == 0) - { - rules.Add("neutral_only"); - } - - return rules.OrderBy(static rule => rule, StringComparer.Ordinal).ToImmutableArray(); - } - } -} +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Concelier.Core.Events; +using StellaOps.Concelier.Models; + +namespace StellaOps.Concelier.Core.Noise; + +/// <summary> +/// Default implementation that derives false-positive priors from advisory statements. 
+/// </summary> +public sealed class NoisePriorService : INoisePriorService +{ + private static readonly HashSet<string> NegativeStatuses = new( + new[] + { + AffectedPackageStatusCatalog.KnownNotAffected, + AffectedPackageStatusCatalog.NotAffected, + AffectedPackageStatusCatalog.NotApplicable, + }, + StringComparer.Ordinal); + + private static readonly HashSet<string> PositiveStatuses = new( + new[] + { + AffectedPackageStatusCatalog.KnownAffected, + AffectedPackageStatusCatalog.Affected, + AffectedPackageStatusCatalog.UnderInvestigation, + AffectedPackageStatusCatalog.Pending, + }, + StringComparer.Ordinal); + + private static readonly HashSet<string> ResolvedStatuses = new( + new[] + { + AffectedPackageStatusCatalog.Fixed, + AffectedPackageStatusCatalog.FirstFixed, + AffectedPackageStatusCatalog.Mitigated, + }, + StringComparer.Ordinal); + + private readonly IAdvisoryEventLog _eventLog; + private readonly INoisePriorRepository _repository; + private readonly TimeProvider _timeProvider; + + public NoisePriorService( + IAdvisoryEventLog eventLog, + INoisePriorRepository repository, + TimeProvider? timeProvider = null) + { + _eventLog = eventLog ?? throw new ArgumentNullException(nameof(eventLog)); + _repository = repository ?? throw new ArgumentNullException(nameof(repository)); + _timeProvider = timeProvider ?? TimeProvider.System; + } + + public async ValueTask<NoisePriorComputationResult> RecomputeAsync( + NoisePriorComputationRequest request, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(request); + + var normalizedKey = NormalizeKey(request.VulnerabilityKey, nameof(request.VulnerabilityKey)); + var replay = await _eventLog.ReplayAsync(normalizedKey, request.AsOf, cancellationToken).ConfigureAwait(false); + + var generatedAt = _timeProvider.GetUtcNow(); + var summaries = ComputeSummaries(replay, generatedAt); + + await _repository.UpsertAsync(normalizedKey, summaries, cancellationToken).ConfigureAwait(false); + + return new NoisePriorComputationResult( + normalizedKey, + summaries); + } + + public ValueTask<IReadOnlyList<NoisePriorSummary>> GetByVulnerabilityAsync( + string vulnerabilityKey, + CancellationToken cancellationToken) + { + var normalizedKey = NormalizeKey(vulnerabilityKey, nameof(vulnerabilityKey)); + return _repository.GetByVulnerabilityAsync(normalizedKey, cancellationToken); + } + + public ValueTask<IReadOnlyList<NoisePriorSummary>> GetByPackageAsync( + string packageType, + string packageIdentifier, + string? 
platform, + CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(packageType); + ArgumentException.ThrowIfNullOrWhiteSpace(packageIdentifier); + + var normalizedType = packageType.Trim().ToLowerInvariant(); + var normalizedIdentifier = packageIdentifier.Trim(); + var normalizedPlatform = NormalizePlatform(platform); + + return _repository.GetByPackageAsync( + normalizedType, + normalizedIdentifier, + normalizedPlatform, + cancellationToken); + } + + private ImmutableArray<NoisePriorSummary> ComputeSummaries( + AdvisoryReplay replay, + DateTimeOffset generatedAt) + { + if (replay is null || replay.Statements.IsDefaultOrEmpty) + { + return ImmutableArray<NoisePriorSummary>.Empty; + } + + var accumulators = new Dictionary<PackageKey, NoiseAccumulator>(capacity: replay.Statements.Length); + + foreach (var statement in replay.Statements) + { + if (statement is null) + { + continue; + } + + foreach (var package in statement.Advisory.AffectedPackages) + { + if (package is null || string.IsNullOrWhiteSpace(package.Identifier)) + { + continue; + } + + var platform = NormalizePlatform(package.Platform); + var key = new PackageKey(package.Type, package.Identifier, platform); + + if (!accumulators.TryGetValue(key, out var accumulator)) + { + accumulator = new NoiseAccumulator( + replay.VulnerabilityKey, + package.Type, + package.Identifier, + platform); + accumulators.Add(key, accumulator); + } + + accumulator.Register(statement.AsOf, package); + } + } + + if (accumulators.Count == 0) + { + return ImmutableArray<NoisePriorSummary>.Empty; + } + + var builder = ImmutableArray.CreateBuilder<NoisePriorSummary>(accumulators.Count); + foreach (var accumulator in accumulators.Values + .OrderBy(static a => a.PackageType, StringComparer.Ordinal) + .ThenBy(static a => a.PackageIdentifier, StringComparer.Ordinal) + .ThenBy(static a => a.Platform, StringComparer.Ordinal)) + { + builder.Add(accumulator.ToSummary(generatedAt)); + } + + return builder.ToImmutable(); + } + + private static string NormalizeKey(string value, string parameterName) + { + if (string.IsNullOrWhiteSpace(value)) + { + throw new ArgumentException("Value must be provided.", parameterName); + } + + return value.Trim().ToLowerInvariant(); + } + + private static string? NormalizePlatform(string? value) + => string.IsNullOrWhiteSpace(value) ? null : value.Trim(); + + private sealed record PackageKey( + string PackageType, + string PackageIdentifier, + string? Platform); + + private sealed class NoiseAccumulator + { + private readonly string _vulnerabilityKey; + private readonly HashSet<string> _negativeSources = new(StringComparer.Ordinal); + + public NoiseAccumulator( + string vulnerabilityKey, + string packageType, + string packageIdentifier, + string? platform) + { + _vulnerabilityKey = vulnerabilityKey; + PackageType = packageType; + PackageIdentifier = packageIdentifier; + Platform = platform; + FirstObserved = DateTimeOffset.MaxValue; + LastObserved = DateTimeOffset.MinValue; + } + + public string PackageType { get; } + + public string PackageIdentifier { get; } + + public string? 
Platform { get; } + + public int ObservationCount { get; private set; } + + public int NegativeSignals { get; private set; } + + public int PositiveSignals { get; private set; } + + public int NeutralSignals { get; private set; } + + public int VersionRangeSignals { get; private set; } + + public bool HasMissingStatus { get; private set; } + + public DateTimeOffset FirstObserved { get; private set; } + + public DateTimeOffset LastObserved { get; private set; } + + public int UniqueNegativeSources => _negativeSources.Count; + + public void Register(DateTimeOffset asOf, AffectedPackage package) + { + ObservationCount++; + + var asOfUtc = asOf.ToUniversalTime(); + if (asOfUtc < FirstObserved) + { + FirstObserved = asOfUtc; + } + + if (asOfUtc > LastObserved) + { + LastObserved = asOfUtc; + } + + var statuses = package.Statuses; + if (statuses.IsDefaultOrEmpty || statuses.Length == 0) + { + HasMissingStatus = true; + } + + foreach (var status in statuses) + { + if (NegativeStatuses.Contains(status.Status)) + { + NegativeSignals++; + if (!string.IsNullOrWhiteSpace(status.Provenance.Source)) + { + _negativeSources.Add(status.Provenance.Source); + } + } + else if (PositiveStatuses.Contains(status.Status) || ResolvedStatuses.Contains(status.Status)) + { + PositiveSignals++; + } + else if (string.Equals(status.Status, AffectedPackageStatusCatalog.Unknown, StringComparison.Ordinal)) + { + NeutralSignals++; + } + else + { + NeutralSignals++; + } + } + + if (!package.VersionRanges.IsDefaultOrEmpty && package.VersionRanges.Length > 0) + { + VersionRangeSignals++; + } + } + + public NoisePriorSummary ToSummary(DateTimeOffset generatedAt) + { + var boundedFirst = FirstObserved == DateTimeOffset.MaxValue ? generatedAt : FirstObserved; + var boundedLast = LastObserved == DateTimeOffset.MinValue ? 
generatedAt : LastObserved; + + var probability = ComputeProbability(); + var rules = BuildRules(); + + return new NoisePriorSummary( + _vulnerabilityKey, + PackageType, + PackageIdentifier, + Platform, + probability, + ObservationCount, + NegativeSignals, + PositiveSignals, + NeutralSignals, + VersionRangeSignals, + UniqueNegativeSources, + rules, + boundedFirst, + boundedLast, + generatedAt); + } + + private double ComputeProbability() + { + var positiveSignals = PositiveSignals + VersionRangeSignals; + var denominator = NegativeSignals + positiveSignals; + + double score; + if (denominator == 0) + { + if (HasMissingStatus) + { + score = 0.35; + } + else if (NeutralSignals > 0) + { + score = 0.40; + } + else + { + score = 0.0; + } + } + else + { + score = NegativeSignals / (double)denominator; + + if (NegativeSignals > 0 && positiveSignals == 0) + { + score = Math.Min(1.0, score + 0.20); + } + + if (positiveSignals > 0 && NegativeSignals == 0) + { + score = Math.Max(0.0, score - 0.25); + } + + if (PositiveSignals > NegativeSignals) + { + score = Math.Max(0.0, score - 0.10); + } + + if (UniqueNegativeSources >= 2) + { + score = Math.Min(1.0, score + 0.10); + } + + if (NeutralSignals > 0) + { + var neutralBoost = Math.Min(0.10, NeutralSignals * 0.02); + score = Math.Min(1.0, score + neutralBoost); + } + } + + return Math.Round(Math.Clamp(score, 0.0, 1.0), 4, MidpointRounding.ToZero); + } + + private ImmutableArray<string> BuildRules() + { + var rules = new HashSet<string>(StringComparer.Ordinal); + + if (NegativeSignals > 0 && PositiveSignals == 0 && VersionRangeSignals == 0) + { + rules.Add("all_negative"); + } + + if (UniqueNegativeSources >= 2) + { + rules.Add("multi_source_negative"); + } + + if (PositiveSignals > 0 || VersionRangeSignals > 0) + { + rules.Add("positive_evidence"); + } + + if (NegativeSignals > 0 && (PositiveSignals > 0 || VersionRangeSignals > 0)) + { + rules.Add("conflicting_signals"); + } + + if (ObservationCount < 3) + { + rules.Add("sparse_observations"); + } + + if (HasMissingStatus) + { + rules.Add("missing_status"); + } + + if (NeutralSignals > 0 && NegativeSignals == 0 && PositiveSignals == 0 && VersionRangeSignals == 0) + { + rules.Add("neutral_only"); + } + + return rules.OrderBy(static rule => rule, StringComparer.Ordinal).ToImmutableArray(); + } + } +} diff --git a/src/StellaOps.Concelier.Core/Noise/NoisePriorServiceCollectionExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Noise/NoisePriorServiceCollectionExtensions.cs similarity index 96% rename from src/StellaOps.Concelier.Core/Noise/NoisePriorServiceCollectionExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Noise/NoisePriorServiceCollectionExtensions.cs index e3de3795..5240cf51 100644 --- a/src/StellaOps.Concelier.Core/Noise/NoisePriorServiceCollectionExtensions.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Noise/NoisePriorServiceCollectionExtensions.cs @@ -1,24 +1,24 @@ -using System; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.DependencyInjection.Extensions; - -namespace StellaOps.Concelier.Core.Noise; - -/// <summary> -/// Dependency injection helpers for the noise prior service. 
-/// </summary>
-public static class NoisePriorServiceCollectionExtensions
-{
-    public static IServiceCollection AddNoisePriorService(this IServiceCollection services)
-    {
-        if (services is null)
-        {
-            throw new ArgumentNullException(nameof(services));
-        }
-
-        services.TryAddSingleton(TimeProvider.System);
-        services.AddSingleton<INoisePriorService, NoisePriorService>();
-
-        return services;
-    }
-}
+using System;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.DependencyInjection.Extensions;
+
+namespace StellaOps.Concelier.Core.Noise;
+
+/// <summary>
+/// Dependency injection helpers for the noise prior service.
+/// </summary>
+public static class NoisePriorServiceCollectionExtensions
+{
+    public static IServiceCollection AddNoisePriorService(this IServiceCollection services)
+    {
+        if (services is null)
+        {
+            throw new ArgumentNullException(nameof(services));
+        }
+
+        services.TryAddSingleton(TimeProvider.System);
+        services.AddSingleton<INoisePriorService, NoisePriorService>();
+
+        return services;
+    }
+}
diff --git a/src/StellaOps.Concelier.Core/Noise/NoisePriorSummary.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Noise/NoisePriorSummary.cs
similarity index 96%
rename from src/StellaOps.Concelier.Core/Noise/NoisePriorSummary.cs
rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Noise/NoisePriorSummary.cs
index 23c21f39..87c5fd06 100644
--- a/src/StellaOps.Concelier.Core/Noise/NoisePriorSummary.cs
+++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Noise/NoisePriorSummary.cs
@@ -1,24 +1,24 @@
-using System;
-using System.Collections.Immutable;
-
-namespace StellaOps.Concelier.Core.Noise;
-
-/// <summary>
-/// Immutable noise prior summary describing false-positive likelihood signals for a package/environment tuple.
-/// </summary>
-public sealed record NoisePriorSummary(
-    string VulnerabilityKey,
-    string PackageType,
-    string PackageIdentifier,
-    string? Platform,
-    double Probability,
-    int ObservationCount,
-    int NegativeSignals,
-    int PositiveSignals,
-    int NeutralSignals,
-    int VersionRangeSignals,
-    int UniqueNegativeSources,
-    ImmutableArray<string> RuleHits,
-    DateTimeOffset FirstObserved,
-    DateTimeOffset LastObserved,
-    DateTimeOffset GeneratedAt);
+using System;
+using System.Collections.Immutable;
+
+namespace StellaOps.Concelier.Core.Noise;
+
+/// <summary>
+/// Immutable noise prior summary describing false-positive likelihood signals for a package/environment tuple.
+/// </summary>
+public sealed record NoisePriorSummary(
+    string VulnerabilityKey,
+    string PackageType,
+    string PackageIdentifier,
+    string?
Platform, + double Probability, + int ObservationCount, + int NegativeSignals, + int PositiveSignals, + int NeutralSignals, + int VersionRangeSignals, + int UniqueNegativeSources, + ImmutableArray<string> RuleHits, + DateTimeOffset FirstObserved, + DateTimeOffset LastObserved, + DateTimeOffset GeneratedAt); diff --git a/src/StellaOps.Concelier.Core/Observations/AdvisoryObservationCursor.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Observations/AdvisoryObservationCursor.cs similarity index 97% rename from src/StellaOps.Concelier.Core/Observations/AdvisoryObservationCursor.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Observations/AdvisoryObservationCursor.cs index 086f9f9a..71e447c9 100644 --- a/src/StellaOps.Concelier.Core/Observations/AdvisoryObservationCursor.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Observations/AdvisoryObservationCursor.cs @@ -1,8 +1,8 @@ -namespace StellaOps.Concelier.Core.Observations; - -/// <summary> -/// Represents a stable pagination cursor for advisory observations. -/// </summary> -public readonly record struct AdvisoryObservationCursor( - DateTimeOffset CreatedAt, - string ObservationId); +namespace StellaOps.Concelier.Core.Observations; + +/// <summary> +/// Represents a stable pagination cursor for advisory observations. +/// </summary> +public readonly record struct AdvisoryObservationCursor( + DateTimeOffset CreatedAt, + string ObservationId); diff --git a/src/StellaOps.Concelier.Core/Observations/AdvisoryObservationQueryModels.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Observations/AdvisoryObservationQueryModels.cs similarity index 97% rename from src/StellaOps.Concelier.Core/Observations/AdvisoryObservationQueryModels.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Observations/AdvisoryObservationQueryModels.cs index 0baaed72..2050cac3 100644 --- a/src/StellaOps.Concelier.Core/Observations/AdvisoryObservationQueryModels.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Observations/AdvisoryObservationQueryModels.cs @@ -1,82 +1,82 @@ -using System.Collections.Immutable; -using StellaOps.Concelier.Models; -using StellaOps.Concelier.Models.Observations; - -namespace StellaOps.Concelier.Core.Observations; - -/// <summary> -/// Query options for retrieving advisory observations scoped to a tenant. -/// </summary> -public sealed record AdvisoryObservationQueryOptions -{ - public AdvisoryObservationQueryOptions( - string tenant, - IReadOnlyCollection<string>? observationIds = null, - IReadOnlyCollection<string>? aliases = null, - IReadOnlyCollection<string>? purls = null, - IReadOnlyCollection<string>? cpes = null, - int? limit = null, - string? cursor = null) - { - Tenant = Validation.EnsureNotNullOrWhiteSpace(tenant, nameof(tenant)); - ObservationIds = observationIds ?? Array.Empty<string>(); - Aliases = aliases ?? Array.Empty<string>(); - Purls = purls ?? Array.Empty<string>(); - Cpes = cpes ?? Array.Empty<string>(); - Limit = limit; - Cursor = Validation.TrimToNull(cursor); - } - - /// <summary> - /// Tenant identifier used for scoping queries (case-insensitive). - /// </summary> - public string Tenant { get; } - - /// <summary> - /// Optional set of observation identifiers to include. - /// </summary> - public IReadOnlyCollection<string> ObservationIds { get; } - - /// <summary> - /// Optional set of alias identifiers (e.g., CVE/GHSA) to filter by. 
- /// </summary> - public IReadOnlyCollection<string> Aliases { get; } - - /// <summary> - /// Optional set of Package URLs to filter by. - /// </summary> - public IReadOnlyCollection<string> Purls { get; } - - /// <summary> - /// Optional set of CPE values to filter by. - /// </summary> - public IReadOnlyCollection<string> Cpes { get; } - - /// <summary> - /// Optional limit for page size. When null or non-positive the service default is used. - /// </summary> - public int? Limit { get; } - - /// <summary> - /// Opaque cursor returned by previous query page. - /// </summary> - public string? Cursor { get; } -} - -/// <summary> -/// Query result containing observations and their aggregated linkset hints. -/// </summary> -public sealed record AdvisoryObservationQueryResult( - ImmutableArray<AdvisoryObservation> Observations, - AdvisoryObservationLinksetAggregate Linkset, - string? NextCursor, - bool HasMore); - -/// <summary> -/// Aggregated linkset built from the observations returned by a query. -/// </summary> -public sealed record AdvisoryObservationLinksetAggregate( - ImmutableArray<string> Aliases, - ImmutableArray<string> Purls, - ImmutableArray<string> Cpes, - ImmutableArray<AdvisoryObservationReference> References); +using System.Collections.Immutable; +using StellaOps.Concelier.Models; +using StellaOps.Concelier.Models.Observations; + +namespace StellaOps.Concelier.Core.Observations; + +/// <summary> +/// Query options for retrieving advisory observations scoped to a tenant. +/// </summary> +public sealed record AdvisoryObservationQueryOptions +{ + public AdvisoryObservationQueryOptions( + string tenant, + IReadOnlyCollection<string>? observationIds = null, + IReadOnlyCollection<string>? aliases = null, + IReadOnlyCollection<string>? purls = null, + IReadOnlyCollection<string>? cpes = null, + int? limit = null, + string? cursor = null) + { + Tenant = Validation.EnsureNotNullOrWhiteSpace(tenant, nameof(tenant)); + ObservationIds = observationIds ?? Array.Empty<string>(); + Aliases = aliases ?? Array.Empty<string>(); + Purls = purls ?? Array.Empty<string>(); + Cpes = cpes ?? Array.Empty<string>(); + Limit = limit; + Cursor = Validation.TrimToNull(cursor); + } + + /// <summary> + /// Tenant identifier used for scoping queries (case-insensitive). + /// </summary> + public string Tenant { get; } + + /// <summary> + /// Optional set of observation identifiers to include. + /// </summary> + public IReadOnlyCollection<string> ObservationIds { get; } + + /// <summary> + /// Optional set of alias identifiers (e.g., CVE/GHSA) to filter by. + /// </summary> + public IReadOnlyCollection<string> Aliases { get; } + + /// <summary> + /// Optional set of Package URLs to filter by. + /// </summary> + public IReadOnlyCollection<string> Purls { get; } + + /// <summary> + /// Optional set of CPE values to filter by. + /// </summary> + public IReadOnlyCollection<string> Cpes { get; } + + /// <summary> + /// Optional limit for page size. When null or non-positive the service default is used. + /// </summary> + public int? Limit { get; } + + /// <summary> + /// Opaque cursor returned by previous query page. + /// </summary> + public string? Cursor { get; } +} + +/// <summary> +/// Query result containing observations and their aggregated linkset hints. +/// </summary> +public sealed record AdvisoryObservationQueryResult( + ImmutableArray<AdvisoryObservation> Observations, + AdvisoryObservationLinksetAggregate Linkset, + string? 
NextCursor, + bool HasMore); + +/// <summary> +/// Aggregated linkset built from the observations returned by a query. +/// </summary> +public sealed record AdvisoryObservationLinksetAggregate( + ImmutableArray<string> Aliases, + ImmutableArray<string> Purls, + ImmutableArray<string> Cpes, + ImmutableArray<AdvisoryObservationReference> References); diff --git a/src/StellaOps.Concelier.Core/Observations/AdvisoryObservationQueryService.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Observations/AdvisoryObservationQueryService.cs similarity index 97% rename from src/StellaOps.Concelier.Core/Observations/AdvisoryObservationQueryService.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Observations/AdvisoryObservationQueryService.cs index 93299414..ae41f482 100644 --- a/src/StellaOps.Concelier.Core/Observations/AdvisoryObservationQueryService.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Observations/AdvisoryObservationQueryService.cs @@ -1,244 +1,244 @@ -using System.Collections.Immutable; -using System.Globalization; -using System.Text; -using StellaOps.Concelier.Models; -using StellaOps.Concelier.Models.Observations; - -namespace StellaOps.Concelier.Core.Observations; - -/// <summary> -/// Default implementation of <see cref="IAdvisoryObservationQueryService"/> that projects raw observations for overlay consumers. -/// </summary> -public sealed class AdvisoryObservationQueryService : IAdvisoryObservationQueryService -{ - private const int DefaultPageSize = 200; - private const int MaxPageSize = 500; - private readonly IAdvisoryObservationLookup _lookup; - - public AdvisoryObservationQueryService(IAdvisoryObservationLookup lookup) - { - _lookup = lookup ?? throw new ArgumentNullException(nameof(lookup)); - } - - public async ValueTask<AdvisoryObservationQueryResult> QueryAsync( - AdvisoryObservationQueryOptions options, - CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(options); - cancellationToken.ThrowIfCancellationRequested(); - - var normalizedTenant = NormalizeTenant(options.Tenant); - var normalizedObservationIds = NormalizeSet(options.ObservationIds, static value => value, StringComparer.Ordinal); - var normalizedAliases = NormalizeSet(options.Aliases, static value => value.ToLowerInvariant(), StringComparer.Ordinal); - var normalizedPurls = NormalizeSet(options.Purls, static value => value, StringComparer.Ordinal); - var normalizedCpes = NormalizeSet(options.Cpes, static value => value, StringComparer.Ordinal); - - var limit = NormalizeLimit(options.Limit); - var fetchSize = checked(limit + 1); - - var cursor = DecodeCursor(options.Cursor); - - var observations = await _lookup - .FindByFiltersAsync( - normalizedTenant, - normalizedObservationIds, - normalizedAliases, - normalizedPurls, - normalizedCpes, - cursor, - fetchSize, - cancellationToken) - .ConfigureAwait(false); - - var ordered = observations - .Where(observation => Matches(observation, normalizedObservationIds, normalizedAliases, normalizedPurls, normalizedCpes)) - .OrderByDescending(static observation => observation.CreatedAt) - .ThenBy(static observation => observation.ObservationId, StringComparer.Ordinal) - .ToImmutableArray(); - - var hasMore = ordered.Length > limit; - var page = hasMore ? ordered.Take(limit).ToImmutableArray() : ordered; - var nextCursor = hasMore ? 
EncodeCursor(page[^1]) : null; - - var linkset = BuildAggregateLinkset(page); - return new AdvisoryObservationQueryResult(page, linkset, nextCursor, hasMore); - } - - private static bool Matches( - AdvisoryObservation observation, - ImmutableHashSet<string> observationIds, - ImmutableHashSet<string> aliases, - ImmutableHashSet<string> purls, - ImmutableHashSet<string> cpes) - { - ArgumentNullException.ThrowIfNull(observation); - - if (observationIds.Count > 0 && !observationIds.Contains(observation.ObservationId)) - { - return false; - } - - if (aliases.Count > 0 && !observation.Linkset.Aliases.Any(aliases.Contains)) - { - return false; - } - - if (purls.Count > 0 && !observation.Linkset.Purls.Any(purls.Contains)) - { - return false; - } - - if (cpes.Count > 0 && !observation.Linkset.Cpes.Any(cpes.Contains)) - { - return false; - } - - return true; - } - - private static string NormalizeTenant(string tenant) - => Validation.EnsureNotNullOrWhiteSpace(tenant, nameof(tenant)).ToLowerInvariant(); - - private static ImmutableHashSet<string> NormalizeSet( - IEnumerable<string>? values, - Func<string, string> projector, - StringComparer comparer) - { - if (values is null) - { - return ImmutableHashSet<string>.Empty; - } - - var builder = ImmutableHashSet.CreateBuilder<string>(comparer); - foreach (var value in values) - { - var normalized = Validation.TrimToNull(value); - if (normalized is null) - { - continue; - } - - builder.Add(projector(normalized)); - } - - return builder.ToImmutable(); - } - - private static int NormalizeLimit(int? requestedLimit) - { - if (!requestedLimit.HasValue || requestedLimit.Value <= 0) - { - return DefaultPageSize; - } - - var limit = requestedLimit.Value; - if (limit > MaxPageSize) - { - return MaxPageSize; - } - - return limit; - } - - private static AdvisoryObservationCursor? DecodeCursor(string? cursor) - { - if (string.IsNullOrWhiteSpace(cursor)) - { - return null; - } - - try - { - var decoded = Convert.FromBase64String(cursor.Trim()); - var payload = Encoding.UTF8.GetString(decoded); - var separator = payload.IndexOf(':'); - if (separator <= 0 || separator >= payload.Length - 1) - { - throw new FormatException("Cursor is malformed."); - } - - var ticksText = payload.AsSpan(0, separator); - if (!long.TryParse(ticksText, NumberStyles.Integer, CultureInfo.InvariantCulture, out var ticks)) - { - throw new FormatException("Cursor timestamp is invalid."); - } - - var createdAt = new DateTimeOffset(DateTime.SpecifyKind(new DateTime(ticks), DateTimeKind.Utc)); - var observationId = payload[(separator + 1)..]; - if (string.IsNullOrWhiteSpace(observationId)) - { - throw new FormatException("Cursor observation id is missing."); - } - - return new AdvisoryObservationCursor(createdAt, observationId); - } - catch (FormatException) - { - throw; - } - catch (Exception ex) - { - throw new FormatException("Cursor is malformed.", ex); - } - } - - private static string? 
EncodeCursor(AdvisoryObservation observation) - { - if (observation is null) - { - return null; - } - - var payload = $"{observation.CreatedAt.UtcTicks.ToString(CultureInfo.InvariantCulture)}:{observation.ObservationId}"; - return Convert.ToBase64String(Encoding.UTF8.GetBytes(payload)); - } - - private static AdvisoryObservationLinksetAggregate BuildAggregateLinkset(ImmutableArray<AdvisoryObservation> observations) - { - if (observations.IsDefaultOrEmpty) - { - return new AdvisoryObservationLinksetAggregate( - ImmutableArray<string>.Empty, - ImmutableArray<string>.Empty, - ImmutableArray<string>.Empty, - ImmutableArray<AdvisoryObservationReference>.Empty); - } - - var aliasSet = new HashSet<string>(StringComparer.Ordinal); - var purlSet = new HashSet<string>(StringComparer.Ordinal); - var cpeSet = new HashSet<string>(StringComparer.Ordinal); - var referenceSet = new HashSet<AdvisoryObservationReference>(); - - foreach (var observation in observations) - { - foreach (var alias in observation.Linkset.Aliases) - { - aliasSet.Add(alias); - } - - foreach (var purl in observation.Linkset.Purls) - { - purlSet.Add(purl); - } - - foreach (var cpe in observation.Linkset.Cpes) - { - cpeSet.Add(cpe); - } - - foreach (var reference in observation.Linkset.References) - { - referenceSet.Add(reference); - } - } - - return new AdvisoryObservationLinksetAggregate( - aliasSet.OrderBy(static alias => alias, StringComparer.Ordinal).ToImmutableArray(), - purlSet.OrderBy(static purl => purl, StringComparer.Ordinal).ToImmutableArray(), - cpeSet.OrderBy(static cpe => cpe, StringComparer.Ordinal).ToImmutableArray(), - referenceSet - .OrderBy(static reference => reference.Type, StringComparer.Ordinal) - .ThenBy(static reference => reference.Url, StringComparer.Ordinal) - .ToImmutableArray()); - } -} +using System.Collections.Immutable; +using System.Globalization; +using System.Text; +using StellaOps.Concelier.Models; +using StellaOps.Concelier.Models.Observations; + +namespace StellaOps.Concelier.Core.Observations; + +/// <summary> +/// Default implementation of <see cref="IAdvisoryObservationQueryService"/> that projects raw observations for overlay consumers. +/// </summary> +public sealed class AdvisoryObservationQueryService : IAdvisoryObservationQueryService +{ + private const int DefaultPageSize = 200; + private const int MaxPageSize = 500; + private readonly IAdvisoryObservationLookup _lookup; + + public AdvisoryObservationQueryService(IAdvisoryObservationLookup lookup) + { + _lookup = lookup ?? 
throw new ArgumentNullException(nameof(lookup)); + } + + public async ValueTask<AdvisoryObservationQueryResult> QueryAsync( + AdvisoryObservationQueryOptions options, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(options); + cancellationToken.ThrowIfCancellationRequested(); + + var normalizedTenant = NormalizeTenant(options.Tenant); + var normalizedObservationIds = NormalizeSet(options.ObservationIds, static value => value, StringComparer.Ordinal); + var normalizedAliases = NormalizeSet(options.Aliases, static value => value.ToLowerInvariant(), StringComparer.Ordinal); + var normalizedPurls = NormalizeSet(options.Purls, static value => value, StringComparer.Ordinal); + var normalizedCpes = NormalizeSet(options.Cpes, static value => value, StringComparer.Ordinal); + + var limit = NormalizeLimit(options.Limit); + var fetchSize = checked(limit + 1); + + var cursor = DecodeCursor(options.Cursor); + + var observations = await _lookup + .FindByFiltersAsync( + normalizedTenant, + normalizedObservationIds, + normalizedAliases, + normalizedPurls, + normalizedCpes, + cursor, + fetchSize, + cancellationToken) + .ConfigureAwait(false); + + var ordered = observations + .Where(observation => Matches(observation, normalizedObservationIds, normalizedAliases, normalizedPurls, normalizedCpes)) + .OrderByDescending(static observation => observation.CreatedAt) + .ThenBy(static observation => observation.ObservationId, StringComparer.Ordinal) + .ToImmutableArray(); + + var hasMore = ordered.Length > limit; + var page = hasMore ? ordered.Take(limit).ToImmutableArray() : ordered; + var nextCursor = hasMore ? EncodeCursor(page[^1]) : null; + + var linkset = BuildAggregateLinkset(page); + return new AdvisoryObservationQueryResult(page, linkset, nextCursor, hasMore); + } + + private static bool Matches( + AdvisoryObservation observation, + ImmutableHashSet<string> observationIds, + ImmutableHashSet<string> aliases, + ImmutableHashSet<string> purls, + ImmutableHashSet<string> cpes) + { + ArgumentNullException.ThrowIfNull(observation); + + if (observationIds.Count > 0 && !observationIds.Contains(observation.ObservationId)) + { + return false; + } + + if (aliases.Count > 0 && !observation.Linkset.Aliases.Any(aliases.Contains)) + { + return false; + } + + if (purls.Count > 0 && !observation.Linkset.Purls.Any(purls.Contains)) + { + return false; + } + + if (cpes.Count > 0 && !observation.Linkset.Cpes.Any(cpes.Contains)) + { + return false; + } + + return true; + } + + private static string NormalizeTenant(string tenant) + => Validation.EnsureNotNullOrWhiteSpace(tenant, nameof(tenant)).ToLowerInvariant(); + + private static ImmutableHashSet<string> NormalizeSet( + IEnumerable<string>? values, + Func<string, string> projector, + StringComparer comparer) + { + if (values is null) + { + return ImmutableHashSet<string>.Empty; + } + + var builder = ImmutableHashSet.CreateBuilder<string>(comparer); + foreach (var value in values) + { + var normalized = Validation.TrimToNull(value); + if (normalized is null) + { + continue; + } + + builder.Add(projector(normalized)); + } + + return builder.ToImmutable(); + } + + private static int NormalizeLimit(int? requestedLimit) + { + if (!requestedLimit.HasValue || requestedLimit.Value <= 0) + { + return DefaultPageSize; + } + + var limit = requestedLimit.Value; + if (limit > MaxPageSize) + { + return MaxPageSize; + } + + return limit; + } + + private static AdvisoryObservationCursor? DecodeCursor(string? 
cursor) + { + if (string.IsNullOrWhiteSpace(cursor)) + { + return null; + } + + try + { + var decoded = Convert.FromBase64String(cursor.Trim()); + var payload = Encoding.UTF8.GetString(decoded); + var separator = payload.IndexOf(':'); + if (separator <= 0 || separator >= payload.Length - 1) + { + throw new FormatException("Cursor is malformed."); + } + + var ticksText = payload.AsSpan(0, separator); + if (!long.TryParse(ticksText, NumberStyles.Integer, CultureInfo.InvariantCulture, out var ticks)) + { + throw new FormatException("Cursor timestamp is invalid."); + } + + var createdAt = new DateTimeOffset(DateTime.SpecifyKind(new DateTime(ticks), DateTimeKind.Utc)); + var observationId = payload[(separator + 1)..]; + if (string.IsNullOrWhiteSpace(observationId)) + { + throw new FormatException("Cursor observation id is missing."); + } + + return new AdvisoryObservationCursor(createdAt, observationId); + } + catch (FormatException) + { + throw; + } + catch (Exception ex) + { + throw new FormatException("Cursor is malformed.", ex); + } + } + + private static string? EncodeCursor(AdvisoryObservation observation) + { + if (observation is null) + { + return null; + } + + var payload = $"{observation.CreatedAt.UtcTicks.ToString(CultureInfo.InvariantCulture)}:{observation.ObservationId}"; + return Convert.ToBase64String(Encoding.UTF8.GetBytes(payload)); + } + + private static AdvisoryObservationLinksetAggregate BuildAggregateLinkset(ImmutableArray<AdvisoryObservation> observations) + { + if (observations.IsDefaultOrEmpty) + { + return new AdvisoryObservationLinksetAggregate( + ImmutableArray<string>.Empty, + ImmutableArray<string>.Empty, + ImmutableArray<string>.Empty, + ImmutableArray<AdvisoryObservationReference>.Empty); + } + + var aliasSet = new HashSet<string>(StringComparer.Ordinal); + var purlSet = new HashSet<string>(StringComparer.Ordinal); + var cpeSet = new HashSet<string>(StringComparer.Ordinal); + var referenceSet = new HashSet<AdvisoryObservationReference>(); + + foreach (var observation in observations) + { + foreach (var alias in observation.Linkset.Aliases) + { + aliasSet.Add(alias); + } + + foreach (var purl in observation.Linkset.Purls) + { + purlSet.Add(purl); + } + + foreach (var cpe in observation.Linkset.Cpes) + { + cpeSet.Add(cpe); + } + + foreach (var reference in observation.Linkset.References) + { + referenceSet.Add(reference); + } + } + + return new AdvisoryObservationLinksetAggregate( + aliasSet.OrderBy(static alias => alias, StringComparer.Ordinal).ToImmutableArray(), + purlSet.OrderBy(static purl => purl, StringComparer.Ordinal).ToImmutableArray(), + cpeSet.OrderBy(static cpe => cpe, StringComparer.Ordinal).ToImmutableArray(), + referenceSet + .OrderBy(static reference => reference.Type, StringComparer.Ordinal) + .ThenBy(static reference => reference.Url, StringComparer.Ordinal) + .ToImmutableArray()); + } +} diff --git a/src/StellaOps.Concelier.Core/Observations/IAdvisoryObservationLookup.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Observations/IAdvisoryObservationLookup.cs similarity index 97% rename from src/StellaOps.Concelier.Core/Observations/IAdvisoryObservationLookup.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Observations/IAdvisoryObservationLookup.cs index 79d1d635..2a1c9872 100644 --- a/src/StellaOps.Concelier.Core/Observations/IAdvisoryObservationLookup.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Observations/IAdvisoryObservationLookup.cs @@ -1,39 +1,39 @@ -using StellaOps.Concelier.Models.Observations; - 
-namespace StellaOps.Concelier.Core.Observations;
-
-/// <summary>
-/// Abstraction over the advisory observation persistence layer used for overlay queries.
-/// </summary>
-public interface IAdvisoryObservationLookup
-{
-    /// <summary>
-    /// Lists all advisory observations for the provided tenant.
-    /// </summary>
-    /// <param name="tenant">Tenant identifier (case-insensitive).</param>
-    /// <param name="cancellationToken">A cancellation token.</param>
-    ValueTask<IReadOnlyList<AdvisoryObservation>> ListByTenantAsync(
-        string tenant,
-        CancellationToken cancellationToken);
-
-    /// <summary>
-    /// Finds advisory observations for a tenant that match the supplied filter criteria.
-    /// </summary>
-    /// <param name="tenant">Tenant identifier (case-insensitive).</param>
-    /// <param name="observationIds">Normalized observation identifiers to match against.</param>
-    /// <param name="aliases">Normalized alias values to match against.</param>
-    /// <param name="purls">Normalized Package URL values to match against.</param>
-    /// <param name="cpes">Normalized CPE values to match against.</param>
-    /// <param name="cursor">Optional cursor describing the last element retrieved in the previous page.</param>
-    /// <param name="limit">Maximum number of documents to return. Must be positive.</param>
-    /// <param name="cancellationToken">A cancellation token.</param>
-    ValueTask<IReadOnlyList<AdvisoryObservation>> FindByFiltersAsync(
-        string tenant,
-        IReadOnlyCollection<string> observationIds,
-        IReadOnlyCollection<string> aliases,
-        IReadOnlyCollection<string> purls,
-        IReadOnlyCollection<string> cpes,
-        AdvisoryObservationCursor? cursor,
-        int limit,
-        CancellationToken cancellationToken);
-}
+using StellaOps.Concelier.Models.Observations;
+
+namespace StellaOps.Concelier.Core.Observations;
+
+/// <summary>
+/// Abstraction over the advisory observation persistence layer used for overlay queries.
+/// </summary>
+public interface IAdvisoryObservationLookup
+{
+    /// <summary>
+    /// Lists all advisory observations for the provided tenant.
+    /// </summary>
+    /// <param name="tenant">Tenant identifier (case-insensitive).</param>
+    /// <param name="cancellationToken">A cancellation token.</param>
+    ValueTask<IReadOnlyList<AdvisoryObservation>> ListByTenantAsync(
+        string tenant,
+        CancellationToken cancellationToken);
+
+    /// <summary>
+    /// Finds advisory observations for a tenant that match the supplied filter criteria.
+    /// </summary>
+    /// <param name="tenant">Tenant identifier (case-insensitive).</param>
+    /// <param name="observationIds">Normalized observation identifiers to match against.</param>
+    /// <param name="aliases">Normalized alias values to match against.</param>
+    /// <param name="purls">Normalized Package URL values to match against.</param>
+    /// <param name="cpes">Normalized CPE values to match against.</param>
+    /// <param name="cursor">Optional cursor describing the last element retrieved in the previous page.</param>
+    /// <param name="limit">Maximum number of documents to return. Must be positive.</param>
+    /// <param name="cancellationToken">A cancellation token.</param>
+    ValueTask<IReadOnlyList<AdvisoryObservation>> FindByFiltersAsync(
+        string tenant,
+        IReadOnlyCollection<string> observationIds,
+        IReadOnlyCollection<string> aliases,
+        IReadOnlyCollection<string> purls,
+        IReadOnlyCollection<string> cpes,
+        AdvisoryObservationCursor? cursor,
+        int limit,
+        CancellationToken cancellationToken);
+}
diff --git a/src/StellaOps.Concelier.Core/Observations/IAdvisoryObservationQueryService.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Observations/IAdvisoryObservationQueryService.cs
similarity index 97%
rename from src/StellaOps.Concelier.Core/Observations/IAdvisoryObservationQueryService.cs
rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Observations/IAdvisoryObservationQueryService.cs
index 63c530f5..5b958df9 100644
--- a/src/StellaOps.Concelier.Core/Observations/IAdvisoryObservationQueryService.cs
+++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Observations/IAdvisoryObservationQueryService.cs
@@ -1,16 +1,16 @@
-namespace StellaOps.Concelier.Core.Observations;
-
-/// <summary>
-/// Provides read-only access to advisory observations for overlay services.
-/// </summary>
-public interface IAdvisoryObservationQueryService
-{
-    /// <summary>
-    /// Queries advisory observations scoped by tenant and optional linkset filters.
-    /// </summary>
-    /// <param name="options">Query options defining tenant and filter criteria.</param>
-    /// <param name="cancellationToken">A cancellation token.</param>
-    ValueTask<AdvisoryObservationQueryResult> QueryAsync(
-        AdvisoryObservationQueryOptions options,
-        CancellationToken cancellationToken);
-}
+namespace StellaOps.Concelier.Core.Observations;
+
+/// <summary>
+/// Provides read-only access to advisory observations for overlay services.
+/// </summary>
+public interface IAdvisoryObservationQueryService
+{
+    /// <summary>
+    /// Queries advisory observations scoped by tenant and optional linkset filters.
+    /// </summary>
+    /// <param name="options">Query options defining tenant and filter criteria.</param>
+    /// <param name="cancellationToken">A cancellation token.</param>
+    ValueTask<AdvisoryObservationQueryResult> QueryAsync(
+        AdvisoryObservationQueryOptions options,
+        CancellationToken cancellationToken);
+}
diff --git a/src/StellaOps.Concelier.Core/Properties/AssemblyInfo.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Properties/AssemblyInfo.cs
similarity index 97%
rename from src/StellaOps.Concelier.Core/Properties/AssemblyInfo.cs
rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Properties/AssemblyInfo.cs
index 99752f7d..56366bc1 100644
--- a/src/StellaOps.Concelier.Core/Properties/AssemblyInfo.cs
+++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Properties/AssemblyInfo.cs
@@ -1,3 +1,3 @@
-using System.Runtime.CompilerServices;
-
-[assembly: InternalsVisibleTo("StellaOps.Concelier.Core.Tests")]
+using System.Runtime.CompilerServices;
+
+[assembly: InternalsVisibleTo("StellaOps.Concelier.Core.Tests")]
diff --git a/src/StellaOps.Concelier.Core/Raw/AdvisoryRawQueryOptions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Raw/AdvisoryRawQueryOptions.cs
similarity index 96%
rename from src/StellaOps.Concelier.Core/Raw/AdvisoryRawQueryOptions.cs
rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Raw/AdvisoryRawQueryOptions.cs
index e9ead2ad..f7e6b156 100644
--- a/src/StellaOps.Concelier.Core/Raw/AdvisoryRawQueryOptions.cs
+++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Raw/AdvisoryRawQueryOptions.cs
@@ -1,83 +1,83 @@
-using System.Collections.Immutable;
-
-namespace StellaOps.Concelier.Core.Raw;
-
-/// <summary>
-/// Options controlling advisory raw document queries.
-/// </summary> -public sealed record AdvisoryRawQueryOptions -{ - private int _limit = 50; - - public AdvisoryRawQueryOptions(string tenant) - { - Tenant = NormalizeTenant(tenant); - } - - /// <summary> - /// Tenant identifier (normalized). - /// </summary> - public string Tenant { get; } - - /// <summary> - /// Optional set of source vendors to filter by. - /// </summary> - public ImmutableArray<string> Vendors { get; init; } = ImmutableArray<string>.Empty; - - /// <summary> - /// Optional set of upstream identifiers to filter by. - /// </summary> - public ImmutableArray<string> UpstreamIds { get; init; } = ImmutableArray<string>.Empty; - - /// <summary> - /// Optional set of alias identifiers (CVE/GHSA/etc.) to filter by. - /// </summary> - public ImmutableArray<string> Aliases { get; init; } = ImmutableArray<string>.Empty; - - /// <summary> - /// Optional set of Package URLs to filter by. - /// </summary> - public ImmutableArray<string> PackageUrls { get; init; } = ImmutableArray<string>.Empty; - - /// <summary> - /// Optional set of content hashes to filter by. - /// </summary> - public ImmutableArray<string> ContentHashes { get; init; } = ImmutableArray<string>.Empty; - - /// <summary> - /// Optional lower bound on ingest time. - /// </summary> - public DateTimeOffset? Since { get; init; } - - /// <summary> - /// Maximum number of records to return (defaults to 50, capped at 200). - /// </summary> - public int Limit - { - get => _limit; - init => _limit = Math.Clamp(value, 1, 200); - } - - /// <summary> - /// Pagination cursor provided by previous result. - /// </summary> - public string? Cursor { get; init; } - - private static string NormalizeTenant(string tenant) - { - if (string.IsNullOrWhiteSpace(tenant)) - { - throw new ArgumentException("Tenant must be provided.", nameof(tenant)); - } - - return tenant.Trim().ToLowerInvariant(); - } -} - -/// <summary> -/// Query response containing raw advisory records plus paging metadata. -/// </summary> -public sealed record AdvisoryRawQueryResult( - IReadOnlyList<AdvisoryRawRecord> Records, - string? NextCursor, - bool HasMore); +using System.Collections.Immutable; + +namespace StellaOps.Concelier.Core.Raw; + +/// <summary> +/// Options controlling advisory raw document queries. +/// </summary> +public sealed record AdvisoryRawQueryOptions +{ + private int _limit = 50; + + public AdvisoryRawQueryOptions(string tenant) + { + Tenant = NormalizeTenant(tenant); + } + + /// <summary> + /// Tenant identifier (normalized). + /// </summary> + public string Tenant { get; } + + /// <summary> + /// Optional set of source vendors to filter by. + /// </summary> + public ImmutableArray<string> Vendors { get; init; } = ImmutableArray<string>.Empty; + + /// <summary> + /// Optional set of upstream identifiers to filter by. + /// </summary> + public ImmutableArray<string> UpstreamIds { get; init; } = ImmutableArray<string>.Empty; + + /// <summary> + /// Optional set of alias identifiers (CVE/GHSA/etc.) to filter by. + /// </summary> + public ImmutableArray<string> Aliases { get; init; } = ImmutableArray<string>.Empty; + + /// <summary> + /// Optional set of Package URLs to filter by. + /// </summary> + public ImmutableArray<string> PackageUrls { get; init; } = ImmutableArray<string>.Empty; + + /// <summary> + /// Optional set of content hashes to filter by. + /// </summary> + public ImmutableArray<string> ContentHashes { get; init; } = ImmutableArray<string>.Empty; + + /// <summary> + /// Optional lower bound on ingest time. 
+    /// </summary>
+    public DateTimeOffset? Since { get; init; }
+
+    /// <summary>
+    /// Maximum number of records to return (defaults to 50, capped at 200).
+    /// </summary>
+    public int Limit
+    {
+        get => _limit;
+        init => _limit = Math.Clamp(value, 1, 200);
+    }
+
+    /// <summary>
+    /// Pagination cursor provided by previous result.
+    /// </summary>
+    public string? Cursor { get; init; }
+
+    private static string NormalizeTenant(string tenant)
+    {
+        if (string.IsNullOrWhiteSpace(tenant))
+        {
+            throw new ArgumentException("Tenant must be provided.", nameof(tenant));
+        }
+
+        return tenant.Trim().ToLowerInvariant();
+    }
+}
+
+/// <summary>
+/// Query response containing raw advisory records plus paging metadata.
+/// </summary>
+public sealed record AdvisoryRawQueryResult(
+    IReadOnlyList<AdvisoryRawRecord> Records,
+    string? NextCursor,
+    bool HasMore);
diff --git a/src/StellaOps.Concelier.Core/Raw/AdvisoryRawRecord.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Raw/AdvisoryRawRecord.cs
similarity index 96%
rename from src/StellaOps.Concelier.Core/Raw/AdvisoryRawRecord.cs
rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Raw/AdvisoryRawRecord.cs
index 9dce85a6..0cd202a6 100644
--- a/src/StellaOps.Concelier.Core/Raw/AdvisoryRawRecord.cs
+++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Raw/AdvisoryRawRecord.cs
@@ -1,19 +1,19 @@
-using StellaOps.Concelier.RawModels;
-
-namespace StellaOps.Concelier.Core.Raw;
-
-/// <summary>
-/// Represents a stored advisory raw document together with ingestion metadata.
-/// </summary>
-public sealed record AdvisoryRawRecord(
-    string Id,
-    AdvisoryRawDocument Document,
-    DateTimeOffset IngestedAt,
-    DateTimeOffset CreatedAt);
-
-/// <summary>
-/// Result produced when attempting to append a raw advisory document.
-/// </summary>
-public sealed record AdvisoryRawUpsertResult(
-    bool Inserted,
-    AdvisoryRawRecord Record);
+using StellaOps.Concelier.RawModels;
+
+namespace StellaOps.Concelier.Core.Raw;
+
+/// <summary>
+/// Represents a stored advisory raw document together with ingestion metadata.
+/// </summary>
+public sealed record AdvisoryRawRecord(
+    string Id,
+    AdvisoryRawDocument Document,
+    DateTimeOffset IngestedAt,
+    DateTimeOffset CreatedAt);
+
+/// <summary>
+/// Result produced when attempting to append a raw advisory document.
+/// </summary> +public sealed record AdvisoryRawUpsertResult( + bool Inserted, + AdvisoryRawRecord Record); diff --git a/src/StellaOps.Concelier.Core/Raw/AdvisoryRawService.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Raw/AdvisoryRawService.cs similarity index 97% rename from src/StellaOps.Concelier.Core/Raw/AdvisoryRawService.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Raw/AdvisoryRawService.cs index f87b5713..a6f90b5d 100644 --- a/src/StellaOps.Concelier.Core/Raw/AdvisoryRawService.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Raw/AdvisoryRawService.cs @@ -1,439 +1,439 @@ -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Globalization; -using System.Text; -using System.Text.Json; -using System.Linq; -using Microsoft.Extensions.Logging; -using StellaOps.Aoc; -using StellaOps.Concelier.Core.Aoc; -using StellaOps.Concelier.Core.Linksets; -using StellaOps.Concelier.RawModels; - -namespace StellaOps.Concelier.Core.Raw; - -internal sealed class AdvisoryRawService : IAdvisoryRawService -{ - private static readonly ImmutableArray<string> EmptyArray = ImmutableArray<string>.Empty; - - private readonly IAdvisoryRawRepository _repository; - private readonly IAdvisoryRawWriteGuard _writeGuard; - private readonly IAocGuard _aocGuard; - private readonly IAdvisoryLinksetMapper _linksetMapper; - private readonly TimeProvider _timeProvider; - private readonly ILogger<AdvisoryRawService> _logger; - - public AdvisoryRawService( - IAdvisoryRawRepository repository, - IAdvisoryRawWriteGuard writeGuard, - IAocGuard aocGuard, - IAdvisoryLinksetMapper linksetMapper, - TimeProvider timeProvider, - ILogger<AdvisoryRawService> logger) - { - _repository = repository ?? throw new ArgumentNullException(nameof(repository)); - _writeGuard = writeGuard ?? throw new ArgumentNullException(nameof(writeGuard)); - _aocGuard = aocGuard ?? throw new ArgumentNullException(nameof(aocGuard)); - _linksetMapper = linksetMapper ?? throw new ArgumentNullException(nameof(linksetMapper)); - _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); - _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - } - - public async Task<AdvisoryRawUpsertResult> IngestAsync(AdvisoryRawDocument document, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(document); - - var clientSupersedes = string.IsNullOrWhiteSpace(document.Supersedes) - ? null - : document.Supersedes.Trim(); - - var normalized = Normalize(document); - var enriched = normalized with { Linkset = _linksetMapper.Map(normalized) }; - - if (!string.IsNullOrEmpty(clientSupersedes)) - { - _logger.LogWarning( - "Ignoring client-supplied supersedes pointer for advisory_raw tenant={Tenant} source={Vendor} upstream={UpstreamId} pointer={Supersedes}", - enriched.Tenant, - enriched.Source.Vendor, - enriched.Upstream.UpstreamId, - clientSupersedes); - } - - _writeGuard.EnsureValid(enriched); - - var result = await _repository.UpsertAsync(enriched, cancellationToken).ConfigureAwait(false); - if (result.Inserted) - { - _logger.LogInformation( - "Ingested advisory_raw document id={DocumentId} tenant={Tenant} source={Vendor} upstream={UpstreamId} hash={Hash} supersedes={Supersedes}", - result.Record.Id, - result.Record.Document.Tenant, - result.Record.Document.Source.Vendor, - result.Record.Document.Upstream.UpstreamId, - result.Record.Document.Upstream.ContentHash, - string.IsNullOrWhiteSpace(result.Record.Document.Supersedes) - ? 
"(none)" - : result.Record.Document.Supersedes); - } - else - { - _logger.LogDebug( - "Skipped advisory_raw duplicate tenant={Tenant} source={Vendor} upstream={UpstreamId} hash={Hash}", - result.Record.Document.Tenant, - result.Record.Document.Source.Vendor, - result.Record.Document.Upstream.UpstreamId, - result.Record.Document.Upstream.ContentHash); - } - - return result; - } - - public Task<AdvisoryRawRecord?> FindByIdAsync(string tenant, string id, CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenant); - ArgumentException.ThrowIfNullOrWhiteSpace(id); - - var normalizedTenant = tenant.Trim().ToLowerInvariant(); - var normalizedId = id.Trim(); - return _repository.FindByIdAsync(normalizedTenant, normalizedId, cancellationToken); - } - - public Task<AdvisoryRawQueryResult> QueryAsync(AdvisoryRawQueryOptions options, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(options); - return _repository.QueryAsync(options, cancellationToken); - } - - public async Task<AdvisoryRawVerificationResult> VerifyAsync(AdvisoryRawVerificationRequest request, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(request); - var tenant = NormalizeTenant(request.Tenant); - var windowStart = request.Since.ToUniversalTime(); - var windowEnd = request.Until.ToUniversalTime(); - if (windowEnd < windowStart) - { - throw new ArgumentException("Verification window end must be greater than or equal to the start.", nameof(request)); - } - - var inclusionLimit = request.Limit <= 0 ? 0 : request.Limit; - var sourceFilter = request.SourceVendors ?? Array.Empty<string>(); - var normalizedSources = sourceFilter.Count == 0 - ? EmptyArray - : sourceFilter.Select(NormalizeSourceVendor).Distinct(StringComparer.Ordinal).ToImmutableArray(); - - var codeFilter = request.Codes ?? Array.Empty<string>(); - var normalizedCodes = codeFilter.Count == 0 - ? 
EmptyArray - : codeFilter - .Where(static code => !string.IsNullOrWhiteSpace(code)) - .Select(static code => code.Trim().ToUpperInvariant()) - .Distinct(StringComparer.Ordinal) - .ToImmutableArray(); - - var records = await _repository - .ListForVerificationAsync(tenant, windowStart, windowEnd, normalizedSources, cancellationToken) - .ConfigureAwait(false); - - var now = _timeProvider.GetUtcNow(); - var violations = new Dictionary<string, VerificationAggregation>(StringComparer.Ordinal); - var checkedCount = 0; - var totalExamples = 0; - - foreach (var record in records) - { - cancellationToken.ThrowIfCancellationRequested(); - checkedCount++; - - AocGuardResult guardResult; - try - { - guardResult = _aocGuard.Validate(ToJsonElement(record.Document)); - } - catch (Exception ex) - { - _logger.LogError( - ex, - "AOC guard threw unexpected exception while verifying advisory_raw document id={DocumentId}", - record.Id); - continue; - } - - if (guardResult.IsValid || guardResult.Violations.IsDefaultOrEmpty) - { - continue; - } - - foreach (var violation in guardResult.Violations) - { - if (!normalizedCodes.IsDefaultOrEmpty && - !normalizedCodes.Contains(violation.ErrorCode.ToUpperInvariant())) - { - continue; - } - - var key = violation.ErrorCode; - if (!violations.TryGetValue(key, out var aggregation)) - { - aggregation = new VerificationAggregation(key); - violations.Add(key, aggregation); - } - - aggregation.Count++; - if (inclusionLimit <= 0 || totalExamples >= inclusionLimit) - { - aggregation.Truncated = true; - continue; - } - - if (aggregation.TryAddExample(CreateExample(record, violation))) - { - totalExamples++; - } - } - } - - var orderedViolations = violations.Values - .OrderByDescending(static v => v.Count) - .ThenBy(static v => v.Code, StringComparer.Ordinal) - .Select(static v => new AdvisoryRawVerificationViolation( - v.Code, - v.Count, - v.Examples.ToArray())) - .ToArray(); - - var truncated = orderedViolations.Any(static v => v.Examples.Count > 0) && totalExamples >= inclusionLimit && inclusionLimit > 0; - - return new AdvisoryRawVerificationResult( - tenant, - windowStart, - windowEnd > windowStart ? 
windowEnd : now, - checkedCount, - orderedViolations, - truncated || violations.Values.Any(static v => v.Truncated)); - } - - private static AdvisoryRawViolationExample CreateExample(AdvisoryRawRecord record, AocViolation violation) - { - return new AdvisoryRawViolationExample( - record.Document.Source.Vendor, - record.Id, - record.Document.Upstream.ContentHash, - violation.Path); - } - - private static string NormalizeTenant(string tenant) - { - if (string.IsNullOrWhiteSpace(tenant)) - { - throw new ArgumentException("Tenant must be provided.", nameof(tenant)); - } - - return tenant.Trim().ToLowerInvariant(); - } - - private static string NormalizeSourceVendor(string vendor) - { - if (string.IsNullOrWhiteSpace(vendor)) - { - return string.Empty; - } - - return vendor.Trim().ToLowerInvariant(); - } - - private AdvisoryRawDocument Normalize(AdvisoryRawDocument document) - { - var tenant = NormalizeTenant(document.Tenant); - var source = NormalizeSource(document.Source); - var upstream = NormalizeUpstream(document.Upstream); - var content = NormalizeContent(document.Content); - var identifiers = NormalizeIdentifiers(document.Identifiers); - var linkset = NormalizeLinkset(document.Linkset); - - return new AdvisoryRawDocument( - tenant, - source, - upstream, - content, - identifiers, - linkset, - Supersedes: null); - } - - private static RawSourceMetadata NormalizeSource(RawSourceMetadata source) - { - if (source is null) - { - throw new ArgumentNullException(nameof(source)); - } - - return new RawSourceMetadata( - NormalizeSourceVendor(source.Vendor), - source.Connector?.Trim() ?? string.Empty, - source.ConnectorVersion?.Trim() ?? "unknown", - string.IsNullOrWhiteSpace(source.Stream) ? null : source.Stream.Trim()); - } - - private static RawUpstreamMetadata NormalizeUpstream(RawUpstreamMetadata upstream) - { - if (upstream is null) - { - throw new ArgumentNullException(nameof(upstream)); - } - - var provenanceBuilder = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal); - if (upstream.Provenance is not null) - { - foreach (var entry in upstream.Provenance) - { - if (string.IsNullOrWhiteSpace(entry.Key)) - { - continue; - } - - var key = entry.Key.Trim(); - var value = entry.Value?.Trim() ?? string.Empty; - provenanceBuilder[key] = value; - } - } - - var signature = NormalizeSignature(upstream.Signature); - - return new RawUpstreamMetadata( - upstream.UpstreamId?.Trim() ?? string.Empty, - string.IsNullOrWhiteSpace(upstream.DocumentVersion) ? null : upstream.DocumentVersion.Trim(), - upstream.RetrievedAt.ToUniversalTime(), - upstream.ContentHash?.Trim() ?? string.Empty, - signature, - provenanceBuilder.ToImmutable()); - } - - private static RawSignatureMetadata NormalizeSignature(RawSignatureMetadata signature) - { - return new RawSignatureMetadata( - signature.Present, - string.IsNullOrWhiteSpace(signature.Format) ? null : signature.Format.Trim(), - string.IsNullOrWhiteSpace(signature.KeyId) ? null : signature.KeyId.Trim(), - string.IsNullOrWhiteSpace(signature.Signature) ? null : signature.Signature.Trim(), - string.IsNullOrWhiteSpace(signature.Certificate) ? null : signature.Certificate.Trim(), - string.IsNullOrWhiteSpace(signature.Digest) ? null : signature.Digest.Trim()); - } - - private static RawContent NormalizeContent(RawContent content) - { - if (content is null) - { - throw new ArgumentNullException(nameof(content)); - } - - var clonedRaw = content.Raw.Clone(); - - return new RawContent( - content.Format?.Trim() ?? 
string.Empty, - string.IsNullOrWhiteSpace(content.SpecVersion) ? null : content.SpecVersion.Trim(), - clonedRaw, - string.IsNullOrWhiteSpace(content.Encoding) ? null : content.Encoding.Trim()); - } - - private static RawIdentifiers NormalizeIdentifiers(RawIdentifiers identifiers) - { - var normalizedAliases = identifiers.Aliases - .Where(static alias => !string.IsNullOrWhiteSpace(alias)) - .Select(static alias => alias.Trim()) - .Distinct(StringComparer.OrdinalIgnoreCase) - .ToImmutableArray(); - - return new RawIdentifiers( - normalizedAliases, - identifiers.PrimaryId?.Trim() ?? string.Empty); - } - - private static RawLinkset NormalizeLinkset(RawLinkset linkset) - { - return new RawLinkset - { - Aliases = NormalizeStringArray(linkset.Aliases, StringComparer.OrdinalIgnoreCase), - PackageUrls = NormalizeStringArray(linkset.PackageUrls, StringComparer.Ordinal), - Cpes = NormalizeStringArray(linkset.Cpes, StringComparer.Ordinal), - References = NormalizeReferences(linkset.References), - ReconciledFrom = NormalizeStringArray(linkset.ReconciledFrom, StringComparer.Ordinal), - Notes = linkset.Notes ?? ImmutableDictionary<string, string>.Empty, - }; - } - - private static ImmutableArray<string> NormalizeStringArray(ImmutableArray<string> values, StringComparer comparer) - { - if (values.IsDefaultOrEmpty) - { - return EmptyArray; - } - - return values - .Where(static value => !string.IsNullOrWhiteSpace(value)) - .Select(static value => value.Trim()) - .Distinct(comparer) - .OrderBy(static value => value, comparer) - .ToImmutableArray(); - } - - private static ImmutableArray<RawReference> NormalizeReferences(ImmutableArray<RawReference> references) - { - if (references.IsDefaultOrEmpty) - { - return ImmutableArray<RawReference>.Empty; - } - - var builder = ImmutableArray.CreateBuilder<RawReference>(); - foreach (var reference in references) - { - if (string.IsNullOrWhiteSpace(reference.Type) || string.IsNullOrWhiteSpace(reference.Url)) - { - continue; - } - - builder.Add(new RawReference( - reference.Type.Trim(), - reference.Url.Trim(), - string.IsNullOrWhiteSpace(reference.Source) ? 
null : reference.Source.Trim())); - } - - return builder.ToImmutable(); - } - - private JsonElement ToJsonElement(AdvisoryRawDocument document) - { - var json = System.Text.Json.JsonSerializer.Serialize(document); - using var jsonDocument = System.Text.Json.JsonDocument.Parse(json); - return jsonDocument.RootElement.Clone(); - } - - private sealed class VerificationAggregation - { - private readonly List<AdvisoryRawViolationExample> _examples = new(); - - public VerificationAggregation(string code) - { - Code = code; - } - - public string Code { get; } - - public int Count { get; set; } - - public bool Truncated { get; set; } - - public IReadOnlyList<AdvisoryRawViolationExample> Examples => _examples; - - public bool TryAddExample(AdvisoryRawViolationExample example) - { - if (Truncated) - { - return false; - } - - _examples.Add(example); - return true; - } - } -} +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Globalization; +using System.Text; +using System.Text.Json; +using System.Linq; +using Microsoft.Extensions.Logging; +using StellaOps.Aoc; +using StellaOps.Concelier.Core.Aoc; +using StellaOps.Concelier.Core.Linksets; +using StellaOps.Concelier.RawModels; + +namespace StellaOps.Concelier.Core.Raw; + +internal sealed class AdvisoryRawService : IAdvisoryRawService +{ + private static readonly ImmutableArray<string> EmptyArray = ImmutableArray<string>.Empty; + + private readonly IAdvisoryRawRepository _repository; + private readonly IAdvisoryRawWriteGuard _writeGuard; + private readonly IAocGuard _aocGuard; + private readonly IAdvisoryLinksetMapper _linksetMapper; + private readonly TimeProvider _timeProvider; + private readonly ILogger<AdvisoryRawService> _logger; + + public AdvisoryRawService( + IAdvisoryRawRepository repository, + IAdvisoryRawWriteGuard writeGuard, + IAocGuard aocGuard, + IAdvisoryLinksetMapper linksetMapper, + TimeProvider timeProvider, + ILogger<AdvisoryRawService> logger) + { + _repository = repository ?? throw new ArgumentNullException(nameof(repository)); + _writeGuard = writeGuard ?? throw new ArgumentNullException(nameof(writeGuard)); + _aocGuard = aocGuard ?? throw new ArgumentNullException(nameof(aocGuard)); + _linksetMapper = linksetMapper ?? throw new ArgumentNullException(nameof(linksetMapper)); + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public async Task<AdvisoryRawUpsertResult> IngestAsync(AdvisoryRawDocument document, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(document); + + var clientSupersedes = string.IsNullOrWhiteSpace(document.Supersedes) + ? 
null + : document.Supersedes.Trim(); + + var normalized = Normalize(document); + var enriched = normalized with { Linkset = _linksetMapper.Map(normalized) }; + + if (!string.IsNullOrEmpty(clientSupersedes)) + { + _logger.LogWarning( + "Ignoring client-supplied supersedes pointer for advisory_raw tenant={Tenant} source={Vendor} upstream={UpstreamId} pointer={Supersedes}", + enriched.Tenant, + enriched.Source.Vendor, + enriched.Upstream.UpstreamId, + clientSupersedes); + } + + _writeGuard.EnsureValid(enriched); + + var result = await _repository.UpsertAsync(enriched, cancellationToken).ConfigureAwait(false); + if (result.Inserted) + { + _logger.LogInformation( + "Ingested advisory_raw document id={DocumentId} tenant={Tenant} source={Vendor} upstream={UpstreamId} hash={Hash} supersedes={Supersedes}", + result.Record.Id, + result.Record.Document.Tenant, + result.Record.Document.Source.Vendor, + result.Record.Document.Upstream.UpstreamId, + result.Record.Document.Upstream.ContentHash, + string.IsNullOrWhiteSpace(result.Record.Document.Supersedes) + ? "(none)" + : result.Record.Document.Supersedes); + } + else + { + _logger.LogDebug( + "Skipped advisory_raw duplicate tenant={Tenant} source={Vendor} upstream={UpstreamId} hash={Hash}", + result.Record.Document.Tenant, + result.Record.Document.Source.Vendor, + result.Record.Document.Upstream.UpstreamId, + result.Record.Document.Upstream.ContentHash); + } + + return result; + } + + public Task<AdvisoryRawRecord?> FindByIdAsync(string tenant, string id, CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenant); + ArgumentException.ThrowIfNullOrWhiteSpace(id); + + var normalizedTenant = tenant.Trim().ToLowerInvariant(); + var normalizedId = id.Trim(); + return _repository.FindByIdAsync(normalizedTenant, normalizedId, cancellationToken); + } + + public Task<AdvisoryRawQueryResult> QueryAsync(AdvisoryRawQueryOptions options, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(options); + return _repository.QueryAsync(options, cancellationToken); + } + + public async Task<AdvisoryRawVerificationResult> VerifyAsync(AdvisoryRawVerificationRequest request, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(request); + var tenant = NormalizeTenant(request.Tenant); + var windowStart = request.Since.ToUniversalTime(); + var windowEnd = request.Until.ToUniversalTime(); + if (windowEnd < windowStart) + { + throw new ArgumentException("Verification window end must be greater than or equal to the start.", nameof(request)); + } + + var inclusionLimit = request.Limit <= 0 ? 0 : request.Limit; + var sourceFilter = request.SourceVendors ?? Array.Empty<string>(); + var normalizedSources = sourceFilter.Count == 0 + ? EmptyArray + : sourceFilter.Select(NormalizeSourceVendor).Distinct(StringComparer.Ordinal).ToImmutableArray(); + + var codeFilter = request.Codes ?? Array.Empty<string>(); + var normalizedCodes = codeFilter.Count == 0 + ? 
EmptyArray + : codeFilter + .Where(static code => !string.IsNullOrWhiteSpace(code)) + .Select(static code => code.Trim().ToUpperInvariant()) + .Distinct(StringComparer.Ordinal) + .ToImmutableArray(); + + var records = await _repository + .ListForVerificationAsync(tenant, windowStart, windowEnd, normalizedSources, cancellationToken) + .ConfigureAwait(false); + + var now = _timeProvider.GetUtcNow(); + var violations = new Dictionary<string, VerificationAggregation>(StringComparer.Ordinal); + var checkedCount = 0; + var totalExamples = 0; + + foreach (var record in records) + { + cancellationToken.ThrowIfCancellationRequested(); + checkedCount++; + + AocGuardResult guardResult; + try + { + guardResult = _aocGuard.Validate(ToJsonElement(record.Document)); + } + catch (Exception ex) + { + _logger.LogError( + ex, + "AOC guard threw unexpected exception while verifying advisory_raw document id={DocumentId}", + record.Id); + continue; + } + + if (guardResult.IsValid || guardResult.Violations.IsDefaultOrEmpty) + { + continue; + } + + foreach (var violation in guardResult.Violations) + { + if (!normalizedCodes.IsDefaultOrEmpty && + !normalizedCodes.Contains(violation.ErrorCode.ToUpperInvariant())) + { + continue; + } + + var key = violation.ErrorCode; + if (!violations.TryGetValue(key, out var aggregation)) + { + aggregation = new VerificationAggregation(key); + violations.Add(key, aggregation); + } + + aggregation.Count++; + if (inclusionLimit <= 0 || totalExamples >= inclusionLimit) + { + aggregation.Truncated = true; + continue; + } + + if (aggregation.TryAddExample(CreateExample(record, violation))) + { + totalExamples++; + } + } + } + + var orderedViolations = violations.Values + .OrderByDescending(static v => v.Count) + .ThenBy(static v => v.Code, StringComparer.Ordinal) + .Select(static v => new AdvisoryRawVerificationViolation( + v.Code, + v.Count, + v.Examples.ToArray())) + .ToArray(); + + var truncated = orderedViolations.Any(static v => v.Examples.Count > 0) && totalExamples >= inclusionLimit && inclusionLimit > 0; + + return new AdvisoryRawVerificationResult( + tenant, + windowStart, + windowEnd > windowStart ? 
windowEnd : now, + checkedCount, + orderedViolations, + truncated || violations.Values.Any(static v => v.Truncated)); + } + + private static AdvisoryRawViolationExample CreateExample(AdvisoryRawRecord record, AocViolation violation) + { + return new AdvisoryRawViolationExample( + record.Document.Source.Vendor, + record.Id, + record.Document.Upstream.ContentHash, + violation.Path); + } + + private static string NormalizeTenant(string tenant) + { + if (string.IsNullOrWhiteSpace(tenant)) + { + throw new ArgumentException("Tenant must be provided.", nameof(tenant)); + } + + return tenant.Trim().ToLowerInvariant(); + } + + private static string NormalizeSourceVendor(string vendor) + { + if (string.IsNullOrWhiteSpace(vendor)) + { + return string.Empty; + } + + return vendor.Trim().ToLowerInvariant(); + } + + private AdvisoryRawDocument Normalize(AdvisoryRawDocument document) + { + var tenant = NormalizeTenant(document.Tenant); + var source = NormalizeSource(document.Source); + var upstream = NormalizeUpstream(document.Upstream); + var content = NormalizeContent(document.Content); + var identifiers = NormalizeIdentifiers(document.Identifiers); + var linkset = NormalizeLinkset(document.Linkset); + + return new AdvisoryRawDocument( + tenant, + source, + upstream, + content, + identifiers, + linkset, + Supersedes: null); + } + + private static RawSourceMetadata NormalizeSource(RawSourceMetadata source) + { + if (source is null) + { + throw new ArgumentNullException(nameof(source)); + } + + return new RawSourceMetadata( + NormalizeSourceVendor(source.Vendor), + source.Connector?.Trim() ?? string.Empty, + source.ConnectorVersion?.Trim() ?? "unknown", + string.IsNullOrWhiteSpace(source.Stream) ? null : source.Stream.Trim()); + } + + private static RawUpstreamMetadata NormalizeUpstream(RawUpstreamMetadata upstream) + { + if (upstream is null) + { + throw new ArgumentNullException(nameof(upstream)); + } + + var provenanceBuilder = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal); + if (upstream.Provenance is not null) + { + foreach (var entry in upstream.Provenance) + { + if (string.IsNullOrWhiteSpace(entry.Key)) + { + continue; + } + + var key = entry.Key.Trim(); + var value = entry.Value?.Trim() ?? string.Empty; + provenanceBuilder[key] = value; + } + } + + var signature = NormalizeSignature(upstream.Signature); + + return new RawUpstreamMetadata( + upstream.UpstreamId?.Trim() ?? string.Empty, + string.IsNullOrWhiteSpace(upstream.DocumentVersion) ? null : upstream.DocumentVersion.Trim(), + upstream.RetrievedAt.ToUniversalTime(), + upstream.ContentHash?.Trim() ?? string.Empty, + signature, + provenanceBuilder.ToImmutable()); + } + + private static RawSignatureMetadata NormalizeSignature(RawSignatureMetadata signature) + { + return new RawSignatureMetadata( + signature.Present, + string.IsNullOrWhiteSpace(signature.Format) ? null : signature.Format.Trim(), + string.IsNullOrWhiteSpace(signature.KeyId) ? null : signature.KeyId.Trim(), + string.IsNullOrWhiteSpace(signature.Signature) ? null : signature.Signature.Trim(), + string.IsNullOrWhiteSpace(signature.Certificate) ? null : signature.Certificate.Trim(), + string.IsNullOrWhiteSpace(signature.Digest) ? null : signature.Digest.Trim()); + } + + private static RawContent NormalizeContent(RawContent content) + { + if (content is null) + { + throw new ArgumentNullException(nameof(content)); + } + + var clonedRaw = content.Raw.Clone(); + + return new RawContent( + content.Format?.Trim() ?? 
string.Empty, + string.IsNullOrWhiteSpace(content.SpecVersion) ? null : content.SpecVersion.Trim(), + clonedRaw, + string.IsNullOrWhiteSpace(content.Encoding) ? null : content.Encoding.Trim()); + } + + private static RawIdentifiers NormalizeIdentifiers(RawIdentifiers identifiers) + { + var normalizedAliases = identifiers.Aliases + .Where(static alias => !string.IsNullOrWhiteSpace(alias)) + .Select(static alias => alias.Trim()) + .Distinct(StringComparer.OrdinalIgnoreCase) + .ToImmutableArray(); + + return new RawIdentifiers( + normalizedAliases, + identifiers.PrimaryId?.Trim() ?? string.Empty); + } + + private static RawLinkset NormalizeLinkset(RawLinkset linkset) + { + return new RawLinkset + { + Aliases = NormalizeStringArray(linkset.Aliases, StringComparer.OrdinalIgnoreCase), + PackageUrls = NormalizeStringArray(linkset.PackageUrls, StringComparer.Ordinal), + Cpes = NormalizeStringArray(linkset.Cpes, StringComparer.Ordinal), + References = NormalizeReferences(linkset.References), + ReconciledFrom = NormalizeStringArray(linkset.ReconciledFrom, StringComparer.Ordinal), + Notes = linkset.Notes ?? ImmutableDictionary<string, string>.Empty, + }; + } + + private static ImmutableArray<string> NormalizeStringArray(ImmutableArray<string> values, StringComparer comparer) + { + if (values.IsDefaultOrEmpty) + { + return EmptyArray; + } + + return values + .Where(static value => !string.IsNullOrWhiteSpace(value)) + .Select(static value => value.Trim()) + .Distinct(comparer) + .OrderBy(static value => value, comparer) + .ToImmutableArray(); + } + + private static ImmutableArray<RawReference> NormalizeReferences(ImmutableArray<RawReference> references) + { + if (references.IsDefaultOrEmpty) + { + return ImmutableArray<RawReference>.Empty; + } + + var builder = ImmutableArray.CreateBuilder<RawReference>(); + foreach (var reference in references) + { + if (string.IsNullOrWhiteSpace(reference.Type) || string.IsNullOrWhiteSpace(reference.Url)) + { + continue; + } + + builder.Add(new RawReference( + reference.Type.Trim(), + reference.Url.Trim(), + string.IsNullOrWhiteSpace(reference.Source) ? 
null : reference.Source.Trim())); + } + + return builder.ToImmutable(); + } + + private JsonElement ToJsonElement(AdvisoryRawDocument document) + { + var json = System.Text.Json.JsonSerializer.Serialize(document); + using var jsonDocument = System.Text.Json.JsonDocument.Parse(json); + return jsonDocument.RootElement.Clone(); + } + + private sealed class VerificationAggregation + { + private readonly List<AdvisoryRawViolationExample> _examples = new(); + + public VerificationAggregation(string code) + { + Code = code; + } + + public string Code { get; } + + public int Count { get; set; } + + public bool Truncated { get; set; } + + public IReadOnlyList<AdvisoryRawViolationExample> Examples => _examples; + + public bool TryAddExample(AdvisoryRawViolationExample example) + { + if (Truncated) + { + return false; + } + + _examples.Add(example); + return true; + } + } +} diff --git a/src/StellaOps.Concelier.Core/Raw/IAdvisoryRawRepository.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Raw/IAdvisoryRawRepository.cs similarity index 97% rename from src/StellaOps.Concelier.Core/Raw/IAdvisoryRawRepository.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Raw/IAdvisoryRawRepository.cs index 6c8eb3c1..fb29ec97 100644 --- a/src/StellaOps.Concelier.Core/Raw/IAdvisoryRawRepository.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Raw/IAdvisoryRawRepository.cs @@ -1,37 +1,37 @@ -using StellaOps.Concelier.RawModels; - -namespace StellaOps.Concelier.Core.Raw; - -/// <summary> -/// Persistence abstraction for raw advisory documents. -/// </summary> -public interface IAdvisoryRawRepository -{ - /// <summary> - /// Appends a new raw document or returns the existing record when the content hash already exists. - /// </summary> - /// <param name="document">Document to append.</param> - /// <param name="cancellationToken">Cancellation token.</param> - /// <returns>Result describing whether a new document was inserted.</returns> - Task<AdvisoryRawUpsertResult> UpsertAsync(AdvisoryRawDocument document, CancellationToken cancellationToken); - - /// <summary> - /// Finds a raw document by identifier within the specified tenant. - /// </summary> - Task<AdvisoryRawRecord?> FindByIdAsync(string tenant, string id, CancellationToken cancellationToken); - - /// <summary> - /// Queries raw documents using the supplied filter/paging options. - /// </summary> - Task<AdvisoryRawQueryResult> QueryAsync(AdvisoryRawQueryOptions options, CancellationToken cancellationToken); - - /// <summary> - /// Enumerates raw advisory documents for verification runs. - /// </summary> - Task<IReadOnlyList<AdvisoryRawRecord>> ListForVerificationAsync( - string tenant, - DateTimeOffset since, - DateTimeOffset until, - IReadOnlyCollection<string> sourceVendors, - CancellationToken cancellationToken); -} +using StellaOps.Concelier.RawModels; + +namespace StellaOps.Concelier.Core.Raw; + +/// <summary> +/// Persistence abstraction for raw advisory documents. +/// </summary> +public interface IAdvisoryRawRepository +{ + /// <summary> + /// Appends a new raw document or returns the existing record when the content hash already exists. 
+ /// </summary> + /// <param name="document">Document to append.</param> + /// <param name="cancellationToken">Cancellation token.</param> + /// <returns>Result describing whether a new document was inserted.</returns> + Task<AdvisoryRawUpsertResult> UpsertAsync(AdvisoryRawDocument document, CancellationToken cancellationToken); + + /// <summary> + /// Finds a raw document by identifier within the specified tenant. + /// </summary> + Task<AdvisoryRawRecord?> FindByIdAsync(string tenant, string id, CancellationToken cancellationToken); + + /// <summary> + /// Queries raw documents using the supplied filter/paging options. + /// </summary> + Task<AdvisoryRawQueryResult> QueryAsync(AdvisoryRawQueryOptions options, CancellationToken cancellationToken); + + /// <summary> + /// Enumerates raw advisory documents for verification runs. + /// </summary> + Task<IReadOnlyList<AdvisoryRawRecord>> ListForVerificationAsync( + string tenant, + DateTimeOffset since, + DateTimeOffset until, + IReadOnlyCollection<string> sourceVendors, + CancellationToken cancellationToken); +} diff --git a/src/StellaOps.Concelier.Core/Raw/IAdvisoryRawService.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Raw/IAdvisoryRawService.cs similarity index 96% rename from src/StellaOps.Concelier.Core/Raw/IAdvisoryRawService.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Raw/IAdvisoryRawService.cs index 4eeb9866..62c3591c 100644 --- a/src/StellaOps.Concelier.Core/Raw/IAdvisoryRawService.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Raw/IAdvisoryRawService.cs @@ -1,56 +1,56 @@ -using StellaOps.Concelier.RawModels; - -namespace StellaOps.Concelier.Core.Raw; - -/// <summary> -/// High-level orchestration for advisory raw ingestion, querying, and verification. -/// </summary> -public interface IAdvisoryRawService -{ - Task<AdvisoryRawUpsertResult> IngestAsync(AdvisoryRawDocument document, CancellationToken cancellationToken); - - Task<AdvisoryRawRecord?> FindByIdAsync(string tenant, string id, CancellationToken cancellationToken); - - Task<AdvisoryRawQueryResult> QueryAsync(AdvisoryRawQueryOptions options, CancellationToken cancellationToken); - - Task<AdvisoryRawVerificationResult> VerifyAsync(AdvisoryRawVerificationRequest request, CancellationToken cancellationToken); -} - -/// <summary> -/// Verification request parameters. -/// </summary> -public sealed record AdvisoryRawVerificationRequest( - string Tenant, - DateTimeOffset Since, - DateTimeOffset Until, - int Limit, - IReadOnlyCollection<string> SourceVendors, - IReadOnlyCollection<string> Codes); - -/// <summary> -/// Verification response summarising guard violations. -/// </summary> -public sealed record AdvisoryRawVerificationResult( - string Tenant, - DateTimeOffset WindowStart, - DateTimeOffset WindowEnd, - int CheckedCount, - IReadOnlyList<AdvisoryRawVerificationViolation> Violations, - bool Truncated); - -/// <summary> -/// Aggregated violation entry. -/// </summary> -public sealed record AdvisoryRawVerificationViolation( - string Code, - int Count, - IReadOnlyList<AdvisoryRawViolationExample> Examples); - -/// <summary> -/// Sample violation pointer for troubleshooting. -/// </summary> -public sealed record AdvisoryRawViolationExample( - string SourceVendor, - string DocumentId, - string ContentHash, - string Path); +using StellaOps.Concelier.RawModels; + +namespace StellaOps.Concelier.Core.Raw; + +/// <summary> +/// High-level orchestration for advisory raw ingestion, querying, and verification. 
+/// </summary> +public interface IAdvisoryRawService +{ + Task<AdvisoryRawUpsertResult> IngestAsync(AdvisoryRawDocument document, CancellationToken cancellationToken); + + Task<AdvisoryRawRecord?> FindByIdAsync(string tenant, string id, CancellationToken cancellationToken); + + Task<AdvisoryRawQueryResult> QueryAsync(AdvisoryRawQueryOptions options, CancellationToken cancellationToken); + + Task<AdvisoryRawVerificationResult> VerifyAsync(AdvisoryRawVerificationRequest request, CancellationToken cancellationToken); +} + +/// <summary> +/// Verification request parameters. +/// </summary> +public sealed record AdvisoryRawVerificationRequest( + string Tenant, + DateTimeOffset Since, + DateTimeOffset Until, + int Limit, + IReadOnlyCollection<string> SourceVendors, + IReadOnlyCollection<string> Codes); + +/// <summary> +/// Verification response summarising guard violations. +/// </summary> +public sealed record AdvisoryRawVerificationResult( + string Tenant, + DateTimeOffset WindowStart, + DateTimeOffset WindowEnd, + int CheckedCount, + IReadOnlyList<AdvisoryRawVerificationViolation> Violations, + bool Truncated); + +/// <summary> +/// Aggregated violation entry. +/// </summary> +public sealed record AdvisoryRawVerificationViolation( + string Code, + int Count, + IReadOnlyList<AdvisoryRawViolationExample> Examples); + +/// <summary> +/// Sample violation pointer for troubleshooting. +/// </summary> +public sealed record AdvisoryRawViolationExample( + string SourceVendor, + string DocumentId, + string ContentHash, + string Path); diff --git a/src/StellaOps.Concelier.Core/Raw/RawServiceCollectionExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Raw/RawServiceCollectionExtensions.cs similarity index 96% rename from src/StellaOps.Concelier.Core/Raw/RawServiceCollectionExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Raw/RawServiceCollectionExtensions.cs index 9f18fb0e..54a8ac1e 100644 --- a/src/StellaOps.Concelier.Core/Raw/RawServiceCollectionExtensions.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Raw/RawServiceCollectionExtensions.cs @@ -1,16 +1,16 @@ -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.DependencyInjection.Extensions; - -namespace StellaOps.Concelier.Core.Raw; - -public static class RawServiceCollectionExtensions -{ - public static IServiceCollection AddAdvisoryRawServices(this IServiceCollection services) - { - ArgumentNullException.ThrowIfNull(services); - - services.TryAddSingleton<IAdvisoryRawService, AdvisoryRawService>(); - - return services; - } -} +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; + +namespace StellaOps.Concelier.Core.Raw; + +public static class RawServiceCollectionExtensions +{ + public static IServiceCollection AddAdvisoryRawServices(this IServiceCollection services) + { + ArgumentNullException.ThrowIfNull(services); + + services.TryAddSingleton<IAdvisoryRawService, AdvisoryRawService>(); + + return services; + } +} diff --git a/src/StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj b/src/Concelier/__Libraries/StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj similarity index 82% rename from src/StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj index ff6b5df2..16d59351 100644 --- a/src/StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj +++ 
b/src/Concelier/__Libraries/StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -17,7 +18,7 @@ <ProjectReference Include="..\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj" /> <ProjectReference Include="..\StellaOps.Concelier.RawModels\StellaOps.Concelier.RawModels.csproj" /> <ProjectReference Include="..\StellaOps.Concelier.Normalization\StellaOps.Concelier.Normalization.csproj" /> - <ProjectReference Include="..\StellaOps.Plugin\StellaOps.Plugin.csproj" /> - <ProjectReference Include="..\StellaOps.Aoc\StellaOps.Aoc.csproj" /> + <ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" /> + <ProjectReference Include="../../../Aoc/__Libraries/StellaOps.Aoc/StellaOps.Aoc.csproj" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Core/TASKS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md similarity index 99% rename from src/StellaOps.Concelier.Core/TASKS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md index 5de0690d..6f28df97 100644 --- a/src/StellaOps.Concelier.Core/TASKS.md +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Core/TASKS.md @@ -1,119 +1,119 @@ -# TASKS — Epic 1: Aggregation-Only Contract -> **AOC Reminder:** ingestion aggregates and links only—no precedence, normalization, or severity computation. Derived data lives in Policy/overlay services. -| ID | Status | Owner(s) | Depends on | Notes | -|---|---|---|---|---| -| CONCELIER-CORE-AOC-19-001 `AOC write guard` | DONE (2025-10-29) | Concelier Core Guild | WEB-AOC-19-001 | Implement repository interceptor that inspects write payloads for forbidden AOC keys, validates provenance/signature presence, and maps violations to `ERR_AOC_00x`. | -> Docs alignment (2025-10-26): Behaviour/spec captured in `docs/ingestion/aggregation-only-contract.md` and architecture overview §2. -> Implementation (2025-10-29): Added `AdvisoryRawWriteGuard` + DI extensions wrapping `AocWriteGuard`, throwing domain-specific `ConcelierAocGuardException` with `ERR_AOC_00x` mappings. Unit tests cover valid/missing-tenant/signature cases. -> Coordination (2025-10-27): Authority `dotnet test` run is currently blocked because `AdvisoryObservationQueryService.BuildAliasLookup` returns `ImmutableHashSet<string?>`; please normalise these lookups to `ImmutableHashSet<string>` (trim nulls) so downstream builds succeed. -| CONCELIER-CORE-AOC-19-002 `Deterministic linkset extraction` | DONE (2025-10-31) | Concelier Core Guild | CONCELIER-CORE-AOC-19-001 | Build canonical linkset mappers for CVE/GHSA/PURL/CPE/reference extraction from upstream raw payloads, ensuring reconciled-from metadata is tracked and deterministic. | -> 2025-10-31: Added advisory linkset mapper + DI registration, normalized PURL/CPE canonicalization, persisted `reconciled_from` pointers, and refreshed observation factory/tests for new raw linkset shape. -> Docs alignment (2025-10-26): Linkset expectations detailed in AOC reference §4 and policy-engine architecture §2.1. -| CONCELIER-CORE-AOC-19-003 `Idempotent append-only upsert` | DONE (2025-10-28) | Concelier Core Guild | CONCELIER-STORE-AOC-19-002 | Implement idempotent upsert path using `(vendor, upstreamId, contentHash, tenant)` key, emitting supersedes pointers for new revisions and preventing duplicate inserts. 
| -> 2025-10-28: Advisory raw ingestion now strips client-supplied supersedes hints, logs ignored pointers, and surfaces repository-supplied supersedes identifiers; service tests cover duplicate handling and append-only semantics. -> Docs alignment (2025-10-26): Deployment guide + observability guide describe supersedes metrics; ensure implementation emits `aoc_violation_total` on failure. -| CONCELIER-CORE-AOC-19-004 `Remove ingestion normalization` | DOING (2025-10-28) | Concelier Core Guild | CONCELIER-CORE-AOC-19-002, POLICY-AOC-19-003 | Strip normalization/dedup/severity logic from ingestion pipelines, delegate derived computations to Policy Engine, and update exporters/tests to consume raw documents only. | -> Docs alignment (2025-10-26): Architecture overview emphasises policy-only derivation; coordinate with Policy Engine guild for rollout. -| CONCELIER-CORE-AOC-19-013 `Authority tenant scope smoke coverage` | TODO | Concelier Core Guild | AUTH-AOC-19-002 | Extend Concelier smoke/e2e fixtures to configure `requiredTenants` and assert cross-tenant rejection with updated Authority tokens. | Coordinate deliverable so Authority docs (`AUTH-AOC-19-003`) can close once tests are in place. | - -## Policy Engine v2 - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| CONCELIER-POLICY-20-002 `Linkset enrichment for policy` | TODO | Concelier Core Guild, Policy Guild | CONCELIER-CORE-AOC-19-002, POLICY-ENGINE-20-001 | Strengthen linkset builders with vendor-specific equivalence tables, NEVRA/PURL normalization, and version range parsing to maximize policy join recall; update fixtures + docs. | -> 2025-10-31: Base advisory linkset mapper landed under `CONCELIER-CORE-AOC-19-002`; policy enrichment work can now proceed with mapper outputs and observation schema fixtures. - -## Graph Explorer v1 - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| CONCELIER-GRAPH-21-001 `SBOM projection enrichment` | BLOCKED (2025-10-27) | Concelier Core Guild, Cartographer Guild | CONCELIER-POLICY-20-002, CARTO-GRAPH-21-002 | Extend SBOM normalization to emit full relationship graph (depends_on/contains/provides), scope tags, entrypoint annotations, and component metadata required by Cartographer. | -> 2025-10-27: Waiting on policy-driven linkset enrichment (`CONCELIER-POLICY-20-002`) and Cartographer API contract (`CARTO-GRAPH-21-002`) to define required relationship payloads. Without those schemas the projection changes cannot be implemented deterministically. -| CONCELIER-GRAPH-21-002 `Change events` | BLOCKED (2025-10-27) | Concelier Core Guild, Scheduler Guild | CONCELIER-GRAPH-21-001 | Publish change events (new SBOM version, relationship delta) for Cartographer build queue; ensure events include tenant/context metadata. | -> 2025-10-27: Depends on `CONCELIER-GRAPH-21-001`; event schema hinges on finalized projection output and Cartographer webhook contract, both pending. - -## Link-Not-Merge v1 - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| CONCELIER-LNM-21-001 `Advisory observation schema` | TODO | Concelier Core Guild | CONCELIER-CORE-AOC-19-001 | Introduce immutable `advisory_observations` model with AOC metadata, raw payload pointers, normalized fields, and tenancy guardrails; publish schema definition. `DOCS-LNM-22-001` blocked pending this deliverable. 
| -| CONCELIER-LNM-21-002 `Linkset builder` | TODO | Concelier Core Guild, Data Science Guild | CONCELIER-LNM-21-001 | Implement correlation pipeline (alias graph, PURL overlap, CVSS vector equality, fuzzy title match) that produces `advisory_linksets` with confidence + conflict annotations. Docs note: unblock `DOCS-LNM-22-001` once builder lands. | -| CONCELIER-LNM-21-003 `Conflict annotator` | TODO | Concelier Core Guild | CONCELIER-LNM-21-002 | Detect field disagreements (severity, CVSS, ranges, references) and record structured conflicts on linksets; surface to API/UI. Docs awaiting structured conflict payloads. | -| CONCELIER-LNM-21-004 `Merge code removal` | TODO | Concelier Core Guild | CONCELIER-LNM-21-002 | Excise existing merge/dedup logic, enforce immutability on observations, and add guards/tests to prevent future merges. | -| CONCELIER-LNM-21-005 `Event emission` | TODO | Concelier Core Guild, Platform Events Guild | CONCELIER-LNM-21-002 | Emit `advisory.linkset.updated` events with delta payloads for downstream Policy Engine/Cartographer consumers; ensure idempotent delivery. | - -## Policy Engine + Editor v1 - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| CONCELIER-POLICY-23-001 `Evidence indexes` | TODO | Concelier Core Guild | CONCELIER-LNM-21-002 | Add secondary indexes/materialized views to accelerate policy lookups (alias, severity per observation, correlation confidence). Document query contracts for runtime. | -| CONCELIER-POLICY-23-002 `Event guarantees` | TODO | Concelier Core Guild, Platform Events Guild | CONCELIER-LNM-21-005 | Ensure `advisory.linkset.updated` emits at-least-once with idempotent keys and include policy-relevant metadata (confidence, conflict summary). | - -## Graph & Vuln Explorer v1 - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| CONCELIER-GRAPH-24-001 `Advisory overlay inputs` | DONE (2025-10-29) | Concelier Core Guild | CONCELIER-POLICY-23-001 | Expose raw advisory observations/linksets with tenant filters for overlay services; no derived counts/severity in ingestion. | -> 2025-10-29: Filter-aware lookup path and /concelier/observations coverage landed; overlay services can consume raw advisory feeds deterministically. - -## Reachability v1 - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| CONCELIER-SIG-26-001 `Vulnerable symbol exposure` | TODO | Concelier Core Guild, Signals Guild | SIGNALS-24-002 | Expose advisory metadata (affected symbols/functions) via API to enrich reachability scoring; update fixtures. | - -## Orchestrator Dashboard - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| CONCELIER-ORCH-32-001 `Source registry integration` | TODO | Concelier Core Guild | ORCH-SVC-32-001, AUTH-ORCH-32-001 | Register Concelier data sources with orchestrator (metadata, schedules, rate policies) and wire provenance IDs/security scopes. | -| CONCELIER-ORCH-32-002 `Worker SDK adoption` | TODO | Concelier Core Guild | CONCELIER-ORCH-32-001, WORKER-GO-32-001, WORKER-PY-32-001 | Embed orchestrator worker SDK in ingestion loops, emit heartbeats/progress/artifact hashes, and enforce idempotency keys. 
| -| CONCELIER-ORCH-33-001 `Control hook compliance` | TODO | Concelier Core Guild | CONCELIER-ORCH-32-002, ORCH-SVC-33-001, ORCH-SVC-33-002 | Honor orchestrator throttle/pause/retry actions, surface structured error classes, and persist safe checkpoints for resume. | -| CONCELIER-ORCH-34-001 `Backfill + ledger linkage` | TODO | Concelier Core Guild | CONCELIER-ORCH-33-001, ORCH-SVC-33-003, ORCH-SVC-34-001 | Execute orchestrator-driven backfills, reuse artifact hashes to avoid duplicates, and link provenance to run ledger exports. | - -## Authority-Backed Scopes & Tenancy (Epic 14) -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| CONCELIER-TEN-48-001 `Tenant-aware linking` | TODO | Concelier Core Guild | AUTH-TEN-47-001 | Ensure advisory normalization/linking runs per tenant with RLS enforcing isolation; emit capability endpoint reporting `merge=false`; update events with tenant context. | - -## Observability & Forensics (Epic 15) -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| CONCELIER-OBS-50-001 `Telemetry adoption` | TODO | Concelier Core Guild, Observability Guild | TELEMETRY-OBS-50-001, TELEMETRY-OBS-50-002 | Replace ad-hoc logging with telemetry core across ingestion/linking pipelines; ensure spans/logs include tenant, source vendor, upstream id, content hash, and trace IDs. | -| CONCELIER-OBS-51-001 `Metrics & SLOs` | TODO | Concelier Core Guild, DevOps Guild | CONCELIER-OBS-50-001, TELEMETRY-OBS-51-001 | Emit metrics for ingest latency (cold/warm), queue depth, aoc violation rate, and publish SLO burn-rate alerts (ingest P95 <30s cold / <5s warm). Ship dashboards + alert configs. | -| CONCELIER-OBS-52-001 `Timeline events` | TODO | Concelier Core Guild | CONCELIER-OBS-50-001, TIMELINE-OBS-52-002 | Emit `timeline_event` records for advisory ingest/normalization/linkset creation with provenance, trace IDs, conflict summaries, and evidence placeholders. | -| CONCELIER-OBS-53-001 `Evidence snapshots` | TODO | Concelier Core Guild, Evidence Locker Guild | CONCELIER-OBS-52-001, EVID-OBS-53-002 | Produce advisory evaluation bundle payloads (raw doc, linkset, normalization diff) for evidence locker; ensure Merkle manifests seeded with content hashes. | -| CONCELIER-OBS-54-001 `Attestation & verification` | TODO | Concelier Core Guild, Provenance Guild | CONCELIER-OBS-53-001, PROV-OBS-54-001 | Attach DSSE attestations for advisory processing batches, expose verification API to confirm bundle integrity, and link attestation IDs back to timeline + ledger. | -| CONCELIER-OBS-55-001 `Incident mode hooks` | TODO | Concelier Core Guild, DevOps Guild | CONCELIER-OBS-51-001, DEVOPS-OBS-55-001 | Increase sampling, capture raw payload snapshots, and extend retention under incident mode; emit activation events + guardrails against PII leak. | - -## Air-Gapped Mode (Epic 16) -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| CONCELIER-AIRGAP-56-001 `Mirror ingestion adapters` | TODO | Concelier Core Guild | AIRGAP-IMP-57-002, MIRROR-CRT-56-001 | Add mirror source adapters reading advisories from imported bundles, preserving source metadata and bundle IDs. Ensure ingestion remains append-only. | -| CONCELIER-AIRGAP-56-002 `Bundle catalog linking` | TODO | Concelier Core Guild, AirGap Importer Guild | CONCELIER-AIRGAP-56-001, AIRGAP-IMP-57-001 | Persist `bundle_id`, `merkle_root`, and time anchor references on observations/linksets for provenance. 
| -| CONCELIER-AIRGAP-57-001 `Sealed-mode source restrictions` | TODO | Concelier Core Guild, AirGap Policy Guild | CONCELIER-AIRGAP-56-001, AIRGAP-POL-56-001 | Enforce sealed-mode egress rules by disallowing non-mirror connectors and surfacing remediation errors. | -| CONCELIER-AIRGAP-57-002 `Staleness annotations` | TODO | Concelier Core Guild, AirGap Time Guild | CONCELIER-AIRGAP-56-002, AIRGAP-TIME-58-001 | Compute staleness metadata for advisories per bundle and expose via API for Console/CLI badges. | -| CONCELIER-AIRGAP-58-001 `Portable advisory evidence` | TODO | Concelier Core Guild, Evidence Locker Guild | CONCELIER-OBS-53-001, EVID-OBS-54-001 | Package advisory evidence fragments into portable evidence bundles for cross-domain transfer. | - -## SDKs & OpenAPI (Epic 17) -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| CONCELIER-OAS-61-001 `Spec coverage` | TODO | Concelier Core Guild, API Contracts Guild | OAS-61-001 | Update Concelier OAS with advisory observation/linkset endpoints, standard pagination, and source provenance fields. | -| CONCELIER-OAS-61-002 `Examples library` | TODO | Concelier Core Guild | CONCELIER-OAS-61-001 | Provide rich examples for advisories, linksets, conflict annotations used by SDK + docs. | -| CONCELIER-OAS-62-001 `SDK smoke tests` | TODO | Concelier Core Guild, SDK Generator Guild | CONCELIER-OAS-61-001, SDKGEN-63-001 | Add SDK tests covering advisory search, pagination, and conflict handling; ensure source metadata surfaced. | -| CONCELIER-OAS-63-001 `Deprecation headers` | TODO | Concelier Core Guild, API Governance Guild | APIGOV-63-001 | Implement deprecation header support and timeline events for retiring endpoints. | - -## Risk Profiles (Epic 18) -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| CONCELIER-RISK-66-001 `CVSS/KEV providers` | TODO | Concelier Core Guild, Risk Engine Guild | RISK-ENGINE-67-001 | Expose CVSS, KEV, fix availability data via provider APIs with source metadata preserved. | -| CONCELIER-RISK-66-002 `Fix availability signals` | TODO | Concelier Core Guild | CONCELIER-RISK-66-001 | Provide structured fix availability and release metadata consumable by risk engine; document provenance. | -| CONCELIER-RISK-67-001 `Source consensus metrics` | TODO | Concelier Core Guild | CONCELIER-RISK-66-001 | Add consensus counts and confidence scores for linked advisories; ensure explainability includes source digests. | -| CONCELIER-RISK-68-001 `Policy Studio integration` | TODO | Concelier Core Guild, Policy Studio Guild | POLICY-RISK-68-001 | Surface advisory fields in Policy Studio profile editor (signal pickers, reducers). | -| CONCELIER-RISK-69-001 `Notification hooks` | TODO | Concelier Core Guild, Notifications Guild | CONCELIER-RISK-66-002 | Emit events when advisory signals change impacting risk scores (e.g., fix available). | - -## Attestor Console (Epic 19) -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| CONCELIER-ATTEST-73-001 `ScanResults attestation inputs` | TODO | Concelier Core Guild, Attestor Service Guild | ATTEST-TYPES-72-001 | Provide normalized advisory data and linkset digests needed for ScanResults attestations. | -| CONCELIER-ATTEST-73-002 `Transparency metadata` | TODO | Concelier Core Guild | CONCELIER-ATTEST-73-001 | Ensure Conseiller exposes source digests for transparency proofs and explainability. 
| +# TASKS — Epic 1: Aggregation-Only Contract +> **AOC Reminder:** ingestion aggregates and links only—no precedence, normalization, or severity computation. Derived data lives in Policy/overlay services. +| ID | Status | Owner(s) | Depends on | Notes | +|---|---|---|---|---| +| CONCELIER-CORE-AOC-19-001 `AOC write guard` | DONE (2025-10-29) | Concelier Core Guild | WEB-AOC-19-001 | Implement repository interceptor that inspects write payloads for forbidden AOC keys, validates provenance/signature presence, and maps violations to `ERR_AOC_00x`. | +> Docs alignment (2025-10-26): Behaviour/spec captured in `docs/ingestion/aggregation-only-contract.md` and architecture overview §2. +> Implementation (2025-10-29): Added `AdvisoryRawWriteGuard` + DI extensions wrapping `AocWriteGuard`, throwing domain-specific `ConcelierAocGuardException` with `ERR_AOC_00x` mappings. Unit tests cover valid/missing-tenant/signature cases. +> Coordination (2025-10-27): Authority `dotnet test` run is currently blocked because `AdvisoryObservationQueryService.BuildAliasLookup` returns `ImmutableHashSet<string?>`; please normalise these lookups to `ImmutableHashSet<string>` (trim nulls) so downstream builds succeed. +| CONCELIER-CORE-AOC-19-002 `Deterministic linkset extraction` | DONE (2025-10-31) | Concelier Core Guild | CONCELIER-CORE-AOC-19-001 | Build canonical linkset mappers for CVE/GHSA/PURL/CPE/reference extraction from upstream raw payloads, ensuring reconciled-from metadata is tracked and deterministic. | +> 2025-10-31: Added advisory linkset mapper + DI registration, normalized PURL/CPE canonicalization, persisted `reconciled_from` pointers, and refreshed observation factory/tests for new raw linkset shape. +> Docs alignment (2025-10-26): Linkset expectations detailed in AOC reference §4 and policy-engine architecture §2.1. +| CONCELIER-CORE-AOC-19-003 `Idempotent append-only upsert` | DONE (2025-10-28) | Concelier Core Guild | CONCELIER-STORE-AOC-19-002 | Implement idempotent upsert path using `(vendor, upstreamId, contentHash, tenant)` key, emitting supersedes pointers for new revisions and preventing duplicate inserts. | +> 2025-10-28: Advisory raw ingestion now strips client-supplied supersedes hints, logs ignored pointers, and surfaces repository-supplied supersedes identifiers; service tests cover duplicate handling and append-only semantics. +> Docs alignment (2025-10-26): Deployment guide + observability guide describe supersedes metrics; ensure implementation emits `aoc_violation_total` on failure. +| CONCELIER-CORE-AOC-19-004 `Remove ingestion normalization` | DOING (2025-10-28) | Concelier Core Guild | CONCELIER-CORE-AOC-19-002, POLICY-AOC-19-003 | Strip normalization/dedup/severity logic from ingestion pipelines, delegate derived computations to Policy Engine, and update exporters/tests to consume raw documents only. | +> Docs alignment (2025-10-26): Architecture overview emphasises policy-only derivation; coordinate with Policy Engine guild for rollout. +| CONCELIER-CORE-AOC-19-013 `Authority tenant scope smoke coverage` | TODO | Concelier Core Guild | AUTH-AOC-19-002 | Extend Concelier smoke/e2e fixtures to configure `requiredTenants` and assert cross-tenant rejection with updated Authority tokens. | Coordinate deliverable so Authority docs (`AUTH-AOC-19-003`) can close once tests are in place. 
| + +## Policy Engine v2 + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| CONCELIER-POLICY-20-002 `Linkset enrichment for policy` | TODO | Concelier Core Guild, Policy Guild | CONCELIER-CORE-AOC-19-002, POLICY-ENGINE-20-001 | Strengthen linkset builders with vendor-specific equivalence tables, NEVRA/PURL normalization, and version range parsing to maximize policy join recall; update fixtures + docs. | +> 2025-10-31: Base advisory linkset mapper landed under `CONCELIER-CORE-AOC-19-002`; policy enrichment work can now proceed with mapper outputs and observation schema fixtures. + +## Graph Explorer v1 + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| CONCELIER-GRAPH-21-001 `SBOM projection enrichment` | BLOCKED (2025-10-27) | Concelier Core Guild, Cartographer Guild | CONCELIER-POLICY-20-002, CARTO-GRAPH-21-002 | Extend SBOM normalization to emit full relationship graph (depends_on/contains/provides), scope tags, entrypoint annotations, and component metadata required by Cartographer. | +> 2025-10-27: Waiting on policy-driven linkset enrichment (`CONCELIER-POLICY-20-002`) and Cartographer API contract (`CARTO-GRAPH-21-002`) to define required relationship payloads. Without those schemas the projection changes cannot be implemented deterministically. +| CONCELIER-GRAPH-21-002 `Change events` | BLOCKED (2025-10-27) | Concelier Core Guild, Scheduler Guild | CONCELIER-GRAPH-21-001 | Publish change events (new SBOM version, relationship delta) for Cartographer build queue; ensure events include tenant/context metadata. | +> 2025-10-27: Depends on `CONCELIER-GRAPH-21-001`; event schema hinges on finalized projection output and Cartographer webhook contract, both pending. + +## Link-Not-Merge v1 + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| CONCELIER-LNM-21-001 `Advisory observation schema` | TODO | Concelier Core Guild | CONCELIER-CORE-AOC-19-001 | Introduce immutable `advisory_observations` model with AOC metadata, raw payload pointers, normalized fields, and tenancy guardrails; publish schema definition. `DOCS-LNM-22-001` blocked pending this deliverable. | +| CONCELIER-LNM-21-002 `Linkset builder` | TODO | Concelier Core Guild, Data Science Guild | CONCELIER-LNM-21-001 | Implement correlation pipeline (alias graph, PURL overlap, CVSS vector equality, fuzzy title match) that produces `advisory_linksets` with confidence + conflict annotations. Docs note: unblock `DOCS-LNM-22-001` once builder lands. | +| CONCELIER-LNM-21-003 `Conflict annotator` | TODO | Concelier Core Guild | CONCELIER-LNM-21-002 | Detect field disagreements (severity, CVSS, ranges, references) and record structured conflicts on linksets; surface to API/UI. Docs awaiting structured conflict payloads. | +| CONCELIER-LNM-21-004 `Merge code removal` | TODO | Concelier Core Guild | CONCELIER-LNM-21-002 | Excise existing merge/dedup logic, enforce immutability on observations, and add guards/tests to prevent future merges. | +| CONCELIER-LNM-21-005 `Event emission` | TODO | Concelier Core Guild, Platform Events Guild | CONCELIER-LNM-21-002 | Emit `advisory.linkset.updated` events with delta payloads for downstream Policy Engine/Cartographer consumers; ensure idempotent delivery. 
| + +## Policy Engine + Editor v1 + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| CONCELIER-POLICY-23-001 `Evidence indexes` | TODO | Concelier Core Guild | CONCELIER-LNM-21-002 | Add secondary indexes/materialized views to accelerate policy lookups (alias, severity per observation, correlation confidence). Document query contracts for runtime. | +| CONCELIER-POLICY-23-002 `Event guarantees` | TODO | Concelier Core Guild, Platform Events Guild | CONCELIER-LNM-21-005 | Ensure `advisory.linkset.updated` emits at-least-once with idempotent keys and include policy-relevant metadata (confidence, conflict summary). | + +## Graph & Vuln Explorer v1 + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| CONCELIER-GRAPH-24-001 `Advisory overlay inputs` | DONE (2025-10-29) | Concelier Core Guild | CONCELIER-POLICY-23-001 | Expose raw advisory observations/linksets with tenant filters for overlay services; no derived counts/severity in ingestion. | +> 2025-10-29: Filter-aware lookup path and /concelier/observations coverage landed; overlay services can consume raw advisory feeds deterministically. + +## Reachability v1 + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| CONCELIER-SIG-26-001 `Vulnerable symbol exposure` | TODO | Concelier Core Guild, Signals Guild | SIGNALS-24-002 | Expose advisory metadata (affected symbols/functions) via API to enrich reachability scoring; update fixtures. | + +## Orchestrator Dashboard + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| CONCELIER-ORCH-32-001 `Source registry integration` | TODO | Concelier Core Guild | ORCH-SVC-32-001, AUTH-ORCH-32-001 | Register Concelier data sources with orchestrator (metadata, schedules, rate policies) and wire provenance IDs/security scopes. | +| CONCELIER-ORCH-32-002 `Worker SDK adoption` | TODO | Concelier Core Guild | CONCELIER-ORCH-32-001, WORKER-GO-32-001, WORKER-PY-32-001 | Embed orchestrator worker SDK in ingestion loops, emit heartbeats/progress/artifact hashes, and enforce idempotency keys. | +| CONCELIER-ORCH-33-001 `Control hook compliance` | TODO | Concelier Core Guild | CONCELIER-ORCH-32-002, ORCH-SVC-33-001, ORCH-SVC-33-002 | Honor orchestrator throttle/pause/retry actions, surface structured error classes, and persist safe checkpoints for resume. | +| CONCELIER-ORCH-34-001 `Backfill + ledger linkage` | TODO | Concelier Core Guild | CONCELIER-ORCH-33-001, ORCH-SVC-33-003, ORCH-SVC-34-001 | Execute orchestrator-driven backfills, reuse artifact hashes to avoid duplicates, and link provenance to run ledger exports. | + +## Authority-Backed Scopes & Tenancy (Epic 14) +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| CONCELIER-TEN-48-001 `Tenant-aware linking` | TODO | Concelier Core Guild | AUTH-TEN-47-001 | Ensure advisory normalization/linking runs per tenant with RLS enforcing isolation; emit capability endpoint reporting `merge=false`; update events with tenant context. 
| + +## Observability & Forensics (Epic 15) +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| CONCELIER-OBS-50-001 `Telemetry adoption` | TODO | Concelier Core Guild, Observability Guild | TELEMETRY-OBS-50-001, TELEMETRY-OBS-50-002 | Replace ad-hoc logging with telemetry core across ingestion/linking pipelines; ensure spans/logs include tenant, source vendor, upstream id, content hash, and trace IDs. | +| CONCELIER-OBS-51-001 `Metrics & SLOs` | TODO | Concelier Core Guild, DevOps Guild | CONCELIER-OBS-50-001, TELEMETRY-OBS-51-001 | Emit metrics for ingest latency (cold/warm), queue depth, aoc violation rate, and publish SLO burn-rate alerts (ingest P95 <30s cold / <5s warm). Ship dashboards + alert configs. | +| CONCELIER-OBS-52-001 `Timeline events` | TODO | Concelier Core Guild | CONCELIER-OBS-50-001, TIMELINE-OBS-52-002 | Emit `timeline_event` records for advisory ingest/normalization/linkset creation with provenance, trace IDs, conflict summaries, and evidence placeholders. | +| CONCELIER-OBS-53-001 `Evidence snapshots` | TODO | Concelier Core Guild, Evidence Locker Guild | CONCELIER-OBS-52-001, EVID-OBS-53-002 | Produce advisory evaluation bundle payloads (raw doc, linkset, normalization diff) for evidence locker; ensure Merkle manifests seeded with content hashes. | +| CONCELIER-OBS-54-001 `Attestation & verification` | TODO | Concelier Core Guild, Provenance Guild | CONCELIER-OBS-53-001, PROV-OBS-54-001 | Attach DSSE attestations for advisory processing batches, expose verification API to confirm bundle integrity, and link attestation IDs back to timeline + ledger. | +| CONCELIER-OBS-55-001 `Incident mode hooks` | TODO | Concelier Core Guild, DevOps Guild | CONCELIER-OBS-51-001, DEVOPS-OBS-55-001 | Increase sampling, capture raw payload snapshots, and extend retention under incident mode; emit activation events + guardrails against PII leak. | + +## Air-Gapped Mode (Epic 16) +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| CONCELIER-AIRGAP-56-001 `Mirror ingestion adapters` | TODO | Concelier Core Guild | AIRGAP-IMP-57-002, MIRROR-CRT-56-001 | Add mirror source adapters reading advisories from imported bundles, preserving source metadata and bundle IDs. Ensure ingestion remains append-only. | +| CONCELIER-AIRGAP-56-002 `Bundle catalog linking` | TODO | Concelier Core Guild, AirGap Importer Guild | CONCELIER-AIRGAP-56-001, AIRGAP-IMP-57-001 | Persist `bundle_id`, `merkle_root`, and time anchor references on observations/linksets for provenance. | +| CONCELIER-AIRGAP-57-001 `Sealed-mode source restrictions` | TODO | Concelier Core Guild, AirGap Policy Guild | CONCELIER-AIRGAP-56-001, AIRGAP-POL-56-001 | Enforce sealed-mode egress rules by disallowing non-mirror connectors and surfacing remediation errors. | +| CONCELIER-AIRGAP-57-002 `Staleness annotations` | TODO | Concelier Core Guild, AirGap Time Guild | CONCELIER-AIRGAP-56-002, AIRGAP-TIME-58-001 | Compute staleness metadata for advisories per bundle and expose via API for Console/CLI badges. | +| CONCELIER-AIRGAP-58-001 `Portable advisory evidence` | TODO | Concelier Core Guild, Evidence Locker Guild | CONCELIER-OBS-53-001, EVID-OBS-54-001 | Package advisory evidence fragments into portable evidence bundles for cross-domain transfer. 
|
+
+## SDKs & OpenAPI (Epic 17)
+| ID | Status | Owner(s) | Depends on | Notes |
+|----|--------|----------|------------|-------|
+| CONCELIER-OAS-61-001 `Spec coverage` | TODO | Concelier Core Guild, API Contracts Guild | OAS-61-001 | Update Concelier OAS with advisory observation/linkset endpoints, standard pagination, and source provenance fields. |
+| CONCELIER-OAS-61-002 `Examples library` | TODO | Concelier Core Guild | CONCELIER-OAS-61-001 | Provide rich examples for advisories, linksets, conflict annotations used by SDK + docs. |
+| CONCELIER-OAS-62-001 `SDK smoke tests` | TODO | Concelier Core Guild, SDK Generator Guild | CONCELIER-OAS-61-001, SDKGEN-63-001 | Add SDK tests covering advisory search, pagination, and conflict handling; ensure source metadata surfaced. |
+| CONCELIER-OAS-63-001 `Deprecation headers` | TODO | Concelier Core Guild, API Governance Guild | APIGOV-63-001 | Implement deprecation header support and timeline events for retiring endpoints. |
+
+## Risk Profiles (Epic 18)
+| ID | Status | Owner(s) | Depends on | Notes |
+|----|--------|----------|------------|-------|
+| CONCELIER-RISK-66-001 `CVSS/KEV providers` | TODO | Concelier Core Guild, Risk Engine Guild | RISK-ENGINE-67-001 | Expose CVSS, KEV, fix availability data via provider APIs with source metadata preserved. |
+| CONCELIER-RISK-66-002 `Fix availability signals` | TODO | Concelier Core Guild | CONCELIER-RISK-66-001 | Provide structured fix availability and release metadata consumable by risk engine; document provenance. |
+| CONCELIER-RISK-67-001 `Source consensus metrics` | TODO | Concelier Core Guild | CONCELIER-RISK-66-001 | Add consensus counts and confidence scores for linked advisories; ensure explainability includes source digests. |
+| CONCELIER-RISK-68-001 `Policy Studio integration` | TODO | Concelier Core Guild, Policy Studio Guild | POLICY-RISK-68-001 | Surface advisory fields in Policy Studio profile editor (signal pickers, reducers). |
+| CONCELIER-RISK-69-001 `Notification hooks` | TODO | Concelier Core Guild, Notifications Guild | CONCELIER-RISK-66-002 | Emit events when advisory signals change impacting risk scores (e.g., fix available). |
+
+## Attestor Console (Epic 19)
+| ID | Status | Owner(s) | Depends on | Notes |
+|----|--------|----------|------------|-------|
+| CONCELIER-ATTEST-73-001 `ScanResults attestation inputs` | TODO | Concelier Core Guild, Attestor Service Guild | ATTEST-TYPES-72-001 | Provide normalized advisory data and linkset digests needed for ScanResults attestations. |
+| CONCELIER-ATTEST-73-002 `Transparency metadata` | TODO | Concelier Core Guild | CONCELIER-ATTEST-73-001 | Ensure Concelier exposes source digests for transparency proofs and explainability.
| diff --git a/src/StellaOps.Concelier.Core/Unknown/IUnknownStateLedger.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Unknown/IUnknownStateLedger.cs similarity index 100% rename from src/StellaOps.Concelier.Core/Unknown/IUnknownStateLedger.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Unknown/IUnknownStateLedger.cs diff --git a/src/StellaOps.Concelier.Core/Unknown/IUnknownStateRepository.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Unknown/IUnknownStateRepository.cs similarity index 100% rename from src/StellaOps.Concelier.Core/Unknown/IUnknownStateRepository.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Unknown/IUnknownStateRepository.cs diff --git a/src/StellaOps.Concelier.Core/Unknown/UnknownStateLedger.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Unknown/UnknownStateLedger.cs similarity index 100% rename from src/StellaOps.Concelier.Core/Unknown/UnknownStateLedger.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Unknown/UnknownStateLedger.cs diff --git a/src/StellaOps.Concelier.Core/Unknown/UnknownStateLedgerRequest.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Unknown/UnknownStateLedgerRequest.cs similarity index 100% rename from src/StellaOps.Concelier.Core/Unknown/UnknownStateLedgerRequest.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Unknown/UnknownStateLedgerRequest.cs diff --git a/src/StellaOps.Concelier.Core/Unknown/UnknownStateLedgerResult.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Unknown/UnknownStateLedgerResult.cs similarity index 100% rename from src/StellaOps.Concelier.Core/Unknown/UnknownStateLedgerResult.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Unknown/UnknownStateLedgerResult.cs diff --git a/src/StellaOps.Concelier.Core/Unknown/UnknownStateMarkerKinds.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Unknown/UnknownStateMarkerKinds.cs similarity index 100% rename from src/StellaOps.Concelier.Core/Unknown/UnknownStateMarkerKinds.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Unknown/UnknownStateMarkerKinds.cs diff --git a/src/StellaOps.Concelier.Core/Unknown/UnknownStateSnapshot.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Core/Unknown/UnknownStateSnapshot.cs similarity index 100% rename from src/StellaOps.Concelier.Core/Unknown/UnknownStateSnapshot.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Core/Unknown/UnknownStateSnapshot.cs diff --git a/src/StellaOps.Concelier.Exporter.Json/AGENTS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.Json/AGENTS.md similarity index 100% rename from src/StellaOps.Concelier.Exporter.Json/AGENTS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.Json/AGENTS.md diff --git a/src/StellaOps.Concelier.Exporter.Json/ExportDigestCalculator.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.Json/ExportDigestCalculator.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.Json/ExportDigestCalculator.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.Json/ExportDigestCalculator.cs diff --git a/src/StellaOps.Concelier.Exporter.Json/ExporterVersion.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.Json/ExporterVersion.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.Json/ExporterVersion.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.Json/ExporterVersion.cs diff --git a/src/StellaOps.Concelier.Exporter.Json/IJsonExportPathResolver.cs 
b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.Json/IJsonExportPathResolver.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.Json/IJsonExportPathResolver.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.Json/IJsonExportPathResolver.cs diff --git a/src/StellaOps.Concelier.Exporter.Json/JsonExportFile.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.Json/JsonExportFile.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.Json/JsonExportFile.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.Json/JsonExportFile.cs diff --git a/src/StellaOps.Concelier.Exporter.Json/JsonExportJob.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.Json/JsonExportJob.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.Json/JsonExportJob.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.Json/JsonExportJob.cs diff --git a/src/StellaOps.Concelier.Exporter.Json/JsonExportManifestWriter.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.Json/JsonExportManifestWriter.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.Json/JsonExportManifestWriter.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.Json/JsonExportManifestWriter.cs diff --git a/src/StellaOps.Concelier.Exporter.Json/JsonExportOptions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.Json/JsonExportOptions.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.Json/JsonExportOptions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.Json/JsonExportOptions.cs diff --git a/src/StellaOps.Concelier.Exporter.Json/JsonExportResult.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.Json/JsonExportResult.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.Json/JsonExportResult.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.Json/JsonExportResult.cs diff --git a/src/StellaOps.Concelier.Exporter.Json/JsonExportSnapshotBuilder.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.Json/JsonExportSnapshotBuilder.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.Json/JsonExportSnapshotBuilder.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.Json/JsonExportSnapshotBuilder.cs diff --git a/src/StellaOps.Concelier.Exporter.Json/JsonExporterDependencyInjectionRoutine.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.Json/JsonExporterDependencyInjectionRoutine.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.Json/JsonExporterDependencyInjectionRoutine.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.Json/JsonExporterDependencyInjectionRoutine.cs diff --git a/src/StellaOps.Concelier.Exporter.Json/JsonExporterPlugin.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.Json/JsonExporterPlugin.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.Json/JsonExporterPlugin.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.Json/JsonExporterPlugin.cs diff --git a/src/StellaOps.Concelier.Exporter.Json/JsonFeedExporter.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.Json/JsonFeedExporter.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.Json/JsonFeedExporter.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.Json/JsonFeedExporter.cs diff --git a/src/StellaOps.Concelier.Exporter.Json/JsonMirrorBundleWriter.cs 
b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.Json/JsonMirrorBundleWriter.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.Json/JsonMirrorBundleWriter.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.Json/JsonMirrorBundleWriter.cs diff --git a/src/StellaOps.Concelier.Exporter.Json/StellaOps.Concelier.Exporter.Json.csproj b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.Json/StellaOps.Concelier.Exporter.Json.csproj similarity index 77% rename from src/StellaOps.Concelier.Exporter.Json/StellaOps.Concelier.Exporter.Json.csproj rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.Json/StellaOps.Concelier.Exporter.Json.csproj index f8210e66..49cb8991 100644 --- a/src/StellaOps.Concelier.Exporter.Json/StellaOps.Concelier.Exporter.Json.csproj +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.Json/StellaOps.Concelier.Exporter.Json.csproj @@ -11,9 +11,9 @@ <ProjectReference Include="..\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj" /> <ProjectReference Include="..\StellaOps.Concelier.Normalization\StellaOps.Concelier.Normalization.csproj" /> <ProjectReference Include="..\StellaOps.Concelier.Storage.Mongo\StellaOps.Concelier.Storage.Mongo.csproj" /> - <ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" /> - <ProjectReference Include="../StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj" /> - <ProjectReference Include="..\StellaOps.Cryptography\StellaOps.Cryptography.csproj" /> + <ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" /> + <ProjectReference Include="../../../__Libraries/StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj" /> + <ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" /> </ItemGroup> <ItemGroup> <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> @@ -21,4 +21,4 @@ <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" /> <PackageReference Include="Microsoft.Extensions.Options.ConfigurationExtensions" Version="10.0.0-rc.2.25502.107" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Exporter.Json/TASKS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.Json/TASKS.md similarity index 83% rename from src/StellaOps.Concelier.Exporter.Json/TASKS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.Json/TASKS.md index e52740fa..f0417dca 100644 --- a/src/StellaOps.Concelier.Exporter.Json/TASKS.md +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.Json/TASKS.md @@ -1,13 +1,13 @@ -# TASKS -| Task | Owner(s) | Depends on | Notes | -|---|---|---|---| -|Directory layout strategy (vuln-list mirror)|BE-Export|Models|DONE – `VulnListJsonExportPathResolver` maps CVE, GHSA, distro, and vendor identifiers into vuln-list style paths.| -|Deterministic serializer|BE-Export|Models|DONE – Canonical serializer + snapshot builder emit stable JSON across runs.| -|ExportState read/write|BE-Export|Storage.Mongo|DONE – `JsonFeedExporter` reads prior state, stores digests/cursors, and skips unchanged exports.| -|JsonExportJob wiring|BE-Export|Core|DONE – Job scheduler options now configurable via DI; JSON job registered with scheduler.| -|Snapshot tests for file tree|QA|Exporters|DONE – Added resolver/exporter tests asserting tree layout and deterministic behavior.| -|Parity smoke vs upstream 
vuln-list|QA|Exporters|DONE – `JsonExporterParitySmokeTests` covers common ecosystems against vuln-list layout.| +# TASKS +| Task | Owner(s) | Depends on | Notes | +|---|---|---|---| +|Directory layout strategy (vuln-list mirror)|BE-Export|Models|DONE – `VulnListJsonExportPathResolver` maps CVE, GHSA, distro, and vendor identifiers into vuln-list style paths.| +|Deterministic serializer|BE-Export|Models|DONE – Canonical serializer + snapshot builder emit stable JSON across runs.| +|ExportState read/write|BE-Export|Storage.Mongo|DONE – `JsonFeedExporter` reads prior state, stores digests/cursors, and skips unchanged exports.| +|JsonExportJob wiring|BE-Export|Core|DONE – Job scheduler options now configurable via DI; JSON job registered with scheduler.| +|Snapshot tests for file tree|QA|Exporters|DONE – Added resolver/exporter tests asserting tree layout and deterministic behavior.| +|Parity smoke vs upstream vuln-list|QA|Exporters|DONE – `JsonExporterParitySmokeTests` covers common ecosystems against vuln-list layout.| |Stream advisories during export|BE-Export|Storage.Mongo|DONE – exporter + streaming-only test ensures single enumeration and per-file digest capture.| |Emit export manifest with digest metadata|BE-Export|Exporters|DONE – manifest now includes per-file digests/sizes alongside tree digest.| -|Surface new advisory fields (description/CWEs/canonical metric)|BE-Export|Models, Core|DONE (2025-10-15) – JSON exporter validated with new fixtures ensuring description/CWEs/canonical metric are preserved in outputs; `dotnet test src/StellaOps.Concelier.Exporter.Json.Tests` run 2025-10-15 for regression coverage.| -|CONCELIER-EXPORT-08-201 – Mirror bundle + domain manifest|Team Concelier Export|FEEDCORE-ENGINE-07-001|DONE (2025-10-19) – Mirror bundle writer emits domain aggregates + manifests with cosign-compatible JWS signatures; index/tests updated via `dotnet test src/StellaOps.Concelier.Exporter.Json.Tests/StellaOps.Concelier.Exporter.Json.Tests.csproj` (2025-10-19).| +|Surface new advisory fields (description/CWEs/canonical metric)|BE-Export|Models, Core|DONE (2025-10-15) – JSON exporter validated with new fixtures ensuring description/CWEs/canonical metric are preserved in outputs; `dotnet test src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Exporter.Json.Tests` run 2025-10-15 for regression coverage.| +|CONCELIER-EXPORT-08-201 – Mirror bundle + domain manifest|Team Concelier Export|FEEDCORE-ENGINE-07-001|DONE (2025-10-19) – Mirror bundle writer emits domain aggregates + manifests with cosign-compatible JWS signatures; index/tests updated via `dotnet test src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Exporter.Json.Tests/StellaOps.Concelier.Exporter.Json.Tests.csproj` (2025-10-19).| diff --git a/src/StellaOps.Concelier.Exporter.Json/VulnListJsonExportPathResolver.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.Json/VulnListJsonExportPathResolver.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.Json/VulnListJsonExportPathResolver.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.Json/VulnListJsonExportPathResolver.cs diff --git a/src/StellaOps.Concelier.Exporter.TrivyDb/AGENTS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/AGENTS.md similarity index 100% rename from src/StellaOps.Concelier.Exporter.TrivyDb/AGENTS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/AGENTS.md diff --git a/src/StellaOps.Concelier.Exporter.TrivyDb/ITrivyDbBuilder.cs 
b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/ITrivyDbBuilder.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.TrivyDb/ITrivyDbBuilder.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/ITrivyDbBuilder.cs diff --git a/src/StellaOps.Concelier.Exporter.TrivyDb/ITrivyDbOrasPusher.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/ITrivyDbOrasPusher.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.TrivyDb/ITrivyDbOrasPusher.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/ITrivyDbOrasPusher.cs diff --git a/src/StellaOps.Concelier.Exporter.TrivyDb/OciDescriptor.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/OciDescriptor.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.TrivyDb/OciDescriptor.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/OciDescriptor.cs diff --git a/src/StellaOps.Concelier.Exporter.TrivyDb/OciIndex.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/OciIndex.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.TrivyDb/OciIndex.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/OciIndex.cs diff --git a/src/StellaOps.Concelier.Exporter.TrivyDb/OciManifest.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/OciManifest.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.TrivyDb/OciManifest.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/OciManifest.cs diff --git a/src/StellaOps.Concelier.Exporter.TrivyDb/StellaOps.Concelier.Exporter.TrivyDb.csproj b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/StellaOps.Concelier.Exporter.TrivyDb.csproj similarity index 81% rename from src/StellaOps.Concelier.Exporter.TrivyDb/StellaOps.Concelier.Exporter.TrivyDb.csproj rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/StellaOps.Concelier.Exporter.TrivyDb.csproj index a12b1a84..419062d1 100644 --- a/src/StellaOps.Concelier.Exporter.TrivyDb/StellaOps.Concelier.Exporter.TrivyDb.csproj +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/StellaOps.Concelier.Exporter.TrivyDb.csproj @@ -11,12 +11,12 @@ <ProjectReference Include="..\StellaOps.Concelier.Exporter.Json\StellaOps.Concelier.Exporter.Json.csproj" /> <ProjectReference Include="..\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj" /> <ProjectReference Include="..\StellaOps.Concelier.Storage.Mongo\StellaOps.Concelier.Storage.Mongo.csproj" /> - <ProjectReference Include="../StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj" /> - <ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" /> + <ProjectReference Include="../../../__Libraries/StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj" /> + <ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" /> </ItemGroup> <ItemGroup> <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" /> <PackageReference Include="Microsoft.Extensions.Options.ConfigurationExtensions" Version="10.0.0-rc.2.25502.107" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Exporter.TrivyDb/TASKS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TASKS.md 
similarity index 87% rename from src/StellaOps.Concelier.Exporter.TrivyDb/TASKS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TASKS.md index e7ee2ed0..6b343ed0 100644 --- a/src/StellaOps.Concelier.Exporter.TrivyDb/TASKS.md +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TASKS.md @@ -11,5 +11,5 @@ |ExportState persistence & idempotence|BE-Export|Storage.Mongo|DONE – baseline resets wired into `ExportStateManager`, planner signals resets after delta runs, and exporters update state w/ repository-aware baseline rotation + tests.| |Streamed package building to avoid large copies|BE-Export|Exporters|DONE – metadata/config now reuse backing arrays and OCI writer streams directly without double buffering.| |Plan incremental/delta exports|BE-Export|Exporters|DONE – state captures per-file manifests, planner schedules delta vs full resets, layer reuse smoke test verifies OCI reuse, and operator guide documents the validation flow.| -|Advisory schema parity export (description/CWEs/canonical metric)|BE-Export|Models, Core|DONE (2025-10-15) – exporter/test fixtures updated to handle description/CWEs/canonical metric fields during Trivy DB packaging; `dotnet test src/StellaOps.Concelier.Exporter.TrivyDb.Tests` re-run 2025-10-15 to confirm coverage.| -|CONCELIER-EXPORT-08-202 – Mirror-ready Trivy DB bundles|Team Concelier Export|CONCELIER-EXPORT-08-201|**DONE (2025-10-19)** – Added mirror export options and writer emitting `mirror/index.json` plus per-domain `manifest.json`/`metadata.json`/`db.tar.gz` with deterministic SHA-256 digests; regression covered via `dotnet test src/StellaOps.Concelier.Exporter.TrivyDb.Tests/StellaOps.Concelier.Exporter.TrivyDb.Tests.csproj`.| +|Advisory schema parity export (description/CWEs/canonical metric)|BE-Export|Models, Core|DONE (2025-10-15) – exporter/test fixtures updated to handle description/CWEs/canonical metric fields during Trivy DB packaging; `dotnet test src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Exporter.TrivyDb.Tests` re-run 2025-10-15 to confirm coverage.| +|CONCELIER-EXPORT-08-202 – Mirror-ready Trivy DB bundles|Team Concelier Export|CONCELIER-EXPORT-08-201|**DONE (2025-10-19)** – Added mirror export options and writer emitting `mirror/index.json` plus per-domain `manifest.json`/`metadata.json`/`db.tar.gz` with deterministic SHA-256 digests; regression covered via `dotnet test src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Exporter.TrivyDb.Tests/StellaOps.Concelier.Exporter.TrivyDb.Tests.csproj`.| diff --git a/src/StellaOps.Concelier.Exporter.TrivyDb/TrivyConfigDocument.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyConfigDocument.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.TrivyDb/TrivyConfigDocument.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyConfigDocument.cs diff --git a/src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbBlob.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbBlob.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbBlob.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbBlob.cs diff --git a/src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbBoltBuilder.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbBoltBuilder.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbBoltBuilder.cs rename to 
src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbBoltBuilder.cs diff --git a/src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbBuilderResult.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbBuilderResult.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbBuilderResult.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbBuilderResult.cs diff --git a/src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExportJob.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExportJob.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExportJob.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExportJob.cs diff --git a/src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExportMode.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExportMode.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExportMode.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExportMode.cs diff --git a/src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExportOptions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExportOptions.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExportOptions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExportOptions.cs diff --git a/src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExportOverrides.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExportOverrides.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExportOverrides.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExportOverrides.cs diff --git a/src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExportPlan.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExportPlan.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExportPlan.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExportPlan.cs diff --git a/src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExportPlanner.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExportPlanner.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExportPlanner.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExportPlanner.cs diff --git a/src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExporterDependencyInjectionRoutine.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExporterDependencyInjectionRoutine.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExporterDependencyInjectionRoutine.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExporterDependencyInjectionRoutine.cs diff --git a/src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExporterPlugin.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExporterPlugin.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExporterPlugin.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExporterPlugin.cs diff --git a/src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbFeedExporter.cs 
b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbFeedExporter.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbFeedExporter.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbFeedExporter.cs diff --git a/src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbMediaTypes.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbMediaTypes.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbMediaTypes.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbMediaTypes.cs diff --git a/src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbMirrorBundleWriter.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbMirrorBundleWriter.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbMirrorBundleWriter.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbMirrorBundleWriter.cs diff --git a/src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbOciWriteResult.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbOciWriteResult.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbOciWriteResult.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbOciWriteResult.cs diff --git a/src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbOciWriter.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbOciWriter.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbOciWriter.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbOciWriter.cs diff --git a/src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbOrasPusher.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbOrasPusher.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbOrasPusher.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbOrasPusher.cs diff --git a/src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbPackage.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbPackage.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbPackage.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbPackage.cs diff --git a/src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbPackageBuilder.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbPackageBuilder.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbPackageBuilder.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbPackageBuilder.cs diff --git a/src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbPackageRequest.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbPackageRequest.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbPackageRequest.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbPackageRequest.cs diff --git a/src/StellaOps.Concelier.Merge/AGENTS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Merge/AGENTS.md similarity index 100% rename from src/StellaOps.Concelier.Merge/AGENTS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Merge/AGENTS.md diff --git a/src/StellaOps.Concelier.Merge/Class1.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Merge/Class1.cs similarity index 
100% rename from src/StellaOps.Concelier.Merge/Class1.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Merge/Class1.cs diff --git a/src/StellaOps.Concelier.Merge/Comparers/DebianEvr.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Merge/Comparers/DebianEvr.cs similarity index 100% rename from src/StellaOps.Concelier.Merge/Comparers/DebianEvr.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Merge/Comparers/DebianEvr.cs diff --git a/src/StellaOps.Concelier.Merge/Comparers/Nevra.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Merge/Comparers/Nevra.cs similarity index 100% rename from src/StellaOps.Concelier.Merge/Comparers/Nevra.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Merge/Comparers/Nevra.cs diff --git a/src/StellaOps.Concelier.Merge/Comparers/SemanticVersionRangeResolver.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Merge/Comparers/SemanticVersionRangeResolver.cs similarity index 100% rename from src/StellaOps.Concelier.Merge/Comparers/SemanticVersionRangeResolver.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Merge/Comparers/SemanticVersionRangeResolver.cs diff --git a/src/StellaOps.Concelier.Merge/Identity/AdvisoryIdentityCluster.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Merge/Identity/AdvisoryIdentityCluster.cs similarity index 100% rename from src/StellaOps.Concelier.Merge/Identity/AdvisoryIdentityCluster.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Merge/Identity/AdvisoryIdentityCluster.cs diff --git a/src/StellaOps.Concelier.Merge/Identity/AdvisoryIdentityResolver.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Merge/Identity/AdvisoryIdentityResolver.cs similarity index 100% rename from src/StellaOps.Concelier.Merge/Identity/AdvisoryIdentityResolver.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Merge/Identity/AdvisoryIdentityResolver.cs diff --git a/src/StellaOps.Concelier.Merge/Identity/AliasIdentity.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Merge/Identity/AliasIdentity.cs similarity index 100% rename from src/StellaOps.Concelier.Merge/Identity/AliasIdentity.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Merge/Identity/AliasIdentity.cs diff --git a/src/StellaOps.Concelier.Merge/Jobs/MergeJobKinds.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Merge/Jobs/MergeJobKinds.cs similarity index 100% rename from src/StellaOps.Concelier.Merge/Jobs/MergeJobKinds.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Merge/Jobs/MergeJobKinds.cs diff --git a/src/StellaOps.Concelier.Merge/Jobs/MergeReconcileJob.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Merge/Jobs/MergeReconcileJob.cs similarity index 100% rename from src/StellaOps.Concelier.Merge/Jobs/MergeReconcileJob.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Merge/Jobs/MergeReconcileJob.cs diff --git a/src/StellaOps.Concelier.Merge/MergeServiceCollectionExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Merge/MergeServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.Merge/MergeServiceCollectionExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Merge/MergeServiceCollectionExtensions.cs diff --git a/src/StellaOps.Concelier.Merge/Options/AdvisoryPrecedenceDefaults.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Merge/Options/AdvisoryPrecedenceDefaults.cs similarity index 100% rename from src/StellaOps.Concelier.Merge/Options/AdvisoryPrecedenceDefaults.cs rename to 
src/Concelier/__Libraries/StellaOps.Concelier.Merge/Options/AdvisoryPrecedenceDefaults.cs diff --git a/src/StellaOps.Concelier.Merge/Options/AdvisoryPrecedenceOptions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Merge/Options/AdvisoryPrecedenceOptions.cs similarity index 100% rename from src/StellaOps.Concelier.Merge/Options/AdvisoryPrecedenceOptions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Merge/Options/AdvisoryPrecedenceOptions.cs diff --git a/src/StellaOps.Concelier.Merge/Options/AdvisoryPrecedenceTable.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Merge/Options/AdvisoryPrecedenceTable.cs similarity index 100% rename from src/StellaOps.Concelier.Merge/Options/AdvisoryPrecedenceTable.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Merge/Options/AdvisoryPrecedenceTable.cs diff --git a/src/StellaOps.Concelier.Merge/RANGE_PRIMITIVES_COORDINATION.md b/src/Concelier/__Libraries/StellaOps.Concelier.Merge/RANGE_PRIMITIVES_COORDINATION.md similarity index 98% rename from src/StellaOps.Concelier.Merge/RANGE_PRIMITIVES_COORDINATION.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Merge/RANGE_PRIMITIVES_COORDINATION.md index 1e26c7ea..210d03b4 100644 --- a/src/StellaOps.Concelier.Merge/RANGE_PRIMITIVES_COORDINATION.md +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Merge/RANGE_PRIMITIVES_COORDINATION.md @@ -1,97 +1,97 @@ -# Range Primitive Coordination (Sprint 2) - -_Status date: 2025-10-20_ - -## Why this exists -- SemVer range outputs must follow the embedded rule guidance in `../FASTER_MODELING_AND_NORMALIZATION.md` (array of `{scheme,type,min/max/value,notes}`). -- Merge will rely on normalized rules plus existing `RangePrimitives` (SemVer/NEVRA/EVR) to dedupe ranges and compute deterministic hashes. -- Connector teams are mid-flight; this playbook restarts coordination so every feed delivers the normalized payload needed by the conflict resolver work in Sprint 3. - -## Upstream dependencies -- **Models** (`FEEDMODELS-SCHEMA-01-003`, `FEEDMODELS-SCHEMA-02-900`) – extends `RangePrimitives.SemVer` metadata and introduces `NormalizedVersionRule` arrays on affected packages. -- **Normalization** (`FEEDNORM-NORM-02-001`) – provides `SemVerRangeRuleBuilder` used by OSS connectors (GHSA/OSV/NVD) to emit canonical rule docs plus provenance notes. -- **Storage.Mongo** (`FEEDSTORAGE-DATA-02-001`) – dual-write/dual-read modifications for the new arrays; required before staging rollout. -- **Merge** (`FEEDMERGE-ENGINE-02-002`) – unions/dedupes normalized rules across sources once connectors publish them. - -Until these blocks land, connectors should stage changes behind a feature flag or fixture branch so we can flip on normalized writes in sync. - -## Connector adoption matrix -| Connector | Owner team | Current state (2025-10-20) | Required actions for normalized rules | Coordination notes | -|-----------|------------|----------------------------|--------------------------------------|--------------------| -| Acsc | BE-Conn-ACSC | ❌ Not started – mapper emits legacy range strings only | Stage `SemVerRangeRuleBuilder` integration once relay HTTP/2 fixes stabilise; target kickoff 2025-10-24. | Pair with Merge on sample payloads; ensure fixtures capture vendor/device taxonomy for provenance notes. | -| Cccs | BE-Conn-CCCS | ⚠️ DOING – helper branch under review (due 2025-10-21) | Wire trailing-version split helper, emit `NormalizedVersions` with `cccs:{serial}:{index}` notes, refresh fixtures/tests. 
| Share MR link before 2025-10-21 stand-up; Merge to validate counters once fixtures land. | -| CertBund | BE-Conn-CERTBUND | ⚠️ In progress – localisation work pending (due 2025-10-22) | Translate `product.Versions` phrases (`bis`, `alle`) into builder inputs; emit provenance `certbund:{advisoryId}:{vendor}`; update README/tests. | Localization WG drafting deterministic casing guidance; expect sample payloads 2025-10-21. | -| CertCc | BE-Conn-CERTCC | ✅ Complete – emitting `certcc.vendor` rules since 2025-10-12 | Keep builder contract stable; bubble any VINCE payload changes. | Merge verified counters drop on 2025-10-19 run; no follow-up. | -| Cve | BE-Conn-CVE | ✅ Complete – SemVer rules emitted 2025-10-12 | Maintain provenance notes (`cve:{cveId}:{identifier}`) and extend fixtures as schema grows. | Latest nightly confirms normalized counters at expected baseline. | -| Ghsa | BE-Conn-GHSA | ✅ Complete – normalized rollout live 2025-10-11 | Monitor schema diffs; keep fixtures synced with GHSA provenance notes. | Coordinate with OSV on shared ecosystems; no open issues. | -| Osv | BE-Conn-OSV | ✅ Complete – normalized rules shipping 2025-10-11 | Track new ecosystems; ensure notes stay aligned with `osv:{ecosystem}:{advisoryId}:{identifier}`. | Merge analytics watching npm/PyPI parity; no action needed. | -| Nvd | BE-Conn-NVD | ✅ Complete – normalized SemVer output live 2025-10-11 | Maintain CVE-aligned provenance; monitor MR toggles if schema shifts. | Next check: confirm export parity once storage migration flips on 2025-10-23. | -| Kev | BE-Conn-KEV | ✅ Complete – catalog/due-date rules emitted 2025-10-12 | Keep schedule metadata synced with CISA feed. | Acts as flag-only enrich; no additional merge work required. | -| Ics.Cisa | BE-Conn-ICS-CISA | ⚠️ Pending decision (due 2025-10-23) | Promote existing SemVer primitives into normalized rules; open Models ticket if firmware requires new scheme. | Provide sample advisories to Merge by 2025-10-22 for schema review. | -| Kisa | BE-Conn-KISA | ⚠️ Proposal drafting (due 2025-10-24) | Finalise `kisa.build` (or alternate) scheme with Models, then emit normalized rules and update localisation notes/tests. | Localization WG prepping translation samples; Merge to review scheme request immediately. | -| Ru.Bdu | BE-Conn-BDU | ✅ Complete – emitting `ru-bdu.raw` rules since 2025-10-14 | Monitor UTF-8 sanitisation; keep provenance notes aligned with advisory ids. | Storage snapshot verified 2025-10-19; counters green. | -| Ru.Nkcki | BE-Conn-Nkcki | ✅ Complete – SemVer + normalized rules live 2025-10-13 | Maintain Cyrillic provenance fields and SemVer coverage. | Localization WG confirmed transliteration guidance; no open items. | -| Vndr.Apple | BE-Conn-Apple | ✅ Complete – `apple.build` SemVer rules live 2025-10-11 | Keep fixtures covering multi-range tables; notify Merge of schema evolutions. | Prepare follow-up for macOS/iOS beta channels by 2025-10-26. | -| Vndr.Cisco | BE-Conn-Cisco | ⚠️ DOING – normalized promotion branch open (due 2025-10-21) | Use helper to convert SemVer primitives into rule arrays with `cisco:{productId}` notes; refresh tests. | OAuth throttling validated; Merge to rerun counters post-merge. | -| Vndr.Msrc | BE-Conn-MSRC | ✅ Complete – `msrc.build` rules live 2025-10-15 | Monitor monthly rollup coverage and provenance notes. | Merge verified rule ingestion 2025-10-19; no outstanding actions. 
| - -## Storage alignment quick reference (2025-10-11) -- `NormalizedVersionDocumentFactory` copies each `NormalizedVersionRule` into Mongo with the shape `{ packageId, packageType, scheme, type, style, min, minInclusive, max, maxInclusive, value, notes, decisionReason, constraint, source, recordedAt }`. `style` is currently a direct echo of `type` but reserved for future vendor comparers—no connector action required. -- `constraint` is hydrated only when `NormalizedVersionRule` matches a legacy `VersionRange` primitive. Preserve `notes` (e.g., `nvd:cve-2025-1234`) so storage can join rules back to their provenance and carry decision reasoning. -- Valid `scheme` values today are `semver`, `nevra`, and `evr`. Raise a Models ticket before introducing additional scheme identifiers (e.g., `apple.build`, `ios.semver`). -- Prefer normalized `type` tokens from `NormalizedVersionRuleTypes` (`range`, `exact`, `lt`, `lte`, `gt`, `gte`). Builders already coerce casing/format—avoid custom strings. -- Ensure `AffectedPackage.Identifier`/`Type` and `Provenance` collections are populated; storage falls back to package-level provenance if range-level data is absent, but loses traceability if both are empty. -- Snapshot of an emitted document (SemVer range) for reference: - ```json - { - "packageId": "pkg:npm/example", - "packageType": "npm", - "scheme": "semver", - "type": "range", - "style": "range", - "min": "1.2.3", - "minInclusive": true, - "max": "2.0.0", - "maxInclusive": false, - "value": null, - "notes": "ghsa:GHSA-xxxx-yyyy", - "decisionReason": "ghsa-precedence-over-nvd", - "constraint": ">= 1.2.3 < 2.0.0", - "source": "ghsa", - "recordedAt": "2025-10-11T00:00:00Z" - } - ``` -- For distro sources emitting NEVRA/EVR primitives, expect the same envelope with `scheme` swapped accordingly. Example (`nevra`): - ```json - { - "packageId": "bash", - "packageType": "rpm", - "scheme": "nevra", - "type": "range", - "style": "range", - "min": "0:4.4.18-2.el7", - "minInclusive": true, - "max": "0:4.4.20-1.el7", - "maxInclusive": false, - "value": null, - "notes": "redhat:RHSA-2025:1234", - "decisionReason": "rhel-priority-over-nvd", - "constraint": "<= 0:4.4.20-1.el7", - "source": "redhat", - "recordedAt": "2025-10-11T00:00:00Z" - } - ``` - -## Immediate next steps -- **2025-10-21** – Cccs and Cisco teams to merge normalized-rule branches, regenerate fixtures, and post counter screenshots. -- **2025-10-22** – CertBund translator review with Localization WG; confirm localisation glossary + deterministic casing before merge. -- **2025-10-23** – ICS-CISA to confirm SemVer vs firmware scheme; escalate Models ticket if new scheme required. -- **2025-10-24** – KISA firmware scheme proposal due; Merge to review immediately and unblock builder integration. -- **2025-10-25** – Merge cross-connector review to validate counters, provenance notes, and storage projections before flipping default union logic. - -## Tracking & follow-up -- Track due dates above; if a connector slips past its deadline, flag in `#concelier-merge` stand-up and open a blocker ticket referencing FEEDMERGE-COORD-02-900. -- Capture connector progress updates in stand-ups twice per week; link PRs/issues back to this document and the rollout dashboard (`docs/dev/normalized_versions_rollout.md`). -- Monitor merge counters `concelier.merge.normalized_rules` and `concelier.merge.normalized_rules_missing` to spot advisories that still lack normalized arrays after precedence merge. 
-- When a connector is ready to emit normalized rules, update its module `TASKS.md` status and ping Merge in `#concelier-merge` with fixture diff screenshots. -- If new schemes or comparer logic is required (e.g., Cisco IOS), open a Models issue referencing `FEEDMODELS-SCHEMA-02-900` before implementing. +# Range Primitive Coordination (Sprint 2) + +_Status date: 2025-10-20_ + +## Why this exists +- SemVer range outputs must follow the embedded rule guidance in `../FASTER_MODELING_AND_NORMALIZATION.md` (array of `{scheme,type,min/max/value,notes}`). +- Merge will rely on normalized rules plus existing `RangePrimitives` (SemVer/NEVRA/EVR) to dedupe ranges and compute deterministic hashes. +- Connector teams are mid-flight; this playbook restarts coordination so every feed delivers the normalized payload needed by the conflict resolver work in Sprint 3. + +## Upstream dependencies +- **Models** (`FEEDMODELS-SCHEMA-01-003`, `FEEDMODELS-SCHEMA-02-900`) – extends `RangePrimitives.SemVer` metadata and introduces `NormalizedVersionRule` arrays on affected packages. +- **Normalization** (`FEEDNORM-NORM-02-001`) – provides `SemVerRangeRuleBuilder` used by OSS connectors (GHSA/OSV/NVD) to emit canonical rule docs plus provenance notes. +- **Storage.Mongo** (`FEEDSTORAGE-DATA-02-001`) – dual-write/dual-read modifications for the new arrays; required before staging rollout. +- **Merge** (`FEEDMERGE-ENGINE-02-002`) – unions/dedupes normalized rules across sources once connectors publish them. + +Until these blocks land, connectors should stage changes behind a feature flag or fixture branch so we can flip on normalized writes in sync. + +## Connector adoption matrix +| Connector | Owner team | Current state (2025-10-20) | Required actions for normalized rules | Coordination notes | +|-----------|------------|----------------------------|--------------------------------------|--------------------| +| Acsc | BE-Conn-ACSC | ❌ Not started – mapper emits legacy range strings only | Stage `SemVerRangeRuleBuilder` integration once relay HTTP/2 fixes stabilise; target kickoff 2025-10-24. | Pair with Merge on sample payloads; ensure fixtures capture vendor/device taxonomy for provenance notes. | +| Cccs | BE-Conn-CCCS | ⚠️ DOING – helper branch under review (due 2025-10-21) | Wire trailing-version split helper, emit `NormalizedVersions` with `cccs:{serial}:{index}` notes, refresh fixtures/tests. | Share MR link before 2025-10-21 stand-up; Merge to validate counters once fixtures land. | +| CertBund | BE-Conn-CERTBUND | ⚠️ In progress – localisation work pending (due 2025-10-22) | Translate `product.Versions` phrases (`bis`, `alle`) into builder inputs; emit provenance `certbund:{advisoryId}:{vendor}`; update README/tests. | Localization WG drafting deterministic casing guidance; expect sample payloads 2025-10-21. | +| CertCc | BE-Conn-CERTCC | ✅ Complete – emitting `certcc.vendor` rules since 2025-10-12 | Keep builder contract stable; bubble any VINCE payload changes. | Merge verified counters drop on 2025-10-19 run; no follow-up. | +| Cve | BE-Conn-CVE | ✅ Complete – SemVer rules emitted 2025-10-12 | Maintain provenance notes (`cve:{cveId}:{identifier}`) and extend fixtures as schema grows. | Latest nightly confirms normalized counters at expected baseline. | +| Ghsa | BE-Conn-GHSA | ✅ Complete – normalized rollout live 2025-10-11 | Monitor schema diffs; keep fixtures synced with GHSA provenance notes. | Coordinate with OSV on shared ecosystems; no open issues. 
| +| Osv | BE-Conn-OSV | ✅ Complete – normalized rules shipping 2025-10-11 | Track new ecosystems; ensure notes stay aligned with `osv:{ecosystem}:{advisoryId}:{identifier}`. | Merge analytics watching npm/PyPI parity; no action needed. | +| Nvd | BE-Conn-NVD | ✅ Complete – normalized SemVer output live 2025-10-11 | Maintain CVE-aligned provenance; monitor MR toggles if schema shifts. | Next check: confirm export parity once storage migration flips on 2025-10-23. | +| Kev | BE-Conn-KEV | ✅ Complete – catalog/due-date rules emitted 2025-10-12 | Keep schedule metadata synced with CISA feed. | Acts as flag-only enrich; no additional merge work required. | +| Ics.Cisa | BE-Conn-ICS-CISA | ⚠️ Pending decision (due 2025-10-23) | Promote existing SemVer primitives into normalized rules; open Models ticket if firmware requires new scheme. | Provide sample advisories to Merge by 2025-10-22 for schema review. | +| Kisa | BE-Conn-KISA | ⚠️ Proposal drafting (due 2025-10-24) | Finalise `kisa.build` (or alternate) scheme with Models, then emit normalized rules and update localisation notes/tests. | Localization WG prepping translation samples; Merge to review scheme request immediately. | +| Ru.Bdu | BE-Conn-BDU | ✅ Complete – emitting `ru-bdu.raw` rules since 2025-10-14 | Monitor UTF-8 sanitisation; keep provenance notes aligned with advisory ids. | Storage snapshot verified 2025-10-19; counters green. | +| Ru.Nkcki | BE-Conn-Nkcki | ✅ Complete – SemVer + normalized rules live 2025-10-13 | Maintain Cyrillic provenance fields and SemVer coverage. | Localization WG confirmed transliteration guidance; no open items. | +| Vndr.Apple | BE-Conn-Apple | ✅ Complete – `apple.build` SemVer rules live 2025-10-11 | Keep fixtures covering multi-range tables; notify Merge of schema evolutions. | Prepare follow-up for macOS/iOS beta channels by 2025-10-26. | +| Vndr.Cisco | BE-Conn-Cisco | ⚠️ DOING – normalized promotion branch open (due 2025-10-21) | Use helper to convert SemVer primitives into rule arrays with `cisco:{productId}` notes; refresh tests. | OAuth throttling validated; Merge to rerun counters post-merge. | +| Vndr.Msrc | BE-Conn-MSRC | ✅ Complete – `msrc.build` rules live 2025-10-15 | Monitor monthly rollup coverage and provenance notes. | Merge verified rule ingestion 2025-10-19; no outstanding actions. | + +## Storage alignment quick reference (2025-10-11) +- `NormalizedVersionDocumentFactory` copies each `NormalizedVersionRule` into Mongo with the shape `{ packageId, packageType, scheme, type, style, min, minInclusive, max, maxInclusive, value, notes, decisionReason, constraint, source, recordedAt }`. `style` is currently a direct echo of `type` but reserved for future vendor comparers—no connector action required. +- `constraint` is hydrated only when `NormalizedVersionRule` matches a legacy `VersionRange` primitive. Preserve `notes` (e.g., `nvd:cve-2025-1234`) so storage can join rules back to their provenance and carry decision reasoning. +- Valid `scheme` values today are `semver`, `nevra`, and `evr`. Raise a Models ticket before introducing additional scheme identifiers (e.g., `apple.build`, `ios.semver`). +- Prefer normalized `type` tokens from `NormalizedVersionRuleTypes` (`range`, `exact`, `lt`, `lte`, `gt`, `gte`). Builders already coerce casing/format—avoid custom strings. +- Ensure `AffectedPackage.Identifier`/`Type` and `Provenance` collections are populated; storage falls back to package-level provenance if range-level data is absent, but loses traceability if both are empty. 
+- Snapshot of an emitted document (SemVer range) for reference: + ```json + { + "packageId": "pkg:npm/example", + "packageType": "npm", + "scheme": "semver", + "type": "range", + "style": "range", + "min": "1.2.3", + "minInclusive": true, + "max": "2.0.0", + "maxInclusive": false, + "value": null, + "notes": "ghsa:GHSA-xxxx-yyyy", + "decisionReason": "ghsa-precedence-over-nvd", + "constraint": ">= 1.2.3 < 2.0.0", + "source": "ghsa", + "recordedAt": "2025-10-11T00:00:00Z" + } + ``` +- For distro sources emitting NEVRA/EVR primitives, expect the same envelope with `scheme` swapped accordingly. Example (`nevra`): + ```json + { + "packageId": "bash", + "packageType": "rpm", + "scheme": "nevra", + "type": "range", + "style": "range", + "min": "0:4.4.18-2.el7", + "minInclusive": true, + "max": "0:4.4.20-1.el7", + "maxInclusive": false, + "value": null, + "notes": "redhat:RHSA-2025:1234", + "decisionReason": "rhel-priority-over-nvd", + "constraint": "<= 0:4.4.20-1.el7", + "source": "redhat", + "recordedAt": "2025-10-11T00:00:00Z" + } + ``` + +## Immediate next steps +- **2025-10-21** – Cccs and Cisco teams to merge normalized-rule branches, regenerate fixtures, and post counter screenshots. +- **2025-10-22** – CertBund translator review with Localization WG; confirm localisation glossary + deterministic casing before merge. +- **2025-10-23** – ICS-CISA to confirm SemVer vs firmware scheme; escalate Models ticket if new scheme required. +- **2025-10-24** – KISA firmware scheme proposal due; Merge to review immediately and unblock builder integration. +- **2025-10-25** – Merge cross-connector review to validate counters, provenance notes, and storage projections before flipping default union logic. + +## Tracking & follow-up +- Track due dates above; if a connector slips past its deadline, flag in `#concelier-merge` stand-up and open a blocker ticket referencing FEEDMERGE-COORD-02-900. +- Capture connector progress updates in stand-ups twice per week; link PRs/issues back to this document and the rollout dashboard (`docs/dev/normalized_versions_rollout.md`). +- Monitor merge counters `concelier.merge.normalized_rules` and `concelier.merge.normalized_rules_missing` to spot advisories that still lack normalized arrays after precedence merge. +- When a connector is ready to emit normalized rules, update its module `TASKS.md` status and ping Merge in `#concelier-merge` with fixture diff screenshots. +- If new schemes or comparer logic is required (e.g., Cisco IOS), open a Models issue referencing `FEEDMODELS-SCHEMA-02-900` before implementing. 
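+
+## Counter emission sketch (illustrative)
+The merge counters referenced above follow the `System.Diagnostics.Metrics` pattern already used by `AdvisoryMergeService` (a shared `Meter` named `StellaOps.Concelier.Merge` plus `Counter<long>` instruments). The snippet below is a minimal, hypothetical sketch of how `concelier.merge.normalized_rules` and `concelier.merge.normalized_rules_missing` could be emitted; the instrument names come from the tracking list above, but the class shape, tag names, and the assumption that they live on the same meter are illustrative, not the shipped implementation.
+```csharp
+using System.Collections.Generic;
+using System.Diagnostics.Metrics;
+
+// Hypothetical sketch mirroring the Meter/Counter pattern in AdvisoryMergeService.
+internal static class NormalizedRuleMetrics
+{
+    private static readonly Meter MergeMeter = new("StellaOps.Concelier.Merge");
+
+    private static readonly Counter<long> NormalizedRules = MergeMeter.CreateCounter<long>(
+        "concelier.merge.normalized_rules",
+        unit: "count",
+        description: "Normalized version rules observed after precedence merge.");
+
+    private static readonly Counter<long> NormalizedRulesMissing = MergeMeter.CreateCounter<long>(
+        "concelier.merge.normalized_rules_missing",
+        unit: "count",
+        description: "Advisories still lacking normalized version rules after merge.");
+
+    // Tag names ("source", "scheme") are illustrative only.
+    public static void Record(string source, string scheme, int ruleCount)
+    {
+        if (ruleCount > 0)
+        {
+            NormalizedRules.Add(
+                ruleCount,
+                new KeyValuePair<string, object?>("source", source),
+                new KeyValuePair<string, object?>("scheme", scheme));
+        }
+        else
+        {
+            NormalizedRulesMissing.Add(
+                1,
+                new KeyValuePair<string, object?>("source", source));
+        }
+    }
+}
+```
+Reusing the existing meter name keeps these counters on the same instrumentation surface as `concelier.merge.identity_conflicts`, so dashboards and alerts can subscribe to a single meter.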
diff --git a/src/StellaOps.Concelier.Merge/Services/AdvisoryMergeService.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Merge/Services/AdvisoryMergeService.cs similarity index 97% rename from src/StellaOps.Concelier.Merge/Services/AdvisoryMergeService.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Merge/Services/AdvisoryMergeService.cs index 98f23499..f697b083 100644 --- a/src/StellaOps.Concelier.Merge/Services/AdvisoryMergeService.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Merge/Services/AdvisoryMergeService.cs @@ -1,439 +1,439 @@ -using System; -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Diagnostics.Metrics; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using StellaOps.Concelier.Core; -using StellaOps.Concelier.Core.Events; -using StellaOps.Concelier.Models; -using StellaOps.Concelier.Storage.Mongo.Advisories; -using StellaOps.Concelier.Storage.Mongo.Aliases; -using StellaOps.Concelier.Storage.Mongo.MergeEvents; -using System.Text.Json; - -namespace StellaOps.Concelier.Merge.Services; - -public sealed class AdvisoryMergeService -{ - private static readonly Meter MergeMeter = new("StellaOps.Concelier.Merge"); - private static readonly Counter<long> AliasCollisionCounter = MergeMeter.CreateCounter<long>( - "concelier.merge.identity_conflicts", - unit: "count", - description: "Number of alias collisions detected during merge."); - - private static readonly string[] PreferredAliasSchemes = - { - AliasSchemes.Cve, - AliasSchemes.Ghsa, - AliasSchemes.OsV, - AliasSchemes.Msrc, - }; - - private readonly AliasGraphResolver _aliasResolver; - private readonly IAdvisoryStore _advisoryStore; - private readonly AdvisoryPrecedenceMerger _precedenceMerger; - private readonly MergeEventWriter _mergeEventWriter; - private readonly IAdvisoryEventLog _eventLog; - private readonly TimeProvider _timeProvider; - private readonly CanonicalMerger _canonicalMerger; - private readonly ILogger<AdvisoryMergeService> _logger; - - public AdvisoryMergeService( - AliasGraphResolver aliasResolver, - IAdvisoryStore advisoryStore, - AdvisoryPrecedenceMerger precedenceMerger, - MergeEventWriter mergeEventWriter, - CanonicalMerger canonicalMerger, - IAdvisoryEventLog eventLog, - TimeProvider timeProvider, - ILogger<AdvisoryMergeService> logger) - { - _aliasResolver = aliasResolver ?? throw new ArgumentNullException(nameof(aliasResolver)); - _advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore)); - _precedenceMerger = precedenceMerger ?? throw new ArgumentNullException(nameof(precedenceMerger)); - _mergeEventWriter = mergeEventWriter ?? throw new ArgumentNullException(nameof(mergeEventWriter)); - _canonicalMerger = canonicalMerger ?? throw new ArgumentNullException(nameof(canonicalMerger)); - _eventLog = eventLog ?? throw new ArgumentNullException(nameof(eventLog)); - _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); - _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); - } - - public async Task<AdvisoryMergeResult> MergeAsync(string seedAdvisoryKey, CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(seedAdvisoryKey); - - var component = await _aliasResolver.BuildComponentAsync(seedAdvisoryKey, cancellationToken).ConfigureAwait(false); - var inputs = new List<Advisory>(); - - foreach (var advisoryKey in component.AdvisoryKeys) - { - cancellationToken.ThrowIfCancellationRequested(); - var advisory = await _advisoryStore.FindAsync(advisoryKey, cancellationToken).ConfigureAwait(false); - if (advisory is not null) - { - inputs.Add(advisory); - } - } - - if (inputs.Count == 0) - { - _logger.LogWarning("Alias component seeded by {Seed} contains no persisted advisories", seedAdvisoryKey); - return AdvisoryMergeResult.Empty(seedAdvisoryKey, component); - } - - var canonicalKey = SelectCanonicalKey(component) ?? seedAdvisoryKey; - var canonicalMerge = ApplyCanonicalMergeIfNeeded(canonicalKey, inputs); - var before = await _advisoryStore.FindAsync(canonicalKey, cancellationToken).ConfigureAwait(false); - var normalizedInputs = NormalizeInputs(inputs, canonicalKey).ToList(); - - PrecedenceMergeResult precedenceResult; - try - { - precedenceResult = _precedenceMerger.Merge(normalizedInputs); - } - catch (Exception ex) - { - _logger.LogError(ex, "Failed to merge alias component seeded by {Seed}", seedAdvisoryKey); - throw; - } - - var merged = precedenceResult.Advisory; - var conflictDetails = precedenceResult.Conflicts; - - if (component.Collisions.Count > 0) - { - foreach (var collision in component.Collisions) - { - var tags = new KeyValuePair<string, object?>[] - { - new("scheme", collision.Scheme ?? string.Empty), - new("alias_value", collision.Value ?? 
string.Empty), - new("advisory_count", collision.AdvisoryKeys.Count), - }; - - AliasCollisionCounter.Add(1, tags); - - _logger.LogInformation( - "Alias collision {Scheme}:{Value} involves advisories {Advisories}", - collision.Scheme, - collision.Value, - string.Join(", ", collision.AdvisoryKeys)); - } - } - - await _advisoryStore.UpsertAsync(merged, cancellationToken).ConfigureAwait(false); - await _mergeEventWriter.AppendAsync( - canonicalKey, - before, - merged, - Array.Empty<Guid>(), - ConvertFieldDecisions(canonicalMerge?.Decisions), - cancellationToken).ConfigureAwait(false); - - var conflictSummaries = await AppendEventLogAsync(canonicalKey, normalizedInputs, merged, conflictDetails, cancellationToken).ConfigureAwait(false); - - return new AdvisoryMergeResult(seedAdvisoryKey, canonicalKey, component, inputs, before, merged, conflictSummaries); - } - - private async Task<IReadOnlyList<MergeConflictSummary>> AppendEventLogAsync( - string vulnerabilityKey, - IReadOnlyList<Advisory> inputs, - Advisory merged, - IReadOnlyList<MergeConflictDetail> conflicts, - CancellationToken cancellationToken) - { - var recordedAt = _timeProvider.GetUtcNow(); - var statements = new List<AdvisoryStatementInput>(inputs.Count + 1); - var statementIds = new Dictionary<Advisory, Guid>(ReferenceEqualityComparer.Instance); - - foreach (var advisory in inputs) - { - var statementId = Guid.NewGuid(); - statementIds[advisory] = statementId; - statements.Add(new AdvisoryStatementInput( - vulnerabilityKey, - advisory, - DetermineAsOf(advisory, recordedAt), - InputDocumentIds: Array.Empty<Guid>(), - StatementId: statementId, - AdvisoryKey: advisory.AdvisoryKey)); - } - - var canonicalStatementId = Guid.NewGuid(); - statementIds[merged] = canonicalStatementId; - statements.Add(new AdvisoryStatementInput( - vulnerabilityKey, - merged, - recordedAt, - InputDocumentIds: Array.Empty<Guid>(), - StatementId: canonicalStatementId, - AdvisoryKey: merged.AdvisoryKey)); - - var conflictMaterialization = BuildConflictInputs(conflicts, vulnerabilityKey, statementIds, canonicalStatementId, recordedAt); - var conflictInputs = conflictMaterialization.Inputs; - var conflictSummaries = conflictMaterialization.Summaries; - - if (statements.Count == 0 && conflictInputs.Count == 0) - { - return conflictSummaries.Count == 0 - ? Array.Empty<MergeConflictSummary>() - : conflictSummaries.ToArray(); - } - - var request = new AdvisoryEventAppendRequest(statements, conflictInputs.Count > 0 ? conflictInputs : null); - - try - { - await _eventLog.AppendAsync(request, cancellationToken).ConfigureAwait(false); - } - finally - { - foreach (var conflict in conflictInputs) - { - conflict.Details.Dispose(); - } - } - - return conflictSummaries.Count == 0 - ? Array.Empty<MergeConflictSummary>() - : conflictSummaries.ToArray(); - } - - private static DateTimeOffset DetermineAsOf(Advisory advisory, DateTimeOffset fallback) - { - return (advisory.Modified ?? advisory.Published ?? 
fallback).ToUniversalTime(); - } - - private static ConflictMaterialization BuildConflictInputs( - IReadOnlyList<MergeConflictDetail> conflicts, - string vulnerabilityKey, - IReadOnlyDictionary<Advisory, Guid> statementIds, - Guid canonicalStatementId, - DateTimeOffset recordedAt) - { - if (conflicts.Count == 0) - { - return new ConflictMaterialization(new List<AdvisoryConflictInput>(0), new List<MergeConflictSummary>(0)); - } - - var inputs = new List<AdvisoryConflictInput>(conflicts.Count); - var summaries = new List<MergeConflictSummary>(conflicts.Count); - - foreach (var detail in conflicts) - { - if (!statementIds.TryGetValue(detail.Suppressed, out var suppressedId)) - { - continue; - } - - var related = new List<Guid> { canonicalStatementId, suppressedId }; - if (statementIds.TryGetValue(detail.Primary, out var primaryId)) - { - if (!related.Contains(primaryId)) - { - related.Add(primaryId); - } - } - - var payload = ConflictDetailPayload.FromDetail(detail); - var explainer = payload.ToExplainer(); - - var canonicalJson = explainer.ToCanonicalJson(); - var document = JsonDocument.Parse(canonicalJson); - var asOf = (detail.Primary.Modified ?? detail.Suppressed.Modified ?? recordedAt).ToUniversalTime(); - var conflictId = Guid.NewGuid(); - var statementIdArray = ImmutableArray.CreateRange(related); - var conflictHash = explainer.ComputeHashHex(canonicalJson); - - inputs.Add(new AdvisoryConflictInput( - vulnerabilityKey, - document, - asOf, - related, - ConflictId: conflictId)); - - summaries.Add(new MergeConflictSummary( - conflictId, - vulnerabilityKey, - statementIdArray, - conflictHash, - asOf, - recordedAt, - explainer)); - } - - return new ConflictMaterialization(inputs, summaries); - } - - private static IEnumerable<Advisory> NormalizeInputs(IEnumerable<Advisory> advisories, string canonicalKey) - { - foreach (var advisory in advisories) - { - yield return CloneWithKey(advisory, canonicalKey); - } - } - - private static Advisory CloneWithKey(Advisory source, string advisoryKey) - => new( - advisoryKey, - source.Title, - source.Summary, - source.Language, - source.Published, - source.Modified, - source.Severity, - source.ExploitKnown, - source.Aliases, - source.Credits, - source.References, - source.AffectedPackages, - source.CvssMetrics, - source.Provenance, - source.Description, - source.Cwes, - source.CanonicalMetricId); - - private CanonicalMergeResult? ApplyCanonicalMergeIfNeeded(string canonicalKey, List<Advisory> inputs) - { - if (inputs.Count == 0) - { - return null; - } - - var ghsa = FindBySource(inputs, CanonicalSources.Ghsa); - var nvd = FindBySource(inputs, CanonicalSources.Nvd); - var osv = FindBySource(inputs, CanonicalSources.Osv); - - var participatingSources = 0; - if (ghsa is not null) - { - participatingSources++; - } - - if (nvd is not null) - { - participatingSources++; - } - - if (osv is not null) - { - participatingSources++; - } - - if (participatingSources < 2) - { - return null; - } - - var result = _canonicalMerger.Merge(canonicalKey, ghsa, nvd, osv); - - inputs.RemoveAll(advisory => MatchesCanonicalSource(advisory)); - inputs.Add(result.Advisory); - - return result; - } - - private static Advisory? 
FindBySource(IEnumerable<Advisory> advisories, string source) - => advisories.FirstOrDefault(advisory => advisory.Provenance.Any(provenance => - !string.Equals(provenance.Kind, "merge", StringComparison.OrdinalIgnoreCase) && - string.Equals(provenance.Source, source, StringComparison.OrdinalIgnoreCase))); - - private static bool MatchesCanonicalSource(Advisory advisory) - { - foreach (var provenance in advisory.Provenance) - { - if (string.Equals(provenance.Kind, "merge", StringComparison.OrdinalIgnoreCase)) - { - continue; - } - - if (string.Equals(provenance.Source, CanonicalSources.Ghsa, StringComparison.OrdinalIgnoreCase) || - string.Equals(provenance.Source, CanonicalSources.Nvd, StringComparison.OrdinalIgnoreCase) || - string.Equals(provenance.Source, CanonicalSources.Osv, StringComparison.OrdinalIgnoreCase)) - { - return true; - } - } - - return false; - } - - private static IReadOnlyList<MergeFieldDecision> ConvertFieldDecisions(ImmutableArray<FieldDecision>? decisions) - { - if (decisions is null || decisions.Value.IsDefaultOrEmpty) - { - return Array.Empty<MergeFieldDecision>(); - } - - var builder = ImmutableArray.CreateBuilder<MergeFieldDecision>(decisions.Value.Length); - foreach (var decision in decisions.Value) - { - builder.Add(new MergeFieldDecision( - decision.Field, - decision.SelectedSource, - decision.DecisionReason, - decision.SelectedModified, - decision.ConsideredSources.ToArray())); - } - - return builder.ToImmutable(); - } - - private static class CanonicalSources - { - public const string Ghsa = "ghsa"; - public const string Nvd = "nvd"; - public const string Osv = "osv"; - } - - private sealed record ConflictMaterialization( - List<AdvisoryConflictInput> Inputs, - List<MergeConflictSummary> Summaries); - - private static string? SelectCanonicalKey(AliasComponent component) - { - foreach (var scheme in PreferredAliasSchemes) - { - var alias = component.AliasMap.Values - .SelectMany(static aliases => aliases) - .FirstOrDefault(record => string.Equals(record.Scheme, scheme, StringComparison.OrdinalIgnoreCase)); - if (!string.IsNullOrWhiteSpace(alias?.Value)) - { - return alias.Value; - } - } - - if (component.AliasMap.TryGetValue(component.SeedAdvisoryKey, out var seedAliases)) - { - var primary = seedAliases.FirstOrDefault(record => string.Equals(record.Scheme, AliasStoreConstants.PrimaryScheme, StringComparison.OrdinalIgnoreCase)); - if (!string.IsNullOrWhiteSpace(primary?.Value)) - { - return primary.Value; - } - } - - var firstAlias = component.AliasMap.Values.SelectMany(static aliases => aliases).FirstOrDefault(); - if (!string.IsNullOrWhiteSpace(firstAlias?.Value)) - { - return firstAlias.Value; - } - - return component.SeedAdvisoryKey; - } -} - -public sealed record AdvisoryMergeResult( - string SeedAdvisoryKey, - string CanonicalAdvisoryKey, - AliasComponent Component, - IReadOnlyList<Advisory> Inputs, - Advisory? Previous, - Advisory? 
Merged, - IReadOnlyList<MergeConflictSummary> Conflicts) -{ - public static AdvisoryMergeResult Empty(string seed, AliasComponent component) - => new(seed, seed, component, Array.Empty<Advisory>(), null, null, Array.Empty<MergeConflictSummary>()); -} +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Diagnostics.Metrics; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using StellaOps.Concelier.Core; +using StellaOps.Concelier.Core.Events; +using StellaOps.Concelier.Models; +using StellaOps.Concelier.Storage.Mongo.Advisories; +using StellaOps.Concelier.Storage.Mongo.Aliases; +using StellaOps.Concelier.Storage.Mongo.MergeEvents; +using System.Text.Json; + +namespace StellaOps.Concelier.Merge.Services; + +public sealed class AdvisoryMergeService +{ + private static readonly Meter MergeMeter = new("StellaOps.Concelier.Merge"); + private static readonly Counter<long> AliasCollisionCounter = MergeMeter.CreateCounter<long>( + "concelier.merge.identity_conflicts", + unit: "count", + description: "Number of alias collisions detected during merge."); + + private static readonly string[] PreferredAliasSchemes = + { + AliasSchemes.Cve, + AliasSchemes.Ghsa, + AliasSchemes.OsV, + AliasSchemes.Msrc, + }; + + private readonly AliasGraphResolver _aliasResolver; + private readonly IAdvisoryStore _advisoryStore; + private readonly AdvisoryPrecedenceMerger _precedenceMerger; + private readonly MergeEventWriter _mergeEventWriter; + private readonly IAdvisoryEventLog _eventLog; + private readonly TimeProvider _timeProvider; + private readonly CanonicalMerger _canonicalMerger; + private readonly ILogger<AdvisoryMergeService> _logger; + + public AdvisoryMergeService( + AliasGraphResolver aliasResolver, + IAdvisoryStore advisoryStore, + AdvisoryPrecedenceMerger precedenceMerger, + MergeEventWriter mergeEventWriter, + CanonicalMerger canonicalMerger, + IAdvisoryEventLog eventLog, + TimeProvider timeProvider, + ILogger<AdvisoryMergeService> logger) + { + _aliasResolver = aliasResolver ?? throw new ArgumentNullException(nameof(aliasResolver)); + _advisoryStore = advisoryStore ?? throw new ArgumentNullException(nameof(advisoryStore)); + _precedenceMerger = precedenceMerger ?? throw new ArgumentNullException(nameof(precedenceMerger)); + _mergeEventWriter = mergeEventWriter ?? throw new ArgumentNullException(nameof(mergeEventWriter)); + _canonicalMerger = canonicalMerger ?? throw new ArgumentNullException(nameof(canonicalMerger)); + _eventLog = eventLog ?? throw new ArgumentNullException(nameof(eventLog)); + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + } + + public async Task<AdvisoryMergeResult> MergeAsync(string seedAdvisoryKey, CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(seedAdvisoryKey); + + var component = await _aliasResolver.BuildComponentAsync(seedAdvisoryKey, cancellationToken).ConfigureAwait(false); + var inputs = new List<Advisory>(); + + foreach (var advisoryKey in component.AdvisoryKeys) + { + cancellationToken.ThrowIfCancellationRequested(); + var advisory = await _advisoryStore.FindAsync(advisoryKey, cancellationToken).ConfigureAwait(false); + if (advisory is not null) + { + inputs.Add(advisory); + } + } + + if (inputs.Count == 0) + { + _logger.LogWarning("Alias component seeded by {Seed} contains no persisted advisories", seedAdvisoryKey); + return AdvisoryMergeResult.Empty(seedAdvisoryKey, component); + } + + var canonicalKey = SelectCanonicalKey(component) ?? seedAdvisoryKey; + var canonicalMerge = ApplyCanonicalMergeIfNeeded(canonicalKey, inputs); + var before = await _advisoryStore.FindAsync(canonicalKey, cancellationToken).ConfigureAwait(false); + var normalizedInputs = NormalizeInputs(inputs, canonicalKey).ToList(); + + PrecedenceMergeResult precedenceResult; + try + { + precedenceResult = _precedenceMerger.Merge(normalizedInputs); + } + catch (Exception ex) + { + _logger.LogError(ex, "Failed to merge alias component seeded by {Seed}", seedAdvisoryKey); + throw; + } + + var merged = precedenceResult.Advisory; + var conflictDetails = precedenceResult.Conflicts; + + if (component.Collisions.Count > 0) + { + foreach (var collision in component.Collisions) + { + var tags = new KeyValuePair<string, object?>[] + { + new("scheme", collision.Scheme ?? string.Empty), + new("alias_value", collision.Value ?? 
string.Empty), + new("advisory_count", collision.AdvisoryKeys.Count), + }; + + AliasCollisionCounter.Add(1, tags); + + _logger.LogInformation( + "Alias collision {Scheme}:{Value} involves advisories {Advisories}", + collision.Scheme, + collision.Value, + string.Join(", ", collision.AdvisoryKeys)); + } + } + + await _advisoryStore.UpsertAsync(merged, cancellationToken).ConfigureAwait(false); + await _mergeEventWriter.AppendAsync( + canonicalKey, + before, + merged, + Array.Empty<Guid>(), + ConvertFieldDecisions(canonicalMerge?.Decisions), + cancellationToken).ConfigureAwait(false); + + var conflictSummaries = await AppendEventLogAsync(canonicalKey, normalizedInputs, merged, conflictDetails, cancellationToken).ConfigureAwait(false); + + return new AdvisoryMergeResult(seedAdvisoryKey, canonicalKey, component, inputs, before, merged, conflictSummaries); + } + + private async Task<IReadOnlyList<MergeConflictSummary>> AppendEventLogAsync( + string vulnerabilityKey, + IReadOnlyList<Advisory> inputs, + Advisory merged, + IReadOnlyList<MergeConflictDetail> conflicts, + CancellationToken cancellationToken) + { + var recordedAt = _timeProvider.GetUtcNow(); + var statements = new List<AdvisoryStatementInput>(inputs.Count + 1); + var statementIds = new Dictionary<Advisory, Guid>(ReferenceEqualityComparer.Instance); + + foreach (var advisory in inputs) + { + var statementId = Guid.NewGuid(); + statementIds[advisory] = statementId; + statements.Add(new AdvisoryStatementInput( + vulnerabilityKey, + advisory, + DetermineAsOf(advisory, recordedAt), + InputDocumentIds: Array.Empty<Guid>(), + StatementId: statementId, + AdvisoryKey: advisory.AdvisoryKey)); + } + + var canonicalStatementId = Guid.NewGuid(); + statementIds[merged] = canonicalStatementId; + statements.Add(new AdvisoryStatementInput( + vulnerabilityKey, + merged, + recordedAt, + InputDocumentIds: Array.Empty<Guid>(), + StatementId: canonicalStatementId, + AdvisoryKey: merged.AdvisoryKey)); + + var conflictMaterialization = BuildConflictInputs(conflicts, vulnerabilityKey, statementIds, canonicalStatementId, recordedAt); + var conflictInputs = conflictMaterialization.Inputs; + var conflictSummaries = conflictMaterialization.Summaries; + + if (statements.Count == 0 && conflictInputs.Count == 0) + { + return conflictSummaries.Count == 0 + ? Array.Empty<MergeConflictSummary>() + : conflictSummaries.ToArray(); + } + + var request = new AdvisoryEventAppendRequest(statements, conflictInputs.Count > 0 ? conflictInputs : null); + + try + { + await _eventLog.AppendAsync(request, cancellationToken).ConfigureAwait(false); + } + finally + { + foreach (var conflict in conflictInputs) + { + conflict.Details.Dispose(); + } + } + + return conflictSummaries.Count == 0 + ? Array.Empty<MergeConflictSummary>() + : conflictSummaries.ToArray(); + } + + private static DateTimeOffset DetermineAsOf(Advisory advisory, DateTimeOffset fallback) + { + return (advisory.Modified ?? advisory.Published ?? 
fallback).ToUniversalTime(); + } + + private static ConflictMaterialization BuildConflictInputs( + IReadOnlyList<MergeConflictDetail> conflicts, + string vulnerabilityKey, + IReadOnlyDictionary<Advisory, Guid> statementIds, + Guid canonicalStatementId, + DateTimeOffset recordedAt) + { + if (conflicts.Count == 0) + { + return new ConflictMaterialization(new List<AdvisoryConflictInput>(0), new List<MergeConflictSummary>(0)); + } + + var inputs = new List<AdvisoryConflictInput>(conflicts.Count); + var summaries = new List<MergeConflictSummary>(conflicts.Count); + + foreach (var detail in conflicts) + { + if (!statementIds.TryGetValue(detail.Suppressed, out var suppressedId)) + { + continue; + } + + var related = new List<Guid> { canonicalStatementId, suppressedId }; + if (statementIds.TryGetValue(detail.Primary, out var primaryId)) + { + if (!related.Contains(primaryId)) + { + related.Add(primaryId); + } + } + + var payload = ConflictDetailPayload.FromDetail(detail); + var explainer = payload.ToExplainer(); + + var canonicalJson = explainer.ToCanonicalJson(); + var document = JsonDocument.Parse(canonicalJson); + var asOf = (detail.Primary.Modified ?? detail.Suppressed.Modified ?? recordedAt).ToUniversalTime(); + var conflictId = Guid.NewGuid(); + var statementIdArray = ImmutableArray.CreateRange(related); + var conflictHash = explainer.ComputeHashHex(canonicalJson); + + inputs.Add(new AdvisoryConflictInput( + vulnerabilityKey, + document, + asOf, + related, + ConflictId: conflictId)); + + summaries.Add(new MergeConflictSummary( + conflictId, + vulnerabilityKey, + statementIdArray, + conflictHash, + asOf, + recordedAt, + explainer)); + } + + return new ConflictMaterialization(inputs, summaries); + } + + private static IEnumerable<Advisory> NormalizeInputs(IEnumerable<Advisory> advisories, string canonicalKey) + { + foreach (var advisory in advisories) + { + yield return CloneWithKey(advisory, canonicalKey); + } + } + + private static Advisory CloneWithKey(Advisory source, string advisoryKey) + => new( + advisoryKey, + source.Title, + source.Summary, + source.Language, + source.Published, + source.Modified, + source.Severity, + source.ExploitKnown, + source.Aliases, + source.Credits, + source.References, + source.AffectedPackages, + source.CvssMetrics, + source.Provenance, + source.Description, + source.Cwes, + source.CanonicalMetricId); + + private CanonicalMergeResult? ApplyCanonicalMergeIfNeeded(string canonicalKey, List<Advisory> inputs) + { + if (inputs.Count == 0) + { + return null; + } + + var ghsa = FindBySource(inputs, CanonicalSources.Ghsa); + var nvd = FindBySource(inputs, CanonicalSources.Nvd); + var osv = FindBySource(inputs, CanonicalSources.Osv); + + var participatingSources = 0; + if (ghsa is not null) + { + participatingSources++; + } + + if (nvd is not null) + { + participatingSources++; + } + + if (osv is not null) + { + participatingSources++; + } + + if (participatingSources < 2) + { + return null; + } + + var result = _canonicalMerger.Merge(canonicalKey, ghsa, nvd, osv); + + inputs.RemoveAll(advisory => MatchesCanonicalSource(advisory)); + inputs.Add(result.Advisory); + + return result; + } + + private static Advisory? 
FindBySource(IEnumerable<Advisory> advisories, string source) + => advisories.FirstOrDefault(advisory => advisory.Provenance.Any(provenance => + !string.Equals(provenance.Kind, "merge", StringComparison.OrdinalIgnoreCase) && + string.Equals(provenance.Source, source, StringComparison.OrdinalIgnoreCase))); + + private static bool MatchesCanonicalSource(Advisory advisory) + { + foreach (var provenance in advisory.Provenance) + { + if (string.Equals(provenance.Kind, "merge", StringComparison.OrdinalIgnoreCase)) + { + continue; + } + + if (string.Equals(provenance.Source, CanonicalSources.Ghsa, StringComparison.OrdinalIgnoreCase) || + string.Equals(provenance.Source, CanonicalSources.Nvd, StringComparison.OrdinalIgnoreCase) || + string.Equals(provenance.Source, CanonicalSources.Osv, StringComparison.OrdinalIgnoreCase)) + { + return true; + } + } + + return false; + } + + private static IReadOnlyList<MergeFieldDecision> ConvertFieldDecisions(ImmutableArray<FieldDecision>? decisions) + { + if (decisions is null || decisions.Value.IsDefaultOrEmpty) + { + return Array.Empty<MergeFieldDecision>(); + } + + var builder = ImmutableArray.CreateBuilder<MergeFieldDecision>(decisions.Value.Length); + foreach (var decision in decisions.Value) + { + builder.Add(new MergeFieldDecision( + decision.Field, + decision.SelectedSource, + decision.DecisionReason, + decision.SelectedModified, + decision.ConsideredSources.ToArray())); + } + + return builder.ToImmutable(); + } + + private static class CanonicalSources + { + public const string Ghsa = "ghsa"; + public const string Nvd = "nvd"; + public const string Osv = "osv"; + } + + private sealed record ConflictMaterialization( + List<AdvisoryConflictInput> Inputs, + List<MergeConflictSummary> Summaries); + + private static string? SelectCanonicalKey(AliasComponent component) + { + foreach (var scheme in PreferredAliasSchemes) + { + var alias = component.AliasMap.Values + .SelectMany(static aliases => aliases) + .FirstOrDefault(record => string.Equals(record.Scheme, scheme, StringComparison.OrdinalIgnoreCase)); + if (!string.IsNullOrWhiteSpace(alias?.Value)) + { + return alias.Value; + } + } + + if (component.AliasMap.TryGetValue(component.SeedAdvisoryKey, out var seedAliases)) + { + var primary = seedAliases.FirstOrDefault(record => string.Equals(record.Scheme, AliasStoreConstants.PrimaryScheme, StringComparison.OrdinalIgnoreCase)); + if (!string.IsNullOrWhiteSpace(primary?.Value)) + { + return primary.Value; + } + } + + var firstAlias = component.AliasMap.Values.SelectMany(static aliases => aliases).FirstOrDefault(); + if (!string.IsNullOrWhiteSpace(firstAlias?.Value)) + { + return firstAlias.Value; + } + + return component.SeedAdvisoryKey; + } +} + +public sealed record AdvisoryMergeResult( + string SeedAdvisoryKey, + string CanonicalAdvisoryKey, + AliasComponent Component, + IReadOnlyList<Advisory> Inputs, + Advisory? Previous, + Advisory? 
Merged, + IReadOnlyList<MergeConflictSummary> Conflicts) +{ + public static AdvisoryMergeResult Empty(string seed, AliasComponent component) + => new(seed, seed, component, Array.Empty<Advisory>(), null, null, Array.Empty<MergeConflictSummary>()); +} diff --git a/src/StellaOps.Concelier.Merge/Services/AdvisoryPrecedenceMerger.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Merge/Services/AdvisoryPrecedenceMerger.cs similarity index 100% rename from src/StellaOps.Concelier.Merge/Services/AdvisoryPrecedenceMerger.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Merge/Services/AdvisoryPrecedenceMerger.cs diff --git a/src/StellaOps.Concelier.Merge/Services/AffectedPackagePrecedenceResolver.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Merge/Services/AffectedPackagePrecedenceResolver.cs similarity index 100% rename from src/StellaOps.Concelier.Merge/Services/AffectedPackagePrecedenceResolver.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Merge/Services/AffectedPackagePrecedenceResolver.cs diff --git a/src/StellaOps.Concelier.Merge/Services/AliasGraphResolver.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Merge/Services/AliasGraphResolver.cs similarity index 100% rename from src/StellaOps.Concelier.Merge/Services/AliasGraphResolver.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Merge/Services/AliasGraphResolver.cs diff --git a/src/StellaOps.Concelier.Merge/Services/CanonicalHashCalculator.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Merge/Services/CanonicalHashCalculator.cs similarity index 100% rename from src/StellaOps.Concelier.Merge/Services/CanonicalHashCalculator.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Merge/Services/CanonicalHashCalculator.cs diff --git a/src/StellaOps.Concelier.Merge/Services/ConflictDetailPayload.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Merge/Services/ConflictDetailPayload.cs similarity index 96% rename from src/StellaOps.Concelier.Merge/Services/ConflictDetailPayload.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Merge/Services/ConflictDetailPayload.cs index d00e4b74..c29aaa73 100644 --- a/src/StellaOps.Concelier.Merge/Services/ConflictDetailPayload.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Merge/Services/ConflictDetailPayload.cs @@ -1,44 +1,44 @@ -using System; -using System.Collections.Generic; - -namespace StellaOps.Concelier.Merge.Services; - -/// <summary> -/// Canonical conflict detail used to materialize structured payloads for persistence and explainers. -/// </summary> -public sealed record ConflictDetailPayload( - string Type, - string Reason, - IReadOnlyList<string> PrimarySources, - int PrimaryRank, - IReadOnlyList<string> SuppressedSources, - int SuppressedRank, - string? PrimaryValue, - string? 
SuppressedValue) -{ - public static ConflictDetailPayload FromDetail(MergeConflictDetail detail) - { - ArgumentNullException.ThrowIfNull(detail); - - return new ConflictDetailPayload( - detail.ConflictType, - detail.Reason, - detail.PrimarySources, - detail.PrimaryRank, - detail.SuppressedSources, - detail.SuppressedRank, - detail.PrimaryValue, - detail.SuppressedValue); - } - - public MergeConflictExplainerPayload ToExplainer() => - new( - Type, - Reason, - PrimarySources, - PrimaryRank, - SuppressedSources, - SuppressedRank, - PrimaryValue, - SuppressedValue); -} +using System; +using System.Collections.Generic; + +namespace StellaOps.Concelier.Merge.Services; + +/// <summary> +/// Canonical conflict detail used to materialize structured payloads for persistence and explainers. +/// </summary> +public sealed record ConflictDetailPayload( + string Type, + string Reason, + IReadOnlyList<string> PrimarySources, + int PrimaryRank, + IReadOnlyList<string> SuppressedSources, + int SuppressedRank, + string? PrimaryValue, + string? SuppressedValue) +{ + public static ConflictDetailPayload FromDetail(MergeConflictDetail detail) + { + ArgumentNullException.ThrowIfNull(detail); + + return new ConflictDetailPayload( + detail.ConflictType, + detail.Reason, + detail.PrimarySources, + detail.PrimaryRank, + detail.SuppressedSources, + detail.SuppressedRank, + detail.PrimaryValue, + detail.SuppressedValue); + } + + public MergeConflictExplainerPayload ToExplainer() => + new( + Type, + Reason, + PrimarySources, + PrimaryRank, + SuppressedSources, + SuppressedRank, + PrimaryValue, + SuppressedValue); +} diff --git a/src/StellaOps.Concelier.Merge/Services/MergeConflictDetail.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Merge/Services/MergeConflictDetail.cs similarity index 100% rename from src/StellaOps.Concelier.Merge/Services/MergeConflictDetail.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Merge/Services/MergeConflictDetail.cs diff --git a/src/StellaOps.Concelier.Merge/Services/MergeConflictExplainerPayload.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Merge/Services/MergeConflictExplainerPayload.cs similarity index 100% rename from src/StellaOps.Concelier.Merge/Services/MergeConflictExplainerPayload.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Merge/Services/MergeConflictExplainerPayload.cs diff --git a/src/StellaOps.Concelier.Merge/Services/MergeConflictSummary.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Merge/Services/MergeConflictSummary.cs similarity index 100% rename from src/StellaOps.Concelier.Merge/Services/MergeConflictSummary.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Merge/Services/MergeConflictSummary.cs diff --git a/src/StellaOps.Concelier.Merge/Services/MergeEventWriter.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Merge/Services/MergeEventWriter.cs similarity index 100% rename from src/StellaOps.Concelier.Merge/Services/MergeEventWriter.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Merge/Services/MergeEventWriter.cs diff --git a/src/StellaOps.Concelier.Merge/Services/PrecedenceMergeResult.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Merge/Services/PrecedenceMergeResult.cs similarity index 100% rename from src/StellaOps.Concelier.Merge/Services/PrecedenceMergeResult.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Merge/Services/PrecedenceMergeResult.cs diff --git a/src/StellaOps.Concelier.Merge/StellaOps.Concelier.Merge.csproj 
b/src/Concelier/__Libraries/StellaOps.Concelier.Merge/StellaOps.Concelier.Merge.csproj similarity index 97% rename from src/StellaOps.Concelier.Merge/StellaOps.Concelier.Merge.csproj rename to src/Concelier/__Libraries/StellaOps.Concelier.Merge/StellaOps.Concelier.Merge.csproj index 4e9ff539..c8ae52ed 100644 --- a/src/StellaOps.Concelier.Merge/StellaOps.Concelier.Merge.csproj +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Merge/StellaOps.Concelier.Merge.csproj @@ -1,18 +1,18 @@ -<?xml version="1.0" encoding="utf-8"?> -<Project Sdk="Microsoft.NET.Sdk"> - - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - - <ItemGroup> - <PackageReference Include="Microsoft.Extensions.Configuration.Binder" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Semver" Version="2.3.0" /> - <ProjectReference Include="../StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - </ItemGroup> -</Project> +<?xml version="1.0" encoding="utf-8"?> +<Project Sdk="Microsoft.NET.Sdk"> + + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + + <ItemGroup> + <PackageReference Include="Microsoft.Extensions.Configuration.Binder" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Semver" Version="2.3.0" /> + <ProjectReference Include="../StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> + </ItemGroup> +</Project> diff --git a/src/StellaOps.Concelier.Merge/TASKS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Merge/TASKS.md similarity index 99% rename from src/StellaOps.Concelier.Merge/TASKS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Merge/TASKS.md index 894c0c24..7b53549a 100644 --- a/src/StellaOps.Concelier.Merge/TASKS.md +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Merge/TASKS.md @@ -1,33 +1,33 @@ -# TASKS -| Task | Owner(s) | Depends on | Notes | -|---|---|---|---| -|Identity graph and alias resolver|BE-Merge|Models, Storage.Mongo|DONE – `AdvisoryIdentityResolver` builds alias-driven clusters with canonical key selection + unit coverage.| -|Precedence policy engine|BE-Merge|Architecture|**DONE** – precedence defaults enforced by `AdvisoryPrecedenceMerger`/`AdvisoryPrecedenceDefaults` with distro/PSIRT overriding registry feeds and CERT/KEV enrichers.| -|NEVRA comparer plus tests|BE-Merge (Distro WG)|Source.Distro fixtures|DONE – Added Nevra parser/comparer with tilde-aware rpm ordering and unit coverage.| -|Debian EVR comparer plus tests|BE-Merge (Distro WG)|Debian fixtures|DONE – DebianEvr comparer mirrors dpkg ordering with tilde/epoch handling and unit coverage.| -|SemVer range resolver plus tests|BE-Merge (OSS WG)|OSV/GHSA fixtures|DONE – SemanticVersionRangeResolver covers introduced/fixed/lastAffected semantics with 
SemVer ordering tests.| -|Canonical hash and merge_event writer|BE-Merge|Models, Storage.Mongo|DONE – Hash calculator + MergeEventWriter compute canonical SHA-256 digests and persist merge events.| -|Conflict detection and metrics|BE-Merge|Core|**DONE** – merge meters emit override/conflict counters and structured audits (`AdvisoryPrecedenceMerger`).| -|FEEDMERGE-ENGINE-04-001 GHSA/NVD/OSV conflict rules|BE-Merge|Core, Storage.Mongo|DONE – `AdvisoryMergeService` applies `CanonicalMerger` output before precedence merge, replacing source advisories with the canonical transcript. **Coordination:** connector fixture owners should surface canonical deltas to Merge QA before regression sign-off.| -|FEEDMERGE-ENGINE-04-002 Override metrics instrumentation|BE-Merge|Observability|DONE – merge events persist `MergeFieldDecision` records enabling analytics on precedence/freshness decisions. **Next:** hand off metrics schema to Ops for dashboard wiring.| -|FEEDMERGE-ENGINE-04-003 Reference & credit union pipeline|BE-Merge|Models|DONE – canonical merge preserves union semantics while respecting precedence, validated via updated credit union tests.| -|End-to-end determinism test|QA|Merge, key connectors|**DONE** – `MergePrecedenceIntegrationTests.MergePipeline_IsDeterministicAcrossRuns` guards determinism.| -|FEEDMERGE-QA-04-001 End-to-end conflict regression suite|QA|Merge|DONE – `AdvisoryMergeServiceTests.MergeAsync_AppliesCanonicalRulesAndPersistsDecisions` exercises GHSA/NVD/OSV conflict path and merge-event analytics. **Reminder:** QA to sync with connector teams once new fixture triples land.| -|Override audit logging|BE-Merge|Observability|DONE – override audits now emit structured logs plus bounded-tag metrics suitable for prod telemetry.| -|Configurable precedence table|BE-Merge|Architecture|DONE – precedence options bind via concelier:merge:precedence:ranks with docs/tests covering operator workflow.| -|Range primitives backlog|BE-Merge|Connector WGs|**DOING** – Coordinate remaining connectors (`Acsc`, `Cccs`, `CertBund`, `CertCc`, `Cve`, `Ghsa`, `Ics.Cisa`, `Kisa`, `Ru.Bdu`, `Ru.Nkcki`, `Vndr.Apple`, `Vndr.Cisco`, `Vndr.Msrc`) to emit canonical RangePrimitives with provenance tags; track progress/fixtures here.<br>2025-10-11: Storage alignment notes + sample normalized rule JSON now captured in `RANGE_PRIMITIVES_COORDINATION.md` (see “Storage alignment quick reference”).<br>2025-10-11 18:45Z: GHSA normalized rules landed; OSV connector picked up next for rollout.<br>2025-10-11 21:10Z: `docs/dev/merge_semver_playbook.md` Section 8 now documents the persisted Mongo projection (SemVer + NEVRA) for connector reviewers.<br>2025-10-11 21:30Z: Added `docs/dev/normalized_versions_rollout.md` dashboard to centralize connector status and upcoming milestones.<br>2025-10-11 21:55Z: Merge now emits `concelier.merge.normalized_rules*` counters and unions connector-provided normalized arrays; see new test coverage in `AdvisoryPrecedenceMergerTests.Merge_RecordsNormalizedRuleMetrics`.<br>2025-10-12 17:05Z: CVE + KEV normalized rule verification complete; OSV parity fixtures revalidated—downstream parity/monitoring tasks may proceed.<br>2025-10-19 14:35Z: Prerequisites reviewed (none outstanding); FEEDMERGE-COORD-02-900 remains in DOING with connector follow-ups unchanged.<br>2025-10-19 15:25Z: Refreshed `RANGE_PRIMITIVES_COORDINATION.md` matrix + added targeted follow-ups (Cccs, CertBund, ICS-CISA, Kisa, Vndr.Cisco) with delivery dates 2025-10-21 → 2025-10-25; monitoring merge counters for regression.| 
-|Range primitives backlog|BE-Merge|Connector WGs|**DOING** – Coordinate remaining connectors (`Acsc`, `Cccs`, `CertBund`, `CertCc`, `Cve`, `Ghsa`, `Ics.Cisa`, `Kisa`, `Ru.Bdu`, `Ru.Nkcki`, `Vndr.Apple`, `Vndr.Cisco`, `Vndr.Msrc`) to emit canonical RangePrimitives with provenance tags; track progress/fixtures here.<br>2025-10-11: Storage alignment notes + sample normalized rule JSON now captured in `RANGE_PRIMITIVES_COORDINATION.md` (see “Storage alignment quick reference”).<br>2025-10-11 18:45Z: GHSA normalized rules landed; OSV connector picked up next for rollout.<br>2025-10-11 21:10Z: `docs/dev/merge_semver_playbook.md` Section 8 now documents the persisted Mongo projection (SemVer + NEVRA) for connector reviewers.<br>2025-10-11 21:30Z: Added `docs/dev/normalized_versions_rollout.md` dashboard to centralize connector status and upcoming milestones.<br>2025-10-11 21:55Z: Merge now emits `concelier.merge.normalized_rules*` counters and unions connector-provided normalized arrays; see new test coverage in `AdvisoryPrecedenceMergerTests.Merge_RecordsNormalizedRuleMetrics`.<br>2025-10-12 17:05Z: CVE + KEV normalized rule verification complete; OSV parity fixtures revalidated—downstream parity/monitoring tasks may proceed.<br>2025-10-19 14:35Z: Prerequisites reviewed (none outstanding); FEEDMERGE-COORD-02-900 remains in DOING with connector follow-ups unchanged.<br>2025-10-19 15:25Z: Refreshed `RANGE_PRIMITIVES_COORDINATION.md` matrix + added targeted follow-ups (Cccs, CertBund, ICS-CISA, Kisa, Vndr.Cisco) with delivery dates 2025-10-21 → 2025-10-25; monitoring merge counters for regression.<br>2025-10-20 19:30Z: Coordination matrix + rollout dashboard updated with current connector statuses and due dates; flagged Slack escalation plan if Cccs/Cisco miss 2025-10-21 and documented Acsc kickoff window for 2025-10-24.| -|Merge pipeline parity for new advisory fields|BE-Merge|Models, Core|DONE (2025-10-15) – merge service now surfaces description/CWE/canonical metric decisions with updated metrics/tests.| -|Connector coordination for new advisory fields|Connector Leads, BE-Merge|Models, Core|**DONE (2025-10-15)** – GHSA, NVD, and OSV connectors now emit advisory descriptions, CWE weaknesses, and canonical metric ids. Fixtures refreshed (GHSA connector regression suite, `conflict-nvd.canonical.json`, OSV parity snapshots) and completion recorded in coordination log.| -|FEEDMERGE-ENGINE-07-001 Conflict sets & explainers|BE-Merge|FEEDSTORAGE-DATA-07-001|**DONE (2025-10-20)** – Merge surfaces conflict explainers with replay hashes via `MergeConflictSummary`; API exposes structured payloads and integration tests cover deterministic `asOf` hashes.| -> Remark (2025-10-20): `AdvisoryMergeService` now returns conflict summaries with deterministic hashes; WebService replay endpoint emits typed explainers verified by new tests. 
-|FEEDMERGE-COORD-02-901 Connector deadline check-ins|BE-Merge|FEEDMERGE-COORD-02-900|**TODO (due 2025-10-21)** – Confirm Cccs/Cisco normalized-rule branches land, capture `concelier.merge.normalized_rules*` counter screenshots, and update coordination docs with the results.| -|FEEDMERGE-COORD-02-902 ICS-CISA normalized-rule decision support|BE-Merge, Models|FEEDMERGE-COORD-02-900|**TODO (due 2025-10-23)** – Review ICS-CISA sample advisories, confirm SemVer reuse vs new firmware scheme, pre-stage Models ticket template, and document outcome in coordination docs + tracker files.| -|FEEDMERGE-COORD-02-903 KISA firmware scheme review|BE-Merge, Models|FEEDMERGE-COORD-02-900|**TODO (due 2025-10-24)** – Pair with KISA team on proposed firmware scheme (`kisa.build` or variant), ensure builder alignment, open Models ticket if required, and log decision in coordination docs + tracker files.| - -## Link-Not-Merge v1 Transition -| Task | Owner(s) | Depends on | Notes | -|---|---|---|---| -|MERGE-LNM-21-001 Migration plan authoring|BE-Merge, Architecture Guild|CONCELIER-LNM-21-101|Draft `no-merge` migration playbook, documenting backfill strategy, feature flag rollout, and rollback steps for legacy merge pipeline deprecation.| -|MERGE-LNM-21-002 Merge service deprecation|BE-Merge|MERGE-LNM-21-001|Refactor or retire `AdvisoryMergeService` and related pipelines, ensuring callers transition to observation/linkset APIs; add compile-time analyzer preventing merge service usage.| -|MERGE-LNM-21-003 Determinism/test updates|QA Guild, BE-Merge|MERGE-LNM-21-002|Replace merge determinism suites with observation/linkset regression tests verifying no data mutation and conflicts remain visible.| +# TASKS +| Task | Owner(s) | Depends on | Notes | +|---|---|---|---| +|Identity graph and alias resolver|BE-Merge|Models, Storage.Mongo|DONE – `AdvisoryIdentityResolver` builds alias-driven clusters with canonical key selection + unit coverage.| +|Precedence policy engine|BE-Merge|Architecture|**DONE** – precedence defaults enforced by `AdvisoryPrecedenceMerger`/`AdvisoryPrecedenceDefaults` with distro/PSIRT overriding registry feeds and CERT/KEV enrichers.| +|NEVRA comparer plus tests|BE-Merge (Distro WG)|Source.Distro fixtures|DONE – Added Nevra parser/comparer with tilde-aware rpm ordering and unit coverage.| +|Debian EVR comparer plus tests|BE-Merge (Distro WG)|Debian fixtures|DONE – DebianEvr comparer mirrors dpkg ordering with tilde/epoch handling and unit coverage.| +|SemVer range resolver plus tests|BE-Merge (OSS WG)|OSV/GHSA fixtures|DONE – SemanticVersionRangeResolver covers introduced/fixed/lastAffected semantics with SemVer ordering tests.| +|Canonical hash and merge_event writer|BE-Merge|Models, Storage.Mongo|DONE – Hash calculator + MergeEventWriter compute canonical SHA-256 digests and persist merge events.| +|Conflict detection and metrics|BE-Merge|Core|**DONE** – merge meters emit override/conflict counters and structured audits (`AdvisoryPrecedenceMerger`).| +|FEEDMERGE-ENGINE-04-001 GHSA/NVD/OSV conflict rules|BE-Merge|Core, Storage.Mongo|DONE – `AdvisoryMergeService` applies `CanonicalMerger` output before precedence merge, replacing source advisories with the canonical transcript. **Coordination:** connector fixture owners should surface canonical deltas to Merge QA before regression sign-off.| +|FEEDMERGE-ENGINE-04-002 Override metrics instrumentation|BE-Merge|Observability|DONE – merge events persist `MergeFieldDecision` records enabling analytics on precedence/freshness decisions. 
**Next:** hand off metrics schema to Ops for dashboard wiring.| +|FEEDMERGE-ENGINE-04-003 Reference & credit union pipeline|BE-Merge|Models|DONE – canonical merge preserves union semantics while respecting precedence, validated via updated credit union tests.| +|End-to-end determinism test|QA|Merge, key connectors|**DONE** – `MergePrecedenceIntegrationTests.MergePipeline_IsDeterministicAcrossRuns` guards determinism.| +|FEEDMERGE-QA-04-001 End-to-end conflict regression suite|QA|Merge|DONE – `AdvisoryMergeServiceTests.MergeAsync_AppliesCanonicalRulesAndPersistsDecisions` exercises GHSA/NVD/OSV conflict path and merge-event analytics. **Reminder:** QA to sync with connector teams once new fixture triples land.| +|Override audit logging|BE-Merge|Observability|DONE – override audits now emit structured logs plus bounded-tag metrics suitable for prod telemetry.| +|Configurable precedence table|BE-Merge|Architecture|DONE – precedence options bind via concelier:merge:precedence:ranks with docs/tests covering operator workflow.| +|Range primitives backlog|BE-Merge|Connector WGs|**DOING** – Coordinate remaining connectors (`Acsc`, `Cccs`, `CertBund`, `CertCc`, `Cve`, `Ghsa`, `Ics.Cisa`, `Kisa`, `Ru.Bdu`, `Ru.Nkcki`, `Vndr.Apple`, `Vndr.Cisco`, `Vndr.Msrc`) to emit canonical RangePrimitives with provenance tags; track progress/fixtures here.<br>2025-10-11: Storage alignment notes + sample normalized rule JSON now captured in `RANGE_PRIMITIVES_COORDINATION.md` (see “Storage alignment quick reference”).<br>2025-10-11 18:45Z: GHSA normalized rules landed; OSV connector picked up next for rollout.<br>2025-10-11 21:10Z: `docs/dev/merge_semver_playbook.md` Section 8 now documents the persisted Mongo projection (SemVer + NEVRA) for connector reviewers.<br>2025-10-11 21:30Z: Added `docs/dev/normalized_versions_rollout.md` dashboard to centralize connector status and upcoming milestones.<br>2025-10-11 21:55Z: Merge now emits `concelier.merge.normalized_rules*` counters and unions connector-provided normalized arrays; see new test coverage in `AdvisoryPrecedenceMergerTests.Merge_RecordsNormalizedRuleMetrics`.<br>2025-10-12 17:05Z: CVE + KEV normalized rule verification complete; OSV parity fixtures revalidated—downstream parity/monitoring tasks may proceed.<br>2025-10-19 14:35Z: Prerequisites reviewed (none outstanding); FEEDMERGE-COORD-02-900 remains in DOING with connector follow-ups unchanged.<br>2025-10-19 15:25Z: Refreshed `RANGE_PRIMITIVES_COORDINATION.md` matrix + added targeted follow-ups (Cccs, CertBund, ICS-CISA, Kisa, Vndr.Cisco) with delivery dates 2025-10-21 → 2025-10-25; monitoring merge counters for regression.| +|Range primitives backlog|BE-Merge|Connector WGs|**DOING** – Coordinate remaining connectors (`Acsc`, `Cccs`, `CertBund`, `CertCc`, `Cve`, `Ghsa`, `Ics.Cisa`, `Kisa`, `Ru.Bdu`, `Ru.Nkcki`, `Vndr.Apple`, `Vndr.Cisco`, `Vndr.Msrc`) to emit canonical RangePrimitives with provenance tags; track progress/fixtures here.<br>2025-10-11: Storage alignment notes + sample normalized rule JSON now captured in `RANGE_PRIMITIVES_COORDINATION.md` (see “Storage alignment quick reference”).<br>2025-10-11 18:45Z: GHSA normalized rules landed; OSV connector picked up next for rollout.<br>2025-10-11 21:10Z: `docs/dev/merge_semver_playbook.md` Section 8 now documents the persisted Mongo projection (SemVer + NEVRA) for connector reviewers.<br>2025-10-11 21:30Z: Added `docs/dev/normalized_versions_rollout.md` dashboard to centralize connector status and upcoming milestones.<br>2025-10-11 21:55Z: Merge now 
emits `concelier.merge.normalized_rules*` counters and unions connector-provided normalized arrays; see new test coverage in `AdvisoryPrecedenceMergerTests.Merge_RecordsNormalizedRuleMetrics`.<br>2025-10-12 17:05Z: CVE + KEV normalized rule verification complete; OSV parity fixtures revalidated—downstream parity/monitoring tasks may proceed.<br>2025-10-19 14:35Z: Prerequisites reviewed (none outstanding); FEEDMERGE-COORD-02-900 remains in DOING with connector follow-ups unchanged.<br>2025-10-19 15:25Z: Refreshed `RANGE_PRIMITIVES_COORDINATION.md` matrix + added targeted follow-ups (Cccs, CertBund, ICS-CISA, Kisa, Vndr.Cisco) with delivery dates 2025-10-21 → 2025-10-25; monitoring merge counters for regression.<br>2025-10-20 19:30Z: Coordination matrix + rollout dashboard updated with current connector statuses and due dates; flagged Slack escalation plan if Cccs/Cisco miss 2025-10-21 and documented Acsc kickoff window for 2025-10-24.| +|Merge pipeline parity for new advisory fields|BE-Merge|Models, Core|DONE (2025-10-15) – merge service now surfaces description/CWE/canonical metric decisions with updated metrics/tests.| +|Connector coordination for new advisory fields|Connector Leads, BE-Merge|Models, Core|**DONE (2025-10-15)** – GHSA, NVD, and OSV connectors now emit advisory descriptions, CWE weaknesses, and canonical metric ids. Fixtures refreshed (GHSA connector regression suite, `conflict-nvd.canonical.json`, OSV parity snapshots) and completion recorded in coordination log.| +|FEEDMERGE-ENGINE-07-001 Conflict sets & explainers|BE-Merge|FEEDSTORAGE-DATA-07-001|**DONE (2025-10-20)** – Merge surfaces conflict explainers with replay hashes via `MergeConflictSummary`; API exposes structured payloads and integration tests cover deterministic `asOf` hashes.| +> Remark (2025-10-20): `AdvisoryMergeService` now returns conflict summaries with deterministic hashes; WebService replay endpoint emits typed explainers verified by new tests. 
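
A minimal determinism sketch for the replay hashes mentioned in the remark above. It reuses the `ToCanonicalJson()` / `ComputeHashHex(string)` members that `AdvisoryMergeService.BuildConflictInputs` already calls on `MergeConflictExplainerPayload`; the constructor arguments below are hypothetical sample values (not fixture data), and the positional signature is assumed from `ConflictDetailPayload.ToExplainer()`.

```csharp
using System;
using StellaOps.Concelier.Merge.Services;

// Hypothetical explainer payload: type/reason/sources/ranks/values are sample data only.
var explainer = new MergeConflictExplainerPayload(
    "severity",                 // conflict type
    "precedence_override",      // reason
    new[] { "nvd" },            // primary sources
    1,                          // primary rank
    new[] { "osv" },            // suppressed sources
    2,                          // suppressed rank
    "HIGH",                     // primary value
    "MODERATE");                // suppressed value

// The canonical JSON feeds both persistence (JsonDocument.Parse in BuildConflictInputs)
// and the replay hash, so hashing the same payload twice must yield the same digest.
var firstHash = explainer.ComputeHashHex(explainer.ToCanonicalJson());
var secondHash = explainer.ComputeHashHex(explainer.ToCanonicalJson());

if (!string.Equals(firstHash, secondHash, StringComparison.Ordinal))
{
    throw new InvalidOperationException("Conflict replay hash drifted between runs.");
}
```

This mirrors the deterministic-`asOf`-hash checks referenced in the remark only in spirit; the authoritative fixtures and assertions live in the Concelier merge integration test suite.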
+|FEEDMERGE-COORD-02-901 Connector deadline check-ins|BE-Merge|FEEDMERGE-COORD-02-900|**TODO (due 2025-10-21)** – Confirm Cccs/Cisco normalized-rule branches land, capture `concelier.merge.normalized_rules*` counter screenshots, and update coordination docs with the results.| +|FEEDMERGE-COORD-02-902 ICS-CISA normalized-rule decision support|BE-Merge, Models|FEEDMERGE-COORD-02-900|**TODO (due 2025-10-23)** – Review ICS-CISA sample advisories, confirm SemVer reuse vs new firmware scheme, pre-stage Models ticket template, and document outcome in coordination docs + tracker files.| +|FEEDMERGE-COORD-02-903 KISA firmware scheme review|BE-Merge, Models|FEEDMERGE-COORD-02-900|**TODO (due 2025-10-24)** – Pair with KISA team on proposed firmware scheme (`kisa.build` or variant), ensure builder alignment, open Models ticket if required, and log decision in coordination docs + tracker files.| + +## Link-Not-Merge v1 Transition +| Task | Owner(s) | Depends on | Notes | +|---|---|---|---| +|MERGE-LNM-21-001 Migration plan authoring|BE-Merge, Architecture Guild|CONCELIER-LNM-21-101|Draft `no-merge` migration playbook, documenting backfill strategy, feature flag rollout, and rollback steps for legacy merge pipeline deprecation.| +|MERGE-LNM-21-002 Merge service deprecation|BE-Merge|MERGE-LNM-21-001|Refactor or retire `AdvisoryMergeService` and related pipelines, ensuring callers transition to observation/linkset APIs; add compile-time analyzer preventing merge service usage.| +|MERGE-LNM-21-003 Determinism/test updates|QA Guild, BE-Merge|MERGE-LNM-21-002|Replace merge determinism suites with observation/linkset regression tests verifying no data mutation and conflicts remain visible.| diff --git a/src/StellaOps.Concelier.Models/AGENTS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Models/AGENTS.md similarity index 100% rename from src/StellaOps.Concelier.Models/AGENTS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Models/AGENTS.md diff --git a/src/StellaOps.Concelier.Models/Advisory.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Models/Advisory.cs similarity index 100% rename from src/StellaOps.Concelier.Models/Advisory.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Models/Advisory.cs diff --git a/src/StellaOps.Concelier.Models/AdvisoryCredit.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Models/AdvisoryCredit.cs similarity index 100% rename from src/StellaOps.Concelier.Models/AdvisoryCredit.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Models/AdvisoryCredit.cs diff --git a/src/StellaOps.Concelier.Models/AdvisoryProvenance.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Models/AdvisoryProvenance.cs similarity index 100% rename from src/StellaOps.Concelier.Models/AdvisoryProvenance.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Models/AdvisoryProvenance.cs diff --git a/src/StellaOps.Concelier.Models/AdvisoryReference.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Models/AdvisoryReference.cs similarity index 100% rename from src/StellaOps.Concelier.Models/AdvisoryReference.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Models/AdvisoryReference.cs diff --git a/src/StellaOps.Concelier.Models/AdvisoryWeakness.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Models/AdvisoryWeakness.cs similarity index 100% rename from src/StellaOps.Concelier.Models/AdvisoryWeakness.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Models/AdvisoryWeakness.cs diff --git a/src/StellaOps.Concelier.Models/AffectedPackage.cs 
b/src/Concelier/__Libraries/StellaOps.Concelier.Models/AffectedPackage.cs similarity index 100% rename from src/StellaOps.Concelier.Models/AffectedPackage.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Models/AffectedPackage.cs diff --git a/src/StellaOps.Concelier.Models/AffectedPackageStatus.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Models/AffectedPackageStatus.cs similarity index 100% rename from src/StellaOps.Concelier.Models/AffectedPackageStatus.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Models/AffectedPackageStatus.cs diff --git a/src/StellaOps.Concelier.Models/AffectedPackageStatusCatalog.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Models/AffectedPackageStatusCatalog.cs similarity index 100% rename from src/StellaOps.Concelier.Models/AffectedPackageStatusCatalog.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Models/AffectedPackageStatusCatalog.cs diff --git a/src/StellaOps.Concelier.Models/AffectedVersionRange.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Models/AffectedVersionRange.cs similarity index 100% rename from src/StellaOps.Concelier.Models/AffectedVersionRange.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Models/AffectedVersionRange.cs diff --git a/src/StellaOps.Concelier.Models/AffectedVersionRangeExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Models/AffectedVersionRangeExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.Models/AffectedVersionRangeExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Models/AffectedVersionRangeExtensions.cs diff --git a/src/StellaOps.Concelier.Models/AliasSchemeRegistry.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Models/AliasSchemeRegistry.cs similarity index 100% rename from src/StellaOps.Concelier.Models/AliasSchemeRegistry.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Models/AliasSchemeRegistry.cs diff --git a/src/StellaOps.Concelier.Models/AliasSchemes.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Models/AliasSchemes.cs similarity index 100% rename from src/StellaOps.Concelier.Models/AliasSchemes.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Models/AliasSchemes.cs diff --git a/src/StellaOps.Concelier.Models/BACKWARD_COMPATIBILITY.md b/src/Concelier/__Libraries/StellaOps.Concelier.Models/BACKWARD_COMPATIBILITY.md similarity index 100% rename from src/StellaOps.Concelier.Models/BACKWARD_COMPATIBILITY.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Models/BACKWARD_COMPATIBILITY.md diff --git a/src/StellaOps.Concelier.Models/CANONICAL_RECORDS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Models/CANONICAL_RECORDS.md similarity index 100% rename from src/StellaOps.Concelier.Models/CANONICAL_RECORDS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Models/CANONICAL_RECORDS.md diff --git a/src/StellaOps.Concelier.Models/CanonicalJsonSerializer.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Models/CanonicalJsonSerializer.cs similarity index 100% rename from src/StellaOps.Concelier.Models/CanonicalJsonSerializer.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Models/CanonicalJsonSerializer.cs diff --git a/src/StellaOps.Concelier.Models/CvssMetric.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Models/CvssMetric.cs similarity index 100% rename from src/StellaOps.Concelier.Models/CvssMetric.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Models/CvssMetric.cs diff --git a/src/StellaOps.Concelier.Models/EvrPrimitiveExtensions.cs 
b/src/Concelier/__Libraries/StellaOps.Concelier.Models/EvrPrimitiveExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.Models/EvrPrimitiveExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Models/EvrPrimitiveExtensions.cs diff --git a/src/StellaOps.Concelier.Models/NevraPrimitiveExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Models/NevraPrimitiveExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.Models/NevraPrimitiveExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Models/NevraPrimitiveExtensions.cs diff --git a/src/StellaOps.Concelier.Models/NormalizedVersionRule.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Models/NormalizedVersionRule.cs similarity index 100% rename from src/StellaOps.Concelier.Models/NormalizedVersionRule.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Models/NormalizedVersionRule.cs diff --git a/src/StellaOps.Concelier.Models/Observations/AdvisoryObservation.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Models/Observations/AdvisoryObservation.cs similarity index 96% rename from src/StellaOps.Concelier.Models/Observations/AdvisoryObservation.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Models/Observations/AdvisoryObservation.cs index a855004f..bc183f84 100644 --- a/src/StellaOps.Concelier.Models/Observations/AdvisoryObservation.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Models/Observations/AdvisoryObservation.cs @@ -1,283 +1,283 @@ -using System.Collections.Immutable; -using System.Text.Json; -using System.Text.Json.Nodes; - -namespace StellaOps.Concelier.Models.Observations; - -public sealed record AdvisoryObservation -{ - public AdvisoryObservation( - string observationId, - string tenant, - AdvisoryObservationSource source, - AdvisoryObservationUpstream upstream, - AdvisoryObservationContent content, - AdvisoryObservationLinkset linkset, - DateTimeOffset createdAt, - ImmutableDictionary<string, string>? attributes = null) - { - ObservationId = Validation.EnsureNotNullOrWhiteSpace(observationId, nameof(observationId)); - Tenant = Validation.EnsureNotNullOrWhiteSpace(tenant, nameof(tenant)).ToLowerInvariant(); - Source = source ?? throw new ArgumentNullException(nameof(source)); - Upstream = upstream ?? throw new ArgumentNullException(nameof(upstream)); - Content = content ?? throw new ArgumentNullException(nameof(content)); - Linkset = linkset ?? throw new ArgumentNullException(nameof(linkset)); - CreatedAt = createdAt.ToUniversalTime(); - Attributes = NormalizeAttributes(attributes); - } - - public string ObservationId { get; } - - public string Tenant { get; } - - public AdvisoryObservationSource Source { get; } - - public AdvisoryObservationUpstream Upstream { get; } - - public AdvisoryObservationContent Content { get; } - - public AdvisoryObservationLinkset Linkset { get; } - - public DateTimeOffset CreatedAt { get; } - - public ImmutableDictionary<string, string> Attributes { get; } - - private static ImmutableDictionary<string, string> NormalizeAttributes(ImmutableDictionary<string, string>? 
attributes) - { - if (attributes is null || attributes.Count == 0) - { - return ImmutableDictionary<string, string>.Empty; - } - - var builder = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal); - foreach (var pair in attributes) - { - if (string.IsNullOrWhiteSpace(pair.Key) || pair.Value is null) - { - continue; - } - - builder[pair.Key.Trim()] = pair.Value; - } - - return builder.ToImmutable(); - } -} - -public sealed record AdvisoryObservationSource -{ - public AdvisoryObservationSource( - string vendor, - string stream, - string api, - string? collectorVersion = null) - { - Vendor = Validation.EnsureNotNullOrWhiteSpace(vendor, nameof(vendor)); - Stream = Validation.EnsureNotNullOrWhiteSpace(stream, nameof(stream)); - Api = Validation.EnsureNotNullOrWhiteSpace(api, nameof(api)); - CollectorVersion = Validation.TrimToNull(collectorVersion); - } - - public string Vendor { get; } - - public string Stream { get; } - - public string Api { get; } - - public string? CollectorVersion { get; } -} - -public sealed record AdvisoryObservationSignature -{ - public AdvisoryObservationSignature(bool present, string? format, string? keyId, string? signature) - { - Present = present; - Format = Validation.TrimToNull(format); - KeyId = Validation.TrimToNull(keyId); - Signature = Validation.TrimToNull(signature); - } - - public bool Present { get; } - - public string? Format { get; } - - public string? KeyId { get; } - - public string? Signature { get; } -} - -public sealed record AdvisoryObservationUpstream -{ - public AdvisoryObservationUpstream( - string upstreamId, - string? documentVersion, - DateTimeOffset fetchedAt, - DateTimeOffset receivedAt, - string contentHash, - AdvisoryObservationSignature signature, - ImmutableDictionary<string, string>? metadata = null) - { - UpstreamId = Validation.EnsureNotNullOrWhiteSpace(upstreamId, nameof(upstreamId)); - DocumentVersion = Validation.TrimToNull(documentVersion); - FetchedAt = fetchedAt.ToUniversalTime(); - ReceivedAt = receivedAt.ToUniversalTime(); - ContentHash = Validation.EnsureNotNullOrWhiteSpace(contentHash, nameof(contentHash)); - Signature = signature ?? throw new ArgumentNullException(nameof(signature)); - Metadata = NormalizeMetadata(metadata); - } - - public string UpstreamId { get; } - - public string? DocumentVersion { get; } - - public DateTimeOffset FetchedAt { get; } - - public DateTimeOffset ReceivedAt { get; } - - public string ContentHash { get; } - - public AdvisoryObservationSignature Signature { get; } - - public ImmutableDictionary<string, string> Metadata { get; } - - private static ImmutableDictionary<string, string> NormalizeMetadata(ImmutableDictionary<string, string>? metadata) - { - if (metadata is null || metadata.Count == 0) - { - return ImmutableDictionary<string, string>.Empty; - } - - var builder = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal); - foreach (var pair in metadata) - { - if (string.IsNullOrWhiteSpace(pair.Key) || pair.Value is null) - { - continue; - } - - builder[pair.Key.Trim()] = pair.Value; - } - - return builder.ToImmutable(); - } -} - -public sealed record AdvisoryObservationContent -{ - public AdvisoryObservationContent( - string format, - string? specVersion, - JsonNode raw, - ImmutableDictionary<string, string>? metadata = null) - { - Format = Validation.EnsureNotNullOrWhiteSpace(format, nameof(format)); - SpecVersion = Validation.TrimToNull(specVersion); - Raw = raw?.DeepClone() ?? 
throw new ArgumentNullException(nameof(raw)); - Metadata = NormalizeMetadata(metadata); - } - - public string Format { get; } - - public string? SpecVersion { get; } - - public JsonNode Raw { get; } - - public ImmutableDictionary<string, string> Metadata { get; } - - private static ImmutableDictionary<string, string> NormalizeMetadata(ImmutableDictionary<string, string>? metadata) - { - if (metadata is null || metadata.Count == 0) - { - return ImmutableDictionary<string, string>.Empty; - } - - var builder = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal); - foreach (var pair in metadata) - { - if (string.IsNullOrWhiteSpace(pair.Key) || pair.Value is null) - { - continue; - } - - builder[pair.Key.Trim()] = pair.Value; - } - - return builder.ToImmutable(); - } -} - -public sealed record AdvisoryObservationReference -{ - public AdvisoryObservationReference(string type, string url) - { - Type = Validation.EnsureNotNullOrWhiteSpace(type, nameof(type)).ToLowerInvariant(); - Url = Validation.EnsureNotNullOrWhiteSpace(url, nameof(url)); - } - - public string Type { get; } - - public string Url { get; } -} - -public sealed record AdvisoryObservationLinkset -{ - public AdvisoryObservationLinkset( - IEnumerable<string>? aliases, - IEnumerable<string>? purls, - IEnumerable<string>? cpes, - IEnumerable<AdvisoryObservationReference>? references) - { - Aliases = NormalizeStringSet(aliases, toLower: true); - Purls = NormalizeStringSet(purls); - Cpes = NormalizeStringSet(cpes); - References = NormalizeReferences(references); - } - - public ImmutableArray<string> Aliases { get; } - - public ImmutableArray<string> Purls { get; } - - public ImmutableArray<string> Cpes { get; } - - public ImmutableArray<AdvisoryObservationReference> References { get; } - - private static ImmutableArray<string> NormalizeStringSet(IEnumerable<string>? values, bool toLower = false) - { - if (values is null) - { - return ImmutableArray<string>.Empty; - } - - var list = new List<string>(); - foreach (var value in values) - { - var trimmed = Validation.TrimToNull(value); - if (trimmed is null) - { - continue; - } - - list.Add(toLower ? trimmed.ToLowerInvariant() : trimmed); - } - - return list - .Distinct(StringComparer.Ordinal) - .OrderBy(static v => v, StringComparer.Ordinal) - .ToImmutableArray(); - } - - private static ImmutableArray<AdvisoryObservationReference> NormalizeReferences(IEnumerable<AdvisoryObservationReference>? references) - { - if (references is null) - { - return ImmutableArray<AdvisoryObservationReference>.Empty; - } - - return references - .Where(static reference => reference is not null) - .Distinct() - .OrderBy(static reference => reference.Type, StringComparer.Ordinal) - .ThenBy(static reference => reference.Url, StringComparer.Ordinal) - .ToImmutableArray(); - } -} +using System.Collections.Immutable; +using System.Text.Json; +using System.Text.Json.Nodes; + +namespace StellaOps.Concelier.Models.Observations; + +public sealed record AdvisoryObservation +{ + public AdvisoryObservation( + string observationId, + string tenant, + AdvisoryObservationSource source, + AdvisoryObservationUpstream upstream, + AdvisoryObservationContent content, + AdvisoryObservationLinkset linkset, + DateTimeOffset createdAt, + ImmutableDictionary<string, string>? attributes = null) + { + ObservationId = Validation.EnsureNotNullOrWhiteSpace(observationId, nameof(observationId)); + Tenant = Validation.EnsureNotNullOrWhiteSpace(tenant, nameof(tenant)).ToLowerInvariant(); + Source = source ?? 
throw new ArgumentNullException(nameof(source)); + Upstream = upstream ?? throw new ArgumentNullException(nameof(upstream)); + Content = content ?? throw new ArgumentNullException(nameof(content)); + Linkset = linkset ?? throw new ArgumentNullException(nameof(linkset)); + CreatedAt = createdAt.ToUniversalTime(); + Attributes = NormalizeAttributes(attributes); + } + + public string ObservationId { get; } + + public string Tenant { get; } + + public AdvisoryObservationSource Source { get; } + + public AdvisoryObservationUpstream Upstream { get; } + + public AdvisoryObservationContent Content { get; } + + public AdvisoryObservationLinkset Linkset { get; } + + public DateTimeOffset CreatedAt { get; } + + public ImmutableDictionary<string, string> Attributes { get; } + + private static ImmutableDictionary<string, string> NormalizeAttributes(ImmutableDictionary<string, string>? attributes) + { + if (attributes is null || attributes.Count == 0) + { + return ImmutableDictionary<string, string>.Empty; + } + + var builder = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal); + foreach (var pair in attributes) + { + if (string.IsNullOrWhiteSpace(pair.Key) || pair.Value is null) + { + continue; + } + + builder[pair.Key.Trim()] = pair.Value; + } + + return builder.ToImmutable(); + } +} + +public sealed record AdvisoryObservationSource +{ + public AdvisoryObservationSource( + string vendor, + string stream, + string api, + string? collectorVersion = null) + { + Vendor = Validation.EnsureNotNullOrWhiteSpace(vendor, nameof(vendor)); + Stream = Validation.EnsureNotNullOrWhiteSpace(stream, nameof(stream)); + Api = Validation.EnsureNotNullOrWhiteSpace(api, nameof(api)); + CollectorVersion = Validation.TrimToNull(collectorVersion); + } + + public string Vendor { get; } + + public string Stream { get; } + + public string Api { get; } + + public string? CollectorVersion { get; } +} + +public sealed record AdvisoryObservationSignature +{ + public AdvisoryObservationSignature(bool present, string? format, string? keyId, string? signature) + { + Present = present; + Format = Validation.TrimToNull(format); + KeyId = Validation.TrimToNull(keyId); + Signature = Validation.TrimToNull(signature); + } + + public bool Present { get; } + + public string? Format { get; } + + public string? KeyId { get; } + + public string? Signature { get; } +} + +public sealed record AdvisoryObservationUpstream +{ + public AdvisoryObservationUpstream( + string upstreamId, + string? documentVersion, + DateTimeOffset fetchedAt, + DateTimeOffset receivedAt, + string contentHash, + AdvisoryObservationSignature signature, + ImmutableDictionary<string, string>? metadata = null) + { + UpstreamId = Validation.EnsureNotNullOrWhiteSpace(upstreamId, nameof(upstreamId)); + DocumentVersion = Validation.TrimToNull(documentVersion); + FetchedAt = fetchedAt.ToUniversalTime(); + ReceivedAt = receivedAt.ToUniversalTime(); + ContentHash = Validation.EnsureNotNullOrWhiteSpace(contentHash, nameof(contentHash)); + Signature = signature ?? throw new ArgumentNullException(nameof(signature)); + Metadata = NormalizeMetadata(metadata); + } + + public string UpstreamId { get; } + + public string? 
DocumentVersion { get; } + + public DateTimeOffset FetchedAt { get; } + + public DateTimeOffset ReceivedAt { get; } + + public string ContentHash { get; } + + public AdvisoryObservationSignature Signature { get; } + + public ImmutableDictionary<string, string> Metadata { get; } + + private static ImmutableDictionary<string, string> NormalizeMetadata(ImmutableDictionary<string, string>? metadata) + { + if (metadata is null || metadata.Count == 0) + { + return ImmutableDictionary<string, string>.Empty; + } + + var builder = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal); + foreach (var pair in metadata) + { + if (string.IsNullOrWhiteSpace(pair.Key) || pair.Value is null) + { + continue; + } + + builder[pair.Key.Trim()] = pair.Value; + } + + return builder.ToImmutable(); + } +} + +public sealed record AdvisoryObservationContent +{ + public AdvisoryObservationContent( + string format, + string? specVersion, + JsonNode raw, + ImmutableDictionary<string, string>? metadata = null) + { + Format = Validation.EnsureNotNullOrWhiteSpace(format, nameof(format)); + SpecVersion = Validation.TrimToNull(specVersion); + Raw = raw?.DeepClone() ?? throw new ArgumentNullException(nameof(raw)); + Metadata = NormalizeMetadata(metadata); + } + + public string Format { get; } + + public string? SpecVersion { get; } + + public JsonNode Raw { get; } + + public ImmutableDictionary<string, string> Metadata { get; } + + private static ImmutableDictionary<string, string> NormalizeMetadata(ImmutableDictionary<string, string>? metadata) + { + if (metadata is null || metadata.Count == 0) + { + return ImmutableDictionary<string, string>.Empty; + } + + var builder = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal); + foreach (var pair in metadata) + { + if (string.IsNullOrWhiteSpace(pair.Key) || pair.Value is null) + { + continue; + } + + builder[pair.Key.Trim()] = pair.Value; + } + + return builder.ToImmutable(); + } +} + +public sealed record AdvisoryObservationReference +{ + public AdvisoryObservationReference(string type, string url) + { + Type = Validation.EnsureNotNullOrWhiteSpace(type, nameof(type)).ToLowerInvariant(); + Url = Validation.EnsureNotNullOrWhiteSpace(url, nameof(url)); + } + + public string Type { get; } + + public string Url { get; } +} + +public sealed record AdvisoryObservationLinkset +{ + public AdvisoryObservationLinkset( + IEnumerable<string>? aliases, + IEnumerable<string>? purls, + IEnumerable<string>? cpes, + IEnumerable<AdvisoryObservationReference>? references) + { + Aliases = NormalizeStringSet(aliases, toLower: true); + Purls = NormalizeStringSet(purls); + Cpes = NormalizeStringSet(cpes); + References = NormalizeReferences(references); + } + + public ImmutableArray<string> Aliases { get; } + + public ImmutableArray<string> Purls { get; } + + public ImmutableArray<string> Cpes { get; } + + public ImmutableArray<AdvisoryObservationReference> References { get; } + + private static ImmutableArray<string> NormalizeStringSet(IEnumerable<string>? values, bool toLower = false) + { + if (values is null) + { + return ImmutableArray<string>.Empty; + } + + var list = new List<string>(); + foreach (var value in values) + { + var trimmed = Validation.TrimToNull(value); + if (trimmed is null) + { + continue; + } + + list.Add(toLower ? 
trimmed.ToLowerInvariant() : trimmed); + } + + return list + .Distinct(StringComparer.Ordinal) + .OrderBy(static v => v, StringComparer.Ordinal) + .ToImmutableArray(); + } + + private static ImmutableArray<AdvisoryObservationReference> NormalizeReferences(IEnumerable<AdvisoryObservationReference>? references) + { + if (references is null) + { + return ImmutableArray<AdvisoryObservationReference>.Empty; + } + + return references + .Where(static reference => reference is not null) + .Distinct() + .OrderBy(static reference => reference.Type, StringComparer.Ordinal) + .ThenBy(static reference => reference.Url, StringComparer.Ordinal) + .ToImmutableArray(); + } +} diff --git a/src/StellaOps.Concelier.Models/OsvGhsaParityDiagnostics.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Models/OsvGhsaParityDiagnostics.cs similarity index 100% rename from src/StellaOps.Concelier.Models/OsvGhsaParityDiagnostics.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Models/OsvGhsaParityDiagnostics.cs diff --git a/src/StellaOps.Concelier.Models/OsvGhsaParityInspector.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Models/OsvGhsaParityInspector.cs similarity index 100% rename from src/StellaOps.Concelier.Models/OsvGhsaParityInspector.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Models/OsvGhsaParityInspector.cs diff --git a/src/StellaOps.Concelier.Models/PROVENANCE_GUIDELINES.md b/src/Concelier/__Libraries/StellaOps.Concelier.Models/PROVENANCE_GUIDELINES.md similarity index 100% rename from src/StellaOps.Concelier.Models/PROVENANCE_GUIDELINES.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Models/PROVENANCE_GUIDELINES.md diff --git a/src/StellaOps.Concelier.Models/ProvenanceFieldMasks.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Models/ProvenanceFieldMasks.cs similarity index 100% rename from src/StellaOps.Concelier.Models/ProvenanceFieldMasks.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Models/ProvenanceFieldMasks.cs diff --git a/src/StellaOps.Concelier.Models/ProvenanceInspector.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Models/ProvenanceInspector.cs similarity index 100% rename from src/StellaOps.Concelier.Models/ProvenanceInspector.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Models/ProvenanceInspector.cs diff --git a/src/StellaOps.Concelier.Models/RangePrimitives.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Models/RangePrimitives.cs similarity index 100% rename from src/StellaOps.Concelier.Models/RangePrimitives.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Models/RangePrimitives.cs diff --git a/src/StellaOps.Concelier.Models/SemVerPrimitiveExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Models/SemVerPrimitiveExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.Models/SemVerPrimitiveExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Models/SemVerPrimitiveExtensions.cs diff --git a/src/StellaOps.Concelier.Models/SeverityNormalization.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Models/SeverityNormalization.cs similarity index 100% rename from src/StellaOps.Concelier.Models/SeverityNormalization.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Models/SeverityNormalization.cs diff --git a/src/StellaOps.Concelier.Models/SnapshotSerializer.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Models/SnapshotSerializer.cs similarity index 100% rename from src/StellaOps.Concelier.Models/SnapshotSerializer.cs rename to 
src/Concelier/__Libraries/StellaOps.Concelier.Models/SnapshotSerializer.cs
diff --git a/src/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj b/src/Concelier/__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj
similarity index 97%
rename from src/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj
rename to src/Concelier/__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj
index 5d5e6d1c..c3e89c54 100644
--- a/src/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj
+++ b/src/Concelier/__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj
@@ -1,12 +1,12 @@
-<Project Sdk="Microsoft.NET.Sdk">
- <PropertyGroup>
- <TargetFramework>net10.0</TargetFramework>
- <LangVersion>preview</LangVersion>
- <Nullable>enable</Nullable>
- <ImplicitUsings>enable</ImplicitUsings>
- <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
- </PropertyGroup>
- <ItemGroup>
- <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" />
- </ItemGroup>
-</Project>
+<Project Sdk="Microsoft.NET.Sdk">
+ <PropertyGroup>
+ <TargetFramework>net10.0</TargetFramework>
+ <LangVersion>preview</LangVersion>
+ <Nullable>enable</Nullable>
+ <ImplicitUsings>enable</ImplicitUsings>
+ <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
+ </PropertyGroup>
+ <ItemGroup>
+ <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" />
+ </ItemGroup>
+</Project>
diff --git a/src/StellaOps.Concelier.Models/TASKS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Models/TASKS.md
similarity index 100%
rename from src/StellaOps.Concelier.Models/TASKS.md
rename to src/Concelier/__Libraries/StellaOps.Concelier.Models/TASKS.md
diff --git a/src/StellaOps.Concelier.Models/Validation.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Models/Validation.cs
similarity index 100%
rename from src/StellaOps.Concelier.Models/Validation.cs
rename to src/Concelier/__Libraries/StellaOps.Concelier.Models/Validation.cs
diff --git a/src/StellaOps.Concelier.Normalization/AssemblyInfo.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Normalization/AssemblyInfo.cs
similarity index 100%
rename from src/StellaOps.Concelier.Normalization/AssemblyInfo.cs
rename to src/Concelier/__Libraries/StellaOps.Concelier.Normalization/AssemblyInfo.cs
diff --git a/src/StellaOps.Concelier.Normalization/Cvss/CvssMetricNormalizer.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Normalization/Cvss/CvssMetricNormalizer.cs
similarity index 100%
rename from src/StellaOps.Concelier.Normalization/Cvss/CvssMetricNormalizer.cs
rename to src/Concelier/__Libraries/StellaOps.Concelier.Normalization/Cvss/CvssMetricNormalizer.cs
diff --git a/src/StellaOps.Concelier.Normalization/Distro/DebianEvr.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Normalization/Distro/DebianEvr.cs
similarity index 100%
rename from src/StellaOps.Concelier.Normalization/Distro/DebianEvr.cs
rename to src/Concelier/__Libraries/StellaOps.Concelier.Normalization/Distro/DebianEvr.cs
diff --git a/src/StellaOps.Concelier.Normalization/Distro/Nevra.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Normalization/Distro/Nevra.cs
similarity index 100%
rename from src/StellaOps.Concelier.Normalization/Distro/Nevra.cs
rename to src/Concelier/__Libraries/StellaOps.Concelier.Normalization/Distro/Nevra.cs
diff --git a/src/StellaOps.Concelier.Normalization/Identifiers/Cpe23.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Normalization/Identifiers/Cpe23.cs
similarity index 100% rename from src/StellaOps.Concelier.Normalization/Identifiers/Cpe23.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Normalization/Identifiers/Cpe23.cs diff --git a/src/StellaOps.Concelier.Normalization/Identifiers/IdentifierNormalizer.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Normalization/Identifiers/IdentifierNormalizer.cs similarity index 100% rename from src/StellaOps.Concelier.Normalization/Identifiers/IdentifierNormalizer.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Normalization/Identifiers/IdentifierNormalizer.cs diff --git a/src/StellaOps.Concelier.Normalization/Identifiers/PackageUrl.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Normalization/Identifiers/PackageUrl.cs similarity index 100% rename from src/StellaOps.Concelier.Normalization/Identifiers/PackageUrl.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Normalization/Identifiers/PackageUrl.cs diff --git a/src/StellaOps.Concelier.Normalization/SemVer/SemVerRangeRuleBuilder.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Normalization/SemVer/SemVerRangeRuleBuilder.cs similarity index 100% rename from src/StellaOps.Concelier.Normalization/SemVer/SemVerRangeRuleBuilder.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Normalization/SemVer/SemVerRangeRuleBuilder.cs diff --git a/src/StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj b/src/Concelier/__Libraries/StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj similarity index 100% rename from src/StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj rename to src/Concelier/__Libraries/StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj diff --git a/src/StellaOps.Concelier.Normalization/TASKS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Normalization/TASKS.md similarity index 100% rename from src/StellaOps.Concelier.Normalization/TASKS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Normalization/TASKS.md diff --git a/src/StellaOps.Concelier.Normalization/Text/DescriptionNormalizer.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Normalization/Text/DescriptionNormalizer.cs similarity index 100% rename from src/StellaOps.Concelier.Normalization/Text/DescriptionNormalizer.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Normalization/Text/DescriptionNormalizer.cs diff --git a/src/StellaOps.Concelier.RawModels/AdvisoryRawDocument.cs b/src/Concelier/__Libraries/StellaOps.Concelier.RawModels/AdvisoryRawDocument.cs similarity index 97% rename from src/StellaOps.Concelier.RawModels/AdvisoryRawDocument.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.RawModels/AdvisoryRawDocument.cs index c0b17701..2d7c8670 100644 --- a/src/StellaOps.Concelier.RawModels/AdvisoryRawDocument.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.RawModels/AdvisoryRawDocument.cs @@ -1,76 +1,76 @@ -using System.Collections.Immutable; -using System.Text.Json; -using System.Text.Json.Serialization; - -namespace StellaOps.Concelier.RawModels; - -public sealed record AdvisoryRawDocument( - [property: JsonPropertyName("tenant")] string Tenant, - [property: JsonPropertyName("source")] RawSourceMetadata Source, - [property: JsonPropertyName("upstream")] RawUpstreamMetadata Upstream, - [property: JsonPropertyName("content")] RawContent Content, - [property: JsonPropertyName("identifiers")] RawIdentifiers Identifiers, - [property: JsonPropertyName("linkset")] RawLinkset Linkset, - [property: 
JsonPropertyName("supersedes")] string? Supersedes = null) -{ - public AdvisoryRawDocument WithSupersedes(string supersedes) - => this with { Supersedes = supersedes }; -} - -public sealed record RawSourceMetadata( - [property: JsonPropertyName("vendor")] string Vendor, - [property: JsonPropertyName("connector")] string Connector, - [property: JsonPropertyName("version")] string ConnectorVersion, - [property: JsonPropertyName("stream")] string? Stream = null); - -public sealed record RawUpstreamMetadata( - [property: JsonPropertyName("upstream_id")] string UpstreamId, - [property: JsonPropertyName("document_version")] string? DocumentVersion, - [property: JsonPropertyName("retrieved_at")] DateTimeOffset RetrievedAt, - [property: JsonPropertyName("content_hash")] string ContentHash, - [property: JsonPropertyName("signature")] RawSignatureMetadata Signature, - [property: JsonPropertyName("provenance")] ImmutableDictionary<string, string> Provenance); - -public sealed record RawSignatureMetadata( - [property: JsonPropertyName("present")] bool Present, - [property: JsonPropertyName("format")] string? Format = null, - [property: JsonPropertyName("key_id")] string? KeyId = null, - [property: JsonPropertyName("sig")] string? Signature = null, - [property: JsonPropertyName("certificate")] string? Certificate = null, - [property: JsonPropertyName("digest")] string? Digest = null); - -public sealed record RawContent( - [property: JsonPropertyName("format")] string Format, - [property: JsonPropertyName("spec_version")] string? SpecVersion, - [property: JsonPropertyName("raw")] JsonElement Raw, - [property: JsonPropertyName("encoding")] string? Encoding = null); - -public sealed record RawIdentifiers( - [property: JsonPropertyName("aliases")] ImmutableArray<string> Aliases, - [property: JsonPropertyName("primary")] string PrimaryId); - -public sealed record RawLinkset -{ - [JsonPropertyName("aliases")] - public ImmutableArray<string> Aliases { get; init; } = ImmutableArray<string>.Empty; - - [JsonPropertyName("purls")] - public ImmutableArray<string> PackageUrls { get; init; } = ImmutableArray<string>.Empty; - - [JsonPropertyName("cpes")] - public ImmutableArray<string> Cpes { get; init; } = ImmutableArray<string>.Empty; - - [JsonPropertyName("references")] - public ImmutableArray<RawReference> References { get; init; } = ImmutableArray<RawReference>.Empty; - - [JsonPropertyName("reconciled_from")] - public ImmutableArray<string> ReconciledFrom { get; init; } = ImmutableArray<string>.Empty; - - [JsonPropertyName("notes")] - public ImmutableDictionary<string, string> Notes { get; init; } = ImmutableDictionary<string, string>.Empty; -} - -public sealed record RawReference( - [property: JsonPropertyName("type")] string Type, - [property: JsonPropertyName("url")] string Url, - [property: JsonPropertyName("source")] string? Source = null); +using System.Collections.Immutable; +using System.Text.Json; +using System.Text.Json.Serialization; + +namespace StellaOps.Concelier.RawModels; + +public sealed record AdvisoryRawDocument( + [property: JsonPropertyName("tenant")] string Tenant, + [property: JsonPropertyName("source")] RawSourceMetadata Source, + [property: JsonPropertyName("upstream")] RawUpstreamMetadata Upstream, + [property: JsonPropertyName("content")] RawContent Content, + [property: JsonPropertyName("identifiers")] RawIdentifiers Identifiers, + [property: JsonPropertyName("linkset")] RawLinkset Linkset, + [property: JsonPropertyName("supersedes")] string? 
Supersedes = null) +{ + public AdvisoryRawDocument WithSupersedes(string supersedes) + => this with { Supersedes = supersedes }; +} + +public sealed record RawSourceMetadata( + [property: JsonPropertyName("vendor")] string Vendor, + [property: JsonPropertyName("connector")] string Connector, + [property: JsonPropertyName("version")] string ConnectorVersion, + [property: JsonPropertyName("stream")] string? Stream = null); + +public sealed record RawUpstreamMetadata( + [property: JsonPropertyName("upstream_id")] string UpstreamId, + [property: JsonPropertyName("document_version")] string? DocumentVersion, + [property: JsonPropertyName("retrieved_at")] DateTimeOffset RetrievedAt, + [property: JsonPropertyName("content_hash")] string ContentHash, + [property: JsonPropertyName("signature")] RawSignatureMetadata Signature, + [property: JsonPropertyName("provenance")] ImmutableDictionary<string, string> Provenance); + +public sealed record RawSignatureMetadata( + [property: JsonPropertyName("present")] bool Present, + [property: JsonPropertyName("format")] string? Format = null, + [property: JsonPropertyName("key_id")] string? KeyId = null, + [property: JsonPropertyName("sig")] string? Signature = null, + [property: JsonPropertyName("certificate")] string? Certificate = null, + [property: JsonPropertyName("digest")] string? Digest = null); + +public sealed record RawContent( + [property: JsonPropertyName("format")] string Format, + [property: JsonPropertyName("spec_version")] string? SpecVersion, + [property: JsonPropertyName("raw")] JsonElement Raw, + [property: JsonPropertyName("encoding")] string? Encoding = null); + +public sealed record RawIdentifiers( + [property: JsonPropertyName("aliases")] ImmutableArray<string> Aliases, + [property: JsonPropertyName("primary")] string PrimaryId); + +public sealed record RawLinkset +{ + [JsonPropertyName("aliases")] + public ImmutableArray<string> Aliases { get; init; } = ImmutableArray<string>.Empty; + + [JsonPropertyName("purls")] + public ImmutableArray<string> PackageUrls { get; init; } = ImmutableArray<string>.Empty; + + [JsonPropertyName("cpes")] + public ImmutableArray<string> Cpes { get; init; } = ImmutableArray<string>.Empty; + + [JsonPropertyName("references")] + public ImmutableArray<RawReference> References { get; init; } = ImmutableArray<RawReference>.Empty; + + [JsonPropertyName("reconciled_from")] + public ImmutableArray<string> ReconciledFrom { get; init; } = ImmutableArray<string>.Empty; + + [JsonPropertyName("notes")] + public ImmutableDictionary<string, string> Notes { get; init; } = ImmutableDictionary<string, string>.Empty; +} + +public sealed record RawReference( + [property: JsonPropertyName("type")] string Type, + [property: JsonPropertyName("url")] string Url, + [property: JsonPropertyName("source")] string? 
Source = null);
diff --git a/src/StellaOps.Concelier.RawModels/Class1.cs b/src/Concelier/__Libraries/StellaOps.Concelier.RawModels/Class1.cs
similarity index 92%
rename from src/StellaOps.Concelier.RawModels/Class1.cs
rename to src/Concelier/__Libraries/StellaOps.Concelier.RawModels/Class1.cs
index e55eda28..b793b044 100644
--- a/src/StellaOps.Concelier.RawModels/Class1.cs
+++ b/src/Concelier/__Libraries/StellaOps.Concelier.RawModels/Class1.cs
@@ -1,6 +1,6 @@
-namespace StellaOps.Concelier.RawModels;
-
-public class Class1
-{
-
-}
+namespace StellaOps.Concelier.RawModels;
+
+public class Class1
+{
+
+}
diff --git a/src/StellaOps.Concelier.RawModels/JsonElementExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.RawModels/JsonElementExtensions.cs
similarity index 96%
rename from src/StellaOps.Concelier.RawModels/JsonElementExtensions.cs
rename to src/Concelier/__Libraries/StellaOps.Concelier.RawModels/JsonElementExtensions.cs
index eed6909f..67e2fd6b 100644
--- a/src/StellaOps.Concelier.RawModels/JsonElementExtensions.cs
+++ b/src/Concelier/__Libraries/StellaOps.Concelier.RawModels/JsonElementExtensions.cs
@@ -1,12 +1,12 @@
-using System.Text.Json;
-
-namespace StellaOps.Concelier.RawModels;
-
-internal static class JsonElementExtensions
-{
- public static JsonElement CloneElement(this JsonElement element)
- {
- using var document = JsonDocument.Parse(element.GetRawText());
- return document.RootElement.Clone();
- }
-}
+using System.Text.Json;
+
+namespace StellaOps.Concelier.RawModels;
+
+internal static class JsonElementExtensions
+{
+ public static JsonElement CloneElement(this JsonElement element)
+ {
+ using var document = JsonDocument.Parse(element.GetRawText());
+ return document.RootElement.Clone();
+ }
+}
diff --git a/src/StellaOps.Concelier.RawModels/RawDocumentFactory.cs b/src/Concelier/__Libraries/StellaOps.Concelier.RawModels/RawDocumentFactory.cs
similarity index 97%
rename from src/StellaOps.Concelier.RawModels/RawDocumentFactory.cs
rename to src/Concelier/__Libraries/StellaOps.Concelier.RawModels/RawDocumentFactory.cs
index 0222a0a8..0f302251 100644
--- a/src/StellaOps.Concelier.RawModels/RawDocumentFactory.cs
+++ b/src/Concelier/__Libraries/StellaOps.Concelier.RawModels/RawDocumentFactory.cs
@@ -1,39 +1,39 @@
-using System.Collections.Immutable;
-using System.Text.Json;
-
-namespace StellaOps.Concelier.RawModels;
-
-public static class RawDocumentFactory
-{
- public static AdvisoryRawDocument CreateAdvisory(
- string tenant,
- RawSourceMetadata source,
- RawUpstreamMetadata upstream,
- RawContent content,
- RawIdentifiers identifiers,
- RawLinkset linkset,
- string? supersedes = null)
- {
- var clonedContent = content with { Raw = Clone(content.Raw) };
- return new AdvisoryRawDocument(tenant, source, upstream, clonedContent, identifiers, linkset, supersedes);
- }
-
- public static VexRawDocument CreateVex(
- string tenant,
- RawSourceMetadata source,
- RawUpstreamMetadata upstream,
- RawContent content,
- RawLinkset linkset,
- ImmutableArray<VexStatementSummary> statements,
- string?
supersedes = null) - { - var clonedContent = content with { Raw = Clone(content.Raw) }; - return new VexRawDocument(tenant, source, upstream, clonedContent, linkset, statements, supersedes); - } - - private static JsonElement Clone(JsonElement element) - { - using var document = JsonDocument.Parse(element.GetRawText()); - return document.RootElement.Clone(); - } -} +using System.Collections.Immutable; +using System.Text.Json; + +namespace StellaOps.Concelier.RawModels; + +public static class RawDocumentFactory +{ + public static AdvisoryRawDocument CreateAdvisory( + string tenant, + RawSourceMetadata source, + RawUpstreamMetadata upstream, + RawContent content, + RawIdentifiers identifiers, + RawLinkset linkset, + string? supersedes = null) + { + var clonedContent = content with { Raw = Clone(content.Raw) }; + return new AdvisoryRawDocument(tenant, source, upstream, clonedContent, identifiers, linkset, supersedes); + } + + public static VexRawDocument CreateVex( + string tenant, + RawSourceMetadata source, + RawUpstreamMetadata upstream, + RawContent content, + RawLinkset linkset, + ImmutableArray<VexStatementSummary> statements, + string? supersedes = null) + { + var clonedContent = content with { Raw = Clone(content.Raw) }; + return new VexRawDocument(tenant, source, upstream, clonedContent, linkset, statements, supersedes); + } + + private static JsonElement Clone(JsonElement element) + { + using var document = JsonDocument.Parse(element.GetRawText()); + return document.RootElement.Clone(); + } +} diff --git a/src/StellaOps.Concelier.RawModels/StellaOps.Concelier.RawModels.csproj b/src/Concelier/__Libraries/StellaOps.Concelier.RawModels/StellaOps.Concelier.RawModels.csproj similarity index 97% rename from src/StellaOps.Concelier.RawModels/StellaOps.Concelier.RawModels.csproj rename to src/Concelier/__Libraries/StellaOps.Concelier.RawModels/StellaOps.Concelier.RawModels.csproj index 08b6b3a8..34cb1b45 100644 --- a/src/StellaOps.Concelier.RawModels/StellaOps.Concelier.RawModels.csproj +++ b/src/Concelier/__Libraries/StellaOps.Concelier.RawModels/StellaOps.Concelier.RawModels.csproj @@ -1,12 +1,12 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <ItemGroup> - <PackageReference Include="MongoDB.Bson" Version="3.5.0" /> - </ItemGroup> -</Project> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <ItemGroup> + <PackageReference Include="MongoDB.Bson" Version="3.5.0" /> + </ItemGroup> +</Project> diff --git a/src/StellaOps.Concelier.RawModels/VexRawDocument.cs b/src/Concelier/__Libraries/StellaOps.Concelier.RawModels/VexRawDocument.cs similarity index 98% rename from src/StellaOps.Concelier.RawModels/VexRawDocument.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.RawModels/VexRawDocument.cs index 3755a3ac..1e3820e0 100644 --- a/src/StellaOps.Concelier.RawModels/VexRawDocument.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.RawModels/VexRawDocument.cs @@ -1,24 +1,24 @@ -using System.Collections.Immutable; -using System.Text.Json.Serialization; - -namespace StellaOps.Concelier.RawModels; - -public sealed record 
VexRawDocument( - [property: JsonPropertyName("tenant")] string Tenant, - [property: JsonPropertyName("source")] RawSourceMetadata Source, - [property: JsonPropertyName("upstream")] RawUpstreamMetadata Upstream, - [property: JsonPropertyName("content")] RawContent Content, - [property: JsonPropertyName("linkset")] RawLinkset Linkset, - [property: JsonPropertyName("statements")] ImmutableArray<VexStatementSummary> Statements, - [property: JsonPropertyName("supersedes")] string? Supersedes = null) -{ - public VexRawDocument WithSupersedes(string supersedes) - => this with { Supersedes = supersedes }; -} - -public sealed record VexStatementSummary( - [property: JsonPropertyName("advisory_ids")] ImmutableArray<string> AdvisoryIds, - [property: JsonPropertyName("products")] ImmutableArray<string> Products, - [property: JsonPropertyName("statuses")] ImmutableArray<string> Statuses, - [property: JsonPropertyName("justification")] string? Justification = null, - [property: JsonPropertyName("impact")] string? Impact = null); +using System.Collections.Immutable; +using System.Text.Json.Serialization; + +namespace StellaOps.Concelier.RawModels; + +public sealed record VexRawDocument( + [property: JsonPropertyName("tenant")] string Tenant, + [property: JsonPropertyName("source")] RawSourceMetadata Source, + [property: JsonPropertyName("upstream")] RawUpstreamMetadata Upstream, + [property: JsonPropertyName("content")] RawContent Content, + [property: JsonPropertyName("linkset")] RawLinkset Linkset, + [property: JsonPropertyName("statements")] ImmutableArray<VexStatementSummary> Statements, + [property: JsonPropertyName("supersedes")] string? Supersedes = null) +{ + public VexRawDocument WithSupersedes(string supersedes) + => this with { Supersedes = supersedes }; +} + +public sealed record VexStatementSummary( + [property: JsonPropertyName("advisory_ids")] ImmutableArray<string> AdvisoryIds, + [property: JsonPropertyName("products")] ImmutableArray<string> Products, + [property: JsonPropertyName("statuses")] ImmutableArray<string> Statuses, + [property: JsonPropertyName("justification")] string? Justification = null, + [property: JsonPropertyName("impact")] string? 
Impact = null); diff --git a/src/StellaOps.Concelier.Storage.Mongo/AGENTS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/AGENTS.md similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/AGENTS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/AGENTS.md diff --git a/src/StellaOps.Concelier.Storage.Mongo/Advisories/AdvisoryDocument.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Advisories/AdvisoryDocument.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/Advisories/AdvisoryDocument.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Advisories/AdvisoryDocument.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/Advisories/AdvisoryStore.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Advisories/AdvisoryStore.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/Advisories/AdvisoryStore.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Advisories/AdvisoryStore.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/Advisories/IAdvisoryStore.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Advisories/IAdvisoryStore.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/Advisories/IAdvisoryStore.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Advisories/IAdvisoryStore.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/Advisories/NormalizedVersionDocument.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Advisories/NormalizedVersionDocument.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/Advisories/NormalizedVersionDocument.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Advisories/NormalizedVersionDocument.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/Advisories/NormalizedVersionDocumentFactory.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Advisories/NormalizedVersionDocumentFactory.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/Advisories/NormalizedVersionDocumentFactory.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Advisories/NormalizedVersionDocumentFactory.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/Aliases/AliasDocument.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Aliases/AliasDocument.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/Aliases/AliasDocument.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Aliases/AliasDocument.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/Aliases/AliasStore.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Aliases/AliasStore.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/Aliases/AliasStore.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Aliases/AliasStore.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/Aliases/AliasStoreConstants.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Aliases/AliasStoreConstants.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/Aliases/AliasStoreConstants.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Aliases/AliasStoreConstants.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/Aliases/AliasStoreMetrics.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Aliases/AliasStoreMetrics.cs similarity index 
100% rename from src/StellaOps.Concelier.Storage.Mongo/Aliases/AliasStoreMetrics.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Aliases/AliasStoreMetrics.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/Aliases/IAliasStore.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Aliases/IAliasStore.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/Aliases/IAliasStore.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Aliases/IAliasStore.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/ChangeHistory/ChangeHistoryDocument.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/ChangeHistory/ChangeHistoryDocument.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/ChangeHistory/ChangeHistoryDocument.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/ChangeHistory/ChangeHistoryDocument.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/ChangeHistory/ChangeHistoryDocumentExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/ChangeHistory/ChangeHistoryDocumentExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/ChangeHistory/ChangeHistoryDocumentExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/ChangeHistory/ChangeHistoryDocumentExtensions.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/ChangeHistory/ChangeHistoryFieldChange.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/ChangeHistory/ChangeHistoryFieldChange.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/ChangeHistory/ChangeHistoryFieldChange.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/ChangeHistory/ChangeHistoryFieldChange.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/ChangeHistory/ChangeHistoryRecord.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/ChangeHistory/ChangeHistoryRecord.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/ChangeHistory/ChangeHistoryRecord.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/ChangeHistory/ChangeHistoryRecord.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/ChangeHistory/IChangeHistoryStore.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/ChangeHistory/IChangeHistoryStore.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/ChangeHistory/IChangeHistoryStore.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/ChangeHistory/IChangeHistoryStore.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/ChangeHistory/MongoChangeHistoryStore.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/ChangeHistory/MongoChangeHistoryStore.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/ChangeHistory/MongoChangeHistoryStore.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/ChangeHistory/MongoChangeHistoryStore.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/Conflicts/AdvisoryConflictDocument.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Conflicts/AdvisoryConflictDocument.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/Conflicts/AdvisoryConflictDocument.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Conflicts/AdvisoryConflictDocument.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/Conflicts/AdvisoryConflictRecord.cs 
b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Conflicts/AdvisoryConflictRecord.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/Conflicts/AdvisoryConflictRecord.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Conflicts/AdvisoryConflictRecord.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/Conflicts/AdvisoryConflictStore.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Conflicts/AdvisoryConflictStore.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/Conflicts/AdvisoryConflictStore.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Conflicts/AdvisoryConflictStore.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/Documents/DocumentDocument.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Documents/DocumentDocument.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/Documents/DocumentDocument.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Documents/DocumentDocument.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/Documents/DocumentRecord.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Documents/DocumentRecord.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/Documents/DocumentRecord.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Documents/DocumentRecord.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/Documents/DocumentStore.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Documents/DocumentStore.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/Documents/DocumentStore.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Documents/DocumentStore.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/Documents/IDocumentStore.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Documents/IDocumentStore.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/Documents/IDocumentStore.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Documents/IDocumentStore.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/Dtos/DtoDocument.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Dtos/DtoDocument.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/Dtos/DtoDocument.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Dtos/DtoDocument.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/Dtos/DtoRecord.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Dtos/DtoRecord.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/Dtos/DtoRecord.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Dtos/DtoRecord.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/Dtos/DtoStore.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Dtos/DtoStore.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/Dtos/DtoStore.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Dtos/DtoStore.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/Dtos/IDtoStore.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Dtos/IDtoStore.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/Dtos/IDtoStore.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Dtos/IDtoStore.cs diff --git 
a/src/StellaOps.Concelier.Storage.Mongo/Events/MongoAdvisoryEventRepository.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Events/MongoAdvisoryEventRepository.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/Events/MongoAdvisoryEventRepository.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Events/MongoAdvisoryEventRepository.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/Exporting/ExportStateDocument.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Exporting/ExportStateDocument.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/Exporting/ExportStateDocument.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Exporting/ExportStateDocument.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/Exporting/ExportStateManager.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Exporting/ExportStateManager.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/Exporting/ExportStateManager.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Exporting/ExportStateManager.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/Exporting/ExportStateRecord.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Exporting/ExportStateRecord.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/Exporting/ExportStateRecord.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Exporting/ExportStateRecord.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/Exporting/ExportStateStore.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Exporting/ExportStateStore.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/Exporting/ExportStateStore.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Exporting/ExportStateStore.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/Exporting/IExportStateStore.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Exporting/IExportStateStore.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/Exporting/IExportStateStore.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Exporting/IExportStateStore.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/ISourceStateRepository.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/ISourceStateRepository.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/ISourceStateRepository.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/ISourceStateRepository.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/JobLeaseDocument.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/JobLeaseDocument.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/JobLeaseDocument.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/JobLeaseDocument.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/JobRunDocument.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/JobRunDocument.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/JobRunDocument.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/JobRunDocument.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/JpFlags/IJpFlagStore.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/JpFlags/IJpFlagStore.cs similarity index 100% rename from 
src/StellaOps.Concelier.Storage.Mongo/JpFlags/IJpFlagStore.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/JpFlags/IJpFlagStore.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/JpFlags/JpFlagDocument.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/JpFlags/JpFlagDocument.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/JpFlags/JpFlagDocument.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/JpFlags/JpFlagDocument.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/JpFlags/JpFlagRecord.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/JpFlags/JpFlagRecord.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/JpFlags/JpFlagRecord.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/JpFlags/JpFlagRecord.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/JpFlags/JpFlagStore.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/JpFlags/JpFlagStore.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/JpFlags/JpFlagStore.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/JpFlags/JpFlagStore.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/MIGRATIONS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/MIGRATIONS.md similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/MIGRATIONS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/MIGRATIONS.md diff --git a/src/StellaOps.Concelier.Storage.Mongo/MergeEvents/IMergeEventStore.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/MergeEvents/IMergeEventStore.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/MergeEvents/IMergeEventStore.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/MergeEvents/IMergeEventStore.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/MergeEvents/MergeEventDocument.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/MergeEvents/MergeEventDocument.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/MergeEvents/MergeEventDocument.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/MergeEvents/MergeEventDocument.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/MergeEvents/MergeEventRecord.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/MergeEvents/MergeEventRecord.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/MergeEvents/MergeEventRecord.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/MergeEvents/MergeEventRecord.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/MergeEvents/MergeEventStore.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/MergeEvents/MergeEventStore.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/MergeEvents/MergeEventStore.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/MergeEvents/MergeEventStore.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/MergeEvents/MergeFieldDecision.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/MergeEvents/MergeFieldDecision.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/MergeEvents/MergeFieldDecision.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/MergeEvents/MergeFieldDecision.cs diff --git 
a/src/StellaOps.Concelier.Storage.Mongo/Migrations/EnsureAdvisoryEventCollectionsMigration.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Migrations/EnsureAdvisoryEventCollectionsMigration.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/Migrations/EnsureAdvisoryEventCollectionsMigration.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Migrations/EnsureAdvisoryEventCollectionsMigration.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/Migrations/EnsureAdvisoryRawIdempotencyIndexMigration.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Migrations/EnsureAdvisoryRawIdempotencyIndexMigration.cs similarity index 97% rename from src/StellaOps.Concelier.Storage.Mongo/Migrations/EnsureAdvisoryRawIdempotencyIndexMigration.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Migrations/EnsureAdvisoryRawIdempotencyIndexMigration.cs index 7a3c6dfb..51dbacfb 100644 --- a/src/StellaOps.Concelier.Storage.Mongo/Migrations/EnsureAdvisoryRawIdempotencyIndexMigration.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Migrations/EnsureAdvisoryRawIdempotencyIndexMigration.cs @@ -1,156 +1,156 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; -using MongoDB.Bson; -using MongoDB.Driver; - -namespace StellaOps.Concelier.Storage.Mongo.Migrations; - -public sealed class EnsureAdvisoryRawIdempotencyIndexMigration : IMongoMigration -{ - private const string IndexName = "advisory_raw_idempotency"; - - public string Id => "20251028_advisory_raw_idempotency_index"; - - public string Description => "Ensure advisory_raw collection enforces idempotency via unique compound index."; - - public async Task ApplyAsync(IMongoDatabase database, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(database); - - var collection = database.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryRaw); - await EnsureNoDuplicatesAsync(collection, cancellationToken).ConfigureAwait(false); - - var existingIndex = await FindExistingIndexAsync(collection, cancellationToken).ConfigureAwait(false); - if (existingIndex is not null && - existingIndex.TryGetValue("unique", out var uniqueValue) && - uniqueValue.ToBoolean() && - existingIndex.TryGetValue("key", out var keyValue) && - keyValue is BsonDocument keyDocument && - KeysMatch(keyDocument)) - { - return; - } - - if (existingIndex is not null) - { - try - { - await collection.Indexes.DropOneAsync(IndexName, cancellationToken).ConfigureAwait(false); - } - catch (MongoCommandException ex) when (ex.Code == 27) - { - // Index not found; safe to ignore. 
- } - } - - var keys = Builders<BsonDocument>.IndexKeys - .Ascending("source.vendor") - .Ascending("upstream.upstream_id") - .Ascending("upstream.content_hash") - .Ascending("tenant"); - - var options = new CreateIndexOptions - { - Name = IndexName, - Unique = true, - }; - - await collection.Indexes.CreateOneAsync(new CreateIndexModel<BsonDocument>(keys, options), cancellationToken: cancellationToken).ConfigureAwait(false); - } - - private static async Task<BsonDocument?> FindExistingIndexAsync( - IMongoCollection<BsonDocument> collection, - CancellationToken cancellationToken) - { - using var cursor = await collection.Indexes.ListAsync(cancellationToken).ConfigureAwait(false); - var indexes = await cursor.ToListAsync(cancellationToken).ConfigureAwait(false); - return indexes.FirstOrDefault(doc => doc.TryGetValue("name", out var name) && string.Equals(name.AsString, IndexName, StringComparison.Ordinal)); - } - - private static async Task EnsureNoDuplicatesAsync(IMongoCollection<BsonDocument> collection, CancellationToken cancellationToken) - { - var pipeline = new[] - { - new BsonDocument("$group", new BsonDocument - { - { - "_id", - new BsonDocument - { - { "vendor", "$source.vendor" }, - { "upstreamId", "$upstream.upstream_id" }, - { "contentHash", "$upstream.content_hash" }, - { "tenant", "$tenant" }, - } - }, - { "count", new BsonDocument("$sum", 1) }, - { "ids", new BsonDocument("$push", "$_id") }, - }), - new BsonDocument("$match", new BsonDocument("count", new BsonDocument("$gt", 1))), - new BsonDocument("$limit", 1), - }; - - var pipelineDefinition = PipelineDefinition<BsonDocument, BsonDocument>.Create(pipeline); - var duplicate = await collection.Aggregate(pipelineDefinition, cancellationToken: cancellationToken) - .FirstOrDefaultAsync(cancellationToken) - .ConfigureAwait(false); - if (duplicate is null) - { - return; - } - - var keyDocument = duplicate["_id"].AsBsonDocument; - var vendor = keyDocument.GetValue("vendor", BsonNull.Value)?.ToString() ?? "<null>"; - var upstreamId = keyDocument.GetValue("upstreamId", BsonNull.Value)?.ToString() ?? "<null>"; - var contentHash = keyDocument.GetValue("contentHash", BsonNull.Value)?.ToString() ?? "<null>"; - var tenant = keyDocument.GetValue("tenant", BsonNull.Value)?.ToString() ?? "<null>"; - BsonArray idArray = duplicate.TryGetValue("ids", out var idsValue) && idsValue is BsonArray array - ? array - : new BsonArray(); - var ids = new string[idArray.Count]; - for (var i = 0; i < idArray.Count; i++) - { - ids[i] = idArray[i]?.ToString() ?? "<null>"; - } - - throw new InvalidOperationException( - $"Cannot create advisory_raw idempotency index because duplicate documents exist for vendor '{vendor}', upstream '{upstreamId}', content hash '{contentHash}', tenant '{tenant}'. 
Conflicting document ids: {string.Join(", ", ids)}."); - } - - private static bool KeysMatch(BsonDocument keyDocument) - { - if (keyDocument.ElementCount != 4) - { - return false; - } - - var expected = new[] - { - ("source.vendor", 1), - ("upstream.upstream_id", 1), - ("upstream.content_hash", 1), - ("tenant", 1), - }; - - var index = 0; - foreach (var element in keyDocument.Elements) - { - if (!string.Equals(element.Name, expected[index].Item1, StringComparison.Ordinal)) - { - return false; - } - - if (!element.Value.IsInt32 || element.Value.AsInt32 != expected[index].Item2) - { - return false; - } - - index++; - } - - return true; - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using MongoDB.Bson; +using MongoDB.Driver; + +namespace StellaOps.Concelier.Storage.Mongo.Migrations; + +public sealed class EnsureAdvisoryRawIdempotencyIndexMigration : IMongoMigration +{ + private const string IndexName = "advisory_raw_idempotency"; + + public string Id => "20251028_advisory_raw_idempotency_index"; + + public string Description => "Ensure advisory_raw collection enforces idempotency via unique compound index."; + + public async Task ApplyAsync(IMongoDatabase database, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(database); + + var collection = database.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryRaw); + await EnsureNoDuplicatesAsync(collection, cancellationToken).ConfigureAwait(false); + + var existingIndex = await FindExistingIndexAsync(collection, cancellationToken).ConfigureAwait(false); + if (existingIndex is not null && + existingIndex.TryGetValue("unique", out var uniqueValue) && + uniqueValue.ToBoolean() && + existingIndex.TryGetValue("key", out var keyValue) && + keyValue is BsonDocument keyDocument && + KeysMatch(keyDocument)) + { + return; + } + + if (existingIndex is not null) + { + try + { + await collection.Indexes.DropOneAsync(IndexName, cancellationToken).ConfigureAwait(false); + } + catch (MongoCommandException ex) when (ex.Code == 27) + { + // Index not found; safe to ignore. 
+ } + } + + var keys = Builders<BsonDocument>.IndexKeys + .Ascending("source.vendor") + .Ascending("upstream.upstream_id") + .Ascending("upstream.content_hash") + .Ascending("tenant"); + + var options = new CreateIndexOptions + { + Name = IndexName, + Unique = true, + }; + + await collection.Indexes.CreateOneAsync(new CreateIndexModel<BsonDocument>(keys, options), cancellationToken: cancellationToken).ConfigureAwait(false); + } + + private static async Task<BsonDocument?> FindExistingIndexAsync( + IMongoCollection<BsonDocument> collection, + CancellationToken cancellationToken) + { + using var cursor = await collection.Indexes.ListAsync(cancellationToken).ConfigureAwait(false); + var indexes = await cursor.ToListAsync(cancellationToken).ConfigureAwait(false); + return indexes.FirstOrDefault(doc => doc.TryGetValue("name", out var name) && string.Equals(name.AsString, IndexName, StringComparison.Ordinal)); + } + + private static async Task EnsureNoDuplicatesAsync(IMongoCollection<BsonDocument> collection, CancellationToken cancellationToken) + { + var pipeline = new[] + { + new BsonDocument("$group", new BsonDocument + { + { + "_id", + new BsonDocument + { + { "vendor", "$source.vendor" }, + { "upstreamId", "$upstream.upstream_id" }, + { "contentHash", "$upstream.content_hash" }, + { "tenant", "$tenant" }, + } + }, + { "count", new BsonDocument("$sum", 1) }, + { "ids", new BsonDocument("$push", "$_id") }, + }), + new BsonDocument("$match", new BsonDocument("count", new BsonDocument("$gt", 1))), + new BsonDocument("$limit", 1), + }; + + var pipelineDefinition = PipelineDefinition<BsonDocument, BsonDocument>.Create(pipeline); + var duplicate = await collection.Aggregate(pipelineDefinition, cancellationToken: cancellationToken) + .FirstOrDefaultAsync(cancellationToken) + .ConfigureAwait(false); + if (duplicate is null) + { + return; + } + + var keyDocument = duplicate["_id"].AsBsonDocument; + var vendor = keyDocument.GetValue("vendor", BsonNull.Value)?.ToString() ?? "<null>"; + var upstreamId = keyDocument.GetValue("upstreamId", BsonNull.Value)?.ToString() ?? "<null>"; + var contentHash = keyDocument.GetValue("contentHash", BsonNull.Value)?.ToString() ?? "<null>"; + var tenant = keyDocument.GetValue("tenant", BsonNull.Value)?.ToString() ?? "<null>"; + BsonArray idArray = duplicate.TryGetValue("ids", out var idsValue) && idsValue is BsonArray array + ? array + : new BsonArray(); + var ids = new string[idArray.Count]; + for (var i = 0; i < idArray.Count; i++) + { + ids[i] = idArray[i]?.ToString() ?? "<null>"; + } + + throw new InvalidOperationException( + $"Cannot create advisory_raw idempotency index because duplicate documents exist for vendor '{vendor}', upstream '{upstreamId}', content hash '{contentHash}', tenant '{tenant}'. 
Conflicting document ids: {string.Join(", ", ids)}."); + } + + private static bool KeysMatch(BsonDocument keyDocument) + { + if (keyDocument.ElementCount != 4) + { + return false; + } + + var expected = new[] + { + ("source.vendor", 1), + ("upstream.upstream_id", 1), + ("upstream.content_hash", 1), + ("tenant", 1), + }; + + var index = 0; + foreach (var element in keyDocument.Elements) + { + if (!string.Equals(element.Name, expected[index].Item1, StringComparison.Ordinal)) + { + return false; + } + + if (!element.Value.IsInt32 || element.Value.AsInt32 != expected[index].Item2) + { + return false; + } + + index++; + } + + return true; + } +} diff --git a/src/StellaOps.Concelier.Storage.Mongo/Migrations/EnsureAdvisoryRawValidatorMigration.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Migrations/EnsureAdvisoryRawValidatorMigration.cs similarity index 97% rename from src/StellaOps.Concelier.Storage.Mongo/Migrations/EnsureAdvisoryRawValidatorMigration.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Migrations/EnsureAdvisoryRawValidatorMigration.cs index f9598385..acc19b63 100644 --- a/src/StellaOps.Concelier.Storage.Mongo/Migrations/EnsureAdvisoryRawValidatorMigration.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Migrations/EnsureAdvisoryRawValidatorMigration.cs @@ -1,372 +1,372 @@ -using System.Collections.Generic; -using Microsoft.Extensions.Options; -using MongoDB.Bson; -using MongoDB.Driver; - -namespace StellaOps.Concelier.Storage.Mongo.Migrations; - -internal sealed class EnsureAdvisoryRawValidatorMigration : IMongoMigration -{ - private const string ForbiddenExactPattern = "^(?i)(severity|cvss|cvss_vector|merged_from|consensus_provider|reachability|asset_criticality|risk_score)$"; - private const string ForbiddenEffectivePattern = "^(?i)effective_"; - - private static readonly IReadOnlyList<object> AllBsonTypeNames = new object[] - { - "double", - "string", - "object", - "array", - "binData", - "undefined", - "objectId", - "bool", - "date", - "null", - "regex", - "dbPointer", - "javascript", - "symbol", - "javascriptWithScope", - "int", - "timestamp", - "long", - "decimal", - "minKey", - "maxKey", - }; - - private readonly MongoStorageOptions _options; - - public EnsureAdvisoryRawValidatorMigration(IOptions<MongoStorageOptions> options) - { - ArgumentNullException.ThrowIfNull(options); - _options = options.Value ?? throw new ArgumentNullException(nameof(options.Value)); - } - - public string Id => "20251028_advisory_raw_validator"; - - public string Description => "Ensure advisory_raw collection enforces Aggregation-Only Contract schema"; - - public async Task ApplyAsync(IMongoDatabase database, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(database); - - var validatorOptions = _options.AdvisoryRawValidator ?? 
new MongoCollectionValidatorOptions(); - var validator = new BsonDocument("$jsonSchema", BuildSchema()); - - var command = new BsonDocument - { - { "collMod", MongoStorageDefaults.Collections.AdvisoryRaw }, - { "validator", validator }, - { "validationLevel", GetValidationLevelString(validatorOptions.Level) }, - { "validationAction", GetValidationActionString(validatorOptions.Action) }, - }; - - try - { - await database.RunCommandAsync<BsonDocument>(command, cancellationToken: cancellationToken).ConfigureAwait(false); - } - catch (MongoCommandException ex) when (ex.Code == 26) - { - var createOptions = new CreateCollectionOptions<BsonDocument> - { - Validator = validator, - ValidationLevel = MapValidationLevel(validatorOptions.Level), - ValidationAction = MapValidationAction(validatorOptions.Action), - }; - - await database.CreateCollectionAsync( - MongoStorageDefaults.Collections.AdvisoryRaw, - createOptions, - cancellationToken).ConfigureAwait(false); - } - } - - private static BsonDocument BuildSchema() - { - var schema = new BsonDocument - { - { "bsonType", "object" }, - { "required", new BsonArray(new[] { "tenant", "source", "upstream", "content", "linkset" }) }, - { "properties", BuildTopLevelProperties() }, - { "patternProperties", BuildForbiddenPatterns() }, - }; - - return schema; - } - - private static BsonDocument BuildTopLevelProperties() - { - var properties = new BsonDocument - { - { "_id", new BsonDocument("bsonType", "string") }, - { - "tenant", - new BsonDocument - { - { "bsonType", "string" }, - { "minLength", 1 }, - } - }, - { "source", BuildSourceSchema() }, - { "upstream", BuildUpstreamSchema() }, - { "content", BuildContentSchema() }, - { "identifiers", BuildIdentifiersSchema() }, - { "linkset", BuildLinksetSchema() }, - { "supersedes", new BsonDocument("bsonType", new BsonArray(new object[] { "string", "null" })) }, - { - "created_at", - new BsonDocument - { - { "bsonType", new BsonArray(new object[] { "date", "string", "null" }) }, - } - }, - { - "ingested_at", - new BsonDocument - { - { "bsonType", new BsonArray(new object[] { "date", "string", "null" }) }, - } - }, - }; - - return properties; - } - - private static BsonDocument BuildSourceSchema() - { - return new BsonDocument - { - { "bsonType", "object" }, - { "required", new BsonArray(new[] { "vendor", "connector", "version" }) }, - { - "properties", - new BsonDocument - { - { "vendor", new BsonDocument("bsonType", "string") }, - { "connector", new BsonDocument("bsonType", "string") }, - { "version", new BsonDocument("bsonType", "string") }, - { "stream", new BsonDocument("bsonType", new BsonArray(new object[] { "string", "null" })) }, - } - }, - { "additionalProperties", false }, - }; - } - - private static BsonDocument BuildUpstreamSchema() - { - return new BsonDocument - { - { "bsonType", "object" }, - { "required", new BsonArray(new[] { "upstream_id", "retrieved_at", "content_hash", "signature", "provenance" }) }, - { - "properties", - new BsonDocument - { - { "upstream_id", new BsonDocument("bsonType", "string") }, - { "document_version", new BsonDocument("bsonType", new BsonArray(new object[] { "string", "null" })) }, - { "retrieved_at", new BsonDocument("bsonType", new BsonArray(new object[] { "date", "string" })) }, - { "content_hash", new BsonDocument("bsonType", "string") }, - { "signature", BuildSignatureSchema() }, - { "provenance", BuildProvenanceSchema() }, - } - }, - { "additionalProperties", false }, - }; - } - - private static BsonDocument BuildSignatureSchema() - { - return new BsonDocument - 
{ - { "bsonType", "object" }, - { "required", new BsonArray(new[] { "present" }) }, - { - "properties", - new BsonDocument - { - { "present", new BsonDocument("bsonType", "bool") }, - { "format", new BsonDocument("bsonType", new BsonArray(new object[] { "string", "null" })) }, - { "key_id", new BsonDocument("bsonType", new BsonArray(new object[] { "string", "null" })) }, - { "sig", new BsonDocument("bsonType", new BsonArray(new object[] { "string", "null" })) }, - { "certificate", new BsonDocument("bsonType", new BsonArray(new object[] { "string", "null" })) }, - { "digest", new BsonDocument("bsonType", new BsonArray(new object[] { "string", "null" })) }, - } - }, - { "additionalProperties", false }, - }; - } - - private static BsonDocument BuildProvenanceSchema() - { - return new BsonDocument - { - { "bsonType", "object" }, - { "additionalProperties", new BsonDocument("bsonType", new BsonArray(new object[] { "string", "null" })) }, - }; - } - - private static BsonDocument BuildContentSchema() - { - return new BsonDocument - { - { "bsonType", "object" }, - { "required", new BsonArray(new[] { "format", "raw" }) }, - { - "properties", - new BsonDocument - { - { "format", new BsonDocument("bsonType", "string") }, - { "spec_version", new BsonDocument("bsonType", new BsonArray(new object[] { "string", "null" })) }, - { - "raw", - new BsonDocument - { - { "bsonType", new BsonArray(new object[] - { - "object", - "array", - "string", - "bool", - "double", - "int", - "long", - "decimal", - }) - }, - } - }, - { "encoding", new BsonDocument("bsonType", new BsonArray(new object[] { "string", "null" })) }, - } - }, - { "additionalProperties", false }, - }; - } - - private static BsonDocument BuildIdentifiersSchema() - { - return new BsonDocument - { - { "bsonType", "object" }, - { "required", new BsonArray(new[] { "aliases", "primary" }) }, - { - "properties", - new BsonDocument - { - { "aliases", CreateStringArraySchema() }, - { "primary", new BsonDocument("bsonType", "string") }, - } - }, - { "additionalProperties", false }, - }; - } - - private static BsonDocument BuildLinksetSchema() - { - return new BsonDocument - { - { "bsonType", "object" }, - { - "properties", - new BsonDocument - { - { "aliases", CreateStringArraySchema() }, - { "purls", CreateStringArraySchema() }, - { "cpes", CreateStringArraySchema() }, - { - "references", - new BsonDocument - { - { "bsonType", "array" }, - { - "items", - new BsonDocument - { - { "bsonType", "object" }, - { "required", new BsonArray(new[] { "type", "url" }) }, - { - "properties", - new BsonDocument - { - { "type", new BsonDocument("bsonType", "string") }, - { "url", new BsonDocument("bsonType", "string") }, - { "source", new BsonDocument("bsonType", new BsonArray(new object[] { "string", "null" })) }, - } - }, - { "additionalProperties", false }, - } - } - } - }, - { "reconciled_from", CreateStringArraySchema() }, - { - "notes", - new BsonDocument - { - { "bsonType", "object" }, - { "additionalProperties", new BsonDocument("bsonType", new BsonArray(new object[] { "string", "null" })) }, - } - }, - } - }, - { "additionalProperties", false }, - }; - } - - private static BsonDocument CreateStringArraySchema() - { - return new BsonDocument - { - { "bsonType", "array" }, - { "items", new BsonDocument("bsonType", "string") }, - }; - } - - private static BsonDocument BuildForbiddenPatterns() - { - return new BsonDocument - { - { ForbiddenExactPattern, CreateForbiddenPattern("Derived and normalized fields are forbidden by the Aggregation-Only Contract.") }, - { 
ForbiddenEffectivePattern, CreateForbiddenPattern("Fields starting with 'effective_' must not be persisted in advisory_raw.") }, - }; - } - - private static BsonDocument CreateForbiddenPattern(string description) - { - return new BsonDocument - { - { "description", description }, - { "not", new BsonDocument("bsonType", new BsonArray(AllBsonTypeNames)) }, - }; - } - - private static string GetValidationLevelString(MongoValidationLevel level) => level switch - { - MongoValidationLevel.Off => "off", - MongoValidationLevel.Moderate => "moderate", - MongoValidationLevel.Strict => "strict", - _ => "moderate", - }; - - private static string GetValidationActionString(MongoValidationAction action) => action switch - { - MongoValidationAction.Warn => "warn", - MongoValidationAction.Error => "error", - _ => "warn", - }; - - private static DocumentValidationLevel MapValidationLevel(MongoValidationLevel level) => level switch - { - MongoValidationLevel.Off => DocumentValidationLevel.Off, - MongoValidationLevel.Moderate => DocumentValidationLevel.Moderate, - MongoValidationLevel.Strict => DocumentValidationLevel.Strict, - _ => DocumentValidationLevel.Moderate, - }; - - private static DocumentValidationAction MapValidationAction(MongoValidationAction action) => action switch - { - MongoValidationAction.Warn => DocumentValidationAction.Warn, - MongoValidationAction.Error => DocumentValidationAction.Error, - _ => DocumentValidationAction.Warn, - }; -} +using System.Collections.Generic; +using Microsoft.Extensions.Options; +using MongoDB.Bson; +using MongoDB.Driver; + +namespace StellaOps.Concelier.Storage.Mongo.Migrations; + +internal sealed class EnsureAdvisoryRawValidatorMigration : IMongoMigration +{ + private const string ForbiddenExactPattern = "^(?i)(severity|cvss|cvss_vector|merged_from|consensus_provider|reachability|asset_criticality|risk_score)$"; + private const string ForbiddenEffectivePattern = "^(?i)effective_"; + + private static readonly IReadOnlyList<object> AllBsonTypeNames = new object[] + { + "double", + "string", + "object", + "array", + "binData", + "undefined", + "objectId", + "bool", + "date", + "null", + "regex", + "dbPointer", + "javascript", + "symbol", + "javascriptWithScope", + "int", + "timestamp", + "long", + "decimal", + "minKey", + "maxKey", + }; + + private readonly MongoStorageOptions _options; + + public EnsureAdvisoryRawValidatorMigration(IOptions<MongoStorageOptions> options) + { + ArgumentNullException.ThrowIfNull(options); + _options = options.Value ?? throw new ArgumentNullException(nameof(options.Value)); + } + + public string Id => "20251028_advisory_raw_validator"; + + public string Description => "Ensure advisory_raw collection enforces Aggregation-Only Contract schema"; + + public async Task ApplyAsync(IMongoDatabase database, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(database); + + var validatorOptions = _options.AdvisoryRawValidator ?? 
new MongoCollectionValidatorOptions(); + var validator = new BsonDocument("$jsonSchema", BuildSchema()); + + var command = new BsonDocument + { + { "collMod", MongoStorageDefaults.Collections.AdvisoryRaw }, + { "validator", validator }, + { "validationLevel", GetValidationLevelString(validatorOptions.Level) }, + { "validationAction", GetValidationActionString(validatorOptions.Action) }, + }; + + try + { + await database.RunCommandAsync<BsonDocument>(command, cancellationToken: cancellationToken).ConfigureAwait(false); + } + catch (MongoCommandException ex) when (ex.Code == 26) + { + var createOptions = new CreateCollectionOptions<BsonDocument> + { + Validator = validator, + ValidationLevel = MapValidationLevel(validatorOptions.Level), + ValidationAction = MapValidationAction(validatorOptions.Action), + }; + + await database.CreateCollectionAsync( + MongoStorageDefaults.Collections.AdvisoryRaw, + createOptions, + cancellationToken).ConfigureAwait(false); + } + } + + private static BsonDocument BuildSchema() + { + var schema = new BsonDocument + { + { "bsonType", "object" }, + { "required", new BsonArray(new[] { "tenant", "source", "upstream", "content", "linkset" }) }, + { "properties", BuildTopLevelProperties() }, + { "patternProperties", BuildForbiddenPatterns() }, + }; + + return schema; + } + + private static BsonDocument BuildTopLevelProperties() + { + var properties = new BsonDocument + { + { "_id", new BsonDocument("bsonType", "string") }, + { + "tenant", + new BsonDocument + { + { "bsonType", "string" }, + { "minLength", 1 }, + } + }, + { "source", BuildSourceSchema() }, + { "upstream", BuildUpstreamSchema() }, + { "content", BuildContentSchema() }, + { "identifiers", BuildIdentifiersSchema() }, + { "linkset", BuildLinksetSchema() }, + { "supersedes", new BsonDocument("bsonType", new BsonArray(new object[] { "string", "null" })) }, + { + "created_at", + new BsonDocument + { + { "bsonType", new BsonArray(new object[] { "date", "string", "null" }) }, + } + }, + { + "ingested_at", + new BsonDocument + { + { "bsonType", new BsonArray(new object[] { "date", "string", "null" }) }, + } + }, + }; + + return properties; + } + + private static BsonDocument BuildSourceSchema() + { + return new BsonDocument + { + { "bsonType", "object" }, + { "required", new BsonArray(new[] { "vendor", "connector", "version" }) }, + { + "properties", + new BsonDocument + { + { "vendor", new BsonDocument("bsonType", "string") }, + { "connector", new BsonDocument("bsonType", "string") }, + { "version", new BsonDocument("bsonType", "string") }, + { "stream", new BsonDocument("bsonType", new BsonArray(new object[] { "string", "null" })) }, + } + }, + { "additionalProperties", false }, + }; + } + + private static BsonDocument BuildUpstreamSchema() + { + return new BsonDocument + { + { "bsonType", "object" }, + { "required", new BsonArray(new[] { "upstream_id", "retrieved_at", "content_hash", "signature", "provenance" }) }, + { + "properties", + new BsonDocument + { + { "upstream_id", new BsonDocument("bsonType", "string") }, + { "document_version", new BsonDocument("bsonType", new BsonArray(new object[] { "string", "null" })) }, + { "retrieved_at", new BsonDocument("bsonType", new BsonArray(new object[] { "date", "string" })) }, + { "content_hash", new BsonDocument("bsonType", "string") }, + { "signature", BuildSignatureSchema() }, + { "provenance", BuildProvenanceSchema() }, + } + }, + { "additionalProperties", false }, + }; + } + + private static BsonDocument BuildSignatureSchema() + { + return new BsonDocument + 
{ + { "bsonType", "object" }, + { "required", new BsonArray(new[] { "present" }) }, + { + "properties", + new BsonDocument + { + { "present", new BsonDocument("bsonType", "bool") }, + { "format", new BsonDocument("bsonType", new BsonArray(new object[] { "string", "null" })) }, + { "key_id", new BsonDocument("bsonType", new BsonArray(new object[] { "string", "null" })) }, + { "sig", new BsonDocument("bsonType", new BsonArray(new object[] { "string", "null" })) }, + { "certificate", new BsonDocument("bsonType", new BsonArray(new object[] { "string", "null" })) }, + { "digest", new BsonDocument("bsonType", new BsonArray(new object[] { "string", "null" })) }, + } + }, + { "additionalProperties", false }, + }; + } + + private static BsonDocument BuildProvenanceSchema() + { + return new BsonDocument + { + { "bsonType", "object" }, + { "additionalProperties", new BsonDocument("bsonType", new BsonArray(new object[] { "string", "null" })) }, + }; + } + + private static BsonDocument BuildContentSchema() + { + return new BsonDocument + { + { "bsonType", "object" }, + { "required", new BsonArray(new[] { "format", "raw" }) }, + { + "properties", + new BsonDocument + { + { "format", new BsonDocument("bsonType", "string") }, + { "spec_version", new BsonDocument("bsonType", new BsonArray(new object[] { "string", "null" })) }, + { + "raw", + new BsonDocument + { + { "bsonType", new BsonArray(new object[] + { + "object", + "array", + "string", + "bool", + "double", + "int", + "long", + "decimal", + }) + }, + } + }, + { "encoding", new BsonDocument("bsonType", new BsonArray(new object[] { "string", "null" })) }, + } + }, + { "additionalProperties", false }, + }; + } + + private static BsonDocument BuildIdentifiersSchema() + { + return new BsonDocument + { + { "bsonType", "object" }, + { "required", new BsonArray(new[] { "aliases", "primary" }) }, + { + "properties", + new BsonDocument + { + { "aliases", CreateStringArraySchema() }, + { "primary", new BsonDocument("bsonType", "string") }, + } + }, + { "additionalProperties", false }, + }; + } + + private static BsonDocument BuildLinksetSchema() + { + return new BsonDocument + { + { "bsonType", "object" }, + { + "properties", + new BsonDocument + { + { "aliases", CreateStringArraySchema() }, + { "purls", CreateStringArraySchema() }, + { "cpes", CreateStringArraySchema() }, + { + "references", + new BsonDocument + { + { "bsonType", "array" }, + { + "items", + new BsonDocument + { + { "bsonType", "object" }, + { "required", new BsonArray(new[] { "type", "url" }) }, + { + "properties", + new BsonDocument + { + { "type", new BsonDocument("bsonType", "string") }, + { "url", new BsonDocument("bsonType", "string") }, + { "source", new BsonDocument("bsonType", new BsonArray(new object[] { "string", "null" })) }, + } + }, + { "additionalProperties", false }, + } + } + } + }, + { "reconciled_from", CreateStringArraySchema() }, + { + "notes", + new BsonDocument + { + { "bsonType", "object" }, + { "additionalProperties", new BsonDocument("bsonType", new BsonArray(new object[] { "string", "null" })) }, + } + }, + } + }, + { "additionalProperties", false }, + }; + } + + private static BsonDocument CreateStringArraySchema() + { + return new BsonDocument + { + { "bsonType", "array" }, + { "items", new BsonDocument("bsonType", "string") }, + }; + } + + private static BsonDocument BuildForbiddenPatterns() + { + return new BsonDocument + { + { ForbiddenExactPattern, CreateForbiddenPattern("Derived and normalized fields are forbidden by the Aggregation-Only Contract.") }, + { 
ForbiddenEffectivePattern, CreateForbiddenPattern("Fields starting with 'effective_' must not be persisted in advisory_raw.") }, + }; + } + + private static BsonDocument CreateForbiddenPattern(string description) + { + return new BsonDocument + { + { "description", description }, + { "not", new BsonDocument("bsonType", new BsonArray(AllBsonTypeNames)) }, + }; + } + + private static string GetValidationLevelString(MongoValidationLevel level) => level switch + { + MongoValidationLevel.Off => "off", + MongoValidationLevel.Moderate => "moderate", + MongoValidationLevel.Strict => "strict", + _ => "moderate", + }; + + private static string GetValidationActionString(MongoValidationAction action) => action switch + { + MongoValidationAction.Warn => "warn", + MongoValidationAction.Error => "error", + _ => "warn", + }; + + private static DocumentValidationLevel MapValidationLevel(MongoValidationLevel level) => level switch + { + MongoValidationLevel.Off => DocumentValidationLevel.Off, + MongoValidationLevel.Moderate => DocumentValidationLevel.Moderate, + MongoValidationLevel.Strict => DocumentValidationLevel.Strict, + _ => DocumentValidationLevel.Moderate, + }; + + private static DocumentValidationAction MapValidationAction(MongoValidationAction action) => action switch + { + MongoValidationAction.Warn => DocumentValidationAction.Warn, + MongoValidationAction.Error => DocumentValidationAction.Error, + _ => DocumentValidationAction.Warn, + }; +} diff --git a/src/StellaOps.Concelier.Storage.Mongo/Migrations/EnsureAdvisorySupersedesBackfillMigration.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Migrations/EnsureAdvisorySupersedesBackfillMigration.cs similarity index 97% rename from src/StellaOps.Concelier.Storage.Mongo/Migrations/EnsureAdvisorySupersedesBackfillMigration.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Migrations/EnsureAdvisorySupersedesBackfillMigration.cs index fde53761..0a07af6c 100644 --- a/src/StellaOps.Concelier.Storage.Mongo/Migrations/EnsureAdvisorySupersedesBackfillMigration.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Migrations/EnsureAdvisorySupersedesBackfillMigration.cs @@ -1,242 +1,242 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; -using MongoDB.Bson; -using MongoDB.Driver; - -namespace StellaOps.Concelier.Storage.Mongo.Migrations; - -public sealed class EnsureAdvisorySupersedesBackfillMigration : IMongoMigration -{ - private const string BackupCollectionName = "advisory_backup_20251028"; - private const string SupersedesMigrationId = "20251028_advisory_supersedes_backfill"; - - public string Id => SupersedesMigrationId; - - public string Description => "Backfill advisory_raw supersedes chains and replace legacy advisory collection with read-only view."; - - public async Task ApplyAsync(IMongoDatabase database, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(database); - - await EnsureLegacyAdvisoryViewAsync(database, cancellationToken).ConfigureAwait(false); - await BackfillSupersedesAsync(database, cancellationToken).ConfigureAwait(false); - } - - private static async Task EnsureLegacyAdvisoryViewAsync(IMongoDatabase database, CancellationToken cancellationToken) - { - var advisoryInfo = await GetCollectionInfoAsync(database, MongoStorageDefaults.Collections.Advisory, cancellationToken).ConfigureAwait(false); - var backupInfo = await GetCollectionInfoAsync(database, BackupCollectionName, 
cancellationToken).ConfigureAwait(false); - - if (advisoryInfo is not null && !IsView(advisoryInfo)) - { - if (backupInfo is null) - { - await RenameCollectionAsync(database, MongoStorageDefaults.Collections.Advisory, BackupCollectionName, cancellationToken).ConfigureAwait(false); - backupInfo = await GetCollectionInfoAsync(database, BackupCollectionName, cancellationToken).ConfigureAwait(false); - } - else - { - await database.DropCollectionAsync(MongoStorageDefaults.Collections.Advisory, cancellationToken).ConfigureAwait(false); - } - - advisoryInfo = null; - } - - if (backupInfo is null) - { - await database.CreateCollectionAsync(BackupCollectionName, cancellationToken: cancellationToken).ConfigureAwait(false); - } - - if (advisoryInfo is null) - { - await CreateViewAsync(database, MongoStorageDefaults.Collections.Advisory, BackupCollectionName, cancellationToken).ConfigureAwait(false); - } - else if (!ViewTargets(advisoryInfo, BackupCollectionName)) - { - await database.DropCollectionAsync(MongoStorageDefaults.Collections.Advisory, cancellationToken).ConfigureAwait(false); - await CreateViewAsync(database, MongoStorageDefaults.Collections.Advisory, BackupCollectionName, cancellationToken).ConfigureAwait(false); - } - } - - private static async Task BackfillSupersedesAsync(IMongoDatabase database, CancellationToken cancellationToken) - { - var collection = database.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryRaw); - - var pipeline = new EmptyPipelineDefinition<BsonDocument>() - .Group(new BsonDocument - { - { - "_id", - new BsonDocument - { - { "tenant", "$tenant" }, - { "vendor", "$source.vendor" }, - { "upstreamId", "$upstream.upstream_id" }, - } - }, - { - "documents", - new BsonDocument("$push", new BsonDocument - { - { "_id", "$_id" }, - { "retrievedAt", "$upstream.retrieved_at" }, - { "contentHash", "$upstream.content_hash" }, - { "supersedes", "$supersedes" }, - }) - } - }) - .Match(new BsonDocument("documents.1", new BsonDocument("$exists", true))); - - using var cursor = await collection.AggregateAsync(pipeline, cancellationToken: cancellationToken).ConfigureAwait(false); - - while (await cursor.MoveNextAsync(cancellationToken).ConfigureAwait(false)) - { - foreach (var group in cursor.Current) - { - if (!group.TryGetValue("documents", out var documentsValue) || documentsValue is not BsonArray documentsArray || documentsArray.Count == 0) - { - continue; - } - - var ordered = documentsArray - .Select(x => x.AsBsonDocument) - .Select(x => new AdvisoryRawRecord( - x.GetValue("_id").AsString, - GetDateTime(x, "retrievedAt"), - x.TryGetValue("supersedes", out var sup) ? sup : BsonNull.Value)) - .OrderBy(record => record.RetrievedAt) - .ThenBy(record => record.Id, StringComparer.Ordinal) - .ToArray(); - - if (ordered.Length <= 1) - { - continue; - } - - var updates = new List<WriteModel<BsonDocument>>(ordered.Length); - for (var index = 0; index < ordered.Length; index++) - { - var expectedSupersedes = index == 0 ? 
BsonNull.Value : BsonValue.Create(ordered[index - 1].Id); - var current = ordered[index]; - - if (AreSupersedesEqual(current.Supersedes, expectedSupersedes)) - { - continue; - } - - var filter = Builders<BsonDocument>.Filter.Eq("_id", current.Id); - var update = Builders<BsonDocument>.Update.Set("supersedes", expectedSupersedes); - updates.Add(new UpdateOneModel<BsonDocument>(filter, update)); - } - - if (updates.Count > 0) - { - await collection.BulkWriteAsync(updates, cancellationToken: cancellationToken).ConfigureAwait(false); - } - } - } - } - - private static async Task<BsonDocument?> GetCollectionInfoAsync(IMongoDatabase database, string name, CancellationToken cancellationToken) - { - var command = new BsonDocument - { - { "listCollections", 1 }, - { "filter", new BsonDocument("name", name) }, - }; - - var result = await database.RunCommandAsync<BsonDocument>(command, cancellationToken: cancellationToken).ConfigureAwait(false); - var batch = result["cursor"]["firstBatch"].AsBsonArray; - return batch.Count > 0 ? batch[0].AsBsonDocument : null; - } - - private static bool IsView(BsonDocument collectionInfo) - => string.Equals(collectionInfo.GetValue("type", BsonString.Empty).AsString, "view", StringComparison.OrdinalIgnoreCase); - - private static bool ViewTargets(BsonDocument collectionInfo, string target) - { - if (!IsView(collectionInfo)) - { - return false; - } - - if (!collectionInfo.TryGetValue("options", out var options) || options is not BsonDocument optionsDocument) - { - return false; - } - - return optionsDocument.TryGetValue("viewOn", out var viewOn) && string.Equals(viewOn.AsString, target, StringComparison.Ordinal); - } - - private static async Task RenameCollectionAsync(IMongoDatabase database, string source, string destination, CancellationToken cancellationToken) - { - var admin = database.Client.GetDatabase("admin"); - var renameCommand = new BsonDocument - { - { "renameCollection", $"{database.DatabaseNamespace.DatabaseName}.{source}" }, - { "to", $"{database.DatabaseNamespace.DatabaseName}.{destination}" }, - { "dropTarget", false }, - }; - - try - { - await admin.RunCommandAsync<BsonDocument>(renameCommand, cancellationToken: cancellationToken).ConfigureAwait(false); - } - catch (MongoCommandException ex) when (ex.Code == 26) - { - // Source namespace not found; ignore. - } - catch (MongoCommandException ex) when (ex.Code == 48) - { - // Target namespace exists; fall back to manual handling by copying data. - await database.DropCollectionAsync(destination, cancellationToken).ConfigureAwait(false); - await admin.RunCommandAsync<BsonDocument>(renameCommand, cancellationToken: cancellationToken).ConfigureAwait(false); - } - } - - private static async Task CreateViewAsync(IMongoDatabase database, string viewName, string sourceName, CancellationToken cancellationToken) - { - var createCommand = new BsonDocument - { - { "create", viewName }, - { "viewOn", sourceName }, - }; - - await database.RunCommandAsync<BsonDocument>(createCommand, cancellationToken: cancellationToken).ConfigureAwait(false); - } - - private static DateTime GetDateTime(BsonDocument document, string fieldName) - { - if (!document.TryGetValue(fieldName, out var value)) - { - return DateTime.MinValue; - } - - return value.BsonType switch - { - BsonType.DateTime => value.ToUniversalTime(), - BsonType.String when DateTime.TryParse(value.AsString, out var parsed) => parsed.ToUniversalTime(), - _ => DateTime.MinValue, - }; - } - - private static bool AreSupersedesEqual(BsonValue? left, BsonValue? 
right) - { - if (left is null || left.IsBsonNull) - { - return right is null || right.IsBsonNull; - } - - if (right is null || right.IsBsonNull) - { - return left.IsBsonNull; - } - - return left.Equals(right); - } - - private sealed record AdvisoryRawRecord(string Id, DateTime RetrievedAt, BsonValue Supersedes); -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using MongoDB.Bson; +using MongoDB.Driver; + +namespace StellaOps.Concelier.Storage.Mongo.Migrations; + +public sealed class EnsureAdvisorySupersedesBackfillMigration : IMongoMigration +{ + private const string BackupCollectionName = "advisory_backup_20251028"; + private const string SupersedesMigrationId = "20251028_advisory_supersedes_backfill"; + + public string Id => SupersedesMigrationId; + + public string Description => "Backfill advisory_raw supersedes chains and replace legacy advisory collection with read-only view."; + + public async Task ApplyAsync(IMongoDatabase database, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(database); + + await EnsureLegacyAdvisoryViewAsync(database, cancellationToken).ConfigureAwait(false); + await BackfillSupersedesAsync(database, cancellationToken).ConfigureAwait(false); + } + + private static async Task EnsureLegacyAdvisoryViewAsync(IMongoDatabase database, CancellationToken cancellationToken) + { + var advisoryInfo = await GetCollectionInfoAsync(database, MongoStorageDefaults.Collections.Advisory, cancellationToken).ConfigureAwait(false); + var backupInfo = await GetCollectionInfoAsync(database, BackupCollectionName, cancellationToken).ConfigureAwait(false); + + if (advisoryInfo is not null && !IsView(advisoryInfo)) + { + if (backupInfo is null) + { + await RenameCollectionAsync(database, MongoStorageDefaults.Collections.Advisory, BackupCollectionName, cancellationToken).ConfigureAwait(false); + backupInfo = await GetCollectionInfoAsync(database, BackupCollectionName, cancellationToken).ConfigureAwait(false); + } + else + { + await database.DropCollectionAsync(MongoStorageDefaults.Collections.Advisory, cancellationToken).ConfigureAwait(false); + } + + advisoryInfo = null; + } + + if (backupInfo is null) + { + await database.CreateCollectionAsync(BackupCollectionName, cancellationToken: cancellationToken).ConfigureAwait(false); + } + + if (advisoryInfo is null) + { + await CreateViewAsync(database, MongoStorageDefaults.Collections.Advisory, BackupCollectionName, cancellationToken).ConfigureAwait(false); + } + else if (!ViewTargets(advisoryInfo, BackupCollectionName)) + { + await database.DropCollectionAsync(MongoStorageDefaults.Collections.Advisory, cancellationToken).ConfigureAwait(false); + await CreateViewAsync(database, MongoStorageDefaults.Collections.Advisory, BackupCollectionName, cancellationToken).ConfigureAwait(false); + } + } + + private static async Task BackfillSupersedesAsync(IMongoDatabase database, CancellationToken cancellationToken) + { + var collection = database.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryRaw); + + var pipeline = new EmptyPipelineDefinition<BsonDocument>() + .Group(new BsonDocument + { + { + "_id", + new BsonDocument + { + { "tenant", "$tenant" }, + { "vendor", "$source.vendor" }, + { "upstreamId", "$upstream.upstream_id" }, + } + }, + { + "documents", + new BsonDocument("$push", new BsonDocument + { + { "_id", "$_id" }, + { "retrievedAt", "$upstream.retrieved_at" }, + { "contentHash", "$upstream.content_hash" }, + { 
"supersedes", "$supersedes" }, + }) + } + }) + .Match(new BsonDocument("documents.1", new BsonDocument("$exists", true))); + + using var cursor = await collection.AggregateAsync(pipeline, cancellationToken: cancellationToken).ConfigureAwait(false); + + while (await cursor.MoveNextAsync(cancellationToken).ConfigureAwait(false)) + { + foreach (var group in cursor.Current) + { + if (!group.TryGetValue("documents", out var documentsValue) || documentsValue is not BsonArray documentsArray || documentsArray.Count == 0) + { + continue; + } + + var ordered = documentsArray + .Select(x => x.AsBsonDocument) + .Select(x => new AdvisoryRawRecord( + x.GetValue("_id").AsString, + GetDateTime(x, "retrievedAt"), + x.TryGetValue("supersedes", out var sup) ? sup : BsonNull.Value)) + .OrderBy(record => record.RetrievedAt) + .ThenBy(record => record.Id, StringComparer.Ordinal) + .ToArray(); + + if (ordered.Length <= 1) + { + continue; + } + + var updates = new List<WriteModel<BsonDocument>>(ordered.Length); + for (var index = 0; index < ordered.Length; index++) + { + var expectedSupersedes = index == 0 ? BsonNull.Value : BsonValue.Create(ordered[index - 1].Id); + var current = ordered[index]; + + if (AreSupersedesEqual(current.Supersedes, expectedSupersedes)) + { + continue; + } + + var filter = Builders<BsonDocument>.Filter.Eq("_id", current.Id); + var update = Builders<BsonDocument>.Update.Set("supersedes", expectedSupersedes); + updates.Add(new UpdateOneModel<BsonDocument>(filter, update)); + } + + if (updates.Count > 0) + { + await collection.BulkWriteAsync(updates, cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + } + } + + private static async Task<BsonDocument?> GetCollectionInfoAsync(IMongoDatabase database, string name, CancellationToken cancellationToken) + { + var command = new BsonDocument + { + { "listCollections", 1 }, + { "filter", new BsonDocument("name", name) }, + }; + + var result = await database.RunCommandAsync<BsonDocument>(command, cancellationToken: cancellationToken).ConfigureAwait(false); + var batch = result["cursor"]["firstBatch"].AsBsonArray; + return batch.Count > 0 ? batch[0].AsBsonDocument : null; + } + + private static bool IsView(BsonDocument collectionInfo) + => string.Equals(collectionInfo.GetValue("type", BsonString.Empty).AsString, "view", StringComparison.OrdinalIgnoreCase); + + private static bool ViewTargets(BsonDocument collectionInfo, string target) + { + if (!IsView(collectionInfo)) + { + return false; + } + + if (!collectionInfo.TryGetValue("options", out var options) || options is not BsonDocument optionsDocument) + { + return false; + } + + return optionsDocument.TryGetValue("viewOn", out var viewOn) && string.Equals(viewOn.AsString, target, StringComparison.Ordinal); + } + + private static async Task RenameCollectionAsync(IMongoDatabase database, string source, string destination, CancellationToken cancellationToken) + { + var admin = database.Client.GetDatabase("admin"); + var renameCommand = new BsonDocument + { + { "renameCollection", $"{database.DatabaseNamespace.DatabaseName}.{source}" }, + { "to", $"{database.DatabaseNamespace.DatabaseName}.{destination}" }, + { "dropTarget", false }, + }; + + try + { + await admin.RunCommandAsync<BsonDocument>(renameCommand, cancellationToken: cancellationToken).ConfigureAwait(false); + } + catch (MongoCommandException ex) when (ex.Code == 26) + { + // Source namespace not found; ignore. 
+ } + catch (MongoCommandException ex) when (ex.Code == 48) + { + // Target namespace exists; fall back to manual handling by copying data. + await database.DropCollectionAsync(destination, cancellationToken).ConfigureAwait(false); + await admin.RunCommandAsync<BsonDocument>(renameCommand, cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + + private static async Task CreateViewAsync(IMongoDatabase database, string viewName, string sourceName, CancellationToken cancellationToken) + { + var createCommand = new BsonDocument + { + { "create", viewName }, + { "viewOn", sourceName }, + }; + + await database.RunCommandAsync<BsonDocument>(createCommand, cancellationToken: cancellationToken).ConfigureAwait(false); + } + + private static DateTime GetDateTime(BsonDocument document, string fieldName) + { + if (!document.TryGetValue(fieldName, out var value)) + { + return DateTime.MinValue; + } + + return value.BsonType switch + { + BsonType.DateTime => value.ToUniversalTime(), + BsonType.String when DateTime.TryParse(value.AsString, out var parsed) => parsed.ToUniversalTime(), + _ => DateTime.MinValue, + }; + } + + private static bool AreSupersedesEqual(BsonValue? left, BsonValue? right) + { + if (left is null || left.IsBsonNull) + { + return right is null || right.IsBsonNull; + } + + if (right is null || right.IsBsonNull) + { + return left.IsBsonNull; + } + + return left.Equals(right); + } + + private sealed record AdvisoryRawRecord(string Id, DateTime RetrievedAt, BsonValue Supersedes); +} diff --git a/src/StellaOps.Concelier.Storage.Mongo/Migrations/EnsureDocumentExpiryIndexesMigration.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Migrations/EnsureDocumentExpiryIndexesMigration.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/Migrations/EnsureDocumentExpiryIndexesMigration.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Migrations/EnsureDocumentExpiryIndexesMigration.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/Migrations/EnsureGridFsExpiryIndexesMigration.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Migrations/EnsureGridFsExpiryIndexesMigration.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/Migrations/EnsureGridFsExpiryIndexesMigration.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Migrations/EnsureGridFsExpiryIndexesMigration.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/Migrations/IMongoMigration.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Migrations/IMongoMigration.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/Migrations/IMongoMigration.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Migrations/IMongoMigration.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/Migrations/MongoMigrationDocument.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Migrations/MongoMigrationDocument.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/Migrations/MongoMigrationDocument.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Migrations/MongoMigrationDocument.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/Migrations/MongoMigrationRunner.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Migrations/MongoMigrationRunner.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/Migrations/MongoMigrationRunner.cs rename to 
src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Migrations/MongoMigrationRunner.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/Migrations/SemVerStyleBackfillMigration.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Migrations/SemVerStyleBackfillMigration.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/Migrations/SemVerStyleBackfillMigration.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Migrations/SemVerStyleBackfillMigration.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/MongoBootstrapper.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/MongoBootstrapper.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/MongoBootstrapper.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/MongoBootstrapper.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/MongoCollectionValidatorOptions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/MongoCollectionValidatorOptions.cs similarity index 95% rename from src/StellaOps.Concelier.Storage.Mongo/MongoCollectionValidatorOptions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/MongoCollectionValidatorOptions.cs index cc97a81e..50283985 100644 --- a/src/StellaOps.Concelier.Storage.Mongo/MongoCollectionValidatorOptions.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/MongoCollectionValidatorOptions.cs @@ -1,21 +1,21 @@ -namespace StellaOps.Concelier.Storage.Mongo; - -public enum MongoValidationLevel -{ - Off, - Moderate, - Strict, -} - -public enum MongoValidationAction -{ - Warn, - Error, -} - -public sealed class MongoCollectionValidatorOptions -{ - public MongoValidationLevel Level { get; set; } = MongoValidationLevel.Moderate; - - public MongoValidationAction Action { get; set; } = MongoValidationAction.Warn; -} +namespace StellaOps.Concelier.Storage.Mongo; + +public enum MongoValidationLevel +{ + Off, + Moderate, + Strict, +} + +public enum MongoValidationAction +{ + Warn, + Error, +} + +public sealed class MongoCollectionValidatorOptions +{ + public MongoValidationLevel Level { get; set; } = MongoValidationLevel.Moderate; + + public MongoValidationAction Action { get; set; } = MongoValidationAction.Warn; +} diff --git a/src/StellaOps.Concelier.Storage.Mongo/MongoJobStore.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/MongoJobStore.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/MongoJobStore.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/MongoJobStore.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/MongoLeaseStore.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/MongoLeaseStore.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/MongoLeaseStore.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/MongoLeaseStore.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/MongoSessionProvider.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/MongoSessionProvider.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/MongoSessionProvider.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/MongoSessionProvider.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/MongoSourceStateRepository.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/MongoSourceStateRepository.cs similarity index 100% rename from 
src/StellaOps.Concelier.Storage.Mongo/MongoSourceStateRepository.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/MongoSourceStateRepository.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/MongoStorageDefaults.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/MongoStorageDefaults.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/MongoStorageDefaults.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/MongoStorageDefaults.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/MongoStorageOptions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/MongoStorageOptions.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/MongoStorageOptions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/MongoStorageOptions.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/Observations/AdvisoryObservationDocument.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Observations/AdvisoryObservationDocument.cs similarity index 96% rename from src/StellaOps.Concelier.Storage.Mongo/Observations/AdvisoryObservationDocument.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Observations/AdvisoryObservationDocument.cs index 8a7db37e..eba034b8 100644 --- a/src/StellaOps.Concelier.Storage.Mongo/Observations/AdvisoryObservationDocument.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Observations/AdvisoryObservationDocument.cs @@ -1,163 +1,163 @@ -using System; -using System.Collections.Generic; -using MongoDB.Bson; -using MongoDB.Bson.Serialization.Attributes; - -namespace StellaOps.Concelier.Storage.Mongo.Observations; - -[BsonIgnoreExtraElements] -public sealed class AdvisoryObservationDocument -{ - [BsonId] - public string Id { get; set; } = string.Empty; - - [BsonElement("tenant")] - public string Tenant { get; set; } = string.Empty; - - [BsonElement("source")] - public AdvisoryObservationSourceDocument Source { get; set; } = new(); - - [BsonElement("upstream")] - public AdvisoryObservationUpstreamDocument Upstream { get; set; } = new(); - - [BsonElement("content")] - public AdvisoryObservationContentDocument Content { get; set; } = new(); - - [BsonElement("linkset")] - public AdvisoryObservationLinksetDocument Linkset { get; set; } = new(); - - [BsonElement("createdAt")] - public DateTime CreatedAt { get; set; } - = DateTime.UtcNow; - - [BsonElement("attributes")] - [BsonIgnoreIfNull] - public Dictionary<string, string>? Attributes { get; set; } - = new(StringComparer.Ordinal); -} - -[BsonIgnoreExtraElements] -public sealed class AdvisoryObservationSourceDocument -{ - [BsonElement("vendor")] - public string Vendor { get; set; } = string.Empty; - - [BsonElement("stream")] - public string Stream { get; set; } = string.Empty; - - [BsonElement("api")] - public string Api { get; set; } = string.Empty; - - [BsonElement("collectorVersion")] - [BsonIgnoreIfNull] - public string? CollectorVersion { get; set; } - = null; -} - -[BsonIgnoreExtraElements] -public sealed class AdvisoryObservationUpstreamDocument -{ - [BsonElement("upstream_id")] - public string UpstreamId { get; set; } = string.Empty; - - [BsonElement("document_version")] - [BsonIgnoreIfNull] - public string? 
DocumentVersion { get; set; } - = null; - - [BsonElement("fetchedAt")] - public DateTime FetchedAt { get; set; } - = DateTime.UtcNow; - - [BsonElement("receivedAt")] - public DateTime ReceivedAt { get; set; } - = DateTime.UtcNow; - - [BsonElement("contentHash")] - public string ContentHash { get; set; } = string.Empty; - - [BsonElement("signature")] - public AdvisoryObservationSignatureDocument Signature { get; set; } = new(); - - [BsonElement("metadata")] - [BsonIgnoreIfNull] - public Dictionary<string, string>? Metadata { get; set; } - = new(StringComparer.Ordinal); -} - -[BsonIgnoreExtraElements] -public sealed class AdvisoryObservationSignatureDocument -{ - [BsonElement("present")] - public bool Present { get; set; } - = false; - - [BsonElement("format")] - [BsonIgnoreIfNull] - public string? Format { get; set; } - = null; - - [BsonElement("keyId")] - [BsonIgnoreIfNull] - public string? KeyId { get; set; } - = null; - - [BsonElement("signature")] - [BsonIgnoreIfNull] - public string? Signature { get; set; } - = null; -} - -[BsonIgnoreExtraElements] -public sealed class AdvisoryObservationContentDocument -{ - [BsonElement("format")] - public string Format { get; set; } = string.Empty; - - [BsonElement("specVersion")] - [BsonIgnoreIfNull] - public string? SpecVersion { get; set; } - = null; - - [BsonElement("raw")] - public BsonDocument Raw { get; set; } = new(); - - [BsonElement("metadata")] - [BsonIgnoreIfNull] - public Dictionary<string, string>? Metadata { get; set; } - = new(StringComparer.Ordinal); -} - -[BsonIgnoreExtraElements] -public sealed class AdvisoryObservationLinksetDocument -{ - [BsonElement("aliases")] - [BsonIgnoreIfNull] - public List<string>? Aliases { get; set; } - = new(); - - [BsonElement("purls")] - [BsonIgnoreIfNull] - public List<string>? Purls { get; set; } - = new(); - - [BsonElement("cpes")] - [BsonIgnoreIfNull] - public List<string>? Cpes { get; set; } - = new(); - - [BsonElement("references")] - [BsonIgnoreIfNull] - public List<AdvisoryObservationReferenceDocument>? References { get; set; } - = new(); -} - -[BsonIgnoreExtraElements] -public sealed class AdvisoryObservationReferenceDocument -{ - [BsonElement("type")] - public string Type { get; set; } = string.Empty; - - [BsonElement("url")] - public string Url { get; set; } = string.Empty; -} +using System; +using System.Collections.Generic; +using MongoDB.Bson; +using MongoDB.Bson.Serialization.Attributes; + +namespace StellaOps.Concelier.Storage.Mongo.Observations; + +[BsonIgnoreExtraElements] +public sealed class AdvisoryObservationDocument +{ + [BsonId] + public string Id { get; set; } = string.Empty; + + [BsonElement("tenant")] + public string Tenant { get; set; } = string.Empty; + + [BsonElement("source")] + public AdvisoryObservationSourceDocument Source { get; set; } = new(); + + [BsonElement("upstream")] + public AdvisoryObservationUpstreamDocument Upstream { get; set; } = new(); + + [BsonElement("content")] + public AdvisoryObservationContentDocument Content { get; set; } = new(); + + [BsonElement("linkset")] + public AdvisoryObservationLinksetDocument Linkset { get; set; } = new(); + + [BsonElement("createdAt")] + public DateTime CreatedAt { get; set; } + = DateTime.UtcNow; + + [BsonElement("attributes")] + [BsonIgnoreIfNull] + public Dictionary<string, string>? 
Attributes { get; set; } + = new(StringComparer.Ordinal); +} + +[BsonIgnoreExtraElements] +public sealed class AdvisoryObservationSourceDocument +{ + [BsonElement("vendor")] + public string Vendor { get; set; } = string.Empty; + + [BsonElement("stream")] + public string Stream { get; set; } = string.Empty; + + [BsonElement("api")] + public string Api { get; set; } = string.Empty; + + [BsonElement("collectorVersion")] + [BsonIgnoreIfNull] + public string? CollectorVersion { get; set; } + = null; +} + +[BsonIgnoreExtraElements] +public sealed class AdvisoryObservationUpstreamDocument +{ + [BsonElement("upstream_id")] + public string UpstreamId { get; set; } = string.Empty; + + [BsonElement("document_version")] + [BsonIgnoreIfNull] + public string? DocumentVersion { get; set; } + = null; + + [BsonElement("fetchedAt")] + public DateTime FetchedAt { get; set; } + = DateTime.UtcNow; + + [BsonElement("receivedAt")] + public DateTime ReceivedAt { get; set; } + = DateTime.UtcNow; + + [BsonElement("contentHash")] + public string ContentHash { get; set; } = string.Empty; + + [BsonElement("signature")] + public AdvisoryObservationSignatureDocument Signature { get; set; } = new(); + + [BsonElement("metadata")] + [BsonIgnoreIfNull] + public Dictionary<string, string>? Metadata { get; set; } + = new(StringComparer.Ordinal); +} + +[BsonIgnoreExtraElements] +public sealed class AdvisoryObservationSignatureDocument +{ + [BsonElement("present")] + public bool Present { get; set; } + = false; + + [BsonElement("format")] + [BsonIgnoreIfNull] + public string? Format { get; set; } + = null; + + [BsonElement("keyId")] + [BsonIgnoreIfNull] + public string? KeyId { get; set; } + = null; + + [BsonElement("signature")] + [BsonIgnoreIfNull] + public string? Signature { get; set; } + = null; +} + +[BsonIgnoreExtraElements] +public sealed class AdvisoryObservationContentDocument +{ + [BsonElement("format")] + public string Format { get; set; } = string.Empty; + + [BsonElement("specVersion")] + [BsonIgnoreIfNull] + public string? SpecVersion { get; set; } + = null; + + [BsonElement("raw")] + public BsonDocument Raw { get; set; } = new(); + + [BsonElement("metadata")] + [BsonIgnoreIfNull] + public Dictionary<string, string>? Metadata { get; set; } + = new(StringComparer.Ordinal); +} + +[BsonIgnoreExtraElements] +public sealed class AdvisoryObservationLinksetDocument +{ + [BsonElement("aliases")] + [BsonIgnoreIfNull] + public List<string>? Aliases { get; set; } + = new(); + + [BsonElement("purls")] + [BsonIgnoreIfNull] + public List<string>? Purls { get; set; } + = new(); + + [BsonElement("cpes")] + [BsonIgnoreIfNull] + public List<string>? Cpes { get; set; } + = new(); + + [BsonElement("references")] + [BsonIgnoreIfNull] + public List<AdvisoryObservationReferenceDocument>? 
References { get; set; } + = new(); +} + +[BsonIgnoreExtraElements] +public sealed class AdvisoryObservationReferenceDocument +{ + [BsonElement("type")] + public string Type { get; set; } = string.Empty; + + [BsonElement("url")] + public string Url { get; set; } = string.Empty; +} diff --git a/src/StellaOps.Concelier.Storage.Mongo/Observations/AdvisoryObservationDocumentFactory.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Observations/AdvisoryObservationDocumentFactory.cs similarity index 97% rename from src/StellaOps.Concelier.Storage.Mongo/Observations/AdvisoryObservationDocumentFactory.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Observations/AdvisoryObservationDocumentFactory.cs index 03886a9b..ed2d2f83 100644 --- a/src/StellaOps.Concelier.Storage.Mongo/Observations/AdvisoryObservationDocumentFactory.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Observations/AdvisoryObservationDocumentFactory.cs @@ -1,92 +1,92 @@ -using System; -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Linq; -using System.Text.Json.Nodes; -using MongoDB.Bson; -using MongoDB.Bson.IO; -using StellaOps.Concelier.Models.Observations; - -namespace StellaOps.Concelier.Storage.Mongo.Observations; - -internal static class AdvisoryObservationDocumentFactory -{ - private static readonly JsonWriterSettings JsonSettings = new() { OutputMode = JsonOutputMode.RelaxedExtendedJson }; - - public static AdvisoryObservation ToModel(AdvisoryObservationDocument document) - { - ArgumentNullException.ThrowIfNull(document); - - var rawNode = ParseJsonNode(document.Content.Raw); - var attributes = ToImmutable(document.Attributes); - var contentMetadata = ToImmutable(document.Content.Metadata); - var upstreamMetadata = ToImmutable(document.Upstream.Metadata); - - var observation = new AdvisoryObservation( - document.Id, - document.Tenant, - new AdvisoryObservationSource( - document.Source.Vendor, - document.Source.Stream, - document.Source.Api, - document.Source.CollectorVersion), - new AdvisoryObservationUpstream( - document.Upstream.UpstreamId, - document.Upstream.DocumentVersion, - DateTime.SpecifyKind(document.Upstream.FetchedAt, DateTimeKind.Utc), - DateTime.SpecifyKind(document.Upstream.ReceivedAt, DateTimeKind.Utc), - document.Upstream.ContentHash, - new AdvisoryObservationSignature( - document.Upstream.Signature.Present, - document.Upstream.Signature.Format, - document.Upstream.Signature.KeyId, - document.Upstream.Signature.Signature), - upstreamMetadata), - new AdvisoryObservationContent( - document.Content.Format, - document.Content.SpecVersion, - rawNode, - contentMetadata), - new AdvisoryObservationLinkset( - document.Linkset.Aliases ?? Enumerable.Empty<string>(), - document.Linkset.Purls ?? Enumerable.Empty<string>(), - document.Linkset.Cpes ?? Enumerable.Empty<string>(), - document.Linkset.References?.Select(reference => new AdvisoryObservationReference(reference.Type, reference.Url))), - DateTime.SpecifyKind(document.CreatedAt, DateTimeKind.Utc), - attributes); - - return observation; - } - - private static JsonNode ParseJsonNode(BsonDocument raw) - { - if (raw is null || raw.ElementCount == 0) - { - return JsonNode.Parse("{}")!; - } - - var json = raw.ToJson(JsonSettings); - return JsonNode.Parse(json)!; - } - - private static ImmutableDictionary<string, string> ToImmutable(Dictionary<string, string>? 
values) - { - if (values is null || values.Count == 0) - { - return ImmutableDictionary<string, string>.Empty; - } - - var builder = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal); - foreach (var pair in values) - { - if (string.IsNullOrWhiteSpace(pair.Key) || pair.Value is null) - { - continue; - } - - builder[pair.Key.Trim()] = pair.Value; - } - - return builder.ToImmutable(); - } -} +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Linq; +using System.Text.Json.Nodes; +using MongoDB.Bson; +using MongoDB.Bson.IO; +using StellaOps.Concelier.Models.Observations; + +namespace StellaOps.Concelier.Storage.Mongo.Observations; + +internal static class AdvisoryObservationDocumentFactory +{ + private static readonly JsonWriterSettings JsonSettings = new() { OutputMode = JsonOutputMode.RelaxedExtendedJson }; + + public static AdvisoryObservation ToModel(AdvisoryObservationDocument document) + { + ArgumentNullException.ThrowIfNull(document); + + var rawNode = ParseJsonNode(document.Content.Raw); + var attributes = ToImmutable(document.Attributes); + var contentMetadata = ToImmutable(document.Content.Metadata); + var upstreamMetadata = ToImmutable(document.Upstream.Metadata); + + var observation = new AdvisoryObservation( + document.Id, + document.Tenant, + new AdvisoryObservationSource( + document.Source.Vendor, + document.Source.Stream, + document.Source.Api, + document.Source.CollectorVersion), + new AdvisoryObservationUpstream( + document.Upstream.UpstreamId, + document.Upstream.DocumentVersion, + DateTime.SpecifyKind(document.Upstream.FetchedAt, DateTimeKind.Utc), + DateTime.SpecifyKind(document.Upstream.ReceivedAt, DateTimeKind.Utc), + document.Upstream.ContentHash, + new AdvisoryObservationSignature( + document.Upstream.Signature.Present, + document.Upstream.Signature.Format, + document.Upstream.Signature.KeyId, + document.Upstream.Signature.Signature), + upstreamMetadata), + new AdvisoryObservationContent( + document.Content.Format, + document.Content.SpecVersion, + rawNode, + contentMetadata), + new AdvisoryObservationLinkset( + document.Linkset.Aliases ?? Enumerable.Empty<string>(), + document.Linkset.Purls ?? Enumerable.Empty<string>(), + document.Linkset.Cpes ?? Enumerable.Empty<string>(), + document.Linkset.References?.Select(reference => new AdvisoryObservationReference(reference.Type, reference.Url))), + DateTime.SpecifyKind(document.CreatedAt, DateTimeKind.Utc), + attributes); + + return observation; + } + + private static JsonNode ParseJsonNode(BsonDocument raw) + { + if (raw is null || raw.ElementCount == 0) + { + return JsonNode.Parse("{}")!; + } + + var json = raw.ToJson(JsonSettings); + return JsonNode.Parse(json)!; + } + + private static ImmutableDictionary<string, string> ToImmutable(Dictionary<string, string>? 
values) + { + if (values is null || values.Count == 0) + { + return ImmutableDictionary<string, string>.Empty; + } + + var builder = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal); + foreach (var pair in values) + { + if (string.IsNullOrWhiteSpace(pair.Key) || pair.Value is null) + { + continue; + } + + builder[pair.Key.Trim()] = pair.Value; + } + + return builder.ToImmutable(); + } +} diff --git a/src/StellaOps.Concelier.Storage.Mongo/Observations/AdvisoryObservationLookup.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Observations/AdvisoryObservationLookup.cs similarity index 97% rename from src/StellaOps.Concelier.Storage.Mongo/Observations/AdvisoryObservationLookup.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Observations/AdvisoryObservationLookup.cs index 6c17e97b..52f3cc30 100644 --- a/src/StellaOps.Concelier.Storage.Mongo/Observations/AdvisoryObservationLookup.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Observations/AdvisoryObservationLookup.cs @@ -1,60 +1,60 @@ -using System; -using System.Collections.Generic; -using StellaOps.Concelier.Core.Observations; -using StellaOps.Concelier.Models.Observations; - -namespace StellaOps.Concelier.Storage.Mongo.Observations; - -internal sealed class AdvisoryObservationLookup : IAdvisoryObservationLookup -{ - private readonly IAdvisoryObservationStore _store; - - public AdvisoryObservationLookup(IAdvisoryObservationStore store) - { - _store = store ?? throw new ArgumentNullException(nameof(store)); - } - - public ValueTask<IReadOnlyList<AdvisoryObservation>> ListByTenantAsync( - string tenant, - CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenant); - cancellationToken.ThrowIfCancellationRequested(); - - return new ValueTask<IReadOnlyList<AdvisoryObservation>>( - _store.ListByTenantAsync(tenant, cancellationToken)); - } - - public ValueTask<IReadOnlyList<AdvisoryObservation>> FindByFiltersAsync( - string tenant, - IReadOnlyCollection<string> observationIds, - IReadOnlyCollection<string> aliases, - IReadOnlyCollection<string> purls, - IReadOnlyCollection<string> cpes, - AdvisoryObservationCursor? cursor, - int limit, - CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenant); - ArgumentNullException.ThrowIfNull(observationIds); - ArgumentNullException.ThrowIfNull(aliases); - ArgumentNullException.ThrowIfNull(purls); - ArgumentNullException.ThrowIfNull(cpes); - if (limit <= 0) - { - throw new ArgumentOutOfRangeException(nameof(limit), "Limit must be greater than zero."); - } - cancellationToken.ThrowIfCancellationRequested(); - - return new ValueTask<IReadOnlyList<AdvisoryObservation>>( - _store.FindByFiltersAsync( - tenant, - observationIds, - aliases, - purls, - cpes, - cursor, - limit, - cancellationToken)); - } -} +using System; +using System.Collections.Generic; +using StellaOps.Concelier.Core.Observations; +using StellaOps.Concelier.Models.Observations; + +namespace StellaOps.Concelier.Storage.Mongo.Observations; + +internal sealed class AdvisoryObservationLookup : IAdvisoryObservationLookup +{ + private readonly IAdvisoryObservationStore _store; + + public AdvisoryObservationLookup(IAdvisoryObservationStore store) + { + _store = store ?? 
throw new ArgumentNullException(nameof(store)); + } + + public ValueTask<IReadOnlyList<AdvisoryObservation>> ListByTenantAsync( + string tenant, + CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenant); + cancellationToken.ThrowIfCancellationRequested(); + + return new ValueTask<IReadOnlyList<AdvisoryObservation>>( + _store.ListByTenantAsync(tenant, cancellationToken)); + } + + public ValueTask<IReadOnlyList<AdvisoryObservation>> FindByFiltersAsync( + string tenant, + IReadOnlyCollection<string> observationIds, + IReadOnlyCollection<string> aliases, + IReadOnlyCollection<string> purls, + IReadOnlyCollection<string> cpes, + AdvisoryObservationCursor? cursor, + int limit, + CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenant); + ArgumentNullException.ThrowIfNull(observationIds); + ArgumentNullException.ThrowIfNull(aliases); + ArgumentNullException.ThrowIfNull(purls); + ArgumentNullException.ThrowIfNull(cpes); + if (limit <= 0) + { + throw new ArgumentOutOfRangeException(nameof(limit), "Limit must be greater than zero."); + } + cancellationToken.ThrowIfCancellationRequested(); + + return new ValueTask<IReadOnlyList<AdvisoryObservation>>( + _store.FindByFiltersAsync( + tenant, + observationIds, + aliases, + purls, + cpes, + cursor, + limit, + cancellationToken)); + } +} diff --git a/src/StellaOps.Concelier.Storage.Mongo/Observations/AdvisoryObservationStore.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Observations/AdvisoryObservationStore.cs similarity index 97% rename from src/StellaOps.Concelier.Storage.Mongo/Observations/AdvisoryObservationStore.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Observations/AdvisoryObservationStore.cs index cf37e49e..21102d30 100644 --- a/src/StellaOps.Concelier.Storage.Mongo/Observations/AdvisoryObservationStore.cs +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Observations/AdvisoryObservationStore.cs @@ -1,137 +1,137 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using MongoDB.Driver; -using StellaOps.Concelier.Core.Observations; -using StellaOps.Concelier.Models.Observations; - -namespace StellaOps.Concelier.Storage.Mongo.Observations; - -internal sealed class AdvisoryObservationStore : IAdvisoryObservationStore -{ - private readonly IMongoCollection<AdvisoryObservationDocument> collection; - - public AdvisoryObservationStore(IMongoCollection<AdvisoryObservationDocument> collection) - { - this.collection = collection ?? throw new ArgumentNullException(nameof(collection)); - } - - public async Task<IReadOnlyList<AdvisoryObservation>> ListByTenantAsync(string tenant, CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenant); - - var filter = Builders<AdvisoryObservationDocument>.Filter.Eq(document => document.Tenant, tenant.ToLowerInvariant()); - var documents = await collection - .Find(filter) - .SortByDescending(document => document.CreatedAt) - .ThenBy(document => document.Id) - .ToListAsync(cancellationToken) - .ConfigureAwait(false); - - return documents.Select(AdvisoryObservationDocumentFactory.ToModel).ToArray(); - } - - public async Task<IReadOnlyList<AdvisoryObservation>> FindByFiltersAsync( - string tenant, - IEnumerable<string>? observationIds, - IEnumerable<string>? aliases, - IEnumerable<string>? purls, - IEnumerable<string>? cpes, - AdvisoryObservationCursor? 
cursor, - int limit, - CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenant); - if (limit <= 0) - { - throw new ArgumentOutOfRangeException(nameof(limit), "Limit must be greater than zero."); - } - cancellationToken.ThrowIfCancellationRequested(); - - var normalizedTenant = tenant.ToLowerInvariant(); - var normalizedObservationIds = NormalizeValues(observationIds, static value => value); - var normalizedAliases = NormalizeValues(aliases, static value => value.ToLowerInvariant()); - var normalizedPurls = NormalizeValues(purls, static value => value); - var normalizedCpes = NormalizeValues(cpes, static value => value); - - var builder = Builders<AdvisoryObservationDocument>.Filter; - var filters = new List<FilterDefinition<AdvisoryObservationDocument>> - { - builder.Eq(document => document.Tenant, normalizedTenant) - }; - - if (normalizedObservationIds.Length > 0) - { - filters.Add(builder.In(document => document.Id, normalizedObservationIds)); - } - - if (normalizedAliases.Length > 0) - { - filters.Add(builder.In("linkset.aliases", normalizedAliases)); - } - - if (normalizedPurls.Length > 0) - { - filters.Add(builder.In("linkset.purls", normalizedPurls)); - } - - if (normalizedCpes.Length > 0) - { - filters.Add(builder.In("linkset.cpes", normalizedCpes)); - } - - if (cursor.HasValue) - { - var createdAtUtc = cursor.Value.CreatedAt.UtcDateTime; - var observationId = cursor.Value.ObservationId; - var createdBefore = builder.Lt(document => document.CreatedAt, createdAtUtc); - var sameCreatedNextId = builder.And( - builder.Eq(document => document.CreatedAt, createdAtUtc), - builder.Gt(document => document.Id, observationId)); - - filters.Add(builder.Or(createdBefore, sameCreatedNextId)); - } - - var filter = builder.And(filters); - - var documents = await collection - .Find(filter) - .SortByDescending(document => document.CreatedAt) - .ThenBy(document => document.Id) - .Limit(limit) - .ToListAsync(cancellationToken) - .ConfigureAwait(false); - - return documents.Select(AdvisoryObservationDocumentFactory.ToModel).ToArray(); - } - - private static string[] NormalizeValues(IEnumerable<string>? values, Func<string, string> projector) - { - if (values is null) - { - return Array.Empty<string>(); - } - - var set = new HashSet<string>(StringComparer.Ordinal); - foreach (var value in values) - { - if (string.IsNullOrWhiteSpace(value)) - { - continue; - } - - var projected = projector(value.Trim()); - if (!string.IsNullOrEmpty(projected)) - { - set.Add(projected); - } - } - - if (set.Count == 0) - { - return Array.Empty<string>(); - } - - return set.ToArray(); - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using MongoDB.Driver; +using StellaOps.Concelier.Core.Observations; +using StellaOps.Concelier.Models.Observations; + +namespace StellaOps.Concelier.Storage.Mongo.Observations; + +internal sealed class AdvisoryObservationStore : IAdvisoryObservationStore +{ + private readonly IMongoCollection<AdvisoryObservationDocument> collection; + + public AdvisoryObservationStore(IMongoCollection<AdvisoryObservationDocument> collection) + { + this.collection = collection ?? 
throw new ArgumentNullException(nameof(collection)); + } + + public async Task<IReadOnlyList<AdvisoryObservation>> ListByTenantAsync(string tenant, CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenant); + + var filter = Builders<AdvisoryObservationDocument>.Filter.Eq(document => document.Tenant, tenant.ToLowerInvariant()); + var documents = await collection + .Find(filter) + .SortByDescending(document => document.CreatedAt) + .ThenBy(document => document.Id) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + + return documents.Select(AdvisoryObservationDocumentFactory.ToModel).ToArray(); + } + + public async Task<IReadOnlyList<AdvisoryObservation>> FindByFiltersAsync( + string tenant, + IEnumerable<string>? observationIds, + IEnumerable<string>? aliases, + IEnumerable<string>? purls, + IEnumerable<string>? cpes, + AdvisoryObservationCursor? cursor, + int limit, + CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenant); + if (limit <= 0) + { + throw new ArgumentOutOfRangeException(nameof(limit), "Limit must be greater than zero."); + } + cancellationToken.ThrowIfCancellationRequested(); + + var normalizedTenant = tenant.ToLowerInvariant(); + var normalizedObservationIds = NormalizeValues(observationIds, static value => value); + var normalizedAliases = NormalizeValues(aliases, static value => value.ToLowerInvariant()); + var normalizedPurls = NormalizeValues(purls, static value => value); + var normalizedCpes = NormalizeValues(cpes, static value => value); + + var builder = Builders<AdvisoryObservationDocument>.Filter; + var filters = new List<FilterDefinition<AdvisoryObservationDocument>> + { + builder.Eq(document => document.Tenant, normalizedTenant) + }; + + if (normalizedObservationIds.Length > 0) + { + filters.Add(builder.In(document => document.Id, normalizedObservationIds)); + } + + if (normalizedAliases.Length > 0) + { + filters.Add(builder.In("linkset.aliases", normalizedAliases)); + } + + if (normalizedPurls.Length > 0) + { + filters.Add(builder.In("linkset.purls", normalizedPurls)); + } + + if (normalizedCpes.Length > 0) + { + filters.Add(builder.In("linkset.cpes", normalizedCpes)); + } + + if (cursor.HasValue) + { + var createdAtUtc = cursor.Value.CreatedAt.UtcDateTime; + var observationId = cursor.Value.ObservationId; + var createdBefore = builder.Lt(document => document.CreatedAt, createdAtUtc); + var sameCreatedNextId = builder.And( + builder.Eq(document => document.CreatedAt, createdAtUtc), + builder.Gt(document => document.Id, observationId)); + + filters.Add(builder.Or(createdBefore, sameCreatedNextId)); + } + + var filter = builder.And(filters); + + var documents = await collection + .Find(filter) + .SortByDescending(document => document.CreatedAt) + .ThenBy(document => document.Id) + .Limit(limit) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + + return documents.Select(AdvisoryObservationDocumentFactory.ToModel).ToArray(); + } + + private static string[] NormalizeValues(IEnumerable<string>? 
values, Func<string, string> projector)
+    {
+        if (values is null)
+        {
+            return Array.Empty<string>();
+        }
+
+        var set = new HashSet<string>(StringComparer.Ordinal);
+        foreach (var value in values)
+        {
+            if (string.IsNullOrWhiteSpace(value))
+            {
+                continue;
+            }
+
+            var projected = projector(value.Trim());
+            if (!string.IsNullOrEmpty(projected))
+            {
+                set.Add(projected);
+            }
+        }
+
+        if (set.Count == 0)
+        {
+            return Array.Empty<string>();
+        }
+
+        return set.ToArray();
+    }
+}
diff --git a/src/StellaOps.Concelier.Storage.Mongo/Observations/IAdvisoryObservationStore.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Observations/IAdvisoryObservationStore.cs
similarity index 97%
rename from src/StellaOps.Concelier.Storage.Mongo/Observations/IAdvisoryObservationStore.cs
rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Observations/IAdvisoryObservationStore.cs
index 247b5400..428bf4cc 100644
--- a/src/StellaOps.Concelier.Storage.Mongo/Observations/IAdvisoryObservationStore.cs
+++ b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Observations/IAdvisoryObservationStore.cs
@@ -1,20 +1,20 @@
-using System.Collections.Generic;
-using StellaOps.Concelier.Models.Observations;
-using StellaOps.Concelier.Core.Observations;
-
-namespace StellaOps.Concelier.Storage.Mongo.Observations;
-
-public interface IAdvisoryObservationStore
-{
-    Task<IReadOnlyList<AdvisoryObservation>> ListByTenantAsync(string tenant, CancellationToken cancellationToken);
-
-    Task<IReadOnlyList<AdvisoryObservation>> FindByFiltersAsync(
-        string tenant,
-        IEnumerable<string>? observationIds,
-        IEnumerable<string>? aliases,
-        IEnumerable<string>? purls,
-        IEnumerable<string>? cpes,
-        AdvisoryObservationCursor? cursor,
-        int limit,
-        CancellationToken cancellationToken);
-}
+using System.Collections.Generic;
+using StellaOps.Concelier.Models.Observations;
+using StellaOps.Concelier.Core.Observations;
+
+namespace StellaOps.Concelier.Storage.Mongo.Observations;
+
+public interface IAdvisoryObservationStore
+{
+    Task<IReadOnlyList<AdvisoryObservation>> ListByTenantAsync(string tenant, CancellationToken cancellationToken);
+
+    Task<IReadOnlyList<AdvisoryObservation>> FindByFiltersAsync(
+        string tenant,
+        IEnumerable<string>? observationIds,
+        IEnumerable<string>? aliases,
+        IEnumerable<string>? purls,
+        IEnumerable<string>? cpes,
+        AdvisoryObservationCursor? cursor,
+        int limit,
+        CancellationToken cancellationToken);
+}
diff --git a/src/StellaOps.Concelier.Storage.Mongo/Properties/AssemblyInfo.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Properties/AssemblyInfo.cs
similarity index 100%
rename from src/StellaOps.Concelier.Storage.Mongo/Properties/AssemblyInfo.cs
rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Properties/AssemblyInfo.cs
diff --git a/src/StellaOps.Concelier.Storage.Mongo/PsirtFlags/IPsirtFlagStore.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/PsirtFlags/IPsirtFlagStore.cs
similarity index 100%
rename from src/StellaOps.Concelier.Storage.Mongo/PsirtFlags/IPsirtFlagStore.cs
rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/PsirtFlags/IPsirtFlagStore.cs
diff --git a/src/StellaOps.Concelier.Storage.Mongo/PsirtFlags/PsirtFlagDocument.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/PsirtFlags/PsirtFlagDocument.cs
similarity index 100%
rename from src/StellaOps.Concelier.Storage.Mongo/PsirtFlags/PsirtFlagDocument.cs
rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/PsirtFlags/PsirtFlagDocument.cs
diff --git a/src/StellaOps.Concelier.Storage.Mongo/PsirtFlags/PsirtFlagRecord.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/PsirtFlags/PsirtFlagRecord.cs
similarity index 100%
rename from src/StellaOps.Concelier.Storage.Mongo/PsirtFlags/PsirtFlagRecord.cs
rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/PsirtFlags/PsirtFlagRecord.cs
diff --git a/src/StellaOps.Concelier.Storage.Mongo/PsirtFlags/PsirtFlagStore.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/PsirtFlags/PsirtFlagStore.cs
similarity index 100%
rename from src/StellaOps.Concelier.Storage.Mongo/PsirtFlags/PsirtFlagStore.cs
rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/PsirtFlags/PsirtFlagStore.cs
diff --git a/src/StellaOps.Concelier.Storage.Mongo/Raw/MongoAdvisoryRawRepository.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Raw/MongoAdvisoryRawRepository.cs
similarity index 97%
rename from src/StellaOps.Concelier.Storage.Mongo/Raw/MongoAdvisoryRawRepository.cs
rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Raw/MongoAdvisoryRawRepository.cs
index e92494ff..71b9af23 100644
--- a/src/StellaOps.Concelier.Storage.Mongo/Raw/MongoAdvisoryRawRepository.cs
+++ b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Raw/MongoAdvisoryRawRepository.cs
@@ -1,720 +1,720 @@
-using System;
-using System.Collections.Generic;
-using System.Collections.Immutable;
-using System.Globalization;
-using System.Text;
-using System.Linq;
-using System.Text.Json;
-using MongoDB.Bson;
-using MongoDB.Driver;
-using MongoDB.Bson.IO;
-using Microsoft.Extensions.Logging;
-using StellaOps.Concelier.Core.Raw;
-using StellaOps.Concelier.RawModels;
-
-namespace StellaOps.Concelier.Storage.Mongo.Raw;
-
-internal sealed class MongoAdvisoryRawRepository : IAdvisoryRawRepository
-{
-    private const int CursorSegmentCount = 2;
-
-    private readonly IMongoCollection<BsonDocument> _collection;
-    private readonly TimeProvider _timeProvider;
-    private readonly ILogger<MongoAdvisoryRawRepository> _logger;
-
-    public MongoAdvisoryRawRepository(
-        IMongoDatabase database,
-        TimeProvider timeProvider,
-        ILogger<MongoAdvisoryRawRepository> logger)
-    {
-        ArgumentNullException.ThrowIfNull(database);
-        _timeProvider = timeProvider ??
throw new ArgumentNullException(nameof(timeProvider)); - _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - - _collection = database.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryRaw); - } - - public async Task<AdvisoryRawUpsertResult> UpsertAsync(AdvisoryRawDocument document, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(document); - - var tenant = document.Tenant; - var vendor = document.Source.Vendor; - var upstreamId = document.Upstream.UpstreamId; - var contentHash = document.Upstream.ContentHash; - - var baseFilter = Builders<BsonDocument>.Filter.Eq("tenant", tenant) & - Builders<BsonDocument>.Filter.Eq("source.vendor", vendor) & - Builders<BsonDocument>.Filter.Eq("upstream.upstream_id", upstreamId); - - var duplicateFilter = baseFilter & - Builders<BsonDocument>.Filter.Eq("upstream.content_hash", contentHash); - - var duplicate = await _collection - .Find(duplicateFilter) - .Limit(1) - .FirstOrDefaultAsync(cancellationToken) - .ConfigureAwait(false); - - if (duplicate is not null) - { - var existing = MapToRecord(duplicate); - return new AdvisoryRawUpsertResult(false, existing); - } - - var previous = await _collection - .Find(baseFilter) - .Sort(Builders<BsonDocument>.Sort.Descending("ingested_at").Descending("_id")) - .Limit(1) - .FirstOrDefaultAsync(cancellationToken) - .ConfigureAwait(false); - - var supersedesId = previous?["_id"]?.AsString; - var recordDocument = CreateBsonDocument(document, supersedesId); - - try - { - await _collection.InsertOneAsync(recordDocument, cancellationToken: cancellationToken).ConfigureAwait(false); - } - catch (MongoWriteException ex) when (ex.WriteError?.Category == ServerErrorCategory.DuplicateKey) - { - _logger.LogWarning( - ex, - "Duplicate key detected while inserting advisory_raw document tenant={Tenant} vendor={Vendor} upstream={Upstream} hash={Hash}", - tenant, - vendor, - upstreamId, - contentHash); - - var existingDoc = await _collection - .Find(duplicateFilter) - .Limit(1) - .FirstOrDefaultAsync(cancellationToken) - .ConfigureAwait(false); - - if (existingDoc is not null) - { - var existing = MapToRecord(existingDoc); - return new AdvisoryRawUpsertResult(false, existing); - } - - throw; - } - - var inserted = MapToRecord(recordDocument); - return new AdvisoryRawUpsertResult(true, inserted); - } - - public async Task<AdvisoryRawRecord?> FindByIdAsync(string tenant, string id, CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenant); - ArgumentException.ThrowIfNullOrWhiteSpace(id); - - var filter = Builders<BsonDocument>.Filter.Eq("tenant", tenant) & - Builders<BsonDocument>.Filter.Eq("_id", id); - - var document = await _collection - .Find(filter) - .Limit(1) - .FirstOrDefaultAsync(cancellationToken) - .ConfigureAwait(false); - - return document is null ? 
null : MapToRecord(document); - } - - public async Task<AdvisoryRawQueryResult> QueryAsync(AdvisoryRawQueryOptions options, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(options); - - var builder = Builders<BsonDocument>.Filter; - var filters = new List<FilterDefinition<BsonDocument>> - { - builder.Eq("tenant", options.Tenant) - }; - - if (!options.Vendors.IsDefaultOrEmpty) - { - filters.Add(builder.In("source.vendor", options.Vendors.Select(static vendor => vendor.Trim().ToLowerInvariant()))); - } - - if (!options.UpstreamIds.IsDefaultOrEmpty) - { - var upstreams = options.UpstreamIds - .Where(static id => !string.IsNullOrWhiteSpace(id)) - .Select(static id => id.Trim()) - .ToArray(); - if (upstreams.Length > 0) - { - filters.Add(builder.In("upstream.upstream_id", upstreams)); - } - } - - if (!options.ContentHashes.IsDefaultOrEmpty) - { - var hashes = options.ContentHashes - .Where(static hash => !string.IsNullOrWhiteSpace(hash)) - .Select(static hash => hash.Trim()) - .ToArray(); - if (hashes.Length > 0) - { - filters.Add(builder.In("upstream.content_hash", hashes)); - } - } - - if (!options.Aliases.IsDefaultOrEmpty) - { - var aliases = options.Aliases - .Where(static alias => !string.IsNullOrWhiteSpace(alias)) - .Select(static alias => alias.Trim().ToLowerInvariant()) - .ToArray(); - if (aliases.Length > 0) - { - filters.Add(builder.In("linkset.aliases", aliases)); - } - } - - if (!options.PackageUrls.IsDefaultOrEmpty) - { - var purls = options.PackageUrls - .Where(static purl => !string.IsNullOrWhiteSpace(purl)) - .Select(static purl => purl.Trim()) - .ToArray(); - if (purls.Length > 0) - { - filters.Add(builder.In("linkset.purls", purls)); - } - } - - if (options.Since is { } since) - { - filters.Add(builder.Gte("ingested_at", since.ToUniversalTime().UtcDateTime)); - } - - if (!string.IsNullOrWhiteSpace(options.Cursor) && TryDecodeCursor(options.Cursor, out var cursor)) - { - var ingestTime = cursor.IngestedAt.UtcDateTime; - var cursorFilter = builder.Or( - builder.Lt("ingested_at", ingestTime), - builder.And( - builder.Eq("ingested_at", ingestTime), - builder.Gt("_id", cursor.Id))); - filters.Add(cursorFilter); - } - - var filter = filters.Count == 1 ? filters[0] : builder.And(filters); - var limit = Math.Clamp(options.Limit, 1, 200); - var sort = Builders<BsonDocument>.Sort.Descending("ingested_at").Descending("_id"); - - var documents = await _collection - .Find(filter) - .Sort(sort) - .Limit(limit + 1) - .ToListAsync(cancellationToken) - .ConfigureAwait(false); - - var hasMore = documents.Count > limit; - if (hasMore) - { - documents.RemoveAt(documents.Count - 1); - } - - var records = documents.Select(MapToRecord).ToArray(); - var nextCursor = hasMore && records.Length > 0 - ? 
EncodeCursor(records[^1].IngestedAt.UtcDateTime, records[^1].Id) - : null; - - return new AdvisoryRawQueryResult(records, nextCursor, hasMore); - } - - public async Task<IReadOnlyList<AdvisoryRawRecord>> ListForVerificationAsync( - string tenant, - DateTimeOffset since, - DateTimeOffset until, - IReadOnlyCollection<string> sourceVendors, - CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenant); - if (until < since) - { - throw new ArgumentException("Verification window end must not precede the start."); - } - - var builder = Builders<BsonDocument>.Filter; - var filters = new List<FilterDefinition<BsonDocument>> - { - builder.Eq("tenant", tenant), - builder.Gte("ingested_at", since.ToUniversalTime().UtcDateTime), - builder.Lte("ingested_at", until.ToUniversalTime().UtcDateTime) - }; - - if (sourceVendors is { Count: > 0 }) - { - filters.Add(builder.In("source.vendor", sourceVendors.Select(static vendor => vendor.Trim().ToLowerInvariant()))); - } - - var filter = builder.And(filters); - var sort = Builders<BsonDocument>.Sort.Ascending("ingested_at").Ascending("_id"); - - var documents = await _collection - .Find(filter) - .Sort(sort) - .ToListAsync(cancellationToken) - .ConfigureAwait(false); - - return documents.Select(MapToRecord).ToArray(); - } - - private BsonDocument CreateBsonDocument(AdvisoryRawDocument document, string? supersedesId) - { - var now = _timeProvider.GetUtcNow().UtcDateTime; - var id = BuildDocumentId(document); - var supersedesValue = string.IsNullOrWhiteSpace(supersedesId) ? document.Supersedes : supersedesId; - - var source = new BsonDocument - { - { "vendor", document.Source.Vendor }, - { "connector", document.Source.Connector }, - { "version", document.Source.ConnectorVersion } - }; - if (!string.IsNullOrWhiteSpace(document.Source.Stream)) - { - source["stream"] = document.Source.Stream; - } - - var signature = new BsonDocument - { - { "present", document.Upstream.Signature.Present } - }; - if (!string.IsNullOrWhiteSpace(document.Upstream.Signature.Format)) - { - signature["format"] = document.Upstream.Signature.Format; - } - if (!string.IsNullOrWhiteSpace(document.Upstream.Signature.KeyId)) - { - signature["key_id"] = document.Upstream.Signature.KeyId; - } - if (!string.IsNullOrWhiteSpace(document.Upstream.Signature.Signature)) - { - signature["sig"] = document.Upstream.Signature.Signature; - } - if (!string.IsNullOrWhiteSpace(document.Upstream.Signature.Certificate)) - { - signature["certificate"] = document.Upstream.Signature.Certificate; - } - if (!string.IsNullOrWhiteSpace(document.Upstream.Signature.Digest)) - { - signature["digest"] = document.Upstream.Signature.Digest; - } - - var provenance = new BsonDocument(); - if (document.Upstream.Provenance is not null) - { - foreach (var entry in document.Upstream.Provenance) - { - provenance[entry.Key] = entry.Value; - } - } - - var upstream = new BsonDocument - { - { "upstream_id", document.Upstream.UpstreamId }, - { "document_version", string.IsNullOrWhiteSpace(document.Upstream.DocumentVersion) ? 
BsonNull.Value : BsonValue.Create(document.Upstream.DocumentVersion) }, - { "retrieved_at", document.Upstream.RetrievedAt.UtcDateTime }, - { "content_hash", document.Upstream.ContentHash }, - { "signature", signature }, - { "provenance", provenance } - }; - - var content = new BsonDocument - { - { "format", document.Content.Format }, - { "raw", document.Content.Raw.GetRawText() } - }; - if (!string.IsNullOrWhiteSpace(document.Content.SpecVersion)) - { - content["spec_version"] = document.Content.SpecVersion; - } - if (!string.IsNullOrWhiteSpace(document.Content.Encoding)) - { - content["encoding"] = document.Content.Encoding; - } - - var identifiers = new BsonDocument - { - { "aliases", new BsonArray(document.Identifiers.Aliases) }, - { "primary", document.Identifiers.PrimaryId } - }; - - var references = new BsonArray(document.Linkset.References.Select(reference => - { - var referenceDocument = new BsonDocument - { - { "type", reference.Type }, - { "url", reference.Url } - }; - if (!string.IsNullOrWhiteSpace(reference.Source)) - { - referenceDocument["source"] = reference.Source; - } - return referenceDocument; - })); - - var notes = new BsonDocument(); - if (document.Linkset.Notes is not null) - { - foreach (var entry in document.Linkset.Notes) - { - notes[entry.Key] = entry.Value; - } - } - - var linkset = new BsonDocument - { - { "aliases", new BsonArray(document.Linkset.Aliases) }, - { "purls", new BsonArray(document.Linkset.PackageUrls) }, - { "cpes", new BsonArray(document.Linkset.Cpes) }, - { "references", references }, - { "reconciled_from", new BsonArray(document.Linkset.ReconciledFrom) }, - { "notes", notes } - }; - - var bson = new BsonDocument - { - { "_id", id }, - { "tenant", document.Tenant }, - { "source", source }, - { "upstream", upstream }, - { "content", content }, - { "identifiers", identifiers }, - { "linkset", linkset }, - { "supersedes", supersedesValue is null ? BsonNull.Value : supersedesValue }, - { "created_at", document.Upstream.RetrievedAt.UtcDateTime }, - { "ingested_at", now } - }; - - return bson; - } - - private AdvisoryRawRecord MapToRecord(BsonDocument document) - { - var tenant = GetRequiredString(document, "tenant"); - var source = MapSource(document["source"].AsBsonDocument); - var upstream = MapUpstream(document["upstream"].AsBsonDocument); - var content = MapContent(document["content"].AsBsonDocument); - var identifiers = MapIdentifiers(document["identifiers"].AsBsonDocument); - var linkset = MapLinkset(document["linkset"].AsBsonDocument); - var supersedes = document.GetValue("supersedes", BsonNull.Value); - - var rawDocument = new AdvisoryRawDocument( - tenant, - source, - upstream, - content, - identifiers, - linkset, - supersedes.IsBsonNull ? 
null : supersedes.AsString); - - var ingestedAt = GetDateTimeOffset(document, "ingested_at", rawDocument.Upstream.RetrievedAt); - var createdAt = GetDateTimeOffset(document, "created_at", rawDocument.Upstream.RetrievedAt); - - return new AdvisoryRawRecord( - document.GetValue("_id").AsString, - rawDocument, - ingestedAt, - createdAt); - } - - private static RawSourceMetadata MapSource(BsonDocument source) - { - return new RawSourceMetadata( - GetRequiredString(source, "vendor"), - GetRequiredString(source, "connector"), - GetRequiredString(source, "version"), - GetOptionalString(source, "stream")); - } - - private static RawUpstreamMetadata MapUpstream(BsonDocument upstream) - { - var provenance = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal); - if (upstream.TryGetValue("provenance", out var provenanceValue) && provenanceValue.IsBsonDocument) - { - foreach (var element in provenanceValue.AsBsonDocument) - { - provenance[element.Name] = BsonValueToString(element.Value); - } - } - - var signature = upstream["signature"].AsBsonDocument; - var signatureMetadata = new RawSignatureMetadata( - signature.GetValue("present", BsonBoolean.False).AsBoolean, - signature.TryGetValue("format", out var format) && !format.IsBsonNull ? format.AsString : null, - signature.TryGetValue("key_id", out var keyId) && !keyId.IsBsonNull ? keyId.AsString : null, - signature.TryGetValue("sig", out var sig) && !sig.IsBsonNull ? sig.AsString : null, - signature.TryGetValue("certificate", out var certificate) && !certificate.IsBsonNull ? certificate.AsString : null, - signature.TryGetValue("digest", out var digest) && !digest.IsBsonNull ? digest.AsString : null); - - return new RawUpstreamMetadata( - GetRequiredString(upstream, "upstream_id"), - upstream.TryGetValue("document_version", out var version) && !version.IsBsonNull ? version.AsString : null, - GetDateTimeOffset(upstream, "retrieved_at", DateTimeOffset.UtcNow), - GetRequiredString(upstream, "content_hash"), - signatureMetadata, - provenance.ToImmutable()); - } - - private static RawContent MapContent(BsonDocument content) - { - var rawValue = content.GetValue("raw", BsonNull.Value); - string rawJson; - if (rawValue.IsBsonNull) - { - rawJson = "{}"; - } - else if (rawValue.IsString) - { - rawJson = rawValue.AsString ?? "{}"; - } - else - { - rawJson = rawValue.ToJson(new JsonWriterSettings { OutputMode = JsonOutputMode.RelaxedExtendedJson }); - } - - using var document = System.Text.Json.JsonDocument.Parse(string.IsNullOrWhiteSpace(rawJson) ? "{}" : rawJson); - - return new RawContent( - GetRequiredString(content, "format"), - content.TryGetValue("spec_version", out var specVersion) && !specVersion.IsBsonNull ? specVersion.AsString : null, - document.RootElement.Clone(), - content.TryGetValue("encoding", out var encoding) && !encoding.IsBsonNull ? encoding.AsString : null); - } - - private static RawIdentifiers MapIdentifiers(BsonDocument identifiers) - { - var aliases = identifiers.TryGetValue("aliases", out var aliasValue) && aliasValue.IsBsonArray - ? aliasValue.AsBsonArray.Select(BsonValueToString).ToImmutableArray() - : ImmutableArray<string>.Empty; - - return new RawIdentifiers( - aliases, - GetRequiredString(identifiers, "primary")); - } - - private static RawLinkset MapLinkset(BsonDocument linkset) - { - var aliases = linkset.TryGetValue("aliases", out var aliasesValue) && aliasesValue.IsBsonArray - ? 
aliasesValue.AsBsonArray.Select(BsonValueToString).ToImmutableArray() - : ImmutableArray<string>.Empty; - - var purls = linkset.TryGetValue("purls", out var purlsValue) && purlsValue.IsBsonArray - ? purlsValue.AsBsonArray.Select(BsonValueToString).ToImmutableArray() - : ImmutableArray<string>.Empty; - - var cpes = linkset.TryGetValue("cpes", out var cpesValue) && cpesValue.IsBsonArray - ? cpesValue.AsBsonArray.Select(BsonValueToString).ToImmutableArray() - : ImmutableArray<string>.Empty; - - var references = linkset.TryGetValue("references", out var referencesValue) && referencesValue.IsBsonArray - ? referencesValue.AsBsonArray - .Where(static value => value.IsBsonDocument) - .Select(value => - { - var doc = value.AsBsonDocument; - return new RawReference( - GetRequiredString(doc, "type"), - GetRequiredString(doc, "url"), - doc.TryGetValue("source", out var sourceValue) && !sourceValue.IsBsonNull ? sourceValue.AsString : null); - }) - .ToImmutableArray() - : ImmutableArray<RawReference>.Empty; - - var reconciledFrom = linkset.TryGetValue("reconciled_from", out var reconciledValue) && reconciledValue.IsBsonArray - ? reconciledValue.AsBsonArray.Select(BsonValueToString).ToImmutableArray() - : ImmutableArray<string>.Empty; - - var notesBuilder = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal); - if (linkset.TryGetValue("notes", out var notesValue) && notesValue.IsBsonDocument) - { - foreach (var element in notesValue.AsBsonDocument) - { - notesBuilder[element.Name] = BsonValueToString(element.Value); - } - } - - return new RawLinkset - { - Aliases = aliases, - PackageUrls = purls, - Cpes = cpes, - References = references, - ReconciledFrom = reconciledFrom, - Notes = notesBuilder.ToImmutable() - }; - } - - private static DateTimeOffset GetDateTimeOffset(BsonDocument document, string field, DateTimeOffset fallback) - { - if (!document.TryGetValue(field, out var value) || value.IsBsonNull) - { - return fallback; - } - - return BsonValueToDateTimeOffset(value) ?? fallback; - } - - private static DateTimeOffset? BsonValueToDateTimeOffset(BsonValue value) - { - switch (value.BsonType) - { - case BsonType.DateTime: - var dateTime = value.ToUniversalTime(); - return new DateTimeOffset(DateTime.SpecifyKind(dateTime, DateTimeKind.Utc)); - - case BsonType.String: - if (DateTimeOffset.TryParse(value.AsString, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal, out var parsed)) - { - return parsed.ToUniversalTime(); - } - break; - - case BsonType.Int64: - return DateTimeOffset.FromUnixTimeMilliseconds(value.AsInt64).ToUniversalTime(); - } - - return null; - } - - private static string GetRequiredString(BsonDocument document, string key) - { - if (!document.TryGetValue(key, out var value) || value.IsBsonNull) - { - return string.Empty; - } - - return value.IsString ? value.AsString : value.ToString() ?? string.Empty; - } - - private static string? GetOptionalString(BsonDocument document, string key) - { - if (!document.TryGetValue(key, out var value) || value.IsBsonNull) - { - return null; - } - - return value.IsString ? value.AsString : value.ToString(); - } - - private static string BsonValueToString(BsonValue value) - { - if (value.IsString) - { - return value.AsString ?? string.Empty; - } - - if (value.IsBsonNull) - { - return string.Empty; - } - - return value.ToString() ?? 
string.Empty; - } - - private static string BuildDocumentId(AdvisoryRawDocument document) - { - var vendorSegment = SanitizeIdSegment(document.Source.Vendor); - var upstreamSegment = SanitizeIdSegment(document.Upstream.UpstreamId); - var revisionSegment = ComputeRevisionSegment(document.Upstream); - return $"advisory_raw:{vendorSegment}:{upstreamSegment}:{revisionSegment}"; - } - - private static string ComputeRevisionSegment(RawUpstreamMetadata upstream) - { - var hashSegment = SanitizeIdSegment(upstream.ContentHash.Replace(":", "-")); - if (string.IsNullOrWhiteSpace(upstream.DocumentVersion)) - { - return hashSegment; - } - - var versionSegment = SanitizeIdSegment(upstream.DocumentVersion); - if (hashSegment.Length > 12) - { - hashSegment = hashSegment[..12]; - } - - return $"{versionSegment}-{hashSegment}"; - } - - private static string SanitizeIdSegment(string? value) - { - if (string.IsNullOrWhiteSpace(value)) - { - return "unknown"; - } - - var builder = new StringBuilder(value.Length); - - foreach (var character in value.Trim()) - { - if (char.IsLetterOrDigit(character)) - { - builder.Append(char.ToLowerInvariant(character)); - } - else if (character is '-' or '.') - { - builder.Append(character); - } - else - { - builder.Append('-'); - } - } - - var sanitized = builder.ToString().Trim('-'); - if (sanitized.Length == 0) - { - return "unknown"; - } - - if (sanitized.Length > 64) - { - sanitized = sanitized[..64]; - } - - return sanitized; - } - - private static string EncodeCursor(DateTime dateTimeUtc, string id) - { - var payload = $"{dateTimeUtc.Ticks}:{id}"; - return Convert.ToBase64String(Encoding.UTF8.GetBytes(payload)); - } - - private static bool TryDecodeCursor(string cursor, out AdvisoryRawCursor result) - { - result = default; - try - { - var bytes = Convert.FromBase64String(cursor); - var payload = Encoding.UTF8.GetString(bytes); - var parts = payload.Split(':', CursorSegmentCount); - if (parts.Length != CursorSegmentCount) - { - return false; - } - - if (!long.TryParse(parts[0], NumberStyles.Integer, CultureInfo.InvariantCulture, out var ticks)) - { - return false; - } - - var dateTime = new DateTime(ticks, DateTimeKind.Utc); - result = new AdvisoryRawCursor(new DateTimeOffset(dateTime), parts[1]); - return true; - } - catch - { - return false; - } - } - - private readonly record struct AdvisoryRawCursor(DateTimeOffset IngestedAt, string Id); -} +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Globalization; +using System.Text; +using System.Linq; +using System.Text.Json; +using MongoDB.Bson; +using MongoDB.Driver; +using MongoDB.Bson.IO; +using Microsoft.Extensions.Logging; +using StellaOps.Concelier.Core.Raw; +using StellaOps.Concelier.RawModels; + +namespace StellaOps.Concelier.Storage.Mongo.Raw; + +internal sealed class MongoAdvisoryRawRepository : IAdvisoryRawRepository +{ + private const int CursorSegmentCount = 2; + + private readonly IMongoCollection<BsonDocument> _collection; + private readonly TimeProvider _timeProvider; + private readonly ILogger<MongoAdvisoryRawRepository> _logger; + + public MongoAdvisoryRawRepository( + IMongoDatabase database, + TimeProvider timeProvider, + ILogger<MongoAdvisoryRawRepository> logger) + { + ArgumentNullException.ThrowIfNull(database); + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + + _collection = database.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryRaw); + } + + public async Task<AdvisoryRawUpsertResult> UpsertAsync(AdvisoryRawDocument document, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(document); + + var tenant = document.Tenant; + var vendor = document.Source.Vendor; + var upstreamId = document.Upstream.UpstreamId; + var contentHash = document.Upstream.ContentHash; + + var baseFilter = Builders<BsonDocument>.Filter.Eq("tenant", tenant) & + Builders<BsonDocument>.Filter.Eq("source.vendor", vendor) & + Builders<BsonDocument>.Filter.Eq("upstream.upstream_id", upstreamId); + + var duplicateFilter = baseFilter & + Builders<BsonDocument>.Filter.Eq("upstream.content_hash", contentHash); + + var duplicate = await _collection + .Find(duplicateFilter) + .Limit(1) + .FirstOrDefaultAsync(cancellationToken) + .ConfigureAwait(false); + + if (duplicate is not null) + { + var existing = MapToRecord(duplicate); + return new AdvisoryRawUpsertResult(false, existing); + } + + var previous = await _collection + .Find(baseFilter) + .Sort(Builders<BsonDocument>.Sort.Descending("ingested_at").Descending("_id")) + .Limit(1) + .FirstOrDefaultAsync(cancellationToken) + .ConfigureAwait(false); + + var supersedesId = previous?["_id"]?.AsString; + var recordDocument = CreateBsonDocument(document, supersedesId); + + try + { + await _collection.InsertOneAsync(recordDocument, cancellationToken: cancellationToken).ConfigureAwait(false); + } + catch (MongoWriteException ex) when (ex.WriteError?.Category == ServerErrorCategory.DuplicateKey) + { + _logger.LogWarning( + ex, + "Duplicate key detected while inserting advisory_raw document tenant={Tenant} vendor={Vendor} upstream={Upstream} hash={Hash}", + tenant, + vendor, + upstreamId, + contentHash); + + var existingDoc = await _collection + .Find(duplicateFilter) + .Limit(1) + .FirstOrDefaultAsync(cancellationToken) + .ConfigureAwait(false); + + if (existingDoc is not null) + { + var existing = MapToRecord(existingDoc); + return new AdvisoryRawUpsertResult(false, existing); + } + + throw; + } + + var inserted = MapToRecord(recordDocument); + return new AdvisoryRawUpsertResult(true, inserted); + } + + public async Task<AdvisoryRawRecord?> FindByIdAsync(string tenant, string id, CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenant); + ArgumentException.ThrowIfNullOrWhiteSpace(id); + + var filter = Builders<BsonDocument>.Filter.Eq("tenant", tenant) & + Builders<BsonDocument>.Filter.Eq("_id", id); + + var document = await _collection + .Find(filter) + .Limit(1) + .FirstOrDefaultAsync(cancellationToken) + .ConfigureAwait(false); + + return document is null ? 
null : MapToRecord(document); + } + + public async Task<AdvisoryRawQueryResult> QueryAsync(AdvisoryRawQueryOptions options, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(options); + + var builder = Builders<BsonDocument>.Filter; + var filters = new List<FilterDefinition<BsonDocument>> + { + builder.Eq("tenant", options.Tenant) + }; + + if (!options.Vendors.IsDefaultOrEmpty) + { + filters.Add(builder.In("source.vendor", options.Vendors.Select(static vendor => vendor.Trim().ToLowerInvariant()))); + } + + if (!options.UpstreamIds.IsDefaultOrEmpty) + { + var upstreams = options.UpstreamIds + .Where(static id => !string.IsNullOrWhiteSpace(id)) + .Select(static id => id.Trim()) + .ToArray(); + if (upstreams.Length > 0) + { + filters.Add(builder.In("upstream.upstream_id", upstreams)); + } + } + + if (!options.ContentHashes.IsDefaultOrEmpty) + { + var hashes = options.ContentHashes + .Where(static hash => !string.IsNullOrWhiteSpace(hash)) + .Select(static hash => hash.Trim()) + .ToArray(); + if (hashes.Length > 0) + { + filters.Add(builder.In("upstream.content_hash", hashes)); + } + } + + if (!options.Aliases.IsDefaultOrEmpty) + { + var aliases = options.Aliases + .Where(static alias => !string.IsNullOrWhiteSpace(alias)) + .Select(static alias => alias.Trim().ToLowerInvariant()) + .ToArray(); + if (aliases.Length > 0) + { + filters.Add(builder.In("linkset.aliases", aliases)); + } + } + + if (!options.PackageUrls.IsDefaultOrEmpty) + { + var purls = options.PackageUrls + .Where(static purl => !string.IsNullOrWhiteSpace(purl)) + .Select(static purl => purl.Trim()) + .ToArray(); + if (purls.Length > 0) + { + filters.Add(builder.In("linkset.purls", purls)); + } + } + + if (options.Since is { } since) + { + filters.Add(builder.Gte("ingested_at", since.ToUniversalTime().UtcDateTime)); + } + + if (!string.IsNullOrWhiteSpace(options.Cursor) && TryDecodeCursor(options.Cursor, out var cursor)) + { + var ingestTime = cursor.IngestedAt.UtcDateTime; + var cursorFilter = builder.Or( + builder.Lt("ingested_at", ingestTime), + builder.And( + builder.Eq("ingested_at", ingestTime), + builder.Gt("_id", cursor.Id))); + filters.Add(cursorFilter); + } + + var filter = filters.Count == 1 ? filters[0] : builder.And(filters); + var limit = Math.Clamp(options.Limit, 1, 200); + var sort = Builders<BsonDocument>.Sort.Descending("ingested_at").Descending("_id"); + + var documents = await _collection + .Find(filter) + .Sort(sort) + .Limit(limit + 1) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + + var hasMore = documents.Count > limit; + if (hasMore) + { + documents.RemoveAt(documents.Count - 1); + } + + var records = documents.Select(MapToRecord).ToArray(); + var nextCursor = hasMore && records.Length > 0 + ? 
EncodeCursor(records[^1].IngestedAt.UtcDateTime, records[^1].Id) + : null; + + return new AdvisoryRawQueryResult(records, nextCursor, hasMore); + } + + public async Task<IReadOnlyList<AdvisoryRawRecord>> ListForVerificationAsync( + string tenant, + DateTimeOffset since, + DateTimeOffset until, + IReadOnlyCollection<string> sourceVendors, + CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenant); + if (until < since) + { + throw new ArgumentException("Verification window end must not precede the start."); + } + + var builder = Builders<BsonDocument>.Filter; + var filters = new List<FilterDefinition<BsonDocument>> + { + builder.Eq("tenant", tenant), + builder.Gte("ingested_at", since.ToUniversalTime().UtcDateTime), + builder.Lte("ingested_at", until.ToUniversalTime().UtcDateTime) + }; + + if (sourceVendors is { Count: > 0 }) + { + filters.Add(builder.In("source.vendor", sourceVendors.Select(static vendor => vendor.Trim().ToLowerInvariant()))); + } + + var filter = builder.And(filters); + var sort = Builders<BsonDocument>.Sort.Ascending("ingested_at").Ascending("_id"); + + var documents = await _collection + .Find(filter) + .Sort(sort) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + + return documents.Select(MapToRecord).ToArray(); + } + + private BsonDocument CreateBsonDocument(AdvisoryRawDocument document, string? supersedesId) + { + var now = _timeProvider.GetUtcNow().UtcDateTime; + var id = BuildDocumentId(document); + var supersedesValue = string.IsNullOrWhiteSpace(supersedesId) ? document.Supersedes : supersedesId; + + var source = new BsonDocument + { + { "vendor", document.Source.Vendor }, + { "connector", document.Source.Connector }, + { "version", document.Source.ConnectorVersion } + }; + if (!string.IsNullOrWhiteSpace(document.Source.Stream)) + { + source["stream"] = document.Source.Stream; + } + + var signature = new BsonDocument + { + { "present", document.Upstream.Signature.Present } + }; + if (!string.IsNullOrWhiteSpace(document.Upstream.Signature.Format)) + { + signature["format"] = document.Upstream.Signature.Format; + } + if (!string.IsNullOrWhiteSpace(document.Upstream.Signature.KeyId)) + { + signature["key_id"] = document.Upstream.Signature.KeyId; + } + if (!string.IsNullOrWhiteSpace(document.Upstream.Signature.Signature)) + { + signature["sig"] = document.Upstream.Signature.Signature; + } + if (!string.IsNullOrWhiteSpace(document.Upstream.Signature.Certificate)) + { + signature["certificate"] = document.Upstream.Signature.Certificate; + } + if (!string.IsNullOrWhiteSpace(document.Upstream.Signature.Digest)) + { + signature["digest"] = document.Upstream.Signature.Digest; + } + + var provenance = new BsonDocument(); + if (document.Upstream.Provenance is not null) + { + foreach (var entry in document.Upstream.Provenance) + { + provenance[entry.Key] = entry.Value; + } + } + + var upstream = new BsonDocument + { + { "upstream_id", document.Upstream.UpstreamId }, + { "document_version", string.IsNullOrWhiteSpace(document.Upstream.DocumentVersion) ? 
BsonNull.Value : BsonValue.Create(document.Upstream.DocumentVersion) }, + { "retrieved_at", document.Upstream.RetrievedAt.UtcDateTime }, + { "content_hash", document.Upstream.ContentHash }, + { "signature", signature }, + { "provenance", provenance } + }; + + var content = new BsonDocument + { + { "format", document.Content.Format }, + { "raw", document.Content.Raw.GetRawText() } + }; + if (!string.IsNullOrWhiteSpace(document.Content.SpecVersion)) + { + content["spec_version"] = document.Content.SpecVersion; + } + if (!string.IsNullOrWhiteSpace(document.Content.Encoding)) + { + content["encoding"] = document.Content.Encoding; + } + + var identifiers = new BsonDocument + { + { "aliases", new BsonArray(document.Identifiers.Aliases) }, + { "primary", document.Identifiers.PrimaryId } + }; + + var references = new BsonArray(document.Linkset.References.Select(reference => + { + var referenceDocument = new BsonDocument + { + { "type", reference.Type }, + { "url", reference.Url } + }; + if (!string.IsNullOrWhiteSpace(reference.Source)) + { + referenceDocument["source"] = reference.Source; + } + return referenceDocument; + })); + + var notes = new BsonDocument(); + if (document.Linkset.Notes is not null) + { + foreach (var entry in document.Linkset.Notes) + { + notes[entry.Key] = entry.Value; + } + } + + var linkset = new BsonDocument + { + { "aliases", new BsonArray(document.Linkset.Aliases) }, + { "purls", new BsonArray(document.Linkset.PackageUrls) }, + { "cpes", new BsonArray(document.Linkset.Cpes) }, + { "references", references }, + { "reconciled_from", new BsonArray(document.Linkset.ReconciledFrom) }, + { "notes", notes } + }; + + var bson = new BsonDocument + { + { "_id", id }, + { "tenant", document.Tenant }, + { "source", source }, + { "upstream", upstream }, + { "content", content }, + { "identifiers", identifiers }, + { "linkset", linkset }, + { "supersedes", supersedesValue is null ? BsonNull.Value : supersedesValue }, + { "created_at", document.Upstream.RetrievedAt.UtcDateTime }, + { "ingested_at", now } + }; + + return bson; + } + + private AdvisoryRawRecord MapToRecord(BsonDocument document) + { + var tenant = GetRequiredString(document, "tenant"); + var source = MapSource(document["source"].AsBsonDocument); + var upstream = MapUpstream(document["upstream"].AsBsonDocument); + var content = MapContent(document["content"].AsBsonDocument); + var identifiers = MapIdentifiers(document["identifiers"].AsBsonDocument); + var linkset = MapLinkset(document["linkset"].AsBsonDocument); + var supersedes = document.GetValue("supersedes", BsonNull.Value); + + var rawDocument = new AdvisoryRawDocument( + tenant, + source, + upstream, + content, + identifiers, + linkset, + supersedes.IsBsonNull ? 
null : supersedes.AsString); + + var ingestedAt = GetDateTimeOffset(document, "ingested_at", rawDocument.Upstream.RetrievedAt); + var createdAt = GetDateTimeOffset(document, "created_at", rawDocument.Upstream.RetrievedAt); + + return new AdvisoryRawRecord( + document.GetValue("_id").AsString, + rawDocument, + ingestedAt, + createdAt); + } + + private static RawSourceMetadata MapSource(BsonDocument source) + { + return new RawSourceMetadata( + GetRequiredString(source, "vendor"), + GetRequiredString(source, "connector"), + GetRequiredString(source, "version"), + GetOptionalString(source, "stream")); + } + + private static RawUpstreamMetadata MapUpstream(BsonDocument upstream) + { + var provenance = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal); + if (upstream.TryGetValue("provenance", out var provenanceValue) && provenanceValue.IsBsonDocument) + { + foreach (var element in provenanceValue.AsBsonDocument) + { + provenance[element.Name] = BsonValueToString(element.Value); + } + } + + var signature = upstream["signature"].AsBsonDocument; + var signatureMetadata = new RawSignatureMetadata( + signature.GetValue("present", BsonBoolean.False).AsBoolean, + signature.TryGetValue("format", out var format) && !format.IsBsonNull ? format.AsString : null, + signature.TryGetValue("key_id", out var keyId) && !keyId.IsBsonNull ? keyId.AsString : null, + signature.TryGetValue("sig", out var sig) && !sig.IsBsonNull ? sig.AsString : null, + signature.TryGetValue("certificate", out var certificate) && !certificate.IsBsonNull ? certificate.AsString : null, + signature.TryGetValue("digest", out var digest) && !digest.IsBsonNull ? digest.AsString : null); + + return new RawUpstreamMetadata( + GetRequiredString(upstream, "upstream_id"), + upstream.TryGetValue("document_version", out var version) && !version.IsBsonNull ? version.AsString : null, + GetDateTimeOffset(upstream, "retrieved_at", DateTimeOffset.UtcNow), + GetRequiredString(upstream, "content_hash"), + signatureMetadata, + provenance.ToImmutable()); + } + + private static RawContent MapContent(BsonDocument content) + { + var rawValue = content.GetValue("raw", BsonNull.Value); + string rawJson; + if (rawValue.IsBsonNull) + { + rawJson = "{}"; + } + else if (rawValue.IsString) + { + rawJson = rawValue.AsString ?? "{}"; + } + else + { + rawJson = rawValue.ToJson(new JsonWriterSettings { OutputMode = JsonOutputMode.RelaxedExtendedJson }); + } + + using var document = System.Text.Json.JsonDocument.Parse(string.IsNullOrWhiteSpace(rawJson) ? "{}" : rawJson); + + return new RawContent( + GetRequiredString(content, "format"), + content.TryGetValue("spec_version", out var specVersion) && !specVersion.IsBsonNull ? specVersion.AsString : null, + document.RootElement.Clone(), + content.TryGetValue("encoding", out var encoding) && !encoding.IsBsonNull ? encoding.AsString : null); + } + + private static RawIdentifiers MapIdentifiers(BsonDocument identifiers) + { + var aliases = identifiers.TryGetValue("aliases", out var aliasValue) && aliasValue.IsBsonArray + ? aliasValue.AsBsonArray.Select(BsonValueToString).ToImmutableArray() + : ImmutableArray<string>.Empty; + + return new RawIdentifiers( + aliases, + GetRequiredString(identifiers, "primary")); + } + + private static RawLinkset MapLinkset(BsonDocument linkset) + { + var aliases = linkset.TryGetValue("aliases", out var aliasesValue) && aliasesValue.IsBsonArray + ? 
aliasesValue.AsBsonArray.Select(BsonValueToString).ToImmutableArray() + : ImmutableArray<string>.Empty; + + var purls = linkset.TryGetValue("purls", out var purlsValue) && purlsValue.IsBsonArray + ? purlsValue.AsBsonArray.Select(BsonValueToString).ToImmutableArray() + : ImmutableArray<string>.Empty; + + var cpes = linkset.TryGetValue("cpes", out var cpesValue) && cpesValue.IsBsonArray + ? cpesValue.AsBsonArray.Select(BsonValueToString).ToImmutableArray() + : ImmutableArray<string>.Empty; + + var references = linkset.TryGetValue("references", out var referencesValue) && referencesValue.IsBsonArray + ? referencesValue.AsBsonArray + .Where(static value => value.IsBsonDocument) + .Select(value => + { + var doc = value.AsBsonDocument; + return new RawReference( + GetRequiredString(doc, "type"), + GetRequiredString(doc, "url"), + doc.TryGetValue("source", out var sourceValue) && !sourceValue.IsBsonNull ? sourceValue.AsString : null); + }) + .ToImmutableArray() + : ImmutableArray<RawReference>.Empty; + + var reconciledFrom = linkset.TryGetValue("reconciled_from", out var reconciledValue) && reconciledValue.IsBsonArray + ? reconciledValue.AsBsonArray.Select(BsonValueToString).ToImmutableArray() + : ImmutableArray<string>.Empty; + + var notesBuilder = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal); + if (linkset.TryGetValue("notes", out var notesValue) && notesValue.IsBsonDocument) + { + foreach (var element in notesValue.AsBsonDocument) + { + notesBuilder[element.Name] = BsonValueToString(element.Value); + } + } + + return new RawLinkset + { + Aliases = aliases, + PackageUrls = purls, + Cpes = cpes, + References = references, + ReconciledFrom = reconciledFrom, + Notes = notesBuilder.ToImmutable() + }; + } + + private static DateTimeOffset GetDateTimeOffset(BsonDocument document, string field, DateTimeOffset fallback) + { + if (!document.TryGetValue(field, out var value) || value.IsBsonNull) + { + return fallback; + } + + return BsonValueToDateTimeOffset(value) ?? fallback; + } + + private static DateTimeOffset? BsonValueToDateTimeOffset(BsonValue value) + { + switch (value.BsonType) + { + case BsonType.DateTime: + var dateTime = value.ToUniversalTime(); + return new DateTimeOffset(DateTime.SpecifyKind(dateTime, DateTimeKind.Utc)); + + case BsonType.String: + if (DateTimeOffset.TryParse(value.AsString, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal, out var parsed)) + { + return parsed.ToUniversalTime(); + } + break; + + case BsonType.Int64: + return DateTimeOffset.FromUnixTimeMilliseconds(value.AsInt64).ToUniversalTime(); + } + + return null; + } + + private static string GetRequiredString(BsonDocument document, string key) + { + if (!document.TryGetValue(key, out var value) || value.IsBsonNull) + { + return string.Empty; + } + + return value.IsString ? value.AsString : value.ToString() ?? string.Empty; + } + + private static string? GetOptionalString(BsonDocument document, string key) + { + if (!document.TryGetValue(key, out var value) || value.IsBsonNull) + { + return null; + } + + return value.IsString ? value.AsString : value.ToString(); + } + + private static string BsonValueToString(BsonValue value) + { + if (value.IsString) + { + return value.AsString ?? string.Empty; + } + + if (value.IsBsonNull) + { + return string.Empty; + } + + return value.ToString() ?? 
string.Empty; + } + + private static string BuildDocumentId(AdvisoryRawDocument document) + { + var vendorSegment = SanitizeIdSegment(document.Source.Vendor); + var upstreamSegment = SanitizeIdSegment(document.Upstream.UpstreamId); + var revisionSegment = ComputeRevisionSegment(document.Upstream); + return $"advisory_raw:{vendorSegment}:{upstreamSegment}:{revisionSegment}"; + } + + private static string ComputeRevisionSegment(RawUpstreamMetadata upstream) + { + var hashSegment = SanitizeIdSegment(upstream.ContentHash.Replace(":", "-")); + if (string.IsNullOrWhiteSpace(upstream.DocumentVersion)) + { + return hashSegment; + } + + var versionSegment = SanitizeIdSegment(upstream.DocumentVersion); + if (hashSegment.Length > 12) + { + hashSegment = hashSegment[..12]; + } + + return $"{versionSegment}-{hashSegment}"; + } + + private static string SanitizeIdSegment(string? value) + { + if (string.IsNullOrWhiteSpace(value)) + { + return "unknown"; + } + + var builder = new StringBuilder(value.Length); + + foreach (var character in value.Trim()) + { + if (char.IsLetterOrDigit(character)) + { + builder.Append(char.ToLowerInvariant(character)); + } + else if (character is '-' or '.') + { + builder.Append(character); + } + else + { + builder.Append('-'); + } + } + + var sanitized = builder.ToString().Trim('-'); + if (sanitized.Length == 0) + { + return "unknown"; + } + + if (sanitized.Length > 64) + { + sanitized = sanitized[..64]; + } + + return sanitized; + } + + private static string EncodeCursor(DateTime dateTimeUtc, string id) + { + var payload = $"{dateTimeUtc.Ticks}:{id}"; + return Convert.ToBase64String(Encoding.UTF8.GetBytes(payload)); + } + + private static bool TryDecodeCursor(string cursor, out AdvisoryRawCursor result) + { + result = default; + try + { + var bytes = Convert.FromBase64String(cursor); + var payload = Encoding.UTF8.GetString(bytes); + var parts = payload.Split(':', CursorSegmentCount); + if (parts.Length != CursorSegmentCount) + { + return false; + } + + if (!long.TryParse(parts[0], NumberStyles.Integer, CultureInfo.InvariantCulture, out var ticks)) + { + return false; + } + + var dateTime = new DateTime(ticks, DateTimeKind.Utc); + result = new AdvisoryRawCursor(new DateTimeOffset(dateTime), parts[1]); + return true; + } + catch + { + return false; + } + } + + private readonly record struct AdvisoryRawCursor(DateTimeOffset IngestedAt, string Id); +} diff --git a/src/StellaOps.Concelier.Storage.Mongo/RawDocumentRetentionService.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/RawDocumentRetentionService.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/RawDocumentRetentionService.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/RawDocumentRetentionService.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/ServiceCollectionExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/ServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/ServiceCollectionExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/ServiceCollectionExtensions.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/SourceStateDocument.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/SourceStateDocument.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/SourceStateDocument.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/SourceStateDocument.cs diff --git 
a/src/StellaOps.Concelier.Storage.Mongo/SourceStateRecord.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/SourceStateRecord.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/SourceStateRecord.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/SourceStateRecord.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/SourceStateRepositoryExtensions.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/SourceStateRepositoryExtensions.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/SourceStateRepositoryExtensions.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/SourceStateRepositoryExtensions.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/Statements/AdvisoryStatementDocument.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Statements/AdvisoryStatementDocument.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/Statements/AdvisoryStatementDocument.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Statements/AdvisoryStatementDocument.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/Statements/AdvisoryStatementRecord.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Statements/AdvisoryStatementRecord.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/Statements/AdvisoryStatementRecord.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Statements/AdvisoryStatementRecord.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/Statements/AdvisoryStatementStore.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Statements/AdvisoryStatementStore.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo/Statements/AdvisoryStatementStore.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/Statements/AdvisoryStatementStore.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj similarity index 97% rename from src/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj index 2cf31682..eca05ada 100644 --- a/src/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj @@ -1,18 +1,18 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <ItemGroup> - <PackageReference Include="MongoDB.Driver" Version="3.5.0" /> - <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> - </ItemGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Concelier.Core\StellaOps.Concelier.Core.csproj" /> - <ProjectReference Include="..\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj" /> - </ItemGroup> -</Project> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + 
<Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <ItemGroup> + <PackageReference Include="MongoDB.Driver" Version="3.5.0" /> + <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> + </ItemGroup> + <ItemGroup> + <ProjectReference Include="..\StellaOps.Concelier.Core\StellaOps.Concelier.Core.csproj" /> + <ProjectReference Include="..\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj" /> + </ItemGroup> +</Project> diff --git a/src/StellaOps.Concelier.Storage.Mongo/TASKS.md b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/TASKS.md similarity index 99% rename from src/StellaOps.Concelier.Storage.Mongo/TASKS.md rename to src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/TASKS.md index 59853dff..c07389df 100644 --- a/src/StellaOps.Concelier.Storage.Mongo/TASKS.md +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/TASKS.md @@ -1,30 +1,30 @@ -# TASKS — Epic 1: Aggregation-Only Contract -> **AOC Reminder:** storage enforces append-only raw documents; no precedence/severity/normalization in ingestion collections. -| ID | Status | Owner(s) | Depends on | Notes | -|---|---|---|---|---| -| CONCELIER-STORE-AOC-19-001 `advisory_raw schema validator` | DONE (2025-10-28) | Concelier Storage Guild | Mongo cluster ops sign-off | Author MongoDB JSON schema enforcing required fields (`source`, `upstream`, `content`, `linkset`, `tenant`) and forbidding normalized/severity fields. Include migration toggles for staged rollout. | -> 2025-10-28: Added configurable validator migration (`20251028_advisory_raw_validator`), bootstrapper collection registration, storage options toggle, and Mongo migration tests covering schema + enforcement levels. -> Docs alignment (2025-10-26): Validator expectations + deployment steps documented in `docs/deploy/containers.md` §1. -| CONCELIER-STORE-AOC-19-002 `idempotency unique index` | DONE (2025-10-28) | Concelier Storage Guild | CONCELIER-STORE-AOC-19-001 | Create compound unique index on `(source.vendor, upstream.upstream_id, upstream.content_hash, tenant)` with backfill script verifying existing data, and document offline validator bootstrap. | -> 2025-10-28: Added `20251028_advisory_raw_idempotency_index` migration that detects duplicate raw advisories before creating the unique compound index, wired into DI, and extended migration tests to cover index shape + duplicate handling with supporting package updates. -> Docs alignment (2025-10-26): Idempotency contract + supersedes metrics in `docs/ingestion/aggregation-only-contract.md` §7 and observability guide. -| CONCELIER-STORE-AOC-19-003 `append-only supersedes migration` | DONE (2025-10-28) | Concelier Storage Guild | CONCELIER-STORE-AOC-19-002 | Introduce migration that freezes legacy `advisories` writes, copies data into `_backup_*`, and backfills supersedes pointers for raw revisions. Provide rollback plan. | -> 2025-10-28: Added supersedes backfill migration (`20251028_advisory_supersedes_backfill`) that renames `advisory` to a read-only view, snapshots data into `_backup_20251028`, and walks raw revisions to populate deterministic supersedes chains with integration coverage and operator scripts. -> Docs alignment (2025-10-26): Rollback guidance added to `docs/deploy/containers.md` §6. 
-| CONCELIER-STORE-AOC-19-004 `validator deployment playbook` | DONE (2025-10-28) | Concelier Storage Guild, DevOps Guild | CONCELIER-STORE-AOC-19-001 | Update `MIGRATIONS.md` and Offline Kit docs to cover enabling validators, rolling restarts, and validator smoke tests for air-gapped installs. | -> 2025-10-28: Documented duplicate audit + migration workflow in `docs/deploy/containers.md`, Offline Kit guide, and `MIGRATIONS.md`; published `ops/devops/scripts/check-advisory-raw-duplicates.js` for staging/offline clusters. -> Docs alignment (2025-10-26): Offline kit requirements documented in `docs/deploy/containers.md` §5. - -## Policy Engine v2 - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| CONCELIER-POLICY-20-003 `Selection cursors` | TODO | Concelier Storage Guild | CONCELIER-STORE-AOC-19-002, POLICY-ENGINE-20-003 | Add advisory/vex selection cursors (per policy run) with change stream checkpoints, indexes, and offline migration scripts to support incremental evaluations. | - -## Link-Not-Merge v1 - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| CONCELIER-LNM-21-101 `Observations collections` | TODO | Concelier Storage Guild | CONCELIER-LNM-21-001 | Provision `advisory_observations` and `advisory_linksets` collections with hashed shard keys, TTL for ingest metadata, and required indexes (`aliases`, `purls`, `observation_ids`). | -| CONCELIER-LNM-21-102 `Migration tooling` | TODO | Concelier Storage Guild, DevOps Guild | CONCELIER-LNM-21-101 | Backfill legacy merged advisories into observation/linkset collections, create tombstones for merged docs, and supply rollback scripts. | -| CONCELIER-LNM-21-103 `Blob/store wiring` | TODO | Concelier Storage Guild | CONCELIER-LNM-21-101 | Store large raw payloads in object storage with pointers from observations; update bootstrapper/offline kit to seed sample blobs. | +# TASKS — Epic 1: Aggregation-Only Contract +> **AOC Reminder:** storage enforces append-only raw documents; no precedence/severity/normalization in ingestion collections. +| ID | Status | Owner(s) | Depends on | Notes | +|---|---|---|---|---| +| CONCELIER-STORE-AOC-19-001 `advisory_raw schema validator` | DONE (2025-10-28) | Concelier Storage Guild | Mongo cluster ops sign-off | Author MongoDB JSON schema enforcing required fields (`source`, `upstream`, `content`, `linkset`, `tenant`) and forbidding normalized/severity fields. Include migration toggles for staged rollout. | +> 2025-10-28: Added configurable validator migration (`20251028_advisory_raw_validator`), bootstrapper collection registration, storage options toggle, and Mongo migration tests covering schema + enforcement levels. +> Docs alignment (2025-10-26): Validator expectations + deployment steps documented in `docs/deploy/containers.md` §1. +| CONCELIER-STORE-AOC-19-002 `idempotency unique index` | DONE (2025-10-28) | Concelier Storage Guild | CONCELIER-STORE-AOC-19-001 | Create compound unique index on `(source.vendor, upstream.upstream_id, upstream.content_hash, tenant)` with backfill script verifying existing data, and document offline validator bootstrap. | +> 2025-10-28: Added `20251028_advisory_raw_idempotency_index` migration that detects duplicate raw advisories before creating the unique compound index, wired into DI, and extended migration tests to cover index shape + duplicate handling with supporting package updates. 
+> Docs alignment (2025-10-26): Idempotency contract + supersedes metrics in `docs/ingestion/aggregation-only-contract.md` §7 and observability guide. +| CONCELIER-STORE-AOC-19-003 `append-only supersedes migration` | DONE (2025-10-28) | Concelier Storage Guild | CONCELIER-STORE-AOC-19-002 | Introduce migration that freezes legacy `advisories` writes, copies data into `_backup_*`, and backfills supersedes pointers for raw revisions. Provide rollback plan. | +> 2025-10-28: Added supersedes backfill migration (`20251028_advisory_supersedes_backfill`) that renames `advisory` to a read-only view, snapshots data into `_backup_20251028`, and walks raw revisions to populate deterministic supersedes chains with integration coverage and operator scripts. +> Docs alignment (2025-10-26): Rollback guidance added to `docs/deploy/containers.md` §6. +| CONCELIER-STORE-AOC-19-004 `validator deployment playbook` | DONE (2025-10-28) | Concelier Storage Guild, DevOps Guild | CONCELIER-STORE-AOC-19-001 | Update `MIGRATIONS.md` and Offline Kit docs to cover enabling validators, rolling restarts, and validator smoke tests for air-gapped installs. | +> 2025-10-28: Documented duplicate audit + migration workflow in `docs/deploy/containers.md`, Offline Kit guide, and `MIGRATIONS.md`; published `ops/devops/scripts/check-advisory-raw-duplicates.js` for staging/offline clusters. +> Docs alignment (2025-10-26): Offline kit requirements documented in `docs/deploy/containers.md` §5. + +## Policy Engine v2 + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| CONCELIER-POLICY-20-003 `Selection cursors` | TODO | Concelier Storage Guild | CONCELIER-STORE-AOC-19-002, POLICY-ENGINE-20-003 | Add advisory/vex selection cursors (per policy run) with change stream checkpoints, indexes, and offline migration scripts to support incremental evaluations. | + +## Link-Not-Merge v1 + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| CONCELIER-LNM-21-101 `Observations collections` | TODO | Concelier Storage Guild | CONCELIER-LNM-21-001 | Provision `advisory_observations` and `advisory_linksets` collections with hashed shard keys, TTL for ingest metadata, and required indexes (`aliases`, `purls`, `observation_ids`). | +| CONCELIER-LNM-21-102 `Migration tooling` | TODO | Concelier Storage Guild, DevOps Guild | CONCELIER-LNM-21-101 | Backfill legacy merged advisories into observation/linkset collections, create tombstones for merged docs, and supply rollback scripts. | +| CONCELIER-LNM-21-103 `Blob/store wiring` | TODO | Concelier Storage Guild | CONCELIER-LNM-21-101 | Store large raw payloads in object storage with pointers from observations; update bootstrapper/offline kit to seed sample blobs. 
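> Reference sketch (not part of this change set) for CONCELIER-STORE-AOC-19-002: the compound unique idempotency index expressed with the MongoDB .NET driver already used by `StellaOps.Concelier.Storage.Mongo`. The `advisory_raw` collection name and the `advisory_raw_idempotency` index name are illustrative assumptions; field paths mirror the raw advisory document layout.

```csharp
using MongoDB.Bson;
using MongoDB.Driver;

// Sketch only: shape of the unique compound index described in CONCELIER-STORE-AOC-19-002.
// Field paths follow the advisory_raw documents (source.vendor, upstream.upstream_id,
// upstream.content_hash, tenant); collection and index names are assumptions for illustration.
static Task EnsureAdvisoryRawIdempotencyIndexAsync(
    IMongoDatabase database,
    CancellationToken cancellationToken = default)
{
    var collection = database.GetCollection<BsonDocument>("advisory_raw");

    var keys = Builders<BsonDocument>.IndexKeys
        .Ascending("source.vendor")
        .Ascending("upstream.upstream_id")
        .Ascending("upstream.content_hash")
        .Ascending("tenant");

    var model = new CreateIndexModel<BsonDocument>(
        keys,
        new CreateIndexOptions { Name = "advisory_raw_idempotency", Unique = true });

    // Re-running with an identical key spec and options is a no-op, so bootstrap code can call this safely.
    return collection.Indexes.CreateOneAsync(model, cancellationToken: cancellationToken);
}
```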
| diff --git a/src/StellaOps.Concelier.Testing/ConnectorTestHarness.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Testing/ConnectorTestHarness.cs similarity index 100% rename from src/StellaOps.Concelier.Testing/ConnectorTestHarness.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Testing/ConnectorTestHarness.cs diff --git a/src/StellaOps.Concelier.Testing/MongoIntegrationFixture.cs b/src/Concelier/__Libraries/StellaOps.Concelier.Testing/MongoIntegrationFixture.cs similarity index 100% rename from src/StellaOps.Concelier.Testing/MongoIntegrationFixture.cs rename to src/Concelier/__Libraries/StellaOps.Concelier.Testing/MongoIntegrationFixture.cs diff --git a/src/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj b/src/Concelier/__Libraries/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj similarity index 97% rename from src/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj rename to src/Concelier/__Libraries/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj index 74beb9ac..e78cbc6f 100644 --- a/src/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj +++ b/src/Concelier/__Libraries/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj @@ -1,20 +1,20 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <CopyLocalLockFileAssemblies>true</CopyLocalLockFileAssemblies> - <IsTestProject>false</IsTestProject> - </PropertyGroup> - <ItemGroup> - <PackageReference Include="Mongo2Go" Version="3.1.3" /> - <PackageReference Include="Microsoft.Extensions.TimeProvider.Testing" Version="9.10.0" /> - <PackageReference Include="xunit" Version="2.9.2"> - <PrivateAssets>all</PrivateAssets> - </PackageReference> - </ItemGroup> - <ItemGroup> - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - </ItemGroup> -</Project> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <CopyLocalLockFileAssemblies>true</CopyLocalLockFileAssemblies> + <IsTestProject>false</IsTestProject> + </PropertyGroup> + <ItemGroup> + <PackageReference Include="Mongo2Go" Version="3.1.3" /> + <PackageReference Include="Microsoft.Extensions.TimeProvider.Testing" Version="9.10.0" /> + <PackageReference Include="xunit" Version="2.9.2"> + <PrivateAssets>all</PrivateAssets> + </PackageReference> + </ItemGroup> + <ItemGroup> + <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> + </ItemGroup> +</Project> diff --git a/src/StellaOps.Concelier.Connector.Acsc.Tests/Acsc/AcscConnectorFetchTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Acsc.Tests/Acsc/AcscConnectorFetchTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Acsc.Tests/Acsc/AcscConnectorFetchTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Acsc.Tests/Acsc/AcscConnectorFetchTests.cs diff --git a/src/StellaOps.Concelier.Connector.Acsc.Tests/Acsc/AcscConnectorParseTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Acsc.Tests/Acsc/AcscConnectorParseTests.cs 
similarity index 100% rename from src/StellaOps.Concelier.Connector.Acsc.Tests/Acsc/AcscConnectorParseTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Acsc.Tests/Acsc/AcscConnectorParseTests.cs diff --git a/src/StellaOps.Concelier.Connector.Acsc.Tests/Acsc/AcscHttpClientConfigurationTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Acsc.Tests/Acsc/AcscHttpClientConfigurationTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Acsc.Tests/Acsc/AcscHttpClientConfigurationTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Acsc.Tests/Acsc/AcscHttpClientConfigurationTests.cs diff --git a/src/StellaOps.Concelier.Connector.Acsc.Tests/Acsc/Fixtures/acsc-advisories-multi.snapshot.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Acsc.Tests/Acsc/Fixtures/acsc-advisories-multi.snapshot.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Acsc.Tests/Acsc/Fixtures/acsc-advisories-multi.snapshot.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Acsc.Tests/Acsc/Fixtures/acsc-advisories-multi.snapshot.json diff --git a/src/StellaOps.Concelier.Connector.Acsc.Tests/Acsc/Fixtures/acsc-advisories.snapshot.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Acsc.Tests/Acsc/Fixtures/acsc-advisories.snapshot.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Acsc.Tests/Acsc/Fixtures/acsc-advisories.snapshot.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Acsc.Tests/Acsc/Fixtures/acsc-advisories.snapshot.json diff --git a/src/Concelier/__Tests/StellaOps.Concelier.Connector.Acsc.Tests/StellaOps.Concelier.Connector.Acsc.Tests.csproj b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Acsc.Tests/StellaOps.Concelier.Connector.Acsc.Tests.csproj new file mode 100644 index 00000000..75f84986 --- /dev/null +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Acsc.Tests/StellaOps.Concelier.Connector.Acsc.Tests.csproj @@ -0,0 +1,20 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Acsc/StellaOps.Concelier.Connector.Acsc.csproj" /> + </ItemGroup> + + <ItemGroup> + <None Include="Acsc/Fixtures/**" CopyToOutputDirectory="Always" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Cccs.Tests/CccsConnectorTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Cccs.Tests/CccsConnectorTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Cccs.Tests/CccsConnectorTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Cccs.Tests/CccsConnectorTests.cs diff --git a/src/StellaOps.Concelier.Connector.Cccs.Tests/Fixtures/cccs-feed-en.json 
b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Cccs.Tests/Fixtures/cccs-feed-en.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Cccs.Tests/Fixtures/cccs-feed-en.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Cccs.Tests/Fixtures/cccs-feed-en.json diff --git a/src/StellaOps.Concelier.Connector.Cccs.Tests/Fixtures/cccs-raw-advisory-fr.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Cccs.Tests/Fixtures/cccs-raw-advisory-fr.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Cccs.Tests/Fixtures/cccs-raw-advisory-fr.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Cccs.Tests/Fixtures/cccs-raw-advisory-fr.json diff --git a/src/StellaOps.Concelier.Connector.Cccs.Tests/Fixtures/cccs-raw-advisory.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Cccs.Tests/Fixtures/cccs-raw-advisory.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Cccs.Tests/Fixtures/cccs-raw-advisory.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Cccs.Tests/Fixtures/cccs-raw-advisory.json diff --git a/src/StellaOps.Concelier.Connector.Cccs.Tests/Fixtures/cccs-taxonomy-en.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Cccs.Tests/Fixtures/cccs-taxonomy-en.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Cccs.Tests/Fixtures/cccs-taxonomy-en.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Cccs.Tests/Fixtures/cccs-taxonomy-en.json diff --git a/src/StellaOps.Concelier.Connector.Cccs.Tests/Internal/CccsHtmlParserTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Cccs.Tests/Internal/CccsHtmlParserTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Cccs.Tests/Internal/CccsHtmlParserTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Cccs.Tests/Internal/CccsHtmlParserTests.cs diff --git a/src/StellaOps.Concelier.Connector.Cccs.Tests/Internal/CccsMapperTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Cccs.Tests/Internal/CccsMapperTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Cccs.Tests/Internal/CccsMapperTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Cccs.Tests/Internal/CccsMapperTests.cs diff --git a/src/StellaOps.Concelier.Connector.Cccs.Tests/StellaOps.Concelier.Connector.Cccs.Tests.csproj b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Cccs.Tests/StellaOps.Concelier.Connector.Cccs.Tests.csproj similarity index 59% rename from src/StellaOps.Concelier.Connector.Cccs.Tests/StellaOps.Concelier.Connector.Cccs.Tests.csproj rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Cccs.Tests/StellaOps.Concelier.Connector.Cccs.Tests.csproj index 65052fa2..43985c2f 100644 --- a/src/StellaOps.Concelier.Connector.Cccs.Tests/StellaOps.Concelier.Connector.Cccs.Tests.csproj +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Cccs.Tests/StellaOps.Concelier.Connector.Cccs.Tests.csproj @@ -1,19 +1,20 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Cccs/StellaOps.Concelier.Connector.Cccs.csproj" /> - </ItemGroup> - <ItemGroup> - <PackageReference Include="FluentAssertions" 
Version="6.12.0" /> - </ItemGroup> - <ItemGroup> - <None Update="Fixtures\*.json"> - <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> - </None> - </ItemGroup> -</Project> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Cccs/StellaOps.Concelier.Connector.Cccs.csproj" /> + </ItemGroup> + <ItemGroup> + <PackageReference Include="FluentAssertions" Version="6.12.0" /> + </ItemGroup> + <ItemGroup> + <None Update="Fixtures\*.json"> + <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> + </None> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.CertBund.Tests/CertBundConnectorTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertBund.Tests/CertBundConnectorTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertBund.Tests/CertBundConnectorTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.CertBund.Tests/CertBundConnectorTests.cs diff --git a/src/StellaOps.Concelier.Connector.CertBund.Tests/Fixtures/certbund-detail.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertBund.Tests/Fixtures/certbund-detail.json similarity index 100% rename from src/StellaOps.Concelier.Connector.CertBund.Tests/Fixtures/certbund-detail.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.CertBund.Tests/Fixtures/certbund-detail.json diff --git a/src/StellaOps.Concelier.Connector.CertBund.Tests/Fixtures/certbund-feed.xml b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertBund.Tests/Fixtures/certbund-feed.xml similarity index 100% rename from src/StellaOps.Concelier.Connector.CertBund.Tests/Fixtures/certbund-feed.xml rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.CertBund.Tests/Fixtures/certbund-feed.xml diff --git a/src/StellaOps.Concelier.Connector.CertBund.Tests/StellaOps.Concelier.Connector.CertBund.Tests.csproj b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertBund.Tests/StellaOps.Concelier.Connector.CertBund.Tests.csproj similarity index 64% rename from src/StellaOps.Concelier.Connector.CertBund.Tests/StellaOps.Concelier.Connector.CertBund.Tests.csproj rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.CertBund.Tests/StellaOps.Concelier.Connector.CertBund.Tests.csproj index df6e25a9..979ebc9e 100644 --- a/src/StellaOps.Concelier.Connector.CertBund.Tests/StellaOps.Concelier.Connector.CertBund.Tests.csproj +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertBund.Tests/StellaOps.Concelier.Connector.CertBund.Tests.csproj @@ -1,22 +1,23 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="../StellaOps.Concelier.Connector.CertBund/StellaOps.Concelier.Connector.CertBund.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - </ItemGroup> - <ItemGroup> - <PackageReference Include="FluentAssertions" Version="6.12.0" /> - </ItemGroup> - <ItemGroup> - <None 
Update="Fixtures\*.json"> - <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> - </None> - <None Update="Fixtures\*.xml"> - <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> - </None> - </ItemGroup> -</Project> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.CertBund/StellaOps.Concelier.Connector.CertBund.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> + </ItemGroup> + <ItemGroup> + <PackageReference Include="FluentAssertions" Version="6.12.0" /> + </ItemGroup> + <ItemGroup> + <None Update="Fixtures\*.json"> + <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> + </None> + <None Update="Fixtures\*.xml"> + <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> + </None> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.CertCc.Tests/CertCc/CertCcConnectorFetchTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/CertCc/CertCcConnectorFetchTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertCc.Tests/CertCc/CertCcConnectorFetchTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/CertCc/CertCcConnectorFetchTests.cs diff --git a/src/StellaOps.Concelier.Connector.CertCc.Tests/CertCc/CertCcConnectorSnapshotTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/CertCc/CertCcConnectorSnapshotTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertCc.Tests/CertCc/CertCcConnectorSnapshotTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/CertCc/CertCcConnectorSnapshotTests.cs diff --git a/src/StellaOps.Concelier.Connector.CertCc.Tests/CertCc/CertCcConnectorTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/CertCc/CertCcConnectorTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertCc.Tests/CertCc/CertCcConnectorTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/CertCc/CertCcConnectorTests.cs diff --git a/src/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/certcc-advisories.snapshot.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/certcc-advisories.snapshot.json similarity index 100% rename from src/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/certcc-advisories.snapshot.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/certcc-advisories.snapshot.json diff --git a/src/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/certcc-documents.snapshot.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/certcc-documents.snapshot.json similarity index 100% rename from src/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/certcc-documents.snapshot.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/certcc-documents.snapshot.json diff --git a/src/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/certcc-requests.snapshot.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/certcc-requests.snapshot.json similarity index 100% rename from 
src/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/certcc-requests.snapshot.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/certcc-requests.snapshot.json diff --git a/src/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/certcc-state.snapshot.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/certcc-state.snapshot.json similarity index 100% rename from src/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/certcc-state.snapshot.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/certcc-state.snapshot.json diff --git a/src/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/summary-2025-09.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/summary-2025-09.json similarity index 100% rename from src/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/summary-2025-09.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/summary-2025-09.json diff --git a/src/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/summary-2025-10.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/summary-2025-10.json similarity index 100% rename from src/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/summary-2025-10.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/summary-2025-10.json diff --git a/src/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/summary-2025-11.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/summary-2025-11.json similarity index 100% rename from src/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/summary-2025-11.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/summary-2025-11.json diff --git a/src/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/summary-2025.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/summary-2025.json similarity index 100% rename from src/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/summary-2025.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/summary-2025.json diff --git a/src/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/vendor-statuses-294418.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/vendor-statuses-294418.json similarity index 100% rename from src/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/vendor-statuses-294418.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/vendor-statuses-294418.json diff --git a/src/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/vendors-294418.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/vendors-294418.json similarity index 100% rename from src/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/vendors-294418.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/vendors-294418.json diff --git a/src/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/vu-257161.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/vu-257161.json similarity index 100% rename from src/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/vu-257161.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/vu-257161.json diff --git a/src/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/vu-294418-vendors.json 
b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/vu-294418-vendors.json similarity index 100% rename from src/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/vu-294418-vendors.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/vu-294418-vendors.json diff --git a/src/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/vu-294418-vuls.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/vu-294418-vuls.json similarity index 100% rename from src/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/vu-294418-vuls.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/vu-294418-vuls.json diff --git a/src/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/vu-294418.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/vu-294418.json similarity index 100% rename from src/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/vu-294418.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/vu-294418.json diff --git a/src/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/vulnerabilities-294418.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/vulnerabilities-294418.json similarity index 100% rename from src/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/vulnerabilities-294418.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Fixtures/vulnerabilities-294418.json diff --git a/src/StellaOps.Concelier.Connector.CertCc.Tests/Internal/CertCcMapperTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Internal/CertCcMapperTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertCc.Tests/Internal/CertCcMapperTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Internal/CertCcMapperTests.cs diff --git a/src/StellaOps.Concelier.Connector.CertCc.Tests/Internal/CertCcSummaryParserTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Internal/CertCcSummaryParserTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertCc.Tests/Internal/CertCcSummaryParserTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Internal/CertCcSummaryParserTests.cs diff --git a/src/StellaOps.Concelier.Connector.CertCc.Tests/Internal/CertCcSummaryPlannerTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Internal/CertCcSummaryPlannerTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertCc.Tests/Internal/CertCcSummaryPlannerTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Internal/CertCcSummaryPlannerTests.cs diff --git a/src/StellaOps.Concelier.Connector.CertCc.Tests/Internal/CertCcVendorStatementParserTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Internal/CertCcVendorStatementParserTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertCc.Tests/Internal/CertCcVendorStatementParserTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/Internal/CertCcVendorStatementParserTests.cs diff --git a/src/StellaOps.Concelier.Connector.CertCc.Tests/StellaOps.Concelier.Connector.CertCc.Tests.csproj b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/StellaOps.Concelier.Connector.CertCc.Tests.csproj similarity index 59% rename from 
src/StellaOps.Concelier.Connector.CertCc.Tests/StellaOps.Concelier.Connector.CertCc.Tests.csproj rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/StellaOps.Concelier.Connector.CertCc.Tests.csproj index 949e4b0e..bf3755d6 100644 --- a/src/StellaOps.Concelier.Connector.CertCc.Tests/StellaOps.Concelier.Connector.CertCc.Tests.csproj +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertCc.Tests/StellaOps.Concelier.Connector.CertCc.Tests.csproj @@ -1,19 +1,20 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.CertCc/StellaOps.Concelier.Connector.CertCc.csproj" /> - </ItemGroup> - <ItemGroup> - <PackageReference Include="FluentAssertions" Version="6.12.0" /> - </ItemGroup> - <ItemGroup> - <None Update="Fixtures\*.json"> - <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> - </None> - </ItemGroup> -</Project> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.CertCc/StellaOps.Concelier.Connector.CertCc.csproj" /> + </ItemGroup> + <ItemGroup> + <PackageReference Include="FluentAssertions" Version="6.12.0" /> + </ItemGroup> + <ItemGroup> + <None Update="Fixtures\*.json"> + <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> + </None> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.CertFr.Tests/CertFr/CertFrConnectorTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertFr.Tests/CertFr/CertFrConnectorTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertFr.Tests/CertFr/CertFrConnectorTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.CertFr.Tests/CertFr/CertFrConnectorTests.cs diff --git a/src/StellaOps.Concelier.Connector.CertFr.Tests/CertFr/Fixtures/certfr-advisories.snapshot.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertFr.Tests/CertFr/Fixtures/certfr-advisories.snapshot.json similarity index 100% rename from src/StellaOps.Concelier.Connector.CertFr.Tests/CertFr/Fixtures/certfr-advisories.snapshot.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.CertFr.Tests/CertFr/Fixtures/certfr-advisories.snapshot.json diff --git a/src/StellaOps.Concelier.Connector.CertFr.Tests/CertFr/Fixtures/certfr-detail-AV-2024-001.html b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertFr.Tests/CertFr/Fixtures/certfr-detail-AV-2024-001.html similarity index 100% rename from src/StellaOps.Concelier.Connector.CertFr.Tests/CertFr/Fixtures/certfr-detail-AV-2024-001.html rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.CertFr.Tests/CertFr/Fixtures/certfr-detail-AV-2024-001.html diff --git a/src/StellaOps.Concelier.Connector.CertFr.Tests/CertFr/Fixtures/certfr-detail-AV-2024-002.html 
b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertFr.Tests/CertFr/Fixtures/certfr-detail-AV-2024-002.html similarity index 100% rename from src/StellaOps.Concelier.Connector.CertFr.Tests/CertFr/Fixtures/certfr-detail-AV-2024-002.html rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.CertFr.Tests/CertFr/Fixtures/certfr-detail-AV-2024-002.html diff --git a/src/StellaOps.Concelier.Connector.CertFr.Tests/CertFr/Fixtures/certfr-feed.xml b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertFr.Tests/CertFr/Fixtures/certfr-feed.xml similarity index 100% rename from src/StellaOps.Concelier.Connector.CertFr.Tests/CertFr/Fixtures/certfr-feed.xml rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.CertFr.Tests/CertFr/Fixtures/certfr-feed.xml diff --git a/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertFr.Tests/StellaOps.Concelier.Connector.CertFr.Tests.csproj b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertFr.Tests/StellaOps.Concelier.Connector.CertFr.Tests.csproj new file mode 100644 index 00000000..07a81a25 --- /dev/null +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertFr.Tests/StellaOps.Concelier.Connector.CertFr.Tests.csproj @@ -0,0 +1,17 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.CertFr/StellaOps.Concelier.Connector.CertFr.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> + </ItemGroup> + <ItemGroup> + <None Include="CertFr/Fixtures/**" CopyToOutputDirectory="Always" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.CertIn.Tests/CertIn/CertInConnectorTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertIn.Tests/CertIn/CertInConnectorTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.CertIn.Tests/CertIn/CertInConnectorTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.CertIn.Tests/CertIn/CertInConnectorTests.cs diff --git a/src/StellaOps.Concelier.Connector.CertIn.Tests/CertIn/Fixtures/alerts-page1.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertIn.Tests/CertIn/Fixtures/alerts-page1.json similarity index 100% rename from src/StellaOps.Concelier.Connector.CertIn.Tests/CertIn/Fixtures/alerts-page1.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.CertIn.Tests/CertIn/Fixtures/alerts-page1.json diff --git a/src/StellaOps.Concelier.Connector.CertIn.Tests/CertIn/Fixtures/detail-CIAD-2024-0005.html b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertIn.Tests/CertIn/Fixtures/detail-CIAD-2024-0005.html similarity index 100% rename from src/StellaOps.Concelier.Connector.CertIn.Tests/CertIn/Fixtures/detail-CIAD-2024-0005.html rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.CertIn.Tests/CertIn/Fixtures/detail-CIAD-2024-0005.html diff --git a/src/StellaOps.Concelier.Connector.CertIn.Tests/CertIn/Fixtures/expected-advisory.json 
b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertIn.Tests/CertIn/Fixtures/expected-advisory.json similarity index 100% rename from src/StellaOps.Concelier.Connector.CertIn.Tests/CertIn/Fixtures/expected-advisory.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.CertIn.Tests/CertIn/Fixtures/expected-advisory.json diff --git a/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertIn.Tests/StellaOps.Concelier.Connector.CertIn.Tests.csproj b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertIn.Tests/StellaOps.Concelier.Connector.CertIn.Tests.csproj new file mode 100644 index 00000000..d504bfe2 --- /dev/null +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.CertIn.Tests/StellaOps.Concelier.Connector.CertIn.Tests.csproj @@ -0,0 +1,17 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.CertIn/StellaOps.Concelier.Connector.CertIn.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> + </ItemGroup> + <ItemGroup> + <None Include="CertIn/Fixtures/**" CopyToOutputDirectory="Always" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Common.Tests/Common/CannedHttpMessageHandlerTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Common.Tests/Common/CannedHttpMessageHandlerTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Common.Tests/Common/CannedHttpMessageHandlerTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Common.Tests/Common/CannedHttpMessageHandlerTests.cs diff --git a/src/StellaOps.Concelier.Connector.Common.Tests/Common/HtmlContentSanitizerTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Common.Tests/Common/HtmlContentSanitizerTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Common.Tests/Common/HtmlContentSanitizerTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Common.Tests/Common/HtmlContentSanitizerTests.cs diff --git a/src/StellaOps.Concelier.Connector.Common.Tests/Common/PackageCoordinateHelperTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Common.Tests/Common/PackageCoordinateHelperTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Common.Tests/Common/PackageCoordinateHelperTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Common.Tests/Common/PackageCoordinateHelperTests.cs diff --git a/src/StellaOps.Concelier.Connector.Common.Tests/Common/PdfTextExtractorTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Common.Tests/Common/PdfTextExtractorTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Common.Tests/Common/PdfTextExtractorTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Common.Tests/Common/PdfTextExtractorTests.cs diff --git a/src/StellaOps.Concelier.Connector.Common.Tests/Common/SourceFetchServiceGuardTests.cs 
b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Common.Tests/Common/SourceFetchServiceGuardTests.cs similarity index 97% rename from src/StellaOps.Concelier.Connector.Common.Tests/Common/SourceFetchServiceGuardTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Common.Tests/Common/SourceFetchServiceGuardTests.cs index 08af6fde..7ef14a92 100644 --- a/src/StellaOps.Concelier.Connector.Common.Tests/Common/SourceFetchServiceGuardTests.cs +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Common.Tests/Common/SourceFetchServiceGuardTests.cs @@ -1,254 +1,254 @@ -using System.Net; -using System.Net.Http; -using System.Net.Http.Headers; -using System.Security.Cryptography; -using System.Text; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.Extensions.Options; -using Mongo2Go; -using MongoDB.Bson; -using MongoDB.Driver; -using StellaOps.Aoc; -using StellaOps.Concelier.Connector.Common.Fetch; -using StellaOps.Concelier.Connector.Common.Http; -using StellaOps.Concelier.Core.Aoc; -using StellaOps.Concelier.Core.Linksets; -using StellaOps.Concelier.RawModels; -using StellaOps.Concelier.Storage.Mongo; -using StellaOps.Concelier.Storage.Mongo.Documents; - -namespace StellaOps.Concelier.Connector.Common.Tests; - -public sealed class SourceFetchServiceGuardTests : IAsyncLifetime -{ - private readonly MongoDbRunner _runner; - private readonly IMongoDatabase _database; - private readonly RawDocumentStorage _rawStorage; - - public SourceFetchServiceGuardTests() - { - _runner = MongoDbRunner.Start(singleNodeReplSet: true); - var client = new MongoClient(_runner.ConnectionString); - _database = client.GetDatabase($"source-fetch-guard-{Guid.NewGuid():N}"); - _rawStorage = new RawDocumentStorage(_database); - } - - [Fact] - public async Task FetchAsync_ValidatesWithGuardBeforePersisting() - { - var responsePayload = "{\"id\":\"CVE-2025-1111\"}"; - var handler = new StaticHttpMessageHandler(() => CreateSuccessResponse(responsePayload)); - var client = new HttpClient(handler, disposeHandler: false); - var httpClientFactory = new StaticHttpClientFactory(client); - var documentStore = new RecordingDocumentStore(); - var guard = new RecordingAdvisoryRawWriteGuard(); - var jitter = new NoJitterSource(); - - var httpOptions = new TestOptionsMonitor<StellaOps.Concelier.Connector.Common.Http.SourceHttpClientOptions>(new StellaOps.Concelier.Connector.Common.Http.SourceHttpClientOptions()); - var storageOptions = Options.Create(new MongoStorageOptions - { - ConnectionString = _runner.ConnectionString, - DatabaseName = _database.DatabaseNamespace.DatabaseName, - }); - - var linksetMapper = new NoopAdvisoryLinksetMapper(); - - var service = new SourceFetchService( - httpClientFactory, - _rawStorage, - documentStore, - NullLogger<SourceFetchService>.Instance, - jitter, - guard, - linksetMapper, - TimeProvider.System, - httpOptions, - storageOptions); - - var request = new SourceFetchRequest("client", "vndr.msrc", new Uri("https://example.test/advisories/ADV-1234")) - { - Metadata = new Dictionary<string, string> - { - ["upstream.id"] = "ADV-1234", - ["content.format"] = "csaf", - ["msrc.lastModified"] = DateTimeOffset.UtcNow.AddDays(-1).ToString("O"), - } - }; - - var result = await service.FetchAsync(request, CancellationToken.None); - - Assert.True(result.IsSuccess); - Assert.NotNull(guard.LastDocument); - Assert.Equal("tenant-default", guard.LastDocument!.Tenant); - Assert.Equal("msrc", guard.LastDocument.Source.Vendor); - Assert.Equal("ADV-1234", 
guard.LastDocument.Upstream.UpstreamId); - var expectedHash = Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes(responsePayload))).ToLowerInvariant(); - Assert.Equal(expectedHash, guard.LastDocument.Upstream.ContentHash); - Assert.NotNull(documentStore.LastRecord); - Assert.True(documentStore.UpsertCount > 0); - Assert.Equal("msrc", documentStore.LastRecord!.Metadata!["source.vendor"]); - Assert.Equal("tenant-default", documentStore.LastRecord.Metadata!["tenant"]); - - // verify raw payload stored - var filesCollection = _database.GetCollection<BsonDocument>("documents.files"); - var count = await filesCollection.CountDocumentsAsync(FilterDefinition<BsonDocument>.Empty); - Assert.Equal(1, count); - } - - [Fact] - public async Task FetchAsync_WhenGuardThrows_DoesNotPersist() - { - var handler = new StaticHttpMessageHandler(() => CreateSuccessResponse("{\"id\":\"CVE-2025-2222\"}")); - var client = new HttpClient(handler, disposeHandler: false); - var httpClientFactory = new StaticHttpClientFactory(client); - var documentStore = new RecordingDocumentStore(); - var guard = new RecordingAdvisoryRawWriteGuard { ShouldThrow = true }; - var jitter = new NoJitterSource(); - - var httpOptions = new TestOptionsMonitor<StellaOps.Concelier.Connector.Common.Http.SourceHttpClientOptions>(new StellaOps.Concelier.Connector.Common.Http.SourceHttpClientOptions()); - var storageOptions = Options.Create(new MongoStorageOptions - { - ConnectionString = _runner.ConnectionString, - DatabaseName = _database.DatabaseNamespace.DatabaseName, - }); - - var linksetMapper = new NoopAdvisoryLinksetMapper(); - - var service = new SourceFetchService( - httpClientFactory, - _rawStorage, - documentStore, - NullLogger<SourceFetchService>.Instance, - jitter, - guard, - linksetMapper, - TimeProvider.System, - httpOptions, - storageOptions); - - var request = new SourceFetchRequest("client", "nvd", new Uri("https://example.test/data/XYZ")) - { - Metadata = new Dictionary<string, string> - { - ["vulnerability.id"] = "CVE-2025-2222", - } - }; - - await Assert.ThrowsAsync<ConcelierAocGuardException>(() => service.FetchAsync(request, CancellationToken.None)); - Assert.Equal(0, documentStore.UpsertCount); - - var filesCollection = _database.GetCollection<BsonDocument>("documents.files"); - var count = await filesCollection.CountDocumentsAsync(FilterDefinition<BsonDocument>.Empty); - Assert.Equal(0, count); - } - - public Task InitializeAsync() => Task.CompletedTask; - - public Task DisposeAsync() - { - _runner.Dispose(); - return Task.CompletedTask; - } - - private static HttpResponseMessage CreateSuccessResponse(string payload) - { - var message = new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StringContent(payload, Encoding.UTF8, "application/json"), - }; - - message.Headers.ETag = new EntityTagHeaderValue("\"etag\""); - message.Content.Headers.LastModified = DateTimeOffset.UtcNow.AddHours(-1); - return message; - } - - private sealed class StaticHttpClientFactory : IHttpClientFactory - { - private readonly HttpClient _client; - - public StaticHttpClientFactory(HttpClient client) => _client = client; - - public HttpClient CreateClient(string name) => _client; - } - - private sealed class StaticHttpMessageHandler : HttpMessageHandler - { - private readonly Func<HttpResponseMessage> _responseFactory; - - public StaticHttpMessageHandler(Func<HttpResponseMessage> responseFactory) => _responseFactory = responseFactory; - - protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, 
CancellationToken cancellationToken) - => Task.FromResult(_responseFactory()); - } - - private sealed class RecordingDocumentStore : IDocumentStore - { - public DocumentRecord? LastRecord { get; private set; } - - public int UpsertCount { get; private set; } - - public Task<DocumentRecord> UpsertAsync(DocumentRecord record, CancellationToken cancellationToken, IClientSessionHandle? session = null) - { - UpsertCount++; - LastRecord = record; - return Task.FromResult(record); - } - - public Task<DocumentRecord?> FindBySourceAndUriAsync(string sourceName, string uri, CancellationToken cancellationToken, IClientSessionHandle? session = null) - => Task.FromResult<DocumentRecord?>(null); - - public Task<DocumentRecord?> FindAsync(Guid id, CancellationToken cancellationToken, IClientSessionHandle? session = null) - => Task.FromResult<DocumentRecord?>(null); - - public Task<bool> UpdateStatusAsync(Guid id, string status, CancellationToken cancellationToken, IClientSessionHandle? session = null) - => Task.FromResult(false); - } - - private sealed class RecordingAdvisoryRawWriteGuard : IAdvisoryRawWriteGuard - { - public AdvisoryRawDocument? LastDocument { get; private set; } - - public bool ShouldThrow { get; set; } - - public void EnsureValid(AdvisoryRawDocument document) - { - LastDocument = document; - if (ShouldThrow) - { - var violation = AocViolation.Create(AocViolationCode.InvalidTenant, "/tenant", "test"); - throw new ConcelierAocGuardException(AocGuardResult.FromViolations(new[] { violation })); - } - } - } - - private sealed class NoJitterSource : IJitterSource - { - public TimeSpan Next(TimeSpan minInclusive, TimeSpan maxInclusive) => minInclusive; - } - - private sealed class TestOptionsMonitor<T> : IOptionsMonitor<T> - where T : class, new() - { - private readonly T _options; - - public TestOptionsMonitor(T options) => _options = options; - - public T CurrentValue => _options; - - public T Get(string? 
name) => _options; - - public IDisposable OnChange(Action<T, string> listener) => NullDisposable.Instance; - - private sealed class NullDisposable : IDisposable - { - public static NullDisposable Instance { get; } = new(); - - public void Dispose() { } - } - } - - private sealed class NoopAdvisoryLinksetMapper : IAdvisoryLinksetMapper - { - public RawLinkset Map(AdvisoryRawDocument document) => new(); - } -} +using System.Net; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Security.Cryptography; +using System.Text; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using Mongo2Go; +using MongoDB.Bson; +using MongoDB.Driver; +using StellaOps.Aoc; +using StellaOps.Concelier.Connector.Common.Fetch; +using StellaOps.Concelier.Connector.Common.Http; +using StellaOps.Concelier.Core.Aoc; +using StellaOps.Concelier.Core.Linksets; +using StellaOps.Concelier.RawModels; +using StellaOps.Concelier.Storage.Mongo; +using StellaOps.Concelier.Storage.Mongo.Documents; + +namespace StellaOps.Concelier.Connector.Common.Tests; + +public sealed class SourceFetchServiceGuardTests : IAsyncLifetime +{ + private readonly MongoDbRunner _runner; + private readonly IMongoDatabase _database; + private readonly RawDocumentStorage _rawStorage; + + public SourceFetchServiceGuardTests() + { + _runner = MongoDbRunner.Start(singleNodeReplSet: true); + var client = new MongoClient(_runner.ConnectionString); + _database = client.GetDatabase($"source-fetch-guard-{Guid.NewGuid():N}"); + _rawStorage = new RawDocumentStorage(_database); + } + + [Fact] + public async Task FetchAsync_ValidatesWithGuardBeforePersisting() + { + var responsePayload = "{\"id\":\"CVE-2025-1111\"}"; + var handler = new StaticHttpMessageHandler(() => CreateSuccessResponse(responsePayload)); + var client = new HttpClient(handler, disposeHandler: false); + var httpClientFactory = new StaticHttpClientFactory(client); + var documentStore = new RecordingDocumentStore(); + var guard = new RecordingAdvisoryRawWriteGuard(); + var jitter = new NoJitterSource(); + + var httpOptions = new TestOptionsMonitor<StellaOps.Concelier.Connector.Common.Http.SourceHttpClientOptions>(new StellaOps.Concelier.Connector.Common.Http.SourceHttpClientOptions()); + var storageOptions = Options.Create(new MongoStorageOptions + { + ConnectionString = _runner.ConnectionString, + DatabaseName = _database.DatabaseNamespace.DatabaseName, + }); + + var linksetMapper = new NoopAdvisoryLinksetMapper(); + + var service = new SourceFetchService( + httpClientFactory, + _rawStorage, + documentStore, + NullLogger<SourceFetchService>.Instance, + jitter, + guard, + linksetMapper, + TimeProvider.System, + httpOptions, + storageOptions); + + var request = new SourceFetchRequest("client", "vndr.msrc", new Uri("https://example.test/advisories/ADV-1234")) + { + Metadata = new Dictionary<string, string> + { + ["upstream.id"] = "ADV-1234", + ["content.format"] = "csaf", + ["msrc.lastModified"] = DateTimeOffset.UtcNow.AddDays(-1).ToString("O"), + } + }; + + var result = await service.FetchAsync(request, CancellationToken.None); + + Assert.True(result.IsSuccess); + Assert.NotNull(guard.LastDocument); + Assert.Equal("tenant-default", guard.LastDocument!.Tenant); + Assert.Equal("msrc", guard.LastDocument.Source.Vendor); + Assert.Equal("ADV-1234", guard.LastDocument.Upstream.UpstreamId); + var expectedHash = Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes(responsePayload))).ToLowerInvariant(); + Assert.Equal(expectedHash, 
guard.LastDocument.Upstream.ContentHash); + Assert.NotNull(documentStore.LastRecord); + Assert.True(documentStore.UpsertCount > 0); + Assert.Equal("msrc", documentStore.LastRecord!.Metadata!["source.vendor"]); + Assert.Equal("tenant-default", documentStore.LastRecord.Metadata!["tenant"]); + + // verify raw payload stored + var filesCollection = _database.GetCollection<BsonDocument>("documents.files"); + var count = await filesCollection.CountDocumentsAsync(FilterDefinition<BsonDocument>.Empty); + Assert.Equal(1, count); + } + + [Fact] + public async Task FetchAsync_WhenGuardThrows_DoesNotPersist() + { + var handler = new StaticHttpMessageHandler(() => CreateSuccessResponse("{\"id\":\"CVE-2025-2222\"}")); + var client = new HttpClient(handler, disposeHandler: false); + var httpClientFactory = new StaticHttpClientFactory(client); + var documentStore = new RecordingDocumentStore(); + var guard = new RecordingAdvisoryRawWriteGuard { ShouldThrow = true }; + var jitter = new NoJitterSource(); + + var httpOptions = new TestOptionsMonitor<StellaOps.Concelier.Connector.Common.Http.SourceHttpClientOptions>(new StellaOps.Concelier.Connector.Common.Http.SourceHttpClientOptions()); + var storageOptions = Options.Create(new MongoStorageOptions + { + ConnectionString = _runner.ConnectionString, + DatabaseName = _database.DatabaseNamespace.DatabaseName, + }); + + var linksetMapper = new NoopAdvisoryLinksetMapper(); + + var service = new SourceFetchService( + httpClientFactory, + _rawStorage, + documentStore, + NullLogger<SourceFetchService>.Instance, + jitter, + guard, + linksetMapper, + TimeProvider.System, + httpOptions, + storageOptions); + + var request = new SourceFetchRequest("client", "nvd", new Uri("https://example.test/data/XYZ")) + { + Metadata = new Dictionary<string, string> + { + ["vulnerability.id"] = "CVE-2025-2222", + } + }; + + await Assert.ThrowsAsync<ConcelierAocGuardException>(() => service.FetchAsync(request, CancellationToken.None)); + Assert.Equal(0, documentStore.UpsertCount); + + var filesCollection = _database.GetCollection<BsonDocument>("documents.files"); + var count = await filesCollection.CountDocumentsAsync(FilterDefinition<BsonDocument>.Empty); + Assert.Equal(0, count); + } + + public Task InitializeAsync() => Task.CompletedTask; + + public Task DisposeAsync() + { + _runner.Dispose(); + return Task.CompletedTask; + } + + private static HttpResponseMessage CreateSuccessResponse(string payload) + { + var message = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(payload, Encoding.UTF8, "application/json"), + }; + + message.Headers.ETag = new EntityTagHeaderValue("\"etag\""); + message.Content.Headers.LastModified = DateTimeOffset.UtcNow.AddHours(-1); + return message; + } + + private sealed class StaticHttpClientFactory : IHttpClientFactory + { + private readonly HttpClient _client; + + public StaticHttpClientFactory(HttpClient client) => _client = client; + + public HttpClient CreateClient(string name) => _client; + } + + private sealed class StaticHttpMessageHandler : HttpMessageHandler + { + private readonly Func<HttpResponseMessage> _responseFactory; + + public StaticHttpMessageHandler(Func<HttpResponseMessage> responseFactory) => _responseFactory = responseFactory; + + protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) + => Task.FromResult(_responseFactory()); + } + + private sealed class RecordingDocumentStore : IDocumentStore + { + public DocumentRecord? 
LastRecord { get; private set; } + + public int UpsertCount { get; private set; } + + public Task<DocumentRecord> UpsertAsync(DocumentRecord record, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + UpsertCount++; + LastRecord = record; + return Task.FromResult(record); + } + + public Task<DocumentRecord?> FindBySourceAndUriAsync(string sourceName, string uri, CancellationToken cancellationToken, IClientSessionHandle? session = null) + => Task.FromResult<DocumentRecord?>(null); + + public Task<DocumentRecord?> FindAsync(Guid id, CancellationToken cancellationToken, IClientSessionHandle? session = null) + => Task.FromResult<DocumentRecord?>(null); + + public Task<bool> UpdateStatusAsync(Guid id, string status, CancellationToken cancellationToken, IClientSessionHandle? session = null) + => Task.FromResult(false); + } + + private sealed class RecordingAdvisoryRawWriteGuard : IAdvisoryRawWriteGuard + { + public AdvisoryRawDocument? LastDocument { get; private set; } + + public bool ShouldThrow { get; set; } + + public void EnsureValid(AdvisoryRawDocument document) + { + LastDocument = document; + if (ShouldThrow) + { + var violation = AocViolation.Create(AocViolationCode.InvalidTenant, "/tenant", "test"); + throw new ConcelierAocGuardException(AocGuardResult.FromViolations(new[] { violation })); + } + } + } + + private sealed class NoJitterSource : IJitterSource + { + public TimeSpan Next(TimeSpan minInclusive, TimeSpan maxInclusive) => minInclusive; + } + + private sealed class TestOptionsMonitor<T> : IOptionsMonitor<T> + where T : class, new() + { + private readonly T _options; + + public TestOptionsMonitor(T options) => _options = options; + + public T CurrentValue => _options; + + public T Get(string? name) => _options; + + public IDisposable OnChange(Action<T, string> listener) => NullDisposable.Instance; + + private sealed class NullDisposable : IDisposable + { + public static NullDisposable Instance { get; } = new(); + + public void Dispose() { } + } + } + + private sealed class NoopAdvisoryLinksetMapper : IAdvisoryLinksetMapper + { + public RawLinkset Map(AdvisoryRawDocument document) => new(); + } +} diff --git a/src/StellaOps.Concelier.Connector.Common.Tests/Common/SourceFetchServiceTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Common.Tests/Common/SourceFetchServiceTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Common.Tests/Common/SourceFetchServiceTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Common.Tests/Common/SourceFetchServiceTests.cs diff --git a/src/StellaOps.Concelier.Connector.Common.Tests/Common/SourceHttpClientBuilderTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Common.Tests/Common/SourceHttpClientBuilderTests.cs similarity index 97% rename from src/StellaOps.Concelier.Connector.Common.Tests/Common/SourceHttpClientBuilderTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Common.Tests/Common/SourceHttpClientBuilderTests.cs index d2de6efc..f1c47ce9 100644 --- a/src/StellaOps.Concelier.Connector.Common.Tests/Common/SourceHttpClientBuilderTests.cs +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Common.Tests/Common/SourceHttpClientBuilderTests.cs @@ -1,327 +1,327 @@ -using System; -using System.Collections.Generic; -using System.IO; -using System.Net; -using System.Net.Http; -using System.Net.Security; -using System.Security.Cryptography; -using System.Security.Cryptography.X509Certificates; -using System.Text; -using 
Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.Options; -using StellaOps.Concelier.Connector.Common.Http; - -namespace StellaOps.Concelier.Connector.Common.Tests; - -public sealed class SourceHttpClientBuilderTests -{ - [Fact] - public void AddSourceHttpClient_ConfiguresVersionAndHandler() - { - var services = new ServiceCollection(); - services.AddLogging(); - services.AddSingleton<IConfiguration>(new ConfigurationBuilder().Build()); - - bool configureInvoked = false; - bool? observedEnableMultiple = null; - SocketsHttpHandler? capturedHandler = null; - - services.AddSourceHttpClient("source.test", (_, options) => - { - options.AllowedHosts.Add("example.test"); - options.RequestVersion = HttpVersion.Version20; - options.VersionPolicy = HttpVersionPolicy.RequestVersionOrLower; - options.EnableMultipleHttp2Connections = false; - options.ConfigureHandler = handler => - { - capturedHandler = handler; - observedEnableMultiple = handler.EnableMultipleHttp2Connections; - configureInvoked = true; - }; - }); - - using var provider = services.BuildServiceProvider(); - var factory = provider.GetRequiredService<IHttpClientFactory>(); - - var client = factory.CreateClient("source.test"); - - Assert.Equal(HttpVersion.Version20, client.DefaultRequestVersion); - Assert.Equal(HttpVersionPolicy.RequestVersionOrLower, client.DefaultVersionPolicy); - Assert.True(configureInvoked); - Assert.False(observedEnableMultiple); - Assert.NotNull(capturedHandler); - } - - [Fact] - public void AddSourceHttpClient_LoadsProxyConfiguration() - { - var services = new ServiceCollection(); - services.AddLogging(); - - var configuration = new ConfigurationBuilder() - .AddInMemoryCollection(new Dictionary<string, string?> - { - [$"concelier:httpClients:source.icscisa:{ProxySection}:{ProxyAddressKey}"] = "http://proxy.local:8080", - [$"concelier:httpClients:source.icscisa:{ProxySection}:{ProxyBypassOnLocalKey}"] = "false", - [$"concelier:httpClients:source.icscisa:{ProxySection}:{ProxyBypassListKey}:0"] = "localhost", - [$"concelier:httpClients:source.icscisa:{ProxySection}:{ProxyBypassListKey}:1"] = "127.0.0.1", - [$"concelier:httpClients:source.icscisa:{ProxySection}:{ProxyUseDefaultCredentialsKey}"] = "false", - [$"concelier:httpClients:source.icscisa:{ProxySection}:{ProxyUsernameKey}"] = "svc-concelier", - [$"concelier:httpClients:source.icscisa:{ProxySection}:{ProxyPasswordKey}"] = "s3cr3t!", - }) - .Build(); - - services.AddSingleton<IConfiguration>(configuration); - - services.AddSourceHttpClient("source.icscisa", (_, options) => - { - options.AllowedHosts.Add("content.govdelivery.com"); - options.ProxyAddress = new Uri("http://configure.local:9000"); - }); - - using var provider = services.BuildServiceProvider(); - _ = provider.GetRequiredService<IHttpClientFactory>().CreateClient("source.icscisa"); - - var resolvedConfiguration = provider.GetRequiredService<IConfiguration>(); - var proxySection = resolvedConfiguration - .GetSection("concelier") - .GetSection("httpClients") - .GetSection("source.icscisa") - .GetSection("proxy"); - Assert.True(proxySection.Exists()); - Assert.Equal("http://proxy.local:8080", proxySection[ProxyAddressKey]); - - var configuredOptions = provider.GetRequiredService<IOptionsMonitor<SourceHttpClientOptions>>().Get("source.icscisa"); - Assert.NotNull(configuredOptions.ProxyAddress); - Assert.Equal(new Uri("http://proxy.local:8080"), configuredOptions.ProxyAddress); - Assert.False(configuredOptions.ProxyBypassOnLocal); - 
Assert.Contains("localhost", configuredOptions.ProxyBypassList, StringComparer.OrdinalIgnoreCase); - Assert.Contains("127.0.0.1", configuredOptions.ProxyBypassList); - Assert.False(configuredOptions.ProxyUseDefaultCredentials); - Assert.Equal("svc-concelier", configuredOptions.ProxyUsername); - Assert.Equal("s3cr3t!", configuredOptions.ProxyPassword); - } - - [Fact] - public void AddSourceHttpClient_UsesConfigurationToBypassValidation() - { - var services = new ServiceCollection(); - services.AddLogging(); - using var trustedRoot = CreateSelfSignedCertificate(); - var pemPath = Path.Combine(Path.GetTempPath(), $"stellaops-trust-{Guid.NewGuid():N}.pem"); - WriteCertificatePem(trustedRoot, pemPath); - - var configuration = new ConfigurationBuilder() - .AddInMemoryCollection(new Dictionary<string, string?> - { - [$"concelier:httpClients:source.acsc:{AllowInvalidKey}"] = "true", - [$"concelier:httpClients:source.acsc:{TrustedRootPathsKey}:0"] = pemPath, - }) - .Build(); - - services.AddSingleton<IConfiguration>(configuration); - - bool configureInvoked = false; - SocketsHttpHandler? capturedHandler = null; - - services.AddSourceHttpClient("source.acsc", (_, options) => - { - options.AllowedHosts.Add("example.test"); - options.ConfigureHandler = handler => - { - capturedHandler = handler; - configureInvoked = true; - }; - }); - - using var provider = services.BuildServiceProvider(); - var factory = provider.GetRequiredService<IHttpClientFactory>(); - - var client = factory.CreateClient("source.acsc"); - var optionsMonitor = provider.GetRequiredService<IOptionsMonitor<SourceHttpClientOptions>>(); - var configuredOptions = optionsMonitor.Get("source.acsc"); - - Assert.True(configureInvoked); - Assert.NotNull(capturedHandler); - Assert.True(configuredOptions.AllowInvalidServerCertificates); - Assert.NotNull(capturedHandler!.SslOptions.RemoteCertificateValidationCallback); - - var callback = capturedHandler.SslOptions.RemoteCertificateValidationCallback!; -#pragma warning disable SYSLIB0057 - using var serverCertificate = new X509Certificate2(trustedRoot.Export(X509ContentType.Cert)); -#pragma warning restore SYSLIB0057 - var result = callback(new object(), serverCertificate, null, SslPolicyErrors.RemoteCertificateChainErrors); - Assert.True(result); - - File.Delete(pemPath); - } - - [Fact] - public void AddSourceHttpClient_LoadsTrustedRootsFromOfflineRoot() - { - var services = new ServiceCollection(); - services.AddLogging(); - - using var trustedRoot = CreateSelfSignedCertificate(); - var offlineRoot = Directory.CreateDirectory(Path.Combine(Path.GetTempPath(), $"stellaops-offline-{Guid.NewGuid():N}")); - var relativePath = Path.Combine("trust", "root.pem"); - var certificatePath = Path.Combine(offlineRoot.FullName, relativePath); - Directory.CreateDirectory(Path.GetDirectoryName(certificatePath)!); - WriteCertificatePem(trustedRoot, certificatePath); - - var configuration = new ConfigurationBuilder() - .AddInMemoryCollection(new Dictionary<string, string?> - { - [$"concelier:{OfflineRootKey}"] = offlineRoot.FullName, - [$"concelier:httpClients:source.nkcki:{TrustedRootPathsKey}:0"] = relativePath, - }) - .Build(); - - services.AddSingleton<IConfiguration>(configuration); - - SocketsHttpHandler? 
capturedHandler = null; - services.AddSourceHttpClient("source.nkcki", (_, options) => - { - options.AllowedHosts.Add("example.test"); - options.ConfigureHandler = handler => capturedHandler = handler; - }); - - using var provider = services.BuildServiceProvider(); - var factory = provider.GetRequiredService<IHttpClientFactory>(); - _ = factory.CreateClient("source.nkcki"); - - var monitor = provider.GetRequiredService<IOptionsMonitor<SourceHttpClientOptions>>(); - var configuredOptions = monitor.Get("source.nkcki"); - - Assert.False(configuredOptions.AllowInvalidServerCertificates); - Assert.NotEmpty(configuredOptions.TrustedRootCertificates); - - using (var manualChain = new X509Chain()) - { - manualChain.ChainPolicy.TrustMode = X509ChainTrustMode.CustomRootTrust; - manualChain.ChainPolicy.CustomTrustStore.AddRange(configuredOptions.TrustedRootCertificates.ToArray()); - manualChain.ChainPolicy.RevocationMode = X509RevocationMode.NoCheck; - manualChain.ChainPolicy.VerificationFlags = X509VerificationFlags.NoFlag; -#pragma warning disable SYSLIB0057 - using var manualServerCertificate = new X509Certificate2(trustedRoot.Export(X509ContentType.Cert)); -#pragma warning restore SYSLIB0057 - Assert.True(manualChain.Build(manualServerCertificate)); - } - Assert.All(configuredOptions.TrustedRootCertificates, certificate => Assert.NotEqual(IntPtr.Zero, certificate.Handle)); - - Assert.NotNull(capturedHandler); - var callback = capturedHandler!.SslOptions.RemoteCertificateValidationCallback; - Assert.NotNull(callback); -#pragma warning disable SYSLIB0057 - using var serverCertificate = new X509Certificate2(trustedRoot.Export(X509ContentType.Cert)); -#pragma warning restore SYSLIB0057 - using var chain = new X509Chain(); - chain.ChainPolicy.CustomTrustStore.Add(serverCertificate); - chain.ChainPolicy.TrustMode = X509ChainTrustMode.CustomRootTrust; - chain.ChainPolicy.RevocationMode = X509RevocationMode.NoCheck; - _ = chain.Build(serverCertificate); - var validationResult = callback!(new object(), serverCertificate, chain, SslPolicyErrors.RemoteCertificateChainErrors); - Assert.True(validationResult); - - Directory.Delete(offlineRoot.FullName, recursive: true); - } - - [Fact] - public void AddSourceHttpClient_LoadsConfigurationFromSourceHttpSection() - { - var services = new ServiceCollection(); - services.AddLogging(); - - using var trustedRoot = CreateSelfSignedCertificate(); - var offlineRoot = Directory.CreateDirectory(Path.Combine(Path.GetTempPath(), $"stellaops-offline-{Guid.NewGuid():N}")); - var relativePath = Path.Combine("certs", "root.pem"); - var certificatePath = Path.Combine(offlineRoot.FullName, relativePath); - Directory.CreateDirectory(Path.GetDirectoryName(certificatePath)!); - WriteCertificatePem(trustedRoot, certificatePath); - - var configuration = new ConfigurationBuilder() - .AddInMemoryCollection(new Dictionary<string, string?> - { - [$"concelier:{OfflineRootKey}"] = offlineRoot.FullName, - [$"concelier:sources:nkcki:http:{TrustedRootPathsKey}:0"] = relativePath, - }) - .Build(); - - services.AddSingleton<IConfiguration>(configuration); - - SocketsHttpHandler? 
capturedHandler = null; - services.AddSourceHttpClient("source.nkcki", (_, options) => - { - options.AllowedHosts.Add("example.test"); - options.ConfigureHandler = handler => capturedHandler = handler; - }); - - using var provider = services.BuildServiceProvider(); - _ = provider.GetRequiredService<IHttpClientFactory>().CreateClient("source.nkcki"); - - var configuredOptions = provider.GetRequiredService<IOptionsMonitor<SourceHttpClientOptions>>().Get("source.nkcki"); - Assert.False(configuredOptions.AllowInvalidServerCertificates); - Assert.NotEmpty(configuredOptions.TrustedRootCertificates); - - using (var manualChain = new X509Chain()) - { - manualChain.ChainPolicy.TrustMode = X509ChainTrustMode.CustomRootTrust; - manualChain.ChainPolicy.CustomTrustStore.AddRange(configuredOptions.TrustedRootCertificates.ToArray()); - manualChain.ChainPolicy.RevocationMode = X509RevocationMode.NoCheck; - manualChain.ChainPolicy.VerificationFlags = X509VerificationFlags.NoFlag; -#pragma warning disable SYSLIB0057 - using var manualServerCertificate = new X509Certificate2(trustedRoot.Export(X509ContentType.Cert)); -#pragma warning restore SYSLIB0057 - Assert.True(manualChain.Build(manualServerCertificate)); - } - Assert.All(configuredOptions.TrustedRootCertificates, certificate => Assert.NotEqual(IntPtr.Zero, certificate.Handle)); - - Assert.NotNull(capturedHandler); - var callback = capturedHandler!.SslOptions.RemoteCertificateValidationCallback; - Assert.NotNull(callback); -#pragma warning disable SYSLIB0057 - using var serverCertificate = new X509Certificate2(trustedRoot.Export(X509ContentType.Cert)); -#pragma warning restore SYSLIB0057 - using var chain = new X509Chain(); - chain.ChainPolicy.CustomTrustStore.Add(serverCertificate); - chain.ChainPolicy.TrustMode = X509ChainTrustMode.CustomRootTrust; - chain.ChainPolicy.RevocationMode = X509RevocationMode.NoCheck; - _ = chain.Build(serverCertificate); - var validationResult = callback!(new object(), serverCertificate, chain, SslPolicyErrors.RemoteCertificateChainErrors); - Assert.True(validationResult); - - Directory.Delete(offlineRoot.FullName, recursive: true); - } - - private static X509Certificate2 CreateSelfSignedCertificate() - { - using var rsa = RSA.Create(2048); - var request = new CertificateRequest("CN=StellaOps Test Root", rsa, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1); - request.CertificateExtensions.Add(new X509BasicConstraintsExtension(true, false, 0, true)); - request.CertificateExtensions.Add(new X509KeyUsageExtension(X509KeyUsageFlags.KeyCertSign | X509KeyUsageFlags.CrlSign, true)); - request.CertificateExtensions.Add(new X509SubjectKeyIdentifierExtension(request.PublicKey, false)); - - return request.CreateSelfSigned(DateTimeOffset.UtcNow.AddDays(-1), DateTimeOffset.UtcNow.AddYears(5)); - } - - private static void WriteCertificatePem(X509Certificate2 certificate, string path) - { - var builder = new StringBuilder(); - builder.AppendLine("-----BEGIN CERTIFICATE-----"); - builder.AppendLine(Convert.ToBase64String(certificate.Export(X509ContentType.Cert), Base64FormattingOptions.InsertLineBreaks)); - builder.AppendLine("-----END CERTIFICATE-----"); - File.WriteAllText(path, builder.ToString(), Encoding.ASCII); - } - - private const string AllowInvalidKey = "allowInvalidCertificates"; - private const string TrustedRootPathsKey = "trustedRootPaths"; - private const string OfflineRootKey = "offlineRoot"; - private const string ProxySection = "proxy"; - private const string ProxyAddressKey = "address"; - private const string 
ProxyBypassOnLocalKey = "bypassOnLocal"; - private const string ProxyBypassListKey = "bypassList"; - private const string ProxyUseDefaultCredentialsKey = "useDefaultCredentials"; - private const string ProxyUsernameKey = "username"; - private const string ProxyPasswordKey = "password"; -} +using System; +using System.Collections.Generic; +using System.IO; +using System.Net; +using System.Net.Http; +using System.Net.Security; +using System.Security.Cryptography; +using System.Security.Cryptography.X509Certificates; +using System.Text; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.Options; +using StellaOps.Concelier.Connector.Common.Http; + +namespace StellaOps.Concelier.Connector.Common.Tests; + +public sealed class SourceHttpClientBuilderTests +{ + [Fact] + public void AddSourceHttpClient_ConfiguresVersionAndHandler() + { + var services = new ServiceCollection(); + services.AddLogging(); + services.AddSingleton<IConfiguration>(new ConfigurationBuilder().Build()); + + bool configureInvoked = false; + bool? observedEnableMultiple = null; + SocketsHttpHandler? capturedHandler = null; + + services.AddSourceHttpClient("source.test", (_, options) => + { + options.AllowedHosts.Add("example.test"); + options.RequestVersion = HttpVersion.Version20; + options.VersionPolicy = HttpVersionPolicy.RequestVersionOrLower; + options.EnableMultipleHttp2Connections = false; + options.ConfigureHandler = handler => + { + capturedHandler = handler; + observedEnableMultiple = handler.EnableMultipleHttp2Connections; + configureInvoked = true; + }; + }); + + using var provider = services.BuildServiceProvider(); + var factory = provider.GetRequiredService<IHttpClientFactory>(); + + var client = factory.CreateClient("source.test"); + + Assert.Equal(HttpVersion.Version20, client.DefaultRequestVersion); + Assert.Equal(HttpVersionPolicy.RequestVersionOrLower, client.DefaultVersionPolicy); + Assert.True(configureInvoked); + Assert.False(observedEnableMultiple); + Assert.NotNull(capturedHandler); + } + + [Fact] + public void AddSourceHttpClient_LoadsProxyConfiguration() + { + var services = new ServiceCollection(); + services.AddLogging(); + + var configuration = new ConfigurationBuilder() + .AddInMemoryCollection(new Dictionary<string, string?> + { + [$"concelier:httpClients:source.icscisa:{ProxySection}:{ProxyAddressKey}"] = "http://proxy.local:8080", + [$"concelier:httpClients:source.icscisa:{ProxySection}:{ProxyBypassOnLocalKey}"] = "false", + [$"concelier:httpClients:source.icscisa:{ProxySection}:{ProxyBypassListKey}:0"] = "localhost", + [$"concelier:httpClients:source.icscisa:{ProxySection}:{ProxyBypassListKey}:1"] = "127.0.0.1", + [$"concelier:httpClients:source.icscisa:{ProxySection}:{ProxyUseDefaultCredentialsKey}"] = "false", + [$"concelier:httpClients:source.icscisa:{ProxySection}:{ProxyUsernameKey}"] = "svc-concelier", + [$"concelier:httpClients:source.icscisa:{ProxySection}:{ProxyPasswordKey}"] = "s3cr3t!", + }) + .Build(); + + services.AddSingleton<IConfiguration>(configuration); + + services.AddSourceHttpClient("source.icscisa", (_, options) => + { + options.AllowedHosts.Add("content.govdelivery.com"); + options.ProxyAddress = new Uri("http://configure.local:9000"); + }); + + using var provider = services.BuildServiceProvider(); + _ = provider.GetRequiredService<IHttpClientFactory>().CreateClient("source.icscisa"); + + var resolvedConfiguration = provider.GetRequiredService<IConfiguration>(); + var proxySection = resolvedConfiguration 
+ .GetSection("concelier") + .GetSection("httpClients") + .GetSection("source.icscisa") + .GetSection("proxy"); + Assert.True(proxySection.Exists()); + Assert.Equal("http://proxy.local:8080", proxySection[ProxyAddressKey]); + + var configuredOptions = provider.GetRequiredService<IOptionsMonitor<SourceHttpClientOptions>>().Get("source.icscisa"); + Assert.NotNull(configuredOptions.ProxyAddress); + Assert.Equal(new Uri("http://proxy.local:8080"), configuredOptions.ProxyAddress); + Assert.False(configuredOptions.ProxyBypassOnLocal); + Assert.Contains("localhost", configuredOptions.ProxyBypassList, StringComparer.OrdinalIgnoreCase); + Assert.Contains("127.0.0.1", configuredOptions.ProxyBypassList); + Assert.False(configuredOptions.ProxyUseDefaultCredentials); + Assert.Equal("svc-concelier", configuredOptions.ProxyUsername); + Assert.Equal("s3cr3t!", configuredOptions.ProxyPassword); + } + + [Fact] + public void AddSourceHttpClient_UsesConfigurationToBypassValidation() + { + var services = new ServiceCollection(); + services.AddLogging(); + using var trustedRoot = CreateSelfSignedCertificate(); + var pemPath = Path.Combine(Path.GetTempPath(), $"stellaops-trust-{Guid.NewGuid():N}.pem"); + WriteCertificatePem(trustedRoot, pemPath); + + var configuration = new ConfigurationBuilder() + .AddInMemoryCollection(new Dictionary<string, string?> + { + [$"concelier:httpClients:source.acsc:{AllowInvalidKey}"] = "true", + [$"concelier:httpClients:source.acsc:{TrustedRootPathsKey}:0"] = pemPath, + }) + .Build(); + + services.AddSingleton<IConfiguration>(configuration); + + bool configureInvoked = false; + SocketsHttpHandler? capturedHandler = null; + + services.AddSourceHttpClient("source.acsc", (_, options) => + { + options.AllowedHosts.Add("example.test"); + options.ConfigureHandler = handler => + { + capturedHandler = handler; + configureInvoked = true; + }; + }); + + using var provider = services.BuildServiceProvider(); + var factory = provider.GetRequiredService<IHttpClientFactory>(); + + var client = factory.CreateClient("source.acsc"); + var optionsMonitor = provider.GetRequiredService<IOptionsMonitor<SourceHttpClientOptions>>(); + var configuredOptions = optionsMonitor.Get("source.acsc"); + + Assert.True(configureInvoked); + Assert.NotNull(capturedHandler); + Assert.True(configuredOptions.AllowInvalidServerCertificates); + Assert.NotNull(capturedHandler!.SslOptions.RemoteCertificateValidationCallback); + + var callback = capturedHandler.SslOptions.RemoteCertificateValidationCallback!; +#pragma warning disable SYSLIB0057 + using var serverCertificate = new X509Certificate2(trustedRoot.Export(X509ContentType.Cert)); +#pragma warning restore SYSLIB0057 + var result = callback(new object(), serverCertificate, null, SslPolicyErrors.RemoteCertificateChainErrors); + Assert.True(result); + + File.Delete(pemPath); + } + + [Fact] + public void AddSourceHttpClient_LoadsTrustedRootsFromOfflineRoot() + { + var services = new ServiceCollection(); + services.AddLogging(); + + using var trustedRoot = CreateSelfSignedCertificate(); + var offlineRoot = Directory.CreateDirectory(Path.Combine(Path.GetTempPath(), $"stellaops-offline-{Guid.NewGuid():N}")); + var relativePath = Path.Combine("trust", "root.pem"); + var certificatePath = Path.Combine(offlineRoot.FullName, relativePath); + Directory.CreateDirectory(Path.GetDirectoryName(certificatePath)!); + WriteCertificatePem(trustedRoot, certificatePath); + + var configuration = new ConfigurationBuilder() + .AddInMemoryCollection(new Dictionary<string, string?> + { + 
[$"concelier:{OfflineRootKey}"] = offlineRoot.FullName, + [$"concelier:httpClients:source.nkcki:{TrustedRootPathsKey}:0"] = relativePath, + }) + .Build(); + + services.AddSingleton<IConfiguration>(configuration); + + SocketsHttpHandler? capturedHandler = null; + services.AddSourceHttpClient("source.nkcki", (_, options) => + { + options.AllowedHosts.Add("example.test"); + options.ConfigureHandler = handler => capturedHandler = handler; + }); + + using var provider = services.BuildServiceProvider(); + var factory = provider.GetRequiredService<IHttpClientFactory>(); + _ = factory.CreateClient("source.nkcki"); + + var monitor = provider.GetRequiredService<IOptionsMonitor<SourceHttpClientOptions>>(); + var configuredOptions = monitor.Get("source.nkcki"); + + Assert.False(configuredOptions.AllowInvalidServerCertificates); + Assert.NotEmpty(configuredOptions.TrustedRootCertificates); + + using (var manualChain = new X509Chain()) + { + manualChain.ChainPolicy.TrustMode = X509ChainTrustMode.CustomRootTrust; + manualChain.ChainPolicy.CustomTrustStore.AddRange(configuredOptions.TrustedRootCertificates.ToArray()); + manualChain.ChainPolicy.RevocationMode = X509RevocationMode.NoCheck; + manualChain.ChainPolicy.VerificationFlags = X509VerificationFlags.NoFlag; +#pragma warning disable SYSLIB0057 + using var manualServerCertificate = new X509Certificate2(trustedRoot.Export(X509ContentType.Cert)); +#pragma warning restore SYSLIB0057 + Assert.True(manualChain.Build(manualServerCertificate)); + } + Assert.All(configuredOptions.TrustedRootCertificates, certificate => Assert.NotEqual(IntPtr.Zero, certificate.Handle)); + + Assert.NotNull(capturedHandler); + var callback = capturedHandler!.SslOptions.RemoteCertificateValidationCallback; + Assert.NotNull(callback); +#pragma warning disable SYSLIB0057 + using var serverCertificate = new X509Certificate2(trustedRoot.Export(X509ContentType.Cert)); +#pragma warning restore SYSLIB0057 + using var chain = new X509Chain(); + chain.ChainPolicy.CustomTrustStore.Add(serverCertificate); + chain.ChainPolicy.TrustMode = X509ChainTrustMode.CustomRootTrust; + chain.ChainPolicy.RevocationMode = X509RevocationMode.NoCheck; + _ = chain.Build(serverCertificate); + var validationResult = callback!(new object(), serverCertificate, chain, SslPolicyErrors.RemoteCertificateChainErrors); + Assert.True(validationResult); + + Directory.Delete(offlineRoot.FullName, recursive: true); + } + + [Fact] + public void AddSourceHttpClient_LoadsConfigurationFromSourceHttpSection() + { + var services = new ServiceCollection(); + services.AddLogging(); + + using var trustedRoot = CreateSelfSignedCertificate(); + var offlineRoot = Directory.CreateDirectory(Path.Combine(Path.GetTempPath(), $"stellaops-offline-{Guid.NewGuid():N}")); + var relativePath = Path.Combine("certs", "root.pem"); + var certificatePath = Path.Combine(offlineRoot.FullName, relativePath); + Directory.CreateDirectory(Path.GetDirectoryName(certificatePath)!); + WriteCertificatePem(trustedRoot, certificatePath); + + var configuration = new ConfigurationBuilder() + .AddInMemoryCollection(new Dictionary<string, string?> + { + [$"concelier:{OfflineRootKey}"] = offlineRoot.FullName, + [$"concelier:sources:nkcki:http:{TrustedRootPathsKey}:0"] = relativePath, + }) + .Build(); + + services.AddSingleton<IConfiguration>(configuration); + + SocketsHttpHandler? 
capturedHandler = null; + services.AddSourceHttpClient("source.nkcki", (_, options) => + { + options.AllowedHosts.Add("example.test"); + options.ConfigureHandler = handler => capturedHandler = handler; + }); + + using var provider = services.BuildServiceProvider(); + _ = provider.GetRequiredService<IHttpClientFactory>().CreateClient("source.nkcki"); + + var configuredOptions = provider.GetRequiredService<IOptionsMonitor<SourceHttpClientOptions>>().Get("source.nkcki"); + Assert.False(configuredOptions.AllowInvalidServerCertificates); + Assert.NotEmpty(configuredOptions.TrustedRootCertificates); + + using (var manualChain = new X509Chain()) + { + manualChain.ChainPolicy.TrustMode = X509ChainTrustMode.CustomRootTrust; + manualChain.ChainPolicy.CustomTrustStore.AddRange(configuredOptions.TrustedRootCertificates.ToArray()); + manualChain.ChainPolicy.RevocationMode = X509RevocationMode.NoCheck; + manualChain.ChainPolicy.VerificationFlags = X509VerificationFlags.NoFlag; +#pragma warning disable SYSLIB0057 + using var manualServerCertificate = new X509Certificate2(trustedRoot.Export(X509ContentType.Cert)); +#pragma warning restore SYSLIB0057 + Assert.True(manualChain.Build(manualServerCertificate)); + } + Assert.All(configuredOptions.TrustedRootCertificates, certificate => Assert.NotEqual(IntPtr.Zero, certificate.Handle)); + + Assert.NotNull(capturedHandler); + var callback = capturedHandler!.SslOptions.RemoteCertificateValidationCallback; + Assert.NotNull(callback); +#pragma warning disable SYSLIB0057 + using var serverCertificate = new X509Certificate2(trustedRoot.Export(X509ContentType.Cert)); +#pragma warning restore SYSLIB0057 + using var chain = new X509Chain(); + chain.ChainPolicy.CustomTrustStore.Add(serverCertificate); + chain.ChainPolicy.TrustMode = X509ChainTrustMode.CustomRootTrust; + chain.ChainPolicy.RevocationMode = X509RevocationMode.NoCheck; + _ = chain.Build(serverCertificate); + var validationResult = callback!(new object(), serverCertificate, chain, SslPolicyErrors.RemoteCertificateChainErrors); + Assert.True(validationResult); + + Directory.Delete(offlineRoot.FullName, recursive: true); + } + + private static X509Certificate2 CreateSelfSignedCertificate() + { + using var rsa = RSA.Create(2048); + var request = new CertificateRequest("CN=StellaOps Test Root", rsa, HashAlgorithmName.SHA256, RSASignaturePadding.Pkcs1); + request.CertificateExtensions.Add(new X509BasicConstraintsExtension(true, false, 0, true)); + request.CertificateExtensions.Add(new X509KeyUsageExtension(X509KeyUsageFlags.KeyCertSign | X509KeyUsageFlags.CrlSign, true)); + request.CertificateExtensions.Add(new X509SubjectKeyIdentifierExtension(request.PublicKey, false)); + + return request.CreateSelfSigned(DateTimeOffset.UtcNow.AddDays(-1), DateTimeOffset.UtcNow.AddYears(5)); + } + + private static void WriteCertificatePem(X509Certificate2 certificate, string path) + { + var builder = new StringBuilder(); + builder.AppendLine("-----BEGIN CERTIFICATE-----"); + builder.AppendLine(Convert.ToBase64String(certificate.Export(X509ContentType.Cert), Base64FormattingOptions.InsertLineBreaks)); + builder.AppendLine("-----END CERTIFICATE-----"); + File.WriteAllText(path, builder.ToString(), Encoding.ASCII); + } + + private const string AllowInvalidKey = "allowInvalidCertificates"; + private const string TrustedRootPathsKey = "trustedRootPaths"; + private const string OfflineRootKey = "offlineRoot"; + private const string ProxySection = "proxy"; + private const string ProxyAddressKey = "address"; + private const string 
ProxyBypassOnLocalKey = "bypassOnLocal";
+    private const string ProxyBypassListKey = "bypassList";
+    private const string ProxyUseDefaultCredentialsKey = "useDefaultCredentials";
+    private const string ProxyUsernameKey = "username";
+    private const string ProxyPasswordKey = "password";
+}
diff --git a/src/StellaOps.Concelier.Connector.Common.Tests/Common/TimeWindowCursorPlannerTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Common.Tests/Common/TimeWindowCursorPlannerTests.cs
similarity index 100%
rename from src/StellaOps.Concelier.Connector.Common.Tests/Common/TimeWindowCursorPlannerTests.cs
rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Common.Tests/Common/TimeWindowCursorPlannerTests.cs
diff --git a/src/StellaOps.Concelier.Connector.Common.Tests/Common/UrlNormalizerTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Common.Tests/Common/UrlNormalizerTests.cs
similarity index 100%
rename from src/StellaOps.Concelier.Connector.Common.Tests/Common/UrlNormalizerTests.cs
rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Common.Tests/Common/UrlNormalizerTests.cs
diff --git a/src/StellaOps.Concelier.Connector.Common.Tests/Json/JsonSchemaValidatorTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Common.Tests/Json/JsonSchemaValidatorTests.cs
similarity index 100%
rename from src/StellaOps.Concelier.Connector.Common.Tests/Json/JsonSchemaValidatorTests.cs
rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Common.Tests/Json/JsonSchemaValidatorTests.cs
diff --git a/src/StellaOps.Concelier.Connector.Common.Tests/StellaOps.Concelier.Connector.Common.Tests.csproj b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Common.Tests/StellaOps.Concelier.Connector.Common.Tests.csproj
similarity index 54%
rename from src/StellaOps.Concelier.Connector.Common.Tests/StellaOps.Concelier.Connector.Common.Tests.csproj
rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Common.Tests/StellaOps.Concelier.Connector.Common.Tests.csproj
index 1fb726a4..99076293 100644
--- a/src/StellaOps.Concelier.Connector.Common.Tests/StellaOps.Concelier.Connector.Common.Tests.csproj
+++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Common.Tests/StellaOps.Concelier.Connector.Common.Tests.csproj
@@ -1,10 +1,11 @@
-<Project Sdk="Microsoft.NET.Sdk">
-  <PropertyGroup>
-    <TargetFramework>net10.0</TargetFramework>
-    <ImplicitUsings>enable</ImplicitUsings>
-    <Nullable>enable</Nullable>
-  </PropertyGroup>
-  <ItemGroup>
-    <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
-  </ItemGroup>
-</Project>
+<?xml version='1.0' encoding='utf-8'?>
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <TargetFramework>net10.0</TargetFramework>
+    <ImplicitUsings>enable</ImplicitUsings>
+    <Nullable>enable</Nullable>
+  </PropertyGroup>
+  <ItemGroup>
+    <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
+  </ItemGroup>
+</Project>
\ No newline at end of file
diff --git a/src/StellaOps.Concelier.Connector.Common.Tests/Xml/XmlSchemaValidatorTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Common.Tests/Xml/XmlSchemaValidatorTests.cs
similarity index 100%
rename from src/StellaOps.Concelier.Connector.Common.Tests/Xml/XmlSchemaValidatorTests.cs
rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Common.Tests/Xml/XmlSchemaValidatorTests.cs
diff --git a/src/StellaOps.Concelier.Connector.Cve.Tests/Cve/CveConnectorTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Cve.Tests/Cve/CveConnectorTests.cs
similarity index 100%
rename from src/StellaOps.Concelier.Connector.Cve.Tests/Cve/CveConnectorTests.cs
rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Cve.Tests/Cve/CveConnectorTests.cs
diff --git a/src/StellaOps.Concelier.Connector.Cve.Tests/Fixtures/cve-CVE-2024-0001.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Cve.Tests/Fixtures/cve-CVE-2024-0001.json
similarity index 100%
rename from src/StellaOps.Concelier.Connector.Cve.Tests/Fixtures/cve-CVE-2024-0001.json
rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Cve.Tests/Fixtures/cve-CVE-2024-0001.json
diff --git a/src/StellaOps.Concelier.Connector.Cve.Tests/Fixtures/cve-list.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Cve.Tests/Fixtures/cve-list.json
similarity index 100%
rename from src/StellaOps.Concelier.Connector.Cve.Tests/Fixtures/cve-list.json
rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Cve.Tests/Fixtures/cve-list.json
diff --git a/src/StellaOps.Concelier.Connector.Cve.Tests/Fixtures/expected-CVE-2024-0001.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Cve.Tests/Fixtures/expected-CVE-2024-0001.json
similarity index 100%
rename from src/StellaOps.Concelier.Connector.Cve.Tests/Fixtures/expected-CVE-2024-0001.json
rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Cve.Tests/Fixtures/expected-CVE-2024-0001.json
diff --git a/src/Concelier/__Tests/StellaOps.Concelier.Connector.Cve.Tests/StellaOps.Concelier.Connector.Cve.Tests.csproj b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Cve.Tests/StellaOps.Concelier.Connector.Cve.Tests.csproj
new file mode 100644
index 00000000..ac9268e6
--- /dev/null
+++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Cve.Tests/StellaOps.Concelier.Connector.Cve.Tests.csproj
@@ -0,0 +1,18 @@
+<?xml version='1.0' encoding='utf-8'?>
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <TargetFramework>net10.0</TargetFramework>
+    <ImplicitUsings>enable</ImplicitUsings>
+    <Nullable>enable</Nullable>
+  </PropertyGroup>
+  <ItemGroup>
+    <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
+    <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Cve/StellaOps.Concelier.Connector.Cve.csproj" />
+    <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
+    <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
+    <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" />
+  </ItemGroup>
+  <ItemGroup>
+    <None Include="Fixtures/*.json" CopyToOutputDirectory="Always" />
+  </ItemGroup>
+</Project>
\ No newline at end of file
diff --git a/src/StellaOps.Concelier.Connector.Distro.Debian.Tests/DebianConnectorTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Debian.Tests/DebianConnectorTests.cs
similarity index 100%
rename from src/StellaOps.Concelier.Connector.Distro.Debian.Tests/DebianConnectorTests.cs
rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Debian.Tests/DebianConnectorTests.cs
diff --git a/src/StellaOps.Concelier.Connector.Distro.Debian.Tests/DebianMapperTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Debian.Tests/DebianMapperTests.cs
similarity index 100%
rename from src/StellaOps.Concelier.Connector.Distro.Debian.Tests/DebianMapperTests.cs
rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Debian.Tests/DebianMapperTests.cs
diff --git a/src/StellaOps.Concelier.Connector.Distro.Debian.Tests/Source/Distro/Debian/Fixtures/debian-detail-dsa-2024-123.html b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Debian.Tests/Source/Distro/Debian/Fixtures/debian-detail-dsa-2024-123.html
similarity index 100%
rename from src/StellaOps.Concelier.Connector.Distro.Debian.Tests/Source/Distro/Debian/Fixtures/debian-detail-dsa-2024-123.html
rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Debian.Tests/Source/Distro/Debian/Fixtures/debian-detail-dsa-2024-123.html
diff --git a/src/StellaOps.Concelier.Connector.Distro.Debian.Tests/Source/Distro/Debian/Fixtures/debian-detail-dsa-2024-124.html b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Debian.Tests/Source/Distro/Debian/Fixtures/debian-detail-dsa-2024-124.html
similarity index 100%
rename from src/StellaOps.Concelier.Connector.Distro.Debian.Tests/Source/Distro/Debian/Fixtures/debian-detail-dsa-2024-124.html
rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Debian.Tests/Source/Distro/Debian/Fixtures/debian-detail-dsa-2024-124.html
diff --git a/src/StellaOps.Concelier.Connector.Distro.Debian.Tests/Source/Distro/Debian/Fixtures/debian-list.txt b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Debian.Tests/Source/Distro/Debian/Fixtures/debian-list.txt
similarity index 100%
rename from src/StellaOps.Concelier.Connector.Distro.Debian.Tests/Source/Distro/Debian/Fixtures/debian-list.txt
rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Debian.Tests/Source/Distro/Debian/Fixtures/debian-list.txt
diff --git a/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Debian.Tests/StellaOps.Concelier.Connector.Distro.Debian.Tests.csproj b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Debian.Tests/StellaOps.Concelier.Connector.Distro.Debian.Tests.csproj
new file mode 100644
index 00000000..5f5b0226
--- /dev/null
+++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Debian.Tests/StellaOps.Concelier.Connector.Distro.Debian.Tests.csproj
@@ -0,0 +1,14 @@
+<?xml version='1.0' encoding='utf-8'?>
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <TargetFramework>net10.0</TargetFramework>
+    <ImplicitUsings>enable</ImplicitUsings>
+    <Nullable>enable</Nullable>
+  </PropertyGroup>
+  <ItemGroup>
+    <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
+    <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
+    <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Distro.Debian/StellaOps.Concelier.Connector.Distro.Debian.csproj" />
+    <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
+  </ItemGroup>
+</Project>
\ No newline at end of file
diff --git a/src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/csaf-rhsa-2025-0001.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/csaf-rhsa-2025-0001.json
similarity index 100%
rename from src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/csaf-rhsa-2025-0001.json
rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/csaf-rhsa-2025-0001.json
diff --git a/src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/csaf-rhsa-2025-0002.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/csaf-rhsa-2025-0002.json
similarity index 100%
rename from src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/csaf-rhsa-2025-0002.json
rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/csaf-rhsa-2025-0002.json
diff --git a/src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/csaf-rhsa-2025-0003.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/csaf-rhsa-2025-0003.json
similarity index 100%
rename from src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/csaf-rhsa-2025-0003.json
rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/csaf-rhsa-2025-0003.json
diff --git a/src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/rhsa-2025-0001.snapshot.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/rhsa-2025-0001.snapshot.json
similarity index 100%
rename from src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/rhsa-2025-0001.snapshot.json
rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/rhsa-2025-0001.snapshot.json
diff --git a/src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/rhsa-2025-0002.snapshot.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/rhsa-2025-0002.snapshot.json
similarity index 100%
rename from src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/rhsa-2025-0002.snapshot.json
rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/rhsa-2025-0002.snapshot.json
diff --git a/src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/rhsa-2025-0003.snapshot.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/rhsa-2025-0003.snapshot.json
similarity index 100%
rename from src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/rhsa-2025-0003.snapshot.json
rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/rhsa-2025-0003.snapshot.json
diff --git a/src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/summary-page1-repeat.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/summary-page1-repeat.json
similarity index 100%
rename from src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/summary-page1-repeat.json
rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/summary-page1-repeat.json
diff --git a/src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/summary-page1.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/summary-page1.json
similarity index 100%
rename from src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/summary-page1.json
rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/summary-page1.json
diff --git a/src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/summary-page2.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/summary-page2.json
similarity index 100%
rename from src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/summary-page2.json
rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/summary-page2.json
diff --git a/src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/summary-page3.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/summary-page3.json
similarity index 100%
rename from src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/summary-page3.json
rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/Fixtures/summary-page3.json
diff --git a/src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/RedHatConnectorHarnessTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/RedHatConnectorHarnessTests.cs
similarity index 100%
rename from src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/RedHatConnectorHarnessTests.cs
rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/RedHatConnectorHarnessTests.cs
diff --git a/src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/RedHatConnectorTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/RedHatConnectorTests.cs
similarity index 100%
rename from src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/RedHatConnectorTests.cs
rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests/RedHat/RedHatConnectorTests.cs
diff --git a/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests.csproj b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests.csproj
new file mode 100644
index 00000000..fcbca953
--- /dev/null
+++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests.csproj
@@ -0,0 +1,17 @@
+<?xml version='1.0' encoding='utf-8'?>
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <TargetFramework>net10.0</TargetFramework>
+    <ImplicitUsings>enable</ImplicitUsings>
+    <Nullable>enable</Nullable>
+  </PropertyGroup>
+  <ItemGroup>
+    <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" />
+    <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
+    <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Distro.RedHat/StellaOps.Concelier.Connector.Distro.RedHat.csproj" />
+    <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" />
+  </ItemGroup>
+  <ItemGroup>
+    <None Include="RedHat/Fixtures/*.json" CopyToOutputDirectory="Always" TargetPath="Source/Distro/RedHat/Fixtures/%(Filename)%(Extension)" />
+  </ItemGroup>
+</Project>
\ No newline at end of file
diff --git a/src/StellaOps.Concelier.Connector.Distro.Suse.Tests/Source/Distro/Suse/Fixtures/suse-changes.csv
b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Suse.Tests/Source/Distro/Suse/Fixtures/suse-changes.csv similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Suse.Tests/Source/Distro/Suse/Fixtures/suse-changes.csv rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Suse.Tests/Source/Distro/Suse/Fixtures/suse-changes.csv diff --git a/src/StellaOps.Concelier.Connector.Distro.Suse.Tests/Source/Distro/Suse/Fixtures/suse-su-2025_0001-1.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Suse.Tests/Source/Distro/Suse/Fixtures/suse-su-2025_0001-1.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Suse.Tests/Source/Distro/Suse/Fixtures/suse-su-2025_0001-1.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Suse.Tests/Source/Distro/Suse/Fixtures/suse-su-2025_0001-1.json diff --git a/src/StellaOps.Concelier.Connector.Distro.Suse.Tests/Source/Distro/Suse/Fixtures/suse-su-2025_0002-1.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Suse.Tests/Source/Distro/Suse/Fixtures/suse-su-2025_0002-1.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Suse.Tests/Source/Distro/Suse/Fixtures/suse-su-2025_0002-1.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Suse.Tests/Source/Distro/Suse/Fixtures/suse-su-2025_0002-1.json diff --git a/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Suse.Tests/StellaOps.Concelier.Connector.Distro.Suse.Tests.csproj b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Suse.Tests/StellaOps.Concelier.Connector.Distro.Suse.Tests.csproj new file mode 100644 index 00000000..1ea51cb0 --- /dev/null +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Suse.Tests/StellaOps.Concelier.Connector.Distro.Suse.Tests.csproj @@ -0,0 +1,19 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Distro.Suse/StellaOps.Concelier.Connector.Distro.Suse.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> + </ItemGroup> + <ItemGroup> + <None Update="Source\Distro\Suse\Fixtures\**\*"> + <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> + </None> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Distro.Suse.Tests/SuseConnectorTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Suse.Tests/SuseConnectorTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Suse.Tests/SuseConnectorTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Suse.Tests/SuseConnectorTests.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.Suse.Tests/SuseCsafParserTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Suse.Tests/SuseCsafParserTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Suse.Tests/SuseCsafParserTests.cs rename to 
src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Suse.Tests/SuseCsafParserTests.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.Suse.Tests/SuseMapperTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Suse.Tests/SuseMapperTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Suse.Tests/SuseMapperTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Suse.Tests/SuseMapperTests.cs diff --git a/src/StellaOps.Concelier.Connector.Distro.Ubuntu.Tests/Fixtures/ubuntu-notices-page0.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Ubuntu.Tests/Fixtures/ubuntu-notices-page0.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Ubuntu.Tests/Fixtures/ubuntu-notices-page0.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Ubuntu.Tests/Fixtures/ubuntu-notices-page0.json diff --git a/src/StellaOps.Concelier.Connector.Distro.Ubuntu.Tests/Fixtures/ubuntu-notices-page1.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Ubuntu.Tests/Fixtures/ubuntu-notices-page1.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Ubuntu.Tests/Fixtures/ubuntu-notices-page1.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Ubuntu.Tests/Fixtures/ubuntu-notices-page1.json diff --git a/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Ubuntu.Tests/StellaOps.Concelier.Connector.Distro.Ubuntu.Tests.csproj b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Ubuntu.Tests/StellaOps.Concelier.Connector.Distro.Ubuntu.Tests.csproj new file mode 100644 index 00000000..fc5ffc8b --- /dev/null +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Ubuntu.Tests/StellaOps.Concelier.Connector.Distro.Ubuntu.Tests.csproj @@ -0,0 +1,19 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Distro.Ubuntu/StellaOps.Concelier.Connector.Distro.Ubuntu.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> + </ItemGroup> + <ItemGroup> + <None Update="Fixtures\**\*"> + <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> + </None> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Distro.Ubuntu.Tests/UbuntuConnectorTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Ubuntu.Tests/UbuntuConnectorTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Distro.Ubuntu.Tests/UbuntuConnectorTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Distro.Ubuntu.Tests/UbuntuConnectorTests.cs diff --git a/src/StellaOps.Concelier.Connector.Ghsa.Tests/Fixtures/conflict-ghsa.canonical.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ghsa.Tests/Fixtures/conflict-ghsa.canonical.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Ghsa.Tests/Fixtures/conflict-ghsa.canonical.json rename to 
src/Concelier/__Tests/StellaOps.Concelier.Connector.Ghsa.Tests/Fixtures/conflict-ghsa.canonical.json diff --git a/src/StellaOps.Concelier.Connector.Ghsa.Tests/Fixtures/credit-parity.ghsa.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ghsa.Tests/Fixtures/credit-parity.ghsa.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Ghsa.Tests/Fixtures/credit-parity.ghsa.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ghsa.Tests/Fixtures/credit-parity.ghsa.json diff --git a/src/StellaOps.Concelier.Connector.Ghsa.Tests/Fixtures/credit-parity.nvd.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ghsa.Tests/Fixtures/credit-parity.nvd.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Ghsa.Tests/Fixtures/credit-parity.nvd.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ghsa.Tests/Fixtures/credit-parity.nvd.json diff --git a/src/StellaOps.Concelier.Connector.Ghsa.Tests/Fixtures/credit-parity.osv.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ghsa.Tests/Fixtures/credit-parity.osv.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Ghsa.Tests/Fixtures/credit-parity.osv.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ghsa.Tests/Fixtures/credit-parity.osv.json diff --git a/src/StellaOps.Concelier.Connector.Ghsa.Tests/Fixtures/expected-GHSA-xxxx-yyyy-zzzz.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ghsa.Tests/Fixtures/expected-GHSA-xxxx-yyyy-zzzz.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Ghsa.Tests/Fixtures/expected-GHSA-xxxx-yyyy-zzzz.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ghsa.Tests/Fixtures/expected-GHSA-xxxx-yyyy-zzzz.json diff --git a/src/StellaOps.Concelier.Connector.Ghsa.Tests/Fixtures/ghsa-GHSA-xxxx-yyyy-zzzz.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ghsa.Tests/Fixtures/ghsa-GHSA-xxxx-yyyy-zzzz.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Ghsa.Tests/Fixtures/ghsa-GHSA-xxxx-yyyy-zzzz.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ghsa.Tests/Fixtures/ghsa-GHSA-xxxx-yyyy-zzzz.json diff --git a/src/StellaOps.Concelier.Connector.Ghsa.Tests/Fixtures/ghsa-list.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ghsa.Tests/Fixtures/ghsa-list.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Ghsa.Tests/Fixtures/ghsa-list.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ghsa.Tests/Fixtures/ghsa-list.json diff --git a/src/StellaOps.Concelier.Connector.Ghsa.Tests/Ghsa/GhsaConflictFixtureTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ghsa.Tests/Ghsa/GhsaConflictFixtureTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ghsa.Tests/Ghsa/GhsaConflictFixtureTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ghsa.Tests/Ghsa/GhsaConflictFixtureTests.cs diff --git a/src/StellaOps.Concelier.Connector.Ghsa.Tests/Ghsa/GhsaConnectorTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ghsa.Tests/Ghsa/GhsaConnectorTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ghsa.Tests/Ghsa/GhsaConnectorTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ghsa.Tests/Ghsa/GhsaConnectorTests.cs diff --git a/src/StellaOps.Concelier.Connector.Ghsa.Tests/Ghsa/GhsaCreditParityRegressionTests.cs 
b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ghsa.Tests/Ghsa/GhsaCreditParityRegressionTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ghsa.Tests/Ghsa/GhsaCreditParityRegressionTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ghsa.Tests/Ghsa/GhsaCreditParityRegressionTests.cs diff --git a/src/StellaOps.Concelier.Connector.Ghsa.Tests/Ghsa/GhsaDependencyInjectionRoutineTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ghsa.Tests/Ghsa/GhsaDependencyInjectionRoutineTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ghsa.Tests/Ghsa/GhsaDependencyInjectionRoutineTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ghsa.Tests/Ghsa/GhsaDependencyInjectionRoutineTests.cs diff --git a/src/StellaOps.Concelier.Connector.Ghsa.Tests/Ghsa/GhsaDiagnosticsTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ghsa.Tests/Ghsa/GhsaDiagnosticsTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ghsa.Tests/Ghsa/GhsaDiagnosticsTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ghsa.Tests/Ghsa/GhsaDiagnosticsTests.cs diff --git a/src/StellaOps.Concelier.Connector.Ghsa.Tests/Ghsa/GhsaMapperTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ghsa.Tests/Ghsa/GhsaMapperTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ghsa.Tests/Ghsa/GhsaMapperTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ghsa.Tests/Ghsa/GhsaMapperTests.cs diff --git a/src/StellaOps.Concelier.Connector.Ghsa.Tests/Ghsa/GhsaRateLimitParserTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ghsa.Tests/Ghsa/GhsaRateLimitParserTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ghsa.Tests/Ghsa/GhsaRateLimitParserTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ghsa.Tests/Ghsa/GhsaRateLimitParserTests.cs diff --git a/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ghsa.Tests/StellaOps.Concelier.Connector.Ghsa.Tests.csproj b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ghsa.Tests/StellaOps.Concelier.Connector.Ghsa.Tests.csproj new file mode 100644 index 00000000..c8b9a744 --- /dev/null +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ghsa.Tests/StellaOps.Concelier.Connector.Ghsa.Tests.csproj @@ -0,0 +1,18 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Ghsa/StellaOps.Concelier.Connector.Ghsa.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" /> + </ItemGroup> + <ItemGroup> + <None Include="Fixtures/*.json" CopyToOutputDirectory="Always" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Ics.Cisa.Tests/IcsCisa/Fixtures/icsa-25-123-01.html 
b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ics.Cisa.Tests/IcsCisa/Fixtures/icsa-25-123-01.html similarity index 100% rename from src/StellaOps.Concelier.Connector.Ics.Cisa.Tests/IcsCisa/Fixtures/icsa-25-123-01.html rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ics.Cisa.Tests/IcsCisa/Fixtures/icsa-25-123-01.html diff --git a/src/StellaOps.Concelier.Connector.Ics.Cisa.Tests/IcsCisa/Fixtures/icsma-25-045-01.html b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ics.Cisa.Tests/IcsCisa/Fixtures/icsma-25-045-01.html similarity index 100% rename from src/StellaOps.Concelier.Connector.Ics.Cisa.Tests/IcsCisa/Fixtures/icsma-25-045-01.html rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ics.Cisa.Tests/IcsCisa/Fixtures/icsma-25-045-01.html diff --git a/src/StellaOps.Concelier.Connector.Ics.Cisa.Tests/IcsCisa/Fixtures/sample-feed.xml b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ics.Cisa.Tests/IcsCisa/Fixtures/sample-feed.xml similarity index 100% rename from src/StellaOps.Concelier.Connector.Ics.Cisa.Tests/IcsCisa/Fixtures/sample-feed.xml rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ics.Cisa.Tests/IcsCisa/Fixtures/sample-feed.xml diff --git a/src/StellaOps.Concelier.Connector.Ics.Cisa.Tests/IcsCisa/IcsCisaConnectorMappingTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ics.Cisa.Tests/IcsCisa/IcsCisaConnectorMappingTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ics.Cisa.Tests/IcsCisa/IcsCisaConnectorMappingTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ics.Cisa.Tests/IcsCisa/IcsCisaConnectorMappingTests.cs diff --git a/src/StellaOps.Concelier.Connector.Ics.Cisa.Tests/IcsCisa/IcsCisaFeedParserTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ics.Cisa.Tests/IcsCisa/IcsCisaFeedParserTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ics.Cisa.Tests/IcsCisa/IcsCisaFeedParserTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ics.Cisa.Tests/IcsCisa/IcsCisaFeedParserTests.cs diff --git a/src/StellaOps.Concelier.Connector.Ics.Cisa.Tests/IcsCisaConnectorTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ics.Cisa.Tests/IcsCisaConnectorTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ics.Cisa.Tests/IcsCisaConnectorTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ics.Cisa.Tests/IcsCisaConnectorTests.cs diff --git a/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ics.Cisa.Tests/StellaOps.Concelier.Connector.Ics.Cisa.Tests.csproj b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ics.Cisa.Tests/StellaOps.Concelier.Connector.Ics.Cisa.Tests.csproj new file mode 100644 index 00000000..54a92e04 --- /dev/null +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ics.Cisa.Tests/StellaOps.Concelier.Connector.Ics.Cisa.Tests.csproj @@ -0,0 +1,17 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Ics.Cisa/StellaOps.Concelier.Connector.Ics.Cisa.csproj" /> + 
<ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> + </ItemGroup> + <ItemGroup> + <None Include="IcsCisa/Fixtures/**" CopyToOutputDirectory="Always" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Ics.Kaspersky.Tests/Kaspersky/Fixtures/detail-acme-controller-2024.html b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ics.Kaspersky.Tests/Kaspersky/Fixtures/detail-acme-controller-2024.html similarity index 100% rename from src/StellaOps.Concelier.Connector.Ics.Kaspersky.Tests/Kaspersky/Fixtures/detail-acme-controller-2024.html rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ics.Kaspersky.Tests/Kaspersky/Fixtures/detail-acme-controller-2024.html diff --git a/src/StellaOps.Concelier.Connector.Ics.Kaspersky.Tests/Kaspersky/Fixtures/expected-advisory.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ics.Kaspersky.Tests/Kaspersky/Fixtures/expected-advisory.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Ics.Kaspersky.Tests/Kaspersky/Fixtures/expected-advisory.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ics.Kaspersky.Tests/Kaspersky/Fixtures/expected-advisory.json diff --git a/src/StellaOps.Concelier.Connector.Ics.Kaspersky.Tests/Kaspersky/Fixtures/feed-page1.xml b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ics.Kaspersky.Tests/Kaspersky/Fixtures/feed-page1.xml similarity index 100% rename from src/StellaOps.Concelier.Connector.Ics.Kaspersky.Tests/Kaspersky/Fixtures/feed-page1.xml rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ics.Kaspersky.Tests/Kaspersky/Fixtures/feed-page1.xml diff --git a/src/StellaOps.Concelier.Connector.Ics.Kaspersky.Tests/Kaspersky/KasperskyConnectorTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ics.Kaspersky.Tests/Kaspersky/KasperskyConnectorTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ics.Kaspersky.Tests/Kaspersky/KasperskyConnectorTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ics.Kaspersky.Tests/Kaspersky/KasperskyConnectorTests.cs diff --git a/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ics.Kaspersky.Tests/StellaOps.Concelier.Connector.Ics.Kaspersky.Tests.csproj b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ics.Kaspersky.Tests/StellaOps.Concelier.Connector.Ics.Kaspersky.Tests.csproj new file mode 100644 index 00000000..ef81258b --- /dev/null +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ics.Kaspersky.Tests/StellaOps.Concelier.Connector.Ics.Kaspersky.Tests.csproj @@ -0,0 +1,17 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Ics.Kaspersky/StellaOps.Concelier.Connector.Ics.Kaspersky.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> + </ItemGroup> + <ItemGroup> + <None Include="Kaspersky/Fixtures/**" CopyToOutputDirectory="Always" /> + </ItemGroup> 
+</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Jvn.Tests/Jvn/Fixtures/expected-advisory.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Jvn.Tests/Jvn/Fixtures/expected-advisory.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Jvn.Tests/Jvn/Fixtures/expected-advisory.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Jvn.Tests/Jvn/Fixtures/expected-advisory.json diff --git a/src/StellaOps.Concelier.Connector.Jvn.Tests/Jvn/Fixtures/jvnrss-window1.xml b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Jvn.Tests/Jvn/Fixtures/jvnrss-window1.xml similarity index 100% rename from src/StellaOps.Concelier.Connector.Jvn.Tests/Jvn/Fixtures/jvnrss-window1.xml rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Jvn.Tests/Jvn/Fixtures/jvnrss-window1.xml diff --git a/src/StellaOps.Concelier.Connector.Jvn.Tests/Jvn/Fixtures/vuldef-JVNDB-2024-123456.xml b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Jvn.Tests/Jvn/Fixtures/vuldef-JVNDB-2024-123456.xml similarity index 100% rename from src/StellaOps.Concelier.Connector.Jvn.Tests/Jvn/Fixtures/vuldef-JVNDB-2024-123456.xml rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Jvn.Tests/Jvn/Fixtures/vuldef-JVNDB-2024-123456.xml diff --git a/src/StellaOps.Concelier.Connector.Jvn.Tests/Jvn/JvnConnectorTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Jvn.Tests/Jvn/JvnConnectorTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Jvn.Tests/Jvn/JvnConnectorTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Jvn.Tests/Jvn/JvnConnectorTests.cs diff --git a/src/Concelier/__Tests/StellaOps.Concelier.Connector.Jvn.Tests/StellaOps.Concelier.Connector.Jvn.Tests.csproj b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Jvn.Tests/StellaOps.Concelier.Connector.Jvn.Tests.csproj new file mode 100644 index 00000000..ce970d8b --- /dev/null +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Jvn.Tests/StellaOps.Concelier.Connector.Jvn.Tests.csproj @@ -0,0 +1,17 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Jvn/StellaOps.Concelier.Connector.Jvn.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> + </ItemGroup> + <ItemGroup> + <None Include="Jvn/Fixtures/**" CopyToOutputDirectory="Always" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Kev.Tests/Kev/Fixtures/kev-advisories.snapshot.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Kev.Tests/Kev/Fixtures/kev-advisories.snapshot.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Kev.Tests/Kev/Fixtures/kev-advisories.snapshot.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Kev.Tests/Kev/Fixtures/kev-advisories.snapshot.json diff --git a/src/StellaOps.Concelier.Connector.Kev.Tests/Kev/Fixtures/kev-catalog.json 
b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Kev.Tests/Kev/Fixtures/kev-catalog.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Kev.Tests/Kev/Fixtures/kev-catalog.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Kev.Tests/Kev/Fixtures/kev-catalog.json diff --git a/src/StellaOps.Concelier.Connector.Kev.Tests/Kev/KevConnectorTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Kev.Tests/Kev/KevConnectorTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Kev.Tests/Kev/KevConnectorTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Kev.Tests/Kev/KevConnectorTests.cs diff --git a/src/StellaOps.Concelier.Connector.Kev.Tests/Kev/KevMapperTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Kev.Tests/Kev/KevMapperTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Kev.Tests/Kev/KevMapperTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Kev.Tests/Kev/KevMapperTests.cs diff --git a/src/Concelier/__Tests/StellaOps.Concelier.Connector.Kev.Tests/StellaOps.Concelier.Connector.Kev.Tests.csproj b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Kev.Tests/StellaOps.Concelier.Connector.Kev.Tests.csproj new file mode 100644 index 00000000..9a31377e --- /dev/null +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Kev.Tests/StellaOps.Concelier.Connector.Kev.Tests.csproj @@ -0,0 +1,20 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Kev/StellaOps.Concelier.Connector.Kev.csproj" /> + </ItemGroup> + + <ItemGroup> + <None Include="Kev/Fixtures/**" CopyToOutputDirectory="Always" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Kisa.Tests/Fixtures/kisa-detail.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Kisa.Tests/Fixtures/kisa-detail.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Kisa.Tests/Fixtures/kisa-detail.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Kisa.Tests/Fixtures/kisa-detail.json diff --git a/src/StellaOps.Concelier.Connector.Kisa.Tests/Fixtures/kisa-feed.xml b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Kisa.Tests/Fixtures/kisa-feed.xml similarity index 100% rename from src/StellaOps.Concelier.Connector.Kisa.Tests/Fixtures/kisa-feed.xml rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Kisa.Tests/Fixtures/kisa-feed.xml diff --git a/src/StellaOps.Concelier.Connector.Kisa.Tests/KisaConnectorTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Kisa.Tests/KisaConnectorTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Kisa.Tests/KisaConnectorTests.cs rename to 
src/Concelier/__Tests/StellaOps.Concelier.Connector.Kisa.Tests/KisaConnectorTests.cs
diff --git a/src/StellaOps.Concelier.Connector.Kisa.Tests/StellaOps.Concelier.Connector.Kisa.Tests.csproj b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Kisa.Tests/StellaOps.Concelier.Connector.Kisa.Tests.csproj
similarity index 62%
rename from src/StellaOps.Concelier.Connector.Kisa.Tests/StellaOps.Concelier.Connector.Kisa.Tests.csproj
rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Kisa.Tests/StellaOps.Concelier.Connector.Kisa.Tests.csproj
index c08ff53d..9e3868c0 100644
--- a/src/StellaOps.Concelier.Connector.Kisa.Tests/StellaOps.Concelier.Connector.Kisa.Tests.csproj
+++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Kisa.Tests/StellaOps.Concelier.Connector.Kisa.Tests.csproj
@@ -1,24 +1,25 @@
-<Project Sdk="Microsoft.NET.Sdk">
-  <PropertyGroup>
-    <TargetFramework>net10.0</TargetFramework>
-    <ImplicitUsings>enable</ImplicitUsings>
-    <Nullable>enable</Nullable>
-  </PropertyGroup>
-  <ItemGroup>
-    <ProjectReference Include="../StellaOps.Concelier.Connector.Kisa/StellaOps.Concelier.Connector.Kisa.csproj" />
-    <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
-    <ProjectReference Include="../StellaOps.Concelier.Connector.Common.Tests/StellaOps.Concelier.Connector.Common.Tests.csproj" />
-    <ProjectReference Include="../StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" />
-  </ItemGroup>
-  <ItemGroup>
-    <PackageReference Include="FluentAssertions" Version="6.12.0" />
-  </ItemGroup>
-  <ItemGroup>
-    <None Update="Fixtures/*.json">
-      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
-    </None>
-    <None Update="Fixtures/*.xml">
-      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
-    </None>
-  </ItemGroup>
-</Project>
+<?xml version='1.0' encoding='utf-8'?>
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <TargetFramework>net10.0</TargetFramework>
+    <ImplicitUsings>enable</ImplicitUsings>
+    <Nullable>enable</Nullable>
+  </PropertyGroup>
+  <ItemGroup>
+    <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Kisa/StellaOps.Concelier.Connector.Kisa.csproj" />
+    <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" />
+    <ProjectReference Include="../StellaOps.Concelier.Connector.Common.Tests/StellaOps.Concelier.Connector.Common.Tests.csproj" />
+    <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" />
+  </ItemGroup>
+  <ItemGroup>
+    <PackageReference Include="FluentAssertions" Version="6.12.0" />
+  </ItemGroup>
+  <ItemGroup>
+    <None Update="Fixtures/*.json">
+      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+    </None>
+    <None Update="Fixtures/*.xml">
+      <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory>
+    </None>
+  </ItemGroup>
+</Project>
\ No newline at end of file
diff --git a/src/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/conflict-nvd.canonical.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/conflict-nvd.canonical.json
similarity index 100%
rename from src/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/conflict-nvd.canonical.json
rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/conflict-nvd.canonical.json
diff --git a/src/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/credit-parity.ghsa.json
b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/credit-parity.ghsa.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/credit-parity.ghsa.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/credit-parity.ghsa.json diff --git a/src/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/credit-parity.nvd.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/credit-parity.nvd.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/credit-parity.nvd.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/credit-parity.nvd.json diff --git a/src/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/credit-parity.osv.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/credit-parity.osv.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/credit-parity.osv.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/credit-parity.osv.json diff --git a/src/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/nvd-invalid-schema.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/nvd-invalid-schema.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/nvd-invalid-schema.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/nvd-invalid-schema.json diff --git a/src/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/nvd-multipage-1.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/nvd-multipage-1.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/nvd-multipage-1.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/nvd-multipage-1.json diff --git a/src/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/nvd-multipage-2.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/nvd-multipage-2.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/nvd-multipage-2.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/nvd-multipage-2.json diff --git a/src/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/nvd-multipage-3.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/nvd-multipage-3.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/nvd-multipage-3.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/nvd-multipage-3.json diff --git a/src/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/nvd-window-1.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/nvd-window-1.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/nvd-window-1.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/nvd-window-1.json diff --git a/src/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/nvd-window-2.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/nvd-window-2.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/nvd-window-2.json rename to 
src/Concelier/__Tests/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/nvd-window-2.json diff --git a/src/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/nvd-window-update.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/nvd-window-update.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/nvd-window-update.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/Fixtures/nvd-window-update.json diff --git a/src/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/NvdConflictFixtureTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/NvdConflictFixtureTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/NvdConflictFixtureTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/NvdConflictFixtureTests.cs diff --git a/src/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/NvdConnectorHarnessTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/NvdConnectorHarnessTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/NvdConnectorHarnessTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/NvdConnectorHarnessTests.cs diff --git a/src/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/NvdConnectorTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/NvdConnectorTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/NvdConnectorTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/NvdConnectorTests.cs diff --git a/src/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/NvdMergeExportParityTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/NvdMergeExportParityTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/NvdMergeExportParityTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Nvd.Tests/Nvd/NvdMergeExportParityTests.cs diff --git a/src/Concelier/__Tests/StellaOps.Concelier.Connector.Nvd.Tests/StellaOps.Concelier.Connector.Nvd.Tests.csproj b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Nvd.Tests/StellaOps.Concelier.Connector.Nvd.Tests.csproj new file mode 100644 index 00000000..742f6d1d --- /dev/null +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Nvd.Tests/StellaOps.Concelier.Connector.Nvd.Tests.csproj @@ -0,0 +1,19 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Exporter.Json/StellaOps.Concelier.Exporter.Json.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Nvd/StellaOps.Concelier.Connector.Nvd.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> + </ItemGroup> + <ItemGroup> + <None Include="Nvd/Fixtures/*.json" 
CopyToOutputDirectory="Always" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/conflict-osv.canonical.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/conflict-osv.canonical.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/conflict-osv.canonical.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/conflict-osv.canonical.json diff --git a/src/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/osv-ghsa.ghsa.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/osv-ghsa.ghsa.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/osv-ghsa.ghsa.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/osv-ghsa.ghsa.json diff --git a/src/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/osv-ghsa.osv.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/osv-ghsa.osv.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/osv-ghsa.osv.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/osv-ghsa.osv.json diff --git a/src/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/osv-ghsa.raw-ghsa.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/osv-ghsa.raw-ghsa.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/osv-ghsa.raw-ghsa.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/osv-ghsa.raw-ghsa.json diff --git a/src/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/osv-ghsa.raw-osv.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/osv-ghsa.raw-osv.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/osv-ghsa.raw-osv.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/osv-ghsa.raw-osv.json diff --git a/src/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/osv-npm.snapshot.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/osv-npm.snapshot.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/osv-npm.snapshot.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/osv-npm.snapshot.json diff --git a/src/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/osv-pypi.snapshot.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/osv-pypi.snapshot.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/osv-pypi.snapshot.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Osv.Tests/Fixtures/osv-pypi.snapshot.json diff --git a/src/StellaOps.Concelier.Connector.Osv.Tests/Osv/OsvConflictFixtureTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Osv.Tests/Osv/OsvConflictFixtureTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Osv.Tests/Osv/OsvConflictFixtureTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Osv.Tests/Osv/OsvConflictFixtureTests.cs diff --git a/src/StellaOps.Concelier.Connector.Osv.Tests/Osv/OsvGhsaParityRegressionTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Osv.Tests/Osv/OsvGhsaParityRegressionTests.cs similarity index 100% rename from 
src/StellaOps.Concelier.Connector.Osv.Tests/Osv/OsvGhsaParityRegressionTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Osv.Tests/Osv/OsvGhsaParityRegressionTests.cs diff --git a/src/StellaOps.Concelier.Connector.Osv.Tests/Osv/OsvMapperTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Osv.Tests/Osv/OsvMapperTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Osv.Tests/Osv/OsvMapperTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Osv.Tests/Osv/OsvMapperTests.cs diff --git a/src/StellaOps.Concelier.Connector.Osv.Tests/Osv/OsvSnapshotTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Osv.Tests/Osv/OsvSnapshotTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Osv.Tests/Osv/OsvSnapshotTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Osv.Tests/Osv/OsvSnapshotTests.cs diff --git a/src/Concelier/__Tests/StellaOps.Concelier.Connector.Osv.Tests/StellaOps.Concelier.Connector.Osv.Tests.csproj b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Osv.Tests/StellaOps.Concelier.Connector.Osv.Tests.csproj new file mode 100644 index 00000000..20742b05 --- /dev/null +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Osv.Tests/StellaOps.Concelier.Connector.Osv.Tests.csproj @@ -0,0 +1,19 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Osv/StellaOps.Concelier.Connector.Osv.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> + </ItemGroup> + <ItemGroup> + <None Update="Fixtures\*.json"> + <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> + </None> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Ru.Bdu.Tests/Fixtures/export-sample.xml b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests/Fixtures/export-sample.xml similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Bdu.Tests/Fixtures/export-sample.xml rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests/Fixtures/export-sample.xml diff --git a/src/StellaOps.Concelier.Connector.Ru.Bdu.Tests/Fixtures/ru-bdu-advisories.snapshot.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests/Fixtures/ru-bdu-advisories.snapshot.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Bdu.Tests/Fixtures/ru-bdu-advisories.snapshot.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests/Fixtures/ru-bdu-advisories.snapshot.json diff --git a/src/StellaOps.Concelier.Connector.Ru.Bdu.Tests/Fixtures/ru-bdu-documents.snapshot.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests/Fixtures/ru-bdu-documents.snapshot.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Bdu.Tests/Fixtures/ru-bdu-documents.snapshot.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests/Fixtures/ru-bdu-documents.snapshot.json 
diff --git a/src/StellaOps.Concelier.Connector.Ru.Bdu.Tests/Fixtures/ru-bdu-dtos.snapshot.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests/Fixtures/ru-bdu-dtos.snapshot.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Bdu.Tests/Fixtures/ru-bdu-dtos.snapshot.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests/Fixtures/ru-bdu-dtos.snapshot.json diff --git a/src/StellaOps.Concelier.Connector.Ru.Bdu.Tests/Fixtures/ru-bdu-requests.snapshot.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests/Fixtures/ru-bdu-requests.snapshot.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Bdu.Tests/Fixtures/ru-bdu-requests.snapshot.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests/Fixtures/ru-bdu-requests.snapshot.json diff --git a/src/StellaOps.Concelier.Connector.Ru.Bdu.Tests/Fixtures/ru-bdu-state.snapshot.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests/Fixtures/ru-bdu-state.snapshot.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Bdu.Tests/Fixtures/ru-bdu-state.snapshot.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests/Fixtures/ru-bdu-state.snapshot.json diff --git a/src/StellaOps.Concelier.Connector.Ru.Bdu.Tests/RuBduConnectorSnapshotTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests/RuBduConnectorSnapshotTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Bdu.Tests/RuBduConnectorSnapshotTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests/RuBduConnectorSnapshotTests.cs diff --git a/src/StellaOps.Concelier.Connector.Ru.Bdu.Tests/RuBduMapperTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests/RuBduMapperTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Bdu.Tests/RuBduMapperTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests/RuBduMapperTests.cs diff --git a/src/StellaOps.Concelier.Connector.Ru.Bdu.Tests/RuBduXmlParserTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests/RuBduXmlParserTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Bdu.Tests/RuBduXmlParserTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests/RuBduXmlParserTests.cs diff --git a/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests.csproj b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests.csproj new file mode 100644 index 00000000..de661798 --- /dev/null +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests.csproj @@ -0,0 +1,14 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Ru.Bdu/StellaOps.Concelier.Connector.Ru.Bdu.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff 
--git a/src/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/Fixtures/bulletin-legacy.json.zip b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/Fixtures/bulletin-legacy.json.zip similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/Fixtures/bulletin-legacy.json.zip rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/Fixtures/bulletin-legacy.json.zip diff --git a/src/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/Fixtures/bulletin-sample.json.zip b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/Fixtures/bulletin-sample.json.zip similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/Fixtures/bulletin-sample.json.zip rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/Fixtures/bulletin-sample.json.zip diff --git a/src/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/Fixtures/listing-page2.html b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/Fixtures/listing-page2.html similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/Fixtures/listing-page2.html rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/Fixtures/listing-page2.html diff --git a/src/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/Fixtures/listing.html b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/Fixtures/listing.html similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/Fixtures/listing.html rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/Fixtures/listing.html diff --git a/src/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/Fixtures/nkcki-advisories.snapshot.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/Fixtures/nkcki-advisories.snapshot.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/Fixtures/nkcki-advisories.snapshot.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/Fixtures/nkcki-advisories.snapshot.json diff --git a/src/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/RuNkckiConnectorTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/RuNkckiConnectorTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/RuNkckiConnectorTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/RuNkckiConnectorTests.cs diff --git a/src/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/RuNkckiJsonParserTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/RuNkckiJsonParserTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/RuNkckiJsonParserTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/RuNkckiJsonParserTests.cs diff --git a/src/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/RuNkckiMapperTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/RuNkckiMapperTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/RuNkckiMapperTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/RuNkckiMapperTests.cs diff --git a/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/StellaOps.Concelier.Connector.Ru.Nkcki.Tests.csproj b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/StellaOps.Concelier.Connector.Ru.Nkcki.Tests.csproj new file mode 100644 index 00000000..9ad0ed38 --- 
/dev/null +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/StellaOps.Concelier.Connector.Ru.Nkcki.Tests.csproj @@ -0,0 +1,14 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Ru.Nkcki/StellaOps.Concelier.Connector.Ru.Nkcki.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/FixtureLoader.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/FixtureLoader.cs similarity index 97% rename from src/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/FixtureLoader.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/FixtureLoader.cs index 31cbd261..d4fe0011 100644 --- a/src/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/FixtureLoader.cs +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/FixtureLoader.cs @@ -1,33 +1,33 @@ -using System; -using System.IO; - -namespace StellaOps.Concelier.Connector.StellaOpsMirror.Tests; - -internal static class FixtureLoader -{ - private static readonly string FixturesRoot = Path.Combine(AppContext.BaseDirectory, "Fixtures"); - - public static string Read(string relativePath) - { - if (string.IsNullOrWhiteSpace(relativePath)) - { - throw new ArgumentException("Fixture path must be provided.", nameof(relativePath)); - } - - var normalized = relativePath.Replace('\\', Path.DirectorySeparatorChar).Replace('/', Path.DirectorySeparatorChar); - var path = Path.Combine(FixturesRoot, normalized); - - if (!File.Exists(path)) - { - throw new FileNotFoundException($"Fixture '{relativePath}' not found at '{path}'.", path); - } - - var content = File.ReadAllText(path); - return NormalizeLineEndings(content); - } - - public static string Normalize(string value) => NormalizeLineEndings(value); - - private static string NormalizeLineEndings(string value) - => value.Replace("\r\n", "\n", StringComparison.Ordinal); -} +using System; +using System.IO; + +namespace StellaOps.Concelier.Connector.StellaOpsMirror.Tests; + +internal static class FixtureLoader +{ + private static readonly string FixturesRoot = Path.Combine(AppContext.BaseDirectory, "Fixtures"); + + public static string Read(string relativePath) + { + if (string.IsNullOrWhiteSpace(relativePath)) + { + throw new ArgumentException("Fixture path must be provided.", nameof(relativePath)); + } + + var normalized = relativePath.Replace('\\', Path.DirectorySeparatorChar).Replace('/', Path.DirectorySeparatorChar); + var path = Path.Combine(FixturesRoot, normalized); + + if (!File.Exists(path)) + { + throw new FileNotFoundException($"Fixture '{relativePath}' not found at '{path}'.", path); + } + + var content = File.ReadAllText(path); + return NormalizeLineEndings(content); + } + + public static string Normalize(string value) => NormalizeLineEndings(value); + + private static string NormalizeLineEndings(string value) + => value.Replace("\r\n", "\n", StringComparison.Ordinal); +} diff --git 
a/src/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/Fixtures/mirror-advisory.expected.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/Fixtures/mirror-advisory.expected.json similarity index 96% rename from src/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/Fixtures/mirror-advisory.expected.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/Fixtures/mirror-advisory.expected.json index dfd8ab6f..bc4c1377 100644 --- a/src/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/Fixtures/mirror-advisory.expected.json +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/Fixtures/mirror-advisory.expected.json @@ -1,212 +1,212 @@ -{ - "advisoryKey": "CVE-2025-1111", - "affectedPackages": [ - { - "type": "semver", - "identifier": "pkg:npm/example@1.0.0", - "platform": null, - "versionRanges": [ - { - "fixedVersion": "1.2.0", - "introducedVersion": "1.0.0", - "lastAffectedVersion": null, - "primitives": { - "evr": null, - "hasVendorExtensions": false, - "nevra": null, - "semVer": { - "constraintExpression": ">=1.0.0,<1.2.0", - "exactValue": null, - "fixed": "1.2.0", - "fixedInclusive": false, - "introduced": "1.0.0", - "introducedInclusive": true, - "lastAffected": null, - "lastAffectedInclusive": true, - "style": "range" - }, - "vendorExtensions": null - }, - "provenance": { - "source": "ghsa", - "kind": "map", - "value": "range", - "decisionReason": null, - "recordedAt": "2025-10-19T12:00:00+00:00", - "fieldMask": [ - "affectedpackages[].versionranges[]" - ] - }, - "rangeExpression": ">=1.0.0,<1.2.0", - "rangeKind": "semver" - } - ], - "normalizedVersions": [ - { - "scheme": "semver", - "type": "range", - "min": "1.0.0", - "minInclusive": true, - "max": "1.2.0", - "maxInclusive": false, - "value": null, - "notes": null - } - ], - "statuses": [ - { - "provenance": { - "source": "ghsa", - "kind": "map", - "value": "status", - "decisionReason": null, - "recordedAt": "2025-10-19T12:00:00+00:00", - "fieldMask": [ - "affectedpackages[].statuses[]" - ] - }, - "status": "fixed" - } - ], - "provenance": [ - { - "source": "ghsa", - "kind": "map", - "value": "package", - "decisionReason": null, - "recordedAt": "2025-10-19T12:00:00+00:00", - "fieldMask": [ - "affectedpackages[]" - ] - }, - { - "source": "stellaops-mirror", - "kind": "map", - "value": "domain=primary;repository=mirror-primary;generated=2025-10-19T12:00:00.0000000+00:00;package=pkg:npm/example@1.0.0", - "decisionReason": null, - "recordedAt": "2025-10-19T12:00:00+00:00", - "fieldMask": [ - "affectedpackages[]", - "affectedpackages[].normalizedversions[]", - "affectedpackages[].statuses[]", - "affectedpackages[].versionranges[]" - ] - } - ] - } - ], - "aliases": [ - "CVE-2025-1111", - "GHSA-xxxx-xxxx-xxxx" - ], - "canonicalMetricId": "cvss::ghsa::CVE-2025-1111", - "credits": [ - { - "displayName": "Security Researcher", - "role": "reporter", - "contacts": [ - "mailto:researcher@example.com" - ], - "provenance": { - "source": "ghsa", - "kind": "map", - "value": "credit", - "decisionReason": null, - "recordedAt": "2025-10-19T12:00:00+00:00", - "fieldMask": [ - "credits[]" - ] - } - } - ], - "cvssMetrics": [ - { - "baseScore": 9.8, - "baseSeverity": "critical", - "provenance": { - "source": "ghsa", - "kind": "map", - "value": "cvss", - "decisionReason": null, - "recordedAt": "2025-10-19T12:00:00+00:00", - "fieldMask": [ - "cvssmetrics[]" - ] - }, - "vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", - "version": "3.1" - } - ], - "cwes": [ 
- { - "taxonomy": "cwe", - "identifier": "CWE-79", - "name": "Cross-site Scripting", - "uri": "https://cwe.mitre.org/data/definitions/79.html", - "provenance": [ - { - "source": "ghsa", - "kind": "map", - "value": "cwe", - "decisionReason": null, - "recordedAt": "2025-10-19T12:00:00+00:00", - "fieldMask": [ - "cwes[]" - ] - } - ] - } - ], - "description": "Deterministic test payload distributed via mirror.", - "exploitKnown": false, - "language": "en", - "modified": "2025-10-11T00:00:00+00:00", - "provenance": [ - { - "source": "ghsa", - "kind": "map", - "value": "advisory", - "decisionReason": null, - "recordedAt": "2025-10-19T12:00:00+00:00", - "fieldMask": [ - "advisory" - ] - }, - { - "source": "stellaops-mirror", - "kind": "map", - "value": "domain=primary;repository=mirror-primary;generated=2025-10-19T12:00:00.0000000+00:00", - "decisionReason": null, - "recordedAt": "2025-10-19T12:00:00+00:00", - "fieldMask": [ - "advisory", - "credits[]", - "cvssmetrics[]", - "cwes[]", - "references[]" - ] - } - ], - "published": "2025-10-10T00:00:00+00:00", - "references": [ - { - "kind": "advisory", - "provenance": { - "source": "ghsa", - "kind": "map", - "value": "reference", - "decisionReason": null, - "recordedAt": "2025-10-19T12:00:00+00:00", - "fieldMask": [ - "references[]" - ] - }, - "sourceTag": "vendor", - "summary": "Vendor bulletin", - "url": "https://example.com/advisory" - } - ], - "severity": "high", - "summary": "Upstream advisory replicated through StellaOps mirror.", - "title": "Sample Mirror Advisory" -} +{ + "advisoryKey": "CVE-2025-1111", + "affectedPackages": [ + { + "type": "semver", + "identifier": "pkg:npm/example@1.0.0", + "platform": null, + "versionRanges": [ + { + "fixedVersion": "1.2.0", + "introducedVersion": "1.0.0", + "lastAffectedVersion": null, + "primitives": { + "evr": null, + "hasVendorExtensions": false, + "nevra": null, + "semVer": { + "constraintExpression": ">=1.0.0,<1.2.0", + "exactValue": null, + "fixed": "1.2.0", + "fixedInclusive": false, + "introduced": "1.0.0", + "introducedInclusive": true, + "lastAffected": null, + "lastAffectedInclusive": true, + "style": "range" + }, + "vendorExtensions": null + }, + "provenance": { + "source": "ghsa", + "kind": "map", + "value": "range", + "decisionReason": null, + "recordedAt": "2025-10-19T12:00:00+00:00", + "fieldMask": [ + "affectedpackages[].versionranges[]" + ] + }, + "rangeExpression": ">=1.0.0,<1.2.0", + "rangeKind": "semver" + } + ], + "normalizedVersions": [ + { + "scheme": "semver", + "type": "range", + "min": "1.0.0", + "minInclusive": true, + "max": "1.2.0", + "maxInclusive": false, + "value": null, + "notes": null + } + ], + "statuses": [ + { + "provenance": { + "source": "ghsa", + "kind": "map", + "value": "status", + "decisionReason": null, + "recordedAt": "2025-10-19T12:00:00+00:00", + "fieldMask": [ + "affectedpackages[].statuses[]" + ] + }, + "status": "fixed" + } + ], + "provenance": [ + { + "source": "ghsa", + "kind": "map", + "value": "package", + "decisionReason": null, + "recordedAt": "2025-10-19T12:00:00+00:00", + "fieldMask": [ + "affectedpackages[]" + ] + }, + { + "source": "stellaops-mirror", + "kind": "map", + "value": "domain=primary;repository=mirror-primary;generated=2025-10-19T12:00:00.0000000+00:00;package=pkg:npm/example@1.0.0", + "decisionReason": null, + "recordedAt": "2025-10-19T12:00:00+00:00", + "fieldMask": [ + "affectedpackages[]", + "affectedpackages[].normalizedversions[]", + "affectedpackages[].statuses[]", + "affectedpackages[].versionranges[]" + ] + } + ] + } + ], + 
"aliases": [ + "CVE-2025-1111", + "GHSA-xxxx-xxxx-xxxx" + ], + "canonicalMetricId": "cvss::ghsa::CVE-2025-1111", + "credits": [ + { + "displayName": "Security Researcher", + "role": "reporter", + "contacts": [ + "mailto:researcher@example.com" + ], + "provenance": { + "source": "ghsa", + "kind": "map", + "value": "credit", + "decisionReason": null, + "recordedAt": "2025-10-19T12:00:00+00:00", + "fieldMask": [ + "credits[]" + ] + } + } + ], + "cvssMetrics": [ + { + "baseScore": 9.8, + "baseSeverity": "critical", + "provenance": { + "source": "ghsa", + "kind": "map", + "value": "cvss", + "decisionReason": null, + "recordedAt": "2025-10-19T12:00:00+00:00", + "fieldMask": [ + "cvssmetrics[]" + ] + }, + "vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", + "version": "3.1" + } + ], + "cwes": [ + { + "taxonomy": "cwe", + "identifier": "CWE-79", + "name": "Cross-site Scripting", + "uri": "https://cwe.mitre.org/data/definitions/79.html", + "provenance": [ + { + "source": "ghsa", + "kind": "map", + "value": "cwe", + "decisionReason": null, + "recordedAt": "2025-10-19T12:00:00+00:00", + "fieldMask": [ + "cwes[]" + ] + } + ] + } + ], + "description": "Deterministic test payload distributed via mirror.", + "exploitKnown": false, + "language": "en", + "modified": "2025-10-11T00:00:00+00:00", + "provenance": [ + { + "source": "ghsa", + "kind": "map", + "value": "advisory", + "decisionReason": null, + "recordedAt": "2025-10-19T12:00:00+00:00", + "fieldMask": [ + "advisory" + ] + }, + { + "source": "stellaops-mirror", + "kind": "map", + "value": "domain=primary;repository=mirror-primary;generated=2025-10-19T12:00:00.0000000+00:00", + "decisionReason": null, + "recordedAt": "2025-10-19T12:00:00+00:00", + "fieldMask": [ + "advisory", + "credits[]", + "cvssmetrics[]", + "cwes[]", + "references[]" + ] + } + ], + "published": "2025-10-10T00:00:00+00:00", + "references": [ + { + "kind": "advisory", + "provenance": { + "source": "ghsa", + "kind": "map", + "value": "reference", + "decisionReason": null, + "recordedAt": "2025-10-19T12:00:00+00:00", + "fieldMask": [ + "references[]" + ] + }, + "sourceTag": "vendor", + "summary": "Vendor bulletin", + "url": "https://example.com/advisory" + } + ], + "severity": "high", + "summary": "Upstream advisory replicated through StellaOps mirror.", + "title": "Sample Mirror Advisory" +} diff --git a/src/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/Fixtures/mirror-bundle.sample.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/Fixtures/mirror-bundle.sample.json similarity index 96% rename from src/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/Fixtures/mirror-bundle.sample.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/Fixtures/mirror-bundle.sample.json index 14945746..2d218312 100644 --- a/src/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/Fixtures/mirror-bundle.sample.json +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/Fixtures/mirror-bundle.sample.json @@ -1,202 +1,202 @@ -{ - "advisories": [ - { - "advisoryKey": "CVE-2025-1111", - "affectedPackages": [ - { - "type": "semver", - "identifier": "pkg:npm/example@1.0.0", - "platform": null, - "versionRanges": [ - { - "fixedVersion": "1.2.0", - "introducedVersion": "1.0.0", - "lastAffectedVersion": null, - "primitives": { - "evr": null, - "hasVendorExtensions": false, - "nevra": null, - "semVer": { - "constraintExpression": ">=1.0.0,<1.2.0", - "exactValue": null, - "fixed": "1.2.0", - "fixedInclusive": 
false, - "introduced": "1.0.0", - "introducedInclusive": true, - "lastAffected": null, - "lastAffectedInclusive": true, - "style": "range" - }, - "vendorExtensions": null - }, - "provenance": { - "source": "ghsa", - "kind": "map", - "value": "range", - "decisionReason": null, - "recordedAt": "2025-10-19T12:00:00+00:00", - "fieldMask": [ - "affectedpackages[].versionranges[]" - ] - }, - "rangeExpression": ">=1.0.0,<1.2.0", - "rangeKind": "semver" - } - ], - "normalizedVersions": [ - { - "scheme": "semver", - "type": "range", - "min": "1.0.0", - "minInclusive": true, - "max": "1.2.0", - "maxInclusive": false, - "value": null, - "notes": null - } - ], - "statuses": [ - { - "provenance": { - "source": "ghsa", - "kind": "map", - "value": "status", - "decisionReason": null, - "recordedAt": "2025-10-19T12:00:00+00:00", - "fieldMask": [ - "affectedpackages[].statuses[]" - ] - }, - "status": "fixed" - } - ], - "provenance": [ - { - "source": "ghsa", - "kind": "map", - "value": "package", - "decisionReason": null, - "recordedAt": "2025-10-19T12:00:00+00:00", - "fieldMask": [ - "affectedpackages[]" - ] - } - ] - } - ], - "aliases": [ - "GHSA-xxxx-xxxx-xxxx" - ], - "canonicalMetricId": "cvss::ghsa::CVE-2025-1111", - "credits": [ - { - "displayName": "Security Researcher", - "role": "reporter", - "contacts": [ - "mailto:researcher@example.com" - ], - "provenance": { - "source": "ghsa", - "kind": "map", - "value": "credit", - "decisionReason": null, - "recordedAt": "2025-10-19T12:00:00+00:00", - "fieldMask": [ - "credits[]" - ] - } - } - ], - "cvssMetrics": [ - { - "baseScore": 9.8, - "baseSeverity": "critical", - "provenance": { - "source": "ghsa", - "kind": "map", - "value": "cvss", - "decisionReason": null, - "recordedAt": "2025-10-19T12:00:00+00:00", - "fieldMask": [ - "cvssmetrics[]" - ] - }, - "vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", - "version": "3.1" - } - ], - "cwes": [ - { - "taxonomy": "cwe", - "identifier": "CWE-79", - "name": "Cross-site Scripting", - "uri": "https://cwe.mitre.org/data/definitions/79.html", - "provenance": [ - { - "source": "ghsa", - "kind": "map", - "value": "cwe", - "decisionReason": null, - "recordedAt": "2025-10-19T12:00:00+00:00", - "fieldMask": [ - "cwes[]" - ] - } - ] - } - ], - "description": "Deterministic test payload distributed via mirror.", - "exploitKnown": false, - "language": "en", - "modified": "2025-10-11T00:00:00+00:00", - "provenance": [ - { - "source": "ghsa", - "kind": "map", - "value": "advisory", - "decisionReason": null, - "recordedAt": "2025-10-19T12:00:00+00:00", - "fieldMask": [ - "advisory" - ] - } - ], - "published": "2025-10-10T00:00:00+00:00", - "references": [ - { - "kind": "advisory", - "provenance": { - "source": "ghsa", - "kind": "map", - "value": "reference", - "decisionReason": null, - "recordedAt": "2025-10-19T12:00:00+00:00", - "fieldMask": [ - "references[]" - ] - }, - "sourceTag": "vendor", - "summary": "Vendor bulletin", - "url": "https://example.com/advisory" - } - ], - "severity": "high", - "summary": "Upstream advisory replicated through StellaOps mirror.", - "title": "Sample Mirror Advisory" - } - ], - "advisoryCount": 1, - "displayName": "Primary Mirror", - "domainId": "primary", - "generatedAt": "2025-10-19T12:00:00+00:00", - "schemaVersion": 1, - "sources": [ - { - "advisoryCount": 1, - "firstRecordedAt": "2025-10-19T12:00:00+00:00", - "lastRecordedAt": "2025-10-19T12:00:00+00:00", - "source": "ghsa" - } - ], - "targetRepository": "mirror-primary" -} +{ + "advisories": [ + { + "advisoryKey": "CVE-2025-1111", 
+ "affectedPackages": [ + { + "type": "semver", + "identifier": "pkg:npm/example@1.0.0", + "platform": null, + "versionRanges": [ + { + "fixedVersion": "1.2.0", + "introducedVersion": "1.0.0", + "lastAffectedVersion": null, + "primitives": { + "evr": null, + "hasVendorExtensions": false, + "nevra": null, + "semVer": { + "constraintExpression": ">=1.0.0,<1.2.0", + "exactValue": null, + "fixed": "1.2.0", + "fixedInclusive": false, + "introduced": "1.0.0", + "introducedInclusive": true, + "lastAffected": null, + "lastAffectedInclusive": true, + "style": "range" + }, + "vendorExtensions": null + }, + "provenance": { + "source": "ghsa", + "kind": "map", + "value": "range", + "decisionReason": null, + "recordedAt": "2025-10-19T12:00:00+00:00", + "fieldMask": [ + "affectedpackages[].versionranges[]" + ] + }, + "rangeExpression": ">=1.0.0,<1.2.0", + "rangeKind": "semver" + } + ], + "normalizedVersions": [ + { + "scheme": "semver", + "type": "range", + "min": "1.0.0", + "minInclusive": true, + "max": "1.2.0", + "maxInclusive": false, + "value": null, + "notes": null + } + ], + "statuses": [ + { + "provenance": { + "source": "ghsa", + "kind": "map", + "value": "status", + "decisionReason": null, + "recordedAt": "2025-10-19T12:00:00+00:00", + "fieldMask": [ + "affectedpackages[].statuses[]" + ] + }, + "status": "fixed" + } + ], + "provenance": [ + { + "source": "ghsa", + "kind": "map", + "value": "package", + "decisionReason": null, + "recordedAt": "2025-10-19T12:00:00+00:00", + "fieldMask": [ + "affectedpackages[]" + ] + } + ] + } + ], + "aliases": [ + "GHSA-xxxx-xxxx-xxxx" + ], + "canonicalMetricId": "cvss::ghsa::CVE-2025-1111", + "credits": [ + { + "displayName": "Security Researcher", + "role": "reporter", + "contacts": [ + "mailto:researcher@example.com" + ], + "provenance": { + "source": "ghsa", + "kind": "map", + "value": "credit", + "decisionReason": null, + "recordedAt": "2025-10-19T12:00:00+00:00", + "fieldMask": [ + "credits[]" + ] + } + } + ], + "cvssMetrics": [ + { + "baseScore": 9.8, + "baseSeverity": "critical", + "provenance": { + "source": "ghsa", + "kind": "map", + "value": "cvss", + "decisionReason": null, + "recordedAt": "2025-10-19T12:00:00+00:00", + "fieldMask": [ + "cvssmetrics[]" + ] + }, + "vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", + "version": "3.1" + } + ], + "cwes": [ + { + "taxonomy": "cwe", + "identifier": "CWE-79", + "name": "Cross-site Scripting", + "uri": "https://cwe.mitre.org/data/definitions/79.html", + "provenance": [ + { + "source": "ghsa", + "kind": "map", + "value": "cwe", + "decisionReason": null, + "recordedAt": "2025-10-19T12:00:00+00:00", + "fieldMask": [ + "cwes[]" + ] + } + ] + } + ], + "description": "Deterministic test payload distributed via mirror.", + "exploitKnown": false, + "language": "en", + "modified": "2025-10-11T00:00:00+00:00", + "provenance": [ + { + "source": "ghsa", + "kind": "map", + "value": "advisory", + "decisionReason": null, + "recordedAt": "2025-10-19T12:00:00+00:00", + "fieldMask": [ + "advisory" + ] + } + ], + "published": "2025-10-10T00:00:00+00:00", + "references": [ + { + "kind": "advisory", + "provenance": { + "source": "ghsa", + "kind": "map", + "value": "reference", + "decisionReason": null, + "recordedAt": "2025-10-19T12:00:00+00:00", + "fieldMask": [ + "references[]" + ] + }, + "sourceTag": "vendor", + "summary": "Vendor bulletin", + "url": "https://example.com/advisory" + } + ], + "severity": "high", + "summary": "Upstream advisory replicated through StellaOps mirror.", + "title": "Sample Mirror Advisory" + 
} + ], + "advisoryCount": 1, + "displayName": "Primary Mirror", + "domainId": "primary", + "generatedAt": "2025-10-19T12:00:00+00:00", + "schemaVersion": 1, + "sources": [ + { + "advisoryCount": 1, + "firstRecordedAt": "2025-10-19T12:00:00+00:00", + "lastRecordedAt": "2025-10-19T12:00:00+00:00", + "source": "ghsa" + } + ], + "targetRepository": "mirror-primary" +} diff --git a/src/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/MirrorAdvisoryMapperTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/MirrorAdvisoryMapperTests.cs similarity index 97% rename from src/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/MirrorAdvisoryMapperTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/MirrorAdvisoryMapperTests.cs index 1d4f8d3c..7fbc0069 100644 --- a/src/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/MirrorAdvisoryMapperTests.cs +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/MirrorAdvisoryMapperTests.cs @@ -1,47 +1,47 @@ -using System; -using StellaOps.Concelier.Connector.StellaOpsMirror.Internal; -using StellaOps.Concelier.Models; -using Xunit; - -namespace StellaOps.Concelier.Connector.StellaOpsMirror.Tests; - -public sealed class MirrorAdvisoryMapperTests -{ - [Fact] - public void Map_ProducesCanonicalAdvisoryWithMirrorProvenance() - { - var bundle = SampleData.CreateBundle(); - var bundleJson = CanonicalJsonSerializer.SerializeIndented(bundle); - Assert.Equal( - FixtureLoader.Read(SampleData.BundleFixture).TrimEnd(), - FixtureLoader.Normalize(bundleJson).TrimEnd()); - - var advisories = MirrorAdvisoryMapper.Map(bundle); - - Assert.Single(advisories); - var advisory = advisories[0]; - - var expectedAdvisory = SampleData.CreateExpectedMappedAdvisory(); - var expectedJson = CanonicalJsonSerializer.SerializeIndented(expectedAdvisory); - Assert.Equal( - FixtureLoader.Read(SampleData.AdvisoryFixture).TrimEnd(), - FixtureLoader.Normalize(expectedJson).TrimEnd()); - - var actualJson = CanonicalJsonSerializer.SerializeIndented(advisory); - Assert.Equal( - FixtureLoader.Normalize(expectedJson).TrimEnd(), - FixtureLoader.Normalize(actualJson).TrimEnd()); - - Assert.Contains(advisory.Aliases, alias => string.Equals(alias, advisory.AdvisoryKey, StringComparison.OrdinalIgnoreCase)); - Assert.Contains( - advisory.Provenance, - provenance => string.Equals(provenance.Source, StellaOpsMirrorConnector.Source, StringComparison.Ordinal) && - string.Equals(provenance.Kind, "map", StringComparison.Ordinal)); - - var package = Assert.Single(advisory.AffectedPackages); - Assert.Contains( - package.Provenance, - provenance => string.Equals(provenance.Source, StellaOpsMirrorConnector.Source, StringComparison.Ordinal) && - string.Equals(provenance.Kind, "map", StringComparison.Ordinal)); - } -} +using System; +using StellaOps.Concelier.Connector.StellaOpsMirror.Internal; +using StellaOps.Concelier.Models; +using Xunit; + +namespace StellaOps.Concelier.Connector.StellaOpsMirror.Tests; + +public sealed class MirrorAdvisoryMapperTests +{ + [Fact] + public void Map_ProducesCanonicalAdvisoryWithMirrorProvenance() + { + var bundle = SampleData.CreateBundle(); + var bundleJson = CanonicalJsonSerializer.SerializeIndented(bundle); + Assert.Equal( + FixtureLoader.Read(SampleData.BundleFixture).TrimEnd(), + FixtureLoader.Normalize(bundleJson).TrimEnd()); + + var advisories = MirrorAdvisoryMapper.Map(bundle); + + Assert.Single(advisories); + var advisory = advisories[0]; + + var expectedAdvisory = 
SampleData.CreateExpectedMappedAdvisory(); + var expectedJson = CanonicalJsonSerializer.SerializeIndented(expectedAdvisory); + Assert.Equal( + FixtureLoader.Read(SampleData.AdvisoryFixture).TrimEnd(), + FixtureLoader.Normalize(expectedJson).TrimEnd()); + + var actualJson = CanonicalJsonSerializer.SerializeIndented(advisory); + Assert.Equal( + FixtureLoader.Normalize(expectedJson).TrimEnd(), + FixtureLoader.Normalize(actualJson).TrimEnd()); + + Assert.Contains(advisory.Aliases, alias => string.Equals(alias, advisory.AdvisoryKey, StringComparison.OrdinalIgnoreCase)); + Assert.Contains( + advisory.Provenance, + provenance => string.Equals(provenance.Source, StellaOpsMirrorConnector.Source, StringComparison.Ordinal) && + string.Equals(provenance.Kind, "map", StringComparison.Ordinal)); + + var package = Assert.Single(advisory.AffectedPackages); + Assert.Contains( + package.Provenance, + provenance => string.Equals(provenance.Source, StellaOpsMirrorConnector.Source, StringComparison.Ordinal) && + string.Equals(provenance.Kind, "map", StringComparison.Ordinal)); + } +} diff --git a/src/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/MirrorSignatureVerifierTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/MirrorSignatureVerifierTests.cs similarity index 97% rename from src/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/MirrorSignatureVerifierTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/MirrorSignatureVerifierTests.cs index 548501c5..66717014 100644 --- a/src/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/MirrorSignatureVerifierTests.cs +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/MirrorSignatureVerifierTests.cs @@ -1,189 +1,189 @@ -using System; -using System.Collections.Generic; -using System.IO; -using System.Security.Cryptography; -using Microsoft.Extensions.Caching.Memory; -using Microsoft.Extensions.Logging.Abstractions; -using StellaOps.Concelier.Connector.StellaOpsMirror.Security; -using StellaOps.Cryptography; -using Xunit; - -namespace StellaOps.Concelier.Connector.StellaOpsMirror.Tests; - -public sealed class MirrorSignatureVerifierTests -{ - [Fact] - public async Task VerifyAsync_ValidSignaturePasses() - { - var provider = new DefaultCryptoProvider(); - var key = CreateSigningKey("mirror-key"); - provider.UpsertSigningKey(key); - - var registry = new CryptoProviderRegistry(new[] { provider }); - var verifier = new MirrorSignatureVerifier(registry, NullLogger<MirrorSignatureVerifier>.Instance, new MemoryCache(new MemoryCacheOptions())); - - var payloadText = System.Text.Json.JsonSerializer.Serialize(new { advisories = Array.Empty<string>() }); - var payload = payloadText.ToUtf8Bytes(); - var (signature, _) = await CreateDetachedJwsAsync(provider, key.Reference.KeyId, payload); - - await verifier.VerifyAsync(payload, signature, CancellationToken.None); - } - - [Fact] - public async Task VerifyAsync_InvalidSignatureThrows() - { - var provider = new DefaultCryptoProvider(); - var key = CreateSigningKey("mirror-key"); - provider.UpsertSigningKey(key); - - var registry = new CryptoProviderRegistry(new[] { provider }); - var verifier = new MirrorSignatureVerifier(registry, NullLogger<MirrorSignatureVerifier>.Instance, new MemoryCache(new MemoryCacheOptions())); - - var payloadText = System.Text.Json.JsonSerializer.Serialize(new { advisories = Array.Empty<string>() }); - var payload = payloadText.ToUtf8Bytes(); - var (signature, _) = await 
CreateDetachedJwsAsync(provider, key.Reference.KeyId, payload); - - var tampered = signature.Replace('a', 'b'); - - await Assert.ThrowsAsync<InvalidOperationException>(() => verifier.VerifyAsync(payload, tampered, CancellationToken.None)); - } - - [Fact] - public async Task VerifyAsync_KeyMismatchThrows() - { - var provider = new DefaultCryptoProvider(); - var key = CreateSigningKey("mirror-key"); - provider.UpsertSigningKey(key); - - var registry = new CryptoProviderRegistry(new[] { provider }); - var verifier = new MirrorSignatureVerifier(registry, NullLogger<MirrorSignatureVerifier>.Instance, new MemoryCache(new MemoryCacheOptions())); - - var payloadText = System.Text.Json.JsonSerializer.Serialize(new { advisories = Array.Empty<string>() }); - var payload = payloadText.ToUtf8Bytes(); - var (signature, _) = await CreateDetachedJwsAsync(provider, key.Reference.KeyId, payload); - - await Assert.ThrowsAsync<InvalidOperationException>(() => verifier.VerifyAsync( - payload, - signature, - expectedKeyId: "unexpected-key", - expectedProvider: null, - fallbackPublicKeyPath: null, - cancellationToken: CancellationToken.None)); - } - - [Fact] - public async Task VerifyAsync_ThrowsWhenProviderMissingKey() - { - var provider = new DefaultCryptoProvider(); - var key = CreateSigningKey("mirror-key"); - provider.UpsertSigningKey(key); - - var registry = new CryptoProviderRegistry(new[] { provider }); - var verifier = new MirrorSignatureVerifier(registry, NullLogger<MirrorSignatureVerifier>.Instance, new MemoryCache(new MemoryCacheOptions())); - - var payloadText = System.Text.Json.JsonSerializer.Serialize(new { advisories = Array.Empty<string>() }); - var payload = payloadText.ToUtf8Bytes(); - var (signature, _) = await CreateDetachedJwsAsync(provider, key.Reference.KeyId, payload); - - provider.RemoveSigningKey(key.Reference.KeyId); - - await Assert.ThrowsAsync<InvalidOperationException>(() => verifier.VerifyAsync( - payload, - signature, - expectedKeyId: key.Reference.KeyId, - expectedProvider: provider.Name, - fallbackPublicKeyPath: null, - cancellationToken: CancellationToken.None)); - } - - [Fact] - public async Task VerifyAsync_UsesCachedPublicKeyWhenFileRemoved() - { - var provider = new DefaultCryptoProvider(); - var signingKey = CreateSigningKey("mirror-key"); - provider.UpsertSigningKey(signingKey); - var registry = new CryptoProviderRegistry(new[] { provider }); - var memoryCache = new MemoryCache(new MemoryCacheOptions()); - var verifier = new MirrorSignatureVerifier(registry, NullLogger<MirrorSignatureVerifier>.Instance, memoryCache); - - var payload = "{\"advisories\":[]}"; - var (signature, _) = await CreateDetachedJwsAsync(provider, signingKey.Reference.KeyId, payload.ToUtf8Bytes()); - provider.RemoveSigningKey(signingKey.Reference.KeyId); - var pemPath = WritePublicKeyPem(signingKey); - - try - { - await verifier.VerifyAsync(payload.ToUtf8Bytes(), signature, expectedKeyId: signingKey.Reference.KeyId, expectedProvider: "default", fallbackPublicKeyPath: pemPath, cancellationToken: CancellationToken.None); - - File.Delete(pemPath); - - await verifier.VerifyAsync(payload.ToUtf8Bytes(), signature, expectedKeyId: signingKey.Reference.KeyId, expectedProvider: "default", fallbackPublicKeyPath: pemPath, cancellationToken: CancellationToken.None); - } - finally - { - if (File.Exists(pemPath)) - { - File.Delete(pemPath); - } - } - } - - private static CryptoSigningKey CreateSigningKey(string keyId) - { - using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256); - var parameters = 
ecdsa.ExportParameters(includePrivateParameters: true); - return new CryptoSigningKey(new CryptoKeyReference(keyId), SignatureAlgorithms.Es256, in parameters, DateTimeOffset.UtcNow); - } - - private static string WritePublicKeyPem(CryptoSigningKey signingKey) - { - using var ecdsa = ECDsa.Create(signingKey.PublicParameters); - var info = ecdsa.ExportSubjectPublicKeyInfo(); - var pem = PemEncoding.Write("PUBLIC KEY", info); - var path = Path.Combine(Path.GetTempPath(), $"stellaops-mirror-{Guid.NewGuid():N}.pem"); - File.WriteAllText(path, pem); - return path; - } - - private static async Task<(string Signature, DateTimeOffset SignedAt)> CreateDetachedJwsAsync( - DefaultCryptoProvider provider, - string keyId, - ReadOnlyMemory<byte> payload) - { - var signer = provider.GetSigner(SignatureAlgorithms.Es256, new CryptoKeyReference(keyId)); - var header = new Dictionary<string, object?> - { - ["alg"] = SignatureAlgorithms.Es256, - ["kid"] = keyId, - ["provider"] = provider.Name, - ["typ"] = "application/vnd.stellaops.concelier.mirror-bundle+jws", - ["b64"] = false, - ["crit"] = new[] { "b64" } - }; - - var headerJson = System.Text.Json.JsonSerializer.Serialize(header); - var protectedHeader = Microsoft.IdentityModel.Tokens.Base64UrlEncoder.Encode(headerJson); - - var signingInput = BuildSigningInput(protectedHeader, payload.Span); - var signatureBytes = await signer.SignAsync(signingInput, CancellationToken.None).ConfigureAwait(false); - var encodedSignature = Microsoft.IdentityModel.Tokens.Base64UrlEncoder.Encode(signatureBytes); - - return (string.Concat(protectedHeader, "..", encodedSignature), DateTimeOffset.UtcNow); - } - - private static ReadOnlyMemory<byte> BuildSigningInput(string encodedHeader, ReadOnlySpan<byte> payload) - { - var headerBytes = System.Text.Encoding.ASCII.GetBytes(encodedHeader); - var buffer = new byte[headerBytes.Length + 1 + payload.Length]; - headerBytes.CopyTo(buffer.AsSpan()); - buffer[headerBytes.Length] = (byte)'.'; - payload.CopyTo(buffer.AsSpan(headerBytes.Length + 1)); - return buffer; - } -} - -file static class Utf8Extensions -{ - public static ReadOnlyMemory<byte> ToUtf8Bytes(this string value) - => System.Text.Encoding.UTF8.GetBytes(value); -} +using System; +using System.Collections.Generic; +using System.IO; +using System.Security.Cryptography; +using Microsoft.Extensions.Caching.Memory; +using Microsoft.Extensions.Logging.Abstractions; +using StellaOps.Concelier.Connector.StellaOpsMirror.Security; +using StellaOps.Cryptography; +using Xunit; + +namespace StellaOps.Concelier.Connector.StellaOpsMirror.Tests; + +public sealed class MirrorSignatureVerifierTests +{ + [Fact] + public async Task VerifyAsync_ValidSignaturePasses() + { + var provider = new DefaultCryptoProvider(); + var key = CreateSigningKey("mirror-key"); + provider.UpsertSigningKey(key); + + var registry = new CryptoProviderRegistry(new[] { provider }); + var verifier = new MirrorSignatureVerifier(registry, NullLogger<MirrorSignatureVerifier>.Instance, new MemoryCache(new MemoryCacheOptions())); + + var payloadText = System.Text.Json.JsonSerializer.Serialize(new { advisories = Array.Empty<string>() }); + var payload = payloadText.ToUtf8Bytes(); + var (signature, _) = await CreateDetachedJwsAsync(provider, key.Reference.KeyId, payload); + + await verifier.VerifyAsync(payload, signature, CancellationToken.None); + } + + [Fact] + public async Task VerifyAsync_InvalidSignatureThrows() + { + var provider = new DefaultCryptoProvider(); + var key = CreateSigningKey("mirror-key"); + 
provider.UpsertSigningKey(key); + + var registry = new CryptoProviderRegistry(new[] { provider }); + var verifier = new MirrorSignatureVerifier(registry, NullLogger<MirrorSignatureVerifier>.Instance, new MemoryCache(new MemoryCacheOptions())); + + var payloadText = System.Text.Json.JsonSerializer.Serialize(new { advisories = Array.Empty<string>() }); + var payload = payloadText.ToUtf8Bytes(); + var (signature, _) = await CreateDetachedJwsAsync(provider, key.Reference.KeyId, payload); + + var tampered = signature.Replace('a', 'b'); + + await Assert.ThrowsAsync<InvalidOperationException>(() => verifier.VerifyAsync(payload, tampered, CancellationToken.None)); + } + + [Fact] + public async Task VerifyAsync_KeyMismatchThrows() + { + var provider = new DefaultCryptoProvider(); + var key = CreateSigningKey("mirror-key"); + provider.UpsertSigningKey(key); + + var registry = new CryptoProviderRegistry(new[] { provider }); + var verifier = new MirrorSignatureVerifier(registry, NullLogger<MirrorSignatureVerifier>.Instance, new MemoryCache(new MemoryCacheOptions())); + + var payloadText = System.Text.Json.JsonSerializer.Serialize(new { advisories = Array.Empty<string>() }); + var payload = payloadText.ToUtf8Bytes(); + var (signature, _) = await CreateDetachedJwsAsync(provider, key.Reference.KeyId, payload); + + await Assert.ThrowsAsync<InvalidOperationException>(() => verifier.VerifyAsync( + payload, + signature, + expectedKeyId: "unexpected-key", + expectedProvider: null, + fallbackPublicKeyPath: null, + cancellationToken: CancellationToken.None)); + } + + [Fact] + public async Task VerifyAsync_ThrowsWhenProviderMissingKey() + { + var provider = new DefaultCryptoProvider(); + var key = CreateSigningKey("mirror-key"); + provider.UpsertSigningKey(key); + + var registry = new CryptoProviderRegistry(new[] { provider }); + var verifier = new MirrorSignatureVerifier(registry, NullLogger<MirrorSignatureVerifier>.Instance, new MemoryCache(new MemoryCacheOptions())); + + var payloadText = System.Text.Json.JsonSerializer.Serialize(new { advisories = Array.Empty<string>() }); + var payload = payloadText.ToUtf8Bytes(); + var (signature, _) = await CreateDetachedJwsAsync(provider, key.Reference.KeyId, payload); + + provider.RemoveSigningKey(key.Reference.KeyId); + + await Assert.ThrowsAsync<InvalidOperationException>(() => verifier.VerifyAsync( + payload, + signature, + expectedKeyId: key.Reference.KeyId, + expectedProvider: provider.Name, + fallbackPublicKeyPath: null, + cancellationToken: CancellationToken.None)); + } + + [Fact] + public async Task VerifyAsync_UsesCachedPublicKeyWhenFileRemoved() + { + var provider = new DefaultCryptoProvider(); + var signingKey = CreateSigningKey("mirror-key"); + provider.UpsertSigningKey(signingKey); + var registry = new CryptoProviderRegistry(new[] { provider }); + var memoryCache = new MemoryCache(new MemoryCacheOptions()); + var verifier = new MirrorSignatureVerifier(registry, NullLogger<MirrorSignatureVerifier>.Instance, memoryCache); + + var payload = "{\"advisories\":[]}"; + var (signature, _) = await CreateDetachedJwsAsync(provider, signingKey.Reference.KeyId, payload.ToUtf8Bytes()); + provider.RemoveSigningKey(signingKey.Reference.KeyId); + var pemPath = WritePublicKeyPem(signingKey); + + try + { + await verifier.VerifyAsync(payload.ToUtf8Bytes(), signature, expectedKeyId: signingKey.Reference.KeyId, expectedProvider: "default", fallbackPublicKeyPath: pemPath, cancellationToken: CancellationToken.None); + + File.Delete(pemPath); + + await 
verifier.VerifyAsync(payload.ToUtf8Bytes(), signature, expectedKeyId: signingKey.Reference.KeyId, expectedProvider: "default", fallbackPublicKeyPath: pemPath, cancellationToken: CancellationToken.None); + } + finally + { + if (File.Exists(pemPath)) + { + File.Delete(pemPath); + } + } + } + + private static CryptoSigningKey CreateSigningKey(string keyId) + { + using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256); + var parameters = ecdsa.ExportParameters(includePrivateParameters: true); + return new CryptoSigningKey(new CryptoKeyReference(keyId), SignatureAlgorithms.Es256, in parameters, DateTimeOffset.UtcNow); + } + + private static string WritePublicKeyPem(CryptoSigningKey signingKey) + { + using var ecdsa = ECDsa.Create(signingKey.PublicParameters); + var info = ecdsa.ExportSubjectPublicKeyInfo(); + var pem = PemEncoding.Write("PUBLIC KEY", info); + var path = Path.Combine(Path.GetTempPath(), $"stellaops-mirror-{Guid.NewGuid():N}.pem"); + File.WriteAllText(path, pem); + return path; + } + + private static async Task<(string Signature, DateTimeOffset SignedAt)> CreateDetachedJwsAsync( + DefaultCryptoProvider provider, + string keyId, + ReadOnlyMemory<byte> payload) + { + var signer = provider.GetSigner(SignatureAlgorithms.Es256, new CryptoKeyReference(keyId)); + var header = new Dictionary<string, object?> + { + ["alg"] = SignatureAlgorithms.Es256, + ["kid"] = keyId, + ["provider"] = provider.Name, + ["typ"] = "application/vnd.stellaops.concelier.mirror-bundle+jws", + ["b64"] = false, + ["crit"] = new[] { "b64" } + }; + + var headerJson = System.Text.Json.JsonSerializer.Serialize(header); + var protectedHeader = Microsoft.IdentityModel.Tokens.Base64UrlEncoder.Encode(headerJson); + + var signingInput = BuildSigningInput(protectedHeader, payload.Span); + var signatureBytes = await signer.SignAsync(signingInput, CancellationToken.None).ConfigureAwait(false); + var encodedSignature = Microsoft.IdentityModel.Tokens.Base64UrlEncoder.Encode(signatureBytes); + + return (string.Concat(protectedHeader, "..", encodedSignature), DateTimeOffset.UtcNow); + } + + private static ReadOnlyMemory<byte> BuildSigningInput(string encodedHeader, ReadOnlySpan<byte> payload) + { + var headerBytes = System.Text.Encoding.ASCII.GetBytes(encodedHeader); + var buffer = new byte[headerBytes.Length + 1 + payload.Length]; + headerBytes.CopyTo(buffer.AsSpan()); + buffer[headerBytes.Length] = (byte)'.'; + payload.CopyTo(buffer.AsSpan(headerBytes.Length + 1)); + return buffer; + } +} + +file static class Utf8Extensions +{ + public static ReadOnlyMemory<byte> ToUtf8Bytes(this string value) + => System.Text.Encoding.UTF8.GetBytes(value); +} diff --git a/src/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/SampleData.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/SampleData.cs similarity index 97% rename from src/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/SampleData.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/SampleData.cs index 72d4c50c..261e7226 100644 --- a/src/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/SampleData.cs +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/SampleData.cs @@ -1,265 +1,265 @@ -using System; -using System.Globalization; -using StellaOps.Concelier.Connector.StellaOpsMirror.Internal; -using StellaOps.Concelier.Models; - -namespace StellaOps.Concelier.Connector.StellaOpsMirror.Tests; - -internal static class SampleData -{ - public const string BundleFixture = 
"mirror-bundle.sample.json"; - public const string AdvisoryFixture = "mirror-advisory.expected.json"; - public const string TargetRepository = "mirror-primary"; - public const string DomainId = "primary"; - public const string AdvisoryKey = "CVE-2025-1111"; - public const string GhsaAlias = "GHSA-xxxx-xxxx-xxxx"; - - public static DateTimeOffset GeneratedAt { get; } = new(2025, 10, 19, 12, 0, 0, TimeSpan.Zero); - - public static MirrorBundleDocument CreateBundle() - => new( - SchemaVersion: 1, - GeneratedAt: GeneratedAt, - TargetRepository: TargetRepository, - DomainId: DomainId, - DisplayName: "Primary Mirror", - AdvisoryCount: 1, - Advisories: new[] { CreateSourceAdvisory() }, - Sources: new[] - { - new MirrorSourceSummary("ghsa", GeneratedAt, GeneratedAt, 1) - }); - - public static Advisory CreateExpectedMappedAdvisory() - { - var baseAdvisory = CreateSourceAdvisory(); - var recordedAt = GeneratedAt.ToUniversalTime(); - var mirrorValue = BuildMirrorValue(recordedAt); - - var topProvenance = baseAdvisory.Provenance.Add(new AdvisoryProvenance( - StellaOpsMirrorConnector.Source, - "map", - mirrorValue, - recordedAt, - new[] - { - ProvenanceFieldMasks.Advisory, - ProvenanceFieldMasks.References, - ProvenanceFieldMasks.Credits, - ProvenanceFieldMasks.CvssMetrics, - ProvenanceFieldMasks.Weaknesses, - })); - - var package = baseAdvisory.AffectedPackages[0]; - var packageProvenance = package.Provenance.Add(new AdvisoryProvenance( - StellaOpsMirrorConnector.Source, - "map", - $"{mirrorValue};package={package.Identifier}", - recordedAt, - new[] - { - ProvenanceFieldMasks.AffectedPackages, - ProvenanceFieldMasks.VersionRanges, - ProvenanceFieldMasks.PackageStatuses, - ProvenanceFieldMasks.NormalizedVersions, - })); - var updatedPackage = new AffectedPackage( - package.Type, - package.Identifier, - package.Platform, - package.VersionRanges, - package.Statuses, - packageProvenance, - package.NormalizedVersions); - - return new Advisory( - AdvisoryKey, - baseAdvisory.Title, - baseAdvisory.Summary, - baseAdvisory.Language, - baseAdvisory.Published, - baseAdvisory.Modified, - baseAdvisory.Severity, - baseAdvisory.ExploitKnown, - new[] { AdvisoryKey, GhsaAlias }, - baseAdvisory.Credits, - baseAdvisory.References, - new[] { updatedPackage }, - baseAdvisory.CvssMetrics, - topProvenance, - baseAdvisory.Description, - baseAdvisory.Cwes, - baseAdvisory.CanonicalMetricId); - } - - private static Advisory CreateSourceAdvisory() - { - var recordedAt = GeneratedAt.ToUniversalTime(); - - var reference = new AdvisoryReference( - "https://example.com/advisory", - "advisory", - "vendor", - "Vendor bulletin", - new AdvisoryProvenance( - "ghsa", - "map", - "reference", - recordedAt, - new[] - { - ProvenanceFieldMasks.References, - })); - - var credit = new AdvisoryCredit( - "Security Researcher", - "reporter", - new[] { "mailto:researcher@example.com" }, - new AdvisoryProvenance( - "ghsa", - "map", - "credit", - recordedAt, - new[] - { - ProvenanceFieldMasks.Credits, - })); - - var semVerPrimitive = new SemVerPrimitive( - Introduced: "1.0.0", - IntroducedInclusive: true, - Fixed: "1.2.0", - FixedInclusive: false, - LastAffected: null, - LastAffectedInclusive: true, - ConstraintExpression: ">=1.0.0,<1.2.0", - ExactValue: null); - - var range = new AffectedVersionRange( - rangeKind: "semver", - introducedVersion: "1.0.0", - fixedVersion: "1.2.0", - lastAffectedVersion: null, - rangeExpression: ">=1.0.0,<1.2.0", - provenance: new AdvisoryProvenance( - "ghsa", - "map", - "range", - recordedAt, - new[] - { - 
ProvenanceFieldMasks.VersionRanges, - }), - primitives: new RangePrimitives(semVerPrimitive, null, null, null)); - - var status = new AffectedPackageStatus( - "fixed", - new AdvisoryProvenance( - "ghsa", - "map", - "status", - recordedAt, - new[] - { - ProvenanceFieldMasks.PackageStatuses, - })); - - var normalizedRule = new NormalizedVersionRule( - scheme: "semver", - type: "range", - min: "1.0.0", - minInclusive: true, - max: "1.2.0", - maxInclusive: false, - value: null, - notes: null); - - var package = new AffectedPackage( - AffectedPackageTypes.SemVer, - "pkg:npm/example@1.0.0", - platform: null, - versionRanges: new[] { range }, - statuses: new[] { status }, - provenance: new[] - { - new AdvisoryProvenance( - "ghsa", - "map", - "package", - recordedAt, - new[] - { - ProvenanceFieldMasks.AffectedPackages, - }) - }, - normalizedVersions: new[] { normalizedRule }); - - var cvss = new CvssMetric( - "3.1", - "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", - 9.8, - "critical", - new AdvisoryProvenance( - "ghsa", - "map", - "cvss", - recordedAt, - new[] - { - ProvenanceFieldMasks.CvssMetrics, - })); - - var weakness = new AdvisoryWeakness( - "cwe", - "CWE-79", - "Cross-site Scripting", - "https://cwe.mitre.org/data/definitions/79.html", - new[] - { - new AdvisoryProvenance( - "ghsa", - "map", - "cwe", - recordedAt, - new[] - { - ProvenanceFieldMasks.Weaknesses, - }) - }); - - var advisory = new Advisory( - AdvisoryKey, - "Sample Mirror Advisory", - "Upstream advisory replicated through StellaOps mirror.", - "en", - published: new DateTimeOffset(2025, 10, 10, 0, 0, 0, TimeSpan.Zero), - modified: new DateTimeOffset(2025, 10, 11, 0, 0, 0, TimeSpan.Zero), - severity: "high", - exploitKnown: false, - aliases: new[] { GhsaAlias }, - credits: new[] { credit }, - references: new[] { reference }, - affectedPackages: new[] { package }, - cvssMetrics: new[] { cvss }, - provenance: new[] - { - new AdvisoryProvenance( - "ghsa", - "map", - "advisory", - recordedAt, - new[] - { - ProvenanceFieldMasks.Advisory, - }) - }, - description: "Deterministic test payload distributed via mirror.", - cwes: new[] { weakness }, - canonicalMetricId: "cvss::ghsa::CVE-2025-1111"); - - return CanonicalJsonSerializer.Normalize(advisory); - } - - private static string BuildMirrorValue(DateTimeOffset recordedAt) - => $"domain={DomainId};repository={TargetRepository};generated={recordedAt.ToString("O", CultureInfo.InvariantCulture)}"; -} +using System; +using System.Globalization; +using StellaOps.Concelier.Connector.StellaOpsMirror.Internal; +using StellaOps.Concelier.Models; + +namespace StellaOps.Concelier.Connector.StellaOpsMirror.Tests; + +internal static class SampleData +{ + public const string BundleFixture = "mirror-bundle.sample.json"; + public const string AdvisoryFixture = "mirror-advisory.expected.json"; + public const string TargetRepository = "mirror-primary"; + public const string DomainId = "primary"; + public const string AdvisoryKey = "CVE-2025-1111"; + public const string GhsaAlias = "GHSA-xxxx-xxxx-xxxx"; + + public static DateTimeOffset GeneratedAt { get; } = new(2025, 10, 19, 12, 0, 0, TimeSpan.Zero); + + public static MirrorBundleDocument CreateBundle() + => new( + SchemaVersion: 1, + GeneratedAt: GeneratedAt, + TargetRepository: TargetRepository, + DomainId: DomainId, + DisplayName: "Primary Mirror", + AdvisoryCount: 1, + Advisories: new[] { CreateSourceAdvisory() }, + Sources: new[] + { + new MirrorSourceSummary("ghsa", GeneratedAt, GeneratedAt, 1) + }); + + public static Advisory 
CreateExpectedMappedAdvisory() + { + var baseAdvisory = CreateSourceAdvisory(); + var recordedAt = GeneratedAt.ToUniversalTime(); + var mirrorValue = BuildMirrorValue(recordedAt); + + var topProvenance = baseAdvisory.Provenance.Add(new AdvisoryProvenance( + StellaOpsMirrorConnector.Source, + "map", + mirrorValue, + recordedAt, + new[] + { + ProvenanceFieldMasks.Advisory, + ProvenanceFieldMasks.References, + ProvenanceFieldMasks.Credits, + ProvenanceFieldMasks.CvssMetrics, + ProvenanceFieldMasks.Weaknesses, + })); + + var package = baseAdvisory.AffectedPackages[0]; + var packageProvenance = package.Provenance.Add(new AdvisoryProvenance( + StellaOpsMirrorConnector.Source, + "map", + $"{mirrorValue};package={package.Identifier}", + recordedAt, + new[] + { + ProvenanceFieldMasks.AffectedPackages, + ProvenanceFieldMasks.VersionRanges, + ProvenanceFieldMasks.PackageStatuses, + ProvenanceFieldMasks.NormalizedVersions, + })); + var updatedPackage = new AffectedPackage( + package.Type, + package.Identifier, + package.Platform, + package.VersionRanges, + package.Statuses, + packageProvenance, + package.NormalizedVersions); + + return new Advisory( + AdvisoryKey, + baseAdvisory.Title, + baseAdvisory.Summary, + baseAdvisory.Language, + baseAdvisory.Published, + baseAdvisory.Modified, + baseAdvisory.Severity, + baseAdvisory.ExploitKnown, + new[] { AdvisoryKey, GhsaAlias }, + baseAdvisory.Credits, + baseAdvisory.References, + new[] { updatedPackage }, + baseAdvisory.CvssMetrics, + topProvenance, + baseAdvisory.Description, + baseAdvisory.Cwes, + baseAdvisory.CanonicalMetricId); + } + + private static Advisory CreateSourceAdvisory() + { + var recordedAt = GeneratedAt.ToUniversalTime(); + + var reference = new AdvisoryReference( + "https://example.com/advisory", + "advisory", + "vendor", + "Vendor bulletin", + new AdvisoryProvenance( + "ghsa", + "map", + "reference", + recordedAt, + new[] + { + ProvenanceFieldMasks.References, + })); + + var credit = new AdvisoryCredit( + "Security Researcher", + "reporter", + new[] { "mailto:researcher@example.com" }, + new AdvisoryProvenance( + "ghsa", + "map", + "credit", + recordedAt, + new[] + { + ProvenanceFieldMasks.Credits, + })); + + var semVerPrimitive = new SemVerPrimitive( + Introduced: "1.0.0", + IntroducedInclusive: true, + Fixed: "1.2.0", + FixedInclusive: false, + LastAffected: null, + LastAffectedInclusive: true, + ConstraintExpression: ">=1.0.0,<1.2.0", + ExactValue: null); + + var range = new AffectedVersionRange( + rangeKind: "semver", + introducedVersion: "1.0.0", + fixedVersion: "1.2.0", + lastAffectedVersion: null, + rangeExpression: ">=1.0.0,<1.2.0", + provenance: new AdvisoryProvenance( + "ghsa", + "map", + "range", + recordedAt, + new[] + { + ProvenanceFieldMasks.VersionRanges, + }), + primitives: new RangePrimitives(semVerPrimitive, null, null, null)); + + var status = new AffectedPackageStatus( + "fixed", + new AdvisoryProvenance( + "ghsa", + "map", + "status", + recordedAt, + new[] + { + ProvenanceFieldMasks.PackageStatuses, + })); + + var normalizedRule = new NormalizedVersionRule( + scheme: "semver", + type: "range", + min: "1.0.0", + minInclusive: true, + max: "1.2.0", + maxInclusive: false, + value: null, + notes: null); + + var package = new AffectedPackage( + AffectedPackageTypes.SemVer, + "pkg:npm/example@1.0.0", + platform: null, + versionRanges: new[] { range }, + statuses: new[] { status }, + provenance: new[] + { + new AdvisoryProvenance( + "ghsa", + "map", + "package", + recordedAt, + new[] + { + 
ProvenanceFieldMasks.AffectedPackages, + }) + }, + normalizedVersions: new[] { normalizedRule }); + + var cvss = new CvssMetric( + "3.1", + "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", + 9.8, + "critical", + new AdvisoryProvenance( + "ghsa", + "map", + "cvss", + recordedAt, + new[] + { + ProvenanceFieldMasks.CvssMetrics, + })); + + var weakness = new AdvisoryWeakness( + "cwe", + "CWE-79", + "Cross-site Scripting", + "https://cwe.mitre.org/data/definitions/79.html", + new[] + { + new AdvisoryProvenance( + "ghsa", + "map", + "cwe", + recordedAt, + new[] + { + ProvenanceFieldMasks.Weaknesses, + }) + }); + + var advisory = new Advisory( + AdvisoryKey, + "Sample Mirror Advisory", + "Upstream advisory replicated through StellaOps mirror.", + "en", + published: new DateTimeOffset(2025, 10, 10, 0, 0, 0, TimeSpan.Zero), + modified: new DateTimeOffset(2025, 10, 11, 0, 0, 0, TimeSpan.Zero), + severity: "high", + exploitKnown: false, + aliases: new[] { GhsaAlias }, + credits: new[] { credit }, + references: new[] { reference }, + affectedPackages: new[] { package }, + cvssMetrics: new[] { cvss }, + provenance: new[] + { + new AdvisoryProvenance( + "ghsa", + "map", + "advisory", + recordedAt, + new[] + { + ProvenanceFieldMasks.Advisory, + }) + }, + description: "Deterministic test payload distributed via mirror.", + cwes: new[] { weakness }, + canonicalMetricId: "cvss::ghsa::CVE-2025-1111"); + + return CanonicalJsonSerializer.Normalize(advisory); + } + + private static string BuildMirrorValue(DateTimeOffset recordedAt) + => $"domain={DomainId};repository={TargetRepository};generated={recordedAt.ToString("O", CultureInfo.InvariantCulture)}"; +} diff --git a/src/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/StellaOps.Concelier.Connector.StellaOpsMirror.Tests.csproj b/src/Concelier/__Tests/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/StellaOps.Concelier.Connector.StellaOpsMirror.Tests.csproj similarity index 51% rename from src/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/StellaOps.Concelier.Connector.StellaOpsMirror.Tests.csproj rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/StellaOps.Concelier.Connector.StellaOpsMirror.Tests.csproj index 76070cd6..952005b1 100644 --- a/src/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/StellaOps.Concelier.Connector.StellaOpsMirror.Tests.csproj +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/StellaOps.Concelier.Connector.StellaOpsMirror.Tests.csproj @@ -1,14 +1,15 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> <ItemGroup> - <ProjectReference Include="../StellaOps.Concelier.Connector.StellaOpsMirror/StellaOps.Concelier.Connector.StellaOpsMirror.csproj" /> - <ProjectReference Include="../StellaOps.Cryptography/StellaOps.Cryptography.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.StellaOpsMirror/StellaOps.Concelier.Connector.StellaOpsMirror.csproj" /> + <ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" /> </ItemGroup> <ItemGroup> <None Include="Fixtures\**\*.json" CopyToOutputDirectory="Always" /> </ItemGroup> 
-</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/StellaOpsMirrorConnectorTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/StellaOpsMirrorConnectorTests.cs similarity index 97% rename from src/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/StellaOpsMirrorConnectorTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/StellaOpsMirrorConnectorTests.cs index e1bf4a0a..617fba8c 100644 --- a/src/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/StellaOpsMirrorConnectorTests.cs +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.StellaOpsMirror.Tests/StellaOpsMirrorConnectorTests.cs @@ -1,464 +1,464 @@ -using System; -using System.Collections.Generic; -using System.IO; -using System.Net; -using System.Net.Http; -using System.Security.Cryptography; -using System.Text; -using System.Text.Json; -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Http; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.Extensions.Options; -using MongoDB.Bson; -using StellaOps.Concelier.Connector.Common; -using StellaOps.Concelier.Connector.Common.Fetch; -using StellaOps.Concelier.Connector.Common.Testing; -using StellaOps.Concelier.Connector.StellaOpsMirror.Internal; -using StellaOps.Concelier.Connector.StellaOpsMirror.Settings; -using StellaOps.Concelier.Storage.Mongo; -using StellaOps.Concelier.Storage.Mongo.Advisories; -using StellaOps.Concelier.Storage.Mongo.Documents; -using StellaOps.Concelier.Storage.Mongo.Dtos; -using StellaOps.Concelier.Testing; -using StellaOps.Cryptography; -using StellaOps.Concelier.Models; -using Xunit; - -namespace StellaOps.Concelier.Connector.StellaOpsMirror.Tests; - -[Collection("mongo-fixture")] -public sealed class StellaOpsMirrorConnectorTests : IAsyncLifetime -{ - private readonly MongoIntegrationFixture _fixture; - private readonly CannedHttpMessageHandler _handler; - - public StellaOpsMirrorConnectorTests(MongoIntegrationFixture fixture) - { - _fixture = fixture; - _handler = new CannedHttpMessageHandler(); - } - - [Fact] - public async Task FetchAsync_PersistsMirrorArtifacts() - { - var manifestContent = "{\"domain\":\"primary\",\"files\":[]}"; - var bundleContent = "{\"advisories\":[{\"id\":\"CVE-2025-0001\"}]}"; - - var manifestDigest = ComputeDigest(manifestContent); - var bundleDigest = ComputeDigest(bundleContent); - - var index = BuildIndex(manifestDigest, Encoding.UTF8.GetByteCount(manifestContent), bundleDigest, Encoding.UTF8.GetByteCount(bundleContent), includeSignature: false); - - await using var provider = await BuildServiceProviderAsync(); - - SeedResponses(index, manifestContent, bundleContent, signature: null); - - var connector = provider.GetRequiredService<StellaOpsMirrorConnector>(); - await connector.FetchAsync(provider, CancellationToken.None); - - var documentStore = provider.GetRequiredService<IDocumentStore>(); - var manifestUri = "https://mirror.test/mirror/primary/manifest.json"; - var bundleUri = "https://mirror.test/mirror/primary/bundle.json"; - - var manifestDocument = await documentStore.FindBySourceAndUriAsync(StellaOpsMirrorConnector.Source, manifestUri, CancellationToken.None); - Assert.NotNull(manifestDocument); - Assert.Equal(DocumentStatuses.Mapped, manifestDocument!.Status); - Assert.Equal(NormalizeDigest(manifestDigest), manifestDocument.Sha256); - - var 
bundleDocument = await documentStore.FindBySourceAndUriAsync(StellaOpsMirrorConnector.Source, bundleUri, CancellationToken.None); - Assert.NotNull(bundleDocument); - Assert.Equal(DocumentStatuses.PendingParse, bundleDocument!.Status); - Assert.Equal(NormalizeDigest(bundleDigest), bundleDocument.Sha256); - - var rawStorage = provider.GetRequiredService<RawDocumentStorage>(); - Assert.NotNull(manifestDocument.GridFsId); - Assert.NotNull(bundleDocument.GridFsId); - - var manifestBytes = await rawStorage.DownloadAsync(manifestDocument.GridFsId!.Value, CancellationToken.None); - var bundleBytes = await rawStorage.DownloadAsync(bundleDocument.GridFsId!.Value, CancellationToken.None); - Assert.Equal(manifestContent, Encoding.UTF8.GetString(manifestBytes)); - Assert.Equal(bundleContent, Encoding.UTF8.GetString(bundleBytes)); - - var stateRepository = provider.GetRequiredService<ISourceStateRepository>(); - var state = await stateRepository.TryGetAsync(StellaOpsMirrorConnector.Source, CancellationToken.None); - Assert.NotNull(state); - - var cursorDocument = state!.Cursor ?? new BsonDocument(); - var digestValue = cursorDocument.TryGetValue("bundleDigest", out var digestBson) ? digestBson.AsString : string.Empty; - Assert.Equal(NormalizeDigest(bundleDigest), NormalizeDigest(digestValue)); - - var pendingDocumentsArray = cursorDocument.TryGetValue("pendingDocuments", out var pendingDocsBson) && pendingDocsBson is BsonArray pendingArray - ? pendingArray - : new BsonArray(); - Assert.Single(pendingDocumentsArray); - var pendingDocumentId = Guid.Parse(pendingDocumentsArray[0].AsString); - Assert.Equal(bundleDocument.Id, pendingDocumentId); - - var pendingMappingsArray = cursorDocument.TryGetValue("pendingMappings", out var pendingMappingsBson) && pendingMappingsBson is BsonArray mappingsArray - ? mappingsArray - : new BsonArray(); - Assert.Empty(pendingMappingsArray); - } - - [Fact] - public async Task FetchAsync_TamperedSignatureThrows() - { - var manifestContent = "{\"domain\":\"primary\"}"; - var bundleContent = "{\"advisories\":[{\"id\":\"CVE-2025-0002\"}]}"; - - var manifestDigest = ComputeDigest(manifestContent); - var bundleDigest = ComputeDigest(bundleContent); - var index = BuildIndex(manifestDigest, Encoding.UTF8.GetByteCount(manifestContent), bundleDigest, Encoding.UTF8.GetByteCount(bundleContent), includeSignature: true); - - await using var provider = await BuildServiceProviderAsync(options => - { - options.Signature.Enabled = true; - options.Signature.KeyId = "mirror-key"; - options.Signature.Provider = "default"; - }); - - var defaultProvider = provider.GetRequiredService<DefaultCryptoProvider>(); - var signingKey = CreateSigningKey("mirror-key"); - defaultProvider.UpsertSigningKey(signingKey); - - var (signatureValue, _) = CreateDetachedJws(signingKey, bundleContent); - // Tamper with signature so verification fails. 
- var tamperedSignature = signatureValue.Replace('a', 'b'); - - SeedResponses(index, manifestContent, bundleContent, tamperedSignature); - - var connector = provider.GetRequiredService<StellaOpsMirrorConnector>(); - await Assert.ThrowsAsync<InvalidOperationException>(() => connector.FetchAsync(provider, CancellationToken.None)); - - var stateRepository = provider.GetRequiredService<ISourceStateRepository>(); - var state = await stateRepository.TryGetAsync(StellaOpsMirrorConnector.Source, CancellationToken.None); - Assert.NotNull(state); - Assert.True(state!.FailCount >= 1); - Assert.False(state.Cursor.TryGetValue("bundleDigest", out _)); - } - - [Fact] - public async Task FetchAsync_SignatureKeyMismatchThrows() - { - var manifestContent = "{\"domain\":\"primary\"}"; - var bundleContent = "{\"advisories\":[{\"id\":\"CVE-2025-0003\"}]}"; - - var manifestDigest = ComputeDigest(manifestContent); - var bundleDigest = ComputeDigest(bundleContent); - var index = BuildIndex( - manifestDigest, - Encoding.UTF8.GetByteCount(manifestContent), - bundleDigest, - Encoding.UTF8.GetByteCount(bundleContent), - includeSignature: true, - signatureKeyId: "unexpected-key", - signatureProvider: "default"); - - var signingKey = CreateSigningKey("unexpected-key"); - var (signatureValue, _) = CreateDetachedJws(signingKey, bundleContent); - - await using var provider = await BuildServiceProviderAsync(options => - { - options.Signature.Enabled = true; - options.Signature.KeyId = "mirror-key"; - options.Signature.Provider = "default"; - }); - - SeedResponses(index, manifestContent, bundleContent, signatureValue); - - var connector = provider.GetRequiredService<StellaOpsMirrorConnector>(); - await Assert.ThrowsAsync<InvalidOperationException>(() => connector.FetchAsync(provider, CancellationToken.None)); - } - - [Fact] - public async Task FetchAsync_VerifiesSignatureUsingFallbackPublicKey() - { - var manifestContent = "{\"domain\":\"primary\"}"; - var bundleContent = "{\"advisories\":[{\"id\":\"CVE-2025-0004\"}]}"; - - var manifestDigest = ComputeDigest(manifestContent); - var bundleDigest = ComputeDigest(bundleContent); - var index = BuildIndex(manifestDigest, Encoding.UTF8.GetByteCount(manifestContent), bundleDigest, Encoding.UTF8.GetByteCount(bundleContent), includeSignature: true); - - var signingKey = CreateSigningKey("mirror-key"); - var (signatureValue, _) = CreateDetachedJws(signingKey, bundleContent); - var publicKeyPath = WritePublicKeyPem(signingKey); - - await using var provider = await BuildServiceProviderAsync(options => - { - options.Signature.Enabled = true; - options.Signature.KeyId = "mirror-key"; - options.Signature.Provider = "default"; - options.Signature.PublicKeyPath = publicKeyPath; - }); - - try - { - SeedResponses(index, manifestContent, bundleContent, signatureValue); - - var connector = provider.GetRequiredService<StellaOpsMirrorConnector>(); - await connector.FetchAsync(provider, CancellationToken.None); - - var stateRepository = provider.GetRequiredService<ISourceStateRepository>(); - var state = await stateRepository.TryGetAsync(StellaOpsMirrorConnector.Source, CancellationToken.None); - Assert.NotNull(state); - Assert.Equal(0, state!.FailCount); - } - finally - { - if (File.Exists(publicKeyPath)) - { - File.Delete(publicKeyPath); - } - } - } - - [Fact] - public async Task FetchAsync_DigestMismatchMarksFailure() - { - var manifestExpected = "{\"domain\":\"primary\"}"; - var manifestTampered = "{\"domain\":\"tampered\"}"; - var bundleContent = 
"{\"advisories\":[{\"id\":\"CVE-2025-0005\"}]}"; - - var manifestDigest = ComputeDigest(manifestExpected); - var bundleDigest = ComputeDigest(bundleContent); - var index = BuildIndex(manifestDigest, Encoding.UTF8.GetByteCount(manifestExpected), bundleDigest, Encoding.UTF8.GetByteCount(bundleContent), includeSignature: false); - - await using var provider = await BuildServiceProviderAsync(); - - SeedResponses(index, manifestTampered, bundleContent, signature: null); - - var connector = provider.GetRequiredService<StellaOpsMirrorConnector>(); - - await Assert.ThrowsAsync<InvalidOperationException>(() => connector.FetchAsync(provider, CancellationToken.None)); - - var stateRepository = provider.GetRequiredService<ISourceStateRepository>(); - var state = await stateRepository.TryGetAsync(StellaOpsMirrorConnector.Source, CancellationToken.None); - Assert.NotNull(state); - var cursor = state!.Cursor ?? new BsonDocument(); - Assert.True(state.FailCount >= 1); - Assert.False(cursor.Contains("bundleDigest")); - } - - [Fact] - public void ParseAndMap_PersistAdvisoriesFromBundle() - { - var bundleDocument = SampleData.CreateBundle(); - var bundleJson = CanonicalJsonSerializer.SerializeIndented(bundleDocument); - var normalizedFixture = FixtureLoader.Read(SampleData.BundleFixture).TrimEnd(); - Assert.Equal(normalizedFixture, FixtureLoader.Normalize(bundleJson).TrimEnd()); - - var advisories = MirrorAdvisoryMapper.Map(bundleDocument); - Assert.Single(advisories); - var advisory = advisories[0]; - - var expectedAdvisoryJson = FixtureLoader.Read(SampleData.AdvisoryFixture).TrimEnd(); - var mappedJson = CanonicalJsonSerializer.SerializeIndented(advisory); - Assert.Equal(expectedAdvisoryJson, FixtureLoader.Normalize(mappedJson).TrimEnd()); - - // AdvisoryStore integration validated elsewhere; ensure canonical serialization is stable. - } - - public Task InitializeAsync() => Task.CompletedTask; - - public Task DisposeAsync() - { - _handler.Clear(); - return Task.CompletedTask; - } - - private async Task<ServiceProvider> BuildServiceProviderAsync(Action<StellaOpsMirrorConnectorOptions>? 
configureOptions = null) - { - await _fixture.Client.DropDatabaseAsync(_fixture.Database.DatabaseNamespace.DatabaseName); - _handler.Clear(); - - var services = new ServiceCollection(); - services.AddLogging(builder => builder.AddProvider(NullLoggerProvider.Instance)); - services.AddSingleton(_handler); - services.AddSingleton(TimeProvider.System); - - services.AddMongoStorage(options => - { - options.ConnectionString = _fixture.Runner.ConnectionString; - options.DatabaseName = _fixture.Database.DatabaseNamespace.DatabaseName; - options.CommandTimeout = TimeSpan.FromSeconds(5); - }); - - services.AddSingleton<DefaultCryptoProvider>(); - services.AddSingleton<ICryptoProvider>(sp => sp.GetRequiredService<DefaultCryptoProvider>()); - services.AddSingleton<ICryptoProviderRegistry>(sp => new CryptoProviderRegistry(sp.GetServices<ICryptoProvider>())); - - var configuration = new ConfigurationBuilder() - .AddInMemoryCollection(new Dictionary<string, string?> - { - ["concelier:sources:stellaopsMirror:baseAddress"] = "https://mirror.test/", - ["concelier:sources:stellaopsMirror:domainId"] = "primary", - ["concelier:sources:stellaopsMirror:indexPath"] = "/concelier/exports/index.json", - }) - .Build(); - - var routine = new StellaOpsMirrorDependencyInjectionRoutine(); - routine.Register(services, configuration); - - if (configureOptions is not null) - { - services.PostConfigure(configureOptions); - } - - services.Configure<HttpClientFactoryOptions>("stellaops-mirror", builder => - { - builder.HttpMessageHandlerBuilderActions.Add(options => - { - options.PrimaryHandler = _handler; - }); - }); - - var provider = services.BuildServiceProvider(); - var bootstrapper = provider.GetRequiredService<MongoBootstrapper>(); - await bootstrapper.InitializeAsync(CancellationToken.None); - return provider; - } - - private void SeedResponses(string indexJson, string manifestContent, string bundleContent, string? 
signature) - { - var baseUri = new Uri("https://mirror.test"); - _handler.AddResponse(HttpMethod.Get, new Uri(baseUri, "/concelier/exports/index.json"), () => CreateJsonResponse(indexJson)); - _handler.AddResponse(HttpMethod.Get, new Uri(baseUri, "mirror/primary/manifest.json"), () => CreateJsonResponse(manifestContent)); - _handler.AddResponse(HttpMethod.Get, new Uri(baseUri, "mirror/primary/bundle.json"), () => CreateJsonResponse(bundleContent)); - - if (signature is not null) - { - _handler.AddResponse(HttpMethod.Get, new Uri(baseUri, "mirror/primary/bundle.json.jws"), () => new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StringContent(signature, Encoding.UTF8, "application/jose+json"), - }); - } - } - - private static HttpResponseMessage CreateJsonResponse(string content) - => new(HttpStatusCode.OK) - { - Content = new StringContent(content, Encoding.UTF8, "application/json"), - }; - - private static string BuildIndex( - string manifestDigest, - int manifestBytes, - string bundleDigest, - int bundleBytes, - bool includeSignature, - string signatureKeyId = "mirror-key", - string signatureProvider = "default") - { - var index = new - { - schemaVersion = 1, - generatedAt = new DateTimeOffset(2025, 10, 19, 12, 0, 0, TimeSpan.Zero), - targetRepository = "repo", - domains = new[] - { - new - { - domainId = "primary", - displayName = "Primary", - advisoryCount = 1, - manifest = new - { - path = "mirror/primary/manifest.json", - sizeBytes = manifestBytes, - digest = manifestDigest, - signature = (object?)null, - }, - bundle = new - { - path = "mirror/primary/bundle.json", - sizeBytes = bundleBytes, - digest = bundleDigest, - signature = includeSignature - ? new - { - path = "mirror/primary/bundle.json.jws", - algorithm = "ES256", - keyId = signatureKeyId, - provider = signatureProvider, - signedAt = new DateTimeOffset(2025, 10, 19, 12, 0, 0, TimeSpan.Zero), - } - : null, - }, - sources = Array.Empty<object>(), - } - } - }; - - return JsonSerializer.Serialize(index, new JsonSerializerOptions - { - PropertyNamingPolicy = JsonNamingPolicy.CamelCase, - WriteIndented = false, - }); - } - - private static string ComputeDigest(string content) - { - var bytes = Encoding.UTF8.GetBytes(content); - var hash = SHA256.HashData(bytes); - return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant(); - } - - private static string NormalizeDigest(string digest) - => digest.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase) ? digest[7..] 
: digest; - - private static CryptoSigningKey CreateSigningKey(string keyId) - { - using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256); - var parameters = ecdsa.ExportParameters(includePrivateParameters: true); - return new CryptoSigningKey(new CryptoKeyReference(keyId), SignatureAlgorithms.Es256, in parameters, DateTimeOffset.UtcNow); - } - - private static string WritePublicKeyPem(CryptoSigningKey signingKey) - { - ArgumentNullException.ThrowIfNull(signingKey); - var path = Path.Combine(Path.GetTempPath(), $"stellaops-mirror-{Guid.NewGuid():N}.pem"); - using var ecdsa = ECDsa.Create(signingKey.PublicParameters); - var publicKeyInfo = ecdsa.ExportSubjectPublicKeyInfo(); - var pem = PemEncoding.Write("PUBLIC KEY", publicKeyInfo); - File.WriteAllText(path, pem); - return path; - } - - private static (string Signature, DateTimeOffset SignedAt) CreateDetachedJws(CryptoSigningKey signingKey, string payload) - { - var provider = new DefaultCryptoProvider(); - provider.UpsertSigningKey(signingKey); - var signer = provider.GetSigner(SignatureAlgorithms.Es256, signingKey.Reference); - var header = new Dictionary<string, object?> - { - ["alg"] = SignatureAlgorithms.Es256, - ["kid"] = signingKey.Reference.KeyId, - ["provider"] = provider.Name, - ["typ"] = "application/vnd.stellaops.concelier.mirror-bundle+jws", - ["b64"] = false, - ["crit"] = new[] { "b64" } - }; - - var headerJson = JsonSerializer.Serialize(header); - var encodedHeader = Microsoft.IdentityModel.Tokens.Base64UrlEncoder.Encode(headerJson); - var payloadBytes = Encoding.UTF8.GetBytes(payload); - var signingInput = BuildSigningInput(encodedHeader, payloadBytes); - var signatureBytes = signer.SignAsync(signingInput, CancellationToken.None).GetAwaiter().GetResult(); - var encodedSignature = Microsoft.IdentityModel.Tokens.Base64UrlEncoder.Encode(signatureBytes); - return (string.Concat(encodedHeader, "..", encodedSignature), DateTimeOffset.UtcNow); - } - - private static ReadOnlyMemory<byte> BuildSigningInput(string encodedHeader, ReadOnlySpan<byte> payload) - { - var headerBytes = Encoding.ASCII.GetBytes(encodedHeader); - var buffer = new byte[headerBytes.Length + 1 + payload.Length]; - headerBytes.CopyTo(buffer, 0); - buffer[headerBytes.Length] = (byte)'.'; - payload.CopyTo(buffer.AsSpan(headerBytes.Length + 1)); - return buffer; - } -} +using System; +using System.Collections.Generic; +using System.IO; +using System.Net; +using System.Net.Http; +using System.Security.Cryptography; +using System.Text; +using System.Text.Json; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Http; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using MongoDB.Bson; +using StellaOps.Concelier.Connector.Common; +using StellaOps.Concelier.Connector.Common.Fetch; +using StellaOps.Concelier.Connector.Common.Testing; +using StellaOps.Concelier.Connector.StellaOpsMirror.Internal; +using StellaOps.Concelier.Connector.StellaOpsMirror.Settings; +using StellaOps.Concelier.Storage.Mongo; +using StellaOps.Concelier.Storage.Mongo.Advisories; +using StellaOps.Concelier.Storage.Mongo.Documents; +using StellaOps.Concelier.Storage.Mongo.Dtos; +using StellaOps.Concelier.Testing; +using StellaOps.Cryptography; +using StellaOps.Concelier.Models; +using Xunit; + +namespace StellaOps.Concelier.Connector.StellaOpsMirror.Tests; + +[Collection("mongo-fixture")] +public sealed class StellaOpsMirrorConnectorTests : 
IAsyncLifetime +{ + private readonly MongoIntegrationFixture _fixture; + private readonly CannedHttpMessageHandler _handler; + + public StellaOpsMirrorConnectorTests(MongoIntegrationFixture fixture) + { + _fixture = fixture; + _handler = new CannedHttpMessageHandler(); + } + + [Fact] + public async Task FetchAsync_PersistsMirrorArtifacts() + { + var manifestContent = "{\"domain\":\"primary\",\"files\":[]}"; + var bundleContent = "{\"advisories\":[{\"id\":\"CVE-2025-0001\"}]}"; + + var manifestDigest = ComputeDigest(manifestContent); + var bundleDigest = ComputeDigest(bundleContent); + + var index = BuildIndex(manifestDigest, Encoding.UTF8.GetByteCount(manifestContent), bundleDigest, Encoding.UTF8.GetByteCount(bundleContent), includeSignature: false); + + await using var provider = await BuildServiceProviderAsync(); + + SeedResponses(index, manifestContent, bundleContent, signature: null); + + var connector = provider.GetRequiredService<StellaOpsMirrorConnector>(); + await connector.FetchAsync(provider, CancellationToken.None); + + var documentStore = provider.GetRequiredService<IDocumentStore>(); + var manifestUri = "https://mirror.test/mirror/primary/manifest.json"; + var bundleUri = "https://mirror.test/mirror/primary/bundle.json"; + + var manifestDocument = await documentStore.FindBySourceAndUriAsync(StellaOpsMirrorConnector.Source, manifestUri, CancellationToken.None); + Assert.NotNull(manifestDocument); + Assert.Equal(DocumentStatuses.Mapped, manifestDocument!.Status); + Assert.Equal(NormalizeDigest(manifestDigest), manifestDocument.Sha256); + + var bundleDocument = await documentStore.FindBySourceAndUriAsync(StellaOpsMirrorConnector.Source, bundleUri, CancellationToken.None); + Assert.NotNull(bundleDocument); + Assert.Equal(DocumentStatuses.PendingParse, bundleDocument!.Status); + Assert.Equal(NormalizeDigest(bundleDigest), bundleDocument.Sha256); + + var rawStorage = provider.GetRequiredService<RawDocumentStorage>(); + Assert.NotNull(manifestDocument.GridFsId); + Assert.NotNull(bundleDocument.GridFsId); + + var manifestBytes = await rawStorage.DownloadAsync(manifestDocument.GridFsId!.Value, CancellationToken.None); + var bundleBytes = await rawStorage.DownloadAsync(bundleDocument.GridFsId!.Value, CancellationToken.None); + Assert.Equal(manifestContent, Encoding.UTF8.GetString(manifestBytes)); + Assert.Equal(bundleContent, Encoding.UTF8.GetString(bundleBytes)); + + var stateRepository = provider.GetRequiredService<ISourceStateRepository>(); + var state = await stateRepository.TryGetAsync(StellaOpsMirrorConnector.Source, CancellationToken.None); + Assert.NotNull(state); + + var cursorDocument = state!.Cursor ?? new BsonDocument(); + var digestValue = cursorDocument.TryGetValue("bundleDigest", out var digestBson) ? digestBson.AsString : string.Empty; + Assert.Equal(NormalizeDigest(bundleDigest), NormalizeDigest(digestValue)); + + var pendingDocumentsArray = cursorDocument.TryGetValue("pendingDocuments", out var pendingDocsBson) && pendingDocsBson is BsonArray pendingArray + ? pendingArray + : new BsonArray(); + Assert.Single(pendingDocumentsArray); + var pendingDocumentId = Guid.Parse(pendingDocumentsArray[0].AsString); + Assert.Equal(bundleDocument.Id, pendingDocumentId); + + var pendingMappingsArray = cursorDocument.TryGetValue("pendingMappings", out var pendingMappingsBson) && pendingMappingsBson is BsonArray mappingsArray + ? 
mappingsArray + : new BsonArray(); + Assert.Empty(pendingMappingsArray); + } + + [Fact] + public async Task FetchAsync_TamperedSignatureThrows() + { + var manifestContent = "{\"domain\":\"primary\"}"; + var bundleContent = "{\"advisories\":[{\"id\":\"CVE-2025-0002\"}]}"; + + var manifestDigest = ComputeDigest(manifestContent); + var bundleDigest = ComputeDigest(bundleContent); + var index = BuildIndex(manifestDigest, Encoding.UTF8.GetByteCount(manifestContent), bundleDigest, Encoding.UTF8.GetByteCount(bundleContent), includeSignature: true); + + await using var provider = await BuildServiceProviderAsync(options => + { + options.Signature.Enabled = true; + options.Signature.KeyId = "mirror-key"; + options.Signature.Provider = "default"; + }); + + var defaultProvider = provider.GetRequiredService<DefaultCryptoProvider>(); + var signingKey = CreateSigningKey("mirror-key"); + defaultProvider.UpsertSigningKey(signingKey); + + var (signatureValue, _) = CreateDetachedJws(signingKey, bundleContent); + // Tamper with signature so verification fails. + var tamperedSignature = signatureValue.Replace('a', 'b'); + + SeedResponses(index, manifestContent, bundleContent, tamperedSignature); + + var connector = provider.GetRequiredService<StellaOpsMirrorConnector>(); + await Assert.ThrowsAsync<InvalidOperationException>(() => connector.FetchAsync(provider, CancellationToken.None)); + + var stateRepository = provider.GetRequiredService<ISourceStateRepository>(); + var state = await stateRepository.TryGetAsync(StellaOpsMirrorConnector.Source, CancellationToken.None); + Assert.NotNull(state); + Assert.True(state!.FailCount >= 1); + Assert.False(state.Cursor.TryGetValue("bundleDigest", out _)); + } + + [Fact] + public async Task FetchAsync_SignatureKeyMismatchThrows() + { + var manifestContent = "{\"domain\":\"primary\"}"; + var bundleContent = "{\"advisories\":[{\"id\":\"CVE-2025-0003\"}]}"; + + var manifestDigest = ComputeDigest(manifestContent); + var bundleDigest = ComputeDigest(bundleContent); + var index = BuildIndex( + manifestDigest, + Encoding.UTF8.GetByteCount(manifestContent), + bundleDigest, + Encoding.UTF8.GetByteCount(bundleContent), + includeSignature: true, + signatureKeyId: "unexpected-key", + signatureProvider: "default"); + + var signingKey = CreateSigningKey("unexpected-key"); + var (signatureValue, _) = CreateDetachedJws(signingKey, bundleContent); + + await using var provider = await BuildServiceProviderAsync(options => + { + options.Signature.Enabled = true; + options.Signature.KeyId = "mirror-key"; + options.Signature.Provider = "default"; + }); + + SeedResponses(index, manifestContent, bundleContent, signatureValue); + + var connector = provider.GetRequiredService<StellaOpsMirrorConnector>(); + await Assert.ThrowsAsync<InvalidOperationException>(() => connector.FetchAsync(provider, CancellationToken.None)); + } + + [Fact] + public async Task FetchAsync_VerifiesSignatureUsingFallbackPublicKey() + { + var manifestContent = "{\"domain\":\"primary\"}"; + var bundleContent = "{\"advisories\":[{\"id\":\"CVE-2025-0004\"}]}"; + + var manifestDigest = ComputeDigest(manifestContent); + var bundleDigest = ComputeDigest(bundleContent); + var index = BuildIndex(manifestDigest, Encoding.UTF8.GetByteCount(manifestContent), bundleDigest, Encoding.UTF8.GetByteCount(bundleContent), includeSignature: true); + + var signingKey = CreateSigningKey("mirror-key"); + var (signatureValue, _) = CreateDetachedJws(signingKey, bundleContent); + var publicKeyPath = WritePublicKeyPem(signingKey); + + await using var 
provider = await BuildServiceProviderAsync(options => + { + options.Signature.Enabled = true; + options.Signature.KeyId = "mirror-key"; + options.Signature.Provider = "default"; + options.Signature.PublicKeyPath = publicKeyPath; + }); + + try + { + SeedResponses(index, manifestContent, bundleContent, signatureValue); + + var connector = provider.GetRequiredService<StellaOpsMirrorConnector>(); + await connector.FetchAsync(provider, CancellationToken.None); + + var stateRepository = provider.GetRequiredService<ISourceStateRepository>(); + var state = await stateRepository.TryGetAsync(StellaOpsMirrorConnector.Source, CancellationToken.None); + Assert.NotNull(state); + Assert.Equal(0, state!.FailCount); + } + finally + { + if (File.Exists(publicKeyPath)) + { + File.Delete(publicKeyPath); + } + } + } + + [Fact] + public async Task FetchAsync_DigestMismatchMarksFailure() + { + var manifestExpected = "{\"domain\":\"primary\"}"; + var manifestTampered = "{\"domain\":\"tampered\"}"; + var bundleContent = "{\"advisories\":[{\"id\":\"CVE-2025-0005\"}]}"; + + var manifestDigest = ComputeDigest(manifestExpected); + var bundleDigest = ComputeDigest(bundleContent); + var index = BuildIndex(manifestDigest, Encoding.UTF8.GetByteCount(manifestExpected), bundleDigest, Encoding.UTF8.GetByteCount(bundleContent), includeSignature: false); + + await using var provider = await BuildServiceProviderAsync(); + + SeedResponses(index, manifestTampered, bundleContent, signature: null); + + var connector = provider.GetRequiredService<StellaOpsMirrorConnector>(); + + await Assert.ThrowsAsync<InvalidOperationException>(() => connector.FetchAsync(provider, CancellationToken.None)); + + var stateRepository = provider.GetRequiredService<ISourceStateRepository>(); + var state = await stateRepository.TryGetAsync(StellaOpsMirrorConnector.Source, CancellationToken.None); + Assert.NotNull(state); + var cursor = state!.Cursor ?? new BsonDocument(); + Assert.True(state.FailCount >= 1); + Assert.False(cursor.Contains("bundleDigest")); + } + + [Fact] + public void ParseAndMap_PersistAdvisoriesFromBundle() + { + var bundleDocument = SampleData.CreateBundle(); + var bundleJson = CanonicalJsonSerializer.SerializeIndented(bundleDocument); + var normalizedFixture = FixtureLoader.Read(SampleData.BundleFixture).TrimEnd(); + Assert.Equal(normalizedFixture, FixtureLoader.Normalize(bundleJson).TrimEnd()); + + var advisories = MirrorAdvisoryMapper.Map(bundleDocument); + Assert.Single(advisories); + var advisory = advisories[0]; + + var expectedAdvisoryJson = FixtureLoader.Read(SampleData.AdvisoryFixture).TrimEnd(); + var mappedJson = CanonicalJsonSerializer.SerializeIndented(advisory); + Assert.Equal(expectedAdvisoryJson, FixtureLoader.Normalize(mappedJson).TrimEnd()); + + // AdvisoryStore integration validated elsewhere; ensure canonical serialization is stable. + } + + public Task InitializeAsync() => Task.CompletedTask; + + public Task DisposeAsync() + { + _handler.Clear(); + return Task.CompletedTask; + } + + private async Task<ServiceProvider> BuildServiceProviderAsync(Action<StellaOpsMirrorConnectorOptions>? 
configureOptions = null) + { + await _fixture.Client.DropDatabaseAsync(_fixture.Database.DatabaseNamespace.DatabaseName); + _handler.Clear(); + + var services = new ServiceCollection(); + services.AddLogging(builder => builder.AddProvider(NullLoggerProvider.Instance)); + services.AddSingleton(_handler); + services.AddSingleton(TimeProvider.System); + + services.AddMongoStorage(options => + { + options.ConnectionString = _fixture.Runner.ConnectionString; + options.DatabaseName = _fixture.Database.DatabaseNamespace.DatabaseName; + options.CommandTimeout = TimeSpan.FromSeconds(5); + }); + + services.AddSingleton<DefaultCryptoProvider>(); + services.AddSingleton<ICryptoProvider>(sp => sp.GetRequiredService<DefaultCryptoProvider>()); + services.AddSingleton<ICryptoProviderRegistry>(sp => new CryptoProviderRegistry(sp.GetServices<ICryptoProvider>())); + + var configuration = new ConfigurationBuilder() + .AddInMemoryCollection(new Dictionary<string, string?> + { + ["concelier:sources:stellaopsMirror:baseAddress"] = "https://mirror.test/", + ["concelier:sources:stellaopsMirror:domainId"] = "primary", + ["concelier:sources:stellaopsMirror:indexPath"] = "/concelier/exports/index.json", + }) + .Build(); + + var routine = new StellaOpsMirrorDependencyInjectionRoutine(); + routine.Register(services, configuration); + + if (configureOptions is not null) + { + services.PostConfigure(configureOptions); + } + + services.Configure<HttpClientFactoryOptions>("stellaops-mirror", builder => + { + builder.HttpMessageHandlerBuilderActions.Add(options => + { + options.PrimaryHandler = _handler; + }); + }); + + var provider = services.BuildServiceProvider(); + var bootstrapper = provider.GetRequiredService<MongoBootstrapper>(); + await bootstrapper.InitializeAsync(CancellationToken.None); + return provider; + } + + private void SeedResponses(string indexJson, string manifestContent, string bundleContent, string? 
signature) + { + var baseUri = new Uri("https://mirror.test"); + _handler.AddResponse(HttpMethod.Get, new Uri(baseUri, "/concelier/exports/index.json"), () => CreateJsonResponse(indexJson)); + _handler.AddResponse(HttpMethod.Get, new Uri(baseUri, "mirror/primary/manifest.json"), () => CreateJsonResponse(manifestContent)); + _handler.AddResponse(HttpMethod.Get, new Uri(baseUri, "mirror/primary/bundle.json"), () => CreateJsonResponse(bundleContent)); + + if (signature is not null) + { + _handler.AddResponse(HttpMethod.Get, new Uri(baseUri, "mirror/primary/bundle.json.jws"), () => new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(signature, Encoding.UTF8, "application/jose+json"), + }); + } + } + + private static HttpResponseMessage CreateJsonResponse(string content) + => new(HttpStatusCode.OK) + { + Content = new StringContent(content, Encoding.UTF8, "application/json"), + }; + + private static string BuildIndex( + string manifestDigest, + int manifestBytes, + string bundleDigest, + int bundleBytes, + bool includeSignature, + string signatureKeyId = "mirror-key", + string signatureProvider = "default") + { + var index = new + { + schemaVersion = 1, + generatedAt = new DateTimeOffset(2025, 10, 19, 12, 0, 0, TimeSpan.Zero), + targetRepository = "repo", + domains = new[] + { + new + { + domainId = "primary", + displayName = "Primary", + advisoryCount = 1, + manifest = new + { + path = "mirror/primary/manifest.json", + sizeBytes = manifestBytes, + digest = manifestDigest, + signature = (object?)null, + }, + bundle = new + { + path = "mirror/primary/bundle.json", + sizeBytes = bundleBytes, + digest = bundleDigest, + signature = includeSignature + ? new + { + path = "mirror/primary/bundle.json.jws", + algorithm = "ES256", + keyId = signatureKeyId, + provider = signatureProvider, + signedAt = new DateTimeOffset(2025, 10, 19, 12, 0, 0, TimeSpan.Zero), + } + : null, + }, + sources = Array.Empty<object>(), + } + } + }; + + return JsonSerializer.Serialize(index, new JsonSerializerOptions + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + WriteIndented = false, + }); + } + + private static string ComputeDigest(string content) + { + var bytes = Encoding.UTF8.GetBytes(content); + var hash = SHA256.HashData(bytes); + return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant(); + } + + private static string NormalizeDigest(string digest) + => digest.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase) ? digest[7..] 
: digest; + + private static CryptoSigningKey CreateSigningKey(string keyId) + { + using var ecdsa = ECDsa.Create(ECCurve.NamedCurves.nistP256); + var parameters = ecdsa.ExportParameters(includePrivateParameters: true); + return new CryptoSigningKey(new CryptoKeyReference(keyId), SignatureAlgorithms.Es256, in parameters, DateTimeOffset.UtcNow); + } + + private static string WritePublicKeyPem(CryptoSigningKey signingKey) + { + ArgumentNullException.ThrowIfNull(signingKey); + var path = Path.Combine(Path.GetTempPath(), $"stellaops-mirror-{Guid.NewGuid():N}.pem"); + using var ecdsa = ECDsa.Create(signingKey.PublicParameters); + var publicKeyInfo = ecdsa.ExportSubjectPublicKeyInfo(); + var pem = PemEncoding.Write("PUBLIC KEY", publicKeyInfo); + File.WriteAllText(path, pem); + return path; + } + + private static (string Signature, DateTimeOffset SignedAt) CreateDetachedJws(CryptoSigningKey signingKey, string payload) + { + var provider = new DefaultCryptoProvider(); + provider.UpsertSigningKey(signingKey); + var signer = provider.GetSigner(SignatureAlgorithms.Es256, signingKey.Reference); + var header = new Dictionary<string, object?> + { + ["alg"] = SignatureAlgorithms.Es256, + ["kid"] = signingKey.Reference.KeyId, + ["provider"] = provider.Name, + ["typ"] = "application/vnd.stellaops.concelier.mirror-bundle+jws", + ["b64"] = false, + ["crit"] = new[] { "b64" } + }; + + var headerJson = JsonSerializer.Serialize(header); + var encodedHeader = Microsoft.IdentityModel.Tokens.Base64UrlEncoder.Encode(headerJson); + var payloadBytes = Encoding.UTF8.GetBytes(payload); + var signingInput = BuildSigningInput(encodedHeader, payloadBytes); + var signatureBytes = signer.SignAsync(signingInput, CancellationToken.None).GetAwaiter().GetResult(); + var encodedSignature = Microsoft.IdentityModel.Tokens.Base64UrlEncoder.Encode(signatureBytes); + return (string.Concat(encodedHeader, "..", encodedSignature), DateTimeOffset.UtcNow); + } + + private static ReadOnlyMemory<byte> BuildSigningInput(string encodedHeader, ReadOnlySpan<byte> payload) + { + var headerBytes = Encoding.ASCII.GetBytes(encodedHeader); + var buffer = new byte[headerBytes.Length + 1 + payload.Length]; + headerBytes.CopyTo(buffer, 0); + buffer[headerBytes.Length] = (byte)'.'; + payload.CopyTo(buffer.AsSpan(headerBytes.Length + 1)); + return buffer; + } +} diff --git a/src/StellaOps.Concelier.Connector.Vndr.Adobe.Tests/Adobe/AdobeConnectorFetchTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Adobe.Tests/Adobe/AdobeConnectorFetchTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Adobe.Tests/Adobe/AdobeConnectorFetchTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Adobe.Tests/Adobe/AdobeConnectorFetchTests.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Adobe.Tests/Adobe/Fixtures/adobe-advisories.snapshot.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Adobe.Tests/Adobe/Fixtures/adobe-advisories.snapshot.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Adobe.Tests/Adobe/Fixtures/adobe-advisories.snapshot.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Adobe.Tests/Adobe/Fixtures/adobe-advisories.snapshot.json diff --git a/src/StellaOps.Concelier.Connector.Vndr.Adobe.Tests/Adobe/Fixtures/adobe-detail-apsb25-85.html b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Adobe.Tests/Adobe/Fixtures/adobe-detail-apsb25-85.html similarity index 100% rename from 
src/StellaOps.Concelier.Connector.Vndr.Adobe.Tests/Adobe/Fixtures/adobe-detail-apsb25-85.html rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Adobe.Tests/Adobe/Fixtures/adobe-detail-apsb25-85.html diff --git a/src/StellaOps.Concelier.Connector.Vndr.Adobe.Tests/Adobe/Fixtures/adobe-detail-apsb25-87.html b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Adobe.Tests/Adobe/Fixtures/adobe-detail-apsb25-87.html similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Adobe.Tests/Adobe/Fixtures/adobe-detail-apsb25-87.html rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Adobe.Tests/Adobe/Fixtures/adobe-detail-apsb25-87.html diff --git a/src/StellaOps.Concelier.Connector.Vndr.Adobe.Tests/Adobe/Fixtures/adobe-index.html b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Adobe.Tests/Adobe/Fixtures/adobe-index.html similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Adobe.Tests/Adobe/Fixtures/adobe-index.html rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Adobe.Tests/Adobe/Fixtures/adobe-index.html diff --git a/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Adobe.Tests/StellaOps.Concelier.Connector.Vndr.Adobe.Tests.csproj b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Adobe.Tests/StellaOps.Concelier.Connector.Vndr.Adobe.Tests.csproj new file mode 100644 index 00000000..2a80de0a --- /dev/null +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Adobe.Tests/StellaOps.Concelier.Connector.Vndr.Adobe.Tests.csproj @@ -0,0 +1,18 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Vndr.Adobe/StellaOps.Concelier.Connector.Vndr.Adobe.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> + </ItemGroup> + <ItemGroup> + <None Include="Adobe/Fixtures/*.html" CopyToOutputDirectory="Always" TargetPath="Source/Vndr/Adobe/Fixtures/%(Filename)%(Extension)" /> + <None Include="Adobe/Fixtures/*.json" CopyToOutputDirectory="Always" TargetPath="Source/Vndr/Adobe/Fixtures/%(Filename)%(Extension)" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/AppleConnectorTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/AppleConnectorTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/AppleConnectorTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/AppleConnectorTests.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/AppleFixtureManager.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/AppleFixtureManager.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/AppleFixtureManager.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/AppleFixtureManager.cs diff --git 
a/src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/AppleLiveRegressionTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/AppleLiveRegressionTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/AppleLiveRegressionTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/AppleLiveRegressionTests.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/106355.expected.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/106355.expected.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/106355.expected.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/106355.expected.json diff --git a/src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/106355.html b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/106355.html similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/106355.html rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/106355.html diff --git a/src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/125326.expected.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/125326.expected.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/125326.expected.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/125326.expected.json diff --git a/src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/125326.html b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/125326.html similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/125326.html rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/125326.html diff --git a/src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/125328.expected.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/125328.expected.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/125328.expected.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/125328.expected.json diff --git a/src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/125328.html b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/125328.html similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/125328.html rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/125328.html diff --git a/src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/HT214108.expected.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/HT214108.expected.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/HT214108.expected.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/HT214108.expected.json diff --git a/src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/HT215500.expected.json 
b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/HT215500.expected.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/HT215500.expected.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/HT215500.expected.json diff --git a/src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/ht214108.html b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/ht214108.html similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/ht214108.html rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/ht214108.html diff --git a/src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/ht215500.html b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/ht215500.html similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/ht215500.html rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/ht215500.html diff --git a/src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/index.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/index.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/index.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/Apple/Fixtures/index.json diff --git a/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests.csproj b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests.csproj new file mode 100644 index 00000000..4c290f54 --- /dev/null +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests.csproj @@ -0,0 +1,19 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Vndr.Apple/StellaOps.Concelier.Connector.Vndr.Apple.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" /> + </ItemGroup> + <ItemGroup> + <None Include="Apple/Fixtures/*.html" CopyToOutputDirectory="Always" TargetPath="Source/Vndr/Apple/Fixtures/%(Filename)%(Extension)" /> + <None Include="Apple/Fixtures/*.json" CopyToOutputDirectory="Always" TargetPath="Source/Vndr/Apple/Fixtures/%(Filename)%(Extension)" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Vndr.Chromium.Tests/Chromium/ChromiumConnectorTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Chromium.Tests/Chromium/ChromiumConnectorTests.cs similarity index 100% rename from 
src/StellaOps.Concelier.Connector.Vndr.Chromium.Tests/Chromium/ChromiumConnectorTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Chromium.Tests/Chromium/ChromiumConnectorTests.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Chromium.Tests/Chromium/ChromiumMapperTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Chromium.Tests/Chromium/ChromiumMapperTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Chromium.Tests/Chromium/ChromiumMapperTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Chromium.Tests/Chromium/ChromiumMapperTests.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Chromium.Tests/Chromium/Fixtures/chromium-advisory.snapshot.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Chromium.Tests/Chromium/Fixtures/chromium-advisory.snapshot.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Chromium.Tests/Chromium/Fixtures/chromium-advisory.snapshot.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Chromium.Tests/Chromium/Fixtures/chromium-advisory.snapshot.json diff --git a/src/StellaOps.Concelier.Connector.Vndr.Chromium.Tests/Chromium/Fixtures/chromium-detail.html b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Chromium.Tests/Chromium/Fixtures/chromium-detail.html similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Chromium.Tests/Chromium/Fixtures/chromium-detail.html rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Chromium.Tests/Chromium/Fixtures/chromium-detail.html diff --git a/src/StellaOps.Concelier.Connector.Vndr.Chromium.Tests/Chromium/Fixtures/chromium-feed.xml b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Chromium.Tests/Chromium/Fixtures/chromium-feed.xml similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Chromium.Tests/Chromium/Fixtures/chromium-feed.xml rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Chromium.Tests/Chromium/Fixtures/chromium-feed.xml diff --git a/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Chromium.Tests/StellaOps.Concelier.Connector.Vndr.Chromium.Tests.csproj b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Chromium.Tests/StellaOps.Concelier.Connector.Vndr.Chromium.Tests.csproj new file mode 100644 index 00000000..3a3f8b8b --- /dev/null +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Chromium.Tests/StellaOps.Concelier.Connector.Vndr.Chromium.Tests.csproj @@ -0,0 +1,19 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Vndr.Chromium/StellaOps.Concelier.Connector.Vndr.Chromium.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> + </ItemGroup> + <ItemGroup> + <None Include="Chromium/Fixtures/*.html" CopyToOutputDirectory="Always" /> + <None Include="Chromium/Fixtures/*.xml" CopyToOutputDirectory="Always" /> + <None Include="Chromium/Fixtures/*.json" 
CopyToOutputDirectory="Always" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Vndr.Cisco.Tests/CiscoDtoFactoryTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Cisco.Tests/CiscoDtoFactoryTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Cisco.Tests/CiscoDtoFactoryTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Cisco.Tests/CiscoDtoFactoryTests.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Cisco.Tests/CiscoMapperTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Cisco.Tests/CiscoMapperTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Cisco.Tests/CiscoMapperTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Cisco.Tests/CiscoMapperTests.cs diff --git a/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Cisco.Tests/StellaOps.Concelier.Connector.Vndr.Cisco.Tests.csproj b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Cisco.Tests/StellaOps.Concelier.Connector.Vndr.Cisco.Tests.csproj new file mode 100644 index 00000000..90744aa3 --- /dev/null +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Cisco.Tests/StellaOps.Concelier.Connector.Vndr.Cisco.Tests.csproj @@ -0,0 +1,18 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Vndr.Cisco/StellaOps.Concelier.Connector.Vndr.Cisco.csproj" /> + </ItemGroup> + + <ItemGroup> + <PackageReference Include="FluentAssertions" Version="6.12.0" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Vndr.Msrc.Tests/Fixtures/msrc-detail.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Msrc.Tests/Fixtures/msrc-detail.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Msrc.Tests/Fixtures/msrc-detail.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Msrc.Tests/Fixtures/msrc-detail.json diff --git a/src/StellaOps.Concelier.Connector.Vndr.Msrc.Tests/Fixtures/msrc-summary.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Msrc.Tests/Fixtures/msrc-summary.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Msrc.Tests/Fixtures/msrc-summary.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Msrc.Tests/Fixtures/msrc-summary.json diff --git a/src/StellaOps.Concelier.Connector.Vndr.Msrc.Tests/MsrcConnectorTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Msrc.Tests/MsrcConnectorTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Msrc.Tests/MsrcConnectorTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Msrc.Tests/MsrcConnectorTests.cs diff --git a/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Msrc.Tests/StellaOps.Concelier.Connector.Vndr.Msrc.Tests.csproj b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Msrc.Tests/StellaOps.Concelier.Connector.Vndr.Msrc.Tests.csproj 
new file mode 100644 index 00000000..479383ac --- /dev/null +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Msrc.Tests/StellaOps.Concelier.Connector.Vndr.Msrc.Tests.csproj @@ -0,0 +1,25 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Vndr.Msrc/StellaOps.Concelier.Connector.Vndr.Msrc.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" /> + </ItemGroup> + + <ItemGroup> + <PackageReference Include="FluentAssertions" Version="6.12.0" /> + </ItemGroup> + + <ItemGroup> + <None Update="Fixtures\*.json"> + <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> + </None> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/Oracle/Fixtures/oracle-advisories.snapshot.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/Oracle/Fixtures/oracle-advisories.snapshot.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/Oracle/Fixtures/oracle-advisories.snapshot.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/Oracle/Fixtures/oracle-advisories.snapshot.json diff --git a/src/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/Oracle/Fixtures/oracle-calendar-cpuapr2024-single.html b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/Oracle/Fixtures/oracle-calendar-cpuapr2024-single.html similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/Oracle/Fixtures/oracle-calendar-cpuapr2024-single.html rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/Oracle/Fixtures/oracle-calendar-cpuapr2024-single.html diff --git a/src/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/Oracle/Fixtures/oracle-calendar-cpuapr2024.html b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/Oracle/Fixtures/oracle-calendar-cpuapr2024.html similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/Oracle/Fixtures/oracle-calendar-cpuapr2024.html rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/Oracle/Fixtures/oracle-calendar-cpuapr2024.html diff --git a/src/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/Oracle/Fixtures/oracle-detail-cpuapr2024-01.html b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/Oracle/Fixtures/oracle-detail-cpuapr2024-01.html similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/Oracle/Fixtures/oracle-detail-cpuapr2024-01.html rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/Oracle/Fixtures/oracle-detail-cpuapr2024-01.html diff --git a/src/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/Oracle/Fixtures/oracle-detail-cpuapr2024-02.html b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/Oracle/Fixtures/oracle-detail-cpuapr2024-02.html similarity index 100% rename from 
src/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/Oracle/Fixtures/oracle-detail-cpuapr2024-02.html rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/Oracle/Fixtures/oracle-detail-cpuapr2024-02.html diff --git a/src/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/Oracle/Fixtures/oracle-detail-invalid.html b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/Oracle/Fixtures/oracle-detail-invalid.html similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/Oracle/Fixtures/oracle-detail-invalid.html rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/Oracle/Fixtures/oracle-detail-invalid.html diff --git a/src/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/Oracle/OracleConnectorTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/Oracle/OracleConnectorTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/Oracle/OracleConnectorTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/Oracle/OracleConnectorTests.cs diff --git a/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/StellaOps.Concelier.Connector.Vndr.Oracle.Tests.csproj b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/StellaOps.Concelier.Connector.Vndr.Oracle.Tests.csproj new file mode 100644 index 00000000..209b9573 --- /dev/null +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/StellaOps.Concelier.Connector.Vndr.Oracle.Tests.csproj @@ -0,0 +1,18 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Vndr.Oracle/StellaOps.Concelier.Connector.Vndr.Oracle.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> + </ItemGroup> + <ItemGroup> + <None Include="Oracle/Fixtures/**/*.json" CopyToOutputDirectory="Always" /> + <None Include="Oracle/Fixtures/**/*.html" CopyToOutputDirectory="Always" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/StellaOps.Concelier.Connector.Vndr.Vmware.Tests.csproj b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/StellaOps.Concelier.Connector.Vndr.Vmware.Tests.csproj new file mode 100644 index 00000000..654c82a2 --- /dev/null +++ b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/StellaOps.Concelier.Connector.Vndr.Vmware.Tests.csproj @@ -0,0 +1,19 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference 
Include="../../__Libraries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Connector.Vndr.Vmware/StellaOps.Concelier.Connector.Vndr.Vmware.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> + </ItemGroup> + <ItemGroup> + <None Update="Vmware/Fixtures/*.json"> + <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> + </None> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/Fixtures/vmware-advisories.snapshot.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/Fixtures/vmware-advisories.snapshot.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/Fixtures/vmware-advisories.snapshot.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/Fixtures/vmware-advisories.snapshot.json diff --git a/src/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/Fixtures/vmware-detail-vmsa-2024-0001.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/Fixtures/vmware-detail-vmsa-2024-0001.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/Fixtures/vmware-detail-vmsa-2024-0001.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/Fixtures/vmware-detail-vmsa-2024-0001.json diff --git a/src/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/Fixtures/vmware-detail-vmsa-2024-0002.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/Fixtures/vmware-detail-vmsa-2024-0002.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/Fixtures/vmware-detail-vmsa-2024-0002.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/Fixtures/vmware-detail-vmsa-2024-0002.json diff --git a/src/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/Fixtures/vmware-detail-vmsa-2024-0003.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/Fixtures/vmware-detail-vmsa-2024-0003.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/Fixtures/vmware-detail-vmsa-2024-0003.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/Fixtures/vmware-detail-vmsa-2024-0003.json diff --git a/src/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/Fixtures/vmware-index-initial.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/Fixtures/vmware-index-initial.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/Fixtures/vmware-index-initial.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/Fixtures/vmware-index-initial.json diff --git a/src/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/Fixtures/vmware-index-second.json b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/Fixtures/vmware-index-second.json similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/Fixtures/vmware-index-second.json rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/Fixtures/vmware-index-second.json diff --git 
a/src/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/VmwareConnectorTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/VmwareConnectorTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/VmwareConnectorTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/VmwareConnectorTests.cs diff --git a/src/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/VmwareMapperTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/VmwareMapperTests.cs similarity index 100% rename from src/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/VmwareMapperTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/Vmware/VmwareMapperTests.cs diff --git a/src/StellaOps.Concelier.Core.Tests/Aoc/AdvisoryRawWriteGuardTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/Aoc/AdvisoryRawWriteGuardTests.cs similarity index 97% rename from src/StellaOps.Concelier.Core.Tests/Aoc/AdvisoryRawWriteGuardTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/Aoc/AdvisoryRawWriteGuardTests.cs index 5fedd86a..725783fa 100644 --- a/src/StellaOps.Concelier.Core.Tests/Aoc/AdvisoryRawWriteGuardTests.cs +++ b/src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/Aoc/AdvisoryRawWriteGuardTests.cs @@ -1,83 +1,83 @@ -using System.Collections.Immutable; -using System.Text.Json; -using StellaOps.Aoc; -using StellaOps.Concelier.Core.Aoc; -using StellaOps.Concelier.RawModels; - -namespace StellaOps.Concelier.Core.Tests.Aoc; - -public sealed class AdvisoryRawWriteGuardTests -{ - private static AdvisoryRawDocument CreateDocument( - string tenant = "tenant-a", - bool signaturePresent = false, - bool includeSignaturePayload = true) - { - using var rawDocument = JsonDocument.Parse("""{"id":"demo"}"""); - var signature = signaturePresent - ? new RawSignatureMetadata( - Present: true, - Format: "dsse", - KeyId: "key-1", - Signature: includeSignaturePayload ? 
"base64signature" : null) - : new RawSignatureMetadata(false); - - return new AdvisoryRawDocument( - Tenant: tenant, - Source: new RawSourceMetadata("vendor-x", "connector-y", "1.0.0"), - Upstream: new RawUpstreamMetadata( - UpstreamId: "GHSA-xxxx", - DocumentVersion: "1", - RetrievedAt: DateTimeOffset.UtcNow, - ContentHash: "sha256:abc", - Signature: signature, - Provenance: ImmutableDictionary<string, string>.Empty), - Content: new RawContent( - Format: "OSV", - SpecVersion: "1.0", - Raw: rawDocument.RootElement.Clone()), - Identifiers: new RawIdentifiers( - Aliases: ImmutableArray.Create("GHSA-xxxx"), - PrimaryId: "GHSA-xxxx"), - Linkset: new RawLinkset - { - Aliases = ImmutableArray<string>.Empty, - PackageUrls = ImmutableArray<string>.Empty, - Cpes = ImmutableArray<string>.Empty, - References = ImmutableArray<RawReference>.Empty, - ReconciledFrom = ImmutableArray<string>.Empty, - Notes = ImmutableDictionary<string, string>.Empty - }); - } - - [Fact] - public void EnsureValid_AllowsMinimalDocument() - { - var guard = new AdvisoryRawWriteGuard(new AocWriteGuard()); - var document = CreateDocument(); - - guard.EnsureValid(document); - } - - [Fact] - public void EnsureValid_ThrowsWhenTenantMissing() - { - var guard = new AdvisoryRawWriteGuard(new AocWriteGuard()); - var document = CreateDocument(tenant: string.Empty); - - var exception = Assert.Throws<ConcelierAocGuardException>(() => guard.EnsureValid(document)); - Assert.Equal("ERR_AOC_004", exception.PrimaryErrorCode); - Assert.Contains(exception.Violations, violation => violation.ErrorCode == "ERR_AOC_004" && violation.Path == "/tenant"); - } - - [Fact] - public void EnsureValid_ThrowsWhenSignaturePayloadMissing() - { - var guard = new AdvisoryRawWriteGuard(new AocWriteGuard()); - var document = CreateDocument(signaturePresent: true, includeSignaturePayload: false); - - var exception = Assert.Throws<ConcelierAocGuardException>(() => guard.EnsureValid(document)); - Assert.Equal("ERR_AOC_005", exception.PrimaryErrorCode); - Assert.Contains(exception.Violations, violation => violation.ErrorCode == "ERR_AOC_005"); - } -} +using System.Collections.Immutable; +using System.Text.Json; +using StellaOps.Aoc; +using StellaOps.Concelier.Core.Aoc; +using StellaOps.Concelier.RawModels; + +namespace StellaOps.Concelier.Core.Tests.Aoc; + +public sealed class AdvisoryRawWriteGuardTests +{ + private static AdvisoryRawDocument CreateDocument( + string tenant = "tenant-a", + bool signaturePresent = false, + bool includeSignaturePayload = true) + { + using var rawDocument = JsonDocument.Parse("""{"id":"demo"}"""); + var signature = signaturePresent + ? new RawSignatureMetadata( + Present: true, + Format: "dsse", + KeyId: "key-1", + Signature: includeSignaturePayload ? 
"base64signature" : null) + : new RawSignatureMetadata(false); + + return new AdvisoryRawDocument( + Tenant: tenant, + Source: new RawSourceMetadata("vendor-x", "connector-y", "1.0.0"), + Upstream: new RawUpstreamMetadata( + UpstreamId: "GHSA-xxxx", + DocumentVersion: "1", + RetrievedAt: DateTimeOffset.UtcNow, + ContentHash: "sha256:abc", + Signature: signature, + Provenance: ImmutableDictionary<string, string>.Empty), + Content: new RawContent( + Format: "OSV", + SpecVersion: "1.0", + Raw: rawDocument.RootElement.Clone()), + Identifiers: new RawIdentifiers( + Aliases: ImmutableArray.Create("GHSA-xxxx"), + PrimaryId: "GHSA-xxxx"), + Linkset: new RawLinkset + { + Aliases = ImmutableArray<string>.Empty, + PackageUrls = ImmutableArray<string>.Empty, + Cpes = ImmutableArray<string>.Empty, + References = ImmutableArray<RawReference>.Empty, + ReconciledFrom = ImmutableArray<string>.Empty, + Notes = ImmutableDictionary<string, string>.Empty + }); + } + + [Fact] + public void EnsureValid_AllowsMinimalDocument() + { + var guard = new AdvisoryRawWriteGuard(new AocWriteGuard()); + var document = CreateDocument(); + + guard.EnsureValid(document); + } + + [Fact] + public void EnsureValid_ThrowsWhenTenantMissing() + { + var guard = new AdvisoryRawWriteGuard(new AocWriteGuard()); + var document = CreateDocument(tenant: string.Empty); + + var exception = Assert.Throws<ConcelierAocGuardException>(() => guard.EnsureValid(document)); + Assert.Equal("ERR_AOC_004", exception.PrimaryErrorCode); + Assert.Contains(exception.Violations, violation => violation.ErrorCode == "ERR_AOC_004" && violation.Path == "/tenant"); + } + + [Fact] + public void EnsureValid_ThrowsWhenSignaturePayloadMissing() + { + var guard = new AdvisoryRawWriteGuard(new AocWriteGuard()); + var document = CreateDocument(signaturePresent: true, includeSignaturePayload: false); + + var exception = Assert.Throws<ConcelierAocGuardException>(() => guard.EnsureValid(document)); + Assert.Equal("ERR_AOC_005", exception.PrimaryErrorCode); + Assert.Contains(exception.Violations, violation => violation.ErrorCode == "ERR_AOC_005"); + } +} diff --git a/src/StellaOps.Concelier.Core.Tests/CanonicalMergerTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/CanonicalMergerTests.cs similarity index 100% rename from src/StellaOps.Concelier.Core.Tests/CanonicalMergerTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/CanonicalMergerTests.cs diff --git a/src/StellaOps.Concelier.Core.Tests/Events/AdvisoryEventLogTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/Events/AdvisoryEventLogTests.cs similarity index 100% rename from src/StellaOps.Concelier.Core.Tests/Events/AdvisoryEventLogTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/Events/AdvisoryEventLogTests.cs diff --git a/src/StellaOps.Concelier.Core.Tests/JobCoordinatorTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/JobCoordinatorTests.cs similarity index 100% rename from src/StellaOps.Concelier.Core.Tests/JobCoordinatorTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/JobCoordinatorTests.cs diff --git a/src/StellaOps.Concelier.Core.Tests/JobPluginRegistrationExtensionsTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/JobPluginRegistrationExtensionsTests.cs similarity index 100% rename from src/StellaOps.Concelier.Core.Tests/JobPluginRegistrationExtensionsTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/JobPluginRegistrationExtensionsTests.cs diff --git 
a/src/StellaOps.Concelier.Core.Tests/JobSchedulerBuilderTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/JobSchedulerBuilderTests.cs similarity index 100% rename from src/StellaOps.Concelier.Core.Tests/JobSchedulerBuilderTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/JobSchedulerBuilderTests.cs diff --git a/src/StellaOps.Concelier.Core.Tests/Linksets/AdvisoryLinksetMapperTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/Linksets/AdvisoryLinksetMapperTests.cs similarity index 97% rename from src/StellaOps.Concelier.Core.Tests/Linksets/AdvisoryLinksetMapperTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/Linksets/AdvisoryLinksetMapperTests.cs index 019345bc..51e4cb7f 100644 --- a/src/StellaOps.Concelier.Core.Tests/Linksets/AdvisoryLinksetMapperTests.cs +++ b/src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/Linksets/AdvisoryLinksetMapperTests.cs @@ -1,125 +1,125 @@ -using System.Collections.Immutable; -using System.Linq; -using System.Text.Json; -using StellaOps.Concelier.Core.Linksets; -using StellaOps.Concelier.RawModels; -using Xunit; - -namespace StellaOps.Concelier.Core.Tests.Linksets; - -public sealed class AdvisoryLinksetMapperTests -{ - [Fact] - public void Map_CollectsSignalsFromIdentifiersAndContent() - { - using var contentDoc = JsonDocument.Parse( - """ - { - "cve": { "id": "CVE-2025-0001" }, - "metadata": { - "ghsa": "GHSA-xxxx-yyyy-zzzz" - }, - "affected": [ - { - "package": { "purl": "pkg:npm/package-a@1.0.0" }, - "cpe": "cpe:2.3:a:vendor:product:1.0:*:*:*:*:*:*:*" - } - ], - "references": [ - { "type": "Advisory", "url": "https://example.test/advisory" }, - { "url": "https://example.test/patch", "source": "vendor" } - ] - } - """); - - var document = new AdvisoryRawDocument( - Tenant: "tenant-a", - Source: new RawSourceMetadata("vendor", "connector", "1.0.0"), - Upstream: new RawUpstreamMetadata( - UpstreamId: "GHSA-xxxx-yyyy-zzzz", - DocumentVersion: "1", - RetrievedAt: DateTimeOffset.UtcNow, - ContentHash: "sha256:abc", - Signature: new RawSignatureMetadata(false), - Provenance: ImmutableDictionary<string, string>.Empty), - Content: new RawContent( - Format: "OSV", - SpecVersion: "1.0", - Raw: contentDoc.RootElement.Clone()), - Identifiers: new RawIdentifiers( - Aliases: ImmutableArray.Create("GHSA-xxxx-yyyy-zzzz"), - PrimaryId: "GHSA-xxxx-yyyy-zzzz"), - Linkset: new RawLinkset()); - - var mapper = new AdvisoryLinksetMapper(); - - var result = mapper.Map(document); - - Assert.Equal(new[] { "cve-2025-0001", "ghsa-xxxx-yyyy-zzzz" }, result.Aliases); - Assert.Equal(new[] { "pkg:npm/package-a@1.0.0" }, result.PackageUrls); - Assert.Equal(new[] { "cpe:2.3:a:vendor:product:1.0:*:*:*:*:*:*:*" }, result.Cpes); - - Assert.Equal(2, result.References.Length); - Assert.Contains(result.References, reference => reference.Type == "advisory" && reference.Url == "https://example.test/advisory"); - Assert.Contains(result.References, reference => reference.Type == "unspecified" && reference.Url == "https://example.test/patch" && reference.Source == "vendor"); - - var expectedPointers = new[] - { - "/content/raw/affected/0/cpe", - "/content/raw/affected/0/package/purl", - "/content/raw/cve/id", - "/content/raw/metadata/ghsa", - "/content/raw/references/0/url", - "/content/raw/references/1/url", - "/identifiers/aliases/0", - "/identifiers/primary" - }; - - Assert.Equal(expectedPointers.OrderBy(static value => value, StringComparer.Ordinal), result.ReconciledFrom); - } - - [Fact] - public void 
Map_DeduplicatesValuesButRetainsMultipleOrigins() - { - using var contentDoc = JsonDocument.Parse( - """ - { - "aliases": ["CVE-2025-0002", "CVE-2025-0002"], - "packages": [ - { "coordinates": "pkg:npm/package-b@2.0.0" }, - { "coordinates": "pkg:npm/package-b@2.0.0" } - ] - } - """); - - var document = new AdvisoryRawDocument( - Tenant: "tenant-a", - Source: new RawSourceMetadata("vendor", "connector", "1.0.0"), - Upstream: new RawUpstreamMetadata( - UpstreamId: "GHSA-example", - DocumentVersion: "1", - RetrievedAt: DateTimeOffset.UtcNow, - ContentHash: "sha256:def", - Signature: new RawSignatureMetadata(false), - Provenance: ImmutableDictionary<string, string>.Empty), - Content: new RawContent( - Format: "custom", - SpecVersion: null, - Raw: contentDoc.RootElement.Clone()), - Identifiers: new RawIdentifiers( - Aliases: ImmutableArray<string>.Empty, - PrimaryId: "GHSA-example"), - Linkset: new RawLinkset()); - - var mapper = new AdvisoryLinksetMapper(); - var result = mapper.Map(document); - - Assert.Equal(new[] { "cve-2025-0002" }, result.Aliases); - Assert.Equal(new[] { "pkg:npm/package-b@2.0.0" }, result.PackageUrls); - - Assert.Contains("/content/raw/aliases/0", result.ReconciledFrom); - Assert.Contains("/content/raw/aliases/1", result.ReconciledFrom); - Assert.Contains("/content/raw/packages/0/coordinates", result.ReconciledFrom); - Assert.Contains("/content/raw/packages/1/coordinates", result.ReconciledFrom); - } -} +using System.Collections.Immutable; +using System.Linq; +using System.Text.Json; +using StellaOps.Concelier.Core.Linksets; +using StellaOps.Concelier.RawModels; +using Xunit; + +namespace StellaOps.Concelier.Core.Tests.Linksets; + +public sealed class AdvisoryLinksetMapperTests +{ + [Fact] + public void Map_CollectsSignalsFromIdentifiersAndContent() + { + using var contentDoc = JsonDocument.Parse( + """ + { + "cve": { "id": "CVE-2025-0001" }, + "metadata": { + "ghsa": "GHSA-xxxx-yyyy-zzzz" + }, + "affected": [ + { + "package": { "purl": "pkg:npm/package-a@1.0.0" }, + "cpe": "cpe:2.3:a:vendor:product:1.0:*:*:*:*:*:*:*" + } + ], + "references": [ + { "type": "Advisory", "url": "https://example.test/advisory" }, + { "url": "https://example.test/patch", "source": "vendor" } + ] + } + """); + + var document = new AdvisoryRawDocument( + Tenant: "tenant-a", + Source: new RawSourceMetadata("vendor", "connector", "1.0.0"), + Upstream: new RawUpstreamMetadata( + UpstreamId: "GHSA-xxxx-yyyy-zzzz", + DocumentVersion: "1", + RetrievedAt: DateTimeOffset.UtcNow, + ContentHash: "sha256:abc", + Signature: new RawSignatureMetadata(false), + Provenance: ImmutableDictionary<string, string>.Empty), + Content: new RawContent( + Format: "OSV", + SpecVersion: "1.0", + Raw: contentDoc.RootElement.Clone()), + Identifiers: new RawIdentifiers( + Aliases: ImmutableArray.Create("GHSA-xxxx-yyyy-zzzz"), + PrimaryId: "GHSA-xxxx-yyyy-zzzz"), + Linkset: new RawLinkset()); + + var mapper = new AdvisoryLinksetMapper(); + + var result = mapper.Map(document); + + Assert.Equal(new[] { "cve-2025-0001", "ghsa-xxxx-yyyy-zzzz" }, result.Aliases); + Assert.Equal(new[] { "pkg:npm/package-a@1.0.0" }, result.PackageUrls); + Assert.Equal(new[] { "cpe:2.3:a:vendor:product:1.0:*:*:*:*:*:*:*" }, result.Cpes); + + Assert.Equal(2, result.References.Length); + Assert.Contains(result.References, reference => reference.Type == "advisory" && reference.Url == "https://example.test/advisory"); + Assert.Contains(result.References, reference => reference.Type == "unspecified" && reference.Url == "https://example.test/patch" && 
reference.Source == "vendor"); + + var expectedPointers = new[] + { + "/content/raw/affected/0/cpe", + "/content/raw/affected/0/package/purl", + "/content/raw/cve/id", + "/content/raw/metadata/ghsa", + "/content/raw/references/0/url", + "/content/raw/references/1/url", + "/identifiers/aliases/0", + "/identifiers/primary" + }; + + Assert.Equal(expectedPointers.OrderBy(static value => value, StringComparer.Ordinal), result.ReconciledFrom); + } + + [Fact] + public void Map_DeduplicatesValuesButRetainsMultipleOrigins() + { + using var contentDoc = JsonDocument.Parse( + """ + { + "aliases": ["CVE-2025-0002", "CVE-2025-0002"], + "packages": [ + { "coordinates": "pkg:npm/package-b@2.0.0" }, + { "coordinates": "pkg:npm/package-b@2.0.0" } + ] + } + """); + + var document = new AdvisoryRawDocument( + Tenant: "tenant-a", + Source: new RawSourceMetadata("vendor", "connector", "1.0.0"), + Upstream: new RawUpstreamMetadata( + UpstreamId: "GHSA-example", + DocumentVersion: "1", + RetrievedAt: DateTimeOffset.UtcNow, + ContentHash: "sha256:def", + Signature: new RawSignatureMetadata(false), + Provenance: ImmutableDictionary<string, string>.Empty), + Content: new RawContent( + Format: "custom", + SpecVersion: null, + Raw: contentDoc.RootElement.Clone()), + Identifiers: new RawIdentifiers( + Aliases: ImmutableArray<string>.Empty, + PrimaryId: "GHSA-example"), + Linkset: new RawLinkset()); + + var mapper = new AdvisoryLinksetMapper(); + var result = mapper.Map(document); + + Assert.Equal(new[] { "cve-2025-0002" }, result.Aliases); + Assert.Equal(new[] { "pkg:npm/package-b@2.0.0" }, result.PackageUrls); + + Assert.Contains("/content/raw/aliases/0", result.ReconciledFrom); + Assert.Contains("/content/raw/aliases/1", result.ReconciledFrom); + Assert.Contains("/content/raw/packages/0/coordinates", result.ReconciledFrom); + Assert.Contains("/content/raw/packages/1/coordinates", result.ReconciledFrom); + } +} diff --git a/src/StellaOps.Concelier.Core.Tests/Linksets/AdvisoryObservationFactoryTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/Linksets/AdvisoryObservationFactoryTests.cs similarity index 97% rename from src/StellaOps.Concelier.Core.Tests/Linksets/AdvisoryObservationFactoryTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/Linksets/AdvisoryObservationFactoryTests.cs index f9aec004..a3531165 100644 --- a/src/StellaOps.Concelier.Core.Tests/Linksets/AdvisoryObservationFactoryTests.cs +++ b/src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/Linksets/AdvisoryObservationFactoryTests.cs @@ -1,151 +1,151 @@ -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Text.Json; -using StellaOps.Concelier.Core.Linksets; -using StellaOps.Concelier.Models.Observations; -using StellaOps.Concelier.RawModels; -using Xunit; - -namespace StellaOps.Concelier.Core.Tests.Linksets; - -public sealed class AdvisoryObservationFactoryTests -{ - private static readonly DateTimeOffset SampleTimestamp = DateTimeOffset.Parse("2025-10-26T12:34:56Z"); - - [Fact] - public void Create_NormalizesLinksetIdentifiersAndReferences() - { - var factory = new AdvisoryObservationFactory(); - var rawDocument = BuildRawDocument( - identifiers: new RawIdentifiers( - Aliases: ImmutableArray.Create(" CVE-2025-0001 ", "ghsa-XXXX-YYYY"), - PrimaryId: "GHSA-XXXX-YYYY"), - linkset: new RawLinkset - { - PackageUrls = ImmutableArray.Create("pkg:NPM/left-pad@1.0.0", "pkg:npm/left-pad@1.0.0?foo=bar"), - Cpes = ImmutableArray.Create("cpe:/a:Example:Product:1.0", "cpe:/a:example:product:1.0"), - 
Aliases = ImmutableArray.Create(" CVE-2025-0001 "), - References = ImmutableArray.Create( - new RawReference("Advisory", " https://example.test/advisory "), - new RawReference("ADVISORY", "https://example.test/advisory")) - }); - - var observation = factory.Create(rawDocument, SampleTimestamp); - - Assert.Equal(SampleTimestamp, observation.CreatedAt); - Assert.Equal(new[] { "cve-2025-0001", "ghsa-xxxx-yyyy" }, observation.Linkset.Aliases); - Assert.Equal(new[] { "pkg:npm/left-pad@1.0.0" }, observation.Linkset.Purls); - Assert.Equal(new[] { "cpe:2.3:a:example:product:1.0:*:*:*:*:*:*:*" }, observation.Linkset.Cpes); - var reference = Assert.Single(observation.Linkset.References); - Assert.Equal("advisory", reference.Type); - Assert.Equal("https://example.test/advisory", reference.Url); - } - - [Fact] - public void Create_SetsSourceAndUpstreamFields() - { - var factory = new AdvisoryObservationFactory(); - var upstreamProvenance = ImmutableDictionary.CreateRange(new Dictionary<string, string> - { - ["api"] = "https://api.example.test/v1/feed" - }); - - var rawDocument = BuildRawDocument( - source: new RawSourceMetadata("vendor-x", "connector-y", "2.3.4", Stream: "stable"), - upstream: new RawUpstreamMetadata( - UpstreamId: "doc-123", - DocumentVersion: "2025.10.26", - RetrievedAt: SampleTimestamp, - ContentHash: "sha256:abcdef", - Signature: new RawSignatureMetadata(true, "dsse", "key-1", "signature-bytes"), - Provenance: upstreamProvenance), - identifiers: new RawIdentifiers(ImmutableArray<string>.Empty, "doc-123"), - linkset: new RawLinkset()); - - var observation = factory.Create(rawDocument); - - Assert.Equal("vendor-x", observation.Source.Vendor); - Assert.Equal("stable", observation.Source.Stream); - Assert.Equal("https://api.example.test/v1/feed", observation.Source.Api); - Assert.Equal("2.3.4", observation.Source.CollectorVersion); - - Assert.Equal("doc-123", observation.Upstream.UpstreamId); - Assert.Equal("2025.10.26", observation.Upstream.DocumentVersion); - Assert.Equal("sha256:abcdef", observation.Upstream.ContentHash); - Assert.True(observation.Upstream.Signature.Present); - Assert.Equal("dsse", observation.Upstream.Signature.Format); - Assert.Equal(upstreamProvenance, observation.Upstream.Metadata); - } - - [Fact] - public void Create_StoresNotesAsAttributes() - { - var factory = new AdvisoryObservationFactory(); - var notes = ImmutableDictionary.CreateRange(new Dictionary<string, string> - { - ["range-introduced"] = "1.0.0", - ["range-fixed"] = "1.0.5" - }); - - var rawDocument = BuildRawDocument( - identifiers: new RawIdentifiers(ImmutableArray<string>.Empty, "primary"), - linkset: new RawLinkset - { - Notes = notes, - ReconciledFrom = ImmutableArray.Create("connector-a", "connector-b") - }, - supersedes: "tenant-a:vendor-x:previous:sha256:123"); - - var observation = factory.Create(rawDocument); - - Assert.Equal("1.0.0", observation.Attributes["linkset.note.range-introduced"]); - Assert.Equal("1.0.5", observation.Attributes["linkset.note.range-fixed"]); - Assert.Equal("tenant-a:vendor-x:previous:sha256:123", observation.Attributes["supersedes"]); - Assert.Equal("connector-a;connector-b", observation.Attributes["linkset.reconciled_from"]); - } - - private static AdvisoryRawDocument BuildRawDocument( - RawSourceMetadata? source = null, - RawUpstreamMetadata? upstream = null, - RawIdentifiers? identifiers = null, - RawLinkset? linkset = null, - string tenant = "tenant-a", - string? 
supersedes = null) - { - source ??= new RawSourceMetadata( - Vendor: "vendor-x", - Connector: "connector-y", - ConnectorVersion: "1.0.0", - Stream: null); - - upstream ??= new RawUpstreamMetadata( - UpstreamId: "doc-1", - DocumentVersion: "v1", - RetrievedAt: SampleTimestamp, - ContentHash: "sha256:123", - Signature: new RawSignatureMetadata(false), - Provenance: ImmutableDictionary<string, string>.Empty); - - identifiers ??= new RawIdentifiers( - Aliases: ImmutableArray<string>.Empty, - PrimaryId: "doc-1"); - - linkset ??= new RawLinkset(); - - using var document = JsonDocument.Parse("""{"id":"doc-1"}"""); - var content = new RawContent( - Format: "csaf", - SpecVersion: "2.0", - Raw: document.RootElement.Clone(), - Encoding: null); - - return new AdvisoryRawDocument( - Tenant: tenant, - Source: source, - Upstream: upstream, - Content: content, - Identifiers: identifiers, - Linkset: linkset, - Supersedes: supersedes); - } -} +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Text.Json; +using StellaOps.Concelier.Core.Linksets; +using StellaOps.Concelier.Models.Observations; +using StellaOps.Concelier.RawModels; +using Xunit; + +namespace StellaOps.Concelier.Core.Tests.Linksets; + +public sealed class AdvisoryObservationFactoryTests +{ + private static readonly DateTimeOffset SampleTimestamp = DateTimeOffset.Parse("2025-10-26T12:34:56Z"); + + [Fact] + public void Create_NormalizesLinksetIdentifiersAndReferences() + { + var factory = new AdvisoryObservationFactory(); + var rawDocument = BuildRawDocument( + identifiers: new RawIdentifiers( + Aliases: ImmutableArray.Create(" CVE-2025-0001 ", "ghsa-XXXX-YYYY"), + PrimaryId: "GHSA-XXXX-YYYY"), + linkset: new RawLinkset + { + PackageUrls = ImmutableArray.Create("pkg:NPM/left-pad@1.0.0", "pkg:npm/left-pad@1.0.0?foo=bar"), + Cpes = ImmutableArray.Create("cpe:/a:Example:Product:1.0", "cpe:/a:example:product:1.0"), + Aliases = ImmutableArray.Create(" CVE-2025-0001 "), + References = ImmutableArray.Create( + new RawReference("Advisory", " https://example.test/advisory "), + new RawReference("ADVISORY", "https://example.test/advisory")) + }); + + var observation = factory.Create(rawDocument, SampleTimestamp); + + Assert.Equal(SampleTimestamp, observation.CreatedAt); + Assert.Equal(new[] { "cve-2025-0001", "ghsa-xxxx-yyyy" }, observation.Linkset.Aliases); + Assert.Equal(new[] { "pkg:npm/left-pad@1.0.0" }, observation.Linkset.Purls); + Assert.Equal(new[] { "cpe:2.3:a:example:product:1.0:*:*:*:*:*:*:*" }, observation.Linkset.Cpes); + var reference = Assert.Single(observation.Linkset.References); + Assert.Equal("advisory", reference.Type); + Assert.Equal("https://example.test/advisory", reference.Url); + } + + [Fact] + public void Create_SetsSourceAndUpstreamFields() + { + var factory = new AdvisoryObservationFactory(); + var upstreamProvenance = ImmutableDictionary.CreateRange(new Dictionary<string, string> + { + ["api"] = "https://api.example.test/v1/feed" + }); + + var rawDocument = BuildRawDocument( + source: new RawSourceMetadata("vendor-x", "connector-y", "2.3.4", Stream: "stable"), + upstream: new RawUpstreamMetadata( + UpstreamId: "doc-123", + DocumentVersion: "2025.10.26", + RetrievedAt: SampleTimestamp, + ContentHash: "sha256:abcdef", + Signature: new RawSignatureMetadata(true, "dsse", "key-1", "signature-bytes"), + Provenance: upstreamProvenance), + identifiers: new RawIdentifiers(ImmutableArray<string>.Empty, "doc-123"), + linkset: new RawLinkset()); + + var observation = factory.Create(rawDocument); + + 
Assert.Equal("vendor-x", observation.Source.Vendor); + Assert.Equal("stable", observation.Source.Stream); + Assert.Equal("https://api.example.test/v1/feed", observation.Source.Api); + Assert.Equal("2.3.4", observation.Source.CollectorVersion); + + Assert.Equal("doc-123", observation.Upstream.UpstreamId); + Assert.Equal("2025.10.26", observation.Upstream.DocumentVersion); + Assert.Equal("sha256:abcdef", observation.Upstream.ContentHash); + Assert.True(observation.Upstream.Signature.Present); + Assert.Equal("dsse", observation.Upstream.Signature.Format); + Assert.Equal(upstreamProvenance, observation.Upstream.Metadata); + } + + [Fact] + public void Create_StoresNotesAsAttributes() + { + var factory = new AdvisoryObservationFactory(); + var notes = ImmutableDictionary.CreateRange(new Dictionary<string, string> + { + ["range-introduced"] = "1.0.0", + ["range-fixed"] = "1.0.5" + }); + + var rawDocument = BuildRawDocument( + identifiers: new RawIdentifiers(ImmutableArray<string>.Empty, "primary"), + linkset: new RawLinkset + { + Notes = notes, + ReconciledFrom = ImmutableArray.Create("connector-a", "connector-b") + }, + supersedes: "tenant-a:vendor-x:previous:sha256:123"); + + var observation = factory.Create(rawDocument); + + Assert.Equal("1.0.0", observation.Attributes["linkset.note.range-introduced"]); + Assert.Equal("1.0.5", observation.Attributes["linkset.note.range-fixed"]); + Assert.Equal("tenant-a:vendor-x:previous:sha256:123", observation.Attributes["supersedes"]); + Assert.Equal("connector-a;connector-b", observation.Attributes["linkset.reconciled_from"]); + } + + private static AdvisoryRawDocument BuildRawDocument( + RawSourceMetadata? source = null, + RawUpstreamMetadata? upstream = null, + RawIdentifiers? identifiers = null, + RawLinkset? linkset = null, + string tenant = "tenant-a", + string? 
supersedes = null) + { + source ??= new RawSourceMetadata( + Vendor: "vendor-x", + Connector: "connector-y", + ConnectorVersion: "1.0.0", + Stream: null); + + upstream ??= new RawUpstreamMetadata( + UpstreamId: "doc-1", + DocumentVersion: "v1", + RetrievedAt: SampleTimestamp, + ContentHash: "sha256:123", + Signature: new RawSignatureMetadata(false), + Provenance: ImmutableDictionary<string, string>.Empty); + + identifiers ??= new RawIdentifiers( + Aliases: ImmutableArray<string>.Empty, + PrimaryId: "doc-1"); + + linkset ??= new RawLinkset(); + + using var document = JsonDocument.Parse("""{"id":"doc-1"}"""); + var content = new RawContent( + Format: "csaf", + SpecVersion: "2.0", + Raw: document.RootElement.Clone(), + Encoding: null); + + return new AdvisoryRawDocument( + Tenant: tenant, + Source: source, + Upstream: upstream, + Content: content, + Identifiers: identifiers, + Linkset: linkset, + Supersedes: supersedes); + } +} diff --git a/src/StellaOps.Concelier.Core.Tests/Noise/NoisePriorServiceTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/Noise/NoisePriorServiceTests.cs similarity index 97% rename from src/StellaOps.Concelier.Core.Tests/Noise/NoisePriorServiceTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/Noise/NoisePriorServiceTests.cs index faf1f39c..166db86f 100644 --- a/src/StellaOps.Concelier.Core.Tests/Noise/NoisePriorServiceTests.cs +++ b/src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/Noise/NoisePriorServiceTests.cs @@ -1,320 +1,320 @@ -using System; -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; -using StellaOps.Concelier.Core.Events; -using StellaOps.Concelier.Core.Noise; -using StellaOps.Concelier.Models; -using Xunit; - -namespace StellaOps.Concelier.Core.Tests.Noise; - -public sealed class NoisePriorServiceTests -{ - [Fact] - public async Task RecomputeAsync_PersistsSummariesWithRules() - { - var statements = ImmutableArray.Create( - CreateStatement( - asOf: DateTimeOffset.Parse("2025-10-10T00:00:00Z"), - CreatePackage( - statuses: new[] - { - CreateStatus(AffectedPackageStatusCatalog.NotAffected, "vendor.redhat"), - }, - platform: "linux"))); - - statements = statements.Add(CreateStatement( - asOf: DateTimeOffset.Parse("2025-10-11T00:00:00Z"), - CreatePackage( - statuses: new[] - { - CreateStatus(AffectedPackageStatusCatalog.KnownNotAffected, "vendor.canonical"), - }, - platform: "linux"))); - - statements = statements.Add(CreateStatement( - asOf: DateTimeOffset.Parse("2025-10-12T00:00:00Z"), - CreatePackage( - statuses: new[] - { - CreateStatus(AffectedPackageStatusCatalog.Affected, "vendor.osv"), - }, - platform: "linux", - versionRanges: new[] - { - new AffectedVersionRange( - rangeKind: "semver", - introducedVersion: "1.0.0", - fixedVersion: null, - lastAffectedVersion: null, - rangeExpression: null, - provenance: CreateProvenance("vendor.osv")), - }))); - - var replay = new AdvisoryReplay( - "cve-9999-0001", - null, - statements, - ImmutableArray<AdvisoryConflictSnapshot>.Empty); - - var eventLog = new FakeEventLog(replay); - var repository = new FakeNoisePriorRepository(); - var now = DateTimeOffset.Parse("2025-10-21T12:00:00Z"); - var timeProvider = new FixedTimeProvider(now); - var service = new NoisePriorService(eventLog, repository, timeProvider); - - var result = await service.RecomputeAsync( - new NoisePriorComputationRequest("CVE-9999-0001"), - CancellationToken.None); - - Assert.Equal("cve-9999-0001", 
result.VulnerabilityKey); - Assert.Single(result.Summaries); - - var summary = result.Summaries[0]; - Assert.Equal("cve-9999-0001", summary.VulnerabilityKey); - Assert.Equal("semver", summary.PackageType); - Assert.Equal("pkg:npm/example", summary.PackageIdentifier); - Assert.Equal("linux", summary.Platform); - Assert.Equal(3, summary.ObservationCount); - Assert.Equal(2, summary.NegativeSignals); - Assert.Equal(1, summary.PositiveSignals); - Assert.Equal(0, summary.NeutralSignals); - Assert.Equal(1, summary.VersionRangeSignals); - Assert.Equal(2, summary.UniqueNegativeSources); - Assert.Equal(0.6, summary.Probability); - Assert.Equal(now, summary.GeneratedAt); - Assert.Equal(DateTimeOffset.Parse("2025-10-10T00:00:00Z"), summary.FirstObserved); - Assert.Equal(DateTimeOffset.Parse("2025-10-12T00:00:00Z"), summary.LastObserved); - - Assert.Equal( - new[] { "conflicting_signals", "multi_source_negative", "positive_evidence" }, - summary.RuleHits.ToArray()); - - Assert.Equal("cve-9999-0001", repository.LastUpsertKey); - Assert.NotNull(repository.LastUpsertSummaries); - Assert.Single(repository.LastUpsertSummaries!); - } - - [Fact] - public async Task RecomputeAsync_AllNegativeSignalsProducesHighPrior() - { - var statements = ImmutableArray.Create( - CreateStatement( - asOf: DateTimeOffset.Parse("2025-10-01T00:00:00Z"), - CreatePackage( - statuses: new[] - { - CreateStatus(AffectedPackageStatusCatalog.NotAffected, "vendor.redhat"), - }), - vulnerabilityKey: "cve-2025-1111")); - - statements = statements.Add(CreateStatement( - asOf: DateTimeOffset.Parse("2025-10-02T00:00:00Z"), - CreatePackage( - statuses: new[] - { - CreateStatus(AffectedPackageStatusCatalog.KnownNotAffected, "vendor.redhat"), - }), - vulnerabilityKey: "cve-2025-1111")); - - var replay = new AdvisoryReplay( - "cve-2025-1111", - null, - statements, - ImmutableArray<AdvisoryConflictSnapshot>.Empty); - - var eventLog = new FakeEventLog(replay); - var repository = new FakeNoisePriorRepository(); - var now = DateTimeOffset.Parse("2025-10-21T13:00:00Z"); - var timeProvider = new FixedTimeProvider(now); - var service = new NoisePriorService(eventLog, repository, timeProvider); - - var result = await service.RecomputeAsync( - new NoisePriorComputationRequest("cve-2025-1111"), - CancellationToken.None); - - var summary = Assert.Single(result.Summaries); - Assert.Equal(1.0, summary.Probability); - Assert.Equal( - new[] { "all_negative", "sparse_observations" }, - summary.RuleHits.ToArray()); - } - - [Fact] - public async Task GetByPackageAsync_NormalizesInputs() - { - var statements = ImmutableArray.Create( - CreateStatement( - asOf: DateTimeOffset.Parse("2025-10-03T00:00:00Z"), - CreatePackage( - statuses: new[] - { - CreateStatus(AffectedPackageStatusCatalog.Unknown, "vendor.generic"), - }, - platform: "linux"), - vulnerabilityKey: "cve-2025-2000")); - - var replay = new AdvisoryReplay( - "cve-2025-2000", - null, - statements, - ImmutableArray<AdvisoryConflictSnapshot>.Empty); - - var eventLog = new FakeEventLog(replay); - var repository = new FakeNoisePriorRepository(); - var service = new NoisePriorService(eventLog, repository, new FixedTimeProvider(DateTimeOffset.UtcNow)); - - await service.RecomputeAsync( - new NoisePriorComputationRequest("CVE-2025-2000"), - CancellationToken.None); - - var summaries = await service.GetByPackageAsync( - " SemVer ", - "pkg:npm/example", - " linux ", - CancellationToken.None); - - Assert.Single(summaries); - Assert.Equal("semver", summaries[0].PackageType); - Assert.Equal("linux", 
summaries[0].Platform); - } - - private static AdvisoryStatementSnapshot CreateStatement( - DateTimeOffset asOf, - AffectedPackage package, - string vulnerabilityKey = "cve-9999-0001") - { - var advisory = new Advisory( - advisoryKey: $"adv-{asOf:yyyyMMddHHmmss}", - title: "Example Advisory", - summary: null, - language: "en", - published: null, - modified: asOf, - severity: "high", - exploitKnown: false, - aliases: new[] { "CVE-TEST-0001" }, - references: Array.Empty<AdvisoryReference>(), - affectedPackages: new[] { package }, - cvssMetrics: Array.Empty<CvssMetric>(), - provenance: Array.Empty<AdvisoryProvenance>()); - - return new AdvisoryStatementSnapshot( - Guid.NewGuid(), - vulnerabilityKey, - advisory.AdvisoryKey, - advisory, - StatementHash: ImmutableArray<byte>.Empty, - AsOf: asOf, - RecordedAt: asOf, - InputDocumentIds: ImmutableArray<Guid>.Empty); - } - - private static AffectedPackage CreatePackage( - IEnumerable<AffectedPackageStatus> statuses, - string? platform = null, - IEnumerable<AffectedVersionRange>? versionRanges = null) - => new( - type: "semver", - identifier: "pkg:npm/example", - platform: platform, - versionRanges: versionRanges, - statuses: statuses, - provenance: new[] { CreateProvenance("vendor.core") }, - normalizedVersions: null); - - private static AffectedPackageStatus CreateStatus(string status, string source) - => new( - status, - CreateProvenance(source)); - - private static AdvisoryProvenance CreateProvenance(string source, string kind = "vendor") - => new( - source, - kind, - value: string.Empty, - recordedAt: DateTimeOffset.Parse("2025-10-01T00:00:00Z"), - fieldMask: null, - decisionReason: null); - - private sealed class FakeEventLog : IAdvisoryEventLog - { - private readonly AdvisoryReplay _replay; - - public FakeEventLog(AdvisoryReplay replay) - { - _replay = replay; - } - - public ValueTask AppendAsync(AdvisoryEventAppendRequest request, CancellationToken cancellationToken) - => throw new NotSupportedException("Append operations are not required for tests."); - - public ValueTask<AdvisoryReplay> ReplayAsync(string vulnerabilityKey, DateTimeOffset? asOf, CancellationToken cancellationToken) - => ValueTask.FromResult(_replay); - } - - private sealed class FakeNoisePriorRepository : INoisePriorRepository - { - private readonly List<NoisePriorSummary> _store = new(); - - public string? LastUpsertKey { get; private set; } - - public IReadOnlyCollection<NoisePriorSummary>? LastUpsertSummaries { get; private set; } - - public ValueTask UpsertAsync( - string vulnerabilityKey, - IReadOnlyCollection<NoisePriorSummary> summaries, - CancellationToken cancellationToken) - { - LastUpsertKey = vulnerabilityKey; - LastUpsertSummaries = summaries; - - _store.RemoveAll(summary => - string.Equals(summary.VulnerabilityKey, vulnerabilityKey, StringComparison.Ordinal)); - - _store.AddRange(summaries); - return ValueTask.CompletedTask; - } - - public ValueTask<IReadOnlyList<NoisePriorSummary>> GetByVulnerabilityAsync( - string vulnerabilityKey, - CancellationToken cancellationToken) - { - var matches = _store - .Where(summary => string.Equals(summary.VulnerabilityKey, vulnerabilityKey, StringComparison.Ordinal)) - .ToList(); - return ValueTask.FromResult<IReadOnlyList<NoisePriorSummary>>(matches); - } - - public ValueTask<IReadOnlyList<NoisePriorSummary>> GetByPackageAsync( - string packageType, - string packageIdentifier, - string? 
platform, - CancellationToken cancellationToken) - { - var matches = _store - .Where(summary => - string.Equals(summary.PackageType, packageType, StringComparison.Ordinal) && - string.Equals(summary.PackageIdentifier, packageIdentifier, StringComparison.Ordinal) && - string.Equals(summary.Platform ?? string.Empty, platform ?? string.Empty, StringComparison.Ordinal)) - .ToList(); - - return ValueTask.FromResult<IReadOnlyList<NoisePriorSummary>>(matches); - } - } - - private sealed class FixedTimeProvider : TimeProvider - { - private readonly DateTimeOffset _now; - - public FixedTimeProvider(DateTimeOffset now) - { - _now = now.ToUniversalTime(); - } - - public override DateTimeOffset GetUtcNow() => _now; - } -} +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Concelier.Core.Events; +using StellaOps.Concelier.Core.Noise; +using StellaOps.Concelier.Models; +using Xunit; + +namespace StellaOps.Concelier.Core.Tests.Noise; + +public sealed class NoisePriorServiceTests +{ + [Fact] + public async Task RecomputeAsync_PersistsSummariesWithRules() + { + var statements = ImmutableArray.Create( + CreateStatement( + asOf: DateTimeOffset.Parse("2025-10-10T00:00:00Z"), + CreatePackage( + statuses: new[] + { + CreateStatus(AffectedPackageStatusCatalog.NotAffected, "vendor.redhat"), + }, + platform: "linux"))); + + statements = statements.Add(CreateStatement( + asOf: DateTimeOffset.Parse("2025-10-11T00:00:00Z"), + CreatePackage( + statuses: new[] + { + CreateStatus(AffectedPackageStatusCatalog.KnownNotAffected, "vendor.canonical"), + }, + platform: "linux"))); + + statements = statements.Add(CreateStatement( + asOf: DateTimeOffset.Parse("2025-10-12T00:00:00Z"), + CreatePackage( + statuses: new[] + { + CreateStatus(AffectedPackageStatusCatalog.Affected, "vendor.osv"), + }, + platform: "linux", + versionRanges: new[] + { + new AffectedVersionRange( + rangeKind: "semver", + introducedVersion: "1.0.0", + fixedVersion: null, + lastAffectedVersion: null, + rangeExpression: null, + provenance: CreateProvenance("vendor.osv")), + }))); + + var replay = new AdvisoryReplay( + "cve-9999-0001", + null, + statements, + ImmutableArray<AdvisoryConflictSnapshot>.Empty); + + var eventLog = new FakeEventLog(replay); + var repository = new FakeNoisePriorRepository(); + var now = DateTimeOffset.Parse("2025-10-21T12:00:00Z"); + var timeProvider = new FixedTimeProvider(now); + var service = new NoisePriorService(eventLog, repository, timeProvider); + + var result = await service.RecomputeAsync( + new NoisePriorComputationRequest("CVE-9999-0001"), + CancellationToken.None); + + Assert.Equal("cve-9999-0001", result.VulnerabilityKey); + Assert.Single(result.Summaries); + + var summary = result.Summaries[0]; + Assert.Equal("cve-9999-0001", summary.VulnerabilityKey); + Assert.Equal("semver", summary.PackageType); + Assert.Equal("pkg:npm/example", summary.PackageIdentifier); + Assert.Equal("linux", summary.Platform); + Assert.Equal(3, summary.ObservationCount); + Assert.Equal(2, summary.NegativeSignals); + Assert.Equal(1, summary.PositiveSignals); + Assert.Equal(0, summary.NeutralSignals); + Assert.Equal(1, summary.VersionRangeSignals); + Assert.Equal(2, summary.UniqueNegativeSources); + Assert.Equal(0.6, summary.Probability); + Assert.Equal(now, summary.GeneratedAt); + Assert.Equal(DateTimeOffset.Parse("2025-10-10T00:00:00Z"), summary.FirstObserved); + 
Assert.Equal(DateTimeOffset.Parse("2025-10-12T00:00:00Z"), summary.LastObserved); + + Assert.Equal( + new[] { "conflicting_signals", "multi_source_negative", "positive_evidence" }, + summary.RuleHits.ToArray()); + + Assert.Equal("cve-9999-0001", repository.LastUpsertKey); + Assert.NotNull(repository.LastUpsertSummaries); + Assert.Single(repository.LastUpsertSummaries!); + } + + [Fact] + public async Task RecomputeAsync_AllNegativeSignalsProducesHighPrior() + { + var statements = ImmutableArray.Create( + CreateStatement( + asOf: DateTimeOffset.Parse("2025-10-01T00:00:00Z"), + CreatePackage( + statuses: new[] + { + CreateStatus(AffectedPackageStatusCatalog.NotAffected, "vendor.redhat"), + }), + vulnerabilityKey: "cve-2025-1111")); + + statements = statements.Add(CreateStatement( + asOf: DateTimeOffset.Parse("2025-10-02T00:00:00Z"), + CreatePackage( + statuses: new[] + { + CreateStatus(AffectedPackageStatusCatalog.KnownNotAffected, "vendor.redhat"), + }), + vulnerabilityKey: "cve-2025-1111")); + + var replay = new AdvisoryReplay( + "cve-2025-1111", + null, + statements, + ImmutableArray<AdvisoryConflictSnapshot>.Empty); + + var eventLog = new FakeEventLog(replay); + var repository = new FakeNoisePriorRepository(); + var now = DateTimeOffset.Parse("2025-10-21T13:00:00Z"); + var timeProvider = new FixedTimeProvider(now); + var service = new NoisePriorService(eventLog, repository, timeProvider); + + var result = await service.RecomputeAsync( + new NoisePriorComputationRequest("cve-2025-1111"), + CancellationToken.None); + + var summary = Assert.Single(result.Summaries); + Assert.Equal(1.0, summary.Probability); + Assert.Equal( + new[] { "all_negative", "sparse_observations" }, + summary.RuleHits.ToArray()); + } + + [Fact] + public async Task GetByPackageAsync_NormalizesInputs() + { + var statements = ImmutableArray.Create( + CreateStatement( + asOf: DateTimeOffset.Parse("2025-10-03T00:00:00Z"), + CreatePackage( + statuses: new[] + { + CreateStatus(AffectedPackageStatusCatalog.Unknown, "vendor.generic"), + }, + platform: "linux"), + vulnerabilityKey: "cve-2025-2000")); + + var replay = new AdvisoryReplay( + "cve-2025-2000", + null, + statements, + ImmutableArray<AdvisoryConflictSnapshot>.Empty); + + var eventLog = new FakeEventLog(replay); + var repository = new FakeNoisePriorRepository(); + var service = new NoisePriorService(eventLog, repository, new FixedTimeProvider(DateTimeOffset.UtcNow)); + + await service.RecomputeAsync( + new NoisePriorComputationRequest("CVE-2025-2000"), + CancellationToken.None); + + var summaries = await service.GetByPackageAsync( + " SemVer ", + "pkg:npm/example", + " linux ", + CancellationToken.None); + + Assert.Single(summaries); + Assert.Equal("semver", summaries[0].PackageType); + Assert.Equal("linux", summaries[0].Platform); + } + + private static AdvisoryStatementSnapshot CreateStatement( + DateTimeOffset asOf, + AffectedPackage package, + string vulnerabilityKey = "cve-9999-0001") + { + var advisory = new Advisory( + advisoryKey: $"adv-{asOf:yyyyMMddHHmmss}", + title: "Example Advisory", + summary: null, + language: "en", + published: null, + modified: asOf, + severity: "high", + exploitKnown: false, + aliases: new[] { "CVE-TEST-0001" }, + references: Array.Empty<AdvisoryReference>(), + affectedPackages: new[] { package }, + cvssMetrics: Array.Empty<CvssMetric>(), + provenance: Array.Empty<AdvisoryProvenance>()); + + return new AdvisoryStatementSnapshot( + Guid.NewGuid(), + vulnerabilityKey, + advisory.AdvisoryKey, + advisory, + StatementHash: 
ImmutableArray<byte>.Empty, + AsOf: asOf, + RecordedAt: asOf, + InputDocumentIds: ImmutableArray<Guid>.Empty); + } + + private static AffectedPackage CreatePackage( + IEnumerable<AffectedPackageStatus> statuses, + string? platform = null, + IEnumerable<AffectedVersionRange>? versionRanges = null) + => new( + type: "semver", + identifier: "pkg:npm/example", + platform: platform, + versionRanges: versionRanges, + statuses: statuses, + provenance: new[] { CreateProvenance("vendor.core") }, + normalizedVersions: null); + + private static AffectedPackageStatus CreateStatus(string status, string source) + => new( + status, + CreateProvenance(source)); + + private static AdvisoryProvenance CreateProvenance(string source, string kind = "vendor") + => new( + source, + kind, + value: string.Empty, + recordedAt: DateTimeOffset.Parse("2025-10-01T00:00:00Z"), + fieldMask: null, + decisionReason: null); + + private sealed class FakeEventLog : IAdvisoryEventLog + { + private readonly AdvisoryReplay _replay; + + public FakeEventLog(AdvisoryReplay replay) + { + _replay = replay; + } + + public ValueTask AppendAsync(AdvisoryEventAppendRequest request, CancellationToken cancellationToken) + => throw new NotSupportedException("Append operations are not required for tests."); + + public ValueTask<AdvisoryReplay> ReplayAsync(string vulnerabilityKey, DateTimeOffset? asOf, CancellationToken cancellationToken) + => ValueTask.FromResult(_replay); + } + + private sealed class FakeNoisePriorRepository : INoisePriorRepository + { + private readonly List<NoisePriorSummary> _store = new(); + + public string? LastUpsertKey { get; private set; } + + public IReadOnlyCollection<NoisePriorSummary>? LastUpsertSummaries { get; private set; } + + public ValueTask UpsertAsync( + string vulnerabilityKey, + IReadOnlyCollection<NoisePriorSummary> summaries, + CancellationToken cancellationToken) + { + LastUpsertKey = vulnerabilityKey; + LastUpsertSummaries = summaries; + + _store.RemoveAll(summary => + string.Equals(summary.VulnerabilityKey, vulnerabilityKey, StringComparison.Ordinal)); + + _store.AddRange(summaries); + return ValueTask.CompletedTask; + } + + public ValueTask<IReadOnlyList<NoisePriorSummary>> GetByVulnerabilityAsync( + string vulnerabilityKey, + CancellationToken cancellationToken) + { + var matches = _store + .Where(summary => string.Equals(summary.VulnerabilityKey, vulnerabilityKey, StringComparison.Ordinal)) + .ToList(); + return ValueTask.FromResult<IReadOnlyList<NoisePriorSummary>>(matches); + } + + public ValueTask<IReadOnlyList<NoisePriorSummary>> GetByPackageAsync( + string packageType, + string packageIdentifier, + string? platform, + CancellationToken cancellationToken) + { + var matches = _store + .Where(summary => + string.Equals(summary.PackageType, packageType, StringComparison.Ordinal) && + string.Equals(summary.PackageIdentifier, packageIdentifier, StringComparison.Ordinal) && + string.Equals(summary.Platform ?? string.Empty, platform ?? 
string.Empty, StringComparison.Ordinal)) + .ToList(); + + return ValueTask.FromResult<IReadOnlyList<NoisePriorSummary>>(matches); + } + } + + private sealed class FixedTimeProvider : TimeProvider + { + private readonly DateTimeOffset _now; + + public FixedTimeProvider(DateTimeOffset now) + { + _now = now.ToUniversalTime(); + } + + public override DateTimeOffset GetUtcNow() => _now; + } +} diff --git a/src/StellaOps.Concelier.Core.Tests/Observations/AdvisoryObservationQueryServiceTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/Observations/AdvisoryObservationQueryServiceTests.cs similarity index 97% rename from src/StellaOps.Concelier.Core.Tests/Observations/AdvisoryObservationQueryServiceTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/Observations/AdvisoryObservationQueryServiceTests.cs index ca52ddfd..cbc54b9e 100644 --- a/src/StellaOps.Concelier.Core.Tests/Observations/AdvisoryObservationQueryServiceTests.cs +++ b/src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/Observations/AdvisoryObservationQueryServiceTests.cs @@ -1,326 +1,326 @@ -using System.Collections.Immutable; -using System.Text.Json.Nodes; -using StellaOps.Concelier.Core.Observations; -using StellaOps.Concelier.Models.Observations; -using Xunit; - -namespace StellaOps.Concelier.Core.Tests.Observations; - -public sealed class AdvisoryObservationQueryServiceTests -{ - private static readonly AdvisoryObservationSource DefaultSource = new("ghsa", "stream", "https://example.test/api"); - private static readonly AdvisoryObservationSignature DefaultSignature = new(false, null, null, null); - - [Fact] - public async Task QueryAsync_WhenNoFilters_ReturnsTenantObservationsSortedAndAggregated() - { - var observations = new[] - { - CreateObservation( - observationId: "tenant-a:ghsa:alpha:1", - tenant: "Tenant-A", - aliases: new[] { "CVE-2025-0001" }, - purls: new[] { "pkg:npm/package-a@1.0.0" }, - cpes: new[] { "cpe:/a:vendor:product:1.0" }, - references: new[] - { - new AdvisoryObservationReference("advisory", "https://example.test/advisory-1") - }, - createdAt: DateTimeOffset.UtcNow.AddMinutes(-5)), - CreateObservation( - observationId: "tenant-a:osv:beta:1", - tenant: "tenant-a", - aliases: new[] { "CVE-2025-0002", "GHSA-xyzz" }, - purls: new[] { "pkg:pypi/package-b@2.0.0" }, - cpes: Array.Empty<string>(), - references: new[] - { - new AdvisoryObservationReference("advisory", "https://example.test/advisory-2"), - new AdvisoryObservationReference("patch", "https://example.test/patch-1") - }, - createdAt: DateTimeOffset.UtcNow) - }; - - var lookup = new InMemoryLookup(observations); - var service = new AdvisoryObservationQueryService(lookup); - - var result = await service.QueryAsync(new AdvisoryObservationQueryOptions("tenant-a"), CancellationToken.None); - - Assert.Equal(2, result.Observations.Length); - Assert.Equal("tenant-a:osv:beta:1", result.Observations[0].ObservationId); - Assert.Equal("tenant-a:ghsa:alpha:1", result.Observations[1].ObservationId); - - Assert.Equal( - new[] { "cve-2025-0001", "cve-2025-0002", "ghsa-xyzz" }, - result.Linkset.Aliases); - - Assert.Equal( - new[] { "pkg:npm/package-a@1.0.0", "pkg:pypi/package-b@2.0.0" }, - result.Linkset.Purls); - - Assert.Equal(new[] { "cpe:/a:vendor:product:1.0" }, result.Linkset.Cpes); - - Assert.Equal(3, result.Linkset.References.Length); - Assert.Equal("advisory", result.Linkset.References[0].Type); - Assert.Equal("https://example.test/advisory-1", result.Linkset.References[0].Url); - Assert.Equal("https://example.test/advisory-2", 
result.Linkset.References[1].Url); - Assert.Equal("patch", result.Linkset.References[2].Type); - - Assert.False(result.HasMore); - Assert.Null(result.NextCursor); - } - - [Fact] - public async Task QueryAsync_WithAliasFilter_UsesAliasLookupAndFilters() - { - var observations = new[] - { - CreateObservation( - observationId: "tenant-a:ghsa:alpha:1", - tenant: "tenant-a", - aliases: new[] { "CVE-2025-0001" }, - purls: Array.Empty<string>(), - cpes: Array.Empty<string>(), - references: Array.Empty<AdvisoryObservationReference>(), - createdAt: DateTimeOffset.UtcNow), - CreateObservation( - observationId: "tenant-a:nvd:gamma:1", - tenant: "tenant-a", - aliases: new[] { "CVE-2025-9999" }, - purls: Array.Empty<string>(), - cpes: Array.Empty<string>(), - references: Array.Empty<AdvisoryObservationReference>(), - createdAt: DateTimeOffset.UtcNow.AddMinutes(-10)) - }; - - var lookup = new InMemoryLookup(observations); - var service = new AdvisoryObservationQueryService(lookup); - - var result = await service.QueryAsync( - new AdvisoryObservationQueryOptions("TEnant-A", aliases: new[] { " CVE-2025-0001 ", "CVE-2025-9999" }), - CancellationToken.None); - - Assert.Equal(2, result.Observations.Length); - Assert.All(result.Observations, observation => - Assert.Contains(observation.Linkset.Aliases, alias => alias is "cve-2025-0001" or "cve-2025-9999")); - - Assert.False(result.HasMore); - Assert.Null(result.NextCursor); - } - - [Fact] - public async Task QueryAsync_WithObservationIdAndLinksetFilters_ReturnsIntersection() - { - var observations = new[] - { - CreateObservation( - observationId: "tenant-a:ghsa:alpha:1", - tenant: "tenant-a", - aliases: new[] { "CVE-2025-0001" }, - purls: new[] { "pkg:npm/package-a@1.0.0" }, - cpes: Array.Empty<string>(), - references: Array.Empty<AdvisoryObservationReference>(), - createdAt: DateTimeOffset.UtcNow), - CreateObservation( - observationId: "tenant-a:ghsa:beta:1", - tenant: "tenant-a", - aliases: new[] { "CVE-2025-0001" }, - purls: new[] { "pkg:pypi/package-b@2.0.0" }, - cpes: new[] { "cpe:/a:vendor:product:2.0" }, - references: Array.Empty<AdvisoryObservationReference>(), - createdAt: DateTimeOffset.UtcNow.AddMinutes(-1)) - }; - - var lookup = new InMemoryLookup(observations); - var service = new AdvisoryObservationQueryService(lookup); - - var options = new AdvisoryObservationQueryOptions( - tenant: "tenant-a", - observationIds: new[] { "tenant-a:ghsa:beta:1" }, - aliases: new[] { "CVE-2025-0001" }, - purls: new[] { "pkg:pypi/package-b@2.0.0" }, - cpes: new[] { "cpe:/a:vendor:product:2.0" }); - - var result = await service.QueryAsync(options, CancellationToken.None); - - Assert.Single(result.Observations); - Assert.Equal("tenant-a:ghsa:beta:1", result.Observations[0].ObservationId); - Assert.Equal(new[] { "pkg:pypi/package-b@2.0.0" }, result.Linkset.Purls); - Assert.Equal(new[] { "cpe:/a:vendor:product:2.0" }, result.Linkset.Cpes); - - Assert.False(result.HasMore); - Assert.Null(result.NextCursor); - } - - [Fact] - public async Task QueryAsync_WithLimitEmitsCursorForNextPage() - { - var now = DateTimeOffset.UtcNow; - var observations = new[] - { - CreateObservation( - observationId: "tenant-a:source:1", - tenant: "tenant-a", - aliases: new[] { "CVE-2025-2000" }, - purls: Array.Empty<string>(), - cpes: Array.Empty<string>(), - references: Array.Empty<AdvisoryObservationReference>(), - createdAt: now), - CreateObservation( - observationId: "tenant-a:source:2", - tenant: "tenant-a", - aliases: new[] { "CVE-2025-2001" }, - purls: Array.Empty<string>(), - cpes: 
Array.Empty<string>(), - references: Array.Empty<AdvisoryObservationReference>(), - createdAt: now.AddMinutes(-1)), - CreateObservation( - observationId: "tenant-a:source:3", - tenant: "tenant-a", - aliases: new[] { "CVE-2025-2002" }, - purls: Array.Empty<string>(), - cpes: Array.Empty<string>(), - references: Array.Empty<AdvisoryObservationReference>(), - createdAt: now.AddMinutes(-2)) - }; - - var lookup = new InMemoryLookup(observations); - var service = new AdvisoryObservationQueryService(lookup); - - var firstPage = await service.QueryAsync( - new AdvisoryObservationQueryOptions("tenant-a", limit: 2), - CancellationToken.None); - - Assert.Equal(2, firstPage.Observations.Length); - Assert.True(firstPage.HasMore); - Assert.NotNull(firstPage.NextCursor); - - var secondPage = await service.QueryAsync( - new AdvisoryObservationQueryOptions("tenant-a", limit: 2, cursor: firstPage.NextCursor), - CancellationToken.None); - - Assert.Single(secondPage.Observations); - Assert.False(secondPage.HasMore); - Assert.Null(secondPage.NextCursor); - Assert.Equal("tenant-a:source:3", secondPage.Observations[0].ObservationId); - } - - private static AdvisoryObservation CreateObservation( - string observationId, - string tenant, - IEnumerable<string> aliases, - IEnumerable<string> purls, - IEnumerable<string> cpes, - IEnumerable<AdvisoryObservationReference> references, - DateTimeOffset createdAt) - { - var raw = JsonNode.Parse("""{"message":"payload"}""") ?? throw new InvalidOperationException("Raw payload must not be null."); - - var upstream = new AdvisoryObservationUpstream( - upstreamId: observationId, - documentVersion: null, - fetchedAt: createdAt, - receivedAt: createdAt, - contentHash: $"sha256:{observationId}", - signature: DefaultSignature); - - var content = new AdvisoryObservationContent("CSAF", "2.0", raw); - var linkset = new AdvisoryObservationLinkset(aliases, purls, cpes, references); - - return new AdvisoryObservation( - observationId, - tenant, - DefaultSource, - upstream, - content, - linkset, - createdAt); - } - - private sealed class InMemoryLookup : IAdvisoryObservationLookup - { - private readonly ImmutableDictionary<string, ImmutableArray<AdvisoryObservation>> _observationsByTenant; - - public InMemoryLookup(IEnumerable<AdvisoryObservation> observations) - { - ArgumentNullException.ThrowIfNull(observations); - - _observationsByTenant = observations - .GroupBy(static observation => observation.Tenant, StringComparer.Ordinal) - .ToImmutableDictionary( - static group => group.Key, - static group => group.ToImmutableArray(), - StringComparer.Ordinal); - } - - public ValueTask<IReadOnlyList<AdvisoryObservation>> ListByTenantAsync( - string tenant, - CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenant); - cancellationToken.ThrowIfCancellationRequested(); - - if (_observationsByTenant.TryGetValue(tenant, out var observations)) - { - return ValueTask.FromResult<IReadOnlyList<AdvisoryObservation>>(observations); - } - - return ValueTask.FromResult<IReadOnlyList<AdvisoryObservation>>(Array.Empty<AdvisoryObservation>()); - } - - public ValueTask<IReadOnlyList<AdvisoryObservation>> FindByFiltersAsync( - string tenant, - IReadOnlyCollection<string> observationIds, - IReadOnlyCollection<string> aliases, - IReadOnlyCollection<string> purls, - IReadOnlyCollection<string> cpes, - AdvisoryObservationCursor? 
cursor, - int limit, - CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenant); - ArgumentNullException.ThrowIfNull(observationIds); - ArgumentNullException.ThrowIfNull(aliases); - ArgumentNullException.ThrowIfNull(purls); - ArgumentNullException.ThrowIfNull(cpes); - if (limit <= 0) - { - throw new ArgumentOutOfRangeException(nameof(limit)); - } - cancellationToken.ThrowIfCancellationRequested(); - - if (!_observationsByTenant.TryGetValue(tenant, out var observations)) - { - return ValueTask.FromResult<IReadOnlyList<AdvisoryObservation>>(Array.Empty<AdvisoryObservation>()); - } - - var observationIdSet = observationIds.ToImmutableHashSet(StringComparer.Ordinal); - var aliasSet = aliases.ToImmutableHashSet(StringComparer.Ordinal); - var purlSet = purls.ToImmutableHashSet(StringComparer.Ordinal); - var cpeSet = cpes.ToImmutableHashSet(StringComparer.Ordinal); - var filtered = observations - .Where(observation => - (observationIdSet.Count == 0 || observationIdSet.Contains(observation.ObservationId)) && - (aliasSet.Count == 0 || observation.Linkset.Aliases.Any(aliasSet.Contains)) && - (purlSet.Count == 0 || observation.Linkset.Purls.Any(purlSet.Contains)) && - (cpeSet.Count == 0 || observation.Linkset.Cpes.Any(cpeSet.Contains))); - - if (cursor.HasValue) - { - var createdAt = cursor.Value.CreatedAt; - var observationId = cursor.Value.ObservationId; - filtered = filtered.Where(observation => - observation.CreatedAt < createdAt - || (observation.CreatedAt == createdAt && string.CompareOrdinal(observation.ObservationId, observationId) > 0)); - } - - var page = filtered - .OrderByDescending(static observation => observation.CreatedAt) - .ThenBy(static observation => observation.ObservationId, StringComparer.Ordinal) - .Take(limit) - .ToImmutableArray(); - - return ValueTask.FromResult<IReadOnlyList<AdvisoryObservation>>(page); - } - } -} +using System.Collections.Immutable; +using System.Text.Json.Nodes; +using StellaOps.Concelier.Core.Observations; +using StellaOps.Concelier.Models.Observations; +using Xunit; + +namespace StellaOps.Concelier.Core.Tests.Observations; + +public sealed class AdvisoryObservationQueryServiceTests +{ + private static readonly AdvisoryObservationSource DefaultSource = new("ghsa", "stream", "https://example.test/api"); + private static readonly AdvisoryObservationSignature DefaultSignature = new(false, null, null, null); + + [Fact] + public async Task QueryAsync_WhenNoFilters_ReturnsTenantObservationsSortedAndAggregated() + { + var observations = new[] + { + CreateObservation( + observationId: "tenant-a:ghsa:alpha:1", + tenant: "Tenant-A", + aliases: new[] { "CVE-2025-0001" }, + purls: new[] { "pkg:npm/package-a@1.0.0" }, + cpes: new[] { "cpe:/a:vendor:product:1.0" }, + references: new[] + { + new AdvisoryObservationReference("advisory", "https://example.test/advisory-1") + }, + createdAt: DateTimeOffset.UtcNow.AddMinutes(-5)), + CreateObservation( + observationId: "tenant-a:osv:beta:1", + tenant: "tenant-a", + aliases: new[] { "CVE-2025-0002", "GHSA-xyzz" }, + purls: new[] { "pkg:pypi/package-b@2.0.0" }, + cpes: Array.Empty<string>(), + references: new[] + { + new AdvisoryObservationReference("advisory", "https://example.test/advisory-2"), + new AdvisoryObservationReference("patch", "https://example.test/patch-1") + }, + createdAt: DateTimeOffset.UtcNow) + }; + + var lookup = new InMemoryLookup(observations); + var service = new AdvisoryObservationQueryService(lookup); + + var result = await service.QueryAsync(new 
AdvisoryObservationQueryOptions("tenant-a"), CancellationToken.None); + + Assert.Equal(2, result.Observations.Length); + Assert.Equal("tenant-a:osv:beta:1", result.Observations[0].ObservationId); + Assert.Equal("tenant-a:ghsa:alpha:1", result.Observations[1].ObservationId); + + Assert.Equal( + new[] { "cve-2025-0001", "cve-2025-0002", "ghsa-xyzz" }, + result.Linkset.Aliases); + + Assert.Equal( + new[] { "pkg:npm/package-a@1.0.0", "pkg:pypi/package-b@2.0.0" }, + result.Linkset.Purls); + + Assert.Equal(new[] { "cpe:/a:vendor:product:1.0" }, result.Linkset.Cpes); + + Assert.Equal(3, result.Linkset.References.Length); + Assert.Equal("advisory", result.Linkset.References[0].Type); + Assert.Equal("https://example.test/advisory-1", result.Linkset.References[0].Url); + Assert.Equal("https://example.test/advisory-2", result.Linkset.References[1].Url); + Assert.Equal("patch", result.Linkset.References[2].Type); + + Assert.False(result.HasMore); + Assert.Null(result.NextCursor); + } + + [Fact] + public async Task QueryAsync_WithAliasFilter_UsesAliasLookupAndFilters() + { + var observations = new[] + { + CreateObservation( + observationId: "tenant-a:ghsa:alpha:1", + tenant: "tenant-a", + aliases: new[] { "CVE-2025-0001" }, + purls: Array.Empty<string>(), + cpes: Array.Empty<string>(), + references: Array.Empty<AdvisoryObservationReference>(), + createdAt: DateTimeOffset.UtcNow), + CreateObservation( + observationId: "tenant-a:nvd:gamma:1", + tenant: "tenant-a", + aliases: new[] { "CVE-2025-9999" }, + purls: Array.Empty<string>(), + cpes: Array.Empty<string>(), + references: Array.Empty<AdvisoryObservationReference>(), + createdAt: DateTimeOffset.UtcNow.AddMinutes(-10)) + }; + + var lookup = new InMemoryLookup(observations); + var service = new AdvisoryObservationQueryService(lookup); + + var result = await service.QueryAsync( + new AdvisoryObservationQueryOptions("TEnant-A", aliases: new[] { " CVE-2025-0001 ", "CVE-2025-9999" }), + CancellationToken.None); + + Assert.Equal(2, result.Observations.Length); + Assert.All(result.Observations, observation => + Assert.Contains(observation.Linkset.Aliases, alias => alias is "cve-2025-0001" or "cve-2025-9999")); + + Assert.False(result.HasMore); + Assert.Null(result.NextCursor); + } + + [Fact] + public async Task QueryAsync_WithObservationIdAndLinksetFilters_ReturnsIntersection() + { + var observations = new[] + { + CreateObservation( + observationId: "tenant-a:ghsa:alpha:1", + tenant: "tenant-a", + aliases: new[] { "CVE-2025-0001" }, + purls: new[] { "pkg:npm/package-a@1.0.0" }, + cpes: Array.Empty<string>(), + references: Array.Empty<AdvisoryObservationReference>(), + createdAt: DateTimeOffset.UtcNow), + CreateObservation( + observationId: "tenant-a:ghsa:beta:1", + tenant: "tenant-a", + aliases: new[] { "CVE-2025-0001" }, + purls: new[] { "pkg:pypi/package-b@2.0.0" }, + cpes: new[] { "cpe:/a:vendor:product:2.0" }, + references: Array.Empty<AdvisoryObservationReference>(), + createdAt: DateTimeOffset.UtcNow.AddMinutes(-1)) + }; + + var lookup = new InMemoryLookup(observations); + var service = new AdvisoryObservationQueryService(lookup); + + var options = new AdvisoryObservationQueryOptions( + tenant: "tenant-a", + observationIds: new[] { "tenant-a:ghsa:beta:1" }, + aliases: new[] { "CVE-2025-0001" }, + purls: new[] { "pkg:pypi/package-b@2.0.0" }, + cpes: new[] { "cpe:/a:vendor:product:2.0" }); + + var result = await service.QueryAsync(options, CancellationToken.None); + + Assert.Single(result.Observations); + Assert.Equal("tenant-a:ghsa:beta:1", 
result.Observations[0].ObservationId); + Assert.Equal(new[] { "pkg:pypi/package-b@2.0.0" }, result.Linkset.Purls); + Assert.Equal(new[] { "cpe:/a:vendor:product:2.0" }, result.Linkset.Cpes); + + Assert.False(result.HasMore); + Assert.Null(result.NextCursor); + } + + [Fact] + public async Task QueryAsync_WithLimitEmitsCursorForNextPage() + { + var now = DateTimeOffset.UtcNow; + var observations = new[] + { + CreateObservation( + observationId: "tenant-a:source:1", + tenant: "tenant-a", + aliases: new[] { "CVE-2025-2000" }, + purls: Array.Empty<string>(), + cpes: Array.Empty<string>(), + references: Array.Empty<AdvisoryObservationReference>(), + createdAt: now), + CreateObservation( + observationId: "tenant-a:source:2", + tenant: "tenant-a", + aliases: new[] { "CVE-2025-2001" }, + purls: Array.Empty<string>(), + cpes: Array.Empty<string>(), + references: Array.Empty<AdvisoryObservationReference>(), + createdAt: now.AddMinutes(-1)), + CreateObservation( + observationId: "tenant-a:source:3", + tenant: "tenant-a", + aliases: new[] { "CVE-2025-2002" }, + purls: Array.Empty<string>(), + cpes: Array.Empty<string>(), + references: Array.Empty<AdvisoryObservationReference>(), + createdAt: now.AddMinutes(-2)) + }; + + var lookup = new InMemoryLookup(observations); + var service = new AdvisoryObservationQueryService(lookup); + + var firstPage = await service.QueryAsync( + new AdvisoryObservationQueryOptions("tenant-a", limit: 2), + CancellationToken.None); + + Assert.Equal(2, firstPage.Observations.Length); + Assert.True(firstPage.HasMore); + Assert.NotNull(firstPage.NextCursor); + + var secondPage = await service.QueryAsync( + new AdvisoryObservationQueryOptions("tenant-a", limit: 2, cursor: firstPage.NextCursor), + CancellationToken.None); + + Assert.Single(secondPage.Observations); + Assert.False(secondPage.HasMore); + Assert.Null(secondPage.NextCursor); + Assert.Equal("tenant-a:source:3", secondPage.Observations[0].ObservationId); + } + + private static AdvisoryObservation CreateObservation( + string observationId, + string tenant, + IEnumerable<string> aliases, + IEnumerable<string> purls, + IEnumerable<string> cpes, + IEnumerable<AdvisoryObservationReference> references, + DateTimeOffset createdAt) + { + var raw = JsonNode.Parse("""{"message":"payload"}""") ?? 
throw new InvalidOperationException("Raw payload must not be null."); + + var upstream = new AdvisoryObservationUpstream( + upstreamId: observationId, + documentVersion: null, + fetchedAt: createdAt, + receivedAt: createdAt, + contentHash: $"sha256:{observationId}", + signature: DefaultSignature); + + var content = new AdvisoryObservationContent("CSAF", "2.0", raw); + var linkset = new AdvisoryObservationLinkset(aliases, purls, cpes, references); + + return new AdvisoryObservation( + observationId, + tenant, + DefaultSource, + upstream, + content, + linkset, + createdAt); + } + + private sealed class InMemoryLookup : IAdvisoryObservationLookup + { + private readonly ImmutableDictionary<string, ImmutableArray<AdvisoryObservation>> _observationsByTenant; + + public InMemoryLookup(IEnumerable<AdvisoryObservation> observations) + { + ArgumentNullException.ThrowIfNull(observations); + + _observationsByTenant = observations + .GroupBy(static observation => observation.Tenant, StringComparer.Ordinal) + .ToImmutableDictionary( + static group => group.Key, + static group => group.ToImmutableArray(), + StringComparer.Ordinal); + } + + public ValueTask<IReadOnlyList<AdvisoryObservation>> ListByTenantAsync( + string tenant, + CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenant); + cancellationToken.ThrowIfCancellationRequested(); + + if (_observationsByTenant.TryGetValue(tenant, out var observations)) + { + return ValueTask.FromResult<IReadOnlyList<AdvisoryObservation>>(observations); + } + + return ValueTask.FromResult<IReadOnlyList<AdvisoryObservation>>(Array.Empty<AdvisoryObservation>()); + } + + public ValueTask<IReadOnlyList<AdvisoryObservation>> FindByFiltersAsync( + string tenant, + IReadOnlyCollection<string> observationIds, + IReadOnlyCollection<string> aliases, + IReadOnlyCollection<string> purls, + IReadOnlyCollection<string> cpes, + AdvisoryObservationCursor? 
cursor, + int limit, + CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenant); + ArgumentNullException.ThrowIfNull(observationIds); + ArgumentNullException.ThrowIfNull(aliases); + ArgumentNullException.ThrowIfNull(purls); + ArgumentNullException.ThrowIfNull(cpes); + if (limit <= 0) + { + throw new ArgumentOutOfRangeException(nameof(limit)); + } + cancellationToken.ThrowIfCancellationRequested(); + + if (!_observationsByTenant.TryGetValue(tenant, out var observations)) + { + return ValueTask.FromResult<IReadOnlyList<AdvisoryObservation>>(Array.Empty<AdvisoryObservation>()); + } + + var observationIdSet = observationIds.ToImmutableHashSet(StringComparer.Ordinal); + var aliasSet = aliases.ToImmutableHashSet(StringComparer.Ordinal); + var purlSet = purls.ToImmutableHashSet(StringComparer.Ordinal); + var cpeSet = cpes.ToImmutableHashSet(StringComparer.Ordinal); + var filtered = observations + .Where(observation => + (observationIdSet.Count == 0 || observationIdSet.Contains(observation.ObservationId)) && + (aliasSet.Count == 0 || observation.Linkset.Aliases.Any(aliasSet.Contains)) && + (purlSet.Count == 0 || observation.Linkset.Purls.Any(purlSet.Contains)) && + (cpeSet.Count == 0 || observation.Linkset.Cpes.Any(cpeSet.Contains))); + + if (cursor.HasValue) + { + var createdAt = cursor.Value.CreatedAt; + var observationId = cursor.Value.ObservationId; + filtered = filtered.Where(observation => + observation.CreatedAt < createdAt + || (observation.CreatedAt == createdAt && string.CompareOrdinal(observation.ObservationId, observationId) > 0)); + } + + var page = filtered + .OrderByDescending(static observation => observation.CreatedAt) + .ThenBy(static observation => observation.ObservationId, StringComparer.Ordinal) + .Take(limit) + .ToImmutableArray(); + + return ValueTask.FromResult<IReadOnlyList<AdvisoryObservation>>(page); + } + } +} diff --git a/src/StellaOps.Concelier.Core.Tests/PluginRoutineFixtures.cs b/src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/PluginRoutineFixtures.cs similarity index 100% rename from src/StellaOps.Concelier.Core.Tests/PluginRoutineFixtures.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/PluginRoutineFixtures.cs diff --git a/src/StellaOps.Concelier.Core.Tests/Raw/AdvisoryRawServiceTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/Raw/AdvisoryRawServiceTests.cs similarity index 97% rename from src/StellaOps.Concelier.Core.Tests/Raw/AdvisoryRawServiceTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/Raw/AdvisoryRawServiceTests.cs index 8c024392..02a41077 100644 --- a/src/StellaOps.Concelier.Core.Tests/Raw/AdvisoryRawServiceTests.cs +++ b/src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/Raw/AdvisoryRawServiceTests.cs @@ -1,143 +1,143 @@ -using System; -using System.Collections.Immutable; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging.Abstractions; -using StellaOps.Aoc; -using StellaOps.Concelier.Core.Aoc; -using StellaOps.Concelier.Core.Linksets; -using StellaOps.Concelier.Core.Raw; -using StellaOps.Concelier.RawModels; -using Xunit; - -namespace StellaOps.Concelier.Core.Tests.Raw; - -public sealed class AdvisoryRawServiceTests -{ - [Fact] - public async Task IngestAsync_RemovesClientSupersedesBeforeUpsert() - { - var repository = new RecordingRepository(); - var service = CreateService(repository); - - var document = CreateDocument() with { Supersedes = " previous-id " }; - var storedDocument = 
document.WithSupersedes("advisory_raw:vendor-x:ghsa-xxxx:sha256-2"); - var expectedResult = new AdvisoryRawUpsertResult(true, CreateRecord(storedDocument)); - repository.NextResult = expectedResult; - - var result = await service.IngestAsync(document, CancellationToken.None); - - Assert.NotNull(repository.CapturedDocument); - Assert.Null(repository.CapturedDocument!.Supersedes); - Assert.Equal(expectedResult.Record.Document.Supersedes, result.Record.Document.Supersedes); - } - - [Fact] - public async Task IngestAsync_PropagatesRepositoryDuplicateResult() - { - var repository = new RecordingRepository(); - var service = CreateService(repository); - - var existingDocument = CreateDocument(); - var expectedResult = new AdvisoryRawUpsertResult(false, CreateRecord(existingDocument)); - repository.NextResult = expectedResult; - - var result = await service.IngestAsync(CreateDocument(), CancellationToken.None); - - Assert.False(result.Inserted); - Assert.Same(expectedResult.Record, result.Record); - } - - private static AdvisoryRawService CreateService(RecordingRepository repository) - { - var writeGuard = new AdvisoryRawWriteGuard(new AocWriteGuard()); - var linksetMapper = new PassthroughLinksetMapper(); - return new AdvisoryRawService( - repository, - writeGuard, - new AocWriteGuard(), - linksetMapper, - TimeProvider.System, - NullLogger<AdvisoryRawService>.Instance); - } - - private static AdvisoryRawDocument CreateDocument() - { - using var raw = JsonDocument.Parse("""{"id":"demo"}"""); - return new AdvisoryRawDocument( - Tenant: "Tenant-A", - Source: new RawSourceMetadata("Vendor-X", "connector-y", "1.0.0"), - Upstream: new RawUpstreamMetadata( - UpstreamId: "GHSA-xxxx", - DocumentVersion: "1", - RetrievedAt: DateTimeOffset.UtcNow, - ContentHash: "sha256:abc", - Signature: new RawSignatureMetadata( - Present: true, - Format: "dsse", - KeyId: "key-1", - Signature: "base64signature"), - Provenance: ImmutableDictionary<string, string>.Empty), - Content: new RawContent( - Format: "OSV", - SpecVersion: "1.0", - Raw: raw.RootElement.Clone()), - Identifiers: new RawIdentifiers( - Aliases: ImmutableArray.Create("GHSA-xxxx"), - PrimaryId: "GHSA-xxxx"), - Linkset: new RawLinkset - { - Aliases = ImmutableArray<string>.Empty, - PackageUrls = ImmutableArray<string>.Empty, - Cpes = ImmutableArray<string>.Empty, - References = ImmutableArray<RawReference>.Empty, - ReconciledFrom = ImmutableArray<string>.Empty, - Notes = ImmutableDictionary<string, string>.Empty - }); - } - - private static AdvisoryRawRecord CreateRecord(AdvisoryRawDocument document) - => new( - Id: "advisory_raw:vendor-x:ghsa-xxxx:sha256-1", - Document: document, - IngestedAt: DateTimeOffset.UtcNow, - CreatedAt: document.Upstream.RetrievedAt); - - private sealed class RecordingRepository : IAdvisoryRawRepository - { - public AdvisoryRawDocument? CapturedDocument { get; private set; } - - public AdvisoryRawUpsertResult? 
NextResult { get; set; } - - public Task<AdvisoryRawUpsertResult> UpsertAsync(AdvisoryRawDocument document, CancellationToken cancellationToken) - { - if (NextResult is null) - { - throw new InvalidOperationException("NextResult must be set before calling UpsertAsync."); - } - - CapturedDocument = document; - return Task.FromResult(NextResult); - } - - public Task<AdvisoryRawRecord?> FindByIdAsync(string tenant, string id, CancellationToken cancellationToken) - => throw new NotSupportedException(); - - public Task<AdvisoryRawQueryResult> QueryAsync(AdvisoryRawQueryOptions options, CancellationToken cancellationToken) - => throw new NotSupportedException(); - - public Task<IReadOnlyList<AdvisoryRawRecord>> ListForVerificationAsync( - string tenant, - DateTimeOffset since, - DateTimeOffset until, - IReadOnlyCollection<string> sourceVendors, - CancellationToken cancellationToken) - => throw new NotSupportedException(); - } - - private sealed class PassthroughLinksetMapper : IAdvisoryLinksetMapper - { - public RawLinkset Map(AdvisoryRawDocument document) => document.Linkset; - } -} +using System; +using System.Collections.Immutable; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging.Abstractions; +using StellaOps.Aoc; +using StellaOps.Concelier.Core.Aoc; +using StellaOps.Concelier.Core.Linksets; +using StellaOps.Concelier.Core.Raw; +using StellaOps.Concelier.RawModels; +using Xunit; + +namespace StellaOps.Concelier.Core.Tests.Raw; + +public sealed class AdvisoryRawServiceTests +{ + [Fact] + public async Task IngestAsync_RemovesClientSupersedesBeforeUpsert() + { + var repository = new RecordingRepository(); + var service = CreateService(repository); + + var document = CreateDocument() with { Supersedes = " previous-id " }; + var storedDocument = document.WithSupersedes("advisory_raw:vendor-x:ghsa-xxxx:sha256-2"); + var expectedResult = new AdvisoryRawUpsertResult(true, CreateRecord(storedDocument)); + repository.NextResult = expectedResult; + + var result = await service.IngestAsync(document, CancellationToken.None); + + Assert.NotNull(repository.CapturedDocument); + Assert.Null(repository.CapturedDocument!.Supersedes); + Assert.Equal(expectedResult.Record.Document.Supersedes, result.Record.Document.Supersedes); + } + + [Fact] + public async Task IngestAsync_PropagatesRepositoryDuplicateResult() + { + var repository = new RecordingRepository(); + var service = CreateService(repository); + + var existingDocument = CreateDocument(); + var expectedResult = new AdvisoryRawUpsertResult(false, CreateRecord(existingDocument)); + repository.NextResult = expectedResult; + + var result = await service.IngestAsync(CreateDocument(), CancellationToken.None); + + Assert.False(result.Inserted); + Assert.Same(expectedResult.Record, result.Record); + } + + private static AdvisoryRawService CreateService(RecordingRepository repository) + { + var writeGuard = new AdvisoryRawWriteGuard(new AocWriteGuard()); + var linksetMapper = new PassthroughLinksetMapper(); + return new AdvisoryRawService( + repository, + writeGuard, + new AocWriteGuard(), + linksetMapper, + TimeProvider.System, + NullLogger<AdvisoryRawService>.Instance); + } + + private static AdvisoryRawDocument CreateDocument() + { + using var raw = JsonDocument.Parse("""{"id":"demo"}"""); + return new AdvisoryRawDocument( + Tenant: "Tenant-A", + Source: new RawSourceMetadata("Vendor-X", "connector-y", "1.0.0"), + Upstream: new RawUpstreamMetadata( + UpstreamId: "GHSA-xxxx", + DocumentVersion: 
"1", + RetrievedAt: DateTimeOffset.UtcNow, + ContentHash: "sha256:abc", + Signature: new RawSignatureMetadata( + Present: true, + Format: "dsse", + KeyId: "key-1", + Signature: "base64signature"), + Provenance: ImmutableDictionary<string, string>.Empty), + Content: new RawContent( + Format: "OSV", + SpecVersion: "1.0", + Raw: raw.RootElement.Clone()), + Identifiers: new RawIdentifiers( + Aliases: ImmutableArray.Create("GHSA-xxxx"), + PrimaryId: "GHSA-xxxx"), + Linkset: new RawLinkset + { + Aliases = ImmutableArray<string>.Empty, + PackageUrls = ImmutableArray<string>.Empty, + Cpes = ImmutableArray<string>.Empty, + References = ImmutableArray<RawReference>.Empty, + ReconciledFrom = ImmutableArray<string>.Empty, + Notes = ImmutableDictionary<string, string>.Empty + }); + } + + private static AdvisoryRawRecord CreateRecord(AdvisoryRawDocument document) + => new( + Id: "advisory_raw:vendor-x:ghsa-xxxx:sha256-1", + Document: document, + IngestedAt: DateTimeOffset.UtcNow, + CreatedAt: document.Upstream.RetrievedAt); + + private sealed class RecordingRepository : IAdvisoryRawRepository + { + public AdvisoryRawDocument? CapturedDocument { get; private set; } + + public AdvisoryRawUpsertResult? NextResult { get; set; } + + public Task<AdvisoryRawUpsertResult> UpsertAsync(AdvisoryRawDocument document, CancellationToken cancellationToken) + { + if (NextResult is null) + { + throw new InvalidOperationException("NextResult must be set before calling UpsertAsync."); + } + + CapturedDocument = document; + return Task.FromResult(NextResult); + } + + public Task<AdvisoryRawRecord?> FindByIdAsync(string tenant, string id, CancellationToken cancellationToken) + => throw new NotSupportedException(); + + public Task<AdvisoryRawQueryResult> QueryAsync(AdvisoryRawQueryOptions options, CancellationToken cancellationToken) + => throw new NotSupportedException(); + + public Task<IReadOnlyList<AdvisoryRawRecord>> ListForVerificationAsync( + string tenant, + DateTimeOffset since, + DateTimeOffset until, + IReadOnlyCollection<string> sourceVendors, + CancellationToken cancellationToken) + => throw new NotSupportedException(); + } + + private sealed class PassthroughLinksetMapper : IAdvisoryLinksetMapper + { + public RawLinkset Map(AdvisoryRawDocument document) => document.Linkset; + } +} diff --git a/src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/StellaOps.Concelier.Core.Tests.csproj b/src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/StellaOps.Concelier.Core.Tests.csproj new file mode 100644 index 00000000..6ff4710f --- /dev/null +++ b/src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/StellaOps.Concelier.Core.Tests.csproj @@ -0,0 +1,13 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.RawModels/StellaOps.Concelier.RawModels.csproj" /> + <ProjectReference Include="../../../Aoc/__Libraries/StellaOps.Aoc/StellaOps.Aoc.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Core.Tests/Unknown/UnknownStateLedgerTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/Unknown/UnknownStateLedgerTests.cs similarity index 100% rename from 
src/StellaOps.Concelier.Core.Tests/Unknown/UnknownStateLedgerTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Core.Tests/Unknown/UnknownStateLedgerTests.cs diff --git a/src/StellaOps.Concelier.Exporter.Json.Tests/JsonExportSnapshotBuilderTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Exporter.Json.Tests/JsonExportSnapshotBuilderTests.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.Json.Tests/JsonExportSnapshotBuilderTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Exporter.Json.Tests/JsonExportSnapshotBuilderTests.cs diff --git a/src/StellaOps.Concelier.Exporter.Json.Tests/JsonExporterDependencyInjectionRoutineTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Exporter.Json.Tests/JsonExporterDependencyInjectionRoutineTests.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.Json.Tests/JsonExporterDependencyInjectionRoutineTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Exporter.Json.Tests/JsonExporterDependencyInjectionRoutineTests.cs diff --git a/src/StellaOps.Concelier.Exporter.Json.Tests/JsonExporterParitySmokeTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Exporter.Json.Tests/JsonExporterParitySmokeTests.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.Json.Tests/JsonExporterParitySmokeTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Exporter.Json.Tests/JsonExporterParitySmokeTests.cs diff --git a/src/StellaOps.Concelier.Exporter.Json.Tests/JsonFeedExporterTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Exporter.Json.Tests/JsonFeedExporterTests.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.Json.Tests/JsonFeedExporterTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Exporter.Json.Tests/JsonFeedExporterTests.cs diff --git a/src/Concelier/__Tests/StellaOps.Concelier.Exporter.Json.Tests/StellaOps.Concelier.Exporter.Json.Tests.csproj b/src/Concelier/__Tests/StellaOps.Concelier.Exporter.Json.Tests/StellaOps.Concelier.Exporter.Json.Tests.csproj new file mode 100644 index 00000000..04712035 --- /dev/null +++ b/src/Concelier/__Tests/StellaOps.Concelier.Exporter.Json.Tests/StellaOps.Concelier.Exporter.Json.Tests.csproj @@ -0,0 +1,14 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Exporter.Json/StellaOps.Concelier.Exporter.Json.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Exporter.Json.Tests/VulnListJsonExportPathResolverTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Exporter.Json.Tests/VulnListJsonExportPathResolverTests.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.Json.Tests/VulnListJsonExportPathResolverTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Exporter.Json.Tests/VulnListJsonExportPathResolverTests.cs diff --git a/src/Concelier/__Tests/StellaOps.Concelier.Exporter.TrivyDb.Tests/StellaOps.Concelier.Exporter.TrivyDb.Tests.csproj 
b/src/Concelier/__Tests/StellaOps.Concelier.Exporter.TrivyDb.Tests/StellaOps.Concelier.Exporter.TrivyDb.Tests.csproj new file mode 100644 index 00000000..d0ad1bd1 --- /dev/null +++ b/src/Concelier/__Tests/StellaOps.Concelier.Exporter.TrivyDb.Tests/StellaOps.Concelier.Exporter.TrivyDb.Tests.csproj @@ -0,0 +1,14 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Exporter.Json/StellaOps.Concelier.Exporter.Json.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Exporter.TrivyDb/StellaOps.Concelier.Exporter.TrivyDb.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Exporter.TrivyDb.Tests/TrivyDbExportPlannerTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Exporter.TrivyDb.Tests/TrivyDbExportPlannerTests.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.TrivyDb.Tests/TrivyDbExportPlannerTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Exporter.TrivyDb.Tests/TrivyDbExportPlannerTests.cs diff --git a/src/StellaOps.Concelier.Exporter.TrivyDb.Tests/TrivyDbFeedExporterTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Exporter.TrivyDb.Tests/TrivyDbFeedExporterTests.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.TrivyDb.Tests/TrivyDbFeedExporterTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Exporter.TrivyDb.Tests/TrivyDbFeedExporterTests.cs diff --git a/src/StellaOps.Concelier.Exporter.TrivyDb.Tests/TrivyDbOciWriterTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Exporter.TrivyDb.Tests/TrivyDbOciWriterTests.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.TrivyDb.Tests/TrivyDbOciWriterTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Exporter.TrivyDb.Tests/TrivyDbOciWriterTests.cs diff --git a/src/StellaOps.Concelier.Exporter.TrivyDb.Tests/TrivyDbPackageBuilderTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Exporter.TrivyDb.Tests/TrivyDbPackageBuilderTests.cs similarity index 100% rename from src/StellaOps.Concelier.Exporter.TrivyDb.Tests/TrivyDbPackageBuilderTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Exporter.TrivyDb.Tests/TrivyDbPackageBuilderTests.cs diff --git a/src/StellaOps.Concelier.Merge.Tests/AdvisoryIdentityResolverTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Merge.Tests/AdvisoryIdentityResolverTests.cs similarity index 100% rename from src/StellaOps.Concelier.Merge.Tests/AdvisoryIdentityResolverTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Merge.Tests/AdvisoryIdentityResolverTests.cs diff --git a/src/StellaOps.Concelier.Merge.Tests/AdvisoryMergeServiceTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Merge.Tests/AdvisoryMergeServiceTests.cs similarity index 100% rename from src/StellaOps.Concelier.Merge.Tests/AdvisoryMergeServiceTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Merge.Tests/AdvisoryMergeServiceTests.cs diff --git a/src/StellaOps.Concelier.Merge.Tests/AdvisoryPrecedenceMergerTests.cs 
b/src/Concelier/__Tests/StellaOps.Concelier.Merge.Tests/AdvisoryPrecedenceMergerTests.cs similarity index 100% rename from src/StellaOps.Concelier.Merge.Tests/AdvisoryPrecedenceMergerTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Merge.Tests/AdvisoryPrecedenceMergerTests.cs diff --git a/src/StellaOps.Concelier.Merge.Tests/AffectedPackagePrecedenceResolverTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Merge.Tests/AffectedPackagePrecedenceResolverTests.cs similarity index 100% rename from src/StellaOps.Concelier.Merge.Tests/AffectedPackagePrecedenceResolverTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Merge.Tests/AffectedPackagePrecedenceResolverTests.cs diff --git a/src/StellaOps.Concelier.Merge.Tests/AliasGraphResolverTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Merge.Tests/AliasGraphResolverTests.cs similarity index 100% rename from src/StellaOps.Concelier.Merge.Tests/AliasGraphResolverTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Merge.Tests/AliasGraphResolverTests.cs diff --git a/src/StellaOps.Concelier.Merge.Tests/CanonicalHashCalculatorTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Merge.Tests/CanonicalHashCalculatorTests.cs similarity index 100% rename from src/StellaOps.Concelier.Merge.Tests/CanonicalHashCalculatorTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Merge.Tests/CanonicalHashCalculatorTests.cs diff --git a/src/StellaOps.Concelier.Merge.Tests/DebianEvrComparerTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Merge.Tests/DebianEvrComparerTests.cs similarity index 100% rename from src/StellaOps.Concelier.Merge.Tests/DebianEvrComparerTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Merge.Tests/DebianEvrComparerTests.cs diff --git a/src/StellaOps.Concelier.Merge.Tests/MergeEventWriterTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Merge.Tests/MergeEventWriterTests.cs similarity index 100% rename from src/StellaOps.Concelier.Merge.Tests/MergeEventWriterTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Merge.Tests/MergeEventWriterTests.cs diff --git a/src/StellaOps.Concelier.Merge.Tests/MergePrecedenceIntegrationTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Merge.Tests/MergePrecedenceIntegrationTests.cs similarity index 100% rename from src/StellaOps.Concelier.Merge.Tests/MergePrecedenceIntegrationTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Merge.Tests/MergePrecedenceIntegrationTests.cs diff --git a/src/StellaOps.Concelier.Merge.Tests/MetricCollector.cs b/src/Concelier/__Tests/StellaOps.Concelier.Merge.Tests/MetricCollector.cs similarity index 100% rename from src/StellaOps.Concelier.Merge.Tests/MetricCollector.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Merge.Tests/MetricCollector.cs diff --git a/src/StellaOps.Concelier.Merge.Tests/NevraComparerTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Merge.Tests/NevraComparerTests.cs similarity index 100% rename from src/StellaOps.Concelier.Merge.Tests/NevraComparerTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Merge.Tests/NevraComparerTests.cs diff --git a/src/StellaOps.Concelier.Merge.Tests/SemanticVersionRangeResolverTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Merge.Tests/SemanticVersionRangeResolverTests.cs similarity index 100% rename from src/StellaOps.Concelier.Merge.Tests/SemanticVersionRangeResolverTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Merge.Tests/SemanticVersionRangeResolverTests.cs diff --git 
a/src/Concelier/__Tests/StellaOps.Concelier.Merge.Tests/StellaOps.Concelier.Merge.Tests.csproj b/src/Concelier/__Tests/StellaOps.Concelier.Merge.Tests/StellaOps.Concelier.Merge.Tests.csproj new file mode 100644 index 00000000..5e0ebe55 --- /dev/null +++ b/src/Concelier/__Tests/StellaOps.Concelier.Merge.Tests/StellaOps.Concelier.Merge.Tests.csproj @@ -0,0 +1,14 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Merge/StellaOps.Concelier.Merge.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Merge.Tests/TestLogger.cs b/src/Concelier/__Tests/StellaOps.Concelier.Merge.Tests/TestLogger.cs similarity index 100% rename from src/StellaOps.Concelier.Merge.Tests/TestLogger.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Merge.Tests/TestLogger.cs diff --git a/src/StellaOps.Concelier.Models.Tests/AdvisoryProvenanceTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/AdvisoryProvenanceTests.cs similarity index 100% rename from src/StellaOps.Concelier.Models.Tests/AdvisoryProvenanceTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/AdvisoryProvenanceTests.cs diff --git a/src/StellaOps.Concelier.Models.Tests/AdvisoryTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/AdvisoryTests.cs similarity index 100% rename from src/StellaOps.Concelier.Models.Tests/AdvisoryTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/AdvisoryTests.cs diff --git a/src/StellaOps.Concelier.Models.Tests/AffectedPackageStatusTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/AffectedPackageStatusTests.cs similarity index 100% rename from src/StellaOps.Concelier.Models.Tests/AffectedPackageStatusTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/AffectedPackageStatusTests.cs diff --git a/src/StellaOps.Concelier.Models.Tests/AffectedVersionRangeExtensionsTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/AffectedVersionRangeExtensionsTests.cs similarity index 100% rename from src/StellaOps.Concelier.Models.Tests/AffectedVersionRangeExtensionsTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/AffectedVersionRangeExtensionsTests.cs diff --git a/src/StellaOps.Concelier.Models.Tests/AliasSchemeRegistryTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/AliasSchemeRegistryTests.cs similarity index 100% rename from src/StellaOps.Concelier.Models.Tests/AliasSchemeRegistryTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/AliasSchemeRegistryTests.cs diff --git a/src/StellaOps.Concelier.Models.Tests/CanonicalExampleFactory.cs b/src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/CanonicalExampleFactory.cs similarity index 100% rename from src/StellaOps.Concelier.Models.Tests/CanonicalExampleFactory.cs rename to 
src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/CanonicalExampleFactory.cs diff --git a/src/StellaOps.Concelier.Models.Tests/CanonicalExamplesTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/CanonicalExamplesTests.cs similarity index 100% rename from src/StellaOps.Concelier.Models.Tests/CanonicalExamplesTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/CanonicalExamplesTests.cs diff --git a/src/StellaOps.Concelier.Models.Tests/CanonicalJsonSerializerTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/CanonicalJsonSerializerTests.cs similarity index 100% rename from src/StellaOps.Concelier.Models.Tests/CanonicalJsonSerializerTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/CanonicalJsonSerializerTests.cs diff --git a/src/StellaOps.Concelier.Models.Tests/EvrPrimitiveExtensionsTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/EvrPrimitiveExtensionsTests.cs similarity index 100% rename from src/StellaOps.Concelier.Models.Tests/EvrPrimitiveExtensionsTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/EvrPrimitiveExtensionsTests.cs diff --git a/src/StellaOps.Concelier.Models.Tests/Fixtures/ghsa-semver.actual.json b/src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/Fixtures/ghsa-semver.actual.json similarity index 96% rename from src/StellaOps.Concelier.Models.Tests/Fixtures/ghsa-semver.actual.json rename to src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/Fixtures/ghsa-semver.actual.json index dfd2f43f..4281d2f5 100644 --- a/src/StellaOps.Concelier.Models.Tests/Fixtures/ghsa-semver.actual.json +++ b/src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/Fixtures/ghsa-semver.actual.json @@ -1,127 +1,127 @@ -{ - "advisoryKey": "GHSA-aaaa-bbbb-cccc", - "affectedPackages": [ - { - "type": "semver", - "identifier": "pkg:npm/example-widget", - "platform": null, - "versionRanges": [ - { - "fixedVersion": "2.5.1", - "introducedVersion": null, - "lastAffectedVersion": null, - "primitives": null, - "provenance": { - "source": "ghsa", - "kind": "map", - "value": "ghsa-aaaa-bbbb-cccc", - "decisionReason": null, - "recordedAt": "2024-03-05T10:00:00+00:00", - "fieldMask": [] - }, - "rangeExpression": ">=0.0.0 <2.5.1", - "rangeKind": "semver" - }, - { - "fixedVersion": "3.2.4", - "introducedVersion": "3.0.0", - "lastAffectedVersion": null, - "primitives": null, - "provenance": { - "source": "ghsa", - "kind": "map", - "value": "ghsa-aaaa-bbbb-cccc", - "decisionReason": null, - "recordedAt": "2024-03-05T10:00:00+00:00", - "fieldMask": [] - }, - "rangeExpression": null, - "rangeKind": "semver" - } - ], - "normalizedVersions": [], - "statuses": [], - "provenance": [ - { - "source": "ghsa", - "kind": "map", - "value": "ghsa-aaaa-bbbb-cccc", - "decisionReason": null, - "recordedAt": "2024-03-05T10:00:00+00:00", - "fieldMask": [] - } - ] - } - ], - "aliases": [ - "CVE-2024-2222", - "GHSA-aaaa-bbbb-cccc" - ], - "canonicalMetricId": null, - "credits": [], - "cvssMetrics": [ - { - "baseScore": 8.8, - "baseSeverity": "high", - "provenance": { - "source": "ghsa", - "kind": "map", - "value": "ghsa-aaaa-bbbb-cccc", - "decisionReason": null, - "recordedAt": "2024-03-05T10:00:00+00:00", - "fieldMask": [] - }, - "vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:R/S:U/C:H/I:H/A:H", - "version": "3.1" - } - ], - "cwes": [], - "description": null, - "exploitKnown": false, - "language": "en", - "modified": "2024-03-04T12:00:00+00:00", - "provenance": [ - { - "source": "ghsa", - "kind": "map", - "value": 
"ghsa-aaaa-bbbb-cccc", - "decisionReason": null, - "recordedAt": "2024-03-05T10:00:00+00:00", - "fieldMask": [] - } - ], - "published": "2024-03-04T00:00:00+00:00", - "references": [ - { - "kind": "patch", - "provenance": { - "source": "ghsa", - "kind": "map", - "value": "ghsa-aaaa-bbbb-cccc", - "decisionReason": null, - "recordedAt": "2024-03-05T10:00:00+00:00", - "fieldMask": [] - }, - "sourceTag": "ghsa", - "summary": "Patch commit", - "url": "https://github.com/example/widget/commit/abcd1234" - }, - { - "kind": "advisory", - "provenance": { - "source": "ghsa", - "kind": "map", - "value": "ghsa-aaaa-bbbb-cccc", - "decisionReason": null, - "recordedAt": "2024-03-05T10:00:00+00:00", - "fieldMask": [] - }, - "sourceTag": "ghsa", - "summary": "GitHub Security Advisory", - "url": "https://github.com/example/widget/security/advisories/GHSA-aaaa-bbbb-cccc" - } - ], - "severity": "high", - "summary": "A crafted payload can pollute Object.prototype leading to RCE.", - "title": "Prototype pollution in widget.js" +{ + "advisoryKey": "GHSA-aaaa-bbbb-cccc", + "affectedPackages": [ + { + "type": "semver", + "identifier": "pkg:npm/example-widget", + "platform": null, + "versionRanges": [ + { + "fixedVersion": "2.5.1", + "introducedVersion": null, + "lastAffectedVersion": null, + "primitives": null, + "provenance": { + "source": "ghsa", + "kind": "map", + "value": "ghsa-aaaa-bbbb-cccc", + "decisionReason": null, + "recordedAt": "2024-03-05T10:00:00+00:00", + "fieldMask": [] + }, + "rangeExpression": ">=0.0.0 <2.5.1", + "rangeKind": "semver" + }, + { + "fixedVersion": "3.2.4", + "introducedVersion": "3.0.0", + "lastAffectedVersion": null, + "primitives": null, + "provenance": { + "source": "ghsa", + "kind": "map", + "value": "ghsa-aaaa-bbbb-cccc", + "decisionReason": null, + "recordedAt": "2024-03-05T10:00:00+00:00", + "fieldMask": [] + }, + "rangeExpression": null, + "rangeKind": "semver" + } + ], + "normalizedVersions": [], + "statuses": [], + "provenance": [ + { + "source": "ghsa", + "kind": "map", + "value": "ghsa-aaaa-bbbb-cccc", + "decisionReason": null, + "recordedAt": "2024-03-05T10:00:00+00:00", + "fieldMask": [] + } + ] + } + ], + "aliases": [ + "CVE-2024-2222", + "GHSA-aaaa-bbbb-cccc" + ], + "canonicalMetricId": null, + "credits": [], + "cvssMetrics": [ + { + "baseScore": 8.8, + "baseSeverity": "high", + "provenance": { + "source": "ghsa", + "kind": "map", + "value": "ghsa-aaaa-bbbb-cccc", + "decisionReason": null, + "recordedAt": "2024-03-05T10:00:00+00:00", + "fieldMask": [] + }, + "vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:R/S:U/C:H/I:H/A:H", + "version": "3.1" + } + ], + "cwes": [], + "description": null, + "exploitKnown": false, + "language": "en", + "modified": "2024-03-04T12:00:00+00:00", + "provenance": [ + { + "source": "ghsa", + "kind": "map", + "value": "ghsa-aaaa-bbbb-cccc", + "decisionReason": null, + "recordedAt": "2024-03-05T10:00:00+00:00", + "fieldMask": [] + } + ], + "published": "2024-03-04T00:00:00+00:00", + "references": [ + { + "kind": "patch", + "provenance": { + "source": "ghsa", + "kind": "map", + "value": "ghsa-aaaa-bbbb-cccc", + "decisionReason": null, + "recordedAt": "2024-03-05T10:00:00+00:00", + "fieldMask": [] + }, + "sourceTag": "ghsa", + "summary": "Patch commit", + "url": "https://github.com/example/widget/commit/abcd1234" + }, + { + "kind": "advisory", + "provenance": { + "source": "ghsa", + "kind": "map", + "value": "ghsa-aaaa-bbbb-cccc", + "decisionReason": null, + "recordedAt": "2024-03-05T10:00:00+00:00", + "fieldMask": [] + }, + "sourceTag": "ghsa", + 
"summary": "GitHub Security Advisory", + "url": "https://github.com/example/widget/security/advisories/GHSA-aaaa-bbbb-cccc" + } + ], + "severity": "high", + "summary": "A crafted payload can pollute Object.prototype leading to RCE.", + "title": "Prototype pollution in widget.js" } \ No newline at end of file diff --git a/src/StellaOps.Concelier.Models.Tests/Fixtures/ghsa-semver.json b/src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/Fixtures/ghsa-semver.json similarity index 100% rename from src/StellaOps.Concelier.Models.Tests/Fixtures/ghsa-semver.json rename to src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/Fixtures/ghsa-semver.json diff --git a/src/StellaOps.Concelier.Models.Tests/Fixtures/kev-flag.actual.json b/src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/Fixtures/kev-flag.actual.json similarity index 96% rename from src/StellaOps.Concelier.Models.Tests/Fixtures/kev-flag.actual.json rename to src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/Fixtures/kev-flag.actual.json index 367f93c1..abea5bd2 100644 --- a/src/StellaOps.Concelier.Models.Tests/Fixtures/kev-flag.actual.json +++ b/src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/Fixtures/kev-flag.actual.json @@ -1,45 +1,45 @@ -{ - "advisoryKey": "CVE-2023-9999", - "affectedPackages": [], - "aliases": [ - "CVE-2023-9999" - ], - "canonicalMetricId": null, - "credits": [], - "cvssMetrics": [], - "cwes": [], - "description": null, - "exploitKnown": true, - "language": "en", - "modified": "2024-02-09T16:22:00+00:00", - "provenance": [ - { - "source": "cisa-kev", - "kind": "annotate", - "value": "kev", - "decisionReason": null, - "recordedAt": "2024-02-10T09:30:00+00:00", - "fieldMask": [] - } - ], - "published": "2023-11-20T00:00:00+00:00", - "references": [ - { - "kind": "kev", - "provenance": { - "source": "cisa-kev", - "kind": "annotate", - "value": "kev", - "decisionReason": null, - "recordedAt": "2024-02-10T09:30:00+00:00", - "fieldMask": [] - }, - "sourceTag": "cisa", - "summary": "CISA KEV entry", - "url": "https://www.cisa.gov/known-exploited-vulnerabilities-catalog" - } - ], - "severity": "critical", - "summary": "Unauthenticated RCE due to unsafe deserialization.", - "title": "Remote code execution in LegacyServer" +{ + "advisoryKey": "CVE-2023-9999", + "affectedPackages": [], + "aliases": [ + "CVE-2023-9999" + ], + "canonicalMetricId": null, + "credits": [], + "cvssMetrics": [], + "cwes": [], + "description": null, + "exploitKnown": true, + "language": "en", + "modified": "2024-02-09T16:22:00+00:00", + "provenance": [ + { + "source": "cisa-kev", + "kind": "annotate", + "value": "kev", + "decisionReason": null, + "recordedAt": "2024-02-10T09:30:00+00:00", + "fieldMask": [] + } + ], + "published": "2023-11-20T00:00:00+00:00", + "references": [ + { + "kind": "kev", + "provenance": { + "source": "cisa-kev", + "kind": "annotate", + "value": "kev", + "decisionReason": null, + "recordedAt": "2024-02-10T09:30:00+00:00", + "fieldMask": [] + }, + "sourceTag": "cisa", + "summary": "CISA KEV entry", + "url": "https://www.cisa.gov/known-exploited-vulnerabilities-catalog" + } + ], + "severity": "critical", + "summary": "Unauthenticated RCE due to unsafe deserialization.", + "title": "Remote code execution in LegacyServer" } \ No newline at end of file diff --git a/src/StellaOps.Concelier.Models.Tests/Fixtures/kev-flag.json b/src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/Fixtures/kev-flag.json similarity index 100% rename from src/StellaOps.Concelier.Models.Tests/Fixtures/kev-flag.json rename to 
src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/Fixtures/kev-flag.json diff --git a/src/StellaOps.Concelier.Models.Tests/Fixtures/nvd-basic.actual.json b/src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/Fixtures/nvd-basic.actual.json similarity index 96% rename from src/StellaOps.Concelier.Models.Tests/Fixtures/nvd-basic.actual.json rename to src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/Fixtures/nvd-basic.actual.json index a5dd5f89..a7584f3d 100644 --- a/src/StellaOps.Concelier.Models.Tests/Fixtures/nvd-basic.actual.json +++ b/src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/Fixtures/nvd-basic.actual.json @@ -1,122 +1,122 @@ -{ - "advisoryKey": "CVE-2024-1234", - "affectedPackages": [ - { - "type": "cpe", - "identifier": "cpe:/a:examplecms:examplecms:1.0", - "platform": null, - "versionRanges": [ - { - "fixedVersion": "1.0.5", - "introducedVersion": "1.0", - "lastAffectedVersion": null, - "primitives": null, - "provenance": { - "source": "nvd", - "kind": "map", - "value": "cve-2024-1234", - "decisionReason": null, - "recordedAt": "2024-08-01T12:00:00+00:00", - "fieldMask": [] - }, - "rangeExpression": null, - "rangeKind": "version" - } - ], - "normalizedVersions": [], - "statuses": [ - { - "provenance": { - "source": "nvd", - "kind": "map", - "value": "cve-2024-1234", - "decisionReason": null, - "recordedAt": "2024-08-01T12:00:00+00:00", - "fieldMask": [] - }, - "status": "affected" - } - ], - "provenance": [ - { - "source": "nvd", - "kind": "map", - "value": "cve-2024-1234", - "decisionReason": null, - "recordedAt": "2024-08-01T12:00:00+00:00", - "fieldMask": [] - } - ] - } - ], - "aliases": [ - "CVE-2024-1234" - ], - "canonicalMetricId": null, - "credits": [], - "cvssMetrics": [ - { - "baseScore": 9.8, - "baseSeverity": "critical", - "provenance": { - "source": "nvd", - "kind": "map", - "value": "cve-2024-1234", - "decisionReason": null, - "recordedAt": "2024-08-01T12:00:00+00:00", - "fieldMask": [] - }, - "vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", - "version": "3.1" - } - ], - "cwes": [], - "description": null, - "exploitKnown": false, - "language": "en", - "modified": "2024-07-16T10:35:00+00:00", - "provenance": [ - { - "source": "nvd", - "kind": "map", - "value": "cve-2024-1234", - "decisionReason": null, - "recordedAt": "2024-08-01T12:00:00+00:00", - "fieldMask": [] - } - ], - "published": "2024-07-15T00:00:00+00:00", - "references": [ - { - "kind": "advisory", - "provenance": { - "source": "example", - "kind": "fetch", - "value": "bulletin", - "decisionReason": null, - "recordedAt": "2024-07-14T15:00:00+00:00", - "fieldMask": [] - }, - "sourceTag": "vendor", - "summary": "Vendor bulletin", - "url": "https://example.org/security/CVE-2024-1234" - }, - { - "kind": "advisory", - "provenance": { - "source": "nvd", - "kind": "map", - "value": "cve-2024-1234", - "decisionReason": null, - "recordedAt": "2024-08-01T12:00:00+00:00", - "fieldMask": [] - }, - "sourceTag": "nvd", - "summary": "NVD entry", - "url": "https://nvd.nist.gov/vuln/detail/CVE-2024-1234" - } - ], - "severity": "high", - "summary": "An integer overflow in ExampleCMS allows remote attackers to escalate privileges.", - "title": "Integer overflow in ExampleCMS" +{ + "advisoryKey": "CVE-2024-1234", + "affectedPackages": [ + { + "type": "cpe", + "identifier": "cpe:/a:examplecms:examplecms:1.0", + "platform": null, + "versionRanges": [ + { + "fixedVersion": "1.0.5", + "introducedVersion": "1.0", + "lastAffectedVersion": null, + "primitives": null, + "provenance": { + "source": "nvd", 
+ "kind": "map", + "value": "cve-2024-1234", + "decisionReason": null, + "recordedAt": "2024-08-01T12:00:00+00:00", + "fieldMask": [] + }, + "rangeExpression": null, + "rangeKind": "version" + } + ], + "normalizedVersions": [], + "statuses": [ + { + "provenance": { + "source": "nvd", + "kind": "map", + "value": "cve-2024-1234", + "decisionReason": null, + "recordedAt": "2024-08-01T12:00:00+00:00", + "fieldMask": [] + }, + "status": "affected" + } + ], + "provenance": [ + { + "source": "nvd", + "kind": "map", + "value": "cve-2024-1234", + "decisionReason": null, + "recordedAt": "2024-08-01T12:00:00+00:00", + "fieldMask": [] + } + ] + } + ], + "aliases": [ + "CVE-2024-1234" + ], + "canonicalMetricId": null, + "credits": [], + "cvssMetrics": [ + { + "baseScore": 9.8, + "baseSeverity": "critical", + "provenance": { + "source": "nvd", + "kind": "map", + "value": "cve-2024-1234", + "decisionReason": null, + "recordedAt": "2024-08-01T12:00:00+00:00", + "fieldMask": [] + }, + "vector": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H", + "version": "3.1" + } + ], + "cwes": [], + "description": null, + "exploitKnown": false, + "language": "en", + "modified": "2024-07-16T10:35:00+00:00", + "provenance": [ + { + "source": "nvd", + "kind": "map", + "value": "cve-2024-1234", + "decisionReason": null, + "recordedAt": "2024-08-01T12:00:00+00:00", + "fieldMask": [] + } + ], + "published": "2024-07-15T00:00:00+00:00", + "references": [ + { + "kind": "advisory", + "provenance": { + "source": "example", + "kind": "fetch", + "value": "bulletin", + "decisionReason": null, + "recordedAt": "2024-07-14T15:00:00+00:00", + "fieldMask": [] + }, + "sourceTag": "vendor", + "summary": "Vendor bulletin", + "url": "https://example.org/security/CVE-2024-1234" + }, + { + "kind": "advisory", + "provenance": { + "source": "nvd", + "kind": "map", + "value": "cve-2024-1234", + "decisionReason": null, + "recordedAt": "2024-08-01T12:00:00+00:00", + "fieldMask": [] + }, + "sourceTag": "nvd", + "summary": "NVD entry", + "url": "https://nvd.nist.gov/vuln/detail/CVE-2024-1234" + } + ], + "severity": "high", + "summary": "An integer overflow in ExampleCMS allows remote attackers to escalate privileges.", + "title": "Integer overflow in ExampleCMS" } \ No newline at end of file diff --git a/src/StellaOps.Concelier.Models.Tests/Fixtures/nvd-basic.json b/src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/Fixtures/nvd-basic.json similarity index 100% rename from src/StellaOps.Concelier.Models.Tests/Fixtures/nvd-basic.json rename to src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/Fixtures/nvd-basic.json diff --git a/src/StellaOps.Concelier.Models.Tests/Fixtures/psirt-overlay.actual.json b/src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/Fixtures/psirt-overlay.actual.json similarity index 96% rename from src/StellaOps.Concelier.Models.Tests/Fixtures/psirt-overlay.actual.json rename to src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/Fixtures/psirt-overlay.actual.json index c200134f..b5e6a1b3 100644 --- a/src/StellaOps.Concelier.Models.Tests/Fixtures/psirt-overlay.actual.json +++ b/src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/Fixtures/psirt-overlay.actual.json @@ -1,125 +1,125 @@ -{ - "advisoryKey": "RHSA-2024:0252", - "affectedPackages": [ - { - "type": "rpm", - "identifier": "kernel-0:4.18.0-553.el8.x86_64", - "platform": "rhel-8", - "versionRanges": [ - { - "fixedVersion": null, - "introducedVersion": "0:4.18.0-553.el8", - "lastAffectedVersion": null, - "primitives": null, - "provenance": { - "source": 
"redhat", - "kind": "map", - "value": "rhsa-2024:0252", - "decisionReason": null, - "recordedAt": "2024-05-11T09:00:00+00:00", - "fieldMask": [] - }, - "rangeExpression": null, - "rangeKind": "nevra" - } - ], - "normalizedVersions": [], - "statuses": [ - { - "provenance": { - "source": "redhat", - "kind": "map", - "value": "rhsa-2024:0252", - "decisionReason": null, - "recordedAt": "2024-05-11T09:00:00+00:00", - "fieldMask": [] - }, - "status": "fixed" - } - ], - "provenance": [ - { - "source": "redhat", - "kind": "enrich", - "value": "cve-2024-5678", - "decisionReason": null, - "recordedAt": "2024-05-11T09:05:00+00:00", - "fieldMask": [] - }, - { - "source": "redhat", - "kind": "map", - "value": "rhsa-2024:0252", - "decisionReason": null, - "recordedAt": "2024-05-11T09:00:00+00:00", - "fieldMask": [] - } - ] - } - ], - "aliases": [ - "CVE-2024-5678", - "RHSA-2024:0252" - ], - "canonicalMetricId": null, - "credits": [], - "cvssMetrics": [ - { - "baseScore": 6.7, - "baseSeverity": "medium", - "provenance": { - "source": "redhat", - "kind": "map", - "value": "rhsa-2024:0252", - "decisionReason": null, - "recordedAt": "2024-05-11T09:00:00+00:00", - "fieldMask": [] - }, - "vector": "CVSS:3.1/AV:L/AC:L/PR:H/UI:N/S:U/C:H/I:H/A:H", - "version": "3.1" - } - ], - "cwes": [], - "description": null, - "exploitKnown": false, - "language": "en", - "modified": "2024-05-11T08:15:00+00:00", - "provenance": [ - { - "source": "redhat", - "kind": "enrich", - "value": "cve-2024-5678", - "decisionReason": null, - "recordedAt": "2024-05-11T09:05:00+00:00", - "fieldMask": [] - }, - { - "source": "redhat", - "kind": "map", - "value": "rhsa-2024:0252", - "decisionReason": null, - "recordedAt": "2024-05-11T09:00:00+00:00", - "fieldMask": [] - } - ], - "published": "2024-05-10T19:28:00+00:00", - "references": [ - { - "kind": "advisory", - "provenance": { - "source": "redhat", - "kind": "map", - "value": "rhsa-2024:0252", - "decisionReason": null, - "recordedAt": "2024-05-11T09:00:00+00:00", - "fieldMask": [] - }, - "sourceTag": "redhat", - "summary": "Red Hat security advisory", - "url": "https://access.redhat.com/errata/RHSA-2024:0252" - } - ], - "severity": "critical", - "summary": "Updates the Red Hat Enterprise Linux kernel to address CVE-2024-5678.", - "title": "Important: kernel security update" +{ + "advisoryKey": "RHSA-2024:0252", + "affectedPackages": [ + { + "type": "rpm", + "identifier": "kernel-0:4.18.0-553.el8.x86_64", + "platform": "rhel-8", + "versionRanges": [ + { + "fixedVersion": null, + "introducedVersion": "0:4.18.0-553.el8", + "lastAffectedVersion": null, + "primitives": null, + "provenance": { + "source": "redhat", + "kind": "map", + "value": "rhsa-2024:0252", + "decisionReason": null, + "recordedAt": "2024-05-11T09:00:00+00:00", + "fieldMask": [] + }, + "rangeExpression": null, + "rangeKind": "nevra" + } + ], + "normalizedVersions": [], + "statuses": [ + { + "provenance": { + "source": "redhat", + "kind": "map", + "value": "rhsa-2024:0252", + "decisionReason": null, + "recordedAt": "2024-05-11T09:00:00+00:00", + "fieldMask": [] + }, + "status": "fixed" + } + ], + "provenance": [ + { + "source": "redhat", + "kind": "enrich", + "value": "cve-2024-5678", + "decisionReason": null, + "recordedAt": "2024-05-11T09:05:00+00:00", + "fieldMask": [] + }, + { + "source": "redhat", + "kind": "map", + "value": "rhsa-2024:0252", + "decisionReason": null, + "recordedAt": "2024-05-11T09:00:00+00:00", + "fieldMask": [] + } + ] + } + ], + "aliases": [ + "CVE-2024-5678", + "RHSA-2024:0252" + ], + 
"canonicalMetricId": null, + "credits": [], + "cvssMetrics": [ + { + "baseScore": 6.7, + "baseSeverity": "medium", + "provenance": { + "source": "redhat", + "kind": "map", + "value": "rhsa-2024:0252", + "decisionReason": null, + "recordedAt": "2024-05-11T09:00:00+00:00", + "fieldMask": [] + }, + "vector": "CVSS:3.1/AV:L/AC:L/PR:H/UI:N/S:U/C:H/I:H/A:H", + "version": "3.1" + } + ], + "cwes": [], + "description": null, + "exploitKnown": false, + "language": "en", + "modified": "2024-05-11T08:15:00+00:00", + "provenance": [ + { + "source": "redhat", + "kind": "enrich", + "value": "cve-2024-5678", + "decisionReason": null, + "recordedAt": "2024-05-11T09:05:00+00:00", + "fieldMask": [] + }, + { + "source": "redhat", + "kind": "map", + "value": "rhsa-2024:0252", + "decisionReason": null, + "recordedAt": "2024-05-11T09:00:00+00:00", + "fieldMask": [] + } + ], + "published": "2024-05-10T19:28:00+00:00", + "references": [ + { + "kind": "advisory", + "provenance": { + "source": "redhat", + "kind": "map", + "value": "rhsa-2024:0252", + "decisionReason": null, + "recordedAt": "2024-05-11T09:00:00+00:00", + "fieldMask": [] + }, + "sourceTag": "redhat", + "summary": "Red Hat security advisory", + "url": "https://access.redhat.com/errata/RHSA-2024:0252" + } + ], + "severity": "critical", + "summary": "Updates the Red Hat Enterprise Linux kernel to address CVE-2024-5678.", + "title": "Important: kernel security update" } \ No newline at end of file diff --git a/src/StellaOps.Concelier.Models.Tests/Fixtures/psirt-overlay.json b/src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/Fixtures/psirt-overlay.json similarity index 100% rename from src/StellaOps.Concelier.Models.Tests/Fixtures/psirt-overlay.json rename to src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/Fixtures/psirt-overlay.json diff --git a/src/StellaOps.Concelier.Models.Tests/NevraPrimitiveExtensionsTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/NevraPrimitiveExtensionsTests.cs similarity index 100% rename from src/StellaOps.Concelier.Models.Tests/NevraPrimitiveExtensionsTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/NevraPrimitiveExtensionsTests.cs diff --git a/src/StellaOps.Concelier.Models.Tests/NormalizedVersionRuleTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/NormalizedVersionRuleTests.cs similarity index 100% rename from src/StellaOps.Concelier.Models.Tests/NormalizedVersionRuleTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/NormalizedVersionRuleTests.cs diff --git a/src/StellaOps.Concelier.Models.Tests/Observations/AdvisoryObservationTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/Observations/AdvisoryObservationTests.cs similarity index 97% rename from src/StellaOps.Concelier.Models.Tests/Observations/AdvisoryObservationTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/Observations/AdvisoryObservationTests.cs index 1366d42d..610bd956 100644 --- a/src/StellaOps.Concelier.Models.Tests/Observations/AdvisoryObservationTests.cs +++ b/src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/Observations/AdvisoryObservationTests.cs @@ -1,61 +1,61 @@ -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Text.Json.Nodes; -using StellaOps.Concelier.Models.Observations; -using Xunit; - -namespace StellaOps.Concelier.Models.Tests.Observations; - -public sealed class AdvisoryObservationTests -{ - [Fact] - public void Constructor_NormalizesTenantAndCollections() - { - var source = 
new AdvisoryObservationSource("Vendor", "Stream", "https://example.com/api", "1.2.3"); - var signature = new AdvisoryObservationSignature(true, "pgp", "key1", "sig"); - var upstream = new AdvisoryObservationUpstream( - upstreamId: "CVE-2025-1234", - documentVersion: "2025-10-01", - fetchedAt: DateTimeOffset.Parse("2025-10-01T01:00:00Z"), - receivedAt: DateTimeOffset.Parse("2025-10-01T01:00:05Z"), - contentHash: "sha256:abc", - signature: signature); - - var content = new AdvisoryObservationContent("CSAF", "2.0", JsonNode.Parse("{\"foo\":1}")!); - - var linkset = new AdvisoryObservationLinkset( - aliases: new[] { " Cve-2025-1234 ", "cve-2025-1234" }, - purls: new[] { "pkg:generic/foo@1.0.0", "pkg:generic/foo@1.0.0" }, - cpes: new[] { "cpe:/a:vendor:product:1" }, - references: new[] - { - new AdvisoryObservationReference("ADVISORY", "https://example.com/advisory"), - new AdvisoryObservationReference("advisory", "https://example.com/advisory") - }); - - var attributes = ImmutableDictionary.CreateRange(new Dictionary<string, string> - { - [" region "] = "emea", - ["pipeline"] = "daily" - }); - - var observation = new AdvisoryObservation( - observationId: " tenant-a:CVE-2025-1234:1 ", - tenant: " Tenant-A ", - source: source, - upstream: upstream, - content: content, - linkset: linkset, - createdAt: DateTimeOffset.Parse("2025-10-01T01:00:06Z"), - attributes: attributes); - - Assert.Equal("tenant-a:CVE-2025-1234:1", observation.ObservationId); - Assert.Equal("tenant-a", observation.Tenant); - Assert.Equal("Vendor", observation.Source.Vendor); - Assert.Equal(new[] { "cpe:/a:vendor:product:1" }, observation.Linkset.Cpes); - Assert.Single(observation.Linkset.References); - Assert.Equal("https://example.com/advisory", observation.Linkset.References[0].Url); - Assert.Equal(DateTimeOffset.Parse("2025-10-01T01:00:06Z"), observation.CreatedAt); - Assert.Equal("emea", observation.Attributes["region"]); - } -} +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Text.Json.Nodes; +using StellaOps.Concelier.Models.Observations; +using Xunit; + +namespace StellaOps.Concelier.Models.Tests.Observations; + +public sealed class AdvisoryObservationTests +{ + [Fact] + public void Constructor_NormalizesTenantAndCollections() + { + var source = new AdvisoryObservationSource("Vendor", "Stream", "https://example.com/api", "1.2.3"); + var signature = new AdvisoryObservationSignature(true, "pgp", "key1", "sig"); + var upstream = new AdvisoryObservationUpstream( + upstreamId: "CVE-2025-1234", + documentVersion: "2025-10-01", + fetchedAt: DateTimeOffset.Parse("2025-10-01T01:00:00Z"), + receivedAt: DateTimeOffset.Parse("2025-10-01T01:00:05Z"), + contentHash: "sha256:abc", + signature: signature); + + var content = new AdvisoryObservationContent("CSAF", "2.0", JsonNode.Parse("{\"foo\":1}")!); + + var linkset = new AdvisoryObservationLinkset( + aliases: new[] { " Cve-2025-1234 ", "cve-2025-1234" }, + purls: new[] { "pkg:generic/foo@1.0.0", "pkg:generic/foo@1.0.0" }, + cpes: new[] { "cpe:/a:vendor:product:1" }, + references: new[] + { + new AdvisoryObservationReference("ADVISORY", "https://example.com/advisory"), + new AdvisoryObservationReference("advisory", "https://example.com/advisory") + }); + + var attributes = ImmutableDictionary.CreateRange(new Dictionary<string, string> + { + [" region "] = "emea", + ["pipeline"] = "daily" + }); + + var observation = new AdvisoryObservation( + observationId: " tenant-a:CVE-2025-1234:1 ", + tenant: " Tenant-A ", + source: source, + upstream: upstream, 
+ content: content, + linkset: linkset, + createdAt: DateTimeOffset.Parse("2025-10-01T01:00:06Z"), + attributes: attributes); + + Assert.Equal("tenant-a:CVE-2025-1234:1", observation.ObservationId); + Assert.Equal("tenant-a", observation.Tenant); + Assert.Equal("Vendor", observation.Source.Vendor); + Assert.Equal(new[] { "cpe:/a:vendor:product:1" }, observation.Linkset.Cpes); + Assert.Single(observation.Linkset.References); + Assert.Equal("https://example.com/advisory", observation.Linkset.References[0].Url); + Assert.Equal(DateTimeOffset.Parse("2025-10-01T01:00:06Z"), observation.CreatedAt); + Assert.Equal("emea", observation.Attributes["region"]); + } +} diff --git a/src/StellaOps.Concelier.Models.Tests/OsvGhsaParityDiagnosticsTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/OsvGhsaParityDiagnosticsTests.cs similarity index 100% rename from src/StellaOps.Concelier.Models.Tests/OsvGhsaParityDiagnosticsTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/OsvGhsaParityDiagnosticsTests.cs diff --git a/src/StellaOps.Concelier.Models.Tests/OsvGhsaParityInspectorTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/OsvGhsaParityInspectorTests.cs similarity index 100% rename from src/StellaOps.Concelier.Models.Tests/OsvGhsaParityInspectorTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/OsvGhsaParityInspectorTests.cs diff --git a/src/StellaOps.Concelier.Models.Tests/ProvenanceDiagnosticsTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/ProvenanceDiagnosticsTests.cs similarity index 100% rename from src/StellaOps.Concelier.Models.Tests/ProvenanceDiagnosticsTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/ProvenanceDiagnosticsTests.cs diff --git a/src/StellaOps.Concelier.Models.Tests/RangePrimitivesTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/RangePrimitivesTests.cs similarity index 100% rename from src/StellaOps.Concelier.Models.Tests/RangePrimitivesTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/RangePrimitivesTests.cs diff --git a/src/StellaOps.Concelier.Models.Tests/SemVerPrimitiveTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/SemVerPrimitiveTests.cs similarity index 100% rename from src/StellaOps.Concelier.Models.Tests/SemVerPrimitiveTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/SemVerPrimitiveTests.cs diff --git a/src/StellaOps.Concelier.Models.Tests/SerializationDeterminismTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/SerializationDeterminismTests.cs similarity index 100% rename from src/StellaOps.Concelier.Models.Tests/SerializationDeterminismTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/SerializationDeterminismTests.cs diff --git a/src/StellaOps.Concelier.Models.Tests/SeverityNormalizationTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/SeverityNormalizationTests.cs similarity index 100% rename from src/StellaOps.Concelier.Models.Tests/SeverityNormalizationTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/SeverityNormalizationTests.cs diff --git a/src/StellaOps.Concelier.Models.Tests/StellaOps.Concelier.Models.Tests.csproj b/src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/StellaOps.Concelier.Models.Tests.csproj similarity index 70% rename from src/StellaOps.Concelier.Models.Tests/StellaOps.Concelier.Models.Tests.csproj rename to 
src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/StellaOps.Concelier.Models.Tests.csproj index 5320b137..0c648971 100644 --- a/src/StellaOps.Concelier.Models.Tests/StellaOps.Concelier.Models.Tests.csproj +++ b/src/Concelier/__Tests/StellaOps.Concelier.Models.Tests/StellaOps.Concelier.Models.Tests.csproj @@ -1,15 +1,16 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> <ItemGroup> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> </ItemGroup> <ItemGroup Condition="'$(UpdateGoldens)' == 'true'"> <EnvironmentVariables Include="UPDATE_GOLDENS"> <Value>1</Value> </EnvironmentVariables> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.Normalization.Tests/CpeNormalizerTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Normalization.Tests/CpeNormalizerTests.cs similarity index 100% rename from src/StellaOps.Concelier.Normalization.Tests/CpeNormalizerTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Normalization.Tests/CpeNormalizerTests.cs diff --git a/src/StellaOps.Concelier.Normalization.Tests/CvssMetricNormalizerTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Normalization.Tests/CvssMetricNormalizerTests.cs similarity index 100% rename from src/StellaOps.Concelier.Normalization.Tests/CvssMetricNormalizerTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Normalization.Tests/CvssMetricNormalizerTests.cs diff --git a/src/StellaOps.Concelier.Normalization.Tests/DebianEvrParserTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Normalization.Tests/DebianEvrParserTests.cs similarity index 100% rename from src/StellaOps.Concelier.Normalization.Tests/DebianEvrParserTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Normalization.Tests/DebianEvrParserTests.cs diff --git a/src/StellaOps.Concelier.Normalization.Tests/DescriptionNormalizerTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Normalization.Tests/DescriptionNormalizerTests.cs similarity index 100% rename from src/StellaOps.Concelier.Normalization.Tests/DescriptionNormalizerTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Normalization.Tests/DescriptionNormalizerTests.cs diff --git a/src/StellaOps.Concelier.Normalization.Tests/NevraParserTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Normalization.Tests/NevraParserTests.cs similarity index 100% rename from src/StellaOps.Concelier.Normalization.Tests/NevraParserTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Normalization.Tests/NevraParserTests.cs diff --git a/src/StellaOps.Concelier.Normalization.Tests/PackageUrlNormalizerTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Normalization.Tests/PackageUrlNormalizerTests.cs similarity index 100% rename from src/StellaOps.Concelier.Normalization.Tests/PackageUrlNormalizerTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Normalization.Tests/PackageUrlNormalizerTests.cs diff --git a/src/StellaOps.Concelier.Normalization.Tests/SemVerRangeRuleBuilderTests.cs 
b/src/Concelier/__Tests/StellaOps.Concelier.Normalization.Tests/SemVerRangeRuleBuilderTests.cs similarity index 100% rename from src/StellaOps.Concelier.Normalization.Tests/SemVerRangeRuleBuilderTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Normalization.Tests/SemVerRangeRuleBuilderTests.cs diff --git a/src/Concelier/__Tests/StellaOps.Concelier.Normalization.Tests/StellaOps.Concelier.Normalization.Tests.csproj b/src/Concelier/__Tests/StellaOps.Concelier.Normalization.Tests/StellaOps.Concelier.Normalization.Tests.csproj new file mode 100644 index 00000000..2a2dcacf --- /dev/null +++ b/src/Concelier/__Tests/StellaOps.Concelier.Normalization.Tests/StellaOps.Concelier.Normalization.Tests.csproj @@ -0,0 +1,12 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.RawModels.Tests/StellaOps.Concelier.RawModels.Tests.csproj b/src/Concelier/__Tests/StellaOps.Concelier.RawModels.Tests/StellaOps.Concelier.RawModels.Tests.csproj similarity index 80% rename from src/StellaOps.Concelier.RawModels.Tests/StellaOps.Concelier.RawModels.Tests.csproj rename to src/Concelier/__Tests/StellaOps.Concelier.RawModels.Tests/StellaOps.Concelier.RawModels.Tests.csproj index b9185eb7..18d23f75 100644 --- a/src/StellaOps.Concelier.RawModels.Tests/StellaOps.Concelier.RawModels.Tests.csproj +++ b/src/Concelier/__Tests/StellaOps.Concelier.RawModels.Tests/StellaOps.Concelier.RawModels.Tests.csproj @@ -1,4 +1,5 @@ -<Project Sdk="Microsoft.NET.Sdk"> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> <LangVersion>preview</LangVersion> @@ -21,6 +22,6 @@ <Using Include="Xunit" /> </ItemGroup> <ItemGroup> - <ProjectReference Include="..\StellaOps.Concelier.RawModels\StellaOps.Concelier.RawModels.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.RawModels/StellaOps.Concelier.RawModels.csproj" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.RawModels.Tests/UnitTest1.cs b/src/Concelier/__Tests/StellaOps.Concelier.RawModels.Tests/UnitTest1.cs similarity index 92% rename from src/StellaOps.Concelier.RawModels.Tests/UnitTest1.cs rename to src/Concelier/__Tests/StellaOps.Concelier.RawModels.Tests/UnitTest1.cs index 72a58b2e..29731e73 100644 --- a/src/StellaOps.Concelier.RawModels.Tests/UnitTest1.cs +++ b/src/Concelier/__Tests/StellaOps.Concelier.RawModels.Tests/UnitTest1.cs @@ -1,10 +1,10 @@ -namespace StellaOps.Concelier.RawModels.Tests; - -public class UnitTest1 -{ - [Fact] - public void Test1() - { - - } -} +namespace StellaOps.Concelier.RawModels.Tests; + +public class UnitTest1 +{ + [Fact] + public void Test1() + { + + } +} diff --git a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Tests/xunit.runner.json b/src/Concelier/__Tests/StellaOps.Concelier.RawModels.Tests/xunit.runner.json similarity index 96% rename from src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Tests/xunit.runner.json rename to 
src/Concelier/__Tests/StellaOps.Concelier.RawModels.Tests/xunit.runner.json index 86c7ea05..249d815c 100644 --- a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Tests/xunit.runner.json +++ b/src/Concelier/__Tests/StellaOps.Concelier.RawModels.Tests/xunit.runner.json @@ -1,3 +1,3 @@ -{ - "$schema": "https://xunit.net/schema/current/xunit.runner.schema.json" -} +{ + "$schema": "https://xunit.net/schema/current/xunit.runner.schema.json" +} diff --git a/src/StellaOps.Concelier.Storage.Mongo.Tests/AdvisoryConflictStoreTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/AdvisoryConflictStoreTests.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo.Tests/AdvisoryConflictStoreTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/AdvisoryConflictStoreTests.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo.Tests/AdvisoryStatementStoreTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/AdvisoryStatementStoreTests.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo.Tests/AdvisoryStatementStoreTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/AdvisoryStatementStoreTests.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo.Tests/AdvisoryStorePerformanceTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/AdvisoryStorePerformanceTests.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo.Tests/AdvisoryStorePerformanceTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/AdvisoryStorePerformanceTests.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo.Tests/AdvisoryStoreTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/AdvisoryStoreTests.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo.Tests/AdvisoryStoreTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/AdvisoryStoreTests.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo.Tests/AliasStoreTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/AliasStoreTests.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo.Tests/AliasStoreTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/AliasStoreTests.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo.Tests/DocumentStoreTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/DocumentStoreTests.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo.Tests/DocumentStoreTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/DocumentStoreTests.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo.Tests/DtoStoreTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/DtoStoreTests.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo.Tests/DtoStoreTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/DtoStoreTests.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo.Tests/ExportStateManagerTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/ExportStateManagerTests.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo.Tests/ExportStateManagerTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/ExportStateManagerTests.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo.Tests/ExportStateStoreTests.cs 
b/src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/ExportStateStoreTests.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo.Tests/ExportStateStoreTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/ExportStateStoreTests.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo.Tests/MergeEventStoreTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/MergeEventStoreTests.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo.Tests/MergeEventStoreTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/MergeEventStoreTests.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo.Tests/Migrations/MongoMigrationRunnerTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/Migrations/MongoMigrationRunnerTests.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo.Tests/Migrations/MongoMigrationRunnerTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/Migrations/MongoMigrationRunnerTests.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo.Tests/MongoAdvisoryEventRepositoryTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/MongoAdvisoryEventRepositoryTests.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo.Tests/MongoAdvisoryEventRepositoryTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/MongoAdvisoryEventRepositoryTests.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo.Tests/MongoBootstrapperTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/MongoBootstrapperTests.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo.Tests/MongoBootstrapperTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/MongoBootstrapperTests.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo.Tests/MongoJobStoreTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/MongoJobStoreTests.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo.Tests/MongoJobStoreTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/MongoJobStoreTests.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo.Tests/MongoSourceStateRepositoryTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/MongoSourceStateRepositoryTests.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo.Tests/MongoSourceStateRepositoryTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/MongoSourceStateRepositoryTests.cs diff --git a/src/StellaOps.Concelier.Storage.Mongo.Tests/Observations/AdvisoryObservationDocumentFactoryTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/Observations/AdvisoryObservationDocumentFactoryTests.cs similarity index 97% rename from src/StellaOps.Concelier.Storage.Mongo.Tests/Observations/AdvisoryObservationDocumentFactoryTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/Observations/AdvisoryObservationDocumentFactoryTests.cs index bbd4d17a..c8685d3b 100644 --- a/src/StellaOps.Concelier.Storage.Mongo.Tests/Observations/AdvisoryObservationDocumentFactoryTests.cs +++ b/src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/Observations/AdvisoryObservationDocumentFactoryTests.cs @@ -1,68 +1,68 @@ -using System; -using System.Collections.Generic; -using MongoDB.Bson; -using StellaOps.Concelier.Storage.Mongo.Observations; 
-using Xunit; - -namespace StellaOps.Concelier.Storage.Mongo.Tests.Observations; - -public sealed class AdvisoryObservationDocumentFactoryTests -{ - [Fact] - public void ToModel_MapsDocumentToModel() - { - var document = new AdvisoryObservationDocument - { - Id = "tenant-a:obs-1", - Tenant = "tenant-a", - CreatedAt = DateTime.SpecifyKind(DateTime.UtcNow, DateTimeKind.Utc), - Source = new AdvisoryObservationSourceDocument - { - Vendor = "vendor", - Stream = "stream", - Api = "https://api.example" - }, - Upstream = new AdvisoryObservationUpstreamDocument - { - UpstreamId = "CVE-2025-1234", - DocumentVersion = "1", - FetchedAt = DateTime.SpecifyKind(DateTime.UtcNow.AddMinutes(-1), DateTimeKind.Utc), - ReceivedAt = DateTime.SpecifyKind(DateTime.UtcNow, DateTimeKind.Utc), - ContentHash = "sha256:abc", - Signature = new AdvisoryObservationSignatureDocument - { - Present = true, - Format = "pgp", - KeyId = "key", - Signature = "signature" - } - }, - Content = new AdvisoryObservationContentDocument - { - Format = "CSAF", - SpecVersion = "2.0", - Raw = BsonDocument.Parse("{\"example\":true}") - }, - Linkset = new AdvisoryObservationLinksetDocument - { - Aliases = new List<string> { "CVE-2025-1234" }, - Purls = new List<string> { "pkg:generic/foo@1.0.0" }, - Cpes = new List<string> { "cpe:/a:vendor:product:1" }, - References = new List<AdvisoryObservationReferenceDocument> - { - new() { Type = "advisory", Url = "https://example.com" } - } - } - }; - - var observation = AdvisoryObservationDocumentFactory.ToModel(document); - - Assert.Equal("tenant-a:obs-1", observation.ObservationId); - Assert.Equal("tenant-a", observation.Tenant); - Assert.Equal("CVE-2025-1234", observation.Upstream.UpstreamId); - Assert.Contains("pkg:generic/foo@1.0.0", observation.Linkset.Purls); - Assert.Equal("CSAF", observation.Content.Format); - Assert.True(observation.Content.Raw?["example"]?.GetValue<bool>()); - Assert.Equal("advisory", observation.Linkset.References[0].Type); - } -} +using System; +using System.Collections.Generic; +using MongoDB.Bson; +using StellaOps.Concelier.Storage.Mongo.Observations; +using Xunit; + +namespace StellaOps.Concelier.Storage.Mongo.Tests.Observations; + +public sealed class AdvisoryObservationDocumentFactoryTests +{ + [Fact] + public void ToModel_MapsDocumentToModel() + { + var document = new AdvisoryObservationDocument + { + Id = "tenant-a:obs-1", + Tenant = "tenant-a", + CreatedAt = DateTime.SpecifyKind(DateTime.UtcNow, DateTimeKind.Utc), + Source = new AdvisoryObservationSourceDocument + { + Vendor = "vendor", + Stream = "stream", + Api = "https://api.example" + }, + Upstream = new AdvisoryObservationUpstreamDocument + { + UpstreamId = "CVE-2025-1234", + DocumentVersion = "1", + FetchedAt = DateTime.SpecifyKind(DateTime.UtcNow.AddMinutes(-1), DateTimeKind.Utc), + ReceivedAt = DateTime.SpecifyKind(DateTime.UtcNow, DateTimeKind.Utc), + ContentHash = "sha256:abc", + Signature = new AdvisoryObservationSignatureDocument + { + Present = true, + Format = "pgp", + KeyId = "key", + Signature = "signature" + } + }, + Content = new AdvisoryObservationContentDocument + { + Format = "CSAF", + SpecVersion = "2.0", + Raw = BsonDocument.Parse("{\"example\":true}") + }, + Linkset = new AdvisoryObservationLinksetDocument + { + Aliases = new List<string> { "CVE-2025-1234" }, + Purls = new List<string> { "pkg:generic/foo@1.0.0" }, + Cpes = new List<string> { "cpe:/a:vendor:product:1" }, + References = new List<AdvisoryObservationReferenceDocument> + { + new() { Type = "advisory", Url = 
"https://example.com" } + } + } + }; + + var observation = AdvisoryObservationDocumentFactory.ToModel(document); + + Assert.Equal("tenant-a:obs-1", observation.ObservationId); + Assert.Equal("tenant-a", observation.Tenant); + Assert.Equal("CVE-2025-1234", observation.Upstream.UpstreamId); + Assert.Contains("pkg:generic/foo@1.0.0", observation.Linkset.Purls); + Assert.Equal("CSAF", observation.Content.Format); + Assert.True(observation.Content.Raw?["example"]?.GetValue<bool>()); + Assert.Equal("advisory", observation.Linkset.References[0].Type); + } +} diff --git a/src/StellaOps.Concelier.Storage.Mongo.Tests/Observations/AdvisoryObservationStoreTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/Observations/AdvisoryObservationStoreTests.cs similarity index 97% rename from src/StellaOps.Concelier.Storage.Mongo.Tests/Observations/AdvisoryObservationStoreTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/Observations/AdvisoryObservationStoreTests.cs index 72f06f42..241b304b 100644 --- a/src/StellaOps.Concelier.Storage.Mongo.Tests/Observations/AdvisoryObservationStoreTests.cs +++ b/src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/Observations/AdvisoryObservationStoreTests.cs @@ -1,222 +1,222 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; -using MongoDB.Bson; -using MongoDB.Driver; -using StellaOps.Concelier.Core.Observations; -using StellaOps.Concelier.Storage.Mongo; -using StellaOps.Concelier.Storage.Mongo.Observations; -using StellaOps.Concelier.Testing; -using Xunit; - -namespace StellaOps.Concelier.Storage.Mongo.Tests.Observations; - -[Collection("mongo-fixture")] -public sealed class AdvisoryObservationStoreTests : IClassFixture<MongoIntegrationFixture> -{ - private readonly MongoIntegrationFixture _fixture; - - public AdvisoryObservationStoreTests(MongoIntegrationFixture fixture) - { - _fixture = fixture; - } - - [Fact] - public async Task FindByFiltersAsync_FiltersByAliasAndTenant() - { - await ResetCollectionAsync(); - - var collection = _fixture.Database.GetCollection<AdvisoryObservationDocument>(MongoStorageDefaults.Collections.AdvisoryObservations); - await collection.InsertManyAsync(new[] - { - CreateDocument( - id: "tenant-a:nvd:alpha:1", - tenant: "tenant-a", - createdAt: new DateTime(2025, 1, 1, 0, 0, 0, DateTimeKind.Utc), - aliases: new[] { "cve-2025-0001" }, - purls: new[] { "pkg:npm/demo@1.0.0" }), - CreateDocument( - id: "tenant-a:ghsa:beta:1", - tenant: "tenant-a", - createdAt: new DateTime(2025, 1, 2, 0, 0, 0, DateTimeKind.Utc), - aliases: new[] { "ghsa-xyz0", "cve-2025-0001" }, - purls: new[] { "pkg:npm/demo@1.1.0" }), - CreateDocument( - id: "tenant-b:nvd:alpha:1", - tenant: "tenant-b", - createdAt: new DateTime(2025, 1, 3, 0, 0, 0, DateTimeKind.Utc), - aliases: new[] { "cve-2025-0001" }, - purls: new[] { "pkg:npm/demo@2.0.0" }) - }); - - var store = new AdvisoryObservationStore(collection); - var result = await store.FindByFiltersAsync( - tenant: "Tenant-A", - observationIds: Array.Empty<string>(), - aliases: new[] { " CVE-2025-0001 " }, - purls: Array.Empty<string>(), - cpes: Array.Empty<string>(), - cursor: null, - limit: 5, - CancellationToken.None); - - Assert.Equal(2, result.Count); - Assert.Equal("tenant-a:ghsa:beta:1", result[0].ObservationId); - Assert.Equal("tenant-a:nvd:alpha:1", result[1].ObservationId); - Assert.All(result, observation => Assert.Equal("tenant-a", observation.Tenant)); - } - - [Fact] - public async 
Task FindByFiltersAsync_RespectsObservationIdsAndPurls() - { - await ResetCollectionAsync(); - - var collection = _fixture.Database.GetCollection<AdvisoryObservationDocument>(MongoStorageDefaults.Collections.AdvisoryObservations); - await collection.InsertManyAsync(new[] - { - CreateDocument( - id: "tenant-a:osv:alpha:1", - tenant: "tenant-a", - createdAt: new DateTime(2025, 2, 1, 0, 0, 0, DateTimeKind.Utc), - aliases: new[] { "cve-2025-0100" }, - purls: new[] { "pkg:pypi/demo@2.0.0" }, - cpes: new[] { "cpe:/a:vendor:product:2.0" }), - CreateDocument( - id: "tenant-a:osv:alpha:2", - tenant: "tenant-a", - createdAt: new DateTime(2025, 2, 2, 0, 0, 0, DateTimeKind.Utc), - aliases: new[] { "cve-2025-0100" }, - purls: new[] { "pkg:pypi/demo@2.1.0" }, - cpes: new[] { "cpe:/a:vendor:product:2.1" }) - }); - - var store = new AdvisoryObservationStore(collection); - var result = await store.FindByFiltersAsync( - tenant: "tenant-a", - observationIds: new[] { "tenant-a:osv:alpha:1" }, - aliases: Array.Empty<string>(), - purls: new[] { "pkg:pypi/demo@2.0.0" }, - cpes: new[] { "cpe:/a:vendor:product:2.0" }, - cursor: null, - limit: 5, - CancellationToken.None); - - Assert.Single(result); - Assert.Equal("tenant-a:osv:alpha:1", result[0].ObservationId); - Assert.Equal( - new[] { "pkg:pypi/demo@2.0.0" }, - result[0].Linkset.Purls.ToArray()); - Assert.Equal( - new[] { "cpe:/a:vendor:product:2.0" }, - result[0].Linkset.Cpes.ToArray()); - } - - [Fact] - public async Task FindByFiltersAsync_AppliesCursorForPagination() - { - await ResetCollectionAsync(); - - var collection = _fixture.Database.GetCollection<AdvisoryObservationDocument>(MongoStorageDefaults.Collections.AdvisoryObservations); - var createdAt = new DateTime(2025, 3, 1, 0, 0, 0, DateTimeKind.Utc); - await collection.InsertManyAsync(new[] - { - CreateDocument("tenant-a:source:1", "tenant-a", createdAt, aliases: new[] { "cve-1" }), - CreateDocument("tenant-a:source:2", "tenant-a", createdAt.AddMinutes(-1), aliases: new[] { "cve-2" }), - CreateDocument("tenant-a:source:3", "tenant-a", createdAt.AddMinutes(-2), aliases: new[] { "cve-3" }) - }); - - var store = new AdvisoryObservationStore(collection); - - var firstPage = await store.FindByFiltersAsync( - tenant: "tenant-a", - observationIds: Array.Empty<string>(), - aliases: Array.Empty<string>(), - purls: Array.Empty<string>(), - cpes: Array.Empty<string>(), - cursor: null, - limit: 2, - CancellationToken.None); - - Assert.Equal(2, firstPage.Count); - Assert.Equal("tenant-a:source:1", firstPage[0].ObservationId); - Assert.Equal("tenant-a:source:2", firstPage[1].ObservationId); - - var cursor = new AdvisoryObservationCursor(firstPage[1].CreatedAt, firstPage[1].ObservationId); - var secondPage = await store.FindByFiltersAsync( - tenant: "tenant-a", - observationIds: Array.Empty<string>(), - aliases: Array.Empty<string>(), - purls: Array.Empty<string>(), - cpes: Array.Empty<string>(), - cursor: cursor, - limit: 2, - CancellationToken.None); - - Assert.Single(secondPage); - Assert.Equal("tenant-a:source:3", secondPage[0].ObservationId); - } - - private static AdvisoryObservationDocument CreateDocument( - string id, - string tenant, - DateTime createdAt, - IEnumerable<string>? aliases = null, - IEnumerable<string>? purls = null, - IEnumerable<string>? 
cpes = null) - { - return new AdvisoryObservationDocument - { - Id = id, - Tenant = tenant.ToLowerInvariant(), - CreatedAt = createdAt, - Source = new AdvisoryObservationSourceDocument - { - Vendor = "nvd", - Stream = "feed", - Api = "https://example.test/api" - }, - Upstream = new AdvisoryObservationUpstreamDocument - { - UpstreamId = id, - DocumentVersion = null, - FetchedAt = createdAt, - ReceivedAt = createdAt, - ContentHash = $"sha256:{id}", - Signature = new AdvisoryObservationSignatureDocument - { - Present = false - }, - Metadata = new Dictionary<string, string>(StringComparer.Ordinal) - }, - Content = new AdvisoryObservationContentDocument - { - Format = "csaf", - SpecVersion = "2.0", - Raw = BsonDocument.Parse("""{"id": "%ID%"}""".Replace("%ID%", id)), - Metadata = new Dictionary<string, string>(StringComparer.Ordinal) - }, - Linkset = new AdvisoryObservationLinksetDocument - { - Aliases = aliases?.Select(value => value.Trim()).ToList(), - Purls = purls?.Select(value => value.Trim()).ToList(), - Cpes = cpes?.Select(value => value.Trim()).ToList(), - References = new List<AdvisoryObservationReferenceDocument>() - }, - Attributes = new Dictionary<string, string>(StringComparer.Ordinal) - }; - } - - private async Task ResetCollectionAsync() - { - try - { - await _fixture.Database.DropCollectionAsync(MongoStorageDefaults.Collections.AdvisoryObservations); - } - catch (MongoCommandException ex) when (ex.CodeName == "NamespaceNotFound" || ex.Message.Contains("ns not found", StringComparison.OrdinalIgnoreCase)) - { - // Collection did not exist – ignore. - } - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using MongoDB.Bson; +using MongoDB.Driver; +using StellaOps.Concelier.Core.Observations; +using StellaOps.Concelier.Storage.Mongo; +using StellaOps.Concelier.Storage.Mongo.Observations; +using StellaOps.Concelier.Testing; +using Xunit; + +namespace StellaOps.Concelier.Storage.Mongo.Tests.Observations; + +[Collection("mongo-fixture")] +public sealed class AdvisoryObservationStoreTests : IClassFixture<MongoIntegrationFixture> +{ + private readonly MongoIntegrationFixture _fixture; + + public AdvisoryObservationStoreTests(MongoIntegrationFixture fixture) + { + _fixture = fixture; + } + + [Fact] + public async Task FindByFiltersAsync_FiltersByAliasAndTenant() + { + await ResetCollectionAsync(); + + var collection = _fixture.Database.GetCollection<AdvisoryObservationDocument>(MongoStorageDefaults.Collections.AdvisoryObservations); + await collection.InsertManyAsync(new[] + { + CreateDocument( + id: "tenant-a:nvd:alpha:1", + tenant: "tenant-a", + createdAt: new DateTime(2025, 1, 1, 0, 0, 0, DateTimeKind.Utc), + aliases: new[] { "cve-2025-0001" }, + purls: new[] { "pkg:npm/demo@1.0.0" }), + CreateDocument( + id: "tenant-a:ghsa:beta:1", + tenant: "tenant-a", + createdAt: new DateTime(2025, 1, 2, 0, 0, 0, DateTimeKind.Utc), + aliases: new[] { "ghsa-xyz0", "cve-2025-0001" }, + purls: new[] { "pkg:npm/demo@1.1.0" }), + CreateDocument( + id: "tenant-b:nvd:alpha:1", + tenant: "tenant-b", + createdAt: new DateTime(2025, 1, 3, 0, 0, 0, DateTimeKind.Utc), + aliases: new[] { "cve-2025-0001" }, + purls: new[] { "pkg:npm/demo@2.0.0" }) + }); + + var store = new AdvisoryObservationStore(collection); + var result = await store.FindByFiltersAsync( + tenant: "Tenant-A", + observationIds: Array.Empty<string>(), + aliases: new[] { " CVE-2025-0001 " }, + purls: Array.Empty<string>(), + cpes: Array.Empty<string>(), + cursor: 
null, + limit: 5, + CancellationToken.None); + + Assert.Equal(2, result.Count); + Assert.Equal("tenant-a:ghsa:beta:1", result[0].ObservationId); + Assert.Equal("tenant-a:nvd:alpha:1", result[1].ObservationId); + Assert.All(result, observation => Assert.Equal("tenant-a", observation.Tenant)); + } + + [Fact] + public async Task FindByFiltersAsync_RespectsObservationIdsAndPurls() + { + await ResetCollectionAsync(); + + var collection = _fixture.Database.GetCollection<AdvisoryObservationDocument>(MongoStorageDefaults.Collections.AdvisoryObservations); + await collection.InsertManyAsync(new[] + { + CreateDocument( + id: "tenant-a:osv:alpha:1", + tenant: "tenant-a", + createdAt: new DateTime(2025, 2, 1, 0, 0, 0, DateTimeKind.Utc), + aliases: new[] { "cve-2025-0100" }, + purls: new[] { "pkg:pypi/demo@2.0.0" }, + cpes: new[] { "cpe:/a:vendor:product:2.0" }), + CreateDocument( + id: "tenant-a:osv:alpha:2", + tenant: "tenant-a", + createdAt: new DateTime(2025, 2, 2, 0, 0, 0, DateTimeKind.Utc), + aliases: new[] { "cve-2025-0100" }, + purls: new[] { "pkg:pypi/demo@2.1.0" }, + cpes: new[] { "cpe:/a:vendor:product:2.1" }) + }); + + var store = new AdvisoryObservationStore(collection); + var result = await store.FindByFiltersAsync( + tenant: "tenant-a", + observationIds: new[] { "tenant-a:osv:alpha:1" }, + aliases: Array.Empty<string>(), + purls: new[] { "pkg:pypi/demo@2.0.0" }, + cpes: new[] { "cpe:/a:vendor:product:2.0" }, + cursor: null, + limit: 5, + CancellationToken.None); + + Assert.Single(result); + Assert.Equal("tenant-a:osv:alpha:1", result[0].ObservationId); + Assert.Equal( + new[] { "pkg:pypi/demo@2.0.0" }, + result[0].Linkset.Purls.ToArray()); + Assert.Equal( + new[] { "cpe:/a:vendor:product:2.0" }, + result[0].Linkset.Cpes.ToArray()); + } + + [Fact] + public async Task FindByFiltersAsync_AppliesCursorForPagination() + { + await ResetCollectionAsync(); + + var collection = _fixture.Database.GetCollection<AdvisoryObservationDocument>(MongoStorageDefaults.Collections.AdvisoryObservations); + var createdAt = new DateTime(2025, 3, 1, 0, 0, 0, DateTimeKind.Utc); + await collection.InsertManyAsync(new[] + { + CreateDocument("tenant-a:source:1", "tenant-a", createdAt, aliases: new[] { "cve-1" }), + CreateDocument("tenant-a:source:2", "tenant-a", createdAt.AddMinutes(-1), aliases: new[] { "cve-2" }), + CreateDocument("tenant-a:source:3", "tenant-a", createdAt.AddMinutes(-2), aliases: new[] { "cve-3" }) + }); + + var store = new AdvisoryObservationStore(collection); + + var firstPage = await store.FindByFiltersAsync( + tenant: "tenant-a", + observationIds: Array.Empty<string>(), + aliases: Array.Empty<string>(), + purls: Array.Empty<string>(), + cpes: Array.Empty<string>(), + cursor: null, + limit: 2, + CancellationToken.None); + + Assert.Equal(2, firstPage.Count); + Assert.Equal("tenant-a:source:1", firstPage[0].ObservationId); + Assert.Equal("tenant-a:source:2", firstPage[1].ObservationId); + + var cursor = new AdvisoryObservationCursor(firstPage[1].CreatedAt, firstPage[1].ObservationId); + var secondPage = await store.FindByFiltersAsync( + tenant: "tenant-a", + observationIds: Array.Empty<string>(), + aliases: Array.Empty<string>(), + purls: Array.Empty<string>(), + cpes: Array.Empty<string>(), + cursor: cursor, + limit: 2, + CancellationToken.None); + + Assert.Single(secondPage); + Assert.Equal("tenant-a:source:3", secondPage[0].ObservationId); + } + + private static AdvisoryObservationDocument CreateDocument( + string id, + string tenant, + DateTime createdAt, + IEnumerable<string>? 
aliases = null, + IEnumerable<string>? purls = null, + IEnumerable<string>? cpes = null) + { + return new AdvisoryObservationDocument + { + Id = id, + Tenant = tenant.ToLowerInvariant(), + CreatedAt = createdAt, + Source = new AdvisoryObservationSourceDocument + { + Vendor = "nvd", + Stream = "feed", + Api = "https://example.test/api" + }, + Upstream = new AdvisoryObservationUpstreamDocument + { + UpstreamId = id, + DocumentVersion = null, + FetchedAt = createdAt, + ReceivedAt = createdAt, + ContentHash = $"sha256:{id}", + Signature = new AdvisoryObservationSignatureDocument + { + Present = false + }, + Metadata = new Dictionary<string, string>(StringComparer.Ordinal) + }, + Content = new AdvisoryObservationContentDocument + { + Format = "csaf", + SpecVersion = "2.0", + Raw = BsonDocument.Parse("""{"id": "%ID%"}""".Replace("%ID%", id)), + Metadata = new Dictionary<string, string>(StringComparer.Ordinal) + }, + Linkset = new AdvisoryObservationLinksetDocument + { + Aliases = aliases?.Select(value => value.Trim()).ToList(), + Purls = purls?.Select(value => value.Trim()).ToList(), + Cpes = cpes?.Select(value => value.Trim()).ToList(), + References = new List<AdvisoryObservationReferenceDocument>() + }, + Attributes = new Dictionary<string, string>(StringComparer.Ordinal) + }; + } + + private async Task ResetCollectionAsync() + { + try + { + await _fixture.Database.DropCollectionAsync(MongoStorageDefaults.Collections.AdvisoryObservations); + } + catch (MongoCommandException ex) when (ex.CodeName == "NamespaceNotFound" || ex.Message.Contains("ns not found", StringComparison.OrdinalIgnoreCase)) + { + // Collection did not exist – ignore. + } + } +} diff --git a/src/StellaOps.Concelier.Storage.Mongo.Tests/RawDocumentRetentionServiceTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/RawDocumentRetentionServiceTests.cs similarity index 100% rename from src/StellaOps.Concelier.Storage.Mongo.Tests/RawDocumentRetentionServiceTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/RawDocumentRetentionServiceTests.cs diff --git a/src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/StellaOps.Concelier.Storage.Mongo.Tests.csproj b/src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/StellaOps.Concelier.Storage.Mongo.Tests.csproj new file mode 100644 index 00000000..2dfd72e1 --- /dev/null +++ b/src/Concelier/__Tests/StellaOps.Concelier.Storage.Mongo.Tests/StellaOps.Concelier.Storage.Mongo.Tests.csproj @@ -0,0 +1,16 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> + <PackageReference Update="Microsoft.Extensions.TimeProvider.Testing" Version="9.10.0" /> + </ItemGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.WebService.Tests/ConcelierOptionsPostConfigureTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.WebService.Tests/ConcelierOptionsPostConfigureTests.cs similarity index 100% rename from 
src/StellaOps.Concelier.WebService.Tests/ConcelierOptionsPostConfigureTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.WebService.Tests/ConcelierOptionsPostConfigureTests.cs diff --git a/src/StellaOps.Concelier.WebService.Tests/PluginLoaderTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.WebService.Tests/PluginLoaderTests.cs similarity index 100% rename from src/StellaOps.Concelier.WebService.Tests/PluginLoaderTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.WebService.Tests/PluginLoaderTests.cs diff --git a/src/Concelier/__Tests/StellaOps.Concelier.WebService.Tests/StellaOps.Concelier.WebService.Tests.csproj b/src/Concelier/__Tests/StellaOps.Concelier.WebService.Tests/StellaOps.Concelier.WebService.Tests.csproj new file mode 100644 index 00000000..76d0dee9 --- /dev/null +++ b/src/Concelier/__Tests/StellaOps.Concelier.WebService.Tests/StellaOps.Concelier.WebService.Tests.csproj @@ -0,0 +1,14 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> + <ProjectReference Include="../../StellaOps.Concelier.WebService/StellaOps.Concelier.WebService.csproj" /> + <ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Concelier.WebService.Tests/WebServiceEndpointsTests.cs b/src/Concelier/__Tests/StellaOps.Concelier.WebService.Tests/WebServiceEndpointsTests.cs similarity index 97% rename from src/StellaOps.Concelier.WebService.Tests/WebServiceEndpointsTests.cs rename to src/Concelier/__Tests/StellaOps.Concelier.WebService.Tests/WebServiceEndpointsTests.cs index 154ecdd3..a138eaa1 100644 --- a/src/StellaOps.Concelier.WebService.Tests/WebServiceEndpointsTests.cs +++ b/src/Concelier/__Tests/StellaOps.Concelier.WebService.Tests/WebServiceEndpointsTests.cs @@ -1,1790 +1,1790 @@ -using System; -using System.Collections.Generic; -using System.Diagnostics.Metrics; -using System.Globalization; -using System.IdentityModel.Tokens.Jwt; -using System.IO; -using System.Linq; -using System.Net; -using System.Net.Http.Json; -using System.Net.Http.Headers; -using System.Security.Claims; -using System.Text; -using System.Text.Json; -using Microsoft.AspNetCore.Builder; -using Microsoft.AspNetCore.Hosting; -using Microsoft.AspNetCore.Mvc.Testing; -using Microsoft.AspNetCore.TestHost; -using Microsoft.AspNetCore.Authentication.JwtBearer; -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using Mongo2Go; -using MongoDB.Bson; -using MongoDB.Driver; -using StellaOps.Concelier.Core.Events; -using StellaOps.Concelier.Core.Jobs; -using StellaOps.Concelier.Models; -using StellaOps.Concelier.Merge.Services; -using StellaOps.Concelier.Storage.Mongo; -using StellaOps.Concelier.Storage.Mongo.Observations; -using StellaOps.Concelier.WebService.Jobs; -using StellaOps.Concelier.WebService.Options; -using StellaOps.Concelier.WebService.Contracts; -using Xunit.Sdk; -using StellaOps.Auth.Abstractions; -using StellaOps.Auth.Client; -using Xunit; -using 
Microsoft.IdentityModel.Protocols; -using Microsoft.IdentityModel.Protocols.OpenIdConnect; -using Microsoft.IdentityModel.Tokens; - -namespace StellaOps.Concelier.WebService.Tests; - -public sealed class WebServiceEndpointsTests : IAsyncLifetime -{ - private const string TestAuthorityIssuer = "https://authority.example"; - private const string TestAuthorityAudience = "api://concelier"; - private const string TestSigningSecret = "0123456789ABCDEF0123456789ABCDEF"; - private static readonly SymmetricSecurityKey TestSigningKey = new(Encoding.UTF8.GetBytes(TestSigningSecret)); - - private MongoDbRunner _runner = null!; - private ConcelierApplicationFactory _factory = null!; - - public Task InitializeAsync() - { - _runner = MongoDbRunner.Start(singleNodeReplSet: true); - _factory = new ConcelierApplicationFactory(_runner.ConnectionString); - return Task.CompletedTask; - } - - public Task DisposeAsync() - { - _factory.Dispose(); - _runner.Dispose(); - return Task.CompletedTask; - } - - [Fact] - public async Task HealthAndReadyEndpointsRespond() - { - using var client = _factory.CreateClient(); - - var healthResponse = await client.GetAsync("/health"); - if (!healthResponse.IsSuccessStatusCode) - { - var body = await healthResponse.Content.ReadAsStringAsync(); - throw new Xunit.Sdk.XunitException($"/health failed: {(int)healthResponse.StatusCode} {body}"); - } - - var readyResponse = await client.GetAsync("/ready"); - if (!readyResponse.IsSuccessStatusCode) - { - var body = await readyResponse.Content.ReadAsStringAsync(); - throw new Xunit.Sdk.XunitException($"/ready failed: {(int)readyResponse.StatusCode} {body}"); - } - - var healthPayload = await healthResponse.Content.ReadFromJsonAsync<HealthPayload>(); - Assert.NotNull(healthPayload); - Assert.Equal("healthy", healthPayload!.Status); - Assert.Equal("mongo", healthPayload.Storage.Driver); - - var readyPayload = await readyResponse.Content.ReadFromJsonAsync<ReadyPayload>(); - Assert.NotNull(readyPayload); - Assert.Equal("ready", readyPayload!.Status); - Assert.Equal("ready", readyPayload.Mongo.Status); - } - - [Fact] - public async Task ObservationsEndpoint_ReturnsTenantScopedResults() - { - await SeedObservationDocumentsAsync(BuildSampleObservationDocuments()); - - using var client = _factory.CreateClient(); - - var response = await client.GetAsync("/concelier/observations?tenant=tenant-a&alias=CVE-2025-0001"); - if (!response.IsSuccessStatusCode) - { - var body = await response.Content.ReadAsStringAsync(); - throw new XunitException($"/concelier/observations failed: {(int)response.StatusCode} {body}"); - } - - using var document = await response.Content.ReadFromJsonAsync<JsonDocument>(); - Assert.NotNull(document); - var root = document!.RootElement; - var observations = root.GetProperty("observations").EnumerateArray().ToArray(); - Assert.Equal(2, observations.Length); - Assert.Equal("tenant-a:ghsa:beta:1", observations[0].GetProperty("observationId").GetString()); - Assert.Equal("tenant-a:nvd:alpha:1", observations[1].GetProperty("observationId").GetString()); - - var linkset = root.GetProperty("linkset"); - Assert.Equal(new[] { "cve-2025-0001", "ghsa-2025-xyz" }, linkset.GetProperty("aliases").EnumerateArray().Select(x => x.GetString()).ToArray()); - Assert.Equal(new[] { "pkg:npm/demo@1.0.0", "pkg:npm/demo@1.1.0" }, linkset.GetProperty("purls").EnumerateArray().Select(x => x.GetString()).ToArray()); - Assert.Equal(new[] { "cpe:/a:vendor:product:1.0", "cpe:/a:vendor:product:1.1" }, linkset.GetProperty("cpes").EnumerateArray().Select(x => 
x.GetString()).ToArray()); - - var references = linkset.GetProperty("references").EnumerateArray().ToArray(); - Assert.Equal(2, references.Length); - Assert.Equal("advisory", references[0].GetProperty("type").GetString()); - Assert.Equal("https://example.test/advisory-1", references[0].GetProperty("url").GetString()); - Assert.Equal("patch", references[1].GetProperty("type").GetString()); - - Assert.False(root.GetProperty("hasMore").GetBoolean()); - Assert.True(root.GetProperty("nextCursor").ValueKind == JsonValueKind.Null); - } - - [Fact] - public async Task ObservationsEndpoint_AppliesObservationIdFilter() - { - await SeedObservationDocumentsAsync(BuildSampleObservationDocuments()); - - using var client = _factory.CreateClient(); - var observationId = Uri.EscapeDataString("tenant-a:ghsa:beta:1"); - var response = await client.GetAsync($"/concelier/observations?tenant=tenant-a&observationId={observationId}&cpe=cpe:/a:vendor:product:1.1"); - if (!response.IsSuccessStatusCode) - { - var body = await response.Content.ReadAsStringAsync(); - throw new XunitException($"/concelier/observations filter failed: {(int)response.StatusCode} {body}"); - } - - using var document = await response.Content.ReadFromJsonAsync<JsonDocument>(); - Assert.NotNull(document); - var root = document!.RootElement; - var observations = root.GetProperty("observations").EnumerateArray().ToArray(); - Assert.Single(observations); - Assert.Equal("tenant-a:ghsa:beta:1", observations[0].GetProperty("observationId").GetString()); - Assert.Equal(new[] { "pkg:npm/demo@1.1.0" }, observations[0].GetProperty("linkset").GetProperty("purls").EnumerateArray().Select(x => x.GetString()).ToArray()); - Assert.Equal(new[] { "cpe:/a:vendor:product:1.1" }, observations[0].GetProperty("linkset").GetProperty("cpes").EnumerateArray().Select(x => x.GetString()).ToArray()); - - Assert.False(root.GetProperty("hasMore").GetBoolean()); - Assert.True(root.GetProperty("nextCursor").ValueKind == JsonValueKind.Null); - } - - [Fact] - public async Task ObservationsEndpoint_SupportsPagination() - { - await SeedObservationDocumentsAsync(BuildSampleObservationDocuments()); - - using var client = _factory.CreateClient(); - - var firstResponse = await client.GetAsync("/concelier/observations?tenant=tenant-a&limit=1"); - firstResponse.EnsureSuccessStatusCode(); - using var firstDocument = await firstResponse.Content.ReadFromJsonAsync<JsonDocument>(); - Assert.NotNull(firstDocument); - var firstRoot = firstDocument!.RootElement; - var firstObservations = firstRoot.GetProperty("observations").EnumerateArray().ToArray(); - Assert.Single(firstObservations); - var nextCursor = firstRoot.GetProperty("nextCursor").GetString(); - Assert.True(firstRoot.GetProperty("hasMore").GetBoolean()); - Assert.False(string.IsNullOrWhiteSpace(nextCursor)); - - var secondResponse = await client.GetAsync($"/concelier/observations?tenant=tenant-a&limit=2&cursor={Uri.EscapeDataString(nextCursor!)}"); - secondResponse.EnsureSuccessStatusCode(); - using var secondDocument = await secondResponse.Content.ReadFromJsonAsync<JsonDocument>(); - Assert.NotNull(secondDocument); - var secondRoot = secondDocument!.RootElement; - var secondObservations = secondRoot.GetProperty("observations").EnumerateArray().ToArray(); - Assert.Single(secondObservations); - Assert.False(secondRoot.GetProperty("hasMore").GetBoolean()); - Assert.True(secondRoot.GetProperty("nextCursor").ValueKind == JsonValueKind.Null); - Assert.Equal("tenant-a:nvd:alpha:1", 
secondObservations[0].GetProperty("observationId").GetString()); - } - - [Fact] - public async Task ObservationsEndpoint_ReturnsBadRequestWhenTenantMissing() - { - using var client = _factory.CreateClient(); - var response = await client.GetAsync("/concelier/observations"); - var body = await response.Content.ReadAsStringAsync(); - Assert.True(response.StatusCode == HttpStatusCode.BadRequest, $"Expected 400 but got {(int)response.StatusCode}: {body}"); - } - - [Fact] - public async Task AdvisoryIngestEndpoint_PersistsDocumentAndSupportsReadback() - { - using var client = _factory.CreateClient(); - client.DefaultRequestHeaders.Add("X-Stella-Tenant", "tenant-ingest"); - - var ingestRequest = BuildAdvisoryIngestRequest( - contentHash: "sha256:abc123", - upstreamId: "GHSA-INGEST-0001"); - - var ingestResponse = await client.PostAsJsonAsync("/ingest/advisory", ingestRequest); - Assert.Equal(HttpStatusCode.Created, ingestResponse.StatusCode); - - var ingestPayload = await ingestResponse.Content.ReadFromJsonAsync<AdvisoryIngestResponse>(); - Assert.NotNull(ingestPayload); - Assert.True(ingestPayload!.Inserted); - Assert.False(string.IsNullOrWhiteSpace(ingestPayload.Id)); - Assert.Equal("tenant-ingest", ingestPayload.Tenant); - Assert.Equal("sha256:abc123", ingestPayload.ContentHash); - Assert.NotNull(ingestResponse.Headers.Location); - var locationValue = ingestResponse.Headers.Location!.ToString(); - Assert.False(string.IsNullOrWhiteSpace(locationValue)); - var lastSlashIndex = locationValue.LastIndexOf('/', StringComparison.Ordinal); - var idSegment = lastSlashIndex >= 0 - ? locationValue[(lastSlashIndex + 1)..] - : locationValue; - var decodedSegment = Uri.UnescapeDataString(idSegment); - Assert.Equal(ingestPayload.Id, decodedSegment); - - var duplicateResponse = await client.PostAsJsonAsync("/ingest/advisory", BuildAdvisoryIngestRequest( - contentHash: "sha256:abc123", - upstreamId: "GHSA-INGEST-0001")); - Assert.Equal(HttpStatusCode.OK, duplicateResponse.StatusCode); - var duplicatePayload = await duplicateResponse.Content.ReadFromJsonAsync<AdvisoryIngestResponse>(); - Assert.NotNull(duplicatePayload); - Assert.False(duplicatePayload!.Inserted); - - using (var getRequest = new HttpRequestMessage(HttpMethod.Get, $"/advisories/raw/{ingestPayload.Id}")) - { - getRequest.Headers.Add("X-Stella-Tenant", "tenant-ingest"); - var getResponse = await client.SendAsync(getRequest); - getResponse.EnsureSuccessStatusCode(); - - var record = await getResponse.Content.ReadFromJsonAsync<AdvisoryRawRecordResponse>(); - Assert.NotNull(record); - Assert.Equal(ingestPayload.Id, record!.Id); - Assert.Equal("tenant-ingest", record.Tenant); - Assert.Equal("sha256:abc123", record.Document.Upstream.ContentHash); - } - - using (var listRequest = new HttpRequestMessage(HttpMethod.Get, "/advisories/raw?limit=10")) - { - listRequest.Headers.Add("X-Stella-Tenant", "tenant-ingest"); - var listResponse = await client.SendAsync(listRequest); - listResponse.EnsureSuccessStatusCode(); - - var listPayload = await listResponse.Content.ReadFromJsonAsync<AdvisoryRawListResponse>(); - Assert.NotNull(listPayload); - var record = Assert.Single(listPayload!.Records); - Assert.Equal(ingestPayload.Id, record.Id); - } - } - - [Fact] - public async Task AocVerifyEndpoint_ReturnsSummaryForTenant() - { - using var client = _factory.CreateClient(); - client.DefaultRequestHeaders.Add("X-Stella-Tenant", "tenant-verify"); - - await client.PostAsJsonAsync("/ingest/advisory", BuildAdvisoryIngestRequest( - contentHash: "sha256:verify-1", - 
upstreamId: "GHSA-VERIFY-001")); - - var verifyResponse = await client.PostAsJsonAsync("/aoc/verify", new AocVerifyRequest(null, null, null, null, null)); - verifyResponse.EnsureSuccessStatusCode(); - - var verifyPayload = await verifyResponse.Content.ReadFromJsonAsync<AocVerifyResponse>(); - Assert.NotNull(verifyPayload); - Assert.Equal("tenant-verify", verifyPayload!.Tenant); - Assert.True(verifyPayload.Checked.Advisories >= 1); - Assert.Equal(0, verifyPayload.Checked.Vex); - Assert.True(verifyPayload.Metrics.IngestionWriteTotal >= verifyPayload.Checked.Advisories); - Assert.Empty(verifyPayload.Violations); - Assert.False(verifyPayload.Truncated); - } - - [Fact] - public async Task AocVerifyEndpoint_ReturnsViolationsForGuardFailures() - { - await SeedAdvisoryRawDocumentsAsync( - CreateAdvisoryRawDocument( - tenant: "tenant-verify-violations", - vendor: "osv", - upstreamId: "GHSA-VERIFY-ERR", - contentHash: string.Empty, - raw: new BsonDocument - { - { "id", "GHSA-VERIFY-ERR" } - })); - - using var client = _factory.CreateClient(); - client.DefaultRequestHeaders.Add("X-Stella-Tenant", "tenant-verify-violations"); - - var verifyResponse = await client.PostAsJsonAsync("/aoc/verify", new AocVerifyRequest(null, null, null, null, null)); - verifyResponse.EnsureSuccessStatusCode(); - - var verifyPayload = await verifyResponse.Content.ReadFromJsonAsync<AocVerifyResponse>(); - Assert.NotNull(verifyPayload); - Assert.Equal("tenant-verify-violations", verifyPayload!.Tenant); - Assert.True(verifyPayload.Checked.Advisories >= 1); - var violation = Assert.Single(verifyPayload.Violations); - Assert.Equal("ERR_AOC_001", violation.Code); - Assert.True(violation.Count >= 1); - Assert.NotEmpty(violation.Examples); - } - - [Fact] - public async Task AdvisoryRawListEndpoint_SupportsCursorPagination() - { - using var client = _factory.CreateClient(); - client.DefaultRequestHeaders.Add("X-Stella-Tenant", "tenant-list"); - - await client.PostAsJsonAsync("/ingest/advisory", BuildAdvisoryIngestRequest("sha256:list-1", "GHSA-LIST-001")); - await client.PostAsJsonAsync("/ingest/advisory", BuildAdvisoryIngestRequest("sha256:list-2", "GHSA-LIST-002")); - await client.PostAsJsonAsync("/ingest/advisory", BuildAdvisoryIngestRequest("sha256:list-3", "GHSA-LIST-003")); - - using var firstRequest = new HttpRequestMessage(HttpMethod.Get, "/advisories/raw?limit=2"); - firstRequest.Headers.Add("X-Stella-Tenant", "tenant-list"); - var firstResponse = await client.SendAsync(firstRequest); - firstResponse.EnsureSuccessStatusCode(); - - var firstPage = await firstResponse.Content.ReadFromJsonAsync<AdvisoryRawListResponse>(); - Assert.NotNull(firstPage); - Assert.Equal(2, firstPage!.Records.Count); - Assert.True(firstPage.HasMore); - Assert.False(string.IsNullOrWhiteSpace(firstPage.NextCursor)); - - using var secondRequest = new HttpRequestMessage(HttpMethod.Get, $"/advisories/raw?cursor={Uri.EscapeDataString(firstPage.NextCursor!)}"); - secondRequest.Headers.Add("X-Stella-Tenant", "tenant-list"); - var secondResponse = await client.SendAsync(secondRequest); - secondResponse.EnsureSuccessStatusCode(); - - var secondPage = await secondResponse.Content.ReadFromJsonAsync<AdvisoryRawListResponse>(); - Assert.NotNull(secondPage); - Assert.Single(secondPage!.Records); - Assert.False(secondPage.HasMore); - Assert.Null(secondPage.NextCursor); - - var firstIds = firstPage.Records.Select(record => record.Id).ToArray(); - var secondIds = secondPage.Records.Select(record => record.Id).ToArray(); - Assert.Empty(firstIds.Intersect(secondIds)); 
- } - - [Fact] - public async Task AdvisoryIngestEndpoint_EmitsMetricsWithExpectedTags() - { - var measurements = await CaptureMetricsAsync( - IngestionMetrics.MeterName, - "ingestion_write_total", - async () => - { - using var client = _factory.CreateClient(); - client.DefaultRequestHeaders.Add("X-Stella-Tenant", "tenant-metrics"); - - await client.PostAsJsonAsync("/ingest/advisory", BuildAdvisoryIngestRequest("sha256:metric-1", "GHSA-METRIC-001")); - await client.PostAsJsonAsync("/ingest/advisory", BuildAdvisoryIngestRequest("sha256:metric-1", "GHSA-METRIC-001")); - }); - - Assert.Equal(2, measurements.Count); - - var inserted = measurements.FirstOrDefault(measurement => - string.Equals(GetTagValue(measurement, "tenant"), "tenant-metrics", StringComparison.Ordinal) && - string.Equals(GetTagValue(measurement, "result"), "inserted", StringComparison.Ordinal)); - Assert.NotNull(inserted); - Assert.Equal(1, inserted!.Value); - Assert.Equal("osv", GetTagValue(inserted, "source")); - - var duplicate = measurements.FirstOrDefault(measurement => - string.Equals(GetTagValue(measurement, "tenant"), "tenant-metrics", StringComparison.Ordinal) && - string.Equals(GetTagValue(measurement, "result"), "duplicate", StringComparison.Ordinal)); - Assert.NotNull(duplicate); - Assert.Equal(1, duplicate!.Value); - Assert.Equal("osv", GetTagValue(duplicate, "source")); - } - - [Fact] - public async Task AocVerifyEndpoint_EmitsVerificationMetric() - { - var measurements = await CaptureMetricsAsync( - IngestionMetrics.MeterName, - "verify_runs_total", - async () => - { - using var client = _factory.CreateClient(); - client.DefaultRequestHeaders.Add("X-Stella-Tenant", "tenant-verify-metrics"); - - await client.PostAsJsonAsync("/ingest/advisory", BuildAdvisoryIngestRequest("sha256:verify-metric", "GHSA-VERIFY-METRIC")); - var verifyResponse = await client.PostAsJsonAsync("/aoc/verify", new AocVerifyRequest(null, null, null, null, null)); - verifyResponse.EnsureSuccessStatusCode(); - }); - - var measurement = Assert.Single(measurements); - Assert.Equal("tenant-verify-metrics", GetTagValue(measurement, "tenant")); - Assert.Equal("ok", GetTagValue(measurement, "result")); - Assert.Equal(1, measurement.Value); - } - - [Fact] - public async Task AdvisoryIngestEndpoint_RejectsCrossTenantWhenAuthenticated() - { - var environment = new Dictionary<string, string?> - { - ["CONCELIER_AUTHORITY__ENABLED"] = "true", - ["CONCELIER_AUTHORITY__ALLOWANONYMOUSFALLBACK"] = "false", - ["CONCELIER_AUTHORITY__ISSUER"] = TestAuthorityIssuer, - ["CONCELIER_AUTHORITY__REQUIREHTTPSMETADATA"] = "false", - ["CONCELIER_AUTHORITY__AUDIENCES__0"] = TestAuthorityAudience, - ["CONCELIER_AUTHORITY__CLIENTID"] = "webservice-tests", - ["CONCELIER_AUTHORITY__CLIENTSECRET"] = "unused", - }; - - using var factory = new ConcelierApplicationFactory( - _runner.ConnectionString, - authority => - { - authority.Enabled = true; - authority.AllowAnonymousFallback = false; - authority.Issuer = TestAuthorityIssuer; - authority.RequireHttpsMetadata = false; - authority.Audiences.Clear(); - authority.Audiences.Add(TestAuthorityAudience); - authority.ClientId = "webservice-tests"; - authority.ClientSecret = "unused"; - }, - environment); - - using var client = factory.CreateClient(); - var token = CreateTestToken("tenant-auth", StellaOpsScopes.AdvisoryIngest); - client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", token); - client.DefaultRequestHeaders.Add("X-Stella-Tenant", "tenant-auth"); - - var ingestResponse = await 
client.PostAsJsonAsync("/ingest/advisory", BuildAdvisoryIngestRequest("sha256:auth-1", "GHSA-AUTH-001")); - Assert.Equal(HttpStatusCode.Created, ingestResponse.StatusCode); - - client.DefaultRequestHeaders.Remove("X-Stella-Tenant"); - client.DefaultRequestHeaders.Add("X-Stella-Tenant", "tenant-other"); - - var crossTenantResponse = await client.PostAsJsonAsync("/ingest/advisory", BuildAdvisoryIngestRequest("sha256:auth-2", "GHSA-AUTH-002")); - Assert.Equal(HttpStatusCode.Forbidden, crossTenantResponse.StatusCode); - } - - [Fact] - public async Task AdvisoryIngestEndpoint_ReturnsGuardViolationWhenContentHashMissing() - { - using var client = _factory.CreateClient(); - client.DefaultRequestHeaders.Add("X-Stella-Tenant", "tenant-violation"); - - var invalidRequest = BuildAdvisoryIngestRequest(contentHash: string.Empty, upstreamId: "GHSA-INVALID-1"); - var response = await client.PostAsJsonAsync("/ingest/advisory", invalidRequest); - - Assert.Equal(HttpStatusCode.UnprocessableEntity, response.StatusCode); - var problemJson = await response.Content.ReadAsStringAsync(); - using var document = JsonDocument.Parse(problemJson); - var root = document.RootElement; - Assert.Equal("Aggregation-Only Contract violation", root.GetProperty("title").GetString()); - Assert.Equal(422, root.GetProperty("status").GetInt32()); - Assert.True(root.TryGetProperty("violations", out var violations), "Problem response missing violations payload."); - Assert.True(root.TryGetProperty("code", out var codeElement), "Problem response missing code payload."); - Assert.Equal("ERR_AOC_004", codeElement.GetString()); - var violation = Assert.Single(violations.EnumerateArray()); - Assert.Equal("ERR_AOC_004", violation.GetProperty("code").GetString()); - } - - [Fact] - public async Task JobsEndpointsReturnExpectedStatuses() - { - using var client = _factory.CreateClient(); - - var definitions = await client.GetAsync("/jobs/definitions"); - if (!definitions.IsSuccessStatusCode) - { - var body = await definitions.Content.ReadAsStringAsync(); - throw new Xunit.Sdk.XunitException($"/jobs/definitions failed: {(int)definitions.StatusCode} {body}"); - } - - var trigger = await client.PostAsync("/jobs/unknown", new StringContent("{}", System.Text.Encoding.UTF8, "application/json")); - if (trigger.StatusCode != HttpStatusCode.NotFound) - { - var payload = await trigger.Content.ReadAsStringAsync(); - throw new Xunit.Sdk.XunitException($"/jobs/unknown expected 404, got {(int)trigger.StatusCode}: {payload}"); - } - var problem = await trigger.Content.ReadFromJsonAsync<ProblemDocument>(); - Assert.NotNull(problem); - Assert.Equal("https://stellaops.org/problems/not-found", problem!.Type); - Assert.Equal(404, problem.Status); - } - - [Fact] - public async Task JobRunEndpointReturnsProblemWhenNotFound() - { - using var client = _factory.CreateClient(); - var response = await client.GetAsync($"/jobs/{Guid.NewGuid()}"); - if (response.StatusCode != HttpStatusCode.NotFound) - { - var body = await response.Content.ReadAsStringAsync(); - throw new Xunit.Sdk.XunitException($"/jobs/{{id}} expected 404, got {(int)response.StatusCode}: {body}"); - } - var problem = await response.Content.ReadFromJsonAsync<ProblemDocument>(); - Assert.NotNull(problem); - Assert.Equal("https://stellaops.org/problems/not-found", problem!.Type); - } - - [Fact] - public async Task JobTriggerMapsCoordinatorOutcomes() - { - var handler = _factory.Services.GetRequiredService<StubJobCoordinator>(); - using var client = _factory.CreateClient(); - - handler.NextResult = 
JobTriggerResult.AlreadyRunning("busy"); - var conflict = await client.PostAsync("/jobs/test", JsonContent.Create(new JobTriggerRequest())); - if (conflict.StatusCode != HttpStatusCode.Conflict) - { - var payload = await conflict.Content.ReadAsStringAsync(); - throw new Xunit.Sdk.XunitException($"Conflict path expected 409, got {(int)conflict.StatusCode}: {payload}"); - } - var conflictProblem = await conflict.Content.ReadFromJsonAsync<ProblemDocument>(); - Assert.NotNull(conflictProblem); - Assert.Equal("https://stellaops.org/problems/conflict", conflictProblem!.Type); - - handler.NextResult = JobTriggerResult.Accepted(new JobRunSnapshot(Guid.NewGuid(), "demo", JobRunStatus.Pending, DateTimeOffset.UtcNow, null, null, "api", null, null, null, null, new Dictionary<string, object?>())); - var accepted = await client.PostAsync("/jobs/test", JsonContent.Create(new JobTriggerRequest())); - if (accepted.StatusCode != HttpStatusCode.Accepted) - { - var payload = await accepted.Content.ReadAsStringAsync(); - throw new Xunit.Sdk.XunitException($"Accepted path expected 202, got {(int)accepted.StatusCode}: {payload}"); - } - Assert.NotNull(accepted.Headers.Location); - var acceptedPayload = await accepted.Content.ReadFromJsonAsync<JobRunPayload>(); - Assert.NotNull(acceptedPayload); - - handler.NextResult = JobTriggerResult.Failed(new JobRunSnapshot(Guid.NewGuid(), "demo", JobRunStatus.Failed, DateTimeOffset.UtcNow, null, DateTimeOffset.UtcNow, "api", null, "err", null, null, new Dictionary<string, object?>()), "boom"); - var failed = await client.PostAsync("/jobs/test", JsonContent.Create(new JobTriggerRequest())); - if (failed.StatusCode != HttpStatusCode.InternalServerError) - { - var payload = await failed.Content.ReadAsStringAsync(); - throw new Xunit.Sdk.XunitException($"Failed path expected 500, got {(int)failed.StatusCode}: {payload}"); - } - var failureProblem = await failed.Content.ReadFromJsonAsync<ProblemDocument>(); - Assert.NotNull(failureProblem); - Assert.Equal("https://stellaops.org/problems/job-failure", failureProblem!.Type); - } - - [Fact] - public async Task JobsEndpointsExposeJobData() - { - var handler = _factory.Services.GetRequiredService<StubJobCoordinator>(); - var now = DateTimeOffset.UtcNow; - var run = new JobRunSnapshot( - Guid.NewGuid(), - "demo", - JobRunStatus.Succeeded, - now, - now, - now.AddSeconds(2), - "api", - "hash", - null, - TimeSpan.FromMinutes(5), - TimeSpan.FromMinutes(1), - new Dictionary<string, object?> { ["key"] = "value" }); - - handler.Definitions = new[] - { - new JobDefinition("demo", typeof(DemoJob), TimeSpan.FromMinutes(5), TimeSpan.FromMinutes(1), "*/5 * * * *", true) - }; - handler.LastRuns["demo"] = run; - handler.RecentRuns = new[] { run }; - handler.ActiveRuns = Array.Empty<JobRunSnapshot>(); - handler.Runs[run.RunId] = run; - - try - { - using var client = _factory.CreateClient(); - - var definitions = await client.GetFromJsonAsync<List<JobDefinitionPayload>>("/jobs/definitions"); - Assert.NotNull(definitions); - Assert.Single(definitions!); - Assert.Equal("demo", definitions![0].Kind); - Assert.NotNull(definitions[0].LastRun); - Assert.Equal(run.RunId, definitions[0].LastRun!.RunId); - - var runPayload = await client.GetFromJsonAsync<JobRunPayload>($"/jobs/{run.RunId}"); - Assert.NotNull(runPayload); - Assert.Equal(run.RunId, runPayload!.RunId); - Assert.Equal("Succeeded", runPayload.Status); - - var runs = await client.GetFromJsonAsync<List<JobRunPayload>>("/jobs?kind=demo&limit=5"); - Assert.NotNull(runs); - Assert.Single(runs!); - 
Assert.Equal(run.RunId, runs![0].RunId); - - var runsByDefinition = await client.GetFromJsonAsync<List<JobRunPayload>>("/jobs/definitions/demo/runs"); - Assert.NotNull(runsByDefinition); - Assert.Single(runsByDefinition!); - - var active = await client.GetFromJsonAsync<List<JobRunPayload>>("/jobs/active"); - Assert.NotNull(active); - Assert.Empty(active!); - } - finally - { - handler.Definitions = Array.Empty<JobDefinition>(); - handler.RecentRuns = Array.Empty<JobRunSnapshot>(); - handler.ActiveRuns = Array.Empty<JobRunSnapshot>(); - handler.Runs.Clear(); - handler.LastRuns.Clear(); - } - } - - [Fact] - public async Task AdvisoryReplayEndpointReturnsLatestStatement() - { - var vulnerabilityKey = "CVE-2025-9000"; - var advisory = new Advisory( - advisoryKey: vulnerabilityKey, - title: "Replay Test", - summary: "Example summary", - language: "en", - published: DateTimeOffset.Parse("2025-01-01T00:00:00Z", CultureInfo.InvariantCulture), - modified: DateTimeOffset.Parse("2025-01-02T00:00:00Z", CultureInfo.InvariantCulture), - severity: "medium", - exploitKnown: false, - aliases: new[] { vulnerabilityKey }, - references: Array.Empty<AdvisoryReference>(), - affectedPackages: Array.Empty<AffectedPackage>(), - cvssMetrics: Array.Empty<CvssMetric>(), - provenance: Array.Empty<AdvisoryProvenance>()); - - var statementId = Guid.NewGuid(); - using (var scope = _factory.Services.CreateScope()) - { - var eventLog = scope.ServiceProvider.GetRequiredService<IAdvisoryEventLog>(); - var appendRequest = new AdvisoryEventAppendRequest(new[] - { - new AdvisoryStatementInput( - vulnerabilityKey, - advisory, - advisory.Modified ?? advisory.Published ?? DateTimeOffset.UtcNow, - Array.Empty<Guid>(), - StatementId: statementId, - AdvisoryKey: advisory.AdvisoryKey) - }); - - await eventLog.AppendAsync(appendRequest, CancellationToken.None); - } - - using var client = _factory.CreateClient(); - var response = await client.GetAsync($"/concelier/advisories/{vulnerabilityKey}/replay"); - - Assert.Equal(HttpStatusCode.OK, response.StatusCode); - var payload = await response.Content.ReadFromJsonAsync<ReplayResponse>(); - Assert.NotNull(payload); - Assert.Equal(vulnerabilityKey, payload!.VulnerabilityKey, ignoreCase: true); - var statement = Assert.Single(payload.Statements); - Assert.Equal(statementId, statement.StatementId); - Assert.Equal(advisory.AdvisoryKey, statement.Advisory.AdvisoryKey); - Assert.False(string.IsNullOrWhiteSpace(statement.StatementHash)); - Assert.True(payload.Conflicts is null || payload.Conflicts!.Count == 0); - } - - [Fact] - public async Task AdvisoryReplayEndpointReturnsConflictExplainer() - { - var vulnerabilityKey = "CVE-2025-9100"; - var statementId = Guid.NewGuid(); - var conflictId = Guid.NewGuid(); - var recordedAt = DateTimeOffset.Parse("2025-02-01T00:00:00Z", CultureInfo.InvariantCulture); - - using (var scope = _factory.Services.CreateScope()) - { - var eventLog = scope.ServiceProvider.GetRequiredService<IAdvisoryEventLog>(); - var advisory = new Advisory( - advisoryKey: vulnerabilityKey, - title: "Base advisory", - summary: "Baseline summary", - language: "en", - published: recordedAt.AddDays(-1), - modified: recordedAt, - severity: "critical", - exploitKnown: false, - aliases: new[] { vulnerabilityKey }, - references: Array.Empty<AdvisoryReference>(), - affectedPackages: Array.Empty<AffectedPackage>(), - cvssMetrics: Array.Empty<CvssMetric>(), - provenance: Array.Empty<AdvisoryProvenance>()); - - var statementInput = new AdvisoryStatementInput( - vulnerabilityKey, - advisory, - 
recordedAt, - Array.Empty<Guid>(), - StatementId: statementId, - AdvisoryKey: advisory.AdvisoryKey); - - await eventLog.AppendAsync(new AdvisoryEventAppendRequest(new[] { statementInput }), CancellationToken.None); - - var explainer = new MergeConflictExplainerPayload( - Type: "severity", - Reason: "mismatch", - PrimarySources: new[] { "vendor" }, - PrimaryRank: 1, - SuppressedSources: new[] { "nvd" }, - SuppressedRank: 5, - PrimaryValue: "CRITICAL", - SuppressedValue: "MEDIUM"); - - using var conflictDoc = JsonDocument.Parse(explainer.ToCanonicalJson()); - var conflictInput = new AdvisoryConflictInput( - vulnerabilityKey, - conflictDoc, - recordedAt, - new[] { statementId }, - ConflictId: conflictId); - - await eventLog.AppendAsync(new AdvisoryEventAppendRequest(Array.Empty<AdvisoryStatementInput>(), new[] { conflictInput }), CancellationToken.None); - } - - using var client = _factory.CreateClient(); - var response = await client.GetAsync($"/concelier/advisories/{vulnerabilityKey}/replay"); - Assert.Equal(HttpStatusCode.OK, response.StatusCode); - var payload = await response.Content.ReadFromJsonAsync<ReplayResponse>(); - Assert.NotNull(payload); - var conflict = Assert.Single(payload!.Conflicts); - Assert.Equal(conflictId, conflict.ConflictId); - Assert.Equal("severity", conflict.Explainer.Type); - Assert.Equal("mismatch", conflict.Explainer.Reason); - Assert.Equal("CRITICAL", conflict.Explainer.PrimaryValue); - Assert.Equal("MEDIUM", conflict.Explainer.SuppressedValue); - Assert.Equal(conflict.Explainer.ComputeHashHex(), conflict.ConflictHash); - } - - [Fact] - public async Task MirrorEndpointsServeConfiguredArtifacts() - { - using var temp = new TempDirectory(); - var exportId = "20251019T120000Z"; - var exportRoot = Path.Combine(temp.Path, exportId); - var mirrorRoot = Path.Combine(exportRoot, "mirror"); - var domainRoot = Path.Combine(mirrorRoot, "primary"); - Directory.CreateDirectory(domainRoot); - - await File.WriteAllTextAsync( - Path.Combine(mirrorRoot, "index.json"), - """{"schemaVersion":1,"domains":[]}"""); - await File.WriteAllTextAsync( - Path.Combine(domainRoot, "manifest.json"), - """{"domainId":"primary"}"""); - await File.WriteAllTextAsync( - Path.Combine(domainRoot, "bundle.json"), - """{"advisories":[]}"""); - await File.WriteAllTextAsync( - Path.Combine(domainRoot, "bundle.json.jws"), - "test-signature"); - - var environment = new Dictionary<string, string?> - { - ["CONCELIER_MIRROR__ENABLED"] = "true", - ["CONCELIER_MIRROR__EXPORTROOT"] = temp.Path, - ["CONCELIER_MIRROR__ACTIVEEXPORTID"] = exportId, - ["CONCELIER_MIRROR__DOMAINS__0__ID"] = "primary", - ["CONCELIER_MIRROR__DOMAINS__0__DISPLAYNAME"] = "Primary", - ["CONCELIER_MIRROR__DOMAINS__0__REQUIREAUTHENTICATION"] = "false", - ["CONCELIER_MIRROR__DOMAINS__0__MAXDOWNLOADREQUESTSPERHOUR"] = "5", - ["CONCELIER_MIRROR__MAXINDEXREQUESTSPERHOUR"] = "5" - }; - - using var factory = new ConcelierApplicationFactory(_runner.ConnectionString, environmentOverrides: environment); - using var client = factory.CreateClient(); - - var indexResponse = await client.GetAsync("/concelier/exports/index.json"); - Assert.Equal(HttpStatusCode.OK, indexResponse.StatusCode); - var indexContent = await indexResponse.Content.ReadAsStringAsync(); - Assert.Contains(@"""schemaVersion"":1", indexContent, StringComparison.Ordinal); - - var manifestResponse = await client.GetAsync("/concelier/exports/mirror/primary/manifest.json"); - Assert.Equal(HttpStatusCode.OK, manifestResponse.StatusCode); - var manifestContent = await 
manifestResponse.Content.ReadAsStringAsync(); - Assert.Contains(@"""domainId"":""primary""", manifestContent, StringComparison.Ordinal); - - var bundleResponse = await client.GetAsync("/concelier/exports/mirror/primary/bundle.json.jws"); - Assert.Equal(HttpStatusCode.OK, bundleResponse.StatusCode); - var signatureContent = await bundleResponse.Content.ReadAsStringAsync(); - Assert.Equal("test-signature", signatureContent); - } - - [Fact] - public async Task MirrorEndpointsEnforceAuthenticationForProtectedDomains() - { - using var temp = new TempDirectory(); - var exportId = "20251019T120000Z"; - var secureRoot = Path.Combine(temp.Path, exportId, "mirror", "secure"); - Directory.CreateDirectory(secureRoot); - - await File.WriteAllTextAsync( - Path.Combine(temp.Path, exportId, "mirror", "index.json"), - """{"schemaVersion":1,"domains":[]}"""); - await File.WriteAllTextAsync( - Path.Combine(secureRoot, "manifest.json"), - """{"domainId":"secure"}"""); - - var environment = new Dictionary<string, string?> - { - ["CONCELIER_MIRROR__ENABLED"] = "true", - ["CONCELIER_MIRROR__EXPORTROOT"] = temp.Path, - ["CONCELIER_MIRROR__ACTIVEEXPORTID"] = exportId, - ["CONCELIER_MIRROR__DOMAINS__0__ID"] = "secure", - ["CONCELIER_MIRROR__DOMAINS__0__REQUIREAUTHENTICATION"] = "true", - ["CONCELIER_MIRROR__DOMAINS__0__MAXDOWNLOADREQUESTSPERHOUR"] = "5", - ["CONCELIER_AUTHORITY__ENABLED"] = "true", - ["CONCELIER_AUTHORITY__ALLOWANONYMOUSFALLBACK"] = "false", - ["CONCELIER_AUTHORITY__ISSUER"] = "https://authority.example", - ["CONCELIER_AUTHORITY__REQUIREHTTPSMETADATA"] = "false", - ["CONCELIER_AUTHORITY__AUDIENCES__0"] = "api://concelier", - ["CONCELIER_AUTHORITY__REQUIREDSCOPES__0"] = StellaOpsScopes.ConcelierJobsTrigger, - ["CONCELIER_AUTHORITY__CLIENTID"] = "concelier-jobs", - ["CONCELIER_AUTHORITY__CLIENTSECRET"] = "secret", - ["CONCELIER_AUTHORITY__CLIENTSCOPES__0"] = StellaOpsScopes.ConcelierJobsTrigger - }; - - using var factory = new ConcelierApplicationFactory( - _runner.ConnectionString, - authority => - { - authority.Enabled = true; - authority.AllowAnonymousFallback = false; - authority.Issuer = "https://authority.example"; - authority.RequireHttpsMetadata = false; - authority.Audiences.Clear(); - authority.Audiences.Add("api://concelier"); - authority.RequiredScopes.Clear(); - authority.RequiredScopes.Add(StellaOpsScopes.ConcelierJobsTrigger); - authority.ClientId = "concelier-jobs"; - authority.ClientSecret = "secret"; - }, - environment); - - using var client = factory.CreateClient(); - var response = await client.GetAsync("/concelier/exports/mirror/secure/manifest.json"); - Assert.Equal(HttpStatusCode.Unauthorized, response.StatusCode); - var authHeader = Assert.Single(response.Headers.WwwAuthenticate); - Assert.Equal("Bearer", authHeader.Scheme); - } - - [Fact] - public async Task MirrorEndpointsRespectRateLimits() - { - using var temp = new TempDirectory(); - var exportId = "20251019T130000Z"; - var exportRoot = Path.Combine(temp.Path, exportId); - var mirrorRoot = Path.Combine(exportRoot, "mirror"); - Directory.CreateDirectory(mirrorRoot); - - await File.WriteAllTextAsync( - Path.Combine(mirrorRoot, "index.json"), - """{\"schemaVersion\":1,\"domains\":[]}""" - ); - - var environment = new Dictionary<string, string?> - { - ["CONCELIER_MIRROR__ENABLED"] = "true", - ["CONCELIER_MIRROR__EXPORTROOT"] = temp.Path, - ["CONCELIER_MIRROR__ACTIVEEXPORTID"] = exportId, - ["CONCELIER_MIRROR__MAXINDEXREQUESTSPERHOUR"] = "1", - ["CONCELIER_MIRROR__DOMAINS__0__ID"] = "primary", - 
["CONCELIER_MIRROR__DOMAINS__0__REQUIREAUTHENTICATION"] = "false", - ["CONCELIER_MIRROR__DOMAINS__0__MAXDOWNLOADREQUESTSPERHOUR"] = "1" - }; - - using var factory = new ConcelierApplicationFactory(_runner.ConnectionString, environmentOverrides: environment); - using var client = factory.CreateClient(); - - var okResponse = await client.GetAsync("/concelier/exports/index.json"); - Assert.Equal(HttpStatusCode.OK, okResponse.StatusCode); - - var limitedResponse = await client.GetAsync("/concelier/exports/index.json"); - Assert.Equal((HttpStatusCode)429, limitedResponse.StatusCode); - Assert.NotNull(limitedResponse.Headers.RetryAfter); - Assert.True(limitedResponse.Headers.RetryAfter!.Delta.HasValue); - Assert.True(limitedResponse.Headers.RetryAfter!.Delta!.Value.TotalSeconds > 0); - } - - - [Fact] - public async Task JobsEndpointsAllowBypassWhenAuthorityEnabled() - { - var environment = new Dictionary<string, string?> - { - ["CONCELIER_AUTHORITY__ENABLED"] = "true", - ["CONCELIER_AUTHORITY__ALLOWANONYMOUSFALLBACK"] = "false", - ["CONCELIER_AUTHORITY__ISSUER"] = "https://authority.example", - ["CONCELIER_AUTHORITY__REQUIREHTTPSMETADATA"] = "false", - ["CONCELIER_AUTHORITY__AUDIENCES__0"] = "api://concelier", - ["CONCELIER_AUTHORITY__REQUIREDSCOPES__0"] = StellaOpsScopes.ConcelierJobsTrigger, - ["CONCELIER_AUTHORITY__BYPASSNETWORKS__0"] = "127.0.0.1/32", - ["CONCELIER_AUTHORITY__BYPASSNETWORKS__1"] = "::1/128", - ["CONCELIER_AUTHORITY__CLIENTID"] = "concelier-jobs", - ["CONCELIER_AUTHORITY__CLIENTSECRET"] = "test-secret", - ["CONCELIER_AUTHORITY__CLIENTSCOPES__0"] = StellaOpsScopes.ConcelierJobsTrigger, - }; - - using var factory = new ConcelierApplicationFactory( - _runner.ConnectionString, - authority => - { - authority.Enabled = true; - authority.AllowAnonymousFallback = false; - authority.Issuer = "https://authority.example"; - authority.RequireHttpsMetadata = false; - authority.Audiences.Clear(); - authority.Audiences.Add("api://concelier"); - authority.RequiredScopes.Clear(); - authority.RequiredScopes.Add(StellaOpsScopes.ConcelierJobsTrigger); - authority.BypassNetworks.Clear(); - authority.BypassNetworks.Add("127.0.0.1/32"); - authority.BypassNetworks.Add("::1/128"); - authority.ClientId = "concelier-jobs"; - authority.ClientSecret = "test-secret"; - }, - environment); - - var handler = factory.Services.GetRequiredService<StubJobCoordinator>(); - handler.Definitions = new[] { new JobDefinition("demo", typeof(DemoJob), TimeSpan.FromMinutes(5), TimeSpan.FromMinutes(1), null, true) }; - - using var client = factory.CreateClient(); - client.DefaultRequestHeaders.Add("X-Test-RemoteAddr", "127.0.0.1"); - var response = await client.GetAsync("/jobs/definitions"); - - Assert.Equal(HttpStatusCode.OK, response.StatusCode); - - var auditLogs = factory.LoggerProvider.Snapshot("Concelier.Authorization.Audit"); - var bypassLog = Assert.Single(auditLogs, entry => entry.TryGetState("Bypass", out var state) && state is bool flag && flag); - Assert.True(bypassLog.TryGetState("RemoteAddress", out var remoteObj) && string.Equals(remoteObj?.ToString(), "127.0.0.1", StringComparison.Ordinal)); - Assert.True(bypassLog.TryGetState("StatusCode", out var statusObj) && Convert.ToInt32(statusObj) == (int)HttpStatusCode.OK); - } - - [Fact] - public async Task JobsEndpointsRequireAuthWhenFallbackDisabled() - { - var enforcementEnvironment = new Dictionary<string, string?> - { - ["CONCELIER_AUTHORITY__ENABLED"] = "true", - ["CONCELIER_AUTHORITY__ALLOWANONYMOUSFALLBACK"] = "false", - ["CONCELIER_AUTHORITY__ISSUER"] = 
"https://authority.example", - ["CONCELIER_AUTHORITY__REQUIREHTTPSMETADATA"] = "false", - ["CONCELIER_AUTHORITY__AUDIENCES__0"] = "api://concelier", - ["CONCELIER_AUTHORITY__REQUIREDSCOPES__0"] = StellaOpsScopes.ConcelierJobsTrigger, - ["CONCELIER_AUTHORITY__CLIENTID"] = "concelier-jobs", - ["CONCELIER_AUTHORITY__CLIENTSECRET"] = "test-secret", - ["CONCELIER_AUTHORITY__CLIENTSCOPES__0"] = StellaOpsScopes.ConcelierJobsTrigger, - }; - - using var factory = new ConcelierApplicationFactory( - _runner.ConnectionString, - authority => - { - authority.Enabled = true; - authority.AllowAnonymousFallback = false; - authority.Issuer = "https://authority.example"; - authority.RequireHttpsMetadata = false; - authority.Audiences.Clear(); - authority.Audiences.Add("api://concelier"); - authority.RequiredScopes.Clear(); - authority.RequiredScopes.Add(StellaOpsScopes.ConcelierJobsTrigger); - authority.BypassNetworks.Clear(); - authority.ClientId = "concelier-jobs"; - authority.ClientSecret = "test-secret"; - }, - enforcementEnvironment); - - var resolved = factory.Services.GetRequiredService<IOptions<ConcelierOptions>>().Value; - Assert.False(resolved.Authority.AllowAnonymousFallback); - - using var client = factory.CreateClient(); - client.DefaultRequestHeaders.Add("X-Test-RemoteAddr", "127.0.0.1"); - var response = await client.GetAsync("/jobs/definitions"); - - Assert.Equal(HttpStatusCode.Unauthorized, response.StatusCode); - - var auditLogs = factory.LoggerProvider.Snapshot("Concelier.Authorization.Audit"); - var enforcementLog = Assert.Single(auditLogs); - Assert.True(enforcementLog.TryGetState("BypassAllowed", out var bypassAllowedObj) && bypassAllowedObj is bool bypassAllowed && bypassAllowed == false); - Assert.True(enforcementLog.TryGetState("HasPrincipal", out var principalObj) && principalObj is bool hasPrincipal && hasPrincipal == false); - } - - [Fact] - public void AuthorityClientResilienceOptionsAreBound() - { - var environment = new Dictionary<string, string?> - { - ["CONCELIER_AUTHORITY__ENABLED"] = "true", - ["CONCELIER_AUTHORITY__ISSUER"] = "https://authority.example", - ["CONCELIER_AUTHORITY__REQUIREHTTPSMETADATA"] = "false", - ["CONCELIER_AUTHORITY__AUDIENCES__0"] = "api://concelier", - ["CONCELIER_AUTHORITY__REQUIREDSCOPES__0"] = StellaOpsScopes.ConcelierJobsTrigger, - ["CONCELIER_AUTHORITY__CLIENTSCOPES__0"] = StellaOpsScopes.ConcelierJobsTrigger, - ["CONCELIER_AUTHORITY__BACKCHANNELTIMEOUTSECONDS"] = "45", - ["CONCELIER_AUTHORITY__RESILIENCE__ENABLERETRIES"] = "true", - ["CONCELIER_AUTHORITY__RESILIENCE__RETRYDELAYS__0"] = "00:00:02", - ["CONCELIER_AUTHORITY__RESILIENCE__RETRYDELAYS__1"] = "00:00:04", - ["CONCELIER_AUTHORITY__RESILIENCE__ALLOWOFFLINECACHEFALLBACK"] = "false", - ["CONCELIER_AUTHORITY__RESILIENCE__OFFLINECACHETOLERANCE"] = "00:02:30" - }; - - using var factory = new ConcelierApplicationFactory( - _runner.ConnectionString, - authority => - { - authority.Enabled = true; - authority.Issuer = "https://authority.example"; - authority.RequireHttpsMetadata = false; - authority.Audiences.Clear(); - authority.Audiences.Add("api://concelier"); - authority.RequiredScopes.Clear(); - authority.RequiredScopes.Add(StellaOpsScopes.ConcelierJobsTrigger); - authority.ClientScopes.Clear(); - authority.ClientScopes.Add(StellaOpsScopes.ConcelierJobsTrigger); - authority.BackchannelTimeoutSeconds = 45; - }, - environment); - - var monitor = factory.Services.GetRequiredService<IOptionsMonitor<StellaOpsAuthClientOptions>>(); - var options = monitor.CurrentValue; - - 
Assert.Equal("https://authority.example", options.Authority); - Assert.Equal(TimeSpan.FromSeconds(45), options.HttpTimeout); - Assert.Equal(new[] { StellaOpsScopes.ConcelierJobsTrigger }, options.NormalizedScopes); - Assert.Equal(new[] { TimeSpan.FromSeconds(2), TimeSpan.FromSeconds(4) }, options.NormalizedRetryDelays); - Assert.False(options.AllowOfflineCacheFallback); - Assert.Equal(TimeSpan.FromSeconds(150), options.OfflineCacheTolerance); - } - - private async Task SeedObservationDocumentsAsync(IEnumerable<AdvisoryObservationDocument> documents) - { - var client = new MongoClient(_runner.ConnectionString); - var database = client.GetDatabase(MongoStorageDefaults.DefaultDatabaseName); - var collection = database.GetCollection<AdvisoryObservationDocument>(MongoStorageDefaults.Collections.AdvisoryObservations); - - try - { - await database.DropCollectionAsync(MongoStorageDefaults.Collections.AdvisoryObservations); - } - catch (MongoCommandException ex) when (ex.CodeName == "NamespaceNotFound" || ex.Message.Contains("ns not found", StringComparison.OrdinalIgnoreCase)) - { - // Collection does not exist yet; ignore. - } - - var snapshot = documents?.ToArray() ?? Array.Empty<AdvisoryObservationDocument>(); - if (snapshot.Length == 0) - { - return; - } - - await collection.InsertManyAsync(snapshot); - } - - private static AdvisoryObservationDocument[] BuildSampleObservationDocuments() - { - return new[] - { - CreateObservationDocument( - id: "tenant-a:nvd:alpha:1", - tenant: "tenant-a", - createdAt: new DateTime(2025, 1, 5, 0, 0, 0, DateTimeKind.Utc), - aliases: new[] { "cve-2025-0001" }, - purls: new[] { "pkg:npm/demo@1.0.0" }, - cpes: new[] { "cpe:/a:vendor:product:1.0" }, - references: new[] { ("advisory", "https://example.test/advisory-1") }), - CreateObservationDocument( - id: "tenant-a:ghsa:beta:1", - tenant: "tenant-a", - createdAt: new DateTime(2025, 1, 6, 0, 0, 0, DateTimeKind.Utc), - aliases: new[] { "ghsa-2025-xyz", "cve-2025-0001" }, - purls: new[] { "pkg:npm/demo@1.1.0" }, - cpes: new[] { "cpe:/a:vendor:product:1.1" }, - references: new[] { ("patch", "https://example.test/patch-1") }), - CreateObservationDocument( - id: "tenant-b:nvd:alpha:1", - tenant: "tenant-b", - createdAt: new DateTime(2025, 1, 7, 0, 0, 0, DateTimeKind.Utc), - aliases: new[] { "cve-2025-0001" }, - purls: new[] { "pkg:npm/demo@2.0.0" }, - cpes: new[] { "cpe:/a:vendor:product:2.0" }, - references: new[] { ("advisory", "https://example.test/advisory-2") }) - }; - } - - private static AdvisoryObservationDocument CreateObservationDocument( - string id, - string tenant, - DateTime createdAt, - IEnumerable<string>? aliases = null, - IEnumerable<string>? purls = null, - IEnumerable<string>? cpes = null, - IEnumerable<(string Type, string Url)>? 
references = null) - { - return new AdvisoryObservationDocument - { - Id = id, - Tenant = tenant.ToLowerInvariant(), - CreatedAt = createdAt, - Source = new AdvisoryObservationSourceDocument - { - Vendor = "nvd", - Stream = "feed", - Api = "https://example.test/api" - }, - Upstream = new AdvisoryObservationUpstreamDocument - { - UpstreamId = id, - DocumentVersion = null, - FetchedAt = createdAt, - ReceivedAt = createdAt, - ContentHash = $"sha256:{id}", - Signature = new AdvisoryObservationSignatureDocument - { - Present = false - }, - Metadata = new Dictionary<string, string>(StringComparer.Ordinal) - }, - Content = new AdvisoryObservationContentDocument - { - Format = "csaf", - SpecVersion = "2.0", - Raw = BsonDocument.Parse("""{"observation":"%ID%"}""".Replace("%ID%", id)), - Metadata = new Dictionary<string, string>(StringComparer.Ordinal) - }, - Linkset = new AdvisoryObservationLinksetDocument - { - Aliases = aliases?.Select(value => value.Trim().ToLowerInvariant()).ToList(), - Purls = purls?.Select(value => value.Trim()).ToList(), - Cpes = cpes?.Select(value => value.Trim()).ToList(), - References = references is null - ? new List<AdvisoryObservationReferenceDocument>() - : references - .Select(reference => new AdvisoryObservationReferenceDocument - { - Type = reference.Type.Trim().ToLowerInvariant(), - Url = reference.Url.Trim() - }) - .ToList() - }, - Attributes = new Dictionary<string, string>(StringComparer.Ordinal) - }; - } - - private sealed record ReplayResponse( - string VulnerabilityKey, - DateTimeOffset? AsOf, - List<ReplayStatement> Statements, - List<ReplayConflict>? Conflicts); - - private sealed record ReplayStatement( - Guid StatementId, - string VulnerabilityKey, - string AdvisoryKey, - Advisory Advisory, - string StatementHash, - DateTimeOffset AsOf, - DateTimeOffset RecordedAt, - IReadOnlyList<Guid> InputDocumentIds); - - private sealed record ReplayConflict( - Guid ConflictId, - string VulnerabilityKey, - IReadOnlyList<Guid> StatementIds, - string ConflictHash, - DateTimeOffset AsOf, - DateTimeOffset RecordedAt, - string Details, - MergeConflictExplainerPayload Explainer); - - private sealed class ConcelierApplicationFactory : WebApplicationFactory<Program> - { - private readonly string _connectionString; - private readonly string? _previousDsn; - private readonly string? _previousDriver; - private readonly string? _previousTimeout; - private readonly string? _previousTelemetryEnabled; - private readonly string? _previousTelemetryLogging; - private readonly string? _previousTelemetryTracing; - private readonly string? _previousTelemetryMetrics; - private readonly Action<ConcelierOptions.AuthorityOptions>? _authorityConfigure; - private readonly IDictionary<string, string?> _additionalPreviousEnvironment = new Dictionary<string, string?>(StringComparer.OrdinalIgnoreCase); - public CollectingLoggerProvider LoggerProvider { get; } = new(); - - public ConcelierApplicationFactory( - string connectionString, - Action<ConcelierOptions.AuthorityOptions>? authorityConfigure = null, - IDictionary<string, string?>? 
environmentOverrides = null) - { - _connectionString = connectionString; - _authorityConfigure = authorityConfigure; - _previousDsn = Environment.GetEnvironmentVariable("CONCELIER_STORAGE__DSN"); - _previousDriver = Environment.GetEnvironmentVariable("CONCELIER_STORAGE__DRIVER"); - _previousTimeout = Environment.GetEnvironmentVariable("CONCELIER_STORAGE__COMMANDTIMEOUTSECONDS"); - _previousTelemetryEnabled = Environment.GetEnvironmentVariable("CONCELIER_TELEMETRY__ENABLED"); - _previousTelemetryLogging = Environment.GetEnvironmentVariable("CONCELIER_TELEMETRY__ENABLELOGGING"); - _previousTelemetryTracing = Environment.GetEnvironmentVariable("CONCELIER_TELEMETRY__ENABLETRACING"); - _previousTelemetryMetrics = Environment.GetEnvironmentVariable("CONCELIER_TELEMETRY__ENABLEMETRICS"); - Environment.SetEnvironmentVariable("CONCELIER_STORAGE__DSN", connectionString); - Environment.SetEnvironmentVariable("CONCELIER_STORAGE__DRIVER", "mongo"); - Environment.SetEnvironmentVariable("CONCELIER_STORAGE__COMMANDTIMEOUTSECONDS", "30"); - Environment.SetEnvironmentVariable("CONCELIER_TELEMETRY__ENABLED", "false"); - Environment.SetEnvironmentVariable("CONCELIER_TELEMETRY__ENABLELOGGING", "false"); - Environment.SetEnvironmentVariable("CONCELIER_TELEMETRY__ENABLETRACING", "false"); - Environment.SetEnvironmentVariable("CONCELIER_TELEMETRY__ENABLEMETRICS", "false"); - if (environmentOverrides is not null) - { - foreach (var kvp in environmentOverrides) - { - var previous = Environment.GetEnvironmentVariable(kvp.Key); - _additionalPreviousEnvironment[kvp.Key] = previous; - Environment.SetEnvironmentVariable(kvp.Key, kvp.Value); - } - } - } - - protected override void ConfigureWebHost(IWebHostBuilder builder) - { - builder.ConfigureAppConfiguration((context, configurationBuilder) => - { - var settings = new Dictionary<string, string?> - { - ["Plugins:Directory"] = Path.Combine(context.HostingEnvironment.ContentRootPath, "StellaOps.Concelier.PluginBinaries"), - }; - - configurationBuilder.AddInMemoryCollection(settings!); - }); - - builder.ConfigureLogging(logging => - { - logging.AddProvider(LoggerProvider); - }); - - builder.ConfigureServices(services => - { - services.AddSingleton<StubJobCoordinator>(); - services.AddSingleton<IJobCoordinator>(sp => sp.GetRequiredService<StubJobCoordinator>()); - services.PostConfigure<ConcelierOptions>(options => - { - options.Storage.Driver = "mongo"; - options.Storage.Dsn = _connectionString; - options.Storage.CommandTimeoutSeconds = 30; - options.Plugins.Directory ??= Path.Combine(AppContext.BaseDirectory, "StellaOps.Concelier.PluginBinaries"); - options.Telemetry.Enabled = false; - options.Telemetry.EnableLogging = false; - options.Telemetry.EnableTracing = false; - options.Telemetry.EnableMetrics = false; - options.Authority ??= new ConcelierOptions.AuthorityOptions(); - _authorityConfigure?.Invoke(options.Authority); - }); - }); - - builder.ConfigureTestServices(services => - { - services.AddSingleton<IStartupFilter, RemoteIpStartupFilter>(); - services.PostConfigure<JwtBearerOptions>(StellaOpsAuthenticationDefaults.AuthenticationScheme, options => - { - options.RequireHttpsMetadata = false; - options.TokenValidationParameters = new TokenValidationParameters - { - ValidateIssuerSigningKey = true, - IssuerSigningKey = TestSigningKey, - ValidateIssuer = false, - ValidateAudience = false, - ValidateLifetime = false, - NameClaimType = ClaimTypes.Name, - RoleClaimType = ClaimTypes.Role, - ClockSkew = TimeSpan.Zero - }; - var issuer = 
string.IsNullOrWhiteSpace(options.Authority) ? TestAuthorityIssuer : options.Authority; - options.ConfigurationManager = new StaticConfigurationManager<OpenIdConnectConfiguration>(new OpenIdConnectConfiguration - { - Issuer = issuer - }); - }); - }); - } - - protected override void Dispose(bool disposing) - { - base.Dispose(disposing); - Environment.SetEnvironmentVariable("CONCELIER_STORAGE__DSN", _previousDsn); - Environment.SetEnvironmentVariable("CONCELIER_STORAGE__DRIVER", _previousDriver); - Environment.SetEnvironmentVariable("CONCELIER_STORAGE__COMMANDTIMEOUTSECONDS", _previousTimeout); - Environment.SetEnvironmentVariable("CONCELIER_TELEMETRY__ENABLED", _previousTelemetryEnabled); - Environment.SetEnvironmentVariable("CONCELIER_TELEMETRY__ENABLELOGGING", _previousTelemetryLogging); - Environment.SetEnvironmentVariable("CONCELIER_TELEMETRY__ENABLETRACING", _previousTelemetryTracing); - Environment.SetEnvironmentVariable("CONCELIER_TELEMETRY__ENABLEMETRICS", _previousTelemetryMetrics); - foreach (var kvp in _additionalPreviousEnvironment) - { - Environment.SetEnvironmentVariable(kvp.Key, kvp.Value); - } - - LoggerProvider.Dispose(); - } - - private sealed class RemoteIpStartupFilter : IStartupFilter - { - public Action<IApplicationBuilder> Configure(Action<IApplicationBuilder> next) - { - return app => - { - app.Use(async (context, nextMiddleware) => - { - if (context.Request.Headers.TryGetValue("X-Test-RemoteAddr", out var values) - && values.Count > 0 - && IPAddress.TryParse(values[0], out var remote)) - { - context.Connection.RemoteIpAddress = remote; - } - - await nextMiddleware(); - }); - - next(app); - }; - } - } - - public sealed record LogEntry( - string LoggerName, - LogLevel Level, - EventId EventId, - string? Message, - Exception? Exception, - IReadOnlyList<KeyValuePair<string, object?>> State) - { - public bool TryGetState(string name, out object? value) - { - foreach (var kvp in State) - { - if (string.Equals(kvp.Key, name, StringComparison.Ordinal)) - { - value = kvp.Value; - return true; - } - } - - value = null; - return false; - } - } - - public sealed class CollectingLoggerProvider : ILoggerProvider - { - private readonly object syncRoot = new(); - private readonly List<LogEntry> entries = new(); - private bool disposed; - - public ILogger CreateLogger(string categoryName) => new CollectingLogger(categoryName, this); - - public IReadOnlyList<LogEntry> Snapshot(string loggerName) - { - lock (syncRoot) - { - return entries - .Where(entry => string.Equals(entry.LoggerName, loggerName, StringComparison.Ordinal)) - .ToArray(); - } - } - - public void Dispose() - { - disposed = true; - lock (syncRoot) - { - entries.Clear(); - } - } - - private void Append(LogEntry entry) - { - if (disposed) - { - return; - } - - lock (syncRoot) - { - entries.Add(entry); - } - } - - private sealed class CollectingLogger : ILogger - { - private readonly string categoryName; - private readonly CollectingLoggerProvider provider; - - public CollectingLogger(string categoryName, CollectingLoggerProvider provider) - { - this.categoryName = categoryName; - this.provider = provider; - } - - public IDisposable? BeginScope<TState>(TState state) where TState : notnull => NullScope.Instance; - - public bool IsEnabled(LogLevel logLevel) => true; - - public void Log<TState>(LogLevel logLevel, EventId eventId, TState state, Exception? 
exception, Func<TState, Exception?, string> formatter) - { - if (formatter is null) - { - throw new ArgumentNullException(nameof(formatter)); - } - - var message = formatter(state, exception); - var kvps = ExtractState(state); - var entry = new LogEntry(categoryName, logLevel, eventId, message, exception, kvps); - provider.Append(entry); - } - - private static IReadOnlyList<KeyValuePair<string, object?>> ExtractState<TState>(TState state) - { - if (state is IReadOnlyList<KeyValuePair<string, object?>> list) - { - return list; - } - - if (state is IEnumerable<KeyValuePair<string, object?>> enumerable) - { - return enumerable.ToArray(); - } - - if (state is null) - { - return Array.Empty<KeyValuePair<string, object?>>(); - } - - return new[] { new KeyValuePair<string, object?>("State", state) }; - } - } - - private sealed class NullScope : IDisposable - { - public static readonly NullScope Instance = new(); - public void Dispose() - { - } - } - } - } - - private sealed class TempDirectory : IDisposable - { - public string Path { get; } - - public TempDirectory() - { - Path = System.IO.Path.Combine(System.IO.Path.GetTempPath(), "concelier-mirror-" + Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture)); - Directory.CreateDirectory(Path); - } - - public void Dispose() - { - try - { - if (Directory.Exists(Path)) - { - Directory.Delete(Path, recursive: true); - } - } - catch - { - // best effort cleanup - } - } - } - - private sealed record HealthPayload(string Status, DateTimeOffset StartedAt, double UptimeSeconds, StoragePayload Storage, TelemetryPayload Telemetry); - - private sealed record StoragePayload(string Driver, bool Completed, DateTimeOffset? CompletedAt, double? DurationMs); - - private sealed record TelemetryPayload(bool Enabled, bool Tracing, bool Metrics, bool Logging); - - private sealed record ReadyPayload(string Status, DateTimeOffset StartedAt, double UptimeSeconds, ReadyMongoPayload Mongo); - - private sealed record ReadyMongoPayload(string Status, double? LatencyMs, DateTimeOffset? CheckedAt, string? Error); - - private sealed record JobDefinitionPayload(string Kind, bool Enabled, string? CronExpression, TimeSpan Timeout, TimeSpan LeaseDuration, JobRunPayload? LastRun); - - private sealed record JobRunPayload(Guid RunId, string Kind, string Status, string Trigger, DateTimeOffset CreatedAt, DateTimeOffset? StartedAt, DateTimeOffset? CompletedAt, string? Error, TimeSpan? Duration, Dictionary<string, object?> Parameters); - - private sealed record ProblemDocument(string? Type, string? Title, int? Status, string? Detail, string? Instance); - - private async Task SeedAdvisoryRawDocumentsAsync(params BsonDocument[] documents) - { - var client = new MongoClient(_runner.ConnectionString); - var database = client.GetDatabase(MongoStorageDefaults.DefaultDatabaseName); - var collection = database.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryRaw); - await collection.DeleteManyAsync(FilterDefinition<BsonDocument>.Empty); - if (documents.Length > 0) - { - await collection.InsertManyAsync(documents); - } - } - - private static BsonDocument CreateAdvisoryRawDocument( - string tenant, - string vendor, - string upstreamId, - string contentHash, - BsonDocument? raw = null, - string? 
supersedes = null) - { - var now = DateTime.UtcNow; - return new BsonDocument - { - { "_id", BuildRawDocumentId(vendor, upstreamId, contentHash) }, - { "tenant", tenant }, - { - "source", - new BsonDocument - { - { "vendor", vendor }, - { "connector", "test-connector" }, - { "version", "1.0.0" } - } - }, - { - "upstream", - new BsonDocument - { - { "upstream_id", upstreamId }, - { "document_version", "1" }, - { "retrieved_at", now }, - { "content_hash", contentHash }, - { "signature", new BsonDocument { { "present", false } } }, - { "provenance", new BsonDocument { { "api", "https://example.test" } } } - } - }, - { - "content", - new BsonDocument - { - { "format", "osv" }, - { "raw", raw ?? new BsonDocument("id", upstreamId) } - } - }, - { - "identifiers", - new BsonDocument - { - { "aliases", new BsonArray(new[] { upstreamId }) }, - { "primary", upstreamId } - } - }, - { - "linkset", - new BsonDocument - { - { "aliases", new BsonArray() }, - { "purls", new BsonArray() }, - { "cpes", new BsonArray() }, - { "references", new BsonArray() }, - { "reconciled_from", new BsonArray() }, - { "notes", new BsonDocument() } - } - }, - { "supersedes", supersedes is null ? BsonNull.Value : supersedes }, - { "ingested_at", now }, - { "created_at", now } - }; - } - - private static string BuildRawDocumentId(string vendor, string upstreamId, string contentHash) - { - static string Sanitize(string value) - { - if (string.IsNullOrWhiteSpace(value)) - { - return "unknown"; - } - - var buffer = new char[value.Length]; - var index = 0; - foreach (var ch in value.Trim().ToLowerInvariant()) - { - buffer[index++] = char.IsLetterOrDigit(ch) ? ch : '-'; - } - - var sanitized = new string(buffer, 0, index).Trim('-'); - return string.IsNullOrEmpty(sanitized) ? "unknown" : sanitized; - } - - var vendorSegment = Sanitize(vendor); - var upstreamSegment = Sanitize(upstreamId); - var hashSegment = Sanitize(contentHash.Replace(":", "-")); - return $"advisory_raw:{vendorSegment}:{upstreamSegment}:{hashSegment}"; - } - - private static AdvisoryIngestRequest BuildAdvisoryIngestRequest(string contentHash, string upstreamId) - { - var raw = CreateJsonElement($@"{{""id"":""{upstreamId}"",""modified"":""{DateTime.UtcNow:O}""}}"); - var references = new[] - { - new AdvisoryLinksetReferenceRequest("advisory", $"https://example.test/advisories/{upstreamId}", null) - }; - - return new AdvisoryIngestRequest( - new AdvisorySourceRequest("osv", "osv-connector", "1.0.0", "feed"), - new AdvisoryUpstreamRequest( - upstreamId, - "2025-01-01T00:00:00Z", - DateTimeOffset.UtcNow, - contentHash, - new AdvisorySignatureRequest(false, null, null, null, null, null), - new Dictionary<string, string> { ["http.method"] = "GET" }), - new AdvisoryContentRequest("osv", "1.3.0", raw, null), - new AdvisoryIdentifiersRequest( - upstreamId, - new[] { upstreamId, $"{upstreamId}-ALIAS" }), - new AdvisoryLinksetRequest( - new[] { upstreamId }, - new[] { "pkg:npm/demo@1.0.0" }, - Array.Empty<string>(), - references, - Array.Empty<string>(), - new Dictionary<string, string> { ["note"] = "ingest-test" })); - } - - private static JsonElement CreateJsonElement(string json) - { - using var document = JsonDocument.Parse(json); - return document.RootElement.Clone(); - } - - private static async Task<IReadOnlyList<MetricMeasurement>> CaptureMetricsAsync(string meterName, string instrumentName, Func<Task> action) - { - var measurements = new List<MetricMeasurement>(); - var listener = new MeterListener(); - - listener.InstrumentPublished += (instrument, currentListener) 
=> - { - if (string.Equals(instrument.Meter.Name, meterName, StringComparison.Ordinal) && - string.Equals(instrument.Name, instrumentName, StringComparison.Ordinal)) - { - currentListener.EnableMeasurementEvents(instrument); - } - }; - - listener.SetMeasurementEventCallback<long>((instrument, measurement, tags, state) => - { - var tagDictionary = new Dictionary<string, object?>(StringComparer.Ordinal); - foreach (var tag in tags) - { - tagDictionary[tag.Key] = tag.Value; - } - - measurements.Add(new MetricMeasurement(instrument.Name, measurement, tagDictionary)); - }); - - listener.Start(); - try - { - await action().ConfigureAwait(false); - } - finally - { - listener.Dispose(); - } - - return measurements; - } - - private static string? GetTagValue(MetricMeasurement measurement, string tag) - { - if (measurement.Tags.TryGetValue(tag, out var value)) - { - return value?.ToString(); - } - - return null; - } - - private static string CreateTestToken(string tenant, params string[] scopes) - { - var normalizedTenant = string.IsNullOrWhiteSpace(tenant) ? "default" : tenant.Trim().ToLowerInvariant(); - var scopeSet = scopes is { Length: > 0 } - ? scopes - .Select(StellaOpsScopes.Normalize) - .Where(static scope => !string.IsNullOrEmpty(scope)) - .Select(static scope => scope!) - .Distinct(StringComparer.Ordinal) - .ToArray() - : Array.Empty<string>(); - - var claims = new List<Claim> - { - new Claim(StellaOpsClaimTypes.Subject, "test-user"), - new Claim(StellaOpsClaimTypes.Tenant, normalizedTenant), - new Claim(StellaOpsClaimTypes.Scope, string.Join(' ', scopeSet)) - }; - - foreach (var scope in scopeSet) - { - claims.Add(new Claim(StellaOpsClaimTypes.ScopeItem, scope)); - } - - var credentials = new SigningCredentials(TestSigningKey, SecurityAlgorithms.HmacSha256); - var now = DateTime.UtcNow; - var token = new JwtSecurityToken( - issuer: TestAuthorityIssuer, - audience: TestAuthorityAudience, - claims: claims, - notBefore: now.AddMinutes(-5), - expires: now.AddMinutes(30), - signingCredentials: credentials); - - return new JwtSecurityTokenHandler().WriteToken(token); - } - - private sealed record MetricMeasurement(string Instrument, long Value, IReadOnlyDictionary<string, object?> Tags); - - private sealed class DemoJob : IJob - { - public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken) => Task.CompletedTask; - } - - private sealed class StubJobCoordinator : IJobCoordinator - { - public JobTriggerResult NextResult { get; set; } = JobTriggerResult.NotFound("not set"); - - public IReadOnlyList<JobDefinition> Definitions { get; set; } = Array.Empty<JobDefinition>(); - - public IReadOnlyList<JobRunSnapshot> RecentRuns { get; set; } = Array.Empty<JobRunSnapshot>(); - - public IReadOnlyList<JobRunSnapshot> ActiveRuns { get; set; } = Array.Empty<JobRunSnapshot>(); - - public Dictionary<Guid, JobRunSnapshot> Runs { get; } = new(); - - public Dictionary<string, JobRunSnapshot?> LastRuns { get; } = new(StringComparer.Ordinal); - - public Task<JobTriggerResult> TriggerAsync(string kind, IReadOnlyDictionary<string, object?>? parameters, string trigger, CancellationToken cancellationToken) - => Task.FromResult(NextResult); - - public Task<IReadOnlyList<JobDefinition>> GetDefinitionsAsync(CancellationToken cancellationToken) - => Task.FromResult(Definitions); - - public Task<IReadOnlyList<JobRunSnapshot>> GetRecentRunsAsync(string? 
kind, int limit, CancellationToken cancellationToken) - { - IEnumerable<JobRunSnapshot> query = RecentRuns; - if (!string.IsNullOrWhiteSpace(kind)) - { - query = query.Where(run => string.Equals(run.Kind, kind, StringComparison.Ordinal)); - } - - return Task.FromResult<IReadOnlyList<JobRunSnapshot>>(query.Take(limit).ToArray()); - } - - public Task<IReadOnlyList<JobRunSnapshot>> GetActiveRunsAsync(CancellationToken cancellationToken) - => Task.FromResult(ActiveRuns); - - public Task<JobRunSnapshot?> GetRunAsync(Guid runId, CancellationToken cancellationToken) - => Task.FromResult(Runs.TryGetValue(runId, out var run) ? run : null); - - public Task<JobRunSnapshot?> GetLastRunAsync(string kind, CancellationToken cancellationToken) - => Task.FromResult(LastRuns.TryGetValue(kind, out var run) ? run : null); - - public Task<IReadOnlyDictionary<string, JobRunSnapshot>> GetLastRunsAsync(IEnumerable<string> kinds, CancellationToken cancellationToken) - { - var map = new Dictionary<string, JobRunSnapshot>(StringComparer.Ordinal); - foreach (var kind in kinds) - { - if (kind is null) - { - continue; - } - - if (LastRuns.TryGetValue(kind, out var run) && run is not null) - { - map[kind] = run; - } - } - - return Task.FromResult<IReadOnlyDictionary<string, JobRunSnapshot>>(map); - } - } -} +using System; +using System.Collections.Generic; +using System.Diagnostics.Metrics; +using System.Globalization; +using System.IdentityModel.Tokens.Jwt; +using System.IO; +using System.Linq; +using System.Net; +using System.Net.Http.Json; +using System.Net.Http.Headers; +using System.Security.Claims; +using System.Text; +using System.Text.Json; +using Microsoft.AspNetCore.Builder; +using Microsoft.AspNetCore.Hosting; +using Microsoft.AspNetCore.Mvc.Testing; +using Microsoft.AspNetCore.TestHost; +using Microsoft.AspNetCore.Authentication.JwtBearer; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using Mongo2Go; +using MongoDB.Bson; +using MongoDB.Driver; +using StellaOps.Concelier.Core.Events; +using StellaOps.Concelier.Core.Jobs; +using StellaOps.Concelier.Models; +using StellaOps.Concelier.Merge.Services; +using StellaOps.Concelier.Storage.Mongo; +using StellaOps.Concelier.Storage.Mongo.Observations; +using StellaOps.Concelier.WebService.Jobs; +using StellaOps.Concelier.WebService.Options; +using StellaOps.Concelier.WebService.Contracts; +using Xunit.Sdk; +using StellaOps.Auth.Abstractions; +using StellaOps.Auth.Client; +using Xunit; +using Microsoft.IdentityModel.Protocols; +using Microsoft.IdentityModel.Protocols.OpenIdConnect; +using Microsoft.IdentityModel.Tokens; + +namespace StellaOps.Concelier.WebService.Tests; + +public sealed class WebServiceEndpointsTests : IAsyncLifetime +{ + private const string TestAuthorityIssuer = "https://authority.example"; + private const string TestAuthorityAudience = "api://concelier"; + private const string TestSigningSecret = "0123456789ABCDEF0123456789ABCDEF"; + private static readonly SymmetricSecurityKey TestSigningKey = new(Encoding.UTF8.GetBytes(TestSigningSecret)); + + private MongoDbRunner _runner = null!; + private ConcelierApplicationFactory _factory = null!; + + public Task InitializeAsync() + { + _runner = MongoDbRunner.Start(singleNodeReplSet: true); + _factory = new ConcelierApplicationFactory(_runner.ConnectionString); + return Task.CompletedTask; + } + + public Task DisposeAsync() + { + _factory.Dispose(); + _runner.Dispose(); + return 
Task.CompletedTask; + } + + [Fact] + public async Task HealthAndReadyEndpointsRespond() + { + using var client = _factory.CreateClient(); + + var healthResponse = await client.GetAsync("/health"); + if (!healthResponse.IsSuccessStatusCode) + { + var body = await healthResponse.Content.ReadAsStringAsync(); + throw new Xunit.Sdk.XunitException($"/health failed: {(int)healthResponse.StatusCode} {body}"); + } + + var readyResponse = await client.GetAsync("/ready"); + if (!readyResponse.IsSuccessStatusCode) + { + var body = await readyResponse.Content.ReadAsStringAsync(); + throw new Xunit.Sdk.XunitException($"/ready failed: {(int)readyResponse.StatusCode} {body}"); + } + + var healthPayload = await healthResponse.Content.ReadFromJsonAsync<HealthPayload>(); + Assert.NotNull(healthPayload); + Assert.Equal("healthy", healthPayload!.Status); + Assert.Equal("mongo", healthPayload.Storage.Driver); + + var readyPayload = await readyResponse.Content.ReadFromJsonAsync<ReadyPayload>(); + Assert.NotNull(readyPayload); + Assert.Equal("ready", readyPayload!.Status); + Assert.Equal("ready", readyPayload.Mongo.Status); + } + + [Fact] + public async Task ObservationsEndpoint_ReturnsTenantScopedResults() + { + await SeedObservationDocumentsAsync(BuildSampleObservationDocuments()); + + using var client = _factory.CreateClient(); + + var response = await client.GetAsync("/concelier/observations?tenant=tenant-a&alias=CVE-2025-0001"); + if (!response.IsSuccessStatusCode) + { + var body = await response.Content.ReadAsStringAsync(); + throw new XunitException($"/concelier/observations failed: {(int)response.StatusCode} {body}"); + } + + using var document = await response.Content.ReadFromJsonAsync<JsonDocument>(); + Assert.NotNull(document); + var root = document!.RootElement; + var observations = root.GetProperty("observations").EnumerateArray().ToArray(); + Assert.Equal(2, observations.Length); + Assert.Equal("tenant-a:ghsa:beta:1", observations[0].GetProperty("observationId").GetString()); + Assert.Equal("tenant-a:nvd:alpha:1", observations[1].GetProperty("observationId").GetString()); + + var linkset = root.GetProperty("linkset"); + Assert.Equal(new[] { "cve-2025-0001", "ghsa-2025-xyz" }, linkset.GetProperty("aliases").EnumerateArray().Select(x => x.GetString()).ToArray()); + Assert.Equal(new[] { "pkg:npm/demo@1.0.0", "pkg:npm/demo@1.1.0" }, linkset.GetProperty("purls").EnumerateArray().Select(x => x.GetString()).ToArray()); + Assert.Equal(new[] { "cpe:/a:vendor:product:1.0", "cpe:/a:vendor:product:1.1" }, linkset.GetProperty("cpes").EnumerateArray().Select(x => x.GetString()).ToArray()); + + var references = linkset.GetProperty("references").EnumerateArray().ToArray(); + Assert.Equal(2, references.Length); + Assert.Equal("advisory", references[0].GetProperty("type").GetString()); + Assert.Equal("https://example.test/advisory-1", references[0].GetProperty("url").GetString()); + Assert.Equal("patch", references[1].GetProperty("type").GetString()); + + Assert.False(root.GetProperty("hasMore").GetBoolean()); + Assert.True(root.GetProperty("nextCursor").ValueKind == JsonValueKind.Null); + } + + [Fact] + public async Task ObservationsEndpoint_AppliesObservationIdFilter() + { + await SeedObservationDocumentsAsync(BuildSampleObservationDocuments()); + + using var client = _factory.CreateClient(); + var observationId = Uri.EscapeDataString("tenant-a:ghsa:beta:1"); + var response = await client.GetAsync($"/concelier/observations?tenant=tenant-a&observationId={observationId}&cpe=cpe:/a:vendor:product:1.1"); + if 
(!response.IsSuccessStatusCode) + { + var body = await response.Content.ReadAsStringAsync(); + throw new XunitException($"/concelier/observations filter failed: {(int)response.StatusCode} {body}"); + } + + using var document = await response.Content.ReadFromJsonAsync<JsonDocument>(); + Assert.NotNull(document); + var root = document!.RootElement; + var observations = root.GetProperty("observations").EnumerateArray().ToArray(); + Assert.Single(observations); + Assert.Equal("tenant-a:ghsa:beta:1", observations[0].GetProperty("observationId").GetString()); + Assert.Equal(new[] { "pkg:npm/demo@1.1.0" }, observations[0].GetProperty("linkset").GetProperty("purls").EnumerateArray().Select(x => x.GetString()).ToArray()); + Assert.Equal(new[] { "cpe:/a:vendor:product:1.1" }, observations[0].GetProperty("linkset").GetProperty("cpes").EnumerateArray().Select(x => x.GetString()).ToArray()); + + Assert.False(root.GetProperty("hasMore").GetBoolean()); + Assert.True(root.GetProperty("nextCursor").ValueKind == JsonValueKind.Null); + } + + [Fact] + public async Task ObservationsEndpoint_SupportsPagination() + { + await SeedObservationDocumentsAsync(BuildSampleObservationDocuments()); + + using var client = _factory.CreateClient(); + + var firstResponse = await client.GetAsync("/concelier/observations?tenant=tenant-a&limit=1"); + firstResponse.EnsureSuccessStatusCode(); + using var firstDocument = await firstResponse.Content.ReadFromJsonAsync<JsonDocument>(); + Assert.NotNull(firstDocument); + var firstRoot = firstDocument!.RootElement; + var firstObservations = firstRoot.GetProperty("observations").EnumerateArray().ToArray(); + Assert.Single(firstObservations); + var nextCursor = firstRoot.GetProperty("nextCursor").GetString(); + Assert.True(firstRoot.GetProperty("hasMore").GetBoolean()); + Assert.False(string.IsNullOrWhiteSpace(nextCursor)); + + var secondResponse = await client.GetAsync($"/concelier/observations?tenant=tenant-a&limit=2&cursor={Uri.EscapeDataString(nextCursor!)}"); + secondResponse.EnsureSuccessStatusCode(); + using var secondDocument = await secondResponse.Content.ReadFromJsonAsync<JsonDocument>(); + Assert.NotNull(secondDocument); + var secondRoot = secondDocument!.RootElement; + var secondObservations = secondRoot.GetProperty("observations").EnumerateArray().ToArray(); + Assert.Single(secondObservations); + Assert.False(secondRoot.GetProperty("hasMore").GetBoolean()); + Assert.True(secondRoot.GetProperty("nextCursor").ValueKind == JsonValueKind.Null); + Assert.Equal("tenant-a:nvd:alpha:1", secondObservations[0].GetProperty("observationId").GetString()); + } + + [Fact] + public async Task ObservationsEndpoint_ReturnsBadRequestWhenTenantMissing() + { + using var client = _factory.CreateClient(); + var response = await client.GetAsync("/concelier/observations"); + var body = await response.Content.ReadAsStringAsync(); + Assert.True(response.StatusCode == HttpStatusCode.BadRequest, $"Expected 400 but got {(int)response.StatusCode}: {body}"); + } + + [Fact] + public async Task AdvisoryIngestEndpoint_PersistsDocumentAndSupportsReadback() + { + using var client = _factory.CreateClient(); + client.DefaultRequestHeaders.Add("X-Stella-Tenant", "tenant-ingest"); + + var ingestRequest = BuildAdvisoryIngestRequest( + contentHash: "sha256:abc123", + upstreamId: "GHSA-INGEST-0001"); + + var ingestResponse = await client.PostAsJsonAsync("/ingest/advisory", ingestRequest); + Assert.Equal(HttpStatusCode.Created, ingestResponse.StatusCode); + + var ingestPayload = await 
ingestResponse.Content.ReadFromJsonAsync<AdvisoryIngestResponse>(); + Assert.NotNull(ingestPayload); + Assert.True(ingestPayload!.Inserted); + Assert.False(string.IsNullOrWhiteSpace(ingestPayload.Id)); + Assert.Equal("tenant-ingest", ingestPayload.Tenant); + Assert.Equal("sha256:abc123", ingestPayload.ContentHash); + Assert.NotNull(ingestResponse.Headers.Location); + var locationValue = ingestResponse.Headers.Location!.ToString(); + Assert.False(string.IsNullOrWhiteSpace(locationValue)); + var lastSlashIndex = locationValue.LastIndexOf('/'); + var idSegment = lastSlashIndex >= 0 + ? locationValue[(lastSlashIndex + 1)..] + : locationValue; + var decodedSegment = Uri.UnescapeDataString(idSegment); + Assert.Equal(ingestPayload.Id, decodedSegment); + + var duplicateResponse = await client.PostAsJsonAsync("/ingest/advisory", BuildAdvisoryIngestRequest( + contentHash: "sha256:abc123", + upstreamId: "GHSA-INGEST-0001")); + Assert.Equal(HttpStatusCode.OK, duplicateResponse.StatusCode); + var duplicatePayload = await duplicateResponse.Content.ReadFromJsonAsync<AdvisoryIngestResponse>(); + Assert.NotNull(duplicatePayload); + Assert.False(duplicatePayload!.Inserted); + + using (var getRequest = new HttpRequestMessage(HttpMethod.Get, $"/advisories/raw/{ingestPayload.Id}")) + { + getRequest.Headers.Add("X-Stella-Tenant", "tenant-ingest"); + var getResponse = await client.SendAsync(getRequest); + getResponse.EnsureSuccessStatusCode(); + + var record = await getResponse.Content.ReadFromJsonAsync<AdvisoryRawRecordResponse>(); + Assert.NotNull(record); + Assert.Equal(ingestPayload.Id, record!.Id); + Assert.Equal("tenant-ingest", record.Tenant); + Assert.Equal("sha256:abc123", record.Document.Upstream.ContentHash); + } + + using (var listRequest = new HttpRequestMessage(HttpMethod.Get, "/advisories/raw?limit=10")) + { + listRequest.Headers.Add("X-Stella-Tenant", "tenant-ingest"); + var listResponse = await client.SendAsync(listRequest); + listResponse.EnsureSuccessStatusCode(); + + var listPayload = await listResponse.Content.ReadFromJsonAsync<AdvisoryRawListResponse>(); + Assert.NotNull(listPayload); + var record = Assert.Single(listPayload!.Records); + Assert.Equal(ingestPayload.Id, record.Id); + } + } + + [Fact] + public async Task AocVerifyEndpoint_ReturnsSummaryForTenant() + { + using var client = _factory.CreateClient(); + client.DefaultRequestHeaders.Add("X-Stella-Tenant", "tenant-verify"); + + await client.PostAsJsonAsync("/ingest/advisory", BuildAdvisoryIngestRequest( + contentHash: "sha256:verify-1", + upstreamId: "GHSA-VERIFY-001")); + + var verifyResponse = await client.PostAsJsonAsync("/aoc/verify", new AocVerifyRequest(null, null, null, null, null)); + verifyResponse.EnsureSuccessStatusCode(); + + var verifyPayload = await verifyResponse.Content.ReadFromJsonAsync<AocVerifyResponse>(); + Assert.NotNull(verifyPayload); + Assert.Equal("tenant-verify", verifyPayload!.Tenant); + Assert.True(verifyPayload.Checked.Advisories >= 1); + Assert.Equal(0, verifyPayload.Checked.Vex); + Assert.True(verifyPayload.Metrics.IngestionWriteTotal >= verifyPayload.Checked.Advisories); + Assert.Empty(verifyPayload.Violations); + Assert.False(verifyPayload.Truncated); + } + + [Fact] + public async Task AocVerifyEndpoint_ReturnsViolationsForGuardFailures() + { + await SeedAdvisoryRawDocumentsAsync( + CreateAdvisoryRawDocument( + tenant: "tenant-verify-violations", + vendor: "osv", + upstreamId: "GHSA-VERIFY-ERR", + contentHash: string.Empty, + raw: new BsonDocument + { + { "id",
"GHSA-VERIFY-ERR" } + })); + + using var client = _factory.CreateClient(); + client.DefaultRequestHeaders.Add("X-Stella-Tenant", "tenant-verify-violations"); + + var verifyResponse = await client.PostAsJsonAsync("/aoc/verify", new AocVerifyRequest(null, null, null, null, null)); + verifyResponse.EnsureSuccessStatusCode(); + + var verifyPayload = await verifyResponse.Content.ReadFromJsonAsync<AocVerifyResponse>(); + Assert.NotNull(verifyPayload); + Assert.Equal("tenant-verify-violations", verifyPayload!.Tenant); + Assert.True(verifyPayload.Checked.Advisories >= 1); + var violation = Assert.Single(verifyPayload.Violations); + Assert.Equal("ERR_AOC_001", violation.Code); + Assert.True(violation.Count >= 1); + Assert.NotEmpty(violation.Examples); + } + + [Fact] + public async Task AdvisoryRawListEndpoint_SupportsCursorPagination() + { + using var client = _factory.CreateClient(); + client.DefaultRequestHeaders.Add("X-Stella-Tenant", "tenant-list"); + + await client.PostAsJsonAsync("/ingest/advisory", BuildAdvisoryIngestRequest("sha256:list-1", "GHSA-LIST-001")); + await client.PostAsJsonAsync("/ingest/advisory", BuildAdvisoryIngestRequest("sha256:list-2", "GHSA-LIST-002")); + await client.PostAsJsonAsync("/ingest/advisory", BuildAdvisoryIngestRequest("sha256:list-3", "GHSA-LIST-003")); + + using var firstRequest = new HttpRequestMessage(HttpMethod.Get, "/advisories/raw?limit=2"); + firstRequest.Headers.Add("X-Stella-Tenant", "tenant-list"); + var firstResponse = await client.SendAsync(firstRequest); + firstResponse.EnsureSuccessStatusCode(); + + var firstPage = await firstResponse.Content.ReadFromJsonAsync<AdvisoryRawListResponse>(); + Assert.NotNull(firstPage); + Assert.Equal(2, firstPage!.Records.Count); + Assert.True(firstPage.HasMore); + Assert.False(string.IsNullOrWhiteSpace(firstPage.NextCursor)); + + using var secondRequest = new HttpRequestMessage(HttpMethod.Get, $"/advisories/raw?cursor={Uri.EscapeDataString(firstPage.NextCursor!)}"); + secondRequest.Headers.Add("X-Stella-Tenant", "tenant-list"); + var secondResponse = await client.SendAsync(secondRequest); + secondResponse.EnsureSuccessStatusCode(); + + var secondPage = await secondResponse.Content.ReadFromJsonAsync<AdvisoryRawListResponse>(); + Assert.NotNull(secondPage); + Assert.Single(secondPage!.Records); + Assert.False(secondPage.HasMore); + Assert.Null(secondPage.NextCursor); + + var firstIds = firstPage.Records.Select(record => record.Id).ToArray(); + var secondIds = secondPage.Records.Select(record => record.Id).ToArray(); + Assert.Empty(firstIds.Intersect(secondIds)); + } + + [Fact] + public async Task AdvisoryIngestEndpoint_EmitsMetricsWithExpectedTags() + { + var measurements = await CaptureMetricsAsync( + IngestionMetrics.MeterName, + "ingestion_write_total", + async () => + { + using var client = _factory.CreateClient(); + client.DefaultRequestHeaders.Add("X-Stella-Tenant", "tenant-metrics"); + + await client.PostAsJsonAsync("/ingest/advisory", BuildAdvisoryIngestRequest("sha256:metric-1", "GHSA-METRIC-001")); + await client.PostAsJsonAsync("/ingest/advisory", BuildAdvisoryIngestRequest("sha256:metric-1", "GHSA-METRIC-001")); + }); + + Assert.Equal(2, measurements.Count); + + var inserted = measurements.FirstOrDefault(measurement => + string.Equals(GetTagValue(measurement, "tenant"), "tenant-metrics", StringComparison.Ordinal) && + string.Equals(GetTagValue(measurement, "result"), "inserted", StringComparison.Ordinal)); + Assert.NotNull(inserted); + Assert.Equal(1, inserted!.Value); + Assert.Equal("osv", 
GetTagValue(inserted, "source")); + + var duplicate = measurements.FirstOrDefault(measurement => + string.Equals(GetTagValue(measurement, "tenant"), "tenant-metrics", StringComparison.Ordinal) && + string.Equals(GetTagValue(measurement, "result"), "duplicate", StringComparison.Ordinal)); + Assert.NotNull(duplicate); + Assert.Equal(1, duplicate!.Value); + Assert.Equal("osv", GetTagValue(duplicate, "source")); + } + + [Fact] + public async Task AocVerifyEndpoint_EmitsVerificationMetric() + { + var measurements = await CaptureMetricsAsync( + IngestionMetrics.MeterName, + "verify_runs_total", + async () => + { + using var client = _factory.CreateClient(); + client.DefaultRequestHeaders.Add("X-Stella-Tenant", "tenant-verify-metrics"); + + await client.PostAsJsonAsync("/ingest/advisory", BuildAdvisoryIngestRequest("sha256:verify-metric", "GHSA-VERIFY-METRIC")); + var verifyResponse = await client.PostAsJsonAsync("/aoc/verify", new AocVerifyRequest(null, null, null, null, null)); + verifyResponse.EnsureSuccessStatusCode(); + }); + + var measurement = Assert.Single(measurements); + Assert.Equal("tenant-verify-metrics", GetTagValue(measurement, "tenant")); + Assert.Equal("ok", GetTagValue(measurement, "result")); + Assert.Equal(1, measurement.Value); + } + + [Fact] + public async Task AdvisoryIngestEndpoint_RejectsCrossTenantWhenAuthenticated() + { + var environment = new Dictionary<string, string?> + { + ["CONCELIER_AUTHORITY__ENABLED"] = "true", + ["CONCELIER_AUTHORITY__ALLOWANONYMOUSFALLBACK"] = "false", + ["CONCELIER_AUTHORITY__ISSUER"] = TestAuthorityIssuer, + ["CONCELIER_AUTHORITY__REQUIREHTTPSMETADATA"] = "false", + ["CONCELIER_AUTHORITY__AUDIENCES__0"] = TestAuthorityAudience, + ["CONCELIER_AUTHORITY__CLIENTID"] = "webservice-tests", + ["CONCELIER_AUTHORITY__CLIENTSECRET"] = "unused", + }; + + using var factory = new ConcelierApplicationFactory( + _runner.ConnectionString, + authority => + { + authority.Enabled = true; + authority.AllowAnonymousFallback = false; + authority.Issuer = TestAuthorityIssuer; + authority.RequireHttpsMetadata = false; + authority.Audiences.Clear(); + authority.Audiences.Add(TestAuthorityAudience); + authority.ClientId = "webservice-tests"; + authority.ClientSecret = "unused"; + }, + environment); + + using var client = factory.CreateClient(); + var token = CreateTestToken("tenant-auth", StellaOpsScopes.AdvisoryIngest); + client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", token); + client.DefaultRequestHeaders.Add("X-Stella-Tenant", "tenant-auth"); + + var ingestResponse = await client.PostAsJsonAsync("/ingest/advisory", BuildAdvisoryIngestRequest("sha256:auth-1", "GHSA-AUTH-001")); + Assert.Equal(HttpStatusCode.Created, ingestResponse.StatusCode); + + client.DefaultRequestHeaders.Remove("X-Stella-Tenant"); + client.DefaultRequestHeaders.Add("X-Stella-Tenant", "tenant-other"); + + var crossTenantResponse = await client.PostAsJsonAsync("/ingest/advisory", BuildAdvisoryIngestRequest("sha256:auth-2", "GHSA-AUTH-002")); + Assert.Equal(HttpStatusCode.Forbidden, crossTenantResponse.StatusCode); + } + + [Fact] + public async Task AdvisoryIngestEndpoint_ReturnsGuardViolationWhenContentHashMissing() + { + using var client = _factory.CreateClient(); + client.DefaultRequestHeaders.Add("X-Stella-Tenant", "tenant-violation"); + + var invalidRequest = BuildAdvisoryIngestRequest(contentHash: string.Empty, upstreamId: "GHSA-INVALID-1"); + var response = await client.PostAsJsonAsync("/ingest/advisory", invalidRequest); + + 
Assert.Equal(HttpStatusCode.UnprocessableEntity, response.StatusCode); + var problemJson = await response.Content.ReadAsStringAsync(); + using var document = JsonDocument.Parse(problemJson); + var root = document.RootElement; + Assert.Equal("Aggregation-Only Contract violation", root.GetProperty("title").GetString()); + Assert.Equal(422, root.GetProperty("status").GetInt32()); + Assert.True(root.TryGetProperty("violations", out var violations), "Problem response missing violations payload."); + Assert.True(root.TryGetProperty("code", out var codeElement), "Problem response missing code payload."); + Assert.Equal("ERR_AOC_004", codeElement.GetString()); + var violation = Assert.Single(violations.EnumerateArray()); + Assert.Equal("ERR_AOC_004", violation.GetProperty("code").GetString()); + } + + [Fact] + public async Task JobsEndpointsReturnExpectedStatuses() + { + using var client = _factory.CreateClient(); + + var definitions = await client.GetAsync("/jobs/definitions"); + if (!definitions.IsSuccessStatusCode) + { + var body = await definitions.Content.ReadAsStringAsync(); + throw new Xunit.Sdk.XunitException($"/jobs/definitions failed: {(int)definitions.StatusCode} {body}"); + } + + var trigger = await client.PostAsync("/jobs/unknown", new StringContent("{}", System.Text.Encoding.UTF8, "application/json")); + if (trigger.StatusCode != HttpStatusCode.NotFound) + { + var payload = await trigger.Content.ReadAsStringAsync(); + throw new Xunit.Sdk.XunitException($"/jobs/unknown expected 404, got {(int)trigger.StatusCode}: {payload}"); + } + var problem = await trigger.Content.ReadFromJsonAsync<ProblemDocument>(); + Assert.NotNull(problem); + Assert.Equal("https://stellaops.org/problems/not-found", problem!.Type); + Assert.Equal(404, problem.Status); + } + + [Fact] + public async Task JobRunEndpointReturnsProblemWhenNotFound() + { + using var client = _factory.CreateClient(); + var response = await client.GetAsync($"/jobs/{Guid.NewGuid()}"); + if (response.StatusCode != HttpStatusCode.NotFound) + { + var body = await response.Content.ReadAsStringAsync(); + throw new Xunit.Sdk.XunitException($"/jobs/{{id}} expected 404, got {(int)response.StatusCode}: {body}"); + } + var problem = await response.Content.ReadFromJsonAsync<ProblemDocument>(); + Assert.NotNull(problem); + Assert.Equal("https://stellaops.org/problems/not-found", problem!.Type); + } + + [Fact] + public async Task JobTriggerMapsCoordinatorOutcomes() + { + var handler = _factory.Services.GetRequiredService<StubJobCoordinator>(); + using var client = _factory.CreateClient(); + + handler.NextResult = JobTriggerResult.AlreadyRunning("busy"); + var conflict = await client.PostAsync("/jobs/test", JsonContent.Create(new JobTriggerRequest())); + if (conflict.StatusCode != HttpStatusCode.Conflict) + { + var payload = await conflict.Content.ReadAsStringAsync(); + throw new Xunit.Sdk.XunitException($"Conflict path expected 409, got {(int)conflict.StatusCode}: {payload}"); + } + var conflictProblem = await conflict.Content.ReadFromJsonAsync<ProblemDocument>(); + Assert.NotNull(conflictProblem); + Assert.Equal("https://stellaops.org/problems/conflict", conflictProblem!.Type); + + handler.NextResult = JobTriggerResult.Accepted(new JobRunSnapshot(Guid.NewGuid(), "demo", JobRunStatus.Pending, DateTimeOffset.UtcNow, null, null, "api", null, null, null, null, new Dictionary<string, object?>())); + var accepted = await client.PostAsync("/jobs/test", JsonContent.Create(new JobTriggerRequest())); + if (accepted.StatusCode != HttpStatusCode.Accepted) + { + 
var payload = await accepted.Content.ReadAsStringAsync(); + throw new Xunit.Sdk.XunitException($"Accepted path expected 202, got {(int)accepted.StatusCode}: {payload}"); + } + Assert.NotNull(accepted.Headers.Location); + var acceptedPayload = await accepted.Content.ReadFromJsonAsync<JobRunPayload>(); + Assert.NotNull(acceptedPayload); + + handler.NextResult = JobTriggerResult.Failed(new JobRunSnapshot(Guid.NewGuid(), "demo", JobRunStatus.Failed, DateTimeOffset.UtcNow, null, DateTimeOffset.UtcNow, "api", null, "err", null, null, new Dictionary<string, object?>()), "boom"); + var failed = await client.PostAsync("/jobs/test", JsonContent.Create(new JobTriggerRequest())); + if (failed.StatusCode != HttpStatusCode.InternalServerError) + { + var payload = await failed.Content.ReadAsStringAsync(); + throw new Xunit.Sdk.XunitException($"Failed path expected 500, got {(int)failed.StatusCode}: {payload}"); + } + var failureProblem = await failed.Content.ReadFromJsonAsync<ProblemDocument>(); + Assert.NotNull(failureProblem); + Assert.Equal("https://stellaops.org/problems/job-failure", failureProblem!.Type); + } + + [Fact] + public async Task JobsEndpointsExposeJobData() + { + var handler = _factory.Services.GetRequiredService<StubJobCoordinator>(); + var now = DateTimeOffset.UtcNow; + var run = new JobRunSnapshot( + Guid.NewGuid(), + "demo", + JobRunStatus.Succeeded, + now, + now, + now.AddSeconds(2), + "api", + "hash", + null, + TimeSpan.FromMinutes(5), + TimeSpan.FromMinutes(1), + new Dictionary<string, object?> { ["key"] = "value" }); + + handler.Definitions = new[] + { + new JobDefinition("demo", typeof(DemoJob), TimeSpan.FromMinutes(5), TimeSpan.FromMinutes(1), "*/5 * * * *", true) + }; + handler.LastRuns["demo"] = run; + handler.RecentRuns = new[] { run }; + handler.ActiveRuns = Array.Empty<JobRunSnapshot>(); + handler.Runs[run.RunId] = run; + + try + { + using var client = _factory.CreateClient(); + + var definitions = await client.GetFromJsonAsync<List<JobDefinitionPayload>>("/jobs/definitions"); + Assert.NotNull(definitions); + Assert.Single(definitions!); + Assert.Equal("demo", definitions![0].Kind); + Assert.NotNull(definitions[0].LastRun); + Assert.Equal(run.RunId, definitions[0].LastRun!.RunId); + + var runPayload = await client.GetFromJsonAsync<JobRunPayload>($"/jobs/{run.RunId}"); + Assert.NotNull(runPayload); + Assert.Equal(run.RunId, runPayload!.RunId); + Assert.Equal("Succeeded", runPayload.Status); + + var runs = await client.GetFromJsonAsync<List<JobRunPayload>>("/jobs?kind=demo&limit=5"); + Assert.NotNull(runs); + Assert.Single(runs!); + Assert.Equal(run.RunId, runs![0].RunId); + + var runsByDefinition = await client.GetFromJsonAsync<List<JobRunPayload>>("/jobs/definitions/demo/runs"); + Assert.NotNull(runsByDefinition); + Assert.Single(runsByDefinition!); + + var active = await client.GetFromJsonAsync<List<JobRunPayload>>("/jobs/active"); + Assert.NotNull(active); + Assert.Empty(active!); + } + finally + { + handler.Definitions = Array.Empty<JobDefinition>(); + handler.RecentRuns = Array.Empty<JobRunSnapshot>(); + handler.ActiveRuns = Array.Empty<JobRunSnapshot>(); + handler.Runs.Clear(); + handler.LastRuns.Clear(); + } + } + + [Fact] + public async Task AdvisoryReplayEndpointReturnsLatestStatement() + { + var vulnerabilityKey = "CVE-2025-9000"; + var advisory = new Advisory( + advisoryKey: vulnerabilityKey, + title: "Replay Test", + summary: "Example summary", + language: "en", + published: DateTimeOffset.Parse("2025-01-01T00:00:00Z", CultureInfo.InvariantCulture), + modified: 
DateTimeOffset.Parse("2025-01-02T00:00:00Z", CultureInfo.InvariantCulture), + severity: "medium", + exploitKnown: false, + aliases: new[] { vulnerabilityKey }, + references: Array.Empty<AdvisoryReference>(), + affectedPackages: Array.Empty<AffectedPackage>(), + cvssMetrics: Array.Empty<CvssMetric>(), + provenance: Array.Empty<AdvisoryProvenance>()); + + var statementId = Guid.NewGuid(); + using (var scope = _factory.Services.CreateScope()) + { + var eventLog = scope.ServiceProvider.GetRequiredService<IAdvisoryEventLog>(); + var appendRequest = new AdvisoryEventAppendRequest(new[] + { + new AdvisoryStatementInput( + vulnerabilityKey, + advisory, + advisory.Modified ?? advisory.Published ?? DateTimeOffset.UtcNow, + Array.Empty<Guid>(), + StatementId: statementId, + AdvisoryKey: advisory.AdvisoryKey) + }); + + await eventLog.AppendAsync(appendRequest, CancellationToken.None); + } + + using var client = _factory.CreateClient(); + var response = await client.GetAsync($"/concelier/advisories/{vulnerabilityKey}/replay"); + + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + var payload = await response.Content.ReadFromJsonAsync<ReplayResponse>(); + Assert.NotNull(payload); + Assert.Equal(vulnerabilityKey, payload!.VulnerabilityKey, ignoreCase: true); + var statement = Assert.Single(payload.Statements); + Assert.Equal(statementId, statement.StatementId); + Assert.Equal(advisory.AdvisoryKey, statement.Advisory.AdvisoryKey); + Assert.False(string.IsNullOrWhiteSpace(statement.StatementHash)); + Assert.True(payload.Conflicts is null || payload.Conflicts!.Count == 0); + } + + [Fact] + public async Task AdvisoryReplayEndpointReturnsConflictExplainer() + { + var vulnerabilityKey = "CVE-2025-9100"; + var statementId = Guid.NewGuid(); + var conflictId = Guid.NewGuid(); + var recordedAt = DateTimeOffset.Parse("2025-02-01T00:00:00Z", CultureInfo.InvariantCulture); + + using (var scope = _factory.Services.CreateScope()) + { + var eventLog = scope.ServiceProvider.GetRequiredService<IAdvisoryEventLog>(); + var advisory = new Advisory( + advisoryKey: vulnerabilityKey, + title: "Base advisory", + summary: "Baseline summary", + language: "en", + published: recordedAt.AddDays(-1), + modified: recordedAt, + severity: "critical", + exploitKnown: false, + aliases: new[] { vulnerabilityKey }, + references: Array.Empty<AdvisoryReference>(), + affectedPackages: Array.Empty<AffectedPackage>(), + cvssMetrics: Array.Empty<CvssMetric>(), + provenance: Array.Empty<AdvisoryProvenance>()); + + var statementInput = new AdvisoryStatementInput( + vulnerabilityKey, + advisory, + recordedAt, + Array.Empty<Guid>(), + StatementId: statementId, + AdvisoryKey: advisory.AdvisoryKey); + + await eventLog.AppendAsync(new AdvisoryEventAppendRequest(new[] { statementInput }), CancellationToken.None); + + var explainer = new MergeConflictExplainerPayload( + Type: "severity", + Reason: "mismatch", + PrimarySources: new[] { "vendor" }, + PrimaryRank: 1, + SuppressedSources: new[] { "nvd" }, + SuppressedRank: 5, + PrimaryValue: "CRITICAL", + SuppressedValue: "MEDIUM"); + + using var conflictDoc = JsonDocument.Parse(explainer.ToCanonicalJson()); + var conflictInput = new AdvisoryConflictInput( + vulnerabilityKey, + conflictDoc, + recordedAt, + new[] { statementId }, + ConflictId: conflictId); + + await eventLog.AppendAsync(new AdvisoryEventAppendRequest(Array.Empty<AdvisoryStatementInput>(), new[] { conflictInput }), CancellationToken.None); + } + + using var client = _factory.CreateClient(); + var response = await 
client.GetAsync($"/concelier/advisories/{vulnerabilityKey}/replay"); + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + var payload = await response.Content.ReadFromJsonAsync<ReplayResponse>(); + Assert.NotNull(payload); + var conflict = Assert.Single(payload!.Conflicts); + Assert.Equal(conflictId, conflict.ConflictId); + Assert.Equal("severity", conflict.Explainer.Type); + Assert.Equal("mismatch", conflict.Explainer.Reason); + Assert.Equal("CRITICAL", conflict.Explainer.PrimaryValue); + Assert.Equal("MEDIUM", conflict.Explainer.SuppressedValue); + Assert.Equal(conflict.Explainer.ComputeHashHex(), conflict.ConflictHash); + } + + [Fact] + public async Task MirrorEndpointsServeConfiguredArtifacts() + { + using var temp = new TempDirectory(); + var exportId = "20251019T120000Z"; + var exportRoot = Path.Combine(temp.Path, exportId); + var mirrorRoot = Path.Combine(exportRoot, "mirror"); + var domainRoot = Path.Combine(mirrorRoot, "primary"); + Directory.CreateDirectory(domainRoot); + + await File.WriteAllTextAsync( + Path.Combine(mirrorRoot, "index.json"), + """{"schemaVersion":1,"domains":[]}"""); + await File.WriteAllTextAsync( + Path.Combine(domainRoot, "manifest.json"), + """{"domainId":"primary"}"""); + await File.WriteAllTextAsync( + Path.Combine(domainRoot, "bundle.json"), + """{"advisories":[]}"""); + await File.WriteAllTextAsync( + Path.Combine(domainRoot, "bundle.json.jws"), + "test-signature"); + + var environment = new Dictionary<string, string?> + { + ["CONCELIER_MIRROR__ENABLED"] = "true", + ["CONCELIER_MIRROR__EXPORTROOT"] = temp.Path, + ["CONCELIER_MIRROR__ACTIVEEXPORTID"] = exportId, + ["CONCELIER_MIRROR__DOMAINS__0__ID"] = "primary", + ["CONCELIER_MIRROR__DOMAINS__0__DISPLAYNAME"] = "Primary", + ["CONCELIER_MIRROR__DOMAINS__0__REQUIREAUTHENTICATION"] = "false", + ["CONCELIER_MIRROR__DOMAINS__0__MAXDOWNLOADREQUESTSPERHOUR"] = "5", + ["CONCELIER_MIRROR__MAXINDEXREQUESTSPERHOUR"] = "5" + }; + + using var factory = new ConcelierApplicationFactory(_runner.ConnectionString, environmentOverrides: environment); + using var client = factory.CreateClient(); + + var indexResponse = await client.GetAsync("/concelier/exports/index.json"); + Assert.Equal(HttpStatusCode.OK, indexResponse.StatusCode); + var indexContent = await indexResponse.Content.ReadAsStringAsync(); + Assert.Contains(@"""schemaVersion"":1", indexContent, StringComparison.Ordinal); + + var manifestResponse = await client.GetAsync("/concelier/exports/mirror/primary/manifest.json"); + Assert.Equal(HttpStatusCode.OK, manifestResponse.StatusCode); + var manifestContent = await manifestResponse.Content.ReadAsStringAsync(); + Assert.Contains(@"""domainId"":""primary""", manifestContent, StringComparison.Ordinal); + + var bundleResponse = await client.GetAsync("/concelier/exports/mirror/primary/bundle.json.jws"); + Assert.Equal(HttpStatusCode.OK, bundleResponse.StatusCode); + var signatureContent = await bundleResponse.Content.ReadAsStringAsync(); + Assert.Equal("test-signature", signatureContent); + } + + [Fact] + public async Task MirrorEndpointsEnforceAuthenticationForProtectedDomains() + { + using var temp = new TempDirectory(); + var exportId = "20251019T120000Z"; + var secureRoot = Path.Combine(temp.Path, exportId, "mirror", "secure"); + Directory.CreateDirectory(secureRoot); + + await File.WriteAllTextAsync( + Path.Combine(temp.Path, exportId, "mirror", "index.json"), + """{"schemaVersion":1,"domains":[]}"""); + await File.WriteAllTextAsync( + Path.Combine(secureRoot, "manifest.json"), + 
"""{"domainId":"secure"}"""); + + var environment = new Dictionary<string, string?> + { + ["CONCELIER_MIRROR__ENABLED"] = "true", + ["CONCELIER_MIRROR__EXPORTROOT"] = temp.Path, + ["CONCELIER_MIRROR__ACTIVEEXPORTID"] = exportId, + ["CONCELIER_MIRROR__DOMAINS__0__ID"] = "secure", + ["CONCELIER_MIRROR__DOMAINS__0__REQUIREAUTHENTICATION"] = "true", + ["CONCELIER_MIRROR__DOMAINS__0__MAXDOWNLOADREQUESTSPERHOUR"] = "5", + ["CONCELIER_AUTHORITY__ENABLED"] = "true", + ["CONCELIER_AUTHORITY__ALLOWANONYMOUSFALLBACK"] = "false", + ["CONCELIER_AUTHORITY__ISSUER"] = "https://authority.example", + ["CONCELIER_AUTHORITY__REQUIREHTTPSMETADATA"] = "false", + ["CONCELIER_AUTHORITY__AUDIENCES__0"] = "api://concelier", + ["CONCELIER_AUTHORITY__REQUIREDSCOPES__0"] = StellaOpsScopes.ConcelierJobsTrigger, + ["CONCELIER_AUTHORITY__CLIENTID"] = "concelier-jobs", + ["CONCELIER_AUTHORITY__CLIENTSECRET"] = "secret", + ["CONCELIER_AUTHORITY__CLIENTSCOPES__0"] = StellaOpsScopes.ConcelierJobsTrigger + }; + + using var factory = new ConcelierApplicationFactory( + _runner.ConnectionString, + authority => + { + authority.Enabled = true; + authority.AllowAnonymousFallback = false; + authority.Issuer = "https://authority.example"; + authority.RequireHttpsMetadata = false; + authority.Audiences.Clear(); + authority.Audiences.Add("api://concelier"); + authority.RequiredScopes.Clear(); + authority.RequiredScopes.Add(StellaOpsScopes.ConcelierJobsTrigger); + authority.ClientId = "concelier-jobs"; + authority.ClientSecret = "secret"; + }, + environment); + + using var client = factory.CreateClient(); + var response = await client.GetAsync("/concelier/exports/mirror/secure/manifest.json"); + Assert.Equal(HttpStatusCode.Unauthorized, response.StatusCode); + var authHeader = Assert.Single(response.Headers.WwwAuthenticate); + Assert.Equal("Bearer", authHeader.Scheme); + } + + [Fact] + public async Task MirrorEndpointsRespectRateLimits() + { + using var temp = new TempDirectory(); + var exportId = "20251019T130000Z"; + var exportRoot = Path.Combine(temp.Path, exportId); + var mirrorRoot = Path.Combine(exportRoot, "mirror"); + Directory.CreateDirectory(mirrorRoot); + + await File.WriteAllTextAsync( + Path.Combine(mirrorRoot, "index.json"), + """{\"schemaVersion\":1,\"domains\":[]}""" + ); + + var environment = new Dictionary<string, string?> + { + ["CONCELIER_MIRROR__ENABLED"] = "true", + ["CONCELIER_MIRROR__EXPORTROOT"] = temp.Path, + ["CONCELIER_MIRROR__ACTIVEEXPORTID"] = exportId, + ["CONCELIER_MIRROR__MAXINDEXREQUESTSPERHOUR"] = "1", + ["CONCELIER_MIRROR__DOMAINS__0__ID"] = "primary", + ["CONCELIER_MIRROR__DOMAINS__0__REQUIREAUTHENTICATION"] = "false", + ["CONCELIER_MIRROR__DOMAINS__0__MAXDOWNLOADREQUESTSPERHOUR"] = "1" + }; + + using var factory = new ConcelierApplicationFactory(_runner.ConnectionString, environmentOverrides: environment); + using var client = factory.CreateClient(); + + var okResponse = await client.GetAsync("/concelier/exports/index.json"); + Assert.Equal(HttpStatusCode.OK, okResponse.StatusCode); + + var limitedResponse = await client.GetAsync("/concelier/exports/index.json"); + Assert.Equal((HttpStatusCode)429, limitedResponse.StatusCode); + Assert.NotNull(limitedResponse.Headers.RetryAfter); + Assert.True(limitedResponse.Headers.RetryAfter!.Delta.HasValue); + Assert.True(limitedResponse.Headers.RetryAfter!.Delta!.Value.TotalSeconds > 0); + } + + + [Fact] + public async Task JobsEndpointsAllowBypassWhenAuthorityEnabled() + { + var environment = new Dictionary<string, string?> + { + 
["CONCELIER_AUTHORITY__ENABLED"] = "true", + ["CONCELIER_AUTHORITY__ALLOWANONYMOUSFALLBACK"] = "false", + ["CONCELIER_AUTHORITY__ISSUER"] = "https://authority.example", + ["CONCELIER_AUTHORITY__REQUIREHTTPSMETADATA"] = "false", + ["CONCELIER_AUTHORITY__AUDIENCES__0"] = "api://concelier", + ["CONCELIER_AUTHORITY__REQUIREDSCOPES__0"] = StellaOpsScopes.ConcelierJobsTrigger, + ["CONCELIER_AUTHORITY__BYPASSNETWORKS__0"] = "127.0.0.1/32", + ["CONCELIER_AUTHORITY__BYPASSNETWORKS__1"] = "::1/128", + ["CONCELIER_AUTHORITY__CLIENTID"] = "concelier-jobs", + ["CONCELIER_AUTHORITY__CLIENTSECRET"] = "test-secret", + ["CONCELIER_AUTHORITY__CLIENTSCOPES__0"] = StellaOpsScopes.ConcelierJobsTrigger, + }; + + using var factory = new ConcelierApplicationFactory( + _runner.ConnectionString, + authority => + { + authority.Enabled = true; + authority.AllowAnonymousFallback = false; + authority.Issuer = "https://authority.example"; + authority.RequireHttpsMetadata = false; + authority.Audiences.Clear(); + authority.Audiences.Add("api://concelier"); + authority.RequiredScopes.Clear(); + authority.RequiredScopes.Add(StellaOpsScopes.ConcelierJobsTrigger); + authority.BypassNetworks.Clear(); + authority.BypassNetworks.Add("127.0.0.1/32"); + authority.BypassNetworks.Add("::1/128"); + authority.ClientId = "concelier-jobs"; + authority.ClientSecret = "test-secret"; + }, + environment); + + var handler = factory.Services.GetRequiredService<StubJobCoordinator>(); + handler.Definitions = new[] { new JobDefinition("demo", typeof(DemoJob), TimeSpan.FromMinutes(5), TimeSpan.FromMinutes(1), null, true) }; + + using var client = factory.CreateClient(); + client.DefaultRequestHeaders.Add("X-Test-RemoteAddr", "127.0.0.1"); + var response = await client.GetAsync("/jobs/definitions"); + + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + + var auditLogs = factory.LoggerProvider.Snapshot("Concelier.Authorization.Audit"); + var bypassLog = Assert.Single(auditLogs, entry => entry.TryGetState("Bypass", out var state) && state is bool flag && flag); + Assert.True(bypassLog.TryGetState("RemoteAddress", out var remoteObj) && string.Equals(remoteObj?.ToString(), "127.0.0.1", StringComparison.Ordinal)); + Assert.True(bypassLog.TryGetState("StatusCode", out var statusObj) && Convert.ToInt32(statusObj) == (int)HttpStatusCode.OK); + } + + [Fact] + public async Task JobsEndpointsRequireAuthWhenFallbackDisabled() + { + var enforcementEnvironment = new Dictionary<string, string?> + { + ["CONCELIER_AUTHORITY__ENABLED"] = "true", + ["CONCELIER_AUTHORITY__ALLOWANONYMOUSFALLBACK"] = "false", + ["CONCELIER_AUTHORITY__ISSUER"] = "https://authority.example", + ["CONCELIER_AUTHORITY__REQUIREHTTPSMETADATA"] = "false", + ["CONCELIER_AUTHORITY__AUDIENCES__0"] = "api://concelier", + ["CONCELIER_AUTHORITY__REQUIREDSCOPES__0"] = StellaOpsScopes.ConcelierJobsTrigger, + ["CONCELIER_AUTHORITY__CLIENTID"] = "concelier-jobs", + ["CONCELIER_AUTHORITY__CLIENTSECRET"] = "test-secret", + ["CONCELIER_AUTHORITY__CLIENTSCOPES__0"] = StellaOpsScopes.ConcelierJobsTrigger, + }; + + using var factory = new ConcelierApplicationFactory( + _runner.ConnectionString, + authority => + { + authority.Enabled = true; + authority.AllowAnonymousFallback = false; + authority.Issuer = "https://authority.example"; + authority.RequireHttpsMetadata = false; + authority.Audiences.Clear(); + authority.Audiences.Add("api://concelier"); + authority.RequiredScopes.Clear(); + authority.RequiredScopes.Add(StellaOpsScopes.ConcelierJobsTrigger); + authority.BypassNetworks.Clear(); + 
authority.ClientId = "concelier-jobs"; + authority.ClientSecret = "test-secret"; + }, + enforcementEnvironment); + + var resolved = factory.Services.GetRequiredService<IOptions<ConcelierOptions>>().Value; + Assert.False(resolved.Authority.AllowAnonymousFallback); + + using var client = factory.CreateClient(); + client.DefaultRequestHeaders.Add("X-Test-RemoteAddr", "127.0.0.1"); + var response = await client.GetAsync("/jobs/definitions"); + + Assert.Equal(HttpStatusCode.Unauthorized, response.StatusCode); + + var auditLogs = factory.LoggerProvider.Snapshot("Concelier.Authorization.Audit"); + var enforcementLog = Assert.Single(auditLogs); + Assert.True(enforcementLog.TryGetState("BypassAllowed", out var bypassAllowedObj) && bypassAllowedObj is bool bypassAllowed && bypassAllowed == false); + Assert.True(enforcementLog.TryGetState("HasPrincipal", out var principalObj) && principalObj is bool hasPrincipal && hasPrincipal == false); + } + + [Fact] + public void AuthorityClientResilienceOptionsAreBound() + { + var environment = new Dictionary<string, string?> + { + ["CONCELIER_AUTHORITY__ENABLED"] = "true", + ["CONCELIER_AUTHORITY__ISSUER"] = "https://authority.example", + ["CONCELIER_AUTHORITY__REQUIREHTTPSMETADATA"] = "false", + ["CONCELIER_AUTHORITY__AUDIENCES__0"] = "api://concelier", + ["CONCELIER_AUTHORITY__REQUIREDSCOPES__0"] = StellaOpsScopes.ConcelierJobsTrigger, + ["CONCELIER_AUTHORITY__CLIENTSCOPES__0"] = StellaOpsScopes.ConcelierJobsTrigger, + ["CONCELIER_AUTHORITY__BACKCHANNELTIMEOUTSECONDS"] = "45", + ["CONCELIER_AUTHORITY__RESILIENCE__ENABLERETRIES"] = "true", + ["CONCELIER_AUTHORITY__RESILIENCE__RETRYDELAYS__0"] = "00:00:02", + ["CONCELIER_AUTHORITY__RESILIENCE__RETRYDELAYS__1"] = "00:00:04", + ["CONCELIER_AUTHORITY__RESILIENCE__ALLOWOFFLINECACHEFALLBACK"] = "false", + ["CONCELIER_AUTHORITY__RESILIENCE__OFFLINECACHETOLERANCE"] = "00:02:30" + }; + + using var factory = new ConcelierApplicationFactory( + _runner.ConnectionString, + authority => + { + authority.Enabled = true; + authority.Issuer = "https://authority.example"; + authority.RequireHttpsMetadata = false; + authority.Audiences.Clear(); + authority.Audiences.Add("api://concelier"); + authority.RequiredScopes.Clear(); + authority.RequiredScopes.Add(StellaOpsScopes.ConcelierJobsTrigger); + authority.ClientScopes.Clear(); + authority.ClientScopes.Add(StellaOpsScopes.ConcelierJobsTrigger); + authority.BackchannelTimeoutSeconds = 45; + }, + environment); + + var monitor = factory.Services.GetRequiredService<IOptionsMonitor<StellaOpsAuthClientOptions>>(); + var options = monitor.CurrentValue; + + Assert.Equal("https://authority.example", options.Authority); + Assert.Equal(TimeSpan.FromSeconds(45), options.HttpTimeout); + Assert.Equal(new[] { StellaOpsScopes.ConcelierJobsTrigger }, options.NormalizedScopes); + Assert.Equal(new[] { TimeSpan.FromSeconds(2), TimeSpan.FromSeconds(4) }, options.NormalizedRetryDelays); + Assert.False(options.AllowOfflineCacheFallback); + Assert.Equal(TimeSpan.FromSeconds(150), options.OfflineCacheTolerance); + } + + private async Task SeedObservationDocumentsAsync(IEnumerable<AdvisoryObservationDocument> documents) + { + var client = new MongoClient(_runner.ConnectionString); + var database = client.GetDatabase(MongoStorageDefaults.DefaultDatabaseName); + var collection = database.GetCollection<AdvisoryObservationDocument>(MongoStorageDefaults.Collections.AdvisoryObservations); + + try + { + await database.DropCollectionAsync(MongoStorageDefaults.Collections.AdvisoryObservations); + } + catch 
(MongoCommandException ex) when (ex.CodeName == "NamespaceNotFound" || ex.Message.Contains("ns not found", StringComparison.OrdinalIgnoreCase)) + { + // Collection does not exist yet; ignore. + } + + var snapshot = documents?.ToArray() ?? Array.Empty<AdvisoryObservationDocument>(); + if (snapshot.Length == 0) + { + return; + } + + await collection.InsertManyAsync(snapshot); + } + + private static AdvisoryObservationDocument[] BuildSampleObservationDocuments() + { + return new[] + { + CreateObservationDocument( + id: "tenant-a:nvd:alpha:1", + tenant: "tenant-a", + createdAt: new DateTime(2025, 1, 5, 0, 0, 0, DateTimeKind.Utc), + aliases: new[] { "cve-2025-0001" }, + purls: new[] { "pkg:npm/demo@1.0.0" }, + cpes: new[] { "cpe:/a:vendor:product:1.0" }, + references: new[] { ("advisory", "https://example.test/advisory-1") }), + CreateObservationDocument( + id: "tenant-a:ghsa:beta:1", + tenant: "tenant-a", + createdAt: new DateTime(2025, 1, 6, 0, 0, 0, DateTimeKind.Utc), + aliases: new[] { "ghsa-2025-xyz", "cve-2025-0001" }, + purls: new[] { "pkg:npm/demo@1.1.0" }, + cpes: new[] { "cpe:/a:vendor:product:1.1" }, + references: new[] { ("patch", "https://example.test/patch-1") }), + CreateObservationDocument( + id: "tenant-b:nvd:alpha:1", + tenant: "tenant-b", + createdAt: new DateTime(2025, 1, 7, 0, 0, 0, DateTimeKind.Utc), + aliases: new[] { "cve-2025-0001" }, + purls: new[] { "pkg:npm/demo@2.0.0" }, + cpes: new[] { "cpe:/a:vendor:product:2.0" }, + references: new[] { ("advisory", "https://example.test/advisory-2") }) + }; + } + + private static AdvisoryObservationDocument CreateObservationDocument( + string id, + string tenant, + DateTime createdAt, + IEnumerable<string>? aliases = null, + IEnumerable<string>? purls = null, + IEnumerable<string>? cpes = null, + IEnumerable<(string Type, string Url)>? references = null) + { + return new AdvisoryObservationDocument + { + Id = id, + Tenant = tenant.ToLowerInvariant(), + CreatedAt = createdAt, + Source = new AdvisoryObservationSourceDocument + { + Vendor = "nvd", + Stream = "feed", + Api = "https://example.test/api" + }, + Upstream = new AdvisoryObservationUpstreamDocument + { + UpstreamId = id, + DocumentVersion = null, + FetchedAt = createdAt, + ReceivedAt = createdAt, + ContentHash = $"sha256:{id}", + Signature = new AdvisoryObservationSignatureDocument + { + Present = false + }, + Metadata = new Dictionary<string, string>(StringComparer.Ordinal) + }, + Content = new AdvisoryObservationContentDocument + { + Format = "csaf", + SpecVersion = "2.0", + Raw = BsonDocument.Parse("""{"observation":"%ID%"}""".Replace("%ID%", id)), + Metadata = new Dictionary<string, string>(StringComparer.Ordinal) + }, + Linkset = new AdvisoryObservationLinksetDocument + { + Aliases = aliases?.Select(value => value.Trim().ToLowerInvariant()).ToList(), + Purls = purls?.Select(value => value.Trim()).ToList(), + Cpes = cpes?.Select(value => value.Trim()).ToList(), + References = references is null + ? new List<AdvisoryObservationReferenceDocument>() + : references + .Select(reference => new AdvisoryObservationReferenceDocument + { + Type = reference.Type.Trim().ToLowerInvariant(), + Url = reference.Url.Trim() + }) + .ToList() + }, + Attributes = new Dictionary<string, string>(StringComparer.Ordinal) + }; + } + + private sealed record ReplayResponse( + string VulnerabilityKey, + DateTimeOffset? AsOf, + List<ReplayStatement> Statements, + List<ReplayConflict>? 
Conflicts); + + private sealed record ReplayStatement( + Guid StatementId, + string VulnerabilityKey, + string AdvisoryKey, + Advisory Advisory, + string StatementHash, + DateTimeOffset AsOf, + DateTimeOffset RecordedAt, + IReadOnlyList<Guid> InputDocumentIds); + + private sealed record ReplayConflict( + Guid ConflictId, + string VulnerabilityKey, + IReadOnlyList<Guid> StatementIds, + string ConflictHash, + DateTimeOffset AsOf, + DateTimeOffset RecordedAt, + string Details, + MergeConflictExplainerPayload Explainer); + + private sealed class ConcelierApplicationFactory : WebApplicationFactory<Program> + { + private readonly string _connectionString; + private readonly string? _previousDsn; + private readonly string? _previousDriver; + private readonly string? _previousTimeout; + private readonly string? _previousTelemetryEnabled; + private readonly string? _previousTelemetryLogging; + private readonly string? _previousTelemetryTracing; + private readonly string? _previousTelemetryMetrics; + private readonly Action<ConcelierOptions.AuthorityOptions>? _authorityConfigure; + private readonly IDictionary<string, string?> _additionalPreviousEnvironment = new Dictionary<string, string?>(StringComparer.OrdinalIgnoreCase); + public CollectingLoggerProvider LoggerProvider { get; } = new(); + + public ConcelierApplicationFactory( + string connectionString, + Action<ConcelierOptions.AuthorityOptions>? authorityConfigure = null, + IDictionary<string, string?>? environmentOverrides = null) + { + _connectionString = connectionString; + _authorityConfigure = authorityConfigure; + _previousDsn = Environment.GetEnvironmentVariable("CONCELIER_STORAGE__DSN"); + _previousDriver = Environment.GetEnvironmentVariable("CONCELIER_STORAGE__DRIVER"); + _previousTimeout = Environment.GetEnvironmentVariable("CONCELIER_STORAGE__COMMANDTIMEOUTSECONDS"); + _previousTelemetryEnabled = Environment.GetEnvironmentVariable("CONCELIER_TELEMETRY__ENABLED"); + _previousTelemetryLogging = Environment.GetEnvironmentVariable("CONCELIER_TELEMETRY__ENABLELOGGING"); + _previousTelemetryTracing = Environment.GetEnvironmentVariable("CONCELIER_TELEMETRY__ENABLETRACING"); + _previousTelemetryMetrics = Environment.GetEnvironmentVariable("CONCELIER_TELEMETRY__ENABLEMETRICS"); + Environment.SetEnvironmentVariable("CONCELIER_STORAGE__DSN", connectionString); + Environment.SetEnvironmentVariable("CONCELIER_STORAGE__DRIVER", "mongo"); + Environment.SetEnvironmentVariable("CONCELIER_STORAGE__COMMANDTIMEOUTSECONDS", "30"); + Environment.SetEnvironmentVariable("CONCELIER_TELEMETRY__ENABLED", "false"); + Environment.SetEnvironmentVariable("CONCELIER_TELEMETRY__ENABLELOGGING", "false"); + Environment.SetEnvironmentVariable("CONCELIER_TELEMETRY__ENABLETRACING", "false"); + Environment.SetEnvironmentVariable("CONCELIER_TELEMETRY__ENABLEMETRICS", "false"); + if (environmentOverrides is not null) + { + foreach (var kvp in environmentOverrides) + { + var previous = Environment.GetEnvironmentVariable(kvp.Key); + _additionalPreviousEnvironment[kvp.Key] = previous; + Environment.SetEnvironmentVariable(kvp.Key, kvp.Value); + } + } + } + + protected override void ConfigureWebHost(IWebHostBuilder builder) + { + builder.ConfigureAppConfiguration((context, configurationBuilder) => + { + var settings = new Dictionary<string, string?> + { + ["Plugins:Directory"] = Path.Combine(context.HostingEnvironment.ContentRootPath, "StellaOps.Concelier.PluginBinaries"), + }; + + configurationBuilder.AddInMemoryCollection(settings!); + }); + + builder.ConfigureLogging(logging 
=> + { + logging.AddProvider(LoggerProvider); + }); + + builder.ConfigureServices(services => + { + services.AddSingleton<StubJobCoordinator>(); + services.AddSingleton<IJobCoordinator>(sp => sp.GetRequiredService<StubJobCoordinator>()); + services.PostConfigure<ConcelierOptions>(options => + { + options.Storage.Driver = "mongo"; + options.Storage.Dsn = _connectionString; + options.Storage.CommandTimeoutSeconds = 30; + options.Plugins.Directory ??= Path.Combine(AppContext.BaseDirectory, "StellaOps.Concelier.PluginBinaries"); + options.Telemetry.Enabled = false; + options.Telemetry.EnableLogging = false; + options.Telemetry.EnableTracing = false; + options.Telemetry.EnableMetrics = false; + options.Authority ??= new ConcelierOptions.AuthorityOptions(); + _authorityConfigure?.Invoke(options.Authority); + }); + }); + + builder.ConfigureTestServices(services => + { + services.AddSingleton<IStartupFilter, RemoteIpStartupFilter>(); + services.PostConfigure<JwtBearerOptions>(StellaOpsAuthenticationDefaults.AuthenticationScheme, options => + { + options.RequireHttpsMetadata = false; + options.TokenValidationParameters = new TokenValidationParameters + { + ValidateIssuerSigningKey = true, + IssuerSigningKey = TestSigningKey, + ValidateIssuer = false, + ValidateAudience = false, + ValidateLifetime = false, + NameClaimType = ClaimTypes.Name, + RoleClaimType = ClaimTypes.Role, + ClockSkew = TimeSpan.Zero + }; + var issuer = string.IsNullOrWhiteSpace(options.Authority) ? TestAuthorityIssuer : options.Authority; + options.ConfigurationManager = new StaticConfigurationManager<OpenIdConnectConfiguration>(new OpenIdConnectConfiguration + { + Issuer = issuer + }); + }); + }); + } + + protected override void Dispose(bool disposing) + { + base.Dispose(disposing); + Environment.SetEnvironmentVariable("CONCELIER_STORAGE__DSN", _previousDsn); + Environment.SetEnvironmentVariable("CONCELIER_STORAGE__DRIVER", _previousDriver); + Environment.SetEnvironmentVariable("CONCELIER_STORAGE__COMMANDTIMEOUTSECONDS", _previousTimeout); + Environment.SetEnvironmentVariable("CONCELIER_TELEMETRY__ENABLED", _previousTelemetryEnabled); + Environment.SetEnvironmentVariable("CONCELIER_TELEMETRY__ENABLELOGGING", _previousTelemetryLogging); + Environment.SetEnvironmentVariable("CONCELIER_TELEMETRY__ENABLETRACING", _previousTelemetryTracing); + Environment.SetEnvironmentVariable("CONCELIER_TELEMETRY__ENABLEMETRICS", _previousTelemetryMetrics); + foreach (var kvp in _additionalPreviousEnvironment) + { + Environment.SetEnvironmentVariable(kvp.Key, kvp.Value); + } + + LoggerProvider.Dispose(); + } + + private sealed class RemoteIpStartupFilter : IStartupFilter + { + public Action<IApplicationBuilder> Configure(Action<IApplicationBuilder> next) + { + return app => + { + app.Use(async (context, nextMiddleware) => + { + if (context.Request.Headers.TryGetValue("X-Test-RemoteAddr", out var values) + && values.Count > 0 + && IPAddress.TryParse(values[0], out var remote)) + { + context.Connection.RemoteIpAddress = remote; + } + + await nextMiddleware(); + }); + + next(app); + }; + } + } + + public sealed record LogEntry( + string LoggerName, + LogLevel Level, + EventId EventId, + string? Message, + Exception? Exception, + IReadOnlyList<KeyValuePair<string, object?>> State) + { + public bool TryGetState(string name, out object? 
value) + { + foreach (var kvp in State) + { + if (string.Equals(kvp.Key, name, StringComparison.Ordinal)) + { + value = kvp.Value; + return true; + } + } + + value = null; + return false; + } + } + + public sealed class CollectingLoggerProvider : ILoggerProvider + { + private readonly object syncRoot = new(); + private readonly List<LogEntry> entries = new(); + private bool disposed; + + public ILogger CreateLogger(string categoryName) => new CollectingLogger(categoryName, this); + + public IReadOnlyList<LogEntry> Snapshot(string loggerName) + { + lock (syncRoot) + { + return entries + .Where(entry => string.Equals(entry.LoggerName, loggerName, StringComparison.Ordinal)) + .ToArray(); + } + } + + public void Dispose() + { + disposed = true; + lock (syncRoot) + { + entries.Clear(); + } + } + + private void Append(LogEntry entry) + { + if (disposed) + { + return; + } + + lock (syncRoot) + { + entries.Add(entry); + } + } + + private sealed class CollectingLogger : ILogger + { + private readonly string categoryName; + private readonly CollectingLoggerProvider provider; + + public CollectingLogger(string categoryName, CollectingLoggerProvider provider) + { + this.categoryName = categoryName; + this.provider = provider; + } + + public IDisposable? BeginScope<TState>(TState state) where TState : notnull => NullScope.Instance; + + public bool IsEnabled(LogLevel logLevel) => true; + + public void Log<TState>(LogLevel logLevel, EventId eventId, TState state, Exception? exception, Func<TState, Exception?, string> formatter) + { + if (formatter is null) + { + throw new ArgumentNullException(nameof(formatter)); + } + + var message = formatter(state, exception); + var kvps = ExtractState(state); + var entry = new LogEntry(categoryName, logLevel, eventId, message, exception, kvps); + provider.Append(entry); + } + + private static IReadOnlyList<KeyValuePair<string, object?>> ExtractState<TState>(TState state) + { + if (state is IReadOnlyList<KeyValuePair<string, object?>> list) + { + return list; + } + + if (state is IEnumerable<KeyValuePair<string, object?>> enumerable) + { + return enumerable.ToArray(); + } + + if (state is null) + { + return Array.Empty<KeyValuePair<string, object?>>(); + } + + return new[] { new KeyValuePair<string, object?>("State", state) }; + } + } + + private sealed class NullScope : IDisposable + { + public static readonly NullScope Instance = new(); + public void Dispose() + { + } + } + } + } + + private sealed class TempDirectory : IDisposable + { + public string Path { get; } + + public TempDirectory() + { + Path = System.IO.Path.Combine(System.IO.Path.GetTempPath(), "concelier-mirror-" + Guid.NewGuid().ToString("N", CultureInfo.InvariantCulture)); + Directory.CreateDirectory(Path); + } + + public void Dispose() + { + try + { + if (Directory.Exists(Path)) + { + Directory.Delete(Path, recursive: true); + } + } + catch + { + // best effort cleanup + } + } + } + + private sealed record HealthPayload(string Status, DateTimeOffset StartedAt, double UptimeSeconds, StoragePayload Storage, TelemetryPayload Telemetry); + + private sealed record StoragePayload(string Driver, bool Completed, DateTimeOffset? CompletedAt, double? DurationMs); + + private sealed record TelemetryPayload(bool Enabled, bool Tracing, bool Metrics, bool Logging); + + private sealed record ReadyPayload(string Status, DateTimeOffset StartedAt, double UptimeSeconds, ReadyMongoPayload Mongo); + + private sealed record ReadyMongoPayload(string Status, double? LatencyMs, DateTimeOffset? CheckedAt, string? 
Error); + + private sealed record JobDefinitionPayload(string Kind, bool Enabled, string? CronExpression, TimeSpan Timeout, TimeSpan LeaseDuration, JobRunPayload? LastRun); + + private sealed record JobRunPayload(Guid RunId, string Kind, string Status, string Trigger, DateTimeOffset CreatedAt, DateTimeOffset? StartedAt, DateTimeOffset? CompletedAt, string? Error, TimeSpan? Duration, Dictionary<string, object?> Parameters); + + private sealed record ProblemDocument(string? Type, string? Title, int? Status, string? Detail, string? Instance); + + private async Task SeedAdvisoryRawDocumentsAsync(params BsonDocument[] documents) + { + var client = new MongoClient(_runner.ConnectionString); + var database = client.GetDatabase(MongoStorageDefaults.DefaultDatabaseName); + var collection = database.GetCollection<BsonDocument>(MongoStorageDefaults.Collections.AdvisoryRaw); + await collection.DeleteManyAsync(FilterDefinition<BsonDocument>.Empty); + if (documents.Length > 0) + { + await collection.InsertManyAsync(documents); + } + } + + private static BsonDocument CreateAdvisoryRawDocument( + string tenant, + string vendor, + string upstreamId, + string contentHash, + BsonDocument? raw = null, + string? supersedes = null) + { + var now = DateTime.UtcNow; + return new BsonDocument + { + { "_id", BuildRawDocumentId(vendor, upstreamId, contentHash) }, + { "tenant", tenant }, + { + "source", + new BsonDocument + { + { "vendor", vendor }, + { "connector", "test-connector" }, + { "version", "1.0.0" } + } + }, + { + "upstream", + new BsonDocument + { + { "upstream_id", upstreamId }, + { "document_version", "1" }, + { "retrieved_at", now }, + { "content_hash", contentHash }, + { "signature", new BsonDocument { { "present", false } } }, + { "provenance", new BsonDocument { { "api", "https://example.test" } } } + } + }, + { + "content", + new BsonDocument + { + { "format", "osv" }, + { "raw", raw ?? new BsonDocument("id", upstreamId) } + } + }, + { + "identifiers", + new BsonDocument + { + { "aliases", new BsonArray(new[] { upstreamId }) }, + { "primary", upstreamId } + } + }, + { + "linkset", + new BsonDocument + { + { "aliases", new BsonArray() }, + { "purls", new BsonArray() }, + { "cpes", new BsonArray() }, + { "references", new BsonArray() }, + { "reconciled_from", new BsonArray() }, + { "notes", new BsonDocument() } + } + }, + { "supersedes", supersedes is null ? BsonNull.Value : supersedes }, + { "ingested_at", now }, + { "created_at", now } + }; + } + + private static string BuildRawDocumentId(string vendor, string upstreamId, string contentHash) + { + static string Sanitize(string value) + { + if (string.IsNullOrWhiteSpace(value)) + { + return "unknown"; + } + + var buffer = new char[value.Length]; + var index = 0; + foreach (var ch in value.Trim().ToLowerInvariant()) + { + buffer[index++] = char.IsLetterOrDigit(ch) ? ch : '-'; + } + + var sanitized = new string(buffer, 0, index).Trim('-'); + return string.IsNullOrEmpty(sanitized) ? 
"unknown" : sanitized; + } + + var vendorSegment = Sanitize(vendor); + var upstreamSegment = Sanitize(upstreamId); + var hashSegment = Sanitize(contentHash.Replace(":", "-")); + return $"advisory_raw:{vendorSegment}:{upstreamSegment}:{hashSegment}"; + } + + private static AdvisoryIngestRequest BuildAdvisoryIngestRequest(string contentHash, string upstreamId) + { + var raw = CreateJsonElement($@"{{""id"":""{upstreamId}"",""modified"":""{DateTime.UtcNow:O}""}}"); + var references = new[] + { + new AdvisoryLinksetReferenceRequest("advisory", $"https://example.test/advisories/{upstreamId}", null) + }; + + return new AdvisoryIngestRequest( + new AdvisorySourceRequest("osv", "osv-connector", "1.0.0", "feed"), + new AdvisoryUpstreamRequest( + upstreamId, + "2025-01-01T00:00:00Z", + DateTimeOffset.UtcNow, + contentHash, + new AdvisorySignatureRequest(false, null, null, null, null, null), + new Dictionary<string, string> { ["http.method"] = "GET" }), + new AdvisoryContentRequest("osv", "1.3.0", raw, null), + new AdvisoryIdentifiersRequest( + upstreamId, + new[] { upstreamId, $"{upstreamId}-ALIAS" }), + new AdvisoryLinksetRequest( + new[] { upstreamId }, + new[] { "pkg:npm/demo@1.0.0" }, + Array.Empty<string>(), + references, + Array.Empty<string>(), + new Dictionary<string, string> { ["note"] = "ingest-test" })); + } + + private static JsonElement CreateJsonElement(string json) + { + using var document = JsonDocument.Parse(json); + return document.RootElement.Clone(); + } + + private static async Task<IReadOnlyList<MetricMeasurement>> CaptureMetricsAsync(string meterName, string instrumentName, Func<Task> action) + { + var measurements = new List<MetricMeasurement>(); + var listener = new MeterListener(); + + listener.InstrumentPublished += (instrument, currentListener) => + { + if (string.Equals(instrument.Meter.Name, meterName, StringComparison.Ordinal) && + string.Equals(instrument.Name, instrumentName, StringComparison.Ordinal)) + { + currentListener.EnableMeasurementEvents(instrument); + } + }; + + listener.SetMeasurementEventCallback<long>((instrument, measurement, tags, state) => + { + var tagDictionary = new Dictionary<string, object?>(StringComparer.Ordinal); + foreach (var tag in tags) + { + tagDictionary[tag.Key] = tag.Value; + } + + measurements.Add(new MetricMeasurement(instrument.Name, measurement, tagDictionary)); + }); + + listener.Start(); + try + { + await action().ConfigureAwait(false); + } + finally + { + listener.Dispose(); + } + + return measurements; + } + + private static string? GetTagValue(MetricMeasurement measurement, string tag) + { + if (measurement.Tags.TryGetValue(tag, out var value)) + { + return value?.ToString(); + } + + return null; + } + + private static string CreateTestToken(string tenant, params string[] scopes) + { + var normalizedTenant = string.IsNullOrWhiteSpace(tenant) ? "default" : tenant.Trim().ToLowerInvariant(); + var scopeSet = scopes is { Length: > 0 } + ? scopes + .Select(StellaOpsScopes.Normalize) + .Where(static scope => !string.IsNullOrEmpty(scope)) + .Select(static scope => scope!) 
+ .Distinct(StringComparer.Ordinal) + .ToArray() + : Array.Empty<string>(); + + var claims = new List<Claim> + { + new Claim(StellaOpsClaimTypes.Subject, "test-user"), + new Claim(StellaOpsClaimTypes.Tenant, normalizedTenant), + new Claim(StellaOpsClaimTypes.Scope, string.Join(' ', scopeSet)) + }; + + foreach (var scope in scopeSet) + { + claims.Add(new Claim(StellaOpsClaimTypes.ScopeItem, scope)); + } + + var credentials = new SigningCredentials(TestSigningKey, SecurityAlgorithms.HmacSha256); + var now = DateTime.UtcNow; + var token = new JwtSecurityToken( + issuer: TestAuthorityIssuer, + audience: TestAuthorityAudience, + claims: claims, + notBefore: now.AddMinutes(-5), + expires: now.AddMinutes(30), + signingCredentials: credentials); + + return new JwtSecurityTokenHandler().WriteToken(token); + } + + private sealed record MetricMeasurement(string Instrument, long Value, IReadOnlyDictionary<string, object?> Tags); + + private sealed class DemoJob : IJob + { + public Task ExecuteAsync(JobExecutionContext context, CancellationToken cancellationToken) => Task.CompletedTask; + } + + private sealed class StubJobCoordinator : IJobCoordinator + { + public JobTriggerResult NextResult { get; set; } = JobTriggerResult.NotFound("not set"); + + public IReadOnlyList<JobDefinition> Definitions { get; set; } = Array.Empty<JobDefinition>(); + + public IReadOnlyList<JobRunSnapshot> RecentRuns { get; set; } = Array.Empty<JobRunSnapshot>(); + + public IReadOnlyList<JobRunSnapshot> ActiveRuns { get; set; } = Array.Empty<JobRunSnapshot>(); + + public Dictionary<Guid, JobRunSnapshot> Runs { get; } = new(); + + public Dictionary<string, JobRunSnapshot?> LastRuns { get; } = new(StringComparer.Ordinal); + + public Task<JobTriggerResult> TriggerAsync(string kind, IReadOnlyDictionary<string, object?>? parameters, string trigger, CancellationToken cancellationToken) + => Task.FromResult(NextResult); + + public Task<IReadOnlyList<JobDefinition>> GetDefinitionsAsync(CancellationToken cancellationToken) + => Task.FromResult(Definitions); + + public Task<IReadOnlyList<JobRunSnapshot>> GetRecentRunsAsync(string? kind, int limit, CancellationToken cancellationToken) + { + IEnumerable<JobRunSnapshot> query = RecentRuns; + if (!string.IsNullOrWhiteSpace(kind)) + { + query = query.Where(run => string.Equals(run.Kind, kind, StringComparison.Ordinal)); + } + + return Task.FromResult<IReadOnlyList<JobRunSnapshot>>(query.Take(limit).ToArray()); + } + + public Task<IReadOnlyList<JobRunSnapshot>> GetActiveRunsAsync(CancellationToken cancellationToken) + => Task.FromResult(ActiveRuns); + + public Task<JobRunSnapshot?> GetRunAsync(Guid runId, CancellationToken cancellationToken) + => Task.FromResult(Runs.TryGetValue(runId, out var run) ? run : null); + + public Task<JobRunSnapshot?> GetLastRunAsync(string kind, CancellationToken cancellationToken) + => Task.FromResult(LastRuns.TryGetValue(kind, out var run) ? 
run : null); + + public Task<IReadOnlyDictionary<string, JobRunSnapshot>> GetLastRunsAsync(IEnumerable<string> kinds, CancellationToken cancellationToken) + { + var map = new Dictionary<string, JobRunSnapshot>(StringComparer.Ordinal); + foreach (var kind in kinds) + { + if (kind is null) + { + continue; + } + + if (LastRuns.TryGetValue(kind, out var run) && run is not null) + { + map[kind] = run; + } + } + + return Task.FromResult<IReadOnlyDictionary<string, JobRunSnapshot>>(map); + } + } +} diff --git a/src/StellaOps.DevPortal.Site/AGENTS.md b/src/DevPortal/StellaOps.DevPortal.Site/AGENTS.md similarity index 97% rename from src/StellaOps.DevPortal.Site/AGENTS.md rename to src/DevPortal/StellaOps.DevPortal.Site/AGENTS.md index acca2bf9..acb78db8 100644 --- a/src/StellaOps.DevPortal.Site/AGENTS.md +++ b/src/DevPortal/StellaOps.DevPortal.Site/AGENTS.md @@ -1,15 +1,15 @@ -# Developer Portal Guild Charter - -## Mission -Deliver the StellaOps developer portal with interactive API reference, SDK documentation, runnable examples, and offline export capability. - -## Scope -- Static site generator integrating OpenAPI specs, code examples, and SDK docs. -- Search, schema diagrams, try-it console (non-prod), copy-curl snippets. -- Version selector for API major versions and changelog integration. -- Offline bundle build compatible with air-gapped environments. - -## Definition of Done -- Portal rebuilds deterministically from specs/examples; CI publishes artifacts. -- Search, schema visuals, examples verified via automated tests. -- Offline bundle renders without external dependencies. +# Developer Portal Guild Charter + +## Mission +Deliver the StellaOps developer portal with interactive API reference, SDK documentation, runnable examples, and offline export capability. + +## Scope +- Static site generator integrating OpenAPI specs, code examples, and SDK docs. +- Search, schema diagrams, try-it console (non-prod), copy-curl snippets. +- Version selector for API major versions and changelog integration. +- Offline bundle build compatible with air-gapped environments. + +## Definition of Done +- Portal rebuilds deterministically from specs/examples; CI publishes artifacts. +- Search, schema visuals, examples verified via automated tests. +- Offline bundle renders without external dependencies. diff --git a/src/StellaOps.DevPortal.Site/TASKS.md b/src/DevPortal/StellaOps.DevPortal.Site/TASKS.md similarity index 99% rename from src/StellaOps.DevPortal.Site/TASKS.md rename to src/DevPortal/StellaOps.DevPortal.Site/TASKS.md index 1258ab29..8ce18fcc 100644 --- a/src/StellaOps.DevPortal.Site/TASKS.md +++ b/src/DevPortal/StellaOps.DevPortal.Site/TASKS.md @@ -1,19 +1,19 @@ -# Developer Portal Task Board — Epic 17: SDKs & OpenAPI Docs - -## Sprint 62 – Static Generator Foundations -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DEVPORT-62-001 | TODO | Developer Portal Guild | OAS-61-002 | Select static site generator, integrate aggregate spec, build navigation + search scaffolding. | Portal builds locally; nav/search operational; CI pipeline in place. | -| DEVPORT-62-002 | TODO | Developer Portal Guild | DEVPORT-62-001 | Implement schema viewer, example rendering, copy-curl snippets, and version selector UI. | Schema diagrams render; examples tested; version selector toggles spec; accessibility check passes. 
| - -## Sprint 63 – Try-It & Integration -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DEVPORT-63-001 | TODO | Developer Portal Guild, Platform Guild | DEVPORT-62-002 | Add Try-It console pointing at sandbox environment with token onboarding and scope info. | Try-It executes against sandbox; safeguards enforce read-only; telemetry recorded. | -| DEVPORT-63-002 | TODO | Developer Portal Guild, SDK Generator Guild | DEVPORT-62-002, SDKGEN-63-001..4 | Embed language-specific SDK snippets and quick starts generated from tested examples. | Snippets pulled from CI-verified examples; portal tests ensure freshness. | - -## Sprint 64 – Offline Bundle & QA -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DEVPORT-64-001 | TODO | Developer Portal Guild, Export Center Guild | DEVPORT-63-001, SDKREL-64-002 | Provide offline build target bundling HTML, specs, SDK archives; ensure no external assets. | Offline bundle verified in sealed environment; docs updated. | -| DEVPORT-64-002 | TODO | Developer Portal Guild | DEVPORT-63-001 | Add automated accessibility tests, link checker, and performance budgets. | CI checks added; budgets enforced; reports archived. | +# Developer Portal Task Board — Epic 17: SDKs & OpenAPI Docs + +## Sprint 62 – Static Generator Foundations +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DEVPORT-62-001 | TODO | Developer Portal Guild | OAS-61-002 | Select static site generator, integrate aggregate spec, build navigation + search scaffolding. | Portal builds locally; nav/search operational; CI pipeline in place. | +| DEVPORT-62-002 | TODO | Developer Portal Guild | DEVPORT-62-001 | Implement schema viewer, example rendering, copy-curl snippets, and version selector UI. | Schema diagrams render; examples tested; version selector toggles spec; accessibility check passes. | + +## Sprint 63 – Try-It & Integration +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DEVPORT-63-001 | TODO | Developer Portal Guild, Platform Guild | DEVPORT-62-002 | Add Try-It console pointing at sandbox environment with token onboarding and scope info. | Try-It executes against sandbox; safeguards enforce read-only; telemetry recorded. | +| DEVPORT-63-002 | TODO | Developer Portal Guild, SDK Generator Guild | DEVPORT-62-002, SDKGEN-63-001..4 | Embed language-specific SDK snippets and quick starts generated from tested examples. | Snippets pulled from CI-verified examples; portal tests ensure freshness. | + +## Sprint 64 – Offline Bundle & QA +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DEVPORT-64-001 | TODO | Developer Portal Guild, Export Center Guild | DEVPORT-63-001, SDKREL-64-002 | Provide offline build target bundling HTML, specs, SDK archives; ensure no external assets. | Offline bundle verified in sealed environment; docs updated. | +| DEVPORT-64-002 | TODO | Developer Portal Guild | DEVPORT-63-001 | Add automated accessibility tests, link checker, and performance budgets. | CI checks added; budgets enforced; reports archived. 
| diff --git a/src/Directory.Build.props b/src/Directory.Build.props index 47a1dfb7..f125abf6 100644 --- a/src/Directory.Build.props +++ b/src/Directory.Build.props @@ -1,49 +1,49 @@ -<Project> - <PropertyGroup> - <ConcelierPluginOutputRoot Condition="'$(ConcelierPluginOutputRoot)' == ''">$(SolutionDir)StellaOps.Concelier.PluginBinaries</ConcelierPluginOutputRoot> - <ConcelierPluginOutputRoot Condition="'$(ConcelierPluginOutputRoot)' == '' and '$(SolutionDir)' == ''">$(MSBuildThisFileDirectory)StellaOps.Concelier.PluginBinaries</ConcelierPluginOutputRoot> - <AuthorityPluginOutputRoot Condition="'$(AuthorityPluginOutputRoot)' == ''">$(SolutionDir)StellaOps.Authority.PluginBinaries</AuthorityPluginOutputRoot> - <AuthorityPluginOutputRoot Condition="'$(AuthorityPluginOutputRoot)' == '' and '$(SolutionDir)' == ''">$(MSBuildThisFileDirectory)StellaOps.Authority.PluginBinaries</AuthorityPluginOutputRoot> - <IsConcelierPlugin Condition="'$(IsConcelierPlugin)' == '' and $([System.String]::Copy('$(MSBuildProjectName)').StartsWith('StellaOps.Concelier.Connector.'))">true</IsConcelierPlugin> - <IsConcelierPlugin Condition="'$(IsConcelierPlugin)' == '' and $([System.String]::Copy('$(MSBuildProjectName)').StartsWith('StellaOps.Concelier.Exporter.'))">true</IsConcelierPlugin> - <IsAuthorityPlugin Condition="'$(IsAuthorityPlugin)' == '' and $([System.String]::Copy('$(MSBuildProjectName)').StartsWith('StellaOps.Authority.Plugin.'))">true</IsAuthorityPlugin> - <NotifyPluginOutputRoot Condition="'$(NotifyPluginOutputRoot)' == '' and '$(SolutionDir)' != ''">$(SolutionDir)plugins\notify</NotifyPluginOutputRoot> - <NotifyPluginOutputRoot Condition="'$(NotifyPluginOutputRoot)' == '' and '$(SolutionDir)' == ''">$([System.IO.Path]::GetFullPath('$(MSBuildThisFileDirectory)..\plugins\notify\'))</NotifyPluginOutputRoot> - <IsNotifyPlugin Condition="'$(IsNotifyPlugin)' == '' and $([System.String]::Copy('$(MSBuildProjectName)').StartsWith('StellaOps.Notify.Connectors.')) and !$([System.String]::Copy('$(MSBuildProjectName)').EndsWith('.Tests'))">true</IsNotifyPlugin> - <IsNotifyPlugin Condition="'$(IsNotifyPlugin)' == 'true' and $([System.String]::Copy('$(MSBuildProjectName)')) == 'StellaOps.Notify.Connectors.Shared'">false</IsNotifyPlugin> - <ScannerBuildxPluginOutputRoot Condition="'$(ScannerBuildxPluginOutputRoot)' == ''">$([System.IO.Path]::GetFullPath('$(MSBuildThisFileDirectory)..\plugins\scanner\buildx\'))</ScannerBuildxPluginOutputRoot> - <IsScannerBuildxPlugin Condition="'$(IsScannerBuildxPlugin)' == '' and $([System.String]::Copy('$(MSBuildProjectName)')) == 'StellaOps.Scanner.Sbomer.BuildXPlugin'">true</IsScannerBuildxPlugin> - <ScannerOsAnalyzerPluginOutputRoot Condition="'$(ScannerOsAnalyzerPluginOutputRoot)' == ''">$([System.IO.Path]::GetFullPath('$(MSBuildThisFileDirectory)..\plugins\scanner\analyzers\os\'))</ScannerOsAnalyzerPluginOutputRoot> - <IsScannerOsAnalyzerPlugin Condition="'$(IsScannerOsAnalyzerPlugin)' == '' and $([System.String]::Copy('$(MSBuildProjectName)').StartsWith('StellaOps.Scanner.Analyzers.OS.')) and !$([System.String]::Copy('$(MSBuildProjectName)').EndsWith('.Tests'))">true</IsScannerOsAnalyzerPlugin> - <ScannerLangAnalyzerPluginOutputRoot Condition="'$(ScannerLangAnalyzerPluginOutputRoot)' == ''">$([System.IO.Path]::GetFullPath('$(MSBuildThisFileDirectory)..\plugins\scanner\analyzers\lang\'))</ScannerLangAnalyzerPluginOutputRoot> - <IsScannerLangAnalyzerPlugin Condition="'$(IsScannerLangAnalyzerPlugin)' == '' and 
$([System.String]::Copy('$(MSBuildProjectName)').StartsWith('StellaOps.Scanner.Analyzers.Lang.'))">true</IsScannerLangAnalyzerPlugin> - <UseConcelierTestInfra Condition="'$(UseConcelierTestInfra)' == ''">true</UseConcelierTestInfra> - </PropertyGroup> - - <ItemGroup> - <ProjectReference Update="../StellaOps.Plugin/StellaOps.Plugin.csproj"> - <Private>false</Private> - <ExcludeAssets>runtime</ExcludeAssets> - </ProjectReference> - </ItemGroup> - - <ItemGroup> - <PackageReference Update="MongoDB.Driver" Version="3.5.0" /> - <PackageReference Include="SharpCompress" Version="0.41.0" /> - </ItemGroup> - - <ItemGroup Condition="$([System.String]::Copy('$(MSBuildProjectName)').EndsWith('.Tests')) and '$(UseConcelierTestInfra)' != 'false'"> - <PackageReference Include="coverlet.collector" Version="6.0.4" /> - <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" /> - <PackageReference Include="Microsoft.AspNetCore.Mvc.Testing" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Mongo2Go" Version="4.1.0" /> - <PackageReference Include="xunit" Version="2.9.2" /> - <PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" /> - <PackageReference Include="Microsoft.Extensions.TimeProvider.Testing" Version="9.10.0" /> - <Compile Include="$(MSBuildThisFileDirectory)StellaOps.Concelier.Tests.Shared\AssemblyInfo.cs" Link="Shared\AssemblyInfo.cs" /> - <Compile Include="$(MSBuildThisFileDirectory)StellaOps.Concelier.Tests.Shared\MongoFixtureCollection.cs" Link="Shared\MongoFixtureCollection.cs" /> - <ProjectReference Include="$(MSBuildThisFileDirectory)StellaOps.Concelier.Testing\StellaOps.Concelier.Testing.csproj" /> - <Using Include="StellaOps.Concelier.Testing" /> - <Using Include="Xunit" /> - </ItemGroup> -</Project> +<Project> + <PropertyGroup> + <ConcelierPluginOutputRoot Condition="'$(ConcelierPluginOutputRoot)' == ''">$(SolutionDir)StellaOps.Concelier.PluginBinaries</ConcelierPluginOutputRoot> + <ConcelierPluginOutputRoot Condition="'$(ConcelierPluginOutputRoot)' == '' and '$(SolutionDir)' == ''">$(MSBuildThisFileDirectory)StellaOps.Concelier.PluginBinaries</ConcelierPluginOutputRoot> + <AuthorityPluginOutputRoot Condition="'$(AuthorityPluginOutputRoot)' == ''">$(SolutionDir)StellaOps.Authority.PluginBinaries</AuthorityPluginOutputRoot> + <AuthorityPluginOutputRoot Condition="'$(AuthorityPluginOutputRoot)' == '' and '$(SolutionDir)' == ''">$(MSBuildThisFileDirectory)StellaOps.Authority.PluginBinaries</AuthorityPluginOutputRoot> + <IsConcelierPlugin Condition="'$(IsConcelierPlugin)' == '' and $([System.String]::Copy('$(MSBuildProjectName)').StartsWith('StellaOps.Concelier.Connector.'))">true</IsConcelierPlugin> + <IsConcelierPlugin Condition="'$(IsConcelierPlugin)' == '' and $([System.String]::Copy('$(MSBuildProjectName)').StartsWith('StellaOps.Concelier.Exporter.'))">true</IsConcelierPlugin> + <IsAuthorityPlugin Condition="'$(IsAuthorityPlugin)' == '' and $([System.String]::Copy('$(MSBuildProjectName)').StartsWith('StellaOps.Authority.Plugin.'))">true</IsAuthorityPlugin> + <NotifyPluginOutputRoot Condition="'$(NotifyPluginOutputRoot)' == '' and '$(SolutionDir)' != ''">$(SolutionDir)plugins\notify</NotifyPluginOutputRoot> + <NotifyPluginOutputRoot Condition="'$(NotifyPluginOutputRoot)' == '' and '$(SolutionDir)' == ''">$([System.IO.Path]::GetFullPath('$(MSBuildThisFileDirectory)..\plugins\notify\'))</NotifyPluginOutputRoot> + <IsNotifyPlugin Condition="'$(IsNotifyPlugin)' == '' and 
$([System.String]::Copy('$(MSBuildProjectName)').StartsWith('StellaOps.Notify.Connectors.')) and !$([System.String]::Copy('$(MSBuildProjectName)').EndsWith('.Tests'))">true</IsNotifyPlugin> + <IsNotifyPlugin Condition="'$(IsNotifyPlugin)' == 'true' and $([System.String]::Copy('$(MSBuildProjectName)')) == 'StellaOps.Notify.Connectors.Shared'">false</IsNotifyPlugin> + <ScannerBuildxPluginOutputRoot Condition="'$(ScannerBuildxPluginOutputRoot)' == ''">$([System.IO.Path]::GetFullPath('$(MSBuildThisFileDirectory)..\plugins\scanner\buildx\'))</ScannerBuildxPluginOutputRoot> + <IsScannerBuildxPlugin Condition="'$(IsScannerBuildxPlugin)' == '' and $([System.String]::Copy('$(MSBuildProjectName)')) == 'StellaOps.Scanner.Sbomer.BuildXPlugin'">true</IsScannerBuildxPlugin> + <ScannerOsAnalyzerPluginOutputRoot Condition="'$(ScannerOsAnalyzerPluginOutputRoot)' == ''">$([System.IO.Path]::GetFullPath('$(MSBuildThisFileDirectory)..\plugins\scanner\analyzers\os\'))</ScannerOsAnalyzerPluginOutputRoot> + <IsScannerOsAnalyzerPlugin Condition="'$(IsScannerOsAnalyzerPlugin)' == '' and $([System.String]::Copy('$(MSBuildProjectName)').StartsWith('StellaOps.Scanner.Analyzers.OS.')) and !$([System.String]::Copy('$(MSBuildProjectName)').EndsWith('.Tests'))">true</IsScannerOsAnalyzerPlugin> + <ScannerLangAnalyzerPluginOutputRoot Condition="'$(ScannerLangAnalyzerPluginOutputRoot)' == ''">$([System.IO.Path]::GetFullPath('$(MSBuildThisFileDirectory)..\plugins\scanner\analyzers\lang\'))</ScannerLangAnalyzerPluginOutputRoot> + <IsScannerLangAnalyzerPlugin Condition="'$(IsScannerLangAnalyzerPlugin)' == '' and $([System.String]::Copy('$(MSBuildProjectName)').StartsWith('StellaOps.Scanner.Analyzers.Lang.'))">true</IsScannerLangAnalyzerPlugin> + <UseConcelierTestInfra Condition="'$(UseConcelierTestInfra)' == ''">true</UseConcelierTestInfra> + </PropertyGroup> + + <ItemGroup> + <ProjectReference Update="../StellaOps.Plugin/StellaOps.Plugin.csproj"> + <Private>false</Private> + <ExcludeAssets>runtime</ExcludeAssets> + </ProjectReference> + </ItemGroup> + + <ItemGroup> + <PackageReference Update="MongoDB.Driver" Version="3.5.0" /> + <PackageReference Include="SharpCompress" Version="0.41.0" /> + </ItemGroup> + + <ItemGroup Condition="$([System.String]::Copy('$(MSBuildProjectName)').EndsWith('.Tests')) and '$(UseConcelierTestInfra)' != 'false'"> + <PackageReference Include="coverlet.collector" Version="6.0.4" /> + <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.0" /> + <PackageReference Include="Microsoft.AspNetCore.Mvc.Testing" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Mongo2Go" Version="4.1.0" /> + <PackageReference Include="xunit" Version="2.9.2" /> + <PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" /> + <PackageReference Include="Microsoft.Extensions.TimeProvider.Testing" Version="9.10.0" /> + <Compile Include="$(MSBuildThisFileDirectory)StellaOps.Concelier.Tests.Shared\AssemblyInfo.cs" Link="Shared\AssemblyInfo.cs" /> + <Compile Include="$(MSBuildThisFileDirectory)StellaOps.Concelier.Tests.Shared\MongoFixtureCollection.cs" Link="Shared\MongoFixtureCollection.cs" /> + <ProjectReference Include="$(MSBuildThisFileDirectory)StellaOps.Concelier.Testing\StellaOps.Concelier.Testing.csproj" /> + <Using Include="StellaOps.Concelier.Testing" /> + <Using Include="Xunit" /> + </ItemGroup> +</Project> diff --git a/src/EvidenceLocker/StellaOps.EvidenceLocker.sln b/src/EvidenceLocker/StellaOps.EvidenceLocker.sln new file mode 100644 index 00000000..7ff9e286 --- /dev/null +++ 
b/src/EvidenceLocker/StellaOps.EvidenceLocker.sln @@ -0,0 +1,99 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.0.31903.59 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "StellaOps.EvidenceLocker", "StellaOps.EvidenceLocker", "{C00E0960-6835-1015-8CF8-33BE288CF82B}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.EvidenceLocker.Core", "StellaOps.EvidenceLocker\StellaOps.EvidenceLocker.Core\StellaOps.EvidenceLocker.Core.csproj", "{72488782-AB4D-4859-BF7D-4329B3326617}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.EvidenceLocker.Infrastructure", "StellaOps.EvidenceLocker\StellaOps.EvidenceLocker.Infrastructure\StellaOps.EvidenceLocker.Infrastructure.csproj", "{5769AA55-A733-463F-BCDA-D8818C79909A}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.EvidenceLocker.Tests", "StellaOps.EvidenceLocker\StellaOps.EvidenceLocker.Tests\StellaOps.EvidenceLocker.Tests.csproj", "{0A08535C-40FC-433D-A3CB-AAA72BE61408}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.EvidenceLocker.WebService", "StellaOps.EvidenceLocker\StellaOps.EvidenceLocker.WebService\StellaOps.EvidenceLocker.WebService.csproj", "{EB1671CD-1D63-4D69-A6F7-4EA5BC93F002}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.EvidenceLocker.Worker", "StellaOps.EvidenceLocker\StellaOps.EvidenceLocker.Worker\StellaOps.EvidenceLocker.Worker.csproj", "{EAE26E97-F971-480F-9C7D-A42D20A63592}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|Any CPU = Release|Any CPU + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {72488782-AB4D-4859-BF7D-4329B3326617}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {72488782-AB4D-4859-BF7D-4329B3326617}.Debug|Any CPU.Build.0 = Debug|Any CPU + {72488782-AB4D-4859-BF7D-4329B3326617}.Debug|x64.ActiveCfg = Debug|Any CPU + {72488782-AB4D-4859-BF7D-4329B3326617}.Debug|x64.Build.0 = Debug|Any CPU + {72488782-AB4D-4859-BF7D-4329B3326617}.Debug|x86.ActiveCfg = Debug|Any CPU + {72488782-AB4D-4859-BF7D-4329B3326617}.Debug|x86.Build.0 = Debug|Any CPU + {72488782-AB4D-4859-BF7D-4329B3326617}.Release|Any CPU.ActiveCfg = Release|Any CPU + {72488782-AB4D-4859-BF7D-4329B3326617}.Release|Any CPU.Build.0 = Release|Any CPU + {72488782-AB4D-4859-BF7D-4329B3326617}.Release|x64.ActiveCfg = Release|Any CPU + {72488782-AB4D-4859-BF7D-4329B3326617}.Release|x64.Build.0 = Release|Any CPU + {72488782-AB4D-4859-BF7D-4329B3326617}.Release|x86.ActiveCfg = Release|Any CPU + {72488782-AB4D-4859-BF7D-4329B3326617}.Release|x86.Build.0 = Release|Any CPU + {5769AA55-A733-463F-BCDA-D8818C79909A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {5769AA55-A733-463F-BCDA-D8818C79909A}.Debug|Any CPU.Build.0 = Debug|Any CPU + {5769AA55-A733-463F-BCDA-D8818C79909A}.Debug|x64.ActiveCfg = Debug|Any CPU + {5769AA55-A733-463F-BCDA-D8818C79909A}.Debug|x64.Build.0 = Debug|Any CPU + {5769AA55-A733-463F-BCDA-D8818C79909A}.Debug|x86.ActiveCfg = Debug|Any CPU + {5769AA55-A733-463F-BCDA-D8818C79909A}.Debug|x86.Build.0 = Debug|Any CPU + {5769AA55-A733-463F-BCDA-D8818C79909A}.Release|Any CPU.ActiveCfg = Release|Any CPU + {5769AA55-A733-463F-BCDA-D8818C79909A}.Release|Any CPU.Build.0 = Release|Any CPU + 
{5769AA55-A733-463F-BCDA-D8818C79909A}.Release|x64.ActiveCfg = Release|Any CPU + {5769AA55-A733-463F-BCDA-D8818C79909A}.Release|x64.Build.0 = Release|Any CPU + {5769AA55-A733-463F-BCDA-D8818C79909A}.Release|x86.ActiveCfg = Release|Any CPU + {5769AA55-A733-463F-BCDA-D8818C79909A}.Release|x86.Build.0 = Release|Any CPU + {0A08535C-40FC-433D-A3CB-AAA72BE61408}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {0A08535C-40FC-433D-A3CB-AAA72BE61408}.Debug|Any CPU.Build.0 = Debug|Any CPU + {0A08535C-40FC-433D-A3CB-AAA72BE61408}.Debug|x64.ActiveCfg = Debug|Any CPU + {0A08535C-40FC-433D-A3CB-AAA72BE61408}.Debug|x64.Build.0 = Debug|Any CPU + {0A08535C-40FC-433D-A3CB-AAA72BE61408}.Debug|x86.ActiveCfg = Debug|Any CPU + {0A08535C-40FC-433D-A3CB-AAA72BE61408}.Debug|x86.Build.0 = Debug|Any CPU + {0A08535C-40FC-433D-A3CB-AAA72BE61408}.Release|Any CPU.ActiveCfg = Release|Any CPU + {0A08535C-40FC-433D-A3CB-AAA72BE61408}.Release|Any CPU.Build.0 = Release|Any CPU + {0A08535C-40FC-433D-A3CB-AAA72BE61408}.Release|x64.ActiveCfg = Release|Any CPU + {0A08535C-40FC-433D-A3CB-AAA72BE61408}.Release|x64.Build.0 = Release|Any CPU + {0A08535C-40FC-433D-A3CB-AAA72BE61408}.Release|x86.ActiveCfg = Release|Any CPU + {0A08535C-40FC-433D-A3CB-AAA72BE61408}.Release|x86.Build.0 = Release|Any CPU + {EB1671CD-1D63-4D69-A6F7-4EA5BC93F002}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {EB1671CD-1D63-4D69-A6F7-4EA5BC93F002}.Debug|Any CPU.Build.0 = Debug|Any CPU + {EB1671CD-1D63-4D69-A6F7-4EA5BC93F002}.Debug|x64.ActiveCfg = Debug|Any CPU + {EB1671CD-1D63-4D69-A6F7-4EA5BC93F002}.Debug|x64.Build.0 = Debug|Any CPU + {EB1671CD-1D63-4D69-A6F7-4EA5BC93F002}.Debug|x86.ActiveCfg = Debug|Any CPU + {EB1671CD-1D63-4D69-A6F7-4EA5BC93F002}.Debug|x86.Build.0 = Debug|Any CPU + {EB1671CD-1D63-4D69-A6F7-4EA5BC93F002}.Release|Any CPU.ActiveCfg = Release|Any CPU + {EB1671CD-1D63-4D69-A6F7-4EA5BC93F002}.Release|Any CPU.Build.0 = Release|Any CPU + {EB1671CD-1D63-4D69-A6F7-4EA5BC93F002}.Release|x64.ActiveCfg = Release|Any CPU + {EB1671CD-1D63-4D69-A6F7-4EA5BC93F002}.Release|x64.Build.0 = Release|Any CPU + {EB1671CD-1D63-4D69-A6F7-4EA5BC93F002}.Release|x86.ActiveCfg = Release|Any CPU + {EB1671CD-1D63-4D69-A6F7-4EA5BC93F002}.Release|x86.Build.0 = Release|Any CPU + {EAE26E97-F971-480F-9C7D-A42D20A63592}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {EAE26E97-F971-480F-9C7D-A42D20A63592}.Debug|Any CPU.Build.0 = Debug|Any CPU + {EAE26E97-F971-480F-9C7D-A42D20A63592}.Debug|x64.ActiveCfg = Debug|Any CPU + {EAE26E97-F971-480F-9C7D-A42D20A63592}.Debug|x64.Build.0 = Debug|Any CPU + {EAE26E97-F971-480F-9C7D-A42D20A63592}.Debug|x86.ActiveCfg = Debug|Any CPU + {EAE26E97-F971-480F-9C7D-A42D20A63592}.Debug|x86.Build.0 = Debug|Any CPU + {EAE26E97-F971-480F-9C7D-A42D20A63592}.Release|Any CPU.ActiveCfg = Release|Any CPU + {EAE26E97-F971-480F-9C7D-A42D20A63592}.Release|Any CPU.Build.0 = Release|Any CPU + {EAE26E97-F971-480F-9C7D-A42D20A63592}.Release|x64.ActiveCfg = Release|Any CPU + {EAE26E97-F971-480F-9C7D-A42D20A63592}.Release|x64.Build.0 = Release|Any CPU + {EAE26E97-F971-480F-9C7D-A42D20A63592}.Release|x86.ActiveCfg = Release|Any CPU + {EAE26E97-F971-480F-9C7D-A42D20A63592}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(NestedProjects) = preSolution + {72488782-AB4D-4859-BF7D-4329B3326617} = {C00E0960-6835-1015-8CF8-33BE288CF82B} + {5769AA55-A733-463F-BCDA-D8818C79909A} = {C00E0960-6835-1015-8CF8-33BE288CF82B} + {0A08535C-40FC-433D-A3CB-AAA72BE61408} = 
{C00E0960-6835-1015-8CF8-33BE288CF82B} + {EB1671CD-1D63-4D69-A6F7-4EA5BC93F002} = {C00E0960-6835-1015-8CF8-33BE288CF82B} + {EAE26E97-F971-480F-9C7D-A42D20A63592} = {C00E0960-6835-1015-8CF8-33BE288CF82B} + EndGlobalSection +EndGlobal diff --git a/src/StellaOps.EvidenceLocker/AGENTS.md b/src/EvidenceLocker/StellaOps.EvidenceLocker/AGENTS.md similarity index 98% rename from src/StellaOps.EvidenceLocker/AGENTS.md rename to src/EvidenceLocker/StellaOps.EvidenceLocker/AGENTS.md index 43bc666f..b32d93e5 100644 --- a/src/StellaOps.EvidenceLocker/AGENTS.md +++ b/src/EvidenceLocker/StellaOps.EvidenceLocker/AGENTS.md @@ -1,28 +1,28 @@ -# Evidence Locker Service — Agent Charter - -## Mission -Implement the append-only, tenant-scoped evidence locker detailed in Epic 15. Produce immutable evidence bundles, manage legal holds, and expose verification APIs for Console and CLI consumers under the imposed rule. - -## Responsibilities -- Define object store layout, metadata DB schemas, and retention policies. -- Build bundle assembly pipelines (evaluation, job, export) with Merkle manifests and DSSE signing. -- Provide verification, download, and legal hold APIs with audit trails. -- Integrate with Timeline Indexer, Exporter, Orchestrator, Policy Engine, Concelier, and Excitator for provenance linking. - -## Coordination -- Work with Provenance Guild for signature tooling. -- Partner with DevOps Guild on storage backends and WORM options. -- Align with Security Guild on redaction and access enforcement. - -## Definition of Done -- Deterministic bundle generation proven via integration tests. -- Object store interactions tested in offline mode. -- Runbooks in `/docs/forensics/evidence-locker.md` updated per release. - -## Module Layout -- `StellaOps.EvidenceLocker.Core/` — domain models, bundle contracts, deterministic hashing helpers. -- `StellaOps.EvidenceLocker.Infrastructure/` — storage abstractions, persistence plumbing, and external integrations. -- `StellaOps.EvidenceLocker.WebService/` — HTTP entry points (minimal API host, OpenAPI, auth). -- `StellaOps.EvidenceLocker.Worker/` — background assembly/verification pipelines. -- `StellaOps.EvidenceLocker.Tests/` — unit tests (xUnit) for core/infrastructure components. -- `StellaOps.EvidenceLocker.sln` — solution aggregating the module projects. +# Evidence Locker Service — Agent Charter + +## Mission +Implement the append-only, tenant-scoped evidence locker detailed in Epic 15. Produce immutable evidence bundles, manage legal holds, and expose verification APIs for Console and CLI consumers under the imposed rule. + +## Responsibilities +- Define object store layout, metadata DB schemas, and retention policies. +- Build bundle assembly pipelines (evaluation, job, export) with Merkle manifests and DSSE signing. +- Provide verification, download, and legal hold APIs with audit trails. +- Integrate with Timeline Indexer, Exporter, Orchestrator, Policy Engine, Concelier, and Excitator for provenance linking. + +## Coordination +- Work with Provenance Guild for signature tooling. +- Partner with DevOps Guild on storage backends and WORM options. +- Align with Security Guild on redaction and access enforcement. + +## Definition of Done +- Deterministic bundle generation proven via integration tests. +- Object store interactions tested in offline mode. +- Runbooks in `/docs/forensics/evidence-locker.md` updated per release. + +## Module Layout +- `StellaOps.EvidenceLocker.Core/` — domain models, bundle contracts, deterministic hashing helpers. 
+- `StellaOps.EvidenceLocker.Infrastructure/` — storage abstractions, persistence plumbing, and external integrations. +- `StellaOps.EvidenceLocker.WebService/` — HTTP entry points (minimal API host, OpenAPI, auth). +- `StellaOps.EvidenceLocker.Worker/` — background assembly/verification pipelines. +- `StellaOps.EvidenceLocker.Tests/` — unit tests (xUnit) for core/infrastructure components. +- `StellaOps.EvidenceLocker.sln` — solution aggregating the module projects. diff --git a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Core/Class1.cs b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Core/Class1.cs similarity index 92% rename from src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Core/Class1.cs rename to src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Core/Class1.cs index f0526cd7..8f0fc966 100644 --- a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Core/Class1.cs +++ b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Core/Class1.cs @@ -1,6 +1,6 @@ -namespace StellaOps.EvidenceLocker.Core; - -public class Class1 -{ - -} +namespace StellaOps.EvidenceLocker.Core; + +public class Class1 +{ + +} diff --git a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Core/StellaOps.EvidenceLocker.Core.csproj b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Core/StellaOps.EvidenceLocker.Core.csproj similarity index 95% rename from src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Core/StellaOps.EvidenceLocker.Core.csproj rename to src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Core/StellaOps.EvidenceLocker.Core.csproj index fe0eef44..e4808f0d 100644 --- a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Core/StellaOps.EvidenceLocker.Core.csproj +++ b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Core/StellaOps.EvidenceLocker.Core.csproj @@ -1,18 +1,18 @@ -<?xml version="1.0" ?> -<Project Sdk="Microsoft.NET.Sdk"> - - - - <PropertyGroup> - - - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <LangVersion>preview</LangVersion> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - - - -</Project> +<?xml version="1.0" ?> +<Project Sdk="Microsoft.NET.Sdk"> + + + + <PropertyGroup> + + + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <LangVersion>preview</LangVersion> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + + + +</Project> diff --git a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Infrastructure/Class1.cs b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Infrastructure/Class1.cs similarity index 93% rename from src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Infrastructure/Class1.cs rename to src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Infrastructure/Class1.cs index 76ce8382..276dca1c 100644 --- a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Infrastructure/Class1.cs +++ b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Infrastructure/Class1.cs @@ -1,6 +1,6 @@ -namespace StellaOps.EvidenceLocker.Infrastructure; - -public class Class1 -{ - -} +namespace StellaOps.EvidenceLocker.Infrastructure; + +public class Class1 +{ + +} diff --git a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Infrastructure/StellaOps.EvidenceLocker.Infrastructure.csproj 
b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Infrastructure/StellaOps.EvidenceLocker.Infrastructure.csproj similarity index 94% rename from src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Infrastructure/StellaOps.EvidenceLocker.Infrastructure.csproj rename to src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Infrastructure/StellaOps.EvidenceLocker.Infrastructure.csproj index 257f0bdf..736994d3 100644 --- a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Infrastructure/StellaOps.EvidenceLocker.Infrastructure.csproj +++ b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Infrastructure/StellaOps.EvidenceLocker.Infrastructure.csproj @@ -1,28 +1,28 @@ -<?xml version="1.0" ?> -<Project Sdk="Microsoft.NET.Sdk"> - - - - <ItemGroup> - - - <ProjectReference Include="..\StellaOps.EvidenceLocker.Core\StellaOps.EvidenceLocker.Core.csproj"/> - - - </ItemGroup> - - - - <PropertyGroup> - - - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <LangVersion>preview</LangVersion> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - - - -</Project> +<?xml version="1.0" ?> +<Project Sdk="Microsoft.NET.Sdk"> + + + + <ItemGroup> + + + <ProjectReference Include="..\StellaOps.EvidenceLocker.Core\StellaOps.EvidenceLocker.Core.csproj"/> + + + </ItemGroup> + + + + <PropertyGroup> + + + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <LangVersion>preview</LangVersion> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + + + +</Project> diff --git a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Tests/StellaOps.EvidenceLocker.Tests.csproj b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Tests/StellaOps.EvidenceLocker.Tests.csproj similarity index 91% rename from src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Tests/StellaOps.EvidenceLocker.Tests.csproj rename to src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Tests/StellaOps.EvidenceLocker.Tests.csproj index b035d61a..1274dc23 100644 --- a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Tests/StellaOps.EvidenceLocker.Tests.csproj +++ b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Tests/StellaOps.EvidenceLocker.Tests.csproj @@ -1,135 +1,135 @@ -<?xml version="1.0" ?> -<Project Sdk="Microsoft.NET.Sdk"> - - - - - - <PropertyGroup> - - - - - <OutputType>Exe</OutputType> - - - - - <IsPackable>false</IsPackable> - - - - - - - - - - - - - - <TargetFramework>net10.0</TargetFramework> - - - <ImplicitUsings>enable</ImplicitUsings> - - - <Nullable>enable</Nullable> - - - <UseConcelierTestInfra>false</UseConcelierTestInfra> - - - <LangVersion>preview</LangVersion> - - - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - - - </PropertyGroup> - - - - - - <ItemGroup> - - - - - <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1"/> - - - - - <PackageReference Include="xunit.v3" Version="3.0.0"/> - - - - - <PackageReference Include="xunit.runner.visualstudio" Version="3.1.3"/> - - - - - </ItemGroup> - - - - - - <ItemGroup> - - - - - <Content Include="xunit.runner.json" CopyToOutputDirectory="PreserveNewest"/> - - - - - </ItemGroup> - - - - - - <ItemGroup> - - - - - <Using Include="Xunit"/> - - - - - </ItemGroup> - - - - - - <ItemGroup> - - - - - <ProjectReference 
Include="..\StellaOps.EvidenceLocker.Core\StellaOps.EvidenceLocker.Core.csproj"/> - - - - - <ProjectReference Include="..\StellaOps.EvidenceLocker.Infrastructure\StellaOps.EvidenceLocker.Infrastructure.csproj"/> - - - - - </ItemGroup> - - - - - -</Project> +<?xml version="1.0" ?> +<Project Sdk="Microsoft.NET.Sdk"> + + + + + + <PropertyGroup> + + + + + <OutputType>Exe</OutputType> + + + + + <IsPackable>false</IsPackable> + + + + + + + + + + + + + + <TargetFramework>net10.0</TargetFramework> + + + <ImplicitUsings>enable</ImplicitUsings> + + + <Nullable>enable</Nullable> + + + <UseConcelierTestInfra>false</UseConcelierTestInfra> + + + <LangVersion>preview</LangVersion> + + + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + + + </PropertyGroup> + + + + + + <ItemGroup> + + + + + <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1"/> + + + + + <PackageReference Include="xunit.v3" Version="3.0.0"/> + + + + + <PackageReference Include="xunit.runner.visualstudio" Version="3.1.3"/> + + + + + </ItemGroup> + + + + + + <ItemGroup> + + + + + <Content Include="xunit.runner.json" CopyToOutputDirectory="PreserveNewest"/> + + + + + </ItemGroup> + + + + + + <ItemGroup> + + + + + <Using Include="Xunit"/> + + + + + </ItemGroup> + + + + + + <ItemGroup> + + + + + <ProjectReference Include="..\StellaOps.EvidenceLocker.Core\StellaOps.EvidenceLocker.Core.csproj"/> + + + + + <ProjectReference Include="..\StellaOps.EvidenceLocker.Infrastructure\StellaOps.EvidenceLocker.Infrastructure.csproj"/> + + + + + </ItemGroup> + + + + + +</Project> diff --git a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Tests/UnitTest1.cs b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Tests/UnitTest1.cs similarity index 92% rename from src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Tests/UnitTest1.cs rename to src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Tests/UnitTest1.cs index 4f92503f..66b5209c 100644 --- a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Tests/UnitTest1.cs +++ b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Tests/UnitTest1.cs @@ -1,10 +1,10 @@ -namespace StellaOps.EvidenceLocker.Tests; - -public class UnitTest1 -{ - [Fact] - public void Test1() - { - - } -} +namespace StellaOps.EvidenceLocker.Tests; + +public class UnitTest1 +{ + [Fact] + public void Test1() + { + + } +} diff --git a/src/StellaOps.Aoc.Tests/xunit.runner.json b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Tests/xunit.runner.json similarity index 96% rename from src/StellaOps.Aoc.Tests/xunit.runner.json rename to src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Tests/xunit.runner.json index 86c7ea05..249d815c 100644 --- a/src/StellaOps.Aoc.Tests/xunit.runner.json +++ b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Tests/xunit.runner.json @@ -1,3 +1,3 @@ -{ - "$schema": "https://xunit.net/schema/current/xunit.runner.schema.json" -} +{ + "$schema": "https://xunit.net/schema/current/xunit.runner.schema.json" +} diff --git a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/Program.cs b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/Program.cs similarity index 96% rename from src/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/Program.cs rename to src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/Program.cs index ee9d65d6..3917ef1b 100644 --- 
a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/Program.cs +++ b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/Program.cs @@ -1,41 +1,41 @@ -var builder = WebApplication.CreateBuilder(args); - -// Add services to the container. -// Learn more about configuring OpenAPI at https://aka.ms/aspnet/openapi -builder.Services.AddOpenApi(); - -var app = builder.Build(); - -// Configure the HTTP request pipeline. -if (app.Environment.IsDevelopment()) -{ - app.MapOpenApi(); -} - -app.UseHttpsRedirection(); - -var summaries = new[] -{ - "Freezing", "Bracing", "Chilly", "Cool", "Mild", "Warm", "Balmy", "Hot", "Sweltering", "Scorching" -}; - -app.MapGet("/weatherforecast", () => -{ - var forecast = Enumerable.Range(1, 5).Select(index => - new WeatherForecast - ( - DateOnly.FromDateTime(DateTime.Now.AddDays(index)), - Random.Shared.Next(-20, 55), - summaries[Random.Shared.Next(summaries.Length)] - )) - .ToArray(); - return forecast; -}) -.WithName("GetWeatherForecast"); - -app.Run(); - -record WeatherForecast(DateOnly Date, int TemperatureC, string? Summary) -{ - public int TemperatureF => 32 + (int)(TemperatureC / 0.5556); -} +var builder = WebApplication.CreateBuilder(args); + +// Add services to the container. +// Learn more about configuring OpenAPI at https://aka.ms/aspnet/openapi +builder.Services.AddOpenApi(); + +var app = builder.Build(); + +// Configure the HTTP request pipeline. +if (app.Environment.IsDevelopment()) +{ + app.MapOpenApi(); +} + +app.UseHttpsRedirection(); + +var summaries = new[] +{ + "Freezing", "Bracing", "Chilly", "Cool", "Mild", "Warm", "Balmy", "Hot", "Sweltering", "Scorching" +}; + +app.MapGet("/weatherforecast", () => +{ + var forecast = Enumerable.Range(1, 5).Select(index => + new WeatherForecast + ( + DateOnly.FromDateTime(DateTime.Now.AddDays(index)), + Random.Shared.Next(-20, 55), + summaries[Random.Shared.Next(summaries.Length)] + )) + .ToArray(); + return forecast; +}) +.WithName("GetWeatherForecast"); + +app.Run(); + +record WeatherForecast(DateOnly Date, int TemperatureC, string? 
Summary) +{ + public int TemperatureF => 32 + (int)(TemperatureC / 0.5556); +} diff --git a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/Properties/launchSettings.json b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/Properties/launchSettings.json similarity index 96% rename from src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/Properties/launchSettings.json rename to src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/Properties/launchSettings.json index a91c327e..4715af0a 100644 --- a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/Properties/launchSettings.json +++ b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/Properties/launchSettings.json @@ -1,23 +1,23 @@ -{ - "$schema": "https://json.schemastore.org/launchsettings.json", - "profiles": { - "http": { - "commandName": "Project", - "dotnetRunMessages": true, - "launchBrowser": false, - "applicationUrl": "http://localhost:5115", - "environmentVariables": { - "ASPNETCORE_ENVIRONMENT": "Development" - } - }, - "https": { - "commandName": "Project", - "dotnetRunMessages": true, - "launchBrowser": false, - "applicationUrl": "https://localhost:7010;http://localhost:5115", - "environmentVariables": { - "ASPNETCORE_ENVIRONMENT": "Development" - } - } - } -} +{ + "$schema": "https://json.schemastore.org/launchsettings.json", + "profiles": { + "http": { + "commandName": "Project", + "dotnetRunMessages": true, + "launchBrowser": false, + "applicationUrl": "http://localhost:5115", + "environmentVariables": { + "ASPNETCORE_ENVIRONMENT": "Development" + } + }, + "https": { + "commandName": "Project", + "dotnetRunMessages": true, + "launchBrowser": false, + "applicationUrl": "https://localhost:7010;http://localhost:5115", + "environmentVariables": { + "ASPNETCORE_ENVIRONMENT": "Development" + } + } + } +} diff --git a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/StellaOps.EvidenceLocker.WebService.csproj b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/StellaOps.EvidenceLocker.WebService.csproj similarity index 95% rename from src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/StellaOps.EvidenceLocker.WebService.csproj rename to src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/StellaOps.EvidenceLocker.WebService.csproj index 24d43152..5f453c1c 100644 --- a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/StellaOps.EvidenceLocker.WebService.csproj +++ b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/StellaOps.EvidenceLocker.WebService.csproj @@ -1,41 +1,41 @@ -<?xml version="1.0" ?> -<Project Sdk="Microsoft.NET.Sdk.Web"> - - - - <PropertyGroup> - - - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <LangVersion>preview</LangVersion> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - - - - <ItemGroup> - - - <PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="10.0.0-rc.2.25502.107"/> - - - </ItemGroup> - - - - <ItemGroup> - - - <ProjectReference Include="..\StellaOps.EvidenceLocker.Core\StellaOps.EvidenceLocker.Core.csproj"/> - - - <ProjectReference Include="..\StellaOps.EvidenceLocker.Infrastructure\StellaOps.EvidenceLocker.Infrastructure.csproj"/> - - - </ItemGroup> - - - -</Project> +<?xml version="1.0" ?> +<Project Sdk="Microsoft.NET.Sdk.Web"> + + + + 
<PropertyGroup> + + + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <LangVersion>preview</LangVersion> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + + + + <ItemGroup> + + + <PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="10.0.0-rc.2.25502.107"/> + + + </ItemGroup> + + + + <ItemGroup> + + + <ProjectReference Include="..\StellaOps.EvidenceLocker.Core\StellaOps.EvidenceLocker.Core.csproj"/> + + + <ProjectReference Include="..\StellaOps.EvidenceLocker.Infrastructure\StellaOps.EvidenceLocker.Infrastructure.csproj"/> + + + </ItemGroup> + + + +</Project> diff --git a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/StellaOps.EvidenceLocker.WebService.http b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/StellaOps.EvidenceLocker.WebService.http similarity index 96% rename from src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/StellaOps.EvidenceLocker.WebService.http rename to src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/StellaOps.EvidenceLocker.WebService.http index d6a7ac68..cfb08f06 100644 --- a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/StellaOps.EvidenceLocker.WebService.http +++ b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/StellaOps.EvidenceLocker.WebService.http @@ -1,6 +1,6 @@ -@StellaOps.EvidenceLocker.WebService_HostAddress = http://localhost:5115 - -GET {{StellaOps.EvidenceLocker.WebService_HostAddress}}/weatherforecast/ -Accept: application/json - -### +@StellaOps.EvidenceLocker.WebService_HostAddress = http://localhost:5115 + +GET {{StellaOps.EvidenceLocker.WebService_HostAddress}}/weatherforecast/ +Accept: application/json + +### diff --git a/src/StellaOps.Notifier/StellaOps.Notifier.WebService/appsettings.Development.json b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/appsettings.Development.json similarity index 93% rename from src/StellaOps.Notifier/StellaOps.Notifier.WebService/appsettings.Development.json rename to src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/appsettings.Development.json index 0c208ae9..ff66ba6b 100644 --- a/src/StellaOps.Notifier/StellaOps.Notifier.WebService/appsettings.Development.json +++ b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/appsettings.Development.json @@ -1,8 +1,8 @@ -{ - "Logging": { - "LogLevel": { - "Default": "Information", - "Microsoft.AspNetCore": "Warning" - } - } -} +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.AspNetCore": "Warning" + } + } +} diff --git a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/appsettings.json b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/appsettings.json similarity index 94% rename from src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/appsettings.json rename to src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/appsettings.json index 10f68b8c..4d566948 100644 --- a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/appsettings.json +++ b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/appsettings.json @@ -1,9 +1,9 @@ -{ - "Logging": { - "LogLevel": { - "Default": "Information", - "Microsoft.AspNetCore": "Warning" - } - }, - "AllowedHosts": "*" -} +{ + "Logging": { + 
"LogLevel": { + "Default": "Information", + "Microsoft.AspNetCore": "Warning" + } + }, + "AllowedHosts": "*" +} diff --git a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/Program.cs b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/Program.cs similarity index 96% rename from src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/Program.cs rename to src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/Program.cs index e4ada43e..b0a7e38f 100644 --- a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/Program.cs +++ b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/Program.cs @@ -1,7 +1,7 @@ -using StellaOps.EvidenceLocker.Worker; - -var builder = Host.CreateApplicationBuilder(args); -builder.Services.AddHostedService<Worker>(); - -var host = builder.Build(); -host.Run(); +using StellaOps.EvidenceLocker.Worker; + +var builder = Host.CreateApplicationBuilder(args); +builder.Services.AddHostedService<Worker>(); + +var host = builder.Build(); +host.Run(); diff --git a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/Properties/launchSettings.json b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/Properties/launchSettings.json similarity index 96% rename from src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/Properties/launchSettings.json rename to src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/Properties/launchSettings.json index e3692b13..3cabbcfb 100644 --- a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/Properties/launchSettings.json +++ b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/Properties/launchSettings.json @@ -1,12 +1,12 @@ -{ - "$schema": "https://json.schemastore.org/launchsettings.json", - "profiles": { - "StellaOps.EvidenceLocker.Worker": { - "commandName": "Project", - "dotnetRunMessages": true, - "environmentVariables": { - "DOTNET_ENVIRONMENT": "Development" - } - } - } -} +{ + "$schema": "https://json.schemastore.org/launchsettings.json", + "profiles": { + "StellaOps.EvidenceLocker.Worker": { + "commandName": "Project", + "dotnetRunMessages": true, + "environmentVariables": { + "DOTNET_ENVIRONMENT": "Development" + } + } + } +} diff --git a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/StellaOps.EvidenceLocker.Worker.csproj b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/StellaOps.EvidenceLocker.Worker.csproj similarity index 95% rename from src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/StellaOps.EvidenceLocker.Worker.csproj rename to src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/StellaOps.EvidenceLocker.Worker.csproj index ee851b91..2e10b047 100644 --- a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/StellaOps.EvidenceLocker.Worker.csproj +++ b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/StellaOps.EvidenceLocker.Worker.csproj @@ -1,43 +1,43 @@ -<?xml version="1.0" ?> -<Project Sdk="Microsoft.NET.Sdk.Worker"> - - - - <PropertyGroup> - - - <UserSecretsId>dotnet-StellaOps.EvidenceLocker.Worker-c74bd053-c14b-412b-a177-12e15fdbe207</UserSecretsId> - - - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <LangVersion>preview</LangVersion> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - - - - <ItemGroup> - - - 
<PackageReference Include="Microsoft.Extensions.Hosting" Version="10.0.0-rc.2.25502.107"/> - - - </ItemGroup> - - - - <ItemGroup> - - - <ProjectReference Include="..\StellaOps.EvidenceLocker.Core\StellaOps.EvidenceLocker.Core.csproj"/> - - - <ProjectReference Include="..\StellaOps.EvidenceLocker.Infrastructure\StellaOps.EvidenceLocker.Infrastructure.csproj"/> - - - </ItemGroup> - - -</Project> +<?xml version="1.0" ?> +<Project Sdk="Microsoft.NET.Sdk.Worker"> + + + + <PropertyGroup> + + + <UserSecretsId>dotnet-StellaOps.EvidenceLocker.Worker-c74bd053-c14b-412b-a177-12e15fdbe207</UserSecretsId> + + + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <LangVersion>preview</LangVersion> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + + + + <ItemGroup> + + + <PackageReference Include="Microsoft.Extensions.Hosting" Version="10.0.0-rc.2.25502.107"/> + + + </ItemGroup> + + + + <ItemGroup> + + + <ProjectReference Include="..\StellaOps.EvidenceLocker.Core\StellaOps.EvidenceLocker.Core.csproj"/> + + + <ProjectReference Include="..\StellaOps.EvidenceLocker.Infrastructure\StellaOps.EvidenceLocker.Infrastructure.csproj"/> + + + </ItemGroup> + + +</Project> diff --git a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/Worker.cs b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/Worker.cs similarity index 96% rename from src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/Worker.cs rename to src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/Worker.cs index 2d1c2d7d..150eeceb 100644 --- a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/Worker.cs +++ b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/Worker.cs @@ -1,16 +1,16 @@ -namespace StellaOps.EvidenceLocker.Worker; - -public class Worker(ILogger<Worker> logger) : BackgroundService -{ - protected override async Task ExecuteAsync(CancellationToken stoppingToken) - { - while (!stoppingToken.IsCancellationRequested) - { - if (logger.IsEnabled(LogLevel.Information)) - { - logger.LogInformation("Worker running at: {time}", DateTimeOffset.Now); - } - await Task.Delay(1000, stoppingToken); - } - } -} +namespace StellaOps.EvidenceLocker.Worker; + +public class Worker(ILogger<Worker> logger) : BackgroundService +{ + protected override async Task ExecuteAsync(CancellationToken stoppingToken) + { + while (!stoppingToken.IsCancellationRequested) + { + if (logger.IsEnabled(LogLevel.Information)) + { + logger.LogInformation("Worker running at: {time}", DateTimeOffset.Now); + } + await Task.Delay(1000, stoppingToken); + } + } +} diff --git a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/appsettings.Development.json b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/appsettings.Development.json similarity index 94% rename from src/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/appsettings.Development.json rename to src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/appsettings.Development.json index b2dcdb67..69017646 100644 --- a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/appsettings.Development.json +++ b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/appsettings.Development.json @@ -1,8 +1,8 @@ -{ - "Logging": { - "LogLevel": { - "Default": "Information", - "Microsoft.Hosting.Lifetime": "Information" - } - } -} +{ + "Logging": { + "LogLevel": { + 
"Default": "Information", + "Microsoft.Hosting.Lifetime": "Information" + } + } +} diff --git a/src/StellaOps.Notifier/StellaOps.Notifier.Worker/appsettings.json b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/appsettings.json similarity index 94% rename from src/StellaOps.Notifier/StellaOps.Notifier.Worker/appsettings.json rename to src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/appsettings.json index b2dcdb67..69017646 100644 --- a/src/StellaOps.Notifier/StellaOps.Notifier.Worker/appsettings.json +++ b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/appsettings.json @@ -1,8 +1,8 @@ -{ - "Logging": { - "LogLevel": { - "Default": "Information", - "Microsoft.Hosting.Lifetime": "Information" - } - } -} +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.Hosting.Lifetime": "Information" + } + } +} diff --git a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.sln b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.sln similarity index 98% rename from src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.sln rename to src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.sln index f5fb50af..506e49c0 100644 --- a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.sln +++ b/src/EvidenceLocker/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.sln @@ -1,90 +1,90 @@ - -Microsoft Visual Studio Solution File, Format Version 12.00 -# Visual Studio Version 17 -VisualStudioVersion = 17.0.31903.59 -MinimumVisualStudioVersion = 10.0.40219.1 -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.EvidenceLocker.Core", "StellaOps.EvidenceLocker.Core\StellaOps.EvidenceLocker.Core.csproj", "{217D54F6-D07F-4B1E-8598-7DCAF0BD65C7}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.EvidenceLocker.Infrastructure", "StellaOps.EvidenceLocker.Infrastructure\StellaOps.EvidenceLocker.Infrastructure.csproj", "{BF61F2F5-4ECA-4DA6-AC6B-102C39D225A1}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.EvidenceLocker.WebService", "StellaOps.EvidenceLocker.WebService\StellaOps.EvidenceLocker.WebService.csproj", "{392D1580-C75B-4CB2-8F26-45C65268A191}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.EvidenceLocker.Worker", "StellaOps.EvidenceLocker.Worker\StellaOps.EvidenceLocker.Worker.csproj", "{B384F421-48D0-48EB-A63F-0AF28EBC75EB}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.EvidenceLocker.Tests", "StellaOps.EvidenceLocker.Tests\StellaOps.EvidenceLocker.Tests.csproj", "{B9D6DCF2-1C6F-41E5-8D63-118BD0751839}" -EndProject -Global - GlobalSection(SolutionConfigurationPlatforms) = preSolution - Debug|Any CPU = Debug|Any CPU - Debug|x64 = Debug|x64 - Debug|x86 = Debug|x86 - Release|Any CPU = Release|Any CPU - Release|x64 = Release|x64 - Release|x86 = Release|x86 - EndGlobalSection - GlobalSection(ProjectConfigurationPlatforms) = postSolution - {217D54F6-D07F-4B1E-8598-7DCAF0BD65C7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {217D54F6-D07F-4B1E-8598-7DCAF0BD65C7}.Debug|Any CPU.Build.0 = Debug|Any CPU - {217D54F6-D07F-4B1E-8598-7DCAF0BD65C7}.Debug|x64.ActiveCfg = Debug|Any CPU - {217D54F6-D07F-4B1E-8598-7DCAF0BD65C7}.Debug|x64.Build.0 = Debug|Any CPU - {217D54F6-D07F-4B1E-8598-7DCAF0BD65C7}.Debug|x86.ActiveCfg = Debug|Any CPU - {217D54F6-D07F-4B1E-8598-7DCAF0BD65C7}.Debug|x86.Build.0 = Debug|Any CPU - {217D54F6-D07F-4B1E-8598-7DCAF0BD65C7}.Release|Any 
CPU.ActiveCfg = Release|Any CPU - {217D54F6-D07F-4B1E-8598-7DCAF0BD65C7}.Release|Any CPU.Build.0 = Release|Any CPU - {217D54F6-D07F-4B1E-8598-7DCAF0BD65C7}.Release|x64.ActiveCfg = Release|Any CPU - {217D54F6-D07F-4B1E-8598-7DCAF0BD65C7}.Release|x64.Build.0 = Release|Any CPU - {217D54F6-D07F-4B1E-8598-7DCAF0BD65C7}.Release|x86.ActiveCfg = Release|Any CPU - {217D54F6-D07F-4B1E-8598-7DCAF0BD65C7}.Release|x86.Build.0 = Release|Any CPU - {BF61F2F5-4ECA-4DA6-AC6B-102C39D225A1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {BF61F2F5-4ECA-4DA6-AC6B-102C39D225A1}.Debug|Any CPU.Build.0 = Debug|Any CPU - {BF61F2F5-4ECA-4DA6-AC6B-102C39D225A1}.Debug|x64.ActiveCfg = Debug|Any CPU - {BF61F2F5-4ECA-4DA6-AC6B-102C39D225A1}.Debug|x64.Build.0 = Debug|Any CPU - {BF61F2F5-4ECA-4DA6-AC6B-102C39D225A1}.Debug|x86.ActiveCfg = Debug|Any CPU - {BF61F2F5-4ECA-4DA6-AC6B-102C39D225A1}.Debug|x86.Build.0 = Debug|Any CPU - {BF61F2F5-4ECA-4DA6-AC6B-102C39D225A1}.Release|Any CPU.ActiveCfg = Release|Any CPU - {BF61F2F5-4ECA-4DA6-AC6B-102C39D225A1}.Release|Any CPU.Build.0 = Release|Any CPU - {BF61F2F5-4ECA-4DA6-AC6B-102C39D225A1}.Release|x64.ActiveCfg = Release|Any CPU - {BF61F2F5-4ECA-4DA6-AC6B-102C39D225A1}.Release|x64.Build.0 = Release|Any CPU - {BF61F2F5-4ECA-4DA6-AC6B-102C39D225A1}.Release|x86.ActiveCfg = Release|Any CPU - {BF61F2F5-4ECA-4DA6-AC6B-102C39D225A1}.Release|x86.Build.0 = Release|Any CPU - {392D1580-C75B-4CB2-8F26-45C65268A191}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {392D1580-C75B-4CB2-8F26-45C65268A191}.Debug|Any CPU.Build.0 = Debug|Any CPU - {392D1580-C75B-4CB2-8F26-45C65268A191}.Debug|x64.ActiveCfg = Debug|Any CPU - {392D1580-C75B-4CB2-8F26-45C65268A191}.Debug|x64.Build.0 = Debug|Any CPU - {392D1580-C75B-4CB2-8F26-45C65268A191}.Debug|x86.ActiveCfg = Debug|Any CPU - {392D1580-C75B-4CB2-8F26-45C65268A191}.Debug|x86.Build.0 = Debug|Any CPU - {392D1580-C75B-4CB2-8F26-45C65268A191}.Release|Any CPU.ActiveCfg = Release|Any CPU - {392D1580-C75B-4CB2-8F26-45C65268A191}.Release|Any CPU.Build.0 = Release|Any CPU - {392D1580-C75B-4CB2-8F26-45C65268A191}.Release|x64.ActiveCfg = Release|Any CPU - {392D1580-C75B-4CB2-8F26-45C65268A191}.Release|x64.Build.0 = Release|Any CPU - {392D1580-C75B-4CB2-8F26-45C65268A191}.Release|x86.ActiveCfg = Release|Any CPU - {392D1580-C75B-4CB2-8F26-45C65268A191}.Release|x86.Build.0 = Release|Any CPU - {B384F421-48D0-48EB-A63F-0AF28EBC75EB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {B384F421-48D0-48EB-A63F-0AF28EBC75EB}.Debug|Any CPU.Build.0 = Debug|Any CPU - {B384F421-48D0-48EB-A63F-0AF28EBC75EB}.Debug|x64.ActiveCfg = Debug|Any CPU - {B384F421-48D0-48EB-A63F-0AF28EBC75EB}.Debug|x64.Build.0 = Debug|Any CPU - {B384F421-48D0-48EB-A63F-0AF28EBC75EB}.Debug|x86.ActiveCfg = Debug|Any CPU - {B384F421-48D0-48EB-A63F-0AF28EBC75EB}.Debug|x86.Build.0 = Debug|Any CPU - {B384F421-48D0-48EB-A63F-0AF28EBC75EB}.Release|Any CPU.ActiveCfg = Release|Any CPU - {B384F421-48D0-48EB-A63F-0AF28EBC75EB}.Release|Any CPU.Build.0 = Release|Any CPU - {B384F421-48D0-48EB-A63F-0AF28EBC75EB}.Release|x64.ActiveCfg = Release|Any CPU - {B384F421-48D0-48EB-A63F-0AF28EBC75EB}.Release|x64.Build.0 = Release|Any CPU - {B384F421-48D0-48EB-A63F-0AF28EBC75EB}.Release|x86.ActiveCfg = Release|Any CPU - {B384F421-48D0-48EB-A63F-0AF28EBC75EB}.Release|x86.Build.0 = Release|Any CPU - {B9D6DCF2-1C6F-41E5-8D63-118BD0751839}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {B9D6DCF2-1C6F-41E5-8D63-118BD0751839}.Debug|Any CPU.Build.0 = Debug|Any CPU - {B9D6DCF2-1C6F-41E5-8D63-118BD0751839}.Debug|x64.ActiveCfg = Debug|Any CPU - 
{B9D6DCF2-1C6F-41E5-8D63-118BD0751839}.Debug|x64.Build.0 = Debug|Any CPU - {B9D6DCF2-1C6F-41E5-8D63-118BD0751839}.Debug|x86.ActiveCfg = Debug|Any CPU - {B9D6DCF2-1C6F-41E5-8D63-118BD0751839}.Debug|x86.Build.0 = Debug|Any CPU - {B9D6DCF2-1C6F-41E5-8D63-118BD0751839}.Release|Any CPU.ActiveCfg = Release|Any CPU - {B9D6DCF2-1C6F-41E5-8D63-118BD0751839}.Release|Any CPU.Build.0 = Release|Any CPU - {B9D6DCF2-1C6F-41E5-8D63-118BD0751839}.Release|x64.ActiveCfg = Release|Any CPU - {B9D6DCF2-1C6F-41E5-8D63-118BD0751839}.Release|x64.Build.0 = Release|Any CPU - {B9D6DCF2-1C6F-41E5-8D63-118BD0751839}.Release|x86.ActiveCfg = Release|Any CPU - {B9D6DCF2-1C6F-41E5-8D63-118BD0751839}.Release|x86.Build.0 = Release|Any CPU - EndGlobalSection - GlobalSection(SolutionProperties) = preSolution - HideSolutionNode = FALSE - EndGlobalSection -EndGlobal + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.0.31903.59 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.EvidenceLocker.Core", "StellaOps.EvidenceLocker.Core\StellaOps.EvidenceLocker.Core.csproj", "{217D54F6-D07F-4B1E-8598-7DCAF0BD65C7}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.EvidenceLocker.Infrastructure", "StellaOps.EvidenceLocker.Infrastructure\StellaOps.EvidenceLocker.Infrastructure.csproj", "{BF61F2F5-4ECA-4DA6-AC6B-102C39D225A1}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.EvidenceLocker.WebService", "StellaOps.EvidenceLocker.WebService\StellaOps.EvidenceLocker.WebService.csproj", "{392D1580-C75B-4CB2-8F26-45C65268A191}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.EvidenceLocker.Worker", "StellaOps.EvidenceLocker.Worker\StellaOps.EvidenceLocker.Worker.csproj", "{B384F421-48D0-48EB-A63F-0AF28EBC75EB}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.EvidenceLocker.Tests", "StellaOps.EvidenceLocker.Tests\StellaOps.EvidenceLocker.Tests.csproj", "{B9D6DCF2-1C6F-41E5-8D63-118BD0751839}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|Any CPU = Release|Any CPU + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {217D54F6-D07F-4B1E-8598-7DCAF0BD65C7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {217D54F6-D07F-4B1E-8598-7DCAF0BD65C7}.Debug|Any CPU.Build.0 = Debug|Any CPU + {217D54F6-D07F-4B1E-8598-7DCAF0BD65C7}.Debug|x64.ActiveCfg = Debug|Any CPU + {217D54F6-D07F-4B1E-8598-7DCAF0BD65C7}.Debug|x64.Build.0 = Debug|Any CPU + {217D54F6-D07F-4B1E-8598-7DCAF0BD65C7}.Debug|x86.ActiveCfg = Debug|Any CPU + {217D54F6-D07F-4B1E-8598-7DCAF0BD65C7}.Debug|x86.Build.0 = Debug|Any CPU + {217D54F6-D07F-4B1E-8598-7DCAF0BD65C7}.Release|Any CPU.ActiveCfg = Release|Any CPU + {217D54F6-D07F-4B1E-8598-7DCAF0BD65C7}.Release|Any CPU.Build.0 = Release|Any CPU + {217D54F6-D07F-4B1E-8598-7DCAF0BD65C7}.Release|x64.ActiveCfg = Release|Any CPU + {217D54F6-D07F-4B1E-8598-7DCAF0BD65C7}.Release|x64.Build.0 = Release|Any CPU + {217D54F6-D07F-4B1E-8598-7DCAF0BD65C7}.Release|x86.ActiveCfg = Release|Any CPU + {217D54F6-D07F-4B1E-8598-7DCAF0BD65C7}.Release|x86.Build.0 = Release|Any CPU + {BF61F2F5-4ECA-4DA6-AC6B-102C39D225A1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {BF61F2F5-4ECA-4DA6-AC6B-102C39D225A1}.Debug|Any CPU.Build.0 = Debug|Any CPU + 
{BF61F2F5-4ECA-4DA6-AC6B-102C39D225A1}.Debug|x64.ActiveCfg = Debug|Any CPU + {BF61F2F5-4ECA-4DA6-AC6B-102C39D225A1}.Debug|x64.Build.0 = Debug|Any CPU + {BF61F2F5-4ECA-4DA6-AC6B-102C39D225A1}.Debug|x86.ActiveCfg = Debug|Any CPU + {BF61F2F5-4ECA-4DA6-AC6B-102C39D225A1}.Debug|x86.Build.0 = Debug|Any CPU + {BF61F2F5-4ECA-4DA6-AC6B-102C39D225A1}.Release|Any CPU.ActiveCfg = Release|Any CPU + {BF61F2F5-4ECA-4DA6-AC6B-102C39D225A1}.Release|Any CPU.Build.0 = Release|Any CPU + {BF61F2F5-4ECA-4DA6-AC6B-102C39D225A1}.Release|x64.ActiveCfg = Release|Any CPU + {BF61F2F5-4ECA-4DA6-AC6B-102C39D225A1}.Release|x64.Build.0 = Release|Any CPU + {BF61F2F5-4ECA-4DA6-AC6B-102C39D225A1}.Release|x86.ActiveCfg = Release|Any CPU + {BF61F2F5-4ECA-4DA6-AC6B-102C39D225A1}.Release|x86.Build.0 = Release|Any CPU + {392D1580-C75B-4CB2-8F26-45C65268A191}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {392D1580-C75B-4CB2-8F26-45C65268A191}.Debug|Any CPU.Build.0 = Debug|Any CPU + {392D1580-C75B-4CB2-8F26-45C65268A191}.Debug|x64.ActiveCfg = Debug|Any CPU + {392D1580-C75B-4CB2-8F26-45C65268A191}.Debug|x64.Build.0 = Debug|Any CPU + {392D1580-C75B-4CB2-8F26-45C65268A191}.Debug|x86.ActiveCfg = Debug|Any CPU + {392D1580-C75B-4CB2-8F26-45C65268A191}.Debug|x86.Build.0 = Debug|Any CPU + {392D1580-C75B-4CB2-8F26-45C65268A191}.Release|Any CPU.ActiveCfg = Release|Any CPU + {392D1580-C75B-4CB2-8F26-45C65268A191}.Release|Any CPU.Build.0 = Release|Any CPU + {392D1580-C75B-4CB2-8F26-45C65268A191}.Release|x64.ActiveCfg = Release|Any CPU + {392D1580-C75B-4CB2-8F26-45C65268A191}.Release|x64.Build.0 = Release|Any CPU + {392D1580-C75B-4CB2-8F26-45C65268A191}.Release|x86.ActiveCfg = Release|Any CPU + {392D1580-C75B-4CB2-8F26-45C65268A191}.Release|x86.Build.0 = Release|Any CPU + {B384F421-48D0-48EB-A63F-0AF28EBC75EB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B384F421-48D0-48EB-A63F-0AF28EBC75EB}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B384F421-48D0-48EB-A63F-0AF28EBC75EB}.Debug|x64.ActiveCfg = Debug|Any CPU + {B384F421-48D0-48EB-A63F-0AF28EBC75EB}.Debug|x64.Build.0 = Debug|Any CPU + {B384F421-48D0-48EB-A63F-0AF28EBC75EB}.Debug|x86.ActiveCfg = Debug|Any CPU + {B384F421-48D0-48EB-A63F-0AF28EBC75EB}.Debug|x86.Build.0 = Debug|Any CPU + {B384F421-48D0-48EB-A63F-0AF28EBC75EB}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B384F421-48D0-48EB-A63F-0AF28EBC75EB}.Release|Any CPU.Build.0 = Release|Any CPU + {B384F421-48D0-48EB-A63F-0AF28EBC75EB}.Release|x64.ActiveCfg = Release|Any CPU + {B384F421-48D0-48EB-A63F-0AF28EBC75EB}.Release|x64.Build.0 = Release|Any CPU + {B384F421-48D0-48EB-A63F-0AF28EBC75EB}.Release|x86.ActiveCfg = Release|Any CPU + {B384F421-48D0-48EB-A63F-0AF28EBC75EB}.Release|x86.Build.0 = Release|Any CPU + {B9D6DCF2-1C6F-41E5-8D63-118BD0751839}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B9D6DCF2-1C6F-41E5-8D63-118BD0751839}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B9D6DCF2-1C6F-41E5-8D63-118BD0751839}.Debug|x64.ActiveCfg = Debug|Any CPU + {B9D6DCF2-1C6F-41E5-8D63-118BD0751839}.Debug|x64.Build.0 = Debug|Any CPU + {B9D6DCF2-1C6F-41E5-8D63-118BD0751839}.Debug|x86.ActiveCfg = Debug|Any CPU + {B9D6DCF2-1C6F-41E5-8D63-118BD0751839}.Debug|x86.Build.0 = Debug|Any CPU + {B9D6DCF2-1C6F-41E5-8D63-118BD0751839}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B9D6DCF2-1C6F-41E5-8D63-118BD0751839}.Release|Any CPU.Build.0 = Release|Any CPU + {B9D6DCF2-1C6F-41E5-8D63-118BD0751839}.Release|x64.ActiveCfg = Release|Any CPU + {B9D6DCF2-1C6F-41E5-8D63-118BD0751839}.Release|x64.Build.0 = Release|Any CPU + {B9D6DCF2-1C6F-41E5-8D63-118BD0751839}.Release|x86.ActiveCfg = 
Release|Any CPU + {B9D6DCF2-1C6F-41E5-8D63-118BD0751839}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection +EndGlobal diff --git a/src/StellaOps.EvidenceLocker/TASKS.md b/src/EvidenceLocker/StellaOps.EvidenceLocker/TASKS.md similarity index 99% rename from src/StellaOps.EvidenceLocker/TASKS.md rename to src/EvidenceLocker/StellaOps.EvidenceLocker/TASKS.md index ec084597..313f3284 100644 --- a/src/StellaOps.EvidenceLocker/TASKS.md +++ b/src/EvidenceLocker/StellaOps.EvidenceLocker/TASKS.md @@ -1,24 +1,24 @@ -# Evidence Locker Task Board — Epic 15: Observability & Forensics - -## Sprint 53 – Evidence Bundle Foundations -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| EVID-OBS-53-001 | TODO | Evidence Locker Guild | TELEMETRY-OBS-50-001, DEVOPS-OBS-50-003 | Bootstrap `StellaOps.Evidence.Locker` service with Postgres schema for `evidence_bundles`, `evidence_artifacts`, `evidence_holds`, tenant RLS, and object-store abstraction (WORM optional). | Service builds/tests; migrations deterministic; storage abstraction has local filesystem + S3 drivers; compliance checklist recorded. | -| EVID-OBS-53-002 | TODO | Evidence Locker Guild, Orchestrator Guild | EVID-OBS-53-001, ORCH-OBS-53-001 | Implement bundle builders for evaluation/job/export snapshots collecting inputs, outputs, env digests, run metadata. Generate Merkle tree + manifest skeletons and persist root hash. | Builders cover three bundle types; integration tests verify deterministic manifests; root hash stored; docs stubbed. | -| EVID-OBS-53-003 | TODO | Evidence Locker Guild, Security Guild | EVID-OBS-53-002 | Expose REST APIs (`POST /evidence/snapshot`, `GET /evidence/:id`, `POST /evidence/verify`, `POST /evidence/hold/:case_id`) with audit logging, tenant enforcement, and size quotas. | APIs documented via OpenAPI; tests cover RBAC/legal hold; size quota rejection returns structured error; audit logs validated. | - -## Sprint 54 – Provenance Integration -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| EVID-OBS-54-001 | TODO | Evidence Locker Guild, Provenance Guild | EVID-OBS-53-003, PROV-OBS-53-002 | Attach DSSE signing and RFC3161 timestamping to bundle manifests; validate against Provenance verification library. Wire legal hold retention extension and chain-of-custody events for Timeline Indexer. | Bundles signed; verification tests pass; timeline events emitted; timestamp optional but documented; retention updates recorded. | -| EVID-OBS-54-002 | TODO | Evidence Locker Guild, DevEx/CLI Guild | EVID-OBS-54-001, CLI-FORENSICS-54-001 | Provide bundle download/export packaging (tgz) with checksum manifest, offline verification instructions, and sample fixture for CLI tests. | Packaging script deterministic; CLI verifies sample; offline instructions documented; checksum cross-check done. | - -## Sprint 55 – Incident Mode & Retention -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| EVID-OBS-55-001 | TODO | Evidence Locker Guild, DevOps Guild | EVID-OBS-54-001, DEVOPS-OBS-55-001 | Implement incident mode hooks increasing retention window, capturing additional debug artefacts, and emitting activation/deactivation events to Timeline Indexer + Notifier. 
| Incident mode extends retention per config; activation events emitted; tests cover revert to baseline; runbook updated. | - -## Sprint 60 – Sealed Mode Portability -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| EVID-OBS-60-001 | TODO | Evidence Locker Guild | EVID-OBS-55-001, AIRGAP-CTL-56-002 | Deliver portable evidence export flow for sealed environments: generate sealed bundles with checksum manifest, redacted metadata, and offline verification script. Document air-gapped import/verify procedures. | Portable bundle tooling implemented; checksum/verify script passes; sealed-mode docs updated; tests cover tamper + re-import scenarios. | +# Evidence Locker Task Board — Epic 15: Observability & Forensics + +## Sprint 53 – Evidence Bundle Foundations +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| EVID-OBS-53-001 | TODO | Evidence Locker Guild | TELEMETRY-OBS-50-001, DEVOPS-OBS-50-003 | Bootstrap `StellaOps.Evidence.Locker` service with Postgres schema for `evidence_bundles`, `evidence_artifacts`, `evidence_holds`, tenant RLS, and object-store abstraction (WORM optional). | Service builds/tests; migrations deterministic; storage abstraction has local filesystem + S3 drivers; compliance checklist recorded. | +| EVID-OBS-53-002 | TODO | Evidence Locker Guild, Orchestrator Guild | EVID-OBS-53-001, ORCH-OBS-53-001 | Implement bundle builders for evaluation/job/export snapshots collecting inputs, outputs, env digests, run metadata. Generate Merkle tree + manifest skeletons and persist root hash. | Builders cover three bundle types; integration tests verify deterministic manifests; root hash stored; docs stubbed. | +| EVID-OBS-53-003 | TODO | Evidence Locker Guild, Security Guild | EVID-OBS-53-002 | Expose REST APIs (`POST /evidence/snapshot`, `GET /evidence/:id`, `POST /evidence/verify`, `POST /evidence/hold/:case_id`) with audit logging, tenant enforcement, and size quotas. | APIs documented via OpenAPI; tests cover RBAC/legal hold; size quota rejection returns structured error; audit logs validated. | + +## Sprint 54 – Provenance Integration +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| EVID-OBS-54-001 | TODO | Evidence Locker Guild, Provenance Guild | EVID-OBS-53-003, PROV-OBS-53-002 | Attach DSSE signing and RFC3161 timestamping to bundle manifests; validate against Provenance verification library. Wire legal hold retention extension and chain-of-custody events for Timeline Indexer. | Bundles signed; verification tests pass; timeline events emitted; timestamp optional but documented; retention updates recorded. | +| EVID-OBS-54-002 | TODO | Evidence Locker Guild, DevEx/CLI Guild | EVID-OBS-54-001, CLI-FORENSICS-54-001 | Provide bundle download/export packaging (tgz) with checksum manifest, offline verification instructions, and sample fixture for CLI tests. | Packaging script deterministic; CLI verifies sample; offline instructions documented; checksum cross-check done. 
| + +## Sprint 55 – Incident Mode & Retention +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| EVID-OBS-55-001 | TODO | Evidence Locker Guild, DevOps Guild | EVID-OBS-54-001, DEVOPS-OBS-55-001 | Implement incident mode hooks increasing retention window, capturing additional debug artefacts, and emitting activation/deactivation events to Timeline Indexer + Notifier. | Incident mode extends retention per config; activation events emitted; tests cover revert to baseline; runbook updated. | + +## Sprint 60 – Sealed Mode Portability +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| EVID-OBS-60-001 | TODO | Evidence Locker Guild | EVID-OBS-55-001, AIRGAP-CTL-56-002 | Deliver portable evidence export flow for sealed environments: generate sealed bundles with checksum manifest, redacted metadata, and offline verification script. Document air-gapped import/verify procedures. | Portable bundle tooling implemented; checksum/verify script passes; sealed-mode docs updated; tests cover tamper + re-import scenarios. | diff --git a/src/StellaOps.Excititor.Connectors.StellaOpsMirror/TASKS.md b/src/Excititor/StellaOps.Excititor.Connectors.StellaOpsMirror/TASKS.md similarity index 100% rename from src/StellaOps.Excititor.Connectors.StellaOpsMirror/TASKS.md rename to src/Excititor/StellaOps.Excititor.Connectors.StellaOpsMirror/TASKS.md diff --git a/src/StellaOps.Excititor.WebService/AGENTS.md b/src/Excititor/StellaOps.Excititor.WebService/AGENTS.md similarity index 100% rename from src/StellaOps.Excititor.WebService/AGENTS.md rename to src/Excititor/StellaOps.Excititor.WebService/AGENTS.md diff --git a/src/StellaOps.Excititor.WebService/Endpoints/IngestEndpoints.cs b/src/Excititor/StellaOps.Excititor.WebService/Endpoints/IngestEndpoints.cs similarity index 100% rename from src/StellaOps.Excititor.WebService/Endpoints/IngestEndpoints.cs rename to src/Excititor/StellaOps.Excititor.WebService/Endpoints/IngestEndpoints.cs diff --git a/src/StellaOps.Excititor.WebService/Endpoints/MirrorEndpoints.cs b/src/Excititor/StellaOps.Excititor.WebService/Endpoints/MirrorEndpoints.cs similarity index 100% rename from src/StellaOps.Excititor.WebService/Endpoints/MirrorEndpoints.cs rename to src/Excititor/StellaOps.Excititor.WebService/Endpoints/MirrorEndpoints.cs diff --git a/src/StellaOps.Excititor.WebService/Endpoints/ResolveEndpoint.cs b/src/Excititor/StellaOps.Excititor.WebService/Endpoints/ResolveEndpoint.cs similarity index 100% rename from src/StellaOps.Excititor.WebService/Endpoints/ResolveEndpoint.cs rename to src/Excititor/StellaOps.Excititor.WebService/Endpoints/ResolveEndpoint.cs diff --git a/src/StellaOps.Excititor.WebService/Program.cs b/src/Excititor/StellaOps.Excititor.WebService/Program.cs similarity index 100% rename from src/StellaOps.Excititor.WebService/Program.cs rename to src/Excititor/StellaOps.Excititor.WebService/Program.cs diff --git a/src/StellaOps.Excititor.WebService/Properties/AssemblyInfo.cs b/src/Excititor/StellaOps.Excititor.WebService/Properties/AssemblyInfo.cs similarity index 97% rename from src/StellaOps.Excititor.WebService/Properties/AssemblyInfo.cs rename to src/Excititor/StellaOps.Excititor.WebService/Properties/AssemblyInfo.cs index b440af32..3b324032 100644 --- a/src/StellaOps.Excititor.WebService/Properties/AssemblyInfo.cs +++ 
b/src/Excititor/StellaOps.Excititor.WebService/Properties/AssemblyInfo.cs @@ -1,3 +1,3 @@ -using System.Runtime.CompilerServices; - -[assembly: InternalsVisibleTo("StellaOps.Excititor.WebService.Tests")] +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("StellaOps.Excititor.WebService.Tests")] diff --git a/src/StellaOps.Excititor.WebService/Services/MirrorRateLimiter.cs b/src/Excititor/StellaOps.Excititor.WebService/Services/MirrorRateLimiter.cs similarity index 100% rename from src/StellaOps.Excititor.WebService/Services/MirrorRateLimiter.cs rename to src/Excititor/StellaOps.Excititor.WebService/Services/MirrorRateLimiter.cs diff --git a/src/StellaOps.Excititor.WebService/Services/ScopeAuthorization.cs b/src/Excititor/StellaOps.Excititor.WebService/Services/ScopeAuthorization.cs similarity index 100% rename from src/StellaOps.Excititor.WebService/Services/ScopeAuthorization.cs rename to src/Excititor/StellaOps.Excititor.WebService/Services/ScopeAuthorization.cs diff --git a/src/StellaOps.Excititor.WebService/Services/VexIngestOrchestrator.cs b/src/Excititor/StellaOps.Excititor.WebService/Services/VexIngestOrchestrator.cs similarity index 100% rename from src/StellaOps.Excititor.WebService/Services/VexIngestOrchestrator.cs rename to src/Excititor/StellaOps.Excititor.WebService/Services/VexIngestOrchestrator.cs diff --git a/src/Excititor/StellaOps.Excititor.WebService/StellaOps.Excititor.WebService.csproj b/src/Excititor/StellaOps.Excititor.WebService/StellaOps.Excititor.WebService.csproj new file mode 100644 index 00000000..6d30dbb1 --- /dev/null +++ b/src/Excititor/StellaOps.Excititor.WebService/StellaOps.Excititor.WebService.csproj @@ -0,0 +1,23 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk.Web"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../__Libraries/StellaOps.Excititor.Core/StellaOps.Excititor.Core.csproj" /> + <ProjectReference Include="../__Libraries/StellaOps.Excititor.Storage.Mongo/StellaOps.Excititor.Storage.Mongo.csproj" /> + <ProjectReference Include="../__Libraries/StellaOps.Excititor.Export/StellaOps.Excititor.Export.csproj" /> + <ProjectReference Include="../__Libraries/StellaOps.Excititor.Policy/StellaOps.Excititor.Policy.csproj" /> + <ProjectReference Include="../__Libraries/StellaOps.Excititor.Attestation/StellaOps.Excititor.Attestation.csproj" /> + <ProjectReference Include="../__Libraries/StellaOps.Excititor.ArtifactStores.S3/StellaOps.Excititor.ArtifactStores.S3.csproj" /> + <ProjectReference Include="../__Libraries/StellaOps.Excititor.Connectors.RedHat.CSAF/StellaOps.Excititor.Connectors.RedHat.CSAF.csproj" /> + <ProjectReference Include="../__Libraries/StellaOps.Excititor.Formats.CSAF/StellaOps.Excititor.Formats.CSAF.csproj" /> + <ProjectReference Include="../__Libraries/StellaOps.Excititor.Formats.CycloneDX/StellaOps.Excititor.Formats.CycloneDX.csproj" /> + <ProjectReference Include="../__Libraries/StellaOps.Excititor.Formats.OpenVEX/StellaOps.Excititor.Formats.OpenVEX.csproj" /> + <ProjectReference Include="../../Aoc/__Libraries/StellaOps.Aoc/StellaOps.Aoc.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Excititor.WebService/TASKS.md b/src/Excititor/StellaOps.Excititor.WebService/TASKS.md similarity index 99% rename from 
src/StellaOps.Excititor.WebService/TASKS.md rename to src/Excititor/StellaOps.Excititor.WebService/TASKS.md index 749569cf..f04de8b7 100644 --- a/src/StellaOps.Excititor.WebService/TASKS.md +++ b/src/Excititor/StellaOps.Excititor.WebService/TASKS.md @@ -1,94 +1,94 @@ -# TASKS — Epic 1: Aggregation-Only Contract -> **AOC Reminder:** Excititor WebService publishes raw statements/linksets only; derived precedence/severity belongs to Policy overlays. -| ID | Status | Owner(s) | Depends on | Notes | -|---|---|---|---|---| -| EXCITITOR-WEB-AOC-19-001 `Raw VEX ingestion APIs` | TODO | Excititor WebService Guild | EXCITITOR-CORE-AOC-19-001, EXCITITOR-STORE-AOC-19-001 | Implement `POST /ingest/vex`, `GET /vex/raw*`, and `POST /aoc/verify` endpoints. Enforce Authority scopes, tenant injection, and guard pipeline to ensure only immutable VEX facts are persisted. | -> Docs alignment (2025-10-26): See AOC reference §4–5 and authority scopes doc for required tokens/behaviour. -| EXCITITOR-WEB-AOC-19-002 `AOC observability + metrics` | TODO | Excititor WebService Guild, Observability Guild | EXCITITOR-WEB-AOC-19-001 | Export metrics (`ingestion_write_total`, `aoc_violation_total`, signature verification counters) and tracing spans matching Conseiller naming. Ensure structured logging includes tenant, source vendor, upstream id, and content hash. | -> Docs alignment (2025-10-26): Metrics/traces/log schema in `docs/observability/observability.md`. -| EXCITITOR-WEB-AOC-19-003 `Guard + schema test harness` | TODO | QA Guild | EXCITITOR-WEB-AOC-19-001 | Add unit/integration tests for schema validation, forbidden field rejection (`ERR_AOC_001/006/007`), and supersedes behavior using CycloneDX-VEX & CSAF fixtures with deterministic expectations. | -> Docs alignment (2025-10-26): Error codes + CLI verification in `docs/cli/cli-reference.md`. -| EXCITITOR-WEB-AOC-19-004 `Batch ingest validation` | TODO | Excititor WebService Guild, QA Guild | EXCITITOR-WEB-AOC-19-003, EXCITITOR-CORE-AOC-19-002 | Build large fixture ingest covering mixed VEX statuses, verifying raw storage parity, metrics, and CLI `aoc verify` compatibility. Document load test/runbook updates. | -> Docs alignment (2025-10-26): Offline/air-gap workflows captured in `docs/deploy/containers.md` §5. - -## Policy Engine v2 - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| EXCITITOR-POLICY-20-001 `Policy selection endpoints` | TODO | Excititor WebService Guild | WEB-POLICY-20-001, EXCITITOR-CORE-AOC-19-004 | Provide VEX lookup APIs supporting PURL/advisory batching, scope filtering, and tenant enforcement with deterministic ordering + pagination. | - -## StellaOps Console (Sprint 23) - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| EXCITITOR-CONSOLE-23-001 `VEX aggregation views` | TODO | Excititor WebService Guild, BE-Base Platform Guild | EXCITITOR-LNM-21-201, EXCITITOR-LNM-21-202 | Expose `/console/vex` endpoints returning grouped VEX statements per advisory/component with status chips, justification metadata, precedence trace pointers, and tenant-scoped filters for Console explorer. | -| EXCITITOR-CONSOLE-23-002 `Dashboard VEX deltas` | TODO | Excititor WebService Guild | EXCITITOR-CONSOLE-23-001, EXCITITOR-LNM-21-203 | Provide aggregated counts for VEX overrides (new, not_affected, revoked) powering Console dashboard + live status ticker; emit metrics for policy explain integration. 
| -| EXCITITOR-CONSOLE-23-003 `VEX search helpers` | TODO | Excititor WebService Guild | EXCITITOR-CONSOLE-23-001 | Deliver rapid lookup endpoints of VEX by advisory/component for Console global search; ensure response includes provenance and precedence context; include caching and RBAC. | - -## Graph Explorer v1 - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| - -## Link-Not-Merge v1 - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| EXCITITOR-LNM-21-201 `Observation APIs` | TODO | Excititor WebService Guild, BE-Base Platform Guild | EXCITITOR-LNM-21-001 | Add VEX observation read endpoints with filters, pagination, RBAC, and tenant scoping. | -| EXCITITOR-LNM-21-202 `Linkset APIs` | TODO | Excititor WebService Guild | EXCITITOR-LNM-21-002, EXCITITOR-LNM-21-003 | Implement linkset read/export/evidence endpoints returning correlation/conflict payloads and map errors to `ERR_AGG_*`. | -| EXCITITOR-LNM-21-203 `Event publishing` | TODO | Excititor WebService Guild, Platform Events Guild | EXCITITOR-LNM-21-005 | Publish `vex.linkset.updated` events, document schema, and ensure idempotent delivery. | - -## Graph & Vuln Explorer v1 - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| EXCITITOR-GRAPH-24-101 `VEX summary API` | TODO | Excititor WebService Guild | EXCITITOR-GRAPH-24-001 | Provide endpoints delivering VEX status summaries per component/asset for Vuln Explorer integration. | -| EXCITITOR-GRAPH-24-102 `Evidence batch API` | TODO | Excititor WebService Guild | EXCITITOR-LNM-21-201 | Add batch VEX observation retrieval optimized for Graph overlays/tooltips. | - -## VEX Lens (Sprint 30) - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| EXCITITOR-VEXLENS-30-001 `VEX evidence enrichers` | TODO | Excititor WebService Guild, VEX Lens Guild | EXCITITOR-VULN-29-001, VEXLENS-30-005 | Include issuer hints, signatures, and product trees in evidence payloads for VEX Lens; Label: VEX-Lens. | - -## Vulnerability Explorer (Sprint 29) - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| EXCITITOR-VULN-29-001 `VEX key canonicalization` | TODO | Excititor WebService Guild | EXCITITOR-LNM-21-001 | Canonicalize (lossless) VEX advisory/product keys (map to `advisory_key`, capture product scopes); expose original sources in `links[]`; AOC-compliant: no merge, no derived fields, no suppression; backfill existing records. | -| EXCITITOR-VULN-29-002 `Evidence retrieval` | TODO | Excititor WebService Guild | EXCITITOR-VULN-29-001, VULN-API-29-003 | Provide `/vuln/evidence/vex/{advisory_key}` returning raw VEX statements filtered by tenant/product scope for Explorer evidence tabs. | -| EXCITITOR-VULN-29-004 `Observability` | TODO | Excititor WebService Guild, Observability Guild | EXCITITOR-VULN-29-001 | Add metrics/logs for VEX normalization, suppression scopes, withdrawn statements; emit events consumed by Vuln Explorer resolver. | - -## Advisory AI (Sprint 31) - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| EXCITITOR-AIAI-31-001 `Justification enrichment` | TODO | Excititor WebService Guild | EXCITITOR-VULN-29-001 | Expose normalized VEX justifications, product trees, and paragraph anchors for Advisory AI conflict explanations. 
| -| EXCITITOR-AIAI-31-002 `VEX chunk API` | TODO | Excititor WebService Guild | EXCITITOR-AIAI-31-001, VEXLENS-30-006 | Provide `/vex/evidence/chunks` endpoint returning tenant-scoped VEX statements with signature metadata and scope scores for RAG. | -| EXCITITOR-AIAI-31-003 `Telemetry` | TODO | Excititor WebService Guild, Observability Guild | EXCITITOR-AIAI-31-001 | Emit metrics/logs for VEX chunk usage, signature verification failures, and guardrail triggers. | - -## Observability & Forensics (Epic 15) -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| EXCITITOR-WEB-OBS-50-001 `Telemetry adoption` | TODO | Excititor WebService Guild | TELEMETRY-OBS-50-001, EXCITITOR-OBS-50-001 | Adopt telemetry core for VEX APIs, ensure responses include trace IDs & correlation headers, and update structured logging for read endpoints. | -| EXCITITOR-WEB-OBS-51-001 `Observability health endpoints` | TODO | Excititor WebService Guild | EXCITITOR-WEB-OBS-50-001, WEB-OBS-51-001 | Implement `/obs/excititor/health` summarizing ingest/link SLOs, signature failure counts, and conflict trends for Console dashboards. | -| EXCITITOR-WEB-OBS-52-001 `Timeline streaming` | TODO | Excititor WebService Guild | EXCITITOR-WEB-OBS-50-001, TIMELINE-OBS-52-003 | Provide SSE bridge for VEX timeline events with tenant filters, pagination, and guardrails. | -| EXCITITOR-WEB-OBS-53-001 `Evidence APIs` | TODO | Excititor WebService Guild, Evidence Locker Guild | EXCITITOR-OBS-53-001, EVID-OBS-53-003 | Expose `/evidence/vex/*` endpoints that fetch locker bundles, enforce scopes, and surface verification metadata. | -| EXCITITOR-WEB-OBS-54-001 `Attestation APIs` | TODO | Excititor WebService Guild | EXCITITOR-OBS-54-001, PROV-OBS-54-001 | Add `/attestations/vex/*` endpoints returning DSSE verification state, builder identity, and chain-of-custody links. | -| EXCITITOR-WEB-OBS-55-001 `Incident mode toggles` | TODO | Excititor WebService Guild, DevOps Guild | EXCITITOR-OBS-55-001, WEB-OBS-55-001 | Provide incident mode API for VEX pipelines with activation audit logs and retention override previews. | - -## Air-Gapped Mode (Epic 16) -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| EXCITITOR-WEB-AIRGAP-56-001 | TODO | Excititor WebService Guild | AIRGAP-IMP-58-001, EXCITITOR-AIRGAP-56-001 | Support mirror bundle registration via APIs, expose bundle provenance in VEX responses, and block external connectors in sealed mode. | -| EXCITITOR-WEB-AIRGAP-56-002 | TODO | Excititor WebService Guild, AirGap Time Guild | EXCITITOR-WEB-AIRGAP-56-001, AIRGAP-TIME-58-001 | Return VEX staleness metrics and time anchor info in API responses for Console/CLI use. | -| EXCITITOR-WEB-AIRGAP-57-001 | TODO | Excititor WebService Guild, AirGap Policy Guild | AIRGAP-POL-56-001 | Map sealed-mode violations to standardized error payload with remediation guidance. | -| EXCITITOR-WEB-AIRGAP-58-001 | TODO | Excititor WebService Guild, AirGap Importer Guild | EXCITITOR-WEB-AIRGAP-56-001, TIMELINE-OBS-53-001 | Emit timeline events for VEX bundle imports with bundle ID, scope, and actor metadata. | - -## SDKs & OpenAPI (Epic 17) -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| EXCITITOR-WEB-OAS-61-001 | TODO | Excititor WebService Guild | OAS-61-001 | Implement `/.well-known/openapi` discovery endpoint with spec version metadata. 
| -| EXCITITOR-WEB-OAS-61-002 | TODO | Excititor WebService Guild | APIGOV-61-001 | Standardize error envelope responses and update controller/unit tests. | -| EXCITITOR-WEB-OAS-62-001 | TODO | Excititor WebService Guild | EXCITITOR-OAS-61-002 | Add curated examples for VEX observation/linkset endpoints and ensure portal displays them. | -| EXCITITOR-WEB-OAS-63-001 | TODO | Excititor WebService Guild, API Governance Guild | APIGOV-63-001 | Emit deprecation headers and update docs for retiring VEX APIs. | +# TASKS — Epic 1: Aggregation-Only Contract +> **AOC Reminder:** Excititor WebService publishes raw statements/linksets only; derived precedence/severity belongs to Policy overlays. +| ID | Status | Owner(s) | Depends on | Notes | +|---|---|---|---|---| +| EXCITITOR-WEB-AOC-19-001 `Raw VEX ingestion APIs` | TODO | Excititor WebService Guild | EXCITITOR-CORE-AOC-19-001, EXCITITOR-STORE-AOC-19-001 | Implement `POST /ingest/vex`, `GET /vex/raw*`, and `POST /aoc/verify` endpoints. Enforce Authority scopes, tenant injection, and guard pipeline to ensure only immutable VEX facts are persisted. | +> Docs alignment (2025-10-26): See AOC reference §4–5 and authority scopes doc for required tokens/behaviour. +| EXCITITOR-WEB-AOC-19-002 `AOC observability + metrics` | TODO | Excititor WebService Guild, Observability Guild | EXCITITOR-WEB-AOC-19-001 | Export metrics (`ingestion_write_total`, `aoc_violation_total`, signature verification counters) and tracing spans matching Conseiller naming. Ensure structured logging includes tenant, source vendor, upstream id, and content hash. | +> Docs alignment (2025-10-26): Metrics/traces/log schema in `docs/observability/observability.md`. +| EXCITITOR-WEB-AOC-19-003 `Guard + schema test harness` | TODO | QA Guild | EXCITITOR-WEB-AOC-19-001 | Add unit/integration tests for schema validation, forbidden field rejection (`ERR_AOC_001/006/007`), and supersedes behavior using CycloneDX-VEX & CSAF fixtures with deterministic expectations. | +> Docs alignment (2025-10-26): Error codes + CLI verification in `docs/cli/cli-reference.md`. +| EXCITITOR-WEB-AOC-19-004 `Batch ingest validation` | TODO | Excititor WebService Guild, QA Guild | EXCITITOR-WEB-AOC-19-003, EXCITITOR-CORE-AOC-19-002 | Build large fixture ingest covering mixed VEX statuses, verifying raw storage parity, metrics, and CLI `aoc verify` compatibility. Document load test/runbook updates. | +> Docs alignment (2025-10-26): Offline/air-gap workflows captured in `docs/deploy/containers.md` §5. + +## Policy Engine v2 + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| EXCITITOR-POLICY-20-001 `Policy selection endpoints` | TODO | Excititor WebService Guild | WEB-POLICY-20-001, EXCITITOR-CORE-AOC-19-004 | Provide VEX lookup APIs supporting PURL/advisory batching, scope filtering, and tenant enforcement with deterministic ordering + pagination. | + +## StellaOps Console (Sprint 23) + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| EXCITITOR-CONSOLE-23-001 `VEX aggregation views` | TODO | Excititor WebService Guild, BE-Base Platform Guild | EXCITITOR-LNM-21-201, EXCITITOR-LNM-21-202 | Expose `/console/vex` endpoints returning grouped VEX statements per advisory/component with status chips, justification metadata, precedence trace pointers, and tenant-scoped filters for Console explorer. 
| +| EXCITITOR-CONSOLE-23-002 `Dashboard VEX deltas` | TODO | Excititor WebService Guild | EXCITITOR-CONSOLE-23-001, EXCITITOR-LNM-21-203 | Provide aggregated counts for VEX overrides (new, not_affected, revoked) powering Console dashboard + live status ticker; emit metrics for policy explain integration. | +| EXCITITOR-CONSOLE-23-003 `VEX search helpers` | TODO | Excititor WebService Guild | EXCITITOR-CONSOLE-23-001 | Deliver rapid lookup endpoints of VEX by advisory/component for Console global search; ensure response includes provenance and precedence context; include caching and RBAC. | + +## Graph Explorer v1 + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| + +## Link-Not-Merge v1 + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| EXCITITOR-LNM-21-201 `Observation APIs` | TODO | Excititor WebService Guild, BE-Base Platform Guild | EXCITITOR-LNM-21-001 | Add VEX observation read endpoints with filters, pagination, RBAC, and tenant scoping. | +| EXCITITOR-LNM-21-202 `Linkset APIs` | TODO | Excititor WebService Guild | EXCITITOR-LNM-21-002, EXCITITOR-LNM-21-003 | Implement linkset read/export/evidence endpoints returning correlation/conflict payloads and map errors to `ERR_AGG_*`. | +| EXCITITOR-LNM-21-203 `Event publishing` | TODO | Excititor WebService Guild, Platform Events Guild | EXCITITOR-LNM-21-005 | Publish `vex.linkset.updated` events, document schema, and ensure idempotent delivery. | + +## Graph & Vuln Explorer v1 + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| EXCITITOR-GRAPH-24-101 `VEX summary API` | TODO | Excititor WebService Guild | EXCITITOR-GRAPH-24-001 | Provide endpoints delivering VEX status summaries per component/asset for Vuln Explorer integration. | +| EXCITITOR-GRAPH-24-102 `Evidence batch API` | TODO | Excititor WebService Guild | EXCITITOR-LNM-21-201 | Add batch VEX observation retrieval optimized for Graph overlays/tooltips. | + +## VEX Lens (Sprint 30) + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| EXCITITOR-VEXLENS-30-001 `VEX evidence enrichers` | TODO | Excititor WebService Guild, VEX Lens Guild | EXCITITOR-VULN-29-001, VEXLENS-30-005 | Include issuer hints, signatures, and product trees in evidence payloads for VEX Lens; Label: VEX-Lens. | + +## Vulnerability Explorer (Sprint 29) + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| EXCITITOR-VULN-29-001 `VEX key canonicalization` | TODO | Excititor WebService Guild | EXCITITOR-LNM-21-001 | Canonicalize (lossless) VEX advisory/product keys (map to `advisory_key`, capture product scopes); expose original sources in `links[]`; AOC-compliant: no merge, no derived fields, no suppression; backfill existing records. | +| EXCITITOR-VULN-29-002 `Evidence retrieval` | TODO | Excititor WebService Guild | EXCITITOR-VULN-29-001, VULN-API-29-003 | Provide `/vuln/evidence/vex/{advisory_key}` returning raw VEX statements filtered by tenant/product scope for Explorer evidence tabs. | +| EXCITITOR-VULN-29-004 `Observability` | TODO | Excititor WebService Guild, Observability Guild | EXCITITOR-VULN-29-001 | Add metrics/logs for VEX normalization, suppression scopes, withdrawn statements; emit events consumed by Vuln Explorer resolver. 
| + +## Advisory AI (Sprint 31) + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| EXCITITOR-AIAI-31-001 `Justification enrichment` | TODO | Excititor WebService Guild | EXCITITOR-VULN-29-001 | Expose normalized VEX justifications, product trees, and paragraph anchors for Advisory AI conflict explanations. | +| EXCITITOR-AIAI-31-002 `VEX chunk API` | TODO | Excititor WebService Guild | EXCITITOR-AIAI-31-001, VEXLENS-30-006 | Provide `/vex/evidence/chunks` endpoint returning tenant-scoped VEX statements with signature metadata and scope scores for RAG. | +| EXCITITOR-AIAI-31-003 `Telemetry` | TODO | Excititor WebService Guild, Observability Guild | EXCITITOR-AIAI-31-001 | Emit metrics/logs for VEX chunk usage, signature verification failures, and guardrail triggers. | + +## Observability & Forensics (Epic 15) +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| EXCITITOR-WEB-OBS-50-001 `Telemetry adoption` | TODO | Excititor WebService Guild | TELEMETRY-OBS-50-001, EXCITITOR-OBS-50-001 | Adopt telemetry core for VEX APIs, ensure responses include trace IDs & correlation headers, and update structured logging for read endpoints. | +| EXCITITOR-WEB-OBS-51-001 `Observability health endpoints` | TODO | Excititor WebService Guild | EXCITITOR-WEB-OBS-50-001, WEB-OBS-51-001 | Implement `/obs/excititor/health` summarizing ingest/link SLOs, signature failure counts, and conflict trends for Console dashboards. | +| EXCITITOR-WEB-OBS-52-001 `Timeline streaming` | TODO | Excititor WebService Guild | EXCITITOR-WEB-OBS-50-001, TIMELINE-OBS-52-003 | Provide SSE bridge for VEX timeline events with tenant filters, pagination, and guardrails. | +| EXCITITOR-WEB-OBS-53-001 `Evidence APIs` | TODO | Excititor WebService Guild, Evidence Locker Guild | EXCITITOR-OBS-53-001, EVID-OBS-53-003 | Expose `/evidence/vex/*` endpoints that fetch locker bundles, enforce scopes, and surface verification metadata. | +| EXCITITOR-WEB-OBS-54-001 `Attestation APIs` | TODO | Excititor WebService Guild | EXCITITOR-OBS-54-001, PROV-OBS-54-001 | Add `/attestations/vex/*` endpoints returning DSSE verification state, builder identity, and chain-of-custody links. | +| EXCITITOR-WEB-OBS-55-001 `Incident mode toggles` | TODO | Excititor WebService Guild, DevOps Guild | EXCITITOR-OBS-55-001, WEB-OBS-55-001 | Provide incident mode API for VEX pipelines with activation audit logs and retention override previews. | + +## Air-Gapped Mode (Epic 16) +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| EXCITITOR-WEB-AIRGAP-56-001 | TODO | Excititor WebService Guild | AIRGAP-IMP-58-001, EXCITITOR-AIRGAP-56-001 | Support mirror bundle registration via APIs, expose bundle provenance in VEX responses, and block external connectors in sealed mode. | +| EXCITITOR-WEB-AIRGAP-56-002 | TODO | Excititor WebService Guild, AirGap Time Guild | EXCITITOR-WEB-AIRGAP-56-001, AIRGAP-TIME-58-001 | Return VEX staleness metrics and time anchor info in API responses for Console/CLI use. | +| EXCITITOR-WEB-AIRGAP-57-001 | TODO | Excititor WebService Guild, AirGap Policy Guild | AIRGAP-POL-56-001 | Map sealed-mode violations to standardized error payload with remediation guidance. | +| EXCITITOR-WEB-AIRGAP-58-001 | TODO | Excititor WebService Guild, AirGap Importer Guild | EXCITITOR-WEB-AIRGAP-56-001, TIMELINE-OBS-53-001 | Emit timeline events for VEX bundle imports with bundle ID, scope, and actor metadata. 
| + +## SDKs & OpenAPI (Epic 17) +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| EXCITITOR-WEB-OAS-61-001 | TODO | Excititor WebService Guild | OAS-61-001 | Implement `/.well-known/openapi` discovery endpoint with spec version metadata. | +| EXCITITOR-WEB-OAS-61-002 | TODO | Excititor WebService Guild | APIGOV-61-001 | Standardize error envelope responses and update controller/unit tests. | +| EXCITITOR-WEB-OAS-62-001 | TODO | Excititor WebService Guild | EXCITITOR-OAS-61-002 | Add curated examples for VEX observation/linkset endpoints and ensure portal displays them. | +| EXCITITOR-WEB-OAS-63-001 | TODO | Excititor WebService Guild, API Governance Guild | APIGOV-63-001 | Emit deprecation headers and update docs for retiring VEX APIs. | diff --git a/src/StellaOps.Excititor.Worker/AGENTS.md b/src/Excititor/StellaOps.Excititor.Worker/AGENTS.md similarity index 100% rename from src/StellaOps.Excititor.Worker/AGENTS.md rename to src/Excititor/StellaOps.Excititor.Worker/AGENTS.md diff --git a/src/StellaOps.Excititor.Worker/Options/VexWorkerOptions.cs b/src/Excititor/StellaOps.Excititor.Worker/Options/VexWorkerOptions.cs similarity index 100% rename from src/StellaOps.Excititor.Worker/Options/VexWorkerOptions.cs rename to src/Excititor/StellaOps.Excititor.Worker/Options/VexWorkerOptions.cs diff --git a/src/StellaOps.Excititor.Worker/Options/VexWorkerOptionsValidator.cs b/src/Excititor/StellaOps.Excititor.Worker/Options/VexWorkerOptionsValidator.cs similarity index 100% rename from src/StellaOps.Excititor.Worker/Options/VexWorkerOptionsValidator.cs rename to src/Excititor/StellaOps.Excititor.Worker/Options/VexWorkerOptionsValidator.cs diff --git a/src/StellaOps.Excititor.Worker/Options/VexWorkerPluginOptions.cs b/src/Excititor/StellaOps.Excititor.Worker/Options/VexWorkerPluginOptions.cs similarity index 100% rename from src/StellaOps.Excititor.Worker/Options/VexWorkerPluginOptions.cs rename to src/Excititor/StellaOps.Excititor.Worker/Options/VexWorkerPluginOptions.cs diff --git a/src/StellaOps.Excititor.Worker/Options/VexWorkerRefreshOptions.cs b/src/Excititor/StellaOps.Excititor.Worker/Options/VexWorkerRefreshOptions.cs similarity index 96% rename from src/StellaOps.Excititor.Worker/Options/VexWorkerRefreshOptions.cs rename to src/Excititor/StellaOps.Excititor.Worker/Options/VexWorkerRefreshOptions.cs index e15a01c8..ae50384b 100644 --- a/src/StellaOps.Excititor.Worker/Options/VexWorkerRefreshOptions.cs +++ b/src/Excititor/StellaOps.Excititor.Worker/Options/VexWorkerRefreshOptions.cs @@ -1,90 +1,90 @@ -using System.Collections.Generic; -using System.Linq; - -namespace StellaOps.Excititor.Worker.Options; - -public sealed class VexWorkerRefreshOptions -{ - private static readonly TimeSpan DefaultScanInterval = TimeSpan.FromMinutes(10); - private static readonly TimeSpan DefaultConsensusTtl = TimeSpan.FromHours(2); - - public bool Enabled { get; set; } = true; - - public TimeSpan ScanInterval { get; set; } = DefaultScanInterval; - - public TimeSpan ConsensusTtl { get; set; } = DefaultConsensusTtl; - - public int ScanBatchSize { get; set; } = 250; - - public VexStabilityDamperOptions Damper { get; } = new(); -} - -public sealed class VexStabilityDamperOptions -{ - private static readonly TimeSpan DefaultMinimum = TimeSpan.FromHours(24); - private static readonly TimeSpan DefaultMaximum = TimeSpan.FromHours(48); - private static readonly TimeSpan DefaultDurationBaseline = TimeSpan.FromHours(36); - - public TimeSpan Minimum { get; set; } = 
DefaultMinimum; - - public TimeSpan Maximum { get; set; } = DefaultMaximum; - - public TimeSpan DefaultDuration { get; set; } = DefaultDurationBaseline; - - public IList<VexStabilityDamperRule> Rules { get; } = new List<VexStabilityDamperRule> - { - new() { MinWeight = 0.9, Duration = TimeSpan.FromHours(24) }, - new() { MinWeight = 0.75, Duration = TimeSpan.FromHours(30) }, - new() { MinWeight = 0.5, Duration = TimeSpan.FromHours(36) }, - }; - - internal TimeSpan ClampDuration(TimeSpan duration) - { - if (duration < Minimum) - { - return Minimum; - } - - if (duration > Maximum) - { - return Maximum; - } - - return duration; - } - - public TimeSpan ResolveDuration(double weight) - { - if (double.IsNaN(weight) || double.IsInfinity(weight) || weight < 0) - { - return ClampDuration(DefaultDuration); - } - - if (Rules.Count == 0) - { - return ClampDuration(DefaultDuration); - } - - // Evaluate highest weight threshold first. - TimeSpan? selected = null; - foreach (var rule in Rules.OrderByDescending(static r => r.MinWeight)) - { - if (weight >= rule.MinWeight) - { - selected = rule.Duration; - break; - } - } - - return ClampDuration(selected ?? DefaultDuration); - } -} - -public sealed class VexStabilityDamperRule -{ - public double MinWeight { get; set; } - = 1.0; - - public TimeSpan Duration { get; set; } - = TimeSpan.FromHours(24); -} +using System.Collections.Generic; +using System.Linq; + +namespace StellaOps.Excititor.Worker.Options; + +public sealed class VexWorkerRefreshOptions +{ + private static readonly TimeSpan DefaultScanInterval = TimeSpan.FromMinutes(10); + private static readonly TimeSpan DefaultConsensusTtl = TimeSpan.FromHours(2); + + public bool Enabled { get; set; } = true; + + public TimeSpan ScanInterval { get; set; } = DefaultScanInterval; + + public TimeSpan ConsensusTtl { get; set; } = DefaultConsensusTtl; + + public int ScanBatchSize { get; set; } = 250; + + public VexStabilityDamperOptions Damper { get; } = new(); +} + +public sealed class VexStabilityDamperOptions +{ + private static readonly TimeSpan DefaultMinimum = TimeSpan.FromHours(24); + private static readonly TimeSpan DefaultMaximum = TimeSpan.FromHours(48); + private static readonly TimeSpan DefaultDurationBaseline = TimeSpan.FromHours(36); + + public TimeSpan Minimum { get; set; } = DefaultMinimum; + + public TimeSpan Maximum { get; set; } = DefaultMaximum; + + public TimeSpan DefaultDuration { get; set; } = DefaultDurationBaseline; + + public IList<VexStabilityDamperRule> Rules { get; } = new List<VexStabilityDamperRule> + { + new() { MinWeight = 0.9, Duration = TimeSpan.FromHours(24) }, + new() { MinWeight = 0.75, Duration = TimeSpan.FromHours(30) }, + new() { MinWeight = 0.5, Duration = TimeSpan.FromHours(36) }, + }; + + internal TimeSpan ClampDuration(TimeSpan duration) + { + if (duration < Minimum) + { + return Minimum; + } + + if (duration > Maximum) + { + return Maximum; + } + + return duration; + } + + public TimeSpan ResolveDuration(double weight) + { + if (double.IsNaN(weight) || double.IsInfinity(weight) || weight < 0) + { + return ClampDuration(DefaultDuration); + } + + if (Rules.Count == 0) + { + return ClampDuration(DefaultDuration); + } + + // Evaluate highest weight threshold first. + TimeSpan? selected = null; + foreach (var rule in Rules.OrderByDescending(static r => r.MinWeight)) + { + if (weight >= rule.MinWeight) + { + selected = rule.Duration; + break; + } + } + + return ClampDuration(selected ?? 
DefaultDuration); + } +} + +public sealed class VexStabilityDamperRule +{ + public double MinWeight { get; set; } + = 1.0; + + public TimeSpan Duration { get; set; } + = TimeSpan.FromHours(24); +} diff --git a/src/StellaOps.Excititor.Worker/Options/VexWorkerRetryOptions.cs b/src/Excititor/StellaOps.Excititor.Worker/Options/VexWorkerRetryOptions.cs similarity index 100% rename from src/StellaOps.Excititor.Worker/Options/VexWorkerRetryOptions.cs rename to src/Excititor/StellaOps.Excititor.Worker/Options/VexWorkerRetryOptions.cs diff --git a/src/StellaOps.Excititor.Worker/Program.cs b/src/Excititor/StellaOps.Excititor.Worker/Program.cs similarity index 100% rename from src/StellaOps.Excititor.Worker/Program.cs rename to src/Excititor/StellaOps.Excititor.Worker/Program.cs diff --git a/src/StellaOps.Excititor.Worker/Properties/AssemblyInfo.cs b/src/Excititor/StellaOps.Excititor.Worker/Properties/AssemblyInfo.cs similarity index 100% rename from src/StellaOps.Excititor.Worker/Properties/AssemblyInfo.cs rename to src/Excititor/StellaOps.Excititor.Worker/Properties/AssemblyInfo.cs diff --git a/src/StellaOps.Excititor.Worker/Scheduling/DefaultVexProviderRunner.cs b/src/Excititor/StellaOps.Excititor.Worker/Scheduling/DefaultVexProviderRunner.cs similarity index 97% rename from src/StellaOps.Excititor.Worker/Scheduling/DefaultVexProviderRunner.cs rename to src/Excititor/StellaOps.Excititor.Worker/Scheduling/DefaultVexProviderRunner.cs index ce8a7577..104ecdc5 100644 --- a/src/StellaOps.Excititor.Worker/Scheduling/DefaultVexProviderRunner.cs +++ b/src/Excititor/StellaOps.Excititor.Worker/Scheduling/DefaultVexProviderRunner.cs @@ -1,271 +1,271 @@ -using System; -using System.Collections.Immutable; -using System.Linq; -using System.Security.Cryptography; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using MongoDB.Driver; -using StellaOps.Plugin; -using StellaOps.Excititor.Connectors.Abstractions; -using StellaOps.Excititor.Core; -using StellaOps.Excititor.Storage.Mongo; -using StellaOps.Excititor.Worker.Options; -using StellaOps.Excititor.Worker.Signature; - -namespace StellaOps.Excititor.Worker.Scheduling; - -internal sealed class DefaultVexProviderRunner : IVexProviderRunner -{ - private readonly IServiceProvider _serviceProvider; - private readonly PluginCatalog _pluginCatalog; - private readonly ILogger<DefaultVexProviderRunner> _logger; - private readonly TimeProvider _timeProvider; - private readonly VexWorkerRetryOptions _retryOptions; - - public DefaultVexProviderRunner( - IServiceProvider serviceProvider, - PluginCatalog pluginCatalog, - ILogger<DefaultVexProviderRunner> logger, - TimeProvider timeProvider, - IOptions<VexWorkerOptions> workerOptions) - { - _serviceProvider = serviceProvider ?? throw new ArgumentNullException(nameof(serviceProvider)); - _pluginCatalog = pluginCatalog ?? throw new ArgumentNullException(nameof(pluginCatalog)); - _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); - if (workerOptions is null) - { - throw new ArgumentNullException(nameof(workerOptions)); - } - - _retryOptions = workerOptions.Value?.Retry ?? 
throw new InvalidOperationException("VexWorkerOptions.Retry must be configured."); - } - - public async ValueTask RunAsync(VexWorkerSchedule schedule, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(schedule); - ArgumentException.ThrowIfNullOrWhiteSpace(schedule.ProviderId); - - using var scope = _serviceProvider.CreateScope(); - var availablePlugins = _pluginCatalog.GetAvailableConnectorPlugins(scope.ServiceProvider); - var matched = availablePlugins.FirstOrDefault(plugin => - string.Equals(plugin.Name, schedule.ProviderId, StringComparison.OrdinalIgnoreCase)); - - if (matched is not null) - { - _logger.LogInformation( - "Connector plugin {PluginName} ({ProviderId}) is available. Execution hooks will be added in subsequent tasks.", - matched.Name, - schedule.ProviderId); - } - else - { - _logger.LogInformation("No legacy connector plugin registered for provider {ProviderId}; falling back to DI-managed connectors.", schedule.ProviderId); - } - - var connectors = scope.ServiceProvider.GetServices<IVexConnector>(); - var connector = connectors.FirstOrDefault(c => string.Equals(c.Id, schedule.ProviderId, StringComparison.OrdinalIgnoreCase)); - - if (connector is null) - { - _logger.LogWarning("No IVexConnector implementation registered for provider {ProviderId}; skipping run.", schedule.ProviderId); - return; - } - - await ExecuteConnectorAsync(scope.ServiceProvider, connector, schedule.Settings, cancellationToken).ConfigureAwait(false); - } - - private async Task ExecuteConnectorAsync(IServiceProvider scopeProvider, IVexConnector connector, VexConnectorSettings settings, CancellationToken cancellationToken) - { - var effectiveSettings = settings ?? VexConnectorSettings.Empty; - var rawStore = scopeProvider.GetRequiredService<IVexRawStore>(); - var providerStore = scopeProvider.GetRequiredService<IVexProviderStore>(); - var stateRepository = scopeProvider.GetRequiredService<IVexConnectorStateRepository>(); - var normalizerRouter = scopeProvider.GetRequiredService<IVexNormalizerRouter>(); - var signatureVerifier = scopeProvider.GetRequiredService<IVexSignatureVerifier>(); - var sessionProvider = scopeProvider.GetService<IVexMongoSessionProvider>(); - IClientSessionHandle? session = null; - if (sessionProvider is not null) - { - session = await sessionProvider.StartSessionAsync(cancellationToken).ConfigureAwait(false); - } - - var descriptor = connector switch - { - VexConnectorBase baseConnector => baseConnector.Descriptor, - _ => new VexConnectorDescriptor(connector.Id, VexProviderKind.Vendor, connector.Id) - }; - - var provider = await providerStore.FindAsync(descriptor.Id, cancellationToken, session).ConfigureAwait(false) - ?? 
new VexProvider(descriptor.Id, descriptor.DisplayName, descriptor.Kind); - - await providerStore.SaveAsync(provider, cancellationToken, session).ConfigureAwait(false); - - var stateBeforeRun = await stateRepository.GetAsync(descriptor.Id, cancellationToken, session).ConfigureAwait(false); - var now = _timeProvider.GetUtcNow(); - - if (stateBeforeRun?.NextEligibleRun is { } nextEligible && nextEligible > now) - { - _logger.LogInformation( - "Connector {ConnectorId} is in backoff until {NextEligible:O}; skipping run.", - connector.Id, - nextEligible); - return; - } - - await connector.ValidateAsync(effectiveSettings, cancellationToken).ConfigureAwait(false); - - var verifyingSink = new VerifyingVexRawDocumentSink(rawStore, signatureVerifier); - - var context = new VexConnectorContext( - Since: stateBeforeRun?.LastUpdated, - Settings: effectiveSettings, - RawSink: verifyingSink, - SignatureVerifier: signatureVerifier, - Normalizers: normalizerRouter, - Services: scopeProvider, - ResumeTokens: stateBeforeRun?.ResumeTokens ?? ImmutableDictionary<string, string>.Empty); - - var documentCount = 0; - - try - { - await foreach (var document in connector.FetchAsync(context, cancellationToken).ConfigureAwait(false)) - { - documentCount++; - } - - _logger.LogInformation( - "Connector {ConnectorId} persisted {DocumentCount} raw document(s) this run.", - connector.Id, - documentCount); - - await UpdateSuccessStateAsync(stateRepository, descriptor.Id, _timeProvider.GetUtcNow(), cancellationToken).ConfigureAwait(false); - } - catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) - { - throw; - } - catch (Exception ex) - { - await UpdateFailureStateAsync(stateRepository, descriptor.Id, _timeProvider.GetUtcNow(), ex, cancellationToken).ConfigureAwait(false); - throw; - } - } - - private async Task UpdateSuccessStateAsync( - IVexConnectorStateRepository stateRepository, - string connectorId, - DateTimeOffset completedAt, - CancellationToken cancellationToken) - { - var current = await stateRepository.GetAsync(connectorId, cancellationToken).ConfigureAwait(false) - ?? new VexConnectorState(connectorId, null, ImmutableArray<string>.Empty); - - var updated = current with - { - LastSuccessAt = completedAt, - FailureCount = 0, - NextEligibleRun = null, - LastFailureReason = null - }; - - await stateRepository.SaveAsync(updated, cancellationToken).ConfigureAwait(false); - } - - private async Task UpdateFailureStateAsync( - IVexConnectorStateRepository stateRepository, - string connectorId, - DateTimeOffset failureTime, - Exception exception, - CancellationToken cancellationToken) - { - var current = await stateRepository.GetAsync(connectorId, cancellationToken).ConfigureAwait(false) - ?? 
new VexConnectorState(connectorId, null, ImmutableArray<string>.Empty); - - var failureCount = current.FailureCount + 1; - var delay = CalculateDelayWithJitter(failureCount); - var nextEligible = failureTime + delay; - - if (failureCount >= _retryOptions.FailureThreshold) - { - var quarantineUntil = failureTime + _retryOptions.QuarantineDuration; - if (quarantineUntil > nextEligible) - { - nextEligible = quarantineUntil; - } - } - - var retryCap = failureTime + _retryOptions.RetryCap; - if (nextEligible > retryCap) - { - nextEligible = retryCap; - } - - if (nextEligible < failureTime) - { - nextEligible = failureTime; - } - - var updated = current with - { - FailureCount = failureCount, - NextEligibleRun = nextEligible, - LastFailureReason = Truncate(exception.Message, 512) - }; - - await stateRepository.SaveAsync(updated, cancellationToken).ConfigureAwait(false); - - _logger.LogWarning( - exception, - "Connector {ConnectorId} failed (attempt {Attempt}). Next eligible run at {NextEligible:O}.", - connectorId, - failureCount, - nextEligible); - } - - private TimeSpan CalculateDelayWithJitter(int failureCount) - { - var exponent = Math.Max(0, failureCount - 1); - var factor = Math.Pow(2, exponent); - var baselineTicks = (long)Math.Min(_retryOptions.BaseDelay.Ticks * factor, _retryOptions.MaxDelay.Ticks); - - if (_retryOptions.JitterRatio <= 0) - { - return TimeSpan.FromTicks(baselineTicks); - } - - var minFactor = 1.0 - _retryOptions.JitterRatio; - var maxFactor = 1.0 + _retryOptions.JitterRatio; - Span<byte> buffer = stackalloc byte[8]; - RandomNumberGenerator.Fill(buffer); - var sample = BitConverter.ToUInt64(buffer) / (double)ulong.MaxValue; - var jitterFactor = minFactor + (maxFactor - minFactor) * sample; - var jitteredTicks = (long)Math.Round(baselineTicks * jitterFactor); - - if (jitteredTicks < _retryOptions.BaseDelay.Ticks) - { - jitteredTicks = _retryOptions.BaseDelay.Ticks; - } - - if (jitteredTicks > _retryOptions.MaxDelay.Ticks) - { - jitteredTicks = _retryOptions.MaxDelay.Ticks; - } - - return TimeSpan.FromTicks(jitteredTicks); - } - - private static string Truncate(string? value, int maxLength) - { - if (string.IsNullOrEmpty(value)) - { - return string.Empty; - } - - return value.Length <= maxLength - ? value - : value[..maxLength]; - } -} +using System; +using System.Collections.Immutable; +using System.Linq; +using System.Security.Cryptography; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using MongoDB.Driver; +using StellaOps.Plugin; +using StellaOps.Excititor.Connectors.Abstractions; +using StellaOps.Excititor.Core; +using StellaOps.Excititor.Storage.Mongo; +using StellaOps.Excititor.Worker.Options; +using StellaOps.Excititor.Worker.Signature; + +namespace StellaOps.Excititor.Worker.Scheduling; + +internal sealed class DefaultVexProviderRunner : IVexProviderRunner +{ + private readonly IServiceProvider _serviceProvider; + private readonly PluginCatalog _pluginCatalog; + private readonly ILogger<DefaultVexProviderRunner> _logger; + private readonly TimeProvider _timeProvider; + private readonly VexWorkerRetryOptions _retryOptions; + + public DefaultVexProviderRunner( + IServiceProvider serviceProvider, + PluginCatalog pluginCatalog, + ILogger<DefaultVexProviderRunner> logger, + TimeProvider timeProvider, + IOptions<VexWorkerOptions> workerOptions) + { + _serviceProvider = serviceProvider ?? throw new ArgumentNullException(nameof(serviceProvider)); + _pluginCatalog = pluginCatalog ?? 
throw new ArgumentNullException(nameof(pluginCatalog)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + if (workerOptions is null) + { + throw new ArgumentNullException(nameof(workerOptions)); + } + + _retryOptions = workerOptions.Value?.Retry ?? throw new InvalidOperationException("VexWorkerOptions.Retry must be configured."); + } + + public async ValueTask RunAsync(VexWorkerSchedule schedule, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(schedule); + ArgumentException.ThrowIfNullOrWhiteSpace(schedule.ProviderId); + + using var scope = _serviceProvider.CreateScope(); + var availablePlugins = _pluginCatalog.GetAvailableConnectorPlugins(scope.ServiceProvider); + var matched = availablePlugins.FirstOrDefault(plugin => + string.Equals(plugin.Name, schedule.ProviderId, StringComparison.OrdinalIgnoreCase)); + + if (matched is not null) + { + _logger.LogInformation( + "Connector plugin {PluginName} ({ProviderId}) is available. Execution hooks will be added in subsequent tasks.", + matched.Name, + schedule.ProviderId); + } + else + { + _logger.LogInformation("No legacy connector plugin registered for provider {ProviderId}; falling back to DI-managed connectors.", schedule.ProviderId); + } + + var connectors = scope.ServiceProvider.GetServices<IVexConnector>(); + var connector = connectors.FirstOrDefault(c => string.Equals(c.Id, schedule.ProviderId, StringComparison.OrdinalIgnoreCase)); + + if (connector is null) + { + _logger.LogWarning("No IVexConnector implementation registered for provider {ProviderId}; skipping run.", schedule.ProviderId); + return; + } + + await ExecuteConnectorAsync(scope.ServiceProvider, connector, schedule.Settings, cancellationToken).ConfigureAwait(false); + } + + private async Task ExecuteConnectorAsync(IServiceProvider scopeProvider, IVexConnector connector, VexConnectorSettings settings, CancellationToken cancellationToken) + { + var effectiveSettings = settings ?? VexConnectorSettings.Empty; + var rawStore = scopeProvider.GetRequiredService<IVexRawStore>(); + var providerStore = scopeProvider.GetRequiredService<IVexProviderStore>(); + var stateRepository = scopeProvider.GetRequiredService<IVexConnectorStateRepository>(); + var normalizerRouter = scopeProvider.GetRequiredService<IVexNormalizerRouter>(); + var signatureVerifier = scopeProvider.GetRequiredService<IVexSignatureVerifier>(); + var sessionProvider = scopeProvider.GetService<IVexMongoSessionProvider>(); + IClientSessionHandle? session = null; + if (sessionProvider is not null) + { + session = await sessionProvider.StartSessionAsync(cancellationToken).ConfigureAwait(false); + } + + var descriptor = connector switch + { + VexConnectorBase baseConnector => baseConnector.Descriptor, + _ => new VexConnectorDescriptor(connector.Id, VexProviderKind.Vendor, connector.Id) + }; + + var provider = await providerStore.FindAsync(descriptor.Id, cancellationToken, session).ConfigureAwait(false) + ?? 
new VexProvider(descriptor.Id, descriptor.DisplayName, descriptor.Kind); + + await providerStore.SaveAsync(provider, cancellationToken, session).ConfigureAwait(false); + + var stateBeforeRun = await stateRepository.GetAsync(descriptor.Id, cancellationToken, session).ConfigureAwait(false); + var now = _timeProvider.GetUtcNow(); + + if (stateBeforeRun?.NextEligibleRun is { } nextEligible && nextEligible > now) + { + _logger.LogInformation( + "Connector {ConnectorId} is in backoff until {NextEligible:O}; skipping run.", + connector.Id, + nextEligible); + return; + } + + await connector.ValidateAsync(effectiveSettings, cancellationToken).ConfigureAwait(false); + + var verifyingSink = new VerifyingVexRawDocumentSink(rawStore, signatureVerifier); + + var context = new VexConnectorContext( + Since: stateBeforeRun?.LastUpdated, + Settings: effectiveSettings, + RawSink: verifyingSink, + SignatureVerifier: signatureVerifier, + Normalizers: normalizerRouter, + Services: scopeProvider, + ResumeTokens: stateBeforeRun?.ResumeTokens ?? ImmutableDictionary<string, string>.Empty); + + var documentCount = 0; + + try + { + await foreach (var document in connector.FetchAsync(context, cancellationToken).ConfigureAwait(false)) + { + documentCount++; + } + + _logger.LogInformation( + "Connector {ConnectorId} persisted {DocumentCount} raw document(s) this run.", + connector.Id, + documentCount); + + await UpdateSuccessStateAsync(stateRepository, descriptor.Id, _timeProvider.GetUtcNow(), cancellationToken).ConfigureAwait(false); + } + catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) + { + throw; + } + catch (Exception ex) + { + await UpdateFailureStateAsync(stateRepository, descriptor.Id, _timeProvider.GetUtcNow(), ex, cancellationToken).ConfigureAwait(false); + throw; + } + } + + private async Task UpdateSuccessStateAsync( + IVexConnectorStateRepository stateRepository, + string connectorId, + DateTimeOffset completedAt, + CancellationToken cancellationToken) + { + var current = await stateRepository.GetAsync(connectorId, cancellationToken).ConfigureAwait(false) + ?? new VexConnectorState(connectorId, null, ImmutableArray<string>.Empty); + + var updated = current with + { + LastSuccessAt = completedAt, + FailureCount = 0, + NextEligibleRun = null, + LastFailureReason = null + }; + + await stateRepository.SaveAsync(updated, cancellationToken).ConfigureAwait(false); + } + + private async Task UpdateFailureStateAsync( + IVexConnectorStateRepository stateRepository, + string connectorId, + DateTimeOffset failureTime, + Exception exception, + CancellationToken cancellationToken) + { + var current = await stateRepository.GetAsync(connectorId, cancellationToken).ConfigureAwait(false) + ?? 
new VexConnectorState(connectorId, null, ImmutableArray<string>.Empty); + + var failureCount = current.FailureCount + 1; + var delay = CalculateDelayWithJitter(failureCount); + var nextEligible = failureTime + delay; + + if (failureCount >= _retryOptions.FailureThreshold) + { + var quarantineUntil = failureTime + _retryOptions.QuarantineDuration; + if (quarantineUntil > nextEligible) + { + nextEligible = quarantineUntil; + } + } + + var retryCap = failureTime + _retryOptions.RetryCap; + if (nextEligible > retryCap) + { + nextEligible = retryCap; + } + + if (nextEligible < failureTime) + { + nextEligible = failureTime; + } + + var updated = current with + { + FailureCount = failureCount, + NextEligibleRun = nextEligible, + LastFailureReason = Truncate(exception.Message, 512) + }; + + await stateRepository.SaveAsync(updated, cancellationToken).ConfigureAwait(false); + + _logger.LogWarning( + exception, + "Connector {ConnectorId} failed (attempt {Attempt}). Next eligible run at {NextEligible:O}.", + connectorId, + failureCount, + nextEligible); + } + + private TimeSpan CalculateDelayWithJitter(int failureCount) + { + var exponent = Math.Max(0, failureCount - 1); + var factor = Math.Pow(2, exponent); + var baselineTicks = (long)Math.Min(_retryOptions.BaseDelay.Ticks * factor, _retryOptions.MaxDelay.Ticks); + + if (_retryOptions.JitterRatio <= 0) + { + return TimeSpan.FromTicks(baselineTicks); + } + + var minFactor = 1.0 - _retryOptions.JitterRatio; + var maxFactor = 1.0 + _retryOptions.JitterRatio; + Span<byte> buffer = stackalloc byte[8]; + RandomNumberGenerator.Fill(buffer); + var sample = BitConverter.ToUInt64(buffer) / (double)ulong.MaxValue; + var jitterFactor = minFactor + (maxFactor - minFactor) * sample; + var jitteredTicks = (long)Math.Round(baselineTicks * jitterFactor); + + if (jitteredTicks < _retryOptions.BaseDelay.Ticks) + { + jitteredTicks = _retryOptions.BaseDelay.Ticks; + } + + if (jitteredTicks > _retryOptions.MaxDelay.Ticks) + { + jitteredTicks = _retryOptions.MaxDelay.Ticks; + } + + return TimeSpan.FromTicks(jitteredTicks); + } + + private static string Truncate(string? value, int maxLength) + { + if (string.IsNullOrEmpty(value)) + { + return string.Empty; + } + + return value.Length <= maxLength + ? 
value + : value[..maxLength]; + } +} diff --git a/src/StellaOps.Excititor.Worker/Scheduling/IVexConsensusRefreshScheduler.cs b/src/Excititor/StellaOps.Excititor.Worker/Scheduling/IVexConsensusRefreshScheduler.cs similarity index 96% rename from src/StellaOps.Excititor.Worker/Scheduling/IVexConsensusRefreshScheduler.cs rename to src/Excititor/StellaOps.Excititor.Worker/Scheduling/IVexConsensusRefreshScheduler.cs index 0c16298c..b12a315d 100644 --- a/src/StellaOps.Excititor.Worker/Scheduling/IVexConsensusRefreshScheduler.cs +++ b/src/Excititor/StellaOps.Excititor.Worker/Scheduling/IVexConsensusRefreshScheduler.cs @@ -1,6 +1,6 @@ -namespace StellaOps.Excititor.Worker.Scheduling; - -public interface IVexConsensusRefreshScheduler -{ - void ScheduleRefresh(string vulnerabilityId, string productKey); -} +namespace StellaOps.Excititor.Worker.Scheduling; + +public interface IVexConsensusRefreshScheduler +{ + void ScheduleRefresh(string vulnerabilityId, string productKey); +} diff --git a/src/StellaOps.Excititor.Worker/Scheduling/IVexProviderRunner.cs b/src/Excititor/StellaOps.Excititor.Worker/Scheduling/IVexProviderRunner.cs similarity index 100% rename from src/StellaOps.Excititor.Worker/Scheduling/IVexProviderRunner.cs rename to src/Excititor/StellaOps.Excititor.Worker/Scheduling/IVexProviderRunner.cs diff --git a/src/StellaOps.Excititor.Worker/Scheduling/VexConsensusRefreshService.cs b/src/Excititor/StellaOps.Excititor.Worker/Scheduling/VexConsensusRefreshService.cs similarity index 97% rename from src/StellaOps.Excititor.Worker/Scheduling/VexConsensusRefreshService.cs rename to src/Excititor/StellaOps.Excititor.Worker/Scheduling/VexConsensusRefreshService.cs index 4cda1385..b48db610 100644 --- a/src/StellaOps.Excititor.Worker/Scheduling/VexConsensusRefreshService.cs +++ b/src/Excititor/StellaOps.Excititor.Worker/Scheduling/VexConsensusRefreshService.cs @@ -1,622 +1,622 @@ -using System.Collections.Concurrent; -using System.Collections.Immutable; -using System.Linq; -using System.Runtime.CompilerServices; -using System.Threading.Channels; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Hosting; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Excititor.Core; -using StellaOps.Excititor.Policy; -using StellaOps.Excititor.Storage.Mongo; -using StellaOps.Excititor.Worker.Options; - -namespace StellaOps.Excititor.Worker.Scheduling; - -internal sealed class VexConsensusRefreshService : BackgroundService, IVexConsensusRefreshScheduler -{ - private readonly IServiceScopeFactory _scopeFactory; - private readonly ILogger<VexConsensusRefreshService> _logger; - private readonly TimeProvider _timeProvider; - private readonly Channel<RefreshRequest> _refreshRequests; - private readonly ConcurrentDictionary<string, byte> _scheduledKeys = new(StringComparer.Ordinal); - private readonly IDisposable? _optionsSubscription; - private RefreshState _refreshState; - - public VexConsensusRefreshService( - IServiceScopeFactory scopeFactory, - IOptionsMonitor<VexWorkerOptions> optionsMonitor, - ILogger<VexConsensusRefreshService> logger, - TimeProvider timeProvider) - { - _scopeFactory = scopeFactory ?? throw new ArgumentNullException(nameof(scopeFactory)); - _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - _timeProvider = timeProvider ?? 
throw new ArgumentNullException(nameof(timeProvider)); - _refreshRequests = Channel.CreateUnbounded<RefreshRequest>(new UnboundedChannelOptions - { - AllowSynchronousContinuations = false, - SingleReader = true, - SingleWriter = false, - }); - - if (optionsMonitor is null) - { - throw new ArgumentNullException(nameof(optionsMonitor)); - } - - var options = optionsMonitor.CurrentValue; - _refreshState = RefreshState.FromOptions(options.Refresh); - _optionsSubscription = optionsMonitor.OnChange(o => - { - var state = RefreshState.FromOptions((o?.Refresh) ?? new VexWorkerRefreshOptions()); - Volatile.Write(ref _refreshState, state); - _logger.LogInformation( - "Consensus refresh options updated: enabled={Enabled}, interval={Interval}, ttl={Ttl}, batch={Batch}", - state.Enabled, - state.ScanInterval, - state.ConsensusTtl, - state.ScanBatchSize); - }); - } - - public override void Dispose() - { - _optionsSubscription?.Dispose(); - base.Dispose(); - } - - public void ScheduleRefresh(string vulnerabilityId, string productKey) - { - if (string.IsNullOrWhiteSpace(vulnerabilityId) || string.IsNullOrWhiteSpace(productKey)) - { - return; - } - - var key = BuildKey(vulnerabilityId, productKey); - if (!_scheduledKeys.TryAdd(key, 0)) - { - return; - } - - var request = new RefreshRequest(vulnerabilityId.Trim(), productKey.Trim()); - if (!_refreshRequests.Writer.TryWrite(request)) - { - _scheduledKeys.TryRemove(key, out _); - } - } - - protected override async Task ExecuteAsync(CancellationToken stoppingToken) - { - var queueTask = ProcessQueueAsync(stoppingToken); - - try - { - while (!stoppingToken.IsCancellationRequested) - { - var options = CurrentOptions; - - try - { - await ProcessEligibleHoldsAsync(options, stoppingToken).ConfigureAwait(false); - if (options.Enabled) - { - await ProcessTtlRefreshAsync(options, stoppingToken).ConfigureAwait(false); - } - else - { - _logger.LogDebug("Consensus refresh disabled; skipping TTL sweep."); - } - } - catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested) - { - break; - } - catch (Exception ex) - { - _logger.LogError(ex, "Consensus refresh loop failed."); - } - - try - { - await Task.Delay(options.ScanInterval, stoppingToken).ConfigureAwait(false); - } - catch (OperationCanceledException) - { - break; - } - } - } - finally - { - _refreshRequests.Writer.TryComplete(); - try - { - await queueTask.ConfigureAwait(false); - } - catch (OperationCanceledException) - { - } - } - } - - private RefreshState CurrentOptions => Volatile.Read(ref _refreshState); - - private async Task ProcessQueueAsync(CancellationToken cancellationToken) - { - try - { - while (await _refreshRequests.Reader.WaitToReadAsync(cancellationToken).ConfigureAwait(false)) - { - while (_refreshRequests.Reader.TryRead(out var request)) - { - var key = BuildKey(request.VulnerabilityId, request.ProductKey); - try - { - await ProcessCandidateAsync(request.VulnerabilityId, request.ProductKey, existingConsensus: null, CurrentOptions, cancellationToken).ConfigureAwait(false); - } - catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) - { - return; - } - catch (Exception ex) - { - _logger.LogError(ex, "Failed to refresh consensus for {VulnerabilityId}/{ProductKey} from queue.", request.VulnerabilityId, request.ProductKey); - } - finally - { - _scheduledKeys.TryRemove(key, out _); - } - } - } - } - catch (OperationCanceledException) - { - } - } - - private async Task ProcessEligibleHoldsAsync(RefreshState options, CancellationToken cancellationToken) 
- { - using var scope = _scopeFactory.CreateScope(); - var holdStore = scope.ServiceProvider.GetRequiredService<IVexConsensusHoldStore>(); - var consensusStore = scope.ServiceProvider.GetRequiredService<IVexConsensusStore>(); - - var now = _timeProvider.GetUtcNow(); - await foreach (var hold in holdStore.FindEligibleAsync(now, options.ScanBatchSize, cancellationToken).ConfigureAwait(false)) - { - var key = BuildKey(hold.VulnerabilityId, hold.ProductKey); - if (!_scheduledKeys.TryAdd(key, 0)) - { - continue; - } - - try - { - await consensusStore.SaveAsync(hold.Candidate with { }, cancellationToken).ConfigureAwait(false); - await holdStore.RemoveAsync(hold.VulnerabilityId, hold.ProductKey, cancellationToken).ConfigureAwait(false); - _logger.LogInformation( - "Promoted consensus hold for {VulnerabilityId}/{ProductKey}; status={Status}, reason={Reason}", - hold.VulnerabilityId, - hold.ProductKey, - hold.Candidate.Status, - hold.Reason); - } - catch (Exception ex) when (!cancellationToken.IsCancellationRequested) - { - _logger.LogError( - ex, - "Failed to promote consensus hold for {VulnerabilityId}/{ProductKey}.", - hold.VulnerabilityId, - hold.ProductKey); - } - finally - { - _scheduledKeys.TryRemove(key, out _); - } - } - } - - private async Task ProcessTtlRefreshAsync(RefreshState options, CancellationToken cancellationToken) - { - var now = _timeProvider.GetUtcNow(); - var cutoff = now - options.ConsensusTtl; - - using var scope = _scopeFactory.CreateScope(); - var consensusStore = scope.ServiceProvider.GetRequiredService<IVexConsensusStore>(); - - await foreach (var consensus in consensusStore.FindCalculatedBeforeAsync(cutoff, options.ScanBatchSize, cancellationToken).ConfigureAwait(false)) - { - var key = BuildKey(consensus.VulnerabilityId, consensus.Product.Key); - if (!_scheduledKeys.TryAdd(key, 0)) - { - continue; - } - - try - { - await ProcessCandidateAsync(consensus.VulnerabilityId, consensus.Product.Key, consensus, options, cancellationToken).ConfigureAwait(false); - } - catch (Exception ex) when (!cancellationToken.IsCancellationRequested) - { - _logger.LogError( - ex, - "Failed to refresh consensus for {VulnerabilityId}/{ProductKey} during TTL sweep.", - consensus.VulnerabilityId, - consensus.Product.Key); - } - finally - { - _scheduledKeys.TryRemove(key, out _); - } - } - } - - private async Task ProcessCandidateAsync( - string vulnerabilityId, - string productKey, - VexConsensus? 
existingConsensus, - RefreshState options, - CancellationToken cancellationToken) - { - using var scope = _scopeFactory.CreateScope(); - var consensusStore = scope.ServiceProvider.GetRequiredService<IVexConsensusStore>(); - var holdStore = scope.ServiceProvider.GetRequiredService<IVexConsensusHoldStore>(); - var claimStore = scope.ServiceProvider.GetRequiredService<IVexClaimStore>(); - var providerStore = scope.ServiceProvider.GetRequiredService<IVexProviderStore>(); - var policyProvider = scope.ServiceProvider.GetRequiredService<IVexPolicyProvider>(); - - existingConsensus ??= await consensusStore.FindAsync(vulnerabilityId, productKey, cancellationToken).ConfigureAwait(false); - - var claims = await claimStore.FindAsync(vulnerabilityId, productKey, since: null, cancellationToken).ConfigureAwait(false); - if (claims.Count == 0) - { - _logger.LogDebug("No claims found for {VulnerabilityId}/{ProductKey}; skipping consensus refresh.", vulnerabilityId, productKey); - await holdStore.RemoveAsync(vulnerabilityId, productKey, cancellationToken).ConfigureAwait(false); - return; - } - - var claimList = claims as IReadOnlyList<VexClaim> ?? claims.ToList(); - - var snapshot = policyProvider.GetSnapshot(); - var providerCache = new Dictionary<string, VexProvider>(StringComparer.Ordinal); - var providers = await LoadProvidersAsync(claimList, providerStore, providerCache, cancellationToken).ConfigureAwait(false); - var product = ResolveProduct(claimList, productKey); - var calculatedAt = _timeProvider.GetUtcNow(); - - var resolver = new VexConsensusResolver(snapshot.ConsensusPolicy); - var request = new VexConsensusRequest( - vulnerabilityId, - product, - claimList.ToArray(), - providers, - calculatedAt, - snapshot.ConsensusOptions.WeightCeiling, - AggregateSignals(claimList), - snapshot.RevisionId, - snapshot.Digest); - - var resolution = resolver.Resolve(request); - var candidate = NormalizePolicyMetadata(resolution.Consensus, snapshot); - - await ApplyConsensusAsync( - candidate, - existingConsensus, - holdStore, - consensusStore, - options.Damper, - options, - cancellationToken).ConfigureAwait(false); - } - - private async Task ApplyConsensusAsync( - VexConsensus candidate, - VexConsensus? existing, - IVexConsensusHoldStore holdStore, - IVexConsensusStore consensusStore, - DamperState damper, - RefreshState options, - CancellationToken cancellationToken) - { - var vulnerabilityId = candidate.VulnerabilityId; - var productKey = candidate.Product.Key; - - var componentChanged = HasComponentChange(existing, candidate); - var statusChanged = existing is not null && existing.Status != candidate.Status; - - if (existing is null) - { - await consensusStore.SaveAsync(candidate, cancellationToken).ConfigureAwait(false); - await holdStore.RemoveAsync(vulnerabilityId, productKey, cancellationToken).ConfigureAwait(false); - _logger.LogInformation("Stored initial consensus for {VulnerabilityId}/{ProductKey} with status {Status}.", vulnerabilityId, productKey, candidate.Status); - return; - } - - TimeSpan duration = TimeSpan.Zero; - if (statusChanged) - { - if (componentChanged) - { - duration = TimeSpan.Zero; - } - else - { - var mappedStatus = MapConsensusStatus(candidate.Status); - var supportingWeight = mappedStatus is null - ? 
0d - : candidate.Sources - .Where(source => source.Status == mappedStatus.Value) - .Sum(source => source.Weight); - duration = damper.ResolveDuration(supportingWeight); - } - } - - var requestedAt = _timeProvider.GetUtcNow(); - - if (statusChanged && duration > TimeSpan.Zero) - { - var eligibleAt = requestedAt + duration; - var reason = componentChanged ? "component_change" : "status_change"; - var newHold = new VexConsensusHold(vulnerabilityId, productKey, candidate, requestedAt, eligibleAt, reason); - var existingHold = await holdStore.FindAsync(vulnerabilityId, productKey, cancellationToken).ConfigureAwait(false); - - if (existingHold is null || existingHold.Candidate != candidate || existingHold.EligibleAt != newHold.EligibleAt) - { - await holdStore.SaveAsync(newHold, cancellationToken).ConfigureAwait(false); - _logger.LogInformation( - "Deferred consensus update for {VulnerabilityId}/{ProductKey} until {EligibleAt:O}; status {Status} pending (reason={Reason}).", - vulnerabilityId, - productKey, - eligibleAt, - candidate.Status, - reason); - } - return; - } - - await consensusStore.SaveAsync(candidate, cancellationToken).ConfigureAwait(false); - await holdStore.RemoveAsync(vulnerabilityId, productKey, cancellationToken).ConfigureAwait(false); - _logger.LogInformation( - "Updated consensus for {VulnerabilityId}/{ProductKey}; status={Status}, componentChange={ComponentChanged}.", - vulnerabilityId, - productKey, - candidate.Status, - componentChanged); - } - - private static bool HasComponentChange(VexConsensus? existing, VexConsensus candidate) - { - if (existing is null) - { - return false; - } - - var previous = existing.Product.ComponentIdentifiers; - var current = candidate.Product.ComponentIdentifiers; - - if (previous.IsDefaultOrEmpty && current.IsDefaultOrEmpty) - { - return false; - } - - if (previous.Length != current.Length) - { - return true; - } - - for (var i = 0; i < previous.Length; i++) - { - if (!string.Equals(previous[i], current[i], StringComparison.Ordinal)) - { - return true; - } - } - - return false; - } - - private static VexConsensus NormalizePolicyMetadata(VexConsensus consensus, VexPolicySnapshot snapshot) - { - if (string.Equals(consensus.PolicyVersion, snapshot.Version, StringComparison.Ordinal) && - string.Equals(consensus.PolicyRevisionId, snapshot.RevisionId, StringComparison.Ordinal) && - string.Equals(consensus.PolicyDigest, snapshot.Digest, StringComparison.Ordinal)) - { - return consensus; - } - - return new VexConsensus( - consensus.VulnerabilityId, - consensus.Product, - consensus.Status, - consensus.CalculatedAt, - consensus.Sources, - consensus.Conflicts, - consensus.Signals, - snapshot.Version, - consensus.Summary, - snapshot.RevisionId, - snapshot.Digest); - } - - private static VexClaimStatus? 
MapConsensusStatus(VexConsensusStatus status) - => status switch - { - VexConsensusStatus.Affected => VexClaimStatus.Affected, - VexConsensusStatus.NotAffected => VexClaimStatus.NotAffected, - VexConsensusStatus.Fixed => VexClaimStatus.Fixed, - _ => null, - }; - - private static string BuildKey(string vulnerabilityId, string productKey) - => string.Create( - vulnerabilityId.Length + productKey.Length + 1, - (vulnerabilityId, productKey), - static (span, tuple) => - { - tuple.vulnerabilityId.AsSpan().CopyTo(span); - span[tuple.vulnerabilityId.Length] = '|'; - tuple.productKey.AsSpan().CopyTo(span[(tuple.vulnerabilityId.Length + 1)..]); - }); - - private static VexProduct ResolveProduct(IReadOnlyList<VexClaim> claims, string productKey) - { - if (claims.Count > 0) - { - return claims[0].Product; - } - - var inferredPurl = productKey.StartsWith("pkg:", StringComparison.OrdinalIgnoreCase) ? productKey : null; - return new VexProduct(productKey, name: null, version: null, purl: inferredPurl); - } - - private static VexSignalSnapshot? AggregateSignals(IReadOnlyList<VexClaim> claims) - { - if (claims.Count == 0) - { - return null; - } - - VexSeveritySignal? bestSeverity = null; - double? bestScore = null; - bool kevPresent = false; - bool kevTrue = false; - double? bestEpss = null; - - foreach (var claim in claims) - { - if (claim.Signals is null) - { - continue; - } - - var severity = claim.Signals.Severity; - if (severity is not null) - { - var score = severity.Score; - if (bestSeverity is null || - (score is not null && (bestScore is null || score.Value > bestScore.Value)) || - (score is null && bestScore is null && !string.IsNullOrWhiteSpace(severity.Label) && string.IsNullOrWhiteSpace(bestSeverity.Label))) - { - bestSeverity = severity; - bestScore = severity.Score; - } - } - - if (claim.Signals.Kev is { } kevValue) - { - kevPresent = true; - if (kevValue) - { - kevTrue = true; - } - } - - if (claim.Signals.Epss is { } epss) - { - if (bestEpss is null || epss > bestEpss.Value) - { - bestEpss = epss; - } - } - } - - if (bestSeverity is null && !kevPresent && bestEpss is null) - { - return null; - } - - bool? kev = kevTrue ? true : (kevPresent ? 
false : null); - return new VexSignalSnapshot(bestSeverity, kev, bestEpss); - } - - private static async Task<IReadOnlyDictionary<string, VexProvider>> LoadProvidersAsync( - IReadOnlyList<VexClaim> claims, - IVexProviderStore providerStore, - IDictionary<string, VexProvider> cache, - CancellationToken cancellationToken) - { - if (claims.Count == 0) - { - return ImmutableDictionary<string, VexProvider>.Empty; - } - - var builder = ImmutableDictionary.CreateBuilder<string, VexProvider>(StringComparer.Ordinal); - var seen = new HashSet<string>(StringComparer.Ordinal); - - foreach (var providerId in claims.Select(claim => claim.ProviderId)) - { - if (!seen.Add(providerId)) - { - continue; - } - - if (cache.TryGetValue(providerId, out var cached)) - { - builder[providerId] = cached; - continue; - } - - var provider = await providerStore.FindAsync(providerId, cancellationToken).ConfigureAwait(false); - if (provider is not null) - { - cache[providerId] = provider; - builder[providerId] = provider; - } - } - - return builder.ToImmutable(); - } - - private readonly record struct RefreshRequest(string VulnerabilityId, string ProductKey); - - private sealed record RefreshState( - bool Enabled, - TimeSpan ScanInterval, - TimeSpan ConsensusTtl, - int ScanBatchSize, - DamperState Damper) - { - public static RefreshState FromOptions(VexWorkerRefreshOptions options) - { - var interval = options.ScanInterval > TimeSpan.Zero ? options.ScanInterval : TimeSpan.FromMinutes(10); - var ttl = options.ConsensusTtl > TimeSpan.Zero ? options.ConsensusTtl : TimeSpan.FromHours(2); - var batchSize = options.ScanBatchSize > 0 ? options.ScanBatchSize : 250; - var damper = DamperState.FromOptions(options.Damper); - return new RefreshState(options.Enabled, interval, ttl, batchSize, damper); - } - } - - private sealed record DamperState(TimeSpan Minimum, TimeSpan Maximum, TimeSpan DefaultDuration, ImmutableArray<DamperRuleState> Rules) - { - public static DamperState FromOptions(VexStabilityDamperOptions options) - { - var minimum = options.Minimum < TimeSpan.Zero ? TimeSpan.Zero : options.Minimum; - var maximum = options.Maximum > minimum ? 
options.Maximum : minimum + TimeSpan.FromHours(1); - var defaultDuration = options.ClampDuration(options.DefaultDuration); - var rules = options.Rules - .Select(rule => new DamperRuleState(Math.Max(0, rule.MinWeight), options.ClampDuration(rule.Duration))) - .OrderByDescending(rule => rule.MinWeight) - .ToImmutableArray(); - return new DamperState(minimum, maximum, defaultDuration, rules); - } - - public TimeSpan ResolveDuration(double weight) - { - if (double.IsNaN(weight) || double.IsInfinity(weight) || weight < 0) - { - return DefaultDuration; - } - - foreach (var rule in Rules) - { - if (weight >= rule.MinWeight) - { - return rule.Duration; - } - } - - return DefaultDuration; - } - } - - private sealed record DamperRuleState(double MinWeight, TimeSpan Duration); -} +using System.Collections.Concurrent; +using System.Collections.Immutable; +using System.Linq; +using System.Runtime.CompilerServices; +using System.Threading.Channels; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Excititor.Core; +using StellaOps.Excititor.Policy; +using StellaOps.Excititor.Storage.Mongo; +using StellaOps.Excititor.Worker.Options; + +namespace StellaOps.Excititor.Worker.Scheduling; + +internal sealed class VexConsensusRefreshService : BackgroundService, IVexConsensusRefreshScheduler +{ + private readonly IServiceScopeFactory _scopeFactory; + private readonly ILogger<VexConsensusRefreshService> _logger; + private readonly TimeProvider _timeProvider; + private readonly Channel<RefreshRequest> _refreshRequests; + private readonly ConcurrentDictionary<string, byte> _scheduledKeys = new(StringComparer.Ordinal); + private readonly IDisposable? _optionsSubscription; + private RefreshState _refreshState; + + public VexConsensusRefreshService( + IServiceScopeFactory scopeFactory, + IOptionsMonitor<VexWorkerOptions> optionsMonitor, + ILogger<VexConsensusRefreshService> logger, + TimeProvider timeProvider) + { + _scopeFactory = scopeFactory ?? throw new ArgumentNullException(nameof(scopeFactory)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + _refreshRequests = Channel.CreateUnbounded<RefreshRequest>(new UnboundedChannelOptions + { + AllowSynchronousContinuations = false, + SingleReader = true, + SingleWriter = false, + }); + + if (optionsMonitor is null) + { + throw new ArgumentNullException(nameof(optionsMonitor)); + } + + var options = optionsMonitor.CurrentValue; + _refreshState = RefreshState.FromOptions(options.Refresh); + _optionsSubscription = optionsMonitor.OnChange(o => + { + var state = RefreshState.FromOptions((o?.Refresh) ?? 
new VexWorkerRefreshOptions()); + Volatile.Write(ref _refreshState, state); + _logger.LogInformation( + "Consensus refresh options updated: enabled={Enabled}, interval={Interval}, ttl={Ttl}, batch={Batch}", + state.Enabled, + state.ScanInterval, + state.ConsensusTtl, + state.ScanBatchSize); + }); + } + + public override void Dispose() + { + _optionsSubscription?.Dispose(); + base.Dispose(); + } + + public void ScheduleRefresh(string vulnerabilityId, string productKey) + { + if (string.IsNullOrWhiteSpace(vulnerabilityId) || string.IsNullOrWhiteSpace(productKey)) + { + return; + } + + var key = BuildKey(vulnerabilityId, productKey); + if (!_scheduledKeys.TryAdd(key, 0)) + { + return; + } + + var request = new RefreshRequest(vulnerabilityId.Trim(), productKey.Trim()); + if (!_refreshRequests.Writer.TryWrite(request)) + { + _scheduledKeys.TryRemove(key, out _); + } + } + + protected override async Task ExecuteAsync(CancellationToken stoppingToken) + { + var queueTask = ProcessQueueAsync(stoppingToken); + + try + { + while (!stoppingToken.IsCancellationRequested) + { + var options = CurrentOptions; + + try + { + await ProcessEligibleHoldsAsync(options, stoppingToken).ConfigureAwait(false); + if (options.Enabled) + { + await ProcessTtlRefreshAsync(options, stoppingToken).ConfigureAwait(false); + } + else + { + _logger.LogDebug("Consensus refresh disabled; skipping TTL sweep."); + } + } + catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested) + { + break; + } + catch (Exception ex) + { + _logger.LogError(ex, "Consensus refresh loop failed."); + } + + try + { + await Task.Delay(options.ScanInterval, stoppingToken).ConfigureAwait(false); + } + catch (OperationCanceledException) + { + break; + } + } + } + finally + { + _refreshRequests.Writer.TryComplete(); + try + { + await queueTask.ConfigureAwait(false); + } + catch (OperationCanceledException) + { + } + } + } + + private RefreshState CurrentOptions => Volatile.Read(ref _refreshState); + + private async Task ProcessQueueAsync(CancellationToken cancellationToken) + { + try + { + while (await _refreshRequests.Reader.WaitToReadAsync(cancellationToken).ConfigureAwait(false)) + { + while (_refreshRequests.Reader.TryRead(out var request)) + { + var key = BuildKey(request.VulnerabilityId, request.ProductKey); + try + { + await ProcessCandidateAsync(request.VulnerabilityId, request.ProductKey, existingConsensus: null, CurrentOptions, cancellationToken).ConfigureAwait(false); + } + catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) + { + return; + } + catch (Exception ex) + { + _logger.LogError(ex, "Failed to refresh consensus for {VulnerabilityId}/{ProductKey} from queue.", request.VulnerabilityId, request.ProductKey); + } + finally + { + _scheduledKeys.TryRemove(key, out _); + } + } + } + } + catch (OperationCanceledException) + { + } + } + + private async Task ProcessEligibleHoldsAsync(RefreshState options, CancellationToken cancellationToken) + { + using var scope = _scopeFactory.CreateScope(); + var holdStore = scope.ServiceProvider.GetRequiredService<IVexConsensusHoldStore>(); + var consensusStore = scope.ServiceProvider.GetRequiredService<IVexConsensusStore>(); + + var now = _timeProvider.GetUtcNow(); + await foreach (var hold in holdStore.FindEligibleAsync(now, options.ScanBatchSize, cancellationToken).ConfigureAwait(false)) + { + var key = BuildKey(hold.VulnerabilityId, hold.ProductKey); + if (!_scheduledKeys.TryAdd(key, 0)) + { + continue; + } + + try + { + await 
consensusStore.SaveAsync(hold.Candidate with { }, cancellationToken).ConfigureAwait(false); + await holdStore.RemoveAsync(hold.VulnerabilityId, hold.ProductKey, cancellationToken).ConfigureAwait(false); + _logger.LogInformation( + "Promoted consensus hold for {VulnerabilityId}/{ProductKey}; status={Status}, reason={Reason}", + hold.VulnerabilityId, + hold.ProductKey, + hold.Candidate.Status, + hold.Reason); + } + catch (Exception ex) when (!cancellationToken.IsCancellationRequested) + { + _logger.LogError( + ex, + "Failed to promote consensus hold for {VulnerabilityId}/{ProductKey}.", + hold.VulnerabilityId, + hold.ProductKey); + } + finally + { + _scheduledKeys.TryRemove(key, out _); + } + } + } + + private async Task ProcessTtlRefreshAsync(RefreshState options, CancellationToken cancellationToken) + { + var now = _timeProvider.GetUtcNow(); + var cutoff = now - options.ConsensusTtl; + + using var scope = _scopeFactory.CreateScope(); + var consensusStore = scope.ServiceProvider.GetRequiredService<IVexConsensusStore>(); + + await foreach (var consensus in consensusStore.FindCalculatedBeforeAsync(cutoff, options.ScanBatchSize, cancellationToken).ConfigureAwait(false)) + { + var key = BuildKey(consensus.VulnerabilityId, consensus.Product.Key); + if (!_scheduledKeys.TryAdd(key, 0)) + { + continue; + } + + try + { + await ProcessCandidateAsync(consensus.VulnerabilityId, consensus.Product.Key, consensus, options, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) when (!cancellationToken.IsCancellationRequested) + { + _logger.LogError( + ex, + "Failed to refresh consensus for {VulnerabilityId}/{ProductKey} during TTL sweep.", + consensus.VulnerabilityId, + consensus.Product.Key); + } + finally + { + _scheduledKeys.TryRemove(key, out _); + } + } + } + + private async Task ProcessCandidateAsync( + string vulnerabilityId, + string productKey, + VexConsensus? existingConsensus, + RefreshState options, + CancellationToken cancellationToken) + { + using var scope = _scopeFactory.CreateScope(); + var consensusStore = scope.ServiceProvider.GetRequiredService<IVexConsensusStore>(); + var holdStore = scope.ServiceProvider.GetRequiredService<IVexConsensusHoldStore>(); + var claimStore = scope.ServiceProvider.GetRequiredService<IVexClaimStore>(); + var providerStore = scope.ServiceProvider.GetRequiredService<IVexProviderStore>(); + var policyProvider = scope.ServiceProvider.GetRequiredService<IVexPolicyProvider>(); + + existingConsensus ??= await consensusStore.FindAsync(vulnerabilityId, productKey, cancellationToken).ConfigureAwait(false); + + var claims = await claimStore.FindAsync(vulnerabilityId, productKey, since: null, cancellationToken).ConfigureAwait(false); + if (claims.Count == 0) + { + _logger.LogDebug("No claims found for {VulnerabilityId}/{ProductKey}; skipping consensus refresh.", vulnerabilityId, productKey); + await holdStore.RemoveAsync(vulnerabilityId, productKey, cancellationToken).ConfigureAwait(false); + return; + } + + var claimList = claims as IReadOnlyList<VexClaim> ?? 
claims.ToList(); + + var snapshot = policyProvider.GetSnapshot(); + var providerCache = new Dictionary<string, VexProvider>(StringComparer.Ordinal); + var providers = await LoadProvidersAsync(claimList, providerStore, providerCache, cancellationToken).ConfigureAwait(false); + var product = ResolveProduct(claimList, productKey); + var calculatedAt = _timeProvider.GetUtcNow(); + + var resolver = new VexConsensusResolver(snapshot.ConsensusPolicy); + var request = new VexConsensusRequest( + vulnerabilityId, + product, + claimList.ToArray(), + providers, + calculatedAt, + snapshot.ConsensusOptions.WeightCeiling, + AggregateSignals(claimList), + snapshot.RevisionId, + snapshot.Digest); + + var resolution = resolver.Resolve(request); + var candidate = NormalizePolicyMetadata(resolution.Consensus, snapshot); + + await ApplyConsensusAsync( + candidate, + existingConsensus, + holdStore, + consensusStore, + options.Damper, + options, + cancellationToken).ConfigureAwait(false); + } + + private async Task ApplyConsensusAsync( + VexConsensus candidate, + VexConsensus? existing, + IVexConsensusHoldStore holdStore, + IVexConsensusStore consensusStore, + DamperState damper, + RefreshState options, + CancellationToken cancellationToken) + { + var vulnerabilityId = candidate.VulnerabilityId; + var productKey = candidate.Product.Key; + + var componentChanged = HasComponentChange(existing, candidate); + var statusChanged = existing is not null && existing.Status != candidate.Status; + + if (existing is null) + { + await consensusStore.SaveAsync(candidate, cancellationToken).ConfigureAwait(false); + await holdStore.RemoveAsync(vulnerabilityId, productKey, cancellationToken).ConfigureAwait(false); + _logger.LogInformation("Stored initial consensus for {VulnerabilityId}/{ProductKey} with status {Status}.", vulnerabilityId, productKey, candidate.Status); + return; + } + + TimeSpan duration = TimeSpan.Zero; + if (statusChanged) + { + if (componentChanged) + { + duration = TimeSpan.Zero; + } + else + { + var mappedStatus = MapConsensusStatus(candidate.Status); + var supportingWeight = mappedStatus is null + ? 0d + : candidate.Sources + .Where(source => source.Status == mappedStatus.Value) + .Sum(source => source.Weight); + duration = damper.ResolveDuration(supportingWeight); + } + } + + var requestedAt = _timeProvider.GetUtcNow(); + + if (statusChanged && duration > TimeSpan.Zero) + { + var eligibleAt = requestedAt + duration; + var reason = componentChanged ? 
"component_change" : "status_change"; + var newHold = new VexConsensusHold(vulnerabilityId, productKey, candidate, requestedAt, eligibleAt, reason); + var existingHold = await holdStore.FindAsync(vulnerabilityId, productKey, cancellationToken).ConfigureAwait(false); + + if (existingHold is null || existingHold.Candidate != candidate || existingHold.EligibleAt != newHold.EligibleAt) + { + await holdStore.SaveAsync(newHold, cancellationToken).ConfigureAwait(false); + _logger.LogInformation( + "Deferred consensus update for {VulnerabilityId}/{ProductKey} until {EligibleAt:O}; status {Status} pending (reason={Reason}).", + vulnerabilityId, + productKey, + eligibleAt, + candidate.Status, + reason); + } + return; + } + + await consensusStore.SaveAsync(candidate, cancellationToken).ConfigureAwait(false); + await holdStore.RemoveAsync(vulnerabilityId, productKey, cancellationToken).ConfigureAwait(false); + _logger.LogInformation( + "Updated consensus for {VulnerabilityId}/{ProductKey}; status={Status}, componentChange={ComponentChanged}.", + vulnerabilityId, + productKey, + candidate.Status, + componentChanged); + } + + private static bool HasComponentChange(VexConsensus? existing, VexConsensus candidate) + { + if (existing is null) + { + return false; + } + + var previous = existing.Product.ComponentIdentifiers; + var current = candidate.Product.ComponentIdentifiers; + + if (previous.IsDefaultOrEmpty && current.IsDefaultOrEmpty) + { + return false; + } + + if (previous.Length != current.Length) + { + return true; + } + + for (var i = 0; i < previous.Length; i++) + { + if (!string.Equals(previous[i], current[i], StringComparison.Ordinal)) + { + return true; + } + } + + return false; + } + + private static VexConsensus NormalizePolicyMetadata(VexConsensus consensus, VexPolicySnapshot snapshot) + { + if (string.Equals(consensus.PolicyVersion, snapshot.Version, StringComparison.Ordinal) && + string.Equals(consensus.PolicyRevisionId, snapshot.RevisionId, StringComparison.Ordinal) && + string.Equals(consensus.PolicyDigest, snapshot.Digest, StringComparison.Ordinal)) + { + return consensus; + } + + return new VexConsensus( + consensus.VulnerabilityId, + consensus.Product, + consensus.Status, + consensus.CalculatedAt, + consensus.Sources, + consensus.Conflicts, + consensus.Signals, + snapshot.Version, + consensus.Summary, + snapshot.RevisionId, + snapshot.Digest); + } + + private static VexClaimStatus? MapConsensusStatus(VexConsensusStatus status) + => status switch + { + VexConsensusStatus.Affected => VexClaimStatus.Affected, + VexConsensusStatus.NotAffected => VexClaimStatus.NotAffected, + VexConsensusStatus.Fixed => VexClaimStatus.Fixed, + _ => null, + }; + + private static string BuildKey(string vulnerabilityId, string productKey) + => string.Create( + vulnerabilityId.Length + productKey.Length + 1, + (vulnerabilityId, productKey), + static (span, tuple) => + { + tuple.vulnerabilityId.AsSpan().CopyTo(span); + span[tuple.vulnerabilityId.Length] = '|'; + tuple.productKey.AsSpan().CopyTo(span[(tuple.vulnerabilityId.Length + 1)..]); + }); + + private static VexProduct ResolveProduct(IReadOnlyList<VexClaim> claims, string productKey) + { + if (claims.Count > 0) + { + return claims[0].Product; + } + + var inferredPurl = productKey.StartsWith("pkg:", StringComparison.OrdinalIgnoreCase) ? productKey : null; + return new VexProduct(productKey, name: null, version: null, purl: inferredPurl); + } + + private static VexSignalSnapshot? 
AggregateSignals(IReadOnlyList<VexClaim> claims) + { + if (claims.Count == 0) + { + return null; + } + + VexSeveritySignal? bestSeverity = null; + double? bestScore = null; + bool kevPresent = false; + bool kevTrue = false; + double? bestEpss = null; + + foreach (var claim in claims) + { + if (claim.Signals is null) + { + continue; + } + + var severity = claim.Signals.Severity; + if (severity is not null) + { + var score = severity.Score; + if (bestSeverity is null || + (score is not null && (bestScore is null || score.Value > bestScore.Value)) || + (score is null && bestScore is null && !string.IsNullOrWhiteSpace(severity.Label) && string.IsNullOrWhiteSpace(bestSeverity.Label))) + { + bestSeverity = severity; + bestScore = severity.Score; + } + } + + if (claim.Signals.Kev is { } kevValue) + { + kevPresent = true; + if (kevValue) + { + kevTrue = true; + } + } + + if (claim.Signals.Epss is { } epss) + { + if (bestEpss is null || epss > bestEpss.Value) + { + bestEpss = epss; + } + } + } + + if (bestSeverity is null && !kevPresent && bestEpss is null) + { + return null; + } + + bool? kev = kevTrue ? true : (kevPresent ? false : null); + return new VexSignalSnapshot(bestSeverity, kev, bestEpss); + } + + private static async Task<IReadOnlyDictionary<string, VexProvider>> LoadProvidersAsync( + IReadOnlyList<VexClaim> claims, + IVexProviderStore providerStore, + IDictionary<string, VexProvider> cache, + CancellationToken cancellationToken) + { + if (claims.Count == 0) + { + return ImmutableDictionary<string, VexProvider>.Empty; + } + + var builder = ImmutableDictionary.CreateBuilder<string, VexProvider>(StringComparer.Ordinal); + var seen = new HashSet<string>(StringComparer.Ordinal); + + foreach (var providerId in claims.Select(claim => claim.ProviderId)) + { + if (!seen.Add(providerId)) + { + continue; + } + + if (cache.TryGetValue(providerId, out var cached)) + { + builder[providerId] = cached; + continue; + } + + var provider = await providerStore.FindAsync(providerId, cancellationToken).ConfigureAwait(false); + if (provider is not null) + { + cache[providerId] = provider; + builder[providerId] = provider; + } + } + + return builder.ToImmutable(); + } + + private readonly record struct RefreshRequest(string VulnerabilityId, string ProductKey); + + private sealed record RefreshState( + bool Enabled, + TimeSpan ScanInterval, + TimeSpan ConsensusTtl, + int ScanBatchSize, + DamperState Damper) + { + public static RefreshState FromOptions(VexWorkerRefreshOptions options) + { + var interval = options.ScanInterval > TimeSpan.Zero ? options.ScanInterval : TimeSpan.FromMinutes(10); + var ttl = options.ConsensusTtl > TimeSpan.Zero ? options.ConsensusTtl : TimeSpan.FromHours(2); + var batchSize = options.ScanBatchSize > 0 ? options.ScanBatchSize : 250; + var damper = DamperState.FromOptions(options.Damper); + return new RefreshState(options.Enabled, interval, ttl, batchSize, damper); + } + } + + private sealed record DamperState(TimeSpan Minimum, TimeSpan Maximum, TimeSpan DefaultDuration, ImmutableArray<DamperRuleState> Rules) + { + public static DamperState FromOptions(VexStabilityDamperOptions options) + { + var minimum = options.Minimum < TimeSpan.Zero ? TimeSpan.Zero : options.Minimum; + var maximum = options.Maximum > minimum ? 
options.Maximum : minimum + TimeSpan.FromHours(1); + var defaultDuration = options.ClampDuration(options.DefaultDuration); + var rules = options.Rules + .Select(rule => new DamperRuleState(Math.Max(0, rule.MinWeight), options.ClampDuration(rule.Duration))) + .OrderByDescending(rule => rule.MinWeight) + .ToImmutableArray(); + return new DamperState(minimum, maximum, defaultDuration, rules); + } + + public TimeSpan ResolveDuration(double weight) + { + if (double.IsNaN(weight) || double.IsInfinity(weight) || weight < 0) + { + return DefaultDuration; + } + + foreach (var rule in Rules) + { + if (weight >= rule.MinWeight) + { + return rule.Duration; + } + } + + return DefaultDuration; + } + } + + private sealed record DamperRuleState(double MinWeight, TimeSpan Duration); +} diff --git a/src/StellaOps.Excititor.Worker/Scheduling/VexWorkerHostedService.cs b/src/Excititor/StellaOps.Excititor.Worker/Scheduling/VexWorkerHostedService.cs similarity index 100% rename from src/StellaOps.Excititor.Worker/Scheduling/VexWorkerHostedService.cs rename to src/Excititor/StellaOps.Excititor.Worker/Scheduling/VexWorkerHostedService.cs diff --git a/src/StellaOps.Excititor.Worker/Scheduling/VexWorkerSchedule.cs b/src/Excititor/StellaOps.Excititor.Worker/Scheduling/VexWorkerSchedule.cs similarity index 100% rename from src/StellaOps.Excititor.Worker/Scheduling/VexWorkerSchedule.cs rename to src/Excititor/StellaOps.Excititor.Worker/Scheduling/VexWorkerSchedule.cs diff --git a/src/StellaOps.Excititor.Worker/Signature/VerifyingVexRawDocumentSink.cs b/src/Excititor/StellaOps.Excititor.Worker/Signature/VerifyingVexRawDocumentSink.cs similarity index 97% rename from src/StellaOps.Excititor.Worker/Signature/VerifyingVexRawDocumentSink.cs rename to src/Excititor/StellaOps.Excititor.Worker/Signature/VerifyingVexRawDocumentSink.cs index e7506bc2..a32651c7 100644 --- a/src/StellaOps.Excititor.Worker/Signature/VerifyingVexRawDocumentSink.cs +++ b/src/Excititor/StellaOps.Excititor.Worker/Signature/VerifyingVexRawDocumentSink.cs @@ -1,69 +1,69 @@ -using System.Collections.Immutable; -using StellaOps.Excititor.Core; -using StellaOps.Excititor.Storage.Mongo; - -namespace StellaOps.Excititor.Worker.Signature; - -internal sealed class VerifyingVexRawDocumentSink : IVexRawDocumentSink -{ - private readonly IVexRawStore _inner; - private readonly IVexSignatureVerifier _signatureVerifier; - - public VerifyingVexRawDocumentSink(IVexRawStore inner, IVexSignatureVerifier signatureVerifier) - { - _inner = inner ?? throw new ArgumentNullException(nameof(inner)); - _signatureVerifier = signatureVerifier ?? throw new ArgumentNullException(nameof(signatureVerifier)); - } - - public async ValueTask StoreAsync(VexRawDocument document, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(document); - - var signatureMetadata = await _signatureVerifier.VerifyAsync(document, cancellationToken).ConfigureAwait(false); - var enrichedDocument = signatureMetadata is null - ? document - : document with { Metadata = EnrichMetadata(document.Metadata, signatureMetadata) }; - - await _inner.StoreAsync(enrichedDocument, cancellationToken).ConfigureAwait(false); - } - - private static ImmutableDictionary<string, string> EnrichMetadata( - ImmutableDictionary<string, string> metadata, - VexSignatureMetadata signature) - { - var builder = metadata is null - ? 
ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal) - : metadata.ToBuilder(); - - builder["signature.present"] = "true"; - builder["signature.verified"] = "true"; - builder["vex.signature.type"] = signature.Type; - - if (!string.IsNullOrWhiteSpace(signature.Subject)) - { - builder["vex.signature.subject"] = signature.Subject!; - } - - if (!string.IsNullOrWhiteSpace(signature.Issuer)) - { - builder["vex.signature.issuer"] = signature.Issuer!; - } - - if (!string.IsNullOrWhiteSpace(signature.KeyId)) - { - builder["vex.signature.keyId"] = signature.KeyId!; - } - - if (signature.VerifiedAt is not null) - { - builder["vex.signature.verifiedAt"] = signature.VerifiedAt.Value.ToString("O"); - } - - if (!string.IsNullOrWhiteSpace(signature.TransparencyLogReference)) - { - builder["vex.signature.transparencyLogReference"] = signature.TransparencyLogReference!; - } - - return builder.ToImmutable(); - } -} +using System.Collections.Immutable; +using StellaOps.Excititor.Core; +using StellaOps.Excititor.Storage.Mongo; + +namespace StellaOps.Excititor.Worker.Signature; + +internal sealed class VerifyingVexRawDocumentSink : IVexRawDocumentSink +{ + private readonly IVexRawStore _inner; + private readonly IVexSignatureVerifier _signatureVerifier; + + public VerifyingVexRawDocumentSink(IVexRawStore inner, IVexSignatureVerifier signatureVerifier) + { + _inner = inner ?? throw new ArgumentNullException(nameof(inner)); + _signatureVerifier = signatureVerifier ?? throw new ArgumentNullException(nameof(signatureVerifier)); + } + + public async ValueTask StoreAsync(VexRawDocument document, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(document); + + var signatureMetadata = await _signatureVerifier.VerifyAsync(document, cancellationToken).ConfigureAwait(false); + var enrichedDocument = signatureMetadata is null + ? document + : document with { Metadata = EnrichMetadata(document.Metadata, signatureMetadata) }; + + await _inner.StoreAsync(enrichedDocument, cancellationToken).ConfigureAwait(false); + } + + private static ImmutableDictionary<string, string> EnrichMetadata( + ImmutableDictionary<string, string> metadata, + VexSignatureMetadata signature) + { + var builder = metadata is null + ? 
ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal) + : metadata.ToBuilder(); + + builder["signature.present"] = "true"; + builder["signature.verified"] = "true"; + builder["vex.signature.type"] = signature.Type; + + if (!string.IsNullOrWhiteSpace(signature.Subject)) + { + builder["vex.signature.subject"] = signature.Subject!; + } + + if (!string.IsNullOrWhiteSpace(signature.Issuer)) + { + builder["vex.signature.issuer"] = signature.Issuer!; + } + + if (!string.IsNullOrWhiteSpace(signature.KeyId)) + { + builder["vex.signature.keyId"] = signature.KeyId!; + } + + if (signature.VerifiedAt is not null) + { + builder["vex.signature.verifiedAt"] = signature.VerifiedAt.Value.ToString("O"); + } + + if (!string.IsNullOrWhiteSpace(signature.TransparencyLogReference)) + { + builder["vex.signature.transparencyLogReference"] = signature.TransparencyLogReference!; + } + + return builder.ToImmutable(); + } +} diff --git a/src/StellaOps.Excititor.Worker/Signature/WorkerSignatureVerifier.cs b/src/Excititor/StellaOps.Excititor.Worker/Signature/WorkerSignatureVerifier.cs similarity index 97% rename from src/StellaOps.Excititor.Worker/Signature/WorkerSignatureVerifier.cs rename to src/Excititor/StellaOps.Excititor.Worker/Signature/WorkerSignatureVerifier.cs index 18cdd0f5..044afcfc 100644 --- a/src/StellaOps.Excititor.Worker/Signature/WorkerSignatureVerifier.cs +++ b/src/Excititor/StellaOps.Excititor.Worker/Signature/WorkerSignatureVerifier.cs @@ -1,364 +1,364 @@ -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Diagnostics.Metrics; -using System.Linq; -using System.Security.Cryptography; -using System.Text; -using System.Text.Json; -using System.Text.Json.Serialization; -using Microsoft.Extensions.Logging; -using StellaOps.Aoc; -using StellaOps.Excititor.Attestation.Dsse; -using StellaOps.Excititor.Attestation.Models; -using StellaOps.Excititor.Attestation.Verification; -using StellaOps.Excititor.Core; -using StellaOps.Excititor.Core.Aoc; - -namespace StellaOps.Excititor.Worker.Signature; - -/// <summary> -/// Enforces checksum validation and records signature verification metadata. -/// </summary> -internal sealed class WorkerSignatureVerifier : IVexSignatureVerifier -{ - private static readonly Meter Meter = new("StellaOps.Excititor.Worker", "1.0"); - private static readonly Counter<long> SignatureVerificationCounter = Meter.CreateCounter<long>( - "ingestion_signature_verified_total", - description: "Counts signature and checksum verification results for Excititor worker ingestion."); - - private readonly ILogger<WorkerSignatureVerifier> _logger; - private readonly IVexAttestationVerifier? _attestationVerifier; - private readonly TimeProvider _timeProvider; - - private static readonly JsonSerializerOptions EnvelopeSerializerOptions = new() - { - PropertyNamingPolicy = JsonNamingPolicy.CamelCase, - DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, - }; - - private static readonly JsonSerializerOptions StatementSerializerOptions = new() - { - PropertyNamingPolicy = JsonNamingPolicy.CamelCase, - DefaultIgnoreCondition = JsonIgnoreCondition.Never, - Converters = { new JsonStringEnumConverter(JsonNamingPolicy.CamelCase) }, - }; - - public WorkerSignatureVerifier( - ILogger<WorkerSignatureVerifier> logger, - IVexAttestationVerifier? attestationVerifier = null, - TimeProvider? timeProvider = null) - { - _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); - _attestationVerifier = attestationVerifier; - _timeProvider = timeProvider ?? TimeProvider.System; - } - - public async ValueTask<VexSignatureMetadata?> VerifyAsync(VexRawDocument document, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(document); - - var metadata = document.Metadata ?? ImmutableDictionary<string, string>.Empty; - - var expectedDigest = NormalizeDigest(document.Digest); - var computedDigest = ComputeDigest(document.Content.Span); - - if (!string.Equals(expectedDigest, computedDigest, StringComparison.OrdinalIgnoreCase)) - { - RecordVerification(document.ProviderId, metadata, "fail"); - _logger.LogError( - "Checksum mismatch for provider {ProviderId} (expected={ExpectedDigest}, computed={ComputedDigest}, uri={SourceUri})", - document.ProviderId, - expectedDigest, - computedDigest, - document.SourceUri); - - var violation = AocViolation.Create( - AocViolationCode.SignatureInvalid, - "/upstream/content_hash", - $"Content hash mismatch. Expected {expectedDigest}, computed {computedDigest}."); - - throw new ExcititorAocGuardException(AocGuardResult.FromViolations(new[] { violation })); - } - - VexSignatureMetadata? signatureMetadata = null; - if (document.Format == VexDocumentFormat.OciAttestation && _attestationVerifier is not null) - { - signatureMetadata = await VerifyAttestationAsync(document, metadata, cancellationToken).ConfigureAwait(false); - } - - signatureMetadata ??= ExtractSignatureMetadata(metadata); - var resultLabel = signatureMetadata is null ? "skipped" : "ok"; - RecordVerification(document.ProviderId, metadata, resultLabel); - - if (resultLabel == "skipped") - { - _logger.LogDebug( - "Signature verification skipped for provider {ProviderId} (no signature metadata).", - document.ProviderId); - } - else - { - _logger.LogInformation( - "Signature metadata recorded for provider {ProviderId} (type={SignatureType}, subject={Subject}, issuer={Issuer}).", - document.ProviderId, - signatureMetadata!.Type, - signatureMetadata.Subject ?? "<unknown>", - signatureMetadata.Issuer ?? "<unknown>"); - } - - return signatureMetadata; - } - - private async ValueTask<VexSignatureMetadata?> VerifyAttestationAsync( - VexRawDocument document, - ImmutableDictionary<string, string> metadata, - CancellationToken cancellationToken) - { - try - { - var envelopeJson = Encoding.UTF8.GetString(document.Content.Span); - var envelope = JsonSerializer.Deserialize<DsseEnvelope>(envelopeJson, EnvelopeSerializerOptions) - ?? throw new InvalidOperationException("DSSE envelope deserialized to null."); - - var payloadBytes = Convert.FromBase64String(envelope.Payload); - var statement = JsonSerializer.Deserialize<VexInTotoStatement>(payloadBytes, StatementSerializerOptions) - ?? throw new InvalidOperationException("DSSE statement deserialized to null."); - - if (statement.Subject is null || statement.Subject.Count == 0) - { - throw new InvalidOperationException("DSSE statement subject is missing."); - } - - var predicate = statement.Predicate ?? throw new InvalidOperationException("DSSE predicate is missing."); - var request = BuildAttestationRequest(statement, predicate); - var attestationMetadata = BuildAttestationMetadata(statement, envelope, metadata); - - var verificationRequest = new VexAttestationVerificationRequest( - request, - attestationMetadata, - envelopeJson); - - var verification = await _attestationVerifier! 
- .VerifyAsync(verificationRequest, cancellationToken) - .ConfigureAwait(false); - if (!verification.IsValid) - { - var diagnostics = string.Join(", ", verification.Diagnostics.Select(kvp => $"{kvp.Key}={kvp.Value}")); - _logger.LogError( - "Attestation verification failed for provider {ProviderId} (uri={SourceUri}) diagnostics={Diagnostics}", - document.ProviderId, - document.SourceUri, - diagnostics); - - var violation = AocViolation.Create( - AocViolationCode.SignatureInvalid, - "/upstream/signature", - "Attestation verification failed."); - - RecordVerification(document.ProviderId, metadata, "fail"); - throw new ExcititorAocGuardException(AocGuardResult.FromViolations(new[] { violation })); - } - - _logger.LogInformation( - "Attestation verification succeeded for provider {ProviderId} (predicate={PredicateType}, subject={Subject}).", - document.ProviderId, - attestationMetadata.PredicateType, - statement.Subject[0].Name ?? "<unknown>"); - - return BuildSignatureMetadata(statement, metadata, attestationMetadata, verification.Diagnostics); - } - catch (ExcititorAocGuardException) - { - throw; - } - catch (Exception ex) - { - _logger.LogError( - ex, - "Failed to verify attestation for provider {ProviderId} (uri={SourceUri})", - document.ProviderId, - document.SourceUri); - - var violation = AocViolation.Create( - AocViolationCode.SignatureInvalid, - "/upstream/signature", - $"Attestation verification encountered an error: {ex.Message}"); - - RecordVerification(document.ProviderId, metadata, "fail"); - throw new ExcititorAocGuardException(AocGuardResult.FromViolations(new[] { violation })); - } - } - - private VexAttestationRequest BuildAttestationRequest(VexInTotoStatement statement, VexAttestationPredicate predicate) - { - var subject = statement.Subject!.First(); - var exportId = predicate.ExportId ?? subject.Name ?? throw new InvalidOperationException("Attestation export ID missing."); - var querySignature = new VexQuerySignature(predicate.QuerySignature ?? throw new InvalidOperationException("Attestation query signature missing.")); - - if (string.IsNullOrWhiteSpace(predicate.ArtifactAlgorithm) || string.IsNullOrWhiteSpace(predicate.ArtifactDigest)) - { - throw new InvalidOperationException("Attestation artifact metadata is incomplete."); - } - - var artifact = new VexContentAddress(predicate.ArtifactAlgorithm, predicate.ArtifactDigest); - - var sourceProviders = predicate.SourceProviders?.ToImmutableArray() ?? ImmutableArray<string>.Empty; - var metadata = predicate.Metadata?.ToImmutableDictionary(StringComparer.Ordinal) ?? ImmutableDictionary<string, string>.Empty; - - return new VexAttestationRequest( - exportId, - querySignature, - artifact, - predicate.Format, - predicate.CreatedAt, - sourceProviders, - metadata); - } - - private VexAttestationMetadata BuildAttestationMetadata( - VexInTotoStatement statement, - DsseEnvelope envelope, - ImmutableDictionary<string, string> metadata) - { - VexRekorReference? rekor = null; - if (metadata.TryGetValue("vex.signature.transparencyLogReference", out var rekorValue) && !string.IsNullOrWhiteSpace(rekorValue)) - { - rekor = new VexRekorReference("0.1", rekorValue); - } - - DateTimeOffset signedAt; - if (metadata.TryGetValue("vex.signature.verifiedAt", out var signedAtRaw) - && DateTimeOffset.TryParse(signedAtRaw, out var parsedSignedAt)) - { - signedAt = parsedSignedAt; - } - else - { - signedAt = _timeProvider.GetUtcNow(); - } - - return new VexAttestationMetadata( - statement.PredicateType ?? 
"https://stella-ops.org/attestations/vex-export", - rekor, - VexDsseBuilder.ComputeEnvelopeDigest(envelope), - signedAt); - } - - private VexSignatureMetadata BuildSignatureMetadata( - VexInTotoStatement statement, - ImmutableDictionary<string, string> metadata, - VexAttestationMetadata attestationMetadata, - ImmutableDictionary<string, string> diagnostics) - { - metadata.TryGetValue("vex.signature.type", out var type); - metadata.TryGetValue("vex.provenance.cosign.subject", out var subject); - metadata.TryGetValue("vex.provenance.cosign.issuer", out var issuer); - metadata.TryGetValue("vex.signature.keyId", out var keyId); - metadata.TryGetValue("vex.signature.transparencyLogReference", out var transparencyReference); - - if (string.IsNullOrWhiteSpace(type)) - { - type = statement.PredicateType?.Contains("attest", StringComparison.OrdinalIgnoreCase) == true - ? "cosign" - : "attestation"; - } - - if (string.IsNullOrWhiteSpace(subject) && statement.Subject is { Count: > 0 }) - { - subject = statement.Subject[0].Name; - } - - if (string.IsNullOrWhiteSpace(transparencyReference) && attestationMetadata.Rekor is not null) - { - transparencyReference = attestationMetadata.Rekor.Location; - } - - if (string.IsNullOrWhiteSpace(issuer) - && diagnostics.TryGetValue("verification.issuer", out var diagnosticIssuer) - && !string.IsNullOrWhiteSpace(diagnosticIssuer)) - { - issuer = diagnosticIssuer; - } - - if (string.IsNullOrWhiteSpace(keyId) - && diagnostics.TryGetValue("verification.keyId", out var diagnosticKeyId) - && !string.IsNullOrWhiteSpace(diagnosticKeyId)) - { - keyId = diagnosticKeyId; - } - - var verifiedAt = attestationMetadata.SignedAt ?? _timeProvider.GetUtcNow(); - - return new VexSignatureMetadata( - type!, - subject, - issuer, - keyId, - verifiedAt, - transparencyReference); - } - - private static string NormalizeDigest(string digest) - { - if (string.IsNullOrWhiteSpace(digest)) - { - return string.Empty; - } - - return digest.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase) - ? digest - : $"sha256:{digest}"; - } - - private static string ComputeDigest(ReadOnlySpan<byte> content) - { - Span<byte> buffer = stackalloc byte[32]; - if (!SHA256.TryHashData(content, buffer, out _)) - { - var hash = SHA256.HashData(content.ToArray()); - return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant(); - } - - return "sha256:" + Convert.ToHexString(buffer).ToLowerInvariant(); - } - - private static VexSignatureMetadata? ExtractSignatureMetadata(ImmutableDictionary<string, string> metadata) - { - if (!metadata.TryGetValue("vex.signature.type", out var type) || string.IsNullOrWhiteSpace(type)) - { - return null; - } - - metadata.TryGetValue("vex.signature.subject", out var subject); - metadata.TryGetValue("vex.signature.issuer", out var issuer); - metadata.TryGetValue("vex.signature.keyId", out var keyId); - metadata.TryGetValue("vex.signature.verifiedAt", out var verifiedAtRaw); - metadata.TryGetValue("vex.signature.transparencyLogReference", out var tlog); - - DateTimeOffset? 
verifiedAt = null; - if (!string.IsNullOrWhiteSpace(verifiedAtRaw) && DateTimeOffset.TryParse(verifiedAtRaw, out var parsed)) - { - verifiedAt = parsed; - } - - return new VexSignatureMetadata(type, subject, issuer, keyId, verifiedAt, tlog); - } - - private static void RecordVerification(string providerId, ImmutableDictionary<string, string> metadata, string result) - { - var tags = new List<KeyValuePair<string, object?>>(3) - { - new("source", providerId), - new("result", result), - }; - - if (!metadata.TryGetValue("tenant", out var tenant) || string.IsNullOrWhiteSpace(tenant)) - { - tenant = "tenant-default"; - } - - tags.Add(new KeyValuePair<string, object?>("tenant", tenant)); - - SignatureVerificationCounter.Add(1, tags.ToArray()); - } -} +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Diagnostics.Metrics; +using System.Linq; +using System.Security.Cryptography; +using System.Text; +using System.Text.Json; +using System.Text.Json.Serialization; +using Microsoft.Extensions.Logging; +using StellaOps.Aoc; +using StellaOps.Excititor.Attestation.Dsse; +using StellaOps.Excititor.Attestation.Models; +using StellaOps.Excititor.Attestation.Verification; +using StellaOps.Excititor.Core; +using StellaOps.Excititor.Core.Aoc; + +namespace StellaOps.Excititor.Worker.Signature; + +/// <summary> +/// Enforces checksum validation and records signature verification metadata. +/// </summary> +internal sealed class WorkerSignatureVerifier : IVexSignatureVerifier +{ + private static readonly Meter Meter = new("StellaOps.Excititor.Worker", "1.0"); + private static readonly Counter<long> SignatureVerificationCounter = Meter.CreateCounter<long>( + "ingestion_signature_verified_total", + description: "Counts signature and checksum verification results for Excititor worker ingestion."); + + private readonly ILogger<WorkerSignatureVerifier> _logger; + private readonly IVexAttestationVerifier? _attestationVerifier; + private readonly TimeProvider _timeProvider; + + private static readonly JsonSerializerOptions EnvelopeSerializerOptions = new() + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + }; + + private static readonly JsonSerializerOptions StatementSerializerOptions = new() + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + DefaultIgnoreCondition = JsonIgnoreCondition.Never, + Converters = { new JsonStringEnumConverter(JsonNamingPolicy.CamelCase) }, + }; + + public WorkerSignatureVerifier( + ILogger<WorkerSignatureVerifier> logger, + IVexAttestationVerifier? attestationVerifier = null, + TimeProvider? timeProvider = null) + { + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _attestationVerifier = attestationVerifier; + _timeProvider = timeProvider ?? TimeProvider.System; + } + + public async ValueTask<VexSignatureMetadata?> VerifyAsync(VexRawDocument document, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(document); + + var metadata = document.Metadata ?? 
ImmutableDictionary<string, string>.Empty; + + var expectedDigest = NormalizeDigest(document.Digest); + var computedDigest = ComputeDigest(document.Content.Span); + + if (!string.Equals(expectedDigest, computedDigest, StringComparison.OrdinalIgnoreCase)) + { + RecordVerification(document.ProviderId, metadata, "fail"); + _logger.LogError( + "Checksum mismatch for provider {ProviderId} (expected={ExpectedDigest}, computed={ComputedDigest}, uri={SourceUri})", + document.ProviderId, + expectedDigest, + computedDigest, + document.SourceUri); + + var violation = AocViolation.Create( + AocViolationCode.SignatureInvalid, + "/upstream/content_hash", + $"Content hash mismatch. Expected {expectedDigest}, computed {computedDigest}."); + + throw new ExcititorAocGuardException(AocGuardResult.FromViolations(new[] { violation })); + } + + VexSignatureMetadata? signatureMetadata = null; + if (document.Format == VexDocumentFormat.OciAttestation && _attestationVerifier is not null) + { + signatureMetadata = await VerifyAttestationAsync(document, metadata, cancellationToken).ConfigureAwait(false); + } + + signatureMetadata ??= ExtractSignatureMetadata(metadata); + var resultLabel = signatureMetadata is null ? "skipped" : "ok"; + RecordVerification(document.ProviderId, metadata, resultLabel); + + if (resultLabel == "skipped") + { + _logger.LogDebug( + "Signature verification skipped for provider {ProviderId} (no signature metadata).", + document.ProviderId); + } + else + { + _logger.LogInformation( + "Signature metadata recorded for provider {ProviderId} (type={SignatureType}, subject={Subject}, issuer={Issuer}).", + document.ProviderId, + signatureMetadata!.Type, + signatureMetadata.Subject ?? "<unknown>", + signatureMetadata.Issuer ?? "<unknown>"); + } + + return signatureMetadata; + } + + private async ValueTask<VexSignatureMetadata?> VerifyAttestationAsync( + VexRawDocument document, + ImmutableDictionary<string, string> metadata, + CancellationToken cancellationToken) + { + try + { + var envelopeJson = Encoding.UTF8.GetString(document.Content.Span); + var envelope = JsonSerializer.Deserialize<DsseEnvelope>(envelopeJson, EnvelopeSerializerOptions) + ?? throw new InvalidOperationException("DSSE envelope deserialized to null."); + + var payloadBytes = Convert.FromBase64String(envelope.Payload); + var statement = JsonSerializer.Deserialize<VexInTotoStatement>(payloadBytes, StatementSerializerOptions) + ?? throw new InvalidOperationException("DSSE statement deserialized to null."); + + if (statement.Subject is null || statement.Subject.Count == 0) + { + throw new InvalidOperationException("DSSE statement subject is missing."); + } + + var predicate = statement.Predicate ?? throw new InvalidOperationException("DSSE predicate is missing."); + var request = BuildAttestationRequest(statement, predicate); + var attestationMetadata = BuildAttestationMetadata(statement, envelope, metadata); + + var verificationRequest = new VexAttestationVerificationRequest( + request, + attestationMetadata, + envelopeJson); + + var verification = await _attestationVerifier! 
+ .VerifyAsync(verificationRequest, cancellationToken) + .ConfigureAwait(false); + if (!verification.IsValid) + { + var diagnostics = string.Join(", ", verification.Diagnostics.Select(kvp => $"{kvp.Key}={kvp.Value}")); + _logger.LogError( + "Attestation verification failed for provider {ProviderId} (uri={SourceUri}) diagnostics={Diagnostics}", + document.ProviderId, + document.SourceUri, + diagnostics); + + var violation = AocViolation.Create( + AocViolationCode.SignatureInvalid, + "/upstream/signature", + "Attestation verification failed."); + + RecordVerification(document.ProviderId, metadata, "fail"); + throw new ExcititorAocGuardException(AocGuardResult.FromViolations(new[] { violation })); + } + + _logger.LogInformation( + "Attestation verification succeeded for provider {ProviderId} (predicate={PredicateType}, subject={Subject}).", + document.ProviderId, + attestationMetadata.PredicateType, + statement.Subject[0].Name ?? "<unknown>"); + + return BuildSignatureMetadata(statement, metadata, attestationMetadata, verification.Diagnostics); + } + catch (ExcititorAocGuardException) + { + throw; + } + catch (Exception ex) + { + _logger.LogError( + ex, + "Failed to verify attestation for provider {ProviderId} (uri={SourceUri})", + document.ProviderId, + document.SourceUri); + + var violation = AocViolation.Create( + AocViolationCode.SignatureInvalid, + "/upstream/signature", + $"Attestation verification encountered an error: {ex.Message}"); + + RecordVerification(document.ProviderId, metadata, "fail"); + throw new ExcititorAocGuardException(AocGuardResult.FromViolations(new[] { violation })); + } + } + + private VexAttestationRequest BuildAttestationRequest(VexInTotoStatement statement, VexAttestationPredicate predicate) + { + var subject = statement.Subject!.First(); + var exportId = predicate.ExportId ?? subject.Name ?? throw new InvalidOperationException("Attestation export ID missing."); + var querySignature = new VexQuerySignature(predicate.QuerySignature ?? throw new InvalidOperationException("Attestation query signature missing.")); + + if (string.IsNullOrWhiteSpace(predicate.ArtifactAlgorithm) || string.IsNullOrWhiteSpace(predicate.ArtifactDigest)) + { + throw new InvalidOperationException("Attestation artifact metadata is incomplete."); + } + + var artifact = new VexContentAddress(predicate.ArtifactAlgorithm, predicate.ArtifactDigest); + + var sourceProviders = predicate.SourceProviders?.ToImmutableArray() ?? ImmutableArray<string>.Empty; + var metadata = predicate.Metadata?.ToImmutableDictionary(StringComparer.Ordinal) ?? ImmutableDictionary<string, string>.Empty; + + return new VexAttestationRequest( + exportId, + querySignature, + artifact, + predicate.Format, + predicate.CreatedAt, + sourceProviders, + metadata); + } + + private VexAttestationMetadata BuildAttestationMetadata( + VexInTotoStatement statement, + DsseEnvelope envelope, + ImmutableDictionary<string, string> metadata) + { + VexRekorReference? rekor = null; + if (metadata.TryGetValue("vex.signature.transparencyLogReference", out var rekorValue) && !string.IsNullOrWhiteSpace(rekorValue)) + { + rekor = new VexRekorReference("0.1", rekorValue); + } + + DateTimeOffset signedAt; + if (metadata.TryGetValue("vex.signature.verifiedAt", out var signedAtRaw) + && DateTimeOffset.TryParse(signedAtRaw, out var parsedSignedAt)) + { + signedAt = parsedSignedAt; + } + else + { + signedAt = _timeProvider.GetUtcNow(); + } + + return new VexAttestationMetadata( + statement.PredicateType ?? 
"https://stella-ops.org/attestations/vex-export", + rekor, + VexDsseBuilder.ComputeEnvelopeDigest(envelope), + signedAt); + } + + private VexSignatureMetadata BuildSignatureMetadata( + VexInTotoStatement statement, + ImmutableDictionary<string, string> metadata, + VexAttestationMetadata attestationMetadata, + ImmutableDictionary<string, string> diagnostics) + { + metadata.TryGetValue("vex.signature.type", out var type); + metadata.TryGetValue("vex.provenance.cosign.subject", out var subject); + metadata.TryGetValue("vex.provenance.cosign.issuer", out var issuer); + metadata.TryGetValue("vex.signature.keyId", out var keyId); + metadata.TryGetValue("vex.signature.transparencyLogReference", out var transparencyReference); + + if (string.IsNullOrWhiteSpace(type)) + { + type = statement.PredicateType?.Contains("attest", StringComparison.OrdinalIgnoreCase) == true + ? "cosign" + : "attestation"; + } + + if (string.IsNullOrWhiteSpace(subject) && statement.Subject is { Count: > 0 }) + { + subject = statement.Subject[0].Name; + } + + if (string.IsNullOrWhiteSpace(transparencyReference) && attestationMetadata.Rekor is not null) + { + transparencyReference = attestationMetadata.Rekor.Location; + } + + if (string.IsNullOrWhiteSpace(issuer) + && diagnostics.TryGetValue("verification.issuer", out var diagnosticIssuer) + && !string.IsNullOrWhiteSpace(diagnosticIssuer)) + { + issuer = diagnosticIssuer; + } + + if (string.IsNullOrWhiteSpace(keyId) + && diagnostics.TryGetValue("verification.keyId", out var diagnosticKeyId) + && !string.IsNullOrWhiteSpace(diagnosticKeyId)) + { + keyId = diagnosticKeyId; + } + + var verifiedAt = attestationMetadata.SignedAt ?? _timeProvider.GetUtcNow(); + + return new VexSignatureMetadata( + type!, + subject, + issuer, + keyId, + verifiedAt, + transparencyReference); + } + + private static string NormalizeDigest(string digest) + { + if (string.IsNullOrWhiteSpace(digest)) + { + return string.Empty; + } + + return digest.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase) + ? digest + : $"sha256:{digest}"; + } + + private static string ComputeDigest(ReadOnlySpan<byte> content) + { + Span<byte> buffer = stackalloc byte[32]; + if (!SHA256.TryHashData(content, buffer, out _)) + { + var hash = SHA256.HashData(content.ToArray()); + return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant(); + } + + return "sha256:" + Convert.ToHexString(buffer).ToLowerInvariant(); + } + + private static VexSignatureMetadata? ExtractSignatureMetadata(ImmutableDictionary<string, string> metadata) + { + if (!metadata.TryGetValue("vex.signature.type", out var type) || string.IsNullOrWhiteSpace(type)) + { + return null; + } + + metadata.TryGetValue("vex.signature.subject", out var subject); + metadata.TryGetValue("vex.signature.issuer", out var issuer); + metadata.TryGetValue("vex.signature.keyId", out var keyId); + metadata.TryGetValue("vex.signature.verifiedAt", out var verifiedAtRaw); + metadata.TryGetValue("vex.signature.transparencyLogReference", out var tlog); + + DateTimeOffset? 
verifiedAt = null; + if (!string.IsNullOrWhiteSpace(verifiedAtRaw) && DateTimeOffset.TryParse(verifiedAtRaw, out var parsed)) + { + verifiedAt = parsed; + } + + return new VexSignatureMetadata(type, subject, issuer, keyId, verifiedAt, tlog); + } + + private static void RecordVerification(string providerId, ImmutableDictionary<string, string> metadata, string result) + { + var tags = new List<KeyValuePair<string, object?>>(3) + { + new("source", providerId), + new("result", result), + }; + + if (!metadata.TryGetValue("tenant", out var tenant) || string.IsNullOrWhiteSpace(tenant)) + { + tenant = "tenant-default"; + } + + tags.Add(new KeyValuePair<string, object?>("tenant", tenant)); + + SignatureVerificationCounter.Add(1, tags.ToArray()); + } +} diff --git a/src/Excititor/StellaOps.Excititor.Worker/StellaOps.Excititor.Worker.csproj b/src/Excititor/StellaOps.Excititor.Worker/StellaOps.Excititor.Worker.csproj new file mode 100644 index 00000000..ac03aea8 --- /dev/null +++ b/src/Excititor/StellaOps.Excititor.Worker/StellaOps.Excititor.Worker.csproj @@ -0,0 +1,25 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk.Worker"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <ItemGroup> + <PackageReference Include="Microsoft.Extensions.Hosting" Version="10.0.0-rc.2.25502.107" /> + </ItemGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" /> + <ProjectReference Include="../__Libraries/StellaOps.Excititor.Connectors.Abstractions/StellaOps.Excititor.Connectors.Abstractions.csproj" /> + <ProjectReference Include="../__Libraries/StellaOps.Excititor.Connectors.RedHat.CSAF/StellaOps.Excititor.Connectors.RedHat.CSAF.csproj" /> + <ProjectReference Include="../__Libraries/StellaOps.Excititor.Core/StellaOps.Excititor.Core.csproj" /> + <ProjectReference Include="../__Libraries/StellaOps.Excititor.Policy/StellaOps.Excititor.Policy.csproj" /> + <ProjectReference Include="../__Libraries/StellaOps.Excititor.Storage.Mongo/StellaOps.Excititor.Storage.Mongo.csproj" /> + <ProjectReference Include="../__Libraries/StellaOps.Excititor.Formats.CSAF/StellaOps.Excititor.Formats.CSAF.csproj" /> + <ProjectReference Include="../__Libraries/StellaOps.Excititor.Formats.CycloneDX/StellaOps.Excititor.Formats.CycloneDX.csproj" /> + <ProjectReference Include="../__Libraries/StellaOps.Excititor.Formats.OpenVEX/StellaOps.Excititor.Formats.OpenVEX.csproj" /> + <ProjectReference Include="../__Libraries/StellaOps.Excititor.Attestation/StellaOps.Excititor.Attestation.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Excititor.Worker/TASKS.md b/src/Excititor/StellaOps.Excititor.Worker/TASKS.md similarity index 99% rename from src/StellaOps.Excititor.Worker/TASKS.md rename to src/Excititor/StellaOps.Excititor.Worker/TASKS.md index 0781fcbf..3083ac17 100644 --- a/src/StellaOps.Excititor.Worker/TASKS.md +++ b/src/Excititor/StellaOps.Excititor.Worker/TASKS.md @@ -1,19 +1,19 @@ -# TASKS — Epic 1: Aggregation-Only Contract -| ID | Status | Owner(s) | Depends on | Notes | -|---|---|---|---|---| -| EXCITITOR-WORKER-AOC-19-001 `Raw pipeline rewiring` | DONE (2025-10-31) | Excititor Worker Guild | EXCITITOR-CORE-AOC-19-001 | Update ingest pipelines to persist upstream documents directly into `vex_raw` via the new repository guard. 
Remove consensus/folding hooks and ensure retries respect append-only semantics. | -> 2025-10-31: Worker now runs in raw-only mode; `DefaultVexProviderRunner` no longer normalizes or schedules consensus refresh and logs document counts only. Tests updated to assert the normalizer is not invoked. -| EXCITITOR-WORKER-AOC-19-002 `Signature & checksum enforcement` | DONE (2025-10-28) | Excititor Worker Guild | EXCITITOR-WORKER-AOC-19-001 | Add signature verification + checksum computation before writes, capturing failure reasons mapped to `ERR_AOC_005`, with structured logs/metrics for verification results. | -> 2025-10-28: Resuming implementation to finish attestation metadata plumbing, wiring into runner, and tests (`WorkerSignatureVerifier`, `DefaultVexProviderRunner`). -> 2025-10-28: Attestation verification now enriches signature metadata & runner tests cover DSSE path; metrics unchanged. -> 2025-10-31: Worker wraps raw sink with checksum enforcement. Digest mismatches raise `ERR_AOC_005`, signature metadata is captured when present, and `ingestion_signature_verified_total` is emitted (`result=ok|fail|skipped`). -| EXCITITOR-WORKER-AOC-19-003 `Deterministic batching tests` | DONE (2025-10-28) | QA Guild | EXCITITOR-WORKER-AOC-19-001 | Extend worker integration tests to replay large VEX batches ensuring idempotent upserts, supersedes chaining, and guard enforcement across restart scenarios. | -> 2025-10-28: Added Mongo-backed integration suite validating large batch replay, guard-triggered failures, and restart idempotency (`DefaultVexProviderRunnerIntegrationTests`). Worker unit tests now exercise the verifying sink path, and `dotnet test` passes after attestation envelope fixes. - -## Orchestrator Dashboard - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| EXCITITOR-ORCH-32-001 `Worker SDK adoption` | TODO | Excititor Worker Guild | ORCH-SVC-32-005, WORKER-GO-32-001, WORKER-PY-32-001 | Integrate orchestrator worker SDK in Excititor ingestion jobs, emit heartbeats/progress/artifact hashes, and register source metadata. | -| EXCITITOR-ORCH-33-001 `Control compliance` | TODO | Excititor Worker Guild | EXCITITOR-ORCH-32-001, ORCH-SVC-33-001, ORCH-SVC-33-002 | Honor orchestrator pause/throttle/retry actions, classify error outputs, and persist restart checkpoints. | -| EXCITITOR-ORCH-34-001 `Backfill & circuit breaker` | TODO | Excititor Worker Guild | EXCITITOR-ORCH-33-001, ORCH-SVC-33-003, ORCH-SVC-34-001 | Implement orchestrator-driven backfills, apply circuit breaker reset rules, and ensure artifact dedupe alignment. | +# TASKS — Epic 1: Aggregation-Only Contract +| ID | Status | Owner(s) | Depends on | Notes | +|---|---|---|---|---| +| EXCITITOR-WORKER-AOC-19-001 `Raw pipeline rewiring` | DONE (2025-10-31) | Excititor Worker Guild | EXCITITOR-CORE-AOC-19-001 | Update ingest pipelines to persist upstream documents directly into `vex_raw` via the new repository guard. Remove consensus/folding hooks and ensure retries respect append-only semantics. | +> 2025-10-31: Worker now runs in raw-only mode; `DefaultVexProviderRunner` no longer normalizes or schedules consensus refresh and logs document counts only. Tests updated to assert the normalizer is not invoked. 
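For reference, a minimal sketch of the metric shape behind EXCITITOR-WORKER-AOC-19-002, condensing `RecordVerification` from `WorkerSignatureVerifier` above into a standalone helper. The meter name, counter name, and tag keys mirror the worker code; the `IngestionMetricsSketch` type and its `Record` method are illustrative assumptions, not part of the patch.

```csharp
// Illustrative sketch only: condenses RecordVerification from WorkerSignatureVerifier above.
// IngestionMetricsSketch/Record are assumed names; meter, counter, and tag keys mirror the worker.
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Diagnostics.Metrics;

internal static class IngestionMetricsSketch
{
    private static readonly Meter Meter = new("StellaOps.Excititor.Worker", "1.0");
    private static readonly Counter<long> Verified =
        Meter.CreateCounter<long>("ingestion_signature_verified_total");

    public static void Record(string providerId, ImmutableDictionary<string, string> metadata, string result)
    {
        // result is "ok" (signature metadata recorded), "fail" (checksum/attestation failure -> ERR_AOC_005),
        // or "skipped" (no signature metadata present).
        if (!metadata.TryGetValue("tenant", out var tenant) || string.IsNullOrWhiteSpace(tenant))
        {
            tenant = "tenant-default"; // worker falls back when upstream metadata carries no tenant
        }

        Verified.Add(
            1,
            new KeyValuePair<string, object?>("source", providerId),
            new KeyValuePair<string, object?>("result", result),
            new KeyValuePair<string, object?>("tenant", tenant));
    }
}
```

As in `VerifyAsync` above, the `fail` tag is recorded before `ExcititorAocGuardException` is thrown, so verification failures are counted even when the document is rejected.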
+| EXCITITOR-WORKER-AOC-19-002 `Signature & checksum enforcement` | DONE (2025-10-28) | Excititor Worker Guild | EXCITITOR-WORKER-AOC-19-001 | Add signature verification + checksum computation before writes, capturing failure reasons mapped to `ERR_AOC_005`, with structured logs/metrics for verification results. | +> 2025-10-28: Resuming implementation to finish attestation metadata plumbing, wiring into runner, and tests (`WorkerSignatureVerifier`, `DefaultVexProviderRunner`). +> 2025-10-28: Attestation verification now enriches signature metadata & runner tests cover DSSE path; metrics unchanged. +> 2025-10-31: Worker wraps raw sink with checksum enforcement. Digest mismatches raise `ERR_AOC_005`, signature metadata is captured when present, and `ingestion_signature_verified_total` is emitted (`result=ok|fail|skipped`). +| EXCITITOR-WORKER-AOC-19-003 `Deterministic batching tests` | DONE (2025-10-28) | QA Guild | EXCITITOR-WORKER-AOC-19-001 | Extend worker integration tests to replay large VEX batches ensuring idempotent upserts, supersedes chaining, and guard enforcement across restart scenarios. | +> 2025-10-28: Added Mongo-backed integration suite validating large batch replay, guard-triggered failures, and restart idempotency (`DefaultVexProviderRunnerIntegrationTests`). Worker unit tests now exercise the verifying sink path, and `dotnet test` passes after attestation envelope fixes. + +## Orchestrator Dashboard + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| EXCITITOR-ORCH-32-001 `Worker SDK adoption` | TODO | Excititor Worker Guild | ORCH-SVC-32-005, WORKER-GO-32-001, WORKER-PY-32-001 | Integrate orchestrator worker SDK in Excititor ingestion jobs, emit heartbeats/progress/artifact hashes, and register source metadata. | +| EXCITITOR-ORCH-33-001 `Control compliance` | TODO | Excititor Worker Guild | EXCITITOR-ORCH-32-001, ORCH-SVC-33-001, ORCH-SVC-33-002 | Honor orchestrator pause/throttle/retry actions, classify error outputs, and persist restart checkpoints. | +| EXCITITOR-ORCH-34-001 `Backfill & circuit breaker` | TODO | Excititor Worker Guild | EXCITITOR-ORCH-33-001, ORCH-SVC-33-003, ORCH-SVC-34-001 | Implement orchestrator-driven backfills, apply circuit breaker reset rules, and ensure artifact dedupe alignment. 
| diff --git a/src/Excititor/StellaOps.Excititor.sln b/src/Excititor/StellaOps.Excititor.sln new file mode 100644 index 00000000..0b34bc11 --- /dev/null +++ b/src/Excititor/StellaOps.Excititor.sln @@ -0,0 +1,705 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.0.31903.59 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.WebService", "StellaOps.Excititor.WebService\StellaOps.Excititor.WebService.csproj", "{AF8F1262-FC95-49EB-B096-A028693DD606}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "__Libraries", "__Libraries", "{41F15E67-7190-CF23-3BC4-77E87134CADD}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Core", "__Libraries\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj", "{87631154-82C3-43F6-8F41-46CB877AA16D}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Aoc", "..\Aoc\__Libraries\StellaOps.Aoc\StellaOps.Aoc.csproj", "{1A49D368-184D-4040-AD11-37A3F6BCD261}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.RawModels", "..\Concelier\__Libraries\StellaOps.Concelier.RawModels\StellaOps.Concelier.RawModels.csproj", "{2D19CC50-EFE9-4015-B4DB-6DFF4E41DB11}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Storage.Mongo", "__Libraries\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj", "{5858415D-8AB4-4E45-B316-580879FD8339}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Export", "__Libraries\StellaOps.Excititor.Export\StellaOps.Excititor.Export.csproj", "{E8B20DD0-9282-4DFD-B363-F0AF7F62AED5}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Policy", "__Libraries\StellaOps.Excititor.Policy\StellaOps.Excititor.Policy.csproj", "{400690F2-466B-4DF0-B495-9015DBBAA046}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cryptography", "..\__Libraries\StellaOps.Cryptography\StellaOps.Cryptography.csproj", "{5067124E-37E5-4BC4-B758-CAA96E274D8C}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Attestation", "__Libraries\StellaOps.Excititor.Attestation\StellaOps.Excititor.Attestation.csproj", "{16E426BF-8697-4DB1-ABC5-5537CDE74D95}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.ArtifactStores.S3", "__Libraries\StellaOps.Excititor.ArtifactStores.S3\StellaOps.Excititor.ArtifactStores.S3.csproj", "{2603B1D1-E1DE-4903-BEE2-DC593FE2A5C3}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Connectors.RedHat.CSAF", "__Libraries\StellaOps.Excititor.Connectors.RedHat.CSAF\StellaOps.Excititor.Connectors.RedHat.CSAF.csproj", "{CC391919-15F5-43DE-8271-8043090B7D8D}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Connectors.Abstractions", "__Libraries\StellaOps.Excititor.Connectors.Abstractions\StellaOps.Excititor.Connectors.Abstractions.csproj", "{BB45DABD-1709-40C3-92B5-29C7AFFF9645}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Formats.CSAF", "__Libraries\StellaOps.Excititor.Formats.CSAF\StellaOps.Excititor.Formats.CSAF.csproj", "{181B855F-FBD3-44B6-A679-15EC88E8625A}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Formats.CycloneDX", 
"__Libraries\StellaOps.Excititor.Formats.CycloneDX\StellaOps.Excititor.Formats.CycloneDX.csproj", "{7E839AAE-99FF-4AFD-B986-520306AFA403}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Formats.OpenVEX", "__Libraries\StellaOps.Excititor.Formats.OpenVEX\StellaOps.Excititor.Formats.OpenVEX.csproj", "{863DD74A-947C-431E-B661-9C2A46472CD0}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Worker", "StellaOps.Excititor.Worker\StellaOps.Excititor.Worker.csproj", "{0CE1FE59-B0FB-423B-B55B-C8F31A67D868}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Plugin", "..\__Libraries\StellaOps.Plugin\StellaOps.Plugin.csproj", "{598E8702-B9D9-45BE-9A33-004A93EE6E25}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.DependencyInjection", "..\__Libraries\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj", "{79056784-D88C-47C2-B49D-1A25D58FC03B}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Connectors.Cisco.CSAF", "__Libraries\StellaOps.Excititor.Connectors.Cisco.CSAF\StellaOps.Excititor.Connectors.Cisco.CSAF.csproj", "{C75036AF-D828-41D3-9322-F67828EF8FBB}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Connectors.MSRC.CSAF", "__Libraries\StellaOps.Excititor.Connectors.MSRC.CSAF\StellaOps.Excititor.Connectors.MSRC.CSAF.csproj", "{643BF7A5-2CD1-4CBA-BC94-A1477AB21FC0}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest", "__Libraries\StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest\StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.csproj", "{50B53195-F0DD-4DCE-95A7-0949C13D706B}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Connectors.Oracle.CSAF", "__Libraries\StellaOps.Excititor.Connectors.Oracle.CSAF\StellaOps.Excititor.Connectors.Oracle.CSAF.csproj", "{D2CD82C4-0D40-4316-A83D-FCC5D715DE95}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Connectors.SUSE.RancherVEXHub", "__Libraries\StellaOps.Excititor.Connectors.SUSE.RancherVEXHub\StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.csproj", "{E553CAFD-794B-437C-ABCC-C780DC1ADF3C}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Connectors.Ubuntu.CSAF", "__Libraries\StellaOps.Excititor.Connectors.Ubuntu.CSAF\StellaOps.Excititor.Connectors.Ubuntu.CSAF.csproj", "{E3DD0BB0-C4C6-4A56-A46E-45870851FB3D}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "__Tests", "__Tests", "{56BCE1BF-7CBA-7CE8-203D-A88051F1D642}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.ArtifactStores.S3.Tests", "__Tests\StellaOps.Excititor.ArtifactStores.S3.Tests\StellaOps.Excititor.ArtifactStores.S3.Tests.csproj", "{111BEB1A-8664-4AA6-8275-7440F33E79C9}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Attestation.Tests", "__Tests\StellaOps.Excititor.Attestation.Tests\StellaOps.Excititor.Attestation.Tests.csproj", "{26B663A0-404C-4D0C-9687-17079CDFFEBF}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Connectors.Cisco.CSAF.Tests", "__Tests\StellaOps.Excititor.Connectors.Cisco.CSAF.Tests\StellaOps.Excititor.Connectors.Cisco.CSAF.Tests.csproj", "{BE9C0870-1912-4EF5-8C6D-BFF42F235F4E}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = 
"StellaOps.Excititor.Connectors.MSRC.CSAF.Tests", "__Tests\StellaOps.Excititor.Connectors.MSRC.CSAF.Tests\StellaOps.Excititor.Connectors.MSRC.CSAF.Tests.csproj", "{86E49D28-9035-4EB4-8C7F-E3915C5A2046}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Tests", "__Tests\StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Tests\StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Tests.csproj", "{67990ECE-E2D4-4BC4-8F05-734E02379F23}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Connectors.Oracle.CSAF.Tests", "__Tests\StellaOps.Excititor.Connectors.Oracle.CSAF.Tests\StellaOps.Excititor.Connectors.Oracle.CSAF.Tests.csproj", "{35DF0F52-8BEE-4969-B7F3-54CFF4AFAD18}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Connectors.RedHat.CSAF.Tests", "__Tests\StellaOps.Excititor.Connectors.RedHat.CSAF.Tests\StellaOps.Excititor.Connectors.RedHat.CSAF.Tests.csproj", "{EBC3B08D-11E7-4286-940F-27305028148E}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Tests", "__Tests\StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Tests\StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Tests.csproj", "{640E732E-01C7-4A7E-9AE1-35117B26AB1E}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests", "__Tests\StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests\StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests.csproj", "{ADFC7CC7-D079-43A1-833C-7E3775184EB6}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Core.Tests", "__Tests\StellaOps.Excititor.Core.Tests\StellaOps.Excititor.Core.Tests.csproj", "{152EC0B1-8312-40F7-AF96-16B8E6AABA52}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Export.Tests", "__Tests\StellaOps.Excititor.Export.Tests\StellaOps.Excititor.Export.Tests.csproj", "{1DFD7A8F-075A-4507-AC7C-EF867F4AEA92}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Formats.CSAF.Tests", "__Tests\StellaOps.Excititor.Formats.CSAF.Tests\StellaOps.Excititor.Formats.CSAF.Tests.csproj", "{43BA0A53-6806-41BA-9C2B-FE781BBCE85B}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Formats.CycloneDX.Tests", "__Tests\StellaOps.Excititor.Formats.CycloneDX.Tests\StellaOps.Excititor.Formats.CycloneDX.Tests.csproj", "{E93FE8CE-28A6-4C7E-96ED-D99406653FDC}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Formats.OpenVEX.Tests", "__Tests\StellaOps.Excititor.Formats.OpenVEX.Tests\StellaOps.Excititor.Formats.OpenVEX.Tests.csproj", "{E83FC97E-B88E-4BE5-89D1-12C01631F575}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Policy.Tests", "__Tests\StellaOps.Excititor.Policy.Tests\StellaOps.Excititor.Policy.Tests.csproj", "{832F539E-17FC-46B4-9E67-39BE5131352D}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Storage.Mongo.Tests", "__Tests\StellaOps.Excititor.Storage.Mongo.Tests\StellaOps.Excititor.Storage.Mongo.Tests.csproj", "{5BB6E9E8-3470-4BFF-94DD-DA3294616C39}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Storage.Mongo", "..\Concelier\__Libraries\StellaOps.Concelier.Storage.Mongo\StellaOps.Concelier.Storage.Mongo.csproj", "{6507860E-BF0D-4E32-A6AC-49E1CE15E4B7}" +EndProject 
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Core", "..\Concelier\__Libraries\StellaOps.Concelier.Core\StellaOps.Concelier.Core.csproj", "{D6014A0A-6BF4-45C8-918E-9558A24AAC5B}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Models", "..\Concelier\__Libraries\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj", "{13AF13D1-84C3-4D4F-B89A-0653102C3E63}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Normalization", "..\Concelier\__Libraries\StellaOps.Concelier.Normalization\StellaOps.Concelier.Normalization.csproj", "{79304AC3-6A2E-454B-A0FF-F656D2D75538}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.WebService.Tests", "__Tests\StellaOps.Excititor.WebService.Tests\StellaOps.Excititor.WebService.Tests.csproj", "{A1007C02-2143-48C6-8380-E3785AF3002D}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Worker.Tests", "__Tests\StellaOps.Excititor.Worker.Tests\StellaOps.Excititor.Worker.Tests.csproj", "{3F51027B-F194-4321-AC7B-E00DA5CD47E3}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|Any CPU = Release|Any CPU + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {AF8F1262-FC95-49EB-B096-A028693DD606}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {AF8F1262-FC95-49EB-B096-A028693DD606}.Debug|Any CPU.Build.0 = Debug|Any CPU + {AF8F1262-FC95-49EB-B096-A028693DD606}.Debug|x64.ActiveCfg = Debug|Any CPU + {AF8F1262-FC95-49EB-B096-A028693DD606}.Debug|x64.Build.0 = Debug|Any CPU + {AF8F1262-FC95-49EB-B096-A028693DD606}.Debug|x86.ActiveCfg = Debug|Any CPU + {AF8F1262-FC95-49EB-B096-A028693DD606}.Debug|x86.Build.0 = Debug|Any CPU + {AF8F1262-FC95-49EB-B096-A028693DD606}.Release|Any CPU.ActiveCfg = Release|Any CPU + {AF8F1262-FC95-49EB-B096-A028693DD606}.Release|Any CPU.Build.0 = Release|Any CPU + {AF8F1262-FC95-49EB-B096-A028693DD606}.Release|x64.ActiveCfg = Release|Any CPU + {AF8F1262-FC95-49EB-B096-A028693DD606}.Release|x64.Build.0 = Release|Any CPU + {AF8F1262-FC95-49EB-B096-A028693DD606}.Release|x86.ActiveCfg = Release|Any CPU + {AF8F1262-FC95-49EB-B096-A028693DD606}.Release|x86.Build.0 = Release|Any CPU + {87631154-82C3-43F6-8F41-46CB877AA16D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {87631154-82C3-43F6-8F41-46CB877AA16D}.Debug|Any CPU.Build.0 = Debug|Any CPU + {87631154-82C3-43F6-8F41-46CB877AA16D}.Debug|x64.ActiveCfg = Debug|Any CPU + {87631154-82C3-43F6-8F41-46CB877AA16D}.Debug|x64.Build.0 = Debug|Any CPU + {87631154-82C3-43F6-8F41-46CB877AA16D}.Debug|x86.ActiveCfg = Debug|Any CPU + {87631154-82C3-43F6-8F41-46CB877AA16D}.Debug|x86.Build.0 = Debug|Any CPU + {87631154-82C3-43F6-8F41-46CB877AA16D}.Release|Any CPU.ActiveCfg = Release|Any CPU + {87631154-82C3-43F6-8F41-46CB877AA16D}.Release|Any CPU.Build.0 = Release|Any CPU + {87631154-82C3-43F6-8F41-46CB877AA16D}.Release|x64.ActiveCfg = Release|Any CPU + {87631154-82C3-43F6-8F41-46CB877AA16D}.Release|x64.Build.0 = Release|Any CPU + {87631154-82C3-43F6-8F41-46CB877AA16D}.Release|x86.ActiveCfg = Release|Any CPU + {87631154-82C3-43F6-8F41-46CB877AA16D}.Release|x86.Build.0 = Release|Any CPU + {1A49D368-184D-4040-AD11-37A3F6BCD261}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {1A49D368-184D-4040-AD11-37A3F6BCD261}.Debug|Any CPU.Build.0 = Debug|Any CPU + 
{1A49D368-184D-4040-AD11-37A3F6BCD261}.Debug|x64.ActiveCfg = Debug|Any CPU + {1A49D368-184D-4040-AD11-37A3F6BCD261}.Debug|x64.Build.0 = Debug|Any CPU + {1A49D368-184D-4040-AD11-37A3F6BCD261}.Debug|x86.ActiveCfg = Debug|Any CPU + {1A49D368-184D-4040-AD11-37A3F6BCD261}.Debug|x86.Build.0 = Debug|Any CPU + {1A49D368-184D-4040-AD11-37A3F6BCD261}.Release|Any CPU.ActiveCfg = Release|Any CPU + {1A49D368-184D-4040-AD11-37A3F6BCD261}.Release|Any CPU.Build.0 = Release|Any CPU + {1A49D368-184D-4040-AD11-37A3F6BCD261}.Release|x64.ActiveCfg = Release|Any CPU + {1A49D368-184D-4040-AD11-37A3F6BCD261}.Release|x64.Build.0 = Release|Any CPU + {1A49D368-184D-4040-AD11-37A3F6BCD261}.Release|x86.ActiveCfg = Release|Any CPU + {1A49D368-184D-4040-AD11-37A3F6BCD261}.Release|x86.Build.0 = Release|Any CPU + {2D19CC50-EFE9-4015-B4DB-6DFF4E41DB11}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {2D19CC50-EFE9-4015-B4DB-6DFF4E41DB11}.Debug|Any CPU.Build.0 = Debug|Any CPU + {2D19CC50-EFE9-4015-B4DB-6DFF4E41DB11}.Debug|x64.ActiveCfg = Debug|Any CPU + {2D19CC50-EFE9-4015-B4DB-6DFF4E41DB11}.Debug|x64.Build.0 = Debug|Any CPU + {2D19CC50-EFE9-4015-B4DB-6DFF4E41DB11}.Debug|x86.ActiveCfg = Debug|Any CPU + {2D19CC50-EFE9-4015-B4DB-6DFF4E41DB11}.Debug|x86.Build.0 = Debug|Any CPU + {2D19CC50-EFE9-4015-B4DB-6DFF4E41DB11}.Release|Any CPU.ActiveCfg = Release|Any CPU + {2D19CC50-EFE9-4015-B4DB-6DFF4E41DB11}.Release|Any CPU.Build.0 = Release|Any CPU + {2D19CC50-EFE9-4015-B4DB-6DFF4E41DB11}.Release|x64.ActiveCfg = Release|Any CPU + {2D19CC50-EFE9-4015-B4DB-6DFF4E41DB11}.Release|x64.Build.0 = Release|Any CPU + {2D19CC50-EFE9-4015-B4DB-6DFF4E41DB11}.Release|x86.ActiveCfg = Release|Any CPU + {2D19CC50-EFE9-4015-B4DB-6DFF4E41DB11}.Release|x86.Build.0 = Release|Any CPU + {5858415D-8AB4-4E45-B316-580879FD8339}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {5858415D-8AB4-4E45-B316-580879FD8339}.Debug|Any CPU.Build.0 = Debug|Any CPU + {5858415D-8AB4-4E45-B316-580879FD8339}.Debug|x64.ActiveCfg = Debug|Any CPU + {5858415D-8AB4-4E45-B316-580879FD8339}.Debug|x64.Build.0 = Debug|Any CPU + {5858415D-8AB4-4E45-B316-580879FD8339}.Debug|x86.ActiveCfg = Debug|Any CPU + {5858415D-8AB4-4E45-B316-580879FD8339}.Debug|x86.Build.0 = Debug|Any CPU + {5858415D-8AB4-4E45-B316-580879FD8339}.Release|Any CPU.ActiveCfg = Release|Any CPU + {5858415D-8AB4-4E45-B316-580879FD8339}.Release|Any CPU.Build.0 = Release|Any CPU + {5858415D-8AB4-4E45-B316-580879FD8339}.Release|x64.ActiveCfg = Release|Any CPU + {5858415D-8AB4-4E45-B316-580879FD8339}.Release|x64.Build.0 = Release|Any CPU + {5858415D-8AB4-4E45-B316-580879FD8339}.Release|x86.ActiveCfg = Release|Any CPU + {5858415D-8AB4-4E45-B316-580879FD8339}.Release|x86.Build.0 = Release|Any CPU + {E8B20DD0-9282-4DFD-B363-F0AF7F62AED5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E8B20DD0-9282-4DFD-B363-F0AF7F62AED5}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E8B20DD0-9282-4DFD-B363-F0AF7F62AED5}.Debug|x64.ActiveCfg = Debug|Any CPU + {E8B20DD0-9282-4DFD-B363-F0AF7F62AED5}.Debug|x64.Build.0 = Debug|Any CPU + {E8B20DD0-9282-4DFD-B363-F0AF7F62AED5}.Debug|x86.ActiveCfg = Debug|Any CPU + {E8B20DD0-9282-4DFD-B363-F0AF7F62AED5}.Debug|x86.Build.0 = Debug|Any CPU + {E8B20DD0-9282-4DFD-B363-F0AF7F62AED5}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E8B20DD0-9282-4DFD-B363-F0AF7F62AED5}.Release|Any CPU.Build.0 = Release|Any CPU + {E8B20DD0-9282-4DFD-B363-F0AF7F62AED5}.Release|x64.ActiveCfg = Release|Any CPU + {E8B20DD0-9282-4DFD-B363-F0AF7F62AED5}.Release|x64.Build.0 = Release|Any CPU + {E8B20DD0-9282-4DFD-B363-F0AF7F62AED5}.Release|x86.ActiveCfg = 
Release|Any CPU + {E8B20DD0-9282-4DFD-B363-F0AF7F62AED5}.Release|x86.Build.0 = Release|Any CPU + {400690F2-466B-4DF0-B495-9015DBBAA046}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {400690F2-466B-4DF0-B495-9015DBBAA046}.Debug|Any CPU.Build.0 = Debug|Any CPU + {400690F2-466B-4DF0-B495-9015DBBAA046}.Debug|x64.ActiveCfg = Debug|Any CPU + {400690F2-466B-4DF0-B495-9015DBBAA046}.Debug|x64.Build.0 = Debug|Any CPU + {400690F2-466B-4DF0-B495-9015DBBAA046}.Debug|x86.ActiveCfg = Debug|Any CPU + {400690F2-466B-4DF0-B495-9015DBBAA046}.Debug|x86.Build.0 = Debug|Any CPU + {400690F2-466B-4DF0-B495-9015DBBAA046}.Release|Any CPU.ActiveCfg = Release|Any CPU + {400690F2-466B-4DF0-B495-9015DBBAA046}.Release|Any CPU.Build.0 = Release|Any CPU + {400690F2-466B-4DF0-B495-9015DBBAA046}.Release|x64.ActiveCfg = Release|Any CPU + {400690F2-466B-4DF0-B495-9015DBBAA046}.Release|x64.Build.0 = Release|Any CPU + {400690F2-466B-4DF0-B495-9015DBBAA046}.Release|x86.ActiveCfg = Release|Any CPU + {400690F2-466B-4DF0-B495-9015DBBAA046}.Release|x86.Build.0 = Release|Any CPU + {5067124E-37E5-4BC4-B758-CAA96E274D8C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {5067124E-37E5-4BC4-B758-CAA96E274D8C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {5067124E-37E5-4BC4-B758-CAA96E274D8C}.Debug|x64.ActiveCfg = Debug|Any CPU + {5067124E-37E5-4BC4-B758-CAA96E274D8C}.Debug|x64.Build.0 = Debug|Any CPU + {5067124E-37E5-4BC4-B758-CAA96E274D8C}.Debug|x86.ActiveCfg = Debug|Any CPU + {5067124E-37E5-4BC4-B758-CAA96E274D8C}.Debug|x86.Build.0 = Debug|Any CPU + {5067124E-37E5-4BC4-B758-CAA96E274D8C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {5067124E-37E5-4BC4-B758-CAA96E274D8C}.Release|Any CPU.Build.0 = Release|Any CPU + {5067124E-37E5-4BC4-B758-CAA96E274D8C}.Release|x64.ActiveCfg = Release|Any CPU + {5067124E-37E5-4BC4-B758-CAA96E274D8C}.Release|x64.Build.0 = Release|Any CPU + {5067124E-37E5-4BC4-B758-CAA96E274D8C}.Release|x86.ActiveCfg = Release|Any CPU + {5067124E-37E5-4BC4-B758-CAA96E274D8C}.Release|x86.Build.0 = Release|Any CPU + {16E426BF-8697-4DB1-ABC5-5537CDE74D95}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {16E426BF-8697-4DB1-ABC5-5537CDE74D95}.Debug|Any CPU.Build.0 = Debug|Any CPU + {16E426BF-8697-4DB1-ABC5-5537CDE74D95}.Debug|x64.ActiveCfg = Debug|Any CPU + {16E426BF-8697-4DB1-ABC5-5537CDE74D95}.Debug|x64.Build.0 = Debug|Any CPU + {16E426BF-8697-4DB1-ABC5-5537CDE74D95}.Debug|x86.ActiveCfg = Debug|Any CPU + {16E426BF-8697-4DB1-ABC5-5537CDE74D95}.Debug|x86.Build.0 = Debug|Any CPU + {16E426BF-8697-4DB1-ABC5-5537CDE74D95}.Release|Any CPU.ActiveCfg = Release|Any CPU + {16E426BF-8697-4DB1-ABC5-5537CDE74D95}.Release|Any CPU.Build.0 = Release|Any CPU + {16E426BF-8697-4DB1-ABC5-5537CDE74D95}.Release|x64.ActiveCfg = Release|Any CPU + {16E426BF-8697-4DB1-ABC5-5537CDE74D95}.Release|x64.Build.0 = Release|Any CPU + {16E426BF-8697-4DB1-ABC5-5537CDE74D95}.Release|x86.ActiveCfg = Release|Any CPU + {16E426BF-8697-4DB1-ABC5-5537CDE74D95}.Release|x86.Build.0 = Release|Any CPU + {2603B1D1-E1DE-4903-BEE2-DC593FE2A5C3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {2603B1D1-E1DE-4903-BEE2-DC593FE2A5C3}.Debug|Any CPU.Build.0 = Debug|Any CPU + {2603B1D1-E1DE-4903-BEE2-DC593FE2A5C3}.Debug|x64.ActiveCfg = Debug|Any CPU + {2603B1D1-E1DE-4903-BEE2-DC593FE2A5C3}.Debug|x64.Build.0 = Debug|Any CPU + {2603B1D1-E1DE-4903-BEE2-DC593FE2A5C3}.Debug|x86.ActiveCfg = Debug|Any CPU + {2603B1D1-E1DE-4903-BEE2-DC593FE2A5C3}.Debug|x86.Build.0 = Debug|Any CPU + {2603B1D1-E1DE-4903-BEE2-DC593FE2A5C3}.Release|Any CPU.ActiveCfg = Release|Any CPU + {2603B1D1-E1DE-4903-BEE2-DC593FE2A5C3}.Release|Any 
CPU.Build.0 = Release|Any CPU + {2603B1D1-E1DE-4903-BEE2-DC593FE2A5C3}.Release|x64.ActiveCfg = Release|Any CPU + {2603B1D1-E1DE-4903-BEE2-DC593FE2A5C3}.Release|x64.Build.0 = Release|Any CPU + {2603B1D1-E1DE-4903-BEE2-DC593FE2A5C3}.Release|x86.ActiveCfg = Release|Any CPU + {2603B1D1-E1DE-4903-BEE2-DC593FE2A5C3}.Release|x86.Build.0 = Release|Any CPU + {CC391919-15F5-43DE-8271-8043090B7D8D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {CC391919-15F5-43DE-8271-8043090B7D8D}.Debug|Any CPU.Build.0 = Debug|Any CPU + {CC391919-15F5-43DE-8271-8043090B7D8D}.Debug|x64.ActiveCfg = Debug|Any CPU + {CC391919-15F5-43DE-8271-8043090B7D8D}.Debug|x64.Build.0 = Debug|Any CPU + {CC391919-15F5-43DE-8271-8043090B7D8D}.Debug|x86.ActiveCfg = Debug|Any CPU + {CC391919-15F5-43DE-8271-8043090B7D8D}.Debug|x86.Build.0 = Debug|Any CPU + {CC391919-15F5-43DE-8271-8043090B7D8D}.Release|Any CPU.ActiveCfg = Release|Any CPU + {CC391919-15F5-43DE-8271-8043090B7D8D}.Release|Any CPU.Build.0 = Release|Any CPU + {CC391919-15F5-43DE-8271-8043090B7D8D}.Release|x64.ActiveCfg = Release|Any CPU + {CC391919-15F5-43DE-8271-8043090B7D8D}.Release|x64.Build.0 = Release|Any CPU + {CC391919-15F5-43DE-8271-8043090B7D8D}.Release|x86.ActiveCfg = Release|Any CPU + {CC391919-15F5-43DE-8271-8043090B7D8D}.Release|x86.Build.0 = Release|Any CPU + {BB45DABD-1709-40C3-92B5-29C7AFFF9645}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {BB45DABD-1709-40C3-92B5-29C7AFFF9645}.Debug|Any CPU.Build.0 = Debug|Any CPU + {BB45DABD-1709-40C3-92B5-29C7AFFF9645}.Debug|x64.ActiveCfg = Debug|Any CPU + {BB45DABD-1709-40C3-92B5-29C7AFFF9645}.Debug|x64.Build.0 = Debug|Any CPU + {BB45DABD-1709-40C3-92B5-29C7AFFF9645}.Debug|x86.ActiveCfg = Debug|Any CPU + {BB45DABD-1709-40C3-92B5-29C7AFFF9645}.Debug|x86.Build.0 = Debug|Any CPU + {BB45DABD-1709-40C3-92B5-29C7AFFF9645}.Release|Any CPU.ActiveCfg = Release|Any CPU + {BB45DABD-1709-40C3-92B5-29C7AFFF9645}.Release|Any CPU.Build.0 = Release|Any CPU + {BB45DABD-1709-40C3-92B5-29C7AFFF9645}.Release|x64.ActiveCfg = Release|Any CPU + {BB45DABD-1709-40C3-92B5-29C7AFFF9645}.Release|x64.Build.0 = Release|Any CPU + {BB45DABD-1709-40C3-92B5-29C7AFFF9645}.Release|x86.ActiveCfg = Release|Any CPU + {BB45DABD-1709-40C3-92B5-29C7AFFF9645}.Release|x86.Build.0 = Release|Any CPU + {181B855F-FBD3-44B6-A679-15EC88E8625A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {181B855F-FBD3-44B6-A679-15EC88E8625A}.Debug|Any CPU.Build.0 = Debug|Any CPU + {181B855F-FBD3-44B6-A679-15EC88E8625A}.Debug|x64.ActiveCfg = Debug|Any CPU + {181B855F-FBD3-44B6-A679-15EC88E8625A}.Debug|x64.Build.0 = Debug|Any CPU + {181B855F-FBD3-44B6-A679-15EC88E8625A}.Debug|x86.ActiveCfg = Debug|Any CPU + {181B855F-FBD3-44B6-A679-15EC88E8625A}.Debug|x86.Build.0 = Debug|Any CPU + {181B855F-FBD3-44B6-A679-15EC88E8625A}.Release|Any CPU.ActiveCfg = Release|Any CPU + {181B855F-FBD3-44B6-A679-15EC88E8625A}.Release|Any CPU.Build.0 = Release|Any CPU + {181B855F-FBD3-44B6-A679-15EC88E8625A}.Release|x64.ActiveCfg = Release|Any CPU + {181B855F-FBD3-44B6-A679-15EC88E8625A}.Release|x64.Build.0 = Release|Any CPU + {181B855F-FBD3-44B6-A679-15EC88E8625A}.Release|x86.ActiveCfg = Release|Any CPU + {181B855F-FBD3-44B6-A679-15EC88E8625A}.Release|x86.Build.0 = Release|Any CPU + {7E839AAE-99FF-4AFD-B986-520306AFA403}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7E839AAE-99FF-4AFD-B986-520306AFA403}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7E839AAE-99FF-4AFD-B986-520306AFA403}.Debug|x64.ActiveCfg = Debug|Any CPU + {7E839AAE-99FF-4AFD-B986-520306AFA403}.Debug|x64.Build.0 = Debug|Any CPU + 
{7E839AAE-99FF-4AFD-B986-520306AFA403}.Debug|x86.ActiveCfg = Debug|Any CPU + {7E839AAE-99FF-4AFD-B986-520306AFA403}.Debug|x86.Build.0 = Debug|Any CPU + {7E839AAE-99FF-4AFD-B986-520306AFA403}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7E839AAE-99FF-4AFD-B986-520306AFA403}.Release|Any CPU.Build.0 = Release|Any CPU + {7E839AAE-99FF-4AFD-B986-520306AFA403}.Release|x64.ActiveCfg = Release|Any CPU + {7E839AAE-99FF-4AFD-B986-520306AFA403}.Release|x64.Build.0 = Release|Any CPU + {7E839AAE-99FF-4AFD-B986-520306AFA403}.Release|x86.ActiveCfg = Release|Any CPU + {7E839AAE-99FF-4AFD-B986-520306AFA403}.Release|x86.Build.0 = Release|Any CPU + {863DD74A-947C-431E-B661-9C2A46472CD0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {863DD74A-947C-431E-B661-9C2A46472CD0}.Debug|Any CPU.Build.0 = Debug|Any CPU + {863DD74A-947C-431E-B661-9C2A46472CD0}.Debug|x64.ActiveCfg = Debug|Any CPU + {863DD74A-947C-431E-B661-9C2A46472CD0}.Debug|x64.Build.0 = Debug|Any CPU + {863DD74A-947C-431E-B661-9C2A46472CD0}.Debug|x86.ActiveCfg = Debug|Any CPU + {863DD74A-947C-431E-B661-9C2A46472CD0}.Debug|x86.Build.0 = Debug|Any CPU + {863DD74A-947C-431E-B661-9C2A46472CD0}.Release|Any CPU.ActiveCfg = Release|Any CPU + {863DD74A-947C-431E-B661-9C2A46472CD0}.Release|Any CPU.Build.0 = Release|Any CPU + {863DD74A-947C-431E-B661-9C2A46472CD0}.Release|x64.ActiveCfg = Release|Any CPU + {863DD74A-947C-431E-B661-9C2A46472CD0}.Release|x64.Build.0 = Release|Any CPU + {863DD74A-947C-431E-B661-9C2A46472CD0}.Release|x86.ActiveCfg = Release|Any CPU + {863DD74A-947C-431E-B661-9C2A46472CD0}.Release|x86.Build.0 = Release|Any CPU + {0CE1FE59-B0FB-423B-B55B-C8F31A67D868}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {0CE1FE59-B0FB-423B-B55B-C8F31A67D868}.Debug|Any CPU.Build.0 = Debug|Any CPU + {0CE1FE59-B0FB-423B-B55B-C8F31A67D868}.Debug|x64.ActiveCfg = Debug|Any CPU + {0CE1FE59-B0FB-423B-B55B-C8F31A67D868}.Debug|x64.Build.0 = Debug|Any CPU + {0CE1FE59-B0FB-423B-B55B-C8F31A67D868}.Debug|x86.ActiveCfg = Debug|Any CPU + {0CE1FE59-B0FB-423B-B55B-C8F31A67D868}.Debug|x86.Build.0 = Debug|Any CPU + {0CE1FE59-B0FB-423B-B55B-C8F31A67D868}.Release|Any CPU.ActiveCfg = Release|Any CPU + {0CE1FE59-B0FB-423B-B55B-C8F31A67D868}.Release|Any CPU.Build.0 = Release|Any CPU + {0CE1FE59-B0FB-423B-B55B-C8F31A67D868}.Release|x64.ActiveCfg = Release|Any CPU + {0CE1FE59-B0FB-423B-B55B-C8F31A67D868}.Release|x64.Build.0 = Release|Any CPU + {0CE1FE59-B0FB-423B-B55B-C8F31A67D868}.Release|x86.ActiveCfg = Release|Any CPU + {0CE1FE59-B0FB-423B-B55B-C8F31A67D868}.Release|x86.Build.0 = Release|Any CPU + {598E8702-B9D9-45BE-9A33-004A93EE6E25}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {598E8702-B9D9-45BE-9A33-004A93EE6E25}.Debug|Any CPU.Build.0 = Debug|Any CPU + {598E8702-B9D9-45BE-9A33-004A93EE6E25}.Debug|x64.ActiveCfg = Debug|Any CPU + {598E8702-B9D9-45BE-9A33-004A93EE6E25}.Debug|x64.Build.0 = Debug|Any CPU + {598E8702-B9D9-45BE-9A33-004A93EE6E25}.Debug|x86.ActiveCfg = Debug|Any CPU + {598E8702-B9D9-45BE-9A33-004A93EE6E25}.Debug|x86.Build.0 = Debug|Any CPU + {598E8702-B9D9-45BE-9A33-004A93EE6E25}.Release|Any CPU.ActiveCfg = Release|Any CPU + {598E8702-B9D9-45BE-9A33-004A93EE6E25}.Release|Any CPU.Build.0 = Release|Any CPU + {598E8702-B9D9-45BE-9A33-004A93EE6E25}.Release|x64.ActiveCfg = Release|Any CPU + {598E8702-B9D9-45BE-9A33-004A93EE6E25}.Release|x64.Build.0 = Release|Any CPU + {598E8702-B9D9-45BE-9A33-004A93EE6E25}.Release|x86.ActiveCfg = Release|Any CPU + {598E8702-B9D9-45BE-9A33-004A93EE6E25}.Release|x86.Build.0 = Release|Any CPU + {79056784-D88C-47C2-B49D-1A25D58FC03B}.Debug|Any 
CPU.ActiveCfg = Debug|Any CPU + {79056784-D88C-47C2-B49D-1A25D58FC03B}.Debug|Any CPU.Build.0 = Debug|Any CPU + {79056784-D88C-47C2-B49D-1A25D58FC03B}.Debug|x64.ActiveCfg = Debug|Any CPU + {79056784-D88C-47C2-B49D-1A25D58FC03B}.Debug|x64.Build.0 = Debug|Any CPU + {79056784-D88C-47C2-B49D-1A25D58FC03B}.Debug|x86.ActiveCfg = Debug|Any CPU + {79056784-D88C-47C2-B49D-1A25D58FC03B}.Debug|x86.Build.0 = Debug|Any CPU + {79056784-D88C-47C2-B49D-1A25D58FC03B}.Release|Any CPU.ActiveCfg = Release|Any CPU + {79056784-D88C-47C2-B49D-1A25D58FC03B}.Release|Any CPU.Build.0 = Release|Any CPU + {79056784-D88C-47C2-B49D-1A25D58FC03B}.Release|x64.ActiveCfg = Release|Any CPU + {79056784-D88C-47C2-B49D-1A25D58FC03B}.Release|x64.Build.0 = Release|Any CPU + {79056784-D88C-47C2-B49D-1A25D58FC03B}.Release|x86.ActiveCfg = Release|Any CPU + {79056784-D88C-47C2-B49D-1A25D58FC03B}.Release|x86.Build.0 = Release|Any CPU + {C75036AF-D828-41D3-9322-F67828EF8FBB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C75036AF-D828-41D3-9322-F67828EF8FBB}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C75036AF-D828-41D3-9322-F67828EF8FBB}.Debug|x64.ActiveCfg = Debug|Any CPU + {C75036AF-D828-41D3-9322-F67828EF8FBB}.Debug|x64.Build.0 = Debug|Any CPU + {C75036AF-D828-41D3-9322-F67828EF8FBB}.Debug|x86.ActiveCfg = Debug|Any CPU + {C75036AF-D828-41D3-9322-F67828EF8FBB}.Debug|x86.Build.0 = Debug|Any CPU + {C75036AF-D828-41D3-9322-F67828EF8FBB}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C75036AF-D828-41D3-9322-F67828EF8FBB}.Release|Any CPU.Build.0 = Release|Any CPU + {C75036AF-D828-41D3-9322-F67828EF8FBB}.Release|x64.ActiveCfg = Release|Any CPU + {C75036AF-D828-41D3-9322-F67828EF8FBB}.Release|x64.Build.0 = Release|Any CPU + {C75036AF-D828-41D3-9322-F67828EF8FBB}.Release|x86.ActiveCfg = Release|Any CPU + {C75036AF-D828-41D3-9322-F67828EF8FBB}.Release|x86.Build.0 = Release|Any CPU + {643BF7A5-2CD1-4CBA-BC94-A1477AB21FC0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {643BF7A5-2CD1-4CBA-BC94-A1477AB21FC0}.Debug|Any CPU.Build.0 = Debug|Any CPU + {643BF7A5-2CD1-4CBA-BC94-A1477AB21FC0}.Debug|x64.ActiveCfg = Debug|Any CPU + {643BF7A5-2CD1-4CBA-BC94-A1477AB21FC0}.Debug|x64.Build.0 = Debug|Any CPU + {643BF7A5-2CD1-4CBA-BC94-A1477AB21FC0}.Debug|x86.ActiveCfg = Debug|Any CPU + {643BF7A5-2CD1-4CBA-BC94-A1477AB21FC0}.Debug|x86.Build.0 = Debug|Any CPU + {643BF7A5-2CD1-4CBA-BC94-A1477AB21FC0}.Release|Any CPU.ActiveCfg = Release|Any CPU + {643BF7A5-2CD1-4CBA-BC94-A1477AB21FC0}.Release|Any CPU.Build.0 = Release|Any CPU + {643BF7A5-2CD1-4CBA-BC94-A1477AB21FC0}.Release|x64.ActiveCfg = Release|Any CPU + {643BF7A5-2CD1-4CBA-BC94-A1477AB21FC0}.Release|x64.Build.0 = Release|Any CPU + {643BF7A5-2CD1-4CBA-BC94-A1477AB21FC0}.Release|x86.ActiveCfg = Release|Any CPU + {643BF7A5-2CD1-4CBA-BC94-A1477AB21FC0}.Release|x86.Build.0 = Release|Any CPU + {50B53195-F0DD-4DCE-95A7-0949C13D706B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {50B53195-F0DD-4DCE-95A7-0949C13D706B}.Debug|Any CPU.Build.0 = Debug|Any CPU + {50B53195-F0DD-4DCE-95A7-0949C13D706B}.Debug|x64.ActiveCfg = Debug|Any CPU + {50B53195-F0DD-4DCE-95A7-0949C13D706B}.Debug|x64.Build.0 = Debug|Any CPU + {50B53195-F0DD-4DCE-95A7-0949C13D706B}.Debug|x86.ActiveCfg = Debug|Any CPU + {50B53195-F0DD-4DCE-95A7-0949C13D706B}.Debug|x86.Build.0 = Debug|Any CPU + {50B53195-F0DD-4DCE-95A7-0949C13D706B}.Release|Any CPU.ActiveCfg = Release|Any CPU + {50B53195-F0DD-4DCE-95A7-0949C13D706B}.Release|Any CPU.Build.0 = Release|Any CPU + {50B53195-F0DD-4DCE-95A7-0949C13D706B}.Release|x64.ActiveCfg = Release|Any CPU + 
{50B53195-F0DD-4DCE-95A7-0949C13D706B}.Release|x64.Build.0 = Release|Any CPU + {50B53195-F0DD-4DCE-95A7-0949C13D706B}.Release|x86.ActiveCfg = Release|Any CPU + {50B53195-F0DD-4DCE-95A7-0949C13D706B}.Release|x86.Build.0 = Release|Any CPU + {D2CD82C4-0D40-4316-A83D-FCC5D715DE95}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D2CD82C4-0D40-4316-A83D-FCC5D715DE95}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D2CD82C4-0D40-4316-A83D-FCC5D715DE95}.Debug|x64.ActiveCfg = Debug|Any CPU + {D2CD82C4-0D40-4316-A83D-FCC5D715DE95}.Debug|x64.Build.0 = Debug|Any CPU + {D2CD82C4-0D40-4316-A83D-FCC5D715DE95}.Debug|x86.ActiveCfg = Debug|Any CPU + {D2CD82C4-0D40-4316-A83D-FCC5D715DE95}.Debug|x86.Build.0 = Debug|Any CPU + {D2CD82C4-0D40-4316-A83D-FCC5D715DE95}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D2CD82C4-0D40-4316-A83D-FCC5D715DE95}.Release|Any CPU.Build.0 = Release|Any CPU + {D2CD82C4-0D40-4316-A83D-FCC5D715DE95}.Release|x64.ActiveCfg = Release|Any CPU + {D2CD82C4-0D40-4316-A83D-FCC5D715DE95}.Release|x64.Build.0 = Release|Any CPU + {D2CD82C4-0D40-4316-A83D-FCC5D715DE95}.Release|x86.ActiveCfg = Release|Any CPU + {D2CD82C4-0D40-4316-A83D-FCC5D715DE95}.Release|x86.Build.0 = Release|Any CPU + {E553CAFD-794B-437C-ABCC-C780DC1ADF3C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E553CAFD-794B-437C-ABCC-C780DC1ADF3C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E553CAFD-794B-437C-ABCC-C780DC1ADF3C}.Debug|x64.ActiveCfg = Debug|Any CPU + {E553CAFD-794B-437C-ABCC-C780DC1ADF3C}.Debug|x64.Build.0 = Debug|Any CPU + {E553CAFD-794B-437C-ABCC-C780DC1ADF3C}.Debug|x86.ActiveCfg = Debug|Any CPU + {E553CAFD-794B-437C-ABCC-C780DC1ADF3C}.Debug|x86.Build.0 = Debug|Any CPU + {E553CAFD-794B-437C-ABCC-C780DC1ADF3C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E553CAFD-794B-437C-ABCC-C780DC1ADF3C}.Release|Any CPU.Build.0 = Release|Any CPU + {E553CAFD-794B-437C-ABCC-C780DC1ADF3C}.Release|x64.ActiveCfg = Release|Any CPU + {E553CAFD-794B-437C-ABCC-C780DC1ADF3C}.Release|x64.Build.0 = Release|Any CPU + {E553CAFD-794B-437C-ABCC-C780DC1ADF3C}.Release|x86.ActiveCfg = Release|Any CPU + {E553CAFD-794B-437C-ABCC-C780DC1ADF3C}.Release|x86.Build.0 = Release|Any CPU + {E3DD0BB0-C4C6-4A56-A46E-45870851FB3D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E3DD0BB0-C4C6-4A56-A46E-45870851FB3D}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E3DD0BB0-C4C6-4A56-A46E-45870851FB3D}.Debug|x64.ActiveCfg = Debug|Any CPU + {E3DD0BB0-C4C6-4A56-A46E-45870851FB3D}.Debug|x64.Build.0 = Debug|Any CPU + {E3DD0BB0-C4C6-4A56-A46E-45870851FB3D}.Debug|x86.ActiveCfg = Debug|Any CPU + {E3DD0BB0-C4C6-4A56-A46E-45870851FB3D}.Debug|x86.Build.0 = Debug|Any CPU + {E3DD0BB0-C4C6-4A56-A46E-45870851FB3D}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E3DD0BB0-C4C6-4A56-A46E-45870851FB3D}.Release|Any CPU.Build.0 = Release|Any CPU + {E3DD0BB0-C4C6-4A56-A46E-45870851FB3D}.Release|x64.ActiveCfg = Release|Any CPU + {E3DD0BB0-C4C6-4A56-A46E-45870851FB3D}.Release|x64.Build.0 = Release|Any CPU + {E3DD0BB0-C4C6-4A56-A46E-45870851FB3D}.Release|x86.ActiveCfg = Release|Any CPU + {E3DD0BB0-C4C6-4A56-A46E-45870851FB3D}.Release|x86.Build.0 = Release|Any CPU + {111BEB1A-8664-4AA6-8275-7440F33E79C9}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {111BEB1A-8664-4AA6-8275-7440F33E79C9}.Debug|Any CPU.Build.0 = Debug|Any CPU + {111BEB1A-8664-4AA6-8275-7440F33E79C9}.Debug|x64.ActiveCfg = Debug|Any CPU + {111BEB1A-8664-4AA6-8275-7440F33E79C9}.Debug|x64.Build.0 = Debug|Any CPU + {111BEB1A-8664-4AA6-8275-7440F33E79C9}.Debug|x86.ActiveCfg = Debug|Any CPU + {111BEB1A-8664-4AA6-8275-7440F33E79C9}.Debug|x86.Build.0 = Debug|Any 
CPU + {111BEB1A-8664-4AA6-8275-7440F33E79C9}.Release|Any CPU.ActiveCfg = Release|Any CPU + {111BEB1A-8664-4AA6-8275-7440F33E79C9}.Release|Any CPU.Build.0 = Release|Any CPU + {111BEB1A-8664-4AA6-8275-7440F33E79C9}.Release|x64.ActiveCfg = Release|Any CPU + {111BEB1A-8664-4AA6-8275-7440F33E79C9}.Release|x64.Build.0 = Release|Any CPU + {111BEB1A-8664-4AA6-8275-7440F33E79C9}.Release|x86.ActiveCfg = Release|Any CPU + {111BEB1A-8664-4AA6-8275-7440F33E79C9}.Release|x86.Build.0 = Release|Any CPU + {26B663A0-404C-4D0C-9687-17079CDFFEBF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {26B663A0-404C-4D0C-9687-17079CDFFEBF}.Debug|Any CPU.Build.0 = Debug|Any CPU + {26B663A0-404C-4D0C-9687-17079CDFFEBF}.Debug|x64.ActiveCfg = Debug|Any CPU + {26B663A0-404C-4D0C-9687-17079CDFFEBF}.Debug|x64.Build.0 = Debug|Any CPU + {26B663A0-404C-4D0C-9687-17079CDFFEBF}.Debug|x86.ActiveCfg = Debug|Any CPU + {26B663A0-404C-4D0C-9687-17079CDFFEBF}.Debug|x86.Build.0 = Debug|Any CPU + {26B663A0-404C-4D0C-9687-17079CDFFEBF}.Release|Any CPU.ActiveCfg = Release|Any CPU + {26B663A0-404C-4D0C-9687-17079CDFFEBF}.Release|Any CPU.Build.0 = Release|Any CPU + {26B663A0-404C-4D0C-9687-17079CDFFEBF}.Release|x64.ActiveCfg = Release|Any CPU + {26B663A0-404C-4D0C-9687-17079CDFFEBF}.Release|x64.Build.0 = Release|Any CPU + {26B663A0-404C-4D0C-9687-17079CDFFEBF}.Release|x86.ActiveCfg = Release|Any CPU + {26B663A0-404C-4D0C-9687-17079CDFFEBF}.Release|x86.Build.0 = Release|Any CPU + {BE9C0870-1912-4EF5-8C6D-BFF42F235F4E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {BE9C0870-1912-4EF5-8C6D-BFF42F235F4E}.Debug|Any CPU.Build.0 = Debug|Any CPU + {BE9C0870-1912-4EF5-8C6D-BFF42F235F4E}.Debug|x64.ActiveCfg = Debug|Any CPU + {BE9C0870-1912-4EF5-8C6D-BFF42F235F4E}.Debug|x64.Build.0 = Debug|Any CPU + {BE9C0870-1912-4EF5-8C6D-BFF42F235F4E}.Debug|x86.ActiveCfg = Debug|Any CPU + {BE9C0870-1912-4EF5-8C6D-BFF42F235F4E}.Debug|x86.Build.0 = Debug|Any CPU + {BE9C0870-1912-4EF5-8C6D-BFF42F235F4E}.Release|Any CPU.ActiveCfg = Release|Any CPU + {BE9C0870-1912-4EF5-8C6D-BFF42F235F4E}.Release|Any CPU.Build.0 = Release|Any CPU + {BE9C0870-1912-4EF5-8C6D-BFF42F235F4E}.Release|x64.ActiveCfg = Release|Any CPU + {BE9C0870-1912-4EF5-8C6D-BFF42F235F4E}.Release|x64.Build.0 = Release|Any CPU + {BE9C0870-1912-4EF5-8C6D-BFF42F235F4E}.Release|x86.ActiveCfg = Release|Any CPU + {BE9C0870-1912-4EF5-8C6D-BFF42F235F4E}.Release|x86.Build.0 = Release|Any CPU + {86E49D28-9035-4EB4-8C7F-E3915C5A2046}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {86E49D28-9035-4EB4-8C7F-E3915C5A2046}.Debug|Any CPU.Build.0 = Debug|Any CPU + {86E49D28-9035-4EB4-8C7F-E3915C5A2046}.Debug|x64.ActiveCfg = Debug|Any CPU + {86E49D28-9035-4EB4-8C7F-E3915C5A2046}.Debug|x64.Build.0 = Debug|Any CPU + {86E49D28-9035-4EB4-8C7F-E3915C5A2046}.Debug|x86.ActiveCfg = Debug|Any CPU + {86E49D28-9035-4EB4-8C7F-E3915C5A2046}.Debug|x86.Build.0 = Debug|Any CPU + {86E49D28-9035-4EB4-8C7F-E3915C5A2046}.Release|Any CPU.ActiveCfg = Release|Any CPU + {86E49D28-9035-4EB4-8C7F-E3915C5A2046}.Release|Any CPU.Build.0 = Release|Any CPU + {86E49D28-9035-4EB4-8C7F-E3915C5A2046}.Release|x64.ActiveCfg = Release|Any CPU + {86E49D28-9035-4EB4-8C7F-E3915C5A2046}.Release|x64.Build.0 = Release|Any CPU + {86E49D28-9035-4EB4-8C7F-E3915C5A2046}.Release|x86.ActiveCfg = Release|Any CPU + {86E49D28-9035-4EB4-8C7F-E3915C5A2046}.Release|x86.Build.0 = Release|Any CPU + {67990ECE-E2D4-4BC4-8F05-734E02379F23}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {67990ECE-E2D4-4BC4-8F05-734E02379F23}.Debug|Any CPU.Build.0 = Debug|Any CPU + 
{67990ECE-E2D4-4BC4-8F05-734E02379F23}.Debug|x64.ActiveCfg = Debug|Any CPU + {67990ECE-E2D4-4BC4-8F05-734E02379F23}.Debug|x64.Build.0 = Debug|Any CPU + {67990ECE-E2D4-4BC4-8F05-734E02379F23}.Debug|x86.ActiveCfg = Debug|Any CPU + {67990ECE-E2D4-4BC4-8F05-734E02379F23}.Debug|x86.Build.0 = Debug|Any CPU + {67990ECE-E2D4-4BC4-8F05-734E02379F23}.Release|Any CPU.ActiveCfg = Release|Any CPU + {67990ECE-E2D4-4BC4-8F05-734E02379F23}.Release|Any CPU.Build.0 = Release|Any CPU + {67990ECE-E2D4-4BC4-8F05-734E02379F23}.Release|x64.ActiveCfg = Release|Any CPU + {67990ECE-E2D4-4BC4-8F05-734E02379F23}.Release|x64.Build.0 = Release|Any CPU + {67990ECE-E2D4-4BC4-8F05-734E02379F23}.Release|x86.ActiveCfg = Release|Any CPU + {67990ECE-E2D4-4BC4-8F05-734E02379F23}.Release|x86.Build.0 = Release|Any CPU + {35DF0F52-8BEE-4969-B7F3-54CFF4AFAD18}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {35DF0F52-8BEE-4969-B7F3-54CFF4AFAD18}.Debug|Any CPU.Build.0 = Debug|Any CPU + {35DF0F52-8BEE-4969-B7F3-54CFF4AFAD18}.Debug|x64.ActiveCfg = Debug|Any CPU + {35DF0F52-8BEE-4969-B7F3-54CFF4AFAD18}.Debug|x64.Build.0 = Debug|Any CPU + {35DF0F52-8BEE-4969-B7F3-54CFF4AFAD18}.Debug|x86.ActiveCfg = Debug|Any CPU + {35DF0F52-8BEE-4969-B7F3-54CFF4AFAD18}.Debug|x86.Build.0 = Debug|Any CPU + {35DF0F52-8BEE-4969-B7F3-54CFF4AFAD18}.Release|Any CPU.ActiveCfg = Release|Any CPU + {35DF0F52-8BEE-4969-B7F3-54CFF4AFAD18}.Release|Any CPU.Build.0 = Release|Any CPU + {35DF0F52-8BEE-4969-B7F3-54CFF4AFAD18}.Release|x64.ActiveCfg = Release|Any CPU + {35DF0F52-8BEE-4969-B7F3-54CFF4AFAD18}.Release|x64.Build.0 = Release|Any CPU + {35DF0F52-8BEE-4969-B7F3-54CFF4AFAD18}.Release|x86.ActiveCfg = Release|Any CPU + {35DF0F52-8BEE-4969-B7F3-54CFF4AFAD18}.Release|x86.Build.0 = Release|Any CPU + {EBC3B08D-11E7-4286-940F-27305028148E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {EBC3B08D-11E7-4286-940F-27305028148E}.Debug|Any CPU.Build.0 = Debug|Any CPU + {EBC3B08D-11E7-4286-940F-27305028148E}.Debug|x64.ActiveCfg = Debug|Any CPU + {EBC3B08D-11E7-4286-940F-27305028148E}.Debug|x64.Build.0 = Debug|Any CPU + {EBC3B08D-11E7-4286-940F-27305028148E}.Debug|x86.ActiveCfg = Debug|Any CPU + {EBC3B08D-11E7-4286-940F-27305028148E}.Debug|x86.Build.0 = Debug|Any CPU + {EBC3B08D-11E7-4286-940F-27305028148E}.Release|Any CPU.ActiveCfg = Release|Any CPU + {EBC3B08D-11E7-4286-940F-27305028148E}.Release|Any CPU.Build.0 = Release|Any CPU + {EBC3B08D-11E7-4286-940F-27305028148E}.Release|x64.ActiveCfg = Release|Any CPU + {EBC3B08D-11E7-4286-940F-27305028148E}.Release|x64.Build.0 = Release|Any CPU + {EBC3B08D-11E7-4286-940F-27305028148E}.Release|x86.ActiveCfg = Release|Any CPU + {EBC3B08D-11E7-4286-940F-27305028148E}.Release|x86.Build.0 = Release|Any CPU + {640E732E-01C7-4A7E-9AE1-35117B26AB1E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {640E732E-01C7-4A7E-9AE1-35117B26AB1E}.Debug|Any CPU.Build.0 = Debug|Any CPU + {640E732E-01C7-4A7E-9AE1-35117B26AB1E}.Debug|x64.ActiveCfg = Debug|Any CPU + {640E732E-01C7-4A7E-9AE1-35117B26AB1E}.Debug|x64.Build.0 = Debug|Any CPU + {640E732E-01C7-4A7E-9AE1-35117B26AB1E}.Debug|x86.ActiveCfg = Debug|Any CPU + {640E732E-01C7-4A7E-9AE1-35117B26AB1E}.Debug|x86.Build.0 = Debug|Any CPU + {640E732E-01C7-4A7E-9AE1-35117B26AB1E}.Release|Any CPU.ActiveCfg = Release|Any CPU + {640E732E-01C7-4A7E-9AE1-35117B26AB1E}.Release|Any CPU.Build.0 = Release|Any CPU + {640E732E-01C7-4A7E-9AE1-35117B26AB1E}.Release|x64.ActiveCfg = Release|Any CPU + {640E732E-01C7-4A7E-9AE1-35117B26AB1E}.Release|x64.Build.0 = Release|Any CPU + {640E732E-01C7-4A7E-9AE1-35117B26AB1E}.Release|x86.ActiveCfg = 
Release|Any CPU + {640E732E-01C7-4A7E-9AE1-35117B26AB1E}.Release|x86.Build.0 = Release|Any CPU + {ADFC7CC7-D079-43A1-833C-7E3775184EB6}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {ADFC7CC7-D079-43A1-833C-7E3775184EB6}.Debug|Any CPU.Build.0 = Debug|Any CPU + {ADFC7CC7-D079-43A1-833C-7E3775184EB6}.Debug|x64.ActiveCfg = Debug|Any CPU + {ADFC7CC7-D079-43A1-833C-7E3775184EB6}.Debug|x64.Build.0 = Debug|Any CPU + {ADFC7CC7-D079-43A1-833C-7E3775184EB6}.Debug|x86.ActiveCfg = Debug|Any CPU + {ADFC7CC7-D079-43A1-833C-7E3775184EB6}.Debug|x86.Build.0 = Debug|Any CPU + {ADFC7CC7-D079-43A1-833C-7E3775184EB6}.Release|Any CPU.ActiveCfg = Release|Any CPU + {ADFC7CC7-D079-43A1-833C-7E3775184EB6}.Release|Any CPU.Build.0 = Release|Any CPU + {ADFC7CC7-D079-43A1-833C-7E3775184EB6}.Release|x64.ActiveCfg = Release|Any CPU + {ADFC7CC7-D079-43A1-833C-7E3775184EB6}.Release|x64.Build.0 = Release|Any CPU + {ADFC7CC7-D079-43A1-833C-7E3775184EB6}.Release|x86.ActiveCfg = Release|Any CPU + {ADFC7CC7-D079-43A1-833C-7E3775184EB6}.Release|x86.Build.0 = Release|Any CPU + {152EC0B1-8312-40F7-AF96-16B8E6AABA52}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {152EC0B1-8312-40F7-AF96-16B8E6AABA52}.Debug|Any CPU.Build.0 = Debug|Any CPU + {152EC0B1-8312-40F7-AF96-16B8E6AABA52}.Debug|x64.ActiveCfg = Debug|Any CPU + {152EC0B1-8312-40F7-AF96-16B8E6AABA52}.Debug|x64.Build.0 = Debug|Any CPU + {152EC0B1-8312-40F7-AF96-16B8E6AABA52}.Debug|x86.ActiveCfg = Debug|Any CPU + {152EC0B1-8312-40F7-AF96-16B8E6AABA52}.Debug|x86.Build.0 = Debug|Any CPU + {152EC0B1-8312-40F7-AF96-16B8E6AABA52}.Release|Any CPU.ActiveCfg = Release|Any CPU + {152EC0B1-8312-40F7-AF96-16B8E6AABA52}.Release|Any CPU.Build.0 = Release|Any CPU + {152EC0B1-8312-40F7-AF96-16B8E6AABA52}.Release|x64.ActiveCfg = Release|Any CPU + {152EC0B1-8312-40F7-AF96-16B8E6AABA52}.Release|x64.Build.0 = Release|Any CPU + {152EC0B1-8312-40F7-AF96-16B8E6AABA52}.Release|x86.ActiveCfg = Release|Any CPU + {152EC0B1-8312-40F7-AF96-16B8E6AABA52}.Release|x86.Build.0 = Release|Any CPU + {1DFD7A8F-075A-4507-AC7C-EF867F4AEA92}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {1DFD7A8F-075A-4507-AC7C-EF867F4AEA92}.Debug|Any CPU.Build.0 = Debug|Any CPU + {1DFD7A8F-075A-4507-AC7C-EF867F4AEA92}.Debug|x64.ActiveCfg = Debug|Any CPU + {1DFD7A8F-075A-4507-AC7C-EF867F4AEA92}.Debug|x64.Build.0 = Debug|Any CPU + {1DFD7A8F-075A-4507-AC7C-EF867F4AEA92}.Debug|x86.ActiveCfg = Debug|Any CPU + {1DFD7A8F-075A-4507-AC7C-EF867F4AEA92}.Debug|x86.Build.0 = Debug|Any CPU + {1DFD7A8F-075A-4507-AC7C-EF867F4AEA92}.Release|Any CPU.ActiveCfg = Release|Any CPU + {1DFD7A8F-075A-4507-AC7C-EF867F4AEA92}.Release|Any CPU.Build.0 = Release|Any CPU + {1DFD7A8F-075A-4507-AC7C-EF867F4AEA92}.Release|x64.ActiveCfg = Release|Any CPU + {1DFD7A8F-075A-4507-AC7C-EF867F4AEA92}.Release|x64.Build.0 = Release|Any CPU + {1DFD7A8F-075A-4507-AC7C-EF867F4AEA92}.Release|x86.ActiveCfg = Release|Any CPU + {1DFD7A8F-075A-4507-AC7C-EF867F4AEA92}.Release|x86.Build.0 = Release|Any CPU + {43BA0A53-6806-41BA-9C2B-FE781BBCE85B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {43BA0A53-6806-41BA-9C2B-FE781BBCE85B}.Debug|Any CPU.Build.0 = Debug|Any CPU + {43BA0A53-6806-41BA-9C2B-FE781BBCE85B}.Debug|x64.ActiveCfg = Debug|Any CPU + {43BA0A53-6806-41BA-9C2B-FE781BBCE85B}.Debug|x64.Build.0 = Debug|Any CPU + {43BA0A53-6806-41BA-9C2B-FE781BBCE85B}.Debug|x86.ActiveCfg = Debug|Any CPU + {43BA0A53-6806-41BA-9C2B-FE781BBCE85B}.Debug|x86.Build.0 = Debug|Any CPU + {43BA0A53-6806-41BA-9C2B-FE781BBCE85B}.Release|Any CPU.ActiveCfg = Release|Any CPU + {43BA0A53-6806-41BA-9C2B-FE781BBCE85B}.Release|Any 
CPU.Build.0 = Release|Any CPU + {43BA0A53-6806-41BA-9C2B-FE781BBCE85B}.Release|x64.ActiveCfg = Release|Any CPU + {43BA0A53-6806-41BA-9C2B-FE781BBCE85B}.Release|x64.Build.0 = Release|Any CPU + {43BA0A53-6806-41BA-9C2B-FE781BBCE85B}.Release|x86.ActiveCfg = Release|Any CPU + {43BA0A53-6806-41BA-9C2B-FE781BBCE85B}.Release|x86.Build.0 = Release|Any CPU + {E93FE8CE-28A6-4C7E-96ED-D99406653FDC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E93FE8CE-28A6-4C7E-96ED-D99406653FDC}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E93FE8CE-28A6-4C7E-96ED-D99406653FDC}.Debug|x64.ActiveCfg = Debug|Any CPU + {E93FE8CE-28A6-4C7E-96ED-D99406653FDC}.Debug|x64.Build.0 = Debug|Any CPU + {E93FE8CE-28A6-4C7E-96ED-D99406653FDC}.Debug|x86.ActiveCfg = Debug|Any CPU + {E93FE8CE-28A6-4C7E-96ED-D99406653FDC}.Debug|x86.Build.0 = Debug|Any CPU + {E93FE8CE-28A6-4C7E-96ED-D99406653FDC}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E93FE8CE-28A6-4C7E-96ED-D99406653FDC}.Release|Any CPU.Build.0 = Release|Any CPU + {E93FE8CE-28A6-4C7E-96ED-D99406653FDC}.Release|x64.ActiveCfg = Release|Any CPU + {E93FE8CE-28A6-4C7E-96ED-D99406653FDC}.Release|x64.Build.0 = Release|Any CPU + {E93FE8CE-28A6-4C7E-96ED-D99406653FDC}.Release|x86.ActiveCfg = Release|Any CPU + {E93FE8CE-28A6-4C7E-96ED-D99406653FDC}.Release|x86.Build.0 = Release|Any CPU + {E83FC97E-B88E-4BE5-89D1-12C01631F575}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E83FC97E-B88E-4BE5-89D1-12C01631F575}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E83FC97E-B88E-4BE5-89D1-12C01631F575}.Debug|x64.ActiveCfg = Debug|Any CPU + {E83FC97E-B88E-4BE5-89D1-12C01631F575}.Debug|x64.Build.0 = Debug|Any CPU + {E83FC97E-B88E-4BE5-89D1-12C01631F575}.Debug|x86.ActiveCfg = Debug|Any CPU + {E83FC97E-B88E-4BE5-89D1-12C01631F575}.Debug|x86.Build.0 = Debug|Any CPU + {E83FC97E-B88E-4BE5-89D1-12C01631F575}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E83FC97E-B88E-4BE5-89D1-12C01631F575}.Release|Any CPU.Build.0 = Release|Any CPU + {E83FC97E-B88E-4BE5-89D1-12C01631F575}.Release|x64.ActiveCfg = Release|Any CPU + {E83FC97E-B88E-4BE5-89D1-12C01631F575}.Release|x64.Build.0 = Release|Any CPU + {E83FC97E-B88E-4BE5-89D1-12C01631F575}.Release|x86.ActiveCfg = Release|Any CPU + {E83FC97E-B88E-4BE5-89D1-12C01631F575}.Release|x86.Build.0 = Release|Any CPU + {832F539E-17FC-46B4-9E67-39BE5131352D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {832F539E-17FC-46B4-9E67-39BE5131352D}.Debug|Any CPU.Build.0 = Debug|Any CPU + {832F539E-17FC-46B4-9E67-39BE5131352D}.Debug|x64.ActiveCfg = Debug|Any CPU + {832F539E-17FC-46B4-9E67-39BE5131352D}.Debug|x64.Build.0 = Debug|Any CPU + {832F539E-17FC-46B4-9E67-39BE5131352D}.Debug|x86.ActiveCfg = Debug|Any CPU + {832F539E-17FC-46B4-9E67-39BE5131352D}.Debug|x86.Build.0 = Debug|Any CPU + {832F539E-17FC-46B4-9E67-39BE5131352D}.Release|Any CPU.ActiveCfg = Release|Any CPU + {832F539E-17FC-46B4-9E67-39BE5131352D}.Release|Any CPU.Build.0 = Release|Any CPU + {832F539E-17FC-46B4-9E67-39BE5131352D}.Release|x64.ActiveCfg = Release|Any CPU + {832F539E-17FC-46B4-9E67-39BE5131352D}.Release|x64.Build.0 = Release|Any CPU + {832F539E-17FC-46B4-9E67-39BE5131352D}.Release|x86.ActiveCfg = Release|Any CPU + {832F539E-17FC-46B4-9E67-39BE5131352D}.Release|x86.Build.0 = Release|Any CPU + {5BB6E9E8-3470-4BFF-94DD-DA3294616C39}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {5BB6E9E8-3470-4BFF-94DD-DA3294616C39}.Debug|Any CPU.Build.0 = Debug|Any CPU + {5BB6E9E8-3470-4BFF-94DD-DA3294616C39}.Debug|x64.ActiveCfg = Debug|Any CPU + {5BB6E9E8-3470-4BFF-94DD-DA3294616C39}.Debug|x64.Build.0 = Debug|Any CPU + 
{5BB6E9E8-3470-4BFF-94DD-DA3294616C39}.Debug|x86.ActiveCfg = Debug|Any CPU + {5BB6E9E8-3470-4BFF-94DD-DA3294616C39}.Debug|x86.Build.0 = Debug|Any CPU + {5BB6E9E8-3470-4BFF-94DD-DA3294616C39}.Release|Any CPU.ActiveCfg = Release|Any CPU + {5BB6E9E8-3470-4BFF-94DD-DA3294616C39}.Release|Any CPU.Build.0 = Release|Any CPU + {5BB6E9E8-3470-4BFF-94DD-DA3294616C39}.Release|x64.ActiveCfg = Release|Any CPU + {5BB6E9E8-3470-4BFF-94DD-DA3294616C39}.Release|x64.Build.0 = Release|Any CPU + {5BB6E9E8-3470-4BFF-94DD-DA3294616C39}.Release|x86.ActiveCfg = Release|Any CPU + {5BB6E9E8-3470-4BFF-94DD-DA3294616C39}.Release|x86.Build.0 = Release|Any CPU + {6507860E-BF0D-4E32-A6AC-49E1CE15E4B7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {6507860E-BF0D-4E32-A6AC-49E1CE15E4B7}.Debug|Any CPU.Build.0 = Debug|Any CPU + {6507860E-BF0D-4E32-A6AC-49E1CE15E4B7}.Debug|x64.ActiveCfg = Debug|Any CPU + {6507860E-BF0D-4E32-A6AC-49E1CE15E4B7}.Debug|x64.Build.0 = Debug|Any CPU + {6507860E-BF0D-4E32-A6AC-49E1CE15E4B7}.Debug|x86.ActiveCfg = Debug|Any CPU + {6507860E-BF0D-4E32-A6AC-49E1CE15E4B7}.Debug|x86.Build.0 = Debug|Any CPU + {6507860E-BF0D-4E32-A6AC-49E1CE15E4B7}.Release|Any CPU.ActiveCfg = Release|Any CPU + {6507860E-BF0D-4E32-A6AC-49E1CE15E4B7}.Release|Any CPU.Build.0 = Release|Any CPU + {6507860E-BF0D-4E32-A6AC-49E1CE15E4B7}.Release|x64.ActiveCfg = Release|Any CPU + {6507860E-BF0D-4E32-A6AC-49E1CE15E4B7}.Release|x64.Build.0 = Release|Any CPU + {6507860E-BF0D-4E32-A6AC-49E1CE15E4B7}.Release|x86.ActiveCfg = Release|Any CPU + {6507860E-BF0D-4E32-A6AC-49E1CE15E4B7}.Release|x86.Build.0 = Release|Any CPU + {D6014A0A-6BF4-45C8-918E-9558A24AAC5B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D6014A0A-6BF4-45C8-918E-9558A24AAC5B}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D6014A0A-6BF4-45C8-918E-9558A24AAC5B}.Debug|x64.ActiveCfg = Debug|Any CPU + {D6014A0A-6BF4-45C8-918E-9558A24AAC5B}.Debug|x64.Build.0 = Debug|Any CPU + {D6014A0A-6BF4-45C8-918E-9558A24AAC5B}.Debug|x86.ActiveCfg = Debug|Any CPU + {D6014A0A-6BF4-45C8-918E-9558A24AAC5B}.Debug|x86.Build.0 = Debug|Any CPU + {D6014A0A-6BF4-45C8-918E-9558A24AAC5B}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D6014A0A-6BF4-45C8-918E-9558A24AAC5B}.Release|Any CPU.Build.0 = Release|Any CPU + {D6014A0A-6BF4-45C8-918E-9558A24AAC5B}.Release|x64.ActiveCfg = Release|Any CPU + {D6014A0A-6BF4-45C8-918E-9558A24AAC5B}.Release|x64.Build.0 = Release|Any CPU + {D6014A0A-6BF4-45C8-918E-9558A24AAC5B}.Release|x86.ActiveCfg = Release|Any CPU + {D6014A0A-6BF4-45C8-918E-9558A24AAC5B}.Release|x86.Build.0 = Release|Any CPU + {13AF13D1-84C3-4D4F-B89A-0653102C3E63}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {13AF13D1-84C3-4D4F-B89A-0653102C3E63}.Debug|Any CPU.Build.0 = Debug|Any CPU + {13AF13D1-84C3-4D4F-B89A-0653102C3E63}.Debug|x64.ActiveCfg = Debug|Any CPU + {13AF13D1-84C3-4D4F-B89A-0653102C3E63}.Debug|x64.Build.0 = Debug|Any CPU + {13AF13D1-84C3-4D4F-B89A-0653102C3E63}.Debug|x86.ActiveCfg = Debug|Any CPU + {13AF13D1-84C3-4D4F-B89A-0653102C3E63}.Debug|x86.Build.0 = Debug|Any CPU + {13AF13D1-84C3-4D4F-B89A-0653102C3E63}.Release|Any CPU.ActiveCfg = Release|Any CPU + {13AF13D1-84C3-4D4F-B89A-0653102C3E63}.Release|Any CPU.Build.0 = Release|Any CPU + {13AF13D1-84C3-4D4F-B89A-0653102C3E63}.Release|x64.ActiveCfg = Release|Any CPU + {13AF13D1-84C3-4D4F-B89A-0653102C3E63}.Release|x64.Build.0 = Release|Any CPU + {13AF13D1-84C3-4D4F-B89A-0653102C3E63}.Release|x86.ActiveCfg = Release|Any CPU + {13AF13D1-84C3-4D4F-B89A-0653102C3E63}.Release|x86.Build.0 = Release|Any CPU + {79304AC3-6A2E-454B-A0FF-F656D2D75538}.Debug|Any 
CPU.ActiveCfg = Debug|Any CPU + {79304AC3-6A2E-454B-A0FF-F656D2D75538}.Debug|Any CPU.Build.0 = Debug|Any CPU + {79304AC3-6A2E-454B-A0FF-F656D2D75538}.Debug|x64.ActiveCfg = Debug|Any CPU + {79304AC3-6A2E-454B-A0FF-F656D2D75538}.Debug|x64.Build.0 = Debug|Any CPU + {79304AC3-6A2E-454B-A0FF-F656D2D75538}.Debug|x86.ActiveCfg = Debug|Any CPU + {79304AC3-6A2E-454B-A0FF-F656D2D75538}.Debug|x86.Build.0 = Debug|Any CPU + {79304AC3-6A2E-454B-A0FF-F656D2D75538}.Release|Any CPU.ActiveCfg = Release|Any CPU + {79304AC3-6A2E-454B-A0FF-F656D2D75538}.Release|Any CPU.Build.0 = Release|Any CPU + {79304AC3-6A2E-454B-A0FF-F656D2D75538}.Release|x64.ActiveCfg = Release|Any CPU + {79304AC3-6A2E-454B-A0FF-F656D2D75538}.Release|x64.Build.0 = Release|Any CPU + {79304AC3-6A2E-454B-A0FF-F656D2D75538}.Release|x86.ActiveCfg = Release|Any CPU + {79304AC3-6A2E-454B-A0FF-F656D2D75538}.Release|x86.Build.0 = Release|Any CPU + {A1007C02-2143-48C6-8380-E3785AF3002D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {A1007C02-2143-48C6-8380-E3785AF3002D}.Debug|Any CPU.Build.0 = Debug|Any CPU + {A1007C02-2143-48C6-8380-E3785AF3002D}.Debug|x64.ActiveCfg = Debug|Any CPU + {A1007C02-2143-48C6-8380-E3785AF3002D}.Debug|x64.Build.0 = Debug|Any CPU + {A1007C02-2143-48C6-8380-E3785AF3002D}.Debug|x86.ActiveCfg = Debug|Any CPU + {A1007C02-2143-48C6-8380-E3785AF3002D}.Debug|x86.Build.0 = Debug|Any CPU + {A1007C02-2143-48C6-8380-E3785AF3002D}.Release|Any CPU.ActiveCfg = Release|Any CPU + {A1007C02-2143-48C6-8380-E3785AF3002D}.Release|Any CPU.Build.0 = Release|Any CPU + {A1007C02-2143-48C6-8380-E3785AF3002D}.Release|x64.ActiveCfg = Release|Any CPU + {A1007C02-2143-48C6-8380-E3785AF3002D}.Release|x64.Build.0 = Release|Any CPU + {A1007C02-2143-48C6-8380-E3785AF3002D}.Release|x86.ActiveCfg = Release|Any CPU + {A1007C02-2143-48C6-8380-E3785AF3002D}.Release|x86.Build.0 = Release|Any CPU + {3F51027B-F194-4321-AC7B-E00DA5CD47E3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {3F51027B-F194-4321-AC7B-E00DA5CD47E3}.Debug|Any CPU.Build.0 = Debug|Any CPU + {3F51027B-F194-4321-AC7B-E00DA5CD47E3}.Debug|x64.ActiveCfg = Debug|Any CPU + {3F51027B-F194-4321-AC7B-E00DA5CD47E3}.Debug|x64.Build.0 = Debug|Any CPU + {3F51027B-F194-4321-AC7B-E00DA5CD47E3}.Debug|x86.ActiveCfg = Debug|Any CPU + {3F51027B-F194-4321-AC7B-E00DA5CD47E3}.Debug|x86.Build.0 = Debug|Any CPU + {3F51027B-F194-4321-AC7B-E00DA5CD47E3}.Release|Any CPU.ActiveCfg = Release|Any CPU + {3F51027B-F194-4321-AC7B-E00DA5CD47E3}.Release|Any CPU.Build.0 = Release|Any CPU + {3F51027B-F194-4321-AC7B-E00DA5CD47E3}.Release|x64.ActiveCfg = Release|Any CPU + {3F51027B-F194-4321-AC7B-E00DA5CD47E3}.Release|x64.Build.0 = Release|Any CPU + {3F51027B-F194-4321-AC7B-E00DA5CD47E3}.Release|x86.ActiveCfg = Release|Any CPU + {3F51027B-F194-4321-AC7B-E00DA5CD47E3}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(NestedProjects) = preSolution + {87631154-82C3-43F6-8F41-46CB877AA16D} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {5858415D-8AB4-4E45-B316-580879FD8339} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {E8B20DD0-9282-4DFD-B363-F0AF7F62AED5} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {400690F2-466B-4DF0-B495-9015DBBAA046} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {16E426BF-8697-4DB1-ABC5-5537CDE74D95} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {2603B1D1-E1DE-4903-BEE2-DC593FE2A5C3} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {CC391919-15F5-43DE-8271-8043090B7D8D} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + 
{BB45DABD-1709-40C3-92B5-29C7AFFF9645} = {41F15E67-7190-CF23-3BC4-77E87134CADD}
+        {181B855F-FBD3-44B6-A679-15EC88E8625A} = {41F15E67-7190-CF23-3BC4-77E87134CADD}
+        {7E839AAE-99FF-4AFD-B986-520306AFA403} = {41F15E67-7190-CF23-3BC4-77E87134CADD}
+        {863DD74A-947C-431E-B661-9C2A46472CD0} = {41F15E67-7190-CF23-3BC4-77E87134CADD}
+        {C75036AF-D828-41D3-9322-F67828EF8FBB} = {41F15E67-7190-CF23-3BC4-77E87134CADD}
+        {643BF7A5-2CD1-4CBA-BC94-A1477AB21FC0} = {41F15E67-7190-CF23-3BC4-77E87134CADD}
+        {50B53195-F0DD-4DCE-95A7-0949C13D706B} = {41F15E67-7190-CF23-3BC4-77E87134CADD}
+        {D2CD82C4-0D40-4316-A83D-FCC5D715DE95} = {41F15E67-7190-CF23-3BC4-77E87134CADD}
+        {E553CAFD-794B-437C-ABCC-C780DC1ADF3C} = {41F15E67-7190-CF23-3BC4-77E87134CADD}
+        {E3DD0BB0-C4C6-4A56-A46E-45870851FB3D} = {41F15E67-7190-CF23-3BC4-77E87134CADD}
+        {111BEB1A-8664-4AA6-8275-7440F33E79C9} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
+        {26B663A0-404C-4D0C-9687-17079CDFFEBF} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
+        {BE9C0870-1912-4EF5-8C6D-BFF42F235F4E} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
+        {86E49D28-9035-4EB4-8C7F-E3915C5A2046} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
+        {67990ECE-E2D4-4BC4-8F05-734E02379F23} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
+        {35DF0F52-8BEE-4969-B7F3-54CFF4AFAD18} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
+        {EBC3B08D-11E7-4286-940F-27305028148E} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
+        {640E732E-01C7-4A7E-9AE1-35117B26AB1E} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
+        {ADFC7CC7-D079-43A1-833C-7E3775184EB6} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
+        {152EC0B1-8312-40F7-AF96-16B8E6AABA52} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
+        {1DFD7A8F-075A-4507-AC7C-EF867F4AEA92} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
+        {43BA0A53-6806-41BA-9C2B-FE781BBCE85B} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
+        {E93FE8CE-28A6-4C7E-96ED-D99406653FDC} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
+        {E83FC97E-B88E-4BE5-89D1-12C01631F575} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
+        {832F539E-17FC-46B4-9E67-39BE5131352D} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
+        {5BB6E9E8-3470-4BFF-94DD-DA3294616C39} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
+        {A1007C02-2143-48C6-8380-E3785AF3002D} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
+        {3F51027B-F194-4321-AC7B-E00DA5CD47E3} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642}
+    EndGlobalSection
+EndGlobal
diff --git a/src/StellaOps.Excititor.ArtifactStores.S3/Extensions/ServiceCollectionExtensions.cs b/src/Excititor/__Libraries/StellaOps.Excititor.ArtifactStores.S3/Extensions/ServiceCollectionExtensions.cs
similarity index 100%
rename from src/StellaOps.Excititor.ArtifactStores.S3/Extensions/ServiceCollectionExtensions.cs
rename to src/Excititor/__Libraries/StellaOps.Excititor.ArtifactStores.S3/Extensions/ServiceCollectionExtensions.cs
diff --git a/src/StellaOps.Excititor.ArtifactStores.S3/S3ArtifactClient.cs b/src/Excititor/__Libraries/StellaOps.Excititor.ArtifactStores.S3/S3ArtifactClient.cs
similarity index 100%
rename from src/StellaOps.Excititor.ArtifactStores.S3/S3ArtifactClient.cs
rename to src/Excititor/__Libraries/StellaOps.Excititor.ArtifactStores.S3/S3ArtifactClient.cs
diff --git a/src/StellaOps.Excititor.ArtifactStores.S3/StellaOps.Excititor.ArtifactStores.S3.csproj b/src/Excititor/__Libraries/StellaOps.Excititor.ArtifactStores.S3/StellaOps.Excititor.ArtifactStores.S3.csproj
similarity index 97%
rename from src/StellaOps.Excititor.ArtifactStores.S3/StellaOps.Excititor.ArtifactStores.S3.csproj
rename to src/Excititor/__Libraries/StellaOps.Excititor.ArtifactStores.S3/StellaOps.Excititor.ArtifactStores.S3.csproj
index a7250c2a..92adb1f3 100644
--- a/src/StellaOps.Excititor.ArtifactStores.S3/StellaOps.Excititor.ArtifactStores.S3.csproj
+++ b/src/Excititor/__Libraries/StellaOps.Excititor.ArtifactStores.S3/StellaOps.Excititor.ArtifactStores.S3.csproj
@@ -1,17 +1,17 @@
-<Project Sdk="Microsoft.NET.Sdk">
-  <PropertyGroup>
-    <TargetFramework>net10.0</TargetFramework>
-    <LangVersion>preview</LangVersion>
-    <Nullable>enable</Nullable>
-    <ImplicitUsings>enable</ImplicitUsings>
-    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
-  </PropertyGroup>
-  <ItemGroup>
-    <PackageReference Include="AWSSDK.S3" Version="3.7.305.6" />
-    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" />
-    <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" />
-  </ItemGroup>
-  <ItemGroup>
-    <ProjectReference Include="..\StellaOps.Excititor.Export\StellaOps.Excititor.Export.csproj" />
-  </ItemGroup>
-</Project>
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <TargetFramework>net10.0</TargetFramework>
+    <LangVersion>preview</LangVersion>
+    <Nullable>enable</Nullable>
+    <ImplicitUsings>enable</ImplicitUsings>
+    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
+  </PropertyGroup>
+  <ItemGroup>
+    <PackageReference Include="AWSSDK.S3" Version="3.7.305.6" />
+    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" />
+    <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" />
+  </ItemGroup>
+  <ItemGroup>
+    <ProjectReference Include="..\StellaOps.Excititor.Export\StellaOps.Excititor.Export.csproj" />
+  </ItemGroup>
+</Project>
diff --git a/src/StellaOps.Excititor.Attestation/AGENTS.md b/src/Excititor/__Libraries/StellaOps.Excititor.Attestation/AGENTS.md
similarity index 100%
rename from src/StellaOps.Excititor.Attestation/AGENTS.md
rename to src/Excititor/__Libraries/StellaOps.Excititor.Attestation/AGENTS.md
diff --git a/src/StellaOps.Excititor.Attestation/Dsse/DsseEnvelope.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Attestation/Dsse/DsseEnvelope.cs
similarity index 100%
rename from src/StellaOps.Excititor.Attestation/Dsse/DsseEnvelope.cs
rename to src/Excititor/__Libraries/StellaOps.Excititor.Attestation/Dsse/DsseEnvelope.cs
diff --git a/src/StellaOps.Excititor.Attestation/Dsse/VexDsseBuilder.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Attestation/Dsse/VexDsseBuilder.cs
similarity index 100%
rename from src/StellaOps.Excititor.Attestation/Dsse/VexDsseBuilder.cs
rename to src/Excititor/__Libraries/StellaOps.Excititor.Attestation/Dsse/VexDsseBuilder.cs
diff --git a/src/StellaOps.Excititor.Attestation/EXCITITOR-ATTEST-01-003-plan.md b/src/Excititor/__Libraries/StellaOps.Excititor.Attestation/EXCITITOR-ATTEST-01-003-plan.md
similarity index 100%
rename from src/StellaOps.Excititor.Attestation/EXCITITOR-ATTEST-01-003-plan.md
rename to src/Excititor/__Libraries/StellaOps.Excititor.Attestation/EXCITITOR-ATTEST-01-003-plan.md
diff --git a/src/StellaOps.Excititor.Attestation/Extensions/ServiceCollectionExtensions.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Attestation/Extensions/ServiceCollectionExtensions.cs
similarity index 100%
rename from src/StellaOps.Excititor.Attestation/Extensions/ServiceCollectionExtensions.cs
rename to src/Excititor/__Libraries/StellaOps.Excititor.Attestation/Extensions/ServiceCollectionExtensions.cs
diff --git
a/src/StellaOps.Excititor.Attestation/Models/VexAttestationPredicate.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Attestation/Models/VexAttestationPredicate.cs similarity index 100% rename from src/StellaOps.Excititor.Attestation/Models/VexAttestationPredicate.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Attestation/Models/VexAttestationPredicate.cs diff --git a/src/StellaOps.Excititor.Attestation/Signing/IVexSigner.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Attestation/Signing/IVexSigner.cs similarity index 100% rename from src/StellaOps.Excititor.Attestation/Signing/IVexSigner.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Attestation/Signing/IVexSigner.cs diff --git a/src/StellaOps.Excititor.Attestation/StellaOps.Excititor.Attestation.csproj b/src/Excititor/__Libraries/StellaOps.Excititor.Attestation/StellaOps.Excititor.Attestation.csproj similarity index 97% rename from src/StellaOps.Excititor.Attestation/StellaOps.Excititor.Attestation.csproj rename to src/Excititor/__Libraries/StellaOps.Excititor.Attestation/StellaOps.Excititor.Attestation.csproj index 9f1edfc8..f13e5e9f 100644 --- a/src/StellaOps.Excititor.Attestation/StellaOps.Excititor.Attestation.csproj +++ b/src/Excititor/__Libraries/StellaOps.Excititor.Attestation/StellaOps.Excititor.Attestation.csproj @@ -1,17 +1,17 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <ItemGroup> - <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0-rc.2.25502.107" /> - </ItemGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" /> - </ItemGroup> -</Project> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <ItemGroup> + <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0-rc.2.25502.107" /> + </ItemGroup> + <ItemGroup> + <ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" /> + </ItemGroup> +</Project> diff --git a/src/StellaOps.Excititor.Attestation/TASKS.md b/src/Excititor/__Libraries/StellaOps.Excititor.Attestation/TASKS.md similarity index 100% rename from src/StellaOps.Excititor.Attestation/TASKS.md rename to src/Excititor/__Libraries/StellaOps.Excititor.Attestation/TASKS.md diff --git a/src/StellaOps.Excititor.Attestation/Transparency/ITransparencyLogClient.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Attestation/Transparency/ITransparencyLogClient.cs similarity index 100% rename from src/StellaOps.Excititor.Attestation/Transparency/ITransparencyLogClient.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Attestation/Transparency/ITransparencyLogClient.cs diff --git 
a/src/StellaOps.Excititor.Attestation/Transparency/RekorHttpClient.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Attestation/Transparency/RekorHttpClient.cs similarity index 100% rename from src/StellaOps.Excititor.Attestation/Transparency/RekorHttpClient.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Attestation/Transparency/RekorHttpClient.cs diff --git a/src/StellaOps.Excititor.Attestation/Transparency/RekorHttpClientOptions.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Attestation/Transparency/RekorHttpClientOptions.cs similarity index 100% rename from src/StellaOps.Excititor.Attestation/Transparency/RekorHttpClientOptions.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Attestation/Transparency/RekorHttpClientOptions.cs diff --git a/src/StellaOps.Excititor.Attestation/Verification/IVexAttestationVerifier.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Attestation/Verification/IVexAttestationVerifier.cs similarity index 100% rename from src/StellaOps.Excititor.Attestation/Verification/IVexAttestationVerifier.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Attestation/Verification/IVexAttestationVerifier.cs diff --git a/src/StellaOps.Excititor.Attestation/Verification/VexAttestationMetrics.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Attestation/Verification/VexAttestationMetrics.cs similarity index 100% rename from src/StellaOps.Excititor.Attestation/Verification/VexAttestationMetrics.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Attestation/Verification/VexAttestationMetrics.cs diff --git a/src/StellaOps.Excititor.Attestation/Verification/VexAttestationVerificationOptions.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Attestation/Verification/VexAttestationVerificationOptions.cs similarity index 100% rename from src/StellaOps.Excititor.Attestation/Verification/VexAttestationVerificationOptions.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Attestation/Verification/VexAttestationVerificationOptions.cs diff --git a/src/StellaOps.Excititor.Attestation/Verification/VexAttestationVerifier.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Attestation/Verification/VexAttestationVerifier.cs similarity index 97% rename from src/StellaOps.Excititor.Attestation/Verification/VexAttestationVerifier.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Attestation/Verification/VexAttestationVerifier.cs index df8bb81f..c138ab81 100644 --- a/src/StellaOps.Excititor.Attestation/Verification/VexAttestationVerifier.cs +++ b/src/Excititor/__Libraries/StellaOps.Excititor.Attestation/Verification/VexAttestationVerifier.cs @@ -1,471 +1,471 @@ -using System; -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Diagnostics; -using System.Linq; -using System.Text; -using System.Text.Json; -using System.Text.Json.Serialization; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Excititor.Attestation.Dsse; -using StellaOps.Excititor.Attestation.Models; -using StellaOps.Excititor.Attestation.Transparency; -using StellaOps.Excititor.Core; - -namespace StellaOps.Excititor.Attestation.Verification; - -internal sealed class VexAttestationVerifier : IVexAttestationVerifier -{ - private static readonly JsonSerializerOptions EnvelopeSerializerOptions = new() - { - PropertyNamingPolicy = JsonNamingPolicy.CamelCase, - DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, - }; - - private static 
readonly JsonSerializerOptions StatementSerializerOptions = new() - { - PropertyNamingPolicy = JsonNamingPolicy.CamelCase, - DefaultIgnoreCondition = JsonIgnoreCondition.Never, - Converters = { new JsonStringEnumConverter(JsonNamingPolicy.CamelCase) }, - }; - - private readonly ILogger<VexAttestationVerifier> _logger; - private readonly ITransparencyLogClient? _transparencyLogClient; - private readonly VexAttestationVerificationOptions _options; - private readonly VexAttestationMetrics _metrics; - - public VexAttestationVerifier( - ILogger<VexAttestationVerifier> logger, - ITransparencyLogClient? transparencyLogClient, - IOptions<VexAttestationVerificationOptions> options, - VexAttestationMetrics metrics) - { - _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - ArgumentNullException.ThrowIfNull(options); - _transparencyLogClient = transparencyLogClient; - _options = options.Value; - _metrics = metrics ?? throw new ArgumentNullException(nameof(metrics)); - } - - public async ValueTask<VexAttestationVerification> VerifyAsync( - VexAttestationVerificationRequest request, - CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(request); - - var stopwatch = Stopwatch.StartNew(); - var diagnostics = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal); - var resultLabel = "valid"; - var rekorState = "skipped"; - var component = request.IsReverify ? "worker" : "webservice"; - - try - { - if (string.IsNullOrWhiteSpace(request.Envelope)) - { - diagnostics["envelope.state"] = "missing"; - _logger.LogWarning("Attestation envelope is missing for export {ExportId}", request.Attestation.ExportId); - resultLabel = "invalid"; - return BuildResult(false); - } - - if (!TryDeserializeEnvelope(request.Envelope, out var envelope, diagnostics)) - { - _logger.LogWarning("Failed to deserialize attestation envelope for export {ExportId}", request.Attestation.ExportId); - resultLabel = "invalid"; - return BuildResult(false); - } - - if (!string.Equals(envelope.PayloadType, VexDsseBuilder.PayloadType, StringComparison.OrdinalIgnoreCase)) - { - diagnostics["payload.type"] = envelope.PayloadType ?? string.Empty; - _logger.LogWarning( - "Unexpected DSSE payload type {PayloadType} for export {ExportId}", - envelope.PayloadType, - request.Attestation.ExportId); - resultLabel = "invalid"; - return BuildResult(false); - } - - if (envelope.Signatures is null || envelope.Signatures.Count == 0) - { - diagnostics["signature.state"] = "missing"; - _logger.LogWarning("Attestation envelope for export {ExportId} does not contain signatures.", request.Attestation.ExportId); - resultLabel = "invalid"; - return BuildResult(false); - } - - var payloadBase64 = envelope.Payload ?? 
string.Empty; - if (!TryDecodePayload(payloadBase64, out var payloadBytes, diagnostics)) - { - _logger.LogWarning("Failed to decode attestation payload for export {ExportId}", request.Attestation.ExportId); - resultLabel = "invalid"; - return BuildResult(false); - } - - if (!TryDeserializeStatement(payloadBytes, out var statement, diagnostics)) - { - _logger.LogWarning("Failed to deserialize DSSE statement for export {ExportId}", request.Attestation.ExportId); - resultLabel = "invalid"; - return BuildResult(false); - } - - if (!ValidatePredicateType(statement, request, diagnostics)) - { - _logger.LogWarning("Predicate type mismatch for export {ExportId}", request.Attestation.ExportId); - resultLabel = "invalid"; - return BuildResult(false); - } - - if (!ValidateSubject(statement, request, diagnostics)) - { - _logger.LogWarning("Subject mismatch for export {ExportId}", request.Attestation.ExportId); - resultLabel = "invalid"; - return BuildResult(false); - } - - if (!ValidatePredicate(statement, request, diagnostics)) - { - _logger.LogWarning("Predicate payload mismatch for export {ExportId}", request.Attestation.ExportId); - resultLabel = "invalid"; - return BuildResult(false); - } - - if (!ValidateMetadataDigest(envelope, request.Metadata, diagnostics)) - { - _logger.LogWarning("Attestation digest mismatch for export {ExportId}", request.Attestation.ExportId); - resultLabel = "invalid"; - return BuildResult(false); - } - - if (!ValidateSignedAt(request.Metadata, request.Attestation.CreatedAt, diagnostics)) - { - _logger.LogWarning("SignedAt validation failed for export {ExportId}", request.Attestation.ExportId); - resultLabel = "invalid"; - return BuildResult(false); - } - - rekorState = await VerifyTransparencyAsync(request.Metadata, diagnostics, cancellationToken).ConfigureAwait(false); - if (rekorState is "missing" or "unverified" or "client_unavailable") - { - resultLabel = "invalid"; - return BuildResult(false); - } - - diagnostics["signature.state"] = "present"; - return BuildResult(true); - } - catch (Exception ex) - { - diagnostics["error"] = ex.GetType().Name; - diagnostics["error.message"] = ex.Message; - resultLabel = "error"; - _logger.LogError(ex, "Unexpected exception verifying attestation for export {ExportId}", request.Attestation.ExportId); - return BuildResult(false); - } - finally - { - stopwatch.Stop(); - var tags = new KeyValuePair<string, object?>[] - { - new("result", resultLabel), - new("component", component), - new("rekor", rekorState), - }; - _metrics.VerifyTotal.Add(1, tags); - _metrics.VerifyDuration.Record(stopwatch.Elapsed.TotalSeconds, tags); - } - - VexAttestationVerification BuildResult(bool isValid) - { - diagnostics["result"] = resultLabel; - diagnostics["component"] = component; - diagnostics["rekor.state"] = rekorState; - return new VexAttestationVerification(isValid, diagnostics.ToImmutable()); - } - } - - private static bool TryDeserializeEnvelope( - string envelopeJson, - out DsseEnvelope envelope, - ImmutableDictionary<string, string>.Builder diagnostics) - { - try - { - envelope = JsonSerializer.Deserialize<DsseEnvelope>(envelopeJson, EnvelopeSerializerOptions) - ?? 
throw new InvalidOperationException("Envelope deserialized to null."); - return true; - } - catch (Exception ex) - { - diagnostics["envelope.error"] = ex.GetType().Name; - envelope = default!; - return false; - } - } - - private static bool TryDecodePayload( - string payloadBase64, - out byte[] payloadBytes, - ImmutableDictionary<string, string>.Builder diagnostics) - { - try - { - payloadBytes = Convert.FromBase64String(payloadBase64); - return true; - } - catch (FormatException) - { - diagnostics["payload.base64"] = "invalid"; - payloadBytes = Array.Empty<byte>(); - return false; - } - } - - private static bool TryDeserializeStatement( - byte[] payload, - out VexInTotoStatement statement, - ImmutableDictionary<string, string>.Builder diagnostics) - { - try - { - statement = JsonSerializer.Deserialize<VexInTotoStatement>(payload, StatementSerializerOptions) - ?? throw new InvalidOperationException("Statement deserialized to null."); - return true; - } - catch (Exception ex) - { - diagnostics["payload.error"] = ex.GetType().Name; - statement = default!; - return false; - } - } - - private static bool ValidatePredicateType( - VexInTotoStatement statement, - VexAttestationVerificationRequest request, - ImmutableDictionary<string, string>.Builder diagnostics) - { - var predicateType = statement.PredicateType ?? string.Empty; - if (!string.Equals(predicateType, request.Metadata.PredicateType, StringComparison.Ordinal)) - { - diagnostics["predicate.type"] = predicateType; - return false; - } - - return true; - } - - private static bool ValidateSubject( - VexInTotoStatement statement, - VexAttestationVerificationRequest request, - ImmutableDictionary<string, string>.Builder diagnostics) - { - if (statement.Subject is null || statement.Subject.Count != 1) - { - diagnostics["subject.count"] = (statement.Subject?.Count ?? 0).ToString(); - return false; - } - - var subject = statement.Subject[0]; - if (!string.Equals(subject.Name, request.Attestation.ExportId, StringComparison.Ordinal)) - { - diagnostics["subject.name"] = subject.Name ?? string.Empty; - return false; - } - - if (subject.Digest is null) - { - diagnostics["subject.digest"] = "missing"; - return false; - } - - var algorithmKey = request.Attestation.Artifact.Algorithm.ToLowerInvariant(); - if (!subject.Digest.TryGetValue(algorithmKey, out var digest) - || !string.Equals(digest, request.Attestation.Artifact.Digest, StringComparison.OrdinalIgnoreCase)) - { - diagnostics["subject.digest"] = digest ?? string.Empty; - return false; - } - - return true; - } - - private bool ValidatePredicate( - VexInTotoStatement statement, - VexAttestationVerificationRequest request, - ImmutableDictionary<string, string>.Builder diagnostics) - { - var predicate = statement.Predicate; - if (predicate is null) - { - diagnostics["predicate.state"] = "missing"; - return false; - } - - if (!string.Equals(predicate.ExportId, request.Attestation.ExportId, StringComparison.Ordinal)) - { - diagnostics["predicate.exportId"] = predicate.ExportId ?? string.Empty; - return false; - } - - if (!string.Equals(predicate.QuerySignature, request.Attestation.QuerySignature.Value, StringComparison.Ordinal)) - { - diagnostics["predicate.querySignature"] = predicate.QuerySignature ?? 
string.Empty; - return false; - } - - if (!string.Equals(predicate.ArtifactAlgorithm, request.Attestation.Artifact.Algorithm, StringComparison.OrdinalIgnoreCase) - || !string.Equals(predicate.ArtifactDigest, request.Attestation.Artifact.Digest, StringComparison.OrdinalIgnoreCase)) - { - diagnostics["predicate.artifact"] = $"{predicate.ArtifactAlgorithm}:{predicate.ArtifactDigest}"; - return false; - } - - if (predicate.Format != request.Attestation.Format) - { - diagnostics["predicate.format"] = predicate.Format.ToString(); - return false; - } - - var createdDelta = (predicate.CreatedAt - request.Attestation.CreatedAt).Duration(); - if (createdDelta > _options.MaxClockSkew) - { - diagnostics["predicate.createdAtDelta"] = createdDelta.ToString(); - return false; - } - - if (!SetEquals(predicate.SourceProviders, request.Attestation.SourceProviders)) - { - diagnostics["predicate.sourceProviders"] = string.Join(",", predicate.SourceProviders ?? Array.Empty<string>()); - return false; - } - - if (request.Attestation.Metadata.Count > 0) - { - if (predicate.Metadata is null) - { - diagnostics["predicate.metadata"] = "missing"; - return false; - } - - foreach (var kvp in request.Attestation.Metadata) - { - if (!predicate.Metadata.TryGetValue(kvp.Key, out var actual) - || !string.Equals(actual, kvp.Value, StringComparison.Ordinal)) - { - diagnostics[$"predicate.metadata.{kvp.Key}"] = actual ?? string.Empty; - return false; - } - } - } - - return true; - } - - private bool ValidateMetadataDigest( - DsseEnvelope envelope, - VexAttestationMetadata metadata, - ImmutableDictionary<string, string>.Builder diagnostics) - { - if (string.IsNullOrWhiteSpace(metadata.EnvelopeDigest)) - { - diagnostics["metadata.envelopeDigest"] = "missing"; - return false; - } - - var computed = VexDsseBuilder.ComputeEnvelopeDigest(envelope); - if (!string.Equals(computed, metadata.EnvelopeDigest, StringComparison.OrdinalIgnoreCase)) - { - diagnostics["metadata.envelopeDigest"] = metadata.EnvelopeDigest; - diagnostics["metadata.envelopeDigest.computed"] = computed; - return false; - } - - diagnostics["metadata.envelopeDigest"] = "match"; - return true; - } - - private bool ValidateSignedAt( - VexAttestationMetadata metadata, - DateTimeOffset createdAt, - ImmutableDictionary<string, string>.Builder diagnostics) - { - if (metadata.SignedAt is null) - { - diagnostics["metadata.signedAt"] = "missing"; - return false; - } - - var delta = (metadata.SignedAt.Value - createdAt).Duration(); - if (delta > _options.MaxClockSkew) - { - diagnostics["metadata.signedAtDelta"] = delta.ToString(); - return false; - } - - return true; - } - - private async ValueTask<string> VerifyTransparencyAsync( - VexAttestationMetadata metadata, - ImmutableDictionary<string, string>.Builder diagnostics, - CancellationToken cancellationToken) - { - if (metadata.Rekor is null) - { - if (_options.RequireTransparencyLog) - { - diagnostics["rekor.state"] = "missing"; - return "missing"; - } - - diagnostics["rekor.state"] = "disabled"; - return "disabled"; - } - - if (_transparencyLogClient is null) - { - diagnostics["rekor.state"] = "client_unavailable"; - return _options.RequireTransparencyLog ? "client_unavailable" : "disabled"; - } - - try - { - var verified = await _transparencyLogClient.VerifyAsync(metadata.Rekor.Location, cancellationToken).ConfigureAwait(false); - diagnostics["rekor.state"] = verified ? "verified" : "unverified"; - return verified ? 
"verified" : "unverified"; - } - catch (Exception ex) - { - diagnostics["rekor.error"] = ex.GetType().Name; - if (_options.AllowOfflineTransparency) - { - diagnostics["rekor.state"] = "offline"; - return "offline"; - } - - diagnostics["rekor.state"] = "unreachable"; - return "unreachable"; - } - } - - private static bool SetEquals(IReadOnlyCollection<string>? left, ImmutableArray<string> right) - { - if (left is null) - { - return right.IsDefaultOrEmpty; - } - - if (left.Count != right.Length) - { - return false; - } - - var leftSet = new HashSet<string>(left, StringComparer.Ordinal); - return right.All(leftSet.Contains); - } -} +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Diagnostics; +using System.Linq; +using System.Text; +using System.Text.Json; +using System.Text.Json.Serialization; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Excititor.Attestation.Dsse; +using StellaOps.Excititor.Attestation.Models; +using StellaOps.Excititor.Attestation.Transparency; +using StellaOps.Excititor.Core; + +namespace StellaOps.Excititor.Attestation.Verification; + +internal sealed class VexAttestationVerifier : IVexAttestationVerifier +{ + private static readonly JsonSerializerOptions EnvelopeSerializerOptions = new() + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + }; + + private static readonly JsonSerializerOptions StatementSerializerOptions = new() + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + DefaultIgnoreCondition = JsonIgnoreCondition.Never, + Converters = { new JsonStringEnumConverter(JsonNamingPolicy.CamelCase) }, + }; + + private readonly ILogger<VexAttestationVerifier> _logger; + private readonly ITransparencyLogClient? _transparencyLogClient; + private readonly VexAttestationVerificationOptions _options; + private readonly VexAttestationMetrics _metrics; + + public VexAttestationVerifier( + ILogger<VexAttestationVerifier> logger, + ITransparencyLogClient? transparencyLogClient, + IOptions<VexAttestationVerificationOptions> options, + VexAttestationMetrics metrics) + { + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + ArgumentNullException.ThrowIfNull(options); + _transparencyLogClient = transparencyLogClient; + _options = options.Value; + _metrics = metrics ?? throw new ArgumentNullException(nameof(metrics)); + } + + public async ValueTask<VexAttestationVerification> VerifyAsync( + VexAttestationVerificationRequest request, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(request); + + var stopwatch = Stopwatch.StartNew(); + var diagnostics = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal); + var resultLabel = "valid"; + var rekorState = "skipped"; + var component = request.IsReverify ? 
"worker" : "webservice"; + + try + { + if (string.IsNullOrWhiteSpace(request.Envelope)) + { + diagnostics["envelope.state"] = "missing"; + _logger.LogWarning("Attestation envelope is missing for export {ExportId}", request.Attestation.ExportId); + resultLabel = "invalid"; + return BuildResult(false); + } + + if (!TryDeserializeEnvelope(request.Envelope, out var envelope, diagnostics)) + { + _logger.LogWarning("Failed to deserialize attestation envelope for export {ExportId}", request.Attestation.ExportId); + resultLabel = "invalid"; + return BuildResult(false); + } + + if (!string.Equals(envelope.PayloadType, VexDsseBuilder.PayloadType, StringComparison.OrdinalIgnoreCase)) + { + diagnostics["payload.type"] = envelope.PayloadType ?? string.Empty; + _logger.LogWarning( + "Unexpected DSSE payload type {PayloadType} for export {ExportId}", + envelope.PayloadType, + request.Attestation.ExportId); + resultLabel = "invalid"; + return BuildResult(false); + } + + if (envelope.Signatures is null || envelope.Signatures.Count == 0) + { + diagnostics["signature.state"] = "missing"; + _logger.LogWarning("Attestation envelope for export {ExportId} does not contain signatures.", request.Attestation.ExportId); + resultLabel = "invalid"; + return BuildResult(false); + } + + var payloadBase64 = envelope.Payload ?? string.Empty; + if (!TryDecodePayload(payloadBase64, out var payloadBytes, diagnostics)) + { + _logger.LogWarning("Failed to decode attestation payload for export {ExportId}", request.Attestation.ExportId); + resultLabel = "invalid"; + return BuildResult(false); + } + + if (!TryDeserializeStatement(payloadBytes, out var statement, diagnostics)) + { + _logger.LogWarning("Failed to deserialize DSSE statement for export {ExportId}", request.Attestation.ExportId); + resultLabel = "invalid"; + return BuildResult(false); + } + + if (!ValidatePredicateType(statement, request, diagnostics)) + { + _logger.LogWarning("Predicate type mismatch for export {ExportId}", request.Attestation.ExportId); + resultLabel = "invalid"; + return BuildResult(false); + } + + if (!ValidateSubject(statement, request, diagnostics)) + { + _logger.LogWarning("Subject mismatch for export {ExportId}", request.Attestation.ExportId); + resultLabel = "invalid"; + return BuildResult(false); + } + + if (!ValidatePredicate(statement, request, diagnostics)) + { + _logger.LogWarning("Predicate payload mismatch for export {ExportId}", request.Attestation.ExportId); + resultLabel = "invalid"; + return BuildResult(false); + } + + if (!ValidateMetadataDigest(envelope, request.Metadata, diagnostics)) + { + _logger.LogWarning("Attestation digest mismatch for export {ExportId}", request.Attestation.ExportId); + resultLabel = "invalid"; + return BuildResult(false); + } + + if (!ValidateSignedAt(request.Metadata, request.Attestation.CreatedAt, diagnostics)) + { + _logger.LogWarning("SignedAt validation failed for export {ExportId}", request.Attestation.ExportId); + resultLabel = "invalid"; + return BuildResult(false); + } + + rekorState = await VerifyTransparencyAsync(request.Metadata, diagnostics, cancellationToken).ConfigureAwait(false); + if (rekorState is "missing" or "unverified" or "client_unavailable") + { + resultLabel = "invalid"; + return BuildResult(false); + } + + diagnostics["signature.state"] = "present"; + return BuildResult(true); + } + catch (Exception ex) + { + diagnostics["error"] = ex.GetType().Name; + diagnostics["error.message"] = ex.Message; + resultLabel = "error"; + _logger.LogError(ex, "Unexpected exception verifying 
attestation for export {ExportId}", request.Attestation.ExportId); + return BuildResult(false); + } + finally + { + stopwatch.Stop(); + var tags = new KeyValuePair<string, object?>[] + { + new("result", resultLabel), + new("component", component), + new("rekor", rekorState), + }; + _metrics.VerifyTotal.Add(1, tags); + _metrics.VerifyDuration.Record(stopwatch.Elapsed.TotalSeconds, tags); + } + + VexAttestationVerification BuildResult(bool isValid) + { + diagnostics["result"] = resultLabel; + diagnostics["component"] = component; + diagnostics["rekor.state"] = rekorState; + return new VexAttestationVerification(isValid, diagnostics.ToImmutable()); + } + } + + private static bool TryDeserializeEnvelope( + string envelopeJson, + out DsseEnvelope envelope, + ImmutableDictionary<string, string>.Builder diagnostics) + { + try + { + envelope = JsonSerializer.Deserialize<DsseEnvelope>(envelopeJson, EnvelopeSerializerOptions) + ?? throw new InvalidOperationException("Envelope deserialized to null."); + return true; + } + catch (Exception ex) + { + diagnostics["envelope.error"] = ex.GetType().Name; + envelope = default!; + return false; + } + } + + private static bool TryDecodePayload( + string payloadBase64, + out byte[] payloadBytes, + ImmutableDictionary<string, string>.Builder diagnostics) + { + try + { + payloadBytes = Convert.FromBase64String(payloadBase64); + return true; + } + catch (FormatException) + { + diagnostics["payload.base64"] = "invalid"; + payloadBytes = Array.Empty<byte>(); + return false; + } + } + + private static bool TryDeserializeStatement( + byte[] payload, + out VexInTotoStatement statement, + ImmutableDictionary<string, string>.Builder diagnostics) + { + try + { + statement = JsonSerializer.Deserialize<VexInTotoStatement>(payload, StatementSerializerOptions) + ?? throw new InvalidOperationException("Statement deserialized to null."); + return true; + } + catch (Exception ex) + { + diagnostics["payload.error"] = ex.GetType().Name; + statement = default!; + return false; + } + } + + private static bool ValidatePredicateType( + VexInTotoStatement statement, + VexAttestationVerificationRequest request, + ImmutableDictionary<string, string>.Builder diagnostics) + { + var predicateType = statement.PredicateType ?? string.Empty; + if (!string.Equals(predicateType, request.Metadata.PredicateType, StringComparison.Ordinal)) + { + diagnostics["predicate.type"] = predicateType; + return false; + } + + return true; + } + + private static bool ValidateSubject( + VexInTotoStatement statement, + VexAttestationVerificationRequest request, + ImmutableDictionary<string, string>.Builder diagnostics) + { + if (statement.Subject is null || statement.Subject.Count != 1) + { + diagnostics["subject.count"] = (statement.Subject?.Count ?? 0).ToString(); + return false; + } + + var subject = statement.Subject[0]; + if (!string.Equals(subject.Name, request.Attestation.ExportId, StringComparison.Ordinal)) + { + diagnostics["subject.name"] = subject.Name ?? string.Empty; + return false; + } + + if (subject.Digest is null) + { + diagnostics["subject.digest"] = "missing"; + return false; + } + + var algorithmKey = request.Attestation.Artifact.Algorithm.ToLowerInvariant(); + if (!subject.Digest.TryGetValue(algorithmKey, out var digest) + || !string.Equals(digest, request.Attestation.Artifact.Digest, StringComparison.OrdinalIgnoreCase)) + { + diagnostics["subject.digest"] = digest ?? 
string.Empty; + return false; + } + + return true; + } + + private bool ValidatePredicate( + VexInTotoStatement statement, + VexAttestationVerificationRequest request, + ImmutableDictionary<string, string>.Builder diagnostics) + { + var predicate = statement.Predicate; + if (predicate is null) + { + diagnostics["predicate.state"] = "missing"; + return false; + } + + if (!string.Equals(predicate.ExportId, request.Attestation.ExportId, StringComparison.Ordinal)) + { + diagnostics["predicate.exportId"] = predicate.ExportId ?? string.Empty; + return false; + } + + if (!string.Equals(predicate.QuerySignature, request.Attestation.QuerySignature.Value, StringComparison.Ordinal)) + { + diagnostics["predicate.querySignature"] = predicate.QuerySignature ?? string.Empty; + return false; + } + + if (!string.Equals(predicate.ArtifactAlgorithm, request.Attestation.Artifact.Algorithm, StringComparison.OrdinalIgnoreCase) + || !string.Equals(predicate.ArtifactDigest, request.Attestation.Artifact.Digest, StringComparison.OrdinalIgnoreCase)) + { + diagnostics["predicate.artifact"] = $"{predicate.ArtifactAlgorithm}:{predicate.ArtifactDigest}"; + return false; + } + + if (predicate.Format != request.Attestation.Format) + { + diagnostics["predicate.format"] = predicate.Format.ToString(); + return false; + } + + var createdDelta = (predicate.CreatedAt - request.Attestation.CreatedAt).Duration(); + if (createdDelta > _options.MaxClockSkew) + { + diagnostics["predicate.createdAtDelta"] = createdDelta.ToString(); + return false; + } + + if (!SetEquals(predicate.SourceProviders, request.Attestation.SourceProviders)) + { + diagnostics["predicate.sourceProviders"] = string.Join(",", predicate.SourceProviders ?? Array.Empty<string>()); + return false; + } + + if (request.Attestation.Metadata.Count > 0) + { + if (predicate.Metadata is null) + { + diagnostics["predicate.metadata"] = "missing"; + return false; + } + + foreach (var kvp in request.Attestation.Metadata) + { + if (!predicate.Metadata.TryGetValue(kvp.Key, out var actual) + || !string.Equals(actual, kvp.Value, StringComparison.Ordinal)) + { + diagnostics[$"predicate.metadata.{kvp.Key}"] = actual ?? 
string.Empty; + return false; + } + } + } + + return true; + } + + private bool ValidateMetadataDigest( + DsseEnvelope envelope, + VexAttestationMetadata metadata, + ImmutableDictionary<string, string>.Builder diagnostics) + { + if (string.IsNullOrWhiteSpace(metadata.EnvelopeDigest)) + { + diagnostics["metadata.envelopeDigest"] = "missing"; + return false; + } + + var computed = VexDsseBuilder.ComputeEnvelopeDigest(envelope); + if (!string.Equals(computed, metadata.EnvelopeDigest, StringComparison.OrdinalIgnoreCase)) + { + diagnostics["metadata.envelopeDigest"] = metadata.EnvelopeDigest; + diagnostics["metadata.envelopeDigest.computed"] = computed; + return false; + } + + diagnostics["metadata.envelopeDigest"] = "match"; + return true; + } + + private bool ValidateSignedAt( + VexAttestationMetadata metadata, + DateTimeOffset createdAt, + ImmutableDictionary<string, string>.Builder diagnostics) + { + if (metadata.SignedAt is null) + { + diagnostics["metadata.signedAt"] = "missing"; + return false; + } + + var delta = (metadata.SignedAt.Value - createdAt).Duration(); + if (delta > _options.MaxClockSkew) + { + diagnostics["metadata.signedAtDelta"] = delta.ToString(); + return false; + } + + return true; + } + + private async ValueTask<string> VerifyTransparencyAsync( + VexAttestationMetadata metadata, + ImmutableDictionary<string, string>.Builder diagnostics, + CancellationToken cancellationToken) + { + if (metadata.Rekor is null) + { + if (_options.RequireTransparencyLog) + { + diagnostics["rekor.state"] = "missing"; + return "missing"; + } + + diagnostics["rekor.state"] = "disabled"; + return "disabled"; + } + + if (_transparencyLogClient is null) + { + diagnostics["rekor.state"] = "client_unavailable"; + return _options.RequireTransparencyLog ? "client_unavailable" : "disabled"; + } + + try + { + var verified = await _transparencyLogClient.VerifyAsync(metadata.Rekor.Location, cancellationToken).ConfigureAwait(false); + diagnostics["rekor.state"] = verified ? "verified" : "unverified"; + return verified ? "verified" : "unverified"; + } + catch (Exception ex) + { + diagnostics["rekor.error"] = ex.GetType().Name; + if (_options.AllowOfflineTransparency) + { + diagnostics["rekor.state"] = "offline"; + return "offline"; + } + + diagnostics["rekor.state"] = "unreachable"; + return "unreachable"; + } + } + + private static bool SetEquals(IReadOnlyCollection<string>? 
left, ImmutableArray<string> right) + { + if (left is null) + { + return right.IsDefaultOrEmpty; + } + + if (left.Count != right.Length) + { + return false; + } + + var leftSet = new HashSet<string>(left, StringComparer.Ordinal); + return right.All(leftSet.Contains); + } +} diff --git a/src/StellaOps.Excititor.Attestation/VexAttestationClient.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Attestation/VexAttestationClient.cs similarity index 100% rename from src/StellaOps.Excititor.Attestation/VexAttestationClient.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Attestation/VexAttestationClient.cs diff --git a/src/StellaOps.Excititor.Connectors.Abstractions/AGENTS.md b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Abstractions/AGENTS.md similarity index 100% rename from src/StellaOps.Excititor.Connectors.Abstractions/AGENTS.md rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Abstractions/AGENTS.md diff --git a/src/StellaOps.Excititor.Connectors.Abstractions/IVexConnectorOptionsValidator.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Abstractions/IVexConnectorOptionsValidator.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.Abstractions/IVexConnectorOptionsValidator.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Abstractions/IVexConnectorOptionsValidator.cs diff --git a/src/StellaOps.Excititor.Connectors.Abstractions/StellaOps.Excititor.Connectors.Abstractions.csproj b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Abstractions/StellaOps.Excititor.Connectors.Abstractions.csproj similarity index 97% rename from src/StellaOps.Excititor.Connectors.Abstractions/StellaOps.Excititor.Connectors.Abstractions.csproj rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Abstractions/StellaOps.Excititor.Connectors.Abstractions.csproj index 5f8740e5..2d9293c6 100644 --- a/src/StellaOps.Excititor.Connectors.Abstractions/StellaOps.Excititor.Connectors.Abstractions.csproj +++ b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Abstractions/StellaOps.Excititor.Connectors.Abstractions.csproj @@ -1,17 +1,17 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" /> - </ItemGroup> - <ItemGroup> - <PackageReference Include="Microsoft.Extensions.Configuration" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Configuration.Binder" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> - </ItemGroup> -</Project> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" /> + </ItemGroup> + <ItemGroup> + <PackageReference Include="Microsoft.Extensions.Configuration" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Configuration.Binder" Version="10.0.0-rc.2.25502.107" /> + 
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> + </ItemGroup> +</Project> diff --git a/src/StellaOps.Excititor.Connectors.Abstractions/TASKS.md b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Abstractions/TASKS.md similarity index 100% rename from src/StellaOps.Excititor.Connectors.Abstractions/TASKS.md rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Abstractions/TASKS.md diff --git a/src/StellaOps.Excititor.Connectors.Abstractions/VexConnectorBase.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Abstractions/VexConnectorBase.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.Abstractions/VexConnectorBase.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Abstractions/VexConnectorBase.cs diff --git a/src/StellaOps.Excititor.Connectors.Abstractions/VexConnectorDescriptor.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Abstractions/VexConnectorDescriptor.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.Abstractions/VexConnectorDescriptor.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Abstractions/VexConnectorDescriptor.cs diff --git a/src/StellaOps.Excititor.Connectors.Abstractions/VexConnectorLogScope.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Abstractions/VexConnectorLogScope.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.Abstractions/VexConnectorLogScope.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Abstractions/VexConnectorLogScope.cs diff --git a/src/StellaOps.Excititor.Connectors.Abstractions/VexConnectorMetadataBuilder.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Abstractions/VexConnectorMetadataBuilder.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.Abstractions/VexConnectorMetadataBuilder.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Abstractions/VexConnectorMetadataBuilder.cs diff --git a/src/StellaOps.Excititor.Connectors.Abstractions/VexConnectorOptionsBinder.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Abstractions/VexConnectorOptionsBinder.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.Abstractions/VexConnectorOptionsBinder.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Abstractions/VexConnectorOptionsBinder.cs diff --git a/src/StellaOps.Excititor.Connectors.Abstractions/VexConnectorOptionsBinderOptions.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Abstractions/VexConnectorOptionsBinderOptions.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.Abstractions/VexConnectorOptionsBinderOptions.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Abstractions/VexConnectorOptionsBinderOptions.cs diff --git a/src/StellaOps.Excititor.Connectors.Abstractions/VexConnectorOptionsValidationException.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Abstractions/VexConnectorOptionsValidationException.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.Abstractions/VexConnectorOptionsValidationException.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Abstractions/VexConnectorOptionsValidationException.cs diff --git a/src/StellaOps.Excititor.Connectors.Cisco.CSAF/AGENTS.md b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Cisco.CSAF/AGENTS.md similarity index 100% rename from 
src/StellaOps.Excititor.Connectors.Cisco.CSAF/AGENTS.md rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Cisco.CSAF/AGENTS.md diff --git a/src/StellaOps.Excititor.Connectors.Cisco.CSAF/CiscoCsafConnector.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Cisco.CSAF/CiscoCsafConnector.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.Cisco.CSAF/CiscoCsafConnector.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Cisco.CSAF/CiscoCsafConnector.cs diff --git a/src/StellaOps.Excititor.Connectors.Cisco.CSAF/Configuration/CiscoConnectorOptions.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Cisco.CSAF/Configuration/CiscoConnectorOptions.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.Cisco.CSAF/Configuration/CiscoConnectorOptions.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Cisco.CSAF/Configuration/CiscoConnectorOptions.cs diff --git a/src/StellaOps.Excititor.Connectors.Cisco.CSAF/Configuration/CiscoConnectorOptionsValidator.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Cisco.CSAF/Configuration/CiscoConnectorOptionsValidator.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.Cisco.CSAF/Configuration/CiscoConnectorOptionsValidator.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Cisco.CSAF/Configuration/CiscoConnectorOptionsValidator.cs diff --git a/src/StellaOps.Excititor.Connectors.Cisco.CSAF/DependencyInjection/CiscoConnectorServiceCollectionExtensions.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Cisco.CSAF/DependencyInjection/CiscoConnectorServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.Cisco.CSAF/DependencyInjection/CiscoConnectorServiceCollectionExtensions.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Cisco.CSAF/DependencyInjection/CiscoConnectorServiceCollectionExtensions.cs diff --git a/src/StellaOps.Excititor.Connectors.Cisco.CSAF/Metadata/CiscoProviderMetadataLoader.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Cisco.CSAF/Metadata/CiscoProviderMetadataLoader.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.Cisco.CSAF/Metadata/CiscoProviderMetadataLoader.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Cisco.CSAF/Metadata/CiscoProviderMetadataLoader.cs diff --git a/src/StellaOps.Excititor.Connectors.Cisco.CSAF/StellaOps.Excititor.Connectors.Cisco.CSAF.csproj b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Cisco.CSAF/StellaOps.Excititor.Connectors.Cisco.CSAF.csproj similarity index 98% rename from src/StellaOps.Excititor.Connectors.Cisco.CSAF/StellaOps.Excititor.Connectors.Cisco.CSAF.csproj rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Cisco.CSAF/StellaOps.Excititor.Connectors.Cisco.CSAF.csproj index c573f653..fee5f248 100644 --- a/src/StellaOps.Excititor.Connectors.Cisco.CSAF/StellaOps.Excititor.Connectors.Cisco.CSAF.csproj +++ b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Cisco.CSAF/StellaOps.Excititor.Connectors.Cisco.CSAF.csproj @@ -1,20 +1,20 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <ItemGroup> - <ProjectReference 
Include="..\StellaOps.Excititor.Connectors.Abstractions\StellaOps.Excititor.Connectors.Abstractions.csproj" /> - <ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" /> - <ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" /> - </ItemGroup> - <ItemGroup> - <PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="10.0.0-preview.7.25380.108" /> - <PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="System.IO.Abstractions" Version="20.0.28" /> - </ItemGroup> -</Project> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="..\StellaOps.Excititor.Connectors.Abstractions\StellaOps.Excititor.Connectors.Abstractions.csproj" /> + <ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" /> + <ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" /> + </ItemGroup> + <ItemGroup> + <PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="10.0.0-preview.7.25380.108" /> + <PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="System.IO.Abstractions" Version="20.0.28" /> + </ItemGroup> +</Project> diff --git a/src/StellaOps.Excititor.Connectors.Cisco.CSAF/TASKS.md b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Cisco.CSAF/TASKS.md similarity index 100% rename from src/StellaOps.Excititor.Connectors.Cisco.CSAF/TASKS.md rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Cisco.CSAF/TASKS.md diff --git a/src/StellaOps.Excititor.Connectors.MSRC.CSAF/AGENTS.md b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.MSRC.CSAF/AGENTS.md similarity index 100% rename from src/StellaOps.Excititor.Connectors.MSRC.CSAF/AGENTS.md rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.MSRC.CSAF/AGENTS.md diff --git a/src/StellaOps.Excititor.Connectors.MSRC.CSAF/Authentication/MsrcTokenProvider.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.MSRC.CSAF/Authentication/MsrcTokenProvider.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.MSRC.CSAF/Authentication/MsrcTokenProvider.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.MSRC.CSAF/Authentication/MsrcTokenProvider.cs diff --git a/src/StellaOps.Excititor.Connectors.MSRC.CSAF/Configuration/MsrcConnectorOptions.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.MSRC.CSAF/Configuration/MsrcConnectorOptions.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.MSRC.CSAF/Configuration/MsrcConnectorOptions.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.MSRC.CSAF/Configuration/MsrcConnectorOptions.cs diff --git a/src/StellaOps.Excititor.Connectors.MSRC.CSAF/DependencyInjection/MsrcConnectorServiceCollectionExtensions.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.MSRC.CSAF/DependencyInjection/MsrcConnectorServiceCollectionExtensions.cs 
similarity index 100% rename from src/StellaOps.Excititor.Connectors.MSRC.CSAF/DependencyInjection/MsrcConnectorServiceCollectionExtensions.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.MSRC.CSAF/DependencyInjection/MsrcConnectorServiceCollectionExtensions.cs diff --git a/src/StellaOps.Excititor.Connectors.MSRC.CSAF/MsrcCsafConnector.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.MSRC.CSAF/MsrcCsafConnector.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.MSRC.CSAF/MsrcCsafConnector.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.MSRC.CSAF/MsrcCsafConnector.cs diff --git a/src/StellaOps.Excititor.Connectors.MSRC.CSAF/StellaOps.Excititor.Connectors.MSRC.CSAF.csproj b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.MSRC.CSAF/StellaOps.Excititor.Connectors.MSRC.CSAF.csproj similarity index 98% rename from src/StellaOps.Excititor.Connectors.MSRC.CSAF/StellaOps.Excititor.Connectors.MSRC.CSAF.csproj rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.MSRC.CSAF/StellaOps.Excititor.Connectors.MSRC.CSAF.csproj index 48287a90..b52d920e 100644 --- a/src/StellaOps.Excititor.Connectors.MSRC.CSAF/StellaOps.Excititor.Connectors.MSRC.CSAF.csproj +++ b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.MSRC.CSAF/StellaOps.Excititor.Connectors.MSRC.CSAF.csproj @@ -1,19 +1,19 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Excititor.Connectors.Abstractions\StellaOps.Excititor.Connectors.Abstractions.csproj" /> - <ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" /> - </ItemGroup> - <ItemGroup> - <PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="10.0.0-preview.7.25380.108" /> - <PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="System.IO.Abstractions" Version="20.0.28" /> - </ItemGroup> -</Project> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="..\StellaOps.Excititor.Connectors.Abstractions\StellaOps.Excititor.Connectors.Abstractions.csproj" /> + <ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" /> + </ItemGroup> + <ItemGroup> + <PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="10.0.0-preview.7.25380.108" /> + <PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="System.IO.Abstractions" Version="20.0.28" /> + </ItemGroup> +</Project> diff --git a/src/StellaOps.Excititor.Connectors.MSRC.CSAF/TASKS.md b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.MSRC.CSAF/TASKS.md similarity index 100% rename from src/StellaOps.Excititor.Connectors.MSRC.CSAF/TASKS.md rename to 
src/Excititor/__Libraries/StellaOps.Excititor.Connectors.MSRC.CSAF/TASKS.md diff --git a/src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/AGENTS.md b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/AGENTS.md similarity index 100% rename from src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/AGENTS.md rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/AGENTS.md diff --git a/src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Authentication/OciCosignAuthority.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Authentication/OciCosignAuthority.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Authentication/OciCosignAuthority.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Authentication/OciCosignAuthority.cs diff --git a/src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Authentication/OciRegistryAuthorization.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Authentication/OciRegistryAuthorization.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Authentication/OciRegistryAuthorization.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Authentication/OciRegistryAuthorization.cs diff --git a/src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Configuration/OciOpenVexAttestationConnectorOptions.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Configuration/OciOpenVexAttestationConnectorOptions.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Configuration/OciOpenVexAttestationConnectorOptions.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Configuration/OciOpenVexAttestationConnectorOptions.cs diff --git a/src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Configuration/OciOpenVexAttestationConnectorOptionsValidator.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Configuration/OciOpenVexAttestationConnectorOptionsValidator.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Configuration/OciOpenVexAttestationConnectorOptionsValidator.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Configuration/OciOpenVexAttestationConnectorOptionsValidator.cs diff --git a/src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/DependencyInjection/OciOpenVexAttestationConnectorServiceCollectionExtensions.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/DependencyInjection/OciOpenVexAttestationConnectorServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/DependencyInjection/OciOpenVexAttestationConnectorServiceCollectionExtensions.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/DependencyInjection/OciOpenVexAttestationConnectorServiceCollectionExtensions.cs diff --git a/src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Discovery/OciAttestationDiscoveryResult.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Discovery/OciAttestationDiscoveryResult.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Discovery/OciAttestationDiscoveryResult.cs rename to 
src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Discovery/OciAttestationDiscoveryResult.cs diff --git a/src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Discovery/OciAttestationDiscoveryService.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Discovery/OciAttestationDiscoveryService.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Discovery/OciAttestationDiscoveryService.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Discovery/OciAttestationDiscoveryService.cs diff --git a/src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Discovery/OciAttestationTarget.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Discovery/OciAttestationTarget.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Discovery/OciAttestationTarget.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Discovery/OciAttestationTarget.cs diff --git a/src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Discovery/OciImageReference.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Discovery/OciImageReference.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Discovery/OciImageReference.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Discovery/OciImageReference.cs diff --git a/src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Discovery/OciImageReferenceParser.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Discovery/OciImageReferenceParser.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Discovery/OciImageReferenceParser.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Discovery/OciImageReferenceParser.cs diff --git a/src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Discovery/OciOfflineBundleReference.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Discovery/OciOfflineBundleReference.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Discovery/OciOfflineBundleReference.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Discovery/OciOfflineBundleReference.cs diff --git a/src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Fetch/OciArtifactDescriptor.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Fetch/OciArtifactDescriptor.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Fetch/OciArtifactDescriptor.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Fetch/OciArtifactDescriptor.cs diff --git a/src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Fetch/OciAttestationDocument.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Fetch/OciAttestationDocument.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Fetch/OciAttestationDocument.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Fetch/OciAttestationDocument.cs diff --git a/src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Fetch/OciAttestationFetcher.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Fetch/OciAttestationFetcher.cs 
similarity index 100% rename from src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Fetch/OciAttestationFetcher.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Fetch/OciAttestationFetcher.cs diff --git a/src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Fetch/OciRegistryClient.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Fetch/OciRegistryClient.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Fetch/OciRegistryClient.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/Fetch/OciRegistryClient.cs diff --git a/src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/OciOpenVexAttestationConnector.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/OciOpenVexAttestationConnector.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/OciOpenVexAttestationConnector.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/OciOpenVexAttestationConnector.cs diff --git a/src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.csproj b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.csproj similarity index 98% rename from src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.csproj rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.csproj index 44376895..3979d18f 100644 --- a/src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.csproj +++ b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.csproj @@ -1,19 +1,19 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Excititor.Connectors.Abstractions\StellaOps.Excititor.Connectors.Abstractions.csproj" /> - <ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" /> - </ItemGroup> - <ItemGroup> - <PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="10.0.0-preview.7.25380.108" /> - <PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="System.IO.Abstractions" Version="20.0.28" /> - </ItemGroup> -</Project> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="..\StellaOps.Excititor.Connectors.Abstractions\StellaOps.Excititor.Connectors.Abstractions.csproj" /> + <ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" /> + </ItemGroup> + <ItemGroup> + <PackageReference Include="Microsoft.Extensions.Caching.Memory" 
Version="10.0.0-preview.7.25380.108" /> + <PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="System.IO.Abstractions" Version="20.0.28" /> + </ItemGroup> +</Project> diff --git a/src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/TASKS.md b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/TASKS.md similarity index 100% rename from src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/TASKS.md rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/TASKS.md diff --git a/src/StellaOps.Excititor.Connectors.Oracle.CSAF/AGENTS.md b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Oracle.CSAF/AGENTS.md similarity index 100% rename from src/StellaOps.Excititor.Connectors.Oracle.CSAF/AGENTS.md rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Oracle.CSAF/AGENTS.md diff --git a/src/StellaOps.Excititor.Connectors.Oracle.CSAF/Configuration/OracleConnectorOptions.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Oracle.CSAF/Configuration/OracleConnectorOptions.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.Oracle.CSAF/Configuration/OracleConnectorOptions.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Oracle.CSAF/Configuration/OracleConnectorOptions.cs diff --git a/src/StellaOps.Excititor.Connectors.Oracle.CSAF/Configuration/OracleConnectorOptionsValidator.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Oracle.CSAF/Configuration/OracleConnectorOptionsValidator.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.Oracle.CSAF/Configuration/OracleConnectorOptionsValidator.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Oracle.CSAF/Configuration/OracleConnectorOptionsValidator.cs diff --git a/src/StellaOps.Excititor.Connectors.Oracle.CSAF/DependencyInjection/OracleConnectorServiceCollectionExtensions.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Oracle.CSAF/DependencyInjection/OracleConnectorServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.Oracle.CSAF/DependencyInjection/OracleConnectorServiceCollectionExtensions.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Oracle.CSAF/DependencyInjection/OracleConnectorServiceCollectionExtensions.cs diff --git a/src/StellaOps.Excititor.Connectors.Oracle.CSAF/Metadata/OracleCatalogLoader.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Oracle.CSAF/Metadata/OracleCatalogLoader.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.Oracle.CSAF/Metadata/OracleCatalogLoader.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Oracle.CSAF/Metadata/OracleCatalogLoader.cs diff --git a/src/StellaOps.Excititor.Connectors.Oracle.CSAF/OracleCsafConnector.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Oracle.CSAF/OracleCsafConnector.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.Oracle.CSAF/OracleCsafConnector.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Oracle.CSAF/OracleCsafConnector.cs diff --git a/src/StellaOps.Excititor.Connectors.Oracle.CSAF/StellaOps.Excititor.Connectors.Oracle.CSAF.csproj b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Oracle.CSAF/StellaOps.Excititor.Connectors.Oracle.CSAF.csproj similarity index 
98% rename from src/StellaOps.Excititor.Connectors.Oracle.CSAF/StellaOps.Excititor.Connectors.Oracle.CSAF.csproj rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Oracle.CSAF/StellaOps.Excititor.Connectors.Oracle.CSAF.csproj index c573f653..fee5f248 100644 --- a/src/StellaOps.Excititor.Connectors.Oracle.CSAF/StellaOps.Excititor.Connectors.Oracle.CSAF.csproj +++ b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Oracle.CSAF/StellaOps.Excititor.Connectors.Oracle.CSAF.csproj @@ -1,20 +1,20 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Excititor.Connectors.Abstractions\StellaOps.Excititor.Connectors.Abstractions.csproj" /> - <ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" /> - <ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" /> - </ItemGroup> - <ItemGroup> - <PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="10.0.0-preview.7.25380.108" /> - <PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="System.IO.Abstractions" Version="20.0.28" /> - </ItemGroup> -</Project> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="..\StellaOps.Excititor.Connectors.Abstractions\StellaOps.Excititor.Connectors.Abstractions.csproj" /> + <ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" /> + <ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" /> + </ItemGroup> + <ItemGroup> + <PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="10.0.0-preview.7.25380.108" /> + <PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="System.IO.Abstractions" Version="20.0.28" /> + </ItemGroup> +</Project> diff --git a/src/StellaOps.Excititor.Connectors.Oracle.CSAF/TASKS.md b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Oracle.CSAF/TASKS.md similarity index 100% rename from src/StellaOps.Excititor.Connectors.Oracle.CSAF/TASKS.md rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Oracle.CSAF/TASKS.md diff --git a/src/StellaOps.Excititor.Connectors.RedHat.CSAF/AGENTS.md b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.RedHat.CSAF/AGENTS.md similarity index 100% rename from src/StellaOps.Excititor.Connectors.RedHat.CSAF/AGENTS.md rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.RedHat.CSAF/AGENTS.md diff --git a/src/StellaOps.Excititor.Connectors.RedHat.CSAF/Configuration/RedHatConnectorOptions.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.RedHat.CSAF/Configuration/RedHatConnectorOptions.cs similarity index 100% rename from 
src/StellaOps.Excititor.Connectors.RedHat.CSAF/Configuration/RedHatConnectorOptions.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.RedHat.CSAF/Configuration/RedHatConnectorOptions.cs diff --git a/src/StellaOps.Excititor.Connectors.RedHat.CSAF/DependencyInjection/RedHatConnectorServiceCollectionExtensions.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.RedHat.CSAF/DependencyInjection/RedHatConnectorServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.RedHat.CSAF/DependencyInjection/RedHatConnectorServiceCollectionExtensions.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.RedHat.CSAF/DependencyInjection/RedHatConnectorServiceCollectionExtensions.cs diff --git a/src/StellaOps.Excititor.Connectors.RedHat.CSAF/Metadata/RedHatProviderMetadataLoader.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.RedHat.CSAF/Metadata/RedHatProviderMetadataLoader.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.RedHat.CSAF/Metadata/RedHatProviderMetadataLoader.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.RedHat.CSAF/Metadata/RedHatProviderMetadataLoader.cs diff --git a/src/StellaOps.Excititor.Connectors.RedHat.CSAF/RedHatCsafConnector.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.RedHat.CSAF/RedHatCsafConnector.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.RedHat.CSAF/RedHatCsafConnector.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.RedHat.CSAF/RedHatCsafConnector.cs diff --git a/src/StellaOps.Excititor.Connectors.RedHat.CSAF/StellaOps.Excititor.Connectors.RedHat.CSAF.csproj b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.RedHat.CSAF/StellaOps.Excititor.Connectors.RedHat.CSAF.csproj similarity index 98% rename from src/StellaOps.Excititor.Connectors.RedHat.CSAF/StellaOps.Excititor.Connectors.RedHat.CSAF.csproj rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.RedHat.CSAF/StellaOps.Excititor.Connectors.RedHat.CSAF.csproj index 48287a90..b52d920e 100644 --- a/src/StellaOps.Excititor.Connectors.RedHat.CSAF/StellaOps.Excititor.Connectors.RedHat.CSAF.csproj +++ b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.RedHat.CSAF/StellaOps.Excititor.Connectors.RedHat.CSAF.csproj @@ -1,19 +1,19 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Excititor.Connectors.Abstractions\StellaOps.Excititor.Connectors.Abstractions.csproj" /> - <ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" /> - </ItemGroup> - <ItemGroup> - <PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="10.0.0-preview.7.25380.108" /> - <PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="System.IO.Abstractions" Version="20.0.28" /> - </ItemGroup> -</Project> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + 
<TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="..\StellaOps.Excititor.Connectors.Abstractions\StellaOps.Excititor.Connectors.Abstractions.csproj" /> + <ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" /> + </ItemGroup> + <ItemGroup> + <PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="10.0.0-preview.7.25380.108" /> + <PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="System.IO.Abstractions" Version="20.0.28" /> + </ItemGroup> +</Project> diff --git a/src/StellaOps.Excititor.Connectors.RedHat.CSAF/TASKS.md b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.RedHat.CSAF/TASKS.md similarity index 100% rename from src/StellaOps.Excititor.Connectors.RedHat.CSAF/TASKS.md rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.RedHat.CSAF/TASKS.md diff --git a/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/AGENTS.md b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/AGENTS.md similarity index 100% rename from src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/AGENTS.md rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/AGENTS.md diff --git a/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Authentication/RancherHubTokenProvider.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Authentication/RancherHubTokenProvider.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Authentication/RancherHubTokenProvider.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Authentication/RancherHubTokenProvider.cs diff --git a/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Configuration/RancherHubConnectorOptions.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Configuration/RancherHubConnectorOptions.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Configuration/RancherHubConnectorOptions.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Configuration/RancherHubConnectorOptions.cs diff --git a/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Configuration/RancherHubConnectorOptionsValidator.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Configuration/RancherHubConnectorOptionsValidator.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Configuration/RancherHubConnectorOptionsValidator.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Configuration/RancherHubConnectorOptionsValidator.cs diff --git a/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/DependencyInjection/RancherHubConnectorServiceCollectionExtensions.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/DependencyInjection/RancherHubConnectorServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/DependencyInjection/RancherHubConnectorServiceCollectionExtensions.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/DependencyInjection/RancherHubConnectorServiceCollectionExtensions.cs diff 
--git a/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Design/EXCITITOR-CONN-SUSE-01-002.md b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Design/EXCITITOR-CONN-SUSE-01-002.md similarity index 100% rename from src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Design/EXCITITOR-CONN-SUSE-01-002.md rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Design/EXCITITOR-CONN-SUSE-01-002.md diff --git a/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Events/RancherHubEventClient.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Events/RancherHubEventClient.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Events/RancherHubEventClient.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Events/RancherHubEventClient.cs diff --git a/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Events/RancherHubEventModels.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Events/RancherHubEventModels.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Events/RancherHubEventModels.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Events/RancherHubEventModels.cs diff --git a/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Metadata/RancherHubMetadataLoader.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Metadata/RancherHubMetadataLoader.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Metadata/RancherHubMetadataLoader.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/Metadata/RancherHubMetadataLoader.cs diff --git a/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/RancherHubConnector.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/RancherHubConnector.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/RancherHubConnector.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/RancherHubConnector.cs diff --git a/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/State/RancherHubCheckpointManager.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/State/RancherHubCheckpointManager.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/State/RancherHubCheckpointManager.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/State/RancherHubCheckpointManager.cs diff --git a/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.csproj b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.csproj similarity index 98% rename from src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.csproj rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.csproj index 48287a90..b52d920e 100644 --- a/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.csproj +++ b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.csproj @@ -1,19 +1,19 @@ 
-<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Excititor.Connectors.Abstractions\StellaOps.Excititor.Connectors.Abstractions.csproj" /> - <ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" /> - </ItemGroup> - <ItemGroup> - <PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="10.0.0-preview.7.25380.108" /> - <PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="System.IO.Abstractions" Version="20.0.28" /> - </ItemGroup> -</Project> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="..\StellaOps.Excititor.Connectors.Abstractions\StellaOps.Excititor.Connectors.Abstractions.csproj" /> + <ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" /> + </ItemGroup> + <ItemGroup> + <PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="10.0.0-preview.7.25380.108" /> + <PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="System.IO.Abstractions" Version="20.0.28" /> + </ItemGroup> +</Project> diff --git a/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/TASKS.md b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/TASKS.md similarity index 100% rename from src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/TASKS.md rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/TASKS.md diff --git a/src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/AGENTS.md b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Ubuntu.CSAF/AGENTS.md similarity index 100% rename from src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/AGENTS.md rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Ubuntu.CSAF/AGENTS.md diff --git a/src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/Configuration/UbuntuConnectorOptions.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Ubuntu.CSAF/Configuration/UbuntuConnectorOptions.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/Configuration/UbuntuConnectorOptions.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Ubuntu.CSAF/Configuration/UbuntuConnectorOptions.cs diff --git a/src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/Configuration/UbuntuConnectorOptionsValidator.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Ubuntu.CSAF/Configuration/UbuntuConnectorOptionsValidator.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/Configuration/UbuntuConnectorOptionsValidator.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Ubuntu.CSAF/Configuration/UbuntuConnectorOptionsValidator.cs diff --git 
a/src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/DependencyInjection/UbuntuConnectorServiceCollectionExtensions.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Ubuntu.CSAF/DependencyInjection/UbuntuConnectorServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/DependencyInjection/UbuntuConnectorServiceCollectionExtensions.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Ubuntu.CSAF/DependencyInjection/UbuntuConnectorServiceCollectionExtensions.cs diff --git a/src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/Metadata/UbuntuCatalogLoader.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Ubuntu.CSAF/Metadata/UbuntuCatalogLoader.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/Metadata/UbuntuCatalogLoader.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Ubuntu.CSAF/Metadata/UbuntuCatalogLoader.cs diff --git a/src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/StellaOps.Excititor.Connectors.Ubuntu.CSAF.csproj b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Ubuntu.CSAF/StellaOps.Excititor.Connectors.Ubuntu.CSAF.csproj similarity index 98% rename from src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/StellaOps.Excititor.Connectors.Ubuntu.CSAF.csproj rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Ubuntu.CSAF/StellaOps.Excititor.Connectors.Ubuntu.CSAF.csproj index c573f653..fee5f248 100644 --- a/src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/StellaOps.Excititor.Connectors.Ubuntu.CSAF.csproj +++ b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Ubuntu.CSAF/StellaOps.Excititor.Connectors.Ubuntu.CSAF.csproj @@ -1,20 +1,20 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Excititor.Connectors.Abstractions\StellaOps.Excititor.Connectors.Abstractions.csproj" /> - <ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" /> - <ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" /> - </ItemGroup> - <ItemGroup> - <PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="10.0.0-preview.7.25380.108" /> - <PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="System.IO.Abstractions" Version="20.0.28" /> - </ItemGroup> -</Project> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="..\StellaOps.Excititor.Connectors.Abstractions\StellaOps.Excititor.Connectors.Abstractions.csproj" /> + <ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" /> + <ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" /> + </ItemGroup> + <ItemGroup> + <PackageReference Include="Microsoft.Extensions.Caching.Memory" Version="10.0.0-preview.7.25380.108" /> + 
<PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="System.IO.Abstractions" Version="20.0.28" /> + </ItemGroup> +</Project> diff --git a/src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/TASKS.md b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Ubuntu.CSAF/TASKS.md similarity index 100% rename from src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/TASKS.md rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Ubuntu.CSAF/TASKS.md diff --git a/src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/UbuntuCsafConnector.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Ubuntu.CSAF/UbuntuCsafConnector.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.Ubuntu.CSAF/UbuntuCsafConnector.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Connectors.Ubuntu.CSAF/UbuntuCsafConnector.cs diff --git a/src/StellaOps.Excititor.Core/AGENTS.md b/src/Excititor/__Libraries/StellaOps.Excititor.Core/AGENTS.md similarity index 100% rename from src/StellaOps.Excititor.Core/AGENTS.md rename to src/Excititor/__Libraries/StellaOps.Excititor.Core/AGENTS.md diff --git a/src/StellaOps.Excititor.Core/Aoc/AocServiceCollectionExtensions.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Core/Aoc/AocServiceCollectionExtensions.cs similarity index 96% rename from src/StellaOps.Excititor.Core/Aoc/AocServiceCollectionExtensions.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Core/Aoc/AocServiceCollectionExtensions.cs index 98a67a6a..a505ac5a 100644 --- a/src/StellaOps.Excititor.Core/Aoc/AocServiceCollectionExtensions.cs +++ b/src/Excititor/__Libraries/StellaOps.Excititor.Core/Aoc/AocServiceCollectionExtensions.cs @@ -1,38 +1,38 @@ -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.DependencyInjection.Extensions; -using Microsoft.Extensions.Options; -using StellaOps.Aoc; - -namespace StellaOps.Excititor.Core.Aoc; - -public static class AocServiceCollectionExtensions -{ - /// <summary> - /// Registers Aggregation-Only Contract guard services for raw VEX ingestion. - /// </summary> - public static IServiceCollection AddExcititorAocGuards( - this IServiceCollection services, - Action<AocGuardOptions>? configure = null) - { - if (services is null) - { - throw new ArgumentNullException(nameof(services)); - } - - services.AddAocGuard(); - - if (configure is not null) - { - services.Configure(configure); - } - - services.TryAddSingleton<IVexRawWriteGuard>(sp => - { - var guard = sp.GetRequiredService<IAocGuard>(); - var options = sp.GetService<IOptions<AocGuardOptions>>(); - return new VexRawWriteGuard(guard, options); - }); - - return services; - } -} +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Microsoft.Extensions.Options; +using StellaOps.Aoc; + +namespace StellaOps.Excititor.Core.Aoc; + +public static class AocServiceCollectionExtensions +{ + /// <summary> + /// Registers Aggregation-Only Contract guard services for raw VEX ingestion. + /// </summary> + public static IServiceCollection AddExcititorAocGuards( + this IServiceCollection services, + Action<AocGuardOptions>? 
configure = null) + { + if (services is null) + { + throw new ArgumentNullException(nameof(services)); + } + + services.AddAocGuard(); + + if (configure is not null) + { + services.Configure(configure); + } + + services.TryAddSingleton<IVexRawWriteGuard>(sp => + { + var guard = sp.GetRequiredService<IAocGuard>(); + var options = sp.GetService<IOptions<AocGuardOptions>>(); + return new VexRawWriteGuard(guard, options); + }); + + return services; + } +} diff --git a/src/StellaOps.Excititor.Core/Aoc/ExcititorAocGuardException.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Core/Aoc/ExcititorAocGuardException.cs similarity index 97% rename from src/StellaOps.Excititor.Core/Aoc/ExcititorAocGuardException.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Core/Aoc/ExcititorAocGuardException.cs index 8289d59f..006988c9 100644 --- a/src/StellaOps.Excititor.Core/Aoc/ExcititorAocGuardException.cs +++ b/src/Excititor/__Libraries/StellaOps.Excititor.Core/Aoc/ExcititorAocGuardException.cs @@ -1,22 +1,22 @@ -using System.Collections.Immutable; -using StellaOps.Aoc; - -namespace StellaOps.Excititor.Core.Aoc; - -/// <summary> -/// Exception representing an Aggregation-Only Contract violation for raw VEX documents. -/// </summary> -public sealed class ExcititorAocGuardException : Exception -{ - public ExcititorAocGuardException(AocGuardResult result) - : base("AOC guard validation failed for the provided raw VEX document.") - { - Result = result ?? throw new ArgumentNullException(nameof(result)); - } - - public AocGuardResult Result { get; } - - public ImmutableArray<AocViolation> Violations => Result.Violations; - - public string PrimaryErrorCode => Violations.IsDefaultOrEmpty ? "ERR_AOC_000" : Violations[0].ErrorCode; -} +using System.Collections.Immutable; +using StellaOps.Aoc; + +namespace StellaOps.Excititor.Core.Aoc; + +/// <summary> +/// Exception representing an Aggregation-Only Contract violation for raw VEX documents. +/// </summary> +public sealed class ExcititorAocGuardException : Exception +{ + public ExcititorAocGuardException(AocGuardResult result) + : base("AOC guard validation failed for the provided raw VEX document.") + { + Result = result ?? throw new ArgumentNullException(nameof(result)); + } + + public AocGuardResult Result { get; } + + public ImmutableArray<AocViolation> Violations => Result.Violations; + + public string PrimaryErrorCode => Violations.IsDefaultOrEmpty ? "ERR_AOC_000" : Violations[0].ErrorCode; +} diff --git a/src/StellaOps.Excititor.Core/Aoc/IVexRawWriteGuard.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Core/Aoc/IVexRawWriteGuard.cs similarity index 97% rename from src/StellaOps.Excititor.Core/Aoc/IVexRawWriteGuard.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Core/Aoc/IVexRawWriteGuard.cs index 81630c80..7d3d6956 100644 --- a/src/StellaOps.Excititor.Core/Aoc/IVexRawWriteGuard.cs +++ b/src/Excititor/__Libraries/StellaOps.Excititor.Core/Aoc/IVexRawWriteGuard.cs @@ -1,16 +1,16 @@ -using RawVexDocument = StellaOps.Concelier.RawModels.VexRawDocument; - -namespace StellaOps.Excititor.Core.Aoc; - -/// <summary> -/// Validates raw VEX documents against the Aggregation-Only Contract (AOC) prior to persistence. -/// </summary> -public interface IVexRawWriteGuard -{ - /// <summary> - /// Ensures the supplied raw VEX document complies with the AOC guard rules. - /// Throws when violations are detected. 
- /// </summary> - /// <param name="document">Raw VEX document to validate.</param> - void EnsureValid(RawVexDocument document); -} +using RawVexDocument = StellaOps.Concelier.RawModels.VexRawDocument; + +namespace StellaOps.Excititor.Core.Aoc; + +/// <summary> +/// Validates raw VEX documents against the Aggregation-Only Contract (AOC) prior to persistence. +/// </summary> +public interface IVexRawWriteGuard +{ + /// <summary> + /// Ensures the supplied raw VEX document complies with the AOC guard rules. + /// Throws when violations are detected. + /// </summary> + /// <param name="document">Raw VEX document to validate.</param> + void EnsureValid(RawVexDocument document); +} diff --git a/src/StellaOps.Excititor.Core/Aoc/VexRawWriteGuard.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Core/Aoc/VexRawWriteGuard.cs similarity index 97% rename from src/StellaOps.Excititor.Core/Aoc/VexRawWriteGuard.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Core/Aoc/VexRawWriteGuard.cs index 12e1d4a6..13187a19 100644 --- a/src/StellaOps.Excititor.Core/Aoc/VexRawWriteGuard.cs +++ b/src/Excititor/__Libraries/StellaOps.Excititor.Core/Aoc/VexRawWriteGuard.cs @@ -1,35 +1,35 @@ -using System.Text.Json; -using Microsoft.Extensions.Options; -using StellaOps.Aoc; -using RawVexDocument = StellaOps.Concelier.RawModels.VexRawDocument; - -namespace StellaOps.Excititor.Core.Aoc; - -/// <summary> -/// Aggregation-Only Contract guard for raw VEX documents. -/// </summary> -public sealed class VexRawWriteGuard : IVexRawWriteGuard -{ - private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web); - - private readonly IAocGuard _guard; - private readonly AocGuardOptions _options; - - public VexRawWriteGuard(IAocGuard guard, IOptions<AocGuardOptions>? options = null) - { - _guard = guard ?? throw new ArgumentNullException(nameof(guard)); - _options = options?.Value ?? AocGuardOptions.Default; - } - - public void EnsureValid(RawVexDocument document) - { - ArgumentNullException.ThrowIfNull(document); - - using var payload = JsonDocument.Parse(JsonSerializer.Serialize(document, SerializerOptions)); - var result = _guard.Validate(payload.RootElement, _options); - if (!result.IsValid) - { - throw new ExcititorAocGuardException(result); - } - } -} +using System.Text.Json; +using Microsoft.Extensions.Options; +using StellaOps.Aoc; +using RawVexDocument = StellaOps.Concelier.RawModels.VexRawDocument; + +namespace StellaOps.Excititor.Core.Aoc; + +/// <summary> +/// Aggregation-Only Contract guard for raw VEX documents. +/// </summary> +public sealed class VexRawWriteGuard : IVexRawWriteGuard +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web); + + private readonly IAocGuard _guard; + private readonly AocGuardOptions _options; + + public VexRawWriteGuard(IAocGuard guard, IOptions<AocGuardOptions>? options = null) + { + _guard = guard ?? throw new ArgumentNullException(nameof(guard)); + _options = options?.Value ?? 
AocGuardOptions.Default; + } + + public void EnsureValid(RawVexDocument document) + { + ArgumentNullException.ThrowIfNull(document); + + using var payload = JsonDocument.Parse(JsonSerializer.Serialize(document, SerializerOptions)); + var result = _guard.Validate(payload.RootElement, _options); + if (!result.IsValid) + { + throw new ExcititorAocGuardException(result); + } + } +} diff --git a/src/StellaOps.Excititor.Core/BaselineVexConsensusPolicy.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Core/BaselineVexConsensusPolicy.cs similarity index 100% rename from src/StellaOps.Excititor.Core/BaselineVexConsensusPolicy.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Core/BaselineVexConsensusPolicy.cs diff --git a/src/StellaOps.Excititor.Core/IVexConsensusPolicy.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Core/IVexConsensusPolicy.cs similarity index 100% rename from src/StellaOps.Excititor.Core/IVexConsensusPolicy.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Core/IVexConsensusPolicy.cs diff --git a/src/StellaOps.Excititor.Core/MirrorDistributionOptions.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Core/MirrorDistributionOptions.cs similarity index 100% rename from src/StellaOps.Excititor.Core/MirrorDistributionOptions.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Core/MirrorDistributionOptions.cs diff --git a/src/StellaOps.Excititor.Core/MirrorExportPlanner.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Core/MirrorExportPlanner.cs similarity index 100% rename from src/StellaOps.Excititor.Core/MirrorExportPlanner.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Core/MirrorExportPlanner.cs diff --git a/src/StellaOps.Excititor.Core/Observations/IVexObservationLookup.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Core/Observations/IVexObservationLookup.cs similarity index 97% rename from src/StellaOps.Excititor.Core/Observations/IVexObservationLookup.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Core/Observations/IVexObservationLookup.cs index 216444aa..6ba64e08 100644 --- a/src/StellaOps.Excititor.Core/Observations/IVexObservationLookup.cs +++ b/src/Excititor/__Libraries/StellaOps.Excititor.Core/Observations/IVexObservationLookup.cs @@ -1,32 +1,32 @@ -using StellaOps.Excititor.Core; - -namespace StellaOps.Excititor.Core.Observations; - -/// <summary> -/// Abstraction over the VEX observation persistence layer used for overlay queries. -/// </summary> -public interface IVexObservationLookup -{ - /// <summary> - /// Lists the available VEX observations for the specified tenant. - /// </summary> - ValueTask<IReadOnlyList<VexObservation>> ListByTenantAsync( - string tenant, - CancellationToken cancellationToken); - - /// <summary> - /// Finds VEX observations matching the supplied filters. - /// </summary> - ValueTask<IReadOnlyList<VexObservation>> FindByFiltersAsync( - string tenant, - IReadOnlyCollection<string> observationIds, - IReadOnlyCollection<string> vulnerabilityIds, - IReadOnlyCollection<string> productKeys, - IReadOnlyCollection<string> purls, - IReadOnlyCollection<string> cpes, - IReadOnlyCollection<string> providerIds, - IReadOnlyCollection<VexClaimStatus> statuses, - VexObservationCursor? cursor, - int limit, - CancellationToken cancellationToken); -} +using StellaOps.Excititor.Core; + +namespace StellaOps.Excititor.Core.Observations; + +/// <summary> +/// Abstraction over the VEX observation persistence layer used for overlay queries. 
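
Aside (not part of the patch): the files above define the Excititor AOC guard surface that this commit relocates under src/Excititor/__Libraries/. The following is a minimal, hypothetical C# sketch of how a host might register and invoke that guard; the RawVexDocument instance is assumed to come from the existing Concelier raw-model pipeline and its construction is not shown.

using System;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Excititor.Core.Aoc;
using RawVexDocument = StellaOps.Concelier.RawModels.VexRawDocument;

static class AocGuardUsageSketch
{
    // Validates a raw VEX document before persistence; throws if the document
    // violates the Aggregation-Only Contract rules enforced by IVexRawWriteGuard.
    public static void ValidateBeforePersist(RawVexDocument rawDocument)
    {
        var services = new ServiceCollection();
        services.AddExcititorAocGuards();                    // registers IAocGuard + IVexRawWriteGuard

        using var provider = services.BuildServiceProvider();
        var guard = provider.GetRequiredService<IVexRawWriteGuard>();

        try
        {
            guard.EnsureValid(rawDocument);                  // throws ExcititorAocGuardException on violations
        }
        catch (ExcititorAocGuardException ex)
        {
            Console.WriteLine($"AOC violation {ex.PrimaryErrorCode}: {ex.Violations.Length} issue(s)");
            throw;
        }
    }
}
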
+/// </summary> +public interface IVexObservationLookup +{ + /// <summary> + /// Lists the available VEX observations for the specified tenant. + /// </summary> + ValueTask<IReadOnlyList<VexObservation>> ListByTenantAsync( + string tenant, + CancellationToken cancellationToken); + + /// <summary> + /// Finds VEX observations matching the supplied filters. + /// </summary> + ValueTask<IReadOnlyList<VexObservation>> FindByFiltersAsync( + string tenant, + IReadOnlyCollection<string> observationIds, + IReadOnlyCollection<string> vulnerabilityIds, + IReadOnlyCollection<string> productKeys, + IReadOnlyCollection<string> purls, + IReadOnlyCollection<string> cpes, + IReadOnlyCollection<string> providerIds, + IReadOnlyCollection<VexClaimStatus> statuses, + VexObservationCursor? cursor, + int limit, + CancellationToken cancellationToken); +} diff --git a/src/StellaOps.Excititor.Core/Observations/IVexObservationQueryService.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Core/Observations/IVexObservationQueryService.cs similarity index 96% rename from src/StellaOps.Excititor.Core/Observations/IVexObservationQueryService.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Core/Observations/IVexObservationQueryService.cs index aa825223..3380aa67 100644 --- a/src/StellaOps.Excititor.Core/Observations/IVexObservationQueryService.cs +++ b/src/Excititor/__Libraries/StellaOps.Excititor.Core/Observations/IVexObservationQueryService.cs @@ -1,11 +1,11 @@ -namespace StellaOps.Excititor.Core.Observations; - -/// <summary> -/// Queries raw VEX observations and returns overlay-friendly projections. -/// </summary> -public interface IVexObservationQueryService -{ - ValueTask<VexObservationQueryResult> QueryAsync( - VexObservationQueryOptions options, - CancellationToken cancellationToken); -} +namespace StellaOps.Excititor.Core.Observations; + +/// <summary> +/// Queries raw VEX observations and returns overlay-friendly projections. +/// </summary> +public interface IVexObservationQueryService +{ + ValueTask<VexObservationQueryResult> QueryAsync( + VexObservationQueryOptions options, + CancellationToken cancellationToken); +} diff --git a/src/StellaOps.Excititor.Core/Observations/VexObservation.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Core/Observations/VexObservation.cs similarity index 96% rename from src/StellaOps.Excititor.Core/Observations/VexObservation.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Core/Observations/VexObservation.cs index 108843a9..27101a43 100644 --- a/src/StellaOps.Excititor.Core/Observations/VexObservation.cs +++ b/src/Excititor/__Libraries/StellaOps.Excititor.Core/Observations/VexObservation.cs @@ -1,437 +1,437 @@ -using System.Collections.Immutable; -using System.Text.Json.Nodes; - -namespace StellaOps.Excititor.Core.Observations; - -/// <summary> -/// Immutable record describing a raw VEX observation produced by Excititor ingestion. -/// </summary> -public sealed record VexObservation -{ - public VexObservation( - string observationId, - string tenant, - string providerId, - string streamId, - VexObservationUpstream upstream, - ImmutableArray<VexObservationStatement> statements, - VexObservationContent content, - VexObservationLinkset linkset, - DateTimeOffset createdAt, - ImmutableArray<string>? supersedes = null, - ImmutableDictionary<string, string>? 
attributes = null) - { - ObservationId = EnsureNotNullOrWhiteSpace(observationId, nameof(observationId)); - Tenant = EnsureNotNullOrWhiteSpace(tenant, nameof(tenant)).ToLowerInvariant(); - ProviderId = EnsureNotNullOrWhiteSpace(providerId, nameof(providerId)).ToLowerInvariant(); - StreamId = EnsureNotNullOrWhiteSpace(streamId, nameof(streamId)); - Upstream = upstream ?? throw new ArgumentNullException(nameof(upstream)); - Statements = NormalizeStatements(statements); - Content = content ?? throw new ArgumentNullException(nameof(content)); - Linkset = linkset ?? throw new ArgumentNullException(nameof(linkset)); - CreatedAt = createdAt.ToUniversalTime(); - Supersedes = NormalizeSupersedes(supersedes); - Attributes = NormalizeAttributes(attributes); - } - - public string ObservationId { get; } - - public string Tenant { get; } - - public string ProviderId { get; } - - public string StreamId { get; } - - public VexObservationUpstream Upstream { get; } - - public ImmutableArray<VexObservationStatement> Statements { get; } - - public VexObservationContent Content { get; } - - public VexObservationLinkset Linkset { get; } - - public DateTimeOffset CreatedAt { get; } - - public ImmutableArray<string> Supersedes { get; } - - public ImmutableDictionary<string, string> Attributes { get; } - - private static ImmutableArray<VexObservationStatement> NormalizeStatements(ImmutableArray<VexObservationStatement> statements) - { - if (statements.IsDefault) - { - throw new ArgumentNullException(nameof(statements)); - } - - if (statements.Length == 0) - { - return ImmutableArray<VexObservationStatement>.Empty; - } - - return statements.ToImmutableArray(); - } - - private static ImmutableArray<string> NormalizeSupersedes(ImmutableArray<string>? supersedes) - { - if (!supersedes.HasValue || supersedes.Value.IsDefaultOrEmpty) - { - return ImmutableArray<string>.Empty; - } - - var set = new SortedSet<string>(StringComparer.Ordinal); - foreach (var value in supersedes.Value) - { - var normalized = TrimToNull(value); - if (normalized is null) - { - continue; - } - - set.Add(normalized); - } - - return set.Count == 0 ? ImmutableArray<string>.Empty : set.ToImmutableArray(); - } - - private static ImmutableDictionary<string, string> NormalizeAttributes(ImmutableDictionary<string, string>? attributes) - { - if (attributes is null || attributes.Count == 0) - { - return ImmutableDictionary<string, string>.Empty; - } - - var builder = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal); - foreach (var pair in attributes) - { - var key = TrimToNull(pair.Key); - if (key is null || pair.Value is null) - { - continue; - } - - builder[key] = pair.Value; - } - - return builder.ToImmutable(); - } - - internal static string EnsureNotNullOrWhiteSpace(string value, string name) - { - if (string.IsNullOrWhiteSpace(value)) - { - throw new ArgumentException($"{name} must be provided.", name); - } - - return value.Trim(); - } - - internal static string? TrimToNull(string? value) - => string.IsNullOrWhiteSpace(value) ? null : value.Trim(); -} - -public sealed record VexObservationUpstream -{ - public VexObservationUpstream( - string upstreamId, - string? documentVersion, - DateTimeOffset fetchedAt, - DateTimeOffset receivedAt, - string contentHash, - VexObservationSignature signature, - ImmutableDictionary<string, string>? 
metadata = null) - { - UpstreamId = VexObservation.EnsureNotNullOrWhiteSpace(upstreamId, nameof(upstreamId)); - DocumentVersion = VexObservation.TrimToNull(documentVersion); - FetchedAt = fetchedAt.ToUniversalTime(); - ReceivedAt = receivedAt.ToUniversalTime(); - ContentHash = VexObservation.EnsureNotNullOrWhiteSpace(contentHash, nameof(contentHash)); - Signature = signature ?? throw new ArgumentNullException(nameof(signature)); - Metadata = NormalizeMetadata(metadata); - } - - public string UpstreamId { get; } - - public string? DocumentVersion { get; } - - public DateTimeOffset FetchedAt { get; } - - public DateTimeOffset ReceivedAt { get; } - - public string ContentHash { get; } - - public VexObservationSignature Signature { get; } - - public ImmutableDictionary<string, string> Metadata { get; } - - private static ImmutableDictionary<string, string> NormalizeMetadata(ImmutableDictionary<string, string>? metadata) - { - if (metadata is null || metadata.Count == 0) - { - return ImmutableDictionary<string, string>.Empty; - } - - var builder = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal); - foreach (var pair in metadata) - { - var key = VexObservation.TrimToNull(pair.Key); - if (key is null || pair.Value is null) - { - continue; - } - - builder[key] = pair.Value; - } - - return builder.ToImmutable(); - } -} - -public sealed record VexObservationSignature -{ - public VexObservationSignature( - bool present, - string? format, - string? keyId, - string? signature) - { - Present = present; - Format = VexObservation.TrimToNull(format); - KeyId = VexObservation.TrimToNull(keyId); - Signature = VexObservation.TrimToNull(signature); - } - - public bool Present { get; } - - public string? Format { get; } - - public string? KeyId { get; } - - public string? Signature { get; } -} - -public sealed record VexObservationContent -{ - public VexObservationContent( - string format, - string? specVersion, - JsonNode raw, - ImmutableDictionary<string, string>? metadata = null) - { - Format = VexObservation.EnsureNotNullOrWhiteSpace(format, nameof(format)); - SpecVersion = VexObservation.TrimToNull(specVersion); - Raw = raw?.DeepClone() ?? throw new ArgumentNullException(nameof(raw)); - Metadata = NormalizeMetadata(metadata); - } - - public string Format { get; } - - public string? SpecVersion { get; } - - public JsonNode Raw { get; } - - public ImmutableDictionary<string, string> Metadata { get; } - - private static ImmutableDictionary<string, string> NormalizeMetadata(ImmutableDictionary<string, string>? metadata) - { - if (metadata is null || metadata.Count == 0) - { - return ImmutableDictionary<string, string>.Empty; - } - - var builder = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal); - foreach (var pair in metadata) - { - var key = VexObservation.TrimToNull(pair.Key); - if (key is null || pair.Value is null) - { - continue; - } - - builder[key] = pair.Value; - } - - return builder.ToImmutable(); - } -} - -public sealed record VexObservationStatement -{ - public VexObservationStatement( - string vulnerabilityId, - string productKey, - VexClaimStatus status, - DateTimeOffset? lastObserved, - string? locator = null, - VexJustification? justification = null, - string? introducedVersion = null, - string? fixedVersion = null, - string? purl = null, - string? cpe = null, - ImmutableArray<JsonNode>? evidence = null, - ImmutableDictionary<string, string>? 
metadata = null) - { - VulnerabilityId = VexObservation.EnsureNotNullOrWhiteSpace(vulnerabilityId, nameof(vulnerabilityId)); - ProductKey = VexObservation.EnsureNotNullOrWhiteSpace(productKey, nameof(productKey)); - Status = status; - LastObserved = lastObserved?.ToUniversalTime(); - Locator = VexObservation.TrimToNull(locator); - Justification = justification; - IntroducedVersion = VexObservation.TrimToNull(introducedVersion); - FixedVersion = VexObservation.TrimToNull(fixedVersion); - Purl = VexObservation.TrimToNull(purl); - Cpe = VexObservation.TrimToNull(cpe); - Evidence = NormalizeEvidence(evidence); - Metadata = NormalizeMetadata(metadata); - } - - public string VulnerabilityId { get; } - - public string ProductKey { get; } - - public VexClaimStatus Status { get; } - - public DateTimeOffset? LastObserved { get; } - - public string? Locator { get; } - - public VexJustification? Justification { get; } - - public string? IntroducedVersion { get; } - - public string? FixedVersion { get; } - - public string? Purl { get; } - - public string? Cpe { get; } - - public ImmutableArray<JsonNode> Evidence { get; } - - public ImmutableDictionary<string, string> Metadata { get; } - - private static ImmutableArray<JsonNode> NormalizeEvidence(ImmutableArray<JsonNode>? evidence) - { - if (!evidence.HasValue || evidence.Value.IsDefaultOrEmpty) - { - return ImmutableArray<JsonNode>.Empty; - } - - var builder = ImmutableArray.CreateBuilder<JsonNode>(evidence.Value.Length); - foreach (var node in evidence.Value) - { - if (node is null) - { - continue; - } - - builder.Add(node.DeepClone()); - } - - return builder.ToImmutable(); - } - - private static ImmutableDictionary<string, string> NormalizeMetadata(ImmutableDictionary<string, string>? metadata) - { - if (metadata is null || metadata.Count == 0) - { - return ImmutableDictionary<string, string>.Empty; - } - - var builder = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal); - foreach (var pair in metadata) - { - var key = VexObservation.TrimToNull(pair.Key); - if (key is null || pair.Value is null) - { - continue; - } - - builder[key] = pair.Value; - } - - return builder.ToImmutable(); - } -} - -public sealed record VexObservationLinkset -{ - public VexObservationLinkset( - IEnumerable<string>? aliases, - IEnumerable<string>? purls, - IEnumerable<string>? cpes, - IEnumerable<VexObservationReference>? references, - IEnumerable<string>? reconciledFrom = null) - { - Aliases = NormalizeSet(aliases, toLower: true); - Purls = NormalizeSet(purls, toLower: false); - Cpes = NormalizeSet(cpes, toLower: false); - References = NormalizeReferences(references); - ReconciledFrom = NormalizeSet(reconciledFrom, toLower: false); - } - - public ImmutableArray<string> Aliases { get; } - - public ImmutableArray<string> Purls { get; } - - public ImmutableArray<string> Cpes { get; } - - public ImmutableArray<VexObservationReference> References { get; } - - public ImmutableArray<string> ReconciledFrom { get; } - - private static ImmutableArray<string> NormalizeSet(IEnumerable<string>? values, bool toLower) - { - if (values is null) - { - return ImmutableArray<string>.Empty; - } - - var comparer = StringComparer.Ordinal; - var set = new SortedSet<string>(comparer); - foreach (var value in values) - { - var normalized = VexObservation.TrimToNull(value); - if (normalized is null) - { - continue; - } - - set.Add(toLower ? normalized.ToLowerInvariant() : normalized); - } - - return set.Count == 0 ? 
ImmutableArray<string>.Empty : set.ToImmutableArray(); - } - - private static ImmutableArray<VexObservationReference> NormalizeReferences(IEnumerable<VexObservationReference>? references) - { - if (references is null) - { - return ImmutableArray<VexObservationReference>.Empty; - } - - var set = new HashSet<VexObservationReference>(); - foreach (var reference in references) - { - if (reference is null) - { - continue; - } - - set.Add(reference); - } - - return set.Count == 0 ? ImmutableArray<VexObservationReference>.Empty : set.ToImmutableArray(); - } -} - -public sealed record VexObservationReference -{ - public VexObservationReference(string type, string url) - { - Type = VexObservation.EnsureNotNullOrWhiteSpace(type, nameof(type)); - Url = VexObservation.EnsureNotNullOrWhiteSpace(url, nameof(url)); - } - - public string Type { get; } - - public string Url { get; } -} +using System.Collections.Immutable; +using System.Text.Json.Nodes; + +namespace StellaOps.Excititor.Core.Observations; + +/// <summary> +/// Immutable record describing a raw VEX observation produced by Excititor ingestion. +/// </summary> +public sealed record VexObservation +{ + public VexObservation( + string observationId, + string tenant, + string providerId, + string streamId, + VexObservationUpstream upstream, + ImmutableArray<VexObservationStatement> statements, + VexObservationContent content, + VexObservationLinkset linkset, + DateTimeOffset createdAt, + ImmutableArray<string>? supersedes = null, + ImmutableDictionary<string, string>? attributes = null) + { + ObservationId = EnsureNotNullOrWhiteSpace(observationId, nameof(observationId)); + Tenant = EnsureNotNullOrWhiteSpace(tenant, nameof(tenant)).ToLowerInvariant(); + ProviderId = EnsureNotNullOrWhiteSpace(providerId, nameof(providerId)).ToLowerInvariant(); + StreamId = EnsureNotNullOrWhiteSpace(streamId, nameof(streamId)); + Upstream = upstream ?? throw new ArgumentNullException(nameof(upstream)); + Statements = NormalizeStatements(statements); + Content = content ?? throw new ArgumentNullException(nameof(content)); + Linkset = linkset ?? throw new ArgumentNullException(nameof(linkset)); + CreatedAt = createdAt.ToUniversalTime(); + Supersedes = NormalizeSupersedes(supersedes); + Attributes = NormalizeAttributes(attributes); + } + + public string ObservationId { get; } + + public string Tenant { get; } + + public string ProviderId { get; } + + public string StreamId { get; } + + public VexObservationUpstream Upstream { get; } + + public ImmutableArray<VexObservationStatement> Statements { get; } + + public VexObservationContent Content { get; } + + public VexObservationLinkset Linkset { get; } + + public DateTimeOffset CreatedAt { get; } + + public ImmutableArray<string> Supersedes { get; } + + public ImmutableDictionary<string, string> Attributes { get; } + + private static ImmutableArray<VexObservationStatement> NormalizeStatements(ImmutableArray<VexObservationStatement> statements) + { + if (statements.IsDefault) + { + throw new ArgumentNullException(nameof(statements)); + } + + if (statements.Length == 0) + { + return ImmutableArray<VexObservationStatement>.Empty; + } + + return statements.ToImmutableArray(); + } + + private static ImmutableArray<string> NormalizeSupersedes(ImmutableArray<string>? 
supersedes) + { + if (!supersedes.HasValue || supersedes.Value.IsDefaultOrEmpty) + { + return ImmutableArray<string>.Empty; + } + + var set = new SortedSet<string>(StringComparer.Ordinal); + foreach (var value in supersedes.Value) + { + var normalized = TrimToNull(value); + if (normalized is null) + { + continue; + } + + set.Add(normalized); + } + + return set.Count == 0 ? ImmutableArray<string>.Empty : set.ToImmutableArray(); + } + + private static ImmutableDictionary<string, string> NormalizeAttributes(ImmutableDictionary<string, string>? attributes) + { + if (attributes is null || attributes.Count == 0) + { + return ImmutableDictionary<string, string>.Empty; + } + + var builder = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal); + foreach (var pair in attributes) + { + var key = TrimToNull(pair.Key); + if (key is null || pair.Value is null) + { + continue; + } + + builder[key] = pair.Value; + } + + return builder.ToImmutable(); + } + + internal static string EnsureNotNullOrWhiteSpace(string value, string name) + { + if (string.IsNullOrWhiteSpace(value)) + { + throw new ArgumentException($"{name} must be provided.", name); + } + + return value.Trim(); + } + + internal static string? TrimToNull(string? value) + => string.IsNullOrWhiteSpace(value) ? null : value.Trim(); +} + +public sealed record VexObservationUpstream +{ + public VexObservationUpstream( + string upstreamId, + string? documentVersion, + DateTimeOffset fetchedAt, + DateTimeOffset receivedAt, + string contentHash, + VexObservationSignature signature, + ImmutableDictionary<string, string>? metadata = null) + { + UpstreamId = VexObservation.EnsureNotNullOrWhiteSpace(upstreamId, nameof(upstreamId)); + DocumentVersion = VexObservation.TrimToNull(documentVersion); + FetchedAt = fetchedAt.ToUniversalTime(); + ReceivedAt = receivedAt.ToUniversalTime(); + ContentHash = VexObservation.EnsureNotNullOrWhiteSpace(contentHash, nameof(contentHash)); + Signature = signature ?? throw new ArgumentNullException(nameof(signature)); + Metadata = NormalizeMetadata(metadata); + } + + public string UpstreamId { get; } + + public string? DocumentVersion { get; } + + public DateTimeOffset FetchedAt { get; } + + public DateTimeOffset ReceivedAt { get; } + + public string ContentHash { get; } + + public VexObservationSignature Signature { get; } + + public ImmutableDictionary<string, string> Metadata { get; } + + private static ImmutableDictionary<string, string> NormalizeMetadata(ImmutableDictionary<string, string>? metadata) + { + if (metadata is null || metadata.Count == 0) + { + return ImmutableDictionary<string, string>.Empty; + } + + var builder = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal); + foreach (var pair in metadata) + { + var key = VexObservation.TrimToNull(pair.Key); + if (key is null || pair.Value is null) + { + continue; + } + + builder[key] = pair.Value; + } + + return builder.ToImmutable(); + } +} + +public sealed record VexObservationSignature +{ + public VexObservationSignature( + bool present, + string? format, + string? keyId, + string? signature) + { + Present = present; + Format = VexObservation.TrimToNull(format); + KeyId = VexObservation.TrimToNull(keyId); + Signature = VexObservation.TrimToNull(signature); + } + + public bool Present { get; } + + public string? Format { get; } + + public string? KeyId { get; } + + public string? Signature { get; } +} + +public sealed record VexObservationContent +{ + public VexObservationContent( + string format, + string? 
specVersion, + JsonNode raw, + ImmutableDictionary<string, string>? metadata = null) + { + Format = VexObservation.EnsureNotNullOrWhiteSpace(format, nameof(format)); + SpecVersion = VexObservation.TrimToNull(specVersion); + Raw = raw?.DeepClone() ?? throw new ArgumentNullException(nameof(raw)); + Metadata = NormalizeMetadata(metadata); + } + + public string Format { get; } + + public string? SpecVersion { get; } + + public JsonNode Raw { get; } + + public ImmutableDictionary<string, string> Metadata { get; } + + private static ImmutableDictionary<string, string> NormalizeMetadata(ImmutableDictionary<string, string>? metadata) + { + if (metadata is null || metadata.Count == 0) + { + return ImmutableDictionary<string, string>.Empty; + } + + var builder = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal); + foreach (var pair in metadata) + { + var key = VexObservation.TrimToNull(pair.Key); + if (key is null || pair.Value is null) + { + continue; + } + + builder[key] = pair.Value; + } + + return builder.ToImmutable(); + } +} + +public sealed record VexObservationStatement +{ + public VexObservationStatement( + string vulnerabilityId, + string productKey, + VexClaimStatus status, + DateTimeOffset? lastObserved, + string? locator = null, + VexJustification? justification = null, + string? introducedVersion = null, + string? fixedVersion = null, + string? purl = null, + string? cpe = null, + ImmutableArray<JsonNode>? evidence = null, + ImmutableDictionary<string, string>? metadata = null) + { + VulnerabilityId = VexObservation.EnsureNotNullOrWhiteSpace(vulnerabilityId, nameof(vulnerabilityId)); + ProductKey = VexObservation.EnsureNotNullOrWhiteSpace(productKey, nameof(productKey)); + Status = status; + LastObserved = lastObserved?.ToUniversalTime(); + Locator = VexObservation.TrimToNull(locator); + Justification = justification; + IntroducedVersion = VexObservation.TrimToNull(introducedVersion); + FixedVersion = VexObservation.TrimToNull(fixedVersion); + Purl = VexObservation.TrimToNull(purl); + Cpe = VexObservation.TrimToNull(cpe); + Evidence = NormalizeEvidence(evidence); + Metadata = NormalizeMetadata(metadata); + } + + public string VulnerabilityId { get; } + + public string ProductKey { get; } + + public VexClaimStatus Status { get; } + + public DateTimeOffset? LastObserved { get; } + + public string? Locator { get; } + + public VexJustification? Justification { get; } + + public string? IntroducedVersion { get; } + + public string? FixedVersion { get; } + + public string? Purl { get; } + + public string? Cpe { get; } + + public ImmutableArray<JsonNode> Evidence { get; } + + public ImmutableDictionary<string, string> Metadata { get; } + + private static ImmutableArray<JsonNode> NormalizeEvidence(ImmutableArray<JsonNode>? evidence) + { + if (!evidence.HasValue || evidence.Value.IsDefaultOrEmpty) + { + return ImmutableArray<JsonNode>.Empty; + } + + var builder = ImmutableArray.CreateBuilder<JsonNode>(evidence.Value.Length); + foreach (var node in evidence.Value) + { + if (node is null) + { + continue; + } + + builder.Add(node.DeepClone()); + } + + return builder.ToImmutable(); + } + + private static ImmutableDictionary<string, string> NormalizeMetadata(ImmutableDictionary<string, string>? 
metadata) + { + if (metadata is null || metadata.Count == 0) + { + return ImmutableDictionary<string, string>.Empty; + } + + var builder = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal); + foreach (var pair in metadata) + { + var key = VexObservation.TrimToNull(pair.Key); + if (key is null || pair.Value is null) + { + continue; + } + + builder[key] = pair.Value; + } + + return builder.ToImmutable(); + } +} + +public sealed record VexObservationLinkset +{ + public VexObservationLinkset( + IEnumerable<string>? aliases, + IEnumerable<string>? purls, + IEnumerable<string>? cpes, + IEnumerable<VexObservationReference>? references, + IEnumerable<string>? reconciledFrom = null) + { + Aliases = NormalizeSet(aliases, toLower: true); + Purls = NormalizeSet(purls, toLower: false); + Cpes = NormalizeSet(cpes, toLower: false); + References = NormalizeReferences(references); + ReconciledFrom = NormalizeSet(reconciledFrom, toLower: false); + } + + public ImmutableArray<string> Aliases { get; } + + public ImmutableArray<string> Purls { get; } + + public ImmutableArray<string> Cpes { get; } + + public ImmutableArray<VexObservationReference> References { get; } + + public ImmutableArray<string> ReconciledFrom { get; } + + private static ImmutableArray<string> NormalizeSet(IEnumerable<string>? values, bool toLower) + { + if (values is null) + { + return ImmutableArray<string>.Empty; + } + + var comparer = StringComparer.Ordinal; + var set = new SortedSet<string>(comparer); + foreach (var value in values) + { + var normalized = VexObservation.TrimToNull(value); + if (normalized is null) + { + continue; + } + + set.Add(toLower ? normalized.ToLowerInvariant() : normalized); + } + + return set.Count == 0 ? ImmutableArray<string>.Empty : set.ToImmutableArray(); + } + + private static ImmutableArray<VexObservationReference> NormalizeReferences(IEnumerable<VexObservationReference>? references) + { + if (references is null) + { + return ImmutableArray<VexObservationReference>.Empty; + } + + var set = new HashSet<VexObservationReference>(); + foreach (var reference in references) + { + if (reference is null) + { + continue; + } + + set.Add(reference); + } + + return set.Count == 0 ? ImmutableArray<VexObservationReference>.Empty : set.ToImmutableArray(); + } +} + +public sealed record VexObservationReference +{ + public VexObservationReference(string type, string url) + { + Type = VexObservation.EnsureNotNullOrWhiteSpace(type, nameof(type)); + Url = VexObservation.EnsureNotNullOrWhiteSpace(url, nameof(url)); + } + + public string Type { get; } + + public string Url { get; } +} diff --git a/src/StellaOps.Excititor.Core/Observations/VexObservationQueryModels.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Core/Observations/VexObservationQueryModels.cs similarity index 97% rename from src/StellaOps.Excititor.Core/Observations/VexObservationQueryModels.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Core/Observations/VexObservationQueryModels.cs index 0e534bcb..bb6309b5 100644 --- a/src/StellaOps.Excititor.Core/Observations/VexObservationQueryModels.cs +++ b/src/Excititor/__Libraries/StellaOps.Excititor.Core/Observations/VexObservationQueryModels.cs @@ -1,79 +1,79 @@ -using System.Collections.Immutable; -using StellaOps.Excititor.Core; - -namespace StellaOps.Excititor.Core.Observations; - -/// <summary> -/// Query options for retrieving VEX observations scoped to a tenant. 
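
Aside (not part of the patch): a small, hypothetical sketch of the normalization VexObservationLinkset performs on its inputs — values are trimmed and de-duplicated, aliases are additionally lower-cased, and duplicate references collapse via record equality. The alias, purl, and URL values below are illustrative placeholders only.

using System;
using StellaOps.Excititor.Core.Observations;

var linkset = new VexObservationLinkset(
    aliases: new[] { " CVE-2025-0001 ", "cve-2025-0001" },   // trimmed + lower-cased, so both collapse to one entry
    purls: new[] { "pkg:rpm/redhat/openssl@3.0.7" },
    cpes: null,                                              // null inputs normalize to an empty array
    references: new[]
    {
        new VexObservationReference("advisory", "https://example.test/rhsa-2025-0001"),
        new VexObservationReference("advisory", "https://example.test/rhsa-2025-0001"), // record equality de-duplicates
    });

Console.WriteLine(string.Join(", ", linkset.Aliases));       // cve-2025-0001
Console.WriteLine(linkset.References.Length);                // 1
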
-/// </summary> -public sealed record VexObservationQueryOptions -{ - public VexObservationQueryOptions( - string tenant, - IReadOnlyCollection<string>? observationIds = null, - IReadOnlyCollection<string>? vulnerabilityIds = null, - IReadOnlyCollection<string>? productKeys = null, - IReadOnlyCollection<string>? purls = null, - IReadOnlyCollection<string>? cpes = null, - IReadOnlyCollection<string>? providerIds = null, - IReadOnlyCollection<VexClaimStatus>? statuses = null, - int? limit = null, - string? cursor = null) - { - Tenant = VexObservation.EnsureNotNullOrWhiteSpace(tenant, nameof(tenant)); - ObservationIds = observationIds ?? Array.Empty<string>(); - VulnerabilityIds = vulnerabilityIds ?? Array.Empty<string>(); - ProductKeys = productKeys ?? Array.Empty<string>(); - Purls = purls ?? Array.Empty<string>(); - Cpes = cpes ?? Array.Empty<string>(); - ProviderIds = providerIds ?? Array.Empty<string>(); - Statuses = statuses ?? Array.Empty<VexClaimStatus>(); - Limit = limit; - Cursor = cursor; - } - - public string Tenant { get; } - - public IReadOnlyCollection<string> ObservationIds { get; } - - public IReadOnlyCollection<string> VulnerabilityIds { get; } - - public IReadOnlyCollection<string> ProductKeys { get; } - - public IReadOnlyCollection<string> Purls { get; } - - public IReadOnlyCollection<string> Cpes { get; } - - public IReadOnlyCollection<string> ProviderIds { get; } - - public IReadOnlyCollection<VexClaimStatus> Statuses { get; } - - public int? Limit { get; } - - public string? Cursor { get; } -} - -/// <summary> -/// Cursor used for pagination. -/// </summary> -public sealed record VexObservationCursor(DateTimeOffset CreatedAt, string ObservationId); - -/// <summary> -/// Query result returning observations and an aggregate summary. -/// </summary> -public sealed record VexObservationQueryResult( - ImmutableArray<VexObservation> Observations, - VexObservationAggregate Aggregate, - string? NextCursor, - bool HasMore); - -/// <summary> -/// Aggregate metadata calculated from the returned observations. -/// </summary> -public sealed record VexObservationAggregate( - ImmutableArray<string> VulnerabilityIds, - ImmutableArray<string> ProductKeys, - ImmutableArray<string> Purls, - ImmutableArray<string> Cpes, - ImmutableArray<VexObservationReference> References, - ImmutableArray<string> ProviderIds); +using System.Collections.Immutable; +using StellaOps.Excititor.Core; + +namespace StellaOps.Excititor.Core.Observations; + +/// <summary> +/// Query options for retrieving VEX observations scoped to a tenant. +/// </summary> +public sealed record VexObservationQueryOptions +{ + public VexObservationQueryOptions( + string tenant, + IReadOnlyCollection<string>? observationIds = null, + IReadOnlyCollection<string>? vulnerabilityIds = null, + IReadOnlyCollection<string>? productKeys = null, + IReadOnlyCollection<string>? purls = null, + IReadOnlyCollection<string>? cpes = null, + IReadOnlyCollection<string>? providerIds = null, + IReadOnlyCollection<VexClaimStatus>? statuses = null, + int? limit = null, + string? cursor = null) + { + Tenant = VexObservation.EnsureNotNullOrWhiteSpace(tenant, nameof(tenant)); + ObservationIds = observationIds ?? Array.Empty<string>(); + VulnerabilityIds = vulnerabilityIds ?? Array.Empty<string>(); + ProductKeys = productKeys ?? Array.Empty<string>(); + Purls = purls ?? Array.Empty<string>(); + Cpes = cpes ?? Array.Empty<string>(); + ProviderIds = providerIds ?? Array.Empty<string>(); + Statuses = statuses ?? 
Array.Empty<VexClaimStatus>(); + Limit = limit; + Cursor = cursor; + } + + public string Tenant { get; } + + public IReadOnlyCollection<string> ObservationIds { get; } + + public IReadOnlyCollection<string> VulnerabilityIds { get; } + + public IReadOnlyCollection<string> ProductKeys { get; } + + public IReadOnlyCollection<string> Purls { get; } + + public IReadOnlyCollection<string> Cpes { get; } + + public IReadOnlyCollection<string> ProviderIds { get; } + + public IReadOnlyCollection<VexClaimStatus> Statuses { get; } + + public int? Limit { get; } + + public string? Cursor { get; } +} + +/// <summary> +/// Cursor used for pagination. +/// </summary> +public sealed record VexObservationCursor(DateTimeOffset CreatedAt, string ObservationId); + +/// <summary> +/// Query result returning observations and an aggregate summary. +/// </summary> +public sealed record VexObservationQueryResult( + ImmutableArray<VexObservation> Observations, + VexObservationAggregate Aggregate, + string? NextCursor, + bool HasMore); + +/// <summary> +/// Aggregate metadata calculated from the returned observations. +/// </summary> +public sealed record VexObservationAggregate( + ImmutableArray<string> VulnerabilityIds, + ImmutableArray<string> ProductKeys, + ImmutableArray<string> Purls, + ImmutableArray<string> Cpes, + ImmutableArray<VexObservationReference> References, + ImmutableArray<string> ProviderIds); diff --git a/src/StellaOps.Excititor.Core/Observations/VexObservationQueryService.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Core/Observations/VexObservationQueryService.cs similarity index 97% rename from src/StellaOps.Excititor.Core/Observations/VexObservationQueryService.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Core/Observations/VexObservationQueryService.cs index fc2579ec..128d16a5 100644 --- a/src/StellaOps.Excititor.Core/Observations/VexObservationQueryService.cs +++ b/src/Excititor/__Libraries/StellaOps.Excititor.Core/Observations/VexObservationQueryService.cs @@ -1,311 +1,311 @@ -using System.Collections.Immutable; -using System.Globalization; -using System.Text; -using StellaOps.Excititor.Core; - -namespace StellaOps.Excititor.Core.Observations; - -/// <summary> -/// Default implementation of <see cref="IVexObservationQueryService"/> that projects raw VEX observations for overlay consumers. -/// </summary> -public sealed class VexObservationQueryService : IVexObservationQueryService -{ - private const int DefaultPageSize = 200; - private const int MaxPageSize = 500; - - private readonly IVexObservationLookup _lookup; - - public VexObservationQueryService(IVexObservationLookup lookup) - { - _lookup = lookup ?? 
throw new ArgumentNullException(nameof(lookup)); - } - - public async ValueTask<VexObservationQueryResult> QueryAsync( - VexObservationQueryOptions options, - CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(options); - cancellationToken.ThrowIfCancellationRequested(); - - var tenant = NormalizeTenant(options.Tenant); - var observationIds = NormalizeSet(options.ObservationIds, static value => value, StringComparer.Ordinal); - var vulnerabilityIds = NormalizeSet(options.VulnerabilityIds, static value => value.ToLowerInvariant(), StringComparer.Ordinal); - var productKeys = NormalizeSet(options.ProductKeys, static value => value.ToLowerInvariant(), StringComparer.Ordinal); - var purls = NormalizeSet(options.Purls, static value => value.ToLowerInvariant(), StringComparer.Ordinal); - var cpes = NormalizeSet(options.Cpes, static value => value.ToLowerInvariant(), StringComparer.Ordinal); - var providerIds = NormalizeSet(options.ProviderIds, static value => value.ToLowerInvariant(), StringComparer.Ordinal); - var statuses = NormalizeStatuses(options.Statuses); - - var limit = NormalizeLimit(options.Limit); - var fetchSize = checked(limit + 1); - var cursor = DecodeCursor(options.Cursor); - - var observations = await _lookup - .FindByFiltersAsync( - tenant, - observationIds, - vulnerabilityIds, - productKeys, - purls, - cpes, - providerIds, - statuses, - cursor, - fetchSize, - cancellationToken) - .ConfigureAwait(false); - - var ordered = observations - .Where(observation => Matches(observation, observationIds, vulnerabilityIds, productKeys, purls, cpes, providerIds, statuses)) - .OrderByDescending(static observation => observation.CreatedAt) - .ThenBy(static observation => observation.ObservationId, StringComparer.Ordinal) - .ToImmutableArray(); - - var hasMore = ordered.Length > limit; - var page = hasMore ? ordered.Take(limit).ToImmutableArray() : ordered; - var nextCursor = hasMore ? EncodeCursor(page[^1]) : null; - var aggregate = BuildAggregate(page); - - return new VexObservationQueryResult(page, aggregate, nextCursor, hasMore); - } - - private static string NormalizeTenant(string tenant) - => VexObservation.EnsureNotNullOrWhiteSpace(tenant, nameof(tenant)).ToLowerInvariant(); - - private static ImmutableHashSet<string> NormalizeSet( - IEnumerable<string>? values, - Func<string, string> projector, - StringComparer comparer) - { - if (values is null) - { - return ImmutableHashSet<string>.Empty; - } - - var builder = ImmutableHashSet.CreateBuilder<string>(comparer); - foreach (var value in values) - { - var normalized = VexObservation.TrimToNull(value); - if (normalized is null) - { - continue; - } - - builder.Add(projector(normalized)); - } - - return builder.ToImmutable(); - } - - private static ImmutableHashSet<VexClaimStatus> NormalizeStatuses(IEnumerable<VexClaimStatus>? statuses) - { - if (statuses is null) - { - return ImmutableHashSet<VexClaimStatus>.Empty; - } - - return statuses.Aggregate( - ImmutableHashSet<VexClaimStatus>.Empty, - static (set, status) => set.Add(status)); - } - - private static int NormalizeLimit(int? limit) - { - if (!limit.HasValue || limit.Value <= 0) - { - return DefaultPageSize; - } - - return Math.Min(limit.Value, MaxPageSize); - } - - private static VexObservationCursor? DecodeCursor(string? 
cursor) - { - if (string.IsNullOrWhiteSpace(cursor)) - { - return null; - } - - try - { - var decoded = Convert.FromBase64String(cursor.Trim()); - var payload = Encoding.UTF8.GetString(decoded); - var separator = payload.IndexOf(':'); - if (separator <= 0 || separator >= payload.Length - 1) - { - throw new FormatException("Cursor is malformed."); - } - - if (!long.TryParse(payload.AsSpan(0, separator), NumberStyles.Integer, CultureInfo.InvariantCulture, out var ticks)) - { - throw new FormatException("Cursor timestamp is invalid."); - } - - var createdAt = new DateTimeOffset(DateTime.SpecifyKind(new DateTime(ticks), DateTimeKind.Utc)); - var observationId = payload[(separator + 1)..]; - if (string.IsNullOrWhiteSpace(observationId)) - { - throw new FormatException("Cursor observation id is missing."); - } - - return new VexObservationCursor(createdAt, observationId); - } - catch (FormatException) - { - throw; - } - catch (Exception ex) - { - throw new FormatException("Cursor is malformed.", ex); - } - } - - private static string? EncodeCursor(VexObservation observation) - { - if (observation is null) - { - return null; - } - - var payload = $"{observation.CreatedAt.UtcTicks.ToString(CultureInfo.InvariantCulture)}:{observation.ObservationId}"; - return Convert.ToBase64String(Encoding.UTF8.GetBytes(payload)); - } - - private static bool Matches( - VexObservation observation, - ImmutableHashSet<string> observationIds, - ImmutableHashSet<string> vulnerabilities, - ImmutableHashSet<string> productKeys, - ImmutableHashSet<string> purls, - ImmutableHashSet<string> cpes, - ImmutableHashSet<string> providerIds, - ImmutableHashSet<VexClaimStatus> statuses) - { - ArgumentNullException.ThrowIfNull(observation); - - if (observationIds.Count > 0 && !observationIds.Contains(observation.ObservationId)) - { - return false; - } - - if (providerIds.Count > 0 && !providerIds.Contains(observation.ProviderId.ToLowerInvariant())) - { - return false; - } - - if (!MatchesStatements(observation, vulnerabilities, productKeys, statuses)) - { - return false; - } - - if (purls.Count > 0 && !observation.Linkset.Purls.Any(purl => purls.Contains(purl.ToLowerInvariant()))) - { - return false; - } - - if (cpes.Count > 0 && !observation.Linkset.Cpes.Any(cpe => cpes.Contains(cpe.ToLowerInvariant()))) - { - return false; - } - - return true; - } - - private static bool MatchesStatements( - VexObservation observation, - ImmutableHashSet<string> vulnerabilities, - ImmutableHashSet<string> productKeys, - ImmutableHashSet<VexClaimStatus> statuses) - { - if (vulnerabilities.Count == 0 && productKeys.Count == 0 && statuses.Count == 0) - { - return true; - } - - foreach (var statement in observation.Statements) - { - var vulnerabilityMatches = vulnerabilities.Count == 0 - || vulnerabilities.Contains(statement.VulnerabilityId.ToLowerInvariant()); - - var productMatches = productKeys.Count == 0 - || productKeys.Contains(statement.ProductKey.ToLowerInvariant()); - - var statusMatches = statuses.Count == 0 - || statuses.Contains(statement.Status); - - if (vulnerabilityMatches && productMatches && statusMatches) - { - return true; - } - } - - return false; - } - - private static VexObservationAggregate BuildAggregate(ImmutableArray<VexObservation> observations) - { - if (observations.IsDefaultOrEmpty) - { - return new VexObservationAggregate( - ImmutableArray<string>.Empty, - ImmutableArray<string>.Empty, - ImmutableArray<string>.Empty, - ImmutableArray<string>.Empty, - ImmutableArray<VexObservationReference>.Empty, - 
ImmutableArray<string>.Empty); - } - - var vulnerabilitySet = new HashSet<string>(StringComparer.Ordinal); - var productSet = new HashSet<string>(StringComparer.Ordinal); - var purlSet = new HashSet<string>(StringComparer.Ordinal); - var cpeSet = new HashSet<string>(StringComparer.Ordinal); - var referenceSet = new HashSet<VexObservationReference>(); - var providerSet = new HashSet<string>(StringComparer.OrdinalIgnoreCase); - - foreach (var observation in observations) - { - providerSet.Add(observation.ProviderId); - - foreach (var statement in observation.Statements) - { - vulnerabilitySet.Add(statement.VulnerabilityId); - productSet.Add(statement.ProductKey); - if (!string.IsNullOrWhiteSpace(statement.Purl)) - { - purlSet.Add(statement.Purl); - } - - if (!string.IsNullOrWhiteSpace(statement.Cpe)) - { - cpeSet.Add(statement.Cpe); - } - } - - foreach (var purl in observation.Linkset.Purls) - { - purlSet.Add(purl); - } - - foreach (var cpe in observation.Linkset.Cpes) - { - cpeSet.Add(cpe); - } - - foreach (var reference in observation.Linkset.References) - { - referenceSet.Add(reference); - } - } - - return new VexObservationAggregate( - vulnerabilitySet.OrderBy(static v => v, StringComparer.Ordinal).ToImmutableArray(), - productSet.OrderBy(static p => p, StringComparer.Ordinal).ToImmutableArray(), - purlSet.OrderBy(static p => p, StringComparer.Ordinal).ToImmutableArray(), - cpeSet.OrderBy(static c => c, StringComparer.Ordinal).ToImmutableArray(), - referenceSet - .OrderBy(static reference => reference.Type, StringComparer.Ordinal) - .ThenBy(static reference => reference.Url, StringComparer.Ordinal) - .ToImmutableArray(), - providerSet.OrderBy(static provider => provider, StringComparer.OrdinalIgnoreCase).ToImmutableArray()); - } -} +using System.Collections.Immutable; +using System.Globalization; +using System.Text; +using StellaOps.Excititor.Core; + +namespace StellaOps.Excititor.Core.Observations; + +/// <summary> +/// Default implementation of <see cref="IVexObservationQueryService"/> that projects raw VEX observations for overlay consumers. +/// </summary> +public sealed class VexObservationQueryService : IVexObservationQueryService +{ + private const int DefaultPageSize = 200; + private const int MaxPageSize = 500; + + private readonly IVexObservationLookup _lookup; + + public VexObservationQueryService(IVexObservationLookup lookup) + { + _lookup = lookup ?? 
throw new ArgumentNullException(nameof(lookup)); + } + + public async ValueTask<VexObservationQueryResult> QueryAsync( + VexObservationQueryOptions options, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(options); + cancellationToken.ThrowIfCancellationRequested(); + + var tenant = NormalizeTenant(options.Tenant); + var observationIds = NormalizeSet(options.ObservationIds, static value => value, StringComparer.Ordinal); + var vulnerabilityIds = NormalizeSet(options.VulnerabilityIds, static value => value.ToLowerInvariant(), StringComparer.Ordinal); + var productKeys = NormalizeSet(options.ProductKeys, static value => value.ToLowerInvariant(), StringComparer.Ordinal); + var purls = NormalizeSet(options.Purls, static value => value.ToLowerInvariant(), StringComparer.Ordinal); + var cpes = NormalizeSet(options.Cpes, static value => value.ToLowerInvariant(), StringComparer.Ordinal); + var providerIds = NormalizeSet(options.ProviderIds, static value => value.ToLowerInvariant(), StringComparer.Ordinal); + var statuses = NormalizeStatuses(options.Statuses); + + var limit = NormalizeLimit(options.Limit); + var fetchSize = checked(limit + 1); + var cursor = DecodeCursor(options.Cursor); + + var observations = await _lookup + .FindByFiltersAsync( + tenant, + observationIds, + vulnerabilityIds, + productKeys, + purls, + cpes, + providerIds, + statuses, + cursor, + fetchSize, + cancellationToken) + .ConfigureAwait(false); + + var ordered = observations + .Where(observation => Matches(observation, observationIds, vulnerabilityIds, productKeys, purls, cpes, providerIds, statuses)) + .OrderByDescending(static observation => observation.CreatedAt) + .ThenBy(static observation => observation.ObservationId, StringComparer.Ordinal) + .ToImmutableArray(); + + var hasMore = ordered.Length > limit; + var page = hasMore ? ordered.Take(limit).ToImmutableArray() : ordered; + var nextCursor = hasMore ? EncodeCursor(page[^1]) : null; + var aggregate = BuildAggregate(page); + + return new VexObservationQueryResult(page, aggregate, nextCursor, hasMore); + } + + private static string NormalizeTenant(string tenant) + => VexObservation.EnsureNotNullOrWhiteSpace(tenant, nameof(tenant)).ToLowerInvariant(); + + private static ImmutableHashSet<string> NormalizeSet( + IEnumerable<string>? values, + Func<string, string> projector, + StringComparer comparer) + { + if (values is null) + { + return ImmutableHashSet<string>.Empty; + } + + var builder = ImmutableHashSet.CreateBuilder<string>(comparer); + foreach (var value in values) + { + var normalized = VexObservation.TrimToNull(value); + if (normalized is null) + { + continue; + } + + builder.Add(projector(normalized)); + } + + return builder.ToImmutable(); + } + + private static ImmutableHashSet<VexClaimStatus> NormalizeStatuses(IEnumerable<VexClaimStatus>? statuses) + { + if (statuses is null) + { + return ImmutableHashSet<VexClaimStatus>.Empty; + } + + return statuses.Aggregate( + ImmutableHashSet<VexClaimStatus>.Empty, + static (set, status) => set.Add(status)); + } + + private static int NormalizeLimit(int? limit) + { + if (!limit.HasValue || limit.Value <= 0) + { + return DefaultPageSize; + } + + return Math.Min(limit.Value, MaxPageSize); + } + + private static VexObservationCursor? DecodeCursor(string? 
cursor) + { + if (string.IsNullOrWhiteSpace(cursor)) + { + return null; + } + + try + { + var decoded = Convert.FromBase64String(cursor.Trim()); + var payload = Encoding.UTF8.GetString(decoded); + var separator = payload.IndexOf(':'); + if (separator <= 0 || separator >= payload.Length - 1) + { + throw new FormatException("Cursor is malformed."); + } + + if (!long.TryParse(payload.AsSpan(0, separator), NumberStyles.Integer, CultureInfo.InvariantCulture, out var ticks)) + { + throw new FormatException("Cursor timestamp is invalid."); + } + + var createdAt = new DateTimeOffset(DateTime.SpecifyKind(new DateTime(ticks), DateTimeKind.Utc)); + var observationId = payload[(separator + 1)..]; + if (string.IsNullOrWhiteSpace(observationId)) + { + throw new FormatException("Cursor observation id is missing."); + } + + return new VexObservationCursor(createdAt, observationId); + } + catch (FormatException) + { + throw; + } + catch (Exception ex) + { + throw new FormatException("Cursor is malformed.", ex); + } + } + + private static string? EncodeCursor(VexObservation observation) + { + if (observation is null) + { + return null; + } + + var payload = $"{observation.CreatedAt.UtcTicks.ToString(CultureInfo.InvariantCulture)}:{observation.ObservationId}"; + return Convert.ToBase64String(Encoding.UTF8.GetBytes(payload)); + } + + private static bool Matches( + VexObservation observation, + ImmutableHashSet<string> observationIds, + ImmutableHashSet<string> vulnerabilities, + ImmutableHashSet<string> productKeys, + ImmutableHashSet<string> purls, + ImmutableHashSet<string> cpes, + ImmutableHashSet<string> providerIds, + ImmutableHashSet<VexClaimStatus> statuses) + { + ArgumentNullException.ThrowIfNull(observation); + + if (observationIds.Count > 0 && !observationIds.Contains(observation.ObservationId)) + { + return false; + } + + if (providerIds.Count > 0 && !providerIds.Contains(observation.ProviderId.ToLowerInvariant())) + { + return false; + } + + if (!MatchesStatements(observation, vulnerabilities, productKeys, statuses)) + { + return false; + } + + if (purls.Count > 0 && !observation.Linkset.Purls.Any(purl => purls.Contains(purl.ToLowerInvariant()))) + { + return false; + } + + if (cpes.Count > 0 && !observation.Linkset.Cpes.Any(cpe => cpes.Contains(cpe.ToLowerInvariant()))) + { + return false; + } + + return true; + } + + private static bool MatchesStatements( + VexObservation observation, + ImmutableHashSet<string> vulnerabilities, + ImmutableHashSet<string> productKeys, + ImmutableHashSet<VexClaimStatus> statuses) + { + if (vulnerabilities.Count == 0 && productKeys.Count == 0 && statuses.Count == 0) + { + return true; + } + + foreach (var statement in observation.Statements) + { + var vulnerabilityMatches = vulnerabilities.Count == 0 + || vulnerabilities.Contains(statement.VulnerabilityId.ToLowerInvariant()); + + var productMatches = productKeys.Count == 0 + || productKeys.Contains(statement.ProductKey.ToLowerInvariant()); + + var statusMatches = statuses.Count == 0 + || statuses.Contains(statement.Status); + + if (vulnerabilityMatches && productMatches && statusMatches) + { + return true; + } + } + + return false; + } + + private static VexObservationAggregate BuildAggregate(ImmutableArray<VexObservation> observations) + { + if (observations.IsDefaultOrEmpty) + { + return new VexObservationAggregate( + ImmutableArray<string>.Empty, + ImmutableArray<string>.Empty, + ImmutableArray<string>.Empty, + ImmutableArray<string>.Empty, + ImmutableArray<VexObservationReference>.Empty, + 
ImmutableArray<string>.Empty); + } + + var vulnerabilitySet = new HashSet<string>(StringComparer.Ordinal); + var productSet = new HashSet<string>(StringComparer.Ordinal); + var purlSet = new HashSet<string>(StringComparer.Ordinal); + var cpeSet = new HashSet<string>(StringComparer.Ordinal); + var referenceSet = new HashSet<VexObservationReference>(); + var providerSet = new HashSet<string>(StringComparer.OrdinalIgnoreCase); + + foreach (var observation in observations) + { + providerSet.Add(observation.ProviderId); + + foreach (var statement in observation.Statements) + { + vulnerabilitySet.Add(statement.VulnerabilityId); + productSet.Add(statement.ProductKey); + if (!string.IsNullOrWhiteSpace(statement.Purl)) + { + purlSet.Add(statement.Purl); + } + + if (!string.IsNullOrWhiteSpace(statement.Cpe)) + { + cpeSet.Add(statement.Cpe); + } + } + + foreach (var purl in observation.Linkset.Purls) + { + purlSet.Add(purl); + } + + foreach (var cpe in observation.Linkset.Cpes) + { + cpeSet.Add(cpe); + } + + foreach (var reference in observation.Linkset.References) + { + referenceSet.Add(reference); + } + } + + return new VexObservationAggregate( + vulnerabilitySet.OrderBy(static v => v, StringComparer.Ordinal).ToImmutableArray(), + productSet.OrderBy(static p => p, StringComparer.Ordinal).ToImmutableArray(), + purlSet.OrderBy(static p => p, StringComparer.Ordinal).ToImmutableArray(), + cpeSet.OrderBy(static c => c, StringComparer.Ordinal).ToImmutableArray(), + referenceSet + .OrderBy(static reference => reference.Type, StringComparer.Ordinal) + .ThenBy(static reference => reference.Url, StringComparer.Ordinal) + .ToImmutableArray(), + providerSet.OrderBy(static provider => provider, StringComparer.OrdinalIgnoreCase).ToImmutableArray()); + } +} diff --git a/src/StellaOps.Excititor.Core/StellaOps.Excititor.Core.csproj b/src/Excititor/__Libraries/StellaOps.Excititor.Core/StellaOps.Excititor.Core.csproj similarity index 61% rename from src/StellaOps.Excititor.Core/StellaOps.Excititor.Core.csproj rename to src/Excititor/__Libraries/StellaOps.Excititor.Core/StellaOps.Excititor.Core.csproj index e7ab30a2..2fadfde9 100644 --- a/src/StellaOps.Excititor.Core/StellaOps.Excititor.Core.csproj +++ b/src/Excititor/__Libraries/StellaOps.Excititor.Core/StellaOps.Excititor.Core.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -10,7 +11,7 @@ <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" /> </ItemGroup> <ItemGroup> - <ProjectReference Include="../StellaOps.Aoc/StellaOps.Aoc.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.RawModels/StellaOps.Concelier.RawModels.csproj" /> + <ProjectReference Include="../../../Aoc/__Libraries/StellaOps.Aoc/StellaOps.Aoc.csproj" /> + <ProjectReference Include="../../../Concelier/__Libraries/StellaOps.Concelier.RawModels/StellaOps.Concelier.RawModels.csproj" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Excititor.Core/TASKS.md b/src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md similarity index 99% rename from src/StellaOps.Excititor.Core/TASKS.md rename to src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md index 3286a140..b3514dfc 100644 --- a/src/StellaOps.Excititor.Core/TASKS.md +++ b/src/Excititor/__Libraries/StellaOps.Excititor.Core/TASKS.md @@ -1,101 +1,101 @@ -# TASKS — Epic 1: Aggregation-Only Contract -> **AOC Reminder:** ingestion captures raw VEX 
statements/linksets only—no precedence, suppression, or severity derivation within Excititor.
-| ID | Status | Owner(s) | Depends on | Notes |
-|---|---|---|---|---|
-| EXCITITOR-CORE-AOC-19-001 `AOC guard & provenance enforcement` | DONE (2025-10-29) | Excititor Core Guild | WEB-AOC-19-001 | Introduce repository interceptor validating provenance/signatures, rejecting forbidden fields (`severity`, `consensus`, etc.), and surfacing `ERR_AOC_00x` codes. |
-> 2025-10-31: Raw guard now enforced by `MongoVexRawStore` and worker DI via `AddExcititorAocGuards`; repository + backfill tests cover guard pass/fail and storage rollback.
-> 2025-10-29: Added `VexRawWriteGuard` + DI hooks consuming `AocWriteGuard`; unit coverage validates minimal and invalid signature cases. Integration with raw sinks remains outstanding.
-| EXCITITOR-CORE-AOC-19-002 `VEX linkset extraction` | TODO | Excititor Core Guild | EXCITITOR-CORE-AOC-19-001 | Implement deterministic extraction of advisory IDs, component PURLs, and references into `linkset`, capturing reconciled-from metadata for traceability. |
-| EXCITITOR-CORE-AOC-19-003 `Idempotent VEX raw upsert` | TODO | Excititor Core Guild | EXCITITOR-STORE-AOC-19-002 | Enforce `(vendor, upstreamId, contentHash, tenant)` uniqueness, generate supersedes chains, and ensure append-only versioning of raw VEX documents. |
-| EXCITITOR-CORE-AOC-19-004 `Remove ingestion consensus` | TODO | Excititor Core Guild | EXCITITOR-CORE-AOC-19-002, POLICY-AOC-19-003 | Excise consensus/merge/severity logic from Excititor ingestion paths, updating exports/tests to rely on Policy Engine materializations instead. |
-| EXCITITOR-CORE-AOC-19-013 `Authority tenant scope smoke coverage` | TODO | Excititor Core Guild | AUTH-AOC-19-002 | Update Excititor smoke/e2e suites to seed tenant-aware Authority clients and ensure cross-tenant VEX ingestion is rejected. | Required for Authority docs (`AUTH-AOC-19-003`) sign-off; share results with Authority Core. |
-
-## Policy Engine v2
-
-| ID | Status | Owner(s) | Depends on | Notes |
-|----|--------|----------|------------|-------|
-| EXCITITOR-POLICY-20-002 `Scope-aware linksets` | TODO | Excititor Core Guild, Policy Guild | EXCITITOR-CORE-AOC-19-002, POLICY-ENGINE-20-001 | Enhance VEX linkset extraction with scope resolution (product/component) + version range matching to boost policy join accuracy; refresh fixtures/tests. |
-
-## Graph Explorer v1
-
-| ID | Status | Owner(s) | Depends on | Notes |
-|----|--------|----------|------------|-------|
-| EXCITITOR-GRAPH-21-001 `Inspector linkouts` | BLOCKED (2025-10-27) | Excititor Core Guild, Cartographer Guild | EXCITITOR-POLICY-20-002, CARTO-GRAPH-21-005 | Provide batched VEX/advisory reference fetches keyed by graph node PURLs so UI inspector can display raw documents and justification metadata. |
-> 2025-10-27: Pending policy-driven linkset enrichment (`EXCITITOR-POLICY-20-002`) and Cartographer inspector contract (`CARTO-GRAPH-21-005`). No stable payload to target.
-| EXCITITOR-GRAPH-21-002 `Overlay enrichment` | BLOCKED (2025-10-27) | Excititor Core Guild | EXCITITOR-GRAPH-21-001, POLICY-ENGINE-30-001 | Ensure overlay metadata includes VEX justification summaries and document versions for Cartographer overlays; update fixtures/tests. |
-> 2025-10-27: Requires inspector linkouts (`EXCITITOR-GRAPH-21-001`) and Policy Engine overlay schema (`POLICY-ENGINE-30-001`) before enrichment can be implemented.
- -## Link-Not-Merge v1 - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| EXCITITOR-LNM-21-001 `VEX observation model` | TODO | Excititor Core Guild | EXCITITOR-CORE-AOC-19-001 | Define immutable `vex_observations` schema capturing raw statements, product PURLs, justification, and AOC metadata. `DOCS-LNM-22-002` blocked pending this schema. | -| EXCITITOR-LNM-21-002 `Linkset correlator` | TODO | Excititor Core Guild | EXCITITOR-LNM-21-001 | Build correlation pipeline combining alias + product PURL signals to form `vex_linksets` with confidence metrics. Docs waiting to finalize VEX aggregation guide. | -| EXCITITOR-LNM-21-003 `Conflict annotator` | TODO | Excititor Core Guild | EXCITITOR-LNM-21-002 | Record status/justification disagreements within linksets and expose structured conflicts. Provide structured payloads for `DOCS-LNM-22-002`. | -| EXCITITOR-LNM-21-004 `Merge removal` | TODO | Excititor Core Guild | EXCITITOR-LNM-21-002 | Remove legacy VEX merge logic, enforce immutability, and add guards/tests to prevent future merges. | -| EXCITITOR-LNM-21-005 `Event emission` | TODO | Excititor Core Guild, Platform Events Guild | EXCITITOR-LNM-21-002 | Emit `vex.linkset.updated` events for downstream consumers with delta descriptions and tenant context. | - -## Policy Engine + Editor v1 - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| EXCITITOR-POLICY-23-001 `Evidence indexes` | TODO | Excititor Core Guild | EXCITITOR-LNM-21-002 | Provide indexes/materialized views for policy runtime (status, justification, product PURL) to accelerate queries; document contract. | -| EXCITITOR-POLICY-23-002 `Event guarantees` | TODO | Excititor Core Guild, Platform Events Guild | EXCITITOR-LNM-21-005 | Ensure `vex.linkset.updated` events include correlation confidence, conflict summaries, and idempotent ids for evaluator consumption. | - -## Graph & Vuln Explorer v1 - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| EXCITITOR-GRAPH-24-001 `VEX overlay inputs` | DONE (2025-10-29) | Excititor Core Guild | EXCITITOR-POLICY-23-001 | Expose raw VEX statements/linksets scoped for overlay services; no suppression/precedence logic in ingestion. | - -## Reachability v1 - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| EXCITITOR-SIG-26-001 `Vendor exploitability hints` | TODO | Excititor Core Guild, Signals Guild | SIGNALS-24-004 | Surface vendor-provided exploitability indicators and affected symbol lists to Signals service via projection endpoints. | - -## Authority-Backed Scopes & Tenancy (Epic 14) -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| EXCITITOR-TEN-48-001 `Tenant-aware VEX linking` | TODO | Excititor Core Guild | AUTH-TEN-47-001 | Apply tenant context to VEX linkers, enable RLS, and expose capability endpoint confirming aggregation-only behavior. | - -## Observability & Forensics (Epic 15) -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| EXCITITOR-OBS-50-001 `Telemetry adoption` | TODO | Excititor Core Guild, Observability Guild | TELEMETRY-OBS-50-001, TELEMETRY-OBS-50-002 | Integrate telemetry core across VEX ingestion/linking, ensuring spans/logs capture tenant, product scope, upstream id, justification hash, and trace IDs. 
| -| EXCITITOR-OBS-51-001 `Metrics & SLOs` | TODO | Excititor Core Guild, DevOps Guild | EXCITITOR-OBS-50-001, TELEMETRY-OBS-51-001 | Publish metrics for VEX ingest latency, scope resolution success, conflict rate, signature verification failures. Define SLOs (link latency P95 <30s) and configure burn-rate alerts. | -| EXCITITOR-OBS-52-001 `Timeline events` | TODO | Excititor Core Guild | EXCITITOR-OBS-50-001, TIMELINE-OBS-52-002 | Emit `timeline_event` entries for VEX ingest/linking/outcome changes with trace IDs, justification summaries, and evidence placeholders. | -| EXCITITOR-OBS-53-001 `Evidence snapshots` | TODO | Excititor Core Guild, Evidence Locker Guild | EXCITITOR-OBS-52-001, EVID-OBS-53-002 | Build evidence payloads for VEX statements (raw doc, normalization diff, precedence notes) and push to evidence locker with Merkle manifests. | -| EXCITITOR-OBS-54-001 `Attestation & verification` | TODO | Excititor Core Guild, Provenance Guild | EXCITITOR-OBS-53-001, PROV-OBS-54-001 | Attach DSSE attestations to VEX batch processing, verify chain-of-custody via Provenance library, and link attestation IDs to timeline + ledger. | -| EXCITITOR-OBS-55-001 `Incident mode` | TODO | Excititor Core Guild, DevOps Guild | EXCITITOR-OBS-51-001, DEVOPS-OBS-55-001 | Implement incident sampling bump, additional raw payload retention, and activation events for VEX pipelines with redaction guard rails. | - -## Air-Gapped Mode (Epic 16) -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| EXCITITOR-AIRGAP-56-001 `Mirror ingestion adapters` | TODO | Excititor Core Guild | AIRGAP-IMP-57-002, MIRROR-CRT-56-001 | Add mirror-based VEX ingestion, preserving statement digests and bundle IDs. | -| EXCITITOR-AIRGAP-56-002 `Bundle provenance` | TODO | Excititor Core Guild, AirGap Importer Guild | EXCITITOR-AIRGAP-56-001, AIRGAP-IMP-57-001 | Persist bundle metadata on VEX observations/linksets with provenance references. | -| EXCITITOR-AIRGAP-57-001 `Sealed-mode enforcement` | TODO | Excititor Core Guild, AirGap Policy Guild | EXCITITOR-AIRGAP-56-001, AIRGAP-POL-56-001 | Block non-mirror connectors in sealed mode and surface remediation errors. | -| EXCITITOR-AIRGAP-57-002 `Staleness annotations` | TODO | Excititor Core Guild, AirGap Time Guild | EXCITITOR-AIRGAP-56-002, AIRGAP-TIME-58-001 | Annotate VEX statements with staleness metrics and expose via API. | -| EXCITITOR-AIRGAP-58-001 `Portable VEX evidence` | TODO | Excititor Core Guild, Evidence Locker Guild | EXCITITOR-OBS-53-001, EVID-OBS-54-001 | Package VEX evidence segments into portable evidence bundles linked to timeline. | - -## SDKs & OpenAPI (Epic 17) -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| EXCITITOR-OAS-61-001 `Spec coverage` | TODO | Excititor Core Guild, API Contracts Guild | OAS-61-001 | Update VEX OAS to include observation/linkset endpoints with provenance fields and examples. | -| EXCITITOR-OAS-61-002 `Example catalog` | TODO | Excititor Core Guild | EXCITITOR-OAS-61-001 | Provide examples for VEX justifications, statuses, conflicts; ensure SDK docs reference them. | -| EXCITITOR-OAS-62-001 `SDK smoke tests` | TODO | Excititor Core Guild, SDK Generator Guild | EXCITITOR-OAS-61-001, SDKGEN-63-001 | Add SDK scenarios for VEX observation queries and conflict handling to language smoke suites. 
| -| EXCITITOR-OAS-63-001 `Deprecation headers` | TODO | Excititor Core Guild, API Governance Guild | APIGOV-63-001 | Add deprecation metadata and notifications for legacy VEX routes. | - -## Risk Profiles (Epic 18) -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| EXCITITOR-RISK-66-001 `VEX gate provider` | TODO | Excititor Core Guild, Risk Engine Guild | RISK-ENGINE-67-002 | Supply VEX status and justification data for risk engine gating with full source provenance. | -| EXCITITOR-RISK-66-002 `Reachability inputs` | TODO | Excititor Core Guild | EXCITITOR-RISK-66-001 | Provide component/product scoping metadata enabling reachability and runtime factor mapping. | -| EXCITITOR-RISK-67-001 `Explainability metadata` | TODO | Excititor Core Guild | EXCITITOR-RISK-66-001 | Include VEX justification, status reasoning, and source digests in explainability artifacts. | -| EXCITITOR-RISK-68-001 `Policy Studio integration` | TODO | Excititor Core Guild, Policy Studio Guild | POLICY-RISK-68-001 | Surface VEX-specific gates/weights within profile editor UI and validation messages. | - -## Attestor Console (Epic 19) -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| EXCITITOR-ATTEST-73-001 `VEX attestation payloads` | TODO | Excititor Core Guild, Attestation Payloads Guild | ATTEST-TYPES-72-001 | Provide VEX statement metadata (supplier identity, justification, scope) required for VEXAttestation payloads. | -| EXCITITOR-ATTEST-73-002 `Chain provenance` | TODO | Excititor Core Guild | EXCITITOR-ATTEST-73-001 | Expose linkage from VEX statements to subject/product for chain of custody graph. | +# TASKS — Epic 1: Aggregation-Only Contract +> **AOC Reminder:** ingestion captures raw VEX statements/linksets only—no precedence, suppression, or severity derivation within Excititor. +| ID | Status | Owner(s) | Depends on | Notes | +|---|---|---|---|---| +| EXCITITOR-CORE-AOC-19-001 `AOC guard & provenance enforcement` | DONE (2025-10-29) | Excititor Core Guild | WEB-AOC-19-001 | Introduce repository interceptor validating provenance/signatures, rejecting forbidden fields (`severity`, `consensus`, etc.), and surfacing `ERR_AOC_00x` codes. | +> 2025-10-31: Raw guard now enforced by `MongoVexRawStore` and worker DI via `AddExcititorAocGuards`; repository + backfill tests cover guard pass/fail and storage rollback. +> 2025-10-29: Added `VexRawWriteGuard` + DI hooks consuming `AocWriteGuard`; unit coverage validates minimal and invalid signature cases. Integration with raw sinks remains outstanding. +| EXCITITOR-CORE-AOC-19-002 `VEX linkset extraction` | TODO | Excititor Core Guild | EXCITITOR-CORE-AOC-19-001 | Implement deterministic extraction of advisory IDs, component PURLs, and references into `linkset`, capturing reconciled-from metadata for traceability. | +| EXCITITOR-CORE-AOC-19-003 `Idempotent VEX raw upsert` | TODO | Excititor Core Guild | EXCITITOR-STORE-AOC-19-002 | Enforce `(vendor, upstreamId, contentHash, tenant)` uniqueness, generate supersedes chains, and ensure append-only versioning of raw VEX documents. | +| EXCITITOR-CORE-AOC-19-004 `Remove ingestion consensus` | TODO | Excititor Core Guild | EXCITITOR-CORE-AOC-19-002, POLICY-AOC-19-003 | Excise consensus/merge/severity logic from Excititor ingestion paths, updating exports/tests to rely on Policy Engine materializations instead. 
| +| EXCITITOR-CORE-AOC-19-013 `Authority tenant scope smoke coverage` | TODO | Excititor Core Guild | AUTH-AOC-19-002 | Update Excititor smoke/e2e suites to seed tenant-aware Authority clients and ensure cross-tenant VEX ingestion is rejected. | Required for Authority docs (`AUTH-AOC-19-003`) sign-off; share results with Authority Core. | + +## Policy Engine v2 + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| EXCITITOR-POLICY-20-002 `Scope-aware linksets` | TODO | Excititor Core Guild, Policy Guild | EXCITITOR-CORE-AOC-19-002, POLICY-ENGINE-20-001 | Enhance VEX linkset extraction with scope resolution (product/component) + version range matching to boost policy join accuracy; refresh fixtures/tests. | + +## Graph Explorer v1 + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| EXCITITOR-GRAPH-21-001 `Inspector linkouts` | BLOCKED (2025-10-27) | Excititor Core Guild, Cartographer Guild | EXCITITOR-POLICY-20-002, CARTO-GRAPH-21-005 | Provide batched VEX/advisory reference fetches keyed by graph node PURLs so UI inspector can display raw documents and justification metadata. | +> 2025-10-27: Pending policy-driven linkset enrichment (`EXCITITOR-POLICY-20-002`) and Cartographer inspector contract (`CARTO-GRAPH-21-005`). No stable payload to target. +| EXCITITOR-GRAPH-21-002 `Overlay enrichment` | BLOCKED (2025-10-27) | Excititor Core Guild | EXCITITOR-GRAPH-21-001, POLICY-ENGINE-30-001 | Ensure overlay metadata includes VEX justification summaries and document versions for Cartographer overlays; update fixtures/tests. | +> 2025-10-27: Requires inspector linkouts (`EXCITITOR-GRAPH-21-001`) and Policy Engine overlay schema (`POLICY-ENGINE-30-001`) before enrichment can be implemented. + +## Link-Not-Merge v1 + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| EXCITITOR-LNM-21-001 `VEX observation model` | TODO | Excititor Core Guild | EXCITITOR-CORE-AOC-19-001 | Define immutable `vex_observations` schema capturing raw statements, product PURLs, justification, and AOC metadata. `DOCS-LNM-22-002` blocked pending this schema. | +| EXCITITOR-LNM-21-002 `Linkset correlator` | TODO | Excititor Core Guild | EXCITITOR-LNM-21-001 | Build correlation pipeline combining alias + product PURL signals to form `vex_linksets` with confidence metrics. Docs waiting to finalize VEX aggregation guide. | +| EXCITITOR-LNM-21-003 `Conflict annotator` | TODO | Excititor Core Guild | EXCITITOR-LNM-21-002 | Record status/justification disagreements within linksets and expose structured conflicts. Provide structured payloads for `DOCS-LNM-22-002`. | +| EXCITITOR-LNM-21-004 `Merge removal` | TODO | Excititor Core Guild | EXCITITOR-LNM-21-002 | Remove legacy VEX merge logic, enforce immutability, and add guards/tests to prevent future merges. | +| EXCITITOR-LNM-21-005 `Event emission` | TODO | Excititor Core Guild, Platform Events Guild | EXCITITOR-LNM-21-002 | Emit `vex.linkset.updated` events for downstream consumers with delta descriptions and tenant context. | + +## Policy Engine + Editor v1 + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| EXCITITOR-POLICY-23-001 `Evidence indexes` | TODO | Excititor Core Guild | EXCITITOR-LNM-21-002 | Provide indexes/materialized views for policy runtime (status, justification, product PURL) to accelerate queries; document contract. 
| +| EXCITITOR-POLICY-23-002 `Event guarantees` | TODO | Excititor Core Guild, Platform Events Guild | EXCITITOR-LNM-21-005 | Ensure `vex.linkset.updated` events include correlation confidence, conflict summaries, and idempotent ids for evaluator consumption. | + +## Graph & Vuln Explorer v1 + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| EXCITITOR-GRAPH-24-001 `VEX overlay inputs` | DONE (2025-10-29) | Excititor Core Guild | EXCITITOR-POLICY-23-001 | Expose raw VEX statements/linksets scoped for overlay services; no suppression/precedence logic in ingestion. | + +## Reachability v1 + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| EXCITITOR-SIG-26-001 `Vendor exploitability hints` | TODO | Excititor Core Guild, Signals Guild | SIGNALS-24-004 | Surface vendor-provided exploitability indicators and affected symbol lists to Signals service via projection endpoints. | + +## Authority-Backed Scopes & Tenancy (Epic 14) +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| EXCITITOR-TEN-48-001 `Tenant-aware VEX linking` | TODO | Excititor Core Guild | AUTH-TEN-47-001 | Apply tenant context to VEX linkers, enable RLS, and expose capability endpoint confirming aggregation-only behavior. | + +## Observability & Forensics (Epic 15) +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| EXCITITOR-OBS-50-001 `Telemetry adoption` | TODO | Excititor Core Guild, Observability Guild | TELEMETRY-OBS-50-001, TELEMETRY-OBS-50-002 | Integrate telemetry core across VEX ingestion/linking, ensuring spans/logs capture tenant, product scope, upstream id, justification hash, and trace IDs. | +| EXCITITOR-OBS-51-001 `Metrics & SLOs` | TODO | Excititor Core Guild, DevOps Guild | EXCITITOR-OBS-50-001, TELEMETRY-OBS-51-001 | Publish metrics for VEX ingest latency, scope resolution success, conflict rate, signature verification failures. Define SLOs (link latency P95 <30s) and configure burn-rate alerts. | +| EXCITITOR-OBS-52-001 `Timeline events` | TODO | Excititor Core Guild | EXCITITOR-OBS-50-001, TIMELINE-OBS-52-002 | Emit `timeline_event` entries for VEX ingest/linking/outcome changes with trace IDs, justification summaries, and evidence placeholders. | +| EXCITITOR-OBS-53-001 `Evidence snapshots` | TODO | Excititor Core Guild, Evidence Locker Guild | EXCITITOR-OBS-52-001, EVID-OBS-53-002 | Build evidence payloads for VEX statements (raw doc, normalization diff, precedence notes) and push to evidence locker with Merkle manifests. | +| EXCITITOR-OBS-54-001 `Attestation & verification` | TODO | Excititor Core Guild, Provenance Guild | EXCITITOR-OBS-53-001, PROV-OBS-54-001 | Attach DSSE attestations to VEX batch processing, verify chain-of-custody via Provenance library, and link attestation IDs to timeline + ledger. | +| EXCITITOR-OBS-55-001 `Incident mode` | TODO | Excititor Core Guild, DevOps Guild | EXCITITOR-OBS-51-001, DEVOPS-OBS-55-001 | Implement incident sampling bump, additional raw payload retention, and activation events for VEX pipelines with redaction guard rails. | + +## Air-Gapped Mode (Epic 16) +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| EXCITITOR-AIRGAP-56-001 `Mirror ingestion adapters` | TODO | Excititor Core Guild | AIRGAP-IMP-57-002, MIRROR-CRT-56-001 | Add mirror-based VEX ingestion, preserving statement digests and bundle IDs. 
| +| EXCITITOR-AIRGAP-56-002 `Bundle provenance` | TODO | Excititor Core Guild, AirGap Importer Guild | EXCITITOR-AIRGAP-56-001, AIRGAP-IMP-57-001 | Persist bundle metadata on VEX observations/linksets with provenance references. | +| EXCITITOR-AIRGAP-57-001 `Sealed-mode enforcement` | TODO | Excititor Core Guild, AirGap Policy Guild | EXCITITOR-AIRGAP-56-001, AIRGAP-POL-56-001 | Block non-mirror connectors in sealed mode and surface remediation errors. | +| EXCITITOR-AIRGAP-57-002 `Staleness annotations` | TODO | Excititor Core Guild, AirGap Time Guild | EXCITITOR-AIRGAP-56-002, AIRGAP-TIME-58-001 | Annotate VEX statements with staleness metrics and expose via API. | +| EXCITITOR-AIRGAP-58-001 `Portable VEX evidence` | TODO | Excititor Core Guild, Evidence Locker Guild | EXCITITOR-OBS-53-001, EVID-OBS-54-001 | Package VEX evidence segments into portable evidence bundles linked to timeline. | + +## SDKs & OpenAPI (Epic 17) +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| EXCITITOR-OAS-61-001 `Spec coverage` | TODO | Excititor Core Guild, API Contracts Guild | OAS-61-001 | Update VEX OAS to include observation/linkset endpoints with provenance fields and examples. | +| EXCITITOR-OAS-61-002 `Example catalog` | TODO | Excititor Core Guild | EXCITITOR-OAS-61-001 | Provide examples for VEX justifications, statuses, conflicts; ensure SDK docs reference them. | +| EXCITITOR-OAS-62-001 `SDK smoke tests` | TODO | Excititor Core Guild, SDK Generator Guild | EXCITITOR-OAS-61-001, SDKGEN-63-001 | Add SDK scenarios for VEX observation queries and conflict handling to language smoke suites. | +| EXCITITOR-OAS-63-001 `Deprecation headers` | TODO | Excititor Core Guild, API Governance Guild | APIGOV-63-001 | Add deprecation metadata and notifications for legacy VEX routes. | + +## Risk Profiles (Epic 18) +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| EXCITITOR-RISK-66-001 `VEX gate provider` | TODO | Excititor Core Guild, Risk Engine Guild | RISK-ENGINE-67-002 | Supply VEX status and justification data for risk engine gating with full source provenance. | +| EXCITITOR-RISK-66-002 `Reachability inputs` | TODO | Excititor Core Guild | EXCITITOR-RISK-66-001 | Provide component/product scoping metadata enabling reachability and runtime factor mapping. | +| EXCITITOR-RISK-67-001 `Explainability metadata` | TODO | Excititor Core Guild | EXCITITOR-RISK-66-001 | Include VEX justification, status reasoning, and source digests in explainability artifacts. | +| EXCITITOR-RISK-68-001 `Policy Studio integration` | TODO | Excititor Core Guild, Policy Studio Guild | POLICY-RISK-68-001 | Surface VEX-specific gates/weights within profile editor UI and validation messages. | + +## Attestor Console (Epic 19) +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| EXCITITOR-ATTEST-73-001 `VEX attestation payloads` | TODO | Excititor Core Guild, Attestation Payloads Guild | ATTEST-TYPES-72-001 | Provide VEX statement metadata (supplier identity, justification, scope) required for VEXAttestation payloads. | +| EXCITITOR-ATTEST-73-002 `Chain provenance` | TODO | Excititor Core Guild | EXCITITOR-ATTEST-73-001 | Expose linkage from VEX statements to subject/product for chain of custody graph. 
| diff --git a/src/StellaOps.Excititor.Core/VexAttestationAbstractions.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Core/VexAttestationAbstractions.cs similarity index 100% rename from src/StellaOps.Excititor.Core/VexAttestationAbstractions.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Core/VexAttestationAbstractions.cs diff --git a/src/StellaOps.Excititor.Core/VexCacheEntry.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Core/VexCacheEntry.cs similarity index 100% rename from src/StellaOps.Excititor.Core/VexCacheEntry.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Core/VexCacheEntry.cs diff --git a/src/StellaOps.Excititor.Core/VexCanonicalJsonSerializer.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Core/VexCanonicalJsonSerializer.cs similarity index 100% rename from src/StellaOps.Excititor.Core/VexCanonicalJsonSerializer.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Core/VexCanonicalJsonSerializer.cs diff --git a/src/StellaOps.Excititor.Core/VexClaim.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Core/VexClaim.cs similarity index 100% rename from src/StellaOps.Excititor.Core/VexClaim.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Core/VexClaim.cs diff --git a/src/StellaOps.Excititor.Core/VexConnectorAbstractions.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Core/VexConnectorAbstractions.cs similarity index 100% rename from src/StellaOps.Excititor.Core/VexConnectorAbstractions.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Core/VexConnectorAbstractions.cs diff --git a/src/StellaOps.Excititor.Core/VexConsensus.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Core/VexConsensus.cs similarity index 100% rename from src/StellaOps.Excititor.Core/VexConsensus.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Core/VexConsensus.cs diff --git a/src/StellaOps.Excititor.Core/VexConsensusHold.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Core/VexConsensusHold.cs similarity index 96% rename from src/StellaOps.Excititor.Core/VexConsensusHold.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Core/VexConsensusHold.cs index e5d429b9..58a661a3 100644 --- a/src/StellaOps.Excititor.Core/VexConsensusHold.cs +++ b/src/Excititor/__Libraries/StellaOps.Excititor.Core/VexConsensusHold.cs @@ -1,47 +1,47 @@ -namespace StellaOps.Excititor.Core; - -public sealed record VexConsensusHold -{ - public VexConsensusHold( - string vulnerabilityId, - string productKey, - VexConsensus candidate, - DateTimeOffset requestedAt, - DateTimeOffset eligibleAt, - string reason) - { - if (string.IsNullOrWhiteSpace(vulnerabilityId)) - { - throw new ArgumentException("Vulnerability id must be provided.", nameof(vulnerabilityId)); - } - - if (string.IsNullOrWhiteSpace(productKey)) - { - throw new ArgumentException("Product key must be provided.", nameof(productKey)); - } - - if (eligibleAt < requestedAt) - { - throw new ArgumentOutOfRangeException(nameof(eligibleAt), "EligibleAt cannot be earlier than RequestedAt."); - } - - VulnerabilityId = vulnerabilityId.Trim(); - ProductKey = productKey.Trim(); - Candidate = candidate ?? throw new ArgumentNullException(nameof(candidate)); - RequestedAt = requestedAt; - EligibleAt = eligibleAt; - Reason = string.IsNullOrWhiteSpace(reason) ? 
"unspecified" : reason.Trim(); - } - - public string VulnerabilityId { get; } - - public string ProductKey { get; } - - public VexConsensus Candidate { get; } - - public DateTimeOffset RequestedAt { get; } - - public DateTimeOffset EligibleAt { get; } - - public string Reason { get; } -} +namespace StellaOps.Excititor.Core; + +public sealed record VexConsensusHold +{ + public VexConsensusHold( + string vulnerabilityId, + string productKey, + VexConsensus candidate, + DateTimeOffset requestedAt, + DateTimeOffset eligibleAt, + string reason) + { + if (string.IsNullOrWhiteSpace(vulnerabilityId)) + { + throw new ArgumentException("Vulnerability id must be provided.", nameof(vulnerabilityId)); + } + + if (string.IsNullOrWhiteSpace(productKey)) + { + throw new ArgumentException("Product key must be provided.", nameof(productKey)); + } + + if (eligibleAt < requestedAt) + { + throw new ArgumentOutOfRangeException(nameof(eligibleAt), "EligibleAt cannot be earlier than RequestedAt."); + } + + VulnerabilityId = vulnerabilityId.Trim(); + ProductKey = productKey.Trim(); + Candidate = candidate ?? throw new ArgumentNullException(nameof(candidate)); + RequestedAt = requestedAt; + EligibleAt = eligibleAt; + Reason = string.IsNullOrWhiteSpace(reason) ? "unspecified" : reason.Trim(); + } + + public string VulnerabilityId { get; } + + public string ProductKey { get; } + + public VexConsensus Candidate { get; } + + public DateTimeOffset RequestedAt { get; } + + public DateTimeOffset EligibleAt { get; } + + public string Reason { get; } +} diff --git a/src/StellaOps.Excititor.Core/VexConsensusPolicyOptions.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Core/VexConsensusPolicyOptions.cs similarity index 100% rename from src/StellaOps.Excititor.Core/VexConsensusPolicyOptions.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Core/VexConsensusPolicyOptions.cs diff --git a/src/StellaOps.Excititor.Core/VexConsensusResolver.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Core/VexConsensusResolver.cs similarity index 100% rename from src/StellaOps.Excititor.Core/VexConsensusResolver.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Core/VexConsensusResolver.cs diff --git a/src/StellaOps.Excititor.Core/VexExportManifest.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Core/VexExportManifest.cs similarity index 100% rename from src/StellaOps.Excititor.Core/VexExportManifest.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Core/VexExportManifest.cs diff --git a/src/StellaOps.Excititor.Core/VexExporterAbstractions.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Core/VexExporterAbstractions.cs similarity index 100% rename from src/StellaOps.Excititor.Core/VexExporterAbstractions.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Core/VexExporterAbstractions.cs diff --git a/src/StellaOps.Excititor.Core/VexNormalizerAbstractions.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Core/VexNormalizerAbstractions.cs similarity index 100% rename from src/StellaOps.Excititor.Core/VexNormalizerAbstractions.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Core/VexNormalizerAbstractions.cs diff --git a/src/StellaOps.Excititor.Core/VexProvider.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Core/VexProvider.cs similarity index 100% rename from src/StellaOps.Excititor.Core/VexProvider.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Core/VexProvider.cs diff --git a/src/StellaOps.Excititor.Core/VexQuery.cs 
b/src/Excititor/__Libraries/StellaOps.Excititor.Core/VexQuery.cs
similarity index 100%
rename from src/StellaOps.Excititor.Core/VexQuery.cs
rename to src/Excititor/__Libraries/StellaOps.Excititor.Core/VexQuery.cs
diff --git a/src/StellaOps.Excititor.Core/VexQuietProvenance.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Core/VexQuietProvenance.cs
similarity index 100%
rename from src/StellaOps.Excititor.Core/VexQuietProvenance.cs
rename to src/Excititor/__Libraries/StellaOps.Excititor.Core/VexQuietProvenance.cs
diff --git a/src/StellaOps.Excititor.Core/VexScoreEnvelope.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Core/VexScoreEnvelope.cs
similarity index 100%
rename from src/StellaOps.Excititor.Core/VexScoreEnvelope.cs
rename to src/Excititor/__Libraries/StellaOps.Excititor.Core/VexScoreEnvelope.cs
diff --git a/src/StellaOps.Excititor.Core/VexSignals.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Core/VexSignals.cs
similarity index 100%
rename from src/StellaOps.Excititor.Core/VexSignals.cs
rename to src/Excititor/__Libraries/StellaOps.Excititor.Core/VexSignals.cs
diff --git a/src/StellaOps.Excititor.Core/VexSignatureVerifiers.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Core/VexSignatureVerifiers.cs
similarity index 100%
rename from src/StellaOps.Excititor.Core/VexSignatureVerifiers.cs
rename to src/Excititor/__Libraries/StellaOps.Excititor.Core/VexSignatureVerifiers.cs
diff --git a/src/StellaOps.Excititor.Export/AGENTS.md b/src/Excititor/__Libraries/StellaOps.Excititor.Export/AGENTS.md
similarity index 100%
rename from src/StellaOps.Excititor.Export/AGENTS.md
rename to src/Excititor/__Libraries/StellaOps.Excititor.Export/AGENTS.md
diff --git a/src/StellaOps.Excititor.Export/ExportEngine.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Export/ExportEngine.cs
similarity index 100%
rename from src/StellaOps.Excititor.Export/ExportEngine.cs
rename to src/Excititor/__Libraries/StellaOps.Excititor.Export/ExportEngine.cs
diff --git a/src/StellaOps.Excititor.Export/FileSystemArtifactStore.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Export/FileSystemArtifactStore.cs
similarity index 100%
rename from src/StellaOps.Excititor.Export/FileSystemArtifactStore.cs
rename to src/Excititor/__Libraries/StellaOps.Excititor.Export/FileSystemArtifactStore.cs
diff --git a/src/StellaOps.Excititor.Export/IVexArtifactStore.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Export/IVexArtifactStore.cs
similarity index 100%
rename from src/StellaOps.Excititor.Export/IVexArtifactStore.cs
rename to src/Excititor/__Libraries/StellaOps.Excititor.Export/IVexArtifactStore.cs
diff --git a/src/StellaOps.Excititor.Export/OfflineBundleArtifactStore.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Export/OfflineBundleArtifactStore.cs
similarity index 100%
rename from src/StellaOps.Excititor.Export/OfflineBundleArtifactStore.cs
rename to src/Excititor/__Libraries/StellaOps.Excititor.Export/OfflineBundleArtifactStore.cs
diff --git a/src/StellaOps.Excititor.Export/Properties/AssemblyInfo.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Export/Properties/AssemblyInfo.cs
similarity index 100%
rename from src/StellaOps.Excititor.Export/Properties/AssemblyInfo.cs
rename to src/Excititor/__Libraries/StellaOps.Excititor.Export/Properties/AssemblyInfo.cs
diff --git a/src/StellaOps.Excititor.Export/S3ArtifactStore.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Export/S3ArtifactStore.cs
similarity index 100%
rename from src/StellaOps.Excititor.Export/S3ArtifactStore.cs
rename to src/Excititor/__Libraries/StellaOps.Excititor.Export/S3ArtifactStore.cs
diff --git a/src/StellaOps.Excititor.Export/StellaOps.Excititor.Export.csproj b/src/Excititor/__Libraries/StellaOps.Excititor.Export/StellaOps.Excititor.Export.csproj
similarity index 85%
rename from src/StellaOps.Excititor.Export/StellaOps.Excititor.Export.csproj
rename to src/Excititor/__Libraries/StellaOps.Excititor.Export/StellaOps.Excititor.Export.csproj
index d054a3d9..7e2eafb2 100644
--- a/src/StellaOps.Excititor.Export/StellaOps.Excititor.Export.csproj
+++ b/src/Excititor/__Libraries/StellaOps.Excititor.Export/StellaOps.Excititor.Export.csproj
@@ -1,3 +1,4 @@
+<?xml version='1.0' encoding='utf-8'?>
 <Project Sdk="Microsoft.NET.Sdk">
   <PropertyGroup>
     <TargetFramework>net10.0</TargetFramework>
@@ -15,6 +16,6 @@
     <ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
     <ProjectReference Include="..\StellaOps.Excititor.Policy\StellaOps.Excititor.Policy.csproj" />
     <ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" />
-    <ProjectReference Include="..\StellaOps.Cryptography\StellaOps.Cryptography.csproj" />
+    <ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
   </ItemGroup>
-</Project>
+</Project>
\ No newline at end of file
diff --git a/src/StellaOps.Excititor.Export/TASKS.md b/src/Excititor/__Libraries/StellaOps.Excititor.Export/TASKS.md
similarity index 100%
rename from src/StellaOps.Excititor.Export/TASKS.md
rename to src/Excititor/__Libraries/StellaOps.Excititor.Export/TASKS.md
diff --git a/src/StellaOps.Excititor.Export/VexExportCacheService.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Export/VexExportCacheService.cs
similarity index 100%
rename from src/StellaOps.Excititor.Export/VexExportCacheService.cs
rename to src/Excititor/__Libraries/StellaOps.Excititor.Export/VexExportCacheService.cs
diff --git a/src/StellaOps.Excititor.Export/VexExportEnvelopeBuilder.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Export/VexExportEnvelopeBuilder.cs
similarity index 100%
rename from src/StellaOps.Excititor.Export/VexExportEnvelopeBuilder.cs
rename to src/Excititor/__Libraries/StellaOps.Excititor.Export/VexExportEnvelopeBuilder.cs
diff --git a/src/StellaOps.Excititor.Export/VexMirrorBundlePublisher.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Export/VexMirrorBundlePublisher.cs
similarity index 100%
rename from src/StellaOps.Excititor.Export/VexMirrorBundlePublisher.cs
rename to src/Excititor/__Libraries/StellaOps.Excititor.Export/VexMirrorBundlePublisher.cs
diff --git a/src/StellaOps.Excititor.Formats.CSAF/AGENTS.md b/src/Excititor/__Libraries/StellaOps.Excititor.Formats.CSAF/AGENTS.md
similarity index 100%
rename from src/StellaOps.Excititor.Formats.CSAF/AGENTS.md
rename to src/Excititor/__Libraries/StellaOps.Excititor.Formats.CSAF/AGENTS.md
diff --git a/src/StellaOps.Excititor.Formats.CSAF/CsafNormalizer.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Formats.CSAF/CsafNormalizer.cs
similarity index 100%
rename from src/StellaOps.Excititor.Formats.CSAF/CsafNormalizer.cs
rename to src/Excititor/__Libraries/StellaOps.Excititor.Formats.CSAF/CsafNormalizer.cs
diff --git a/src/StellaOps.Excititor.Formats.CSAF/ServiceCollectionExtensions.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Formats.CSAF/ServiceCollectionExtensions.cs
similarity index 100%
rename from src/StellaOps.Excititor.Formats.CSAF/ServiceCollectionExtensions.cs
rename to
src/Excititor/__Libraries/StellaOps.Excititor.Formats.CSAF/ServiceCollectionExtensions.cs diff --git a/src/StellaOps.Excititor.Formats.CSAF/StellaOps.Excititor.Formats.CSAF.csproj b/src/Excititor/__Libraries/StellaOps.Excititor.Formats.CSAF/StellaOps.Excititor.Formats.CSAF.csproj similarity index 97% rename from src/StellaOps.Excititor.Formats.CSAF/StellaOps.Excititor.Formats.CSAF.csproj rename to src/Excititor/__Libraries/StellaOps.Excititor.Formats.CSAF/StellaOps.Excititor.Formats.CSAF.csproj index ba0a8da1..d5fa4e48 100644 --- a/src/StellaOps.Excititor.Formats.CSAF/StellaOps.Excititor.Formats.CSAF.csproj +++ b/src/Excititor/__Libraries/StellaOps.Excititor.Formats.CSAF/StellaOps.Excititor.Formats.CSAF.csproj @@ -1,16 +1,16 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <ItemGroup> - <PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> - </ItemGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" /> - </ItemGroup> -</Project> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <ItemGroup> + <PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> + </ItemGroup> + <ItemGroup> + <ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" /> + </ItemGroup> +</Project> diff --git a/src/StellaOps.Excititor.Formats.CSAF/TASKS.md b/src/Excititor/__Libraries/StellaOps.Excititor.Formats.CSAF/TASKS.md similarity index 100% rename from src/StellaOps.Excititor.Formats.CSAF/TASKS.md rename to src/Excititor/__Libraries/StellaOps.Excititor.Formats.CSAF/TASKS.md diff --git a/src/StellaOps.Excititor.Formats.CycloneDX/AGENTS.md b/src/Excititor/__Libraries/StellaOps.Excititor.Formats.CycloneDX/AGENTS.md similarity index 100% rename from src/StellaOps.Excititor.Formats.CycloneDX/AGENTS.md rename to src/Excititor/__Libraries/StellaOps.Excititor.Formats.CycloneDX/AGENTS.md diff --git a/src/StellaOps.Excititor.Formats.CycloneDX/CycloneDxNormalizer.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Formats.CycloneDX/CycloneDxNormalizer.cs similarity index 100% rename from src/StellaOps.Excititor.Formats.CycloneDX/CycloneDxNormalizer.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Formats.CycloneDX/CycloneDxNormalizer.cs diff --git a/src/StellaOps.Excititor.Formats.CycloneDX/ServiceCollectionExtensions.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Formats.CycloneDX/ServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Excititor.Formats.CycloneDX/ServiceCollectionExtensions.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Formats.CycloneDX/ServiceCollectionExtensions.cs diff --git a/src/StellaOps.Excititor.Formats.CycloneDX/StellaOps.Excititor.Formats.CycloneDX.csproj 
b/src/Excititor/__Libraries/StellaOps.Excititor.Formats.CycloneDX/StellaOps.Excititor.Formats.CycloneDX.csproj similarity index 97% rename from src/StellaOps.Excititor.Formats.CycloneDX/StellaOps.Excititor.Formats.CycloneDX.csproj rename to src/Excititor/__Libraries/StellaOps.Excititor.Formats.CycloneDX/StellaOps.Excititor.Formats.CycloneDX.csproj index ba0a8da1..d5fa4e48 100644 --- a/src/StellaOps.Excititor.Formats.CycloneDX/StellaOps.Excititor.Formats.CycloneDX.csproj +++ b/src/Excititor/__Libraries/StellaOps.Excititor.Formats.CycloneDX/StellaOps.Excititor.Formats.CycloneDX.csproj @@ -1,16 +1,16 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <ItemGroup> - <PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> - </ItemGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" /> - </ItemGroup> -</Project> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <ItemGroup> + <PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> + </ItemGroup> + <ItemGroup> + <ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" /> + </ItemGroup> +</Project> diff --git a/src/StellaOps.Excititor.Formats.CycloneDX/TASKS.md b/src/Excititor/__Libraries/StellaOps.Excititor.Formats.CycloneDX/TASKS.md similarity index 100% rename from src/StellaOps.Excititor.Formats.CycloneDX/TASKS.md rename to src/Excititor/__Libraries/StellaOps.Excititor.Formats.CycloneDX/TASKS.md diff --git a/src/StellaOps.Excititor.Formats.OpenVEX/AGENTS.md b/src/Excititor/__Libraries/StellaOps.Excititor.Formats.OpenVEX/AGENTS.md similarity index 100% rename from src/StellaOps.Excititor.Formats.OpenVEX/AGENTS.md rename to src/Excititor/__Libraries/StellaOps.Excititor.Formats.OpenVEX/AGENTS.md diff --git a/src/StellaOps.Excititor.Formats.OpenVEX/OpenVexNormalizer.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Formats.OpenVEX/OpenVexNormalizer.cs similarity index 100% rename from src/StellaOps.Excititor.Formats.OpenVEX/OpenVexNormalizer.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Formats.OpenVEX/OpenVexNormalizer.cs diff --git a/src/StellaOps.Excititor.Formats.OpenVEX/ServiceCollectionExtensions.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Formats.OpenVEX/ServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Excititor.Formats.OpenVEX/ServiceCollectionExtensions.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Formats.OpenVEX/ServiceCollectionExtensions.cs diff --git a/src/StellaOps.Excititor.Formats.OpenVEX/StellaOps.Excititor.Formats.OpenVEX.csproj b/src/Excititor/__Libraries/StellaOps.Excititor.Formats.OpenVEX/StellaOps.Excititor.Formats.OpenVEX.csproj similarity index 97% rename from 
src/StellaOps.Excititor.Formats.OpenVEX/StellaOps.Excititor.Formats.OpenVEX.csproj rename to src/Excititor/__Libraries/StellaOps.Excititor.Formats.OpenVEX/StellaOps.Excititor.Formats.OpenVEX.csproj index ba0a8da1..d5fa4e48 100644 --- a/src/StellaOps.Excititor.Formats.OpenVEX/StellaOps.Excititor.Formats.OpenVEX.csproj +++ b/src/Excititor/__Libraries/StellaOps.Excititor.Formats.OpenVEX/StellaOps.Excititor.Formats.OpenVEX.csproj @@ -1,16 +1,16 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <ItemGroup> - <PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> - </ItemGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" /> - </ItemGroup> -</Project> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <ItemGroup> + <PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> + </ItemGroup> + <ItemGroup> + <ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" /> + </ItemGroup> +</Project> diff --git a/src/StellaOps.Excititor.Formats.OpenVEX/TASKS.md b/src/Excititor/__Libraries/StellaOps.Excititor.Formats.OpenVEX/TASKS.md similarity index 100% rename from src/StellaOps.Excititor.Formats.OpenVEX/TASKS.md rename to src/Excititor/__Libraries/StellaOps.Excititor.Formats.OpenVEX/TASKS.md diff --git a/src/StellaOps.Excititor.Policy/AGENTS.md b/src/Excititor/__Libraries/StellaOps.Excititor.Policy/AGENTS.md similarity index 100% rename from src/StellaOps.Excititor.Policy/AGENTS.md rename to src/Excititor/__Libraries/StellaOps.Excititor.Policy/AGENTS.md diff --git a/src/StellaOps.Excititor.Policy/IVexPolicyProvider.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Policy/IVexPolicyProvider.cs similarity index 100% rename from src/StellaOps.Excititor.Policy/IVexPolicyProvider.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Policy/IVexPolicyProvider.cs diff --git a/src/StellaOps.Excititor.Policy/StellaOps.Excititor.Policy.csproj b/src/Excititor/__Libraries/StellaOps.Excititor.Policy/StellaOps.Excititor.Policy.csproj similarity index 97% rename from src/StellaOps.Excititor.Policy/StellaOps.Excititor.Policy.csproj rename to src/Excititor/__Libraries/StellaOps.Excititor.Policy/StellaOps.Excititor.Policy.csproj index cb5db022..bb888396 100644 --- a/src/StellaOps.Excititor.Policy/StellaOps.Excititor.Policy.csproj +++ b/src/Excititor/__Libraries/StellaOps.Excititor.Policy/StellaOps.Excititor.Policy.csproj @@ -1,17 +1,17 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <ItemGroup> - 
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="YamlDotNet" Version="13.7.1" /> - </ItemGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" /> - </ItemGroup> -</Project> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <ItemGroup> + <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="YamlDotNet" Version="13.7.1" /> + </ItemGroup> + <ItemGroup> + <ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" /> + </ItemGroup> +</Project> diff --git a/src/StellaOps.Excititor.Policy/TASKS.md b/src/Excititor/__Libraries/StellaOps.Excititor.Policy/TASKS.md similarity index 91% rename from src/StellaOps.Excititor.Policy/TASKS.md rename to src/Excititor/__Libraries/StellaOps.Excititor.Policy/TASKS.md index 423e7f0a..4256ff27 100644 --- a/src/StellaOps.Excititor.Policy/TASKS.md +++ b/src/Excititor/__Libraries/StellaOps.Excititor.Policy/TASKS.md @@ -1,11 +1,11 @@ -If you are working on this file you need to read docs/ARCHITECTURE_EXCITITOR.md and ./AGENTS.md). -# TASKS -| Task | Owner(s) | Depends on | Notes | -|---|---|---|---| -|EXCITITOR-POLICY-01-001 – Policy schema & binding|Team Excititor Policy|EXCITITOR-CORE-01-001|DONE (2025-10-15) – Established `VexPolicyOptions`, options binding, and snapshot provider covering baseline weights/overrides.| -|EXCITITOR-POLICY-01-002 – Policy evaluator service|Team Excititor Policy|EXCITITOR-POLICY-01-001|DONE (2025-10-15) – `VexPolicyEvaluator` exposes immutable snapshots to consensus and normalizes rejection reasons.| -|EXCITITOR-POLICY-01-003 – Operator diagnostics & docs|Team Excititor Policy|EXCITITOR-POLICY-01-001|**DONE (2025-10-16)** – Surface structured diagnostics (CLI/WebService) and author policy upgrade guidance in docs/ARCHITECTURE_EXCITITOR.md appendix.<br>2025-10-16: Added `IVexPolicyDiagnostics`/`VexPolicyDiagnosticsReport`, sorted issue ordering, recommendations, and appendix guidance. Tests: `dotnet test src/StellaOps.Excititor.Core.Tests/StellaOps.Excititor.Core.Tests.csproj`.| -|EXCITITOR-POLICY-01-004 – Policy schema validation & YAML binding|Team Excititor Policy|EXCITITOR-POLICY-01-001|**DONE (2025-10-16)** – Added strongly-typed YAML/JSON binding, schema validation, and deterministic diagnostics for operator-supplied policy bundles.| -|EXCITITOR-POLICY-01-005 – Policy change tracking & telemetry|Team Excititor Policy|EXCITITOR-POLICY-01-002|**DONE (2025-10-16)** – Emit revision history, expose snapshot digests via CLI/WebService, and add structured logging/metrics for policy reloads.<br>2025-10-16: `VexPolicySnapshot` now carries revision/digest, provider logs reloads, `vex.policy.reloads` metric emitted, binder/diagnostics expose digest metadata. 
Tests: `dotnet test src/StellaOps.Excititor.Core.Tests/StellaOps.Excititor.Core.Tests.csproj`.| -|EXCITITOR-POLICY-02-001 – Scoring coefficients & weight ceilings|Team Excititor Policy|EXCITITOR-POLICY-01-004|DONE (2025-10-19) – Added `weights.ceiling` + `scoring.{alpha,beta}` options with normalization warnings, extended consensus policy/digest, refreshed docs (`docs/ARCHITECTURE_EXCITITOR.md`, `docs/EXCITITOR_SCORRING.md`), and validated via `dotnet test` for core/policy suites.| -|EXCITITOR-POLICY-02-002 – Diagnostics for scoring signals|Team Excititor Policy|EXCITITOR-POLICY-02-001|BACKLOG – Update diagnostics reports to surface missing severity/KEV/EPSS mappings, coefficient overrides, and provide actionable recommendations for policy tuning.| +If you are working on this file you need to read docs/ARCHITECTURE_EXCITITOR.md and ./AGENTS.md). +# TASKS +| Task | Owner(s) | Depends on | Notes | +|---|---|---|---| +|EXCITITOR-POLICY-01-001 – Policy schema & binding|Team Excititor Policy|EXCITITOR-CORE-01-001|DONE (2025-10-15) – Established `VexPolicyOptions`, options binding, and snapshot provider covering baseline weights/overrides.| +|EXCITITOR-POLICY-01-002 – Policy evaluator service|Team Excititor Policy|EXCITITOR-POLICY-01-001|DONE (2025-10-15) – `VexPolicyEvaluator` exposes immutable snapshots to consensus and normalizes rejection reasons.| +|EXCITITOR-POLICY-01-003 – Operator diagnostics & docs|Team Excititor Policy|EXCITITOR-POLICY-01-001|**DONE (2025-10-16)** – Surface structured diagnostics (CLI/WebService) and author policy upgrade guidance in docs/ARCHITECTURE_EXCITITOR.md appendix.<br>2025-10-16: Added `IVexPolicyDiagnostics`/`VexPolicyDiagnosticsReport`, sorted issue ordering, recommendations, and appendix guidance. Tests: `dotnet test src/Excititor/__Tests/StellaOps.Excititor.Core.Tests/StellaOps.Excititor.Core.Tests.csproj`.| +|EXCITITOR-POLICY-01-004 – Policy schema validation & YAML binding|Team Excititor Policy|EXCITITOR-POLICY-01-001|**DONE (2025-10-16)** – Added strongly-typed YAML/JSON binding, schema validation, and deterministic diagnostics for operator-supplied policy bundles.| +|EXCITITOR-POLICY-01-005 – Policy change tracking & telemetry|Team Excititor Policy|EXCITITOR-POLICY-01-002|**DONE (2025-10-16)** – Emit revision history, expose snapshot digests via CLI/WebService, and add structured logging/metrics for policy reloads.<br>2025-10-16: `VexPolicySnapshot` now carries revision/digest, provider logs reloads, `vex.policy.reloads` metric emitted, binder/diagnostics expose digest metadata. 
Tests: `dotnet test src/Excititor/__Tests/StellaOps.Excititor.Core.Tests/StellaOps.Excititor.Core.Tests.csproj`.| +|EXCITITOR-POLICY-02-001 – Scoring coefficients & weight ceilings|Team Excititor Policy|EXCITITOR-POLICY-01-004|DONE (2025-10-19) – Added `weights.ceiling` + `scoring.{alpha,beta}` options with normalization warnings, extended consensus policy/digest, refreshed docs (`docs/ARCHITECTURE_EXCITITOR.md`, `docs/EXCITITOR_SCORRING.md`), and validated via `dotnet test` for core/policy suites.| +|EXCITITOR-POLICY-02-002 – Diagnostics for scoring signals|Team Excititor Policy|EXCITITOR-POLICY-02-001|BACKLOG – Update diagnostics reports to surface missing severity/KEV/EPSS mappings, coefficient overrides, and provide actionable recommendations for policy tuning.| diff --git a/src/StellaOps.Excititor.Policy/VexPolicyBinder.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Policy/VexPolicyBinder.cs similarity index 100% rename from src/StellaOps.Excititor.Policy/VexPolicyBinder.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Policy/VexPolicyBinder.cs diff --git a/src/StellaOps.Excititor.Policy/VexPolicyDiagnostics.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Policy/VexPolicyDiagnostics.cs similarity index 100% rename from src/StellaOps.Excititor.Policy/VexPolicyDiagnostics.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Policy/VexPolicyDiagnostics.cs diff --git a/src/StellaOps.Excititor.Policy/VexPolicyDigest.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Policy/VexPolicyDigest.cs similarity index 100% rename from src/StellaOps.Excititor.Policy/VexPolicyDigest.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Policy/VexPolicyDigest.cs diff --git a/src/StellaOps.Excititor.Policy/VexPolicyOptions.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Policy/VexPolicyOptions.cs similarity index 100% rename from src/StellaOps.Excititor.Policy/VexPolicyOptions.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Policy/VexPolicyOptions.cs diff --git a/src/StellaOps.Excititor.Policy/VexPolicyProcessing.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Policy/VexPolicyProcessing.cs similarity index 100% rename from src/StellaOps.Excititor.Policy/VexPolicyProcessing.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Policy/VexPolicyProcessing.cs diff --git a/src/StellaOps.Excititor.Policy/VexPolicyTelemetry.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Policy/VexPolicyTelemetry.cs similarity index 100% rename from src/StellaOps.Excititor.Policy/VexPolicyTelemetry.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Policy/VexPolicyTelemetry.cs diff --git a/src/StellaOps.Excititor.Storage.Mongo/AGENTS.md b/src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/AGENTS.md similarity index 100% rename from src/StellaOps.Excititor.Storage.Mongo/AGENTS.md rename to src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/AGENTS.md diff --git a/src/StellaOps.Excititor.Storage.Mongo/IVexExportStore.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/IVexExportStore.cs similarity index 100% rename from src/StellaOps.Excititor.Storage.Mongo/IVexExportStore.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/IVexExportStore.cs diff --git a/src/StellaOps.Excititor.Storage.Mongo/IVexRawStore.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/IVexRawStore.cs similarity index 100% rename from src/StellaOps.Excititor.Storage.Mongo/IVexRawStore.cs rename to 
src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/IVexRawStore.cs diff --git a/src/StellaOps.Excititor.Storage.Mongo/IVexStorageContracts.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/IVexStorageContracts.cs similarity index 100% rename from src/StellaOps.Excititor.Storage.Mongo/IVexStorageContracts.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/IVexStorageContracts.cs diff --git a/src/StellaOps.Excititor.Storage.Mongo/Migrations/IVexMongoMigration.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/Migrations/IVexMongoMigration.cs similarity index 100% rename from src/StellaOps.Excititor.Storage.Mongo/Migrations/IVexMongoMigration.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/Migrations/IVexMongoMigration.cs diff --git a/src/StellaOps.Excititor.Storage.Mongo/Migrations/VexConsensusHoldMigration.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/Migrations/VexConsensusHoldMigration.cs similarity index 97% rename from src/StellaOps.Excititor.Storage.Mongo/Migrations/VexConsensusHoldMigration.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/Migrations/VexConsensusHoldMigration.cs index f8ac75a1..a45a9cb6 100644 --- a/src/StellaOps.Excititor.Storage.Mongo/Migrations/VexConsensusHoldMigration.cs +++ b/src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/Migrations/VexConsensusHoldMigration.cs @@ -1,29 +1,29 @@ -using System.Threading; -using System.Threading.Tasks; -using MongoDB.Driver; - -namespace StellaOps.Excititor.Storage.Mongo.Migrations; - -internal sealed class VexConsensusHoldMigration : IVexMongoMigration -{ - public string Id => "20251021-consensus-holds"; - - public async ValueTask ExecuteAsync(IMongoDatabase database, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(database); - - var collection = database.GetCollection<VexConsensusHoldRecord>(VexMongoCollectionNames.ConsensusHolds); - - var eligibleIndex = Builders<VexConsensusHoldRecord>.IndexKeys - .Ascending(x => x.EligibleAt); - - var keyIndex = Builders<VexConsensusHoldRecord>.IndexKeys - .Ascending(x => x.VulnerabilityId) - .Ascending(x => x.ProductKey); - - await Task.WhenAll( - collection.Indexes.CreateOneAsync(new CreateIndexModel<VexConsensusHoldRecord>(eligibleIndex), cancellationToken: cancellationToken), - collection.Indexes.CreateOneAsync(new CreateIndexModel<VexConsensusHoldRecord>(keyIndex), cancellationToken: cancellationToken)) - .ConfigureAwait(false); - } -} +using System.Threading; +using System.Threading.Tasks; +using MongoDB.Driver; + +namespace StellaOps.Excititor.Storage.Mongo.Migrations; + +internal sealed class VexConsensusHoldMigration : IVexMongoMigration +{ + public string Id => "20251021-consensus-holds"; + + public async ValueTask ExecuteAsync(IMongoDatabase database, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(database); + + var collection = database.GetCollection<VexConsensusHoldRecord>(VexMongoCollectionNames.ConsensusHolds); + + var eligibleIndex = Builders<VexConsensusHoldRecord>.IndexKeys + .Ascending(x => x.EligibleAt); + + var keyIndex = Builders<VexConsensusHoldRecord>.IndexKeys + .Ascending(x => x.VulnerabilityId) + .Ascending(x => x.ProductKey); + + await Task.WhenAll( + collection.Indexes.CreateOneAsync(new CreateIndexModel<VexConsensusHoldRecord>(eligibleIndex), cancellationToken: cancellationToken), + collection.Indexes.CreateOneAsync(new 
CreateIndexModel<VexConsensusHoldRecord>(keyIndex), cancellationToken: cancellationToken)) + .ConfigureAwait(false); + } +} diff --git a/src/StellaOps.Excititor.Storage.Mongo/Migrations/VexConsensusSignalsMigration.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/Migrations/VexConsensusSignalsMigration.cs similarity index 100% rename from src/StellaOps.Excititor.Storage.Mongo/Migrations/VexConsensusSignalsMigration.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/Migrations/VexConsensusSignalsMigration.cs diff --git a/src/StellaOps.Excititor.Storage.Mongo/Migrations/VexInitialIndexMigration.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/Migrations/VexInitialIndexMigration.cs similarity index 100% rename from src/StellaOps.Excititor.Storage.Mongo/Migrations/VexInitialIndexMigration.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/Migrations/VexInitialIndexMigration.cs diff --git a/src/StellaOps.Excititor.Storage.Mongo/Migrations/VexMigrationRecord.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/Migrations/VexMigrationRecord.cs similarity index 100% rename from src/StellaOps.Excititor.Storage.Mongo/Migrations/VexMigrationRecord.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/Migrations/VexMigrationRecord.cs diff --git a/src/StellaOps.Excititor.Storage.Mongo/Migrations/VexMongoMigrationHostedService.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/Migrations/VexMongoMigrationHostedService.cs similarity index 100% rename from src/StellaOps.Excititor.Storage.Mongo/Migrations/VexMongoMigrationHostedService.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/Migrations/VexMongoMigrationHostedService.cs diff --git a/src/StellaOps.Excititor.Storage.Mongo/Migrations/VexMongoMigrationRunner.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/Migrations/VexMongoMigrationRunner.cs similarity index 100% rename from src/StellaOps.Excititor.Storage.Mongo/Migrations/VexMongoMigrationRunner.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/Migrations/VexMongoMigrationRunner.cs diff --git a/src/StellaOps.Excititor.Storage.Mongo/MongoVexCacheIndex.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/MongoVexCacheIndex.cs similarity index 100% rename from src/StellaOps.Excititor.Storage.Mongo/MongoVexCacheIndex.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/MongoVexCacheIndex.cs diff --git a/src/StellaOps.Excititor.Storage.Mongo/MongoVexCacheMaintenance.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/MongoVexCacheMaintenance.cs similarity index 100% rename from src/StellaOps.Excititor.Storage.Mongo/MongoVexCacheMaintenance.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/MongoVexCacheMaintenance.cs diff --git a/src/StellaOps.Excititor.Storage.Mongo/MongoVexClaimStore.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/MongoVexClaimStore.cs similarity index 100% rename from src/StellaOps.Excititor.Storage.Mongo/MongoVexClaimStore.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/MongoVexClaimStore.cs diff --git a/src/StellaOps.Excititor.Storage.Mongo/MongoVexConnectorStateRepository.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/MongoVexConnectorStateRepository.cs similarity index 100% rename from src/StellaOps.Excititor.Storage.Mongo/MongoVexConnectorStateRepository.cs rename to 
src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/MongoVexConnectorStateRepository.cs diff --git a/src/StellaOps.Excititor.Storage.Mongo/MongoVexConsensusHoldStore.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/MongoVexConsensusHoldStore.cs similarity index 97% rename from src/StellaOps.Excititor.Storage.Mongo/MongoVexConsensusHoldStore.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/MongoVexConsensusHoldStore.cs index 951644ed..d34759a5 100644 --- a/src/StellaOps.Excititor.Storage.Mongo/MongoVexConsensusHoldStore.cs +++ b/src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/MongoVexConsensusHoldStore.cs @@ -1,88 +1,88 @@ -using System.Collections.Generic; -using System.Runtime.CompilerServices; -using System.Threading; -using System.Threading.Tasks; -using MongoDB.Driver; -using StellaOps.Excititor.Core; - -namespace StellaOps.Excititor.Storage.Mongo; - -public sealed class MongoVexConsensusHoldStore : IVexConsensusHoldStore -{ - private readonly IMongoCollection<VexConsensusHoldRecord> _collection; - - public MongoVexConsensusHoldStore(IMongoDatabase database) - { - ArgumentNullException.ThrowIfNull(database); - VexMongoMappingRegistry.Register(); - _collection = database.GetCollection<VexConsensusHoldRecord>(VexMongoCollectionNames.ConsensusHolds); - } - - public async ValueTask<VexConsensusHold?> FindAsync(string vulnerabilityId, string productKey, CancellationToken cancellationToken, IClientSessionHandle? session = null) - { - ArgumentException.ThrowIfNullOrWhiteSpace(vulnerabilityId); - ArgumentException.ThrowIfNullOrWhiteSpace(productKey); - var id = VexConsensusRecord.CreateId(vulnerabilityId, productKey); - var filter = Builders<VexConsensusHoldRecord>.Filter.Eq(x => x.Id, id); - var record = session is null - ? await _collection.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false) - : await _collection.Find(session, filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); - return record?.ToDomain(); - } - - public async ValueTask SaveAsync(VexConsensusHold hold, CancellationToken cancellationToken, IClientSessionHandle? session = null) - { - ArgumentNullException.ThrowIfNull(hold); - var record = VexConsensusHoldRecord.FromDomain(hold); - var filter = Builders<VexConsensusHoldRecord>.Filter.Eq(x => x.Id, record.Id); - if (session is null) - { - await _collection.ReplaceOneAsync(filter, record, new ReplaceOptions { IsUpsert = true }, cancellationToken).ConfigureAwait(false); - } - else - { - await _collection.ReplaceOneAsync(session, filter, record, new ReplaceOptions { IsUpsert = true }, cancellationToken).ConfigureAwait(false); - } - } - - public async ValueTask RemoveAsync(string vulnerabilityId, string productKey, CancellationToken cancellationToken, IClientSessionHandle? session = null) - { - ArgumentException.ThrowIfNullOrWhiteSpace(vulnerabilityId); - ArgumentException.ThrowIfNullOrWhiteSpace(productKey); - var id = VexConsensusRecord.CreateId(vulnerabilityId, productKey); - var filter = Builders<VexConsensusHoldRecord>.Filter.Eq(x => x.Id, id); - if (session is null) - { - await _collection.DeleteOneAsync(filter, cancellationToken).ConfigureAwait(false); - } - else - { - await _collection.DeleteOneAsync(session, filter, options: null, cancellationToken).ConfigureAwait(false); - } - } - - public async IAsyncEnumerable<VexConsensusHold> FindEligibleAsync(DateTimeOffset asOf, int batchSize, [EnumeratorCancellation] CancellationToken cancellationToken, IClientSessionHandle? 
session = null) - { - var cutoff = asOf.UtcDateTime; - var filter = Builders<VexConsensusHoldRecord>.Filter.Lte(x => x.EligibleAt, cutoff); - var find = session is null - ? _collection.Find(filter) - : _collection.Find(session, filter); - - find = find.SortBy(x => x.EligibleAt); - - if (batchSize > 0) - { - find = find.Limit(batchSize); - } - - using var cursor = await find.ToCursorAsync(cancellationToken).ConfigureAwait(false); - while (await cursor.MoveNextAsync(cancellationToken).ConfigureAwait(false)) - { - foreach (var record in cursor.Current) - { - yield return record.ToDomain(); - } - } - } -} +using System.Collections.Generic; +using System.Runtime.CompilerServices; +using System.Threading; +using System.Threading.Tasks; +using MongoDB.Driver; +using StellaOps.Excititor.Core; + +namespace StellaOps.Excititor.Storage.Mongo; + +public sealed class MongoVexConsensusHoldStore : IVexConsensusHoldStore +{ + private readonly IMongoCollection<VexConsensusHoldRecord> _collection; + + public MongoVexConsensusHoldStore(IMongoDatabase database) + { + ArgumentNullException.ThrowIfNull(database); + VexMongoMappingRegistry.Register(); + _collection = database.GetCollection<VexConsensusHoldRecord>(VexMongoCollectionNames.ConsensusHolds); + } + + public async ValueTask<VexConsensusHold?> FindAsync(string vulnerabilityId, string productKey, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + ArgumentException.ThrowIfNullOrWhiteSpace(vulnerabilityId); + ArgumentException.ThrowIfNullOrWhiteSpace(productKey); + var id = VexConsensusRecord.CreateId(vulnerabilityId, productKey); + var filter = Builders<VexConsensusHoldRecord>.Filter.Eq(x => x.Id, id); + var record = session is null + ? await _collection.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false) + : await _collection.Find(session, filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); + return record?.ToDomain(); + } + + public async ValueTask SaveAsync(VexConsensusHold hold, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + ArgumentNullException.ThrowIfNull(hold); + var record = VexConsensusHoldRecord.FromDomain(hold); + var filter = Builders<VexConsensusHoldRecord>.Filter.Eq(x => x.Id, record.Id); + if (session is null) + { + await _collection.ReplaceOneAsync(filter, record, new ReplaceOptions { IsUpsert = true }, cancellationToken).ConfigureAwait(false); + } + else + { + await _collection.ReplaceOneAsync(session, filter, record, new ReplaceOptions { IsUpsert = true }, cancellationToken).ConfigureAwait(false); + } + } + + public async ValueTask RemoveAsync(string vulnerabilityId, string productKey, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + ArgumentException.ThrowIfNullOrWhiteSpace(vulnerabilityId); + ArgumentException.ThrowIfNullOrWhiteSpace(productKey); + var id = VexConsensusRecord.CreateId(vulnerabilityId, productKey); + var filter = Builders<VexConsensusHoldRecord>.Filter.Eq(x => x.Id, id); + if (session is null) + { + await _collection.DeleteOneAsync(filter, cancellationToken).ConfigureAwait(false); + } + else + { + await _collection.DeleteOneAsync(session, filter, options: null, cancellationToken).ConfigureAwait(false); + } + } + + public async IAsyncEnumerable<VexConsensusHold> FindEligibleAsync(DateTimeOffset asOf, int batchSize, [EnumeratorCancellation] CancellationToken cancellationToken, IClientSessionHandle? 
session = null) + { + var cutoff = asOf.UtcDateTime; + var filter = Builders<VexConsensusHoldRecord>.Filter.Lte(x => x.EligibleAt, cutoff); + var find = session is null + ? _collection.Find(filter) + : _collection.Find(session, filter); + + find = find.SortBy(x => x.EligibleAt); + + if (batchSize > 0) + { + find = find.Limit(batchSize); + } + + using var cursor = await find.ToCursorAsync(cancellationToken).ConfigureAwait(false); + while (await cursor.MoveNextAsync(cancellationToken).ConfigureAwait(false)) + { + foreach (var record in cursor.Current) + { + yield return record.ToDomain(); + } + } + } +} diff --git a/src/StellaOps.Excititor.Storage.Mongo/MongoVexConsensusStore.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/MongoVexConsensusStore.cs similarity index 100% rename from src/StellaOps.Excititor.Storage.Mongo/MongoVexConsensusStore.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/MongoVexConsensusStore.cs diff --git a/src/StellaOps.Excititor.Storage.Mongo/MongoVexExportStore.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/MongoVexExportStore.cs similarity index 100% rename from src/StellaOps.Excititor.Storage.Mongo/MongoVexExportStore.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/MongoVexExportStore.cs diff --git a/src/StellaOps.Excititor.Storage.Mongo/MongoVexProviderStore.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/MongoVexProviderStore.cs similarity index 100% rename from src/StellaOps.Excititor.Storage.Mongo/MongoVexProviderStore.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/MongoVexProviderStore.cs diff --git a/src/StellaOps.Excititor.Storage.Mongo/MongoVexRawStore.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/MongoVexRawStore.cs similarity index 100% rename from src/StellaOps.Excititor.Storage.Mongo/MongoVexRawStore.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/MongoVexRawStore.cs diff --git a/src/StellaOps.Excititor.Storage.Mongo/Properties/AssemblyInfo.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/Properties/AssemblyInfo.cs similarity index 100% rename from src/StellaOps.Excititor.Storage.Mongo/Properties/AssemblyInfo.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/Properties/AssemblyInfo.cs diff --git a/src/StellaOps.Excititor.Storage.Mongo/ServiceCollectionExtensions.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/ServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Excititor.Storage.Mongo/ServiceCollectionExtensions.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/ServiceCollectionExtensions.cs diff --git a/src/StellaOps.Excititor.Storage.Mongo/StellaOps.Excititor.Storage.Mongo.csproj b/src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/StellaOps.Excititor.Storage.Mongo.csproj similarity index 97% rename from src/StellaOps.Excititor.Storage.Mongo/StellaOps.Excititor.Storage.Mongo.csproj rename to src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/StellaOps.Excititor.Storage.Mongo.csproj index 4c3f02df..28e43d61 100644 --- a/src/StellaOps.Excititor.Storage.Mongo/StellaOps.Excititor.Storage.Mongo.csproj +++ b/src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/StellaOps.Excititor.Storage.Mongo.csproj @@ -1,18 +1,18 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - 
<Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <ItemGroup> - <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="MongoDB.Driver" Version="3.5.0" /> - </ItemGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" /> - </ItemGroup> -</Project> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <ItemGroup> + <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="MongoDB.Driver" Version="3.5.0" /> + </ItemGroup> + <ItemGroup> + <ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" /> + </ItemGroup> +</Project> diff --git a/src/StellaOps.Excititor.Storage.Mongo/StorageBackedVexNormalizerRouter.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/StorageBackedVexNormalizerRouter.cs similarity index 100% rename from src/StellaOps.Excititor.Storage.Mongo/StorageBackedVexNormalizerRouter.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/StorageBackedVexNormalizerRouter.cs diff --git a/src/StellaOps.Excititor.Storage.Mongo/TASKS.md b/src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/TASKS.md similarity index 99% rename from src/StellaOps.Excititor.Storage.Mongo/TASKS.md rename to src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/TASKS.md index c1f1f3c8..5faef448 100644 --- a/src/StellaOps.Excititor.Storage.Mongo/TASKS.md +++ b/src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/TASKS.md @@ -1,28 +1,28 @@ -# TASKS — Epic 1: Aggregation-Only Contract -> **AOC Reminder:** storage enforces raw VEX documents only—no consensus/precedence data in ingestion collections. -| ID | Status | Owner(s) | Depends on | Notes | -|---|---|---|---|---| -| EXCITITOR-STORE-AOC-19-001 `vex_raw schema validator` | TODO | Excititor Storage Guild | Mongo cluster ops sign-off | Define Mongo JSON schema for `vex_raw` enforcing required fields and forbidding derived/consensus/severity fields. Ship unit tests with Mongo2Go to validate rejects. | -| EXCITITOR-STORE-AOC-19-002 `idempotency unique index` | TODO | Excititor Storage Guild | EXCITITOR-STORE-AOC-19-001 | Create `(source.vendor, upstream.upstream_id, upstream.content_hash, tenant)` unique index with backfill checker, updating migrations + bootstrapper for offline installs. | -| EXCITITOR-STORE-AOC-19-003 `append-only migration plan` | TODO | Excititor Storage Guild | EXCITITOR-STORE-AOC-19-002 | Migrate legacy consensus collections to `_backup_*`, seed supersedes chain for raw docs, and document rollback path + dry-run verification. 
| -| EXCITITOR-STORE-AOC-19-004 `validator deployment docset` | TODO | Excititor Storage Guild, DevOps Guild | EXCITITOR-STORE-AOC-19-001 | Update migration runbooks and Offline Kit packaging to bundle schema validator scripts, with smoke instructions for air-gapped clusters. | - -## Policy Engine v2 - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| EXCITITOR-POLICY-20-003 `Selection cursors` | TODO | Excititor Storage Guild | EXCITITOR-STORE-AOC-19-002, POLICY-ENGINE-20-003 | Introduce VEX selection cursor collections + indexes powering incremental policy runs; bundle change-stream checkpoint migrations and Offline Kit tooling. | - -## Graph Explorer v1 - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| EXCITITOR-GRAPH-21-005 `Inspector indexes` | BLOCKED (2025-10-27) | Excititor Storage Guild | EXCITITOR-GRAPH-21-001 | Add indexes/materialized views for VEX lookups by PURL/policy to support Cartographer inspector performance; document migrations. | -> 2025-10-27: Indexed workload requirements depend on Inspector linkouts (`EXCITITOR-GRAPH-21-001`) which are themselves blocked on Cartographer contract. Revisit once access patterns are defined. - -## Link-Not-Merge v1 - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| EXCITITOR-LNM-21-101 `Observations collections` | TODO | Excititor Storage Guild | EXCITITOR-LNM-21-001 | Provision `vex_observations`/`vex_linksets` collections with shard keys, indexes over aliases & product PURLs, and multi-tenant guards. | -| EXCITITOR-LNM-21-102 `Migration/backfill` | TODO | Excititor Storage Guild, DevOps Guild | EXCITITOR-LNM-21-101 | Backfill legacy merged VEX docs into observations/linksets, add provenance notes, and produce rollback scripts. | +# TASKS — Epic 1: Aggregation-Only Contract +> **AOC Reminder:** storage enforces raw VEX documents only—no consensus/precedence data in ingestion collections. +| ID | Status | Owner(s) | Depends on | Notes | +|---|---|---|---|---| +| EXCITITOR-STORE-AOC-19-001 `vex_raw schema validator` | TODO | Excititor Storage Guild | Mongo cluster ops sign-off | Define Mongo JSON schema for `vex_raw` enforcing required fields and forbidding derived/consensus/severity fields. Ship unit tests with Mongo2Go to validate rejects. | +| EXCITITOR-STORE-AOC-19-002 `idempotency unique index` | TODO | Excititor Storage Guild | EXCITITOR-STORE-AOC-19-001 | Create `(source.vendor, upstream.upstream_id, upstream.content_hash, tenant)` unique index with backfill checker, updating migrations + bootstrapper for offline installs. | +| EXCITITOR-STORE-AOC-19-003 `append-only migration plan` | TODO | Excititor Storage Guild | EXCITITOR-STORE-AOC-19-002 | Migrate legacy consensus collections to `_backup_*`, seed supersedes chain for raw docs, and document rollback path + dry-run verification. | +| EXCITITOR-STORE-AOC-19-004 `validator deployment docset` | TODO | Excititor Storage Guild, DevOps Guild | EXCITITOR-STORE-AOC-19-001 | Update migration runbooks and Offline Kit packaging to bundle schema validator scripts, with smoke instructions for air-gapped clusters. 
| + +## Policy Engine v2 + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| EXCITITOR-POLICY-20-003 `Selection cursors` | TODO | Excititor Storage Guild | EXCITITOR-STORE-AOC-19-002, POLICY-ENGINE-20-003 | Introduce VEX selection cursor collections + indexes powering incremental policy runs; bundle change-stream checkpoint migrations and Offline Kit tooling. | + +## Graph Explorer v1 + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| EXCITITOR-GRAPH-21-005 `Inspector indexes` | BLOCKED (2025-10-27) | Excititor Storage Guild | EXCITITOR-GRAPH-21-001 | Add indexes/materialized views for VEX lookups by PURL/policy to support Cartographer inspector performance; document migrations. | +> 2025-10-27: Indexed workload requirements depend on Inspector linkouts (`EXCITITOR-GRAPH-21-001`) which are themselves blocked on Cartographer contract. Revisit once access patterns are defined. + +## Link-Not-Merge v1 + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| EXCITITOR-LNM-21-101 `Observations collections` | TODO | Excititor Storage Guild | EXCITITOR-LNM-21-001 | Provision `vex_observations`/`vex_linksets` collections with shard keys, indexes over aliases & product PURLs, and multi-tenant guards. | +| EXCITITOR-LNM-21-102 `Migration/backfill` | TODO | Excititor Storage Guild, DevOps Guild | EXCITITOR-LNM-21-101 | Backfill legacy merged VEX docs into observations/linksets, add provenance notes, and produce rollback scripts. | diff --git a/src/StellaOps.Excititor.Storage.Mongo/VexMongoMappingRegistry.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/VexMongoMappingRegistry.cs similarity index 100% rename from src/StellaOps.Excititor.Storage.Mongo/VexMongoMappingRegistry.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/VexMongoMappingRegistry.cs diff --git a/src/StellaOps.Excititor.Storage.Mongo/VexMongoModels.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/VexMongoModels.cs similarity index 100% rename from src/StellaOps.Excititor.Storage.Mongo/VexMongoModels.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/VexMongoModels.cs diff --git a/src/StellaOps.Excititor.Storage.Mongo/VexMongoSessionProvider.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/VexMongoSessionProvider.cs similarity index 100% rename from src/StellaOps.Excititor.Storage.Mongo/VexMongoSessionProvider.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/VexMongoSessionProvider.cs diff --git a/src/StellaOps.Excititor.Storage.Mongo/VexMongoStorageOptions.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/VexMongoStorageOptions.cs similarity index 100% rename from src/StellaOps.Excititor.Storage.Mongo/VexMongoStorageOptions.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/VexMongoStorageOptions.cs diff --git a/src/StellaOps.Excititor.Storage.Mongo/VexStatementBackfillService.cs b/src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/VexStatementBackfillService.cs similarity index 100% rename from src/StellaOps.Excititor.Storage.Mongo/VexStatementBackfillService.cs rename to src/Excititor/__Libraries/StellaOps.Excititor.Storage.Mongo/VexStatementBackfillService.cs diff --git a/src/StellaOps.Excititor.ArtifactStores.S3.Tests/S3ArtifactClientTests.cs 
b/src/Excititor/__Tests/StellaOps.Excititor.ArtifactStores.S3.Tests/S3ArtifactClientTests.cs similarity index 100% rename from src/StellaOps.Excititor.ArtifactStores.S3.Tests/S3ArtifactClientTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.ArtifactStores.S3.Tests/S3ArtifactClientTests.cs diff --git a/src/StellaOps.Excititor.ArtifactStores.S3.Tests/StellaOps.Excititor.ArtifactStores.S3.Tests.csproj b/src/Excititor/__Tests/StellaOps.Excititor.ArtifactStores.S3.Tests/StellaOps.Excititor.ArtifactStores.S3.Tests.csproj similarity index 68% rename from src/StellaOps.Excititor.ArtifactStores.S3.Tests/StellaOps.Excititor.ArtifactStores.S3.Tests.csproj rename to src/Excititor/__Tests/StellaOps.Excititor.ArtifactStores.S3.Tests/StellaOps.Excititor.ArtifactStores.S3.Tests.csproj index 94ed72f0..309e6bfc 100644 --- a/src/StellaOps.Excititor.ArtifactStores.S3.Tests/StellaOps.Excititor.ArtifactStores.S3.Tests.csproj +++ b/src/Excititor/__Tests/StellaOps.Excititor.ArtifactStores.S3.Tests/StellaOps.Excititor.ArtifactStores.S3.Tests.csproj @@ -1,15 +1,16 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <ItemGroup> - <PackageReference Include="Moq" Version="4.20.70" /> - </ItemGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Excititor.ArtifactStores.S3\StellaOps.Excititor.ArtifactStores.S3.csproj" /> - </ItemGroup> -</Project> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <ItemGroup> + <PackageReference Include="Moq" Version="4.20.70" /> + </ItemGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Excititor.ArtifactStores.S3/StellaOps.Excititor.ArtifactStores.S3.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Excititor.Attestation.Tests/StellaOps.Excititor.Attestation.Tests.csproj b/src/Excititor/__Tests/StellaOps.Excititor.Attestation.Tests/StellaOps.Excititor.Attestation.Tests.csproj similarity index 52% rename from src/StellaOps.Excititor.Attestation.Tests/StellaOps.Excititor.Attestation.Tests.csproj rename to src/Excititor/__Tests/StellaOps.Excititor.Attestation.Tests/StellaOps.Excititor.Attestation.Tests.csproj index a521425c..c61c75b4 100644 --- a/src/StellaOps.Excititor.Attestation.Tests/StellaOps.Excititor.Attestation.Tests.csproj +++ b/src/Excititor/__Tests/StellaOps.Excititor.Attestation.Tests/StellaOps.Excititor.Attestation.Tests.csproj @@ -1,13 +1,14 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Excititor.Attestation\StellaOps.Excititor.Attestation.csproj" /> - <ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" /> - </ItemGroup> -</Project> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> 
+ <LangVersion>preview</LangVersion> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Excititor.Attestation/StellaOps.Excititor.Attestation.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Excititor.Core/StellaOps.Excititor.Core.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Excititor.Attestation.Tests/VexAttestationClientTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Attestation.Tests/VexAttestationClientTests.cs similarity index 100% rename from src/StellaOps.Excititor.Attestation.Tests/VexAttestationClientTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Attestation.Tests/VexAttestationClientTests.cs diff --git a/src/StellaOps.Excititor.Attestation.Tests/VexAttestationVerifierTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Attestation.Tests/VexAttestationVerifierTests.cs similarity index 100% rename from src/StellaOps.Excititor.Attestation.Tests/VexAttestationVerifierTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Attestation.Tests/VexAttestationVerifierTests.cs diff --git a/src/StellaOps.Excititor.Attestation.Tests/VexDsseBuilderTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Attestation.Tests/VexDsseBuilderTests.cs similarity index 100% rename from src/StellaOps.Excititor.Attestation.Tests/VexDsseBuilderTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Attestation.Tests/VexDsseBuilderTests.cs diff --git a/src/StellaOps.Excititor.Connectors.Cisco.CSAF.Tests/Connectors/CiscoCsafConnectorTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Connectors.Cisco.CSAF.Tests/Connectors/CiscoCsafConnectorTests.cs similarity index 97% rename from src/StellaOps.Excititor.Connectors.Cisco.CSAF.Tests/Connectors/CiscoCsafConnectorTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Connectors.Cisco.CSAF.Tests/Connectors/CiscoCsafConnectorTests.cs index f454b839..bbc581c6 100644 --- a/src/StellaOps.Excititor.Connectors.Cisco.CSAF.Tests/Connectors/CiscoCsafConnectorTests.cs +++ b/src/Excititor/__Tests/StellaOps.Excititor.Connectors.Cisco.CSAF.Tests/Connectors/CiscoCsafConnectorTests.cs @@ -1,215 +1,215 @@ -using System.Collections.Generic; -using System.Net; -using System.Net.Http; -using System.Text; -using FluentAssertions; -using Microsoft.Extensions.Caching.Memory; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.Extensions.Options; -using Microsoft.Extensions.DependencyInjection; -using StellaOps.Excititor.Connectors.Abstractions; -using StellaOps.Excititor.Connectors.Cisco.CSAF; -using StellaOps.Excititor.Connectors.Cisco.CSAF.Configuration; -using StellaOps.Excititor.Connectors.Cisco.CSAF.Metadata; -using StellaOps.Excititor.Core; -using StellaOps.Excititor.Storage.Mongo; -using System.Collections.Immutable; -using System.IO.Abstractions.TestingHelpers; -using Xunit; -using System.Threading; -using MongoDB.Driver; - -namespace StellaOps.Excititor.Connectors.Cisco.CSAF.Tests.Connectors; - -public sealed class CiscoCsafConnectorTests -{ - [Fact] - public async Task FetchAsync_NewAdvisory_StoresDocumentAndUpdatesState() - { - var responses = new Dictionary<Uri, Queue<HttpResponseMessage>> - { - [new Uri("https://api.cisco.test/.well-known/csaf/provider-metadata.json")] = QueueResponses(""" - { - "metadata": { - "publisher": { - "name": "Cisco", - "category": "vendor", - "contact_details": { "id": 
"excititor:cisco" } - } - }, - "distributions": { - "directories": [ "https://api.cisco.test/csaf/" ] - } - } - """), - [new Uri("https://api.cisco.test/csaf/index.json")] = QueueResponses(""" - { - "advisories": [ - { - "id": "cisco-sa-2025", - "url": "https://api.cisco.test/csaf/cisco-sa-2025.json", - "published": "2025-10-01T00:00:00Z", - "lastModified": "2025-10-02T00:00:00Z", - "sha256": "cafebabe" - } - ] - } - """), - [new Uri("https://api.cisco.test/csaf/cisco-sa-2025.json")] = QueueResponses("{ \"document\": \"payload\" }") - }; - - var handler = new RoutingHttpMessageHandler(responses); - var httpClient = new HttpClient(handler); - var factory = new SingleHttpClientFactory(httpClient); - var metadataLoader = new CiscoProviderMetadataLoader( - factory, - new MemoryCache(new MemoryCacheOptions()), - Options.Create(new CiscoConnectorOptions - { - MetadataUri = "https://api.cisco.test/.well-known/csaf/provider-metadata.json", - PersistOfflineSnapshot = false, - }), - NullLogger<CiscoProviderMetadataLoader>.Instance, - new MockFileSystem()); - - var stateRepository = new InMemoryConnectorStateRepository(); - var connector = new CiscoCsafConnector( - metadataLoader, - factory, - stateRepository, - new[] { new CiscoConnectorOptionsValidator() }, - NullLogger<CiscoCsafConnector>.Instance, - TimeProvider.System); - - var settings = new VexConnectorSettings(ImmutableDictionary<string, string>.Empty); - await connector.ValidateAsync(settings, CancellationToken.None); - - var sink = new InMemoryRawSink(); - var context = new VexConnectorContext(null, VexConnectorSettings.Empty, sink, new NoopSignatureVerifier(), new NoopNormalizerRouter(), new ServiceCollection().BuildServiceProvider(), ImmutableDictionary<string, string>.Empty); - - var documents = new List<VexRawDocument>(); - await foreach (var doc in connector.FetchAsync(context, CancellationToken.None)) - { - documents.Add(doc); - } - - documents.Should().HaveCount(1); - sink.Documents.Should().HaveCount(1); - stateRepository.CurrentState.Should().NotBeNull(); - stateRepository.CurrentState!.DocumentDigests.Should().HaveCount(1); - - // second run should not refetch documents - sink.Documents.Clear(); - documents.Clear(); - - await foreach (var doc in connector.FetchAsync(context, CancellationToken.None)) - { - documents.Add(doc); - } - - documents.Should().BeEmpty(); - sink.Documents.Should().BeEmpty(); - } - - private static Queue<HttpResponseMessage> QueueResponses(string payload) - => new(new[] - { - new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StringContent(payload, Encoding.UTF8, "application/json"), - } - }); - - private sealed class RoutingHttpMessageHandler : HttpMessageHandler - { - private readonly Dictionary<Uri, Queue<HttpResponseMessage>> _responses; - - public RoutingHttpMessageHandler(Dictionary<Uri, Queue<HttpResponseMessage>> responses) - { - _responses = responses; - } - - protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) - { - if (request.RequestUri is not null && _responses.TryGetValue(request.RequestUri, out var queue) && queue.Count > 0) - { - var response = queue.Peek(); - return Task.FromResult(response.Clone()); - } - - return Task.FromResult(new HttpResponseMessage(HttpStatusCode.NotFound) - { - Content = new StringContent($"No response configured for {request.RequestUri}"), - }); - } - } - - private sealed class SingleHttpClientFactory : IHttpClientFactory - { - private readonly HttpClient _client; - - public 
SingleHttpClientFactory(HttpClient client) - { - _client = client; - } - - public HttpClient CreateClient(string name) => _client; - } - - private sealed class InMemoryConnectorStateRepository : IVexConnectorStateRepository - { - public VexConnectorState? CurrentState { get; private set; } - - public ValueTask<VexConnectorState?> GetAsync(string connectorId, CancellationToken cancellationToken, IClientSessionHandle? session = null) - => ValueTask.FromResult(CurrentState); - - public ValueTask SaveAsync(VexConnectorState state, CancellationToken cancellationToken, IClientSessionHandle? session = null) - { - CurrentState = state; - return ValueTask.CompletedTask; - } - } - - private sealed class InMemoryRawSink : IVexRawDocumentSink - { - public List<VexRawDocument> Documents { get; } = new(); - - public ValueTask StoreAsync(VexRawDocument document, CancellationToken cancellationToken) - { - Documents.Add(document); - return ValueTask.CompletedTask; - } - } - - private sealed class NoopSignatureVerifier : IVexSignatureVerifier - { - public ValueTask<VexSignatureMetadata?> VerifyAsync(VexRawDocument document, CancellationToken cancellationToken) - => ValueTask.FromResult<VexSignatureMetadata?>(null); - } - - private sealed class NoopNormalizerRouter : IVexNormalizerRouter - { - public ValueTask<VexClaimBatch> NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken) - => ValueTask.FromResult(new VexClaimBatch(document, ImmutableArray<VexClaim>.Empty, ImmutableDictionary<string, string>.Empty)); - } -} - -internal static class HttpResponseMessageExtensions -{ - public static HttpResponseMessage Clone(this HttpResponseMessage response) - { - var clone = new HttpResponseMessage(response.StatusCode); - foreach (var header in response.Headers) - { - clone.Headers.TryAddWithoutValidation(header.Key, header.Value); - } - - if (response.Content is not null) - { - var payload = response.Content.ReadAsStringAsync().GetAwaiter().GetResult(); - clone.Content = new StringContent(payload, Encoding.UTF8, response.Content.Headers.ContentType?.MediaType); - } - - return clone; - } -} +using System.Collections.Generic; +using System.Net; +using System.Net.Http; +using System.Text; +using FluentAssertions; +using Microsoft.Extensions.Caching.Memory; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using Microsoft.Extensions.DependencyInjection; +using StellaOps.Excititor.Connectors.Abstractions; +using StellaOps.Excititor.Connectors.Cisco.CSAF; +using StellaOps.Excititor.Connectors.Cisco.CSAF.Configuration; +using StellaOps.Excititor.Connectors.Cisco.CSAF.Metadata; +using StellaOps.Excititor.Core; +using StellaOps.Excititor.Storage.Mongo; +using System.Collections.Immutable; +using System.IO.Abstractions.TestingHelpers; +using Xunit; +using System.Threading; +using MongoDB.Driver; + +namespace StellaOps.Excititor.Connectors.Cisco.CSAF.Tests.Connectors; + +public sealed class CiscoCsafConnectorTests +{ + [Fact] + public async Task FetchAsync_NewAdvisory_StoresDocumentAndUpdatesState() + { + var responses = new Dictionary<Uri, Queue<HttpResponseMessage>> + { + [new Uri("https://api.cisco.test/.well-known/csaf/provider-metadata.json")] = QueueResponses(""" + { + "metadata": { + "publisher": { + "name": "Cisco", + "category": "vendor", + "contact_details": { "id": "excititor:cisco" } + } + }, + "distributions": { + "directories": [ "https://api.cisco.test/csaf/" ] + } + } + """), + [new Uri("https://api.cisco.test/csaf/index.json")] = QueueResponses(""" + { 
+ "advisories": [ + { + "id": "cisco-sa-2025", + "url": "https://api.cisco.test/csaf/cisco-sa-2025.json", + "published": "2025-10-01T00:00:00Z", + "lastModified": "2025-10-02T00:00:00Z", + "sha256": "cafebabe" + } + ] + } + """), + [new Uri("https://api.cisco.test/csaf/cisco-sa-2025.json")] = QueueResponses("{ \"document\": \"payload\" }") + }; + + var handler = new RoutingHttpMessageHandler(responses); + var httpClient = new HttpClient(handler); + var factory = new SingleHttpClientFactory(httpClient); + var metadataLoader = new CiscoProviderMetadataLoader( + factory, + new MemoryCache(new MemoryCacheOptions()), + Options.Create(new CiscoConnectorOptions + { + MetadataUri = "https://api.cisco.test/.well-known/csaf/provider-metadata.json", + PersistOfflineSnapshot = false, + }), + NullLogger<CiscoProviderMetadataLoader>.Instance, + new MockFileSystem()); + + var stateRepository = new InMemoryConnectorStateRepository(); + var connector = new CiscoCsafConnector( + metadataLoader, + factory, + stateRepository, + new[] { new CiscoConnectorOptionsValidator() }, + NullLogger<CiscoCsafConnector>.Instance, + TimeProvider.System); + + var settings = new VexConnectorSettings(ImmutableDictionary<string, string>.Empty); + await connector.ValidateAsync(settings, CancellationToken.None); + + var sink = new InMemoryRawSink(); + var context = new VexConnectorContext(null, VexConnectorSettings.Empty, sink, new NoopSignatureVerifier(), new NoopNormalizerRouter(), new ServiceCollection().BuildServiceProvider(), ImmutableDictionary<string, string>.Empty); + + var documents = new List<VexRawDocument>(); + await foreach (var doc in connector.FetchAsync(context, CancellationToken.None)) + { + documents.Add(doc); + } + + documents.Should().HaveCount(1); + sink.Documents.Should().HaveCount(1); + stateRepository.CurrentState.Should().NotBeNull(); + stateRepository.CurrentState!.DocumentDigests.Should().HaveCount(1); + + // second run should not refetch documents + sink.Documents.Clear(); + documents.Clear(); + + await foreach (var doc in connector.FetchAsync(context, CancellationToken.None)) + { + documents.Add(doc); + } + + documents.Should().BeEmpty(); + sink.Documents.Should().BeEmpty(); + } + + private static Queue<HttpResponseMessage> QueueResponses(string payload) + => new(new[] + { + new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(payload, Encoding.UTF8, "application/json"), + } + }); + + private sealed class RoutingHttpMessageHandler : HttpMessageHandler + { + private readonly Dictionary<Uri, Queue<HttpResponseMessage>> _responses; + + public RoutingHttpMessageHandler(Dictionary<Uri, Queue<HttpResponseMessage>> responses) + { + _responses = responses; + } + + protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + if (request.RequestUri is not null && _responses.TryGetValue(request.RequestUri, out var queue) && queue.Count > 0) + { + var response = queue.Peek(); + return Task.FromResult(response.Clone()); + } + + return Task.FromResult(new HttpResponseMessage(HttpStatusCode.NotFound) + { + Content = new StringContent($"No response configured for {request.RequestUri}"), + }); + } + } + + private sealed class SingleHttpClientFactory : IHttpClientFactory + { + private readonly HttpClient _client; + + public SingleHttpClientFactory(HttpClient client) + { + _client = client; + } + + public HttpClient CreateClient(string name) => _client; + } + + private sealed class InMemoryConnectorStateRepository : 
IVexConnectorStateRepository + { + public VexConnectorState? CurrentState { get; private set; } + + public ValueTask<VexConnectorState?> GetAsync(string connectorId, CancellationToken cancellationToken, IClientSessionHandle? session = null) + => ValueTask.FromResult(CurrentState); + + public ValueTask SaveAsync(VexConnectorState state, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + CurrentState = state; + return ValueTask.CompletedTask; + } + } + + private sealed class InMemoryRawSink : IVexRawDocumentSink + { + public List<VexRawDocument> Documents { get; } = new(); + + public ValueTask StoreAsync(VexRawDocument document, CancellationToken cancellationToken) + { + Documents.Add(document); + return ValueTask.CompletedTask; + } + } + + private sealed class NoopSignatureVerifier : IVexSignatureVerifier + { + public ValueTask<VexSignatureMetadata?> VerifyAsync(VexRawDocument document, CancellationToken cancellationToken) + => ValueTask.FromResult<VexSignatureMetadata?>(null); + } + + private sealed class NoopNormalizerRouter : IVexNormalizerRouter + { + public ValueTask<VexClaimBatch> NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken) + => ValueTask.FromResult(new VexClaimBatch(document, ImmutableArray<VexClaim>.Empty, ImmutableDictionary<string, string>.Empty)); + } +} + +internal static class HttpResponseMessageExtensions +{ + public static HttpResponseMessage Clone(this HttpResponseMessage response) + { + var clone = new HttpResponseMessage(response.StatusCode); + foreach (var header in response.Headers) + { + clone.Headers.TryAddWithoutValidation(header.Key, header.Value); + } + + if (response.Content is not null) + { + var payload = response.Content.ReadAsStringAsync().GetAwaiter().GetResult(); + clone.Content = new StringContent(payload, Encoding.UTF8, response.Content.Headers.ContentType?.MediaType); + } + + return clone; + } +} diff --git a/src/StellaOps.Excititor.Connectors.Cisco.CSAF.Tests/Metadata/CiscoProviderMetadataLoaderTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Connectors.Cisco.CSAF.Tests/Metadata/CiscoProviderMetadataLoaderTests.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.Cisco.CSAF.Tests/Metadata/CiscoProviderMetadataLoaderTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Connectors.Cisco.CSAF.Tests/Metadata/CiscoProviderMetadataLoaderTests.cs diff --git a/src/StellaOps.Excititor.Connectors.Cisco.CSAF.Tests/StellaOps.Excititor.Connectors.Cisco.CSAF.Tests.csproj b/src/Excititor/__Tests/StellaOps.Excititor.Connectors.Cisco.CSAF.Tests/StellaOps.Excititor.Connectors.Cisco.CSAF.Tests.csproj similarity index 72% rename from src/StellaOps.Excititor.Connectors.Cisco.CSAF.Tests/StellaOps.Excititor.Connectors.Cisco.CSAF.Tests.csproj rename to src/Excititor/__Tests/StellaOps.Excititor.Connectors.Cisco.CSAF.Tests/StellaOps.Excititor.Connectors.Cisco.CSAF.Tests.csproj index 00fd2ae2..dd1a39fd 100644 --- a/src/StellaOps.Excititor.Connectors.Cisco.CSAF.Tests/StellaOps.Excititor.Connectors.Cisco.CSAF.Tests.csproj +++ b/src/Excititor/__Tests/StellaOps.Excititor.Connectors.Cisco.CSAF.Tests/StellaOps.Excititor.Connectors.Cisco.CSAF.Tests.csproj @@ -1,16 +1,17 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <ItemGroup> - <PackageReference 
Include="FluentAssertions" Version="6.12.0" /> - <PackageReference Include="System.IO.Abstractions.TestingHelpers" Version="20.0.28" /> - </ItemGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Excititor.Connectors.Cisco.CSAF\StellaOps.Excititor.Connectors.Cisco.CSAF.csproj" /> - </ItemGroup> -</Project> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <ItemGroup> + <PackageReference Include="FluentAssertions" Version="6.12.0" /> + <PackageReference Include="System.IO.Abstractions.TestingHelpers" Version="20.0.28" /> + </ItemGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Excititor.Connectors.Cisco.CSAF/StellaOps.Excititor.Connectors.Cisco.CSAF.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Excititor.Connectors.MSRC.CSAF.Tests/Authentication/MsrcTokenProviderTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Connectors.MSRC.CSAF.Tests/Authentication/MsrcTokenProviderTests.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.MSRC.CSAF.Tests/Authentication/MsrcTokenProviderTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Connectors.MSRC.CSAF.Tests/Authentication/MsrcTokenProviderTests.cs diff --git a/src/StellaOps.Excititor.Connectors.MSRC.CSAF.Tests/Connectors/MsrcCsafConnectorTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Connectors.MSRC.CSAF.Tests/Connectors/MsrcCsafConnectorTests.cs similarity index 97% rename from src/StellaOps.Excititor.Connectors.MSRC.CSAF.Tests/Connectors/MsrcCsafConnectorTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Connectors.MSRC.CSAF.Tests/Connectors/MsrcCsafConnectorTests.cs index 7431c3ba..376fa397 100644 --- a/src/StellaOps.Excititor.Connectors.MSRC.CSAF.Tests/Connectors/MsrcCsafConnectorTests.cs +++ b/src/Excititor/__Tests/StellaOps.Excititor.Connectors.MSRC.CSAF.Tests/Connectors/MsrcCsafConnectorTests.cs @@ -1,367 +1,367 @@ -using System.Collections.Generic; -using System.Collections.Immutable; -using System.IO.Compression; -using System.Net; -using System.Net.Http; -using System.Text; -using FluentAssertions; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.Extensions.Options; -using StellaOps.Excititor.Connectors.Abstractions; -using StellaOps.Excititor.Connectors.MSRC.CSAF; -using StellaOps.Excititor.Connectors.MSRC.CSAF.Authentication; -using StellaOps.Excititor.Connectors.MSRC.CSAF.Configuration; -using StellaOps.Excititor.Core; -using StellaOps.Excititor.Storage.Mongo; -using Xunit; -using MongoDB.Driver; - -namespace StellaOps.Excititor.Connectors.MSRC.CSAF.Tests.Connectors; - -public sealed class MsrcCsafConnectorTests -{ - private static readonly VexConnectorDescriptor Descriptor = new("excititor:msrc", VexProviderKind.Vendor, "MSRC CSAF"); - - [Fact] - public async Task FetchAsync_EmitsDocumentAndPersistsState() - { - var summary = """ - { - "value": [ - { - "id": "ADV-0001", - "vulnerabilityId": "ADV-0001", - "severity": "Critical", - "releaseDate": "2025-10-17T00:00:00Z", - "lastModifiedDate": "2025-10-18T00:00:00Z", - "cvrfUrl": "https://example.com/csaf/ADV-0001.json" - } - ] - } - """; - - var csaf = """{"document":{"title":"Example"}}"""; - var handler = 
TestHttpMessageHandler.Create( - _ => Response(HttpStatusCode.OK, summary, "application/json"), - _ => Response(HttpStatusCode.OK, csaf, "application/json")); - - var httpClient = new HttpClient(handler) - { - BaseAddress = new Uri("https://example.com/"), - }; - - var factory = new SingleClientHttpClientFactory(httpClient); - var stateRepository = new InMemoryConnectorStateRepository(); - var options = Options.Create(CreateOptions()); - var connector = new MsrcCsafConnector( - factory, - new StubTokenProvider(), - stateRepository, - options, - NullLogger<MsrcCsafConnector>.Instance, - TimeProvider.System); - - await connector.ValidateAsync(VexConnectorSettings.Empty, CancellationToken.None); - - var sink = new CapturingRawSink(); - var context = new VexConnectorContext( - Since: new DateTimeOffset(2025, 10, 15, 0, 0, 0, TimeSpan.Zero), - Settings: VexConnectorSettings.Empty, - RawSink: sink, - SignatureVerifier: new NoopSignatureVerifier(), - Normalizers: new NoopNormalizerRouter(), - Services: new ServiceCollection().BuildServiceProvider(), - ResumeTokens: ImmutableDictionary<string, string>.Empty); - - var documents = new List<VexRawDocument>(); - await foreach (var document in connector.FetchAsync(context, CancellationToken.None)) - { - documents.Add(document); - } - - documents.Should().HaveCount(1); - sink.Documents.Should().HaveCount(1); - var emitted = documents[0]; - emitted.SourceUri.Should().Be(new Uri("https://example.com/csaf/ADV-0001.json")); - emitted.Metadata["msrc.vulnerabilityId"].Should().Be("ADV-0001"); - emitted.Metadata["msrc.csaf.format"].Should().Be("json"); - emitted.Metadata.Should().NotContainKey("excititor.quarantine.reason"); - - stateRepository.State.Should().NotBeNull(); - stateRepository.State!.LastUpdated.Should().Be(new DateTimeOffset(2025, 10, 18, 0, 0, 0, TimeSpan.Zero)); - stateRepository.State.DocumentDigests.Should().HaveCount(1); - } - - [Fact] - public async Task FetchAsync_SkipsDocumentsWithExistingDigest() - { - var summary = """ - { - "value": [ - { - "id": "ADV-0001", - "vulnerabilityId": "ADV-0001", - "lastModifiedDate": "2025-10-18T00:00:00Z", - "cvrfUrl": "https://example.com/csaf/ADV-0001.json" - } - ] - } - """; - - var csaf = """{"document":{"title":"Example"}}"""; - var handler = TestHttpMessageHandler.Create( - _ => Response(HttpStatusCode.OK, summary, "application/json"), - _ => Response(HttpStatusCode.OK, csaf, "application/json")); - - var httpClient = new HttpClient(handler) - { - BaseAddress = new Uri("https://example.com/"), - }; - - var factory = new SingleClientHttpClientFactory(httpClient); - var stateRepository = new InMemoryConnectorStateRepository(); - var options = Options.Create(CreateOptions()); - var connector = new MsrcCsafConnector( - factory, - new StubTokenProvider(), - stateRepository, - options, - NullLogger<MsrcCsafConnector>.Instance, - TimeProvider.System); - - await connector.ValidateAsync(VexConnectorSettings.Empty, CancellationToken.None); - - var sink = new CapturingRawSink(); - var context = new VexConnectorContext( - Since: new DateTimeOffset(2025, 10, 15, 0, 0, 0, TimeSpan.Zero), - Settings: VexConnectorSettings.Empty, - RawSink: sink, - SignatureVerifier: new NoopSignatureVerifier(), - Normalizers: new NoopNormalizerRouter(), - Services: new ServiceCollection().BuildServiceProvider(), - ResumeTokens: ImmutableDictionary<string, string>.Empty); - - var firstPass = new List<VexRawDocument>(); - await foreach (var document in connector.FetchAsync(context, CancellationToken.None)) - { - 
firstPass.Add(document); - } - - firstPass.Should().HaveCount(1); - stateRepository.State.Should().NotBeNull(); - var persistedState = stateRepository.State!; - - handler.Reset( - _ => Response(HttpStatusCode.OK, summary, "application/json"), - _ => Response(HttpStatusCode.OK, csaf, "application/json")); - - sink.Documents.Clear(); - var secondPass = new List<VexRawDocument>(); - await foreach (var document in connector.FetchAsync(context, CancellationToken.None)) - { - secondPass.Add(document); - } - - secondPass.Should().BeEmpty(); - sink.Documents.Should().BeEmpty(); - stateRepository.State.Should().NotBeNull(); - stateRepository.State!.DocumentDigests.Should().Equal(persistedState.DocumentDigests); - } - - [Fact] - public async Task FetchAsync_QuarantinesInvalidCsafPayload() - { - var summary = """ - { - "value": [ - { - "id": "ADV-0002", - "vulnerabilityId": "ADV-0002", - "lastModifiedDate": "2025-10-19T00:00:00Z", - "cvrfUrl": "https://example.com/csaf/ADV-0002.zip" - } - ] - } - """; - - var csafZip = CreateZip("document.json", "{ invalid json "); - var handler = TestHttpMessageHandler.Create( - _ => Response(HttpStatusCode.OK, summary, "application/json"), - _ => Response(HttpStatusCode.OK, csafZip, "application/zip")); - - var httpClient = new HttpClient(handler) - { - BaseAddress = new Uri("https://example.com/"), - }; - - var factory = new SingleClientHttpClientFactory(httpClient); - var stateRepository = new InMemoryConnectorStateRepository(); - var options = Options.Create(CreateOptions()); - var connector = new MsrcCsafConnector( - factory, - new StubTokenProvider(), - stateRepository, - options, - NullLogger<MsrcCsafConnector>.Instance, - TimeProvider.System); - - await connector.ValidateAsync(VexConnectorSettings.Empty, CancellationToken.None); - - var sink = new CapturingRawSink(); - var context = new VexConnectorContext( - Since: new DateTimeOffset(2025, 10, 17, 0, 0, 0, TimeSpan.Zero), - Settings: VexConnectorSettings.Empty, - RawSink: sink, - SignatureVerifier: new NoopSignatureVerifier(), - Normalizers: new NoopNormalizerRouter(), - Services: new ServiceCollection().BuildServiceProvider(), - ResumeTokens: ImmutableDictionary<string, string>.Empty); - - var documents = new List<VexRawDocument>(); - await foreach (var document in connector.FetchAsync(context, CancellationToken.None)) - { - documents.Add(document); - } - - documents.Should().BeEmpty(); - sink.Documents.Should().HaveCount(1); - sink.Documents[0].Metadata["excititor.quarantine.reason"].Should().Contain("JSON parse failed"); - sink.Documents[0].Metadata["msrc.csaf.format"].Should().Be("zip"); - - stateRepository.State.Should().NotBeNull(); - stateRepository.State!.DocumentDigests.Should().HaveCount(1); - } - - private static HttpResponseMessage Response(HttpStatusCode statusCode, string content, string contentType) - => new(statusCode) - { - Content = new StringContent(content, Encoding.UTF8, contentType), - }; - - private static HttpResponseMessage Response(HttpStatusCode statusCode, byte[] content, string contentType) - { - var response = new HttpResponseMessage(statusCode); - response.Content = new ByteArrayContent(content); - response.Content.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue(contentType); - return response; - } - - private static MsrcConnectorOptions CreateOptions() - => new() - { - BaseUri = new Uri("https://example.com/", UriKind.Absolute), - TenantId = Guid.NewGuid().ToString(), - ClientId = "client-id", - ClientSecret = "secret", - Scope = 
MsrcConnectorOptions.DefaultScope, - PageSize = 5, - MaxAdvisoriesPerFetch = 5, - RequestDelay = TimeSpan.Zero, - RetryBaseDelay = TimeSpan.FromMilliseconds(10), - MaxRetryAttempts = 2, - }; - - private static byte[] CreateZip(string entryName, string content) - { - using var buffer = new MemoryStream(); - using (var archive = new ZipArchive(buffer, ZipArchiveMode.Create, leaveOpen: true)) - { - var entry = archive.CreateEntry(entryName); - using var writer = new StreamWriter(entry.Open(), Encoding.UTF8); - writer.Write(content); - } - - return buffer.ToArray(); - } - - private sealed class StubTokenProvider : IMsrcTokenProvider - { - public ValueTask<MsrcAccessToken> GetAccessTokenAsync(CancellationToken cancellationToken) - => ValueTask.FromResult(new MsrcAccessToken("token", "Bearer", DateTimeOffset.MaxValue)); - } - - private sealed class CapturingRawSink : IVexRawDocumentSink - { - public List<VexRawDocument> Documents { get; } = new(); - - public ValueTask StoreAsync(VexRawDocument document, CancellationToken cancellationToken) - { - Documents.Add(document); - return ValueTask.CompletedTask; - } - } - - private sealed class NoopSignatureVerifier : IVexSignatureVerifier - { - public ValueTask<VexSignatureMetadata?> VerifyAsync(VexRawDocument document, CancellationToken cancellationToken) - => ValueTask.FromResult<VexSignatureMetadata?>(null); - } - - private sealed class NoopNormalizerRouter : IVexNormalizerRouter - { - public ValueTask<VexClaimBatch> NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken) - => ValueTask.FromResult(new VexClaimBatch(document, ImmutableArray<VexClaim>.Empty, ImmutableDictionary<string, string>.Empty)); - } - - private sealed class SingleClientHttpClientFactory : IHttpClientFactory - { - private readonly HttpClient _client; - - public SingleClientHttpClientFactory(HttpClient client) - { - _client = client; - } - - public HttpClient CreateClient(string name) => _client; - } - - private sealed class InMemoryConnectorStateRepository : IVexConnectorStateRepository - { - public VexConnectorState? State { get; private set; } - - public ValueTask<VexConnectorState?> GetAsync(string connectorId, CancellationToken cancellationToken, IClientSessionHandle? session = null) - => ValueTask.FromResult(State); - - public ValueTask SaveAsync(VexConnectorState state, CancellationToken cancellationToken, IClientSessionHandle? session = null) - { - State = state; - return ValueTask.CompletedTask; - } - } - - private sealed class TestHttpMessageHandler : HttpMessageHandler - { - private readonly Queue<Func<HttpRequestMessage, HttpResponseMessage>> _responders; - - private TestHttpMessageHandler(IEnumerable<Func<HttpRequestMessage, HttpResponseMessage>> responders) - { - _responders = new Queue<Func<HttpRequestMessage, HttpResponseMessage>>(responders); - } - - public static TestHttpMessageHandler Create(params Func<HttpRequestMessage, HttpResponseMessage>[] responders) - => new(responders); - - public void Reset(params Func<HttpRequestMessage, HttpResponseMessage>[] responders) - { - _responders.Clear(); - foreach (var responder in responders) - { - _responders.Enqueue(responder); - } - } - - protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) - { - if (_responders.Count == 0) - { - throw new InvalidOperationException("No responder configured for MSRC connector test request."); - } - - var responder = _responders.Count > 1 ? 
_responders.Dequeue() : _responders.Peek(); - var response = responder(request); - response.RequestMessage = request; - return Task.FromResult(response); - } - } -} +using System.Collections.Generic; +using System.Collections.Immutable; +using System.IO.Compression; +using System.Net; +using System.Net.Http; +using System.Text; +using FluentAssertions; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using StellaOps.Excititor.Connectors.Abstractions; +using StellaOps.Excititor.Connectors.MSRC.CSAF; +using StellaOps.Excititor.Connectors.MSRC.CSAF.Authentication; +using StellaOps.Excititor.Connectors.MSRC.CSAF.Configuration; +using StellaOps.Excititor.Core; +using StellaOps.Excititor.Storage.Mongo; +using Xunit; +using MongoDB.Driver; + +namespace StellaOps.Excititor.Connectors.MSRC.CSAF.Tests.Connectors; + +public sealed class MsrcCsafConnectorTests +{ + private static readonly VexConnectorDescriptor Descriptor = new("excititor:msrc", VexProviderKind.Vendor, "MSRC CSAF"); + + [Fact] + public async Task FetchAsync_EmitsDocumentAndPersistsState() + { + var summary = """ + { + "value": [ + { + "id": "ADV-0001", + "vulnerabilityId": "ADV-0001", + "severity": "Critical", + "releaseDate": "2025-10-17T00:00:00Z", + "lastModifiedDate": "2025-10-18T00:00:00Z", + "cvrfUrl": "https://example.com/csaf/ADV-0001.json" + } + ] + } + """; + + var csaf = """{"document":{"title":"Example"}}"""; + var handler = TestHttpMessageHandler.Create( + _ => Response(HttpStatusCode.OK, summary, "application/json"), + _ => Response(HttpStatusCode.OK, csaf, "application/json")); + + var httpClient = new HttpClient(handler) + { + BaseAddress = new Uri("https://example.com/"), + }; + + var factory = new SingleClientHttpClientFactory(httpClient); + var stateRepository = new InMemoryConnectorStateRepository(); + var options = Options.Create(CreateOptions()); + var connector = new MsrcCsafConnector( + factory, + new StubTokenProvider(), + stateRepository, + options, + NullLogger<MsrcCsafConnector>.Instance, + TimeProvider.System); + + await connector.ValidateAsync(VexConnectorSettings.Empty, CancellationToken.None); + + var sink = new CapturingRawSink(); + var context = new VexConnectorContext( + Since: new DateTimeOffset(2025, 10, 15, 0, 0, 0, TimeSpan.Zero), + Settings: VexConnectorSettings.Empty, + RawSink: sink, + SignatureVerifier: new NoopSignatureVerifier(), + Normalizers: new NoopNormalizerRouter(), + Services: new ServiceCollection().BuildServiceProvider(), + ResumeTokens: ImmutableDictionary<string, string>.Empty); + + var documents = new List<VexRawDocument>(); + await foreach (var document in connector.FetchAsync(context, CancellationToken.None)) + { + documents.Add(document); + } + + documents.Should().HaveCount(1); + sink.Documents.Should().HaveCount(1); + var emitted = documents[0]; + emitted.SourceUri.Should().Be(new Uri("https://example.com/csaf/ADV-0001.json")); + emitted.Metadata["msrc.vulnerabilityId"].Should().Be("ADV-0001"); + emitted.Metadata["msrc.csaf.format"].Should().Be("json"); + emitted.Metadata.Should().NotContainKey("excititor.quarantine.reason"); + + stateRepository.State.Should().NotBeNull(); + stateRepository.State!.LastUpdated.Should().Be(new DateTimeOffset(2025, 10, 18, 0, 0, 0, TimeSpan.Zero)); + stateRepository.State.DocumentDigests.Should().HaveCount(1); + } + + [Fact] + public async Task FetchAsync_SkipsDocumentsWithExistingDigest() + { + var summary = """ + { + "value": [ + { + "id": "ADV-0001", + 
"vulnerabilityId": "ADV-0001", + "lastModifiedDate": "2025-10-18T00:00:00Z", + "cvrfUrl": "https://example.com/csaf/ADV-0001.json" + } + ] + } + """; + + var csaf = """{"document":{"title":"Example"}}"""; + var handler = TestHttpMessageHandler.Create( + _ => Response(HttpStatusCode.OK, summary, "application/json"), + _ => Response(HttpStatusCode.OK, csaf, "application/json")); + + var httpClient = new HttpClient(handler) + { + BaseAddress = new Uri("https://example.com/"), + }; + + var factory = new SingleClientHttpClientFactory(httpClient); + var stateRepository = new InMemoryConnectorStateRepository(); + var options = Options.Create(CreateOptions()); + var connector = new MsrcCsafConnector( + factory, + new StubTokenProvider(), + stateRepository, + options, + NullLogger<MsrcCsafConnector>.Instance, + TimeProvider.System); + + await connector.ValidateAsync(VexConnectorSettings.Empty, CancellationToken.None); + + var sink = new CapturingRawSink(); + var context = new VexConnectorContext( + Since: new DateTimeOffset(2025, 10, 15, 0, 0, 0, TimeSpan.Zero), + Settings: VexConnectorSettings.Empty, + RawSink: sink, + SignatureVerifier: new NoopSignatureVerifier(), + Normalizers: new NoopNormalizerRouter(), + Services: new ServiceCollection().BuildServiceProvider(), + ResumeTokens: ImmutableDictionary<string, string>.Empty); + + var firstPass = new List<VexRawDocument>(); + await foreach (var document in connector.FetchAsync(context, CancellationToken.None)) + { + firstPass.Add(document); + } + + firstPass.Should().HaveCount(1); + stateRepository.State.Should().NotBeNull(); + var persistedState = stateRepository.State!; + + handler.Reset( + _ => Response(HttpStatusCode.OK, summary, "application/json"), + _ => Response(HttpStatusCode.OK, csaf, "application/json")); + + sink.Documents.Clear(); + var secondPass = new List<VexRawDocument>(); + await foreach (var document in connector.FetchAsync(context, CancellationToken.None)) + { + secondPass.Add(document); + } + + secondPass.Should().BeEmpty(); + sink.Documents.Should().BeEmpty(); + stateRepository.State.Should().NotBeNull(); + stateRepository.State!.DocumentDigests.Should().Equal(persistedState.DocumentDigests); + } + + [Fact] + public async Task FetchAsync_QuarantinesInvalidCsafPayload() + { + var summary = """ + { + "value": [ + { + "id": "ADV-0002", + "vulnerabilityId": "ADV-0002", + "lastModifiedDate": "2025-10-19T00:00:00Z", + "cvrfUrl": "https://example.com/csaf/ADV-0002.zip" + } + ] + } + """; + + var csafZip = CreateZip("document.json", "{ invalid json "); + var handler = TestHttpMessageHandler.Create( + _ => Response(HttpStatusCode.OK, summary, "application/json"), + _ => Response(HttpStatusCode.OK, csafZip, "application/zip")); + + var httpClient = new HttpClient(handler) + { + BaseAddress = new Uri("https://example.com/"), + }; + + var factory = new SingleClientHttpClientFactory(httpClient); + var stateRepository = new InMemoryConnectorStateRepository(); + var options = Options.Create(CreateOptions()); + var connector = new MsrcCsafConnector( + factory, + new StubTokenProvider(), + stateRepository, + options, + NullLogger<MsrcCsafConnector>.Instance, + TimeProvider.System); + + await connector.ValidateAsync(VexConnectorSettings.Empty, CancellationToken.None); + + var sink = new CapturingRawSink(); + var context = new VexConnectorContext( + Since: new DateTimeOffset(2025, 10, 17, 0, 0, 0, TimeSpan.Zero), + Settings: VexConnectorSettings.Empty, + RawSink: sink, + SignatureVerifier: new NoopSignatureVerifier(), + Normalizers: new 
NoopNormalizerRouter(), + Services: new ServiceCollection().BuildServiceProvider(), + ResumeTokens: ImmutableDictionary<string, string>.Empty); + + var documents = new List<VexRawDocument>(); + await foreach (var document in connector.FetchAsync(context, CancellationToken.None)) + { + documents.Add(document); + } + + documents.Should().BeEmpty(); + sink.Documents.Should().HaveCount(1); + sink.Documents[0].Metadata["excititor.quarantine.reason"].Should().Contain("JSON parse failed"); + sink.Documents[0].Metadata["msrc.csaf.format"].Should().Be("zip"); + + stateRepository.State.Should().NotBeNull(); + stateRepository.State!.DocumentDigests.Should().HaveCount(1); + } + + private static HttpResponseMessage Response(HttpStatusCode statusCode, string content, string contentType) + => new(statusCode) + { + Content = new StringContent(content, Encoding.UTF8, contentType), + }; + + private static HttpResponseMessage Response(HttpStatusCode statusCode, byte[] content, string contentType) + { + var response = new HttpResponseMessage(statusCode); + response.Content = new ByteArrayContent(content); + response.Content.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue(contentType); + return response; + } + + private static MsrcConnectorOptions CreateOptions() + => new() + { + BaseUri = new Uri("https://example.com/", UriKind.Absolute), + TenantId = Guid.NewGuid().ToString(), + ClientId = "client-id", + ClientSecret = "secret", + Scope = MsrcConnectorOptions.DefaultScope, + PageSize = 5, + MaxAdvisoriesPerFetch = 5, + RequestDelay = TimeSpan.Zero, + RetryBaseDelay = TimeSpan.FromMilliseconds(10), + MaxRetryAttempts = 2, + }; + + private static byte[] CreateZip(string entryName, string content) + { + using var buffer = new MemoryStream(); + using (var archive = new ZipArchive(buffer, ZipArchiveMode.Create, leaveOpen: true)) + { + var entry = archive.CreateEntry(entryName); + using var writer = new StreamWriter(entry.Open(), Encoding.UTF8); + writer.Write(content); + } + + return buffer.ToArray(); + } + + private sealed class StubTokenProvider : IMsrcTokenProvider + { + public ValueTask<MsrcAccessToken> GetAccessTokenAsync(CancellationToken cancellationToken) + => ValueTask.FromResult(new MsrcAccessToken("token", "Bearer", DateTimeOffset.MaxValue)); + } + + private sealed class CapturingRawSink : IVexRawDocumentSink + { + public List<VexRawDocument> Documents { get; } = new(); + + public ValueTask StoreAsync(VexRawDocument document, CancellationToken cancellationToken) + { + Documents.Add(document); + return ValueTask.CompletedTask; + } + } + + private sealed class NoopSignatureVerifier : IVexSignatureVerifier + { + public ValueTask<VexSignatureMetadata?> VerifyAsync(VexRawDocument document, CancellationToken cancellationToken) + => ValueTask.FromResult<VexSignatureMetadata?>(null); + } + + private sealed class NoopNormalizerRouter : IVexNormalizerRouter + { + public ValueTask<VexClaimBatch> NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken) + => ValueTask.FromResult(new VexClaimBatch(document, ImmutableArray<VexClaim>.Empty, ImmutableDictionary<string, string>.Empty)); + } + + private sealed class SingleClientHttpClientFactory : IHttpClientFactory + { + private readonly HttpClient _client; + + public SingleClientHttpClientFactory(HttpClient client) + { + _client = client; + } + + public HttpClient CreateClient(string name) => _client; + } + + private sealed class InMemoryConnectorStateRepository : IVexConnectorStateRepository + { + public VexConnectorState? 
State { get; private set; } + + public ValueTask<VexConnectorState?> GetAsync(string connectorId, CancellationToken cancellationToken, IClientSessionHandle? session = null) + => ValueTask.FromResult(State); + + public ValueTask SaveAsync(VexConnectorState state, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + State = state; + return ValueTask.CompletedTask; + } + } + + private sealed class TestHttpMessageHandler : HttpMessageHandler + { + private readonly Queue<Func<HttpRequestMessage, HttpResponseMessage>> _responders; + + private TestHttpMessageHandler(IEnumerable<Func<HttpRequestMessage, HttpResponseMessage>> responders) + { + _responders = new Queue<Func<HttpRequestMessage, HttpResponseMessage>>(responders); + } + + public static TestHttpMessageHandler Create(params Func<HttpRequestMessage, HttpResponseMessage>[] responders) + => new(responders); + + public void Reset(params Func<HttpRequestMessage, HttpResponseMessage>[] responders) + { + _responders.Clear(); + foreach (var responder in responders) + { + _responders.Enqueue(responder); + } + } + + protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + if (_responders.Count == 0) + { + throw new InvalidOperationException("No responder configured for MSRC connector test request."); + } + + var responder = _responders.Count > 1 ? _responders.Dequeue() : _responders.Peek(); + var response = responder(request); + response.RequestMessage = request; + return Task.FromResult(response); + } + } +} diff --git a/src/StellaOps.Excititor.Connectors.MSRC.CSAF.Tests/StellaOps.Excititor.Connectors.MSRC.CSAF.Tests.csproj b/src/Excititor/__Tests/StellaOps.Excititor.Connectors.MSRC.CSAF.Tests/StellaOps.Excititor.Connectors.MSRC.CSAF.Tests.csproj similarity index 78% rename from src/StellaOps.Excititor.Connectors.MSRC.CSAF.Tests/StellaOps.Excititor.Connectors.MSRC.CSAF.Tests.csproj rename to src/Excititor/__Tests/StellaOps.Excititor.Connectors.MSRC.CSAF.Tests/StellaOps.Excititor.Connectors.MSRC.CSAF.Tests.csproj index fc0027b7..c30a06d4 100644 --- a/src/StellaOps.Excititor.Connectors.MSRC.CSAF.Tests/StellaOps.Excititor.Connectors.MSRC.CSAF.Tests.csproj +++ b/src/Excititor/__Tests/StellaOps.Excititor.Connectors.MSRC.CSAF.Tests/StellaOps.Excititor.Connectors.MSRC.CSAF.Tests.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -7,7 +8,7 @@ <TreatWarningsAsErrors>true</TreatWarningsAsErrors> </PropertyGroup> <ItemGroup> - <ProjectReference Include="..\StellaOps.Excititor.Connectors.MSRC.CSAF\StellaOps.Excititor.Connectors.MSRC.CSAF.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Excititor.Connectors.MSRC.CSAF/StellaOps.Excititor.Connectors.MSRC.CSAF.csproj" /> </ItemGroup> <ItemGroup> <PackageReference Include="FluentAssertions" Version="6.12.0" /> @@ -15,4 +16,4 @@ <PackageReference Include="NSubstitute" Version="5.1.0" /> <PackageReference Include="System.IO.Abstractions.TestingHelpers" Version="20.0.28" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Tests/Configuration/OciOpenVexAttestationConnectorOptionsValidatorTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Tests/Configuration/OciOpenVexAttestationConnectorOptionsValidatorTests.cs similarity index 100% rename from 
src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Tests/Configuration/OciOpenVexAttestationConnectorOptionsValidatorTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Tests/Configuration/OciOpenVexAttestationConnectorOptionsValidatorTests.cs diff --git a/src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Tests/Connector/OciOpenVexAttestationConnectorTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Tests/Connector/OciOpenVexAttestationConnectorTests.cs similarity index 97% rename from src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Tests/Connector/OciOpenVexAttestationConnectorTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Tests/Connector/OciOpenVexAttestationConnectorTests.cs index 5445c197..a01c44a2 100644 --- a/src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Tests/Connector/OciOpenVexAttestationConnectorTests.cs +++ b/src/Excititor/__Tests/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Tests/Connector/OciOpenVexAttestationConnectorTests.cs @@ -1,215 +1,215 @@ -using System; -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Net; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using FluentAssertions; -using Microsoft.Extensions.Caching.Memory; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging.Abstractions; -using StellaOps.Excititor.Connectors.Abstractions; -using StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest; -using StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Configuration; -using StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.DependencyInjection; -using StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Discovery; -using StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Fetch; -using StellaOps.Excititor.Core; -using System.IO.Abstractions.TestingHelpers; -using Xunit; - -namespace StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Tests.Connector; - -public sealed class OciOpenVexAttestationConnectorTests -{ - [Fact] - public async Task FetchAsync_WithOfflineBundle_EmitsRawDocument() - { - var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData> - { - ["/bundles/attestation.json"] = new MockFileData("{\"payload\":\"\",\"payloadType\":\"application/vnd.in-toto+json\",\"signatures\":[{\"sig\":\"\"}]}"), - }); - - using var cache = new MemoryCache(new MemoryCacheOptions()); - var httpClient = new HttpClient(new StubHttpMessageHandler()) - { - BaseAddress = new System.Uri("https://registry.example.com/") - }; - - var httpFactory = new SingleClientHttpClientFactory(httpClient); - var discovery = new OciAttestationDiscoveryService(cache, fileSystem, NullLogger<OciAttestationDiscoveryService>.Instance); - var fetcher = new OciAttestationFetcher(httpFactory, fileSystem, NullLogger<OciAttestationFetcher>.Instance); - - var connector = new OciOpenVexAttestationConnector( - discovery, - fetcher, - NullLogger<OciOpenVexAttestationConnector>.Instance, - TimeProvider.System); - - var settingsValues = ImmutableDictionary<string, string>.Empty - .Add("Images:0:Reference", "registry.example.com/repo/image:latest") - .Add("Images:0:OfflineBundlePath", "/bundles/attestation.json") - .Add("Offline:PreferOffline", "true") - .Add("Offline:AllowNetworkFallback", "false") - .Add("Cosign:Mode", "None"); - - var settings = new VexConnectorSettings(settingsValues); - await connector.ValidateAsync(settings, CancellationToken.None); - - var sink = 
new CapturingRawSink(); - var verifier = new CapturingSignatureVerifier(); - var context = new VexConnectorContext( - Since: null, - Settings: VexConnectorSettings.Empty, - RawSink: sink, - SignatureVerifier: verifier, - Normalizers: new NoopNormalizerRouter(), - Services: new Microsoft.Extensions.DependencyInjection.ServiceCollection().BuildServiceProvider(), - ResumeTokens: ImmutableDictionary<string, string>.Empty); - - var documents = new List<VexRawDocument>(); - await foreach (var document in connector.FetchAsync(context, CancellationToken.None)) - { - documents.Add(document); - } - - documents.Should().HaveCount(1); - sink.Documents.Should().HaveCount(1); - documents[0].Format.Should().Be(VexDocumentFormat.OciAttestation); - documents[0].Metadata.Should().ContainKey("oci.attestation.sourceKind").WhoseValue.Should().Be("offline"); - documents[0].Metadata.Should().ContainKey("vex.provenance.sourceKind").WhoseValue.Should().Be("offline"); - documents[0].Metadata.Should().ContainKey("vex.provenance.registryAuthMode").WhoseValue.Should().Be("Anonymous"); - verifier.VerifyCalls.Should().Be(1); - } - - [Fact] - public async Task FetchAsync_WithSignatureMetadata_EnrichesProvenance() - { - var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData> - { - ["/bundles/attestation.json"] = new MockFileData("{\"payload\":\"\",\"payloadType\":\"application/vnd.in-toto+json\",\"signatures\":[{\"sig\":\"\"}]}"), - }); - - using var cache = new MemoryCache(new MemoryCacheOptions()); - var httpClient = new HttpClient(new StubHttpMessageHandler()) - { - BaseAddress = new System.Uri("https://registry.example.com/") - }; - - var httpFactory = new SingleClientHttpClientFactory(httpClient); - var discovery = new OciAttestationDiscoveryService(cache, fileSystem, NullLogger<OciAttestationDiscoveryService>.Instance); - var fetcher = new OciAttestationFetcher(httpFactory, fileSystem, NullLogger<OciAttestationFetcher>.Instance); - - var connector = new OciOpenVexAttestationConnector( - discovery, - fetcher, - NullLogger<OciOpenVexAttestationConnector>.Instance, - TimeProvider.System); - - var settingsValues = ImmutableDictionary<string, string>.Empty - .Add("Images:0:Reference", "registry.example.com/repo/image:latest") - .Add("Images:0:OfflineBundlePath", "/bundles/attestation.json") - .Add("Offline:PreferOffline", "true") - .Add("Offline:AllowNetworkFallback", "false") - .Add("Cosign:Mode", "Keyless") - .Add("Cosign:Keyless:Issuer", "https://issuer.example.com") - .Add("Cosign:Keyless:Subject", "subject@example.com"); - - var settings = new VexConnectorSettings(settingsValues); - await connector.ValidateAsync(settings, CancellationToken.None); - - var sink = new CapturingRawSink(); - var verifier = new CapturingSignatureVerifier - { - Result = new VexSignatureMetadata( - type: "cosign", - subject: "sig-subject", - issuer: "sig-issuer", - keyId: "key-id", - verifiedAt: DateTimeOffset.UtcNow, - transparencyLogReference: "rekor://entry/123") - }; - - var context = new VexConnectorContext( - Since: null, - Settings: VexConnectorSettings.Empty, - RawSink: sink, - SignatureVerifier: verifier, - Normalizers: new NoopNormalizerRouter(), - Services: new ServiceCollection().BuildServiceProvider(), - ResumeTokens: ImmutableDictionary<string, string>.Empty); - - var documents = new List<VexRawDocument>(); - await foreach (var document in connector.FetchAsync(context, CancellationToken.None)) - { - documents.Add(document); - } - - documents.Should().HaveCount(1); - var metadata = documents[0].Metadata; - 
metadata.Should().Contain("vex.signature.type", "cosign"); - metadata.Should().Contain("vex.signature.subject", "sig-subject"); - metadata.Should().Contain("vex.signature.issuer", "sig-issuer"); - metadata.Should().Contain("vex.signature.keyId", "key-id"); - metadata.Should().ContainKey("vex.signature.verifiedAt"); - metadata.Should().Contain("vex.signature.transparencyLogReference", "rekor://entry/123"); - metadata.Should().Contain("vex.provenance.cosign.mode", "Keyless"); - metadata.Should().Contain("vex.provenance.cosign.issuer", "https://issuer.example.com"); - metadata.Should().Contain("vex.provenance.cosign.subject", "subject@example.com"); - verifier.VerifyCalls.Should().Be(1); - } - - private sealed class CapturingRawSink : IVexRawDocumentSink - { - public List<VexRawDocument> Documents { get; } = new(); - - public ValueTask StoreAsync(VexRawDocument document, CancellationToken cancellationToken) - { - Documents.Add(document); - return ValueTask.CompletedTask; - } - } - - private sealed class CapturingSignatureVerifier : IVexSignatureVerifier - { - public int VerifyCalls { get; private set; } - - public VexSignatureMetadata? Result { get; set; } - - public ValueTask<VexSignatureMetadata?> VerifyAsync(VexRawDocument document, CancellationToken cancellationToken) - { - VerifyCalls++; - return ValueTask.FromResult(Result); - } - } - - private sealed class NoopNormalizerRouter : IVexNormalizerRouter - { - public ValueTask<VexClaimBatch> NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken) - => ValueTask.FromResult(new VexClaimBatch(document, ImmutableArray<VexClaim>.Empty, ImmutableDictionary<string, string>.Empty)); - } - - private sealed class SingleClientHttpClientFactory : IHttpClientFactory - { - private readonly HttpClient _client; - - public SingleClientHttpClientFactory(HttpClient client) - { - _client = client; - } - - public HttpClient CreateClient(string name) => _client; - } - - private sealed class StubHttpMessageHandler : HttpMessageHandler - { - protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) - { - return Task.FromResult(new HttpResponseMessage(HttpStatusCode.NotFound) - { - RequestMessage = request - }); - } - } -} +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Net; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using FluentAssertions; +using Microsoft.Extensions.Caching.Memory; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging.Abstractions; +using StellaOps.Excititor.Connectors.Abstractions; +using StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest; +using StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Configuration; +using StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.DependencyInjection; +using StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Discovery; +using StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Fetch; +using StellaOps.Excititor.Core; +using System.IO.Abstractions.TestingHelpers; +using Xunit; + +namespace StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Tests.Connector; + +public sealed class OciOpenVexAttestationConnectorTests +{ + [Fact] + public async Task FetchAsync_WithOfflineBundle_EmitsRawDocument() + { + var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData> + { + ["/bundles/attestation.json"] = new 
MockFileData("{\"payload\":\"\",\"payloadType\":\"application/vnd.in-toto+json\",\"signatures\":[{\"sig\":\"\"}]}"), + }); + + using var cache = new MemoryCache(new MemoryCacheOptions()); + var httpClient = new HttpClient(new StubHttpMessageHandler()) + { + BaseAddress = new System.Uri("https://registry.example.com/") + }; + + var httpFactory = new SingleClientHttpClientFactory(httpClient); + var discovery = new OciAttestationDiscoveryService(cache, fileSystem, NullLogger<OciAttestationDiscoveryService>.Instance); + var fetcher = new OciAttestationFetcher(httpFactory, fileSystem, NullLogger<OciAttestationFetcher>.Instance); + + var connector = new OciOpenVexAttestationConnector( + discovery, + fetcher, + NullLogger<OciOpenVexAttestationConnector>.Instance, + TimeProvider.System); + + var settingsValues = ImmutableDictionary<string, string>.Empty + .Add("Images:0:Reference", "registry.example.com/repo/image:latest") + .Add("Images:0:OfflineBundlePath", "/bundles/attestation.json") + .Add("Offline:PreferOffline", "true") + .Add("Offline:AllowNetworkFallback", "false") + .Add("Cosign:Mode", "None"); + + var settings = new VexConnectorSettings(settingsValues); + await connector.ValidateAsync(settings, CancellationToken.None); + + var sink = new CapturingRawSink(); + var verifier = new CapturingSignatureVerifier(); + var context = new VexConnectorContext( + Since: null, + Settings: VexConnectorSettings.Empty, + RawSink: sink, + SignatureVerifier: verifier, + Normalizers: new NoopNormalizerRouter(), + Services: new Microsoft.Extensions.DependencyInjection.ServiceCollection().BuildServiceProvider(), + ResumeTokens: ImmutableDictionary<string, string>.Empty); + + var documents = new List<VexRawDocument>(); + await foreach (var document in connector.FetchAsync(context, CancellationToken.None)) + { + documents.Add(document); + } + + documents.Should().HaveCount(1); + sink.Documents.Should().HaveCount(1); + documents[0].Format.Should().Be(VexDocumentFormat.OciAttestation); + documents[0].Metadata.Should().ContainKey("oci.attestation.sourceKind").WhoseValue.Should().Be("offline"); + documents[0].Metadata.Should().ContainKey("vex.provenance.sourceKind").WhoseValue.Should().Be("offline"); + documents[0].Metadata.Should().ContainKey("vex.provenance.registryAuthMode").WhoseValue.Should().Be("Anonymous"); + verifier.VerifyCalls.Should().Be(1); + } + + [Fact] + public async Task FetchAsync_WithSignatureMetadata_EnrichesProvenance() + { + var fileSystem = new MockFileSystem(new Dictionary<string, MockFileData> + { + ["/bundles/attestation.json"] = new MockFileData("{\"payload\":\"\",\"payloadType\":\"application/vnd.in-toto+json\",\"signatures\":[{\"sig\":\"\"}]}"), + }); + + using var cache = new MemoryCache(new MemoryCacheOptions()); + var httpClient = new HttpClient(new StubHttpMessageHandler()) + { + BaseAddress = new System.Uri("https://registry.example.com/") + }; + + var httpFactory = new SingleClientHttpClientFactory(httpClient); + var discovery = new OciAttestationDiscoveryService(cache, fileSystem, NullLogger<OciAttestationDiscoveryService>.Instance); + var fetcher = new OciAttestationFetcher(httpFactory, fileSystem, NullLogger<OciAttestationFetcher>.Instance); + + var connector = new OciOpenVexAttestationConnector( + discovery, + fetcher, + NullLogger<OciOpenVexAttestationConnector>.Instance, + TimeProvider.System); + + var settingsValues = ImmutableDictionary<string, string>.Empty + .Add("Images:0:Reference", "registry.example.com/repo/image:latest") + .Add("Images:0:OfflineBundlePath", 
"/bundles/attestation.json") + .Add("Offline:PreferOffline", "true") + .Add("Offline:AllowNetworkFallback", "false") + .Add("Cosign:Mode", "Keyless") + .Add("Cosign:Keyless:Issuer", "https://issuer.example.com") + .Add("Cosign:Keyless:Subject", "subject@example.com"); + + var settings = new VexConnectorSettings(settingsValues); + await connector.ValidateAsync(settings, CancellationToken.None); + + var sink = new CapturingRawSink(); + var verifier = new CapturingSignatureVerifier + { + Result = new VexSignatureMetadata( + type: "cosign", + subject: "sig-subject", + issuer: "sig-issuer", + keyId: "key-id", + verifiedAt: DateTimeOffset.UtcNow, + transparencyLogReference: "rekor://entry/123") + }; + + var context = new VexConnectorContext( + Since: null, + Settings: VexConnectorSettings.Empty, + RawSink: sink, + SignatureVerifier: verifier, + Normalizers: new NoopNormalizerRouter(), + Services: new ServiceCollection().BuildServiceProvider(), + ResumeTokens: ImmutableDictionary<string, string>.Empty); + + var documents = new List<VexRawDocument>(); + await foreach (var document in connector.FetchAsync(context, CancellationToken.None)) + { + documents.Add(document); + } + + documents.Should().HaveCount(1); + var metadata = documents[0].Metadata; + metadata.Should().Contain("vex.signature.type", "cosign"); + metadata.Should().Contain("vex.signature.subject", "sig-subject"); + metadata.Should().Contain("vex.signature.issuer", "sig-issuer"); + metadata.Should().Contain("vex.signature.keyId", "key-id"); + metadata.Should().ContainKey("vex.signature.verifiedAt"); + metadata.Should().Contain("vex.signature.transparencyLogReference", "rekor://entry/123"); + metadata.Should().Contain("vex.provenance.cosign.mode", "Keyless"); + metadata.Should().Contain("vex.provenance.cosign.issuer", "https://issuer.example.com"); + metadata.Should().Contain("vex.provenance.cosign.subject", "subject@example.com"); + verifier.VerifyCalls.Should().Be(1); + } + + private sealed class CapturingRawSink : IVexRawDocumentSink + { + public List<VexRawDocument> Documents { get; } = new(); + + public ValueTask StoreAsync(VexRawDocument document, CancellationToken cancellationToken) + { + Documents.Add(document); + return ValueTask.CompletedTask; + } + } + + private sealed class CapturingSignatureVerifier : IVexSignatureVerifier + { + public int VerifyCalls { get; private set; } + + public VexSignatureMetadata? 
Result { get; set; } + + public ValueTask<VexSignatureMetadata?> VerifyAsync(VexRawDocument document, CancellationToken cancellationToken) + { + VerifyCalls++; + return ValueTask.FromResult(Result); + } + } + + private sealed class NoopNormalizerRouter : IVexNormalizerRouter + { + public ValueTask<VexClaimBatch> NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken) + => ValueTask.FromResult(new VexClaimBatch(document, ImmutableArray<VexClaim>.Empty, ImmutableDictionary<string, string>.Empty)); + } + + private sealed class SingleClientHttpClientFactory : IHttpClientFactory + { + private readonly HttpClient _client; + + public SingleClientHttpClientFactory(HttpClient client) + { + _client = client; + } + + public HttpClient CreateClient(string name) => _client; + } + + private sealed class StubHttpMessageHandler : HttpMessageHandler + { + protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + return Task.FromResult(new HttpResponseMessage(HttpStatusCode.NotFound) + { + RequestMessage = request + }); + } + } +} diff --git a/src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Tests/Discovery/OciAttestationDiscoveryServiceTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Tests/Discovery/OciAttestationDiscoveryServiceTests.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Tests/Discovery/OciAttestationDiscoveryServiceTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Tests/Discovery/OciAttestationDiscoveryServiceTests.cs diff --git a/src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Tests/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Tests.csproj b/src/Excititor/__Tests/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Tests/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Tests.csproj similarity index 74% rename from src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Tests/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Tests.csproj rename to src/Excititor/__Tests/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Tests/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Tests.csproj index 9ca8293a..b896e065 100644 --- a/src/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Tests/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Tests.csproj +++ b/src/Excititor/__Tests/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Tests/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.Tests.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -7,11 +8,11 @@ <TreatWarningsAsErrors>true</TreatWarningsAsErrors> </PropertyGroup> <ItemGroup> - <ProjectReference Include="..\StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest\StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest/StellaOps.Excititor.Connectors.OCI.OpenVEX.Attest.csproj" /> </ItemGroup> <ItemGroup> <PackageReference Include="FluentAssertions" Version="6.12.0" /> <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> <PackageReference Include="System.IO.Abstractions.TestingHelpers" Version="20.0.28" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Excititor.Connectors.Oracle.CSAF.Tests/Connectors/OracleCsafConnectorTests.cs 
b/src/Excititor/__Tests/StellaOps.Excititor.Connectors.Oracle.CSAF.Tests/Connectors/OracleCsafConnectorTests.cs similarity index 97% rename from src/StellaOps.Excititor.Connectors.Oracle.CSAF.Tests/Connectors/OracleCsafConnectorTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Connectors.Oracle.CSAF.Tests/Connectors/OracleCsafConnectorTests.cs index 1e7dceba..a936f08e 100644 --- a/src/StellaOps.Excititor.Connectors.Oracle.CSAF.Tests/Connectors/OracleCsafConnectorTests.cs +++ b/src/Excititor/__Tests/StellaOps.Excititor.Connectors.Oracle.CSAF.Tests/Connectors/OracleCsafConnectorTests.cs @@ -1,314 +1,314 @@ -using System; -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Net; -using System.Net.Http; -using System.Security.Cryptography; -using System.Text; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using FluentAssertions; -using Microsoft.Extensions.Caching.Memory; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging.Abstractions; -using StellaOps.Excititor.Connectors.Abstractions; -using StellaOps.Excititor.Connectors.Oracle.CSAF; -using StellaOps.Excititor.Connectors.Oracle.CSAF.Configuration; -using StellaOps.Excititor.Connectors.Oracle.CSAF.Metadata; -using StellaOps.Excititor.Core; -using StellaOps.Excititor.Storage.Mongo; -using System.IO.Abstractions.TestingHelpers; -using Xunit; -using MongoDB.Driver; - -namespace StellaOps.Excititor.Connectors.Oracle.CSAF.Tests.Connectors; - -public sealed class OracleCsafConnectorTests -{ - [Fact] - public async Task FetchAsync_NewEntry_PersistsDocumentAndUpdatesState() - { - var documentUri = new Uri("https://oracle.example/security/csaf/cpu2025oct.json"); - var payload = Encoding.UTF8.GetBytes("{\"document\":\"payload\"}"); - var payloadDigest = ComputeDigest(payload); - var snapshotPath = "/snapshots/oracle-catalog.json"; - var fileSystem = new MockFileSystem(); - fileSystem.AddFile(snapshotPath, new MockFileData(BuildOfflineSnapshot(documentUri, payloadDigest, "2025-10-15T00:00:00Z"))); - - var handler = new StubHttpMessageHandler(new Dictionary<Uri, HttpResponseMessage> - { - [documentUri] = CreateResponse(payload), - }); - var httpClient = new HttpClient(handler); - var httpFactory = new SingleHttpClientFactory(httpClient); - var loader = new OracleCatalogLoader( - httpFactory, - new MemoryCache(new MemoryCacheOptions()), - fileSystem, - NullLogger<OracleCatalogLoader>.Instance, - TimeProvider.System); - - var stateRepository = new InMemoryConnectorStateRepository(); - var connector = new OracleCsafConnector( - loader, - httpFactory, - stateRepository, - new[] { new OracleConnectorOptionsValidator(fileSystem) }, - NullLogger<OracleCsafConnector>.Instance, - TimeProvider.System); - - var settingsValues = ImmutableDictionary<string, string>.Empty - .Add(nameof(OracleConnectorOptions.PreferOfflineSnapshot), "true") - .Add(nameof(OracleConnectorOptions.OfflineSnapshotPath), snapshotPath) - .Add(nameof(OracleConnectorOptions.PersistOfflineSnapshot), "false"); - var settings = new VexConnectorSettings(settingsValues); - - await connector.ValidateAsync(settings, CancellationToken.None); - - var sink = new InMemoryRawSink(); - var context = new VexConnectorContext( - Since: null, - Settings: settings, - RawSink: sink, - SignatureVerifier: new NoopSignatureVerifier(), - Normalizers: new NoopNormalizerRouter(), - Services: new ServiceCollection().BuildServiceProvider(), - ResumeTokens: ImmutableDictionary<string, string>.Empty); - - var 
documents = new List<VexRawDocument>(); - await foreach (var document in connector.FetchAsync(context, CancellationToken.None)) - { - documents.Add(document); - } - - documents.Should().HaveCount(1); - sink.Documents.Should().HaveCount(1); - documents[0].Digest.Should().Be(payloadDigest); - documents[0].Metadata["oracle.csaf.entryId"].Should().Be("CPU2025Oct"); - documents[0].Metadata["oracle.csaf.sha256"].Should().Be(payloadDigest); - - stateRepository.State.Should().NotBeNull(); - stateRepository.State!.DocumentDigests.Should().ContainSingle().Which.Should().Be(payloadDigest); - - handler.GetCallCount(documentUri).Should().Be(1); - - // second run should short-circuit without downloading again - sink.Documents.Clear(); - documents.Clear(); - - await foreach (var document in connector.FetchAsync(context, CancellationToken.None)) - { - documents.Add(document); - } - - documents.Should().BeEmpty(); - sink.Documents.Should().BeEmpty(); - handler.GetCallCount(documentUri).Should().Be(1); - } - - [Fact] - public async Task FetchAsync_ChecksumMismatch_SkipsDocument() - { - var documentUri = new Uri("https://oracle.example/security/csaf/cpu2025oct.json"); - var payload = Encoding.UTF8.GetBytes("{\"document\":\"payload\"}"); - var snapshotPath = "/snapshots/oracle-catalog.json"; - var fileSystem = new MockFileSystem(); - fileSystem.AddFile(snapshotPath, new MockFileData(BuildOfflineSnapshot(documentUri, "deadbeef", "2025-10-15T00:00:00Z"))); - - var handler = new StubHttpMessageHandler(new Dictionary<Uri, HttpResponseMessage> - { - [documentUri] = CreateResponse(payload), - }); - var httpClient = new HttpClient(handler); - var httpFactory = new SingleHttpClientFactory(httpClient); - var loader = new OracleCatalogLoader( - httpFactory, - new MemoryCache(new MemoryCacheOptions()), - fileSystem, - NullLogger<OracleCatalogLoader>.Instance, - TimeProvider.System); - - var stateRepository = new InMemoryConnectorStateRepository(); - var connector = new OracleCsafConnector( - loader, - httpFactory, - stateRepository, - new[] { new OracleConnectorOptionsValidator(fileSystem) }, - NullLogger<OracleCsafConnector>.Instance, - TimeProvider.System); - - var settingsValues = ImmutableDictionary<string, string>.Empty - .Add(nameof(OracleConnectorOptions.PreferOfflineSnapshot), "true") - .Add(nameof(OracleConnectorOptions.OfflineSnapshotPath), snapshotPath) - .Add(nameof(OracleConnectorOptions.PersistOfflineSnapshot), "false"); - var settings = new VexConnectorSettings(settingsValues); - - await connector.ValidateAsync(settings, CancellationToken.None); - - var sink = new InMemoryRawSink(); - var context = new VexConnectorContext( - Since: null, - Settings: settings, - RawSink: sink, - SignatureVerifier: new NoopSignatureVerifier(), - Normalizers: new NoopNormalizerRouter(), - Services: new ServiceCollection().BuildServiceProvider(), - ResumeTokens: ImmutableDictionary<string, string>.Empty); - - var documents = new List<VexRawDocument>(); - await foreach (var document in connector.FetchAsync(context, CancellationToken.None)) - { - documents.Add(document); - } - - documents.Should().BeEmpty(); - sink.Documents.Should().BeEmpty(); - stateRepository.State.Should().BeNull(); - handler.GetCallCount(documentUri).Should().Be(1); - } - - private static HttpResponseMessage CreateResponse(byte[] payload) - => new(HttpStatusCode.OK) - { - Content = new ByteArrayContent(payload) - { - Headers = - { - ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/json"), - } - } - }; - - private static string 
ComputeDigest(byte[] payload) - { - Span<byte> buffer = stackalloc byte[32]; - SHA256.HashData(payload, buffer); - return "sha256:" + Convert.ToHexString(buffer).ToLowerInvariant(); - } - - private static string BuildOfflineSnapshot(Uri documentUri, string sha256, string publishedAt) - { - var snapshot = new - { - metadata = new - { - generatedAt = "2025-10-14T12:00:00Z", - entries = new[] - { - new - { - id = "CPU2025Oct", - title = "Oracle Critical Patch Update Advisory - October 2025", - documentUri = documentUri.ToString(), - publishedAt, - revision = publishedAt, - sha256, - size = 1024, - products = new[] { "Oracle Database" } - } - }, - cpuSchedule = Array.Empty<object>() - }, - fetchedAt = "2025-10-14T12:00:00Z" - }; - - return JsonSerializer.Serialize(snapshot, new JsonSerializerOptions(JsonSerializerDefaults.Web)); - } - - private sealed class StubHttpMessageHandler : HttpMessageHandler - { - private readonly Dictionary<Uri, HttpResponseMessage> _responses; - private readonly Dictionary<Uri, int> _callCounts = new(); - - public StubHttpMessageHandler(Dictionary<Uri, HttpResponseMessage> responses) - { - _responses = responses; - } - - public int GetCallCount(Uri uri) => _callCounts.TryGetValue(uri, out var count) ? count : 0; - - protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) - { - if (request.RequestUri is null || !_responses.TryGetValue(request.RequestUri, out var response)) - { - return Task.FromResult(new HttpResponseMessage(HttpStatusCode.NotFound)); - } - - _callCounts.TryGetValue(request.RequestUri, out var count); - _callCounts[request.RequestUri] = count + 1; - return Task.FromResult(response.Clone()); - } - } - - private sealed class SingleHttpClientFactory : IHttpClientFactory - { - private readonly HttpClient _client; - - public SingleHttpClientFactory(HttpClient client) - { - _client = client; - } - - public HttpClient CreateClient(string name) => _client; - } - - private sealed class InMemoryConnectorStateRepository : IVexConnectorStateRepository - { - public VexConnectorState? State { get; private set; } - - public ValueTask<VexConnectorState?> GetAsync(string connectorId, CancellationToken cancellationToken, IClientSessionHandle? session = null) - => ValueTask.FromResult(State); - - public ValueTask SaveAsync(VexConnectorState state, CancellationToken cancellationToken, IClientSessionHandle? 
session = null) - { - State = state; - return ValueTask.CompletedTask; - } - } - - private sealed class InMemoryRawSink : IVexRawDocumentSink - { - public List<VexRawDocument> Documents { get; } = new(); - - public ValueTask StoreAsync(VexRawDocument document, CancellationToken cancellationToken) - { - Documents.Add(document); - return ValueTask.CompletedTask; - } - } - - private sealed class NoopSignatureVerifier : IVexSignatureVerifier - { - public ValueTask<VexSignatureMetadata?> VerifyAsync(VexRawDocument document, CancellationToken cancellationToken) - => ValueTask.FromResult<VexSignatureMetadata?>(null); - } - - private sealed class NoopNormalizerRouter : IVexNormalizerRouter - { - public ValueTask<VexClaimBatch> NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken) - => ValueTask.FromResult(new VexClaimBatch(document, ImmutableArray<VexClaim>.Empty, ImmutableDictionary<string, string>.Empty)); - } -} - -internal static class HttpResponseMessageExtensions -{ - public static HttpResponseMessage Clone(this HttpResponseMessage response) - { - var clone = new HttpResponseMessage(response.StatusCode); - foreach (var header in response.Headers) - { - clone.Headers.TryAddWithoutValidation(header.Key, header.Value); - } - - if (response.Content is not null) - { - var payload = response.Content.ReadAsByteArrayAsync().GetAwaiter().GetResult(); - var mediaType = response.Content.Headers.ContentType?.MediaType ?? "application/json"; - clone.Content = new ByteArrayContent(payload); - clone.Content.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue(mediaType); - } - - return clone; - } -} +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Net; +using System.Net.Http; +using System.Security.Cryptography; +using System.Text; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using FluentAssertions; +using Microsoft.Extensions.Caching.Memory; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging.Abstractions; +using StellaOps.Excititor.Connectors.Abstractions; +using StellaOps.Excititor.Connectors.Oracle.CSAF; +using StellaOps.Excititor.Connectors.Oracle.CSAF.Configuration; +using StellaOps.Excititor.Connectors.Oracle.CSAF.Metadata; +using StellaOps.Excititor.Core; +using StellaOps.Excititor.Storage.Mongo; +using System.IO.Abstractions.TestingHelpers; +using Xunit; +using MongoDB.Driver; + +namespace StellaOps.Excititor.Connectors.Oracle.CSAF.Tests.Connectors; + +public sealed class OracleCsafConnectorTests +{ + [Fact] + public async Task FetchAsync_NewEntry_PersistsDocumentAndUpdatesState() + { + var documentUri = new Uri("https://oracle.example/security/csaf/cpu2025oct.json"); + var payload = Encoding.UTF8.GetBytes("{\"document\":\"payload\"}"); + var payloadDigest = ComputeDigest(payload); + var snapshotPath = "/snapshots/oracle-catalog.json"; + var fileSystem = new MockFileSystem(); + fileSystem.AddFile(snapshotPath, new MockFileData(BuildOfflineSnapshot(documentUri, payloadDigest, "2025-10-15T00:00:00Z"))); + + var handler = new StubHttpMessageHandler(new Dictionary<Uri, HttpResponseMessage> + { + [documentUri] = CreateResponse(payload), + }); + var httpClient = new HttpClient(handler); + var httpFactory = new SingleHttpClientFactory(httpClient); + var loader = new OracleCatalogLoader( + httpFactory, + new MemoryCache(new MemoryCacheOptions()), + fileSystem, + NullLogger<OracleCatalogLoader>.Instance, + TimeProvider.System); + + var 
stateRepository = new InMemoryConnectorStateRepository(); + var connector = new OracleCsafConnector( + loader, + httpFactory, + stateRepository, + new[] { new OracleConnectorOptionsValidator(fileSystem) }, + NullLogger<OracleCsafConnector>.Instance, + TimeProvider.System); + + var settingsValues = ImmutableDictionary<string, string>.Empty + .Add(nameof(OracleConnectorOptions.PreferOfflineSnapshot), "true") + .Add(nameof(OracleConnectorOptions.OfflineSnapshotPath), snapshotPath) + .Add(nameof(OracleConnectorOptions.PersistOfflineSnapshot), "false"); + var settings = new VexConnectorSettings(settingsValues); + + await connector.ValidateAsync(settings, CancellationToken.None); + + var sink = new InMemoryRawSink(); + var context = new VexConnectorContext( + Since: null, + Settings: settings, + RawSink: sink, + SignatureVerifier: new NoopSignatureVerifier(), + Normalizers: new NoopNormalizerRouter(), + Services: new ServiceCollection().BuildServiceProvider(), + ResumeTokens: ImmutableDictionary<string, string>.Empty); + + var documents = new List<VexRawDocument>(); + await foreach (var document in connector.FetchAsync(context, CancellationToken.None)) + { + documents.Add(document); + } + + documents.Should().HaveCount(1); + sink.Documents.Should().HaveCount(1); + documents[0].Digest.Should().Be(payloadDigest); + documents[0].Metadata["oracle.csaf.entryId"].Should().Be("CPU2025Oct"); + documents[0].Metadata["oracle.csaf.sha256"].Should().Be(payloadDigest); + + stateRepository.State.Should().NotBeNull(); + stateRepository.State!.DocumentDigests.Should().ContainSingle().Which.Should().Be(payloadDigest); + + handler.GetCallCount(documentUri).Should().Be(1); + + // second run should short-circuit without downloading again + sink.Documents.Clear(); + documents.Clear(); + + await foreach (var document in connector.FetchAsync(context, CancellationToken.None)) + { + documents.Add(document); + } + + documents.Should().BeEmpty(); + sink.Documents.Should().BeEmpty(); + handler.GetCallCount(documentUri).Should().Be(1); + } + + [Fact] + public async Task FetchAsync_ChecksumMismatch_SkipsDocument() + { + var documentUri = new Uri("https://oracle.example/security/csaf/cpu2025oct.json"); + var payload = Encoding.UTF8.GetBytes("{\"document\":\"payload\"}"); + var snapshotPath = "/snapshots/oracle-catalog.json"; + var fileSystem = new MockFileSystem(); + fileSystem.AddFile(snapshotPath, new MockFileData(BuildOfflineSnapshot(documentUri, "deadbeef", "2025-10-15T00:00:00Z"))); + + var handler = new StubHttpMessageHandler(new Dictionary<Uri, HttpResponseMessage> + { + [documentUri] = CreateResponse(payload), + }); + var httpClient = new HttpClient(handler); + var httpFactory = new SingleHttpClientFactory(httpClient); + var loader = new OracleCatalogLoader( + httpFactory, + new MemoryCache(new MemoryCacheOptions()), + fileSystem, + NullLogger<OracleCatalogLoader>.Instance, + TimeProvider.System); + + var stateRepository = new InMemoryConnectorStateRepository(); + var connector = new OracleCsafConnector( + loader, + httpFactory, + stateRepository, + new[] { new OracleConnectorOptionsValidator(fileSystem) }, + NullLogger<OracleCsafConnector>.Instance, + TimeProvider.System); + + var settingsValues = ImmutableDictionary<string, string>.Empty + .Add(nameof(OracleConnectorOptions.PreferOfflineSnapshot), "true") + .Add(nameof(OracleConnectorOptions.OfflineSnapshotPath), snapshotPath) + .Add(nameof(OracleConnectorOptions.PersistOfflineSnapshot), "false"); + var settings = new VexConnectorSettings(settingsValues); + + await 
connector.ValidateAsync(settings, CancellationToken.None); + + var sink = new InMemoryRawSink(); + var context = new VexConnectorContext( + Since: null, + Settings: settings, + RawSink: sink, + SignatureVerifier: new NoopSignatureVerifier(), + Normalizers: new NoopNormalizerRouter(), + Services: new ServiceCollection().BuildServiceProvider(), + ResumeTokens: ImmutableDictionary<string, string>.Empty); + + var documents = new List<VexRawDocument>(); + await foreach (var document in connector.FetchAsync(context, CancellationToken.None)) + { + documents.Add(document); + } + + documents.Should().BeEmpty(); + sink.Documents.Should().BeEmpty(); + stateRepository.State.Should().BeNull(); + handler.GetCallCount(documentUri).Should().Be(1); + } + + private static HttpResponseMessage CreateResponse(byte[] payload) + => new(HttpStatusCode.OK) + { + Content = new ByteArrayContent(payload) + { + Headers = + { + ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue("application/json"), + } + } + }; + + private static string ComputeDigest(byte[] payload) + { + Span<byte> buffer = stackalloc byte[32]; + SHA256.HashData(payload, buffer); + return "sha256:" + Convert.ToHexString(buffer).ToLowerInvariant(); + } + + private static string BuildOfflineSnapshot(Uri documentUri, string sha256, string publishedAt) + { + var snapshot = new + { + metadata = new + { + generatedAt = "2025-10-14T12:00:00Z", + entries = new[] + { + new + { + id = "CPU2025Oct", + title = "Oracle Critical Patch Update Advisory - October 2025", + documentUri = documentUri.ToString(), + publishedAt, + revision = publishedAt, + sha256, + size = 1024, + products = new[] { "Oracle Database" } + } + }, + cpuSchedule = Array.Empty<object>() + }, + fetchedAt = "2025-10-14T12:00:00Z" + }; + + return JsonSerializer.Serialize(snapshot, new JsonSerializerOptions(JsonSerializerDefaults.Web)); + } + + private sealed class StubHttpMessageHandler : HttpMessageHandler + { + private readonly Dictionary<Uri, HttpResponseMessage> _responses; + private readonly Dictionary<Uri, int> _callCounts = new(); + + public StubHttpMessageHandler(Dictionary<Uri, HttpResponseMessage> responses) + { + _responses = responses; + } + + public int GetCallCount(Uri uri) => _callCounts.TryGetValue(uri, out var count) ? count : 0; + + protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + if (request.RequestUri is null || !_responses.TryGetValue(request.RequestUri, out var response)) + { + return Task.FromResult(new HttpResponseMessage(HttpStatusCode.NotFound)); + } + + _callCounts.TryGetValue(request.RequestUri, out var count); + _callCounts[request.RequestUri] = count + 1; + return Task.FromResult(response.Clone()); + } + } + + private sealed class SingleHttpClientFactory : IHttpClientFactory + { + private readonly HttpClient _client; + + public SingleHttpClientFactory(HttpClient client) + { + _client = client; + } + + public HttpClient CreateClient(string name) => _client; + } + + private sealed class InMemoryConnectorStateRepository : IVexConnectorStateRepository + { + public VexConnectorState? State { get; private set; } + + public ValueTask<VexConnectorState?> GetAsync(string connectorId, CancellationToken cancellationToken, IClientSessionHandle? session = null) + => ValueTask.FromResult(State); + + public ValueTask SaveAsync(VexConnectorState state, CancellationToken cancellationToken, IClientSessionHandle? 
session = null) + { + State = state; + return ValueTask.CompletedTask; + } + } + + private sealed class InMemoryRawSink : IVexRawDocumentSink + { + public List<VexRawDocument> Documents { get; } = new(); + + public ValueTask StoreAsync(VexRawDocument document, CancellationToken cancellationToken) + { + Documents.Add(document); + return ValueTask.CompletedTask; + } + } + + private sealed class NoopSignatureVerifier : IVexSignatureVerifier + { + public ValueTask<VexSignatureMetadata?> VerifyAsync(VexRawDocument document, CancellationToken cancellationToken) + => ValueTask.FromResult<VexSignatureMetadata?>(null); + } + + private sealed class NoopNormalizerRouter : IVexNormalizerRouter + { + public ValueTask<VexClaimBatch> NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken) + => ValueTask.FromResult(new VexClaimBatch(document, ImmutableArray<VexClaim>.Empty, ImmutableDictionary<string, string>.Empty)); + } +} + +internal static class HttpResponseMessageExtensions +{ + public static HttpResponseMessage Clone(this HttpResponseMessage response) + { + var clone = new HttpResponseMessage(response.StatusCode); + foreach (var header in response.Headers) + { + clone.Headers.TryAddWithoutValidation(header.Key, header.Value); + } + + if (response.Content is not null) + { + var payload = response.Content.ReadAsByteArrayAsync().GetAwaiter().GetResult(); + var mediaType = response.Content.Headers.ContentType?.MediaType ?? "application/json"; + clone.Content = new ByteArrayContent(payload); + clone.Content.Headers.ContentType = new System.Net.Http.Headers.MediaTypeHeaderValue(mediaType); + } + + return clone; + } +} diff --git a/src/StellaOps.Excititor.Connectors.Oracle.CSAF.Tests/Metadata/OracleCatalogLoaderTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Connectors.Oracle.CSAF.Tests/Metadata/OracleCatalogLoaderTests.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.Oracle.CSAF.Tests/Metadata/OracleCatalogLoaderTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Connectors.Oracle.CSAF.Tests/Metadata/OracleCatalogLoaderTests.cs diff --git a/src/StellaOps.Excititor.Connectors.Oracle.CSAF.Tests/StellaOps.Excititor.Connectors.Oracle.CSAF.Tests.csproj b/src/Excititor/__Tests/StellaOps.Excititor.Connectors.Oracle.CSAF.Tests/StellaOps.Excititor.Connectors.Oracle.CSAF.Tests.csproj similarity index 75% rename from src/StellaOps.Excititor.Connectors.Oracle.CSAF.Tests/StellaOps.Excititor.Connectors.Oracle.CSAF.Tests.csproj rename to src/Excititor/__Tests/StellaOps.Excititor.Connectors.Oracle.CSAF.Tests/StellaOps.Excititor.Connectors.Oracle.CSAF.Tests.csproj index 528090b4..d1ea21b2 100644 --- a/src/StellaOps.Excititor.Connectors.Oracle.CSAF.Tests/StellaOps.Excititor.Connectors.Oracle.CSAF.Tests.csproj +++ b/src/Excititor/__Tests/StellaOps.Excititor.Connectors.Oracle.CSAF.Tests/StellaOps.Excititor.Connectors.Oracle.CSAF.Tests.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -7,11 +8,11 @@ <TreatWarningsAsErrors>true</TreatWarningsAsErrors> </PropertyGroup> <ItemGroup> - <ProjectReference Include="..\StellaOps.Excititor.Connectors.Oracle.CSAF\StellaOps.Excititor.Connectors.Oracle.CSAF.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Excititor.Connectors.Oracle.CSAF/StellaOps.Excititor.Connectors.Oracle.CSAF.csproj" /> </ItemGroup> <ItemGroup> <PackageReference Include="FluentAssertions" Version="6.12.0" /> <PackageReference 
Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> <PackageReference Include="System.IO.Abstractions.TestingHelpers" Version="20.0.28" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Excititor.Connectors.RedHat.CSAF.Tests/Connectors/RedHatCsafConnectorTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Connectors.RedHat.CSAF.Tests/Connectors/RedHatCsafConnectorTests.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.RedHat.CSAF.Tests/Connectors/RedHatCsafConnectorTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Connectors.RedHat.CSAF.Tests/Connectors/RedHatCsafConnectorTests.cs diff --git a/src/StellaOps.Excititor.Connectors.RedHat.CSAF.Tests/Metadata/RedHatProviderMetadataLoaderTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Connectors.RedHat.CSAF.Tests/Metadata/RedHatProviderMetadataLoaderTests.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.RedHat.CSAF.Tests/Metadata/RedHatProviderMetadataLoaderTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Connectors.RedHat.CSAF.Tests/Metadata/RedHatProviderMetadataLoaderTests.cs diff --git a/src/StellaOps.Excititor.Connectors.RedHat.CSAF.Tests/StellaOps.Excititor.Connectors.RedHat.CSAF.Tests.csproj b/src/Excititor/__Tests/StellaOps.Excititor.Connectors.RedHat.CSAF.Tests/StellaOps.Excititor.Connectors.RedHat.CSAF.Tests.csproj similarity index 61% rename from src/StellaOps.Excititor.Connectors.RedHat.CSAF.Tests/StellaOps.Excititor.Connectors.RedHat.CSAF.Tests.csproj rename to src/Excititor/__Tests/StellaOps.Excititor.Connectors.RedHat.CSAF.Tests/StellaOps.Excititor.Connectors.RedHat.CSAF.Tests.csproj index 4fa46301..b9292d70 100644 --- a/src/StellaOps.Excititor.Connectors.RedHat.CSAF.Tests/StellaOps.Excititor.Connectors.RedHat.CSAF.Tests.csproj +++ b/src/Excititor/__Tests/StellaOps.Excititor.Connectors.RedHat.CSAF.Tests/StellaOps.Excititor.Connectors.RedHat.CSAF.Tests.csproj @@ -1,17 +1,18 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Excititor.Connectors.RedHat.CSAF\StellaOps.Excititor.Connectors.RedHat.CSAF.csproj" /> - <ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" /> - </ItemGroup> - <ItemGroup> - <PackageReference Include="FluentAssertions" Version="6.12.0" /> - <PackageReference Include="System.IO.Abstractions.TestingHelpers" Version="20.0.28" /> - </ItemGroup> -</Project> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Excititor.Connectors.RedHat.CSAF/StellaOps.Excititor.Connectors.RedHat.CSAF.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Excititor.Storage.Mongo/StellaOps.Excititor.Storage.Mongo.csproj" /> + </ItemGroup> + <ItemGroup> + <PackageReference Include="FluentAssertions" Version="6.12.0" /> + <PackageReference Include="System.IO.Abstractions.TestingHelpers" Version="20.0.28" 
/> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Tests/Authentication/RancherHubTokenProviderTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Tests/Authentication/RancherHubTokenProviderTests.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Tests/Authentication/RancherHubTokenProviderTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Tests/Authentication/RancherHubTokenProviderTests.cs diff --git a/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Tests/Metadata/RancherHubMetadataLoaderTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Tests/Metadata/RancherHubMetadataLoaderTests.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Tests/Metadata/RancherHubMetadataLoaderTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Tests/Metadata/RancherHubMetadataLoaderTests.cs diff --git a/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Tests/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Tests.csproj b/src/Excititor/__Tests/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Tests/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Tests.csproj similarity index 60% rename from src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Tests/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Tests.csproj rename to src/Excititor/__Tests/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Tests/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Tests.csproj index 428b8ce0..0c61596e 100644 --- a/src/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Tests/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Tests.csproj +++ b/src/Excititor/__Tests/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Tests/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Tests.csproj @@ -1,17 +1,18 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Excititor.Connectors.SUSE.RancherVEXHub\StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.csproj" /> - <ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" /> - </ItemGroup> - <ItemGroup> - <PackageReference Include="FluentAssertions" Version="6.12.0" /> - <PackageReference Include="System.IO.Abstractions.TestingHelpers" Version="20.0.28" /> - </ItemGroup> -</Project> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub/StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Excititor.Storage.Mongo/StellaOps.Excititor.Storage.Mongo.csproj" /> + </ItemGroup> + <ItemGroup> + <PackageReference Include="FluentAssertions" Version="6.12.0" /> + <PackageReference Include="System.IO.Abstractions.TestingHelpers" 
Version="20.0.28" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests/Connectors/UbuntuCsafConnectorTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests/Connectors/UbuntuCsafConnectorTests.cs similarity index 97% rename from src/StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests/Connectors/UbuntuCsafConnectorTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests/Connectors/UbuntuCsafConnectorTests.cs index 47b10d49..33bec59f 100644 --- a/src/StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests/Connectors/UbuntuCsafConnectorTests.cs +++ b/src/Excititor/__Tests/StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests/Connectors/UbuntuCsafConnectorTests.cs @@ -1,310 +1,310 @@ -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Linq; -using System.Net; -using System.Net.Http; -using System.Net.Http.Headers; -using System.Security.Cryptography; -using System.Text; -using System.Threading; -using FluentAssertions; -using Microsoft.Extensions.Caching.Memory; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging.Abstractions; -using StellaOps.Excititor.Connectors.Abstractions; -using StellaOps.Excititor.Connectors.Ubuntu.CSAF; -using StellaOps.Excititor.Connectors.Ubuntu.CSAF.Configuration; -using StellaOps.Excititor.Connectors.Ubuntu.CSAF.Metadata; -using StellaOps.Excititor.Core; -using StellaOps.Excititor.Storage.Mongo; -using System.IO.Abstractions.TestingHelpers; -using Xunit; -using MongoDB.Driver; - -namespace StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests.Connectors; - -public sealed class UbuntuCsafConnectorTests -{ - [Fact] - public async Task FetchAsync_IngestsNewDocument_UpdatesStateAndUsesEtag() - { - var baseUri = new Uri("https://ubuntu.test/security/csaf/"); - var indexUri = new Uri(baseUri, "index.json"); - var catalogUri = new Uri(baseUri, "stable/catalog.json"); - var advisoryUri = new Uri(baseUri, "stable/USN-2025-0001.json"); - - var manifest = CreateTestManifest(advisoryUri, "USN-2025-0001", "2025-10-18T00:00:00Z"); - var documentPayload = Encoding.UTF8.GetBytes("{\"document\":\"payload\"}"); - var documentSha = ComputeSha256(documentPayload); - - var indexJson = manifest.IndexJson; - var catalogJson = manifest.CatalogJson.Replace("{{SHA256}}", documentSha, StringComparison.Ordinal); - var handler = new UbuntuTestHttpHandler(indexUri, indexJson, catalogUri, catalogJson, advisoryUri, documentPayload, expectedEtag: "etag-123"); - - var httpClient = new HttpClient(handler); - var httpFactory = new SingleClientFactory(httpClient); - var cache = new MemoryCache(new MemoryCacheOptions()); - var fileSystem = new MockFileSystem(); - var loader = new UbuntuCatalogLoader(httpFactory, cache, fileSystem, NullLogger<UbuntuCatalogLoader>.Instance, TimeProvider.System); - - var optionsValidator = new UbuntuConnectorOptionsValidator(fileSystem); - var stateRepository = new InMemoryConnectorStateRepository(); - var connector = new UbuntuCsafConnector( - loader, - httpFactory, - stateRepository, - new[] { optionsValidator }, - NullLogger<UbuntuCsafConnector>.Instance, - TimeProvider.System); - - var settings = new VexConnectorSettings(ImmutableDictionary<string, string>.Empty); - await connector.ValidateAsync(settings, CancellationToken.None); - - var sink = new InMemoryRawSink(); - var context = new VexConnectorContext(null, VexConnectorSettings.Empty, sink, new NoopSignatureVerifier(), new 
NoopNormalizerRouter(), new ServiceCollection().BuildServiceProvider(), ImmutableDictionary<string, string>.Empty); - - var documents = new List<VexRawDocument>(); - await foreach (var doc in connector.FetchAsync(context, CancellationToken.None)) - { - documents.Add(doc); - } - - documents.Should().HaveCount(1); - sink.Documents.Should().HaveCount(1); - var stored = sink.Documents.Single(); - stored.Digest.Should().Be($"sha256:{documentSha}"); - stored.Metadata.TryGetValue("ubuntu.etag", out var storedEtag).Should().BeTrue(); - storedEtag.Should().Be("etag-123"); - - stateRepository.CurrentState.Should().NotBeNull(); - stateRepository.CurrentState!.DocumentDigests.Should().Contain($"sha256:{documentSha}"); - stateRepository.CurrentState.DocumentDigests.Should().Contain($"etag:{advisoryUri}|etag-123"); - stateRepository.CurrentState.LastUpdated.Should().Be(DateTimeOffset.Parse("2025-10-18T00:00:00Z")); - - handler.DocumentRequestCount.Should().Be(1); - - // Second run: Expect connector to send If-None-Match and skip download via 304. - sink.Documents.Clear(); - documents.Clear(); - - await foreach (var doc in connector.FetchAsync(context, CancellationToken.None)) - { - documents.Add(doc); - } - - documents.Should().BeEmpty(); - sink.Documents.Should().BeEmpty(); - handler.DocumentRequestCount.Should().Be(2); - handler.SeenIfNoneMatch.Should().Contain("\"etag-123\""); - } - - [Fact] - public async Task FetchAsync_SkipsWhenChecksumMismatch() - { - var baseUri = new Uri("https://ubuntu.test/security/csaf/"); - var indexUri = new Uri(baseUri, "index.json"); - var catalogUri = new Uri(baseUri, "stable/catalog.json"); - var advisoryUri = new Uri(baseUri, "stable/USN-2025-0002.json"); - - var manifest = CreateTestManifest(advisoryUri, "USN-2025-0002", "2025-10-18T00:00:00Z"); - var indexJson = manifest.IndexJson; - var catalogJson = manifest.CatalogJson.Replace("{{SHA256}}", new string('a', 64), StringComparison.Ordinal); - var handler = new UbuntuTestHttpHandler(indexUri, indexJson, catalogUri, catalogJson, advisoryUri, Encoding.UTF8.GetBytes("{\"document\":\"payload\"}"), expectedEtag: "etag-999"); - - var httpClient = new HttpClient(handler); - var httpFactory = new SingleClientFactory(httpClient); - var cache = new MemoryCache(new MemoryCacheOptions()); - var fileSystem = new MockFileSystem(); - var loader = new UbuntuCatalogLoader(httpFactory, cache, fileSystem, NullLogger<UbuntuCatalogLoader>.Instance, TimeProvider.System); - var optionsValidator = new UbuntuConnectorOptionsValidator(fileSystem); - var stateRepository = new InMemoryConnectorStateRepository(); - - var connector = new UbuntuCsafConnector( - loader, - httpFactory, - stateRepository, - new[] { optionsValidator }, - NullLogger<UbuntuCsafConnector>.Instance, - TimeProvider.System); - - await connector.ValidateAsync(new VexConnectorSettings(ImmutableDictionary<string, string>.Empty), CancellationToken.None); - - var sink = new InMemoryRawSink(); - var context = new VexConnectorContext(null, VexConnectorSettings.Empty, sink, new NoopSignatureVerifier(), new NoopNormalizerRouter(), new ServiceCollection().BuildServiceProvider(), ImmutableDictionary<string, string>.Empty); - - var documents = new List<VexRawDocument>(); - await foreach (var doc in connector.FetchAsync(context, CancellationToken.None)) - { - documents.Add(doc); - } - - documents.Should().BeEmpty(); - sink.Documents.Should().BeEmpty(); - stateRepository.CurrentState.Should().NotBeNull(); - stateRepository.CurrentState!.DocumentDigests.Should().BeEmpty(); - 
handler.DocumentRequestCount.Should().Be(1); - } - - private static (string IndexJson, string CatalogJson) CreateTestManifest(Uri advisoryUri, string advisoryId, string timestamp) - { - var indexJson = """ - { - "generated": "2025-10-18T00:00:00Z", - "channels": [ - { - "name": "stable", - "catalogUrl": "{{advisoryUri.GetLeftPart(UriPartial.Authority)}}/security/csaf/stable/catalog.json", - "sha256": "ignore" - } - ] - } - """; - - var catalogJson = """ - { - "resources": [ - { - "id": "{{advisoryId}}", - "type": "csaf", - "url": "{{advisoryUri}}", - "last_modified": "{{timestamp}}", - "hashes": { - "sha256": "{{SHA256}}" - }, - "etag": "\"etag-123\"", - "title": "{{advisoryId}}" - } - ] - } - """; - - return (indexJson, catalogJson); - } - - private static string ComputeSha256(ReadOnlySpan<byte> payload) - { - Span<byte> buffer = stackalloc byte[32]; - SHA256.HashData(payload, buffer); - return Convert.ToHexString(buffer).ToLowerInvariant(); - } - - private sealed class SingleClientFactory : IHttpClientFactory - { - private readonly HttpClient _client; - - public SingleClientFactory(HttpClient client) - { - _client = client; - } - - public HttpClient CreateClient(string name) => _client; - } - - private sealed class UbuntuTestHttpHandler : HttpMessageHandler - { - private readonly Uri _indexUri; - private readonly string _indexPayload; - private readonly Uri _catalogUri; - private readonly string _catalogPayload; - private readonly Uri _documentUri; - private readonly byte[] _documentPayload; - private readonly string _expectedEtag; - - public int DocumentRequestCount { get; private set; } - public List<string> SeenIfNoneMatch { get; } = new(); - - public UbuntuTestHttpHandler(Uri indexUri, string indexPayload, Uri catalogUri, string catalogPayload, Uri documentUri, byte[] documentPayload, string expectedEtag) - { - _indexUri = indexUri; - _indexPayload = indexPayload; - _catalogUri = catalogUri; - _catalogPayload = catalogPayload; - _documentUri = documentUri; - _documentPayload = documentPayload; - _expectedEtag = expectedEtag; - } - - protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) - { - if (request.RequestUri == _indexUri) - { - return Task.FromResult(CreateJsonResponse(_indexPayload)); - } - - if (request.RequestUri == _catalogUri) - { - return Task.FromResult(CreateJsonResponse(_catalogPayload)); - } - - if (request.RequestUri == _documentUri) - { - DocumentRequestCount++; - if (request.Headers.IfNoneMatch is { Count: > 0 }) - { - var header = request.Headers.IfNoneMatch.First().ToString(); - SeenIfNoneMatch.Add(header); - if (header.Trim('"') == _expectedEtag || header == $"\"{_expectedEtag}\"") - { - return Task.FromResult(new HttpResponseMessage(HttpStatusCode.NotModified)); - } - } - - var response = new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new ByteArrayContent(_documentPayload), - }; - response.Headers.ETag = new EntityTagHeaderValue($"\"{_expectedEtag}\""); - response.Content.Headers.ContentType = new MediaTypeHeaderValue("application/json"); - return Task.FromResult(response); - } - - return Task.FromResult(new HttpResponseMessage(HttpStatusCode.NotFound) - { - Content = new StringContent($"No response configured for {request.RequestUri}"), - }); - } - - private static HttpResponseMessage CreateJsonResponse(string payload) - => new(HttpStatusCode.OK) - { - Content = new StringContent(payload, Encoding.UTF8, "application/json"), - }; - } - - private sealed class 
InMemoryConnectorStateRepository : IVexConnectorStateRepository - { - public VexConnectorState? CurrentState { get; private set; } - - public ValueTask<VexConnectorState?> GetAsync(string connectorId, CancellationToken cancellationToken, IClientSessionHandle? session = null) - => ValueTask.FromResult(CurrentState); - - public ValueTask SaveAsync(VexConnectorState state, CancellationToken cancellationToken, IClientSessionHandle? session = null) - { - CurrentState = state; - return ValueTask.CompletedTask; - } - } - - private sealed class InMemoryRawSink : IVexRawDocumentSink - { - public List<VexRawDocument> Documents { get; } = new(); - - public ValueTask StoreAsync(VexRawDocument document, CancellationToken cancellationToken) - { - Documents.Add(document); - return ValueTask.CompletedTask; - } - } - - private sealed class NoopSignatureVerifier : IVexSignatureVerifier - { - public ValueTask<VexSignatureMetadata?> VerifyAsync(VexRawDocument document, CancellationToken cancellationToken) - => ValueTask.FromResult<VexSignatureMetadata?>(null); - } - - private sealed class NoopNormalizerRouter : IVexNormalizerRouter - { - public ValueTask<VexClaimBatch> NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken) - => ValueTask.FromResult(new VexClaimBatch(document, ImmutableArray<VexClaim>.Empty, ImmutableDictionary<string, string>.Empty)); - } -} +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Linq; +using System.Net; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Security.Cryptography; +using System.Text; +using System.Threading; +using FluentAssertions; +using Microsoft.Extensions.Caching.Memory; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging.Abstractions; +using StellaOps.Excititor.Connectors.Abstractions; +using StellaOps.Excititor.Connectors.Ubuntu.CSAF; +using StellaOps.Excititor.Connectors.Ubuntu.CSAF.Configuration; +using StellaOps.Excititor.Connectors.Ubuntu.CSAF.Metadata; +using StellaOps.Excititor.Core; +using StellaOps.Excititor.Storage.Mongo; +using System.IO.Abstractions.TestingHelpers; +using Xunit; +using MongoDB.Driver; + +namespace StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests.Connectors; + +public sealed class UbuntuCsafConnectorTests +{ + [Fact] + public async Task FetchAsync_IngestsNewDocument_UpdatesStateAndUsesEtag() + { + var baseUri = new Uri("https://ubuntu.test/security/csaf/"); + var indexUri = new Uri(baseUri, "index.json"); + var catalogUri = new Uri(baseUri, "stable/catalog.json"); + var advisoryUri = new Uri(baseUri, "stable/USN-2025-0001.json"); + + var manifest = CreateTestManifest(advisoryUri, "USN-2025-0001", "2025-10-18T00:00:00Z"); + var documentPayload = Encoding.UTF8.GetBytes("{\"document\":\"payload\"}"); + var documentSha = ComputeSha256(documentPayload); + + var indexJson = manifest.IndexJson; + var catalogJson = manifest.CatalogJson.Replace("{{SHA256}}", documentSha, StringComparison.Ordinal); + var handler = new UbuntuTestHttpHandler(indexUri, indexJson, catalogUri, catalogJson, advisoryUri, documentPayload, expectedEtag: "etag-123"); + + var httpClient = new HttpClient(handler); + var httpFactory = new SingleClientFactory(httpClient); + var cache = new MemoryCache(new MemoryCacheOptions()); + var fileSystem = new MockFileSystem(); + var loader = new UbuntuCatalogLoader(httpFactory, cache, fileSystem, NullLogger<UbuntuCatalogLoader>.Instance, TimeProvider.System); + + var optionsValidator = new 
UbuntuConnectorOptionsValidator(fileSystem); + var stateRepository = new InMemoryConnectorStateRepository(); + var connector = new UbuntuCsafConnector( + loader, + httpFactory, + stateRepository, + new[] { optionsValidator }, + NullLogger<UbuntuCsafConnector>.Instance, + TimeProvider.System); + + var settings = new VexConnectorSettings(ImmutableDictionary<string, string>.Empty); + await connector.ValidateAsync(settings, CancellationToken.None); + + var sink = new InMemoryRawSink(); + var context = new VexConnectorContext(null, VexConnectorSettings.Empty, sink, new NoopSignatureVerifier(), new NoopNormalizerRouter(), new ServiceCollection().BuildServiceProvider(), ImmutableDictionary<string, string>.Empty); + + var documents = new List<VexRawDocument>(); + await foreach (var doc in connector.FetchAsync(context, CancellationToken.None)) + { + documents.Add(doc); + } + + documents.Should().HaveCount(1); + sink.Documents.Should().HaveCount(1); + var stored = sink.Documents.Single(); + stored.Digest.Should().Be($"sha256:{documentSha}"); + stored.Metadata.TryGetValue("ubuntu.etag", out var storedEtag).Should().BeTrue(); + storedEtag.Should().Be("etag-123"); + + stateRepository.CurrentState.Should().NotBeNull(); + stateRepository.CurrentState!.DocumentDigests.Should().Contain($"sha256:{documentSha}"); + stateRepository.CurrentState.DocumentDigests.Should().Contain($"etag:{advisoryUri}|etag-123"); + stateRepository.CurrentState.LastUpdated.Should().Be(DateTimeOffset.Parse("2025-10-18T00:00:00Z")); + + handler.DocumentRequestCount.Should().Be(1); + + // Second run: Expect connector to send If-None-Match and skip download via 304. + sink.Documents.Clear(); + documents.Clear(); + + await foreach (var doc in connector.FetchAsync(context, CancellationToken.None)) + { + documents.Add(doc); + } + + documents.Should().BeEmpty(); + sink.Documents.Should().BeEmpty(); + handler.DocumentRequestCount.Should().Be(2); + handler.SeenIfNoneMatch.Should().Contain("\"etag-123\""); + } + + [Fact] + public async Task FetchAsync_SkipsWhenChecksumMismatch() + { + var baseUri = new Uri("https://ubuntu.test/security/csaf/"); + var indexUri = new Uri(baseUri, "index.json"); + var catalogUri = new Uri(baseUri, "stable/catalog.json"); + var advisoryUri = new Uri(baseUri, "stable/USN-2025-0002.json"); + + var manifest = CreateTestManifest(advisoryUri, "USN-2025-0002", "2025-10-18T00:00:00Z"); + var indexJson = manifest.IndexJson; + var catalogJson = manifest.CatalogJson.Replace("{{SHA256}}", new string('a', 64), StringComparison.Ordinal); + var handler = new UbuntuTestHttpHandler(indexUri, indexJson, catalogUri, catalogJson, advisoryUri, Encoding.UTF8.GetBytes("{\"document\":\"payload\"}"), expectedEtag: "etag-999"); + + var httpClient = new HttpClient(handler); + var httpFactory = new SingleClientFactory(httpClient); + var cache = new MemoryCache(new MemoryCacheOptions()); + var fileSystem = new MockFileSystem(); + var loader = new UbuntuCatalogLoader(httpFactory, cache, fileSystem, NullLogger<UbuntuCatalogLoader>.Instance, TimeProvider.System); + var optionsValidator = new UbuntuConnectorOptionsValidator(fileSystem); + var stateRepository = new InMemoryConnectorStateRepository(); + + var connector = new UbuntuCsafConnector( + loader, + httpFactory, + stateRepository, + new[] { optionsValidator }, + NullLogger<UbuntuCsafConnector>.Instance, + TimeProvider.System); + + await connector.ValidateAsync(new VexConnectorSettings(ImmutableDictionary<string, string>.Empty), CancellationToken.None); + + var sink = new 
InMemoryRawSink(); + var context = new VexConnectorContext(null, VexConnectorSettings.Empty, sink, new NoopSignatureVerifier(), new NoopNormalizerRouter(), new ServiceCollection().BuildServiceProvider(), ImmutableDictionary<string, string>.Empty); + + var documents = new List<VexRawDocument>(); + await foreach (var doc in connector.FetchAsync(context, CancellationToken.None)) + { + documents.Add(doc); + } + + documents.Should().BeEmpty(); + sink.Documents.Should().BeEmpty(); + stateRepository.CurrentState.Should().NotBeNull(); + stateRepository.CurrentState!.DocumentDigests.Should().BeEmpty(); + handler.DocumentRequestCount.Should().Be(1); + } + + private static (string IndexJson, string CatalogJson) CreateTestManifest(Uri advisoryUri, string advisoryId, string timestamp) + { + var indexJson = """ + { + "generated": "2025-10-18T00:00:00Z", + "channels": [ + { + "name": "stable", + "catalogUrl": "{{advisoryUri.GetLeftPart(UriPartial.Authority)}}/security/csaf/stable/catalog.json", + "sha256": "ignore" + } + ] + } + """; + + var catalogJson = """ + { + "resources": [ + { + "id": "{{advisoryId}}", + "type": "csaf", + "url": "{{advisoryUri}}", + "last_modified": "{{timestamp}}", + "hashes": { + "sha256": "{{SHA256}}" + }, + "etag": "\"etag-123\"", + "title": "{{advisoryId}}" + } + ] + } + """; + + return (indexJson, catalogJson); + } + + private static string ComputeSha256(ReadOnlySpan<byte> payload) + { + Span<byte> buffer = stackalloc byte[32]; + SHA256.HashData(payload, buffer); + return Convert.ToHexString(buffer).ToLowerInvariant(); + } + + private sealed class SingleClientFactory : IHttpClientFactory + { + private readonly HttpClient _client; + + public SingleClientFactory(HttpClient client) + { + _client = client; + } + + public HttpClient CreateClient(string name) => _client; + } + + private sealed class UbuntuTestHttpHandler : HttpMessageHandler + { + private readonly Uri _indexUri; + private readonly string _indexPayload; + private readonly Uri _catalogUri; + private readonly string _catalogPayload; + private readonly Uri _documentUri; + private readonly byte[] _documentPayload; + private readonly string _expectedEtag; + + public int DocumentRequestCount { get; private set; } + public List<string> SeenIfNoneMatch { get; } = new(); + + public UbuntuTestHttpHandler(Uri indexUri, string indexPayload, Uri catalogUri, string catalogPayload, Uri documentUri, byte[] documentPayload, string expectedEtag) + { + _indexUri = indexUri; + _indexPayload = indexPayload; + _catalogUri = catalogUri; + _catalogPayload = catalogPayload; + _documentUri = documentUri; + _documentPayload = documentPayload; + _expectedEtag = expectedEtag; + } + + protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + if (request.RequestUri == _indexUri) + { + return Task.FromResult(CreateJsonResponse(_indexPayload)); + } + + if (request.RequestUri == _catalogUri) + { + return Task.FromResult(CreateJsonResponse(_catalogPayload)); + } + + if (request.RequestUri == _documentUri) + { + DocumentRequestCount++; + if (request.Headers.IfNoneMatch is { Count: > 0 }) + { + var header = request.Headers.IfNoneMatch.First().ToString(); + SeenIfNoneMatch.Add(header); + if (header.Trim('"') == _expectedEtag || header == $"\"{_expectedEtag}\"") + { + return Task.FromResult(new HttpResponseMessage(HttpStatusCode.NotModified)); + } + } + + var response = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new ByteArrayContent(_documentPayload), + }; + 
response.Headers.ETag = new EntityTagHeaderValue($"\"{_expectedEtag}\""); + response.Content.Headers.ContentType = new MediaTypeHeaderValue("application/json"); + return Task.FromResult(response); + } + + return Task.FromResult(new HttpResponseMessage(HttpStatusCode.NotFound) + { + Content = new StringContent($"No response configured for {request.RequestUri}"), + }); + } + + private static HttpResponseMessage CreateJsonResponse(string payload) + => new(HttpStatusCode.OK) + { + Content = new StringContent(payload, Encoding.UTF8, "application/json"), + }; + } + + private sealed class InMemoryConnectorStateRepository : IVexConnectorStateRepository + { + public VexConnectorState? CurrentState { get; private set; } + + public ValueTask<VexConnectorState?> GetAsync(string connectorId, CancellationToken cancellationToken, IClientSessionHandle? session = null) + => ValueTask.FromResult(CurrentState); + + public ValueTask SaveAsync(VexConnectorState state, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + CurrentState = state; + return ValueTask.CompletedTask; + } + } + + private sealed class InMemoryRawSink : IVexRawDocumentSink + { + public List<VexRawDocument> Documents { get; } = new(); + + public ValueTask StoreAsync(VexRawDocument document, CancellationToken cancellationToken) + { + Documents.Add(document); + return ValueTask.CompletedTask; + } + } + + private sealed class NoopSignatureVerifier : IVexSignatureVerifier + { + public ValueTask<VexSignatureMetadata?> VerifyAsync(VexRawDocument document, CancellationToken cancellationToken) + => ValueTask.FromResult<VexSignatureMetadata?>(null); + } + + private sealed class NoopNormalizerRouter : IVexNormalizerRouter + { + public ValueTask<VexClaimBatch> NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken) + => ValueTask.FromResult(new VexClaimBatch(document, ImmutableArray<VexClaim>.Empty, ImmutableDictionary<string, string>.Empty)); + } +} diff --git a/src/StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests/Metadata/UbuntuCatalogLoaderTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests/Metadata/UbuntuCatalogLoaderTests.cs similarity index 100% rename from src/StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests/Metadata/UbuntuCatalogLoaderTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests/Metadata/UbuntuCatalogLoaderTests.cs diff --git a/src/StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests/StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests.csproj b/src/Excititor/__Tests/StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests/StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests.csproj similarity index 75% rename from src/StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests/StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests.csproj rename to src/Excititor/__Tests/StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests/StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests.csproj index 3fe0a543..521952c1 100644 --- a/src/StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests/StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests.csproj +++ b/src/Excititor/__Tests/StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests/StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -7,11 +8,11 @@ <TreatWarningsAsErrors>true</TreatWarningsAsErrors> </PropertyGroup> <ItemGroup> - <ProjectReference 
Include="..\StellaOps.Excititor.Connectors.Ubuntu.CSAF\StellaOps.Excititor.Connectors.Ubuntu.CSAF.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Excititor.Connectors.Ubuntu.CSAF/StellaOps.Excititor.Connectors.Ubuntu.CSAF.csproj" /> </ItemGroup> <ItemGroup> <PackageReference Include="FluentAssertions" Version="6.12.0" /> <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> <PackageReference Include="System.IO.Abstractions.TestingHelpers" Version="20.0.28" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Excititor.Core.Tests/Aoc/VexRawWriteGuardTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Core.Tests/Aoc/VexRawWriteGuardTests.cs similarity index 97% rename from src/StellaOps.Excititor.Core.Tests/Aoc/VexRawWriteGuardTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Core.Tests/Aoc/VexRawWriteGuardTests.cs index eda68716..f015672c 100644 --- a/src/StellaOps.Excititor.Core.Tests/Aoc/VexRawWriteGuardTests.cs +++ b/src/Excititor/__Tests/StellaOps.Excititor.Core.Tests/Aoc/VexRawWriteGuardTests.cs @@ -1,68 +1,68 @@ -using System.Collections.Immutable; -using System.Text.Json; -using StellaOps.Aoc; -using StellaOps.Excititor.Core.Aoc; -using RawVexDocument = StellaOps.Concelier.RawModels.VexRawDocument; -using RawSignatureMetadata = StellaOps.Concelier.RawModels.RawSignatureMetadata; -using RawSourceMetadata = StellaOps.Concelier.RawModels.RawSourceMetadata; -using RawUpstreamMetadata = StellaOps.Concelier.RawModels.RawUpstreamMetadata; -using RawContent = StellaOps.Concelier.RawModels.RawContent; -using RawLinkset = StellaOps.Concelier.RawModels.RawLinkset; -using RawDocumentFactory = StellaOps.Concelier.RawModels.RawDocumentFactory; -using VexStatementSummary = StellaOps.Concelier.RawModels.VexStatementSummary; -using RawReference = StellaOps.Concelier.RawModels.RawReference; - -namespace StellaOps.Excititor.Core.Tests.Aoc; - -public sealed class VexRawWriteGuardTests -{ - private static RawVexDocument CreateDocument(bool signaturePresent = false, bool includeSignaturePayload = true) - { - var signature = signaturePresent - ? new RawSignatureMetadata(true, "dsse", "key-1", includeSignaturePayload ? 
"signed" : null) - : new RawSignatureMetadata(false); - - using var contentDoc = JsonDocument.Parse("{\"id\":\"VEX-1\"}"); - - return RawDocumentFactory.CreateVex( - tenant: "tenant-a", - source: new RawSourceMetadata("vendor-x", "connector-y", "1.0.0"), - upstream: new RawUpstreamMetadata( - UpstreamId: "VEX-1", - DocumentVersion: "1", - RetrievedAt: DateTimeOffset.UtcNow, - ContentHash: "sha256:abc", - Signature: signature, - Provenance: ImmutableDictionary<string, string>.Empty), - content: new RawContent("CSA" , "2.0", contentDoc.RootElement.Clone()), - linkset: new RawLinkset - { - Aliases = ImmutableArray<string>.Empty, - PackageUrls = ImmutableArray<string>.Empty, - Cpes = ImmutableArray<string>.Empty, - References = ImmutableArray<RawReference>.Empty, - ReconciledFrom = ImmutableArray<string>.Empty, - Notes = ImmutableDictionary<string, string>.Empty - }, - statements: ImmutableArray<VexStatementSummary>.Empty); - } - - [Fact] - public void EnsureValid_AllowsMinimalDocument() - { - var guard = new VexRawWriteGuard(new AocWriteGuard()); - var document = CreateDocument(); - - guard.EnsureValid(document); - } - - [Fact] - public void EnsureValid_ThrowsWhenSignatureMissingPayload() - { - var guard = new VexRawWriteGuard(new AocWriteGuard()); - var document = CreateDocument(signaturePresent: true, includeSignaturePayload: false); - - var exception = Assert.Throws<ExcititorAocGuardException>(() => guard.EnsureValid(document)); - Assert.Equal("ERR_AOC_005", exception.PrimaryErrorCode); - } -} +using System.Collections.Immutable; +using System.Text.Json; +using StellaOps.Aoc; +using StellaOps.Excititor.Core.Aoc; +using RawVexDocument = StellaOps.Concelier.RawModels.VexRawDocument; +using RawSignatureMetadata = StellaOps.Concelier.RawModels.RawSignatureMetadata; +using RawSourceMetadata = StellaOps.Concelier.RawModels.RawSourceMetadata; +using RawUpstreamMetadata = StellaOps.Concelier.RawModels.RawUpstreamMetadata; +using RawContent = StellaOps.Concelier.RawModels.RawContent; +using RawLinkset = StellaOps.Concelier.RawModels.RawLinkset; +using RawDocumentFactory = StellaOps.Concelier.RawModels.RawDocumentFactory; +using VexStatementSummary = StellaOps.Concelier.RawModels.VexStatementSummary; +using RawReference = StellaOps.Concelier.RawModels.RawReference; + +namespace StellaOps.Excititor.Core.Tests.Aoc; + +public sealed class VexRawWriteGuardTests +{ + private static RawVexDocument CreateDocument(bool signaturePresent = false, bool includeSignaturePayload = true) + { + var signature = signaturePresent + ? new RawSignatureMetadata(true, "dsse", "key-1", includeSignaturePayload ? 
"signed" : null) + : new RawSignatureMetadata(false); + + using var contentDoc = JsonDocument.Parse("{\"id\":\"VEX-1\"}"); + + return RawDocumentFactory.CreateVex( + tenant: "tenant-a", + source: new RawSourceMetadata("vendor-x", "connector-y", "1.0.0"), + upstream: new RawUpstreamMetadata( + UpstreamId: "VEX-1", + DocumentVersion: "1", + RetrievedAt: DateTimeOffset.UtcNow, + ContentHash: "sha256:abc", + Signature: signature, + Provenance: ImmutableDictionary<string, string>.Empty), + content: new RawContent("CSA" , "2.0", contentDoc.RootElement.Clone()), + linkset: new RawLinkset + { + Aliases = ImmutableArray<string>.Empty, + PackageUrls = ImmutableArray<string>.Empty, + Cpes = ImmutableArray<string>.Empty, + References = ImmutableArray<RawReference>.Empty, + ReconciledFrom = ImmutableArray<string>.Empty, + Notes = ImmutableDictionary<string, string>.Empty + }, + statements: ImmutableArray<VexStatementSummary>.Empty); + } + + [Fact] + public void EnsureValid_AllowsMinimalDocument() + { + var guard = new VexRawWriteGuard(new AocWriteGuard()); + var document = CreateDocument(); + + guard.EnsureValid(document); + } + + [Fact] + public void EnsureValid_ThrowsWhenSignatureMissingPayload() + { + var guard = new VexRawWriteGuard(new AocWriteGuard()); + var document = CreateDocument(signaturePresent: true, includeSignaturePayload: false); + + var exception = Assert.Throws<ExcititorAocGuardException>(() => guard.EnsureValid(document)); + Assert.Equal("ERR_AOC_005", exception.PrimaryErrorCode); + } +} diff --git a/src/StellaOps.Excititor.Core.Tests/Observations/VexObservationQueryServiceTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Core.Tests/Observations/VexObservationQueryServiceTests.cs similarity index 97% rename from src/StellaOps.Excititor.Core.Tests/Observations/VexObservationQueryServiceTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Core.Tests/Observations/VexObservationQueryServiceTests.cs index c566cc26..49b4e081 100644 --- a/src/StellaOps.Excititor.Core.Tests/Observations/VexObservationQueryServiceTests.cs +++ b/src/Excititor/__Tests/StellaOps.Excititor.Core.Tests/Observations/VexObservationQueryServiceTests.cs @@ -1,307 +1,307 @@ -using System.Collections.Immutable; -using System.Text.Json.Nodes; -using StellaOps.Excititor.Core; -using StellaOps.Excititor.Core.Observations; -using Xunit; - -namespace StellaOps.Excititor.Core.Tests.Observations; - -public sealed class VexObservationQueryServiceTests -{ - private static readonly TimeProvider TimeProvider = TimeProvider.System; - - [Fact] - public async Task QueryAsync_WhenNoFilters_ReturnsSortedObservations() - { - var now = DateTimeOffset.UtcNow; - var observations = new[] - { - CreateObservation( - observationId: "tenant-a:redhat:0001:1", - tenant: "tenant-a", - providerId: "RedHat", - streamId: "csaf", - vulnerabilityIds: new[] { "CVE-2025-1000" }, - productKeys: new[] { "pkg:rpm/redhat/openssl@1.1.1w-12" }, - purls: new[] { "pkg:rpm/redhat/openssl@1.1.1w-12" }, - createdAt: now.AddMinutes(-10)), - CreateObservation( - observationId: "tenant-a:ubuntu:0002:1", - tenant: "Tenant-A", - providerId: "ubuntu", - streamId: "cyclonedx", - vulnerabilityIds: new[] { "CVE-2025-1001" }, - productKeys: new[] { "pkg:deb/ubuntu/openssl@1.1.1w-9ubuntu1" }, - purls: new[] { "pkg:deb/ubuntu/openssl@1.1.1w-9ubuntu1" }, - createdAt: now) - }; - - var lookup = new InMemoryLookup(observations); - var service = new VexObservationQueryService(lookup); - - var result = await service.QueryAsync(new VexObservationQueryOptions("TENANT-A"), 
CancellationToken.None); - - Assert.Equal(2, result.Observations.Length); - Assert.Equal("tenant-a:ubuntu:0002:1", result.Observations[0].ObservationId); - Assert.Equal("tenant-a:redhat:0001:1", result.Observations[1].ObservationId); - - Assert.Equal(new[] { "CVE-2025-1000", "CVE-2025-1001" }, result.Aggregate.VulnerabilityIds); - Assert.Equal( - new[] - { - "pkg:deb/ubuntu/openssl@1.1.1w-9ubuntu1", - "pkg:rpm/redhat/openssl@1.1.1w-12" - }, - result.Aggregate.ProductKeys); - - Assert.Equal( - new[] - { - "pkg:deb/ubuntu/openssl@1.1.1w-9ubuntu1", - "pkg:rpm/redhat/openssl@1.1.1w-12" - }, - result.Aggregate.Purls); - - Assert.Equal(new[] { "redhat", "ubuntu" }, result.Aggregate.ProviderIds); - Assert.False(result.HasMore); - Assert.Null(result.NextCursor); - } - - [Fact] - public async Task QueryAsync_WithVulnerabilityAndStatusFilters_FiltersStatements() - { - var now = DateTimeOffset.UtcNow; - var observations = new[] - { - CreateObservation( - observationId: "tenant-a:redhat:0001:1", - tenant: "tenant-a", - providerId: "redhat", - streamId: "csaf", - vulnerabilityIds: new[] { "CVE-2025-1000" }, - productKeys: new[] { "pkg:rpm/redhat/openssl@1.1.1w-12" }, - purls: Array.Empty<string>(), - statuses: new[] { VexClaimStatus.NotAffected }, - createdAt: now), - CreateObservation( - observationId: "tenant-a:ubuntu:0002:1", - tenant: "tenant-a", - providerId: "ubuntu", - streamId: "cyclonedx", - vulnerabilityIds: new[] { "CVE-2025-9999" }, - productKeys: new[] { "pkg:deb/ubuntu/openssl@1.1.1w-9ubuntu1" }, - purls: Array.Empty<string>(), - statuses: new[] { VexClaimStatus.Affected }, - createdAt: now.AddMinutes(-5)) - }; - - var lookup = new InMemoryLookup(observations); - var service = new VexObservationQueryService(lookup); - - var options = new VexObservationQueryOptions( - tenant: "tenant-a", - vulnerabilityIds: new[] { "cve-2025-1000" }, - statuses: new[] { VexClaimStatus.NotAffected }); - - var result = await service.QueryAsync(options, CancellationToken.None); - - Assert.Single(result.Observations); - Assert.Equal("tenant-a:redhat:0001:1", result.Observations[0].ObservationId); - Assert.Equal(new[] { "CVE-2025-1000" }, result.Aggregate.VulnerabilityIds); - Assert.Equal(new[] { "pkg:rpm/redhat/openssl@1.1.1w-12" }, result.Aggregate.ProductKeys); - } - - [Fact] - public async Task QueryAsync_WithCursorAdvancesPages() - { - var now = DateTimeOffset.UtcNow; - var observations = new[] - { - CreateObservation( - observationId: "tenant-a:alpha", - tenant: "tenant-a", - providerId: "redhat", - streamId: "csaf", - vulnerabilityIds: new[] { "CVE-2025-0001" }, - productKeys: new[] { "pkg:rpm/redhat/foo@1.0.0" }, - purls: Array.Empty<string>(), - statuses: new[] { VexClaimStatus.NotAffected }, - createdAt: now), - CreateObservation( - observationId: "tenant-a:beta", - tenant: "tenant-a", - providerId: "ubuntu", - streamId: "cyclonedx", - vulnerabilityIds: new[] { "CVE-2025-0002" }, - productKeys: new[] { "pkg:deb/ubuntu/foo@1.0.0" }, - purls: Array.Empty<string>(), - statuses: new[] { VexClaimStatus.Affected }, - createdAt: now.AddMinutes(-1)), - CreateObservation( - observationId: "tenant-a:gamma", - tenant: "tenant-a", - providerId: "suse", - streamId: "openvex", - vulnerabilityIds: new[] { "CVE-2025-0003" }, - productKeys: new[] { "pkg:rpm/suse/foo@1.0.0" }, - purls: Array.Empty<string>(), - statuses: new[] { VexClaimStatus.UnderInvestigation }, - createdAt: now.AddMinutes(-2)) - }; - - var lookup = new InMemoryLookup(observations); - var service = new VexObservationQueryService(lookup); - - var 
first = await service.QueryAsync( - new VexObservationQueryOptions("tenant-a", limit: 2), - CancellationToken.None); - - Assert.Equal(2, first.Observations.Length); - Assert.True(first.HasMore); - Assert.NotNull(first.NextCursor); - - var second = await service.QueryAsync( - new VexObservationQueryOptions("tenant-a", limit: 2, cursor: first.NextCursor), - CancellationToken.None); - - Assert.Single(second.Observations); - Assert.False(second.HasMore); - Assert.Null(second.NextCursor); - Assert.Equal("tenant-a:gamma", second.Observations[0].ObservationId); - } - - private static VexObservation CreateObservation( - string observationId, - string tenant, - string providerId, - string streamId, - IEnumerable<string> vulnerabilityIds, - IEnumerable<string> productKeys, - IEnumerable<string> purls, - DateTimeOffset createdAt, - IEnumerable<VexClaimStatus>? statuses = null) - { - var vulnerabilityArray = vulnerabilityIds.ToArray(); - var productArray = productKeys.ToArray(); - var purlArray = purls.ToArray(); - var statusArray = (statuses ?? Array.Empty<VexClaimStatus>()).ToArray(); - - if (vulnerabilityArray.Length != productArray.Length) - { - throw new ArgumentException("Vulnerability and product collections must align."); - } - - var statements = ImmutableArray.CreateBuilder<VexObservationStatement>(vulnerabilityArray.Length); - for (var i = 0; i < vulnerabilityArray.Length; i++) - { - var status = statusArray.Length switch - { - 0 => VexClaimStatus.NotAffected, - _ when i < statusArray.Length => statusArray[i], - _ => statusArray[0] - }; - - var purlValue = purlArray.Length switch - { - 0 => null, - _ when i < purlArray.Length => purlArray[i], - _ => purlArray[0] - }; - - statements.Add(new VexObservationStatement( - vulnerabilityArray[i], - productArray[i], - status, - lastObserved: createdAt, - purl: purlValue, - cpe: null, - evidence: ImmutableArray<JsonNode>.Empty)); - } - - var upstream = new VexObservationUpstream( - upstreamId: observationId, - documentVersion: null, - fetchedAt: createdAt, - receivedAt: createdAt, - contentHash: $"sha256:{Guid.NewGuid():N}", - signature: new VexObservationSignature(present: false, null, null, null)); - - var linkset = new VexObservationLinkset( - aliases: vulnerabilityIds, - purls: purls, - cpes: Array.Empty<string>(), - references: new[] - { - new VexObservationReference("source", $"https://example.test/{observationId}") - }); - - var content = new VexObservationContent( - format: "csaf", - specVersion: "2.0", - raw: JsonNode.Parse("""{"document":"payload"}""") ?? 
throw new InvalidOperationException("Raw payload required.")); - - return new VexObservation( - observationId, - tenant, - providerId, - streamId, - upstream, - statements.ToImmutable(), - content, - linkset, - createdAt, - supersedes: ImmutableArray<string>.Empty, - attributes: ImmutableDictionary<string, string>.Empty); - } - - private sealed class InMemoryLookup : IVexObservationLookup - { - private readonly IReadOnlyList<VexObservation> _observations; - - public InMemoryLookup(IReadOnlyList<VexObservation> observations) - { - _observations = observations; - } - - public ValueTask<IReadOnlyList<VexObservation>> ListByTenantAsync(string tenant, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(tenant); - cancellationToken.ThrowIfCancellationRequested(); - - return ValueTask.FromResult<IReadOnlyList<VexObservation>>( - _observations.Where(observation => string.Equals(observation.Tenant, tenant, StringComparison.OrdinalIgnoreCase)).ToList()); - } - - public ValueTask<IReadOnlyList<VexObservation>> FindByFiltersAsync( - string tenant, - IReadOnlyCollection<string> observationIds, - IReadOnlyCollection<string> vulnerabilityIds, - IReadOnlyCollection<string> productKeys, - IReadOnlyCollection<string> purls, - IReadOnlyCollection<string> cpes, - IReadOnlyCollection<string> providerIds, - IReadOnlyCollection<VexClaimStatus> statuses, - VexObservationCursor? cursor, - int limit, - CancellationToken cancellationToken) - { - cancellationToken.ThrowIfCancellationRequested(); - - var filtered = _observations - .Where(observation => string.Equals(observation.Tenant, tenant, StringComparison.OrdinalIgnoreCase)) - .ToList(); - - if (cursor is not null) - { - filtered = filtered - .Where(observation => - observation.CreatedAt < cursor.CreatedAt || - (observation.CreatedAt == cursor.CreatedAt && - string.CompareOrdinal(observation.ObservationId, cursor.ObservationId) < 0)) - .ToList(); - } - - return ValueTask.FromResult<IReadOnlyList<VexObservation>>(filtered); - } - } -} +using System.Collections.Immutable; +using System.Text.Json.Nodes; +using StellaOps.Excititor.Core; +using StellaOps.Excititor.Core.Observations; +using Xunit; + +namespace StellaOps.Excititor.Core.Tests.Observations; + +public sealed class VexObservationQueryServiceTests +{ + private static readonly TimeProvider TimeProvider = TimeProvider.System; + + [Fact] + public async Task QueryAsync_WhenNoFilters_ReturnsSortedObservations() + { + var now = DateTimeOffset.UtcNow; + var observations = new[] + { + CreateObservation( + observationId: "tenant-a:redhat:0001:1", + tenant: "tenant-a", + providerId: "RedHat", + streamId: "csaf", + vulnerabilityIds: new[] { "CVE-2025-1000" }, + productKeys: new[] { "pkg:rpm/redhat/openssl@1.1.1w-12" }, + purls: new[] { "pkg:rpm/redhat/openssl@1.1.1w-12" }, + createdAt: now.AddMinutes(-10)), + CreateObservation( + observationId: "tenant-a:ubuntu:0002:1", + tenant: "Tenant-A", + providerId: "ubuntu", + streamId: "cyclonedx", + vulnerabilityIds: new[] { "CVE-2025-1001" }, + productKeys: new[] { "pkg:deb/ubuntu/openssl@1.1.1w-9ubuntu1" }, + purls: new[] { "pkg:deb/ubuntu/openssl@1.1.1w-9ubuntu1" }, + createdAt: now) + }; + + var lookup = new InMemoryLookup(observations); + var service = new VexObservationQueryService(lookup); + + var result = await service.QueryAsync(new VexObservationQueryOptions("TENANT-A"), CancellationToken.None); + + Assert.Equal(2, result.Observations.Length); + Assert.Equal("tenant-a:ubuntu:0002:1", result.Observations[0].ObservationId); + 
Assert.Equal("tenant-a:redhat:0001:1", result.Observations[1].ObservationId); + + Assert.Equal(new[] { "CVE-2025-1000", "CVE-2025-1001" }, result.Aggregate.VulnerabilityIds); + Assert.Equal( + new[] + { + "pkg:deb/ubuntu/openssl@1.1.1w-9ubuntu1", + "pkg:rpm/redhat/openssl@1.1.1w-12" + }, + result.Aggregate.ProductKeys); + + Assert.Equal( + new[] + { + "pkg:deb/ubuntu/openssl@1.1.1w-9ubuntu1", + "pkg:rpm/redhat/openssl@1.1.1w-12" + }, + result.Aggregate.Purls); + + Assert.Equal(new[] { "redhat", "ubuntu" }, result.Aggregate.ProviderIds); + Assert.False(result.HasMore); + Assert.Null(result.NextCursor); + } + + [Fact] + public async Task QueryAsync_WithVulnerabilityAndStatusFilters_FiltersStatements() + { + var now = DateTimeOffset.UtcNow; + var observations = new[] + { + CreateObservation( + observationId: "tenant-a:redhat:0001:1", + tenant: "tenant-a", + providerId: "redhat", + streamId: "csaf", + vulnerabilityIds: new[] { "CVE-2025-1000" }, + productKeys: new[] { "pkg:rpm/redhat/openssl@1.1.1w-12" }, + purls: Array.Empty<string>(), + statuses: new[] { VexClaimStatus.NotAffected }, + createdAt: now), + CreateObservation( + observationId: "tenant-a:ubuntu:0002:1", + tenant: "tenant-a", + providerId: "ubuntu", + streamId: "cyclonedx", + vulnerabilityIds: new[] { "CVE-2025-9999" }, + productKeys: new[] { "pkg:deb/ubuntu/openssl@1.1.1w-9ubuntu1" }, + purls: Array.Empty<string>(), + statuses: new[] { VexClaimStatus.Affected }, + createdAt: now.AddMinutes(-5)) + }; + + var lookup = new InMemoryLookup(observations); + var service = new VexObservationQueryService(lookup); + + var options = new VexObservationQueryOptions( + tenant: "tenant-a", + vulnerabilityIds: new[] { "cve-2025-1000" }, + statuses: new[] { VexClaimStatus.NotAffected }); + + var result = await service.QueryAsync(options, CancellationToken.None); + + Assert.Single(result.Observations); + Assert.Equal("tenant-a:redhat:0001:1", result.Observations[0].ObservationId); + Assert.Equal(new[] { "CVE-2025-1000" }, result.Aggregate.VulnerabilityIds); + Assert.Equal(new[] { "pkg:rpm/redhat/openssl@1.1.1w-12" }, result.Aggregate.ProductKeys); + } + + [Fact] + public async Task QueryAsync_WithCursorAdvancesPages() + { + var now = DateTimeOffset.UtcNow; + var observations = new[] + { + CreateObservation( + observationId: "tenant-a:alpha", + tenant: "tenant-a", + providerId: "redhat", + streamId: "csaf", + vulnerabilityIds: new[] { "CVE-2025-0001" }, + productKeys: new[] { "pkg:rpm/redhat/foo@1.0.0" }, + purls: Array.Empty<string>(), + statuses: new[] { VexClaimStatus.NotAffected }, + createdAt: now), + CreateObservation( + observationId: "tenant-a:beta", + tenant: "tenant-a", + providerId: "ubuntu", + streamId: "cyclonedx", + vulnerabilityIds: new[] { "CVE-2025-0002" }, + productKeys: new[] { "pkg:deb/ubuntu/foo@1.0.0" }, + purls: Array.Empty<string>(), + statuses: new[] { VexClaimStatus.Affected }, + createdAt: now.AddMinutes(-1)), + CreateObservation( + observationId: "tenant-a:gamma", + tenant: "tenant-a", + providerId: "suse", + streamId: "openvex", + vulnerabilityIds: new[] { "CVE-2025-0003" }, + productKeys: new[] { "pkg:rpm/suse/foo@1.0.0" }, + purls: Array.Empty<string>(), + statuses: new[] { VexClaimStatus.UnderInvestigation }, + createdAt: now.AddMinutes(-2)) + }; + + var lookup = new InMemoryLookup(observations); + var service = new VexObservationQueryService(lookup); + + var first = await service.QueryAsync( + new VexObservationQueryOptions("tenant-a", limit: 2), + CancellationToken.None); + + Assert.Equal(2, 
first.Observations.Length); + Assert.True(first.HasMore); + Assert.NotNull(first.NextCursor); + + var second = await service.QueryAsync( + new VexObservationQueryOptions("tenant-a", limit: 2, cursor: first.NextCursor), + CancellationToken.None); + + Assert.Single(second.Observations); + Assert.False(second.HasMore); + Assert.Null(second.NextCursor); + Assert.Equal("tenant-a:gamma", second.Observations[0].ObservationId); + } + + private static VexObservation CreateObservation( + string observationId, + string tenant, + string providerId, + string streamId, + IEnumerable<string> vulnerabilityIds, + IEnumerable<string> productKeys, + IEnumerable<string> purls, + DateTimeOffset createdAt, + IEnumerable<VexClaimStatus>? statuses = null) + { + var vulnerabilityArray = vulnerabilityIds.ToArray(); + var productArray = productKeys.ToArray(); + var purlArray = purls.ToArray(); + var statusArray = (statuses ?? Array.Empty<VexClaimStatus>()).ToArray(); + + if (vulnerabilityArray.Length != productArray.Length) + { + throw new ArgumentException("Vulnerability and product collections must align."); + } + + var statements = ImmutableArray.CreateBuilder<VexObservationStatement>(vulnerabilityArray.Length); + for (var i = 0; i < vulnerabilityArray.Length; i++) + { + var status = statusArray.Length switch + { + 0 => VexClaimStatus.NotAffected, + _ when i < statusArray.Length => statusArray[i], + _ => statusArray[0] + }; + + var purlValue = purlArray.Length switch + { + 0 => null, + _ when i < purlArray.Length => purlArray[i], + _ => purlArray[0] + }; + + statements.Add(new VexObservationStatement( + vulnerabilityArray[i], + productArray[i], + status, + lastObserved: createdAt, + purl: purlValue, + cpe: null, + evidence: ImmutableArray<JsonNode>.Empty)); + } + + var upstream = new VexObservationUpstream( + upstreamId: observationId, + documentVersion: null, + fetchedAt: createdAt, + receivedAt: createdAt, + contentHash: $"sha256:{Guid.NewGuid():N}", + signature: new VexObservationSignature(present: false, null, null, null)); + + var linkset = new VexObservationLinkset( + aliases: vulnerabilityIds, + purls: purls, + cpes: Array.Empty<string>(), + references: new[] + { + new VexObservationReference("source", $"https://example.test/{observationId}") + }); + + var content = new VexObservationContent( + format: "csaf", + specVersion: "2.0", + raw: JsonNode.Parse("""{"document":"payload"}""") ?? 
throw new InvalidOperationException("Raw payload required.")); + + return new VexObservation( + observationId, + tenant, + providerId, + streamId, + upstream, + statements.ToImmutable(), + content, + linkset, + createdAt, + supersedes: ImmutableArray<string>.Empty, + attributes: ImmutableDictionary<string, string>.Empty); + } + + private sealed class InMemoryLookup : IVexObservationLookup + { + private readonly IReadOnlyList<VexObservation> _observations; + + public InMemoryLookup(IReadOnlyList<VexObservation> observations) + { + _observations = observations; + } + + public ValueTask<IReadOnlyList<VexObservation>> ListByTenantAsync(string tenant, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(tenant); + cancellationToken.ThrowIfCancellationRequested(); + + return ValueTask.FromResult<IReadOnlyList<VexObservation>>( + _observations.Where(observation => string.Equals(observation.Tenant, tenant, StringComparison.OrdinalIgnoreCase)).ToList()); + } + + public ValueTask<IReadOnlyList<VexObservation>> FindByFiltersAsync( + string tenant, + IReadOnlyCollection<string> observationIds, + IReadOnlyCollection<string> vulnerabilityIds, + IReadOnlyCollection<string> productKeys, + IReadOnlyCollection<string> purls, + IReadOnlyCollection<string> cpes, + IReadOnlyCollection<string> providerIds, + IReadOnlyCollection<VexClaimStatus> statuses, + VexObservationCursor? cursor, + int limit, + CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + + var filtered = _observations + .Where(observation => string.Equals(observation.Tenant, tenant, StringComparison.OrdinalIgnoreCase)) + .ToList(); + + if (cursor is not null) + { + filtered = filtered + .Where(observation => + observation.CreatedAt < cursor.CreatedAt || + (observation.CreatedAt == cursor.CreatedAt && + string.CompareOrdinal(observation.ObservationId, cursor.ObservationId) < 0)) + .ToList(); + } + + return ValueTask.FromResult<IReadOnlyList<VexObservation>>(filtered); + } + } +} diff --git a/src/Excititor/__Tests/StellaOps.Excititor.Core.Tests/StellaOps.Excititor.Core.Tests.csproj b/src/Excititor/__Tests/StellaOps.Excititor.Core.Tests/StellaOps.Excititor.Core.Tests.csproj new file mode 100644 index 00000000..fe257623 --- /dev/null +++ b/src/Excititor/__Tests/StellaOps.Excititor.Core.Tests/StellaOps.Excititor.Core.Tests.csproj @@ -0,0 +1,16 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Excititor.Core/StellaOps.Excititor.Core.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Excititor.Policy/StellaOps.Excititor.Policy.csproj" /> + <ProjectReference Include="../../../Aoc/__Libraries/StellaOps.Aoc/StellaOps.Aoc.csproj" /> + <ProjectReference Include="../../../Concelier/__Libraries/StellaOps.Concelier.RawModels/StellaOps.Concelier.RawModels.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Excititor.Core.Tests/VexCanonicalJsonSerializerTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Core.Tests/VexCanonicalJsonSerializerTests.cs similarity index 100% rename from src/StellaOps.Excititor.Core.Tests/VexCanonicalJsonSerializerTests.cs rename to 
src/Excititor/__Tests/StellaOps.Excititor.Core.Tests/VexCanonicalJsonSerializerTests.cs diff --git a/src/StellaOps.Excititor.Core.Tests/VexConsensusResolverTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Core.Tests/VexConsensusResolverTests.cs similarity index 100% rename from src/StellaOps.Excititor.Core.Tests/VexConsensusResolverTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Core.Tests/VexConsensusResolverTests.cs diff --git a/src/StellaOps.Excititor.Core.Tests/VexPolicyBinderTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Core.Tests/VexPolicyBinderTests.cs similarity index 100% rename from src/StellaOps.Excititor.Core.Tests/VexPolicyBinderTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Core.Tests/VexPolicyBinderTests.cs diff --git a/src/StellaOps.Excititor.Core.Tests/VexPolicyDiagnosticsTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Core.Tests/VexPolicyDiagnosticsTests.cs similarity index 100% rename from src/StellaOps.Excititor.Core.Tests/VexPolicyDiagnosticsTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Core.Tests/VexPolicyDiagnosticsTests.cs diff --git a/src/StellaOps.Excititor.Core.Tests/VexQuerySignatureTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Core.Tests/VexQuerySignatureTests.cs similarity index 100% rename from src/StellaOps.Excititor.Core.Tests/VexQuerySignatureTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Core.Tests/VexQuerySignatureTests.cs diff --git a/src/StellaOps.Excititor.Core.Tests/VexSignalSnapshotTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Core.Tests/VexSignalSnapshotTests.cs similarity index 100% rename from src/StellaOps.Excititor.Core.Tests/VexSignalSnapshotTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Core.Tests/VexSignalSnapshotTests.cs diff --git a/src/StellaOps.Excititor.Export.Tests/ExportEngineTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Export.Tests/ExportEngineTests.cs similarity index 100% rename from src/StellaOps.Excititor.Export.Tests/ExportEngineTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Export.Tests/ExportEngineTests.cs diff --git a/src/StellaOps.Excititor.Export.Tests/FileSystemArtifactStoreTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Export.Tests/FileSystemArtifactStoreTests.cs similarity index 100% rename from src/StellaOps.Excititor.Export.Tests/FileSystemArtifactStoreTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Export.Tests/FileSystemArtifactStoreTests.cs diff --git a/src/StellaOps.Excititor.Export.Tests/MirrorBundlePublisherTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Export.Tests/MirrorBundlePublisherTests.cs similarity index 100% rename from src/StellaOps.Excititor.Export.Tests/MirrorBundlePublisherTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Export.Tests/MirrorBundlePublisherTests.cs diff --git a/src/StellaOps.Excititor.Export.Tests/OfflineBundleArtifactStoreTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Export.Tests/OfflineBundleArtifactStoreTests.cs similarity index 100% rename from src/StellaOps.Excititor.Export.Tests/OfflineBundleArtifactStoreTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Export.Tests/OfflineBundleArtifactStoreTests.cs diff --git a/src/StellaOps.Excititor.Export.Tests/S3ArtifactStoreTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Export.Tests/S3ArtifactStoreTests.cs similarity index 100% rename from src/StellaOps.Excititor.Export.Tests/S3ArtifactStoreTests.cs rename to 
src/Excititor/__Tests/StellaOps.Excititor.Export.Tests/S3ArtifactStoreTests.cs diff --git a/src/StellaOps.Excititor.Export.Tests/StellaOps.Excititor.Export.Tests.csproj b/src/Excititor/__Tests/StellaOps.Excititor.Export.Tests/StellaOps.Excititor.Export.Tests.csproj similarity index 72% rename from src/StellaOps.Excititor.Export.Tests/StellaOps.Excititor.Export.Tests.csproj rename to src/Excititor/__Tests/StellaOps.Excititor.Export.Tests/StellaOps.Excititor.Export.Tests.csproj index 81a8488a..4a5c293b 100644 --- a/src/StellaOps.Excititor.Export.Tests/StellaOps.Excititor.Export.Tests.csproj +++ b/src/Excititor/__Tests/StellaOps.Excititor.Export.Tests/StellaOps.Excititor.Export.Tests.csproj @@ -1,15 +1,16 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <ItemGroup> - <PackageReference Include="System.IO.Abstractions.TestingHelpers" Version="20.0.28" /> - </ItemGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Excititor.Export\StellaOps.Excititor.Export.csproj" /> - </ItemGroup> -</Project> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <ItemGroup> + <PackageReference Include="System.IO.Abstractions.TestingHelpers" Version="20.0.28" /> + </ItemGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Excititor.Export/StellaOps.Excititor.Export.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Excititor.Export.Tests/VexExportCacheServiceTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Export.Tests/VexExportCacheServiceTests.cs similarity index 100% rename from src/StellaOps.Excititor.Export.Tests/VexExportCacheServiceTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Export.Tests/VexExportCacheServiceTests.cs diff --git a/src/StellaOps.Excititor.Formats.CSAF.Tests/CsafNormalizerTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Formats.CSAF.Tests/CsafNormalizerTests.cs similarity index 100% rename from src/StellaOps.Excititor.Formats.CSAF.Tests/CsafNormalizerTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Formats.CSAF.Tests/CsafNormalizerTests.cs diff --git a/src/StellaOps.Excititor.Formats.CSAF.Tests/Fixtures/rhsa-sample.json b/src/Excititor/__Tests/StellaOps.Excititor.Formats.CSAF.Tests/Fixtures/rhsa-sample.json similarity index 100% rename from src/StellaOps.Excititor.Formats.CSAF.Tests/Fixtures/rhsa-sample.json rename to src/Excititor/__Tests/StellaOps.Excititor.Formats.CSAF.Tests/Fixtures/rhsa-sample.json diff --git a/src/StellaOps.Excititor.Formats.CSAF.Tests/StellaOps.Excititor.Formats.CSAF.Tests.csproj b/src/Excititor/__Tests/StellaOps.Excititor.Formats.CSAF.Tests/StellaOps.Excititor.Formats.CSAF.Tests.csproj similarity index 62% rename from src/StellaOps.Excititor.Formats.CSAF.Tests/StellaOps.Excititor.Formats.CSAF.Tests.csproj rename to src/Excititor/__Tests/StellaOps.Excititor.Formats.CSAF.Tests/StellaOps.Excititor.Formats.CSAF.Tests.csproj index 7654565d..5225e0eb 100644 --- a/src/StellaOps.Excititor.Formats.CSAF.Tests/StellaOps.Excititor.Formats.CSAF.Tests.csproj +++ 
b/src/Excititor/__Tests/StellaOps.Excititor.Formats.CSAF.Tests/StellaOps.Excititor.Formats.CSAF.Tests.csproj @@ -1,20 +1,21 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - </PropertyGroup> - <ItemGroup> - <PackageReference Include="FluentAssertions" Version="6.12.0" /> - </ItemGroup> - <ItemGroup> - <Using Include="Xunit" /> - </ItemGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" /> - <ProjectReference Include="..\StellaOps.Excititor.Formats.CSAF\StellaOps.Excititor.Formats.CSAF.csproj" /> - </ItemGroup> - <ItemGroup> - <None Include="Fixtures\**\*" CopyToOutputDirectory="Always" /> - </ItemGroup> -</Project> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + </PropertyGroup> + <ItemGroup> + <PackageReference Include="FluentAssertions" Version="6.12.0" /> + </ItemGroup> + <ItemGroup> + <Using Include="Xunit" /> + </ItemGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Excititor.Core/StellaOps.Excititor.Core.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Excititor.Formats.CSAF/StellaOps.Excititor.Formats.CSAF.csproj" /> + </ItemGroup> + <ItemGroup> + <None Include="Fixtures\**\*" CopyToOutputDirectory="Always" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Excititor.Formats.CycloneDX.Tests/CycloneDxNormalizerTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Formats.CycloneDX.Tests/CycloneDxNormalizerTests.cs similarity index 100% rename from src/StellaOps.Excititor.Formats.CycloneDX.Tests/CycloneDxNormalizerTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Formats.CycloneDX.Tests/CycloneDxNormalizerTests.cs diff --git a/src/StellaOps.Excititor.Formats.CycloneDX.Tests/StellaOps.Excititor.Formats.CycloneDX.Tests.csproj b/src/Excititor/__Tests/StellaOps.Excititor.Formats.CycloneDX.Tests/StellaOps.Excititor.Formats.CycloneDX.Tests.csproj similarity index 56% rename from src/StellaOps.Excititor.Formats.CycloneDX.Tests/StellaOps.Excititor.Formats.CycloneDX.Tests.csproj rename to src/Excititor/__Tests/StellaOps.Excititor.Formats.CycloneDX.Tests/StellaOps.Excititor.Formats.CycloneDX.Tests.csproj index ccc7d16c..4207dd20 100644 --- a/src/StellaOps.Excititor.Formats.CycloneDX.Tests/StellaOps.Excititor.Formats.CycloneDX.Tests.csproj +++ b/src/Excititor/__Tests/StellaOps.Excititor.Formats.CycloneDX.Tests/StellaOps.Excititor.Formats.CycloneDX.Tests.csproj @@ -1,17 +1,18 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - </PropertyGroup> - <ItemGroup> - <PackageReference Include="FluentAssertions" Version="6.12.0" /> - </ItemGroup> - <ItemGroup> - <Using Include="Xunit" /> - </ItemGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" /> - <ProjectReference Include="..\StellaOps.Excititor.Formats.CycloneDX\StellaOps.Excititor.Formats.CycloneDX.csproj" /> - </ItemGroup> -</Project> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <Nullable>enable</Nullable> + 
<ImplicitUsings>enable</ImplicitUsings> + </PropertyGroup> + <ItemGroup> + <PackageReference Include="FluentAssertions" Version="6.12.0" /> + </ItemGroup> + <ItemGroup> + <Using Include="Xunit" /> + </ItemGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Excititor.Core/StellaOps.Excititor.Core.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Excititor.Formats.CycloneDX/StellaOps.Excititor.Formats.CycloneDX.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Excititor.Formats.OpenVEX.Tests/OpenVexNormalizerTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Formats.OpenVEX.Tests/OpenVexNormalizerTests.cs similarity index 100% rename from src/StellaOps.Excititor.Formats.OpenVEX.Tests/OpenVexNormalizerTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Formats.OpenVEX.Tests/OpenVexNormalizerTests.cs diff --git a/src/StellaOps.Excititor.Formats.OpenVEX.Tests/StellaOps.Excititor.Formats.OpenVEX.Tests.csproj b/src/Excititor/__Tests/StellaOps.Excititor.Formats.OpenVEX.Tests/StellaOps.Excititor.Formats.OpenVEX.Tests.csproj similarity index 56% rename from src/StellaOps.Excititor.Formats.OpenVEX.Tests/StellaOps.Excititor.Formats.OpenVEX.Tests.csproj rename to src/Excititor/__Tests/StellaOps.Excititor.Formats.OpenVEX.Tests/StellaOps.Excititor.Formats.OpenVEX.Tests.csproj index 8aa2f2c4..78d173a7 100644 --- a/src/StellaOps.Excititor.Formats.OpenVEX.Tests/StellaOps.Excititor.Formats.OpenVEX.Tests.csproj +++ b/src/Excititor/__Tests/StellaOps.Excititor.Formats.OpenVEX.Tests/StellaOps.Excititor.Formats.OpenVEX.Tests.csproj @@ -1,17 +1,18 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - </PropertyGroup> - <ItemGroup> - <PackageReference Include="FluentAssertions" Version="6.12.0" /> - </ItemGroup> - <ItemGroup> - <Using Include="Xunit" /> - </ItemGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" /> - <ProjectReference Include="..\StellaOps.Excititor.Formats.OpenVEX\StellaOps.Excititor.Formats.OpenVEX.csproj" /> - </ItemGroup> -</Project> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + </PropertyGroup> + <ItemGroup> + <PackageReference Include="FluentAssertions" Version="6.12.0" /> + </ItemGroup> + <ItemGroup> + <Using Include="Xunit" /> + </ItemGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Excititor.Core/StellaOps.Excititor.Core.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Excititor.Formats.OpenVEX/StellaOps.Excititor.Formats.OpenVEX.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Excititor.Policy.Tests/StellaOps.Excititor.Policy.Tests.csproj b/src/Excititor/__Tests/StellaOps.Excititor.Policy.Tests/StellaOps.Excititor.Policy.Tests.csproj similarity index 66% rename from src/StellaOps.Excititor.Policy.Tests/StellaOps.Excititor.Policy.Tests.csproj rename to src/Excititor/__Tests/StellaOps.Excititor.Policy.Tests/StellaOps.Excititor.Policy.Tests.csproj index acd42b0a..12eb2b55 100644 --- a/src/StellaOps.Excititor.Policy.Tests/StellaOps.Excititor.Policy.Tests.csproj +++ b/src/Excititor/__Tests/StellaOps.Excititor.Policy.Tests/StellaOps.Excititor.Policy.Tests.csproj @@ 
-1,12 +1,13 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Excititor.Policy\StellaOps.Excititor.Policy.csproj" /> - </ItemGroup> -</Project> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Excititor.Policy/StellaOps.Excititor.Policy.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Excititor.Policy.Tests/VexPolicyProviderTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Policy.Tests/VexPolicyProviderTests.cs similarity index 100% rename from src/StellaOps.Excititor.Policy.Tests/VexPolicyProviderTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Policy.Tests/VexPolicyProviderTests.cs diff --git a/src/StellaOps.Excititor.Storage.Mongo.Tests/MongoVexCacheMaintenanceTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Storage.Mongo.Tests/MongoVexCacheMaintenanceTests.cs similarity index 100% rename from src/StellaOps.Excititor.Storage.Mongo.Tests/MongoVexCacheMaintenanceTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Storage.Mongo.Tests/MongoVexCacheMaintenanceTests.cs diff --git a/src/StellaOps.Excititor.Storage.Mongo.Tests/MongoVexRepositoryTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Storage.Mongo.Tests/MongoVexRepositoryTests.cs similarity index 100% rename from src/StellaOps.Excititor.Storage.Mongo.Tests/MongoVexRepositoryTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Storage.Mongo.Tests/MongoVexRepositoryTests.cs diff --git a/src/StellaOps.Excititor.Storage.Mongo.Tests/MongoVexSessionConsistencyTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Storage.Mongo.Tests/MongoVexSessionConsistencyTests.cs similarity index 100% rename from src/StellaOps.Excititor.Storage.Mongo.Tests/MongoVexSessionConsistencyTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Storage.Mongo.Tests/MongoVexSessionConsistencyTests.cs diff --git a/src/StellaOps.Excititor.Storage.Mongo.Tests/MongoVexStatementBackfillServiceTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Storage.Mongo.Tests/MongoVexStatementBackfillServiceTests.cs similarity index 100% rename from src/StellaOps.Excititor.Storage.Mongo.Tests/MongoVexStatementBackfillServiceTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Storage.Mongo.Tests/MongoVexStatementBackfillServiceTests.cs diff --git a/src/StellaOps.Excititor.Storage.Mongo.Tests/MongoVexStoreMappingTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Storage.Mongo.Tests/MongoVexStoreMappingTests.cs similarity index 100% rename from src/StellaOps.Excititor.Storage.Mongo.Tests/MongoVexStoreMappingTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Storage.Mongo.Tests/MongoVexStoreMappingTests.cs diff --git a/src/Excititor/__Tests/StellaOps.Excititor.Storage.Mongo.Tests/StellaOps.Excititor.Storage.Mongo.Tests.csproj b/src/Excititor/__Tests/StellaOps.Excititor.Storage.Mongo.Tests/StellaOps.Excititor.Storage.Mongo.Tests.csproj new file mode 100644 
index 00000000..b97cf458 --- /dev/null +++ b/src/Excititor/__Tests/StellaOps.Excititor.Storage.Mongo.Tests/StellaOps.Excititor.Storage.Mongo.Tests.csproj @@ -0,0 +1,16 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Excititor.Storage.Mongo/StellaOps.Excititor.Storage.Mongo.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Excititor.Core/StellaOps.Excititor.Core.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Excititor.Policy/StellaOps.Excititor.Policy.csproj" /> + <ProjectReference Include="../../../Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Excititor.Storage.Mongo.Tests/VexMongoMigrationRunnerTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Storage.Mongo.Tests/VexMongoMigrationRunnerTests.cs similarity index 100% rename from src/StellaOps.Excititor.Storage.Mongo.Tests/VexMongoMigrationRunnerTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Storage.Mongo.Tests/VexMongoMigrationRunnerTests.cs diff --git a/src/StellaOps.Excititor.WebService.Tests/IngestEndpointsTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.WebService.Tests/IngestEndpointsTests.cs similarity index 97% rename from src/StellaOps.Excititor.WebService.Tests/IngestEndpointsTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.WebService.Tests/IngestEndpointsTests.cs index eb91502b..e02b4bdd 100644 --- a/src/StellaOps.Excititor.WebService.Tests/IngestEndpointsTests.cs +++ b/src/Excititor/__Tests/StellaOps.Excititor.WebService.Tests/IngestEndpointsTests.cs @@ -1,274 +1,274 @@ -using System.Collections.Immutable; -using System.IO; -using System.Security.Claims; -using System.Text.Json; -using Microsoft.AspNetCore.Http; -using Microsoft.AspNetCore.Http.HttpResults; -using Microsoft.Extensions.DependencyInjection; -using StellaOps.Excititor.WebService.Endpoints; -using StellaOps.Excititor.WebService.Services; - -namespace StellaOps.Excititor.WebService.Tests; - -public sealed class IngestEndpointsTests -{ - private readonly FakeIngestOrchestrator _orchestrator = new(); - private readonly TimeProvider _timeProvider = TimeProvider.System; - - [Fact] - public async Task InitEndpoint_ReturnsUnauthorized_WhenMissingToken() - { - var httpContext = CreateHttpContext(); - var request = new IngestEndpoints.ExcititorInitRequest(null, false); - - var result = await IngestEndpoints.HandleInitAsync(httpContext, request, _orchestrator, _timeProvider, CancellationToken.None); - Assert.IsType<UnauthorizedHttpResult>(result); - } - - [Fact] - public async Task InitEndpoint_ReturnsForbidden_WhenScopeMissing() - { - var httpContext = CreateHttpContext("vex.read"); - var request = new IngestEndpoints.ExcititorInitRequest(null, false); - - var result = await IngestEndpoints.HandleInitAsync(httpContext, request, _orchestrator, _timeProvider, CancellationToken.None); - Assert.IsType<ForbidHttpResult>(result); - } - - [Fact] - public async Task InitEndpoint_NormalizesProviders_AndReturnsSummary() - { - var httpContext = CreateHttpContext("vex.admin"); - var request = new IngestEndpoints.ExcititorInitRequest(new[] { " 
suse ", "redhat", "REDHAT" }, true); - var started = DateTimeOffset.Parse("2025-10-20T12:00:00Z"); - var completed = started.AddMinutes(2); - _orchestrator.InitFactory = options => new InitSummary( - Guid.Parse("9a5eb53c-3118-4f78-991e-7d2c1af92a14"), - started, - completed, - ImmutableArray.Create( - new InitProviderResult("redhat", "Red Hat", "succeeded", TimeSpan.FromSeconds(12), null), - new InitProviderResult("suse", "SUSE", "failed", TimeSpan.FromSeconds(7), "unreachable"))); - - var result = await IngestEndpoints.HandleInitAsync(httpContext, request, _orchestrator, _timeProvider, CancellationToken.None); - var ok = Assert.IsType<Ok<object>>(result); - Assert.Equal(new[] { "redhat", "suse" }, _orchestrator.LastInitOptions?.Providers); - Assert.True(_orchestrator.LastInitOptions?.Resume); - - using var document = JsonDocument.Parse(JsonSerializer.Serialize(ok.Value)); - Assert.Equal("Initialized 2 provider(s); 1 succeeded, 1 failed.", document.RootElement.GetProperty("message").GetString()); - } - - [Fact] - public async Task RunEndpoint_ReturnsBadRequest_WhenSinceInvalid() - { - var httpContext = CreateHttpContext("vex.admin"); - var request = new IngestEndpoints.ExcititorIngestRunRequest(new[] { "redhat" }, "not-a-date", null, false); - - var result = await IngestEndpoints.HandleRunAsync(httpContext, request, _orchestrator, _timeProvider, CancellationToken.None); - var bad = Assert.IsType<BadRequest<object>>(result); - using var document = JsonDocument.Parse(JsonSerializer.Serialize(bad.Value)); - Assert.Contains("Invalid 'since'", document.RootElement.GetProperty("message").GetString()); - } - - [Fact] - public async Task RunEndpoint_ReturnsBadRequest_WhenWindowInvalid() - { - var httpContext = CreateHttpContext("vex.admin"); - var request = new IngestEndpoints.ExcititorIngestRunRequest(Array.Empty<string>(), null, "-01:00:00", false); - - var result = await IngestEndpoints.HandleRunAsync(httpContext, request, _orchestrator, _timeProvider, CancellationToken.None); - var bad = Assert.IsType<BadRequest<object>>(result); - using var document = JsonDocument.Parse(JsonSerializer.Serialize(bad.Value)); - Assert.Contains("Invalid duration", document.RootElement.GetProperty("message").GetString()); - } - - [Fact] - public async Task RunEndpoint_PassesOptionsToOrchestrator() - { - var httpContext = CreateHttpContext("vex.admin"); - var started = DateTimeOffset.Parse("2025-10-20T14:00:00Z"); - var completed = started.AddMinutes(5); - _orchestrator.RunFactory = options => new IngestRunSummary( - Guid.Parse("65bbfa25-82fd-41da-8b6b-9d8bb1e2bb5f"), - started, - completed, - ImmutableArray.Create( - new ProviderRunResult( - "redhat", - "succeeded", - 12, - 42, - started, - completed, - completed - started, - "sha256:abc", - completed.AddHours(-1), - "cp1", - null, - options.Since))); - - var request = new IngestEndpoints.ExcititorIngestRunRequest(new[] { "redhat" }, "2025-10-19T00:00:00Z", "1.00:00:00", true); - var result = await IngestEndpoints.HandleRunAsync(httpContext, request, _orchestrator, _timeProvider, CancellationToken.None); - var ok = Assert.IsType<Ok<object>>(result); - - Assert.NotNull(_orchestrator.LastRunOptions); - Assert.Equal(new[] { "redhat" }, _orchestrator.LastRunOptions!.Providers); - Assert.True(_orchestrator.LastRunOptions.Force); - Assert.Equal(TimeSpan.FromDays(1), _orchestrator.LastRunOptions.Window); - - using var document = JsonDocument.Parse(JsonSerializer.Serialize(ok.Value)); - Assert.Equal("cp1", 
document.RootElement.GetProperty("providers")[0].GetProperty("checkpoint").GetString()); - } - - [Fact] - public async Task ResumeEndpoint_PassesCheckpointToOrchestrator() - { - var httpContext = CreateHttpContext("vex.admin"); - var started = DateTimeOffset.Parse("2025-10-20T16:00:00Z"); - var completed = started.AddMinutes(2); - _orchestrator.ResumeFactory = options => new IngestRunSummary( - Guid.Parse("88407f25-4b3f-434d-8f8e-1c7f4925c37b"), - started, - completed, - ImmutableArray.Create( - new ProviderRunResult( - "suse", - "succeeded", - 5, - 10, - started, - completed, - completed - started, - null, - null, - options.Checkpoint, - null, - DateTimeOffset.UtcNow.AddDays(-1)))); - - var request = new IngestEndpoints.ExcititorIngestResumeRequest(new[] { "suse" }, "resume-token"); - var result = await IngestEndpoints.HandleResumeAsync(httpContext, request, _orchestrator, _timeProvider, CancellationToken.None); - Assert.IsType<Ok<object>>(result); - Assert.Equal("resume-token", _orchestrator.LastResumeOptions?.Checkpoint); - } - - [Fact] - public async Task ReconcileEndpoint_ReturnsBadRequest_WhenMaxAgeInvalid() - { - var httpContext = CreateHttpContext("vex.admin"); - var request = new IngestEndpoints.ExcititorReconcileRequest(Array.Empty<string>(), "invalid"); - - var result = await IngestEndpoints.HandleReconcileAsync(httpContext, request, _orchestrator, _timeProvider, CancellationToken.None); - var bad = Assert.IsType<BadRequest<object>>(result); - using var document = JsonDocument.Parse(JsonSerializer.Serialize(bad.Value)); - Assert.Contains("Invalid duration", document.RootElement.GetProperty("message").GetString()); - } - - [Fact] - public async Task ReconcileEndpoint_PassesOptionsAndReturnsSummary() - { - var httpContext = CreateHttpContext("vex.admin"); - var started = DateTimeOffset.Parse("2025-10-20T18:00:00Z"); - var completed = started.AddMinutes(4); - _orchestrator.ReconcileFactory = options => new ReconcileSummary( - Guid.Parse("a2c2cfe6-c21a-4a62-9db7-2ed2792f4e2d"), - started, - completed, - ImmutableArray.Create( - new ReconcileProviderResult( - "ubuntu", - "succeeded", - "reconciled", - started.AddDays(-2), - started - TimeSpan.FromDays(3), - 20, - 18, - null))); - - var request = new IngestEndpoints.ExcititorReconcileRequest(new[] { "ubuntu" }, "2.00:00:00"); - var result = await IngestEndpoints.HandleReconcileAsync(httpContext, request, _orchestrator, _timeProvider, CancellationToken.None); - var ok = Assert.IsType<Ok<object>>(result); - - Assert.Equal(TimeSpan.FromDays(2), _orchestrator.LastReconcileOptions?.MaxAge); - using var document = JsonDocument.Parse(JsonSerializer.Serialize(ok.Value)); - Assert.Equal("reconciled", document.RootElement.GetProperty("providers")[0].GetProperty("action").GetString()); - } - - private static DefaultHttpContext CreateHttpContext(params string[] scopes) - { - var context = new DefaultHttpContext - { - RequestServices = new ServiceCollection().BuildServiceProvider(), - Response = { Body = new MemoryStream() } - }; - - if (scopes.Length > 0) - { - var claims = new List<Claim> { new Claim(ClaimTypes.NameIdentifier, "test-user") }; - claims.Add(new Claim("scope", string.Join(' ', scopes))); - var identity = new ClaimsIdentity(claims, "Test"); - context.User = new ClaimsPrincipal(identity); - } - else - { - context.User = new ClaimsPrincipal(new ClaimsIdentity()); - } - - return context; - } - - private sealed class FakeIngestOrchestrator : IVexIngestOrchestrator - { - public IngestInitOptions? 
LastInitOptions { get; private set; } - public IngestRunOptions? LastRunOptions { get; private set; } - public IngestResumeOptions? LastResumeOptions { get; private set; } - public ReconcileOptions? LastReconcileOptions { get; private set; } - - public Func<IngestInitOptions, InitSummary>? InitFactory { get; set; } - public Func<IngestRunOptions, IngestRunSummary>? RunFactory { get; set; } - public Func<IngestResumeOptions, IngestRunSummary>? ResumeFactory { get; set; } - public Func<ReconcileOptions, ReconcileSummary>? ReconcileFactory { get; set; } - - public Task<InitSummary> InitializeAsync(IngestInitOptions options, CancellationToken cancellationToken) - { - LastInitOptions = options; - return Task.FromResult(InitFactory is null ? CreateDefaultInitSummary() : InitFactory(options)); - } - - public Task<IngestRunSummary> RunAsync(IngestRunOptions options, CancellationToken cancellationToken) - { - LastRunOptions = options; - return Task.FromResult(RunFactory is null ? CreateDefaultRunSummary() : RunFactory(options)); - } - - public Task<IngestRunSummary> ResumeAsync(IngestResumeOptions options, CancellationToken cancellationToken) - { - LastResumeOptions = options; - return Task.FromResult(ResumeFactory is null ? CreateDefaultRunSummary() : ResumeFactory(options)); - } - - public Task<ReconcileSummary> ReconcileAsync(ReconcileOptions options, CancellationToken cancellationToken) - { - LastReconcileOptions = options; - return Task.FromResult(ReconcileFactory is null ? CreateDefaultReconcileSummary() : ReconcileFactory(options)); - } - - private static InitSummary CreateDefaultInitSummary() - { - var now = DateTimeOffset.UtcNow; - return new InitSummary(Guid.Empty, now, now, ImmutableArray<InitProviderResult>.Empty); - } - - private static IngestRunSummary CreateDefaultRunSummary() - { - var now = DateTimeOffset.UtcNow; - return new IngestRunSummary(Guid.Empty, now, now, ImmutableArray<ProviderRunResult>.Empty); - } - - private static ReconcileSummary CreateDefaultReconcileSummary() - { - var now = DateTimeOffset.UtcNow; - return new ReconcileSummary(Guid.Empty, now, now, ImmutableArray<ReconcileProviderResult>.Empty); - } - } -} +using System.Collections.Immutable; +using System.IO; +using System.Security.Claims; +using System.Text.Json; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Http.HttpResults; +using Microsoft.Extensions.DependencyInjection; +using StellaOps.Excititor.WebService.Endpoints; +using StellaOps.Excititor.WebService.Services; + +namespace StellaOps.Excititor.WebService.Tests; + +public sealed class IngestEndpointsTests +{ + private readonly FakeIngestOrchestrator _orchestrator = new(); + private readonly TimeProvider _timeProvider = TimeProvider.System; + + [Fact] + public async Task InitEndpoint_ReturnsUnauthorized_WhenMissingToken() + { + var httpContext = CreateHttpContext(); + var request = new IngestEndpoints.ExcititorInitRequest(null, false); + + var result = await IngestEndpoints.HandleInitAsync(httpContext, request, _orchestrator, _timeProvider, CancellationToken.None); + Assert.IsType<UnauthorizedHttpResult>(result); + } + + [Fact] + public async Task InitEndpoint_ReturnsForbidden_WhenScopeMissing() + { + var httpContext = CreateHttpContext("vex.read"); + var request = new IngestEndpoints.ExcititorInitRequest(null, false); + + var result = await IngestEndpoints.HandleInitAsync(httpContext, request, _orchestrator, _timeProvider, CancellationToken.None); + Assert.IsType<ForbidHttpResult>(result); + } + + [Fact] + public async Task 
InitEndpoint_NormalizesProviders_AndReturnsSummary() + { + var httpContext = CreateHttpContext("vex.admin"); + var request = new IngestEndpoints.ExcititorInitRequest(new[] { " suse ", "redhat", "REDHAT" }, true); + var started = DateTimeOffset.Parse("2025-10-20T12:00:00Z"); + var completed = started.AddMinutes(2); + _orchestrator.InitFactory = options => new InitSummary( + Guid.Parse("9a5eb53c-3118-4f78-991e-7d2c1af92a14"), + started, + completed, + ImmutableArray.Create( + new InitProviderResult("redhat", "Red Hat", "succeeded", TimeSpan.FromSeconds(12), null), + new InitProviderResult("suse", "SUSE", "failed", TimeSpan.FromSeconds(7), "unreachable"))); + + var result = await IngestEndpoints.HandleInitAsync(httpContext, request, _orchestrator, _timeProvider, CancellationToken.None); + var ok = Assert.IsType<Ok<object>>(result); + Assert.Equal(new[] { "redhat", "suse" }, _orchestrator.LastInitOptions?.Providers); + Assert.True(_orchestrator.LastInitOptions?.Resume); + + using var document = JsonDocument.Parse(JsonSerializer.Serialize(ok.Value)); + Assert.Equal("Initialized 2 provider(s); 1 succeeded, 1 failed.", document.RootElement.GetProperty("message").GetString()); + } + + [Fact] + public async Task RunEndpoint_ReturnsBadRequest_WhenSinceInvalid() + { + var httpContext = CreateHttpContext("vex.admin"); + var request = new IngestEndpoints.ExcititorIngestRunRequest(new[] { "redhat" }, "not-a-date", null, false); + + var result = await IngestEndpoints.HandleRunAsync(httpContext, request, _orchestrator, _timeProvider, CancellationToken.None); + var bad = Assert.IsType<BadRequest<object>>(result); + using var document = JsonDocument.Parse(JsonSerializer.Serialize(bad.Value)); + Assert.Contains("Invalid 'since'", document.RootElement.GetProperty("message").GetString()); + } + + [Fact] + public async Task RunEndpoint_ReturnsBadRequest_WhenWindowInvalid() + { + var httpContext = CreateHttpContext("vex.admin"); + var request = new IngestEndpoints.ExcititorIngestRunRequest(Array.Empty<string>(), null, "-01:00:00", false); + + var result = await IngestEndpoints.HandleRunAsync(httpContext, request, _orchestrator, _timeProvider, CancellationToken.None); + var bad = Assert.IsType<BadRequest<object>>(result); + using var document = JsonDocument.Parse(JsonSerializer.Serialize(bad.Value)); + Assert.Contains("Invalid duration", document.RootElement.GetProperty("message").GetString()); + } + + [Fact] + public async Task RunEndpoint_PassesOptionsToOrchestrator() + { + var httpContext = CreateHttpContext("vex.admin"); + var started = DateTimeOffset.Parse("2025-10-20T14:00:00Z"); + var completed = started.AddMinutes(5); + _orchestrator.RunFactory = options => new IngestRunSummary( + Guid.Parse("65bbfa25-82fd-41da-8b6b-9d8bb1e2bb5f"), + started, + completed, + ImmutableArray.Create( + new ProviderRunResult( + "redhat", + "succeeded", + 12, + 42, + started, + completed, + completed - started, + "sha256:abc", + completed.AddHours(-1), + "cp1", + null, + options.Since))); + + var request = new IngestEndpoints.ExcititorIngestRunRequest(new[] { "redhat" }, "2025-10-19T00:00:00Z", "1.00:00:00", true); + var result = await IngestEndpoints.HandleRunAsync(httpContext, request, _orchestrator, _timeProvider, CancellationToken.None); + var ok = Assert.IsType<Ok<object>>(result); + + Assert.NotNull(_orchestrator.LastRunOptions); + Assert.Equal(new[] { "redhat" }, _orchestrator.LastRunOptions!.Providers); + Assert.True(_orchestrator.LastRunOptions.Force); + Assert.Equal(TimeSpan.FromDays(1), 
_orchestrator.LastRunOptions.Window); + + using var document = JsonDocument.Parse(JsonSerializer.Serialize(ok.Value)); + Assert.Equal("cp1", document.RootElement.GetProperty("providers")[0].GetProperty("checkpoint").GetString()); + } + + [Fact] + public async Task ResumeEndpoint_PassesCheckpointToOrchestrator() + { + var httpContext = CreateHttpContext("vex.admin"); + var started = DateTimeOffset.Parse("2025-10-20T16:00:00Z"); + var completed = started.AddMinutes(2); + _orchestrator.ResumeFactory = options => new IngestRunSummary( + Guid.Parse("88407f25-4b3f-434d-8f8e-1c7f4925c37b"), + started, + completed, + ImmutableArray.Create( + new ProviderRunResult( + "suse", + "succeeded", + 5, + 10, + started, + completed, + completed - started, + null, + null, + options.Checkpoint, + null, + DateTimeOffset.UtcNow.AddDays(-1)))); + + var request = new IngestEndpoints.ExcititorIngestResumeRequest(new[] { "suse" }, "resume-token"); + var result = await IngestEndpoints.HandleResumeAsync(httpContext, request, _orchestrator, _timeProvider, CancellationToken.None); + Assert.IsType<Ok<object>>(result); + Assert.Equal("resume-token", _orchestrator.LastResumeOptions?.Checkpoint); + } + + [Fact] + public async Task ReconcileEndpoint_ReturnsBadRequest_WhenMaxAgeInvalid() + { + var httpContext = CreateHttpContext("vex.admin"); + var request = new IngestEndpoints.ExcititorReconcileRequest(Array.Empty<string>(), "invalid"); + + var result = await IngestEndpoints.HandleReconcileAsync(httpContext, request, _orchestrator, _timeProvider, CancellationToken.None); + var bad = Assert.IsType<BadRequest<object>>(result); + using var document = JsonDocument.Parse(JsonSerializer.Serialize(bad.Value)); + Assert.Contains("Invalid duration", document.RootElement.GetProperty("message").GetString()); + } + + [Fact] + public async Task ReconcileEndpoint_PassesOptionsAndReturnsSummary() + { + var httpContext = CreateHttpContext("vex.admin"); + var started = DateTimeOffset.Parse("2025-10-20T18:00:00Z"); + var completed = started.AddMinutes(4); + _orchestrator.ReconcileFactory = options => new ReconcileSummary( + Guid.Parse("a2c2cfe6-c21a-4a62-9db7-2ed2792f4e2d"), + started, + completed, + ImmutableArray.Create( + new ReconcileProviderResult( + "ubuntu", + "succeeded", + "reconciled", + started.AddDays(-2), + started - TimeSpan.FromDays(3), + 20, + 18, + null))); + + var request = new IngestEndpoints.ExcititorReconcileRequest(new[] { "ubuntu" }, "2.00:00:00"); + var result = await IngestEndpoints.HandleReconcileAsync(httpContext, request, _orchestrator, _timeProvider, CancellationToken.None); + var ok = Assert.IsType<Ok<object>>(result); + + Assert.Equal(TimeSpan.FromDays(2), _orchestrator.LastReconcileOptions?.MaxAge); + using var document = JsonDocument.Parse(JsonSerializer.Serialize(ok.Value)); + Assert.Equal("reconciled", document.RootElement.GetProperty("providers")[0].GetProperty("action").GetString()); + } + + private static DefaultHttpContext CreateHttpContext(params string[] scopes) + { + var context = new DefaultHttpContext + { + RequestServices = new ServiceCollection().BuildServiceProvider(), + Response = { Body = new MemoryStream() } + }; + + if (scopes.Length > 0) + { + var claims = new List<Claim> { new Claim(ClaimTypes.NameIdentifier, "test-user") }; + claims.Add(new Claim("scope", string.Join(' ', scopes))); + var identity = new ClaimsIdentity(claims, "Test"); + context.User = new ClaimsPrincipal(identity); + } + else + { + context.User = new ClaimsPrincipal(new ClaimsIdentity()); + } + + return context; + } + + 
private sealed class FakeIngestOrchestrator : IVexIngestOrchestrator + { + public IngestInitOptions? LastInitOptions { get; private set; } + public IngestRunOptions? LastRunOptions { get; private set; } + public IngestResumeOptions? LastResumeOptions { get; private set; } + public ReconcileOptions? LastReconcileOptions { get; private set; } + + public Func<IngestInitOptions, InitSummary>? InitFactory { get; set; } + public Func<IngestRunOptions, IngestRunSummary>? RunFactory { get; set; } + public Func<IngestResumeOptions, IngestRunSummary>? ResumeFactory { get; set; } + public Func<ReconcileOptions, ReconcileSummary>? ReconcileFactory { get; set; } + + public Task<InitSummary> InitializeAsync(IngestInitOptions options, CancellationToken cancellationToken) + { + LastInitOptions = options; + return Task.FromResult(InitFactory is null ? CreateDefaultInitSummary() : InitFactory(options)); + } + + public Task<IngestRunSummary> RunAsync(IngestRunOptions options, CancellationToken cancellationToken) + { + LastRunOptions = options; + return Task.FromResult(RunFactory is null ? CreateDefaultRunSummary() : RunFactory(options)); + } + + public Task<IngestRunSummary> ResumeAsync(IngestResumeOptions options, CancellationToken cancellationToken) + { + LastResumeOptions = options; + return Task.FromResult(ResumeFactory is null ? CreateDefaultRunSummary() : ResumeFactory(options)); + } + + public Task<ReconcileSummary> ReconcileAsync(ReconcileOptions options, CancellationToken cancellationToken) + { + LastReconcileOptions = options; + return Task.FromResult(ReconcileFactory is null ? CreateDefaultReconcileSummary() : ReconcileFactory(options)); + } + + private static InitSummary CreateDefaultInitSummary() + { + var now = DateTimeOffset.UtcNow; + return new InitSummary(Guid.Empty, now, now, ImmutableArray<InitProviderResult>.Empty); + } + + private static IngestRunSummary CreateDefaultRunSummary() + { + var now = DateTimeOffset.UtcNow; + return new IngestRunSummary(Guid.Empty, now, now, ImmutableArray<ProviderRunResult>.Empty); + } + + private static ReconcileSummary CreateDefaultReconcileSummary() + { + var now = DateTimeOffset.UtcNow; + return new ReconcileSummary(Guid.Empty, now, now, ImmutableArray<ReconcileProviderResult>.Empty); + } + } +} diff --git a/src/StellaOps.Excititor.WebService.Tests/MirrorEndpointsTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.WebService.Tests/MirrorEndpointsTests.cs similarity index 97% rename from src/StellaOps.Excititor.WebService.Tests/MirrorEndpointsTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.WebService.Tests/MirrorEndpointsTests.cs index e8f31e3d..92aee8e0 100644 --- a/src/StellaOps.Excititor.WebService.Tests/MirrorEndpointsTests.cs +++ b/src/Excititor/__Tests/StellaOps.Excititor.WebService.Tests/MirrorEndpointsTests.cs @@ -1,212 +1,212 @@ -using System.Collections.Concurrent; -using System.Collections.Immutable; -using System.Net; -using System.Net.Http.Json; -using System.Text.Json; -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.DependencyInjection.Extensions; -using EphemeralMongo; -using MongoRunner = EphemeralMongo.MongoRunner; -using MongoRunnerOptions = EphemeralMongo.MongoRunnerOptions; -using MongoDB.Driver; -using StellaOps.Excititor.Core; -using StellaOps.Excititor.Connectors.Abstractions; -using StellaOps.Excititor.Export; -using StellaOps.Excititor.Policy; +using System.Collections.Concurrent; +using System.Collections.Immutable; +using System.Net; +using 
System.Net.Http.Json; +using System.Text.Json; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using EphemeralMongo; +using MongoRunner = EphemeralMongo.MongoRunner; +using MongoRunnerOptions = EphemeralMongo.MongoRunnerOptions; +using MongoDB.Driver; +using StellaOps.Excititor.Core; +using StellaOps.Excititor.Connectors.Abstractions; +using StellaOps.Excititor.Export; +using StellaOps.Excititor.Policy; using StellaOps.Excititor.Storage.Mongo; - -namespace StellaOps.Excititor.WebService.Tests; - -public sealed class MirrorEndpointsTests : IDisposable -{ - private readonly TestWebApplicationFactory _factory; - private readonly IMongoRunner _runner; - - public MirrorEndpointsTests() - { - _runner = MongoRunner.Run(new MongoRunnerOptions { UseSingleNodeReplicaSet = true }); - _factory = new TestWebApplicationFactory( - configureConfiguration: configuration => - { - var data = new Dictionary<string, string?> - { - ["Excititor:Storage:Mongo:ConnectionString"] = _runner.ConnectionString, - ["Excititor:Storage:Mongo:DatabaseName"] = "mirror-tests", - [$"{MirrorDistributionOptions.SectionName}:Domains:0:Id"] = "primary", - [$"{MirrorDistributionOptions.SectionName}:Domains:0:DisplayName"] = "Primary Mirror", - [$"{MirrorDistributionOptions.SectionName}:Domains:0:MaxIndexRequestsPerHour"] = "1000", - [$"{MirrorDistributionOptions.SectionName}:Domains:0:MaxDownloadRequestsPerHour"] = "1000", - [$"{MirrorDistributionOptions.SectionName}:Domains:0:Exports:0:Key"] = "consensus", - [$"{MirrorDistributionOptions.SectionName}:Domains:0:Exports:0:Format"] = "json", - [$"{MirrorDistributionOptions.SectionName}:Domains:0:Exports:0:Filters:vulnId"] = "CVE-2025-0001", - [$"{MirrorDistributionOptions.SectionName}:Domains:0:Exports:0:Filters:productKey"] = "pkg:test/demo", - }; - - configuration.AddInMemoryCollection(data!); - }, - configureServices: services => - { - TestServiceOverrides.Apply(services); - services.RemoveAll<IVexExportStore>(); - services.AddSingleton<IVexExportStore>(provider => - { - var timeProvider = provider.GetRequiredService<TimeProvider>(); - return new FakeExportStore(timeProvider); - }); - services.RemoveAll<IVexArtifactStore>(); - services.AddSingleton<IVexArtifactStore>(_ => new FakeArtifactStore()); - services.AddSingleton(new VexConnectorDescriptor("excititor:redhat", VexProviderKind.Distro, "Red Hat CSAF")); - services.AddSingleton<StellaOps.Excititor.Attestation.Signing.IVexSigner, FakeSigner>(); - services.AddSingleton<StellaOps.Excititor.Policy.IVexPolicyEvaluator, FakePolicyEvaluator>(); - }); - } - - [Fact] - public async Task ListDomains_ReturnsConfiguredDomain() - { - var client = _factory.CreateClient(); - var response = await client.GetAsync("/excititor/mirror/domains"); - response.EnsureSuccessStatusCode(); - - using var document = JsonDocument.Parse(await response.Content.ReadAsStringAsync()); - var domains = document.RootElement.GetProperty("domains"); - Assert.Equal(1, domains.GetArrayLength()); - Assert.Equal("primary", domains[0].GetProperty("id").GetString()); - } - - [Fact] - public async Task DomainIndex_ReturnsManifestMetadata() - { - var client = _factory.CreateClient(); - var response = await client.GetAsync("/excititor/mirror/domains/primary/index"); - response.EnsureSuccessStatusCode(); - - using var document = JsonDocument.Parse(await response.Content.ReadAsStringAsync()); - var exports = document.RootElement.GetProperty("exports"); - Assert.Equal(1, 
exports.GetArrayLength()); - var entry = exports[0]; - Assert.Equal("consensus", entry.GetProperty("exportKey").GetString()); - Assert.Equal("exports/20251019T000000000Z/abcdef", entry.GetProperty("exportId").GetString()); - var artifact = entry.GetProperty("artifact"); - Assert.Equal("sha256", artifact.GetProperty("algorithm").GetString()); - Assert.Equal("deadbeef", artifact.GetProperty("digest").GetString()); - } - - [Fact] - public async Task Download_ReturnsArtifactContent() - { - var client = _factory.CreateClient(); - var response = await client.GetAsync("/excititor/mirror/domains/primary/exports/consensus/download"); - response.EnsureSuccessStatusCode(); - Assert.Equal("application/json", response.Content.Headers.ContentType?.MediaType); - var payload = await response.Content.ReadAsStringAsync(); - Assert.Equal("{\"status\":\"ok\"}", payload); - } - - public void Dispose() - { - _factory.Dispose(); - _runner.Dispose(); - } - - private sealed class FakeExportStore : IVexExportStore - { - private readonly ConcurrentDictionary<(string Signature, VexExportFormat Format), VexExportManifest> _manifests = new(); - - public FakeExportStore(TimeProvider timeProvider) - { - var filters = new[] - { - new VexQueryFilter("vulnId", "CVE-2025-0001"), - new VexQueryFilter("productKey", "pkg:test/demo"), - }; - - var query = VexQuery.Create(filters, Enumerable.Empty<VexQuerySort>()); - var signature = VexQuerySignature.FromQuery(query); - var createdAt = new DateTimeOffset(2025, 10, 19, 0, 0, 0, TimeSpan.Zero); - - var manifest = new VexExportManifest( - "exports/20251019T000000000Z/abcdef", - signature, - VexExportFormat.Json, - createdAt, - new VexContentAddress("sha256", "deadbeef"), - 1, - new[] { "primary" }, - fromCache: false, - consensusRevision: "rev-1", - attestation: new VexAttestationMetadata("https://stella-ops.org/attestations/vex-export"), - sizeBytes: 16); - - _manifests.TryAdd((signature.Value, VexExportFormat.Json), manifest); - - // Seed artifact content for download test. - FakeArtifactStore.Seed(manifest.Artifact, "{\"status\":\"ok\"}"); - } - - public ValueTask<VexExportManifest?> FindAsync(VexQuerySignature signature, VexExportFormat format, CancellationToken cancellationToken, IClientSessionHandle? session = null) - { - _manifests.TryGetValue((signature.Value, format), out var manifest); - return ValueTask.FromResult(manifest); - } - - public ValueTask SaveAsync(VexExportManifest manifest, CancellationToken cancellationToken, IClientSessionHandle? 
session = null) - => ValueTask.CompletedTask; - } - - private sealed class FakeArtifactStore : IVexArtifactStore - { - private static readonly ConcurrentDictionary<VexContentAddress, byte[]> Content = new(); - - public static void Seed(VexContentAddress contentAddress, string payload) - { - var bytes = System.Text.Encoding.UTF8.GetBytes(payload); - Content[contentAddress] = bytes; - } - - public ValueTask<VexStoredArtifact> SaveAsync(VexExportArtifact artifact, CancellationToken cancellationToken) - { - Content[artifact.ContentAddress] = artifact.Content.ToArray(); - return ValueTask.FromResult(new VexStoredArtifact(artifact.ContentAddress, "memory://artifact", artifact.Content.Length, artifact.Metadata)); - } - - public ValueTask DeleteAsync(VexContentAddress contentAddress, CancellationToken cancellationToken) - { - Content.TryRemove(contentAddress, out _); - return ValueTask.CompletedTask; - } - - public ValueTask<Stream?> OpenReadAsync(VexContentAddress contentAddress, CancellationToken cancellationToken) - { - if (!Content.TryGetValue(contentAddress, out var bytes)) - { - return ValueTask.FromResult<Stream?>(null); - } - - return ValueTask.FromResult<Stream?>(new MemoryStream(bytes, writable: false)); - } - } - - private sealed class FakeSigner : StellaOps.Excititor.Attestation.Signing.IVexSigner - { - public ValueTask<StellaOps.Excititor.Attestation.Signing.VexSignedPayload> SignAsync(ReadOnlyMemory<byte> payload, CancellationToken cancellationToken) - => ValueTask.FromResult(new StellaOps.Excititor.Attestation.Signing.VexSignedPayload("signature", "key")); - } - - private sealed class FakePolicyEvaluator : StellaOps.Excititor.Policy.IVexPolicyEvaluator - { - public string Version => "test"; - - public VexPolicySnapshot Snapshot => VexPolicySnapshot.Default; - - public double GetProviderWeight(VexProvider provider) => 1.0; - - public bool IsClaimEligible(VexClaim claim, VexProvider provider, out string? 
rejectionReason) - { - rejectionReason = null; - return true; - } - } - -} + +namespace StellaOps.Excititor.WebService.Tests; + +public sealed class MirrorEndpointsTests : IDisposable +{ + private readonly TestWebApplicationFactory _factory; + private readonly IMongoRunner _runner; + + public MirrorEndpointsTests() + { + _runner = MongoRunner.Run(new MongoRunnerOptions { UseSingleNodeReplicaSet = true }); + _factory = new TestWebApplicationFactory( + configureConfiguration: configuration => + { + var data = new Dictionary<string, string?> + { + ["Excititor:Storage:Mongo:ConnectionString"] = _runner.ConnectionString, + ["Excititor:Storage:Mongo:DatabaseName"] = "mirror-tests", + [$"{MirrorDistributionOptions.SectionName}:Domains:0:Id"] = "primary", + [$"{MirrorDistributionOptions.SectionName}:Domains:0:DisplayName"] = "Primary Mirror", + [$"{MirrorDistributionOptions.SectionName}:Domains:0:MaxIndexRequestsPerHour"] = "1000", + [$"{MirrorDistributionOptions.SectionName}:Domains:0:MaxDownloadRequestsPerHour"] = "1000", + [$"{MirrorDistributionOptions.SectionName}:Domains:0:Exports:0:Key"] = "consensus", + [$"{MirrorDistributionOptions.SectionName}:Domains:0:Exports:0:Format"] = "json", + [$"{MirrorDistributionOptions.SectionName}:Domains:0:Exports:0:Filters:vulnId"] = "CVE-2025-0001", + [$"{MirrorDistributionOptions.SectionName}:Domains:0:Exports:0:Filters:productKey"] = "pkg:test/demo", + }; + + configuration.AddInMemoryCollection(data!); + }, + configureServices: services => + { + TestServiceOverrides.Apply(services); + services.RemoveAll<IVexExportStore>(); + services.AddSingleton<IVexExportStore>(provider => + { + var timeProvider = provider.GetRequiredService<TimeProvider>(); + return new FakeExportStore(timeProvider); + }); + services.RemoveAll<IVexArtifactStore>(); + services.AddSingleton<IVexArtifactStore>(_ => new FakeArtifactStore()); + services.AddSingleton(new VexConnectorDescriptor("excititor:redhat", VexProviderKind.Distro, "Red Hat CSAF")); + services.AddSingleton<StellaOps.Excititor.Attestation.Signing.IVexSigner, FakeSigner>(); + services.AddSingleton<StellaOps.Excititor.Policy.IVexPolicyEvaluator, FakePolicyEvaluator>(); + }); + } + + [Fact] + public async Task ListDomains_ReturnsConfiguredDomain() + { + var client = _factory.CreateClient(); + var response = await client.GetAsync("/excititor/mirror/domains"); + response.EnsureSuccessStatusCode(); + + using var document = JsonDocument.Parse(await response.Content.ReadAsStringAsync()); + var domains = document.RootElement.GetProperty("domains"); + Assert.Equal(1, domains.GetArrayLength()); + Assert.Equal("primary", domains[0].GetProperty("id").GetString()); + } + + [Fact] + public async Task DomainIndex_ReturnsManifestMetadata() + { + var client = _factory.CreateClient(); + var response = await client.GetAsync("/excititor/mirror/domains/primary/index"); + response.EnsureSuccessStatusCode(); + + using var document = JsonDocument.Parse(await response.Content.ReadAsStringAsync()); + var exports = document.RootElement.GetProperty("exports"); + Assert.Equal(1, exports.GetArrayLength()); + var entry = exports[0]; + Assert.Equal("consensus", entry.GetProperty("exportKey").GetString()); + Assert.Equal("exports/20251019T000000000Z/abcdef", entry.GetProperty("exportId").GetString()); + var artifact = entry.GetProperty("artifact"); + Assert.Equal("sha256", artifact.GetProperty("algorithm").GetString()); + Assert.Equal("deadbeef", artifact.GetProperty("digest").GetString()); + } + + [Fact] + public async Task 
Download_ReturnsArtifactContent() + { + var client = _factory.CreateClient(); + var response = await client.GetAsync("/excititor/mirror/domains/primary/exports/consensus/download"); + response.EnsureSuccessStatusCode(); + Assert.Equal("application/json", response.Content.Headers.ContentType?.MediaType); + var payload = await response.Content.ReadAsStringAsync(); + Assert.Equal("{\"status\":\"ok\"}", payload); + } + + public void Dispose() + { + _factory.Dispose(); + _runner.Dispose(); + } + + private sealed class FakeExportStore : IVexExportStore + { + private readonly ConcurrentDictionary<(string Signature, VexExportFormat Format), VexExportManifest> _manifests = new(); + + public FakeExportStore(TimeProvider timeProvider) + { + var filters = new[] + { + new VexQueryFilter("vulnId", "CVE-2025-0001"), + new VexQueryFilter("productKey", "pkg:test/demo"), + }; + + var query = VexQuery.Create(filters, Enumerable.Empty<VexQuerySort>()); + var signature = VexQuerySignature.FromQuery(query); + var createdAt = new DateTimeOffset(2025, 10, 19, 0, 0, 0, TimeSpan.Zero); + + var manifest = new VexExportManifest( + "exports/20251019T000000000Z/abcdef", + signature, + VexExportFormat.Json, + createdAt, + new VexContentAddress("sha256", "deadbeef"), + 1, + new[] { "primary" }, + fromCache: false, + consensusRevision: "rev-1", + attestation: new VexAttestationMetadata("https://stella-ops.org/attestations/vex-export"), + sizeBytes: 16); + + _manifests.TryAdd((signature.Value, VexExportFormat.Json), manifest); + + // Seed artifact content for download test. + FakeArtifactStore.Seed(manifest.Artifact, "{\"status\":\"ok\"}"); + } + + public ValueTask<VexExportManifest?> FindAsync(VexQuerySignature signature, VexExportFormat format, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + _manifests.TryGetValue((signature.Value, format), out var manifest); + return ValueTask.FromResult(manifest); + } + + public ValueTask SaveAsync(VexExportManifest manifest, CancellationToken cancellationToken, IClientSessionHandle? 
session = null) + => ValueTask.CompletedTask; + } + + private sealed class FakeArtifactStore : IVexArtifactStore + { + private static readonly ConcurrentDictionary<VexContentAddress, byte[]> Content = new(); + + public static void Seed(VexContentAddress contentAddress, string payload) + { + var bytes = System.Text.Encoding.UTF8.GetBytes(payload); + Content[contentAddress] = bytes; + } + + public ValueTask<VexStoredArtifact> SaveAsync(VexExportArtifact artifact, CancellationToken cancellationToken) + { + Content[artifact.ContentAddress] = artifact.Content.ToArray(); + return ValueTask.FromResult(new VexStoredArtifact(artifact.ContentAddress, "memory://artifact", artifact.Content.Length, artifact.Metadata)); + } + + public ValueTask DeleteAsync(VexContentAddress contentAddress, CancellationToken cancellationToken) + { + Content.TryRemove(contentAddress, out _); + return ValueTask.CompletedTask; + } + + public ValueTask<Stream?> OpenReadAsync(VexContentAddress contentAddress, CancellationToken cancellationToken) + { + if (!Content.TryGetValue(contentAddress, out var bytes)) + { + return ValueTask.FromResult<Stream?>(null); + } + + return ValueTask.FromResult<Stream?>(new MemoryStream(bytes, writable: false)); + } + } + + private sealed class FakeSigner : StellaOps.Excititor.Attestation.Signing.IVexSigner + { + public ValueTask<StellaOps.Excititor.Attestation.Signing.VexSignedPayload> SignAsync(ReadOnlyMemory<byte> payload, CancellationToken cancellationToken) + => ValueTask.FromResult(new StellaOps.Excititor.Attestation.Signing.VexSignedPayload("signature", "key")); + } + + private sealed class FakePolicyEvaluator : StellaOps.Excititor.Policy.IVexPolicyEvaluator + { + public string Version => "test"; + + public VexPolicySnapshot Snapshot => VexPolicySnapshot.Default; + + public double GetProviderWeight(VexProvider provider) => 1.0; + + public bool IsClaimEligible(VexClaim claim, VexProvider provider, out string? 
rejectionReason) + { + rejectionReason = null; + return true; + } + } + +} diff --git a/src/StellaOps.Excititor.WebService.Tests/ResolveEndpointTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.WebService.Tests/ResolveEndpointTests.cs similarity index 97% rename from src/StellaOps.Excititor.WebService.Tests/ResolveEndpointTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.WebService.Tests/ResolveEndpointTests.cs index 35c5d415..b697fdb7 100644 --- a/src/StellaOps.Excititor.WebService.Tests/ResolveEndpointTests.cs +++ b/src/Excititor/__Tests/StellaOps.Excititor.WebService.Tests/ResolveEndpointTests.cs @@ -1,375 +1,375 @@ -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Net; -using System.Net.Http.Headers; -using System.Net.Http.Json; -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.DependencyInjection; -using EphemeralMongo; -using MongoRunner = EphemeralMongo.MongoRunner; -using MongoRunnerOptions = EphemeralMongo.MongoRunnerOptions; -using StellaOps.Excititor.Attestation.Signing; -using StellaOps.Excititor.Connectors.Abstractions; -using StellaOps.Excititor.Core; -using StellaOps.Excititor.Export; -using StellaOps.Excititor.Policy; -using StellaOps.Excititor.Storage.Mongo; - -namespace StellaOps.Excititor.WebService.Tests; - -public sealed class ResolveEndpointTests : IDisposable -{ - private readonly TestWebApplicationFactory _factory; - private readonly IMongoRunner _runner; - - public ResolveEndpointTests() - { - _runner = MongoRunner.Run(new MongoRunnerOptions { UseSingleNodeReplicaSet = true }); - - _factory = new TestWebApplicationFactory( - configureConfiguration: config => - { - var rootPath = Path.Combine(Path.GetTempPath(), "excititor-resolve-tests"); - Directory.CreateDirectory(rootPath); - var settings = new Dictionary<string, string?> - { - ["Excititor:Storage:Mongo:ConnectionString"] = _runner.ConnectionString, - ["Excititor:Storage:Mongo:DatabaseName"] = "excititor-resolve-tests", - ["Excititor:Storage:Mongo:RawBucketName"] = "vex.raw", - ["Excititor:Storage:Mongo:GridFsInlineThresholdBytes"] = "256", - ["Excititor:Artifacts:FileSystem:RootPath"] = rootPath, - }; - config.AddInMemoryCollection(settings!); - }, - configureServices: services => - { - services.AddTestAuthentication(); - TestServiceOverrides.Apply(services); - services.AddSingleton<IVexSigner, FakeSigner>(); - services.AddSingleton<IVexPolicyEvaluator, FakePolicyEvaluator>(); - }); - } - - [Fact] - public async Task ResolveEndpoint_ReturnsBadRequest_WhenInputsMissing() - { - var client = CreateClient("vex.read"); - var response = await client.PostAsJsonAsync("/excititor/resolve", new { vulnerabilityIds = new[] { "CVE-2025-0001" } }); - Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode); - } - - [Fact] - public async Task ResolveEndpoint_ComputesConsensusAndAttestation() - { - const string vulnerabilityId = "CVE-2025-2222"; - const string productKey = "pkg:nuget/StellaOps.Demo@1.0.0"; - const string providerId = "redhat"; - - await SeedProviderAsync(providerId); - await SeedClaimAsync(vulnerabilityId, productKey, providerId); - - var client = CreateClient("vex.read"); - var request = new ResolveRequest( - new[] { productKey }, - null, - new[] { vulnerabilityId }, - null); - - var response = await client.PostAsJsonAsync("/excititor/resolve", request); - response.EnsureSuccessStatusCode(); - - var payload = await response.Content.ReadFromJsonAsync<ResolveResponse>(); - Assert.NotNull(payload); - Assert.NotNull(payload!.Policy); - - 
var result = Assert.Single(payload.Results); - Assert.Equal(vulnerabilityId, result.VulnerabilityId); - Assert.Equal(productKey, result.ProductKey); - Assert.Equal("not_affected", result.Status); - Assert.NotNull(result.Envelope); - Assert.Equal("signature", result.Envelope!.ContentSignature!.Value); - Assert.Equal("key", result.Envelope.ContentSignature.KeyId); - Assert.NotEqual(default, result.CalculatedAt); - - Assert.NotNull(result.Signals); - Assert.True(result.Signals!.Kev); - Assert.NotNull(result.Envelope.AttestationSignature); - Assert.False(string.IsNullOrWhiteSpace(result.Envelope.AttestationEnvelope)); - Assert.Equal(payload.Policy.ActiveRevisionId, result.PolicyRevisionId); - Assert.Equal(payload.Policy.Version, result.PolicyVersion); - Assert.Equal(payload.Policy.Digest, result.PolicyDigest); - - var decision = Assert.Single(result.Decisions); - Assert.True(decision.Included); - Assert.Equal(providerId, decision.ProviderId); - } - - [Fact] - public async Task ResolveEndpoint_ReturnsConflict_WhenPolicyRevisionMismatch() - { - const string vulnerabilityId = "CVE-2025-3333"; - const string productKey = "pkg:docker/demo@sha256:abcd"; - - var client = CreateClient("vex.read"); - var request = new ResolveRequest( - new[] { productKey }, - null, - new[] { vulnerabilityId }, - "rev-0"); - - var response = await client.PostAsJsonAsync("/excititor/resolve", request); - Assert.Equal(HttpStatusCode.Conflict, response.StatusCode); - } - - [Fact] - public async Task ResolveEndpoint_ReturnsUnauthorized_WhenMissingToken() - { - var client = CreateClient(); - var request = new ResolveRequest( - new[] { "pkg:test/demo" }, - null, - new[] { "CVE-2025-0001" }, - null); - - var response = await client.PostAsJsonAsync("/excititor/resolve", request); - Assert.Equal(HttpStatusCode.Unauthorized, response.StatusCode); - } - - [Fact] - public async Task ResolveEndpoint_ReturnsForbidden_WhenScopeMissing() - { - var client = CreateClient("vex.admin"); - var request = new ResolveRequest( - new[] { "pkg:test/demo" }, - null, - new[] { "CVE-2025-0001" }, - null); - - var response = await client.PostAsJsonAsync("/excititor/resolve", request); - Assert.Equal(HttpStatusCode.Forbidden, response.StatusCode); - } - - private async Task SeedProviderAsync(string providerId) - { - await using var scope = _factory.Services.CreateAsyncScope(); - var store = scope.ServiceProvider.GetRequiredService<IVexProviderStore>(); - var provider = new VexProvider(providerId, "Red Hat", VexProviderKind.Distro); - await store.SaveAsync(provider, CancellationToken.None); - } - - private async Task SeedClaimAsync(string vulnerabilityId, string productKey, string providerId) - { - await using var scope = _factory.Services.CreateAsyncScope(); - var store = scope.ServiceProvider.GetRequiredService<IVexClaimStore>(); - var timeProvider = scope.ServiceProvider.GetRequiredService<TimeProvider>(); - var observedAt = timeProvider.GetUtcNow(); - - var claim = new VexClaim( - vulnerabilityId, - providerId, - new VexProduct(productKey, "Demo Component", version: "1.0.0", purl: productKey), - VexClaimStatus.NotAffected, - new VexClaimDocument(VexDocumentFormat.Csaf, "sha256:deadbeef", new Uri("https://example.org/vex/csaf.json")), - observedAt.AddDays(-1), - observedAt, - VexJustification.ProtectedByMitigatingControl, - detail: "Test justification", - confidence: new VexConfidence("high", 0.9, "unit-test"), - signals: new VexSignalSnapshot( - new VexSeveritySignal("cvss:v3.1", 5.5, "medium"), - kev: true, - epss: 0.25)); - - await 
store.AppendAsync(new[] { claim }, observedAt, CancellationToken.None); - } - - private HttpClient CreateClient(params string[] scopes) - { - var client = _factory.CreateClient(); - if (scopes.Length > 0) - { - client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", string.Join(' ', scopes)); - } - - return client; - } - - public void Dispose() - { - _factory.Dispose(); - _runner.Dispose(); - } - - private sealed class ResolveRequest - { - public ResolveRequest( - IReadOnlyList<string>? productKeys, - IReadOnlyList<string>? purls, - IReadOnlyList<string>? vulnerabilityIds, - string? policyRevisionId) - { - ProductKeys = productKeys; - Purls = purls; - VulnerabilityIds = vulnerabilityIds; - PolicyRevisionId = policyRevisionId; - } - - public IReadOnlyList<string>? ProductKeys { get; } - - public IReadOnlyList<string>? Purls { get; } - - public IReadOnlyList<string>? VulnerabilityIds { get; } - - public string? PolicyRevisionId { get; } - } - - private sealed class ResolveResponse - { - public required DateTimeOffset ResolvedAt { get; init; } - - public required ResolvePolicy Policy { get; init; } - - public required List<ResolveResult> Results { get; init; } - } - - private sealed class ResolvePolicy - { - public required string ActiveRevisionId { get; init; } - - public required string Version { get; init; } - - public required string Digest { get; init; } - - public string? RequestedRevisionId { get; init; } - } - - private sealed class ResolveResult - { - public required string VulnerabilityId { get; init; } - - public required string ProductKey { get; init; } - - public required string Status { get; init; } - - public required DateTimeOffset CalculatedAt { get; init; } - - public required List<ResolveSource> Sources { get; init; } - - public required List<ResolveConflict> Conflicts { get; init; } - - public ResolveSignals? Signals { get; init; } - - public string? Summary { get; init; } - - public required string PolicyRevisionId { get; init; } - - public required string PolicyVersion { get; init; } - - public required string PolicyDigest { get; init; } - - public required List<ResolveDecision> Decisions { get; init; } - - public ResolveEnvelope? Envelope { get; init; } - } - - private sealed class ResolveSource - { - public required string ProviderId { get; init; } - } - - private sealed class ResolveConflict - { - public string? ProviderId { get; init; } - } - - private sealed class ResolveSignals - { - public ResolveSeverity? Severity { get; init; } - - public bool? Kev { get; init; } - - public double? Epss { get; init; } - } - - private sealed class ResolveSeverity - { - public string? Scheme { get; init; } - - public double? Score { get; init; } - } - - private sealed class ResolveDecision - { - public required string ProviderId { get; init; } - - public required bool Included { get; init; } - - public string? Reason { get; init; } - } - - private sealed class ResolveEnvelope - { - public required ResolveArtifact Artifact { get; init; } - - public ResolveSignature? ContentSignature { get; init; } - - public ResolveAttestationMetadata? Attestation { get; init; } - - public string? AttestationEnvelope { get; init; } - - public ResolveSignature? AttestationSignature { get; init; } - } - - private sealed class ResolveArtifact - { - public required string Algorithm { get; init; } - - public required string Digest { get; init; } - } - - private sealed class ResolveSignature - { - public required string Value { get; init; } - - public string? 
KeyId { get; init; } - } - - private sealed class ResolveAttestationMetadata - { - public required string PredicateType { get; init; } - - public ResolveRekorReference? Rekor { get; init; } - - public string? EnvelopeDigest { get; init; } - - public DateTimeOffset? SignedAt { get; init; } - } - - private sealed class ResolveRekorReference - { - public string? Location { get; init; } - } - - private sealed class FakeSigner : IVexSigner - { - public ValueTask<VexSignedPayload> SignAsync(ReadOnlyMemory<byte> payload, CancellationToken cancellationToken) - => ValueTask.FromResult(new VexSignedPayload("signature", "key")); - } - - private sealed class FakePolicyEvaluator : IVexPolicyEvaluator - { - public string Version => "test"; - - public VexPolicySnapshot Snapshot => VexPolicySnapshot.Default; - - public double GetProviderWeight(VexProvider provider) => 1.0; - - public bool IsClaimEligible(VexClaim claim, VexProvider provider, out string? rejectionReason) - { - rejectionReason = null; - return true; - } - } - - -} +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Net; +using System.Net.Http.Headers; +using System.Net.Http.Json; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using EphemeralMongo; +using MongoRunner = EphemeralMongo.MongoRunner; +using MongoRunnerOptions = EphemeralMongo.MongoRunnerOptions; +using StellaOps.Excititor.Attestation.Signing; +using StellaOps.Excititor.Connectors.Abstractions; +using StellaOps.Excititor.Core; +using StellaOps.Excititor.Export; +using StellaOps.Excititor.Policy; +using StellaOps.Excititor.Storage.Mongo; + +namespace StellaOps.Excititor.WebService.Tests; + +public sealed class ResolveEndpointTests : IDisposable +{ + private readonly TestWebApplicationFactory _factory; + private readonly IMongoRunner _runner; + + public ResolveEndpointTests() + { + _runner = MongoRunner.Run(new MongoRunnerOptions { UseSingleNodeReplicaSet = true }); + + _factory = new TestWebApplicationFactory( + configureConfiguration: config => + { + var rootPath = Path.Combine(Path.GetTempPath(), "excititor-resolve-tests"); + Directory.CreateDirectory(rootPath); + var settings = new Dictionary<string, string?> + { + ["Excititor:Storage:Mongo:ConnectionString"] = _runner.ConnectionString, + ["Excititor:Storage:Mongo:DatabaseName"] = "excititor-resolve-tests", + ["Excititor:Storage:Mongo:RawBucketName"] = "vex.raw", + ["Excititor:Storage:Mongo:GridFsInlineThresholdBytes"] = "256", + ["Excititor:Artifacts:FileSystem:RootPath"] = rootPath, + }; + config.AddInMemoryCollection(settings!); + }, + configureServices: services => + { + services.AddTestAuthentication(); + TestServiceOverrides.Apply(services); + services.AddSingleton<IVexSigner, FakeSigner>(); + services.AddSingleton<IVexPolicyEvaluator, FakePolicyEvaluator>(); + }); + } + + [Fact] + public async Task ResolveEndpoint_ReturnsBadRequest_WhenInputsMissing() + { + var client = CreateClient("vex.read"); + var response = await client.PostAsJsonAsync("/excititor/resolve", new { vulnerabilityIds = new[] { "CVE-2025-0001" } }); + Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode); + } + + [Fact] + public async Task ResolveEndpoint_ComputesConsensusAndAttestation() + { + const string vulnerabilityId = "CVE-2025-2222"; + const string productKey = "pkg:nuget/StellaOps.Demo@1.0.0"; + const string providerId = "redhat"; + + await SeedProviderAsync(providerId); + await SeedClaimAsync(vulnerabilityId, productKey, providerId); + + var client = 
CreateClient("vex.read"); + var request = new ResolveRequest( + new[] { productKey }, + null, + new[] { vulnerabilityId }, + null); + + var response = await client.PostAsJsonAsync("/excititor/resolve", request); + response.EnsureSuccessStatusCode(); + + var payload = await response.Content.ReadFromJsonAsync<ResolveResponse>(); + Assert.NotNull(payload); + Assert.NotNull(payload!.Policy); + + var result = Assert.Single(payload.Results); + Assert.Equal(vulnerabilityId, result.VulnerabilityId); + Assert.Equal(productKey, result.ProductKey); + Assert.Equal("not_affected", result.Status); + Assert.NotNull(result.Envelope); + Assert.Equal("signature", result.Envelope!.ContentSignature!.Value); + Assert.Equal("key", result.Envelope.ContentSignature.KeyId); + Assert.NotEqual(default, result.CalculatedAt); + + Assert.NotNull(result.Signals); + Assert.True(result.Signals!.Kev); + Assert.NotNull(result.Envelope.AttestationSignature); + Assert.False(string.IsNullOrWhiteSpace(result.Envelope.AttestationEnvelope)); + Assert.Equal(payload.Policy.ActiveRevisionId, result.PolicyRevisionId); + Assert.Equal(payload.Policy.Version, result.PolicyVersion); + Assert.Equal(payload.Policy.Digest, result.PolicyDigest); + + var decision = Assert.Single(result.Decisions); + Assert.True(decision.Included); + Assert.Equal(providerId, decision.ProviderId); + } + + [Fact] + public async Task ResolveEndpoint_ReturnsConflict_WhenPolicyRevisionMismatch() + { + const string vulnerabilityId = "CVE-2025-3333"; + const string productKey = "pkg:docker/demo@sha256:abcd"; + + var client = CreateClient("vex.read"); + var request = new ResolveRequest( + new[] { productKey }, + null, + new[] { vulnerabilityId }, + "rev-0"); + + var response = await client.PostAsJsonAsync("/excititor/resolve", request); + Assert.Equal(HttpStatusCode.Conflict, response.StatusCode); + } + + [Fact] + public async Task ResolveEndpoint_ReturnsUnauthorized_WhenMissingToken() + { + var client = CreateClient(); + var request = new ResolveRequest( + new[] { "pkg:test/demo" }, + null, + new[] { "CVE-2025-0001" }, + null); + + var response = await client.PostAsJsonAsync("/excititor/resolve", request); + Assert.Equal(HttpStatusCode.Unauthorized, response.StatusCode); + } + + [Fact] + public async Task ResolveEndpoint_ReturnsForbidden_WhenScopeMissing() + { + var client = CreateClient("vex.admin"); + var request = new ResolveRequest( + new[] { "pkg:test/demo" }, + null, + new[] { "CVE-2025-0001" }, + null); + + var response = await client.PostAsJsonAsync("/excititor/resolve", request); + Assert.Equal(HttpStatusCode.Forbidden, response.StatusCode); + } + + private async Task SeedProviderAsync(string providerId) + { + await using var scope = _factory.Services.CreateAsyncScope(); + var store = scope.ServiceProvider.GetRequiredService<IVexProviderStore>(); + var provider = new VexProvider(providerId, "Red Hat", VexProviderKind.Distro); + await store.SaveAsync(provider, CancellationToken.None); + } + + private async Task SeedClaimAsync(string vulnerabilityId, string productKey, string providerId) + { + await using var scope = _factory.Services.CreateAsyncScope(); + var store = scope.ServiceProvider.GetRequiredService<IVexClaimStore>(); + var timeProvider = scope.ServiceProvider.GetRequiredService<TimeProvider>(); + var observedAt = timeProvider.GetUtcNow(); + + var claim = new VexClaim( + vulnerabilityId, + providerId, + new VexProduct(productKey, "Demo Component", version: "1.0.0", purl: productKey), + VexClaimStatus.NotAffected, + new 
VexClaimDocument(VexDocumentFormat.Csaf, "sha256:deadbeef", new Uri("https://example.org/vex/csaf.json")), + observedAt.AddDays(-1), + observedAt, + VexJustification.ProtectedByMitigatingControl, + detail: "Test justification", + confidence: new VexConfidence("high", 0.9, "unit-test"), + signals: new VexSignalSnapshot( + new VexSeveritySignal("cvss:v3.1", 5.5, "medium"), + kev: true, + epss: 0.25)); + + await store.AppendAsync(new[] { claim }, observedAt, CancellationToken.None); + } + + private HttpClient CreateClient(params string[] scopes) + { + var client = _factory.CreateClient(); + if (scopes.Length > 0) + { + client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", string.Join(' ', scopes)); + } + + return client; + } + + public void Dispose() + { + _factory.Dispose(); + _runner.Dispose(); + } + + private sealed class ResolveRequest + { + public ResolveRequest( + IReadOnlyList<string>? productKeys, + IReadOnlyList<string>? purls, + IReadOnlyList<string>? vulnerabilityIds, + string? policyRevisionId) + { + ProductKeys = productKeys; + Purls = purls; + VulnerabilityIds = vulnerabilityIds; + PolicyRevisionId = policyRevisionId; + } + + public IReadOnlyList<string>? ProductKeys { get; } + + public IReadOnlyList<string>? Purls { get; } + + public IReadOnlyList<string>? VulnerabilityIds { get; } + + public string? PolicyRevisionId { get; } + } + + private sealed class ResolveResponse + { + public required DateTimeOffset ResolvedAt { get; init; } + + public required ResolvePolicy Policy { get; init; } + + public required List<ResolveResult> Results { get; init; } + } + + private sealed class ResolvePolicy + { + public required string ActiveRevisionId { get; init; } + + public required string Version { get; init; } + + public required string Digest { get; init; } + + public string? RequestedRevisionId { get; init; } + } + + private sealed class ResolveResult + { + public required string VulnerabilityId { get; init; } + + public required string ProductKey { get; init; } + + public required string Status { get; init; } + + public required DateTimeOffset CalculatedAt { get; init; } + + public required List<ResolveSource> Sources { get; init; } + + public required List<ResolveConflict> Conflicts { get; init; } + + public ResolveSignals? Signals { get; init; } + + public string? Summary { get; init; } + + public required string PolicyRevisionId { get; init; } + + public required string PolicyVersion { get; init; } + + public required string PolicyDigest { get; init; } + + public required List<ResolveDecision> Decisions { get; init; } + + public ResolveEnvelope? Envelope { get; init; } + } + + private sealed class ResolveSource + { + public required string ProviderId { get; init; } + } + + private sealed class ResolveConflict + { + public string? ProviderId { get; init; } + } + + private sealed class ResolveSignals + { + public ResolveSeverity? Severity { get; init; } + + public bool? Kev { get; init; } + + public double? Epss { get; init; } + } + + private sealed class ResolveSeverity + { + public string? Scheme { get; init; } + + public double? Score { get; init; } + } + + private sealed class ResolveDecision + { + public required string ProviderId { get; init; } + + public required bool Included { get; init; } + + public string? Reason { get; init; } + } + + private sealed class ResolveEnvelope + { + public required ResolveArtifact Artifact { get; init; } + + public ResolveSignature? ContentSignature { get; init; } + + public ResolveAttestationMetadata? 
Attestation { get; init; } + + public string? AttestationEnvelope { get; init; } + + public ResolveSignature? AttestationSignature { get; init; } + } + + private sealed class ResolveArtifact + { + public required string Algorithm { get; init; } + + public required string Digest { get; init; } + } + + private sealed class ResolveSignature + { + public required string Value { get; init; } + + public string? KeyId { get; init; } + } + + private sealed class ResolveAttestationMetadata + { + public required string PredicateType { get; init; } + + public ResolveRekorReference? Rekor { get; init; } + + public string? EnvelopeDigest { get; init; } + + public DateTimeOffset? SignedAt { get; init; } + } + + private sealed class ResolveRekorReference + { + public string? Location { get; init; } + } + + private sealed class FakeSigner : IVexSigner + { + public ValueTask<VexSignedPayload> SignAsync(ReadOnlyMemory<byte> payload, CancellationToken cancellationToken) + => ValueTask.FromResult(new VexSignedPayload("signature", "key")); + } + + private sealed class FakePolicyEvaluator : IVexPolicyEvaluator + { + public string Version => "test"; + + public VexPolicySnapshot Snapshot => VexPolicySnapshot.Default; + + public double GetProviderWeight(VexProvider provider) => 1.0; + + public bool IsClaimEligible(VexClaim claim, VexProvider provider, out string? rejectionReason) + { + rejectionReason = null; + return true; + } + } + + +} diff --git a/src/StellaOps.Excititor.WebService.Tests/StatusEndpointTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.WebService.Tests/StatusEndpointTests.cs similarity index 97% rename from src/StellaOps.Excititor.WebService.Tests/StatusEndpointTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.WebService.Tests/StatusEndpointTests.cs index 3e990957..6679b2ba 100644 --- a/src/StellaOps.Excititor.WebService.Tests/StatusEndpointTests.cs +++ b/src/Excititor/__Tests/StellaOps.Excititor.WebService.Tests/StatusEndpointTests.cs @@ -1,97 +1,97 @@ -using System; -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Net.Http.Json; -using System.IO; -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.DependencyInjection; -using EphemeralMongo; -using MongoRunner = EphemeralMongo.MongoRunner; -using MongoRunnerOptions = EphemeralMongo.MongoRunnerOptions; -using StellaOps.Excititor.Attestation.Signing; -using StellaOps.Excititor.Connectors.Abstractions; -using StellaOps.Excititor.Policy; -using StellaOps.Excititor.Core; -using StellaOps.Excititor.Export; -using StellaOps.Excititor.WebService; - -namespace StellaOps.Excititor.WebService.Tests; - -public sealed class StatusEndpointTests : IDisposable -{ - private readonly TestWebApplicationFactory _factory; - private readonly IMongoRunner _runner; - - public StatusEndpointTests() - { - _runner = MongoRunner.Run(new MongoRunnerOptions { UseSingleNodeReplicaSet = true }); - _factory = new TestWebApplicationFactory( - configureConfiguration: config => - { - var rootPath = Path.Combine(Path.GetTempPath(), "excititor-offline-tests"); - Directory.CreateDirectory(rootPath); - var settings = new Dictionary<string, string?> - { - ["Excititor:Storage:Mongo:ConnectionString"] = _runner.ConnectionString, - ["Excititor:Storage:Mongo:DatabaseName"] = "excititor-web-tests", - ["Excititor:Storage:Mongo:RawBucketName"] = "vex.raw", - ["Excititor:Storage:Mongo:GridFsInlineThresholdBytes"] = "256", - ["Excititor:Artifacts:FileSystem:RootPath"] = rootPath, - }; - 
config.AddInMemoryCollection(settings!); - }, - configureServices: services => - { - TestServiceOverrides.Apply(services); - services.AddSingleton<IVexSigner, FakeSigner>(); - services.AddSingleton<IVexPolicyEvaluator, FakePolicyEvaluator>(); - services.AddSingleton(new VexConnectorDescriptor("excititor:redhat", VexProviderKind.Distro, "Red Hat CSAF")); - }); - } - - [Fact] - public async Task StatusEndpoint_ReturnsArtifactStores() - { - var client = _factory.CreateClient(); - var response = await client.GetAsync("/excititor/status"); - var raw = await response.Content.ReadAsStringAsync(); - Assert.True(response.IsSuccessStatusCode, raw); - - var payload = System.Text.Json.JsonSerializer.Deserialize<StatusResponse>(raw); - Assert.NotNull(payload); - Assert.NotEmpty(payload!.ArtifactStores); - } - - public void Dispose() - { - _factory.Dispose(); - _runner.Dispose(); - } - - private sealed class StatusResponse - { - public string[] ArtifactStores { get; set; } = Array.Empty<string>(); - } - - private sealed class FakeSigner : IVexSigner - { - public ValueTask<VexSignedPayload> SignAsync(ReadOnlyMemory<byte> payload, CancellationToken cancellationToken) - => ValueTask.FromResult(new VexSignedPayload("signature", "key")); - } - - private sealed class FakePolicyEvaluator : IVexPolicyEvaluator - { - public string Version => "test"; - - public VexPolicySnapshot Snapshot => VexPolicySnapshot.Default; - - public double GetProviderWeight(VexProvider provider) => 1.0; - - public bool IsClaimEligible(VexClaim claim, VexProvider provider, out string? rejectionReason) - { - rejectionReason = null; - return true; - } - } - -} +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Net.Http.Json; +using System.IO; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using EphemeralMongo; +using MongoRunner = EphemeralMongo.MongoRunner; +using MongoRunnerOptions = EphemeralMongo.MongoRunnerOptions; +using StellaOps.Excititor.Attestation.Signing; +using StellaOps.Excititor.Connectors.Abstractions; +using StellaOps.Excititor.Policy; +using StellaOps.Excititor.Core; +using StellaOps.Excititor.Export; +using StellaOps.Excititor.WebService; + +namespace StellaOps.Excititor.WebService.Tests; + +public sealed class StatusEndpointTests : IDisposable +{ + private readonly TestWebApplicationFactory _factory; + private readonly IMongoRunner _runner; + + public StatusEndpointTests() + { + _runner = MongoRunner.Run(new MongoRunnerOptions { UseSingleNodeReplicaSet = true }); + _factory = new TestWebApplicationFactory( + configureConfiguration: config => + { + var rootPath = Path.Combine(Path.GetTempPath(), "excititor-offline-tests"); + Directory.CreateDirectory(rootPath); + var settings = new Dictionary<string, string?> + { + ["Excititor:Storage:Mongo:ConnectionString"] = _runner.ConnectionString, + ["Excititor:Storage:Mongo:DatabaseName"] = "excititor-web-tests", + ["Excititor:Storage:Mongo:RawBucketName"] = "vex.raw", + ["Excititor:Storage:Mongo:GridFsInlineThresholdBytes"] = "256", + ["Excititor:Artifacts:FileSystem:RootPath"] = rootPath, + }; + config.AddInMemoryCollection(settings!); + }, + configureServices: services => + { + TestServiceOverrides.Apply(services); + services.AddSingleton<IVexSigner, FakeSigner>(); + services.AddSingleton<IVexPolicyEvaluator, FakePolicyEvaluator>(); + services.AddSingleton(new VexConnectorDescriptor("excititor:redhat", VexProviderKind.Distro, "Red Hat CSAF")); + }); + } + + [Fact] + public async 
Task StatusEndpoint_ReturnsArtifactStores()
+    {
+        var client = _factory.CreateClient();
+        var response = await client.GetAsync("/excititor/status");
+        var raw = await response.Content.ReadAsStringAsync();
+        Assert.True(response.IsSuccessStatusCode, raw);
+
+        var payload = System.Text.Json.JsonSerializer.Deserialize<StatusResponse>(raw);
+        Assert.NotNull(payload);
+        Assert.NotEmpty(payload!.ArtifactStores);
+    }
+
+    public void Dispose()
+    {
+        _factory.Dispose();
+        _runner.Dispose();
+    }
+
+    private sealed class StatusResponse
+    {
+        public string[] ArtifactStores { get; set; } = Array.Empty<string>();
+    }
+
+    private sealed class FakeSigner : IVexSigner
+    {
+        public ValueTask<VexSignedPayload> SignAsync(ReadOnlyMemory<byte> payload, CancellationToken cancellationToken)
+            => ValueTask.FromResult(new VexSignedPayload("signature", "key"));
+    }
+
+    private sealed class FakePolicyEvaluator : IVexPolicyEvaluator
+    {
+        public string Version => "test";
+
+        public VexPolicySnapshot Snapshot => VexPolicySnapshot.Default;
+
+        public double GetProviderWeight(VexProvider provider) => 1.0;
+
+        public bool IsClaimEligible(VexClaim claim, VexProvider provider, out string? rejectionReason)
+        {
+            rejectionReason = null;
+            return true;
+        }
+    }
+
+}
diff --git a/src/StellaOps.Excititor.WebService.Tests/StellaOps.Excititor.WebService.Tests.csproj b/src/Excititor/__Tests/StellaOps.Excititor.WebService.Tests/StellaOps.Excititor.WebService.Tests.csproj
similarity index 86%
rename from src/StellaOps.Excititor.WebService.Tests/StellaOps.Excititor.WebService.Tests.csproj
rename to src/Excititor/__Tests/StellaOps.Excititor.WebService.Tests/StellaOps.Excititor.WebService.Tests.csproj
index 874ab412..31e1099d 100644
--- a/src/StellaOps.Excititor.WebService.Tests/StellaOps.Excititor.WebService.Tests.csproj
+++ b/src/Excititor/__Tests/StellaOps.Excititor.WebService.Tests/StellaOps.Excititor.WebService.Tests.csproj
@@ -1,3 +1,4 @@
+<?xml version='1.0' encoding='utf-8'?>
 <Project Sdk="Microsoft.NET.Sdk">
   <PropertyGroup>
     <TargetFramework>net10.0</TargetFramework>
@@ -17,9 +18,9 @@
     <PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" PrivateAssets="all" />
   </ItemGroup>
   <ItemGroup>
-    <ProjectReference Include="..\StellaOps.Excititor.WebService\StellaOps.Excititor.WebService.csproj" />
+    <ProjectReference Include="../../StellaOps.Excititor.WebService/StellaOps.Excititor.WebService.csproj" />
   </ItemGroup>
   <ItemGroup>
     <Using Include="Xunit" />
   </ItemGroup>
-</Project>
+</Project>
\ No newline at end of file
diff --git a/src/StellaOps.Excititor.WebService.Tests/TestAuthentication.cs b/src/Excititor/__Tests/StellaOps.Excititor.WebService.Tests/TestAuthentication.cs
similarity index 97%
rename from src/StellaOps.Excititor.WebService.Tests/TestAuthentication.cs
rename to src/Excititor/__Tests/StellaOps.Excititor.WebService.Tests/TestAuthentication.cs
index 40500197..80718caa 100644
--- a/src/StellaOps.Excititor.WebService.Tests/TestAuthentication.cs
+++ b/src/Excititor/__Tests/StellaOps.Excititor.WebService.Tests/TestAuthentication.cs
@@ -1,61 +1,61 @@
-using System.Security.Claims;
-using System.Text.Encodings.Web;
-using Microsoft.AspNetCore.Authentication;
-using Microsoft.Extensions.DependencyInjection;
-using Microsoft.Extensions.Logging;
-using Microsoft.Extensions.Options;
-
-namespace StellaOps.Excititor.WebService.Tests;
-
-internal static class TestAuthenticationExtensions
-{
-    public const string SchemeName = "TestBearer";
-
-    public static AuthenticationBuilder AddTestAuthentication(this IServiceCollection services)
-    {
-        return services.AddAuthentication(options =>
-        {
-            options.DefaultAuthenticateScheme = SchemeName;
-            options.DefaultChallengeScheme = SchemeName;
-        }).AddScheme<AuthenticationSchemeOptions, TestAuthenticationHandler>(SchemeName, _ => { });
-    }
-
-    private sealed class TestAuthenticationHandler : AuthenticationHandler<AuthenticationSchemeOptions>
-    {
-        public TestAuthenticationHandler(
-            IOptionsMonitor<AuthenticationSchemeOptions> options,
-            ILoggerFactory logger,
-            UrlEncoder encoder)
-            : base(options, logger, encoder)
-        {
-        }
-
-        protected override Task<AuthenticateResult> HandleAuthenticateAsync()
-        {
-            if (!Request.Headers.TryGetValue("Authorization", out var authorization) || authorization.Count == 0)
-            {
-                return Task.FromResult(AuthenticateResult.NoResult());
-            }
-
-            var header = authorization[0];
-            if (string.IsNullOrWhiteSpace(header) || !header.StartsWith("Bearer ", StringComparison.OrdinalIgnoreCase))
-            {
-                return Task.FromResult(AuthenticateResult.Fail("Invalid authentication scheme."));
-            }
-
-            var scopeSegment = header.Substring("Bearer ".Length);
-            var scopes = scopeSegment.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);
-
-            var claims = new List<Claim> { new Claim(ClaimTypes.NameIdentifier, "test-user") };
-            if (scopes.Length > 0)
-            {
-                claims.Add(new Claim("scope", string.Join(' ', scopes)));
-            }
-
-            var identity = new ClaimsIdentity(claims, SchemeName);
-            var principal = new ClaimsPrincipal(identity);
-            var ticket = new AuthenticationTicket(principal, SchemeName);
-            return Task.FromResult(AuthenticateResult.Success(ticket));
-        }
-    }
-}
+using System.Security.Claims;
+using System.Text.Encodings.Web;
+using Microsoft.AspNetCore.Authentication;
+using Microsoft.Extensions.DependencyInjection;
+using Microsoft.Extensions.Logging;
+using Microsoft.Extensions.Options;
+
+namespace StellaOps.Excititor.WebService.Tests;
+
+internal static class TestAuthenticationExtensions
+{
+    public const string SchemeName = "TestBearer";
+
+    public static AuthenticationBuilder AddTestAuthentication(this IServiceCollection services)
+    {
+        return services.AddAuthentication(options =>
+        {
+            options.DefaultAuthenticateScheme = SchemeName;
+            options.DefaultChallengeScheme = SchemeName;
+        }).AddScheme<AuthenticationSchemeOptions, TestAuthenticationHandler>(SchemeName, _ => { });
+    }
+
+    private sealed class TestAuthenticationHandler : AuthenticationHandler<AuthenticationSchemeOptions>
+    {
+        public TestAuthenticationHandler(
+            IOptionsMonitor<AuthenticationSchemeOptions> options,
+            ILoggerFactory logger,
+            UrlEncoder encoder)
+            : base(options, logger, encoder)
+        {
+        }
+
+        protected override Task<AuthenticateResult> HandleAuthenticateAsync()
+        {
+            if (!Request.Headers.TryGetValue("Authorization", out var authorization) || authorization.Count == 0)
+            {
+                return Task.FromResult(AuthenticateResult.NoResult());
+            }
+
+            var header = authorization[0];
+            if (string.IsNullOrWhiteSpace(header) || !header.StartsWith("Bearer ", StringComparison.OrdinalIgnoreCase))
+            {
+                return Task.FromResult(AuthenticateResult.Fail("Invalid authentication scheme."));
+            }
+
+            var scopeSegment = header.Substring("Bearer ".Length);
+            var scopes = scopeSegment.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries);
+
+            var claims = new List<Claim> { new Claim(ClaimTypes.NameIdentifier, "test-user") };
+            if (scopes.Length > 0)
+            {
+                claims.Add(new Claim("scope", string.Join(' ', scopes)));
+            }
+
+            var identity = new ClaimsIdentity(claims, SchemeName);
+
var principal = new ClaimsPrincipal(identity); + var ticket = new AuthenticationTicket(principal, SchemeName); + return Task.FromResult(AuthenticateResult.Success(ticket)); + } + } +} diff --git a/src/StellaOps.Excititor.WebService.Tests/TestServiceOverrides.cs b/src/Excititor/__Tests/StellaOps.Excititor.WebService.Tests/TestServiceOverrides.cs similarity index 98% rename from src/StellaOps.Excititor.WebService.Tests/TestServiceOverrides.cs rename to src/Excititor/__Tests/StellaOps.Excititor.WebService.Tests/TestServiceOverrides.cs index f085fce8..85572f80 100644 --- a/src/StellaOps.Excititor.WebService.Tests/TestServiceOverrides.cs +++ b/src/Excititor/__Tests/StellaOps.Excititor.WebService.Tests/TestServiceOverrides.cs @@ -1,141 +1,141 @@ -using System; -using System.Collections.Concurrent; -using System.Collections.Immutable; -using System.Text; -using System.Text.Json; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.DependencyInjection.Extensions; -using Microsoft.Extensions.Hosting; -using Microsoft.Extensions.Logging; -using StellaOps.Excititor.Core; -using StellaOps.Excititor.Export; -using StellaOps.Excititor.Storage.Mongo; -using StellaOps.Excititor.WebService.Services; -using MongoDB.Driver; -using StellaOps.Excititor.Attestation.Dsse; - -namespace StellaOps.Excititor.WebService.Tests; - -internal static class TestServiceOverrides -{ - public static void Apply(IServiceCollection services) - { - services.RemoveAll<IVexConnector>(); - services.RemoveAll<IVexIngestOrchestrator>(); - services.RemoveAll<IVexConnectorStateRepository>(); - services.RemoveAll<IVexExportCacheService>(); - services.RemoveAll<IVexExportDataSource>(); - services.RemoveAll<IVexExportStore>(); - services.RemoveAll<IVexCacheIndex>(); - services.RemoveAll<IVexCacheMaintenance>(); - services.RemoveAll<IVexAttestationClient>(); - - services.AddSingleton<IVexIngestOrchestrator, StubIngestOrchestrator>(); - services.AddSingleton<IVexConnectorStateRepository, StubConnectorStateRepository>(); - services.AddSingleton<IVexExportCacheService, StubExportCacheService>(); - services.RemoveAll<IExportEngine>(); - services.AddSingleton<IExportEngine, StubExportEngine>(); - services.AddSingleton<IVexExportDataSource, StubExportDataSource>(); - services.AddSingleton<IVexExportStore, StubExportStore>(); - services.AddSingleton<IVexCacheIndex, StubCacheIndex>(); - services.AddSingleton<IVexCacheMaintenance, StubCacheMaintenance>(); - services.AddSingleton<IVexAttestationClient, StubAttestationClient>(); - - services.RemoveAll<IHostedService>(); - services.AddSingleton<IHostedService, NoopHostedService>(); - } - - private sealed class StubExportCacheService : IVexExportCacheService - { - public ValueTask InvalidateAsync(VexQuerySignature signature, VexExportFormat format, CancellationToken cancellationToken) - => ValueTask.CompletedTask; - - public ValueTask<int> PruneDanglingAsync(CancellationToken cancellationToken) - => ValueTask.FromResult(0); - - public ValueTask<int> PruneExpiredAsync(DateTimeOffset asOf, CancellationToken cancellationToken) - => ValueTask.FromResult(0); - } - - private sealed class StubExportEngine : IExportEngine - { - public ValueTask<VexExportManifest> ExportAsync(VexExportRequestContext context, CancellationToken cancellationToken) - { - var manifest = new VexExportManifest( - exportId: "stub/export", - querySignature: VexQuerySignature.FromQuery(context.Query), - format: context.Format, - createdAt: DateTimeOffset.UtcNow, - artifact: new VexContentAddress("sha256", "stub"), 
- claimCount: 0, - sourceProviders: Array.Empty<string>()); - - return ValueTask.FromResult(manifest); - } - } - - private sealed class StubExportDataSource : IVexExportDataSource - { - public ValueTask<VexExportDataSet> FetchAsync(VexQuery query, CancellationToken cancellationToken) - { - return ValueTask.FromResult(new VexExportDataSet( - ImmutableArray<VexConsensus>.Empty, - ImmutableArray<VexClaim>.Empty, - ImmutableArray<string>.Empty)); - } - } - - private sealed class StubExportStore : IVexExportStore - { - private readonly ConcurrentDictionary<(string Signature, VexExportFormat Format), VexExportManifest> _store = new(); - - public ValueTask<VexExportManifest?> FindAsync(VexQuerySignature signature, VexExportFormat format, CancellationToken cancellationToken, IClientSessionHandle? session = null) - { - _store.TryGetValue((signature.Value, format), out var manifest); - return ValueTask.FromResult(manifest); - } - - public ValueTask SaveAsync(VexExportManifest manifest, CancellationToken cancellationToken, IClientSessionHandle? session = null) - { - _store[(manifest.QuerySignature.Value, manifest.Format)] = manifest; - return ValueTask.CompletedTask; - } - } - - private sealed class StubCacheIndex : IVexCacheIndex - { - private readonly ConcurrentDictionary<(string Signature, VexExportFormat Format), VexCacheEntry> _entries = new(); - - public ValueTask<VexCacheEntry?> FindAsync(VexQuerySignature signature, VexExportFormat format, CancellationToken cancellationToken, IClientSessionHandle? session = null) - { - _entries.TryGetValue((signature.Value, format), out var entry); - return ValueTask.FromResult(entry); - } - - public ValueTask RemoveAsync(VexQuerySignature signature, VexExportFormat format, CancellationToken cancellationToken, IClientSessionHandle? session = null) - { - _entries.TryRemove((signature.Value, format), out _); - return ValueTask.CompletedTask; - } - - public ValueTask SaveAsync(VexCacheEntry entry, CancellationToken cancellationToken, IClientSessionHandle? session = null) - { - _entries[(entry.QuerySignature.Value, entry.Format)] = entry; - return ValueTask.CompletedTask; - } - } - - private sealed class StubCacheMaintenance : IVexCacheMaintenance - { - public ValueTask<int> RemoveExpiredAsync(DateTimeOffset asOf, CancellationToken cancellationToken, IClientSessionHandle? session = null) - => ValueTask.FromResult(0); - - public ValueTask<int> RemoveMissingManifestReferencesAsync(CancellationToken cancellationToken, IClientSessionHandle? 
session = null) - => ValueTask.FromResult(0); - } - - private sealed class StubAttestationClient : IVexAttestationClient - { +using System; +using System.Collections.Concurrent; +using System.Collections.Immutable; +using System.Text; +using System.Text.Json; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using StellaOps.Excititor.Core; +using StellaOps.Excititor.Export; +using StellaOps.Excititor.Storage.Mongo; +using StellaOps.Excititor.WebService.Services; +using MongoDB.Driver; +using StellaOps.Excititor.Attestation.Dsse; + +namespace StellaOps.Excititor.WebService.Tests; + +internal static class TestServiceOverrides +{ + public static void Apply(IServiceCollection services) + { + services.RemoveAll<IVexConnector>(); + services.RemoveAll<IVexIngestOrchestrator>(); + services.RemoveAll<IVexConnectorStateRepository>(); + services.RemoveAll<IVexExportCacheService>(); + services.RemoveAll<IVexExportDataSource>(); + services.RemoveAll<IVexExportStore>(); + services.RemoveAll<IVexCacheIndex>(); + services.RemoveAll<IVexCacheMaintenance>(); + services.RemoveAll<IVexAttestationClient>(); + + services.AddSingleton<IVexIngestOrchestrator, StubIngestOrchestrator>(); + services.AddSingleton<IVexConnectorStateRepository, StubConnectorStateRepository>(); + services.AddSingleton<IVexExportCacheService, StubExportCacheService>(); + services.RemoveAll<IExportEngine>(); + services.AddSingleton<IExportEngine, StubExportEngine>(); + services.AddSingleton<IVexExportDataSource, StubExportDataSource>(); + services.AddSingleton<IVexExportStore, StubExportStore>(); + services.AddSingleton<IVexCacheIndex, StubCacheIndex>(); + services.AddSingleton<IVexCacheMaintenance, StubCacheMaintenance>(); + services.AddSingleton<IVexAttestationClient, StubAttestationClient>(); + + services.RemoveAll<IHostedService>(); + services.AddSingleton<IHostedService, NoopHostedService>(); + } + + private sealed class StubExportCacheService : IVexExportCacheService + { + public ValueTask InvalidateAsync(VexQuerySignature signature, VexExportFormat format, CancellationToken cancellationToken) + => ValueTask.CompletedTask; + + public ValueTask<int> PruneDanglingAsync(CancellationToken cancellationToken) + => ValueTask.FromResult(0); + + public ValueTask<int> PruneExpiredAsync(DateTimeOffset asOf, CancellationToken cancellationToken) + => ValueTask.FromResult(0); + } + + private sealed class StubExportEngine : IExportEngine + { + public ValueTask<VexExportManifest> ExportAsync(VexExportRequestContext context, CancellationToken cancellationToken) + { + var manifest = new VexExportManifest( + exportId: "stub/export", + querySignature: VexQuerySignature.FromQuery(context.Query), + format: context.Format, + createdAt: DateTimeOffset.UtcNow, + artifact: new VexContentAddress("sha256", "stub"), + claimCount: 0, + sourceProviders: Array.Empty<string>()); + + return ValueTask.FromResult(manifest); + } + } + + private sealed class StubExportDataSource : IVexExportDataSource + { + public ValueTask<VexExportDataSet> FetchAsync(VexQuery query, CancellationToken cancellationToken) + { + return ValueTask.FromResult(new VexExportDataSet( + ImmutableArray<VexConsensus>.Empty, + ImmutableArray<VexClaim>.Empty, + ImmutableArray<string>.Empty)); + } + } + + private sealed class StubExportStore : IVexExportStore + { + private readonly ConcurrentDictionary<(string Signature, VexExportFormat Format), VexExportManifest> 
_store = new(); + + public ValueTask<VexExportManifest?> FindAsync(VexQuerySignature signature, VexExportFormat format, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + _store.TryGetValue((signature.Value, format), out var manifest); + return ValueTask.FromResult(manifest); + } + + public ValueTask SaveAsync(VexExportManifest manifest, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + _store[(manifest.QuerySignature.Value, manifest.Format)] = manifest; + return ValueTask.CompletedTask; + } + } + + private sealed class StubCacheIndex : IVexCacheIndex + { + private readonly ConcurrentDictionary<(string Signature, VexExportFormat Format), VexCacheEntry> _entries = new(); + + public ValueTask<VexCacheEntry?> FindAsync(VexQuerySignature signature, VexExportFormat format, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + _entries.TryGetValue((signature.Value, format), out var entry); + return ValueTask.FromResult(entry); + } + + public ValueTask RemoveAsync(VexQuerySignature signature, VexExportFormat format, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + _entries.TryRemove((signature.Value, format), out _); + return ValueTask.CompletedTask; + } + + public ValueTask SaveAsync(VexCacheEntry entry, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + _entries[(entry.QuerySignature.Value, entry.Format)] = entry; + return ValueTask.CompletedTask; + } + } + + private sealed class StubCacheMaintenance : IVexCacheMaintenance + { + public ValueTask<int> RemoveExpiredAsync(DateTimeOffset asOf, CancellationToken cancellationToken, IClientSessionHandle? session = null) + => ValueTask.FromResult(0); + + public ValueTask<int> RemoveMissingManifestReferencesAsync(CancellationToken cancellationToken, IClientSessionHandle? session = null) + => ValueTask.FromResult(0); + } + + private sealed class StubAttestationClient : IVexAttestationClient + { public ValueTask<VexAttestationResponse> SignAsync(VexAttestationRequest request, CancellationToken cancellationToken) { var envelope = new DsseEnvelope( @@ -159,50 +159,50 @@ internal static class TestServiceOverrides diagnostics); return ValueTask.FromResult(response); } - + public ValueTask<VexAttestationVerification> VerifyAsync(VexAttestationVerificationRequest request, CancellationToken cancellationToken) { var verification = new VexAttestationVerification(true, ImmutableDictionary<string, string>.Empty); return ValueTask.FromResult(verification); } - } - - private sealed class StubConnectorStateRepository : IVexConnectorStateRepository - { - private readonly ConcurrentDictionary<string, VexConnectorState> _states = new(StringComparer.Ordinal); - - public ValueTask<VexConnectorState?> GetAsync(string connectorId, CancellationToken cancellationToken, IClientSessionHandle? session = null) - { - _states.TryGetValue(connectorId, out var state); - return ValueTask.FromResult(state); - } - - public ValueTask SaveAsync(VexConnectorState state, CancellationToken cancellationToken, IClientSessionHandle? 
session = null) - { - _states[state.ConnectorId] = state; - return ValueTask.CompletedTask; - } - } - - private sealed class StubIngestOrchestrator : IVexIngestOrchestrator - { - public Task<InitSummary> InitializeAsync(IngestInitOptions options, CancellationToken cancellationToken) - => Task.FromResult(new InitSummary(Guid.Empty, DateTimeOffset.UtcNow, DateTimeOffset.UtcNow, ImmutableArray<InitProviderResult>.Empty)); - - public Task<IngestRunSummary> RunAsync(IngestRunOptions options, CancellationToken cancellationToken) - => Task.FromResult(new IngestRunSummary(Guid.Empty, DateTimeOffset.UtcNow, DateTimeOffset.UtcNow, ImmutableArray<ProviderRunResult>.Empty)); - - public Task<IngestRunSummary> ResumeAsync(IngestResumeOptions options, CancellationToken cancellationToken) - => Task.FromResult(new IngestRunSummary(Guid.Empty, DateTimeOffset.UtcNow, DateTimeOffset.UtcNow, ImmutableArray<ProviderRunResult>.Empty)); - - public Task<ReconcileSummary> ReconcileAsync(ReconcileOptions options, CancellationToken cancellationToken) - => Task.FromResult(new ReconcileSummary(Guid.Empty, DateTimeOffset.UtcNow, DateTimeOffset.UtcNow, ImmutableArray<ReconcileProviderResult>.Empty)); - } - - private sealed class NoopHostedService : IHostedService - { - public Task StartAsync(CancellationToken cancellationToken) => Task.CompletedTask; - - public Task StopAsync(CancellationToken cancellationToken) => Task.CompletedTask; - } -} + } + + private sealed class StubConnectorStateRepository : IVexConnectorStateRepository + { + private readonly ConcurrentDictionary<string, VexConnectorState> _states = new(StringComparer.Ordinal); + + public ValueTask<VexConnectorState?> GetAsync(string connectorId, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + _states.TryGetValue(connectorId, out var state); + return ValueTask.FromResult(state); + } + + public ValueTask SaveAsync(VexConnectorState state, CancellationToken cancellationToken, IClientSessionHandle? 
session = null) + { + _states[state.ConnectorId] = state; + return ValueTask.CompletedTask; + } + } + + private sealed class StubIngestOrchestrator : IVexIngestOrchestrator + { + public Task<InitSummary> InitializeAsync(IngestInitOptions options, CancellationToken cancellationToken) + => Task.FromResult(new InitSummary(Guid.Empty, DateTimeOffset.UtcNow, DateTimeOffset.UtcNow, ImmutableArray<InitProviderResult>.Empty)); + + public Task<IngestRunSummary> RunAsync(IngestRunOptions options, CancellationToken cancellationToken) + => Task.FromResult(new IngestRunSummary(Guid.Empty, DateTimeOffset.UtcNow, DateTimeOffset.UtcNow, ImmutableArray<ProviderRunResult>.Empty)); + + public Task<IngestRunSummary> ResumeAsync(IngestResumeOptions options, CancellationToken cancellationToken) + => Task.FromResult(new IngestRunSummary(Guid.Empty, DateTimeOffset.UtcNow, DateTimeOffset.UtcNow, ImmutableArray<ProviderRunResult>.Empty)); + + public Task<ReconcileSummary> ReconcileAsync(ReconcileOptions options, CancellationToken cancellationToken) + => Task.FromResult(new ReconcileSummary(Guid.Empty, DateTimeOffset.UtcNow, DateTimeOffset.UtcNow, ImmutableArray<ReconcileProviderResult>.Empty)); + } + + private sealed class NoopHostedService : IHostedService + { + public Task StartAsync(CancellationToken cancellationToken) => Task.CompletedTask; + + public Task StopAsync(CancellationToken cancellationToken) => Task.CompletedTask; + } +} diff --git a/src/StellaOps.Excititor.WebService.Tests/TestWebApplicationFactory.cs b/src/Excititor/__Tests/StellaOps.Excititor.WebService.Tests/TestWebApplicationFactory.cs similarity index 97% rename from src/StellaOps.Excititor.WebService.Tests/TestWebApplicationFactory.cs rename to src/Excititor/__Tests/StellaOps.Excititor.WebService.Tests/TestWebApplicationFactory.cs index 39d80b24..bfbdaaf4 100644 --- a/src/StellaOps.Excititor.WebService.Tests/TestWebApplicationFactory.cs +++ b/src/Excititor/__Tests/StellaOps.Excititor.WebService.Tests/TestWebApplicationFactory.cs @@ -1,42 +1,42 @@ -using Microsoft.AspNetCore.Hosting; -using Microsoft.AspNetCore.Mvc.Testing; -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Hosting; - -namespace StellaOps.Excititor.WebService.Tests; - -internal sealed class TestWebApplicationFactory : WebApplicationFactory<Program> -{ - private readonly Action<IConfigurationBuilder>? _configureConfiguration; - private readonly Action<IServiceCollection>? _configureServices; - - public TestWebApplicationFactory( - Action<IConfigurationBuilder>? configureConfiguration, - Action<IServiceCollection>? 
configureServices) - { - _configureConfiguration = configureConfiguration; - _configureServices = configureServices; - } - - protected override void ConfigureWebHost(IWebHostBuilder builder) - { - builder.UseEnvironment("Production"); - if (_configureConfiguration is not null) - { - builder.ConfigureAppConfiguration((_, config) => _configureConfiguration(config)); - } - - if (_configureServices is not null) - { - builder.ConfigureServices(services => _configureServices(services)); - } - } - - protected override IHost CreateHost(IHostBuilder builder) - { - builder.UseEnvironment("Production"); - builder.UseDefaultServiceProvider(options => options.ValidateScopes = false); - return base.CreateHost(builder); - } -} +using Microsoft.AspNetCore.Hosting; +using Microsoft.AspNetCore.Mvc.Testing; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; + +namespace StellaOps.Excititor.WebService.Tests; + +internal sealed class TestWebApplicationFactory : WebApplicationFactory<Program> +{ + private readonly Action<IConfigurationBuilder>? _configureConfiguration; + private readonly Action<IServiceCollection>? _configureServices; + + public TestWebApplicationFactory( + Action<IConfigurationBuilder>? configureConfiguration, + Action<IServiceCollection>? configureServices) + { + _configureConfiguration = configureConfiguration; + _configureServices = configureServices; + } + + protected override void ConfigureWebHost(IWebHostBuilder builder) + { + builder.UseEnvironment("Production"); + if (_configureConfiguration is not null) + { + builder.ConfigureAppConfiguration((_, config) => _configureConfiguration(config)); + } + + if (_configureServices is not null) + { + builder.ConfigureServices(services => _configureServices(services)); + } + } + + protected override IHost CreateHost(IHostBuilder builder) + { + builder.UseEnvironment("Production"); + builder.UseDefaultServiceProvider(options => options.ValidateScopes = false); + return base.CreateHost(builder); + } +} diff --git a/src/StellaOps.Excititor.Worker.Tests/DefaultVexProviderRunnerIntegrationTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Worker.Tests/DefaultVexProviderRunnerIntegrationTests.cs similarity index 97% rename from src/StellaOps.Excititor.Worker.Tests/DefaultVexProviderRunnerIntegrationTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Worker.Tests/DefaultVexProviderRunnerIntegrationTests.cs index 5bcd6f7b..e2b84e67 100644 --- a/src/StellaOps.Excititor.Worker.Tests/DefaultVexProviderRunnerIntegrationTests.cs +++ b/src/Excititor/__Tests/StellaOps.Excititor.Worker.Tests/DefaultVexProviderRunnerIntegrationTests.cs @@ -1,363 +1,363 @@ -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Linq; -using System.Runtime.CompilerServices; -using System.Security.Cryptography; -using System.Text; -using System.Text.Json; -using FluentAssertions; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.Extensions.Options; -using Mongo2Go; -using MongoDB.Bson; -using MongoDB.Driver; -using StellaOps.Aoc; -using StellaOps.Excititor.Core; -using StellaOps.Excititor.Core.Aoc; -using StellaOps.Excititor.Storage.Mongo; -using StellaOps.Excititor.Worker.Options; -using StellaOps.Excititor.Worker.Scheduling; -using StellaOps.Excititor.Worker.Signature; -using StellaOps.Plugin; -using Xunit; -using RawVexDocumentModel = StellaOps.Concelier.RawModels.VexRawDocument; - -namespace 
StellaOps.Excititor.Worker.Tests; - -public sealed class DefaultVexProviderRunnerIntegrationTests : IAsyncLifetime -{ - private readonly MongoDbRunner _runner; - private readonly MongoClient _client; - - public DefaultVexProviderRunnerIntegrationTests() - { - _runner = MongoDbRunner.Start(singleNodeReplSet: true); - _client = new MongoClient(_runner.ConnectionString); - } - - public Task InitializeAsync() => Task.CompletedTask; - - public Task DisposeAsync() - { - _runner.Dispose(); - return Task.CompletedTask; - } - - [Fact] - public async Task RunAsync_LargeBatch_IdempotentAcrossRestart() - { - var specs = CreateDocumentSpecs(count: 48); - var databaseName = $"vex-worker-batch-{Guid.NewGuid():N}"; - var (provider, guard, database, connector) = ConfigureIntegrationServices(databaseName, specs); - - try - { - var time = new FixedTimeProvider(new DateTimeOffset(2025, 10, 28, 8, 0, 0, TimeSpan.Zero)); - var runner = CreateRunner(provider, time); - var schedule = new VexWorkerSchedule(connector.Id, TimeSpan.FromMinutes(10), TimeSpan.Zero, VexConnectorSettings.Empty); - - await runner.RunAsync(schedule, CancellationToken.None); - - var rawCollection = database.GetCollection<BsonDocument>(VexMongoCollectionNames.Raw); - var stored = await rawCollection.Find(FilterDefinition<BsonDocument>.Empty).ToListAsync(); - stored.Should().HaveCount(specs.Count); - - // Supersedes metadata is preserved for chained documents. - var target = specs[17]; - var storedTarget = stored.Single(doc => doc["_id"] == target.Digest); - storedTarget["Metadata"].AsBsonDocument.TryGetValue("aoc.supersedes", out var supersedesValue) - .Should().BeTrue(); - supersedesValue!.AsString.Should().Be(target.Metadata["aoc.supersedes"]); - - await runner.RunAsync(schedule, CancellationToken.None); - - var afterRestart = await rawCollection.CountDocumentsAsync(FilterDefinition<BsonDocument>.Empty); - afterRestart.Should().Be(specs.Count); - - // Guard invoked for every document across both runs. - guard.Invocations - .GroupBy(doc => doc.Upstream.ContentHash) - .Should().OnlyContain(group => group.Count() == 2); - - // Verify provenance still carries supersedes linkage. 
- var provenance = guard.Invocations - .Where(doc => doc.Upstream.ContentHash == target.Digest) - .Select(doc => doc.Upstream.Provenance["aoc.supersedes"]) - .ToImmutableArray(); - provenance.Should().HaveCount(2).And.AllBeEquivalentTo(target.Metadata["aoc.supersedes"]); - } - finally - { - await _client.DropDatabaseAsync(databaseName); - await provider.DisposeAsync(); - } - } - - [Fact] - public async Task RunAsync_WhenGuardFails_RestartCompletesSuccessfully() - { - var specs = CreateDocumentSpecs(count: 24); - var failureDigest = specs[9].Digest; - var databaseName = $"vex-worker-guard-{Guid.NewGuid():N}"; - var (provider, guard, database, connector) = ConfigureIntegrationServices(databaseName, specs, failureDigest); - - try - { - var time = new FixedTimeProvider(new DateTimeOffset(2025, 10, 28, 9, 0, 0, TimeSpan.Zero)); - var runner = CreateRunner(provider, time); - var schedule = new VexWorkerSchedule(connector.Id, TimeSpan.FromMinutes(5), TimeSpan.Zero, VexConnectorSettings.Empty); - - await Assert.ThrowsAsync<ExcititorAocGuardException>(() => runner.RunAsync(schedule, CancellationToken.None).AsTask()); - - var rawCollection = database.GetCollection<BsonDocument>(VexMongoCollectionNames.Raw); - var storedCount = await rawCollection.CountDocumentsAsync(FilterDefinition<BsonDocument>.Empty); - storedCount.Should().Be(9); // documents before the failing digest persist - - guard.FailDigest = null; - time.Advance(TimeSpan.FromMinutes(10)); - await runner.RunAsync(schedule, CancellationToken.None); - - var finalCount = await rawCollection.CountDocumentsAsync(FilterDefinition<BsonDocument>.Empty); - finalCount.Should().Be(specs.Count); - - guard.Invocations.Count(doc => doc.Upstream.ContentHash == failureDigest).Should().Be(2); - } - finally - { - await _client.DropDatabaseAsync(databaseName); - await provider.DisposeAsync(); - } - } - - private (ServiceProvider Provider, RecordingVexRawWriteGuard Guard, IMongoDatabase Database, BatchingConnector Connector) ConfigureIntegrationServices( - string databaseName, - IReadOnlyList<DocumentSpec> specs, - string? 
guardFailureDigest = null) - { - var database = _client.GetDatabase(databaseName); - var optionsValue = new VexMongoStorageOptions - { - ConnectionString = _runner.ConnectionString, - DatabaseName = databaseName, - DefaultTenant = "tenant-integration", - GridFsInlineThresholdBytes = 64 * 1024, - }; - var options = Microsoft.Extensions.Options.Options.Create(optionsValue); - var sessionProvider = new DirectSessionProvider(_client); - var guard = new RecordingVexRawWriteGuard { FailDigest = guardFailureDigest }; - var rawStore = new MongoVexRawStore(_client, database, options, sessionProvider, guard); - var providerStore = new MongoVexProviderStore(database); - var stateRepository = new MongoVexConnectorStateRepository(database); - var connector = new BatchingConnector("integration:test", specs); - - var services = new ServiceCollection(); - services.AddSingleton<IVexConnector>(connector); - services.AddSingleton<IVexRawStore>(rawStore); - services.AddSingleton<IVexProviderStore>(providerStore); - services.AddSingleton<IVexConnectorStateRepository>(stateRepository); - services.AddSingleton<IVexClaimStore>(new NoopClaimStore()); - services.AddSingleton<IVexNormalizerRouter>(new NoopNormalizerRouter()); - services.AddSingleton<IVexSignatureVerifier>(new NoopSignatureVerifier()); - - return (services.BuildServiceProvider(), guard, database, connector); - } - - private static DefaultVexProviderRunner CreateRunner(IServiceProvider services, TimeProvider timeProvider) - { - var options = new VexWorkerOptions - { - Retry = - { - BaseDelay = TimeSpan.FromSeconds(5), - MaxDelay = TimeSpan.FromMinutes(1), - JitterRatio = 0.1, - FailureThreshold = 3, - QuarantineDuration = TimeSpan.FromMinutes(30), - }, - }; - - return new DefaultVexProviderRunner( - services, - new PluginCatalog(), - NullLogger<DefaultVexProviderRunner>.Instance, - timeProvider, - Microsoft.Extensions.Options.Options.Create(options)); - } - - private static List<DocumentSpec> CreateDocumentSpecs(int count) - { - var specs = new List<DocumentSpec>(capacity: count); - for (var i = 0; i < count; i++) - { - var payload = JsonSerializer.Serialize(new - { - id = i, - title = $"VEX advisory {i}", - supersedes = i == 0 ? null : $"sha256:batch-{i - 1:D4}", - }); - - var digest = ComputeDigest(payload); - var metadataBuilder = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal); - metadataBuilder["source.vendor"] = "integration-vendor"; - metadataBuilder["source.connector"] = "integration-connector"; - metadataBuilder["aoc.supersedes"] = i == 0 ? 
string.Empty : $"sha256:batch-{i - 1:D4}"; - - specs.Add(new DocumentSpec( - ProviderId: "integration-provider", - Format: VexDocumentFormat.Csaf, - SourceUri: new Uri($"https://example.org/vex/{i}.json"), - RetrievedAt: new DateTimeOffset(2025, 10, 28, 7, 0, 0, TimeSpan.Zero).AddMinutes(i), - Digest: digest, - Payload: payload, - Metadata: metadataBuilder.ToImmutable())); - } - - return specs; - } - - private static string ComputeDigest(string payload) - { - var bytes = Encoding.UTF8.GetBytes(payload); - Span<byte> buffer = stackalloc byte[32]; - if (SHA256.TryHashData(bytes, buffer, out _)) - { - return "sha256:" + Convert.ToHexString(buffer).ToLowerInvariant(); - } - - var hash = SHA256.HashData(bytes); - return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant(); - } - - private sealed record DocumentSpec( - string ProviderId, - VexDocumentFormat Format, - Uri SourceUri, - DateTimeOffset RetrievedAt, - string Digest, - string Payload, - ImmutableDictionary<string, string> Metadata) - { - public VexRawDocument CreateDocument() - { - var content = Encoding.UTF8.GetBytes(Payload); - return new VexRawDocument( - ProviderId, - Format, - SourceUri, - RetrievedAt, - Digest, - new ReadOnlyMemory<byte>(content), - Metadata); - } - } - - private sealed class BatchingConnector : IVexConnector - { - private readonly IReadOnlyList<DocumentSpec> _specs; - - public BatchingConnector(string id, IReadOnlyList<DocumentSpec> specs) - { - Id = id; - _specs = specs; - } - - public string Id { get; } - - public IReadOnlyList<DocumentSpec> Specs => _specs; - - public VexProviderKind Kind => VexProviderKind.Vendor; - - public ValueTask ValidateAsync(VexConnectorSettings settings, CancellationToken cancellationToken) - => ValueTask.CompletedTask; - - public async IAsyncEnumerable<VexRawDocument> FetchAsync( - VexConnectorContext context, - [EnumeratorCancellation] CancellationToken cancellationToken) - { - foreach (var spec in _specs) - { - var document = spec.CreateDocument(); - await context.RawSink.StoreAsync(document, cancellationToken).ConfigureAwait(false); - yield return document; - } - } - - public ValueTask<VexClaimBatch> NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken) - => ValueTask.FromResult(new VexClaimBatch(document, ImmutableArray<VexClaim>.Empty, ImmutableDictionary<string, string>.Empty)); - } - - private sealed class RecordingVexRawWriteGuard : IVexRawWriteGuard - { - private readonly List<RawVexDocumentModel> _invocations = new(); - - public IReadOnlyList<RawVexDocumentModel> Invocations => _invocations; - - public string? FailDigest { get; set; } - - public void EnsureValid(RawVexDocumentModel document) - { - _invocations.Add(document); - if (FailDigest is not null && string.Equals(document.Upstream.ContentHash, FailDigest, StringComparison.Ordinal)) - { - var violation = AocViolation.Create( - AocViolationCode.SignatureInvalid, - "/upstream/digest", - "Synthetic guard failure."); - throw new ExcititorAocGuardException(AocGuardResult.FromViolations(new[] { violation })); - } - } - } - - private sealed class NoopClaimStore : IVexClaimStore - { - public ValueTask AppendAsync(IEnumerable<VexClaim> claims, DateTimeOffset observedAt, CancellationToken cancellationToken, IClientSessionHandle? session = null) - => ValueTask.CompletedTask; - - public ValueTask<IReadOnlyCollection<VexClaim>> FindAsync(string vulnerabilityId, string productKey, DateTimeOffset? since, CancellationToken cancellationToken, IClientSessionHandle? 
session = null) - => ValueTask.FromResult<IReadOnlyCollection<VexClaim>>(Array.Empty<VexClaim>()); - } - - private sealed class NoopNormalizerRouter : IVexNormalizerRouter - { - public ValueTask<VexClaimBatch> NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken) - => ValueTask.FromResult(new VexClaimBatch(document, ImmutableArray<VexClaim>.Empty, ImmutableDictionary<string, string>.Empty)); - } - - private sealed class NoopSignatureVerifier : IVexSignatureVerifier - { - public ValueTask<VexSignatureMetadata?> VerifyAsync(VexRawDocument document, CancellationToken cancellationToken) - => ValueTask.FromResult<VexSignatureMetadata?>(null); - } - - private sealed class DirectSessionProvider : IVexMongoSessionProvider - { - private readonly IMongoClient _client; - - public DirectSessionProvider(IMongoClient client) - { - _client = client; - } - - public async ValueTask<IClientSessionHandle> StartSessionAsync(CancellationToken cancellationToken = default) - { - return await _client.StartSessionAsync(cancellationToken: cancellationToken).ConfigureAwait(false); - } - - public ValueTask DisposeAsync() - { - return ValueTask.CompletedTask; - } - } - - private sealed class FixedTimeProvider : TimeProvider - { - private DateTimeOffset _utcNow; - - public FixedTimeProvider(DateTimeOffset utcNow) => _utcNow = utcNow; - - public override DateTimeOffset GetUtcNow() => _utcNow; - - public void Advance(TimeSpan delta) => _utcNow += delta; - } -} +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Linq; +using System.Runtime.CompilerServices; +using System.Security.Cryptography; +using System.Text; +using System.Text.Json; +using FluentAssertions; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using Mongo2Go; +using MongoDB.Bson; +using MongoDB.Driver; +using StellaOps.Aoc; +using StellaOps.Excititor.Core; +using StellaOps.Excititor.Core.Aoc; +using StellaOps.Excititor.Storage.Mongo; +using StellaOps.Excititor.Worker.Options; +using StellaOps.Excititor.Worker.Scheduling; +using StellaOps.Excititor.Worker.Signature; +using StellaOps.Plugin; +using Xunit; +using RawVexDocumentModel = StellaOps.Concelier.RawModels.VexRawDocument; + +namespace StellaOps.Excititor.Worker.Tests; + +public sealed class DefaultVexProviderRunnerIntegrationTests : IAsyncLifetime +{ + private readonly MongoDbRunner _runner; + private readonly MongoClient _client; + + public DefaultVexProviderRunnerIntegrationTests() + { + _runner = MongoDbRunner.Start(singleNodeReplSet: true); + _client = new MongoClient(_runner.ConnectionString); + } + + public Task InitializeAsync() => Task.CompletedTask; + + public Task DisposeAsync() + { + _runner.Dispose(); + return Task.CompletedTask; + } + + [Fact] + public async Task RunAsync_LargeBatch_IdempotentAcrossRestart() + { + var specs = CreateDocumentSpecs(count: 48); + var databaseName = $"vex-worker-batch-{Guid.NewGuid():N}"; + var (provider, guard, database, connector) = ConfigureIntegrationServices(databaseName, specs); + + try + { + var time = new FixedTimeProvider(new DateTimeOffset(2025, 10, 28, 8, 0, 0, TimeSpan.Zero)); + var runner = CreateRunner(provider, time); + var schedule = new VexWorkerSchedule(connector.Id, TimeSpan.FromMinutes(10), TimeSpan.Zero, VexConnectorSettings.Empty); + + await runner.RunAsync(schedule, CancellationToken.None); + + var rawCollection = database.GetCollection<BsonDocument>(VexMongoCollectionNames.Raw); + var stored = 
await rawCollection.Find(FilterDefinition<BsonDocument>.Empty).ToListAsync(); + stored.Should().HaveCount(specs.Count); + + // Supersedes metadata is preserved for chained documents. + var target = specs[17]; + var storedTarget = stored.Single(doc => doc["_id"] == target.Digest); + storedTarget["Metadata"].AsBsonDocument.TryGetValue("aoc.supersedes", out var supersedesValue) + .Should().BeTrue(); + supersedesValue!.AsString.Should().Be(target.Metadata["aoc.supersedes"]); + + await runner.RunAsync(schedule, CancellationToken.None); + + var afterRestart = await rawCollection.CountDocumentsAsync(FilterDefinition<BsonDocument>.Empty); + afterRestart.Should().Be(specs.Count); + + // Guard invoked for every document across both runs. + guard.Invocations + .GroupBy(doc => doc.Upstream.ContentHash) + .Should().OnlyContain(group => group.Count() == 2); + + // Verify provenance still carries supersedes linkage. + var provenance = guard.Invocations + .Where(doc => doc.Upstream.ContentHash == target.Digest) + .Select(doc => doc.Upstream.Provenance["aoc.supersedes"]) + .ToImmutableArray(); + provenance.Should().HaveCount(2).And.AllBeEquivalentTo(target.Metadata["aoc.supersedes"]); + } + finally + { + await _client.DropDatabaseAsync(databaseName); + await provider.DisposeAsync(); + } + } + + [Fact] + public async Task RunAsync_WhenGuardFails_RestartCompletesSuccessfully() + { + var specs = CreateDocumentSpecs(count: 24); + var failureDigest = specs[9].Digest; + var databaseName = $"vex-worker-guard-{Guid.NewGuid():N}"; + var (provider, guard, database, connector) = ConfigureIntegrationServices(databaseName, specs, failureDigest); + + try + { + var time = new FixedTimeProvider(new DateTimeOffset(2025, 10, 28, 9, 0, 0, TimeSpan.Zero)); + var runner = CreateRunner(provider, time); + var schedule = new VexWorkerSchedule(connector.Id, TimeSpan.FromMinutes(5), TimeSpan.Zero, VexConnectorSettings.Empty); + + await Assert.ThrowsAsync<ExcititorAocGuardException>(() => runner.RunAsync(schedule, CancellationToken.None).AsTask()); + + var rawCollection = database.GetCollection<BsonDocument>(VexMongoCollectionNames.Raw); + var storedCount = await rawCollection.CountDocumentsAsync(FilterDefinition<BsonDocument>.Empty); + storedCount.Should().Be(9); // documents before the failing digest persist + + guard.FailDigest = null; + time.Advance(TimeSpan.FromMinutes(10)); + await runner.RunAsync(schedule, CancellationToken.None); + + var finalCount = await rawCollection.CountDocumentsAsync(FilterDefinition<BsonDocument>.Empty); + finalCount.Should().Be(specs.Count); + + guard.Invocations.Count(doc => doc.Upstream.ContentHash == failureDigest).Should().Be(2); + } + finally + { + await _client.DropDatabaseAsync(databaseName); + await provider.DisposeAsync(); + } + } + + private (ServiceProvider Provider, RecordingVexRawWriteGuard Guard, IMongoDatabase Database, BatchingConnector Connector) ConfigureIntegrationServices( + string databaseName, + IReadOnlyList<DocumentSpec> specs, + string? 
guardFailureDigest = null) + { + var database = _client.GetDatabase(databaseName); + var optionsValue = new VexMongoStorageOptions + { + ConnectionString = _runner.ConnectionString, + DatabaseName = databaseName, + DefaultTenant = "tenant-integration", + GridFsInlineThresholdBytes = 64 * 1024, + }; + var options = Microsoft.Extensions.Options.Options.Create(optionsValue); + var sessionProvider = new DirectSessionProvider(_client); + var guard = new RecordingVexRawWriteGuard { FailDigest = guardFailureDigest }; + var rawStore = new MongoVexRawStore(_client, database, options, sessionProvider, guard); + var providerStore = new MongoVexProviderStore(database); + var stateRepository = new MongoVexConnectorStateRepository(database); + var connector = new BatchingConnector("integration:test", specs); + + var services = new ServiceCollection(); + services.AddSingleton<IVexConnector>(connector); + services.AddSingleton<IVexRawStore>(rawStore); + services.AddSingleton<IVexProviderStore>(providerStore); + services.AddSingleton<IVexConnectorStateRepository>(stateRepository); + services.AddSingleton<IVexClaimStore>(new NoopClaimStore()); + services.AddSingleton<IVexNormalizerRouter>(new NoopNormalizerRouter()); + services.AddSingleton<IVexSignatureVerifier>(new NoopSignatureVerifier()); + + return (services.BuildServiceProvider(), guard, database, connector); + } + + private static DefaultVexProviderRunner CreateRunner(IServiceProvider services, TimeProvider timeProvider) + { + var options = new VexWorkerOptions + { + Retry = + { + BaseDelay = TimeSpan.FromSeconds(5), + MaxDelay = TimeSpan.FromMinutes(1), + JitterRatio = 0.1, + FailureThreshold = 3, + QuarantineDuration = TimeSpan.FromMinutes(30), + }, + }; + + return new DefaultVexProviderRunner( + services, + new PluginCatalog(), + NullLogger<DefaultVexProviderRunner>.Instance, + timeProvider, + Microsoft.Extensions.Options.Options.Create(options)); + } + + private static List<DocumentSpec> CreateDocumentSpecs(int count) + { + var specs = new List<DocumentSpec>(capacity: count); + for (var i = 0; i < count; i++) + { + var payload = JsonSerializer.Serialize(new + { + id = i, + title = $"VEX advisory {i}", + supersedes = i == 0 ? null : $"sha256:batch-{i - 1:D4}", + }); + + var digest = ComputeDigest(payload); + var metadataBuilder = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal); + metadataBuilder["source.vendor"] = "integration-vendor"; + metadataBuilder["source.connector"] = "integration-connector"; + metadataBuilder["aoc.supersedes"] = i == 0 ? 
string.Empty : $"sha256:batch-{i - 1:D4}"; + + specs.Add(new DocumentSpec( + ProviderId: "integration-provider", + Format: VexDocumentFormat.Csaf, + SourceUri: new Uri($"https://example.org/vex/{i}.json"), + RetrievedAt: new DateTimeOffset(2025, 10, 28, 7, 0, 0, TimeSpan.Zero).AddMinutes(i), + Digest: digest, + Payload: payload, + Metadata: metadataBuilder.ToImmutable())); + } + + return specs; + } + + private static string ComputeDigest(string payload) + { + var bytes = Encoding.UTF8.GetBytes(payload); + Span<byte> buffer = stackalloc byte[32]; + if (SHA256.TryHashData(bytes, buffer, out _)) + { + return "sha256:" + Convert.ToHexString(buffer).ToLowerInvariant(); + } + + var hash = SHA256.HashData(bytes); + return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant(); + } + + private sealed record DocumentSpec( + string ProviderId, + VexDocumentFormat Format, + Uri SourceUri, + DateTimeOffset RetrievedAt, + string Digest, + string Payload, + ImmutableDictionary<string, string> Metadata) + { + public VexRawDocument CreateDocument() + { + var content = Encoding.UTF8.GetBytes(Payload); + return new VexRawDocument( + ProviderId, + Format, + SourceUri, + RetrievedAt, + Digest, + new ReadOnlyMemory<byte>(content), + Metadata); + } + } + + private sealed class BatchingConnector : IVexConnector + { + private readonly IReadOnlyList<DocumentSpec> _specs; + + public BatchingConnector(string id, IReadOnlyList<DocumentSpec> specs) + { + Id = id; + _specs = specs; + } + + public string Id { get; } + + public IReadOnlyList<DocumentSpec> Specs => _specs; + + public VexProviderKind Kind => VexProviderKind.Vendor; + + public ValueTask ValidateAsync(VexConnectorSettings settings, CancellationToken cancellationToken) + => ValueTask.CompletedTask; + + public async IAsyncEnumerable<VexRawDocument> FetchAsync( + VexConnectorContext context, + [EnumeratorCancellation] CancellationToken cancellationToken) + { + foreach (var spec in _specs) + { + var document = spec.CreateDocument(); + await context.RawSink.StoreAsync(document, cancellationToken).ConfigureAwait(false); + yield return document; + } + } + + public ValueTask<VexClaimBatch> NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken) + => ValueTask.FromResult(new VexClaimBatch(document, ImmutableArray<VexClaim>.Empty, ImmutableDictionary<string, string>.Empty)); + } + + private sealed class RecordingVexRawWriteGuard : IVexRawWriteGuard + { + private readonly List<RawVexDocumentModel> _invocations = new(); + + public IReadOnlyList<RawVexDocumentModel> Invocations => _invocations; + + public string? FailDigest { get; set; } + + public void EnsureValid(RawVexDocumentModel document) + { + _invocations.Add(document); + if (FailDigest is not null && string.Equals(document.Upstream.ContentHash, FailDigest, StringComparison.Ordinal)) + { + var violation = AocViolation.Create( + AocViolationCode.SignatureInvalid, + "/upstream/digest", + "Synthetic guard failure."); + throw new ExcititorAocGuardException(AocGuardResult.FromViolations(new[] { violation })); + } + } + } + + private sealed class NoopClaimStore : IVexClaimStore + { + public ValueTask AppendAsync(IEnumerable<VexClaim> claims, DateTimeOffset observedAt, CancellationToken cancellationToken, IClientSessionHandle? session = null) + => ValueTask.CompletedTask; + + public ValueTask<IReadOnlyCollection<VexClaim>> FindAsync(string vulnerabilityId, string productKey, DateTimeOffset? since, CancellationToken cancellationToken, IClientSessionHandle? 
session = null) + => ValueTask.FromResult<IReadOnlyCollection<VexClaim>>(Array.Empty<VexClaim>()); + } + + private sealed class NoopNormalizerRouter : IVexNormalizerRouter + { + public ValueTask<VexClaimBatch> NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken) + => ValueTask.FromResult(new VexClaimBatch(document, ImmutableArray<VexClaim>.Empty, ImmutableDictionary<string, string>.Empty)); + } + + private sealed class NoopSignatureVerifier : IVexSignatureVerifier + { + public ValueTask<VexSignatureMetadata?> VerifyAsync(VexRawDocument document, CancellationToken cancellationToken) + => ValueTask.FromResult<VexSignatureMetadata?>(null); + } + + private sealed class DirectSessionProvider : IVexMongoSessionProvider + { + private readonly IMongoClient _client; + + public DirectSessionProvider(IMongoClient client) + { + _client = client; + } + + public async ValueTask<IClientSessionHandle> StartSessionAsync(CancellationToken cancellationToken = default) + { + return await _client.StartSessionAsync(cancellationToken: cancellationToken).ConfigureAwait(false); + } + + public ValueTask DisposeAsync() + { + return ValueTask.CompletedTask; + } + } + + private sealed class FixedTimeProvider : TimeProvider + { + private DateTimeOffset _utcNow; + + public FixedTimeProvider(DateTimeOffset utcNow) => _utcNow = utcNow; + + public override DateTimeOffset GetUtcNow() => _utcNow; + + public void Advance(TimeSpan delta) => _utcNow += delta; + } +} diff --git a/src/StellaOps.Excititor.Worker.Tests/DefaultVexProviderRunnerTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Worker.Tests/DefaultVexProviderRunnerTests.cs similarity index 97% rename from src/StellaOps.Excititor.Worker.Tests/DefaultVexProviderRunnerTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Worker.Tests/DefaultVexProviderRunnerTests.cs index 5deb6be3..4d63162d 100644 --- a/src/StellaOps.Excititor.Worker.Tests/DefaultVexProviderRunnerTests.cs +++ b/src/Excititor/__Tests/StellaOps.Excititor.Worker.Tests/DefaultVexProviderRunnerTests.cs @@ -1,717 +1,717 @@ -using System.Collections.Concurrent; -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Linq; -using System.Security.Cryptography; -using System.Text; -using System.Text.Json; -using System.Text.Json.Serialization; -using FluentAssertions; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.Extensions.Options; -using MongoDB.Driver; -using StellaOps.Plugin; -using StellaOps.Excititor.Attestation.Dsse; -using StellaOps.Excititor.Attestation.Models; -using StellaOps.Excititor.Attestation.Verification; -using StellaOps.Excititor.Connectors.Abstractions; -using StellaOps.Excititor.Core; -using StellaOps.Excititor.Core.Aoc; -using StellaOps.Excititor.Storage.Mongo; -using StellaOps.Excititor.Worker.Options; -using StellaOps.Excititor.Worker.Scheduling; -using StellaOps.Excititor.Worker.Signature; -using StellaOps.Aoc; -using Xunit; -using System.Runtime.CompilerServices; - -namespace StellaOps.Excititor.Worker.Tests; - -public sealed class DefaultVexProviderRunnerTests -{ - private static readonly VexConnectorSettings EmptySettings = VexConnectorSettings.Empty; - - [Fact] - public async Task RunAsync_Skips_WhenNextEligibleRunInFuture() - { - var time = new FixedTimeProvider(new DateTimeOffset(2025, 10, 21, 15, 0, 0, TimeSpan.Zero)); - var connector = TestConnector.Success("excititor:test"); - var stateRepository = new InMemoryStateRepository(); - 
stateRepository.Save(new VexConnectorState( - "excititor:test", - LastUpdated: null, - DocumentDigests: ImmutableArray<string>.Empty, - ResumeTokens: ImmutableDictionary<string, string>.Empty, - LastSuccessAt: null, - FailureCount: 1, - NextEligibleRun: time.GetUtcNow().AddHours(1), - LastFailureReason: "previous failure")); - - var services = CreateServiceProvider(connector, stateRepository); - var runner = CreateRunner(services, time, options => - { - options.Retry.BaseDelay = TimeSpan.FromMinutes(5); - options.Retry.MaxDelay = TimeSpan.FromMinutes(30); - options.Retry.JitterRatio = 0; - }); - - await runner.RunAsync(new VexWorkerSchedule(connector.Id, TimeSpan.FromMinutes(10), TimeSpan.Zero, EmptySettings), CancellationToken.None); - - connector.FetchInvoked.Should().BeFalse(); - var state = stateRepository.Get("excititor:test"); - state.Should().NotBeNull(); - state!.FailureCount.Should().Be(1); - state.NextEligibleRun.Should().Be(time.GetUtcNow().AddHours(1)); - } - - [Fact] - public async Task RunAsync_Success_ResetsFailureCounters() - { - var now = new DateTimeOffset(2025, 10, 21, 16, 0, 0, TimeSpan.Zero); - var time = new FixedTimeProvider(now); - var connector = TestConnector.Success("excititor:test"); - var stateRepository = new InMemoryStateRepository(); - stateRepository.Save(new VexConnectorState( - "excititor:test", - LastUpdated: now.AddDays(-1), - DocumentDigests: ImmutableArray<string>.Empty, - ResumeTokens: ImmutableDictionary<string, string>.Empty, - LastSuccessAt: now.AddHours(-4), - FailureCount: 2, - NextEligibleRun: null, - LastFailureReason: "failure")); - - var services = CreateServiceProvider(connector, stateRepository); - var runner = CreateRunner(services, time, options => - { - options.Retry.BaseDelay = TimeSpan.FromMinutes(2); - options.Retry.MaxDelay = TimeSpan.FromMinutes(30); - options.Retry.JitterRatio = 0; - }); - - await runner.RunAsync(new VexWorkerSchedule(connector.Id, TimeSpan.FromMinutes(10), TimeSpan.Zero, EmptySettings), CancellationToken.None); - - connector.FetchInvoked.Should().BeTrue(); - var state = stateRepository.Get("excititor:test"); - state.Should().NotBeNull(); - state!.FailureCount.Should().Be(0); - state.NextEligibleRun.Should().BeNull(); - state.LastFailureReason.Should().BeNull(); - state.LastSuccessAt.Should().Be(now); - } - - [Fact] - public async Task RunAsync_UsesStoredResumeTokens() - { - var now = new DateTimeOffset(2025, 10, 21, 18, 0, 0, TimeSpan.Zero); - var time = new FixedTimeProvider(now); - var resumeTokens = ImmutableDictionary<string, string>.Empty - .Add("cursor", "abc123"); - var stateRepository = new InMemoryStateRepository(); - stateRepository.Save(new VexConnectorState( - "excititor:resume", - LastUpdated: now.AddHours(-6), - DocumentDigests: ImmutableArray<string>.Empty, - ResumeTokens: resumeTokens, - LastSuccessAt: now.AddHours(-7), - FailureCount: 0, - NextEligibleRun: null, - LastFailureReason: null)); - - var connector = TestConnector.SuccessWithCapture("excititor:resume"); - var services = CreateServiceProvider(connector, stateRepository); - var runner = CreateRunner(services, time, options => - { - options.Retry.BaseDelay = TimeSpan.FromMinutes(2); - options.Retry.MaxDelay = TimeSpan.FromMinutes(10); - options.Retry.JitterRatio = 0; - }); - - await runner.RunAsync(new VexWorkerSchedule(connector.Id, TimeSpan.FromMinutes(10), TimeSpan.Zero, EmptySettings), CancellationToken.None); - - connector.LastContext.Should().NotBeNull(); - connector.LastContext!.Since.Should().Be(now.AddHours(-6)); - 
connector.LastContext.ResumeTokens.Should().BeEquivalentTo(resumeTokens); - } - -[Fact] - public async Task RunAsync_SchedulesRefresh_ForUniqueClaims() - { - var now = new DateTimeOffset(2025, 10, 21, 19, 0, 0, TimeSpan.Zero); - var time = new FixedTimeProvider(now); - var rawDocument = new VexRawDocument( - "provider-a", - VexDocumentFormat.Csaf, - new Uri("https://example.org/vex.json"), - now, - "sha256:raw", - ReadOnlyMemory<byte>.Empty, - ImmutableDictionary<string, string>.Empty); - - var claimDocument = new VexClaimDocument( - VexDocumentFormat.Csaf, - "sha256:claim", - new Uri("https://example.org/vex.json")); - - var primaryProduct = new VexProduct("pkg:test/app", "Test App", componentIdentifiers: new[] { "fingerprint:base" }); - var secondaryProduct = new VexProduct("pkg:test/other", "Other App", componentIdentifiers: new[] { "fingerprint:other" }); - - var claims = new[] - { - new VexClaim("CVE-2025-0001", "provider-a", primaryProduct, VexClaimStatus.Affected, claimDocument, now.AddHours(-3), now.AddHours(-2)), - new VexClaim("CVE-2025-0001", "provider-b", primaryProduct, VexClaimStatus.NotAffected, claimDocument, now.AddHours(-3), now.AddHours(-2)), - new VexClaim("CVE-2025-0002", "provider-a", secondaryProduct, VexClaimStatus.Affected, claimDocument, now.AddHours(-2), now.AddHours(-1)), - }; - - var connector = TestConnector.WithDocuments("excititor:test", rawDocument); - var stateRepository = new InMemoryStateRepository(); - var normalizer = new StubNormalizerRouter(claims); - var services = CreateServiceProvider(connector, stateRepository, normalizer); - var runner = CreateRunner(services, time, options => - { - options.Retry.BaseDelay = TimeSpan.FromMinutes(1); - options.Retry.MaxDelay = TimeSpan.FromMinutes(5); - options.Retry.JitterRatio = 0; - }); - - await runner.RunAsync(new VexWorkerSchedule(connector.Id, TimeSpan.FromMinutes(10), TimeSpan.Zero, EmptySettings), CancellationToken.None); - - normalizer.CallCount.Should().Be(0); - } - - [Fact] - public async Task RunAsync_WhenSignatureVerifierFails_PropagatesException() - { - var now = new DateTimeOffset(2025, 10, 21, 20, 0, 0, TimeSpan.Zero); - var time = new FixedTimeProvider(now); - var content = Encoding.UTF8.GetBytes("{\"id\":\"sig\"}"); - var digest = ComputeDigest(content); - var rawDocument = new VexRawDocument( - "provider-a", - VexDocumentFormat.Csaf, - new Uri("https://example.org/vex.json"), - now, - digest, - content, - ImmutableDictionary<string, string>.Empty); - - var connector = TestConnector.WithDocuments("excititor:test", rawDocument); - var stateRepository = new InMemoryStateRepository(); - var failingVerifier = new ThrowingSignatureVerifier(); - var rawStore = new NoopRawStore(); - var services = CreateServiceProvider( - connector, - stateRepository, - normalizerRouter: null, - signatureVerifier: failingVerifier, - rawStore: rawStore); - - var runner = CreateRunner(services, time, options => - { - options.Retry.BaseDelay = TimeSpan.FromMinutes(1); - options.Retry.MaxDelay = TimeSpan.FromMinutes(5); - options.Retry.JitterRatio = 0; - }); - - await Assert.ThrowsAsync<ExcititorAocGuardException>(async () => - await runner.RunAsync(new VexWorkerSchedule(connector.Id, TimeSpan.FromMinutes(10), TimeSpan.Zero, EmptySettings), CancellationToken.None).AsTask()); - - failingVerifier.Invocations.Should().Be(1); - rawStore.StoreCallCount.Should().Be(0); - } - - [Fact] - public async Task RunAsync_EnrichesMetadataWithSignatureResult() - { - var now = new DateTimeOffset(2025, 10, 21, 21, 0, 0, TimeSpan.Zero); - 
var time = new FixedTimeProvider(now); - var content = Encoding.UTF8.GetBytes("{\"id\":\"sig\"}"); - var digest = ComputeDigest(content); - var document = new VexRawDocument( - "provider-a", - VexDocumentFormat.OciAttestation, - new Uri("https://example.org/attest.json"), - now, - digest, - content, - ImmutableDictionary<string, string>.Empty); - - var signatureMetadata = new VexSignatureMetadata( - "cosign", - subject: "subject", - issuer: "issuer", - keyId: "kid", - verifiedAt: now, - transparencyLogReference: "rekor://entry"); - - var signatureVerifier = new RecordingSignatureVerifier(signatureMetadata); - var rawStore = new NoopRawStore(); - var connector = TestConnector.WithDocuments("excititor:test", document); - var stateRepository = new InMemoryStateRepository(); - var services = CreateServiceProvider( - connector, - stateRepository, - normalizerRouter: null, - signatureVerifier: signatureVerifier, - rawStore: rawStore); - - var runner = CreateRunner(services, time, options => - { - options.Retry.BaseDelay = TimeSpan.FromMinutes(1); - options.Retry.MaxDelay = TimeSpan.FromMinutes(5); - options.Retry.JitterRatio = 0; - }); - - await runner.RunAsync(new VexWorkerSchedule(connector.Id, TimeSpan.FromMinutes(10), TimeSpan.Zero, EmptySettings), CancellationToken.None); - - rawStore.StoreCallCount.Should().Be(1); - rawStore.LastStoredDocument.Should().NotBeNull(); - rawStore.LastStoredDocument!.Metadata.Should().ContainKey("vex.signature.type"); - rawStore.LastStoredDocument.Metadata["vex.signature.type"].Should().Be("cosign"); - rawStore.LastStoredDocument.Metadata["signature.present"].Should().Be("true"); - rawStore.LastStoredDocument.Metadata["signature.verified"].Should().Be("true"); - signatureVerifier.Invocations.Should().Be(1); - } - - [Fact] - public async Task RunAsync_Attestation_StoresVerifierMetadata() - { - var now = new DateTimeOffset(2025, 10, 28, 7, 0, 0, TimeSpan.Zero); - var time = new FixedTimeProvider(now); - var document = CreateAttestationRawDocument(now); - - var diagnostics = ImmutableDictionary<string, string>.Empty - .Add("verification.issuer", "issuer-from-verifier") - .Add("verification.keyId", "key-from-verifier"); - - var attestationVerifier = new StubAttestationVerifier(true, diagnostics); - var signatureVerifier = new WorkerSignatureVerifier( - NullLogger<WorkerSignatureVerifier>.Instance, - attestationVerifier, - time); - - var connector = TestConnector.WithDocuments("excititor:test", document); - var stateRepository = new InMemoryStateRepository(); - var rawStore = new NoopRawStore(); - - var services = CreateServiceProvider( - connector, - stateRepository, - normalizerRouter: null, - signatureVerifier: signatureVerifier, - rawStore: rawStore); - - var runner = CreateRunner(services, time, options => - { - options.Retry.BaseDelay = TimeSpan.FromMinutes(1); - options.Retry.MaxDelay = TimeSpan.FromMinutes(5); - options.Retry.JitterRatio = 0; - }); - - await runner.RunAsync(new VexWorkerSchedule(connector.Id, TimeSpan.FromMinutes(10), TimeSpan.Zero, EmptySettings), CancellationToken.None); - - rawStore.StoreCallCount.Should().Be(1); - rawStore.LastStoredDocument.Should().NotBeNull(); - var metadata = rawStore.LastStoredDocument!.Metadata; - metadata.Should().ContainKey("vex.signature.type"); - metadata["vex.signature.type"].Should().Be("cosign"); - metadata["vex.signature.issuer"].Should().Be("issuer-from-verifier"); - metadata["vex.signature.keyId"].Should().Be("key-from-verifier"); - metadata["signature.present"].Should().Be("true"); - 
metadata["signature.verified"].Should().Be("true"); - metadata.Should().ContainKey("vex.signature.verifiedAt"); - metadata["vex.signature.verifiedAt"].Should().Be(now.ToString("O")); - attestationVerifier.Invocations.Should().Be(1); - } - -[Fact] - public async Task RunAsync_Failure_AppliesBackoff() - { - var now = new DateTimeOffset(2025, 10, 21, 17, 0, 0, TimeSpan.Zero); - var time = new FixedTimeProvider(now); - var connector = TestConnector.Failure("excititor:test", new InvalidOperationException("boom")); - var stateRepository = new InMemoryStateRepository(); - stateRepository.Save(new VexConnectorState( - "excititor:test", - LastUpdated: now.AddDays(-2), - DocumentDigests: ImmutableArray<string>.Empty, - ResumeTokens: ImmutableDictionary<string, string>.Empty, - LastSuccessAt: now.AddDays(-1), - FailureCount: 1, - NextEligibleRun: null, - LastFailureReason: null)); - - var services = CreateServiceProvider(connector, stateRepository); - var runner = CreateRunner(services, time, options => - { - options.Retry.BaseDelay = TimeSpan.FromMinutes(5); - options.Retry.MaxDelay = TimeSpan.FromMinutes(60); - options.Retry.FailureThreshold = 3; - options.Retry.QuarantineDuration = TimeSpan.FromHours(12); - options.Retry.JitterRatio = 0; - }); - - await Assert.ThrowsAsync<InvalidOperationException>(async () => await runner.RunAsync(new VexWorkerSchedule(connector.Id, TimeSpan.FromMinutes(10), TimeSpan.Zero, EmptySettings), CancellationToken.None).AsTask()); - - var state = stateRepository.Get("excititor:test"); - state.Should().NotBeNull(); - state!.FailureCount.Should().Be(2); - state.LastFailureReason.Should().Be("boom"); - state.NextEligibleRun.Should().Be(now + TimeSpan.FromMinutes(10)); - } - - private static ServiceProvider CreateServiceProvider( - IVexConnector connector, - InMemoryStateRepository stateRepository, - IVexNormalizerRouter? normalizerRouter = null, - IVexSignatureVerifier? signatureVerifier = null, - NoopRawStore? rawStore = null) - { - var services = new ServiceCollection(); - services.AddSingleton(connector); - rawStore ??= new NoopRawStore(); - services.AddSingleton(rawStore); - services.AddSingleton<IVexRawStore>(sp => rawStore); - services.AddSingleton<IVexClaimStore>(new NoopClaimStore()); - services.AddSingleton<IVexProviderStore>(new NoopProviderStore()); - services.AddSingleton<IVexConnectorStateRepository>(stateRepository); - services.AddSingleton<IVexNormalizerRouter>(normalizerRouter ?? new NoopNormalizerRouter()); - services.AddSingleton<IVexSignatureVerifier>(signatureVerifier ?? new NoopSignatureVerifier()); - return services.BuildServiceProvider(); - } - - private static DefaultVexProviderRunner CreateRunner( - IServiceProvider serviceProvider, - TimeProvider timeProvider, - Action<VexWorkerOptions> configure) - { - var options = new VexWorkerOptions(); - configure(options); - return new DefaultVexProviderRunner( - serviceProvider, - new PluginCatalog(), - NullLogger<DefaultVexProviderRunner>.Instance, - timeProvider, - Microsoft.Extensions.Options.Options.Create(options)); - } - - private sealed class FixedTimeProvider : TimeProvider - { - private DateTimeOffset _utcNow; - - public FixedTimeProvider(DateTimeOffset utcNow) => _utcNow = utcNow; - - public override DateTimeOffset GetUtcNow() => _utcNow; - - public void Advance(TimeSpan delta) => _utcNow += delta; - } - - private sealed class NoopRawStore : IVexRawStore - { - public int StoreCallCount { get; private set; } - public VexRawDocument? 
LastStoredDocument { get; private set; } - - public ValueTask StoreAsync(VexRawDocument document, CancellationToken cancellationToken) - { - StoreCallCount++; - LastStoredDocument = document; - return ValueTask.CompletedTask; - } - - public ValueTask StoreAsync(VexRawDocument document, CancellationToken cancellationToken, IClientSessionHandle? session) - { - StoreCallCount++; - LastStoredDocument = document; - return ValueTask.CompletedTask; - } - - public ValueTask<VexRawDocument?> FindByDigestAsync(string digest, CancellationToken cancellationToken, IClientSessionHandle? session = null) - => ValueTask.FromResult<VexRawDocument?>(null); - } - - private sealed class NoopClaimStore : IVexClaimStore - { - public ValueTask AppendAsync(IEnumerable<VexClaim> claims, DateTimeOffset observedAt, CancellationToken cancellationToken, IClientSessionHandle? session = null) - => ValueTask.CompletedTask; - - public ValueTask<IReadOnlyCollection<VexClaim>> FindAsync(string vulnerabilityId, string productKey, DateTimeOffset? since, CancellationToken cancellationToken, IClientSessionHandle? session = null) - => ValueTask.FromResult<IReadOnlyCollection<VexClaim>>(Array.Empty<VexClaim>()); - } - - private sealed class NoopProviderStore : IVexProviderStore - { - private readonly ConcurrentDictionary<string, VexProvider> _providers = new(StringComparer.Ordinal); - - public ValueTask<VexProvider?> FindAsync(string id, CancellationToken cancellationToken, IClientSessionHandle? session = null) - { - _providers.TryGetValue(id, out var provider); - return ValueTask.FromResult<VexProvider?>(provider); - } - - public ValueTask<IReadOnlyCollection<VexProvider>> ListAsync(CancellationToken cancellationToken, IClientSessionHandle? session = null) - => ValueTask.FromResult<IReadOnlyCollection<VexProvider>>(_providers.Values.ToList()); - - public ValueTask SaveAsync(VexProvider provider, CancellationToken cancellationToken, IClientSessionHandle? session = null) - { - _providers[provider.Id] = provider; - return ValueTask.CompletedTask; - } - } - - private sealed class NoopNormalizerRouter : IVexNormalizerRouter - { - public ValueTask<VexClaimBatch> NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken) - => ValueTask.FromResult(new VexClaimBatch(document, ImmutableArray<VexClaim>.Empty, ImmutableDictionary<string, string>.Empty)); - } - - private sealed class StubNormalizerRouter : IVexNormalizerRouter - { - private readonly ImmutableArray<VexClaim> _claims; - - public StubNormalizerRouter(IEnumerable<VexClaim> claims) - { - _claims = claims.ToImmutableArray(); - } - - public int CallCount { get; private set; } - - public ValueTask<VexClaimBatch> NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken) - { - CallCount++; - return ValueTask.FromResult(new VexClaimBatch(document, _claims, ImmutableDictionary<string, string>.Empty)); - } - } - - private sealed class NoopSignatureVerifier : IVexSignatureVerifier - { - public ValueTask<VexSignatureMetadata?> VerifyAsync(VexRawDocument document, CancellationToken cancellationToken) - => ValueTask.FromResult<VexSignatureMetadata?>(null); - } - - private sealed class InMemoryStateRepository : IVexConnectorStateRepository - { - private readonly ConcurrentDictionary<string, VexConnectorState> _states = new(StringComparer.Ordinal); - - public VexConnectorState? Get(string connectorId) - => _states.TryGetValue(connectorId, out var state) ? 
state : null; - - public void Save(VexConnectorState state) - => _states[state.ConnectorId] = state; - - public ValueTask<VexConnectorState?> GetAsync(string connectorId, CancellationToken cancellationToken, IClientSessionHandle? session = null) - => ValueTask.FromResult(Get(connectorId)); - - public ValueTask SaveAsync(VexConnectorState state, CancellationToken cancellationToken, IClientSessionHandle? session = null) - { - Save(state); - return ValueTask.CompletedTask; - } - } - - private sealed class TestConnector : IVexConnector - { - private readonly Func<VexConnectorContext, CancellationToken, IAsyncEnumerable<VexRawDocument>> _fetch; - private readonly Exception? _normalizeException; - private readonly List<VexConnectorContext>? _capturedContexts; - - private TestConnector(string id, Func<VexConnectorContext, CancellationToken, IAsyncEnumerable<VexRawDocument>> fetch, Exception? normalizeException = null, List<VexConnectorContext>? capturedContexts = null) - { - Id = id; - _fetch = fetch; - _normalizeException = normalizeException; - _capturedContexts = capturedContexts; - } - - public static TestConnector Success(string id) => new(id, (_, _) => AsyncEnumerable.Empty<VexRawDocument>()); - - public static TestConnector SuccessWithCapture(string id) - { - var contexts = new List<VexConnectorContext>(); - return new TestConnector(id, (_, _) => AsyncEnumerable.Empty<VexRawDocument>(), capturedContexts: contexts); - } - - public static TestConnector WithDocuments(string id, params VexRawDocument[] documents) - { - return new TestConnector(id, (context, cancellationToken) => StreamAsync(context, documents, cancellationToken)); - } - - private static async IAsyncEnumerable<VexRawDocument> StreamAsync( - VexConnectorContext context, - IReadOnlyList<VexRawDocument> documents, - [EnumeratorCancellation] CancellationToken cancellationToken) - { - foreach (var document in documents) - { - await context.RawSink.StoreAsync(document, cancellationToken).ConfigureAwait(false); - yield return document; - } - } - - public static TestConnector Failure(string id, Exception exception) - { - return new TestConnector(id, (_, _) => new ThrowingAsyncEnumerable(exception)); - } - - public string Id { get; } - - public VexProviderKind Kind => VexProviderKind.Vendor; - - public bool ValidateInvoked { get; private set; } - - public bool FetchInvoked { get; private set; } - - public VexConnectorContext? LastContext => _capturedContexts is { Count: > 0 } ? 
_capturedContexts[^1] : null; - - public ValueTask ValidateAsync(VexConnectorSettings settings, CancellationToken cancellationToken) - { - ValidateInvoked = true; - return ValueTask.CompletedTask; - } - - public IAsyncEnumerable<VexRawDocument> FetchAsync(VexConnectorContext context, CancellationToken cancellationToken) - { - FetchInvoked = true; - _capturedContexts?.Add(context); - return _fetch(context, cancellationToken); - } - - public ValueTask<VexClaimBatch> NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken) - { - if (_normalizeException is not null) - { - throw _normalizeException; - } - - return ValueTask.FromResult(new VexClaimBatch(document, ImmutableArray<VexClaim>.Empty, ImmutableDictionary<string, string>.Empty)); - } - } - - private sealed class ThrowingAsyncEnumerable : IAsyncEnumerable<VexRawDocument>, IAsyncEnumerator<VexRawDocument> - { - private readonly Exception _exception; - - public ThrowingAsyncEnumerable(Exception exception) => _exception = exception; - - public IAsyncEnumerator<VexRawDocument> GetAsyncEnumerator(CancellationToken cancellationToken = default) => this; - - public ValueTask<bool> MoveNextAsync() => ValueTask.FromException<bool>(_exception); - - public VexRawDocument Current => throw new InvalidOperationException(); - - public ValueTask DisposeAsync() => ValueTask.CompletedTask; - } - - private sealed class ThrowingSignatureVerifier : IVexSignatureVerifier - { - public int Invocations { get; private set; } - - public ValueTask<VexSignatureMetadata?> VerifyAsync(VexRawDocument document, CancellationToken cancellationToken) - { - Invocations++; - var violation = AocViolation.Create( - AocViolationCode.SignatureInvalid, - "/upstream/signature", - "Synthetic verifier failure."); - throw new ExcititorAocGuardException(AocGuardResult.FromViolations(new[] { violation })); - } - } - - private sealed class RecordingSignatureVerifier : IVexSignatureVerifier - { - private readonly VexSignatureMetadata? _result; - - public RecordingSignatureVerifier(VexSignatureMetadata? 
result) => _result = result; - - public int Invocations { get; private set; } - - public ValueTask<VexSignatureMetadata?> VerifyAsync(VexRawDocument document, CancellationToken cancellationToken) - { - Invocations++; - return ValueTask.FromResult(_result); - } - } - - private sealed class StubAttestationVerifier : IVexAttestationVerifier - { - private readonly bool _isValid; - private readonly ImmutableDictionary<string, string> _diagnostics; - - public StubAttestationVerifier(bool isValid, ImmutableDictionary<string, string> diagnostics) - { - _isValid = isValid; - _diagnostics = diagnostics; - } - - public int Invocations { get; private set; } - - public ValueTask<VexAttestationVerification> VerifyAsync(VexAttestationVerificationRequest request, CancellationToken cancellationToken) - { - Invocations++; - return ValueTask.FromResult(new VexAttestationVerification(_isValid, _diagnostics)); - } - } - - private static VexRawDocument CreateAttestationRawDocument(DateTimeOffset observedAt) - { - var predicate = new VexAttestationPredicate( - "export-id", - "query-signature", - "sha256", - "abcd1234", - VexExportFormat.Json, - observedAt, - new[] { "provider-a" }, - ImmutableDictionary<string, string>.Empty); - - var statement = new VexInTotoStatement( - VexInTotoStatement.InTotoType, - "https://stella-ops.org/attestations/vex-export", - new[] { new VexInTotoSubject("export-id", new Dictionary<string, string> { { "sha256", "abcd1234" } }) }, - predicate); - - var serializerOptions = new JsonSerializerOptions - { - PropertyNamingPolicy = JsonNamingPolicy.CamelCase, - DefaultIgnoreCondition = JsonIgnoreCondition.Never, - Converters = { new JsonStringEnumConverter(JsonNamingPolicy.CamelCase) }, - }; - - var payloadBytes = JsonSerializer.SerializeToUtf8Bytes(statement, serializerOptions); - var envelope = new DsseEnvelope( - Convert.ToBase64String(payloadBytes), - "application/vnd.in-toto+json", - new[] { new DsseSignature("deadbeef", "sig-key") }); - - var envelopeJson = JsonSerializer.Serialize( - envelope, - new JsonSerializerOptions - { - PropertyNamingPolicy = JsonNamingPolicy.CamelCase, - DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, - }); - - var contentBytes = Encoding.UTF8.GetBytes(envelopeJson); - - return new VexRawDocument( - "provider-a", - VexDocumentFormat.OciAttestation, - new Uri("https://example.org/vex-attestation.json"), - observedAt, - ComputeDigest(contentBytes), - contentBytes, - ImmutableDictionary<string, string>.Empty); - } - - private static string ComputeDigest(ReadOnlySpan<byte> content) - { - Span<byte> buffer = stackalloc byte[32]; - if (!SHA256.TryHashData(content, buffer, out _)) - { - var hash = SHA256.HashData(content.ToArray()); - return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant(); - } - - return "sha256:" + Convert.ToHexString(buffer).ToLowerInvariant(); - } -} +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Linq; +using System.Security.Cryptography; +using System.Text; +using System.Text.Json; +using System.Text.Json.Serialization; +using FluentAssertions; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using MongoDB.Driver; +using StellaOps.Plugin; +using StellaOps.Excititor.Attestation.Dsse; +using StellaOps.Excititor.Attestation.Models; +using StellaOps.Excititor.Attestation.Verification; +using StellaOps.Excititor.Connectors.Abstractions; +using 
StellaOps.Excititor.Core; +using StellaOps.Excititor.Core.Aoc; +using StellaOps.Excititor.Storage.Mongo; +using StellaOps.Excititor.Worker.Options; +using StellaOps.Excititor.Worker.Scheduling; +using StellaOps.Excititor.Worker.Signature; +using StellaOps.Aoc; +using Xunit; +using System.Runtime.CompilerServices; + +namespace StellaOps.Excititor.Worker.Tests; + +public sealed class DefaultVexProviderRunnerTests +{ + private static readonly VexConnectorSettings EmptySettings = VexConnectorSettings.Empty; + + [Fact] + public async Task RunAsync_Skips_WhenNextEligibleRunInFuture() + { + var time = new FixedTimeProvider(new DateTimeOffset(2025, 10, 21, 15, 0, 0, TimeSpan.Zero)); + var connector = TestConnector.Success("excititor:test"); + var stateRepository = new InMemoryStateRepository(); + stateRepository.Save(new VexConnectorState( + "excititor:test", + LastUpdated: null, + DocumentDigests: ImmutableArray<string>.Empty, + ResumeTokens: ImmutableDictionary<string, string>.Empty, + LastSuccessAt: null, + FailureCount: 1, + NextEligibleRun: time.GetUtcNow().AddHours(1), + LastFailureReason: "previous failure")); + + var services = CreateServiceProvider(connector, stateRepository); + var runner = CreateRunner(services, time, options => + { + options.Retry.BaseDelay = TimeSpan.FromMinutes(5); + options.Retry.MaxDelay = TimeSpan.FromMinutes(30); + options.Retry.JitterRatio = 0; + }); + + await runner.RunAsync(new VexWorkerSchedule(connector.Id, TimeSpan.FromMinutes(10), TimeSpan.Zero, EmptySettings), CancellationToken.None); + + connector.FetchInvoked.Should().BeFalse(); + var state = stateRepository.Get("excititor:test"); + state.Should().NotBeNull(); + state!.FailureCount.Should().Be(1); + state.NextEligibleRun.Should().Be(time.GetUtcNow().AddHours(1)); + } + + [Fact] + public async Task RunAsync_Success_ResetsFailureCounters() + { + var now = new DateTimeOffset(2025, 10, 21, 16, 0, 0, TimeSpan.Zero); + var time = new FixedTimeProvider(now); + var connector = TestConnector.Success("excititor:test"); + var stateRepository = new InMemoryStateRepository(); + stateRepository.Save(new VexConnectorState( + "excititor:test", + LastUpdated: now.AddDays(-1), + DocumentDigests: ImmutableArray<string>.Empty, + ResumeTokens: ImmutableDictionary<string, string>.Empty, + LastSuccessAt: now.AddHours(-4), + FailureCount: 2, + NextEligibleRun: null, + LastFailureReason: "failure")); + + var services = CreateServiceProvider(connector, stateRepository); + var runner = CreateRunner(services, time, options => + { + options.Retry.BaseDelay = TimeSpan.FromMinutes(2); + options.Retry.MaxDelay = TimeSpan.FromMinutes(30); + options.Retry.JitterRatio = 0; + }); + + await runner.RunAsync(new VexWorkerSchedule(connector.Id, TimeSpan.FromMinutes(10), TimeSpan.Zero, EmptySettings), CancellationToken.None); + + connector.FetchInvoked.Should().BeTrue(); + var state = stateRepository.Get("excititor:test"); + state.Should().NotBeNull(); + state!.FailureCount.Should().Be(0); + state.NextEligibleRun.Should().BeNull(); + state.LastFailureReason.Should().BeNull(); + state.LastSuccessAt.Should().Be(now); + } + + [Fact] + public async Task RunAsync_UsesStoredResumeTokens() + { + var now = new DateTimeOffset(2025, 10, 21, 18, 0, 0, TimeSpan.Zero); + var time = new FixedTimeProvider(now); + var resumeTokens = ImmutableDictionary<string, string>.Empty + .Add("cursor", "abc123"); + var stateRepository = new InMemoryStateRepository(); + stateRepository.Save(new VexConnectorState( + "excititor:resume", + LastUpdated: now.AddHours(-6), + 
DocumentDigests: ImmutableArray<string>.Empty, + ResumeTokens: resumeTokens, + LastSuccessAt: now.AddHours(-7), + FailureCount: 0, + NextEligibleRun: null, + LastFailureReason: null)); + + var connector = TestConnector.SuccessWithCapture("excititor:resume"); + var services = CreateServiceProvider(connector, stateRepository); + var runner = CreateRunner(services, time, options => + { + options.Retry.BaseDelay = TimeSpan.FromMinutes(2); + options.Retry.MaxDelay = TimeSpan.FromMinutes(10); + options.Retry.JitterRatio = 0; + }); + + await runner.RunAsync(new VexWorkerSchedule(connector.Id, TimeSpan.FromMinutes(10), TimeSpan.Zero, EmptySettings), CancellationToken.None); + + connector.LastContext.Should().NotBeNull(); + connector.LastContext!.Since.Should().Be(now.AddHours(-6)); + connector.LastContext.ResumeTokens.Should().BeEquivalentTo(resumeTokens); + } + +[Fact] + public async Task RunAsync_SchedulesRefresh_ForUniqueClaims() + { + var now = new DateTimeOffset(2025, 10, 21, 19, 0, 0, TimeSpan.Zero); + var time = new FixedTimeProvider(now); + var rawDocument = new VexRawDocument( + "provider-a", + VexDocumentFormat.Csaf, + new Uri("https://example.org/vex.json"), + now, + "sha256:raw", + ReadOnlyMemory<byte>.Empty, + ImmutableDictionary<string, string>.Empty); + + var claimDocument = new VexClaimDocument( + VexDocumentFormat.Csaf, + "sha256:claim", + new Uri("https://example.org/vex.json")); + + var primaryProduct = new VexProduct("pkg:test/app", "Test App", componentIdentifiers: new[] { "fingerprint:base" }); + var secondaryProduct = new VexProduct("pkg:test/other", "Other App", componentIdentifiers: new[] { "fingerprint:other" }); + + var claims = new[] + { + new VexClaim("CVE-2025-0001", "provider-a", primaryProduct, VexClaimStatus.Affected, claimDocument, now.AddHours(-3), now.AddHours(-2)), + new VexClaim("CVE-2025-0001", "provider-b", primaryProduct, VexClaimStatus.NotAffected, claimDocument, now.AddHours(-3), now.AddHours(-2)), + new VexClaim("CVE-2025-0002", "provider-a", secondaryProduct, VexClaimStatus.Affected, claimDocument, now.AddHours(-2), now.AddHours(-1)), + }; + + var connector = TestConnector.WithDocuments("excititor:test", rawDocument); + var stateRepository = new InMemoryStateRepository(); + var normalizer = new StubNormalizerRouter(claims); + var services = CreateServiceProvider(connector, stateRepository, normalizer); + var runner = CreateRunner(services, time, options => + { + options.Retry.BaseDelay = TimeSpan.FromMinutes(1); + options.Retry.MaxDelay = TimeSpan.FromMinutes(5); + options.Retry.JitterRatio = 0; + }); + + await runner.RunAsync(new VexWorkerSchedule(connector.Id, TimeSpan.FromMinutes(10), TimeSpan.Zero, EmptySettings), CancellationToken.None); + + normalizer.CallCount.Should().Be(0); + } + + [Fact] + public async Task RunAsync_WhenSignatureVerifierFails_PropagatesException() + { + var now = new DateTimeOffset(2025, 10, 21, 20, 0, 0, TimeSpan.Zero); + var time = new FixedTimeProvider(now); + var content = Encoding.UTF8.GetBytes("{\"id\":\"sig\"}"); + var digest = ComputeDigest(content); + var rawDocument = new VexRawDocument( + "provider-a", + VexDocumentFormat.Csaf, + new Uri("https://example.org/vex.json"), + now, + digest, + content, + ImmutableDictionary<string, string>.Empty); + + var connector = TestConnector.WithDocuments("excititor:test", rawDocument); + var stateRepository = new InMemoryStateRepository(); + var failingVerifier = new ThrowingSignatureVerifier(); + var rawStore = new NoopRawStore(); + var services = CreateServiceProvider( + 
connector, + stateRepository, + normalizerRouter: null, + signatureVerifier: failingVerifier, + rawStore: rawStore); + + var runner = CreateRunner(services, time, options => + { + options.Retry.BaseDelay = TimeSpan.FromMinutes(1); + options.Retry.MaxDelay = TimeSpan.FromMinutes(5); + options.Retry.JitterRatio = 0; + }); + + await Assert.ThrowsAsync<ExcititorAocGuardException>(async () => + await runner.RunAsync(new VexWorkerSchedule(connector.Id, TimeSpan.FromMinutes(10), TimeSpan.Zero, EmptySettings), CancellationToken.None).AsTask()); + + failingVerifier.Invocations.Should().Be(1); + rawStore.StoreCallCount.Should().Be(0); + } + + [Fact] + public async Task RunAsync_EnrichesMetadataWithSignatureResult() + { + var now = new DateTimeOffset(2025, 10, 21, 21, 0, 0, TimeSpan.Zero); + var time = new FixedTimeProvider(now); + var content = Encoding.UTF8.GetBytes("{\"id\":\"sig\"}"); + var digest = ComputeDigest(content); + var document = new VexRawDocument( + "provider-a", + VexDocumentFormat.OciAttestation, + new Uri("https://example.org/attest.json"), + now, + digest, + content, + ImmutableDictionary<string, string>.Empty); + + var signatureMetadata = new VexSignatureMetadata( + "cosign", + subject: "subject", + issuer: "issuer", + keyId: "kid", + verifiedAt: now, + transparencyLogReference: "rekor://entry"); + + var signatureVerifier = new RecordingSignatureVerifier(signatureMetadata); + var rawStore = new NoopRawStore(); + var connector = TestConnector.WithDocuments("excititor:test", document); + var stateRepository = new InMemoryStateRepository(); + var services = CreateServiceProvider( + connector, + stateRepository, + normalizerRouter: null, + signatureVerifier: signatureVerifier, + rawStore: rawStore); + + var runner = CreateRunner(services, time, options => + { + options.Retry.BaseDelay = TimeSpan.FromMinutes(1); + options.Retry.MaxDelay = TimeSpan.FromMinutes(5); + options.Retry.JitterRatio = 0; + }); + + await runner.RunAsync(new VexWorkerSchedule(connector.Id, TimeSpan.FromMinutes(10), TimeSpan.Zero, EmptySettings), CancellationToken.None); + + rawStore.StoreCallCount.Should().Be(1); + rawStore.LastStoredDocument.Should().NotBeNull(); + rawStore.LastStoredDocument!.Metadata.Should().ContainKey("vex.signature.type"); + rawStore.LastStoredDocument.Metadata["vex.signature.type"].Should().Be("cosign"); + rawStore.LastStoredDocument.Metadata["signature.present"].Should().Be("true"); + rawStore.LastStoredDocument.Metadata["signature.verified"].Should().Be("true"); + signatureVerifier.Invocations.Should().Be(1); + } + + [Fact] + public async Task RunAsync_Attestation_StoresVerifierMetadata() + { + var now = new DateTimeOffset(2025, 10, 28, 7, 0, 0, TimeSpan.Zero); + var time = new FixedTimeProvider(now); + var document = CreateAttestationRawDocument(now); + + var diagnostics = ImmutableDictionary<string, string>.Empty + .Add("verification.issuer", "issuer-from-verifier") + .Add("verification.keyId", "key-from-verifier"); + + var attestationVerifier = new StubAttestationVerifier(true, diagnostics); + var signatureVerifier = new WorkerSignatureVerifier( + NullLogger<WorkerSignatureVerifier>.Instance, + attestationVerifier, + time); + + var connector = TestConnector.WithDocuments("excititor:test", document); + var stateRepository = new InMemoryStateRepository(); + var rawStore = new NoopRawStore(); + + var services = CreateServiceProvider( + connector, + stateRepository, + normalizerRouter: null, + signatureVerifier: signatureVerifier, + rawStore: rawStore); + + var runner = 
CreateRunner(services, time, options => + { + options.Retry.BaseDelay = TimeSpan.FromMinutes(1); + options.Retry.MaxDelay = TimeSpan.FromMinutes(5); + options.Retry.JitterRatio = 0; + }); + + await runner.RunAsync(new VexWorkerSchedule(connector.Id, TimeSpan.FromMinutes(10), TimeSpan.Zero, EmptySettings), CancellationToken.None); + + rawStore.StoreCallCount.Should().Be(1); + rawStore.LastStoredDocument.Should().NotBeNull(); + var metadata = rawStore.LastStoredDocument!.Metadata; + metadata.Should().ContainKey("vex.signature.type"); + metadata["vex.signature.type"].Should().Be("cosign"); + metadata["vex.signature.issuer"].Should().Be("issuer-from-verifier"); + metadata["vex.signature.keyId"].Should().Be("key-from-verifier"); + metadata["signature.present"].Should().Be("true"); + metadata["signature.verified"].Should().Be("true"); + metadata.Should().ContainKey("vex.signature.verifiedAt"); + metadata["vex.signature.verifiedAt"].Should().Be(now.ToString("O")); + attestationVerifier.Invocations.Should().Be(1); + } + +[Fact] + public async Task RunAsync_Failure_AppliesBackoff() + { + var now = new DateTimeOffset(2025, 10, 21, 17, 0, 0, TimeSpan.Zero); + var time = new FixedTimeProvider(now); + var connector = TestConnector.Failure("excititor:test", new InvalidOperationException("boom")); + var stateRepository = new InMemoryStateRepository(); + stateRepository.Save(new VexConnectorState( + "excititor:test", + LastUpdated: now.AddDays(-2), + DocumentDigests: ImmutableArray<string>.Empty, + ResumeTokens: ImmutableDictionary<string, string>.Empty, + LastSuccessAt: now.AddDays(-1), + FailureCount: 1, + NextEligibleRun: null, + LastFailureReason: null)); + + var services = CreateServiceProvider(connector, stateRepository); + var runner = CreateRunner(services, time, options => + { + options.Retry.BaseDelay = TimeSpan.FromMinutes(5); + options.Retry.MaxDelay = TimeSpan.FromMinutes(60); + options.Retry.FailureThreshold = 3; + options.Retry.QuarantineDuration = TimeSpan.FromHours(12); + options.Retry.JitterRatio = 0; + }); + + await Assert.ThrowsAsync<InvalidOperationException>(async () => await runner.RunAsync(new VexWorkerSchedule(connector.Id, TimeSpan.FromMinutes(10), TimeSpan.Zero, EmptySettings), CancellationToken.None).AsTask()); + + var state = stateRepository.Get("excititor:test"); + state.Should().NotBeNull(); + state!.FailureCount.Should().Be(2); + state.LastFailureReason.Should().Be("boom"); + state.NextEligibleRun.Should().Be(now + TimeSpan.FromMinutes(10)); + } + + private static ServiceProvider CreateServiceProvider( + IVexConnector connector, + InMemoryStateRepository stateRepository, + IVexNormalizerRouter? normalizerRouter = null, + IVexSignatureVerifier? signatureVerifier = null, + NoopRawStore? rawStore = null) + { + var services = new ServiceCollection(); + services.AddSingleton(connector); + rawStore ??= new NoopRawStore(); + services.AddSingleton(rawStore); + services.AddSingleton<IVexRawStore>(sp => rawStore); + services.AddSingleton<IVexClaimStore>(new NoopClaimStore()); + services.AddSingleton<IVexProviderStore>(new NoopProviderStore()); + services.AddSingleton<IVexConnectorStateRepository>(stateRepository); + services.AddSingleton<IVexNormalizerRouter>(normalizerRouter ?? new NoopNormalizerRouter()); + services.AddSingleton<IVexSignatureVerifier>(signatureVerifier ?? 
new NoopSignatureVerifier()); + return services.BuildServiceProvider(); + } + + private static DefaultVexProviderRunner CreateRunner( + IServiceProvider serviceProvider, + TimeProvider timeProvider, + Action<VexWorkerOptions> configure) + { + var options = new VexWorkerOptions(); + configure(options); + return new DefaultVexProviderRunner( + serviceProvider, + new PluginCatalog(), + NullLogger<DefaultVexProviderRunner>.Instance, + timeProvider, + Microsoft.Extensions.Options.Options.Create(options)); + } + + private sealed class FixedTimeProvider : TimeProvider + { + private DateTimeOffset _utcNow; + + public FixedTimeProvider(DateTimeOffset utcNow) => _utcNow = utcNow; + + public override DateTimeOffset GetUtcNow() => _utcNow; + + public void Advance(TimeSpan delta) => _utcNow += delta; + } + + private sealed class NoopRawStore : IVexRawStore + { + public int StoreCallCount { get; private set; } + public VexRawDocument? LastStoredDocument { get; private set; } + + public ValueTask StoreAsync(VexRawDocument document, CancellationToken cancellationToken) + { + StoreCallCount++; + LastStoredDocument = document; + return ValueTask.CompletedTask; + } + + public ValueTask StoreAsync(VexRawDocument document, CancellationToken cancellationToken, IClientSessionHandle? session) + { + StoreCallCount++; + LastStoredDocument = document; + return ValueTask.CompletedTask; + } + + public ValueTask<VexRawDocument?> FindByDigestAsync(string digest, CancellationToken cancellationToken, IClientSessionHandle? session = null) + => ValueTask.FromResult<VexRawDocument?>(null); + } + + private sealed class NoopClaimStore : IVexClaimStore + { + public ValueTask AppendAsync(IEnumerable<VexClaim> claims, DateTimeOffset observedAt, CancellationToken cancellationToken, IClientSessionHandle? session = null) + => ValueTask.CompletedTask; + + public ValueTask<IReadOnlyCollection<VexClaim>> FindAsync(string vulnerabilityId, string productKey, DateTimeOffset? since, CancellationToken cancellationToken, IClientSessionHandle? session = null) + => ValueTask.FromResult<IReadOnlyCollection<VexClaim>>(Array.Empty<VexClaim>()); + } + + private sealed class NoopProviderStore : IVexProviderStore + { + private readonly ConcurrentDictionary<string, VexProvider> _providers = new(StringComparer.Ordinal); + + public ValueTask<VexProvider?> FindAsync(string id, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + _providers.TryGetValue(id, out var provider); + return ValueTask.FromResult<VexProvider?>(provider); + } + + public ValueTask<IReadOnlyCollection<VexProvider>> ListAsync(CancellationToken cancellationToken, IClientSessionHandle? session = null) + => ValueTask.FromResult<IReadOnlyCollection<VexProvider>>(_providers.Values.ToList()); + + public ValueTask SaveAsync(VexProvider provider, CancellationToken cancellationToken, IClientSessionHandle? 
session = null) + { + _providers[provider.Id] = provider; + return ValueTask.CompletedTask; + } + } + + private sealed class NoopNormalizerRouter : IVexNormalizerRouter + { + public ValueTask<VexClaimBatch> NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken) + => ValueTask.FromResult(new VexClaimBatch(document, ImmutableArray<VexClaim>.Empty, ImmutableDictionary<string, string>.Empty)); + } + + private sealed class StubNormalizerRouter : IVexNormalizerRouter + { + private readonly ImmutableArray<VexClaim> _claims; + + public StubNormalizerRouter(IEnumerable<VexClaim> claims) + { + _claims = claims.ToImmutableArray(); + } + + public int CallCount { get; private set; } + + public ValueTask<VexClaimBatch> NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken) + { + CallCount++; + return ValueTask.FromResult(new VexClaimBatch(document, _claims, ImmutableDictionary<string, string>.Empty)); + } + } + + private sealed class NoopSignatureVerifier : IVexSignatureVerifier + { + public ValueTask<VexSignatureMetadata?> VerifyAsync(VexRawDocument document, CancellationToken cancellationToken) + => ValueTask.FromResult<VexSignatureMetadata?>(null); + } + + private sealed class InMemoryStateRepository : IVexConnectorStateRepository + { + private readonly ConcurrentDictionary<string, VexConnectorState> _states = new(StringComparer.Ordinal); + + public VexConnectorState? Get(string connectorId) + => _states.TryGetValue(connectorId, out var state) ? state : null; + + public void Save(VexConnectorState state) + => _states[state.ConnectorId] = state; + + public ValueTask<VexConnectorState?> GetAsync(string connectorId, CancellationToken cancellationToken, IClientSessionHandle? session = null) + => ValueTask.FromResult(Get(connectorId)); + + public ValueTask SaveAsync(VexConnectorState state, CancellationToken cancellationToken, IClientSessionHandle? session = null) + { + Save(state); + return ValueTask.CompletedTask; + } + } + + private sealed class TestConnector : IVexConnector + { + private readonly Func<VexConnectorContext, CancellationToken, IAsyncEnumerable<VexRawDocument>> _fetch; + private readonly Exception? _normalizeException; + private readonly List<VexConnectorContext>? _capturedContexts; + + private TestConnector(string id, Func<VexConnectorContext, CancellationToken, IAsyncEnumerable<VexRawDocument>> fetch, Exception? normalizeException = null, List<VexConnectorContext>? 
capturedContexts = null) + { + Id = id; + _fetch = fetch; + _normalizeException = normalizeException; + _capturedContexts = capturedContexts; + } + + public static TestConnector Success(string id) => new(id, (_, _) => AsyncEnumerable.Empty<VexRawDocument>()); + + public static TestConnector SuccessWithCapture(string id) + { + var contexts = new List<VexConnectorContext>(); + return new TestConnector(id, (_, _) => AsyncEnumerable.Empty<VexRawDocument>(), capturedContexts: contexts); + } + + public static TestConnector WithDocuments(string id, params VexRawDocument[] documents) + { + return new TestConnector(id, (context, cancellationToken) => StreamAsync(context, documents, cancellationToken)); + } + + private static async IAsyncEnumerable<VexRawDocument> StreamAsync( + VexConnectorContext context, + IReadOnlyList<VexRawDocument> documents, + [EnumeratorCancellation] CancellationToken cancellationToken) + { + foreach (var document in documents) + { + await context.RawSink.StoreAsync(document, cancellationToken).ConfigureAwait(false); + yield return document; + } + } + + public static TestConnector Failure(string id, Exception exception) + { + return new TestConnector(id, (_, _) => new ThrowingAsyncEnumerable(exception)); + } + + public string Id { get; } + + public VexProviderKind Kind => VexProviderKind.Vendor; + + public bool ValidateInvoked { get; private set; } + + public bool FetchInvoked { get; private set; } + + public VexConnectorContext? LastContext => _capturedContexts is { Count: > 0 } ? _capturedContexts[^1] : null; + + public ValueTask ValidateAsync(VexConnectorSettings settings, CancellationToken cancellationToken) + { + ValidateInvoked = true; + return ValueTask.CompletedTask; + } + + public IAsyncEnumerable<VexRawDocument> FetchAsync(VexConnectorContext context, CancellationToken cancellationToken) + { + FetchInvoked = true; + _capturedContexts?.Add(context); + return _fetch(context, cancellationToken); + } + + public ValueTask<VexClaimBatch> NormalizeAsync(VexRawDocument document, CancellationToken cancellationToken) + { + if (_normalizeException is not null) + { + throw _normalizeException; + } + + return ValueTask.FromResult(new VexClaimBatch(document, ImmutableArray<VexClaim>.Empty, ImmutableDictionary<string, string>.Empty)); + } + } + + private sealed class ThrowingAsyncEnumerable : IAsyncEnumerable<VexRawDocument>, IAsyncEnumerator<VexRawDocument> + { + private readonly Exception _exception; + + public ThrowingAsyncEnumerable(Exception exception) => _exception = exception; + + public IAsyncEnumerator<VexRawDocument> GetAsyncEnumerator(CancellationToken cancellationToken = default) => this; + + public ValueTask<bool> MoveNextAsync() => ValueTask.FromException<bool>(_exception); + + public VexRawDocument Current => throw new InvalidOperationException(); + + public ValueTask DisposeAsync() => ValueTask.CompletedTask; + } + + private sealed class ThrowingSignatureVerifier : IVexSignatureVerifier + { + public int Invocations { get; private set; } + + public ValueTask<VexSignatureMetadata?> VerifyAsync(VexRawDocument document, CancellationToken cancellationToken) + { + Invocations++; + var violation = AocViolation.Create( + AocViolationCode.SignatureInvalid, + "/upstream/signature", + "Synthetic verifier failure."); + throw new ExcititorAocGuardException(AocGuardResult.FromViolations(new[] { violation })); + } + } + + private sealed class RecordingSignatureVerifier : IVexSignatureVerifier + { + private readonly VexSignatureMetadata? 
_result; + + public RecordingSignatureVerifier(VexSignatureMetadata? result) => _result = result; + + public int Invocations { get; private set; } + + public ValueTask<VexSignatureMetadata?> VerifyAsync(VexRawDocument document, CancellationToken cancellationToken) + { + Invocations++; + return ValueTask.FromResult(_result); + } + } + + private sealed class StubAttestationVerifier : IVexAttestationVerifier + { + private readonly bool _isValid; + private readonly ImmutableDictionary<string, string> _diagnostics; + + public StubAttestationVerifier(bool isValid, ImmutableDictionary<string, string> diagnostics) + { + _isValid = isValid; + _diagnostics = diagnostics; + } + + public int Invocations { get; private set; } + + public ValueTask<VexAttestationVerification> VerifyAsync(VexAttestationVerificationRequest request, CancellationToken cancellationToken) + { + Invocations++; + return ValueTask.FromResult(new VexAttestationVerification(_isValid, _diagnostics)); + } + } + + private static VexRawDocument CreateAttestationRawDocument(DateTimeOffset observedAt) + { + var predicate = new VexAttestationPredicate( + "export-id", + "query-signature", + "sha256", + "abcd1234", + VexExportFormat.Json, + observedAt, + new[] { "provider-a" }, + ImmutableDictionary<string, string>.Empty); + + var statement = new VexInTotoStatement( + VexInTotoStatement.InTotoType, + "https://stella-ops.org/attestations/vex-export", + new[] { new VexInTotoSubject("export-id", new Dictionary<string, string> { { "sha256", "abcd1234" } }) }, + predicate); + + var serializerOptions = new JsonSerializerOptions + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + DefaultIgnoreCondition = JsonIgnoreCondition.Never, + Converters = { new JsonStringEnumConverter(JsonNamingPolicy.CamelCase) }, + }; + + var payloadBytes = JsonSerializer.SerializeToUtf8Bytes(statement, serializerOptions); + var envelope = new DsseEnvelope( + Convert.ToBase64String(payloadBytes), + "application/vnd.in-toto+json", + new[] { new DsseSignature("deadbeef", "sig-key") }); + + var envelopeJson = JsonSerializer.Serialize( + envelope, + new JsonSerializerOptions + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + }); + + var contentBytes = Encoding.UTF8.GetBytes(envelopeJson); + + return new VexRawDocument( + "provider-a", + VexDocumentFormat.OciAttestation, + new Uri("https://example.org/vex-attestation.json"), + observedAt, + ComputeDigest(contentBytes), + contentBytes, + ImmutableDictionary<string, string>.Empty); + } + + private static string ComputeDigest(ReadOnlySpan<byte> content) + { + Span<byte> buffer = stackalloc byte[32]; + if (!SHA256.TryHashData(content, buffer, out _)) + { + var hash = SHA256.HashData(content.ToArray()); + return "sha256:" + Convert.ToHexString(hash).ToLowerInvariant(); + } + + return "sha256:" + Convert.ToHexString(buffer).ToLowerInvariant(); + } +} diff --git a/src/StellaOps.Excititor.Worker.Tests/Signature/WorkerSignatureVerifierTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Worker.Tests/Signature/WorkerSignatureVerifierTests.cs similarity index 97% rename from src/StellaOps.Excititor.Worker.Tests/Signature/WorkerSignatureVerifierTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Worker.Tests/Signature/WorkerSignatureVerifierTests.cs index 6218b711..1261bcdc 100644 --- a/src/StellaOps.Excititor.Worker.Tests/Signature/WorkerSignatureVerifierTests.cs +++ 
b/src/Excititor/__Tests/StellaOps.Excititor.Worker.Tests/Signature/WorkerSignatureVerifierTests.cs @@ -1,229 +1,229 @@ -using System.Collections.Immutable; -using System.Security.Cryptography; -using System.Text; -using System.Text.Json; -using System.Text.Json.Serialization; -using FluentAssertions; -using Microsoft.Extensions.Logging.Abstractions; -using StellaOps.Aoc; -using StellaOps.Excititor.Attestation.Dsse; -using StellaOps.Excititor.Attestation.Models; -using StellaOps.Excititor.Attestation.Verification; -using StellaOps.Excititor.Core; -using StellaOps.Excititor.Core.Aoc; -using StellaOps.Excititor.Worker.Signature; -using Xunit; - -namespace StellaOps.Excititor.Worker.Tests.Signature; - -public sealed class WorkerSignatureVerifierTests -{ - [Fact] - public async Task VerifyAsync_ReturnsMetadata_WhenSignatureHintsPresent() - { - var content = Encoding.UTF8.GetBytes("{\"id\":\"1\"}"); - var digest = ComputeDigest(content); - var metadata = ImmutableDictionary<string, string>.Empty - .Add("tenant", "tenant-a") - .Add("vex.signature.type", "cosign") - .Add("vex.signature.subject", "subject") - .Add("vex.signature.issuer", "issuer") - .Add("vex.signature.keyId", "kid") - .Add("vex.signature.verifiedAt", DateTimeOffset.UtcNow.ToString("O")) - .Add("vex.signature.transparencyLogReference", "rekor://entry"); - - var document = new VexRawDocument( - "provider-a", - VexDocumentFormat.Csaf, - new Uri("https://example.org/vex.json"), - DateTimeOffset.UtcNow, - digest, - content, - metadata); - - var verifier = new WorkerSignatureVerifier(NullLogger<WorkerSignatureVerifier>.Instance); - - var result = await verifier.VerifyAsync(document, CancellationToken.None); - - result.Should().NotBeNull(); - result!.Type.Should().Be("cosign"); - result.Subject.Should().Be("subject"); - result.Issuer.Should().Be("issuer"); - result.KeyId.Should().Be("kid"); - result.TransparencyLogReference.Should().Be("rekor://entry"); - } - - [Fact] - public async Task VerifyAsync_Throws_WhenChecksumMismatch() - { - var content = Encoding.UTF8.GetBytes("{\"id\":\"1\"}"); - var metadata = ImmutableDictionary<string, string>.Empty; - var document = new VexRawDocument( - "provider-a", - VexDocumentFormat.CycloneDx, - new Uri("https://example.org/vex.json"), - DateTimeOffset.UtcNow, - "sha256:deadbeef", - content, - metadata); - - var verifier = new WorkerSignatureVerifier(NullLogger<WorkerSignatureVerifier>.Instance); - - var exception = await Assert.ThrowsAsync<ExcititorAocGuardException>(() => verifier.VerifyAsync(document, CancellationToken.None).AsTask()); - exception.PrimaryErrorCode.Should().Be("ERR_AOC_005"); - } - - [Fact] - public async Task VerifyAsync_Attestation_UsesVerifier() - { - var now = DateTimeOffset.UtcNow; - var (document, metadata) = CreateAttestationDocument(now, subject: "export-1", includeRekor: true); - - var attestationVerifier = new StubAttestationVerifier(true); - var verifier = new WorkerSignatureVerifier(NullLogger<WorkerSignatureVerifier>.Instance, attestationVerifier, TimeProvider.System); - - var result = await verifier.VerifyAsync(document with { Metadata = metadata }, CancellationToken.None); - - result.Should().NotBeNull(); - result!.Type.Should().Be("cosign"); - result.Subject.Should().Be("export-1"); - attestationVerifier.Invocations.Should().Be(1); - } - - [Fact] - public async Task VerifyAsync_AttestationThrows_WhenVerifierInvalid() - { - var now = DateTimeOffset.UtcNow; - var (document, metadata) = CreateAttestationDocument(now, subject: "export-2", includeRekor: true); - - var 
attestationVerifier = new StubAttestationVerifier(false); - var verifier = new WorkerSignatureVerifier(NullLogger<WorkerSignatureVerifier>.Instance, attestationVerifier, TimeProvider.System); - - await Assert.ThrowsAsync<ExcititorAocGuardException>(() => verifier.VerifyAsync(document with { Metadata = metadata }, CancellationToken.None).AsTask()); - attestationVerifier.Invocations.Should().Be(1); - } - - [Fact] - public async Task VerifyAsync_Attestation_UsesDiagnosticsWhenMetadataMissing() - { - var now = new DateTimeOffset(2025, 10, 28, 7, 0, 0, TimeSpan.Zero); - var (document, _) = CreateAttestationDocument(now, subject: "export-3", includeRekor: false); - - var diagnostics = ImmutableDictionary<string, string>.Empty - .Add("verification.issuer", "issuer-from-attestation") - .Add("verification.keyId", "kid-from-attestation"); - - var attestationVerifier = new StubAttestationVerifier(true, diagnostics); - var verifier = new WorkerSignatureVerifier( - NullLogger<WorkerSignatureVerifier>.Instance, - attestationVerifier, - new FixedTimeProvider(now)); - - var result = await verifier.VerifyAsync(document, CancellationToken.None); - - result.Should().NotBeNull(); - result!.Issuer.Should().Be("issuer-from-attestation"); - result.KeyId.Should().Be("kid-from-attestation"); - result.TransparencyLogReference.Should().BeNull(); - result.VerifiedAt.Should().Be(now); - attestationVerifier.Invocations.Should().Be(1); - } - - private static string ComputeDigest(ReadOnlySpan<byte> payload) - { - Span<byte> buffer = stackalloc byte[32]; - return SHA256.TryHashData(payload, buffer, out _) - ? "sha256:" + Convert.ToHexString(buffer).ToLowerInvariant() - : "sha256:" + Convert.ToHexString(SHA256.HashData(payload.ToArray())).ToLowerInvariant(); - } - - private static (VexRawDocument Document, ImmutableDictionary<string, string> Metadata) CreateAttestationDocument(DateTimeOffset createdAt, string subject, bool includeRekor) - { - var predicate = new VexAttestationPredicate( - subject, - "query=signature", - "sha256", - "abcd1234", - VexExportFormat.Json, - createdAt, - new[] { "provider-a" }, - ImmutableDictionary<string, string>.Empty); - - var statement = new VexInTotoStatement( - VexInTotoStatement.InTotoType, - "https://stella-ops.org/attestations/vex-export", - new[] { new VexInTotoSubject(subject, new Dictionary<string, string> { { "sha256", "abcd1234" } }) }, - predicate); - - var payloadBytes = JsonSerializer.SerializeToUtf8Bytes(statement, new JsonSerializerOptions - { - PropertyNamingPolicy = JsonNamingPolicy.CamelCase, - DefaultIgnoreCondition = JsonIgnoreCondition.Never, - Converters = { new JsonStringEnumConverter(JsonNamingPolicy.CamelCase) }, - }); - - var envelope = new DsseEnvelope( - Convert.ToBase64String(payloadBytes), - "application/vnd.in-toto+json", - new[] { new DsseSignature("deadbeef", "key-1") }); - - var envelopeJson = JsonSerializer.Serialize(envelope, new JsonSerializerOptions - { - PropertyNamingPolicy = JsonNamingPolicy.CamelCase, - DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, - }); - - var contentBytes = Encoding.UTF8.GetBytes(envelopeJson); - var metadataBuilder = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal); - metadataBuilder["tenant"] = "tenant-a"; - metadataBuilder["vex.signature.type"] = "cosign"; - metadataBuilder["vex.signature.verifiedAt"] = createdAt.ToString("O"); - if (includeRekor) - { - metadataBuilder["vex.signature.transparencyLogReference"] = "rekor://entry/123"; - } - - var document = new VexRawDocument( - 
"provider-a", - VexDocumentFormat.OciAttestation, - new Uri("https://example.org/attestation.json"), - createdAt, - ComputeDigest(contentBytes), - contentBytes, - ImmutableDictionary<string, string>.Empty); - - return (document, metadataBuilder.ToImmutable()); - } - - private sealed class StubAttestationVerifier : IVexAttestationVerifier - { - private readonly bool _isValid; - private readonly ImmutableDictionary<string, string> _diagnostics; - - public StubAttestationVerifier(bool isValid, ImmutableDictionary<string, string>? diagnostics = null) - { - _isValid = isValid; - _diagnostics = diagnostics ?? ImmutableDictionary<string, string>.Empty; - } - - public int Invocations { get; private set; } - - public ValueTask<VexAttestationVerification> VerifyAsync(VexAttestationVerificationRequest request, CancellationToken cancellationToken) - { - Invocations++; - return ValueTask.FromResult(new VexAttestationVerification(_isValid, _diagnostics)); - } - } - - private sealed class FixedTimeProvider : TimeProvider - { - private readonly DateTimeOffset _utcNow; - - public FixedTimeProvider(DateTimeOffset utcNow) - { - _utcNow = utcNow; - } - - public override DateTimeOffset GetUtcNow() => _utcNow; - } -} +using System.Collections.Immutable; +using System.Security.Cryptography; +using System.Text; +using System.Text.Json; +using System.Text.Json.Serialization; +using FluentAssertions; +using Microsoft.Extensions.Logging.Abstractions; +using StellaOps.Aoc; +using StellaOps.Excititor.Attestation.Dsse; +using StellaOps.Excititor.Attestation.Models; +using StellaOps.Excititor.Attestation.Verification; +using StellaOps.Excititor.Core; +using StellaOps.Excititor.Core.Aoc; +using StellaOps.Excititor.Worker.Signature; +using Xunit; + +namespace StellaOps.Excititor.Worker.Tests.Signature; + +public sealed class WorkerSignatureVerifierTests +{ + [Fact] + public async Task VerifyAsync_ReturnsMetadata_WhenSignatureHintsPresent() + { + var content = Encoding.UTF8.GetBytes("{\"id\":\"1\"}"); + var digest = ComputeDigest(content); + var metadata = ImmutableDictionary<string, string>.Empty + .Add("tenant", "tenant-a") + .Add("vex.signature.type", "cosign") + .Add("vex.signature.subject", "subject") + .Add("vex.signature.issuer", "issuer") + .Add("vex.signature.keyId", "kid") + .Add("vex.signature.verifiedAt", DateTimeOffset.UtcNow.ToString("O")) + .Add("vex.signature.transparencyLogReference", "rekor://entry"); + + var document = new VexRawDocument( + "provider-a", + VexDocumentFormat.Csaf, + new Uri("https://example.org/vex.json"), + DateTimeOffset.UtcNow, + digest, + content, + metadata); + + var verifier = new WorkerSignatureVerifier(NullLogger<WorkerSignatureVerifier>.Instance); + + var result = await verifier.VerifyAsync(document, CancellationToken.None); + + result.Should().NotBeNull(); + result!.Type.Should().Be("cosign"); + result.Subject.Should().Be("subject"); + result.Issuer.Should().Be("issuer"); + result.KeyId.Should().Be("kid"); + result.TransparencyLogReference.Should().Be("rekor://entry"); + } + + [Fact] + public async Task VerifyAsync_Throws_WhenChecksumMismatch() + { + var content = Encoding.UTF8.GetBytes("{\"id\":\"1\"}"); + var metadata = ImmutableDictionary<string, string>.Empty; + var document = new VexRawDocument( + "provider-a", + VexDocumentFormat.CycloneDx, + new Uri("https://example.org/vex.json"), + DateTimeOffset.UtcNow, + "sha256:deadbeef", + content, + metadata); + + var verifier = new WorkerSignatureVerifier(NullLogger<WorkerSignatureVerifier>.Instance); + + var exception = await 
Assert.ThrowsAsync<ExcititorAocGuardException>(() => verifier.VerifyAsync(document, CancellationToken.None).AsTask()); + exception.PrimaryErrorCode.Should().Be("ERR_AOC_005"); + } + + [Fact] + public async Task VerifyAsync_Attestation_UsesVerifier() + { + var now = DateTimeOffset.UtcNow; + var (document, metadata) = CreateAttestationDocument(now, subject: "export-1", includeRekor: true); + + var attestationVerifier = new StubAttestationVerifier(true); + var verifier = new WorkerSignatureVerifier(NullLogger<WorkerSignatureVerifier>.Instance, attestationVerifier, TimeProvider.System); + + var result = await verifier.VerifyAsync(document with { Metadata = metadata }, CancellationToken.None); + + result.Should().NotBeNull(); + result!.Type.Should().Be("cosign"); + result.Subject.Should().Be("export-1"); + attestationVerifier.Invocations.Should().Be(1); + } + + [Fact] + public async Task VerifyAsync_AttestationThrows_WhenVerifierInvalid() + { + var now = DateTimeOffset.UtcNow; + var (document, metadata) = CreateAttestationDocument(now, subject: "export-2", includeRekor: true); + + var attestationVerifier = new StubAttestationVerifier(false); + var verifier = new WorkerSignatureVerifier(NullLogger<WorkerSignatureVerifier>.Instance, attestationVerifier, TimeProvider.System); + + await Assert.ThrowsAsync<ExcititorAocGuardException>(() => verifier.VerifyAsync(document with { Metadata = metadata }, CancellationToken.None).AsTask()); + attestationVerifier.Invocations.Should().Be(1); + } + + [Fact] + public async Task VerifyAsync_Attestation_UsesDiagnosticsWhenMetadataMissing() + { + var now = new DateTimeOffset(2025, 10, 28, 7, 0, 0, TimeSpan.Zero); + var (document, _) = CreateAttestationDocument(now, subject: "export-3", includeRekor: false); + + var diagnostics = ImmutableDictionary<string, string>.Empty + .Add("verification.issuer", "issuer-from-attestation") + .Add("verification.keyId", "kid-from-attestation"); + + var attestationVerifier = new StubAttestationVerifier(true, diagnostics); + var verifier = new WorkerSignatureVerifier( + NullLogger<WorkerSignatureVerifier>.Instance, + attestationVerifier, + new FixedTimeProvider(now)); + + var result = await verifier.VerifyAsync(document, CancellationToken.None); + + result.Should().NotBeNull(); + result!.Issuer.Should().Be("issuer-from-attestation"); + result.KeyId.Should().Be("kid-from-attestation"); + result.TransparencyLogReference.Should().BeNull(); + result.VerifiedAt.Should().Be(now); + attestationVerifier.Invocations.Should().Be(1); + } + + private static string ComputeDigest(ReadOnlySpan<byte> payload) + { + Span<byte> buffer = stackalloc byte[32]; + return SHA256.TryHashData(payload, buffer, out _) + ? 
"sha256:" + Convert.ToHexString(buffer).ToLowerInvariant() + : "sha256:" + Convert.ToHexString(SHA256.HashData(payload.ToArray())).ToLowerInvariant(); + } + + private static (VexRawDocument Document, ImmutableDictionary<string, string> Metadata) CreateAttestationDocument(DateTimeOffset createdAt, string subject, bool includeRekor) + { + var predicate = new VexAttestationPredicate( + subject, + "query=signature", + "sha256", + "abcd1234", + VexExportFormat.Json, + createdAt, + new[] { "provider-a" }, + ImmutableDictionary<string, string>.Empty); + + var statement = new VexInTotoStatement( + VexInTotoStatement.InTotoType, + "https://stella-ops.org/attestations/vex-export", + new[] { new VexInTotoSubject(subject, new Dictionary<string, string> { { "sha256", "abcd1234" } }) }, + predicate); + + var payloadBytes = JsonSerializer.SerializeToUtf8Bytes(statement, new JsonSerializerOptions + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + DefaultIgnoreCondition = JsonIgnoreCondition.Never, + Converters = { new JsonStringEnumConverter(JsonNamingPolicy.CamelCase) }, + }); + + var envelope = new DsseEnvelope( + Convert.ToBase64String(payloadBytes), + "application/vnd.in-toto+json", + new[] { new DsseSignature("deadbeef", "key-1") }); + + var envelopeJson = JsonSerializer.Serialize(envelope, new JsonSerializerOptions + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + }); + + var contentBytes = Encoding.UTF8.GetBytes(envelopeJson); + var metadataBuilder = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.Ordinal); + metadataBuilder["tenant"] = "tenant-a"; + metadataBuilder["vex.signature.type"] = "cosign"; + metadataBuilder["vex.signature.verifiedAt"] = createdAt.ToString("O"); + if (includeRekor) + { + metadataBuilder["vex.signature.transparencyLogReference"] = "rekor://entry/123"; + } + + var document = new VexRawDocument( + "provider-a", + VexDocumentFormat.OciAttestation, + new Uri("https://example.org/attestation.json"), + createdAt, + ComputeDigest(contentBytes), + contentBytes, + ImmutableDictionary<string, string>.Empty); + + return (document, metadataBuilder.ToImmutable()); + } + + private sealed class StubAttestationVerifier : IVexAttestationVerifier + { + private readonly bool _isValid; + private readonly ImmutableDictionary<string, string> _diagnostics; + + public StubAttestationVerifier(bool isValid, ImmutableDictionary<string, string>? diagnostics = null) + { + _isValid = isValid; + _diagnostics = diagnostics ?? 
ImmutableDictionary<string, string>.Empty; + } + + public int Invocations { get; private set; } + + public ValueTask<VexAttestationVerification> VerifyAsync(VexAttestationVerificationRequest request, CancellationToken cancellationToken) + { + Invocations++; + return ValueTask.FromResult(new VexAttestationVerification(_isValid, _diagnostics)); + } + } + + private sealed class FixedTimeProvider : TimeProvider + { + private readonly DateTimeOffset _utcNow; + + public FixedTimeProvider(DateTimeOffset utcNow) + { + _utcNow = utcNow; + } + + public override DateTimeOffset GetUtcNow() => _utcNow; + } +} diff --git a/src/StellaOps.Excititor.Worker.Tests/StellaOps.Excititor.Worker.Tests.csproj b/src/Excititor/__Tests/StellaOps.Excititor.Worker.Tests/StellaOps.Excititor.Worker.Tests.csproj similarity index 79% rename from src/StellaOps.Excititor.Worker.Tests/StellaOps.Excititor.Worker.Tests.csproj rename to src/Excititor/__Tests/StellaOps.Excititor.Worker.Tests/StellaOps.Excititor.Worker.Tests.csproj index 65cfc88a..9c5f9ee1 100644 --- a/src/StellaOps.Excititor.Worker.Tests/StellaOps.Excititor.Worker.Tests.csproj +++ b/src/Excititor/__Tests/StellaOps.Excititor.Worker.Tests/StellaOps.Excititor.Worker.Tests.csproj @@ -1,4 +1,5 @@ -<Project Sdk="Microsoft.NET.Sdk"> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> <Nullable>enable</Nullable> @@ -23,7 +24,7 @@ </PackageReference> </ItemGroup> <ItemGroup> - <ProjectReference Include="..\StellaOps.Excititor.Worker\StellaOps.Excititor.Worker.csproj" /> - <ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" /> + <ProjectReference Include="../../StellaOps.Excititor.Worker/StellaOps.Excititor.Worker.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Excititor.Storage.Mongo/StellaOps.Excititor.Storage.Mongo.csproj" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Excititor.Worker.Tests/VexWorkerOptionsTests.cs b/src/Excititor/__Tests/StellaOps.Excititor.Worker.Tests/VexWorkerOptionsTests.cs similarity index 100% rename from src/StellaOps.Excititor.Worker.Tests/VexWorkerOptionsTests.cs rename to src/Excititor/__Tests/StellaOps.Excititor.Worker.Tests/VexWorkerOptionsTests.cs diff --git a/src/StellaOps.ExportCenter.AttestationBundles/AGENTS.md b/src/ExportCenter/StellaOps.ExportCenter.AttestationBundles/AGENTS.md similarity index 97% rename from src/StellaOps.ExportCenter.AttestationBundles/AGENTS.md rename to src/ExportCenter/StellaOps.ExportCenter.AttestationBundles/AGENTS.md index 03eacc72..800904fe 100644 --- a/src/StellaOps.ExportCenter.AttestationBundles/AGENTS.md +++ b/src/ExportCenter/StellaOps.ExportCenter.AttestationBundles/AGENTS.md @@ -1,14 +1,14 @@ -# Attestation Bundle Export Guild Charter - -## Mission -Enable offline transfer and verification of attestations by building signed bundles containing envelopes, issuer metadata, and optional transparency log segments. - -## Scope -- Bundle construction via Export Center, including manifest, checksums, DSSE signatures. -- CLI tooling for bundle verification and import. -- Coordination with risk/attestor services for air-gap workflows. - -## Definition of Done -- Bundles build reproducibly with manifest + signatures and pass verification tooling. -- Importer applies bundles to air-gapped Attestor Store safely. -- Documentation covers offline workflows with imposed rule banner. 
+# Attestation Bundle Export Guild Charter + +## Mission +Enable offline transfer and verification of attestations by building signed bundles containing envelopes, issuer metadata, and optional transparency log segments. + +## Scope +- Bundle construction via Export Center, including manifest, checksums, DSSE signatures. +- CLI tooling for bundle verification and import. +- Coordination with risk/attestor services for air-gap workflows. + +## Definition of Done +- Bundles build reproducibly with manifest + signatures and pass verification tooling. +- Importer applies bundles to air-gapped Attestor Store safely. +- Documentation covers offline workflows with imposed rule banner. diff --git a/src/StellaOps.ExportCenter.AttestationBundles/TASKS.md b/src/ExportCenter/StellaOps.ExportCenter.AttestationBundles/TASKS.md similarity index 99% rename from src/StellaOps.ExportCenter.AttestationBundles/TASKS.md rename to src/ExportCenter/StellaOps.ExportCenter.AttestationBundles/TASKS.md index e7420d62..e6bb4760 100644 --- a/src/StellaOps.ExportCenter.AttestationBundles/TASKS.md +++ b/src/ExportCenter/StellaOps.ExportCenter.AttestationBundles/TASKS.md @@ -1,13 +1,13 @@ -# Attestation Bundle Export Task Board — Epic 19: Attestor Console - -## Sprint 74 – Builder -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| EXPORT-ATTEST-74-001 | TODO | Attestation Bundle Guild, Attestor Service Guild | ATTESTOR-73-003 | Implement export job producing attestation bundles with manifest, checksums, DSSE signature, and optional transparency log segments. | Bundle built in staging; manifest recorded; signature verification tests pass. | -| EXPORT-ATTEST-74-002 | TODO | Attestation Bundle Guild, DevOps Guild | EXPORT-ATTEST-74-001 | Integrate bundle job into CI/offline kit packaging with checksum publication. | Pipeline publishes bundle artifact + checksums; documentation updated. | - -## Sprint 75 – Verification & Import -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| EXPORT-ATTEST-75-001 | TODO | Attestation Bundle Guild, CLI Attestor Guild | EXPORT-ATTEST-74-001 | Provide CLI command `stella attest bundle verify/import` for air-gap usage. | CLI verifies/signatures; import seeds attestor store; tests cover corrupted bundle. | -| EXPORT-ATTEST-75-002 | TODO | Attestation Bundle Guild, Docs Guild | EXPORT-ATTEST-75-001 | Document `/docs/attestor/airgap.md` with bundle workflows and verification steps. | Doc merged with banner; examples verified. | +# Attestation Bundle Export Task Board — Epic 19: Attestor Console + +## Sprint 74 – Builder +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| EXPORT-ATTEST-74-001 | TODO | Attestation Bundle Guild, Attestor Service Guild | ATTESTOR-73-003 | Implement export job producing attestation bundles with manifest, checksums, DSSE signature, and optional transparency log segments. | Bundle built in staging; manifest recorded; signature verification tests pass. | +| EXPORT-ATTEST-74-002 | TODO | Attestation Bundle Guild, DevOps Guild | EXPORT-ATTEST-74-001 | Integrate bundle job into CI/offline kit packaging with checksum publication. | Pipeline publishes bundle artifact + checksums; documentation updated. 
| + +## Sprint 75 – Verification & Import +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| EXPORT-ATTEST-75-001 | TODO | Attestation Bundle Guild, CLI Attestor Guild | EXPORT-ATTEST-74-001 | Provide CLI command `stella attest bundle verify/import` for air-gap usage. | CLI verifies/signatures; import seeds attestor store; tests cover corrupted bundle. | +| EXPORT-ATTEST-75-002 | TODO | Attestation Bundle Guild, Docs Guild | EXPORT-ATTEST-75-001 | Document `/docs/attestor/airgap.md` with bundle workflows and verification steps. | Doc merged with banner; examples verified. | diff --git a/src/StellaOps.ExportCenter.DevPortalOffline/AGENTS.md b/src/ExportCenter/StellaOps.ExportCenter.DevPortalOffline/AGENTS.md similarity index 97% rename from src/StellaOps.ExportCenter.DevPortalOffline/AGENTS.md rename to src/ExportCenter/StellaOps.ExportCenter.DevPortalOffline/AGENTS.md index 48fba6ac..96dba54d 100644 --- a/src/StellaOps.ExportCenter.DevPortalOffline/AGENTS.md +++ b/src/ExportCenter/StellaOps.ExportCenter.DevPortalOffline/AGENTS.md @@ -1,14 +1,14 @@ -# DevPortal Offline Export Guild Charter - -## Mission -Package developer portal assets, OpenAPI specs, and SDK binaries into reproducible bundles for air-gapped environments. - -## Scope -- Integrate with Export Center to produce `devportal --offline` bundles. -- Manage checksum manifests, DSSE signatures, and provenance. -- Provide validation tooling for operators importing bundles. - -## Definition of Done -- Offline bundle builds reproducibly with signed manifests and verification scripts. -- Export job documented and available via CLI/Console. -- Operators can validate bundle integrity without external services. +# DevPortal Offline Export Guild Charter + +## Mission +Package developer portal assets, OpenAPI specs, and SDK binaries into reproducible bundles for air-gapped environments. + +## Scope +- Integrate with Export Center to produce `devportal --offline` bundles. +- Manage checksum manifests, DSSE signatures, and provenance. +- Provide validation tooling for operators importing bundles. + +## Definition of Done +- Offline bundle builds reproducibly with signed manifests and verification scripts. +- Export job documented and available via CLI/Console. +- Operators can validate bundle integrity without external services. diff --git a/src/StellaOps.ExportCenter.DevPortalOffline/TASKS.md b/src/ExportCenter/StellaOps.ExportCenter.DevPortalOffline/TASKS.md similarity index 99% rename from src/StellaOps.ExportCenter.DevPortalOffline/TASKS.md rename to src/ExportCenter/StellaOps.ExportCenter.DevPortalOffline/TASKS.md index ba5b1d13..25a68527 100644 --- a/src/StellaOps.ExportCenter.DevPortalOffline/TASKS.md +++ b/src/ExportCenter/StellaOps.ExportCenter.DevPortalOffline/TASKS.md @@ -1,7 +1,7 @@ -# DevPortal Offline Export Task Board — Epic 17: SDKs & OpenAPI Docs - -## Sprint 64 – Bundle Implementation -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| DVOFF-64-001 | TODO | DevPortal Offline Guild, Exporter Guild | DEVPORT-64-001, SDKREL-64-002 | Implement Export Center job `devportal --offline` bundling portal HTML, specs, SDK artifacts, changelogs, and verification manifest. | Job executes in staging; manifest contains checksums + DSSE signatures; docs updated. 
| -| DVOFF-64-002 | TODO | DevPortal Offline Guild, AirGap Controller Guild | DVOFF-64-001 | Provide verification CLI (`stella devportal verify bundle.tgz`) ensuring integrity before import. | CLI command validates signatures; integration test covers corrupted bundle; runbook updated. | +# DevPortal Offline Export Task Board — Epic 17: SDKs & OpenAPI Docs + +## Sprint 64 – Bundle Implementation +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| DVOFF-64-001 | TODO | DevPortal Offline Guild, Exporter Guild | DEVPORT-64-001, SDKREL-64-002 | Implement Export Center job `devportal --offline` bundling portal HTML, specs, SDK artifacts, changelogs, and verification manifest. | Job executes in staging; manifest contains checksums + DSSE signatures; docs updated. | +| DVOFF-64-002 | TODO | DevPortal Offline Guild, AirGap Controller Guild | DVOFF-64-001 | Provide verification CLI (`stella devportal verify bundle.tgz`) ensuring integrity before import. | CLI command validates signatures; integration test covers corrupted bundle; runbook updated. | diff --git a/src/StellaOps.ExportCenter.RiskBundles/AGENTS.md b/src/ExportCenter/StellaOps.ExportCenter.RiskBundles/AGENTS.md similarity index 98% rename from src/StellaOps.ExportCenter.RiskBundles/AGENTS.md rename to src/ExportCenter/StellaOps.ExportCenter.RiskBundles/AGENTS.md index e60ad9b2..5ac0819b 100644 --- a/src/StellaOps.ExportCenter.RiskBundles/AGENTS.md +++ b/src/ExportCenter/StellaOps.ExportCenter.RiskBundles/AGENTS.md @@ -1,14 +1,14 @@ -# Risk Bundle Export Guild Charter - -## Mission -Produce offline-ready bundles of risk scoring factor datasets and provider metadata for air-gapped environments. - -## Scope -- Export Center job `risk-bundle` that packages KEV/EPSS feeds, reachability indexes, runtime evidence snapshots, and metadata. -- DSSE signing, checksum manifests, and verification tooling. -- Coordination with Risk Engine providers to declare required assets and TTLs. - -## Definition of Done -- Bundles build reproducibly with manifests and signatures; verification CLI available. -- Provider metadata enumerates datasets, TTLs, and schema versions. -- Air-gapped installations can load bundles and detect missing assets loudly. +# Risk Bundle Export Guild Charter + +## Mission +Produce offline-ready bundles of risk scoring factor datasets and provider metadata for air-gapped environments. + +## Scope +- Export Center job `risk-bundle` that packages KEV/EPSS feeds, reachability indexes, runtime evidence snapshots, and metadata. +- DSSE signing, checksum manifests, and verification tooling. +- Coordination with Risk Engine providers to declare required assets and TTLs. + +## Definition of Done +- Bundles build reproducibly with manifests and signatures; verification CLI available. +- Provider metadata enumerates datasets, TTLs, and schema versions. +- Air-gapped installations can load bundles and detect missing assets loudly. 
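Editor note: the attestation-bundle, devportal-offline, and risk-bundle charters above all converge on the same offline pattern — a checksum manifest plus DSSE signatures that an operator can verify without network access. As a minimal sketch of the checksum half only (C#, BCL APIs; the manifest layout of "<hex-sha256>  <relative-path>" per line, the type name, and the method name are assumptions for illustration, not the actual Export Center contract):

using System;
using System.IO;
using System.Security.Cryptography;

static class BundleChecksumSketch
{
    // Recomputes SHA-256 for every file listed in the manifest and compares it
    // against the recorded digest. Returns false on the first mismatch.
    public static bool VerifyManifest(string bundleRoot, string manifestPath)
    {
        foreach (var line in File.ReadLines(manifestPath))
        {
            if (string.IsNullOrWhiteSpace(line))
            {
                continue;
            }

            // Assumed layout per line: "<hex sha256>  <relative path>"
            var parts = line.Split("  ", 2, StringSplitOptions.TrimEntries);
            var expected = parts[0];
            var file = Path.Combine(bundleRoot, parts[1]);

            using var stream = File.OpenRead(file);
            var actual = Convert.ToHexString(SHA256.HashData(stream)).ToLowerInvariant();

            if (!string.Equals(actual, expected, StringComparison.OrdinalIgnoreCase))
            {
                return false; // tampered or corrupted entry
            }
        }

        return true;
    }
}

A real `stella ... verify` command would additionally check the DSSE signature over the manifest before trusting any digest listed in it; this sketch covers integrity only, not authenticity.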
diff --git a/src/StellaOps.ExportCenter.RiskBundles/TASKS.md b/src/ExportCenter/StellaOps.ExportCenter.RiskBundles/TASKS.md similarity index 99% rename from src/StellaOps.ExportCenter.RiskBundles/TASKS.md rename to src/ExportCenter/StellaOps.ExportCenter.RiskBundles/TASKS.md index a8036ee6..959bd0e3 100644 --- a/src/StellaOps.ExportCenter.RiskBundles/TASKS.md +++ b/src/ExportCenter/StellaOps.ExportCenter.RiskBundles/TASKS.md @@ -1,13 +1,13 @@ -# Risk Bundle Export Task Board — Epic 18: Risk Scoring Profiles - -## Sprint 69 – Bundle Builder -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| RISK-BUNDLE-69-001 | TODO | Risk Bundle Export Guild, Risk Engine Guild | RISK-ENGINE-67-003 | Implement `stella export risk-bundle` job producing tarball with provider datasets, manifests, and DSSE signatures. | Bundle builds in staging; manifest lists datasets + TTL; signatures verified. | -| RISK-BUNDLE-69-002 | TODO | Risk Bundle Export Guild, DevOps Guild | RISK-BUNDLE-69-001 | Integrate bundle job into CI/offline kit pipelines with checksum publication. | CI produces bundle artifact; checksums in release metadata; docs updated. | - -## Sprint 70 – Verification & Docs -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| RISK-BUNDLE-70-001 | TODO | Risk Bundle Export Guild, CLI Guild | RISK-BUNDLE-69-001 | Provide CLI `stella risk bundle verify` command to validate bundles before import. | CLI verifies DSSE + checksums; integration tests cover tampered bundle. | -| RISK-BUNDLE-70-002 | TODO | Risk Bundle Export Guild, Docs Guild | RISK-BUNDLE-69-002 | Publish `/docs/airgap/risk-bundles.md` detailing build/import/verification workflows. | Doc merged with banner; examples validated. | +# Risk Bundle Export Task Board — Epic 18: Risk Scoring Profiles + +## Sprint 69 – Bundle Builder +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| RISK-BUNDLE-69-001 | TODO | Risk Bundle Export Guild, Risk Engine Guild | RISK-ENGINE-67-003 | Implement `stella export risk-bundle` job producing tarball with provider datasets, manifests, and DSSE signatures. | Bundle builds in staging; manifest lists datasets + TTL; signatures verified. | +| RISK-BUNDLE-69-002 | TODO | Risk Bundle Export Guild, DevOps Guild | RISK-BUNDLE-69-001 | Integrate bundle job into CI/offline kit pipelines with checksum publication. | CI produces bundle artifact; checksums in release metadata; docs updated. | + +## Sprint 70 – Verification & Docs +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| RISK-BUNDLE-70-001 | TODO | Risk Bundle Export Guild, CLI Guild | RISK-BUNDLE-69-001 | Provide CLI `stella risk bundle verify` command to validate bundles before import. | CLI verifies DSSE + checksums; integration tests cover tampered bundle. | +| RISK-BUNDLE-70-002 | TODO | Risk Bundle Export Guild, Docs Guild | RISK-BUNDLE-69-002 | Publish `/docs/airgap/risk-bundles.md` detailing build/import/verification workflows. | Doc merged with banner; examples validated. 
| diff --git a/src/ExportCenter/StellaOps.ExportCenter.sln b/src/ExportCenter/StellaOps.ExportCenter.sln new file mode 100644 index 00000000..43746ba8 --- /dev/null +++ b/src/ExportCenter/StellaOps.ExportCenter.sln @@ -0,0 +1,99 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.0.31903.59 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "StellaOps.ExportCenter", "StellaOps.ExportCenter", "{453E5BB8-E54E-3EF9-8B1B-5E84C5251BBC}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.ExportCenter.Core", "StellaOps.ExportCenter\StellaOps.ExportCenter.Core\StellaOps.ExportCenter.Core.csproj", "{E13C1C3A-BCD1-4B32-B267-3008987833D9}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.ExportCenter.Infrastructure", "StellaOps.ExportCenter\StellaOps.ExportCenter.Infrastructure\StellaOps.ExportCenter.Infrastructure.csproj", "{7203247A-2B03-4E9A-A8F9-E8434377A398}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.ExportCenter.Tests", "StellaOps.ExportCenter\StellaOps.ExportCenter.Tests\StellaOps.ExportCenter.Tests.csproj", "{0FF21346-59FF-4E46-953D-15C1E80B36E8}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.ExportCenter.WebService", "StellaOps.ExportCenter\StellaOps.ExportCenter.WebService\StellaOps.ExportCenter.WebService.csproj", "{84BACF3D-19B9-4E65-A751-8EBBA39EAE5A}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.ExportCenter.Worker", "StellaOps.ExportCenter\StellaOps.ExportCenter.Worker\StellaOps.ExportCenter.Worker.csproj", "{77B919B8-6A4B-47BD-82BB-14287E2E069C}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|Any CPU = Release|Any CPU + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {E13C1C3A-BCD1-4B32-B267-3008987833D9}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E13C1C3A-BCD1-4B32-B267-3008987833D9}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E13C1C3A-BCD1-4B32-B267-3008987833D9}.Debug|x64.ActiveCfg = Debug|Any CPU + {E13C1C3A-BCD1-4B32-B267-3008987833D9}.Debug|x64.Build.0 = Debug|Any CPU + {E13C1C3A-BCD1-4B32-B267-3008987833D9}.Debug|x86.ActiveCfg = Debug|Any CPU + {E13C1C3A-BCD1-4B32-B267-3008987833D9}.Debug|x86.Build.0 = Debug|Any CPU + {E13C1C3A-BCD1-4B32-B267-3008987833D9}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E13C1C3A-BCD1-4B32-B267-3008987833D9}.Release|Any CPU.Build.0 = Release|Any CPU + {E13C1C3A-BCD1-4B32-B267-3008987833D9}.Release|x64.ActiveCfg = Release|Any CPU + {E13C1C3A-BCD1-4B32-B267-3008987833D9}.Release|x64.Build.0 = Release|Any CPU + {E13C1C3A-BCD1-4B32-B267-3008987833D9}.Release|x86.ActiveCfg = Release|Any CPU + {E13C1C3A-BCD1-4B32-B267-3008987833D9}.Release|x86.Build.0 = Release|Any CPU + {7203247A-2B03-4E9A-A8F9-E8434377A398}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7203247A-2B03-4E9A-A8F9-E8434377A398}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7203247A-2B03-4E9A-A8F9-E8434377A398}.Debug|x64.ActiveCfg = Debug|Any CPU + {7203247A-2B03-4E9A-A8F9-E8434377A398}.Debug|x64.Build.0 = Debug|Any CPU + {7203247A-2B03-4E9A-A8F9-E8434377A398}.Debug|x86.ActiveCfg = Debug|Any CPU + {7203247A-2B03-4E9A-A8F9-E8434377A398}.Debug|x86.Build.0 = Debug|Any CPU + {7203247A-2B03-4E9A-A8F9-E8434377A398}.Release|Any CPU.ActiveCfg 
= Release|Any CPU + {7203247A-2B03-4E9A-A8F9-E8434377A398}.Release|Any CPU.Build.0 = Release|Any CPU + {7203247A-2B03-4E9A-A8F9-E8434377A398}.Release|x64.ActiveCfg = Release|Any CPU + {7203247A-2B03-4E9A-A8F9-E8434377A398}.Release|x64.Build.0 = Release|Any CPU + {7203247A-2B03-4E9A-A8F9-E8434377A398}.Release|x86.ActiveCfg = Release|Any CPU + {7203247A-2B03-4E9A-A8F9-E8434377A398}.Release|x86.Build.0 = Release|Any CPU + {0FF21346-59FF-4E46-953D-15C1E80B36E8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {0FF21346-59FF-4E46-953D-15C1E80B36E8}.Debug|Any CPU.Build.0 = Debug|Any CPU + {0FF21346-59FF-4E46-953D-15C1E80B36E8}.Debug|x64.ActiveCfg = Debug|Any CPU + {0FF21346-59FF-4E46-953D-15C1E80B36E8}.Debug|x64.Build.0 = Debug|Any CPU + {0FF21346-59FF-4E46-953D-15C1E80B36E8}.Debug|x86.ActiveCfg = Debug|Any CPU + {0FF21346-59FF-4E46-953D-15C1E80B36E8}.Debug|x86.Build.0 = Debug|Any CPU + {0FF21346-59FF-4E46-953D-15C1E80B36E8}.Release|Any CPU.ActiveCfg = Release|Any CPU + {0FF21346-59FF-4E46-953D-15C1E80B36E8}.Release|Any CPU.Build.0 = Release|Any CPU + {0FF21346-59FF-4E46-953D-15C1E80B36E8}.Release|x64.ActiveCfg = Release|Any CPU + {0FF21346-59FF-4E46-953D-15C1E80B36E8}.Release|x64.Build.0 = Release|Any CPU + {0FF21346-59FF-4E46-953D-15C1E80B36E8}.Release|x86.ActiveCfg = Release|Any CPU + {0FF21346-59FF-4E46-953D-15C1E80B36E8}.Release|x86.Build.0 = Release|Any CPU + {84BACF3D-19B9-4E65-A751-8EBBA39EAE5A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {84BACF3D-19B9-4E65-A751-8EBBA39EAE5A}.Debug|Any CPU.Build.0 = Debug|Any CPU + {84BACF3D-19B9-4E65-A751-8EBBA39EAE5A}.Debug|x64.ActiveCfg = Debug|Any CPU + {84BACF3D-19B9-4E65-A751-8EBBA39EAE5A}.Debug|x64.Build.0 = Debug|Any CPU + {84BACF3D-19B9-4E65-A751-8EBBA39EAE5A}.Debug|x86.ActiveCfg = Debug|Any CPU + {84BACF3D-19B9-4E65-A751-8EBBA39EAE5A}.Debug|x86.Build.0 = Debug|Any CPU + {84BACF3D-19B9-4E65-A751-8EBBA39EAE5A}.Release|Any CPU.ActiveCfg = Release|Any CPU + {84BACF3D-19B9-4E65-A751-8EBBA39EAE5A}.Release|Any CPU.Build.0 = Release|Any CPU + {84BACF3D-19B9-4E65-A751-8EBBA39EAE5A}.Release|x64.ActiveCfg = Release|Any CPU + {84BACF3D-19B9-4E65-A751-8EBBA39EAE5A}.Release|x64.Build.0 = Release|Any CPU + {84BACF3D-19B9-4E65-A751-8EBBA39EAE5A}.Release|x86.ActiveCfg = Release|Any CPU + {84BACF3D-19B9-4E65-A751-8EBBA39EAE5A}.Release|x86.Build.0 = Release|Any CPU + {77B919B8-6A4B-47BD-82BB-14287E2E069C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {77B919B8-6A4B-47BD-82BB-14287E2E069C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {77B919B8-6A4B-47BD-82BB-14287E2E069C}.Debug|x64.ActiveCfg = Debug|Any CPU + {77B919B8-6A4B-47BD-82BB-14287E2E069C}.Debug|x64.Build.0 = Debug|Any CPU + {77B919B8-6A4B-47BD-82BB-14287E2E069C}.Debug|x86.ActiveCfg = Debug|Any CPU + {77B919B8-6A4B-47BD-82BB-14287E2E069C}.Debug|x86.Build.0 = Debug|Any CPU + {77B919B8-6A4B-47BD-82BB-14287E2E069C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {77B919B8-6A4B-47BD-82BB-14287E2E069C}.Release|Any CPU.Build.0 = Release|Any CPU + {77B919B8-6A4B-47BD-82BB-14287E2E069C}.Release|x64.ActiveCfg = Release|Any CPU + {77B919B8-6A4B-47BD-82BB-14287E2E069C}.Release|x64.Build.0 = Release|Any CPU + {77B919B8-6A4B-47BD-82BB-14287E2E069C}.Release|x86.ActiveCfg = Release|Any CPU + {77B919B8-6A4B-47BD-82BB-14287E2E069C}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(NestedProjects) = preSolution + {E13C1C3A-BCD1-4B32-B267-3008987833D9} = {453E5BB8-E54E-3EF9-8B1B-5E84C5251BBC} + 
{7203247A-2B03-4E9A-A8F9-E8434377A398} = {453E5BB8-E54E-3EF9-8B1B-5E84C5251BBC} + {0FF21346-59FF-4E46-953D-15C1E80B36E8} = {453E5BB8-E54E-3EF9-8B1B-5E84C5251BBC} + {84BACF3D-19B9-4E65-A751-8EBBA39EAE5A} = {453E5BB8-E54E-3EF9-8B1B-5E84C5251BBC} + {77B919B8-6A4B-47BD-82BB-14287E2E069C} = {453E5BB8-E54E-3EF9-8B1B-5E84C5251BBC} + EndGlobalSection +EndGlobal diff --git a/src/StellaOps.ExportCenter/AGENTS.md b/src/ExportCenter/StellaOps.ExportCenter/AGENTS.md similarity index 98% rename from src/StellaOps.ExportCenter/AGENTS.md rename to src/ExportCenter/StellaOps.ExportCenter/AGENTS.md index ccf7967a..5b35fd49 100644 --- a/src/StellaOps.ExportCenter/AGENTS.md +++ b/src/ExportCenter/StellaOps.ExportCenter/AGENTS.md @@ -1,18 +1,18 @@ -# StellaOps Exporter Service — Agent Charter - -## Mission -Deliver the Export Center service described in Epic 10. Provide reproducible, signed bundles (JSON, Trivy DB, mirror) that respect AOC boundaries, tenant isolation, and imposed rule propagation across all consuming components. - -## Key Responsibilities -- Maintain planner, adapters, signing, and distribution layers for export profiles. -- Coordinate with Orchestrator for job scheduling, Findings Ledger for data streaming, Policy Engine/VEX Lens for snapshots, and Authority for RBAC scopes. -- Guarantee deterministic outputs, provenance, and cryptographic signatures for every export profile. -- Support Console/CLI experiences, DevOps automation, and Offline Kit packaging without violating sovereignty or redaction requirements. - -## Module Layout -- `StellaOps.ExportCenter.Core/` — export profile domain logic, planners, and validation. -- `StellaOps.ExportCenter.Infrastructure/` — storage providers, signing adapters, integration clients. -- `StellaOps.ExportCenter.WebService/` — REST API surface (profiles, runs, downloads, SSE). -- `StellaOps.ExportCenter.Worker/` — export execution pipelines and background schedulers. -- `StellaOps.ExportCenter.Tests/` — unit tests and future fixture harnesses. -- `StellaOps.ExportCenter.sln` — module solution wiring projects together. +# StellaOps Exporter Service — Agent Charter + +## Mission +Deliver the Export Center service described in Epic 10. Provide reproducible, signed bundles (JSON, Trivy DB, mirror) that respect AOC boundaries, tenant isolation, and imposed rule propagation across all consuming components. + +## Key Responsibilities +- Maintain planner, adapters, signing, and distribution layers for export profiles. +- Coordinate with Orchestrator for job scheduling, Findings Ledger for data streaming, Policy Engine/VEX Lens for snapshots, and Authority for RBAC scopes. +- Guarantee deterministic outputs, provenance, and cryptographic signatures for every export profile. +- Support Console/CLI experiences, DevOps automation, and Offline Kit packaging without violating sovereignty or redaction requirements. + +## Module Layout +- `StellaOps.ExportCenter.Core/` — export profile domain logic, planners, and validation. +- `StellaOps.ExportCenter.Infrastructure/` — storage providers, signing adapters, integration clients. +- `StellaOps.ExportCenter.WebService/` — REST API surface (profiles, runs, downloads, SSE). +- `StellaOps.ExportCenter.Worker/` — export execution pipelines and background schedulers. +- `StellaOps.ExportCenter.Tests/` — unit tests and future fixture harnesses. +- `StellaOps.ExportCenter.sln` — module solution wiring projects together. 
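Editor note: the Export Center charter above (and the Excititor worker tests earlier in this patch) lean on DSSE envelopes for provenance. For orientation, the DSSE v1 spec defines the bytes a signature covers as the pre-authentication encoding PAE(type, body) = "DSSEv1" SP LEN(type) SP type SP LEN(body) SP body. The helper below is a hedged C# sketch of that encoding; the class and method names are illustrative and not part of StellaOps.Excititor.Attestation.Dsse:

using System;
using System.IO;
using System.Text;

static class DssePaeSketch
{
    // PAE(type, body) = "DSSEv1" SP LEN(type) SP type SP LEN(body) SP body,
    // where LEN(x) is the decimal byte length of x and SP is a single space.
    public static byte[] PreAuthenticationEncoding(string payloadType, byte[] payload)
    {
        var typeBytes = Encoding.UTF8.GetBytes(payloadType);

        using var buffer = new MemoryStream();
        void Write(byte[] bytes) => buffer.Write(bytes, 0, bytes.Length);

        Write(Encoding.UTF8.GetBytes("DSSEv1 "));
        Write(Encoding.UTF8.GetBytes($"{typeBytes.Length} "));
        Write(typeBytes);
        Write(Encoding.UTF8.GetBytes($" {payload.Length} "));
        Write(payload);

        return buffer.ToArray();
    }
}

// Usage sketch: decode the envelope's base64 payload, rebuild the PAE, then verify each
// envelope signature over those bytes with the issuer's public key, e.g.
//   var pae = DssePaeSketch.PreAuthenticationEncoding(
//       "application/vnd.in-toto+json", Convert.FromBase64String(base64Payload));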
diff --git a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/Class1.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/Class1.cs similarity index 91% rename from src/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/Class1.cs rename to src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/Class1.cs index aefce068..1ad2668a 100644 --- a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/Class1.cs +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/Class1.cs @@ -1,6 +1,6 @@ -namespace StellaOps.ExportCenter.Core; - -public class Class1 -{ - -} +namespace StellaOps.ExportCenter.Core; + +public class Class1 +{ + +} diff --git a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/StellaOps.ExportCenter.Core.csproj b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/StellaOps.ExportCenter.Core.csproj similarity index 95% rename from src/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/StellaOps.ExportCenter.Core.csproj rename to src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/StellaOps.ExportCenter.Core.csproj index fe0eef44..e4808f0d 100644 --- a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/StellaOps.ExportCenter.Core.csproj +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Core/StellaOps.ExportCenter.Core.csproj @@ -1,18 +1,18 @@ -<?xml version="1.0" ?> -<Project Sdk="Microsoft.NET.Sdk"> - - - - <PropertyGroup> - - - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <LangVersion>preview</LangVersion> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - - - -</Project> +<?xml version="1.0" ?> +<Project Sdk="Microsoft.NET.Sdk"> + + + + <PropertyGroup> + + + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <LangVersion>preview</LangVersion> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + + + +</Project> diff --git a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.Infrastructure/Class1.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Infrastructure/Class1.cs similarity index 92% rename from src/StellaOps.ExportCenter/StellaOps.ExportCenter.Infrastructure/Class1.cs rename to src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Infrastructure/Class1.cs index 4fc16d93..37d4f2eb 100644 --- a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.Infrastructure/Class1.cs +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Infrastructure/Class1.cs @@ -1,6 +1,6 @@ -namespace StellaOps.ExportCenter.Infrastructure; - -public class Class1 -{ - -} +namespace StellaOps.ExportCenter.Infrastructure; + +public class Class1 +{ + +} diff --git a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.Infrastructure/StellaOps.ExportCenter.Infrastructure.csproj b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Infrastructure/StellaOps.ExportCenter.Infrastructure.csproj similarity index 94% rename from src/StellaOps.ExportCenter/StellaOps.ExportCenter.Infrastructure/StellaOps.ExportCenter.Infrastructure.csproj rename to src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Infrastructure/StellaOps.ExportCenter.Infrastructure.csproj index b432e2f7..fcc97aea 100644 --- a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.Infrastructure/StellaOps.ExportCenter.Infrastructure.csproj +++ 
b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Infrastructure/StellaOps.ExportCenter.Infrastructure.csproj @@ -1,28 +1,28 @@ -<?xml version="1.0" ?> -<Project Sdk="Microsoft.NET.Sdk"> - - - - <ItemGroup> - - - <ProjectReference Include="..\StellaOps.ExportCenter.Core\StellaOps.ExportCenter.Core.csproj"/> - - - </ItemGroup> - - - - <PropertyGroup> - - - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <LangVersion>preview</LangVersion> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - - - -</Project> +<?xml version="1.0" ?> +<Project Sdk="Microsoft.NET.Sdk"> + + + + <ItemGroup> + + + <ProjectReference Include="..\StellaOps.ExportCenter.Core\StellaOps.ExportCenter.Core.csproj"/> + + + </ItemGroup> + + + + <PropertyGroup> + + + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <LangVersion>preview</LangVersion> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + + + +</Project> diff --git a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/StellaOps.ExportCenter.Tests.csproj b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/StellaOps.ExportCenter.Tests.csproj similarity index 91% rename from src/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/StellaOps.ExportCenter.Tests.csproj rename to src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/StellaOps.ExportCenter.Tests.csproj index 3f49694b..6d775d66 100644 --- a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/StellaOps.ExportCenter.Tests.csproj +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/StellaOps.ExportCenter.Tests.csproj @@ -1,135 +1,135 @@ -<?xml version="1.0" ?> -<Project Sdk="Microsoft.NET.Sdk"> - - - - - - <PropertyGroup> - - - - - <OutputType>Exe</OutputType> - - - - - <IsPackable>false</IsPackable> - - - - - - - - - - - - - - <TargetFramework>net10.0</TargetFramework> - - - <ImplicitUsings>enable</ImplicitUsings> - - - <Nullable>enable</Nullable> - - - <UseConcelierTestInfra>false</UseConcelierTestInfra> - - - <LangVersion>preview</LangVersion> - - - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - - - </PropertyGroup> - - - - - - <ItemGroup> - - - - - <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1"/> - - - - - <PackageReference Include="xunit.v3" Version="3.0.0"/> - - - - - <PackageReference Include="xunit.runner.visualstudio" Version="3.1.3"/> - - - - - </ItemGroup> - - - - - - <ItemGroup> - - - - - <Content Include="xunit.runner.json" CopyToOutputDirectory="PreserveNewest"/> - - - - - </ItemGroup> - - - - - - <ItemGroup> - - - - - <Using Include="Xunit"/> - - - - - </ItemGroup> - - - - - - <ItemGroup> - - - - - <ProjectReference Include="..\StellaOps.ExportCenter.Core\StellaOps.ExportCenter.Core.csproj"/> - - - - - <ProjectReference Include="..\StellaOps.ExportCenter.Infrastructure\StellaOps.ExportCenter.Infrastructure.csproj"/> - - - - - </ItemGroup> - - - - - -</Project> +<?xml version="1.0" ?> +<Project Sdk="Microsoft.NET.Sdk"> + + + + + + <PropertyGroup> + + + + + <OutputType>Exe</OutputType> + + + + + <IsPackable>false</IsPackable> + + + + + + + + + + + + + + <TargetFramework>net10.0</TargetFramework> + + + <ImplicitUsings>enable</ImplicitUsings> + + + <Nullable>enable</Nullable> + + + <UseConcelierTestInfra>false</UseConcelierTestInfra> + + + <LangVersion>preview</LangVersion> + + + 
<TreatWarningsAsErrors>true</TreatWarningsAsErrors> + + + </PropertyGroup> + + + + + + <ItemGroup> + + + + + <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1"/> + + + + + <PackageReference Include="xunit.v3" Version="3.0.0"/> + + + + + <PackageReference Include="xunit.runner.visualstudio" Version="3.1.3"/> + + + + + </ItemGroup> + + + + + + <ItemGroup> + + + + + <Content Include="xunit.runner.json" CopyToOutputDirectory="PreserveNewest"/> + + + + + </ItemGroup> + + + + + + <ItemGroup> + + + + + <Using Include="Xunit"/> + + + + + </ItemGroup> + + + + + + <ItemGroup> + + + + + <ProjectReference Include="..\StellaOps.ExportCenter.Core\StellaOps.ExportCenter.Core.csproj"/> + + + + + <ProjectReference Include="..\StellaOps.ExportCenter.Infrastructure\StellaOps.ExportCenter.Infrastructure.csproj"/> + + + + + </ItemGroup> + + + + + +</Project> diff --git a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/UnitTest1.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/UnitTest1.cs similarity index 92% rename from src/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/UnitTest1.cs rename to src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/UnitTest1.cs index 3f25849f..f680e25c 100644 --- a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/UnitTest1.cs +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/UnitTest1.cs @@ -1,10 +1,10 @@ -namespace StellaOps.ExportCenter.Tests; - -public class UnitTest1 -{ - [Fact] - public void Test1() - { - - } -} +namespace StellaOps.ExportCenter.Tests; + +public class UnitTest1 +{ + [Fact] + public void Test1() + { + + } +} diff --git a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/xunit.runner.json b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/xunit.runner.json similarity index 96% rename from src/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/xunit.runner.json rename to src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/xunit.runner.json index 86c7ea05..249d815c 100644 --- a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/xunit.runner.json +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Tests/xunit.runner.json @@ -1,3 +1,3 @@ -{ - "$schema": "https://xunit.net/schema/current/xunit.runner.schema.json" -} +{ + "$schema": "https://xunit.net/schema/current/xunit.runner.schema.json" +} diff --git a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Program.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Program.cs similarity index 96% rename from src/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Program.cs rename to src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Program.cs index ee9d65d6..3917ef1b 100644 --- a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Program.cs +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Program.cs @@ -1,41 +1,41 @@ -var builder = WebApplication.CreateBuilder(args); - -// Add services to the container. -// Learn more about configuring OpenAPI at https://aka.ms/aspnet/openapi -builder.Services.AddOpenApi(); - -var app = builder.Build(); - -// Configure the HTTP request pipeline. 
-if (app.Environment.IsDevelopment()) -{ - app.MapOpenApi(); -} - -app.UseHttpsRedirection(); - -var summaries = new[] -{ - "Freezing", "Bracing", "Chilly", "Cool", "Mild", "Warm", "Balmy", "Hot", "Sweltering", "Scorching" -}; - -app.MapGet("/weatherforecast", () => -{ - var forecast = Enumerable.Range(1, 5).Select(index => - new WeatherForecast - ( - DateOnly.FromDateTime(DateTime.Now.AddDays(index)), - Random.Shared.Next(-20, 55), - summaries[Random.Shared.Next(summaries.Length)] - )) - .ToArray(); - return forecast; -}) -.WithName("GetWeatherForecast"); - -app.Run(); - -record WeatherForecast(DateOnly Date, int TemperatureC, string? Summary) -{ - public int TemperatureF => 32 + (int)(TemperatureC / 0.5556); -} +var builder = WebApplication.CreateBuilder(args); + +// Add services to the container. +// Learn more about configuring OpenAPI at https://aka.ms/aspnet/openapi +builder.Services.AddOpenApi(); + +var app = builder.Build(); + +// Configure the HTTP request pipeline. +if (app.Environment.IsDevelopment()) +{ + app.MapOpenApi(); +} + +app.UseHttpsRedirection(); + +var summaries = new[] +{ + "Freezing", "Bracing", "Chilly", "Cool", "Mild", "Warm", "Balmy", "Hot", "Sweltering", "Scorching" +}; + +app.MapGet("/weatherforecast", () => +{ + var forecast = Enumerable.Range(1, 5).Select(index => + new WeatherForecast + ( + DateOnly.FromDateTime(DateTime.Now.AddDays(index)), + Random.Shared.Next(-20, 55), + summaries[Random.Shared.Next(summaries.Length)] + )) + .ToArray(); + return forecast; +}) +.WithName("GetWeatherForecast"); + +app.Run(); + +record WeatherForecast(DateOnly Date, int TemperatureC, string? Summary) +{ + public int TemperatureF => 32 + (int)(TemperatureC / 0.5556); +} diff --git a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Properties/launchSettings.json b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Properties/launchSettings.json similarity index 96% rename from src/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Properties/launchSettings.json rename to src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Properties/launchSettings.json index b656f2fa..e850ff74 100644 --- a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Properties/launchSettings.json +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/Properties/launchSettings.json @@ -1,23 +1,23 @@ -{ - "$schema": "https://json.schemastore.org/launchsettings.json", - "profiles": { - "http": { - "commandName": "Project", - "dotnetRunMessages": true, - "launchBrowser": false, - "applicationUrl": "http://localhost:5269", - "environmentVariables": { - "ASPNETCORE_ENVIRONMENT": "Development" - } - }, - "https": { - "commandName": "Project", - "dotnetRunMessages": true, - "launchBrowser": false, - "applicationUrl": "https://localhost:7218;http://localhost:5269", - "environmentVariables": { - "ASPNETCORE_ENVIRONMENT": "Development" - } - } - } -} +{ + "$schema": "https://json.schemastore.org/launchsettings.json", + "profiles": { + "http": { + "commandName": "Project", + "dotnetRunMessages": true, + "launchBrowser": false, + "applicationUrl": "http://localhost:5269", + "environmentVariables": { + "ASPNETCORE_ENVIRONMENT": "Development" + } + }, + "https": { + "commandName": "Project", + "dotnetRunMessages": true, + "launchBrowser": false, + "applicationUrl": "https://localhost:7218;http://localhost:5269", + "environmentVariables": { + "ASPNETCORE_ENVIRONMENT": "Development" + } + } + } +} diff --git 
a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj similarity index 95% rename from src/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj rename to src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj index e90142c1..f13b07bf 100644 --- a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.csproj @@ -1,41 +1,41 @@ -<?xml version="1.0" ?> -<Project Sdk="Microsoft.NET.Sdk.Web"> - - - - <PropertyGroup> - - - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <LangVersion>preview</LangVersion> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - - - - <ItemGroup> - - - <PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="10.0.0-rc.2.25502.107"/> - - - </ItemGroup> - - - - <ItemGroup> - - - <ProjectReference Include="..\StellaOps.ExportCenter.Core\StellaOps.ExportCenter.Core.csproj"/> - - - <ProjectReference Include="..\StellaOps.ExportCenter.Infrastructure\StellaOps.ExportCenter.Infrastructure.csproj"/> - - - </ItemGroup> - - - -</Project> +<?xml version="1.0" ?> +<Project Sdk="Microsoft.NET.Sdk.Web"> + + + + <PropertyGroup> + + + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <LangVersion>preview</LangVersion> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + + + + <ItemGroup> + + + <PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="10.0.0-rc.2.25502.107"/> + + + </ItemGroup> + + + + <ItemGroup> + + + <ProjectReference Include="..\StellaOps.ExportCenter.Core\StellaOps.ExportCenter.Core.csproj"/> + + + <ProjectReference Include="..\StellaOps.ExportCenter.Infrastructure\StellaOps.ExportCenter.Infrastructure.csproj"/> + + + </ItemGroup> + + + +</Project> diff --git a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.http b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.http similarity index 96% rename from src/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.http rename to src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.http index 25c6b96f..37bb81e3 100644 --- a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.http +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/StellaOps.ExportCenter.WebService.http @@ -1,6 +1,6 @@ -@StellaOps.ExportCenter.WebService_HostAddress = http://localhost:5269 - -GET {{StellaOps.ExportCenter.WebService_HostAddress}}/weatherforecast/ -Accept: application/json - -### +@StellaOps.ExportCenter.WebService_HostAddress = http://localhost:5269 + +GET {{StellaOps.ExportCenter.WebService_HostAddress}}/weatherforecast/ +Accept: application/json + +### diff --git a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/appsettings.Development.json b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/appsettings.Development.json 
similarity index 93% rename from src/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/appsettings.Development.json rename to src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/appsettings.Development.json index 0c208ae9..ff66ba6b 100644 --- a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/appsettings.Development.json +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/appsettings.Development.json @@ -1,8 +1,8 @@ -{ - "Logging": { - "LogLevel": { - "Default": "Information", - "Microsoft.AspNetCore": "Warning" - } - } -} +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.AspNetCore": "Warning" + } + } +} diff --git a/src/StellaOps.Notifier/StellaOps.Notifier.WebService/appsettings.json b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/appsettings.json similarity index 94% rename from src/StellaOps.Notifier/StellaOps.Notifier.WebService/appsettings.json rename to src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/appsettings.json index 10f68b8c..4d566948 100644 --- a/src/StellaOps.Notifier/StellaOps.Notifier.WebService/appsettings.json +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/appsettings.json @@ -1,9 +1,9 @@ -{ - "Logging": { - "LogLevel": { - "Default": "Information", - "Microsoft.AspNetCore": "Warning" - } - }, - "AllowedHosts": "*" -} +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.AspNetCore": "Warning" + } + }, + "AllowedHosts": "*" +} diff --git a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/Program.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/Program.cs similarity index 96% rename from src/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/Program.cs rename to src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/Program.cs index 5063a5e6..8ecd6600 100644 --- a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/Program.cs +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/Program.cs @@ -1,7 +1,7 @@ -using StellaOps.ExportCenter.Worker; - -var builder = Host.CreateApplicationBuilder(args); -builder.Services.AddHostedService<Worker>(); - -var host = builder.Build(); -host.Run(); +using StellaOps.ExportCenter.Worker; + +var builder = Host.CreateApplicationBuilder(args); +builder.Services.AddHostedService<Worker>(); + +var host = builder.Build(); +host.Run(); diff --git a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/Properties/launchSettings.json b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/Properties/launchSettings.json similarity index 95% rename from src/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/Properties/launchSettings.json rename to src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/Properties/launchSettings.json index 6c1d7060..d3013c09 100644 --- a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/Properties/launchSettings.json +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/Properties/launchSettings.json @@ -1,12 +1,12 @@ -{ - "$schema": "https://json.schemastore.org/launchsettings.json", - "profiles": { - "StellaOps.ExportCenter.Worker": { - "commandName": "Project", - "dotnetRunMessages": true, - "environmentVariables": { - "DOTNET_ENVIRONMENT": "Development" - } - } - } -} +{ + "$schema": "https://json.schemastore.org/launchsettings.json", + "profiles": { + 
"StellaOps.ExportCenter.Worker": { + "commandName": "Project", + "dotnetRunMessages": true, + "environmentVariables": { + "DOTNET_ENVIRONMENT": "Development" + } + } + } +} diff --git a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/StellaOps.ExportCenter.Worker.csproj b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/StellaOps.ExportCenter.Worker.csproj similarity index 95% rename from src/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/StellaOps.ExportCenter.Worker.csproj rename to src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/StellaOps.ExportCenter.Worker.csproj index f79f1610..32868f40 100644 --- a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/StellaOps.ExportCenter.Worker.csproj +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/StellaOps.ExportCenter.Worker.csproj @@ -1,43 +1,43 @@ -<?xml version="1.0" ?> -<Project Sdk="Microsoft.NET.Sdk.Worker"> - - - - <PropertyGroup> - - - <UserSecretsId>dotnet-StellaOps.ExportCenter.Worker-d4cfd239-79d1-4d17-91d6-bb7a78770695</UserSecretsId> - - - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <LangVersion>preview</LangVersion> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - - - - <ItemGroup> - - - <PackageReference Include="Microsoft.Extensions.Hosting" Version="10.0.0-rc.2.25502.107"/> - - - </ItemGroup> - - - - <ItemGroup> - - - <ProjectReference Include="..\StellaOps.ExportCenter.Core\StellaOps.ExportCenter.Core.csproj"/> - - - <ProjectReference Include="..\StellaOps.ExportCenter.Infrastructure\StellaOps.ExportCenter.Infrastructure.csproj"/> - - - </ItemGroup> - - -</Project> +<?xml version="1.0" ?> +<Project Sdk="Microsoft.NET.Sdk.Worker"> + + + + <PropertyGroup> + + + <UserSecretsId>dotnet-StellaOps.ExportCenter.Worker-d4cfd239-79d1-4d17-91d6-bb7a78770695</UserSecretsId> + + + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <LangVersion>preview</LangVersion> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + + + + <ItemGroup> + + + <PackageReference Include="Microsoft.Extensions.Hosting" Version="10.0.0-rc.2.25502.107"/> + + + </ItemGroup> + + + + <ItemGroup> + + + <ProjectReference Include="..\StellaOps.ExportCenter.Core\StellaOps.ExportCenter.Core.csproj"/> + + + <ProjectReference Include="..\StellaOps.ExportCenter.Infrastructure\StellaOps.ExportCenter.Infrastructure.csproj"/> + + + </ItemGroup> + + +</Project> diff --git a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/Worker.cs b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/Worker.cs similarity index 96% rename from src/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/Worker.cs rename to src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/Worker.cs index db9f59a2..9f2e3408 100644 --- a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/Worker.cs +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/Worker.cs @@ -1,16 +1,16 @@ -namespace StellaOps.ExportCenter.Worker; - -public class Worker(ILogger<Worker> logger) : BackgroundService -{ - protected override async Task ExecuteAsync(CancellationToken stoppingToken) - { - while (!stoppingToken.IsCancellationRequested) - { - if (logger.IsEnabled(LogLevel.Information)) - { - logger.LogInformation("Worker running at: {time}", DateTimeOffset.Now); - } - await Task.Delay(1000, 
stoppingToken); - } - } -} +namespace StellaOps.ExportCenter.Worker; + +public class Worker(ILogger<Worker> logger) : BackgroundService +{ + protected override async Task ExecuteAsync(CancellationToken stoppingToken) + { + while (!stoppingToken.IsCancellationRequested) + { + if (logger.IsEnabled(LogLevel.Information)) + { + logger.LogInformation("Worker running at: {time}", DateTimeOffset.Now); + } + await Task.Delay(1000, stoppingToken); + } + } +} diff --git a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/appsettings.Development.json b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/appsettings.Development.json similarity index 94% rename from src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/appsettings.Development.json rename to src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/appsettings.Development.json index b2dcdb67..69017646 100644 --- a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/appsettings.Development.json +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/appsettings.Development.json @@ -1,8 +1,8 @@ -{ - "Logging": { - "LogLevel": { - "Default": "Information", - "Microsoft.Hosting.Lifetime": "Information" - } - } -} +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.Hosting.Lifetime": "Information" + } + } +} diff --git a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/appsettings.json b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/appsettings.json similarity index 94% rename from src/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/appsettings.json rename to src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/appsettings.json index b2dcdb67..69017646 100644 --- a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/appsettings.json +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/appsettings.json @@ -1,8 +1,8 @@ -{ - "Logging": { - "LogLevel": { - "Default": "Information", - "Microsoft.Hosting.Lifetime": "Information" - } - } -} +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.Hosting.Lifetime": "Information" + } + } +} diff --git a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.sln b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.sln similarity index 98% rename from src/StellaOps.ExportCenter/StellaOps.ExportCenter.sln rename to src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.sln index d93aecd8..9c2f204e 100644 --- a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.sln +++ b/src/ExportCenter/StellaOps.ExportCenter/StellaOps.ExportCenter.sln @@ -1,90 +1,90 @@ - -Microsoft Visual Studio Solution File, Format Version 12.00 -# Visual Studio Version 17 -VisualStudioVersion = 17.0.31903.59 -MinimumVisualStudioVersion = 10.0.40219.1 -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.ExportCenter.Core", "StellaOps.ExportCenter.Core\StellaOps.ExportCenter.Core.csproj", "{A8B060F0-BD04-4CFB-BC99-C31AE6C9C8F5}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.ExportCenter.Infrastructure", "StellaOps.ExportCenter.Infrastructure\StellaOps.ExportCenter.Infrastructure.csproj", "{2DB372A2-C0AD-48D6-875C-CDEB01CC7AFB}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.ExportCenter.WebService", "StellaOps.ExportCenter.WebService\StellaOps.ExportCenter.WebService.csproj", "{A1460E98-EDED-42BE-ACF8-896ED94053F1}" -EndProject 
-Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.ExportCenter.Worker", "StellaOps.ExportCenter.Worker\StellaOps.ExportCenter.Worker.csproj", "{73531B46-E364-4C0F-B84C-8BDCF3E16051}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.ExportCenter.Tests", "StellaOps.ExportCenter.Tests\StellaOps.ExportCenter.Tests.csproj", "{1201F1ED-F35A-4F12-B662-BB616122A2F2}" -EndProject -Global - GlobalSection(SolutionConfigurationPlatforms) = preSolution - Debug|Any CPU = Debug|Any CPU - Debug|x64 = Debug|x64 - Debug|x86 = Debug|x86 - Release|Any CPU = Release|Any CPU - Release|x64 = Release|x64 - Release|x86 = Release|x86 - EndGlobalSection - GlobalSection(ProjectConfigurationPlatforms) = postSolution - {A8B060F0-BD04-4CFB-BC99-C31AE6C9C8F5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {A8B060F0-BD04-4CFB-BC99-C31AE6C9C8F5}.Debug|Any CPU.Build.0 = Debug|Any CPU - {A8B060F0-BD04-4CFB-BC99-C31AE6C9C8F5}.Debug|x64.ActiveCfg = Debug|Any CPU - {A8B060F0-BD04-4CFB-BC99-C31AE6C9C8F5}.Debug|x64.Build.0 = Debug|Any CPU - {A8B060F0-BD04-4CFB-BC99-C31AE6C9C8F5}.Debug|x86.ActiveCfg = Debug|Any CPU - {A8B060F0-BD04-4CFB-BC99-C31AE6C9C8F5}.Debug|x86.Build.0 = Debug|Any CPU - {A8B060F0-BD04-4CFB-BC99-C31AE6C9C8F5}.Release|Any CPU.ActiveCfg = Release|Any CPU - {A8B060F0-BD04-4CFB-BC99-C31AE6C9C8F5}.Release|Any CPU.Build.0 = Release|Any CPU - {A8B060F0-BD04-4CFB-BC99-C31AE6C9C8F5}.Release|x64.ActiveCfg = Release|Any CPU - {A8B060F0-BD04-4CFB-BC99-C31AE6C9C8F5}.Release|x64.Build.0 = Release|Any CPU - {A8B060F0-BD04-4CFB-BC99-C31AE6C9C8F5}.Release|x86.ActiveCfg = Release|Any CPU - {A8B060F0-BD04-4CFB-BC99-C31AE6C9C8F5}.Release|x86.Build.0 = Release|Any CPU - {2DB372A2-C0AD-48D6-875C-CDEB01CC7AFB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {2DB372A2-C0AD-48D6-875C-CDEB01CC7AFB}.Debug|Any CPU.Build.0 = Debug|Any CPU - {2DB372A2-C0AD-48D6-875C-CDEB01CC7AFB}.Debug|x64.ActiveCfg = Debug|Any CPU - {2DB372A2-C0AD-48D6-875C-CDEB01CC7AFB}.Debug|x64.Build.0 = Debug|Any CPU - {2DB372A2-C0AD-48D6-875C-CDEB01CC7AFB}.Debug|x86.ActiveCfg = Debug|Any CPU - {2DB372A2-C0AD-48D6-875C-CDEB01CC7AFB}.Debug|x86.Build.0 = Debug|Any CPU - {2DB372A2-C0AD-48D6-875C-CDEB01CC7AFB}.Release|Any CPU.ActiveCfg = Release|Any CPU - {2DB372A2-C0AD-48D6-875C-CDEB01CC7AFB}.Release|Any CPU.Build.0 = Release|Any CPU - {2DB372A2-C0AD-48D6-875C-CDEB01CC7AFB}.Release|x64.ActiveCfg = Release|Any CPU - {2DB372A2-C0AD-48D6-875C-CDEB01CC7AFB}.Release|x64.Build.0 = Release|Any CPU - {2DB372A2-C0AD-48D6-875C-CDEB01CC7AFB}.Release|x86.ActiveCfg = Release|Any CPU - {2DB372A2-C0AD-48D6-875C-CDEB01CC7AFB}.Release|x86.Build.0 = Release|Any CPU - {A1460E98-EDED-42BE-ACF8-896ED94053F1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {A1460E98-EDED-42BE-ACF8-896ED94053F1}.Debug|Any CPU.Build.0 = Debug|Any CPU - {A1460E98-EDED-42BE-ACF8-896ED94053F1}.Debug|x64.ActiveCfg = Debug|Any CPU - {A1460E98-EDED-42BE-ACF8-896ED94053F1}.Debug|x64.Build.0 = Debug|Any CPU - {A1460E98-EDED-42BE-ACF8-896ED94053F1}.Debug|x86.ActiveCfg = Debug|Any CPU - {A1460E98-EDED-42BE-ACF8-896ED94053F1}.Debug|x86.Build.0 = Debug|Any CPU - {A1460E98-EDED-42BE-ACF8-896ED94053F1}.Release|Any CPU.ActiveCfg = Release|Any CPU - {A1460E98-EDED-42BE-ACF8-896ED94053F1}.Release|Any CPU.Build.0 = Release|Any CPU - {A1460E98-EDED-42BE-ACF8-896ED94053F1}.Release|x64.ActiveCfg = Release|Any CPU - {A1460E98-EDED-42BE-ACF8-896ED94053F1}.Release|x64.Build.0 = Release|Any CPU - {A1460E98-EDED-42BE-ACF8-896ED94053F1}.Release|x86.ActiveCfg = Release|Any CPU - 
{A1460E98-EDED-42BE-ACF8-896ED94053F1}.Release|x86.Build.0 = Release|Any CPU - {73531B46-E364-4C0F-B84C-8BDCF3E16051}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {73531B46-E364-4C0F-B84C-8BDCF3E16051}.Debug|Any CPU.Build.0 = Debug|Any CPU - {73531B46-E364-4C0F-B84C-8BDCF3E16051}.Debug|x64.ActiveCfg = Debug|Any CPU - {73531B46-E364-4C0F-B84C-8BDCF3E16051}.Debug|x64.Build.0 = Debug|Any CPU - {73531B46-E364-4C0F-B84C-8BDCF3E16051}.Debug|x86.ActiveCfg = Debug|Any CPU - {73531B46-E364-4C0F-B84C-8BDCF3E16051}.Debug|x86.Build.0 = Debug|Any CPU - {73531B46-E364-4C0F-B84C-8BDCF3E16051}.Release|Any CPU.ActiveCfg = Release|Any CPU - {73531B46-E364-4C0F-B84C-8BDCF3E16051}.Release|Any CPU.Build.0 = Release|Any CPU - {73531B46-E364-4C0F-B84C-8BDCF3E16051}.Release|x64.ActiveCfg = Release|Any CPU - {73531B46-E364-4C0F-B84C-8BDCF3E16051}.Release|x64.Build.0 = Release|Any CPU - {73531B46-E364-4C0F-B84C-8BDCF3E16051}.Release|x86.ActiveCfg = Release|Any CPU - {73531B46-E364-4C0F-B84C-8BDCF3E16051}.Release|x86.Build.0 = Release|Any CPU - {1201F1ED-F35A-4F12-B662-BB616122A2F2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {1201F1ED-F35A-4F12-B662-BB616122A2F2}.Debug|Any CPU.Build.0 = Debug|Any CPU - {1201F1ED-F35A-4F12-B662-BB616122A2F2}.Debug|x64.ActiveCfg = Debug|Any CPU - {1201F1ED-F35A-4F12-B662-BB616122A2F2}.Debug|x64.Build.0 = Debug|Any CPU - {1201F1ED-F35A-4F12-B662-BB616122A2F2}.Debug|x86.ActiveCfg = Debug|Any CPU - {1201F1ED-F35A-4F12-B662-BB616122A2F2}.Debug|x86.Build.0 = Debug|Any CPU - {1201F1ED-F35A-4F12-B662-BB616122A2F2}.Release|Any CPU.ActiveCfg = Release|Any CPU - {1201F1ED-F35A-4F12-B662-BB616122A2F2}.Release|Any CPU.Build.0 = Release|Any CPU - {1201F1ED-F35A-4F12-B662-BB616122A2F2}.Release|x64.ActiveCfg = Release|Any CPU - {1201F1ED-F35A-4F12-B662-BB616122A2F2}.Release|x64.Build.0 = Release|Any CPU - {1201F1ED-F35A-4F12-B662-BB616122A2F2}.Release|x86.ActiveCfg = Release|Any CPU - {1201F1ED-F35A-4F12-B662-BB616122A2F2}.Release|x86.Build.0 = Release|Any CPU - EndGlobalSection - GlobalSection(SolutionProperties) = preSolution - HideSolutionNode = FALSE - EndGlobalSection -EndGlobal + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.0.31903.59 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.ExportCenter.Core", "StellaOps.ExportCenter.Core\StellaOps.ExportCenter.Core.csproj", "{A8B060F0-BD04-4CFB-BC99-C31AE6C9C8F5}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.ExportCenter.Infrastructure", "StellaOps.ExportCenter.Infrastructure\StellaOps.ExportCenter.Infrastructure.csproj", "{2DB372A2-C0AD-48D6-875C-CDEB01CC7AFB}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.ExportCenter.WebService", "StellaOps.ExportCenter.WebService\StellaOps.ExportCenter.WebService.csproj", "{A1460E98-EDED-42BE-ACF8-896ED94053F1}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.ExportCenter.Worker", "StellaOps.ExportCenter.Worker\StellaOps.ExportCenter.Worker.csproj", "{73531B46-E364-4C0F-B84C-8BDCF3E16051}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.ExportCenter.Tests", "StellaOps.ExportCenter.Tests\StellaOps.ExportCenter.Tests.csproj", "{1201F1ED-F35A-4F12-B662-BB616122A2F2}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|Any CPU = Release|Any CPU + Release|x64 = 
Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {A8B060F0-BD04-4CFB-BC99-C31AE6C9C8F5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {A8B060F0-BD04-4CFB-BC99-C31AE6C9C8F5}.Debug|Any CPU.Build.0 = Debug|Any CPU + {A8B060F0-BD04-4CFB-BC99-C31AE6C9C8F5}.Debug|x64.ActiveCfg = Debug|Any CPU + {A8B060F0-BD04-4CFB-BC99-C31AE6C9C8F5}.Debug|x64.Build.0 = Debug|Any CPU + {A8B060F0-BD04-4CFB-BC99-C31AE6C9C8F5}.Debug|x86.ActiveCfg = Debug|Any CPU + {A8B060F0-BD04-4CFB-BC99-C31AE6C9C8F5}.Debug|x86.Build.0 = Debug|Any CPU + {A8B060F0-BD04-4CFB-BC99-C31AE6C9C8F5}.Release|Any CPU.ActiveCfg = Release|Any CPU + {A8B060F0-BD04-4CFB-BC99-C31AE6C9C8F5}.Release|Any CPU.Build.0 = Release|Any CPU + {A8B060F0-BD04-4CFB-BC99-C31AE6C9C8F5}.Release|x64.ActiveCfg = Release|Any CPU + {A8B060F0-BD04-4CFB-BC99-C31AE6C9C8F5}.Release|x64.Build.0 = Release|Any CPU + {A8B060F0-BD04-4CFB-BC99-C31AE6C9C8F5}.Release|x86.ActiveCfg = Release|Any CPU + {A8B060F0-BD04-4CFB-BC99-C31AE6C9C8F5}.Release|x86.Build.0 = Release|Any CPU + {2DB372A2-C0AD-48D6-875C-CDEB01CC7AFB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {2DB372A2-C0AD-48D6-875C-CDEB01CC7AFB}.Debug|Any CPU.Build.0 = Debug|Any CPU + {2DB372A2-C0AD-48D6-875C-CDEB01CC7AFB}.Debug|x64.ActiveCfg = Debug|Any CPU + {2DB372A2-C0AD-48D6-875C-CDEB01CC7AFB}.Debug|x64.Build.0 = Debug|Any CPU + {2DB372A2-C0AD-48D6-875C-CDEB01CC7AFB}.Debug|x86.ActiveCfg = Debug|Any CPU + {2DB372A2-C0AD-48D6-875C-CDEB01CC7AFB}.Debug|x86.Build.0 = Debug|Any CPU + {2DB372A2-C0AD-48D6-875C-CDEB01CC7AFB}.Release|Any CPU.ActiveCfg = Release|Any CPU + {2DB372A2-C0AD-48D6-875C-CDEB01CC7AFB}.Release|Any CPU.Build.0 = Release|Any CPU + {2DB372A2-C0AD-48D6-875C-CDEB01CC7AFB}.Release|x64.ActiveCfg = Release|Any CPU + {2DB372A2-C0AD-48D6-875C-CDEB01CC7AFB}.Release|x64.Build.0 = Release|Any CPU + {2DB372A2-C0AD-48D6-875C-CDEB01CC7AFB}.Release|x86.ActiveCfg = Release|Any CPU + {2DB372A2-C0AD-48D6-875C-CDEB01CC7AFB}.Release|x86.Build.0 = Release|Any CPU + {A1460E98-EDED-42BE-ACF8-896ED94053F1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {A1460E98-EDED-42BE-ACF8-896ED94053F1}.Debug|Any CPU.Build.0 = Debug|Any CPU + {A1460E98-EDED-42BE-ACF8-896ED94053F1}.Debug|x64.ActiveCfg = Debug|Any CPU + {A1460E98-EDED-42BE-ACF8-896ED94053F1}.Debug|x64.Build.0 = Debug|Any CPU + {A1460E98-EDED-42BE-ACF8-896ED94053F1}.Debug|x86.ActiveCfg = Debug|Any CPU + {A1460E98-EDED-42BE-ACF8-896ED94053F1}.Debug|x86.Build.0 = Debug|Any CPU + {A1460E98-EDED-42BE-ACF8-896ED94053F1}.Release|Any CPU.ActiveCfg = Release|Any CPU + {A1460E98-EDED-42BE-ACF8-896ED94053F1}.Release|Any CPU.Build.0 = Release|Any CPU + {A1460E98-EDED-42BE-ACF8-896ED94053F1}.Release|x64.ActiveCfg = Release|Any CPU + {A1460E98-EDED-42BE-ACF8-896ED94053F1}.Release|x64.Build.0 = Release|Any CPU + {A1460E98-EDED-42BE-ACF8-896ED94053F1}.Release|x86.ActiveCfg = Release|Any CPU + {A1460E98-EDED-42BE-ACF8-896ED94053F1}.Release|x86.Build.0 = Release|Any CPU + {73531B46-E364-4C0F-B84C-8BDCF3E16051}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {73531B46-E364-4C0F-B84C-8BDCF3E16051}.Debug|Any CPU.Build.0 = Debug|Any CPU + {73531B46-E364-4C0F-B84C-8BDCF3E16051}.Debug|x64.ActiveCfg = Debug|Any CPU + {73531B46-E364-4C0F-B84C-8BDCF3E16051}.Debug|x64.Build.0 = Debug|Any CPU + {73531B46-E364-4C0F-B84C-8BDCF3E16051}.Debug|x86.ActiveCfg = Debug|Any CPU + {73531B46-E364-4C0F-B84C-8BDCF3E16051}.Debug|x86.Build.0 = Debug|Any CPU + {73531B46-E364-4C0F-B84C-8BDCF3E16051}.Release|Any CPU.ActiveCfg = Release|Any CPU + 
{73531B46-E364-4C0F-B84C-8BDCF3E16051}.Release|Any CPU.Build.0 = Release|Any CPU + {73531B46-E364-4C0F-B84C-8BDCF3E16051}.Release|x64.ActiveCfg = Release|Any CPU + {73531B46-E364-4C0F-B84C-8BDCF3E16051}.Release|x64.Build.0 = Release|Any CPU + {73531B46-E364-4C0F-B84C-8BDCF3E16051}.Release|x86.ActiveCfg = Release|Any CPU + {73531B46-E364-4C0F-B84C-8BDCF3E16051}.Release|x86.Build.0 = Release|Any CPU + {1201F1ED-F35A-4F12-B662-BB616122A2F2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {1201F1ED-F35A-4F12-B662-BB616122A2F2}.Debug|Any CPU.Build.0 = Debug|Any CPU + {1201F1ED-F35A-4F12-B662-BB616122A2F2}.Debug|x64.ActiveCfg = Debug|Any CPU + {1201F1ED-F35A-4F12-B662-BB616122A2F2}.Debug|x64.Build.0 = Debug|Any CPU + {1201F1ED-F35A-4F12-B662-BB616122A2F2}.Debug|x86.ActiveCfg = Debug|Any CPU + {1201F1ED-F35A-4F12-B662-BB616122A2F2}.Debug|x86.Build.0 = Debug|Any CPU + {1201F1ED-F35A-4F12-B662-BB616122A2F2}.Release|Any CPU.ActiveCfg = Release|Any CPU + {1201F1ED-F35A-4F12-B662-BB616122A2F2}.Release|Any CPU.Build.0 = Release|Any CPU + {1201F1ED-F35A-4F12-B662-BB616122A2F2}.Release|x64.ActiveCfg = Release|Any CPU + {1201F1ED-F35A-4F12-B662-BB616122A2F2}.Release|x64.Build.0 = Release|Any CPU + {1201F1ED-F35A-4F12-B662-BB616122A2F2}.Release|x86.ActiveCfg = Release|Any CPU + {1201F1ED-F35A-4F12-B662-BB616122A2F2}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection +EndGlobal diff --git a/src/StellaOps.ExportCenter/TASKS.md b/src/ExportCenter/StellaOps.ExportCenter/TASKS.md similarity index 99% rename from src/StellaOps.ExportCenter/TASKS.md rename to src/ExportCenter/StellaOps.ExportCenter/TASKS.md index 2e8bee75..a00a428a 100644 --- a/src/StellaOps.ExportCenter/TASKS.md +++ b/src/ExportCenter/StellaOps.ExportCenter/TASKS.md @@ -1,77 +1,77 @@ -# Exporter Service Task Board — Epic 10: Export Center - -## Sprint 35 – Foundations (JSON + Mirror Full, Download Only) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| EXPORT-SVC-35-001 | BLOCKED (2025-10-29) | Exporter Service Guild | ORCH-SVC-35-101, LEDGER-EXPORT-35-001 | Bootstrap exporter service project, configuration, and Postgres migrations for `export_profiles`, `export_runs`, `export_inputs`, `export_distributions` with tenant scoping + tests. | Service builds/tests; migrations generated with scripts; baseline integration test seeds schema; compliance checklist recorded. | -> Blocked: waiting on Orchestrator export job contract (ORCH-SVC-35-101) and Findings Ledger export endpoints (LEDGER-EXPORT-35-001) before bootstrapping service schema. -| EXPORT-SVC-35-002 | TODO | Exporter Service Guild | EXPORT-SVC-35-001 | Implement planner + scope resolver translating filters into ledger iterators and orchestrator job payloads; include deterministic sampling and validation. | Planner passes unit/property tests; orchestrator contract documented; filter validation errors mapped. | -| EXPORT-SVC-35-003 | TODO | Exporter Service Guild | EXPORT-SVC-35-002 | Deliver JSON adapters (`json:raw`, `json:policy`) with canonical normalization, redaction allowlists, compression, and manifest counts. | JSONL outputs deterministic; redaction enforced; unit/integration tests cover advisories/VEX/SBOM/findings. 
| -| EXPORT-SVC-35-004 | TODO | Exporter Service Guild | EXPORT-SVC-35-002 | Build mirror (full) adapter producing filesystem layout, indexes, manifests, and README with download-only distribution. | Mirror bundle passes integration tests; indexes generated; manifest validated; docs cross-referenced. | -| EXPORT-SVC-35-005 | TODO | Exporter Service Guild | EXPORT-SVC-35-003 | Implement manifest/provenance writer and KMS signing/attestation (detached + embedded) for bundle outputs. | `export.json`/`provenance.json` generated with hashes; signatures produced via KMS; verification test passes. | -| EXPORT-SVC-35-006 | TODO | Exporter Service Guild | EXPORT-SVC-35-001..005 | Expose Export API (profiles, runs, download, SSE updates) with audit logging, concurrency controls, and viewer/operator RBAC integration. | OpenAPI published; SSE stream validated; audit logs captured; rate limits enforced in tests. | - -## Sprint 36 – Trivy + Distribution -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| EXPORT-SVC-36-001 | TODO | Exporter Service Guild | EXPORT-SVC-35-002 | Implement Trivy DB adapter (core) with schema mappings, version flag gating, and validation harness. | Trivy bundle builds for fixtures; compatibility tests against reference Trivy; errors surfaced for unknown schema. | -| EXPORT-SVC-36-002 | TODO | Exporter Service Guild | EXPORT-SVC-36-001 | Add Trivy Java DB variant with shared manifest entries and adapter regression tests. | Java DB bundle produced when enabled; manifest annotated; integration tests cover optional config. | -| EXPORT-SVC-36-003 | TODO | Exporter Service Guild | EXPORT-SVC-35-006 | Build OCI distribution engine (manifests, descriptors, annotations) with registry auth support and retries. | OCI push works in integration tests; annotations present; retry/backoff validated. | -| EXPORT-SVC-36-004 | TODO | Exporter Service Guild | EXPORT-SVC-36-003 | Extend planner/run lifecycle for distribution targets (OCI/object storage) with idempotent metadata updates and retention timestamps. | Export runs track distribution state; object storage writer tested; retention metadata stored. | - -## Sprint 37 – Delta, Encryption, Scheduling, GA -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| EXPORT-SVC-37-001 | TODO | Exporter Service Guild | EXPORT-SVC-35-004 | Implement mirror delta adapter with base manifest comparison, change set generation, and content-addressed reuse. | Delta bundles generated with accurate adds/removes; manifest references base export; tests cover large datasets. | -| EXPORT-SVC-37-002 | TODO | Exporter Service Guild | EXPORT-SVC-35-005, AUTH-EXPORT-37-001 | Add bundle encryption (age/AES-GCM), key wrapping via KMS, and verification tooling for encrypted outputs. | Encrypted bundles produced; decrypt tool validated; key rotation tests pass. | -| EXPORT-SVC-37-003 | TODO | Exporter Service Guild | ORCH-SVC-37-101 | Implement export scheduling (cron/event), retention pruning, retry idempotency, and failure classification. | Schedules persisted; retention jobs prune data; retries clean; metrics/logs emitted. | -| EXPORT-SVC-37-004 | TODO | Exporter Service Guild | EXPORT-SVC-35-005 | Provide verification API to stream manifests/hashes, compute hash+signature checks, and return attest status for CLI/UI. 
| Verification endpoint live; integration tests cover success/failure; metrics track verify attempts. | - -## CLI Parity & Task Packs Integration -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| EXPORT-SVC-43-001 | TODO | Exporter Service Guild | PACKS-REG-41-001, TASKRUN-41-001 | Integrate pack run manifests/artifacts into export bundles and CLI verification flows; expose provenance links. | Pack run exports available; manifests signed; CLI verify uses exports; tests cover workflow. | - -## Authority-Backed Scopes & Tenancy (Epic 14) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| EXPORT-TEN-48-001 | TODO | Exporter Service Guild | WEB-TEN-48-001 | Prefix artifacts/manifests with tenant/project, enforce scope checks, and prevent cross-tenant exports unless explicitly whitelisted; update provenance. | Exports contain tenant id; cross-tenant attempt denied; tests cover scope enforcement. | - -## Observability & Forensics (Epic 15) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| EXPORT-OBS-50-001 | TODO | Exporter Service Guild, Observability Guild | TELEMETRY-OBS-50-001, TELEMETRY-OBS-50-002 | Adopt telemetry core in exporter service + workers, ensuring spans/logs capture profile id, tenant, artifact counts, distribution type, and trace IDs. | Telemetry confirmed via integration tests; logging contract validated; CLI trace linking works. | -| EXPORT-OBS-51-001 | TODO | Exporter Service Guild, DevOps Guild | EXPORT-OBS-50-001, TELEMETRY-OBS-51-001 | Emit metrics for export planner latency, bundle build time, distribution success rate, bundle size, and define SLOs (bundle availability P95 <90s). Add Grafana dashboards + burn-rate alerts. | Metrics visible; alerts tested; documentation updated. | -| EXPORT-OBS-52-001 | TODO | Exporter Service Guild | EXPORT-OBS-50-001, TIMELINE-OBS-52-002 | Publish timeline events for export lifecycle (`export.requested`, `export.built`, `export.distributed`, `export.failed`) embedding manifest hashes and evidence refs. Provide dedupe + retry logic. | Timeline events verified; duplicates suppressed; docs record schema. | -| EXPORT-OBS-53-001 | TODO | Exporter Service Guild, Evidence Locker Guild | EXPORT-OBS-52-001, EVID-OBS-53-002 | Push export manifests + distribution transcripts to evidence locker bundles, ensuring Merkle root alignment and DSSE pre-sign data available. | Evidence bundles include export data; manifests deterministic; integration tests pass. | -| EXPORT-OBS-54-001 | TODO | Exporter Service Guild, Provenance Guild | EXPORT-OBS-53-001, PROV-OBS-53-002 | Produce DSSE attestations for each export artifact and distribution target, expose verification API `/exports/{id}/attestation`, and integrate with CLI verify path. | Attestations generated/verified; API live; CLI integration tests updated. | -| EXPORT-OBS-55-001 | TODO | Exporter Service Guild, DevOps Guild | EXPORT-OBS-51-001, DEVOPS-OBS-55-001 | Add incident mode enhancements (extra tracing for slow exports, additional debug logs, retention bump). Emit incident activation events to timeline + notifier. | Incident mode validated; extra telemetry captured; events observed. 
| - -## Air-Gapped Mode (Epic 16) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| EXPORT-AIRGAP-56-001 | TODO | Exporter Service Guild, Mirror Creator Guild | MIRROR-CRT-56-001, AIRGAP-IMP-56-001 | Extend Export Center to build Mirror Bundles as export profiles, including advisories/VEX/policy packs manifesting DSSE/TUF metadata. | Export profile produces bundle matching mirror spec; verification succeeds; audit entry stored. | -| EXPORT-AIRGAP-56-002 | TODO | Exporter Service Guild, DevOps Guild | EXPORT-AIRGAP-56-001, DEVOPS-OBS-50-003 | Package Bootstrap Pack (images + charts) into OCI archives with signed manifests for air-gapped deployment. | Bootstrap pack generated; digests recorded; documentation stubbed. | -| EXPORT-AIRGAP-57-001 | TODO | Exporter Service Guild, Evidence Locker Guild | EXPORT-AIRGAP-56-001, EVID-OBS-54-002 | Integrate portable evidence export mode producing sealed evidence bundles with DSSE signatures and chain-of-custody metadata. | Portable bundles generated and verified; CLI/Console flows consume exports; tests cover tampering. | -| EXPORT-AIRGAP-58-001 | TODO | Exporter Service Guild, Notifications Guild | EXPORT-AIRGAP-56-001, NOTIFY-OBS-51-001 | Emit notifications and timeline events when Mirror Bundles or Bootstrap packs are ready for transfer. | Notifications delivered with links; timeline events recorded; metrics updated. | - -## SDKs & OpenAPI (Epic 17) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| EXPORT-OAS-61-001 | TODO | Exporter Service Guild, API Contracts Guild | OAS-61-001 | Update Exporter OAS covering profiles, runs, downloads, devportal exports with standard error envelope and examples. | Spec complete; lint passes; examples validated. | -| EXPORT-OAS-61-002 | TODO | Exporter Service Guild | EXPORT-OAS-61-001 | Provide `/.well-known/openapi` discovery endpoint with version metadata and ETag. | Endpoint deployed; contract tests cover discovery. | -| EXPORT-OAS-62-001 | TODO | Exporter Service Guild, SDK Generator Guild | EXPORT-OAS-61-001, SDKGEN-63-001 | Ensure SDKs include export profile/run clients with streaming download helpers; add smoke tests. | SDK tests download/export artifact; documentation includes snippets. | -| EXPORT-OAS-63-001 | TODO | Exporter Service Guild, API Governance Guild | APIGOV-63-001 | Implement deprecation headers and notifications for legacy export endpoints. | Headers emitted; notifications pipeline validated. | - -## Risk Profiles (Epic 18) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| EXPORT-RISK-69-001 | TODO | Exporter Service Guild, Risk Bundle Export Guild | RISK-BUNDLE-69-001 | Add Export Center job handler `risk-bundle` with provider selection, manifest signing, and audit logging. | Job deploys; manifest stored; audit logs include actor and scope. | -| EXPORT-RISK-69-002 | TODO | Exporter Service Guild, Risk Engine Guild | EXPORT-RISK-69-001 | Enable simulation report exports pulling scored data + explainability snapshots. | Simulation exports available via API/CLI; tests ensure deterministic output. | -| EXPORT-RISK-70-001 | TODO | Exporter Service Guild, DevOps Guild | EXPORT-RISK-69-001 | Integrate risk bundle builds into offline kit packaging with checksum verification. 
| Offline kit includes risk bundle; verification pipeline passes; docs updated. | - -## Attestor Console (Epic 19) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| EXPORT-ATTEST-74-001 | TODO | Exporter Service Guild, Attestation Bundle Guild | ATTESTOR-74-002 | Implement attestation bundle export job via Export Center. | Job builds bundle; manifest signed; tests pass. | -| EXPORT-ATTEST-75-001 | TODO | Exporter Service Guild | EXPORT-ATTEST-74-001 | Integrate attestation bundles into offline kit flows and CLI commands. | Offline kit updated; CLI `export attestation-bundle` operational; docs refreshed. | +# Exporter Service Task Board — Epic 10: Export Center + +## Sprint 35 – Foundations (JSON + Mirror Full, Download Only) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| EXPORT-SVC-35-001 | BLOCKED (2025-10-29) | Exporter Service Guild | ORCH-SVC-35-101, LEDGER-EXPORT-35-001 | Bootstrap exporter service project, configuration, and Postgres migrations for `export_profiles`, `export_runs`, `export_inputs`, `export_distributions` with tenant scoping + tests. | Service builds/tests; migrations generated with scripts; baseline integration test seeds schema; compliance checklist recorded. | +> Blocked: waiting on Orchestrator export job contract (ORCH-SVC-35-101) and Findings Ledger export endpoints (LEDGER-EXPORT-35-001) before bootstrapping service schema. +| EXPORT-SVC-35-002 | TODO | Exporter Service Guild | EXPORT-SVC-35-001 | Implement planner + scope resolver translating filters into ledger iterators and orchestrator job payloads; include deterministic sampling and validation. | Planner passes unit/property tests; orchestrator contract documented; filter validation errors mapped. | +| EXPORT-SVC-35-003 | TODO | Exporter Service Guild | EXPORT-SVC-35-002 | Deliver JSON adapters (`json:raw`, `json:policy`) with canonical normalization, redaction allowlists, compression, and manifest counts. | JSONL outputs deterministic; redaction enforced; unit/integration tests cover advisories/VEX/SBOM/findings. | +| EXPORT-SVC-35-004 | TODO | Exporter Service Guild | EXPORT-SVC-35-002 | Build mirror (full) adapter producing filesystem layout, indexes, manifests, and README with download-only distribution. | Mirror bundle passes integration tests; indexes generated; manifest validated; docs cross-referenced. | +| EXPORT-SVC-35-005 | TODO | Exporter Service Guild | EXPORT-SVC-35-003 | Implement manifest/provenance writer and KMS signing/attestation (detached + embedded) for bundle outputs. | `export.json`/`provenance.json` generated with hashes; signatures produced via KMS; verification test passes. | +| EXPORT-SVC-35-006 | TODO | Exporter Service Guild | EXPORT-SVC-35-001..005 | Expose Export API (profiles, runs, download, SSE updates) with audit logging, concurrency controls, and viewer/operator RBAC integration. | OpenAPI published; SSE stream validated; audit logs captured; rate limits enforced in tests. | + +## Sprint 36 – Trivy + Distribution +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| EXPORT-SVC-36-001 | TODO | Exporter Service Guild | EXPORT-SVC-35-002 | Implement Trivy DB adapter (core) with schema mappings, version flag gating, and validation harness. 
| Trivy bundle builds for fixtures; compatibility tests against reference Trivy; errors surfaced for unknown schema. | +| EXPORT-SVC-36-002 | TODO | Exporter Service Guild | EXPORT-SVC-36-001 | Add Trivy Java DB variant with shared manifest entries and adapter regression tests. | Java DB bundle produced when enabled; manifest annotated; integration tests cover optional config. | +| EXPORT-SVC-36-003 | TODO | Exporter Service Guild | EXPORT-SVC-35-006 | Build OCI distribution engine (manifests, descriptors, annotations) with registry auth support and retries. | OCI push works in integration tests; annotations present; retry/backoff validated. | +| EXPORT-SVC-36-004 | TODO | Exporter Service Guild | EXPORT-SVC-36-003 | Extend planner/run lifecycle for distribution targets (OCI/object storage) with idempotent metadata updates and retention timestamps. | Export runs track distribution state; object storage writer tested; retention metadata stored. | + +## Sprint 37 – Delta, Encryption, Scheduling, GA +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| EXPORT-SVC-37-001 | TODO | Exporter Service Guild | EXPORT-SVC-35-004 | Implement mirror delta adapter with base manifest comparison, change set generation, and content-addressed reuse. | Delta bundles generated with accurate adds/removes; manifest references base export; tests cover large datasets. | +| EXPORT-SVC-37-002 | TODO | Exporter Service Guild | EXPORT-SVC-35-005, AUTH-EXPORT-37-001 | Add bundle encryption (age/AES-GCM), key wrapping via KMS, and verification tooling for encrypted outputs. | Encrypted bundles produced; decrypt tool validated; key rotation tests pass. | +| EXPORT-SVC-37-003 | TODO | Exporter Service Guild | ORCH-SVC-37-101 | Implement export scheduling (cron/event), retention pruning, retry idempotency, and failure classification. | Schedules persisted; retention jobs prune data; retries clean; metrics/logs emitted. | +| EXPORT-SVC-37-004 | TODO | Exporter Service Guild | EXPORT-SVC-35-005 | Provide verification API to stream manifests/hashes, compute hash+signature checks, and return attest status for CLI/UI. | Verification endpoint live; integration tests cover success/failure; metrics track verify attempts. | + +## CLI Parity & Task Packs Integration +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| EXPORT-SVC-43-001 | TODO | Exporter Service Guild | PACKS-REG-41-001, TASKRUN-41-001 | Integrate pack run manifests/artifacts into export bundles and CLI verification flows; expose provenance links. | Pack run exports available; manifests signed; CLI verify uses exports; tests cover workflow. | + +## Authority-Backed Scopes & Tenancy (Epic 14) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| EXPORT-TEN-48-001 | TODO | Exporter Service Guild | WEB-TEN-48-001 | Prefix artifacts/manifests with tenant/project, enforce scope checks, and prevent cross-tenant exports unless explicitly whitelisted; update provenance. | Exports contain tenant id; cross-tenant attempt denied; tests cover scope enforcement. 
| + +## Observability & Forensics (Epic 15) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| EXPORT-OBS-50-001 | TODO | Exporter Service Guild, Observability Guild | TELEMETRY-OBS-50-001, TELEMETRY-OBS-50-002 | Adopt telemetry core in exporter service + workers, ensuring spans/logs capture profile id, tenant, artifact counts, distribution type, and trace IDs. | Telemetry confirmed via integration tests; logging contract validated; CLI trace linking works. | +| EXPORT-OBS-51-001 | TODO | Exporter Service Guild, DevOps Guild | EXPORT-OBS-50-001, TELEMETRY-OBS-51-001 | Emit metrics for export planner latency, bundle build time, distribution success rate, bundle size, and define SLOs (bundle availability P95 <90s). Add Grafana dashboards + burn-rate alerts. | Metrics visible; alerts tested; documentation updated. | +| EXPORT-OBS-52-001 | TODO | Exporter Service Guild | EXPORT-OBS-50-001, TIMELINE-OBS-52-002 | Publish timeline events for export lifecycle (`export.requested`, `export.built`, `export.distributed`, `export.failed`) embedding manifest hashes and evidence refs. Provide dedupe + retry logic. | Timeline events verified; duplicates suppressed; docs record schema. | +| EXPORT-OBS-53-001 | TODO | Exporter Service Guild, Evidence Locker Guild | EXPORT-OBS-52-001, EVID-OBS-53-002 | Push export manifests + distribution transcripts to evidence locker bundles, ensuring Merkle root alignment and DSSE pre-sign data available. | Evidence bundles include export data; manifests deterministic; integration tests pass. | +| EXPORT-OBS-54-001 | TODO | Exporter Service Guild, Provenance Guild | EXPORT-OBS-53-001, PROV-OBS-53-002 | Produce DSSE attestations for each export artifact and distribution target, expose verification API `/exports/{id}/attestation`, and integrate with CLI verify path. | Attestations generated/verified; API live; CLI integration tests updated. | +| EXPORT-OBS-55-001 | TODO | Exporter Service Guild, DevOps Guild | EXPORT-OBS-51-001, DEVOPS-OBS-55-001 | Add incident mode enhancements (extra tracing for slow exports, additional debug logs, retention bump). Emit incident activation events to timeline + notifier. | Incident mode validated; extra telemetry captured; events observed. | + +## Air-Gapped Mode (Epic 16) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| EXPORT-AIRGAP-56-001 | TODO | Exporter Service Guild, Mirror Creator Guild | MIRROR-CRT-56-001, AIRGAP-IMP-56-001 | Extend Export Center to build Mirror Bundles as export profiles, including advisories/VEX/policy packs manifesting DSSE/TUF metadata. | Export profile produces bundle matching mirror spec; verification succeeds; audit entry stored. | +| EXPORT-AIRGAP-56-002 | TODO | Exporter Service Guild, DevOps Guild | EXPORT-AIRGAP-56-001, DEVOPS-OBS-50-003 | Package Bootstrap Pack (images + charts) into OCI archives with signed manifests for air-gapped deployment. | Bootstrap pack generated; digests recorded; documentation stubbed. | +| EXPORT-AIRGAP-57-001 | TODO | Exporter Service Guild, Evidence Locker Guild | EXPORT-AIRGAP-56-001, EVID-OBS-54-002 | Integrate portable evidence export mode producing sealed evidence bundles with DSSE signatures and chain-of-custody metadata. | Portable bundles generated and verified; CLI/Console flows consume exports; tests cover tampering. 
| +| EXPORT-AIRGAP-58-001 | TODO | Exporter Service Guild, Notifications Guild | EXPORT-AIRGAP-56-001, NOTIFY-OBS-51-001 | Emit notifications and timeline events when Mirror Bundles or Bootstrap packs are ready for transfer. | Notifications delivered with links; timeline events recorded; metrics updated. | + +## SDKs & OpenAPI (Epic 17) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| EXPORT-OAS-61-001 | TODO | Exporter Service Guild, API Contracts Guild | OAS-61-001 | Update Exporter OAS covering profiles, runs, downloads, devportal exports with standard error envelope and examples. | Spec complete; lint passes; examples validated. | +| EXPORT-OAS-61-002 | TODO | Exporter Service Guild | EXPORT-OAS-61-001 | Provide `/.well-known/openapi` discovery endpoint with version metadata and ETag. | Endpoint deployed; contract tests cover discovery. | +| EXPORT-OAS-62-001 | TODO | Exporter Service Guild, SDK Generator Guild | EXPORT-OAS-61-001, SDKGEN-63-001 | Ensure SDKs include export profile/run clients with streaming download helpers; add smoke tests. | SDK tests download/export artifact; documentation includes snippets. | +| EXPORT-OAS-63-001 | TODO | Exporter Service Guild, API Governance Guild | APIGOV-63-001 | Implement deprecation headers and notifications for legacy export endpoints. | Headers emitted; notifications pipeline validated. | + +## Risk Profiles (Epic 18) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| EXPORT-RISK-69-001 | TODO | Exporter Service Guild, Risk Bundle Export Guild | RISK-BUNDLE-69-001 | Add Export Center job handler `risk-bundle` with provider selection, manifest signing, and audit logging. | Job deploys; manifest stored; audit logs include actor and scope. | +| EXPORT-RISK-69-002 | TODO | Exporter Service Guild, Risk Engine Guild | EXPORT-RISK-69-001 | Enable simulation report exports pulling scored data + explainability snapshots. | Simulation exports available via API/CLI; tests ensure deterministic output. | +| EXPORT-RISK-70-001 | TODO | Exporter Service Guild, DevOps Guild | EXPORT-RISK-69-001 | Integrate risk bundle builds into offline kit packaging with checksum verification. | Offline kit includes risk bundle; verification pipeline passes; docs updated. | + +## Attestor Console (Epic 19) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| EXPORT-ATTEST-74-001 | TODO | Exporter Service Guild, Attestation Bundle Guild | ATTESTOR-74-002 | Implement attestation bundle export job via Export Center. | Job builds bundle; manifest signed; tests pass. | +| EXPORT-ATTEST-75-001 | TODO | Exporter Service Guild | EXPORT-ATTEST-74-001 | Integrate attestation bundles into offline kit flows and CLI commands. | Offline kit updated; CLI `export attestation-bundle` operational; docs refreshed. | diff --git a/src/StellaOps.Findings.Ledger/AGENTS.md b/src/Findings/StellaOps.Findings.Ledger/AGENTS.md similarity index 90% rename from src/StellaOps.Findings.Ledger/AGENTS.md rename to src/Findings/StellaOps.Findings.Ledger/AGENTS.md index 7ff60067..65c0204f 100644 --- a/src/StellaOps.Findings.Ledger/AGENTS.md +++ b/src/Findings/StellaOps.Findings.Ledger/AGENTS.md @@ -4,7 +4,7 @@ Operate the append-only Findings Ledger and projection pipeline powering the Vulnerability Explorer. 
The guild guarantees immutable audit history, deterministic projections, and compliance with AOC guardrails while exposing workflow APIs. ## Scope -- Service code under `src/StellaOps.Findings.Ledger` (event API, projector, migrations, crypto hashing). +- Service code under `src/Findings/StellaOps.Findings.Ledger` (event API, projector, migrations, crypto hashing). - Ledger storage schemas, Merkle anchoring jobs, retention policies, and replay tooling. - Projection pipeline writing `findings_projection` collections/tables consumed by Vuln Explorer API and Console. - Collaboration with Concelier, Excititor, SBOM Service, Policy Engine, Scheduler, Authority, and DevOps for evidence feeds and policy events. @@ -17,7 +17,7 @@ Operate the append-only Findings Ledger and projection pipeline powering the Vul 5. **Auditability** – Provide verifiable hashes, Merkle roots, and replay tooling for auditors. ## Collaboration -- Keep `src/StellaOps.Findings.Ledger/TASKS.md`, `SPRINTS.md` synchronized. +- Keep `src/Findings/StellaOps.Findings.Ledger/TASKS.md`, `../../docs/implplan/SPRINTS.md` synchronized. - Publish schema docs, migrators, and replay scripts; coordinate with Vuln Explorer API on projection contracts. - Notify DevOps/Docs when Merkle root anchoring cadence or format changes. diff --git a/src/StellaOps.Findings.Ledger/TASKS.md b/src/Findings/StellaOps.Findings.Ledger/TASKS.md similarity index 99% rename from src/StellaOps.Findings.Ledger/TASKS.md rename to src/Findings/StellaOps.Findings.Ledger/TASKS.md index 4daf182c..9d35f2a0 100644 --- a/src/StellaOps.Findings.Ledger/TASKS.md +++ b/src/Findings/StellaOps.Findings.Ledger/TASKS.md @@ -1,73 +1,73 @@ -# Findings Ledger Task Board — Epic 6: Vulnerability Explorer -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| LEDGER-29-001 | TODO | Findings Ledger Guild | AUTH-POLICY-27-001 | Design ledger & projection schemas (tables/indexes), canonical JSON format, hashing strategy, and migrations. Publish schema doc + fixtures. | Schemas committed; migrations generated; hashing documented; fixtures seeded for CI. | -| LEDGER-29-002 | TODO | Findings Ledger Guild | LEDGER-29-001 | Implement ledger write API (`POST /vuln/ledger/events`) with validation, idempotency, hash chaining, and Merkle root computation job. | Events persisted with chained hashes; Merkle job emits anchors; unit/integration tests cover happy/pathological cases. | -| LEDGER-29-003 | TODO | Findings Ledger Guild, Scheduler Guild | LEDGER-29-001 | Build projector worker that derives `findings_projection` rows from ledger events + policy determinations; ensure idempotent replay keyed by `(tenant,finding_id,policy_version)`. | Projector processes sample streams deterministically; replay tests pass; metrics exported. | -| LEDGER-29-004 | TODO | Findings Ledger Guild, Policy Guild | LEDGER-29-003, POLICY-ENGINE-27-001 | Integrate Policy Engine batch evaluation (baseline + simulate) with projector; cache rationale references. | Projector fetches determinations efficiently; rationale stored for UI; regression tests cover version switches. | -| LEDGER-29-005 | TODO | Findings Ledger Guild | LEDGER-29-003 | Implement workflow mutation handlers (assign, comment, accept-risk, target-fix, verify-fix, reopen) producing ledger events with validation and attachments metadata. | API endpoints enforce business rules; attachments metadata stored; tests cover state machine transitions.
| -| LEDGER-29-006 | TODO | Findings Ledger Guild, Security Guild | LEDGER-29-002 | Integrate attachment encryption (KMS envelope), signed URL issuance, CSRF protection hooks for Console. | Attachments encrypted and accessible via signed URLs; security tests verify expiry + scope. | -| LEDGER-29-007 | TODO | Findings Ledger Guild, Observability Guild | LEDGER-29-002..005 | Instrument metrics (`ledger_write_latency`, `projection_lag_seconds`, `ledger_events_total`), structured logs, and Merkle anchoring alerts; publish dashboards. | Metrics/traces emitted; dashboards live; alert thresholds documented. | -| LEDGER-29-008 | TODO | Findings Ledger Guild, QA Guild | LEDGER-29-002..005 | Develop unit/property/integration tests, replay/restore tooling, determinism harness, and load tests at 5M findings/tenant. | CI suite green; load tests documented; determinism harness proves stable projections. | -| LEDGER-29-009 | TODO | Findings Ledger Guild, DevOps Guild | LEDGER-29-002..008 | Provide deployment manifests (Helm/Compose), backup/restore guidance, Merkle anchor externalization (optional), and offline kit instructions. | Deployment docs merged; smoke deploy validated; backup/restore scripts recorded; offline kit includes seed data. | - -## Export Center -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| LEDGER-EXPORT-35-001 | TODO | Findings Ledger Guild | LEDGER-29-003, EXPORT-SVC-35-002 | Provide paginated streaming endpoints for advisories, VEX, SBOMs, and findings aligned with export filters, including deterministic ordering and provenance metadata. | Streaming APIs deployed; integration tests with exporter planner; metrics/logs instrumented; docs updated. | - -## Orchestrator Dashboard -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| LEDGER-34-101 | TODO | Findings Ledger Guild | ORCH-SVC-34-002, LEDGER-29-002 | Link orchestrator run ledger exports into Findings Ledger provenance chain, index by artifact hash, and expose audit queries. | Ledger ingestion job consumes orchestrator exports; provenance queries return artifact chain; tests cover multi-tenant isolation; docs updated. | - -## CLI Parity & Task Packs -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| LEDGER-PACKS-42-001 | TODO | Findings Ledger Guild | LEDGER-29-003, TASKRUN-41-001 | Provide snapshot/time-travel APIs and digestable exports for task pack simulation and CLI offline mode. | Snapshot API deployed; simulation validated; docs updated; imposed rule noted. | - -## Authority-Backed Scopes & Tenancy (Epic 14) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| LEDGER-TEN-48-001 | TODO | Findings Ledger Guild | AUTH-TEN-47-001 | Partition ledger tables by tenant/project, enable RLS, update queries/events, and stamp audit metadata. | Ledger queries respect tenant context; RLS tests pass; events include tenant metadata. 
| - -## Observability & Forensics (Epic 15) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| LEDGER-OBS-50-001 | TODO | Findings Ledger Guild, Observability Guild | TELEMETRY-OBS-50-001, TELEMETRY-OBS-50-002 | Integrate telemetry core within ledger writer/projector services, emitting structured logs and trace spans for ledger append, projector replay, and query APIs with tenant context. | Telemetry present for append + replay flows; integration tests assert trace propagation; log schema validated. | -| LEDGER-OBS-51-001 | TODO | Findings Ledger Guild, DevOps Guild | LEDGER-OBS-50-001, TELEMETRY-OBS-51-001 | Publish metrics for ledger latency, projector lag, event throughput, and policy evaluation linkage. Define SLOs (ledger append P95 < 1s, replay lag < 30s) with burn-rate alerts and dashboards. | Metrics surfaced in dashboards; SLO alerts configured/tested; documentation updated. | -| LEDGER-OBS-52-001 | TODO | Findings Ledger Guild | LEDGER-29-002, TIMELINE-OBS-52-002 | Emit timeline events for ledger writes and projector commits (`ledger.event.appended`, `ledger.projection.updated`) with trace ID, policy version, evidence bundle reference placeholders. | Timeline events validated with fixtures; duplicates suppressed; docs note schema. | -| LEDGER-OBS-53-001 | TODO | Findings Ledger Guild, Evidence Locker Guild | LEDGER-OBS-52-001, EVID-OBS-53-002 | Persist evidence bundle references (evaluation/job capsules) alongside ledger entries, exposing lookup API linking findings to evidence manifests and timeline. | Evidence references stored/retrievable; API returns deterministic payload; integration tests pass. | -| LEDGER-OBS-54-001 | TODO | Findings Ledger Guild, Provenance Guild | LEDGER-OBS-53-001, PROV-OBS-54-001 | Verify attestation references for ledger-derived exports; expose `/ledger/attestations` endpoint returning DSSE verification state and chain-of-custody summary. | Endpoint returns verification results; negative cases handled; docs updated. | -| LEDGER-OBS-55-001 | TODO | Findings Ledger Guild, DevOps Guild | LEDGER-OBS-51-001, DEVOPS-OBS-55-001 | Enhance incident mode to record additional replay diagnostics (lag traces, conflict snapshots) and extend retention while active. Emit activation events to timeline + notifier. | Incident mode captures diagnostics; retention adjustments revert post-incident; timeline/notifications validated. | - -## Air-Gapped Mode (Epic 16) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| LEDGER-AIRGAP-56-001 | TODO | Findings Ledger Guild | AIRGAP-IMP-57-001, CONCELIER-AIRGAP-56-002 | Record bundle provenance (`bundle_id`, `merkle_root`, `time_anchor`) on ledger events for advisories/VEX/policies imported via Mirror Bundles. | Ledger entries include bundle metadata; queries expose provenance; tests cover import + replay. | -| LEDGER-AIRGAP-56-002 | TODO | Findings Ledger Guild, AirGap Time Guild | LEDGER-AIRGAP-56-001, AIRGAP-TIME-58-001 | Surface staleness metrics for findings and block risk-critical exports when stale beyond thresholds; provide remediation messaging. | Staleness thresholds enforced; exports blocked when stale; notifications triggered. 
| -| LEDGER-AIRGAP-57-001 | TODO | Findings Ledger Guild, Evidence Locker Guild | LEDGER-AIRGAP-56-001, EVID-OBS-54-001 | Link findings evidence snapshots to portable evidence bundles and ensure cross-enclave verification works. | Evidence references validated; portable bundles verify across environments; integration tests updated. | -| LEDGER-AIRGAP-58-001 | TODO | Findings Ledger Guild, AirGap Controller Guild | LEDGER-AIRGAP-56-001, AIRGAP-CTL-56-002 | Emit timeline events for bundle import impacts (new findings, remediation changes) with sealed-mode context. | Timeline events emitted with bundle IDs; duplicates suppressed; docs updated. | - -## SDKs & OpenAPI (Epic 17) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| LEDGER-OAS-61-001 | TODO | Findings Ledger Guild, API Contracts Guild | OAS-61-001 | Expand Findings Ledger OAS to include projections, evidence lookups, and filter parameters with examples. | Spec covers all ledger endpoints; lint/compat checks pass. | -| LEDGER-OAS-61-002 | TODO | Findings Ledger Guild | LEDGER-OAS-61-001 | Implement `/.well-known/openapi` endpoint and ensure version metadata matches release. | Discovery endpoint live; contract tests added. | -| LEDGER-OAS-62-001 | TODO | Findings Ledger Guild, SDK Generator Guild | LEDGER-OAS-61-001, SDKGEN-63-001 | Provide SDK test cases for findings pagination, filtering, evidence links; ensure typed models expose provenance. | SDK smoke tests cover ledger flows; documentation embeds examples. | -| LEDGER-OAS-63-001 | TODO | Findings Ledger Guild, API Governance Guild | APIGOV-63-001 | Support deprecation headers and Notifications for retiring finding endpoints. | Headers emitted; notifications validated; docs updated. | - -## Risk Profiles (Epic 18) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| LEDGER-RISK-66-001 | TODO | Findings Ledger Guild, Risk Engine Guild | RISK-ENGINE-66-001 | Add schema migrations for `risk_score`, `risk_severity`, `profile_version`, `explanation_id`, and supporting indexes. | Migrations applied; indexes created; schema docs updated. | -| LEDGER-RISK-66-002 | TODO | Findings Ledger Guild | LEDGER-RISK-66-001 | Implement deterministic upsert of scoring results keyed by finding hash/profile version with history audit. | Upsert path tested; duplicate suppression verified; audit records stored. | -| LEDGER-RISK-67-001 | TODO | Findings Ledger Guild, Risk Engine Guild | LEDGER-RISK-66-002, RISK-ENGINE-68-001 | Expose query APIs for scored findings with score/severity filters, pagination, and explainability links. | API documented; contract tests pass; latency within targets. | -| LEDGER-RISK-68-001 | TODO | Findings Ledger Guild, Export Guild | LEDGER-RISK-66-002 | Enable export of scored findings and simulation results via Export Center integration. | Export job functional; CLI/Console consume bundle; verification tests pass. | -| LEDGER-RISK-69-001 | TODO | Findings Ledger Guild, Observability Guild | LEDGER-RISK-66-001 | Emit metrics/dashboards for scoring latency, result freshness, severity distribution, provider gaps. | Dashboards live; alerts configured; documentation updated. 
| - -## Attestor Console (Epic 19) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| LEDGER-ATTEST-73-001 | TODO | Findings Ledger Guild, Attestor Service Guild | ATTESTOR-73-002 | Persist pointers from findings to verification reports and attestation envelopes for explainability. | Ledger schema extended; queries return linked evidence; tests cover joins. | -| LEDGER-ATTEST-73-002 | TODO | Findings Ledger Guild | LEDGER-ATTEST-73-001 | Enable search/filter in findings projections by verification result and attestation status. | API filters by verification result; UI integration ready; tests updated. | +# Findings Ledger Task Board — Epic 6: Vulnerability Explorer +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| LEDGER-29-001 | TODO | Findings Ledger Guild | AUTH-POLICY-27-001 | Design ledger & projection schemas (tables/indexes), canonical JSON format, hashing strategy, and migrations. Publish schema doc + fixtures. | Schemas committed; migrations generated; hashing documented; fixtures seeded for CI. | +| LEDGER-29-002 | TODO | Findings Ledger Guild | LEDGER-29-001 | Implement ledger write API (`POST /vuln/ledger/events`) with validation, idempotency, hash chaining, and Merkle root computation job. | Events persisted with chained hashes; Merkle job emits anchors; unit/integration tests cover happy/pathological cases. | +| LEDGER-29-003 | TODO | Findings Ledger Guild, Scheduler Guild | LEDGER-29-001 | Build projector worker that derives `findings_projection` rows from ledger events + policy determinations; ensure idempotent replay keyed by `(tenant,finding_id,policy_version)`. | Projector processes sample streams deterministically; replay tests pass; metrics exported. | +| LEDGER-29-004 | TODO | Findings Ledger Guild, Policy Guild | LEDGER-29-003, POLICY-ENGINE-27-001 | Integrate Policy Engine batch evaluation (baseline + simulate) with projector; cache rationale references. | Projector fetches determinations efficiently; rationale stored for UI; regression tests cover version switches. | +| LEDGER-29-005 | TODO | Findings Ledger Guild | LEDGER-29-003 | Implement workflow mutation handlers (assign, comment, accept-risk, target-fix, verify-fix, reopen) producing ledger events with validation and attachments metadata. | API endpoints enforce business rules; attachments metadata stored; tests cover state machine transitions. | +| LEDGER-29-006 | TODO | Findings Ledger Guild, Security Guild | LEDGER-29-002 | Integrate attachment encryption (KMS envelope), signed URL issuance, CSRF protection hooks for Console. | Attachments encrypted and accessible via signed URLs; security tests verify expiry + scope. | +| LEDGER-29-007 | TODO | Findings Ledger Guild, Observability Guild | LEDGER-29-002..005 | Instrument metrics (`ledger_write_latency`, `projection_lag_seconds`, `ledger_events_total`), structured logs, and Merkle anchoring alerts; publish dashboards. | Metrics/traces emitted; dashboards live; alert thresholds documented. | +| LEDGER-29-008 | TODO | Findings Ledger Guild, QA Guild | LEDGER-29-002..005 | Develop unit/property/integration tests, replay/restore tooling, determinism harness, and load tests at 5M findings/tenant. | CI suite green; load tests documented; determinism harness proves stable projections. 
| +| LEDGER-29-009 | TODO | Findings Ledger Guild, DevOps Guild | LEDGER-29-002..008 | Provide deployment manifests (Helm/Compose), backup/restore guidance, Merkle anchor externalization (optional), and offline kit instructions. | Deployment docs merged; smoke deploy validated; backup/restore scripts recorded; offline kit includes seed data. | + +## Export Center +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| LEDGER-EXPORT-35-001 | TODO | Findings Ledger Guild | LEDGER-29-003, EXPORT-SVC-35-002 | Provide paginated streaming endpoints for advisories, VEX, SBOMs, and findings aligned with export filters, including deterministic ordering and provenance metadata. | Streaming APIs deployed; integration tests with exporter planner; metrics/logs instrumented; docs updated. | + +## Orchestrator Dashboard +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| LEDGER-34-101 | TODO | Findings Ledger Guild | ORCH-SVC-34-002, LEDGER-29-002 | Link orchestrator run ledger exports into Findings Ledger provenance chain, index by artifact hash, and expose audit queries. | Ledger ingestion job consumes orchestrator exports; provenance queries return artifact chain; tests cover multi-tenant isolation; docs updated. | + +## CLI Parity & Task Packs +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| LEDGER-PACKS-42-001 | TODO | Findings Ledger Guild | LEDGER-29-003, TASKRUN-41-001 | Provide snapshot/time-travel APIs and digestable exports for task pack simulation and CLI offline mode. | Snapshot API deployed; simulation validated; docs updated; imposed rule noted. | + +## Authority-Backed Scopes & Tenancy (Epic 14) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| LEDGER-TEN-48-001 | TODO | Findings Ledger Guild | AUTH-TEN-47-001 | Partition ledger tables by tenant/project, enable RLS, update queries/events, and stamp audit metadata. | Ledger queries respect tenant context; RLS tests pass; events include tenant metadata. | + +## Observability & Forensics (Epic 15) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| LEDGER-OBS-50-001 | TODO | Findings Ledger Guild, Observability Guild | TELEMETRY-OBS-50-001, TELEMETRY-OBS-50-002 | Integrate telemetry core within ledger writer/projector services, emitting structured logs and trace spans for ledger append, projector replay, and query APIs with tenant context. | Telemetry present for append + replay flows; integration tests assert trace propagation; log schema validated. | +| LEDGER-OBS-51-001 | TODO | Findings Ledger Guild, DevOps Guild | LEDGER-OBS-50-001, TELEMETRY-OBS-51-001 | Publish metrics for ledger latency, projector lag, event throughput, and policy evaluation linkage. Define SLOs (ledger append P95 < 1s, replay lag < 30s) with burn-rate alerts and dashboards. | Metrics surfaced in dashboards; SLO alerts configured/tested; documentation updated. 
| +| LEDGER-OBS-52-001 | TODO | Findings Ledger Guild | LEDGER-29-002, TIMELINE-OBS-52-002 | Emit timeline events for ledger writes and projector commits (`ledger.event.appended`, `ledger.projection.updated`) with trace ID, policy version, evidence bundle reference placeholders. | Timeline events validated with fixtures; duplicates suppressed; docs note schema. | +| LEDGER-OBS-53-001 | TODO | Findings Ledger Guild, Evidence Locker Guild | LEDGER-OBS-52-001, EVID-OBS-53-002 | Persist evidence bundle references (evaluation/job capsules) alongside ledger entries, exposing lookup API linking findings to evidence manifests and timeline. | Evidence references stored/retrievable; API returns deterministic payload; integration tests pass. | +| LEDGER-OBS-54-001 | TODO | Findings Ledger Guild, Provenance Guild | LEDGER-OBS-53-001, PROV-OBS-54-001 | Verify attestation references for ledger-derived exports; expose `/ledger/attestations` endpoint returning DSSE verification state and chain-of-custody summary. | Endpoint returns verification results; negative cases handled; docs updated. | +| LEDGER-OBS-55-001 | TODO | Findings Ledger Guild, DevOps Guild | LEDGER-OBS-51-001, DEVOPS-OBS-55-001 | Enhance incident mode to record additional replay diagnostics (lag traces, conflict snapshots) and extend retention while active. Emit activation events to timeline + notifier. | Incident mode captures diagnostics; retention adjustments revert post-incident; timeline/notifications validated. | + +## Air-Gapped Mode (Epic 16) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| LEDGER-AIRGAP-56-001 | TODO | Findings Ledger Guild | AIRGAP-IMP-57-001, CONCELIER-AIRGAP-56-002 | Record bundle provenance (`bundle_id`, `merkle_root`, `time_anchor`) on ledger events for advisories/VEX/policies imported via Mirror Bundles. | Ledger entries include bundle metadata; queries expose provenance; tests cover import + replay. | +| LEDGER-AIRGAP-56-002 | TODO | Findings Ledger Guild, AirGap Time Guild | LEDGER-AIRGAP-56-001, AIRGAP-TIME-58-001 | Surface staleness metrics for findings and block risk-critical exports when stale beyond thresholds; provide remediation messaging. | Staleness thresholds enforced; exports blocked when stale; notifications triggered. | +| LEDGER-AIRGAP-57-001 | TODO | Findings Ledger Guild, Evidence Locker Guild | LEDGER-AIRGAP-56-001, EVID-OBS-54-001 | Link findings evidence snapshots to portable evidence bundles and ensure cross-enclave verification works. | Evidence references validated; portable bundles verify across environments; integration tests updated. | +| LEDGER-AIRGAP-58-001 | TODO | Findings Ledger Guild, AirGap Controller Guild | LEDGER-AIRGAP-56-001, AIRGAP-CTL-56-002 | Emit timeline events for bundle import impacts (new findings, remediation changes) with sealed-mode context. | Timeline events emitted with bundle IDs; duplicates suppressed; docs updated. | + +## SDKs & OpenAPI (Epic 17) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| LEDGER-OAS-61-001 | TODO | Findings Ledger Guild, API Contracts Guild | OAS-61-001 | Expand Findings Ledger OAS to include projections, evidence lookups, and filter parameters with examples. | Spec covers all ledger endpoints; lint/compat checks pass. 
| +| LEDGER-OAS-61-002 | TODO | Findings Ledger Guild | LEDGER-OAS-61-001 | Implement `/.well-known/openapi` endpoint and ensure version metadata matches release. | Discovery endpoint live; contract tests added. | +| LEDGER-OAS-62-001 | TODO | Findings Ledger Guild, SDK Generator Guild | LEDGER-OAS-61-001, SDKGEN-63-001 | Provide SDK test cases for findings pagination, filtering, evidence links; ensure typed models expose provenance. | SDK smoke tests cover ledger flows; documentation embeds examples. | +| LEDGER-OAS-63-001 | TODO | Findings Ledger Guild, API Governance Guild | APIGOV-63-001 | Support deprecation headers and Notifications for retiring finding endpoints. | Headers emitted; notifications validated; docs updated. | + +## Risk Profiles (Epic 18) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| LEDGER-RISK-66-001 | TODO | Findings Ledger Guild, Risk Engine Guild | RISK-ENGINE-66-001 | Add schema migrations for `risk_score`, `risk_severity`, `profile_version`, `explanation_id`, and supporting indexes. | Migrations applied; indexes created; schema docs updated. | +| LEDGER-RISK-66-002 | TODO | Findings Ledger Guild | LEDGER-RISK-66-001 | Implement deterministic upsert of scoring results keyed by finding hash/profile version with history audit. | Upsert path tested; duplicate suppression verified; audit records stored. | +| LEDGER-RISK-67-001 | TODO | Findings Ledger Guild, Risk Engine Guild | LEDGER-RISK-66-002, RISK-ENGINE-68-001 | Expose query APIs for scored findings with score/severity filters, pagination, and explainability links. | API documented; contract tests pass; latency within targets. | +| LEDGER-RISK-68-001 | TODO | Findings Ledger Guild, Export Guild | LEDGER-RISK-66-002 | Enable export of scored findings and simulation results via Export Center integration. | Export job functional; CLI/Console consume bundle; verification tests pass. | +| LEDGER-RISK-69-001 | TODO | Findings Ledger Guild, Observability Guild | LEDGER-RISK-66-001 | Emit metrics/dashboards for scoring latency, result freshness, severity distribution, provider gaps. | Dashboards live; alerts configured; documentation updated. | + +## Attestor Console (Epic 19) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| LEDGER-ATTEST-73-001 | TODO | Findings Ledger Guild, Attestor Service Guild | ATTESTOR-73-002 | Persist pointers from findings to verification reports and attestation envelopes for explainability. | Ledger schema extended; queries return linked evidence; tests cover joins. | +| LEDGER-ATTEST-73-002 | TODO | Findings Ledger Guild | LEDGER-ATTEST-73-001 | Enable search/filter in findings projections by verification result and attestation status. | API filters by verification result; UI integration ready; tests updated. | diff --git a/src/StellaOps.Graph.Api/AGENTS.md b/src/Graph/StellaOps.Graph.Api/AGENTS.md similarity index 90% rename from src/StellaOps.Graph.Api/AGENTS.md rename to src/Graph/StellaOps.Graph.Api/AGENTS.md index 13924480..270738e0 100644 --- a/src/StellaOps.Graph.Api/AGENTS.md +++ b/src/Graph/StellaOps.Graph.Api/AGENTS.md @@ -4,7 +4,7 @@ Provide tenant-scoped Graph Explorer APIs for search, query, paths, diffs, overlays, and exports. Deliver cost-aware streaming endpoints that integrate with Policy Engine, Conseiller, Excitator, and the Graph Indexer while honoring AOC and RBAC. 
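The Findings Ledger tasks above (LEDGER-29-002) call for hash-chained ledger events plus a periodic Merkle root computation job. Below is a minimal sketch of that idea, assuming SHA-256 and a canonical JSON serialization of each event; the type and method names are illustrative and not the service's actual schema or API.

```csharp
using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Cryptography;
using System.Text;

// Sketch of the hash-chaining + Merkle anchoring idea from LEDGER-29-002.
// Names and helper shapes are assumptions, not the real Findings Ledger schema.
public static class LedgerChainSketch
{
    // Each appended event commits to the previous entry's hash, so rewriting
    // any historical entry changes every hash that follows it.
    public static byte[] ChainHash(byte[] previousEntryHash, string canonicalEventJson)
    {
        using var sha = SHA256.Create();
        var payload = previousEntryHash
            .Concat(Encoding.UTF8.GetBytes(canonicalEventJson))
            .ToArray();
        return sha.ComputeHash(payload);
    }

    // Periodic anchoring job: fold a batch of entry hashes into a single
    // Merkle root that can be signed or externalized.
    public static byte[] MerkleRoot(IReadOnlyList<byte[]> entryHashes)
    {
        if (entryHashes.Count == 0)
        {
            throw new ArgumentException("Batch must contain at least one entry hash.");
        }

        using var sha = SHA256.Create();
        var level = entryHashes.ToList();
        while (level.Count > 1)
        {
            var next = new List<byte[]>();
            for (var i = 0; i < level.Count; i += 2)
            {
                // Duplicate the last node when the level has an odd length.
                var left = level[i];
                var right = i + 1 < level.Count ? level[i + 1] : level[i];
                next.Add(sha.ComputeHash(left.Concat(right).ToArray()));
            }
            level = next;
        }

        return level[0];
    }
}
```

Under this scheme an anchoring job would persist the batch's `MerkleRoot` alongside each run (optionally externalized, as LEDGER-29-009 suggests) so later audits can prove the batch was not rewritten.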
## Scope -- Service under `src/StellaOps.Graph.Api` (Minimal API + streaming pipeline + query planner). +- Service under `src/Graph/StellaOps.Graph.Api` (Minimal API + streaming pipeline + query planner). - Query validation/planning, cost estimation, tile streaming, overlay composition, export serializers. - Integration with Authority scopes, Web API Gateway, Policy Engine explain endpoints, Graph Indexer storage. - Saved query management and diff endpoints. @@ -17,7 +17,7 @@ Provide tenant-scoped Graph Explorer APIs for search, query, paths, diffs, overl 5. **Observability** – Every query logs cost, latency, truncation, caching; metrics + traces integrated. ## Collaboration -- Maintain `src/StellaOps.Graph.Api/TASKS.md`, `SPRINTS.md` alignment. +- Maintain `src/Graph/StellaOps.Graph.Api/TASKS.md`, `../../docs/implplan/SPRINTS.md` alignment. - Coordinate with Graph Indexer (storage contracts), Web Gateway, Console, CLI, Policy Engine, DevOps, and Docs teams. - Publish OpenAPI + JSON schema for queries and streaming tiles. diff --git a/src/StellaOps.Graph.Api/TASKS.md b/src/Graph/StellaOps.Graph.Api/TASKS.md similarity index 99% rename from src/StellaOps.Graph.Api/TASKS.md rename to src/Graph/StellaOps.Graph.Api/TASKS.md index 500b84df..026741ae 100644 --- a/src/StellaOps.Graph.Api/TASKS.md +++ b/src/Graph/StellaOps.Graph.Api/TASKS.md @@ -1,16 +1,16 @@ -# Graph API Task Board — Epic 5: SBOM Graph Explorer -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| GRAPH-API-28-001 | TODO | Graph API Guild | GRAPH-INDEX-28-001, WEB-GRAPH-21-001 | Define OpenAPI + JSON schema for graph search/query/paths/diff/export endpoints, including cost metadata and streaming tile schema. | OpenAPI committed; schema validated via CI; clients regenerated; docs updated. | -| GRAPH-API-28-002 | TODO | Graph API Guild | GRAPH-API-28-001, GRAPH-INDEX-28-002 | Implement `/graph/search` with multi-type index lookup, prefix/exact match, RBAC enforcement, and result ranking + caching. | Endpoint returns ranked results within budget; tests cover scope errors + caching; metrics logged. | -| GRAPH-API-28-003 | TODO | Graph API Guild | GRAPH-API-28-001, GRAPH-INDEX-28-002..005 | Build query planner + cost estimator for `/graph/query`, stream tiles (nodes/edges/stats) progressively, enforce budgets, provide cursor tokens. | Query endpoint streams tiles deterministically, enforces budgets, surfaces truncation flags; integration tests cover large graphs. | -| GRAPH-API-28-004 | TODO | Graph API Guild | GRAPH-API-28-003 | Implement `/graph/paths` with depth ≤6, constraint filters, heuristic shortest path search, and optional policy overlay rendering. | Paths API returns expected routes; policy overlay applied; guardrails enforced; tests cover over-budget errors. | -| GRAPH-API-28-005 | TODO | Graph API Guild | GRAPH-INDEX-28-006, GRAPH-API-28-003 | Implement `/graph/diff` streaming added/removed/changed nodes/edges between SBOM snapshots; include overlay deltas and policy/VEX/advisory metadata. | Diff endpoint streams deterministic results; tests cover sample diffs; metrics record diff compute time. | -| GRAPH-API-28-006 | TODO | Graph API Guild | GRAPH-INDEX-28-002..005, POLICY-ENGINE-27-001 | Consume Policy Engine overlay contract (`POLICY-ENGINE-30-001..003`) and surface advisory/VEX/policy overlays with caching, partial materialization, and explain trace sampling for focused nodes. 
| Overlay pipeline delivers heatmap stats + explain samples; caches invalidate on policy/VEX/advisory change; tests cover concurrency. | -| GRAPH-API-28-007 | TODO | Graph API Guild | GRAPH-API-28-003..006 | Implement exports (`graphml`, `csv`, `ndjson`, `png`, `svg`) with async job management, checksum manifests, and streaming downloads. | Export job API returns manifest + download URLs; tests validate formats; docs updated. | -| GRAPH-API-28-008 | TODO | Graph API Guild, Authority Guild | AUTH-GRAPH-26-001, AUTH-GRAPH-21-001 | Integrate RBAC scopes (`graph:read`, `graph:query`, `graph:export`), tenant headers, audit logging, and rate limiting. | Unauthorized access rejected; audit logs include query hash & scope; rate limits enforced; integration tests pass; scope checks use `StellaOpsScopes` constants (no string literals). | - -> 2025-10-26 — Waiting on Graph API host scaffolding. When endpoints land, ensure all scope enforcement relies on `StellaOpsScopes` before closing GRAPH-API-28-008. -| GRAPH-API-28-009 | TODO | Graph API Guild, Observability Guild | GRAPH-API-28-002..007 | Instrument metrics (`graph_tile_latency_seconds`, `graph_query_budget_denied_total`, `graph_overlay_cache_hit_ratio`), structured logs, and traces per query stage; publish dashboards. | Metrics exposed; dashboards live; alerts configured; docs updated. | -| GRAPH-API-28-010 | TODO | Graph API Guild, QA Guild | GRAPH-API-28-002..007 | Build unit/integration/load tests with synthetic datasets (500k nodes/2M edges), fuzz query validation, verify determinism across runs. | Test suite green; load test report captured; determinism harness passes with fixed seed. | -| GRAPH-API-28-011 | TODO | Graph API Guild, DevOps Guild | GRAPH-API-28-003..007 | Provide deployment manifests, offline kit support, API gateway integration docs, and smoke tests. | Deployment descriptors merged; gateway routes documented; offline kit instructions updated; smoke tests executed. | +# Graph API Task Board — Epic 5: SBOM Graph Explorer +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| GRAPH-API-28-001 | TODO | Graph API Guild | GRAPH-INDEX-28-001, WEB-GRAPH-21-001 | Define OpenAPI + JSON schema for graph search/query/paths/diff/export endpoints, including cost metadata and streaming tile schema. | OpenAPI committed; schema validated via CI; clients regenerated; docs updated. | +| GRAPH-API-28-002 | TODO | Graph API Guild | GRAPH-API-28-001, GRAPH-INDEX-28-002 | Implement `/graph/search` with multi-type index lookup, prefix/exact match, RBAC enforcement, and result ranking + caching. | Endpoint returns ranked results within budget; tests cover scope errors + caching; metrics logged. | +| GRAPH-API-28-003 | TODO | Graph API Guild | GRAPH-API-28-001, GRAPH-INDEX-28-002..005 | Build query planner + cost estimator for `/graph/query`, stream tiles (nodes/edges/stats) progressively, enforce budgets, provide cursor tokens. | Query endpoint streams tiles deterministically, enforces budgets, surfaces truncation flags; integration tests cover large graphs. | +| GRAPH-API-28-004 | TODO | Graph API Guild | GRAPH-API-28-003 | Implement `/graph/paths` with depth ≤6, constraint filters, heuristic shortest path search, and optional policy overlay rendering. | Paths API returns expected routes; policy overlay applied; guardrails enforced; tests cover over-budget errors. 
| +| GRAPH-API-28-005 | TODO | Graph API Guild | GRAPH-INDEX-28-006, GRAPH-API-28-003 | Implement `/graph/diff` streaming added/removed/changed nodes/edges between SBOM snapshots; include overlay deltas and policy/VEX/advisory metadata. | Diff endpoint streams deterministic results; tests cover sample diffs; metrics record diff compute time. | +| GRAPH-API-28-006 | TODO | Graph API Guild | GRAPH-INDEX-28-002..005, POLICY-ENGINE-27-001 | Consume Policy Engine overlay contract (`POLICY-ENGINE-30-001..003`) and surface advisory/VEX/policy overlays with caching, partial materialization, and explain trace sampling for focused nodes. | Overlay pipeline delivers heatmap stats + explain samples; caches invalidate on policy/VEX/advisory change; tests cover concurrency. | +| GRAPH-API-28-007 | TODO | Graph API Guild | GRAPH-API-28-003..006 | Implement exports (`graphml`, `csv`, `ndjson`, `png`, `svg`) with async job management, checksum manifests, and streaming downloads. | Export job API returns manifest + download URLs; tests validate formats; docs updated. | +| GRAPH-API-28-008 | TODO | Graph API Guild, Authority Guild | AUTH-GRAPH-26-001, AUTH-GRAPH-21-001 | Integrate RBAC scopes (`graph:read`, `graph:query`, `graph:export`), tenant headers, audit logging, and rate limiting. | Unauthorized access rejected; audit logs include query hash & scope; rate limits enforced; integration tests pass; scope checks use `StellaOpsScopes` constants (no string literals). | + +> 2025-10-26 — Waiting on Graph API host scaffolding. When endpoints land, ensure all scope enforcement relies on `StellaOpsScopes` before closing GRAPH-API-28-008. +| GRAPH-API-28-009 | TODO | Graph API Guild, Observability Guild | GRAPH-API-28-002..007 | Instrument metrics (`graph_tile_latency_seconds`, `graph_query_budget_denied_total`, `graph_overlay_cache_hit_ratio`), structured logs, and traces per query stage; publish dashboards. | Metrics exposed; dashboards live; alerts configured; docs updated. | +| GRAPH-API-28-010 | TODO | Graph API Guild, QA Guild | GRAPH-API-28-002..007 | Build unit/integration/load tests with synthetic datasets (500k nodes/2M edges), fuzz query validation, verify determinism across runs. | Test suite green; load test report captured; determinism harness passes with fixed seed. | +| GRAPH-API-28-011 | TODO | Graph API Guild, DevOps Guild | GRAPH-API-28-003..007 | Provide deployment manifests, offline kit support, API gateway integration docs, and smoke tests. | Deployment descriptors merged; gateway routes documented; offline kit instructions updated; smoke tests executed. | diff --git a/src/StellaOps.Graph.Indexer/AGENTS.md b/src/Graph/StellaOps.Graph.Indexer/AGENTS.md similarity index 90% rename from src/StellaOps.Graph.Indexer/AGENTS.md rename to src/Graph/StellaOps.Graph.Indexer/AGENTS.md index b5c4f0b4..35140817 100644 --- a/src/StellaOps.Graph.Indexer/AGENTS.md +++ b/src/Graph/StellaOps.Graph.Indexer/AGENTS.md @@ -4,7 +4,7 @@ Project SBOM, advisory, VEX, and policy overlay data into a tenant-scoped property graph powering the SBOM Graph Explorer. Own ingestion pipelines, node/edge storage, aggregates, clustering, and snapshot lineage. ## Scope -- Service source under `src/StellaOps.Graph.Indexer` (workers, ingestion pipelines, schema builders). +- Service source under `src/Graph/StellaOps.Graph.Indexer` (workers, ingestion pipelines, schema builders). - Mongo collections/object storage for `graph_nodes`, `graph_edges`, `graph_snapshots`, clustering metadata. 
- Event consumers: SBOM ingest, Conseiller advisories, Excitator VEX, Policy overlay materials. - Incremental rebuild, diff, and cache warmers for graph overlays. @@ -17,7 +17,7 @@ Project SBOM, advisory, VEX, and policy overlay data into a tenant-scoped proper 5. **Performance & telemetry** – Every job emits metrics (latency, node/edge counts, queue lag) and structured logs. ## Collaboration -- Keep `src/StellaOps.Graph.Indexer/TASKS.md`, `SPRINTS.md` synchronized. +- Keep `src/Graph/StellaOps.Graph.Indexer/TASKS.md`, `../../docs/implplan/SPRINTS.md` synchronized. - Coordinate with SBOM Service, Policy Engine, Conseiller, Excitator, Scheduler, Web Gateway, and Console teams. - Publish schema docs and fixtures for clients; share cost/identity conventions across services. diff --git a/src/StellaOps.Graph.Indexer/TASKS.md b/src/Graph/StellaOps.Graph.Indexer/TASKS.md similarity index 99% rename from src/StellaOps.Graph.Indexer/TASKS.md rename to src/Graph/StellaOps.Graph.Indexer/TASKS.md index f44d3f40..cd381b16 100644 --- a/src/StellaOps.Graph.Indexer/TASKS.md +++ b/src/Graph/StellaOps.Graph.Indexer/TASKS.md @@ -1,13 +1,13 @@ -# Graph Indexer Task Board — Epic 5: SBOM Graph Explorer -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| GRAPH-INDEX-28-001 | TODO | Graph Indexer Guild | SBOM-SERVICE-21-001, CARTO-GRAPH-21-001 | Define canonical node/edge schemas, attribute dictionaries, identity rules, and seed fixtures; publish schema doc. | Schema doc merged; identity property tests pass; fixtures committed for CI usage. | -| GRAPH-INDEX-28-002 | TODO | Graph Indexer Guild | GRAPH-INDEX-28-001, SBOM-SERVICE-21-002 | Implement SBOM ingest consumer producing artifact/package/file nodes and edges with `valid_from/valid_to`, scope metadata, and provenance links. | Ingest pipeline processes sample SBOMs deterministically; metrics recorded; unit tests cover identity stability. | -| GRAPH-INDEX-28-003 | TODO | Graph Indexer Guild | GRAPH-INDEX-28-001, CONCELIER-CONSOLE-23-001 | Project Concelier linksets into overlay tiles (`affected_by` edges, evidence refs) without mutating source observations; keep advisory aggregates in overlay store only. | Overlay documents generated deterministically; raw node/edge collections remain immutable; tests cover overlay refresh and eviction. | -| GRAPH-INDEX-28-004 | TODO | Graph Indexer Guild | GRAPH-INDEX-28-001, EXCITITOR-CONSOLE-23-001 | Integrate VEX statements (`vex_exempts` edges) with justification metadata and precedence markers for overlays. | VEX edges generated; conflicts resolved deterministically; tests cover status transitions. | -| GRAPH-INDEX-28-005 | TODO | Graph Indexer Guild, Policy Guild | POLICY-ENGINE-27-001, POLICY-ENGINE-27-002 | Hydrate policy overlays into graph (`governs_with` nodes/edges) referencing effective findings and explain hashes for sampled nodes. | Overlay nodes stored with policy version id, severity, status; explain references captured; validation tests pass. | -| GRAPH-INDEX-28-006 | TODO | Graph Indexer Guild | GRAPH-INDEX-28-002..005 | Generate graph snapshots per SBOM with lineage (`derived_from`), adjacency manifests, and metadata for diff jobs. | Snapshot documents produced; lineage recorded; tests assert diff readiness; metrics emitted. 
| -| GRAPH-INDEX-28-007 | TODO | Graph Indexer Guild, Observability Guild | GRAPH-INDEX-28-002..006 | Implement clustering/centrality background jobs (Louvain/degree/betweenness approximations) with configurable schedules and store cluster ids on nodes. | Clustering jobs run on fixtures; metrics logged; cluster ids accessible via API; SLA documented. | -| GRAPH-INDEX-28-008 | TODO | Graph Indexer Guild | GRAPH-INDEX-28-002..007 | Provide incremental update + backfill pipeline with change streams, retry/backoff, idempotent operations, and backlog metrics. | Incremental updates replay sample change logs; retries/backoff validated; backlog metrics exported. | -| GRAPH-INDEX-28-009 | TODO | Graph Indexer Guild, QA Guild | GRAPH-INDEX-28-002..008 | Add unit/property/integration tests, synthetic large graph fixtures, chaos testing (missing overlays, cycles), and determinism checks across runs. | Test suite green; determinism harness passes across two runs; perf metrics recorded. | -| GRAPH-INDEX-28-010 | TODO | Graph Indexer Guild, DevOps Guild | GRAPH-INDEX-28-008 | Package deployment artifacts (Helm/Compose), offline seed bundles, and configuration docs; integrate Offline Kit. | Deployment descriptors merged; offline seed bundle documented; smoke deploy tested. | +# Graph Indexer Task Board — Epic 5: SBOM Graph Explorer +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| GRAPH-INDEX-28-001 | TODO | Graph Indexer Guild | SBOM-SERVICE-21-001, CARTO-GRAPH-21-001 | Define canonical node/edge schemas, attribute dictionaries, identity rules, and seed fixtures; publish schema doc. | Schema doc merged; identity property tests pass; fixtures committed for CI usage. | +| GRAPH-INDEX-28-002 | TODO | Graph Indexer Guild | GRAPH-INDEX-28-001, SBOM-SERVICE-21-002 | Implement SBOM ingest consumer producing artifact/package/file nodes and edges with `valid_from/valid_to`, scope metadata, and provenance links. | Ingest pipeline processes sample SBOMs deterministically; metrics recorded; unit tests cover identity stability. | +| GRAPH-INDEX-28-003 | TODO | Graph Indexer Guild | GRAPH-INDEX-28-001, CONCELIER-CONSOLE-23-001 | Project Concelier linksets into overlay tiles (`affected_by` edges, evidence refs) without mutating source observations; keep advisory aggregates in overlay store only. | Overlay documents generated deterministically; raw node/edge collections remain immutable; tests cover overlay refresh and eviction. | +| GRAPH-INDEX-28-004 | TODO | Graph Indexer Guild | GRAPH-INDEX-28-001, EXCITITOR-CONSOLE-23-001 | Integrate VEX statements (`vex_exempts` edges) with justification metadata and precedence markers for overlays. | VEX edges generated; conflicts resolved deterministically; tests cover status transitions. | +| GRAPH-INDEX-28-005 | TODO | Graph Indexer Guild, Policy Guild | POLICY-ENGINE-27-001, POLICY-ENGINE-27-002 | Hydrate policy overlays into graph (`governs_with` nodes/edges) referencing effective findings and explain hashes for sampled nodes. | Overlay nodes stored with policy version id, severity, status; explain references captured; validation tests pass. | +| GRAPH-INDEX-28-006 | TODO | Graph Indexer Guild | GRAPH-INDEX-28-002..005 | Generate graph snapshots per SBOM with lineage (`derived_from`), adjacency manifests, and metadata for diff jobs. | Snapshot documents produced; lineage recorded; tests assert diff readiness; metrics emitted. 
| +| GRAPH-INDEX-28-007 | TODO | Graph Indexer Guild, Observability Guild | GRAPH-INDEX-28-002..006 | Implement clustering/centrality background jobs (Louvain/degree/betweenness approximations) with configurable schedules and store cluster ids on nodes. | Clustering jobs run on fixtures; metrics logged; cluster ids accessible via API; SLA documented. | +| GRAPH-INDEX-28-008 | TODO | Graph Indexer Guild | GRAPH-INDEX-28-002..007 | Provide incremental update + backfill pipeline with change streams, retry/backoff, idempotent operations, and backlog metrics. | Incremental updates replay sample change logs; retries/backoff validated; backlog metrics exported. | +| GRAPH-INDEX-28-009 | TODO | Graph Indexer Guild, QA Guild | GRAPH-INDEX-28-002..008 | Add unit/property/integration tests, synthetic large graph fixtures, chaos testing (missing overlays, cycles), and determinism checks across runs. | Test suite green; determinism harness passes across two runs; perf metrics recorded. | +| GRAPH-INDEX-28-010 | TODO | Graph Indexer Guild, DevOps Guild | GRAPH-INDEX-28-008 | Package deployment artifacts (Helm/Compose), offline seed bundles, and configuration docs; integrate Offline Kit. | Deployment descriptors merged; offline seed bundle documented; smoke deploy tested. | diff --git a/src/StellaOps.IssuerDirectory/AGENTS.md b/src/IssuerDirectory/StellaOps.IssuerDirectory/AGENTS.md similarity index 87% rename from src/StellaOps.IssuerDirectory/AGENTS.md rename to src/IssuerDirectory/StellaOps.IssuerDirectory/AGENTS.md index 41e05eec..0432b893 100644 --- a/src/StellaOps.IssuerDirectory/AGENTS.md +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/AGENTS.md @@ -4,7 +4,7 @@ Manage trusted VEX issuer metadata, keys, and trust overrides used by the VEX Lens, Policy Engine, and downstream services. ## Scope -- Service `src/StellaOps.IssuerDirectory` providing REST APIs and admin tooling for issuers, keys, trust weights, audit logs. +- Service `src/IssuerDirectory/StellaOps.IssuerDirectory` providing REST APIs and admin tooling for issuers, keys, trust weights, audit logs. - Integration with Excitator/VEX Lens/Policy Engine for signature verification and trust weighting. - Tenant overrides, import of CSAF publisher metadata, and compliance logging. diff --git a/src/StellaOps.IssuerDirectory/TASKS.md b/src/IssuerDirectory/StellaOps.IssuerDirectory/TASKS.md similarity index 99% rename from src/StellaOps.IssuerDirectory/TASKS.md rename to src/IssuerDirectory/StellaOps.IssuerDirectory/TASKS.md index a71c1f77..e59035cd 100644 --- a/src/StellaOps.IssuerDirectory/TASKS.md +++ b/src/IssuerDirectory/StellaOps.IssuerDirectory/TASKS.md @@ -1,9 +1,9 @@ -# Issuer Directory Task Board — Epic 7 -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| ISSUER-30-001 | TODO | Issuer Directory Guild | AUTH-VULN-29-001 | Implement issuer CRUD API with RBAC, audit logging, and tenant scoping; seed CSAF publisher metadata. | APIs deployed; audit logs capture actor/reason; seed data imported; tests cover RBAC. | -| ISSUER-30-002 | TODO | Issuer Directory Guild, Security Guild | ISSUER-30-001 | Implement key management endpoints (add/rotate/revoke keys), enforce expiry, validate formats (Ed25519, X.509, DSSE). | Keys stored securely; expiry enforced; validation tests cover key types; docs updated. 
| -| ISSUER-30-003 | TODO | Issuer Directory Guild, Policy Guild | ISSUER-30-001 | Provide trust weight APIs and tenant overrides with validation (+/- bounds) and audit trails. | Trust overrides persisted; policy integration confirmed; tests cover overrides. | -| ISSUER-30-004 | TODO | Issuer Directory Guild, VEX Lens Guild | ISSUER-30-001..003 | Integrate with VEX Lens and Excitator signature verification (client SDK, caching, retries). | Lens/Excitator resolve issuer metadata via SDK; integration tests cover network failures. | -| ISSUER-30-005 | TODO | Issuer Directory Guild, Observability Guild | ISSUER-30-001..004 | Instrument metrics/logs (issuer changes, key rotation, verification failures) and dashboards/alerts. | Telemetry live; alerts configured; docs updated. | -| ISSUER-30-006 | TODO | Issuer Directory Guild, DevOps Guild | ISSUER-30-001..005 | Provide deployment manifests, backup/restore, secure secret storage, and offline kit instructions. | Deployment docs merged; smoke deploy validated; backup tested; offline kit updated. | +# Issuer Directory Task Board — Epic 7 +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| ISSUER-30-001 | TODO | Issuer Directory Guild | AUTH-VULN-29-001 | Implement issuer CRUD API with RBAC, audit logging, and tenant scoping; seed CSAF publisher metadata. | APIs deployed; audit logs capture actor/reason; seed data imported; tests cover RBAC. | +| ISSUER-30-002 | TODO | Issuer Directory Guild, Security Guild | ISSUER-30-001 | Implement key management endpoints (add/rotate/revoke keys), enforce expiry, validate formats (Ed25519, X.509, DSSE). | Keys stored securely; expiry enforced; validation tests cover key types; docs updated. | +| ISSUER-30-003 | TODO | Issuer Directory Guild, Policy Guild | ISSUER-30-001 | Provide trust weight APIs and tenant overrides with validation (+/- bounds) and audit trails. | Trust overrides persisted; policy integration confirmed; tests cover overrides. | +| ISSUER-30-004 | TODO | Issuer Directory Guild, VEX Lens Guild | ISSUER-30-001..003 | Integrate with VEX Lens and Excitator signature verification (client SDK, caching, retries). | Lens/Excitator resolve issuer metadata via SDK; integration tests cover network failures. | +| ISSUER-30-005 | TODO | Issuer Directory Guild, Observability Guild | ISSUER-30-001..004 | Instrument metrics/logs (issuer changes, key rotation, verification failures) and dashboards/alerts. | Telemetry live; alerts configured; docs updated. | +| ISSUER-30-006 | TODO | Issuer Directory Guild, DevOps Guild | ISSUER-30-001..005 | Provide deployment manifests, backup/restore, secure secret storage, and offline kit instructions. | Deployment docs merged; smoke deploy validated; backup tested; offline kit updated. | diff --git a/src/StellaOps.Mirror.Creator/AGENTS.md b/src/Mirror/StellaOps.Mirror.Creator/AGENTS.md similarity index 98% rename from src/StellaOps.Mirror.Creator/AGENTS.md rename to src/Mirror/StellaOps.Mirror.Creator/AGENTS.md index 944fe36e..d1a9a27d 100644 --- a/src/StellaOps.Mirror.Creator/AGENTS.md +++ b/src/Mirror/StellaOps.Mirror.Creator/AGENTS.md @@ -1,15 +1,15 @@ -# StellaOps Mirror Creator Guild Charter - -## Mission -Deliver connected-environment tooling that assembles signed Mirror Bundles for air-gapped deployments, covering content selection, signing, and distribution. - -## Scope -- Bundle assembly pipeline (advisories, VEX, policy packs, images, dashboards). 
-- Integration with Export Center for bundle scheduling and verification. -- CLI commands for bundle creation, inspection, and rotation management. -- Test fixtures ensuring determinism across bundle builds. - -## Definition of Done -- Bundles are deterministic given the same inputs; regression tests verify Merkle root stability. -- Signing workflows documented and automated with dual-control for root rotation. -- Bundle metadata published for import verification. +# StellaOps Mirror Creator Guild Charter + +## Mission +Deliver connected-environment tooling that assembles signed Mirror Bundles for air-gapped deployments, covering content selection, signing, and distribution. + +## Scope +- Bundle assembly pipeline (advisories, VEX, policy packs, images, dashboards). +- Integration with Export Center for bundle scheduling and verification. +- CLI commands for bundle creation, inspection, and rotation management. +- Test fixtures ensuring determinism across bundle builds. + +## Definition of Done +- Bundles are deterministic given the same inputs; regression tests verify Merkle root stability. +- Signing workflows documented and automated with dual-control for root rotation. +- Bundle metadata published for import verification. diff --git a/src/StellaOps.Mirror.Creator/TASKS.md b/src/Mirror/StellaOps.Mirror.Creator/TASKS.md similarity index 99% rename from src/StellaOps.Mirror.Creator/TASKS.md rename to src/Mirror/StellaOps.Mirror.Creator/TASKS.md index 7cb9d3be..d1d578f8 100644 --- a/src/StellaOps.Mirror.Creator/TASKS.md +++ b/src/Mirror/StellaOps.Mirror.Creator/TASKS.md @@ -1,19 +1,19 @@ -# Mirror Creator Task Board — Epic 16: Air-Gapped Mode - -## Sprint 56 – Bundle Assembly -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| MIRROR-CRT-56-001 | TODO | Mirror Creator Guild | EXPORT-OBS-51-001 | Implement deterministic bundle assembler supporting advisories, VEX, policy packs with Zstandard compression and manifest generation. | Bundle build produces deterministic manifest; unit tests compare against golden outputs. | -| MIRROR-CRT-56-002 | TODO | Mirror Creator Guild, Security Guild | MIRROR-CRT-56-001, PROV-OBS-53-001 | Integrate DSSE signing and TUF metadata generation (`root`, `snapshot`, `timestamp`, `targets`). | Signed bundle verified by importer tests; root rotation procedure documented. | - -## Sprint 57 – OCI Images & Time Anchors -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| MIRROR-CRT-57-001 | TODO | Mirror Creator Guild, DevOps Guild | MIRROR-CRT-56-001 | Add optional OCI image collection producing oci-archive layout with digests recorded in manifest. | Image bundles integrate with air-gapped registry; tests confirm digest equality. | -| MIRROR-CRT-57-002 | TODO | Mirror Creator Guild, AirGap Time Guild | MIRROR-CRT-56-002, AIRGAP-TIME-57-001 | Embed signed time anchor metadata (`meta/time-anchor.json`) sourced from trusted authority. | Time anchor included in bundles; verification tests confirm signature; docs updated. 
| - -## Sprint 58 – CLI and Scheduling -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| MIRROR-CRT-58-001 | TODO | Mirror Creator Guild, CLI Guild | MIRROR-CRT-56-002, CLI-AIRGAP-56-001 | Deliver CLI `stella mirror create|verify` commands with content selection flags, delta mode, and dry-run verification. | CLI builds bundles deterministically; verify command reports DSSE/TUF status; integration tests cover options. | -| MIRROR-CRT-58-002 | TODO | Mirror Creator Guild, Exporter Guild | MIRROR-CRT-56-002, EXPORT-OBS-54-001 | Integrate with Export Center scheduling to automate mirror bundle creation with audit logs. | Scheduler triggers bundle builds; audit entries recorded; docs updated. | +# Mirror Creator Task Board — Epic 16: Air-Gapped Mode + +## Sprint 56 – Bundle Assembly +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| MIRROR-CRT-56-001 | TODO | Mirror Creator Guild | EXPORT-OBS-51-001 | Implement deterministic bundle assembler supporting advisories, VEX, policy packs with Zstandard compression and manifest generation. | Bundle build produces deterministic manifest; unit tests compare against golden outputs. | +| MIRROR-CRT-56-002 | TODO | Mirror Creator Guild, Security Guild | MIRROR-CRT-56-001, PROV-OBS-53-001 | Integrate DSSE signing and TUF metadata generation (`root`, `snapshot`, `timestamp`, `targets`). | Signed bundle verified by importer tests; root rotation procedure documented. | + +## Sprint 57 – OCI Images & Time Anchors +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| MIRROR-CRT-57-001 | TODO | Mirror Creator Guild, DevOps Guild | MIRROR-CRT-56-001 | Add optional OCI image collection producing oci-archive layout with digests recorded in manifest. | Image bundles integrate with air-gapped registry; tests confirm digest equality. | +| MIRROR-CRT-57-002 | TODO | Mirror Creator Guild, AirGap Time Guild | MIRROR-CRT-56-002, AIRGAP-TIME-57-001 | Embed signed time anchor metadata (`meta/time-anchor.json`) sourced from trusted authority. | Time anchor included in bundles; verification tests confirm signature; docs updated. | + +## Sprint 58 – CLI and Scheduling +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| MIRROR-CRT-58-001 | TODO | Mirror Creator Guild, CLI Guild | MIRROR-CRT-56-002, CLI-AIRGAP-56-001 | Deliver CLI `stella mirror create|verify` commands with content selection flags, delta mode, and dry-run verification. | CLI builds bundles deterministically; verify command reports DSSE/TUF status; integration tests cover options. | +| MIRROR-CRT-58-002 | TODO | Mirror Creator Guild, Exporter Guild | MIRROR-CRT-56-002, EXPORT-OBS-54-001 | Integrate with Export Center scheduling to automate mirror bundle creation with audit logs. | Scheduler triggers bundle builds; audit entries recorded; docs updated. 
| diff --git a/src/Notifier/StellaOps.Notifier.sln b/src/Notifier/StellaOps.Notifier.sln new file mode 100644 index 00000000..09ce8c2b --- /dev/null +++ b/src/Notifier/StellaOps.Notifier.sln @@ -0,0 +1,125 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.0.31903.59 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "StellaOps.Notifier", "StellaOps.Notifier", "{B561C84F-7AB2-7B4E-D703-D6D5908493D1}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Notifier.Tests", "StellaOps.Notifier\StellaOps.Notifier.Tests\StellaOps.Notifier.Tests.csproj", "{65E29FD4-99F5-49DA-BBCC-BE04096F9E54}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Notifier.Worker", "StellaOps.Notifier\StellaOps.Notifier.Worker\StellaOps.Notifier.Worker.csproj", "{1488AD55-0086-46D2-967B-8D0E07161876}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Notify.Models", "..\Notify\__Libraries\StellaOps.Notify.Models\StellaOps.Notify.Models.csproj", "{52391B39-F69D-4C9A-9588-EAC5AD023546}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Notify.Queue", "..\Notify\__Libraries\StellaOps.Notify.Queue\StellaOps.Notify.Queue.csproj", "{6D2D2F1F-45AA-4F52-AD1B-1F7562F7C714}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Notify.Storage.Mongo", "..\Notify\__Libraries\StellaOps.Notify.Storage.Mongo\StellaOps.Notify.Storage.Mongo.csproj", "{6F58764A-34A9-4880-BF08-C7FB61B5819B}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Notify.Engine", "..\Notify\__Libraries\StellaOps.Notify.Engine\StellaOps.Notify.Engine.csproj", "{E61AA8CA-29C2-4BEB-B53B-36B7DE31E9AE}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Notifier.WebService", "StellaOps.Notifier\StellaOps.Notifier.WebService\StellaOps.Notifier.WebService.csproj", "{F6252853-A408-4658-9006-5DDF140A536A}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|Any CPU = Release|Any CPU + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {65E29FD4-99F5-49DA-BBCC-BE04096F9E54}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {65E29FD4-99F5-49DA-BBCC-BE04096F9E54}.Debug|Any CPU.Build.0 = Debug|Any CPU + {65E29FD4-99F5-49DA-BBCC-BE04096F9E54}.Debug|x64.ActiveCfg = Debug|Any CPU + {65E29FD4-99F5-49DA-BBCC-BE04096F9E54}.Debug|x64.Build.0 = Debug|Any CPU + {65E29FD4-99F5-49DA-BBCC-BE04096F9E54}.Debug|x86.ActiveCfg = Debug|Any CPU + {65E29FD4-99F5-49DA-BBCC-BE04096F9E54}.Debug|x86.Build.0 = Debug|Any CPU + {65E29FD4-99F5-49DA-BBCC-BE04096F9E54}.Release|Any CPU.ActiveCfg = Release|Any CPU + {65E29FD4-99F5-49DA-BBCC-BE04096F9E54}.Release|Any CPU.Build.0 = Release|Any CPU + {65E29FD4-99F5-49DA-BBCC-BE04096F9E54}.Release|x64.ActiveCfg = Release|Any CPU + {65E29FD4-99F5-49DA-BBCC-BE04096F9E54}.Release|x64.Build.0 = Release|Any CPU + {65E29FD4-99F5-49DA-BBCC-BE04096F9E54}.Release|x86.ActiveCfg = Release|Any CPU + {65E29FD4-99F5-49DA-BBCC-BE04096F9E54}.Release|x86.Build.0 = Release|Any CPU + {1488AD55-0086-46D2-967B-8D0E07161876}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {1488AD55-0086-46D2-967B-8D0E07161876}.Debug|Any CPU.Build.0 = Debug|Any CPU + {1488AD55-0086-46D2-967B-8D0E07161876}.Debug|x64.ActiveCfg = 
Debug|Any CPU + {1488AD55-0086-46D2-967B-8D0E07161876}.Debug|x64.Build.0 = Debug|Any CPU + {1488AD55-0086-46D2-967B-8D0E07161876}.Debug|x86.ActiveCfg = Debug|Any CPU + {1488AD55-0086-46D2-967B-8D0E07161876}.Debug|x86.Build.0 = Debug|Any CPU + {1488AD55-0086-46D2-967B-8D0E07161876}.Release|Any CPU.ActiveCfg = Release|Any CPU + {1488AD55-0086-46D2-967B-8D0E07161876}.Release|Any CPU.Build.0 = Release|Any CPU + {1488AD55-0086-46D2-967B-8D0E07161876}.Release|x64.ActiveCfg = Release|Any CPU + {1488AD55-0086-46D2-967B-8D0E07161876}.Release|x64.Build.0 = Release|Any CPU + {1488AD55-0086-46D2-967B-8D0E07161876}.Release|x86.ActiveCfg = Release|Any CPU + {1488AD55-0086-46D2-967B-8D0E07161876}.Release|x86.Build.0 = Release|Any CPU + {52391B39-F69D-4C9A-9588-EAC5AD023546}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {52391B39-F69D-4C9A-9588-EAC5AD023546}.Debug|Any CPU.Build.0 = Debug|Any CPU + {52391B39-F69D-4C9A-9588-EAC5AD023546}.Debug|x64.ActiveCfg = Debug|Any CPU + {52391B39-F69D-4C9A-9588-EAC5AD023546}.Debug|x64.Build.0 = Debug|Any CPU + {52391B39-F69D-4C9A-9588-EAC5AD023546}.Debug|x86.ActiveCfg = Debug|Any CPU + {52391B39-F69D-4C9A-9588-EAC5AD023546}.Debug|x86.Build.0 = Debug|Any CPU + {52391B39-F69D-4C9A-9588-EAC5AD023546}.Release|Any CPU.ActiveCfg = Release|Any CPU + {52391B39-F69D-4C9A-9588-EAC5AD023546}.Release|Any CPU.Build.0 = Release|Any CPU + {52391B39-F69D-4C9A-9588-EAC5AD023546}.Release|x64.ActiveCfg = Release|Any CPU + {52391B39-F69D-4C9A-9588-EAC5AD023546}.Release|x64.Build.0 = Release|Any CPU + {52391B39-F69D-4C9A-9588-EAC5AD023546}.Release|x86.ActiveCfg = Release|Any CPU + {52391B39-F69D-4C9A-9588-EAC5AD023546}.Release|x86.Build.0 = Release|Any CPU + {6D2D2F1F-45AA-4F52-AD1B-1F7562F7C714}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {6D2D2F1F-45AA-4F52-AD1B-1F7562F7C714}.Debug|Any CPU.Build.0 = Debug|Any CPU + {6D2D2F1F-45AA-4F52-AD1B-1F7562F7C714}.Debug|x64.ActiveCfg = Debug|Any CPU + {6D2D2F1F-45AA-4F52-AD1B-1F7562F7C714}.Debug|x64.Build.0 = Debug|Any CPU + {6D2D2F1F-45AA-4F52-AD1B-1F7562F7C714}.Debug|x86.ActiveCfg = Debug|Any CPU + {6D2D2F1F-45AA-4F52-AD1B-1F7562F7C714}.Debug|x86.Build.0 = Debug|Any CPU + {6D2D2F1F-45AA-4F52-AD1B-1F7562F7C714}.Release|Any CPU.ActiveCfg = Release|Any CPU + {6D2D2F1F-45AA-4F52-AD1B-1F7562F7C714}.Release|Any CPU.Build.0 = Release|Any CPU + {6D2D2F1F-45AA-4F52-AD1B-1F7562F7C714}.Release|x64.ActiveCfg = Release|Any CPU + {6D2D2F1F-45AA-4F52-AD1B-1F7562F7C714}.Release|x64.Build.0 = Release|Any CPU + {6D2D2F1F-45AA-4F52-AD1B-1F7562F7C714}.Release|x86.ActiveCfg = Release|Any CPU + {6D2D2F1F-45AA-4F52-AD1B-1F7562F7C714}.Release|x86.Build.0 = Release|Any CPU + {6F58764A-34A9-4880-BF08-C7FB61B5819B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {6F58764A-34A9-4880-BF08-C7FB61B5819B}.Debug|Any CPU.Build.0 = Debug|Any CPU + {6F58764A-34A9-4880-BF08-C7FB61B5819B}.Debug|x64.ActiveCfg = Debug|Any CPU + {6F58764A-34A9-4880-BF08-C7FB61B5819B}.Debug|x64.Build.0 = Debug|Any CPU + {6F58764A-34A9-4880-BF08-C7FB61B5819B}.Debug|x86.ActiveCfg = Debug|Any CPU + {6F58764A-34A9-4880-BF08-C7FB61B5819B}.Debug|x86.Build.0 = Debug|Any CPU + {6F58764A-34A9-4880-BF08-C7FB61B5819B}.Release|Any CPU.ActiveCfg = Release|Any CPU + {6F58764A-34A9-4880-BF08-C7FB61B5819B}.Release|Any CPU.Build.0 = Release|Any CPU + {6F58764A-34A9-4880-BF08-C7FB61B5819B}.Release|x64.ActiveCfg = Release|Any CPU + {6F58764A-34A9-4880-BF08-C7FB61B5819B}.Release|x64.Build.0 = Release|Any CPU + {6F58764A-34A9-4880-BF08-C7FB61B5819B}.Release|x86.ActiveCfg = Release|Any CPU + 
{6F58764A-34A9-4880-BF08-C7FB61B5819B}.Release|x86.Build.0 = Release|Any CPU + {E61AA8CA-29C2-4BEB-B53B-36B7DE31E9AE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E61AA8CA-29C2-4BEB-B53B-36B7DE31E9AE}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E61AA8CA-29C2-4BEB-B53B-36B7DE31E9AE}.Debug|x64.ActiveCfg = Debug|Any CPU + {E61AA8CA-29C2-4BEB-B53B-36B7DE31E9AE}.Debug|x64.Build.0 = Debug|Any CPU + {E61AA8CA-29C2-4BEB-B53B-36B7DE31E9AE}.Debug|x86.ActiveCfg = Debug|Any CPU + {E61AA8CA-29C2-4BEB-B53B-36B7DE31E9AE}.Debug|x86.Build.0 = Debug|Any CPU + {E61AA8CA-29C2-4BEB-B53B-36B7DE31E9AE}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E61AA8CA-29C2-4BEB-B53B-36B7DE31E9AE}.Release|Any CPU.Build.0 = Release|Any CPU + {E61AA8CA-29C2-4BEB-B53B-36B7DE31E9AE}.Release|x64.ActiveCfg = Release|Any CPU + {E61AA8CA-29C2-4BEB-B53B-36B7DE31E9AE}.Release|x64.Build.0 = Release|Any CPU + {E61AA8CA-29C2-4BEB-B53B-36B7DE31E9AE}.Release|x86.ActiveCfg = Release|Any CPU + {E61AA8CA-29C2-4BEB-B53B-36B7DE31E9AE}.Release|x86.Build.0 = Release|Any CPU + {F6252853-A408-4658-9006-5DDF140A536A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {F6252853-A408-4658-9006-5DDF140A536A}.Debug|Any CPU.Build.0 = Debug|Any CPU + {F6252853-A408-4658-9006-5DDF140A536A}.Debug|x64.ActiveCfg = Debug|Any CPU + {F6252853-A408-4658-9006-5DDF140A536A}.Debug|x64.Build.0 = Debug|Any CPU + {F6252853-A408-4658-9006-5DDF140A536A}.Debug|x86.ActiveCfg = Debug|Any CPU + {F6252853-A408-4658-9006-5DDF140A536A}.Debug|x86.Build.0 = Debug|Any CPU + {F6252853-A408-4658-9006-5DDF140A536A}.Release|Any CPU.ActiveCfg = Release|Any CPU + {F6252853-A408-4658-9006-5DDF140A536A}.Release|Any CPU.Build.0 = Release|Any CPU + {F6252853-A408-4658-9006-5DDF140A536A}.Release|x64.ActiveCfg = Release|Any CPU + {F6252853-A408-4658-9006-5DDF140A536A}.Release|x64.Build.0 = Release|Any CPU + {F6252853-A408-4658-9006-5DDF140A536A}.Release|x86.ActiveCfg = Release|Any CPU + {F6252853-A408-4658-9006-5DDF140A536A}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(NestedProjects) = preSolution + {65E29FD4-99F5-49DA-BBCC-BE04096F9E54} = {B561C84F-7AB2-7B4E-D703-D6D5908493D1} + {1488AD55-0086-46D2-967B-8D0E07161876} = {B561C84F-7AB2-7B4E-D703-D6D5908493D1} + {F6252853-A408-4658-9006-5DDF140A536A} = {B561C84F-7AB2-7B4E-D703-D6D5908493D1} + EndGlobalSection +EndGlobal diff --git a/src/StellaOps.Notifier/AGENTS.md b/src/Notifier/StellaOps.Notifier/AGENTS.md similarity index 98% rename from src/StellaOps.Notifier/AGENTS.md rename to src/Notifier/StellaOps.Notifier/AGENTS.md index 1106d623..f1ed534b 100644 --- a/src/StellaOps.Notifier/AGENTS.md +++ b/src/Notifier/StellaOps.Notifier/AGENTS.md @@ -1,17 +1,17 @@ -# StellaOps Notifier Service — Agent Charter - -## Mission -Build Notifications Studio (Epic 11) so StellaOps delivers policy-aware, explainable, tenant-scoped notifications without flooding humans. Honor the imposed rule: any work of this type must propagate everywhere it belongs. - -## Responsibilities -- Maintain event ingestion, rule evaluation, correlation, throttling, templating, dispatch, digests, and escalation pipelines. -- Coordinate with Orchestrator, Policy Engine, Findings Ledger, VEX Lens, Export Center, Authority, Console, CLI, and DevOps teams to ensure consistent event envelopes, provenance links, and RBAC. -- Guarantee deterministic, auditable notification outcomes with provenance, signing/ack security, and localization. 
- -## Module Layout -- `StellaOps.Notifier.Core/` — rule engine, routing, correlation, and template orchestration primitives. -- `StellaOps.Notifier.Infrastructure/` — persistence, integration adapters, and channel implementations. -- `StellaOps.Notifier.WebService/` — HTTP APIs (rules, incidents, templates, feeds). -- `StellaOps.Notifier.Worker/` — background dispatchers, digest builders, simulation hosts. -- `StellaOps.Notifier.Tests/` — foundational unit tests covering core/infrastructure behavior. -- `StellaOps.Notifier.sln` — solution bundling the Notifier projects. +# StellaOps Notifier Service — Agent Charter + +## Mission +Build Notifications Studio (Epic 11) so StellaOps delivers policy-aware, explainable, tenant-scoped notifications without flooding humans. Honor the imposed rule: any work of this type must propagate everywhere it belongs. + +## Responsibilities +- Maintain event ingestion, rule evaluation, correlation, throttling, templating, dispatch, digests, and escalation pipelines. +- Coordinate with Orchestrator, Policy Engine, Findings Ledger, VEX Lens, Export Center, Authority, Console, CLI, and DevOps teams to ensure consistent event envelopes, provenance links, and RBAC. +- Guarantee deterministic, auditable notification outcomes with provenance, signing/ack security, and localization. + +## Module Layout +- `StellaOps.Notifier.Core/` — rule engine, routing, correlation, and template orchestration primitives. +- `StellaOps.Notifier.Infrastructure/` — persistence, integration adapters, and channel implementations. +- `StellaOps.Notifier.WebService/` — HTTP APIs (rules, incidents, templates, feeds). +- `StellaOps.Notifier.Worker/` — background dispatchers, digest builders, simulation hosts. +- `StellaOps.Notifier.Tests/` — foundational unit tests covering core/infrastructure behavior. +- `StellaOps.Notifier.sln` — solution bundling the Notifier projects. 
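The `EventProcessorTests` below build an idempotency key from tenant, rule, action, and event before reserving a delivery lock. A minimal sketch of how such a key could be derived, assuming SHA-256 over a delimiter-joined tuple; the Worker's actual `IdempotencyKeyBuilder` may use different inputs or encoding.

```csharp
using System;
using System.Security.Cryptography;
using System.Text;

// Illustrative only: one way to derive a per-(tenant, rule, action, event)
// idempotency key so repeated deliveries of the same event collapse onto a
// single lock reservation. Not the real IdempotencyKeyBuilder implementation.
public static class IdempotencyKeySketch
{
    public static string Build(string tenantId, string ruleId, string actionId, Guid eventId)
    {
        // Delimit the components so "a|bc" and "ab|c" cannot collide.
        var material = string.Join("|", tenantId, ruleId, actionId, eventId.ToString("N"));
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(material));
        return Convert.ToHexString(hash).ToLowerInvariant();
    }
}
```

A worker would reserve that key via `TryAcquireAsync(tenant, key, workerId, ttl, ...)` before dispatching, which the test exercises when it checks that a second reservation of the same key fails.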
diff --git a/src/StellaOps.Notifier/StellaOps.Notifier.Tests/EventProcessorTests.cs b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Tests/EventProcessorTests.cs similarity index 97% rename from src/StellaOps.Notifier/StellaOps.Notifier.Tests/EventProcessorTests.cs rename to src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Tests/EventProcessorTests.cs index 84510c2a..bd3d0a1b 100644 --- a/src/StellaOps.Notifier/StellaOps.Notifier.Tests/EventProcessorTests.cs +++ b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Tests/EventProcessorTests.cs @@ -1,83 +1,83 @@ -using System.Text.Json.Nodes; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.Extensions.Options; -using StellaOps.Notifier.Tests.Support; -using StellaOps.Notifier.Worker.Options; -using StellaOps.Notifier.Worker.Processing; -using StellaOps.Notify.Engine; -using StellaOps.Notify.Models; -using Xunit; - -namespace StellaOps.Notifier.Tests; - -public sealed class EventProcessorTests -{ - [Fact] - public async Task ProcessAsync_MatchesRule_StoresSingleDeliveryWithIdempotency() - { - var ruleRepository = new InMemoryRuleRepository(); - var deliveryRepository = new InMemoryDeliveryRepository(); - var lockRepository = new InMemoryLockRepository(); - var evaluator = new DefaultNotifyRuleEvaluator(); - var options = Options.Create(new NotifierWorkerOptions - { - DefaultIdempotencyTtl = TimeSpan.FromMinutes(5) - }); - - var processor = new NotifierEventProcessor( - ruleRepository, - deliveryRepository, - lockRepository, - evaluator, - options, - TimeProvider.System, - NullLogger<NotifierEventProcessor>.Instance); - - var rule = NotifyRule.Create( - ruleId: "rule-1", - tenantId: "tenant-a", - name: "Failing policies", - match: NotifyRuleMatch.Create(eventKinds: new[] { "policy.violation" }), - actions: new[] - { - NotifyRuleAction.Create( - actionId: "act-slack", - channel: "chn-slack") - }); - - ruleRepository.Seed("tenant-a", rule); - - var payload = new JsonObject - { - ["verdict"] = "fail", - ["severity"] = "high" - }; - - var notifyEvent = NotifyEvent.Create( - eventId: Guid.NewGuid(), - kind: "policy.violation", - tenant: "tenant-a", - ts: DateTimeOffset.UtcNow, - payload: payload, - actor: "policy-engine", - version: "1"); - - var deliveriesFirst = await processor.ProcessAsync(notifyEvent, "worker-1", TestContext.Current.CancellationToken); - var key = IdempotencyKeyBuilder.Build("tenant-a", rule.RuleId, "act-slack", notifyEvent); - var reservedAfterFirst = await lockRepository.TryAcquireAsync("tenant-a", key, "worker-verify", TimeSpan.FromMinutes(5), TestContext.Current.CancellationToken); - var deliveriesSecond = await processor.ProcessAsync(notifyEvent, "worker-1", TestContext.Current.CancellationToken); - - Assert.Equal(1, deliveriesFirst); - Assert.False(reservedAfterFirst); - - Assert.Equal(1, lockRepository.SuccessfulReservations); - Assert.Equal(3, lockRepository.ReservationAttempts); - - var record = Assert.Single(deliveryRepository.Records("tenant-a")); - Assert.Equal("chn-slack", record.Metadata["channel"]); - Assert.Equal(notifyEvent.EventId, record.EventId); - - // TODO: deliveriesSecond should be 0 once idempotency locks are enforced end-to-end. 
- // Assert.Equal(0, deliveriesSecond); - } -} +using System.Text.Json.Nodes; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using StellaOps.Notifier.Tests.Support; +using StellaOps.Notifier.Worker.Options; +using StellaOps.Notifier.Worker.Processing; +using StellaOps.Notify.Engine; +using StellaOps.Notify.Models; +using Xunit; + +namespace StellaOps.Notifier.Tests; + +public sealed class EventProcessorTests +{ + [Fact] + public async Task ProcessAsync_MatchesRule_StoresSingleDeliveryWithIdempotency() + { + var ruleRepository = new InMemoryRuleRepository(); + var deliveryRepository = new InMemoryDeliveryRepository(); + var lockRepository = new InMemoryLockRepository(); + var evaluator = new DefaultNotifyRuleEvaluator(); + var options = Options.Create(new NotifierWorkerOptions + { + DefaultIdempotencyTtl = TimeSpan.FromMinutes(5) + }); + + var processor = new NotifierEventProcessor( + ruleRepository, + deliveryRepository, + lockRepository, + evaluator, + options, + TimeProvider.System, + NullLogger<NotifierEventProcessor>.Instance); + + var rule = NotifyRule.Create( + ruleId: "rule-1", + tenantId: "tenant-a", + name: "Failing policies", + match: NotifyRuleMatch.Create(eventKinds: new[] { "policy.violation" }), + actions: new[] + { + NotifyRuleAction.Create( + actionId: "act-slack", + channel: "chn-slack") + }); + + ruleRepository.Seed("tenant-a", rule); + + var payload = new JsonObject + { + ["verdict"] = "fail", + ["severity"] = "high" + }; + + var notifyEvent = NotifyEvent.Create( + eventId: Guid.NewGuid(), + kind: "policy.violation", + tenant: "tenant-a", + ts: DateTimeOffset.UtcNow, + payload: payload, + actor: "policy-engine", + version: "1"); + + var deliveriesFirst = await processor.ProcessAsync(notifyEvent, "worker-1", TestContext.Current.CancellationToken); + var key = IdempotencyKeyBuilder.Build("tenant-a", rule.RuleId, "act-slack", notifyEvent); + var reservedAfterFirst = await lockRepository.TryAcquireAsync("tenant-a", key, "worker-verify", TimeSpan.FromMinutes(5), TestContext.Current.CancellationToken); + var deliveriesSecond = await processor.ProcessAsync(notifyEvent, "worker-1", TestContext.Current.CancellationToken); + + Assert.Equal(1, deliveriesFirst); + Assert.False(reservedAfterFirst); + + Assert.Equal(1, lockRepository.SuccessfulReservations); + Assert.Equal(3, lockRepository.ReservationAttempts); + + var record = Assert.Single(deliveryRepository.Records("tenant-a")); + Assert.Equal("chn-slack", record.Metadata["channel"]); + Assert.Equal(notifyEvent.EventId, record.EventId); + + // TODO: deliveriesSecond should be 0 once idempotency locks are enforced end-to-end. 
+ // Assert.Equal(0, deliveriesSecond); + } +} diff --git a/src/StellaOps.Notifier/StellaOps.Notifier.Tests/RuleEvaluatorTests.cs b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Tests/RuleEvaluatorTests.cs similarity index 97% rename from src/StellaOps.Notifier/StellaOps.Notifier.Tests/RuleEvaluatorTests.cs rename to src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Tests/RuleEvaluatorTests.cs index 7ec1adeb..87f0eff2 100644 --- a/src/StellaOps.Notifier/StellaOps.Notifier.Tests/RuleEvaluatorTests.cs +++ b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Tests/RuleEvaluatorTests.cs @@ -1,60 +1,60 @@ -using System.Text.Json.Nodes; -using StellaOps.Notifier.Worker.Processing; -using StellaOps.Notify.Models; -using Xunit; - -namespace StellaOps.Notifier.Tests; - -public sealed class RuleEvaluatorTests -{ - [Fact] - public void Evaluate_MatchingPolicyViolation_ReturnsActions() - { - var rule = NotifyRule.Create( - ruleId: "rule-critical", - tenantId: "tenant-a", - name: "Critical policy violation", - match: NotifyRuleMatch.Create( - eventKinds: new[] { "policy.violation" }, - labels: new[] { "kev" }, - minSeverity: "high", - verdicts: new[] { "fail" }), - actions: new[] - { - NotifyRuleAction.Create( - actionId: "act-slack", - channel: "chn-slack", - throttle: TimeSpan.FromMinutes(10)) - }); - - var payload = new JsonObject - { - ["verdict"] = "fail", - ["severity"] = "critical", - ["labels"] = new JsonArray("kev", "policy") - }; - - var notifyEvent = NotifyEvent.Create( - eventId: Guid.NewGuid(), - kind: "policy.violation", - tenant: "tenant-a", - ts: DateTimeOffset.UtcNow, - payload: payload, - scope: NotifyEventScope.Create(repo: "registry.local/api", digest: "sha256:123"), - actor: "policy-engine", - version: "1", - attributes: new[] - { - new KeyValuePair<string, string>("severity", "critical"), - new KeyValuePair<string, string>("verdict", "fail"), - new KeyValuePair<string, string>("kev", "true") - }); - - var evaluator = new DefaultNotifyRuleEvaluator(); - var outcome = evaluator.Evaluate(rule, notifyEvent); - - Assert.True(outcome.IsMatch); - Assert.Single(outcome.Actions); - Assert.Equal("act-slack", outcome.Actions[0].ActionId); - } -} +using System.Text.Json.Nodes; +using StellaOps.Notifier.Worker.Processing; +using StellaOps.Notify.Models; +using Xunit; + +namespace StellaOps.Notifier.Tests; + +public sealed class RuleEvaluatorTests +{ + [Fact] + public void Evaluate_MatchingPolicyViolation_ReturnsActions() + { + var rule = NotifyRule.Create( + ruleId: "rule-critical", + tenantId: "tenant-a", + name: "Critical policy violation", + match: NotifyRuleMatch.Create( + eventKinds: new[] { "policy.violation" }, + labels: new[] { "kev" }, + minSeverity: "high", + verdicts: new[] { "fail" }), + actions: new[] + { + NotifyRuleAction.Create( + actionId: "act-slack", + channel: "chn-slack", + throttle: TimeSpan.FromMinutes(10)) + }); + + var payload = new JsonObject + { + ["verdict"] = "fail", + ["severity"] = "critical", + ["labels"] = new JsonArray("kev", "policy") + }; + + var notifyEvent = NotifyEvent.Create( + eventId: Guid.NewGuid(), + kind: "policy.violation", + tenant: "tenant-a", + ts: DateTimeOffset.UtcNow, + payload: payload, + scope: NotifyEventScope.Create(repo: "registry.local/api", digest: "sha256:123"), + actor: "policy-engine", + version: "1", + attributes: new[] + { + new KeyValuePair<string, string>("severity", "critical"), + new KeyValuePair<string, string>("verdict", "fail"), + new KeyValuePair<string, string>("kev", "true") + }); + + var evaluator = new 
DefaultNotifyRuleEvaluator(); + var outcome = evaluator.Evaluate(rule, notifyEvent); + + Assert.True(outcome.IsMatch); + Assert.Single(outcome.Actions); + Assert.Equal("act-slack", outcome.Actions[0].ActionId); + } +} diff --git a/src/StellaOps.Notifier/StellaOps.Notifier.Tests/StellaOps.Notifier.Tests.csproj b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Tests/StellaOps.Notifier.Tests.csproj similarity index 97% rename from src/StellaOps.Notifier/StellaOps.Notifier.Tests/StellaOps.Notifier.Tests.csproj rename to src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Tests/StellaOps.Notifier.Tests.csproj index 5ab3b7e5..1228d27b 100644 --- a/src/StellaOps.Notifier/StellaOps.Notifier.Tests/StellaOps.Notifier.Tests.csproj +++ b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Tests/StellaOps.Notifier.Tests.csproj @@ -1,33 +1,33 @@ -<?xml version="1.0" encoding="utf-8"?> -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <OutputType>Exe</OutputType> - <IsPackable>false</IsPackable> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <UseConcelierTestInfra>false</UseConcelierTestInfra> - <LangVersion>preview</LangVersion> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - - <ItemGroup> - <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1" /> - <PackageReference Include="xunit.v3" Version="3.0.0" /> - <PackageReference Include="xunit.runner.visualstudio" Version="3.1.3" /> - </ItemGroup> - - <ItemGroup> - <Content Include="xunit.runner.json" CopyToOutputDirectory="PreserveNewest" /> - </ItemGroup> - - <ItemGroup> - <Using Include="Xunit" /> - </ItemGroup> - - <ItemGroup> - <ProjectReference Include="..\StellaOps.Notifier.Worker\StellaOps.Notifier.Worker.csproj" /> - </ItemGroup> -</Project> +<?xml version="1.0" encoding="utf-8"?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <OutputType>Exe</OutputType> + <IsPackable>false</IsPackable> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <UseConcelierTestInfra>false</UseConcelierTestInfra> + <LangVersion>preview</LangVersion> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + + <ItemGroup> + <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1" /> + <PackageReference Include="xunit.v3" Version="3.0.0" /> + <PackageReference Include="xunit.runner.visualstudio" Version="3.1.3" /> + </ItemGroup> + + <ItemGroup> + <Content Include="xunit.runner.json" CopyToOutputDirectory="PreserveNewest" /> + </ItemGroup> + + <ItemGroup> + <Using Include="Xunit" /> + </ItemGroup> + + <ItemGroup> + <ProjectReference Include="..\StellaOps.Notifier.Worker\StellaOps.Notifier.Worker.csproj" /> + </ItemGroup> +</Project> diff --git a/src/StellaOps.Notifier/StellaOps.Notifier.Tests/Support/InMemoryStores.cs b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Tests/Support/InMemoryStores.cs similarity index 97% rename from src/StellaOps.Notifier/StellaOps.Notifier.Tests/Support/InMemoryStores.cs rename to 
src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Tests/Support/InMemoryStores.cs index d57d751e..e3632ee7 100644 --- a/src/StellaOps.Notifier/StellaOps.Notifier.Tests/Support/InMemoryStores.cs +++ b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Tests/Support/InMemoryStores.cs @@ -1,173 +1,173 @@ -using System.Collections.Concurrent; -using StellaOps.Notify.Models; -using StellaOps.Notify.Storage.Mongo.Repositories; - -namespace StellaOps.Notifier.Tests.Support; - -internal sealed class InMemoryRuleRepository : INotifyRuleRepository -{ - private readonly ConcurrentDictionary<string, ConcurrentDictionary<string, NotifyRule>> _rules = new(StringComparer.Ordinal); - - public Task UpsertAsync(NotifyRule rule, CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(rule); - var tenantRules = _rules.GetOrAdd(rule.TenantId, _ => new ConcurrentDictionary<string, NotifyRule>(StringComparer.Ordinal)); - tenantRules[rule.RuleId] = rule; - return Task.CompletedTask; - } - - public Task<NotifyRule?> GetAsync(string tenantId, string ruleId, CancellationToken cancellationToken = default) - { - if (_rules.TryGetValue(tenantId, out var rules) && rules.TryGetValue(ruleId, out var rule)) - { - return Task.FromResult<NotifyRule?>(rule); - } - - return Task.FromResult<NotifyRule?>(null); - } - - public Task<IReadOnlyList<NotifyRule>> ListAsync(string tenantId, CancellationToken cancellationToken = default) - { - if (_rules.TryGetValue(tenantId, out var rules)) - { - return Task.FromResult<IReadOnlyList<NotifyRule>>(rules.Values.ToArray()); - } - - return Task.FromResult<IReadOnlyList<NotifyRule>>(Array.Empty<NotifyRule>()); - } - - public Task DeleteAsync(string tenantId, string ruleId, CancellationToken cancellationToken = default) - { - if (_rules.TryGetValue(tenantId, out var rules)) - { - rules.TryRemove(ruleId, out _); - } - - return Task.CompletedTask; - } - - public void Seed(string tenantId, params NotifyRule[] rules) - { - var tenantRules = _rules.GetOrAdd(tenantId, _ => new ConcurrentDictionary<string, NotifyRule>(StringComparer.Ordinal)); - foreach (var rule in rules) - { - tenantRules[rule.RuleId] = rule; - } - } -} - -internal sealed class InMemoryDeliveryRepository : INotifyDeliveryRepository -{ - private readonly ConcurrentDictionary<string, List<NotifyDelivery>> _deliveries = new(StringComparer.Ordinal); - - public Task AppendAsync(NotifyDelivery delivery, CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(delivery); - var list = _deliveries.GetOrAdd(delivery.TenantId, _ => new List<NotifyDelivery>()); - lock (list) - { - list.Add(delivery); - } - - return Task.CompletedTask; - } - - public Task UpdateAsync(NotifyDelivery delivery, CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(delivery); - var list = _deliveries.GetOrAdd(delivery.TenantId, _ => new List<NotifyDelivery>()); - lock (list) - { - var index = list.FindIndex(existing => existing.DeliveryId == delivery.DeliveryId); - if (index >= 0) - { - list[index] = delivery; - } - else - { - list.Add(delivery); - } - } - - return Task.CompletedTask; - } - - public Task<NotifyDelivery?> GetAsync(string tenantId, string deliveryId, CancellationToken cancellationToken = default) - { - if (_deliveries.TryGetValue(tenantId, out var list)) - { - lock (list) - { - return Task.FromResult<NotifyDelivery?>(list.FirstOrDefault(delivery => delivery.DeliveryId == deliveryId)); - } - } - - return Task.FromResult<NotifyDelivery?>(null); - } - - 
public Task<NotifyDeliveryQueryResult> QueryAsync( - string tenantId, - DateTimeOffset? since, - string? status, - int? limit, - string? continuationToken = null, - CancellationToken cancellationToken = default) - { - throw new NotImplementedException(); - } - - public IReadOnlyCollection<NotifyDelivery> Records(string tenantId) - { - if (_deliveries.TryGetValue(tenantId, out var list)) - { - lock (list) - { - return list.ToArray(); - } - } - - return Array.Empty<NotifyDelivery>(); - } -} - -internal sealed class InMemoryLockRepository : INotifyLockRepository -{ - private readonly object _sync = new(); - private readonly Dictionary<(string TenantId, string Resource), (string Owner, DateTimeOffset Expiry)> _locks = new(); - - public int SuccessfulReservations { get; private set; } - public int ReservationAttempts { get; private set; } - - public Task<bool> TryAcquireAsync(string tenantId, string resource, string owner, TimeSpan ttl, CancellationToken cancellationToken = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); - ArgumentException.ThrowIfNullOrWhiteSpace(resource); - ArgumentException.ThrowIfNullOrWhiteSpace(owner); - - lock (_sync) - { - ReservationAttempts++; - var key = (tenantId, resource); - var now = DateTimeOffset.UtcNow; - - if (_locks.TryGetValue(key, out var existing) && existing.Expiry > now) - { - return Task.FromResult(false); - } - - _locks[key] = (owner, now + ttl); - SuccessfulReservations++; - return Task.FromResult(true); - } - } - - public Task ReleaseAsync(string tenantId, string resource, string owner, CancellationToken cancellationToken = default) - { - lock (_sync) - { - var key = (tenantId, resource); - _locks.Remove(key); - return Task.CompletedTask; - } - } -} +using System.Collections.Concurrent; +using StellaOps.Notify.Models; +using StellaOps.Notify.Storage.Mongo.Repositories; + +namespace StellaOps.Notifier.Tests.Support; + +internal sealed class InMemoryRuleRepository : INotifyRuleRepository +{ + private readonly ConcurrentDictionary<string, ConcurrentDictionary<string, NotifyRule>> _rules = new(StringComparer.Ordinal); + + public Task UpsertAsync(NotifyRule rule, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(rule); + var tenantRules = _rules.GetOrAdd(rule.TenantId, _ => new ConcurrentDictionary<string, NotifyRule>(StringComparer.Ordinal)); + tenantRules[rule.RuleId] = rule; + return Task.CompletedTask; + } + + public Task<NotifyRule?> GetAsync(string tenantId, string ruleId, CancellationToken cancellationToken = default) + { + if (_rules.TryGetValue(tenantId, out var rules) && rules.TryGetValue(ruleId, out var rule)) + { + return Task.FromResult<NotifyRule?>(rule); + } + + return Task.FromResult<NotifyRule?>(null); + } + + public Task<IReadOnlyList<NotifyRule>> ListAsync(string tenantId, CancellationToken cancellationToken = default) + { + if (_rules.TryGetValue(tenantId, out var rules)) + { + return Task.FromResult<IReadOnlyList<NotifyRule>>(rules.Values.ToArray()); + } + + return Task.FromResult<IReadOnlyList<NotifyRule>>(Array.Empty<NotifyRule>()); + } + + public Task DeleteAsync(string tenantId, string ruleId, CancellationToken cancellationToken = default) + { + if (_rules.TryGetValue(tenantId, out var rules)) + { + rules.TryRemove(ruleId, out _); + } + + return Task.CompletedTask; + } + + public void Seed(string tenantId, params NotifyRule[] rules) + { + var tenantRules = _rules.GetOrAdd(tenantId, _ => new ConcurrentDictionary<string, NotifyRule>(StringComparer.Ordinal)); + foreach (var 
rule in rules) + { + tenantRules[rule.RuleId] = rule; + } + } +} + +internal sealed class InMemoryDeliveryRepository : INotifyDeliveryRepository +{ + private readonly ConcurrentDictionary<string, List<NotifyDelivery>> _deliveries = new(StringComparer.Ordinal); + + public Task AppendAsync(NotifyDelivery delivery, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(delivery); + var list = _deliveries.GetOrAdd(delivery.TenantId, _ => new List<NotifyDelivery>()); + lock (list) + { + list.Add(delivery); + } + + return Task.CompletedTask; + } + + public Task UpdateAsync(NotifyDelivery delivery, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(delivery); + var list = _deliveries.GetOrAdd(delivery.TenantId, _ => new List<NotifyDelivery>()); + lock (list) + { + var index = list.FindIndex(existing => existing.DeliveryId == delivery.DeliveryId); + if (index >= 0) + { + list[index] = delivery; + } + else + { + list.Add(delivery); + } + } + + return Task.CompletedTask; + } + + public Task<NotifyDelivery?> GetAsync(string tenantId, string deliveryId, CancellationToken cancellationToken = default) + { + if (_deliveries.TryGetValue(tenantId, out var list)) + { + lock (list) + { + return Task.FromResult<NotifyDelivery?>(list.FirstOrDefault(delivery => delivery.DeliveryId == deliveryId)); + } + } + + return Task.FromResult<NotifyDelivery?>(null); + } + + public Task<NotifyDeliveryQueryResult> QueryAsync( + string tenantId, + DateTimeOffset? since, + string? status, + int? limit, + string? continuationToken = null, + CancellationToken cancellationToken = default) + { + throw new NotImplementedException(); + } + + public IReadOnlyCollection<NotifyDelivery> Records(string tenantId) + { + if (_deliveries.TryGetValue(tenantId, out var list)) + { + lock (list) + { + return list.ToArray(); + } + } + + return Array.Empty<NotifyDelivery>(); + } +} + +internal sealed class InMemoryLockRepository : INotifyLockRepository +{ + private readonly object _sync = new(); + private readonly Dictionary<(string TenantId, string Resource), (string Owner, DateTimeOffset Expiry)> _locks = new(); + + public int SuccessfulReservations { get; private set; } + public int ReservationAttempts { get; private set; } + + public Task<bool> TryAcquireAsync(string tenantId, string resource, string owner, TimeSpan ttl, CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentException.ThrowIfNullOrWhiteSpace(resource); + ArgumentException.ThrowIfNullOrWhiteSpace(owner); + + lock (_sync) + { + ReservationAttempts++; + var key = (tenantId, resource); + var now = DateTimeOffset.UtcNow; + + if (_locks.TryGetValue(key, out var existing) && existing.Expiry > now) + { + return Task.FromResult(false); + } + + _locks[key] = (owner, now + ttl); + SuccessfulReservations++; + return Task.FromResult(true); + } + } + + public Task ReleaseAsync(string tenantId, string resource, string owner, CancellationToken cancellationToken = default) + { + lock (_sync) + { + var key = (tenantId, resource); + _locks.Remove(key); + return Task.CompletedTask; + } + } +} diff --git a/src/StellaOps.Concelier.RawModels.Tests/xunit.runner.json b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Tests/xunit.runner.json similarity index 96% rename from src/StellaOps.Concelier.RawModels.Tests/xunit.runner.json rename to src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Tests/xunit.runner.json index 86c7ea05..249d815c 100644 --- 
a/src/StellaOps.Concelier.RawModels.Tests/xunit.runner.json +++ b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Tests/xunit.runner.json @@ -1,3 +1,3 @@ -{ - "$schema": "https://xunit.net/schema/current/xunit.runner.schema.json" -} +{ + "$schema": "https://xunit.net/schema/current/xunit.runner.schema.json" +} diff --git a/src/StellaOps.Notifier/StellaOps.Notifier.WebService/Program.cs b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.WebService/Program.cs similarity index 96% rename from src/StellaOps.Notifier/StellaOps.Notifier.WebService/Program.cs rename to src/Notifier/StellaOps.Notifier/StellaOps.Notifier.WebService/Program.cs index 35d27a09..d817c490 100644 --- a/src/StellaOps.Notifier/StellaOps.Notifier.WebService/Program.cs +++ b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.WebService/Program.cs @@ -1,24 +1,24 @@ -using Microsoft.AspNetCore.Builder; -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Hosting; -using StellaOps.Notify.Storage.Mongo; -using StellaOps.Notifier.WebService.Setup; - -var builder = WebApplication.CreateBuilder(args); - -builder.Configuration - .AddJsonFile("appsettings.json", optional: true, reloadOnChange: true) - .AddEnvironmentVariables(prefix: "NOTIFIER_"); - -var mongoSection = builder.Configuration.GetSection("notifier:storage:mongo"); -builder.Services.AddNotifyMongoStorage(mongoSection); - -builder.Services.AddHealthChecks(); -builder.Services.AddHostedService<MongoInitializationHostedService>(); - -var app = builder.Build(); - -app.MapHealthChecks("/healthz"); - -app.Run(); +using Microsoft.AspNetCore.Builder; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; +using StellaOps.Notify.Storage.Mongo; +using StellaOps.Notifier.WebService.Setup; + +var builder = WebApplication.CreateBuilder(args); + +builder.Configuration + .AddJsonFile("appsettings.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables(prefix: "NOTIFIER_"); + +var mongoSection = builder.Configuration.GetSection("notifier:storage:mongo"); +builder.Services.AddNotifyMongoStorage(mongoSection); + +builder.Services.AddHealthChecks(); +builder.Services.AddHostedService<MongoInitializationHostedService>(); + +var app = builder.Build(); + +app.MapHealthChecks("/healthz"); + +app.Run(); diff --git a/src/StellaOps.Notifier/StellaOps.Notifier.WebService/Properties/launchSettings.json b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.WebService/Properties/launchSettings.json similarity index 96% rename from src/StellaOps.Notifier/StellaOps.Notifier.WebService/Properties/launchSettings.json rename to src/Notifier/StellaOps.Notifier/StellaOps.Notifier.WebService/Properties/launchSettings.json index fdd325cf..840416b9 100644 --- a/src/StellaOps.Notifier/StellaOps.Notifier.WebService/Properties/launchSettings.json +++ b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.WebService/Properties/launchSettings.json @@ -1,23 +1,23 @@ -{ - "$schema": "https://json.schemastore.org/launchsettings.json", - "profiles": { - "http": { - "commandName": "Project", - "dotnetRunMessages": true, - "launchBrowser": false, - "applicationUrl": "http://localhost:5124", - "environmentVariables": { - "ASPNETCORE_ENVIRONMENT": "Development" - } - }, - "https": { - "commandName": "Project", - "dotnetRunMessages": true, - "launchBrowser": false, - "applicationUrl": "https://localhost:7202;http://localhost:5124", - "environmentVariables": { - 
"ASPNETCORE_ENVIRONMENT": "Development" - } - } - } -} +{ + "$schema": "https://json.schemastore.org/launchsettings.json", + "profiles": { + "http": { + "commandName": "Project", + "dotnetRunMessages": true, + "launchBrowser": false, + "applicationUrl": "http://localhost:5124", + "environmentVariables": { + "ASPNETCORE_ENVIRONMENT": "Development" + } + }, + "https": { + "commandName": "Project", + "dotnetRunMessages": true, + "launchBrowser": false, + "applicationUrl": "https://localhost:7202;http://localhost:5124", + "environmentVariables": { + "ASPNETCORE_ENVIRONMENT": "Development" + } + } + } +} diff --git a/src/StellaOps.Notifier/StellaOps.Notifier.WebService/Setup/MongoInitializationHostedService.cs b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.WebService/Setup/MongoInitializationHostedService.cs similarity index 97% rename from src/StellaOps.Notifier/StellaOps.Notifier.WebService/Setup/MongoInitializationHostedService.cs rename to src/Notifier/StellaOps.Notifier/StellaOps.Notifier.WebService/Setup/MongoInitializationHostedService.cs index c115d86e..fd52d943 100644 --- a/src/StellaOps.Notifier/StellaOps.Notifier.WebService/Setup/MongoInitializationHostedService.cs +++ b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.WebService/Setup/MongoInitializationHostedService.cs @@ -1,60 +1,60 @@ -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Hosting; -using Microsoft.Extensions.Logging; - -namespace StellaOps.Notifier.WebService.Setup; - -internal sealed class MongoInitializationHostedService : IHostedService -{ - private const string InitializerTypeName = "StellaOps.Notify.Storage.Mongo.Internal.NotifyMongoInitializer, StellaOps.Notify.Storage.Mongo"; - - private readonly IServiceProvider _serviceProvider; - private readonly ILogger<MongoInitializationHostedService> _logger; - - public MongoInitializationHostedService(IServiceProvider serviceProvider, ILogger<MongoInitializationHostedService> logger) - { - _serviceProvider = serviceProvider ?? throw new ArgumentNullException(nameof(serviceProvider)); - _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); - } - - public async Task StartAsync(CancellationToken cancellationToken) - { - var initializerType = Type.GetType(InitializerTypeName, throwOnError: false, ignoreCase: false); - if (initializerType is null) - { - _logger.LogWarning("Notify Mongo initializer type {TypeName} was not found; skipping migration run.", InitializerTypeName); - return; - } - - using var scope = _serviceProvider.CreateScope(); - var initializer = scope.ServiceProvider.GetService(initializerType); - if (initializer is null) - { - _logger.LogWarning("Notify Mongo initializer could not be resolved from the service provider."); - return; - } - - var method = initializerType.GetMethod("EnsureIndexesAsync"); - if (method is null) - { - _logger.LogWarning("Notify Mongo initializer does not expose EnsureIndexesAsync; skipping migration run."); - return; - } - - try - { - var task = method.Invoke(initializer, new object?[] { cancellationToken }) as Task; - if (task is not null) - { - await task.ConfigureAwait(false); - } - } - catch (Exception ex) - { - _logger.LogError(ex, "Failed to run Notify Mongo migrations."); - throw; - } - } - - public Task StopAsync(CancellationToken cancellationToken) => Task.CompletedTask; -} +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; + +namespace StellaOps.Notifier.WebService.Setup; + +internal sealed class MongoInitializationHostedService : IHostedService +{ + private const string InitializerTypeName = "StellaOps.Notify.Storage.Mongo.Internal.NotifyMongoInitializer, StellaOps.Notify.Storage.Mongo"; + + private readonly IServiceProvider _serviceProvider; + private readonly ILogger<MongoInitializationHostedService> _logger; + + public MongoInitializationHostedService(IServiceProvider serviceProvider, ILogger<MongoInitializationHostedService> logger) + { + _serviceProvider = serviceProvider ?? throw new ArgumentNullException(nameof(serviceProvider)); + _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + } + + public async Task StartAsync(CancellationToken cancellationToken) + { + var initializerType = Type.GetType(InitializerTypeName, throwOnError: false, ignoreCase: false); + if (initializerType is null) + { + _logger.LogWarning("Notify Mongo initializer type {TypeName} was not found; skipping migration run.", InitializerTypeName); + return; + } + + using var scope = _serviceProvider.CreateScope(); + var initializer = scope.ServiceProvider.GetService(initializerType); + if (initializer is null) + { + _logger.LogWarning("Notify Mongo initializer could not be resolved from the service provider."); + return; + } + + var method = initializerType.GetMethod("EnsureIndexesAsync"); + if (method is null) + { + _logger.LogWarning("Notify Mongo initializer does not expose EnsureIndexesAsync; skipping migration run."); + return; + } + + try + { + var task = method.Invoke(initializer, new object?[] { cancellationToken }) as Task; + if (task is not null) + { + await task.ConfigureAwait(false); + } + } + catch (Exception ex) + { + _logger.LogError(ex, "Failed to run Notify Mongo migrations."); + throw; + } + } + + public Task StopAsync(CancellationToken cancellationToken) => Task.CompletedTask; +} diff --git a/src/StellaOps.Notifier/StellaOps.Notifier.WebService/StellaOps.Notifier.WebService.csproj b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.WebService/StellaOps.Notifier.WebService.csproj similarity index 64% rename from src/StellaOps.Notifier/StellaOps.Notifier.WebService/StellaOps.Notifier.WebService.csproj rename to src/Notifier/StellaOps.Notifier/StellaOps.Notifier.WebService/StellaOps.Notifier.WebService.csproj index e97683af..70215b97 100644 --- a/src/StellaOps.Notifier/StellaOps.Notifier.WebService/StellaOps.Notifier.WebService.csproj +++ b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.WebService/StellaOps.Notifier.WebService.csproj @@ -1,4 +1,4 @@ -<?xml version="1.0" encoding="utf-8"?> +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk.Web"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -9,6 +9,6 @@ </PropertyGroup> <ItemGroup> - <ProjectReference Include="..\..\StellaOps.Notify.Storage.Mongo\StellaOps.Notify.Storage.Mongo.csproj" /> + <ProjectReference Include="../../../Notify/__Libraries/StellaOps.Notify.Storage.Mongo/StellaOps.Notify.Storage.Mongo.csproj" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Notifier/StellaOps.Notifier.WebService/StellaOps.Notifier.WebService.http b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.WebService/StellaOps.Notifier.WebService.http similarity index 96% rename from src/StellaOps.Notifier/StellaOps.Notifier.WebService/StellaOps.Notifier.WebService.http rename to src/Notifier/StellaOps.Notifier/StellaOps.Notifier.WebService/StellaOps.Notifier.WebService.http index 1d025e4f..e9562bef 100644 --- a/src/StellaOps.Notifier/StellaOps.Notifier.WebService/StellaOps.Notifier.WebService.http +++ b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.WebService/StellaOps.Notifier.WebService.http @@ -1,6 +1,6 @@ -@StellaOps.Notifier.WebService_HostAddress = http://localhost:5124 - -GET {{StellaOps.Notifier.WebService_HostAddress}}/weatherforecast/ -Accept: application/json - -### +@StellaOps.Notifier.WebService_HostAddress = http://localhost:5124 + +GET {{StellaOps.Notifier.WebService_HostAddress}}/weatherforecast/ +Accept: application/json + +### diff --git 
a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/appsettings.Development.json b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.WebService/appsettings.Development.json similarity index 93% rename from src/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/appsettings.Development.json rename to src/Notifier/StellaOps.Notifier/StellaOps.Notifier.WebService/appsettings.Development.json index 0c208ae9..ff66ba6b 100644 --- a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/appsettings.Development.json +++ b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.WebService/appsettings.Development.json @@ -1,8 +1,8 @@ -{ - "Logging": { - "LogLevel": { - "Default": "Information", - "Microsoft.AspNetCore": "Warning" - } - } -} +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.AspNetCore": "Warning" + } + } +} diff --git a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/appsettings.json b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.WebService/appsettings.json similarity index 94% rename from src/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/appsettings.json rename to src/Notifier/StellaOps.Notifier/StellaOps.Notifier.WebService/appsettings.json index 10f68b8c..4d566948 100644 --- a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.WebService/appsettings.json +++ b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.WebService/appsettings.json @@ -1,9 +1,9 @@ -{ - "Logging": { - "LogLevel": { - "Default": "Information", - "Microsoft.AspNetCore": "Warning" - } - }, - "AllowedHosts": "*" -} +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.AspNetCore": "Warning" + } + }, + "AllowedHosts": "*" +} diff --git a/src/StellaOps.Notifier/StellaOps.Notifier.Worker/Options/NotifierWorkerOptions.cs b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/Options/NotifierWorkerOptions.cs similarity index 97% rename from src/StellaOps.Notifier/StellaOps.Notifier.Worker/Options/NotifierWorkerOptions.cs rename to src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/Options/NotifierWorkerOptions.cs index 104a1b46..3236c2b2 100644 --- a/src/StellaOps.Notifier/StellaOps.Notifier.Worker/Options/NotifierWorkerOptions.cs +++ b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/Options/NotifierWorkerOptions.cs @@ -1,19 +1,19 @@ -namespace StellaOps.Notifier.Worker.Options; - -public sealed class NotifierWorkerOptions -{ - /// <summary> - /// Maximum number of events leased in a single batch. - /// </summary> - public int LeaseBatchSize { get; set; } = 50; - - /// <summary> - /// Duration for which a lease is held before being retried. - /// </summary> - public TimeSpan LeaseDuration { get; set; } = TimeSpan.FromSeconds(60); - - /// <summary> - /// Default TTL for idempotency reservations when actions do not specify a throttle. - /// </summary> - public TimeSpan DefaultIdempotencyTtl { get; set; } = TimeSpan.FromMinutes(30); -} +namespace StellaOps.Notifier.Worker.Options; + +public sealed class NotifierWorkerOptions +{ + /// <summary> + /// Maximum number of events leased in a single batch. + /// </summary> + public int LeaseBatchSize { get; set; } = 50; + + /// <summary> + /// Duration for which a lease is held before being retried. + /// </summary> + public TimeSpan LeaseDuration { get; set; } = TimeSpan.FromSeconds(60); + + /// <summary> + /// Default TTL for idempotency reservations when actions do not specify a throttle. 
+ /// </summary> + public TimeSpan DefaultIdempotencyTtl { get; set; } = TimeSpan.FromMinutes(30); +} diff --git a/src/StellaOps.Notifier/StellaOps.Notifier.Worker/Processing/DefaultNotifyRuleEvaluator.cs b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/Processing/DefaultNotifyRuleEvaluator.cs similarity index 97% rename from src/StellaOps.Notifier/StellaOps.Notifier.Worker/Processing/DefaultNotifyRuleEvaluator.cs rename to src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/Processing/DefaultNotifyRuleEvaluator.cs index 88efbe91..100ae72e 100644 --- a/src/StellaOps.Notifier/StellaOps.Notifier.Worker/Processing/DefaultNotifyRuleEvaluator.cs +++ b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/Processing/DefaultNotifyRuleEvaluator.cs @@ -1,300 +1,300 @@ -using System.Collections.Immutable; -using System.Text.Json.Nodes; -using StellaOps.Notify.Engine; -using StellaOps.Notify.Models; - -namespace StellaOps.Notifier.Worker.Processing; - -internal sealed class DefaultNotifyRuleEvaluator : INotifyRuleEvaluator -{ - private static readonly IDictionary<string, int> SeverityRank = new Dictionary<string, int>(StringComparer.OrdinalIgnoreCase) - { - ["none"] = 0, - ["info"] = 1, - ["low"] = 2, - ["medium"] = 3, - ["moderate"] = 3, - ["high"] = 4, - ["critical"] = 5, - ["blocker"] = 6, - }; - - public NotifyRuleEvaluationOutcome Evaluate(NotifyRule rule, NotifyEvent @event, DateTimeOffset? evaluationTimestamp = null) - { - ArgumentNullException.ThrowIfNull(rule); - ArgumentNullException.ThrowIfNull(@event); - - if (!rule.Enabled) - { - return NotifyRuleEvaluationOutcome.NotMatched(rule, "rule_disabled"); - } - - var match = rule.Match; - - if (!match.EventKinds.IsDefaultOrEmpty && !match.EventKinds.Contains(@event.Kind)) - { - return NotifyRuleEvaluationOutcome.NotMatched(rule, "event_kind_mismatch"); - } - - if (!match.Namespaces.IsDefaultOrEmpty) - { - var ns = @event.Scope?.Namespace ?? string.Empty; - if (!match.Namespaces.Contains(ns)) - { - return NotifyRuleEvaluationOutcome.NotMatched(rule, "namespace_mismatch"); - } - } - - if (!match.Repositories.IsDefaultOrEmpty) - { - var repo = @event.Scope?.Repo ?? string.Empty; - if (!match.Repositories.Contains(repo)) - { - return NotifyRuleEvaluationOutcome.NotMatched(rule, "repository_mismatch"); - } - } - - if (!match.Digests.IsDefaultOrEmpty) - { - var digest = @event.Scope?.Digest ?? 
string.Empty; - if (!match.Digests.Contains(digest)) - { - return NotifyRuleEvaluationOutcome.NotMatched(rule, "digest_mismatch"); - } - } - - if (!match.ComponentPurls.IsDefaultOrEmpty) - { - var components = ExtractComponentPurls(@event.Payload); - if (!components.Overlaps(match.ComponentPurls)) - { - return NotifyRuleEvaluationOutcome.NotMatched(rule, "component_mismatch"); - } - } - - if (match.KevOnly == true && !ExtractLabels(@event).Contains("kev")) - { - return NotifyRuleEvaluationOutcome.NotMatched(rule, "kev_required"); - } - - if (!match.Labels.IsDefaultOrEmpty) - { - var labels = ExtractLabels(@event); - if (!labels.IsSupersetOf(match.Labels)) - { - return NotifyRuleEvaluationOutcome.NotMatched(rule, "label_mismatch"); - } - } - - if (!string.IsNullOrWhiteSpace(match.MinSeverity)) - { - var eventSeverity = ResolveSeverity(@event); - if (!MeetsSeverity(match.MinSeverity!, eventSeverity)) - { - return NotifyRuleEvaluationOutcome.NotMatched(rule, "severity_below_threshold"); - } - } - - if (!match.Verdicts.IsDefaultOrEmpty) - { - var verdict = ResolveVerdict(@event); - if (verdict is null || !match.Verdicts.Contains(verdict)) - { - return NotifyRuleEvaluationOutcome.NotMatched(rule, "verdict_mismatch"); - } - } - - var actions = rule.Actions - .Where(static action => action is not null && action.Enabled) - .Distinct() - .OrderBy(static action => action.ActionId, StringComparer.Ordinal) - .ToImmutableArray(); - - if (actions.IsDefaultOrEmpty) - { - return NotifyRuleEvaluationOutcome.NotMatched(rule, "no_enabled_actions"); - } - - var matchedAt = evaluationTimestamp ?? DateTimeOffset.UtcNow; - return NotifyRuleEvaluationOutcome.Matched(rule, actions, matchedAt); - } - - public ImmutableArray<NotifyRuleEvaluationOutcome> Evaluate( - IEnumerable<NotifyRule> rules, - NotifyEvent @event, - DateTimeOffset? evaluationTimestamp = null) - { - if (rules is null) - { - return ImmutableArray<NotifyRuleEvaluationOutcome>.Empty; - } - - return rules - .Select(rule => Evaluate(rule, @event, evaluationTimestamp)) - .Where(static outcome => outcome.IsMatch) - .ToImmutableArray(); - } - - private static bool MeetsSeverity(string required, string actual) - { - if (!SeverityRank.TryGetValue(required, out var requiredRank)) - { - return true; - } - - if (!SeverityRank.TryGetValue(actual, out var actualRank)) - { - actualRank = 0; - } - - return actualRank >= requiredRank; - } - - private static string ResolveSeverity(NotifyEvent @event) - { - if (@event.Attributes.TryGetValue("severity", out var attributeSeverity) && !string.IsNullOrWhiteSpace(attributeSeverity)) - { - return attributeSeverity.ToLowerInvariant(); - } - - if (@event.Payload is JsonObject obj) - { - if (TryGetString(obj, "severity", out var severity)) - { - return severity.ToLowerInvariant(); - } - - if (obj.TryGetPropertyValue("summary", out var summaryNode) && summaryNode is JsonObject summaryObj) - { - if (TryGetString(summaryObj, "highestSeverity", out var summarySeverity)) - { - return summarySeverity.ToLowerInvariant(); - } - } - } - - return "unknown"; - } - - private static string? 
ResolveVerdict(NotifyEvent @event) - { - if (@event.Attributes.TryGetValue("verdict", out var attributeVerdict) && !string.IsNullOrWhiteSpace(attributeVerdict)) - { - return attributeVerdict.ToLowerInvariant(); - } - - if (@event.Payload is JsonObject obj) - { - if (TryGetString(obj, "verdict", out var verdict)) - { - return verdict.ToLowerInvariant(); - } - - if (obj.TryGetPropertyValue("summary", out var summaryNode) && summaryNode is JsonObject summaryObj) - { - if (TryGetString(summaryObj, "verdict", out var summaryVerdict)) - { - return summaryVerdict.ToLowerInvariant(); - } - } - } - - return null; - } - - private static bool TryGetString(JsonObject obj, string propertyName, out string value) - { - if (obj.TryGetPropertyValue(propertyName, out var node) && node is JsonValue jsonValue && jsonValue.TryGetValue(out string? str) && !string.IsNullOrWhiteSpace(str)) - { - value = str.Trim(); - return true; - } - - value = string.Empty; - return false; - } - - private static ImmutableHashSet<string> ExtractComponentPurls(JsonNode? payload) - { - var builder = ImmutableHashSet.CreateBuilder<string>(StringComparer.OrdinalIgnoreCase); - - if (payload is JsonObject obj && obj.TryGetPropertyValue("componentPurls", out var arrayNode) && arrayNode is JsonArray array) - { - foreach (var item in array) - { - if (item is JsonValue value && value.TryGetValue(out string? str) && !string.IsNullOrWhiteSpace(str)) - { - builder.Add(str.Trim()); - } - } - } - - return builder.ToImmutable(); - } - - private static ImmutableHashSet<string> ExtractLabels(NotifyEvent @event) - { - var builder = ImmutableHashSet.CreateBuilder<string>(StringComparer.OrdinalIgnoreCase); - - foreach (var (key, value) in @event.Attributes) - { - if (!string.IsNullOrWhiteSpace(key)) - { - builder.Add(key.Trim()); - } - - if (!string.IsNullOrWhiteSpace(value)) - { - builder.Add(value.Trim()); - } - } - - if (@event.Scope?.Labels is { Count: > 0 } scopeLabels) - { - foreach (var (key, value) in scopeLabels) - { - if (!string.IsNullOrWhiteSpace(key)) - { - builder.Add(key.Trim()); - } - - if (!string.IsNullOrWhiteSpace(value)) - { - builder.Add(value.Trim()); - } - } - } - - if (@event.Payload is JsonObject obj && obj.TryGetPropertyValue("labels", out var labelsNode)) - { - switch (labelsNode) - { - case JsonArray array: - foreach (var item in array) - { - if (item is JsonValue value && value.TryGetValue(out string? str) && !string.IsNullOrWhiteSpace(str)) - { - builder.Add(str.Trim()); - } - } - break; - case JsonObject labelObj: - foreach (var (key, value) in labelObj) - { - if (!string.IsNullOrWhiteSpace(key)) - { - builder.Add(key.Trim()); - } - - if (value is JsonValue v && v.TryGetValue(out string? str) && !string.IsNullOrWhiteSpace(str)) - { - builder.Add(str.Trim()); - } - } - break; - } - } - - return builder.ToImmutable(); - } -} +using System.Collections.Immutable; +using System.Text.Json.Nodes; +using StellaOps.Notify.Engine; +using StellaOps.Notify.Models; + +namespace StellaOps.Notifier.Worker.Processing; + +internal sealed class DefaultNotifyRuleEvaluator : INotifyRuleEvaluator +{ + private static readonly IDictionary<string, int> SeverityRank = new Dictionary<string, int>(StringComparer.OrdinalIgnoreCase) + { + ["none"] = 0, + ["info"] = 1, + ["low"] = 2, + ["medium"] = 3, + ["moderate"] = 3, + ["high"] = 4, + ["critical"] = 5, + ["blocker"] = 6, + }; + + public NotifyRuleEvaluationOutcome Evaluate(NotifyRule rule, NotifyEvent @event, DateTimeOffset? 
evaluationTimestamp = null) + { + ArgumentNullException.ThrowIfNull(rule); + ArgumentNullException.ThrowIfNull(@event); + + if (!rule.Enabled) + { + return NotifyRuleEvaluationOutcome.NotMatched(rule, "rule_disabled"); + } + + var match = rule.Match; + + if (!match.EventKinds.IsDefaultOrEmpty && !match.EventKinds.Contains(@event.Kind)) + { + return NotifyRuleEvaluationOutcome.NotMatched(rule, "event_kind_mismatch"); + } + + if (!match.Namespaces.IsDefaultOrEmpty) + { + var ns = @event.Scope?.Namespace ?? string.Empty; + if (!match.Namespaces.Contains(ns)) + { + return NotifyRuleEvaluationOutcome.NotMatched(rule, "namespace_mismatch"); + } + } + + if (!match.Repositories.IsDefaultOrEmpty) + { + var repo = @event.Scope?.Repo ?? string.Empty; + if (!match.Repositories.Contains(repo)) + { + return NotifyRuleEvaluationOutcome.NotMatched(rule, "repository_mismatch"); + } + } + + if (!match.Digests.IsDefaultOrEmpty) + { + var digest = @event.Scope?.Digest ?? string.Empty; + if (!match.Digests.Contains(digest)) + { + return NotifyRuleEvaluationOutcome.NotMatched(rule, "digest_mismatch"); + } + } + + if (!match.ComponentPurls.IsDefaultOrEmpty) + { + var components = ExtractComponentPurls(@event.Payload); + if (!components.Overlaps(match.ComponentPurls)) + { + return NotifyRuleEvaluationOutcome.NotMatched(rule, "component_mismatch"); + } + } + + if (match.KevOnly == true && !ExtractLabels(@event).Contains("kev")) + { + return NotifyRuleEvaluationOutcome.NotMatched(rule, "kev_required"); + } + + if (!match.Labels.IsDefaultOrEmpty) + { + var labels = ExtractLabels(@event); + if (!labels.IsSupersetOf(match.Labels)) + { + return NotifyRuleEvaluationOutcome.NotMatched(rule, "label_mismatch"); + } + } + + if (!string.IsNullOrWhiteSpace(match.MinSeverity)) + { + var eventSeverity = ResolveSeverity(@event); + if (!MeetsSeverity(match.MinSeverity!, eventSeverity)) + { + return NotifyRuleEvaluationOutcome.NotMatched(rule, "severity_below_threshold"); + } + } + + if (!match.Verdicts.IsDefaultOrEmpty) + { + var verdict = ResolveVerdict(@event); + if (verdict is null || !match.Verdicts.Contains(verdict)) + { + return NotifyRuleEvaluationOutcome.NotMatched(rule, "verdict_mismatch"); + } + } + + var actions = rule.Actions + .Where(static action => action is not null && action.Enabled) + .Distinct() + .OrderBy(static action => action.ActionId, StringComparer.Ordinal) + .ToImmutableArray(); + + if (actions.IsDefaultOrEmpty) + { + return NotifyRuleEvaluationOutcome.NotMatched(rule, "no_enabled_actions"); + } + + var matchedAt = evaluationTimestamp ?? DateTimeOffset.UtcNow; + return NotifyRuleEvaluationOutcome.Matched(rule, actions, matchedAt); + } + + public ImmutableArray<NotifyRuleEvaluationOutcome> Evaluate( + IEnumerable<NotifyRule> rules, + NotifyEvent @event, + DateTimeOffset? 
evaluationTimestamp = null) + { + if (rules is null) + { + return ImmutableArray<NotifyRuleEvaluationOutcome>.Empty; + } + + return rules + .Select(rule => Evaluate(rule, @event, evaluationTimestamp)) + .Where(static outcome => outcome.IsMatch) + .ToImmutableArray(); + } + + private static bool MeetsSeverity(string required, string actual) + { + if (!SeverityRank.TryGetValue(required, out var requiredRank)) + { + return true; + } + + if (!SeverityRank.TryGetValue(actual, out var actualRank)) + { + actualRank = 0; + } + + return actualRank >= requiredRank; + } + + private static string ResolveSeverity(NotifyEvent @event) + { + if (@event.Attributes.TryGetValue("severity", out var attributeSeverity) && !string.IsNullOrWhiteSpace(attributeSeverity)) + { + return attributeSeverity.ToLowerInvariant(); + } + + if (@event.Payload is JsonObject obj) + { + if (TryGetString(obj, "severity", out var severity)) + { + return severity.ToLowerInvariant(); + } + + if (obj.TryGetPropertyValue("summary", out var summaryNode) && summaryNode is JsonObject summaryObj) + { + if (TryGetString(summaryObj, "highestSeverity", out var summarySeverity)) + { + return summarySeverity.ToLowerInvariant(); + } + } + } + + return "unknown"; + } + + private static string? ResolveVerdict(NotifyEvent @event) + { + if (@event.Attributes.TryGetValue("verdict", out var attributeVerdict) && !string.IsNullOrWhiteSpace(attributeVerdict)) + { + return attributeVerdict.ToLowerInvariant(); + } + + if (@event.Payload is JsonObject obj) + { + if (TryGetString(obj, "verdict", out var verdict)) + { + return verdict.ToLowerInvariant(); + } + + if (obj.TryGetPropertyValue("summary", out var summaryNode) && summaryNode is JsonObject summaryObj) + { + if (TryGetString(summaryObj, "verdict", out var summaryVerdict)) + { + return summaryVerdict.ToLowerInvariant(); + } + } + } + + return null; + } + + private static bool TryGetString(JsonObject obj, string propertyName, out string value) + { + if (obj.TryGetPropertyValue(propertyName, out var node) && node is JsonValue jsonValue && jsonValue.TryGetValue(out string? str) && !string.IsNullOrWhiteSpace(str)) + { + value = str.Trim(); + return true; + } + + value = string.Empty; + return false; + } + + private static ImmutableHashSet<string> ExtractComponentPurls(JsonNode? payload) + { + var builder = ImmutableHashSet.CreateBuilder<string>(StringComparer.OrdinalIgnoreCase); + + if (payload is JsonObject obj && obj.TryGetPropertyValue("componentPurls", out var arrayNode) && arrayNode is JsonArray array) + { + foreach (var item in array) + { + if (item is JsonValue value && value.TryGetValue(out string? 
str) && !string.IsNullOrWhiteSpace(str)) + { + builder.Add(str.Trim()); + } + } + } + + return builder.ToImmutable(); + } + + private static ImmutableHashSet<string> ExtractLabels(NotifyEvent @event) + { + var builder = ImmutableHashSet.CreateBuilder<string>(StringComparer.OrdinalIgnoreCase); + + foreach (var (key, value) in @event.Attributes) + { + if (!string.IsNullOrWhiteSpace(key)) + { + builder.Add(key.Trim()); + } + + if (!string.IsNullOrWhiteSpace(value)) + { + builder.Add(value.Trim()); + } + } + + if (@event.Scope?.Labels is { Count: > 0 } scopeLabels) + { + foreach (var (key, value) in scopeLabels) + { + if (!string.IsNullOrWhiteSpace(key)) + { + builder.Add(key.Trim()); + } + + if (!string.IsNullOrWhiteSpace(value)) + { + builder.Add(value.Trim()); + } + } + } + + if (@event.Payload is JsonObject obj && obj.TryGetPropertyValue("labels", out var labelsNode)) + { + switch (labelsNode) + { + case JsonArray array: + foreach (var item in array) + { + if (item is JsonValue value && value.TryGetValue(out string? str) && !string.IsNullOrWhiteSpace(str)) + { + builder.Add(str.Trim()); + } + } + break; + case JsonObject labelObj: + foreach (var (key, value) in labelObj) + { + if (!string.IsNullOrWhiteSpace(key)) + { + builder.Add(key.Trim()); + } + + if (value is JsonValue v && v.TryGetValue(out string? str) && !string.IsNullOrWhiteSpace(str)) + { + builder.Add(str.Trim()); + } + } + break; + } + } + + return builder.ToImmutable(); + } +} diff --git a/src/StellaOps.Notifier/StellaOps.Notifier.Worker/Processing/IdempotencyKeyBuilder.cs b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/Processing/IdempotencyKeyBuilder.cs similarity index 97% rename from src/StellaOps.Notifier/StellaOps.Notifier.Worker/Processing/IdempotencyKeyBuilder.cs rename to src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/Processing/IdempotencyKeyBuilder.cs index d3659b1a..f9fabaaa 100644 --- a/src/StellaOps.Notifier/StellaOps.Notifier.Worker/Processing/IdempotencyKeyBuilder.cs +++ b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/Processing/IdempotencyKeyBuilder.cs @@ -1,30 +1,30 @@ -using System.Security.Cryptography; -using System.Text; -using StellaOps.Notify.Models; - -namespace StellaOps.Notifier.Worker.Processing; - -internal static class IdempotencyKeyBuilder -{ - public static string Build(string tenantId, string ruleId, string actionId, NotifyEvent notifyEvent) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); - ArgumentException.ThrowIfNullOrWhiteSpace(ruleId); - ArgumentException.ThrowIfNullOrWhiteSpace(actionId); - ArgumentNullException.ThrowIfNull(notifyEvent); - - var scopeDigest = notifyEvent.Scope?.Digest ?? string.Empty; - var source = string.Join( - '|', - tenantId, - ruleId, - actionId, - notifyEvent.Kind, - scopeDigest, - notifyEvent.EventId.ToString("N")); - - var bytes = Encoding.UTF8.GetBytes(source); - var hash = SHA256.HashData(bytes); - return Convert.ToHexString(hash).ToLowerInvariant(); - } -} +using System.Security.Cryptography; +using System.Text; +using StellaOps.Notify.Models; + +namespace StellaOps.Notifier.Worker.Processing; + +internal static class IdempotencyKeyBuilder +{ + public static string Build(string tenantId, string ruleId, string actionId, NotifyEvent notifyEvent) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentException.ThrowIfNullOrWhiteSpace(ruleId); + ArgumentException.ThrowIfNullOrWhiteSpace(actionId); + ArgumentNullException.ThrowIfNull(notifyEvent); + + var scopeDigest = notifyEvent.Scope?.Digest ?? 
string.Empty; + var source = string.Join( + '|', + tenantId, + ruleId, + actionId, + notifyEvent.Kind, + scopeDigest, + notifyEvent.EventId.ToString("N")); + + var bytes = Encoding.UTF8.GetBytes(source); + var hash = SHA256.HashData(bytes); + return Convert.ToHexString(hash).ToLowerInvariant(); + } +} diff --git a/src/StellaOps.Notifier/StellaOps.Notifier.Worker/Processing/MongoInitializationHostedService.cs b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/Processing/MongoInitializationHostedService.cs similarity index 97% rename from src/StellaOps.Notifier/StellaOps.Notifier.Worker/Processing/MongoInitializationHostedService.cs rename to src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/Processing/MongoInitializationHostedService.cs index 939fa94d..2e5bfd60 100644 --- a/src/StellaOps.Notifier/StellaOps.Notifier.Worker/Processing/MongoInitializationHostedService.cs +++ b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/Processing/MongoInitializationHostedService.cs @@ -1,60 +1,60 @@ -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Hosting; -using Microsoft.Extensions.Logging; - -namespace StellaOps.Notifier.Worker.Processing; - -internal sealed class MongoInitializationHostedService : IHostedService -{ - private const string InitializerTypeName = "StellaOps.Notify.Storage.Mongo.Internal.NotifyMongoInitializer, StellaOps.Notify.Storage.Mongo"; - - private readonly IServiceProvider _serviceProvider; - private readonly ILogger<MongoInitializationHostedService> _logger; - - public MongoInitializationHostedService(IServiceProvider serviceProvider, ILogger<MongoInitializationHostedService> logger) - { - _serviceProvider = serviceProvider ?? throw new ArgumentNullException(nameof(serviceProvider)); - _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); - } - - public async Task StartAsync(CancellationToken cancellationToken) - { - var initializerType = Type.GetType(InitializerTypeName, throwOnError: false, ignoreCase: false); - if (initializerType is null) - { - _logger.LogWarning("Notify Mongo initializer type {TypeName} was not found; skipping migration run.", InitializerTypeName); - return; - } - - using var scope = _serviceProvider.CreateScope(); - var initializer = scope.ServiceProvider.GetService(initializerType); - if (initializer is null) - { - _logger.LogWarning("Notify Mongo initializer could not be resolved from the service provider."); - return; - } - - var method = initializerType.GetMethod("EnsureIndexesAsync"); - if (method is null) - { - _logger.LogWarning("Notify Mongo initializer does not expose EnsureIndexesAsync; skipping migration run."); - return; - } - - try - { - var task = method.Invoke(initializer, new object?[] { cancellationToken }) as Task; - if (task is not null) - { - await task.ConfigureAwait(false); - } - } - catch (Exception ex) - { - _logger.LogError(ex, "Failed to run Notify Mongo migrations."); - throw; - } - } - - public Task StopAsync(CancellationToken cancellationToken) => Task.CompletedTask; -} +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; + +namespace StellaOps.Notifier.Worker.Processing; + +internal sealed class MongoInitializationHostedService : IHostedService +{ + private const string InitializerTypeName = "StellaOps.Notify.Storage.Mongo.Internal.NotifyMongoInitializer, StellaOps.Notify.Storage.Mongo"; + + private readonly IServiceProvider _serviceProvider; + private readonly ILogger<MongoInitializationHostedService> _logger; + + public MongoInitializationHostedService(IServiceProvider serviceProvider, ILogger<MongoInitializationHostedService> logger) + { + _serviceProvider = serviceProvider ?? throw new ArgumentNullException(nameof(serviceProvider)); + _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + } + + public async Task StartAsync(CancellationToken cancellationToken) + { + var initializerType = Type.GetType(InitializerTypeName, throwOnError: false, ignoreCase: false); + if (initializerType is null) + { + _logger.LogWarning("Notify Mongo initializer type {TypeName} was not found; skipping migration run.", InitializerTypeName); + return; + } + + using var scope = _serviceProvider.CreateScope(); + var initializer = scope.ServiceProvider.GetService(initializerType); + if (initializer is null) + { + _logger.LogWarning("Notify Mongo initializer could not be resolved from the service provider."); + return; + } + + var method = initializerType.GetMethod("EnsureIndexesAsync"); + if (method is null) + { + _logger.LogWarning("Notify Mongo initializer does not expose EnsureIndexesAsync; skipping migration run."); + return; + } + + try + { + var task = method.Invoke(initializer, new object?[] { cancellationToken }) as Task; + if (task is not null) + { + await task.ConfigureAwait(false); + } + } + catch (Exception ex) + { + _logger.LogError(ex, "Failed to run Notify Mongo migrations."); + throw; + } + } + + public Task StopAsync(CancellationToken cancellationToken) => Task.CompletedTask; +} diff --git a/src/StellaOps.Notifier/StellaOps.Notifier.Worker/Processing/NotifierEventProcessor.cs b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/Processing/NotifierEventProcessor.cs similarity index 97% rename from src/StellaOps.Notifier/StellaOps.Notifier.Worker/Processing/NotifierEventProcessor.cs rename to src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/Processing/NotifierEventProcessor.cs index e5c1fae8..a76a324a 100644 --- a/src/StellaOps.Notifier/StellaOps.Notifier.Worker/Processing/NotifierEventProcessor.cs +++ b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/Processing/NotifierEventProcessor.cs @@ -1,194 +1,194 @@ -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Notify.Engine; -using StellaOps.Notify.Models; -using StellaOps.Notify.Storage.Mongo.Repositories; -using StellaOps.Notifier.Worker.Options; - -namespace StellaOps.Notifier.Worker.Processing; - -internal sealed class NotifierEventProcessor -{ - private readonly INotifyRuleRepository _ruleRepository; - private readonly INotifyDeliveryRepository _deliveryRepository; - private readonly INotifyLockRepository _lockRepository; - private readonly INotifyRuleEvaluator _ruleEvaluator; - private readonly NotifierWorkerOptions _options; - private readonly TimeProvider _timeProvider; - private readonly ILogger<NotifierEventProcessor> _logger; - - public NotifierEventProcessor( - INotifyRuleRepository ruleRepository, - INotifyDeliveryRepository deliveryRepository, - INotifyLockRepository lockRepository, - INotifyRuleEvaluator ruleEvaluator, - IOptions<NotifierWorkerOptions> options, - TimeProvider timeProvider, - ILogger<NotifierEventProcessor> logger) - { - _ruleRepository = ruleRepository ?? throw new ArgumentNullException(nameof(ruleRepository)); - _deliveryRepository = deliveryRepository ?? throw new ArgumentNullException(nameof(deliveryRepository)); - _lockRepository = lockRepository ?? throw new ArgumentNullException(nameof(lockRepository)); - _ruleEvaluator = ruleEvaluator ?? throw new ArgumentNullException(nameof(ruleEvaluator)); - _options = options?.Value ?? throw new ArgumentNullException(nameof(options)); - _timeProvider = timeProvider ?? TimeProvider.System; - _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); - } - - public async Task<int> ProcessAsync(NotifyEvent notifyEvent, string workerId, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(notifyEvent); - ArgumentException.ThrowIfNullOrWhiteSpace(workerId); - - var tenantId = notifyEvent.Tenant; - var evaluationTime = _timeProvider.GetUtcNow(); - - IReadOnlyList<NotifyRule> rules; - try - { - rules = await _ruleRepository.ListAsync(tenantId, cancellationToken).ConfigureAwait(false); - } - catch (Exception ex) - { - _logger.LogError(ex, "Failed to load rules for tenant {TenantId}.", tenantId); - throw; - } - - if (rules.Count == 0) - { - _logger.LogDebug("No rules found for tenant {TenantId}.", tenantId); - return 0; - } - - var enabledRules = rules.Where(static rule => rule.Enabled).ToArray(); - if (enabledRules.Length == 0) - { - _logger.LogDebug("All rules are disabled for tenant {TenantId}.", tenantId); - return 0; - } - - var outcomes = _ruleEvaluator.Evaluate(enabledRules, notifyEvent, evaluationTime); - if (outcomes.IsDefaultOrEmpty) - { - _logger.LogDebug( - "Event {EventId} produced no matches for tenant {TenantId}.", - notifyEvent.EventId, - tenantId); - return 0; - } - - var created = 0; - foreach (var outcome in outcomes) - { - foreach (var action in outcome.Actions) - { - var ttl = ResolveIdempotencyTtl(action); - var idempotencyKey = IdempotencyKeyBuilder.Build(tenantId, outcome.Rule.RuleId, action.ActionId, notifyEvent); - - bool reserved; - try - { - reserved = await _lockRepository.TryAcquireAsync(tenantId, idempotencyKey, workerId, ttl, cancellationToken) - .ConfigureAwait(false); - } - catch (Exception ex) - { - _logger.LogError( - ex, - "Failed to reserve idempotency token for tenant {TenantId}, rule {RuleId}, action {ActionId}.", - tenantId, - outcome.Rule.RuleId, - action.ActionId); - throw; - } - - if (!reserved) - { - _logger.LogInformation( - "Skipped event {EventId} for tenant {TenantId}, rule {RuleId}, action {ActionId} due to idempotency.", - notifyEvent.EventId, - tenantId, - outcome.Rule.RuleId, - action.ActionId); - continue; - } - - var delivery = NotifyDelivery.Create( - deliveryId: Guid.NewGuid().ToString("N"), - tenantId: tenantId, - ruleId: outcome.Rule.RuleId, - actionId: action.ActionId, - eventId: notifyEvent.EventId, - kind: notifyEvent.Kind, - status: NotifyDeliveryStatus.Pending, - metadata: BuildDeliveryMetadata(action)); - - try - { - await _deliveryRepository.AppendAsync(delivery, cancellationToken).ConfigureAwait(false); - created++; - } - catch (Exception ex) - { - _logger.LogError( - ex, - "Failed to persist delivery record for tenant {TenantId}, rule {RuleId}, action {ActionId}.", - tenantId, - outcome.Rule.RuleId, - action.ActionId); - throw; - } - } - } - - return created; - } - - private TimeSpan ResolveIdempotencyTtl(NotifyRuleAction action) - { - if (action.Throttle is { Ticks: > 0 } throttle) - { - return throttle; - } - - if (_options.DefaultIdempotencyTtl > TimeSpan.Zero) - { - return _options.DefaultIdempotencyTtl; - } - - return TimeSpan.FromMinutes(5); - } - - private static IEnumerable<KeyValuePair<string, string>> BuildDeliveryMetadata(NotifyRuleAction action) - { - var metadata = new List<KeyValuePair<string, string>> - { - new("channel", action.Channel) - }; - - if (!string.IsNullOrWhiteSpace(action.Template)) - { - metadata.Add(new("template", action.Template)); - } - - if (!string.IsNullOrWhiteSpace(action.Digest)) - { - metadata.Add(new("digest", action.Digest)); - } - - if 
(!string.IsNullOrWhiteSpace(action.Locale)) - { - metadata.Add(new("locale", action.Locale)); - } - - foreach (var (key, value) in action.Metadata) - { - if (!string.IsNullOrWhiteSpace(key) && !string.IsNullOrWhiteSpace(value)) - { - metadata.Add(new(key, value)); - } - } - - return metadata; - } -} +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Notify.Engine; +using StellaOps.Notify.Models; +using StellaOps.Notify.Storage.Mongo.Repositories; +using StellaOps.Notifier.Worker.Options; + +namespace StellaOps.Notifier.Worker.Processing; + +internal sealed class NotifierEventProcessor +{ + private readonly INotifyRuleRepository _ruleRepository; + private readonly INotifyDeliveryRepository _deliveryRepository; + private readonly INotifyLockRepository _lockRepository; + private readonly INotifyRuleEvaluator _ruleEvaluator; + private readonly NotifierWorkerOptions _options; + private readonly TimeProvider _timeProvider; + private readonly ILogger<NotifierEventProcessor> _logger; + + public NotifierEventProcessor( + INotifyRuleRepository ruleRepository, + INotifyDeliveryRepository deliveryRepository, + INotifyLockRepository lockRepository, + INotifyRuleEvaluator ruleEvaluator, + IOptions<NotifierWorkerOptions> options, + TimeProvider timeProvider, + ILogger<NotifierEventProcessor> logger) + { + _ruleRepository = ruleRepository ?? throw new ArgumentNullException(nameof(ruleRepository)); + _deliveryRepository = deliveryRepository ?? throw new ArgumentNullException(nameof(deliveryRepository)); + _lockRepository = lockRepository ?? throw new ArgumentNullException(nameof(lockRepository)); + _ruleEvaluator = ruleEvaluator ?? throw new ArgumentNullException(nameof(ruleEvaluator)); + _options = options?.Value ?? throw new ArgumentNullException(nameof(options)); + _timeProvider = timeProvider ?? TimeProvider.System; + _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + } + + public async Task<int> ProcessAsync(NotifyEvent notifyEvent, string workerId, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(notifyEvent); + ArgumentException.ThrowIfNullOrWhiteSpace(workerId); + + var tenantId = notifyEvent.Tenant; + var evaluationTime = _timeProvider.GetUtcNow(); + + IReadOnlyList<NotifyRule> rules; + try + { + rules = await _ruleRepository.ListAsync(tenantId, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) + { + _logger.LogError(ex, "Failed to load rules for tenant {TenantId}.", tenantId); + throw; + } + + if (rules.Count == 0) + { + _logger.LogDebug("No rules found for tenant {TenantId}.", tenantId); + return 0; + } + + var enabledRules = rules.Where(static rule => rule.Enabled).ToArray(); + if (enabledRules.Length == 0) + { + _logger.LogDebug("All rules are disabled for tenant {TenantId}.", tenantId); + return 0; + } + + var outcomes = _ruleEvaluator.Evaluate(enabledRules, notifyEvent, evaluationTime); + if (outcomes.IsDefaultOrEmpty) + { + _logger.LogDebug( + "Event {EventId} produced no matches for tenant {TenantId}.", + notifyEvent.EventId, + tenantId); + return 0; + } + + var created = 0; + foreach (var outcome in outcomes) + { + foreach (var action in outcome.Actions) + { + var ttl = ResolveIdempotencyTtl(action); + var idempotencyKey = IdempotencyKeyBuilder.Build(tenantId, outcome.Rule.RuleId, action.ActionId, notifyEvent); + + bool reserved; + try + { + reserved = await _lockRepository.TryAcquireAsync(tenantId, idempotencyKey, workerId, ttl, cancellationToken) + .ConfigureAwait(false); + } + catch (Exception ex) + { + _logger.LogError( + ex, + "Failed to reserve idempotency token for tenant {TenantId}, rule {RuleId}, action {ActionId}.", + tenantId, + outcome.Rule.RuleId, + action.ActionId); + throw; + } + + if (!reserved) + { + _logger.LogInformation( + "Skipped event {EventId} for tenant {TenantId}, rule {RuleId}, action {ActionId} due to idempotency.", + notifyEvent.EventId, + tenantId, + outcome.Rule.RuleId, + action.ActionId); + continue; + } + + var delivery = NotifyDelivery.Create( + deliveryId: Guid.NewGuid().ToString("N"), + tenantId: tenantId, + ruleId: outcome.Rule.RuleId, + actionId: action.ActionId, + eventId: notifyEvent.EventId, + kind: notifyEvent.Kind, + status: NotifyDeliveryStatus.Pending, + metadata: BuildDeliveryMetadata(action)); + + try + { + await _deliveryRepository.AppendAsync(delivery, cancellationToken).ConfigureAwait(false); + created++; + } + catch (Exception ex) + { + _logger.LogError( + ex, + "Failed to persist delivery record for tenant {TenantId}, rule {RuleId}, action {ActionId}.", + tenantId, + outcome.Rule.RuleId, + action.ActionId); + throw; + } + } + } + + return created; + } + + private TimeSpan ResolveIdempotencyTtl(NotifyRuleAction action) + { + if (action.Throttle is { Ticks: > 0 } throttle) + { + return throttle; + } + + if (_options.DefaultIdempotencyTtl > TimeSpan.Zero) + { + return _options.DefaultIdempotencyTtl; + } + + return TimeSpan.FromMinutes(5); + } + + private static IEnumerable<KeyValuePair<string, string>> BuildDeliveryMetadata(NotifyRuleAction action) + { + var metadata = new List<KeyValuePair<string, string>> + { + new("channel", action.Channel) + }; + + if (!string.IsNullOrWhiteSpace(action.Template)) + { + metadata.Add(new("template", action.Template)); + } + + if (!string.IsNullOrWhiteSpace(action.Digest)) + { + metadata.Add(new("digest", action.Digest)); + } + + if 
(!string.IsNullOrWhiteSpace(action.Locale)) + { + metadata.Add(new("locale", action.Locale)); + } + + foreach (var (key, value) in action.Metadata) + { + if (!string.IsNullOrWhiteSpace(key) && !string.IsNullOrWhiteSpace(value)) + { + metadata.Add(new(key, value)); + } + } + + return metadata; + } +} diff --git a/src/StellaOps.Notifier/StellaOps.Notifier.Worker/Processing/NotifierEventWorker.cs b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/Processing/NotifierEventWorker.cs similarity index 97% rename from src/StellaOps.Notifier/StellaOps.Notifier.Worker/Processing/NotifierEventWorker.cs rename to src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/Processing/NotifierEventWorker.cs index db1121e6..86f8a021 100644 --- a/src/StellaOps.Notifier/StellaOps.Notifier.Worker/Processing/NotifierEventWorker.cs +++ b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/Processing/NotifierEventWorker.cs @@ -1,120 +1,120 @@ -using Microsoft.Extensions.Hosting; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Notify.Queue; -using StellaOps.Notifier.Worker.Options; - -namespace StellaOps.Notifier.Worker.Processing; - -internal sealed class NotifierEventWorker : BackgroundService -{ - private readonly INotifyEventQueue _queue; - private readonly NotifierEventProcessor _processor; - private readonly NotifierWorkerOptions _options; - private readonly ILogger<NotifierEventWorker> _logger; - private readonly string _workerId; - private readonly TimeProvider _timeProvider; - - public NotifierEventWorker( - INotifyEventQueue queue, - NotifierEventProcessor processor, - IOptions<NotifierWorkerOptions> options, - TimeProvider timeProvider, - ILogger<NotifierEventWorker> logger) - { - _queue = queue ?? throw new ArgumentNullException(nameof(queue)); - _processor = processor ?? throw new ArgumentNullException(nameof(processor)); - _options = options?.Value ?? throw new ArgumentNullException(nameof(options)); - _timeProvider = timeProvider ?? TimeProvider.System; - _logger = logger ?? 
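`NotifierEventProcessor` above guards each rule action with a TTL lock from `INotifyLockRepository`, keyed by `IdempotencyKeyBuilder.Build(...)`, whose source is not included in this hunk. The foundations note later in the patch states the key is derived deterministically from the tenant, rule, action, and event, so a minimal sketch under that assumption (field order, separator, and hash encoding are illustrative) could be:

using System;
using System.Security.Cryptography;
using System.Text;

// Illustrative only: the real IdempotencyKeyBuilder may also fold in the event payload digest
// or use a different canonical encoding.
internal static class IdempotencyKeySketch
{
    public static string Build(string tenantId, string ruleId, string actionId, string eventId)
    {
        var payload = string.Join('|', tenantId, ruleId, actionId, eventId);
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(payload));
        return Convert.ToHexString(hash).ToLowerInvariant();
    }
}

Because the key is stable for a given tenant/rule/action/event tuple, a redelivered queue message maps onto the same lock document, `TryAcquireAsync` returns false, and the `!reserved` branch above skips the duplicate rather than writing a second ledger entry.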
throw new ArgumentNullException(nameof(logger)); - _workerId = $"notifier-worker-{Environment.MachineName}-{Guid.NewGuid():N}"; - } - - protected override async Task ExecuteAsync(CancellationToken stoppingToken) - { - _logger.LogInformation("Notifier event worker {WorkerId} started.", _workerId); - - while (!stoppingToken.IsCancellationRequested) - { - try - { - var leases = await _queue.LeaseAsync(BuildLeaseRequest(), stoppingToken).ConfigureAwait(false); - if (leases.Count == 0) - { - await Task.Delay(TimeSpan.FromSeconds(1), stoppingToken).ConfigureAwait(false); - continue; - } - - foreach (var lease in leases) - { - stoppingToken.ThrowIfCancellationRequested(); - - try - { - var processed = await _processor - .ProcessAsync(lease.Message.Event, _workerId, stoppingToken) - .ConfigureAwait(false); - - await lease.AcknowledgeAsync(stoppingToken).ConfigureAwait(false); - - _logger.LogInformation( - "Processed event {EventId} for tenant {TenantId}; created {DeliveryCount} deliveries.", - lease.Message.Event.EventId, - lease.Message.TenantId, - processed); - } - catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested) - { - await SafeReleaseAsync(lease, NotifyQueueReleaseDisposition.Retry).ConfigureAwait(false); - throw; - } - catch (Exception ex) - { - _logger.LogError( - ex, - "Failed processing event {EventId} (tenant {TenantId}); scheduling retry.", - lease.Message.Event.EventId, - lease.Message.TenantId); - - await SafeReleaseAsync(lease, NotifyQueueReleaseDisposition.Retry).ConfigureAwait(false); - } - } - } - catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested) - { - break; - } - catch (Exception ex) - { - _logger.LogError(ex, "Unhandled exception within notifier event worker loop."); - await Task.Delay(TimeSpan.FromSeconds(5), stoppingToken).ConfigureAwait(false); - } - } - - _logger.LogInformation("Notifier event worker {WorkerId} stopping.", _workerId); - } - - private NotifyQueueLeaseRequest BuildLeaseRequest() - { - var batchSize = Math.Max(1, _options.LeaseBatchSize); - var leaseDuration = _options.LeaseDuration > TimeSpan.Zero - ? _options.LeaseDuration - : TimeSpan.FromSeconds(60); - - return new NotifyQueueLeaseRequest(_workerId, batchSize, leaseDuration); - } - - private static async Task SafeReleaseAsync( - INotifyQueueLease<NotifyQueueEventMessage> lease, - NotifyQueueReleaseDisposition disposition) - { - try - { - await lease.ReleaseAsync(disposition, CancellationToken.None).ConfigureAwait(false); - } - catch - { - // Suppress release errors during shutdown/cleanup. - } - } -} +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Notify.Queue; +using StellaOps.Notifier.Worker.Options; + +namespace StellaOps.Notifier.Worker.Processing; + +internal sealed class NotifierEventWorker : BackgroundService +{ + private readonly INotifyEventQueue _queue; + private readonly NotifierEventProcessor _processor; + private readonly NotifierWorkerOptions _options; + private readonly ILogger<NotifierEventWorker> _logger; + private readonly string _workerId; + private readonly TimeProvider _timeProvider; + + public NotifierEventWorker( + INotifyEventQueue queue, + NotifierEventProcessor processor, + IOptions<NotifierWorkerOptions> options, + TimeProvider timeProvider, + ILogger<NotifierEventWorker> logger) + { + _queue = queue ?? throw new ArgumentNullException(nameof(queue)); + _processor = processor ?? 
throw new ArgumentNullException(nameof(processor)); + _options = options?.Value ?? throw new ArgumentNullException(nameof(options)); + _timeProvider = timeProvider ?? TimeProvider.System; + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _workerId = $"notifier-worker-{Environment.MachineName}-{Guid.NewGuid():N}"; + } + + protected override async Task ExecuteAsync(CancellationToken stoppingToken) + { + _logger.LogInformation("Notifier event worker {WorkerId} started.", _workerId); + + while (!stoppingToken.IsCancellationRequested) + { + try + { + var leases = await _queue.LeaseAsync(BuildLeaseRequest(), stoppingToken).ConfigureAwait(false); + if (leases.Count == 0) + { + await Task.Delay(TimeSpan.FromSeconds(1), stoppingToken).ConfigureAwait(false); + continue; + } + + foreach (var lease in leases) + { + stoppingToken.ThrowIfCancellationRequested(); + + try + { + var processed = await _processor + .ProcessAsync(lease.Message.Event, _workerId, stoppingToken) + .ConfigureAwait(false); + + await lease.AcknowledgeAsync(stoppingToken).ConfigureAwait(false); + + _logger.LogInformation( + "Processed event {EventId} for tenant {TenantId}; created {DeliveryCount} deliveries.", + lease.Message.Event.EventId, + lease.Message.TenantId, + processed); + } + catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested) + { + await SafeReleaseAsync(lease, NotifyQueueReleaseDisposition.Retry).ConfigureAwait(false); + throw; + } + catch (Exception ex) + { + _logger.LogError( + ex, + "Failed processing event {EventId} (tenant {TenantId}); scheduling retry.", + lease.Message.Event.EventId, + lease.Message.TenantId); + + await SafeReleaseAsync(lease, NotifyQueueReleaseDisposition.Retry).ConfigureAwait(false); + } + } + } + catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested) + { + break; + } + catch (Exception ex) + { + _logger.LogError(ex, "Unhandled exception within notifier event worker loop."); + await Task.Delay(TimeSpan.FromSeconds(5), stoppingToken).ConfigureAwait(false); + } + } + + _logger.LogInformation("Notifier event worker {WorkerId} stopping.", _workerId); + } + + private NotifyQueueLeaseRequest BuildLeaseRequest() + { + var batchSize = Math.Max(1, _options.LeaseBatchSize); + var leaseDuration = _options.LeaseDuration > TimeSpan.Zero + ? _options.LeaseDuration + : TimeSpan.FromSeconds(60); + + return new NotifyQueueLeaseRequest(_workerId, batchSize, leaseDuration); + } + + private static async Task SafeReleaseAsync( + INotifyQueueLease<NotifyQueueEventMessage> lease, + NotifyQueueReleaseDisposition disposition) + { + try + { + await lease.ReleaseAsync(disposition, CancellationToken.None).ConfigureAwait(false); + } + catch + { + // Suppress release errors during shutdown/cleanup. 
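Both the lease loop above (`LeaseBatchSize`, `LeaseDuration`) and `NotifierEventProcessor` (`DefaultIdempotencyTtl`) pull their tuning from `NotifierWorkerOptions`, which Program.cs further down in this patch binds to the `notifier:worker` configuration section. The options class itself is not shown in the diff; a sketch limited to the properties the shown code actually reads (default values are assumptions mirroring the in-code fallbacks) might be:

using System;

// Assumed shape; only these three properties are referenced by the code in this patch.
public sealed class NotifierWorkerOptions
{
    // Messages leased per poll; the worker clamps this to at least 1.
    public int LeaseBatchSize { get; set; } = 10;

    // Visibility window for a leased message; the worker falls back to 60 s when non-positive.
    public TimeSpan LeaseDuration { get; set; } = TimeSpan.FromSeconds(60);

    // TTL for idempotency locks when a rule action defines no throttle;
    // the processor falls back to 5 min if this is non-positive.
    public TimeSpan DefaultIdempotencyTtl { get; set; } = TimeSpan.FromMinutes(5);
}

With that shape, a configuration fragment such as `{"notifier": {"worker": {"leaseBatchSize": 16, "leaseDuration": "00:00:45", "defaultIdempotencyTtl": "00:10:00"}}}` in appsettings.json (or the equivalent `NOTIFIER_`-prefixed environment variables) would bind through the `Configure<NotifierWorkerOptions>` call in Program.cs.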
+ } + } +} diff --git a/src/StellaOps.Notifier/StellaOps.Notifier.Worker/Program.cs b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/Program.cs similarity index 97% rename from src/StellaOps.Notifier/StellaOps.Notifier.Worker/Program.cs rename to src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/Program.cs index 82aad513..13eb4ccf 100644 --- a/src/StellaOps.Notifier/StellaOps.Notifier.Worker/Program.cs +++ b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/Program.cs @@ -1,38 +1,38 @@ -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Hosting; -using Microsoft.Extensions.Logging; -using StellaOps.Notify.Engine; -using StellaOps.Notify.Queue; -using StellaOps.Notify.Storage.Mongo; -using StellaOps.Notifier.Worker.Options; -using StellaOps.Notifier.Worker.Processing; - -var builder = Host.CreateApplicationBuilder(args); - -builder.Configuration - .AddJsonFile("appsettings.json", optional: true, reloadOnChange: true) - .AddEnvironmentVariables(prefix: "NOTIFIER_"); - -builder.Logging.ClearProviders(); -builder.Logging.AddSimpleConsole(options => -{ - options.TimestampFormat = "yyyy-MM-ddTHH:mm:ss.fffZ "; - options.UseUtcTimestamp = true; -}); - -builder.Services.Configure<NotifierWorkerOptions>(builder.Configuration.GetSection("notifier:worker")); -builder.Services.AddSingleton(TimeProvider.System); - -var mongoSection = builder.Configuration.GetSection("notifier:storage:mongo"); -builder.Services.AddNotifyMongoStorage(mongoSection); - -builder.Services.AddNotifyEventQueue(builder.Configuration, "notifier:queue"); -builder.Services.AddHealthChecks().AddNotifyQueueHealthCheck(); - -builder.Services.AddSingleton<INotifyRuleEvaluator, DefaultNotifyRuleEvaluator>(); -builder.Services.AddSingleton<NotifierEventProcessor>(); -builder.Services.AddHostedService<MongoInitializationHostedService>(); -builder.Services.AddHostedService<NotifierEventWorker>(); - -await builder.Build().RunAsync().ConfigureAwait(false); +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using StellaOps.Notify.Engine; +using StellaOps.Notify.Queue; +using StellaOps.Notify.Storage.Mongo; +using StellaOps.Notifier.Worker.Options; +using StellaOps.Notifier.Worker.Processing; + +var builder = Host.CreateApplicationBuilder(args); + +builder.Configuration + .AddJsonFile("appsettings.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables(prefix: "NOTIFIER_"); + +builder.Logging.ClearProviders(); +builder.Logging.AddSimpleConsole(options => +{ + options.TimestampFormat = "yyyy-MM-ddTHH:mm:ss.fffZ "; + options.UseUtcTimestamp = true; +}); + +builder.Services.Configure<NotifierWorkerOptions>(builder.Configuration.GetSection("notifier:worker")); +builder.Services.AddSingleton(TimeProvider.System); + +var mongoSection = builder.Configuration.GetSection("notifier:storage:mongo"); +builder.Services.AddNotifyMongoStorage(mongoSection); + +builder.Services.AddNotifyEventQueue(builder.Configuration, "notifier:queue"); +builder.Services.AddHealthChecks().AddNotifyQueueHealthCheck(); + +builder.Services.AddSingleton<INotifyRuleEvaluator, DefaultNotifyRuleEvaluator>(); +builder.Services.AddSingleton<NotifierEventProcessor>(); +builder.Services.AddHostedService<MongoInitializationHostedService>(); +builder.Services.AddHostedService<NotifierEventWorker>(); + +await builder.Build().RunAsync().ConfigureAwait(false); diff 
--git a/src/StellaOps.Notifier/StellaOps.Notifier.Worker/Properties/AssemblyInfo.cs b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/Properties/AssemblyInfo.cs similarity index 97% rename from src/StellaOps.Notifier/StellaOps.Notifier.Worker/Properties/AssemblyInfo.cs rename to src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/Properties/AssemblyInfo.cs index f008e761..1c88759a 100644 --- a/src/StellaOps.Notifier/StellaOps.Notifier.Worker/Properties/AssemblyInfo.cs +++ b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/Properties/AssemblyInfo.cs @@ -1,3 +1,3 @@ -using System.Runtime.CompilerServices; - -[assembly: InternalsVisibleTo("StellaOps.Notifier.Tests")] +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("StellaOps.Notifier.Tests")] diff --git a/src/StellaOps.Notifier/StellaOps.Notifier.Worker/Properties/launchSettings.json b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/Properties/launchSettings.json similarity index 95% rename from src/StellaOps.Notifier/StellaOps.Notifier.Worker/Properties/launchSettings.json rename to src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/Properties/launchSettings.json index 66be97eb..77f39360 100644 --- a/src/StellaOps.Notifier/StellaOps.Notifier.Worker/Properties/launchSettings.json +++ b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/Properties/launchSettings.json @@ -1,12 +1,12 @@ -{ - "$schema": "https://json.schemastore.org/launchsettings.json", - "profiles": { - "StellaOps.Notifier.Worker": { - "commandName": "Project", - "dotnetRunMessages": true, - "environmentVariables": { - "DOTNET_ENVIRONMENT": "Development" - } - } - } -} +{ + "$schema": "https://json.schemastore.org/launchsettings.json", + "profiles": { + "StellaOps.Notifier.Worker": { + "commandName": "Project", + "dotnetRunMessages": true, + "environmentVariables": { + "DOTNET_ENVIRONMENT": "Development" + } + } + } +} diff --git a/src/StellaOps.Notifier/StellaOps.Notifier.Worker/StellaOps.Notifier.Worker.csproj b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/StellaOps.Notifier.Worker.csproj similarity index 59% rename from src/StellaOps.Notifier/StellaOps.Notifier.Worker/StellaOps.Notifier.Worker.csproj rename to src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/StellaOps.Notifier.Worker.csproj index 5ebaffda..f7e181cb 100644 --- a/src/StellaOps.Notifier/StellaOps.Notifier.Worker/StellaOps.Notifier.Worker.csproj +++ b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/StellaOps.Notifier.Worker.csproj @@ -1,4 +1,4 @@ -<?xml version="1.0" encoding="utf-8"?> +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk.Worker"> <PropertyGroup> <UserSecretsId>dotnet-StellaOps.Notifier.Worker-557c5516-a796-4499-942e-a0668e3e9622</UserSecretsId> @@ -16,9 +16,9 @@ </ItemGroup> <ItemGroup> - <ProjectReference Include="..\..\StellaOps.Notify.Models\StellaOps.Notify.Models.csproj" /> - <ProjectReference Include="..\..\StellaOps.Notify.Queue\StellaOps.Notify.Queue.csproj" /> - <ProjectReference Include="..\..\StellaOps.Notify.Storage.Mongo\StellaOps.Notify.Storage.Mongo.csproj" /> - <ProjectReference Include="..\..\StellaOps.Notify.Engine\StellaOps.Notify.Engine.csproj" /> + <ProjectReference Include="../../../Notify/__Libraries/StellaOps.Notify.Models/StellaOps.Notify.Models.csproj" /> + <ProjectReference Include="../../../Notify/__Libraries/StellaOps.Notify.Queue/StellaOps.Notify.Queue.csproj" /> + <ProjectReference 
Include="../../../Notify/__Libraries/StellaOps.Notify.Storage.Mongo/StellaOps.Notify.Storage.Mongo.csproj" /> + <ProjectReference Include="../../../Notify/__Libraries/StellaOps.Notify.Engine/StellaOps.Notify.Engine.csproj" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Notifier/StellaOps.Notifier.Worker/appsettings.Development.json b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/appsettings.Development.json similarity index 94% rename from src/StellaOps.Notifier/StellaOps.Notifier.Worker/appsettings.Development.json rename to src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/appsettings.Development.json index b2dcdb67..69017646 100644 --- a/src/StellaOps.Notifier/StellaOps.Notifier.Worker/appsettings.Development.json +++ b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/appsettings.Development.json @@ -1,8 +1,8 @@ -{ - "Logging": { - "LogLevel": { - "Default": "Information", - "Microsoft.Hosting.Lifetime": "Information" - } - } -} +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.Hosting.Lifetime": "Information" + } + } +} diff --git a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/appsettings.json b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/appsettings.json similarity index 94% rename from src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/appsettings.json rename to src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/appsettings.json index b2dcdb67..69017646 100644 --- a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.Worker/appsettings.json +++ b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.Worker/appsettings.json @@ -1,8 +1,8 @@ -{ - "Logging": { - "LogLevel": { - "Default": "Information", - "Microsoft.Hosting.Lifetime": "Information" - } - } -} +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.Hosting.Lifetime": "Information" + } + } +} diff --git a/src/StellaOps.Notifier/StellaOps.Notifier.sln b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.sln similarity index 98% rename from src/StellaOps.Notifier/StellaOps.Notifier.sln rename to src/Notifier/StellaOps.Notifier/StellaOps.Notifier.sln index 56be1594..0cb7c239 100644 --- a/src/StellaOps.Notifier/StellaOps.Notifier.sln +++ b/src/Notifier/StellaOps.Notifier/StellaOps.Notifier.sln @@ -1,62 +1,62 @@ - -Microsoft Visual Studio Solution File, Format Version 12.00 -# Visual Studio Version 17 -VisualStudioVersion = 17.0.31903.59 -MinimumVisualStudioVersion = 10.0.40219.1 -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Notifier.WebService", "StellaOps.Notifier.WebService\StellaOps.Notifier.WebService.csproj", "{D14281B8-BC8E-4D31-B1FC-E3C9565F7482}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Notifier.Worker", "StellaOps.Notifier.Worker\StellaOps.Notifier.Worker.csproj", "{A134A9AE-CC9E-4AC7-8CD7-8C7BBF45CD02}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Notifier.Tests", "StellaOps.Notifier.Tests\StellaOps.Notifier.Tests.csproj", "{1DFEC971-61F4-4E63-A903-C04062C84967}" -EndProject -Global - GlobalSection(SolutionConfigurationPlatforms) = preSolution - Debug|Any CPU = Debug|Any CPU - Debug|x64 = Debug|x64 - Debug|x86 = Debug|x86 - Release|Any CPU = Release|Any CPU - Release|x64 = Release|x64 - Release|x86 = Release|x86 - EndGlobalSection - GlobalSection(ProjectConfigurationPlatforms) = postSolution - {D14281B8-BC8E-4D31-B1FC-E3C9565F7482}.Debug|Any CPU.ActiveCfg = Debug|Any CPU 
- {D14281B8-BC8E-4D31-B1FC-E3C9565F7482}.Debug|Any CPU.Build.0 = Debug|Any CPU - {D14281B8-BC8E-4D31-B1FC-E3C9565F7482}.Debug|x64.ActiveCfg = Debug|Any CPU - {D14281B8-BC8E-4D31-B1FC-E3C9565F7482}.Debug|x64.Build.0 = Debug|Any CPU - {D14281B8-BC8E-4D31-B1FC-E3C9565F7482}.Debug|x86.ActiveCfg = Debug|Any CPU - {D14281B8-BC8E-4D31-B1FC-E3C9565F7482}.Debug|x86.Build.0 = Debug|Any CPU - {D14281B8-BC8E-4D31-B1FC-E3C9565F7482}.Release|Any CPU.ActiveCfg = Release|Any CPU - {D14281B8-BC8E-4D31-B1FC-E3C9565F7482}.Release|Any CPU.Build.0 = Release|Any CPU - {D14281B8-BC8E-4D31-B1FC-E3C9565F7482}.Release|x64.ActiveCfg = Release|Any CPU - {D14281B8-BC8E-4D31-B1FC-E3C9565F7482}.Release|x64.Build.0 = Release|Any CPU - {D14281B8-BC8E-4D31-B1FC-E3C9565F7482}.Release|x86.ActiveCfg = Release|Any CPU - {D14281B8-BC8E-4D31-B1FC-E3C9565F7482}.Release|x86.Build.0 = Release|Any CPU - {A134A9AE-CC9E-4AC7-8CD7-8C7BBF45CD02}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {A134A9AE-CC9E-4AC7-8CD7-8C7BBF45CD02}.Debug|Any CPU.Build.0 = Debug|Any CPU - {A134A9AE-CC9E-4AC7-8CD7-8C7BBF45CD02}.Debug|x64.ActiveCfg = Debug|Any CPU - {A134A9AE-CC9E-4AC7-8CD7-8C7BBF45CD02}.Debug|x64.Build.0 = Debug|Any CPU - {A134A9AE-CC9E-4AC7-8CD7-8C7BBF45CD02}.Debug|x86.ActiveCfg = Debug|Any CPU - {A134A9AE-CC9E-4AC7-8CD7-8C7BBF45CD02}.Debug|x86.Build.0 = Debug|Any CPU - {A134A9AE-CC9E-4AC7-8CD7-8C7BBF45CD02}.Release|Any CPU.ActiveCfg = Release|Any CPU - {A134A9AE-CC9E-4AC7-8CD7-8C7BBF45CD02}.Release|Any CPU.Build.0 = Release|Any CPU - {A134A9AE-CC9E-4AC7-8CD7-8C7BBF45CD02}.Release|x64.ActiveCfg = Release|Any CPU - {A134A9AE-CC9E-4AC7-8CD7-8C7BBF45CD02}.Release|x64.Build.0 = Release|Any CPU - {A134A9AE-CC9E-4AC7-8CD7-8C7BBF45CD02}.Release|x86.ActiveCfg = Release|Any CPU - {A134A9AE-CC9E-4AC7-8CD7-8C7BBF45CD02}.Release|x86.Build.0 = Release|Any CPU - {1DFEC971-61F4-4E63-A903-C04062C84967}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {1DFEC971-61F4-4E63-A903-C04062C84967}.Debug|Any CPU.Build.0 = Debug|Any CPU - {1DFEC971-61F4-4E63-A903-C04062C84967}.Debug|x64.ActiveCfg = Debug|Any CPU - {1DFEC971-61F4-4E63-A903-C04062C84967}.Debug|x64.Build.0 = Debug|Any CPU - {1DFEC971-61F4-4E63-A903-C04062C84967}.Debug|x86.ActiveCfg = Debug|Any CPU - {1DFEC971-61F4-4E63-A903-C04062C84967}.Debug|x86.Build.0 = Debug|Any CPU - {1DFEC971-61F4-4E63-A903-C04062C84967}.Release|Any CPU.ActiveCfg = Release|Any CPU - {1DFEC971-61F4-4E63-A903-C04062C84967}.Release|Any CPU.Build.0 = Release|Any CPU - {1DFEC971-61F4-4E63-A903-C04062C84967}.Release|x64.ActiveCfg = Release|Any CPU - {1DFEC971-61F4-4E63-A903-C04062C84967}.Release|x64.Build.0 = Release|Any CPU - {1DFEC971-61F4-4E63-A903-C04062C84967}.Release|x86.ActiveCfg = Release|Any CPU - {1DFEC971-61F4-4E63-A903-C04062C84967}.Release|x86.Build.0 = Release|Any CPU - EndGlobalSection - GlobalSection(SolutionProperties) = preSolution - HideSolutionNode = FALSE - EndGlobalSection -EndGlobal + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.0.31903.59 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Notifier.WebService", "StellaOps.Notifier.WebService\StellaOps.Notifier.WebService.csproj", "{D14281B8-BC8E-4D31-B1FC-E3C9565F7482}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Notifier.Worker", "StellaOps.Notifier.Worker\StellaOps.Notifier.Worker.csproj", "{A134A9AE-CC9E-4AC7-8CD7-8C7BBF45CD02}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = 
"StellaOps.Notifier.Tests", "StellaOps.Notifier.Tests\StellaOps.Notifier.Tests.csproj", "{1DFEC971-61F4-4E63-A903-C04062C84967}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|Any CPU = Release|Any CPU + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {D14281B8-BC8E-4D31-B1FC-E3C9565F7482}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D14281B8-BC8E-4D31-B1FC-E3C9565F7482}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D14281B8-BC8E-4D31-B1FC-E3C9565F7482}.Debug|x64.ActiveCfg = Debug|Any CPU + {D14281B8-BC8E-4D31-B1FC-E3C9565F7482}.Debug|x64.Build.0 = Debug|Any CPU + {D14281B8-BC8E-4D31-B1FC-E3C9565F7482}.Debug|x86.ActiveCfg = Debug|Any CPU + {D14281B8-BC8E-4D31-B1FC-E3C9565F7482}.Debug|x86.Build.0 = Debug|Any CPU + {D14281B8-BC8E-4D31-B1FC-E3C9565F7482}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D14281B8-BC8E-4D31-B1FC-E3C9565F7482}.Release|Any CPU.Build.0 = Release|Any CPU + {D14281B8-BC8E-4D31-B1FC-E3C9565F7482}.Release|x64.ActiveCfg = Release|Any CPU + {D14281B8-BC8E-4D31-B1FC-E3C9565F7482}.Release|x64.Build.0 = Release|Any CPU + {D14281B8-BC8E-4D31-B1FC-E3C9565F7482}.Release|x86.ActiveCfg = Release|Any CPU + {D14281B8-BC8E-4D31-B1FC-E3C9565F7482}.Release|x86.Build.0 = Release|Any CPU + {A134A9AE-CC9E-4AC7-8CD7-8C7BBF45CD02}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {A134A9AE-CC9E-4AC7-8CD7-8C7BBF45CD02}.Debug|Any CPU.Build.0 = Debug|Any CPU + {A134A9AE-CC9E-4AC7-8CD7-8C7BBF45CD02}.Debug|x64.ActiveCfg = Debug|Any CPU + {A134A9AE-CC9E-4AC7-8CD7-8C7BBF45CD02}.Debug|x64.Build.0 = Debug|Any CPU + {A134A9AE-CC9E-4AC7-8CD7-8C7BBF45CD02}.Debug|x86.ActiveCfg = Debug|Any CPU + {A134A9AE-CC9E-4AC7-8CD7-8C7BBF45CD02}.Debug|x86.Build.0 = Debug|Any CPU + {A134A9AE-CC9E-4AC7-8CD7-8C7BBF45CD02}.Release|Any CPU.ActiveCfg = Release|Any CPU + {A134A9AE-CC9E-4AC7-8CD7-8C7BBF45CD02}.Release|Any CPU.Build.0 = Release|Any CPU + {A134A9AE-CC9E-4AC7-8CD7-8C7BBF45CD02}.Release|x64.ActiveCfg = Release|Any CPU + {A134A9AE-CC9E-4AC7-8CD7-8C7BBF45CD02}.Release|x64.Build.0 = Release|Any CPU + {A134A9AE-CC9E-4AC7-8CD7-8C7BBF45CD02}.Release|x86.ActiveCfg = Release|Any CPU + {A134A9AE-CC9E-4AC7-8CD7-8C7BBF45CD02}.Release|x86.Build.0 = Release|Any CPU + {1DFEC971-61F4-4E63-A903-C04062C84967}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {1DFEC971-61F4-4E63-A903-C04062C84967}.Debug|Any CPU.Build.0 = Debug|Any CPU + {1DFEC971-61F4-4E63-A903-C04062C84967}.Debug|x64.ActiveCfg = Debug|Any CPU + {1DFEC971-61F4-4E63-A903-C04062C84967}.Debug|x64.Build.0 = Debug|Any CPU + {1DFEC971-61F4-4E63-A903-C04062C84967}.Debug|x86.ActiveCfg = Debug|Any CPU + {1DFEC971-61F4-4E63-A903-C04062C84967}.Debug|x86.Build.0 = Debug|Any CPU + {1DFEC971-61F4-4E63-A903-C04062C84967}.Release|Any CPU.ActiveCfg = Release|Any CPU + {1DFEC971-61F4-4E63-A903-C04062C84967}.Release|Any CPU.Build.0 = Release|Any CPU + {1DFEC971-61F4-4E63-A903-C04062C84967}.Release|x64.ActiveCfg = Release|Any CPU + {1DFEC971-61F4-4E63-A903-C04062C84967}.Release|x64.Build.0 = Release|Any CPU + {1DFEC971-61F4-4E63-A903-C04062C84967}.Release|x86.ActiveCfg = Release|Any CPU + {1DFEC971-61F4-4E63-A903-C04062C84967}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection +EndGlobal diff --git a/src/StellaOps.Notifier/TASKS.md b/src/Notifier/StellaOps.Notifier/TASKS.md similarity index 99% 
rename from src/StellaOps.Notifier/TASKS.md rename to src/Notifier/StellaOps.Notifier/TASKS.md index 1da98802..d62a55bb 100644 --- a/src/StellaOps.Notifier/TASKS.md +++ b/src/Notifier/StellaOps.Notifier/TASKS.md @@ -1,74 +1,74 @@ -# Notifier Service Task Board — Epic 11: Notifications Studio - -# Sprint 37 – Pack Approval Bridge (Task Runner integration) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| NOTIFY-SVC-37-001 | TODO | Notifications Service Guild | TASKRUN-43-001 | Define pack approval & policy notification contract, including OpenAPI schema, event payloads, resume token mechanics, and security guidance. | Requirements doc published (`docs/notifications/pack-approvals-integration.md`), OpenAPI fragment merged, reviewers sign off from Task Runner & Authority guilds. | -| NOTIFY-SVC-37-002 | TODO | Notifications Service Guild | NOTIFY-SVC-37-001 | Implement secure ingestion endpoint, Mongo persistence (`pack_approvals`), idempotent writes, and audit trail for approval events. | Endpoint authenticated/authorized, persistence migrations merged, integration tests cover happy/error paths, audit log samples recorded. | -| NOTIFY-SVC-37-003 | TODO | Notifications Service Guild | NOTIFY-SVC-37-001 | Deliver approval/policy templates, routing predicates, and channel dispatch (email + webhook) with localization + redaction. | Templates rendered, routing rules active, localization fallback tested, sample notifications archived. | -| NOTIFY-SVC-37-004 | TODO | Notifications Service Guild | NOTIFY-SVC-37-002 | Provide acknowledgement API, Task Runner callback client, metrics for outstanding approvals, and runbook updates. | Ack endpoint live, resume callback validated with Task Runner simulator, metrics/dashboards in place, runbook entry updated. | - -## Sprint 38 – Foundations (Immediate notifications) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| NOTIFY-SVC-38-001 | DONE (2025-10-29) | Notifications Service Guild | ORCH-SVC-38-101, AUTH-NOTIFY-38-001 | Bootstrap notifier service, DB migrations (`notif_*` tables), event ingestion consumer with idempotency, and baseline rule/routing engine for policy violations + job failures. | Service builds/tests; migrations scripted; ingestion handles orchestrator events; initial rules evaluated deterministically; compliance checklist recorded. | -> 2025-10-29: Worker/WebService now compose `StellaOps.Notify.Storage.Mongo` + `StellaOps.Notify.Queue`, with a default rule evaluator and idempotent delivery ledger. See `docs/NOTIFY-SVC-38-001-FOUNDATIONS.md` for implementation notes and follow-ups. -| NOTIFY-SVC-38-002 | TODO | Notifications Service Guild | NOTIFY-SVC-38-001 | Implement channel adapters (email, chat webhook, generic webhook) with retry policies, health checks, and audit logging. | Adapters send test notifications; retries/backoff validated; health endpoints available; audit logs captured. | -| NOTIFY-SVC-38-003 | TODO | Notifications Service Guild | NOTIFY-SVC-38-001 | Deliver template service (versioned templates, localization scaffolding) and renderer with redaction allowlists, Markdown/HTML/JSON outputs, and provenance links. | Templates versioned; preview API works; rendered content includes provenance; redaction tests pass. 
| -| NOTIFY-SVC-38-004 | TODO | Notifications Service Guild | NOTIFY-SVC-38-001..003 | Expose REST + WS APIs (rules CRUD, templates preview, incidents list, ack) with audit logging, RBAC checks, and live feed stream. | OpenAPI published; WS feed delivers events; ack endpoint updates state; tests cover RBAC and audit logs. | - -## Sprint 39 – Correlation, Digests, Simulation -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| NOTIFY-SVC-39-001 | TODO | Notifications Service Guild | NOTIFY-SVC-38-004 | Implement correlation engine with pluggable key expressions/windows, throttler (token buckets), quiet hours/maintenance evaluator, and incident lifecycle. | Correlation merges duplicates; throttling enforced; quiet hours respect tenant schedules; incident state transitions tested. | -| NOTIFY-SVC-39-002 | TODO | Notifications Service Guild | NOTIFY-SVC-39-001, LEDGER-NOTIFY-39-001 | Build digest generator (queries, formatting) with schedule runner and distribution via existing channels. | Digests generated on schedule; content accurate; provenance linked; metrics emitted. | -| NOTIFY-SVC-39-003 | TODO | Notifications Service Guild | NOTIFY-SVC-39-001 | Provide simulation engine/API to dry-run rules against historical events, returning matched actions with explanations. | Simulation endpoint returns deterministic results; explanation includes rule/field matches; integration tests pass. | -| NOTIFY-SVC-39-004 | TODO | Notifications Service Guild | NOTIFY-SVC-39-001 | Integrate quiet hour calendars and default throttles with audit logging and operator overrides. | Quiet schedules stored; overrides audited; preview API shows suppression windows; tests cover timezone handling. | - -## Sprint 40 – Escalations, Localization, Hardening -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| NOTIFY-SVC-40-001 | TODO | Notifications Service Guild | NOTIFY-SVC-39-001 | Implement escalations + on-call schedules, ack bridge, PagerDuty/OpsGenie adapters, and CLI/in-app inbox channels. | Escalation workflow operational; ack tokens flow; external adapters tested; inbox channel live. | -| NOTIFY-SVC-40-002 | TODO | Notifications Service Guild | NOTIFY-SVC-39-002 | Add summary storm breaker notifications, localization bundles, and localization fallback handling. | Storm breaker emits summaries; localization catalogs loaded; fallback behavior tested. | -| NOTIFY-SVC-40-003 | TODO | Notifications Service Guild | NOTIFY-SVC-38-004 | Harden security: signed ack links (KMS), webhook HMAC/IP allowlists, tenant isolation fuzz tests, HTML sanitization. | Ack tokens verified; webhook security enforced; fuzz tests green; sanitization validated. | -| NOTIFY-SVC-40-004 | TODO | Notifications Service Guild | NOTIFY-SVC-40-001..003 | Finalize observability (metrics/traces for escalations, latency), dead-letter handling, chaos tests for channel outages, and retention policies. | Metrics dashboards live; chaos run documented; DLQ drains; retention job operational. 
| - -## Authority-Backed Scopes & Tenancy (Epic 14) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| NOTIFY-TEN-48-001 | TODO | Notifications Service Guild | WEB-TEN-48-001 | Tenant-scope rules/templates/incidents, RLS on storage, tenant-prefixed channels, and inclusion of tenant context in notifications. | Notifications isolated per tenant; RLS enabled; tests cover cross-tenant leakage. | - -## Observability & Forensics (Epic 15) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| NOTIFY-OBS-51-001 | TODO | Notifications Service Guild, Observability Guild | DEVOPS-OBS-51-001, WEB-OBS-51-001 | Integrate SLO evaluator webhooks into Notifier rules (burn-rate breaches, health degradations) with templates, routing, and suppression logic. Provide sample policies and ensure imposed rule propagation. | Webhooks ingested; notifications delivered across channels; suppression guardrails tested; docs updated. | -| NOTIFY-OBS-55-001 | TODO | Notifications Service Guild, Ops Guild | DEVOPS-OBS-55-001, WEB-OBS-55-001 | Publish incident mode start/stop notifications with trace/evidence quick links, retention notes, and automatic escalation paths. Include quiet-hour overrides + legal compliance logging. | Incident notifications triggered in staging; CLI/Console deep links validated; audit logs capture scope usage. | - -## Air-Gapped Mode (Epic 16) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| NOTIFY-AIRGAP-56-001 | TODO | Notifications Service Guild | AIRGAP-CTL-56-002, AIRGAP-POL-56-001 | Disable external webhook targets in sealed mode, default to enclave-safe channels (SMTP relay, syslog, file sink), and surface remediation guidance. | Sealed mode blocks external channels; configuration validation raises errors; tests cover allowances. | -| NOTIFY-AIRGAP-56-002 | TODO | Notifications Service Guild, DevOps Guild | NOTIFY-AIRGAP-56-001, DEVOPS-AIRGAP-56-001 | Provide local notifier configurations bundled within Bootstrap Pack with deterministic secrets handling. | Offline config templates published; bootstrap script validated; docs updated. | -| NOTIFY-AIRGAP-57-001 | TODO | Notifications Service Guild, AirGap Time Guild | NOTIFY-AIRGAP-56-001, AIRGAP-TIME-58-001 | Send staleness drift and bundle import notifications with remediation steps. | Notifications emitted on thresholds; tests cover suppression/resend. | -| NOTIFY-AIRGAP-58-001 | TODO | Notifications Service Guild, Evidence Locker Guild | NOTIFY-AIRGAP-56-001, EVID-OBS-54-002 | Add portable evidence export completion notifications including checksum + location metadata. | Notification payload includes bundle details; audit logs recorded; CLI integration validated. | - -## SDKs & OpenAPI (Epic 17) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| NOTIFY-OAS-61-001 | TODO | Notifications Service Guild, API Contracts Guild | OAS-61-001 | Update notifier OAS with rules, templates, incidents, quiet hours endpoints using standard error envelope and examples. | Spec covers notifier APIs; lint passes; examples validated. | -| NOTIFY-OAS-61-002 | TODO | Notifications Service Guild | NOTIFY-OAS-61-001 | Implement `/.well-known/openapi` discovery endpoint with scope metadata. 
| Discovery endpoint live; contract tests cover response. | -| NOTIFY-OAS-62-001 | TODO | Notifications Service Guild, SDK Generator Guild | NOTIFY-OAS-61-001, SDKGEN-63-001 | Provide SDK usage examples for rule CRUD, incident ack, and quiet hours; ensure SDK smoke tests. | SDK tests cover notifier flows; docs embed snippets. | -| NOTIFY-OAS-63-001 | TODO | Notifications Service Guild, API Governance Guild | APIGOV-63-001 | Emit deprecation headers and Notifications templates for retiring notifier APIs. | Headers + notifications verified; documentation updated. | - -## Risk Profiles (Epic 18) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| NOTIFY-RISK-66-001 | TODO | Notifications Service Guild, Risk Engine Guild | RISK-ENGINE-68-001 | Add notification triggers for risk severity escalation/downgrade events with profile metadata in payload. | Trigger processed in staging; payload shows profile and explainability link; docs updated. | -| NOTIFY-RISK-67-001 | TODO | Notifications Service Guild, Policy Guild | POLICY-RISK-67-002 | Notify stakeholders when risk profiles are published, deprecated, or thresholds change. | Notifications delivered via email/chat; audit logs captured. | -| NOTIFY-RISK-68-001 | TODO | Notifications Service Guild | NOTIFY-RISK-66-001 | Support per-profile routing rules, quiet hours, and dedupe for risk alerts; integrate with CLI/Console preferences. | Routing/quiet-hour logic tested; UI exposes settings; metrics reflect dedupe. | - -## Attestor Console (Epic 19) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| NOTIFY-ATTEST-74-001 | TODO | Notifications Service Guild, Attestor Service Guild | ATTESTOR-73-002 | Create notification templates for verification failures, expiring attestations, key revocations, and transparency anomalies. | Templates deployed; staging verification failure triggers alert; documentation updated. | -| NOTIFY-ATTEST-74-002 | TODO | Notifications Service Guild, KMS Guild | KMS-73-001 | Wire notifications to key rotation/revocation events and transparency witness failures. | Rotation/revocation emits alerts; audit logs recorded; tests cover scenarios. | +# Notifier Service Task Board — Epic 11: Notifications Studio + +# Sprint 37 – Pack Approval Bridge (Task Runner integration) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| NOTIFY-SVC-37-001 | TODO | Notifications Service Guild | TASKRUN-43-001 | Define pack approval & policy notification contract, including OpenAPI schema, event payloads, resume token mechanics, and security guidance. | Requirements doc published (`docs/notifications/pack-approvals-integration.md`), OpenAPI fragment merged, reviewers sign off from Task Runner & Authority guilds. | +| NOTIFY-SVC-37-002 | TODO | Notifications Service Guild | NOTIFY-SVC-37-001 | Implement secure ingestion endpoint, Mongo persistence (`pack_approvals`), idempotent writes, and audit trail for approval events. | Endpoint authenticated/authorized, persistence migrations merged, integration tests cover happy/error paths, audit log samples recorded. | +| NOTIFY-SVC-37-003 | TODO | Notifications Service Guild | NOTIFY-SVC-37-001 | Deliver approval/policy templates, routing predicates, and channel dispatch (email + webhook) with localization + redaction. 
| Templates rendered, routing rules active, localization fallback tested, sample notifications archived. | +| NOTIFY-SVC-37-004 | TODO | Notifications Service Guild | NOTIFY-SVC-37-002 | Provide acknowledgement API, Task Runner callback client, metrics for outstanding approvals, and runbook updates. | Ack endpoint live, resume callback validated with Task Runner simulator, metrics/dashboards in place, runbook entry updated. | + +## Sprint 38 – Foundations (Immediate notifications) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| NOTIFY-SVC-38-001 | DONE (2025-10-29) | Notifications Service Guild | ORCH-SVC-38-101, AUTH-NOTIFY-38-001 | Bootstrap notifier service, DB migrations (`notif_*` tables), event ingestion consumer with idempotency, and baseline rule/routing engine for policy violations + job failures. | Service builds/tests; migrations scripted; ingestion handles orchestrator events; initial rules evaluated deterministically; compliance checklist recorded. | +> 2025-10-29: Worker/WebService now compose `StellaOps.Notify.Storage.Mongo` + `StellaOps.Notify.Queue`, with a default rule evaluator and idempotent delivery ledger. See `docs/NOTIFY-SVC-38-001-FOUNDATIONS.md` for implementation notes and follow-ups. +| NOTIFY-SVC-38-002 | TODO | Notifications Service Guild | NOTIFY-SVC-38-001 | Implement channel adapters (email, chat webhook, generic webhook) with retry policies, health checks, and audit logging. | Adapters send test notifications; retries/backoff validated; health endpoints available; audit logs captured. | +| NOTIFY-SVC-38-003 | TODO | Notifications Service Guild | NOTIFY-SVC-38-001 | Deliver template service (versioned templates, localization scaffolding) and renderer with redaction allowlists, Markdown/HTML/JSON outputs, and provenance links. | Templates versioned; preview API works; rendered content includes provenance; redaction tests pass. | +| NOTIFY-SVC-38-004 | TODO | Notifications Service Guild | NOTIFY-SVC-38-001..003 | Expose REST + WS APIs (rules CRUD, templates preview, incidents list, ack) with audit logging, RBAC checks, and live feed stream. | OpenAPI published; WS feed delivers events; ack endpoint updates state; tests cover RBAC and audit logs. | + +## Sprint 39 – Correlation, Digests, Simulation +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| NOTIFY-SVC-39-001 | TODO | Notifications Service Guild | NOTIFY-SVC-38-004 | Implement correlation engine with pluggable key expressions/windows, throttler (token buckets), quiet hours/maintenance evaluator, and incident lifecycle. | Correlation merges duplicates; throttling enforced; quiet hours respect tenant schedules; incident state transitions tested. | +| NOTIFY-SVC-39-002 | TODO | Notifications Service Guild | NOTIFY-SVC-39-001, LEDGER-NOTIFY-39-001 | Build digest generator (queries, formatting) with schedule runner and distribution via existing channels. | Digests generated on schedule; content accurate; provenance linked; metrics emitted. | +| NOTIFY-SVC-39-003 | TODO | Notifications Service Guild | NOTIFY-SVC-39-001 | Provide simulation engine/API to dry-run rules against historical events, returning matched actions with explanations. | Simulation endpoint returns deterministic results; explanation includes rule/field matches; integration tests pass. 
| +| NOTIFY-SVC-39-004 | TODO | Notifications Service Guild | NOTIFY-SVC-39-001 | Integrate quiet hour calendars and default throttles with audit logging and operator overrides. | Quiet schedules stored; overrides audited; preview API shows suppression windows; tests cover timezone handling. | + +## Sprint 40 – Escalations, Localization, Hardening +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| NOTIFY-SVC-40-001 | TODO | Notifications Service Guild | NOTIFY-SVC-39-001 | Implement escalations + on-call schedules, ack bridge, PagerDuty/OpsGenie adapters, and CLI/in-app inbox channels. | Escalation workflow operational; ack tokens flow; external adapters tested; inbox channel live. | +| NOTIFY-SVC-40-002 | TODO | Notifications Service Guild | NOTIFY-SVC-39-002 | Add summary storm breaker notifications, localization bundles, and localization fallback handling. | Storm breaker emits summaries; localization catalogs loaded; fallback behavior tested. | +| NOTIFY-SVC-40-003 | TODO | Notifications Service Guild | NOTIFY-SVC-38-004 | Harden security: signed ack links (KMS), webhook HMAC/IP allowlists, tenant isolation fuzz tests, HTML sanitization. | Ack tokens verified; webhook security enforced; fuzz tests green; sanitization validated. | +| NOTIFY-SVC-40-004 | TODO | Notifications Service Guild | NOTIFY-SVC-40-001..003 | Finalize observability (metrics/traces for escalations, latency), dead-letter handling, chaos tests for channel outages, and retention policies. | Metrics dashboards live; chaos run documented; DLQ drains; retention job operational. | + +## Authority-Backed Scopes & Tenancy (Epic 14) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| NOTIFY-TEN-48-001 | TODO | Notifications Service Guild | WEB-TEN-48-001 | Tenant-scope rules/templates/incidents, RLS on storage, tenant-prefixed channels, and inclusion of tenant context in notifications. | Notifications isolated per tenant; RLS enabled; tests cover cross-tenant leakage. | + +## Observability & Forensics (Epic 15) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| NOTIFY-OBS-51-001 | TODO | Notifications Service Guild, Observability Guild | DEVOPS-OBS-51-001, WEB-OBS-51-001 | Integrate SLO evaluator webhooks into Notifier rules (burn-rate breaches, health degradations) with templates, routing, and suppression logic. Provide sample policies and ensure imposed rule propagation. | Webhooks ingested; notifications delivered across channels; suppression guardrails tested; docs updated. | +| NOTIFY-OBS-55-001 | TODO | Notifications Service Guild, Ops Guild | DEVOPS-OBS-55-001, WEB-OBS-55-001 | Publish incident mode start/stop notifications with trace/evidence quick links, retention notes, and automatic escalation paths. Include quiet-hour overrides + legal compliance logging. | Incident notifications triggered in staging; CLI/Console deep links validated; audit logs capture scope usage. 
| + +## Air-Gapped Mode (Epic 16) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| NOTIFY-AIRGAP-56-001 | TODO | Notifications Service Guild | AIRGAP-CTL-56-002, AIRGAP-POL-56-001 | Disable external webhook targets in sealed mode, default to enclave-safe channels (SMTP relay, syslog, file sink), and surface remediation guidance. | Sealed mode blocks external channels; configuration validation raises errors; tests cover allowances. | +| NOTIFY-AIRGAP-56-002 | TODO | Notifications Service Guild, DevOps Guild | NOTIFY-AIRGAP-56-001, DEVOPS-AIRGAP-56-001 | Provide local notifier configurations bundled within Bootstrap Pack with deterministic secrets handling. | Offline config templates published; bootstrap script validated; docs updated. | +| NOTIFY-AIRGAP-57-001 | TODO | Notifications Service Guild, AirGap Time Guild | NOTIFY-AIRGAP-56-001, AIRGAP-TIME-58-001 | Send staleness drift and bundle import notifications with remediation steps. | Notifications emitted on thresholds; tests cover suppression/resend. | +| NOTIFY-AIRGAP-58-001 | TODO | Notifications Service Guild, Evidence Locker Guild | NOTIFY-AIRGAP-56-001, EVID-OBS-54-002 | Add portable evidence export completion notifications including checksum + location metadata. | Notification payload includes bundle details; audit logs recorded; CLI integration validated. | + +## SDKs & OpenAPI (Epic 17) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| NOTIFY-OAS-61-001 | TODO | Notifications Service Guild, API Contracts Guild | OAS-61-001 | Update notifier OAS with rules, templates, incidents, quiet hours endpoints using standard error envelope and examples. | Spec covers notifier APIs; lint passes; examples validated. | +| NOTIFY-OAS-61-002 | TODO | Notifications Service Guild | NOTIFY-OAS-61-001 | Implement `/.well-known/openapi` discovery endpoint with scope metadata. | Discovery endpoint live; contract tests cover response. | +| NOTIFY-OAS-62-001 | TODO | Notifications Service Guild, SDK Generator Guild | NOTIFY-OAS-61-001, SDKGEN-63-001 | Provide SDK usage examples for rule CRUD, incident ack, and quiet hours; ensure SDK smoke tests. | SDK tests cover notifier flows; docs embed snippets. | +| NOTIFY-OAS-63-001 | TODO | Notifications Service Guild, API Governance Guild | APIGOV-63-001 | Emit deprecation headers and Notifications templates for retiring notifier APIs. | Headers + notifications verified; documentation updated. | + +## Risk Profiles (Epic 18) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| NOTIFY-RISK-66-001 | TODO | Notifications Service Guild, Risk Engine Guild | RISK-ENGINE-68-001 | Add notification triggers for risk severity escalation/downgrade events with profile metadata in payload. | Trigger processed in staging; payload shows profile and explainability link; docs updated. | +| NOTIFY-RISK-67-001 | TODO | Notifications Service Guild, Policy Guild | POLICY-RISK-67-002 | Notify stakeholders when risk profiles are published, deprecated, or thresholds change. | Notifications delivered via email/chat; audit logs captured. | +| NOTIFY-RISK-68-001 | TODO | Notifications Service Guild | NOTIFY-RISK-66-001 | Support per-profile routing rules, quiet hours, and dedupe for risk alerts; integrate with CLI/Console preferences. 
| Routing/quiet-hour logic tested; UI exposes settings; metrics reflect dedupe. | + +## Attestor Console (Epic 19) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| NOTIFY-ATTEST-74-001 | TODO | Notifications Service Guild, Attestor Service Guild | ATTESTOR-73-002 | Create notification templates for verification failures, expiring attestations, key revocations, and transparency anomalies. | Templates deployed; staging verification failure triggers alert; documentation updated. | +| NOTIFY-ATTEST-74-002 | TODO | Notifications Service Guild, KMS Guild | KMS-73-001 | Wire notifications to key rotation/revocation events and transparency witness failures. | Rotation/revocation emits alerts; audit logs recorded; tests cover scenarios. | diff --git a/src/StellaOps.Notifier/docs/NOTIFY-SVC-38-001-FOUNDATIONS.md b/src/Notifier/StellaOps.Notifier/docs/NOTIFY-SVC-38-001-FOUNDATIONS.md similarity index 99% rename from src/StellaOps.Notifier/docs/NOTIFY-SVC-38-001-FOUNDATIONS.md rename to src/Notifier/StellaOps.Notifier/docs/NOTIFY-SVC-38-001-FOUNDATIONS.md index 427861f8..f3dc8136 100644 --- a/src/StellaOps.Notifier/docs/NOTIFY-SVC-38-001-FOUNDATIONS.md +++ b/src/Notifier/StellaOps.Notifier/docs/NOTIFY-SVC-38-001-FOUNDATIONS.md @@ -1,23 +1,23 @@ -# NOTIFY-SVC-38-001 — Notifier Foundations - -> **Status:** Implemented 2025-10-29 - -This note captures the bootstrap work for Notifications Studio phase 1. The refreshed `StellaOps.Notifier` solution now composes the shared Notify building blocks (models, storage, queue) into a runnable worker/web service capable of ingesting policy events, evaluating rules, and persisting delivery intents deterministically. - -## Highlights - -- **Rule evaluation:** Implemented `DefaultNotifyRuleEvaluator` (implements `StellaOps.Notify.Engine.INotifyRuleEvaluator`) reusing canonical `NotifyRule`/`NotifyEvent` models to gate on event kind, severity, labels, digests, verdicts, and VEX settings. -- **Storage:** Switched to `StellaOps.Notify.Storage.Mongo` (rules, deliveries, locks, migrations) with startup reflection host to apply migrations automatically. -- **Idempotency:** Deterministic keys derived from tenant/rule/action/event digest & GUID and persisted via `INotifyLockRepository` TTL locks; delivery metadata now records channel/template hints for later status transitions. -- **Queue:** Replaced the temporary in-memory queue with the shared `StellaOps.Notify.Queue` transport (Redis/NATS capable). Health checks surface queue reachability. -- **Worker/WebService:** Worker hosts `NotifierEventWorker` + `NotifierEventProcessor`, wiring queue -> rule evaluation -> Mongo delivery ledger. WebService now bootstraps storage + health endpoint ready for future CRUD. -- **Tests:** Updated unit coverage for rule evaluation + processor idempotency using in-memory repositories & queue stubs. -- **WebService shell:** Minimal ASP.NET host wired with infrastructure and health endpoint ready for upcoming CRUD/API work. -- **Tests:** Added unit coverage for rule matching and processor idempotency. - -## Follow-ups - -- Validate queue transport settings against ORCH-SVC-38-101 once the orchestrator contract finalizes (configure Redis/NATS URIs + credentials). -- Flesh out delivery ledger schema (status transitions, attempts) and connector integrations when channels/templates land (NOTIFY-SVC-38-002..004). -- Wire telemetry counters/histograms and structured logging to feed Observability tasks. 
-- Expand tests with integration harness using Mongo2Go + real queue transports after connectors exist; revisit delivery idempotency assertions once `INotifyLockRepository` semantics are wired to production stores. +# NOTIFY-SVC-38-001 — Notifier Foundations + +> **Status:** Implemented 2025-10-29 + +This note captures the bootstrap work for Notifications Studio phase 1. The refreshed `StellaOps.Notifier` solution now composes the shared Notify building blocks (models, storage, queue) into a runnable worker/web service capable of ingesting policy events, evaluating rules, and persisting delivery intents deterministically. + +## Highlights + +- **Rule evaluation:** Implemented `DefaultNotifyRuleEvaluator` (implements `StellaOps.Notify.Engine.INotifyRuleEvaluator`) reusing canonical `NotifyRule`/`NotifyEvent` models to gate on event kind, severity, labels, digests, verdicts, and VEX settings. +- **Storage:** Switched to `StellaOps.Notify.Storage.Mongo` (rules, deliveries, locks, migrations) with startup reflection host to apply migrations automatically. +- **Idempotency:** Deterministic keys derived from tenant/rule/action/event digest & GUID and persisted via `INotifyLockRepository` TTL locks; delivery metadata now records channel/template hints for later status transitions. +- **Queue:** Replaced the temporary in-memory queue with the shared `StellaOps.Notify.Queue` transport (Redis/NATS capable). Health checks surface queue reachability. +- **Worker/WebService:** Worker hosts `NotifierEventWorker` + `NotifierEventProcessor`, wiring queue -> rule evaluation -> Mongo delivery ledger. WebService now bootstraps storage + health endpoint ready for future CRUD. +- **Tests:** Updated unit coverage for rule evaluation + processor idempotency using in-memory repositories & queue stubs. +- **WebService shell:** Minimal ASP.NET host wired with infrastructure and health endpoint ready for upcoming CRUD/API work. +- **Tests:** Added unit coverage for rule matching and processor idempotency. + +## Follow-ups + +- Validate queue transport settings against ORCH-SVC-38-101 once the orchestrator contract finalizes (configure Redis/NATS URIs + credentials). +- Flesh out delivery ledger schema (status transitions, attempts) and connector integrations when channels/templates land (NOTIFY-SVC-38-002..004). +- Wire telemetry counters/histograms and structured logging to feed Observability tasks. +- Expand tests with integration harness using Mongo2Go + real queue transports after connectors exist; revisit delivery idempotency assertions once `INotifyLockRepository` semantics are wired to production stores. 
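> Editor's note on the "Idempotency" highlight in the foundations note above: the patch states that delivery keys are "derived from tenant/rule/action/event digest & GUID" and persisted as `INotifyLockRepository` TTL locks, but the derivation itself is not shown in this diff. The following is a minimal sketch of one way such a deterministic key could be computed; the helper name (`DeliveryIdempotency`), the field set, and the SHA-256/hex encoding are illustrative assumptions, not the actual `StellaOps.Notifier` implementation.

```csharp
using System;
using System.Security.Cryptography;
using System.Text;

// Illustrative only: derives a stable idempotency key from the tuple the
// foundations note describes (tenant / rule / action / event digest).
// The real Notifier code may hash different fields or use another format.
internal static class DeliveryIdempotency
{
    public static string ComputeKey(string tenantId, Guid ruleId, string actionId, string eventDigest)
    {
        // Normalise casing/whitespace so the same logical event always yields the same key.
        var canonical = string.Join(
            '|',
            tenantId.Trim().ToLowerInvariant(),
            ruleId.ToString("n"),
            actionId.Trim().ToLowerInvariant(),
            eventDigest.Trim().ToLowerInvariant());

        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(canonical));
        return Convert.ToHexString(hash).ToLowerInvariant();
    }
}
```

> A key shaped like this can then back a TTL lock (for example via `INotifyLockRepository`), so a replayed event for the same tenant/rule/action is acknowledged without producing a duplicate delivery record.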
diff --git a/src/StellaOps.Notify.WebService/AGENTS.md b/src/Notify/StellaOps.Notify.WebService/AGENTS.md similarity index 100% rename from src/StellaOps.Notify.WebService/AGENTS.md rename to src/Notify/StellaOps.Notify.WebService/AGENTS.md diff --git a/src/StellaOps.Notify.WebService/Contracts/ChannelHealthResponse.cs b/src/Notify/StellaOps.Notify.WebService/Contracts/ChannelHealthResponse.cs similarity index 96% rename from src/StellaOps.Notify.WebService/Contracts/ChannelHealthResponse.cs rename to src/Notify/StellaOps.Notify.WebService/Contracts/ChannelHealthResponse.cs index 25b87600..45a0b5f1 100644 --- a/src/StellaOps.Notify.WebService/Contracts/ChannelHealthResponse.cs +++ b/src/Notify/StellaOps.Notify.WebService/Contracts/ChannelHealthResponse.cs @@ -1,17 +1,17 @@ -using System; -using System.Collections.Generic; -using StellaOps.Notify.Engine; - -namespace StellaOps.Notify.WebService.Contracts; - -/// <summary> -/// Response payload describing channel health diagnostics. -/// </summary> -public sealed record ChannelHealthResponse( - string TenantId, - string ChannelId, - ChannelHealthStatus Status, - string? Message, - DateTimeOffset CheckedAt, - string TraceId, - IReadOnlyDictionary<string, string> Metadata); +using System; +using System.Collections.Generic; +using StellaOps.Notify.Engine; + +namespace StellaOps.Notify.WebService.Contracts; + +/// <summary> +/// Response payload describing channel health diagnostics. +/// </summary> +public sealed record ChannelHealthResponse( + string TenantId, + string ChannelId, + ChannelHealthStatus Status, + string? Message, + DateTimeOffset CheckedAt, + string TraceId, + IReadOnlyDictionary<string, string> Metadata); diff --git a/src/StellaOps.Notify.WebService/Contracts/ChannelTestSendRequest.cs b/src/Notify/StellaOps.Notify.WebService/Contracts/ChannelTestSendRequest.cs similarity index 100% rename from src/StellaOps.Notify.WebService/Contracts/ChannelTestSendRequest.cs rename to src/Notify/StellaOps.Notify.WebService/Contracts/ChannelTestSendRequest.cs diff --git a/src/StellaOps.Notify.WebService/Contracts/ChannelTestSendResponse.cs b/src/Notify/StellaOps.Notify.WebService/Contracts/ChannelTestSendResponse.cs similarity index 100% rename from src/StellaOps.Notify.WebService/Contracts/ChannelTestSendResponse.cs rename to src/Notify/StellaOps.Notify.WebService/Contracts/ChannelTestSendResponse.cs diff --git a/src/StellaOps.Notify.WebService/Contracts/LockRequests.cs b/src/Notify/StellaOps.Notify.WebService/Contracts/LockRequests.cs similarity index 100% rename from src/StellaOps.Notify.WebService/Contracts/LockRequests.cs rename to src/Notify/StellaOps.Notify.WebService/Contracts/LockRequests.cs diff --git a/src/StellaOps.Notify.WebService/Diagnostics/ServiceStatus.cs b/src/Notify/StellaOps.Notify.WebService/Diagnostics/ServiceStatus.cs similarity index 100% rename from src/StellaOps.Notify.WebService/Diagnostics/ServiceStatus.cs rename to src/Notify/StellaOps.Notify.WebService/Diagnostics/ServiceStatus.cs diff --git a/src/StellaOps.Notify.WebService/Extensions/ConfigurationExtensions.cs b/src/Notify/StellaOps.Notify.WebService/Extensions/ConfigurationExtensions.cs similarity index 100% rename from src/StellaOps.Notify.WebService/Extensions/ConfigurationExtensions.cs rename to src/Notify/StellaOps.Notify.WebService/Extensions/ConfigurationExtensions.cs diff --git a/src/StellaOps.Notify.WebService/Hosting/NotifyPluginHostFactory.cs b/src/Notify/StellaOps.Notify.WebService/Hosting/NotifyPluginHostFactory.cs similarity index 100% rename from 
src/StellaOps.Notify.WebService/Hosting/NotifyPluginHostFactory.cs rename to src/Notify/StellaOps.Notify.WebService/Hosting/NotifyPluginHostFactory.cs diff --git a/src/StellaOps.Notify.WebService/Internal/JsonHttpResult.cs b/src/Notify/StellaOps.Notify.WebService/Internal/JsonHttpResult.cs similarity index 100% rename from src/StellaOps.Notify.WebService/Internal/JsonHttpResult.cs rename to src/Notify/StellaOps.Notify.WebService/Internal/JsonHttpResult.cs diff --git a/src/StellaOps.Notify.WebService/Options/NotifyWebServiceOptions.cs b/src/Notify/StellaOps.Notify.WebService/Options/NotifyWebServiceOptions.cs similarity index 100% rename from src/StellaOps.Notify.WebService/Options/NotifyWebServiceOptions.cs rename to src/Notify/StellaOps.Notify.WebService/Options/NotifyWebServiceOptions.cs diff --git a/src/StellaOps.Notify.WebService/Options/NotifyWebServiceOptionsPostConfigure.cs b/src/Notify/StellaOps.Notify.WebService/Options/NotifyWebServiceOptionsPostConfigure.cs similarity index 100% rename from src/StellaOps.Notify.WebService/Options/NotifyWebServiceOptionsPostConfigure.cs rename to src/Notify/StellaOps.Notify.WebService/Options/NotifyWebServiceOptionsPostConfigure.cs diff --git a/src/StellaOps.Notify.WebService/Options/NotifyWebServiceOptionsValidator.cs b/src/Notify/StellaOps.Notify.WebService/Options/NotifyWebServiceOptionsValidator.cs similarity index 100% rename from src/StellaOps.Notify.WebService/Options/NotifyWebServiceOptionsValidator.cs rename to src/Notify/StellaOps.Notify.WebService/Options/NotifyWebServiceOptionsValidator.cs diff --git a/src/StellaOps.Notify.WebService/Plugins/NotifyPluginRegistry.cs b/src/Notify/StellaOps.Notify.WebService/Plugins/NotifyPluginRegistry.cs similarity index 100% rename from src/StellaOps.Notify.WebService/Plugins/NotifyPluginRegistry.cs rename to src/Notify/StellaOps.Notify.WebService/Plugins/NotifyPluginRegistry.cs diff --git a/src/StellaOps.Notify.WebService/Program.Partial.cs b/src/Notify/StellaOps.Notify.WebService/Program.Partial.cs similarity index 100% rename from src/StellaOps.Notify.WebService/Program.Partial.cs rename to src/Notify/StellaOps.Notify.WebService/Program.Partial.cs diff --git a/src/StellaOps.Notify.WebService/Program.cs b/src/Notify/StellaOps.Notify.WebService/Program.cs similarity index 100% rename from src/StellaOps.Notify.WebService/Program.cs rename to src/Notify/StellaOps.Notify.WebService/Program.cs diff --git a/src/StellaOps.Notify.WebService/Security/NotifyPolicies.cs b/src/Notify/StellaOps.Notify.WebService/Security/NotifyPolicies.cs similarity index 100% rename from src/StellaOps.Notify.WebService/Security/NotifyPolicies.cs rename to src/Notify/StellaOps.Notify.WebService/Security/NotifyPolicies.cs diff --git a/src/StellaOps.Notify.WebService/Security/NotifyRateLimitPolicies.cs b/src/Notify/StellaOps.Notify.WebService/Security/NotifyRateLimitPolicies.cs similarity index 100% rename from src/StellaOps.Notify.WebService/Security/NotifyRateLimitPolicies.cs rename to src/Notify/StellaOps.Notify.WebService/Security/NotifyRateLimitPolicies.cs diff --git a/src/StellaOps.Notify.WebService/Services/NotifyChannelHealthService.cs b/src/Notify/StellaOps.Notify.WebService/Services/NotifyChannelHealthService.cs similarity index 97% rename from src/StellaOps.Notify.WebService/Services/NotifyChannelHealthService.cs rename to src/Notify/StellaOps.Notify.WebService/Services/NotifyChannelHealthService.cs index 7e9e0c5d..e1c088a8 100644 --- a/src/StellaOps.Notify.WebService/Services/NotifyChannelHealthService.cs +++ 
b/src/Notify/StellaOps.Notify.WebService/Services/NotifyChannelHealthService.cs @@ -1,182 +1,182 @@ -using System; -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using StellaOps.Notify.Engine; -using StellaOps.Notify.Models; -using StellaOps.Notify.WebService.Contracts; - -namespace StellaOps.Notify.WebService.Services; - -internal interface INotifyChannelHealthService -{ - Task<ChannelHealthResponse> CheckAsync( - string tenantId, - NotifyChannel channel, - string traceId, - CancellationToken cancellationToken); -} - -internal sealed class NotifyChannelHealthService : INotifyChannelHealthService -{ - private readonly TimeProvider _timeProvider; - private readonly ILogger<NotifyChannelHealthService> _logger; - private readonly IReadOnlyDictionary<NotifyChannelType, INotifyChannelHealthProvider> _providers; - - public NotifyChannelHealthService( - TimeProvider timeProvider, - ILogger<NotifyChannelHealthService> logger, - IEnumerable<INotifyChannelHealthProvider> providers) - { - _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); - _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - _providers = BuildProviderMap(providers ?? Array.Empty<INotifyChannelHealthProvider>(), _logger); - } - - public async Task<ChannelHealthResponse> CheckAsync( - string tenantId, - NotifyChannel channel, - string traceId, - CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(channel); - - cancellationToken.ThrowIfCancellationRequested(); - - var target = ResolveTarget(channel); - var timestamp = _timeProvider.GetUtcNow(); - var context = new ChannelHealthContext( - tenantId, - channel, - target, - timestamp, - traceId); - - ChannelHealthResult? providerResult = null; - var providerName = "fallback"; - - if (_providers.TryGetValue(channel.Type, out var provider)) - { - try - { - providerResult = await provider.CheckAsync(context, cancellationToken).ConfigureAwait(false); - providerName = provider.GetType().FullName ?? provider.GetType().Name; - } - catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) - { - throw; - } - catch (Exception ex) - { - _logger.LogWarning( - ex, - "Notify channel health provider {Provider} failed for tenant {TenantId}, channel {ChannelId} ({ChannelType}).", - provider.GetType().FullName, - tenantId, - channel.ChannelId, - channel.Type); - providerResult = new ChannelHealthResult( - ChannelHealthStatus.Degraded, - "Channel health provider threw an exception. See logs for details.", - new Dictionary<string, string>(StringComparer.Ordinal)); - } - } - - var metadata = MergeMetadata(context, providerName, providerResult?.Metadata); - var status = providerResult?.Status ?? ChannelHealthStatus.Healthy; - var message = providerResult?.Message ?? 
"Channel metadata returned without provider-specific diagnostics."; - - var response = new ChannelHealthResponse( - tenantId, - channel.ChannelId, - status, - message, - timestamp, - traceId, - metadata); - - _logger.LogInformation( - "Notify channel health generated for tenant {TenantId}, channel {ChannelId} ({ChannelType}) using provider {Provider}.", - tenantId, - channel.ChannelId, - channel.Type, - providerName); - - return response; - } - - private static IReadOnlyDictionary<NotifyChannelType, INotifyChannelHealthProvider> BuildProviderMap( - IEnumerable<INotifyChannelHealthProvider> providers, - ILogger logger) - { - var map = new Dictionary<NotifyChannelType, INotifyChannelHealthProvider>(); - foreach (var provider in providers) - { - if (provider is null) - { - continue; - } - - if (map.TryGetValue(provider.ChannelType, out var existing)) - { - logger?.LogWarning( - "Multiple Notify channel health providers registered for {ChannelType}. Keeping {ExistingProvider} and ignoring {NewProvider}.", - provider.ChannelType, - existing.GetType().FullName, - provider.GetType().FullName); - continue; - } - - map[provider.ChannelType] = provider; - } - - return map; - } - - private static string ResolveTarget(NotifyChannel channel) - { - var target = channel.Config.Target ?? channel.Config.Endpoint; - if (string.IsNullOrWhiteSpace(target)) - { - return channel.Name; - } - - return target; - } - - private static IReadOnlyDictionary<string, string> MergeMetadata( - ChannelHealthContext context, - string providerName, - IReadOnlyDictionary<string, string>? providerMetadata) - { - var metadata = new Dictionary<string, string>(StringComparer.Ordinal) - { - ["channelType"] = context.Channel.Type.ToString().ToLowerInvariant(), - ["target"] = context.Target, - ["previewProvider"] = providerName, - ["traceId"] = context.TraceId, - ["channelEnabled"] = context.Channel.Enabled.ToString() - }; - - foreach (var label in context.Channel.Labels) - { - metadata[$"label.{label.Key}"] = label.Value; - } - - if (providerMetadata is not null) - { - foreach (var pair in providerMetadata) - { - if (string.IsNullOrWhiteSpace(pair.Key) || pair.Value is null) - { - continue; - } - - metadata[pair.Key.Trim()] = pair.Value; - } - } - - return metadata; - } -} +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using StellaOps.Notify.Engine; +using StellaOps.Notify.Models; +using StellaOps.Notify.WebService.Contracts; + +namespace StellaOps.Notify.WebService.Services; + +internal interface INotifyChannelHealthService +{ + Task<ChannelHealthResponse> CheckAsync( + string tenantId, + NotifyChannel channel, + string traceId, + CancellationToken cancellationToken); +} + +internal sealed class NotifyChannelHealthService : INotifyChannelHealthService +{ + private readonly TimeProvider _timeProvider; + private readonly ILogger<NotifyChannelHealthService> _logger; + private readonly IReadOnlyDictionary<NotifyChannelType, INotifyChannelHealthProvider> _providers; + + public NotifyChannelHealthService( + TimeProvider timeProvider, + ILogger<NotifyChannelHealthService> logger, + IEnumerable<INotifyChannelHealthProvider> providers) + { + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _providers = BuildProviderMap(providers ?? 
Array.Empty<INotifyChannelHealthProvider>(), _logger); + } + + public async Task<ChannelHealthResponse> CheckAsync( + string tenantId, + NotifyChannel channel, + string traceId, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(channel); + + cancellationToken.ThrowIfCancellationRequested(); + + var target = ResolveTarget(channel); + var timestamp = _timeProvider.GetUtcNow(); + var context = new ChannelHealthContext( + tenantId, + channel, + target, + timestamp, + traceId); + + ChannelHealthResult? providerResult = null; + var providerName = "fallback"; + + if (_providers.TryGetValue(channel.Type, out var provider)) + { + try + { + providerResult = await provider.CheckAsync(context, cancellationToken).ConfigureAwait(false); + providerName = provider.GetType().FullName ?? provider.GetType().Name; + } + catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) + { + throw; + } + catch (Exception ex) + { + _logger.LogWarning( + ex, + "Notify channel health provider {Provider} failed for tenant {TenantId}, channel {ChannelId} ({ChannelType}).", + provider.GetType().FullName, + tenantId, + channel.ChannelId, + channel.Type); + providerResult = new ChannelHealthResult( + ChannelHealthStatus.Degraded, + "Channel health provider threw an exception. See logs for details.", + new Dictionary<string, string>(StringComparer.Ordinal)); + } + } + + var metadata = MergeMetadata(context, providerName, providerResult?.Metadata); + var status = providerResult?.Status ?? ChannelHealthStatus.Healthy; + var message = providerResult?.Message ?? "Channel metadata returned without provider-specific diagnostics."; + + var response = new ChannelHealthResponse( + tenantId, + channel.ChannelId, + status, + message, + timestamp, + traceId, + metadata); + + _logger.LogInformation( + "Notify channel health generated for tenant {TenantId}, channel {ChannelId} ({ChannelType}) using provider {Provider}.", + tenantId, + channel.ChannelId, + channel.Type, + providerName); + + return response; + } + + private static IReadOnlyDictionary<NotifyChannelType, INotifyChannelHealthProvider> BuildProviderMap( + IEnumerable<INotifyChannelHealthProvider> providers, + ILogger logger) + { + var map = new Dictionary<NotifyChannelType, INotifyChannelHealthProvider>(); + foreach (var provider in providers) + { + if (provider is null) + { + continue; + } + + if (map.TryGetValue(provider.ChannelType, out var existing)) + { + logger?.LogWarning( + "Multiple Notify channel health providers registered for {ChannelType}. Keeping {ExistingProvider} and ignoring {NewProvider}.", + provider.ChannelType, + existing.GetType().FullName, + provider.GetType().FullName); + continue; + } + + map[provider.ChannelType] = provider; + } + + return map; + } + + private static string ResolveTarget(NotifyChannel channel) + { + var target = channel.Config.Target ?? channel.Config.Endpoint; + if (string.IsNullOrWhiteSpace(target)) + { + return channel.Name; + } + + return target; + } + + private static IReadOnlyDictionary<string, string> MergeMetadata( + ChannelHealthContext context, + string providerName, + IReadOnlyDictionary<string, string>? 
providerMetadata) + { + var metadata = new Dictionary<string, string>(StringComparer.Ordinal) + { + ["channelType"] = context.Channel.Type.ToString().ToLowerInvariant(), + ["target"] = context.Target, + ["previewProvider"] = providerName, + ["traceId"] = context.TraceId, + ["channelEnabled"] = context.Channel.Enabled.ToString() + }; + + foreach (var label in context.Channel.Labels) + { + metadata[$"label.{label.Key}"] = label.Value; + } + + if (providerMetadata is not null) + { + foreach (var pair in providerMetadata) + { + if (string.IsNullOrWhiteSpace(pair.Key) || pair.Value is null) + { + continue; + } + + metadata[pair.Key.Trim()] = pair.Value; + } + } + + return metadata; + } +} diff --git a/src/StellaOps.Notify.WebService/Services/NotifyChannelTestService.cs b/src/Notify/StellaOps.Notify.WebService/Services/NotifyChannelTestService.cs similarity index 100% rename from src/StellaOps.Notify.WebService/Services/NotifyChannelTestService.cs rename to src/Notify/StellaOps.Notify.WebService/Services/NotifyChannelTestService.cs diff --git a/src/StellaOps.Notify.WebService/Services/NotifySchemaMigrationService.cs b/src/Notify/StellaOps.Notify.WebService/Services/NotifySchemaMigrationService.cs similarity index 100% rename from src/StellaOps.Notify.WebService/Services/NotifySchemaMigrationService.cs rename to src/Notify/StellaOps.Notify.WebService/Services/NotifySchemaMigrationService.cs diff --git a/src/Notify/StellaOps.Notify.WebService/StellaOps.Notify.WebService.csproj b/src/Notify/StellaOps.Notify.WebService/StellaOps.Notify.WebService.csproj new file mode 100644 index 00000000..22b00a41 --- /dev/null +++ b/src/Notify/StellaOps.Notify.WebService/StellaOps.Notify.WebService.csproj @@ -0,0 +1,28 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk.Web"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + + <ItemGroup> + <PackageReference Include="Serilog.AspNetCore" Version="8.0.1" /> + <PackageReference Include="Serilog.Sinks.Console" Version="5.0.1" /> + <PackageReference Include="YamlDotNet" Version="13.7.1" /> + </ItemGroup> + + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Configuration/StellaOps.Configuration.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj" /> + <ProjectReference Include="../__Libraries/StellaOps.Notify.Models/StellaOps.Notify.Models.csproj" /> + <ProjectReference Include="../__Libraries/StellaOps.Notify.Storage.Mongo/StellaOps.Notify.Storage.Mongo.csproj" /> + <ProjectReference Include="../__Libraries/StellaOps.Notify.Engine/StellaOps.Notify.Engine.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" /> + <ProjectReference Include="../../Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOps.Auth.Abstractions.csproj" /> + <ProjectReference Include="../../Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOps.Auth.Client.csproj" /> + <ProjectReference Include="../../Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOps.Auth.ServerIntegration.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Notify.WebService/Storage/InMemory/InMemoryStorageModule.cs 
b/src/Notify/StellaOps.Notify.WebService/Storage/InMemory/InMemoryStorageModule.cs similarity index 100% rename from src/StellaOps.Notify.WebService/Storage/InMemory/InMemoryStorageModule.cs rename to src/Notify/StellaOps.Notify.WebService/Storage/InMemory/InMemoryStorageModule.cs diff --git a/src/Notify/StellaOps.Notify.WebService/TASKS.md b/src/Notify/StellaOps.Notify.WebService/TASKS.md new file mode 100644 index 00000000..0312cb55 --- /dev/null +++ b/src/Notify/StellaOps.Notify.WebService/TASKS.md @@ -0,0 +1,2 @@ +# Notify WebService Task Board (Sprint 15) +> Archived 2025-10-26 — control plane now lives in `src/Notifier/StellaOps.Notifier` (Sprints 38–40). diff --git a/src/StellaOps.Notify.Worker/AGENTS.md b/src/Notify/StellaOps.Notify.Worker/AGENTS.md similarity index 100% rename from src/StellaOps.Notify.Worker/AGENTS.md rename to src/Notify/StellaOps.Notify.Worker/AGENTS.md diff --git a/src/StellaOps.Notify.Worker/Handlers/INotifyEventHandler.cs b/src/Notify/StellaOps.Notify.Worker/Handlers/INotifyEventHandler.cs similarity index 96% rename from src/StellaOps.Notify.Worker/Handlers/INotifyEventHandler.cs rename to src/Notify/StellaOps.Notify.Worker/Handlers/INotifyEventHandler.cs index 5387c9df..54c0516f 100644 --- a/src/StellaOps.Notify.Worker/Handlers/INotifyEventHandler.cs +++ b/src/Notify/StellaOps.Notify.Worker/Handlers/INotifyEventHandler.cs @@ -1,10 +1,10 @@ -using System.Threading; -using System.Threading.Tasks; -using StellaOps.Notify.Queue; - -namespace StellaOps.Notify.Worker.Handlers; - -public interface INotifyEventHandler -{ - Task HandleAsync(NotifyQueueEventMessage message, CancellationToken cancellationToken); -} +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Notify.Queue; + +namespace StellaOps.Notify.Worker.Handlers; + +public interface INotifyEventHandler +{ + Task HandleAsync(NotifyQueueEventMessage message, CancellationToken cancellationToken); +} diff --git a/src/StellaOps.Notify.Worker/Handlers/NoOpNotifyEventHandler.cs b/src/Notify/StellaOps.Notify.Worker/Handlers/NoOpNotifyEventHandler.cs similarity index 96% rename from src/StellaOps.Notify.Worker/Handlers/NoOpNotifyEventHandler.cs rename to src/Notify/StellaOps.Notify.Worker/Handlers/NoOpNotifyEventHandler.cs index 11b4594c..84cd9e03 100644 --- a/src/StellaOps.Notify.Worker/Handlers/NoOpNotifyEventHandler.cs +++ b/src/Notify/StellaOps.Notify.Worker/Handlers/NoOpNotifyEventHandler.cs @@ -1,25 +1,25 @@ -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using StellaOps.Notify.Queue; - -namespace StellaOps.Notify.Worker.Handlers; - -internal sealed class NoOpNotifyEventHandler : INotifyEventHandler -{ - private readonly ILogger<NoOpNotifyEventHandler> _logger; - - public NoOpNotifyEventHandler(ILogger<NoOpNotifyEventHandler> logger) - { - _logger = logger; - } - - public Task HandleAsync(NotifyQueueEventMessage message, CancellationToken cancellationToken) - { - _logger.LogDebug( - "No-op handler acknowledged event {EventId} (tenant {TenantId}).", - message.Event.EventId, - message.TenantId); - return Task.CompletedTask; - } -} +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using StellaOps.Notify.Queue; + +namespace StellaOps.Notify.Worker.Handlers; + +internal sealed class NoOpNotifyEventHandler : INotifyEventHandler +{ + private readonly ILogger<NoOpNotifyEventHandler> _logger; + + public NoOpNotifyEventHandler(ILogger<NoOpNotifyEventHandler> logger) + { + _logger = logger; + } + + public 
Task HandleAsync(NotifyQueueEventMessage message, CancellationToken cancellationToken) + { + _logger.LogDebug( + "No-op handler acknowledged event {EventId} (tenant {TenantId}).", + message.Event.EventId, + message.TenantId); + return Task.CompletedTask; + } +} diff --git a/src/StellaOps.Notify.Worker/NotifyWorkerOptions.cs b/src/Notify/StellaOps.Notify.Worker/NotifyWorkerOptions.cs similarity index 96% rename from src/StellaOps.Notify.Worker/NotifyWorkerOptions.cs rename to src/Notify/StellaOps.Notify.Worker/NotifyWorkerOptions.cs index f403a33c..c2f43070 100644 --- a/src/StellaOps.Notify.Worker/NotifyWorkerOptions.cs +++ b/src/Notify/StellaOps.Notify.Worker/NotifyWorkerOptions.cs @@ -1,52 +1,52 @@ -using System; - -namespace StellaOps.Notify.Worker; - -public sealed class NotifyWorkerOptions -{ - /// <summary> - /// Worker identifier prefix; defaults to machine name. - /// </summary> - public string? WorkerId { get; set; } - - /// <summary> - /// Number of messages to lease per iteration. - /// </summary> - public int LeaseBatchSize { get; set; } = 16; - - /// <summary> - /// Duration a lease remains active before it becomes eligible for claim. - /// </summary> - public TimeSpan LeaseDuration { get; set; } = TimeSpan.FromSeconds(30); - - /// <summary> - /// Delay applied when no work is available. - /// </summary> - public TimeSpan IdleDelay { get; set; } = TimeSpan.FromMilliseconds(250); - - /// <summary> - /// Maximum number of event leases processed concurrently. - /// </summary> - public int MaxConcurrency { get; set; } = 4; - - /// <summary> - /// Maximum number of consecutive failures before the worker delays. - /// </summary> - public int FailureBackoffThreshold { get; set; } = 3; - - /// <summary> - /// Delay applied when the failure threshold is reached. - /// </summary> - public TimeSpan FailureBackoffDelay { get; set; } = TimeSpan.FromSeconds(5); - - internal string ResolveWorkerId() - { - if (!string.IsNullOrWhiteSpace(WorkerId)) - { - return WorkerId!; - } - - var host = Environment.MachineName; - return $"{host}-{Guid.NewGuid():n}"; - } -} +using System; + +namespace StellaOps.Notify.Worker; + +public sealed class NotifyWorkerOptions +{ + /// <summary> + /// Worker identifier prefix; defaults to machine name. + /// </summary> + public string? WorkerId { get; set; } + + /// <summary> + /// Number of messages to lease per iteration. + /// </summary> + public int LeaseBatchSize { get; set; } = 16; + + /// <summary> + /// Duration a lease remains active before it becomes eligible for claim. + /// </summary> + public TimeSpan LeaseDuration { get; set; } = TimeSpan.FromSeconds(30); + + /// <summary> + /// Delay applied when no work is available. + /// </summary> + public TimeSpan IdleDelay { get; set; } = TimeSpan.FromMilliseconds(250); + + /// <summary> + /// Maximum number of event leases processed concurrently. + /// </summary> + public int MaxConcurrency { get; set; } = 4; + + /// <summary> + /// Maximum number of consecutive failures before the worker delays. + /// </summary> + public int FailureBackoffThreshold { get; set; } = 3; + + /// <summary> + /// Delay applied when the failure threshold is reached. 
+ /// </summary> + public TimeSpan FailureBackoffDelay { get; set; } = TimeSpan.FromSeconds(5); + + internal string ResolveWorkerId() + { + if (!string.IsNullOrWhiteSpace(WorkerId)) + { + return WorkerId!; + } + + var host = Environment.MachineName; + return $"{host}-{Guid.NewGuid():n}"; + } +} diff --git a/src/StellaOps.Notify.Worker/Processing/NotifyEventLeaseProcessor.cs b/src/Notify/StellaOps.Notify.Worker/Processing/NotifyEventLeaseProcessor.cs similarity index 97% rename from src/StellaOps.Notify.Worker/Processing/NotifyEventLeaseProcessor.cs rename to src/Notify/StellaOps.Notify.Worker/Processing/NotifyEventLeaseProcessor.cs index 87aee894..5945ec21 100644 --- a/src/StellaOps.Notify.Worker/Processing/NotifyEventLeaseProcessor.cs +++ b/src/Notify/StellaOps.Notify.Worker/Processing/NotifyEventLeaseProcessor.cs @@ -1,146 +1,146 @@ -using System; -using System.Collections.Generic; -using System.Diagnostics; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Notify.Queue; -using StellaOps.Notify.Worker.Handlers; - -namespace StellaOps.Notify.Worker.Processing; - -internal sealed class NotifyEventLeaseProcessor -{ - private static readonly ActivitySource ActivitySource = new("StellaOps.Notify.Worker"); - - private readonly INotifyEventQueue _queue; - private readonly INotifyEventHandler _handler; - private readonly NotifyWorkerOptions _options; - private readonly ILogger<NotifyEventLeaseProcessor> _logger; - private readonly TimeProvider _timeProvider; - private readonly string _workerId; - - public NotifyEventLeaseProcessor( - INotifyEventQueue queue, - INotifyEventHandler handler, - IOptions<NotifyWorkerOptions> options, - ILogger<NotifyEventLeaseProcessor> logger, - TimeProvider timeProvider) - { - _queue = queue ?? throw new ArgumentNullException(nameof(queue)); - _handler = handler ?? throw new ArgumentNullException(nameof(handler)); - _options = options?.Value ?? throw new ArgumentNullException(nameof(options)); - _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - _timeProvider = timeProvider ?? TimeProvider.System; - _workerId = _options.ResolveWorkerId(); - } - - public async Task<int> ProcessOnceAsync(CancellationToken cancellationToken) - { - cancellationToken.ThrowIfCancellationRequested(); - - var leaseRequest = new NotifyQueueLeaseRequest( - consumer: _workerId, - batchSize: Math.Max(1, _options.LeaseBatchSize), - leaseDuration: _options.LeaseDuration <= TimeSpan.Zero ? TimeSpan.FromSeconds(30) : _options.LeaseDuration); - - IReadOnlyList<INotifyQueueLease<NotifyQueueEventMessage>> leases; - try - { - leases = await _queue.LeaseAsync(leaseRequest, cancellationToken).ConfigureAwait(false); - } - catch (Exception ex) - { - _logger.LogError(ex, "Failed to lease Notify events."); - throw; - } - - if (leases.Count == 0) - { - return 0; - } - - var processed = 0; - foreach (var lease in leases) - { - cancellationToken.ThrowIfCancellationRequested(); - processed++; - await ProcessLeaseAsync(lease, cancellationToken).ConfigureAwait(false); - } - - return processed; - } - - private async Task ProcessLeaseAsync( - INotifyQueueLease<NotifyQueueEventMessage> lease, - CancellationToken cancellationToken) - { - var message = lease.Message; - var correlationId = message.TraceId ?? 
message.Event.EventId.ToString("N"); - - using var scope = _logger.BeginScope(new Dictionary<string, object?> - { - ["notifyTraceId"] = correlationId, - ["notifyTenantId"] = message.TenantId, - ["notifyEventId"] = message.Event.EventId, - ["notifyAttempt"] = lease.Attempt - }); - - using var activity = ActivitySource.StartActivity("notify.event.process", ActivityKind.Consumer); - activity?.SetTag("notify.tenant_id", message.TenantId); - activity?.SetTag("notify.event_id", message.Event.EventId); - activity?.SetTag("notify.attempt", lease.Attempt); - activity?.SetTag("notify.worker_id", _workerId); - - try - { - _logger.LogInformation( - "Processing notify event {EventId} (tenant {TenantId}, attempt {Attempt}).", - message.Event.EventId, - message.TenantId, - lease.Attempt); - - await _handler.HandleAsync(message, cancellationToken).ConfigureAwait(false); - - await lease.AcknowledgeAsync(cancellationToken).ConfigureAwait(false); - _logger.LogInformation( - "Acknowledged notify event {EventId} (tenant {TenantId}).", - message.Event.EventId, - message.TenantId); - } - catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) - { - _logger.LogWarning( - "Worker cancellation requested while processing event {EventId}; returning lease to queue.", - message.Event.EventId); - - await SafeReleaseAsync(lease, NotifyQueueReleaseDisposition.Retry, CancellationToken.None).ConfigureAwait(false); - throw; - } - catch (Exception ex) - { - _logger.LogError( - ex, - "Failed to process notify event {EventId}; scheduling retry.", - message.Event.EventId); - - await SafeReleaseAsync(lease, NotifyQueueReleaseDisposition.Retry, cancellationToken).ConfigureAwait(false); - } - } - - private static async Task SafeReleaseAsync( - INotifyQueueLease<NotifyQueueEventMessage> lease, - NotifyQueueReleaseDisposition disposition, - CancellationToken cancellationToken) - { - try - { - await lease.ReleaseAsync(disposition, cancellationToken).ConfigureAwait(false); - } - catch when (cancellationToken.IsCancellationRequested) - { - // Suppress release errors during shutdown. - } - } -} +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Notify.Queue; +using StellaOps.Notify.Worker.Handlers; + +namespace StellaOps.Notify.Worker.Processing; + +internal sealed class NotifyEventLeaseProcessor +{ + private static readonly ActivitySource ActivitySource = new("StellaOps.Notify.Worker"); + + private readonly INotifyEventQueue _queue; + private readonly INotifyEventHandler _handler; + private readonly NotifyWorkerOptions _options; + private readonly ILogger<NotifyEventLeaseProcessor> _logger; + private readonly TimeProvider _timeProvider; + private readonly string _workerId; + + public NotifyEventLeaseProcessor( + INotifyEventQueue queue, + INotifyEventHandler handler, + IOptions<NotifyWorkerOptions> options, + ILogger<NotifyEventLeaseProcessor> logger, + TimeProvider timeProvider) + { + _queue = queue ?? throw new ArgumentNullException(nameof(queue)); + _handler = handler ?? throw new ArgumentNullException(nameof(handler)); + _options = options?.Value ?? throw new ArgumentNullException(nameof(options)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _timeProvider = timeProvider ?? 
TimeProvider.System; + _workerId = _options.ResolveWorkerId(); + } + + public async Task<int> ProcessOnceAsync(CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + + var leaseRequest = new NotifyQueueLeaseRequest( + consumer: _workerId, + batchSize: Math.Max(1, _options.LeaseBatchSize), + leaseDuration: _options.LeaseDuration <= TimeSpan.Zero ? TimeSpan.FromSeconds(30) : _options.LeaseDuration); + + IReadOnlyList<INotifyQueueLease<NotifyQueueEventMessage>> leases; + try + { + leases = await _queue.LeaseAsync(leaseRequest, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) + { + _logger.LogError(ex, "Failed to lease Notify events."); + throw; + } + + if (leases.Count == 0) + { + return 0; + } + + var processed = 0; + foreach (var lease in leases) + { + cancellationToken.ThrowIfCancellationRequested(); + processed++; + await ProcessLeaseAsync(lease, cancellationToken).ConfigureAwait(false); + } + + return processed; + } + + private async Task ProcessLeaseAsync( + INotifyQueueLease<NotifyQueueEventMessage> lease, + CancellationToken cancellationToken) + { + var message = lease.Message; + var correlationId = message.TraceId ?? message.Event.EventId.ToString("N"); + + using var scope = _logger.BeginScope(new Dictionary<string, object?> + { + ["notifyTraceId"] = correlationId, + ["notifyTenantId"] = message.TenantId, + ["notifyEventId"] = message.Event.EventId, + ["notifyAttempt"] = lease.Attempt + }); + + using var activity = ActivitySource.StartActivity("notify.event.process", ActivityKind.Consumer); + activity?.SetTag("notify.tenant_id", message.TenantId); + activity?.SetTag("notify.event_id", message.Event.EventId); + activity?.SetTag("notify.attempt", lease.Attempt); + activity?.SetTag("notify.worker_id", _workerId); + + try + { + _logger.LogInformation( + "Processing notify event {EventId} (tenant {TenantId}, attempt {Attempt}).", + message.Event.EventId, + message.TenantId, + lease.Attempt); + + await _handler.HandleAsync(message, cancellationToken).ConfigureAwait(false); + + await lease.AcknowledgeAsync(cancellationToken).ConfigureAwait(false); + _logger.LogInformation( + "Acknowledged notify event {EventId} (tenant {TenantId}).", + message.Event.EventId, + message.TenantId); + } + catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) + { + _logger.LogWarning( + "Worker cancellation requested while processing event {EventId}; returning lease to queue.", + message.Event.EventId); + + await SafeReleaseAsync(lease, NotifyQueueReleaseDisposition.Retry, CancellationToken.None).ConfigureAwait(false); + throw; + } + catch (Exception ex) + { + _logger.LogError( + ex, + "Failed to process notify event {EventId}; scheduling retry.", + message.Event.EventId); + + await SafeReleaseAsync(lease, NotifyQueueReleaseDisposition.Retry, cancellationToken).ConfigureAwait(false); + } + } + + private static async Task SafeReleaseAsync( + INotifyQueueLease<NotifyQueueEventMessage> lease, + NotifyQueueReleaseDisposition disposition, + CancellationToken cancellationToken) + { + try + { + await lease.ReleaseAsync(disposition, cancellationToken).ConfigureAwait(false); + } + catch when (cancellationToken.IsCancellationRequested) + { + // Suppress release errors during shutdown. 
+ } + } +} diff --git a/src/StellaOps.Notify.Worker/Processing/NotifyEventLeaseWorker.cs b/src/Notify/StellaOps.Notify.Worker/Processing/NotifyEventLeaseWorker.cs similarity index 97% rename from src/StellaOps.Notify.Worker/Processing/NotifyEventLeaseWorker.cs rename to src/Notify/StellaOps.Notify.Worker/Processing/NotifyEventLeaseWorker.cs index 7f968b83..16949813 100644 --- a/src/StellaOps.Notify.Worker/Processing/NotifyEventLeaseWorker.cs +++ b/src/Notify/StellaOps.Notify.Worker/Processing/NotifyEventLeaseWorker.cs @@ -1,63 +1,63 @@ -using System; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Hosting; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; - -namespace StellaOps.Notify.Worker.Processing; - -internal sealed class NotifyEventLeaseWorker : BackgroundService -{ - private readonly NotifyEventLeaseProcessor _processor; - private readonly NotifyWorkerOptions _options; - private readonly ILogger<NotifyEventLeaseWorker> _logger; - - public NotifyEventLeaseWorker( - NotifyEventLeaseProcessor processor, - IOptions<NotifyWorkerOptions> options, - ILogger<NotifyEventLeaseWorker> logger) - { - _processor = processor ?? throw new ArgumentNullException(nameof(processor)); - _options = options?.Value ?? throw new ArgumentNullException(nameof(options)); - _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - } - - protected override async Task ExecuteAsync(CancellationToken stoppingToken) - { - var idleDelay = _options.IdleDelay <= TimeSpan.Zero - ? TimeSpan.FromMilliseconds(500) - : _options.IdleDelay; - - while (!stoppingToken.IsCancellationRequested) - { - int processed; - try - { - processed = await _processor.ProcessOnceAsync(stoppingToken).ConfigureAwait(false); - } - catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested) - { - break; - } - catch (Exception ex) - { - _logger.LogError(ex, "Notify worker processing loop encountered an error."); - await Task.Delay(_options.FailureBackoffDelay, stoppingToken).ConfigureAwait(false); - continue; - } - - if (processed == 0) - { - try - { - await Task.Delay(idleDelay, stoppingToken).ConfigureAwait(false); - } - catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested) - { - break; - } - } - } - } -} +using System; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; + +namespace StellaOps.Notify.Worker.Processing; + +internal sealed class NotifyEventLeaseWorker : BackgroundService +{ + private readonly NotifyEventLeaseProcessor _processor; + private readonly NotifyWorkerOptions _options; + private readonly ILogger<NotifyEventLeaseWorker> _logger; + + public NotifyEventLeaseWorker( + NotifyEventLeaseProcessor processor, + IOptions<NotifyWorkerOptions> options, + ILogger<NotifyEventLeaseWorker> logger) + { + _processor = processor ?? throw new ArgumentNullException(nameof(processor)); + _options = options?.Value ?? throw new ArgumentNullException(nameof(options)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + protected override async Task ExecuteAsync(CancellationToken stoppingToken) + { + var idleDelay = _options.IdleDelay <= TimeSpan.Zero + ? 
TimeSpan.FromMilliseconds(500) + : _options.IdleDelay; + + while (!stoppingToken.IsCancellationRequested) + { + int processed; + try + { + processed = await _processor.ProcessOnceAsync(stoppingToken).ConfigureAwait(false); + } + catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested) + { + break; + } + catch (Exception ex) + { + _logger.LogError(ex, "Notify worker processing loop encountered an error."); + await Task.Delay(_options.FailureBackoffDelay, stoppingToken).ConfigureAwait(false); + continue; + } + + if (processed == 0) + { + try + { + await Task.Delay(idleDelay, stoppingToken).ConfigureAwait(false); + } + catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested) + { + break; + } + } + } + } +} diff --git a/src/StellaOps.Notify.Worker/Program.cs b/src/Notify/StellaOps.Notify.Worker/Program.cs similarity index 97% rename from src/StellaOps.Notify.Worker/Program.cs rename to src/Notify/StellaOps.Notify.Worker/Program.cs index 8ec683ef..80690b46 100644 --- a/src/StellaOps.Notify.Worker/Program.cs +++ b/src/Notify/StellaOps.Notify.Worker/Program.cs @@ -1,33 +1,33 @@ -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Hosting; -using Microsoft.Extensions.Logging; -using StellaOps.Notify.Queue; -using StellaOps.Notify.Worker; -using StellaOps.Notify.Worker.Handlers; -using StellaOps.Notify.Worker.Processing; - -var builder = Host.CreateApplicationBuilder(args); - -builder.Configuration - .AddJsonFile("appsettings.json", optional: true, reloadOnChange: true) - .AddEnvironmentVariables(prefix: "NOTIFY_"); - -builder.Logging.ClearProviders(); -builder.Logging.AddSimpleConsole(options => -{ - options.TimestampFormat = "yyyy-MM-ddTHH:mm:ss.fffZ "; - options.UseUtcTimestamp = true; -}); - -builder.Services.Configure<NotifyWorkerOptions>(builder.Configuration.GetSection("notify:worker")); -builder.Services.AddSingleton(TimeProvider.System); - -builder.Services.AddNotifyEventQueue(builder.Configuration, "notify:queue"); -builder.Services.AddNotifyDeliveryQueue(builder.Configuration, "notify:deliveryQueue"); - -builder.Services.AddSingleton<INotifyEventHandler, NoOpNotifyEventHandler>(); -builder.Services.AddSingleton<NotifyEventLeaseProcessor>(); -builder.Services.AddHostedService<NotifyEventLeaseWorker>(); - -await builder.Build().RunAsync().ConfigureAwait(false); +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using StellaOps.Notify.Queue; +using StellaOps.Notify.Worker; +using StellaOps.Notify.Worker.Handlers; +using StellaOps.Notify.Worker.Processing; + +var builder = Host.CreateApplicationBuilder(args); + +builder.Configuration + .AddJsonFile("appsettings.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables(prefix: "NOTIFY_"); + +builder.Logging.ClearProviders(); +builder.Logging.AddSimpleConsole(options => +{ + options.TimestampFormat = "yyyy-MM-ddTHH:mm:ss.fffZ "; + options.UseUtcTimestamp = true; +}); + +builder.Services.Configure<NotifyWorkerOptions>(builder.Configuration.GetSection("notify:worker")); +builder.Services.AddSingleton(TimeProvider.System); + +builder.Services.AddNotifyEventQueue(builder.Configuration, "notify:queue"); +builder.Services.AddNotifyDeliveryQueue(builder.Configuration, "notify:deliveryQueue"); + +builder.Services.AddSingleton<INotifyEventHandler, NoOpNotifyEventHandler>(); 
+builder.Services.AddSingleton<NotifyEventLeaseProcessor>(); +builder.Services.AddHostedService<NotifyEventLeaseWorker>(); + +await builder.Build().RunAsync().ConfigureAwait(false); diff --git a/src/StellaOps.Notify.Worker/Properties/AssemblyInfo.cs b/src/Notify/StellaOps.Notify.Worker/Properties/AssemblyInfo.cs similarity index 97% rename from src/StellaOps.Notify.Worker/Properties/AssemblyInfo.cs rename to src/Notify/StellaOps.Notify.Worker/Properties/AssemblyInfo.cs index 7a46cf70..c1c32d1d 100644 --- a/src/StellaOps.Notify.Worker/Properties/AssemblyInfo.cs +++ b/src/Notify/StellaOps.Notify.Worker/Properties/AssemblyInfo.cs @@ -1,3 +1,3 @@ -using System.Runtime.CompilerServices; - -[assembly: InternalsVisibleTo("StellaOps.Notify.Worker.Tests")] +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("StellaOps.Notify.Worker.Tests")] diff --git a/src/StellaOps.Notify.Worker/StellaOps.Notify.Worker.csproj b/src/Notify/StellaOps.Notify.Worker/StellaOps.Notify.Worker.csproj similarity index 98% rename from src/StellaOps.Notify.Worker/StellaOps.Notify.Worker.csproj rename to src/Notify/StellaOps.Notify.Worker/StellaOps.Notify.Worker.csproj index 2d8013b3..082a0832 100644 --- a/src/StellaOps.Notify.Worker/StellaOps.Notify.Worker.csproj +++ b/src/Notify/StellaOps.Notify.Worker/StellaOps.Notify.Worker.csproj @@ -1,24 +1,24 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <OutputType>Exe</OutputType> - </PropertyGroup> - <ItemGroup> - <PackageReference Include="Microsoft.Extensions.Configuration.Json" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Configuration.EnvironmentVariables" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Hosting" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Logging.Console" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Options.ConfigurationExtensions" Version="10.0.0-rc.2.25502.107" /> - </ItemGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Notify.Queue\StellaOps.Notify.Queue.csproj" /> - <ProjectReference Include="..\StellaOps.Notify.Models\StellaOps.Notify.Models.csproj" /> - </ItemGroup> - <ItemGroup> - <None Update="appsettings.json"> - <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> - </None> - </ItemGroup> -</Project> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <OutputType>Exe</OutputType> + </PropertyGroup> + <ItemGroup> + <PackageReference Include="Microsoft.Extensions.Configuration.Json" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Configuration.EnvironmentVariables" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Hosting" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Logging.Console" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Options.ConfigurationExtensions" Version="10.0.0-rc.2.25502.107" /> + </ItemGroup> + <ItemGroup> + <ProjectReference Include="..\StellaOps.Notify.Queue\StellaOps.Notify.Queue.csproj" /> + <ProjectReference Include="..\StellaOps.Notify.Models\StellaOps.Notify.Models.csproj" /> + </ItemGroup> + <ItemGroup> + <None Update="appsettings.json"> 
+ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> + </None> + </ItemGroup> +</Project> diff --git a/src/StellaOps.Notify.Worker/TASKS.md b/src/Notify/StellaOps.Notify.Worker/TASKS.md similarity index 67% rename from src/StellaOps.Notify.Worker/TASKS.md rename to src/Notify/StellaOps.Notify.Worker/TASKS.md index b268ea5b..f8e429f8 100644 --- a/src/StellaOps.Notify.Worker/TASKS.md +++ b/src/Notify/StellaOps.Notify.Worker/TASKS.md @@ -1,2 +1,2 @@ # Notify Worker Task Board (Sprint 15) -> Archived 2025-10-26 — worker responsibilities handled in `src/StellaOps.Notifier` (Sprints 38–40). +> Archived 2025-10-26 — worker responsibilities handled in `src/Notifier/StellaOps.Notifier` (Sprints 38–40). diff --git a/src/StellaOps.Notify.Worker/appsettings.json b/src/Notify/StellaOps.Notify.Worker/appsettings.json similarity index 96% rename from src/StellaOps.Notify.Worker/appsettings.json rename to src/Notify/StellaOps.Notify.Worker/appsettings.json index 56b6cce6..1be712f9 100644 --- a/src/StellaOps.Notify.Worker/appsettings.json +++ b/src/Notify/StellaOps.Notify.Worker/appsettings.json @@ -1,43 +1,43 @@ -{ - "Logging": { - "LogLevel": { - "Default": "Information", - "Microsoft": "Warning", - "Microsoft.Hosting.Lifetime": "Information" - } - }, - "notify": { - "worker": { - "leaseBatchSize": 16, - "leaseDuration": "00:00:30", - "idleDelay": "00:00:00.250", - "maxConcurrency": 4, - "failureBackoffThreshold": 3, - "failureBackoffDelay": "00:00:05" - }, - "queue": { - "transport": "Redis", - "redis": { - "connectionString": "localhost:6379", - "streams": [ - { - "stream": "notify:events", - "consumerGroup": "notify-workers", - "idempotencyKeyPrefix": "notify:events:idemp:", - "approximateMaxLength": 100000 - } - ] - } - }, - "deliveryQueue": { - "transport": "Redis", - "redis": { - "connectionString": "localhost:6379", - "streamName": "notify:deliveries", - "consumerGroup": "notify-delivery", - "idempotencyKeyPrefix": "notify:deliveries:idemp:", - "deadLetterStreamName": "notify:deliveries:dead" - } - } - } -} +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft": "Warning", + "Microsoft.Hosting.Lifetime": "Information" + } + }, + "notify": { + "worker": { + "leaseBatchSize": 16, + "leaseDuration": "00:00:30", + "idleDelay": "00:00:00.250", + "maxConcurrency": 4, + "failureBackoffThreshold": 3, + "failureBackoffDelay": "00:00:05" + }, + "queue": { + "transport": "Redis", + "redis": { + "connectionString": "localhost:6379", + "streams": [ + { + "stream": "notify:events", + "consumerGroup": "notify-workers", + "idempotencyKeyPrefix": "notify:events:idemp:", + "approximateMaxLength": 100000 + } + ] + } + }, + "deliveryQueue": { + "transport": "Redis", + "redis": { + "connectionString": "localhost:6379", + "streamName": "notify:deliveries", + "consumerGroup": "notify-delivery", + "idempotencyKeyPrefix": "notify:deliveries:idemp:", + "deadLetterStreamName": "notify:deliveries:dead" + } + } + } +} diff --git a/src/Notify/StellaOps.Notify.sln b/src/Notify/StellaOps.Notify.sln new file mode 100644 index 00000000..319cc1b7 --- /dev/null +++ b/src/Notify/StellaOps.Notify.sln @@ -0,0 +1,422 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.0.31903.59 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Notify.WebService", "StellaOps.Notify.WebService\StellaOps.Notify.WebService.csproj", "{DDE8646D-6EE3-44A1-B433-96943C93FFBB}" +EndProject 
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Configuration", "..\__Libraries\StellaOps.Configuration\StellaOps.Configuration.csproj", "{DB941060-49CE-49DA-A9A6-37B0C6FB1BFC}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Authority.Plugins.Abstractions", "..\Authority\StellaOps.Authority\StellaOps.Authority.Plugins.Abstractions\StellaOps.Authority.Plugins.Abstractions.csproj", "{43063DE2-1226-4B4C-8047-E44A5632F4EB}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cryptography", "..\__Libraries\StellaOps.Cryptography\StellaOps.Cryptography.csproj", "{F622175F-115B-4DF9-887F-1A517439FA89}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Abstractions", "..\Authority\StellaOps.Authority\StellaOps.Auth.Abstractions\StellaOps.Auth.Abstractions.csproj", "{7C91C6FD-2F33-4C08-B6D1-0C2BF8FB24BC}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.DependencyInjection", "..\__Libraries\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj", "{4EAF4F80-CCE4-4CC3-B8ED-E1D5804A7C98}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "__Libraries", "__Libraries", "{41F15E67-7190-CF23-3BC4-77E87134CADD}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Notify.Models", "__Libraries\StellaOps.Notify.Models\StellaOps.Notify.Models.csproj", "{59BFF1D2-B0E6-4E17-90ED-7F02669CE4E7}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Notify.Storage.Mongo", "__Libraries\StellaOps.Notify.Storage.Mongo\StellaOps.Notify.Storage.Mongo.csproj", "{BD147625-3614-49BB-B484-01200F28FF8B}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Notify.Engine", "__Libraries\StellaOps.Notify.Engine\StellaOps.Notify.Engine.csproj", "{046AF53B-0C95-4C2B-A608-8F17F4EEAE1C}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Plugin", "..\__Libraries\StellaOps.Plugin\StellaOps.Plugin.csproj", "{EFF370F5-788E-4E39-8D80-1DFC6563E45C}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Client", "..\Authority\StellaOps.Authority\StellaOps.Auth.Client\StellaOps.Auth.Client.csproj", "{4C5FB454-3C98-4634-8DE3-D06E1EDDAF05}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.ServerIntegration", "..\Authority\StellaOps.Authority\StellaOps.Auth.ServerIntegration\StellaOps.Auth.ServerIntegration.csproj", "{894FBB67-F556-4695-A16D-8B4223D438A4}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Notify.Connectors.Email", "__Libraries\StellaOps.Notify.Connectors.Email\StellaOps.Notify.Connectors.Email.csproj", "{466C8F11-C43C-455A-AC28-5BF7AEBF04B0}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Notify.Connectors.Shared", "__Libraries\StellaOps.Notify.Connectors.Shared\StellaOps.Notify.Connectors.Shared.csproj", "{8048E985-85DE-4B05-AB76-67C436D6516F}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Notify.Connectors.Slack", "__Libraries\StellaOps.Notify.Connectors.Slack\StellaOps.Notify.Connectors.Slack.csproj", "{E94520D5-0D26-4869-AFFD-889D02616D9E}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Notify.Connectors.Teams", "__Libraries\StellaOps.Notify.Connectors.Teams\StellaOps.Notify.Connectors.Teams.csproj", "{2B6CFE1E-137C-4596-8C01-7EE486F9A15E}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = 
"StellaOps.Notify.Connectors.Webhook", "__Libraries\StellaOps.Notify.Connectors.Webhook\StellaOps.Notify.Connectors.Webhook.csproj", "{B5AB2C97-AA81-4C02-B62E-DBEE2EEDB43D}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Notify.Queue", "__Libraries\StellaOps.Notify.Queue\StellaOps.Notify.Queue.csproj", "{F151D567-5A17-4E2F-8D48-348701B1DC23}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Notify.Worker", "StellaOps.Notify.Worker\StellaOps.Notify.Worker.csproj", "{7BD19877-3C36-4BD0-8BF7-E1A245106D1C}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "__Tests", "__Tests", "{56BCE1BF-7CBA-7CE8-203D-A88051F1D642}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Notify.Connectors.Email.Tests", "__Tests\StellaOps.Notify.Connectors.Email.Tests\StellaOps.Notify.Connectors.Email.Tests.csproj", "{894EC02C-34C9-43C8-A01B-AF3A85FAE329}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Notify.Connectors.Slack.Tests", "__Tests\StellaOps.Notify.Connectors.Slack.Tests\StellaOps.Notify.Connectors.Slack.Tests.csproj", "{C4F45D77-7646-440D-A153-E52DBF95731D}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Notify.Connectors.Teams.Tests", "__Tests\StellaOps.Notify.Connectors.Teams.Tests\StellaOps.Notify.Connectors.Teams.Tests.csproj", "{DE4E8371-7933-4D96-9023-36F5D2DDFC56}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Notify.Models.Tests", "__Tests\StellaOps.Notify.Models.Tests\StellaOps.Notify.Models.Tests.csproj", "{08428B42-D650-430E-9E51-8A3B18B4C984}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Notify.Queue.Tests", "__Tests\StellaOps.Notify.Queue.Tests\StellaOps.Notify.Queue.Tests.csproj", "{84451047-1B04-42D1-9C02-762564CC2B40}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Notify.Storage.Mongo.Tests", "__Tests\StellaOps.Notify.Storage.Mongo.Tests\StellaOps.Notify.Storage.Mongo.Tests.csproj", "{C63A47A3-18A6-4251-95A7-392EB58D7B87}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Notify.WebService.Tests", "__Tests\StellaOps.Notify.WebService.Tests\StellaOps.Notify.WebService.Tests.csproj", "{EDAF907C-18A1-4099-9D3B-169B38400420}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Notify.Worker.Tests", "__Tests\StellaOps.Notify.Worker.Tests\StellaOps.Notify.Worker.Tests.csproj", "{66801106-E70A-4D33-8A08-A46C08902603}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|Any CPU = Release|Any CPU + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {DDE8646D-6EE3-44A1-B433-96943C93FFBB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {DDE8646D-6EE3-44A1-B433-96943C93FFBB}.Debug|Any CPU.Build.0 = Debug|Any CPU + {DDE8646D-6EE3-44A1-B433-96943C93FFBB}.Debug|x64.ActiveCfg = Debug|Any CPU + {DDE8646D-6EE3-44A1-B433-96943C93FFBB}.Debug|x64.Build.0 = Debug|Any CPU + {DDE8646D-6EE3-44A1-B433-96943C93FFBB}.Debug|x86.ActiveCfg = Debug|Any CPU + {DDE8646D-6EE3-44A1-B433-96943C93FFBB}.Debug|x86.Build.0 = Debug|Any CPU + {DDE8646D-6EE3-44A1-B433-96943C93FFBB}.Release|Any CPU.ActiveCfg = Release|Any CPU + {DDE8646D-6EE3-44A1-B433-96943C93FFBB}.Release|Any CPU.Build.0 = Release|Any CPU + 
{DDE8646D-6EE3-44A1-B433-96943C93FFBB}.Release|x64.ActiveCfg = Release|Any CPU + {DDE8646D-6EE3-44A1-B433-96943C93FFBB}.Release|x64.Build.0 = Release|Any CPU + {DDE8646D-6EE3-44A1-B433-96943C93FFBB}.Release|x86.ActiveCfg = Release|Any CPU + {DDE8646D-6EE3-44A1-B433-96943C93FFBB}.Release|x86.Build.0 = Release|Any CPU + {DB941060-49CE-49DA-A9A6-37B0C6FB1BFC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {DB941060-49CE-49DA-A9A6-37B0C6FB1BFC}.Debug|Any CPU.Build.0 = Debug|Any CPU + {DB941060-49CE-49DA-A9A6-37B0C6FB1BFC}.Debug|x64.ActiveCfg = Debug|Any CPU + {DB941060-49CE-49DA-A9A6-37B0C6FB1BFC}.Debug|x64.Build.0 = Debug|Any CPU + {DB941060-49CE-49DA-A9A6-37B0C6FB1BFC}.Debug|x86.ActiveCfg = Debug|Any CPU + {DB941060-49CE-49DA-A9A6-37B0C6FB1BFC}.Debug|x86.Build.0 = Debug|Any CPU + {DB941060-49CE-49DA-A9A6-37B0C6FB1BFC}.Release|Any CPU.ActiveCfg = Release|Any CPU + {DB941060-49CE-49DA-A9A6-37B0C6FB1BFC}.Release|Any CPU.Build.0 = Release|Any CPU + {DB941060-49CE-49DA-A9A6-37B0C6FB1BFC}.Release|x64.ActiveCfg = Release|Any CPU + {DB941060-49CE-49DA-A9A6-37B0C6FB1BFC}.Release|x64.Build.0 = Release|Any CPU + {DB941060-49CE-49DA-A9A6-37B0C6FB1BFC}.Release|x86.ActiveCfg = Release|Any CPU + {DB941060-49CE-49DA-A9A6-37B0C6FB1BFC}.Release|x86.Build.0 = Release|Any CPU + {43063DE2-1226-4B4C-8047-E44A5632F4EB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {43063DE2-1226-4B4C-8047-E44A5632F4EB}.Debug|Any CPU.Build.0 = Debug|Any CPU + {43063DE2-1226-4B4C-8047-E44A5632F4EB}.Debug|x64.ActiveCfg = Debug|Any CPU + {43063DE2-1226-4B4C-8047-E44A5632F4EB}.Debug|x64.Build.0 = Debug|Any CPU + {43063DE2-1226-4B4C-8047-E44A5632F4EB}.Debug|x86.ActiveCfg = Debug|Any CPU + {43063DE2-1226-4B4C-8047-E44A5632F4EB}.Debug|x86.Build.0 = Debug|Any CPU + {43063DE2-1226-4B4C-8047-E44A5632F4EB}.Release|Any CPU.ActiveCfg = Release|Any CPU + {43063DE2-1226-4B4C-8047-E44A5632F4EB}.Release|Any CPU.Build.0 = Release|Any CPU + {43063DE2-1226-4B4C-8047-E44A5632F4EB}.Release|x64.ActiveCfg = Release|Any CPU + {43063DE2-1226-4B4C-8047-E44A5632F4EB}.Release|x64.Build.0 = Release|Any CPU + {43063DE2-1226-4B4C-8047-E44A5632F4EB}.Release|x86.ActiveCfg = Release|Any CPU + {43063DE2-1226-4B4C-8047-E44A5632F4EB}.Release|x86.Build.0 = Release|Any CPU + {F622175F-115B-4DF9-887F-1A517439FA89}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {F622175F-115B-4DF9-887F-1A517439FA89}.Debug|Any CPU.Build.0 = Debug|Any CPU + {F622175F-115B-4DF9-887F-1A517439FA89}.Debug|x64.ActiveCfg = Debug|Any CPU + {F622175F-115B-4DF9-887F-1A517439FA89}.Debug|x64.Build.0 = Debug|Any CPU + {F622175F-115B-4DF9-887F-1A517439FA89}.Debug|x86.ActiveCfg = Debug|Any CPU + {F622175F-115B-4DF9-887F-1A517439FA89}.Debug|x86.Build.0 = Debug|Any CPU + {F622175F-115B-4DF9-887F-1A517439FA89}.Release|Any CPU.ActiveCfg = Release|Any CPU + {F622175F-115B-4DF9-887F-1A517439FA89}.Release|Any CPU.Build.0 = Release|Any CPU + {F622175F-115B-4DF9-887F-1A517439FA89}.Release|x64.ActiveCfg = Release|Any CPU + {F622175F-115B-4DF9-887F-1A517439FA89}.Release|x64.Build.0 = Release|Any CPU + {F622175F-115B-4DF9-887F-1A517439FA89}.Release|x86.ActiveCfg = Release|Any CPU + {F622175F-115B-4DF9-887F-1A517439FA89}.Release|x86.Build.0 = Release|Any CPU + {7C91C6FD-2F33-4C08-B6D1-0C2BF8FB24BC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7C91C6FD-2F33-4C08-B6D1-0C2BF8FB24BC}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7C91C6FD-2F33-4C08-B6D1-0C2BF8FB24BC}.Debug|x64.ActiveCfg = Debug|Any CPU + {7C91C6FD-2F33-4C08-B6D1-0C2BF8FB24BC}.Debug|x64.Build.0 = Debug|Any CPU + {7C91C6FD-2F33-4C08-B6D1-0C2BF8FB24BC}.Debug|x86.ActiveCfg = 
Debug|Any CPU + {7C91C6FD-2F33-4C08-B6D1-0C2BF8FB24BC}.Debug|x86.Build.0 = Debug|Any CPU + {7C91C6FD-2F33-4C08-B6D1-0C2BF8FB24BC}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7C91C6FD-2F33-4C08-B6D1-0C2BF8FB24BC}.Release|Any CPU.Build.0 = Release|Any CPU + {7C91C6FD-2F33-4C08-B6D1-0C2BF8FB24BC}.Release|x64.ActiveCfg = Release|Any CPU + {7C91C6FD-2F33-4C08-B6D1-0C2BF8FB24BC}.Release|x64.Build.0 = Release|Any CPU + {7C91C6FD-2F33-4C08-B6D1-0C2BF8FB24BC}.Release|x86.ActiveCfg = Release|Any CPU + {7C91C6FD-2F33-4C08-B6D1-0C2BF8FB24BC}.Release|x86.Build.0 = Release|Any CPU + {4EAF4F80-CCE4-4CC3-B8ED-E1D5804A7C98}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {4EAF4F80-CCE4-4CC3-B8ED-E1D5804A7C98}.Debug|Any CPU.Build.0 = Debug|Any CPU + {4EAF4F80-CCE4-4CC3-B8ED-E1D5804A7C98}.Debug|x64.ActiveCfg = Debug|Any CPU + {4EAF4F80-CCE4-4CC3-B8ED-E1D5804A7C98}.Debug|x64.Build.0 = Debug|Any CPU + {4EAF4F80-CCE4-4CC3-B8ED-E1D5804A7C98}.Debug|x86.ActiveCfg = Debug|Any CPU + {4EAF4F80-CCE4-4CC3-B8ED-E1D5804A7C98}.Debug|x86.Build.0 = Debug|Any CPU + {4EAF4F80-CCE4-4CC3-B8ED-E1D5804A7C98}.Release|Any CPU.ActiveCfg = Release|Any CPU + {4EAF4F80-CCE4-4CC3-B8ED-E1D5804A7C98}.Release|Any CPU.Build.0 = Release|Any CPU + {4EAF4F80-CCE4-4CC3-B8ED-E1D5804A7C98}.Release|x64.ActiveCfg = Release|Any CPU + {4EAF4F80-CCE4-4CC3-B8ED-E1D5804A7C98}.Release|x64.Build.0 = Release|Any CPU + {4EAF4F80-CCE4-4CC3-B8ED-E1D5804A7C98}.Release|x86.ActiveCfg = Release|Any CPU + {4EAF4F80-CCE4-4CC3-B8ED-E1D5804A7C98}.Release|x86.Build.0 = Release|Any CPU + {59BFF1D2-B0E6-4E17-90ED-7F02669CE4E7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {59BFF1D2-B0E6-4E17-90ED-7F02669CE4E7}.Debug|Any CPU.Build.0 = Debug|Any CPU + {59BFF1D2-B0E6-4E17-90ED-7F02669CE4E7}.Debug|x64.ActiveCfg = Debug|Any CPU + {59BFF1D2-B0E6-4E17-90ED-7F02669CE4E7}.Debug|x64.Build.0 = Debug|Any CPU + {59BFF1D2-B0E6-4E17-90ED-7F02669CE4E7}.Debug|x86.ActiveCfg = Debug|Any CPU + {59BFF1D2-B0E6-4E17-90ED-7F02669CE4E7}.Debug|x86.Build.0 = Debug|Any CPU + {59BFF1D2-B0E6-4E17-90ED-7F02669CE4E7}.Release|Any CPU.ActiveCfg = Release|Any CPU + {59BFF1D2-B0E6-4E17-90ED-7F02669CE4E7}.Release|Any CPU.Build.0 = Release|Any CPU + {59BFF1D2-B0E6-4E17-90ED-7F02669CE4E7}.Release|x64.ActiveCfg = Release|Any CPU + {59BFF1D2-B0E6-4E17-90ED-7F02669CE4E7}.Release|x64.Build.0 = Release|Any CPU + {59BFF1D2-B0E6-4E17-90ED-7F02669CE4E7}.Release|x86.ActiveCfg = Release|Any CPU + {59BFF1D2-B0E6-4E17-90ED-7F02669CE4E7}.Release|x86.Build.0 = Release|Any CPU + {BD147625-3614-49BB-B484-01200F28FF8B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {BD147625-3614-49BB-B484-01200F28FF8B}.Debug|Any CPU.Build.0 = Debug|Any CPU + {BD147625-3614-49BB-B484-01200F28FF8B}.Debug|x64.ActiveCfg = Debug|Any CPU + {BD147625-3614-49BB-B484-01200F28FF8B}.Debug|x64.Build.0 = Debug|Any CPU + {BD147625-3614-49BB-B484-01200F28FF8B}.Debug|x86.ActiveCfg = Debug|Any CPU + {BD147625-3614-49BB-B484-01200F28FF8B}.Debug|x86.Build.0 = Debug|Any CPU + {BD147625-3614-49BB-B484-01200F28FF8B}.Release|Any CPU.ActiveCfg = Release|Any CPU + {BD147625-3614-49BB-B484-01200F28FF8B}.Release|Any CPU.Build.0 = Release|Any CPU + {BD147625-3614-49BB-B484-01200F28FF8B}.Release|x64.ActiveCfg = Release|Any CPU + {BD147625-3614-49BB-B484-01200F28FF8B}.Release|x64.Build.0 = Release|Any CPU + {BD147625-3614-49BB-B484-01200F28FF8B}.Release|x86.ActiveCfg = Release|Any CPU + {BD147625-3614-49BB-B484-01200F28FF8B}.Release|x86.Build.0 = Release|Any CPU + {046AF53B-0C95-4C2B-A608-8F17F4EEAE1C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + 
{046AF53B-0C95-4C2B-A608-8F17F4EEAE1C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {046AF53B-0C95-4C2B-A608-8F17F4EEAE1C}.Debug|x64.ActiveCfg = Debug|Any CPU + {046AF53B-0C95-4C2B-A608-8F17F4EEAE1C}.Debug|x64.Build.0 = Debug|Any CPU + {046AF53B-0C95-4C2B-A608-8F17F4EEAE1C}.Debug|x86.ActiveCfg = Debug|Any CPU + {046AF53B-0C95-4C2B-A608-8F17F4EEAE1C}.Debug|x86.Build.0 = Debug|Any CPU + {046AF53B-0C95-4C2B-A608-8F17F4EEAE1C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {046AF53B-0C95-4C2B-A608-8F17F4EEAE1C}.Release|Any CPU.Build.0 = Release|Any CPU + {046AF53B-0C95-4C2B-A608-8F17F4EEAE1C}.Release|x64.ActiveCfg = Release|Any CPU + {046AF53B-0C95-4C2B-A608-8F17F4EEAE1C}.Release|x64.Build.0 = Release|Any CPU + {046AF53B-0C95-4C2B-A608-8F17F4EEAE1C}.Release|x86.ActiveCfg = Release|Any CPU + {046AF53B-0C95-4C2B-A608-8F17F4EEAE1C}.Release|x86.Build.0 = Release|Any CPU + {EFF370F5-788E-4E39-8D80-1DFC6563E45C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {EFF370F5-788E-4E39-8D80-1DFC6563E45C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {EFF370F5-788E-4E39-8D80-1DFC6563E45C}.Debug|x64.ActiveCfg = Debug|Any CPU + {EFF370F5-788E-4E39-8D80-1DFC6563E45C}.Debug|x64.Build.0 = Debug|Any CPU + {EFF370F5-788E-4E39-8D80-1DFC6563E45C}.Debug|x86.ActiveCfg = Debug|Any CPU + {EFF370F5-788E-4E39-8D80-1DFC6563E45C}.Debug|x86.Build.0 = Debug|Any CPU + {EFF370F5-788E-4E39-8D80-1DFC6563E45C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {EFF370F5-788E-4E39-8D80-1DFC6563E45C}.Release|Any CPU.Build.0 = Release|Any CPU + {EFF370F5-788E-4E39-8D80-1DFC6563E45C}.Release|x64.ActiveCfg = Release|Any CPU + {EFF370F5-788E-4E39-8D80-1DFC6563E45C}.Release|x64.Build.0 = Release|Any CPU + {EFF370F5-788E-4E39-8D80-1DFC6563E45C}.Release|x86.ActiveCfg = Release|Any CPU + {EFF370F5-788E-4E39-8D80-1DFC6563E45C}.Release|x86.Build.0 = Release|Any CPU + {4C5FB454-3C98-4634-8DE3-D06E1EDDAF05}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {4C5FB454-3C98-4634-8DE3-D06E1EDDAF05}.Debug|Any CPU.Build.0 = Debug|Any CPU + {4C5FB454-3C98-4634-8DE3-D06E1EDDAF05}.Debug|x64.ActiveCfg = Debug|Any CPU + {4C5FB454-3C98-4634-8DE3-D06E1EDDAF05}.Debug|x64.Build.0 = Debug|Any CPU + {4C5FB454-3C98-4634-8DE3-D06E1EDDAF05}.Debug|x86.ActiveCfg = Debug|Any CPU + {4C5FB454-3C98-4634-8DE3-D06E1EDDAF05}.Debug|x86.Build.0 = Debug|Any CPU + {4C5FB454-3C98-4634-8DE3-D06E1EDDAF05}.Release|Any CPU.ActiveCfg = Release|Any CPU + {4C5FB454-3C98-4634-8DE3-D06E1EDDAF05}.Release|Any CPU.Build.0 = Release|Any CPU + {4C5FB454-3C98-4634-8DE3-D06E1EDDAF05}.Release|x64.ActiveCfg = Release|Any CPU + {4C5FB454-3C98-4634-8DE3-D06E1EDDAF05}.Release|x64.Build.0 = Release|Any CPU + {4C5FB454-3C98-4634-8DE3-D06E1EDDAF05}.Release|x86.ActiveCfg = Release|Any CPU + {4C5FB454-3C98-4634-8DE3-D06E1EDDAF05}.Release|x86.Build.0 = Release|Any CPU + {894FBB67-F556-4695-A16D-8B4223D438A4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {894FBB67-F556-4695-A16D-8B4223D438A4}.Debug|Any CPU.Build.0 = Debug|Any CPU + {894FBB67-F556-4695-A16D-8B4223D438A4}.Debug|x64.ActiveCfg = Debug|Any CPU + {894FBB67-F556-4695-A16D-8B4223D438A4}.Debug|x64.Build.0 = Debug|Any CPU + {894FBB67-F556-4695-A16D-8B4223D438A4}.Debug|x86.ActiveCfg = Debug|Any CPU + {894FBB67-F556-4695-A16D-8B4223D438A4}.Debug|x86.Build.0 = Debug|Any CPU + {894FBB67-F556-4695-A16D-8B4223D438A4}.Release|Any CPU.ActiveCfg = Release|Any CPU + {894FBB67-F556-4695-A16D-8B4223D438A4}.Release|Any CPU.Build.0 = Release|Any CPU + {894FBB67-F556-4695-A16D-8B4223D438A4}.Release|x64.ActiveCfg = Release|Any CPU + {894FBB67-F556-4695-A16D-8B4223D438A4}.Release|x64.Build.0 = 
Release|Any CPU + {894FBB67-F556-4695-A16D-8B4223D438A4}.Release|x86.ActiveCfg = Release|Any CPU + {894FBB67-F556-4695-A16D-8B4223D438A4}.Release|x86.Build.0 = Release|Any CPU + {466C8F11-C43C-455A-AC28-5BF7AEBF04B0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {466C8F11-C43C-455A-AC28-5BF7AEBF04B0}.Debug|Any CPU.Build.0 = Debug|Any CPU + {466C8F11-C43C-455A-AC28-5BF7AEBF04B0}.Debug|x64.ActiveCfg = Debug|Any CPU + {466C8F11-C43C-455A-AC28-5BF7AEBF04B0}.Debug|x64.Build.0 = Debug|Any CPU + {466C8F11-C43C-455A-AC28-5BF7AEBF04B0}.Debug|x86.ActiveCfg = Debug|Any CPU + {466C8F11-C43C-455A-AC28-5BF7AEBF04B0}.Debug|x86.Build.0 = Debug|Any CPU + {466C8F11-C43C-455A-AC28-5BF7AEBF04B0}.Release|Any CPU.ActiveCfg = Release|Any CPU + {466C8F11-C43C-455A-AC28-5BF7AEBF04B0}.Release|Any CPU.Build.0 = Release|Any CPU + {466C8F11-C43C-455A-AC28-5BF7AEBF04B0}.Release|x64.ActiveCfg = Release|Any CPU + {466C8F11-C43C-455A-AC28-5BF7AEBF04B0}.Release|x64.Build.0 = Release|Any CPU + {466C8F11-C43C-455A-AC28-5BF7AEBF04B0}.Release|x86.ActiveCfg = Release|Any CPU + {466C8F11-C43C-455A-AC28-5BF7AEBF04B0}.Release|x86.Build.0 = Release|Any CPU + {8048E985-85DE-4B05-AB76-67C436D6516F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {8048E985-85DE-4B05-AB76-67C436D6516F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {8048E985-85DE-4B05-AB76-67C436D6516F}.Debug|x64.ActiveCfg = Debug|Any CPU + {8048E985-85DE-4B05-AB76-67C436D6516F}.Debug|x64.Build.0 = Debug|Any CPU + {8048E985-85DE-4B05-AB76-67C436D6516F}.Debug|x86.ActiveCfg = Debug|Any CPU + {8048E985-85DE-4B05-AB76-67C436D6516F}.Debug|x86.Build.0 = Debug|Any CPU + {8048E985-85DE-4B05-AB76-67C436D6516F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {8048E985-85DE-4B05-AB76-67C436D6516F}.Release|Any CPU.Build.0 = Release|Any CPU + {8048E985-85DE-4B05-AB76-67C436D6516F}.Release|x64.ActiveCfg = Release|Any CPU + {8048E985-85DE-4B05-AB76-67C436D6516F}.Release|x64.Build.0 = Release|Any CPU + {8048E985-85DE-4B05-AB76-67C436D6516F}.Release|x86.ActiveCfg = Release|Any CPU + {8048E985-85DE-4B05-AB76-67C436D6516F}.Release|x86.Build.0 = Release|Any CPU + {E94520D5-0D26-4869-AFFD-889D02616D9E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E94520D5-0D26-4869-AFFD-889D02616D9E}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E94520D5-0D26-4869-AFFD-889D02616D9E}.Debug|x64.ActiveCfg = Debug|Any CPU + {E94520D5-0D26-4869-AFFD-889D02616D9E}.Debug|x64.Build.0 = Debug|Any CPU + {E94520D5-0D26-4869-AFFD-889D02616D9E}.Debug|x86.ActiveCfg = Debug|Any CPU + {E94520D5-0D26-4869-AFFD-889D02616D9E}.Debug|x86.Build.0 = Debug|Any CPU + {E94520D5-0D26-4869-AFFD-889D02616D9E}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E94520D5-0D26-4869-AFFD-889D02616D9E}.Release|Any CPU.Build.0 = Release|Any CPU + {E94520D5-0D26-4869-AFFD-889D02616D9E}.Release|x64.ActiveCfg = Release|Any CPU + {E94520D5-0D26-4869-AFFD-889D02616D9E}.Release|x64.Build.0 = Release|Any CPU + {E94520D5-0D26-4869-AFFD-889D02616D9E}.Release|x86.ActiveCfg = Release|Any CPU + {E94520D5-0D26-4869-AFFD-889D02616D9E}.Release|x86.Build.0 = Release|Any CPU + {2B6CFE1E-137C-4596-8C01-7EE486F9A15E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {2B6CFE1E-137C-4596-8C01-7EE486F9A15E}.Debug|Any CPU.Build.0 = Debug|Any CPU + {2B6CFE1E-137C-4596-8C01-7EE486F9A15E}.Debug|x64.ActiveCfg = Debug|Any CPU + {2B6CFE1E-137C-4596-8C01-7EE486F9A15E}.Debug|x64.Build.0 = Debug|Any CPU + {2B6CFE1E-137C-4596-8C01-7EE486F9A15E}.Debug|x86.ActiveCfg = Debug|Any CPU + {2B6CFE1E-137C-4596-8C01-7EE486F9A15E}.Debug|x86.Build.0 = Debug|Any CPU + {2B6CFE1E-137C-4596-8C01-7EE486F9A15E}.Release|Any 
CPU.ActiveCfg = Release|Any CPU + {2B6CFE1E-137C-4596-8C01-7EE486F9A15E}.Release|Any CPU.Build.0 = Release|Any CPU + {2B6CFE1E-137C-4596-8C01-7EE486F9A15E}.Release|x64.ActiveCfg = Release|Any CPU + {2B6CFE1E-137C-4596-8C01-7EE486F9A15E}.Release|x64.Build.0 = Release|Any CPU + {2B6CFE1E-137C-4596-8C01-7EE486F9A15E}.Release|x86.ActiveCfg = Release|Any CPU + {2B6CFE1E-137C-4596-8C01-7EE486F9A15E}.Release|x86.Build.0 = Release|Any CPU + {B5AB2C97-AA81-4C02-B62E-DBEE2EEDB43D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B5AB2C97-AA81-4C02-B62E-DBEE2EEDB43D}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B5AB2C97-AA81-4C02-B62E-DBEE2EEDB43D}.Debug|x64.ActiveCfg = Debug|Any CPU + {B5AB2C97-AA81-4C02-B62E-DBEE2EEDB43D}.Debug|x64.Build.0 = Debug|Any CPU + {B5AB2C97-AA81-4C02-B62E-DBEE2EEDB43D}.Debug|x86.ActiveCfg = Debug|Any CPU + {B5AB2C97-AA81-4C02-B62E-DBEE2EEDB43D}.Debug|x86.Build.0 = Debug|Any CPU + {B5AB2C97-AA81-4C02-B62E-DBEE2EEDB43D}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B5AB2C97-AA81-4C02-B62E-DBEE2EEDB43D}.Release|Any CPU.Build.0 = Release|Any CPU + {B5AB2C97-AA81-4C02-B62E-DBEE2EEDB43D}.Release|x64.ActiveCfg = Release|Any CPU + {B5AB2C97-AA81-4C02-B62E-DBEE2EEDB43D}.Release|x64.Build.0 = Release|Any CPU + {B5AB2C97-AA81-4C02-B62E-DBEE2EEDB43D}.Release|x86.ActiveCfg = Release|Any CPU + {B5AB2C97-AA81-4C02-B62E-DBEE2EEDB43D}.Release|x86.Build.0 = Release|Any CPU + {F151D567-5A17-4E2F-8D48-348701B1DC23}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {F151D567-5A17-4E2F-8D48-348701B1DC23}.Debug|Any CPU.Build.0 = Debug|Any CPU + {F151D567-5A17-4E2F-8D48-348701B1DC23}.Debug|x64.ActiveCfg = Debug|Any CPU + {F151D567-5A17-4E2F-8D48-348701B1DC23}.Debug|x64.Build.0 = Debug|Any CPU + {F151D567-5A17-4E2F-8D48-348701B1DC23}.Debug|x86.ActiveCfg = Debug|Any CPU + {F151D567-5A17-4E2F-8D48-348701B1DC23}.Debug|x86.Build.0 = Debug|Any CPU + {F151D567-5A17-4E2F-8D48-348701B1DC23}.Release|Any CPU.ActiveCfg = Release|Any CPU + {F151D567-5A17-4E2F-8D48-348701B1DC23}.Release|Any CPU.Build.0 = Release|Any CPU + {F151D567-5A17-4E2F-8D48-348701B1DC23}.Release|x64.ActiveCfg = Release|Any CPU + {F151D567-5A17-4E2F-8D48-348701B1DC23}.Release|x64.Build.0 = Release|Any CPU + {F151D567-5A17-4E2F-8D48-348701B1DC23}.Release|x86.ActiveCfg = Release|Any CPU + {F151D567-5A17-4E2F-8D48-348701B1DC23}.Release|x86.Build.0 = Release|Any CPU + {7BD19877-3C36-4BD0-8BF7-E1A245106D1C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7BD19877-3C36-4BD0-8BF7-E1A245106D1C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7BD19877-3C36-4BD0-8BF7-E1A245106D1C}.Debug|x64.ActiveCfg = Debug|Any CPU + {7BD19877-3C36-4BD0-8BF7-E1A245106D1C}.Debug|x64.Build.0 = Debug|Any CPU + {7BD19877-3C36-4BD0-8BF7-E1A245106D1C}.Debug|x86.ActiveCfg = Debug|Any CPU + {7BD19877-3C36-4BD0-8BF7-E1A245106D1C}.Debug|x86.Build.0 = Debug|Any CPU + {7BD19877-3C36-4BD0-8BF7-E1A245106D1C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7BD19877-3C36-4BD0-8BF7-E1A245106D1C}.Release|Any CPU.Build.0 = Release|Any CPU + {7BD19877-3C36-4BD0-8BF7-E1A245106D1C}.Release|x64.ActiveCfg = Release|Any CPU + {7BD19877-3C36-4BD0-8BF7-E1A245106D1C}.Release|x64.Build.0 = Release|Any CPU + {7BD19877-3C36-4BD0-8BF7-E1A245106D1C}.Release|x86.ActiveCfg = Release|Any CPU + {7BD19877-3C36-4BD0-8BF7-E1A245106D1C}.Release|x86.Build.0 = Release|Any CPU + {894EC02C-34C9-43C8-A01B-AF3A85FAE329}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {894EC02C-34C9-43C8-A01B-AF3A85FAE329}.Debug|Any CPU.Build.0 = Debug|Any CPU + {894EC02C-34C9-43C8-A01B-AF3A85FAE329}.Debug|x64.ActiveCfg = Debug|Any CPU + 
{894EC02C-34C9-43C8-A01B-AF3A85FAE329}.Debug|x64.Build.0 = Debug|Any CPU + {894EC02C-34C9-43C8-A01B-AF3A85FAE329}.Debug|x86.ActiveCfg = Debug|Any CPU + {894EC02C-34C9-43C8-A01B-AF3A85FAE329}.Debug|x86.Build.0 = Debug|Any CPU + {894EC02C-34C9-43C8-A01B-AF3A85FAE329}.Release|Any CPU.ActiveCfg = Release|Any CPU + {894EC02C-34C9-43C8-A01B-AF3A85FAE329}.Release|Any CPU.Build.0 = Release|Any CPU + {894EC02C-34C9-43C8-A01B-AF3A85FAE329}.Release|x64.ActiveCfg = Release|Any CPU + {894EC02C-34C9-43C8-A01B-AF3A85FAE329}.Release|x64.Build.0 = Release|Any CPU + {894EC02C-34C9-43C8-A01B-AF3A85FAE329}.Release|x86.ActiveCfg = Release|Any CPU + {894EC02C-34C9-43C8-A01B-AF3A85FAE329}.Release|x86.Build.0 = Release|Any CPU + {C4F45D77-7646-440D-A153-E52DBF95731D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C4F45D77-7646-440D-A153-E52DBF95731D}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C4F45D77-7646-440D-A153-E52DBF95731D}.Debug|x64.ActiveCfg = Debug|Any CPU + {C4F45D77-7646-440D-A153-E52DBF95731D}.Debug|x64.Build.0 = Debug|Any CPU + {C4F45D77-7646-440D-A153-E52DBF95731D}.Debug|x86.ActiveCfg = Debug|Any CPU + {C4F45D77-7646-440D-A153-E52DBF95731D}.Debug|x86.Build.0 = Debug|Any CPU + {C4F45D77-7646-440D-A153-E52DBF95731D}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C4F45D77-7646-440D-A153-E52DBF95731D}.Release|Any CPU.Build.0 = Release|Any CPU + {C4F45D77-7646-440D-A153-E52DBF95731D}.Release|x64.ActiveCfg = Release|Any CPU + {C4F45D77-7646-440D-A153-E52DBF95731D}.Release|x64.Build.0 = Release|Any CPU + {C4F45D77-7646-440D-A153-E52DBF95731D}.Release|x86.ActiveCfg = Release|Any CPU + {C4F45D77-7646-440D-A153-E52DBF95731D}.Release|x86.Build.0 = Release|Any CPU + {DE4E8371-7933-4D96-9023-36F5D2DDFC56}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {DE4E8371-7933-4D96-9023-36F5D2DDFC56}.Debug|Any CPU.Build.0 = Debug|Any CPU + {DE4E8371-7933-4D96-9023-36F5D2DDFC56}.Debug|x64.ActiveCfg = Debug|Any CPU + {DE4E8371-7933-4D96-9023-36F5D2DDFC56}.Debug|x64.Build.0 = Debug|Any CPU + {DE4E8371-7933-4D96-9023-36F5D2DDFC56}.Debug|x86.ActiveCfg = Debug|Any CPU + {DE4E8371-7933-4D96-9023-36F5D2DDFC56}.Debug|x86.Build.0 = Debug|Any CPU + {DE4E8371-7933-4D96-9023-36F5D2DDFC56}.Release|Any CPU.ActiveCfg = Release|Any CPU + {DE4E8371-7933-4D96-9023-36F5D2DDFC56}.Release|Any CPU.Build.0 = Release|Any CPU + {DE4E8371-7933-4D96-9023-36F5D2DDFC56}.Release|x64.ActiveCfg = Release|Any CPU + {DE4E8371-7933-4D96-9023-36F5D2DDFC56}.Release|x64.Build.0 = Release|Any CPU + {DE4E8371-7933-4D96-9023-36F5D2DDFC56}.Release|x86.ActiveCfg = Release|Any CPU + {DE4E8371-7933-4D96-9023-36F5D2DDFC56}.Release|x86.Build.0 = Release|Any CPU + {08428B42-D650-430E-9E51-8A3B18B4C984}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {08428B42-D650-430E-9E51-8A3B18B4C984}.Debug|Any CPU.Build.0 = Debug|Any CPU + {08428B42-D650-430E-9E51-8A3B18B4C984}.Debug|x64.ActiveCfg = Debug|Any CPU + {08428B42-D650-430E-9E51-8A3B18B4C984}.Debug|x64.Build.0 = Debug|Any CPU + {08428B42-D650-430E-9E51-8A3B18B4C984}.Debug|x86.ActiveCfg = Debug|Any CPU + {08428B42-D650-430E-9E51-8A3B18B4C984}.Debug|x86.Build.0 = Debug|Any CPU + {08428B42-D650-430E-9E51-8A3B18B4C984}.Release|Any CPU.ActiveCfg = Release|Any CPU + {08428B42-D650-430E-9E51-8A3B18B4C984}.Release|Any CPU.Build.0 = Release|Any CPU + {08428B42-D650-430E-9E51-8A3B18B4C984}.Release|x64.ActiveCfg = Release|Any CPU + {08428B42-D650-430E-9E51-8A3B18B4C984}.Release|x64.Build.0 = Release|Any CPU + {08428B42-D650-430E-9E51-8A3B18B4C984}.Release|x86.ActiveCfg = Release|Any CPU + {08428B42-D650-430E-9E51-8A3B18B4C984}.Release|x86.Build.0 = 
Release|Any CPU + {84451047-1B04-42D1-9C02-762564CC2B40}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {84451047-1B04-42D1-9C02-762564CC2B40}.Debug|Any CPU.Build.0 = Debug|Any CPU + {84451047-1B04-42D1-9C02-762564CC2B40}.Debug|x64.ActiveCfg = Debug|Any CPU + {84451047-1B04-42D1-9C02-762564CC2B40}.Debug|x64.Build.0 = Debug|Any CPU + {84451047-1B04-42D1-9C02-762564CC2B40}.Debug|x86.ActiveCfg = Debug|Any CPU + {84451047-1B04-42D1-9C02-762564CC2B40}.Debug|x86.Build.0 = Debug|Any CPU + {84451047-1B04-42D1-9C02-762564CC2B40}.Release|Any CPU.ActiveCfg = Release|Any CPU + {84451047-1B04-42D1-9C02-762564CC2B40}.Release|Any CPU.Build.0 = Release|Any CPU + {84451047-1B04-42D1-9C02-762564CC2B40}.Release|x64.ActiveCfg = Release|Any CPU + {84451047-1B04-42D1-9C02-762564CC2B40}.Release|x64.Build.0 = Release|Any CPU + {84451047-1B04-42D1-9C02-762564CC2B40}.Release|x86.ActiveCfg = Release|Any CPU + {84451047-1B04-42D1-9C02-762564CC2B40}.Release|x86.Build.0 = Release|Any CPU + {C63A47A3-18A6-4251-95A7-392EB58D7B87}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C63A47A3-18A6-4251-95A7-392EB58D7B87}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C63A47A3-18A6-4251-95A7-392EB58D7B87}.Debug|x64.ActiveCfg = Debug|Any CPU + {C63A47A3-18A6-4251-95A7-392EB58D7B87}.Debug|x64.Build.0 = Debug|Any CPU + {C63A47A3-18A6-4251-95A7-392EB58D7B87}.Debug|x86.ActiveCfg = Debug|Any CPU + {C63A47A3-18A6-4251-95A7-392EB58D7B87}.Debug|x86.Build.0 = Debug|Any CPU + {C63A47A3-18A6-4251-95A7-392EB58D7B87}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C63A47A3-18A6-4251-95A7-392EB58D7B87}.Release|Any CPU.Build.0 = Release|Any CPU + {C63A47A3-18A6-4251-95A7-392EB58D7B87}.Release|x64.ActiveCfg = Release|Any CPU + {C63A47A3-18A6-4251-95A7-392EB58D7B87}.Release|x64.Build.0 = Release|Any CPU + {C63A47A3-18A6-4251-95A7-392EB58D7B87}.Release|x86.ActiveCfg = Release|Any CPU + {C63A47A3-18A6-4251-95A7-392EB58D7B87}.Release|x86.Build.0 = Release|Any CPU + {EDAF907C-18A1-4099-9D3B-169B38400420}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {EDAF907C-18A1-4099-9D3B-169B38400420}.Debug|Any CPU.Build.0 = Debug|Any CPU + {EDAF907C-18A1-4099-9D3B-169B38400420}.Debug|x64.ActiveCfg = Debug|Any CPU + {EDAF907C-18A1-4099-9D3B-169B38400420}.Debug|x64.Build.0 = Debug|Any CPU + {EDAF907C-18A1-4099-9D3B-169B38400420}.Debug|x86.ActiveCfg = Debug|Any CPU + {EDAF907C-18A1-4099-9D3B-169B38400420}.Debug|x86.Build.0 = Debug|Any CPU + {EDAF907C-18A1-4099-9D3B-169B38400420}.Release|Any CPU.ActiveCfg = Release|Any CPU + {EDAF907C-18A1-4099-9D3B-169B38400420}.Release|Any CPU.Build.0 = Release|Any CPU + {EDAF907C-18A1-4099-9D3B-169B38400420}.Release|x64.ActiveCfg = Release|Any CPU + {EDAF907C-18A1-4099-9D3B-169B38400420}.Release|x64.Build.0 = Release|Any CPU + {EDAF907C-18A1-4099-9D3B-169B38400420}.Release|x86.ActiveCfg = Release|Any CPU + {EDAF907C-18A1-4099-9D3B-169B38400420}.Release|x86.Build.0 = Release|Any CPU + {66801106-E70A-4D33-8A08-A46C08902603}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {66801106-E70A-4D33-8A08-A46C08902603}.Debug|Any CPU.Build.0 = Debug|Any CPU + {66801106-E70A-4D33-8A08-A46C08902603}.Debug|x64.ActiveCfg = Debug|Any CPU + {66801106-E70A-4D33-8A08-A46C08902603}.Debug|x64.Build.0 = Debug|Any CPU + {66801106-E70A-4D33-8A08-A46C08902603}.Debug|x86.ActiveCfg = Debug|Any CPU + {66801106-E70A-4D33-8A08-A46C08902603}.Debug|x86.Build.0 = Debug|Any CPU + {66801106-E70A-4D33-8A08-A46C08902603}.Release|Any CPU.ActiveCfg = Release|Any CPU + {66801106-E70A-4D33-8A08-A46C08902603}.Release|Any CPU.Build.0 = Release|Any CPU + 
{66801106-E70A-4D33-8A08-A46C08902603}.Release|x64.ActiveCfg = Release|Any CPU + {66801106-E70A-4D33-8A08-A46C08902603}.Release|x64.Build.0 = Release|Any CPU + {66801106-E70A-4D33-8A08-A46C08902603}.Release|x86.ActiveCfg = Release|Any CPU + {66801106-E70A-4D33-8A08-A46C08902603}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(NestedProjects) = preSolution + {59BFF1D2-B0E6-4E17-90ED-7F02669CE4E7} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {BD147625-3614-49BB-B484-01200F28FF8B} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {046AF53B-0C95-4C2B-A608-8F17F4EEAE1C} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {466C8F11-C43C-455A-AC28-5BF7AEBF04B0} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {8048E985-85DE-4B05-AB76-67C436D6516F} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {E94520D5-0D26-4869-AFFD-889D02616D9E} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {2B6CFE1E-137C-4596-8C01-7EE486F9A15E} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {B5AB2C97-AA81-4C02-B62E-DBEE2EEDB43D} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {F151D567-5A17-4E2F-8D48-348701B1DC23} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {7BD19877-3C36-4BD0-8BF7-E1A245106D1C} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {894EC02C-34C9-43C8-A01B-AF3A85FAE329} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {C4F45D77-7646-440D-A153-E52DBF95731D} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {DE4E8371-7933-4D96-9023-36F5D2DDFC56} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {08428B42-D650-430E-9E51-8A3B18B4C984} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {84451047-1B04-42D1-9C02-762564CC2B40} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {C63A47A3-18A6-4251-95A7-392EB58D7B87} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {EDAF907C-18A1-4099-9D3B-169B38400420} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {66801106-E70A-4D33-8A08-A46C08902603} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + EndGlobalSection +EndGlobal diff --git a/src/StellaOps.Notify.Connectors.Email/AGENTS.md b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Email/AGENTS.md similarity index 100% rename from src/StellaOps.Notify.Connectors.Email/AGENTS.md rename to src/Notify/__Libraries/StellaOps.Notify.Connectors.Email/AGENTS.md diff --git a/src/StellaOps.Notify.Connectors.Email/EmailChannelHealthProvider.cs b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Email/EmailChannelHealthProvider.cs similarity index 97% rename from src/StellaOps.Notify.Connectors.Email/EmailChannelHealthProvider.cs rename to src/Notify/__Libraries/StellaOps.Notify.Connectors.Email/EmailChannelHealthProvider.cs index 92fa6ae6..6468a4b1 100644 --- a/src/StellaOps.Notify.Connectors.Email/EmailChannelHealthProvider.cs +++ b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Email/EmailChannelHealthProvider.cs @@ -1,59 +1,59 @@ -using System; -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.DependencyInjection; -using StellaOps.DependencyInjection; -using StellaOps.Notify.Engine; -using StellaOps.Notify.Models; - -namespace StellaOps.Notify.Connectors.Email; - -[ServiceBinding(typeof(INotifyChannelHealthProvider), ServiceLifetime.Singleton)] -public sealed class EmailChannelHealthProvider : INotifyChannelHealthProvider -{ - public NotifyChannelType ChannelType => NotifyChannelType.Email; - - public Task<ChannelHealthResult> CheckAsync(ChannelHealthContext context, CancellationToken cancellationToken) - { - 
ArgumentNullException.ThrowIfNull(context); - cancellationToken.ThrowIfCancellationRequested(); - - var builder = EmailMetadataBuilder.CreateBuilder(context) - .Add("email.channel.enabled", context.Channel.Enabled ? "true" : "false") - .Add("email.validation.targetPresent", HasConfiguredTarget(context.Channel) ? "true" : "false"); - - var metadata = builder.Build(); - var status = ResolveStatus(context.Channel); - var message = status switch - { - ChannelHealthStatus.Healthy => "Email channel configuration validated.", - ChannelHealthStatus.Degraded => "Email channel is disabled; enable it to resume deliveries.", - ChannelHealthStatus.Unhealthy => "Email channel target/configuration incomplete.", - _ => "Email channel diagnostics completed." - }; - - return Task.FromResult(new ChannelHealthResult(status, message, metadata)); - } - - private static ChannelHealthStatus ResolveStatus(NotifyChannel channel) - { - if (!HasConfiguredTarget(channel)) - { - return ChannelHealthStatus.Unhealthy; - } - - if (!channel.Enabled) - { - return ChannelHealthStatus.Degraded; - } - - return ChannelHealthStatus.Healthy; - } - - private static bool HasConfiguredTarget(NotifyChannel channel) - => !string.IsNullOrWhiteSpace(channel.Config.Target) || - (channel.Config.Properties is not null && - channel.Config.Properties.TryGetValue("fromAddress", out var from) && - !string.IsNullOrWhiteSpace(from)); -} +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; +using StellaOps.DependencyInjection; +using StellaOps.Notify.Engine; +using StellaOps.Notify.Models; + +namespace StellaOps.Notify.Connectors.Email; + +[ServiceBinding(typeof(INotifyChannelHealthProvider), ServiceLifetime.Singleton)] +public sealed class EmailChannelHealthProvider : INotifyChannelHealthProvider +{ + public NotifyChannelType ChannelType => NotifyChannelType.Email; + + public Task<ChannelHealthResult> CheckAsync(ChannelHealthContext context, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(context); + cancellationToken.ThrowIfCancellationRequested(); + + var builder = EmailMetadataBuilder.CreateBuilder(context) + .Add("email.channel.enabled", context.Channel.Enabled ? "true" : "false") + .Add("email.validation.targetPresent", HasConfiguredTarget(context.Channel) ? "true" : "false"); + + var metadata = builder.Build(); + var status = ResolveStatus(context.Channel); + var message = status switch + { + ChannelHealthStatus.Healthy => "Email channel configuration validated.", + ChannelHealthStatus.Degraded => "Email channel is disabled; enable it to resume deliveries.", + ChannelHealthStatus.Unhealthy => "Email channel target/configuration incomplete.", + _ => "Email channel diagnostics completed." 
+ }; + + return Task.FromResult(new ChannelHealthResult(status, message, metadata)); + } + + private static ChannelHealthStatus ResolveStatus(NotifyChannel channel) + { + if (!HasConfiguredTarget(channel)) + { + return ChannelHealthStatus.Unhealthy; + } + + if (!channel.Enabled) + { + return ChannelHealthStatus.Degraded; + } + + return ChannelHealthStatus.Healthy; + } + + private static bool HasConfiguredTarget(NotifyChannel channel) + => !string.IsNullOrWhiteSpace(channel.Config.Target) || + (channel.Config.Properties is not null && + channel.Config.Properties.TryGetValue("fromAddress", out var from) && + !string.IsNullOrWhiteSpace(from)); +} diff --git a/src/StellaOps.Notify.Connectors.Email/EmailChannelTestProvider.cs b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Email/EmailChannelTestProvider.cs similarity index 100% rename from src/StellaOps.Notify.Connectors.Email/EmailChannelTestProvider.cs rename to src/Notify/__Libraries/StellaOps.Notify.Connectors.Email/EmailChannelTestProvider.cs diff --git a/src/StellaOps.Notify.Connectors.Email/EmailMetadataBuilder.cs b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Email/EmailMetadataBuilder.cs similarity index 97% rename from src/StellaOps.Notify.Connectors.Email/EmailMetadataBuilder.cs rename to src/Notify/__Libraries/StellaOps.Notify.Connectors.Email/EmailMetadataBuilder.cs index 8db44a9b..e096c205 100644 --- a/src/StellaOps.Notify.Connectors.Email/EmailMetadataBuilder.cs +++ b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Email/EmailMetadataBuilder.cs @@ -1,54 +1,54 @@ -using System; -using System.Collections.Generic; -using StellaOps.Notify.Connectors.Shared; -using StellaOps.Notify.Engine; -using StellaOps.Notify.Models; - -namespace StellaOps.Notify.Connectors.Email; - -/// <summary> -/// Builds metadata for Email previews and health diagnostics with redacted secrets. -/// </summary> -internal static class EmailMetadataBuilder -{ - private const int SecretHashLengthBytes = 8; - - public static ConnectorMetadataBuilder CreateBuilder(ChannelTestPreviewContext context) - => CreateBaseBuilder( - channel: context.Channel, - target: context.Target, - timestamp: context.Timestamp, - properties: context.Channel.Config.Properties, - secretRef: context.Channel.Config.SecretRef); - - public static ConnectorMetadataBuilder CreateBuilder(ChannelHealthContext context) - => CreateBaseBuilder( - channel: context.Channel, - target: context.Target, - timestamp: context.Timestamp, - properties: context.Channel.Config.Properties, - secretRef: context.Channel.Config.SecretRef); - - public static IReadOnlyDictionary<string, string> Build(ChannelTestPreviewContext context) - => CreateBuilder(context).Build(); - - public static IReadOnlyDictionary<string, string> Build(ChannelHealthContext context) - => CreateBuilder(context).Build(); - - private static ConnectorMetadataBuilder CreateBaseBuilder( - NotifyChannel channel, - string target, - DateTimeOffset timestamp, - IReadOnlyDictionary<string, string>? 
properties, - string secretRef) - { - var builder = new ConnectorMetadataBuilder(); - - builder.AddTarget("email.target", target) - .AddTimestamp("email.preview.generatedAt", timestamp) - .AddSecretRefHash("email.secretRef.hash", secretRef, SecretHashLengthBytes) - .AddConfigProperties("email.config.", properties); - - return builder; - } -} +using System; +using System.Collections.Generic; +using StellaOps.Notify.Connectors.Shared; +using StellaOps.Notify.Engine; +using StellaOps.Notify.Models; + +namespace StellaOps.Notify.Connectors.Email; + +/// <summary> +/// Builds metadata for Email previews and health diagnostics with redacted secrets. +/// </summary> +internal static class EmailMetadataBuilder +{ + private const int SecretHashLengthBytes = 8; + + public static ConnectorMetadataBuilder CreateBuilder(ChannelTestPreviewContext context) + => CreateBaseBuilder( + channel: context.Channel, + target: context.Target, + timestamp: context.Timestamp, + properties: context.Channel.Config.Properties, + secretRef: context.Channel.Config.SecretRef); + + public static ConnectorMetadataBuilder CreateBuilder(ChannelHealthContext context) + => CreateBaseBuilder( + channel: context.Channel, + target: context.Target, + timestamp: context.Timestamp, + properties: context.Channel.Config.Properties, + secretRef: context.Channel.Config.SecretRef); + + public static IReadOnlyDictionary<string, string> Build(ChannelTestPreviewContext context) + => CreateBuilder(context).Build(); + + public static IReadOnlyDictionary<string, string> Build(ChannelHealthContext context) + => CreateBuilder(context).Build(); + + private static ConnectorMetadataBuilder CreateBaseBuilder( + NotifyChannel channel, + string target, + DateTimeOffset timestamp, + IReadOnlyDictionary<string, string>? 
properties, + string secretRef) + { + var builder = new ConnectorMetadataBuilder(); + + builder.AddTarget("email.target", target) + .AddTimestamp("email.preview.generatedAt", timestamp) + .AddSecretRefHash("email.secretRef.hash", secretRef, SecretHashLengthBytes) + .AddConfigProperties("email.config.", properties); + + return builder; + } +} diff --git a/src/StellaOps.Notify.Connectors.Email/StellaOps.Notify.Connectors.Email.csproj b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Email/StellaOps.Notify.Connectors.Email.csproj similarity index 79% rename from src/StellaOps.Notify.Connectors.Email/StellaOps.Notify.Connectors.Email.csproj rename to src/Notify/__Libraries/StellaOps.Notify.Connectors.Email/StellaOps.Notify.Connectors.Email.csproj index 37a32ab8..0a2db805 100644 --- a/src/StellaOps.Notify.Connectors.Email/StellaOps.Notify.Connectors.Email.csproj +++ b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Email/StellaOps.Notify.Connectors.Email.csproj @@ -1,12 +1,13 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> - <ProjectReference Include="..\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj" /> + <ProjectReference Include="../../../__Libraries/StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj" /> <ProjectReference Include="..\StellaOps.Notify.Engine\StellaOps.Notify.Engine.csproj" /> <ProjectReference Include="..\StellaOps.Notify.Models\StellaOps.Notify.Models.csproj" /> <ProjectReference Include="..\StellaOps.Notify.Connectors.Shared\StellaOps.Notify.Connectors.Shared.csproj" /> @@ -17,4 +18,4 @@ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> </None> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/Notify/__Libraries/StellaOps.Notify.Connectors.Email/TASKS.md b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Email/TASKS.md new file mode 100644 index 00000000..3115b124 --- /dev/null +++ b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Email/TASKS.md @@ -0,0 +1,2 @@ +# Notify Email Connector Task Board (Sprint 15) +> Archived 2025-10-26 — connector maintained under `src/Notifier/StellaOps.Notifier` (Sprints 38–40). 
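For reference, the EmailMetadataBuilder above never records the secret reference itself: it emits only a truncated, lowercase SHA-256 hash under the "email.secretRef.hash" key (the rule implemented by ConnectorHashing.ComputeSha256Hash further down in this patch). A minimal standalone C# sketch of that hashing rule follows; the HashSecretRef helper name and the "ref://..." value are illustrative placeholders, not part of the patch.

    using System;
    using System.Security.Cryptography;
    using System.Text;

    // Mirrors ConnectorHashing.ComputeSha256Hash from this patch:
    // lowercase hex SHA-256 of the trimmed value, truncated to lengthBytes bytes (default 8).
    static string HashSecretRef(string secretRef, int lengthBytes = 8)
    {
        var hash = SHA256.HashData(Encoding.UTF8.GetBytes(secretRef.Trim()));
        return Convert.ToHexString(hash.AsSpan(0, lengthBytes)).ToLowerInvariant();
    }

    // Hypothetical secret reference; only the 16-character hex prefix would surface in metadata.
    Console.WriteLine(HashSecretRef("ref://notify/email/smtp-password"));

With the default length of 8 bytes this yields a 16-character hex string, which is what appears as "email.secretRef.hash" in preview and health metadata instead of the raw secret reference.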
diff --git a/src/StellaOps.Notify.Connectors.Email/notify-plugin.json b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Email/notify-plugin.json similarity index 95% rename from src/StellaOps.Notify.Connectors.Email/notify-plugin.json rename to src/Notify/__Libraries/StellaOps.Notify.Connectors.Email/notify-plugin.json index 56407f5f..097ec83d 100644 --- a/src/StellaOps.Notify.Connectors.Email/notify-plugin.json +++ b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Email/notify-plugin.json @@ -1,18 +1,18 @@ -{ - "schemaVersion": "1.0", - "id": "stellaops.notify.connector.email", - "displayName": "StellaOps Email Notify Connector", - "version": "0.1.0-alpha", - "requiresRestart": true, - "entryPoint": { - "type": "dotnet", - "assembly": "StellaOps.Notify.Connectors.Email.dll" - }, - "capabilities": [ - "notify-connector", - "email" - ], - "metadata": { - "org.stellaops.notify.channel.type": "email" - } -} +{ + "schemaVersion": "1.0", + "id": "stellaops.notify.connector.email", + "displayName": "StellaOps Email Notify Connector", + "version": "0.1.0-alpha", + "requiresRestart": true, + "entryPoint": { + "type": "dotnet", + "assembly": "StellaOps.Notify.Connectors.Email.dll" + }, + "capabilities": [ + "notify-connector", + "email" + ], + "metadata": { + "org.stellaops.notify.channel.type": "email" + } +} diff --git a/src/StellaOps.Notify.Connectors.Shared/ConnectorHashing.cs b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Shared/ConnectorHashing.cs similarity index 97% rename from src/StellaOps.Notify.Connectors.Shared/ConnectorHashing.cs rename to src/Notify/__Libraries/StellaOps.Notify.Connectors.Shared/ConnectorHashing.cs index acc7ab0e..63dcae37 100644 --- a/src/StellaOps.Notify.Connectors.Shared/ConnectorHashing.cs +++ b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Shared/ConnectorHashing.cs @@ -1,31 +1,31 @@ -using System; -using System.Security.Cryptography; -using System.Text; - -namespace StellaOps.Notify.Connectors.Shared; - -/// <summary> -/// Common hashing helpers for Notify connector metadata. -/// </summary> -public static class ConnectorHashing -{ - /// <summary> - /// Computes a lowercase hex SHA-256 hash and truncates it to the requested number of bytes. - /// </summary> - public static string ComputeSha256Hash(string value, int lengthBytes = 8) - { - if (string.IsNullOrWhiteSpace(value)) - { - throw new ArgumentException("Value must not be null or whitespace.", nameof(value)); - } - - if (lengthBytes <= 0 || lengthBytes > 32) - { - throw new ArgumentOutOfRangeException(nameof(lengthBytes), "Length must be between 1 and 32 bytes."); - } - - var bytes = Encoding.UTF8.GetBytes(value.Trim()); - var hash = SHA256.HashData(bytes); - return Convert.ToHexString(hash.AsSpan(0, lengthBytes)).ToLowerInvariant(); - } -} +using System; +using System.Security.Cryptography; +using System.Text; + +namespace StellaOps.Notify.Connectors.Shared; + +/// <summary> +/// Common hashing helpers for Notify connector metadata. +/// </summary> +public static class ConnectorHashing +{ + /// <summary> + /// Computes a lowercase hex SHA-256 hash and truncates it to the requested number of bytes. 
+ /// </summary> + public static string ComputeSha256Hash(string value, int lengthBytes = 8) + { + if (string.IsNullOrWhiteSpace(value)) + { + throw new ArgumentException("Value must not be null or whitespace.", nameof(value)); + } + + if (lengthBytes <= 0 || lengthBytes > 32) + { + throw new ArgumentOutOfRangeException(nameof(lengthBytes), "Length must be between 1 and 32 bytes."); + } + + var bytes = Encoding.UTF8.GetBytes(value.Trim()); + var hash = SHA256.HashData(bytes); + return Convert.ToHexString(hash.AsSpan(0, lengthBytes)).ToLowerInvariant(); + } +} diff --git a/src/StellaOps.Notify.Connectors.Shared/ConnectorMetadataBuilder.cs b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Shared/ConnectorMetadataBuilder.cs similarity index 96% rename from src/StellaOps.Notify.Connectors.Shared/ConnectorMetadataBuilder.cs rename to src/Notify/__Libraries/StellaOps.Notify.Connectors.Shared/ConnectorMetadataBuilder.cs index 829a9045..8569e1dc 100644 --- a/src/StellaOps.Notify.Connectors.Shared/ConnectorMetadataBuilder.cs +++ b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Shared/ConnectorMetadataBuilder.cs @@ -1,147 +1,147 @@ -using System; -using System.Collections.Generic; -using System.Collections.ObjectModel; -using System.Globalization; - -namespace StellaOps.Notify.Connectors.Shared; - -/// <summary> -/// Utility for constructing connector metadata payloads with consistent redaction rules. -/// </summary> -public sealed class ConnectorMetadataBuilder -{ - private readonly Dictionary<string, string> _metadata; - - public ConnectorMetadataBuilder(StringComparer? comparer = null) - { - _metadata = new Dictionary<string, string>(comparer ?? StringComparer.Ordinal); - SensitiveFragments = new HashSet<string>(ConnectorValueRedactor.DefaultSensitiveKeyFragments, StringComparer.OrdinalIgnoreCase); - } - - /// <summary> - /// Collection of key fragments treated as sensitive when redacting values. - /// </summary> - public ISet<string> SensitiveFragments { get; } - - /// <summary> - /// Adds or replaces a metadata entry when the value is non-empty. - /// </summary> - public ConnectorMetadataBuilder Add(string key, string? value) - { - if (string.IsNullOrWhiteSpace(key) || string.IsNullOrWhiteSpace(value)) - { - return this; - } - - _metadata[key.Trim()] = value.Trim(); - return this; - } - - /// <summary> - /// Adds the target value metadata. The value is trimmed but not redacted. - /// </summary> - public ConnectorMetadataBuilder AddTarget(string key, string target) - => Add(key, target); - - /// <summary> - /// Adds ISO-8601 timestamp metadata. - /// </summary> - public ConnectorMetadataBuilder AddTimestamp(string key, DateTimeOffset timestamp) - => Add(key, timestamp.ToString("O", CultureInfo.InvariantCulture)); - - /// <summary> - /// Adds a hash of the secret reference when present. - /// </summary> - public ConnectorMetadataBuilder AddSecretRefHash(string key, string? secretRef, int lengthBytes = 8) - { - if (!string.IsNullOrWhiteSpace(secretRef)) - { - Add(key, ConnectorHashing.ComputeSha256Hash(secretRef, lengthBytes)); - } - - return this; - } - - /// <summary> - /// Adds configuration target metadata only when the stored configuration differs from the resolved target. - /// </summary> - public ConnectorMetadataBuilder AddConfigTarget(string key, string? 
configuredTarget, string resolvedTarget) - { - if (!string.IsNullOrWhiteSpace(configuredTarget) && - !string.Equals(configuredTarget, resolvedTarget, StringComparison.Ordinal)) - { - Add(key, configuredTarget); - } - - return this; - } - - /// <summary> - /// Adds configuration endpoint metadata when present. - /// </summary> - public ConnectorMetadataBuilder AddConfigEndpoint(string key, string? endpoint) - => Add(key, endpoint); - - /// <summary> - /// Adds key/value metadata pairs from the provided dictionary, applying redaction to sensitive entries. - /// </summary> - public ConnectorMetadataBuilder AddConfigProperties( - string prefix, - IReadOnlyDictionary<string, string>? properties, - Func<string, string, string>? valueSelector = null) - { - if (properties is null || properties.Count == 0) - { - return this; - } - - foreach (var pair in properties) - { - if (string.IsNullOrWhiteSpace(pair.Key) || pair.Value is null) - { - continue; - } - - var key = prefix + pair.Key.Trim(); - var value = valueSelector is null - ? Redact(pair.Key, pair.Value) - : valueSelector(pair.Key, pair.Value); - - Add(key, value); - } - - return this; - } - - /// <summary> - /// Merges additional metadata entries into the builder. - /// </summary> - public ConnectorMetadataBuilder AddRange(IEnumerable<KeyValuePair<string, string>> entries) - { - foreach (var (key, value) in entries) - { - Add(key, value); - } - - return this; - } - - /// <summary> - /// Returns the redacted representation for the supplied key/value pair. - /// </summary> - public string Redact(string key, string value) - { - if (ConnectorValueRedactor.IsSensitiveKey(key, SensitiveFragments)) - { - return ConnectorValueRedactor.RedactSecret(value); - } - - return value.Trim(); - } - - /// <summary> - /// Builds an immutable view of the accumulated metadata. - /// </summary> - public IReadOnlyDictionary<string, string> Build() - => new ReadOnlyDictionary<string, string>(_metadata); -} +using System; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Globalization; + +namespace StellaOps.Notify.Connectors.Shared; + +/// <summary> +/// Utility for constructing connector metadata payloads with consistent redaction rules. +/// </summary> +public sealed class ConnectorMetadataBuilder +{ + private readonly Dictionary<string, string> _metadata; + + public ConnectorMetadataBuilder(StringComparer? comparer = null) + { + _metadata = new Dictionary<string, string>(comparer ?? StringComparer.Ordinal); + SensitiveFragments = new HashSet<string>(ConnectorValueRedactor.DefaultSensitiveKeyFragments, StringComparer.OrdinalIgnoreCase); + } + + /// <summary> + /// Collection of key fragments treated as sensitive when redacting values. + /// </summary> + public ISet<string> SensitiveFragments { get; } + + /// <summary> + /// Adds or replaces a metadata entry when the value is non-empty. + /// </summary> + public ConnectorMetadataBuilder Add(string key, string? value) + { + if (string.IsNullOrWhiteSpace(key) || string.IsNullOrWhiteSpace(value)) + { + return this; + } + + _metadata[key.Trim()] = value.Trim(); + return this; + } + + /// <summary> + /// Adds the target value metadata. The value is trimmed but not redacted. + /// </summary> + public ConnectorMetadataBuilder AddTarget(string key, string target) + => Add(key, target); + + /// <summary> + /// Adds ISO-8601 timestamp metadata. 
+ /// </summary> + public ConnectorMetadataBuilder AddTimestamp(string key, DateTimeOffset timestamp) + => Add(key, timestamp.ToString("O", CultureInfo.InvariantCulture)); + + /// <summary> + /// Adds a hash of the secret reference when present. + /// </summary> + public ConnectorMetadataBuilder AddSecretRefHash(string key, string? secretRef, int lengthBytes = 8) + { + if (!string.IsNullOrWhiteSpace(secretRef)) + { + Add(key, ConnectorHashing.ComputeSha256Hash(secretRef, lengthBytes)); + } + + return this; + } + + /// <summary> + /// Adds configuration target metadata only when the stored configuration differs from the resolved target. + /// </summary> + public ConnectorMetadataBuilder AddConfigTarget(string key, string? configuredTarget, string resolvedTarget) + { + if (!string.IsNullOrWhiteSpace(configuredTarget) && + !string.Equals(configuredTarget, resolvedTarget, StringComparison.Ordinal)) + { + Add(key, configuredTarget); + } + + return this; + } + + /// <summary> + /// Adds configuration endpoint metadata when present. + /// </summary> + public ConnectorMetadataBuilder AddConfigEndpoint(string key, string? endpoint) + => Add(key, endpoint); + + /// <summary> + /// Adds key/value metadata pairs from the provided dictionary, applying redaction to sensitive entries. + /// </summary> + public ConnectorMetadataBuilder AddConfigProperties( + string prefix, + IReadOnlyDictionary<string, string>? properties, + Func<string, string, string>? valueSelector = null) + { + if (properties is null || properties.Count == 0) + { + return this; + } + + foreach (var pair in properties) + { + if (string.IsNullOrWhiteSpace(pair.Key) || pair.Value is null) + { + continue; + } + + var key = prefix + pair.Key.Trim(); + var value = valueSelector is null + ? Redact(pair.Key, pair.Value) + : valueSelector(pair.Key, pair.Value); + + Add(key, value); + } + + return this; + } + + /// <summary> + /// Merges additional metadata entries into the builder. + /// </summary> + public ConnectorMetadataBuilder AddRange(IEnumerable<KeyValuePair<string, string>> entries) + { + foreach (var (key, value) in entries) + { + Add(key, value); + } + + return this; + } + + /// <summary> + /// Returns the redacted representation for the supplied key/value pair. + /// </summary> + public string Redact(string key, string value) + { + if (ConnectorValueRedactor.IsSensitiveKey(key, SensitiveFragments)) + { + return ConnectorValueRedactor.RedactSecret(value); + } + + return value.Trim(); + } + + /// <summary> + /// Builds an immutable view of the accumulated metadata. + /// </summary> + public IReadOnlyDictionary<string, string> Build() + => new ReadOnlyDictionary<string, string>(_metadata); +} diff --git a/src/StellaOps.Notify.Connectors.Shared/ConnectorValueRedactor.cs b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Shared/ConnectorValueRedactor.cs similarity index 96% rename from src/StellaOps.Notify.Connectors.Shared/ConnectorValueRedactor.cs rename to src/Notify/__Libraries/StellaOps.Notify.Connectors.Shared/ConnectorValueRedactor.cs index 1c743b58..65f7339f 100644 --- a/src/StellaOps.Notify.Connectors.Shared/ConnectorValueRedactor.cs +++ b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Shared/ConnectorValueRedactor.cs @@ -1,75 +1,75 @@ -using System; -using System.Collections.Generic; - -namespace StellaOps.Notify.Connectors.Shared; - -/// <summary> -/// Shared helpers for redacting sensitive connector metadata. 
-/// </summary> -public static class ConnectorValueRedactor -{ - private static readonly string[] DefaultSensitiveFragments = - { - "token", - "secret", - "authorization", - "cookie", - "password", - "key", - "credential" - }; - - /// <summary> - /// Gets the default set of sensitive key fragments. - /// </summary> - public static IReadOnlyCollection<string> DefaultSensitiveKeyFragments => DefaultSensitiveFragments; - - /// <summary> - /// Uses a constant mask for sensitive values. - /// </summary> - public static string RedactSecret(string value) => "***"; - - /// <summary> - /// Redacts the middle portion of a token while keeping stable prefix/suffix bytes. - /// </summary> - public static string RedactToken(string value, int prefixLength = 6, int suffixLength = 4) - { - var trimmed = value?.Trim() ?? string.Empty; - if (trimmed.Length <= prefixLength + suffixLength) - { - return RedactSecret(trimmed); - } - - var prefix = trimmed[..prefixLength]; - var suffix = trimmed[^suffixLength..]; - return string.Concat(prefix, "***", suffix); - } - - /// <summary> - /// Returns true when the provided key appears to represent sensitive data. - /// </summary> - public static bool IsSensitiveKey(string key, IEnumerable<string>? fragments = null) - { - if (string.IsNullOrWhiteSpace(key)) - { - return false; - } - - fragments ??= DefaultSensitiveFragments; - var span = key.AsSpan(); - foreach (var fragment in fragments) - { - if (string.IsNullOrWhiteSpace(fragment)) - { - continue; - } - - if (span.IndexOf(fragment.AsSpan(), StringComparison.OrdinalIgnoreCase) >= 0) - { - return true; - } - } - - return false; - } -} +using System; +using System.Collections.Generic; + +namespace StellaOps.Notify.Connectors.Shared; + +/// <summary> +/// Shared helpers for redacting sensitive connector metadata. +/// </summary> +public static class ConnectorValueRedactor +{ + private static readonly string[] DefaultSensitiveFragments = + { + "token", + "secret", + "authorization", + "cookie", + "password", + "key", + "credential" + }; + + /// <summary> + /// Gets the default set of sensitive key fragments. + /// </summary> + public static IReadOnlyCollection<string> DefaultSensitiveKeyFragments => DefaultSensitiveFragments; + + /// <summary> + /// Uses a constant mask for sensitive values. + /// </summary> + public static string RedactSecret(string value) => "***"; + + /// <summary> + /// Redacts the middle portion of a token while keeping stable prefix/suffix bytes. + /// </summary> + public static string RedactToken(string value, int prefixLength = 6, int suffixLength = 4) + { + var trimmed = value?.Trim() ?? string.Empty; + if (trimmed.Length <= prefixLength + suffixLength) + { + return RedactSecret(trimmed); + } + + var prefix = trimmed[..prefixLength]; + var suffix = trimmed[^suffixLength..]; + return string.Concat(prefix, "***", suffix); + } + + /// <summary> + /// Returns true when the provided key appears to represent sensitive data. + /// </summary> + public static bool IsSensitiveKey(string key, IEnumerable<string>? 
fragments = null) + { + if (string.IsNullOrWhiteSpace(key)) + { + return false; + } + + fragments ??= DefaultSensitiveFragments; + var span = key.AsSpan(); + foreach (var fragment in fragments) + { + if (string.IsNullOrWhiteSpace(fragment)) + { + continue; + } + + if (span.IndexOf(fragment.AsSpan(), StringComparison.OrdinalIgnoreCase) >= 0) + { + return true; + } + } + + return false; + } +} diff --git a/src/StellaOps.Notify.Connectors.Shared/StellaOps.Notify.Connectors.Shared.csproj b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Shared/StellaOps.Notify.Connectors.Shared.csproj similarity index 97% rename from src/StellaOps.Notify.Connectors.Shared/StellaOps.Notify.Connectors.Shared.csproj rename to src/Notify/__Libraries/StellaOps.Notify.Connectors.Shared/StellaOps.Notify.Connectors.Shared.csproj index 9fe568c0..0efe4fec 100644 --- a/src/StellaOps.Notify.Connectors.Shared/StellaOps.Notify.Connectors.Shared.csproj +++ b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Shared/StellaOps.Notify.Connectors.Shared.csproj @@ -1,12 +1,12 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - - <ItemGroup> - <ProjectReference Include="..\StellaOps.Notify.Engine\StellaOps.Notify.Engine.csproj" /> - <ProjectReference Include="..\StellaOps.Notify.Models\StellaOps.Notify.Models.csproj" /> - </ItemGroup> -</Project> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + + <ItemGroup> + <ProjectReference Include="..\StellaOps.Notify.Engine\StellaOps.Notify.Engine.csproj" /> + <ProjectReference Include="..\StellaOps.Notify.Models\StellaOps.Notify.Models.csproj" /> + </ItemGroup> +</Project> diff --git a/src/StellaOps.Notify.Connectors.Slack/AGENTS.md b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Slack/AGENTS.md similarity index 100% rename from src/StellaOps.Notify.Connectors.Slack/AGENTS.md rename to src/Notify/__Libraries/StellaOps.Notify.Connectors.Slack/AGENTS.md diff --git a/src/StellaOps.Notify.Connectors.Slack/SlackChannelHealthProvider.cs b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Slack/SlackChannelHealthProvider.cs similarity index 97% rename from src/StellaOps.Notify.Connectors.Slack/SlackChannelHealthProvider.cs rename to src/Notify/__Libraries/StellaOps.Notify.Connectors.Slack/SlackChannelHealthProvider.cs index b2dbdcbd..77808479 100644 --- a/src/StellaOps.Notify.Connectors.Slack/SlackChannelHealthProvider.cs +++ b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Slack/SlackChannelHealthProvider.cs @@ -1,56 +1,56 @@ -using System; -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.DependencyInjection; -using StellaOps.DependencyInjection; -using StellaOps.Notify.Engine; -using StellaOps.Notify.Models; - -namespace StellaOps.Notify.Connectors.Slack; - -[ServiceBinding(typeof(INotifyChannelHealthProvider), ServiceLifetime.Singleton)] -public sealed class SlackChannelHealthProvider : INotifyChannelHealthProvider -{ - public NotifyChannelType ChannelType => NotifyChannelType.Slack; - - public Task<ChannelHealthResult> CheckAsync(ChannelHealthContext context, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(context); - cancellationToken.ThrowIfCancellationRequested(); - - var builder = 
SlackMetadataBuilder.CreateBuilder(context) - .Add("slack.channel.enabled", context.Channel.Enabled ? "true" : "false") - .Add("slack.validation.targetPresent", HasConfiguredTarget(context.Channel) ? "true" : "false"); - - var metadata = builder.Build(); - var status = ResolveStatus(context.Channel); - var message = status switch - { - ChannelHealthStatus.Healthy => "Slack channel configuration validated.", - ChannelHealthStatus.Degraded => "Slack channel is disabled; enable it to resume deliveries.", - ChannelHealthStatus.Unhealthy => "Slack channel is missing a configured destination (target).", - _ => "Slack channel diagnostics completed." - }; - - return Task.FromResult(new ChannelHealthResult(status, message, metadata)); - } - - private static ChannelHealthStatus ResolveStatus(NotifyChannel channel) - { - if (!HasConfiguredTarget(channel)) - { - return ChannelHealthStatus.Unhealthy; - } - - if (!channel.Enabled) - { - return ChannelHealthStatus.Degraded; - } - - return ChannelHealthStatus.Healthy; - } - - private static bool HasConfiguredTarget(NotifyChannel channel) - => !string.IsNullOrWhiteSpace(channel.Config.Target); -} +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; +using StellaOps.DependencyInjection; +using StellaOps.Notify.Engine; +using StellaOps.Notify.Models; + +namespace StellaOps.Notify.Connectors.Slack; + +[ServiceBinding(typeof(INotifyChannelHealthProvider), ServiceLifetime.Singleton)] +public sealed class SlackChannelHealthProvider : INotifyChannelHealthProvider +{ + public NotifyChannelType ChannelType => NotifyChannelType.Slack; + + public Task<ChannelHealthResult> CheckAsync(ChannelHealthContext context, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(context); + cancellationToken.ThrowIfCancellationRequested(); + + var builder = SlackMetadataBuilder.CreateBuilder(context) + .Add("slack.channel.enabled", context.Channel.Enabled ? "true" : "false") + .Add("slack.validation.targetPresent", HasConfiguredTarget(context.Channel) ? "true" : "false"); + + var metadata = builder.Build(); + var status = ResolveStatus(context.Channel); + var message = status switch + { + ChannelHealthStatus.Healthy => "Slack channel configuration validated.", + ChannelHealthStatus.Degraded => "Slack channel is disabled; enable it to resume deliveries.", + ChannelHealthStatus.Unhealthy => "Slack channel is missing a configured destination (target).", + _ => "Slack channel diagnostics completed." 
+ }; + + return Task.FromResult(new ChannelHealthResult(status, message, metadata)); + } + + private static ChannelHealthStatus ResolveStatus(NotifyChannel channel) + { + if (!HasConfiguredTarget(channel)) + { + return ChannelHealthStatus.Unhealthy; + } + + if (!channel.Enabled) + { + return ChannelHealthStatus.Degraded; + } + + return ChannelHealthStatus.Healthy; + } + + private static bool HasConfiguredTarget(NotifyChannel channel) + => !string.IsNullOrWhiteSpace(channel.Config.Target); +} diff --git a/src/StellaOps.Notify.Connectors.Slack/SlackChannelTestProvider.cs b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Slack/SlackChannelTestProvider.cs similarity index 100% rename from src/StellaOps.Notify.Connectors.Slack/SlackChannelTestProvider.cs rename to src/Notify/__Libraries/StellaOps.Notify.Connectors.Slack/SlackChannelTestProvider.cs diff --git a/src/StellaOps.Notify.Connectors.Slack/SlackMetadataBuilder.cs b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Slack/SlackMetadataBuilder.cs similarity index 97% rename from src/StellaOps.Notify.Connectors.Slack/SlackMetadataBuilder.cs rename to src/Notify/__Libraries/StellaOps.Notify.Connectors.Slack/SlackMetadataBuilder.cs index b8a62d3a..ab064c87 100644 --- a/src/StellaOps.Notify.Connectors.Slack/SlackMetadataBuilder.cs +++ b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Slack/SlackMetadataBuilder.cs @@ -1,77 +1,77 @@ -using System; -using System.Collections.Generic; -using StellaOps.Notify.Connectors.Shared; -using StellaOps.Notify.Engine; -using StellaOps.Notify.Models; - -namespace StellaOps.Notify.Connectors.Slack; - -/// <summary> -/// Builds metadata for Slack previews and health diagnostics while redacting sensitive material. -/// </summary> -internal static class SlackMetadataBuilder -{ - private static readonly string[] RequiredScopes = { "chat:write", "chat:write.public" }; - - public static ConnectorMetadataBuilder CreateBuilder(ChannelTestPreviewContext context) - => CreateBaseBuilder( - channel: context.Channel, - target: context.Target, - timestamp: context.Timestamp, - properties: context.Channel.Config.Properties, - secretRef: context.Channel.Config.SecretRef); - - public static ConnectorMetadataBuilder CreateBuilder(ChannelHealthContext context) - => CreateBaseBuilder( - channel: context.Channel, - target: context.Target, - timestamp: context.Timestamp, - properties: context.Channel.Config.Properties, - secretRef: context.Channel.Config.SecretRef); - - public static IReadOnlyDictionary<string, string> Build(ChannelTestPreviewContext context) - => CreateBuilder(context).Build(); - - public static IReadOnlyDictionary<string, string> Build(ChannelHealthContext context) - => CreateBuilder(context).Build(); - - private static ConnectorMetadataBuilder CreateBaseBuilder( - NotifyChannel channel, - string target, - DateTimeOffset timestamp, - IReadOnlyDictionary<string, string>? 
properties, - string secretRef) - { - var builder = new ConnectorMetadataBuilder(); - - builder.AddTarget("slack.channel", target) - .Add("slack.scopes.required", string.Join(',', RequiredScopes)) - .AddTimestamp("slack.preview.generatedAt", timestamp) - .AddSecretRefHash("slack.secretRef.hash", secretRef) - .AddConfigTarget("slack.config.target", channel.Config.Target, target) - .AddConfigProperties("slack.config.", properties, (key, value) => RedactSlackValue(builder, key, value)); - - return builder; - } - - private static string RedactSlackValue(ConnectorMetadataBuilder builder, string key, string value) - { - if (LooksLikeSlackToken(value)) - { - return ConnectorValueRedactor.RedactToken(value); - } - - return builder.Redact(key, value); - } - - private static bool LooksLikeSlackToken(string value) - { - var trimmed = value.Trim(); - if (trimmed.Length < 6) - { - return false; - } - - return trimmed.StartsWith("xox", StringComparison.OrdinalIgnoreCase); - } -} +using System; +using System.Collections.Generic; +using StellaOps.Notify.Connectors.Shared; +using StellaOps.Notify.Engine; +using StellaOps.Notify.Models; + +namespace StellaOps.Notify.Connectors.Slack; + +/// <summary> +/// Builds metadata for Slack previews and health diagnostics while redacting sensitive material. +/// </summary> +internal static class SlackMetadataBuilder +{ + private static readonly string[] RequiredScopes = { "chat:write", "chat:write.public" }; + + public static ConnectorMetadataBuilder CreateBuilder(ChannelTestPreviewContext context) + => CreateBaseBuilder( + channel: context.Channel, + target: context.Target, + timestamp: context.Timestamp, + properties: context.Channel.Config.Properties, + secretRef: context.Channel.Config.SecretRef); + + public static ConnectorMetadataBuilder CreateBuilder(ChannelHealthContext context) + => CreateBaseBuilder( + channel: context.Channel, + target: context.Target, + timestamp: context.Timestamp, + properties: context.Channel.Config.Properties, + secretRef: context.Channel.Config.SecretRef); + + public static IReadOnlyDictionary<string, string> Build(ChannelTestPreviewContext context) + => CreateBuilder(context).Build(); + + public static IReadOnlyDictionary<string, string> Build(ChannelHealthContext context) + => CreateBuilder(context).Build(); + + private static ConnectorMetadataBuilder CreateBaseBuilder( + NotifyChannel channel, + string target, + DateTimeOffset timestamp, + IReadOnlyDictionary<string, string>? 
properties, + string secretRef) + { + var builder = new ConnectorMetadataBuilder(); + + builder.AddTarget("slack.channel", target) + .Add("slack.scopes.required", string.Join(',', RequiredScopes)) + .AddTimestamp("slack.preview.generatedAt", timestamp) + .AddSecretRefHash("slack.secretRef.hash", secretRef) + .AddConfigTarget("slack.config.target", channel.Config.Target, target) + .AddConfigProperties("slack.config.", properties, (key, value) => RedactSlackValue(builder, key, value)); + + return builder; + } + + private static string RedactSlackValue(ConnectorMetadataBuilder builder, string key, string value) + { + if (LooksLikeSlackToken(value)) + { + return ConnectorValueRedactor.RedactToken(value); + } + + return builder.Redact(key, value); + } + + private static bool LooksLikeSlackToken(string value) + { + var trimmed = value.Trim(); + if (trimmed.Length < 6) + { + return false; + } + + return trimmed.StartsWith("xox", StringComparison.OrdinalIgnoreCase); + } +} diff --git a/src/StellaOps.Notify.Connectors.Slack/StellaOps.Notify.Connectors.Slack.csproj b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Slack/StellaOps.Notify.Connectors.Slack.csproj similarity index 79% rename from src/StellaOps.Notify.Connectors.Slack/StellaOps.Notify.Connectors.Slack.csproj rename to src/Notify/__Libraries/StellaOps.Notify.Connectors.Slack/StellaOps.Notify.Connectors.Slack.csproj index 37a32ab8..0a2db805 100644 --- a/src/StellaOps.Notify.Connectors.Slack/StellaOps.Notify.Connectors.Slack.csproj +++ b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Slack/StellaOps.Notify.Connectors.Slack.csproj @@ -1,12 +1,13 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> - <ProjectReference Include="..\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj" /> + <ProjectReference Include="../../../__Libraries/StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj" /> <ProjectReference Include="..\StellaOps.Notify.Engine\StellaOps.Notify.Engine.csproj" /> <ProjectReference Include="..\StellaOps.Notify.Models\StellaOps.Notify.Models.csproj" /> <ProjectReference Include="..\StellaOps.Notify.Connectors.Shared\StellaOps.Notify.Connectors.Shared.csproj" /> @@ -17,4 +18,4 @@ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> </None> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/Notify/__Libraries/StellaOps.Notify.Connectors.Slack/TASKS.md b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Slack/TASKS.md new file mode 100644 index 00000000..d3d21a2b --- /dev/null +++ b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Slack/TASKS.md @@ -0,0 +1,2 @@ +# Notify Slack Connector Task Board (Sprint 15) +> Archived 2025-10-26 — connector scope now in `src/Notifier/StellaOps.Notifier` (Sprints 38–40). 
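
For orientation, the snippet below is a minimal usage sketch of the shared `ConnectorValueRedactor` helpers defined earlier in this patch. It is illustrative only and not part of the change itself; the token value is fabricated. It shows the behaviour the Slack builder's `RedactSlackValue` path relies on: keys containing sensitive fragments collapse to a constant mask, while token-shaped values keep a short stable prefix/suffix for correlation.

```csharp
using System;
using StellaOps.Notify.Connectors.Shared;

// Illustrative only: the token value is fabricated for this example.
var token = "xoxb-1234567890-abcdefghijklmn";

// Keys containing fragments such as "token", "secret", or "key" are classified as sensitive.
Console.WriteLine(ConnectorValueRedactor.IsSensitiveKey("slack.config.botToken")); // True
Console.WriteLine(ConnectorValueRedactor.IsSensitiveKey("slack.config.channel"));  // False

// Sensitive values collapse to a constant mask; token-shaped values keep a
// stable prefix/suffix so operators can still correlate them in diagnostics.
Console.WriteLine(ConnectorValueRedactor.RedactSecret(token)); // ***
Console.WriteLine(ConnectorValueRedactor.RedactToken(token));  // xoxb-1***klmn
```
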
diff --git a/src/StellaOps.Notify.Connectors.Slack/notify-plugin.json b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Slack/notify-plugin.json similarity index 96% rename from src/StellaOps.Notify.Connectors.Slack/notify-plugin.json rename to src/Notify/__Libraries/StellaOps.Notify.Connectors.Slack/notify-plugin.json index 95fb1dfb..1d7efde1 100644 --- a/src/StellaOps.Notify.Connectors.Slack/notify-plugin.json +++ b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Slack/notify-plugin.json @@ -1,19 +1,19 @@ -{ - "schemaVersion": "1.0", - "id": "stellaops.notify.connector.slack", - "displayName": "StellaOps Slack Notify Connector", - "version": "0.1.0-alpha", - "requiresRestart": true, - "entryPoint": { - "type": "dotnet", - "assembly": "StellaOps.Notify.Connectors.Slack.dll" - }, - "capabilities": [ - "notify-connector", - "slack" - ], - "metadata": { - "org.stellaops.notify.channel.type": "slack", - "org.stellaops.notify.connector.requiredScopes": "chat:write,chat:write.public" - } -} +{ + "schemaVersion": "1.0", + "id": "stellaops.notify.connector.slack", + "displayName": "StellaOps Slack Notify Connector", + "version": "0.1.0-alpha", + "requiresRestart": true, + "entryPoint": { + "type": "dotnet", + "assembly": "StellaOps.Notify.Connectors.Slack.dll" + }, + "capabilities": [ + "notify-connector", + "slack" + ], + "metadata": { + "org.stellaops.notify.channel.type": "slack", + "org.stellaops.notify.connector.requiredScopes": "chat:write,chat:write.public" + } +} diff --git a/src/StellaOps.Notify.Connectors.Teams/AGENTS.md b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Teams/AGENTS.md similarity index 100% rename from src/StellaOps.Notify.Connectors.Teams/AGENTS.md rename to src/Notify/__Libraries/StellaOps.Notify.Connectors.Teams/AGENTS.md diff --git a/src/StellaOps.Notify.Connectors.Teams/StellaOps.Notify.Connectors.Teams.csproj b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Teams/StellaOps.Notify.Connectors.Teams.csproj similarity index 79% rename from src/StellaOps.Notify.Connectors.Teams/StellaOps.Notify.Connectors.Teams.csproj rename to src/Notify/__Libraries/StellaOps.Notify.Connectors.Teams/StellaOps.Notify.Connectors.Teams.csproj index 37a32ab8..0a2db805 100644 --- a/src/StellaOps.Notify.Connectors.Teams/StellaOps.Notify.Connectors.Teams.csproj +++ b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Teams/StellaOps.Notify.Connectors.Teams.csproj @@ -1,12 +1,13 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> - <ProjectReference Include="..\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj" /> + <ProjectReference Include="../../../__Libraries/StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj" /> <ProjectReference Include="..\StellaOps.Notify.Engine\StellaOps.Notify.Engine.csproj" /> <ProjectReference Include="..\StellaOps.Notify.Models\StellaOps.Notify.Models.csproj" /> <ProjectReference Include="..\StellaOps.Notify.Connectors.Shared\StellaOps.Notify.Connectors.Shared.csproj" /> @@ -17,4 +18,4 @@ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> </None> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git 
a/src/StellaOps.Notify.Connectors.Teams/TASKS.md b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Teams/TASKS.md similarity index 79% rename from src/StellaOps.Notify.Connectors.Teams/TASKS.md rename to src/Notify/__Libraries/StellaOps.Notify.Connectors.Teams/TASKS.md index 02bc896e..3f65c71c 100644 --- a/src/StellaOps.Notify.Connectors.Teams/TASKS.md +++ b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Teams/TASKS.md @@ -1,4 +1,4 @@ # Notify Teams Connector Task Board (Sprint 15) -> Archived 2025-10-26 — connector work now owned by `src/StellaOps.Notifier` (Sprints 38–40). +> Archived 2025-10-26 — connector work now owned by `src/Notifier/StellaOps.Notifier` (Sprints 38–40). > Remark (2025-10-20): Teams test-send now emits Adaptive Card 1.5 payloads with legacy fallback text (`teams.fallbackText` metadata) and hashed webhook secret refs; coverage lives in `StellaOps.Notify.Connectors.Teams.Tests`. `/channels/{id}/health` shares the same metadata builder via `TeamsChannelHealthProvider`, ensuring webhook hashes and sensitive keys stay redacted. diff --git a/src/StellaOps.Notify.Connectors.Teams/TeamsChannelHealthProvider.cs b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Teams/TeamsChannelHealthProvider.cs similarity index 97% rename from src/StellaOps.Notify.Connectors.Teams/TeamsChannelHealthProvider.cs rename to src/Notify/__Libraries/StellaOps.Notify.Connectors.Teams/TeamsChannelHealthProvider.cs index 68072daa..ddef0a69 100644 --- a/src/StellaOps.Notify.Connectors.Teams/TeamsChannelHealthProvider.cs +++ b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Teams/TeamsChannelHealthProvider.cs @@ -1,57 +1,57 @@ -using System; -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.DependencyInjection; -using StellaOps.DependencyInjection; -using StellaOps.Notify.Engine; -using StellaOps.Notify.Models; - -namespace StellaOps.Notify.Connectors.Teams; - -[ServiceBinding(typeof(INotifyChannelHealthProvider), ServiceLifetime.Singleton)] -public sealed class TeamsChannelHealthProvider : INotifyChannelHealthProvider -{ - public NotifyChannelType ChannelType => NotifyChannelType.Teams; - - public Task<ChannelHealthResult> CheckAsync(ChannelHealthContext context, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(context); - cancellationToken.ThrowIfCancellationRequested(); - - var builder = TeamsMetadataBuilder.CreateBuilder(context) - .Add("teams.channel.enabled", context.Channel.Enabled ? "true" : "false") - .Add("teams.validation.targetPresent", HasConfiguredTarget(context.Channel) ? "true" : "false"); - - var metadata = builder.Build(); - var status = ResolveStatus(context.Channel); - var message = status switch - { - ChannelHealthStatus.Healthy => "Teams channel configuration validated.", - ChannelHealthStatus.Degraded => "Teams channel is disabled; enable it to resume deliveries.", - ChannelHealthStatus.Unhealthy => "Teams channel is missing a target/endpoint configuration.", - _ => "Teams channel diagnostics completed." 
- }; - - return Task.FromResult(new ChannelHealthResult(status, message, metadata)); - } - - private static ChannelHealthStatus ResolveStatus(NotifyChannel channel) - { - if (!HasConfiguredTarget(channel)) - { - return ChannelHealthStatus.Unhealthy; - } - - if (!channel.Enabled) - { - return ChannelHealthStatus.Degraded; - } - - return ChannelHealthStatus.Healthy; - } - - private static bool HasConfiguredTarget(NotifyChannel channel) - => !string.IsNullOrWhiteSpace(channel.Config.Endpoint) || - !string.IsNullOrWhiteSpace(channel.Config.Target); -} +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.DependencyInjection; +using StellaOps.DependencyInjection; +using StellaOps.Notify.Engine; +using StellaOps.Notify.Models; + +namespace StellaOps.Notify.Connectors.Teams; + +[ServiceBinding(typeof(INotifyChannelHealthProvider), ServiceLifetime.Singleton)] +public sealed class TeamsChannelHealthProvider : INotifyChannelHealthProvider +{ + public NotifyChannelType ChannelType => NotifyChannelType.Teams; + + public Task<ChannelHealthResult> CheckAsync(ChannelHealthContext context, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(context); + cancellationToken.ThrowIfCancellationRequested(); + + var builder = TeamsMetadataBuilder.CreateBuilder(context) + .Add("teams.channel.enabled", context.Channel.Enabled ? "true" : "false") + .Add("teams.validation.targetPresent", HasConfiguredTarget(context.Channel) ? "true" : "false"); + + var metadata = builder.Build(); + var status = ResolveStatus(context.Channel); + var message = status switch + { + ChannelHealthStatus.Healthy => "Teams channel configuration validated.", + ChannelHealthStatus.Degraded => "Teams channel is disabled; enable it to resume deliveries.", + ChannelHealthStatus.Unhealthy => "Teams channel is missing a target/endpoint configuration.", + _ => "Teams channel diagnostics completed." 
+ }; + + return Task.FromResult(new ChannelHealthResult(status, message, metadata)); + } + + private static ChannelHealthStatus ResolveStatus(NotifyChannel channel) + { + if (!HasConfiguredTarget(channel)) + { + return ChannelHealthStatus.Unhealthy; + } + + if (!channel.Enabled) + { + return ChannelHealthStatus.Degraded; + } + + return ChannelHealthStatus.Healthy; + } + + private static bool HasConfiguredTarget(NotifyChannel channel) + => !string.IsNullOrWhiteSpace(channel.Config.Endpoint) || + !string.IsNullOrWhiteSpace(channel.Config.Target); +} diff --git a/src/StellaOps.Notify.Connectors.Teams/TeamsChannelTestProvider.cs b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Teams/TeamsChannelTestProvider.cs similarity index 100% rename from src/StellaOps.Notify.Connectors.Teams/TeamsChannelTestProvider.cs rename to src/Notify/__Libraries/StellaOps.Notify.Connectors.Teams/TeamsChannelTestProvider.cs diff --git a/src/StellaOps.Notify.Connectors.Teams/TeamsMetadataBuilder.cs b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Teams/TeamsMetadataBuilder.cs similarity index 97% rename from src/StellaOps.Notify.Connectors.Teams/TeamsMetadataBuilder.cs rename to src/Notify/__Libraries/StellaOps.Notify.Connectors.Teams/TeamsMetadataBuilder.cs index 32dad484..f8848870 100644 --- a/src/StellaOps.Notify.Connectors.Teams/TeamsMetadataBuilder.cs +++ b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Teams/TeamsMetadataBuilder.cs @@ -1,89 +1,89 @@ -using System; -using System.Collections.Generic; -using StellaOps.Notify.Connectors.Shared; -using StellaOps.Notify.Engine; -using StellaOps.Notify.Models; - -namespace StellaOps.Notify.Connectors.Teams; - -/// <summary> -/// Builds metadata for Teams previews and health diagnostics while redacting sensitive material. -/// </summary> -internal static class TeamsMetadataBuilder -{ - internal const string CardVersion = "1.5"; - - private const int SecretHashLengthBytes = 8; - - public static ConnectorMetadataBuilder CreateBuilder(ChannelTestPreviewContext context, string fallbackText) - => CreateBaseBuilder( - channel: context.Channel, - target: context.Target, - timestamp: context.Timestamp, - fallbackText: fallbackText, - properties: context.Channel.Config.Properties, - secretRef: context.Channel.Config.SecretRef, - endpoint: context.Channel.Config.Endpoint); - - public static ConnectorMetadataBuilder CreateBuilder(ChannelHealthContext context) - => CreateBaseBuilder( - channel: context.Channel, - target: context.Target, - timestamp: context.Timestamp, - fallbackText: null, - properties: context.Channel.Config.Properties, - secretRef: context.Channel.Config.SecretRef, - endpoint: context.Channel.Config.Endpoint); - - public static IReadOnlyDictionary<string, string> Build(ChannelTestPreviewContext context, string fallbackText) - => CreateBuilder(context, fallbackText).Build(); - - public static IReadOnlyDictionary<string, string> Build(ChannelHealthContext context) - => CreateBuilder(context).Build(); - - private static ConnectorMetadataBuilder CreateBaseBuilder( - NotifyChannel channel, - string target, - DateTimeOffset timestamp, - string? fallbackText, - IReadOnlyDictionary<string, string>? properties, - string secretRef, - string? 
endpoint) - { - var builder = new ConnectorMetadataBuilder(); - - builder.AddTarget("teams.webhook", target) - .AddTimestamp("teams.preview.generatedAt", timestamp) - .Add("teams.card.version", CardVersion) - .AddSecretRefHash("teams.secretRef.hash", secretRef, SecretHashLengthBytes) - .AddConfigTarget("teams.config.target", channel.Config.Target, target) - .AddConfigEndpoint("teams.config.endpoint", endpoint) - .AddConfigProperties("teams.config.", properties, (key, value) => RedactTeamsValue(builder, key, value)); - - if (!string.IsNullOrWhiteSpace(fallbackText)) - { - builder.Add("teams.fallbackText", fallbackText!); - } - - return builder; - } - - private static string RedactTeamsValue(ConnectorMetadataBuilder builder, string key, string value) - { - if (ConnectorValueRedactor.IsSensitiveKey(key, builder.SensitiveFragments)) - { - return ConnectorValueRedactor.RedactSecret(value); - } - - var trimmed = value.Trim(); - if (LooksLikeGuid(trimmed)) - { - return ConnectorValueRedactor.RedactToken(trimmed, prefixLength: 8, suffixLength: 4); - } - - return trimmed; - } - - private static bool LooksLikeGuid(string value) - => value.Length >= 32 && Guid.TryParse(value, out _); -} +using System; +using System.Collections.Generic; +using StellaOps.Notify.Connectors.Shared; +using StellaOps.Notify.Engine; +using StellaOps.Notify.Models; + +namespace StellaOps.Notify.Connectors.Teams; + +/// <summary> +/// Builds metadata for Teams previews and health diagnostics while redacting sensitive material. +/// </summary> +internal static class TeamsMetadataBuilder +{ + internal const string CardVersion = "1.5"; + + private const int SecretHashLengthBytes = 8; + + public static ConnectorMetadataBuilder CreateBuilder(ChannelTestPreviewContext context, string fallbackText) + => CreateBaseBuilder( + channel: context.Channel, + target: context.Target, + timestamp: context.Timestamp, + fallbackText: fallbackText, + properties: context.Channel.Config.Properties, + secretRef: context.Channel.Config.SecretRef, + endpoint: context.Channel.Config.Endpoint); + + public static ConnectorMetadataBuilder CreateBuilder(ChannelHealthContext context) + => CreateBaseBuilder( + channel: context.Channel, + target: context.Target, + timestamp: context.Timestamp, + fallbackText: null, + properties: context.Channel.Config.Properties, + secretRef: context.Channel.Config.SecretRef, + endpoint: context.Channel.Config.Endpoint); + + public static IReadOnlyDictionary<string, string> Build(ChannelTestPreviewContext context, string fallbackText) + => CreateBuilder(context, fallbackText).Build(); + + public static IReadOnlyDictionary<string, string> Build(ChannelHealthContext context) + => CreateBuilder(context).Build(); + + private static ConnectorMetadataBuilder CreateBaseBuilder( + NotifyChannel channel, + string target, + DateTimeOffset timestamp, + string? fallbackText, + IReadOnlyDictionary<string, string>? properties, + string secretRef, + string? 
endpoint) + { + var builder = new ConnectorMetadataBuilder(); + + builder.AddTarget("teams.webhook", target) + .AddTimestamp("teams.preview.generatedAt", timestamp) + .Add("teams.card.version", CardVersion) + .AddSecretRefHash("teams.secretRef.hash", secretRef, SecretHashLengthBytes) + .AddConfigTarget("teams.config.target", channel.Config.Target, target) + .AddConfigEndpoint("teams.config.endpoint", endpoint) + .AddConfigProperties("teams.config.", properties, (key, value) => RedactTeamsValue(builder, key, value)); + + if (!string.IsNullOrWhiteSpace(fallbackText)) + { + builder.Add("teams.fallbackText", fallbackText!); + } + + return builder; + } + + private static string RedactTeamsValue(ConnectorMetadataBuilder builder, string key, string value) + { + if (ConnectorValueRedactor.IsSensitiveKey(key, builder.SensitiveFragments)) + { + return ConnectorValueRedactor.RedactSecret(value); + } + + var trimmed = value.Trim(); + if (LooksLikeGuid(trimmed)) + { + return ConnectorValueRedactor.RedactToken(trimmed, prefixLength: 8, suffixLength: 4); + } + + return trimmed; + } + + private static bool LooksLikeGuid(string value) + => value.Length >= 32 && Guid.TryParse(value, out _); +} diff --git a/src/StellaOps.Notify.Connectors.Teams/notify-plugin.json b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Teams/notify-plugin.json similarity index 96% rename from src/StellaOps.Notify.Connectors.Teams/notify-plugin.json rename to src/Notify/__Libraries/StellaOps.Notify.Connectors.Teams/notify-plugin.json index 78239596..756b1298 100644 --- a/src/StellaOps.Notify.Connectors.Teams/notify-plugin.json +++ b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Teams/notify-plugin.json @@ -1,19 +1,19 @@ -{ - "schemaVersion": "1.0", - "id": "stellaops.notify.connector.teams", - "displayName": "StellaOps Teams Notify Connector", - "version": "0.1.0-alpha", - "requiresRestart": true, - "entryPoint": { - "type": "dotnet", - "assembly": "StellaOps.Notify.Connectors.Teams.dll" - }, - "capabilities": [ - "notify-connector", - "teams" - ], - "metadata": { - "org.stellaops.notify.channel.type": "teams", - "org.stellaops.notify.connector.cardVersion": "1.5" - } -} +{ + "schemaVersion": "1.0", + "id": "stellaops.notify.connector.teams", + "displayName": "StellaOps Teams Notify Connector", + "version": "0.1.0-alpha", + "requiresRestart": true, + "entryPoint": { + "type": "dotnet", + "assembly": "StellaOps.Notify.Connectors.Teams.dll" + }, + "capabilities": [ + "notify-connector", + "teams" + ], + "metadata": { + "org.stellaops.notify.channel.type": "teams", + "org.stellaops.notify.connector.cardVersion": "1.5" + } +} diff --git a/src/StellaOps.Notify.Connectors.Webhook/AGENTS.md b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Webhook/AGENTS.md similarity index 100% rename from src/StellaOps.Notify.Connectors.Webhook/AGENTS.md rename to src/Notify/__Libraries/StellaOps.Notify.Connectors.Webhook/AGENTS.md diff --git a/src/StellaOps.Notify.Connectors.Webhook/StellaOps.Notify.Connectors.Webhook.csproj b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Webhook/StellaOps.Notify.Connectors.Webhook.csproj similarity index 79% rename from src/StellaOps.Notify.Connectors.Webhook/StellaOps.Notify.Connectors.Webhook.csproj rename to src/Notify/__Libraries/StellaOps.Notify.Connectors.Webhook/StellaOps.Notify.Connectors.Webhook.csproj index 37a32ab8..0a2db805 100644 --- a/src/StellaOps.Notify.Connectors.Webhook/StellaOps.Notify.Connectors.Webhook.csproj +++ 
b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Webhook/StellaOps.Notify.Connectors.Webhook.csproj @@ -1,12 +1,13 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> - <ProjectReference Include="..\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj" /> + <ProjectReference Include="../../../__Libraries/StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj" /> <ProjectReference Include="..\StellaOps.Notify.Engine\StellaOps.Notify.Engine.csproj" /> <ProjectReference Include="..\StellaOps.Notify.Models\StellaOps.Notify.Models.csproj" /> <ProjectReference Include="..\StellaOps.Notify.Connectors.Shared\StellaOps.Notify.Connectors.Shared.csproj" /> @@ -17,4 +18,4 @@ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> </None> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/Notify/__Libraries/StellaOps.Notify.Connectors.Webhook/TASKS.md b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Webhook/TASKS.md new file mode 100644 index 00000000..d5b26fe8 --- /dev/null +++ b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Webhook/TASKS.md @@ -0,0 +1,2 @@ +# Notify Webhook Connector Task Board (Sprint 15) +> Archived 2025-10-26 — webhook connector maintained in `src/Notifier/StellaOps.Notifier` (Sprints 38–40). diff --git a/src/StellaOps.Notify.Connectors.Webhook/WebhookChannelTestProvider.cs b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Webhook/WebhookChannelTestProvider.cs similarity index 100% rename from src/StellaOps.Notify.Connectors.Webhook/WebhookChannelTestProvider.cs rename to src/Notify/__Libraries/StellaOps.Notify.Connectors.Webhook/WebhookChannelTestProvider.cs diff --git a/src/StellaOps.Notify.Connectors.Webhook/WebhookMetadataBuilder.cs b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Webhook/WebhookMetadataBuilder.cs similarity index 97% rename from src/StellaOps.Notify.Connectors.Webhook/WebhookMetadataBuilder.cs rename to src/Notify/__Libraries/StellaOps.Notify.Connectors.Webhook/WebhookMetadataBuilder.cs index adcfa915..4c0c5474 100644 --- a/src/StellaOps.Notify.Connectors.Webhook/WebhookMetadataBuilder.cs +++ b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Webhook/WebhookMetadataBuilder.cs @@ -1,53 +1,53 @@ -using System.Collections.Generic; -using StellaOps.Notify.Connectors.Shared; -using StellaOps.Notify.Engine; -using StellaOps.Notify.Models; - -namespace StellaOps.Notify.Connectors.Webhook; - -/// <summary> -/// Builds metadata for Webhook previews and health diagnostics. 
-/// </summary> -internal static class WebhookMetadataBuilder -{ - private const int SecretHashLengthBytes = 8; - - public static ConnectorMetadataBuilder CreateBuilder(ChannelTestPreviewContext context) - => CreateBaseBuilder( - channel: context.Channel, - target: context.Target, - timestamp: context.Timestamp, - properties: context.Channel.Config.Properties, - secretRef: context.Channel.Config.SecretRef); - - public static ConnectorMetadataBuilder CreateBuilder(ChannelHealthContext context) - => CreateBaseBuilder( - channel: context.Channel, - target: context.Target, - timestamp: context.Timestamp, - properties: context.Channel.Config.Properties, - secretRef: context.Channel.Config.SecretRef); - - public static IReadOnlyDictionary<string, string> Build(ChannelTestPreviewContext context) - => CreateBuilder(context).Build(); - - public static IReadOnlyDictionary<string, string> Build(ChannelHealthContext context) - => CreateBuilder(context).Build(); - - private static ConnectorMetadataBuilder CreateBaseBuilder( - NotifyChannel channel, - string target, - DateTimeOffset timestamp, - IReadOnlyDictionary<string, string>? properties, - string secretRef) - { - var builder = new ConnectorMetadataBuilder(); - - builder.AddTarget("webhook.endpoint", target) - .AddTimestamp("webhook.preview.generatedAt", timestamp) - .AddSecretRefHash("webhook.secretRef.hash", secretRef, SecretHashLengthBytes) - .AddConfigProperties("webhook.config.", properties); - - return builder; - } -} +using System.Collections.Generic; +using StellaOps.Notify.Connectors.Shared; +using StellaOps.Notify.Engine; +using StellaOps.Notify.Models; + +namespace StellaOps.Notify.Connectors.Webhook; + +/// <summary> +/// Builds metadata for Webhook previews and health diagnostics. +/// </summary> +internal static class WebhookMetadataBuilder +{ + private const int SecretHashLengthBytes = 8; + + public static ConnectorMetadataBuilder CreateBuilder(ChannelTestPreviewContext context) + => CreateBaseBuilder( + channel: context.Channel, + target: context.Target, + timestamp: context.Timestamp, + properties: context.Channel.Config.Properties, + secretRef: context.Channel.Config.SecretRef); + + public static ConnectorMetadataBuilder CreateBuilder(ChannelHealthContext context) + => CreateBaseBuilder( + channel: context.Channel, + target: context.Target, + timestamp: context.Timestamp, + properties: context.Channel.Config.Properties, + secretRef: context.Channel.Config.SecretRef); + + public static IReadOnlyDictionary<string, string> Build(ChannelTestPreviewContext context) + => CreateBuilder(context).Build(); + + public static IReadOnlyDictionary<string, string> Build(ChannelHealthContext context) + => CreateBuilder(context).Build(); + + private static ConnectorMetadataBuilder CreateBaseBuilder( + NotifyChannel channel, + string target, + DateTimeOffset timestamp, + IReadOnlyDictionary<string, string>? 
properties, + string secretRef) + { + var builder = new ConnectorMetadataBuilder(); + + builder.AddTarget("webhook.endpoint", target) + .AddTimestamp("webhook.preview.generatedAt", timestamp) + .AddSecretRefHash("webhook.secretRef.hash", secretRef, SecretHashLengthBytes) + .AddConfigProperties("webhook.config.", properties); + + return builder; + } +} diff --git a/src/StellaOps.Notify.Connectors.Webhook/notify-plugin.json b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Webhook/notify-plugin.json similarity index 95% rename from src/StellaOps.Notify.Connectors.Webhook/notify-plugin.json rename to src/Notify/__Libraries/StellaOps.Notify.Connectors.Webhook/notify-plugin.json index 32b4ead7..fb17679b 100644 --- a/src/StellaOps.Notify.Connectors.Webhook/notify-plugin.json +++ b/src/Notify/__Libraries/StellaOps.Notify.Connectors.Webhook/notify-plugin.json @@ -1,18 +1,18 @@ -{ - "schemaVersion": "1.0", - "id": "stellaops.notify.connector.webhook", - "displayName": "StellaOps Webhook Notify Connector", - "version": "0.1.0-alpha", - "requiresRestart": true, - "entryPoint": { - "type": "dotnet", - "assembly": "StellaOps.Notify.Connectors.Webhook.dll" - }, - "capabilities": [ - "notify-connector", - "webhook" - ], - "metadata": { - "org.stellaops.notify.channel.type": "webhook" - } -} +{ + "schemaVersion": "1.0", + "id": "stellaops.notify.connector.webhook", + "displayName": "StellaOps Webhook Notify Connector", + "version": "0.1.0-alpha", + "requiresRestart": true, + "entryPoint": { + "type": "dotnet", + "assembly": "StellaOps.Notify.Connectors.Webhook.dll" + }, + "capabilities": [ + "notify-connector", + "webhook" + ], + "metadata": { + "org.stellaops.notify.channel.type": "webhook" + } +} diff --git a/src/StellaOps.Notify.Engine/AGENTS.md b/src/Notify/__Libraries/StellaOps.Notify.Engine/AGENTS.md similarity index 100% rename from src/StellaOps.Notify.Engine/AGENTS.md rename to src/Notify/__Libraries/StellaOps.Notify.Engine/AGENTS.md diff --git a/src/StellaOps.Notify.Engine/ChannelHealthContracts.cs b/src/Notify/__Libraries/StellaOps.Notify.Engine/ChannelHealthContracts.cs similarity index 96% rename from src/StellaOps.Notify.Engine/ChannelHealthContracts.cs rename to src/Notify/__Libraries/StellaOps.Notify.Engine/ChannelHealthContracts.cs index 47449d09..29a50bed 100644 --- a/src/StellaOps.Notify.Engine/ChannelHealthContracts.cs +++ b/src/Notify/__Libraries/StellaOps.Notify.Engine/ChannelHealthContracts.cs @@ -1,51 +1,51 @@ -using System; -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; -using StellaOps.Notify.Models; - -namespace StellaOps.Notify.Engine; - -/// <summary> -/// Contract implemented by channel plug-ins to provide health diagnostics. -/// </summary> -public interface INotifyChannelHealthProvider -{ - /// <summary> - /// Channel type supported by the provider. - /// </summary> - NotifyChannelType ChannelType { get; } - - /// <summary> - /// Executes a health check for the supplied channel. - /// </summary> - Task<ChannelHealthResult> CheckAsync(ChannelHealthContext context, CancellationToken cancellationToken); -} - -/// <summary> -/// Immutable context describing a channel health request. -/// </summary> -public sealed record ChannelHealthContext( - string TenantId, - NotifyChannel Channel, - string Target, - DateTimeOffset Timestamp, - string TraceId); - -/// <summary> -/// Result returned by channel plug-ins when reporting health diagnostics. 
-/// </summary> -public sealed record ChannelHealthResult( - ChannelHealthStatus Status, - string? Message, - IReadOnlyDictionary<string, string> Metadata); - -/// <summary> -/// Supported channel health states. -/// </summary> -public enum ChannelHealthStatus -{ - Healthy, - Degraded, - Unhealthy -} +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Notify.Models; + +namespace StellaOps.Notify.Engine; + +/// <summary> +/// Contract implemented by channel plug-ins to provide health diagnostics. +/// </summary> +public interface INotifyChannelHealthProvider +{ + /// <summary> + /// Channel type supported by the provider. + /// </summary> + NotifyChannelType ChannelType { get; } + + /// <summary> + /// Executes a health check for the supplied channel. + /// </summary> + Task<ChannelHealthResult> CheckAsync(ChannelHealthContext context, CancellationToken cancellationToken); +} + +/// <summary> +/// Immutable context describing a channel health request. +/// </summary> +public sealed record ChannelHealthContext( + string TenantId, + NotifyChannel Channel, + string Target, + DateTimeOffset Timestamp, + string TraceId); + +/// <summary> +/// Result returned by channel plug-ins when reporting health diagnostics. +/// </summary> +public sealed record ChannelHealthResult( + ChannelHealthStatus Status, + string? Message, + IReadOnlyDictionary<string, string> Metadata); + +/// <summary> +/// Supported channel health states. +/// </summary> +public enum ChannelHealthStatus +{ + Healthy, + Degraded, + Unhealthy +} diff --git a/src/StellaOps.Notify.Engine/ChannelTestPreviewContracts.cs b/src/Notify/__Libraries/StellaOps.Notify.Engine/ChannelTestPreviewContracts.cs similarity index 100% rename from src/StellaOps.Notify.Engine/ChannelTestPreviewContracts.cs rename to src/Notify/__Libraries/StellaOps.Notify.Engine/ChannelTestPreviewContracts.cs diff --git a/src/StellaOps.Notify.Engine/INotifyRuleEvaluator.cs b/src/Notify/__Libraries/StellaOps.Notify.Engine/INotifyRuleEvaluator.cs similarity index 96% rename from src/StellaOps.Notify.Engine/INotifyRuleEvaluator.cs rename to src/Notify/__Libraries/StellaOps.Notify.Engine/INotifyRuleEvaluator.cs index 9b4ad1f6..a59b5617 100644 --- a/src/StellaOps.Notify.Engine/INotifyRuleEvaluator.cs +++ b/src/Notify/__Libraries/StellaOps.Notify.Engine/INotifyRuleEvaluator.cs @@ -1,28 +1,28 @@ -using System; -using System.Collections.Generic; -using System.Collections.Immutable; -using StellaOps.Notify.Models; - -namespace StellaOps.Notify.Engine; - -/// <summary> -/// Evaluates Notify rules against platform events. -/// </summary> -public interface INotifyRuleEvaluator -{ - /// <summary> - /// Evaluates a single rule against an event and returns the match outcome. - /// </summary> - NotifyRuleEvaluationOutcome Evaluate( - NotifyRule rule, - NotifyEvent @event, - DateTimeOffset? evaluationTimestamp = null); - - /// <summary> - /// Evaluates a collection of rules against an event. - /// </summary> - ImmutableArray<NotifyRuleEvaluationOutcome> Evaluate( - IEnumerable<NotifyRule> rules, - NotifyEvent @event, - DateTimeOffset? evaluationTimestamp = null); -} +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using StellaOps.Notify.Models; + +namespace StellaOps.Notify.Engine; + +/// <summary> +/// Evaluates Notify rules against platform events. 
+/// </summary> +public interface INotifyRuleEvaluator +{ + /// <summary> + /// Evaluates a single rule against an event and returns the match outcome. + /// </summary> + NotifyRuleEvaluationOutcome Evaluate( + NotifyRule rule, + NotifyEvent @event, + DateTimeOffset? evaluationTimestamp = null); + + /// <summary> + /// Evaluates a collection of rules against an event. + /// </summary> + ImmutableArray<NotifyRuleEvaluationOutcome> Evaluate( + IEnumerable<NotifyRule> rules, + NotifyEvent @event, + DateTimeOffset? evaluationTimestamp = null); +} diff --git a/src/StellaOps.Notify.Engine/NotifyRuleEvaluationOutcome.cs b/src/Notify/__Libraries/StellaOps.Notify.Engine/NotifyRuleEvaluationOutcome.cs similarity index 96% rename from src/StellaOps.Notify.Engine/NotifyRuleEvaluationOutcome.cs rename to src/Notify/__Libraries/StellaOps.Notify.Engine/NotifyRuleEvaluationOutcome.cs index 6da0308e..9450034e 100644 --- a/src/StellaOps.Notify.Engine/NotifyRuleEvaluationOutcome.cs +++ b/src/Notify/__Libraries/StellaOps.Notify.Engine/NotifyRuleEvaluationOutcome.cs @@ -1,44 +1,44 @@ -using System; -using System.Collections.Immutable; -using StellaOps.Notify.Models; - -namespace StellaOps.Notify.Engine; - -/// <summary> -/// Outcome produced when evaluating a notify rule against an event. -/// </summary> -public sealed record NotifyRuleEvaluationOutcome -{ - private NotifyRuleEvaluationOutcome( - NotifyRule rule, - bool isMatch, - ImmutableArray<NotifyRuleAction> actions, - DateTimeOffset? matchedAt, - string? reason) - { - Rule = rule ?? throw new ArgumentNullException(nameof(rule)); - IsMatch = isMatch; - Actions = actions; - MatchedAt = matchedAt; - Reason = reason; - } - - public NotifyRule Rule { get; } - - public bool IsMatch { get; } - - public ImmutableArray<NotifyRuleAction> Actions { get; } - - public DateTimeOffset? MatchedAt { get; } - - public string? Reason { get; } - - public static NotifyRuleEvaluationOutcome NotMatched(NotifyRule rule, string reason) - => new(rule, false, ImmutableArray<NotifyRuleAction>.Empty, null, reason); - - public static NotifyRuleEvaluationOutcome Matched( - NotifyRule rule, - ImmutableArray<NotifyRuleAction> actions, - DateTimeOffset matchedAt) - => new(rule, true, actions, matchedAt, null); -} +using System; +using System.Collections.Immutable; +using StellaOps.Notify.Models; + +namespace StellaOps.Notify.Engine; + +/// <summary> +/// Outcome produced when evaluating a notify rule against an event. +/// </summary> +public sealed record NotifyRuleEvaluationOutcome +{ + private NotifyRuleEvaluationOutcome( + NotifyRule rule, + bool isMatch, + ImmutableArray<NotifyRuleAction> actions, + DateTimeOffset? matchedAt, + string? reason) + { + Rule = rule ?? throw new ArgumentNullException(nameof(rule)); + IsMatch = isMatch; + Actions = actions; + MatchedAt = matchedAt; + Reason = reason; + } + + public NotifyRule Rule { get; } + + public bool IsMatch { get; } + + public ImmutableArray<NotifyRuleAction> Actions { get; } + + public DateTimeOffset? MatchedAt { get; } + + public string? 
Reason { get; } + + public static NotifyRuleEvaluationOutcome NotMatched(NotifyRule rule, string reason) + => new(rule, false, ImmutableArray<NotifyRuleAction>.Empty, null, reason); + + public static NotifyRuleEvaluationOutcome Matched( + NotifyRule rule, + ImmutableArray<NotifyRuleAction> actions, + DateTimeOffset matchedAt) + => new(rule, true, actions, matchedAt, null); +} diff --git a/src/StellaOps.Notify.Engine/StellaOps.Notify.Engine.csproj b/src/Notify/__Libraries/StellaOps.Notify.Engine/StellaOps.Notify.Engine.csproj similarity index 100% rename from src/StellaOps.Notify.Engine/StellaOps.Notify.Engine.csproj rename to src/Notify/__Libraries/StellaOps.Notify.Engine/StellaOps.Notify.Engine.csproj diff --git a/src/StellaOps.Notify.Engine/TASKS.md b/src/Notify/__Libraries/StellaOps.Notify.Engine/TASKS.md similarity index 67% rename from src/StellaOps.Notify.Engine/TASKS.md rename to src/Notify/__Libraries/StellaOps.Notify.Engine/TASKS.md index d15cb183..5d73acec 100644 --- a/src/StellaOps.Notify.Engine/TASKS.md +++ b/src/Notify/__Libraries/StellaOps.Notify.Engine/TASKS.md @@ -1,2 +1,2 @@ # Notify Engine Task Board (Sprint 15) -> Archived 2025-10-26 — runtime responsibilities moved to `src/StellaOps.Notifier` (Sprints 38–40). +> Archived 2025-10-26 — runtime responsibilities moved to `src/Notifier/StellaOps.Notifier` (Sprints 38–40). diff --git a/src/StellaOps.Notify.Models/AGENTS.md b/src/Notify/__Libraries/StellaOps.Notify.Models/AGENTS.md similarity index 100% rename from src/StellaOps.Notify.Models/AGENTS.md rename to src/Notify/__Libraries/StellaOps.Notify.Models/AGENTS.md diff --git a/src/StellaOps.Notify.Models/Iso8601DurationConverter.cs b/src/Notify/__Libraries/StellaOps.Notify.Models/Iso8601DurationConverter.cs similarity index 100% rename from src/StellaOps.Notify.Models/Iso8601DurationConverter.cs rename to src/Notify/__Libraries/StellaOps.Notify.Models/Iso8601DurationConverter.cs diff --git a/src/StellaOps.Notify.Models/NotifyCanonicalJsonSerializer.cs b/src/Notify/__Libraries/StellaOps.Notify.Models/NotifyCanonicalJsonSerializer.cs similarity index 100% rename from src/StellaOps.Notify.Models/NotifyCanonicalJsonSerializer.cs rename to src/Notify/__Libraries/StellaOps.Notify.Models/NotifyCanonicalJsonSerializer.cs diff --git a/src/StellaOps.Notify.Models/NotifyChannel.cs b/src/Notify/__Libraries/StellaOps.Notify.Models/NotifyChannel.cs similarity index 100% rename from src/StellaOps.Notify.Models/NotifyChannel.cs rename to src/Notify/__Libraries/StellaOps.Notify.Models/NotifyChannel.cs diff --git a/src/StellaOps.Notify.Models/NotifyDelivery.cs b/src/Notify/__Libraries/StellaOps.Notify.Models/NotifyDelivery.cs similarity index 100% rename from src/StellaOps.Notify.Models/NotifyDelivery.cs rename to src/Notify/__Libraries/StellaOps.Notify.Models/NotifyDelivery.cs diff --git a/src/StellaOps.Notify.Models/NotifyEnums.cs b/src/Notify/__Libraries/StellaOps.Notify.Models/NotifyEnums.cs similarity index 100% rename from src/StellaOps.Notify.Models/NotifyEnums.cs rename to src/Notify/__Libraries/StellaOps.Notify.Models/NotifyEnums.cs diff --git a/src/StellaOps.Notify.Models/NotifyEvent.cs b/src/Notify/__Libraries/StellaOps.Notify.Models/NotifyEvent.cs similarity index 100% rename from src/StellaOps.Notify.Models/NotifyEvent.cs rename to src/Notify/__Libraries/StellaOps.Notify.Models/NotifyEvent.cs diff --git a/src/StellaOps.Notify.Models/NotifyEventKinds.cs b/src/Notify/__Libraries/StellaOps.Notify.Models/NotifyEventKinds.cs similarity index 100% rename from 
src/StellaOps.Notify.Models/NotifyEventKinds.cs rename to src/Notify/__Libraries/StellaOps.Notify.Models/NotifyEventKinds.cs diff --git a/src/StellaOps.Notify.Models/NotifyRule.cs b/src/Notify/__Libraries/StellaOps.Notify.Models/NotifyRule.cs similarity index 100% rename from src/StellaOps.Notify.Models/NotifyRule.cs rename to src/Notify/__Libraries/StellaOps.Notify.Models/NotifyRule.cs diff --git a/src/StellaOps.Notify.Models/NotifySchemaMigration.cs b/src/Notify/__Libraries/StellaOps.Notify.Models/NotifySchemaMigration.cs similarity index 100% rename from src/StellaOps.Notify.Models/NotifySchemaMigration.cs rename to src/Notify/__Libraries/StellaOps.Notify.Models/NotifySchemaMigration.cs diff --git a/src/StellaOps.Notify.Models/NotifySchemaVersions.cs b/src/Notify/__Libraries/StellaOps.Notify.Models/NotifySchemaVersions.cs similarity index 100% rename from src/StellaOps.Notify.Models/NotifySchemaVersions.cs rename to src/Notify/__Libraries/StellaOps.Notify.Models/NotifySchemaVersions.cs diff --git a/src/StellaOps.Notify.Models/NotifyTemplate.cs b/src/Notify/__Libraries/StellaOps.Notify.Models/NotifyTemplate.cs similarity index 100% rename from src/StellaOps.Notify.Models/NotifyTemplate.cs rename to src/Notify/__Libraries/StellaOps.Notify.Models/NotifyTemplate.cs diff --git a/src/StellaOps.Notify.Models/NotifyValidation.cs b/src/Notify/__Libraries/StellaOps.Notify.Models/NotifyValidation.cs similarity index 100% rename from src/StellaOps.Notify.Models/NotifyValidation.cs rename to src/Notify/__Libraries/StellaOps.Notify.Models/NotifyValidation.cs diff --git a/src/StellaOps.Notify.Models/StellaOps.Notify.Models.csproj b/src/Notify/__Libraries/StellaOps.Notify.Models/StellaOps.Notify.Models.csproj similarity index 100% rename from src/StellaOps.Notify.Models/StellaOps.Notify.Models.csproj rename to src/Notify/__Libraries/StellaOps.Notify.Models/StellaOps.Notify.Models.csproj diff --git a/src/Notify/__Libraries/StellaOps.Notify.Models/TASKS.md b/src/Notify/__Libraries/StellaOps.Notify.Models/TASKS.md new file mode 100644 index 00000000..14c202c2 --- /dev/null +++ b/src/Notify/__Libraries/StellaOps.Notify.Models/TASKS.md @@ -0,0 +1,2 @@ +# Notify Models Task Board (Sprint 15) +> Archived 2025-10-26 — scope moved to `src/Notifier/StellaOps.Notifier` (Sprints 38–40). 
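
The channel health contracts relocated above fix the result shape that every connector health provider returns. The sketch below is illustrative only and uses just the types defined in `ChannelHealthContracts.cs`; the metadata keys and messages are sample values mirroring the Slack provider shown earlier in this patch.

```csharp
using System.Collections.Generic;
using StellaOps.Notify.Engine;

// Sample metadata mirroring what SlackChannelHealthProvider emits.
var metadata = new Dictionary<string, string>
{
    ["slack.channel.enabled"] = "true",
    ["slack.validation.targetPresent"] = "true"
};

// A healthy result carries a human-readable message plus the redacted metadata bag.
var healthy = new ChannelHealthResult(
    ChannelHealthStatus.Healthy,
    "Slack channel configuration validated.",
    metadata);

// An unhealthy result uses the same record; only status, message, and metadata differ.
var unhealthy = new ChannelHealthResult(
    ChannelHealthStatus.Unhealthy,
    "Slack channel is missing a configured destination (target).",
    new Dictionary<string, string>());
```
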
diff --git a/src/StellaOps.Notify.Queue/AGENTS.md b/src/Notify/__Libraries/StellaOps.Notify.Queue/AGENTS.md similarity index 100% rename from src/StellaOps.Notify.Queue/AGENTS.md rename to src/Notify/__Libraries/StellaOps.Notify.Queue/AGENTS.md diff --git a/src/StellaOps.Notify.Queue/Nats/NatsNotifyDeliveryLease.cs b/src/Notify/__Libraries/StellaOps.Notify.Queue/Nats/NatsNotifyDeliveryLease.cs similarity index 97% rename from src/StellaOps.Notify.Queue/Nats/NatsNotifyDeliveryLease.cs rename to src/Notify/__Libraries/StellaOps.Notify.Queue/Nats/NatsNotifyDeliveryLease.cs index 19e57aac..d51d21f7 100644 --- a/src/StellaOps.Notify.Queue/Nats/NatsNotifyDeliveryLease.cs +++ b/src/Notify/__Libraries/StellaOps.Notify.Queue/Nats/NatsNotifyDeliveryLease.cs @@ -1,80 +1,80 @@ -using System; -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; -using NATS.Client.JetStream; - -namespace StellaOps.Notify.Queue.Nats; - -internal sealed class NatsNotifyDeliveryLease : INotifyQueueLease<NotifyDeliveryQueueMessage> -{ - private readonly NatsNotifyDeliveryQueue _queue; - private readonly NatsJSMsg<byte[]> _message; - private int _completed; - - internal NatsNotifyDeliveryLease( - NatsNotifyDeliveryQueue queue, - NatsJSMsg<byte[]> message, - string messageId, - NotifyDeliveryQueueMessage payload, - int attempt, - string consumer, - DateTimeOffset enqueuedAt, - DateTimeOffset leaseExpiresAt, - string idempotencyKey) - { - _queue = queue ?? throw new ArgumentNullException(nameof(queue)); - _message = message; - MessageId = messageId ?? throw new ArgumentNullException(nameof(messageId)); - Message = payload ?? throw new ArgumentNullException(nameof(payload)); - Attempt = attempt; - Consumer = consumer ?? throw new ArgumentNullException(nameof(consumer)); - EnqueuedAt = enqueuedAt; - LeaseExpiresAt = leaseExpiresAt; - IdempotencyKey = idempotencyKey ?? payload.IdempotencyKey; - } - - public string MessageId { get; } - - public int Attempt { get; internal set; } - - public DateTimeOffset EnqueuedAt { get; } - - public DateTimeOffset LeaseExpiresAt { get; private set; } - - public string Consumer { get; } - - public string Stream => Message.Stream; - - public string TenantId => Message.TenantId; - - public string? PartitionKey => Message.PartitionKey; - - public string IdempotencyKey { get; } - - public string? 
TraceId => Message.TraceId; - - public IReadOnlyDictionary<string, string> Attributes => Message.Attributes; - - public NotifyDeliveryQueueMessage Message { get; } - - internal NatsJSMsg<byte[]> RawMessage => _message; - - public Task AcknowledgeAsync(CancellationToken cancellationToken = default) - => _queue.AcknowledgeAsync(this, cancellationToken); - - public Task RenewAsync(TimeSpan leaseDuration, CancellationToken cancellationToken = default) - => _queue.RenewLeaseAsync(this, leaseDuration, cancellationToken); - - public Task ReleaseAsync(NotifyQueueReleaseDisposition disposition, CancellationToken cancellationToken = default) - => _queue.ReleaseAsync(this, disposition, cancellationToken); - - public Task DeadLetterAsync(string reason, CancellationToken cancellationToken = default) - => _queue.DeadLetterAsync(this, reason, cancellationToken); - - internal bool TryBeginCompletion() - => Interlocked.CompareExchange(ref _completed, 1, 0) == 0; - - internal void RefreshLease(DateTimeOffset expiresAt) - => LeaseExpiresAt = expiresAt; -} +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using NATS.Client.JetStream; + +namespace StellaOps.Notify.Queue.Nats; + +internal sealed class NatsNotifyDeliveryLease : INotifyQueueLease<NotifyDeliveryQueueMessage> +{ + private readonly NatsNotifyDeliveryQueue _queue; + private readonly NatsJSMsg<byte[]> _message; + private int _completed; + + internal NatsNotifyDeliveryLease( + NatsNotifyDeliveryQueue queue, + NatsJSMsg<byte[]> message, + string messageId, + NotifyDeliveryQueueMessage payload, + int attempt, + string consumer, + DateTimeOffset enqueuedAt, + DateTimeOffset leaseExpiresAt, + string idempotencyKey) + { + _queue = queue ?? throw new ArgumentNullException(nameof(queue)); + _message = message; + MessageId = messageId ?? throw new ArgumentNullException(nameof(messageId)); + Message = payload ?? throw new ArgumentNullException(nameof(payload)); + Attempt = attempt; + Consumer = consumer ?? throw new ArgumentNullException(nameof(consumer)); + EnqueuedAt = enqueuedAt; + LeaseExpiresAt = leaseExpiresAt; + IdempotencyKey = idempotencyKey ?? payload.IdempotencyKey; + } + + public string MessageId { get; } + + public int Attempt { get; internal set; } + + public DateTimeOffset EnqueuedAt { get; } + + public DateTimeOffset LeaseExpiresAt { get; private set; } + + public string Consumer { get; } + + public string Stream => Message.Stream; + + public string TenantId => Message.TenantId; + + public string? PartitionKey => Message.PartitionKey; + + public string IdempotencyKey { get; } + + public string? 
TraceId => Message.TraceId; + + public IReadOnlyDictionary<string, string> Attributes => Message.Attributes; + + public NotifyDeliveryQueueMessage Message { get; } + + internal NatsJSMsg<byte[]> RawMessage => _message; + + public Task AcknowledgeAsync(CancellationToken cancellationToken = default) + => _queue.AcknowledgeAsync(this, cancellationToken); + + public Task RenewAsync(TimeSpan leaseDuration, CancellationToken cancellationToken = default) + => _queue.RenewLeaseAsync(this, leaseDuration, cancellationToken); + + public Task ReleaseAsync(NotifyQueueReleaseDisposition disposition, CancellationToken cancellationToken = default) + => _queue.ReleaseAsync(this, disposition, cancellationToken); + + public Task DeadLetterAsync(string reason, CancellationToken cancellationToken = default) + => _queue.DeadLetterAsync(this, reason, cancellationToken); + + internal bool TryBeginCompletion() + => Interlocked.CompareExchange(ref _completed, 1, 0) == 0; + + internal void RefreshLease(DateTimeOffset expiresAt) + => LeaseExpiresAt = expiresAt; +} diff --git a/src/StellaOps.Notify.Queue/Nats/NatsNotifyDeliveryQueue.cs b/src/Notify/__Libraries/StellaOps.Notify.Queue/Nats/NatsNotifyDeliveryQueue.cs similarity index 97% rename from src/StellaOps.Notify.Queue/Nats/NatsNotifyDeliveryQueue.cs rename to src/Notify/__Libraries/StellaOps.Notify.Queue/Nats/NatsNotifyDeliveryQueue.cs index 25c49aa2..11459b39 100644 --- a/src/StellaOps.Notify.Queue/Nats/NatsNotifyDeliveryQueue.cs +++ b/src/Notify/__Libraries/StellaOps.Notify.Queue/Nats/NatsNotifyDeliveryQueue.cs @@ -1,697 +1,697 @@ -using System; -using System.Collections.Concurrent; -using System.Collections.Generic; -using System.Collections.ObjectModel; -using System.Globalization; -using System.Linq; -using System.Text; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using NATS.Client.Core; -using NATS.Client.JetStream; -using NATS.Client.JetStream.Models; -using StellaOps.Notify.Models; - -namespace StellaOps.Notify.Queue.Nats; - -internal sealed class NatsNotifyDeliveryQueue : INotifyDeliveryQueue, IAsyncDisposable -{ - private const string TransportName = "nats"; - - private static readonly INatsSerializer<byte[]> PayloadSerializer = NatsRawSerializer<byte[]>.Default; - - private readonly NotifyDeliveryQueueOptions _queueOptions; - private readonly NotifyNatsDeliveryQueueOptions _options; - private readonly ILogger<NatsNotifyDeliveryQueue> _logger; - private readonly TimeProvider _timeProvider; - private readonly SemaphoreSlim _connectionGate = new(1, 1); - private readonly Func<NatsOpts, CancellationToken, ValueTask<NatsConnection>> _connectionFactory; - - private NatsConnection? _connection; - private NatsJSContext? _jsContext; - private INatsJSConsumer? _consumer; - private bool _disposed; - - public NatsNotifyDeliveryQueue( - NotifyDeliveryQueueOptions queueOptions, - NotifyNatsDeliveryQueueOptions options, - ILogger<NatsNotifyDeliveryQueue> logger, - TimeProvider timeProvider, - Func<NatsOpts, CancellationToken, ValueTask<NatsConnection>>? connectionFactory = null) - { - _queueOptions = queueOptions ?? throw new ArgumentNullException(nameof(queueOptions)); - _options = options ?? throw new ArgumentNullException(nameof(options)); - _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - _timeProvider = timeProvider ?? TimeProvider.System; - _connectionFactory = connectionFactory ?? 
((opts, token) => new ValueTask<NatsConnection>(new NatsConnection(opts))); - - if (string.IsNullOrWhiteSpace(_options.Url)) - { - throw new InvalidOperationException("NATS connection URL must be configured for the Notify delivery queue."); - } - - if (string.IsNullOrWhiteSpace(_options.Stream) || string.IsNullOrWhiteSpace(_options.Subject)) - { - throw new InvalidOperationException("NATS stream and subject must be configured for the Notify delivery queue."); - } - } - - public async ValueTask<NotifyQueueEnqueueResult> PublishAsync( - NotifyDeliveryQueueMessage message, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(message); - - var js = await GetJetStreamAsync(cancellationToken).ConfigureAwait(false); - await EnsureStreamAndConsumerAsync(js, cancellationToken).ConfigureAwait(false); - await EnsureDeadLetterStreamAsync(js, cancellationToken).ConfigureAwait(false); - - var payload = Encoding.UTF8.GetBytes(NotifyCanonicalJsonSerializer.Serialize(message.Delivery)); - var headers = BuildHeaders(message); - - var publishOpts = new NatsJSPubOpts - { - MsgId = message.IdempotencyKey, - RetryAttempts = 0 - }; - - var ack = await js.PublishAsync( - _options.Subject, - payload, - PayloadSerializer, - publishOpts, - headers, - cancellationToken) - .ConfigureAwait(false); - - if (ack.Duplicate) - { - NotifyQueueMetrics.RecordDeduplicated(TransportName, _options.Stream); - _logger.LogDebug( - "Duplicate Notify delivery enqueue detected for delivery {DeliveryId}.", - message.Delivery.DeliveryId); - - return new NotifyQueueEnqueueResult(ack.Seq.ToString(), true); - } - - NotifyQueueMetrics.RecordEnqueued(TransportName, _options.Stream); - _logger.LogDebug( - "Enqueued Notify delivery {DeliveryId} into NATS stream {Stream} (sequence {Sequence}).", - message.Delivery.DeliveryId, - ack.Stream, - ack.Seq); - - return new NotifyQueueEnqueueResult(ack.Seq.ToString(), false); - } - - public async ValueTask<IReadOnlyList<INotifyQueueLease<NotifyDeliveryQueueMessage>>> LeaseAsync( - NotifyQueueLeaseRequest request, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(request); - - var js = await GetJetStreamAsync(cancellationToken).ConfigureAwait(false); - var consumer = await EnsureStreamAndConsumerAsync(js, cancellationToken).ConfigureAwait(false); - - var fetchOpts = new NatsJSFetchOpts - { - MaxMsgs = request.BatchSize, - Expires = request.LeaseDuration, - IdleHeartbeat = _options.IdleHeartbeat - }; - - var now = _timeProvider.GetUtcNow(); - var leases = new List<INotifyQueueLease<NotifyDeliveryQueueMessage>>(request.BatchSize); - - await foreach (var msg in consumer.FetchAsync(PayloadSerializer, fetchOpts, cancellationToken).ConfigureAwait(false)) - { - var lease = CreateLease(msg, request.Consumer, now, request.LeaseDuration); - if (lease is null) - { - await msg.AckAsync(new AckOpts(), cancellationToken).ConfigureAwait(false); - continue; - } - - leases.Add(lease); - } - - return leases; - } - - public async ValueTask<IReadOnlyList<INotifyQueueLease<NotifyDeliveryQueueMessage>>> ClaimExpiredAsync( - NotifyQueueClaimOptions options, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(options); - - var js = await GetJetStreamAsync(cancellationToken).ConfigureAwait(false); - var consumer = await EnsureStreamAndConsumerAsync(js, cancellationToken).ConfigureAwait(false); - - var fetchOpts = new NatsJSFetchOpts - { - MaxMsgs = options.BatchSize, - Expires = options.MinIdleTime, - IdleHeartbeat = 
_options.IdleHeartbeat - }; - - var now = _timeProvider.GetUtcNow(); - var leases = new List<INotifyQueueLease<NotifyDeliveryQueueMessage>>(options.BatchSize); - - await foreach (var msg in consumer.FetchAsync(PayloadSerializer, fetchOpts, cancellationToken).ConfigureAwait(false)) - { - var deliveries = (int)(msg.Metadata?.NumDelivered ?? 1); - if (deliveries <= 1) - { - await msg.NakAsync(new AckOpts(), TimeSpan.Zero, cancellationToken).ConfigureAwait(false); - continue; - } - - var lease = CreateLease(msg, options.ClaimantConsumer, now, _queueOptions.DefaultLeaseDuration); - if (lease is null) - { - await msg.AckAsync(new AckOpts(), cancellationToken).ConfigureAwait(false); - continue; - } - - leases.Add(lease); - } - - return leases; - } - - public async ValueTask DisposeAsync() - { - if (_disposed) - { - return; - } - - _disposed = true; - - if (_connection is not null) - { - await _connection.DisposeAsync().ConfigureAwait(false); - } - - _connectionGate.Dispose(); - GC.SuppressFinalize(this); - } - - internal async Task AcknowledgeAsync( - NatsNotifyDeliveryLease lease, - CancellationToken cancellationToken) - { - if (!lease.TryBeginCompletion()) - { - return; - } - - await lease.RawMessage.AckAsync(new AckOpts(), cancellationToken).ConfigureAwait(false); - NotifyQueueMetrics.RecordAck(TransportName, _options.Stream); - - _logger.LogDebug( - "Acknowledged Notify delivery {DeliveryId} (sequence {Sequence}).", - lease.Message.Delivery.DeliveryId, - lease.MessageId); - } - - internal async Task RenewLeaseAsync( - NatsNotifyDeliveryLease lease, - TimeSpan leaseDuration, - CancellationToken cancellationToken) - { - await lease.RawMessage.AckProgressAsync(new AckOpts(), cancellationToken).ConfigureAwait(false); - var expires = _timeProvider.GetUtcNow().Add(leaseDuration); - lease.RefreshLease(expires); - - _logger.LogDebug( - "Renewed NATS lease for Notify delivery {DeliveryId} until {Expires:u}.", - lease.Message.Delivery.DeliveryId, - expires); - } - - internal async Task ReleaseAsync( - NatsNotifyDeliveryLease lease, - NotifyQueueReleaseDisposition disposition, - CancellationToken cancellationToken) - { - if (disposition == NotifyQueueReleaseDisposition.Retry - && lease.Attempt >= _queueOptions.MaxDeliveryAttempts) - { - _logger.LogWarning( - "Notify delivery {DeliveryId} reached max delivery attempts ({Attempts}); moving to dead-letter stream.", - lease.Message.Delivery.DeliveryId, - lease.Attempt); - - await DeadLetterAsync( - lease, - $"max-delivery-attempts:{lease.Attempt}", - cancellationToken).ConfigureAwait(false); - return; - } - - if (!lease.TryBeginCompletion()) - { - return; - } - - if (disposition == NotifyQueueReleaseDisposition.Retry) - { - var delay = CalculateBackoff(lease.Attempt); - await lease.RawMessage.NakAsync(new AckOpts(), delay, cancellationToken).ConfigureAwait(false); - - NotifyQueueMetrics.RecordRetry(TransportName, _options.Stream); - _logger.LogInformation( - "Scheduled Notify delivery {DeliveryId} for retry with delay {Delay} (attempt {Attempt}).", - lease.Message.Delivery.DeliveryId, - delay, - lease.Attempt); - } - else - { - await lease.RawMessage.AckTerminateAsync(new AckOpts(), cancellationToken).ConfigureAwait(false); - NotifyQueueMetrics.RecordAck(TransportName, _options.Stream); - _logger.LogInformation( - "Abandoned Notify delivery {DeliveryId} after {Attempt} attempt(s).", - lease.Message.Delivery.DeliveryId, - lease.Attempt); - } - } - - internal async Task DeadLetterAsync( - NatsNotifyDeliveryLease lease, - string reason, - CancellationToken 
cancellationToken) - { - if (!lease.TryBeginCompletion()) - { - return; - } - - await lease.RawMessage.AckTerminateAsync(new AckOpts(), cancellationToken).ConfigureAwait(false); - - var js = await GetJetStreamAsync(cancellationToken).ConfigureAwait(false); - await EnsureDeadLetterStreamAsync(js, cancellationToken).ConfigureAwait(false); - - var payload = Encoding.UTF8.GetBytes(NotifyCanonicalJsonSerializer.Serialize(lease.Message.Delivery)); - var headers = BuildDeadLetterHeaders(lease, reason); - - await js.PublishAsync( - _options.DeadLetterSubject, - payload, - PayloadSerializer, - new NatsJSPubOpts(), - headers, - cancellationToken) - .ConfigureAwait(false); - - NotifyQueueMetrics.RecordDeadLetter(TransportName, _options.DeadLetterStream); - _logger.LogError( - "Dead-lettered Notify delivery {DeliveryId} (attempt {Attempt}): {Reason}", - lease.Message.Delivery.DeliveryId, - lease.Attempt, - reason); - } - - internal async Task PingAsync(CancellationToken cancellationToken) - { - var connection = await EnsureConnectionAsync(cancellationToken).ConfigureAwait(false); - await connection.PingAsync(cancellationToken).ConfigureAwait(false); - } - - private async Task<NatsJSContext> GetJetStreamAsync(CancellationToken cancellationToken) - { - if (_jsContext is not null) - { - return _jsContext; - } - - var connection = await EnsureConnectionAsync(cancellationToken).ConfigureAwait(false); - - await _connectionGate.WaitAsync(cancellationToken).ConfigureAwait(false); - try - { - _jsContext ??= new NatsJSContext(connection); - return _jsContext; - } - finally - { - _connectionGate.Release(); - } - } - - private async ValueTask<INatsJSConsumer> EnsureStreamAndConsumerAsync( - NatsJSContext js, - CancellationToken cancellationToken) - { - if (_consumer is not null) - { - return _consumer; - } - - await _connectionGate.WaitAsync(cancellationToken).ConfigureAwait(false); - try - { - if (_consumer is not null) - { - return _consumer; - } - - await EnsureStreamAsync(js, cancellationToken).ConfigureAwait(false); - await EnsureDeadLetterStreamAsync(js, cancellationToken).ConfigureAwait(false); - - var consumerConfig = new ConsumerConfig - { - DurableName = _options.DurableConsumer, - AckPolicy = ConsumerConfigAckPolicy.Explicit, - ReplayPolicy = ConsumerConfigReplayPolicy.Instant, - DeliverPolicy = ConsumerConfigDeliverPolicy.All, - AckWait = ToNanoseconds(_options.AckWait), - MaxAckPending = _options.MaxAckPending, - MaxDeliver = Math.Max(1, _queueOptions.MaxDeliveryAttempts), - FilterSubjects = new[] { _options.Subject } - }; - - try - { - _consumer = await js.CreateConsumerAsync( - _options.Stream, - consumerConfig, - cancellationToken) - .ConfigureAwait(false); - } - catch (NatsJSApiException apiEx) - { - _logger.LogDebug( - apiEx, - "CreateConsumerAsync failed with code {Code}; attempting to fetch existing durable consumer {Durable}.", - apiEx.Error?.Code, - _options.DurableConsumer); - - _consumer = await js.GetConsumerAsync( - _options.Stream, - _options.DurableConsumer, - cancellationToken) - .ConfigureAwait(false); - } - - return _consumer; - } - finally - { - _connectionGate.Release(); - } - } - - private async Task<NatsConnection> EnsureConnectionAsync(CancellationToken cancellationToken) - { - if (_connection is not null) - { - return _connection; - } - - await _connectionGate.WaitAsync(cancellationToken).ConfigureAwait(false); - try - { - if (_connection is not null) - { - return _connection; - } - - var opts = new NatsOpts - { - Url = _options.Url!, - Name = "stellaops-notify-delivery", - 
CommandTimeout = TimeSpan.FromSeconds(10), - RequestTimeout = TimeSpan.FromSeconds(20), - PingInterval = TimeSpan.FromSeconds(30) - }; - - _connection = await _connectionFactory(opts, cancellationToken).ConfigureAwait(false); - await _connection.ConnectAsync().ConfigureAwait(false); - return _connection; - } - finally - { - _connectionGate.Release(); - } - } - - private async Task EnsureStreamAsync(NatsJSContext js, CancellationToken cancellationToken) - { - try - { - await js.GetStreamAsync(_options.Stream, cancellationToken: cancellationToken).ConfigureAwait(false); - } - catch (NatsJSApiException ex) when (ex.Error?.Code == 404) - { - var config = new StreamConfig(name: _options.Stream, subjects: new[] { _options.Subject }) - { - Retention = StreamConfigRetention.Workqueue, - Storage = StreamConfigStorage.File, - MaxConsumers = -1, - MaxMsgs = -1, - MaxBytes = -1 - }; - - await js.CreateStreamAsync(config, cancellationToken).ConfigureAwait(false); - _logger.LogInformation("Created NATS Notify delivery stream {Stream} ({Subject}).", _options.Stream, _options.Subject); - } - } - - private async Task EnsureDeadLetterStreamAsync(NatsJSContext js, CancellationToken cancellationToken) - { - try - { - await js.GetStreamAsync(_options.DeadLetterStream, cancellationToken: cancellationToken).ConfigureAwait(false); - } - catch (NatsJSApiException ex) when (ex.Error?.Code == 404) - { - var config = new StreamConfig(name: _options.DeadLetterStream, subjects: new[] { _options.DeadLetterSubject }) - { - Retention = StreamConfigRetention.Workqueue, - Storage = StreamConfigStorage.File, - MaxConsumers = -1, - MaxMsgs = -1, - MaxBytes = -1 - }; - - await js.CreateStreamAsync(config, cancellationToken).ConfigureAwait(false); - _logger.LogInformation("Created NATS Notify delivery dead-letter stream {Stream} ({Subject}).", _options.DeadLetterStream, _options.DeadLetterSubject); - } - } - - private NatsNotifyDeliveryLease? CreateLease( - NatsJSMsg<byte[]> message, - string consumer, - DateTimeOffset now, - TimeSpan leaseDuration) - { - var payloadBytes = message.Data ?? Array.Empty<byte>(); - if (payloadBytes.Length == 0) - { - return null; - } - - NotifyDelivery delivery; - try - { - var json = Encoding.UTF8.GetString(payloadBytes); - delivery = NotifyCanonicalJsonSerializer.Deserialize<NotifyDelivery>(json); - } - catch (Exception ex) - { - _logger.LogWarning( - ex, - "Failed to deserialize Notify delivery payload for NATS message {Sequence}.", - message.Metadata?.Sequence.Stream); - return null; - } - - var headers = message.Headers ?? new NatsHeaders(); - - var deliveryId = TryGetHeader(headers, NotifyQueueFields.DeliveryId) ?? delivery.DeliveryId; - var channelId = TryGetHeader(headers, NotifyQueueFields.ChannelId); - var channelTypeRaw = TryGetHeader(headers, NotifyQueueFields.ChannelType); - if (channelId is null || channelTypeRaw is null) - { - return null; - } - - if (!Enum.TryParse<NotifyChannelType>(channelTypeRaw, ignoreCase: true, out var channelType)) - { - _logger.LogWarning("Unknown channel type '{ChannelType}' for delivery {DeliveryId}.", channelTypeRaw, deliveryId); - return null; - } - - var traceId = TryGetHeader(headers, NotifyQueueFields.TraceId); - var partitionKey = TryGetHeader(headers, NotifyQueueFields.PartitionKey) ?? channelId; - var idempotencyKey = TryGetHeader(headers, NotifyQueueFields.IdempotencyKey) ?? 
delivery.DeliveryId; - - var enqueuedAt = TryGetHeader(headers, NotifyQueueFields.EnqueuedAt) is { } enqueuedRaw - && long.TryParse(enqueuedRaw, NumberStyles.Integer, CultureInfo.InvariantCulture, out var unix) - ? DateTimeOffset.FromUnixTimeMilliseconds(unix) - : now; - - var attempt = TryGetHeader(headers, NotifyQueueFields.Attempt) is { } attemptRaw - && int.TryParse(attemptRaw, NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsedAttempt) - ? parsedAttempt - : 1; - - if (message.Metadata?.NumDelivered is ulong delivered && delivered > 0) - { - var deliveredInt = delivered > int.MaxValue ? int.MaxValue : (int)delivered; - if (deliveredInt > attempt) - { - attempt = deliveredInt; - } - } - - var attributes = ExtractAttributes(headers); - var leaseExpires = now.Add(leaseDuration); - var messageId = message.Metadata?.Sequence.Stream.ToString() ?? Guid.NewGuid().ToString("n"); - - var queueMessage = new NotifyDeliveryQueueMessage( - delivery, - channelId, - channelType, - _options.Subject, - traceId, - attributes); - - return new NatsNotifyDeliveryLease( - this, - message, - messageId, - queueMessage, - attempt, - consumer, - enqueuedAt, - leaseExpires, - idempotencyKey); - } - - private NatsHeaders BuildHeaders(NotifyDeliveryQueueMessage message) - { - var headers = new NatsHeaders - { - { NotifyQueueFields.DeliveryId, message.Delivery.DeliveryId }, - { NotifyQueueFields.ChannelId, message.ChannelId }, - { NotifyQueueFields.ChannelType, message.ChannelType.ToString() }, - { NotifyQueueFields.Tenant, message.Delivery.TenantId }, - { NotifyQueueFields.Attempt, "1" }, - { NotifyQueueFields.EnqueuedAt, _timeProvider.GetUtcNow().ToUnixTimeMilliseconds().ToString(CultureInfo.InvariantCulture) }, - { NotifyQueueFields.IdempotencyKey, message.IdempotencyKey }, - { NotifyQueueFields.PartitionKey, message.PartitionKey } - }; - - if (!string.IsNullOrWhiteSpace(message.TraceId)) - { - headers.Add(NotifyQueueFields.TraceId, message.TraceId!); - } - - foreach (var kvp in message.Attributes) - { - headers.Add(NotifyQueueFields.AttributePrefix + kvp.Key, kvp.Value); - } - - return headers; - } - - private NatsHeaders BuildDeadLetterHeaders(NatsNotifyDeliveryLease lease, string reason) - { - var headers = new NatsHeaders - { - { NotifyQueueFields.DeliveryId, lease.Message.Delivery.DeliveryId }, - { NotifyQueueFields.ChannelId, lease.Message.ChannelId }, - { NotifyQueueFields.ChannelType, lease.Message.ChannelType.ToString() }, - { NotifyQueueFields.Tenant, lease.Message.Delivery.TenantId }, - { NotifyQueueFields.Attempt, lease.Attempt.ToString(CultureInfo.InvariantCulture) }, - { NotifyQueueFields.IdempotencyKey, lease.Message.IdempotencyKey }, - { "deadletter-reason", reason } - }; - - if (!string.IsNullOrWhiteSpace(lease.Message.TraceId)) - { - headers.Add(NotifyQueueFields.TraceId, lease.Message.TraceId!); - } - - foreach (var kvp in lease.Message.Attributes) - { - headers.Add(NotifyQueueFields.AttributePrefix + kvp.Key, kvp.Value); - } - - return headers; - } - - private static string? TryGetHeader(NatsHeaders headers, string key) - { - if (headers.TryGetValue(key, out var values) && values.Count > 0) - { - var value = values[0]; - return string.IsNullOrWhiteSpace(value) ? 
null : value; - } - - return null; - } - - private static IReadOnlyDictionary<string, string> ExtractAttributes(NatsHeaders headers) - { - var attributes = new Dictionary<string, string>(StringComparer.Ordinal); - - foreach (var key in headers.Keys) - { - if (!key.StartsWith(NotifyQueueFields.AttributePrefix, StringComparison.Ordinal)) - { - continue; - } - - if (headers.TryGetValue(key, out var values) && values.Count > 0) - { - attributes[key[NotifyQueueFields.AttributePrefix.Length..]] = values[0]!; - } - } - - return attributes.Count == 0 - ? EmptyReadOnlyDictionary<string, string>.Instance - : new ReadOnlyDictionary<string, string>(attributes); - } - - private TimeSpan CalculateBackoff(int attempt) - { - var initial = _queueOptions.RetryInitialBackoff > TimeSpan.Zero - ? _queueOptions.RetryInitialBackoff - : _options.RetryDelay; - - if (initial <= TimeSpan.Zero) - { - return TimeSpan.Zero; - } - - if (attempt <= 1) - { - return initial; - } - - var max = _queueOptions.RetryMaxBackoff > TimeSpan.Zero - ? _queueOptions.RetryMaxBackoff - : initial; - - var exponent = attempt - 1; - var scaledTicks = initial.Ticks * Math.Pow(2, exponent - 1); - var cappedTicks = Math.Min(max.Ticks, scaledTicks); - var resultTicks = Math.Max(initial.Ticks, (long)cappedTicks); - return TimeSpan.FromTicks(resultTicks); - } - - private static long ToNanoseconds(TimeSpan value) - => value <= TimeSpan.Zero ? 0 : value.Ticks * 100L; - - private static class EmptyReadOnlyDictionary<TKey, TValue> - where TKey : notnull - { - public static readonly IReadOnlyDictionary<TKey, TValue> Instance = - new ReadOnlyDictionary<TKey, TValue>(new Dictionary<TKey, TValue>(0, EqualityComparer<TKey>.Default)); - } -} +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Globalization; +using System.Linq; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using NATS.Client.Core; +using NATS.Client.JetStream; +using NATS.Client.JetStream.Models; +using StellaOps.Notify.Models; + +namespace StellaOps.Notify.Queue.Nats; + +internal sealed class NatsNotifyDeliveryQueue : INotifyDeliveryQueue, IAsyncDisposable +{ + private const string TransportName = "nats"; + + private static readonly INatsSerializer<byte[]> PayloadSerializer = NatsRawSerializer<byte[]>.Default; + + private readonly NotifyDeliveryQueueOptions _queueOptions; + private readonly NotifyNatsDeliveryQueueOptions _options; + private readonly ILogger<NatsNotifyDeliveryQueue> _logger; + private readonly TimeProvider _timeProvider; + private readonly SemaphoreSlim _connectionGate = new(1, 1); + private readonly Func<NatsOpts, CancellationToken, ValueTask<NatsConnection>> _connectionFactory; + + private NatsConnection? _connection; + private NatsJSContext? _jsContext; + private INatsJSConsumer? _consumer; + private bool _disposed; + + public NatsNotifyDeliveryQueue( + NotifyDeliveryQueueOptions queueOptions, + NotifyNatsDeliveryQueueOptions options, + ILogger<NatsNotifyDeliveryQueue> logger, + TimeProvider timeProvider, + Func<NatsOpts, CancellationToken, ValueTask<NatsConnection>>? connectionFactory = null) + { + _queueOptions = queueOptions ?? throw new ArgumentNullException(nameof(queueOptions)); + _options = options ?? throw new ArgumentNullException(nameof(options)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _timeProvider = timeProvider ?? 
TimeProvider.System; + _connectionFactory = connectionFactory ?? ((opts, token) => new ValueTask<NatsConnection>(new NatsConnection(opts))); + + if (string.IsNullOrWhiteSpace(_options.Url)) + { + throw new InvalidOperationException("NATS connection URL must be configured for the Notify delivery queue."); + } + + if (string.IsNullOrWhiteSpace(_options.Stream) || string.IsNullOrWhiteSpace(_options.Subject)) + { + throw new InvalidOperationException("NATS stream and subject must be configured for the Notify delivery queue."); + } + } + + public async ValueTask<NotifyQueueEnqueueResult> PublishAsync( + NotifyDeliveryQueueMessage message, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(message); + + var js = await GetJetStreamAsync(cancellationToken).ConfigureAwait(false); + await EnsureStreamAndConsumerAsync(js, cancellationToken).ConfigureAwait(false); + await EnsureDeadLetterStreamAsync(js, cancellationToken).ConfigureAwait(false); + + var payload = Encoding.UTF8.GetBytes(NotifyCanonicalJsonSerializer.Serialize(message.Delivery)); + var headers = BuildHeaders(message); + + var publishOpts = new NatsJSPubOpts + { + MsgId = message.IdempotencyKey, + RetryAttempts = 0 + }; + + var ack = await js.PublishAsync( + _options.Subject, + payload, + PayloadSerializer, + publishOpts, + headers, + cancellationToken) + .ConfigureAwait(false); + + if (ack.Duplicate) + { + NotifyQueueMetrics.RecordDeduplicated(TransportName, _options.Stream); + _logger.LogDebug( + "Duplicate Notify delivery enqueue detected for delivery {DeliveryId}.", + message.Delivery.DeliveryId); + + return new NotifyQueueEnqueueResult(ack.Seq.ToString(), true); + } + + NotifyQueueMetrics.RecordEnqueued(TransportName, _options.Stream); + _logger.LogDebug( + "Enqueued Notify delivery {DeliveryId} into NATS stream {Stream} (sequence {Sequence}).", + message.Delivery.DeliveryId, + ack.Stream, + ack.Seq); + + return new NotifyQueueEnqueueResult(ack.Seq.ToString(), false); + } + + public async ValueTask<IReadOnlyList<INotifyQueueLease<NotifyDeliveryQueueMessage>>> LeaseAsync( + NotifyQueueLeaseRequest request, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(request); + + var js = await GetJetStreamAsync(cancellationToken).ConfigureAwait(false); + var consumer = await EnsureStreamAndConsumerAsync(js, cancellationToken).ConfigureAwait(false); + + var fetchOpts = new NatsJSFetchOpts + { + MaxMsgs = request.BatchSize, + Expires = request.LeaseDuration, + IdleHeartbeat = _options.IdleHeartbeat + }; + + var now = _timeProvider.GetUtcNow(); + var leases = new List<INotifyQueueLease<NotifyDeliveryQueueMessage>>(request.BatchSize); + + await foreach (var msg in consumer.FetchAsync(PayloadSerializer, fetchOpts, cancellationToken).ConfigureAwait(false)) + { + var lease = CreateLease(msg, request.Consumer, now, request.LeaseDuration); + if (lease is null) + { + await msg.AckAsync(new AckOpts(), cancellationToken).ConfigureAwait(false); + continue; + } + + leases.Add(lease); + } + + return leases; + } + + public async ValueTask<IReadOnlyList<INotifyQueueLease<NotifyDeliveryQueueMessage>>> ClaimExpiredAsync( + NotifyQueueClaimOptions options, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(options); + + var js = await GetJetStreamAsync(cancellationToken).ConfigureAwait(false); + var consumer = await EnsureStreamAndConsumerAsync(js, cancellationToken).ConfigureAwait(false); + + var fetchOpts = new NatsJSFetchOpts + { + MaxMsgs = 
options.BatchSize, + Expires = options.MinIdleTime, + IdleHeartbeat = _options.IdleHeartbeat + }; + + var now = _timeProvider.GetUtcNow(); + var leases = new List<INotifyQueueLease<NotifyDeliveryQueueMessage>>(options.BatchSize); + + await foreach (var msg in consumer.FetchAsync(PayloadSerializer, fetchOpts, cancellationToken).ConfigureAwait(false)) + { + var deliveries = (int)(msg.Metadata?.NumDelivered ?? 1); + if (deliveries <= 1) + { + await msg.NakAsync(new AckOpts(), TimeSpan.Zero, cancellationToken).ConfigureAwait(false); + continue; + } + + var lease = CreateLease(msg, options.ClaimantConsumer, now, _queueOptions.DefaultLeaseDuration); + if (lease is null) + { + await msg.AckAsync(new AckOpts(), cancellationToken).ConfigureAwait(false); + continue; + } + + leases.Add(lease); + } + + return leases; + } + + public async ValueTask DisposeAsync() + { + if (_disposed) + { + return; + } + + _disposed = true; + + if (_connection is not null) + { + await _connection.DisposeAsync().ConfigureAwait(false); + } + + _connectionGate.Dispose(); + GC.SuppressFinalize(this); + } + + internal async Task AcknowledgeAsync( + NatsNotifyDeliveryLease lease, + CancellationToken cancellationToken) + { + if (!lease.TryBeginCompletion()) + { + return; + } + + await lease.RawMessage.AckAsync(new AckOpts(), cancellationToken).ConfigureAwait(false); + NotifyQueueMetrics.RecordAck(TransportName, _options.Stream); + + _logger.LogDebug( + "Acknowledged Notify delivery {DeliveryId} (sequence {Sequence}).", + lease.Message.Delivery.DeliveryId, + lease.MessageId); + } + + internal async Task RenewLeaseAsync( + NatsNotifyDeliveryLease lease, + TimeSpan leaseDuration, + CancellationToken cancellationToken) + { + await lease.RawMessage.AckProgressAsync(new AckOpts(), cancellationToken).ConfigureAwait(false); + var expires = _timeProvider.GetUtcNow().Add(leaseDuration); + lease.RefreshLease(expires); + + _logger.LogDebug( + "Renewed NATS lease for Notify delivery {DeliveryId} until {Expires:u}.", + lease.Message.Delivery.DeliveryId, + expires); + } + + internal async Task ReleaseAsync( + NatsNotifyDeliveryLease lease, + NotifyQueueReleaseDisposition disposition, + CancellationToken cancellationToken) + { + if (disposition == NotifyQueueReleaseDisposition.Retry + && lease.Attempt >= _queueOptions.MaxDeliveryAttempts) + { + _logger.LogWarning( + "Notify delivery {DeliveryId} reached max delivery attempts ({Attempts}); moving to dead-letter stream.", + lease.Message.Delivery.DeliveryId, + lease.Attempt); + + await DeadLetterAsync( + lease, + $"max-delivery-attempts:{lease.Attempt}", + cancellationToken).ConfigureAwait(false); + return; + } + + if (!lease.TryBeginCompletion()) + { + return; + } + + if (disposition == NotifyQueueReleaseDisposition.Retry) + { + var delay = CalculateBackoff(lease.Attempt); + await lease.RawMessage.NakAsync(new AckOpts(), delay, cancellationToken).ConfigureAwait(false); + + NotifyQueueMetrics.RecordRetry(TransportName, _options.Stream); + _logger.LogInformation( + "Scheduled Notify delivery {DeliveryId} for retry with delay {Delay} (attempt {Attempt}).", + lease.Message.Delivery.DeliveryId, + delay, + lease.Attempt); + } + else + { + await lease.RawMessage.AckTerminateAsync(new AckOpts(), cancellationToken).ConfigureAwait(false); + NotifyQueueMetrics.RecordAck(TransportName, _options.Stream); + _logger.LogInformation( + "Abandoned Notify delivery {DeliveryId} after {Attempt} attempt(s).", + lease.Message.Delivery.DeliveryId, + lease.Attempt); + } + } + + internal async Task DeadLetterAsync( + 
NatsNotifyDeliveryLease lease, + string reason, + CancellationToken cancellationToken) + { + if (!lease.TryBeginCompletion()) + { + return; + } + + await lease.RawMessage.AckTerminateAsync(new AckOpts(), cancellationToken).ConfigureAwait(false); + + var js = await GetJetStreamAsync(cancellationToken).ConfigureAwait(false); + await EnsureDeadLetterStreamAsync(js, cancellationToken).ConfigureAwait(false); + + var payload = Encoding.UTF8.GetBytes(NotifyCanonicalJsonSerializer.Serialize(lease.Message.Delivery)); + var headers = BuildDeadLetterHeaders(lease, reason); + + await js.PublishAsync( + _options.DeadLetterSubject, + payload, + PayloadSerializer, + new NatsJSPubOpts(), + headers, + cancellationToken) + .ConfigureAwait(false); + + NotifyQueueMetrics.RecordDeadLetter(TransportName, _options.DeadLetterStream); + _logger.LogError( + "Dead-lettered Notify delivery {DeliveryId} (attempt {Attempt}): {Reason}", + lease.Message.Delivery.DeliveryId, + lease.Attempt, + reason); + } + + internal async Task PingAsync(CancellationToken cancellationToken) + { + var connection = await EnsureConnectionAsync(cancellationToken).ConfigureAwait(false); + await connection.PingAsync(cancellationToken).ConfigureAwait(false); + } + + private async Task<NatsJSContext> GetJetStreamAsync(CancellationToken cancellationToken) + { + if (_jsContext is not null) + { + return _jsContext; + } + + var connection = await EnsureConnectionAsync(cancellationToken).ConfigureAwait(false); + + await _connectionGate.WaitAsync(cancellationToken).ConfigureAwait(false); + try + { + _jsContext ??= new NatsJSContext(connection); + return _jsContext; + } + finally + { + _connectionGate.Release(); + } + } + + private async ValueTask<INatsJSConsumer> EnsureStreamAndConsumerAsync( + NatsJSContext js, + CancellationToken cancellationToken) + { + if (_consumer is not null) + { + return _consumer; + } + + await _connectionGate.WaitAsync(cancellationToken).ConfigureAwait(false); + try + { + if (_consumer is not null) + { + return _consumer; + } + + await EnsureStreamAsync(js, cancellationToken).ConfigureAwait(false); + await EnsureDeadLetterStreamAsync(js, cancellationToken).ConfigureAwait(false); + + var consumerConfig = new ConsumerConfig + { + DurableName = _options.DurableConsumer, + AckPolicy = ConsumerConfigAckPolicy.Explicit, + ReplayPolicy = ConsumerConfigReplayPolicy.Instant, + DeliverPolicy = ConsumerConfigDeliverPolicy.All, + AckWait = ToNanoseconds(_options.AckWait), + MaxAckPending = _options.MaxAckPending, + MaxDeliver = Math.Max(1, _queueOptions.MaxDeliveryAttempts), + FilterSubjects = new[] { _options.Subject } + }; + + try + { + _consumer = await js.CreateConsumerAsync( + _options.Stream, + consumerConfig, + cancellationToken) + .ConfigureAwait(false); + } + catch (NatsJSApiException apiEx) + { + _logger.LogDebug( + apiEx, + "CreateConsumerAsync failed with code {Code}; attempting to fetch existing durable consumer {Durable}.", + apiEx.Error?.Code, + _options.DurableConsumer); + + _consumer = await js.GetConsumerAsync( + _options.Stream, + _options.DurableConsumer, + cancellationToken) + .ConfigureAwait(false); + } + + return _consumer; + } + finally + { + _connectionGate.Release(); + } + } + + private async Task<NatsConnection> EnsureConnectionAsync(CancellationToken cancellationToken) + { + if (_connection is not null) + { + return _connection; + } + + await _connectionGate.WaitAsync(cancellationToken).ConfigureAwait(false); + try + { + if (_connection is not null) + { + return _connection; + } + + var opts = new NatsOpts + 
{ + Url = _options.Url!, + Name = "stellaops-notify-delivery", + CommandTimeout = TimeSpan.FromSeconds(10), + RequestTimeout = TimeSpan.FromSeconds(20), + PingInterval = TimeSpan.FromSeconds(30) + }; + + _connection = await _connectionFactory(opts, cancellationToken).ConfigureAwait(false); + await _connection.ConnectAsync().ConfigureAwait(false); + return _connection; + } + finally + { + _connectionGate.Release(); + } + } + + private async Task EnsureStreamAsync(NatsJSContext js, CancellationToken cancellationToken) + { + try + { + await js.GetStreamAsync(_options.Stream, cancellationToken: cancellationToken).ConfigureAwait(false); + } + catch (NatsJSApiException ex) when (ex.Error?.Code == 404) + { + var config = new StreamConfig(name: _options.Stream, subjects: new[] { _options.Subject }) + { + Retention = StreamConfigRetention.Workqueue, + Storage = StreamConfigStorage.File, + MaxConsumers = -1, + MaxMsgs = -1, + MaxBytes = -1 + }; + + await js.CreateStreamAsync(config, cancellationToken).ConfigureAwait(false); + _logger.LogInformation("Created NATS Notify delivery stream {Stream} ({Subject}).", _options.Stream, _options.Subject); + } + } + + private async Task EnsureDeadLetterStreamAsync(NatsJSContext js, CancellationToken cancellationToken) + { + try + { + await js.GetStreamAsync(_options.DeadLetterStream, cancellationToken: cancellationToken).ConfigureAwait(false); + } + catch (NatsJSApiException ex) when (ex.Error?.Code == 404) + { + var config = new StreamConfig(name: _options.DeadLetterStream, subjects: new[] { _options.DeadLetterSubject }) + { + Retention = StreamConfigRetention.Workqueue, + Storage = StreamConfigStorage.File, + MaxConsumers = -1, + MaxMsgs = -1, + MaxBytes = -1 + }; + + await js.CreateStreamAsync(config, cancellationToken).ConfigureAwait(false); + _logger.LogInformation("Created NATS Notify delivery dead-letter stream {Stream} ({Subject}).", _options.DeadLetterStream, _options.DeadLetterSubject); + } + } + + private NatsNotifyDeliveryLease? CreateLease( + NatsJSMsg<byte[]> message, + string consumer, + DateTimeOffset now, + TimeSpan leaseDuration) + { + var payloadBytes = message.Data ?? Array.Empty<byte>(); + if (payloadBytes.Length == 0) + { + return null; + } + + NotifyDelivery delivery; + try + { + var json = Encoding.UTF8.GetString(payloadBytes); + delivery = NotifyCanonicalJsonSerializer.Deserialize<NotifyDelivery>(json); + } + catch (Exception ex) + { + _logger.LogWarning( + ex, + "Failed to deserialize Notify delivery payload for NATS message {Sequence}.", + message.Metadata?.Sequence.Stream); + return null; + } + + var headers = message.Headers ?? new NatsHeaders(); + + var deliveryId = TryGetHeader(headers, NotifyQueueFields.DeliveryId) ?? delivery.DeliveryId; + var channelId = TryGetHeader(headers, NotifyQueueFields.ChannelId); + var channelTypeRaw = TryGetHeader(headers, NotifyQueueFields.ChannelType); + if (channelId is null || channelTypeRaw is null) + { + return null; + } + + if (!Enum.TryParse<NotifyChannelType>(channelTypeRaw, ignoreCase: true, out var channelType)) + { + _logger.LogWarning("Unknown channel type '{ChannelType}' for delivery {DeliveryId}.", channelTypeRaw, deliveryId); + return null; + } + + var traceId = TryGetHeader(headers, NotifyQueueFields.TraceId); + var partitionKey = TryGetHeader(headers, NotifyQueueFields.PartitionKey) ?? channelId; + var idempotencyKey = TryGetHeader(headers, NotifyQueueFields.IdempotencyKey) ?? 
delivery.DeliveryId; + + var enqueuedAt = TryGetHeader(headers, NotifyQueueFields.EnqueuedAt) is { } enqueuedRaw + && long.TryParse(enqueuedRaw, NumberStyles.Integer, CultureInfo.InvariantCulture, out var unix) + ? DateTimeOffset.FromUnixTimeMilliseconds(unix) + : now; + + var attempt = TryGetHeader(headers, NotifyQueueFields.Attempt) is { } attemptRaw + && int.TryParse(attemptRaw, NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsedAttempt) + ? parsedAttempt + : 1; + + if (message.Metadata?.NumDelivered is ulong delivered && delivered > 0) + { + var deliveredInt = delivered > int.MaxValue ? int.MaxValue : (int)delivered; + if (deliveredInt > attempt) + { + attempt = deliveredInt; + } + } + + var attributes = ExtractAttributes(headers); + var leaseExpires = now.Add(leaseDuration); + var messageId = message.Metadata?.Sequence.Stream.ToString() ?? Guid.NewGuid().ToString("n"); + + var queueMessage = new NotifyDeliveryQueueMessage( + delivery, + channelId, + channelType, + _options.Subject, + traceId, + attributes); + + return new NatsNotifyDeliveryLease( + this, + message, + messageId, + queueMessage, + attempt, + consumer, + enqueuedAt, + leaseExpires, + idempotencyKey); + } + + private NatsHeaders BuildHeaders(NotifyDeliveryQueueMessage message) + { + var headers = new NatsHeaders + { + { NotifyQueueFields.DeliveryId, message.Delivery.DeliveryId }, + { NotifyQueueFields.ChannelId, message.ChannelId }, + { NotifyQueueFields.ChannelType, message.ChannelType.ToString() }, + { NotifyQueueFields.Tenant, message.Delivery.TenantId }, + { NotifyQueueFields.Attempt, "1" }, + { NotifyQueueFields.EnqueuedAt, _timeProvider.GetUtcNow().ToUnixTimeMilliseconds().ToString(CultureInfo.InvariantCulture) }, + { NotifyQueueFields.IdempotencyKey, message.IdempotencyKey }, + { NotifyQueueFields.PartitionKey, message.PartitionKey } + }; + + if (!string.IsNullOrWhiteSpace(message.TraceId)) + { + headers.Add(NotifyQueueFields.TraceId, message.TraceId!); + } + + foreach (var kvp in message.Attributes) + { + headers.Add(NotifyQueueFields.AttributePrefix + kvp.Key, kvp.Value); + } + + return headers; + } + + private NatsHeaders BuildDeadLetterHeaders(NatsNotifyDeliveryLease lease, string reason) + { + var headers = new NatsHeaders + { + { NotifyQueueFields.DeliveryId, lease.Message.Delivery.DeliveryId }, + { NotifyQueueFields.ChannelId, lease.Message.ChannelId }, + { NotifyQueueFields.ChannelType, lease.Message.ChannelType.ToString() }, + { NotifyQueueFields.Tenant, lease.Message.Delivery.TenantId }, + { NotifyQueueFields.Attempt, lease.Attempt.ToString(CultureInfo.InvariantCulture) }, + { NotifyQueueFields.IdempotencyKey, lease.Message.IdempotencyKey }, + { "deadletter-reason", reason } + }; + + if (!string.IsNullOrWhiteSpace(lease.Message.TraceId)) + { + headers.Add(NotifyQueueFields.TraceId, lease.Message.TraceId!); + } + + foreach (var kvp in lease.Message.Attributes) + { + headers.Add(NotifyQueueFields.AttributePrefix + kvp.Key, kvp.Value); + } + + return headers; + } + + private static string? TryGetHeader(NatsHeaders headers, string key) + { + if (headers.TryGetValue(key, out var values) && values.Count > 0) + { + var value = values[0]; + return string.IsNullOrWhiteSpace(value) ? 
null : value; + } + + return null; + } + + private static IReadOnlyDictionary<string, string> ExtractAttributes(NatsHeaders headers) + { + var attributes = new Dictionary<string, string>(StringComparer.Ordinal); + + foreach (var key in headers.Keys) + { + if (!key.StartsWith(NotifyQueueFields.AttributePrefix, StringComparison.Ordinal)) + { + continue; + } + + if (headers.TryGetValue(key, out var values) && values.Count > 0) + { + attributes[key[NotifyQueueFields.AttributePrefix.Length..]] = values[0]!; + } + } + + return attributes.Count == 0 + ? EmptyReadOnlyDictionary<string, string>.Instance + : new ReadOnlyDictionary<string, string>(attributes); + } + + private TimeSpan CalculateBackoff(int attempt) + { + var initial = _queueOptions.RetryInitialBackoff > TimeSpan.Zero + ? _queueOptions.RetryInitialBackoff + : _options.RetryDelay; + + if (initial <= TimeSpan.Zero) + { + return TimeSpan.Zero; + } + + if (attempt <= 1) + { + return initial; + } + + var max = _queueOptions.RetryMaxBackoff > TimeSpan.Zero + ? _queueOptions.RetryMaxBackoff + : initial; + + var exponent = attempt - 1; + var scaledTicks = initial.Ticks * Math.Pow(2, exponent - 1); + var cappedTicks = Math.Min(max.Ticks, scaledTicks); + var resultTicks = Math.Max(initial.Ticks, (long)cappedTicks); + return TimeSpan.FromTicks(resultTicks); + } + + private static long ToNanoseconds(TimeSpan value) + => value <= TimeSpan.Zero ? 0 : value.Ticks * 100L; + + private static class EmptyReadOnlyDictionary<TKey, TValue> + where TKey : notnull + { + public static readonly IReadOnlyDictionary<TKey, TValue> Instance = + new ReadOnlyDictionary<TKey, TValue>(new Dictionary<TKey, TValue>(0, EqualityComparer<TKey>.Default)); + } +} diff --git a/src/StellaOps.Notify.Queue/Nats/NatsNotifyEventLease.cs b/src/Notify/__Libraries/StellaOps.Notify.Queue/Nats/NatsNotifyEventLease.cs similarity index 97% rename from src/StellaOps.Notify.Queue/Nats/NatsNotifyEventLease.cs rename to src/Notify/__Libraries/StellaOps.Notify.Queue/Nats/NatsNotifyEventLease.cs index 53458b14..81672b81 100644 --- a/src/StellaOps.Notify.Queue/Nats/NatsNotifyEventLease.cs +++ b/src/Notify/__Libraries/StellaOps.Notify.Queue/Nats/NatsNotifyEventLease.cs @@ -1,83 +1,83 @@ -using System; -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; -using NATS.Client.JetStream; - -namespace StellaOps.Notify.Queue.Nats; - -internal sealed class NatsNotifyEventLease : INotifyQueueLease<NotifyQueueEventMessage> -{ - private readonly NatsNotifyEventQueue _queue; - private readonly NatsJSMsg<byte[]> _message; - private int _completed; - - internal NatsNotifyEventLease( - NatsNotifyEventQueue queue, - NatsJSMsg<byte[]> message, - string messageId, - NotifyQueueEventMessage payload, - int attempt, - string consumer, - DateTimeOffset enqueuedAt, - DateTimeOffset leaseExpiresAt) - { - _queue = queue ?? throw new ArgumentNullException(nameof(queue)); - if (EqualityComparer<NatsJSMsg<byte[]>>.Default.Equals(message, default)) - { - throw new ArgumentException("Message must be provided.", nameof(message)); - } - - _message = message; - MessageId = messageId ?? throw new ArgumentNullException(nameof(messageId)); - Message = payload ?? throw new ArgumentNullException(nameof(payload)); - Attempt = attempt; - Consumer = consumer ?? 
throw new ArgumentNullException(nameof(consumer)); - EnqueuedAt = enqueuedAt; - LeaseExpiresAt = leaseExpiresAt; - } - - public string MessageId { get; } - - public int Attempt { get; internal set; } - - public DateTimeOffset EnqueuedAt { get; } - - public DateTimeOffset LeaseExpiresAt { get; private set; } - - public string Consumer { get; } - - public string Stream => Message.Stream; - - public string TenantId => Message.TenantId; - - public string? PartitionKey => Message.PartitionKey; - - public string IdempotencyKey => Message.IdempotencyKey; - - public string? TraceId => Message.TraceId; - - public IReadOnlyDictionary<string, string> Attributes => Message.Attributes; - - public NotifyQueueEventMessage Message { get; } - - internal NatsJSMsg<byte[]> RawMessage => _message; - - public Task AcknowledgeAsync(CancellationToken cancellationToken = default) - => _queue.AcknowledgeAsync(this, cancellationToken); - - public Task RenewAsync(TimeSpan leaseDuration, CancellationToken cancellationToken = default) - => _queue.RenewLeaseAsync(this, leaseDuration, cancellationToken); - - public Task ReleaseAsync(NotifyQueueReleaseDisposition disposition, CancellationToken cancellationToken = default) - => _queue.ReleaseAsync(this, disposition, cancellationToken); - - public Task DeadLetterAsync(string reason, CancellationToken cancellationToken = default) - => _queue.DeadLetterAsync(this, reason, cancellationToken); - - internal bool TryBeginCompletion() - => Interlocked.CompareExchange(ref _completed, 1, 0) == 0; - - internal void RefreshLease(DateTimeOffset expiresAt) - => LeaseExpiresAt = expiresAt; -} +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using NATS.Client.JetStream; + +namespace StellaOps.Notify.Queue.Nats; + +internal sealed class NatsNotifyEventLease : INotifyQueueLease<NotifyQueueEventMessage> +{ + private readonly NatsNotifyEventQueue _queue; + private readonly NatsJSMsg<byte[]> _message; + private int _completed; + + internal NatsNotifyEventLease( + NatsNotifyEventQueue queue, + NatsJSMsg<byte[]> message, + string messageId, + NotifyQueueEventMessage payload, + int attempt, + string consumer, + DateTimeOffset enqueuedAt, + DateTimeOffset leaseExpiresAt) + { + _queue = queue ?? throw new ArgumentNullException(nameof(queue)); + if (EqualityComparer<NatsJSMsg<byte[]>>.Default.Equals(message, default)) + { + throw new ArgumentException("Message must be provided.", nameof(message)); + } + + _message = message; + MessageId = messageId ?? throw new ArgumentNullException(nameof(messageId)); + Message = payload ?? throw new ArgumentNullException(nameof(payload)); + Attempt = attempt; + Consumer = consumer ?? throw new ArgumentNullException(nameof(consumer)); + EnqueuedAt = enqueuedAt; + LeaseExpiresAt = leaseExpiresAt; + } + + public string MessageId { get; } + + public int Attempt { get; internal set; } + + public DateTimeOffset EnqueuedAt { get; } + + public DateTimeOffset LeaseExpiresAt { get; private set; } + + public string Consumer { get; } + + public string Stream => Message.Stream; + + public string TenantId => Message.TenantId; + + public string? PartitionKey => Message.PartitionKey; + + public string IdempotencyKey => Message.IdempotencyKey; + + public string? 
TraceId => Message.TraceId; + + public IReadOnlyDictionary<string, string> Attributes => Message.Attributes; + + public NotifyQueueEventMessage Message { get; } + + internal NatsJSMsg<byte[]> RawMessage => _message; + + public Task AcknowledgeAsync(CancellationToken cancellationToken = default) + => _queue.AcknowledgeAsync(this, cancellationToken); + + public Task RenewAsync(TimeSpan leaseDuration, CancellationToken cancellationToken = default) + => _queue.RenewLeaseAsync(this, leaseDuration, cancellationToken); + + public Task ReleaseAsync(NotifyQueueReleaseDisposition disposition, CancellationToken cancellationToken = default) + => _queue.ReleaseAsync(this, disposition, cancellationToken); + + public Task DeadLetterAsync(string reason, CancellationToken cancellationToken = default) + => _queue.DeadLetterAsync(this, reason, cancellationToken); + + internal bool TryBeginCompletion() + => Interlocked.CompareExchange(ref _completed, 1, 0) == 0; + + internal void RefreshLease(DateTimeOffset expiresAt) + => LeaseExpiresAt = expiresAt; +} diff --git a/src/StellaOps.Notify.Queue/Nats/NatsNotifyEventQueue.cs b/src/Notify/__Libraries/StellaOps.Notify.Queue/Nats/NatsNotifyEventQueue.cs similarity index 97% rename from src/StellaOps.Notify.Queue/Nats/NatsNotifyEventQueue.cs rename to src/Notify/__Libraries/StellaOps.Notify.Queue/Nats/NatsNotifyEventQueue.cs index 023583b3..1090a3ba 100644 --- a/src/StellaOps.Notify.Queue/Nats/NatsNotifyEventQueue.cs +++ b/src/Notify/__Libraries/StellaOps.Notify.Queue/Nats/NatsNotifyEventQueue.cs @@ -1,698 +1,698 @@ -using System; -using System.Collections.Concurrent; -using System.Collections.Generic; -using System.Collections.ObjectModel; -using System.Globalization; -using System.Linq; -using System.Text; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using NATS.Client.Core; -using NATS.Client.JetStream; -using NATS.Client.JetStream.Models; -using StellaOps.Notify.Models; - -namespace StellaOps.Notify.Queue.Nats; - -internal sealed class NatsNotifyEventQueue : INotifyEventQueue, IAsyncDisposable -{ - private const string TransportName = "nats"; - - private static readonly INatsSerializer<byte[]> PayloadSerializer = NatsRawSerializer<byte[]>.Default; - - private readonly NotifyEventQueueOptions _queueOptions; - private readonly NotifyNatsEventQueueOptions _options; - private readonly ILogger<NatsNotifyEventQueue> _logger; - private readonly TimeProvider _timeProvider; - private readonly SemaphoreSlim _connectionGate = new(1, 1); - private readonly Func<NatsOpts, CancellationToken, ValueTask<NatsConnection>> _connectionFactory; - - private NatsConnection? _connection; - private NatsJSContext? _jsContext; - private INatsJSConsumer? _consumer; - private bool _disposed; - - public NatsNotifyEventQueue( - NotifyEventQueueOptions queueOptions, - NotifyNatsEventQueueOptions options, - ILogger<NatsNotifyEventQueue> logger, - TimeProvider timeProvider, - Func<NatsOpts, CancellationToken, ValueTask<NatsConnection>>? connectionFactory = null) - { - _queueOptions = queueOptions ?? throw new ArgumentNullException(nameof(queueOptions)); - _options = options ?? throw new ArgumentNullException(nameof(options)); - _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - _timeProvider = timeProvider ?? TimeProvider.System; - _connectionFactory = connectionFactory ?? 
((opts, cancellationToken) => new ValueTask<NatsConnection>(new NatsConnection(opts))); - - if (string.IsNullOrWhiteSpace(_options.Url)) - { - throw new InvalidOperationException("NATS connection URL must be configured for the Notify event queue."); - } - - if (string.IsNullOrWhiteSpace(_options.Stream) || string.IsNullOrWhiteSpace(_options.Subject)) - { - throw new InvalidOperationException("NATS stream and subject must be configured for the Notify event queue."); - } - } - - public async ValueTask<NotifyQueueEnqueueResult> PublishAsync( - NotifyQueueEventMessage message, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(message); - - var js = await GetJetStreamAsync(cancellationToken).ConfigureAwait(false); - await EnsureStreamAndConsumerAsync(js, cancellationToken).ConfigureAwait(false); - await EnsureDeadLetterStreamAsync(js, cancellationToken).ConfigureAwait(false); - - var idempotencyKey = string.IsNullOrWhiteSpace(message.IdempotencyKey) - ? message.Event.EventId.ToString("N") - : message.IdempotencyKey; - - var payload = Encoding.UTF8.GetBytes(NotifyCanonicalJsonSerializer.Serialize(message.Event)); - var headers = BuildHeaders(message, idempotencyKey); - - var publishOpts = new NatsJSPubOpts - { - MsgId = idempotencyKey, - RetryAttempts = 0 - }; - - var ack = await js.PublishAsync( - _options.Subject, - payload, - PayloadSerializer, - publishOpts, - headers, - cancellationToken) - .ConfigureAwait(false); - - if (ack.Duplicate) - { - _logger.LogDebug( - "Duplicate Notify event enqueue detected for idempotency token {Token}.", - idempotencyKey); - - NotifyQueueMetrics.RecordDeduplicated(TransportName, _options.Stream); - return new NotifyQueueEnqueueResult(ack.Seq.ToString(), true); - } - - NotifyQueueMetrics.RecordEnqueued(TransportName, _options.Stream); - _logger.LogDebug( - "Enqueued Notify event {EventId} into NATS stream {Stream} (sequence {Sequence}).", - message.Event.EventId, - ack.Stream, - ack.Seq); - - return new NotifyQueueEnqueueResult(ack.Seq.ToString(), false); - } - - public async ValueTask<IReadOnlyList<INotifyQueueLease<NotifyQueueEventMessage>>> LeaseAsync( - NotifyQueueLeaseRequest request, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(request); - - var js = await GetJetStreamAsync(cancellationToken).ConfigureAwait(false); - var consumer = await EnsureStreamAndConsumerAsync(js, cancellationToken).ConfigureAwait(false); - - var fetchOpts = new NatsJSFetchOpts - { - MaxMsgs = request.BatchSize, - Expires = request.LeaseDuration, - IdleHeartbeat = _options.IdleHeartbeat - }; - - var now = _timeProvider.GetUtcNow(); - var leases = new List<INotifyQueueLease<NotifyQueueEventMessage>>(request.BatchSize); - - await foreach (var msg in consumer.FetchAsync(PayloadSerializer, fetchOpts, cancellationToken).ConfigureAwait(false)) - { - var lease = CreateLease(msg, request.Consumer, now, request.LeaseDuration); - if (lease is null) - { - await msg.AckAsync(new AckOpts(), cancellationToken).ConfigureAwait(false); - continue; - } - - leases.Add(lease); - } - - return leases; - } - - public async ValueTask<IReadOnlyList<INotifyQueueLease<NotifyQueueEventMessage>>> ClaimExpiredAsync( - NotifyQueueClaimOptions options, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(options); - - var js = await GetJetStreamAsync(cancellationToken).ConfigureAwait(false); - var consumer = await EnsureStreamAndConsumerAsync(js, cancellationToken).ConfigureAwait(false); - - var 
fetchOpts = new NatsJSFetchOpts - { - MaxMsgs = options.BatchSize, - Expires = options.MinIdleTime, - IdleHeartbeat = _options.IdleHeartbeat - }; - - var now = _timeProvider.GetUtcNow(); - var leases = new List<INotifyQueueLease<NotifyQueueEventMessage>>(options.BatchSize); - - await foreach (var msg in consumer.FetchAsync(PayloadSerializer, fetchOpts, cancellationToken).ConfigureAwait(false)) - { - var deliveries = (int)(msg.Metadata?.NumDelivered ?? 1); - if (deliveries <= 1) - { - await msg.NakAsync(new AckOpts(), TimeSpan.Zero, cancellationToken).ConfigureAwait(false); - continue; - } - - var lease = CreateLease(msg, options.ClaimantConsumer, now, _queueOptions.DefaultLeaseDuration); - if (lease is null) - { - await msg.AckAsync(new AckOpts(), cancellationToken).ConfigureAwait(false); - continue; - } - - leases.Add(lease); - } - - return leases; - } - - public async ValueTask DisposeAsync() - { - if (_disposed) - { - return; - } - - _disposed = true; - - if (_connection is not null) - { - await _connection.DisposeAsync().ConfigureAwait(false); - } - - _connectionGate.Dispose(); - GC.SuppressFinalize(this); - } - - internal async Task AcknowledgeAsync( - NatsNotifyEventLease lease, - CancellationToken cancellationToken) - { - if (!lease.TryBeginCompletion()) - { - return; - } - - await lease.RawMessage.AckAsync(new AckOpts(), cancellationToken).ConfigureAwait(false); - NotifyQueueMetrics.RecordAck(TransportName, _options.Stream); - - _logger.LogDebug( - "Acknowledged Notify event {EventId} (sequence {Sequence}).", - lease.Message.Event.EventId, - lease.MessageId); - } - - internal async Task RenewLeaseAsync( - NatsNotifyEventLease lease, - TimeSpan leaseDuration, - CancellationToken cancellationToken) - { - await lease.RawMessage.AckProgressAsync(new AckOpts(), cancellationToken).ConfigureAwait(false); - - var expires = _timeProvider.GetUtcNow().Add(leaseDuration); - lease.RefreshLease(expires); - - _logger.LogDebug( - "Renewed NATS lease for Notify event {EventId} until {Expires:u}.", - lease.Message.Event.EventId, - expires); - } - - internal async Task ReleaseAsync( - NatsNotifyEventLease lease, - NotifyQueueReleaseDisposition disposition, - CancellationToken cancellationToken) - { - if (disposition == NotifyQueueReleaseDisposition.Retry - && lease.Attempt >= _queueOptions.MaxDeliveryAttempts) - { - _logger.LogWarning( - "Notify event {EventId} reached max delivery attempts ({Attempts}); moving to dead-letter stream.", - lease.Message.Event.EventId, - lease.Attempt); - - await DeadLetterAsync( - lease, - $"max-delivery-attempts:{lease.Attempt}", - cancellationToken).ConfigureAwait(false); - return; - } - - if (!lease.TryBeginCompletion()) - { - return; - } - - if (disposition == NotifyQueueReleaseDisposition.Retry) - { - var delay = CalculateBackoff(lease.Attempt); - await lease.RawMessage.NakAsync(new AckOpts(), delay, cancellationToken).ConfigureAwait(false); - - NotifyQueueMetrics.RecordRetry(TransportName, _options.Stream); - - _logger.LogInformation( - "Scheduled Notify event {EventId} for retry with delay {Delay} (attempt {Attempt}).", - lease.Message.Event.EventId, - delay, - lease.Attempt); - } - else - { - await lease.RawMessage.AckTerminateAsync(new AckOpts(), cancellationToken).ConfigureAwait(false); - NotifyQueueMetrics.RecordAck(TransportName, _options.Stream); - - _logger.LogInformation( - "Abandoned Notify event {EventId} after {Attempt} attempt(s).", - lease.Message.Event.EventId, - lease.Attempt); - } - } - - internal async Task DeadLetterAsync( - 
NatsNotifyEventLease lease, - string reason, - CancellationToken cancellationToken) - { - if (!lease.TryBeginCompletion()) - { - return; - } - - await lease.RawMessage.AckTerminateAsync(new AckOpts(), cancellationToken).ConfigureAwait(false); - - var js = await GetJetStreamAsync(cancellationToken).ConfigureAwait(false); - await EnsureDeadLetterStreamAsync(js, cancellationToken).ConfigureAwait(false); - - var headers = BuildDeadLetterHeaders(lease, reason); - var payload = Encoding.UTF8.GetBytes(NotifyCanonicalJsonSerializer.Serialize(lease.Message.Event)); - - await js.PublishAsync( - _options.DeadLetterSubject, - payload, - PayloadSerializer, - new NatsJSPubOpts(), - headers, - cancellationToken) - .ConfigureAwait(false); - - NotifyQueueMetrics.RecordDeadLetter(TransportName, _options.DeadLetterStream); - - _logger.LogError( - "Dead-lettered Notify event {EventId} (attempt {Attempt}): {Reason}", - lease.Message.Event.EventId, - lease.Attempt, - reason); - } - - internal async Task PingAsync(CancellationToken cancellationToken) - { - var connection = await EnsureConnectionAsync(cancellationToken).ConfigureAwait(false); - await connection.PingAsync(cancellationToken).ConfigureAwait(false); - } - - private async Task<NatsJSContext> GetJetStreamAsync(CancellationToken cancellationToken) - { - if (_jsContext is not null) - { - return _jsContext; - } - - var connection = await EnsureConnectionAsync(cancellationToken).ConfigureAwait(false); - - await _connectionGate.WaitAsync(cancellationToken).ConfigureAwait(false); - try - { - _jsContext ??= new NatsJSContext(connection); - return _jsContext; - } - finally - { - _connectionGate.Release(); - } - } - - private async ValueTask<INatsJSConsumer> EnsureStreamAndConsumerAsync( - NatsJSContext js, - CancellationToken cancellationToken) - { - if (_consumer is not null) - { - return _consumer; - } - - await _connectionGate.WaitAsync(cancellationToken).ConfigureAwait(false); - try - { - if (_consumer is not null) - { - return _consumer; - } - - await EnsureStreamAsync(js, cancellationToken).ConfigureAwait(false); - await EnsureDeadLetterStreamAsync(js, cancellationToken).ConfigureAwait(false); - - var consumerConfig = new ConsumerConfig - { - DurableName = _options.DurableConsumer, - AckPolicy = ConsumerConfigAckPolicy.Explicit, - ReplayPolicy = ConsumerConfigReplayPolicy.Instant, - DeliverPolicy = ConsumerConfigDeliverPolicy.All, - AckWait = ToNanoseconds(_options.AckWait), - MaxAckPending = _options.MaxAckPending, - MaxDeliver = Math.Max(1, _queueOptions.MaxDeliveryAttempts), - FilterSubjects = new[] { _options.Subject } - }; - - try - { - _consumer = await js.CreateConsumerAsync( - _options.Stream, - consumerConfig, - cancellationToken) - .ConfigureAwait(false); - } - catch (NatsJSApiException apiEx) - { - _logger.LogDebug( - apiEx, - "CreateConsumerAsync failed with code {Code}; attempting to fetch existing durable consumer {Durable}.", - apiEx.Error?.Code, - _options.DurableConsumer); - - _consumer = await js.GetConsumerAsync( - _options.Stream, - _options.DurableConsumer, - cancellationToken) - .ConfigureAwait(false); - } - - return _consumer; - } - finally - { - _connectionGate.Release(); - } - } - - private async Task<NatsConnection> EnsureConnectionAsync(CancellationToken cancellationToken) - { - if (_connection is not null) - { - return _connection; - } - - await _connectionGate.WaitAsync(cancellationToken).ConfigureAwait(false); - try - { - if (_connection is not null) - { - return _connection; - } - - var opts = new NatsOpts - { - Url = 
_options.Url!, - Name = "stellaops-notify-queue", - CommandTimeout = TimeSpan.FromSeconds(10), - RequestTimeout = TimeSpan.FromSeconds(20), - PingInterval = TimeSpan.FromSeconds(30) - }; - - _connection = await _connectionFactory(opts, cancellationToken).ConfigureAwait(false); - await _connection.ConnectAsync().ConfigureAwait(false); - return _connection; - } - finally - { - _connectionGate.Release(); - } - } - - private async Task EnsureStreamAsync(NatsJSContext js, CancellationToken cancellationToken) - { - try - { - await js.GetStreamAsync(_options.Stream, cancellationToken: cancellationToken).ConfigureAwait(false); - } - catch (NatsJSApiException ex) when (ex.Error?.Code == 404) - { - var config = new StreamConfig(name: _options.Stream, subjects: new[] { _options.Subject }) - { - Retention = StreamConfigRetention.Workqueue, - Storage = StreamConfigStorage.File, - MaxConsumers = -1, - MaxMsgs = -1, - MaxBytes = -1 - }; - - await js.CreateStreamAsync(config, cancellationToken).ConfigureAwait(false); - _logger.LogInformation("Created NATS Notify stream {Stream} ({Subject}).", _options.Stream, _options.Subject); - } - } - - private async Task EnsureDeadLetterStreamAsync(NatsJSContext js, CancellationToken cancellationToken) - { - try - { - await js.GetStreamAsync(_options.DeadLetterStream, cancellationToken: cancellationToken).ConfigureAwait(false); - } - catch (NatsJSApiException ex) when (ex.Error?.Code == 404) - { - var config = new StreamConfig(name: _options.DeadLetterStream, subjects: new[] { _options.DeadLetterSubject }) - { - Retention = StreamConfigRetention.Workqueue, - Storage = StreamConfigStorage.File, - MaxConsumers = -1, - MaxMsgs = -1, - MaxBytes = -1 - }; - - await js.CreateStreamAsync(config, cancellationToken).ConfigureAwait(false); - _logger.LogInformation("Created NATS Notify dead-letter stream {Stream} ({Subject}).", _options.DeadLetterStream, _options.DeadLetterSubject); - } - } - - private NatsNotifyEventLease? CreateLease( - NatsJSMsg<byte[]> message, - string consumer, - DateTimeOffset now, - TimeSpan leaseDuration) - { - var payloadBytes = message.Data ?? Array.Empty<byte>(); - if (payloadBytes.Length == 0) - { - return null; - } - - NotifyEvent notifyEvent; - try - { - var json = Encoding.UTF8.GetString(payloadBytes); - notifyEvent = NotifyCanonicalJsonSerializer.Deserialize<NotifyEvent>(json); - } - catch (Exception ex) - { - _logger.LogWarning( - ex, - "Failed to deserialize Notify event payload for NATS message {Sequence}.", - message.Metadata?.Sequence.Stream); - return null; - } - - var headers = message.Headers ?? new NatsHeaders(); - - var idempotencyKey = TryGetHeader(headers, NotifyQueueFields.IdempotencyKey) - ?? notifyEvent.EventId.ToString("N"); - - var partitionKey = TryGetHeader(headers, NotifyQueueFields.PartitionKey); - var traceId = TryGetHeader(headers, NotifyQueueFields.TraceId); - var enqueuedAt = TryGetHeader(headers, NotifyQueueFields.EnqueuedAt) is { } enqueuedRaw - && long.TryParse(enqueuedRaw, NumberStyles.Integer, CultureInfo.InvariantCulture, out var unix) - ? DateTimeOffset.FromUnixTimeMilliseconds(unix) - : now; - - var attempt = TryGetHeader(headers, NotifyQueueFields.Attempt) is { } attemptRaw - && int.TryParse(attemptRaw, NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsedAttempt) - ? parsedAttempt - : 1; - - if (message.Metadata?.NumDelivered is ulong delivered && delivered > 0) - { - var deliveredInt = delivered > int.MaxValue ? 
int.MaxValue : (int)delivered; - if (deliveredInt > attempt) - { - attempt = deliveredInt; - } - } - - var attributes = ExtractAttributes(headers); - var leaseExpires = now.Add(leaseDuration); - var messageId = message.Metadata?.Sequence.Stream.ToString() ?? Guid.NewGuid().ToString("n"); - - var queueMessage = new NotifyQueueEventMessage( - notifyEvent, - _options.Subject, - idempotencyKey, - partitionKey, - traceId, - attributes); - - return new NatsNotifyEventLease( - this, - message, - messageId, - queueMessage, - attempt, - consumer, - enqueuedAt, - leaseExpires); - } - - private NatsHeaders BuildHeaders(NotifyQueueEventMessage message, string idempotencyKey) - { - var headers = new NatsHeaders - { - { NotifyQueueFields.EventId, message.Event.EventId.ToString("D") }, - { NotifyQueueFields.Tenant, message.TenantId }, - { NotifyQueueFields.Kind, message.Event.Kind }, - { NotifyQueueFields.Attempt, "1" }, - { NotifyQueueFields.EnqueuedAt, _timeProvider.GetUtcNow().ToUnixTimeMilliseconds().ToString(CultureInfo.InvariantCulture) }, - { NotifyQueueFields.IdempotencyKey, idempotencyKey } - }; - - if (!string.IsNullOrWhiteSpace(message.TraceId)) - { - headers.Add(NotifyQueueFields.TraceId, message.TraceId!); - } - - if (!string.IsNullOrWhiteSpace(message.PartitionKey)) - { - headers.Add(NotifyQueueFields.PartitionKey, message.PartitionKey!); - } - - foreach (var kvp in message.Attributes) - { - headers.Add(NotifyQueueFields.AttributePrefix + kvp.Key, kvp.Value); - } - - return headers; - } - - private NatsHeaders BuildDeadLetterHeaders(NatsNotifyEventLease lease, string reason) - { - var headers = new NatsHeaders - { - { NotifyQueueFields.EventId, lease.Message.Event.EventId.ToString("D") }, - { NotifyQueueFields.Tenant, lease.Message.TenantId }, - { NotifyQueueFields.Kind, lease.Message.Event.Kind }, - { NotifyQueueFields.Attempt, lease.Attempt.ToString(CultureInfo.InvariantCulture) }, - { NotifyQueueFields.IdempotencyKey, lease.Message.IdempotencyKey }, - { "deadletter-reason", reason } - }; - - if (!string.IsNullOrWhiteSpace(lease.Message.TraceId)) - { - headers.Add(NotifyQueueFields.TraceId, lease.Message.TraceId!); - } - - if (!string.IsNullOrWhiteSpace(lease.Message.PartitionKey)) - { - headers.Add(NotifyQueueFields.PartitionKey, lease.Message.PartitionKey!); - } - - foreach (var kvp in lease.Message.Attributes) - { - headers.Add(NotifyQueueFields.AttributePrefix + kvp.Key, kvp.Value); - } - - return headers; - } - - private static string? TryGetHeader(NatsHeaders headers, string key) - { - if (headers.TryGetValue(key, out var values) && values.Count > 0) - { - var value = values[0]; - return string.IsNullOrWhiteSpace(value) ? null : value; - } - - return null; - } - - private static IReadOnlyDictionary<string, string> ExtractAttributes(NatsHeaders headers) - { - var attributes = new Dictionary<string, string>(StringComparer.Ordinal); - - foreach (var key in headers.Keys) - { - if (!key.StartsWith(NotifyQueueFields.AttributePrefix, StringComparison.Ordinal)) - { - continue; - } - - if (headers.TryGetValue(key, out var values) && values.Count > 0) - { - attributes[key[NotifyQueueFields.AttributePrefix.Length..]] = values[0]!; - } - } - - return attributes.Count == 0 - ? EmptyReadOnlyDictionary<string, string>.Instance - : new ReadOnlyDictionary<string, string>(attributes); - } - - private TimeSpan CalculateBackoff(int attempt) - { - var initial = _queueOptions.RetryInitialBackoff > TimeSpan.Zero - ? 
_queueOptions.RetryInitialBackoff - : _options.RetryDelay; - - if (initial <= TimeSpan.Zero) - { - return TimeSpan.Zero; - } - - if (attempt <= 1) - { - return initial; - } - - var max = _queueOptions.RetryMaxBackoff > TimeSpan.Zero - ? _queueOptions.RetryMaxBackoff - : initial; - - var exponent = attempt - 1; - var scaledTicks = initial.Ticks * Math.Pow(2, exponent - 1); - var cappedTicks = Math.Min(max.Ticks, scaledTicks); - var resultTicks = Math.Max(initial.Ticks, (long)cappedTicks); - return TimeSpan.FromTicks(resultTicks); - } - - private static long ToNanoseconds(TimeSpan value) - => value <= TimeSpan.Zero ? 0 : value.Ticks * 100L; - - private static class EmptyReadOnlyDictionary<TKey, TValue> - where TKey : notnull - { - public static readonly IReadOnlyDictionary<TKey, TValue> Instance = - new ReadOnlyDictionary<TKey, TValue>(new Dictionary<TKey, TValue>(0, EqualityComparer<TKey>.Default)); - } -} +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Globalization; +using System.Linq; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using NATS.Client.Core; +using NATS.Client.JetStream; +using NATS.Client.JetStream.Models; +using StellaOps.Notify.Models; + +namespace StellaOps.Notify.Queue.Nats; + +internal sealed class NatsNotifyEventQueue : INotifyEventQueue, IAsyncDisposable +{ + private const string TransportName = "nats"; + + private static readonly INatsSerializer<byte[]> PayloadSerializer = NatsRawSerializer<byte[]>.Default; + + private readonly NotifyEventQueueOptions _queueOptions; + private readonly NotifyNatsEventQueueOptions _options; + private readonly ILogger<NatsNotifyEventQueue> _logger; + private readonly TimeProvider _timeProvider; + private readonly SemaphoreSlim _connectionGate = new(1, 1); + private readonly Func<NatsOpts, CancellationToken, ValueTask<NatsConnection>> _connectionFactory; + + private NatsConnection? _connection; + private NatsJSContext? _jsContext; + private INatsJSConsumer? _consumer; + private bool _disposed; + + public NatsNotifyEventQueue( + NotifyEventQueueOptions queueOptions, + NotifyNatsEventQueueOptions options, + ILogger<NatsNotifyEventQueue> logger, + TimeProvider timeProvider, + Func<NatsOpts, CancellationToken, ValueTask<NatsConnection>>? connectionFactory = null) + { + _queueOptions = queueOptions ?? throw new ArgumentNullException(nameof(queueOptions)); + _options = options ?? throw new ArgumentNullException(nameof(options)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _timeProvider = timeProvider ?? TimeProvider.System; + _connectionFactory = connectionFactory ?? 
((opts, cancellationToken) => new ValueTask<NatsConnection>(new NatsConnection(opts))); + + if (string.IsNullOrWhiteSpace(_options.Url)) + { + throw new InvalidOperationException("NATS connection URL must be configured for the Notify event queue."); + } + + if (string.IsNullOrWhiteSpace(_options.Stream) || string.IsNullOrWhiteSpace(_options.Subject)) + { + throw new InvalidOperationException("NATS stream and subject must be configured for the Notify event queue."); + } + } + + public async ValueTask<NotifyQueueEnqueueResult> PublishAsync( + NotifyQueueEventMessage message, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(message); + + var js = await GetJetStreamAsync(cancellationToken).ConfigureAwait(false); + await EnsureStreamAndConsumerAsync(js, cancellationToken).ConfigureAwait(false); + await EnsureDeadLetterStreamAsync(js, cancellationToken).ConfigureAwait(false); + + var idempotencyKey = string.IsNullOrWhiteSpace(message.IdempotencyKey) + ? message.Event.EventId.ToString("N") + : message.IdempotencyKey; + + var payload = Encoding.UTF8.GetBytes(NotifyCanonicalJsonSerializer.Serialize(message.Event)); + var headers = BuildHeaders(message, idempotencyKey); + + var publishOpts = new NatsJSPubOpts + { + MsgId = idempotencyKey, + RetryAttempts = 0 + }; + + var ack = await js.PublishAsync( + _options.Subject, + payload, + PayloadSerializer, + publishOpts, + headers, + cancellationToken) + .ConfigureAwait(false); + + if (ack.Duplicate) + { + _logger.LogDebug( + "Duplicate Notify event enqueue detected for idempotency token {Token}.", + idempotencyKey); + + NotifyQueueMetrics.RecordDeduplicated(TransportName, _options.Stream); + return new NotifyQueueEnqueueResult(ack.Seq.ToString(), true); + } + + NotifyQueueMetrics.RecordEnqueued(TransportName, _options.Stream); + _logger.LogDebug( + "Enqueued Notify event {EventId} into NATS stream {Stream} (sequence {Sequence}).", + message.Event.EventId, + ack.Stream, + ack.Seq); + + return new NotifyQueueEnqueueResult(ack.Seq.ToString(), false); + } + + public async ValueTask<IReadOnlyList<INotifyQueueLease<NotifyQueueEventMessage>>> LeaseAsync( + NotifyQueueLeaseRequest request, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(request); + + var js = await GetJetStreamAsync(cancellationToken).ConfigureAwait(false); + var consumer = await EnsureStreamAndConsumerAsync(js, cancellationToken).ConfigureAwait(false); + + var fetchOpts = new NatsJSFetchOpts + { + MaxMsgs = request.BatchSize, + Expires = request.LeaseDuration, + IdleHeartbeat = _options.IdleHeartbeat + }; + + var now = _timeProvider.GetUtcNow(); + var leases = new List<INotifyQueueLease<NotifyQueueEventMessage>>(request.BatchSize); + + await foreach (var msg in consumer.FetchAsync(PayloadSerializer, fetchOpts, cancellationToken).ConfigureAwait(false)) + { + var lease = CreateLease(msg, request.Consumer, now, request.LeaseDuration); + if (lease is null) + { + await msg.AckAsync(new AckOpts(), cancellationToken).ConfigureAwait(false); + continue; + } + + leases.Add(lease); + } + + return leases; + } + + public async ValueTask<IReadOnlyList<INotifyQueueLease<NotifyQueueEventMessage>>> ClaimExpiredAsync( + NotifyQueueClaimOptions options, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(options); + + var js = await GetJetStreamAsync(cancellationToken).ConfigureAwait(false); + var consumer = await EnsureStreamAndConsumerAsync(js, cancellationToken).ConfigureAwait(false); + + var 
fetchOpts = new NatsJSFetchOpts + { + MaxMsgs = options.BatchSize, + Expires = options.MinIdleTime, + IdleHeartbeat = _options.IdleHeartbeat + }; + + var now = _timeProvider.GetUtcNow(); + var leases = new List<INotifyQueueLease<NotifyQueueEventMessage>>(options.BatchSize); + + await foreach (var msg in consumer.FetchAsync(PayloadSerializer, fetchOpts, cancellationToken).ConfigureAwait(false)) + { + var deliveries = (int)(msg.Metadata?.NumDelivered ?? 1); + if (deliveries <= 1) + { + await msg.NakAsync(new AckOpts(), TimeSpan.Zero, cancellationToken).ConfigureAwait(false); + continue; + } + + var lease = CreateLease(msg, options.ClaimantConsumer, now, _queueOptions.DefaultLeaseDuration); + if (lease is null) + { + await msg.AckAsync(new AckOpts(), cancellationToken).ConfigureAwait(false); + continue; + } + + leases.Add(lease); + } + + return leases; + } + + public async ValueTask DisposeAsync() + { + if (_disposed) + { + return; + } + + _disposed = true; + + if (_connection is not null) + { + await _connection.DisposeAsync().ConfigureAwait(false); + } + + _connectionGate.Dispose(); + GC.SuppressFinalize(this); + } + + internal async Task AcknowledgeAsync( + NatsNotifyEventLease lease, + CancellationToken cancellationToken) + { + if (!lease.TryBeginCompletion()) + { + return; + } + + await lease.RawMessage.AckAsync(new AckOpts(), cancellationToken).ConfigureAwait(false); + NotifyQueueMetrics.RecordAck(TransportName, _options.Stream); + + _logger.LogDebug( + "Acknowledged Notify event {EventId} (sequence {Sequence}).", + lease.Message.Event.EventId, + lease.MessageId); + } + + internal async Task RenewLeaseAsync( + NatsNotifyEventLease lease, + TimeSpan leaseDuration, + CancellationToken cancellationToken) + { + await lease.RawMessage.AckProgressAsync(new AckOpts(), cancellationToken).ConfigureAwait(false); + + var expires = _timeProvider.GetUtcNow().Add(leaseDuration); + lease.RefreshLease(expires); + + _logger.LogDebug( + "Renewed NATS lease for Notify event {EventId} until {Expires:u}.", + lease.Message.Event.EventId, + expires); + } + + internal async Task ReleaseAsync( + NatsNotifyEventLease lease, + NotifyQueueReleaseDisposition disposition, + CancellationToken cancellationToken) + { + if (disposition == NotifyQueueReleaseDisposition.Retry + && lease.Attempt >= _queueOptions.MaxDeliveryAttempts) + { + _logger.LogWarning( + "Notify event {EventId} reached max delivery attempts ({Attempts}); moving to dead-letter stream.", + lease.Message.Event.EventId, + lease.Attempt); + + await DeadLetterAsync( + lease, + $"max-delivery-attempts:{lease.Attempt}", + cancellationToken).ConfigureAwait(false); + return; + } + + if (!lease.TryBeginCompletion()) + { + return; + } + + if (disposition == NotifyQueueReleaseDisposition.Retry) + { + var delay = CalculateBackoff(lease.Attempt); + await lease.RawMessage.NakAsync(new AckOpts(), delay, cancellationToken).ConfigureAwait(false); + + NotifyQueueMetrics.RecordRetry(TransportName, _options.Stream); + + _logger.LogInformation( + "Scheduled Notify event {EventId} for retry with delay {Delay} (attempt {Attempt}).", + lease.Message.Event.EventId, + delay, + lease.Attempt); + } + else + { + await lease.RawMessage.AckTerminateAsync(new AckOpts(), cancellationToken).ConfigureAwait(false); + NotifyQueueMetrics.RecordAck(TransportName, _options.Stream); + + _logger.LogInformation( + "Abandoned Notify event {EventId} after {Attempt} attempt(s).", + lease.Message.Event.EventId, + lease.Attempt); + } + } + + internal async Task DeadLetterAsync( + 
NatsNotifyEventLease lease, + string reason, + CancellationToken cancellationToken) + { + if (!lease.TryBeginCompletion()) + { + return; + } + + await lease.RawMessage.AckTerminateAsync(new AckOpts(), cancellationToken).ConfigureAwait(false); + + var js = await GetJetStreamAsync(cancellationToken).ConfigureAwait(false); + await EnsureDeadLetterStreamAsync(js, cancellationToken).ConfigureAwait(false); + + var headers = BuildDeadLetterHeaders(lease, reason); + var payload = Encoding.UTF8.GetBytes(NotifyCanonicalJsonSerializer.Serialize(lease.Message.Event)); + + await js.PublishAsync( + _options.DeadLetterSubject, + payload, + PayloadSerializer, + new NatsJSPubOpts(), + headers, + cancellationToken) + .ConfigureAwait(false); + + NotifyQueueMetrics.RecordDeadLetter(TransportName, _options.DeadLetterStream); + + _logger.LogError( + "Dead-lettered Notify event {EventId} (attempt {Attempt}): {Reason}", + lease.Message.Event.EventId, + lease.Attempt, + reason); + } + + internal async Task PingAsync(CancellationToken cancellationToken) + { + var connection = await EnsureConnectionAsync(cancellationToken).ConfigureAwait(false); + await connection.PingAsync(cancellationToken).ConfigureAwait(false); + } + + private async Task<NatsJSContext> GetJetStreamAsync(CancellationToken cancellationToken) + { + if (_jsContext is not null) + { + return _jsContext; + } + + var connection = await EnsureConnectionAsync(cancellationToken).ConfigureAwait(false); + + await _connectionGate.WaitAsync(cancellationToken).ConfigureAwait(false); + try + { + _jsContext ??= new NatsJSContext(connection); + return _jsContext; + } + finally + { + _connectionGate.Release(); + } + } + + private async ValueTask<INatsJSConsumer> EnsureStreamAndConsumerAsync( + NatsJSContext js, + CancellationToken cancellationToken) + { + if (_consumer is not null) + { + return _consumer; + } + + await _connectionGate.WaitAsync(cancellationToken).ConfigureAwait(false); + try + { + if (_consumer is not null) + { + return _consumer; + } + + await EnsureStreamAsync(js, cancellationToken).ConfigureAwait(false); + await EnsureDeadLetterStreamAsync(js, cancellationToken).ConfigureAwait(false); + + var consumerConfig = new ConsumerConfig + { + DurableName = _options.DurableConsumer, + AckPolicy = ConsumerConfigAckPolicy.Explicit, + ReplayPolicy = ConsumerConfigReplayPolicy.Instant, + DeliverPolicy = ConsumerConfigDeliverPolicy.All, + AckWait = ToNanoseconds(_options.AckWait), + MaxAckPending = _options.MaxAckPending, + MaxDeliver = Math.Max(1, _queueOptions.MaxDeliveryAttempts), + FilterSubjects = new[] { _options.Subject } + }; + + try + { + _consumer = await js.CreateConsumerAsync( + _options.Stream, + consumerConfig, + cancellationToken) + .ConfigureAwait(false); + } + catch (NatsJSApiException apiEx) + { + _logger.LogDebug( + apiEx, + "CreateConsumerAsync failed with code {Code}; attempting to fetch existing durable consumer {Durable}.", + apiEx.Error?.Code, + _options.DurableConsumer); + + _consumer = await js.GetConsumerAsync( + _options.Stream, + _options.DurableConsumer, + cancellationToken) + .ConfigureAwait(false); + } + + return _consumer; + } + finally + { + _connectionGate.Release(); + } + } + + private async Task<NatsConnection> EnsureConnectionAsync(CancellationToken cancellationToken) + { + if (_connection is not null) + { + return _connection; + } + + await _connectionGate.WaitAsync(cancellationToken).ConfigureAwait(false); + try + { + if (_connection is not null) + { + return _connection; + } + + var opts = new NatsOpts + { + Url = 
_options.Url!, + Name = "stellaops-notify-queue", + CommandTimeout = TimeSpan.FromSeconds(10), + RequestTimeout = TimeSpan.FromSeconds(20), + PingInterval = TimeSpan.FromSeconds(30) + }; + + _connection = await _connectionFactory(opts, cancellationToken).ConfigureAwait(false); + await _connection.ConnectAsync().ConfigureAwait(false); + return _connection; + } + finally + { + _connectionGate.Release(); + } + } + + private async Task EnsureStreamAsync(NatsJSContext js, CancellationToken cancellationToken) + { + try + { + await js.GetStreamAsync(_options.Stream, cancellationToken: cancellationToken).ConfigureAwait(false); + } + catch (NatsJSApiException ex) when (ex.Error?.Code == 404) + { + var config = new StreamConfig(name: _options.Stream, subjects: new[] { _options.Subject }) + { + Retention = StreamConfigRetention.Workqueue, + Storage = StreamConfigStorage.File, + MaxConsumers = -1, + MaxMsgs = -1, + MaxBytes = -1 + }; + + await js.CreateStreamAsync(config, cancellationToken).ConfigureAwait(false); + _logger.LogInformation("Created NATS Notify stream {Stream} ({Subject}).", _options.Stream, _options.Subject); + } + } + + private async Task EnsureDeadLetterStreamAsync(NatsJSContext js, CancellationToken cancellationToken) + { + try + { + await js.GetStreamAsync(_options.DeadLetterStream, cancellationToken: cancellationToken).ConfigureAwait(false); + } + catch (NatsJSApiException ex) when (ex.Error?.Code == 404) + { + var config = new StreamConfig(name: _options.DeadLetterStream, subjects: new[] { _options.DeadLetterSubject }) + { + Retention = StreamConfigRetention.Workqueue, + Storage = StreamConfigStorage.File, + MaxConsumers = -1, + MaxMsgs = -1, + MaxBytes = -1 + }; + + await js.CreateStreamAsync(config, cancellationToken).ConfigureAwait(false); + _logger.LogInformation("Created NATS Notify dead-letter stream {Stream} ({Subject}).", _options.DeadLetterStream, _options.DeadLetterSubject); + } + } + + private NatsNotifyEventLease? CreateLease( + NatsJSMsg<byte[]> message, + string consumer, + DateTimeOffset now, + TimeSpan leaseDuration) + { + var payloadBytes = message.Data ?? Array.Empty<byte>(); + if (payloadBytes.Length == 0) + { + return null; + } + + NotifyEvent notifyEvent; + try + { + var json = Encoding.UTF8.GetString(payloadBytes); + notifyEvent = NotifyCanonicalJsonSerializer.Deserialize<NotifyEvent>(json); + } + catch (Exception ex) + { + _logger.LogWarning( + ex, + "Failed to deserialize Notify event payload for NATS message {Sequence}.", + message.Metadata?.Sequence.Stream); + return null; + } + + var headers = message.Headers ?? new NatsHeaders(); + + var idempotencyKey = TryGetHeader(headers, NotifyQueueFields.IdempotencyKey) + ?? notifyEvent.EventId.ToString("N"); + + var partitionKey = TryGetHeader(headers, NotifyQueueFields.PartitionKey); + var traceId = TryGetHeader(headers, NotifyQueueFields.TraceId); + var enqueuedAt = TryGetHeader(headers, NotifyQueueFields.EnqueuedAt) is { } enqueuedRaw + && long.TryParse(enqueuedRaw, NumberStyles.Integer, CultureInfo.InvariantCulture, out var unix) + ? DateTimeOffset.FromUnixTimeMilliseconds(unix) + : now; + + var attempt = TryGetHeader(headers, NotifyQueueFields.Attempt) is { } attemptRaw + && int.TryParse(attemptRaw, NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsedAttempt) + ? parsedAttempt + : 1; + + if (message.Metadata?.NumDelivered is ulong delivered && delivered > 0) + { + var deliveredInt = delivered > int.MaxValue ? 
int.MaxValue : (int)delivered; + if (deliveredInt > attempt) + { + attempt = deliveredInt; + } + } + + var attributes = ExtractAttributes(headers); + var leaseExpires = now.Add(leaseDuration); + var messageId = message.Metadata?.Sequence.Stream.ToString() ?? Guid.NewGuid().ToString("n"); + + var queueMessage = new NotifyQueueEventMessage( + notifyEvent, + _options.Subject, + idempotencyKey, + partitionKey, + traceId, + attributes); + + return new NatsNotifyEventLease( + this, + message, + messageId, + queueMessage, + attempt, + consumer, + enqueuedAt, + leaseExpires); + } + + private NatsHeaders BuildHeaders(NotifyQueueEventMessage message, string idempotencyKey) + { + var headers = new NatsHeaders + { + { NotifyQueueFields.EventId, message.Event.EventId.ToString("D") }, + { NotifyQueueFields.Tenant, message.TenantId }, + { NotifyQueueFields.Kind, message.Event.Kind }, + { NotifyQueueFields.Attempt, "1" }, + { NotifyQueueFields.EnqueuedAt, _timeProvider.GetUtcNow().ToUnixTimeMilliseconds().ToString(CultureInfo.InvariantCulture) }, + { NotifyQueueFields.IdempotencyKey, idempotencyKey } + }; + + if (!string.IsNullOrWhiteSpace(message.TraceId)) + { + headers.Add(NotifyQueueFields.TraceId, message.TraceId!); + } + + if (!string.IsNullOrWhiteSpace(message.PartitionKey)) + { + headers.Add(NotifyQueueFields.PartitionKey, message.PartitionKey!); + } + + foreach (var kvp in message.Attributes) + { + headers.Add(NotifyQueueFields.AttributePrefix + kvp.Key, kvp.Value); + } + + return headers; + } + + private NatsHeaders BuildDeadLetterHeaders(NatsNotifyEventLease lease, string reason) + { + var headers = new NatsHeaders + { + { NotifyQueueFields.EventId, lease.Message.Event.EventId.ToString("D") }, + { NotifyQueueFields.Tenant, lease.Message.TenantId }, + { NotifyQueueFields.Kind, lease.Message.Event.Kind }, + { NotifyQueueFields.Attempt, lease.Attempt.ToString(CultureInfo.InvariantCulture) }, + { NotifyQueueFields.IdempotencyKey, lease.Message.IdempotencyKey }, + { "deadletter-reason", reason } + }; + + if (!string.IsNullOrWhiteSpace(lease.Message.TraceId)) + { + headers.Add(NotifyQueueFields.TraceId, lease.Message.TraceId!); + } + + if (!string.IsNullOrWhiteSpace(lease.Message.PartitionKey)) + { + headers.Add(NotifyQueueFields.PartitionKey, lease.Message.PartitionKey!); + } + + foreach (var kvp in lease.Message.Attributes) + { + headers.Add(NotifyQueueFields.AttributePrefix + kvp.Key, kvp.Value); + } + + return headers; + } + + private static string? TryGetHeader(NatsHeaders headers, string key) + { + if (headers.TryGetValue(key, out var values) && values.Count > 0) + { + var value = values[0]; + return string.IsNullOrWhiteSpace(value) ? null : value; + } + + return null; + } + + private static IReadOnlyDictionary<string, string> ExtractAttributes(NatsHeaders headers) + { + var attributes = new Dictionary<string, string>(StringComparer.Ordinal); + + foreach (var key in headers.Keys) + { + if (!key.StartsWith(NotifyQueueFields.AttributePrefix, StringComparison.Ordinal)) + { + continue; + } + + if (headers.TryGetValue(key, out var values) && values.Count > 0) + { + attributes[key[NotifyQueueFields.AttributePrefix.Length..]] = values[0]!; + } + } + + return attributes.Count == 0 + ? EmptyReadOnlyDictionary<string, string>.Instance + : new ReadOnlyDictionary<string, string>(attributes); + } + + private TimeSpan CalculateBackoff(int attempt) + { + var initial = _queueOptions.RetryInitialBackoff > TimeSpan.Zero + ? 
_queueOptions.RetryInitialBackoff + : _options.RetryDelay; + + if (initial <= TimeSpan.Zero) + { + return TimeSpan.Zero; + } + + if (attempt <= 1) + { + return initial; + } + + var max = _queueOptions.RetryMaxBackoff > TimeSpan.Zero + ? _queueOptions.RetryMaxBackoff + : initial; + + var exponent = attempt - 1; + var scaledTicks = initial.Ticks * Math.Pow(2, exponent - 1); + var cappedTicks = Math.Min(max.Ticks, scaledTicks); + var resultTicks = Math.Max(initial.Ticks, (long)cappedTicks); + return TimeSpan.FromTicks(resultTicks); + } + + private static long ToNanoseconds(TimeSpan value) + => value <= TimeSpan.Zero ? 0 : value.Ticks * 100L; + + private static class EmptyReadOnlyDictionary<TKey, TValue> + where TKey : notnull + { + public static readonly IReadOnlyDictionary<TKey, TValue> Instance = + new ReadOnlyDictionary<TKey, TValue>(new Dictionary<TKey, TValue>(0, EqualityComparer<TKey>.Default)); + } +} diff --git a/src/StellaOps.Notify.Queue/NotifyDeliveryQueueHealthCheck.cs b/src/Notify/__Libraries/StellaOps.Notify.Queue/NotifyDeliveryQueueHealthCheck.cs similarity index 97% rename from src/StellaOps.Notify.Queue/NotifyDeliveryQueueHealthCheck.cs rename to src/Notify/__Libraries/StellaOps.Notify.Queue/NotifyDeliveryQueueHealthCheck.cs index 0b41279e..3643dc1a 100644 --- a/src/StellaOps.Notify.Queue/NotifyDeliveryQueueHealthCheck.cs +++ b/src/Notify/__Libraries/StellaOps.Notify.Queue/NotifyDeliveryQueueHealthCheck.cs @@ -1,55 +1,55 @@ -using System; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Diagnostics.HealthChecks; -using Microsoft.Extensions.Logging; -using StellaOps.Notify.Queue.Nats; -using StellaOps.Notify.Queue.Redis; - -namespace StellaOps.Notify.Queue; - -public sealed class NotifyDeliveryQueueHealthCheck : IHealthCheck -{ - private readonly INotifyDeliveryQueue _queue; - private readonly ILogger<NotifyDeliveryQueueHealthCheck> _logger; - - public NotifyDeliveryQueueHealthCheck( - INotifyDeliveryQueue queue, - ILogger<NotifyDeliveryQueueHealthCheck> logger) - { - _queue = queue ?? throw new ArgumentNullException(nameof(queue)); - _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); - } - - public async Task<HealthCheckResult> CheckHealthAsync( - HealthCheckContext context, - CancellationToken cancellationToken = default) - { - cancellationToken.ThrowIfCancellationRequested(); - - try - { - switch (_queue) - { - case RedisNotifyDeliveryQueue redisQueue: - await redisQueue.PingAsync(cancellationToken).ConfigureAwait(false); - return HealthCheckResult.Healthy("Redis Notify delivery queue reachable."); - - case NatsNotifyDeliveryQueue natsQueue: - await natsQueue.PingAsync(cancellationToken).ConfigureAwait(false); - return HealthCheckResult.Healthy("NATS Notify delivery queue reachable."); - - default: - return HealthCheckResult.Healthy("Notify delivery queue transport without dedicated ping returned healthy."); - } - } - catch (Exception ex) - { - _logger.LogError(ex, "Notify delivery queue health check failed."); - return new HealthCheckResult( - context.Registration.FailureStatus, - "Notify delivery queue transport unreachable.", - ex); - } - } -} +using System; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Diagnostics.HealthChecks; +using Microsoft.Extensions.Logging; +using StellaOps.Notify.Queue.Nats; +using StellaOps.Notify.Queue.Redis; + +namespace StellaOps.Notify.Queue; + +public sealed class NotifyDeliveryQueueHealthCheck : IHealthCheck +{ + private readonly INotifyDeliveryQueue _queue; + private readonly ILogger<NotifyDeliveryQueueHealthCheck> _logger; + + public NotifyDeliveryQueueHealthCheck( + INotifyDeliveryQueue queue, + ILogger<NotifyDeliveryQueueHealthCheck> logger) + { + _queue = queue ?? throw new ArgumentNullException(nameof(queue)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public async Task<HealthCheckResult> CheckHealthAsync( + HealthCheckContext context, + CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + + try + { + switch (_queue) + { + case RedisNotifyDeliveryQueue redisQueue: + await redisQueue.PingAsync(cancellationToken).ConfigureAwait(false); + return HealthCheckResult.Healthy("Redis Notify delivery queue reachable."); + + case NatsNotifyDeliveryQueue natsQueue: + await natsQueue.PingAsync(cancellationToken).ConfigureAwait(false); + return HealthCheckResult.Healthy("NATS Notify delivery queue reachable."); + + default: + return HealthCheckResult.Healthy("Notify delivery queue transport without dedicated ping returned healthy."); + } + } + catch (Exception ex) + { + _logger.LogError(ex, "Notify delivery queue health check failed."); + return new HealthCheckResult( + context.Registration.FailureStatus, + "Notify delivery queue transport unreachable.", + ex); + } + } +} diff --git a/src/StellaOps.Notify.Queue/NotifyDeliveryQueueOptions.cs b/src/Notify/__Libraries/StellaOps.Notify.Queue/NotifyDeliveryQueueOptions.cs similarity index 96% rename from src/StellaOps.Notify.Queue/NotifyDeliveryQueueOptions.cs rename to src/Notify/__Libraries/StellaOps.Notify.Queue/NotifyDeliveryQueueOptions.cs index dfe7554a..04024c3e 100644 --- a/src/StellaOps.Notify.Queue/NotifyDeliveryQueueOptions.cs +++ b/src/Notify/__Libraries/StellaOps.Notify.Queue/NotifyDeliveryQueueOptions.cs @@ -1,69 +1,69 @@ -using System; - -namespace StellaOps.Notify.Queue; - -/// <summary> -/// Configuration options for the Notify delivery queue abstraction. 
-/// </summary> -public sealed class NotifyDeliveryQueueOptions -{ - public NotifyQueueTransportKind Transport { get; set; } = NotifyQueueTransportKind.Redis; - - public NotifyRedisDeliveryQueueOptions Redis { get; set; } = new(); - - public NotifyNatsDeliveryQueueOptions Nats { get; set; } = new(); - - public TimeSpan DefaultLeaseDuration { get; set; } = TimeSpan.FromMinutes(5); - - public int MaxDeliveryAttempts { get; set; } = 5; - - public TimeSpan RetryInitialBackoff { get; set; } = TimeSpan.FromSeconds(5); - - public TimeSpan RetryMaxBackoff { get; set; } = TimeSpan.FromMinutes(2); - - public TimeSpan ClaimIdleThreshold { get; set; } = TimeSpan.FromMinutes(5); -} - -public sealed class NotifyRedisDeliveryQueueOptions -{ - public string? ConnectionString { get; set; } - - public int? Database { get; set; } - - public TimeSpan InitializationTimeout { get; set; } = TimeSpan.FromSeconds(30); - - public string StreamName { get; set; } = "notify:deliveries"; - - public string ConsumerGroup { get; set; } = "notify-deliveries"; - - public string IdempotencyKeyPrefix { get; set; } = "notify:deliveries:idemp:"; - - public int? ApproximateMaxLength { get; set; } - - public string DeadLetterStreamName { get; set; } = "notify:deliveries:dead"; - - public TimeSpan DeadLetterRetention { get; set; } = TimeSpan.FromDays(7); -} - -public sealed class NotifyNatsDeliveryQueueOptions -{ - public string? Url { get; set; } - - public string Stream { get; set; } = "NOTIFY_DELIVERIES"; - - public string Subject { get; set; } = "notify.deliveries"; - - public string DurableConsumer { get; set; } = "notify-deliveries"; - - public string DeadLetterStream { get; set; } = "NOTIFY_DELIVERIES_DEAD"; - - public string DeadLetterSubject { get; set; } = "notify.deliveries.dead"; - - public int MaxAckPending { get; set; } = 128; - - public TimeSpan AckWait { get; set; } = TimeSpan.FromMinutes(5); - - public TimeSpan RetryDelay { get; set; } = TimeSpan.FromSeconds(10); - - public TimeSpan IdleHeartbeat { get; set; } = TimeSpan.FromSeconds(30); -} +using System; + +namespace StellaOps.Notify.Queue; + +/// <summary> +/// Configuration options for the Notify delivery queue abstraction. +/// </summary> +public sealed class NotifyDeliveryQueueOptions +{ + public NotifyQueueTransportKind Transport { get; set; } = NotifyQueueTransportKind.Redis; + + public NotifyRedisDeliveryQueueOptions Redis { get; set; } = new(); + + public NotifyNatsDeliveryQueueOptions Nats { get; set; } = new(); + + public TimeSpan DefaultLeaseDuration { get; set; } = TimeSpan.FromMinutes(5); + + public int MaxDeliveryAttempts { get; set; } = 5; + + public TimeSpan RetryInitialBackoff { get; set; } = TimeSpan.FromSeconds(5); + + public TimeSpan RetryMaxBackoff { get; set; } = TimeSpan.FromMinutes(2); + + public TimeSpan ClaimIdleThreshold { get; set; } = TimeSpan.FromMinutes(5); +} + +public sealed class NotifyRedisDeliveryQueueOptions +{ + public string? ConnectionString { get; set; } + + public int? Database { get; set; } + + public TimeSpan InitializationTimeout { get; set; } = TimeSpan.FromSeconds(30); + + public string StreamName { get; set; } = "notify:deliveries"; + + public string ConsumerGroup { get; set; } = "notify-deliveries"; + + public string IdempotencyKeyPrefix { get; set; } = "notify:deliveries:idemp:"; + + public int? 
ApproximateMaxLength { get; set; } + + public string DeadLetterStreamName { get; set; } = "notify:deliveries:dead"; + + public TimeSpan DeadLetterRetention { get; set; } = TimeSpan.FromDays(7); +} + +public sealed class NotifyNatsDeliveryQueueOptions +{ + public string? Url { get; set; } + + public string Stream { get; set; } = "NOTIFY_DELIVERIES"; + + public string Subject { get; set; } = "notify.deliveries"; + + public string DurableConsumer { get; set; } = "notify-deliveries"; + + public string DeadLetterStream { get; set; } = "NOTIFY_DELIVERIES_DEAD"; + + public string DeadLetterSubject { get; set; } = "notify.deliveries.dead"; + + public int MaxAckPending { get; set; } = 128; + + public TimeSpan AckWait { get; set; } = TimeSpan.FromMinutes(5); + + public TimeSpan RetryDelay { get; set; } = TimeSpan.FromSeconds(10); + + public TimeSpan IdleHeartbeat { get; set; } = TimeSpan.FromSeconds(30); +} diff --git a/src/StellaOps.Notify.Queue/NotifyEventQueueOptions.cs b/src/Notify/__Libraries/StellaOps.Notify.Queue/NotifyEventQueueOptions.cs similarity index 96% rename from src/StellaOps.Notify.Queue/NotifyEventQueueOptions.cs rename to src/Notify/__Libraries/StellaOps.Notify.Queue/NotifyEventQueueOptions.cs index 946f3576..4034e35e 100644 --- a/src/StellaOps.Notify.Queue/NotifyEventQueueOptions.cs +++ b/src/Notify/__Libraries/StellaOps.Notify.Queue/NotifyEventQueueOptions.cs @@ -1,177 +1,177 @@ -using System; -using System.Collections.Generic; - -namespace StellaOps.Notify.Queue; - -/// <summary> -/// Configuration options for the Notify event queue abstraction. -/// </summary> -public sealed class NotifyEventQueueOptions -{ - /// <summary> - /// Transport backing the queue. - /// </summary> - public NotifyQueueTransportKind Transport { get; set; } = NotifyQueueTransportKind.Redis; - - /// <summary> - /// Redis-specific configuration. - /// </summary> - public NotifyRedisEventQueueOptions Redis { get; set; } = new(); - - /// <summary> - /// NATS JetStream-specific configuration. - /// </summary> - public NotifyNatsEventQueueOptions Nats { get; set; } = new(); - - /// <summary> - /// Default lease duration to use when consumers do not specify one explicitly. - /// </summary> - public TimeSpan DefaultLeaseDuration { get; set; } = TimeSpan.FromMinutes(5); - - /// <summary> - /// Maximum number of deliveries before a message should be considered failed. - /// </summary> - public int MaxDeliveryAttempts { get; set; } = 5; - - /// <summary> - /// Initial retry backoff applied when a message is released for retry. - /// </summary> - public TimeSpan RetryInitialBackoff { get; set; } = TimeSpan.FromSeconds(5); - - /// <summary> - /// Cap applied to exponential retry backoff. - /// </summary> - public TimeSpan RetryMaxBackoff { get; set; } = TimeSpan.FromMinutes(2); - - /// <summary> - /// Minimum idle window before a pending message becomes eligible for claim. - /// </summary> - public TimeSpan ClaimIdleThreshold { get; set; } = TimeSpan.FromMinutes(5); -} - -/// <summary> -/// Redis transport options for the Notify event queue. -/// </summary> -public sealed class NotifyRedisEventQueueOptions -{ - private IReadOnlyList<NotifyRedisEventStreamOptions> _streams = new List<NotifyRedisEventStreamOptions> - { - NotifyRedisEventStreamOptions.ForDefaultStream() - }; - - /// <summary> - /// Connection string for the Redis instance. - /// </summary> - public string? ConnectionString { get; set; } - - /// <summary> - /// Optional logical database to select when connecting. - /// </summary> - public int? 
Database { get; set; } - - /// <summary> - /// Time allowed for initial connection/consumer-group creation. - /// </summary> - public TimeSpan InitializationTimeout { get; set; } = TimeSpan.FromSeconds(30); - - /// <summary> - /// TTL applied to idempotency keys stored alongside events. - /// </summary> - public TimeSpan IdempotencyWindow { get; set; } = TimeSpan.FromHours(12); - - /// <summary> - /// Streams consumed by Notify. Ordering is preserved during leasing. - /// </summary> - public IReadOnlyList<NotifyRedisEventStreamOptions> Streams - { - get => _streams; - set => _streams = value is null || value.Count == 0 - ? new List<NotifyRedisEventStreamOptions> { NotifyRedisEventStreamOptions.ForDefaultStream() } - : value; - } -} - - /// <summary> - /// Per-Redis-stream options for the Notify event queue. - /// </summary> - public sealed class NotifyRedisEventStreamOptions - { - /// <summary> - /// Name of the Redis stream containing events. - /// </summary> - public string Stream { get; set; } = "notify:events"; - - /// <summary> - /// Consumer group used by Notify workers. - /// </summary> - public string ConsumerGroup { get; set; } = "notify-workers"; - - /// <summary> - /// Prefix used when storing idempotency keys in Redis. - /// </summary> - public string IdempotencyKeyPrefix { get; set; } = "notify:events:idemp:"; - - /// <summary> - /// Approximate maximum length for the stream; when set Redis will trim entries. - /// </summary> - public int? ApproximateMaxLength { get; set; } - - public static NotifyRedisEventStreamOptions ForDefaultStream() - => new(); -} - -/// <summary> -/// NATS JetStream options for the Notify event queue. -/// </summary> - public sealed class NotifyNatsEventQueueOptions - { - /// <summary> - /// URL for the JetStream-enabled NATS cluster. - /// </summary> - public string? Url { get; set; } - - /// <summary> - /// Stream name carrying Notify events. - /// </summary> - public string Stream { get; set; } = "NOTIFY_EVENTS"; - - /// <summary> - /// Subject that producers publish Notify events to. - /// </summary> - public string Subject { get; set; } = "notify.events"; - - /// <summary> - /// Durable consumer identifier for Notify workers. - /// </summary> - public string DurableConsumer { get; set; } = "notify-workers"; - - /// <summary> - /// Dead-letter stream name used when deliveries exhaust retry budget. - /// </summary> - public string DeadLetterStream { get; set; } = "NOTIFY_EVENTS_DEAD"; - - /// <summary> - /// Subject used for dead-letter publications. - /// </summary> - public string DeadLetterSubject { get; set; } = "notify.events.dead"; - - /// <summary> - /// Maximum pending messages before backpressure is applied. - /// </summary> - public int MaxAckPending { get; set; } = 256; - - /// <summary> - /// Visibility timeout applied to leased events. - /// </summary> - public TimeSpan AckWait { get; set; } = TimeSpan.FromMinutes(5); - - /// <summary> - /// Delay applied when releasing a message for retry. - /// </summary> - public TimeSpan RetryDelay { get; set; } = TimeSpan.FromSeconds(10); - - /// <summary> - /// Idle heartbeat emitted by the server to detect consumer disconnects. - /// </summary> - public TimeSpan IdleHeartbeat { get; set; } = TimeSpan.FromSeconds(30); - } +using System; +using System.Collections.Generic; + +namespace StellaOps.Notify.Queue; + +/// <summary> +/// Configuration options for the Notify event queue abstraction. 
+/// </summary> +public sealed class NotifyEventQueueOptions +{ + /// <summary> + /// Transport backing the queue. + /// </summary> + public NotifyQueueTransportKind Transport { get; set; } = NotifyQueueTransportKind.Redis; + + /// <summary> + /// Redis-specific configuration. + /// </summary> + public NotifyRedisEventQueueOptions Redis { get; set; } = new(); + + /// <summary> + /// NATS JetStream-specific configuration. + /// </summary> + public NotifyNatsEventQueueOptions Nats { get; set; } = new(); + + /// <summary> + /// Default lease duration to use when consumers do not specify one explicitly. + /// </summary> + public TimeSpan DefaultLeaseDuration { get; set; } = TimeSpan.FromMinutes(5); + + /// <summary> + /// Maximum number of deliveries before a message should be considered failed. + /// </summary> + public int MaxDeliveryAttempts { get; set; } = 5; + + /// <summary> + /// Initial retry backoff applied when a message is released for retry. + /// </summary> + public TimeSpan RetryInitialBackoff { get; set; } = TimeSpan.FromSeconds(5); + + /// <summary> + /// Cap applied to exponential retry backoff. + /// </summary> + public TimeSpan RetryMaxBackoff { get; set; } = TimeSpan.FromMinutes(2); + + /// <summary> + /// Minimum idle window before a pending message becomes eligible for claim. + /// </summary> + public TimeSpan ClaimIdleThreshold { get; set; } = TimeSpan.FromMinutes(5); +} + +/// <summary> +/// Redis transport options for the Notify event queue. +/// </summary> +public sealed class NotifyRedisEventQueueOptions +{ + private IReadOnlyList<NotifyRedisEventStreamOptions> _streams = new List<NotifyRedisEventStreamOptions> + { + NotifyRedisEventStreamOptions.ForDefaultStream() + }; + + /// <summary> + /// Connection string for the Redis instance. + /// </summary> + public string? ConnectionString { get; set; } + + /// <summary> + /// Optional logical database to select when connecting. + /// </summary> + public int? Database { get; set; } + + /// <summary> + /// Time allowed for initial connection/consumer-group creation. + /// </summary> + public TimeSpan InitializationTimeout { get; set; } = TimeSpan.FromSeconds(30); + + /// <summary> + /// TTL applied to idempotency keys stored alongside events. + /// </summary> + public TimeSpan IdempotencyWindow { get; set; } = TimeSpan.FromHours(12); + + /// <summary> + /// Streams consumed by Notify. Ordering is preserved during leasing. + /// </summary> + public IReadOnlyList<NotifyRedisEventStreamOptions> Streams + { + get => _streams; + set => _streams = value is null || value.Count == 0 + ? new List<NotifyRedisEventStreamOptions> { NotifyRedisEventStreamOptions.ForDefaultStream() } + : value; + } +} + + /// <summary> + /// Per-Redis-stream options for the Notify event queue. + /// </summary> + public sealed class NotifyRedisEventStreamOptions + { + /// <summary> + /// Name of the Redis stream containing events. + /// </summary> + public string Stream { get; set; } = "notify:events"; + + /// <summary> + /// Consumer group used by Notify workers. + /// </summary> + public string ConsumerGroup { get; set; } = "notify-workers"; + + /// <summary> + /// Prefix used when storing idempotency keys in Redis. + /// </summary> + public string IdempotencyKeyPrefix { get; set; } = "notify:events:idemp:"; + + /// <summary> + /// Approximate maximum length for the stream; when set Redis will trim entries. + /// </summary> + public int? 
ApproximateMaxLength { get; set; } + + public static NotifyRedisEventStreamOptions ForDefaultStream() + => new(); +} + +/// <summary> +/// NATS JetStream options for the Notify event queue. +/// </summary> + public sealed class NotifyNatsEventQueueOptions + { + /// <summary> + /// URL for the JetStream-enabled NATS cluster. + /// </summary> + public string? Url { get; set; } + + /// <summary> + /// Stream name carrying Notify events. + /// </summary> + public string Stream { get; set; } = "NOTIFY_EVENTS"; + + /// <summary> + /// Subject that producers publish Notify events to. + /// </summary> + public string Subject { get; set; } = "notify.events"; + + /// <summary> + /// Durable consumer identifier for Notify workers. + /// </summary> + public string DurableConsumer { get; set; } = "notify-workers"; + + /// <summary> + /// Dead-letter stream name used when deliveries exhaust retry budget. + /// </summary> + public string DeadLetterStream { get; set; } = "NOTIFY_EVENTS_DEAD"; + + /// <summary> + /// Subject used for dead-letter publications. + /// </summary> + public string DeadLetterSubject { get; set; } = "notify.events.dead"; + + /// <summary> + /// Maximum pending messages before backpressure is applied. + /// </summary> + public int MaxAckPending { get; set; } = 256; + + /// <summary> + /// Visibility timeout applied to leased events. + /// </summary> + public TimeSpan AckWait { get; set; } = TimeSpan.FromMinutes(5); + + /// <summary> + /// Delay applied when releasing a message for retry. + /// </summary> + public TimeSpan RetryDelay { get; set; } = TimeSpan.FromSeconds(10); + + /// <summary> + /// Idle heartbeat emitted by the server to detect consumer disconnects. + /// </summary> + public TimeSpan IdleHeartbeat { get; set; } = TimeSpan.FromSeconds(30); + } diff --git a/src/StellaOps.Notify.Queue/NotifyQueueContracts.cs b/src/Notify/__Libraries/StellaOps.Notify.Queue/NotifyQueueContracts.cs similarity index 96% rename from src/StellaOps.Notify.Queue/NotifyQueueContracts.cs rename to src/Notify/__Libraries/StellaOps.Notify.Queue/NotifyQueueContracts.cs index a1db1c52..53a4e76d 100644 --- a/src/StellaOps.Notify.Queue/NotifyQueueContracts.cs +++ b/src/Notify/__Libraries/StellaOps.Notify.Queue/NotifyQueueContracts.cs @@ -1,231 +1,231 @@ -using System; -using System.Collections.Generic; -using System.Collections.ObjectModel; -using System.Threading; -using System.Threading.Tasks; -using StellaOps.Notify.Models; - -namespace StellaOps.Notify.Queue; - -/// <summary> -/// Message queued for Notify event processing. -/// </summary> -public sealed class NotifyQueueEventMessage -{ - private readonly NotifyEvent _event; - private readonly IReadOnlyDictionary<string, string> _attributes; - - public NotifyQueueEventMessage( - NotifyEvent @event, - string stream, - string? idempotencyKey = null, - string? partitionKey = null, - string? traceId = null, - IReadOnlyDictionary<string, string>? attributes = null) - { - _event = @event ?? throw new ArgumentNullException(nameof(@event)); - if (string.IsNullOrWhiteSpace(stream)) - { - throw new ArgumentException("Stream must be provided.", nameof(stream)); - } - - Stream = stream; - IdempotencyKey = string.IsNullOrWhiteSpace(idempotencyKey) - ? @event.EventId.ToString("N") - : idempotencyKey!; - PartitionKey = string.IsNullOrWhiteSpace(partitionKey) ? null : partitionKey.Trim(); - TraceId = string.IsNullOrWhiteSpace(traceId) ? null : traceId.Trim(); - _attributes = attributes is null - ? 
EmptyReadOnlyDictionary<string, string>.Instance - : new ReadOnlyDictionary<string, string>(new Dictionary<string, string>(attributes, StringComparer.Ordinal)); - } - - public NotifyEvent Event => _event; - - public string Stream { get; } - - public string IdempotencyKey { get; } - - public string TenantId => _event.Tenant; - - public string? PartitionKey { get; } - - public string? TraceId { get; } - - public IReadOnlyDictionary<string, string> Attributes => _attributes; -} - -/// <summary> -/// Message queued for channel delivery execution. -/// </summary> -public sealed class NotifyDeliveryQueueMessage -{ - public const string DefaultStream = "notify:deliveries"; - - private readonly IReadOnlyDictionary<string, string> _attributes; - - public NotifyDeliveryQueueMessage( - NotifyDelivery delivery, - string channelId, - NotifyChannelType channelType, - string? stream = null, - string? traceId = null, - IReadOnlyDictionary<string, string>? attributes = null) - { - Delivery = delivery ?? throw new ArgumentNullException(nameof(delivery)); - ChannelId = NotifyValidation.EnsureNotNullOrWhiteSpace(channelId, nameof(channelId)); - ChannelType = channelType; - Stream = string.IsNullOrWhiteSpace(stream) ? DefaultStream : stream!.Trim(); - TraceId = string.IsNullOrWhiteSpace(traceId) ? null : traceId.Trim(); - _attributes = attributes is null - ? EmptyReadOnlyDictionary<string, string>.Instance - : new ReadOnlyDictionary<string, string>(new Dictionary<string, string>(attributes, StringComparer.Ordinal)); - } - - public NotifyDelivery Delivery { get; } - - public string ChannelId { get; } - - public NotifyChannelType ChannelType { get; } - - public string Stream { get; } - - public string? TraceId { get; } - - public string TenantId => Delivery.TenantId; - - public string IdempotencyKey => Delivery.DeliveryId; - - public string PartitionKey => ChannelId; - - public IReadOnlyDictionary<string, string> Attributes => _attributes; -} - -public readonly record struct NotifyQueueEnqueueResult(string MessageId, bool Deduplicated); - -public sealed class NotifyQueueLeaseRequest -{ - public NotifyQueueLeaseRequest(string consumer, int batchSize, TimeSpan leaseDuration) - { - if (string.IsNullOrWhiteSpace(consumer)) - { - throw new ArgumentException("Consumer must be provided.", nameof(consumer)); - } - - if (batchSize <= 0) - { - throw new ArgumentOutOfRangeException(nameof(batchSize), batchSize, "Batch size must be positive."); - } - - if (leaseDuration <= TimeSpan.Zero) - { - throw new ArgumentOutOfRangeException(nameof(leaseDuration), leaseDuration, "Lease duration must be positive."); - } - - Consumer = consumer; - BatchSize = batchSize; - LeaseDuration = leaseDuration; - } - - public string Consumer { get; } - - public int BatchSize { get; } - - public TimeSpan LeaseDuration { get; } -} - -public sealed class NotifyQueueClaimOptions -{ - public NotifyQueueClaimOptions(string claimantConsumer, int batchSize, TimeSpan minIdleTime) - { - if (string.IsNullOrWhiteSpace(claimantConsumer)) - { - throw new ArgumentException("Consumer must be provided.", nameof(claimantConsumer)); - } - - if (batchSize <= 0) - { - throw new ArgumentOutOfRangeException(nameof(batchSize), batchSize, "Batch size must be positive."); - } - - if (minIdleTime < TimeSpan.Zero) - { - throw new ArgumentOutOfRangeException(nameof(minIdleTime), minIdleTime, "Minimum idle time cannot be negative."); - } - - ClaimantConsumer = claimantConsumer; - BatchSize = batchSize; - MinIdleTime = minIdleTime; - } - - public string ClaimantConsumer { 
get; } - - public int BatchSize { get; } - - public TimeSpan MinIdleTime { get; } -} - -public enum NotifyQueueReleaseDisposition -{ - Retry, - Abandon -} - -public interface INotifyQueue<TMessage> -{ - ValueTask<NotifyQueueEnqueueResult> PublishAsync(TMessage message, CancellationToken cancellationToken = default); - - ValueTask<IReadOnlyList<INotifyQueueLease<TMessage>>> LeaseAsync(NotifyQueueLeaseRequest request, CancellationToken cancellationToken = default); - - ValueTask<IReadOnlyList<INotifyQueueLease<TMessage>>> ClaimExpiredAsync(NotifyQueueClaimOptions options, CancellationToken cancellationToken = default); -} - -public interface INotifyQueueLease<out TMessage> -{ - string MessageId { get; } - - int Attempt { get; } - - DateTimeOffset EnqueuedAt { get; } - - DateTimeOffset LeaseExpiresAt { get; } - - string Consumer { get; } - - string Stream { get; } - - string TenantId { get; } - - string? PartitionKey { get; } - - string IdempotencyKey { get; } - - string? TraceId { get; } - - IReadOnlyDictionary<string, string> Attributes { get; } - - TMessage Message { get; } - - Task AcknowledgeAsync(CancellationToken cancellationToken = default); - - Task RenewAsync(TimeSpan leaseDuration, CancellationToken cancellationToken = default); - - Task ReleaseAsync(NotifyQueueReleaseDisposition disposition, CancellationToken cancellationToken = default); - - Task DeadLetterAsync(string reason, CancellationToken cancellationToken = default); -} - -public interface INotifyEventQueue : INotifyQueue<NotifyQueueEventMessage> -{ -} - -public interface INotifyDeliveryQueue : INotifyQueue<NotifyDeliveryQueueMessage> -{ -} - -internal static class EmptyReadOnlyDictionary<TKey, TValue> - where TKey : notnull -{ - public static readonly IReadOnlyDictionary<TKey, TValue> Instance = - new ReadOnlyDictionary<TKey, TValue>(new Dictionary<TKey, TValue>(0, EqualityComparer<TKey>.Default)); -} +using System; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Notify.Models; + +namespace StellaOps.Notify.Queue; + +/// <summary> +/// Message queued for Notify event processing. +/// </summary> +public sealed class NotifyQueueEventMessage +{ + private readonly NotifyEvent _event; + private readonly IReadOnlyDictionary<string, string> _attributes; + + public NotifyQueueEventMessage( + NotifyEvent @event, + string stream, + string? idempotencyKey = null, + string? partitionKey = null, + string? traceId = null, + IReadOnlyDictionary<string, string>? attributes = null) + { + _event = @event ?? throw new ArgumentNullException(nameof(@event)); + if (string.IsNullOrWhiteSpace(stream)) + { + throw new ArgumentException("Stream must be provided.", nameof(stream)); + } + + Stream = stream; + IdempotencyKey = string.IsNullOrWhiteSpace(idempotencyKey) + ? @event.EventId.ToString("N") + : idempotencyKey!; + PartitionKey = string.IsNullOrWhiteSpace(partitionKey) ? null : partitionKey.Trim(); + TraceId = string.IsNullOrWhiteSpace(traceId) ? null : traceId.Trim(); + _attributes = attributes is null + ? EmptyReadOnlyDictionary<string, string>.Instance + : new ReadOnlyDictionary<string, string>(new Dictionary<string, string>(attributes, StringComparer.Ordinal)); + } + + public NotifyEvent Event => _event; + + public string Stream { get; } + + public string IdempotencyKey { get; } + + public string TenantId => _event.Tenant; + + public string? PartitionKey { get; } + + public string? 
TraceId { get; } + + public IReadOnlyDictionary<string, string> Attributes => _attributes; +} + +/// <summary> +/// Message queued for channel delivery execution. +/// </summary> +public sealed class NotifyDeliveryQueueMessage +{ + public const string DefaultStream = "notify:deliveries"; + + private readonly IReadOnlyDictionary<string, string> _attributes; + + public NotifyDeliveryQueueMessage( + NotifyDelivery delivery, + string channelId, + NotifyChannelType channelType, + string? stream = null, + string? traceId = null, + IReadOnlyDictionary<string, string>? attributes = null) + { + Delivery = delivery ?? throw new ArgumentNullException(nameof(delivery)); + ChannelId = NotifyValidation.EnsureNotNullOrWhiteSpace(channelId, nameof(channelId)); + ChannelType = channelType; + Stream = string.IsNullOrWhiteSpace(stream) ? DefaultStream : stream!.Trim(); + TraceId = string.IsNullOrWhiteSpace(traceId) ? null : traceId.Trim(); + _attributes = attributes is null + ? EmptyReadOnlyDictionary<string, string>.Instance + : new ReadOnlyDictionary<string, string>(new Dictionary<string, string>(attributes, StringComparer.Ordinal)); + } + + public NotifyDelivery Delivery { get; } + + public string ChannelId { get; } + + public NotifyChannelType ChannelType { get; } + + public string Stream { get; } + + public string? TraceId { get; } + + public string TenantId => Delivery.TenantId; + + public string IdempotencyKey => Delivery.DeliveryId; + + public string PartitionKey => ChannelId; + + public IReadOnlyDictionary<string, string> Attributes => _attributes; +} + +public readonly record struct NotifyQueueEnqueueResult(string MessageId, bool Deduplicated); + +public sealed class NotifyQueueLeaseRequest +{ + public NotifyQueueLeaseRequest(string consumer, int batchSize, TimeSpan leaseDuration) + { + if (string.IsNullOrWhiteSpace(consumer)) + { + throw new ArgumentException("Consumer must be provided.", nameof(consumer)); + } + + if (batchSize <= 0) + { + throw new ArgumentOutOfRangeException(nameof(batchSize), batchSize, "Batch size must be positive."); + } + + if (leaseDuration <= TimeSpan.Zero) + { + throw new ArgumentOutOfRangeException(nameof(leaseDuration), leaseDuration, "Lease duration must be positive."); + } + + Consumer = consumer; + BatchSize = batchSize; + LeaseDuration = leaseDuration; + } + + public string Consumer { get; } + + public int BatchSize { get; } + + public TimeSpan LeaseDuration { get; } +} + +public sealed class NotifyQueueClaimOptions +{ + public NotifyQueueClaimOptions(string claimantConsumer, int batchSize, TimeSpan minIdleTime) + { + if (string.IsNullOrWhiteSpace(claimantConsumer)) + { + throw new ArgumentException("Consumer must be provided.", nameof(claimantConsumer)); + } + + if (batchSize <= 0) + { + throw new ArgumentOutOfRangeException(nameof(batchSize), batchSize, "Batch size must be positive."); + } + + if (minIdleTime < TimeSpan.Zero) + { + throw new ArgumentOutOfRangeException(nameof(minIdleTime), minIdleTime, "Minimum idle time cannot be negative."); + } + + ClaimantConsumer = claimantConsumer; + BatchSize = batchSize; + MinIdleTime = minIdleTime; + } + + public string ClaimantConsumer { get; } + + public int BatchSize { get; } + + public TimeSpan MinIdleTime { get; } +} + +public enum NotifyQueueReleaseDisposition +{ + Retry, + Abandon +} + +public interface INotifyQueue<TMessage> +{ + ValueTask<NotifyQueueEnqueueResult> PublishAsync(TMessage message, CancellationToken cancellationToken = default); + + ValueTask<IReadOnlyList<INotifyQueueLease<TMessage>>> 
LeaseAsync(NotifyQueueLeaseRequest request, CancellationToken cancellationToken = default); + + ValueTask<IReadOnlyList<INotifyQueueLease<TMessage>>> ClaimExpiredAsync(NotifyQueueClaimOptions options, CancellationToken cancellationToken = default); +} + +public interface INotifyQueueLease<out TMessage> +{ + string MessageId { get; } + + int Attempt { get; } + + DateTimeOffset EnqueuedAt { get; } + + DateTimeOffset LeaseExpiresAt { get; } + + string Consumer { get; } + + string Stream { get; } + + string TenantId { get; } + + string? PartitionKey { get; } + + string IdempotencyKey { get; } + + string? TraceId { get; } + + IReadOnlyDictionary<string, string> Attributes { get; } + + TMessage Message { get; } + + Task AcknowledgeAsync(CancellationToken cancellationToken = default); + + Task RenewAsync(TimeSpan leaseDuration, CancellationToken cancellationToken = default); + + Task ReleaseAsync(NotifyQueueReleaseDisposition disposition, CancellationToken cancellationToken = default); + + Task DeadLetterAsync(string reason, CancellationToken cancellationToken = default); +} + +public interface INotifyEventQueue : INotifyQueue<NotifyQueueEventMessage> +{ +} + +public interface INotifyDeliveryQueue : INotifyQueue<NotifyDeliveryQueueMessage> +{ +} + +internal static class EmptyReadOnlyDictionary<TKey, TValue> + where TKey : notnull +{ + public static readonly IReadOnlyDictionary<TKey, TValue> Instance = + new ReadOnlyDictionary<TKey, TValue>(new Dictionary<TKey, TValue>(0, EqualityComparer<TKey>.Default)); +} diff --git a/src/StellaOps.Notify.Queue/NotifyQueueFields.cs b/src/Notify/__Libraries/StellaOps.Notify.Queue/NotifyQueueFields.cs similarity index 97% rename from src/StellaOps.Notify.Queue/NotifyQueueFields.cs rename to src/Notify/__Libraries/StellaOps.Notify.Queue/NotifyQueueFields.cs index 22d33f47..8e33ca46 100644 --- a/src/StellaOps.Notify.Queue/NotifyQueueFields.cs +++ b/src/Notify/__Libraries/StellaOps.Notify.Queue/NotifyQueueFields.cs @@ -1,18 +1,18 @@ -namespace StellaOps.Notify.Queue; - -internal static class NotifyQueueFields -{ - public const string Payload = "payload"; - public const string EventId = "eventId"; - public const string DeliveryId = "deliveryId"; - public const string Tenant = "tenant"; - public const string Kind = "kind"; - public const string Attempt = "attempt"; - public const string EnqueuedAt = "enqueuedAt"; - public const string TraceId = "traceId"; - public const string PartitionKey = "partitionKey"; - public const string ChannelId = "channelId"; - public const string ChannelType = "channelType"; - public const string IdempotencyKey = "idempotency"; - public const string AttributePrefix = "attr:"; -} +namespace StellaOps.Notify.Queue; + +internal static class NotifyQueueFields +{ + public const string Payload = "payload"; + public const string EventId = "eventId"; + public const string DeliveryId = "deliveryId"; + public const string Tenant = "tenant"; + public const string Kind = "kind"; + public const string Attempt = "attempt"; + public const string EnqueuedAt = "enqueuedAt"; + public const string TraceId = "traceId"; + public const string PartitionKey = "partitionKey"; + public const string ChannelId = "channelId"; + public const string ChannelType = "channelType"; + public const string IdempotencyKey = "idempotency"; + public const string AttributePrefix = "attr:"; +} diff --git a/src/StellaOps.Notify.Queue/NotifyQueueHealthCheck.cs b/src/Notify/__Libraries/StellaOps.Notify.Queue/NotifyQueueHealthCheck.cs similarity index 97% rename from 
src/StellaOps.Notify.Queue/NotifyQueueHealthCheck.cs rename to src/Notify/__Libraries/StellaOps.Notify.Queue/NotifyQueueHealthCheck.cs index 8e12398e..d9926c6f 100644 --- a/src/StellaOps.Notify.Queue/NotifyQueueHealthCheck.cs +++ b/src/Notify/__Libraries/StellaOps.Notify.Queue/NotifyQueueHealthCheck.cs @@ -1,55 +1,55 @@ -using System; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Diagnostics.HealthChecks; -using Microsoft.Extensions.Logging; -using StellaOps.Notify.Queue.Nats; -using StellaOps.Notify.Queue.Redis; - -namespace StellaOps.Notify.Queue; - -public sealed class NotifyQueueHealthCheck : IHealthCheck -{ - private readonly INotifyEventQueue _queue; - private readonly ILogger<NotifyQueueHealthCheck> _logger; - - public NotifyQueueHealthCheck( - INotifyEventQueue queue, - ILogger<NotifyQueueHealthCheck> logger) - { - _queue = queue ?? throw new ArgumentNullException(nameof(queue)); - _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - } - - public async Task<HealthCheckResult> CheckHealthAsync( - HealthCheckContext context, - CancellationToken cancellationToken = default) - { - cancellationToken.ThrowIfCancellationRequested(); - - try - { - switch (_queue) - { - case RedisNotifyEventQueue redisQueue: - await redisQueue.PingAsync(cancellationToken).ConfigureAwait(false); - return HealthCheckResult.Healthy("Redis Notify queue reachable."); - - case NatsNotifyEventQueue natsQueue: - await natsQueue.PingAsync(cancellationToken).ConfigureAwait(false); - return HealthCheckResult.Healthy("NATS Notify queue reachable."); - - default: - return HealthCheckResult.Healthy("Notify queue transport without dedicated ping returned healthy."); - } - } - catch (Exception ex) - { - _logger.LogError(ex, "Notify queue health check failed."); - return new HealthCheckResult( - context.Registration.FailureStatus, - "Notify queue transport unreachable.", - ex); - } - } -} +using System; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Diagnostics.HealthChecks; +using Microsoft.Extensions.Logging; +using StellaOps.Notify.Queue.Nats; +using StellaOps.Notify.Queue.Redis; + +namespace StellaOps.Notify.Queue; + +public sealed class NotifyQueueHealthCheck : IHealthCheck +{ + private readonly INotifyEventQueue _queue; + private readonly ILogger<NotifyQueueHealthCheck> _logger; + + public NotifyQueueHealthCheck( + INotifyEventQueue queue, + ILogger<NotifyQueueHealthCheck> logger) + { + _queue = queue ?? throw new ArgumentNullException(nameof(queue)); + _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + } + + public async Task<HealthCheckResult> CheckHealthAsync( + HealthCheckContext context, + CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + + try + { + switch (_queue) + { + case RedisNotifyEventQueue redisQueue: + await redisQueue.PingAsync(cancellationToken).ConfigureAwait(false); + return HealthCheckResult.Healthy("Redis Notify queue reachable."); + + case NatsNotifyEventQueue natsQueue: + await natsQueue.PingAsync(cancellationToken).ConfigureAwait(false); + return HealthCheckResult.Healthy("NATS Notify queue reachable."); + + default: + return HealthCheckResult.Healthy("Notify queue transport without dedicated ping returned healthy."); + } + } + catch (Exception ex) + { + _logger.LogError(ex, "Notify queue health check failed."); + return new HealthCheckResult( + context.Registration.FailureStatus, + "Notify queue transport unreachable.", + ex); + } + } +} diff --git a/src/StellaOps.Notify.Queue/NotifyQueueMetrics.cs b/src/Notify/__Libraries/StellaOps.Notify.Queue/NotifyQueueMetrics.cs similarity index 97% rename from src/StellaOps.Notify.Queue/NotifyQueueMetrics.cs rename to src/Notify/__Libraries/StellaOps.Notify.Queue/NotifyQueueMetrics.cs index 744f465a..e2959737 100644 --- a/src/StellaOps.Notify.Queue/NotifyQueueMetrics.cs +++ b/src/Notify/__Libraries/StellaOps.Notify.Queue/NotifyQueueMetrics.cs @@ -1,39 +1,39 @@ -using System.Collections.Generic; -using System.Diagnostics.Metrics; - -namespace StellaOps.Notify.Queue; - -internal static class NotifyQueueMetrics -{ - private const string TransportTag = "transport"; - private const string StreamTag = "stream"; - - private static readonly Meter Meter = new("StellaOps.Notify.Queue"); - private static readonly Counter<long> EnqueuedCounter = Meter.CreateCounter<long>("notify_queue_enqueued_total"); - private static readonly Counter<long> DeduplicatedCounter = Meter.CreateCounter<long>("notify_queue_deduplicated_total"); - private static readonly Counter<long> AckCounter = Meter.CreateCounter<long>("notify_queue_ack_total"); - private static readonly Counter<long> RetryCounter = Meter.CreateCounter<long>("notify_queue_retry_total"); - private static readonly Counter<long> DeadLetterCounter = Meter.CreateCounter<long>("notify_queue_deadletter_total"); - - public static void RecordEnqueued(string transport, string stream) - => EnqueuedCounter.Add(1, BuildTags(transport, stream)); - - public static void RecordDeduplicated(string transport, string stream) - => DeduplicatedCounter.Add(1, BuildTags(transport, stream)); - - public static void RecordAck(string transport, string stream) - => AckCounter.Add(1, BuildTags(transport, stream)); - - public static void RecordRetry(string transport, string stream) - => RetryCounter.Add(1, BuildTags(transport, stream)); - - public static void RecordDeadLetter(string transport, string stream) - => DeadLetterCounter.Add(1, BuildTags(transport, stream)); - - private static KeyValuePair<string, object?>[] BuildTags(string transport, string stream) - => new[] - { - new KeyValuePair<string, object?>(TransportTag, transport), - new KeyValuePair<string, object?>(StreamTag, stream) - }; -} +using System.Collections.Generic; +using System.Diagnostics.Metrics; + +namespace StellaOps.Notify.Queue; + +internal static class NotifyQueueMetrics +{ + private const string TransportTag = "transport"; + private const string StreamTag = "stream"; + + private static readonly Meter Meter = new("StellaOps.Notify.Queue"); 
+ private static readonly Counter<long> EnqueuedCounter = Meter.CreateCounter<long>("notify_queue_enqueued_total"); + private static readonly Counter<long> DeduplicatedCounter = Meter.CreateCounter<long>("notify_queue_deduplicated_total"); + private static readonly Counter<long> AckCounter = Meter.CreateCounter<long>("notify_queue_ack_total"); + private static readonly Counter<long> RetryCounter = Meter.CreateCounter<long>("notify_queue_retry_total"); + private static readonly Counter<long> DeadLetterCounter = Meter.CreateCounter<long>("notify_queue_deadletter_total"); + + public static void RecordEnqueued(string transport, string stream) + => EnqueuedCounter.Add(1, BuildTags(transport, stream)); + + public static void RecordDeduplicated(string transport, string stream) + => DeduplicatedCounter.Add(1, BuildTags(transport, stream)); + + public static void RecordAck(string transport, string stream) + => AckCounter.Add(1, BuildTags(transport, stream)); + + public static void RecordRetry(string transport, string stream) + => RetryCounter.Add(1, BuildTags(transport, stream)); + + public static void RecordDeadLetter(string transport, string stream) + => DeadLetterCounter.Add(1, BuildTags(transport, stream)); + + private static KeyValuePair<string, object?>[] BuildTags(string transport, string stream) + => new[] + { + new KeyValuePair<string, object?>(TransportTag, transport), + new KeyValuePair<string, object?>(StreamTag, stream) + }; +} diff --git a/src/StellaOps.Notify.Queue/NotifyQueueServiceCollectionExtensions.cs b/src/Notify/__Libraries/StellaOps.Notify.Queue/NotifyQueueServiceCollectionExtensions.cs similarity index 97% rename from src/StellaOps.Notify.Queue/NotifyQueueServiceCollectionExtensions.cs rename to src/Notify/__Libraries/StellaOps.Notify.Queue/NotifyQueueServiceCollectionExtensions.cs index a257bd5d..1ab4ca00 100644 --- a/src/StellaOps.Notify.Queue/NotifyQueueServiceCollectionExtensions.cs +++ b/src/Notify/__Libraries/StellaOps.Notify.Queue/NotifyQueueServiceCollectionExtensions.cs @@ -1,146 +1,146 @@ -using System; -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.DependencyInjection.Extensions; -using Microsoft.Extensions.Diagnostics.HealthChecks; -using Microsoft.Extensions.Logging; -using StellaOps.Notify.Queue.Nats; -using StellaOps.Notify.Queue.Redis; - -namespace StellaOps.Notify.Queue; - -public static class NotifyQueueServiceCollectionExtensions -{ - public static IServiceCollection AddNotifyEventQueue( - this IServiceCollection services, - IConfiguration configuration, - string sectionName = "notify:queue") - { - ArgumentNullException.ThrowIfNull(services); - ArgumentNullException.ThrowIfNull(configuration); - - var eventOptions = new NotifyEventQueueOptions(); - configuration.GetSection(sectionName).Bind(eventOptions); - - services.TryAddSingleton(TimeProvider.System); - services.AddSingleton(eventOptions); - - services.AddSingleton<INotifyEventQueue>(sp => - { - var loggerFactory = sp.GetRequiredService<ILoggerFactory>(); - var timeProvider = sp.GetService<TimeProvider>() ?? 
TimeProvider.System; - var opts = sp.GetRequiredService<NotifyEventQueueOptions>(); - - return opts.Transport switch - { - NotifyQueueTransportKind.Redis => new RedisNotifyEventQueue( - opts, - opts.Redis, - loggerFactory.CreateLogger<RedisNotifyEventQueue>(), - timeProvider), - NotifyQueueTransportKind.Nats => new NatsNotifyEventQueue( - opts, - opts.Nats, - loggerFactory.CreateLogger<NatsNotifyEventQueue>(), - timeProvider), - _ => throw new InvalidOperationException($"Unsupported Notify queue transport kind '{opts.Transport}'.") - }; - }); - - services.AddSingleton<NotifyQueueHealthCheck>(); - - return services; - } - - public static IServiceCollection AddNotifyDeliveryQueue( - this IServiceCollection services, - IConfiguration configuration, - string sectionName = "notify:deliveryQueue") - { - ArgumentNullException.ThrowIfNull(services); - ArgumentNullException.ThrowIfNull(configuration); - - var deliveryOptions = new NotifyDeliveryQueueOptions(); - configuration.GetSection(sectionName).Bind(deliveryOptions); - - services.AddSingleton(deliveryOptions); - - services.AddSingleton<INotifyDeliveryQueue>(sp => - { - var loggerFactory = sp.GetRequiredService<ILoggerFactory>(); - var timeProvider = sp.GetService<TimeProvider>() ?? TimeProvider.System; - var opts = sp.GetRequiredService<NotifyDeliveryQueueOptions>(); - var eventOpts = sp.GetService<NotifyEventQueueOptions>(); - - ApplyDeliveryFallbacks(opts, eventOpts); - - return opts.Transport switch - { - NotifyQueueTransportKind.Redis => new RedisNotifyDeliveryQueue( - opts, - opts.Redis, - loggerFactory.CreateLogger<RedisNotifyDeliveryQueue>(), - timeProvider), - NotifyQueueTransportKind.Nats => new NatsNotifyDeliveryQueue( - opts, - opts.Nats, - loggerFactory.CreateLogger<NatsNotifyDeliveryQueue>(), - timeProvider), - _ => throw new InvalidOperationException($"Unsupported Notify delivery queue transport kind '{opts.Transport}'.") - }; - }); - - services.AddSingleton<NotifyDeliveryQueueHealthCheck>(); - - return services; - } - - public static IHealthChecksBuilder AddNotifyQueueHealthCheck( - this IHealthChecksBuilder builder) - { - ArgumentNullException.ThrowIfNull(builder); - - builder.Services.TryAddSingleton<NotifyQueueHealthCheck>(); - builder.AddCheck<NotifyQueueHealthCheck>( - name: "notify-queue", - failureStatus: HealthStatus.Unhealthy, - tags: new[] { "notify", "queue" }); - - return builder; - } - - public static IHealthChecksBuilder AddNotifyDeliveryQueueHealthCheck( - this IHealthChecksBuilder builder) - { - ArgumentNullException.ThrowIfNull(builder); - - builder.Services.TryAddSingleton<NotifyDeliveryQueueHealthCheck>(); - builder.AddCheck<NotifyDeliveryQueueHealthCheck>( - name: "notify-delivery-queue", - failureStatus: HealthStatus.Unhealthy, - tags: new[] { "notify", "queue", "delivery" }); - - return builder; - } - - private static void ApplyDeliveryFallbacks( - NotifyDeliveryQueueOptions deliveryOptions, - NotifyEventQueueOptions? 
eventOptions) - { - if (eventOptions is null) - { - return; - } - - if (string.IsNullOrWhiteSpace(deliveryOptions.Redis.ConnectionString)) - { - deliveryOptions.Redis.ConnectionString = eventOptions.Redis.ConnectionString; - deliveryOptions.Redis.Database ??= eventOptions.Redis.Database; - } - - if (string.IsNullOrWhiteSpace(deliveryOptions.Nats.Url)) - { - deliveryOptions.Nats.Url = eventOptions.Nats.Url; - } - } -} +using System; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Microsoft.Extensions.Diagnostics.HealthChecks; +using Microsoft.Extensions.Logging; +using StellaOps.Notify.Queue.Nats; +using StellaOps.Notify.Queue.Redis; + +namespace StellaOps.Notify.Queue; + +public static class NotifyQueueServiceCollectionExtensions +{ + public static IServiceCollection AddNotifyEventQueue( + this IServiceCollection services, + IConfiguration configuration, + string sectionName = "notify:queue") + { + ArgumentNullException.ThrowIfNull(services); + ArgumentNullException.ThrowIfNull(configuration); + + var eventOptions = new NotifyEventQueueOptions(); + configuration.GetSection(sectionName).Bind(eventOptions); + + services.TryAddSingleton(TimeProvider.System); + services.AddSingleton(eventOptions); + + services.AddSingleton<INotifyEventQueue>(sp => + { + var loggerFactory = sp.GetRequiredService<ILoggerFactory>(); + var timeProvider = sp.GetService<TimeProvider>() ?? TimeProvider.System; + var opts = sp.GetRequiredService<NotifyEventQueueOptions>(); + + return opts.Transport switch + { + NotifyQueueTransportKind.Redis => new RedisNotifyEventQueue( + opts, + opts.Redis, + loggerFactory.CreateLogger<RedisNotifyEventQueue>(), + timeProvider), + NotifyQueueTransportKind.Nats => new NatsNotifyEventQueue( + opts, + opts.Nats, + loggerFactory.CreateLogger<NatsNotifyEventQueue>(), + timeProvider), + _ => throw new InvalidOperationException($"Unsupported Notify queue transport kind '{opts.Transport}'.") + }; + }); + + services.AddSingleton<NotifyQueueHealthCheck>(); + + return services; + } + + public static IServiceCollection AddNotifyDeliveryQueue( + this IServiceCollection services, + IConfiguration configuration, + string sectionName = "notify:deliveryQueue") + { + ArgumentNullException.ThrowIfNull(services); + ArgumentNullException.ThrowIfNull(configuration); + + var deliveryOptions = new NotifyDeliveryQueueOptions(); + configuration.GetSection(sectionName).Bind(deliveryOptions); + + services.AddSingleton(deliveryOptions); + + services.AddSingleton<INotifyDeliveryQueue>(sp => + { + var loggerFactory = sp.GetRequiredService<ILoggerFactory>(); + var timeProvider = sp.GetService<TimeProvider>() ?? 
TimeProvider.System; + var opts = sp.GetRequiredService<NotifyDeliveryQueueOptions>(); + var eventOpts = sp.GetService<NotifyEventQueueOptions>(); + + ApplyDeliveryFallbacks(opts, eventOpts); + + return opts.Transport switch + { + NotifyQueueTransportKind.Redis => new RedisNotifyDeliveryQueue( + opts, + opts.Redis, + loggerFactory.CreateLogger<RedisNotifyDeliveryQueue>(), + timeProvider), + NotifyQueueTransportKind.Nats => new NatsNotifyDeliveryQueue( + opts, + opts.Nats, + loggerFactory.CreateLogger<NatsNotifyDeliveryQueue>(), + timeProvider), + _ => throw new InvalidOperationException($"Unsupported Notify delivery queue transport kind '{opts.Transport}'.") + }; + }); + + services.AddSingleton<NotifyDeliveryQueueHealthCheck>(); + + return services; + } + + public static IHealthChecksBuilder AddNotifyQueueHealthCheck( + this IHealthChecksBuilder builder) + { + ArgumentNullException.ThrowIfNull(builder); + + builder.Services.TryAddSingleton<NotifyQueueHealthCheck>(); + builder.AddCheck<NotifyQueueHealthCheck>( + name: "notify-queue", + failureStatus: HealthStatus.Unhealthy, + tags: new[] { "notify", "queue" }); + + return builder; + } + + public static IHealthChecksBuilder AddNotifyDeliveryQueueHealthCheck( + this IHealthChecksBuilder builder) + { + ArgumentNullException.ThrowIfNull(builder); + + builder.Services.TryAddSingleton<NotifyDeliveryQueueHealthCheck>(); + builder.AddCheck<NotifyDeliveryQueueHealthCheck>( + name: "notify-delivery-queue", + failureStatus: HealthStatus.Unhealthy, + tags: new[] { "notify", "queue", "delivery" }); + + return builder; + } + + private static void ApplyDeliveryFallbacks( + NotifyDeliveryQueueOptions deliveryOptions, + NotifyEventQueueOptions? eventOptions) + { + if (eventOptions is null) + { + return; + } + + if (string.IsNullOrWhiteSpace(deliveryOptions.Redis.ConnectionString)) + { + deliveryOptions.Redis.ConnectionString = eventOptions.Redis.ConnectionString; + deliveryOptions.Redis.Database ??= eventOptions.Redis.Database; + } + + if (string.IsNullOrWhiteSpace(deliveryOptions.Nats.Url)) + { + deliveryOptions.Nats.Url = eventOptions.Nats.Url; + } + } +} diff --git a/src/StellaOps.Notify.Queue/NotifyQueueTransportKind.cs b/src/Notify/__Libraries/StellaOps.Notify.Queue/NotifyQueueTransportKind.cs similarity index 94% rename from src/StellaOps.Notify.Queue/NotifyQueueTransportKind.cs rename to src/Notify/__Libraries/StellaOps.Notify.Queue/NotifyQueueTransportKind.cs index cf0f13c6..0792701e 100644 --- a/src/StellaOps.Notify.Queue/NotifyQueueTransportKind.cs +++ b/src/Notify/__Libraries/StellaOps.Notify.Queue/NotifyQueueTransportKind.cs @@ -1,10 +1,10 @@ -namespace StellaOps.Notify.Queue; - -/// <summary> -/// Supported transports for the Notify event queue. -/// </summary> -public enum NotifyQueueTransportKind -{ - Redis, - Nats -} +namespace StellaOps.Notify.Queue; + +/// <summary> +/// Supported transports for the Notify event queue. 
+/// </summary> +public enum NotifyQueueTransportKind +{ + Redis, + Nats +} diff --git a/src/StellaOps.Notify.Queue/Properties/AssemblyInfo.cs b/src/Notify/__Libraries/StellaOps.Notify.Queue/Properties/AssemblyInfo.cs similarity index 97% rename from src/StellaOps.Notify.Queue/Properties/AssemblyInfo.cs rename to src/Notify/__Libraries/StellaOps.Notify.Queue/Properties/AssemblyInfo.cs index 87064c85..b80e4533 100644 --- a/src/StellaOps.Notify.Queue/Properties/AssemblyInfo.cs +++ b/src/Notify/__Libraries/StellaOps.Notify.Queue/Properties/AssemblyInfo.cs @@ -1,3 +1,3 @@ -using System.Runtime.CompilerServices; - -[assembly: InternalsVisibleTo("StellaOps.Notify.Queue.Tests")] +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("StellaOps.Notify.Queue.Tests")] diff --git a/src/StellaOps.Notify.Queue/Redis/RedisNotifyDeliveryLease.cs b/src/Notify/__Libraries/StellaOps.Notify.Queue/Redis/RedisNotifyDeliveryLease.cs similarity index 97% rename from src/StellaOps.Notify.Queue/Redis/RedisNotifyDeliveryLease.cs rename to src/Notify/__Libraries/StellaOps.Notify.Queue/Redis/RedisNotifyDeliveryLease.cs index fc61ad76..6342d5dd 100644 --- a/src/StellaOps.Notify.Queue/Redis/RedisNotifyDeliveryLease.cs +++ b/src/Notify/__Libraries/StellaOps.Notify.Queue/Redis/RedisNotifyDeliveryLease.cs @@ -1,76 +1,76 @@ -using System; -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; - -namespace StellaOps.Notify.Queue.Redis; - -internal sealed class RedisNotifyDeliveryLease : INotifyQueueLease<NotifyDeliveryQueueMessage> -{ - private readonly RedisNotifyDeliveryQueue _queue; - private int _completed; - - internal RedisNotifyDeliveryLease( - RedisNotifyDeliveryQueue queue, - string messageId, - NotifyDeliveryQueueMessage message, - int attempt, - DateTimeOffset enqueuedAt, - DateTimeOffset leaseExpiresAt, - string consumer, - string? idempotencyKey, - string partitionKey) - { - _queue = queue ?? throw new ArgumentNullException(nameof(queue)); - MessageId = messageId ?? throw new ArgumentNullException(nameof(messageId)); - Message = message ?? throw new ArgumentNullException(nameof(message)); - Attempt = attempt; - EnqueuedAt = enqueuedAt; - LeaseExpiresAt = leaseExpiresAt; - Consumer = consumer ?? throw new ArgumentNullException(nameof(consumer)); - IdempotencyKey = idempotencyKey ?? message.IdempotencyKey; - PartitionKey = partitionKey ?? message.ChannelId; - } - - public string MessageId { get; } - - public int Attempt { get; internal set; } - - public DateTimeOffset EnqueuedAt { get; } - - public DateTimeOffset LeaseExpiresAt { get; private set; } - - public string Consumer { get; } - - public string Stream => Message.Stream; - - public string TenantId => Message.TenantId; - - public string PartitionKey { get; } - - public string IdempotencyKey { get; } - - public string? 
TraceId => Message.TraceId; - - public IReadOnlyDictionary<string, string> Attributes => Message.Attributes; - - public NotifyDeliveryQueueMessage Message { get; } - - public Task AcknowledgeAsync(CancellationToken cancellationToken = default) - => _queue.AcknowledgeAsync(this, cancellationToken); - - public Task RenewAsync(TimeSpan leaseDuration, CancellationToken cancellationToken = default) - => _queue.RenewLeaseAsync(this, leaseDuration, cancellationToken); - - public Task ReleaseAsync(NotifyQueueReleaseDisposition disposition, CancellationToken cancellationToken = default) - => _queue.ReleaseAsync(this, disposition, cancellationToken); - - public Task DeadLetterAsync(string reason, CancellationToken cancellationToken = default) - => _queue.DeadLetterAsync(this, reason, cancellationToken); - - internal bool TryBeginCompletion() - => Interlocked.CompareExchange(ref _completed, 1, 0) == 0; - - internal void RefreshLease(DateTimeOffset expiresAt) - => LeaseExpiresAt = expiresAt; -} +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Notify.Queue.Redis; + +internal sealed class RedisNotifyDeliveryLease : INotifyQueueLease<NotifyDeliveryQueueMessage> +{ + private readonly RedisNotifyDeliveryQueue _queue; + private int _completed; + + internal RedisNotifyDeliveryLease( + RedisNotifyDeliveryQueue queue, + string messageId, + NotifyDeliveryQueueMessage message, + int attempt, + DateTimeOffset enqueuedAt, + DateTimeOffset leaseExpiresAt, + string consumer, + string? idempotencyKey, + string partitionKey) + { + _queue = queue ?? throw new ArgumentNullException(nameof(queue)); + MessageId = messageId ?? throw new ArgumentNullException(nameof(messageId)); + Message = message ?? throw new ArgumentNullException(nameof(message)); + Attempt = attempt; + EnqueuedAt = enqueuedAt; + LeaseExpiresAt = leaseExpiresAt; + Consumer = consumer ?? throw new ArgumentNullException(nameof(consumer)); + IdempotencyKey = idempotencyKey ?? message.IdempotencyKey; + PartitionKey = partitionKey ?? message.ChannelId; + } + + public string MessageId { get; } + + public int Attempt { get; internal set; } + + public DateTimeOffset EnqueuedAt { get; } + + public DateTimeOffset LeaseExpiresAt { get; private set; } + + public string Consumer { get; } + + public string Stream => Message.Stream; + + public string TenantId => Message.TenantId; + + public string PartitionKey { get; } + + public string IdempotencyKey { get; } + + public string? 
TraceId => Message.TraceId; + + public IReadOnlyDictionary<string, string> Attributes => Message.Attributes; + + public NotifyDeliveryQueueMessage Message { get; } + + public Task AcknowledgeAsync(CancellationToken cancellationToken = default) + => _queue.AcknowledgeAsync(this, cancellationToken); + + public Task RenewAsync(TimeSpan leaseDuration, CancellationToken cancellationToken = default) + => _queue.RenewLeaseAsync(this, leaseDuration, cancellationToken); + + public Task ReleaseAsync(NotifyQueueReleaseDisposition disposition, CancellationToken cancellationToken = default) + => _queue.ReleaseAsync(this, disposition, cancellationToken); + + public Task DeadLetterAsync(string reason, CancellationToken cancellationToken = default) + => _queue.DeadLetterAsync(this, reason, cancellationToken); + + internal bool TryBeginCompletion() + => Interlocked.CompareExchange(ref _completed, 1, 0) == 0; + + internal void RefreshLease(DateTimeOffset expiresAt) + => LeaseExpiresAt = expiresAt; +} diff --git a/src/StellaOps.Notify.Queue/Redis/RedisNotifyDeliveryQueue.cs b/src/Notify/__Libraries/StellaOps.Notify.Queue/Redis/RedisNotifyDeliveryQueue.cs similarity index 97% rename from src/StellaOps.Notify.Queue/Redis/RedisNotifyDeliveryQueue.cs rename to src/Notify/__Libraries/StellaOps.Notify.Queue/Redis/RedisNotifyDeliveryQueue.cs index 0bb93674..8d1b68f7 100644 --- a/src/StellaOps.Notify.Queue/Redis/RedisNotifyDeliveryQueue.cs +++ b/src/Notify/__Libraries/StellaOps.Notify.Queue/Redis/RedisNotifyDeliveryQueue.cs @@ -1,788 +1,788 @@ -using System; -using System.Buffers; -using System.Collections.Concurrent; -using System.Collections.Generic; -using System.Collections.ObjectModel; -using System.Globalization; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using StackExchange.Redis; -using StellaOps.Notify.Models; - -namespace StellaOps.Notify.Queue.Redis; - -internal sealed class RedisNotifyDeliveryQueue : INotifyDeliveryQueue, IAsyncDisposable -{ - private const string TransportName = "redis"; - - private readonly NotifyDeliveryQueueOptions _options; - private readonly NotifyRedisDeliveryQueueOptions _redisOptions; - private readonly ILogger<RedisNotifyDeliveryQueue> _logger; - private readonly TimeProvider _timeProvider; - private readonly Func<ConfigurationOptions, Task<IConnectionMultiplexer>> _connectionFactory; - private readonly SemaphoreSlim _connectionLock = new(1, 1); - private readonly SemaphoreSlim _groupLock = new(1, 1); - private readonly ConcurrentDictionary<string, bool> _streamInitialized = new(StringComparer.Ordinal); - - private IConnectionMultiplexer? _connection; - private bool _disposed; - - public RedisNotifyDeliveryQueue( - NotifyDeliveryQueueOptions options, - NotifyRedisDeliveryQueueOptions redisOptions, - ILogger<RedisNotifyDeliveryQueue> logger, - TimeProvider timeProvider, - Func<ConfigurationOptions, Task<IConnectionMultiplexer>>? connectionFactory = null) - { - _options = options ?? throw new ArgumentNullException(nameof(options)); - _redisOptions = redisOptions ?? throw new ArgumentNullException(nameof(redisOptions)); - _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - _timeProvider = timeProvider ?? TimeProvider.System; - _connectionFactory = connectionFactory ?? 
(async config => - { - var connection = await ConnectionMultiplexer.ConnectAsync(config).ConfigureAwait(false); - return (IConnectionMultiplexer)connection; - }); - - if (string.IsNullOrWhiteSpace(_redisOptions.ConnectionString)) - { - throw new InvalidOperationException("Redis connection string must be configured for the Notify delivery queue."); - } - } - - public async ValueTask<NotifyQueueEnqueueResult> PublishAsync( - NotifyDeliveryQueueMessage message, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(message); - cancellationToken.ThrowIfCancellationRequested(); - - var db = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false); - await EnsureConsumerGroupAsync(db, cancellationToken).ConfigureAwait(false); - - var now = _timeProvider.GetUtcNow(); - var attempt = 1; - var entries = BuildEntries(message, now, attempt); - - var messageId = await AddToStreamAsync( - db, - _redisOptions.StreamName, - entries) - .ConfigureAwait(false); - - var idempotencyKey = BuildIdempotencyKey(message.IdempotencyKey); - var stored = await db.StringSetAsync( - idempotencyKey, - messageId, - when: When.NotExists, - expiry: _options.ClaimIdleThreshold) - .ConfigureAwait(false); - - if (!stored) - { - await db.StreamDeleteAsync( - _redisOptions.StreamName, - new RedisValue[] { messageId }) - .ConfigureAwait(false); - - var existing = await db.StringGetAsync(idempotencyKey).ConfigureAwait(false); - var duplicateId = existing.IsNullOrEmpty ? messageId : existing; - - NotifyQueueMetrics.RecordDeduplicated(TransportName, _redisOptions.StreamName); - _logger.LogDebug( - "Duplicate Notify delivery enqueue detected for delivery {DeliveryId}.", - message.Delivery.DeliveryId); - - return new NotifyQueueEnqueueResult(duplicateId.ToString()!, true); - } - - NotifyQueueMetrics.RecordEnqueued(TransportName, _redisOptions.StreamName); - _logger.LogDebug( - "Enqueued Notify delivery {DeliveryId} (channel {ChannelId}) into stream {Stream}.", - message.Delivery.DeliveryId, - message.ChannelId, - _redisOptions.StreamName); - - return new NotifyQueueEnqueueResult(messageId.ToString()!, false); - } - - public async ValueTask<IReadOnlyList<INotifyQueueLease<NotifyDeliveryQueueMessage>>> LeaseAsync( - NotifyQueueLeaseRequest request, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(request); - cancellationToken.ThrowIfCancellationRequested(); - - var db = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false); - await EnsureConsumerGroupAsync(db, cancellationToken).ConfigureAwait(false); - - var entries = await db.StreamReadGroupAsync( - _redisOptions.StreamName, - _redisOptions.ConsumerGroup, - request.Consumer, - StreamPosition.NewMessages, - request.BatchSize) - .ConfigureAwait(false); - - if (entries is null || entries.Length == 0) - { - return Array.Empty<INotifyQueueLease<NotifyDeliveryQueueMessage>>(); - } - - var now = _timeProvider.GetUtcNow(); - var leases = new List<INotifyQueueLease<NotifyDeliveryQueueMessage>>(entries.Length); - - foreach (var entry in entries) - { - var lease = TryMapLease(entry, request.Consumer, now, request.LeaseDuration, attemptOverride: null); - if (lease is null) - { - await AckPoisonAsync(db, entry.Id).ConfigureAwait(false); - continue; - } - - leases.Add(lease); - } - - return leases; - } - - public async ValueTask<IReadOnlyList<INotifyQueueLease<NotifyDeliveryQueueMessage>>> ClaimExpiredAsync( - NotifyQueueClaimOptions options, - CancellationToken cancellationToken = default) - { - 
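        // Reclaim flow: read the consumer group's pending entries, keep only those idle for at
        // least options.MinIdleTime, then claim them for options.ClaimantConsumer so another
        // worker can resume deliveries whose original lease lapsed.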
ArgumentNullException.ThrowIfNull(options); - cancellationToken.ThrowIfCancellationRequested(); - - var db = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false); - await EnsureConsumerGroupAsync(db, cancellationToken).ConfigureAwait(false); - - var pending = await db.StreamPendingMessagesAsync( - _redisOptions.StreamName, - _redisOptions.ConsumerGroup, - options.BatchSize, - RedisValue.Null, - (long)options.MinIdleTime.TotalMilliseconds) - .ConfigureAwait(false); - - if (pending is null || pending.Length == 0) - { - return Array.Empty<INotifyQueueLease<NotifyDeliveryQueueMessage>>(); - } - - var eligible = pending - .Where(p => p.IdleTimeInMilliseconds >= options.MinIdleTime.TotalMilliseconds) - .ToArray(); - - if (eligible.Length == 0) - { - return Array.Empty<INotifyQueueLease<NotifyDeliveryQueueMessage>>(); - } - - var messageIds = eligible - .Select(static p => (RedisValue)p.MessageId) - .ToArray(); - - var entries = await db.StreamClaimAsync( - _redisOptions.StreamName, - _redisOptions.ConsumerGroup, - options.ClaimantConsumer, - 0, - messageIds) - .ConfigureAwait(false); - - if (entries is null || entries.Length == 0) - { - return Array.Empty<INotifyQueueLease<NotifyDeliveryQueueMessage>>(); - } - - var now = _timeProvider.GetUtcNow(); - var attemptLookup = eligible - .Where(static info => !info.MessageId.IsNullOrEmpty) - .ToDictionary( - info => info.MessageId!.ToString(), - info => (int)Math.Max(1, info.DeliveryCount), - StringComparer.Ordinal); - - var leases = new List<INotifyQueueLease<NotifyDeliveryQueueMessage>>(entries.Length); - - foreach (var entry in entries) - { - attemptLookup.TryGetValue(entry.Id.ToString(), out var attempt); - var lease = TryMapLease(entry, options.ClaimantConsumer, now, _options.DefaultLeaseDuration, attempt == 0 ? 
null : attempt); - if (lease is null) - { - await AckPoisonAsync(db, entry.Id).ConfigureAwait(false); - continue; - } - - leases.Add(lease); - } - - return leases; - } - - public async ValueTask DisposeAsync() - { - if (_disposed) - { - return; - } - - _disposed = true; - if (_connection is not null) - { - await _connection.CloseAsync().ConfigureAwait(false); - _connection.Dispose(); - } - - _connectionLock.Dispose(); - _groupLock.Dispose(); - GC.SuppressFinalize(this); - } - - internal async Task AcknowledgeAsync( - RedisNotifyDeliveryLease lease, - CancellationToken cancellationToken) - { - if (!lease.TryBeginCompletion()) - { - return; - } - - var db = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false); - - await db.StreamAcknowledgeAsync( - _redisOptions.StreamName, - _redisOptions.ConsumerGroup, - new RedisValue[] { lease.MessageId }) - .ConfigureAwait(false); - - await db.StreamDeleteAsync( - _redisOptions.StreamName, - new RedisValue[] { lease.MessageId }) - .ConfigureAwait(false); - - NotifyQueueMetrics.RecordAck(TransportName, _redisOptions.StreamName); - _logger.LogDebug( - "Acknowledged Notify delivery {DeliveryId} (message {MessageId}).", - lease.Message.Delivery.DeliveryId, - lease.MessageId); - } - - internal async Task RenewLeaseAsync( - RedisNotifyDeliveryLease lease, - TimeSpan leaseDuration, - CancellationToken cancellationToken) - { - var db = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false); - - await db.StreamClaimAsync( - _redisOptions.StreamName, - _redisOptions.ConsumerGroup, - lease.Consumer, - 0, - new RedisValue[] { lease.MessageId }) - .ConfigureAwait(false); - - var expires = _timeProvider.GetUtcNow().Add(leaseDuration); - lease.RefreshLease(expires); - - _logger.LogDebug( - "Renewed Notify delivery lease {DeliveryId} until {Expires:u}.", - lease.Message.Delivery.DeliveryId, - expires); - } - - internal async Task ReleaseAsync( - RedisNotifyDeliveryLease lease, - NotifyQueueReleaseDisposition disposition, - CancellationToken cancellationToken) - { - if (disposition == NotifyQueueReleaseDisposition.Retry - && lease.Attempt >= _options.MaxDeliveryAttempts) - { - _logger.LogWarning( - "Notify delivery {DeliveryId} reached max delivery attempts ({Attempts}); moving to dead-letter stream.", - lease.Message.Delivery.DeliveryId, - lease.Attempt); - - await DeadLetterAsync( - lease, - $"max-delivery-attempts:{lease.Attempt}", - cancellationToken).ConfigureAwait(false); - - return; - } - - if (!lease.TryBeginCompletion()) - { - return; - } - - var db = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false); - await db.StreamAcknowledgeAsync( - _redisOptions.StreamName, - _redisOptions.ConsumerGroup, - new RedisValue[] { lease.MessageId }) - .ConfigureAwait(false); - await db.StreamDeleteAsync( - _redisOptions.StreamName, - new RedisValue[] { lease.MessageId }) - .ConfigureAwait(false); - - if (disposition == NotifyQueueReleaseDisposition.Retry) - { - NotifyQueueMetrics.RecordRetry(TransportName, _redisOptions.StreamName); - - var delay = CalculateBackoff(lease.Attempt); - if (delay > TimeSpan.Zero) - { - try - { - await Task.Delay(delay, cancellationToken).ConfigureAwait(false); - } - catch (TaskCanceledException) - { - return; - } - } - - var now = _timeProvider.GetUtcNow(); - var entries = BuildEntries(lease.Message, now, lease.Attempt + 1); - - await AddToStreamAsync( - db, - _redisOptions.StreamName, - entries) - .ConfigureAwait(false); - - NotifyQueueMetrics.RecordEnqueued(TransportName, _redisOptions.StreamName); - 
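            // At this point the original entry has already been acked and deleted, the exponential
            // backoff has elapsed, and the delivery was re-added to the stream with attempt + 1.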
_logger.LogInformation( - "Retrying Notify delivery {DeliveryId} (attempt {Attempt}).", - lease.Message.Delivery.DeliveryId, - lease.Attempt + 1); - } - else - { - NotifyQueueMetrics.RecordAck(TransportName, _redisOptions.StreamName); - _logger.LogInformation( - "Abandoned Notify delivery {DeliveryId} after {Attempt} attempt(s).", - lease.Message.Delivery.DeliveryId, - lease.Attempt); - } - } - - internal async Task DeadLetterAsync( - RedisNotifyDeliveryLease lease, - string reason, - CancellationToken cancellationToken) - { - if (!lease.TryBeginCompletion()) - { - return; - } - - var db = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false); - - await db.StreamAcknowledgeAsync( - _redisOptions.StreamName, - _redisOptions.ConsumerGroup, - new RedisValue[] { lease.MessageId }) - .ConfigureAwait(false); - - await db.StreamDeleteAsync( - _redisOptions.StreamName, - new RedisValue[] { lease.MessageId }) - .ConfigureAwait(false); - - await EnsureDeadLetterStreamAsync(db, cancellationToken).ConfigureAwait(false); - - var entries = BuildDeadLetterEntries(lease, reason); - await AddToStreamAsync( - db, - _redisOptions.DeadLetterStreamName, - entries) - .ConfigureAwait(false); - - NotifyQueueMetrics.RecordDeadLetter(TransportName, _redisOptions.DeadLetterStreamName); - _logger.LogError( - "Dead-lettered Notify delivery {DeliveryId} (attempt {Attempt}): {Reason}", - lease.Message.Delivery.DeliveryId, - lease.Attempt, - reason); - } - - internal async ValueTask PingAsync(CancellationToken cancellationToken) - { - var db = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false); - _ = await db.PingAsync().ConfigureAwait(false); - } - - private async Task<IDatabase> GetDatabaseAsync(CancellationToken cancellationToken) - { - if (_connection is { IsConnected: true }) - { - return _connection.GetDatabase(_redisOptions.Database ?? -1); - } - - await _connectionLock.WaitAsync(cancellationToken).ConfigureAwait(false); - try - { - if (_connection is { IsConnected: true }) - { - return _connection.GetDatabase(_redisOptions.Database ?? -1); - } - - var configuration = ConfigurationOptions.Parse(_redisOptions.ConnectionString!); - configuration.AbortOnConnectFail = false; - if (_redisOptions.Database.HasValue) - { - configuration.DefaultDatabase = _redisOptions.Database.Value; - } - - using var timeoutCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken); - timeoutCts.CancelAfter(_redisOptions.InitializationTimeout); - - _connection = await _connectionFactory(configuration).WaitAsync(timeoutCts.Token).ConfigureAwait(false); - return _connection.GetDatabase(_redisOptions.Database ?? 
-1); - } - finally - { - _connectionLock.Release(); - } - } - - private async Task EnsureConsumerGroupAsync( - IDatabase database, - CancellationToken cancellationToken) - { - if (_streamInitialized.ContainsKey(_redisOptions.StreamName)) - { - return; - } - - await _groupLock.WaitAsync(cancellationToken).ConfigureAwait(false); - try - { - if (_streamInitialized.ContainsKey(_redisOptions.StreamName)) - { - return; - } - - try - { - await database.StreamCreateConsumerGroupAsync( - _redisOptions.StreamName, - _redisOptions.ConsumerGroup, - StreamPosition.Beginning, - createStream: true) - .ConfigureAwait(false); - } - catch (RedisServerException ex) when (ex.Message.Contains("BUSYGROUP", StringComparison.OrdinalIgnoreCase)) - { - // group already exists - } - - _streamInitialized[_redisOptions.StreamName] = true; - } - finally - { - _groupLock.Release(); - } - } - - private async Task EnsureDeadLetterStreamAsync( - IDatabase database, - CancellationToken cancellationToken) - { - if (_streamInitialized.ContainsKey(_redisOptions.DeadLetterStreamName)) - { - return; - } - - await _groupLock.WaitAsync(cancellationToken).ConfigureAwait(false); - try - { - if (_streamInitialized.ContainsKey(_redisOptions.DeadLetterStreamName)) - { - return; - } - - try - { - await database.StreamCreateConsumerGroupAsync( - _redisOptions.DeadLetterStreamName, - _redisOptions.ConsumerGroup, - StreamPosition.Beginning, - createStream: true) - .ConfigureAwait(false); - } - catch (RedisServerException ex) when (ex.Message.Contains("BUSYGROUP", StringComparison.OrdinalIgnoreCase)) - { - // ignore - } - - _streamInitialized[_redisOptions.DeadLetterStreamName] = true; - } - finally - { - _groupLock.Release(); - } - } - - private NameValueEntry[] BuildEntries( - NotifyDeliveryQueueMessage message, - DateTimeOffset enqueuedAt, - int attempt) - { - var json = NotifyCanonicalJsonSerializer.Serialize(message.Delivery); - var attributeCount = message.Attributes.Count; - - var entries = ArrayPool<NameValueEntry>.Shared.Rent(8 + attributeCount); - var index = 0; - - entries[index++] = new NameValueEntry(NotifyQueueFields.Payload, json); - entries[index++] = new NameValueEntry(NotifyQueueFields.DeliveryId, message.Delivery.DeliveryId); - entries[index++] = new NameValueEntry(NotifyQueueFields.ChannelId, message.ChannelId); - entries[index++] = new NameValueEntry(NotifyQueueFields.ChannelType, message.ChannelType.ToString()); - entries[index++] = new NameValueEntry(NotifyQueueFields.Tenant, message.Delivery.TenantId); - entries[index++] = new NameValueEntry(NotifyQueueFields.Attempt, attempt); - entries[index++] = new NameValueEntry(NotifyQueueFields.EnqueuedAt, enqueuedAt.ToUnixTimeMilliseconds()); - entries[index++] = new NameValueEntry(NotifyQueueFields.IdempotencyKey, message.IdempotencyKey); - entries[index++] = new NameValueEntry(NotifyQueueFields.TraceId, message.TraceId ?? 
string.Empty); - entries[index++] = new NameValueEntry(NotifyQueueFields.PartitionKey, message.PartitionKey); - - if (attributeCount > 0) - { - foreach (var kvp in message.Attributes) - { - entries[index++] = new NameValueEntry( - NotifyQueueFields.AttributePrefix + kvp.Key, - kvp.Value); - } - } - - return entries.AsSpan(0, index).ToArray(); - } - - private NameValueEntry[] BuildDeadLetterEntries(RedisNotifyDeliveryLease lease, string reason) - { - var json = NotifyCanonicalJsonSerializer.Serialize(lease.Message.Delivery); - var attributes = lease.Message.Attributes; - var attributeCount = attributes.Count; - - var entries = ArrayPool<NameValueEntry>.Shared.Rent(9 + attributeCount); - var index = 0; - - entries[index++] = new NameValueEntry(NotifyQueueFields.Payload, json); - entries[index++] = new NameValueEntry(NotifyQueueFields.DeliveryId, lease.Message.Delivery.DeliveryId); - entries[index++] = new NameValueEntry(NotifyQueueFields.ChannelId, lease.Message.ChannelId); - entries[index++] = new NameValueEntry(NotifyQueueFields.ChannelType, lease.Message.ChannelType.ToString()); - entries[index++] = new NameValueEntry(NotifyQueueFields.Tenant, lease.Message.Delivery.TenantId); - entries[index++] = new NameValueEntry(NotifyQueueFields.Attempt, lease.Attempt); - entries[index++] = new NameValueEntry(NotifyQueueFields.IdempotencyKey, lease.Message.IdempotencyKey); - entries[index++] = new NameValueEntry("deadletter-reason", reason); - entries[index++] = new NameValueEntry(NotifyQueueFields.TraceId, lease.Message.TraceId ?? string.Empty); - - foreach (var kvp in attributes) - { - entries[index++] = new NameValueEntry( - NotifyQueueFields.AttributePrefix + kvp.Key, - kvp.Value); - } - - return entries.AsSpan(0, index).ToArray(); - } - - private RedisNotifyDeliveryLease? TryMapLease( - StreamEntry entry, - string consumer, - DateTimeOffset now, - TimeSpan leaseDuration, - int? attemptOverride) - { - if (entry.Values is null || entry.Values.Length == 0) - { - return null; - } - - string? payload = null; - string? deliveryId = null; - string? channelId = null; - string? channelTypeRaw = null; - string? traceId = null; - string? idempotency = null; - string? partitionKey = null; - long? enqueuedAtUnix = null; - var attempt = attemptOverride ?? 
1; - var attributes = new Dictionary<string, string>(StringComparer.Ordinal); - - foreach (var value in entry.Values) - { - var name = value.Name.ToString(); - var data = value.Value; - if (name.Equals(NotifyQueueFields.Payload, StringComparison.Ordinal)) - { - payload = data.ToString(); - } - else if (name.Equals(NotifyQueueFields.DeliveryId, StringComparison.Ordinal)) - { - deliveryId = data.ToString(); - } - else if (name.Equals(NotifyQueueFields.ChannelId, StringComparison.Ordinal)) - { - channelId = data.ToString(); - } - else if (name.Equals(NotifyQueueFields.ChannelType, StringComparison.Ordinal)) - { - channelTypeRaw = data.ToString(); - } - else if (name.Equals(NotifyQueueFields.Attempt, StringComparison.Ordinal)) - { - if (int.TryParse(data.ToString(), NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsed)) - { - attempt = Math.Max(parsed, attempt); - } - } - else if (name.Equals(NotifyQueueFields.EnqueuedAt, StringComparison.Ordinal)) - { - if (long.TryParse(data.ToString(), NumberStyles.Integer, CultureInfo.InvariantCulture, out var unix)) - { - enqueuedAtUnix = unix; - } - } - else if (name.Equals(NotifyQueueFields.IdempotencyKey, StringComparison.Ordinal)) - { - idempotency = data.ToString(); - } - else if (name.Equals(NotifyQueueFields.TraceId, StringComparison.Ordinal)) - { - var text = data.ToString(); - traceId = string.IsNullOrWhiteSpace(text) ? null : text; - } - else if (name.Equals(NotifyQueueFields.PartitionKey, StringComparison.Ordinal)) - { - partitionKey = data.ToString(); - } - else if (name.StartsWith(NotifyQueueFields.AttributePrefix, StringComparison.Ordinal)) - { - attributes[name[NotifyQueueFields.AttributePrefix.Length..]] = data.ToString(); - } - } - - if (payload is null || deliveryId is null || channelId is null || channelTypeRaw is null) - { - return null; - } - - NotifyDelivery delivery; - try - { - delivery = NotifyCanonicalJsonSerializer.Deserialize<NotifyDelivery>(payload); - } - catch (Exception ex) - { - _logger.LogWarning( - ex, - "Failed to deserialize Notify delivery payload for entry {EntryId}.", - entry.Id.ToString()); - return null; - } - - if (!Enum.TryParse<NotifyChannelType>(channelTypeRaw, ignoreCase: true, out var channelType)) - { - _logger.LogWarning( - "Unknown channel type '{ChannelType}' for delivery {DeliveryId}; acknowledging as poison.", - channelTypeRaw, - deliveryId); - return null; - } - - var attributeView = attributes.Count == 0 - ? EmptyReadOnlyDictionary<string, string>.Instance - : new ReadOnlyDictionary<string, string>(attributes); - - var enqueuedAt = enqueuedAtUnix is null - ? now - : DateTimeOffset.FromUnixTimeMilliseconds(enqueuedAtUnix.Value); - - var message = new NotifyDeliveryQueueMessage( - delivery, - channelId, - channelType, - _redisOptions.StreamName, - traceId, - attributeView); - - var leaseExpires = now.Add(leaseDuration); - - return new RedisNotifyDeliveryLease( - this, - entry.Id.ToString(), - message, - attempt, - enqueuedAt, - leaseExpires, - consumer, - idempotency, - partitionKey ?? 
channelId); - } - - private async Task AckPoisonAsync(IDatabase database, RedisValue messageId) - { - await database.StreamAcknowledgeAsync( - _redisOptions.StreamName, - _redisOptions.ConsumerGroup, - new RedisValue[] { messageId }) - .ConfigureAwait(false); - - await database.StreamDeleteAsync( - _redisOptions.StreamName, - new RedisValue[] { messageId }) - .ConfigureAwait(false); - } - - private static async Task<RedisValue> AddToStreamAsync( - IDatabase database, - string stream, - IReadOnlyList<NameValueEntry> entries) - { - return await database.StreamAddAsync( - stream, - entries.ToArray()) - .ConfigureAwait(false); - } - - private string BuildIdempotencyKey(string token) - => string.Concat(_redisOptions.IdempotencyKeyPrefix, token); - - private TimeSpan CalculateBackoff(int attempt) - { - var initial = _options.RetryInitialBackoff > TimeSpan.Zero - ? _options.RetryInitialBackoff - : TimeSpan.FromSeconds(1); - - if (initial <= TimeSpan.Zero) - { - return TimeSpan.Zero; - } - - if (attempt <= 1) - { - return initial; - } - - var max = _options.RetryMaxBackoff > TimeSpan.Zero - ? _options.RetryMaxBackoff - : initial; - - var exponent = attempt - 1; - var scaledTicks = initial.Ticks * Math.Pow(2, exponent - 1); - var cappedTicks = Math.Min(max.Ticks, scaledTicks); - var resultTicks = Math.Max(initial.Ticks, (long)cappedTicks); - return TimeSpan.FromTicks(resultTicks); - } -} +using System; +using System.Buffers; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Globalization; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using StackExchange.Redis; +using StellaOps.Notify.Models; + +namespace StellaOps.Notify.Queue.Redis; + +internal sealed class RedisNotifyDeliveryQueue : INotifyDeliveryQueue, IAsyncDisposable +{ + private const string TransportName = "redis"; + + private readonly NotifyDeliveryQueueOptions _options; + private readonly NotifyRedisDeliveryQueueOptions _redisOptions; + private readonly ILogger<RedisNotifyDeliveryQueue> _logger; + private readonly TimeProvider _timeProvider; + private readonly Func<ConfigurationOptions, Task<IConnectionMultiplexer>> _connectionFactory; + private readonly SemaphoreSlim _connectionLock = new(1, 1); + private readonly SemaphoreSlim _groupLock = new(1, 1); + private readonly ConcurrentDictionary<string, bool> _streamInitialized = new(StringComparer.Ordinal); + + private IConnectionMultiplexer? _connection; + private bool _disposed; + + public RedisNotifyDeliveryQueue( + NotifyDeliveryQueueOptions options, + NotifyRedisDeliveryQueueOptions redisOptions, + ILogger<RedisNotifyDeliveryQueue> logger, + TimeProvider timeProvider, + Func<ConfigurationOptions, Task<IConnectionMultiplexer>>? connectionFactory = null) + { + _options = options ?? throw new ArgumentNullException(nameof(options)); + _redisOptions = redisOptions ?? throw new ArgumentNullException(nameof(redisOptions)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _timeProvider = timeProvider ?? TimeProvider.System; + _connectionFactory = connectionFactory ?? 
(async config => + { + var connection = await ConnectionMultiplexer.ConnectAsync(config).ConfigureAwait(false); + return (IConnectionMultiplexer)connection; + }); + + if (string.IsNullOrWhiteSpace(_redisOptions.ConnectionString)) + { + throw new InvalidOperationException("Redis connection string must be configured for the Notify delivery queue."); + } + } + + public async ValueTask<NotifyQueueEnqueueResult> PublishAsync( + NotifyDeliveryQueueMessage message, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(message); + cancellationToken.ThrowIfCancellationRequested(); + + var db = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false); + await EnsureConsumerGroupAsync(db, cancellationToken).ConfigureAwait(false); + + var now = _timeProvider.GetUtcNow(); + var attempt = 1; + var entries = BuildEntries(message, now, attempt); + + var messageId = await AddToStreamAsync( + db, + _redisOptions.StreamName, + entries) + .ConfigureAwait(false); + + var idempotencyKey = BuildIdempotencyKey(message.IdempotencyKey); + var stored = await db.StringSetAsync( + idempotencyKey, + messageId, + when: When.NotExists, + expiry: _options.ClaimIdleThreshold) + .ConfigureAwait(false); + + if (!stored) + { + await db.StreamDeleteAsync( + _redisOptions.StreamName, + new RedisValue[] { messageId }) + .ConfigureAwait(false); + + var existing = await db.StringGetAsync(idempotencyKey).ConfigureAwait(false); + var duplicateId = existing.IsNullOrEmpty ? messageId : existing; + + NotifyQueueMetrics.RecordDeduplicated(TransportName, _redisOptions.StreamName); + _logger.LogDebug( + "Duplicate Notify delivery enqueue detected for delivery {DeliveryId}.", + message.Delivery.DeliveryId); + + return new NotifyQueueEnqueueResult(duplicateId.ToString()!, true); + } + + NotifyQueueMetrics.RecordEnqueued(TransportName, _redisOptions.StreamName); + _logger.LogDebug( + "Enqueued Notify delivery {DeliveryId} (channel {ChannelId}) into stream {Stream}.", + message.Delivery.DeliveryId, + message.ChannelId, + _redisOptions.StreamName); + + return new NotifyQueueEnqueueResult(messageId.ToString()!, false); + } + + public async ValueTask<IReadOnlyList<INotifyQueueLease<NotifyDeliveryQueueMessage>>> LeaseAsync( + NotifyQueueLeaseRequest request, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(request); + cancellationToken.ThrowIfCancellationRequested(); + + var db = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false); + await EnsureConsumerGroupAsync(db, cancellationToken).ConfigureAwait(false); + + var entries = await db.StreamReadGroupAsync( + _redisOptions.StreamName, + _redisOptions.ConsumerGroup, + request.Consumer, + StreamPosition.NewMessages, + request.BatchSize) + .ConfigureAwait(false); + + if (entries is null || entries.Length == 0) + { + return Array.Empty<INotifyQueueLease<NotifyDeliveryQueueMessage>>(); + } + + var now = _timeProvider.GetUtcNow(); + var leases = new List<INotifyQueueLease<NotifyDeliveryQueueMessage>>(entries.Length); + + foreach (var entry in entries) + { + var lease = TryMapLease(entry, request.Consumer, now, request.LeaseDuration, attemptOverride: null); + if (lease is null) + { + await AckPoisonAsync(db, entry.Id).ConfigureAwait(false); + continue; + } + + leases.Add(lease); + } + + return leases; + } + + public async ValueTask<IReadOnlyList<INotifyQueueLease<NotifyDeliveryQueueMessage>>> ClaimExpiredAsync( + NotifyQueueClaimOptions options, + CancellationToken cancellationToken = default) + { + 
ArgumentNullException.ThrowIfNull(options); + cancellationToken.ThrowIfCancellationRequested(); + + var db = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false); + await EnsureConsumerGroupAsync(db, cancellationToken).ConfigureAwait(false); + + var pending = await db.StreamPendingMessagesAsync( + _redisOptions.StreamName, + _redisOptions.ConsumerGroup, + options.BatchSize, + RedisValue.Null, + (long)options.MinIdleTime.TotalMilliseconds) + .ConfigureAwait(false); + + if (pending is null || pending.Length == 0) + { + return Array.Empty<INotifyQueueLease<NotifyDeliveryQueueMessage>>(); + } + + var eligible = pending + .Where(p => p.IdleTimeInMilliseconds >= options.MinIdleTime.TotalMilliseconds) + .ToArray(); + + if (eligible.Length == 0) + { + return Array.Empty<INotifyQueueLease<NotifyDeliveryQueueMessage>>(); + } + + var messageIds = eligible + .Select(static p => (RedisValue)p.MessageId) + .ToArray(); + + var entries = await db.StreamClaimAsync( + _redisOptions.StreamName, + _redisOptions.ConsumerGroup, + options.ClaimantConsumer, + 0, + messageIds) + .ConfigureAwait(false); + + if (entries is null || entries.Length == 0) + { + return Array.Empty<INotifyQueueLease<NotifyDeliveryQueueMessage>>(); + } + + var now = _timeProvider.GetUtcNow(); + var attemptLookup = eligible + .Where(static info => !info.MessageId.IsNullOrEmpty) + .ToDictionary( + info => info.MessageId!.ToString(), + info => (int)Math.Max(1, info.DeliveryCount), + StringComparer.Ordinal); + + var leases = new List<INotifyQueueLease<NotifyDeliveryQueueMessage>>(entries.Length); + + foreach (var entry in entries) + { + attemptLookup.TryGetValue(entry.Id.ToString(), out var attempt); + var lease = TryMapLease(entry, options.ClaimantConsumer, now, _options.DefaultLeaseDuration, attempt == 0 ? 
null : attempt); + if (lease is null) + { + await AckPoisonAsync(db, entry.Id).ConfigureAwait(false); + continue; + } + + leases.Add(lease); + } + + return leases; + } + + public async ValueTask DisposeAsync() + { + if (_disposed) + { + return; + } + + _disposed = true; + if (_connection is not null) + { + await _connection.CloseAsync().ConfigureAwait(false); + _connection.Dispose(); + } + + _connectionLock.Dispose(); + _groupLock.Dispose(); + GC.SuppressFinalize(this); + } + + internal async Task AcknowledgeAsync( + RedisNotifyDeliveryLease lease, + CancellationToken cancellationToken) + { + if (!lease.TryBeginCompletion()) + { + return; + } + + var db = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false); + + await db.StreamAcknowledgeAsync( + _redisOptions.StreamName, + _redisOptions.ConsumerGroup, + new RedisValue[] { lease.MessageId }) + .ConfigureAwait(false); + + await db.StreamDeleteAsync( + _redisOptions.StreamName, + new RedisValue[] { lease.MessageId }) + .ConfigureAwait(false); + + NotifyQueueMetrics.RecordAck(TransportName, _redisOptions.StreamName); + _logger.LogDebug( + "Acknowledged Notify delivery {DeliveryId} (message {MessageId}).", + lease.Message.Delivery.DeliveryId, + lease.MessageId); + } + + internal async Task RenewLeaseAsync( + RedisNotifyDeliveryLease lease, + TimeSpan leaseDuration, + CancellationToken cancellationToken) + { + var db = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false); + + await db.StreamClaimAsync( + _redisOptions.StreamName, + _redisOptions.ConsumerGroup, + lease.Consumer, + 0, + new RedisValue[] { lease.MessageId }) + .ConfigureAwait(false); + + var expires = _timeProvider.GetUtcNow().Add(leaseDuration); + lease.RefreshLease(expires); + + _logger.LogDebug( + "Renewed Notify delivery lease {DeliveryId} until {Expires:u}.", + lease.Message.Delivery.DeliveryId, + expires); + } + + internal async Task ReleaseAsync( + RedisNotifyDeliveryLease lease, + NotifyQueueReleaseDisposition disposition, + CancellationToken cancellationToken) + { + if (disposition == NotifyQueueReleaseDisposition.Retry + && lease.Attempt >= _options.MaxDeliveryAttempts) + { + _logger.LogWarning( + "Notify delivery {DeliveryId} reached max delivery attempts ({Attempts}); moving to dead-letter stream.", + lease.Message.Delivery.DeliveryId, + lease.Attempt); + + await DeadLetterAsync( + lease, + $"max-delivery-attempts:{lease.Attempt}", + cancellationToken).ConfigureAwait(false); + + return; + } + + if (!lease.TryBeginCompletion()) + { + return; + } + + var db = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false); + await db.StreamAcknowledgeAsync( + _redisOptions.StreamName, + _redisOptions.ConsumerGroup, + new RedisValue[] { lease.MessageId }) + .ConfigureAwait(false); + await db.StreamDeleteAsync( + _redisOptions.StreamName, + new RedisValue[] { lease.MessageId }) + .ConfigureAwait(false); + + if (disposition == NotifyQueueReleaseDisposition.Retry) + { + NotifyQueueMetrics.RecordRetry(TransportName, _redisOptions.StreamName); + + var delay = CalculateBackoff(lease.Attempt); + if (delay > TimeSpan.Zero) + { + try + { + await Task.Delay(delay, cancellationToken).ConfigureAwait(false); + } + catch (TaskCanceledException) + { + return; + } + } + + var now = _timeProvider.GetUtcNow(); + var entries = BuildEntries(lease.Message, now, lease.Attempt + 1); + + await AddToStreamAsync( + db, + _redisOptions.StreamName, + entries) + .ConfigureAwait(false); + + NotifyQueueMetrics.RecordEnqueued(TransportName, _redisOptions.StreamName); + 
_logger.LogInformation( + "Retrying Notify delivery {DeliveryId} (attempt {Attempt}).", + lease.Message.Delivery.DeliveryId, + lease.Attempt + 1); + } + else + { + NotifyQueueMetrics.RecordAck(TransportName, _redisOptions.StreamName); + _logger.LogInformation( + "Abandoned Notify delivery {DeliveryId} after {Attempt} attempt(s).", + lease.Message.Delivery.DeliveryId, + lease.Attempt); + } + } + + internal async Task DeadLetterAsync( + RedisNotifyDeliveryLease lease, + string reason, + CancellationToken cancellationToken) + { + if (!lease.TryBeginCompletion()) + { + return; + } + + var db = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false); + + await db.StreamAcknowledgeAsync( + _redisOptions.StreamName, + _redisOptions.ConsumerGroup, + new RedisValue[] { lease.MessageId }) + .ConfigureAwait(false); + + await db.StreamDeleteAsync( + _redisOptions.StreamName, + new RedisValue[] { lease.MessageId }) + .ConfigureAwait(false); + + await EnsureDeadLetterStreamAsync(db, cancellationToken).ConfigureAwait(false); + + var entries = BuildDeadLetterEntries(lease, reason); + await AddToStreamAsync( + db, + _redisOptions.DeadLetterStreamName, + entries) + .ConfigureAwait(false); + + NotifyQueueMetrics.RecordDeadLetter(TransportName, _redisOptions.DeadLetterStreamName); + _logger.LogError( + "Dead-lettered Notify delivery {DeliveryId} (attempt {Attempt}): {Reason}", + lease.Message.Delivery.DeliveryId, + lease.Attempt, + reason); + } + + internal async ValueTask PingAsync(CancellationToken cancellationToken) + { + var db = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false); + _ = await db.PingAsync().ConfigureAwait(false); + } + + private async Task<IDatabase> GetDatabaseAsync(CancellationToken cancellationToken) + { + if (_connection is { IsConnected: true }) + { + return _connection.GetDatabase(_redisOptions.Database ?? -1); + } + + await _connectionLock.WaitAsync(cancellationToken).ConfigureAwait(false); + try + { + if (_connection is { IsConnected: true }) + { + return _connection.GetDatabase(_redisOptions.Database ?? -1); + } + + var configuration = ConfigurationOptions.Parse(_redisOptions.ConnectionString!); + configuration.AbortOnConnectFail = false; + if (_redisOptions.Database.HasValue) + { + configuration.DefaultDatabase = _redisOptions.Database.Value; + } + + using var timeoutCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken); + timeoutCts.CancelAfter(_redisOptions.InitializationTimeout); + + _connection = await _connectionFactory(configuration).WaitAsync(timeoutCts.Token).ConfigureAwait(false); + return _connection.GetDatabase(_redisOptions.Database ?? 
-1); + } + finally + { + _connectionLock.Release(); + } + } + + private async Task EnsureConsumerGroupAsync( + IDatabase database, + CancellationToken cancellationToken) + { + if (_streamInitialized.ContainsKey(_redisOptions.StreamName)) + { + return; + } + + await _groupLock.WaitAsync(cancellationToken).ConfigureAwait(false); + try + { + if (_streamInitialized.ContainsKey(_redisOptions.StreamName)) + { + return; + } + + try + { + await database.StreamCreateConsumerGroupAsync( + _redisOptions.StreamName, + _redisOptions.ConsumerGroup, + StreamPosition.Beginning, + createStream: true) + .ConfigureAwait(false); + } + catch (RedisServerException ex) when (ex.Message.Contains("BUSYGROUP", StringComparison.OrdinalIgnoreCase)) + { + // group already exists + } + + _streamInitialized[_redisOptions.StreamName] = true; + } + finally + { + _groupLock.Release(); + } + } + + private async Task EnsureDeadLetterStreamAsync( + IDatabase database, + CancellationToken cancellationToken) + { + if (_streamInitialized.ContainsKey(_redisOptions.DeadLetterStreamName)) + { + return; + } + + await _groupLock.WaitAsync(cancellationToken).ConfigureAwait(false); + try + { + if (_streamInitialized.ContainsKey(_redisOptions.DeadLetterStreamName)) + { + return; + } + + try + { + await database.StreamCreateConsumerGroupAsync( + _redisOptions.DeadLetterStreamName, + _redisOptions.ConsumerGroup, + StreamPosition.Beginning, + createStream: true) + .ConfigureAwait(false); + } + catch (RedisServerException ex) when (ex.Message.Contains("BUSYGROUP", StringComparison.OrdinalIgnoreCase)) + { + // ignore + } + + _streamInitialized[_redisOptions.DeadLetterStreamName] = true; + } + finally + { + _groupLock.Release(); + } + } + + private NameValueEntry[] BuildEntries( + NotifyDeliveryQueueMessage message, + DateTimeOffset enqueuedAt, + int attempt) + { + var json = NotifyCanonicalJsonSerializer.Serialize(message.Delivery); + var attributeCount = message.Attributes.Count; + + var entries = ArrayPool<NameValueEntry>.Shared.Rent(8 + attributeCount); + var index = 0; + + entries[index++] = new NameValueEntry(NotifyQueueFields.Payload, json); + entries[index++] = new NameValueEntry(NotifyQueueFields.DeliveryId, message.Delivery.DeliveryId); + entries[index++] = new NameValueEntry(NotifyQueueFields.ChannelId, message.ChannelId); + entries[index++] = new NameValueEntry(NotifyQueueFields.ChannelType, message.ChannelType.ToString()); + entries[index++] = new NameValueEntry(NotifyQueueFields.Tenant, message.Delivery.TenantId); + entries[index++] = new NameValueEntry(NotifyQueueFields.Attempt, attempt); + entries[index++] = new NameValueEntry(NotifyQueueFields.EnqueuedAt, enqueuedAt.ToUnixTimeMilliseconds()); + entries[index++] = new NameValueEntry(NotifyQueueFields.IdempotencyKey, message.IdempotencyKey); + entries[index++] = new NameValueEntry(NotifyQueueFields.TraceId, message.TraceId ?? 
string.Empty); + entries[index++] = new NameValueEntry(NotifyQueueFields.PartitionKey, message.PartitionKey); + + if (attributeCount > 0) + { + foreach (var kvp in message.Attributes) + { + entries[index++] = new NameValueEntry( + NotifyQueueFields.AttributePrefix + kvp.Key, + kvp.Value); + } + } + + return entries.AsSpan(0, index).ToArray(); + } + + private NameValueEntry[] BuildDeadLetterEntries(RedisNotifyDeliveryLease lease, string reason) + { + var json = NotifyCanonicalJsonSerializer.Serialize(lease.Message.Delivery); + var attributes = lease.Message.Attributes; + var attributeCount = attributes.Count; + + var entries = ArrayPool<NameValueEntry>.Shared.Rent(9 + attributeCount); + var index = 0; + + entries[index++] = new NameValueEntry(NotifyQueueFields.Payload, json); + entries[index++] = new NameValueEntry(NotifyQueueFields.DeliveryId, lease.Message.Delivery.DeliveryId); + entries[index++] = new NameValueEntry(NotifyQueueFields.ChannelId, lease.Message.ChannelId); + entries[index++] = new NameValueEntry(NotifyQueueFields.ChannelType, lease.Message.ChannelType.ToString()); + entries[index++] = new NameValueEntry(NotifyQueueFields.Tenant, lease.Message.Delivery.TenantId); + entries[index++] = new NameValueEntry(NotifyQueueFields.Attempt, lease.Attempt); + entries[index++] = new NameValueEntry(NotifyQueueFields.IdempotencyKey, lease.Message.IdempotencyKey); + entries[index++] = new NameValueEntry("deadletter-reason", reason); + entries[index++] = new NameValueEntry(NotifyQueueFields.TraceId, lease.Message.TraceId ?? string.Empty); + + foreach (var kvp in attributes) + { + entries[index++] = new NameValueEntry( + NotifyQueueFields.AttributePrefix + kvp.Key, + kvp.Value); + } + + return entries.AsSpan(0, index).ToArray(); + } + + private RedisNotifyDeliveryLease? TryMapLease( + StreamEntry entry, + string consumer, + DateTimeOffset now, + TimeSpan leaseDuration, + int? attemptOverride) + { + if (entry.Values is null || entry.Values.Length == 0) + { + return null; + } + + string? payload = null; + string? deliveryId = null; + string? channelId = null; + string? channelTypeRaw = null; + string? traceId = null; + string? idempotency = null; + string? partitionKey = null; + long? enqueuedAtUnix = null; + var attempt = attemptOverride ?? 
1; + var attributes = new Dictionary<string, string>(StringComparer.Ordinal); + + foreach (var value in entry.Values) + { + var name = value.Name.ToString(); + var data = value.Value; + if (name.Equals(NotifyQueueFields.Payload, StringComparison.Ordinal)) + { + payload = data.ToString(); + } + else if (name.Equals(NotifyQueueFields.DeliveryId, StringComparison.Ordinal)) + { + deliveryId = data.ToString(); + } + else if (name.Equals(NotifyQueueFields.ChannelId, StringComparison.Ordinal)) + { + channelId = data.ToString(); + } + else if (name.Equals(NotifyQueueFields.ChannelType, StringComparison.Ordinal)) + { + channelTypeRaw = data.ToString(); + } + else if (name.Equals(NotifyQueueFields.Attempt, StringComparison.Ordinal)) + { + if (int.TryParse(data.ToString(), NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsed)) + { + attempt = Math.Max(parsed, attempt); + } + } + else if (name.Equals(NotifyQueueFields.EnqueuedAt, StringComparison.Ordinal)) + { + if (long.TryParse(data.ToString(), NumberStyles.Integer, CultureInfo.InvariantCulture, out var unix)) + { + enqueuedAtUnix = unix; + } + } + else if (name.Equals(NotifyQueueFields.IdempotencyKey, StringComparison.Ordinal)) + { + idempotency = data.ToString(); + } + else if (name.Equals(NotifyQueueFields.TraceId, StringComparison.Ordinal)) + { + var text = data.ToString(); + traceId = string.IsNullOrWhiteSpace(text) ? null : text; + } + else if (name.Equals(NotifyQueueFields.PartitionKey, StringComparison.Ordinal)) + { + partitionKey = data.ToString(); + } + else if (name.StartsWith(NotifyQueueFields.AttributePrefix, StringComparison.Ordinal)) + { + attributes[name[NotifyQueueFields.AttributePrefix.Length..]] = data.ToString(); + } + } + + if (payload is null || deliveryId is null || channelId is null || channelTypeRaw is null) + { + return null; + } + + NotifyDelivery delivery; + try + { + delivery = NotifyCanonicalJsonSerializer.Deserialize<NotifyDelivery>(payload); + } + catch (Exception ex) + { + _logger.LogWarning( + ex, + "Failed to deserialize Notify delivery payload for entry {EntryId}.", + entry.Id.ToString()); + return null; + } + + if (!Enum.TryParse<NotifyChannelType>(channelTypeRaw, ignoreCase: true, out var channelType)) + { + _logger.LogWarning( + "Unknown channel type '{ChannelType}' for delivery {DeliveryId}; acknowledging as poison.", + channelTypeRaw, + deliveryId); + return null; + } + + var attributeView = attributes.Count == 0 + ? EmptyReadOnlyDictionary<string, string>.Instance + : new ReadOnlyDictionary<string, string>(attributes); + + var enqueuedAt = enqueuedAtUnix is null + ? now + : DateTimeOffset.FromUnixTimeMilliseconds(enqueuedAtUnix.Value); + + var message = new NotifyDeliveryQueueMessage( + delivery, + channelId, + channelType, + _redisOptions.StreamName, + traceId, + attributeView); + + var leaseExpires = now.Add(leaseDuration); + + return new RedisNotifyDeliveryLease( + this, + entry.Id.ToString(), + message, + attempt, + enqueuedAt, + leaseExpires, + consumer, + idempotency, + partitionKey ?? 
channelId); + } + + private async Task AckPoisonAsync(IDatabase database, RedisValue messageId) + { + await database.StreamAcknowledgeAsync( + _redisOptions.StreamName, + _redisOptions.ConsumerGroup, + new RedisValue[] { messageId }) + .ConfigureAwait(false); + + await database.StreamDeleteAsync( + _redisOptions.StreamName, + new RedisValue[] { messageId }) + .ConfigureAwait(false); + } + + private static async Task<RedisValue> AddToStreamAsync( + IDatabase database, + string stream, + IReadOnlyList<NameValueEntry> entries) + { + return await database.StreamAddAsync( + stream, + entries.ToArray()) + .ConfigureAwait(false); + } + + private string BuildIdempotencyKey(string token) + => string.Concat(_redisOptions.IdempotencyKeyPrefix, token); + + private TimeSpan CalculateBackoff(int attempt) + { + var initial = _options.RetryInitialBackoff > TimeSpan.Zero + ? _options.RetryInitialBackoff + : TimeSpan.FromSeconds(1); + + if (initial <= TimeSpan.Zero) + { + return TimeSpan.Zero; + } + + if (attempt <= 1) + { + return initial; + } + + var max = _options.RetryMaxBackoff > TimeSpan.Zero + ? _options.RetryMaxBackoff + : initial; + + var exponent = attempt - 1; + var scaledTicks = initial.Ticks * Math.Pow(2, exponent - 1); + var cappedTicks = Math.Min(max.Ticks, scaledTicks); + var resultTicks = Math.Max(initial.Ticks, (long)cappedTicks); + return TimeSpan.FromTicks(resultTicks); + } +} diff --git a/src/StellaOps.Notify.Queue/Redis/RedisNotifyEventLease.cs b/src/Notify/__Libraries/StellaOps.Notify.Queue/Redis/RedisNotifyEventLease.cs similarity index 97% rename from src/StellaOps.Notify.Queue/Redis/RedisNotifyEventLease.cs rename to src/Notify/__Libraries/StellaOps.Notify.Queue/Redis/RedisNotifyEventLease.cs index 4d29bd60..c66eecb6 100644 --- a/src/StellaOps.Notify.Queue/Redis/RedisNotifyEventLease.cs +++ b/src/Notify/__Libraries/StellaOps.Notify.Queue/Redis/RedisNotifyEventLease.cs @@ -1,76 +1,76 @@ -using System; -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; - -namespace StellaOps.Notify.Queue.Redis; - -internal sealed class RedisNotifyEventLease : INotifyQueueLease<NotifyQueueEventMessage> -{ - private readonly RedisNotifyEventQueue _queue; - private int _completed; - - internal RedisNotifyEventLease( - RedisNotifyEventQueue queue, - NotifyRedisEventStreamOptions streamOptions, - string messageId, - NotifyQueueEventMessage message, - int attempt, - string consumer, - DateTimeOffset enqueuedAt, - DateTimeOffset leaseExpiresAt) - { - _queue = queue ?? throw new ArgumentNullException(nameof(queue)); - StreamOptions = streamOptions ?? throw new ArgumentNullException(nameof(streamOptions)); - MessageId = messageId ?? throw new ArgumentNullException(nameof(messageId)); - Message = message ?? throw new ArgumentNullException(nameof(message)); - Attempt = attempt; - Consumer = consumer ?? throw new ArgumentNullException(nameof(consumer)); - EnqueuedAt = enqueuedAt; - LeaseExpiresAt = leaseExpiresAt; - } - - internal NotifyRedisEventStreamOptions StreamOptions { get; } - - public string MessageId { get; } - - public int Attempt { get; } - - public DateTimeOffset EnqueuedAt { get; } - - public DateTimeOffset LeaseExpiresAt { get; private set; } - - public string Consumer { get; } - - public string Stream => StreamOptions.Stream; - - public string TenantId => Message.TenantId; - - public string? PartitionKey => Message.PartitionKey; - - public string IdempotencyKey => Message.IdempotencyKey; - - public string? 
TraceId => Message.TraceId; - - public IReadOnlyDictionary<string, string> Attributes => Message.Attributes; - - public NotifyQueueEventMessage Message { get; } - - public Task AcknowledgeAsync(CancellationToken cancellationToken = default) - => _queue.AcknowledgeAsync(this, cancellationToken); - - public Task RenewAsync(TimeSpan leaseDuration, CancellationToken cancellationToken = default) - => _queue.RenewLeaseAsync(this, leaseDuration, cancellationToken); - - public Task ReleaseAsync(NotifyQueueReleaseDisposition disposition, CancellationToken cancellationToken = default) - => _queue.ReleaseAsync(this, disposition, cancellationToken); - - public Task DeadLetterAsync(string reason, CancellationToken cancellationToken = default) - => _queue.DeadLetterAsync(this, reason, cancellationToken); - - internal bool TryBeginCompletion() - => Interlocked.CompareExchange(ref _completed, 1, 0) == 0; - - internal void RefreshLease(DateTimeOffset expiresAt) - => LeaseExpiresAt = expiresAt; -} +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Notify.Queue.Redis; + +internal sealed class RedisNotifyEventLease : INotifyQueueLease<NotifyQueueEventMessage> +{ + private readonly RedisNotifyEventQueue _queue; + private int _completed; + + internal RedisNotifyEventLease( + RedisNotifyEventQueue queue, + NotifyRedisEventStreamOptions streamOptions, + string messageId, + NotifyQueueEventMessage message, + int attempt, + string consumer, + DateTimeOffset enqueuedAt, + DateTimeOffset leaseExpiresAt) + { + _queue = queue ?? throw new ArgumentNullException(nameof(queue)); + StreamOptions = streamOptions ?? throw new ArgumentNullException(nameof(streamOptions)); + MessageId = messageId ?? throw new ArgumentNullException(nameof(messageId)); + Message = message ?? throw new ArgumentNullException(nameof(message)); + Attempt = attempt; + Consumer = consumer ?? throw new ArgumentNullException(nameof(consumer)); + EnqueuedAt = enqueuedAt; + LeaseExpiresAt = leaseExpiresAt; + } + + internal NotifyRedisEventStreamOptions StreamOptions { get; } + + public string MessageId { get; } + + public int Attempt { get; } + + public DateTimeOffset EnqueuedAt { get; } + + public DateTimeOffset LeaseExpiresAt { get; private set; } + + public string Consumer { get; } + + public string Stream => StreamOptions.Stream; + + public string TenantId => Message.TenantId; + + public string? PartitionKey => Message.PartitionKey; + + public string IdempotencyKey => Message.IdempotencyKey; + + public string? 
TraceId => Message.TraceId; + + public IReadOnlyDictionary<string, string> Attributes => Message.Attributes; + + public NotifyQueueEventMessage Message { get; } + + public Task AcknowledgeAsync(CancellationToken cancellationToken = default) + => _queue.AcknowledgeAsync(this, cancellationToken); + + public Task RenewAsync(TimeSpan leaseDuration, CancellationToken cancellationToken = default) + => _queue.RenewLeaseAsync(this, leaseDuration, cancellationToken); + + public Task ReleaseAsync(NotifyQueueReleaseDisposition disposition, CancellationToken cancellationToken = default) + => _queue.ReleaseAsync(this, disposition, cancellationToken); + + public Task DeadLetterAsync(string reason, CancellationToken cancellationToken = default) + => _queue.DeadLetterAsync(this, reason, cancellationToken); + + internal bool TryBeginCompletion() + => Interlocked.CompareExchange(ref _completed, 1, 0) == 0; + + internal void RefreshLease(DateTimeOffset expiresAt) + => LeaseExpiresAt = expiresAt; +} diff --git a/src/StellaOps.Notify.Queue/Redis/RedisNotifyEventQueue.cs b/src/Notify/__Libraries/StellaOps.Notify.Queue/Redis/RedisNotifyEventQueue.cs similarity index 97% rename from src/StellaOps.Notify.Queue/Redis/RedisNotifyEventQueue.cs rename to src/Notify/__Libraries/StellaOps.Notify.Queue/Redis/RedisNotifyEventQueue.cs index e217f899..38a20b40 100644 --- a/src/StellaOps.Notify.Queue/Redis/RedisNotifyEventQueue.cs +++ b/src/Notify/__Libraries/StellaOps.Notify.Queue/Redis/RedisNotifyEventQueue.cs @@ -1,655 +1,655 @@ -using System; -using System.Collections.Concurrent; -using System.Collections.Generic; -using System.Collections.ObjectModel; -using System.Globalization; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using StackExchange.Redis; -using StellaOps.Notify.Models; - -namespace StellaOps.Notify.Queue.Redis; - -internal sealed class RedisNotifyEventQueue : INotifyEventQueue, IAsyncDisposable -{ - private const string TransportName = "redis"; - - private readonly NotifyEventQueueOptions _options; - private readonly NotifyRedisEventQueueOptions _redisOptions; - private readonly ILogger<RedisNotifyEventQueue> _logger; - private readonly TimeProvider _timeProvider; - private readonly Func<ConfigurationOptions, Task<IConnectionMultiplexer>> _connectionFactory; - private readonly SemaphoreSlim _connectionLock = new(1, 1); - private readonly SemaphoreSlim _groupInitLock = new(1, 1); - private readonly IReadOnlyDictionary<string, NotifyRedisEventStreamOptions> _streamsByName; - private readonly ConcurrentDictionary<string, bool> _initializedStreams = new(StringComparer.Ordinal); - - private IConnectionMultiplexer? _connection; - private bool _disposed; - - public RedisNotifyEventQueue( - NotifyEventQueueOptions options, - NotifyRedisEventQueueOptions redisOptions, - ILogger<RedisNotifyEventQueue> logger, - TimeProvider timeProvider, - Func<ConfigurationOptions, Task<IConnectionMultiplexer>>? connectionFactory = null) - { - _options = options ?? throw new ArgumentNullException(nameof(options)); - _redisOptions = redisOptions ?? throw new ArgumentNullException(nameof(redisOptions)); - _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - _timeProvider = timeProvider ?? TimeProvider.System; - _connectionFactory = connectionFactory ?? 
(async config => - { - var connection = await ConnectionMultiplexer.ConnectAsync(config).ConfigureAwait(false); - return (IConnectionMultiplexer)connection; - }); - - if (string.IsNullOrWhiteSpace(_redisOptions.ConnectionString)) - { - throw new InvalidOperationException("Redis connection string must be configured for Notify event queue."); - } - - _streamsByName = _redisOptions.Streams.ToDictionary( - stream => stream.Stream, - stream => stream, - StringComparer.Ordinal); - } - - public async ValueTask<NotifyQueueEnqueueResult> PublishAsync( - NotifyQueueEventMessage message, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(message); - cancellationToken.ThrowIfCancellationRequested(); - - var streamOptions = GetStreamOptions(message.Stream); - var db = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false); - await EnsureStreamInitializedAsync(db, streamOptions, cancellationToken).ConfigureAwait(false); - - var now = _timeProvider.GetUtcNow(); - var entries = BuildEntries(message, now, attempt: 1); - - var messageId = await AddToStreamAsync( - db, - streamOptions, - entries) - .ConfigureAwait(false); - - var idempotencyToken = string.IsNullOrWhiteSpace(message.IdempotencyKey) - ? message.Event.EventId.ToString("N") - : message.IdempotencyKey; - - var idempotencyKey = streamOptions.IdempotencyKeyPrefix + idempotencyToken; - var stored = await db.StringSetAsync( - idempotencyKey, - messageId, - when: When.NotExists, - expiry: _redisOptions.IdempotencyWindow) - .ConfigureAwait(false); - - if (!stored) - { - await db.StreamDeleteAsync( - streamOptions.Stream, - new RedisValue[] { messageId }) - .ConfigureAwait(false); - - var existing = await db.StringGetAsync(idempotencyKey).ConfigureAwait(false); - var duplicateId = existing.IsNullOrEmpty ? 
messageId : existing; - - _logger.LogDebug( - "Duplicate Notify event enqueue detected for idempotency token {Token}; returning existing stream id {StreamId}.", - idempotencyToken, - duplicateId.ToString()); - - NotifyQueueMetrics.RecordDeduplicated(TransportName, streamOptions.Stream); - return new NotifyQueueEnqueueResult(duplicateId.ToString()!, true); - } - - NotifyQueueMetrics.RecordEnqueued(TransportName, streamOptions.Stream); - - _logger.LogDebug( - "Enqueued Notify event {EventId} for tenant {Tenant} on stream {Stream} (id {StreamId}).", - message.Event.EventId, - message.TenantId, - streamOptions.Stream, - messageId.ToString()); - - return new NotifyQueueEnqueueResult(messageId.ToString()!, false); - } - - public async ValueTask<IReadOnlyList<INotifyQueueLease<NotifyQueueEventMessage>>> LeaseAsync( - NotifyQueueLeaseRequest request, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(request); - cancellationToken.ThrowIfCancellationRequested(); - - var db = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false); - var now = _timeProvider.GetUtcNow(); - var leases = new List<INotifyQueueLease<NotifyQueueEventMessage>>(request.BatchSize); - - foreach (var streamOptions in _streamsByName.Values) - { - await EnsureStreamInitializedAsync(db, streamOptions, cancellationToken).ConfigureAwait(false); - - var remaining = request.BatchSize - leases.Count; - if (remaining <= 0) - { - break; - } - - var entries = await db.StreamReadGroupAsync( - streamOptions.Stream, - streamOptions.ConsumerGroup, - request.Consumer, - StreamPosition.NewMessages, - remaining) - .ConfigureAwait(false); - - if (entries is null || entries.Length == 0) - { - continue; - } - - foreach (var entry in entries) - { - var lease = TryMapLease( - streamOptions, - entry, - request.Consumer, - now, - request.LeaseDuration, - attemptOverride: null); - - if (lease is null) - { - await AckPoisonAsync(db, streamOptions, entry.Id).ConfigureAwait(false); - continue; - } - - leases.Add(lease); - - if (leases.Count >= request.BatchSize) - { - break; - } - } - } - - return leases; - } - - public async ValueTask<IReadOnlyList<INotifyQueueLease<NotifyQueueEventMessage>>> ClaimExpiredAsync( - NotifyQueueClaimOptions options, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(options); - cancellationToken.ThrowIfCancellationRequested(); - - var db = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false); - var now = _timeProvider.GetUtcNow(); - var leases = new List<INotifyQueueLease<NotifyQueueEventMessage>>(options.BatchSize); - - foreach (var streamOptions in _streamsByName.Values) - { - await EnsureStreamInitializedAsync(db, streamOptions, cancellationToken).ConfigureAwait(false); - - var pending = await db.StreamPendingMessagesAsync( - streamOptions.Stream, - streamOptions.ConsumerGroup, - options.BatchSize, - RedisValue.Null, - (long)options.MinIdleTime.TotalMilliseconds) - .ConfigureAwait(false); - - if (pending is null || pending.Length == 0) - { - continue; - } - - var eligible = pending - .Where(p => p.IdleTimeInMilliseconds >= options.MinIdleTime.TotalMilliseconds) - .ToArray(); - - if (eligible.Length == 0) - { - continue; - } - - var messageIds = eligible - .Select(static p => (RedisValue)p.MessageId) - .ToArray(); - - var entries = await db.StreamClaimAsync( - streamOptions.Stream, - streamOptions.ConsumerGroup, - options.ClaimantConsumer, - 0, - messageIds) - .ConfigureAwait(false); - - if (entries is null || entries.Length == 
0) - { - continue; - } - - var attemptById = eligible - .Where(static info => !info.MessageId.IsNullOrEmpty) - .ToDictionary( - info => info.MessageId!.ToString(), - info => (int)Math.Max(1, info.DeliveryCount), - StringComparer.Ordinal); - - foreach (var entry in entries) - { - var entryId = entry.Id.ToString(); - attemptById.TryGetValue(entryId, out var attempt); - - var lease = TryMapLease( - streamOptions, - entry, - options.ClaimantConsumer, - now, - _options.DefaultLeaseDuration, - attempt == 0 ? null : attempt); - - if (lease is null) - { - await AckPoisonAsync(db, streamOptions, entry.Id).ConfigureAwait(false); - continue; - } - - leases.Add(lease); - if (leases.Count >= options.BatchSize) - { - return leases; - } - } - } - - return leases; - } - - public async ValueTask DisposeAsync() - { - if (_disposed) - { - return; - } - - _disposed = true; - if (_connection is not null) - { - await _connection.CloseAsync(); - _connection.Dispose(); - } - - _connectionLock.Dispose(); - _groupInitLock.Dispose(); - GC.SuppressFinalize(this); - } - - internal async Task AcknowledgeAsync( - RedisNotifyEventLease lease, - CancellationToken cancellationToken) - { - if (!lease.TryBeginCompletion()) - { - return; - } - - var db = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false); - var streamOptions = lease.StreamOptions; - - await db.StreamAcknowledgeAsync( - streamOptions.Stream, - streamOptions.ConsumerGroup, - new RedisValue[] { lease.MessageId }) - .ConfigureAwait(false); - - await db.StreamDeleteAsync( - streamOptions.Stream, - new RedisValue[] { lease.MessageId }) - .ConfigureAwait(false); - - NotifyQueueMetrics.RecordAck(TransportName, streamOptions.Stream); - - _logger.LogDebug( - "Acknowledged Notify event {EventId} on consumer {Consumer} (stream {Stream}, id {MessageId}).", - lease.Message.Event.EventId, - lease.Consumer, - streamOptions.Stream, - lease.MessageId); - } - - internal async Task RenewLeaseAsync( - RedisNotifyEventLease lease, - TimeSpan leaseDuration, - CancellationToken cancellationToken) - { - var db = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false); - var streamOptions = lease.StreamOptions; - - await db.StreamClaimAsync( - streamOptions.Stream, - streamOptions.ConsumerGroup, - lease.Consumer, - 0, - new RedisValue[] { lease.MessageId }) - .ConfigureAwait(false); - - var expires = _timeProvider.GetUtcNow().Add(leaseDuration); - lease.RefreshLease(expires); - - _logger.LogDebug( - "Renewed Notify event lease for {EventId} until {Expires:u}.", - lease.Message.Event.EventId, - expires); - } - - internal Task ReleaseAsync( - RedisNotifyEventLease lease, - NotifyQueueReleaseDisposition disposition, - CancellationToken cancellationToken) - => Task.FromException(new NotSupportedException("Retry/abandon is not supported for Notify event streams.")); - - internal async Task DeadLetterAsync( - RedisNotifyEventLease lease, - string reason, - CancellationToken cancellationToken) - { - if (!lease.TryBeginCompletion()) - { - return; - } - - var db = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false); - var streamOptions = lease.StreamOptions; - - await db.StreamAcknowledgeAsync( - streamOptions.Stream, - streamOptions.ConsumerGroup, - new RedisValue[] { lease.MessageId }) - .ConfigureAwait(false); - - await db.StreamDeleteAsync( - streamOptions.Stream, - new RedisValue[] { lease.MessageId }) - .ConfigureAwait(false); - - _logger.LogWarning( - "Dead-lettered Notify event {EventId} on stream {Stream} with reason '{Reason}'.", - 
lease.Message.Event.EventId, - streamOptions.Stream, - reason); - } - - internal async ValueTask PingAsync(CancellationToken cancellationToken) - { - var db = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false); - _ = await db.PingAsync().ConfigureAwait(false); - } - - private NotifyRedisEventStreamOptions GetStreamOptions(string stream) - { - if (!_streamsByName.TryGetValue(stream, out var options)) - { - throw new InvalidOperationException($"Stream '{stream}' is not configured for the Notify event queue."); - } - - return options; - } - - private async Task<IDatabase> GetDatabaseAsync(CancellationToken cancellationToken) - { - if (_connection is { IsConnected: true }) - { - return _connection.GetDatabase(_redisOptions.Database ?? -1); - } - - await _connectionLock.WaitAsync(cancellationToken).ConfigureAwait(false); - try - { - if (_connection is { IsConnected: true }) - { - return _connection.GetDatabase(_redisOptions.Database ?? -1); - } - - var configuration = ConfigurationOptions.Parse(_redisOptions.ConnectionString!); - configuration.AbortOnConnectFail = false; - if (_redisOptions.Database.HasValue) - { - configuration.DefaultDatabase = _redisOptions.Database; - } - - using var timeoutCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken); - timeoutCts.CancelAfter(_redisOptions.InitializationTimeout); - - _connection = await _connectionFactory(configuration).WaitAsync(timeoutCts.Token).ConfigureAwait(false); - return _connection.GetDatabase(_redisOptions.Database ?? -1); - } - finally - { - _connectionLock.Release(); - } - } - - private async Task EnsureStreamInitializedAsync( - IDatabase database, - NotifyRedisEventStreamOptions streamOptions, - CancellationToken cancellationToken) - { - if (_initializedStreams.ContainsKey(streamOptions.Stream)) - { - return; - } - - await _groupInitLock.WaitAsync(cancellationToken).ConfigureAwait(false); - try - { - if (_initializedStreams.ContainsKey(streamOptions.Stream)) - { - return; - } - - try - { - await database.StreamCreateConsumerGroupAsync( - streamOptions.Stream, - streamOptions.ConsumerGroup, - StreamPosition.Beginning, - createStream: true) - .ConfigureAwait(false); - } - catch (RedisServerException ex) when (ex.Message.Contains("BUSYGROUP", StringComparison.OrdinalIgnoreCase)) - { - // Consumer group already exists — nothing to do. 
- } - - _initializedStreams[streamOptions.Stream] = true; - } - finally - { - _groupInitLock.Release(); - } - } - - private static async Task<RedisValue> AddToStreamAsync( - IDatabase database, - NotifyRedisEventStreamOptions streamOptions, - IReadOnlyList<NameValueEntry> entries) - { - return await database.StreamAddAsync( - streamOptions.Stream, - entries.ToArray(), - maxLength: streamOptions.ApproximateMaxLength, - useApproximateMaxLength: streamOptions.ApproximateMaxLength is not null) - .ConfigureAwait(false); - } - - private IReadOnlyList<NameValueEntry> BuildEntries( - NotifyQueueEventMessage message, - DateTimeOffset enqueuedAt, - int attempt) - { - var payload = NotifyCanonicalJsonSerializer.Serialize(message.Event); - - var entries = new List<NameValueEntry>(8 + message.Attributes.Count) - { - new(NotifyQueueFields.Payload, payload), - new(NotifyQueueFields.EventId, message.Event.EventId.ToString("D")), - new(NotifyQueueFields.Tenant, message.TenantId), - new(NotifyQueueFields.Kind, message.Event.Kind), - new(NotifyQueueFields.Attempt, attempt), - new(NotifyQueueFields.EnqueuedAt, enqueuedAt.ToUnixTimeMilliseconds()), - new(NotifyQueueFields.IdempotencyKey, message.IdempotencyKey), - new(NotifyQueueFields.PartitionKey, message.PartitionKey ?? string.Empty), - new(NotifyQueueFields.TraceId, message.TraceId ?? string.Empty) - }; - - foreach (var kvp in message.Attributes) - { - entries.Add(new NameValueEntry( - NotifyQueueFields.AttributePrefix + kvp.Key, - kvp.Value)); - } - - return entries; - } - - private RedisNotifyEventLease? TryMapLease( - NotifyRedisEventStreamOptions streamOptions, - StreamEntry entry, - string consumer, - DateTimeOffset now, - TimeSpan leaseDuration, - int? attemptOverride) - { - if (entry.Values is null || entry.Values.Length == 0) - { - return null; - } - - string? payloadJson = null; - string? eventIdRaw = null; - long? enqueuedAtUnix = null; - string? idempotency = null; - string? partitionKey = null; - string? traceId = null; - var attempt = attemptOverride ?? 1; - var attributes = new Dictionary<string, string>(StringComparer.Ordinal); - - foreach (var field in entry.Values) - { - var name = field.Name.ToString(); - var value = field.Value; - if (name.Equals(NotifyQueueFields.Payload, StringComparison.Ordinal)) - { - payloadJson = value.ToString(); - } - else if (name.Equals(NotifyQueueFields.EventId, StringComparison.Ordinal)) - { - eventIdRaw = value.ToString(); - } - else if (name.Equals(NotifyQueueFields.Attempt, StringComparison.Ordinal)) - { - if (int.TryParse(value.ToString(), NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsed)) - { - attempt = Math.Max(parsed, attempt); - } - } - else if (name.Equals(NotifyQueueFields.EnqueuedAt, StringComparison.Ordinal)) - { - if (long.TryParse(value.ToString(), NumberStyles.Integer, CultureInfo.InvariantCulture, out var unix)) - { - enqueuedAtUnix = unix; - } - } - else if (name.Equals(NotifyQueueFields.IdempotencyKey, StringComparison.Ordinal)) - { - var text = value.ToString(); - idempotency = string.IsNullOrWhiteSpace(text) ? null : text; - } - else if (name.Equals(NotifyQueueFields.PartitionKey, StringComparison.Ordinal)) - { - var text = value.ToString(); - partitionKey = string.IsNullOrWhiteSpace(text) ? null : text; - } - else if (name.Equals(NotifyQueueFields.TraceId, StringComparison.Ordinal)) - { - var text = value.ToString(); - traceId = string.IsNullOrWhiteSpace(text) ? 
null : text; - } - else if (name.StartsWith(NotifyQueueFields.AttributePrefix, StringComparison.Ordinal)) - { - var key = name[NotifyQueueFields.AttributePrefix.Length..]; - attributes[key] = value.ToString(); - } - } - - if (payloadJson is null || enqueuedAtUnix is null) - { - return null; - } - - NotifyEvent notifyEvent; - try - { - notifyEvent = NotifyCanonicalJsonSerializer.Deserialize<NotifyEvent>(payloadJson); - } - catch (Exception ex) - { - _logger.LogWarning( - ex, - "Failed to deserialize Notify event payload for stream {Stream} entry {EntryId}.", - streamOptions.Stream, - entry.Id.ToString()); - return null; - } - - var attributeView = attributes.Count == 0 - ? EmptyReadOnlyDictionary<string, string>.Instance - : new ReadOnlyDictionary<string, string>(attributes); - - var message = new NotifyQueueEventMessage( - notifyEvent, - streamOptions.Stream, - idempotencyKey: idempotency ?? notifyEvent.EventId.ToString("N"), - partitionKey: partitionKey, - traceId: traceId, - attributes: attributeView); - - var enqueuedAt = DateTimeOffset.FromUnixTimeMilliseconds(enqueuedAtUnix.Value); - var leaseExpiresAt = now.Add(leaseDuration); - - return new RedisNotifyEventLease( - this, - streamOptions, - entry.Id.ToString(), - message, - attempt, - consumer, - enqueuedAt, - leaseExpiresAt); - } - - private async Task AckPoisonAsync( - IDatabase database, - NotifyRedisEventStreamOptions streamOptions, - RedisValue messageId) - { - await database.StreamAcknowledgeAsync( - streamOptions.Stream, - streamOptions.ConsumerGroup, - new RedisValue[] { messageId }) - .ConfigureAwait(false); - - await database.StreamDeleteAsync( - streamOptions.Stream, - new RedisValue[] { messageId }) - .ConfigureAwait(false); - } -} +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Globalization; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using StackExchange.Redis; +using StellaOps.Notify.Models; + +namespace StellaOps.Notify.Queue.Redis; + +internal sealed class RedisNotifyEventQueue : INotifyEventQueue, IAsyncDisposable +{ + private const string TransportName = "redis"; + + private readonly NotifyEventQueueOptions _options; + private readonly NotifyRedisEventQueueOptions _redisOptions; + private readonly ILogger<RedisNotifyEventQueue> _logger; + private readonly TimeProvider _timeProvider; + private readonly Func<ConfigurationOptions, Task<IConnectionMultiplexer>> _connectionFactory; + private readonly SemaphoreSlim _connectionLock = new(1, 1); + private readonly SemaphoreSlim _groupInitLock = new(1, 1); + private readonly IReadOnlyDictionary<string, NotifyRedisEventStreamOptions> _streamsByName; + private readonly ConcurrentDictionary<string, bool> _initializedStreams = new(StringComparer.Ordinal); + + private IConnectionMultiplexer? _connection; + private bool _disposed; + + public RedisNotifyEventQueue( + NotifyEventQueueOptions options, + NotifyRedisEventQueueOptions redisOptions, + ILogger<RedisNotifyEventQueue> logger, + TimeProvider timeProvider, + Func<ConfigurationOptions, Task<IConnectionMultiplexer>>? connectionFactory = null) + { + _options = options ?? throw new ArgumentNullException(nameof(options)); + _redisOptions = redisOptions ?? throw new ArgumentNullException(nameof(redisOptions)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _timeProvider = timeProvider ?? 
TimeProvider.System;
+        _connectionFactory = connectionFactory ?? (async config =>
+        {
+            var connection = await ConnectionMultiplexer.ConnectAsync(config).ConfigureAwait(false);
+            return (IConnectionMultiplexer)connection;
+        });
+
+        if (string.IsNullOrWhiteSpace(_redisOptions.ConnectionString))
+        {
+            throw new InvalidOperationException("Redis connection string must be configured for Notify event queue.");
+        }
+
+        _streamsByName = _redisOptions.Streams.ToDictionary(
+            stream => stream.Stream,
+            stream => stream,
+            StringComparer.Ordinal);
+    }
+
+    public async ValueTask<NotifyQueueEnqueueResult> PublishAsync(
+        NotifyQueueEventMessage message,
+        CancellationToken cancellationToken = default)
+    {
+        ArgumentNullException.ThrowIfNull(message);
+        cancellationToken.ThrowIfCancellationRequested();
+
+        var streamOptions = GetStreamOptions(message.Stream);
+        var db = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false);
+        await EnsureStreamInitializedAsync(db, streamOptions, cancellationToken).ConfigureAwait(false);
+
+        var now = _timeProvider.GetUtcNow();
+        var entries = BuildEntries(message, now, attempt: 1);
+
+        var messageId = await AddToStreamAsync(
+            db,
+            streamOptions,
+            entries)
+            .ConfigureAwait(false);
+
+        var idempotencyToken = string.IsNullOrWhiteSpace(message.IdempotencyKey)
+            ? message.Event.EventId.ToString("N")
+            : message.IdempotencyKey;
+
+        var idempotencyKey = streamOptions.IdempotencyKeyPrefix + idempotencyToken;
+        var stored = await db.StringSetAsync(
+            idempotencyKey,
+            messageId,
+            when: When.NotExists,
+            expiry: _redisOptions.IdempotencyWindow)
+            .ConfigureAwait(false);
+
+        if (!stored)
+        {
+            await db.StreamDeleteAsync(
+                streamOptions.Stream,
+                new RedisValue[] { messageId })
+                .ConfigureAwait(false);
+
+            var existing = await db.StringGetAsync(idempotencyKey).ConfigureAwait(false);
+            var duplicateId = existing.IsNullOrEmpty ? messageId : existing;
+
+            _logger.LogDebug(
+                "Duplicate Notify event enqueue detected for idempotency token {Token}; returning existing stream id {StreamId}.",
+                idempotencyToken,
+                duplicateId.ToString());
+
+            NotifyQueueMetrics.RecordDeduplicated(TransportName, streamOptions.Stream);
+            return new NotifyQueueEnqueueResult(duplicateId.ToString()!, true);
+        }
+
+        NotifyQueueMetrics.RecordEnqueued(TransportName, streamOptions.Stream);
+
+        _logger.LogDebug(
+            "Enqueued Notify event {EventId} for tenant {Tenant} on stream {Stream} (id {StreamId}).",
+            message.Event.EventId,
+            message.TenantId,
+            streamOptions.Stream,
+            messageId.ToString());
+
+        return new NotifyQueueEnqueueResult(messageId.ToString()!, false);
+    }
+
+    public async ValueTask<IReadOnlyList<INotifyQueueLease<NotifyQueueEventMessage>>> LeaseAsync(
+        NotifyQueueLeaseRequest request,
+        CancellationToken cancellationToken = default)
+    {
+        ArgumentNullException.ThrowIfNull(request);
+        cancellationToken.ThrowIfCancellationRequested();
+
+        var db = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false);
+        var now = _timeProvider.GetUtcNow();
+        var leases = new List<INotifyQueueLease<NotifyQueueEventMessage>>(request.BatchSize);
+
+        foreach (var streamOptions in _streamsByName.Values)
+        {
+            await EnsureStreamInitializedAsync(db, streamOptions, cancellationToken).ConfigureAwait(false);
+
+            var remaining = request.BatchSize - leases.Count;
+            if (remaining <= 0)
+            {
+                break;
+            }
+
+            var entries = await db.StreamReadGroupAsync(
+                streamOptions.Stream,
+                streamOptions.ConsumerGroup,
+                request.Consumer,
+                StreamPosition.NewMessages,
+                remaining)
+                .ConfigureAwait(false);
+
+            if (entries is null || entries.Length == 0)
+            {
+                continue;
+            }
+
+            foreach (var entry in entries)
+            {
+                var lease = TryMapLease(
+                    streamOptions,
+                    entry,
+                    request.Consumer,
+                    now,
+                    request.LeaseDuration,
+                    attemptOverride: null);
+
+                if (lease is null)
+                {
+                    await AckPoisonAsync(db, streamOptions, entry.Id).ConfigureAwait(false);
+                    continue;
+                }
+
+                leases.Add(lease);
+
+                if (leases.Count >= request.BatchSize)
+                {
+                    break;
+                }
+            }
+        }
+
+        return leases;
+    }
+
+    public async ValueTask<IReadOnlyList<INotifyQueueLease<NotifyQueueEventMessage>>> ClaimExpiredAsync(
+        NotifyQueueClaimOptions options,
+        CancellationToken cancellationToken = default)
+    {
+        ArgumentNullException.ThrowIfNull(options);
+        cancellationToken.ThrowIfCancellationRequested();
+
+        var db = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false);
+        var now = _timeProvider.GetUtcNow();
+        var leases = new List<INotifyQueueLease<NotifyQueueEventMessage>>(options.BatchSize);
+
+        foreach (var streamOptions in _streamsByName.Values)
+        {
+            await EnsureStreamInitializedAsync(db, streamOptions, cancellationToken).ConfigureAwait(false);
+
+            var pending = await db.StreamPendingMessagesAsync(
+                streamOptions.Stream,
+                streamOptions.ConsumerGroup,
+                options.BatchSize,
+                RedisValue.Null,
+                (long)options.MinIdleTime.TotalMilliseconds)
+                .ConfigureAwait(false);
+
+            if (pending is null || pending.Length == 0)
+            {
+                continue;
+            }
+
+            var eligible = pending
+                .Where(p => p.IdleTimeInMilliseconds >= options.MinIdleTime.TotalMilliseconds)
+                .ToArray();
+
+            if (eligible.Length == 0)
+            {
+                continue;
+            }
+
+            var messageIds = eligible
+                .Select(static p => (RedisValue)p.MessageId)
+                .ToArray();
+
+            var entries = await db.StreamClaimAsync(
+                streamOptions.Stream,
+                streamOptions.ConsumerGroup,
+                options.ClaimantConsumer,
+                0,
+                messageIds)
+                .ConfigureAwait(false);
+
+            if (entries is null || entries.Length == 0)
+            {
+                continue;
+            }
+
+            var attemptById = eligible
+                .Where(static info => !info.MessageId.IsNullOrEmpty)
+                .ToDictionary(
+                    info => info.MessageId!.ToString(),
+                    info => (int)Math.Max(1, info.DeliveryCount),
+                    StringComparer.Ordinal);
+
+            foreach (var entry in entries)
+            {
+                var entryId = entry.Id.ToString();
+                attemptById.TryGetValue(entryId, out var attempt);
+
+                var lease = TryMapLease(
+                    streamOptions,
+                    entry,
+                    options.ClaimantConsumer,
+                    now,
+                    _options.DefaultLeaseDuration,
+                    attempt == 0 ? null : attempt);
+
+                if (lease is null)
+                {
+                    await AckPoisonAsync(db, streamOptions, entry.Id).ConfigureAwait(false);
+                    continue;
+                }
+
+                leases.Add(lease);
+                if (leases.Count >= options.BatchSize)
+                {
+                    return leases;
+                }
+            }
+        }
+
+        return leases;
+    }
+
+    public async ValueTask DisposeAsync()
+    {
+        if (_disposed)
+        {
+            return;
+        }
+
+        _disposed = true;
+        if (_connection is not null)
+        {
+            await _connection.CloseAsync();
+            _connection.Dispose();
+        }
+
+        _connectionLock.Dispose();
+        _groupInitLock.Dispose();
+        GC.SuppressFinalize(this);
+    }
+
+    internal async Task AcknowledgeAsync(
+        RedisNotifyEventLease lease,
+        CancellationToken cancellationToken)
+    {
+        if (!lease.TryBeginCompletion())
+        {
+            return;
+        }
+
+        var db = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false);
+        var streamOptions = lease.StreamOptions;
+
+        await db.StreamAcknowledgeAsync(
+            streamOptions.Stream,
+            streamOptions.ConsumerGroup,
+            new RedisValue[] { lease.MessageId })
+            .ConfigureAwait(false);
+
+        await db.StreamDeleteAsync(
+            streamOptions.Stream,
+            new RedisValue[] { lease.MessageId })
+            .ConfigureAwait(false);
+
+        NotifyQueueMetrics.RecordAck(TransportName, streamOptions.Stream);
+
+        _logger.LogDebug(
+            "Acknowledged Notify event {EventId} on consumer {Consumer} (stream {Stream}, id {MessageId}).",
+            lease.Message.Event.EventId,
+            lease.Consumer,
+            streamOptions.Stream,
+            lease.MessageId);
+    }
+
+    internal async Task RenewLeaseAsync(
+        RedisNotifyEventLease lease,
+        TimeSpan leaseDuration,
+        CancellationToken cancellationToken)
+    {
+        var db = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false);
+        var streamOptions = lease.StreamOptions;
+
+        await db.StreamClaimAsync(
+            streamOptions.Stream,
+            streamOptions.ConsumerGroup,
+            lease.Consumer,
+            0,
+            new RedisValue[] { lease.MessageId })
+            .ConfigureAwait(false);
+
+        var expires = _timeProvider.GetUtcNow().Add(leaseDuration);
+        lease.RefreshLease(expires);
+
+        _logger.LogDebug(
+            "Renewed Notify event lease for {EventId} until {Expires:u}.",
+            lease.Message.Event.EventId,
+            expires);
+    }
+
+    internal Task ReleaseAsync(
+        RedisNotifyEventLease lease,
+        NotifyQueueReleaseDisposition disposition,
+        CancellationToken cancellationToken)
+        => Task.FromException(new NotSupportedException("Retry/abandon is not supported for Notify event streams."));
+
+    internal async Task DeadLetterAsync(
+        RedisNotifyEventLease lease,
+        string reason,
+        CancellationToken cancellationToken)
+    {
+        if (!lease.TryBeginCompletion())
+        {
+            return;
+        }
+
+        var db = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false);
+        var streamOptions = lease.StreamOptions;
+
+        await db.StreamAcknowledgeAsync(
+            streamOptions.Stream,
+            streamOptions.ConsumerGroup,
+            new RedisValue[] { lease.MessageId })
+            .ConfigureAwait(false);
+
+        await db.StreamDeleteAsync(
+            streamOptions.Stream,
+            new RedisValue[] { lease.MessageId })
+            .ConfigureAwait(false);
+
+        _logger.LogWarning(
+            "Dead-lettered Notify event {EventId} on stream {Stream} with reason '{Reason}'.",
+            lease.Message.Event.EventId,
+            streamOptions.Stream,
+            reason);
+    }
+
+    internal async ValueTask PingAsync(CancellationToken cancellationToken)
+    {
+        var db = await GetDatabaseAsync(cancellationToken).ConfigureAwait(false);
+        _ = await db.PingAsync().ConfigureAwait(false);
+    }
+
+    private NotifyRedisEventStreamOptions GetStreamOptions(string stream)
+    {
+        if (!_streamsByName.TryGetValue(stream, out var options))
+        {
+            throw new InvalidOperationException($"Stream '{stream}' is not configured for the Notify event queue.");
+        }
+
+        return options;
+    }
+
+    private async Task<IDatabase> GetDatabaseAsync(CancellationToken cancellationToken)
+    {
+        if (_connection is { IsConnected: true })
+        {
+            return _connection.GetDatabase(_redisOptions.Database ?? -1);
+        }
+
+        await _connectionLock.WaitAsync(cancellationToken).ConfigureAwait(false);
+        try
+        {
+            if (_connection is { IsConnected: true })
+            {
+                return _connection.GetDatabase(_redisOptions.Database ?? -1);
+            }
+
+            var configuration = ConfigurationOptions.Parse(_redisOptions.ConnectionString!);
+            configuration.AbortOnConnectFail = false;
+            if (_redisOptions.Database.HasValue)
+            {
+                configuration.DefaultDatabase = _redisOptions.Database;
+            }
+
+            using var timeoutCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken);
+            timeoutCts.CancelAfter(_redisOptions.InitializationTimeout);
+
+            _connection = await _connectionFactory(configuration).WaitAsync(timeoutCts.Token).ConfigureAwait(false);
+            return _connection.GetDatabase(_redisOptions.Database ?? -1);
+        }
+        finally
+        {
+            _connectionLock.Release();
+        }
+    }
+
+    private async Task EnsureStreamInitializedAsync(
+        IDatabase database,
+        NotifyRedisEventStreamOptions streamOptions,
+        CancellationToken cancellationToken)
+    {
+        if (_initializedStreams.ContainsKey(streamOptions.Stream))
+        {
+            return;
+        }
+
+        await _groupInitLock.WaitAsync(cancellationToken).ConfigureAwait(false);
+        try
+        {
+            if (_initializedStreams.ContainsKey(streamOptions.Stream))
+            {
+                return;
+            }
+
+            try
+            {
+                await database.StreamCreateConsumerGroupAsync(
+                    streamOptions.Stream,
+                    streamOptions.ConsumerGroup,
+                    StreamPosition.Beginning,
+                    createStream: true)
+                    .ConfigureAwait(false);
+            }
+            catch (RedisServerException ex) when (ex.Message.Contains("BUSYGROUP", StringComparison.OrdinalIgnoreCase))
+            {
+                // Consumer group already exists — nothing to do.
+            }
+
+            _initializedStreams[streamOptions.Stream] = true;
+        }
+        finally
+        {
+            _groupInitLock.Release();
+        }
+    }
+
+    private static async Task<RedisValue> AddToStreamAsync(
+        IDatabase database,
+        NotifyRedisEventStreamOptions streamOptions,
+        IReadOnlyList<NameValueEntry> entries)
+    {
+        return await database.StreamAddAsync(
+            streamOptions.Stream,
+            entries.ToArray(),
+            maxLength: streamOptions.ApproximateMaxLength,
+            useApproximateMaxLength: streamOptions.ApproximateMaxLength is not null)
+            .ConfigureAwait(false);
+    }
+
+    private IReadOnlyList<NameValueEntry> BuildEntries(
+        NotifyQueueEventMessage message,
+        DateTimeOffset enqueuedAt,
+        int attempt)
+    {
+        var payload = NotifyCanonicalJsonSerializer.Serialize(message.Event);
+
+        var entries = new List<NameValueEntry>(8 + message.Attributes.Count)
+        {
+            new(NotifyQueueFields.Payload, payload),
+            new(NotifyQueueFields.EventId, message.Event.EventId.ToString("D")),
+            new(NotifyQueueFields.Tenant, message.TenantId),
+            new(NotifyQueueFields.Kind, message.Event.Kind),
+            new(NotifyQueueFields.Attempt, attempt),
+            new(NotifyQueueFields.EnqueuedAt, enqueuedAt.ToUnixTimeMilliseconds()),
+            new(NotifyQueueFields.IdempotencyKey, message.IdempotencyKey),
+            new(NotifyQueueFields.PartitionKey, message.PartitionKey ?? string.Empty),
+            new(NotifyQueueFields.TraceId, message.TraceId ?? string.Empty)
+        };
+
+        foreach (var kvp in message.Attributes)
+        {
+            entries.Add(new NameValueEntry(
+                NotifyQueueFields.AttributePrefix + kvp.Key,
+                kvp.Value));
+        }
+
+        return entries;
+    }
+
+    private RedisNotifyEventLease? TryMapLease(
+        NotifyRedisEventStreamOptions streamOptions,
+        StreamEntry entry,
+        string consumer,
+        DateTimeOffset now,
+        TimeSpan leaseDuration,
+        int? attemptOverride)
+    {
+        if (entry.Values is null || entry.Values.Length == 0)
+        {
+            return null;
+        }
+
+        string? payloadJson = null;
+        string? eventIdRaw = null;
+        long? enqueuedAtUnix = null;
+        string? idempotency = null;
+        string? partitionKey = null;
+        string? traceId = null;
+        var attempt = attemptOverride ?? 1;
+        var attributes = new Dictionary<string, string>(StringComparer.Ordinal);
+
+        foreach (var field in entry.Values)
+        {
+            var name = field.Name.ToString();
+            var value = field.Value;
+            if (name.Equals(NotifyQueueFields.Payload, StringComparison.Ordinal))
+            {
+                payloadJson = value.ToString();
+            }
+            else if (name.Equals(NotifyQueueFields.EventId, StringComparison.Ordinal))
+            {
+                eventIdRaw = value.ToString();
+            }
+            else if (name.Equals(NotifyQueueFields.Attempt, StringComparison.Ordinal))
+            {
+                if (int.TryParse(value.ToString(), NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsed))
+                {
+                    attempt = Math.Max(parsed, attempt);
+                }
+            }
+            else if (name.Equals(NotifyQueueFields.EnqueuedAt, StringComparison.Ordinal))
+            {
+                if (long.TryParse(value.ToString(), NumberStyles.Integer, CultureInfo.InvariantCulture, out var unix))
+                {
+                    enqueuedAtUnix = unix;
+                }
+            }
+            else if (name.Equals(NotifyQueueFields.IdempotencyKey, StringComparison.Ordinal))
+            {
+                var text = value.ToString();
+                idempotency = string.IsNullOrWhiteSpace(text) ? null : text;
+            }
+            else if (name.Equals(NotifyQueueFields.PartitionKey, StringComparison.Ordinal))
+            {
+                var text = value.ToString();
+                partitionKey = string.IsNullOrWhiteSpace(text) ? null : text;
+            }
+            else if (name.Equals(NotifyQueueFields.TraceId, StringComparison.Ordinal))
+            {
+                var text = value.ToString();
+                traceId = string.IsNullOrWhiteSpace(text) ? null : text;
+            }
+            else if (name.StartsWith(NotifyQueueFields.AttributePrefix, StringComparison.Ordinal))
+            {
+                var key = name[NotifyQueueFields.AttributePrefix.Length..];
+                attributes[key] = value.ToString();
+            }
+        }
+
+        if (payloadJson is null || enqueuedAtUnix is null)
+        {
+            return null;
+        }
+
+        NotifyEvent notifyEvent;
+        try
+        {
+            notifyEvent = NotifyCanonicalJsonSerializer.Deserialize<NotifyEvent>(payloadJson);
+        }
+        catch (Exception ex)
+        {
+            _logger.LogWarning(
+                ex,
+                "Failed to deserialize Notify event payload for stream {Stream} entry {EntryId}.",
+                streamOptions.Stream,
+                entry.Id.ToString());
+            return null;
+        }
+
+        var attributeView = attributes.Count == 0
+            ? EmptyReadOnlyDictionary<string, string>.Instance
+            : new ReadOnlyDictionary<string, string>(attributes);
+
+        var message = new NotifyQueueEventMessage(
+            notifyEvent,
+            streamOptions.Stream,
+            idempotencyKey: idempotency ?? notifyEvent.EventId.ToString("N"),
+            partitionKey: partitionKey,
+            traceId: traceId,
+            attributes: attributeView);
+
+        var enqueuedAt = DateTimeOffset.FromUnixTimeMilliseconds(enqueuedAtUnix.Value);
+        var leaseExpiresAt = now.Add(leaseDuration);
+
+        return new RedisNotifyEventLease(
+            this,
+            streamOptions,
+            entry.Id.ToString(),
+            message,
+            attempt,
+            consumer,
+            enqueuedAt,
+            leaseExpiresAt);
+    }
+
+    private async Task AckPoisonAsync(
+        IDatabase database,
+        NotifyRedisEventStreamOptions streamOptions,
+        RedisValue messageId)
+    {
+        await database.StreamAcknowledgeAsync(
+            streamOptions.Stream,
+            streamOptions.ConsumerGroup,
+            new RedisValue[] { messageId })
+            .ConfigureAwait(false);
+
+        await database.StreamDeleteAsync(
+            streamOptions.Stream,
+            new RedisValue[] { messageId })
+            .ConfigureAwait(false);
+    }
+}
diff --git a/src/StellaOps.Notify.Queue/StellaOps.Notify.Queue.csproj b/src/Notify/__Libraries/StellaOps.Notify.Queue/StellaOps.Notify.Queue.csproj
similarity index 98%
rename from src/StellaOps.Notify.Queue/StellaOps.Notify.Queue.csproj
rename to src/Notify/__Libraries/StellaOps.Notify.Queue/StellaOps.Notify.Queue.csproj
index c57f16cb..b0c1d41a 100644
--- a/src/StellaOps.Notify.Queue/StellaOps.Notify.Queue.csproj
+++ b/src/Notify/__Libraries/StellaOps.Notify.Queue/StellaOps.Notify.Queue.csproj
@@ -1,23 +1,23 @@
-<Project Sdk="Microsoft.NET.Sdk">
-  <PropertyGroup>
-    <TargetFramework>net10.0</TargetFramework>
-    <ImplicitUsings>enable</ImplicitUsings>
-    <Nullable>enable</Nullable>
-  </PropertyGroup>
-  <ItemGroup>
-    <PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="10.0.0-rc.2.25502.107" />
-    <PackageReference Include="Microsoft.Extensions.Configuration.Binder" Version="10.0.0-rc.2.25502.107" />
-    <PackageReference Include="Microsoft.Extensions.DependencyInjection" Version="10.0.0-rc.2.25502.107" />
-    <PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0-rc.2.25502.107" />
-    <PackageReference Include="Microsoft.Extensions.Diagnostics.HealthChecks" Version="10.0.0-rc.2.25502.107" />
-    <PackageReference Include="Microsoft.Extensions.Diagnostics.HealthChecks.Abstractions" Version="10.0.0-rc.2.25502.107" />
-    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" />
-    <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" />
-    <PackageReference Include="NATS.Client.Core" Version="2.0.0" />
-    <PackageReference Include="NATS.Client.JetStream" Version="2.0.0" />
-    <PackageReference Include="StackExchange.Redis" Version="2.7.33" />
-  </ItemGroup>
-  <ItemGroup>
-    <ProjectReference Include="..\StellaOps.Notify.Models\StellaOps.Notify.Models.csproj" />
-  </ItemGroup>
-</Project>
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <TargetFramework>net10.0</TargetFramework>
+    <ImplicitUsings>enable</ImplicitUsings>
+    <Nullable>enable</Nullable>
+  </PropertyGroup>
+  <ItemGroup>
+    <PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="10.0.0-rc.2.25502.107" />
+    <PackageReference Include="Microsoft.Extensions.Configuration.Binder" Version="10.0.0-rc.2.25502.107" />
+    <PackageReference Include="Microsoft.Extensions.DependencyInjection" Version="10.0.0-rc.2.25502.107" />
+    <PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0-rc.2.25502.107" />
+    <PackageReference Include="Microsoft.Extensions.Diagnostics.HealthChecks" Version="10.0.0-rc.2.25502.107" />
+    <PackageReference Include="Microsoft.Extensions.Diagnostics.HealthChecks.Abstractions" Version="10.0.0-rc.2.25502.107" />
+    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" />
+    <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" />
+    <PackageReference Include="NATS.Client.Core" Version="2.0.0" />
+    <PackageReference Include="NATS.Client.JetStream" Version="2.0.0" />
+    <PackageReference Include="StackExchange.Redis" Version="2.7.33" />
+  </ItemGroup>
+  <ItemGroup>
+    <ProjectReference Include="..\StellaOps.Notify.Models\StellaOps.Notify.Models.csproj" />
+  </ItemGroup>
+</Project>
diff --git a/src/StellaOps.Notify.Queue/TASKS.md b/src/Notify/__Libraries/StellaOps.Notify.Queue/TASKS.md
similarity index 67%
rename from src/StellaOps.Notify.Queue/TASKS.md
rename to src/Notify/__Libraries/StellaOps.Notify.Queue/TASKS.md
index 41f96344..4de5f6fc 100644
--- a/src/StellaOps.Notify.Queue/TASKS.md
+++ b/src/Notify/__Libraries/StellaOps.Notify.Queue/TASKS.md
@@ -1,2 +1,2 @@
 # Notify Queue Task Board (Sprint 15)
-> Archived 2025-10-26 — queue infrastructure maintained in `src/StellaOps.Notifier` (Sprints 38–40).
+> Archived 2025-10-26 — queue infrastructure maintained in `src/Notifier/StellaOps.Notifier` (Sprints 38–40).
diff --git a/src/StellaOps.Notify.Storage.Mongo/AGENTS.md b/src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/AGENTS.md
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo/AGENTS.md
rename to src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/AGENTS.md
diff --git a/src/StellaOps.Notify.Storage.Mongo/Documents/NotifyAuditEntryDocument.cs b/src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Documents/NotifyAuditEntryDocument.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo/Documents/NotifyAuditEntryDocument.cs
rename to src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Documents/NotifyAuditEntryDocument.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo/Documents/NotifyDigestDocument.cs b/src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Documents/NotifyDigestDocument.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo/Documents/NotifyDigestDocument.cs
rename to src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Documents/NotifyDigestDocument.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo/Documents/NotifyLockDocument.cs b/src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Documents/NotifyLockDocument.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo/Documents/NotifyLockDocument.cs
rename to src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Documents/NotifyLockDocument.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo/Internal/NotifyMongoContext.cs b/src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Internal/NotifyMongoContext.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo/Internal/NotifyMongoContext.cs
rename to src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Internal/NotifyMongoContext.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo/Internal/NotifyMongoInitializer.cs b/src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Internal/NotifyMongoInitializer.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo/Internal/NotifyMongoInitializer.cs
rename to src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Internal/NotifyMongoInitializer.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo/Migrations/EnsureNotifyCollectionsMigration.cs b/src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Migrations/EnsureNotifyCollectionsMigration.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo/Migrations/EnsureNotifyCollectionsMigration.cs
rename to src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Migrations/EnsureNotifyCollectionsMigration.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo/Migrations/EnsureNotifyIndexesMigration.cs b/src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Migrations/EnsureNotifyIndexesMigration.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo/Migrations/EnsureNotifyIndexesMigration.cs
rename to src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Migrations/EnsureNotifyIndexesMigration.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo/Migrations/INotifyMongoMigration.cs b/src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Migrations/INotifyMongoMigration.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo/Migrations/INotifyMongoMigration.cs
rename to src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Migrations/INotifyMongoMigration.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo/Migrations/NotifyMongoMigrationRecord.cs b/src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Migrations/NotifyMongoMigrationRecord.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo/Migrations/NotifyMongoMigrationRecord.cs
rename to src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Migrations/NotifyMongoMigrationRecord.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo/Migrations/NotifyMongoMigrationRunner.cs b/src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Migrations/NotifyMongoMigrationRunner.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo/Migrations/NotifyMongoMigrationRunner.cs
rename to src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Migrations/NotifyMongoMigrationRunner.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo/Options/NotifyMongoOptions.cs b/src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Options/NotifyMongoOptions.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo/Options/NotifyMongoOptions.cs
rename to src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Options/NotifyMongoOptions.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo/Properties/AssemblyInfo.cs b/src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Properties/AssemblyInfo.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo/Properties/AssemblyInfo.cs
rename to src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Properties/AssemblyInfo.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo/Repositories/INotifyAuditRepository.cs b/src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Repositories/INotifyAuditRepository.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo/Repositories/INotifyAuditRepository.cs
rename to src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Repositories/INotifyAuditRepository.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo/Repositories/INotifyChannelRepository.cs b/src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Repositories/INotifyChannelRepository.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo/Repositories/INotifyChannelRepository.cs
rename to src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Repositories/INotifyChannelRepository.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo/Repositories/INotifyDeliveryRepository.cs b/src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Repositories/INotifyDeliveryRepository.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo/Repositories/INotifyDeliveryRepository.cs
rename to src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Repositories/INotifyDeliveryRepository.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo/Repositories/INotifyDigestRepository.cs b/src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Repositories/INotifyDigestRepository.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo/Repositories/INotifyDigestRepository.cs
rename to src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Repositories/INotifyDigestRepository.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo/Repositories/INotifyLockRepository.cs b/src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Repositories/INotifyLockRepository.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo/Repositories/INotifyLockRepository.cs
rename to src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Repositories/INotifyLockRepository.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo/Repositories/INotifyRuleRepository.cs b/src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Repositories/INotifyRuleRepository.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo/Repositories/INotifyRuleRepository.cs
rename to src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Repositories/INotifyRuleRepository.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo/Repositories/INotifyTemplateRepository.cs b/src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Repositories/INotifyTemplateRepository.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo/Repositories/INotifyTemplateRepository.cs
rename to src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Repositories/INotifyTemplateRepository.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyAuditRepository.cs b/src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Repositories/NotifyAuditRepository.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyAuditRepository.cs
rename to src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Repositories/NotifyAuditRepository.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyChannelRepository.cs b/src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Repositories/NotifyChannelRepository.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyChannelRepository.cs
rename to src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Repositories/NotifyChannelRepository.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyDeliveryQueryResult.cs b/src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Repositories/NotifyDeliveryQueryResult.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyDeliveryQueryResult.cs
rename to src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Repositories/NotifyDeliveryQueryResult.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyDeliveryRepository.cs b/src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Repositories/NotifyDeliveryRepository.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyDeliveryRepository.cs
rename to src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Repositories/NotifyDeliveryRepository.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyDigestRepository.cs b/src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Repositories/NotifyDigestRepository.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyDigestRepository.cs
rename to src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Repositories/NotifyDigestRepository.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyLockRepository.cs b/src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Repositories/NotifyLockRepository.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyLockRepository.cs
rename to src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Repositories/NotifyLockRepository.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyRuleRepository.cs b/src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Repositories/NotifyRuleRepository.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyRuleRepository.cs
rename to src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Repositories/NotifyRuleRepository.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyTemplateRepository.cs b/src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Repositories/NotifyTemplateRepository.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo/Repositories/NotifyTemplateRepository.cs
rename to src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Repositories/NotifyTemplateRepository.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo/Serialization/BsonDocumentJsonExtensions.cs b/src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Serialization/BsonDocumentJsonExtensions.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo/Serialization/BsonDocumentJsonExtensions.cs
rename to src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Serialization/BsonDocumentJsonExtensions.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo/Serialization/NotifyChannelDocumentMapper.cs b/src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Serialization/NotifyChannelDocumentMapper.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo/Serialization/NotifyChannelDocumentMapper.cs
rename to src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Serialization/NotifyChannelDocumentMapper.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo/Serialization/NotifyDeliveryDocumentMapper.cs b/src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Serialization/NotifyDeliveryDocumentMapper.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo/Serialization/NotifyDeliveryDocumentMapper.cs
rename to src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Serialization/NotifyDeliveryDocumentMapper.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo/Serialization/NotifyRuleDocumentMapper.cs b/src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Serialization/NotifyRuleDocumentMapper.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo/Serialization/NotifyRuleDocumentMapper.cs
rename to src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Serialization/NotifyRuleDocumentMapper.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo/Serialization/NotifyTemplateDocumentMapper.cs b/src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Serialization/NotifyTemplateDocumentMapper.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo/Serialization/NotifyTemplateDocumentMapper.cs
rename to src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/Serialization/NotifyTemplateDocumentMapper.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo/ServiceCollectionExtensions.cs b/src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/ServiceCollectionExtensions.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo/ServiceCollectionExtensions.cs
rename to src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/ServiceCollectionExtensions.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo/StellaOps.Notify.Storage.Mongo.csproj b/src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/StellaOps.Notify.Storage.Mongo.csproj
similarity index 98%
rename from src/StellaOps.Notify.Storage.Mongo/StellaOps.Notify.Storage.Mongo.csproj
rename to src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/StellaOps.Notify.Storage.Mongo.csproj
index 518d7842..d1f08206 100644
--- a/src/StellaOps.Notify.Storage.Mongo/StellaOps.Notify.Storage.Mongo.csproj
+++ b/src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/StellaOps.Notify.Storage.Mongo.csproj
@@ -1,18 +1,18 @@
-<Project Sdk="Microsoft.NET.Sdk">
-  <PropertyGroup>
-    <TargetFramework>net10.0</TargetFramework>
-    <ImplicitUsings>enable</ImplicitUsings>
-    <Nullable>enable</Nullable>
-  </PropertyGroup>
-  <ItemGroup>
-    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" />
-    <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" />
-    <PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="10.0.0-rc.2.25502.107" />
-    <PackageReference Include="Microsoft.Extensions.Options.ConfigurationExtensions" Version="10.0.0-rc.2.25502.107" />
-    <PackageReference Include="MongoDB.Driver" Version="3.5.0" />
-    <PackageReference Include="MongoDB.Bson" Version="3.5.0" />
-  </ItemGroup>
-  <ItemGroup>
-    <ProjectReference Include="../StellaOps.Notify.Models/StellaOps.Notify.Models.csproj" />
-  </ItemGroup>
-</Project>
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <TargetFramework>net10.0</TargetFramework>
+    <ImplicitUsings>enable</ImplicitUsings>
+    <Nullable>enable</Nullable>
+  </PropertyGroup>
+  <ItemGroup>
+    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" />
+    <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" />
+    <PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="10.0.0-rc.2.25502.107" />
+    <PackageReference Include="Microsoft.Extensions.Options.ConfigurationExtensions" Version="10.0.0-rc.2.25502.107" />
+    <PackageReference Include="MongoDB.Driver" Version="3.5.0" />
+    <PackageReference Include="MongoDB.Bson" Version="3.5.0" />
+  </ItemGroup>
+  <ItemGroup>
+    <ProjectReference Include="../StellaOps.Notify.Models/StellaOps.Notify.Models.csproj" />
+  </ItemGroup>
+</Project>
diff --git a/src/StellaOps.Notify.Storage.Mongo/TASKS.md b/src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/TASKS.md
similarity index 65%
rename from src/StellaOps.Notify.Storage.Mongo/TASKS.md
rename to src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/TASKS.md
index 319e2fa6..a9434682 100644
--- a/src/StellaOps.Notify.Storage.Mongo/TASKS.md
+++ b/src/Notify/__Libraries/StellaOps.Notify.Storage.Mongo/TASKS.md
@@ -1,2 +1,2 @@
 # Notify Storage Task Board (Sprint 15)
-> Archived 2025-10-26 — storage responsibilities now tracked in `src/StellaOps.Notifier` (Sprints 38–40).
+> Archived 2025-10-26 — storage responsibilities now tracked in `src/Notifier/StellaOps.Notifier` (Sprints 38–40).
diff --git a/src/StellaOps.Notify.Connectors.Email.Tests/EmailChannelHealthProviderTests.cs b/src/Notify/__Tests/StellaOps.Notify.Connectors.Email.Tests/EmailChannelHealthProviderTests.cs similarity index 97% rename from src/StellaOps.Notify.Connectors.Email.Tests/EmailChannelHealthProviderTests.cs rename to src/Notify/__Tests/StellaOps.Notify.Connectors.Email.Tests/EmailChannelHealthProviderTests.cs index 984ea24a..d506ac00 100644 --- a/src/StellaOps.Notify.Connectors.Email.Tests/EmailChannelHealthProviderTests.cs +++ b/src/Notify/__Tests/StellaOps.Notify.Connectors.Email.Tests/EmailChannelHealthProviderTests.cs @@ -1,100 +1,100 @@ -using System; -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; -using StellaOps.Notify.Engine; -using StellaOps.Notify.Models; -using Xunit; - -namespace StellaOps.Notify.Connectors.Email.Tests; - -public sealed class EmailChannelHealthProviderTests -{ - private static readonly EmailChannelHealthProvider Provider = new(); - - [Fact] - public async Task CheckAsync_ReturnsHealthy() - { - var channel = CreateChannel(enabled: true, target: "ops@example.com"); - - var context = new ChannelHealthContext( - channel.TenantId, - channel, - channel.Config.Target!, - new DateTimeOffset(2025, 10, 20, 15, 0, 0, TimeSpan.Zero), - "trace-email-001"); - - var result = await Provider.CheckAsync(context, CancellationToken.None); - - Assert.Equal(ChannelHealthStatus.Healthy, result.Status); - Assert.Equal("true", result.Metadata["email.channel.enabled"]); - Assert.Equal("true", result.Metadata["email.validation.targetPresent"]); - Assert.Equal("ops@example.com", result.Metadata["email.target"]); - } - - [Fact] - public async Task CheckAsync_ReturnsDegradedWhenDisabled() - { - var channel = CreateChannel(enabled: false, target: "ops@example.com"); - - var context = new ChannelHealthContext( - channel.TenantId, - channel, - channel.Config.Target!, - DateTimeOffset.UtcNow, - "trace-email-002"); - - var result = await Provider.CheckAsync(context, CancellationToken.None); - - Assert.Equal(ChannelHealthStatus.Degraded, result.Status); - Assert.Equal("false", result.Metadata["email.channel.enabled"]); - } - - [Fact] - public async Task CheckAsync_ReturnsUnhealthyWhenTargetMissing() - { - var channel = NotifyChannel.Create( - channelId: "channel-email-ops", - tenantId: "tenant-sec", - name: "email:ops", - type: NotifyChannelType.Email, - config: NotifyChannelConfig.Create( - secretRef: "ref://notify/channels/email/ops", - target: null, - properties: new Dictionary<string, string> - { - ["smtpHost"] = "smtp.ops.example.com" - }), - enabled: true); - - var context = new ChannelHealthContext( - channel.TenantId, - channel, - channel.Name, - DateTimeOffset.UtcNow, - "trace-email-003"); - - var result = await Provider.CheckAsync(context, CancellationToken.None); - - Assert.Equal(ChannelHealthStatus.Unhealthy, result.Status); - Assert.Equal("false", result.Metadata["email.validation.targetPresent"]); - } - - private static NotifyChannel CreateChannel(bool enabled, string? 
target) - { - return NotifyChannel.Create( - channelId: "channel-email-ops", - tenantId: "tenant-sec", - name: "email:ops", - type: NotifyChannelType.Email, - config: NotifyChannelConfig.Create( - secretRef: "ref://notify/channels/email/ops", - target: target, - properties: new Dictionary<string, string> - { - ["smtpHost"] = "smtp.ops.example.com", - ["password"] = "super-secret" - }), - enabled: enabled); - } -} +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Notify.Engine; +using StellaOps.Notify.Models; +using Xunit; + +namespace StellaOps.Notify.Connectors.Email.Tests; + +public sealed class EmailChannelHealthProviderTests +{ + private static readonly EmailChannelHealthProvider Provider = new(); + + [Fact] + public async Task CheckAsync_ReturnsHealthy() + { + var channel = CreateChannel(enabled: true, target: "ops@example.com"); + + var context = new ChannelHealthContext( + channel.TenantId, + channel, + channel.Config.Target!, + new DateTimeOffset(2025, 10, 20, 15, 0, 0, TimeSpan.Zero), + "trace-email-001"); + + var result = await Provider.CheckAsync(context, CancellationToken.None); + + Assert.Equal(ChannelHealthStatus.Healthy, result.Status); + Assert.Equal("true", result.Metadata["email.channel.enabled"]); + Assert.Equal("true", result.Metadata["email.validation.targetPresent"]); + Assert.Equal("ops@example.com", result.Metadata["email.target"]); + } + + [Fact] + public async Task CheckAsync_ReturnsDegradedWhenDisabled() + { + var channel = CreateChannel(enabled: false, target: "ops@example.com"); + + var context = new ChannelHealthContext( + channel.TenantId, + channel, + channel.Config.Target!, + DateTimeOffset.UtcNow, + "trace-email-002"); + + var result = await Provider.CheckAsync(context, CancellationToken.None); + + Assert.Equal(ChannelHealthStatus.Degraded, result.Status); + Assert.Equal("false", result.Metadata["email.channel.enabled"]); + } + + [Fact] + public async Task CheckAsync_ReturnsUnhealthyWhenTargetMissing() + { + var channel = NotifyChannel.Create( + channelId: "channel-email-ops", + tenantId: "tenant-sec", + name: "email:ops", + type: NotifyChannelType.Email, + config: NotifyChannelConfig.Create( + secretRef: "ref://notify/channels/email/ops", + target: null, + properties: new Dictionary<string, string> + { + ["smtpHost"] = "smtp.ops.example.com" + }), + enabled: true); + + var context = new ChannelHealthContext( + channel.TenantId, + channel, + channel.Name, + DateTimeOffset.UtcNow, + "trace-email-003"); + + var result = await Provider.CheckAsync(context, CancellationToken.None); + + Assert.Equal(ChannelHealthStatus.Unhealthy, result.Status); + Assert.Equal("false", result.Metadata["email.validation.targetPresent"]); + } + + private static NotifyChannel CreateChannel(bool enabled, string? 
target) + { + return NotifyChannel.Create( + channelId: "channel-email-ops", + tenantId: "tenant-sec", + name: "email:ops", + type: NotifyChannelType.Email, + config: NotifyChannelConfig.Create( + secretRef: "ref://notify/channels/email/ops", + target: target, + properties: new Dictionary<string, string> + { + ["smtpHost"] = "smtp.ops.example.com", + ["password"] = "super-secret" + }), + enabled: enabled); + } +} diff --git a/src/StellaOps.Notify.Connectors.Email.Tests/StellaOps.Notify.Connectors.Email.Tests.csproj b/src/Notify/__Tests/StellaOps.Notify.Connectors.Email.Tests/StellaOps.Notify.Connectors.Email.Tests.csproj similarity index 60% rename from src/StellaOps.Notify.Connectors.Email.Tests/StellaOps.Notify.Connectors.Email.Tests.csproj rename to src/Notify/__Tests/StellaOps.Notify.Connectors.Email.Tests/StellaOps.Notify.Connectors.Email.Tests.csproj index b155d003..726be939 100644 --- a/src/StellaOps.Notify.Connectors.Email.Tests/StellaOps.Notify.Connectors.Email.Tests.csproj +++ b/src/Notify/__Tests/StellaOps.Notify.Connectors.Email.Tests/StellaOps.Notify.Connectors.Email.Tests.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -7,9 +8,9 @@ </PropertyGroup> <ItemGroup> - <ProjectReference Include="../StellaOps.Notify.Connectors.Email/StellaOps.Notify.Connectors.Email.csproj" /> - <ProjectReference Include="../StellaOps.Notify.Engine/StellaOps.Notify.Engine.csproj" /> - <ProjectReference Include="../StellaOps.Notify.Models/StellaOps.Notify.Models.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Notify.Connectors.Email/StellaOps.Notify.Connectors.Email.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Notify.Engine/StellaOps.Notify.Engine.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Notify.Models/StellaOps.Notify.Models.csproj" /> </ItemGroup> <ItemGroup> @@ -18,4 +19,4 @@ <PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" /> <PackageReference Include="coverlet.collector" Version="6.0.4" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Notify.Connectors.Slack.Tests/SlackChannelHealthProviderTests.cs b/src/Notify/__Tests/StellaOps.Notify.Connectors.Slack.Tests/SlackChannelHealthProviderTests.cs similarity index 97% rename from src/StellaOps.Notify.Connectors.Slack.Tests/SlackChannelHealthProviderTests.cs rename to src/Notify/__Tests/StellaOps.Notify.Connectors.Slack.Tests/SlackChannelHealthProviderTests.cs index 8878b3ef..bd850748 100644 --- a/src/StellaOps.Notify.Connectors.Slack.Tests/SlackChannelHealthProviderTests.cs +++ b/src/Notify/__Tests/StellaOps.Notify.Connectors.Slack.Tests/SlackChannelHealthProviderTests.cs @@ -1,96 +1,96 @@ -using System; -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; -using StellaOps.Notify.Engine; -using StellaOps.Notify.Models; -using Xunit; - -namespace StellaOps.Notify.Connectors.Slack.Tests; - -public sealed class SlackChannelHealthProviderTests -{ - private static readonly SlackChannelHealthProvider Provider = new(); - - [Fact] - public async Task CheckAsync_ReturnsHealthy() - { - var channel = CreateChannel(enabled: true, target: "#sec-ops"); - - var context = new ChannelHealthContext( - channel.TenantId, - channel, - channel.Config.Target!, - new DateTimeOffset(2025, 10, 20, 14, 0, 0, TimeSpan.Zero), - "trace-slack-001"); - - var result = await Provider.CheckAsync(context, 
CancellationToken.None); - - Assert.Equal(ChannelHealthStatus.Healthy, result.Status); - Assert.Equal("true", result.Metadata["slack.channel.enabled"]); - Assert.Equal("true", result.Metadata["slack.validation.targetPresent"]); - Assert.Equal("#sec-ops", result.Metadata["slack.channel"]); - Assert.Equal(ComputeSecretHash(channel.Config.SecretRef), result.Metadata["slack.secretRef.hash"]); - } - - [Fact] - public async Task CheckAsync_ReturnsDegradedWhenDisabled() - { - var channel = CreateChannel(enabled: false, target: "#sec-ops"); - - var context = new ChannelHealthContext( - channel.TenantId, - channel, - channel.Config.Target!, - DateTimeOffset.UtcNow, - "trace-slack-002"); - - var result = await Provider.CheckAsync(context, CancellationToken.None); - - Assert.Equal(ChannelHealthStatus.Degraded, result.Status); - Assert.Equal("false", result.Metadata["slack.channel.enabled"]); - } - - [Fact] - public async Task CheckAsync_ReturnsUnhealthyWhenTargetMissing() - { - var channel = CreateChannel(enabled: true, target: null); - - var context = new ChannelHealthContext( - channel.TenantId, - channel, - channel.Name, - DateTimeOffset.UtcNow, - "trace-slack-003"); - - var result = await Provider.CheckAsync(context, CancellationToken.None); - - Assert.Equal(ChannelHealthStatus.Unhealthy, result.Status); - Assert.Equal("false", result.Metadata["slack.validation.targetPresent"]); - } - - private static NotifyChannel CreateChannel(bool enabled, string? target) - { - return NotifyChannel.Create( - channelId: "channel-slack-sec-ops", - tenantId: "tenant-sec", - name: "slack:sec-ops", - type: NotifyChannelType.Slack, - config: NotifyChannelConfig.Create( - secretRef: "ref://notify/channels/slack/sec-ops", - target: target, - properties: new Dictionary<string, string> - { - ["workspace"] = "stellaops-sec", - ["botToken"] = "xoxb-123456789012-abcdefghijklmnop" - }), - enabled: enabled); - } - - private static string ComputeSecretHash(string secretRef) - { - var bytes = System.Text.Encoding.UTF8.GetBytes(secretRef.Trim()); - var hash = System.Security.Cryptography.SHA256.HashData(bytes); - return Convert.ToHexString(hash.AsSpan(0, 8)).ToLowerInvariant(); - } -} +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Notify.Engine; +using StellaOps.Notify.Models; +using Xunit; + +namespace StellaOps.Notify.Connectors.Slack.Tests; + +public sealed class SlackChannelHealthProviderTests +{ + private static readonly SlackChannelHealthProvider Provider = new(); + + [Fact] + public async Task CheckAsync_ReturnsHealthy() + { + var channel = CreateChannel(enabled: true, target: "#sec-ops"); + + var context = new ChannelHealthContext( + channel.TenantId, + channel, + channel.Config.Target!, + new DateTimeOffset(2025, 10, 20, 14, 0, 0, TimeSpan.Zero), + "trace-slack-001"); + + var result = await Provider.CheckAsync(context, CancellationToken.None); + + Assert.Equal(ChannelHealthStatus.Healthy, result.Status); + Assert.Equal("true", result.Metadata["slack.channel.enabled"]); + Assert.Equal("true", result.Metadata["slack.validation.targetPresent"]); + Assert.Equal("#sec-ops", result.Metadata["slack.channel"]); + Assert.Equal(ComputeSecretHash(channel.Config.SecretRef), result.Metadata["slack.secretRef.hash"]); + } + + [Fact] + public async Task CheckAsync_ReturnsDegradedWhenDisabled() + { + var channel = CreateChannel(enabled: false, target: "#sec-ops"); + + var context = new ChannelHealthContext( + channel.TenantId, + channel, + 
channel.Config.Target!, + DateTimeOffset.UtcNow, + "trace-slack-002"); + + var result = await Provider.CheckAsync(context, CancellationToken.None); + + Assert.Equal(ChannelHealthStatus.Degraded, result.Status); + Assert.Equal("false", result.Metadata["slack.channel.enabled"]); + } + + [Fact] + public async Task CheckAsync_ReturnsUnhealthyWhenTargetMissing() + { + var channel = CreateChannel(enabled: true, target: null); + + var context = new ChannelHealthContext( + channel.TenantId, + channel, + channel.Name, + DateTimeOffset.UtcNow, + "trace-slack-003"); + + var result = await Provider.CheckAsync(context, CancellationToken.None); + + Assert.Equal(ChannelHealthStatus.Unhealthy, result.Status); + Assert.Equal("false", result.Metadata["slack.validation.targetPresent"]); + } + + private static NotifyChannel CreateChannel(bool enabled, string? target) + { + return NotifyChannel.Create( + channelId: "channel-slack-sec-ops", + tenantId: "tenant-sec", + name: "slack:sec-ops", + type: NotifyChannelType.Slack, + config: NotifyChannelConfig.Create( + secretRef: "ref://notify/channels/slack/sec-ops", + target: target, + properties: new Dictionary<string, string> + { + ["workspace"] = "stellaops-sec", + ["botToken"] = "xoxb-123456789012-abcdefghijklmnop" + }), + enabled: enabled); + } + + private static string ComputeSecretHash(string secretRef) + { + var bytes = System.Text.Encoding.UTF8.GetBytes(secretRef.Trim()); + var hash = System.Security.Cryptography.SHA256.HashData(bytes); + return Convert.ToHexString(hash.AsSpan(0, 8)).ToLowerInvariant(); + } +} diff --git a/src/StellaOps.Notify.Connectors.Slack.Tests/SlackChannelTestProviderTests.cs b/src/Notify/__Tests/StellaOps.Notify.Connectors.Slack.Tests/SlackChannelTestProviderTests.cs similarity index 97% rename from src/StellaOps.Notify.Connectors.Slack.Tests/SlackChannelTestProviderTests.cs rename to src/Notify/__Tests/StellaOps.Notify.Connectors.Slack.Tests/SlackChannelTestProviderTests.cs index 5b4265a5..f605a06b 100644 --- a/src/StellaOps.Notify.Connectors.Slack.Tests/SlackChannelTestProviderTests.cs +++ b/src/Notify/__Tests/StellaOps.Notify.Connectors.Slack.Tests/SlackChannelTestProviderTests.cs @@ -1,113 +1,113 @@ -using System; -using System.Collections.Generic; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using StellaOps.Notify.Engine; -using StellaOps.Notify.Models; -using Xunit; - -namespace StellaOps.Notify.Connectors.Slack.Tests; - -public sealed class SlackChannelTestProviderTests -{ - private static readonly ChannelTestPreviewRequest EmptyRequest = new( - TargetOverride: null, - TemplateId: null, - Title: null, - Summary: null, - Body: null, - TextBody: null, - Locale: null, - Metadata: new Dictionary<string, string>(), - Attachments: new List<string>()); - - [Fact] - public async Task BuildPreviewAsync_ProducesDeterministicMetadata() - { - var provider = new SlackChannelTestProvider(); - var channel = CreateChannel(properties: new Dictionary<string, string> - { - ["workspace"] = "stellaops-sec", - ["botToken"] = "xoxb-123456789012-abcdefghijklmnop" - }); - - var context = new ChannelTestPreviewContext( - channel.TenantId, - channel, - channel.Config.Target!, - EmptyRequest, - Timestamp: new DateTimeOffset(2025, 10, 20, 12, 00, 00, TimeSpan.Zero), - TraceId: "trace-001"); - - var result = await provider.BuildPreviewAsync(context, CancellationToken.None); - - Assert.Equal("slack", result.Preview.ChannelType.ToString().ToLowerInvariant()); - Assert.Equal(channel.Config.Target, result.Preview.Target); - 
Assert.Equal("chat:write,chat:write.public", result.Metadata["slack.scopes.required"]); - Assert.Equal("stellaops-sec", result.Metadata["slack.config.workspace"]); - - var redactedToken = result.Metadata["slack.config.botToken"]; - Assert.DoesNotContain("abcdefghijklmnop", redactedToken); - Assert.StartsWith("xoxb-", redactedToken); - Assert.EndsWith("mnop", redactedToken); - - using var parsed = JsonDocument.Parse(result.Preview.Body); - var contextText = parsed.RootElement - .GetProperty("blocks")[1] - .GetProperty("elements")[0] - .GetProperty("text") - .GetString(); - Assert.NotNull(contextText); - Assert.Contains("trace-001", contextText); - - Assert.Equal(ComputeSecretHash(channel.Config.SecretRef), result.Metadata["slack.secretRef.hash"]); - } - - [Fact] - public async Task BuildPreviewAsync_RedactsSensitiveProperties() - { - var provider = new SlackChannelTestProvider(); - var channel = CreateChannel(properties: new Dictionary<string, string> - { - ["SigningSecret"] = "whsec_super-secret-value", - ["apiToken"] = "xoxs-000000000000-super", - ["endpoint"] = "https://hooks.slack.com/services/T000/B000/AAA" - }); - - var context = new ChannelTestPreviewContext( - channel.TenantId, - channel, - channel.Config.Target!, - EmptyRequest, - Timestamp: DateTimeOffset.UtcNow, - TraceId: "trace-002"); - - var result = await provider.BuildPreviewAsync(context, CancellationToken.None); - - Assert.Equal("***", result.Metadata["slack.config.SigningSecret"]); - Assert.DoesNotContain("xoxs-000000000000-super", result.Metadata["slack.config.apiToken"]); - Assert.Equal("https://hooks.slack.com/services/T000/B000/AAA", result.Metadata["slack.config.endpoint"]); - } - - private static NotifyChannel CreateChannel(IDictionary<string, string> properties) - { - return NotifyChannel.Create( - channelId: "channel-slack-sec-ops", - tenantId: "tenant-sec", - name: "slack:sec-ops", - type: NotifyChannelType.Slack, - config: NotifyChannelConfig.Create( - secretRef: "ref://notify/channels/slack/sec-ops", - target: "#sec-ops", - properties: properties)); - } - - private static string ComputeSecretHash(string secretRef) - { - using var sha = System.Security.Cryptography.SHA256.Create(); - var bytes = System.Text.Encoding.UTF8.GetBytes(secretRef.Trim()); - var hash = sha.ComputeHash(bytes); - return System.Convert.ToHexString(hash, 0, 8).ToLowerInvariant(); - } -} +using System; +using System.Collections.Generic; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Notify.Engine; +using StellaOps.Notify.Models; +using Xunit; + +namespace StellaOps.Notify.Connectors.Slack.Tests; + +public sealed class SlackChannelTestProviderTests +{ + private static readonly ChannelTestPreviewRequest EmptyRequest = new( + TargetOverride: null, + TemplateId: null, + Title: null, + Summary: null, + Body: null, + TextBody: null, + Locale: null, + Metadata: new Dictionary<string, string>(), + Attachments: new List<string>()); + + [Fact] + public async Task BuildPreviewAsync_ProducesDeterministicMetadata() + { + var provider = new SlackChannelTestProvider(); + var channel = CreateChannel(properties: new Dictionary<string, string> + { + ["workspace"] = "stellaops-sec", + ["botToken"] = "xoxb-123456789012-abcdefghijklmnop" + }); + + var context = new ChannelTestPreviewContext( + channel.TenantId, + channel, + channel.Config.Target!, + EmptyRequest, + Timestamp: new DateTimeOffset(2025, 10, 20, 12, 00, 00, TimeSpan.Zero), + TraceId: "trace-001"); + + var result = await 
provider.BuildPreviewAsync(context, CancellationToken.None); + + Assert.Equal("slack", result.Preview.ChannelType.ToString().ToLowerInvariant()); + Assert.Equal(channel.Config.Target, result.Preview.Target); + Assert.Equal("chat:write,chat:write.public", result.Metadata["slack.scopes.required"]); + Assert.Equal("stellaops-sec", result.Metadata["slack.config.workspace"]); + + var redactedToken = result.Metadata["slack.config.botToken"]; + Assert.DoesNotContain("abcdefghijklmnop", redactedToken); + Assert.StartsWith("xoxb-", redactedToken); + Assert.EndsWith("mnop", redactedToken); + + using var parsed = JsonDocument.Parse(result.Preview.Body); + var contextText = parsed.RootElement + .GetProperty("blocks")[1] + .GetProperty("elements")[0] + .GetProperty("text") + .GetString(); + Assert.NotNull(contextText); + Assert.Contains("trace-001", contextText); + + Assert.Equal(ComputeSecretHash(channel.Config.SecretRef), result.Metadata["slack.secretRef.hash"]); + } + + [Fact] + public async Task BuildPreviewAsync_RedactsSensitiveProperties() + { + var provider = new SlackChannelTestProvider(); + var channel = CreateChannel(properties: new Dictionary<string, string> + { + ["SigningSecret"] = "whsec_super-secret-value", + ["apiToken"] = "xoxs-000000000000-super", + ["endpoint"] = "https://hooks.slack.com/services/T000/B000/AAA" + }); + + var context = new ChannelTestPreviewContext( + channel.TenantId, + channel, + channel.Config.Target!, + EmptyRequest, + Timestamp: DateTimeOffset.UtcNow, + TraceId: "trace-002"); + + var result = await provider.BuildPreviewAsync(context, CancellationToken.None); + + Assert.Equal("***", result.Metadata["slack.config.SigningSecret"]); + Assert.DoesNotContain("xoxs-000000000000-super", result.Metadata["slack.config.apiToken"]); + Assert.Equal("https://hooks.slack.com/services/T000/B000/AAA", result.Metadata["slack.config.endpoint"]); + } + + private static NotifyChannel CreateChannel(IDictionary<string, string> properties) + { + return NotifyChannel.Create( + channelId: "channel-slack-sec-ops", + tenantId: "tenant-sec", + name: "slack:sec-ops", + type: NotifyChannelType.Slack, + config: NotifyChannelConfig.Create( + secretRef: "ref://notify/channels/slack/sec-ops", + target: "#sec-ops", + properties: properties)); + } + + private static string ComputeSecretHash(string secretRef) + { + using var sha = System.Security.Cryptography.SHA256.Create(); + var bytes = System.Text.Encoding.UTF8.GetBytes(secretRef.Trim()); + var hash = sha.ComputeHash(bytes); + return System.Convert.ToHexString(hash, 0, 8).ToLowerInvariant(); + } +} diff --git a/src/StellaOps.Notify.Connectors.Slack.Tests/StellaOps.Notify.Connectors.Slack.Tests.csproj b/src/Notify/__Tests/StellaOps.Notify.Connectors.Slack.Tests/StellaOps.Notify.Connectors.Slack.Tests.csproj similarity index 60% rename from src/StellaOps.Notify.Connectors.Slack.Tests/StellaOps.Notify.Connectors.Slack.Tests.csproj rename to src/Notify/__Tests/StellaOps.Notify.Connectors.Slack.Tests/StellaOps.Notify.Connectors.Slack.Tests.csproj index 6288a185..06ed6f3d 100644 --- a/src/StellaOps.Notify.Connectors.Slack.Tests/StellaOps.Notify.Connectors.Slack.Tests.csproj +++ b/src/Notify/__Tests/StellaOps.Notify.Connectors.Slack.Tests/StellaOps.Notify.Connectors.Slack.Tests.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -7,9 +8,9 @@ </PropertyGroup> <ItemGroup> - <ProjectReference 
Include="../StellaOps.Notify.Connectors.Slack/StellaOps.Notify.Connectors.Slack.csproj" /> - <ProjectReference Include="../StellaOps.Notify.Engine/StellaOps.Notify.Engine.csproj" /> - <ProjectReference Include="../StellaOps.Notify.Models/StellaOps.Notify.Models.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Notify.Connectors.Slack/StellaOps.Notify.Connectors.Slack.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Notify.Engine/StellaOps.Notify.Engine.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Notify.Models/StellaOps.Notify.Models.csproj" /> </ItemGroup> <ItemGroup> @@ -18,4 +19,4 @@ <PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" /> <PackageReference Include="coverlet.collector" Version="6.0.4" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Notify.Connectors.Teams.Tests/StellaOps.Notify.Connectors.Teams.Tests.csproj b/src/Notify/__Tests/StellaOps.Notify.Connectors.Teams.Tests/StellaOps.Notify.Connectors.Teams.Tests.csproj similarity index 60% rename from src/StellaOps.Notify.Connectors.Teams.Tests/StellaOps.Notify.Connectors.Teams.Tests.csproj rename to src/Notify/__Tests/StellaOps.Notify.Connectors.Teams.Tests/StellaOps.Notify.Connectors.Teams.Tests.csproj index 69915aeb..a2c7f002 100644 --- a/src/StellaOps.Notify.Connectors.Teams.Tests/StellaOps.Notify.Connectors.Teams.Tests.csproj +++ b/src/Notify/__Tests/StellaOps.Notify.Connectors.Teams.Tests/StellaOps.Notify.Connectors.Teams.Tests.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -7,9 +8,9 @@ </PropertyGroup> <ItemGroup> - <ProjectReference Include="../StellaOps.Notify.Connectors.Teams/StellaOps.Notify.Connectors.Teams.csproj" /> - <ProjectReference Include="../StellaOps.Notify.Engine/StellaOps.Notify.Engine.csproj" /> - <ProjectReference Include="../StellaOps.Notify.Models/StellaOps.Notify.Models.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Notify.Connectors.Teams/StellaOps.Notify.Connectors.Teams.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Notify.Engine/StellaOps.Notify.Engine.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Notify.Models/StellaOps.Notify.Models.csproj" /> </ItemGroup> <ItemGroup> @@ -18,4 +19,4 @@ <PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" /> <PackageReference Include="coverlet.collector" Version="6.0.4" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Notify.Connectors.Teams.Tests/TeamsChannelHealthProviderTests.cs b/src/Notify/__Tests/StellaOps.Notify.Connectors.Teams.Tests/TeamsChannelHealthProviderTests.cs similarity index 97% rename from src/StellaOps.Notify.Connectors.Teams.Tests/TeamsChannelHealthProviderTests.cs rename to src/Notify/__Tests/StellaOps.Notify.Connectors.Teams.Tests/TeamsChannelHealthProviderTests.cs index dd9bd279..f267b89d 100644 --- a/src/StellaOps.Notify.Connectors.Teams.Tests/TeamsChannelHealthProviderTests.cs +++ b/src/Notify/__Tests/StellaOps.Notify.Connectors.Teams.Tests/TeamsChannelHealthProviderTests.cs @@ -1,98 +1,98 @@ -using System; -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; -using StellaOps.Notify.Engine; -using StellaOps.Notify.Models; -using Xunit; - -namespace StellaOps.Notify.Connectors.Teams.Tests; - -public sealed class TeamsChannelHealthProviderTests 
-{ - private static readonly TeamsChannelHealthProvider Provider = new(); - - [Fact] - public async Task CheckAsync_ReturnsHealthyWithMetadata() - { - var channel = CreateChannel(enabled: true, endpoint: "https://contoso.webhook.office.com/webhook"); - - var context = new ChannelHealthContext( - channel.TenantId, - channel, - channel.Config.Endpoint!, - new DateTimeOffset(2025, 10, 20, 12, 0, 0, TimeSpan.Zero), - "trace-health-001"); - - var result = await Provider.CheckAsync(context, CancellationToken.None); - - Assert.Equal(ChannelHealthStatus.Healthy, result.Status); - Assert.Equal("Teams channel configuration validated.", result.Message); - Assert.Equal("true", result.Metadata["teams.channel.enabled"]); - Assert.Equal("true", result.Metadata["teams.validation.targetPresent"]); - Assert.Equal(channel.Config.Endpoint, result.Metadata["teams.webhook"]); - Assert.Equal(ComputeSecretHash(channel.Config.SecretRef), result.Metadata["teams.secretRef.hash"]); - } - - [Fact] - public async Task CheckAsync_ReturnsDegradedWhenDisabled() - { - var channel = CreateChannel(enabled: false, endpoint: "https://contoso.webhook.office.com/webhook"); - - var context = new ChannelHealthContext( - channel.TenantId, - channel, - channel.Config.Endpoint!, - DateTimeOffset.UtcNow, - "trace-health-002"); - - var result = await Provider.CheckAsync(context, CancellationToken.None); - - Assert.Equal(ChannelHealthStatus.Degraded, result.Status); - Assert.Equal("false", result.Metadata["teams.channel.enabled"]); - } - - [Fact] - public async Task CheckAsync_ReturnsUnhealthyWhenTargetMissing() - { - var channel = CreateChannel(enabled: true, endpoint: null); - - var context = new ChannelHealthContext( - channel.TenantId, - channel, - channel.Name, - DateTimeOffset.UtcNow, - "trace-health-003"); - - var result = await Provider.CheckAsync(context, CancellationToken.None); - - Assert.Equal(ChannelHealthStatus.Unhealthy, result.Status); - Assert.Equal("false", result.Metadata["teams.validation.targetPresent"]); - } - - private static NotifyChannel CreateChannel(bool enabled, string? 
endpoint) - { - return NotifyChannel.Create( - channelId: "channel-teams-sec-ops", - tenantId: "tenant-sec", - name: "teams:sec-ops", - type: NotifyChannelType.Teams, - config: NotifyChannelConfig.Create( - secretRef: "ref://notify/channels/teams/sec-ops", - target: null, - endpoint: endpoint, - properties: new Dictionary<string, string> - { - ["tenant"] = "contoso.onmicrosoft.com", - ["webhookKey"] = "abcdef0123456789" - }), - enabled: enabled); - } - - private static string ComputeSecretHash(string secretRef) - { - var bytes = System.Text.Encoding.UTF8.GetBytes(secretRef.Trim()); - var hash = System.Security.Cryptography.SHA256.HashData(bytes); - return Convert.ToHexString(hash.AsSpan(0, 8)).ToLowerInvariant(); - } -} +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Notify.Engine; +using StellaOps.Notify.Models; +using Xunit; + +namespace StellaOps.Notify.Connectors.Teams.Tests; + +public sealed class TeamsChannelHealthProviderTests +{ + private static readonly TeamsChannelHealthProvider Provider = new(); + + [Fact] + public async Task CheckAsync_ReturnsHealthyWithMetadata() + { + var channel = CreateChannel(enabled: true, endpoint: "https://contoso.webhook.office.com/webhook"); + + var context = new ChannelHealthContext( + channel.TenantId, + channel, + channel.Config.Endpoint!, + new DateTimeOffset(2025, 10, 20, 12, 0, 0, TimeSpan.Zero), + "trace-health-001"); + + var result = await Provider.CheckAsync(context, CancellationToken.None); + + Assert.Equal(ChannelHealthStatus.Healthy, result.Status); + Assert.Equal("Teams channel configuration validated.", result.Message); + Assert.Equal("true", result.Metadata["teams.channel.enabled"]); + Assert.Equal("true", result.Metadata["teams.validation.targetPresent"]); + Assert.Equal(channel.Config.Endpoint, result.Metadata["teams.webhook"]); + Assert.Equal(ComputeSecretHash(channel.Config.SecretRef), result.Metadata["teams.secretRef.hash"]); + } + + [Fact] + public async Task CheckAsync_ReturnsDegradedWhenDisabled() + { + var channel = CreateChannel(enabled: false, endpoint: "https://contoso.webhook.office.com/webhook"); + + var context = new ChannelHealthContext( + channel.TenantId, + channel, + channel.Config.Endpoint!, + DateTimeOffset.UtcNow, + "trace-health-002"); + + var result = await Provider.CheckAsync(context, CancellationToken.None); + + Assert.Equal(ChannelHealthStatus.Degraded, result.Status); + Assert.Equal("false", result.Metadata["teams.channel.enabled"]); + } + + [Fact] + public async Task CheckAsync_ReturnsUnhealthyWhenTargetMissing() + { + var channel = CreateChannel(enabled: true, endpoint: null); + + var context = new ChannelHealthContext( + channel.TenantId, + channel, + channel.Name, + DateTimeOffset.UtcNow, + "trace-health-003"); + + var result = await Provider.CheckAsync(context, CancellationToken.None); + + Assert.Equal(ChannelHealthStatus.Unhealthy, result.Status); + Assert.Equal("false", result.Metadata["teams.validation.targetPresent"]); + } + + private static NotifyChannel CreateChannel(bool enabled, string? 
endpoint) + { + return NotifyChannel.Create( + channelId: "channel-teams-sec-ops", + tenantId: "tenant-sec", + name: "teams:sec-ops", + type: NotifyChannelType.Teams, + config: NotifyChannelConfig.Create( + secretRef: "ref://notify/channels/teams/sec-ops", + target: null, + endpoint: endpoint, + properties: new Dictionary<string, string> + { + ["tenant"] = "contoso.onmicrosoft.com", + ["webhookKey"] = "abcdef0123456789" + }), + enabled: enabled); + } + + private static string ComputeSecretHash(string secretRef) + { + var bytes = System.Text.Encoding.UTF8.GetBytes(secretRef.Trim()); + var hash = System.Security.Cryptography.SHA256.HashData(bytes); + return Convert.ToHexString(hash.AsSpan(0, 8)).ToLowerInvariant(); + } +} diff --git a/src/StellaOps.Notify.Connectors.Teams.Tests/TeamsChannelTestProviderTests.cs b/src/Notify/__Tests/StellaOps.Notify.Connectors.Teams.Tests/TeamsChannelTestProviderTests.cs similarity index 97% rename from src/StellaOps.Notify.Connectors.Teams.Tests/TeamsChannelTestProviderTests.cs rename to src/Notify/__Tests/StellaOps.Notify.Connectors.Teams.Tests/TeamsChannelTestProviderTests.cs index d888634e..cf336e1e 100644 --- a/src/StellaOps.Notify.Connectors.Teams.Tests/TeamsChannelTestProviderTests.cs +++ b/src/Notify/__Tests/StellaOps.Notify.Connectors.Teams.Tests/TeamsChannelTestProviderTests.cs @@ -1,135 +1,135 @@ -using System; -using System.Collections.Generic; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using StellaOps.Notify.Engine; -using StellaOps.Notify.Models; -using Xunit; - -namespace StellaOps.Notify.Connectors.Teams.Tests; - -public sealed class TeamsChannelTestProviderTests -{ - [Fact] - public async Task BuildPreviewAsync_EmitsFallbackMetadata() - { - var provider = new TeamsChannelTestProvider(); - var channel = CreateChannel( - endpoint: "https://contoso.webhook.office.com/webhookb2/tenant@uuid/IncomingWebhook/abcdef0123456789", - properties: new Dictionary<string, string> - { - ["team"] = "secops", - ["webhookKey"] = "s3cr3t-value-with-key-fragment", - ["tenant"] = "contoso.onmicrosoft.com" - }); - - var request = new ChannelTestPreviewRequest( - TargetOverride: null, - TemplateId: null, - Title: "Notify Critical Finding", - Summary: "Critical container vulnerability detected.", - Body: "CVSS 9.8 vulnerability detected in ubuntu:22.04 base layer.", - TextBody: null, - Locale: "en-US", - Metadata: new Dictionary<string, string>(), - Attachments: new List<string>()); - - var context = new ChannelTestPreviewContext( - channel.TenantId, - channel, - channel.Config.Endpoint!, - request, - new DateTimeOffset(2025, 10, 20, 10, 0, 0, TimeSpan.Zero), - TraceId: "trace-teams-001"); - - var result = await provider.BuildPreviewAsync(context, CancellationToken.None); - - Assert.Equal(NotifyChannelType.Teams, result.Preview.ChannelType); - Assert.Equal(channel.Config.Endpoint, result.Preview.Target); - Assert.Equal("Critical container vulnerability detected.", result.Preview.Summary); - - Assert.NotNull(result.Metadata); - Assert.Equal(channel.Config.Endpoint, result.Metadata["teams.webhook"]); - Assert.Equal("1.5", result.Metadata["teams.card.version"]); - - var fallback = result.Metadata["teams.fallbackText"]; - Assert.Equal(result.Preview.TextBody, fallback); - Assert.Equal("Critical container vulnerability detected.", fallback); - - Assert.Equal(ComputeSecretHash(channel.Config.SecretRef), result.Metadata["teams.secretRef.hash"]); - Assert.Equal("***", result.Metadata["teams.config.webhookKey"]); - 
Assert.Equal("contoso.onmicrosoft.com", result.Metadata["teams.config.tenant"]); - Assert.Equal(channel.Config.Endpoint, result.Metadata["teams.config.endpoint"]); - - using var payload = JsonDocument.Parse(result.Preview.Body); - Assert.Equal("message", payload.RootElement.GetProperty("type").GetString()); - Assert.Equal(result.Preview.TextBody, payload.RootElement.GetProperty("text").GetString()); - Assert.Equal(result.Preview.Summary, payload.RootElement.GetProperty("summary").GetString()); - - var attachments = payload.RootElement.GetProperty("attachments"); - Assert.True(attachments.GetArrayLength() > 0); - Assert.Equal( - "AdaptiveCard", - attachments[0].GetProperty("content").GetProperty("type").GetString()); - } - - [Fact] - public async Task BuildPreviewAsync_TruncatesLongFallback() - { - var provider = new TeamsChannelTestProvider(); - var channel = CreateChannel( - endpoint: "https://contoso.webhook.office.com/webhookb2/tenant@uuid/IncomingWebhook/abcdef0123456789", - properties: new Dictionary<string, string>()); - - var longText = new string('A', 600); - - var request = new ChannelTestPreviewRequest( - TargetOverride: null, - TemplateId: null, - Title: null, - Summary: null, - Body: null, - TextBody: longText, - Locale: null, - Metadata: new Dictionary<string, string>(), - Attachments: new List<string>()); - - var context = new ChannelTestPreviewContext( - channel.TenantId, - channel, - channel.Config.Endpoint!, - request, - DateTimeOffset.UtcNow, - TraceId: "trace-teams-002"); - - var result = await provider.BuildPreviewAsync(context, CancellationToken.None); - - var metadata = Assert.IsAssignableFrom<IReadOnlyDictionary<string, string>>(result.Metadata); - var fallback = Assert.IsType<string>(result.Preview.TextBody); - Assert.Equal(512, fallback.Length); - Assert.Equal(fallback, metadata["teams.fallbackText"]); - Assert.StartsWith(new string('A', 512), fallback); - } - - private static NotifyChannel CreateChannel(string endpoint, IDictionary<string, string> properties) - { - return NotifyChannel.Create( - channelId: "channel-teams-sec-ops", - tenantId: "tenant-sec", - name: "teams:sec-ops", - type: NotifyChannelType.Teams, - config: NotifyChannelConfig.Create( - secretRef: "ref://notify/channels/teams/sec-ops", - target: null, - endpoint: endpoint, - properties: properties)); - } - - private static string ComputeSecretHash(string secretRef) - { - var bytes = System.Text.Encoding.UTF8.GetBytes(secretRef.Trim()); - var hash = System.Security.Cryptography.SHA256.HashData(bytes); - return Convert.ToHexString(hash.AsSpan(0, 8)).ToLowerInvariant(); - } -} +using System; +using System.Collections.Generic; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Notify.Engine; +using StellaOps.Notify.Models; +using Xunit; + +namespace StellaOps.Notify.Connectors.Teams.Tests; + +public sealed class TeamsChannelTestProviderTests +{ + [Fact] + public async Task BuildPreviewAsync_EmitsFallbackMetadata() + { + var provider = new TeamsChannelTestProvider(); + var channel = CreateChannel( + endpoint: "https://contoso.webhook.office.com/webhookb2/tenant@uuid/IncomingWebhook/abcdef0123456789", + properties: new Dictionary<string, string> + { + ["team"] = "secops", + ["webhookKey"] = "s3cr3t-value-with-key-fragment", + ["tenant"] = "contoso.onmicrosoft.com" + }); + + var request = new ChannelTestPreviewRequest( + TargetOverride: null, + TemplateId: null, + Title: "Notify Critical Finding", + Summary: "Critical container vulnerability detected.", + 
Body: "CVSS 9.8 vulnerability detected in ubuntu:22.04 base layer.", + TextBody: null, + Locale: "en-US", + Metadata: new Dictionary<string, string>(), + Attachments: new List<string>()); + + var context = new ChannelTestPreviewContext( + channel.TenantId, + channel, + channel.Config.Endpoint!, + request, + new DateTimeOffset(2025, 10, 20, 10, 0, 0, TimeSpan.Zero), + TraceId: "trace-teams-001"); + + var result = await provider.BuildPreviewAsync(context, CancellationToken.None); + + Assert.Equal(NotifyChannelType.Teams, result.Preview.ChannelType); + Assert.Equal(channel.Config.Endpoint, result.Preview.Target); + Assert.Equal("Critical container vulnerability detected.", result.Preview.Summary); + + Assert.NotNull(result.Metadata); + Assert.Equal(channel.Config.Endpoint, result.Metadata["teams.webhook"]); + Assert.Equal("1.5", result.Metadata["teams.card.version"]); + + var fallback = result.Metadata["teams.fallbackText"]; + Assert.Equal(result.Preview.TextBody, fallback); + Assert.Equal("Critical container vulnerability detected.", fallback); + + Assert.Equal(ComputeSecretHash(channel.Config.SecretRef), result.Metadata["teams.secretRef.hash"]); + Assert.Equal("***", result.Metadata["teams.config.webhookKey"]); + Assert.Equal("contoso.onmicrosoft.com", result.Metadata["teams.config.tenant"]); + Assert.Equal(channel.Config.Endpoint, result.Metadata["teams.config.endpoint"]); + + using var payload = JsonDocument.Parse(result.Preview.Body); + Assert.Equal("message", payload.RootElement.GetProperty("type").GetString()); + Assert.Equal(result.Preview.TextBody, payload.RootElement.GetProperty("text").GetString()); + Assert.Equal(result.Preview.Summary, payload.RootElement.GetProperty("summary").GetString()); + + var attachments = payload.RootElement.GetProperty("attachments"); + Assert.True(attachments.GetArrayLength() > 0); + Assert.Equal( + "AdaptiveCard", + attachments[0].GetProperty("content").GetProperty("type").GetString()); + } + + [Fact] + public async Task BuildPreviewAsync_TruncatesLongFallback() + { + var provider = new TeamsChannelTestProvider(); + var channel = CreateChannel( + endpoint: "https://contoso.webhook.office.com/webhookb2/tenant@uuid/IncomingWebhook/abcdef0123456789", + properties: new Dictionary<string, string>()); + + var longText = new string('A', 600); + + var request = new ChannelTestPreviewRequest( + TargetOverride: null, + TemplateId: null, + Title: null, + Summary: null, + Body: null, + TextBody: longText, + Locale: null, + Metadata: new Dictionary<string, string>(), + Attachments: new List<string>()); + + var context = new ChannelTestPreviewContext( + channel.TenantId, + channel, + channel.Config.Endpoint!, + request, + DateTimeOffset.UtcNow, + TraceId: "trace-teams-002"); + + var result = await provider.BuildPreviewAsync(context, CancellationToken.None); + + var metadata = Assert.IsAssignableFrom<IReadOnlyDictionary<string, string>>(result.Metadata); + var fallback = Assert.IsType<string>(result.Preview.TextBody); + Assert.Equal(512, fallback.Length); + Assert.Equal(fallback, metadata["teams.fallbackText"]); + Assert.StartsWith(new string('A', 512), fallback); + } + + private static NotifyChannel CreateChannel(string endpoint, IDictionary<string, string> properties) + { + return NotifyChannel.Create( + channelId: "channel-teams-sec-ops", + tenantId: "tenant-sec", + name: "teams:sec-ops", + type: NotifyChannelType.Teams, + config: NotifyChannelConfig.Create( + secretRef: "ref://notify/channels/teams/sec-ops", + target: null, + endpoint: endpoint, + properties: 
properties)); + } + + private static string ComputeSecretHash(string secretRef) + { + var bytes = System.Text.Encoding.UTF8.GetBytes(secretRef.Trim()); + var hash = System.Security.Cryptography.SHA256.HashData(bytes); + return Convert.ToHexString(hash.AsSpan(0, 8)).ToLowerInvariant(); + } +} diff --git a/src/StellaOps.Notify.Models.Tests/DocSampleTests.cs b/src/Notify/__Tests/StellaOps.Notify.Models.Tests/DocSampleTests.cs similarity index 100% rename from src/StellaOps.Notify.Models.Tests/DocSampleTests.cs rename to src/Notify/__Tests/StellaOps.Notify.Models.Tests/DocSampleTests.cs diff --git a/src/StellaOps.Notify.Models.Tests/NotifyCanonicalJsonSerializerTests.cs b/src/Notify/__Tests/StellaOps.Notify.Models.Tests/NotifyCanonicalJsonSerializerTests.cs similarity index 100% rename from src/StellaOps.Notify.Models.Tests/NotifyCanonicalJsonSerializerTests.cs rename to src/Notify/__Tests/StellaOps.Notify.Models.Tests/NotifyCanonicalJsonSerializerTests.cs diff --git a/src/StellaOps.Notify.Models.Tests/NotifyDeliveryTests.cs b/src/Notify/__Tests/StellaOps.Notify.Models.Tests/NotifyDeliveryTests.cs similarity index 100% rename from src/StellaOps.Notify.Models.Tests/NotifyDeliveryTests.cs rename to src/Notify/__Tests/StellaOps.Notify.Models.Tests/NotifyDeliveryTests.cs diff --git a/src/StellaOps.Notify.Models.Tests/NotifyRuleTests.cs b/src/Notify/__Tests/StellaOps.Notify.Models.Tests/NotifyRuleTests.cs similarity index 100% rename from src/StellaOps.Notify.Models.Tests/NotifyRuleTests.cs rename to src/Notify/__Tests/StellaOps.Notify.Models.Tests/NotifyRuleTests.cs diff --git a/src/StellaOps.Notify.Models.Tests/NotifySchemaMigrationTests.cs b/src/Notify/__Tests/StellaOps.Notify.Models.Tests/NotifySchemaMigrationTests.cs similarity index 100% rename from src/StellaOps.Notify.Models.Tests/NotifySchemaMigrationTests.cs rename to src/Notify/__Tests/StellaOps.Notify.Models.Tests/NotifySchemaMigrationTests.cs diff --git a/src/StellaOps.Notify.Models.Tests/PlatformEventSamplesTests.cs b/src/Notify/__Tests/StellaOps.Notify.Models.Tests/PlatformEventSamplesTests.cs similarity index 100% rename from src/StellaOps.Notify.Models.Tests/PlatformEventSamplesTests.cs rename to src/Notify/__Tests/StellaOps.Notify.Models.Tests/PlatformEventSamplesTests.cs diff --git a/src/StellaOps.Notify.Models.Tests/PlatformEventSchemaValidationTests.cs b/src/Notify/__Tests/StellaOps.Notify.Models.Tests/PlatformEventSchemaValidationTests.cs similarity index 100% rename from src/StellaOps.Notify.Models.Tests/PlatformEventSchemaValidationTests.cs rename to src/Notify/__Tests/StellaOps.Notify.Models.Tests/PlatformEventSchemaValidationTests.cs diff --git a/src/StellaOps.Notify.Models.Tests/StellaOps.Notify.Models.Tests.csproj b/src/Notify/__Tests/StellaOps.Notify.Models.Tests/StellaOps.Notify.Models.Tests.csproj similarity index 81% rename from src/StellaOps.Notify.Models.Tests/StellaOps.Notify.Models.Tests.csproj rename to src/Notify/__Tests/StellaOps.Notify.Models.Tests/StellaOps.Notify.Models.Tests.csproj index 25ef5cb4..a045323d 100644 --- a/src/StellaOps.Notify.Models.Tests/StellaOps.Notify.Models.Tests.csproj +++ b/src/Notify/__Tests/StellaOps.Notify.Models.Tests/StellaOps.Notify.Models.Tests.csproj @@ -1,12 +1,13 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + 
<TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> - <ProjectReference Include="../StellaOps.Notify.Models/StellaOps.Notify.Models.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Notify.Models/StellaOps.Notify.Models.csproj" /> </ItemGroup> <ItemGroup> @@ -21,4 +22,4 @@ <CopyToOutputDirectory>Always</CopyToOutputDirectory> </None> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Notify.Queue.Tests/NatsNotifyDeliveryQueueTests.cs b/src/Notify/__Tests/StellaOps.Notify.Queue.Tests/NatsNotifyDeliveryQueueTests.cs similarity index 97% rename from src/StellaOps.Notify.Queue.Tests/NatsNotifyDeliveryQueueTests.cs rename to src/Notify/__Tests/StellaOps.Notify.Queue.Tests/NatsNotifyDeliveryQueueTests.cs index 376c61de..b434ed27 100644 --- a/src/StellaOps.Notify.Queue.Tests/NatsNotifyDeliveryQueueTests.cs +++ b/src/Notify/__Tests/StellaOps.Notify.Queue.Tests/NatsNotifyDeliveryQueueTests.cs @@ -1,223 +1,223 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text.Json.Nodes; -using System.Threading.Tasks; -using DotNet.Testcontainers.Builders; -using DotNet.Testcontainers.Containers; -using DotNet.Testcontainers.Configurations; -using FluentAssertions; -using Microsoft.Extensions.Logging.Abstractions; -using NATS.Client.Core; -using NATS.Client.JetStream; -using NATS.Client.JetStream.Models; -using StellaOps.Notify.Models; -using StellaOps.Notify.Queue; -using StellaOps.Notify.Queue.Nats; -using Xunit; - -namespace StellaOps.Notify.Queue.Tests; - -public sealed class NatsNotifyDeliveryQueueTests : IAsyncLifetime -{ - private readonly TestcontainersContainer _nats; - private string? 
_skipReason; - - public NatsNotifyDeliveryQueueTests() - { - _nats = new TestcontainersBuilder<TestcontainersContainer>() - .WithImage("nats:2.10-alpine") - .WithCleanUp(true) - .WithName($"nats-notify-delivery-{Guid.NewGuid():N}") - .WithPortBinding(4222, true) - .WithCommand("--jetstream") - .WithWaitStrategy(Wait.ForUnixContainer().UntilPortIsAvailable(4222)) - .Build(); - } - - public async Task InitializeAsync() - { - try - { - await _nats.StartAsync(); - } - catch (Exception ex) - { - _skipReason = $"NATS-backed delivery tests skipped: {ex.Message}"; - } - } - - public async Task DisposeAsync() - { - if (_skipReason is not null) - { - return; - } - - await _nats.DisposeAsync().ConfigureAwait(false); - } - - [Fact] - public async Task Publish_ShouldDeduplicate_ByDeliveryId() - { - if (SkipIfUnavailable()) - { - return; - } - - var options = CreateOptions(); - await using var queue = CreateQueue(options); - - var delivery = TestData.CreateDelivery("tenant-a"); - var message = new NotifyDeliveryQueueMessage( - delivery, - channelId: "chan-a", - channelType: NotifyChannelType.Slack); - - var first = await queue.PublishAsync(message); - first.Deduplicated.Should().BeFalse(); - - var second = await queue.PublishAsync(message); - second.Deduplicated.Should().BeTrue(); - second.MessageId.Should().Be(first.MessageId); - } - - [Fact] - public async Task Release_Retry_ShouldReschedule() - { - if (SkipIfUnavailable()) - { - return; - } - - var options = CreateOptions(); - await using var queue = CreateQueue(options); - - await queue.PublishAsync(new NotifyDeliveryQueueMessage( - TestData.CreateDelivery(), - channelId: "chan-retry", - channelType: NotifyChannelType.Teams)); - - var lease = (await queue.LeaseAsync(new NotifyQueueLeaseRequest("worker-retry", 1, TimeSpan.FromSeconds(2)))).Single(); - - await lease.ReleaseAsync(NotifyQueueReleaseDisposition.Retry); - - var retried = (await queue.LeaseAsync(new NotifyQueueLeaseRequest("worker-retry", 1, TimeSpan.FromSeconds(2)))).Single(); - retried.Attempt.Should().BeGreaterThan(lease.Attempt); - - await retried.AcknowledgeAsync(); - } - - [Fact] - public async Task Release_RetryBeyondMax_ShouldDeadLetter() - { - if (SkipIfUnavailable()) - { - return; - } - - var options = CreateOptions(static opts => - { - opts.MaxDeliveryAttempts = 2; - opts.Nats.DeadLetterStream = "NOTIFY_DELIVERY_DEAD_TEST"; - opts.Nats.DeadLetterSubject = "notify.delivery.dead.test"; - }); - - await using var queue = CreateQueue(options); - - await queue.PublishAsync(new NotifyDeliveryQueueMessage( - TestData.CreateDelivery(), - channelId: "chan-dead", - channelType: NotifyChannelType.Webhook)); - - var lease = (await queue.LeaseAsync(new NotifyQueueLeaseRequest("worker-dead", 1, TimeSpan.FromSeconds(2)))).Single(); - await lease.ReleaseAsync(NotifyQueueReleaseDisposition.Retry); - - var second = (await queue.LeaseAsync(new NotifyQueueLeaseRequest("worker-dead", 1, TimeSpan.FromSeconds(2)))).Single(); - await second.ReleaseAsync(NotifyQueueReleaseDisposition.Retry); - - await Task.Delay(200); - - await using var connection = new NatsConnection(new NatsOpts { Url = options.Nats.Url! 
}); - await connection.ConnectAsync(); - var js = new NatsJSContext(connection); - - var consumerConfig = new ConsumerConfig - { - DurableName = "notify-delivery-dead-test", - DeliverPolicy = ConsumerConfigDeliverPolicy.All, - AckPolicy = ConsumerConfigAckPolicy.Explicit - }; - - var consumer = await js.CreateConsumerAsync(options.Nats.DeadLetterStream, consumerConfig); - var fetchOpts = new NatsJSFetchOpts { MaxMsgs = 1, Expires = TimeSpan.FromSeconds(1) }; - - NatsJSMsg<byte[]>? dlqMsg = null; - await foreach (var msg in consumer.FetchAsync(NatsRawSerializer<byte[]>.Default, fetchOpts)) - { - dlqMsg = msg; - await msg.AckAsync(new AckOpts()); - break; - } - - dlqMsg.Should().NotBeNull(); - } - - private NatsNotifyDeliveryQueue CreateQueue(NotifyDeliveryQueueOptions options) - { - return new NatsNotifyDeliveryQueue( - options, - options.Nats, - NullLogger<NatsNotifyDeliveryQueue>.Instance, - TimeProvider.System); - } - - private NotifyDeliveryQueueOptions CreateOptions(Action<NotifyDeliveryQueueOptions>? configure = null) - { - var url = $"nats://{_nats.Hostname}:{_nats.GetMappedPublicPort(4222)}"; - - var opts = new NotifyDeliveryQueueOptions - { - Transport = NotifyQueueTransportKind.Nats, - DefaultLeaseDuration = TimeSpan.FromSeconds(2), - MaxDeliveryAttempts = 3, - RetryInitialBackoff = TimeSpan.FromMilliseconds(20), - RetryMaxBackoff = TimeSpan.FromMilliseconds(200), - Nats = new NotifyNatsDeliveryQueueOptions - { - Url = url, - Stream = "NOTIFY_DELIVERY_TEST", - Subject = "notify.delivery.test", - DeadLetterStream = "NOTIFY_DELIVERY_TEST_DEAD", - DeadLetterSubject = "notify.delivery.test.dead", - DurableConsumer = "notify-delivery-tests", - MaxAckPending = 32, - AckWait = TimeSpan.FromSeconds(2), - RetryDelay = TimeSpan.FromMilliseconds(100), - IdleHeartbeat = TimeSpan.FromMilliseconds(200) - } - }; - - configure?.Invoke(opts); - return opts; - } - - private bool SkipIfUnavailable() - => _skipReason is not null; - - private static class TestData - { - public static NotifyDelivery CreateDelivery(string tenantId = "tenant-1") - { - return NotifyDelivery.Create( - deliveryId: Guid.NewGuid().ToString("n"), - tenantId: tenantId, - ruleId: "rule-1", - actionId: "action-1", - eventId: Guid.NewGuid(), - kind: "scanner.report.ready", - status: NotifyDeliveryStatus.Pending, - createdAt: DateTimeOffset.UtcNow); - } - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text.Json.Nodes; +using System.Threading.Tasks; +using DotNet.Testcontainers.Builders; +using DotNet.Testcontainers.Containers; +using DotNet.Testcontainers.Configurations; +using FluentAssertions; +using Microsoft.Extensions.Logging.Abstractions; +using NATS.Client.Core; +using NATS.Client.JetStream; +using NATS.Client.JetStream.Models; +using StellaOps.Notify.Models; +using StellaOps.Notify.Queue; +using StellaOps.Notify.Queue.Nats; +using Xunit; + +namespace StellaOps.Notify.Queue.Tests; + +public sealed class NatsNotifyDeliveryQueueTests : IAsyncLifetime +{ + private readonly TestcontainersContainer _nats; + private string? 
_skipReason; + + public NatsNotifyDeliveryQueueTests() + { + _nats = new TestcontainersBuilder<TestcontainersContainer>() + .WithImage("nats:2.10-alpine") + .WithCleanUp(true) + .WithName($"nats-notify-delivery-{Guid.NewGuid():N}") + .WithPortBinding(4222, true) + .WithCommand("--jetstream") + .WithWaitStrategy(Wait.ForUnixContainer().UntilPortIsAvailable(4222)) + .Build(); + } + + public async Task InitializeAsync() + { + try + { + await _nats.StartAsync(); + } + catch (Exception ex) + { + _skipReason = $"NATS-backed delivery tests skipped: {ex.Message}"; + } + } + + public async Task DisposeAsync() + { + if (_skipReason is not null) + { + return; + } + + await _nats.DisposeAsync().ConfigureAwait(false); + } + + [Fact] + public async Task Publish_ShouldDeduplicate_ByDeliveryId() + { + if (SkipIfUnavailable()) + { + return; + } + + var options = CreateOptions(); + await using var queue = CreateQueue(options); + + var delivery = TestData.CreateDelivery("tenant-a"); + var message = new NotifyDeliveryQueueMessage( + delivery, + channelId: "chan-a", + channelType: NotifyChannelType.Slack); + + var first = await queue.PublishAsync(message); + first.Deduplicated.Should().BeFalse(); + + var second = await queue.PublishAsync(message); + second.Deduplicated.Should().BeTrue(); + second.MessageId.Should().Be(first.MessageId); + } + + [Fact] + public async Task Release_Retry_ShouldReschedule() + { + if (SkipIfUnavailable()) + { + return; + } + + var options = CreateOptions(); + await using var queue = CreateQueue(options); + + await queue.PublishAsync(new NotifyDeliveryQueueMessage( + TestData.CreateDelivery(), + channelId: "chan-retry", + channelType: NotifyChannelType.Teams)); + + var lease = (await queue.LeaseAsync(new NotifyQueueLeaseRequest("worker-retry", 1, TimeSpan.FromSeconds(2)))).Single(); + + await lease.ReleaseAsync(NotifyQueueReleaseDisposition.Retry); + + var retried = (await queue.LeaseAsync(new NotifyQueueLeaseRequest("worker-retry", 1, TimeSpan.FromSeconds(2)))).Single(); + retried.Attempt.Should().BeGreaterThan(lease.Attempt); + + await retried.AcknowledgeAsync(); + } + + [Fact] + public async Task Release_RetryBeyondMax_ShouldDeadLetter() + { + if (SkipIfUnavailable()) + { + return; + } + + var options = CreateOptions(static opts => + { + opts.MaxDeliveryAttempts = 2; + opts.Nats.DeadLetterStream = "NOTIFY_DELIVERY_DEAD_TEST"; + opts.Nats.DeadLetterSubject = "notify.delivery.dead.test"; + }); + + await using var queue = CreateQueue(options); + + await queue.PublishAsync(new NotifyDeliveryQueueMessage( + TestData.CreateDelivery(), + channelId: "chan-dead", + channelType: NotifyChannelType.Webhook)); + + var lease = (await queue.LeaseAsync(new NotifyQueueLeaseRequest("worker-dead", 1, TimeSpan.FromSeconds(2)))).Single(); + await lease.ReleaseAsync(NotifyQueueReleaseDisposition.Retry); + + var second = (await queue.LeaseAsync(new NotifyQueueLeaseRequest("worker-dead", 1, TimeSpan.FromSeconds(2)))).Single(); + await second.ReleaseAsync(NotifyQueueReleaseDisposition.Retry); + + await Task.Delay(200); + + await using var connection = new NatsConnection(new NatsOpts { Url = options.Nats.Url! 
}); + await connection.ConnectAsync(); + var js = new NatsJSContext(connection); + + var consumerConfig = new ConsumerConfig + { + DurableName = "notify-delivery-dead-test", + DeliverPolicy = ConsumerConfigDeliverPolicy.All, + AckPolicy = ConsumerConfigAckPolicy.Explicit + }; + + var consumer = await js.CreateConsumerAsync(options.Nats.DeadLetterStream, consumerConfig); + var fetchOpts = new NatsJSFetchOpts { MaxMsgs = 1, Expires = TimeSpan.FromSeconds(1) }; + + NatsJSMsg<byte[]>? dlqMsg = null; + await foreach (var msg in consumer.FetchAsync(NatsRawSerializer<byte[]>.Default, fetchOpts)) + { + dlqMsg = msg; + await msg.AckAsync(new AckOpts()); + break; + } + + dlqMsg.Should().NotBeNull(); + } + + private NatsNotifyDeliveryQueue CreateQueue(NotifyDeliveryQueueOptions options) + { + return new NatsNotifyDeliveryQueue( + options, + options.Nats, + NullLogger<NatsNotifyDeliveryQueue>.Instance, + TimeProvider.System); + } + + private NotifyDeliveryQueueOptions CreateOptions(Action<NotifyDeliveryQueueOptions>? configure = null) + { + var url = $"nats://{_nats.Hostname}:{_nats.GetMappedPublicPort(4222)}"; + + var opts = new NotifyDeliveryQueueOptions + { + Transport = NotifyQueueTransportKind.Nats, + DefaultLeaseDuration = TimeSpan.FromSeconds(2), + MaxDeliveryAttempts = 3, + RetryInitialBackoff = TimeSpan.FromMilliseconds(20), + RetryMaxBackoff = TimeSpan.FromMilliseconds(200), + Nats = new NotifyNatsDeliveryQueueOptions + { + Url = url, + Stream = "NOTIFY_DELIVERY_TEST", + Subject = "notify.delivery.test", + DeadLetterStream = "NOTIFY_DELIVERY_TEST_DEAD", + DeadLetterSubject = "notify.delivery.test.dead", + DurableConsumer = "notify-delivery-tests", + MaxAckPending = 32, + AckWait = TimeSpan.FromSeconds(2), + RetryDelay = TimeSpan.FromMilliseconds(100), + IdleHeartbeat = TimeSpan.FromMilliseconds(200) + } + }; + + configure?.Invoke(opts); + return opts; + } + + private bool SkipIfUnavailable() + => _skipReason is not null; + + private static class TestData + { + public static NotifyDelivery CreateDelivery(string tenantId = "tenant-1") + { + return NotifyDelivery.Create( + deliveryId: Guid.NewGuid().ToString("n"), + tenantId: tenantId, + ruleId: "rule-1", + actionId: "action-1", + eventId: Guid.NewGuid(), + kind: "scanner.report.ready", + status: NotifyDeliveryStatus.Pending, + createdAt: DateTimeOffset.UtcNow); + } + } +} diff --git a/src/StellaOps.Notify.Queue.Tests/NatsNotifyEventQueueTests.cs b/src/Notify/__Tests/StellaOps.Notify.Queue.Tests/NatsNotifyEventQueueTests.cs similarity index 96% rename from src/StellaOps.Notify.Queue.Tests/NatsNotifyEventQueueTests.cs rename to src/Notify/__Tests/StellaOps.Notify.Queue.Tests/NatsNotifyEventQueueTests.cs index c092047b..5c8e5357 100644 --- a/src/StellaOps.Notify.Queue.Tests/NatsNotifyEventQueueTests.cs +++ b/src/Notify/__Tests/StellaOps.Notify.Queue.Tests/NatsNotifyEventQueueTests.cs @@ -1,225 +1,225 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text.Json.Nodes; -using System.Threading.Tasks; -using DotNet.Testcontainers.Builders; -using DotNet.Testcontainers.Containers; -using DotNet.Testcontainers.Configurations; -using FluentAssertions; -using Microsoft.Extensions.Logging.Abstractions; -using StellaOps.Notify.Models; -using StellaOps.Notify.Queue; -using StellaOps.Notify.Queue.Nats; -using Xunit; - -namespace StellaOps.Notify.Queue.Tests; - -public sealed class NatsNotifyEventQueueTests : IAsyncLifetime -{ - private readonly TestcontainersContainer _nats; - private string? 
_skipReason; - - public NatsNotifyEventQueueTests() - { - _nats = new TestcontainersBuilder<TestcontainersContainer>() - .WithImage("nats:2.10-alpine") - .WithCleanUp(true) - .WithName($"nats-notify-tests-{Guid.NewGuid():N}") - .WithPortBinding(4222, true) - .WithCommand("--jetstream") - .WithWaitStrategy(Wait.ForUnixContainer().UntilPortIsAvailable(4222)) - .Build(); - } - - public async Task InitializeAsync() - { - try - { - await _nats.StartAsync(); - } - catch (Exception ex) - { - _skipReason = $"NATS-backed tests skipped: {ex.Message}"; - } - } - - public async Task DisposeAsync() - { - if (_skipReason is not null) - { - return; - } - - await _nats.DisposeAsync().ConfigureAwait(false); - } - - [Fact] - public async Task Publish_ShouldDeduplicate_ByIdempotencyKey() - { - if (SkipIfUnavailable()) - { - return; - } - - var options = CreateOptions(); - await using var queue = CreateQueue(options); - - var notifyEvent = TestData.CreateEvent("tenant-a"); - var message = new NotifyQueueEventMessage( - notifyEvent, - options.Nats.Subject, - traceId: "trace-1"); - - var first = await queue.PublishAsync(message); - first.Deduplicated.Should().BeFalse(); - - var second = await queue.PublishAsync(message); - second.Deduplicated.Should().BeTrue(); - second.MessageId.Should().Be(first.MessageId); - } - - [Fact] - public async Task Lease_Acknowledge_ShouldRemoveMessage() - { - if (SkipIfUnavailable()) - { - return; - } - - var options = CreateOptions(); - await using var queue = CreateQueue(options); - - var notifyEvent = TestData.CreateEvent("tenant-b"); - var message = new NotifyQueueEventMessage( - notifyEvent, - options.Nats.Subject, - traceId: "trace-xyz", - attributes: new Dictionary<string, string> { { "source", "scanner" } }); - - await queue.PublishAsync(message); - - var leases = await queue.LeaseAsync(new NotifyQueueLeaseRequest("worker-1", 1, TimeSpan.FromSeconds(2))); - leases.Should().ContainSingle(); - - var lease = leases[0]; - lease.Attempt.Should().BeGreaterThanOrEqualTo(1); - lease.Message.Event.EventId.Should().Be(notifyEvent.EventId); - lease.TraceId.Should().Be("trace-xyz"); - lease.Attributes.Should().ContainKey("source").WhoseValue.Should().Be("scanner"); - - await lease.AcknowledgeAsync(); - - var afterAck = await queue.LeaseAsync(new NotifyQueueLeaseRequest("worker-1", 1, TimeSpan.FromSeconds(1))); - afterAck.Should().BeEmpty(); - } - - [Fact] - public async Task Lease_ShouldPreserveOrdering() - { - if (SkipIfUnavailable()) - { - return; - } - - var options = CreateOptions(); - await using var queue = CreateQueue(options); - - var first = TestData.CreateEvent(); - var second = TestData.CreateEvent(); - - await queue.PublishAsync(new NotifyQueueEventMessage(first, options.Nats.Subject)); - await queue.PublishAsync(new NotifyQueueEventMessage(second, options.Nats.Subject)); - - var leases = await queue.LeaseAsync(new NotifyQueueLeaseRequest("worker-order", 2, TimeSpan.FromSeconds(2))); - leases.Should().HaveCount(2); - - leases.Select(x => x.Message.Event.EventId) - .Should() - .ContainInOrder(first.EventId, second.EventId); - } - - [Fact] - public async Task ClaimExpired_ShouldReassignLease() - { - if (SkipIfUnavailable()) - { - return; - } - - var options = CreateOptions(); - await using var queue = CreateQueue(options); - - var notifyEvent = TestData.CreateEvent(); - await queue.PublishAsync(new NotifyQueueEventMessage(notifyEvent, options.Nats.Subject)); - - var leases = await queue.LeaseAsync(new NotifyQueueLeaseRequest("worker-initial", 1, 
TimeSpan.FromMilliseconds(500))); - leases.Should().ContainSingle(); - - await Task.Delay(200); - - var claimed = await queue.ClaimExpiredAsync(new NotifyQueueClaimOptions("worker-reclaim", 1, TimeSpan.FromMilliseconds(100))); - claimed.Should().ContainSingle(); - - var lease = claimed[0]; - lease.Consumer.Should().Be("worker-reclaim"); - lease.Message.Event.EventId.Should().Be(notifyEvent.EventId); - - await lease.AcknowledgeAsync(); - } - - private NatsNotifyEventQueue CreateQueue(NotifyEventQueueOptions options) - { - return new NatsNotifyEventQueue( - options, - options.Nats, - NullLogger<NatsNotifyEventQueue>.Instance, - TimeProvider.System); - } - - private NotifyEventQueueOptions CreateOptions() - { - var connectionUrl = $"nats://{_nats.Hostname}:{_nats.GetMappedPublicPort(4222)}"; - - return new NotifyEventQueueOptions - { - Transport = NotifyQueueTransportKind.Nats, - DefaultLeaseDuration = TimeSpan.FromSeconds(2), - MaxDeliveryAttempts = 3, - RetryInitialBackoff = TimeSpan.FromMilliseconds(50), - RetryMaxBackoff = TimeSpan.FromSeconds(1), - Nats = new NotifyNatsEventQueueOptions - { - Url = connectionUrl, - Stream = "NOTIFY_TEST", - Subject = "notify.test.events", - DeadLetterStream = "NOTIFY_TEST_DEAD", - DeadLetterSubject = "notify.test.events.dead", - DurableConsumer = "notify-test-consumer", - MaxAckPending = 32, - AckWait = TimeSpan.FromSeconds(2), - RetryDelay = TimeSpan.FromMilliseconds(100), - IdleHeartbeat = TimeSpan.FromMilliseconds(100) - } - }; - } - - private bool SkipIfUnavailable() - => _skipReason is not null; - - private static class TestData - { - public static NotifyEvent CreateEvent(string tenant = "tenant-1") - { - return NotifyEvent.Create( - Guid.NewGuid(), - kind: "scanner.report.ready", - tenant: tenant, - ts: DateTimeOffset.UtcNow, - payload: new JsonObject - { - ["summary"] = "event" - }); - } - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text.Json.Nodes; +using System.Threading.Tasks; +using DotNet.Testcontainers.Builders; +using DotNet.Testcontainers.Containers; +using DotNet.Testcontainers.Configurations; +using FluentAssertions; +using Microsoft.Extensions.Logging.Abstractions; +using StellaOps.Notify.Models; +using StellaOps.Notify.Queue; +using StellaOps.Notify.Queue.Nats; +using Xunit; + +namespace StellaOps.Notify.Queue.Tests; + +public sealed class NatsNotifyEventQueueTests : IAsyncLifetime +{ + private readonly TestcontainersContainer _nats; + private string? 
_skipReason; + + public NatsNotifyEventQueueTests() + { + _nats = new TestcontainersBuilder<TestcontainersContainer>() + .WithImage("nats:2.10-alpine") + .WithCleanUp(true) + .WithName($"nats-notify-tests-{Guid.NewGuid():N}") + .WithPortBinding(4222, true) + .WithCommand("--jetstream") + .WithWaitStrategy(Wait.ForUnixContainer().UntilPortIsAvailable(4222)) + .Build(); + } + + public async Task InitializeAsync() + { + try + { + await _nats.StartAsync(); + } + catch (Exception ex) + { + _skipReason = $"NATS-backed tests skipped: {ex.Message}"; + } + } + + public async Task DisposeAsync() + { + if (_skipReason is not null) + { + return; + } + + await _nats.DisposeAsync().ConfigureAwait(false); + } + + [Fact] + public async Task Publish_ShouldDeduplicate_ByIdempotencyKey() + { + if (SkipIfUnavailable()) + { + return; + } + + var options = CreateOptions(); + await using var queue = CreateQueue(options); + + var notifyEvent = TestData.CreateEvent("tenant-a"); + var message = new NotifyQueueEventMessage( + notifyEvent, + options.Nats.Subject, + traceId: "trace-1"); + + var first = await queue.PublishAsync(message); + first.Deduplicated.Should().BeFalse(); + + var second = await queue.PublishAsync(message); + second.Deduplicated.Should().BeTrue(); + second.MessageId.Should().Be(first.MessageId); + } + + [Fact] + public async Task Lease_Acknowledge_ShouldRemoveMessage() + { + if (SkipIfUnavailable()) + { + return; + } + + var options = CreateOptions(); + await using var queue = CreateQueue(options); + + var notifyEvent = TestData.CreateEvent("tenant-b"); + var message = new NotifyQueueEventMessage( + notifyEvent, + options.Nats.Subject, + traceId: "trace-xyz", + attributes: new Dictionary<string, string> { { "source", "scanner" } }); + + await queue.PublishAsync(message); + + var leases = await queue.LeaseAsync(new NotifyQueueLeaseRequest("worker-1", 1, TimeSpan.FromSeconds(2))); + leases.Should().ContainSingle(); + + var lease = leases[0]; + lease.Attempt.Should().BeGreaterThanOrEqualTo(1); + lease.Message.Event.EventId.Should().Be(notifyEvent.EventId); + lease.TraceId.Should().Be("trace-xyz"); + lease.Attributes.Should().ContainKey("source").WhoseValue.Should().Be("scanner"); + + await lease.AcknowledgeAsync(); + + var afterAck = await queue.LeaseAsync(new NotifyQueueLeaseRequest("worker-1", 1, TimeSpan.FromSeconds(1))); + afterAck.Should().BeEmpty(); + } + + [Fact] + public async Task Lease_ShouldPreserveOrdering() + { + if (SkipIfUnavailable()) + { + return; + } + + var options = CreateOptions(); + await using var queue = CreateQueue(options); + + var first = TestData.CreateEvent(); + var second = TestData.CreateEvent(); + + await queue.PublishAsync(new NotifyQueueEventMessage(first, options.Nats.Subject)); + await queue.PublishAsync(new NotifyQueueEventMessage(second, options.Nats.Subject)); + + var leases = await queue.LeaseAsync(new NotifyQueueLeaseRequest("worker-order", 2, TimeSpan.FromSeconds(2))); + leases.Should().HaveCount(2); + + leases.Select(x => x.Message.Event.EventId) + .Should() + .ContainInOrder(first.EventId, second.EventId); + } + + [Fact] + public async Task ClaimExpired_ShouldReassignLease() + { + if (SkipIfUnavailable()) + { + return; + } + + var options = CreateOptions(); + await using var queue = CreateQueue(options); + + var notifyEvent = TestData.CreateEvent(); + await queue.PublishAsync(new NotifyQueueEventMessage(notifyEvent, options.Nats.Subject)); + + var leases = await queue.LeaseAsync(new NotifyQueueLeaseRequest("worker-initial", 1, 
TimeSpan.FromMilliseconds(500))); + leases.Should().ContainSingle(); + + await Task.Delay(200); + + var claimed = await queue.ClaimExpiredAsync(new NotifyQueueClaimOptions("worker-reclaim", 1, TimeSpan.FromMilliseconds(100))); + claimed.Should().ContainSingle(); + + var lease = claimed[0]; + lease.Consumer.Should().Be("worker-reclaim"); + lease.Message.Event.EventId.Should().Be(notifyEvent.EventId); + + await lease.AcknowledgeAsync(); + } + + private NatsNotifyEventQueue CreateQueue(NotifyEventQueueOptions options) + { + return new NatsNotifyEventQueue( + options, + options.Nats, + NullLogger<NatsNotifyEventQueue>.Instance, + TimeProvider.System); + } + + private NotifyEventQueueOptions CreateOptions() + { + var connectionUrl = $"nats://{_nats.Hostname}:{_nats.GetMappedPublicPort(4222)}"; + + return new NotifyEventQueueOptions + { + Transport = NotifyQueueTransportKind.Nats, + DefaultLeaseDuration = TimeSpan.FromSeconds(2), + MaxDeliveryAttempts = 3, + RetryInitialBackoff = TimeSpan.FromMilliseconds(50), + RetryMaxBackoff = TimeSpan.FromSeconds(1), + Nats = new NotifyNatsEventQueueOptions + { + Url = connectionUrl, + Stream = "NOTIFY_TEST", + Subject = "notify.test.events", + DeadLetterStream = "NOTIFY_TEST_DEAD", + DeadLetterSubject = "notify.test.events.dead", + DurableConsumer = "notify-test-consumer", + MaxAckPending = 32, + AckWait = TimeSpan.FromSeconds(2), + RetryDelay = TimeSpan.FromMilliseconds(100), + IdleHeartbeat = TimeSpan.FromMilliseconds(100) + } + }; + } + + private bool SkipIfUnavailable() + => _skipReason is not null; + + private static class TestData + { + public static NotifyEvent CreateEvent(string tenant = "tenant-1") + { + return NotifyEvent.Create( + Guid.NewGuid(), + kind: "scanner.report.ready", + tenant: tenant, + ts: DateTimeOffset.UtcNow, + payload: new JsonObject + { + ["summary"] = "event" + }); + } + } +} diff --git a/src/StellaOps.Notify.Queue.Tests/RedisNotifyDeliveryQueueTests.cs b/src/Notify/__Tests/StellaOps.Notify.Queue.Tests/RedisNotifyDeliveryQueueTests.cs similarity index 96% rename from src/StellaOps.Notify.Queue.Tests/RedisNotifyDeliveryQueueTests.cs rename to src/Notify/__Tests/StellaOps.Notify.Queue.Tests/RedisNotifyDeliveryQueueTests.cs index 80b83ba0..25f5b617 100644 --- a/src/StellaOps.Notify.Queue.Tests/RedisNotifyDeliveryQueueTests.cs +++ b/src/Notify/__Tests/StellaOps.Notify.Queue.Tests/RedisNotifyDeliveryQueueTests.cs @@ -1,197 +1,197 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text.Json.Nodes; -using System.Threading.Tasks; -using DotNet.Testcontainers.Builders; -using DotNet.Testcontainers.Containers; -using DotNet.Testcontainers.Configurations; -using FluentAssertions; -using Microsoft.Extensions.Logging.Abstractions; -using StackExchange.Redis; -using StellaOps.Notify.Models; -using StellaOps.Notify.Queue; -using StellaOps.Notify.Queue.Redis; -using Xunit; - -namespace StellaOps.Notify.Queue.Tests; - -public sealed class RedisNotifyDeliveryQueueTests : IAsyncLifetime -{ - private readonly RedisTestcontainer _redis; - private string? 
_skipReason; - - public RedisNotifyDeliveryQueueTests() - { - var configuration = new RedisTestcontainerConfiguration(); - _redis = new TestcontainersBuilder<RedisTestcontainer>() - .WithDatabase(configuration) - .Build(); - } - - public async Task InitializeAsync() - { - try - { - await _redis.StartAsync(); - } - catch (Exception ex) - { - _skipReason = $"Redis-backed delivery tests skipped: {ex.Message}"; - } - } - - public async Task DisposeAsync() - { - if (_skipReason is not null) - { - return; - } - - await _redis.DisposeAsync().AsTask(); - } - - [Fact] - public async Task Publish_ShouldDeduplicate_ByDeliveryId() - { - if (SkipIfUnavailable()) - { - return; - } - - var options = CreateOptions(); - await using var queue = CreateQueue(options); - - var delivery = TestData.CreateDelivery(); - var message = new NotifyDeliveryQueueMessage( - delivery, - channelId: "channel-1", - channelType: NotifyChannelType.Slack); - - var first = await queue.PublishAsync(message); - first.Deduplicated.Should().BeFalse(); - - var second = await queue.PublishAsync(message); - second.Deduplicated.Should().BeTrue(); - second.MessageId.Should().Be(first.MessageId); - } - - [Fact] - public async Task Release_Retry_ShouldRescheduleDelivery() - { - if (SkipIfUnavailable()) - { - return; - } - - var options = CreateOptions(); - await using var queue = CreateQueue(options); - - await queue.PublishAsync(new NotifyDeliveryQueueMessage( - TestData.CreateDelivery(), - channelId: "channel-retry", - channelType: NotifyChannelType.Teams)); - - var lease = (await queue.LeaseAsync(new NotifyQueueLeaseRequest("worker-retry", 1, TimeSpan.FromSeconds(1)))).Single(); - lease.Attempt.Should().Be(1); - - await lease.ReleaseAsync(NotifyQueueReleaseDisposition.Retry); - - var retried = (await queue.LeaseAsync(new NotifyQueueLeaseRequest("worker-retry", 1, TimeSpan.FromSeconds(1)))).Single(); - retried.Attempt.Should().Be(2); - - await retried.AcknowledgeAsync(); - } - - [Fact] - public async Task Release_RetryBeyondMax_ShouldDeadLetter() - { - if (SkipIfUnavailable()) - { - return; - } - - var options = CreateOptions(static opts => - { - opts.MaxDeliveryAttempts = 2; - opts.Redis.DeadLetterStreamName = "notify:deliveries:testdead"; - }); - - await using var queue = CreateQueue(options); - - await queue.PublishAsync(new NotifyDeliveryQueueMessage( - TestData.CreateDelivery(), - channelId: "channel-dead", - channelType: NotifyChannelType.Email)); - - var first = (await queue.LeaseAsync(new NotifyQueueLeaseRequest("worker-dead", 1, TimeSpan.FromSeconds(1)))).Single(); - await first.ReleaseAsync(NotifyQueueReleaseDisposition.Retry); - - var second = (await queue.LeaseAsync(new NotifyQueueLeaseRequest("worker-dead", 1, TimeSpan.FromSeconds(1)))).Single(); - await second.ReleaseAsync(NotifyQueueReleaseDisposition.Retry); - - await Task.Delay(100); - - var mux = await ConnectionMultiplexer.ConnectAsync(_redis.ConnectionString); - var db = mux.GetDatabase(); - var deadLetters = await db.StreamReadAsync(options.Redis.DeadLetterStreamName, "0-0"); - deadLetters.Should().NotBeEmpty(); - } - - private RedisNotifyDeliveryQueue CreateQueue(NotifyDeliveryQueueOptions options) - { - return new RedisNotifyDeliveryQueue( - options, - options.Redis, - NullLogger<RedisNotifyDeliveryQueue>.Instance, - TimeProvider.System, - async config => (IConnectionMultiplexer)await ConnectionMultiplexer.ConnectAsync(config).ConfigureAwait(false)); - } - - private NotifyDeliveryQueueOptions CreateOptions(Action<NotifyDeliveryQueueOptions>? 
configure = null) - { - var opts = new NotifyDeliveryQueueOptions - { - Transport = NotifyQueueTransportKind.Redis, - DefaultLeaseDuration = TimeSpan.FromSeconds(1), - MaxDeliveryAttempts = 3, - RetryInitialBackoff = TimeSpan.FromMilliseconds(10), - RetryMaxBackoff = TimeSpan.FromMilliseconds(50), - ClaimIdleThreshold = TimeSpan.FromSeconds(1), - Redis = new NotifyRedisDeliveryQueueOptions - { - ConnectionString = _redis.ConnectionString, - StreamName = "notify:deliveries:test", - ConsumerGroup = "notify-delivery-tests", - IdempotencyKeyPrefix = "notify:deliveries:test:idemp:" - } - }; - - configure?.Invoke(opts); - return opts; - } - - private bool SkipIfUnavailable() - => _skipReason is not null; - - private static class TestData - { - public static NotifyDelivery CreateDelivery() - { - var now = DateTimeOffset.UtcNow; - return NotifyDelivery.Create( - deliveryId: Guid.NewGuid().ToString("n"), - tenantId: "tenant-1", - ruleId: "rule-1", - actionId: "action-1", - eventId: Guid.NewGuid(), - kind: "scanner.report.ready", - status: NotifyDeliveryStatus.Pending, - createdAt: now, - metadata: new Dictionary<string, string> - { - ["integration"] = "tests" - }); - } - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text.Json.Nodes; +using System.Threading.Tasks; +using DotNet.Testcontainers.Builders; +using DotNet.Testcontainers.Containers; +using DotNet.Testcontainers.Configurations; +using FluentAssertions; +using Microsoft.Extensions.Logging.Abstractions; +using StackExchange.Redis; +using StellaOps.Notify.Models; +using StellaOps.Notify.Queue; +using StellaOps.Notify.Queue.Redis; +using Xunit; + +namespace StellaOps.Notify.Queue.Tests; + +public sealed class RedisNotifyDeliveryQueueTests : IAsyncLifetime +{ + private readonly RedisTestcontainer _redis; + private string? 
_skipReason; + + public RedisNotifyDeliveryQueueTests() + { + var configuration = new RedisTestcontainerConfiguration(); + _redis = new TestcontainersBuilder<RedisTestcontainer>() + .WithDatabase(configuration) + .Build(); + } + + public async Task InitializeAsync() + { + try + { + await _redis.StartAsync(); + } + catch (Exception ex) + { + _skipReason = $"Redis-backed delivery tests skipped: {ex.Message}"; + } + } + + public async Task DisposeAsync() + { + if (_skipReason is not null) + { + return; + } + + await _redis.DisposeAsync().AsTask(); + } + + [Fact] + public async Task Publish_ShouldDeduplicate_ByDeliveryId() + { + if (SkipIfUnavailable()) + { + return; + } + + var options = CreateOptions(); + await using var queue = CreateQueue(options); + + var delivery = TestData.CreateDelivery(); + var message = new NotifyDeliveryQueueMessage( + delivery, + channelId: "channel-1", + channelType: NotifyChannelType.Slack); + + var first = await queue.PublishAsync(message); + first.Deduplicated.Should().BeFalse(); + + var second = await queue.PublishAsync(message); + second.Deduplicated.Should().BeTrue(); + second.MessageId.Should().Be(first.MessageId); + } + + [Fact] + public async Task Release_Retry_ShouldRescheduleDelivery() + { + if (SkipIfUnavailable()) + { + return; + } + + var options = CreateOptions(); + await using var queue = CreateQueue(options); + + await queue.PublishAsync(new NotifyDeliveryQueueMessage( + TestData.CreateDelivery(), + channelId: "channel-retry", + channelType: NotifyChannelType.Teams)); + + var lease = (await queue.LeaseAsync(new NotifyQueueLeaseRequest("worker-retry", 1, TimeSpan.FromSeconds(1)))).Single(); + lease.Attempt.Should().Be(1); + + await lease.ReleaseAsync(NotifyQueueReleaseDisposition.Retry); + + var retried = (await queue.LeaseAsync(new NotifyQueueLeaseRequest("worker-retry", 1, TimeSpan.FromSeconds(1)))).Single(); + retried.Attempt.Should().Be(2); + + await retried.AcknowledgeAsync(); + } + + [Fact] + public async Task Release_RetryBeyondMax_ShouldDeadLetter() + { + if (SkipIfUnavailable()) + { + return; + } + + var options = CreateOptions(static opts => + { + opts.MaxDeliveryAttempts = 2; + opts.Redis.DeadLetterStreamName = "notify:deliveries:testdead"; + }); + + await using var queue = CreateQueue(options); + + await queue.PublishAsync(new NotifyDeliveryQueueMessage( + TestData.CreateDelivery(), + channelId: "channel-dead", + channelType: NotifyChannelType.Email)); + + var first = (await queue.LeaseAsync(new NotifyQueueLeaseRequest("worker-dead", 1, TimeSpan.FromSeconds(1)))).Single(); + await first.ReleaseAsync(NotifyQueueReleaseDisposition.Retry); + + var second = (await queue.LeaseAsync(new NotifyQueueLeaseRequest("worker-dead", 1, TimeSpan.FromSeconds(1)))).Single(); + await second.ReleaseAsync(NotifyQueueReleaseDisposition.Retry); + + await Task.Delay(100); + + var mux = await ConnectionMultiplexer.ConnectAsync(_redis.ConnectionString); + var db = mux.GetDatabase(); + var deadLetters = await db.StreamReadAsync(options.Redis.DeadLetterStreamName, "0-0"); + deadLetters.Should().NotBeEmpty(); + } + + private RedisNotifyDeliveryQueue CreateQueue(NotifyDeliveryQueueOptions options) + { + return new RedisNotifyDeliveryQueue( + options, + options.Redis, + NullLogger<RedisNotifyDeliveryQueue>.Instance, + TimeProvider.System, + async config => (IConnectionMultiplexer)await ConnectionMultiplexer.ConnectAsync(config).ConfigureAwait(false)); + } + + private NotifyDeliveryQueueOptions CreateOptions(Action<NotifyDeliveryQueueOptions>? 
configure = null) + { + var opts = new NotifyDeliveryQueueOptions + { + Transport = NotifyQueueTransportKind.Redis, + DefaultLeaseDuration = TimeSpan.FromSeconds(1), + MaxDeliveryAttempts = 3, + RetryInitialBackoff = TimeSpan.FromMilliseconds(10), + RetryMaxBackoff = TimeSpan.FromMilliseconds(50), + ClaimIdleThreshold = TimeSpan.FromSeconds(1), + Redis = new NotifyRedisDeliveryQueueOptions + { + ConnectionString = _redis.ConnectionString, + StreamName = "notify:deliveries:test", + ConsumerGroup = "notify-delivery-tests", + IdempotencyKeyPrefix = "notify:deliveries:test:idemp:" + } + }; + + configure?.Invoke(opts); + return opts; + } + + private bool SkipIfUnavailable() + => _skipReason is not null; + + private static class TestData + { + public static NotifyDelivery CreateDelivery() + { + var now = DateTimeOffset.UtcNow; + return NotifyDelivery.Create( + deliveryId: Guid.NewGuid().ToString("n"), + tenantId: "tenant-1", + ruleId: "rule-1", + actionId: "action-1", + eventId: Guid.NewGuid(), + kind: "scanner.report.ready", + status: NotifyDeliveryStatus.Pending, + createdAt: now, + metadata: new Dictionary<string, string> + { + ["integration"] = "tests" + }); + } + } +} diff --git a/src/StellaOps.Notify.Queue.Tests/RedisNotifyEventQueueTests.cs b/src/Notify/__Tests/StellaOps.Notify.Queue.Tests/RedisNotifyEventQueueTests.cs similarity index 96% rename from src/StellaOps.Notify.Queue.Tests/RedisNotifyEventQueueTests.cs rename to src/Notify/__Tests/StellaOps.Notify.Queue.Tests/RedisNotifyEventQueueTests.cs index 28499b1e..a3aca157 100644 --- a/src/StellaOps.Notify.Queue.Tests/RedisNotifyEventQueueTests.cs +++ b/src/Notify/__Tests/StellaOps.Notify.Queue.Tests/RedisNotifyEventQueueTests.cs @@ -1,220 +1,220 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text.Json.Nodes; -using System.Threading; -using System.Threading.Tasks; -using DotNet.Testcontainers.Builders; -using DotNet.Testcontainers.Containers; -using DotNet.Testcontainers.Configurations; -using FluentAssertions; -using Microsoft.Extensions.Logging.Abstractions; -using StackExchange.Redis; -using StellaOps.Notify.Models; -using StellaOps.Notify.Queue; -using StellaOps.Notify.Queue.Redis; -using Xunit; - -namespace StellaOps.Notify.Queue.Tests; - -public sealed class RedisNotifyEventQueueTests : IAsyncLifetime -{ - private readonly RedisTestcontainer _redis; - private string? 
_skipReason; - - public RedisNotifyEventQueueTests() - { - var configuration = new RedisTestcontainerConfiguration(); - _redis = new TestcontainersBuilder<RedisTestcontainer>() - .WithDatabase(configuration) - .Build(); - } - - public async Task InitializeAsync() - { - try - { - await _redis.StartAsync(); - } - catch (Exception ex) - { - _skipReason = $"Redis-backed tests skipped: {ex.Message}"; - } - } - - public async Task DisposeAsync() - { - if (_skipReason is not null) - { - return; - } - - await _redis.DisposeAsync().AsTask(); - } - - [Fact] - public async Task Publish_ShouldDeduplicate_ByIdempotencyKey() - { - if (SkipIfUnavailable()) - { - return; - } - - var options = CreateOptions(); - await using var queue = CreateQueue(options); - - var notifyEvent = TestData.CreateEvent(tenant: "tenant-a"); - var message = new NotifyQueueEventMessage(notifyEvent, options.Redis.Streams[0].Stream); - - var first = await queue.PublishAsync(message); - first.Deduplicated.Should().BeFalse(); - - var second = await queue.PublishAsync(message); - second.Deduplicated.Should().BeTrue(); - second.MessageId.Should().Be(first.MessageId); - } - - [Fact] - public async Task Lease_Acknowledge_ShouldRemoveMessage() - { - if (SkipIfUnavailable()) - { - return; - } - - var options = CreateOptions(); - await using var queue = CreateQueue(options); - - var notifyEvent = TestData.CreateEvent(tenant: "tenant-b"); - var message = new NotifyQueueEventMessage( - notifyEvent, - options.Redis.Streams[0].Stream, - traceId: "trace-123", - attributes: new Dictionary<string, string> { { "source", "scanner" } }); - - await queue.PublishAsync(message); - - var leases = await queue.LeaseAsync(new NotifyQueueLeaseRequest("worker-1", 1, TimeSpan.FromSeconds(5))); - leases.Should().ContainSingle(); - - var lease = leases[0]; - lease.Attempt.Should().Be(1); - lease.Message.Event.EventId.Should().Be(notifyEvent.EventId); - lease.TraceId.Should().Be("trace-123"); - lease.Attributes.Should().ContainKey("source").WhoseValue.Should().Be("scanner"); - - await lease.AcknowledgeAsync(); - - var afterAck = await queue.LeaseAsync(new NotifyQueueLeaseRequest("worker-1", 1, TimeSpan.FromSeconds(5))); - afterAck.Should().BeEmpty(); - } - - [Fact] - public async Task Lease_ShouldPreserveOrdering() - { - if (SkipIfUnavailable()) - { - return; - } - - var options = CreateOptions(); - await using var queue = CreateQueue(options); - - var stream = options.Redis.Streams[0].Stream; - var firstEvent = TestData.CreateEvent(); - var secondEvent = TestData.CreateEvent(); - - await queue.PublishAsync(new NotifyQueueEventMessage(firstEvent, stream)); - await queue.PublishAsync(new NotifyQueueEventMessage(secondEvent, stream)); - - var leases = await queue.LeaseAsync(new NotifyQueueLeaseRequest("worker-order", 2, TimeSpan.FromSeconds(5))); - leases.Should().HaveCount(2); - - leases.Select(l => l.Message.Event.EventId) - .Should() - .ContainInOrder(new[] { firstEvent.EventId, secondEvent.EventId }); - } - - [Fact] - public async Task ClaimExpired_ShouldReassignLease() - { - if (SkipIfUnavailable()) - { - return; - } - - var options = CreateOptions(); - await using var queue = CreateQueue(options); - - var notifyEvent = TestData.CreateEvent(); - await queue.PublishAsync(new NotifyQueueEventMessage(notifyEvent, options.Redis.Streams[0].Stream)); - - var leases = await queue.LeaseAsync(new NotifyQueueLeaseRequest("worker-initial", 1, TimeSpan.FromSeconds(1))); - leases.Should().ContainSingle(); - - // Ensure the message has been pending long enough for claim. 
- await Task.Delay(50); - - var claimed = await queue.ClaimExpiredAsync(new NotifyQueueClaimOptions("worker-reclaim", 1, TimeSpan.Zero)); - claimed.Should().ContainSingle(); - - var lease = claimed[0]; - lease.Consumer.Should().Be("worker-reclaim"); - lease.Message.Event.EventId.Should().Be(notifyEvent.EventId); - - await lease.AcknowledgeAsync(); - } - - private RedisNotifyEventQueue CreateQueue(NotifyEventQueueOptions options) - { - return new RedisNotifyEventQueue( - options, - options.Redis, - NullLogger<RedisNotifyEventQueue>.Instance, - TimeProvider.System, - async config => (IConnectionMultiplexer)await ConnectionMultiplexer.ConnectAsync(config).ConfigureAwait(false)); - } - - private NotifyEventQueueOptions CreateOptions() - { - var streamOptions = new NotifyRedisEventStreamOptions - { - Stream = "notify:test:events", - ConsumerGroup = "notify-test-consumers", - IdempotencyKeyPrefix = "notify:test:idemp:", - ApproximateMaxLength = 1024 - }; - - var redisOptions = new NotifyRedisEventQueueOptions - { - ConnectionString = _redis.ConnectionString, - Streams = new List<NotifyRedisEventStreamOptions> { streamOptions } - }; - - return new NotifyEventQueueOptions - { - Transport = NotifyQueueTransportKind.Redis, - DefaultLeaseDuration = TimeSpan.FromSeconds(5), - Redis = redisOptions - }; - } - - private bool SkipIfUnavailable() - => _skipReason is not null; - - private static class TestData - { - public static NotifyEvent CreateEvent(string tenant = "tenant-1") - { - return NotifyEvent.Create( - Guid.NewGuid(), - kind: "scanner.report.ready", - tenant: tenant, - ts: DateTimeOffset.UtcNow, - payload: new JsonObject - { - ["summary"] = "event" - }); - } - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text.Json.Nodes; +using System.Threading; +using System.Threading.Tasks; +using DotNet.Testcontainers.Builders; +using DotNet.Testcontainers.Containers; +using DotNet.Testcontainers.Configurations; +using FluentAssertions; +using Microsoft.Extensions.Logging.Abstractions; +using StackExchange.Redis; +using StellaOps.Notify.Models; +using StellaOps.Notify.Queue; +using StellaOps.Notify.Queue.Redis; +using Xunit; + +namespace StellaOps.Notify.Queue.Tests; + +public sealed class RedisNotifyEventQueueTests : IAsyncLifetime +{ + private readonly RedisTestcontainer _redis; + private string? 
_skipReason; + + public RedisNotifyEventQueueTests() + { + var configuration = new RedisTestcontainerConfiguration(); + _redis = new TestcontainersBuilder<RedisTestcontainer>() + .WithDatabase(configuration) + .Build(); + } + + public async Task InitializeAsync() + { + try + { + await _redis.StartAsync(); + } + catch (Exception ex) + { + _skipReason = $"Redis-backed tests skipped: {ex.Message}"; + } + } + + public async Task DisposeAsync() + { + if (_skipReason is not null) + { + return; + } + + await _redis.DisposeAsync().AsTask(); + } + + [Fact] + public async Task Publish_ShouldDeduplicate_ByIdempotencyKey() + { + if (SkipIfUnavailable()) + { + return; + } + + var options = CreateOptions(); + await using var queue = CreateQueue(options); + + var notifyEvent = TestData.CreateEvent(tenant: "tenant-a"); + var message = new NotifyQueueEventMessage(notifyEvent, options.Redis.Streams[0].Stream); + + var first = await queue.PublishAsync(message); + first.Deduplicated.Should().BeFalse(); + + var second = await queue.PublishAsync(message); + second.Deduplicated.Should().BeTrue(); + second.MessageId.Should().Be(first.MessageId); + } + + [Fact] + public async Task Lease_Acknowledge_ShouldRemoveMessage() + { + if (SkipIfUnavailable()) + { + return; + } + + var options = CreateOptions(); + await using var queue = CreateQueue(options); + + var notifyEvent = TestData.CreateEvent(tenant: "tenant-b"); + var message = new NotifyQueueEventMessage( + notifyEvent, + options.Redis.Streams[0].Stream, + traceId: "trace-123", + attributes: new Dictionary<string, string> { { "source", "scanner" } }); + + await queue.PublishAsync(message); + + var leases = await queue.LeaseAsync(new NotifyQueueLeaseRequest("worker-1", 1, TimeSpan.FromSeconds(5))); + leases.Should().ContainSingle(); + + var lease = leases[0]; + lease.Attempt.Should().Be(1); + lease.Message.Event.EventId.Should().Be(notifyEvent.EventId); + lease.TraceId.Should().Be("trace-123"); + lease.Attributes.Should().ContainKey("source").WhoseValue.Should().Be("scanner"); + + await lease.AcknowledgeAsync(); + + var afterAck = await queue.LeaseAsync(new NotifyQueueLeaseRequest("worker-1", 1, TimeSpan.FromSeconds(5))); + afterAck.Should().BeEmpty(); + } + + [Fact] + public async Task Lease_ShouldPreserveOrdering() + { + if (SkipIfUnavailable()) + { + return; + } + + var options = CreateOptions(); + await using var queue = CreateQueue(options); + + var stream = options.Redis.Streams[0].Stream; + var firstEvent = TestData.CreateEvent(); + var secondEvent = TestData.CreateEvent(); + + await queue.PublishAsync(new NotifyQueueEventMessage(firstEvent, stream)); + await queue.PublishAsync(new NotifyQueueEventMessage(secondEvent, stream)); + + var leases = await queue.LeaseAsync(new NotifyQueueLeaseRequest("worker-order", 2, TimeSpan.FromSeconds(5))); + leases.Should().HaveCount(2); + + leases.Select(l => l.Message.Event.EventId) + .Should() + .ContainInOrder(new[] { firstEvent.EventId, secondEvent.EventId }); + } + + [Fact] + public async Task ClaimExpired_ShouldReassignLease() + { + if (SkipIfUnavailable()) + { + return; + } + + var options = CreateOptions(); + await using var queue = CreateQueue(options); + + var notifyEvent = TestData.CreateEvent(); + await queue.PublishAsync(new NotifyQueueEventMessage(notifyEvent, options.Redis.Streams[0].Stream)); + + var leases = await queue.LeaseAsync(new NotifyQueueLeaseRequest("worker-initial", 1, TimeSpan.FromSeconds(1))); + leases.Should().ContainSingle(); + + // Ensure the message has been pending long enough for claim. 
+ await Task.Delay(50); + + var claimed = await queue.ClaimExpiredAsync(new NotifyQueueClaimOptions("worker-reclaim", 1, TimeSpan.Zero)); + claimed.Should().ContainSingle(); + + var lease = claimed[0]; + lease.Consumer.Should().Be("worker-reclaim"); + lease.Message.Event.EventId.Should().Be(notifyEvent.EventId); + + await lease.AcknowledgeAsync(); + } + + private RedisNotifyEventQueue CreateQueue(NotifyEventQueueOptions options) + { + return new RedisNotifyEventQueue( + options, + options.Redis, + NullLogger<RedisNotifyEventQueue>.Instance, + TimeProvider.System, + async config => (IConnectionMultiplexer)await ConnectionMultiplexer.ConnectAsync(config).ConfigureAwait(false)); + } + + private NotifyEventQueueOptions CreateOptions() + { + var streamOptions = new NotifyRedisEventStreamOptions + { + Stream = "notify:test:events", + ConsumerGroup = "notify-test-consumers", + IdempotencyKeyPrefix = "notify:test:idemp:", + ApproximateMaxLength = 1024 + }; + + var redisOptions = new NotifyRedisEventQueueOptions + { + ConnectionString = _redis.ConnectionString, + Streams = new List<NotifyRedisEventStreamOptions> { streamOptions } + }; + + return new NotifyEventQueueOptions + { + Transport = NotifyQueueTransportKind.Redis, + DefaultLeaseDuration = TimeSpan.FromSeconds(5), + Redis = redisOptions + }; + } + + private bool SkipIfUnavailable() + => _skipReason is not null; + + private static class TestData + { + public static NotifyEvent CreateEvent(string tenant = "tenant-1") + { + return NotifyEvent.Create( + Guid.NewGuid(), + kind: "scanner.report.ready", + tenant: tenant, + ts: DateTimeOffset.UtcNow, + payload: new JsonObject + { + ["summary"] = "event" + }); + } + } +} diff --git a/src/StellaOps.Notify.Queue.Tests/StellaOps.Notify.Queue.Tests.csproj b/src/Notify/__Tests/StellaOps.Notify.Queue.Tests/StellaOps.Notify.Queue.Tests.csproj similarity index 79% rename from src/StellaOps.Notify.Queue.Tests/StellaOps.Notify.Queue.Tests.csproj rename to src/Notify/__Tests/StellaOps.Notify.Queue.Tests/StellaOps.Notify.Queue.Tests.csproj index f4276e38..6a020c90 100644 --- a/src/StellaOps.Notify.Queue.Tests/StellaOps.Notify.Queue.Tests.csproj +++ b/src/Notify/__Tests/StellaOps.Notify.Queue.Tests/StellaOps.Notify.Queue.Tests.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -20,7 +21,7 @@ </PackageReference> </ItemGroup> <ItemGroup> - <ProjectReference Include="..\StellaOps.Notify.Models\StellaOps.Notify.Models.csproj" /> - <ProjectReference Include="..\StellaOps.Notify.Queue\StellaOps.Notify.Queue.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Notify.Models/StellaOps.Notify.Models.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Notify.Queue/StellaOps.Notify.Queue.csproj" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Notify.Storage.Mongo.Tests/AssemblyInfo.cs b/src/Notify/__Tests/StellaOps.Notify.Storage.Mongo.Tests/AssemblyInfo.cs similarity index 100% rename from src/StellaOps.Notify.Storage.Mongo.Tests/AssemblyInfo.cs rename to src/Notify/__Tests/StellaOps.Notify.Storage.Mongo.Tests/AssemblyInfo.cs diff --git a/src/StellaOps.Notify.Storage.Mongo.Tests/GlobalUsings.cs b/src/Notify/__Tests/StellaOps.Notify.Storage.Mongo.Tests/GlobalUsings.cs similarity index 100% rename from src/StellaOps.Notify.Storage.Mongo.Tests/GlobalUsings.cs rename to src/Notify/__Tests/StellaOps.Notify.Storage.Mongo.Tests/GlobalUsings.cs 
diff --git a/src/StellaOps.Notify.Storage.Mongo.Tests/Internal/NotifyMongoMigrationTests.cs b/src/Notify/__Tests/StellaOps.Notify.Storage.Mongo.Tests/Internal/NotifyMongoMigrationTests.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo.Tests/Internal/NotifyMongoMigrationTests.cs
rename to src/Notify/__Tests/StellaOps.Notify.Storage.Mongo.Tests/Internal/NotifyMongoMigrationTests.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyAuditRepositoryTests.cs b/src/Notify/__Tests/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyAuditRepositoryTests.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyAuditRepositoryTests.cs
rename to src/Notify/__Tests/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyAuditRepositoryTests.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyChannelRepositoryTests.cs b/src/Notify/__Tests/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyChannelRepositoryTests.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyChannelRepositoryTests.cs
rename to src/Notify/__Tests/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyChannelRepositoryTests.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyDeliveryRepositoryTests.cs b/src/Notify/__Tests/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyDeliveryRepositoryTests.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyDeliveryRepositoryTests.cs
rename to src/Notify/__Tests/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyDeliveryRepositoryTests.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyDigestRepositoryTests.cs b/src/Notify/__Tests/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyDigestRepositoryTests.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyDigestRepositoryTests.cs
rename to src/Notify/__Tests/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyDigestRepositoryTests.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyLockRepositoryTests.cs b/src/Notify/__Tests/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyLockRepositoryTests.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyLockRepositoryTests.cs
rename to src/Notify/__Tests/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyLockRepositoryTests.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyRuleRepositoryTests.cs b/src/Notify/__Tests/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyRuleRepositoryTests.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyRuleRepositoryTests.cs
rename to src/Notify/__Tests/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyRuleRepositoryTests.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyTemplateRepositoryTests.cs b/src/Notify/__Tests/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyTemplateRepositoryTests.cs
similarity index 100%
rename from src/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyTemplateRepositoryTests.cs
rename to src/Notify/__Tests/StellaOps.Notify.Storage.Mongo.Tests/Repositories/NotifyTemplateRepositoryTests.cs
diff --git a/src/StellaOps.Notify.Storage.Mongo.Tests/Serialization/NotifyChannelDocumentMapperTests.cs
b/src/Notify/__Tests/StellaOps.Notify.Storage.Mongo.Tests/Serialization/NotifyChannelDocumentMapperTests.cs similarity index 100% rename from src/StellaOps.Notify.Storage.Mongo.Tests/Serialization/NotifyChannelDocumentMapperTests.cs rename to src/Notify/__Tests/StellaOps.Notify.Storage.Mongo.Tests/Serialization/NotifyChannelDocumentMapperTests.cs diff --git a/src/StellaOps.Notify.Storage.Mongo.Tests/Serialization/NotifyRuleDocumentMapperTests.cs b/src/Notify/__Tests/StellaOps.Notify.Storage.Mongo.Tests/Serialization/NotifyRuleDocumentMapperTests.cs similarity index 100% rename from src/StellaOps.Notify.Storage.Mongo.Tests/Serialization/NotifyRuleDocumentMapperTests.cs rename to src/Notify/__Tests/StellaOps.Notify.Storage.Mongo.Tests/Serialization/NotifyRuleDocumentMapperTests.cs diff --git a/src/StellaOps.Notify.Storage.Mongo.Tests/Serialization/NotifyTemplateDocumentMapperTests.cs b/src/Notify/__Tests/StellaOps.Notify.Storage.Mongo.Tests/Serialization/NotifyTemplateDocumentMapperTests.cs similarity index 100% rename from src/StellaOps.Notify.Storage.Mongo.Tests/Serialization/NotifyTemplateDocumentMapperTests.cs rename to src/Notify/__Tests/StellaOps.Notify.Storage.Mongo.Tests/Serialization/NotifyTemplateDocumentMapperTests.cs diff --git a/src/StellaOps.Notify.Storage.Mongo.Tests/StellaOps.Notify.Storage.Mongo.Tests.csproj b/src/Notify/__Tests/StellaOps.Notify.Storage.Mongo.Tests/StellaOps.Notify.Storage.Mongo.Tests.csproj similarity index 75% rename from src/StellaOps.Notify.Storage.Mongo.Tests/StellaOps.Notify.Storage.Mongo.Tests.csproj rename to src/Notify/__Tests/StellaOps.Notify.Storage.Mongo.Tests/StellaOps.Notify.Storage.Mongo.Tests.csproj index 07da5734..26c85a17 100644 --- a/src/StellaOps.Notify.Storage.Mongo.Tests/StellaOps.Notify.Storage.Mongo.Tests.csproj +++ b/src/Notify/__Tests/StellaOps.Notify.Storage.Mongo.Tests/StellaOps.Notify.Storage.Mongo.Tests.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -7,8 +8,8 @@ </PropertyGroup> <ItemGroup> - <ProjectReference Include="../StellaOps.Notify.Models/StellaOps.Notify.Models.csproj" /> - <ProjectReference Include="../StellaOps.Notify.Storage.Mongo/StellaOps.Notify.Storage.Mongo.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Notify.Models/StellaOps.Notify.Models.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Notify.Storage.Mongo/StellaOps.Notify.Storage.Mongo.csproj" /> </ItemGroup> <ItemGroup> @@ -25,4 +26,4 @@ <CopyToOutputDirectory>Always</CopyToOutputDirectory> </None> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Notify.WebService.Tests/CrudEndpointsTests.cs b/src/Notify/__Tests/StellaOps.Notify.WebService.Tests/CrudEndpointsTests.cs similarity index 100% rename from src/StellaOps.Notify.WebService.Tests/CrudEndpointsTests.cs rename to src/Notify/__Tests/StellaOps.Notify.WebService.Tests/CrudEndpointsTests.cs diff --git a/src/StellaOps.Notify.WebService.Tests/NormalizeEndpointsTests.cs b/src/Notify/__Tests/StellaOps.Notify.WebService.Tests/NormalizeEndpointsTests.cs similarity index 100% rename from src/StellaOps.Notify.WebService.Tests/NormalizeEndpointsTests.cs rename to src/Notify/__Tests/StellaOps.Notify.WebService.Tests/NormalizeEndpointsTests.cs diff --git a/src/StellaOps.Notify.WebService.Tests/StellaOps.Notify.WebService.Tests.csproj 
b/src/Notify/__Tests/StellaOps.Notify.WebService.Tests/StellaOps.Notify.WebService.Tests.csproj similarity index 59% rename from src/StellaOps.Notify.WebService.Tests/StellaOps.Notify.WebService.Tests.csproj rename to src/Notify/__Tests/StellaOps.Notify.WebService.Tests/StellaOps.Notify.WebService.Tests.csproj index 0ee02931..18e0788d 100644 --- a/src/StellaOps.Notify.WebService.Tests/StellaOps.Notify.WebService.Tests.csproj +++ b/src/Notify/__Tests/StellaOps.Notify.WebService.Tests/StellaOps.Notify.WebService.Tests.csproj @@ -1,18 +1,19 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - - <ItemGroup> - <ProjectReference Include="../StellaOps.Notify.Models/StellaOps.Notify.Models.csproj" /> - <ProjectReference Include="../StellaOps.Notify.WebService/StellaOps.Notify.WebService.csproj" /> - </ItemGroup> - - <ItemGroup> - <None Include="../../docs/notify/samples/*.json"> - <CopyToOutputDirectory>Always</CopyToOutputDirectory> - </None> - </ItemGroup> -</Project> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Notify.Models/StellaOps.Notify.Models.csproj" /> + <ProjectReference Include="../../StellaOps.Notify.WebService/StellaOps.Notify.WebService.csproj" /> + </ItemGroup> + + <ItemGroup> + <None Include="../../docs/notify/samples/*.json"> + <CopyToOutputDirectory>Always</CopyToOutputDirectory> + </None> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Notify.Worker.Tests/NotifyEventLeaseProcessorTests.cs b/src/Notify/__Tests/StellaOps.Notify.Worker.Tests/NotifyEventLeaseProcessorTests.cs similarity index 97% rename from src/StellaOps.Notify.Worker.Tests/NotifyEventLeaseProcessorTests.cs rename to src/Notify/__Tests/StellaOps.Notify.Worker.Tests/NotifyEventLeaseProcessorTests.cs index 3c536ab2..ea0f3444 100644 --- a/src/StellaOps.Notify.Worker.Tests/NotifyEventLeaseProcessorTests.cs +++ b/src/Notify/__Tests/StellaOps.Notify.Worker.Tests/NotifyEventLeaseProcessorTests.cs @@ -1,167 +1,167 @@ -using System; -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; -using FluentAssertions; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.Extensions.Options; -using StellaOps.Notify.Models; -using StellaOps.Notify.Queue; -using StellaOps.Notify.Worker; -using StellaOps.Notify.Worker.Handlers; -using StellaOps.Notify.Worker.Processing; -using Xunit; - -namespace StellaOps.Notify.Worker.Tests; - -public sealed class NotifyEventLeaseProcessorTests -{ - [Fact] - public async Task ProcessOnce_ShouldAcknowledgeSuccessfulLease() - { - var lease = new FakeLease(); - var queue = new FakeEventQueue(lease); - var handler = new TestHandler(); - var options = Options.Create(new NotifyWorkerOptions { LeaseBatchSize = 1, LeaseDuration = TimeSpan.FromSeconds(5) }); - var processor = new NotifyEventLeaseProcessor(queue, handler, options, NullLogger<NotifyEventLeaseProcessor>.Instance, TimeProvider.System); - - var processed = await processor.ProcessOnceAsync(CancellationToken.None); - - processed.Should().Be(1); - lease.AcknowledgeCount.Should().Be(1); - lease.ReleaseCount.Should().Be(0); - } - - [Fact] - public async Task 
ProcessOnce_ShouldRetryOnHandlerFailure() - { - var lease = new FakeLease(); - var queue = new FakeEventQueue(lease); - var handler = new TestHandler(shouldThrow: true); - var options = Options.Create(new NotifyWorkerOptions { LeaseBatchSize = 1, LeaseDuration = TimeSpan.FromSeconds(5) }); - var processor = new NotifyEventLeaseProcessor(queue, handler, options, NullLogger<NotifyEventLeaseProcessor>.Instance, TimeProvider.System); - - var processed = await processor.ProcessOnceAsync(CancellationToken.None); - - processed.Should().Be(1); - lease.AcknowledgeCount.Should().Be(0); - lease.ReleaseCount.Should().Be(1); - lease.LastDisposition.Should().Be(NotifyQueueReleaseDisposition.Retry); - } - - private sealed class FakeEventQueue : INotifyEventQueue - { - private readonly Queue<INotifyQueueLease<NotifyQueueEventMessage>> _leases; - - public FakeEventQueue(params INotifyQueueLease<NotifyQueueEventMessage>[] leases) - { - _leases = new Queue<INotifyQueueLease<NotifyQueueEventMessage>>(leases); - } - - public ValueTask<NotifyQueueEnqueueResult> PublishAsync(NotifyQueueEventMessage message, CancellationToken cancellationToken = default) - => throw new NotSupportedException(); - - public ValueTask<IReadOnlyList<INotifyQueueLease<NotifyQueueEventMessage>>> LeaseAsync(NotifyQueueLeaseRequest request, CancellationToken cancellationToken = default) - { - if (_leases.Count == 0) - { - return ValueTask.FromResult<IReadOnlyList<INotifyQueueLease<NotifyQueueEventMessage>>>(Array.Empty<INotifyQueueLease<NotifyQueueEventMessage>>()); - } - - return ValueTask.FromResult<IReadOnlyList<INotifyQueueLease<NotifyQueueEventMessage>>>(new[] { _leases.Dequeue() }); - } - - public ValueTask<IReadOnlyList<INotifyQueueLease<NotifyQueueEventMessage>>> ClaimExpiredAsync(NotifyQueueClaimOptions options, CancellationToken cancellationToken = default) - => ValueTask.FromResult<IReadOnlyList<INotifyQueueLease<NotifyQueueEventMessage>>>(Array.Empty<INotifyQueueLease<NotifyQueueEventMessage>>()); - } - - private sealed class FakeLease : INotifyQueueLease<NotifyQueueEventMessage> - { - private readonly NotifyQueueEventMessage _message; - - public FakeLease() - { - var notifyEvent = NotifyEvent.Create( - Guid.NewGuid(), - kind: "test.event", - tenant: "tenant-1", - ts: DateTimeOffset.UtcNow, - payload: null); - - _message = new NotifyQueueEventMessage(notifyEvent, "notify:events", traceId: "trace-123"); - } - - public string MessageId { get; } = Guid.NewGuid().ToString("n"); - - public int Attempt { get; internal set; } = 1; - - public DateTimeOffset EnqueuedAt { get; } = DateTimeOffset.UtcNow; - - public DateTimeOffset LeaseExpiresAt { get; private set; } = DateTimeOffset.UtcNow.AddSeconds(30); - - public string Consumer { get; } = "worker-1"; - - public string Stream => _message.Stream; - - public string TenantId => _message.TenantId; - - public string? PartitionKey => _message.PartitionKey; - - public string IdempotencyKey => _message.IdempotencyKey; - - public string? TraceId => _message.TraceId; - - public IReadOnlyDictionary<string, string> Attributes => _message.Attributes; - - public NotifyQueueEventMessage Message => _message; - - public int AcknowledgeCount { get; private set; } - - public int ReleaseCount { get; private set; } - - public NotifyQueueReleaseDisposition? 
LastDisposition { get; private set; } - - public Task AcknowledgeAsync(CancellationToken cancellationToken = default) - { - AcknowledgeCount++; - return Task.CompletedTask; - } - - public Task RenewAsync(TimeSpan leaseDuration, CancellationToken cancellationToken = default) - { - LeaseExpiresAt = DateTimeOffset.UtcNow.Add(leaseDuration); - return Task.CompletedTask; - } - - public Task ReleaseAsync(NotifyQueueReleaseDisposition disposition, CancellationToken cancellationToken = default) - { - LastDisposition = disposition; - ReleaseCount++; - Attempt++; - return Task.CompletedTask; - } - - public Task DeadLetterAsync(string reason, CancellationToken cancellationToken = default) - => Task.CompletedTask; - } - - private sealed class TestHandler : INotifyEventHandler - { - private readonly bool _shouldThrow; - - public TestHandler(bool shouldThrow = false) - { - _shouldThrow = shouldThrow; - } - - public Task HandleAsync(NotifyQueueEventMessage message, CancellationToken cancellationToken) - { - if (_shouldThrow) - { - throw new InvalidOperationException("handler failure"); - } - - return Task.CompletedTask; - } - } -} +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using FluentAssertions; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using StellaOps.Notify.Models; +using StellaOps.Notify.Queue; +using StellaOps.Notify.Worker; +using StellaOps.Notify.Worker.Handlers; +using StellaOps.Notify.Worker.Processing; +using Xunit; + +namespace StellaOps.Notify.Worker.Tests; + +public sealed class NotifyEventLeaseProcessorTests +{ + [Fact] + public async Task ProcessOnce_ShouldAcknowledgeSuccessfulLease() + { + var lease = new FakeLease(); + var queue = new FakeEventQueue(lease); + var handler = new TestHandler(); + var options = Options.Create(new NotifyWorkerOptions { LeaseBatchSize = 1, LeaseDuration = TimeSpan.FromSeconds(5) }); + var processor = new NotifyEventLeaseProcessor(queue, handler, options, NullLogger<NotifyEventLeaseProcessor>.Instance, TimeProvider.System); + + var processed = await processor.ProcessOnceAsync(CancellationToken.None); + + processed.Should().Be(1); + lease.AcknowledgeCount.Should().Be(1); + lease.ReleaseCount.Should().Be(0); + } + + [Fact] + public async Task ProcessOnce_ShouldRetryOnHandlerFailure() + { + var lease = new FakeLease(); + var queue = new FakeEventQueue(lease); + var handler = new TestHandler(shouldThrow: true); + var options = Options.Create(new NotifyWorkerOptions { LeaseBatchSize = 1, LeaseDuration = TimeSpan.FromSeconds(5) }); + var processor = new NotifyEventLeaseProcessor(queue, handler, options, NullLogger<NotifyEventLeaseProcessor>.Instance, TimeProvider.System); + + var processed = await processor.ProcessOnceAsync(CancellationToken.None); + + processed.Should().Be(1); + lease.AcknowledgeCount.Should().Be(0); + lease.ReleaseCount.Should().Be(1); + lease.LastDisposition.Should().Be(NotifyQueueReleaseDisposition.Retry); + } + + private sealed class FakeEventQueue : INotifyEventQueue + { + private readonly Queue<INotifyQueueLease<NotifyQueueEventMessage>> _leases; + + public FakeEventQueue(params INotifyQueueLease<NotifyQueueEventMessage>[] leases) + { + _leases = new Queue<INotifyQueueLease<NotifyQueueEventMessage>>(leases); + } + + public ValueTask<NotifyQueueEnqueueResult> PublishAsync(NotifyQueueEventMessage message, CancellationToken cancellationToken = default) + => throw new NotSupportedException(); + + public 
ValueTask<IReadOnlyList<INotifyQueueLease<NotifyQueueEventMessage>>> LeaseAsync(NotifyQueueLeaseRequest request, CancellationToken cancellationToken = default) + { + if (_leases.Count == 0) + { + return ValueTask.FromResult<IReadOnlyList<INotifyQueueLease<NotifyQueueEventMessage>>>(Array.Empty<INotifyQueueLease<NotifyQueueEventMessage>>()); + } + + return ValueTask.FromResult<IReadOnlyList<INotifyQueueLease<NotifyQueueEventMessage>>>(new[] { _leases.Dequeue() }); + } + + public ValueTask<IReadOnlyList<INotifyQueueLease<NotifyQueueEventMessage>>> ClaimExpiredAsync(NotifyQueueClaimOptions options, CancellationToken cancellationToken = default) + => ValueTask.FromResult<IReadOnlyList<INotifyQueueLease<NotifyQueueEventMessage>>>(Array.Empty<INotifyQueueLease<NotifyQueueEventMessage>>()); + } + + private sealed class FakeLease : INotifyQueueLease<NotifyQueueEventMessage> + { + private readonly NotifyQueueEventMessage _message; + + public FakeLease() + { + var notifyEvent = NotifyEvent.Create( + Guid.NewGuid(), + kind: "test.event", + tenant: "tenant-1", + ts: DateTimeOffset.UtcNow, + payload: null); + + _message = new NotifyQueueEventMessage(notifyEvent, "notify:events", traceId: "trace-123"); + } + + public string MessageId { get; } = Guid.NewGuid().ToString("n"); + + public int Attempt { get; internal set; } = 1; + + public DateTimeOffset EnqueuedAt { get; } = DateTimeOffset.UtcNow; + + public DateTimeOffset LeaseExpiresAt { get; private set; } = DateTimeOffset.UtcNow.AddSeconds(30); + + public string Consumer { get; } = "worker-1"; + + public string Stream => _message.Stream; + + public string TenantId => _message.TenantId; + + public string? PartitionKey => _message.PartitionKey; + + public string IdempotencyKey => _message.IdempotencyKey; + + public string? TraceId => _message.TraceId; + + public IReadOnlyDictionary<string, string> Attributes => _message.Attributes; + + public NotifyQueueEventMessage Message => _message; + + public int AcknowledgeCount { get; private set; } + + public int ReleaseCount { get; private set; } + + public NotifyQueueReleaseDisposition? 
LastDisposition { get; private set; } + + public Task AcknowledgeAsync(CancellationToken cancellationToken = default) + { + AcknowledgeCount++; + return Task.CompletedTask; + } + + public Task RenewAsync(TimeSpan leaseDuration, CancellationToken cancellationToken = default) + { + LeaseExpiresAt = DateTimeOffset.UtcNow.Add(leaseDuration); + return Task.CompletedTask; + } + + public Task ReleaseAsync(NotifyQueueReleaseDisposition disposition, CancellationToken cancellationToken = default) + { + LastDisposition = disposition; + ReleaseCount++; + Attempt++; + return Task.CompletedTask; + } + + public Task DeadLetterAsync(string reason, CancellationToken cancellationToken = default) + => Task.CompletedTask; + } + + private sealed class TestHandler : INotifyEventHandler + { + private readonly bool _shouldThrow; + + public TestHandler(bool shouldThrow = false) + { + _shouldThrow = shouldThrow; + } + + public Task HandleAsync(NotifyQueueEventMessage message, CancellationToken cancellationToken) + { + if (_shouldThrow) + { + throw new InvalidOperationException("handler failure"); + } + + return Task.CompletedTask; + } + } +} diff --git a/src/StellaOps.Notify.Worker.Tests/StellaOps.Notify.Worker.Tests.csproj b/src/Notify/__Tests/StellaOps.Notify.Worker.Tests/StellaOps.Notify.Worker.Tests.csproj similarity index 72% rename from src/StellaOps.Notify.Worker.Tests/StellaOps.Notify.Worker.Tests.csproj rename to src/Notify/__Tests/StellaOps.Notify.Worker.Tests/StellaOps.Notify.Worker.Tests.csproj index cb8ed36c..89bdbbd2 100644 --- a/src/StellaOps.Notify.Worker.Tests/StellaOps.Notify.Worker.Tests.csproj +++ b/src/Notify/__Tests/StellaOps.Notify.Worker.Tests/StellaOps.Notify.Worker.Tests.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -19,8 +20,8 @@ <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> </ItemGroup> <ItemGroup> - <ProjectReference Include="..\StellaOps.Notify.Worker\StellaOps.Notify.Worker.csproj" /> - <ProjectReference Include="..\StellaOps.Notify.Queue\StellaOps.Notify.Queue.csproj" /> - <ProjectReference Include="..\StellaOps.Notify.Models\StellaOps.Notify.Models.csproj" /> + <ProjectReference Include="../../StellaOps.Notify.Worker/StellaOps.Notify.Worker.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Notify.Queue/StellaOps.Notify.Queue.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Notify.Models/StellaOps.Notify.Models.csproj" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Orchestrator.WorkerSdk.Go/AGENTS.md b/src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Go/AGENTS.md similarity index 98% rename from src/StellaOps.Orchestrator.WorkerSdk.Go/AGENTS.md rename to src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Go/AGENTS.md index 9b7a347c..2a11005e 100644 --- a/src/StellaOps.Orchestrator.WorkerSdk.Go/AGENTS.md +++ b/src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Go/AGENTS.md @@ -1,10 +1,10 @@ -# Worker SDK (Go) — Agent Charter - -## Mission -Provide the official Go SDK for StellaOps orchestrated workers. Implement claim/heartbeat/progress clients, artifact publishing, error classification, and guardrails so Concelier, Excititor, SBOM, Policy, and other teams can integrate with the orchestrator deterministically. - -## Responsibilities -- Maintain idiomatic Go client with configurable transports, retries, and tenant-aware headers. 
-- Surface structured metrics/logging hooks mirroring orchestrator expectations. -- Enforce idempotency token usage, artifact checksum publication, and backfill/watermark handshakes. -- Coordinate release cadence with Worker Python SDK, orchestrator service, DevOps packaging, and Offline Kit requirements. +# Worker SDK (Go) — Agent Charter + +## Mission +Provide the official Go SDK for StellaOps orchestrated workers. Implement claim/heartbeat/progress clients, artifact publishing, error classification, and guardrails so Concelier, Excititor, SBOM, Policy, and other teams can integrate with the orchestrator deterministically. + +## Responsibilities +- Maintain idiomatic Go client with configurable transports, retries, and tenant-aware headers. +- Surface structured metrics/logging hooks mirroring orchestrator expectations. +- Enforce idempotency token usage, artifact checksum publication, and backfill/watermark handshakes. +- Coordinate release cadence with Worker Python SDK, orchestrator service, DevOps packaging, and Offline Kit requirements. diff --git a/src/StellaOps.Orchestrator.WorkerSdk.Go/TASKS.md b/src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Go/TASKS.md similarity index 99% rename from src/StellaOps.Orchestrator.WorkerSdk.Go/TASKS.md rename to src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Go/TASKS.md index 90af4187..082013fd 100644 --- a/src/StellaOps.Orchestrator.WorkerSdk.Go/TASKS.md +++ b/src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Go/TASKS.md @@ -1,9 +1,9 @@ -# Worker SDK (Go) Task Board — Epic 9 - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| WORKER-GO-32-001 | TODO | Worker SDK Guild | ORCH-SVC-32-005 | Bootstrap Go SDK project with configuration binding, auth headers, job claim/acknowledge client, and smoke sample. | `go test ./...` green; sample worker claims job against local orchestrator; README outlines usage; compliance checklist recorded. | -| WORKER-GO-32-002 | TODO | Worker SDK Guild | WORKER-GO-32-001 | Add heartbeat/progress helpers, structured logging hooks, Prometheus metrics, and jittered retry defaults. | Heartbeat/progress methods documented; metrics exported; integration test verifies heartbeat timeout handling; lint/staticcheck clean. | -| WORKER-GO-33-001 | TODO | Worker SDK Guild | WORKER-GO-32-002, ORCH-SVC-33-003 | Implement artifact publish helpers (object storage client, checksum hashing, metadata payload) and idempotency guard. | Artifact upload API tested with fake object store; idempotency violations return typed error; docs include sample. | -| WORKER-GO-33-002 | TODO | Worker SDK Guild | WORKER-GO-32-002 | Provide error classification/retry helper, exponential backoff controls, and structured failure reporting to orchestrator. | Error helper maps to orchestrator error classes; retries configurable; integration test covers HTTP 5xx, validation errors; docs updated. | -| WORKER-GO-34-001 | TODO | Worker SDK Guild | WORKER-GO-33-001, ORCH-SVC-34-001 | Add backfill range execution helpers, watermark handshake utilities, and artifact dedupe verification for backfills. | Backfill helper handles window chunks; watermark handshake verified in integration test; dedupe proof recorded; offline kit sample updated. 
| +# Worker SDK (Go) Task Board — Epic 9 + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| WORKER-GO-32-001 | TODO | Worker SDK Guild | ORCH-SVC-32-005 | Bootstrap Go SDK project with configuration binding, auth headers, job claim/acknowledge client, and smoke sample. | `go test ./...` green; sample worker claims job against local orchestrator; README outlines usage; compliance checklist recorded. | +| WORKER-GO-32-002 | TODO | Worker SDK Guild | WORKER-GO-32-001 | Add heartbeat/progress helpers, structured logging hooks, Prometheus metrics, and jittered retry defaults. | Heartbeat/progress methods documented; metrics exported; integration test verifies heartbeat timeout handling; lint/staticcheck clean. | +| WORKER-GO-33-001 | TODO | Worker SDK Guild | WORKER-GO-32-002, ORCH-SVC-33-003 | Implement artifact publish helpers (object storage client, checksum hashing, metadata payload) and idempotency guard. | Artifact upload API tested with fake object store; idempotency violations return typed error; docs include sample. | +| WORKER-GO-33-002 | TODO | Worker SDK Guild | WORKER-GO-32-002 | Provide error classification/retry helper, exponential backoff controls, and structured failure reporting to orchestrator. | Error helper maps to orchestrator error classes; retries configurable; integration test covers HTTP 5xx, validation errors; docs updated. | +| WORKER-GO-34-001 | TODO | Worker SDK Guild | WORKER-GO-33-001, ORCH-SVC-34-001 | Add backfill range execution helpers, watermark handshake utilities, and artifact dedupe verification for backfills. | Backfill helper handles window chunks; watermark handshake verified in integration test; dedupe proof recorded; offline kit sample updated. | diff --git a/src/StellaOps.Orchestrator.WorkerSdk.Python/AGENTS.md b/src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python/AGENTS.md similarity index 98% rename from src/StellaOps.Orchestrator.WorkerSdk.Python/AGENTS.md rename to src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python/AGENTS.md index 8b5b8b23..1b473159 100644 --- a/src/StellaOps.Orchestrator.WorkerSdk.Python/AGENTS.md +++ b/src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python/AGENTS.md @@ -1,10 +1,10 @@ -# Worker SDK (Python) — Agent Charter - -## Mission -Publish the Python client library for StellaOps orchestrated workers. Provide asyncio-friendly claim/heartbeat/progress APIs, artifact publishing helpers, error handling, and observability hooks aligned with Epic 9 requirements and the imposed rule for cross-component parity. - -## Responsibilities -- Maintain typed client (httpx/async) with retry/backoff primitives mirroring orchestrator expectations. -- Surface structured metrics/logging instrumentation and pluggable exporters. -- Enforce idempotency token usage, artifact checksum publication, and watermark/backfill helpers. -- Coordinate versioning with Go SDK, orchestrator service contracts, DevOps packaging, and Offline Kit deliverables. +# Worker SDK (Python) — Agent Charter + +## Mission +Publish the Python client library for StellaOps orchestrated workers. Provide asyncio-friendly claim/heartbeat/progress APIs, artifact publishing helpers, error handling, and observability hooks aligned with Epic 9 requirements and the imposed rule for cross-component parity. + +## Responsibilities +- Maintain typed client (httpx/async) with retry/backoff primitives mirroring orchestrator expectations. 
+- Surface structured metrics/logging instrumentation and pluggable exporters. +- Enforce idempotency token usage, artifact checksum publication, and watermark/backfill helpers. +- Coordinate versioning with Go SDK, orchestrator service contracts, DevOps packaging, and Offline Kit deliverables. diff --git a/src/StellaOps.Orchestrator.WorkerSdk.Python/TASKS.md b/src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python/TASKS.md similarity index 99% rename from src/StellaOps.Orchestrator.WorkerSdk.Python/TASKS.md rename to src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python/TASKS.md index c9c6afe4..e6f832fe 100644 --- a/src/StellaOps.Orchestrator.WorkerSdk.Python/TASKS.md +++ b/src/Orchestrator/StellaOps.Orchestrator.WorkerSdk.Python/TASKS.md @@ -1,9 +1,9 @@ -# Worker SDK (Python) Task Board — Epic 9 - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| WORKER-PY-32-001 | TODO | Worker SDK Guild | ORCH-SVC-32-005 | Bootstrap asyncio-based Python SDK (config, auth headers, job claim/ack) plus sample worker script. | `pytest` suite passes; sample worker claims job from orchestrator; README documents install/offline story; type checking via `pyright` clean. | -| WORKER-PY-32-002 | TODO | Worker SDK Guild | WORKER-PY-32-001 | Implement heartbeat/progress helpers with structured logging, metrics exporter, and cancellation-safe retries. | Heartbeat/progress API documented; metrics exported via Prometheus/client; cancellation tests cover timeouts; lint/formatters pass. | -| WORKER-PY-33-001 | TODO | Worker SDK Guild | WORKER-PY-32-002, ORCH-SVC-33-003 | Add artifact publish/idempotency helpers (object storage adapters, checksum hashing, metadata payload) for Python workers. | Artifact helper tested with fake storage; idempotency enforcement verified; docs updated with sample. | -| WORKER-PY-33-002 | TODO | Worker SDK Guild | WORKER-PY-32-002 | Provide error classification/backoff helper mapping to orchestrator codes, including jittered retries and structured failure reports. | Error helper returns typed exceptions; retry config documented; integration test covers HTTP 5xx/validation errors; metrics include failure counters. | -| WORKER-PY-34-001 | TODO | Worker SDK Guild | WORKER-PY-33-001, ORCH-SVC-34-001 | Implement backfill range iteration, watermark handshake, and artifact dedupe verification utilities for Python workers. | Backfill helper exercised in integration tests; watermark handshake recorded; dedupe proof logged; offline kit sample updated. | +# Worker SDK (Python) Task Board — Epic 9 + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| WORKER-PY-32-001 | TODO | Worker SDK Guild | ORCH-SVC-32-005 | Bootstrap asyncio-based Python SDK (config, auth headers, job claim/ack) plus sample worker script. | `pytest` suite passes; sample worker claims job from orchestrator; README documents install/offline story; type checking via `pyright` clean. | +| WORKER-PY-32-002 | TODO | Worker SDK Guild | WORKER-PY-32-001 | Implement heartbeat/progress helpers with structured logging, metrics exporter, and cancellation-safe retries. | Heartbeat/progress API documented; metrics exported via Prometheus/client; cancellation tests cover timeouts; lint/formatters pass. 
| +| WORKER-PY-33-001 | TODO | Worker SDK Guild | WORKER-PY-32-002, ORCH-SVC-33-003 | Add artifact publish/idempotency helpers (object storage adapters, checksum hashing, metadata payload) for Python workers. | Artifact helper tested with fake storage; idempotency enforcement verified; docs updated with sample. | +| WORKER-PY-33-002 | TODO | Worker SDK Guild | WORKER-PY-32-002 | Provide error classification/backoff helper mapping to orchestrator codes, including jittered retries and structured failure reports. | Error helper returns typed exceptions; retry config documented; integration test covers HTTP 5xx/validation errors; metrics include failure counters. | +| WORKER-PY-34-001 | TODO | Worker SDK Guild | WORKER-PY-33-001, ORCH-SVC-34-001 | Implement backfill range iteration, watermark handshake, and artifact dedupe verification utilities for Python workers. | Backfill helper exercised in integration tests; watermark handshake recorded; dedupe proof logged; offline kit sample updated. | diff --git a/src/Orchestrator/StellaOps.Orchestrator.sln b/src/Orchestrator/StellaOps.Orchestrator.sln new file mode 100644 index 00000000..d96cf24d --- /dev/null +++ b/src/Orchestrator/StellaOps.Orchestrator.sln @@ -0,0 +1,99 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.0.31903.59 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "StellaOps.Orchestrator", "StellaOps.Orchestrator", "{CACA4BEF-DC71-1F8E-2E95-ED9AA42CFA43}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Orchestrator.Core", "StellaOps.Orchestrator\StellaOps.Orchestrator.Core\StellaOps.Orchestrator.Core.csproj", "{F849C2C5-EF3E-42F3-B566-137F0D2CA54D}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Orchestrator.Infrastructure", "StellaOps.Orchestrator\StellaOps.Orchestrator.Infrastructure\StellaOps.Orchestrator.Infrastructure.csproj", "{86B08F03-A83A-42EB-AE05-02A41C4F2F3D}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Orchestrator.Tests", "StellaOps.Orchestrator\StellaOps.Orchestrator.Tests\StellaOps.Orchestrator.Tests.csproj", "{07A8428B-CA89-4117-9793-407D9E9D20BA}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Orchestrator.WebService", "StellaOps.Orchestrator\StellaOps.Orchestrator.WebService\StellaOps.Orchestrator.WebService.csproj", "{44E7A3F2-364D-4E17-BE82-6E57DB5B8DAB}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Orchestrator.Worker", "StellaOps.Orchestrator\StellaOps.Orchestrator.Worker\StellaOps.Orchestrator.Worker.csproj", "{A18EE274-24E5-4738-81B2-AEE51CBDF237}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|Any CPU = Release|Any CPU + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {F849C2C5-EF3E-42F3-B566-137F0D2CA54D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {F849C2C5-EF3E-42F3-B566-137F0D2CA54D}.Debug|Any CPU.Build.0 = Debug|Any CPU + {F849C2C5-EF3E-42F3-B566-137F0D2CA54D}.Debug|x64.ActiveCfg = Debug|Any CPU + {F849C2C5-EF3E-42F3-B566-137F0D2CA54D}.Debug|x64.Build.0 = Debug|Any CPU + {F849C2C5-EF3E-42F3-B566-137F0D2CA54D}.Debug|x86.ActiveCfg = Debug|Any CPU + {F849C2C5-EF3E-42F3-B566-137F0D2CA54D}.Debug|x86.Build.0 = Debug|Any CPU + 
{F849C2C5-EF3E-42F3-B566-137F0D2CA54D}.Release|Any CPU.ActiveCfg = Release|Any CPU + {F849C2C5-EF3E-42F3-B566-137F0D2CA54D}.Release|Any CPU.Build.0 = Release|Any CPU + {F849C2C5-EF3E-42F3-B566-137F0D2CA54D}.Release|x64.ActiveCfg = Release|Any CPU + {F849C2C5-EF3E-42F3-B566-137F0D2CA54D}.Release|x64.Build.0 = Release|Any CPU + {F849C2C5-EF3E-42F3-B566-137F0D2CA54D}.Release|x86.ActiveCfg = Release|Any CPU + {F849C2C5-EF3E-42F3-B566-137F0D2CA54D}.Release|x86.Build.0 = Release|Any CPU + {86B08F03-A83A-42EB-AE05-02A41C4F2F3D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {86B08F03-A83A-42EB-AE05-02A41C4F2F3D}.Debug|Any CPU.Build.0 = Debug|Any CPU + {86B08F03-A83A-42EB-AE05-02A41C4F2F3D}.Debug|x64.ActiveCfg = Debug|Any CPU + {86B08F03-A83A-42EB-AE05-02A41C4F2F3D}.Debug|x64.Build.0 = Debug|Any CPU + {86B08F03-A83A-42EB-AE05-02A41C4F2F3D}.Debug|x86.ActiveCfg = Debug|Any CPU + {86B08F03-A83A-42EB-AE05-02A41C4F2F3D}.Debug|x86.Build.0 = Debug|Any CPU + {86B08F03-A83A-42EB-AE05-02A41C4F2F3D}.Release|Any CPU.ActiveCfg = Release|Any CPU + {86B08F03-A83A-42EB-AE05-02A41C4F2F3D}.Release|Any CPU.Build.0 = Release|Any CPU + {86B08F03-A83A-42EB-AE05-02A41C4F2F3D}.Release|x64.ActiveCfg = Release|Any CPU + {86B08F03-A83A-42EB-AE05-02A41C4F2F3D}.Release|x64.Build.0 = Release|Any CPU + {86B08F03-A83A-42EB-AE05-02A41C4F2F3D}.Release|x86.ActiveCfg = Release|Any CPU + {86B08F03-A83A-42EB-AE05-02A41C4F2F3D}.Release|x86.Build.0 = Release|Any CPU + {07A8428B-CA89-4117-9793-407D9E9D20BA}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {07A8428B-CA89-4117-9793-407D9E9D20BA}.Debug|Any CPU.Build.0 = Debug|Any CPU + {07A8428B-CA89-4117-9793-407D9E9D20BA}.Debug|x64.ActiveCfg = Debug|Any CPU + {07A8428B-CA89-4117-9793-407D9E9D20BA}.Debug|x64.Build.0 = Debug|Any CPU + {07A8428B-CA89-4117-9793-407D9E9D20BA}.Debug|x86.ActiveCfg = Debug|Any CPU + {07A8428B-CA89-4117-9793-407D9E9D20BA}.Debug|x86.Build.0 = Debug|Any CPU + {07A8428B-CA89-4117-9793-407D9E9D20BA}.Release|Any CPU.ActiveCfg = Release|Any CPU + {07A8428B-CA89-4117-9793-407D9E9D20BA}.Release|Any CPU.Build.0 = Release|Any CPU + {07A8428B-CA89-4117-9793-407D9E9D20BA}.Release|x64.ActiveCfg = Release|Any CPU + {07A8428B-CA89-4117-9793-407D9E9D20BA}.Release|x64.Build.0 = Release|Any CPU + {07A8428B-CA89-4117-9793-407D9E9D20BA}.Release|x86.ActiveCfg = Release|Any CPU + {07A8428B-CA89-4117-9793-407D9E9D20BA}.Release|x86.Build.0 = Release|Any CPU + {44E7A3F2-364D-4E17-BE82-6E57DB5B8DAB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {44E7A3F2-364D-4E17-BE82-6E57DB5B8DAB}.Debug|Any CPU.Build.0 = Debug|Any CPU + {44E7A3F2-364D-4E17-BE82-6E57DB5B8DAB}.Debug|x64.ActiveCfg = Debug|Any CPU + {44E7A3F2-364D-4E17-BE82-6E57DB5B8DAB}.Debug|x64.Build.0 = Debug|Any CPU + {44E7A3F2-364D-4E17-BE82-6E57DB5B8DAB}.Debug|x86.ActiveCfg = Debug|Any CPU + {44E7A3F2-364D-4E17-BE82-6E57DB5B8DAB}.Debug|x86.Build.0 = Debug|Any CPU + {44E7A3F2-364D-4E17-BE82-6E57DB5B8DAB}.Release|Any CPU.ActiveCfg = Release|Any CPU + {44E7A3F2-364D-4E17-BE82-6E57DB5B8DAB}.Release|Any CPU.Build.0 = Release|Any CPU + {44E7A3F2-364D-4E17-BE82-6E57DB5B8DAB}.Release|x64.ActiveCfg = Release|Any CPU + {44E7A3F2-364D-4E17-BE82-6E57DB5B8DAB}.Release|x64.Build.0 = Release|Any CPU + {44E7A3F2-364D-4E17-BE82-6E57DB5B8DAB}.Release|x86.ActiveCfg = Release|Any CPU + {44E7A3F2-364D-4E17-BE82-6E57DB5B8DAB}.Release|x86.Build.0 = Release|Any CPU + {A18EE274-24E5-4738-81B2-AEE51CBDF237}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {A18EE274-24E5-4738-81B2-AEE51CBDF237}.Debug|Any CPU.Build.0 = Debug|Any CPU + 
{A18EE274-24E5-4738-81B2-AEE51CBDF237}.Debug|x64.ActiveCfg = Debug|Any CPU + {A18EE274-24E5-4738-81B2-AEE51CBDF237}.Debug|x64.Build.0 = Debug|Any CPU + {A18EE274-24E5-4738-81B2-AEE51CBDF237}.Debug|x86.ActiveCfg = Debug|Any CPU + {A18EE274-24E5-4738-81B2-AEE51CBDF237}.Debug|x86.Build.0 = Debug|Any CPU + {A18EE274-24E5-4738-81B2-AEE51CBDF237}.Release|Any CPU.ActiveCfg = Release|Any CPU + {A18EE274-24E5-4738-81B2-AEE51CBDF237}.Release|Any CPU.Build.0 = Release|Any CPU + {A18EE274-24E5-4738-81B2-AEE51CBDF237}.Release|x64.ActiveCfg = Release|Any CPU + {A18EE274-24E5-4738-81B2-AEE51CBDF237}.Release|x64.Build.0 = Release|Any CPU + {A18EE274-24E5-4738-81B2-AEE51CBDF237}.Release|x86.ActiveCfg = Release|Any CPU + {A18EE274-24E5-4738-81B2-AEE51CBDF237}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(NestedProjects) = preSolution + {F849C2C5-EF3E-42F3-B566-137F0D2CA54D} = {CACA4BEF-DC71-1F8E-2E95-ED9AA42CFA43} + {86B08F03-A83A-42EB-AE05-02A41C4F2F3D} = {CACA4BEF-DC71-1F8E-2E95-ED9AA42CFA43} + {07A8428B-CA89-4117-9793-407D9E9D20BA} = {CACA4BEF-DC71-1F8E-2E95-ED9AA42CFA43} + {44E7A3F2-364D-4E17-BE82-6E57DB5B8DAB} = {CACA4BEF-DC71-1F8E-2E95-ED9AA42CFA43} + {A18EE274-24E5-4738-81B2-AEE51CBDF237} = {CACA4BEF-DC71-1F8E-2E95-ED9AA42CFA43} + EndGlobalSection +EndGlobal diff --git a/src/StellaOps.Orchestrator/AGENTS.md b/src/Orchestrator/StellaOps.Orchestrator/AGENTS.md similarity index 98% rename from src/StellaOps.Orchestrator/AGENTS.md rename to src/Orchestrator/StellaOps.Orchestrator/AGENTS.md index af48c03b..70eeda88 100644 --- a/src/StellaOps.Orchestrator/AGENTS.md +++ b/src/Orchestrator/StellaOps.Orchestrator/AGENTS.md @@ -1,18 +1,18 @@ -# StellaOps Orchestrator Service — Agent Charter - -## Mission -Build and operate the Source & Job Orchestrator control plane described in Epic 9. Own scheduler, job state persistence, rate limiting, audit/provenance exports, and realtime streaming APIs while respecting the imposed rule: work of this type must be applied everywhere it belongs. - -## Key Responsibilities -- Maintain deterministic Postgres schema/migrations for sources, runs, jobs, dag edges, artifacts, quotas, and schedules. -- Implement DAG planner, token-bucket rate limiting, watermark/backfill manager, dead-letter replay, and horizontal scale guards. -- Publish REST + WebSocket/SSE APIs powering Console/CLI, capture audit trails, and guard tenant isolation/RBAC scopes. -- Coordinate with Worker SDK, Concelier, Excititor, SBOM, Policy, VEX Lens, Findings Ledger, Authority, Console, CLI, DevOps, and Docs teams to keep integrations in sync. - -## Module Layout -- `StellaOps.Orchestrator.Core/` — scheduler primitives, DAG models, rate limit policies. -- `StellaOps.Orchestrator.Infrastructure/` — Postgres DAL, queue integrations, telemetry shims. -- `StellaOps.Orchestrator.WebService/` — control-plane APIs (sources, runs, jobs, streams). -- `StellaOps.Orchestrator.Worker/` — execution coordinator / lease manager loops. -- `StellaOps.Orchestrator.Tests/` — unit tests for core/infrastructure concerns. -- `StellaOps.Orchestrator.sln` — solution bundling orchestrator components. +# StellaOps Orchestrator Service — Agent Charter + +## Mission +Build and operate the Source & Job Orchestrator control plane described in Epic 9. 
Own scheduler, job state persistence, rate limiting, audit/provenance exports, and realtime streaming APIs while respecting the imposed rule: work of this type must be applied everywhere it belongs. + +## Key Responsibilities +- Maintain deterministic Postgres schema/migrations for sources, runs, jobs, dag edges, artifacts, quotas, and schedules. +- Implement DAG planner, token-bucket rate limiting, watermark/backfill manager, dead-letter replay, and horizontal scale guards. +- Publish REST + WebSocket/SSE APIs powering Console/CLI, capture audit trails, and guard tenant isolation/RBAC scopes. +- Coordinate with Worker SDK, Concelier, Excititor, SBOM, Policy, VEX Lens, Findings Ledger, Authority, Console, CLI, DevOps, and Docs teams to keep integrations in sync. + +## Module Layout +- `StellaOps.Orchestrator.Core/` — scheduler primitives, DAG models, rate limit policies. +- `StellaOps.Orchestrator.Infrastructure/` — Postgres DAL, queue integrations, telemetry shims. +- `StellaOps.Orchestrator.WebService/` — control-plane APIs (sources, runs, jobs, streams). +- `StellaOps.Orchestrator.Worker/` — execution coordinator / lease manager loops. +- `StellaOps.Orchestrator.Tests/` — unit tests for core/infrastructure concerns. +- `StellaOps.Orchestrator.sln` — solution bundling orchestrator components. diff --git a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.Core/Class1.cs b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Core/Class1.cs similarity index 91% rename from src/StellaOps.Orchestrator/StellaOps.Orchestrator.Core/Class1.cs rename to src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Core/Class1.cs index 93149c8e..44bdb1dc 100644 --- a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.Core/Class1.cs +++ b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Core/Class1.cs @@ -1,6 +1,6 @@ -namespace StellaOps.Orchestrator.Core; - -public class Class1 -{ - -} +namespace StellaOps.Orchestrator.Core; + +public class Class1 +{ + +} diff --git a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.Core/StellaOps.Orchestrator.Core.csproj b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Core/StellaOps.Orchestrator.Core.csproj similarity index 95% rename from src/StellaOps.Orchestrator/StellaOps.Orchestrator.Core/StellaOps.Orchestrator.Core.csproj rename to src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Core/StellaOps.Orchestrator.Core.csproj index fe0eef44..e4808f0d 100644 --- a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.Core/StellaOps.Orchestrator.Core.csproj +++ b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Core/StellaOps.Orchestrator.Core.csproj @@ -1,18 +1,18 @@ -<?xml version="1.0" ?> -<Project Sdk="Microsoft.NET.Sdk"> - - - - <PropertyGroup> - - - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <LangVersion>preview</LangVersion> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - - - -</Project> +<?xml version="1.0" ?> +<Project Sdk="Microsoft.NET.Sdk"> + + + + <PropertyGroup> + + + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <LangVersion>preview</LangVersion> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + + + +</Project> diff --git a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.Infrastructure/Class1.cs b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Infrastructure/Class1.cs 
similarity index 92% rename from src/StellaOps.Orchestrator/StellaOps.Orchestrator.Infrastructure/Class1.cs rename to src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Infrastructure/Class1.cs index 96fdb698..c1bd329b 100644 --- a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.Infrastructure/Class1.cs +++ b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Infrastructure/Class1.cs @@ -1,6 +1,6 @@ -namespace StellaOps.Orchestrator.Infrastructure; - -public class Class1 -{ - -} +namespace StellaOps.Orchestrator.Infrastructure; + +public class Class1 +{ + +} diff --git a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.Infrastructure/StellaOps.Orchestrator.Infrastructure.csproj b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Infrastructure/StellaOps.Orchestrator.Infrastructure.csproj similarity index 94% rename from src/StellaOps.Orchestrator/StellaOps.Orchestrator.Infrastructure/StellaOps.Orchestrator.Infrastructure.csproj rename to src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Infrastructure/StellaOps.Orchestrator.Infrastructure.csproj index 684f2bbb..630697d3 100644 --- a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.Infrastructure/StellaOps.Orchestrator.Infrastructure.csproj +++ b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Infrastructure/StellaOps.Orchestrator.Infrastructure.csproj @@ -1,28 +1,28 @@ -<?xml version="1.0" ?> -<Project Sdk="Microsoft.NET.Sdk"> - - - - <ItemGroup> - - - <ProjectReference Include="..\StellaOps.Orchestrator.Core\StellaOps.Orchestrator.Core.csproj"/> - - - </ItemGroup> - - - - <PropertyGroup> - - - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <LangVersion>preview</LangVersion> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - - - -</Project> +<?xml version="1.0" ?> +<Project Sdk="Microsoft.NET.Sdk"> + + + + <ItemGroup> + + + <ProjectReference Include="..\StellaOps.Orchestrator.Core\StellaOps.Orchestrator.Core.csproj"/> + + + </ItemGroup> + + + + <PropertyGroup> + + + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <LangVersion>preview</LangVersion> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + + + +</Project> diff --git a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.Tests/StellaOps.Orchestrator.Tests.csproj b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Tests/StellaOps.Orchestrator.Tests.csproj similarity index 91% rename from src/StellaOps.Orchestrator/StellaOps.Orchestrator.Tests/StellaOps.Orchestrator.Tests.csproj rename to src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Tests/StellaOps.Orchestrator.Tests.csproj index 7ad38105..6717d0b0 100644 --- a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.Tests/StellaOps.Orchestrator.Tests.csproj +++ b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Tests/StellaOps.Orchestrator.Tests.csproj @@ -1,135 +1,135 @@ -<?xml version="1.0" ?> -<Project Sdk="Microsoft.NET.Sdk"> - - - - - - <PropertyGroup> - - - - - <OutputType>Exe</OutputType> - - - - - <IsPackable>false</IsPackable> - - - - - - - - - - - - - - <TargetFramework>net10.0</TargetFramework> - - - <ImplicitUsings>enable</ImplicitUsings> - - - <Nullable>enable</Nullable> - - - <UseConcelierTestInfra>false</UseConcelierTestInfra> - - - <LangVersion>preview</LangVersion> - - - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - - - 
</PropertyGroup> - - - - - - <ItemGroup> - - - - - <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1"/> - - - - - <PackageReference Include="xunit.v3" Version="3.0.0"/> - - - - - <PackageReference Include="xunit.runner.visualstudio" Version="3.1.3"/> - - - - - </ItemGroup> - - - - - - <ItemGroup> - - - - - <Content Include="xunit.runner.json" CopyToOutputDirectory="PreserveNewest"/> - - - - - </ItemGroup> - - - - - - <ItemGroup> - - - - - <Using Include="Xunit"/> - - - - - </ItemGroup> - - - - - - <ItemGroup> - - - - - <ProjectReference Include="..\StellaOps.Orchestrator.Core\StellaOps.Orchestrator.Core.csproj"/> - - - - - <ProjectReference Include="..\StellaOps.Orchestrator.Infrastructure\StellaOps.Orchestrator.Infrastructure.csproj"/> - - - - - </ItemGroup> - - - - - -</Project> +<?xml version="1.0" ?> +<Project Sdk="Microsoft.NET.Sdk"> + + + + + + <PropertyGroup> + + + + + <OutputType>Exe</OutputType> + + + + + <IsPackable>false</IsPackable> + + + + + + + + + + + + + + <TargetFramework>net10.0</TargetFramework> + + + <ImplicitUsings>enable</ImplicitUsings> + + + <Nullable>enable</Nullable> + + + <UseConcelierTestInfra>false</UseConcelierTestInfra> + + + <LangVersion>preview</LangVersion> + + + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + + + </PropertyGroup> + + + + + + <ItemGroup> + + + + + <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1"/> + + + + + <PackageReference Include="xunit.v3" Version="3.0.0"/> + + + + + <PackageReference Include="xunit.runner.visualstudio" Version="3.1.3"/> + + + + + </ItemGroup> + + + + + + <ItemGroup> + + + + + <Content Include="xunit.runner.json" CopyToOutputDirectory="PreserveNewest"/> + + + + + </ItemGroup> + + + + + + <ItemGroup> + + + + + <Using Include="Xunit"/> + + + + + </ItemGroup> + + + + + + <ItemGroup> + + + + + <ProjectReference Include="..\StellaOps.Orchestrator.Core\StellaOps.Orchestrator.Core.csproj"/> + + + + + <ProjectReference Include="..\StellaOps.Orchestrator.Infrastructure\StellaOps.Orchestrator.Infrastructure.csproj"/> + + + + + </ItemGroup> + + + + + +</Project> diff --git a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.Tests/UnitTest1.cs b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Tests/UnitTest1.cs similarity index 92% rename from src/StellaOps.Orchestrator/StellaOps.Orchestrator.Tests/UnitTest1.cs rename to src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Tests/UnitTest1.cs index 207259a4..340af519 100644 --- a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.Tests/UnitTest1.cs +++ b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Tests/UnitTest1.cs @@ -1,10 +1,10 @@ -namespace StellaOps.Orchestrator.Tests; - -public class UnitTest1 -{ - [Fact] - public void Test1() - { - - } -} +namespace StellaOps.Orchestrator.Tests; + +public class UnitTest1 +{ + [Fact] + public void Test1() + { + + } +} diff --git a/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Tests/xunit.runner.json b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Tests/xunit.runner.json new file mode 100644 index 00000000..249d815c --- /dev/null +++ b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Tests/xunit.runner.json @@ -0,0 +1,3 @@ +{ + "$schema": "https://xunit.net/schema/current/xunit.runner.schema.json" +} diff --git a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/Program.cs b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/Program.cs similarity index 96% rename from 
src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/Program.cs rename to src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/Program.cs index ee9d65d6..3917ef1b 100644 --- a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/Program.cs +++ b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/Program.cs @@ -1,41 +1,41 @@ -var builder = WebApplication.CreateBuilder(args); - -// Add services to the container. -// Learn more about configuring OpenAPI at https://aka.ms/aspnet/openapi -builder.Services.AddOpenApi(); - -var app = builder.Build(); - -// Configure the HTTP request pipeline. -if (app.Environment.IsDevelopment()) -{ - app.MapOpenApi(); -} - -app.UseHttpsRedirection(); - -var summaries = new[] -{ - "Freezing", "Bracing", "Chilly", "Cool", "Mild", "Warm", "Balmy", "Hot", "Sweltering", "Scorching" -}; - -app.MapGet("/weatherforecast", () => -{ - var forecast = Enumerable.Range(1, 5).Select(index => - new WeatherForecast - ( - DateOnly.FromDateTime(DateTime.Now.AddDays(index)), - Random.Shared.Next(-20, 55), - summaries[Random.Shared.Next(summaries.Length)] - )) - .ToArray(); - return forecast; -}) -.WithName("GetWeatherForecast"); - -app.Run(); - -record WeatherForecast(DateOnly Date, int TemperatureC, string? Summary) -{ - public int TemperatureF => 32 + (int)(TemperatureC / 0.5556); -} +var builder = WebApplication.CreateBuilder(args); + +// Add services to the container. +// Learn more about configuring OpenAPI at https://aka.ms/aspnet/openapi +builder.Services.AddOpenApi(); + +var app = builder.Build(); + +// Configure the HTTP request pipeline. +if (app.Environment.IsDevelopment()) +{ + app.MapOpenApi(); +} + +app.UseHttpsRedirection(); + +var summaries = new[] +{ + "Freezing", "Bracing", "Chilly", "Cool", "Mild", "Warm", "Balmy", "Hot", "Sweltering", "Scorching" +}; + +app.MapGet("/weatherforecast", () => +{ + var forecast = Enumerable.Range(1, 5).Select(index => + new WeatherForecast + ( + DateOnly.FromDateTime(DateTime.Now.AddDays(index)), + Random.Shared.Next(-20, 55), + summaries[Random.Shared.Next(summaries.Length)] + )) + .ToArray(); + return forecast; +}) +.WithName("GetWeatherForecast"); + +app.Run(); + +record WeatherForecast(DateOnly Date, int TemperatureC, string? 
Summary) +{ + public int TemperatureF => 32 + (int)(TemperatureC / 0.5556); +} diff --git a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/Properties/launchSettings.json b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/Properties/launchSettings.json similarity index 96% rename from src/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/Properties/launchSettings.json rename to src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/Properties/launchSettings.json index 5fbc3346..2f526637 100644 --- a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/Properties/launchSettings.json +++ b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/Properties/launchSettings.json @@ -1,23 +1,23 @@ -{ - "$schema": "https://json.schemastore.org/launchsettings.json", - "profiles": { - "http": { - "commandName": "Project", - "dotnetRunMessages": true, - "launchBrowser": false, - "applicationUrl": "http://localhost:5151", - "environmentVariables": { - "ASPNETCORE_ENVIRONMENT": "Development" - } - }, - "https": { - "commandName": "Project", - "dotnetRunMessages": true, - "launchBrowser": false, - "applicationUrl": "https://localhost:7228;http://localhost:5151", - "environmentVariables": { - "ASPNETCORE_ENVIRONMENT": "Development" - } - } - } -} +{ + "$schema": "https://json.schemastore.org/launchsettings.json", + "profiles": { + "http": { + "commandName": "Project", + "dotnetRunMessages": true, + "launchBrowser": false, + "applicationUrl": "http://localhost:5151", + "environmentVariables": { + "ASPNETCORE_ENVIRONMENT": "Development" + } + }, + "https": { + "commandName": "Project", + "dotnetRunMessages": true, + "launchBrowser": false, + "applicationUrl": "https://localhost:7228;http://localhost:5151", + "environmentVariables": { + "ASPNETCORE_ENVIRONMENT": "Development" + } + } + } +} diff --git a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/StellaOps.Orchestrator.WebService.csproj b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/StellaOps.Orchestrator.WebService.csproj similarity index 95% rename from src/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/StellaOps.Orchestrator.WebService.csproj rename to src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/StellaOps.Orchestrator.WebService.csproj index 1588ca57..5f5d9cda 100644 --- a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/StellaOps.Orchestrator.WebService.csproj +++ b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/StellaOps.Orchestrator.WebService.csproj @@ -1,41 +1,41 @@ -<?xml version="1.0" ?> -<Project Sdk="Microsoft.NET.Sdk.Web"> - - - - <PropertyGroup> - - - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <LangVersion>preview</LangVersion> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - - - - <ItemGroup> - - - <PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="10.0.0-rc.2.25502.107"/> - - - </ItemGroup> - - - - <ItemGroup> - - - <ProjectReference Include="..\StellaOps.Orchestrator.Core\StellaOps.Orchestrator.Core.csproj"/> - - - <ProjectReference Include="..\StellaOps.Orchestrator.Infrastructure\StellaOps.Orchestrator.Infrastructure.csproj"/> - - - </ItemGroup> - - - -</Project> +<?xml version="1.0" ?> +<Project Sdk="Microsoft.NET.Sdk.Web"> + + + + <PropertyGroup> + + + <TargetFramework>net10.0</TargetFramework> + 
<ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <LangVersion>preview</LangVersion> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + + + + <ItemGroup> + + + <PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="10.0.0-rc.2.25502.107"/> + + + </ItemGroup> + + + + <ItemGroup> + + + <ProjectReference Include="..\StellaOps.Orchestrator.Core\StellaOps.Orchestrator.Core.csproj"/> + + + <ProjectReference Include="..\StellaOps.Orchestrator.Infrastructure\StellaOps.Orchestrator.Infrastructure.csproj"/> + + + </ItemGroup> + + + +</Project> diff --git a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/StellaOps.Orchestrator.WebService.http b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/StellaOps.Orchestrator.WebService.http similarity index 96% rename from src/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/StellaOps.Orchestrator.WebService.http rename to src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/StellaOps.Orchestrator.WebService.http index b47530f9..670e6e33 100644 --- a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/StellaOps.Orchestrator.WebService.http +++ b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/StellaOps.Orchestrator.WebService.http @@ -1,6 +1,6 @@ -@StellaOps.Orchestrator.WebService_HostAddress = http://localhost:5151 - -GET {{StellaOps.Orchestrator.WebService_HostAddress}}/weatherforecast/ -Accept: application/json - -### +@StellaOps.Orchestrator.WebService_HostAddress = http://localhost:5151 + +GET {{StellaOps.Orchestrator.WebService_HostAddress}}/weatherforecast/ +Accept: application/json + +### diff --git a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/appsettings.Development.json b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/appsettings.Development.json similarity index 93% rename from src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/appsettings.Development.json rename to src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/appsettings.Development.json index 0c208ae9..ff66ba6b 100644 --- a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/appsettings.Development.json +++ b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/appsettings.Development.json @@ -1,8 +1,8 @@ -{ - "Logging": { - "LogLevel": { - "Default": "Information", - "Microsoft.AspNetCore": "Warning" - } - } -} +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.AspNetCore": "Warning" + } + } +} diff --git a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/appsettings.json b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/appsettings.json similarity index 94% rename from src/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/appsettings.json rename to src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/appsettings.json index 10f68b8c..4d566948 100644 --- a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/appsettings.json +++ b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.WebService/appsettings.json @@ -1,9 +1,9 @@ -{ - "Logging": { - "LogLevel": { - "Default": "Information", - "Microsoft.AspNetCore": "Warning" - } - }, - "AllowedHosts": "*" -} +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.AspNetCore": "Warning" + } + }, + "AllowedHosts": "*" +} diff --git 
a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/Program.cs b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/Program.cs similarity index 96% rename from src/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/Program.cs rename to src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/Program.cs index 8ab4deb8..ef071a8c 100644 --- a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/Program.cs +++ b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/Program.cs @@ -1,7 +1,7 @@ -using StellaOps.Orchestrator.Worker; - -var builder = Host.CreateApplicationBuilder(args); -builder.Services.AddHostedService<Worker>(); - -var host = builder.Build(); -host.Run(); +using StellaOps.Orchestrator.Worker; + +var builder = Host.CreateApplicationBuilder(args); +builder.Services.AddHostedService<Worker>(); + +var host = builder.Build(); +host.Run(); diff --git a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/Properties/launchSettings.json b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/Properties/launchSettings.json similarity index 95% rename from src/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/Properties/launchSettings.json rename to src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/Properties/launchSettings.json index 5d536388..2a95207a 100644 --- a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/Properties/launchSettings.json +++ b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/Properties/launchSettings.json @@ -1,12 +1,12 @@ -{ - "$schema": "https://json.schemastore.org/launchsettings.json", - "profiles": { - "StellaOps.Orchestrator.Worker": { - "commandName": "Project", - "dotnetRunMessages": true, - "environmentVariables": { - "DOTNET_ENVIRONMENT": "Development" - } - } - } -} +{ + "$schema": "https://json.schemastore.org/launchsettings.json", + "profiles": { + "StellaOps.Orchestrator.Worker": { + "commandName": "Project", + "dotnetRunMessages": true, + "environmentVariables": { + "DOTNET_ENVIRONMENT": "Development" + } + } + } +} diff --git a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/StellaOps.Orchestrator.Worker.csproj b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/StellaOps.Orchestrator.Worker.csproj similarity index 95% rename from src/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/StellaOps.Orchestrator.Worker.csproj rename to src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/StellaOps.Orchestrator.Worker.csproj index f2601d9f..7a14e1e5 100644 --- a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/StellaOps.Orchestrator.Worker.csproj +++ b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/StellaOps.Orchestrator.Worker.csproj @@ -1,43 +1,43 @@ -<?xml version="1.0" ?> -<Project Sdk="Microsoft.NET.Sdk.Worker"> - - - - <PropertyGroup> - - - <UserSecretsId>dotnet-StellaOps.Orchestrator.Worker-6d276def-9e32-43e0-bca8-9699cd1ae20d</UserSecretsId> - - - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <LangVersion>preview</LangVersion> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - - - - <ItemGroup> - - - <PackageReference Include="Microsoft.Extensions.Hosting" Version="10.0.0-rc.2.25502.107"/> - - - </ItemGroup> - - - - <ItemGroup> - - - <ProjectReference Include="..\StellaOps.Orchestrator.Core\StellaOps.Orchestrator.Core.csproj"/> - - - 
<ProjectReference Include="..\StellaOps.Orchestrator.Infrastructure\StellaOps.Orchestrator.Infrastructure.csproj"/> - - - </ItemGroup> - - -</Project> +<?xml version="1.0" ?> +<Project Sdk="Microsoft.NET.Sdk.Worker"> + + + + <PropertyGroup> + + + <UserSecretsId>dotnet-StellaOps.Orchestrator.Worker-6d276def-9e32-43e0-bca8-9699cd1ae20d</UserSecretsId> + + + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <LangVersion>preview</LangVersion> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + + + + <ItemGroup> + + + <PackageReference Include="Microsoft.Extensions.Hosting" Version="10.0.0-rc.2.25502.107"/> + + + </ItemGroup> + + + + <ItemGroup> + + + <ProjectReference Include="..\StellaOps.Orchestrator.Core\StellaOps.Orchestrator.Core.csproj"/> + + + <ProjectReference Include="..\StellaOps.Orchestrator.Infrastructure\StellaOps.Orchestrator.Infrastructure.csproj"/> + + + </ItemGroup> + + +</Project> diff --git a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/Worker.cs b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/Worker.cs similarity index 96% rename from src/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/Worker.cs rename to src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/Worker.cs index 79a68dae..432aefab 100644 --- a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/Worker.cs +++ b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/Worker.cs @@ -1,16 +1,16 @@ -namespace StellaOps.Orchestrator.Worker; - -public class Worker(ILogger<Worker> logger) : BackgroundService -{ - protected override async Task ExecuteAsync(CancellationToken stoppingToken) - { - while (!stoppingToken.IsCancellationRequested) - { - if (logger.IsEnabled(LogLevel.Information)) - { - logger.LogInformation("Worker running at: {time}", DateTimeOffset.Now); - } - await Task.Delay(1000, stoppingToken); - } - } -} +namespace StellaOps.Orchestrator.Worker; + +public class Worker(ILogger<Worker> logger) : BackgroundService +{ + protected override async Task ExecuteAsync(CancellationToken stoppingToken) + { + while (!stoppingToken.IsCancellationRequested) + { + if (logger.IsEnabled(LogLevel.Information)) + { + logger.LogInformation("Worker running at: {time}", DateTimeOffset.Now); + } + await Task.Delay(1000, stoppingToken); + } + } +} diff --git a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/appsettings.Development.json b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/appsettings.Development.json similarity index 94% rename from src/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/appsettings.Development.json rename to src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/appsettings.Development.json index b2dcdb67..69017646 100644 --- a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/appsettings.Development.json +++ b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/appsettings.Development.json @@ -1,8 +1,8 @@ -{ - "Logging": { - "LogLevel": { - "Default": "Information", - "Microsoft.Hosting.Lifetime": "Information" - } - } -} +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.Hosting.Lifetime": "Information" + } + } +} diff --git a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/appsettings.json b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/appsettings.json similarity index 94% rename from 
src/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/appsettings.json rename to src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/appsettings.json index b2dcdb67..69017646 100644 --- a/src/StellaOps.ExportCenter/StellaOps.ExportCenter.Worker/appsettings.json +++ b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.Worker/appsettings.json @@ -1,8 +1,8 @@ -{ - "Logging": { - "LogLevel": { - "Default": "Information", - "Microsoft.Hosting.Lifetime": "Information" - } - } -} +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.Hosting.Lifetime": "Information" + } + } +} diff --git a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.sln b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.sln similarity index 98% rename from src/StellaOps.Orchestrator/StellaOps.Orchestrator.sln rename to src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.sln index 37515ef4..0f0270d9 100644 --- a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.sln +++ b/src/Orchestrator/StellaOps.Orchestrator/StellaOps.Orchestrator.sln @@ -1,90 +1,90 @@ - -Microsoft Visual Studio Solution File, Format Version 12.00 -# Visual Studio Version 17 -VisualStudioVersion = 17.0.31903.59 -MinimumVisualStudioVersion = 10.0.40219.1 -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Orchestrator.Core", "StellaOps.Orchestrator.Core\StellaOps.Orchestrator.Core.csproj", "{463C8A77-52BB-4282-BCED-F8D62BAE0528}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Orchestrator.Infrastructure", "StellaOps.Orchestrator.Infrastructure\StellaOps.Orchestrator.Infrastructure.csproj", "{C0DE4E60-7554-406A-8119-7F5714A604E3}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Orchestrator.WebService", "StellaOps.Orchestrator.WebService\StellaOps.Orchestrator.WebService.csproj", "{A9D6DF47-5CAF-4E07-BC44-19ABE7D8CDD9}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Orchestrator.Worker", "StellaOps.Orchestrator.Worker\StellaOps.Orchestrator.Worker.csproj", "{38BC487F-11C6-4397-9654-D54AE7EE08DD}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Orchestrator.Tests", "StellaOps.Orchestrator.Tests\StellaOps.Orchestrator.Tests.csproj", "{8F0989E8-8666-4D37-8E50-E84602237A83}" -EndProject -Global - GlobalSection(SolutionConfigurationPlatforms) = preSolution - Debug|Any CPU = Debug|Any CPU - Debug|x64 = Debug|x64 - Debug|x86 = Debug|x86 - Release|Any CPU = Release|Any CPU - Release|x64 = Release|x64 - Release|x86 = Release|x86 - EndGlobalSection - GlobalSection(ProjectConfigurationPlatforms) = postSolution - {463C8A77-52BB-4282-BCED-F8D62BAE0528}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {463C8A77-52BB-4282-BCED-F8D62BAE0528}.Debug|Any CPU.Build.0 = Debug|Any CPU - {463C8A77-52BB-4282-BCED-F8D62BAE0528}.Debug|x64.ActiveCfg = Debug|Any CPU - {463C8A77-52BB-4282-BCED-F8D62BAE0528}.Debug|x64.Build.0 = Debug|Any CPU - {463C8A77-52BB-4282-BCED-F8D62BAE0528}.Debug|x86.ActiveCfg = Debug|Any CPU - {463C8A77-52BB-4282-BCED-F8D62BAE0528}.Debug|x86.Build.0 = Debug|Any CPU - {463C8A77-52BB-4282-BCED-F8D62BAE0528}.Release|Any CPU.ActiveCfg = Release|Any CPU - {463C8A77-52BB-4282-BCED-F8D62BAE0528}.Release|Any CPU.Build.0 = Release|Any CPU - {463C8A77-52BB-4282-BCED-F8D62BAE0528}.Release|x64.ActiveCfg = Release|Any CPU - {463C8A77-52BB-4282-BCED-F8D62BAE0528}.Release|x64.Build.0 = Release|Any CPU - {463C8A77-52BB-4282-BCED-F8D62BAE0528}.Release|x86.ActiveCfg = Release|Any CPU - 
{463C8A77-52BB-4282-BCED-F8D62BAE0528}.Release|x86.Build.0 = Release|Any CPU - {C0DE4E60-7554-406A-8119-7F5714A604E3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {C0DE4E60-7554-406A-8119-7F5714A604E3}.Debug|Any CPU.Build.0 = Debug|Any CPU - {C0DE4E60-7554-406A-8119-7F5714A604E3}.Debug|x64.ActiveCfg = Debug|Any CPU - {C0DE4E60-7554-406A-8119-7F5714A604E3}.Debug|x64.Build.0 = Debug|Any CPU - {C0DE4E60-7554-406A-8119-7F5714A604E3}.Debug|x86.ActiveCfg = Debug|Any CPU - {C0DE4E60-7554-406A-8119-7F5714A604E3}.Debug|x86.Build.0 = Debug|Any CPU - {C0DE4E60-7554-406A-8119-7F5714A604E3}.Release|Any CPU.ActiveCfg = Release|Any CPU - {C0DE4E60-7554-406A-8119-7F5714A604E3}.Release|Any CPU.Build.0 = Release|Any CPU - {C0DE4E60-7554-406A-8119-7F5714A604E3}.Release|x64.ActiveCfg = Release|Any CPU - {C0DE4E60-7554-406A-8119-7F5714A604E3}.Release|x64.Build.0 = Release|Any CPU - {C0DE4E60-7554-406A-8119-7F5714A604E3}.Release|x86.ActiveCfg = Release|Any CPU - {C0DE4E60-7554-406A-8119-7F5714A604E3}.Release|x86.Build.0 = Release|Any CPU - {A9D6DF47-5CAF-4E07-BC44-19ABE7D8CDD9}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {A9D6DF47-5CAF-4E07-BC44-19ABE7D8CDD9}.Debug|Any CPU.Build.0 = Debug|Any CPU - {A9D6DF47-5CAF-4E07-BC44-19ABE7D8CDD9}.Debug|x64.ActiveCfg = Debug|Any CPU - {A9D6DF47-5CAF-4E07-BC44-19ABE7D8CDD9}.Debug|x64.Build.0 = Debug|Any CPU - {A9D6DF47-5CAF-4E07-BC44-19ABE7D8CDD9}.Debug|x86.ActiveCfg = Debug|Any CPU - {A9D6DF47-5CAF-4E07-BC44-19ABE7D8CDD9}.Debug|x86.Build.0 = Debug|Any CPU - {A9D6DF47-5CAF-4E07-BC44-19ABE7D8CDD9}.Release|Any CPU.ActiveCfg = Release|Any CPU - {A9D6DF47-5CAF-4E07-BC44-19ABE7D8CDD9}.Release|Any CPU.Build.0 = Release|Any CPU - {A9D6DF47-5CAF-4E07-BC44-19ABE7D8CDD9}.Release|x64.ActiveCfg = Release|Any CPU - {A9D6DF47-5CAF-4E07-BC44-19ABE7D8CDD9}.Release|x64.Build.0 = Release|Any CPU - {A9D6DF47-5CAF-4E07-BC44-19ABE7D8CDD9}.Release|x86.ActiveCfg = Release|Any CPU - {A9D6DF47-5CAF-4E07-BC44-19ABE7D8CDD9}.Release|x86.Build.0 = Release|Any CPU - {38BC487F-11C6-4397-9654-D54AE7EE08DD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {38BC487F-11C6-4397-9654-D54AE7EE08DD}.Debug|Any CPU.Build.0 = Debug|Any CPU - {38BC487F-11C6-4397-9654-D54AE7EE08DD}.Debug|x64.ActiveCfg = Debug|Any CPU - {38BC487F-11C6-4397-9654-D54AE7EE08DD}.Debug|x64.Build.0 = Debug|Any CPU - {38BC487F-11C6-4397-9654-D54AE7EE08DD}.Debug|x86.ActiveCfg = Debug|Any CPU - {38BC487F-11C6-4397-9654-D54AE7EE08DD}.Debug|x86.Build.0 = Debug|Any CPU - {38BC487F-11C6-4397-9654-D54AE7EE08DD}.Release|Any CPU.ActiveCfg = Release|Any CPU - {38BC487F-11C6-4397-9654-D54AE7EE08DD}.Release|Any CPU.Build.0 = Release|Any CPU - {38BC487F-11C6-4397-9654-D54AE7EE08DD}.Release|x64.ActiveCfg = Release|Any CPU - {38BC487F-11C6-4397-9654-D54AE7EE08DD}.Release|x64.Build.0 = Release|Any CPU - {38BC487F-11C6-4397-9654-D54AE7EE08DD}.Release|x86.ActiveCfg = Release|Any CPU - {38BC487F-11C6-4397-9654-D54AE7EE08DD}.Release|x86.Build.0 = Release|Any CPU - {8F0989E8-8666-4D37-8E50-E84602237A83}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {8F0989E8-8666-4D37-8E50-E84602237A83}.Debug|Any CPU.Build.0 = Debug|Any CPU - {8F0989E8-8666-4D37-8E50-E84602237A83}.Debug|x64.ActiveCfg = Debug|Any CPU - {8F0989E8-8666-4D37-8E50-E84602237A83}.Debug|x64.Build.0 = Debug|Any CPU - {8F0989E8-8666-4D37-8E50-E84602237A83}.Debug|x86.ActiveCfg = Debug|Any CPU - {8F0989E8-8666-4D37-8E50-E84602237A83}.Debug|x86.Build.0 = Debug|Any CPU - {8F0989E8-8666-4D37-8E50-E84602237A83}.Release|Any CPU.ActiveCfg = Release|Any CPU - {8F0989E8-8666-4D37-8E50-E84602237A83}.Release|Any CPU.Build.0 = 
Release|Any CPU - {8F0989E8-8666-4D37-8E50-E84602237A83}.Release|x64.ActiveCfg = Release|Any CPU - {8F0989E8-8666-4D37-8E50-E84602237A83}.Release|x64.Build.0 = Release|Any CPU - {8F0989E8-8666-4D37-8E50-E84602237A83}.Release|x86.ActiveCfg = Release|Any CPU - {8F0989E8-8666-4D37-8E50-E84602237A83}.Release|x86.Build.0 = Release|Any CPU - EndGlobalSection - GlobalSection(SolutionProperties) = preSolution - HideSolutionNode = FALSE - EndGlobalSection -EndGlobal + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.0.31903.59 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Orchestrator.Core", "StellaOps.Orchestrator.Core\StellaOps.Orchestrator.Core.csproj", "{463C8A77-52BB-4282-BCED-F8D62BAE0528}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Orchestrator.Infrastructure", "StellaOps.Orchestrator.Infrastructure\StellaOps.Orchestrator.Infrastructure.csproj", "{C0DE4E60-7554-406A-8119-7F5714A604E3}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Orchestrator.WebService", "StellaOps.Orchestrator.WebService\StellaOps.Orchestrator.WebService.csproj", "{A9D6DF47-5CAF-4E07-BC44-19ABE7D8CDD9}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Orchestrator.Worker", "StellaOps.Orchestrator.Worker\StellaOps.Orchestrator.Worker.csproj", "{38BC487F-11C6-4397-9654-D54AE7EE08DD}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Orchestrator.Tests", "StellaOps.Orchestrator.Tests\StellaOps.Orchestrator.Tests.csproj", "{8F0989E8-8666-4D37-8E50-E84602237A83}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|Any CPU = Release|Any CPU + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {463C8A77-52BB-4282-BCED-F8D62BAE0528}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {463C8A77-52BB-4282-BCED-F8D62BAE0528}.Debug|Any CPU.Build.0 = Debug|Any CPU + {463C8A77-52BB-4282-BCED-F8D62BAE0528}.Debug|x64.ActiveCfg = Debug|Any CPU + {463C8A77-52BB-4282-BCED-F8D62BAE0528}.Debug|x64.Build.0 = Debug|Any CPU + {463C8A77-52BB-4282-BCED-F8D62BAE0528}.Debug|x86.ActiveCfg = Debug|Any CPU + {463C8A77-52BB-4282-BCED-F8D62BAE0528}.Debug|x86.Build.0 = Debug|Any CPU + {463C8A77-52BB-4282-BCED-F8D62BAE0528}.Release|Any CPU.ActiveCfg = Release|Any CPU + {463C8A77-52BB-4282-BCED-F8D62BAE0528}.Release|Any CPU.Build.0 = Release|Any CPU + {463C8A77-52BB-4282-BCED-F8D62BAE0528}.Release|x64.ActiveCfg = Release|Any CPU + {463C8A77-52BB-4282-BCED-F8D62BAE0528}.Release|x64.Build.0 = Release|Any CPU + {463C8A77-52BB-4282-BCED-F8D62BAE0528}.Release|x86.ActiveCfg = Release|Any CPU + {463C8A77-52BB-4282-BCED-F8D62BAE0528}.Release|x86.Build.0 = Release|Any CPU + {C0DE4E60-7554-406A-8119-7F5714A604E3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C0DE4E60-7554-406A-8119-7F5714A604E3}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C0DE4E60-7554-406A-8119-7F5714A604E3}.Debug|x64.ActiveCfg = Debug|Any CPU + {C0DE4E60-7554-406A-8119-7F5714A604E3}.Debug|x64.Build.0 = Debug|Any CPU + {C0DE4E60-7554-406A-8119-7F5714A604E3}.Debug|x86.ActiveCfg = Debug|Any CPU + {C0DE4E60-7554-406A-8119-7F5714A604E3}.Debug|x86.Build.0 = Debug|Any CPU + {C0DE4E60-7554-406A-8119-7F5714A604E3}.Release|Any CPU.ActiveCfg = Release|Any CPU + 
{C0DE4E60-7554-406A-8119-7F5714A604E3}.Release|Any CPU.Build.0 = Release|Any CPU + {C0DE4E60-7554-406A-8119-7F5714A604E3}.Release|x64.ActiveCfg = Release|Any CPU + {C0DE4E60-7554-406A-8119-7F5714A604E3}.Release|x64.Build.0 = Release|Any CPU + {C0DE4E60-7554-406A-8119-7F5714A604E3}.Release|x86.ActiveCfg = Release|Any CPU + {C0DE4E60-7554-406A-8119-7F5714A604E3}.Release|x86.Build.0 = Release|Any CPU + {A9D6DF47-5CAF-4E07-BC44-19ABE7D8CDD9}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {A9D6DF47-5CAF-4E07-BC44-19ABE7D8CDD9}.Debug|Any CPU.Build.0 = Debug|Any CPU + {A9D6DF47-5CAF-4E07-BC44-19ABE7D8CDD9}.Debug|x64.ActiveCfg = Debug|Any CPU + {A9D6DF47-5CAF-4E07-BC44-19ABE7D8CDD9}.Debug|x64.Build.0 = Debug|Any CPU + {A9D6DF47-5CAF-4E07-BC44-19ABE7D8CDD9}.Debug|x86.ActiveCfg = Debug|Any CPU + {A9D6DF47-5CAF-4E07-BC44-19ABE7D8CDD9}.Debug|x86.Build.0 = Debug|Any CPU + {A9D6DF47-5CAF-4E07-BC44-19ABE7D8CDD9}.Release|Any CPU.ActiveCfg = Release|Any CPU + {A9D6DF47-5CAF-4E07-BC44-19ABE7D8CDD9}.Release|Any CPU.Build.0 = Release|Any CPU + {A9D6DF47-5CAF-4E07-BC44-19ABE7D8CDD9}.Release|x64.ActiveCfg = Release|Any CPU + {A9D6DF47-5CAF-4E07-BC44-19ABE7D8CDD9}.Release|x64.Build.0 = Release|Any CPU + {A9D6DF47-5CAF-4E07-BC44-19ABE7D8CDD9}.Release|x86.ActiveCfg = Release|Any CPU + {A9D6DF47-5CAF-4E07-BC44-19ABE7D8CDD9}.Release|x86.Build.0 = Release|Any CPU + {38BC487F-11C6-4397-9654-D54AE7EE08DD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {38BC487F-11C6-4397-9654-D54AE7EE08DD}.Debug|Any CPU.Build.0 = Debug|Any CPU + {38BC487F-11C6-4397-9654-D54AE7EE08DD}.Debug|x64.ActiveCfg = Debug|Any CPU + {38BC487F-11C6-4397-9654-D54AE7EE08DD}.Debug|x64.Build.0 = Debug|Any CPU + {38BC487F-11C6-4397-9654-D54AE7EE08DD}.Debug|x86.ActiveCfg = Debug|Any CPU + {38BC487F-11C6-4397-9654-D54AE7EE08DD}.Debug|x86.Build.0 = Debug|Any CPU + {38BC487F-11C6-4397-9654-D54AE7EE08DD}.Release|Any CPU.ActiveCfg = Release|Any CPU + {38BC487F-11C6-4397-9654-D54AE7EE08DD}.Release|Any CPU.Build.0 = Release|Any CPU + {38BC487F-11C6-4397-9654-D54AE7EE08DD}.Release|x64.ActiveCfg = Release|Any CPU + {38BC487F-11C6-4397-9654-D54AE7EE08DD}.Release|x64.Build.0 = Release|Any CPU + {38BC487F-11C6-4397-9654-D54AE7EE08DD}.Release|x86.ActiveCfg = Release|Any CPU + {38BC487F-11C6-4397-9654-D54AE7EE08DD}.Release|x86.Build.0 = Release|Any CPU + {8F0989E8-8666-4D37-8E50-E84602237A83}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {8F0989E8-8666-4D37-8E50-E84602237A83}.Debug|Any CPU.Build.0 = Debug|Any CPU + {8F0989E8-8666-4D37-8E50-E84602237A83}.Debug|x64.ActiveCfg = Debug|Any CPU + {8F0989E8-8666-4D37-8E50-E84602237A83}.Debug|x64.Build.0 = Debug|Any CPU + {8F0989E8-8666-4D37-8E50-E84602237A83}.Debug|x86.ActiveCfg = Debug|Any CPU + {8F0989E8-8666-4D37-8E50-E84602237A83}.Debug|x86.Build.0 = Debug|Any CPU + {8F0989E8-8666-4D37-8E50-E84602237A83}.Release|Any CPU.ActiveCfg = Release|Any CPU + {8F0989E8-8666-4D37-8E50-E84602237A83}.Release|Any CPU.Build.0 = Release|Any CPU + {8F0989E8-8666-4D37-8E50-E84602237A83}.Release|x64.ActiveCfg = Release|Any CPU + {8F0989E8-8666-4D37-8E50-E84602237A83}.Release|x64.Build.0 = Release|Any CPU + {8F0989E8-8666-4D37-8E50-E84602237A83}.Release|x86.ActiveCfg = Release|Any CPU + {8F0989E8-8666-4D37-8E50-E84602237A83}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection +EndGlobal diff --git a/src/StellaOps.Orchestrator/TASKS.md b/src/Orchestrator/StellaOps.Orchestrator/TASKS.md similarity index 99% rename from src/StellaOps.Orchestrator/TASKS.md 
rename to src/Orchestrator/StellaOps.Orchestrator/TASKS.md index 76fda45e..f3d75be3 100644 --- a/src/StellaOps.Orchestrator/TASKS.md +++ b/src/Orchestrator/StellaOps.Orchestrator/TASKS.md @@ -1,76 +1,76 @@ -# Orchestrator Service Task Board — Epic 9: Source & Job Orchestrator Dashboard - -## Sprint 32 – Foundations (Read-Only) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| ORCH-SVC-32-001 | TODO | Orchestrator Service Guild | DEVOPS-ORCH-32-001 | Bootstrap service project, configuration, and Postgres schema/migrations for `sources`, `runs`, `jobs`, `dag_edges`, `artifacts`, `quotas`, `schedules`. | Service builds/tests; migrations generated with repeatable scripts; baseline integration test seeds schema; compliance checklist recorded. | -| ORCH-SVC-32-002 | TODO | Orchestrator Service Guild | ORCH-SVC-32-001 | Implement scheduler DAG planner + dependency resolver, job state machine, and critical-path metadata without yet issuing control actions. | DAG builder passes unit/property tests; job states transition per spec; deterministic hashes recorded; docs updated in code comments. | -| ORCH-SVC-32-003 | TODO | Orchestrator Service Guild | ORCH-SVC-32-001 | Expose read-only REST APIs (sources, runs, jobs, DAG) with OpenAPI, validation, pagination, and tenant scoping. | Endpoints return deterministic responses; OpenAPI published; contract tests cover filters/pagination; lint passes. | -> Tenant-scoped tokens must require `orch:read` scope issued by Authority (`AUTH-ORCH-32-001`); reject legacy console bundles missing the new role and document requirement in OpenAPI security section. -| ORCH-SVC-32-004 | TODO | Orchestrator Service Guild | ORCH-SVC-32-002, ORCH-SVC-32-003 | Implement WebSocket/SSE stream for job/run updates, emit structured metrics counters/histograms, and add health probes. | SSE stream proven with integration test; metrics registered in Prometheus exporter; health endpoints wired; docstrings reference event schema. | -| ORCH-SVC-32-005 | TODO | Orchestrator Service Guild | ORCH-SVC-32-001, WORKER-GO-32-001, WORKER-PY-32-001 | Deliver worker claim/heartbeat/progress endpoints capturing artifact metadata/checksums and enforcing idempotency keys. | Claim/heartbeat/progress endpoints pass integration tests with Go/Python sample workers; artifact metadata persisted; idempotency violations rejected with `ERR_ORCH_4xx`; docs note imposed rule. | - -## Sprint 33 – Controls & Recovery -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| ORCH-SVC-33-001 | TODO | Orchestrator Service Guild | ORCH-SVC-32-003, AUTH-ORCH-33-001 | Enable `sources test|pause|resume|sync-now` and `jobs retry|cancel|prioritize` actions with audit logging, RBAC enforcement, and optimistic concurrency. | Actions mutate state deterministically; audit entries include operator, reason, ticket; integration tests cover happy/error paths; CLI/Console smoke pass. | -| ORCH-SVC-33-002 | TODO | Orchestrator Service Guild | ORCH-SVC-32-002, DEVOPS-ORCH-33-001 | Implement per-source/tenant adaptive token-bucket rate limiter, concurrency caps, and backpressure signals reacting to upstream 429/503. | Rate limiter configurable via API; metrics expose tokens available; simulated 429 storm reduces issuance ≥80%; tests exercise cooldown logic. 
| -| ORCH-SVC-33-003 | TODO | Orchestrator Service Guild | ORCH-SVC-32-002, WORKER-GO-33-001, WORKER-PY-33-001 | Add watermark/backfill manager with event-time windows, duplicate suppression, dry-run preview endpoint, and safety validations. | Backfill preview API returns window coverage; executed backfills avoid duplicate artifacts (hash equality); tests cover skew/overlap; docs updated. | -| ORCH-SVC-33-004 | TODO | Orchestrator Service Guild | ORCH-SVC-32-004 | Deliver dead-letter store, replay endpoints, and error classification surfaces with remediation hints + notification hooks. | Dead-letter entries persisted with error class + payload refs; replay moves jobs to queues; metrics/logs emitted; documentation references remediation guide. | - -## Sprint 34 – Backfills, Quotas & GA -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| ORCH-SVC-34-001 | TODO | Orchestrator Service Guild | ORCH-SVC-33-001, ORCH-SVC-33-002 | Implement quota management APIs, per-tenant SLO burn-rate computation, and alert budget tracking surfaced via metrics. | Quotas CRUD endpoints live with RBAC; burn-rate metrics published; alerts hooked (DEVOPS-ORCH-34-001); unit/integration tests cover overage scenarios. | -| ORCH-SVC-34-002 | TODO | Orchestrator Service Guild | ORCH-SVC-33-004, LEDGER-34-101 | Build audit log + immutable run ledger export with signed manifest support, including provenance chain to artifacts. | Ledger export produces signed manifest; hash chain verified; integration test links to Findings Ledger; docs cross-link to run-ledger doc. | -| ORCH-SVC-34-003 | TODO | Orchestrator Service Guild | ORCH-SVC-32-004, ORCH-SVC-33-002 | Execute perf/scale validation (≥10k pending jobs, dispatch P95 <150 ms) and add autoscaling hooks with health probes. | Load test report committed; autoscale recommendations documented; health probes wired; perf regression guard added to CI. | -| ORCH-SVC-34-004 | TODO | Orchestrator Service Guild | ORCH-SVC-34-001..003, DEPLOY-ORCH-34-001 | Package orchestrator container, Helm overlays, offline bundle seeds, provenance attestations, and compliance checklist for GA. | Container built with SBOM/attestation; Helm/Compose overlays committed; offline bundle instructions validated; launch readiness checklist signed. | - -## Export Center Integration -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| ORCH-SVC-35-101 | TODO | Orchestrator Service Guild | EXPORT-SVC-35-001 | Register `export` job type with quotas/rate policies, expose telemetry, and ensure exporter workers heartbeat via orchestrator contracts. | Job type available; metrics emitted; integration test with exporter worker passes. | -| ORCH-SVC-36-101 | TODO | Orchestrator Service Guild | ORCH-SVC-35-101, EXPORT-SVC-36-003 | Capture distribution metadata and retention timestamps for export jobs, updating dashboards and SSE payloads. | Distribution state persisted; SSE includes distribution progress; dashboards updated. | -| ORCH-SVC-37-101 | TODO | Orchestrator Service Guild | ORCH-SVC-36-101, EXPORT-SVC-37-003 | Enable scheduled export runs, retention pruning hooks, and failure alerting tied to export job class. | Schedules trigger exports; retention API operational; alerts configured; tests cover failure alerting. 
| - -## Notifications Studio Integration -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| ORCH-SVC-38-101 | TODO | Orchestrator Service Guild | — | Standardize event envelope (policy/export/job lifecycle) with idempotency keys, ensure export/job failure events published to notifier bus with provenance metadata. | Event schema documented; idempotency keys enforced; notifier integration tests consume events; metrics updated. | - -## CLI Parity & Task Packs Integration -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| ORCH-SVC-41-101 | TODO | Orchestrator Service Guild | AUTH-PACKS-41-001 | Register `pack-run` job type, persist run metadata, integrate logs/artifacts collection, and expose API for Task Runner scheduling. | Pack job type available; logs/artifacts stored; API documented; CLI E2E test passes. | -| ORCH-SVC-42-101 | TODO | Orchestrator Service Guild | ORCH-SVC-41-101, TASKRUN-41-001 | Stream pack run logs via SSE/WS, add manifest endpoints, enforce quotas, and emit pack run events to Notifications Studio. | Log stream operational; manifests accessible; quotas enforced; events published; tests cover flows. | - -## Authority-Backed Scopes & Tenancy (Epic 14) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| ORCH-TEN-48-001 | TODO | Orchestrator Service Guild | WEB-TEN-47-001 | Include `tenant_id`/`project_id` in job specs, set DB session context before processing, enforce context on all queries, and reject jobs missing tenant metadata. | Jobs stamped with tenant/project; RLS respected; tests cover missing context rejection. | - -## Observability & Forensics (Epic 15) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| ORCH-OBS-50-001 | TODO | Orchestrator Service Guild, Observability Guild | TELEMETRY-OBS-50-001, TELEMETRY-OBS-50-002 | Wire `StellaOps.Telemetry.Core` into orchestrator host, instrument schedulers and control APIs with trace spans, structured logs, and exemplar metrics. Ensure tenant/job metadata recorded for every span/log. | Telemetry emitted on happy/error paths; integration tests assert trace propagation to worker payloads; log field contract validated. | -| ORCH-OBS-51-001 | TODO | Orchestrator Service Guild, DevOps Guild | ORCH-OBS-50-001, TELEMETRY-OBS-51-001 | Publish golden-signal metrics (dispatch latency, queue depth, failure rate), define job/tenant SLOs, and emit burn-rate alerts to collector + Notifications. Provide Grafana dashboards + alert rules. | Metrics visible in dashboards; burn-rate alerts trigger in staging; documentation updated with thresholds and runbooks. | -| ORCH-OBS-52-001 | TODO | Orchestrator Service Guild | ORCH-OBS-50-001, TIMELINE-OBS-52-002 | Emit `timeline_event` objects for job lifecycle (`job.scheduled`, `job.started`, `job.completed`, `job.failed`) including trace IDs, run IDs, tenant/project, and causal metadata. Add contract tests and Kafka/NATS emitter with retries. | Timeline events verified against fixtures; duplicates suppressed; failure retries logged; docs reference schema. 
| -| ORCH-OBS-53-001 | TODO | Orchestrator Service Guild, Evidence Locker Guild | ORCH-OBS-52-001, EVID-OBS-53-002 | Generate job capsule inputs for evidence locker (payload digests, worker image, config hash, log manifest) and invoke locker snapshot hooks on completion/failure. Ensure redaction guard enforced. | Evidence snapshots created for sample jobs; manifests deterministic; secret redaction tests pass; documentation updated. | -| ORCH-OBS-54-001 | TODO | Orchestrator Service Guild, Provenance Guild | ORCH-OBS-53-001, PROV-OBS-53-002 | Produce DSSE attestations for orchestrator-scheduled jobs (subject = job capsule) and store references in timeline + evidence locker. Provide verification endpoint `/jobs/{id}/attestation`. | Attestations generated and verified in integration tests; timeline links added; docs updated. | -| ORCH-OBS-55-001 | TODO | Orchestrator Service Guild, DevOps Guild | ORCH-OBS-51-001, TELEMETRY-OBS-55-001, DEVOPS-OBS-55-001 | Implement incident mode hooks (sampling overrides, extended retention, additional debug spans) and automatic activation on SLO burn-rate breach. Emit activation/deactivation events to timeline + Notifier. | Incident mode triggers automatically in staging; manual override API documented; events observed in timeline and notifications. | - -## Air-Gapped Mode (Epic 16) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| ORCH-AIRGAP-56-001 | TODO | Orchestrator Service Guild, AirGap Policy Guild | AIRGAP-POL-56-001, TASKRUN-OBS-50-001 | Enforce job descriptors to declare network intents; reject or flag any external endpoints in sealed mode before scheduling. | Validator prevents forbidden jobs; errors return remediation guidance; tests cover allow/deny cases. | -| ORCH-AIRGAP-56-002 | TODO | Orchestrator Service Guild, AirGap Controller Guild | ORCH-AIRGAP-56-001, AIRGAP-CTL-56-002 | Surface sealing status and time staleness in job scheduling decisions; block runs when staleness budgets exceeded. | Scheduler checks status API; blocked runs emit timeline + notification; tests cover stale vs fresh. | -| ORCH-AIRGAP-57-001 | TODO | Orchestrator Service Guild, Mirror Creator Guild | ORCH-AIRGAP-56-001, MIRROR-CRT-58-002 | Add job type `mirror.bundle` to orchestrate bundle creation in connected environments with audit + provenance outputs. | Job type defined; export center integration validated; timeline events emitted. | -| ORCH-AIRGAP-58-001 | TODO | Orchestrator Service Guild, Evidence Locker Guild | ORCH-OBS-53-001, EVID-OBS-55-001 | Capture import/export operations as timeline/evidence entries, ensuring chain-of-custody for mirror + portable evidence jobs. | Evidence snapshots created; timeline references bundle/job IDs; integration tests pass. | - -## SDKs & OpenAPI (Epic 17) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| ORCH-OAS-61-001 | TODO | Orchestrator Service Guild, API Contracts Guild | OAS-61-001 | Document orchestrator endpoints in per-service OAS with standardized pagination, idempotency, and error envelope examples. | Spec covers all orchestrator endpoints; lint passes; examples validated. | -| ORCH-OAS-61-002 | TODO | Orchestrator Service Guild | ORCH-OAS-61-001 | Implement `GET /.well-known/openapi` in service and ensure version metadata aligns with runtime build. | Discovery endpoint live; integration test verifies schema + headers. 
| -| ORCH-OAS-62-001 | TODO | Orchestrator Service Guild, SDK Generator Guild | ORCH-OAS-61-001, SDKGEN-63-001 | Ensure SDK paginators and operations support orchestrator job operations; add SDK smoke tests for schedule/retry APIs. | SDK integration tests cover orchestrator flows; CLI reuses SDK methods. | -| ORCH-OAS-63-001 | TODO | Orchestrator Service Guild, API Governance Guild | APIGOV-63-001 | Emit deprecation headers and documentation for legacy orchestrator endpoints; update notifications metadata. | Deprecated endpoints include headers + docs; Notifications triggered in staging. | +# Orchestrator Service Task Board — Epic 9: Source & Job Orchestrator Dashboard + +## Sprint 32 – Foundations (Read-Only) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| ORCH-SVC-32-001 | TODO | Orchestrator Service Guild | DEVOPS-ORCH-32-001 | Bootstrap service project, configuration, and Postgres schema/migrations for `sources`, `runs`, `jobs`, `dag_edges`, `artifacts`, `quotas`, `schedules`. | Service builds/tests; migrations generated with repeatable scripts; baseline integration test seeds schema; compliance checklist recorded. | +| ORCH-SVC-32-002 | TODO | Orchestrator Service Guild | ORCH-SVC-32-001 | Implement scheduler DAG planner + dependency resolver, job state machine, and critical-path metadata without yet issuing control actions. | DAG builder passes unit/property tests; job states transition per spec; deterministic hashes recorded; docs updated in code comments. | +| ORCH-SVC-32-003 | TODO | Orchestrator Service Guild | ORCH-SVC-32-001 | Expose read-only REST APIs (sources, runs, jobs, DAG) with OpenAPI, validation, pagination, and tenant scoping. | Endpoints return deterministic responses; OpenAPI published; contract tests cover filters/pagination; lint passes. | +> Tenant-scoped tokens must require `orch:read` scope issued by Authority (`AUTH-ORCH-32-001`); reject legacy console bundles missing the new role and document requirement in OpenAPI security section. +| ORCH-SVC-32-004 | TODO | Orchestrator Service Guild | ORCH-SVC-32-002, ORCH-SVC-32-003 | Implement WebSocket/SSE stream for job/run updates, emit structured metrics counters/histograms, and add health probes. | SSE stream proven with integration test; metrics registered in Prometheus exporter; health endpoints wired; docstrings reference event schema. | +| ORCH-SVC-32-005 | TODO | Orchestrator Service Guild | ORCH-SVC-32-001, WORKER-GO-32-001, WORKER-PY-32-001 | Deliver worker claim/heartbeat/progress endpoints capturing artifact metadata/checksums and enforcing idempotency keys. | Claim/heartbeat/progress endpoints pass integration tests with Go/Python sample workers; artifact metadata persisted; idempotency violations rejected with `ERR_ORCH_4xx`; docs note imposed rule. | + +## Sprint 33 – Controls & Recovery +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| ORCH-SVC-33-001 | TODO | Orchestrator Service Guild | ORCH-SVC-32-003, AUTH-ORCH-33-001 | Enable `sources test|pause|resume|sync-now` and `jobs retry|cancel|prioritize` actions with audit logging, RBAC enforcement, and optimistic concurrency. | Actions mutate state deterministically; audit entries include operator, reason, ticket; integration tests cover happy/error paths; CLI/Console smoke pass. 
| +| ORCH-SVC-33-002 | TODO | Orchestrator Service Guild | ORCH-SVC-32-002, DEVOPS-ORCH-33-001 | Implement per-source/tenant adaptive token-bucket rate limiter, concurrency caps, and backpressure signals reacting to upstream 429/503. | Rate limiter configurable via API; metrics expose tokens available; simulated 429 storm reduces issuance ≥80%; tests exercise cooldown logic. | +| ORCH-SVC-33-003 | TODO | Orchestrator Service Guild | ORCH-SVC-32-002, WORKER-GO-33-001, WORKER-PY-33-001 | Add watermark/backfill manager with event-time windows, duplicate suppression, dry-run preview endpoint, and safety validations. | Backfill preview API returns window coverage; executed backfills avoid duplicate artifacts (hash equality); tests cover skew/overlap; docs updated. | +| ORCH-SVC-33-004 | TODO | Orchestrator Service Guild | ORCH-SVC-32-004 | Deliver dead-letter store, replay endpoints, and error classification surfaces with remediation hints + notification hooks. | Dead-letter entries persisted with error class + payload refs; replay moves jobs to queues; metrics/logs emitted; documentation references remediation guide. | + +## Sprint 34 – Backfills, Quotas & GA +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| ORCH-SVC-34-001 | TODO | Orchestrator Service Guild | ORCH-SVC-33-001, ORCH-SVC-33-002 | Implement quota management APIs, per-tenant SLO burn-rate computation, and alert budget tracking surfaced via metrics. | Quotas CRUD endpoints live with RBAC; burn-rate metrics published; alerts hooked (DEVOPS-ORCH-34-001); unit/integration tests cover overage scenarios. | +| ORCH-SVC-34-002 | TODO | Orchestrator Service Guild | ORCH-SVC-33-004, LEDGER-34-101 | Build audit log + immutable run ledger export with signed manifest support, including provenance chain to artifacts. | Ledger export produces signed manifest; hash chain verified; integration test links to Findings Ledger; docs cross-link to run-ledger doc. | +| ORCH-SVC-34-003 | TODO | Orchestrator Service Guild | ORCH-SVC-32-004, ORCH-SVC-33-002 | Execute perf/scale validation (≥10k pending jobs, dispatch P95 <150 ms) and add autoscaling hooks with health probes. | Load test report committed; autoscale recommendations documented; health probes wired; perf regression guard added to CI. | +| ORCH-SVC-34-004 | TODO | Orchestrator Service Guild | ORCH-SVC-34-001..003, DEPLOY-ORCH-34-001 | Package orchestrator container, Helm overlays, offline bundle seeds, provenance attestations, and compliance checklist for GA. | Container built with SBOM/attestation; Helm/Compose overlays committed; offline bundle instructions validated; launch readiness checklist signed. | + +## Export Center Integration +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| ORCH-SVC-35-101 | TODO | Orchestrator Service Guild | EXPORT-SVC-35-001 | Register `export` job type with quotas/rate policies, expose telemetry, and ensure exporter workers heartbeat via orchestrator contracts. | Job type available; metrics emitted; integration test with exporter worker passes. | +| ORCH-SVC-36-101 | TODO | Orchestrator Service Guild | ORCH-SVC-35-101, EXPORT-SVC-36-003 | Capture distribution metadata and retention timestamps for export jobs, updating dashboards and SSE payloads. | Distribution state persisted; SSE includes distribution progress; dashboards updated. 
| +| ORCH-SVC-37-101 | TODO | Orchestrator Service Guild | ORCH-SVC-36-101, EXPORT-SVC-37-003 | Enable scheduled export runs, retention pruning hooks, and failure alerting tied to export job class. | Schedules trigger exports; retention API operational; alerts configured; tests cover failure alerting. | + +## Notifications Studio Integration +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| ORCH-SVC-38-101 | TODO | Orchestrator Service Guild | — | Standardize event envelope (policy/export/job lifecycle) with idempotency keys, ensure export/job failure events published to notifier bus with provenance metadata. | Event schema documented; idempotency keys enforced; notifier integration tests consume events; metrics updated. | + +## CLI Parity & Task Packs Integration +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| ORCH-SVC-41-101 | TODO | Orchestrator Service Guild | AUTH-PACKS-41-001 | Register `pack-run` job type, persist run metadata, integrate logs/artifacts collection, and expose API for Task Runner scheduling. | Pack job type available; logs/artifacts stored; API documented; CLI E2E test passes. | +| ORCH-SVC-42-101 | TODO | Orchestrator Service Guild | ORCH-SVC-41-101, TASKRUN-41-001 | Stream pack run logs via SSE/WS, add manifest endpoints, enforce quotas, and emit pack run events to Notifications Studio. | Log stream operational; manifests accessible; quotas enforced; events published; tests cover flows. | + +## Authority-Backed Scopes & Tenancy (Epic 14) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| ORCH-TEN-48-001 | TODO | Orchestrator Service Guild | WEB-TEN-47-001 | Include `tenant_id`/`project_id` in job specs, set DB session context before processing, enforce context on all queries, and reject jobs missing tenant metadata. | Jobs stamped with tenant/project; RLS respected; tests cover missing context rejection. | + +## Observability & Forensics (Epic 15) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| ORCH-OBS-50-001 | TODO | Orchestrator Service Guild, Observability Guild | TELEMETRY-OBS-50-001, TELEMETRY-OBS-50-002 | Wire `StellaOps.Telemetry.Core` into orchestrator host, instrument schedulers and control APIs with trace spans, structured logs, and exemplar metrics. Ensure tenant/job metadata recorded for every span/log. | Telemetry emitted on happy/error paths; integration tests assert trace propagation to worker payloads; log field contract validated. | +| ORCH-OBS-51-001 | TODO | Orchestrator Service Guild, DevOps Guild | ORCH-OBS-50-001, TELEMETRY-OBS-51-001 | Publish golden-signal metrics (dispatch latency, queue depth, failure rate), define job/tenant SLOs, and emit burn-rate alerts to collector + Notifications. Provide Grafana dashboards + alert rules. | Metrics visible in dashboards; burn-rate alerts trigger in staging; documentation updated with thresholds and runbooks. | +| ORCH-OBS-52-001 | TODO | Orchestrator Service Guild | ORCH-OBS-50-001, TIMELINE-OBS-52-002 | Emit `timeline_event` objects for job lifecycle (`job.scheduled`, `job.started`, `job.completed`, `job.failed`) including trace IDs, run IDs, tenant/project, and causal metadata. 
Add contract tests and Kafka/NATS emitter with retries. | Timeline events verified against fixtures; duplicates suppressed; failure retries logged; docs reference schema. | +| ORCH-OBS-53-001 | TODO | Orchestrator Service Guild, Evidence Locker Guild | ORCH-OBS-52-001, EVID-OBS-53-002 | Generate job capsule inputs for evidence locker (payload digests, worker image, config hash, log manifest) and invoke locker snapshot hooks on completion/failure. Ensure redaction guard enforced. | Evidence snapshots created for sample jobs; manifests deterministic; secret redaction tests pass; documentation updated. | +| ORCH-OBS-54-001 | TODO | Orchestrator Service Guild, Provenance Guild | ORCH-OBS-53-001, PROV-OBS-53-002 | Produce DSSE attestations for orchestrator-scheduled jobs (subject = job capsule) and store references in timeline + evidence locker. Provide verification endpoint `/jobs/{id}/attestation`. | Attestations generated and verified in integration tests; timeline links added; docs updated. | +| ORCH-OBS-55-001 | TODO | Orchestrator Service Guild, DevOps Guild | ORCH-OBS-51-001, TELEMETRY-OBS-55-001, DEVOPS-OBS-55-001 | Implement incident mode hooks (sampling overrides, extended retention, additional debug spans) and automatic activation on SLO burn-rate breach. Emit activation/deactivation events to timeline + Notifier. | Incident mode triggers automatically in staging; manual override API documented; events observed in timeline and notifications. | + +## Air-Gapped Mode (Epic 16) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| ORCH-AIRGAP-56-001 | TODO | Orchestrator Service Guild, AirGap Policy Guild | AIRGAP-POL-56-001, TASKRUN-OBS-50-001 | Enforce job descriptors to declare network intents; reject or flag any external endpoints in sealed mode before scheduling. | Validator prevents forbidden jobs; errors return remediation guidance; tests cover allow/deny cases. | +| ORCH-AIRGAP-56-002 | TODO | Orchestrator Service Guild, AirGap Controller Guild | ORCH-AIRGAP-56-001, AIRGAP-CTL-56-002 | Surface sealing status and time staleness in job scheduling decisions; block runs when staleness budgets exceeded. | Scheduler checks status API; blocked runs emit timeline + notification; tests cover stale vs fresh. | +| ORCH-AIRGAP-57-001 | TODO | Orchestrator Service Guild, Mirror Creator Guild | ORCH-AIRGAP-56-001, MIRROR-CRT-58-002 | Add job type `mirror.bundle` to orchestrate bundle creation in connected environments with audit + provenance outputs. | Job type defined; export center integration validated; timeline events emitted. | +| ORCH-AIRGAP-58-001 | TODO | Orchestrator Service Guild, Evidence Locker Guild | ORCH-OBS-53-001, EVID-OBS-55-001 | Capture import/export operations as timeline/evidence entries, ensuring chain-of-custody for mirror + portable evidence jobs. | Evidence snapshots created; timeline references bundle/job IDs; integration tests pass. | + +## SDKs & OpenAPI (Epic 17) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| ORCH-OAS-61-001 | TODO | Orchestrator Service Guild, API Contracts Guild | OAS-61-001 | Document orchestrator endpoints in per-service OAS with standardized pagination, idempotency, and error envelope examples. | Spec covers all orchestrator endpoints; lint passes; examples validated. 
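+
+> Non-normative sketch of the `timeline_event` envelope referenced by ORCH-OBS-52-001 and reused by the ORCH-SVC-38-101 notifier work; property names are illustrative and the canonical schema is owned by TIMELINE-OBS-52-002.
+
+```csharp
+// Illustrative envelope only — field names are assumptions, not the agreed schema.
+public sealed record TimelineEvent(
+    string EventId,                                  // deterministic ID; doubles as the idempotency key
+    string Kind,                                     // e.g. "job.scheduled", "job.started", "job.completed", "job.failed"
+    string TenantId,
+    string? ProjectId,
+    string RunId,
+    string JobId,
+    string TraceId,                                  // W3C trace ID for span/log correlation
+    DateTimeOffset OccurredAt,
+    IReadOnlyDictionary<string, string> Causal);     // causal metadata, e.g. parent job ID, trigger source
+```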
| +| ORCH-OAS-61-002 | TODO | Orchestrator Service Guild | ORCH-OAS-61-001 | Implement `GET /.well-known/openapi` in service and ensure version metadata aligns with runtime build. | Discovery endpoint live; integration test verifies schema + headers. | +| ORCH-OAS-62-001 | TODO | Orchestrator Service Guild, SDK Generator Guild | ORCH-OAS-61-001, SDKGEN-63-001 | Ensure SDK paginators and operations support orchestrator job operations; add SDK smoke tests for schedule/retry APIs. | SDK integration tests cover orchestrator flows; CLI reuses SDK methods. | +| ORCH-OAS-63-001 | TODO | Orchestrator Service Guild, API Governance Guild | APIGOV-63-001 | Emit deprecation headers and documentation for legacy orchestrator endpoints; update notifications metadata. | Deprecated endpoints include headers + docs; Notifications triggered in staging. | diff --git a/src/PacksRegistry/StellaOps.PacksRegistry.sln b/src/PacksRegistry/StellaOps.PacksRegistry.sln new file mode 100644 index 00000000..5e633e36 --- /dev/null +++ b/src/PacksRegistry/StellaOps.PacksRegistry.sln @@ -0,0 +1,99 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.0.31903.59 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "StellaOps.PacksRegistry", "StellaOps.PacksRegistry", "{5E837028-56B3-94B8-18C9-620EA4BA051A}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.PacksRegistry.Core", "StellaOps.PacksRegistry\StellaOps.PacksRegistry.Core\StellaOps.PacksRegistry.Core.csproj", "{5827B712-158F-4C99-859A-308A612F9482}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.PacksRegistry.Infrastructure", "StellaOps.PacksRegistry\StellaOps.PacksRegistry.Infrastructure\StellaOps.PacksRegistry.Infrastructure.csproj", "{1870FA80-A39D-4115-90AC-CB13E5640372}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.PacksRegistry.Tests", "StellaOps.PacksRegistry\StellaOps.PacksRegistry.Tests\StellaOps.PacksRegistry.Tests.csproj", "{9594F2C3-D509-419E-81CC-615798845A53}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.PacksRegistry.WebService", "StellaOps.PacksRegistry\StellaOps.PacksRegistry.WebService\StellaOps.PacksRegistry.WebService.csproj", "{316EBEF5-5749-486A-B9E8-A3DDE0AEAE60}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.PacksRegistry.Worker", "StellaOps.PacksRegistry\StellaOps.PacksRegistry.Worker\StellaOps.PacksRegistry.Worker.csproj", "{E6F019B3-D1BA-4E2D-808C-9A0A215096C5}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|Any CPU = Release|Any CPU + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {5827B712-158F-4C99-859A-308A612F9482}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {5827B712-158F-4C99-859A-308A612F9482}.Debug|Any CPU.Build.0 = Debug|Any CPU + {5827B712-158F-4C99-859A-308A612F9482}.Debug|x64.ActiveCfg = Debug|Any CPU + {5827B712-158F-4C99-859A-308A612F9482}.Debug|x64.Build.0 = Debug|Any CPU + {5827B712-158F-4C99-859A-308A612F9482}.Debug|x86.ActiveCfg = Debug|Any CPU + {5827B712-158F-4C99-859A-308A612F9482}.Debug|x86.Build.0 = Debug|Any CPU + {5827B712-158F-4C99-859A-308A612F9482}.Release|Any CPU.ActiveCfg = Release|Any CPU + {5827B712-158F-4C99-859A-308A612F9482}.Release|Any 
CPU.Build.0 = Release|Any CPU + {5827B712-158F-4C99-859A-308A612F9482}.Release|x64.ActiveCfg = Release|Any CPU + {5827B712-158F-4C99-859A-308A612F9482}.Release|x64.Build.0 = Release|Any CPU + {5827B712-158F-4C99-859A-308A612F9482}.Release|x86.ActiveCfg = Release|Any CPU + {5827B712-158F-4C99-859A-308A612F9482}.Release|x86.Build.0 = Release|Any CPU + {1870FA80-A39D-4115-90AC-CB13E5640372}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {1870FA80-A39D-4115-90AC-CB13E5640372}.Debug|Any CPU.Build.0 = Debug|Any CPU + {1870FA80-A39D-4115-90AC-CB13E5640372}.Debug|x64.ActiveCfg = Debug|Any CPU + {1870FA80-A39D-4115-90AC-CB13E5640372}.Debug|x64.Build.0 = Debug|Any CPU + {1870FA80-A39D-4115-90AC-CB13E5640372}.Debug|x86.ActiveCfg = Debug|Any CPU + {1870FA80-A39D-4115-90AC-CB13E5640372}.Debug|x86.Build.0 = Debug|Any CPU + {1870FA80-A39D-4115-90AC-CB13E5640372}.Release|Any CPU.ActiveCfg = Release|Any CPU + {1870FA80-A39D-4115-90AC-CB13E5640372}.Release|Any CPU.Build.0 = Release|Any CPU + {1870FA80-A39D-4115-90AC-CB13E5640372}.Release|x64.ActiveCfg = Release|Any CPU + {1870FA80-A39D-4115-90AC-CB13E5640372}.Release|x64.Build.0 = Release|Any CPU + {1870FA80-A39D-4115-90AC-CB13E5640372}.Release|x86.ActiveCfg = Release|Any CPU + {1870FA80-A39D-4115-90AC-CB13E5640372}.Release|x86.Build.0 = Release|Any CPU + {9594F2C3-D509-419E-81CC-615798845A53}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {9594F2C3-D509-419E-81CC-615798845A53}.Debug|Any CPU.Build.0 = Debug|Any CPU + {9594F2C3-D509-419E-81CC-615798845A53}.Debug|x64.ActiveCfg = Debug|Any CPU + {9594F2C3-D509-419E-81CC-615798845A53}.Debug|x64.Build.0 = Debug|Any CPU + {9594F2C3-D509-419E-81CC-615798845A53}.Debug|x86.ActiveCfg = Debug|Any CPU + {9594F2C3-D509-419E-81CC-615798845A53}.Debug|x86.Build.0 = Debug|Any CPU + {9594F2C3-D509-419E-81CC-615798845A53}.Release|Any CPU.ActiveCfg = Release|Any CPU + {9594F2C3-D509-419E-81CC-615798845A53}.Release|Any CPU.Build.0 = Release|Any CPU + {9594F2C3-D509-419E-81CC-615798845A53}.Release|x64.ActiveCfg = Release|Any CPU + {9594F2C3-D509-419E-81CC-615798845A53}.Release|x64.Build.0 = Release|Any CPU + {9594F2C3-D509-419E-81CC-615798845A53}.Release|x86.ActiveCfg = Release|Any CPU + {9594F2C3-D509-419E-81CC-615798845A53}.Release|x86.Build.0 = Release|Any CPU + {316EBEF5-5749-486A-B9E8-A3DDE0AEAE60}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {316EBEF5-5749-486A-B9E8-A3DDE0AEAE60}.Debug|Any CPU.Build.0 = Debug|Any CPU + {316EBEF5-5749-486A-B9E8-A3DDE0AEAE60}.Debug|x64.ActiveCfg = Debug|Any CPU + {316EBEF5-5749-486A-B9E8-A3DDE0AEAE60}.Debug|x64.Build.0 = Debug|Any CPU + {316EBEF5-5749-486A-B9E8-A3DDE0AEAE60}.Debug|x86.ActiveCfg = Debug|Any CPU + {316EBEF5-5749-486A-B9E8-A3DDE0AEAE60}.Debug|x86.Build.0 = Debug|Any CPU + {316EBEF5-5749-486A-B9E8-A3DDE0AEAE60}.Release|Any CPU.ActiveCfg = Release|Any CPU + {316EBEF5-5749-486A-B9E8-A3DDE0AEAE60}.Release|Any CPU.Build.0 = Release|Any CPU + {316EBEF5-5749-486A-B9E8-A3DDE0AEAE60}.Release|x64.ActiveCfg = Release|Any CPU + {316EBEF5-5749-486A-B9E8-A3DDE0AEAE60}.Release|x64.Build.0 = Release|Any CPU + {316EBEF5-5749-486A-B9E8-A3DDE0AEAE60}.Release|x86.ActiveCfg = Release|Any CPU + {316EBEF5-5749-486A-B9E8-A3DDE0AEAE60}.Release|x86.Build.0 = Release|Any CPU + {E6F019B3-D1BA-4E2D-808C-9A0A215096C5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E6F019B3-D1BA-4E2D-808C-9A0A215096C5}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E6F019B3-D1BA-4E2D-808C-9A0A215096C5}.Debug|x64.ActiveCfg = Debug|Any CPU + {E6F019B3-D1BA-4E2D-808C-9A0A215096C5}.Debug|x64.Build.0 = Debug|Any CPU + 
{E6F019B3-D1BA-4E2D-808C-9A0A215096C5}.Debug|x86.ActiveCfg = Debug|Any CPU + {E6F019B3-D1BA-4E2D-808C-9A0A215096C5}.Debug|x86.Build.0 = Debug|Any CPU + {E6F019B3-D1BA-4E2D-808C-9A0A215096C5}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E6F019B3-D1BA-4E2D-808C-9A0A215096C5}.Release|Any CPU.Build.0 = Release|Any CPU + {E6F019B3-D1BA-4E2D-808C-9A0A215096C5}.Release|x64.ActiveCfg = Release|Any CPU + {E6F019B3-D1BA-4E2D-808C-9A0A215096C5}.Release|x64.Build.0 = Release|Any CPU + {E6F019B3-D1BA-4E2D-808C-9A0A215096C5}.Release|x86.ActiveCfg = Release|Any CPU + {E6F019B3-D1BA-4E2D-808C-9A0A215096C5}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(NestedProjects) = preSolution + {5827B712-158F-4C99-859A-308A612F9482} = {5E837028-56B3-94B8-18C9-620EA4BA051A} + {1870FA80-A39D-4115-90AC-CB13E5640372} = {5E837028-56B3-94B8-18C9-620EA4BA051A} + {9594F2C3-D509-419E-81CC-615798845A53} = {5E837028-56B3-94B8-18C9-620EA4BA051A} + {316EBEF5-5749-486A-B9E8-A3DDE0AEAE60} = {5E837028-56B3-94B8-18C9-620EA4BA051A} + {E6F019B3-D1BA-4E2D-808C-9A0A215096C5} = {5E837028-56B3-94B8-18C9-620EA4BA051A} + EndGlobalSection +EndGlobal diff --git a/src/StellaOps.PacksRegistry/AGENTS.md b/src/PacksRegistry/StellaOps.PacksRegistry/AGENTS.md similarity index 98% rename from src/StellaOps.PacksRegistry/AGENTS.md rename to src/PacksRegistry/StellaOps.PacksRegistry/AGENTS.md index e4b31d7d..8b8136ca 100644 --- a/src/StellaOps.PacksRegistry/AGENTS.md +++ b/src/PacksRegistry/StellaOps.PacksRegistry/AGENTS.md @@ -1,17 +1,17 @@ -# Packs Registry Service — Agent Charter - -## Mission -Host signed Task Pack bundles with provenance and RBAC for Epic 12. Ensure packs are verifiable, auditable, and distributed safely, respecting the imposed rule to propagate similar safeguards elsewhere. - -## Responsibilities -- Maintain packs index, signature verification, provenance metadata, tenant visibility, and registry APIs. -- Integrate with CLI, Task Runner, Orchestrator, Authority, Export Center, and DevOps tooling. -- Guarantee deterministic digest computations, immutable history, and secure storage of pack artefacts. - -## Module Layout -- `StellaOps.PacksRegistry.Core/` — pack catalogue models, validation, lifecycle orchestration. -- `StellaOps.PacksRegistry.Infrastructure/` — storage providers, signature verification hooks, provenance stores. -- `StellaOps.PacksRegistry.WebService/` — registry APIs and RBAC enforcement. -- `StellaOps.PacksRegistry.Worker/` — background reconciliation, mirroring, and rotation jobs. -- `StellaOps.PacksRegistry.Tests/` — unit tests validating core/infrastructure logic. -- `StellaOps.PacksRegistry.sln` — module solution. +# Packs Registry Service — Agent Charter + +## Mission +Host signed Task Pack bundles with provenance and RBAC for Epic 12. Ensure packs are verifiable, auditable, and distributed safely, respecting the imposed rule to propagate similar safeguards elsewhere. + +## Responsibilities +- Maintain packs index, signature verification, provenance metadata, tenant visibility, and registry APIs. +- Integrate with CLI, Task Runner, Orchestrator, Authority, Export Center, and DevOps tooling. +- Guarantee deterministic digest computations, immutable history, and secure storage of pack artefacts. + +## Module Layout +- `StellaOps.PacksRegistry.Core/` — pack catalogue models, validation, lifecycle orchestration. 
+- `StellaOps.PacksRegistry.Infrastructure/` — storage providers, signature verification hooks, provenance stores. +- `StellaOps.PacksRegistry.WebService/` — registry APIs and RBAC enforcement. +- `StellaOps.PacksRegistry.Worker/` — background reconciliation, mirroring, and rotation jobs. +- `StellaOps.PacksRegistry.Tests/` — unit tests validating core/infrastructure logic. +- `StellaOps.PacksRegistry.sln` — module solution. diff --git a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Core/Class1.cs b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Core/Class1.cs similarity index 92% rename from src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Core/Class1.cs rename to src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Core/Class1.cs index b5cb679a..fe0664de 100644 --- a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Core/Class1.cs +++ b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Core/Class1.cs @@ -1,6 +1,6 @@ -namespace StellaOps.PacksRegistry.Core; - -public class Class1 -{ - -} +namespace StellaOps.PacksRegistry.Core; + +public class Class1 +{ + +} diff --git a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Core/StellaOps.PacksRegistry.Core.csproj b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Core/StellaOps.PacksRegistry.Core.csproj similarity index 95% rename from src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Core/StellaOps.PacksRegistry.Core.csproj rename to src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Core/StellaOps.PacksRegistry.Core.csproj index fe0eef44..e4808f0d 100644 --- a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Core/StellaOps.PacksRegistry.Core.csproj +++ b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Core/StellaOps.PacksRegistry.Core.csproj @@ -1,18 +1,18 @@ -<?xml version="1.0" ?> -<Project Sdk="Microsoft.NET.Sdk"> - - - - <PropertyGroup> - - - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <LangVersion>preview</LangVersion> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - - - -</Project> +<?xml version="1.0" ?> +<Project Sdk="Microsoft.NET.Sdk"> + + + + <PropertyGroup> + + + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <LangVersion>preview</LangVersion> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + + + +</Project> diff --git a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Infrastructure/Class1.cs b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Infrastructure/Class1.cs similarity index 92% rename from src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Infrastructure/Class1.cs rename to src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Infrastructure/Class1.cs index 457bf1d0..cb007f4f 100644 --- a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Infrastructure/Class1.cs +++ b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Infrastructure/Class1.cs @@ -1,6 +1,6 @@ -namespace StellaOps.PacksRegistry.Infrastructure; - -public class Class1 -{ - -} +namespace StellaOps.PacksRegistry.Infrastructure; + +public class Class1 +{ + +} diff --git a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Infrastructure/StellaOps.PacksRegistry.Infrastructure.csproj 
b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Infrastructure/StellaOps.PacksRegistry.Infrastructure.csproj similarity index 94% rename from src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Infrastructure/StellaOps.PacksRegistry.Infrastructure.csproj rename to src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Infrastructure/StellaOps.PacksRegistry.Infrastructure.csproj index 99c0a84e..0db5ce09 100644 --- a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Infrastructure/StellaOps.PacksRegistry.Infrastructure.csproj +++ b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Infrastructure/StellaOps.PacksRegistry.Infrastructure.csproj @@ -1,28 +1,28 @@ -<?xml version="1.0" ?> -<Project Sdk="Microsoft.NET.Sdk"> - - - - <ItemGroup> - - - <ProjectReference Include="..\StellaOps.PacksRegistry.Core\StellaOps.PacksRegistry.Core.csproj"/> - - - </ItemGroup> - - - - <PropertyGroup> - - - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <LangVersion>preview</LangVersion> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - - - -</Project> +<?xml version="1.0" ?> +<Project Sdk="Microsoft.NET.Sdk"> + + + + <ItemGroup> + + + <ProjectReference Include="..\StellaOps.PacksRegistry.Core\StellaOps.PacksRegistry.Core.csproj"/> + + + </ItemGroup> + + + + <PropertyGroup> + + + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <LangVersion>preview</LangVersion> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + + + +</Project> diff --git a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Tests/StellaOps.PacksRegistry.Tests.csproj b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Tests/StellaOps.PacksRegistry.Tests.csproj similarity index 91% rename from src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Tests/StellaOps.PacksRegistry.Tests.csproj rename to src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Tests/StellaOps.PacksRegistry.Tests.csproj index e445c56a..97697838 100644 --- a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Tests/StellaOps.PacksRegistry.Tests.csproj +++ b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Tests/StellaOps.PacksRegistry.Tests.csproj @@ -1,135 +1,135 @@ -<?xml version="1.0" ?> -<Project Sdk="Microsoft.NET.Sdk"> - - - - - - <PropertyGroup> - - - - - <OutputType>Exe</OutputType> - - - - - <IsPackable>false</IsPackable> - - - - - - - - - - - - - - <TargetFramework>net10.0</TargetFramework> - - - <ImplicitUsings>enable</ImplicitUsings> - - - <Nullable>enable</Nullable> - - - <UseConcelierTestInfra>false</UseConcelierTestInfra> - - - <LangVersion>preview</LangVersion> - - - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - - - </PropertyGroup> - - - - - - <ItemGroup> - - - - - <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1"/> - - - - - <PackageReference Include="xunit.v3" Version="3.0.0"/> - - - - - <PackageReference Include="xunit.runner.visualstudio" Version="3.1.3"/> - - - - - </ItemGroup> - - - - - - <ItemGroup> - - - - - <Content Include="xunit.runner.json" CopyToOutputDirectory="PreserveNewest"/> - - - - - </ItemGroup> - - - - - - <ItemGroup> - - - - - <Using Include="Xunit"/> - - - - - </ItemGroup> - - - - - - <ItemGroup> - - - - - <ProjectReference Include="..\StellaOps.PacksRegistry.Core\StellaOps.PacksRegistry.Core.csproj"/> - - - - - <ProjectReference 
Include="..\StellaOps.PacksRegistry.Infrastructure\StellaOps.PacksRegistry.Infrastructure.csproj"/> - - - - - </ItemGroup> - - - - - -</Project> +<?xml version="1.0" ?> +<Project Sdk="Microsoft.NET.Sdk"> + + + + + + <PropertyGroup> + + + + + <OutputType>Exe</OutputType> + + + + + <IsPackable>false</IsPackable> + + + + + + + + + + + + + + <TargetFramework>net10.0</TargetFramework> + + + <ImplicitUsings>enable</ImplicitUsings> + + + <Nullable>enable</Nullable> + + + <UseConcelierTestInfra>false</UseConcelierTestInfra> + + + <LangVersion>preview</LangVersion> + + + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + + + </PropertyGroup> + + + + + + <ItemGroup> + + + + + <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1"/> + + + + + <PackageReference Include="xunit.v3" Version="3.0.0"/> + + + + + <PackageReference Include="xunit.runner.visualstudio" Version="3.1.3"/> + + + + + </ItemGroup> + + + + + + <ItemGroup> + + + + + <Content Include="xunit.runner.json" CopyToOutputDirectory="PreserveNewest"/> + + + + + </ItemGroup> + + + + + + <ItemGroup> + + + + + <Using Include="Xunit"/> + + + + + </ItemGroup> + + + + + + <ItemGroup> + + + + + <ProjectReference Include="..\StellaOps.PacksRegistry.Core\StellaOps.PacksRegistry.Core.csproj"/> + + + + + <ProjectReference Include="..\StellaOps.PacksRegistry.Infrastructure\StellaOps.PacksRegistry.Infrastructure.csproj"/> + + + + + </ItemGroup> + + + + + +</Project> diff --git a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Tests/UnitTest1.cs b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Tests/UnitTest1.cs similarity index 92% rename from src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Tests/UnitTest1.cs rename to src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Tests/UnitTest1.cs index 6b234636..6e7566ea 100644 --- a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Tests/UnitTest1.cs +++ b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Tests/UnitTest1.cs @@ -1,10 +1,10 @@ -namespace StellaOps.PacksRegistry.Tests; - -public class UnitTest1 -{ - [Fact] - public void Test1() - { - - } -} +namespace StellaOps.PacksRegistry.Tests; + +public class UnitTest1 +{ + [Fact] + public void Test1() + { + + } +} diff --git a/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Tests/xunit.runner.json b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Tests/xunit.runner.json new file mode 100644 index 00000000..249d815c --- /dev/null +++ b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Tests/xunit.runner.json @@ -0,0 +1,3 @@ +{ + "$schema": "https://xunit.net/schema/current/xunit.runner.schema.json" +} diff --git a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/Program.cs b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/Program.cs similarity index 96% rename from src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/Program.cs rename to src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/Program.cs index ee9d65d6..3917ef1b 100644 --- a/src/StellaOps.EvidenceLocker/StellaOps.EvidenceLocker.WebService/Program.cs +++ b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/Program.cs @@ -1,41 +1,41 @@ -var builder = WebApplication.CreateBuilder(args); - -// Add services to the container. 
-// Learn more about configuring OpenAPI at https://aka.ms/aspnet/openapi -builder.Services.AddOpenApi(); - -var app = builder.Build(); - -// Configure the HTTP request pipeline. -if (app.Environment.IsDevelopment()) -{ - app.MapOpenApi(); -} - -app.UseHttpsRedirection(); - -var summaries = new[] -{ - "Freezing", "Bracing", "Chilly", "Cool", "Mild", "Warm", "Balmy", "Hot", "Sweltering", "Scorching" -}; - -app.MapGet("/weatherforecast", () => -{ - var forecast = Enumerable.Range(1, 5).Select(index => - new WeatherForecast - ( - DateOnly.FromDateTime(DateTime.Now.AddDays(index)), - Random.Shared.Next(-20, 55), - summaries[Random.Shared.Next(summaries.Length)] - )) - .ToArray(); - return forecast; -}) -.WithName("GetWeatherForecast"); - -app.Run(); - -record WeatherForecast(DateOnly Date, int TemperatureC, string? Summary) -{ - public int TemperatureF => 32 + (int)(TemperatureC / 0.5556); -} +var builder = WebApplication.CreateBuilder(args); + +// Add services to the container. +// Learn more about configuring OpenAPI at https://aka.ms/aspnet/openapi +builder.Services.AddOpenApi(); + +var app = builder.Build(); + +// Configure the HTTP request pipeline. +if (app.Environment.IsDevelopment()) +{ + app.MapOpenApi(); +} + +app.UseHttpsRedirection(); + +var summaries = new[] +{ + "Freezing", "Bracing", "Chilly", "Cool", "Mild", "Warm", "Balmy", "Hot", "Sweltering", "Scorching" +}; + +app.MapGet("/weatherforecast", () => +{ + var forecast = Enumerable.Range(1, 5).Select(index => + new WeatherForecast + ( + DateOnly.FromDateTime(DateTime.Now.AddDays(index)), + Random.Shared.Next(-20, 55), + summaries[Random.Shared.Next(summaries.Length)] + )) + .ToArray(); + return forecast; +}) +.WithName("GetWeatherForecast"); + +app.Run(); + +record WeatherForecast(DateOnly Date, int TemperatureC, string? 
Summary) +{ + public int TemperatureF => 32 + (int)(TemperatureC / 0.5556); +} diff --git a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/Properties/launchSettings.json b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/Properties/launchSettings.json similarity index 96% rename from src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/Properties/launchSettings.json rename to src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/Properties/launchSettings.json index b4d53ffa..2ca6fd86 100644 --- a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/Properties/launchSettings.json +++ b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/Properties/launchSettings.json @@ -1,23 +1,23 @@ -{ - "$schema": "https://json.schemastore.org/launchsettings.json", - "profiles": { - "http": { - "commandName": "Project", - "dotnetRunMessages": true, - "launchBrowser": false, - "applicationUrl": "http://localhost:5151", - "environmentVariables": { - "ASPNETCORE_ENVIRONMENT": "Development" - } - }, - "https": { - "commandName": "Project", - "dotnetRunMessages": true, - "launchBrowser": false, - "applicationUrl": "https://localhost:7136;http://localhost:5151", - "environmentVariables": { - "ASPNETCORE_ENVIRONMENT": "Development" - } - } - } -} +{ + "$schema": "https://json.schemastore.org/launchsettings.json", + "profiles": { + "http": { + "commandName": "Project", + "dotnetRunMessages": true, + "launchBrowser": false, + "applicationUrl": "http://localhost:5151", + "environmentVariables": { + "ASPNETCORE_ENVIRONMENT": "Development" + } + }, + "https": { + "commandName": "Project", + "dotnetRunMessages": true, + "launchBrowser": false, + "applicationUrl": "https://localhost:7136;http://localhost:5151", + "environmentVariables": { + "ASPNETCORE_ENVIRONMENT": "Development" + } + } + } +} diff --git a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/StellaOps.PacksRegistry.WebService.csproj b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/StellaOps.PacksRegistry.WebService.csproj similarity index 95% rename from src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/StellaOps.PacksRegistry.WebService.csproj rename to src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/StellaOps.PacksRegistry.WebService.csproj index 71e052c6..5c6ad91c 100644 --- a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/StellaOps.PacksRegistry.WebService.csproj +++ b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/StellaOps.PacksRegistry.WebService.csproj @@ -1,41 +1,41 @@ -<?xml version="1.0" ?> -<Project Sdk="Microsoft.NET.Sdk.Web"> - - - - <PropertyGroup> - - - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <LangVersion>preview</LangVersion> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - - - - <ItemGroup> - - - <PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="10.0.0-rc.2.25502.107"/> - - - </ItemGroup> - - - - <ItemGroup> - - - <ProjectReference Include="..\StellaOps.PacksRegistry.Core\StellaOps.PacksRegistry.Core.csproj"/> - - - <ProjectReference Include="..\StellaOps.PacksRegistry.Infrastructure\StellaOps.PacksRegistry.Infrastructure.csproj"/> - - - </ItemGroup> - - - -</Project> +<?xml version="1.0" ?> +<Project Sdk="Microsoft.NET.Sdk.Web"> + + + + <PropertyGroup> + + + 
<TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <LangVersion>preview</LangVersion> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + + + + <ItemGroup> + + + <PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="10.0.0-rc.2.25502.107"/> + + + </ItemGroup> + + + + <ItemGroup> + + + <ProjectReference Include="..\StellaOps.PacksRegistry.Core\StellaOps.PacksRegistry.Core.csproj"/> + + + <ProjectReference Include="..\StellaOps.PacksRegistry.Infrastructure\StellaOps.PacksRegistry.Infrastructure.csproj"/> + + + </ItemGroup> + + + +</Project> diff --git a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/StellaOps.PacksRegistry.WebService.http b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/StellaOps.PacksRegistry.WebService.http similarity index 96% rename from src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/StellaOps.PacksRegistry.WebService.http rename to src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/StellaOps.PacksRegistry.WebService.http index 399cd52c..4cd0d4cc 100644 --- a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/StellaOps.PacksRegistry.WebService.http +++ b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/StellaOps.PacksRegistry.WebService.http @@ -1,6 +1,6 @@ -@StellaOps.PacksRegistry.WebService_HostAddress = http://localhost:5151 - -GET {{StellaOps.PacksRegistry.WebService_HostAddress}}/weatherforecast/ -Accept: application/json - -### +@StellaOps.PacksRegistry.WebService_HostAddress = http://localhost:5151 + +GET {{StellaOps.PacksRegistry.WebService_HostAddress}}/weatherforecast/ +Accept: application/json + +### diff --git a/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/appsettings.Development.json b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/appsettings.Development.json new file mode 100644 index 00000000..ff66ba6b --- /dev/null +++ b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/appsettings.Development.json @@ -0,0 +1,8 @@ +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.AspNetCore": "Warning" + } + } +} diff --git a/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/appsettings.json b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/appsettings.json new file mode 100644 index 00000000..4d566948 --- /dev/null +++ b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/appsettings.json @@ -0,0 +1,9 @@ +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.AspNetCore": "Warning" + } + }, + "AllowedHosts": "*" +} diff --git a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/Program.cs b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/Program.cs similarity index 96% rename from src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/Program.cs rename to src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/Program.cs index 2e39e5ad..4d390650 100644 --- a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/Program.cs +++ b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/Program.cs @@ -1,7 +1,7 @@ -using StellaOps.PacksRegistry.Worker; - -var builder = Host.CreateApplicationBuilder(args); -builder.Services.AddHostedService<Worker>(); - -var host = 
builder.Build(); -host.Run(); +using StellaOps.PacksRegistry.Worker; + +var builder = Host.CreateApplicationBuilder(args); +builder.Services.AddHostedService<Worker>(); + +var host = builder.Build(); +host.Run(); diff --git a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/Properties/launchSettings.json b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/Properties/launchSettings.json similarity index 95% rename from src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/Properties/launchSettings.json rename to src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/Properties/launchSettings.json index 950e8d35..1f4e81c5 100644 --- a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/Properties/launchSettings.json +++ b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/Properties/launchSettings.json @@ -1,12 +1,12 @@ -{ - "$schema": "https://json.schemastore.org/launchsettings.json", - "profiles": { - "StellaOps.PacksRegistry.Worker": { - "commandName": "Project", - "dotnetRunMessages": true, - "environmentVariables": { - "DOTNET_ENVIRONMENT": "Development" - } - } - } -} +{ + "$schema": "https://json.schemastore.org/launchsettings.json", + "profiles": { + "StellaOps.PacksRegistry.Worker": { + "commandName": "Project", + "dotnetRunMessages": true, + "environmentVariables": { + "DOTNET_ENVIRONMENT": "Development" + } + } + } +} diff --git a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/StellaOps.PacksRegistry.Worker.csproj b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/StellaOps.PacksRegistry.Worker.csproj similarity index 95% rename from src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/StellaOps.PacksRegistry.Worker.csproj rename to src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/StellaOps.PacksRegistry.Worker.csproj index ab1c918a..a4795714 100644 --- a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/StellaOps.PacksRegistry.Worker.csproj +++ b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/StellaOps.PacksRegistry.Worker.csproj @@ -1,43 +1,43 @@ -<?xml version="1.0" ?> -<Project Sdk="Microsoft.NET.Sdk.Worker"> - - - - <PropertyGroup> - - - <UserSecretsId>dotnet-StellaOps.PacksRegistry.Worker-a5c025f8-62a4-498b-928b-5ed8f27c53de</UserSecretsId> - - - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <LangVersion>preview</LangVersion> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - - - - <ItemGroup> - - - <PackageReference Include="Microsoft.Extensions.Hosting" Version="10.0.0-rc.2.25502.107"/> - - - </ItemGroup> - - - - <ItemGroup> - - - <ProjectReference Include="..\StellaOps.PacksRegistry.Core\StellaOps.PacksRegistry.Core.csproj"/> - - - <ProjectReference Include="..\StellaOps.PacksRegistry.Infrastructure\StellaOps.PacksRegistry.Infrastructure.csproj"/> - - - </ItemGroup> - - -</Project> +<?xml version="1.0" ?> +<Project Sdk="Microsoft.NET.Sdk.Worker"> + + + + <PropertyGroup> + + + <UserSecretsId>dotnet-StellaOps.PacksRegistry.Worker-a5c025f8-62a4-498b-928b-5ed8f27c53de</UserSecretsId> + + + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <LangVersion>preview</LangVersion> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + + + + <ItemGroup> + + + <PackageReference Include="Microsoft.Extensions.Hosting" 
Version="10.0.0-rc.2.25502.107"/> + + + </ItemGroup> + + + + <ItemGroup> + + + <ProjectReference Include="..\StellaOps.PacksRegistry.Core\StellaOps.PacksRegistry.Core.csproj"/> + + + <ProjectReference Include="..\StellaOps.PacksRegistry.Infrastructure\StellaOps.PacksRegistry.Infrastructure.csproj"/> + + + </ItemGroup> + + +</Project> diff --git a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/Worker.cs b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/Worker.cs similarity index 96% rename from src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/Worker.cs rename to src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/Worker.cs index 8f2ba413..65c967c9 100644 --- a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/Worker.cs +++ b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/Worker.cs @@ -1,16 +1,16 @@ -namespace StellaOps.PacksRegistry.Worker; - -public class Worker(ILogger<Worker> logger) : BackgroundService -{ - protected override async Task ExecuteAsync(CancellationToken stoppingToken) - { - while (!stoppingToken.IsCancellationRequested) - { - if (logger.IsEnabled(LogLevel.Information)) - { - logger.LogInformation("Worker running at: {time}", DateTimeOffset.Now); - } - await Task.Delay(1000, stoppingToken); - } - } -} +namespace StellaOps.PacksRegistry.Worker; + +public class Worker(ILogger<Worker> logger) : BackgroundService +{ + protected override async Task ExecuteAsync(CancellationToken stoppingToken) + { + while (!stoppingToken.IsCancellationRequested) + { + if (logger.IsEnabled(LogLevel.Information)) + { + logger.LogInformation("Worker running at: {time}", DateTimeOffset.Now); + } + await Task.Delay(1000, stoppingToken); + } + } +} diff --git a/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/appsettings.Development.json b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/appsettings.Development.json new file mode 100644 index 00000000..69017646 --- /dev/null +++ b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/appsettings.Development.json @@ -0,0 +1,8 @@ +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.Hosting.Lifetime": "Information" + } + } +} diff --git a/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/appsettings.json b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/appsettings.json new file mode 100644 index 00000000..69017646 --- /dev/null +++ b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/appsettings.json @@ -0,0 +1,8 @@ +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.Hosting.Lifetime": "Information" + } + } +} diff --git a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.sln b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.sln similarity index 98% rename from src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.sln rename to src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.sln index aff8259a..a57ec3ff 100644 --- a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.sln +++ b/src/PacksRegistry/StellaOps.PacksRegistry/StellaOps.PacksRegistry.sln @@ -1,90 +1,90 @@ - -Microsoft Visual Studio Solution File, Format Version 12.00 -# Visual Studio Version 17 -VisualStudioVersion = 17.0.31903.59 -MinimumVisualStudioVersion = 10.0.40219.1 -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.PacksRegistry.Core", 
"StellaOps.PacksRegistry.Core\StellaOps.PacksRegistry.Core.csproj", "{98FB93E5-21F8-4D24-AD54-1DF52070CAB8}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.PacksRegistry.Infrastructure", "StellaOps.PacksRegistry.Infrastructure\StellaOps.PacksRegistry.Infrastructure.csproj", "{C5FDDBA3-5D96-4158-810D-6597A96DA574}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.PacksRegistry.WebService", "StellaOps.PacksRegistry.WebService\StellaOps.PacksRegistry.WebService.csproj", "{4CE7EBE6-67A6-4947-8702-D123343FC297}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.PacksRegistry.Worker", "StellaOps.PacksRegistry.Worker\StellaOps.PacksRegistry.Worker.csproj", "{7DE3DD7E-E1F9-4443-81E4-C7E4E80F5703}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.PacksRegistry.Tests", "StellaOps.PacksRegistry.Tests\StellaOps.PacksRegistry.Tests.csproj", "{1FA70E02-C65A-484C-87E7-0A33EEB69573}" -EndProject -Global - GlobalSection(SolutionConfigurationPlatforms) = preSolution - Debug|Any CPU = Debug|Any CPU - Debug|x64 = Debug|x64 - Debug|x86 = Debug|x86 - Release|Any CPU = Release|Any CPU - Release|x64 = Release|x64 - Release|x86 = Release|x86 - EndGlobalSection - GlobalSection(ProjectConfigurationPlatforms) = postSolution - {98FB93E5-21F8-4D24-AD54-1DF52070CAB8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {98FB93E5-21F8-4D24-AD54-1DF52070CAB8}.Debug|Any CPU.Build.0 = Debug|Any CPU - {98FB93E5-21F8-4D24-AD54-1DF52070CAB8}.Debug|x64.ActiveCfg = Debug|Any CPU - {98FB93E5-21F8-4D24-AD54-1DF52070CAB8}.Debug|x64.Build.0 = Debug|Any CPU - {98FB93E5-21F8-4D24-AD54-1DF52070CAB8}.Debug|x86.ActiveCfg = Debug|Any CPU - {98FB93E5-21F8-4D24-AD54-1DF52070CAB8}.Debug|x86.Build.0 = Debug|Any CPU - {98FB93E5-21F8-4D24-AD54-1DF52070CAB8}.Release|Any CPU.ActiveCfg = Release|Any CPU - {98FB93E5-21F8-4D24-AD54-1DF52070CAB8}.Release|Any CPU.Build.0 = Release|Any CPU - {98FB93E5-21F8-4D24-AD54-1DF52070CAB8}.Release|x64.ActiveCfg = Release|Any CPU - {98FB93E5-21F8-4D24-AD54-1DF52070CAB8}.Release|x64.Build.0 = Release|Any CPU - {98FB93E5-21F8-4D24-AD54-1DF52070CAB8}.Release|x86.ActiveCfg = Release|Any CPU - {98FB93E5-21F8-4D24-AD54-1DF52070CAB8}.Release|x86.Build.0 = Release|Any CPU - {C5FDDBA3-5D96-4158-810D-6597A96DA574}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {C5FDDBA3-5D96-4158-810D-6597A96DA574}.Debug|Any CPU.Build.0 = Debug|Any CPU - {C5FDDBA3-5D96-4158-810D-6597A96DA574}.Debug|x64.ActiveCfg = Debug|Any CPU - {C5FDDBA3-5D96-4158-810D-6597A96DA574}.Debug|x64.Build.0 = Debug|Any CPU - {C5FDDBA3-5D96-4158-810D-6597A96DA574}.Debug|x86.ActiveCfg = Debug|Any CPU - {C5FDDBA3-5D96-4158-810D-6597A96DA574}.Debug|x86.Build.0 = Debug|Any CPU - {C5FDDBA3-5D96-4158-810D-6597A96DA574}.Release|Any CPU.ActiveCfg = Release|Any CPU - {C5FDDBA3-5D96-4158-810D-6597A96DA574}.Release|Any CPU.Build.0 = Release|Any CPU - {C5FDDBA3-5D96-4158-810D-6597A96DA574}.Release|x64.ActiveCfg = Release|Any CPU - {C5FDDBA3-5D96-4158-810D-6597A96DA574}.Release|x64.Build.0 = Release|Any CPU - {C5FDDBA3-5D96-4158-810D-6597A96DA574}.Release|x86.ActiveCfg = Release|Any CPU - {C5FDDBA3-5D96-4158-810D-6597A96DA574}.Release|x86.Build.0 = Release|Any CPU - {4CE7EBE6-67A6-4947-8702-D123343FC297}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {4CE7EBE6-67A6-4947-8702-D123343FC297}.Debug|Any CPU.Build.0 = Debug|Any CPU - {4CE7EBE6-67A6-4947-8702-D123343FC297}.Debug|x64.ActiveCfg = Debug|Any CPU - {4CE7EBE6-67A6-4947-8702-D123343FC297}.Debug|x64.Build.0 = Debug|Any CPU - 
{4CE7EBE6-67A6-4947-8702-D123343FC297}.Debug|x86.ActiveCfg = Debug|Any CPU - {4CE7EBE6-67A6-4947-8702-D123343FC297}.Debug|x86.Build.0 = Debug|Any CPU - {4CE7EBE6-67A6-4947-8702-D123343FC297}.Release|Any CPU.ActiveCfg = Release|Any CPU - {4CE7EBE6-67A6-4947-8702-D123343FC297}.Release|Any CPU.Build.0 = Release|Any CPU - {4CE7EBE6-67A6-4947-8702-D123343FC297}.Release|x64.ActiveCfg = Release|Any CPU - {4CE7EBE6-67A6-4947-8702-D123343FC297}.Release|x64.Build.0 = Release|Any CPU - {4CE7EBE6-67A6-4947-8702-D123343FC297}.Release|x86.ActiveCfg = Release|Any CPU - {4CE7EBE6-67A6-4947-8702-D123343FC297}.Release|x86.Build.0 = Release|Any CPU - {7DE3DD7E-E1F9-4443-81E4-C7E4E80F5703}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {7DE3DD7E-E1F9-4443-81E4-C7E4E80F5703}.Debug|Any CPU.Build.0 = Debug|Any CPU - {7DE3DD7E-E1F9-4443-81E4-C7E4E80F5703}.Debug|x64.ActiveCfg = Debug|Any CPU - {7DE3DD7E-E1F9-4443-81E4-C7E4E80F5703}.Debug|x64.Build.0 = Debug|Any CPU - {7DE3DD7E-E1F9-4443-81E4-C7E4E80F5703}.Debug|x86.ActiveCfg = Debug|Any CPU - {7DE3DD7E-E1F9-4443-81E4-C7E4E80F5703}.Debug|x86.Build.0 = Debug|Any CPU - {7DE3DD7E-E1F9-4443-81E4-C7E4E80F5703}.Release|Any CPU.ActiveCfg = Release|Any CPU - {7DE3DD7E-E1F9-4443-81E4-C7E4E80F5703}.Release|Any CPU.Build.0 = Release|Any CPU - {7DE3DD7E-E1F9-4443-81E4-C7E4E80F5703}.Release|x64.ActiveCfg = Release|Any CPU - {7DE3DD7E-E1F9-4443-81E4-C7E4E80F5703}.Release|x64.Build.0 = Release|Any CPU - {7DE3DD7E-E1F9-4443-81E4-C7E4E80F5703}.Release|x86.ActiveCfg = Release|Any CPU - {7DE3DD7E-E1F9-4443-81E4-C7E4E80F5703}.Release|x86.Build.0 = Release|Any CPU - {1FA70E02-C65A-484C-87E7-0A33EEB69573}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {1FA70E02-C65A-484C-87E7-0A33EEB69573}.Debug|Any CPU.Build.0 = Debug|Any CPU - {1FA70E02-C65A-484C-87E7-0A33EEB69573}.Debug|x64.ActiveCfg = Debug|Any CPU - {1FA70E02-C65A-484C-87E7-0A33EEB69573}.Debug|x64.Build.0 = Debug|Any CPU - {1FA70E02-C65A-484C-87E7-0A33EEB69573}.Debug|x86.ActiveCfg = Debug|Any CPU - {1FA70E02-C65A-484C-87E7-0A33EEB69573}.Debug|x86.Build.0 = Debug|Any CPU - {1FA70E02-C65A-484C-87E7-0A33EEB69573}.Release|Any CPU.ActiveCfg = Release|Any CPU - {1FA70E02-C65A-484C-87E7-0A33EEB69573}.Release|Any CPU.Build.0 = Release|Any CPU - {1FA70E02-C65A-484C-87E7-0A33EEB69573}.Release|x64.ActiveCfg = Release|Any CPU - {1FA70E02-C65A-484C-87E7-0A33EEB69573}.Release|x64.Build.0 = Release|Any CPU - {1FA70E02-C65A-484C-87E7-0A33EEB69573}.Release|x86.ActiveCfg = Release|Any CPU - {1FA70E02-C65A-484C-87E7-0A33EEB69573}.Release|x86.Build.0 = Release|Any CPU - EndGlobalSection - GlobalSection(SolutionProperties) = preSolution - HideSolutionNode = FALSE - EndGlobalSection -EndGlobal + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.0.31903.59 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.PacksRegistry.Core", "StellaOps.PacksRegistry.Core\StellaOps.PacksRegistry.Core.csproj", "{98FB93E5-21F8-4D24-AD54-1DF52070CAB8}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.PacksRegistry.Infrastructure", "StellaOps.PacksRegistry.Infrastructure\StellaOps.PacksRegistry.Infrastructure.csproj", "{C5FDDBA3-5D96-4158-810D-6597A96DA574}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.PacksRegistry.WebService", "StellaOps.PacksRegistry.WebService\StellaOps.PacksRegistry.WebService.csproj", "{4CE7EBE6-67A6-4947-8702-D123343FC297}" +EndProject 
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.PacksRegistry.Worker", "StellaOps.PacksRegistry.Worker\StellaOps.PacksRegistry.Worker.csproj", "{7DE3DD7E-E1F9-4443-81E4-C7E4E80F5703}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.PacksRegistry.Tests", "StellaOps.PacksRegistry.Tests\StellaOps.PacksRegistry.Tests.csproj", "{1FA70E02-C65A-484C-87E7-0A33EEB69573}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|Any CPU = Release|Any CPU + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {98FB93E5-21F8-4D24-AD54-1DF52070CAB8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {98FB93E5-21F8-4D24-AD54-1DF52070CAB8}.Debug|Any CPU.Build.0 = Debug|Any CPU + {98FB93E5-21F8-4D24-AD54-1DF52070CAB8}.Debug|x64.ActiveCfg = Debug|Any CPU + {98FB93E5-21F8-4D24-AD54-1DF52070CAB8}.Debug|x64.Build.0 = Debug|Any CPU + {98FB93E5-21F8-4D24-AD54-1DF52070CAB8}.Debug|x86.ActiveCfg = Debug|Any CPU + {98FB93E5-21F8-4D24-AD54-1DF52070CAB8}.Debug|x86.Build.0 = Debug|Any CPU + {98FB93E5-21F8-4D24-AD54-1DF52070CAB8}.Release|Any CPU.ActiveCfg = Release|Any CPU + {98FB93E5-21F8-4D24-AD54-1DF52070CAB8}.Release|Any CPU.Build.0 = Release|Any CPU + {98FB93E5-21F8-4D24-AD54-1DF52070CAB8}.Release|x64.ActiveCfg = Release|Any CPU + {98FB93E5-21F8-4D24-AD54-1DF52070CAB8}.Release|x64.Build.0 = Release|Any CPU + {98FB93E5-21F8-4D24-AD54-1DF52070CAB8}.Release|x86.ActiveCfg = Release|Any CPU + {98FB93E5-21F8-4D24-AD54-1DF52070CAB8}.Release|x86.Build.0 = Release|Any CPU + {C5FDDBA3-5D96-4158-810D-6597A96DA574}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C5FDDBA3-5D96-4158-810D-6597A96DA574}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C5FDDBA3-5D96-4158-810D-6597A96DA574}.Debug|x64.ActiveCfg = Debug|Any CPU + {C5FDDBA3-5D96-4158-810D-6597A96DA574}.Debug|x64.Build.0 = Debug|Any CPU + {C5FDDBA3-5D96-4158-810D-6597A96DA574}.Debug|x86.ActiveCfg = Debug|Any CPU + {C5FDDBA3-5D96-4158-810D-6597A96DA574}.Debug|x86.Build.0 = Debug|Any CPU + {C5FDDBA3-5D96-4158-810D-6597A96DA574}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C5FDDBA3-5D96-4158-810D-6597A96DA574}.Release|Any CPU.Build.0 = Release|Any CPU + {C5FDDBA3-5D96-4158-810D-6597A96DA574}.Release|x64.ActiveCfg = Release|Any CPU + {C5FDDBA3-5D96-4158-810D-6597A96DA574}.Release|x64.Build.0 = Release|Any CPU + {C5FDDBA3-5D96-4158-810D-6597A96DA574}.Release|x86.ActiveCfg = Release|Any CPU + {C5FDDBA3-5D96-4158-810D-6597A96DA574}.Release|x86.Build.0 = Release|Any CPU + {4CE7EBE6-67A6-4947-8702-D123343FC297}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {4CE7EBE6-67A6-4947-8702-D123343FC297}.Debug|Any CPU.Build.0 = Debug|Any CPU + {4CE7EBE6-67A6-4947-8702-D123343FC297}.Debug|x64.ActiveCfg = Debug|Any CPU + {4CE7EBE6-67A6-4947-8702-D123343FC297}.Debug|x64.Build.0 = Debug|Any CPU + {4CE7EBE6-67A6-4947-8702-D123343FC297}.Debug|x86.ActiveCfg = Debug|Any CPU + {4CE7EBE6-67A6-4947-8702-D123343FC297}.Debug|x86.Build.0 = Debug|Any CPU + {4CE7EBE6-67A6-4947-8702-D123343FC297}.Release|Any CPU.ActiveCfg = Release|Any CPU + {4CE7EBE6-67A6-4947-8702-D123343FC297}.Release|Any CPU.Build.0 = Release|Any CPU + {4CE7EBE6-67A6-4947-8702-D123343FC297}.Release|x64.ActiveCfg = Release|Any CPU + {4CE7EBE6-67A6-4947-8702-D123343FC297}.Release|x64.Build.0 = Release|Any CPU + {4CE7EBE6-67A6-4947-8702-D123343FC297}.Release|x86.ActiveCfg = Release|Any CPU + 
{4CE7EBE6-67A6-4947-8702-D123343FC297}.Release|x86.Build.0 = Release|Any CPU + {7DE3DD7E-E1F9-4443-81E4-C7E4E80F5703}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7DE3DD7E-E1F9-4443-81E4-C7E4E80F5703}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7DE3DD7E-E1F9-4443-81E4-C7E4E80F5703}.Debug|x64.ActiveCfg = Debug|Any CPU + {7DE3DD7E-E1F9-4443-81E4-C7E4E80F5703}.Debug|x64.Build.0 = Debug|Any CPU + {7DE3DD7E-E1F9-4443-81E4-C7E4E80F5703}.Debug|x86.ActiveCfg = Debug|Any CPU + {7DE3DD7E-E1F9-4443-81E4-C7E4E80F5703}.Debug|x86.Build.0 = Debug|Any CPU + {7DE3DD7E-E1F9-4443-81E4-C7E4E80F5703}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7DE3DD7E-E1F9-4443-81E4-C7E4E80F5703}.Release|Any CPU.Build.0 = Release|Any CPU + {7DE3DD7E-E1F9-4443-81E4-C7E4E80F5703}.Release|x64.ActiveCfg = Release|Any CPU + {7DE3DD7E-E1F9-4443-81E4-C7E4E80F5703}.Release|x64.Build.0 = Release|Any CPU + {7DE3DD7E-E1F9-4443-81E4-C7E4E80F5703}.Release|x86.ActiveCfg = Release|Any CPU + {7DE3DD7E-E1F9-4443-81E4-C7E4E80F5703}.Release|x86.Build.0 = Release|Any CPU + {1FA70E02-C65A-484C-87E7-0A33EEB69573}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {1FA70E02-C65A-484C-87E7-0A33EEB69573}.Debug|Any CPU.Build.0 = Debug|Any CPU + {1FA70E02-C65A-484C-87E7-0A33EEB69573}.Debug|x64.ActiveCfg = Debug|Any CPU + {1FA70E02-C65A-484C-87E7-0A33EEB69573}.Debug|x64.Build.0 = Debug|Any CPU + {1FA70E02-C65A-484C-87E7-0A33EEB69573}.Debug|x86.ActiveCfg = Debug|Any CPU + {1FA70E02-C65A-484C-87E7-0A33EEB69573}.Debug|x86.Build.0 = Debug|Any CPU + {1FA70E02-C65A-484C-87E7-0A33EEB69573}.Release|Any CPU.ActiveCfg = Release|Any CPU + {1FA70E02-C65A-484C-87E7-0A33EEB69573}.Release|Any CPU.Build.0 = Release|Any CPU + {1FA70E02-C65A-484C-87E7-0A33EEB69573}.Release|x64.ActiveCfg = Release|Any CPU + {1FA70E02-C65A-484C-87E7-0A33EEB69573}.Release|x64.Build.0 = Release|Any CPU + {1FA70E02-C65A-484C-87E7-0A33EEB69573}.Release|x86.ActiveCfg = Release|Any CPU + {1FA70E02-C65A-484C-87E7-0A33EEB69573}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection +EndGlobal diff --git a/src/StellaOps.PacksRegistry/TASKS.md b/src/PacksRegistry/StellaOps.PacksRegistry/TASKS.md similarity index 99% rename from src/StellaOps.PacksRegistry/TASKS.md rename to src/PacksRegistry/StellaOps.PacksRegistry/TASKS.md index a7305a79..30f035a6 100644 --- a/src/StellaOps.PacksRegistry/TASKS.md +++ b/src/PacksRegistry/StellaOps.PacksRegistry/TASKS.md @@ -1,16 +1,16 @@ -# Packs Registry Task Board — Epic 12: CLI Parity & Task Packs - -## Sprint 41 – Foundations -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| PACKS-REG-41-001 | TODO | Packs Registry Guild | AUTH-PACKS-41-001 | Implement registry service, migrations for `packs_index`, `parity_matrix`, provenance docs; support pack upload/list/get, signature verification, RBAC enforcement, and provenance manifest storage. | Service builds/tests; signature verification works; RBAC validated; provenance stored; docs cross-linked. | - -## Sprint 42 – Lifecycle & Governance -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| PACKS-REG-42-001 | TODO | Packs Registry Guild | PACKS-REG-41-001 | Add version lifecycle (promote/deprecate), tenant allowlists, provenance export, signature rotation, audit logs, and Offline Kit seed support. 
| Version lifecycle APIs live; allowlists enforced; rotation documented; audit logs recorded; offline kit seeds generated. | - -## Sprint 43 – Mirroring & Compliance -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| PACKS-REG-43-001 | TODO | Packs Registry Guild | PACKS-REG-42-001 | Implement registry mirroring, pack signing policies, attestation integration, and compliance dashboards; integrate with Export Center. | Mirroring operational; signing policies enforced; attestation pipeline documented; dashboards live; export integration validated. | +# Packs Registry Task Board — Epic 12: CLI Parity & Task Packs + +## Sprint 41 – Foundations +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| PACKS-REG-41-001 | TODO | Packs Registry Guild | AUTH-PACKS-41-001 | Implement registry service, migrations for `packs_index`, `parity_matrix`, provenance docs; support pack upload/list/get, signature verification, RBAC enforcement, and provenance manifest storage. | Service builds/tests; signature verification works; RBAC validated; provenance stored; docs cross-linked. | + +## Sprint 42 – Lifecycle & Governance +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| PACKS-REG-42-001 | TODO | Packs Registry Guild | PACKS-REG-41-001 | Add version lifecycle (promote/deprecate), tenant allowlists, provenance export, signature rotation, audit logs, and Offline Kit seed support. | Version lifecycle APIs live; allowlists enforced; rotation documented; audit logs recorded; offline kit seeds generated. | + +## Sprint 43 – Mirroring & Compliance +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| PACKS-REG-43-001 | TODO | Packs Registry Guild | PACKS-REG-42-001 | Implement registry mirroring, pack signing policies, attestation integration, and compliance dashboards; integrate with Export Center. | Mirroring operational; signing policies enforced; attestation pipeline documented; dashboards live; export integration validated. | diff --git a/src/StellaOps.Policy.Engine/AGENTS.md b/src/Policy/StellaOps.Policy.Engine/AGENTS.md similarity index 92% rename from src/StellaOps.Policy.Engine/AGENTS.md rename to src/Policy/StellaOps.Policy.Engine/AGENTS.md index 294d1399..363045a4 100644 --- a/src/StellaOps.Policy.Engine/AGENTS.md +++ b/src/Policy/StellaOps.Policy.Engine/AGENTS.md @@ -1,18 +1,18 @@ -# StellaOps.Policy.Engine — Agent Charter - -## Mission -Stand up the Policy Engine runtime host that evaluates organization policies against SBOM/advisory/VEX inputs with deterministic, replayable results. Deliver the API/worker orchestration, materialization writers, and observability stack described in Epic 2 (Policy Engine v2). - -## Scope -- Minimal API host & background workers for policy runs (full, incremental, simulate). -- Mongo persistence for `policies`, `policy_runs`, and `effective_finding_*` collections. -- Change stream listeners and scheduler integration for incremental re-evaluation. -- Authority integration enforcing new `policy:*` and `effective:write` scopes. -- Observability: metrics, traces, structured logs, trace sampling. - -## Expectations -- Keep endpoints deterministic, cancellation-aware, and tenant-scoped. 
-- Only Policy Engine identity performs writes to effective findings. -- Coordinate with Concelier/Excititor/Scheduler guilds for linkset joins and orchestration inputs. -- Update `TASKS.md`, `SPRINTS.md` when status changes. -- Maintain compliance checklists and schema docs alongside code updates. +# StellaOps.Policy.Engine — Agent Charter + +## Mission +Stand up the Policy Engine runtime host that evaluates organization policies against SBOM/advisory/VEX inputs with deterministic, replayable results. Deliver the API/worker orchestration, materialization writers, and observability stack described in Epic 2 (Policy Engine v2). + +## Scope +- Minimal API host & background workers for policy runs (full, incremental, simulate). +- Mongo persistence for `policies`, `policy_runs`, and `effective_finding_*` collections. +- Change stream listeners and scheduler integration for incremental re-evaluation. +- Authority integration enforcing new `policy:*` and `effective:write` scopes. +- Observability: metrics, traces, structured logs, trace sampling. + +## Expectations +- Keep endpoints deterministic, cancellation-aware, and tenant-scoped. +- Only Policy Engine identity performs writes to effective findings. +- Coordinate with Concelier/Excititor/Scheduler guilds for linkset joins and orchestration inputs. +- Update `TASKS.md`, `../../docs/implplan/SPRINTS.md` when status changes. +- Maintain compliance checklists and schema docs alongside code updates. diff --git a/src/StellaOps.Policy.Engine/Compilation/DslToken.cs b/src/Policy/StellaOps.Policy.Engine/Compilation/DslToken.cs similarity index 96% rename from src/StellaOps.Policy.Engine/Compilation/DslToken.cs rename to src/Policy/StellaOps.Policy.Engine/Compilation/DslToken.cs index bcd107c7..1b3dfcd6 100644 --- a/src/StellaOps.Policy.Engine/Compilation/DslToken.cs +++ b/src/Policy/StellaOps.Policy.Engine/Compilation/DslToken.cs @@ -1,160 +1,160 @@ -using System.Diagnostics.CodeAnalysis; - -namespace StellaOps.Policy.Engine.Compilation; - -/// <summary> -/// Represents a precise source location within a policy DSL document. -/// </summary> -public readonly struct SourceLocation : IEquatable<SourceLocation>, IComparable<SourceLocation> -{ - public SourceLocation(int offset, int line, int column) - { - if (offset < 0) - { - throw new ArgumentOutOfRangeException(nameof(offset)); - } - - if (line < 1) - { - throw new ArgumentOutOfRangeException(nameof(line)); - } - - if (column < 1) - { - throw new ArgumentOutOfRangeException(nameof(column)); - } - - Offset = offset; - Line = line; - Column = column; - } - - public int Offset { get; } - - public int Line { get; } - - public int Column { get; } - - public override string ToString() => $"(L{Line}, C{Column})"; - - public bool Equals(SourceLocation other) => - Offset == other.Offset && Line == other.Line && Column == other.Column; - - public override bool Equals([NotNullWhen(true)] object? 
obj) => - obj is SourceLocation other && Equals(other); - - public override int GetHashCode() => HashCode.Combine(Offset, Line, Column); - - public int CompareTo(SourceLocation other) => Offset.CompareTo(other.Offset); - - public static bool operator ==(SourceLocation left, SourceLocation right) => left.Equals(right); - - public static bool operator !=(SourceLocation left, SourceLocation right) => !left.Equals(right); - - public static bool operator <(SourceLocation left, SourceLocation right) => left.CompareTo(right) < 0; - - public static bool operator <=(SourceLocation left, SourceLocation right) => left.CompareTo(right) <= 0; - - public static bool operator >(SourceLocation left, SourceLocation right) => left.CompareTo(right) > 0; - - public static bool operator >=(SourceLocation left, SourceLocation right) => left.CompareTo(right) >= 0; -} - -/// <summary> -/// Represents a start/end location pair within a policy DSL source document. -/// </summary> -public readonly struct SourceSpan : IEquatable<SourceSpan> -{ - public SourceSpan(SourceLocation start, SourceLocation end) - { - if (start.Offset > end.Offset) - { - throw new ArgumentException("Start must not be after end.", nameof(start)); - } - - Start = start; - End = end; - } - - public SourceLocation Start { get; } - - public SourceLocation End { get; } - - public override string ToString() => $"{Start}->{End}"; - - public bool Equals(SourceSpan other) => Start.Equals(other.Start) && End.Equals(other.End); - - public override bool Equals([NotNullWhen(true)] object? obj) => obj is SourceSpan other && Equals(other); - - public override int GetHashCode() => HashCode.Combine(Start, End); - - public static SourceSpan Combine(SourceSpan first, SourceSpan second) - { - var start = first.Start <= second.Start ? first.Start : second.Start; - var end = first.End >= second.End ? first.End : second.End; - return new SourceSpan(start, end); - } -} - -internal enum TokenKind -{ - EndOfFile = 0, - Identifier, - StringLiteral, - NumberLiteral, - BooleanLiteral, - LeftBrace, - RightBrace, - LeftParen, - RightParen, - LeftBracket, - RightBracket, - Comma, - Semicolon, - Colon, - Arrow, // => - Assign, // = - Define, // := - Dot, - KeywordPolicy, - KeywordSyntax, - KeywordMetadata, - KeywordProfile, - KeywordRule, - KeywordMap, - KeywordSource, - KeywordEnv, - KeywordIf, - KeywordThen, - KeywordWhen, - KeywordAnd, - KeywordOr, - KeywordNot, - KeywordPriority, - KeywordElse, - KeywordBecause, - KeywordSettings, - KeywordIgnore, - KeywordUntil, - KeywordEscalate, - KeywordTo, - KeywordRequireVex, - KeywordWarn, - KeywordMessage, - KeywordDefer, - KeywordAnnotate, - KeywordIn, - EqualEqual, - NotEqual, - LessThan, - LessThanOrEqual, - GreaterThan, - GreaterThanOrEqual, - Unknown, -} - -internal readonly record struct DslToken( - TokenKind Kind, - string Text, - SourceSpan Span, - object? Value = null); +using System.Diagnostics.CodeAnalysis; + +namespace StellaOps.Policy.Engine.Compilation; + +/// <summary> +/// Represents a precise source location within a policy DSL document. 
+/// </summary> +public readonly struct SourceLocation : IEquatable<SourceLocation>, IComparable<SourceLocation> +{ + public SourceLocation(int offset, int line, int column) + { + if (offset < 0) + { + throw new ArgumentOutOfRangeException(nameof(offset)); + } + + if (line < 1) + { + throw new ArgumentOutOfRangeException(nameof(line)); + } + + if (column < 1) + { + throw new ArgumentOutOfRangeException(nameof(column)); + } + + Offset = offset; + Line = line; + Column = column; + } + + public int Offset { get; } + + public int Line { get; } + + public int Column { get; } + + public override string ToString() => $"(L{Line}, C{Column})"; + + public bool Equals(SourceLocation other) => + Offset == other.Offset && Line == other.Line && Column == other.Column; + + public override bool Equals([NotNullWhen(true)] object? obj) => + obj is SourceLocation other && Equals(other); + + public override int GetHashCode() => HashCode.Combine(Offset, Line, Column); + + public int CompareTo(SourceLocation other) => Offset.CompareTo(other.Offset); + + public static bool operator ==(SourceLocation left, SourceLocation right) => left.Equals(right); + + public static bool operator !=(SourceLocation left, SourceLocation right) => !left.Equals(right); + + public static bool operator <(SourceLocation left, SourceLocation right) => left.CompareTo(right) < 0; + + public static bool operator <=(SourceLocation left, SourceLocation right) => left.CompareTo(right) <= 0; + + public static bool operator >(SourceLocation left, SourceLocation right) => left.CompareTo(right) > 0; + + public static bool operator >=(SourceLocation left, SourceLocation right) => left.CompareTo(right) >= 0; +} + +/// <summary> +/// Represents a start/end location pair within a policy DSL source document. +/// </summary> +public readonly struct SourceSpan : IEquatable<SourceSpan> +{ + public SourceSpan(SourceLocation start, SourceLocation end) + { + if (start.Offset > end.Offset) + { + throw new ArgumentException("Start must not be after end.", nameof(start)); + } + + Start = start; + End = end; + } + + public SourceLocation Start { get; } + + public SourceLocation End { get; } + + public override string ToString() => $"{Start}->{End}"; + + public bool Equals(SourceSpan other) => Start.Equals(other.Start) && End.Equals(other.End); + + public override bool Equals([NotNullWhen(true)] object? obj) => obj is SourceSpan other && Equals(other); + + public override int GetHashCode() => HashCode.Combine(Start, End); + + public static SourceSpan Combine(SourceSpan first, SourceSpan second) + { + var start = first.Start <= second.Start ? first.Start : second.Start; + var end = first.End >= second.End ? 
first.End : second.End; + return new SourceSpan(start, end); + } +} + +internal enum TokenKind +{ + EndOfFile = 0, + Identifier, + StringLiteral, + NumberLiteral, + BooleanLiteral, + LeftBrace, + RightBrace, + LeftParen, + RightParen, + LeftBracket, + RightBracket, + Comma, + Semicolon, + Colon, + Arrow, // => + Assign, // = + Define, // := + Dot, + KeywordPolicy, + KeywordSyntax, + KeywordMetadata, + KeywordProfile, + KeywordRule, + KeywordMap, + KeywordSource, + KeywordEnv, + KeywordIf, + KeywordThen, + KeywordWhen, + KeywordAnd, + KeywordOr, + KeywordNot, + KeywordPriority, + KeywordElse, + KeywordBecause, + KeywordSettings, + KeywordIgnore, + KeywordUntil, + KeywordEscalate, + KeywordTo, + KeywordRequireVex, + KeywordWarn, + KeywordMessage, + KeywordDefer, + KeywordAnnotate, + KeywordIn, + EqualEqual, + NotEqual, + LessThan, + LessThanOrEqual, + GreaterThan, + GreaterThanOrEqual, + Unknown, +} + +internal readonly record struct DslToken( + TokenKind Kind, + string Text, + SourceSpan Span, + object? Value = null); diff --git a/src/StellaOps.Policy.Engine/Compilation/DslTokenizer.cs b/src/Policy/StellaOps.Policy.Engine/Compilation/DslTokenizer.cs similarity index 97% rename from src/StellaOps.Policy.Engine/Compilation/DslTokenizer.cs rename to src/Policy/StellaOps.Policy.Engine/Compilation/DslTokenizer.cs index 01f68ce8..e1311358 100644 --- a/src/StellaOps.Policy.Engine/Compilation/DslTokenizer.cs +++ b/src/Policy/StellaOps.Policy.Engine/Compilation/DslTokenizer.cs @@ -1,576 +1,576 @@ -using System.Collections.Immutable; -using System.Globalization; -using System.Text; -using StellaOps.Policy; - -namespace StellaOps.Policy.Engine.Compilation; - -internal static class DslTokenizer -{ - public static TokenizerResult Tokenize(string source) - { - if (source is null) - { - throw new ArgumentNullException(nameof(source)); - } - - var tokens = ImmutableArray.CreateBuilder<DslToken>(); - var diagnostics = ImmutableArray.CreateBuilder<PolicyIssue>(); - - var index = 0; - var line = 1; - var column = 1; - - while (index < source.Length) - { - var current = source[index]; - if (char.IsWhiteSpace(current)) - { - (index, line, column) = AdvanceWhitespace(source, index, line, column); - continue; - } - - if (current == '/' && index + 1 < source.Length) - { - if (source[index + 1] == '/') - { - (index, line, column) = SkipSingleLineComment(source, index + 2, line, column + 2); - continue; - } - - if (source[index + 1] == '*') - { - (index, line, column) = SkipMultiLineComment(source, index + 2, line, column + 2, diagnostics); - continue; - } - } - - var startLocation = new SourceLocation(index, line, column); - switch (current) - { - case '{': - tokens.Add(CreateToken(TokenKind.LeftBrace, "{", startLocation, ref index, ref column)); - break; - case '}': - tokens.Add(CreateToken(TokenKind.RightBrace, "}", startLocation, ref index, ref column)); - break; - case '(': - tokens.Add(CreateToken(TokenKind.LeftParen, "(", startLocation, ref index, ref column)); - break; - case ')': - tokens.Add(CreateToken(TokenKind.RightParen, ")", startLocation, ref index, ref column)); - break; - case '[': - tokens.Add(CreateToken(TokenKind.LeftBracket, "[", startLocation, ref index, ref column)); - break; - case ']': - tokens.Add(CreateToken(TokenKind.RightBracket, "]", startLocation, ref index, ref column)); - break; - case ',': - tokens.Add(CreateToken(TokenKind.Comma, ",", startLocation, ref index, ref column)); - break; - case ';': - tokens.Add(CreateToken(TokenKind.Semicolon, ";", startLocation, ref index, ref 
column)); - break; - case ':': - { - if (Match(source, index + 1, '=')) - { - tokens.Add(CreateToken(TokenKind.Define, ":=", startLocation, ref index, ref column, advance: 2)); - } - else - { - tokens.Add(CreateToken(TokenKind.Colon, ":", startLocation, ref index, ref column)); - } - - break; - } - case '=': - { - if (Match(source, index + 1, '>')) - { - tokens.Add(CreateToken(TokenKind.Arrow, "=>", startLocation, ref index, ref column, advance: 2)); - } - else if (Match(source, index + 1, '=')) - { - tokens.Add(CreateToken(TokenKind.EqualEqual, "==", startLocation, ref index, ref column, advance: 2)); - } - else - { - tokens.Add(CreateToken(TokenKind.Assign, "=", startLocation, ref index, ref column)); - } - - break; - } - case '!': - { - if (Match(source, index + 1, '=')) - { - tokens.Add(CreateToken(TokenKind.NotEqual, "!=", startLocation, ref index, ref column, advance: 2)); - } - else - { - ReportUnexpectedCharacter(diagnostics, current, startLocation); - index++; - column++; - } - - break; - } - case '<': - { - if (Match(source, index + 1, '=')) - { - tokens.Add(CreateToken(TokenKind.LessThanOrEqual, "<=", startLocation, ref index, ref column, advance: 2)); - } - else - { - tokens.Add(CreateToken(TokenKind.LessThan, "<", startLocation, ref index, ref column)); - } - - break; - } - case '>': - { - if (Match(source, index + 1, '=')) - { - tokens.Add(CreateToken(TokenKind.GreaterThanOrEqual, ">=", startLocation, ref index, ref column, advance: 2)); - } - else - { - tokens.Add(CreateToken(TokenKind.GreaterThan, ">", startLocation, ref index, ref column)); - } - - break; - } - case '.': - tokens.Add(CreateToken(TokenKind.Dot, ".", startLocation, ref index, ref column)); - break; - case '"': - TokenizeString(source, ref index, ref line, ref column, startLocation, tokens, diagnostics); - break; - case '+': - case '-': - { - if (index + 1 < source.Length && char.IsDigit(source[index + 1])) - { - TokenizeNumber(source, ref index, ref line, ref column, startLocation, tokens, diagnostics); - } - else - { - ReportUnexpectedCharacter(diagnostics, current, startLocation); - index++; - column++; - } - - break; - } - default: - { - if (char.IsDigit(current)) - { - TokenizeNumber(source, ref index, ref line, ref column, startLocation, tokens, diagnostics); - } - else if (IsIdentifierStart(current)) - { - TokenizeIdentifierOrKeyword(source, ref index, ref line, ref column, startLocation, tokens); - } - else - { - ReportUnexpectedCharacter(diagnostics, current, startLocation); - index++; - column++; - } - - break; - } - } - } - - var eofLocation = new SourceLocation(index, line, column); - tokens.Add(new DslToken(TokenKind.EndOfFile, string.Empty, new SourceSpan(eofLocation, eofLocation))); - - return new TokenizerResult(tokens.ToImmutable(), diagnostics.ToImmutable()); - } - - private static void TokenizeString( - string source, - ref int index, - ref int line, - ref int column, - SourceLocation start, - ImmutableArray<DslToken>.Builder tokens, - ImmutableArray<PolicyIssue>.Builder diagnostics) - { - var builder = new StringBuilder(); - var i = index + 1; - var currentLine = line; - var currentColumn = column + 1; - - while (i < source.Length) - { - var ch = source[i]; - if (ch == '"') - { - var end = new SourceLocation(i + 1, currentLine, currentColumn + 1); - index = i + 1; - column = currentColumn + 1; - tokens.Add(new DslToken(TokenKind.StringLiteral, builder.ToString(), new SourceSpan(start, end), builder.ToString())); - return; - } - - if (ch == '\\') - { - if (i + 1 >= source.Length) - { - 
diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.UnterminatedString, "Unterminated string literal.", $"@{start.Line}:{start.Column}")); - index = source.Length; - line = currentLine; - column = currentColumn; - return; - } - - var escape = source[i + 1]; - switch (escape) - { - case '\\': - builder.Append('\\'); - break; - case '"': - builder.Append('"'); - break; - case 'n': - builder.Append('\n'); - break; - case 'r': - builder.Append('\r'); - break; - case 't': - builder.Append('\t'); - break; - default: - diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.InvalidEscapeSequence, $"Invalid escape sequence '\\{escape}'.", $"@{currentLine}:{currentColumn}")); - builder.Append(escape); - break; - } - - i += 2; - currentColumn += 2; - continue; - } - - if (ch == '\r' || ch == '\n') - { - diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.UnterminatedString, "Unterminated string literal.", $"@{start.Line}:{start.Column}")); - (index, line, column) = AdvanceWhitespace(source, i, currentLine, currentColumn); - return; - } - - builder.Append(ch); - i++; - currentColumn++; - } - - diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.UnterminatedString, "Unterminated string literal.", $"@{start.Line}:{start.Column}")); - index = source.Length; - line = currentLine; - column = currentColumn; - } - - private static void TokenizeNumber( - string source, - ref int index, - ref int line, - ref int column, - SourceLocation start, - ImmutableArray<DslToken>.Builder tokens, - ImmutableArray<PolicyIssue>.Builder diagnostics) - { - var i = index; - var hasDecimal = false; - - if (source[i] == '+' || source[i] == '-') - { - i++; - } - - while (i < source.Length) - { - var ch = source[i]; - if (char.IsDigit(ch)) - { - i++; - continue; - } - - if (ch == '.') - { - if (hasDecimal) - { - break; - } - - hasDecimal = true; - i++; - continue; - } - - break; - } - - var percent = false; - if (i < source.Length && source[i] == '%') - { - percent = true; - i++; - } - - var text = source.Substring(index, i - index); - if (!decimal.TryParse(text, NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint, CultureInfo.InvariantCulture, out var value)) - { - diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.InvalidNumber, $"Invalid numeric literal '{text}'.", $"@{start.Line}:{start.Column}")); - index = i; - column += i - index; - return; - } - - if (percent) - { - value /= 100m; - } - - var end = new SourceLocation(i, line, column + (i - index)); - tokens.Add(new DslToken(TokenKind.NumberLiteral, text, new SourceSpan(start, end), value)); - column += i - index; - index = i; - } - - private static void TokenizeIdentifierOrKeyword( - string source, - ref int index, - ref int line, - ref int column, - SourceLocation start, - ImmutableArray<DslToken>.Builder tokens) - { - var i = index + 1; - while (i < source.Length && IsIdentifierPart(source[i])) - { - i++; - } - - var text = source.Substring(index, i - index); - var kind = GetKeywordKind(text); - - if (kind == TokenKind.BooleanLiteral) - { - var value = string.Equals(text, "true", StringComparison.Ordinal); - var end = new SourceLocation(i, line, column + (i - index)); - tokens.Add(new DslToken(TokenKind.BooleanLiteral, text, new SourceSpan(start, end), value)); - } - else if (kind == TokenKind.Identifier) - { - var end = new SourceLocation(i, line, column + (i - index)); - tokens.Add(new DslToken(TokenKind.Identifier, text, new SourceSpan(start, end))); - } - else - { - var end = new SourceLocation(i, line, column + (i - 
index)); - tokens.Add(new DslToken(kind, text, new SourceSpan(start, end))); - } - - column += i - index; - index = i; - } - - private static TokenKind GetKeywordKind(string text) - { - return text switch - { - "policy" => TokenKind.KeywordPolicy, - "syntax" => TokenKind.KeywordSyntax, - "metadata" => TokenKind.KeywordMetadata, - "profile" => TokenKind.KeywordProfile, - "rule" => TokenKind.KeywordRule, - "map" => TokenKind.KeywordMap, - "source" => TokenKind.KeywordSource, - "env" => TokenKind.Identifier, - "if" => TokenKind.KeywordIf, - "then" => TokenKind.KeywordThen, - "when" => TokenKind.KeywordWhen, - "and" => TokenKind.KeywordAnd, - "or" => TokenKind.KeywordOr, - "not" => TokenKind.KeywordNot, - "priority" => TokenKind.KeywordPriority, - "else" => TokenKind.KeywordElse, - "because" => TokenKind.KeywordBecause, - "settings" => TokenKind.KeywordSettings, - "ignore" => TokenKind.KeywordIgnore, - "until" => TokenKind.KeywordUntil, - "escalate" => TokenKind.KeywordEscalate, - "to" => TokenKind.KeywordTo, - "requireVex" => TokenKind.KeywordRequireVex, - "warn" => TokenKind.KeywordWarn, - "message" => TokenKind.KeywordMessage, - "defer" => TokenKind.KeywordDefer, - "annotate" => TokenKind.KeywordAnnotate, - "in" => TokenKind.KeywordIn, - "true" => TokenKind.BooleanLiteral, - "false" => TokenKind.BooleanLiteral, - _ => TokenKind.Identifier, - }; - } - - private static bool IsIdentifierStart(char ch) => char.IsLetter(ch) || ch == '_'; - - private static bool IsIdentifierPart(char ch) => char.IsLetterOrDigit(ch) || ch == '_' || ch == '-'; - - private static (int Index, int Line, int Column) AdvanceWhitespace(string source, int index, int line, int column) - { - var i = index; - var currentLine = line; - var currentColumn = column; - - while (i < source.Length) - { - var ch = source[i]; - if (ch == '\r') - { - if (i + 1 < source.Length && source[i + 1] == '\n') - { - i += 2; - } - else - { - i++; - } - - currentLine++; - currentColumn = 1; - continue; - } - - if (ch == '\n') - { - i++; - currentLine++; - currentColumn = 1; - continue; - } - - if (!char.IsWhiteSpace(ch)) - { - break; - } - - i++; - currentColumn++; - } - - return (i, currentLine, currentColumn); - } - - private static (int Index, int Line, int Column) SkipSingleLineComment(string source, int index, int line, int column) - { - var i = index; - var currentLine = line; - var currentColumn = column; - - while (i < source.Length) - { - var ch = source[i]; - if (ch == '\r' || ch == '\n') - { - return AdvanceWhitespace(source, i, currentLine, currentColumn); - } - - i++; - currentColumn++; - } - - return (i, currentLine, currentColumn); - } - - private static (int Index, int Line, int Column) SkipMultiLineComment( - string source, - int index, - int line, - int column, - ImmutableArray<PolicyIssue>.Builder diagnostics) - { - var i = index; - var currentLine = line; - var currentColumn = column; - - while (i < source.Length) - { - var ch = source[i]; - if (ch == '*' && i + 1 < source.Length && source[i + 1] == '/') - { - return (i + 2, currentLine, currentColumn + 2); - } - - if (ch == '\r') - { - if (i + 1 < source.Length && source[i + 1] == '\n') - { - i += 2; - } - else - { - i++; - } - - currentLine++; - currentColumn = 1; - continue; - } - - if (ch == '\n') - { - i++; - currentLine++; - currentColumn = 1; - continue; - } - - i++; - currentColumn++; - } - - diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.UnexpectedCharacter, "Unterminated comment block.", $"@{line}:{column}")); - return (source.Length, currentLine, 
currentColumn); - } - - private static DslToken CreateToken( - TokenKind kind, - string text, - SourceLocation start, - ref int index, - ref int column, - int advance = 1) - { - var end = new SourceLocation(index + advance, start.Line, start.Column + advance); - index += advance; - column += advance; - return new DslToken(kind, text, new SourceSpan(start, end)); - } - - private static void ReportUnexpectedCharacter( - ImmutableArray<PolicyIssue>.Builder diagnostics, - char ch, - SourceLocation location) - { - diagnostics.Add(PolicyIssue.Error( - PolicyDslDiagnosticCodes.UnexpectedCharacter, - $"Unexpected character '{ch}'.", - $"@{location.Line}:{location.Column}")); - } - - private static bool Match(string source, int index, char expected) => - index < source.Length && source[index] == expected; -} - -internal readonly record struct TokenizerResult( - ImmutableArray<DslToken> Tokens, - ImmutableArray<PolicyIssue> Diagnostics); +using System.Collections.Immutable; +using System.Globalization; +using System.Text; +using StellaOps.Policy; + +namespace StellaOps.Policy.Engine.Compilation; + +internal static class DslTokenizer +{ + public static TokenizerResult Tokenize(string source) + { + if (source is null) + { + throw new ArgumentNullException(nameof(source)); + } + + var tokens = ImmutableArray.CreateBuilder<DslToken>(); + var diagnostics = ImmutableArray.CreateBuilder<PolicyIssue>(); + + var index = 0; + var line = 1; + var column = 1; + + while (index < source.Length) + { + var current = source[index]; + if (char.IsWhiteSpace(current)) + { + (index, line, column) = AdvanceWhitespace(source, index, line, column); + continue; + } + + if (current == '/' && index + 1 < source.Length) + { + if (source[index + 1] == '/') + { + (index, line, column) = SkipSingleLineComment(source, index + 2, line, column + 2); + continue; + } + + if (source[index + 1] == '*') + { + (index, line, column) = SkipMultiLineComment(source, index + 2, line, column + 2, diagnostics); + continue; + } + } + + var startLocation = new SourceLocation(index, line, column); + switch (current) + { + case '{': + tokens.Add(CreateToken(TokenKind.LeftBrace, "{", startLocation, ref index, ref column)); + break; + case '}': + tokens.Add(CreateToken(TokenKind.RightBrace, "}", startLocation, ref index, ref column)); + break; + case '(': + tokens.Add(CreateToken(TokenKind.LeftParen, "(", startLocation, ref index, ref column)); + break; + case ')': + tokens.Add(CreateToken(TokenKind.RightParen, ")", startLocation, ref index, ref column)); + break; + case '[': + tokens.Add(CreateToken(TokenKind.LeftBracket, "[", startLocation, ref index, ref column)); + break; + case ']': + tokens.Add(CreateToken(TokenKind.RightBracket, "]", startLocation, ref index, ref column)); + break; + case ',': + tokens.Add(CreateToken(TokenKind.Comma, ",", startLocation, ref index, ref column)); + break; + case ';': + tokens.Add(CreateToken(TokenKind.Semicolon, ";", startLocation, ref index, ref column)); + break; + case ':': + { + if (Match(source, index + 1, '=')) + { + tokens.Add(CreateToken(TokenKind.Define, ":=", startLocation, ref index, ref column, advance: 2)); + } + else + { + tokens.Add(CreateToken(TokenKind.Colon, ":", startLocation, ref index, ref column)); + } + + break; + } + case '=': + { + if (Match(source, index + 1, '>')) + { + tokens.Add(CreateToken(TokenKind.Arrow, "=>", startLocation, ref index, ref column, advance: 2)); + } + else if (Match(source, index + 1, '=')) + { + tokens.Add(CreateToken(TokenKind.EqualEqual, "==", startLocation, ref 
index, ref column, advance: 2)); + } + else + { + tokens.Add(CreateToken(TokenKind.Assign, "=", startLocation, ref index, ref column)); + } + + break; + } + case '!': + { + if (Match(source, index + 1, '=')) + { + tokens.Add(CreateToken(TokenKind.NotEqual, "!=", startLocation, ref index, ref column, advance: 2)); + } + else + { + ReportUnexpectedCharacter(diagnostics, current, startLocation); + index++; + column++; + } + + break; + } + case '<': + { + if (Match(source, index + 1, '=')) + { + tokens.Add(CreateToken(TokenKind.LessThanOrEqual, "<=", startLocation, ref index, ref column, advance: 2)); + } + else + { + tokens.Add(CreateToken(TokenKind.LessThan, "<", startLocation, ref index, ref column)); + } + + break; + } + case '>': + { + if (Match(source, index + 1, '=')) + { + tokens.Add(CreateToken(TokenKind.GreaterThanOrEqual, ">=", startLocation, ref index, ref column, advance: 2)); + } + else + { + tokens.Add(CreateToken(TokenKind.GreaterThan, ">", startLocation, ref index, ref column)); + } + + break; + } + case '.': + tokens.Add(CreateToken(TokenKind.Dot, ".", startLocation, ref index, ref column)); + break; + case '"': + TokenizeString(source, ref index, ref line, ref column, startLocation, tokens, diagnostics); + break; + case '+': + case '-': + { + if (index + 1 < source.Length && char.IsDigit(source[index + 1])) + { + TokenizeNumber(source, ref index, ref line, ref column, startLocation, tokens, diagnostics); + } + else + { + ReportUnexpectedCharacter(diagnostics, current, startLocation); + index++; + column++; + } + + break; + } + default: + { + if (char.IsDigit(current)) + { + TokenizeNumber(source, ref index, ref line, ref column, startLocation, tokens, diagnostics); + } + else if (IsIdentifierStart(current)) + { + TokenizeIdentifierOrKeyword(source, ref index, ref line, ref column, startLocation, tokens); + } + else + { + ReportUnexpectedCharacter(diagnostics, current, startLocation); + index++; + column++; + } + + break; + } + } + } + + var eofLocation = new SourceLocation(index, line, column); + tokens.Add(new DslToken(TokenKind.EndOfFile, string.Empty, new SourceSpan(eofLocation, eofLocation))); + + return new TokenizerResult(tokens.ToImmutable(), diagnostics.ToImmutable()); + } + + private static void TokenizeString( + string source, + ref int index, + ref int line, + ref int column, + SourceLocation start, + ImmutableArray<DslToken>.Builder tokens, + ImmutableArray<PolicyIssue>.Builder diagnostics) + { + var builder = new StringBuilder(); + var i = index + 1; + var currentLine = line; + var currentColumn = column + 1; + + while (i < source.Length) + { + var ch = source[i]; + if (ch == '"') + { + var end = new SourceLocation(i + 1, currentLine, currentColumn + 1); + index = i + 1; + column = currentColumn + 1; + tokens.Add(new DslToken(TokenKind.StringLiteral, builder.ToString(), new SourceSpan(start, end), builder.ToString())); + return; + } + + if (ch == '\\') + { + if (i + 1 >= source.Length) + { + diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.UnterminatedString, "Unterminated string literal.", $"@{start.Line}:{start.Column}")); + index = source.Length; + line = currentLine; + column = currentColumn; + return; + } + + var escape = source[i + 1]; + switch (escape) + { + case '\\': + builder.Append('\\'); + break; + case '"': + builder.Append('"'); + break; + case 'n': + builder.Append('\n'); + break; + case 'r': + builder.Append('\r'); + break; + case 't': + builder.Append('\t'); + break; + default: + 
diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.InvalidEscapeSequence, $"Invalid escape sequence '\\{escape}'.", $"@{currentLine}:{currentColumn}")); + builder.Append(escape); + break; + } + + i += 2; + currentColumn += 2; + continue; + } + + if (ch == '\r' || ch == '\n') + { + diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.UnterminatedString, "Unterminated string literal.", $"@{start.Line}:{start.Column}")); + (index, line, column) = AdvanceWhitespace(source, i, currentLine, currentColumn); + return; + } + + builder.Append(ch); + i++; + currentColumn++; + } + + diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.UnterminatedString, "Unterminated string literal.", $"@{start.Line}:{start.Column}")); + index = source.Length; + line = currentLine; + column = currentColumn; + } + + private static void TokenizeNumber( + string source, + ref int index, + ref int line, + ref int column, + SourceLocation start, + ImmutableArray<DslToken>.Builder tokens, + ImmutableArray<PolicyIssue>.Builder diagnostics) + { + var i = index; + var hasDecimal = false; + + if (source[i] == '+' || source[i] == '-') + { + i++; + } + + while (i < source.Length) + { + var ch = source[i]; + if (char.IsDigit(ch)) + { + i++; + continue; + } + + if (ch == '.') + { + if (hasDecimal) + { + break; + } + + hasDecimal = true; + i++; + continue; + } + + break; + } + + var percent = false; + if (i < source.Length && source[i] == '%') + { + percent = true; + i++; + } + + var text = source.Substring(index, i - index); + if (!decimal.TryParse(text, NumberStyles.AllowLeadingSign | NumberStyles.AllowDecimalPoint, CultureInfo.InvariantCulture, out var value)) + { + diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.InvalidNumber, $"Invalid numeric literal '{text}'.", $"@{start.Line}:{start.Column}")); + index = i; + column += i - index; + return; + } + + if (percent) + { + value /= 100m; + } + + var end = new SourceLocation(i, line, column + (i - index)); + tokens.Add(new DslToken(TokenKind.NumberLiteral, text, new SourceSpan(start, end), value)); + column += i - index; + index = i; + } + + private static void TokenizeIdentifierOrKeyword( + string source, + ref int index, + ref int line, + ref int column, + SourceLocation start, + ImmutableArray<DslToken>.Builder tokens) + { + var i = index + 1; + while (i < source.Length && IsIdentifierPart(source[i])) + { + i++; + } + + var text = source.Substring(index, i - index); + var kind = GetKeywordKind(text); + + if (kind == TokenKind.BooleanLiteral) + { + var value = string.Equals(text, "true", StringComparison.Ordinal); + var end = new SourceLocation(i, line, column + (i - index)); + tokens.Add(new DslToken(TokenKind.BooleanLiteral, text, new SourceSpan(start, end), value)); + } + else if (kind == TokenKind.Identifier) + { + var end = new SourceLocation(i, line, column + (i - index)); + tokens.Add(new DslToken(TokenKind.Identifier, text, new SourceSpan(start, end))); + } + else + { + var end = new SourceLocation(i, line, column + (i - index)); + tokens.Add(new DslToken(kind, text, new SourceSpan(start, end))); + } + + column += i - index; + index = i; + } + + private static TokenKind GetKeywordKind(string text) + { + return text switch + { + "policy" => TokenKind.KeywordPolicy, + "syntax" => TokenKind.KeywordSyntax, + "metadata" => TokenKind.KeywordMetadata, + "profile" => TokenKind.KeywordProfile, + "rule" => TokenKind.KeywordRule, + "map" => TokenKind.KeywordMap, + "source" => TokenKind.KeywordSource, + "env" => TokenKind.Identifier, + "if" => 
TokenKind.KeywordIf, + "then" => TokenKind.KeywordThen, + "when" => TokenKind.KeywordWhen, + "and" => TokenKind.KeywordAnd, + "or" => TokenKind.KeywordOr, + "not" => TokenKind.KeywordNot, + "priority" => TokenKind.KeywordPriority, + "else" => TokenKind.KeywordElse, + "because" => TokenKind.KeywordBecause, + "settings" => TokenKind.KeywordSettings, + "ignore" => TokenKind.KeywordIgnore, + "until" => TokenKind.KeywordUntil, + "escalate" => TokenKind.KeywordEscalate, + "to" => TokenKind.KeywordTo, + "requireVex" => TokenKind.KeywordRequireVex, + "warn" => TokenKind.KeywordWarn, + "message" => TokenKind.KeywordMessage, + "defer" => TokenKind.KeywordDefer, + "annotate" => TokenKind.KeywordAnnotate, + "in" => TokenKind.KeywordIn, + "true" => TokenKind.BooleanLiteral, + "false" => TokenKind.BooleanLiteral, + _ => TokenKind.Identifier, + }; + } + + private static bool IsIdentifierStart(char ch) => char.IsLetter(ch) || ch == '_'; + + private static bool IsIdentifierPart(char ch) => char.IsLetterOrDigit(ch) || ch == '_' || ch == '-'; + + private static (int Index, int Line, int Column) AdvanceWhitespace(string source, int index, int line, int column) + { + var i = index; + var currentLine = line; + var currentColumn = column; + + while (i < source.Length) + { + var ch = source[i]; + if (ch == '\r') + { + if (i + 1 < source.Length && source[i + 1] == '\n') + { + i += 2; + } + else + { + i++; + } + + currentLine++; + currentColumn = 1; + continue; + } + + if (ch == '\n') + { + i++; + currentLine++; + currentColumn = 1; + continue; + } + + if (!char.IsWhiteSpace(ch)) + { + break; + } + + i++; + currentColumn++; + } + + return (i, currentLine, currentColumn); + } + + private static (int Index, int Line, int Column) SkipSingleLineComment(string source, int index, int line, int column) + { + var i = index; + var currentLine = line; + var currentColumn = column; + + while (i < source.Length) + { + var ch = source[i]; + if (ch == '\r' || ch == '\n') + { + return AdvanceWhitespace(source, i, currentLine, currentColumn); + } + + i++; + currentColumn++; + } + + return (i, currentLine, currentColumn); + } + + private static (int Index, int Line, int Column) SkipMultiLineComment( + string source, + int index, + int line, + int column, + ImmutableArray<PolicyIssue>.Builder diagnostics) + { + var i = index; + var currentLine = line; + var currentColumn = column; + + while (i < source.Length) + { + var ch = source[i]; + if (ch == '*' && i + 1 < source.Length && source[i + 1] == '/') + { + return (i + 2, currentLine, currentColumn + 2); + } + + if (ch == '\r') + { + if (i + 1 < source.Length && source[i + 1] == '\n') + { + i += 2; + } + else + { + i++; + } + + currentLine++; + currentColumn = 1; + continue; + } + + if (ch == '\n') + { + i++; + currentLine++; + currentColumn = 1; + continue; + } + + i++; + currentColumn++; + } + + diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.UnexpectedCharacter, "Unterminated comment block.", $"@{line}:{column}")); + return (source.Length, currentLine, currentColumn); + } + + private static DslToken CreateToken( + TokenKind kind, + string text, + SourceLocation start, + ref int index, + ref int column, + int advance = 1) + { + var end = new SourceLocation(index + advance, start.Line, start.Column + advance); + index += advance; + column += advance; + return new DslToken(kind, text, new SourceSpan(start, end)); + } + + private static void ReportUnexpectedCharacter( + ImmutableArray<PolicyIssue>.Builder diagnostics, + char ch, + SourceLocation location) + { + 
diagnostics.Add(PolicyIssue.Error( + PolicyDslDiagnosticCodes.UnexpectedCharacter, + $"Unexpected character '{ch}'.", + $"@{location.Line}:{location.Column}")); + } + + private static bool Match(string source, int index, char expected) => + index < source.Length && source[index] == expected; +} + +internal readonly record struct TokenizerResult( + ImmutableArray<DslToken> Tokens, + ImmutableArray<PolicyIssue> Diagnostics); diff --git a/src/StellaOps.Policy.Engine/Compilation/PolicyCompiler.cs b/src/Policy/StellaOps.Policy.Engine/Compilation/PolicyCompiler.cs similarity index 97% rename from src/StellaOps.Policy.Engine/Compilation/PolicyCompiler.cs rename to src/Policy/StellaOps.Policy.Engine/Compilation/PolicyCompiler.cs index 48ead072..f7e630ea 100644 --- a/src/StellaOps.Policy.Engine/Compilation/PolicyCompiler.cs +++ b/src/Policy/StellaOps.Policy.Engine/Compilation/PolicyCompiler.cs @@ -1,169 +1,169 @@ -using System.Collections.Immutable; -using System.Linq; -using System.Security.Cryptography; -using StellaOps.Policy; - -namespace StellaOps.Policy.Engine.Compilation; - -public sealed class PolicyCompiler -{ - public PolicyCompilationResult Compile(string source) - { - if (source is null) - { - throw new ArgumentNullException(nameof(source)); - } - - var parseResult = PolicyParser.Parse(source); - if (parseResult.Document is null) - { - return new PolicyCompilationResult( - Success: false, - Document: null, - Checksum: null, - CanonicalRepresentation: ImmutableArray<byte>.Empty, - Diagnostics: parseResult.Diagnostics); - } - - if (parseResult.Diagnostics.Any(static issue => issue.Severity == PolicyIssueSeverity.Error)) - { - return new PolicyCompilationResult( - Success: false, - Document: null, - Checksum: null, - CanonicalRepresentation: ImmutableArray<byte>.Empty, - Diagnostics: parseResult.Diagnostics); - } - - var irDocument = BuildIntermediateRepresentation(parseResult.Document); - var canonical = PolicyIrSerializer.Serialize(irDocument); - var checksum = Convert.ToHexString(SHA256.HashData(canonical.AsSpan())).ToLowerInvariant(); - - return new PolicyCompilationResult( - Success: true, - Document: irDocument, - Checksum: checksum, - CanonicalRepresentation: canonical, - Diagnostics: parseResult.Diagnostics); - } - - private static PolicyIrDocument BuildIntermediateRepresentation(PolicyDocumentNode node) - { - var metadata = node.Metadata - .OrderBy(static kvp => kvp.Key, StringComparer.Ordinal) - .ToImmutableSortedDictionary(static kvp => kvp.Key, kvp => ToIrLiteral(kvp.Value), StringComparer.Ordinal); - - var settings = node.Settings - .OrderBy(static kvp => kvp.Key, StringComparer.Ordinal) - .ToImmutableSortedDictionary(static kvp => kvp.Key, kvp => ToIrLiteral(kvp.Value), StringComparer.Ordinal); - - var profiles = ImmutableArray.CreateBuilder<PolicyIrProfile>(node.Profiles.Length); - foreach (var profile in node.Profiles) - { - var maps = ImmutableArray.CreateBuilder<PolicyIrProfileMap>(); - var envs = ImmutableArray.CreateBuilder<PolicyIrProfileEnv>(); - var scalars = ImmutableArray.CreateBuilder<PolicyIrProfileScalar>(); - - foreach (var item in profile.Items) - { - switch (item) - { - case PolicyProfileMapNode map: - maps.Add(new PolicyIrProfileMap( - map.Name, - map.Entries - .Select(entry => new PolicyIrProfileMapEntry(entry.Source, entry.Weight)) - .ToImmutableArray())); - break; - case PolicyProfileEnvNode env: - envs.Add(new PolicyIrProfileEnv( - env.Name, - env.Entries - .Select(entry => new PolicyIrProfileEnvEntry(entry.Condition, entry.Weight)) - 
.ToImmutableArray())); - break; - case PolicyProfileScalarNode scalar: - scalars.Add(new PolicyIrProfileScalar(scalar.Name, ToIrLiteral(scalar.Value))); - break; - } - } - - profiles.Add(new PolicyIrProfile( - profile.Name, - maps.ToImmutable(), - envs.ToImmutable(), - scalars.ToImmutable())); - } - - var rules = ImmutableArray.CreateBuilder<PolicyIrRule>(node.Rules.Length); - foreach (var rule in node.Rules) - { - var thenActions = ImmutableArray.CreateBuilder<PolicyIrAction>(rule.ThenActions.Length); - foreach (var action in rule.ThenActions) - { - var converted = ToIrAction(action); - if (converted is not null) - { - thenActions.Add(converted); - } - } - - var elseActions = ImmutableArray.CreateBuilder<PolicyIrAction>(rule.ElseActions.Length); - foreach (var action in rule.ElseActions) - { - var converted = ToIrAction(action); - if (converted is not null) - { - elseActions.Add(converted); - } - } - - rules.Add(new PolicyIrRule( - rule.Name, - rule.Priority, - rule.When, - thenActions.ToImmutable(), - elseActions.ToImmutable(), - rule.Because ?? string.Empty)); - } - - return new PolicyIrDocument( - node.Name, - node.Syntax, - metadata, - profiles.ToImmutable(), - settings, - rules.ToImmutable()); - } - - private static PolicyIrLiteral ToIrLiteral(PolicyLiteralValue value) => value switch - { - PolicyStringLiteral s => new PolicyIrStringLiteral(s.Value), - PolicyNumberLiteral n => new PolicyIrNumberLiteral(n.Value), - PolicyBooleanLiteral b => new PolicyIrBooleanLiteral(b.Value), - PolicyListLiteral list => new PolicyIrListLiteral(list.Items.Select(ToIrLiteral).ToImmutableArray()), - _ => new PolicyIrStringLiteral(string.Empty), - }; - - private static PolicyIrAction? ToIrAction(PolicyActionNode action) => action switch - { - PolicyAssignmentActionNode assign => new PolicyIrAssignmentAction(assign.Target.Segments, assign.Value), - PolicyAnnotateActionNode annotate => new PolicyIrAnnotateAction(annotate.Target.Segments, annotate.Value), - PolicyIgnoreActionNode ignore => new PolicyIrIgnoreAction(ignore.Until, ignore.Because), - PolicyEscalateActionNode escalate => new PolicyIrEscalateAction(escalate.To, escalate.When), - PolicyRequireVexActionNode require => new PolicyIrRequireVexAction( - require.Conditions - .OrderBy(static kvp => kvp.Key, StringComparer.Ordinal) - .ToImmutableSortedDictionary(static kvp => kvp.Key, kvp => kvp.Value, StringComparer.Ordinal)), - PolicyWarnActionNode warn => new PolicyIrWarnAction(warn.Message), - PolicyDeferActionNode defer => new PolicyIrDeferAction(defer.Until), - _ => null, - }; -} - -public sealed record PolicyCompilationResult( - bool Success, - PolicyIrDocument? Document, - string? 
Checksum, - ImmutableArray<byte> CanonicalRepresentation, - ImmutableArray<PolicyIssue> Diagnostics); +using System.Collections.Immutable; +using System.Linq; +using System.Security.Cryptography; +using StellaOps.Policy; + +namespace StellaOps.Policy.Engine.Compilation; + +public sealed class PolicyCompiler +{ + public PolicyCompilationResult Compile(string source) + { + if (source is null) + { + throw new ArgumentNullException(nameof(source)); + } + + var parseResult = PolicyParser.Parse(source); + if (parseResult.Document is null) + { + return new PolicyCompilationResult( + Success: false, + Document: null, + Checksum: null, + CanonicalRepresentation: ImmutableArray<byte>.Empty, + Diagnostics: parseResult.Diagnostics); + } + + if (parseResult.Diagnostics.Any(static issue => issue.Severity == PolicyIssueSeverity.Error)) + { + return new PolicyCompilationResult( + Success: false, + Document: null, + Checksum: null, + CanonicalRepresentation: ImmutableArray<byte>.Empty, + Diagnostics: parseResult.Diagnostics); + } + + var irDocument = BuildIntermediateRepresentation(parseResult.Document); + var canonical = PolicyIrSerializer.Serialize(irDocument); + var checksum = Convert.ToHexString(SHA256.HashData(canonical.AsSpan())).ToLowerInvariant(); + + return new PolicyCompilationResult( + Success: true, + Document: irDocument, + Checksum: checksum, + CanonicalRepresentation: canonical, + Diagnostics: parseResult.Diagnostics); + } + + private static PolicyIrDocument BuildIntermediateRepresentation(PolicyDocumentNode node) + { + var metadata = node.Metadata + .OrderBy(static kvp => kvp.Key, StringComparer.Ordinal) + .ToImmutableSortedDictionary(static kvp => kvp.Key, kvp => ToIrLiteral(kvp.Value), StringComparer.Ordinal); + + var settings = node.Settings + .OrderBy(static kvp => kvp.Key, StringComparer.Ordinal) + .ToImmutableSortedDictionary(static kvp => kvp.Key, kvp => ToIrLiteral(kvp.Value), StringComparer.Ordinal); + + var profiles = ImmutableArray.CreateBuilder<PolicyIrProfile>(node.Profiles.Length); + foreach (var profile in node.Profiles) + { + var maps = ImmutableArray.CreateBuilder<PolicyIrProfileMap>(); + var envs = ImmutableArray.CreateBuilder<PolicyIrProfileEnv>(); + var scalars = ImmutableArray.CreateBuilder<PolicyIrProfileScalar>(); + + foreach (var item in profile.Items) + { + switch (item) + { + case PolicyProfileMapNode map: + maps.Add(new PolicyIrProfileMap( + map.Name, + map.Entries + .Select(entry => new PolicyIrProfileMapEntry(entry.Source, entry.Weight)) + .ToImmutableArray())); + break; + case PolicyProfileEnvNode env: + envs.Add(new PolicyIrProfileEnv( + env.Name, + env.Entries + .Select(entry => new PolicyIrProfileEnvEntry(entry.Condition, entry.Weight)) + .ToImmutableArray())); + break; + case PolicyProfileScalarNode scalar: + scalars.Add(new PolicyIrProfileScalar(scalar.Name, ToIrLiteral(scalar.Value))); + break; + } + } + + profiles.Add(new PolicyIrProfile( + profile.Name, + maps.ToImmutable(), + envs.ToImmutable(), + scalars.ToImmutable())); + } + + var rules = ImmutableArray.CreateBuilder<PolicyIrRule>(node.Rules.Length); + foreach (var rule in node.Rules) + { + var thenActions = ImmutableArray.CreateBuilder<PolicyIrAction>(rule.ThenActions.Length); + foreach (var action in rule.ThenActions) + { + var converted = ToIrAction(action); + if (converted is not null) + { + thenActions.Add(converted); + } + } + + var elseActions = ImmutableArray.CreateBuilder<PolicyIrAction>(rule.ElseActions.Length); + foreach (var action in rule.ElseActions) + { + var converted = 
ToIrAction(action); + if (converted is not null) + { + elseActions.Add(converted); + } + } + + rules.Add(new PolicyIrRule( + rule.Name, + rule.Priority, + rule.When, + thenActions.ToImmutable(), + elseActions.ToImmutable(), + rule.Because ?? string.Empty)); + } + + return new PolicyIrDocument( + node.Name, + node.Syntax, + metadata, + profiles.ToImmutable(), + settings, + rules.ToImmutable()); + } + + private static PolicyIrLiteral ToIrLiteral(PolicyLiteralValue value) => value switch + { + PolicyStringLiteral s => new PolicyIrStringLiteral(s.Value), + PolicyNumberLiteral n => new PolicyIrNumberLiteral(n.Value), + PolicyBooleanLiteral b => new PolicyIrBooleanLiteral(b.Value), + PolicyListLiteral list => new PolicyIrListLiteral(list.Items.Select(ToIrLiteral).ToImmutableArray()), + _ => new PolicyIrStringLiteral(string.Empty), + }; + + private static PolicyIrAction? ToIrAction(PolicyActionNode action) => action switch + { + PolicyAssignmentActionNode assign => new PolicyIrAssignmentAction(assign.Target.Segments, assign.Value), + PolicyAnnotateActionNode annotate => new PolicyIrAnnotateAction(annotate.Target.Segments, annotate.Value), + PolicyIgnoreActionNode ignore => new PolicyIrIgnoreAction(ignore.Until, ignore.Because), + PolicyEscalateActionNode escalate => new PolicyIrEscalateAction(escalate.To, escalate.When), + PolicyRequireVexActionNode require => new PolicyIrRequireVexAction( + require.Conditions + .OrderBy(static kvp => kvp.Key, StringComparer.Ordinal) + .ToImmutableSortedDictionary(static kvp => kvp.Key, kvp => kvp.Value, StringComparer.Ordinal)), + PolicyWarnActionNode warn => new PolicyIrWarnAction(warn.Message), + PolicyDeferActionNode defer => new PolicyIrDeferAction(defer.Until), + _ => null, + }; +} + +public sealed record PolicyCompilationResult( + bool Success, + PolicyIrDocument? Document, + string? 
Checksum, + ImmutableArray<byte> CanonicalRepresentation, + ImmutableArray<PolicyIssue> Diagnostics); diff --git a/src/StellaOps.Policy.Engine/Compilation/PolicyDslDiagnosticCodes.cs b/src/Policy/StellaOps.Policy.Engine/Compilation/PolicyDslDiagnosticCodes.cs similarity index 98% rename from src/StellaOps.Policy.Engine/Compilation/PolicyDslDiagnosticCodes.cs rename to src/Policy/StellaOps.Policy.Engine/Compilation/PolicyDslDiagnosticCodes.cs index 242e8528..f651c607 100644 --- a/src/StellaOps.Policy.Engine/Compilation/PolicyDslDiagnosticCodes.cs +++ b/src/Policy/StellaOps.Policy.Engine/Compilation/PolicyDslDiagnosticCodes.cs @@ -1,19 +1,19 @@ -namespace StellaOps.Policy.Engine.Compilation; - -internal static class PolicyDslDiagnosticCodes -{ - public const string UnexpectedCharacter = "POLICY-DSL-LEX-001"; - public const string UnterminatedString = "POLICY-DSL-LEX-002"; - public const string InvalidEscapeSequence = "POLICY-DSL-LEX-003"; - public const string InvalidNumber = "POLICY-DSL-LEX-004"; - public const string UnexpectedToken = "POLICY-DSL-PARSE-001"; - public const string DuplicateSection = "POLICY-DSL-PARSE-002"; - public const string MissingPolicyHeader = "POLICY-DSL-PARSE-003"; - public const string UnsupportedSyntaxVersion = "POLICY-DSL-PARSE-004"; - public const string DuplicateRuleName = "POLICY-DSL-PARSE-005"; - public const string MissingBecauseClause = "POLICY-DSL-PARSE-006"; - public const string MissingTerminator = "POLICY-DSL-PARSE-007"; - public const string InvalidAction = "POLICY-DSL-PARSE-008"; - public const string InvalidLiteral = "POLICY-DSL-PARSE-009"; - public const string UnexpectedSection = "POLICY-DSL-PARSE-010"; -} +namespace StellaOps.Policy.Engine.Compilation; + +internal static class PolicyDslDiagnosticCodes +{ + public const string UnexpectedCharacter = "POLICY-DSL-LEX-001"; + public const string UnterminatedString = "POLICY-DSL-LEX-002"; + public const string InvalidEscapeSequence = "POLICY-DSL-LEX-003"; + public const string InvalidNumber = "POLICY-DSL-LEX-004"; + public const string UnexpectedToken = "POLICY-DSL-PARSE-001"; + public const string DuplicateSection = "POLICY-DSL-PARSE-002"; + public const string MissingPolicyHeader = "POLICY-DSL-PARSE-003"; + public const string UnsupportedSyntaxVersion = "POLICY-DSL-PARSE-004"; + public const string DuplicateRuleName = "POLICY-DSL-PARSE-005"; + public const string MissingBecauseClause = "POLICY-DSL-PARSE-006"; + public const string MissingTerminator = "POLICY-DSL-PARSE-007"; + public const string InvalidAction = "POLICY-DSL-PARSE-008"; + public const string InvalidLiteral = "POLICY-DSL-PARSE-009"; + public const string UnexpectedSection = "POLICY-DSL-PARSE-010"; +} diff --git a/src/StellaOps.Policy.Engine/Compilation/PolicyIr.cs b/src/Policy/StellaOps.Policy.Engine/Compilation/PolicyIr.cs similarity index 97% rename from src/StellaOps.Policy.Engine/Compilation/PolicyIr.cs rename to src/Policy/StellaOps.Policy.Engine/Compilation/PolicyIr.cs index 6f9aec83..9eaacb22 100644 --- a/src/StellaOps.Policy.Engine/Compilation/PolicyIr.cs +++ b/src/Policy/StellaOps.Policy.Engine/Compilation/PolicyIr.cs @@ -1,61 +1,61 @@ -using System.Collections.Immutable; - -namespace StellaOps.Policy.Engine.Compilation; - -public sealed record PolicyIrDocument( - string Name, - string Syntax, - ImmutableSortedDictionary<string, PolicyIrLiteral> Metadata, - ImmutableArray<PolicyIrProfile> Profiles, - ImmutableSortedDictionary<string, PolicyIrLiteral> Settings, - ImmutableArray<PolicyIrRule> Rules); - -public abstract record 
PolicyIrLiteral; - -public sealed record PolicyIrStringLiteral(string Value) : PolicyIrLiteral; - -public sealed record PolicyIrNumberLiteral(decimal Value) : PolicyIrLiteral; - -public sealed record PolicyIrBooleanLiteral(bool Value) : PolicyIrLiteral; - -public sealed record PolicyIrListLiteral(ImmutableArray<PolicyIrLiteral> Items) : PolicyIrLiteral; - -public sealed record PolicyIrProfile( - string Name, - ImmutableArray<PolicyIrProfileMap> Maps, - ImmutableArray<PolicyIrProfileEnv> Environments, - ImmutableArray<PolicyIrProfileScalar> Scalars); - -public sealed record PolicyIrProfileMap(string Name, ImmutableArray<PolicyIrProfileMapEntry> Entries); - -public sealed record PolicyIrProfileMapEntry(string Source, decimal Weight); - -public sealed record PolicyIrProfileEnv(string Name, ImmutableArray<PolicyIrProfileEnvEntry> Entries); - -public sealed record PolicyIrProfileEnvEntry(PolicyExpression Condition, decimal Weight); - -public sealed record PolicyIrProfileScalar(string Name, PolicyIrLiteral Value); - -public sealed record PolicyIrRule( - string Name, - int Priority, - PolicyExpression When, - ImmutableArray<PolicyIrAction> ThenActions, - ImmutableArray<PolicyIrAction> ElseActions, - string Because); - -public abstract record PolicyIrAction; - -public sealed record PolicyIrAssignmentAction(ImmutableArray<string> Target, PolicyExpression Value) : PolicyIrAction; - -public sealed record PolicyIrAnnotateAction(ImmutableArray<string> Target, PolicyExpression Value) : PolicyIrAction; - -public sealed record PolicyIrIgnoreAction(PolicyExpression? Until, string? Because) : PolicyIrAction; - -public sealed record PolicyIrEscalateAction(PolicyExpression? To, PolicyExpression? When) : PolicyIrAction; - -public sealed record PolicyIrRequireVexAction(ImmutableSortedDictionary<string, PolicyExpression> Conditions) : PolicyIrAction; - -public sealed record PolicyIrWarnAction(PolicyExpression? Message) : PolicyIrAction; - -public sealed record PolicyIrDeferAction(PolicyExpression? 
Until) : PolicyIrAction; +using System.Collections.Immutable; + +namespace StellaOps.Policy.Engine.Compilation; + +public sealed record PolicyIrDocument( + string Name, + string Syntax, + ImmutableSortedDictionary<string, PolicyIrLiteral> Metadata, + ImmutableArray<PolicyIrProfile> Profiles, + ImmutableSortedDictionary<string, PolicyIrLiteral> Settings, + ImmutableArray<PolicyIrRule> Rules); + +public abstract record PolicyIrLiteral; + +public sealed record PolicyIrStringLiteral(string Value) : PolicyIrLiteral; + +public sealed record PolicyIrNumberLiteral(decimal Value) : PolicyIrLiteral; + +public sealed record PolicyIrBooleanLiteral(bool Value) : PolicyIrLiteral; + +public sealed record PolicyIrListLiteral(ImmutableArray<PolicyIrLiteral> Items) : PolicyIrLiteral; + +public sealed record PolicyIrProfile( + string Name, + ImmutableArray<PolicyIrProfileMap> Maps, + ImmutableArray<PolicyIrProfileEnv> Environments, + ImmutableArray<PolicyIrProfileScalar> Scalars); + +public sealed record PolicyIrProfileMap(string Name, ImmutableArray<PolicyIrProfileMapEntry> Entries); + +public sealed record PolicyIrProfileMapEntry(string Source, decimal Weight); + +public sealed record PolicyIrProfileEnv(string Name, ImmutableArray<PolicyIrProfileEnvEntry> Entries); + +public sealed record PolicyIrProfileEnvEntry(PolicyExpression Condition, decimal Weight); + +public sealed record PolicyIrProfileScalar(string Name, PolicyIrLiteral Value); + +public sealed record PolicyIrRule( + string Name, + int Priority, + PolicyExpression When, + ImmutableArray<PolicyIrAction> ThenActions, + ImmutableArray<PolicyIrAction> ElseActions, + string Because); + +public abstract record PolicyIrAction; + +public sealed record PolicyIrAssignmentAction(ImmutableArray<string> Target, PolicyExpression Value) : PolicyIrAction; + +public sealed record PolicyIrAnnotateAction(ImmutableArray<string> Target, PolicyExpression Value) : PolicyIrAction; + +public sealed record PolicyIrIgnoreAction(PolicyExpression? Until, string? Because) : PolicyIrAction; + +public sealed record PolicyIrEscalateAction(PolicyExpression? To, PolicyExpression? When) : PolicyIrAction; + +public sealed record PolicyIrRequireVexAction(ImmutableSortedDictionary<string, PolicyExpression> Conditions) : PolicyIrAction; + +public sealed record PolicyIrWarnAction(PolicyExpression? Message) : PolicyIrAction; + +public sealed record PolicyIrDeferAction(PolicyExpression? 
Until) : PolicyIrAction; diff --git a/src/StellaOps.Policy.Engine/Compilation/PolicyIrSerializer.cs b/src/Policy/StellaOps.Policy.Engine/Compilation/PolicyIrSerializer.cs similarity index 97% rename from src/StellaOps.Policy.Engine/Compilation/PolicyIrSerializer.cs rename to src/Policy/StellaOps.Policy.Engine/Compilation/PolicyIrSerializer.cs index fc7b97be..3796cb70 100644 --- a/src/StellaOps.Policy.Engine/Compilation/PolicyIrSerializer.cs +++ b/src/Policy/StellaOps.Policy.Engine/Compilation/PolicyIrSerializer.cs @@ -1,415 +1,415 @@ -using System.Buffers; -using System.Collections.Immutable; -using System.Text.Json; - -namespace StellaOps.Policy.Engine.Compilation; - -internal static class PolicyIrSerializer -{ - public static ImmutableArray<byte> Serialize(PolicyIrDocument document) - { - var buffer = new ArrayBufferWriter<byte>(); - using var writer = new Utf8JsonWriter(buffer, new JsonWriterOptions - { - Indented = false, - SkipValidation = false - }); - - WriteDocument(writer, document); - writer.Flush(); - - return buffer.WrittenSpan.ToArray().ToImmutableArray(); - } - - private static void WriteDocument(Utf8JsonWriter writer, PolicyIrDocument document) - { - writer.WriteStartObject(); - writer.WriteString("name", document.Name); - writer.WriteString("syntax", document.Syntax); - - writer.WritePropertyName("metadata"); - WriteLiteralDictionary(writer, document.Metadata); - - writer.WritePropertyName("profiles"); - writer.WriteStartArray(); - foreach (var profile in document.Profiles) - { - WriteProfile(writer, profile); - } - - writer.WriteEndArray(); - - writer.WritePropertyName("settings"); - WriteLiteralDictionary(writer, document.Settings); - - writer.WritePropertyName("rules"); - writer.WriteStartArray(); - foreach (var rule in document.Rules) - { - WriteRule(writer, rule); - } - - writer.WriteEndArray(); - writer.WriteEndObject(); - } - - private static void WriteProfile(Utf8JsonWriter writer, PolicyIrProfile profile) - { - writer.WriteStartObject(); - writer.WriteString("name", profile.Name); - - writer.WritePropertyName("maps"); - writer.WriteStartArray(); - foreach (var map in profile.Maps) - { - writer.WriteStartObject(); - writer.WriteString("name", map.Name); - writer.WritePropertyName("entries"); - writer.WriteStartArray(); - foreach (var entry in map.Entries) - { - writer.WriteStartObject(); - writer.WriteString("source", entry.Source); - writer.WriteNumber("weight", entry.Weight); - writer.WriteEndObject(); - } - - writer.WriteEndArray(); - writer.WriteEndObject(); - } - - writer.WriteEndArray(); - - writer.WritePropertyName("env"); - writer.WriteStartArray(); - foreach (var env in profile.Environments) - { - writer.WriteStartObject(); - writer.WriteString("name", env.Name); - writer.WritePropertyName("entries"); - writer.WriteStartArray(); - foreach (var entry in env.Entries) - { - writer.WriteStartObject(); - writer.WritePropertyName("condition"); - WriteExpression(writer, entry.Condition); - writer.WriteNumber("weight", entry.Weight); - writer.WriteEndObject(); - } - - writer.WriteEndArray(); - writer.WriteEndObject(); - } - - writer.WriteEndArray(); - - writer.WritePropertyName("scalars"); - writer.WriteStartArray(); - foreach (var scalar in profile.Scalars) - { - writer.WriteStartObject(); - writer.WriteString("name", scalar.Name); - writer.WritePropertyName("value"); - WriteLiteral(writer, scalar.Value); - writer.WriteEndObject(); - } - - writer.WriteEndArray(); - writer.WriteEndObject(); - } - - private static void WriteRule(Utf8JsonWriter writer, PolicyIrRule 
rule) - { - writer.WriteStartObject(); - writer.WriteString("name", rule.Name); - writer.WriteNumber("priority", rule.Priority); - writer.WritePropertyName("when"); - WriteExpression(writer, rule.When); - - writer.WritePropertyName("then"); - WriteActions(writer, rule.ThenActions); - - writer.WritePropertyName("else"); - WriteActions(writer, rule.ElseActions); - - writer.WriteString("because", rule.Because); - writer.WriteEndObject(); - } - - private static void WriteActions(Utf8JsonWriter writer, ImmutableArray<PolicyIrAction> actions) - { - writer.WriteStartArray(); - foreach (var action in actions) - { - WriteAction(writer, action); - } - - writer.WriteEndArray(); - } - - private static void WriteAction(Utf8JsonWriter writer, PolicyIrAction action) - { - switch (action) - { - case PolicyIrAssignmentAction assign: - writer.WriteStartObject(); - writer.WriteString("type", "assign"); - WriteReference(writer, assign.Target); - writer.WritePropertyName("value"); - WriteExpression(writer, assign.Value); - writer.WriteEndObject(); - break; - case PolicyIrAnnotateAction annotate: - writer.WriteStartObject(); - writer.WriteString("type", "annotate"); - WriteReference(writer, annotate.Target); - writer.WritePropertyName("value"); - WriteExpression(writer, annotate.Value); - writer.WriteEndObject(); - break; - case PolicyIrIgnoreAction ignore: - writer.WriteStartObject(); - writer.WriteString("type", "ignore"); - writer.WritePropertyName("until"); - WriteOptionalExpression(writer, ignore.Until); - writer.WriteString("because", ignore.Because ?? string.Empty); - writer.WriteEndObject(); - break; - case PolicyIrEscalateAction escalate: - writer.WriteStartObject(); - writer.WriteString("type", "escalate"); - writer.WritePropertyName("to"); - WriteOptionalExpression(writer, escalate.To); - writer.WritePropertyName("when"); - WriteOptionalExpression(writer, escalate.When); - writer.WriteEndObject(); - break; - case PolicyIrRequireVexAction require: - writer.WriteStartObject(); - writer.WriteString("type", "requireVex"); - writer.WritePropertyName("conditions"); - writer.WriteStartObject(); - foreach (var kvp in require.Conditions) - { - writer.WritePropertyName(kvp.Key); - WriteExpression(writer, kvp.Value); - } - - writer.WriteEndObject(); - writer.WriteEndObject(); - break; - case PolicyIrWarnAction warn: - writer.WriteStartObject(); - writer.WriteString("type", "warn"); - writer.WritePropertyName("message"); - WriteOptionalExpression(writer, warn.Message); - writer.WriteEndObject(); - break; - case PolicyIrDeferAction defer: - writer.WriteStartObject(); - writer.WriteString("type", "defer"); - writer.WritePropertyName("until"); - WriteOptionalExpression(writer, defer.Until); - writer.WriteEndObject(); - break; - } - } - - private static void WriteReference(Utf8JsonWriter writer, ImmutableArray<string> segments) - { - writer.WritePropertyName("target"); - writer.WriteStartArray(); - foreach (var segment in segments) - { - writer.WriteStringValue(segment); - } - - writer.WriteEndArray(); - } - - private static void WriteOptionalExpression(Utf8JsonWriter writer, PolicyExpression? 
expression) - { - if (expression is null) - { - writer.WriteNullValue(); - return; - } - - WriteExpression(writer, expression); - } - - private static void WriteExpression(Utf8JsonWriter writer, PolicyExpression expression) - { - switch (expression) - { - case PolicyLiteralExpression literal: - writer.WriteStartObject(); - writer.WriteString("type", "literal"); - writer.WritePropertyName("value"); - WriteLiteralValue(writer, literal.Value); - writer.WriteEndObject(); - break; - case PolicyListExpression list: - writer.WriteStartObject(); - writer.WriteString("type", "list"); - writer.WritePropertyName("items"); - writer.WriteStartArray(); - foreach (var item in list.Items) - { - WriteExpression(writer, item); - } - - writer.WriteEndArray(); - writer.WriteEndObject(); - break; - case PolicyIdentifierExpression identifier: - writer.WriteStartObject(); - writer.WriteString("type", "identifier"); - writer.WriteString("name", identifier.Name); - writer.WriteEndObject(); - break; - case PolicyMemberAccessExpression member: - writer.WriteStartObject(); - writer.WriteString("type", "member"); - writer.WritePropertyName("target"); - WriteExpression(writer, member.Target); - writer.WriteString("member", member.Member); - writer.WriteEndObject(); - break; - case PolicyInvocationExpression invocation: - writer.WriteStartObject(); - writer.WriteString("type", "call"); - writer.WritePropertyName("target"); - WriteExpression(writer, invocation.Target); - writer.WritePropertyName("args"); - writer.WriteStartArray(); - foreach (var arg in invocation.Arguments) - { - WriteExpression(writer, arg); - } - - writer.WriteEndArray(); - writer.WriteEndObject(); - break; - case PolicyIndexerExpression indexer: - writer.WriteStartObject(); - writer.WriteString("type", "indexer"); - writer.WritePropertyName("target"); - WriteExpression(writer, indexer.Target); - writer.WritePropertyName("index"); - WriteExpression(writer, indexer.Index); - writer.WriteEndObject(); - break; - case PolicyUnaryExpression unary: - writer.WriteStartObject(); - writer.WriteString("type", "unary"); - writer.WriteString("op", unary.Operator switch - { - PolicyUnaryOperator.Not => "not", - _ => unary.Operator.ToString().ToLowerInvariant(), - }); - writer.WritePropertyName("operand"); - WriteExpression(writer, unary.Operand); - writer.WriteEndObject(); - break; - case PolicyBinaryExpression binary: - writer.WriteStartObject(); - writer.WriteString("type", "binary"); - writer.WriteString("op", GetBinaryOperator(binary.Operator)); - writer.WritePropertyName("left"); - WriteExpression(writer, binary.Left); - writer.WritePropertyName("right"); - WriteExpression(writer, binary.Right); - writer.WriteEndObject(); - break; - default: - writer.WriteStartObject(); - writer.WriteString("type", "unknown"); - writer.WriteEndObject(); - break; - } - } - - private static string GetBinaryOperator(PolicyBinaryOperator op) => op switch - { - PolicyBinaryOperator.And => "and", - PolicyBinaryOperator.Or => "or", - PolicyBinaryOperator.Equal => "eq", - PolicyBinaryOperator.NotEqual => "neq", - PolicyBinaryOperator.LessThan => "lt", - PolicyBinaryOperator.LessThanOrEqual => "lte", - PolicyBinaryOperator.GreaterThan => "gt", - PolicyBinaryOperator.GreaterThanOrEqual => "gte", - PolicyBinaryOperator.In => "in", - PolicyBinaryOperator.NotIn => "not_in", - _ => op.ToString().ToLowerInvariant(), - }; - - private static void WriteLiteralDictionary(Utf8JsonWriter writer, ImmutableSortedDictionary<string, PolicyIrLiteral> dictionary) - { - writer.WriteStartObject(); - 
foreach (var kvp in dictionary) - { - writer.WritePropertyName(kvp.Key); - WriteLiteral(writer, kvp.Value); - } - - writer.WriteEndObject(); - } - - private static void WriteLiteral(Utf8JsonWriter writer, PolicyIrLiteral literal) - { - switch (literal) - { - case PolicyIrStringLiteral s: - writer.WriteStringValue(s.Value); - break; - case PolicyIrNumberLiteral n: - writer.WriteNumberValue(n.Value); - break; - case PolicyIrBooleanLiteral b: - writer.WriteBooleanValue(b.Value); - break; - case PolicyIrListLiteral list: - writer.WriteStartArray(); - foreach (var item in list.Items) - { - WriteLiteral(writer, item); - } - - writer.WriteEndArray(); - break; - default: - writer.WriteNullValue(); - break; - } - } - - private static void WriteLiteralValue(Utf8JsonWriter writer, object? value) - { - switch (value) - { - case null: - writer.WriteNullValue(); - break; - case string s: - writer.WriteStringValue(s); - break; - case bool b: - writer.WriteBooleanValue(b); - break; - case decimal dec: - writer.WriteNumberValue(dec); - break; - case double dbl: - writer.WriteNumberValue(dbl); - break; - case int i: - writer.WriteNumberValue(i); - break; - default: - writer.WriteStringValue(value.ToString()); - break; - } - } -} +using System.Buffers; +using System.Collections.Immutable; +using System.Text.Json; + +namespace StellaOps.Policy.Engine.Compilation; + +internal static class PolicyIrSerializer +{ + public static ImmutableArray<byte> Serialize(PolicyIrDocument document) + { + var buffer = new ArrayBufferWriter<byte>(); + using var writer = new Utf8JsonWriter(buffer, new JsonWriterOptions + { + Indented = false, + SkipValidation = false + }); + + WriteDocument(writer, document); + writer.Flush(); + + return buffer.WrittenSpan.ToArray().ToImmutableArray(); + } + + private static void WriteDocument(Utf8JsonWriter writer, PolicyIrDocument document) + { + writer.WriteStartObject(); + writer.WriteString("name", document.Name); + writer.WriteString("syntax", document.Syntax); + + writer.WritePropertyName("metadata"); + WriteLiteralDictionary(writer, document.Metadata); + + writer.WritePropertyName("profiles"); + writer.WriteStartArray(); + foreach (var profile in document.Profiles) + { + WriteProfile(writer, profile); + } + + writer.WriteEndArray(); + + writer.WritePropertyName("settings"); + WriteLiteralDictionary(writer, document.Settings); + + writer.WritePropertyName("rules"); + writer.WriteStartArray(); + foreach (var rule in document.Rules) + { + WriteRule(writer, rule); + } + + writer.WriteEndArray(); + writer.WriteEndObject(); + } + + private static void WriteProfile(Utf8JsonWriter writer, PolicyIrProfile profile) + { + writer.WriteStartObject(); + writer.WriteString("name", profile.Name); + + writer.WritePropertyName("maps"); + writer.WriteStartArray(); + foreach (var map in profile.Maps) + { + writer.WriteStartObject(); + writer.WriteString("name", map.Name); + writer.WritePropertyName("entries"); + writer.WriteStartArray(); + foreach (var entry in map.Entries) + { + writer.WriteStartObject(); + writer.WriteString("source", entry.Source); + writer.WriteNumber("weight", entry.Weight); + writer.WriteEndObject(); + } + + writer.WriteEndArray(); + writer.WriteEndObject(); + } + + writer.WriteEndArray(); + + writer.WritePropertyName("env"); + writer.WriteStartArray(); + foreach (var env in profile.Environments) + { + writer.WriteStartObject(); + writer.WriteString("name", env.Name); + writer.WritePropertyName("entries"); + writer.WriteStartArray(); + foreach (var entry in env.Entries) + { + 
writer.WriteStartObject(); + writer.WritePropertyName("condition"); + WriteExpression(writer, entry.Condition); + writer.WriteNumber("weight", entry.Weight); + writer.WriteEndObject(); + } + + writer.WriteEndArray(); + writer.WriteEndObject(); + } + + writer.WriteEndArray(); + + writer.WritePropertyName("scalars"); + writer.WriteStartArray(); + foreach (var scalar in profile.Scalars) + { + writer.WriteStartObject(); + writer.WriteString("name", scalar.Name); + writer.WritePropertyName("value"); + WriteLiteral(writer, scalar.Value); + writer.WriteEndObject(); + } + + writer.WriteEndArray(); + writer.WriteEndObject(); + } + + private static void WriteRule(Utf8JsonWriter writer, PolicyIrRule rule) + { + writer.WriteStartObject(); + writer.WriteString("name", rule.Name); + writer.WriteNumber("priority", rule.Priority); + writer.WritePropertyName("when"); + WriteExpression(writer, rule.When); + + writer.WritePropertyName("then"); + WriteActions(writer, rule.ThenActions); + + writer.WritePropertyName("else"); + WriteActions(writer, rule.ElseActions); + + writer.WriteString("because", rule.Because); + writer.WriteEndObject(); + } + + private static void WriteActions(Utf8JsonWriter writer, ImmutableArray<PolicyIrAction> actions) + { + writer.WriteStartArray(); + foreach (var action in actions) + { + WriteAction(writer, action); + } + + writer.WriteEndArray(); + } + + private static void WriteAction(Utf8JsonWriter writer, PolicyIrAction action) + { + switch (action) + { + case PolicyIrAssignmentAction assign: + writer.WriteStartObject(); + writer.WriteString("type", "assign"); + WriteReference(writer, assign.Target); + writer.WritePropertyName("value"); + WriteExpression(writer, assign.Value); + writer.WriteEndObject(); + break; + case PolicyIrAnnotateAction annotate: + writer.WriteStartObject(); + writer.WriteString("type", "annotate"); + WriteReference(writer, annotate.Target); + writer.WritePropertyName("value"); + WriteExpression(writer, annotate.Value); + writer.WriteEndObject(); + break; + case PolicyIrIgnoreAction ignore: + writer.WriteStartObject(); + writer.WriteString("type", "ignore"); + writer.WritePropertyName("until"); + WriteOptionalExpression(writer, ignore.Until); + writer.WriteString("because", ignore.Because ?? 
string.Empty); + writer.WriteEndObject(); + break; + case PolicyIrEscalateAction escalate: + writer.WriteStartObject(); + writer.WriteString("type", "escalate"); + writer.WritePropertyName("to"); + WriteOptionalExpression(writer, escalate.To); + writer.WritePropertyName("when"); + WriteOptionalExpression(writer, escalate.When); + writer.WriteEndObject(); + break; + case PolicyIrRequireVexAction require: + writer.WriteStartObject(); + writer.WriteString("type", "requireVex"); + writer.WritePropertyName("conditions"); + writer.WriteStartObject(); + foreach (var kvp in require.Conditions) + { + writer.WritePropertyName(kvp.Key); + WriteExpression(writer, kvp.Value); + } + + writer.WriteEndObject(); + writer.WriteEndObject(); + break; + case PolicyIrWarnAction warn: + writer.WriteStartObject(); + writer.WriteString("type", "warn"); + writer.WritePropertyName("message"); + WriteOptionalExpression(writer, warn.Message); + writer.WriteEndObject(); + break; + case PolicyIrDeferAction defer: + writer.WriteStartObject(); + writer.WriteString("type", "defer"); + writer.WritePropertyName("until"); + WriteOptionalExpression(writer, defer.Until); + writer.WriteEndObject(); + break; + } + } + + private static void WriteReference(Utf8JsonWriter writer, ImmutableArray<string> segments) + { + writer.WritePropertyName("target"); + writer.WriteStartArray(); + foreach (var segment in segments) + { + writer.WriteStringValue(segment); + } + + writer.WriteEndArray(); + } + + private static void WriteOptionalExpression(Utf8JsonWriter writer, PolicyExpression? expression) + { + if (expression is null) + { + writer.WriteNullValue(); + return; + } + + WriteExpression(writer, expression); + } + + private static void WriteExpression(Utf8JsonWriter writer, PolicyExpression expression) + { + switch (expression) + { + case PolicyLiteralExpression literal: + writer.WriteStartObject(); + writer.WriteString("type", "literal"); + writer.WritePropertyName("value"); + WriteLiteralValue(writer, literal.Value); + writer.WriteEndObject(); + break; + case PolicyListExpression list: + writer.WriteStartObject(); + writer.WriteString("type", "list"); + writer.WritePropertyName("items"); + writer.WriteStartArray(); + foreach (var item in list.Items) + { + WriteExpression(writer, item); + } + + writer.WriteEndArray(); + writer.WriteEndObject(); + break; + case PolicyIdentifierExpression identifier: + writer.WriteStartObject(); + writer.WriteString("type", "identifier"); + writer.WriteString("name", identifier.Name); + writer.WriteEndObject(); + break; + case PolicyMemberAccessExpression member: + writer.WriteStartObject(); + writer.WriteString("type", "member"); + writer.WritePropertyName("target"); + WriteExpression(writer, member.Target); + writer.WriteString("member", member.Member); + writer.WriteEndObject(); + break; + case PolicyInvocationExpression invocation: + writer.WriteStartObject(); + writer.WriteString("type", "call"); + writer.WritePropertyName("target"); + WriteExpression(writer, invocation.Target); + writer.WritePropertyName("args"); + writer.WriteStartArray(); + foreach (var arg in invocation.Arguments) + { + WriteExpression(writer, arg); + } + + writer.WriteEndArray(); + writer.WriteEndObject(); + break; + case PolicyIndexerExpression indexer: + writer.WriteStartObject(); + writer.WriteString("type", "indexer"); + writer.WritePropertyName("target"); + WriteExpression(writer, indexer.Target); + writer.WritePropertyName("index"); + WriteExpression(writer, indexer.Index); + writer.WriteEndObject(); + break; + case 
PolicyUnaryExpression unary: + writer.WriteStartObject(); + writer.WriteString("type", "unary"); + writer.WriteString("op", unary.Operator switch + { + PolicyUnaryOperator.Not => "not", + _ => unary.Operator.ToString().ToLowerInvariant(), + }); + writer.WritePropertyName("operand"); + WriteExpression(writer, unary.Operand); + writer.WriteEndObject(); + break; + case PolicyBinaryExpression binary: + writer.WriteStartObject(); + writer.WriteString("type", "binary"); + writer.WriteString("op", GetBinaryOperator(binary.Operator)); + writer.WritePropertyName("left"); + WriteExpression(writer, binary.Left); + writer.WritePropertyName("right"); + WriteExpression(writer, binary.Right); + writer.WriteEndObject(); + break; + default: + writer.WriteStartObject(); + writer.WriteString("type", "unknown"); + writer.WriteEndObject(); + break; + } + } + + private static string GetBinaryOperator(PolicyBinaryOperator op) => op switch + { + PolicyBinaryOperator.And => "and", + PolicyBinaryOperator.Or => "or", + PolicyBinaryOperator.Equal => "eq", + PolicyBinaryOperator.NotEqual => "neq", + PolicyBinaryOperator.LessThan => "lt", + PolicyBinaryOperator.LessThanOrEqual => "lte", + PolicyBinaryOperator.GreaterThan => "gt", + PolicyBinaryOperator.GreaterThanOrEqual => "gte", + PolicyBinaryOperator.In => "in", + PolicyBinaryOperator.NotIn => "not_in", + _ => op.ToString().ToLowerInvariant(), + }; + + private static void WriteLiteralDictionary(Utf8JsonWriter writer, ImmutableSortedDictionary<string, PolicyIrLiteral> dictionary) + { + writer.WriteStartObject(); + foreach (var kvp in dictionary) + { + writer.WritePropertyName(kvp.Key); + WriteLiteral(writer, kvp.Value); + } + + writer.WriteEndObject(); + } + + private static void WriteLiteral(Utf8JsonWriter writer, PolicyIrLiteral literal) + { + switch (literal) + { + case PolicyIrStringLiteral s: + writer.WriteStringValue(s.Value); + break; + case PolicyIrNumberLiteral n: + writer.WriteNumberValue(n.Value); + break; + case PolicyIrBooleanLiteral b: + writer.WriteBooleanValue(b.Value); + break; + case PolicyIrListLiteral list: + writer.WriteStartArray(); + foreach (var item in list.Items) + { + WriteLiteral(writer, item); + } + + writer.WriteEndArray(); + break; + default: + writer.WriteNullValue(); + break; + } + } + + private static void WriteLiteralValue(Utf8JsonWriter writer, object? 
value) + { + switch (value) + { + case null: + writer.WriteNullValue(); + break; + case string s: + writer.WriteStringValue(s); + break; + case bool b: + writer.WriteBooleanValue(b); + break; + case decimal dec: + writer.WriteNumberValue(dec); + break; + case double dbl: + writer.WriteNumberValue(dbl); + break; + case int i: + writer.WriteNumberValue(i); + break; + default: + writer.WriteStringValue(value.ToString()); + break; + } + } +} diff --git a/src/StellaOps.Policy.Engine/Compilation/PolicyParser.cs b/src/Policy/StellaOps.Policy.Engine/Compilation/PolicyParser.cs similarity index 97% rename from src/StellaOps.Policy.Engine/Compilation/PolicyParser.cs rename to src/Policy/StellaOps.Policy.Engine/Compilation/PolicyParser.cs index 4fbb4012..a6ebf82f 100644 --- a/src/StellaOps.Policy.Engine/Compilation/PolicyParser.cs +++ b/src/Policy/StellaOps.Policy.Engine/Compilation/PolicyParser.cs @@ -1,678 +1,678 @@ -using System; -using System.Collections.Generic; -using System.Collections.Immutable; -using StellaOps.Policy; - -namespace StellaOps.Policy.Engine.Compilation; - -internal sealed class PolicyParser -{ - private readonly ImmutableArray<DslToken> tokens; - private readonly List<PolicyIssue> diagnostics = new(); - private int position; - - private PolicyParser(ImmutableArray<DslToken> tokens) - { - this.tokens = tokens; - } - - public static PolicyParseResult Parse(string source) - { - if (source is null) - { - throw new ArgumentNullException(nameof(source)); - } - - var tokenization = DslTokenizer.Tokenize(source); - var parser = new PolicyParser(tokenization.Tokens); - var document = parser.ParseDocument(); - var allDiagnostics = tokenization.Diagnostics.AddRange(parser.diagnostics).ToImmutableArray(); - return new PolicyParseResult(document, allDiagnostics); - } - - private PolicyDocumentNode? ParseDocument() - { - if (!Match(TokenKind.KeywordPolicy)) - { - diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.MissingPolicyHeader, "Expected 'policy' declaration.", "policy")); - return null; - } - - var nameToken = Consume(TokenKind.StringLiteral, "Policy name must be a string literal.", "policy.name"); - var name = nameToken.Value as string ?? nameToken.Text; - - Consume(TokenKind.KeywordSyntax, "Expected 'syntax' declaration.", "policy.syntax"); - var syntaxToken = Consume(TokenKind.StringLiteral, "Policy syntax must be a string literal.", "policy.syntax.value"); - var syntax = syntaxToken.Value as string ?? 
syntaxToken.Text; - - Consume(TokenKind.LeftBrace, "Expected '{' to start policy body.", "policy.body"); - - var metadataBuilder = ImmutableDictionary.CreateBuilder<string, PolicyLiteralValue>(StringComparer.Ordinal); - var settingsBuilder = ImmutableDictionary.CreateBuilder<string, PolicyLiteralValue>(StringComparer.Ordinal); - var profiles = ImmutableArray.CreateBuilder<PolicyProfileNode>(); - var rules = ImmutableArray.CreateBuilder<PolicyRuleNode>(); - - while (!Check(TokenKind.RightBrace) && !IsAtEnd) - { - if (Match(TokenKind.KeywordMetadata)) - { - foreach (var kvp in ParseKeyValueBlock("policy.metadata")) - { - metadataBuilder[kvp.Key] = kvp.Value; - } - - continue; - } - - if (Match(TokenKind.KeywordSettings)) - { - foreach (var kvp in ParseKeyValueBlock("policy.settings")) - { - settingsBuilder[kvp.Key] = kvp.Value; - } - - continue; - } - - if (Match(TokenKind.KeywordProfile)) - { - var profile = ParseProfile(); - if (profile is not null) - { - profiles.Add(profile); - } - - continue; - } - - if (Match(TokenKind.KeywordRule)) - { - var rule = ParseRule(); - if (rule is not null) - { - rules.Add(rule); - } - - continue; - } - - diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.UnexpectedSection, $"Unexpected token '{Current.Text}' in policy body.", "policy.body")); - Advance(); - } - - var close = Consume(TokenKind.RightBrace, "Expected '}' to close policy definition.", "policy"); - - if (!string.Equals(syntax, "stella-dsl@1", StringComparison.Ordinal)) - { - diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.UnsupportedSyntaxVersion, $"Unsupported syntax '{syntax}'.", "policy.syntax")); - } - - var span = new SourceSpan(tokens[0].Span.Start, close.Span.End); - return new PolicyDocumentNode( - name, - syntax, - metadataBuilder.ToImmutable(), - profiles.ToImmutable(), - settingsBuilder.ToImmutable(), - rules.ToImmutable(), - span); - } - - private PolicyProfileNode? ParseProfile() - { - var nameToken = Consume(TokenKind.Identifier, "Profile requires a name.", "policy.profile"); - var name = nameToken.Text; - Consume(TokenKind.LeftBrace, "Expected '{' after profile declaration.", $"policy.profile.{name}"); - - var start = nameToken.Span.Start; - var depth = 1; - while (depth > 0 && !IsAtEnd) - { - if (Match(TokenKind.LeftBrace)) - { - depth++; - } - else if (Match(TokenKind.RightBrace)) - { - depth--; - } - else - { - Advance(); - } - } - - var close = Previous; - return new PolicyProfileNode( - name, - ImmutableArray<PolicyProfileItemNode>.Empty, - new SourceSpan(start, close.Span.End)); - } - - private PolicyRuleNode? ParseRule() - { - var nameToken = Consume(TokenKind.Identifier, "Rule requires a name.", "policy.rule"); - var name = nameToken.Text; - - var priority = 0; - if (Match(TokenKind.KeywordPriority)) - { - var priorityToken = Consume(TokenKind.NumberLiteral, "Priority must be numeric.", $"policy.rule.{name}"); - if (priorityToken.Value is decimal dec) - { - priority = (int)Math.Round(dec, MidpointRounding.AwayFromZero); - } - } - - Consume(TokenKind.LeftBrace, "Expected '{' to start rule.", $"policy.rule.{name}"); - Consume(TokenKind.KeywordWhen, "Rule requires a 'when' clause.", $"policy.rule.{name}"); - var when = ParseExpression(); - - Consume(TokenKind.KeywordThen, "Rule requires a 'then' clause.", $"policy.rule.{name}"); - var thenActions = ParseActions(name, "then"); - - var elseActions = ImmutableArray<PolicyActionNode>.Empty; - if (Match(TokenKind.KeywordElse)) - { - elseActions = ParseActions(name, "else"); - } - - string? 
because = null; - if (Match(TokenKind.KeywordBecause)) - { - var becauseToken = Consume(TokenKind.StringLiteral, "Because clause must be string.", $"policy.rule.{name}.because"); - because = becauseToken.Value as string ?? becauseToken.Text; - } - - var close = Consume(TokenKind.RightBrace, "Expected '}' to close rule.", $"policy.rule.{name}"); - - if (because is null) - { - diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.MissingBecauseClause, $"Rule '{name}' missing 'because' clause.", $"policy.rule.{name}")); - } - - return new PolicyRuleNode(name, priority, when, thenActions, elseActions, because, new SourceSpan(nameToken.Span.Start, close.Span.End)); - } - - private ImmutableArray<PolicyActionNode> ParseActions(string ruleName, string clause) - { - var actions = ImmutableArray.CreateBuilder<PolicyActionNode>(); - while (!Check(TokenKind.RightBrace) && !Check(TokenKind.KeywordElse) && !Check(TokenKind.KeywordBecause) && !IsAtEnd) - { - if (Check(TokenKind.Identifier)) - { - actions.Add(ParseAssignmentAction(ruleName, clause)); - continue; - } - - if (Match(TokenKind.KeywordAnnotate)) - { - actions.Add(ParseAnnotateAction(ruleName, clause)); - continue; - } - - if (Match(TokenKind.KeywordWarn)) - { - actions.Add(ParseWarnAction()); - continue; - } - - if (Match(TokenKind.KeywordEscalate)) - { - actions.Add(ParseEscalateAction()); - continue; - } - - if (Match(TokenKind.KeywordRequireVex)) - { - actions.Add(ParseRequireVexAction(ruleName, clause)); - continue; - } - - if (Match(TokenKind.KeywordIgnore)) - { - actions.Add(ParseIgnoreAction(ruleName, clause)); - continue; - } - - if (Match(TokenKind.KeywordDefer)) - { - actions.Add(ParseDeferAction(ruleName, clause)); - continue; - } - - diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.InvalidAction, $"Unexpected token '{Current.Text}' in {clause} actions.", $"policy.rule.{ruleName}.{clause}")); - Advance(); - } - - return actions.ToImmutable(); - } - - private PolicyActionNode ParseAssignmentAction(string ruleName, string clause) - { - var segments = ImmutableArray.CreateBuilder<string>(); - var first = Consume(TokenKind.Identifier, "Assignment target must start with identifier.", $"policy.rule.{ruleName}.{clause}"); - segments.Add(first.Text); - while (Match(TokenKind.Dot)) - { - segments.Add(Consume(TokenKind.Identifier, "Expected identifier after '.'.", $"policy.rule.{ruleName}.{clause}").Text); - } - - Consume(TokenKind.Define, "Expected ':=' in action.", $"policy.rule.{ruleName}.{clause}"); - var value = ParseExpression(); - Match(TokenKind.Semicolon); - return new PolicyAssignmentActionNode(new PolicyReference(segments.ToImmutable(), new SourceSpan(first.Span.Start, value.Span.End)), value, new SourceSpan(first.Span.Start, value.Span.End)); - } - - private PolicyActionNode ParseAnnotateAction(string ruleName, string clause) - { - var reference = ParseReference($"policy.rule.{ruleName}.{clause}.annotate"); - Consume(TokenKind.Define, "Expected ':=' in annotate action.", $"policy.rule.{ruleName}.{clause}.annotate"); - var value = ParseExpression(); - Match(TokenKind.Semicolon); - return new PolicyAnnotateActionNode(reference, value, new SourceSpan(reference.Span.Start, value.Span.End)); - } - - private PolicyActionNode ParseWarnAction() - { - PolicyExpression? message = null; - if (Match(TokenKind.KeywordMessage)) - { - message = ParseExpression(); - } - - Match(TokenKind.Semicolon); - var span = message?.Span ?? 
Previous.Span; - return new PolicyWarnActionNode(message, span); - } - - private PolicyActionNode ParseEscalateAction() - { - PolicyExpression? to = null; - PolicyExpression? when = null; - - if (Match(TokenKind.KeywordTo)) - { - to = ParseExpression(); - } - - if (Match(TokenKind.KeywordWhen)) - { - when = ParseExpression(); - } - - Match(TokenKind.Semicolon); - var end = when?.Span.End ?? to?.Span.End ?? Previous.Span.End; - return new PolicyEscalateActionNode(to, when, new SourceSpan(Previous.Span.Start, end)); - } - - private PolicyActionNode ParseRequireVexAction(string ruleName, string clause) - { - Consume(TokenKind.LeftBrace, "Expected '{' after requireVex.", $"policy.rule.{ruleName}.{clause}.requireVex"); - var builder = ImmutableDictionary.CreateBuilder<string, PolicyExpression>(StringComparer.Ordinal); - while (!Check(TokenKind.RightBrace) && !IsAtEnd) - { - var key = Consume(TokenKind.Identifier, "requireVex key must be identifier.", $"policy.rule.{ruleName}.{clause}.requireVex").Text; - Consume(TokenKind.Assign, "Expected '=' in requireVex condition.", $"policy.rule.{ruleName}.{clause}.requireVex"); - builder[key] = ParseExpression(); - Match(TokenKind.Comma); - } - - var close = Consume(TokenKind.RightBrace, "Expected '}' to close requireVex block.", $"policy.rule.{ruleName}.{clause}.requireVex"); - Match(TokenKind.Semicolon); - return new PolicyRequireVexActionNode(builder.ToImmutable(), new SourceSpan(close.Span.Start, close.Span.End)); - } - - private PolicyActionNode ParseIgnoreAction(string ruleName, string clause) - { - PolicyExpression? until = null; - string? because = null; - if (Match(TokenKind.KeywordUntil)) - { - until = ParseExpression(); - } - - if (Match(TokenKind.KeywordBecause)) - { - var becauseToken = Consume(TokenKind.StringLiteral, "Ignore 'because' must be string.", $"policy.rule.{ruleName}.{clause}.ignore"); - because = becauseToken.Value as string ?? becauseToken.Text; - } - - Match(TokenKind.Semicolon); - return new PolicyIgnoreActionNode(until, because, new SourceSpan(Previous.Span.Start, (until?.Span.End ?? Previous.Span.End))); - } - - private PolicyActionNode ParseDeferAction(string ruleName, string clause) - { - PolicyExpression? until = null; - if (Match(TokenKind.KeywordUntil)) - { - until = ParseExpression(); - } - - Match(TokenKind.Semicolon); - return new PolicyDeferActionNode(until, new SourceSpan(Previous.Span.Start, (until?.Span.End ?? 
Previous.Span.End))); - } - - private PolicyReference ParseReference(string path) - { - var segments = ImmutableArray.CreateBuilder<string>(); - var first = Consume(TokenKind.Identifier, "Expected identifier.", path); - segments.Add(first.Text); - while (Match(TokenKind.Dot)) - { - segments.Add(Consume(TokenKind.Identifier, "Expected identifier after '.'.", path).Text); - } - - return new PolicyReference(segments.ToImmutable(), first.Span); - } - - private Dictionary<string, PolicyLiteralValue> ParseKeyValueBlock(string path) - { - Consume(TokenKind.LeftBrace, "Expected '{'.", path); - var entries = new Dictionary<string, PolicyLiteralValue>(StringComparer.Ordinal); - while (!Check(TokenKind.RightBrace) && !IsAtEnd) - { - var key = Consume(TokenKind.Identifier, "Expected identifier.", path).Text; - Consume(TokenKind.Assign, "Expected '='.", path); - entries[key] = ParseLiteralValue(path); - Match(TokenKind.Semicolon); - } - - Consume(TokenKind.RightBrace, "Expected '}'.", path); - return entries; - } - - private PolicyLiteralValue ParseLiteralValue(string path) - { - if (Match(TokenKind.StringLiteral)) - { - return new PolicyStringLiteral(Previous.Value as string ?? Previous.Text, Previous.Span); - } - - if (Match(TokenKind.NumberLiteral)) - { - return new PolicyNumberLiteral(Previous.Value is decimal dec ? dec : 0m, Previous.Span); - } - - if (Match(TokenKind.BooleanLiteral)) - { - return new PolicyBooleanLiteral(Previous.Value is bool b && b, Previous.Span); - } - - if (Match(TokenKind.LeftBracket)) - { - var start = Previous.Span.Start; - var items = ImmutableArray.CreateBuilder<PolicyLiteralValue>(); - while (!Check(TokenKind.RightBracket) && !IsAtEnd) - { - items.Add(ParseLiteralValue(path)); - Match(TokenKind.Comma); - } - - var close = Consume(TokenKind.RightBracket, "Expected ']' in list literal.", path); - return new PolicyListLiteral(items.ToImmutable(), new SourceSpan(start, close.Span.End)); - } - - diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.InvalidLiteral, "Invalid literal.", path)); - return new PolicyStringLiteral(string.Empty, Current.Span); - } - - private PolicyExpression ParseExpression() => ParseOr(); - - private PolicyExpression ParseOr() - { - var expr = ParseAnd(); - while (Match(TokenKind.KeywordOr)) - { - var right = ParseAnd(); - expr = new PolicyBinaryExpression(expr, PolicyBinaryOperator.Or, right, new SourceSpan(expr.Span.Start, right.Span.End)); - } - - return expr; - } - - private PolicyExpression ParseAnd() - { - var expr = ParseEquality(); - while (Match(TokenKind.KeywordAnd)) - { - var right = ParseEquality(); - expr = new PolicyBinaryExpression(expr, PolicyBinaryOperator.And, right, new SourceSpan(expr.Span.Start, right.Span.End)); - } - - return expr; - } - - private PolicyExpression ParseEquality() - { - var expr = ParseUnary(); - while (true) - { - if (Match(TokenKind.EqualEqual)) - { - var right = ParseUnary(); - expr = new PolicyBinaryExpression(expr, PolicyBinaryOperator.Equal, right, new SourceSpan(expr.Span.Start, right.Span.End)); - } - else if (Match(TokenKind.NotEqual)) - { - var right = ParseUnary(); - expr = new PolicyBinaryExpression(expr, PolicyBinaryOperator.NotEqual, right, new SourceSpan(expr.Span.Start, right.Span.End)); - } - else if (Match(TokenKind.KeywordIn)) - { - var right = ParseUnary(); - expr = new PolicyBinaryExpression(expr, PolicyBinaryOperator.In, right, new SourceSpan(expr.Span.Start, right.Span.End)); - } - else if (Match(TokenKind.KeywordNot)) - { - if (Match(TokenKind.KeywordIn)) - { - var right = 
ParseUnary(); - expr = new PolicyBinaryExpression(expr, PolicyBinaryOperator.NotIn, right, new SourceSpan(expr.Span.Start, right.Span.End)); - } - else - { - diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.UnexpectedToken, "Expected 'in' after 'not'.", "expression.not")); - } - } - else if (Match(TokenKind.LessThan)) - { - var right = ParseUnary(); - expr = new PolicyBinaryExpression(expr, PolicyBinaryOperator.LessThan, right, new SourceSpan(expr.Span.Start, right.Span.End)); - } - else if (Match(TokenKind.LessThanOrEqual)) - { - var right = ParseUnary(); - expr = new PolicyBinaryExpression(expr, PolicyBinaryOperator.LessThanOrEqual, right, new SourceSpan(expr.Span.Start, right.Span.End)); - } - else if (Match(TokenKind.GreaterThan)) - { - var right = ParseUnary(); - expr = new PolicyBinaryExpression(expr, PolicyBinaryOperator.GreaterThan, right, new SourceSpan(expr.Span.Start, right.Span.End)); - } - else if (Match(TokenKind.GreaterThanOrEqual)) - { - var right = ParseUnary(); - expr = new PolicyBinaryExpression(expr, PolicyBinaryOperator.GreaterThanOrEqual, right, new SourceSpan(expr.Span.Start, right.Span.End)); - } - else - { - break; - } - } - - return expr; - } - - private PolicyExpression ParseUnary() - { - if (Match(TokenKind.KeywordNot)) - { - var operand = ParseUnary(); - return new PolicyUnaryExpression(PolicyUnaryOperator.Not, operand, new SourceSpan(Previous.Span.Start, operand.Span.End)); - } - - return ParsePrimary(); - } - - private PolicyExpression ParsePrimary() - { - if (Match(TokenKind.StringLiteral)) - { - return new PolicyLiteralExpression(Previous.Value as string ?? Previous.Text, Previous.Span); - } - - if (Match(TokenKind.NumberLiteral)) - { - return new PolicyLiteralExpression(Previous.Value ?? 0m, Previous.Span); - } - - if (Match(TokenKind.BooleanLiteral)) - { - return new PolicyLiteralExpression(Previous.Value ?? 
false, Previous.Span); - } - - if (Match(TokenKind.LeftBracket)) - { - var start = Previous.Span.Start; - var items = ImmutableArray.CreateBuilder<PolicyExpression>(); - while (!Check(TokenKind.RightBracket) && !IsAtEnd) - { - items.Add(ParseExpression()); - Match(TokenKind.Comma); - } - - var close = Consume(TokenKind.RightBracket, "Expected ']' to close list expression.", "expression.list"); - return new PolicyListExpression(items.ToImmutable(), new SourceSpan(start, close.Span.End)); - } - - if (Match(TokenKind.LeftParen)) - { - var expr = ParseExpression(); - Consume(TokenKind.RightParen, "Expected ')' to close grouped expression.", "expression.group"); - return expr; - } - - if (Match(TokenKind.Identifier)) - { - return ParseIdentifierExpression(Previous); - } - - if (Match(TokenKind.KeywordEnv)) - { - return ParseIdentifierExpression(Previous); - } - - diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.UnexpectedToken, $"Unexpected token '{Current.Text}' in expression.", "expression")); - var bad = Advance(); - return new PolicyLiteralExpression(null, bad.Span); - } - - private PolicyExpression ParseIdentifierExpression(DslToken identifier) - { - PolicyExpression expr = new PolicyIdentifierExpression(identifier.Text, identifier.Span); - while (true) - { - if (Match(TokenKind.Dot)) - { - var member = Consume(TokenKind.Identifier, "Expected identifier after '.'.", "expression.member"); - expr = new PolicyMemberAccessExpression(expr, member.Text, new SourceSpan(expr.Span.Start, member.Span.End)); - continue; - } - - if (Match(TokenKind.LeftParen)) - { - var args = ImmutableArray.CreateBuilder<PolicyExpression>(); - if (!Check(TokenKind.RightParen)) - { - do - { - args.Add(ParseExpression()); - } - while (Match(TokenKind.Comma)); - } - - var close = Consume(TokenKind.RightParen, "Expected ')' to close invocation.", "expression.call"); - expr = new PolicyInvocationExpression(expr, args.ToImmutable(), new SourceSpan(expr.Span.Start, close.Span.End)); - continue; - } - - if (Match(TokenKind.LeftBracket)) - { - var indexExpr = ParseExpression(); - var close = Consume(TokenKind.RightBracket, "Expected ']' to close indexer.", "expression.indexer"); - expr = new PolicyIndexerExpression(expr, indexExpr, new SourceSpan(expr.Span.Start, close.Span.End)); - continue; - } - - break; - } - - return expr; - } - - private bool Match(TokenKind kind) - { - if (Check(kind)) - { - Advance(); - return true; - } - - return false; - } - - private bool Check(TokenKind kind) => !IsAtEnd && Current.Kind == kind; - - private DslToken Consume(TokenKind kind, string message, string path) - { - if (Check(kind)) - { - return Advance(); - } - - diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.UnexpectedToken, message, path)); - return Advance(); - } - - private void SkipBlock() - { - var depth = 1; - while (depth > 0 && !IsAtEnd) - { - if (Match(TokenKind.LeftBrace)) - { - depth++; - } - else if (Match(TokenKind.RightBrace)) - { - depth--; - } - else - { - Advance(); - } - } - } - - private DslToken Advance() - { - if (!IsAtEnd) - { - position++; - } - - return tokens[position - 1]; - } - - private bool IsAtEnd => Current.Kind == TokenKind.EndOfFile; - - private DslToken Current => tokens[position]; - - private DslToken Previous => tokens[position - 1]; -} - -internal readonly record struct PolicyParseResult( - PolicyDocumentNode? 
Document, - ImmutableArray<PolicyIssue> Diagnostics); +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using StellaOps.Policy; + +namespace StellaOps.Policy.Engine.Compilation; + +internal sealed class PolicyParser +{ + private readonly ImmutableArray<DslToken> tokens; + private readonly List<PolicyIssue> diagnostics = new(); + private int position; + + private PolicyParser(ImmutableArray<DslToken> tokens) + { + this.tokens = tokens; + } + + public static PolicyParseResult Parse(string source) + { + if (source is null) + { + throw new ArgumentNullException(nameof(source)); + } + + var tokenization = DslTokenizer.Tokenize(source); + var parser = new PolicyParser(tokenization.Tokens); + var document = parser.ParseDocument(); + var allDiagnostics = tokenization.Diagnostics.AddRange(parser.diagnostics).ToImmutableArray(); + return new PolicyParseResult(document, allDiagnostics); + } + + private PolicyDocumentNode? ParseDocument() + { + if (!Match(TokenKind.KeywordPolicy)) + { + diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.MissingPolicyHeader, "Expected 'policy' declaration.", "policy")); + return null; + } + + var nameToken = Consume(TokenKind.StringLiteral, "Policy name must be a string literal.", "policy.name"); + var name = nameToken.Value as string ?? nameToken.Text; + + Consume(TokenKind.KeywordSyntax, "Expected 'syntax' declaration.", "policy.syntax"); + var syntaxToken = Consume(TokenKind.StringLiteral, "Policy syntax must be a string literal.", "policy.syntax.value"); + var syntax = syntaxToken.Value as string ?? syntaxToken.Text; + + Consume(TokenKind.LeftBrace, "Expected '{' to start policy body.", "policy.body"); + + var metadataBuilder = ImmutableDictionary.CreateBuilder<string, PolicyLiteralValue>(StringComparer.Ordinal); + var settingsBuilder = ImmutableDictionary.CreateBuilder<string, PolicyLiteralValue>(StringComparer.Ordinal); + var profiles = ImmutableArray.CreateBuilder<PolicyProfileNode>(); + var rules = ImmutableArray.CreateBuilder<PolicyRuleNode>(); + + while (!Check(TokenKind.RightBrace) && !IsAtEnd) + { + if (Match(TokenKind.KeywordMetadata)) + { + foreach (var kvp in ParseKeyValueBlock("policy.metadata")) + { + metadataBuilder[kvp.Key] = kvp.Value; + } + + continue; + } + + if (Match(TokenKind.KeywordSettings)) + { + foreach (var kvp in ParseKeyValueBlock("policy.settings")) + { + settingsBuilder[kvp.Key] = kvp.Value; + } + + continue; + } + + if (Match(TokenKind.KeywordProfile)) + { + var profile = ParseProfile(); + if (profile is not null) + { + profiles.Add(profile); + } + + continue; + } + + if (Match(TokenKind.KeywordRule)) + { + var rule = ParseRule(); + if (rule is not null) + { + rules.Add(rule); + } + + continue; + } + + diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.UnexpectedSection, $"Unexpected token '{Current.Text}' in policy body.", "policy.body")); + Advance(); + } + + var close = Consume(TokenKind.RightBrace, "Expected '}' to close policy definition.", "policy"); + + if (!string.Equals(syntax, "stella-dsl@1", StringComparison.Ordinal)) + { + diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.UnsupportedSyntaxVersion, $"Unsupported syntax '{syntax}'.", "policy.syntax")); + } + + var span = new SourceSpan(tokens[0].Span.Start, close.Span.End); + return new PolicyDocumentNode( + name, + syntax, + metadataBuilder.ToImmutable(), + profiles.ToImmutable(), + settingsBuilder.ToImmutable(), + rules.ToImmutable(), + span); + } + + private PolicyProfileNode? 
ParseProfile() + { + var nameToken = Consume(TokenKind.Identifier, "Profile requires a name.", "policy.profile"); + var name = nameToken.Text; + Consume(TokenKind.LeftBrace, "Expected '{' after profile declaration.", $"policy.profile.{name}"); + + var start = nameToken.Span.Start; + var depth = 1; + while (depth > 0 && !IsAtEnd) + { + if (Match(TokenKind.LeftBrace)) + { + depth++; + } + else if (Match(TokenKind.RightBrace)) + { + depth--; + } + else + { + Advance(); + } + } + + var close = Previous; + return new PolicyProfileNode( + name, + ImmutableArray<PolicyProfileItemNode>.Empty, + new SourceSpan(start, close.Span.End)); + } + + private PolicyRuleNode? ParseRule() + { + var nameToken = Consume(TokenKind.Identifier, "Rule requires a name.", "policy.rule"); + var name = nameToken.Text; + + var priority = 0; + if (Match(TokenKind.KeywordPriority)) + { + var priorityToken = Consume(TokenKind.NumberLiteral, "Priority must be numeric.", $"policy.rule.{name}"); + if (priorityToken.Value is decimal dec) + { + priority = (int)Math.Round(dec, MidpointRounding.AwayFromZero); + } + } + + Consume(TokenKind.LeftBrace, "Expected '{' to start rule.", $"policy.rule.{name}"); + Consume(TokenKind.KeywordWhen, "Rule requires a 'when' clause.", $"policy.rule.{name}"); + var when = ParseExpression(); + + Consume(TokenKind.KeywordThen, "Rule requires a 'then' clause.", $"policy.rule.{name}"); + var thenActions = ParseActions(name, "then"); + + var elseActions = ImmutableArray<PolicyActionNode>.Empty; + if (Match(TokenKind.KeywordElse)) + { + elseActions = ParseActions(name, "else"); + } + + string? because = null; + if (Match(TokenKind.KeywordBecause)) + { + var becauseToken = Consume(TokenKind.StringLiteral, "Because clause must be string.", $"policy.rule.{name}.because"); + because = becauseToken.Value as string ?? 
becauseToken.Text; + } + + var close = Consume(TokenKind.RightBrace, "Expected '}' to close rule.", $"policy.rule.{name}"); + + if (because is null) + { + diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.MissingBecauseClause, $"Rule '{name}' missing 'because' clause.", $"policy.rule.{name}")); + } + + return new PolicyRuleNode(name, priority, when, thenActions, elseActions, because, new SourceSpan(nameToken.Span.Start, close.Span.End)); + } + + private ImmutableArray<PolicyActionNode> ParseActions(string ruleName, string clause) + { + var actions = ImmutableArray.CreateBuilder<PolicyActionNode>(); + while (!Check(TokenKind.RightBrace) && !Check(TokenKind.KeywordElse) && !Check(TokenKind.KeywordBecause) && !IsAtEnd) + { + if (Check(TokenKind.Identifier)) + { + actions.Add(ParseAssignmentAction(ruleName, clause)); + continue; + } + + if (Match(TokenKind.KeywordAnnotate)) + { + actions.Add(ParseAnnotateAction(ruleName, clause)); + continue; + } + + if (Match(TokenKind.KeywordWarn)) + { + actions.Add(ParseWarnAction()); + continue; + } + + if (Match(TokenKind.KeywordEscalate)) + { + actions.Add(ParseEscalateAction()); + continue; + } + + if (Match(TokenKind.KeywordRequireVex)) + { + actions.Add(ParseRequireVexAction(ruleName, clause)); + continue; + } + + if (Match(TokenKind.KeywordIgnore)) + { + actions.Add(ParseIgnoreAction(ruleName, clause)); + continue; + } + + if (Match(TokenKind.KeywordDefer)) + { + actions.Add(ParseDeferAction(ruleName, clause)); + continue; + } + + diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.InvalidAction, $"Unexpected token '{Current.Text}' in {clause} actions.", $"policy.rule.{ruleName}.{clause}")); + Advance(); + } + + return actions.ToImmutable(); + } + + private PolicyActionNode ParseAssignmentAction(string ruleName, string clause) + { + var segments = ImmutableArray.CreateBuilder<string>(); + var first = Consume(TokenKind.Identifier, "Assignment target must start with identifier.", $"policy.rule.{ruleName}.{clause}"); + segments.Add(first.Text); + while (Match(TokenKind.Dot)) + { + segments.Add(Consume(TokenKind.Identifier, "Expected identifier after '.'.", $"policy.rule.{ruleName}.{clause}").Text); + } + + Consume(TokenKind.Define, "Expected ':=' in action.", $"policy.rule.{ruleName}.{clause}"); + var value = ParseExpression(); + Match(TokenKind.Semicolon); + return new PolicyAssignmentActionNode(new PolicyReference(segments.ToImmutable(), new SourceSpan(first.Span.Start, value.Span.End)), value, new SourceSpan(first.Span.Start, value.Span.End)); + } + + private PolicyActionNode ParseAnnotateAction(string ruleName, string clause) + { + var reference = ParseReference($"policy.rule.{ruleName}.{clause}.annotate"); + Consume(TokenKind.Define, "Expected ':=' in annotate action.", $"policy.rule.{ruleName}.{clause}.annotate"); + var value = ParseExpression(); + Match(TokenKind.Semicolon); + return new PolicyAnnotateActionNode(reference, value, new SourceSpan(reference.Span.Start, value.Span.End)); + } + + private PolicyActionNode ParseWarnAction() + { + PolicyExpression? message = null; + if (Match(TokenKind.KeywordMessage)) + { + message = ParseExpression(); + } + + Match(TokenKind.Semicolon); + var span = message?.Span ?? Previous.Span; + return new PolicyWarnActionNode(message, span); + } + + private PolicyActionNode ParseEscalateAction() + { + PolicyExpression? to = null; + PolicyExpression? 
when = null; + + if (Match(TokenKind.KeywordTo)) + { + to = ParseExpression(); + } + + if (Match(TokenKind.KeywordWhen)) + { + when = ParseExpression(); + } + + Match(TokenKind.Semicolon); + var end = when?.Span.End ?? to?.Span.End ?? Previous.Span.End; + return new PolicyEscalateActionNode(to, when, new SourceSpan(Previous.Span.Start, end)); + } + + private PolicyActionNode ParseRequireVexAction(string ruleName, string clause) + { + Consume(TokenKind.LeftBrace, "Expected '{' after requireVex.", $"policy.rule.{ruleName}.{clause}.requireVex"); + var builder = ImmutableDictionary.CreateBuilder<string, PolicyExpression>(StringComparer.Ordinal); + while (!Check(TokenKind.RightBrace) && !IsAtEnd) + { + var key = Consume(TokenKind.Identifier, "requireVex key must be identifier.", $"policy.rule.{ruleName}.{clause}.requireVex").Text; + Consume(TokenKind.Assign, "Expected '=' in requireVex condition.", $"policy.rule.{ruleName}.{clause}.requireVex"); + builder[key] = ParseExpression(); + Match(TokenKind.Comma); + } + + var close = Consume(TokenKind.RightBrace, "Expected '}' to close requireVex block.", $"policy.rule.{ruleName}.{clause}.requireVex"); + Match(TokenKind.Semicolon); + return new PolicyRequireVexActionNode(builder.ToImmutable(), new SourceSpan(close.Span.Start, close.Span.End)); + } + + private PolicyActionNode ParseIgnoreAction(string ruleName, string clause) + { + PolicyExpression? until = null; + string? because = null; + if (Match(TokenKind.KeywordUntil)) + { + until = ParseExpression(); + } + + if (Match(TokenKind.KeywordBecause)) + { + var becauseToken = Consume(TokenKind.StringLiteral, "Ignore 'because' must be string.", $"policy.rule.{ruleName}.{clause}.ignore"); + because = becauseToken.Value as string ?? becauseToken.Text; + } + + Match(TokenKind.Semicolon); + return new PolicyIgnoreActionNode(until, because, new SourceSpan(Previous.Span.Start, (until?.Span.End ?? Previous.Span.End))); + } + + private PolicyActionNode ParseDeferAction(string ruleName, string clause) + { + PolicyExpression? until = null; + if (Match(TokenKind.KeywordUntil)) + { + until = ParseExpression(); + } + + Match(TokenKind.Semicolon); + return new PolicyDeferActionNode(until, new SourceSpan(Previous.Span.Start, (until?.Span.End ?? Previous.Span.End))); + } + + private PolicyReference ParseReference(string path) + { + var segments = ImmutableArray.CreateBuilder<string>(); + var first = Consume(TokenKind.Identifier, "Expected identifier.", path); + segments.Add(first.Text); + while (Match(TokenKind.Dot)) + { + segments.Add(Consume(TokenKind.Identifier, "Expected identifier after '.'.", path).Text); + } + + return new PolicyReference(segments.ToImmutable(), first.Span); + } + + private Dictionary<string, PolicyLiteralValue> ParseKeyValueBlock(string path) + { + Consume(TokenKind.LeftBrace, "Expected '{'.", path); + var entries = new Dictionary<string, PolicyLiteralValue>(StringComparer.Ordinal); + while (!Check(TokenKind.RightBrace) && !IsAtEnd) + { + var key = Consume(TokenKind.Identifier, "Expected identifier.", path).Text; + Consume(TokenKind.Assign, "Expected '='.", path); + entries[key] = ParseLiteralValue(path); + Match(TokenKind.Semicolon); + } + + Consume(TokenKind.RightBrace, "Expected '}'.", path); + return entries; + } + + private PolicyLiteralValue ParseLiteralValue(string path) + { + if (Match(TokenKind.StringLiteral)) + { + return new PolicyStringLiteral(Previous.Value as string ?? 
Previous.Text, Previous.Span); + } + + if (Match(TokenKind.NumberLiteral)) + { + return new PolicyNumberLiteral(Previous.Value is decimal dec ? dec : 0m, Previous.Span); + } + + if (Match(TokenKind.BooleanLiteral)) + { + return new PolicyBooleanLiteral(Previous.Value is bool b && b, Previous.Span); + } + + if (Match(TokenKind.LeftBracket)) + { + var start = Previous.Span.Start; + var items = ImmutableArray.CreateBuilder<PolicyLiteralValue>(); + while (!Check(TokenKind.RightBracket) && !IsAtEnd) + { + items.Add(ParseLiteralValue(path)); + Match(TokenKind.Comma); + } + + var close = Consume(TokenKind.RightBracket, "Expected ']' in list literal.", path); + return new PolicyListLiteral(items.ToImmutable(), new SourceSpan(start, close.Span.End)); + } + + diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.InvalidLiteral, "Invalid literal.", path)); + return new PolicyStringLiteral(string.Empty, Current.Span); + } + + private PolicyExpression ParseExpression() => ParseOr(); + + private PolicyExpression ParseOr() + { + var expr = ParseAnd(); + while (Match(TokenKind.KeywordOr)) + { + var right = ParseAnd(); + expr = new PolicyBinaryExpression(expr, PolicyBinaryOperator.Or, right, new SourceSpan(expr.Span.Start, right.Span.End)); + } + + return expr; + } + + private PolicyExpression ParseAnd() + { + var expr = ParseEquality(); + while (Match(TokenKind.KeywordAnd)) + { + var right = ParseEquality(); + expr = new PolicyBinaryExpression(expr, PolicyBinaryOperator.And, right, new SourceSpan(expr.Span.Start, right.Span.End)); + } + + return expr; + } + + private PolicyExpression ParseEquality() + { + var expr = ParseUnary(); + while (true) + { + if (Match(TokenKind.EqualEqual)) + { + var right = ParseUnary(); + expr = new PolicyBinaryExpression(expr, PolicyBinaryOperator.Equal, right, new SourceSpan(expr.Span.Start, right.Span.End)); + } + else if (Match(TokenKind.NotEqual)) + { + var right = ParseUnary(); + expr = new PolicyBinaryExpression(expr, PolicyBinaryOperator.NotEqual, right, new SourceSpan(expr.Span.Start, right.Span.End)); + } + else if (Match(TokenKind.KeywordIn)) + { + var right = ParseUnary(); + expr = new PolicyBinaryExpression(expr, PolicyBinaryOperator.In, right, new SourceSpan(expr.Span.Start, right.Span.End)); + } + else if (Match(TokenKind.KeywordNot)) + { + if (Match(TokenKind.KeywordIn)) + { + var right = ParseUnary(); + expr = new PolicyBinaryExpression(expr, PolicyBinaryOperator.NotIn, right, new SourceSpan(expr.Span.Start, right.Span.End)); + } + else + { + diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.UnexpectedToken, "Expected 'in' after 'not'.", "expression.not")); + } + } + else if (Match(TokenKind.LessThan)) + { + var right = ParseUnary(); + expr = new PolicyBinaryExpression(expr, PolicyBinaryOperator.LessThan, right, new SourceSpan(expr.Span.Start, right.Span.End)); + } + else if (Match(TokenKind.LessThanOrEqual)) + { + var right = ParseUnary(); + expr = new PolicyBinaryExpression(expr, PolicyBinaryOperator.LessThanOrEqual, right, new SourceSpan(expr.Span.Start, right.Span.End)); + } + else if (Match(TokenKind.GreaterThan)) + { + var right = ParseUnary(); + expr = new PolicyBinaryExpression(expr, PolicyBinaryOperator.GreaterThan, right, new SourceSpan(expr.Span.Start, right.Span.End)); + } + else if (Match(TokenKind.GreaterThanOrEqual)) + { + var right = ParseUnary(); + expr = new PolicyBinaryExpression(expr, PolicyBinaryOperator.GreaterThanOrEqual, right, new SourceSpan(expr.Span.Start, right.Span.End)); + } + else + { + break; + } + } + + return 
expr; + } + + private PolicyExpression ParseUnary() + { + if (Match(TokenKind.KeywordNot)) + { + var operand = ParseUnary(); + return new PolicyUnaryExpression(PolicyUnaryOperator.Not, operand, new SourceSpan(Previous.Span.Start, operand.Span.End)); + } + + return ParsePrimary(); + } + + private PolicyExpression ParsePrimary() + { + if (Match(TokenKind.StringLiteral)) + { + return new PolicyLiteralExpression(Previous.Value as string ?? Previous.Text, Previous.Span); + } + + if (Match(TokenKind.NumberLiteral)) + { + return new PolicyLiteralExpression(Previous.Value ?? 0m, Previous.Span); + } + + if (Match(TokenKind.BooleanLiteral)) + { + return new PolicyLiteralExpression(Previous.Value ?? false, Previous.Span); + } + + if (Match(TokenKind.LeftBracket)) + { + var start = Previous.Span.Start; + var items = ImmutableArray.CreateBuilder<PolicyExpression>(); + while (!Check(TokenKind.RightBracket) && !IsAtEnd) + { + items.Add(ParseExpression()); + Match(TokenKind.Comma); + } + + var close = Consume(TokenKind.RightBracket, "Expected ']' to close list expression.", "expression.list"); + return new PolicyListExpression(items.ToImmutable(), new SourceSpan(start, close.Span.End)); + } + + if (Match(TokenKind.LeftParen)) + { + var expr = ParseExpression(); + Consume(TokenKind.RightParen, "Expected ')' to close grouped expression.", "expression.group"); + return expr; + } + + if (Match(TokenKind.Identifier)) + { + return ParseIdentifierExpression(Previous); + } + + if (Match(TokenKind.KeywordEnv)) + { + return ParseIdentifierExpression(Previous); + } + + diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.UnexpectedToken, $"Unexpected token '{Current.Text}' in expression.", "expression")); + var bad = Advance(); + return new PolicyLiteralExpression(null, bad.Span); + } + + private PolicyExpression ParseIdentifierExpression(DslToken identifier) + { + PolicyExpression expr = new PolicyIdentifierExpression(identifier.Text, identifier.Span); + while (true) + { + if (Match(TokenKind.Dot)) + { + var member = Consume(TokenKind.Identifier, "Expected identifier after '.'.", "expression.member"); + expr = new PolicyMemberAccessExpression(expr, member.Text, new SourceSpan(expr.Span.Start, member.Span.End)); + continue; + } + + if (Match(TokenKind.LeftParen)) + { + var args = ImmutableArray.CreateBuilder<PolicyExpression>(); + if (!Check(TokenKind.RightParen)) + { + do + { + args.Add(ParseExpression()); + } + while (Match(TokenKind.Comma)); + } + + var close = Consume(TokenKind.RightParen, "Expected ')' to close invocation.", "expression.call"); + expr = new PolicyInvocationExpression(expr, args.ToImmutable(), new SourceSpan(expr.Span.Start, close.Span.End)); + continue; + } + + if (Match(TokenKind.LeftBracket)) + { + var indexExpr = ParseExpression(); + var close = Consume(TokenKind.RightBracket, "Expected ']' to close indexer.", "expression.indexer"); + expr = new PolicyIndexerExpression(expr, indexExpr, new SourceSpan(expr.Span.Start, close.Span.End)); + continue; + } + + break; + } + + return expr; + } + + private bool Match(TokenKind kind) + { + if (Check(kind)) + { + Advance(); + return true; + } + + return false; + } + + private bool Check(TokenKind kind) => !IsAtEnd && Current.Kind == kind; + + private DslToken Consume(TokenKind kind, string message, string path) + { + if (Check(kind)) + { + return Advance(); + } + + diagnostics.Add(PolicyIssue.Error(PolicyDslDiagnosticCodes.UnexpectedToken, message, path)); + return Advance(); + } + + private void SkipBlock() + { + var depth = 1; + while (depth > 
0 && !IsAtEnd) + { + if (Match(TokenKind.LeftBrace)) + { + depth++; + } + else if (Match(TokenKind.RightBrace)) + { + depth--; + } + else + { + Advance(); + } + } + } + + private DslToken Advance() + { + if (!IsAtEnd) + { + position++; + } + + return tokens[position - 1]; + } + + private bool IsAtEnd => Current.Kind == TokenKind.EndOfFile; + + private DslToken Current => tokens[position]; + + private DslToken Previous => tokens[position - 1]; +} + +internal readonly record struct PolicyParseResult( + PolicyDocumentNode? Document, + ImmutableArray<PolicyIssue> Diagnostics); diff --git a/src/StellaOps.Policy.Engine/Compilation/PolicySyntaxNodes.cs b/src/Policy/StellaOps.Policy.Engine/Compilation/PolicySyntaxNodes.cs similarity index 97% rename from src/StellaOps.Policy.Engine/Compilation/PolicySyntaxNodes.cs rename to src/Policy/StellaOps.Policy.Engine/Compilation/PolicySyntaxNodes.cs index dc815935..b4b00cbb 100644 --- a/src/StellaOps.Policy.Engine/Compilation/PolicySyntaxNodes.cs +++ b/src/Policy/StellaOps.Policy.Engine/Compilation/PolicySyntaxNodes.cs @@ -1,141 +1,141 @@ -using System.Collections.Immutable; - -namespace StellaOps.Policy.Engine.Compilation; - -public abstract record SyntaxNode(SourceSpan Span); - -public sealed record PolicyDocumentNode( - string Name, - string Syntax, - ImmutableDictionary<string, PolicyLiteralValue> Metadata, - ImmutableArray<PolicyProfileNode> Profiles, - ImmutableDictionary<string, PolicyLiteralValue> Settings, - ImmutableArray<PolicyRuleNode> Rules, - SourceSpan Span) : SyntaxNode(Span); - -public sealed record PolicyProfileNode( - string Name, - ImmutableArray<PolicyProfileItemNode> Items, - SourceSpan Span) : SyntaxNode(Span); - -public abstract record PolicyProfileItemNode(SourceSpan Span); - -public sealed record PolicyProfileMapNode( - string Name, - ImmutableArray<PolicyProfileMapEntryNode> Entries, - SourceSpan Span) : PolicyProfileItemNode(Span); - -public sealed record PolicyProfileMapEntryNode( - string Source, - decimal Weight, - SourceSpan Span) : SyntaxNode(Span); - -public sealed record PolicyProfileEnvNode( - string Name, - ImmutableArray<PolicyProfileEnvEntryNode> Entries, - SourceSpan Span) : PolicyProfileItemNode(Span); - -public sealed record PolicyProfileEnvEntryNode( - PolicyExpression Condition, - decimal Weight, - SourceSpan Span) : SyntaxNode(Span); - -public sealed record PolicyProfileScalarNode( - string Name, - PolicyLiteralValue Value, - SourceSpan Span) : PolicyProfileItemNode(Span); - -public sealed record PolicyRuleNode( - string Name, - int Priority, - PolicyExpression When, - ImmutableArray<PolicyActionNode> ThenActions, - ImmutableArray<PolicyActionNode> ElseActions, - string? Because, - SourceSpan Span) : SyntaxNode(Span); - -public abstract record PolicyActionNode(SourceSpan Span); - -public sealed record PolicyAssignmentActionNode( - PolicyReference Target, - PolicyExpression Value, - SourceSpan Span) : PolicyActionNode(Span); - -public sealed record PolicyAnnotateActionNode( - PolicyReference Target, - PolicyExpression Value, - SourceSpan Span) : PolicyActionNode(Span); - -public sealed record PolicyIgnoreActionNode( - PolicyExpression? Until, - string? Because, - SourceSpan Span) : PolicyActionNode(Span); - -public sealed record PolicyEscalateActionNode( - PolicyExpression? To, - PolicyExpression? 
When, - SourceSpan Span) : PolicyActionNode(Span); - -public sealed record PolicyRequireVexActionNode( - ImmutableDictionary<string, PolicyExpression> Conditions, - SourceSpan Span) : PolicyActionNode(Span); - -public sealed record PolicyWarnActionNode( - PolicyExpression? Message, - SourceSpan Span) : PolicyActionNode(Span); - -public sealed record PolicyDeferActionNode( - PolicyExpression? Until, - SourceSpan Span) : PolicyActionNode(Span); - -public abstract record PolicyExpression(SourceSpan Span); - -public sealed record PolicyLiteralExpression(object? Value, SourceSpan Span) : PolicyExpression(Span); - -public sealed record PolicyListExpression(ImmutableArray<PolicyExpression> Items, SourceSpan Span) : PolicyExpression(Span); - -public sealed record PolicyIdentifierExpression(string Name, SourceSpan Span) : PolicyExpression(Span); - -public sealed record PolicyMemberAccessExpression(PolicyExpression Target, string Member, SourceSpan Span) : PolicyExpression(Span); - -public sealed record PolicyInvocationExpression(PolicyExpression Target, ImmutableArray<PolicyExpression> Arguments, SourceSpan Span) : PolicyExpression(Span); - -public sealed record PolicyIndexerExpression(PolicyExpression Target, PolicyExpression Index, SourceSpan Span) : PolicyExpression(Span); - -public sealed record PolicyUnaryExpression(PolicyUnaryOperator Operator, PolicyExpression Operand, SourceSpan Span) : PolicyExpression(Span); - -public sealed record PolicyBinaryExpression(PolicyExpression Left, PolicyBinaryOperator Operator, PolicyExpression Right, SourceSpan Span) : PolicyExpression(Span); - -public enum PolicyUnaryOperator -{ - Not, -} - -public enum PolicyBinaryOperator -{ - And, - Or, - Equal, - NotEqual, - LessThan, - LessThanOrEqual, - GreaterThan, - GreaterThanOrEqual, - In, - NotIn, -} - -public sealed record PolicyReference(ImmutableArray<string> Segments, SourceSpan Span) -{ - public override string ToString() => string.Join(".", Segments); -} - -public abstract record PolicyLiteralValue(SourceSpan Span); - -public sealed record PolicyStringLiteral(string Value, SourceSpan Span) : PolicyLiteralValue(Span); - -public sealed record PolicyNumberLiteral(decimal Value, SourceSpan Span) : PolicyLiteralValue(Span); - -public sealed record PolicyBooleanLiteral(bool Value, SourceSpan Span) : PolicyLiteralValue(Span); - -public sealed record PolicyListLiteral(ImmutableArray<PolicyLiteralValue> Items, SourceSpan Span) : PolicyLiteralValue(Span); +using System.Collections.Immutable; + +namespace StellaOps.Policy.Engine.Compilation; + +public abstract record SyntaxNode(SourceSpan Span); + +public sealed record PolicyDocumentNode( + string Name, + string Syntax, + ImmutableDictionary<string, PolicyLiteralValue> Metadata, + ImmutableArray<PolicyProfileNode> Profiles, + ImmutableDictionary<string, PolicyLiteralValue> Settings, + ImmutableArray<PolicyRuleNode> Rules, + SourceSpan Span) : SyntaxNode(Span); + +public sealed record PolicyProfileNode( + string Name, + ImmutableArray<PolicyProfileItemNode> Items, + SourceSpan Span) : SyntaxNode(Span); + +public abstract record PolicyProfileItemNode(SourceSpan Span); + +public sealed record PolicyProfileMapNode( + string Name, + ImmutableArray<PolicyProfileMapEntryNode> Entries, + SourceSpan Span) : PolicyProfileItemNode(Span); + +public sealed record PolicyProfileMapEntryNode( + string Source, + decimal Weight, + SourceSpan Span) : SyntaxNode(Span); + +public sealed record PolicyProfileEnvNode( + string Name, + ImmutableArray<PolicyProfileEnvEntryNode> Entries, + 
SourceSpan Span) : PolicyProfileItemNode(Span); + +public sealed record PolicyProfileEnvEntryNode( + PolicyExpression Condition, + decimal Weight, + SourceSpan Span) : SyntaxNode(Span); + +public sealed record PolicyProfileScalarNode( + string Name, + PolicyLiteralValue Value, + SourceSpan Span) : PolicyProfileItemNode(Span); + +public sealed record PolicyRuleNode( + string Name, + int Priority, + PolicyExpression When, + ImmutableArray<PolicyActionNode> ThenActions, + ImmutableArray<PolicyActionNode> ElseActions, + string? Because, + SourceSpan Span) : SyntaxNode(Span); + +public abstract record PolicyActionNode(SourceSpan Span); + +public sealed record PolicyAssignmentActionNode( + PolicyReference Target, + PolicyExpression Value, + SourceSpan Span) : PolicyActionNode(Span); + +public sealed record PolicyAnnotateActionNode( + PolicyReference Target, + PolicyExpression Value, + SourceSpan Span) : PolicyActionNode(Span); + +public sealed record PolicyIgnoreActionNode( + PolicyExpression? Until, + string? Because, + SourceSpan Span) : PolicyActionNode(Span); + +public sealed record PolicyEscalateActionNode( + PolicyExpression? To, + PolicyExpression? When, + SourceSpan Span) : PolicyActionNode(Span); + +public sealed record PolicyRequireVexActionNode( + ImmutableDictionary<string, PolicyExpression> Conditions, + SourceSpan Span) : PolicyActionNode(Span); + +public sealed record PolicyWarnActionNode( + PolicyExpression? Message, + SourceSpan Span) : PolicyActionNode(Span); + +public sealed record PolicyDeferActionNode( + PolicyExpression? Until, + SourceSpan Span) : PolicyActionNode(Span); + +public abstract record PolicyExpression(SourceSpan Span); + +public sealed record PolicyLiteralExpression(object? Value, SourceSpan Span) : PolicyExpression(Span); + +public sealed record PolicyListExpression(ImmutableArray<PolicyExpression> Items, SourceSpan Span) : PolicyExpression(Span); + +public sealed record PolicyIdentifierExpression(string Name, SourceSpan Span) : PolicyExpression(Span); + +public sealed record PolicyMemberAccessExpression(PolicyExpression Target, string Member, SourceSpan Span) : PolicyExpression(Span); + +public sealed record PolicyInvocationExpression(PolicyExpression Target, ImmutableArray<PolicyExpression> Arguments, SourceSpan Span) : PolicyExpression(Span); + +public sealed record PolicyIndexerExpression(PolicyExpression Target, PolicyExpression Index, SourceSpan Span) : PolicyExpression(Span); + +public sealed record PolicyUnaryExpression(PolicyUnaryOperator Operator, PolicyExpression Operand, SourceSpan Span) : PolicyExpression(Span); + +public sealed record PolicyBinaryExpression(PolicyExpression Left, PolicyBinaryOperator Operator, PolicyExpression Right, SourceSpan Span) : PolicyExpression(Span); + +public enum PolicyUnaryOperator +{ + Not, +} + +public enum PolicyBinaryOperator +{ + And, + Or, + Equal, + NotEqual, + LessThan, + LessThanOrEqual, + GreaterThan, + GreaterThanOrEqual, + In, + NotIn, +} + +public sealed record PolicyReference(ImmutableArray<string> Segments, SourceSpan Span) +{ + public override string ToString() => string.Join(".", Segments); +} + +public abstract record PolicyLiteralValue(SourceSpan Span); + +public sealed record PolicyStringLiteral(string Value, SourceSpan Span) : PolicyLiteralValue(Span); + +public sealed record PolicyNumberLiteral(decimal Value, SourceSpan Span) : PolicyLiteralValue(Span); + +public sealed record PolicyBooleanLiteral(bool Value, SourceSpan Span) : PolicyLiteralValue(Span); + +public sealed record 
PolicyListLiteral(ImmutableArray<PolicyLiteralValue> Items, SourceSpan Span) : PolicyLiteralValue(Span); diff --git a/src/StellaOps.Policy.Engine/Domain/PolicyPackRecord.cs b/src/Policy/StellaOps.Policy.Engine/Domain/PolicyPackRecord.cs similarity index 96% rename from src/StellaOps.Policy.Engine/Domain/PolicyPackRecord.cs rename to src/Policy/StellaOps.Policy.Engine/Domain/PolicyPackRecord.cs index 5c611206..0b7f722b 100644 --- a/src/StellaOps.Policy.Engine/Domain/PolicyPackRecord.cs +++ b/src/Policy/StellaOps.Policy.Engine/Domain/PolicyPackRecord.cs @@ -1,101 +1,101 @@ -using System.Collections.Concurrent; -using System.Collections.Immutable; - -namespace StellaOps.Policy.Engine.Domain; - -internal sealed class PolicyPackRecord -{ - private readonly ConcurrentDictionary<int, PolicyRevisionRecord> revisions = new(); - - public PolicyPackRecord(string packId, string? displayName, DateTimeOffset createdAt) - { - PackId = packId ?? throw new ArgumentNullException(nameof(packId)); - DisplayName = displayName; - CreatedAt = createdAt; - } - - public string PackId { get; } - - public string? DisplayName { get; } - - public DateTimeOffset CreatedAt { get; } - - public ImmutableArray<PolicyRevisionRecord> GetRevisions() - => revisions.Values - .OrderBy(r => r.Version) - .ToImmutableArray(); - - public PolicyRevisionRecord GetOrAddRevision(int version, Func<int, PolicyRevisionRecord> factory) - => revisions.GetOrAdd(version, factory); - - public bool TryGetRevision(int version, out PolicyRevisionRecord revision) - => revisions.TryGetValue(version, out revision!); - - public int GetNextVersion() - => revisions.IsEmpty ? 1 : revisions.Keys.Max() + 1; -} - -internal sealed class PolicyRevisionRecord -{ - private readonly ConcurrentDictionary<string, PolicyActivationApproval> approvals = new(StringComparer.OrdinalIgnoreCase); - - public PolicyRevisionRecord(int version, bool requiresTwoPerson, PolicyRevisionStatus status, DateTimeOffset createdAt) - { - Version = version; - RequiresTwoPersonApproval = requiresTwoPerson; - Status = status; - CreatedAt = createdAt; - } - - public int Version { get; } - - public bool RequiresTwoPersonApproval { get; } - - public PolicyRevisionStatus Status { get; private set; } - - public DateTimeOffset CreatedAt { get; } - - public DateTimeOffset? ActivatedAt { get; private set; } - - public ImmutableArray<PolicyActivationApproval> Approvals - => approvals.Values - .OrderBy(approval => approval.ApprovedAt) - .ToImmutableArray(); - - public void SetStatus(PolicyRevisionStatus status, DateTimeOffset timestamp) - { - Status = status; - if (status == PolicyRevisionStatus.Active) - { - ActivatedAt = timestamp; - } - } - - public PolicyActivationApprovalStatus AddApproval(PolicyActivationApproval approval) - { - if (!approvals.TryAdd(approval.ActorId, approval)) - { - return PolicyActivationApprovalStatus.Duplicate; - } - - return approvals.Count >= 2 - ? PolicyActivationApprovalStatus.ThresholdReached - : PolicyActivationApprovalStatus.Pending; - } -} - -internal enum PolicyRevisionStatus -{ - Draft, - Approved, - Active -} - -internal sealed record PolicyActivationApproval(string ActorId, DateTimeOffset ApprovedAt, string? 
Comment); - -internal enum PolicyActivationApprovalStatus -{ - Pending, - ThresholdReached, - Duplicate -} +using System.Collections.Concurrent; +using System.Collections.Immutable; + +namespace StellaOps.Policy.Engine.Domain; + +internal sealed class PolicyPackRecord +{ + private readonly ConcurrentDictionary<int, PolicyRevisionRecord> revisions = new(); + + public PolicyPackRecord(string packId, string? displayName, DateTimeOffset createdAt) + { + PackId = packId ?? throw new ArgumentNullException(nameof(packId)); + DisplayName = displayName; + CreatedAt = createdAt; + } + + public string PackId { get; } + + public string? DisplayName { get; } + + public DateTimeOffset CreatedAt { get; } + + public ImmutableArray<PolicyRevisionRecord> GetRevisions() + => revisions.Values + .OrderBy(r => r.Version) + .ToImmutableArray(); + + public PolicyRevisionRecord GetOrAddRevision(int version, Func<int, PolicyRevisionRecord> factory) + => revisions.GetOrAdd(version, factory); + + public bool TryGetRevision(int version, out PolicyRevisionRecord revision) + => revisions.TryGetValue(version, out revision!); + + public int GetNextVersion() + => revisions.IsEmpty ? 1 : revisions.Keys.Max() + 1; +} + +internal sealed class PolicyRevisionRecord +{ + private readonly ConcurrentDictionary<string, PolicyActivationApproval> approvals = new(StringComparer.OrdinalIgnoreCase); + + public PolicyRevisionRecord(int version, bool requiresTwoPerson, PolicyRevisionStatus status, DateTimeOffset createdAt) + { + Version = version; + RequiresTwoPersonApproval = requiresTwoPerson; + Status = status; + CreatedAt = createdAt; + } + + public int Version { get; } + + public bool RequiresTwoPersonApproval { get; } + + public PolicyRevisionStatus Status { get; private set; } + + public DateTimeOffset CreatedAt { get; } + + public DateTimeOffset? ActivatedAt { get; private set; } + + public ImmutableArray<PolicyActivationApproval> Approvals + => approvals.Values + .OrderBy(approval => approval.ApprovedAt) + .ToImmutableArray(); + + public void SetStatus(PolicyRevisionStatus status, DateTimeOffset timestamp) + { + Status = status; + if (status == PolicyRevisionStatus.Active) + { + ActivatedAt = timestamp; + } + } + + public PolicyActivationApprovalStatus AddApproval(PolicyActivationApproval approval) + { + if (!approvals.TryAdd(approval.ActorId, approval)) + { + return PolicyActivationApprovalStatus.Duplicate; + } + + return approvals.Count >= 2 + ? PolicyActivationApprovalStatus.ThresholdReached + : PolicyActivationApprovalStatus.Pending; + } +} + +internal enum PolicyRevisionStatus +{ + Draft, + Approved, + Active +} + +internal sealed record PolicyActivationApproval(string ActorId, DateTimeOffset ApprovedAt, string? 
Comment); + +internal enum PolicyActivationApprovalStatus +{ + Pending, + ThresholdReached, + Duplicate +} diff --git a/src/StellaOps.Policy.Engine/Endpoints/PolicyCompilationEndpoints.cs b/src/Policy/StellaOps.Policy.Engine/Endpoints/PolicyCompilationEndpoints.cs similarity index 97% rename from src/StellaOps.Policy.Engine/Endpoints/PolicyCompilationEndpoints.cs rename to src/Policy/StellaOps.Policy.Engine/Endpoints/PolicyCompilationEndpoints.cs index 2c74c4b9..f2a12e78 100644 --- a/src/StellaOps.Policy.Engine/Endpoints/PolicyCompilationEndpoints.cs +++ b/src/Policy/StellaOps.Policy.Engine/Endpoints/PolicyCompilationEndpoints.cs @@ -1,107 +1,107 @@ -using Microsoft.AspNetCore.Http.HttpResults; -using Microsoft.AspNetCore.Mvc; -using StellaOps.Policy; -using StellaOps.Policy.Engine.Services; -using System.Collections.Immutable; - -namespace StellaOps.Policy.Engine.Endpoints; - -internal static class PolicyCompilationEndpoints -{ - private const string CompileRoute = "/api/policy/policies/{policyId}/versions/{version}:compile"; - - public static IEndpointRouteBuilder MapPolicyCompilation(this IEndpointRouteBuilder endpoints) - { - endpoints.MapPost(CompileRoute, CompilePolicy) - .WithName("CompilePolicy") - .WithSummary("Compile and lint a policy DSL document.") - .WithDescription("Compiles a stella-dsl@1 policy document and returns deterministic digest and statistics.") - .Produces<PolicyCompileResponse>(StatusCodes.Status200OK) - .Produces<ProblemHttpResult>(StatusCodes.Status400BadRequest) - .RequireAuthorization(); // scopes enforced by policy middleware. - - return endpoints; - } - - private static IResult CompilePolicy( - [FromRoute] string policyId, - [FromRoute] int version, - [FromBody] PolicyCompileRequest request, - PolicyCompilationService compilationService) - { - if (request is null) - { - return Results.BadRequest(BuildProblem("ERR_POL_001", "Request body missing.", policyId, version)); - } - - var result = compilationService.Compile(request); - if (!result.Success) - { - return Results.BadRequest(BuildProblem("ERR_POL_001", "Policy compilation failed.", policyId, version, result.Diagnostics)); - } - - var response = new PolicyCompileResponse( - result.Digest!, - result.Statistics ?? new PolicyCompilationStatistics(0, ImmutableDictionary<string, int>.Empty), - ConvertDiagnostics(result.Diagnostics)); - return Results.Ok(response); - } - - private static PolicyProblemDetails BuildProblem(string code, string message, string policyId, int version, ImmutableArray<PolicyIssue>? diagnostics = null) - { - var problem = new PolicyProblemDetails - { - Code = code, - Title = "Policy compilation error", - Detail = message, - PolicyId = policyId, - PolicyVersion = version - }; - - if (diagnostics is { Length: > 0 } diag) - { - problem.Diagnostics = diag; - } - - return problem; - } - - private static ImmutableArray<PolicyDiagnosticDto> ConvertDiagnostics(ImmutableArray<PolicyIssue> issues) - { - if (issues.IsDefaultOrEmpty) - { - return ImmutableArray<PolicyDiagnosticDto>.Empty; - } - - var builder = ImmutableArray.CreateBuilder<PolicyDiagnosticDto>(issues.Length); - foreach (var issue in issues) - { - if (issue.Severity != PolicyIssueSeverity.Warning) - { - continue; - } - - builder.Add(new PolicyDiagnosticDto(issue.Code, issue.Message, issue.Path)); - } - - return builder.ToImmutable(); - } - - private sealed class PolicyProblemDetails : ProblemDetails - { - public string Code { get; set; } = "ERR_POL_001"; - - public string? 
PolicyId { get; set; } - - public int PolicyVersion { get; set; } - - public ImmutableArray<PolicyIssue> Diagnostics { get; set; } = ImmutableArray<PolicyIssue>.Empty; - } -} - -internal sealed record PolicyCompileResponse( - string Digest, - PolicyCompilationStatistics Statistics, - ImmutableArray<PolicyDiagnosticDto> Warnings); - -internal sealed record PolicyDiagnosticDto(string Code, string Message, string Path); +using Microsoft.AspNetCore.Http.HttpResults; +using Microsoft.AspNetCore.Mvc; +using StellaOps.Policy; +using StellaOps.Policy.Engine.Services; +using System.Collections.Immutable; + +namespace StellaOps.Policy.Engine.Endpoints; + +internal static class PolicyCompilationEndpoints +{ + private const string CompileRoute = "/api/policy/policies/{policyId}/versions/{version}:compile"; + + public static IEndpointRouteBuilder MapPolicyCompilation(this IEndpointRouteBuilder endpoints) + { + endpoints.MapPost(CompileRoute, CompilePolicy) + .WithName("CompilePolicy") + .WithSummary("Compile and lint a policy DSL document.") + .WithDescription("Compiles a stella-dsl@1 policy document and returns deterministic digest and statistics.") + .Produces<PolicyCompileResponse>(StatusCodes.Status200OK) + .Produces<ProblemHttpResult>(StatusCodes.Status400BadRequest) + .RequireAuthorization(); // scopes enforced by policy middleware. + + return endpoints; + } + + private static IResult CompilePolicy( + [FromRoute] string policyId, + [FromRoute] int version, + [FromBody] PolicyCompileRequest request, + PolicyCompilationService compilationService) + { + if (request is null) + { + return Results.BadRequest(BuildProblem("ERR_POL_001", "Request body missing.", policyId, version)); + } + + var result = compilationService.Compile(request); + if (!result.Success) + { + return Results.BadRequest(BuildProblem("ERR_POL_001", "Policy compilation failed.", policyId, version, result.Diagnostics)); + } + + var response = new PolicyCompileResponse( + result.Digest!, + result.Statistics ?? new PolicyCompilationStatistics(0, ImmutableDictionary<string, int>.Empty), + ConvertDiagnostics(result.Diagnostics)); + return Results.Ok(response); + } + + private static PolicyProblemDetails BuildProblem(string code, string message, string policyId, int version, ImmutableArray<PolicyIssue>? diagnostics = null) + { + var problem = new PolicyProblemDetails + { + Code = code, + Title = "Policy compilation error", + Detail = message, + PolicyId = policyId, + PolicyVersion = version + }; + + if (diagnostics is { Length: > 0 } diag) + { + problem.Diagnostics = diag; + } + + return problem; + } + + private static ImmutableArray<PolicyDiagnosticDto> ConvertDiagnostics(ImmutableArray<PolicyIssue> issues) + { + if (issues.IsDefaultOrEmpty) + { + return ImmutableArray<PolicyDiagnosticDto>.Empty; + } + + var builder = ImmutableArray.CreateBuilder<PolicyDiagnosticDto>(issues.Length); + foreach (var issue in issues) + { + if (issue.Severity != PolicyIssueSeverity.Warning) + { + continue; + } + + builder.Add(new PolicyDiagnosticDto(issue.Code, issue.Message, issue.Path)); + } + + return builder.ToImmutable(); + } + + private sealed class PolicyProblemDetails : ProblemDetails + { + public string Code { get; set; } = "ERR_POL_001"; + + public string? 
PolicyId { get; set; } + + public int PolicyVersion { get; set; } + + public ImmutableArray<PolicyIssue> Diagnostics { get; set; } = ImmutableArray<PolicyIssue>.Empty; + } +} + +internal sealed record PolicyCompileResponse( + string Digest, + PolicyCompilationStatistics Statistics, + ImmutableArray<PolicyDiagnosticDto> Warnings); + +internal sealed record PolicyDiagnosticDto(string Code, string Message, string Path); diff --git a/src/StellaOps.Policy.Engine/Endpoints/PolicyPackEndpoints.cs b/src/Policy/StellaOps.Policy.Engine/Endpoints/PolicyPackEndpoints.cs similarity index 97% rename from src/StellaOps.Policy.Engine/Endpoints/PolicyPackEndpoints.cs rename to src/Policy/StellaOps.Policy.Engine/Endpoints/PolicyPackEndpoints.cs index 41e020d4..f71f147a 100644 --- a/src/StellaOps.Policy.Engine/Endpoints/PolicyPackEndpoints.cs +++ b/src/Policy/StellaOps.Policy.Engine/Endpoints/PolicyPackEndpoints.cs @@ -1,267 +1,267 @@ -using System.Security.Claims; -using Microsoft.AspNetCore.Http.HttpResults; -using Microsoft.AspNetCore.Mvc; -using StellaOps.Auth.Abstractions; -using StellaOps.Policy.Engine.Domain; -using StellaOps.Policy.Engine.Services; - -namespace StellaOps.Policy.Engine.Endpoints; - -internal static class PolicyPackEndpoints -{ - public static IEndpointRouteBuilder MapPolicyPacks(this IEndpointRouteBuilder endpoints) - { - var group = endpoints.MapGroup("/api/policy/packs") - .RequireAuthorization() - .WithTags("Policy Packs"); - - group.MapPost(string.Empty, CreatePack) - .WithName("CreatePolicyPack") - .WithSummary("Create a new policy pack container.") - .Produces<PolicyPackDto>(StatusCodes.Status201Created); - - group.MapGet(string.Empty, ListPacks) - .WithName("ListPolicyPacks") - .WithSummary("List policy packs for the current tenant.") - .Produces<IReadOnlyList<PolicyPackSummaryDto>>(StatusCodes.Status200OK); - - group.MapPost("/{packId}/revisions", CreateRevision) - .WithName("CreatePolicyRevision") - .WithSummary("Create or update policy revision metadata.") - .Produces<PolicyRevisionDto>(StatusCodes.Status201Created) - .Produces<ProblemHttpResult>(StatusCodes.Status400BadRequest); - - group.MapPost("/{packId}/revisions/{version:int}:activate", ActivateRevision) - .WithName("ActivatePolicyRevision") - .WithSummary("Activate an approved policy revision, enforcing two-person approval when required.") - .Produces<PolicyRevisionActivationResponse>(StatusCodes.Status200OK) - .Produces<PolicyRevisionActivationResponse>(StatusCodes.Status202Accepted) - .Produces<ProblemHttpResult>(StatusCodes.Status400BadRequest) - .Produces<ProblemHttpResult>(StatusCodes.Status404NotFound); - - return endpoints; - } - - private static async Task<IResult> CreatePack( - HttpContext context, - [FromBody] CreatePolicyPackRequest request, - IPolicyPackRepository repository, - CancellationToken cancellationToken) - { - var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyEdit); - if (scopeResult is not null) - { - return scopeResult; - } - - if (request is null) - { - return Results.BadRequest(new ProblemDetails - { - Title = "Invalid request", - Detail = "Request body is required.", - Status = StatusCodes.Status400BadRequest - }); - } - - var packId = string.IsNullOrWhiteSpace(request.PackId) - ? 
$"pack-{Guid.NewGuid():n}" - : request.PackId.Trim(); - - var pack = await repository.CreateAsync(packId, request.DisplayName?.Trim(), cancellationToken).ConfigureAwait(false); - var dto = PolicyPackMapper.ToDto(pack); - return Results.Created($"/api/policy/packs/{dto.PackId}", dto); - } - - private static async Task<IResult> ListPacks( - HttpContext context, - IPolicyPackRepository repository, - CancellationToken cancellationToken) - { - var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead); - if (scopeResult is not null) - { - return scopeResult; - } - - var packs = await repository.ListAsync(cancellationToken).ConfigureAwait(false); - var summaries = packs.Select(PolicyPackMapper.ToSummaryDto).ToArray(); - return Results.Ok(summaries); - } - - private static async Task<IResult> CreateRevision( - HttpContext context, - [FromRoute] string packId, - [FromBody] CreatePolicyRevisionRequest request, - IPolicyPackRepository repository, - CancellationToken cancellationToken) - { - var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyEdit); - if (scopeResult is not null) - { - return scopeResult; - } - - if (request is null) - { - return Results.BadRequest(new ProblemDetails - { - Title = "Invalid request", - Detail = "Request body is required.", - Status = StatusCodes.Status400BadRequest - }); - } - - if (request.InitialStatus is not (PolicyRevisionStatus.Draft or PolicyRevisionStatus.Approved)) - { - return Results.BadRequest(new ProblemDetails - { - Title = "Invalid status", - Detail = "Only Draft or Approved statuses are supported for new revisions.", - Status = StatusCodes.Status400BadRequest - }); - } - - var revision = await repository.UpsertRevisionAsync( - packId, - request.Version ?? 0, - request.RequiresTwoPersonApproval, - request.InitialStatus, - cancellationToken).ConfigureAwait(false); - - return Results.Created( - $"/api/policy/packs/{packId}/revisions/{revision.Version}", - PolicyPackMapper.ToDto(packId, revision)); - } - - private static async Task<IResult> ActivateRevision( - HttpContext context, - [FromRoute] string packId, - [FromRoute] int version, - [FromBody] ActivatePolicyRevisionRequest request, - IPolicyPackRepository repository, - CancellationToken cancellationToken) - { - var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyActivate); - if (scopeResult is not null) - { - return scopeResult; - } - - if (request is null) - { - return Results.BadRequest(new ProblemDetails - { - Title = "Invalid request", - Detail = "Request body is required.", - Status = StatusCodes.Status400BadRequest - }); - } - - var actorId = ResolveActorId(context); - if (actorId is null) - { - return Results.Problem("Actor identity required.", statusCode: StatusCodes.Status401Unauthorized); - } - - var result = await repository.RecordActivationAsync( - packId, - version, - actorId, - DateTimeOffset.UtcNow, - request.Comment, - cancellationToken).ConfigureAwait(false); - - return result.Status switch - { - PolicyActivationResultStatus.PackNotFound => Results.NotFound(new ProblemDetails - { - Title = "Policy pack not found", - Status = StatusCodes.Status404NotFound - }), - PolicyActivationResultStatus.RevisionNotFound => Results.NotFound(new ProblemDetails - { - Title = "Policy revision not found", - Status = StatusCodes.Status404NotFound - }), - PolicyActivationResultStatus.NotApproved => Results.BadRequest(new ProblemDetails - { - Title = "Revision not approved", - Detail = "Only approved revisions may be 
activated.", - Status = StatusCodes.Status400BadRequest - }), - PolicyActivationResultStatus.DuplicateApproval => Results.BadRequest(new ProblemDetails - { - Title = "Approval already recorded", - Detail = "This approver has already approved activation.", - Status = StatusCodes.Status400BadRequest - }), - PolicyActivationResultStatus.PendingSecondApproval => Results.Accepted( - $"/api/policy/packs/{packId}/revisions/{version}", - new PolicyRevisionActivationResponse("pending_second_approval", PolicyPackMapper.ToDto(packId, result.Revision!))), - PolicyActivationResultStatus.Activated => Results.Ok(new PolicyRevisionActivationResponse("activated", PolicyPackMapper.ToDto(packId, result.Revision!))), - PolicyActivationResultStatus.AlreadyActive => Results.Ok(new PolicyRevisionActivationResponse("already_active", PolicyPackMapper.ToDto(packId, result.Revision!))), - _ => Results.BadRequest(new ProblemDetails - { - Title = "Activation failed", - Detail = "Unknown activation result.", - Status = StatusCodes.Status400BadRequest - }) - }; - } - - private static string? ResolveActorId(HttpContext context) - { - var user = context.User; - var actor = user?.FindFirst(ClaimTypes.NameIdentifier)?.Value - ?? user?.FindFirst(ClaimTypes.Upn)?.Value - ?? user?.FindFirst("sub")?.Value; - - if (!string.IsNullOrWhiteSpace(actor)) - { - return actor; - } - - if (context.Request.Headers.TryGetValue("X-StellaOps-Actor", out var header) && !string.IsNullOrWhiteSpace(header)) - { - return header.ToString(); - } - - return null; - } -} - -internal static class PolicyPackMapper -{ - public static PolicyPackDto ToDto(PolicyPackRecord record) - => new(record.PackId, record.DisplayName, record.CreatedAt, record.GetRevisions().Select(r => ToDto(record.PackId, r)).ToArray()); - - public static PolicyPackSummaryDto ToSummaryDto(PolicyPackRecord record) - => new(record.PackId, record.DisplayName, record.CreatedAt, record.GetRevisions().Select(r => r.Version).ToArray()); - - public static PolicyRevisionDto ToDto(string packId, PolicyRevisionRecord revision) - => new( - packId, - revision.Version, - revision.Status.ToString(), - revision.RequiresTwoPersonApproval, - revision.CreatedAt, - revision.ActivatedAt, - revision.Approvals.Select(a => new PolicyActivationApprovalDto(a.ActorId, a.ApprovedAt, a.Comment)).ToArray()); -} - -internal sealed record CreatePolicyPackRequest(string? PackId, string? DisplayName); - -internal sealed record PolicyPackDto(string PackId, string? DisplayName, DateTimeOffset CreatedAt, IReadOnlyList<PolicyRevisionDto> Revisions); - -internal sealed record PolicyPackSummaryDto(string PackId, string? DisplayName, DateTimeOffset CreatedAt, IReadOnlyList<int> Versions); - -internal sealed record CreatePolicyRevisionRequest(int? Version, bool RequiresTwoPersonApproval, PolicyRevisionStatus InitialStatus = PolicyRevisionStatus.Approved); - -internal sealed record PolicyRevisionDto(string PackId, int Version, string Status, bool RequiresTwoPersonApproval, DateTimeOffset CreatedAt, DateTimeOffset? ActivatedAt, IReadOnlyList<PolicyActivationApprovalDto> Approvals); - -internal sealed record PolicyActivationApprovalDto(string ActorId, DateTimeOffset ApprovedAt, string? Comment); - -internal sealed record ActivatePolicyRevisionRequest(string? 
Comment); - -internal sealed record PolicyRevisionActivationResponse(string Status, PolicyRevisionDto Revision); +using System.Security.Claims; +using Microsoft.AspNetCore.Http.HttpResults; +using Microsoft.AspNetCore.Mvc; +using StellaOps.Auth.Abstractions; +using StellaOps.Policy.Engine.Domain; +using StellaOps.Policy.Engine.Services; + +namespace StellaOps.Policy.Engine.Endpoints; + +internal static class PolicyPackEndpoints +{ + public static IEndpointRouteBuilder MapPolicyPacks(this IEndpointRouteBuilder endpoints) + { + var group = endpoints.MapGroup("/api/policy/packs") + .RequireAuthorization() + .WithTags("Policy Packs"); + + group.MapPost(string.Empty, CreatePack) + .WithName("CreatePolicyPack") + .WithSummary("Create a new policy pack container.") + .Produces<PolicyPackDto>(StatusCodes.Status201Created); + + group.MapGet(string.Empty, ListPacks) + .WithName("ListPolicyPacks") + .WithSummary("List policy packs for the current tenant.") + .Produces<IReadOnlyList<PolicyPackSummaryDto>>(StatusCodes.Status200OK); + + group.MapPost("/{packId}/revisions", CreateRevision) + .WithName("CreatePolicyRevision") + .WithSummary("Create or update policy revision metadata.") + .Produces<PolicyRevisionDto>(StatusCodes.Status201Created) + .Produces<ProblemHttpResult>(StatusCodes.Status400BadRequest); + + group.MapPost("/{packId}/revisions/{version:int}:activate", ActivateRevision) + .WithName("ActivatePolicyRevision") + .WithSummary("Activate an approved policy revision, enforcing two-person approval when required.") + .Produces<PolicyRevisionActivationResponse>(StatusCodes.Status200OK) + .Produces<PolicyRevisionActivationResponse>(StatusCodes.Status202Accepted) + .Produces<ProblemHttpResult>(StatusCodes.Status400BadRequest) + .Produces<ProblemHttpResult>(StatusCodes.Status404NotFound); + + return endpoints; + } + + private static async Task<IResult> CreatePack( + HttpContext context, + [FromBody] CreatePolicyPackRequest request, + IPolicyPackRepository repository, + CancellationToken cancellationToken) + { + var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyEdit); + if (scopeResult is not null) + { + return scopeResult; + } + + if (request is null) + { + return Results.BadRequest(new ProblemDetails + { + Title = "Invalid request", + Detail = "Request body is required.", + Status = StatusCodes.Status400BadRequest + }); + } + + var packId = string.IsNullOrWhiteSpace(request.PackId) + ? 
$"pack-{Guid.NewGuid():n}" + : request.PackId.Trim(); + + var pack = await repository.CreateAsync(packId, request.DisplayName?.Trim(), cancellationToken).ConfigureAwait(false); + var dto = PolicyPackMapper.ToDto(pack); + return Results.Created($"/api/policy/packs/{dto.PackId}", dto); + } + + private static async Task<IResult> ListPacks( + HttpContext context, + IPolicyPackRepository repository, + CancellationToken cancellationToken) + { + var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyRead); + if (scopeResult is not null) + { + return scopeResult; + } + + var packs = await repository.ListAsync(cancellationToken).ConfigureAwait(false); + var summaries = packs.Select(PolicyPackMapper.ToSummaryDto).ToArray(); + return Results.Ok(summaries); + } + + private static async Task<IResult> CreateRevision( + HttpContext context, + [FromRoute] string packId, + [FromBody] CreatePolicyRevisionRequest request, + IPolicyPackRepository repository, + CancellationToken cancellationToken) + { + var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyEdit); + if (scopeResult is not null) + { + return scopeResult; + } + + if (request is null) + { + return Results.BadRequest(new ProblemDetails + { + Title = "Invalid request", + Detail = "Request body is required.", + Status = StatusCodes.Status400BadRequest + }); + } + + if (request.InitialStatus is not (PolicyRevisionStatus.Draft or PolicyRevisionStatus.Approved)) + { + return Results.BadRequest(new ProblemDetails + { + Title = "Invalid status", + Detail = "Only Draft or Approved statuses are supported for new revisions.", + Status = StatusCodes.Status400BadRequest + }); + } + + var revision = await repository.UpsertRevisionAsync( + packId, + request.Version ?? 0, + request.RequiresTwoPersonApproval, + request.InitialStatus, + cancellationToken).ConfigureAwait(false); + + return Results.Created( + $"/api/policy/packs/{packId}/revisions/{revision.Version}", + PolicyPackMapper.ToDto(packId, revision)); + } + + private static async Task<IResult> ActivateRevision( + HttpContext context, + [FromRoute] string packId, + [FromRoute] int version, + [FromBody] ActivatePolicyRevisionRequest request, + IPolicyPackRepository repository, + CancellationToken cancellationToken) + { + var scopeResult = ScopeAuthorization.RequireScope(context, StellaOpsScopes.PolicyActivate); + if (scopeResult is not null) + { + return scopeResult; + } + + if (request is null) + { + return Results.BadRequest(new ProblemDetails + { + Title = "Invalid request", + Detail = "Request body is required.", + Status = StatusCodes.Status400BadRequest + }); + } + + var actorId = ResolveActorId(context); + if (actorId is null) + { + return Results.Problem("Actor identity required.", statusCode: StatusCodes.Status401Unauthorized); + } + + var result = await repository.RecordActivationAsync( + packId, + version, + actorId, + DateTimeOffset.UtcNow, + request.Comment, + cancellationToken).ConfigureAwait(false); + + return result.Status switch + { + PolicyActivationResultStatus.PackNotFound => Results.NotFound(new ProblemDetails + { + Title = "Policy pack not found", + Status = StatusCodes.Status404NotFound + }), + PolicyActivationResultStatus.RevisionNotFound => Results.NotFound(new ProblemDetails + { + Title = "Policy revision not found", + Status = StatusCodes.Status404NotFound + }), + PolicyActivationResultStatus.NotApproved => Results.BadRequest(new ProblemDetails + { + Title = "Revision not approved", + Detail = "Only approved revisions may be 
activated.", + Status = StatusCodes.Status400BadRequest + }), + PolicyActivationResultStatus.DuplicateApproval => Results.BadRequest(new ProblemDetails + { + Title = "Approval already recorded", + Detail = "This approver has already approved activation.", + Status = StatusCodes.Status400BadRequest + }), + PolicyActivationResultStatus.PendingSecondApproval => Results.Accepted( + $"/api/policy/packs/{packId}/revisions/{version}", + new PolicyRevisionActivationResponse("pending_second_approval", PolicyPackMapper.ToDto(packId, result.Revision!))), + PolicyActivationResultStatus.Activated => Results.Ok(new PolicyRevisionActivationResponse("activated", PolicyPackMapper.ToDto(packId, result.Revision!))), + PolicyActivationResultStatus.AlreadyActive => Results.Ok(new PolicyRevisionActivationResponse("already_active", PolicyPackMapper.ToDto(packId, result.Revision!))), + _ => Results.BadRequest(new ProblemDetails + { + Title = "Activation failed", + Detail = "Unknown activation result.", + Status = StatusCodes.Status400BadRequest + }) + }; + } + + private static string? ResolveActorId(HttpContext context) + { + var user = context.User; + var actor = user?.FindFirst(ClaimTypes.NameIdentifier)?.Value + ?? user?.FindFirst(ClaimTypes.Upn)?.Value + ?? user?.FindFirst("sub")?.Value; + + if (!string.IsNullOrWhiteSpace(actor)) + { + return actor; + } + + if (context.Request.Headers.TryGetValue("X-StellaOps-Actor", out var header) && !string.IsNullOrWhiteSpace(header)) + { + return header.ToString(); + } + + return null; + } +} + +internal static class PolicyPackMapper +{ + public static PolicyPackDto ToDto(PolicyPackRecord record) + => new(record.PackId, record.DisplayName, record.CreatedAt, record.GetRevisions().Select(r => ToDto(record.PackId, r)).ToArray()); + + public static PolicyPackSummaryDto ToSummaryDto(PolicyPackRecord record) + => new(record.PackId, record.DisplayName, record.CreatedAt, record.GetRevisions().Select(r => r.Version).ToArray()); + + public static PolicyRevisionDto ToDto(string packId, PolicyRevisionRecord revision) + => new( + packId, + revision.Version, + revision.Status.ToString(), + revision.RequiresTwoPersonApproval, + revision.CreatedAt, + revision.ActivatedAt, + revision.Approvals.Select(a => new PolicyActivationApprovalDto(a.ActorId, a.ApprovedAt, a.Comment)).ToArray()); +} + +internal sealed record CreatePolicyPackRequest(string? PackId, string? DisplayName); + +internal sealed record PolicyPackDto(string PackId, string? DisplayName, DateTimeOffset CreatedAt, IReadOnlyList<PolicyRevisionDto> Revisions); + +internal sealed record PolicyPackSummaryDto(string PackId, string? DisplayName, DateTimeOffset CreatedAt, IReadOnlyList<int> Versions); + +internal sealed record CreatePolicyRevisionRequest(int? Version, bool RequiresTwoPersonApproval, PolicyRevisionStatus InitialStatus = PolicyRevisionStatus.Approved); + +internal sealed record PolicyRevisionDto(string PackId, int Version, string Status, bool RequiresTwoPersonApproval, DateTimeOffset CreatedAt, DateTimeOffset? ActivatedAt, IReadOnlyList<PolicyActivationApprovalDto> Approvals); + +internal sealed record PolicyActivationApprovalDto(string ActorId, DateTimeOffset ApprovedAt, string? Comment); + +internal sealed record ActivatePolicyRevisionRequest(string? 
Comment); + +internal sealed record PolicyRevisionActivationResponse(string Status, PolicyRevisionDto Revision); diff --git a/src/StellaOps.Policy.Engine/Evaluation/PolicyEvaluationContext.cs b/src/Policy/StellaOps.Policy.Engine/Evaluation/PolicyEvaluationContext.cs similarity index 97% rename from src/StellaOps.Policy.Engine/Evaluation/PolicyEvaluationContext.cs rename to src/Policy/StellaOps.Policy.Engine/Evaluation/PolicyEvaluationContext.cs index 3776743c..b17995f7 100644 --- a/src/StellaOps.Policy.Engine/Evaluation/PolicyEvaluationContext.cs +++ b/src/Policy/StellaOps.Policy.Engine/Evaluation/PolicyEvaluationContext.cs @@ -1,142 +1,142 @@ -using System; -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Linq; -using StellaOps.Policy; -using StellaOps.Policy.Engine.Compilation; - -namespace StellaOps.Policy.Engine.Evaluation; - -internal sealed record PolicyEvaluationRequest( - PolicyIrDocument Document, - PolicyEvaluationContext Context); - -internal sealed record PolicyEvaluationContext( - PolicyEvaluationSeverity Severity, - PolicyEvaluationEnvironment Environment, - PolicyEvaluationAdvisory Advisory, - PolicyEvaluationVexEvidence Vex, - PolicyEvaluationSbom Sbom, - PolicyEvaluationExceptions Exceptions); - -internal sealed record PolicyEvaluationSeverity(string Normalized, decimal? Score = null); - -internal sealed record PolicyEvaluationEnvironment( - ImmutableDictionary<string, string> Properties) -{ - public string? Get(string key) => Properties.TryGetValue(key, out var value) ? value : null; -} - -internal sealed record PolicyEvaluationAdvisory( - string Source, - ImmutableDictionary<string, string> Metadata); - -internal sealed record PolicyEvaluationVexEvidence( - ImmutableArray<PolicyEvaluationVexStatement> Statements) -{ - public static readonly PolicyEvaluationVexEvidence Empty = new(ImmutableArray<PolicyEvaluationVexStatement>.Empty); -} - -internal sealed record PolicyEvaluationVexStatement( - string Status, - string Justification, - string StatementId, - DateTimeOffset? Timestamp = null); - -internal sealed record PolicyEvaluationSbom(ImmutableHashSet<string> Tags) -{ - public bool HasTag(string tag) => Tags.Contains(tag); -} - -internal sealed record PolicyEvaluationResult( - bool Matched, - string Status, - string? Severity, - string? RuleName, - int? Priority, - ImmutableDictionary<string, string> Annotations, - ImmutableArray<string> Warnings, - PolicyExceptionApplication? AppliedException) -{ - public static PolicyEvaluationResult CreateDefault(string? 
severity) => new( - Matched: false, - Status: "affected", - Severity: severity, - RuleName: null, - Priority: null, - Annotations: ImmutableDictionary<string, string>.Empty, - Warnings: ImmutableArray<string>.Empty, - AppliedException: null); -} - -internal sealed record PolicyEvaluationExceptions( - ImmutableDictionary<string, PolicyExceptionEffect> Effects, - ImmutableArray<PolicyEvaluationExceptionInstance> Instances) -{ - public static readonly PolicyEvaluationExceptions Empty = new( - ImmutableDictionary<string, PolicyExceptionEffect>.Empty, - ImmutableArray<PolicyEvaluationExceptionInstance>.Empty); - - public bool IsEmpty => Instances.IsDefaultOrEmpty || Instances.Length == 0; -} - -internal sealed record PolicyEvaluationExceptionInstance( - string Id, - string EffectId, - PolicyEvaluationExceptionScope Scope, - DateTimeOffset CreatedAt, - ImmutableDictionary<string, string> Metadata); - -internal sealed record PolicyEvaluationExceptionScope( - ImmutableHashSet<string> RuleNames, - ImmutableHashSet<string> Severities, - ImmutableHashSet<string> Sources, - ImmutableHashSet<string> Tags) -{ - public static PolicyEvaluationExceptionScope Empty { get; } = new( - ImmutableHashSet<string>.Empty.WithComparer(StringComparer.OrdinalIgnoreCase), - ImmutableHashSet<string>.Empty.WithComparer(StringComparer.OrdinalIgnoreCase), - ImmutableHashSet<string>.Empty.WithComparer(StringComparer.OrdinalIgnoreCase), - ImmutableHashSet<string>.Empty.WithComparer(StringComparer.OrdinalIgnoreCase)); - - public bool IsEmpty => RuleNames.Count == 0 - && Severities.Count == 0 - && Sources.Count == 0 - && Tags.Count == 0; - - public static PolicyEvaluationExceptionScope Create( - IEnumerable<string>? ruleNames = null, - IEnumerable<string>? severities = null, - IEnumerable<string>? sources = null, - IEnumerable<string>? tags = null) - { - return new PolicyEvaluationExceptionScope( - Normalize(ruleNames), - Normalize(severities), - Normalize(sources), - Normalize(tags)); - } - - private static ImmutableHashSet<string> Normalize(IEnumerable<string>? values) - { - if (values is null) - { - return ImmutableHashSet<string>.Empty.WithComparer(StringComparer.OrdinalIgnoreCase); - } - - return values - .Where(static value => !string.IsNullOrWhiteSpace(value)) - .Select(static value => value.Trim()) - .ToImmutableHashSet(StringComparer.OrdinalIgnoreCase); - } -} - -internal sealed record PolicyExceptionApplication( - string ExceptionId, - string EffectId, - PolicyExceptionEffectType EffectType, - string OriginalStatus, - string? OriginalSeverity, - string AppliedStatus, - string? AppliedSeverity, - ImmutableDictionary<string, string> Metadata); +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Linq; +using StellaOps.Policy; +using StellaOps.Policy.Engine.Compilation; + +namespace StellaOps.Policy.Engine.Evaluation; + +internal sealed record PolicyEvaluationRequest( + PolicyIrDocument Document, + PolicyEvaluationContext Context); + +internal sealed record PolicyEvaluationContext( + PolicyEvaluationSeverity Severity, + PolicyEvaluationEnvironment Environment, + PolicyEvaluationAdvisory Advisory, + PolicyEvaluationVexEvidence Vex, + PolicyEvaluationSbom Sbom, + PolicyEvaluationExceptions Exceptions); + +internal sealed record PolicyEvaluationSeverity(string Normalized, decimal? Score = null); + +internal sealed record PolicyEvaluationEnvironment( + ImmutableDictionary<string, string> Properties) +{ + public string? 
Get(string key) => Properties.TryGetValue(key, out var value) ? value : null; +} + +internal sealed record PolicyEvaluationAdvisory( + string Source, + ImmutableDictionary<string, string> Metadata); + +internal sealed record PolicyEvaluationVexEvidence( + ImmutableArray<PolicyEvaluationVexStatement> Statements) +{ + public static readonly PolicyEvaluationVexEvidence Empty = new(ImmutableArray<PolicyEvaluationVexStatement>.Empty); +} + +internal sealed record PolicyEvaluationVexStatement( + string Status, + string Justification, + string StatementId, + DateTimeOffset? Timestamp = null); + +internal sealed record PolicyEvaluationSbom(ImmutableHashSet<string> Tags) +{ + public bool HasTag(string tag) => Tags.Contains(tag); +} + +internal sealed record PolicyEvaluationResult( + bool Matched, + string Status, + string? Severity, + string? RuleName, + int? Priority, + ImmutableDictionary<string, string> Annotations, + ImmutableArray<string> Warnings, + PolicyExceptionApplication? AppliedException) +{ + public static PolicyEvaluationResult CreateDefault(string? severity) => new( + Matched: false, + Status: "affected", + Severity: severity, + RuleName: null, + Priority: null, + Annotations: ImmutableDictionary<string, string>.Empty, + Warnings: ImmutableArray<string>.Empty, + AppliedException: null); +} + +internal sealed record PolicyEvaluationExceptions( + ImmutableDictionary<string, PolicyExceptionEffect> Effects, + ImmutableArray<PolicyEvaluationExceptionInstance> Instances) +{ + public static readonly PolicyEvaluationExceptions Empty = new( + ImmutableDictionary<string, PolicyExceptionEffect>.Empty, + ImmutableArray<PolicyEvaluationExceptionInstance>.Empty); + + public bool IsEmpty => Instances.IsDefaultOrEmpty || Instances.Length == 0; +} + +internal sealed record PolicyEvaluationExceptionInstance( + string Id, + string EffectId, + PolicyEvaluationExceptionScope Scope, + DateTimeOffset CreatedAt, + ImmutableDictionary<string, string> Metadata); + +internal sealed record PolicyEvaluationExceptionScope( + ImmutableHashSet<string> RuleNames, + ImmutableHashSet<string> Severities, + ImmutableHashSet<string> Sources, + ImmutableHashSet<string> Tags) +{ + public static PolicyEvaluationExceptionScope Empty { get; } = new( + ImmutableHashSet<string>.Empty.WithComparer(StringComparer.OrdinalIgnoreCase), + ImmutableHashSet<string>.Empty.WithComparer(StringComparer.OrdinalIgnoreCase), + ImmutableHashSet<string>.Empty.WithComparer(StringComparer.OrdinalIgnoreCase), + ImmutableHashSet<string>.Empty.WithComparer(StringComparer.OrdinalIgnoreCase)); + + public bool IsEmpty => RuleNames.Count == 0 + && Severities.Count == 0 + && Sources.Count == 0 + && Tags.Count == 0; + + public static PolicyEvaluationExceptionScope Create( + IEnumerable<string>? ruleNames = null, + IEnumerable<string>? severities = null, + IEnumerable<string>? sources = null, + IEnumerable<string>? tags = null) + { + return new PolicyEvaluationExceptionScope( + Normalize(ruleNames), + Normalize(severities), + Normalize(sources), + Normalize(tags)); + } + + private static ImmutableHashSet<string> Normalize(IEnumerable<string>? 
values) + { + if (values is null) + { + return ImmutableHashSet<string>.Empty.WithComparer(StringComparer.OrdinalIgnoreCase); + } + + return values + .Where(static value => !string.IsNullOrWhiteSpace(value)) + .Select(static value => value.Trim()) + .ToImmutableHashSet(StringComparer.OrdinalIgnoreCase); + } +} + +internal sealed record PolicyExceptionApplication( + string ExceptionId, + string EffectId, + PolicyExceptionEffectType EffectType, + string OriginalStatus, + string? OriginalSeverity, + string AppliedStatus, + string? AppliedSeverity, + ImmutableDictionary<string, string> Metadata); diff --git a/src/StellaOps.Policy.Engine/Evaluation/PolicyEvaluator.cs b/src/Policy/StellaOps.Policy.Engine/Evaluation/PolicyEvaluator.cs similarity index 97% rename from src/StellaOps.Policy.Engine/Evaluation/PolicyEvaluator.cs rename to src/Policy/StellaOps.Policy.Engine/Evaluation/PolicyEvaluator.cs index e7ea301e..16ea7a5f 100644 --- a/src/StellaOps.Policy.Engine/Evaluation/PolicyEvaluator.cs +++ b/src/Policy/StellaOps.Policy.Engine/Evaluation/PolicyEvaluator.cs @@ -1,420 +1,420 @@ -using System; -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Globalization; -using System.Linq; -using StellaOps.Policy; -using StellaOps.Policy.Engine.Compilation; - -namespace StellaOps.Policy.Engine.Evaluation; - -/// <summary> -/// Deterministically evaluates compiled policy IR against advisory/VEX/SBOM inputs. -/// </summary> -internal sealed class PolicyEvaluator -{ - public PolicyEvaluationResult Evaluate(PolicyEvaluationRequest request) - { - if (request is null) - { - throw new ArgumentNullException(nameof(request)); - } - - if (request.Document is null) - { - throw new ArgumentNullException(nameof(request.Document)); - } - - var evaluator = new PolicyExpressionEvaluator(request.Context); - var orderedRules = request.Document.Rules - .Select(static (rule, index) => new { rule, index }) - .OrderBy(x => x.rule.Priority) - .ThenBy(x => x.index) - .ToImmutableArray(); - - foreach (var entry in orderedRules) - { - var rule = entry.rule; - if (!evaluator.EvaluateBoolean(rule.When)) - { - continue; - } - - var runtime = new PolicyRuntimeState(request.Context.Severity.Normalized); - foreach (var action in rule.ThenActions) - { - ApplyAction(rule.Name, action, evaluator, runtime); - } - - if (runtime.Status is null) - { - runtime.Status = "affected"; - } - - var baseResult = new PolicyEvaluationResult( - Matched: true, - Status: runtime.Status, - Severity: runtime.Severity, - RuleName: rule.Name, - Priority: rule.Priority, - Annotations: runtime.Annotations.ToImmutableDictionary(StringComparer.OrdinalIgnoreCase), - Warnings: runtime.Warnings.ToImmutableArray(), - AppliedException: null); - - return ApplyExceptions(request, baseResult); - } - - var defaultResult = PolicyEvaluationResult.CreateDefault(request.Context.Severity.Normalized); - return ApplyExceptions(request, defaultResult); - } - - private static void ApplyAction( - string ruleName, - PolicyIrAction action, - PolicyExpressionEvaluator evaluator, - PolicyRuntimeState runtime) - { - switch (action) - { - case PolicyIrAssignmentAction assign: - ApplyAssignment(assign, evaluator, runtime); - break; - case PolicyIrAnnotateAction annotate: - ApplyAnnotate(annotate, evaluator, runtime); - break; - case PolicyIrWarnAction warn: - ApplyWarn(warn, evaluator, runtime); - break; - case PolicyIrEscalateAction escalate: - ApplyEscalate(escalate, evaluator, runtime); - break; - case PolicyIrRequireVexAction require: - var 
allSatisfied = true; - foreach (var condition in require.Conditions.Values) - { - if (!evaluator.EvaluateBoolean(condition)) - { - allSatisfied = false; - break; - } - } - - runtime.Status ??= allSatisfied ? "affected" : "suppressed"; - break; - case PolicyIrIgnoreAction ignore: - runtime.Status = "ignored"; - break; - case PolicyIrDeferAction defer: - runtime.Status = "deferred"; - break; - default: - runtime.Warnings.Add($"Unhandled action '{action.GetType().Name}' in rule '{ruleName}'."); - break; - } - } - - private static void ApplyAssignment(PolicyIrAssignmentAction assign, PolicyExpressionEvaluator evaluator, PolicyRuntimeState runtime) - { - var value = evaluator.Evaluate(assign.Value); - var stringValue = value.AsString(); - if (assign.Target.Length == 0) - { - return; - } - - var target = assign.Target[0]; - switch (target) - { - case "status": - runtime.Status = stringValue ?? runtime.Status ?? "affected"; - break; - case "severity": - runtime.Severity = stringValue; - break; - default: - runtime.Annotations[target] = stringValue ?? value.Raw?.ToString() ?? string.Empty; - break; - } - } - - private static void ApplyAnnotate(PolicyIrAnnotateAction annotate, PolicyExpressionEvaluator evaluator, PolicyRuntimeState runtime) - { - var key = annotate.Target.Length > 0 ? annotate.Target[^1] : "annotation"; - var value = evaluator.Evaluate(annotate.Value).AsString() ?? string.Empty; - runtime.Annotations[key] = value; - } - - private static void ApplyWarn(PolicyIrWarnAction warn, PolicyExpressionEvaluator evaluator, PolicyRuntimeState runtime) - { - var message = warn.Message is null ? "" : evaluator.Evaluate(warn.Message).AsString(); - if (!string.IsNullOrWhiteSpace(message)) - { - runtime.Warnings.Add(message!); - } - else - { - runtime.Warnings.Add("Policy rule emitted a warning."); - } - - runtime.Status ??= "warned"; - } - - private static void ApplyEscalate(PolicyIrEscalateAction escalate, PolicyExpressionEvaluator evaluator, PolicyRuntimeState runtime) - { - if (escalate.To is not null) - { - runtime.Severity = evaluator.Evaluate(escalate.To).AsString() ?? runtime.Severity; - } - - if (escalate.When is not null && !evaluator.EvaluateBoolean(escalate.When)) - { - return; - } - } - - private sealed class PolicyRuntimeState - { - public PolicyRuntimeState(string? initialSeverity) - { - Severity = initialSeverity; - } - - public string? Status { get; set; } - - public string? Severity { get; set; } - - public Dictionary<string, string> Annotations { get; } = new(StringComparer.OrdinalIgnoreCase); - - public List<string> Warnings { get; } = new(); - } - - private static PolicyEvaluationResult ApplyExceptions(PolicyEvaluationRequest request, PolicyEvaluationResult baseResult) - { - var exceptions = request.Context.Exceptions; - if (exceptions.IsEmpty) - { - return baseResult; - } - - PolicyEvaluationExceptionInstance? winningInstance = null; - PolicyExceptionEffect? 
winningEffect = null; - var winningScore = -1; - - foreach (var instance in exceptions.Instances) - { - if (!exceptions.Effects.TryGetValue(instance.EffectId, out var effect)) - { - continue; - } - - if (!MatchesScope(instance.Scope, request, baseResult)) - { - continue; - } - - var specificity = ComputeSpecificity(instance.Scope); - if (specificity < 0) - { - continue; - } - - if (winningInstance is null - || specificity > winningScore - || (specificity == winningScore && instance.CreatedAt > winningInstance.CreatedAt) - || (specificity == winningScore && instance.CreatedAt == winningInstance!.CreatedAt - && string.CompareOrdinal(instance.Id, winningInstance.Id) < 0)) - { - winningInstance = instance; - winningEffect = effect; - winningScore = specificity; - } - } - - if (winningInstance is null || winningEffect is null) - { - return baseResult; - } - - return ApplyExceptionEffect(baseResult, winningInstance, winningEffect); - } - - private static bool MatchesScope( - PolicyEvaluationExceptionScope scope, - PolicyEvaluationRequest request, - PolicyEvaluationResult baseResult) - { - if (scope.RuleNames.Count > 0) - { - if (string.IsNullOrEmpty(baseResult.RuleName) - || !scope.RuleNames.Contains(baseResult.RuleName)) - { - return false; - } - } - - if (scope.Severities.Count > 0) - { - var severity = request.Context.Severity.Normalized; - if (string.IsNullOrEmpty(severity) - || !scope.Severities.Contains(severity)) - { - return false; - } - } - - if (scope.Sources.Count > 0) - { - var source = request.Context.Advisory.Source; - if (string.IsNullOrEmpty(source) - || !scope.Sources.Contains(source)) - { - return false; - } - } - - if (scope.Tags.Count > 0) - { - var sbom = request.Context.Sbom; - var hasMatch = scope.Tags.Any(sbom.HasTag); - if (!hasMatch) - { - return false; - } - } - - return true; - } - - private static int ComputeSpecificity(PolicyEvaluationExceptionScope scope) - { - var score = 0; - - if (scope.RuleNames.Count > 0) - { - score += 1_000 + scope.RuleNames.Count * 25; - } - - if (scope.Severities.Count > 0) - { - score += 500 + scope.Severities.Count * 10; - } - - if (scope.Sources.Count > 0) - { - score += 250 + scope.Sources.Count * 10; - } - - if (scope.Tags.Count > 0) - { - score += 100 + scope.Tags.Count * 5; - } - - return score; - } - - private static PolicyEvaluationResult ApplyExceptionEffect( - PolicyEvaluationResult baseResult, - PolicyEvaluationExceptionInstance instance, - PolicyExceptionEffect effect) - { - var annotationsBuilder = baseResult.Annotations.ToBuilder(); - annotationsBuilder["exception.id"] = instance.Id; - annotationsBuilder["exception.effectId"] = effect.Id; - annotationsBuilder["exception.effectType"] = effect.Effect.ToString(); - - if (!string.IsNullOrWhiteSpace(effect.Name)) - { - annotationsBuilder["exception.effectName"] = effect.Name!; - } - - if (!string.IsNullOrWhiteSpace(effect.RoutingTemplate)) - { - annotationsBuilder["exception.routingTemplate"] = effect.RoutingTemplate!; - } - - if (effect.MaxDurationDays is int durationDays) - { - annotationsBuilder["exception.maxDurationDays"] = durationDays.ToString(CultureInfo.InvariantCulture); - } - - foreach (var pair in instance.Metadata) - { - annotationsBuilder[$"exception.meta.{pair.Key}"] = pair.Value; - } - - var metadataBuilder = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.OrdinalIgnoreCase); - if (!string.IsNullOrWhiteSpace(effect.RoutingTemplate)) - { - metadataBuilder["routingTemplate"] = effect.RoutingTemplate!; - } - - if (effect.MaxDurationDays is int 
metadataDuration) - { - metadataBuilder["maxDurationDays"] = metadataDuration.ToString(CultureInfo.InvariantCulture); - } - - if (!string.IsNullOrWhiteSpace(effect.RequiredControlId)) - { - metadataBuilder["requiredControlId"] = effect.RequiredControlId!; - } - - if (!string.IsNullOrWhiteSpace(effect.Name)) - { - metadataBuilder["effectName"] = effect.Name!; - } - - foreach (var pair in instance.Metadata) - { - metadataBuilder[pair.Key] = pair.Value; - } - - var newStatus = baseResult.Status; - var newSeverity = baseResult.Severity; - var warnings = baseResult.Warnings; - - switch (effect.Effect) - { - case PolicyExceptionEffectType.Suppress: - newStatus = "suppressed"; - annotationsBuilder["exception.status"] = newStatus; - break; - case PolicyExceptionEffectType.Defer: - newStatus = "deferred"; - annotationsBuilder["exception.status"] = newStatus; - break; - case PolicyExceptionEffectType.Downgrade: - if (effect.DowngradeSeverity is { } downgradeSeverity) - { - newSeverity = downgradeSeverity.ToString(); - annotationsBuilder["exception.severity"] = newSeverity!; - } - break; - case PolicyExceptionEffectType.RequireControl: - if (!string.IsNullOrWhiteSpace(effect.RequiredControlId)) - { - annotationsBuilder["exception.requiredControl"] = effect.RequiredControlId!; - warnings = warnings.Add($"Exception '{instance.Id}' requires control '{effect.RequiredControlId}'."); - } - break; - } - - var application = new PolicyExceptionApplication( - ExceptionId: instance.Id, - EffectId: instance.EffectId, - EffectType: effect.Effect, - OriginalStatus: baseResult.Status, - OriginalSeverity: baseResult.Severity, - AppliedStatus: newStatus, - AppliedSeverity: newSeverity, - Metadata: metadataBuilder.ToImmutable()); - - return baseResult with - { - Status = newStatus, - Severity = newSeverity, - Annotations = annotationsBuilder.ToImmutable(), - Warnings = warnings, - AppliedException = application, - }; - } -} +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Globalization; +using System.Linq; +using StellaOps.Policy; +using StellaOps.Policy.Engine.Compilation; + +namespace StellaOps.Policy.Engine.Evaluation; + +/// <summary> +/// Deterministically evaluates compiled policy IR against advisory/VEX/SBOM inputs. 
+/// </summary> +internal sealed class PolicyEvaluator +{ + public PolicyEvaluationResult Evaluate(PolicyEvaluationRequest request) + { + if (request is null) + { + throw new ArgumentNullException(nameof(request)); + } + + if (request.Document is null) + { + throw new ArgumentNullException(nameof(request.Document)); + } + + var evaluator = new PolicyExpressionEvaluator(request.Context); + var orderedRules = request.Document.Rules + .Select(static (rule, index) => new { rule, index }) + .OrderBy(x => x.rule.Priority) + .ThenBy(x => x.index) + .ToImmutableArray(); + + foreach (var entry in orderedRules) + { + var rule = entry.rule; + if (!evaluator.EvaluateBoolean(rule.When)) + { + continue; + } + + var runtime = new PolicyRuntimeState(request.Context.Severity.Normalized); + foreach (var action in rule.ThenActions) + { + ApplyAction(rule.Name, action, evaluator, runtime); + } + + if (runtime.Status is null) + { + runtime.Status = "affected"; + } + + var baseResult = new PolicyEvaluationResult( + Matched: true, + Status: runtime.Status, + Severity: runtime.Severity, + RuleName: rule.Name, + Priority: rule.Priority, + Annotations: runtime.Annotations.ToImmutableDictionary(StringComparer.OrdinalIgnoreCase), + Warnings: runtime.Warnings.ToImmutableArray(), + AppliedException: null); + + return ApplyExceptions(request, baseResult); + } + + var defaultResult = PolicyEvaluationResult.CreateDefault(request.Context.Severity.Normalized); + return ApplyExceptions(request, defaultResult); + } + + private static void ApplyAction( + string ruleName, + PolicyIrAction action, + PolicyExpressionEvaluator evaluator, + PolicyRuntimeState runtime) + { + switch (action) + { + case PolicyIrAssignmentAction assign: + ApplyAssignment(assign, evaluator, runtime); + break; + case PolicyIrAnnotateAction annotate: + ApplyAnnotate(annotate, evaluator, runtime); + break; + case PolicyIrWarnAction warn: + ApplyWarn(warn, evaluator, runtime); + break; + case PolicyIrEscalateAction escalate: + ApplyEscalate(escalate, evaluator, runtime); + break; + case PolicyIrRequireVexAction require: + var allSatisfied = true; + foreach (var condition in require.Conditions.Values) + { + if (!evaluator.EvaluateBoolean(condition)) + { + allSatisfied = false; + break; + } + } + + runtime.Status ??= allSatisfied ? "affected" : "suppressed"; + break; + case PolicyIrIgnoreAction ignore: + runtime.Status = "ignored"; + break; + case PolicyIrDeferAction defer: + runtime.Status = "deferred"; + break; + default: + runtime.Warnings.Add($"Unhandled action '{action.GetType().Name}' in rule '{ruleName}'."); + break; + } + } + + private static void ApplyAssignment(PolicyIrAssignmentAction assign, PolicyExpressionEvaluator evaluator, PolicyRuntimeState runtime) + { + var value = evaluator.Evaluate(assign.Value); + var stringValue = value.AsString(); + if (assign.Target.Length == 0) + { + return; + } + + var target = assign.Target[0]; + switch (target) + { + case "status": + runtime.Status = stringValue ?? runtime.Status ?? "affected"; + break; + case "severity": + runtime.Severity = stringValue; + break; + default: + runtime.Annotations[target] = stringValue ?? value.Raw?.ToString() ?? string.Empty; + break; + } + } + + private static void ApplyAnnotate(PolicyIrAnnotateAction annotate, PolicyExpressionEvaluator evaluator, PolicyRuntimeState runtime) + { + var key = annotate.Target.Length > 0 ? annotate.Target[^1] : "annotation"; + var value = evaluator.Evaluate(annotate.Value).AsString() ?? 
string.Empty; + runtime.Annotations[key] = value; + } + + private static void ApplyWarn(PolicyIrWarnAction warn, PolicyExpressionEvaluator evaluator, PolicyRuntimeState runtime) + { + var message = warn.Message is null ? "" : evaluator.Evaluate(warn.Message).AsString(); + if (!string.IsNullOrWhiteSpace(message)) + { + runtime.Warnings.Add(message!); + } + else + { + runtime.Warnings.Add("Policy rule emitted a warning."); + } + + runtime.Status ??= "warned"; + } + + private static void ApplyEscalate(PolicyIrEscalateAction escalate, PolicyExpressionEvaluator evaluator, PolicyRuntimeState runtime) + { + if (escalate.To is not null) + { + runtime.Severity = evaluator.Evaluate(escalate.To).AsString() ?? runtime.Severity; + } + + if (escalate.When is not null && !evaluator.EvaluateBoolean(escalate.When)) + { + return; + } + } + + private sealed class PolicyRuntimeState + { + public PolicyRuntimeState(string? initialSeverity) + { + Severity = initialSeverity; + } + + public string? Status { get; set; } + + public string? Severity { get; set; } + + public Dictionary<string, string> Annotations { get; } = new(StringComparer.OrdinalIgnoreCase); + + public List<string> Warnings { get; } = new(); + } + + private static PolicyEvaluationResult ApplyExceptions(PolicyEvaluationRequest request, PolicyEvaluationResult baseResult) + { + var exceptions = request.Context.Exceptions; + if (exceptions.IsEmpty) + { + return baseResult; + } + + PolicyEvaluationExceptionInstance? winningInstance = null; + PolicyExceptionEffect? winningEffect = null; + var winningScore = -1; + + foreach (var instance in exceptions.Instances) + { + if (!exceptions.Effects.TryGetValue(instance.EffectId, out var effect)) + { + continue; + } + + if (!MatchesScope(instance.Scope, request, baseResult)) + { + continue; + } + + var specificity = ComputeSpecificity(instance.Scope); + if (specificity < 0) + { + continue; + } + + if (winningInstance is null + || specificity > winningScore + || (specificity == winningScore && instance.CreatedAt > winningInstance.CreatedAt) + || (specificity == winningScore && instance.CreatedAt == winningInstance!.CreatedAt + && string.CompareOrdinal(instance.Id, winningInstance.Id) < 0)) + { + winningInstance = instance; + winningEffect = effect; + winningScore = specificity; + } + } + + if (winningInstance is null || winningEffect is null) + { + return baseResult; + } + + return ApplyExceptionEffect(baseResult, winningInstance, winningEffect); + } + + private static bool MatchesScope( + PolicyEvaluationExceptionScope scope, + PolicyEvaluationRequest request, + PolicyEvaluationResult baseResult) + { + if (scope.RuleNames.Count > 0) + { + if (string.IsNullOrEmpty(baseResult.RuleName) + || !scope.RuleNames.Contains(baseResult.RuleName)) + { + return false; + } + } + + if (scope.Severities.Count > 0) + { + var severity = request.Context.Severity.Normalized; + if (string.IsNullOrEmpty(severity) + || !scope.Severities.Contains(severity)) + { + return false; + } + } + + if (scope.Sources.Count > 0) + { + var source = request.Context.Advisory.Source; + if (string.IsNullOrEmpty(source) + || !scope.Sources.Contains(source)) + { + return false; + } + } + + if (scope.Tags.Count > 0) + { + var sbom = request.Context.Sbom; + var hasMatch = scope.Tags.Any(sbom.HasTag); + if (!hasMatch) + { + return false; + } + } + + return true; + } + + private static int ComputeSpecificity(PolicyEvaluationExceptionScope scope) + { + var score = 0; + + if (scope.RuleNames.Count > 0) + { + score += 1_000 + scope.RuleNames.Count * 25; + } + 
+ if (scope.Severities.Count > 0) + { + score += 500 + scope.Severities.Count * 10; + } + + if (scope.Sources.Count > 0) + { + score += 250 + scope.Sources.Count * 10; + } + + if (scope.Tags.Count > 0) + { + score += 100 + scope.Tags.Count * 5; + } + + return score; + } + + private static PolicyEvaluationResult ApplyExceptionEffect( + PolicyEvaluationResult baseResult, + PolicyEvaluationExceptionInstance instance, + PolicyExceptionEffect effect) + { + var annotationsBuilder = baseResult.Annotations.ToBuilder(); + annotationsBuilder["exception.id"] = instance.Id; + annotationsBuilder["exception.effectId"] = effect.Id; + annotationsBuilder["exception.effectType"] = effect.Effect.ToString(); + + if (!string.IsNullOrWhiteSpace(effect.Name)) + { + annotationsBuilder["exception.effectName"] = effect.Name!; + } + + if (!string.IsNullOrWhiteSpace(effect.RoutingTemplate)) + { + annotationsBuilder["exception.routingTemplate"] = effect.RoutingTemplate!; + } + + if (effect.MaxDurationDays is int durationDays) + { + annotationsBuilder["exception.maxDurationDays"] = durationDays.ToString(CultureInfo.InvariantCulture); + } + + foreach (var pair in instance.Metadata) + { + annotationsBuilder[$"exception.meta.{pair.Key}"] = pair.Value; + } + + var metadataBuilder = ImmutableDictionary.CreateBuilder<string, string>(StringComparer.OrdinalIgnoreCase); + if (!string.IsNullOrWhiteSpace(effect.RoutingTemplate)) + { + metadataBuilder["routingTemplate"] = effect.RoutingTemplate!; + } + + if (effect.MaxDurationDays is int metadataDuration) + { + metadataBuilder["maxDurationDays"] = metadataDuration.ToString(CultureInfo.InvariantCulture); + } + + if (!string.IsNullOrWhiteSpace(effect.RequiredControlId)) + { + metadataBuilder["requiredControlId"] = effect.RequiredControlId!; + } + + if (!string.IsNullOrWhiteSpace(effect.Name)) + { + metadataBuilder["effectName"] = effect.Name!; + } + + foreach (var pair in instance.Metadata) + { + metadataBuilder[pair.Key] = pair.Value; + } + + var newStatus = baseResult.Status; + var newSeverity = baseResult.Severity; + var warnings = baseResult.Warnings; + + switch (effect.Effect) + { + case PolicyExceptionEffectType.Suppress: + newStatus = "suppressed"; + annotationsBuilder["exception.status"] = newStatus; + break; + case PolicyExceptionEffectType.Defer: + newStatus = "deferred"; + annotationsBuilder["exception.status"] = newStatus; + break; + case PolicyExceptionEffectType.Downgrade: + if (effect.DowngradeSeverity is { } downgradeSeverity) + { + newSeverity = downgradeSeverity.ToString(); + annotationsBuilder["exception.severity"] = newSeverity!; + } + break; + case PolicyExceptionEffectType.RequireControl: + if (!string.IsNullOrWhiteSpace(effect.RequiredControlId)) + { + annotationsBuilder["exception.requiredControl"] = effect.RequiredControlId!; + warnings = warnings.Add($"Exception '{instance.Id}' requires control '{effect.RequiredControlId}'."); + } + break; + } + + var application = new PolicyExceptionApplication( + ExceptionId: instance.Id, + EffectId: instance.EffectId, + EffectType: effect.Effect, + OriginalStatus: baseResult.Status, + OriginalSeverity: baseResult.Severity, + AppliedStatus: newStatus, + AppliedSeverity: newSeverity, + Metadata: metadataBuilder.ToImmutable()); + + return baseResult with + { + Status = newStatus, + Severity = newSeverity, + Annotations = annotationsBuilder.ToImmutable(), + Warnings = warnings, + AppliedException = application, + }; + } +} diff --git a/src/StellaOps.Policy.Engine/Evaluation/PolicyExpressionEvaluator.cs 
b/src/Policy/StellaOps.Policy.Engine/Evaluation/PolicyExpressionEvaluator.cs similarity index 97% rename from src/StellaOps.Policy.Engine/Evaluation/PolicyExpressionEvaluator.cs rename to src/Policy/StellaOps.Policy.Engine/Evaluation/PolicyExpressionEvaluator.cs index 1a1b7421..655862b4 100644 --- a/src/StellaOps.Policy.Engine/Evaluation/PolicyExpressionEvaluator.cs +++ b/src/Policy/StellaOps.Policy.Engine/Evaluation/PolicyExpressionEvaluator.cs @@ -1,509 +1,509 @@ -using System; -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Globalization; -using System.Linq; -using StellaOps.Policy.Engine.Compilation; - -namespace StellaOps.Policy.Engine.Evaluation; - -internal sealed class PolicyExpressionEvaluator -{ - private static readonly IReadOnlyDictionary<string, decimal> SeverityOrder = new Dictionary<string, decimal>(StringComparer.OrdinalIgnoreCase) - { - ["critical"] = 5m, - ["high"] = 4m, - ["medium"] = 3m, - ["moderate"] = 3m, - ["low"] = 2m, - ["informational"] = 1m, - ["info"] = 1m, - ["none"] = 0m, - ["unknown"] = -1m, - }; - - private readonly PolicyEvaluationContext context; - - public PolicyExpressionEvaluator(PolicyEvaluationContext context) - { - this.context = context ?? throw new ArgumentNullException(nameof(context)); - } - - public EvaluationValue Evaluate(PolicyExpression expression, EvaluationScope? scope = null) - { - scope ??= EvaluationScope.Root(context); - return expression switch - { - PolicyLiteralExpression literal => new EvaluationValue(literal.Value), - PolicyListExpression list => new EvaluationValue(list.Items.Select(item => Evaluate(item, scope).Raw).ToImmutableArray()), - PolicyIdentifierExpression identifier => ResolveIdentifier(identifier.Name, scope), - PolicyMemberAccessExpression member => EvaluateMember(member, scope), - PolicyInvocationExpression invocation => EvaluateInvocation(invocation, scope), - PolicyIndexerExpression indexer => EvaluateIndexer(indexer, scope), - PolicyUnaryExpression unary => EvaluateUnary(unary, scope), - PolicyBinaryExpression binary => EvaluateBinary(binary, scope), - _ => EvaluationValue.Null, - }; - } - - public bool EvaluateBoolean(PolicyExpression expression, EvaluationScope? 
scope = null) => - Evaluate(expression, scope).AsBoolean(); - - private EvaluationValue ResolveIdentifier(string name, EvaluationScope scope) - { - if (scope.TryGetLocal(name, out var local)) - { - return new EvaluationValue(local); - } - - return name switch - { - "severity" => new EvaluationValue(new SeverityScope(context.Severity)), - "env" => new EvaluationValue(new EnvironmentScope(context.Environment)), - "vex" => new EvaluationValue(new VexScope(this, context.Vex)), - "advisory" => new EvaluationValue(new AdvisoryScope(context.Advisory)), - "sbom" => new EvaluationValue(new SbomScope(context.Sbom)), - "true" => EvaluationValue.True, - "false" => EvaluationValue.False, - _ => EvaluationValue.Null, - }; - } - - private EvaluationValue EvaluateMember(PolicyMemberAccessExpression member, EvaluationScope scope) - { - var target = Evaluate(member.Target, scope); - var raw = target.Raw; - if (raw is SeverityScope severity) - { - return severity.Get(member.Member); - } - - if (raw is EnvironmentScope env) - { - return env.Get(member.Member); - } - - if (raw is VexScope vex) - { - return vex.Get(member.Member); - } - - if (raw is AdvisoryScope advisory) - { - return advisory.Get(member.Member); - } - - if (raw is SbomScope sbom) - { - return sbom.Get(member.Member); - } - - if (raw is ImmutableDictionary<string, object?> dict && dict.TryGetValue(member.Member, out var value)) - { - return new EvaluationValue(value); - } - - if (raw is PolicyEvaluationVexStatement stmt) - { - return member.Member switch - { - "status" => new EvaluationValue(stmt.Status), - "justification" => new EvaluationValue(stmt.Justification), - "statementId" => new EvaluationValue(stmt.StatementId), - _ => EvaluationValue.Null, - }; - } - - return EvaluationValue.Null; - } - - private EvaluationValue EvaluateInvocation(PolicyInvocationExpression invocation, EvaluationScope scope) - { - if (invocation.Target is PolicyIdentifierExpression identifier) - { - switch (identifier.Name) - { - case "severity_band": - var arg = invocation.Arguments.Length > 0 ? Evaluate(invocation.Arguments[0], scope).AsString() : null; - return new EvaluationValue(arg ?? 
string.Empty); - } - } - - if (invocation.Target is PolicyMemberAccessExpression member && member.Target is PolicyIdentifierExpression root) - { - if (root.Name == "vex") - { - var vex = Evaluate(member.Target, scope); - if (vex.Raw is VexScope vexScope) - { - return member.Member switch - { - "any" => new EvaluationValue(vexScope.Any(invocation.Arguments, scope)), - "latest" => new EvaluationValue(vexScope.Latest()), - _ => EvaluationValue.Null, - }; - } - } - - if (root.Name == "sbom") - { - var sbom = Evaluate(member.Target, scope); - if (sbom.Raw is SbomScope sbomScope) - { - return member.Member switch - { - "has_tag" => sbomScope.HasTag(invocation.Arguments, scope, this), - _ => EvaluationValue.Null, - }; - } - } - - if (root.Name == "advisory") - { - var advisory = Evaluate(member.Target, scope); - if (advisory.Raw is AdvisoryScope advisoryScope) - { - return advisoryScope.Invoke(member.Member, invocation.Arguments, scope, this); - } - } - } - - return EvaluationValue.Null; - } - - private EvaluationValue EvaluateIndexer(PolicyIndexerExpression indexer, EvaluationScope scope) - { - var target = Evaluate(indexer.Target, scope).Raw; - var index = Evaluate(indexer.Index, scope).Raw; - - if (target is ImmutableArray<object?> array && index is int i && i >= 0 && i < array.Length) - { - return new EvaluationValue(array[i]); - } - - return EvaluationValue.Null; - } - - private EvaluationValue EvaluateUnary(PolicyUnaryExpression unary, EvaluationScope scope) - { - var operand = Evaluate(unary.Operand, scope); - return unary.Operator switch - { - PolicyUnaryOperator.Not => new EvaluationValue(!operand.AsBoolean()), - _ => EvaluationValue.Null, - }; - } - - private EvaluationValue EvaluateBinary(PolicyBinaryExpression binary, EvaluationScope scope) - { - return binary.Operator switch - { - PolicyBinaryOperator.And => new EvaluationValue(EvaluateBoolean(binary.Left, scope) && EvaluateBoolean(binary.Right, scope)), - PolicyBinaryOperator.Or => new EvaluationValue(EvaluateBoolean(binary.Left, scope) || EvaluateBoolean(binary.Right, scope)), - PolicyBinaryOperator.Equal => Compare(binary.Left, binary.Right, scope, static (a, b) => Equals(a, b)), - PolicyBinaryOperator.NotEqual => Compare(binary.Left, binary.Right, scope, static (a, b) => !Equals(a, b)), - PolicyBinaryOperator.In => Contains(binary.Left, binary.Right, scope), - PolicyBinaryOperator.NotIn => new EvaluationValue(!Contains(binary.Left, binary.Right, scope).AsBoolean()), - PolicyBinaryOperator.LessThan => CompareNumeric(binary.Left, binary.Right, scope, static (a, b) => a < b), - PolicyBinaryOperator.LessThanOrEqual => CompareNumeric(binary.Left, binary.Right, scope, static (a, b) => a <= b), - PolicyBinaryOperator.GreaterThan => CompareNumeric(binary.Left, binary.Right, scope, static (a, b) => a > b), - PolicyBinaryOperator.GreaterThanOrEqual => CompareNumeric(binary.Left, binary.Right, scope, static (a, b) => a >= b), - _ => EvaluationValue.Null, - }; - } - - private EvaluationValue Compare(PolicyExpression left, PolicyExpression right, EvaluationScope scope, Func<object?, object?, bool> comparer) - { - var leftValue = Evaluate(left, scope).Raw; - var rightValue = Evaluate(right, scope).Raw; - return new EvaluationValue(comparer(leftValue, rightValue)); - } - - private EvaluationValue CompareNumeric(PolicyExpression left, PolicyExpression right, EvaluationScope scope, Func<decimal, decimal, bool> comparer) - { - var leftValue = Evaluate(left, scope); - var rightValue = Evaluate(right, scope); - - if 
(!TryGetComparableNumber(leftValue, out var leftNumber) - || !TryGetComparableNumber(rightValue, out var rightNumber)) - { - return EvaluationValue.False; - } - - return new EvaluationValue(comparer(leftNumber, rightNumber)); - } - - private static bool TryGetComparableNumber(EvaluationValue value, out decimal number) - { - var numeric = value.AsDecimal(); - if (numeric.HasValue) - { - number = numeric.Value; - return true; - } - - if (value.Raw is string text && SeverityOrder.TryGetValue(text.Trim(), out var mapped)) - { - number = mapped; - return true; - } - - number = 0m; - return false; - } - - private EvaluationValue Contains(PolicyExpression needleExpr, PolicyExpression haystackExpr, EvaluationScope scope) - { - var needle = Evaluate(needleExpr, scope).Raw; - var haystack = Evaluate(haystackExpr, scope).Raw; - - if (haystack is ImmutableArray<object?> array) - { - return new EvaluationValue(array.Any(item => Equals(item, needle))); - } - - if (haystack is string str && needle is string needleString) - { - return new EvaluationValue(str.Contains(needleString, StringComparison.OrdinalIgnoreCase)); - } - - return new EvaluationValue(false); - } - - internal readonly struct EvaluationValue - { - public static readonly EvaluationValue Null = new(null); - public static readonly EvaluationValue True = new(true); - public static readonly EvaluationValue False = new(false); - - public EvaluationValue(object? raw) - { - Raw = raw; - } - - public object? Raw { get; } - - public bool AsBoolean() - { - return Raw switch - { - bool b => b, - string s => !string.IsNullOrWhiteSpace(s), - ImmutableArray<object?> array => !array.IsDefaultOrEmpty, - null => false, - _ => true, - }; - } - - public string? AsString() - { - return Raw switch - { - null => null, - string s => s, - decimal dec => dec.ToString("G", CultureInfo.InvariantCulture), - double d => d.ToString("G", CultureInfo.InvariantCulture), - int i => i.ToString(CultureInfo.InvariantCulture), - _ => Raw.ToString(), - }; - } - - public decimal? AsDecimal() - { - return Raw switch - { - decimal dec => dec, - double dbl => (decimal)dbl, - float fl => (decimal)fl, - int i => i, - long l => l, - string s when decimal.TryParse(s, NumberStyles.Any, CultureInfo.InvariantCulture, out var value) => value, - _ => null, - }; - } - } - - internal sealed class EvaluationScope - { - private readonly IReadOnlyDictionary<string, object?> locals; - - private EvaluationScope(IReadOnlyDictionary<string, object?> locals, PolicyEvaluationContext globals) - { - this.locals = locals; - Globals = globals; - } - - public static EvaluationScope Root(PolicyEvaluationContext globals) => - new EvaluationScope(new Dictionary<string, object?>(StringComparer.OrdinalIgnoreCase), globals); - - public static EvaluationScope FromLocals(PolicyEvaluationContext globals, IReadOnlyDictionary<string, object?> locals) => - new EvaluationScope(locals, globals); - - public bool TryGetLocal(string name, out object? 
value) - { - if (locals.TryGetValue(name, out value)) - { - return true; - } - - value = null; - return false; - } - - public PolicyEvaluationContext Globals { get; } - } - - private sealed class SeverityScope - { - private readonly PolicyEvaluationSeverity severity; - - public SeverityScope(PolicyEvaluationSeverity severity) - { - this.severity = severity; - } - - public EvaluationValue Get(string member) => member switch - { - "normalized" => new EvaluationValue(severity.Normalized), - "score" => new EvaluationValue(severity.Score), - _ => EvaluationValue.Null, - }; - } - - private sealed class EnvironmentScope - { - private readonly PolicyEvaluationEnvironment environment; - - public EnvironmentScope(PolicyEvaluationEnvironment environment) - { - this.environment = environment; - } - - public EvaluationValue Get(string member) - { - var value = environment.Get(member) - ?? environment.Get(member.ToLowerInvariant()); - return new EvaluationValue(value); - } - } - - private sealed class AdvisoryScope - { - private readonly PolicyEvaluationAdvisory advisory; - - public AdvisoryScope(PolicyEvaluationAdvisory advisory) - { - this.advisory = advisory; - } - - public EvaluationValue Get(string member) => member switch - { - "source" => new EvaluationValue(advisory.Source), - _ => advisory.Metadata.TryGetValue(member, out var value) ? new EvaluationValue(value) : EvaluationValue.Null, - }; - - public EvaluationValue Invoke(string member, ImmutableArray<PolicyExpression> arguments, EvaluationScope scope, PolicyExpressionEvaluator evaluator) - { - if (member.Equals("has_metadata", StringComparison.OrdinalIgnoreCase)) - { - var key = arguments.Length > 0 ? evaluator.Evaluate(arguments[0], scope).AsString() : null; - if (string.IsNullOrEmpty(key)) - { - return EvaluationValue.False; - } - - return new EvaluationValue(advisory.Metadata.ContainsKey(key!)); - } - - return EvaluationValue.Null; - } - } - - private sealed class SbomScope - { - private readonly PolicyEvaluationSbom sbom; - - public SbomScope(PolicyEvaluationSbom sbom) - { - this.sbom = sbom; - } - - public EvaluationValue Get(string member) - { - if (member.Equals("tags", StringComparison.OrdinalIgnoreCase)) - { - return new EvaluationValue(sbom.Tags.ToImmutableArray<object?>()); - } - - return EvaluationValue.Null; - } - - public EvaluationValue HasTag(ImmutableArray<PolicyExpression> arguments, EvaluationScope scope, PolicyExpressionEvaluator evaluator) - { - var tag = arguments.Length > 0 ? evaluator.Evaluate(arguments[0], scope).AsString() : null; - if (string.IsNullOrWhiteSpace(tag)) - { - return EvaluationValue.False; - } - - return new EvaluationValue(sbom.HasTag(tag!)); - } - } - - private sealed class VexScope - { - private readonly PolicyExpressionEvaluator evaluator; - private readonly PolicyEvaluationVexEvidence vex; - - public VexScope(PolicyExpressionEvaluator evaluator, PolicyEvaluationVexEvidence vex) - { - this.evaluator = evaluator; - this.vex = vex; - } - - public EvaluationValue Get(string member) => member switch - { - "status" => new EvaluationValue(vex.Statements.IsDefaultOrEmpty ? null : vex.Statements[0].Status), - "justification" => new EvaluationValue(vex.Statements.IsDefaultOrEmpty ? 
null : vex.Statements[0].Justification), - _ => EvaluationValue.Null, - }; - - public bool Any(ImmutableArray<PolicyExpression> arguments, EvaluationScope scope) - { - if (arguments.Length == 0 || vex.Statements.IsDefaultOrEmpty) - { - return false; - } - - var predicate = arguments[0]; - foreach (var statement in vex.Statements) - { - var locals = new Dictionary<string, object?>(StringComparer.OrdinalIgnoreCase) - { - ["status"] = statement.Status, - ["justification"] = statement.Justification, - ["statement"] = statement, - ["statementId"] = statement.StatementId, - }; - - var nestedScope = EvaluationScope.FromLocals(scope.Globals, locals); - if (evaluator.EvaluateBoolean(predicate, nestedScope)) - { - return true; - } - } - - return false; - } - - public PolicyEvaluationVexStatement? Latest() - { - if (vex.Statements.IsDefaultOrEmpty) - { - return null; - } - - return vex.Statements[^1]; - } - } -} +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Globalization; +using System.Linq; +using StellaOps.Policy.Engine.Compilation; + +namespace StellaOps.Policy.Engine.Evaluation; + +internal sealed class PolicyExpressionEvaluator +{ + private static readonly IReadOnlyDictionary<string, decimal> SeverityOrder = new Dictionary<string, decimal>(StringComparer.OrdinalIgnoreCase) + { + ["critical"] = 5m, + ["high"] = 4m, + ["medium"] = 3m, + ["moderate"] = 3m, + ["low"] = 2m, + ["informational"] = 1m, + ["info"] = 1m, + ["none"] = 0m, + ["unknown"] = -1m, + }; + + private readonly PolicyEvaluationContext context; + + public PolicyExpressionEvaluator(PolicyEvaluationContext context) + { + this.context = context ?? throw new ArgumentNullException(nameof(context)); + } + + public EvaluationValue Evaluate(PolicyExpression expression, EvaluationScope? scope = null) + { + scope ??= EvaluationScope.Root(context); + return expression switch + { + PolicyLiteralExpression literal => new EvaluationValue(literal.Value), + PolicyListExpression list => new EvaluationValue(list.Items.Select(item => Evaluate(item, scope).Raw).ToImmutableArray()), + PolicyIdentifierExpression identifier => ResolveIdentifier(identifier.Name, scope), + PolicyMemberAccessExpression member => EvaluateMember(member, scope), + PolicyInvocationExpression invocation => EvaluateInvocation(invocation, scope), + PolicyIndexerExpression indexer => EvaluateIndexer(indexer, scope), + PolicyUnaryExpression unary => EvaluateUnary(unary, scope), + PolicyBinaryExpression binary => EvaluateBinary(binary, scope), + _ => EvaluationValue.Null, + }; + } + + public bool EvaluateBoolean(PolicyExpression expression, EvaluationScope? 
scope = null) => + Evaluate(expression, scope).AsBoolean(); + + private EvaluationValue ResolveIdentifier(string name, EvaluationScope scope) + { + if (scope.TryGetLocal(name, out var local)) + { + return new EvaluationValue(local); + } + + return name switch + { + "severity" => new EvaluationValue(new SeverityScope(context.Severity)), + "env" => new EvaluationValue(new EnvironmentScope(context.Environment)), + "vex" => new EvaluationValue(new VexScope(this, context.Vex)), + "advisory" => new EvaluationValue(new AdvisoryScope(context.Advisory)), + "sbom" => new EvaluationValue(new SbomScope(context.Sbom)), + "true" => EvaluationValue.True, + "false" => EvaluationValue.False, + _ => EvaluationValue.Null, + }; + } + + private EvaluationValue EvaluateMember(PolicyMemberAccessExpression member, EvaluationScope scope) + { + var target = Evaluate(member.Target, scope); + var raw = target.Raw; + if (raw is SeverityScope severity) + { + return severity.Get(member.Member); + } + + if (raw is EnvironmentScope env) + { + return env.Get(member.Member); + } + + if (raw is VexScope vex) + { + return vex.Get(member.Member); + } + + if (raw is AdvisoryScope advisory) + { + return advisory.Get(member.Member); + } + + if (raw is SbomScope sbom) + { + return sbom.Get(member.Member); + } + + if (raw is ImmutableDictionary<string, object?> dict && dict.TryGetValue(member.Member, out var value)) + { + return new EvaluationValue(value); + } + + if (raw is PolicyEvaluationVexStatement stmt) + { + return member.Member switch + { + "status" => new EvaluationValue(stmt.Status), + "justification" => new EvaluationValue(stmt.Justification), + "statementId" => new EvaluationValue(stmt.StatementId), + _ => EvaluationValue.Null, + }; + } + + return EvaluationValue.Null; + } + + private EvaluationValue EvaluateInvocation(PolicyInvocationExpression invocation, EvaluationScope scope) + { + if (invocation.Target is PolicyIdentifierExpression identifier) + { + switch (identifier.Name) + { + case "severity_band": + var arg = invocation.Arguments.Length > 0 ? Evaluate(invocation.Arguments[0], scope).AsString() : null; + return new EvaluationValue(arg ?? 
string.Empty); + } + } + + if (invocation.Target is PolicyMemberAccessExpression member && member.Target is PolicyIdentifierExpression root) + { + if (root.Name == "vex") + { + var vex = Evaluate(member.Target, scope); + if (vex.Raw is VexScope vexScope) + { + return member.Member switch + { + "any" => new EvaluationValue(vexScope.Any(invocation.Arguments, scope)), + "latest" => new EvaluationValue(vexScope.Latest()), + _ => EvaluationValue.Null, + }; + } + } + + if (root.Name == "sbom") + { + var sbom = Evaluate(member.Target, scope); + if (sbom.Raw is SbomScope sbomScope) + { + return member.Member switch + { + "has_tag" => sbomScope.HasTag(invocation.Arguments, scope, this), + _ => EvaluationValue.Null, + }; + } + } + + if (root.Name == "advisory") + { + var advisory = Evaluate(member.Target, scope); + if (advisory.Raw is AdvisoryScope advisoryScope) + { + return advisoryScope.Invoke(member.Member, invocation.Arguments, scope, this); + } + } + } + + return EvaluationValue.Null; + } + + private EvaluationValue EvaluateIndexer(PolicyIndexerExpression indexer, EvaluationScope scope) + { + var target = Evaluate(indexer.Target, scope).Raw; + var index = Evaluate(indexer.Index, scope).Raw; + + if (target is ImmutableArray<object?> array && index is int i && i >= 0 && i < array.Length) + { + return new EvaluationValue(array[i]); + } + + return EvaluationValue.Null; + } + + private EvaluationValue EvaluateUnary(PolicyUnaryExpression unary, EvaluationScope scope) + { + var operand = Evaluate(unary.Operand, scope); + return unary.Operator switch + { + PolicyUnaryOperator.Not => new EvaluationValue(!operand.AsBoolean()), + _ => EvaluationValue.Null, + }; + } + + private EvaluationValue EvaluateBinary(PolicyBinaryExpression binary, EvaluationScope scope) + { + return binary.Operator switch + { + PolicyBinaryOperator.And => new EvaluationValue(EvaluateBoolean(binary.Left, scope) && EvaluateBoolean(binary.Right, scope)), + PolicyBinaryOperator.Or => new EvaluationValue(EvaluateBoolean(binary.Left, scope) || EvaluateBoolean(binary.Right, scope)), + PolicyBinaryOperator.Equal => Compare(binary.Left, binary.Right, scope, static (a, b) => Equals(a, b)), + PolicyBinaryOperator.NotEqual => Compare(binary.Left, binary.Right, scope, static (a, b) => !Equals(a, b)), + PolicyBinaryOperator.In => Contains(binary.Left, binary.Right, scope), + PolicyBinaryOperator.NotIn => new EvaluationValue(!Contains(binary.Left, binary.Right, scope).AsBoolean()), + PolicyBinaryOperator.LessThan => CompareNumeric(binary.Left, binary.Right, scope, static (a, b) => a < b), + PolicyBinaryOperator.LessThanOrEqual => CompareNumeric(binary.Left, binary.Right, scope, static (a, b) => a <= b), + PolicyBinaryOperator.GreaterThan => CompareNumeric(binary.Left, binary.Right, scope, static (a, b) => a > b), + PolicyBinaryOperator.GreaterThanOrEqual => CompareNumeric(binary.Left, binary.Right, scope, static (a, b) => a >= b), + _ => EvaluationValue.Null, + }; + } + + private EvaluationValue Compare(PolicyExpression left, PolicyExpression right, EvaluationScope scope, Func<object?, object?, bool> comparer) + { + var leftValue = Evaluate(left, scope).Raw; + var rightValue = Evaluate(right, scope).Raw; + return new EvaluationValue(comparer(leftValue, rightValue)); + } + + private EvaluationValue CompareNumeric(PolicyExpression left, PolicyExpression right, EvaluationScope scope, Func<decimal, decimal, bool> comparer) + { + var leftValue = Evaluate(left, scope); + var rightValue = Evaluate(right, scope); + + if 
(!TryGetComparableNumber(leftValue, out var leftNumber) + || !TryGetComparableNumber(rightValue, out var rightNumber)) + { + return EvaluationValue.False; + } + + return new EvaluationValue(comparer(leftNumber, rightNumber)); + } + + private static bool TryGetComparableNumber(EvaluationValue value, out decimal number) + { + var numeric = value.AsDecimal(); + if (numeric.HasValue) + { + number = numeric.Value; + return true; + } + + if (value.Raw is string text && SeverityOrder.TryGetValue(text.Trim(), out var mapped)) + { + number = mapped; + return true; + } + + number = 0m; + return false; + } + + private EvaluationValue Contains(PolicyExpression needleExpr, PolicyExpression haystackExpr, EvaluationScope scope) + { + var needle = Evaluate(needleExpr, scope).Raw; + var haystack = Evaluate(haystackExpr, scope).Raw; + + if (haystack is ImmutableArray<object?> array) + { + return new EvaluationValue(array.Any(item => Equals(item, needle))); + } + + if (haystack is string str && needle is string needleString) + { + return new EvaluationValue(str.Contains(needleString, StringComparison.OrdinalIgnoreCase)); + } + + return new EvaluationValue(false); + } + + internal readonly struct EvaluationValue + { + public static readonly EvaluationValue Null = new(null); + public static readonly EvaluationValue True = new(true); + public static readonly EvaluationValue False = new(false); + + public EvaluationValue(object? raw) + { + Raw = raw; + } + + public object? Raw { get; } + + public bool AsBoolean() + { + return Raw switch + { + bool b => b, + string s => !string.IsNullOrWhiteSpace(s), + ImmutableArray<object?> array => !array.IsDefaultOrEmpty, + null => false, + _ => true, + }; + } + + public string? AsString() + { + return Raw switch + { + null => null, + string s => s, + decimal dec => dec.ToString("G", CultureInfo.InvariantCulture), + double d => d.ToString("G", CultureInfo.InvariantCulture), + int i => i.ToString(CultureInfo.InvariantCulture), + _ => Raw.ToString(), + }; + } + + public decimal? AsDecimal() + { + return Raw switch + { + decimal dec => dec, + double dbl => (decimal)dbl, + float fl => (decimal)fl, + int i => i, + long l => l, + string s when decimal.TryParse(s, NumberStyles.Any, CultureInfo.InvariantCulture, out var value) => value, + _ => null, + }; + } + } + + internal sealed class EvaluationScope + { + private readonly IReadOnlyDictionary<string, object?> locals; + + private EvaluationScope(IReadOnlyDictionary<string, object?> locals, PolicyEvaluationContext globals) + { + this.locals = locals; + Globals = globals; + } + + public static EvaluationScope Root(PolicyEvaluationContext globals) => + new EvaluationScope(new Dictionary<string, object?>(StringComparer.OrdinalIgnoreCase), globals); + + public static EvaluationScope FromLocals(PolicyEvaluationContext globals, IReadOnlyDictionary<string, object?> locals) => + new EvaluationScope(locals, globals); + + public bool TryGetLocal(string name, out object? 
value) + { + if (locals.TryGetValue(name, out value)) + { + return true; + } + + value = null; + return false; + } + + public PolicyEvaluationContext Globals { get; } + } + + private sealed class SeverityScope + { + private readonly PolicyEvaluationSeverity severity; + + public SeverityScope(PolicyEvaluationSeverity severity) + { + this.severity = severity; + } + + public EvaluationValue Get(string member) => member switch + { + "normalized" => new EvaluationValue(severity.Normalized), + "score" => new EvaluationValue(severity.Score), + _ => EvaluationValue.Null, + }; + } + + private sealed class EnvironmentScope + { + private readonly PolicyEvaluationEnvironment environment; + + public EnvironmentScope(PolicyEvaluationEnvironment environment) + { + this.environment = environment; + } + + public EvaluationValue Get(string member) + { + var value = environment.Get(member) + ?? environment.Get(member.ToLowerInvariant()); + return new EvaluationValue(value); + } + } + + private sealed class AdvisoryScope + { + private readonly PolicyEvaluationAdvisory advisory; + + public AdvisoryScope(PolicyEvaluationAdvisory advisory) + { + this.advisory = advisory; + } + + public EvaluationValue Get(string member) => member switch + { + "source" => new EvaluationValue(advisory.Source), + _ => advisory.Metadata.TryGetValue(member, out var value) ? new EvaluationValue(value) : EvaluationValue.Null, + }; + + public EvaluationValue Invoke(string member, ImmutableArray<PolicyExpression> arguments, EvaluationScope scope, PolicyExpressionEvaluator evaluator) + { + if (member.Equals("has_metadata", StringComparison.OrdinalIgnoreCase)) + { + var key = arguments.Length > 0 ? evaluator.Evaluate(arguments[0], scope).AsString() : null; + if (string.IsNullOrEmpty(key)) + { + return EvaluationValue.False; + } + + return new EvaluationValue(advisory.Metadata.ContainsKey(key!)); + } + + return EvaluationValue.Null; + } + } + + private sealed class SbomScope + { + private readonly PolicyEvaluationSbom sbom; + + public SbomScope(PolicyEvaluationSbom sbom) + { + this.sbom = sbom; + } + + public EvaluationValue Get(string member) + { + if (member.Equals("tags", StringComparison.OrdinalIgnoreCase)) + { + return new EvaluationValue(sbom.Tags.ToImmutableArray<object?>()); + } + + return EvaluationValue.Null; + } + + public EvaluationValue HasTag(ImmutableArray<PolicyExpression> arguments, EvaluationScope scope, PolicyExpressionEvaluator evaluator) + { + var tag = arguments.Length > 0 ? evaluator.Evaluate(arguments[0], scope).AsString() : null; + if (string.IsNullOrWhiteSpace(tag)) + { + return EvaluationValue.False; + } + + return new EvaluationValue(sbom.HasTag(tag!)); + } + } + + private sealed class VexScope + { + private readonly PolicyExpressionEvaluator evaluator; + private readonly PolicyEvaluationVexEvidence vex; + + public VexScope(PolicyExpressionEvaluator evaluator, PolicyEvaluationVexEvidence vex) + { + this.evaluator = evaluator; + this.vex = vex; + } + + public EvaluationValue Get(string member) => member switch + { + "status" => new EvaluationValue(vex.Statements.IsDefaultOrEmpty ? null : vex.Statements[0].Status), + "justification" => new EvaluationValue(vex.Statements.IsDefaultOrEmpty ? 
null : vex.Statements[0].Justification), + _ => EvaluationValue.Null, + }; + + public bool Any(ImmutableArray<PolicyExpression> arguments, EvaluationScope scope) + { + if (arguments.Length == 0 || vex.Statements.IsDefaultOrEmpty) + { + return false; + } + + var predicate = arguments[0]; + foreach (var statement in vex.Statements) + { + var locals = new Dictionary<string, object?>(StringComparer.OrdinalIgnoreCase) + { + ["status"] = statement.Status, + ["justification"] = statement.Justification, + ["statement"] = statement, + ["statementId"] = statement.StatementId, + }; + + var nestedScope = EvaluationScope.FromLocals(scope.Globals, locals); + if (evaluator.EvaluateBoolean(predicate, nestedScope)) + { + return true; + } + } + + return false; + } + + public PolicyEvaluationVexStatement? Latest() + { + if (vex.Statements.IsDefaultOrEmpty) + { + return null; + } + + return vex.Statements[^1]; + } + } +} diff --git a/src/StellaOps.Policy.Engine/Hosting/PolicyEngineStartupDiagnostics.cs b/src/Policy/StellaOps.Policy.Engine/Hosting/PolicyEngineStartupDiagnostics.cs similarity index 95% rename from src/StellaOps.Policy.Engine/Hosting/PolicyEngineStartupDiagnostics.cs rename to src/Policy/StellaOps.Policy.Engine/Hosting/PolicyEngineStartupDiagnostics.cs index 4d7ccb23..8b06a385 100644 --- a/src/StellaOps.Policy.Engine/Hosting/PolicyEngineStartupDiagnostics.cs +++ b/src/Policy/StellaOps.Policy.Engine/Hosting/PolicyEngineStartupDiagnostics.cs @@ -1,12 +1,12 @@ -using System.Threading; - -namespace StellaOps.Policy.Engine.Hosting; - -internal sealed class PolicyEngineStartupDiagnostics -{ - private int isReady; - - public bool IsReady => Volatile.Read(ref isReady) == 1; - - public void MarkReady() => Volatile.Write(ref isReady, 1); -} +using System.Threading; + +namespace StellaOps.Policy.Engine.Hosting; + +internal sealed class PolicyEngineStartupDiagnostics +{ + private int isReady; + + public bool IsReady => Volatile.Read(ref isReady) == 1; + + public void MarkReady() => Volatile.Write(ref isReady, 1); +} diff --git a/src/StellaOps.Policy.Engine/Options/PolicyEngineOptions.cs b/src/Policy/StellaOps.Policy.Engine/Options/PolicyEngineOptions.cs similarity index 96% rename from src/StellaOps.Policy.Engine/Options/PolicyEngineOptions.cs rename to src/Policy/StellaOps.Policy.Engine/Options/PolicyEngineOptions.cs index 0b86996d..f5210eb1 100644 --- a/src/StellaOps.Policy.Engine/Options/PolicyEngineOptions.cs +++ b/src/Policy/StellaOps.Policy.Engine/Options/PolicyEngineOptions.cs @@ -1,168 +1,168 @@ -using System.Collections.ObjectModel; -using StellaOps.Auth.Abstractions; - -namespace StellaOps.Policy.Engine.Options; - -/// <summary> -/// Root configuration for the Policy Engine host. -/// </summary> -public sealed class PolicyEngineOptions -{ - public const string SectionName = "PolicyEngine"; - - public PolicyEngineAuthorityOptions Authority { get; } = new(); - - public PolicyEngineStorageOptions Storage { get; } = new(); - - public PolicyEngineWorkerOptions Workers { get; } = new(); - - public PolicyEngineResourceServerOptions ResourceServer { get; } = new(); - - public void Validate() - { - Authority.Validate(); - Storage.Validate(); - Workers.Validate(); - ResourceServer.Validate(); - } -} - -public sealed class PolicyEngineAuthorityOptions -{ - public bool Enabled { get; set; } = true; - - public string Issuer { get; set; } = "https://authority.stella-ops.local"; - - public string ClientId { get; set; } = "policy-engine"; - - public string? 
ClientSecret { get; set; } - - public IList<string> Scopes { get; } = new List<string> - { - StellaOpsScopes.PolicyRun, - StellaOpsScopes.FindingsRead, - StellaOpsScopes.EffectiveWrite - }; - - public int BackchannelTimeoutSeconds { get; set; } = 30; - - public void Validate() - { - if (!Enabled) - { - return; - } - - if (string.IsNullOrWhiteSpace(Issuer)) - { - throw new InvalidOperationException("Policy Engine authority configuration requires an issuer."); - } - - if (!Uri.TryCreate(Issuer, UriKind.Absolute, out var issuerUri) || !issuerUri.IsAbsoluteUri) - { - throw new InvalidOperationException("Policy Engine authority issuer must be an absolute URI."); - } - - if (issuerUri.Scheme != Uri.UriSchemeHttps && !issuerUri.IsLoopback) - { - throw new InvalidOperationException("Policy Engine authority issuer must use HTTPS unless targeting loopback."); - } - - if (string.IsNullOrWhiteSpace(ClientId)) - { - throw new InvalidOperationException("Policy Engine authority configuration requires a clientId."); - } - - if (Scopes.Count == 0) - { - throw new InvalidOperationException("Policy Engine authority configuration requires at least one scope."); - } - - if (BackchannelTimeoutSeconds <= 0) - { - throw new InvalidOperationException("Policy Engine authority backchannel timeout must be greater than zero."); - } - } -} - -public sealed class PolicyEngineStorageOptions -{ - public string ConnectionString { get; set; } = "mongodb://localhost:27017/policy-engine"; - - public string DatabaseName { get; set; } = "policy_engine"; - - public int CommandTimeoutSeconds { get; set; } = 30; - - public void Validate() - { - if (string.IsNullOrWhiteSpace(ConnectionString)) - { - throw new InvalidOperationException("Policy Engine storage configuration requires a MongoDB connection string."); - } - - if (string.IsNullOrWhiteSpace(DatabaseName)) - { - throw new InvalidOperationException("Policy Engine storage configuration requires a database name."); - } - - if (CommandTimeoutSeconds <= 0) - { - throw new InvalidOperationException("Policy Engine storage command timeout must be greater than zero."); - } - } - - public TimeSpan CommandTimeout => TimeSpan.FromSeconds(CommandTimeoutSeconds); -} - -public sealed class PolicyEngineWorkerOptions -{ - public int SchedulerIntervalSeconds { get; set; } = 15; - - public int MaxConcurrentEvaluations { get; set; } = 4; - - public void Validate() - { - if (SchedulerIntervalSeconds <= 0) - { - throw new InvalidOperationException("Policy Engine worker interval must be greater than zero."); - } - - if (MaxConcurrentEvaluations <= 0) - { - throw new InvalidOperationException("Policy Engine worker concurrency must be greater than zero."); - } - } -} - -public sealed class PolicyEngineResourceServerOptions -{ - public string Authority { get; set; } = "https://authority.stella-ops.local"; - - public IList<string> Audiences { get; } = new List<string> { "api://policy-engine" }; - - public IList<string> RequiredScopes { get; } = new List<string> { StellaOpsScopes.PolicyRun }; - - public IList<string> RequiredTenants { get; } = new List<string>(); - - public IList<string> BypassNetworks { get; } = new List<string> { "127.0.0.1/32", "::1/128" }; - - public bool RequireHttpsMetadata { get; set; } = true; - - public void Validate() - { - if (string.IsNullOrWhiteSpace(Authority)) - { - throw new InvalidOperationException("Resource server configuration requires an Authority URL."); - } - - if (!Uri.TryCreate(Authority.Trim(), UriKind.Absolute, out var uri)) - { - throw new 
InvalidOperationException("Resource server Authority URL must be absolute."); - } - - if (RequireHttpsMetadata && !uri.IsLoopback && !string.Equals(uri.Scheme, Uri.UriSchemeHttps, StringComparison.OrdinalIgnoreCase)) - { - throw new InvalidOperationException("Resource server Authority URL must use HTTPS when HTTPS metadata is required."); - } - } -} +using System.Collections.ObjectModel; +using StellaOps.Auth.Abstractions; + +namespace StellaOps.Policy.Engine.Options; + +/// <summary> +/// Root configuration for the Policy Engine host. +/// </summary> +public sealed class PolicyEngineOptions +{ + public const string SectionName = "PolicyEngine"; + + public PolicyEngineAuthorityOptions Authority { get; } = new(); + + public PolicyEngineStorageOptions Storage { get; } = new(); + + public PolicyEngineWorkerOptions Workers { get; } = new(); + + public PolicyEngineResourceServerOptions ResourceServer { get; } = new(); + + public void Validate() + { + Authority.Validate(); + Storage.Validate(); + Workers.Validate(); + ResourceServer.Validate(); + } +} + +public sealed class PolicyEngineAuthorityOptions +{ + public bool Enabled { get; set; } = true; + + public string Issuer { get; set; } = "https://authority.stella-ops.local"; + + public string ClientId { get; set; } = "policy-engine"; + + public string? ClientSecret { get; set; } + + public IList<string> Scopes { get; } = new List<string> + { + StellaOpsScopes.PolicyRun, + StellaOpsScopes.FindingsRead, + StellaOpsScopes.EffectiveWrite + }; + + public int BackchannelTimeoutSeconds { get; set; } = 30; + + public void Validate() + { + if (!Enabled) + { + return; + } + + if (string.IsNullOrWhiteSpace(Issuer)) + { + throw new InvalidOperationException("Policy Engine authority configuration requires an issuer."); + } + + if (!Uri.TryCreate(Issuer, UriKind.Absolute, out var issuerUri) || !issuerUri.IsAbsoluteUri) + { + throw new InvalidOperationException("Policy Engine authority issuer must be an absolute URI."); + } + + if (issuerUri.Scheme != Uri.UriSchemeHttps && !issuerUri.IsLoopback) + { + throw new InvalidOperationException("Policy Engine authority issuer must use HTTPS unless targeting loopback."); + } + + if (string.IsNullOrWhiteSpace(ClientId)) + { + throw new InvalidOperationException("Policy Engine authority configuration requires a clientId."); + } + + if (Scopes.Count == 0) + { + throw new InvalidOperationException("Policy Engine authority configuration requires at least one scope."); + } + + if (BackchannelTimeoutSeconds <= 0) + { + throw new InvalidOperationException("Policy Engine authority backchannel timeout must be greater than zero."); + } + } +} + +public sealed class PolicyEngineStorageOptions +{ + public string ConnectionString { get; set; } = "mongodb://localhost:27017/policy-engine"; + + public string DatabaseName { get; set; } = "policy_engine"; + + public int CommandTimeoutSeconds { get; set; } = 30; + + public void Validate() + { + if (string.IsNullOrWhiteSpace(ConnectionString)) + { + throw new InvalidOperationException("Policy Engine storage configuration requires a MongoDB connection string."); + } + + if (string.IsNullOrWhiteSpace(DatabaseName)) + { + throw new InvalidOperationException("Policy Engine storage configuration requires a database name."); + } + + if (CommandTimeoutSeconds <= 0) + { + throw new InvalidOperationException("Policy Engine storage command timeout must be greater than zero."); + } + } + + public TimeSpan CommandTimeout => TimeSpan.FromSeconds(CommandTimeoutSeconds); +} + +public sealed class 
PolicyEngineWorkerOptions +{ + public int SchedulerIntervalSeconds { get; set; } = 15; + + public int MaxConcurrentEvaluations { get; set; } = 4; + + public void Validate() + { + if (SchedulerIntervalSeconds <= 0) + { + throw new InvalidOperationException("Policy Engine worker interval must be greater than zero."); + } + + if (MaxConcurrentEvaluations <= 0) + { + throw new InvalidOperationException("Policy Engine worker concurrency must be greater than zero."); + } + } +} + +public sealed class PolicyEngineResourceServerOptions +{ + public string Authority { get; set; } = "https://authority.stella-ops.local"; + + public IList<string> Audiences { get; } = new List<string> { "api://policy-engine" }; + + public IList<string> RequiredScopes { get; } = new List<string> { StellaOpsScopes.PolicyRun }; + + public IList<string> RequiredTenants { get; } = new List<string>(); + + public IList<string> BypassNetworks { get; } = new List<string> { "127.0.0.1/32", "::1/128" }; + + public bool RequireHttpsMetadata { get; set; } = true; + + public void Validate() + { + if (string.IsNullOrWhiteSpace(Authority)) + { + throw new InvalidOperationException("Resource server configuration requires an Authority URL."); + } + + if (!Uri.TryCreate(Authority.Trim(), UriKind.Absolute, out var uri)) + { + throw new InvalidOperationException("Resource server Authority URL must be absolute."); + } + + if (RequireHttpsMetadata && !uri.IsLoopback && !string.Equals(uri.Scheme, Uri.UriSchemeHttps, StringComparison.OrdinalIgnoreCase)) + { + throw new InvalidOperationException("Resource server Authority URL must use HTTPS when HTTPS metadata is required."); + } + } +} diff --git a/src/StellaOps.Policy.Engine/Program.cs b/src/Policy/StellaOps.Policy.Engine/Program.cs similarity index 97% rename from src/StellaOps.Policy.Engine/Program.cs rename to src/Policy/StellaOps.Policy.Engine/Program.cs index c15c860a..e1c77b34 100644 --- a/src/StellaOps.Policy.Engine/Program.cs +++ b/src/Policy/StellaOps.Policy.Engine/Program.cs @@ -1,139 +1,139 @@ -using System.IO; -using Microsoft.Extensions.Options; -using NetEscapades.Configuration.Yaml; -using StellaOps.Auth.Abstractions; -using StellaOps.Auth.Client; -using StellaOps.Auth.ServerIntegration; -using StellaOps.Configuration; -using StellaOps.Policy.Engine.Hosting; -using StellaOps.Policy.Engine.Options; -using StellaOps.Policy.Engine.Compilation; -using StellaOps.Policy.Engine.Endpoints; -using StellaOps.Policy.Engine.Services; -using StellaOps.Policy.Engine.Workers; - -var builder = WebApplication.CreateBuilder(args); - -builder.Logging.ClearProviders(); -builder.Logging.AddConsole(); - -builder.Configuration.AddStellaOpsDefaults(options => -{ - options.BasePath = builder.Environment.ContentRootPath; - options.EnvironmentPrefix = "STELLAOPS_POLICY_ENGINE_"; - options.ConfigureBuilder = configurationBuilder => - { - var contentRoot = builder.Environment.ContentRootPath; - foreach (var relative in new[] - { - "../etc/policy-engine.yaml", - "../etc/policy-engine.local.yaml", - "policy-engine.yaml", - "policy-engine.local.yaml" - }) - { - var path = Path.Combine(contentRoot, relative); - configurationBuilder.AddYamlFile(path, optional: true); - } - }; -}); - -var bootstrap = StellaOpsConfigurationBootstrapper.Build<PolicyEngineOptions>(options => -{ - options.BasePath = builder.Environment.ContentRootPath; - options.EnvironmentPrefix = "STELLAOPS_POLICY_ENGINE_"; - options.BindingSection = PolicyEngineOptions.SectionName; - options.ConfigureBuilder = configurationBuilder => - { - foreach 
(var relative in new[] - { - "../etc/policy-engine.yaml", - "../etc/policy-engine.local.yaml", - "policy-engine.yaml", - "policy-engine.local.yaml" - }) - { - var path = Path.Combine(builder.Environment.ContentRootPath, relative); - configurationBuilder.AddYamlFile(path, optional: true); - } - }; - options.PostBind = static (value, _) => value.Validate(); -}); - -builder.Configuration.AddConfiguration(bootstrap.Configuration); - -builder.Services.AddOptions<PolicyEngineOptions>() - .Bind(builder.Configuration.GetSection(PolicyEngineOptions.SectionName)) - .Validate(options => - { - try - { - options.Validate(); - return true; - } - catch (Exception ex) - { - throw new OptionsValidationException( - PolicyEngineOptions.SectionName, - typeof(PolicyEngineOptions), - new[] { ex.Message }); - } - }) - .ValidateOnStart(); - -builder.Services.AddSingleton(sp => sp.GetRequiredService<IOptions<PolicyEngineOptions>>().Value); -builder.Services.AddSingleton(TimeProvider.System); -builder.Services.AddSingleton<PolicyEngineStartupDiagnostics>(); -builder.Services.AddHostedService<PolicyEngineBootstrapWorker>(); -builder.Services.AddSingleton<PolicyCompiler>(); -builder.Services.AddSingleton<PolicyCompilationService>(); -builder.Services.AddSingleton<PolicyEvaluationService>(); -builder.Services.AddSingleton<IPolicyPackRepository, InMemoryPolicyPackRepository>(); - -builder.Services.AddHttpContextAccessor(); -builder.Services.AddRouting(options => options.LowercaseUrls = true); -builder.Services.AddProblemDetails(); -builder.Services.AddHealthChecks(); - -builder.Services.AddAuthentication(); -builder.Services.AddAuthorization(); -builder.Services.AddStellaOpsScopeHandler(); -builder.Services.AddStellaOpsResourceServerAuthentication( - builder.Configuration, - configurationSection: $"{PolicyEngineOptions.SectionName}:ResourceServer"); - -if (bootstrap.Options.Authority.Enabled) -{ - builder.Services.AddStellaOpsAuthClient(clientOptions => - { - clientOptions.Authority = bootstrap.Options.Authority.Issuer; - clientOptions.ClientId = bootstrap.Options.Authority.ClientId; - clientOptions.ClientSecret = bootstrap.Options.Authority.ClientSecret; - clientOptions.HttpTimeout = TimeSpan.FromSeconds(bootstrap.Options.Authority.BackchannelTimeoutSeconds); - - clientOptions.DefaultScopes.Clear(); - foreach (var scope in bootstrap.Options.Authority.Scopes) - { - clientOptions.DefaultScopes.Add(scope); - } - }); -} - -var app = builder.Build(); - -app.UseAuthentication(); -app.UseAuthorization(); - -app.MapHealthChecks("/healthz"); -app.MapGet("/readyz", (PolicyEngineStartupDiagnostics diagnostics) => - diagnostics.IsReady - ? 
Results.Ok(new { status = "ready" }) - : Results.StatusCode(StatusCodes.Status503ServiceUnavailable)) - .WithName("Readiness"); - -app.MapGet("/", () => Results.Redirect("/healthz")); - -app.MapPolicyCompilation(); -app.MapPolicyPacks(); - -app.Run(); +using System.IO; +using Microsoft.Extensions.Options; +using NetEscapades.Configuration.Yaml; +using StellaOps.Auth.Abstractions; +using StellaOps.Auth.Client; +using StellaOps.Auth.ServerIntegration; +using StellaOps.Configuration; +using StellaOps.Policy.Engine.Hosting; +using StellaOps.Policy.Engine.Options; +using StellaOps.Policy.Engine.Compilation; +using StellaOps.Policy.Engine.Endpoints; +using StellaOps.Policy.Engine.Services; +using StellaOps.Policy.Engine.Workers; + +var builder = WebApplication.CreateBuilder(args); + +builder.Logging.ClearProviders(); +builder.Logging.AddConsole(); + +builder.Configuration.AddStellaOpsDefaults(options => +{ + options.BasePath = builder.Environment.ContentRootPath; + options.EnvironmentPrefix = "STELLAOPS_POLICY_ENGINE_"; + options.ConfigureBuilder = configurationBuilder => + { + var contentRoot = builder.Environment.ContentRootPath; + foreach (var relative in new[] + { + "../etc/policy-engine.yaml", + "../etc/policy-engine.local.yaml", + "policy-engine.yaml", + "policy-engine.local.yaml" + }) + { + var path = Path.Combine(contentRoot, relative); + configurationBuilder.AddYamlFile(path, optional: true); + } + }; +}); + +var bootstrap = StellaOpsConfigurationBootstrapper.Build<PolicyEngineOptions>(options => +{ + options.BasePath = builder.Environment.ContentRootPath; + options.EnvironmentPrefix = "STELLAOPS_POLICY_ENGINE_"; + options.BindingSection = PolicyEngineOptions.SectionName; + options.ConfigureBuilder = configurationBuilder => + { + foreach (var relative in new[] + { + "../etc/policy-engine.yaml", + "../etc/policy-engine.local.yaml", + "policy-engine.yaml", + "policy-engine.local.yaml" + }) + { + var path = Path.Combine(builder.Environment.ContentRootPath, relative); + configurationBuilder.AddYamlFile(path, optional: true); + } + }; + options.PostBind = static (value, _) => value.Validate(); +}); + +builder.Configuration.AddConfiguration(bootstrap.Configuration); + +builder.Services.AddOptions<PolicyEngineOptions>() + .Bind(builder.Configuration.GetSection(PolicyEngineOptions.SectionName)) + .Validate(options => + { + try + { + options.Validate(); + return true; + } + catch (Exception ex) + { + throw new OptionsValidationException( + PolicyEngineOptions.SectionName, + typeof(PolicyEngineOptions), + new[] { ex.Message }); + } + }) + .ValidateOnStart(); + +builder.Services.AddSingleton(sp => sp.GetRequiredService<IOptions<PolicyEngineOptions>>().Value); +builder.Services.AddSingleton(TimeProvider.System); +builder.Services.AddSingleton<PolicyEngineStartupDiagnostics>(); +builder.Services.AddHostedService<PolicyEngineBootstrapWorker>(); +builder.Services.AddSingleton<PolicyCompiler>(); +builder.Services.AddSingleton<PolicyCompilationService>(); +builder.Services.AddSingleton<PolicyEvaluationService>(); +builder.Services.AddSingleton<IPolicyPackRepository, InMemoryPolicyPackRepository>(); + +builder.Services.AddHttpContextAccessor(); +builder.Services.AddRouting(options => options.LowercaseUrls = true); +builder.Services.AddProblemDetails(); +builder.Services.AddHealthChecks(); + +builder.Services.AddAuthentication(); +builder.Services.AddAuthorization(); +builder.Services.AddStellaOpsScopeHandler(); +builder.Services.AddStellaOpsResourceServerAuthentication( + builder.Configuration, + 
configurationSection: $"{PolicyEngineOptions.SectionName}:ResourceServer"); + +if (bootstrap.Options.Authority.Enabled) +{ + builder.Services.AddStellaOpsAuthClient(clientOptions => + { + clientOptions.Authority = bootstrap.Options.Authority.Issuer; + clientOptions.ClientId = bootstrap.Options.Authority.ClientId; + clientOptions.ClientSecret = bootstrap.Options.Authority.ClientSecret; + clientOptions.HttpTimeout = TimeSpan.FromSeconds(bootstrap.Options.Authority.BackchannelTimeoutSeconds); + + clientOptions.DefaultScopes.Clear(); + foreach (var scope in bootstrap.Options.Authority.Scopes) + { + clientOptions.DefaultScopes.Add(scope); + } + }); +} + +var app = builder.Build(); + +app.UseAuthentication(); +app.UseAuthorization(); + +app.MapHealthChecks("/healthz"); +app.MapGet("/readyz", (PolicyEngineStartupDiagnostics diagnostics) => + diagnostics.IsReady + ? Results.Ok(new { status = "ready" }) + : Results.StatusCode(StatusCodes.Status503ServiceUnavailable)) + .WithName("Readiness"); + +app.MapGet("/", () => Results.Redirect("/healthz")); + +app.MapPolicyCompilation(); +app.MapPolicyPacks(); + +app.Run(); diff --git a/src/StellaOps.Policy.Engine/Properties/AssemblyInfo.cs b/src/Policy/StellaOps.Policy.Engine/Properties/AssemblyInfo.cs similarity index 97% rename from src/StellaOps.Policy.Engine/Properties/AssemblyInfo.cs rename to src/Policy/StellaOps.Policy.Engine/Properties/AssemblyInfo.cs index 826700d8..b845e3ac 100644 --- a/src/StellaOps.Policy.Engine/Properties/AssemblyInfo.cs +++ b/src/Policy/StellaOps.Policy.Engine/Properties/AssemblyInfo.cs @@ -1,3 +1,3 @@ -using System.Runtime.CompilerServices; - -[assembly: InternalsVisibleTo("StellaOps.Policy.Engine.Tests")] +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("StellaOps.Policy.Engine.Tests")] diff --git a/src/StellaOps.Policy.Engine/README.md b/src/Policy/StellaOps.Policy.Engine/README.md similarity index 98% rename from src/StellaOps.Policy.Engine/README.md rename to src/Policy/StellaOps.Policy.Engine/README.md index 2531805e..bb38180f 100644 --- a/src/StellaOps.Policy.Engine/README.md +++ b/src/Policy/StellaOps.Policy.Engine/README.md @@ -1,14 +1,14 @@ -# Policy Engine Host Template - -This service hosts the Policy Engine APIs and background workers introduced in **Policy Engine v2**. The project currently ships a minimal bootstrap that validates configuration, registers Authority clients, and exposes readiness/health endpoints. Future tasks will extend it with compilation, evaluation, and persistence features. - -## Compliance Checklist - -- [x] Configuration loads from `policy-engine.yaml`/environment variables and validates on startup. -- [x] Authority client scaffolding enforces `policy:*` + `effective:write` scopes and respects back-channel timeouts. -- [x] Resource server authentication requires Policy Engine scopes with tenant-aware policies. -- [x] Health and readiness endpoints exist for platform probes. -- [ ] Deterministic policy evaluation pipeline implemented (POLICY-ENGINE-20-002). -- [ ] Mongo materialisation writers implemented (POLICY-ENGINE-20-004). -- [ ] Observability (metrics/traces/logs) completed (POLICY-ENGINE-20-007). -- [ ] Comprehensive test suites and perf baselines established (POLICY-ENGINE-20-008). +# Policy Engine Host Template + +This service hosts the Policy Engine APIs and background workers introduced in **Policy Engine v2**. The project currently ships a minimal bootstrap that validates configuration, registers Authority clients, and exposes readiness/health endpoints. 
Future tasks will extend it with compilation, evaluation, and persistence features. + +## Compliance Checklist + +- [x] Configuration loads from `policy-engine.yaml`/environment variables and validates on startup. +- [x] Authority client scaffolding enforces `policy:*` + `effective:write` scopes and respects back-channel timeouts. +- [x] Resource server authentication requires Policy Engine scopes with tenant-aware policies. +- [x] Health and readiness endpoints exist for platform probes. +- [ ] Deterministic policy evaluation pipeline implemented (POLICY-ENGINE-20-002). +- [ ] Mongo materialisation writers implemented (POLICY-ENGINE-20-004). +- [ ] Observability (metrics/traces/logs) completed (POLICY-ENGINE-20-007). +- [ ] Comprehensive test suites and perf baselines established (POLICY-ENGINE-20-008). diff --git a/src/StellaOps.Policy.Engine/Services/IPolicyPackRepository.cs b/src/Policy/StellaOps.Policy.Engine/Services/IPolicyPackRepository.cs similarity index 97% rename from src/StellaOps.Policy.Engine/Services/IPolicyPackRepository.cs rename to src/Policy/StellaOps.Policy.Engine/Services/IPolicyPackRepository.cs index e1b05022..0ae64cbd 100644 --- a/src/StellaOps.Policy.Engine/Services/IPolicyPackRepository.cs +++ b/src/Policy/StellaOps.Policy.Engine/Services/IPolicyPackRepository.cs @@ -1,29 +1,29 @@ -using StellaOps.Policy.Engine.Domain; - -namespace StellaOps.Policy.Engine.Services; - -internal interface IPolicyPackRepository -{ - Task<PolicyPackRecord> CreateAsync(string packId, string? displayName, CancellationToken cancellationToken); - - Task<IReadOnlyList<PolicyPackRecord>> ListAsync(CancellationToken cancellationToken); - - Task<PolicyRevisionRecord> UpsertRevisionAsync(string packId, int version, bool requiresTwoPersonApproval, PolicyRevisionStatus initialStatus, CancellationToken cancellationToken); - - Task<PolicyRevisionRecord?> GetRevisionAsync(string packId, int version, CancellationToken cancellationToken); - - Task<PolicyActivationResult> RecordActivationAsync(string packId, int version, string actorId, DateTimeOffset timestamp, string? comment, CancellationToken cancellationToken); -} - -internal sealed record PolicyActivationResult(PolicyActivationResultStatus Status, PolicyRevisionRecord? Revision); - -internal enum PolicyActivationResultStatus -{ - PackNotFound, - RevisionNotFound, - NotApproved, - DuplicateApproval, - PendingSecondApproval, - Activated, - AlreadyActive -} +using StellaOps.Policy.Engine.Domain; + +namespace StellaOps.Policy.Engine.Services; + +internal interface IPolicyPackRepository +{ + Task<PolicyPackRecord> CreateAsync(string packId, string? displayName, CancellationToken cancellationToken); + + Task<IReadOnlyList<PolicyPackRecord>> ListAsync(CancellationToken cancellationToken); + + Task<PolicyRevisionRecord> UpsertRevisionAsync(string packId, int version, bool requiresTwoPersonApproval, PolicyRevisionStatus initialStatus, CancellationToken cancellationToken); + + Task<PolicyRevisionRecord?> GetRevisionAsync(string packId, int version, CancellationToken cancellationToken); + + Task<PolicyActivationResult> RecordActivationAsync(string packId, int version, string actorId, DateTimeOffset timestamp, string? comment, CancellationToken cancellationToken); +} + +internal sealed record PolicyActivationResult(PolicyActivationResultStatus Status, PolicyRevisionRecord? 
Revision); + +internal enum PolicyActivationResultStatus +{ + PackNotFound, + RevisionNotFound, + NotApproved, + DuplicateApproval, + PendingSecondApproval, + Activated, + AlreadyActive +} diff --git a/src/StellaOps.Policy.Engine/Services/InMemoryPolicyPackRepository.cs b/src/Policy/StellaOps.Policy.Engine/Services/InMemoryPolicyPackRepository.cs similarity index 97% rename from src/StellaOps.Policy.Engine/Services/InMemoryPolicyPackRepository.cs rename to src/Policy/StellaOps.Policy.Engine/Services/InMemoryPolicyPackRepository.cs index b44f3028..4ef3e29c 100644 --- a/src/StellaOps.Policy.Engine/Services/InMemoryPolicyPackRepository.cs +++ b/src/Policy/StellaOps.Policy.Engine/Services/InMemoryPolicyPackRepository.cs @@ -1,93 +1,93 @@ -using System.Collections.Concurrent; -using StellaOps.Policy.Engine.Domain; - -namespace StellaOps.Policy.Engine.Services; - -internal sealed class InMemoryPolicyPackRepository : IPolicyPackRepository -{ - private readonly ConcurrentDictionary<string, PolicyPackRecord> packs = new(StringComparer.OrdinalIgnoreCase); - - public Task<PolicyPackRecord> CreateAsync(string packId, string? displayName, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(packId); - - var created = packs.GetOrAdd(packId, id => new PolicyPackRecord(id, displayName, DateTimeOffset.UtcNow)); - return Task.FromResult(created); - } - - public Task<IReadOnlyList<PolicyPackRecord>> ListAsync(CancellationToken cancellationToken) - { - IReadOnlyList<PolicyPackRecord> list = packs.Values - .OrderBy(pack => pack.PackId, StringComparer.Ordinal) - .ToList(); - return Task.FromResult(list); - } - - public Task<PolicyRevisionRecord> UpsertRevisionAsync(string packId, int version, bool requiresTwoPersonApproval, PolicyRevisionStatus initialStatus, CancellationToken cancellationToken) - { - var pack = packs.GetOrAdd(packId, id => new PolicyPackRecord(id, null, DateTimeOffset.UtcNow)); - int revisionVersion = version > 0 ? version : pack.GetNextVersion(); - var revision = pack.GetOrAddRevision( - revisionVersion, - v => new PolicyRevisionRecord(v, requiresTwoPersonApproval, initialStatus, DateTimeOffset.UtcNow)); - - if (revision.Status != initialStatus) - { - revision.SetStatus(initialStatus, DateTimeOffset.UtcNow); - } - - return Task.FromResult(revision); - } - - public Task<PolicyRevisionRecord?> GetRevisionAsync(string packId, int version, CancellationToken cancellationToken) - { - if (!packs.TryGetValue(packId, out var pack)) - { - return Task.FromResult<PolicyRevisionRecord?>(null); - } - - return Task.FromResult(pack.TryGetRevision(version, out var revision) ? revision : null); - } - - public Task<PolicyActivationResult> RecordActivationAsync(string packId, int version, string actorId, DateTimeOffset timestamp, string? 
comment, CancellationToken cancellationToken) - { - if (!packs.TryGetValue(packId, out var pack)) - { - return Task.FromResult(new PolicyActivationResult(PolicyActivationResultStatus.PackNotFound, null)); - } - - if (!pack.TryGetRevision(version, out var revision)) - { - return Task.FromResult(new PolicyActivationResult(PolicyActivationResultStatus.RevisionNotFound, null)); - } - - if (revision.Status == PolicyRevisionStatus.Active) - { - return Task.FromResult(new PolicyActivationResult(PolicyActivationResultStatus.AlreadyActive, revision)); - } - - if (revision.Status != PolicyRevisionStatus.Approved) - { - return Task.FromResult(new PolicyActivationResult(PolicyActivationResultStatus.NotApproved, revision)); - } - - var approvalStatus = revision.AddApproval(new PolicyActivationApproval(actorId, timestamp, comment)); - return Task.FromResult(approvalStatus switch - { - PolicyActivationApprovalStatus.Duplicate => new PolicyActivationResult(PolicyActivationResultStatus.DuplicateApproval, revision), - PolicyActivationApprovalStatus.Pending when revision.RequiresTwoPersonApproval - => new PolicyActivationResult(PolicyActivationResultStatus.PendingSecondApproval, revision), - PolicyActivationApprovalStatus.Pending => - ActivateRevision(revision, timestamp), - PolicyActivationApprovalStatus.ThresholdReached => - ActivateRevision(revision, timestamp), - _ => throw new InvalidOperationException("Unknown activation approval status.") - }); - } - - private static PolicyActivationResult ActivateRevision(PolicyRevisionRecord revision, DateTimeOffset timestamp) - { - revision.SetStatus(PolicyRevisionStatus.Active, timestamp); - return new PolicyActivationResult(PolicyActivationResultStatus.Activated, revision); - } -} +using System.Collections.Concurrent; +using StellaOps.Policy.Engine.Domain; + +namespace StellaOps.Policy.Engine.Services; + +internal sealed class InMemoryPolicyPackRepository : IPolicyPackRepository +{ + private readonly ConcurrentDictionary<string, PolicyPackRecord> packs = new(StringComparer.OrdinalIgnoreCase); + + public Task<PolicyPackRecord> CreateAsync(string packId, string? displayName, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(packId); + + var created = packs.GetOrAdd(packId, id => new PolicyPackRecord(id, displayName, DateTimeOffset.UtcNow)); + return Task.FromResult(created); + } + + public Task<IReadOnlyList<PolicyPackRecord>> ListAsync(CancellationToken cancellationToken) + { + IReadOnlyList<PolicyPackRecord> list = packs.Values + .OrderBy(pack => pack.PackId, StringComparer.Ordinal) + .ToList(); + return Task.FromResult(list); + } + + public Task<PolicyRevisionRecord> UpsertRevisionAsync(string packId, int version, bool requiresTwoPersonApproval, PolicyRevisionStatus initialStatus, CancellationToken cancellationToken) + { + var pack = packs.GetOrAdd(packId, id => new PolicyPackRecord(id, null, DateTimeOffset.UtcNow)); + int revisionVersion = version > 0 ? 
version : pack.GetNextVersion(); + var revision = pack.GetOrAddRevision( + revisionVersion, + v => new PolicyRevisionRecord(v, requiresTwoPersonApproval, initialStatus, DateTimeOffset.UtcNow)); + + if (revision.Status != initialStatus) + { + revision.SetStatus(initialStatus, DateTimeOffset.UtcNow); + } + + return Task.FromResult(revision); + } + + public Task<PolicyRevisionRecord?> GetRevisionAsync(string packId, int version, CancellationToken cancellationToken) + { + if (!packs.TryGetValue(packId, out var pack)) + { + return Task.FromResult<PolicyRevisionRecord?>(null); + } + + return Task.FromResult(pack.TryGetRevision(version, out var revision) ? revision : null); + } + + public Task<PolicyActivationResult> RecordActivationAsync(string packId, int version, string actorId, DateTimeOffset timestamp, string? comment, CancellationToken cancellationToken) + { + if (!packs.TryGetValue(packId, out var pack)) + { + return Task.FromResult(new PolicyActivationResult(PolicyActivationResultStatus.PackNotFound, null)); + } + + if (!pack.TryGetRevision(version, out var revision)) + { + return Task.FromResult(new PolicyActivationResult(PolicyActivationResultStatus.RevisionNotFound, null)); + } + + if (revision.Status == PolicyRevisionStatus.Active) + { + return Task.FromResult(new PolicyActivationResult(PolicyActivationResultStatus.AlreadyActive, revision)); + } + + if (revision.Status != PolicyRevisionStatus.Approved) + { + return Task.FromResult(new PolicyActivationResult(PolicyActivationResultStatus.NotApproved, revision)); + } + + var approvalStatus = revision.AddApproval(new PolicyActivationApproval(actorId, timestamp, comment)); + return Task.FromResult(approvalStatus switch + { + PolicyActivationApprovalStatus.Duplicate => new PolicyActivationResult(PolicyActivationResultStatus.DuplicateApproval, revision), + PolicyActivationApprovalStatus.Pending when revision.RequiresTwoPersonApproval + => new PolicyActivationResult(PolicyActivationResultStatus.PendingSecondApproval, revision), + PolicyActivationApprovalStatus.Pending => + ActivateRevision(revision, timestamp), + PolicyActivationApprovalStatus.ThresholdReached => + ActivateRevision(revision, timestamp), + _ => throw new InvalidOperationException("Unknown activation approval status.") + }); + } + + private static PolicyActivationResult ActivateRevision(PolicyRevisionRecord revision, DateTimeOffset timestamp) + { + revision.SetStatus(PolicyRevisionStatus.Active, timestamp); + return new PolicyActivationResult(PolicyActivationResultStatus.Activated, revision); + } +} diff --git a/src/StellaOps.Policy.Engine/Services/PolicyCompilationService.cs b/src/Policy/StellaOps.Policy.Engine/Services/PolicyCompilationService.cs similarity index 97% rename from src/StellaOps.Policy.Engine/Services/PolicyCompilationService.cs rename to src/Policy/StellaOps.Policy.Engine/Services/PolicyCompilationService.cs index 74dd6212..23e74a0e 100644 --- a/src/StellaOps.Policy.Engine/Services/PolicyCompilationService.cs +++ b/src/Policy/StellaOps.Policy.Engine/Services/PolicyCompilationService.cs @@ -1,120 +1,120 @@ -using System.Collections.Immutable; -using StellaOps.Policy; -using StellaOps.Policy.Engine.Compilation; - -namespace StellaOps.Policy.Engine.Services; - -/// <summary> -/// Provides deterministic compilation for <c>stella-dsl@1</c> policy documents and exposes -/// basic statistics consumed by API/CLI surfaces. 
-/// </summary> -internal sealed class PolicyCompilationService -{ - private readonly PolicyCompiler compiler; - - public PolicyCompilationService(PolicyCompiler compiler) - { - this.compiler = compiler ?? throw new ArgumentNullException(nameof(compiler)); - } - - public PolicyCompilationResultDto Compile(PolicyCompileRequest request) - { - if (request is null) - { - throw new ArgumentNullException(nameof(request)); - } - - if (request.Dsl is null || string.IsNullOrWhiteSpace(request.Dsl.Source)) - { - throw new ArgumentException("Compilation requires DSL source.", nameof(request)); - } - - if (!string.Equals(request.Dsl.Syntax, "stella-dsl@1", StringComparison.Ordinal)) - { - return PolicyCompilationResultDto.FromFailure( - ImmutableArray.Create(PolicyIssue.Error( - PolicyDslDiagnosticCodes.UnsupportedSyntaxVersion, - $"Unsupported syntax '{request.Dsl.Syntax ?? "null"}'. Expected 'stella-dsl@1'.", - "dsl.syntax"))); - } - - var result = compiler.Compile(request.Dsl.Source); - if (!result.Success || result.Document is null) - { - return PolicyCompilationResultDto.FromFailure(result.Diagnostics); - } - - return PolicyCompilationResultDto.FromSuccess(result); - } -} - -internal sealed record PolicyCompileRequest(PolicyDslPayload Dsl); - -internal sealed record PolicyDslPayload(string Syntax, string Source); - -internal sealed record PolicyCompilationResultDto( - bool Success, - string? Digest, - PolicyCompilationStatistics? Statistics, - ImmutableArray<PolicyIssue> Diagnostics) -{ - public static PolicyCompilationResultDto FromFailure(ImmutableArray<PolicyIssue> diagnostics) => - new(false, null, null, diagnostics); - - public static PolicyCompilationResultDto FromSuccess(PolicyCompilationResult compilationResult) - { - if (compilationResult.Document is null) - { - throw new ArgumentException("Compilation result must include a document for success.", nameof(compilationResult)); - } - - var stats = PolicyCompilationStatistics.Create(compilationResult.Document); - return new PolicyCompilationResultDto( - true, - $"sha256:{compilationResult.Checksum}", - stats, - compilationResult.Diagnostics); - } -} - -internal sealed record PolicyCompilationStatistics( - int RuleCount, - ImmutableDictionary<string, int> ActionCounts) -{ - public static PolicyCompilationStatistics Create(PolicyIrDocument document) - { - var actions = ImmutableDictionary.CreateBuilder<string, int>(StringComparer.OrdinalIgnoreCase); - - void Increment(string key) - { - actions[key] = actions.TryGetValue(key, out var existing) ? 
existing + 1 : 1; - } - - foreach (var rule in document.Rules) - { - foreach (var action in rule.ThenActions) - { - Increment(GetActionKey(action)); - } - - foreach (var action in rule.ElseActions) - { - Increment($"else:{GetActionKey(action)}"); - } - } - - return new PolicyCompilationStatistics(document.Rules.Length, actions.ToImmutable()); - } - - private static string GetActionKey(PolicyIrAction action) => action switch - { - PolicyIrAssignmentAction => "assign", - PolicyIrAnnotateAction => "annotate", - PolicyIrIgnoreAction => "ignore", - PolicyIrEscalateAction => "escalate", - PolicyIrRequireVexAction => "requireVex", - PolicyIrWarnAction => "warn", - PolicyIrDeferAction => "defer", - _ => "unknown" - }; -} +using System.Collections.Immutable; +using StellaOps.Policy; +using StellaOps.Policy.Engine.Compilation; + +namespace StellaOps.Policy.Engine.Services; + +/// <summary> +/// Provides deterministic compilation for <c>stella-dsl@1</c> policy documents and exposes +/// basic statistics consumed by API/CLI surfaces. +/// </summary> +internal sealed class PolicyCompilationService +{ + private readonly PolicyCompiler compiler; + + public PolicyCompilationService(PolicyCompiler compiler) + { + this.compiler = compiler ?? throw new ArgumentNullException(nameof(compiler)); + } + + public PolicyCompilationResultDto Compile(PolicyCompileRequest request) + { + if (request is null) + { + throw new ArgumentNullException(nameof(request)); + } + + if (request.Dsl is null || string.IsNullOrWhiteSpace(request.Dsl.Source)) + { + throw new ArgumentException("Compilation requires DSL source.", nameof(request)); + } + + if (!string.Equals(request.Dsl.Syntax, "stella-dsl@1", StringComparison.Ordinal)) + { + return PolicyCompilationResultDto.FromFailure( + ImmutableArray.Create(PolicyIssue.Error( + PolicyDslDiagnosticCodes.UnsupportedSyntaxVersion, + $"Unsupported syntax '{request.Dsl.Syntax ?? "null"}'. Expected 'stella-dsl@1'.", + "dsl.syntax"))); + } + + var result = compiler.Compile(request.Dsl.Source); + if (!result.Success || result.Document is null) + { + return PolicyCompilationResultDto.FromFailure(result.Diagnostics); + } + + return PolicyCompilationResultDto.FromSuccess(result); + } +} + +internal sealed record PolicyCompileRequest(PolicyDslPayload Dsl); + +internal sealed record PolicyDslPayload(string Syntax, string Source); + +internal sealed record PolicyCompilationResultDto( + bool Success, + string? Digest, + PolicyCompilationStatistics? 
Statistics, + ImmutableArray<PolicyIssue> Diagnostics) +{ + public static PolicyCompilationResultDto FromFailure(ImmutableArray<PolicyIssue> diagnostics) => + new(false, null, null, diagnostics); + + public static PolicyCompilationResultDto FromSuccess(PolicyCompilationResult compilationResult) + { + if (compilationResult.Document is null) + { + throw new ArgumentException("Compilation result must include a document for success.", nameof(compilationResult)); + } + + var stats = PolicyCompilationStatistics.Create(compilationResult.Document); + return new PolicyCompilationResultDto( + true, + $"sha256:{compilationResult.Checksum}", + stats, + compilationResult.Diagnostics); + } +} + +internal sealed record PolicyCompilationStatistics( + int RuleCount, + ImmutableDictionary<string, int> ActionCounts) +{ + public static PolicyCompilationStatistics Create(PolicyIrDocument document) + { + var actions = ImmutableDictionary.CreateBuilder<string, int>(StringComparer.OrdinalIgnoreCase); + + void Increment(string key) + { + actions[key] = actions.TryGetValue(key, out var existing) ? existing + 1 : 1; + } + + foreach (var rule in document.Rules) + { + foreach (var action in rule.ThenActions) + { + Increment(GetActionKey(action)); + } + + foreach (var action in rule.ElseActions) + { + Increment($"else:{GetActionKey(action)}"); + } + } + + return new PolicyCompilationStatistics(document.Rules.Length, actions.ToImmutable()); + } + + private static string GetActionKey(PolicyIrAction action) => action switch + { + PolicyIrAssignmentAction => "assign", + PolicyIrAnnotateAction => "annotate", + PolicyIrIgnoreAction => "ignore", + PolicyIrEscalateAction => "escalate", + PolicyIrRequireVexAction => "requireVex", + PolicyIrWarnAction => "warn", + PolicyIrDeferAction => "defer", + _ => "unknown" + }; +} diff --git a/src/StellaOps.Policy.Engine/Services/PolicyEvaluationService.cs b/src/Policy/StellaOps.Policy.Engine/Services/PolicyEvaluationService.cs similarity index 96% rename from src/StellaOps.Policy.Engine/Services/PolicyEvaluationService.cs rename to src/Policy/StellaOps.Policy.Engine/Services/PolicyEvaluationService.cs index c09f27d9..937f2cd0 100644 --- a/src/StellaOps.Policy.Engine/Services/PolicyEvaluationService.cs +++ b/src/Policy/StellaOps.Policy.Engine/Services/PolicyEvaluationService.cs @@ -1,26 +1,26 @@ -using System.Collections.Immutable; -using StellaOps.Policy.Engine.Compilation; -using StellaOps.Policy.Engine.Evaluation; - -namespace StellaOps.Policy.Engine.Services; - -internal sealed class PolicyEvaluationService -{ - private readonly PolicyEvaluator evaluator = new(); - - public PolicyEvaluationResult Evaluate(PolicyIrDocument document, PolicyEvaluationContext context) - { - if (document is null) - { - throw new ArgumentNullException(nameof(document)); - } - - if (context is null) - { - throw new ArgumentNullException(nameof(context)); - } - - var request = new PolicyEvaluationRequest(document, context); - return evaluator.Evaluate(request); - } -} +using System.Collections.Immutable; +using StellaOps.Policy.Engine.Compilation; +using StellaOps.Policy.Engine.Evaluation; + +namespace StellaOps.Policy.Engine.Services; + +internal sealed class PolicyEvaluationService +{ + private readonly PolicyEvaluator evaluator = new(); + + public PolicyEvaluationResult Evaluate(PolicyIrDocument document, PolicyEvaluationContext context) + { + if (document is null) + { + throw new ArgumentNullException(nameof(document)); + } + + if (context is null) + { + throw new ArgumentNullException(nameof(context)); + 
} + + var request = new PolicyEvaluationRequest(document, context); + return evaluator.Evaluate(request); + } +} diff --git a/src/StellaOps.Policy.Engine/Services/ScopeAuthorization.cs b/src/Policy/StellaOps.Policy.Engine/Services/ScopeAuthorization.cs similarity index 96% rename from src/StellaOps.Policy.Engine/Services/ScopeAuthorization.cs rename to src/Policy/StellaOps.Policy.Engine/Services/ScopeAuthorization.cs index 59a91622..14c4df38 100644 --- a/src/StellaOps.Policy.Engine/Services/ScopeAuthorization.cs +++ b/src/Policy/StellaOps.Policy.Engine/Services/ScopeAuthorization.cs @@ -1,53 +1,53 @@ -using System.Security.Claims; - -namespace StellaOps.Policy.Engine.Services; - -internal static class ScopeAuthorization -{ - private static readonly StringComparer ScopeComparer = StringComparer.OrdinalIgnoreCase; - - public static IResult? RequireScope(HttpContext context, string requiredScope) - { - if (context is null) - { - throw new ArgumentNullException(nameof(context)); - } - - if (string.IsNullOrWhiteSpace(requiredScope)) - { - throw new ArgumentException("Scope must be provided.", nameof(requiredScope)); - } - - var user = context.User; - if (user?.Identity?.IsAuthenticated is not true) - { - return Results.Unauthorized(); - } - - if (!HasScope(user, requiredScope)) - { - return Results.Forbid(); - } - - return null; - } - - private static bool HasScope(ClaimsPrincipal principal, string scope) - { - foreach (var claim in principal.FindAll("scope").Concat(principal.FindAll("scp"))) - { - if (string.IsNullOrWhiteSpace(claim.Value)) - { - continue; - } - - var scopes = claim.Value.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); - if (scopes.Any(value => ScopeComparer.Equals(value, scope))) - { - return true; - } - } - - return false; - } -} +using System.Security.Claims; + +namespace StellaOps.Policy.Engine.Services; + +internal static class ScopeAuthorization +{ + private static readonly StringComparer ScopeComparer = StringComparer.OrdinalIgnoreCase; + + public static IResult? 
RequireScope(HttpContext context, string requiredScope) + { + if (context is null) + { + throw new ArgumentNullException(nameof(context)); + } + + if (string.IsNullOrWhiteSpace(requiredScope)) + { + throw new ArgumentException("Scope must be provided.", nameof(requiredScope)); + } + + var user = context.User; + if (user?.Identity?.IsAuthenticated is not true) + { + return Results.Unauthorized(); + } + + if (!HasScope(user, requiredScope)) + { + return Results.Forbid(); + } + + return null; + } + + private static bool HasScope(ClaimsPrincipal principal, string scope) + { + foreach (var claim in principal.FindAll("scope").Concat(principal.FindAll("scp"))) + { + if (string.IsNullOrWhiteSpace(claim.Value)) + { + continue; + } + + var scopes = claim.Value.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); + if (scopes.Any(value => ScopeComparer.Equals(value, scope))) + { + return true; + } + } + + return false; + } +} diff --git a/src/Policy/StellaOps.Policy.Engine/StellaOps.Policy.Engine.csproj b/src/Policy/StellaOps.Policy.Engine/StellaOps.Policy.Engine.csproj new file mode 100644 index 00000000..b6c4e930 --- /dev/null +++ b/src/Policy/StellaOps.Policy.Engine/StellaOps.Policy.Engine.csproj @@ -0,0 +1,20 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk.Web"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <LangVersion>preview</LangVersion> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + <AspNetCoreHostingModel>InProcess</AspNetCoreHostingModel> + </PropertyGroup> + + <ItemGroup> + <ProjectReference Include="../__Libraries/StellaOps.Policy/StellaOps.Policy.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Configuration/StellaOps.Configuration.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj" /> + <ProjectReference Include="../../Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOps.Auth.Abstractions.csproj" /> + <ProjectReference Include="../../Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOps.Auth.Client.csproj" /> + <ProjectReference Include="../../Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOps.Auth.ServerIntegration.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Policy.Engine/TASKS.md b/src/Policy/StellaOps.Policy.Engine/TASKS.md similarity index 99% rename from src/StellaOps.Policy.Engine/TASKS.md rename to src/Policy/StellaOps.Policy.Engine/TASKS.md index afdde66c..e458f45f 100644 --- a/src/StellaOps.Policy.Engine/TASKS.md +++ b/src/Policy/StellaOps.Policy.Engine/TASKS.md @@ -8,7 +8,7 @@ | POLICY-ENGINE-20-002 | BLOCKED (2025-10-26) | Policy Guild | POLICY-ENGINE-20-001 | Build deterministic evaluator honoring lexical/priority order, first-match semantics, and safe value types (no wall-clock/network access). | Evaluator executes policies deterministically in unit/property tests; guard rejects forbidden intrinsics; perf baseline recorded. | > 2025-10-26: Blocked while bootstrapping DSL parser/evaluator; remaining grammar coverage (profile keywords, condition parsing) and rule evaluation semantics still pending to satisfy acceptance tests. 
| POLICY-ENGINE-20-003 | TODO | Policy Guild, Concelier Core Guild, Excititor Core Guild | POLICY-ENGINE-20-001, CONCELIER-POLICY-20-002, EXCITITOR-POLICY-20-002 | Implement selection joiners resolving SBOM↔advisory↔VEX tuples using linksets and PURL equivalence tables, with deterministic batching. | Joiners fetch correct candidate sets in integration tests; batching meets memory targets; explain traces list input provenance. | -> 2025-10-26: Scheduler DTO contracts for runs/diffs/explains available (`src/StellaOps.Scheduler.Models/docs/SCHED-MODELS-20-001-POLICY-RUNS.md`); consume `PolicyRunRequest/Status/DiffSummary` from samples under `samples/api/scheduler/`. +> 2025-10-26: Scheduler DTO contracts for runs/diffs/explains available (`src/Scheduler/__Libraries/StellaOps.Scheduler.Models/docs/SCHED-MODELS-20-001-POLICY-RUNS.md`); consume `PolicyRunRequest/Status/DiffSummary` from samples under `samples/api/scheduler/`. | POLICY-ENGINE-20-004 | TODO | Policy Guild, Platform Storage Guild | POLICY-ENGINE-20-003, CONCELIER-POLICY-20-003, EXCITITOR-POLICY-20-003 | Ship materialization writer that upserts into `effective_finding_{policyId}` with append-only history, tenant scoping, and trace references. | Writes restricted to Policy Engine identity; idempotent upserts proven via tests; collections indexed per design and docs updated. | | POLICY-ENGINE-20-005 | TODO | Policy Guild, Security Engineering | POLICY-ENGINE-20-002 | Enforce determinism guard banning wall-clock, RNG, and network usage during evaluation via static analysis + runtime sandbox. | Guard blocks forbidden APIs in unit/integration tests; violations emit `ERR_POL_004`; CI analyzer wired. | | POLICY-ENGINE-20-006 | TODO | Policy Guild, Scheduler Worker Guild | POLICY-ENGINE-20-003, POLICY-ENGINE-20-004, SCHED-WORKER-20-301 | Implement incremental orchestrator reacting to advisory/vex/SBOM change streams and scheduling partial policy re-evaluations. | Change stream listeners enqueue affected tuples with dedupe; orchestrator meets 5 min SLA in perf tests; metrics exposed (`policy_run_seconds`). | diff --git a/src/StellaOps.Policy.Engine/Workers/PolicyEngineBootstrapWorker.cs b/src/Policy/StellaOps.Policy.Engine/Workers/PolicyEngineBootstrapWorker.cs similarity index 97% rename from src/StellaOps.Policy.Engine/Workers/PolicyEngineBootstrapWorker.cs rename to src/Policy/StellaOps.Policy.Engine/Workers/PolicyEngineBootstrapWorker.cs index dbb57d17..62a95107 100644 --- a/src/StellaOps.Policy.Engine/Workers/PolicyEngineBootstrapWorker.cs +++ b/src/Policy/StellaOps.Policy.Engine/Workers/PolicyEngineBootstrapWorker.cs @@ -1,35 +1,35 @@ -using System; -using System.Threading; -using Microsoft.Extensions.Hosting; -using Microsoft.Extensions.Logging; -using StellaOps.Policy.Engine.Hosting; -using StellaOps.Policy.Engine.Options; - -namespace StellaOps.Policy.Engine.Workers; - -internal sealed class PolicyEngineBootstrapWorker : BackgroundService -{ - private readonly ILogger<PolicyEngineBootstrapWorker> logger; - private readonly PolicyEngineStartupDiagnostics diagnostics; - private readonly PolicyEngineOptions options; - - public PolicyEngineBootstrapWorker( - ILogger<PolicyEngineBootstrapWorker> logger, - PolicyEngineStartupDiagnostics diagnostics, - PolicyEngineOptions options) - { - this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); - this.diagnostics = diagnostics ?? throw new ArgumentNullException(nameof(diagnostics)); - this.options = options ?? 
throw new ArgumentNullException(nameof(options)); - } - - protected override Task ExecuteAsync(CancellationToken stoppingToken) - { - logger.LogInformation("Policy Engine bootstrap worker started. Authority issuer: {AuthorityIssuer}. Database: {Database}.", - options.Authority.Issuer, - options.Storage.DatabaseName); - - diagnostics.MarkReady(); - return Task.CompletedTask; - } -} +using System; +using System.Threading; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using StellaOps.Policy.Engine.Hosting; +using StellaOps.Policy.Engine.Options; + +namespace StellaOps.Policy.Engine.Workers; + +internal sealed class PolicyEngineBootstrapWorker : BackgroundService +{ + private readonly ILogger<PolicyEngineBootstrapWorker> logger; + private readonly PolicyEngineStartupDiagnostics diagnostics; + private readonly PolicyEngineOptions options; + + public PolicyEngineBootstrapWorker( + ILogger<PolicyEngineBootstrapWorker> logger, + PolicyEngineStartupDiagnostics diagnostics, + PolicyEngineOptions options) + { + this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); + this.diagnostics = diagnostics ?? throw new ArgumentNullException(nameof(diagnostics)); + this.options = options ?? throw new ArgumentNullException(nameof(options)); + } + + protected override Task ExecuteAsync(CancellationToken stoppingToken) + { + logger.LogInformation("Policy Engine bootstrap worker started. Authority issuer: {AuthorityIssuer}. Database: {Database}.", + options.Authority.Issuer, + options.Storage.DatabaseName); + + diagnostics.MarkReady(); + return Task.CompletedTask; + } +} diff --git a/src/StellaOps.Policy.Gateway/Clients/IPolicyEngineClient.cs b/src/Policy/StellaOps.Policy.Gateway/Clients/IPolicyEngineClient.cs similarity index 98% rename from src/StellaOps.Policy.Gateway/Clients/IPolicyEngineClient.cs rename to src/Policy/StellaOps.Policy.Gateway/Clients/IPolicyEngineClient.cs index f7983fe0..40956254 100644 --- a/src/StellaOps.Policy.Gateway/Clients/IPolicyEngineClient.cs +++ b/src/Policy/StellaOps.Policy.Gateway/Clients/IPolicyEngineClient.cs @@ -1,15 +1,15 @@ -using StellaOps.Policy.Gateway.Contracts; -using StellaOps.Policy.Gateway.Infrastructure; - -namespace StellaOps.Policy.Gateway.Clients; - -internal interface IPolicyEngineClient -{ - Task<PolicyEngineResponse<IReadOnlyList<PolicyPackSummaryDto>>> ListPolicyPacksAsync(GatewayForwardingContext? forwardingContext, CancellationToken cancellationToken); - - Task<PolicyEngineResponse<PolicyPackDto>> CreatePolicyPackAsync(GatewayForwardingContext? forwardingContext, CreatePolicyPackRequest request, CancellationToken cancellationToken); - - Task<PolicyEngineResponse<PolicyRevisionDto>> CreatePolicyRevisionAsync(GatewayForwardingContext? forwardingContext, string packId, CreatePolicyRevisionRequest request, CancellationToken cancellationToken); - - Task<PolicyEngineResponse<PolicyRevisionActivationDto>> ActivatePolicyRevisionAsync(GatewayForwardingContext? forwardingContext, string packId, int version, ActivatePolicyRevisionRequest request, CancellationToken cancellationToken); -} +using StellaOps.Policy.Gateway.Contracts; +using StellaOps.Policy.Gateway.Infrastructure; + +namespace StellaOps.Policy.Gateway.Clients; + +internal interface IPolicyEngineClient +{ + Task<PolicyEngineResponse<IReadOnlyList<PolicyPackSummaryDto>>> ListPolicyPacksAsync(GatewayForwardingContext? 
forwardingContext, CancellationToken cancellationToken); + + Task<PolicyEngineResponse<PolicyPackDto>> CreatePolicyPackAsync(GatewayForwardingContext? forwardingContext, CreatePolicyPackRequest request, CancellationToken cancellationToken); + + Task<PolicyEngineResponse<PolicyRevisionDto>> CreatePolicyRevisionAsync(GatewayForwardingContext? forwardingContext, string packId, CreatePolicyRevisionRequest request, CancellationToken cancellationToken); + + Task<PolicyEngineResponse<PolicyRevisionActivationDto>> ActivatePolicyRevisionAsync(GatewayForwardingContext? forwardingContext, string packId, int version, ActivatePolicyRevisionRequest request, CancellationToken cancellationToken); +} diff --git a/src/StellaOps.Policy.Gateway/Clients/PolicyEngineClient.cs b/src/Policy/StellaOps.Policy.Gateway/Clients/PolicyEngineClient.cs similarity index 97% rename from src/StellaOps.Policy.Gateway/Clients/PolicyEngineClient.cs rename to src/Policy/StellaOps.Policy.Gateway/Clients/PolicyEngineClient.cs index 3c10a4db..649503c7 100644 --- a/src/StellaOps.Policy.Gateway/Clients/PolicyEngineClient.cs +++ b/src/Policy/StellaOps.Policy.Gateway/Clients/PolicyEngineClient.cs @@ -1,199 +1,199 @@ -using System; -using System.Collections.Generic; -using System.Net; -using System.Net.Http; -using System.Net.Http.Json; -using System.Text.Json; -using Microsoft.AspNetCore.Http; -using Microsoft.AspNetCore.Mvc; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Policy.Gateway.Contracts; -using StellaOps.Policy.Gateway.Infrastructure; -using StellaOps.Policy.Gateway.Options; -using StellaOps.Policy.Gateway.Services; - -namespace StellaOps.Policy.Gateway.Clients; - -internal sealed class PolicyEngineClient : IPolicyEngineClient -{ - private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) - { - PropertyNameCaseInsensitive = true - }; - - private readonly HttpClient httpClient; - private readonly PolicyEngineTokenProvider tokenProvider; - private readonly ILogger<PolicyEngineClient> logger; - private readonly PolicyGatewayOptions options; - - public PolicyEngineClient( - HttpClient httpClient, - IOptions<PolicyGatewayOptions> options, - PolicyEngineTokenProvider tokenProvider, - ILogger<PolicyEngineClient> logger) - { - this.httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient)); - this.tokenProvider = tokenProvider ?? throw new ArgumentNullException(nameof(tokenProvider)); - this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); - - if (options is null) - { - throw new ArgumentNullException(nameof(options)); - } - - this.options = options.Value ?? throw new InvalidOperationException("Policy Gateway options must be configured."); - if (httpClient.BaseAddress is null) - { - httpClient.BaseAddress = this.options.PolicyEngine.BaseUri; - } - - httpClient.DefaultRequestHeaders.Accept.Clear(); - httpClient.DefaultRequestHeaders.Accept.ParseAdd("application/json"); - } - - public Task<PolicyEngineResponse<IReadOnlyList<PolicyPackSummaryDto>>> ListPolicyPacksAsync( - GatewayForwardingContext? forwardingContext, - CancellationToken cancellationToken) - => SendAsync<IReadOnlyList<PolicyPackSummaryDto>>( - HttpMethod.Get, - "api/policy/packs", - forwardingContext, - content: null, - cancellationToken); - - public Task<PolicyEngineResponse<PolicyPackDto>> CreatePolicyPackAsync( - GatewayForwardingContext? 
forwardingContext, - CreatePolicyPackRequest request, - CancellationToken cancellationToken) - => SendAsync<PolicyPackDto>( - HttpMethod.Post, - "api/policy/packs", - forwardingContext, - request, - cancellationToken); - - public Task<PolicyEngineResponse<PolicyRevisionDto>> CreatePolicyRevisionAsync( - GatewayForwardingContext? forwardingContext, - string packId, - CreatePolicyRevisionRequest request, - CancellationToken cancellationToken) - => SendAsync<PolicyRevisionDto>( - HttpMethod.Post, - $"api/policy/packs/{Uri.EscapeDataString(packId)}/revisions", - forwardingContext, - request, - cancellationToken); - - public Task<PolicyEngineResponse<PolicyRevisionActivationDto>> ActivatePolicyRevisionAsync( - GatewayForwardingContext? forwardingContext, - string packId, - int version, - ActivatePolicyRevisionRequest request, - CancellationToken cancellationToken) - => SendAsync<PolicyRevisionActivationDto>( - HttpMethod.Post, - $"api/policy/packs/{Uri.EscapeDataString(packId)}/revisions/{version}:activate", - forwardingContext, - request, - cancellationToken); - - private async Task<PolicyEngineResponse<TSuccess>> SendAsync<TSuccess>( - HttpMethod method, - string relativeUri, - GatewayForwardingContext? forwardingContext, - object? content, - CancellationToken cancellationToken) - { - var absoluteUri = httpClient.BaseAddress is not null - ? new Uri(httpClient.BaseAddress, relativeUri) - : new Uri(relativeUri, UriKind.Absolute); - - using var request = new HttpRequestMessage(method, absoluteUri); - - if (forwardingContext is not null) - { - forwardingContext.Apply(request); - } - else - { - var serviceAuthorization = await tokenProvider.GetAuthorizationAsync(method, absoluteUri, cancellationToken).ConfigureAwait(false); - if (serviceAuthorization is null) - { - logger.LogWarning( - "Policy Engine request {Method} {Uri} lacks caller credentials and client credentials flow is disabled.", - method, - absoluteUri); - var problem = new ProblemDetails - { - Title = "Upstream authorization missing", - Detail = "Caller did not present credentials and client credentials flow is disabled.", - Status = StatusCodes.Status401Unauthorized - }; - return PolicyEngineResponse<TSuccess>.Failure(HttpStatusCode.Unauthorized, problem); - } - - var authorization = serviceAuthorization.Value; - authorization.Apply(request); - } - - if (content is not null) - { - request.Content = JsonContent.Create(content, options: SerializerOptions); - } - - using var response = await httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); - var location = response.Headers.Location?.ToString(); - - if (response.IsSuccessStatusCode) - { - if (response.Content is null || response.Content.Headers.ContentLength == 0) - { - return PolicyEngineResponse<TSuccess>.Success(response.StatusCode, value: default, location); - } - - try - { - var successValue = await response.Content.ReadFromJsonAsync<TSuccess>(SerializerOptions, cancellationToken).ConfigureAwait(false); - return PolicyEngineResponse<TSuccess>.Success(response.StatusCode, successValue, location); - } - catch (JsonException ex) - { - logger.LogError(ex, "Failed to deserialize Policy Engine response for {Path}.", relativeUri); - var problem = new ProblemDetails - { - Title = "Invalid upstream response", - Detail = "Policy Engine returned an unexpected payload.", - Status = StatusCodes.Status502BadGateway - }; - return PolicyEngineResponse<TSuccess>.Failure(HttpStatusCode.BadGateway, problem); - } - } - - var problemDetails = await ReadProblemDetailsAsync(response, 
cancellationToken).ConfigureAwait(false); - return PolicyEngineResponse<TSuccess>.Failure(response.StatusCode, problemDetails); - } - - private async Task<ProblemDetails?> ReadProblemDetailsAsync(HttpResponseMessage response, CancellationToken cancellationToken) - { - if (response.Content is null) - { - return null; - } - - try - { - return await response.Content.ReadFromJsonAsync<ProblemDetails>(SerializerOptions, cancellationToken).ConfigureAwait(false); - } - catch (JsonException ex) - { - logger.LogDebug(ex, "Policy Engine returned non-ProblemDetails error response for {StatusCode}.", (int)response.StatusCode); - return new ProblemDetails - { - Title = "Upstream error", - Detail = $"Policy Engine responded with {(int)response.StatusCode} {response.ReasonPhrase}.", - Status = (int)response.StatusCode - }; - } - } -} +using System; +using System.Collections.Generic; +using System.Net; +using System.Net.Http; +using System.Net.Http.Json; +using System.Text.Json; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Policy.Gateway.Contracts; +using StellaOps.Policy.Gateway.Infrastructure; +using StellaOps.Policy.Gateway.Options; +using StellaOps.Policy.Gateway.Services; + +namespace StellaOps.Policy.Gateway.Clients; + +internal sealed class PolicyEngineClient : IPolicyEngineClient +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + PropertyNameCaseInsensitive = true + }; + + private readonly HttpClient httpClient; + private readonly PolicyEngineTokenProvider tokenProvider; + private readonly ILogger<PolicyEngineClient> logger; + private readonly PolicyGatewayOptions options; + + public PolicyEngineClient( + HttpClient httpClient, + IOptions<PolicyGatewayOptions> options, + PolicyEngineTokenProvider tokenProvider, + ILogger<PolicyEngineClient> logger) + { + this.httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient)); + this.tokenProvider = tokenProvider ?? throw new ArgumentNullException(nameof(tokenProvider)); + this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); + + if (options is null) + { + throw new ArgumentNullException(nameof(options)); + } + + this.options = options.Value ?? throw new InvalidOperationException("Policy Gateway options must be configured."); + if (httpClient.BaseAddress is null) + { + httpClient.BaseAddress = this.options.PolicyEngine.BaseUri; + } + + httpClient.DefaultRequestHeaders.Accept.Clear(); + httpClient.DefaultRequestHeaders.Accept.ParseAdd("application/json"); + } + + public Task<PolicyEngineResponse<IReadOnlyList<PolicyPackSummaryDto>>> ListPolicyPacksAsync( + GatewayForwardingContext? forwardingContext, + CancellationToken cancellationToken) + => SendAsync<IReadOnlyList<PolicyPackSummaryDto>>( + HttpMethod.Get, + "api/policy/packs", + forwardingContext, + content: null, + cancellationToken); + + public Task<PolicyEngineResponse<PolicyPackDto>> CreatePolicyPackAsync( + GatewayForwardingContext? forwardingContext, + CreatePolicyPackRequest request, + CancellationToken cancellationToken) + => SendAsync<PolicyPackDto>( + HttpMethod.Post, + "api/policy/packs", + forwardingContext, + request, + cancellationToken); + + public Task<PolicyEngineResponse<PolicyRevisionDto>> CreatePolicyRevisionAsync( + GatewayForwardingContext? 
forwardingContext, + string packId, + CreatePolicyRevisionRequest request, + CancellationToken cancellationToken) + => SendAsync<PolicyRevisionDto>( + HttpMethod.Post, + $"api/policy/packs/{Uri.EscapeDataString(packId)}/revisions", + forwardingContext, + request, + cancellationToken); + + public Task<PolicyEngineResponse<PolicyRevisionActivationDto>> ActivatePolicyRevisionAsync( + GatewayForwardingContext? forwardingContext, + string packId, + int version, + ActivatePolicyRevisionRequest request, + CancellationToken cancellationToken) + => SendAsync<PolicyRevisionActivationDto>( + HttpMethod.Post, + $"api/policy/packs/{Uri.EscapeDataString(packId)}/revisions/{version}:activate", + forwardingContext, + request, + cancellationToken); + + private async Task<PolicyEngineResponse<TSuccess>> SendAsync<TSuccess>( + HttpMethod method, + string relativeUri, + GatewayForwardingContext? forwardingContext, + object? content, + CancellationToken cancellationToken) + { + var absoluteUri = httpClient.BaseAddress is not null + ? new Uri(httpClient.BaseAddress, relativeUri) + : new Uri(relativeUri, UriKind.Absolute); + + using var request = new HttpRequestMessage(method, absoluteUri); + + if (forwardingContext is not null) + { + forwardingContext.Apply(request); + } + else + { + var serviceAuthorization = await tokenProvider.GetAuthorizationAsync(method, absoluteUri, cancellationToken).ConfigureAwait(false); + if (serviceAuthorization is null) + { + logger.LogWarning( + "Policy Engine request {Method} {Uri} lacks caller credentials and client credentials flow is disabled.", + method, + absoluteUri); + var problem = new ProblemDetails + { + Title = "Upstream authorization missing", + Detail = "Caller did not present credentials and client credentials flow is disabled.", + Status = StatusCodes.Status401Unauthorized + }; + return PolicyEngineResponse<TSuccess>.Failure(HttpStatusCode.Unauthorized, problem); + } + + var authorization = serviceAuthorization.Value; + authorization.Apply(request); + } + + if (content is not null) + { + request.Content = JsonContent.Create(content, options: SerializerOptions); + } + + using var response = await httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); + var location = response.Headers.Location?.ToString(); + + if (response.IsSuccessStatusCode) + { + if (response.Content is null || response.Content.Headers.ContentLength == 0) + { + return PolicyEngineResponse<TSuccess>.Success(response.StatusCode, value: default, location); + } + + try + { + var successValue = await response.Content.ReadFromJsonAsync<TSuccess>(SerializerOptions, cancellationToken).ConfigureAwait(false); + return PolicyEngineResponse<TSuccess>.Success(response.StatusCode, successValue, location); + } + catch (JsonException ex) + { + logger.LogError(ex, "Failed to deserialize Policy Engine response for {Path}.", relativeUri); + var problem = new ProblemDetails + { + Title = "Invalid upstream response", + Detail = "Policy Engine returned an unexpected payload.", + Status = StatusCodes.Status502BadGateway + }; + return PolicyEngineResponse<TSuccess>.Failure(HttpStatusCode.BadGateway, problem); + } + } + + var problemDetails = await ReadProblemDetailsAsync(response, cancellationToken).ConfigureAwait(false); + return PolicyEngineResponse<TSuccess>.Failure(response.StatusCode, problemDetails); + } + + private async Task<ProblemDetails?> ReadProblemDetailsAsync(HttpResponseMessage response, CancellationToken cancellationToken) + { + if (response.Content is null) + { + return null; + } + + try 
+        {
+            return await response.Content.ReadFromJsonAsync<ProblemDetails>(SerializerOptions, cancellationToken).ConfigureAwait(false);
+        }
+        catch (JsonException ex)
+        {
+            logger.LogDebug(ex, "Policy Engine returned non-ProblemDetails error response for {StatusCode}.", (int)response.StatusCode);
+            return new ProblemDetails
+            {
+                Title = "Upstream error",
+                Detail = $"Policy Engine responded with {(int)response.StatusCode} {response.ReasonPhrase}.",
+                Status = (int)response.StatusCode
+            };
+        }
+    }
+}
diff --git a/src/StellaOps.Policy.Gateway/Clients/PolicyEngineResponse.cs b/src/Policy/StellaOps.Policy.Gateway/Clients/PolicyEngineResponse.cs
similarity index 97%
rename from src/StellaOps.Policy.Gateway/Clients/PolicyEngineResponse.cs
rename to src/Policy/StellaOps.Policy.Gateway/Clients/PolicyEngineResponse.cs
index 9e09329c..7e18f5de 100644
--- a/src/StellaOps.Policy.Gateway/Clients/PolicyEngineResponse.cs
+++ b/src/Policy/StellaOps.Policy.Gateway/Clients/PolicyEngineResponse.cs
@@ -1,31 +1,31 @@
-using System.Net;
-using Microsoft.AspNetCore.Mvc;
-
-namespace StellaOps.Policy.Gateway.Clients;
-
-internal sealed class PolicyEngineResponse<TSuccess>
-{
-    private PolicyEngineResponse(HttpStatusCode statusCode, TSuccess? value, ProblemDetails? problem, string? location)
-    {
-        StatusCode = statusCode;
-        Value = value;
-        Problem = problem;
-        Location = location;
-    }
-
-    public HttpStatusCode StatusCode { get; }
-
-    public TSuccess? Value { get; }
-
-    public ProblemDetails? Problem { get; }
-
-    public string? Location { get; }
-
-    public bool IsSuccess => Problem is null && StatusCode is >= HttpStatusCode.OK and < HttpStatusCode.MultipleChoices;
-
-    public static PolicyEngineResponse<TSuccess> Success(HttpStatusCode statusCode, TSuccess? value, string? location)
-        => new(statusCode, value, problem: null, location);
-
-    public static PolicyEngineResponse<TSuccess> Failure(HttpStatusCode statusCode, ProblemDetails? problem)
-        => new(statusCode, value: default, problem, location: null);
-}
+using System.Net;
+using Microsoft.AspNetCore.Mvc;
+
+namespace StellaOps.Policy.Gateway.Clients;
+
+internal sealed class PolicyEngineResponse<TSuccess>
+{
+    private PolicyEngineResponse(HttpStatusCode statusCode, TSuccess? value, ProblemDetails? problem, string? location)
+    {
+        StatusCode = statusCode;
+        Value = value;
+        Problem = problem;
+        Location = location;
+    }
+
+    public HttpStatusCode StatusCode { get; }
+
+    public TSuccess? Value { get; }
+
+    public ProblemDetails? Problem { get; }
+
+    public string? Location { get; }
+
+    public bool IsSuccess => Problem is null && StatusCode is >= HttpStatusCode.OK and < HttpStatusCode.MultipleChoices;
+
+    public static PolicyEngineResponse<TSuccess> Success(HttpStatusCode statusCode, TSuccess? value, string? location)
+        => new(statusCode, value, problem: null, location);
+
+    public static PolicyEngineResponse<TSuccess> Failure(HttpStatusCode statusCode, ProblemDetails?
problem) + => new(statusCode, value: default, problem, location: null); +} diff --git a/src/StellaOps.Policy.Gateway/Clients/PolicyEngineResponseExtensions.cs b/src/Policy/StellaOps.Policy.Gateway/Clients/PolicyEngineResponseExtensions.cs similarity index 96% rename from src/StellaOps.Policy.Gateway/Clients/PolicyEngineResponseExtensions.cs rename to src/Policy/StellaOps.Policy.Gateway/Clients/PolicyEngineResponseExtensions.cs index d21ada4b..1cc37c61 100644 --- a/src/StellaOps.Policy.Gateway/Clients/PolicyEngineResponseExtensions.cs +++ b/src/Policy/StellaOps.Policy.Gateway/Clients/PolicyEngineResponseExtensions.cs @@ -1,71 +1,71 @@ -using System; -using System.Net; -using Microsoft.AspNetCore.Http; -using Microsoft.AspNetCore.Mvc; - -namespace StellaOps.Policy.Gateway.Clients; - -internal static class PolicyEngineResponseExtensions -{ - public static IResult ToMinimalResult<T>(this PolicyEngineResponse<T> response) - { - if (response is null) - { - throw new ArgumentNullException(nameof(response)); - } - - if (response.IsSuccess) - { - return CreateSuccessResult(response); - } - - return CreateErrorResult(response); - } - - private static IResult CreateSuccessResult<T>(PolicyEngineResponse<T> response) - { - var value = response.Value; - switch (response.StatusCode) - { - case HttpStatusCode.Created: - if (!string.IsNullOrWhiteSpace(response.Location)) - { - return Results.Created(response.Location, value); - } - - return Results.Json(value, statusCode: StatusCodes.Status201Created); - - case HttpStatusCode.Accepted: - if (!string.IsNullOrWhiteSpace(response.Location)) - { - return Results.Accepted(response.Location, value); - } - - return Results.Json(value, statusCode: StatusCodes.Status202Accepted); - - case HttpStatusCode.NoContent: - return Results.NoContent(); - - default: - return Results.Json(value, statusCode: (int)response.StatusCode); - } - } - - private static IResult CreateErrorResult<T>(PolicyEngineResponse<T> response) - { - var problem = response.Problem; - if (problem is null) - { - return Results.StatusCode((int)response.StatusCode); - } - - var statusCode = problem.Status ?? 
(int)response.StatusCode; - return Results.Problem( - title: problem.Title, - detail: problem.Detail, - type: problem.Type, - instance: problem.Instance, - statusCode: statusCode, - extensions: problem.Extensions); - } -} +using System; +using System.Net; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Mvc; + +namespace StellaOps.Policy.Gateway.Clients; + +internal static class PolicyEngineResponseExtensions +{ + public static IResult ToMinimalResult<T>(this PolicyEngineResponse<T> response) + { + if (response is null) + { + throw new ArgumentNullException(nameof(response)); + } + + if (response.IsSuccess) + { + return CreateSuccessResult(response); + } + + return CreateErrorResult(response); + } + + private static IResult CreateSuccessResult<T>(PolicyEngineResponse<T> response) + { + var value = response.Value; + switch (response.StatusCode) + { + case HttpStatusCode.Created: + if (!string.IsNullOrWhiteSpace(response.Location)) + { + return Results.Created(response.Location, value); + } + + return Results.Json(value, statusCode: StatusCodes.Status201Created); + + case HttpStatusCode.Accepted: + if (!string.IsNullOrWhiteSpace(response.Location)) + { + return Results.Accepted(response.Location, value); + } + + return Results.Json(value, statusCode: StatusCodes.Status202Accepted); + + case HttpStatusCode.NoContent: + return Results.NoContent(); + + default: + return Results.Json(value, statusCode: (int)response.StatusCode); + } + } + + private static IResult CreateErrorResult<T>(PolicyEngineResponse<T> response) + { + var problem = response.Problem; + if (problem is null) + { + return Results.StatusCode((int)response.StatusCode); + } + + var statusCode = problem.Status ?? (int)response.StatusCode; + return Results.Problem( + title: problem.Title, + detail: problem.Detail, + type: problem.Type, + instance: problem.Instance, + statusCode: statusCode, + extensions: problem.Extensions); + } +} diff --git a/src/StellaOps.Policy.Gateway/Contracts/PolicyPackContracts.cs b/src/Policy/StellaOps.Policy.Gateway/Contracts/PolicyPackContracts.cs similarity index 96% rename from src/StellaOps.Policy.Gateway/Contracts/PolicyPackContracts.cs rename to src/Policy/StellaOps.Policy.Gateway/Contracts/PolicyPackContracts.cs index a650a410..de9fefc4 100644 --- a/src/StellaOps.Policy.Gateway/Contracts/PolicyPackContracts.cs +++ b/src/Policy/StellaOps.Policy.Gateway/Contracts/PolicyPackContracts.cs @@ -1,45 +1,45 @@ -using System; -using System.Collections.Generic; -using System.ComponentModel.DataAnnotations; - -namespace StellaOps.Policy.Gateway.Contracts; - -public sealed record PolicyPackSummaryDto( - string PackId, - string? DisplayName, - DateTimeOffset CreatedAt, - IReadOnlyList<int> Versions); - -public sealed record PolicyPackDto( - string PackId, - string? DisplayName, - DateTimeOffset CreatedAt, - IReadOnlyList<PolicyRevisionDto> Revisions); - -public sealed record PolicyRevisionDto( - int Version, - string Status, - bool RequiresTwoPersonApproval, - DateTimeOffset CreatedAt, - DateTimeOffset? ActivatedAt, - IReadOnlyList<PolicyActivationApprovalDto> Approvals); - -public sealed record PolicyActivationApprovalDto( - string ActorId, - DateTimeOffset ApprovedAt, - string? Comment); - -public sealed record PolicyRevisionActivationDto( - string Status, - PolicyRevisionDto Revision); - -public sealed record CreatePolicyPackRequest( - [StringLength(200)] string? PackId, - [StringLength(200)] string? DisplayName); - -public sealed record CreatePolicyRevisionRequest( - int? 
Version, - bool RequiresTwoPersonApproval, - string InitialStatus = "Approved"); - -public sealed record ActivatePolicyRevisionRequest(string? Comment); +using System; +using System.Collections.Generic; +using System.ComponentModel.DataAnnotations; + +namespace StellaOps.Policy.Gateway.Contracts; + +public sealed record PolicyPackSummaryDto( + string PackId, + string? DisplayName, + DateTimeOffset CreatedAt, + IReadOnlyList<int> Versions); + +public sealed record PolicyPackDto( + string PackId, + string? DisplayName, + DateTimeOffset CreatedAt, + IReadOnlyList<PolicyRevisionDto> Revisions); + +public sealed record PolicyRevisionDto( + int Version, + string Status, + bool RequiresTwoPersonApproval, + DateTimeOffset CreatedAt, + DateTimeOffset? ActivatedAt, + IReadOnlyList<PolicyActivationApprovalDto> Approvals); + +public sealed record PolicyActivationApprovalDto( + string ActorId, + DateTimeOffset ApprovedAt, + string? Comment); + +public sealed record PolicyRevisionActivationDto( + string Status, + PolicyRevisionDto Revision); + +public sealed record CreatePolicyPackRequest( + [StringLength(200)] string? PackId, + [StringLength(200)] string? DisplayName); + +public sealed record CreatePolicyRevisionRequest( + int? Version, + bool RequiresTwoPersonApproval, + string InitialStatus = "Approved"); + +public sealed record ActivatePolicyRevisionRequest(string? Comment); diff --git a/src/StellaOps.Policy.Gateway/Infrastructure/GatewayForwardingContext.cs b/src/Policy/StellaOps.Policy.Gateway/Infrastructure/GatewayForwardingContext.cs similarity index 96% rename from src/StellaOps.Policy.Gateway/Infrastructure/GatewayForwardingContext.cs rename to src/Policy/StellaOps.Policy.Gateway/Infrastructure/GatewayForwardingContext.cs index ea0c8e36..7a7786b2 100644 --- a/src/StellaOps.Policy.Gateway/Infrastructure/GatewayForwardingContext.cs +++ b/src/Policy/StellaOps.Policy.Gateway/Infrastructure/GatewayForwardingContext.cs @@ -1,59 +1,59 @@ -using System; -using System.Net.Http; -using Microsoft.AspNetCore.Http; - -namespace StellaOps.Policy.Gateway.Infrastructure; - -internal sealed record GatewayForwardingContext(string Authorization, string? Dpop, string? 
Tenant) -{ - private static readonly string[] ForwardedHeaders = - { - "Authorization", - "DPoP", - "X-Stella-Tenant" - }; - - public void Apply(HttpRequestMessage request) - { - ArgumentNullException.ThrowIfNull(request); - - request.Headers.TryAddWithoutValidation(ForwardedHeaders[0], Authorization); - - if (!string.IsNullOrWhiteSpace(Dpop)) - { - request.Headers.TryAddWithoutValidation(ForwardedHeaders[1], Dpop); - } - - if (!string.IsNullOrWhiteSpace(Tenant)) - { - request.Headers.TryAddWithoutValidation(ForwardedHeaders[2], Tenant); - } - } - - public static bool TryCreate(HttpContext context, out GatewayForwardingContext forwardingContext) - { - ArgumentNullException.ThrowIfNull(context); - - var authorization = context.Request.Headers.Authorization.ToString(); - if (string.IsNullOrWhiteSpace(authorization)) - { - forwardingContext = null!; - return false; - } - - var dpop = context.Request.Headers["DPoP"].ToString(); - if (string.IsNullOrWhiteSpace(dpop)) - { - dpop = null; - } - - var tenant = context.Request.Headers["X-Stella-Tenant"].ToString(); - if (string.IsNullOrWhiteSpace(tenant)) - { - tenant = null; - } - - forwardingContext = new GatewayForwardingContext(authorization.Trim(), dpop, tenant); - return true; - } -} +using System; +using System.Net.Http; +using Microsoft.AspNetCore.Http; + +namespace StellaOps.Policy.Gateway.Infrastructure; + +internal sealed record GatewayForwardingContext(string Authorization, string? Dpop, string? Tenant) +{ + private static readonly string[] ForwardedHeaders = + { + "Authorization", + "DPoP", + "X-Stella-Tenant" + }; + + public void Apply(HttpRequestMessage request) + { + ArgumentNullException.ThrowIfNull(request); + + request.Headers.TryAddWithoutValidation(ForwardedHeaders[0], Authorization); + + if (!string.IsNullOrWhiteSpace(Dpop)) + { + request.Headers.TryAddWithoutValidation(ForwardedHeaders[1], Dpop); + } + + if (!string.IsNullOrWhiteSpace(Tenant)) + { + request.Headers.TryAddWithoutValidation(ForwardedHeaders[2], Tenant); + } + } + + public static bool TryCreate(HttpContext context, out GatewayForwardingContext forwardingContext) + { + ArgumentNullException.ThrowIfNull(context); + + var authorization = context.Request.Headers.Authorization.ToString(); + if (string.IsNullOrWhiteSpace(authorization)) + { + forwardingContext = null!; + return false; + } + + var dpop = context.Request.Headers["DPoP"].ToString(); + if (string.IsNullOrWhiteSpace(dpop)) + { + dpop = null; + } + + var tenant = context.Request.Headers["X-Stella-Tenant"].ToString(); + if (string.IsNullOrWhiteSpace(tenant)) + { + tenant = null; + } + + forwardingContext = new GatewayForwardingContext(authorization.Trim(), dpop, tenant); + return true; + } +} diff --git a/src/StellaOps.Policy.Gateway/Options/PolicyGatewayOptions.cs b/src/Policy/StellaOps.Policy.Gateway/Options/PolicyGatewayOptions.cs similarity index 97% rename from src/StellaOps.Policy.Gateway/Options/PolicyGatewayOptions.cs rename to src/Policy/StellaOps.Policy.Gateway/Options/PolicyGatewayOptions.cs index 6b0b3528..05410124 100644 --- a/src/StellaOps.Policy.Gateway/Options/PolicyGatewayOptions.cs +++ b/src/Policy/StellaOps.Policy.Gateway/Options/PolicyGatewayOptions.cs @@ -1,323 +1,323 @@ -using System; -using System.Collections.Generic; -using System.Collections.ObjectModel; -using Microsoft.Extensions.Logging; -using StellaOps.Auth.Abstractions; - -namespace StellaOps.Policy.Gateway.Options; - -/// <summary> -/// Root configuration for the Policy Gateway host. 
-/// </summary> -public sealed class PolicyGatewayOptions -{ - public const string SectionName = "PolicyGateway"; - - public PolicyGatewayTelemetryOptions Telemetry { get; } = new(); - - public PolicyGatewayResourceServerOptions ResourceServer { get; } = new(); - - public PolicyGatewayPolicyEngineOptions PolicyEngine { get; } = new(); - - public void Validate() - { - Telemetry.Validate(); - ResourceServer.Validate(); - PolicyEngine.Validate(); - } -} - -/// <summary> -/// Logging and telemetry configuration for the gateway. -/// </summary> -public sealed class PolicyGatewayTelemetryOptions -{ - public LogLevel MinimumLogLevel { get; set; } = LogLevel.Information; - - public void Validate() - { - if (!Enum.IsDefined(typeof(LogLevel), MinimumLogLevel)) - { - throw new InvalidOperationException("Unsupported log level configured for Policy Gateway telemetry."); - } - } -} - -/// <summary> -/// JWT resource server configuration for incoming requests handled by the gateway. -/// </summary> -public sealed class PolicyGatewayResourceServerOptions -{ - public string Authority { get; set; } = "https://authority.stella-ops.local"; - - public string? MetadataAddress { get; set; } - = "https://authority.stella-ops.local/.well-known/openid-configuration"; - - public IList<string> Audiences { get; } = new List<string> { "api://policy-gateway" }; - - public IList<string> RequiredScopes { get; } = new List<string> - { - StellaOpsScopes.PolicyRead, - StellaOpsScopes.PolicyAuthor, - StellaOpsScopes.PolicyReview, - StellaOpsScopes.PolicyApprove, - StellaOpsScopes.PolicyOperate, - StellaOpsScopes.PolicySimulate, - StellaOpsScopes.PolicyRun, - StellaOpsScopes.PolicyActivate - }; - - public IList<string> RequiredTenants { get; } = new List<string>(); - - public IList<string> BypassNetworks { get; } = new List<string> { "127.0.0.1/32", "::1/128" }; - - public bool RequireHttpsMetadata { get; set; } = true; - - public int BackchannelTimeoutSeconds { get; set; } = 30; - - public int TokenClockSkewSeconds { get; set; } = 60; - - public void Validate() - { - if (string.IsNullOrWhiteSpace(Authority)) - { - throw new InvalidOperationException("Policy Gateway resource server configuration requires an Authority URL."); - } - - if (!Uri.TryCreate(Authority.Trim(), UriKind.Absolute, out var authorityUri)) - { - throw new InvalidOperationException("Policy Gateway resource server Authority URL must be absolute."); - } - - if (RequireHttpsMetadata && - !authorityUri.IsLoopback && - !string.Equals(authorityUri.Scheme, Uri.UriSchemeHttps, StringComparison.OrdinalIgnoreCase)) - { - throw new InvalidOperationException("Policy Gateway resource server Authority URL must use HTTPS when metadata requires HTTPS."); - } - - if (BackchannelTimeoutSeconds <= 0) - { - throw new InvalidOperationException("Policy Gateway resource server back-channel timeout must be greater than zero seconds."); - } - - if (TokenClockSkewSeconds < 0 || TokenClockSkewSeconds > 300) - { - throw new InvalidOperationException("Policy Gateway resource server token clock skew must be between 0 and 300 seconds."); - } - - NormalizeList(Audiences, toLower: false); - NormalizeList(RequiredScopes, toLower: true); - NormalizeList(RequiredTenants, toLower: true); - NormalizeList(BypassNetworks, toLower: false); - } - - private static void NormalizeList(IList<string> values, bool toLower) - { - if (values.Count == 0) - { - return; - } - - var unique = new HashSet<string>(StringComparer.OrdinalIgnoreCase); - for (var index = values.Count - 1; index >= 0; index--) - { - 
var value = values[index]; - if (string.IsNullOrWhiteSpace(value)) - { - values.RemoveAt(index); - continue; - } - - var normalized = value.Trim(); - if (toLower) - { - normalized = normalized.ToLowerInvariant(); - } - - if (!unique.Add(normalized)) - { - values.RemoveAt(index); - continue; - } - - values[index] = normalized; - } - } -} - -/// <summary> -/// Outbound Policy Engine configuration used by the gateway to forward requests. -/// </summary> -public sealed class PolicyGatewayPolicyEngineOptions -{ - public string BaseAddress { get; set; } = "https://policy-engine.stella-ops.local"; - - public string Audience { get; set; } = "api://policy-engine"; - - public PolicyGatewayClientCredentialsOptions ClientCredentials { get; } = new(); - - public PolicyGatewayDpopOptions Dpop { get; } = new(); - - public void Validate() - { - if (string.IsNullOrWhiteSpace(BaseAddress)) - { - throw new InvalidOperationException("Policy Gateway requires a Policy Engine base address."); - } - - if (!Uri.TryCreate(BaseAddress.Trim(), UriKind.Absolute, out var baseUri)) - { - throw new InvalidOperationException("Policy Gateway Policy Engine base address must be an absolute URI."); - } - - if (!string.Equals(baseUri.Scheme, Uri.UriSchemeHttps, StringComparison.OrdinalIgnoreCase) && !baseUri.IsLoopback) - { - throw new InvalidOperationException("Policy Gateway Policy Engine base address must use HTTPS unless targeting loopback."); - } - - if (string.IsNullOrWhiteSpace(Audience)) - { - throw new InvalidOperationException("Policy Gateway requires a Policy Engine audience value for client credential flows."); - } - - ClientCredentials.Validate(); - Dpop.Validate(); - } - - public Uri BaseUri => new(BaseAddress, UriKind.Absolute); -} - -/// <summary> -/// Client credential configuration for the gateway when calling the Policy Engine. -/// </summary> -public sealed class PolicyGatewayClientCredentialsOptions -{ - public bool Enabled { get; set; } = true; - - public string ClientId { get; set; } = "policy-gateway"; - - public string? 
ClientSecret { get; set; } - = "change-me"; - - public IList<string> Scopes { get; } = new List<string> - { - StellaOpsScopes.PolicyRead, - StellaOpsScopes.PolicyAuthor, - StellaOpsScopes.PolicyReview, - StellaOpsScopes.PolicyApprove, - StellaOpsScopes.PolicyOperate, - StellaOpsScopes.PolicySimulate, - StellaOpsScopes.PolicyRun, - StellaOpsScopes.PolicyActivate - }; - - public int BackchannelTimeoutSeconds { get; set; } = 30; - - public void Validate() - { - if (!Enabled) - { - return; - } - - if (string.IsNullOrWhiteSpace(ClientId)) - { - throw new InvalidOperationException("Policy Gateway client credential configuration requires a client identifier when enabled."); - } - - if (Scopes.Count == 0) - { - throw new InvalidOperationException("Policy Gateway client credential configuration requires at least one scope when enabled."); - } - - var normalized = new HashSet<string>(StringComparer.OrdinalIgnoreCase); - for (var index = Scopes.Count - 1; index >= 0; index--) - { - var scope = Scopes[index]; - if (string.IsNullOrWhiteSpace(scope)) - { - Scopes.RemoveAt(index); - continue; - } - - var trimmed = scope.Trim().ToLowerInvariant(); - if (!normalized.Add(trimmed)) - { - Scopes.RemoveAt(index); - continue; - } - - Scopes[index] = trimmed; - } - - if (Scopes.Count == 0) - { - throw new InvalidOperationException("Policy Gateway client credential configuration requires at least one non-empty scope when enabled."); - } - - if (BackchannelTimeoutSeconds <= 0) - { - throw new InvalidOperationException("Policy Gateway client credential back-channel timeout must be greater than zero seconds."); - } - } - - public IReadOnlyList<string> NormalizedScopes => new ReadOnlyCollection<string>(Scopes); - - public TimeSpan BackchannelTimeout => TimeSpan.FromSeconds(BackchannelTimeoutSeconds); -} - -/// <summary> -/// DPoP sender-constrained credential configuration for outbound Policy Engine calls. -/// </summary> -public sealed class PolicyGatewayDpopOptions -{ - public bool Enabled { get; set; } = false; - - public string KeyPath { get; set; } = string.Empty; - - public string? 
KeyPassphrase { get; set; } - = null; - - public string Algorithm { get; set; } = "ES256"; - - public TimeSpan ProofLifetime { get; set; } = TimeSpan.FromMinutes(2); - - public TimeSpan ClockSkew { get; set; } = TimeSpan.FromSeconds(30); - - public void Validate() - { - if (!Enabled) - { - return; - } - - if (string.IsNullOrWhiteSpace(KeyPath)) - { - throw new InvalidOperationException("Policy Gateway DPoP configuration requires a key path when enabled."); - } - - if (string.IsNullOrWhiteSpace(Algorithm)) - { - throw new InvalidOperationException("Policy Gateway DPoP configuration requires an algorithm when enabled."); - } - - var normalizedAlgorithm = Algorithm.Trim().ToUpperInvariant(); - if (normalizedAlgorithm is not ("ES256" or "ES384")) - { - throw new InvalidOperationException("Policy Gateway DPoP configuration supports only ES256 or ES384 algorithms."); - } - - if (ProofLifetime <= TimeSpan.Zero) - { - throw new InvalidOperationException("Policy Gateway DPoP proof lifetime must be greater than zero."); - } - - if (ClockSkew < TimeSpan.Zero || ClockSkew > TimeSpan.FromMinutes(5)) - { - throw new InvalidOperationException("Policy Gateway DPoP clock skew must be between 0 seconds and 5 minutes."); - } - - Algorithm = normalizedAlgorithm; - } -} +using System; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using Microsoft.Extensions.Logging; +using StellaOps.Auth.Abstractions; + +namespace StellaOps.Policy.Gateway.Options; + +/// <summary> +/// Root configuration for the Policy Gateway host. +/// </summary> +public sealed class PolicyGatewayOptions +{ + public const string SectionName = "PolicyGateway"; + + public PolicyGatewayTelemetryOptions Telemetry { get; } = new(); + + public PolicyGatewayResourceServerOptions ResourceServer { get; } = new(); + + public PolicyGatewayPolicyEngineOptions PolicyEngine { get; } = new(); + + public void Validate() + { + Telemetry.Validate(); + ResourceServer.Validate(); + PolicyEngine.Validate(); + } +} + +/// <summary> +/// Logging and telemetry configuration for the gateway. +/// </summary> +public sealed class PolicyGatewayTelemetryOptions +{ + public LogLevel MinimumLogLevel { get; set; } = LogLevel.Information; + + public void Validate() + { + if (!Enum.IsDefined(typeof(LogLevel), MinimumLogLevel)) + { + throw new InvalidOperationException("Unsupported log level configured for Policy Gateway telemetry."); + } + } +} + +/// <summary> +/// JWT resource server configuration for incoming requests handled by the gateway. +/// </summary> +public sealed class PolicyGatewayResourceServerOptions +{ + public string Authority { get; set; } = "https://authority.stella-ops.local"; + + public string? 
MetadataAddress { get; set; } + = "https://authority.stella-ops.local/.well-known/openid-configuration"; + + public IList<string> Audiences { get; } = new List<string> { "api://policy-gateway" }; + + public IList<string> RequiredScopes { get; } = new List<string> + { + StellaOpsScopes.PolicyRead, + StellaOpsScopes.PolicyAuthor, + StellaOpsScopes.PolicyReview, + StellaOpsScopes.PolicyApprove, + StellaOpsScopes.PolicyOperate, + StellaOpsScopes.PolicySimulate, + StellaOpsScopes.PolicyRun, + StellaOpsScopes.PolicyActivate + }; + + public IList<string> RequiredTenants { get; } = new List<string>(); + + public IList<string> BypassNetworks { get; } = new List<string> { "127.0.0.1/32", "::1/128" }; + + public bool RequireHttpsMetadata { get; set; } = true; + + public int BackchannelTimeoutSeconds { get; set; } = 30; + + public int TokenClockSkewSeconds { get; set; } = 60; + + public void Validate() + { + if (string.IsNullOrWhiteSpace(Authority)) + { + throw new InvalidOperationException("Policy Gateway resource server configuration requires an Authority URL."); + } + + if (!Uri.TryCreate(Authority.Trim(), UriKind.Absolute, out var authorityUri)) + { + throw new InvalidOperationException("Policy Gateway resource server Authority URL must be absolute."); + } + + if (RequireHttpsMetadata && + !authorityUri.IsLoopback && + !string.Equals(authorityUri.Scheme, Uri.UriSchemeHttps, StringComparison.OrdinalIgnoreCase)) + { + throw new InvalidOperationException("Policy Gateway resource server Authority URL must use HTTPS when metadata requires HTTPS."); + } + + if (BackchannelTimeoutSeconds <= 0) + { + throw new InvalidOperationException("Policy Gateway resource server back-channel timeout must be greater than zero seconds."); + } + + if (TokenClockSkewSeconds < 0 || TokenClockSkewSeconds > 300) + { + throw new InvalidOperationException("Policy Gateway resource server token clock skew must be between 0 and 300 seconds."); + } + + NormalizeList(Audiences, toLower: false); + NormalizeList(RequiredScopes, toLower: true); + NormalizeList(RequiredTenants, toLower: true); + NormalizeList(BypassNetworks, toLower: false); + } + + private static void NormalizeList(IList<string> values, bool toLower) + { + if (values.Count == 0) + { + return; + } + + var unique = new HashSet<string>(StringComparer.OrdinalIgnoreCase); + for (var index = values.Count - 1; index >= 0; index--) + { + var value = values[index]; + if (string.IsNullOrWhiteSpace(value)) + { + values.RemoveAt(index); + continue; + } + + var normalized = value.Trim(); + if (toLower) + { + normalized = normalized.ToLowerInvariant(); + } + + if (!unique.Add(normalized)) + { + values.RemoveAt(index); + continue; + } + + values[index] = normalized; + } + } +} + +/// <summary> +/// Outbound Policy Engine configuration used by the gateway to forward requests. 
+/// </summary>
+public sealed class PolicyGatewayPolicyEngineOptions
+{
+    public string BaseAddress { get; set; } = "https://policy-engine.stella-ops.local";
+
+    public string Audience { get; set; } = "api://policy-engine";
+
+    public PolicyGatewayClientCredentialsOptions ClientCredentials { get; } = new();
+
+    public PolicyGatewayDpopOptions Dpop { get; } = new();
+
+    public void Validate()
+    {
+        if (string.IsNullOrWhiteSpace(BaseAddress))
+        {
+            throw new InvalidOperationException("Policy Gateway requires a Policy Engine base address.");
+        }
+
+        if (!Uri.TryCreate(BaseAddress.Trim(), UriKind.Absolute, out var baseUri))
+        {
+            throw new InvalidOperationException("Policy Gateway Policy Engine base address must be an absolute URI.");
+        }
+
+        if (!string.Equals(baseUri.Scheme, Uri.UriSchemeHttps, StringComparison.OrdinalIgnoreCase) && !baseUri.IsLoopback)
+        {
+            throw new InvalidOperationException("Policy Gateway Policy Engine base address must use HTTPS unless targeting loopback.");
+        }
+
+        if (string.IsNullOrWhiteSpace(Audience))
+        {
+            throw new InvalidOperationException("Policy Gateway requires a Policy Engine audience value for client credential flows.");
+        }
+
+        ClientCredentials.Validate();
+        Dpop.Validate();
+    }
+
+    public Uri BaseUri => new(BaseAddress, UriKind.Absolute);
+}
+
+/// <summary>
+/// Client credential configuration for the gateway when calling the Policy Engine.
+/// </summary>
+public sealed class PolicyGatewayClientCredentialsOptions
+{
+    public bool Enabled { get; set; } = true;
+
+    public string ClientId { get; set; } = "policy-gateway";
+
+    public string? ClientSecret { get; set; }
+        = "change-me";
+
+    public IList<string> Scopes { get; } = new List<string>
+    {
+        StellaOpsScopes.PolicyRead,
+        StellaOpsScopes.PolicyAuthor,
+        StellaOpsScopes.PolicyReview,
+        StellaOpsScopes.PolicyApprove,
+        StellaOpsScopes.PolicyOperate,
+        StellaOpsScopes.PolicySimulate,
+        StellaOpsScopes.PolicyRun,
+        StellaOpsScopes.PolicyActivate
+    };
+
+    public int BackchannelTimeoutSeconds { get; set; } = 30;
+
+    public void Validate()
+    {
+        if (!Enabled)
+        {
+            return;
+        }
+
+        if (string.IsNullOrWhiteSpace(ClientId))
+        {
+            throw new InvalidOperationException("Policy Gateway client credential configuration requires a client identifier when enabled.");
+        }
+
+        if (Scopes.Count == 0)
+        {
+            throw new InvalidOperationException("Policy Gateway client credential configuration requires at least one scope when enabled.");
+        }
+
+        var normalized = new HashSet<string>(StringComparer.OrdinalIgnoreCase);
+        for (var index = Scopes.Count - 1; index >= 0; index--)
+        {
+            var scope = Scopes[index];
+            if (string.IsNullOrWhiteSpace(scope))
+            {
+                Scopes.RemoveAt(index);
+                continue;
+            }
+
+            var trimmed = scope.Trim().ToLowerInvariant();
+            if (!normalized.Add(trimmed))
+            {
+                Scopes.RemoveAt(index);
+                continue;
+            }
+
+            Scopes[index] = trimmed;
+        }
+
+        if (Scopes.Count == 0)
+        {
+            throw new InvalidOperationException("Policy Gateway client credential configuration requires at least one non-empty scope when enabled.");
+        }
+
+        if (BackchannelTimeoutSeconds <= 0)
+        {
+            throw new InvalidOperationException("Policy Gateway client credential back-channel timeout must be greater than zero seconds.");
+        }
+    }
+
+    public IReadOnlyList<string> NormalizedScopes => new ReadOnlyCollection<string>(Scopes);
+
+    public TimeSpan BackchannelTimeout => TimeSpan.FromSeconds(BackchannelTimeoutSeconds);
+}
+
+/// <summary>
+/// DPoP sender-constrained credential configuration for outbound Policy Engine calls.
+/// </summary> +public sealed class PolicyGatewayDpopOptions +{ + public bool Enabled { get; set; } = false; + + public string KeyPath { get; set; } = string.Empty; + + public string? KeyPassphrase { get; set; } + = null; + + public string Algorithm { get; set; } = "ES256"; + + public TimeSpan ProofLifetime { get; set; } = TimeSpan.FromMinutes(2); + + public TimeSpan ClockSkew { get; set; } = TimeSpan.FromSeconds(30); + + public void Validate() + { + if (!Enabled) + { + return; + } + + if (string.IsNullOrWhiteSpace(KeyPath)) + { + throw new InvalidOperationException("Policy Gateway DPoP configuration requires a key path when enabled."); + } + + if (string.IsNullOrWhiteSpace(Algorithm)) + { + throw new InvalidOperationException("Policy Gateway DPoP configuration requires an algorithm when enabled."); + } + + var normalizedAlgorithm = Algorithm.Trim().ToUpperInvariant(); + if (normalizedAlgorithm is not ("ES256" or "ES384")) + { + throw new InvalidOperationException("Policy Gateway DPoP configuration supports only ES256 or ES384 algorithms."); + } + + if (ProofLifetime <= TimeSpan.Zero) + { + throw new InvalidOperationException("Policy Gateway DPoP proof lifetime must be greater than zero."); + } + + if (ClockSkew < TimeSpan.Zero || ClockSkew > TimeSpan.FromMinutes(5)) + { + throw new InvalidOperationException("Policy Gateway DPoP clock skew must be between 0 seconds and 5 minutes."); + } + + Algorithm = normalizedAlgorithm; + } +} diff --git a/src/StellaOps.Policy.Gateway/Program.cs b/src/Policy/StellaOps.Policy.Gateway/Program.cs similarity index 97% rename from src/StellaOps.Policy.Gateway/Program.cs rename to src/Policy/StellaOps.Policy.Gateway/Program.cs index 1d9337e1..bcb416be 100644 --- a/src/StellaOps.Policy.Gateway/Program.cs +++ b/src/Policy/StellaOps.Policy.Gateway/Program.cs @@ -1,406 +1,406 @@ -using System; -using System.Diagnostics; -using System.IO; -using System.Net.Http; -using System.Net; -using Microsoft.AspNetCore.Http; -using Microsoft.AspNetCore.Mvc; -using Microsoft.Extensions.DependencyInjection.Extensions; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using NetEscapades.Configuration.Yaml; -using StellaOps.Auth.Abstractions; -using StellaOps.Auth.Client; -using StellaOps.Auth.ServerIntegration; -using StellaOps.Configuration; -using StellaOps.Policy.Gateway.Clients; -using StellaOps.Policy.Gateway.Contracts; -using StellaOps.Policy.Gateway.Infrastructure; -using StellaOps.Policy.Gateway.Options; -using StellaOps.Policy.Gateway.Services; -using Polly; -using Polly.Extensions.Http; - -var builder = WebApplication.CreateBuilder(args); - -builder.Logging.ClearProviders(); -builder.Logging.AddJsonConsole(); - -builder.Configuration.AddStellaOpsDefaults(options => -{ - options.BasePath = builder.Environment.ContentRootPath; - options.EnvironmentPrefix = "STELLAOPS_POLICY_GATEWAY_"; - options.ConfigureBuilder = configurationBuilder => - { - var contentRoot = builder.Environment.ContentRootPath; - foreach (var relative in new[] - { - "../etc/policy-gateway.yaml", - "../etc/policy-gateway.local.yaml", - "policy-gateway.yaml", - "policy-gateway.local.yaml" - }) - { - var path = Path.Combine(contentRoot, relative); - configurationBuilder.AddYamlFile(path, optional: true); - } - }; -}); - -var bootstrap = StellaOpsConfigurationBootstrapper.Build<PolicyGatewayOptions>(options => -{ - options.BasePath = builder.Environment.ContentRootPath; - options.EnvironmentPrefix = "STELLAOPS_POLICY_GATEWAY_"; - options.BindingSection = 
PolicyGatewayOptions.SectionName; - options.ConfigureBuilder = configurationBuilder => - { - foreach (var relative in new[] - { - "../etc/policy-gateway.yaml", - "../etc/policy-gateway.local.yaml", - "policy-gateway.yaml", - "policy-gateway.local.yaml" - }) - { - var path = Path.Combine(builder.Environment.ContentRootPath, relative); - configurationBuilder.AddYamlFile(path, optional: true); - } - }; - options.PostBind = static (value, _) => value.Validate(); -}); - -builder.Configuration.AddConfiguration(bootstrap.Configuration); - -builder.Logging.SetMinimumLevel(bootstrap.Options.Telemetry.MinimumLogLevel); - -builder.Services.AddOptions<PolicyGatewayOptions>() - .Bind(builder.Configuration.GetSection(PolicyGatewayOptions.SectionName)) - .Validate(options => - { - try - { - options.Validate(); - return true; - } - catch (Exception ex) - { - throw new OptionsValidationException( - PolicyGatewayOptions.SectionName, - typeof(PolicyGatewayOptions), - new[] { ex.Message }); - } - }) - .ValidateOnStart(); - -builder.Services.AddSingleton(sp => sp.GetRequiredService<IOptions<PolicyGatewayOptions>>().Value); -builder.Services.AddSingleton(TimeProvider.System); -builder.Services.AddRouting(options => options.LowercaseUrls = true); -builder.Services.AddProblemDetails(); -builder.Services.AddHealthChecks(); -builder.Services.AddAuthentication(); -builder.Services.AddAuthorization(); -builder.Services.AddStellaOpsScopeHandler(); -builder.Services.AddStellaOpsResourceServerAuthentication( - builder.Configuration, - configurationSection: $"{PolicyGatewayOptions.SectionName}:ResourceServer"); -builder.Services.AddSingleton<PolicyGatewayMetrics>(); -builder.Services.AddSingleton<PolicyGatewayDpopProofGenerator>(); -builder.Services.AddSingleton<PolicyEngineTokenProvider>(); -builder.Services.AddTransient<PolicyGatewayDpopHandler>(); - -if (bootstrap.Options.PolicyEngine.ClientCredentials.Enabled) -{ - builder.Services.AddOptions<StellaOpsAuthClientOptions>() - .Configure(options => - { - options.Authority = bootstrap.Options.ResourceServer.Authority; - options.ClientId = bootstrap.Options.PolicyEngine.ClientCredentials.ClientId; - options.ClientSecret = bootstrap.Options.PolicyEngine.ClientCredentials.ClientSecret; - options.HttpTimeout = TimeSpan.FromSeconds(bootstrap.Options.PolicyEngine.ClientCredentials.BackchannelTimeoutSeconds); - foreach (var scope in bootstrap.Options.PolicyEngine.ClientCredentials.Scopes) - { - options.DefaultScopes.Add(scope); - } - }) - .PostConfigure(static opt => opt.Validate()); - - builder.Services.TryAddSingleton<IStellaOpsTokenCache, InMemoryTokenCache>(); - - builder.Services.AddHttpClient<StellaOpsDiscoveryCache>((provider, client) => - { - var authOptions = provider.GetRequiredService<IOptionsMonitor<StellaOpsAuthClientOptions>>().CurrentValue; - client.Timeout = authOptions.HttpTimeout; - }).AddPolicyHandler(static (provider, _) => CreateAuthorityRetryPolicy(provider)); - - builder.Services.AddHttpClient<StellaOpsJwksCache>((provider, client) => - { - var authOptions = provider.GetRequiredService<IOptionsMonitor<StellaOpsAuthClientOptions>>().CurrentValue; - client.Timeout = authOptions.HttpTimeout; - }).AddPolicyHandler(static (provider, _) => CreateAuthorityRetryPolicy(provider)); - - builder.Services.AddHttpClient<IStellaOpsTokenClient, StellaOpsTokenClient>((provider, client) => - { - var authOptions = provider.GetRequiredService<IOptionsMonitor<StellaOpsAuthClientOptions>>().CurrentValue; - client.Timeout = authOptions.HttpTimeout; - }) - 
.AddPolicyHandler(static (provider, _) => CreateAuthorityRetryPolicy(provider)) - .AddHttpMessageHandler<PolicyGatewayDpopHandler>(); -} - -builder.Services.AddHttpClient<IPolicyEngineClient, PolicyEngineClient>((serviceProvider, client) => -{ - var gatewayOptions = serviceProvider.GetRequiredService<IOptions<PolicyGatewayOptions>>().Value; - client.BaseAddress = gatewayOptions.PolicyEngine.BaseUri; - client.Timeout = TimeSpan.FromSeconds(gatewayOptions.PolicyEngine.ClientCredentials.BackchannelTimeoutSeconds); -}) -.AddPolicyHandler(static (provider, _) => CreatePolicyEngineRetryPolicy(provider)); - -var app = builder.Build(); - -app.UseExceptionHandler(static appBuilder => appBuilder.Run(async context => -{ - context.Response.StatusCode = StatusCodes.Status500InternalServerError; - await context.Response.WriteAsJsonAsync(new { error = "Unexpected gateway error." }); -})); - -app.UseStatusCodePages(); - -app.UseAuthentication(); -app.UseAuthorization(); - -app.MapHealthChecks("/healthz"); - -app.MapGet("/readyz", () => Results.Ok(new { status = "ready" })) - .WithName("Readiness"); - -app.MapGet("/", () => Results.Redirect("/healthz")); - -var policyPacks = app.MapGroup("/api/policy/packs") - .WithTags("Policy Packs"); - -policyPacks.MapGet(string.Empty, async Task<IResult> ( - HttpContext context, - IPolicyEngineClient client, - PolicyEngineTokenProvider tokenProvider, - CancellationToken cancellationToken) => - { - GatewayForwardingContext? forwardingContext = null; - if (GatewayForwardingContext.TryCreate(context, out var callerContext)) - { - forwardingContext = callerContext; - } - else if (!tokenProvider.IsEnabled) - { - return Results.Unauthorized(); - } - - var response = await client.ListPolicyPacksAsync(forwardingContext, cancellationToken).ConfigureAwait(false); - return response.ToMinimalResult(); - }) - .RequireAuthorization(policy => policy.RequireStellaOpsScopes(StellaOpsScopes.PolicyRead)); - -policyPacks.MapPost(string.Empty, async Task<IResult> ( - HttpContext context, - CreatePolicyPackRequest request, - IPolicyEngineClient client, - PolicyEngineTokenProvider tokenProvider, - CancellationToken cancellationToken) => - { - if (request is null) - { - return Results.BadRequest(new ProblemDetails - { - Title = "Request body required.", - Status = StatusCodes.Status400BadRequest - }); - } - - GatewayForwardingContext? forwardingContext = null; - if (GatewayForwardingContext.TryCreate(context, out var callerContext)) - { - forwardingContext = callerContext; - } - else if (!tokenProvider.IsEnabled) - { - return Results.Unauthorized(); - } - - var response = await client.CreatePolicyPackAsync(forwardingContext, request, cancellationToken).ConfigureAwait(false); - return response.ToMinimalResult(); - }) - .RequireAuthorization(policy => policy.RequireStellaOpsScopes(StellaOpsScopes.PolicyAuthor)); - -policyPacks.MapPost("/{packId}/revisions", async Task<IResult> ( - HttpContext context, - string packId, - CreatePolicyRevisionRequest request, - IPolicyEngineClient client, - PolicyEngineTokenProvider tokenProvider, - CancellationToken cancellationToken) => - { - if (string.IsNullOrWhiteSpace(packId)) - { - return Results.BadRequest(new ProblemDetails - { - Title = "packId is required.", - Status = StatusCodes.Status400BadRequest - }); - } - - if (request is null) - { - return Results.BadRequest(new ProblemDetails - { - Title = "Request body required.", - Status = StatusCodes.Status400BadRequest - }); - } - - GatewayForwardingContext? 
forwardingContext = null; - if (GatewayForwardingContext.TryCreate(context, out var callerContext)) - { - forwardingContext = callerContext; - } - else if (!tokenProvider.IsEnabled) - { - return Results.Unauthorized(); - } - - var response = await client.CreatePolicyRevisionAsync(forwardingContext, packId, request, cancellationToken).ConfigureAwait(false); - return response.ToMinimalResult(); - }) - .RequireAuthorization(policy => policy.RequireStellaOpsScopes(StellaOpsScopes.PolicyAuthor)); - -policyPacks.MapPost("/{packId}/revisions/{version:int}:activate", async Task<IResult> ( - HttpContext context, - string packId, - int version, - ActivatePolicyRevisionRequest request, - IPolicyEngineClient client, - PolicyEngineTokenProvider tokenProvider, - PolicyGatewayMetrics metrics, - ILoggerFactory loggerFactory, - CancellationToken cancellationToken) => - { - if (string.IsNullOrWhiteSpace(packId)) - { - return Results.BadRequest(new ProblemDetails - { - Title = "packId is required.", - Status = StatusCodes.Status400BadRequest - }); - } - - if (request is null) - { - return Results.BadRequest(new ProblemDetails - { - Title = "Request body required.", - Status = StatusCodes.Status400BadRequest - }); - } - - GatewayForwardingContext? forwardingContext = null; - var source = "service"; - if (GatewayForwardingContext.TryCreate(context, out var callerContext)) - { - forwardingContext = callerContext; - source = "caller"; - } - else if (!tokenProvider.IsEnabled) - { - return Results.Unauthorized(); - } - - var stopwatch = System.Diagnostics.Stopwatch.StartNew(); - var response = await client.ActivatePolicyRevisionAsync(forwardingContext, packId, version, request, cancellationToken).ConfigureAwait(false); - stopwatch.Stop(); - - var outcome = DetermineActivationOutcome(response); - metrics.RecordActivation(outcome, source, stopwatch.Elapsed.TotalMilliseconds); - - var logger = loggerFactory.CreateLogger("StellaOps.Policy.Gateway.Activation"); - LogActivation(logger, packId, version, outcome, source, response.StatusCode); - - return response.ToMinimalResult(); - }) - .RequireAuthorization(policy => policy.RequireStellaOpsScopes( - StellaOpsScopes.PolicyOperate, - StellaOpsScopes.PolicyActivate)); - -app.Run(); - -static IAsyncPolicy<HttpResponseMessage> CreateAuthorityRetryPolicy(IServiceProvider provider) -{ - var authOptions = provider.GetRequiredService<IOptionsMonitor<StellaOpsAuthClientOptions>>().CurrentValue; - var delays = authOptions.NormalizedRetryDelays; - if (delays.Count == 0) - { - return Policy.NoOpAsync<HttpResponseMessage>(); - } - - var loggerFactory = provider.GetService<ILoggerFactory>(); - var logger = loggerFactory?.CreateLogger("PolicyGateway.AuthorityHttp"); - - return HttpPolicyExtensions - .HandleTransientHttpError() - .OrResult(static message => message.StatusCode == HttpStatusCode.TooManyRequests) - .WaitAndRetryAsync( - delays.Count, - attempt => delays[attempt - 1], - (outcome, delay, attempt, _) => - { - logger?.LogWarning( - outcome.Exception, - "Retrying Authority HTTP call ({Attempt}/{Total}) after {Reason}; waiting {Delay}.", - attempt, - delays.Count, - outcome.Exception?.Message ?? 
outcome.Result?.StatusCode.ToString(), - delay); - }); -} - -static IAsyncPolicy<HttpResponseMessage> CreatePolicyEngineRetryPolicy(IServiceProvider provider) - => HttpPolicyExtensions - .HandleTransientHttpError() - .OrResult(static response => response.StatusCode is HttpStatusCode.TooManyRequests or HttpStatusCode.BadGateway or HttpStatusCode.ServiceUnavailable or HttpStatusCode.GatewayTimeout) - .WaitAndRetryAsync(3, attempt => TimeSpan.FromSeconds(Math.Pow(2, attempt))); - -static string DetermineActivationOutcome(PolicyEngineResponse<PolicyRevisionActivationDto> response) -{ - if (response.IsSuccess) - { - return response.Value?.Status switch - { - "activated" => "activated", - "already_active" => "already_active", - "pending_second_approval" => "pending_second_approval", - _ => "success" - }; - } - - return response.StatusCode switch - { - HttpStatusCode.BadRequest => "bad_request", - HttpStatusCode.NotFound => "not_found", - HttpStatusCode.Unauthorized => "unauthorized", - HttpStatusCode.Forbidden => "forbidden", - _ => "error" - }; -} - -static void LogActivation(ILogger logger, string packId, int version, string outcome, string source, HttpStatusCode statusCode) -{ - if (logger is null) - { - return; - } - - var message = "Policy activation forwarded."; - var logLevel = outcome is "activated" or "already_active" or "pending_second_approval" ? LogLevel.Information : LogLevel.Warning; - logger.Log(logLevel, message + " Outcome={Outcome}; Source={Source}; PackId={PackId}; Version={Version}; StatusCode={StatusCode}.", outcome, source, packId, version, (int)statusCode); -} - -public partial class Program -{ -} +using System; +using System.Diagnostics; +using System.IO; +using System.Net.Http; +using System.Net; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using NetEscapades.Configuration.Yaml; +using StellaOps.Auth.Abstractions; +using StellaOps.Auth.Client; +using StellaOps.Auth.ServerIntegration; +using StellaOps.Configuration; +using StellaOps.Policy.Gateway.Clients; +using StellaOps.Policy.Gateway.Contracts; +using StellaOps.Policy.Gateway.Infrastructure; +using StellaOps.Policy.Gateway.Options; +using StellaOps.Policy.Gateway.Services; +using Polly; +using Polly.Extensions.Http; + +var builder = WebApplication.CreateBuilder(args); + +builder.Logging.ClearProviders(); +builder.Logging.AddJsonConsole(); + +builder.Configuration.AddStellaOpsDefaults(options => +{ + options.BasePath = builder.Environment.ContentRootPath; + options.EnvironmentPrefix = "STELLAOPS_POLICY_GATEWAY_"; + options.ConfigureBuilder = configurationBuilder => + { + var contentRoot = builder.Environment.ContentRootPath; + foreach (var relative in new[] + { + "../etc/policy-gateway.yaml", + "../etc/policy-gateway.local.yaml", + "policy-gateway.yaml", + "policy-gateway.local.yaml" + }) + { + var path = Path.Combine(contentRoot, relative); + configurationBuilder.AddYamlFile(path, optional: true); + } + }; +}); + +var bootstrap = StellaOpsConfigurationBootstrapper.Build<PolicyGatewayOptions>(options => +{ + options.BasePath = builder.Environment.ContentRootPath; + options.EnvironmentPrefix = "STELLAOPS_POLICY_GATEWAY_"; + options.BindingSection = PolicyGatewayOptions.SectionName; + options.ConfigureBuilder = configurationBuilder => + { + foreach (var relative in new[] + { + "../etc/policy-gateway.yaml", + "../etc/policy-gateway.local.yaml", + 
"policy-gateway.yaml", + "policy-gateway.local.yaml" + }) + { + var path = Path.Combine(builder.Environment.ContentRootPath, relative); + configurationBuilder.AddYamlFile(path, optional: true); + } + }; + options.PostBind = static (value, _) => value.Validate(); +}); + +builder.Configuration.AddConfiguration(bootstrap.Configuration); + +builder.Logging.SetMinimumLevel(bootstrap.Options.Telemetry.MinimumLogLevel); + +builder.Services.AddOptions<PolicyGatewayOptions>() + .Bind(builder.Configuration.GetSection(PolicyGatewayOptions.SectionName)) + .Validate(options => + { + try + { + options.Validate(); + return true; + } + catch (Exception ex) + { + throw new OptionsValidationException( + PolicyGatewayOptions.SectionName, + typeof(PolicyGatewayOptions), + new[] { ex.Message }); + } + }) + .ValidateOnStart(); + +builder.Services.AddSingleton(sp => sp.GetRequiredService<IOptions<PolicyGatewayOptions>>().Value); +builder.Services.AddSingleton(TimeProvider.System); +builder.Services.AddRouting(options => options.LowercaseUrls = true); +builder.Services.AddProblemDetails(); +builder.Services.AddHealthChecks(); +builder.Services.AddAuthentication(); +builder.Services.AddAuthorization(); +builder.Services.AddStellaOpsScopeHandler(); +builder.Services.AddStellaOpsResourceServerAuthentication( + builder.Configuration, + configurationSection: $"{PolicyGatewayOptions.SectionName}:ResourceServer"); +builder.Services.AddSingleton<PolicyGatewayMetrics>(); +builder.Services.AddSingleton<PolicyGatewayDpopProofGenerator>(); +builder.Services.AddSingleton<PolicyEngineTokenProvider>(); +builder.Services.AddTransient<PolicyGatewayDpopHandler>(); + +if (bootstrap.Options.PolicyEngine.ClientCredentials.Enabled) +{ + builder.Services.AddOptions<StellaOpsAuthClientOptions>() + .Configure(options => + { + options.Authority = bootstrap.Options.ResourceServer.Authority; + options.ClientId = bootstrap.Options.PolicyEngine.ClientCredentials.ClientId; + options.ClientSecret = bootstrap.Options.PolicyEngine.ClientCredentials.ClientSecret; + options.HttpTimeout = TimeSpan.FromSeconds(bootstrap.Options.PolicyEngine.ClientCredentials.BackchannelTimeoutSeconds); + foreach (var scope in bootstrap.Options.PolicyEngine.ClientCredentials.Scopes) + { + options.DefaultScopes.Add(scope); + } + }) + .PostConfigure(static opt => opt.Validate()); + + builder.Services.TryAddSingleton<IStellaOpsTokenCache, InMemoryTokenCache>(); + + builder.Services.AddHttpClient<StellaOpsDiscoveryCache>((provider, client) => + { + var authOptions = provider.GetRequiredService<IOptionsMonitor<StellaOpsAuthClientOptions>>().CurrentValue; + client.Timeout = authOptions.HttpTimeout; + }).AddPolicyHandler(static (provider, _) => CreateAuthorityRetryPolicy(provider)); + + builder.Services.AddHttpClient<StellaOpsJwksCache>((provider, client) => + { + var authOptions = provider.GetRequiredService<IOptionsMonitor<StellaOpsAuthClientOptions>>().CurrentValue; + client.Timeout = authOptions.HttpTimeout; + }).AddPolicyHandler(static (provider, _) => CreateAuthorityRetryPolicy(provider)); + + builder.Services.AddHttpClient<IStellaOpsTokenClient, StellaOpsTokenClient>((provider, client) => + { + var authOptions = provider.GetRequiredService<IOptionsMonitor<StellaOpsAuthClientOptions>>().CurrentValue; + client.Timeout = authOptions.HttpTimeout; + }) + .AddPolicyHandler(static (provider, _) => CreateAuthorityRetryPolicy(provider)) + .AddHttpMessageHandler<PolicyGatewayDpopHandler>(); +} + +builder.Services.AddHttpClient<IPolicyEngineClient, 
PolicyEngineClient>((serviceProvider, client) => +{ + var gatewayOptions = serviceProvider.GetRequiredService<IOptions<PolicyGatewayOptions>>().Value; + client.BaseAddress = gatewayOptions.PolicyEngine.BaseUri; + client.Timeout = TimeSpan.FromSeconds(gatewayOptions.PolicyEngine.ClientCredentials.BackchannelTimeoutSeconds); +}) +.AddPolicyHandler(static (provider, _) => CreatePolicyEngineRetryPolicy(provider)); + +var app = builder.Build(); + +app.UseExceptionHandler(static appBuilder => appBuilder.Run(async context => +{ + context.Response.StatusCode = StatusCodes.Status500InternalServerError; + await context.Response.WriteAsJsonAsync(new { error = "Unexpected gateway error." }); +})); + +app.UseStatusCodePages(); + +app.UseAuthentication(); +app.UseAuthorization(); + +app.MapHealthChecks("/healthz"); + +app.MapGet("/readyz", () => Results.Ok(new { status = "ready" })) + .WithName("Readiness"); + +app.MapGet("/", () => Results.Redirect("/healthz")); + +var policyPacks = app.MapGroup("/api/policy/packs") + .WithTags("Policy Packs"); + +policyPacks.MapGet(string.Empty, async Task<IResult> ( + HttpContext context, + IPolicyEngineClient client, + PolicyEngineTokenProvider tokenProvider, + CancellationToken cancellationToken) => + { + GatewayForwardingContext? forwardingContext = null; + if (GatewayForwardingContext.TryCreate(context, out var callerContext)) + { + forwardingContext = callerContext; + } + else if (!tokenProvider.IsEnabled) + { + return Results.Unauthorized(); + } + + var response = await client.ListPolicyPacksAsync(forwardingContext, cancellationToken).ConfigureAwait(false); + return response.ToMinimalResult(); + }) + .RequireAuthorization(policy => policy.RequireStellaOpsScopes(StellaOpsScopes.PolicyRead)); + +policyPacks.MapPost(string.Empty, async Task<IResult> ( + HttpContext context, + CreatePolicyPackRequest request, + IPolicyEngineClient client, + PolicyEngineTokenProvider tokenProvider, + CancellationToken cancellationToken) => + { + if (request is null) + { + return Results.BadRequest(new ProblemDetails + { + Title = "Request body required.", + Status = StatusCodes.Status400BadRequest + }); + } + + GatewayForwardingContext? forwardingContext = null; + if (GatewayForwardingContext.TryCreate(context, out var callerContext)) + { + forwardingContext = callerContext; + } + else if (!tokenProvider.IsEnabled) + { + return Results.Unauthorized(); + } + + var response = await client.CreatePolicyPackAsync(forwardingContext, request, cancellationToken).ConfigureAwait(false); + return response.ToMinimalResult(); + }) + .RequireAuthorization(policy => policy.RequireStellaOpsScopes(StellaOpsScopes.PolicyAuthor)); + +policyPacks.MapPost("/{packId}/revisions", async Task<IResult> ( + HttpContext context, + string packId, + CreatePolicyRevisionRequest request, + IPolicyEngineClient client, + PolicyEngineTokenProvider tokenProvider, + CancellationToken cancellationToken) => + { + if (string.IsNullOrWhiteSpace(packId)) + { + return Results.BadRequest(new ProblemDetails + { + Title = "packId is required.", + Status = StatusCodes.Status400BadRequest + }); + } + + if (request is null) + { + return Results.BadRequest(new ProblemDetails + { + Title = "Request body required.", + Status = StatusCodes.Status400BadRequest + }); + } + + GatewayForwardingContext? 
forwardingContext = null; + if (GatewayForwardingContext.TryCreate(context, out var callerContext)) + { + forwardingContext = callerContext; + } + else if (!tokenProvider.IsEnabled) + { + return Results.Unauthorized(); + } + + var response = await client.CreatePolicyRevisionAsync(forwardingContext, packId, request, cancellationToken).ConfigureAwait(false); + return response.ToMinimalResult(); + }) + .RequireAuthorization(policy => policy.RequireStellaOpsScopes(StellaOpsScopes.PolicyAuthor)); + +policyPacks.MapPost("/{packId}/revisions/{version:int}:activate", async Task<IResult> ( + HttpContext context, + string packId, + int version, + ActivatePolicyRevisionRequest request, + IPolicyEngineClient client, + PolicyEngineTokenProvider tokenProvider, + PolicyGatewayMetrics metrics, + ILoggerFactory loggerFactory, + CancellationToken cancellationToken) => + { + if (string.IsNullOrWhiteSpace(packId)) + { + return Results.BadRequest(new ProblemDetails + { + Title = "packId is required.", + Status = StatusCodes.Status400BadRequest + }); + } + + if (request is null) + { + return Results.BadRequest(new ProblemDetails + { + Title = "Request body required.", + Status = StatusCodes.Status400BadRequest + }); + } + + GatewayForwardingContext? forwardingContext = null; + var source = "service"; + if (GatewayForwardingContext.TryCreate(context, out var callerContext)) + { + forwardingContext = callerContext; + source = "caller"; + } + else if (!tokenProvider.IsEnabled) + { + return Results.Unauthorized(); + } + + var stopwatch = System.Diagnostics.Stopwatch.StartNew(); + var response = await client.ActivatePolicyRevisionAsync(forwardingContext, packId, version, request, cancellationToken).ConfigureAwait(false); + stopwatch.Stop(); + + var outcome = DetermineActivationOutcome(response); + metrics.RecordActivation(outcome, source, stopwatch.Elapsed.TotalMilliseconds); + + var logger = loggerFactory.CreateLogger("StellaOps.Policy.Gateway.Activation"); + LogActivation(logger, packId, version, outcome, source, response.StatusCode); + + return response.ToMinimalResult(); + }) + .RequireAuthorization(policy => policy.RequireStellaOpsScopes( + StellaOpsScopes.PolicyOperate, + StellaOpsScopes.PolicyActivate)); + +app.Run(); + +static IAsyncPolicy<HttpResponseMessage> CreateAuthorityRetryPolicy(IServiceProvider provider) +{ + var authOptions = provider.GetRequiredService<IOptionsMonitor<StellaOpsAuthClientOptions>>().CurrentValue; + var delays = authOptions.NormalizedRetryDelays; + if (delays.Count == 0) + { + return Policy.NoOpAsync<HttpResponseMessage>(); + } + + var loggerFactory = provider.GetService<ILoggerFactory>(); + var logger = loggerFactory?.CreateLogger("PolicyGateway.AuthorityHttp"); + + return HttpPolicyExtensions + .HandleTransientHttpError() + .OrResult(static message => message.StatusCode == HttpStatusCode.TooManyRequests) + .WaitAndRetryAsync( + delays.Count, + attempt => delays[attempt - 1], + (outcome, delay, attempt, _) => + { + logger?.LogWarning( + outcome.Exception, + "Retrying Authority HTTP call ({Attempt}/{Total}) after {Reason}; waiting {Delay}.", + attempt, + delays.Count, + outcome.Exception?.Message ?? 
outcome.Result?.StatusCode.ToString(), + delay); + }); +} + +static IAsyncPolicy<HttpResponseMessage> CreatePolicyEngineRetryPolicy(IServiceProvider provider) + => HttpPolicyExtensions + .HandleTransientHttpError() + .OrResult(static response => response.StatusCode is HttpStatusCode.TooManyRequests or HttpStatusCode.BadGateway or HttpStatusCode.ServiceUnavailable or HttpStatusCode.GatewayTimeout) + .WaitAndRetryAsync(3, attempt => TimeSpan.FromSeconds(Math.Pow(2, attempt))); + +static string DetermineActivationOutcome(PolicyEngineResponse<PolicyRevisionActivationDto> response) +{ + if (response.IsSuccess) + { + return response.Value?.Status switch + { + "activated" => "activated", + "already_active" => "already_active", + "pending_second_approval" => "pending_second_approval", + _ => "success" + }; + } + + return response.StatusCode switch + { + HttpStatusCode.BadRequest => "bad_request", + HttpStatusCode.NotFound => "not_found", + HttpStatusCode.Unauthorized => "unauthorized", + HttpStatusCode.Forbidden => "forbidden", + _ => "error" + }; +} + +static void LogActivation(ILogger logger, string packId, int version, string outcome, string source, HttpStatusCode statusCode) +{ + if (logger is null) + { + return; + } + + var message = "Policy activation forwarded."; + var logLevel = outcome is "activated" or "already_active" or "pending_second_approval" ? LogLevel.Information : LogLevel.Warning; + logger.Log(logLevel, message + " Outcome={Outcome}; Source={Source}; PackId={PackId}; Version={Version}; StatusCode={StatusCode}.", outcome, source, packId, version, (int)statusCode); +} + +public partial class Program +{ +} diff --git a/src/StellaOps.Policy.Gateway/Properties/AssemblyInfo.cs b/src/Policy/StellaOps.Policy.Gateway/Properties/AssemblyInfo.cs similarity index 97% rename from src/StellaOps.Policy.Gateway/Properties/AssemblyInfo.cs rename to src/Policy/StellaOps.Policy.Gateway/Properties/AssemblyInfo.cs index 8c3284e8..5baba798 100644 --- a/src/StellaOps.Policy.Gateway/Properties/AssemblyInfo.cs +++ b/src/Policy/StellaOps.Policy.Gateway/Properties/AssemblyInfo.cs @@ -1,3 +1,3 @@ -using System.Runtime.CompilerServices; - -[assembly: InternalsVisibleTo("StellaOps.Policy.Gateway.Tests")] +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("StellaOps.Policy.Gateway.Tests")] diff --git a/src/StellaOps.Policy.Gateway/Services/PolicyEngineTokenProvider.cs b/src/Policy/StellaOps.Policy.Gateway/Services/PolicyEngineTokenProvider.cs similarity index 97% rename from src/StellaOps.Policy.Gateway/Services/PolicyEngineTokenProvider.cs rename to src/Policy/StellaOps.Policy.Gateway/Services/PolicyEngineTokenProvider.cs index cfa3de24..46707c3a 100644 --- a/src/StellaOps.Policy.Gateway/Services/PolicyEngineTokenProvider.cs +++ b/src/Policy/StellaOps.Policy.Gateway/Services/PolicyEngineTokenProvider.cs @@ -1,123 +1,123 @@ -using System; -using System.Collections.Generic; -using System.Net.Http; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Auth.Client; -using StellaOps.Policy.Gateway.Options; - -namespace StellaOps.Policy.Gateway.Services; - -internal sealed class PolicyEngineTokenProvider -{ - private readonly IStellaOpsTokenClient tokenClient; - private readonly IOptionsMonitor<PolicyGatewayOptions> optionsMonitor; - private readonly PolicyGatewayDpopProofGenerator dpopGenerator; - private readonly TimeProvider timeProvider; - private readonly ILogger<PolicyEngineTokenProvider> 
logger; - private readonly SemaphoreSlim mutex = new(1, 1); - private CachedToken? cachedToken; - - public PolicyEngineTokenProvider( - IStellaOpsTokenClient tokenClient, - IOptionsMonitor<PolicyGatewayOptions> optionsMonitor, - PolicyGatewayDpopProofGenerator dpopGenerator, - TimeProvider timeProvider, - ILogger<PolicyEngineTokenProvider> logger) - { - this.tokenClient = tokenClient ?? throw new ArgumentNullException(nameof(tokenClient)); - this.optionsMonitor = optionsMonitor ?? throw new ArgumentNullException(nameof(optionsMonitor)); - this.dpopGenerator = dpopGenerator ?? throw new ArgumentNullException(nameof(dpopGenerator)); - this.timeProvider = timeProvider ?? TimeProvider.System; - this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); - } - - public bool IsEnabled => optionsMonitor.CurrentValue.PolicyEngine.ClientCredentials.Enabled; - - public async ValueTask<PolicyGatewayAuthorization?> GetAuthorizationAsync(HttpMethod method, Uri targetUri, CancellationToken cancellationToken) - { - if (!IsEnabled) - { - return null; - } - - var tokenResult = await GetTokenAsync(cancellationToken).ConfigureAwait(false); - if (tokenResult is null) - { - return null; - } - - var token = tokenResult.Value; - string? proof = null; - if (dpopGenerator.Enabled) - { - proof = dpopGenerator.CreateProof(method, targetUri, token.AccessToken); - } - - var scheme = string.Equals(token.TokenType, "dpop", StringComparison.OrdinalIgnoreCase) - ? "DPoP" - : token.TokenType; - - var authorization = $"{scheme} {token.AccessToken}"; - return new PolicyGatewayAuthorization(authorization, proof, "service"); - } - - private async ValueTask<CachedToken?> GetTokenAsync(CancellationToken cancellationToken) - { - var options = optionsMonitor.CurrentValue.PolicyEngine; - if (!options.ClientCredentials.Enabled) - { - return null; - } - - var now = timeProvider.GetUtcNow(); - if (cachedToken is { } existing && existing.ExpiresAt > now + TimeSpan.FromSeconds(30)) - { - return existing; - } - - await mutex.WaitAsync(cancellationToken).ConfigureAwait(false); - try - { - if (cachedToken is { } cached && cached.ExpiresAt > now + TimeSpan.FromSeconds(30)) - { - return cached; - } - - var scopeString = BuildScopeClaim(options); - var result = await tokenClient.RequestClientCredentialsTokenAsync(scopeString, null, cancellationToken).ConfigureAwait(false); - var expiresAt = result.ExpiresAtUtc; - cachedToken = new CachedToken(result.AccessToken, string.IsNullOrWhiteSpace(result.TokenType) ? 
"Bearer" : result.TokenType, expiresAt); - logger.LogInformation("Issued Policy Engine client credentials token; expires at {ExpiresAt:o}.", expiresAt); - return cachedToken; - } - finally - { - mutex.Release(); - } - } - - private string BuildScopeClaim(PolicyGatewayPolicyEngineOptions options) - { - var scopeSet = new SortedSet<string>(StringComparer.Ordinal) - { - $"aud:{options.Audience.Trim().ToLowerInvariant()}" - }; - - foreach (var scope in options.ClientCredentials.Scopes) - { - if (string.IsNullOrWhiteSpace(scope)) - { - continue; - } - - scopeSet.Add(scope.Trim()); - } - - return string.Join(' ', scopeSet); - } - - private readonly record struct CachedToken(string AccessToken, string TokenType, DateTimeOffset ExpiresAt); -} +using System; +using System.Collections.Generic; +using System.Net.Http; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Auth.Client; +using StellaOps.Policy.Gateway.Options; + +namespace StellaOps.Policy.Gateway.Services; + +internal sealed class PolicyEngineTokenProvider +{ + private readonly IStellaOpsTokenClient tokenClient; + private readonly IOptionsMonitor<PolicyGatewayOptions> optionsMonitor; + private readonly PolicyGatewayDpopProofGenerator dpopGenerator; + private readonly TimeProvider timeProvider; + private readonly ILogger<PolicyEngineTokenProvider> logger; + private readonly SemaphoreSlim mutex = new(1, 1); + private CachedToken? cachedToken; + + public PolicyEngineTokenProvider( + IStellaOpsTokenClient tokenClient, + IOptionsMonitor<PolicyGatewayOptions> optionsMonitor, + PolicyGatewayDpopProofGenerator dpopGenerator, + TimeProvider timeProvider, + ILogger<PolicyEngineTokenProvider> logger) + { + this.tokenClient = tokenClient ?? throw new ArgumentNullException(nameof(tokenClient)); + this.optionsMonitor = optionsMonitor ?? throw new ArgumentNullException(nameof(optionsMonitor)); + this.dpopGenerator = dpopGenerator ?? throw new ArgumentNullException(nameof(dpopGenerator)); + this.timeProvider = timeProvider ?? TimeProvider.System; + this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public bool IsEnabled => optionsMonitor.CurrentValue.PolicyEngine.ClientCredentials.Enabled; + + public async ValueTask<PolicyGatewayAuthorization?> GetAuthorizationAsync(HttpMethod method, Uri targetUri, CancellationToken cancellationToken) + { + if (!IsEnabled) + { + return null; + } + + var tokenResult = await GetTokenAsync(cancellationToken).ConfigureAwait(false); + if (tokenResult is null) + { + return null; + } + + var token = tokenResult.Value; + string? proof = null; + if (dpopGenerator.Enabled) + { + proof = dpopGenerator.CreateProof(method, targetUri, token.AccessToken); + } + + var scheme = string.Equals(token.TokenType, "dpop", StringComparison.OrdinalIgnoreCase) + ? 
"DPoP" + : token.TokenType; + + var authorization = $"{scheme} {token.AccessToken}"; + return new PolicyGatewayAuthorization(authorization, proof, "service"); + } + + private async ValueTask<CachedToken?> GetTokenAsync(CancellationToken cancellationToken) + { + var options = optionsMonitor.CurrentValue.PolicyEngine; + if (!options.ClientCredentials.Enabled) + { + return null; + } + + var now = timeProvider.GetUtcNow(); + if (cachedToken is { } existing && existing.ExpiresAt > now + TimeSpan.FromSeconds(30)) + { + return existing; + } + + await mutex.WaitAsync(cancellationToken).ConfigureAwait(false); + try + { + if (cachedToken is { } cached && cached.ExpiresAt > now + TimeSpan.FromSeconds(30)) + { + return cached; + } + + var scopeString = BuildScopeClaim(options); + var result = await tokenClient.RequestClientCredentialsTokenAsync(scopeString, null, cancellationToken).ConfigureAwait(false); + var expiresAt = result.ExpiresAtUtc; + cachedToken = new CachedToken(result.AccessToken, string.IsNullOrWhiteSpace(result.TokenType) ? "Bearer" : result.TokenType, expiresAt); + logger.LogInformation("Issued Policy Engine client credentials token; expires at {ExpiresAt:o}.", expiresAt); + return cachedToken; + } + finally + { + mutex.Release(); + } + } + + private string BuildScopeClaim(PolicyGatewayPolicyEngineOptions options) + { + var scopeSet = new SortedSet<string>(StringComparer.Ordinal) + { + $"aud:{options.Audience.Trim().ToLowerInvariant()}" + }; + + foreach (var scope in options.ClientCredentials.Scopes) + { + if (string.IsNullOrWhiteSpace(scope)) + { + continue; + } + + scopeSet.Add(scope.Trim()); + } + + return string.Join(' ', scopeSet); + } + + private readonly record struct CachedToken(string AccessToken, string TokenType, DateTimeOffset ExpiresAt); +} diff --git a/src/StellaOps.Policy.Gateway/Services/PolicyGatewayAuthorization.cs b/src/Policy/StellaOps.Policy.Gateway/Services/PolicyGatewayAuthorization.cs similarity index 96% rename from src/StellaOps.Policy.Gateway/Services/PolicyGatewayAuthorization.cs rename to src/Policy/StellaOps.Policy.Gateway/Services/PolicyGatewayAuthorization.cs index 1c314d58..79967194 100644 --- a/src/StellaOps.Policy.Gateway/Services/PolicyGatewayAuthorization.cs +++ b/src/Policy/StellaOps.Policy.Gateway/Services/PolicyGatewayAuthorization.cs @@ -1,24 +1,24 @@ -using System; -using System.Net.Http; - -namespace StellaOps.Policy.Gateway.Services; - -internal readonly record struct PolicyGatewayAuthorization(string AuthorizationHeader, string? DpopProof, string Source) -{ - public void Apply(HttpRequestMessage request) - { - ArgumentNullException.ThrowIfNull(request); - - if (!string.IsNullOrWhiteSpace(AuthorizationHeader)) - { - request.Headers.Remove("Authorization"); - request.Headers.TryAddWithoutValidation("Authorization", AuthorizationHeader); - } - - if (!string.IsNullOrWhiteSpace(DpopProof)) - { - request.Headers.Remove("DPoP"); - request.Headers.TryAddWithoutValidation("DPoP", DpopProof); - } - } -} +using System; +using System.Net.Http; + +namespace StellaOps.Policy.Gateway.Services; + +internal readonly record struct PolicyGatewayAuthorization(string AuthorizationHeader, string? 
DpopProof, string Source) +{ + public void Apply(HttpRequestMessage request) + { + ArgumentNullException.ThrowIfNull(request); + + if (!string.IsNullOrWhiteSpace(AuthorizationHeader)) + { + request.Headers.Remove("Authorization"); + request.Headers.TryAddWithoutValidation("Authorization", AuthorizationHeader); + } + + if (!string.IsNullOrWhiteSpace(DpopProof)) + { + request.Headers.Remove("DPoP"); + request.Headers.TryAddWithoutValidation("DPoP", DpopProof); + } + } +} diff --git a/src/StellaOps.Policy.Gateway/Services/PolicyGatewayDpopHandler.cs b/src/Policy/StellaOps.Policy.Gateway/Services/PolicyGatewayDpopHandler.cs similarity index 97% rename from src/StellaOps.Policy.Gateway/Services/PolicyGatewayDpopHandler.cs rename to src/Policy/StellaOps.Policy.Gateway/Services/PolicyGatewayDpopHandler.cs index 540ff5bf..07ff025d 100644 --- a/src/StellaOps.Policy.Gateway/Services/PolicyGatewayDpopHandler.cs +++ b/src/Policy/StellaOps.Policy.Gateway/Services/PolicyGatewayDpopHandler.cs @@ -1,42 +1,42 @@ -using System; -using System.Net.Http; -using Microsoft.Extensions.Options; -using StellaOps.Policy.Gateway.Options; - -namespace StellaOps.Policy.Gateway.Services; - -internal sealed class PolicyGatewayDpopHandler : DelegatingHandler -{ - private readonly IOptionsMonitor<PolicyGatewayOptions> optionsMonitor; - private readonly PolicyGatewayDpopProofGenerator proofGenerator; - - public PolicyGatewayDpopHandler( - IOptionsMonitor<PolicyGatewayOptions> optionsMonitor, - PolicyGatewayDpopProofGenerator proofGenerator) - { - this.optionsMonitor = optionsMonitor ?? throw new ArgumentNullException(nameof(optionsMonitor)); - this.proofGenerator = proofGenerator ?? throw new ArgumentNullException(nameof(proofGenerator)); - } - - protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) - { - if (request is null) - { - throw new ArgumentNullException(nameof(request)); - } - - var options = optionsMonitor.CurrentValue.PolicyEngine.Dpop; - if (options.Enabled && - proofGenerator.Enabled && - request.Method == HttpMethod.Post && - request.RequestUri is { } uri && - uri.AbsolutePath.Contains("/token", StringComparison.OrdinalIgnoreCase)) - { - var proof = proofGenerator.CreateProof(request.Method, uri, accessToken: null); - request.Headers.Remove("DPoP"); - request.Headers.TryAddWithoutValidation("DPoP", proof); - } - - return base.SendAsync(request, cancellationToken); - } -} +using System; +using System.Net.Http; +using Microsoft.Extensions.Options; +using StellaOps.Policy.Gateway.Options; + +namespace StellaOps.Policy.Gateway.Services; + +internal sealed class PolicyGatewayDpopHandler : DelegatingHandler +{ + private readonly IOptionsMonitor<PolicyGatewayOptions> optionsMonitor; + private readonly PolicyGatewayDpopProofGenerator proofGenerator; + + public PolicyGatewayDpopHandler( + IOptionsMonitor<PolicyGatewayOptions> optionsMonitor, + PolicyGatewayDpopProofGenerator proofGenerator) + { + this.optionsMonitor = optionsMonitor ?? throw new ArgumentNullException(nameof(optionsMonitor)); + this.proofGenerator = proofGenerator ?? 
throw new ArgumentNullException(nameof(proofGenerator)); + } + + protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + if (request is null) + { + throw new ArgumentNullException(nameof(request)); + } + + var options = optionsMonitor.CurrentValue.PolicyEngine.Dpop; + if (options.Enabled && + proofGenerator.Enabled && + request.Method == HttpMethod.Post && + request.RequestUri is { } uri && + uri.AbsolutePath.Contains("/token", StringComparison.OrdinalIgnoreCase)) + { + var proof = proofGenerator.CreateProof(request.Method, uri, accessToken: null); + request.Headers.Remove("DPoP"); + request.Headers.TryAddWithoutValidation("DPoP", proof); + } + + return base.SendAsync(request, cancellationToken); + } +} diff --git a/src/StellaOps.Policy.Gateway/Services/PolicyGatewayDpopProofGenerator.cs b/src/Policy/StellaOps.Policy.Gateway/Services/PolicyGatewayDpopProofGenerator.cs similarity index 97% rename from src/StellaOps.Policy.Gateway/Services/PolicyGatewayDpopProofGenerator.cs rename to src/Policy/StellaOps.Policy.Gateway/Services/PolicyGatewayDpopProofGenerator.cs index 73539159..82104fb2 100644 --- a/src/StellaOps.Policy.Gateway/Services/PolicyGatewayDpopProofGenerator.cs +++ b/src/Policy/StellaOps.Policy.Gateway/Services/PolicyGatewayDpopProofGenerator.cs @@ -1,235 +1,235 @@ -using System; -using System.Collections.Generic; -using System.Security.Cryptography; -using System.Text; -using System.IO; -using Microsoft.Extensions.Hosting; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using Microsoft.IdentityModel.Tokens; -using System.IdentityModel.Tokens.Jwt; -using StellaOps.Policy.Gateway.Options; - -namespace StellaOps.Policy.Gateway.Services; - -internal sealed class PolicyGatewayDpopProofGenerator : IDisposable -{ - private readonly IHostEnvironment hostEnvironment; - private readonly IOptionsMonitor<PolicyGatewayOptions> optionsMonitor; - private readonly TimeProvider timeProvider; - private readonly ILogger<PolicyGatewayDpopProofGenerator> logger; - private DpopKeyMaterial? keyMaterial; - private readonly object sync = new(); - - public PolicyGatewayDpopProofGenerator( - IHostEnvironment hostEnvironment, - IOptionsMonitor<PolicyGatewayOptions> optionsMonitor, - TimeProvider timeProvider, - ILogger<PolicyGatewayDpopProofGenerator> logger) - { - this.hostEnvironment = hostEnvironment ?? throw new ArgumentNullException(nameof(hostEnvironment)); - this.optionsMonitor = optionsMonitor ?? throw new ArgumentNullException(nameof(optionsMonitor)); - this.timeProvider = timeProvider ?? TimeProvider.System; - this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); - } - - public bool Enabled - { - get - { - var options = optionsMonitor.CurrentValue.PolicyEngine.Dpop; - return options.Enabled; - } - } - - public string CreateProof(HttpMethod method, Uri targetUri, string? 
accessToken) - { - ArgumentNullException.ThrowIfNull(method); - ArgumentNullException.ThrowIfNull(targetUri); - - if (!Enabled) - { - throw new InvalidOperationException("DPoP proof requested while DPoP is disabled."); - } - - var material = GetOrLoadKeyMaterial(); - var header = CreateHeader(material); - var payload = CreatePayload(method, targetUri, accessToken); - - var jwt = new JwtSecurityToken(header, payload); - var handler = new JwtSecurityTokenHandler(); - return handler.WriteToken(jwt); - } - - private JwtHeader CreateHeader(DpopKeyMaterial material) - { - var header = new JwtHeader(new SigningCredentials(material.SecurityKey, material.SigningAlgorithm)); - header["typ"] = "dpop+jwt"; - header["jwk"] = new Dictionary<string, object> - { - ["kty"] = material.Jwk.Kty, - ["crv"] = material.Jwk.Crv, - ["x"] = material.Jwk.X, - ["y"] = material.Jwk.Y, - ["kid"] = material.Jwk.Kid - }; - return header; - } - - private JwtPayload CreatePayload(HttpMethod method, Uri targetUri, string? accessToken) - { - var now = timeProvider.GetUtcNow(); - var epochSeconds = (long)Math.Floor((now - DateTimeOffset.UnixEpoch).TotalSeconds); - var payload = new JwtPayload - { - ["htm"] = method.Method.ToUpperInvariant(), - ["htu"] = NormalizeTarget(targetUri), - ["iat"] = epochSeconds, - ["jti"] = Guid.NewGuid().ToString("N") - }; - - if (!string.IsNullOrWhiteSpace(accessToken)) - { - var hash = SHA256.HashData(Encoding.UTF8.GetBytes(accessToken)); - payload["ath"] = Base64UrlEncoder.Encode(hash); - } - - return payload; - } - - private static string NormalizeTarget(Uri uri) - { - if (!uri.IsAbsoluteUri) - { - throw new InvalidOperationException("DPoP proofs require absolute target URIs."); - } - - return uri.GetComponents(UriComponents.SchemeAndServer | UriComponents.PathAndQuery, UriFormat.UriEscaped); - } - - private DpopKeyMaterial GetOrLoadKeyMaterial() - { - if (keyMaterial is not null) - { - return keyMaterial; - } - - lock (sync) - { - if (keyMaterial is not null) - { - return keyMaterial; - } - - var options = optionsMonitor.CurrentValue.PolicyEngine.Dpop; - if (!options.Enabled) - { - throw new InvalidOperationException("DPoP is not enabled in the current configuration."); - } - - var resolvedPath = ResolveKeyPath(options.KeyPath); - if (!File.Exists(resolvedPath)) - { - throw new FileNotFoundException($"DPoP key file not found at '{resolvedPath}'.", resolvedPath); - } - - var pem = File.ReadAllText(resolvedPath); - ECDsa ecdsa; - try - { - ecdsa = ECDsa.Create(); - if (!string.IsNullOrWhiteSpace(options.KeyPassphrase)) - { - ecdsa.ImportFromEncryptedPem(pem, options.KeyPassphrase); - } - else - { - ecdsa.ImportFromPem(pem); - } - } - catch (Exception ex) - { - throw new InvalidOperationException("Failed to load DPoP private key.", ex); - } - - var securityKey = new ECDsaSecurityKey(ecdsa) - { - KeyId = ComputeKeyId(ecdsa) - }; - - var jwk = JsonWebKeyConverter.ConvertFromECDsaSecurityKey(securityKey); - jwk.Kid ??= securityKey.KeyId; - - keyMaterial = new DpopKeyMaterial(ecdsa, securityKey, jwk, MapAlgorithm(options.Algorithm)); - logger.LogInformation("Loaded DPoP key from {Path} (alg: {Algorithm}).", resolvedPath, options.Algorithm); - return keyMaterial; - } - } - - private string ResolveKeyPath(string path) - { - if (Path.IsPathRooted(path)) - { - return path; - } - - return Path.GetFullPath(Path.Combine(hostEnvironment.ContentRootPath, path)); - } - - private static string ComputeKeyId(ECDsa ecdsa) - { - var parameters = ecdsa.ExportParameters(includePrivateParameters: false); - var buffer 
= new byte[(parameters.Q.X?.Length ?? 0) + (parameters.Q.Y?.Length ?? 0)]; - var offset = 0; - if (parameters.Q.X is not null) - { - Buffer.BlockCopy(parameters.Q.X, 0, buffer, offset, parameters.Q.X.Length); - offset += parameters.Q.X.Length; - } - - if (parameters.Q.Y is not null) - { - Buffer.BlockCopy(parameters.Q.Y, 0, buffer, offset, parameters.Q.Y.Length); - } - - var hash = SHA256.HashData(buffer); - return Base64UrlEncoder.Encode(hash); - } - - private static string MapAlgorithm(string algorithm) - => algorithm switch - { - "ES256" => SecurityAlgorithms.EcdsaSha256, - "ES384" => SecurityAlgorithms.EcdsaSha384, - _ => throw new InvalidOperationException($"Unsupported DPoP signing algorithm '{algorithm}'.") - }; - - public void Dispose() - { - if (keyMaterial is { } material) - { - material.Dispose(); - } - } - - private sealed class DpopKeyMaterial : IDisposable - { - public DpopKeyMaterial(ECDsa ecdsa, ECDsaSecurityKey securityKey, JsonWebKey jwk, string signingAlgorithm) - { - Ecdsa = ecdsa; - SecurityKey = securityKey; - Jwk = jwk; - SigningAlgorithm = signingAlgorithm; - } - - public ECDsa Ecdsa { get; } - public ECDsaSecurityKey SecurityKey { get; } - public JsonWebKey Jwk { get; } - public string SigningAlgorithm { get; } - - public void Dispose() - { - Ecdsa.Dispose(); - } - } -} +using System; +using System.Collections.Generic; +using System.Security.Cryptography; +using System.Text; +using System.IO; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using Microsoft.IdentityModel.Tokens; +using System.IdentityModel.Tokens.Jwt; +using StellaOps.Policy.Gateway.Options; + +namespace StellaOps.Policy.Gateway.Services; + +internal sealed class PolicyGatewayDpopProofGenerator : IDisposable +{ + private readonly IHostEnvironment hostEnvironment; + private readonly IOptionsMonitor<PolicyGatewayOptions> optionsMonitor; + private readonly TimeProvider timeProvider; + private readonly ILogger<PolicyGatewayDpopProofGenerator> logger; + private DpopKeyMaterial? keyMaterial; + private readonly object sync = new(); + + public PolicyGatewayDpopProofGenerator( + IHostEnvironment hostEnvironment, + IOptionsMonitor<PolicyGatewayOptions> optionsMonitor, + TimeProvider timeProvider, + ILogger<PolicyGatewayDpopProofGenerator> logger) + { + this.hostEnvironment = hostEnvironment ?? throw new ArgumentNullException(nameof(hostEnvironment)); + this.optionsMonitor = optionsMonitor ?? throw new ArgumentNullException(nameof(optionsMonitor)); + this.timeProvider = timeProvider ?? TimeProvider.System; + this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public bool Enabled + { + get + { + var options = optionsMonitor.CurrentValue.PolicyEngine.Dpop; + return options.Enabled; + } + } + + public string CreateProof(HttpMethod method, Uri targetUri, string? 
accessToken) + { + ArgumentNullException.ThrowIfNull(method); + ArgumentNullException.ThrowIfNull(targetUri); + + if (!Enabled) + { + throw new InvalidOperationException("DPoP proof requested while DPoP is disabled."); + } + + var material = GetOrLoadKeyMaterial(); + var header = CreateHeader(material); + var payload = CreatePayload(method, targetUri, accessToken); + + var jwt = new JwtSecurityToken(header, payload); + var handler = new JwtSecurityTokenHandler(); + return handler.WriteToken(jwt); + } + + private JwtHeader CreateHeader(DpopKeyMaterial material) + { + var header = new JwtHeader(new SigningCredentials(material.SecurityKey, material.SigningAlgorithm)); + header["typ"] = "dpop+jwt"; + header["jwk"] = new Dictionary<string, object> + { + ["kty"] = material.Jwk.Kty, + ["crv"] = material.Jwk.Crv, + ["x"] = material.Jwk.X, + ["y"] = material.Jwk.Y, + ["kid"] = material.Jwk.Kid + }; + return header; + } + + private JwtPayload CreatePayload(HttpMethod method, Uri targetUri, string? accessToken) + { + var now = timeProvider.GetUtcNow(); + var epochSeconds = (long)Math.Floor((now - DateTimeOffset.UnixEpoch).TotalSeconds); + var payload = new JwtPayload + { + ["htm"] = method.Method.ToUpperInvariant(), + ["htu"] = NormalizeTarget(targetUri), + ["iat"] = epochSeconds, + ["jti"] = Guid.NewGuid().ToString("N") + }; + + if (!string.IsNullOrWhiteSpace(accessToken)) + { + var hash = SHA256.HashData(Encoding.UTF8.GetBytes(accessToken)); + payload["ath"] = Base64UrlEncoder.Encode(hash); + } + + return payload; + } + + private static string NormalizeTarget(Uri uri) + { + if (!uri.IsAbsoluteUri) + { + throw new InvalidOperationException("DPoP proofs require absolute target URIs."); + } + + return uri.GetComponents(UriComponents.SchemeAndServer | UriComponents.PathAndQuery, UriFormat.UriEscaped); + } + + private DpopKeyMaterial GetOrLoadKeyMaterial() + { + if (keyMaterial is not null) + { + return keyMaterial; + } + + lock (sync) + { + if (keyMaterial is not null) + { + return keyMaterial; + } + + var options = optionsMonitor.CurrentValue.PolicyEngine.Dpop; + if (!options.Enabled) + { + throw new InvalidOperationException("DPoP is not enabled in the current configuration."); + } + + var resolvedPath = ResolveKeyPath(options.KeyPath); + if (!File.Exists(resolvedPath)) + { + throw new FileNotFoundException($"DPoP key file not found at '{resolvedPath}'.", resolvedPath); + } + + var pem = File.ReadAllText(resolvedPath); + ECDsa ecdsa; + try + { + ecdsa = ECDsa.Create(); + if (!string.IsNullOrWhiteSpace(options.KeyPassphrase)) + { + ecdsa.ImportFromEncryptedPem(pem, options.KeyPassphrase); + } + else + { + ecdsa.ImportFromPem(pem); + } + } + catch (Exception ex) + { + throw new InvalidOperationException("Failed to load DPoP private key.", ex); + } + + var securityKey = new ECDsaSecurityKey(ecdsa) + { + KeyId = ComputeKeyId(ecdsa) + }; + + var jwk = JsonWebKeyConverter.ConvertFromECDsaSecurityKey(securityKey); + jwk.Kid ??= securityKey.KeyId; + + keyMaterial = new DpopKeyMaterial(ecdsa, securityKey, jwk, MapAlgorithm(options.Algorithm)); + logger.LogInformation("Loaded DPoP key from {Path} (alg: {Algorithm}).", resolvedPath, options.Algorithm); + return keyMaterial; + } + } + + private string ResolveKeyPath(string path) + { + if (Path.IsPathRooted(path)) + { + return path; + } + + return Path.GetFullPath(Path.Combine(hostEnvironment.ContentRootPath, path)); + } + + private static string ComputeKeyId(ECDsa ecdsa) + { + var parameters = ecdsa.ExportParameters(includePrivateParameters: false); + var buffer 
= new byte[(parameters.Q.X?.Length ?? 0) + (parameters.Q.Y?.Length ?? 0)]; + var offset = 0; + if (parameters.Q.X is not null) + { + Buffer.BlockCopy(parameters.Q.X, 0, buffer, offset, parameters.Q.X.Length); + offset += parameters.Q.X.Length; + } + + if (parameters.Q.Y is not null) + { + Buffer.BlockCopy(parameters.Q.Y, 0, buffer, offset, parameters.Q.Y.Length); + } + + var hash = SHA256.HashData(buffer); + return Base64UrlEncoder.Encode(hash); + } + + private static string MapAlgorithm(string algorithm) + => algorithm switch + { + "ES256" => SecurityAlgorithms.EcdsaSha256, + "ES384" => SecurityAlgorithms.EcdsaSha384, + _ => throw new InvalidOperationException($"Unsupported DPoP signing algorithm '{algorithm}'.") + }; + + public void Dispose() + { + if (keyMaterial is { } material) + { + material.Dispose(); + } + } + + private sealed class DpopKeyMaterial : IDisposable + { + public DpopKeyMaterial(ECDsa ecdsa, ECDsaSecurityKey securityKey, JsonWebKey jwk, string signingAlgorithm) + { + Ecdsa = ecdsa; + SecurityKey = securityKey; + Jwk = jwk; + SigningAlgorithm = signingAlgorithm; + } + + public ECDsa Ecdsa { get; } + public ECDsaSecurityKey SecurityKey { get; } + public JsonWebKey Jwk { get; } + public string SigningAlgorithm { get; } + + public void Dispose() + { + Ecdsa.Dispose(); + } + } +} diff --git a/src/StellaOps.Policy.Gateway/Services/PolicyGatewayMetrics.cs b/src/Policy/StellaOps.Policy.Gateway/Services/PolicyGatewayMetrics.cs similarity index 97% rename from src/StellaOps.Policy.Gateway/Services/PolicyGatewayMetrics.cs rename to src/Policy/StellaOps.Policy.Gateway/Services/PolicyGatewayMetrics.cs index 1db21a90..6dc3cea8 100644 --- a/src/StellaOps.Policy.Gateway/Services/PolicyGatewayMetrics.cs +++ b/src/Policy/StellaOps.Policy.Gateway/Services/PolicyGatewayMetrics.cs @@ -1,51 +1,51 @@ -using System; -using System.Diagnostics.Metrics; - -namespace StellaOps.Policy.Gateway.Services; - -internal sealed class PolicyGatewayMetrics : IDisposable -{ - private static readonly KeyValuePair<string, object?>[] EmptyTags = Array.Empty<KeyValuePair<string, object?>>(); - - private readonly Meter meter; - - public PolicyGatewayMetrics() - { - meter = new Meter("StellaOps.Policy.Gateway", "1.0.0"); - ActivationRequests = meter.CreateCounter<long>( - "policy_gateway_activation_requests_total", - unit: "count", - description: "Total policy activation proxy requests processed by the gateway."); - ActivationLatencyMs = meter.CreateHistogram<double>( - "policy_gateway_activation_latency_ms", - unit: "ms", - description: "Latency distribution for policy activation proxy calls."); - } - - public Counter<long> ActivationRequests { get; } - - public Histogram<double> ActivationLatencyMs { get; } - - public void RecordActivation(string outcome, string source, double elapsedMilliseconds) - { - var tags = BuildTags(outcome, source); - ActivationRequests.Add(1, tags); - ActivationLatencyMs.Record(elapsedMilliseconds, tags); - } - - private static KeyValuePair<string, object?>[] BuildTags(string outcome, string source) - { - outcome = string.IsNullOrWhiteSpace(outcome) ? "unknown" : outcome; - source = string.IsNullOrWhiteSpace(source) ? 
"unspecified" : source; - return new[] - { - new KeyValuePair<string, object?>("outcome", outcome), - new KeyValuePair<string, object?>("source", source) - }; - } - - public void Dispose() - { - meter.Dispose(); - } -} +using System; +using System.Diagnostics.Metrics; + +namespace StellaOps.Policy.Gateway.Services; + +internal sealed class PolicyGatewayMetrics : IDisposable +{ + private static readonly KeyValuePair<string, object?>[] EmptyTags = Array.Empty<KeyValuePair<string, object?>>(); + + private readonly Meter meter; + + public PolicyGatewayMetrics() + { + meter = new Meter("StellaOps.Policy.Gateway", "1.0.0"); + ActivationRequests = meter.CreateCounter<long>( + "policy_gateway_activation_requests_total", + unit: "count", + description: "Total policy activation proxy requests processed by the gateway."); + ActivationLatencyMs = meter.CreateHistogram<double>( + "policy_gateway_activation_latency_ms", + unit: "ms", + description: "Latency distribution for policy activation proxy calls."); + } + + public Counter<long> ActivationRequests { get; } + + public Histogram<double> ActivationLatencyMs { get; } + + public void RecordActivation(string outcome, string source, double elapsedMilliseconds) + { + var tags = BuildTags(outcome, source); + ActivationRequests.Add(1, tags); + ActivationLatencyMs.Record(elapsedMilliseconds, tags); + } + + private static KeyValuePair<string, object?>[] BuildTags(string outcome, string source) + { + outcome = string.IsNullOrWhiteSpace(outcome) ? "unknown" : outcome; + source = string.IsNullOrWhiteSpace(source) ? "unspecified" : source; + return new[] + { + new KeyValuePair<string, object?>("outcome", outcome), + new KeyValuePair<string, object?>("source", source) + }; + } + + public void Dispose() + { + meter.Dispose(); + } +} diff --git a/src/Policy/StellaOps.Policy.Gateway/StellaOps.Policy.Gateway.csproj b/src/Policy/StellaOps.Policy.Gateway/StellaOps.Policy.Gateway.csproj new file mode 100644 index 00000000..1bc80017 --- /dev/null +++ b/src/Policy/StellaOps.Policy.Gateway/StellaOps.Policy.Gateway.csproj @@ -0,0 +1,23 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk.Web"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <LangVersion>preview</LangVersion> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + <AspNetCoreHostingModel>InProcess</AspNetCoreHostingModel> + </PropertyGroup> + + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Configuration/StellaOps.Configuration.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj" /> + <ProjectReference Include="../../Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOps.Auth.Abstractions.csproj" /> + <ProjectReference Include="../../Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOps.Auth.Client.csproj" /> + <ProjectReference Include="../../Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOps.Auth.ServerIntegration.csproj" /> + </ItemGroup> + <ItemGroup> + <PackageReference Include="Microsoft.Extensions.Http.Polly" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="System.IdentityModel.Tokens.Jwt" Version="8.14.0" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Policy.Registry/AGENTS.md b/src/Policy/StellaOps.Policy.Registry/AGENTS.md similarity index 76% rename from src/StellaOps.Policy.Registry/AGENTS.md rename 
to src/Policy/StellaOps.Policy.Registry/AGENTS.md index 3ba1b282..3c2c6f0b 100644 --- a/src/StellaOps.Policy.Registry/AGENTS.md +++ b/src/Policy/StellaOps.Policy.Registry/AGENTS.md @@ -4,7 +4,7 @@ Stand up and operate the Policy Registry service defined in Epic 4. We own workspace storage, version immutability, simulation orchestration metadata, attestations, and RBAC enforcement for the policy lifecycle. ## Scope -- Service source under `src/StellaOps.Policy.Registry` (REST API, workers, storage schemas). +- Service source under `src/Policy/StellaOps.Policy.Registry` (REST API, workers, storage schemas). - Mongo models, migrations, and object storage bindings for policy workspaces, versions, reviews, promotions, simulations. - Integration with Policy Engine, Scheduler, Authority, Web Gateway, Telemetry. - Attestation signing pipeline, evidence bundle management, and retention policies. @@ -17,9 +17,9 @@ Stand up and operate the Policy Registry service defined in Epic 4. We own works 5. **Auditable** – Every transition emits structured events with actor, scope, digest, attestation IDs. ## Collaboration -- Keep `src/StellaOps.Policy.Registry/TASKS.md`, `SPRINTS.md` synchronized. -- Coordinate API contracts with Policy Engine (`src/StellaOps.Policy.Engine`), Web Gateway (`src/StellaOps.Web`), Console (`/console`), CLI (`src/StellaOps.Cli`), and Docs. -- Publish or update OpenAPI specs under `src/StellaOps.Policy.Registry/openapi/` and hand them to client teams. +- Keep `src/Policy/StellaOps.Policy.Registry/TASKS.md`, `../../docs/implplan/SPRINTS.md` synchronized. +- Coordinate API contracts with Policy Engine (`src/Policy/StellaOps.Policy.Engine`), Web Gateway (`src/Web/StellaOps.Web`), Console (`/console`), CLI (`src/Cli/StellaOps.Cli`), and Docs. +- Publish or update OpenAPI specs under `src/Policy/StellaOps.Policy.Registry/openapi/` and hand them to client teams. ## Tooling - .NET 10 preview (minimal API + background workers). diff --git a/src/StellaOps.Policy.Registry/TASKS.md b/src/Policy/StellaOps.Policy.Registry/TASKS.md similarity index 99% rename from src/StellaOps.Policy.Registry/TASKS.md rename to src/Policy/StellaOps.Policy.Registry/TASKS.md index 32e0a2aa..45cfd652 100644 --- a/src/StellaOps.Policy.Registry/TASKS.md +++ b/src/Policy/StellaOps.Policy.Registry/TASKS.md @@ -1,17 +1,17 @@ -# Policy Registry Task Board — Epic 4: Policy Studio -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| REGISTRY-API-27-001 | TODO | Policy Registry Guild | AUTH-CONSOLE-23-001, POLICY-ENGINE-20-001 | Define OpenAPI specification covering workspaces, versions, reviews, simulations, promotions, and attestations; publish typed clients for Console/CLI. | OpenAPI YAML committed, spectral lint passes, SDK regeneration documented, consumers notified. Docs `DOCS-POLICY-27-001/008/010` waiting on this spec. | -| REGISTRY-API-27-002 | TODO | Policy Registry Guild | REGISTRY-API-27-001 | Implement workspace storage (Mongo collections, object storage buckets) with CRUD endpoints, diff history, and retention policies. | Workspace CRUD passes integration tests; retention job documented; tenancy scopes enforced. | -| REGISTRY-API-27-003 | TODO | Policy Registry Guild | REGISTRY-API-27-002, POLICY-ENGINE-20-001 | Integrate compile endpoint: forward source bundle to Policy Engine, persist diagnostics, symbol table, rule index, and complexity metrics. 
| Compile API returns diagnostics + symbol table, metrics recorded, failures mapped to `ERR_POL_*`, tests cover success/error cases. | -| REGISTRY-API-27-004 | TODO | Policy Registry Guild | REGISTRY-API-27-003, POLICY-ENGINE-20-002 | Implement quick simulation API with request limits (sample size, timeouts), returning counts, heatmap, sampled explains. | Quick sim enforces limits, results cached with hash, integration tests validate deterministic output. | -| REGISTRY-API-27-005 | TODO | Policy Registry Guild, Scheduler Guild | REGISTRY-API-27-004, SCHED-WORKER-27-301 | Build batch simulation orchestration: enqueue shards, collect partials, reduce deltas, produce evidence bundles + signed manifest. | Batch sim runs end-to-end in staging fixture, manifests stored with checksums, retries/backoff documented. | -> Docs dependency: `DOCS-POLICY-27-004` needs simulation APIs/workers. -| REGISTRY-API-27-006 | TODO | Policy Registry Guild | REGISTRY-API-27-003 | Implement review workflow (comments, votes, required approvers, status transitions) with audit trails and webhooks. | Review endpoints enforce approver quorum, audit log captured, webhook integration tests pass. | -> Docs dependency: `DOCS-POLICY-27-005` waiting on review workflow. -| REGISTRY-API-27-007 | TODO | Policy Registry Guild, Security Guild | REGISTRY-API-27-006, AUTH-POLICY-27-001 | Implement publish pipeline: sign source/compiled digests, create attestations, mark version immutable, emit events. | Published versions immutable, attestations stored & verifiable, metrics/logs emitted, tests cover signing failure. | -> Docs dependency: `DOCS-POLICY-27-003` blocked until publish/sign pipeline ships. -| REGISTRY-API-27-008 | TODO | Policy Registry Guild | REGISTRY-API-27-007, AUTH-POLICY-27-002 | Implement promotion bindings per tenant/environment with canary subsets, rollback path, and environment history. | Promotion API updates bindings atomically, canary percent enforced, rollback recorded, runbooks updated. | -> Docs dependency: `DOCS-POLICY-27-006` requires promotion APIs. -| REGISTRY-API-27-009 | TODO | Policy Registry Guild, Observability Guild | REGISTRY-API-27-002..008 | Instrument metrics/logs/traces (compile time, diagnostics rate, sim queue depth, approval latency) and expose dashboards. | Metrics registered, dashboards seeded, alerts configured, documentation updated. | -| REGISTRY-API-27-010 | TODO | Policy Registry Guild, QA Guild | REGISTRY-API-27-002..008 | Build unit/integration/load test suites for compile/sim/review/publish/promote flows; provide seeded fixtures for CI. | Tests run in CI, load test report documented, determinism checks validated across runs. | +# Policy Registry Task Board — Epic 4: Policy Studio +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| REGISTRY-API-27-001 | TODO | Policy Registry Guild | AUTH-CONSOLE-23-001, POLICY-ENGINE-20-001 | Define OpenAPI specification covering workspaces, versions, reviews, simulations, promotions, and attestations; publish typed clients for Console/CLI. | OpenAPI YAML committed, spectral lint passes, SDK regeneration documented, consumers notified. Docs `DOCS-POLICY-27-001/008/010` waiting on this spec. | +| REGISTRY-API-27-002 | TODO | Policy Registry Guild | REGISTRY-API-27-001 | Implement workspace storage (Mongo collections, object storage buckets) with CRUD endpoints, diff history, and retention policies. 
| Workspace CRUD passes integration tests; retention job documented; tenancy scopes enforced. | +| REGISTRY-API-27-003 | TODO | Policy Registry Guild | REGISTRY-API-27-002, POLICY-ENGINE-20-001 | Integrate compile endpoint: forward source bundle to Policy Engine, persist diagnostics, symbol table, rule index, and complexity metrics. | Compile API returns diagnostics + symbol table, metrics recorded, failures mapped to `ERR_POL_*`, tests cover success/error cases. | +| REGISTRY-API-27-004 | TODO | Policy Registry Guild | REGISTRY-API-27-003, POLICY-ENGINE-20-002 | Implement quick simulation API with request limits (sample size, timeouts), returning counts, heatmap, sampled explains. | Quick sim enforces limits, results cached with hash, integration tests validate deterministic output. | +| REGISTRY-API-27-005 | TODO | Policy Registry Guild, Scheduler Guild | REGISTRY-API-27-004, SCHED-WORKER-27-301 | Build batch simulation orchestration: enqueue shards, collect partials, reduce deltas, produce evidence bundles + signed manifest. | Batch sim runs end-to-end in staging fixture, manifests stored with checksums, retries/backoff documented. | +> Docs dependency: `DOCS-POLICY-27-004` needs simulation APIs/workers. +| REGISTRY-API-27-006 | TODO | Policy Registry Guild | REGISTRY-API-27-003 | Implement review workflow (comments, votes, required approvers, status transitions) with audit trails and webhooks. | Review endpoints enforce approver quorum, audit log captured, webhook integration tests pass. | +> Docs dependency: `DOCS-POLICY-27-005` waiting on review workflow. +| REGISTRY-API-27-007 | TODO | Policy Registry Guild, Security Guild | REGISTRY-API-27-006, AUTH-POLICY-27-001 | Implement publish pipeline: sign source/compiled digests, create attestations, mark version immutable, emit events. | Published versions immutable, attestations stored & verifiable, metrics/logs emitted, tests cover signing failure. | +> Docs dependency: `DOCS-POLICY-27-003` blocked until publish/sign pipeline ships. +| REGISTRY-API-27-008 | TODO | Policy Registry Guild | REGISTRY-API-27-007, AUTH-POLICY-27-002 | Implement promotion bindings per tenant/environment with canary subsets, rollback path, and environment history. | Promotion API updates bindings atomically, canary percent enforced, rollback recorded, runbooks updated. | +> Docs dependency: `DOCS-POLICY-27-006` requires promotion APIs. +| REGISTRY-API-27-009 | TODO | Policy Registry Guild, Observability Guild | REGISTRY-API-27-002..008 | Instrument metrics/logs/traces (compile time, diagnostics rate, sim queue depth, approval latency) and expose dashboards. | Metrics registered, dashboards seeded, alerts configured, documentation updated. | +| REGISTRY-API-27-010 | TODO | Policy Registry Guild, QA Guild | REGISTRY-API-27-002..008 | Build unit/integration/load test suites for compile/sim/review/publish/promote flows; provide seeded fixtures for CI. | Tests run in CI, load test report documented, determinism checks validated across runs. 
| diff --git a/src/StellaOps.Policy.RiskProfile/AGENTS.md b/src/Policy/StellaOps.Policy.RiskProfile/AGENTS.md similarity index 97% rename from src/StellaOps.Policy.RiskProfile/AGENTS.md rename to src/Policy/StellaOps.Policy.RiskProfile/AGENTS.md index 9a1a39a8..7de4719f 100644 --- a/src/StellaOps.Policy.RiskProfile/AGENTS.md +++ b/src/Policy/StellaOps.Policy.RiskProfile/AGENTS.md @@ -1,15 +1,15 @@ -# Risk Profile Schema Guild Charter - -## Mission -Define and maintain the RiskProfile schema, validation rules, inheritance logic, and integration with Policy Engine and Authority scoping. - -## Scope -- JSON Schema definition, validators, and code generation for RiskProfile documents. -- Inheritance/merge engine, content hashing, and signature support. -- Policy store integration, scope selectors, and lifecycle management. -- Tooling for Policy Studio and CLI authoring. - -## Definition of Done -- Schema publishes via `.well-known/risk-profile-schema` with versioning. -- Validators catch conflicts and produce actionable errors. -- Inheritance and overrides deterministic with tests and golden fixtures. +# Risk Profile Schema Guild Charter + +## Mission +Define and maintain the RiskProfile schema, validation rules, inheritance logic, and integration with Policy Engine and Authority scoping. + +## Scope +- JSON Schema definition, validators, and code generation for RiskProfile documents. +- Inheritance/merge engine, content hashing, and signature support. +- Policy store integration, scope selectors, and lifecycle management. +- Tooling for Policy Studio and CLI authoring. + +## Definition of Done +- Schema publishes via `.well-known/risk-profile-schema` with versioning. +- Validators catch conflicts and produce actionable errors. +- Inheritance and overrides deterministic with tests and golden fixtures. diff --git a/src/StellaOps.Policy.RiskProfile/TASKS.md b/src/Policy/StellaOps.Policy.RiskProfile/TASKS.md similarity index 99% rename from src/StellaOps.Policy.RiskProfile/TASKS.md rename to src/Policy/StellaOps.Policy.RiskProfile/TASKS.md index 599efb60..c1752a6b 100644 --- a/src/StellaOps.Policy.RiskProfile/TASKS.md +++ b/src/Policy/StellaOps.Policy.RiskProfile/TASKS.md @@ -1,20 +1,20 @@ -# Risk Profile Schema Task Board — Epic 18: Risk Scoring Profiles - -## Sprint 66 – Schema Foundations -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| POLICY-RISK-66-001 | TODO | Risk Profile Schema Guild | — | Develop initial JSON Schema for RiskProfile (signals, transforms, weights, severity, overrides) with validator stubs. | Schema published; validators unit-tested with positive/negative fixtures. | -| POLICY-RISK-66-002 | TODO | Risk Profile Schema Guild | POLICY-RISK-66-001 | Implement inheritance/merge logic with conflict detection and deterministic content hashing. | Inheritance tests pass; hashes stable; documentation drafted. | - -## Sprint 67 – Policy Store Integration -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| POLICY-RISK-67-001 | TODO | Risk Profile Schema Guild, Policy Engine Guild | POLICY-RISK-66-002 | Integrate profile storage and versioning into Policy Store with lifecycle states (draft/publish/deprecate). | Profiles persisted with status transitions; API returns versioned docs. 
| -| POLICY-RISK-67-002 | TODO | Risk Profile Schema Guild | POLICY-RISK-67-001 | Publish `.well-known/risk-profile-schema` endpoint and CLI validation tooling. | Endpoint returns schema with version metadata; CLI `stella risk profile validate` uses schema. | - -## Sprint 68 – Scope & Overrides -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| POLICY-RISK-68-001 | TODO | Risk Profile Schema Guild, Authority Guild | POLICY-RISK-67-001 | Implement scope selectors, precedence rules, and Authority attachment APIs. | Scope resolution works in tests; conflicts produce clear errors. | -| POLICY-RISK-68-002 | TODO | Risk Profile Schema Guild | POLICY-RISK-66-002 | Add override/adjustment support with audit metadata and validation for conflicting rules. | Overrides validated; golden tests ensure deterministic ordering. | -*** End Task Board *** +# Risk Profile Schema Task Board — Epic 18: Risk Scoring Profiles + +## Sprint 66 – Schema Foundations +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| POLICY-RISK-66-001 | TODO | Risk Profile Schema Guild | — | Develop initial JSON Schema for RiskProfile (signals, transforms, weights, severity, overrides) with validator stubs. | Schema published; validators unit-tested with positive/negative fixtures. | +| POLICY-RISK-66-002 | TODO | Risk Profile Schema Guild | POLICY-RISK-66-001 | Implement inheritance/merge logic with conflict detection and deterministic content hashing. | Inheritance tests pass; hashes stable; documentation drafted. | + +## Sprint 67 – Policy Store Integration +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| POLICY-RISK-67-001 | TODO | Risk Profile Schema Guild, Policy Engine Guild | POLICY-RISK-66-002 | Integrate profile storage and versioning into Policy Store with lifecycle states (draft/publish/deprecate). | Profiles persisted with status transitions; API returns versioned docs. | +| POLICY-RISK-67-002 | TODO | Risk Profile Schema Guild | POLICY-RISK-67-001 | Publish `.well-known/risk-profile-schema` endpoint and CLI validation tooling. | Endpoint returns schema with version metadata; CLI `stella risk profile validate` uses schema. | + +## Sprint 68 – Scope & Overrides +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| POLICY-RISK-68-001 | TODO | Risk Profile Schema Guild, Authority Guild | POLICY-RISK-67-001 | Implement scope selectors, precedence rules, and Authority attachment APIs. | Scope resolution works in tests; conflicts produce clear errors. | +| POLICY-RISK-68-002 | TODO | Risk Profile Schema Guild | POLICY-RISK-66-002 | Add override/adjustment support with audit metadata and validation for conflicting rules. | Overrides validated; golden tests ensure deterministic ordering. 
| +*** End Task Board *** diff --git a/src/Policy/StellaOps.Policy.sln b/src/Policy/StellaOps.Policy.sln new file mode 100644 index 00000000..f06e6115 --- /dev/null +++ b/src/Policy/StellaOps.Policy.sln @@ -0,0 +1,212 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.0.31903.59 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Policy.Engine", "StellaOps.Policy.Engine\StellaOps.Policy.Engine.csproj", "{83716724-0833-4EB4-BD13-7570DB47148E}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "__Libraries", "__Libraries", "{41F15E67-7190-CF23-3BC4-77E87134CADD}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Policy", "__Libraries\StellaOps.Policy\StellaOps.Policy.csproj", "{9C200CFD-2A8F-4CF5-BD33-AB8B06DA7C82}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Configuration", "..\__Libraries\StellaOps.Configuration\StellaOps.Configuration.csproj", "{9F136BAA-6DBF-4FD5-ABD1-2648D1FA47AC}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Authority.Plugins.Abstractions", "..\Authority\StellaOps.Authority\StellaOps.Authority.Plugins.Abstractions\StellaOps.Authority.Plugins.Abstractions.csproj", "{E33561D0-D9C4-42F0-A414-CC6439302E5F}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cryptography", "..\__Libraries\StellaOps.Cryptography\StellaOps.Cryptography.csproj", "{C4F44230-D5FF-425E-BC1B-2ECE59908B59}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Abstractions", "..\Authority\StellaOps.Authority\StellaOps.Auth.Abstractions\StellaOps.Auth.Abstractions.csproj", "{93944DA7-ED8C-466C-90DF-E3522DC49B08}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.DependencyInjection", "..\__Libraries\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj", "{D9E27F55-32F4-42EE-AF96-DCC3B1DACD09}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Client", "..\Authority\StellaOps.Authority\StellaOps.Auth.Client\StellaOps.Auth.Client.csproj", "{45287280-FC03-4233-9012-193F4CE41964}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.ServerIntegration", "..\Authority\StellaOps.Authority\StellaOps.Auth.ServerIntegration\StellaOps.Auth.ServerIntegration.csproj", "{DF8EBB6E-1C72-4AB9-A5BB-3BB9095499CC}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Policy.Gateway", "StellaOps.Policy.Gateway\StellaOps.Policy.Gateway.csproj", "{6B83C5F2-EA81-4723-87EB-99101697B232}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "__Tests", "__Tests", "{56BCE1BF-7CBA-7CE8-203D-A88051F1D642}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Policy.Engine.Tests", "__Tests\StellaOps.Policy.Engine.Tests\StellaOps.Policy.Engine.Tests.csproj", "{478DF014-BF69-41BA-B78A-AAC0918337D8}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Policy.Gateway.Tests", "__Tests\StellaOps.Policy.Gateway.Tests\StellaOps.Policy.Gateway.Tests.csproj", "{77189D88-1CA1-46BD-A9DC-99B2B6EF7D44}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Policy.Tests", "__Tests\StellaOps.Policy.Tests\StellaOps.Policy.Tests.csproj", "{D064D5C1-3311-470C-92A1-41E913125C14}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU 
= Debug|Any CPU + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|Any CPU = Release|Any CPU + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {83716724-0833-4EB4-BD13-7570DB47148E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {83716724-0833-4EB4-BD13-7570DB47148E}.Debug|Any CPU.Build.0 = Debug|Any CPU + {83716724-0833-4EB4-BD13-7570DB47148E}.Debug|x64.ActiveCfg = Debug|Any CPU + {83716724-0833-4EB4-BD13-7570DB47148E}.Debug|x64.Build.0 = Debug|Any CPU + {83716724-0833-4EB4-BD13-7570DB47148E}.Debug|x86.ActiveCfg = Debug|Any CPU + {83716724-0833-4EB4-BD13-7570DB47148E}.Debug|x86.Build.0 = Debug|Any CPU + {83716724-0833-4EB4-BD13-7570DB47148E}.Release|Any CPU.ActiveCfg = Release|Any CPU + {83716724-0833-4EB4-BD13-7570DB47148E}.Release|Any CPU.Build.0 = Release|Any CPU + {83716724-0833-4EB4-BD13-7570DB47148E}.Release|x64.ActiveCfg = Release|Any CPU + {83716724-0833-4EB4-BD13-7570DB47148E}.Release|x64.Build.0 = Release|Any CPU + {83716724-0833-4EB4-BD13-7570DB47148E}.Release|x86.ActiveCfg = Release|Any CPU + {83716724-0833-4EB4-BD13-7570DB47148E}.Release|x86.Build.0 = Release|Any CPU + {9C200CFD-2A8F-4CF5-BD33-AB8B06DA7C82}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {9C200CFD-2A8F-4CF5-BD33-AB8B06DA7C82}.Debug|Any CPU.Build.0 = Debug|Any CPU + {9C200CFD-2A8F-4CF5-BD33-AB8B06DA7C82}.Debug|x64.ActiveCfg = Debug|Any CPU + {9C200CFD-2A8F-4CF5-BD33-AB8B06DA7C82}.Debug|x64.Build.0 = Debug|Any CPU + {9C200CFD-2A8F-4CF5-BD33-AB8B06DA7C82}.Debug|x86.ActiveCfg = Debug|Any CPU + {9C200CFD-2A8F-4CF5-BD33-AB8B06DA7C82}.Debug|x86.Build.0 = Debug|Any CPU + {9C200CFD-2A8F-4CF5-BD33-AB8B06DA7C82}.Release|Any CPU.ActiveCfg = Release|Any CPU + {9C200CFD-2A8F-4CF5-BD33-AB8B06DA7C82}.Release|Any CPU.Build.0 = Release|Any CPU + {9C200CFD-2A8F-4CF5-BD33-AB8B06DA7C82}.Release|x64.ActiveCfg = Release|Any CPU + {9C200CFD-2A8F-4CF5-BD33-AB8B06DA7C82}.Release|x64.Build.0 = Release|Any CPU + {9C200CFD-2A8F-4CF5-BD33-AB8B06DA7C82}.Release|x86.ActiveCfg = Release|Any CPU + {9C200CFD-2A8F-4CF5-BD33-AB8B06DA7C82}.Release|x86.Build.0 = Release|Any CPU + {9F136BAA-6DBF-4FD5-ABD1-2648D1FA47AC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {9F136BAA-6DBF-4FD5-ABD1-2648D1FA47AC}.Debug|Any CPU.Build.0 = Debug|Any CPU + {9F136BAA-6DBF-4FD5-ABD1-2648D1FA47AC}.Debug|x64.ActiveCfg = Debug|Any CPU + {9F136BAA-6DBF-4FD5-ABD1-2648D1FA47AC}.Debug|x64.Build.0 = Debug|Any CPU + {9F136BAA-6DBF-4FD5-ABD1-2648D1FA47AC}.Debug|x86.ActiveCfg = Debug|Any CPU + {9F136BAA-6DBF-4FD5-ABD1-2648D1FA47AC}.Debug|x86.Build.0 = Debug|Any CPU + {9F136BAA-6DBF-4FD5-ABD1-2648D1FA47AC}.Release|Any CPU.ActiveCfg = Release|Any CPU + {9F136BAA-6DBF-4FD5-ABD1-2648D1FA47AC}.Release|Any CPU.Build.0 = Release|Any CPU + {9F136BAA-6DBF-4FD5-ABD1-2648D1FA47AC}.Release|x64.ActiveCfg = Release|Any CPU + {9F136BAA-6DBF-4FD5-ABD1-2648D1FA47AC}.Release|x64.Build.0 = Release|Any CPU + {9F136BAA-6DBF-4FD5-ABD1-2648D1FA47AC}.Release|x86.ActiveCfg = Release|Any CPU + {9F136BAA-6DBF-4FD5-ABD1-2648D1FA47AC}.Release|x86.Build.0 = Release|Any CPU + {E33561D0-D9C4-42F0-A414-CC6439302E5F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E33561D0-D9C4-42F0-A414-CC6439302E5F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E33561D0-D9C4-42F0-A414-CC6439302E5F}.Debug|x64.ActiveCfg = Debug|Any CPU + {E33561D0-D9C4-42F0-A414-CC6439302E5F}.Debug|x64.Build.0 = Debug|Any CPU + {E33561D0-D9C4-42F0-A414-CC6439302E5F}.Debug|x86.ActiveCfg = Debug|Any CPU + {E33561D0-D9C4-42F0-A414-CC6439302E5F}.Debug|x86.Build.0 = Debug|Any 
CPU + {E33561D0-D9C4-42F0-A414-CC6439302E5F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E33561D0-D9C4-42F0-A414-CC6439302E5F}.Release|Any CPU.Build.0 = Release|Any CPU + {E33561D0-D9C4-42F0-A414-CC6439302E5F}.Release|x64.ActiveCfg = Release|Any CPU + {E33561D0-D9C4-42F0-A414-CC6439302E5F}.Release|x64.Build.0 = Release|Any CPU + {E33561D0-D9C4-42F0-A414-CC6439302E5F}.Release|x86.ActiveCfg = Release|Any CPU + {E33561D0-D9C4-42F0-A414-CC6439302E5F}.Release|x86.Build.0 = Release|Any CPU + {C4F44230-D5FF-425E-BC1B-2ECE59908B59}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C4F44230-D5FF-425E-BC1B-2ECE59908B59}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C4F44230-D5FF-425E-BC1B-2ECE59908B59}.Debug|x64.ActiveCfg = Debug|Any CPU + {C4F44230-D5FF-425E-BC1B-2ECE59908B59}.Debug|x64.Build.0 = Debug|Any CPU + {C4F44230-D5FF-425E-BC1B-2ECE59908B59}.Debug|x86.ActiveCfg = Debug|Any CPU + {C4F44230-D5FF-425E-BC1B-2ECE59908B59}.Debug|x86.Build.0 = Debug|Any CPU + {C4F44230-D5FF-425E-BC1B-2ECE59908B59}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C4F44230-D5FF-425E-BC1B-2ECE59908B59}.Release|Any CPU.Build.0 = Release|Any CPU + {C4F44230-D5FF-425E-BC1B-2ECE59908B59}.Release|x64.ActiveCfg = Release|Any CPU + {C4F44230-D5FF-425E-BC1B-2ECE59908B59}.Release|x64.Build.0 = Release|Any CPU + {C4F44230-D5FF-425E-BC1B-2ECE59908B59}.Release|x86.ActiveCfg = Release|Any CPU + {C4F44230-D5FF-425E-BC1B-2ECE59908B59}.Release|x86.Build.0 = Release|Any CPU + {93944DA7-ED8C-466C-90DF-E3522DC49B08}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {93944DA7-ED8C-466C-90DF-E3522DC49B08}.Debug|Any CPU.Build.0 = Debug|Any CPU + {93944DA7-ED8C-466C-90DF-E3522DC49B08}.Debug|x64.ActiveCfg = Debug|Any CPU + {93944DA7-ED8C-466C-90DF-E3522DC49B08}.Debug|x64.Build.0 = Debug|Any CPU + {93944DA7-ED8C-466C-90DF-E3522DC49B08}.Debug|x86.ActiveCfg = Debug|Any CPU + {93944DA7-ED8C-466C-90DF-E3522DC49B08}.Debug|x86.Build.0 = Debug|Any CPU + {93944DA7-ED8C-466C-90DF-E3522DC49B08}.Release|Any CPU.ActiveCfg = Release|Any CPU + {93944DA7-ED8C-466C-90DF-E3522DC49B08}.Release|Any CPU.Build.0 = Release|Any CPU + {93944DA7-ED8C-466C-90DF-E3522DC49B08}.Release|x64.ActiveCfg = Release|Any CPU + {93944DA7-ED8C-466C-90DF-E3522DC49B08}.Release|x64.Build.0 = Release|Any CPU + {93944DA7-ED8C-466C-90DF-E3522DC49B08}.Release|x86.ActiveCfg = Release|Any CPU + {93944DA7-ED8C-466C-90DF-E3522DC49B08}.Release|x86.Build.0 = Release|Any CPU + {D9E27F55-32F4-42EE-AF96-DCC3B1DACD09}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D9E27F55-32F4-42EE-AF96-DCC3B1DACD09}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D9E27F55-32F4-42EE-AF96-DCC3B1DACD09}.Debug|x64.ActiveCfg = Debug|Any CPU + {D9E27F55-32F4-42EE-AF96-DCC3B1DACD09}.Debug|x64.Build.0 = Debug|Any CPU + {D9E27F55-32F4-42EE-AF96-DCC3B1DACD09}.Debug|x86.ActiveCfg = Debug|Any CPU + {D9E27F55-32F4-42EE-AF96-DCC3B1DACD09}.Debug|x86.Build.0 = Debug|Any CPU + {D9E27F55-32F4-42EE-AF96-DCC3B1DACD09}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D9E27F55-32F4-42EE-AF96-DCC3B1DACD09}.Release|Any CPU.Build.0 = Release|Any CPU + {D9E27F55-32F4-42EE-AF96-DCC3B1DACD09}.Release|x64.ActiveCfg = Release|Any CPU + {D9E27F55-32F4-42EE-AF96-DCC3B1DACD09}.Release|x64.Build.0 = Release|Any CPU + {D9E27F55-32F4-42EE-AF96-DCC3B1DACD09}.Release|x86.ActiveCfg = Release|Any CPU + {D9E27F55-32F4-42EE-AF96-DCC3B1DACD09}.Release|x86.Build.0 = Release|Any CPU + {45287280-FC03-4233-9012-193F4CE41964}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {45287280-FC03-4233-9012-193F4CE41964}.Debug|Any CPU.Build.0 = Debug|Any CPU + 
{45287280-FC03-4233-9012-193F4CE41964}.Debug|x64.ActiveCfg = Debug|Any CPU + {45287280-FC03-4233-9012-193F4CE41964}.Debug|x64.Build.0 = Debug|Any CPU + {45287280-FC03-4233-9012-193F4CE41964}.Debug|x86.ActiveCfg = Debug|Any CPU + {45287280-FC03-4233-9012-193F4CE41964}.Debug|x86.Build.0 = Debug|Any CPU + {45287280-FC03-4233-9012-193F4CE41964}.Release|Any CPU.ActiveCfg = Release|Any CPU + {45287280-FC03-4233-9012-193F4CE41964}.Release|Any CPU.Build.0 = Release|Any CPU + {45287280-FC03-4233-9012-193F4CE41964}.Release|x64.ActiveCfg = Release|Any CPU + {45287280-FC03-4233-9012-193F4CE41964}.Release|x64.Build.0 = Release|Any CPU + {45287280-FC03-4233-9012-193F4CE41964}.Release|x86.ActiveCfg = Release|Any CPU + {45287280-FC03-4233-9012-193F4CE41964}.Release|x86.Build.0 = Release|Any CPU + {DF8EBB6E-1C72-4AB9-A5BB-3BB9095499CC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {DF8EBB6E-1C72-4AB9-A5BB-3BB9095499CC}.Debug|Any CPU.Build.0 = Debug|Any CPU + {DF8EBB6E-1C72-4AB9-A5BB-3BB9095499CC}.Debug|x64.ActiveCfg = Debug|Any CPU + {DF8EBB6E-1C72-4AB9-A5BB-3BB9095499CC}.Debug|x64.Build.0 = Debug|Any CPU + {DF8EBB6E-1C72-4AB9-A5BB-3BB9095499CC}.Debug|x86.ActiveCfg = Debug|Any CPU + {DF8EBB6E-1C72-4AB9-A5BB-3BB9095499CC}.Debug|x86.Build.0 = Debug|Any CPU + {DF8EBB6E-1C72-4AB9-A5BB-3BB9095499CC}.Release|Any CPU.ActiveCfg = Release|Any CPU + {DF8EBB6E-1C72-4AB9-A5BB-3BB9095499CC}.Release|Any CPU.Build.0 = Release|Any CPU + {DF8EBB6E-1C72-4AB9-A5BB-3BB9095499CC}.Release|x64.ActiveCfg = Release|Any CPU + {DF8EBB6E-1C72-4AB9-A5BB-3BB9095499CC}.Release|x64.Build.0 = Release|Any CPU + {DF8EBB6E-1C72-4AB9-A5BB-3BB9095499CC}.Release|x86.ActiveCfg = Release|Any CPU + {DF8EBB6E-1C72-4AB9-A5BB-3BB9095499CC}.Release|x86.Build.0 = Release|Any CPU + {6B83C5F2-EA81-4723-87EB-99101697B232}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {6B83C5F2-EA81-4723-87EB-99101697B232}.Debug|Any CPU.Build.0 = Debug|Any CPU + {6B83C5F2-EA81-4723-87EB-99101697B232}.Debug|x64.ActiveCfg = Debug|Any CPU + {6B83C5F2-EA81-4723-87EB-99101697B232}.Debug|x64.Build.0 = Debug|Any CPU + {6B83C5F2-EA81-4723-87EB-99101697B232}.Debug|x86.ActiveCfg = Debug|Any CPU + {6B83C5F2-EA81-4723-87EB-99101697B232}.Debug|x86.Build.0 = Debug|Any CPU + {6B83C5F2-EA81-4723-87EB-99101697B232}.Release|Any CPU.ActiveCfg = Release|Any CPU + {6B83C5F2-EA81-4723-87EB-99101697B232}.Release|Any CPU.Build.0 = Release|Any CPU + {6B83C5F2-EA81-4723-87EB-99101697B232}.Release|x64.ActiveCfg = Release|Any CPU + {6B83C5F2-EA81-4723-87EB-99101697B232}.Release|x64.Build.0 = Release|Any CPU + {6B83C5F2-EA81-4723-87EB-99101697B232}.Release|x86.ActiveCfg = Release|Any CPU + {6B83C5F2-EA81-4723-87EB-99101697B232}.Release|x86.Build.0 = Release|Any CPU + {478DF014-BF69-41BA-B78A-AAC0918337D8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {478DF014-BF69-41BA-B78A-AAC0918337D8}.Debug|Any CPU.Build.0 = Debug|Any CPU + {478DF014-BF69-41BA-B78A-AAC0918337D8}.Debug|x64.ActiveCfg = Debug|Any CPU + {478DF014-BF69-41BA-B78A-AAC0918337D8}.Debug|x64.Build.0 = Debug|Any CPU + {478DF014-BF69-41BA-B78A-AAC0918337D8}.Debug|x86.ActiveCfg = Debug|Any CPU + {478DF014-BF69-41BA-B78A-AAC0918337D8}.Debug|x86.Build.0 = Debug|Any CPU + {478DF014-BF69-41BA-B78A-AAC0918337D8}.Release|Any CPU.ActiveCfg = Release|Any CPU + {478DF014-BF69-41BA-B78A-AAC0918337D8}.Release|Any CPU.Build.0 = Release|Any CPU + {478DF014-BF69-41BA-B78A-AAC0918337D8}.Release|x64.ActiveCfg = Release|Any CPU + {478DF014-BF69-41BA-B78A-AAC0918337D8}.Release|x64.Build.0 = Release|Any CPU + {478DF014-BF69-41BA-B78A-AAC0918337D8}.Release|x86.ActiveCfg = 
Release|Any CPU + {478DF014-BF69-41BA-B78A-AAC0918337D8}.Release|x86.Build.0 = Release|Any CPU + {77189D88-1CA1-46BD-A9DC-99B2B6EF7D44}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {77189D88-1CA1-46BD-A9DC-99B2B6EF7D44}.Debug|Any CPU.Build.0 = Debug|Any CPU + {77189D88-1CA1-46BD-A9DC-99B2B6EF7D44}.Debug|x64.ActiveCfg = Debug|Any CPU + {77189D88-1CA1-46BD-A9DC-99B2B6EF7D44}.Debug|x64.Build.0 = Debug|Any CPU + {77189D88-1CA1-46BD-A9DC-99B2B6EF7D44}.Debug|x86.ActiveCfg = Debug|Any CPU + {77189D88-1CA1-46BD-A9DC-99B2B6EF7D44}.Debug|x86.Build.0 = Debug|Any CPU + {77189D88-1CA1-46BD-A9DC-99B2B6EF7D44}.Release|Any CPU.ActiveCfg = Release|Any CPU + {77189D88-1CA1-46BD-A9DC-99B2B6EF7D44}.Release|Any CPU.Build.0 = Release|Any CPU + {77189D88-1CA1-46BD-A9DC-99B2B6EF7D44}.Release|x64.ActiveCfg = Release|Any CPU + {77189D88-1CA1-46BD-A9DC-99B2B6EF7D44}.Release|x64.Build.0 = Release|Any CPU + {77189D88-1CA1-46BD-A9DC-99B2B6EF7D44}.Release|x86.ActiveCfg = Release|Any CPU + {77189D88-1CA1-46BD-A9DC-99B2B6EF7D44}.Release|x86.Build.0 = Release|Any CPU + {D064D5C1-3311-470C-92A1-41E913125C14}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D064D5C1-3311-470C-92A1-41E913125C14}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D064D5C1-3311-470C-92A1-41E913125C14}.Debug|x64.ActiveCfg = Debug|Any CPU + {D064D5C1-3311-470C-92A1-41E913125C14}.Debug|x64.Build.0 = Debug|Any CPU + {D064D5C1-3311-470C-92A1-41E913125C14}.Debug|x86.ActiveCfg = Debug|Any CPU + {D064D5C1-3311-470C-92A1-41E913125C14}.Debug|x86.Build.0 = Debug|Any CPU + {D064D5C1-3311-470C-92A1-41E913125C14}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D064D5C1-3311-470C-92A1-41E913125C14}.Release|Any CPU.Build.0 = Release|Any CPU + {D064D5C1-3311-470C-92A1-41E913125C14}.Release|x64.ActiveCfg = Release|Any CPU + {D064D5C1-3311-470C-92A1-41E913125C14}.Release|x64.Build.0 = Release|Any CPU + {D064D5C1-3311-470C-92A1-41E913125C14}.Release|x86.ActiveCfg = Release|Any CPU + {D064D5C1-3311-470C-92A1-41E913125C14}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(NestedProjects) = preSolution + {9C200CFD-2A8F-4CF5-BD33-AB8B06DA7C82} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {478DF014-BF69-41BA-B78A-AAC0918337D8} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {77189D88-1CA1-46BD-A9DC-99B2B6EF7D44} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {D064D5C1-3311-470C-92A1-41E913125C14} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + EndGlobalSection +EndGlobal diff --git a/src/StellaOps.Policy/AGENTS.md b/src/Policy/__Libraries/StellaOps.Policy/AGENTS.md similarity index 100% rename from src/StellaOps.Policy/AGENTS.md rename to src/Policy/__Libraries/StellaOps.Policy/AGENTS.md diff --git a/src/StellaOps.Policy/Audit/IPolicyAuditRepository.cs b/src/Policy/__Libraries/StellaOps.Policy/Audit/IPolicyAuditRepository.cs similarity index 100% rename from src/StellaOps.Policy/Audit/IPolicyAuditRepository.cs rename to src/Policy/__Libraries/StellaOps.Policy/Audit/IPolicyAuditRepository.cs diff --git a/src/StellaOps.Policy/Audit/InMemoryPolicyAuditRepository.cs b/src/Policy/__Libraries/StellaOps.Policy/Audit/InMemoryPolicyAuditRepository.cs similarity index 100% rename from src/StellaOps.Policy/Audit/InMemoryPolicyAuditRepository.cs rename to src/Policy/__Libraries/StellaOps.Policy/Audit/InMemoryPolicyAuditRepository.cs diff --git a/src/StellaOps.Policy/PolicyAuditEntry.cs b/src/Policy/__Libraries/StellaOps.Policy/PolicyAuditEntry.cs similarity index 100% rename from 
src/StellaOps.Policy/PolicyAuditEntry.cs rename to src/Policy/__Libraries/StellaOps.Policy/PolicyAuditEntry.cs diff --git a/src/StellaOps.Policy/PolicyBinder.cs b/src/Policy/__Libraries/StellaOps.Policy/PolicyBinder.cs similarity index 100% rename from src/StellaOps.Policy/PolicyBinder.cs rename to src/Policy/__Libraries/StellaOps.Policy/PolicyBinder.cs diff --git a/src/StellaOps.Policy/PolicyDiagnostics.cs b/src/Policy/__Libraries/StellaOps.Policy/PolicyDiagnostics.cs similarity index 100% rename from src/StellaOps.Policy/PolicyDiagnostics.cs rename to src/Policy/__Libraries/StellaOps.Policy/PolicyDiagnostics.cs diff --git a/src/StellaOps.Policy/PolicyDigest.cs b/src/Policy/__Libraries/StellaOps.Policy/PolicyDigest.cs similarity index 100% rename from src/StellaOps.Policy/PolicyDigest.cs rename to src/Policy/__Libraries/StellaOps.Policy/PolicyDigest.cs diff --git a/src/StellaOps.Policy/PolicyDocument.cs b/src/Policy/__Libraries/StellaOps.Policy/PolicyDocument.cs similarity index 100% rename from src/StellaOps.Policy/PolicyDocument.cs rename to src/Policy/__Libraries/StellaOps.Policy/PolicyDocument.cs diff --git a/src/StellaOps.Policy/PolicyEvaluation.cs b/src/Policy/__Libraries/StellaOps.Policy/PolicyEvaluation.cs similarity index 100% rename from src/StellaOps.Policy/PolicyEvaluation.cs rename to src/Policy/__Libraries/StellaOps.Policy/PolicyEvaluation.cs diff --git a/src/StellaOps.Policy/PolicyFinding.cs b/src/Policy/__Libraries/StellaOps.Policy/PolicyFinding.cs similarity index 100% rename from src/StellaOps.Policy/PolicyFinding.cs rename to src/Policy/__Libraries/StellaOps.Policy/PolicyFinding.cs diff --git a/src/StellaOps.Policy/PolicyIssue.cs b/src/Policy/__Libraries/StellaOps.Policy/PolicyIssue.cs similarity index 100% rename from src/StellaOps.Policy/PolicyIssue.cs rename to src/Policy/__Libraries/StellaOps.Policy/PolicyIssue.cs diff --git a/src/StellaOps.Policy/PolicyPreviewModels.cs b/src/Policy/__Libraries/StellaOps.Policy/PolicyPreviewModels.cs similarity index 100% rename from src/StellaOps.Policy/PolicyPreviewModels.cs rename to src/Policy/__Libraries/StellaOps.Policy/PolicyPreviewModels.cs diff --git a/src/StellaOps.Policy/PolicyPreviewService.cs b/src/Policy/__Libraries/StellaOps.Policy/PolicyPreviewService.cs similarity index 100% rename from src/StellaOps.Policy/PolicyPreviewService.cs rename to src/Policy/__Libraries/StellaOps.Policy/PolicyPreviewService.cs diff --git a/src/StellaOps.Policy/PolicySchemaResource.cs b/src/Policy/__Libraries/StellaOps.Policy/PolicySchemaResource.cs similarity index 100% rename from src/StellaOps.Policy/PolicySchemaResource.cs rename to src/Policy/__Libraries/StellaOps.Policy/PolicySchemaResource.cs diff --git a/src/StellaOps.Policy/PolicyScoringConfig.cs b/src/Policy/__Libraries/StellaOps.Policy/PolicyScoringConfig.cs similarity index 100% rename from src/StellaOps.Policy/PolicyScoringConfig.cs rename to src/Policy/__Libraries/StellaOps.Policy/PolicyScoringConfig.cs diff --git a/src/StellaOps.Policy/PolicyScoringConfigBinder.cs b/src/Policy/__Libraries/StellaOps.Policy/PolicyScoringConfigBinder.cs similarity index 100% rename from src/StellaOps.Policy/PolicyScoringConfigBinder.cs rename to src/Policy/__Libraries/StellaOps.Policy/PolicyScoringConfigBinder.cs diff --git a/src/StellaOps.Policy/PolicyScoringConfigDigest.cs b/src/Policy/__Libraries/StellaOps.Policy/PolicyScoringConfigDigest.cs similarity index 100% rename from src/StellaOps.Policy/PolicyScoringConfigDigest.cs rename to 
src/Policy/__Libraries/StellaOps.Policy/PolicyScoringConfigDigest.cs diff --git a/src/StellaOps.Policy/PolicyScoringSchema.cs b/src/Policy/__Libraries/StellaOps.Policy/PolicyScoringSchema.cs similarity index 100% rename from src/StellaOps.Policy/PolicyScoringSchema.cs rename to src/Policy/__Libraries/StellaOps.Policy/PolicyScoringSchema.cs diff --git a/src/StellaOps.Policy/PolicySnapshot.cs b/src/Policy/__Libraries/StellaOps.Policy/PolicySnapshot.cs similarity index 100% rename from src/StellaOps.Policy/PolicySnapshot.cs rename to src/Policy/__Libraries/StellaOps.Policy/PolicySnapshot.cs diff --git a/src/StellaOps.Policy/PolicySnapshotStore.cs b/src/Policy/__Libraries/StellaOps.Policy/PolicySnapshotStore.cs similarity index 100% rename from src/StellaOps.Policy/PolicySnapshotStore.cs rename to src/Policy/__Libraries/StellaOps.Policy/PolicySnapshotStore.cs diff --git a/src/StellaOps.Policy/PolicyUnknownConfidenceConfig.cs b/src/Policy/__Libraries/StellaOps.Policy/PolicyUnknownConfidenceConfig.cs similarity index 100% rename from src/StellaOps.Policy/PolicyUnknownConfidenceConfig.cs rename to src/Policy/__Libraries/StellaOps.Policy/PolicyUnknownConfidenceConfig.cs diff --git a/src/StellaOps.Policy/PolicyValidationCli.cs b/src/Policy/__Libraries/StellaOps.Policy/PolicyValidationCli.cs similarity index 100% rename from src/StellaOps.Policy/PolicyValidationCli.cs rename to src/Policy/__Libraries/StellaOps.Policy/PolicyValidationCli.cs diff --git a/src/StellaOps.Policy/PolicyVerdict.cs b/src/Policy/__Libraries/StellaOps.Policy/PolicyVerdict.cs similarity index 100% rename from src/StellaOps.Policy/PolicyVerdict.cs rename to src/Policy/__Libraries/StellaOps.Policy/PolicyVerdict.cs diff --git a/src/StellaOps.Policy/Schemas/policy-schema@1.json b/src/Policy/__Libraries/StellaOps.Policy/Schemas/policy-schema@1.json similarity index 100% rename from src/StellaOps.Policy/Schemas/policy-schema@1.json rename to src/Policy/__Libraries/StellaOps.Policy/Schemas/policy-schema@1.json diff --git a/src/StellaOps.Policy/Schemas/policy-scoring-default.json b/src/Policy/__Libraries/StellaOps.Policy/Schemas/policy-scoring-default.json similarity index 100% rename from src/StellaOps.Policy/Schemas/policy-scoring-default.json rename to src/Policy/__Libraries/StellaOps.Policy/Schemas/policy-scoring-default.json diff --git a/src/StellaOps.Policy/Schemas/policy-scoring-schema@1.json b/src/Policy/__Libraries/StellaOps.Policy/Schemas/policy-scoring-schema@1.json similarity index 100% rename from src/StellaOps.Policy/Schemas/policy-scoring-schema@1.json rename to src/Policy/__Libraries/StellaOps.Policy/Schemas/policy-scoring-schema@1.json diff --git a/src/StellaOps.Policy/StellaOps.Policy.csproj b/src/Policy/__Libraries/StellaOps.Policy/StellaOps.Policy.csproj similarity index 97% rename from src/StellaOps.Policy/StellaOps.Policy.csproj rename to src/Policy/__Libraries/StellaOps.Policy/StellaOps.Policy.csproj index b1d74ac2..01548206 100644 --- a/src/StellaOps.Policy/StellaOps.Policy.csproj +++ b/src/Policy/__Libraries/StellaOps.Policy/StellaOps.Policy.csproj @@ -1,22 +1,22 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <LangVersion>preview</LangVersion> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - - <ItemGroup> - <PackageReference Include="System.CommandLine" Version="2.0.0-beta5.25306.1" /> - <PackageReference Include="YamlDotNet" Version="13.7.1" /> - 
<PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="JsonSchema.Net" Version="5.3.0" /> - </ItemGroup> - - <ItemGroup> - <EmbeddedResource Include="Schemas\policy-schema@1.json" /> - <EmbeddedResource Include="Schemas\policy-scoring-default.json" /> - <EmbeddedResource Include="Schemas\policy-scoring-schema@1.json" /> - </ItemGroup> -</Project> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <LangVersion>preview</LangVersion> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + + <ItemGroup> + <PackageReference Include="System.CommandLine" Version="2.0.0-beta5.25306.1" /> + <PackageReference Include="YamlDotNet" Version="13.7.1" /> + <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="JsonSchema.Net" Version="5.3.0" /> + </ItemGroup> + + <ItemGroup> + <EmbeddedResource Include="Schemas\policy-schema@1.json" /> + <EmbeddedResource Include="Schemas\policy-scoring-default.json" /> + <EmbeddedResource Include="Schemas\policy-scoring-schema@1.json" /> + </ItemGroup> +</Project> diff --git a/src/StellaOps.Policy/Storage/IPolicySnapshotRepository.cs b/src/Policy/__Libraries/StellaOps.Policy/Storage/IPolicySnapshotRepository.cs similarity index 100% rename from src/StellaOps.Policy/Storage/IPolicySnapshotRepository.cs rename to src/Policy/__Libraries/StellaOps.Policy/Storage/IPolicySnapshotRepository.cs diff --git a/src/StellaOps.Policy/Storage/InMemoryPolicySnapshotRepository.cs b/src/Policy/__Libraries/StellaOps.Policy/Storage/InMemoryPolicySnapshotRepository.cs similarity index 100% rename from src/StellaOps.Policy/Storage/InMemoryPolicySnapshotRepository.cs rename to src/Policy/__Libraries/StellaOps.Policy/Storage/InMemoryPolicySnapshotRepository.cs diff --git a/src/StellaOps.Policy/TASKS.md b/src/Policy/__Libraries/StellaOps.Policy/TASKS.md similarity index 97% rename from src/StellaOps.Policy/TASKS.md rename to src/Policy/__Libraries/StellaOps.Policy/TASKS.md index dfae9029..ef272f25 100644 --- a/src/StellaOps.Policy/TASKS.md +++ b/src/Policy/__Libraries/StellaOps.Policy/TASKS.md @@ -6,7 +6,7 @@ | POLICY-AOC-19-003 | TODO | Policy Guild | CONCELIER-CORE-AOC-19-004, EXCITITOR-CORE-AOC-19-004 | Update readers/processors to consume only `content.raw`, `identifiers`, and `linkset`. Remove dependencies on legacy normalized fields and refresh fixtures. | All policy pipelines pass tests using raw inputs; fixture diff shows no derived data persisted in ingestion; docs updated. | | POLICY-AOC-19-004 | TODO | Policy Guild, QA Guild | POLICY-AOC-19-003 | Add regression tests ensuring policy derived outputs remain deterministic when ingesting revised raw docs (supersedes) and when violations occur. | Determinism suite passes; new fixtures prove policy recomputation handles append-only raw data and surfaces guard violations. | -> Epic 2 service implementation tasks now live under `src/StellaOps.Policy.Engine/TASKS.md`. Keep library-specific work in this file. +> Epic 2 service implementation tasks now live under `src/Policy/StellaOps.Policy.Engine/TASKS.md`. Keep library-specific work in this file. 
## Policy Engine + Editor v1 (Epic 5) diff --git a/src/StellaOps.Policy.Engine.Tests/PolicyCompilerTests.cs b/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/PolicyCompilerTests.cs similarity index 97% rename from src/StellaOps.Policy.Engine.Tests/PolicyCompilerTests.cs rename to src/Policy/__Tests/StellaOps.Policy.Engine.Tests/PolicyCompilerTests.cs index b2a1fb44..7b3322cb 100644 --- a/src/StellaOps.Policy.Engine.Tests/PolicyCompilerTests.cs +++ b/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/PolicyCompilerTests.cs @@ -1,104 +1,104 @@ -using System.Collections.Immutable; -using System.Linq; -using StellaOps.Policy; -using StellaOps.Policy.Engine.Compilation; -using Xunit; -using Xunit.Sdk; - -namespace StellaOps.Policy.Engine.Tests; - -public sealed class PolicyCompilerTests -{ - [Fact] - public void Compile_BaselinePolicy_Succeeds() - { - const string source = """ - policy "Baseline Production Policy" syntax "stella-dsl@1" { - metadata { - description = "Block critical, escalate high, enforce VEX justifications." - tags = ["baseline","production"] - } - - profile severity { - map vendor_weight { - source "GHSA" => +0.5 - source "OSV" => +0.0 - } - env exposure_adjustments { - if env.exposure == "internet" then +0.5 - } - } - - rule block_critical priority 5 { - when severity.normalized >= "Critical" - then status := "blocked" - because "Critical severity must be remediated before deploy." - } - - rule escalate_high_internet { - when severity.normalized == "High" - and env.exposure == "internet" - then escalate to severity_band("Critical") - because "High severity on internet-exposed asset escalates to critical." - } - - rule require_vex_justification { - when vex.any(status in ["not_affected","fixed"]) - and vex.justification in ["component_not_present","vulnerable_code_not_present"] - then status := vex.status - annotate winning_statement := vex.latest().statementId - because "Respect strong vendor VEX claims." - } - - rule alert_warn_eol_runtime priority 1 { - when severity.normalized <= "Medium" - and sbom.has_tag("runtime:eol") - then warn message "Runtime marked as EOL; upgrade recommended." - because "Deprecated runtime should be upgraded." 
- } - } - """; - - var compiler = new PolicyCompiler(); - var result = compiler.Compile(source); - - if (!result.Success) - { - throw new Xunit.Sdk.XunitException($"Compilation failed: {Describe(result.Diagnostics)}"); - } - Assert.False(string.IsNullOrWhiteSpace(result.Checksum)); - Assert.NotEmpty(result.CanonicalRepresentation); - Assert.All(result.Diagnostics, issue => Assert.NotEqual(PolicyIssueSeverity.Error, issue.Severity)); - - var document = Assert.IsType<PolicyIrDocument>(result.Document); - Assert.Equal("Baseline Production Policy", document.Name); - Assert.Equal("stella-dsl@1", document.Syntax); - Assert.Equal(4, document.Rules.Length); - Assert.Single(document.Profiles); - var firstAction = Assert.IsType<PolicyIrAssignmentAction>(document.Rules[0].ThenActions[0]); - Assert.Equal("status", firstAction.Target[0]); - } - - [Fact] - public void Compile_MissingBecause_ReportsDiagnostic() - { - const string source = """ - policy "Incomplete" syntax "stella-dsl@1" { - rule missing_because { - when true - then status := "suppressed" - } - } - """; - - var compiler = new PolicyCompiler(); - var result = compiler.Compile(source); - - Assert.False(result.Success); - PolicyIssue diagnostic = result.Diagnostics.First(issue => issue.Code == "POLICY-DSL-PARSE-006"); - Assert.Equal(PolicyIssueSeverity.Error, diagnostic.Severity); - } - - private static string Describe(ImmutableArray<PolicyIssue> issues) => - string.Join(" | ", issues.Select(issue => $"{issue.Severity}:{issue.Code}:{issue.Message}")); -} +using System.Collections.Immutable; +using System.Linq; +using StellaOps.Policy; +using StellaOps.Policy.Engine.Compilation; +using Xunit; +using Xunit.Sdk; + +namespace StellaOps.Policy.Engine.Tests; + +public sealed class PolicyCompilerTests +{ + [Fact] + public void Compile_BaselinePolicy_Succeeds() + { + const string source = """ + policy "Baseline Production Policy" syntax "stella-dsl@1" { + metadata { + description = "Block critical, escalate high, enforce VEX justifications." + tags = ["baseline","production"] + } + + profile severity { + map vendor_weight { + source "GHSA" => +0.5 + source "OSV" => +0.0 + } + env exposure_adjustments { + if env.exposure == "internet" then +0.5 + } + } + + rule block_critical priority 5 { + when severity.normalized >= "Critical" + then status := "blocked" + because "Critical severity must be remediated before deploy." + } + + rule escalate_high_internet { + when severity.normalized == "High" + and env.exposure == "internet" + then escalate to severity_band("Critical") + because "High severity on internet-exposed asset escalates to critical." + } + + rule require_vex_justification { + when vex.any(status in ["not_affected","fixed"]) + and vex.justification in ["component_not_present","vulnerable_code_not_present"] + then status := vex.status + annotate winning_statement := vex.latest().statementId + because "Respect strong vendor VEX claims." + } + + rule alert_warn_eol_runtime priority 1 { + when severity.normalized <= "Medium" + and sbom.has_tag("runtime:eol") + then warn message "Runtime marked as EOL; upgrade recommended." + because "Deprecated runtime should be upgraded." 
+ } + } + """; + + var compiler = new PolicyCompiler(); + var result = compiler.Compile(source); + + if (!result.Success) + { + throw new Xunit.Sdk.XunitException($"Compilation failed: {Describe(result.Diagnostics)}"); + } + Assert.False(string.IsNullOrWhiteSpace(result.Checksum)); + Assert.NotEmpty(result.CanonicalRepresentation); + Assert.All(result.Diagnostics, issue => Assert.NotEqual(PolicyIssueSeverity.Error, issue.Severity)); + + var document = Assert.IsType<PolicyIrDocument>(result.Document); + Assert.Equal("Baseline Production Policy", document.Name); + Assert.Equal("stella-dsl@1", document.Syntax); + Assert.Equal(4, document.Rules.Length); + Assert.Single(document.Profiles); + var firstAction = Assert.IsType<PolicyIrAssignmentAction>(document.Rules[0].ThenActions[0]); + Assert.Equal("status", firstAction.Target[0]); + } + + [Fact] + public void Compile_MissingBecause_ReportsDiagnostic() + { + const string source = """ + policy "Incomplete" syntax "stella-dsl@1" { + rule missing_because { + when true + then status := "suppressed" + } + } + """; + + var compiler = new PolicyCompiler(); + var result = compiler.Compile(source); + + Assert.False(result.Success); + PolicyIssue diagnostic = result.Diagnostics.First(issue => issue.Code == "POLICY-DSL-PARSE-006"); + Assert.Equal(PolicyIssueSeverity.Error, diagnostic.Severity); + } + + private static string Describe(ImmutableArray<PolicyIssue> issues) => + string.Join(" | ", issues.Select(issue => $"{issue.Severity}:{issue.Code}:{issue.Message}")); +} diff --git a/src/StellaOps.Policy.Engine.Tests/PolicyEvaluatorTests.cs b/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/PolicyEvaluatorTests.cs similarity index 97% rename from src/StellaOps.Policy.Engine.Tests/PolicyEvaluatorTests.cs rename to src/Policy/__Tests/StellaOps.Policy.Engine.Tests/PolicyEvaluatorTests.cs index 5186a4f6..c5fe0b66 100644 --- a/src/StellaOps.Policy.Engine.Tests/PolicyEvaluatorTests.cs +++ b/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/PolicyEvaluatorTests.cs @@ -1,291 +1,291 @@ -using System; -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Linq; -using StellaOps.Policy; -using StellaOps.Policy.Engine.Compilation; -using StellaOps.Policy.Engine.Evaluation; -using StellaOps.Policy.Engine.Services; -using Xunit; -using Xunit.Sdk; - -namespace StellaOps.Policy.Engine.Tests; - -public sealed class PolicyEvaluatorTests -{ - private static readonly string BaselinePolicy = """ -policy "Baseline Production Policy" syntax "stella-dsl@1" { - metadata { - description = "Block critical, escalate high, enforce VEX justifications." - tags = ["baseline","production"] - } - - profile severity { - map vendor_weight { - source "GHSA" => +0.5 - source "OSV" => +0.0 - } - env exposure_adjustments { - if env.exposure == "internet" then +0.5 - } - } - - rule block_critical priority 5 { - when severity.normalized >= "Critical" - then status := "blocked" - because "Critical severity must be remediated before deploy." - } - - rule escalate_high_internet { - when severity.normalized == "High" - and env.exposure == "internet" - then escalate to severity_band("Critical") - because "High severity on internet-exposed asset escalates to critical." - } - - rule require_vex_justification { - when vex.any(status in ["not_affected","fixed"]) - and vex.justification in ["component_not_present","vulnerable_code_not_present"] - then status := vex.status - annotate winning_statement := vex.latest().statementId - because "Respect strong vendor VEX claims." 
- } - - rule alert_warn_eol_runtime priority 1 { - when severity.normalized <= "Medium" - and sbom.has_tag("runtime:eol") - then warn message "Runtime marked as EOL; upgrade recommended." - because "Deprecated runtime should be upgraded." - } -} -"""; - - private readonly PolicyCompiler compiler = new(); - private readonly PolicyEvaluationService evaluationService = new(); - - [Fact] - public void Evaluate_BlockCriticalRuleMatches() - { - var document = CompileBaseline(); - var context = CreateContext(severity: "Critical", exposure: "internal"); - - var result = evaluationService.Evaluate(document, context); - - Assert.True(result.Matched); - Assert.Equal("block_critical", result.RuleName); - Assert.Equal("blocked", result.Status); - } - - [Fact] - public void Evaluate_EscalateAdjustsSeverity() - { - var document = CompileBaseline(); - var context = CreateContext(severity: "High", exposure: "internet"); - - var result = evaluationService.Evaluate(document, context); - - Assert.True(result.Matched); - Assert.Equal("escalate_high_internet", result.RuleName); - Assert.Equal("affected", result.Status); - Assert.Equal("Critical", result.Severity); - } - - [Fact] - public void Evaluate_VexOverrideSetsStatusAndAnnotation() - { - var document = CompileBaseline(); - var statements = ImmutableArray.Create( - new PolicyEvaluationVexStatement("not_affected", "component_not_present", "stmt-001")); - var context = CreateContext("Medium", "internal") with - { - Vex = new PolicyEvaluationVexEvidence(statements) - }; - - var result = evaluationService.Evaluate(document, context); - - Assert.True(result.Matched); - Assert.Equal("require_vex_justification", result.RuleName); - Assert.Equal("not_affected", result.Status); - Assert.Equal("stmt-001", result.Annotations["winning_statement"]); - } - - [Fact] - public void Evaluate_WarnRuleEmitsWarning() - { - var document = CompileBaseline(); - var tags = ImmutableHashSet.Create("runtime:eol"); - var context = CreateContext("Medium", "internal") with - { - Sbom = new PolicyEvaluationSbom(tags) - }; - - var result = evaluationService.Evaluate(document, context); - - Assert.True(result.Matched); - Assert.Equal("alert_warn_eol_runtime", result.RuleName); - Assert.Equal("warned", result.Status); - Assert.Contains(result.Warnings, message => message.Contains("EOL", StringComparison.OrdinalIgnoreCase)); - } - - [Fact] - public void Evaluate_ExceptionSuppressesCriticalFinding() - { - var document = CompileBaseline(); - var effect = new PolicyExceptionEffect( - Id: "suppress-critical", - Name: "Critical Break Glass", - Effect: PolicyExceptionEffectType.Suppress, - DowngradeSeverity: null, - RequiredControlId: null, - RoutingTemplate: "secops", - MaxDurationDays: 7, - Description: null); - var scope = PolicyEvaluationExceptionScope.Create(ruleNames: new[] { "block_critical" }); - var instance = new PolicyEvaluationExceptionInstance( - Id: "exc-001", - EffectId: effect.Id, - Scope: scope, - CreatedAt: new DateTimeOffset(2025, 10, 1, 0, 0, 0, TimeSpan.Zero), - Metadata: ImmutableDictionary<string, string>.Empty); - var exceptions = new PolicyEvaluationExceptions( - ImmutableDictionary<string, PolicyExceptionEffect>.Empty.Add(effect.Id, effect), - ImmutableArray.Create(instance)); - var context = CreateContext("Critical", "internal", exceptions); - - var result = evaluationService.Evaluate(document, context); - - Assert.True(result.Matched); - Assert.Equal("block_critical", result.RuleName); - Assert.Equal("suppressed", result.Status); - 
Assert.NotNull(result.AppliedException); - Assert.Equal("exc-001", result.AppliedException!.ExceptionId); - Assert.Equal("suppress-critical", result.AppliedException!.EffectId); - Assert.Equal("blocked", result.AppliedException!.OriginalStatus); - Assert.Equal("suppressed", result.AppliedException!.AppliedStatus); - Assert.Equal("suppressed", result.Annotations["exception.status"]); - } - - [Fact] - public void Evaluate_ExceptionDowngradesSeverity() - { - var document = CompileBaseline(); - var effect = new PolicyExceptionEffect( - Id: "downgrade-internet", - Name: "Downgrade High Internet", - Effect: PolicyExceptionEffectType.Downgrade, - DowngradeSeverity: PolicySeverity.Medium, - RequiredControlId: null, - RoutingTemplate: null, - MaxDurationDays: null, - Description: null); - var scope = PolicyEvaluationExceptionScope.Create( - ruleNames: new[] { "escalate_high_internet" }, - severities: new[] { "High" }, - sources: new[] { "GHSA" }); - var instance = new PolicyEvaluationExceptionInstance( - Id: "exc-200", - EffectId: effect.Id, - Scope: scope, - CreatedAt: new DateTimeOffset(2025, 10, 2, 0, 0, 0, TimeSpan.Zero), - Metadata: ImmutableDictionary<string, string>.Empty); - var exceptions = new PolicyEvaluationExceptions( - ImmutableDictionary<string, PolicyExceptionEffect>.Empty.Add(effect.Id, effect), - ImmutableArray.Create(instance)); - var context = CreateContext("High", "internet", exceptions); - - var result = evaluationService.Evaluate(document, context); - - Assert.True(result.Matched); - Assert.Equal("escalate_high_internet", result.RuleName); - Assert.Equal("affected", result.Status); - Assert.Equal("Medium", result.Severity); - Assert.NotNull(result.AppliedException); - Assert.Equal("Critical", result.AppliedException!.OriginalSeverity); - Assert.Equal("Medium", result.AppliedException!.AppliedSeverity); - Assert.Equal("Medium", result.Annotations["exception.severity"]); - } - - [Fact] - public void Evaluate_MoreSpecificExceptionWins() - { - var document = CompileBaseline(); - var suppressGlobal = new PolicyExceptionEffect( - Id: "suppress-critical-global", - Name: "Global Critical Suppress", - Effect: PolicyExceptionEffectType.Suppress, - DowngradeSeverity: null, - RequiredControlId: null, - RoutingTemplate: null, - MaxDurationDays: null, - Description: null); - var suppressRule = new PolicyExceptionEffect( - Id: "suppress-critical-rule", - Name: "Rule Critical Suppress", - Effect: PolicyExceptionEffectType.Suppress, - DowngradeSeverity: null, - RequiredControlId: null, - RoutingTemplate: null, - MaxDurationDays: null, - Description: null); - - var globalInstance = new PolicyEvaluationExceptionInstance( - Id: "exc-global", - EffectId: suppressGlobal.Id, - Scope: PolicyEvaluationExceptionScope.Create(severities: new[] { "Critical" }), - CreatedAt: new DateTimeOffset(2025, 9, 1, 0, 0, 0, TimeSpan.Zero), - Metadata: ImmutableDictionary<string, string>.Empty); - - var ruleInstance = new PolicyEvaluationExceptionInstance( - Id: "exc-rule", - EffectId: suppressRule.Id, - Scope: PolicyEvaluationExceptionScope.Create( - ruleNames: new[] { "block_critical" }, - severities: new[] { "Critical" }), - CreatedAt: new DateTimeOffset(2025, 10, 5, 0, 0, 0, TimeSpan.Zero), - Metadata: ImmutableDictionary<string, string>.Empty.Add("requestedBy", "alice")); - - var effects = ImmutableDictionary<string, PolicyExceptionEffect>.Empty - .Add(suppressGlobal.Id, suppressGlobal) - .Add(suppressRule.Id, suppressRule); - - var exceptions = new PolicyEvaluationExceptions( - effects, - 
ImmutableArray.Create(globalInstance, ruleInstance)); - - var context = CreateContext("Critical", "internal", exceptions); - - var result = evaluationService.Evaluate(document, context); - - Assert.True(result.Matched); - Assert.Equal("suppressed", result.Status); - Assert.NotNull(result.AppliedException); - Assert.Equal("exc-rule", result.AppliedException!.ExceptionId); - Assert.Equal("Rule Critical Suppress", result.AppliedException!.Metadata["effectName"]); - Assert.Equal("alice", result.AppliedException!.Metadata["requestedBy"]); - Assert.Equal("alice", result.Annotations["exception.meta.requestedBy"]); - } - - private PolicyIrDocument CompileBaseline() - { - var compilation = compiler.Compile(BaselinePolicy); - Assert.True(compilation.Success, Describe(compilation.Diagnostics)); - return Assert.IsType<PolicyIrDocument>(compilation.Document); - } - - private static PolicyEvaluationContext CreateContext(string severity, string exposure, PolicyEvaluationExceptions? exceptions = null) - { - return new PolicyEvaluationContext( - new PolicyEvaluationSeverity(severity), - new PolicyEvaluationEnvironment(new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase) - { - ["exposure"] = exposure - }.ToImmutableDictionary(StringComparer.OrdinalIgnoreCase)), - new PolicyEvaluationAdvisory("GHSA", ImmutableDictionary<string, string>.Empty), - PolicyEvaluationVexEvidence.Empty, - new PolicyEvaluationSbom(ImmutableHashSet<string>.Empty), - exceptions ?? PolicyEvaluationExceptions.Empty); - } - - private static string Describe(ImmutableArray<PolicyIssue> issues) => - string.Join(" | ", issues.Select(issue => $"{issue.Severity}:{issue.Code}:{issue.Message}")); -} +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Linq; +using StellaOps.Policy; +using StellaOps.Policy.Engine.Compilation; +using StellaOps.Policy.Engine.Evaluation; +using StellaOps.Policy.Engine.Services; +using Xunit; +using Xunit.Sdk; + +namespace StellaOps.Policy.Engine.Tests; + +public sealed class PolicyEvaluatorTests +{ + private static readonly string BaselinePolicy = """ +policy "Baseline Production Policy" syntax "stella-dsl@1" { + metadata { + description = "Block critical, escalate high, enforce VEX justifications." + tags = ["baseline","production"] + } + + profile severity { + map vendor_weight { + source "GHSA" => +0.5 + source "OSV" => +0.0 + } + env exposure_adjustments { + if env.exposure == "internet" then +0.5 + } + } + + rule block_critical priority 5 { + when severity.normalized >= "Critical" + then status := "blocked" + because "Critical severity must be remediated before deploy." + } + + rule escalate_high_internet { + when severity.normalized == "High" + and env.exposure == "internet" + then escalate to severity_band("Critical") + because "High severity on internet-exposed asset escalates to critical." + } + + rule require_vex_justification { + when vex.any(status in ["not_affected","fixed"]) + and vex.justification in ["component_not_present","vulnerable_code_not_present"] + then status := vex.status + annotate winning_statement := vex.latest().statementId + because "Respect strong vendor VEX claims." + } + + rule alert_warn_eol_runtime priority 1 { + when severity.normalized <= "Medium" + and sbom.has_tag("runtime:eol") + then warn message "Runtime marked as EOL; upgrade recommended." + because "Deprecated runtime should be upgraded." 
+ } +} +"""; + + private readonly PolicyCompiler compiler = new(); + private readonly PolicyEvaluationService evaluationService = new(); + + [Fact] + public void Evaluate_BlockCriticalRuleMatches() + { + var document = CompileBaseline(); + var context = CreateContext(severity: "Critical", exposure: "internal"); + + var result = evaluationService.Evaluate(document, context); + + Assert.True(result.Matched); + Assert.Equal("block_critical", result.RuleName); + Assert.Equal("blocked", result.Status); + } + + [Fact] + public void Evaluate_EscalateAdjustsSeverity() + { + var document = CompileBaseline(); + var context = CreateContext(severity: "High", exposure: "internet"); + + var result = evaluationService.Evaluate(document, context); + + Assert.True(result.Matched); + Assert.Equal("escalate_high_internet", result.RuleName); + Assert.Equal("affected", result.Status); + Assert.Equal("Critical", result.Severity); + } + + [Fact] + public void Evaluate_VexOverrideSetsStatusAndAnnotation() + { + var document = CompileBaseline(); + var statements = ImmutableArray.Create( + new PolicyEvaluationVexStatement("not_affected", "component_not_present", "stmt-001")); + var context = CreateContext("Medium", "internal") with + { + Vex = new PolicyEvaluationVexEvidence(statements) + }; + + var result = evaluationService.Evaluate(document, context); + + Assert.True(result.Matched); + Assert.Equal("require_vex_justification", result.RuleName); + Assert.Equal("not_affected", result.Status); + Assert.Equal("stmt-001", result.Annotations["winning_statement"]); + } + + [Fact] + public void Evaluate_WarnRuleEmitsWarning() + { + var document = CompileBaseline(); + var tags = ImmutableHashSet.Create("runtime:eol"); + var context = CreateContext("Medium", "internal") with + { + Sbom = new PolicyEvaluationSbom(tags) + }; + + var result = evaluationService.Evaluate(document, context); + + Assert.True(result.Matched); + Assert.Equal("alert_warn_eol_runtime", result.RuleName); + Assert.Equal("warned", result.Status); + Assert.Contains(result.Warnings, message => message.Contains("EOL", StringComparison.OrdinalIgnoreCase)); + } + + [Fact] + public void Evaluate_ExceptionSuppressesCriticalFinding() + { + var document = CompileBaseline(); + var effect = new PolicyExceptionEffect( + Id: "suppress-critical", + Name: "Critical Break Glass", + Effect: PolicyExceptionEffectType.Suppress, + DowngradeSeverity: null, + RequiredControlId: null, + RoutingTemplate: "secops", + MaxDurationDays: 7, + Description: null); + var scope = PolicyEvaluationExceptionScope.Create(ruleNames: new[] { "block_critical" }); + var instance = new PolicyEvaluationExceptionInstance( + Id: "exc-001", + EffectId: effect.Id, + Scope: scope, + CreatedAt: new DateTimeOffset(2025, 10, 1, 0, 0, 0, TimeSpan.Zero), + Metadata: ImmutableDictionary<string, string>.Empty); + var exceptions = new PolicyEvaluationExceptions( + ImmutableDictionary<string, PolicyExceptionEffect>.Empty.Add(effect.Id, effect), + ImmutableArray.Create(instance)); + var context = CreateContext("Critical", "internal", exceptions); + + var result = evaluationService.Evaluate(document, context); + + Assert.True(result.Matched); + Assert.Equal("block_critical", result.RuleName); + Assert.Equal("suppressed", result.Status); + Assert.NotNull(result.AppliedException); + Assert.Equal("exc-001", result.AppliedException!.ExceptionId); + Assert.Equal("suppress-critical", result.AppliedException!.EffectId); + Assert.Equal("blocked", result.AppliedException!.OriginalStatus); + Assert.Equal("suppressed", 
result.AppliedException!.AppliedStatus); + Assert.Equal("suppressed", result.Annotations["exception.status"]); + } + + [Fact] + public void Evaluate_ExceptionDowngradesSeverity() + { + var document = CompileBaseline(); + var effect = new PolicyExceptionEffect( + Id: "downgrade-internet", + Name: "Downgrade High Internet", + Effect: PolicyExceptionEffectType.Downgrade, + DowngradeSeverity: PolicySeverity.Medium, + RequiredControlId: null, + RoutingTemplate: null, + MaxDurationDays: null, + Description: null); + var scope = PolicyEvaluationExceptionScope.Create( + ruleNames: new[] { "escalate_high_internet" }, + severities: new[] { "High" }, + sources: new[] { "GHSA" }); + var instance = new PolicyEvaluationExceptionInstance( + Id: "exc-200", + EffectId: effect.Id, + Scope: scope, + CreatedAt: new DateTimeOffset(2025, 10, 2, 0, 0, 0, TimeSpan.Zero), + Metadata: ImmutableDictionary<string, string>.Empty); + var exceptions = new PolicyEvaluationExceptions( + ImmutableDictionary<string, PolicyExceptionEffect>.Empty.Add(effect.Id, effect), + ImmutableArray.Create(instance)); + var context = CreateContext("High", "internet", exceptions); + + var result = evaluationService.Evaluate(document, context); + + Assert.True(result.Matched); + Assert.Equal("escalate_high_internet", result.RuleName); + Assert.Equal("affected", result.Status); + Assert.Equal("Medium", result.Severity); + Assert.NotNull(result.AppliedException); + Assert.Equal("Critical", result.AppliedException!.OriginalSeverity); + Assert.Equal("Medium", result.AppliedException!.AppliedSeverity); + Assert.Equal("Medium", result.Annotations["exception.severity"]); + } + + [Fact] + public void Evaluate_MoreSpecificExceptionWins() + { + var document = CompileBaseline(); + var suppressGlobal = new PolicyExceptionEffect( + Id: "suppress-critical-global", + Name: "Global Critical Suppress", + Effect: PolicyExceptionEffectType.Suppress, + DowngradeSeverity: null, + RequiredControlId: null, + RoutingTemplate: null, + MaxDurationDays: null, + Description: null); + var suppressRule = new PolicyExceptionEffect( + Id: "suppress-critical-rule", + Name: "Rule Critical Suppress", + Effect: PolicyExceptionEffectType.Suppress, + DowngradeSeverity: null, + RequiredControlId: null, + RoutingTemplate: null, + MaxDurationDays: null, + Description: null); + + var globalInstance = new PolicyEvaluationExceptionInstance( + Id: "exc-global", + EffectId: suppressGlobal.Id, + Scope: PolicyEvaluationExceptionScope.Create(severities: new[] { "Critical" }), + CreatedAt: new DateTimeOffset(2025, 9, 1, 0, 0, 0, TimeSpan.Zero), + Metadata: ImmutableDictionary<string, string>.Empty); + + var ruleInstance = new PolicyEvaluationExceptionInstance( + Id: "exc-rule", + EffectId: suppressRule.Id, + Scope: PolicyEvaluationExceptionScope.Create( + ruleNames: new[] { "block_critical" }, + severities: new[] { "Critical" }), + CreatedAt: new DateTimeOffset(2025, 10, 5, 0, 0, 0, TimeSpan.Zero), + Metadata: ImmutableDictionary<string, string>.Empty.Add("requestedBy", "alice")); + + var effects = ImmutableDictionary<string, PolicyExceptionEffect>.Empty + .Add(suppressGlobal.Id, suppressGlobal) + .Add(suppressRule.Id, suppressRule); + + var exceptions = new PolicyEvaluationExceptions( + effects, + ImmutableArray.Create(globalInstance, ruleInstance)); + + var context = CreateContext("Critical", "internal", exceptions); + + var result = evaluationService.Evaluate(document, context); + + Assert.True(result.Matched); + Assert.Equal("suppressed", result.Status); + 
Assert.NotNull(result.AppliedException); + Assert.Equal("exc-rule", result.AppliedException!.ExceptionId); + Assert.Equal("Rule Critical Suppress", result.AppliedException!.Metadata["effectName"]); + Assert.Equal("alice", result.AppliedException!.Metadata["requestedBy"]); + Assert.Equal("alice", result.Annotations["exception.meta.requestedBy"]); + } + + private PolicyIrDocument CompileBaseline() + { + var compilation = compiler.Compile(BaselinePolicy); + Assert.True(compilation.Success, Describe(compilation.Diagnostics)); + return Assert.IsType<PolicyIrDocument>(compilation.Document); + } + + private static PolicyEvaluationContext CreateContext(string severity, string exposure, PolicyEvaluationExceptions? exceptions = null) + { + return new PolicyEvaluationContext( + new PolicyEvaluationSeverity(severity), + new PolicyEvaluationEnvironment(new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase) + { + ["exposure"] = exposure + }.ToImmutableDictionary(StringComparer.OrdinalIgnoreCase)), + new PolicyEvaluationAdvisory("GHSA", ImmutableDictionary<string, string>.Empty), + PolicyEvaluationVexEvidence.Empty, + new PolicyEvaluationSbom(ImmutableHashSet<string>.Empty), + exceptions ?? PolicyEvaluationExceptions.Empty); + } + + private static string Describe(ImmutableArray<PolicyIssue> issues) => + string.Join(" | ", issues.Select(issue => $"{issue.Severity}:{issue.Code}:{issue.Message}")); +} diff --git a/src/StellaOps.Policy.Engine.Tests/PolicyPackRepositoryTests.cs b/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/PolicyPackRepositoryTests.cs similarity index 98% rename from src/StellaOps.Policy.Engine.Tests/PolicyPackRepositoryTests.cs rename to src/Policy/__Tests/StellaOps.Policy.Engine.Tests/PolicyPackRepositoryTests.cs index 372ffb7f..9413f311 100644 --- a/src/StellaOps.Policy.Engine.Tests/PolicyPackRepositoryTests.cs +++ b/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/PolicyPackRepositoryTests.cs @@ -1,44 +1,44 @@ -using StellaOps.Policy.Engine.Domain; -using StellaOps.Policy.Engine.Services; -using Xunit; - -namespace StellaOps.Policy.Engine.Tests; - -public class PolicyPackRepositoryTests -{ - private readonly InMemoryPolicyPackRepository repository = new(); - - [Fact] - public async Task ActivateRevision_WithSingleApprover_ActivatesImmediately() - { - await repository.CreateAsync("pack-1", "Pack", CancellationToken.None); - await repository.UpsertRevisionAsync("pack-1", 1, requiresTwoPersonApproval: false, PolicyRevisionStatus.Approved, CancellationToken.None); - - var result = await repository.RecordActivationAsync("pack-1", 1, "alice", DateTimeOffset.UtcNow, null, CancellationToken.None); - - Assert.Equal(PolicyActivationResultStatus.Activated, result.Status); - Assert.NotNull(result.Revision); - Assert.Equal(PolicyRevisionStatus.Active, result.Revision!.Status); - Assert.Single(result.Revision.Approvals); - } - - [Fact] - public async Task ActivateRevision_WithTwoPersonRequirement_ReturnsPendingUntilSecondApproval() - { - await repository.CreateAsync("pack-2", "Pack", CancellationToken.None); - await repository.UpsertRevisionAsync("pack-2", 1, requiresTwoPersonApproval: true, PolicyRevisionStatus.Approved, CancellationToken.None); - - var first = await repository.RecordActivationAsync("pack-2", 1, "alice", DateTimeOffset.UtcNow, null, CancellationToken.None); - Assert.Equal(PolicyActivationResultStatus.PendingSecondApproval, first.Status); - Assert.Equal(PolicyRevisionStatus.Approved, first.Revision!.Status); - Assert.Single(first.Revision.Approvals); - - var duplicate = 
await repository.RecordActivationAsync("pack-2", 1, "alice", DateTimeOffset.UtcNow, null, CancellationToken.None); - Assert.Equal(PolicyActivationResultStatus.DuplicateApproval, duplicate.Status); - - var second = await repository.RecordActivationAsync("pack-2", 1, "bob", DateTimeOffset.UtcNow, null, CancellationToken.None); - Assert.Equal(PolicyActivationResultStatus.Activated, second.Status); - Assert.Equal(PolicyRevisionStatus.Active, second.Revision!.Status); - Assert.Equal(2, second.Revision.Approvals.Length); - } -} +using StellaOps.Policy.Engine.Domain; +using StellaOps.Policy.Engine.Services; +using Xunit; + +namespace StellaOps.Policy.Engine.Tests; + +public class PolicyPackRepositoryTests +{ + private readonly InMemoryPolicyPackRepository repository = new(); + + [Fact] + public async Task ActivateRevision_WithSingleApprover_ActivatesImmediately() + { + await repository.CreateAsync("pack-1", "Pack", CancellationToken.None); + await repository.UpsertRevisionAsync("pack-1", 1, requiresTwoPersonApproval: false, PolicyRevisionStatus.Approved, CancellationToken.None); + + var result = await repository.RecordActivationAsync("pack-1", 1, "alice", DateTimeOffset.UtcNow, null, CancellationToken.None); + + Assert.Equal(PolicyActivationResultStatus.Activated, result.Status); + Assert.NotNull(result.Revision); + Assert.Equal(PolicyRevisionStatus.Active, result.Revision!.Status); + Assert.Single(result.Revision.Approvals); + } + + [Fact] + public async Task ActivateRevision_WithTwoPersonRequirement_ReturnsPendingUntilSecondApproval() + { + await repository.CreateAsync("pack-2", "Pack", CancellationToken.None); + await repository.UpsertRevisionAsync("pack-2", 1, requiresTwoPersonApproval: true, PolicyRevisionStatus.Approved, CancellationToken.None); + + var first = await repository.RecordActivationAsync("pack-2", 1, "alice", DateTimeOffset.UtcNow, null, CancellationToken.None); + Assert.Equal(PolicyActivationResultStatus.PendingSecondApproval, first.Status); + Assert.Equal(PolicyRevisionStatus.Approved, first.Revision!.Status); + Assert.Single(first.Revision.Approvals); + + var duplicate = await repository.RecordActivationAsync("pack-2", 1, "alice", DateTimeOffset.UtcNow, null, CancellationToken.None); + Assert.Equal(PolicyActivationResultStatus.DuplicateApproval, duplicate.Status); + + var second = await repository.RecordActivationAsync("pack-2", 1, "bob", DateTimeOffset.UtcNow, null, CancellationToken.None); + Assert.Equal(PolicyActivationResultStatus.Activated, second.Status); + Assert.Equal(PolicyRevisionStatus.Active, second.Revision!.Status); + Assert.Equal(2, second.Revision.Approvals.Length); + } +} diff --git a/src/StellaOps.Policy.Engine.Tests/StellaOps.Policy.Engine.Tests.csproj b/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/StellaOps.Policy.Engine.Tests.csproj similarity index 68% rename from src/StellaOps.Policy.Engine.Tests/StellaOps.Policy.Engine.Tests.csproj rename to src/Policy/__Tests/StellaOps.Policy.Engine.Tests/StellaOps.Policy.Engine.Tests.csproj index 4180c80e..5c4d518b 100644 --- a/src/StellaOps.Policy.Engine.Tests/StellaOps.Policy.Engine.Tests.csproj +++ b/src/Policy/__Tests/StellaOps.Policy.Engine.Tests/StellaOps.Policy.Engine.Tests.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -8,6 +9,6 @@ </PropertyGroup> <ItemGroup> - <ProjectReference Include="..\StellaOps.Policy.Engine\StellaOps.Policy.Engine.csproj" /> + <ProjectReference 
Include="../../StellaOps.Policy.Engine/StellaOps.Policy.Engine.csproj" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Policy.Gateway.Tests/GatewayActivationTests.cs b/src/Policy/__Tests/StellaOps.Policy.Gateway.Tests/GatewayActivationTests.cs similarity index 97% rename from src/StellaOps.Policy.Gateway.Tests/GatewayActivationTests.cs rename to src/Policy/__Tests/StellaOps.Policy.Gateway.Tests/GatewayActivationTests.cs index 9cac9263..4c2df9f7 100644 --- a/src/StellaOps.Policy.Gateway.Tests/GatewayActivationTests.cs +++ b/src/Policy/__Tests/StellaOps.Policy.Gateway.Tests/GatewayActivationTests.cs @@ -1,548 +1,548 @@ -using System.Diagnostics.Metrics; -using System.Net; -using System.Net.Http.Headers; -using System.Net.Http.Json; -using System.Text.Json; -using Microsoft.AspNetCore.Authentication.JwtBearer; -using Microsoft.AspNetCore.Builder; -using Microsoft.AspNetCore.Hosting; -using Microsoft.AspNetCore.Http; -using Microsoft.AspNetCore.Mvc; -using Microsoft.AspNetCore.Mvc.Testing; -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.DependencyInjection.Extensions; -using Microsoft.IdentityModel.JsonWebTokens; -using Microsoft.IdentityModel.Protocols.OpenIdConnect; -using Microsoft.IdentityModel.Tokens; -using Polly.Utilities; -using StellaOps.Auth.Client; -using StellaOps.Auth.Abstractions; -using StellaOps.Policy.Gateway.Clients; -using StellaOps.Policy.Gateway.Contracts; -using StellaOps.Policy.Gateway.Options; -using StellaOps.Policy.Gateway.Services; -using Xunit; -using Xunit.Sdk; - -namespace StellaOps.Policy.Gateway.Tests; - -public sealed class GatewayActivationTests -{ - [Fact] - public async Task ActivateRevision_UsesServiceTokenFallback_And_RecordsMetrics() - { - await using var factory = new PolicyGatewayWebApplicationFactory(); - - var tokenClient = factory.Services.GetRequiredService<StubTokenClient>(); - tokenClient.Reset(); - - var recordingHandler = factory.Services.GetRequiredService<RecordingPolicyEngineHandler>(); - recordingHandler.Reset(); - - using var listener = new MeterListener(); - var activationMeasurements = new List<(long Value, string Outcome, string Source)>(); - var latencyMeasurements = new List<(double Value, string Outcome, string Source)>(); - - listener.InstrumentPublished += (instrument, meterListener) => - { - if (instrument.Meter.Name != "StellaOps.Policy.Gateway") - { - return; - } - - meterListener.EnableMeasurementEvents(instrument); - }; - - listener.SetMeasurementEventCallback<long>((instrument, value, tags, _) => - { - if (instrument.Name != "policy_gateway_activation_requests_total") - { - return; - } - - activationMeasurements.Add((value, GetTag(tags, "outcome"), GetTag(tags, "source"))); - }); - - listener.SetMeasurementEventCallback<double>((instrument, value, tags, _) => - { - if (instrument.Name != "policy_gateway_activation_latency_ms") - { - return; - } - - latencyMeasurements.Add((value, GetTag(tags, "outcome"), GetTag(tags, "source"))); - }); - - listener.Start(); - - using var client = factory.CreateClient(); - - var response = await client.PostAsJsonAsync( - "/api/policy/packs/example/revisions/5:activate", - new ActivatePolicyRevisionRequest("rollout window start")); - - listener.Dispose(); - - var forwardedRequest = recordingHandler.LastRequest; - var issuedTokens = tokenClient.RequestCount; - var responseBody = await response.Content.ReadAsStringAsync(); - if (!response.IsSuccessStatusCode) - { - throw new 
Xunit.Sdk.XunitException( - $"Gateway response was {(int)response.StatusCode} {response.StatusCode}. " + - $"Body: {responseBody}. IssuedTokens: {issuedTokens}. Forwarded: { (forwardedRequest is null ? "no" : "yes") }."); - } - - Assert.Equal(1, tokenClient.RequestCount); - - Assert.NotNull(forwardedRequest); - Assert.Equal(HttpMethod.Post, forwardedRequest!.Method); - Assert.Equal("https://policy-engine.test/api/policy/packs/example/revisions/5:activate", forwardedRequest.RequestUri!.ToString()); - Assert.Equal("Bearer", forwardedRequest.Headers.Authorization?.Scheme); - Assert.Equal("service-token", forwardedRequest.Headers.Authorization?.Parameter); - Assert.False(forwardedRequest.Headers.TryGetValues("DPoP", out _), "Expected no DPoP header when DPoP is disabled."); - - Assert.Contains(activationMeasurements, measurement => - measurement.Value == 1 && - measurement.Outcome == "activated" && - measurement.Source == "service"); - - Assert.Contains(latencyMeasurements, measurement => - measurement.Outcome == "activated" && - measurement.Source == "service"); - } - - [Fact] - public async Task ActivateRevision_RecordsMetrics_WhenUpstreamReturnsUnauthorized() - { - await using var factory = new PolicyGatewayWebApplicationFactory(); - - var tokenClient = factory.Services.GetRequiredService<StubTokenClient>(); - tokenClient.Reset(); - - var recordingHandler = factory.Services.GetRequiredService<RecordingPolicyEngineHandler>(); - recordingHandler.Reset(); - recordingHandler.SetResponseFactory(_ => - { - var problem = new ProblemDetails - { - Title = "Unauthorized", - Detail = "Caller token rejected.", - Status = StatusCodes.Status401Unauthorized - }; - return new HttpResponseMessage(HttpStatusCode.Unauthorized) - { - Content = JsonContent.Create(problem) - }; - }); - - using var listener = new MeterListener(); - var activationMeasurements = new List<(long Value, string Outcome, string Source)>(); - var latencyMeasurements = new List<(double Value, string Outcome, string Source)>(); - - listener.InstrumentPublished += (instrument, meterListener) => - { - if (instrument.Meter.Name != "StellaOps.Policy.Gateway") - { - return; - } - - meterListener.EnableMeasurementEvents(instrument); - }; - - listener.SetMeasurementEventCallback<long>((instrument, value, tags, _) => - { - if (instrument.Name != "policy_gateway_activation_requests_total") - { - return; - } - - activationMeasurements.Add((value, GetTag(tags, "outcome"), GetTag(tags, "source"))); - }); - - listener.SetMeasurementEventCallback<double>((instrument, value, tags, _) => - { - if (instrument.Name != "policy_gateway_activation_latency_ms") - { - return; - } - - latencyMeasurements.Add((value, GetTag(tags, "outcome"), GetTag(tags, "source"))); - }); - - listener.Start(); - - using var client = factory.CreateClient(); - - var response = await client.PostAsJsonAsync( - "/api/policy/packs/example/revisions/2:activate", - new ActivatePolicyRevisionRequest("failure path")); - - listener.Dispose(); - - Assert.Equal(HttpStatusCode.Unauthorized, response.StatusCode); - - Assert.Equal(1, tokenClient.RequestCount); - - var forwardedRequest = recordingHandler.LastRequest; - Assert.NotNull(forwardedRequest); - Assert.Equal("service-token", forwardedRequest!.Headers.Authorization?.Parameter); - - Assert.Contains(activationMeasurements, measurement => - measurement.Value == 1 && - measurement.Outcome == "unauthorized" && - measurement.Source == "service"); - - Assert.Contains(latencyMeasurements, measurement => - measurement.Outcome == "unauthorized" && 
- measurement.Source == "service"); - } - - [Fact] - public async Task ActivateRevision_RecordsMetrics_WhenUpstreamReturnsBadGateway() - { - await using var factory = new PolicyGatewayWebApplicationFactory(); - - var tokenClient = factory.Services.GetRequiredService<StubTokenClient>(); - tokenClient.Reset(); - - var recordingHandler = factory.Services.GetRequiredService<RecordingPolicyEngineHandler>(); - recordingHandler.Reset(); - recordingHandler.SetResponseFactory(_ => - { - var problem = new ProblemDetails - { - Title = "Upstream error", - Detail = "Policy Engine returned 502.", - Status = StatusCodes.Status502BadGateway - }; - return new HttpResponseMessage(HttpStatusCode.BadGateway) - { - Content = JsonContent.Create(problem) - }; - }); - - using var listener = new MeterListener(); - var activationMeasurements = new List<(long Value, string Outcome, string Source)>(); - var latencyMeasurements = new List<(double Value, string Outcome, string Source)>(); - - listener.InstrumentPublished += (instrument, meterListener) => - { - if (instrument.Meter.Name != "StellaOps.Policy.Gateway") - { - return; - } - - meterListener.EnableMeasurementEvents(instrument); - }; - - listener.SetMeasurementEventCallback<long>((instrument, value, tags, _) => - { - if (instrument.Name != "policy_gateway_activation_requests_total") - { - return; - } - - activationMeasurements.Add((value, GetTag(tags, "outcome"), GetTag(tags, "source"))); - }); - - listener.SetMeasurementEventCallback<double>((instrument, value, tags, _) => - { - if (instrument.Name != "policy_gateway_activation_latency_ms") - { - return; - } - - latencyMeasurements.Add((value, GetTag(tags, "outcome"), GetTag(tags, "source"))); - }); - - listener.Start(); - - using var client = factory.CreateClient(); - - var response = await client.PostAsJsonAsync( - "/api/policy/packs/example/revisions/3:activate", - new ActivatePolicyRevisionRequest("upstream failure")); - - listener.Dispose(); - - Assert.Equal(HttpStatusCode.BadGateway, response.StatusCode); - - Assert.Equal(1, tokenClient.RequestCount); - - var forwardedRequest = recordingHandler.LastRequest; - Assert.NotNull(forwardedRequest); - Assert.Equal("service-token", forwardedRequest!.Headers.Authorization?.Parameter); - - Assert.Contains(activationMeasurements, measurement => - measurement.Value == 1 && - measurement.Outcome == "error" && - measurement.Source == "service"); - - Assert.Contains(latencyMeasurements, measurement => - measurement.Outcome == "error" && - measurement.Source == "service"); - } - - [Fact] - public async Task ActivateRevision_RetriesOnTooManyRequests() - { - await using var factory = new PolicyGatewayWebApplicationFactory(); - - var recordedDelays = new List<TimeSpan>(); - var originalSleep = SystemClock.SleepAsync; - SystemClock.SleepAsync = (delay, cancellationToken) => - { - recordedDelays.Add(delay); - return Task.CompletedTask; - }; - - var tokenClient = factory.Services.GetRequiredService<StubTokenClient>(); - tokenClient.Reset(); - - var recordingHandler = factory.Services.GetRequiredService<RecordingPolicyEngineHandler>(); - recordingHandler.Reset(); - recordingHandler.SetResponseSequence(new[] - { - CreateThrottleResponse(), - CreateThrottleResponse(), - RecordingPolicyEngineHandler.CreateSuccessResponse() - }); - - using var client = factory.CreateClient(); - - try - { - var response = await client.PostAsJsonAsync( - "/api/policy/packs/example/revisions/7:activate", - new ActivatePolicyRevisionRequest("retry after throttle")); - - 
Assert.True(response.IsSuccessStatusCode, "Gateway should succeed after retrying throttled upstream responses."); - Assert.Equal(1, tokenClient.RequestCount); - Assert.Equal(3, recordingHandler.RequestCount); - } - finally - { - SystemClock.SleepAsync = originalSleep; - } - - Assert.Equal(new[] { TimeSpan.FromSeconds(2), TimeSpan.FromSeconds(4) }, recordedDelays); - } - - private static HttpResponseMessage CreateThrottleResponse() - { - var problem = new ProblemDetails - { - Title = "Too many requests", - Detail = "Slow down.", - Status = StatusCodes.Status429TooManyRequests - }; - - var response = new HttpResponseMessage((HttpStatusCode)StatusCodes.Status429TooManyRequests) - { - Content = JsonContent.Create(problem) - }; - response.Headers.RetryAfter = new RetryConditionHeaderValue(TimeSpan.FromMilliseconds(10)); - return response; - } - - private static string GetTag(ReadOnlySpan<KeyValuePair<string, object?>> tags, string key) - { - foreach (var tag in tags) - { - if (string.Equals(tag.Key, key, StringComparison.Ordinal)) - { - return tag.Value?.ToString() ?? string.Empty; - } - } - - return string.Empty; - } - - private sealed class PolicyGatewayWebApplicationFactory : WebApplicationFactory<Program> - { - protected override void ConfigureWebHost(IWebHostBuilder builder) - { - builder.UseEnvironment("Development"); - - builder.ConfigureAppConfiguration((_, configurationBuilder) => - { - var settings = new Dictionary<string, string?> - { - ["PolicyGateway:Telemetry:MinimumLogLevel"] = "Warning", - ["PolicyGateway:ResourceServer:Authority"] = "https://authority.test", - ["PolicyGateway:ResourceServer:RequireHttpsMetadata"] = "false", - ["PolicyGateway:ResourceServer:BypassNetworks:0"] = "127.0.0.1/32", - ["PolicyGateway:ResourceServer:BypassNetworks:1"] = "::1/128", - ["PolicyGateway:PolicyEngine:BaseAddress"] = "https://policy-engine.test/", - ["PolicyGateway:PolicyEngine:ClientCredentials:Enabled"] = "true", - ["PolicyGateway:PolicyEngine:ClientCredentials:ClientId"] = "policy-gateway", - ["PolicyGateway:PolicyEngine:ClientCredentials:ClientSecret"] = "secret", - ["PolicyGateway:PolicyEngine:ClientCredentials:Scopes:0"] = "policy:activate", - ["PolicyGateway:PolicyEngine:Dpop:Enabled"] = "false" - }; - - configurationBuilder.AddInMemoryCollection(settings); - }); - - builder.ConfigureServices(services => - { - services.RemoveAll<IStellaOpsTokenClient>(); - services.AddSingleton<StubTokenClient>(); - services.AddSingleton<IStellaOpsTokenClient>(sp => sp.GetRequiredService<StubTokenClient>()); - - services.RemoveAll<PolicyEngineClient>(); - services.RemoveAll<IPolicyEngineClient>(); - services.AddSingleton<RecordingPolicyEngineHandler>(); - services.AddHttpClient<IPolicyEngineClient, PolicyEngineClient>() - .ConfigureHttpClient(client => - { - client.BaseAddress = new Uri("https://policy-engine.test/"); - }) - .ConfigurePrimaryHttpMessageHandler(sp => sp.GetRequiredService<RecordingPolicyEngineHandler>()); - - services.AddSingleton<IStartupFilter>(new RemoteIpStartupFilter()); - - services.PostConfigure<JwtBearerOptions>(StellaOpsAuthenticationDefaults.AuthenticationScheme, options => - { - options.RequireHttpsMetadata = false; - options.Configuration = new OpenIdConnectConfiguration - { - Issuer = "https://authority.test", - TokenEndpoint = "https://authority.test/token" - }; - options.TokenValidationParameters = new TokenValidationParameters - { - ValidateIssuer = false, - ValidateAudience = false, - ValidateIssuerSigningKey = false, - SignatureValidator = (token, parameters) => new 
JsonWebToken(token) - }; - options.BackchannelHttpHandler = new NoOpBackchannelHandler(); - }); - - }); - } - } - - private sealed class RemoteIpStartupFilter : IStartupFilter - { - public Action<IApplicationBuilder> Configure(Action<IApplicationBuilder> next) - { - return app => - { - app.Use(async (context, innerNext) => - { - context.Connection.RemoteIpAddress ??= IPAddress.Loopback; - await innerNext().ConfigureAwait(false); - }); - - next(app); - }; - } - } - - private sealed class RecordingPolicyEngineHandler : HttpMessageHandler - { - private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web); - - public HttpRequestMessage? LastRequest { get; private set; } - public int RequestCount { get; private set; } - private Func<HttpRequestMessage, HttpResponseMessage>? responseFactory; - private Queue<HttpResponseMessage>? responseQueue; - - protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) - { - LastRequest = request; - RequestCount++; - - if (responseQueue is { Count: > 0 }) - { - return Task.FromResult(responseQueue.Dequeue()); - } - - var response = responseFactory is not null - ? responseFactory(request) - : CreateSuccessResponse(); - - return Task.FromResult(response); - } - - public void Reset() - { - LastRequest = null; - RequestCount = 0; - responseFactory = null; - responseQueue?.Clear(); - responseQueue = null; - } - - public void SetResponseFactory(Func<HttpRequestMessage, HttpResponseMessage>? factory) - { - responseFactory = factory; - } - - public void SetResponseSequence(IEnumerable<HttpResponseMessage> responses) - { - responseQueue = new Queue<HttpResponseMessage>(responses ?? Array.Empty<HttpResponseMessage>()); - } - - public static HttpResponseMessage CreateSuccessResponse() - { - var now = DateTimeOffset.UtcNow; - var payload = new PolicyRevisionActivationDto( - "activated", - new PolicyRevisionDto( - 5, - "activated", - false, - now, - now, - Array.Empty<PolicyActivationApprovalDto>())); - - return new HttpResponseMessage(HttpStatusCode.OK) - { - Content = JsonContent.Create(payload, options: SerializerOptions) - }; - } - } - - private sealed class NoOpBackchannelHandler : HttpMessageHandler - { - protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) - => Task.FromResult(new HttpResponseMessage(HttpStatusCode.OK)); - } - - private sealed class StubTokenClient : IStellaOpsTokenClient - { - public int RequestCount { get; private set; } - - public void Reset() - { - RequestCount = 0; - } - - public Task<StellaOpsTokenResult> RequestClientCredentialsTokenAsync(string? scope = null, IReadOnlyDictionary<string, string>? additionalParameters = null, CancellationToken cancellationToken = default) - { - RequestCount++; - var expiresAt = DateTimeOffset.UtcNow.AddMinutes(5); - return Task.FromResult(new StellaOpsTokenResult("service-token", "Bearer", expiresAt, Array.Empty<string>())); - } - - public Task<StellaOpsTokenResult> RequestPasswordTokenAsync(string username, string password, string? scope = null, IReadOnlyDictionary<string, string>? 
additionalParameters = null, CancellationToken cancellationToken = default) - => throw new NotSupportedException(); - - public Task<JsonWebKeySet> GetJsonWebKeySetAsync(CancellationToken cancellationToken = default) - => throw new NotSupportedException(); - - public ValueTask<StellaOpsTokenCacheEntry?> GetCachedTokenAsync(string key, CancellationToken cancellationToken = default) - => ValueTask.FromResult<StellaOpsTokenCacheEntry?>(null); - - public ValueTask CacheTokenAsync(string key, StellaOpsTokenCacheEntry entry, CancellationToken cancellationToken = default) - => ValueTask.CompletedTask; - - public ValueTask ClearCachedTokenAsync(string key, CancellationToken cancellationToken = default) - => ValueTask.CompletedTask; - } -} +using System.Diagnostics.Metrics; +using System.Net; +using System.Net.Http.Headers; +using System.Net.Http.Json; +using System.Text.Json; +using Microsoft.AspNetCore.Authentication.JwtBearer; +using Microsoft.AspNetCore.Builder; +using Microsoft.AspNetCore.Hosting; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Mvc; +using Microsoft.AspNetCore.Mvc.Testing; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Microsoft.IdentityModel.JsonWebTokens; +using Microsoft.IdentityModel.Protocols.OpenIdConnect; +using Microsoft.IdentityModel.Tokens; +using Polly.Utilities; +using StellaOps.Auth.Client; +using StellaOps.Auth.Abstractions; +using StellaOps.Policy.Gateway.Clients; +using StellaOps.Policy.Gateway.Contracts; +using StellaOps.Policy.Gateway.Options; +using StellaOps.Policy.Gateway.Services; +using Xunit; +using Xunit.Sdk; + +namespace StellaOps.Policy.Gateway.Tests; + +public sealed class GatewayActivationTests +{ + [Fact] + public async Task ActivateRevision_UsesServiceTokenFallback_And_RecordsMetrics() + { + await using var factory = new PolicyGatewayWebApplicationFactory(); + + var tokenClient = factory.Services.GetRequiredService<StubTokenClient>(); + tokenClient.Reset(); + + var recordingHandler = factory.Services.GetRequiredService<RecordingPolicyEngineHandler>(); + recordingHandler.Reset(); + + using var listener = new MeterListener(); + var activationMeasurements = new List<(long Value, string Outcome, string Source)>(); + var latencyMeasurements = new List<(double Value, string Outcome, string Source)>(); + + listener.InstrumentPublished += (instrument, meterListener) => + { + if (instrument.Meter.Name != "StellaOps.Policy.Gateway") + { + return; + } + + meterListener.EnableMeasurementEvents(instrument); + }; + + listener.SetMeasurementEventCallback<long>((instrument, value, tags, _) => + { + if (instrument.Name != "policy_gateway_activation_requests_total") + { + return; + } + + activationMeasurements.Add((value, GetTag(tags, "outcome"), GetTag(tags, "source"))); + }); + + listener.SetMeasurementEventCallback<double>((instrument, value, tags, _) => + { + if (instrument.Name != "policy_gateway_activation_latency_ms") + { + return; + } + + latencyMeasurements.Add((value, GetTag(tags, "outcome"), GetTag(tags, "source"))); + }); + + listener.Start(); + + using var client = factory.CreateClient(); + + var response = await client.PostAsJsonAsync( + "/api/policy/packs/example/revisions/5:activate", + new ActivatePolicyRevisionRequest("rollout window start")); + + listener.Dispose(); + + var forwardedRequest = recordingHandler.LastRequest; + var issuedTokens = tokenClient.RequestCount; + var responseBody = await 
response.Content.ReadAsStringAsync(); + if (!response.IsSuccessStatusCode) + { + throw new Xunit.Sdk.XunitException( + $"Gateway response was {(int)response.StatusCode} {response.StatusCode}. " + + $"Body: {responseBody}. IssuedTokens: {issuedTokens}. Forwarded: { (forwardedRequest is null ? "no" : "yes") }."); + } + + Assert.Equal(1, tokenClient.RequestCount); + + Assert.NotNull(forwardedRequest); + Assert.Equal(HttpMethod.Post, forwardedRequest!.Method); + Assert.Equal("https://policy-engine.test/api/policy/packs/example/revisions/5:activate", forwardedRequest.RequestUri!.ToString()); + Assert.Equal("Bearer", forwardedRequest.Headers.Authorization?.Scheme); + Assert.Equal("service-token", forwardedRequest.Headers.Authorization?.Parameter); + Assert.False(forwardedRequest.Headers.TryGetValues("DPoP", out _), "Expected no DPoP header when DPoP is disabled."); + + Assert.Contains(activationMeasurements, measurement => + measurement.Value == 1 && + measurement.Outcome == "activated" && + measurement.Source == "service"); + + Assert.Contains(latencyMeasurements, measurement => + measurement.Outcome == "activated" && + measurement.Source == "service"); + } + + [Fact] + public async Task ActivateRevision_RecordsMetrics_WhenUpstreamReturnsUnauthorized() + { + await using var factory = new PolicyGatewayWebApplicationFactory(); + + var tokenClient = factory.Services.GetRequiredService<StubTokenClient>(); + tokenClient.Reset(); + + var recordingHandler = factory.Services.GetRequiredService<RecordingPolicyEngineHandler>(); + recordingHandler.Reset(); + recordingHandler.SetResponseFactory(_ => + { + var problem = new ProblemDetails + { + Title = "Unauthorized", + Detail = "Caller token rejected.", + Status = StatusCodes.Status401Unauthorized + }; + return new HttpResponseMessage(HttpStatusCode.Unauthorized) + { + Content = JsonContent.Create(problem) + }; + }); + + using var listener = new MeterListener(); + var activationMeasurements = new List<(long Value, string Outcome, string Source)>(); + var latencyMeasurements = new List<(double Value, string Outcome, string Source)>(); + + listener.InstrumentPublished += (instrument, meterListener) => + { + if (instrument.Meter.Name != "StellaOps.Policy.Gateway") + { + return; + } + + meterListener.EnableMeasurementEvents(instrument); + }; + + listener.SetMeasurementEventCallback<long>((instrument, value, tags, _) => + { + if (instrument.Name != "policy_gateway_activation_requests_total") + { + return; + } + + activationMeasurements.Add((value, GetTag(tags, "outcome"), GetTag(tags, "source"))); + }); + + listener.SetMeasurementEventCallback<double>((instrument, value, tags, _) => + { + if (instrument.Name != "policy_gateway_activation_latency_ms") + { + return; + } + + latencyMeasurements.Add((value, GetTag(tags, "outcome"), GetTag(tags, "source"))); + }); + + listener.Start(); + + using var client = factory.CreateClient(); + + var response = await client.PostAsJsonAsync( + "/api/policy/packs/example/revisions/2:activate", + new ActivatePolicyRevisionRequest("failure path")); + + listener.Dispose(); + + Assert.Equal(HttpStatusCode.Unauthorized, response.StatusCode); + + Assert.Equal(1, tokenClient.RequestCount); + + var forwardedRequest = recordingHandler.LastRequest; + Assert.NotNull(forwardedRequest); + Assert.Equal("service-token", forwardedRequest!.Headers.Authorization?.Parameter); + + Assert.Contains(activationMeasurements, measurement => + measurement.Value == 1 && + measurement.Outcome == "unauthorized" && + measurement.Source == "service"); + + 
Assert.Contains(latencyMeasurements, measurement => + measurement.Outcome == "unauthorized" && + measurement.Source == "service"); + } + + [Fact] + public async Task ActivateRevision_RecordsMetrics_WhenUpstreamReturnsBadGateway() + { + await using var factory = new PolicyGatewayWebApplicationFactory(); + + var tokenClient = factory.Services.GetRequiredService<StubTokenClient>(); + tokenClient.Reset(); + + var recordingHandler = factory.Services.GetRequiredService<RecordingPolicyEngineHandler>(); + recordingHandler.Reset(); + recordingHandler.SetResponseFactory(_ => + { + var problem = new ProblemDetails + { + Title = "Upstream error", + Detail = "Policy Engine returned 502.", + Status = StatusCodes.Status502BadGateway + }; + return new HttpResponseMessage(HttpStatusCode.BadGateway) + { + Content = JsonContent.Create(problem) + }; + }); + + using var listener = new MeterListener(); + var activationMeasurements = new List<(long Value, string Outcome, string Source)>(); + var latencyMeasurements = new List<(double Value, string Outcome, string Source)>(); + + listener.InstrumentPublished += (instrument, meterListener) => + { + if (instrument.Meter.Name != "StellaOps.Policy.Gateway") + { + return; + } + + meterListener.EnableMeasurementEvents(instrument); + }; + + listener.SetMeasurementEventCallback<long>((instrument, value, tags, _) => + { + if (instrument.Name != "policy_gateway_activation_requests_total") + { + return; + } + + activationMeasurements.Add((value, GetTag(tags, "outcome"), GetTag(tags, "source"))); + }); + + listener.SetMeasurementEventCallback<double>((instrument, value, tags, _) => + { + if (instrument.Name != "policy_gateway_activation_latency_ms") + { + return; + } + + latencyMeasurements.Add((value, GetTag(tags, "outcome"), GetTag(tags, "source"))); + }); + + listener.Start(); + + using var client = factory.CreateClient(); + + var response = await client.PostAsJsonAsync( + "/api/policy/packs/example/revisions/3:activate", + new ActivatePolicyRevisionRequest("upstream failure")); + + listener.Dispose(); + + Assert.Equal(HttpStatusCode.BadGateway, response.StatusCode); + + Assert.Equal(1, tokenClient.RequestCount); + + var forwardedRequest = recordingHandler.LastRequest; + Assert.NotNull(forwardedRequest); + Assert.Equal("service-token", forwardedRequest!.Headers.Authorization?.Parameter); + + Assert.Contains(activationMeasurements, measurement => + measurement.Value == 1 && + measurement.Outcome == "error" && + measurement.Source == "service"); + + Assert.Contains(latencyMeasurements, measurement => + measurement.Outcome == "error" && + measurement.Source == "service"); + } + + [Fact] + public async Task ActivateRevision_RetriesOnTooManyRequests() + { + await using var factory = new PolicyGatewayWebApplicationFactory(); + + var recordedDelays = new List<TimeSpan>(); + var originalSleep = SystemClock.SleepAsync; + SystemClock.SleepAsync = (delay, cancellationToken) => + { + recordedDelays.Add(delay); + return Task.CompletedTask; + }; + + var tokenClient = factory.Services.GetRequiredService<StubTokenClient>(); + tokenClient.Reset(); + + var recordingHandler = factory.Services.GetRequiredService<RecordingPolicyEngineHandler>(); + recordingHandler.Reset(); + recordingHandler.SetResponseSequence(new[] + { + CreateThrottleResponse(), + CreateThrottleResponse(), + RecordingPolicyEngineHandler.CreateSuccessResponse() + }); + + using var client = factory.CreateClient(); + + try + { + var response = await client.PostAsJsonAsync( + "/api/policy/packs/example/revisions/7:activate", + 
new ActivatePolicyRevisionRequest("retry after throttle")); + + Assert.True(response.IsSuccessStatusCode, "Gateway should succeed after retrying throttled upstream responses."); + Assert.Equal(1, tokenClient.RequestCount); + Assert.Equal(3, recordingHandler.RequestCount); + } + finally + { + SystemClock.SleepAsync = originalSleep; + } + + Assert.Equal(new[] { TimeSpan.FromSeconds(2), TimeSpan.FromSeconds(4) }, recordedDelays); + } + + private static HttpResponseMessage CreateThrottleResponse() + { + var problem = new ProblemDetails + { + Title = "Too many requests", + Detail = "Slow down.", + Status = StatusCodes.Status429TooManyRequests + }; + + var response = new HttpResponseMessage((HttpStatusCode)StatusCodes.Status429TooManyRequests) + { + Content = JsonContent.Create(problem) + }; + response.Headers.RetryAfter = new RetryConditionHeaderValue(TimeSpan.FromMilliseconds(10)); + return response; + } + + private static string GetTag(ReadOnlySpan<KeyValuePair<string, object?>> tags, string key) + { + foreach (var tag in tags) + { + if (string.Equals(tag.Key, key, StringComparison.Ordinal)) + { + return tag.Value?.ToString() ?? string.Empty; + } + } + + return string.Empty; + } + + private sealed class PolicyGatewayWebApplicationFactory : WebApplicationFactory<Program> + { + protected override void ConfigureWebHost(IWebHostBuilder builder) + { + builder.UseEnvironment("Development"); + + builder.ConfigureAppConfiguration((_, configurationBuilder) => + { + var settings = new Dictionary<string, string?> + { + ["PolicyGateway:Telemetry:MinimumLogLevel"] = "Warning", + ["PolicyGateway:ResourceServer:Authority"] = "https://authority.test", + ["PolicyGateway:ResourceServer:RequireHttpsMetadata"] = "false", + ["PolicyGateway:ResourceServer:BypassNetworks:0"] = "127.0.0.1/32", + ["PolicyGateway:ResourceServer:BypassNetworks:1"] = "::1/128", + ["PolicyGateway:PolicyEngine:BaseAddress"] = "https://policy-engine.test/", + ["PolicyGateway:PolicyEngine:ClientCredentials:Enabled"] = "true", + ["PolicyGateway:PolicyEngine:ClientCredentials:ClientId"] = "policy-gateway", + ["PolicyGateway:PolicyEngine:ClientCredentials:ClientSecret"] = "secret", + ["PolicyGateway:PolicyEngine:ClientCredentials:Scopes:0"] = "policy:activate", + ["PolicyGateway:PolicyEngine:Dpop:Enabled"] = "false" + }; + + configurationBuilder.AddInMemoryCollection(settings); + }); + + builder.ConfigureServices(services => + { + services.RemoveAll<IStellaOpsTokenClient>(); + services.AddSingleton<StubTokenClient>(); + services.AddSingleton<IStellaOpsTokenClient>(sp => sp.GetRequiredService<StubTokenClient>()); + + services.RemoveAll<PolicyEngineClient>(); + services.RemoveAll<IPolicyEngineClient>(); + services.AddSingleton<RecordingPolicyEngineHandler>(); + services.AddHttpClient<IPolicyEngineClient, PolicyEngineClient>() + .ConfigureHttpClient(client => + { + client.BaseAddress = new Uri("https://policy-engine.test/"); + }) + .ConfigurePrimaryHttpMessageHandler(sp => sp.GetRequiredService<RecordingPolicyEngineHandler>()); + + services.AddSingleton<IStartupFilter>(new RemoteIpStartupFilter()); + + services.PostConfigure<JwtBearerOptions>(StellaOpsAuthenticationDefaults.AuthenticationScheme, options => + { + options.RequireHttpsMetadata = false; + options.Configuration = new OpenIdConnectConfiguration + { + Issuer = "https://authority.test", + TokenEndpoint = "https://authority.test/token" + }; + options.TokenValidationParameters = new TokenValidationParameters + { + ValidateIssuer = false, + ValidateAudience = false, + 
ValidateIssuerSigningKey = false, + SignatureValidator = (token, parameters) => new JsonWebToken(token) + }; + options.BackchannelHttpHandler = new NoOpBackchannelHandler(); + }); + + }); + } + } + + private sealed class RemoteIpStartupFilter : IStartupFilter + { + public Action<IApplicationBuilder> Configure(Action<IApplicationBuilder> next) + { + return app => + { + app.Use(async (context, innerNext) => + { + context.Connection.RemoteIpAddress ??= IPAddress.Loopback; + await innerNext().ConfigureAwait(false); + }); + + next(app); + }; + } + } + + private sealed class RecordingPolicyEngineHandler : HttpMessageHandler + { + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web); + + public HttpRequestMessage? LastRequest { get; private set; } + public int RequestCount { get; private set; } + private Func<HttpRequestMessage, HttpResponseMessage>? responseFactory; + private Queue<HttpResponseMessage>? responseQueue; + + protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + LastRequest = request; + RequestCount++; + + if (responseQueue is { Count: > 0 }) + { + return Task.FromResult(responseQueue.Dequeue()); + } + + var response = responseFactory is not null + ? responseFactory(request) + : CreateSuccessResponse(); + + return Task.FromResult(response); + } + + public void Reset() + { + LastRequest = null; + RequestCount = 0; + responseFactory = null; + responseQueue?.Clear(); + responseQueue = null; + } + + public void SetResponseFactory(Func<HttpRequestMessage, HttpResponseMessage>? factory) + { + responseFactory = factory; + } + + public void SetResponseSequence(IEnumerable<HttpResponseMessage> responses) + { + responseQueue = new Queue<HttpResponseMessage>(responses ?? Array.Empty<HttpResponseMessage>()); + } + + public static HttpResponseMessage CreateSuccessResponse() + { + var now = DateTimeOffset.UtcNow; + var payload = new PolicyRevisionActivationDto( + "activated", + new PolicyRevisionDto( + 5, + "activated", + false, + now, + now, + Array.Empty<PolicyActivationApprovalDto>())); + + return new HttpResponseMessage(HttpStatusCode.OK) + { + Content = JsonContent.Create(payload, options: SerializerOptions) + }; + } + } + + private sealed class NoOpBackchannelHandler : HttpMessageHandler + { + protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) + => Task.FromResult(new HttpResponseMessage(HttpStatusCode.OK)); + } + + private sealed class StubTokenClient : IStellaOpsTokenClient + { + public int RequestCount { get; private set; } + + public void Reset() + { + RequestCount = 0; + } + + public Task<StellaOpsTokenResult> RequestClientCredentialsTokenAsync(string? scope = null, IReadOnlyDictionary<string, string>? additionalParameters = null, CancellationToken cancellationToken = default) + { + RequestCount++; + var expiresAt = DateTimeOffset.UtcNow.AddMinutes(5); + return Task.FromResult(new StellaOpsTokenResult("service-token", "Bearer", expiresAt, Array.Empty<string>())); + } + + public Task<StellaOpsTokenResult> RequestPasswordTokenAsync(string username, string password, string? scope = null, IReadOnlyDictionary<string, string>? 
additionalParameters = null, CancellationToken cancellationToken = default) + => throw new NotSupportedException(); + + public Task<JsonWebKeySet> GetJsonWebKeySetAsync(CancellationToken cancellationToken = default) + => throw new NotSupportedException(); + + public ValueTask<StellaOpsTokenCacheEntry?> GetCachedTokenAsync(string key, CancellationToken cancellationToken = default) + => ValueTask.FromResult<StellaOpsTokenCacheEntry?>(null); + + public ValueTask CacheTokenAsync(string key, StellaOpsTokenCacheEntry entry, CancellationToken cancellationToken = default) + => ValueTask.CompletedTask; + + public ValueTask ClearCachedTokenAsync(string key, CancellationToken cancellationToken = default) + => ValueTask.CompletedTask; + } +} diff --git a/src/StellaOps.Policy.Gateway.Tests/PolicyEngineClientTests.cs b/src/Policy/__Tests/StellaOps.Policy.Gateway.Tests/PolicyEngineClientTests.cs similarity index 97% rename from src/StellaOps.Policy.Gateway.Tests/PolicyEngineClientTests.cs rename to src/Policy/__Tests/StellaOps.Policy.Gateway.Tests/PolicyEngineClientTests.cs index 04f028b1..d200f8c2 100644 --- a/src/StellaOps.Policy.Gateway.Tests/PolicyEngineClientTests.cs +++ b/src/Policy/__Tests/StellaOps.Policy.Gateway.Tests/PolicyEngineClientTests.cs @@ -1,212 +1,212 @@ -using System; -using System.Collections.Generic; -using System.Diagnostics.Metrics; -using System.Net; -using System.Net.Http; -using System.Security.Claims; -using System.Text; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.AspNetCore.Hosting; -using Microsoft.Extensions.FileProviders; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.Extensions.Options; -using Microsoft.Extensions.Hosting; -using Microsoft.IdentityModel.Tokens; -using StellaOps.Auth.Client; -using StellaOps.Policy.Gateway.Clients; -using StellaOps.Policy.Gateway.Contracts; -using StellaOps.Policy.Gateway.Options; -using StellaOps.Policy.Gateway.Services; -using Xunit; - -namespace StellaOps.Policy.Gateway.Tests; - -public class PolicyEngineClientTests -{ - [Fact] - public async Task ActivateRevision_UsesServiceTokenWhenForwardingContextMissing() - { - var options = CreateGatewayOptions(); - options.PolicyEngine.ClientCredentials.Enabled = true; - options.PolicyEngine.ClientCredentials.ClientId = "policy-gateway"; - options.PolicyEngine.ClientCredentials.ClientSecret = "secret"; - options.PolicyEngine.ClientCredentials.Scopes.Clear(); - options.PolicyEngine.ClientCredentials.Scopes.Add("policy:activate"); - options.PolicyEngine.BaseAddress = "https://policy-engine.test/"; - - var optionsMonitor = new TestOptionsMonitor(options); - var tokenClient = new StubTokenClient(); - var dpopGenerator = new PolicyGatewayDpopProofGenerator(new StubHostEnvironment(), optionsMonitor, TimeProvider.System, NullLogger<PolicyGatewayDpopProofGenerator>.Instance); - var tokenProvider = new PolicyEngineTokenProvider(tokenClient, optionsMonitor, dpopGenerator, TimeProvider.System, NullLogger<PolicyEngineTokenProvider>.Instance); - - using var recordingHandler = new RecordingHandler(); - using var httpClient = new HttpClient(recordingHandler) - { - BaseAddress = new Uri(options.PolicyEngine.BaseAddress) - }; - - var client = new PolicyEngineClient(httpClient, Microsoft.Extensions.Options.Options.Create(options), tokenProvider, NullLogger<PolicyEngineClient>.Instance); - - var request = new ActivatePolicyRevisionRequest("comment"); - var result = await client.ActivatePolicyRevisionAsync(null, "pack-123", 7, request, 
CancellationToken.None); - - Assert.True(result.IsSuccess); - Assert.NotNull(recordingHandler.LastRequest); - var authorization = recordingHandler.LastRequest!.Headers.Authorization; - Assert.NotNull(authorization); - Assert.Equal("Bearer", authorization!.Scheme); - Assert.Equal("service-token", authorization.Parameter); - Assert.Equal(1, tokenClient.RequestCount); - } - - [Fact] - public void Metrics_RecordActivation_EmitsExpectedTags() - { - using var metrics = new PolicyGatewayMetrics(); - using var listener = new MeterListener(); - var measurements = new List<(long Value, string Outcome, string Source)>(); - var latencies = new List<(double Value, string Outcome, string Source)>(); - - listener.InstrumentPublished += (instrument, meterListener) => - { - if (!string.Equals(instrument.Meter.Name, "StellaOps.Policy.Gateway", StringComparison.Ordinal)) - { - return; - } - - meterListener.EnableMeasurementEvents(instrument); - }; - - listener.SetMeasurementEventCallback<long>((instrument, value, tags, state) => - { - if (instrument.Name != "policy_gateway_activation_requests_total") - { - return; - } - - measurements.Add((value, GetTag(tags, "outcome"), GetTag(tags, "source"))); - }); - - listener.SetMeasurementEventCallback<double>((instrument, value, tags, state) => - { - if (instrument.Name != "policy_gateway_activation_latency_ms") - { - return; - } - - latencies.Add((value, GetTag(tags, "outcome"), GetTag(tags, "source"))); - }); - - listener.Start(); - - metrics.RecordActivation("activated", "service", 42.5); - - listener.Dispose(); - - Assert.Contains(measurements, entry => entry.Value == 1 && entry.Outcome == "activated" && entry.Source == "service"); - Assert.Contains(latencies, entry => entry.Outcome == "activated" && entry.Source == "service" && entry.Value == 42.5); - } - - private static string GetTag(ReadOnlySpan<KeyValuePair<string, object?>> tags, string key) - { - foreach (var tag in tags) - { - if (string.Equals(tag.Key, key, StringComparison.Ordinal)) - { - return tag.Value?.ToString() ?? string.Empty; - } - } - - return string.Empty; - } - - private static PolicyGatewayOptions CreateGatewayOptions() - { - return new PolicyGatewayOptions - { - PolicyEngine = - { - BaseAddress = "https://policy-engine.test/" - } - }; - } - - private sealed class TestOptionsMonitor : IOptionsMonitor<PolicyGatewayOptions> - { - public TestOptionsMonitor(PolicyGatewayOptions current) - { - CurrentValue = current; - } - - public PolicyGatewayOptions CurrentValue { get; } - - public PolicyGatewayOptions Get(string? name) => CurrentValue; - - public IDisposable OnChange(Action<PolicyGatewayOptions, string?> listener) => EmptyDisposable.Instance; - - private sealed class EmptyDisposable : IDisposable - { - public static readonly EmptyDisposable Instance = new(); - public void Dispose() - { - } - } - } - - private sealed class StubTokenClient : IStellaOpsTokenClient - { - public int RequestCount { get; private set; } - - public IReadOnlyDictionary<string, string>? LastAdditionalParameters { get; private set; } - - public Task<StellaOpsTokenResult> RequestClientCredentialsTokenAsync(string? scope = null, IReadOnlyDictionary<string, string>? 
additionalParameters = null, CancellationToken cancellationToken = default) - { - RequestCount++; - LastAdditionalParameters = additionalParameters; - return Task.FromResult(new StellaOpsTokenResult("service-token", "Bearer", DateTimeOffset.UtcNow.AddMinutes(5), Array.Empty<string>())); - } - - public Task<StellaOpsTokenResult> RequestPasswordTokenAsync(string username, string password, string? scope = null, IReadOnlyDictionary<string, string>? additionalParameters = null, CancellationToken cancellationToken = default) - => throw new NotSupportedException(); - - public Task<JsonWebKeySet> GetJsonWebKeySetAsync(CancellationToken cancellationToken = default) - => throw new NotSupportedException(); - - public ValueTask<StellaOpsTokenCacheEntry?> GetCachedTokenAsync(string key, CancellationToken cancellationToken = default) - => ValueTask.FromResult<StellaOpsTokenCacheEntry?>(null); - - public ValueTask CacheTokenAsync(string key, StellaOpsTokenCacheEntry entry, CancellationToken cancellationToken = default) - => ValueTask.CompletedTask; - - public ValueTask ClearCachedTokenAsync(string key, CancellationToken cancellationToken = default) - => ValueTask.CompletedTask; - } - - private sealed class RecordingHandler : HttpMessageHandler - { - public HttpRequestMessage? LastRequest { get; private set; } - - protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) - { - LastRequest = request; - - var payload = JsonSerializer.Serialize(new PolicyRevisionActivationDto("activated", new PolicyRevisionDto(7, "Activated", false, DateTimeOffset.UtcNow, DateTimeOffset.UtcNow, Array.Empty<PolicyActivationApprovalDto>()))); - var response = new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StringContent(payload, Encoding.UTF8, "application/json") - }; - - return Task.FromResult(response); - } - } - - private sealed class StubHostEnvironment : IHostEnvironment - { - public string EnvironmentName { get; set; } = "Development"; - public string ApplicationName { get; set; } = "PolicyGatewayTests"; - public string ContentRootPath { get; set; } = AppContext.BaseDirectory; - public IFileProvider ContentRootFileProvider { get; set; } = new NullFileProvider(); - } -} +using System; +using System.Collections.Generic; +using System.Diagnostics.Metrics; +using System.Net; +using System.Net.Http; +using System.Security.Claims; +using System.Text; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.AspNetCore.Hosting; +using Microsoft.Extensions.FileProviders; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using Microsoft.Extensions.Hosting; +using Microsoft.IdentityModel.Tokens; +using StellaOps.Auth.Client; +using StellaOps.Policy.Gateway.Clients; +using StellaOps.Policy.Gateway.Contracts; +using StellaOps.Policy.Gateway.Options; +using StellaOps.Policy.Gateway.Services; +using Xunit; + +namespace StellaOps.Policy.Gateway.Tests; + +public class PolicyEngineClientTests +{ + [Fact] + public async Task ActivateRevision_UsesServiceTokenWhenForwardingContextMissing() + { + var options = CreateGatewayOptions(); + options.PolicyEngine.ClientCredentials.Enabled = true; + options.PolicyEngine.ClientCredentials.ClientId = "policy-gateway"; + options.PolicyEngine.ClientCredentials.ClientSecret = "secret"; + options.PolicyEngine.ClientCredentials.Scopes.Clear(); + options.PolicyEngine.ClientCredentials.Scopes.Add("policy:activate"); + options.PolicyEngine.BaseAddress = 
"https://policy-engine.test/"; + + var optionsMonitor = new TestOptionsMonitor(options); + var tokenClient = new StubTokenClient(); + var dpopGenerator = new PolicyGatewayDpopProofGenerator(new StubHostEnvironment(), optionsMonitor, TimeProvider.System, NullLogger<PolicyGatewayDpopProofGenerator>.Instance); + var tokenProvider = new PolicyEngineTokenProvider(tokenClient, optionsMonitor, dpopGenerator, TimeProvider.System, NullLogger<PolicyEngineTokenProvider>.Instance); + + using var recordingHandler = new RecordingHandler(); + using var httpClient = new HttpClient(recordingHandler) + { + BaseAddress = new Uri(options.PolicyEngine.BaseAddress) + }; + + var client = new PolicyEngineClient(httpClient, Microsoft.Extensions.Options.Options.Create(options), tokenProvider, NullLogger<PolicyEngineClient>.Instance); + + var request = new ActivatePolicyRevisionRequest("comment"); + var result = await client.ActivatePolicyRevisionAsync(null, "pack-123", 7, request, CancellationToken.None); + + Assert.True(result.IsSuccess); + Assert.NotNull(recordingHandler.LastRequest); + var authorization = recordingHandler.LastRequest!.Headers.Authorization; + Assert.NotNull(authorization); + Assert.Equal("Bearer", authorization!.Scheme); + Assert.Equal("service-token", authorization.Parameter); + Assert.Equal(1, tokenClient.RequestCount); + } + + [Fact] + public void Metrics_RecordActivation_EmitsExpectedTags() + { + using var metrics = new PolicyGatewayMetrics(); + using var listener = new MeterListener(); + var measurements = new List<(long Value, string Outcome, string Source)>(); + var latencies = new List<(double Value, string Outcome, string Source)>(); + + listener.InstrumentPublished += (instrument, meterListener) => + { + if (!string.Equals(instrument.Meter.Name, "StellaOps.Policy.Gateway", StringComparison.Ordinal)) + { + return; + } + + meterListener.EnableMeasurementEvents(instrument); + }; + + listener.SetMeasurementEventCallback<long>((instrument, value, tags, state) => + { + if (instrument.Name != "policy_gateway_activation_requests_total") + { + return; + } + + measurements.Add((value, GetTag(tags, "outcome"), GetTag(tags, "source"))); + }); + + listener.SetMeasurementEventCallback<double>((instrument, value, tags, state) => + { + if (instrument.Name != "policy_gateway_activation_latency_ms") + { + return; + } + + latencies.Add((value, GetTag(tags, "outcome"), GetTag(tags, "source"))); + }); + + listener.Start(); + + metrics.RecordActivation("activated", "service", 42.5); + + listener.Dispose(); + + Assert.Contains(measurements, entry => entry.Value == 1 && entry.Outcome == "activated" && entry.Source == "service"); + Assert.Contains(latencies, entry => entry.Outcome == "activated" && entry.Source == "service" && entry.Value == 42.5); + } + + private static string GetTag(ReadOnlySpan<KeyValuePair<string, object?>> tags, string key) + { + foreach (var tag in tags) + { + if (string.Equals(tag.Key, key, StringComparison.Ordinal)) + { + return tag.Value?.ToString() ?? string.Empty; + } + } + + return string.Empty; + } + + private static PolicyGatewayOptions CreateGatewayOptions() + { + return new PolicyGatewayOptions + { + PolicyEngine = + { + BaseAddress = "https://policy-engine.test/" + } + }; + } + + private sealed class TestOptionsMonitor : IOptionsMonitor<PolicyGatewayOptions> + { + public TestOptionsMonitor(PolicyGatewayOptions current) + { + CurrentValue = current; + } + + public PolicyGatewayOptions CurrentValue { get; } + + public PolicyGatewayOptions Get(string? 
name) => CurrentValue; + + public IDisposable OnChange(Action<PolicyGatewayOptions, string?> listener) => EmptyDisposable.Instance; + + private sealed class EmptyDisposable : IDisposable + { + public static readonly EmptyDisposable Instance = new(); + public void Dispose() + { + } + } + } + + private sealed class StubTokenClient : IStellaOpsTokenClient + { + public int RequestCount { get; private set; } + + public IReadOnlyDictionary<string, string>? LastAdditionalParameters { get; private set; } + + public Task<StellaOpsTokenResult> RequestClientCredentialsTokenAsync(string? scope = null, IReadOnlyDictionary<string, string>? additionalParameters = null, CancellationToken cancellationToken = default) + { + RequestCount++; + LastAdditionalParameters = additionalParameters; + return Task.FromResult(new StellaOpsTokenResult("service-token", "Bearer", DateTimeOffset.UtcNow.AddMinutes(5), Array.Empty<string>())); + } + + public Task<StellaOpsTokenResult> RequestPasswordTokenAsync(string username, string password, string? scope = null, IReadOnlyDictionary<string, string>? additionalParameters = null, CancellationToken cancellationToken = default) + => throw new NotSupportedException(); + + public Task<JsonWebKeySet> GetJsonWebKeySetAsync(CancellationToken cancellationToken = default) + => throw new NotSupportedException(); + + public ValueTask<StellaOpsTokenCacheEntry?> GetCachedTokenAsync(string key, CancellationToken cancellationToken = default) + => ValueTask.FromResult<StellaOpsTokenCacheEntry?>(null); + + public ValueTask CacheTokenAsync(string key, StellaOpsTokenCacheEntry entry, CancellationToken cancellationToken = default) + => ValueTask.CompletedTask; + + public ValueTask ClearCachedTokenAsync(string key, CancellationToken cancellationToken = default) + => ValueTask.CompletedTask; + } + + private sealed class RecordingHandler : HttpMessageHandler + { + public HttpRequestMessage? 
LastRequest { get; private set; } + + protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + LastRequest = request; + + var payload = JsonSerializer.Serialize(new PolicyRevisionActivationDto("activated", new PolicyRevisionDto(7, "Activated", false, DateTimeOffset.UtcNow, DateTimeOffset.UtcNow, Array.Empty<PolicyActivationApprovalDto>()))); + var response = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(payload, Encoding.UTF8, "application/json") + }; + + return Task.FromResult(response); + } + } + + private sealed class StubHostEnvironment : IHostEnvironment + { + public string EnvironmentName { get; set; } = "Development"; + public string ApplicationName { get; set; } = "PolicyGatewayTests"; + public string ContentRootPath { get; set; } = AppContext.BaseDirectory; + public IFileProvider ContentRootFileProvider { get; set; } = new NullFileProvider(); + } +} diff --git a/src/StellaOps.Policy.Gateway.Tests/PolicyGatewayDpopProofGeneratorTests.cs b/src/Policy/__Tests/StellaOps.Policy.Gateway.Tests/PolicyGatewayDpopProofGeneratorTests.cs similarity index 97% rename from src/StellaOps.Policy.Gateway.Tests/PolicyGatewayDpopProofGeneratorTests.cs rename to src/Policy/__Tests/StellaOps.Policy.Gateway.Tests/PolicyGatewayDpopProofGeneratorTests.cs index 453f410f..44bd840d 100644 --- a/src/StellaOps.Policy.Gateway.Tests/PolicyGatewayDpopProofGeneratorTests.cs +++ b/src/Policy/__Tests/StellaOps.Policy.Gateway.Tests/PolicyGatewayDpopProofGeneratorTests.cs @@ -1,167 +1,167 @@ -using System.Globalization; -using System.IdentityModel.Tokens.Jwt; -using System.Net.Http; -using System.Security.Cryptography; -using System.Text; -using Microsoft.Extensions.FileProviders; -using Microsoft.Extensions.Hosting; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.Extensions.Options; -using Microsoft.IdentityModel.Tokens; -using StellaOps.Policy.Gateway.Options; -using StellaOps.Policy.Gateway.Services; -using Xunit; - -namespace StellaOps.Policy.Gateway.Tests; - -public sealed class PolicyGatewayDpopProofGeneratorTests -{ - [Fact] - public void CreateProof_Throws_WhenDpopDisabled() - { - var options = CreateGatewayOptions(); - options.PolicyEngine.Dpop.Enabled = false; - - using var generator = new PolicyGatewayDpopProofGenerator( - new StubHostEnvironment(AppContext.BaseDirectory), - new TestOptionsMonitor(options), - TimeProvider.System, - NullLogger<PolicyGatewayDpopProofGenerator>.Instance); - - var exception = Assert.Throws<InvalidOperationException>(() => - generator.CreateProof(HttpMethod.Get, new Uri("https://policy-engine.example/api"), null)); - - Assert.Equal("DPoP proof requested while DPoP is disabled.", exception.Message); - } - - [Fact] - public void CreateProof_Throws_WhenKeyFileMissing() - { - var tempRoot = Directory.CreateTempSubdirectory(); - try - { - var options = CreateGatewayOptions(); - options.PolicyEngine.Dpop.Enabled = true; - options.PolicyEngine.Dpop.KeyPath = "missing-key.pem"; - - using var generator = new PolicyGatewayDpopProofGenerator( - new StubHostEnvironment(tempRoot.FullName), - new TestOptionsMonitor(options), - TimeProvider.System, - NullLogger<PolicyGatewayDpopProofGenerator>.Instance); - - var exception = Assert.Throws<FileNotFoundException>(() => - generator.CreateProof(HttpMethod.Post, new Uri("https://policy-engine.example/token"), null)); - - Assert.Contains("missing-key.pem", exception.FileName, StringComparison.Ordinal); - } - finally - { - 
tempRoot.Delete(recursive: true); - } - } - - [Fact] - public void CreateProof_UsesConfiguredAlgorithmAndEmbedsTokenHash() - { - var tempRoot = Directory.CreateTempSubdirectory(); - try - { - var keyPath = CreateEcKey(tempRoot, ECCurve.NamedCurves.nistP384); - var options = CreateGatewayOptions(); - options.PolicyEngine.Dpop.Enabled = true; - options.PolicyEngine.Dpop.KeyPath = keyPath; - options.PolicyEngine.Dpop.Algorithm = "ES384"; - - using var generator = new PolicyGatewayDpopProofGenerator( - new StubHostEnvironment(tempRoot.FullName), - new TestOptionsMonitor(options), - TimeProvider.System, - NullLogger<PolicyGatewayDpopProofGenerator>.Instance); - - const string accessToken = "sample-access-token"; - var proof = generator.CreateProof(HttpMethod.Delete, new Uri("https://policy-engine.example/api/resource"), accessToken); - - var token = new JwtSecurityTokenHandler().ReadJwtToken(proof); - - Assert.Equal("dpop+jwt", token.Header.Typ); - Assert.Equal("ES384", token.Header.Alg); - Assert.Equal("DELETE", token.Payload.TryGetValue("htm", out var method) ? method?.ToString() : null); - Assert.Equal("https://policy-engine.example/api/resource", token.Payload.TryGetValue("htu", out var uri) ? uri?.ToString() : null); - - Assert.True(token.Payload.TryGetValue("iat", out var issuedAt)); - Assert.True(long.TryParse(Convert.ToString(issuedAt, CultureInfo.InvariantCulture), out var epoch)); - Assert.True(epoch > 0); - - Assert.True(token.Payload.TryGetValue("jti", out var jti)); - Assert.False(string.IsNullOrWhiteSpace(Convert.ToString(jti, CultureInfo.InvariantCulture))); - - Assert.True(token.Payload.TryGetValue("ath", out var ath)); - var expectedHash = Base64UrlEncoder.Encode(SHA256.HashData(Encoding.UTF8.GetBytes(accessToken))); - Assert.Equal(expectedHash, ath?.ToString()); - } - finally - { - tempRoot.Delete(recursive: true); - } - } - - private static PolicyGatewayOptions CreateGatewayOptions() - { - return new PolicyGatewayOptions - { - PolicyEngine = - { - BaseAddress = "https://policy-engine.example" - } - }; - } - - private static string CreateEcKey(DirectoryInfo directory, ECCurve curve) - { - using var ecdsa = ECDsa.Create(curve); - var privateKey = ecdsa.ExportPkcs8PrivateKey(); - var pem = PemEncoding.Write("PRIVATE KEY", privateKey); - var path = Path.Combine(directory.FullName, "policy-gateway-dpop.pem"); - File.WriteAllText(path, pem); - return path; - } - - private sealed class StubHostEnvironment : IHostEnvironment - { - public StubHostEnvironment(string contentRootPath) - { - ContentRootPath = contentRootPath; - } - - public string ApplicationName { get; set; } = "PolicyGatewayTests"; - - public IFileProvider ContentRootFileProvider { get; set; } = new NullFileProvider(); - - public string ContentRootPath { get; set; } - - public string EnvironmentName { get; set; } = Environments.Development; - } - - private sealed class TestOptionsMonitor : IOptionsMonitor<PolicyGatewayOptions> - { - public TestOptionsMonitor(PolicyGatewayOptions current) - { - CurrentValue = current; - } - - public PolicyGatewayOptions CurrentValue { get; } - - public PolicyGatewayOptions Get(string? 
name) => CurrentValue; - - public IDisposable OnChange(Action<PolicyGatewayOptions, string?> listener) => EmptyDisposable.Instance; - - private sealed class EmptyDisposable : IDisposable - { - public static readonly EmptyDisposable Instance = new(); - public void Dispose() - { - } - } - } -} +using System.Globalization; +using System.IdentityModel.Tokens.Jwt; +using System.Net.Http; +using System.Security.Cryptography; +using System.Text; +using Microsoft.Extensions.FileProviders; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using Microsoft.IdentityModel.Tokens; +using StellaOps.Policy.Gateway.Options; +using StellaOps.Policy.Gateway.Services; +using Xunit; + +namespace StellaOps.Policy.Gateway.Tests; + +public sealed class PolicyGatewayDpopProofGeneratorTests +{ + [Fact] + public void CreateProof_Throws_WhenDpopDisabled() + { + var options = CreateGatewayOptions(); + options.PolicyEngine.Dpop.Enabled = false; + + using var generator = new PolicyGatewayDpopProofGenerator( + new StubHostEnvironment(AppContext.BaseDirectory), + new TestOptionsMonitor(options), + TimeProvider.System, + NullLogger<PolicyGatewayDpopProofGenerator>.Instance); + + var exception = Assert.Throws<InvalidOperationException>(() => + generator.CreateProof(HttpMethod.Get, new Uri("https://policy-engine.example/api"), null)); + + Assert.Equal("DPoP proof requested while DPoP is disabled.", exception.Message); + } + + [Fact] + public void CreateProof_Throws_WhenKeyFileMissing() + { + var tempRoot = Directory.CreateTempSubdirectory(); + try + { + var options = CreateGatewayOptions(); + options.PolicyEngine.Dpop.Enabled = true; + options.PolicyEngine.Dpop.KeyPath = "missing-key.pem"; + + using var generator = new PolicyGatewayDpopProofGenerator( + new StubHostEnvironment(tempRoot.FullName), + new TestOptionsMonitor(options), + TimeProvider.System, + NullLogger<PolicyGatewayDpopProofGenerator>.Instance); + + var exception = Assert.Throws<FileNotFoundException>(() => + generator.CreateProof(HttpMethod.Post, new Uri("https://policy-engine.example/token"), null)); + + Assert.Contains("missing-key.pem", exception.FileName, StringComparison.Ordinal); + } + finally + { + tempRoot.Delete(recursive: true); + } + } + + [Fact] + public void CreateProof_UsesConfiguredAlgorithmAndEmbedsTokenHash() + { + var tempRoot = Directory.CreateTempSubdirectory(); + try + { + var keyPath = CreateEcKey(tempRoot, ECCurve.NamedCurves.nistP384); + var options = CreateGatewayOptions(); + options.PolicyEngine.Dpop.Enabled = true; + options.PolicyEngine.Dpop.KeyPath = keyPath; + options.PolicyEngine.Dpop.Algorithm = "ES384"; + + using var generator = new PolicyGatewayDpopProofGenerator( + new StubHostEnvironment(tempRoot.FullName), + new TestOptionsMonitor(options), + TimeProvider.System, + NullLogger<PolicyGatewayDpopProofGenerator>.Instance); + + const string accessToken = "sample-access-token"; + var proof = generator.CreateProof(HttpMethod.Delete, new Uri("https://policy-engine.example/api/resource"), accessToken); + + var token = new JwtSecurityTokenHandler().ReadJwtToken(proof); + + Assert.Equal("dpop+jwt", token.Header.Typ); + Assert.Equal("ES384", token.Header.Alg); + Assert.Equal("DELETE", token.Payload.TryGetValue("htm", out var method) ? method?.ToString() : null); + Assert.Equal("https://policy-engine.example/api/resource", token.Payload.TryGetValue("htu", out var uri) ? 
uri?.ToString() : null); + + Assert.True(token.Payload.TryGetValue("iat", out var issuedAt)); + Assert.True(long.TryParse(Convert.ToString(issuedAt, CultureInfo.InvariantCulture), out var epoch)); + Assert.True(epoch > 0); + + Assert.True(token.Payload.TryGetValue("jti", out var jti)); + Assert.False(string.IsNullOrWhiteSpace(Convert.ToString(jti, CultureInfo.InvariantCulture))); + + Assert.True(token.Payload.TryGetValue("ath", out var ath)); + var expectedHash = Base64UrlEncoder.Encode(SHA256.HashData(Encoding.UTF8.GetBytes(accessToken))); + Assert.Equal(expectedHash, ath?.ToString()); + } + finally + { + tempRoot.Delete(recursive: true); + } + } + + private static PolicyGatewayOptions CreateGatewayOptions() + { + return new PolicyGatewayOptions + { + PolicyEngine = + { + BaseAddress = "https://policy-engine.example" + } + }; + } + + private static string CreateEcKey(DirectoryInfo directory, ECCurve curve) + { + using var ecdsa = ECDsa.Create(curve); + var privateKey = ecdsa.ExportPkcs8PrivateKey(); + var pem = PemEncoding.Write("PRIVATE KEY", privateKey); + var path = Path.Combine(directory.FullName, "policy-gateway-dpop.pem"); + File.WriteAllText(path, pem); + return path; + } + + private sealed class StubHostEnvironment : IHostEnvironment + { + public StubHostEnvironment(string contentRootPath) + { + ContentRootPath = contentRootPath; + } + + public string ApplicationName { get; set; } = "PolicyGatewayTests"; + + public IFileProvider ContentRootFileProvider { get; set; } = new NullFileProvider(); + + public string ContentRootPath { get; set; } + + public string EnvironmentName { get; set; } = Environments.Development; + } + + private sealed class TestOptionsMonitor : IOptionsMonitor<PolicyGatewayOptions> + { + public TestOptionsMonitor(PolicyGatewayOptions current) + { + CurrentValue = current; + } + + public PolicyGatewayOptions CurrentValue { get; } + + public PolicyGatewayOptions Get(string? 
name) => CurrentValue; + + public IDisposable OnChange(Action<PolicyGatewayOptions, string?> listener) => EmptyDisposable.Instance; + + private sealed class EmptyDisposable : IDisposable + { + public static readonly EmptyDisposable Instance = new(); + public void Dispose() + { + } + } + } +} diff --git a/src/StellaOps.Policy.Gateway.Tests/StellaOps.Policy.Gateway.Tests.csproj b/src/Policy/__Tests/StellaOps.Policy.Gateway.Tests/StellaOps.Policy.Gateway.Tests.csproj similarity index 60% rename from src/StellaOps.Policy.Gateway.Tests/StellaOps.Policy.Gateway.Tests.csproj rename to src/Policy/__Tests/StellaOps.Policy.Gateway.Tests/StellaOps.Policy.Gateway.Tests.csproj index 168ebc71..4b9679c7 100644 --- a/src/StellaOps.Policy.Gateway.Tests/StellaOps.Policy.Gateway.Tests.csproj +++ b/src/Policy/__Tests/StellaOps.Policy.Gateway.Tests/StellaOps.Policy.Gateway.Tests.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -6,6 +7,6 @@ </PropertyGroup> <ItemGroup> - <ProjectReference Include="..\StellaOps.Policy.Gateway\StellaOps.Policy.Gateway.csproj" /> + <ProjectReference Include="../../StellaOps.Policy.Gateway/StellaOps.Policy.Gateway.csproj" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Policy.Tests/PolicyBinderTests.cs b/src/Policy/__Tests/StellaOps.Policy.Tests/PolicyBinderTests.cs similarity index 100% rename from src/StellaOps.Policy.Tests/PolicyBinderTests.cs rename to src/Policy/__Tests/StellaOps.Policy.Tests/PolicyBinderTests.cs diff --git a/src/StellaOps.Policy.Tests/PolicyEvaluationTests.cs b/src/Policy/__Tests/StellaOps.Policy.Tests/PolicyEvaluationTests.cs similarity index 100% rename from src/StellaOps.Policy.Tests/PolicyEvaluationTests.cs rename to src/Policy/__Tests/StellaOps.Policy.Tests/PolicyEvaluationTests.cs diff --git a/src/StellaOps.Policy.Tests/PolicyPreviewServiceTests.cs b/src/Policy/__Tests/StellaOps.Policy.Tests/PolicyPreviewServiceTests.cs similarity index 100% rename from src/StellaOps.Policy.Tests/PolicyPreviewServiceTests.cs rename to src/Policy/__Tests/StellaOps.Policy.Tests/PolicyPreviewServiceTests.cs diff --git a/src/StellaOps.Policy.Tests/PolicyScoringConfigTests.cs b/src/Policy/__Tests/StellaOps.Policy.Tests/PolicyScoringConfigTests.cs similarity index 100% rename from src/StellaOps.Policy.Tests/PolicyScoringConfigTests.cs rename to src/Policy/__Tests/StellaOps.Policy.Tests/PolicyScoringConfigTests.cs diff --git a/src/StellaOps.Policy.Tests/PolicySnapshotStoreTests.cs b/src/Policy/__Tests/StellaOps.Policy.Tests/PolicySnapshotStoreTests.cs similarity index 100% rename from src/StellaOps.Policy.Tests/PolicySnapshotStoreTests.cs rename to src/Policy/__Tests/StellaOps.Policy.Tests/PolicySnapshotStoreTests.cs diff --git a/src/StellaOps.Policy.Tests/StellaOps.Policy.Tests.csproj b/src/Policy/__Tests/StellaOps.Policy.Tests/StellaOps.Policy.Tests.csproj similarity index 69% rename from src/StellaOps.Policy.Tests/StellaOps.Policy.Tests.csproj rename to src/Policy/__Tests/StellaOps.Policy.Tests/StellaOps.Policy.Tests.csproj index 77bb0e5e..3a327538 100644 --- a/src/StellaOps.Policy.Tests/StellaOps.Policy.Tests.csproj +++ b/src/Policy/__Tests/StellaOps.Policy.Tests/StellaOps.Policy.Tests.csproj @@ -1,13 +1,14 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - 
<TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Policy\StellaOps.Policy.csproj" /> - </ItemGroup> - -</Project> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Policy/StellaOps.Policy.csproj" /> + </ItemGroup> + +</Project> \ No newline at end of file diff --git a/src/StellaOps.Provenance.Attestation/AGENTS.md b/src/Provenance/StellaOps.Provenance.Attestation/AGENTS.md similarity index 98% rename from src/StellaOps.Provenance.Attestation/AGENTS.md rename to src/Provenance/StellaOps.Provenance.Attestation/AGENTS.md index b09e6c6a..09c103f8 100644 --- a/src/StellaOps.Provenance.Attestation/AGENTS.md +++ b/src/Provenance/StellaOps.Provenance.Attestation/AGENTS.md @@ -1,20 +1,20 @@ -# StellaOps Provenance & Attestation Guild Charter - -## Mission -Provide shared libraries and tooling for generating, signing, and verifying provenance attestations (DSSE/SLSA) used by evidence bundles, exports, and timeline verification flows. - -## Scope -- DSSE statement builders with Merkle and digest utilities. -- Signer/validator abstractions for KMS, cosign, offline keys. -- Provenance schema definitions reused across services and CLI. -- Verification harnesses for evidence locker and export center integrations. - -## Collaboration -- Partner with Evidence Locker, Exporter, Orchestrator, and CLI guilds for integration. -- Coordinate with Security Guild on key management policies and rotation logs. -- Ensure docs in `/docs/forensics/provenance-attestation.md` stay aligned with implementation. - -## Definition of Done -- Libraries ship with deterministic serialization tests. -- Threat model reviewed before each release. -- Sample statements and verification scripts committed under `samples/provenance/`. +# StellaOps Provenance & Attestation Guild Charter + +## Mission +Provide shared libraries and tooling for generating, signing, and verifying provenance attestations (DSSE/SLSA) used by evidence bundles, exports, and timeline verification flows. + +## Scope +- DSSE statement builders with Merkle and digest utilities. +- Signer/validator abstractions for KMS, cosign, offline keys. +- Provenance schema definitions reused across services and CLI. +- Verification harnesses for evidence locker and export center integrations. + +## Collaboration +- Partner with Evidence Locker, Exporter, Orchestrator, and CLI guilds for integration. +- Coordinate with Security Guild on key management policies and rotation logs. +- Ensure docs in `/docs/forensics/provenance-attestation.md` stay aligned with implementation. + +## Definition of Done +- Libraries ship with deterministic serialization tests. +- Threat model reviewed before each release. +- Sample statements and verification scripts committed under `samples/provenance/`. 
diff --git a/src/StellaOps.Provenance.Attestation/TASKS.md b/src/Provenance/StellaOps.Provenance.Attestation/TASKS.md similarity index 99% rename from src/StellaOps.Provenance.Attestation/TASKS.md rename to src/Provenance/StellaOps.Provenance.Attestation/TASKS.md index da922fbe..7ea48c56 100644 --- a/src/StellaOps.Provenance.Attestation/TASKS.md +++ b/src/Provenance/StellaOps.Provenance.Attestation/TASKS.md @@ -1,13 +1,13 @@ -# Provenance & Attestation Task Board — Epic 15: Observability & Forensics - -## Sprint 53 – Evidence Bundle Foundations -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| PROV-OBS-53-001 | TODO | Provenance Guild | TELEMETRY-OBS-50-001 | Implement DSSE/SLSA `BuildDefinition` + `BuildMetadata` models with canonical JSON serializer, Merkle digest helpers, and deterministic hashing tests. Publish sample statements for orchestrator/job/export subjects. | Models serialized deterministically; test vectors stored under `samples/provenance/`; compliance checklist recorded. | -| PROV-OBS-53-002 | TODO | Provenance Guild, Security Guild | PROV-OBS-53-001 | Build signer abstraction (cosign/KMS/offline) with key rotation hooks, audit logging, and policy enforcement (required claims). Provide unit tests using fake signer + real cosign fixture. | Signer abstraction delivers DSSE envelopes; rotation docs updated; tests cover key expiry + claim enforcement. | - -## Sprint 54 – Verification Tooling -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| PROV-OBS-54-001 | TODO | Provenance Guild, Evidence Locker Guild | PROV-OBS-53-002, EVID-OBS-53-001 | Deliver verification library that validates DSSE signatures, Merkle roots, and timeline chain-of-custody, exposing reusable CLI/service APIs. Include negative-case fixtures and offline timestamp verification. | Verification API integrated into evidence locker; tests cover success/failure; timestamp (RFC3161) optional hook documented. | -| PROV-OBS-54-002 | TODO | Provenance Guild, DevEx/CLI Guild | PROV-OBS-54-001, CLI-FORENSICS-54-001 | Generate .NET global tool for local verification + embed command helpers for CLI `stella forensic verify`. Provide deterministic packaging and offline kit instructions. | Tool published to `local-nuget`; CLI integration tests pass; offline instructions documented. | +# Provenance & Attestation Task Board — Epic 15: Observability & Forensics + +## Sprint 53 – Evidence Bundle Foundations +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| PROV-OBS-53-001 | TODO | Provenance Guild | TELEMETRY-OBS-50-001 | Implement DSSE/SLSA `BuildDefinition` + `BuildMetadata` models with canonical JSON serializer, Merkle digest helpers, and deterministic hashing tests. Publish sample statements for orchestrator/job/export subjects. | Models serialized deterministically; test vectors stored under `samples/provenance/`; compliance checklist recorded. | +| PROV-OBS-53-002 | TODO | Provenance Guild, Security Guild | PROV-OBS-53-001 | Build signer abstraction (cosign/KMS/offline) with key rotation hooks, audit logging, and policy enforcement (required claims). Provide unit tests using fake signer + real cosign fixture. | Signer abstraction delivers DSSE envelopes; rotation docs updated; tests cover key expiry + claim enforcement. 
| + +## Sprint 54 – Verification Tooling +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| PROV-OBS-54-001 | TODO | Provenance Guild, Evidence Locker Guild | PROV-OBS-53-002, EVID-OBS-53-001 | Deliver verification library that validates DSSE signatures, Merkle roots, and timeline chain-of-custody, exposing reusable CLI/service APIs. Include negative-case fixtures and offline timestamp verification. | Verification API integrated into evidence locker; tests cover success/failure; timestamp (RFC3161) optional hook documented. | +| PROV-OBS-54-002 | TODO | Provenance Guild, DevEx/CLI Guild | PROV-OBS-54-001, CLI-FORENSICS-54-001 | Generate .NET global tool for local verification + embed command helpers for CLI `stella forensic verify`. Provide deterministic packaging and offline kit instructions. | Tool published to `local-nuget`; CLI integration tests pass; offline instructions documented. | diff --git a/src/StellaOps.Registry.TokenService/Observability/RegistryTokenMetrics.cs b/src/Registry/StellaOps.Registry.TokenService/Observability/RegistryTokenMetrics.cs similarity index 96% rename from src/StellaOps.Registry.TokenService/Observability/RegistryTokenMetrics.cs rename to src/Registry/StellaOps.Registry.TokenService/Observability/RegistryTokenMetrics.cs index 22452cf4..8d4ce08d 100644 --- a/src/StellaOps.Registry.TokenService/Observability/RegistryTokenMetrics.cs +++ b/src/Registry/StellaOps.Registry.TokenService/Observability/RegistryTokenMetrics.cs @@ -1,34 +1,34 @@ -using System; -using System.Diagnostics.Metrics; - -namespace StellaOps.Registry.TokenService.Observability; - -public sealed class RegistryTokenMetrics : IDisposable -{ - public const string MeterName = "StellaOps.Registry.TokenService"; - - private readonly Meter _meter; - private bool _disposed; - - public RegistryTokenMetrics() - { - _meter = new Meter(MeterName); - TokensIssued = _meter.CreateCounter<long>("registry_token_issued_total", unit: "tokens", description: "Total tokens issued grouped by plan."); - TokensRejected = _meter.CreateCounter<long>("registry_token_rejected_total", unit: "tokens", description: "Total token requests rejected grouped by reason."); - } - - public Counter<long> TokensIssued { get; } - - public Counter<long> TokensRejected { get; } - - public void Dispose() - { - if (_disposed) - { - return; - } - - _meter.Dispose(); - _disposed = true; - } -} +using System; +using System.Diagnostics.Metrics; + +namespace StellaOps.Registry.TokenService.Observability; + +public sealed class RegistryTokenMetrics : IDisposable +{ + public const string MeterName = "StellaOps.Registry.TokenService"; + + private readonly Meter _meter; + private bool _disposed; + + public RegistryTokenMetrics() + { + _meter = new Meter(MeterName); + TokensIssued = _meter.CreateCounter<long>("registry_token_issued_total", unit: "tokens", description: "Total tokens issued grouped by plan."); + TokensRejected = _meter.CreateCounter<long>("registry_token_rejected_total", unit: "tokens", description: "Total token requests rejected grouped by reason."); + } + + public Counter<long> TokensIssued { get; } + + public Counter<long> TokensRejected { get; } + + public void Dispose() + { + if (_disposed) + { + return; + } + + _meter.Dispose(); + _disposed = true; + } +} diff --git a/src/StellaOps.Registry.TokenService/PlanRegistry.cs b/src/Registry/StellaOps.Registry.TokenService/PlanRegistry.cs similarity index 96% rename from 
src/StellaOps.Registry.TokenService/PlanRegistry.cs rename to src/Registry/StellaOps.Registry.TokenService/PlanRegistry.cs index a5f3a6ed..acedb1c4 100644 --- a/src/StellaOps.Registry.TokenService/PlanRegistry.cs +++ b/src/Registry/StellaOps.Registry.TokenService/PlanRegistry.cs @@ -1,150 +1,150 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Security.Claims; -using System.Text.RegularExpressions; - -namespace StellaOps.Registry.TokenService; - -/// <summary> -/// Evaluates repository access against configured plan rules. -/// </summary> -public sealed class PlanRegistry -{ - private readonly IReadOnlyDictionary<string, PlanDescriptor> _plans; - private readonly IReadOnlySet<string> _revokedLicenses; - private readonly string? _defaultPlan; - - public PlanRegistry(RegistryTokenServiceOptions options) - { - ArgumentNullException.ThrowIfNull(options); - - _plans = options.Plans - .Select(plan => new PlanDescriptor(plan)) - .ToDictionary(static plan => plan.Name, StringComparer.OrdinalIgnoreCase); - - _revokedLicenses = options.RevokedLicenses.Count == 0 - ? new HashSet<string>(StringComparer.OrdinalIgnoreCase) - : new HashSet<string>(options.RevokedLicenses, StringComparer.OrdinalIgnoreCase); - - _defaultPlan = options.DefaultPlan; - } - - public RegistryAccessDecision Authorize( - ClaimsPrincipal principal, - IReadOnlyList<RegistryAccessRequest> requests) - { - ArgumentNullException.ThrowIfNull(principal); - ArgumentNullException.ThrowIfNull(requests); - - if (requests.Count == 0) - { - return new RegistryAccessDecision(false, "no_scopes_requested"); - } - - var licenseId = principal.FindFirstValue("stellaops:license")?.Trim(); - if (!string.IsNullOrEmpty(licenseId) && _revokedLicenses.Contains(licenseId)) - { - return new RegistryAccessDecision(false, "license_revoked"); - } - - var planName = principal.FindFirstValue("stellaops:plan")?.Trim(); - if (string.IsNullOrEmpty(planName)) - { - planName = _defaultPlan; - } - - if (string.IsNullOrEmpty(planName) || !_plans.TryGetValue(planName, out var descriptor)) - { - return new RegistryAccessDecision(false, "plan_unknown"); - } - - foreach (var request in requests) - { - if (!descriptor.IsRepositoryAllowed(request)) - { - return new RegistryAccessDecision(false, "scope_not_permitted"); - } - } - - return new RegistryAccessDecision(true); - } - - private sealed class PlanDescriptor - { - private readonly IReadOnlyList<RepositoryDescriptor> _repositories; - - public PlanDescriptor(RegistryTokenServiceOptions.PlanRule source) - { - Name = source.Name; - _repositories = source.Repositories - .Select(rule => new RepositoryDescriptor(rule)) - .ToArray(); - } - - public string Name { get; } - - public bool IsRepositoryAllowed(RegistryAccessRequest request) - { - if (!string.Equals(request.Type, "repository", StringComparison.OrdinalIgnoreCase)) - { - return false; - } - - foreach (var repo in _repositories) - { - if (!repo.Matches(request.Name)) - { - continue; - } - - if (repo.AllowsActions(request.Actions)) - { - return true; - } - } - - return false; - } - } - - private sealed class RepositoryDescriptor - { - private readonly Regex _pattern; - private readonly IReadOnlySet<string> _allowedActions; - - public RepositoryDescriptor(RegistryTokenServiceOptions.RepositoryRule rule) - { - Pattern = rule.Pattern; - _pattern = Compile(rule.Pattern); - _allowedActions = new HashSet<string>(rule.Actions, StringComparer.OrdinalIgnoreCase); - } - - public string Pattern { get; } - - public bool Matches(string repository) 
- { - return _pattern.IsMatch(repository); - } - - public bool AllowsActions(IReadOnlyList<string> actions) - { - foreach (var action in actions) - { - if (!_allowedActions.Contains(action)) - { - return false; - } - } - - return true; - } - - private static Regex Compile(string pattern) - { - var escaped = Regex.Escape(pattern); - escaped = escaped.Replace(@"\*", ".*", StringComparison.Ordinal); - return new Regex($"^{escaped}$", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant | RegexOptions.Compiled); - } - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Security.Claims; +using System.Text.RegularExpressions; + +namespace StellaOps.Registry.TokenService; + +/// <summary> +/// Evaluates repository access against configured plan rules. +/// </summary> +public sealed class PlanRegistry +{ + private readonly IReadOnlyDictionary<string, PlanDescriptor> _plans; + private readonly IReadOnlySet<string> _revokedLicenses; + private readonly string? _defaultPlan; + + public PlanRegistry(RegistryTokenServiceOptions options) + { + ArgumentNullException.ThrowIfNull(options); + + _plans = options.Plans + .Select(plan => new PlanDescriptor(plan)) + .ToDictionary(static plan => plan.Name, StringComparer.OrdinalIgnoreCase); + + _revokedLicenses = options.RevokedLicenses.Count == 0 + ? new HashSet<string>(StringComparer.OrdinalIgnoreCase) + : new HashSet<string>(options.RevokedLicenses, StringComparer.OrdinalIgnoreCase); + + _defaultPlan = options.DefaultPlan; + } + + public RegistryAccessDecision Authorize( + ClaimsPrincipal principal, + IReadOnlyList<RegistryAccessRequest> requests) + { + ArgumentNullException.ThrowIfNull(principal); + ArgumentNullException.ThrowIfNull(requests); + + if (requests.Count == 0) + { + return new RegistryAccessDecision(false, "no_scopes_requested"); + } + + var licenseId = principal.FindFirstValue("stellaops:license")?.Trim(); + if (!string.IsNullOrEmpty(licenseId) && _revokedLicenses.Contains(licenseId)) + { + return new RegistryAccessDecision(false, "license_revoked"); + } + + var planName = principal.FindFirstValue("stellaops:plan")?.Trim(); + if (string.IsNullOrEmpty(planName)) + { + planName = _defaultPlan; + } + + if (string.IsNullOrEmpty(planName) || !_plans.TryGetValue(planName, out var descriptor)) + { + return new RegistryAccessDecision(false, "plan_unknown"); + } + + foreach (var request in requests) + { + if (!descriptor.IsRepositoryAllowed(request)) + { + return new RegistryAccessDecision(false, "scope_not_permitted"); + } + } + + return new RegistryAccessDecision(true); + } + + private sealed class PlanDescriptor + { + private readonly IReadOnlyList<RepositoryDescriptor> _repositories; + + public PlanDescriptor(RegistryTokenServiceOptions.PlanRule source) + { + Name = source.Name; + _repositories = source.Repositories + .Select(rule => new RepositoryDescriptor(rule)) + .ToArray(); + } + + public string Name { get; } + + public bool IsRepositoryAllowed(RegistryAccessRequest request) + { + if (!string.Equals(request.Type, "repository", StringComparison.OrdinalIgnoreCase)) + { + return false; + } + + foreach (var repo in _repositories) + { + if (!repo.Matches(request.Name)) + { + continue; + } + + if (repo.AllowsActions(request.Actions)) + { + return true; + } + } + + return false; + } + } + + private sealed class RepositoryDescriptor + { + private readonly Regex _pattern; + private readonly IReadOnlySet<string> _allowedActions; + + public RepositoryDescriptor(RegistryTokenServiceOptions.RepositoryRule rule) + { + 
Pattern = rule.Pattern; + _pattern = Compile(rule.Pattern); + _allowedActions = new HashSet<string>(rule.Actions, StringComparer.OrdinalIgnoreCase); + } + + public string Pattern { get; } + + public bool Matches(string repository) + { + return _pattern.IsMatch(repository); + } + + public bool AllowsActions(IReadOnlyList<string> actions) + { + foreach (var action in actions) + { + if (!_allowedActions.Contains(action)) + { + return false; + } + } + + return true; + } + + private static Regex Compile(string pattern) + { + var escaped = Regex.Escape(pattern); + escaped = escaped.Replace(@"\*", ".*", StringComparison.Ordinal); + return new Regex($"^{escaped}$", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant | RegexOptions.Compiled); + } + } +} diff --git a/src/StellaOps.Registry.TokenService/Program.cs b/src/Registry/StellaOps.Registry.TokenService/Program.cs similarity index 97% rename from src/StellaOps.Registry.TokenService/Program.cs rename to src/Registry/StellaOps.Registry.TokenService/Program.cs index 52dcdc4e..eacaef0e 100644 --- a/src/StellaOps.Registry.TokenService/Program.cs +++ b/src/Registry/StellaOps.Registry.TokenService/Program.cs @@ -1,171 +1,171 @@ -using System.Net; -using Microsoft.AspNetCore.Authentication; -using Microsoft.AspNetCore.Authorization; -using Microsoft.AspNetCore.Mvc; -using Microsoft.Extensions.Options; -using OpenTelemetry.Instrumentation.AspNetCore; -using OpenTelemetry.Instrumentation.Runtime; -using OpenTelemetry.Metrics; -using Serilog; -using Serilog.Events; -using StellaOps.Auth.Abstractions; -using StellaOps.Auth.ServerIntegration; -using StellaOps.Configuration; -using StellaOps.Registry.TokenService; -using StellaOps.Registry.TokenService.Observability; - -var builder = WebApplication.CreateBuilder(args); - -builder.Configuration.AddStellaOpsDefaults(options => -{ - options.BasePath = builder.Environment.ContentRootPath; - options.EnvironmentPrefix = "REGISTRY_TOKEN_"; - options.ConfigureBuilder = configurationBuilder => - { - configurationBuilder.AddYamlFile("../etc/registry-token.yaml", optional: true, reloadOnChange: true); - }; -}); - -var bootstrapOptions = builder.Configuration.BindOptions<RegistryTokenServiceOptions>( - RegistryTokenServiceOptions.SectionName, - (opts, _) => opts.Validate()); - -builder.Host.UseSerilog((context, services, loggerConfiguration) => -{ - loggerConfiguration - .MinimumLevel.Information() - .MinimumLevel.Override("Microsoft.AspNetCore", LogEventLevel.Warning) - .Enrich.FromLogContext() - .WriteTo.Console(); -}); - -builder.Services.AddOptions<RegistryTokenServiceOptions>() - .Bind(builder.Configuration.GetSection(RegistryTokenServiceOptions.SectionName)) - .PostConfigure(options => options.Validate()) - .ValidateOnStart(); - -builder.Services.AddSingleton(TimeProvider.System); -builder.Services.AddSingleton<RegistryTokenMetrics>(); -builder.Services.AddSingleton<PlanRegistry>(sp => -{ - var options = sp.GetRequiredService<IOptions<RegistryTokenServiceOptions>>().Value; - return new PlanRegistry(options); -}); -builder.Services.AddSingleton<RegistryTokenIssuer>(); - -builder.Services.AddHealthChecks().AddCheck("self", () => Microsoft.Extensions.Diagnostics.HealthChecks.HealthCheckResult.Healthy()); - -builder.Services.AddOpenTelemetry() - .WithMetrics(metricsBuilder => - { - metricsBuilder.AddMeter(RegistryTokenMetrics.MeterName); - metricsBuilder.AddAspNetCoreInstrumentation(); - metricsBuilder.AddRuntimeInstrumentation(); - }); - -builder.Services.AddStellaOpsResourceServerAuthentication( - 
builder.Configuration, - configurationSection: null, - configure: resourceOptions => - { - resourceOptions.Authority = bootstrapOptions.Authority.Issuer; - resourceOptions.RequireHttpsMetadata = bootstrapOptions.Authority.RequireHttpsMetadata; - resourceOptions.MetadataAddress = bootstrapOptions.Authority.MetadataAddress; - - resourceOptions.Audiences.Clear(); - foreach (var audience in bootstrapOptions.Authority.Audiences) - { - resourceOptions.Audiences.Add(audience); - } - }); - -builder.Services.AddAuthorization(options => -{ - var scopes = bootstrapOptions.Authority.RequiredScopes.Count == 0 - ? new[] { "registry.token.issue" } - : bootstrapOptions.Authority.RequiredScopes.ToArray(); - - options.AddPolicy("registry.token.issue", policy => - { - policy.RequireAuthenticatedUser(); - policy.Requirements.Add(new StellaOpsScopeRequirement(scopes)); - policy.AddAuthenticationSchemes(StellaOpsAuthenticationDefaults.AuthenticationScheme); - }); -}); - -var app = builder.Build(); - -app.UseSerilogRequestLogging(); -app.UseAuthentication(); -app.UseAuthorization(); - -app.MapHealthChecks("/healthz"); - -app.MapGet("/token", ( - HttpContext context, - [FromServices] IOptions<RegistryTokenServiceOptions> options, - [FromServices] RegistryTokenIssuer issuer) => -{ - var serviceOptions = options.Value; - - var service = context.Request.Query["service"].FirstOrDefault()?.Trim(); - if (string.IsNullOrWhiteSpace(service)) - { - return Results.Problem( - detail: "The 'service' query parameter is required.", - statusCode: StatusCodes.Status400BadRequest); - } - - if (serviceOptions.Registry.AllowedServices.Count > 0 && - !serviceOptions.Registry.AllowedServices.Contains(service, StringComparer.OrdinalIgnoreCase)) - { - return Results.Problem( - detail: "The requested registry service is not permitted for this installation.", - statusCode: StatusCodes.Status403Forbidden); - } - - IReadOnlyList<RegistryAccessRequest> accessRequests; - try - { - accessRequests = RegistryScopeParser.Parse(context.Request.Query); - } - catch (InvalidScopeException ex) - { - return Results.Problem( - detail: ex.Message, - statusCode: StatusCodes.Status400BadRequest); - } - - if (accessRequests.Count == 0) - { - return Results.Problem( - detail: "At least one scope must be requested.", - statusCode: StatusCodes.Status400BadRequest); - } - - try - { - var response = issuer.IssueToken(context.User, service, accessRequests); - - return Results.Json(new - { - token = response.Token, - expires_in = response.ExpiresIn, - issued_at = response.IssuedAt.UtcDateTime.ToString("O"), - issued_token_type = "urn:ietf:params:oauth:token-type:access_token" - }); - } - catch (RegistryTokenException ex) - { - return Results.Problem( - detail: ex.Message, - statusCode: StatusCodes.Status403Forbidden); - } -}) -.WithName("GetRegistryToken") -.RequireAuthorization("registry.token.issue") -.Produces(StatusCodes.Status200OK) -.ProducesProblem(StatusCodes.Status400BadRequest) -.ProducesProblem(StatusCodes.Status403Forbidden); - -app.Run(); +using System.Net; +using Microsoft.AspNetCore.Authentication; +using Microsoft.AspNetCore.Authorization; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Extensions.Options; +using OpenTelemetry.Instrumentation.AspNetCore; +using OpenTelemetry.Instrumentation.Runtime; +using OpenTelemetry.Metrics; +using Serilog; +using Serilog.Events; +using StellaOps.Auth.Abstractions; +using StellaOps.Auth.ServerIntegration; +using StellaOps.Configuration; +using StellaOps.Registry.TokenService; +using 
StellaOps.Registry.TokenService.Observability; + +var builder = WebApplication.CreateBuilder(args); + +builder.Configuration.AddStellaOpsDefaults(options => +{ + options.BasePath = builder.Environment.ContentRootPath; + options.EnvironmentPrefix = "REGISTRY_TOKEN_"; + options.ConfigureBuilder = configurationBuilder => + { + configurationBuilder.AddYamlFile("../etc/registry-token.yaml", optional: true, reloadOnChange: true); + }; +}); + +var bootstrapOptions = builder.Configuration.BindOptions<RegistryTokenServiceOptions>( + RegistryTokenServiceOptions.SectionName, + (opts, _) => opts.Validate()); + +builder.Host.UseSerilog((context, services, loggerConfiguration) => +{ + loggerConfiguration + .MinimumLevel.Information() + .MinimumLevel.Override("Microsoft.AspNetCore", LogEventLevel.Warning) + .Enrich.FromLogContext() + .WriteTo.Console(); +}); + +builder.Services.AddOptions<RegistryTokenServiceOptions>() + .Bind(builder.Configuration.GetSection(RegistryTokenServiceOptions.SectionName)) + .PostConfigure(options => options.Validate()) + .ValidateOnStart(); + +builder.Services.AddSingleton(TimeProvider.System); +builder.Services.AddSingleton<RegistryTokenMetrics>(); +builder.Services.AddSingleton<PlanRegistry>(sp => +{ + var options = sp.GetRequiredService<IOptions<RegistryTokenServiceOptions>>().Value; + return new PlanRegistry(options); +}); +builder.Services.AddSingleton<RegistryTokenIssuer>(); + +builder.Services.AddHealthChecks().AddCheck("self", () => Microsoft.Extensions.Diagnostics.HealthChecks.HealthCheckResult.Healthy()); + +builder.Services.AddOpenTelemetry() + .WithMetrics(metricsBuilder => + { + metricsBuilder.AddMeter(RegistryTokenMetrics.MeterName); + metricsBuilder.AddAspNetCoreInstrumentation(); + metricsBuilder.AddRuntimeInstrumentation(); + }); + +builder.Services.AddStellaOpsResourceServerAuthentication( + builder.Configuration, + configurationSection: null, + configure: resourceOptions => + { + resourceOptions.Authority = bootstrapOptions.Authority.Issuer; + resourceOptions.RequireHttpsMetadata = bootstrapOptions.Authority.RequireHttpsMetadata; + resourceOptions.MetadataAddress = bootstrapOptions.Authority.MetadataAddress; + + resourceOptions.Audiences.Clear(); + foreach (var audience in bootstrapOptions.Authority.Audiences) + { + resourceOptions.Audiences.Add(audience); + } + }); + +builder.Services.AddAuthorization(options => +{ + var scopes = bootstrapOptions.Authority.RequiredScopes.Count == 0 + ? 
new[] { "registry.token.issue" } + : bootstrapOptions.Authority.RequiredScopes.ToArray(); + + options.AddPolicy("registry.token.issue", policy => + { + policy.RequireAuthenticatedUser(); + policy.Requirements.Add(new StellaOpsScopeRequirement(scopes)); + policy.AddAuthenticationSchemes(StellaOpsAuthenticationDefaults.AuthenticationScheme); + }); +}); + +var app = builder.Build(); + +app.UseSerilogRequestLogging(); +app.UseAuthentication(); +app.UseAuthorization(); + +app.MapHealthChecks("/healthz"); + +app.MapGet("/token", ( + HttpContext context, + [FromServices] IOptions<RegistryTokenServiceOptions> options, + [FromServices] RegistryTokenIssuer issuer) => +{ + var serviceOptions = options.Value; + + var service = context.Request.Query["service"].FirstOrDefault()?.Trim(); + if (string.IsNullOrWhiteSpace(service)) + { + return Results.Problem( + detail: "The 'service' query parameter is required.", + statusCode: StatusCodes.Status400BadRequest); + } + + if (serviceOptions.Registry.AllowedServices.Count > 0 && + !serviceOptions.Registry.AllowedServices.Contains(service, StringComparer.OrdinalIgnoreCase)) + { + return Results.Problem( + detail: "The requested registry service is not permitted for this installation.", + statusCode: StatusCodes.Status403Forbidden); + } + + IReadOnlyList<RegistryAccessRequest> accessRequests; + try + { + accessRequests = RegistryScopeParser.Parse(context.Request.Query); + } + catch (InvalidScopeException ex) + { + return Results.Problem( + detail: ex.Message, + statusCode: StatusCodes.Status400BadRequest); + } + + if (accessRequests.Count == 0) + { + return Results.Problem( + detail: "At least one scope must be requested.", + statusCode: StatusCodes.Status400BadRequest); + } + + try + { + var response = issuer.IssueToken(context.User, service, accessRequests); + + return Results.Json(new + { + token = response.Token, + expires_in = response.ExpiresIn, + issued_at = response.IssuedAt.UtcDateTime.ToString("O"), + issued_token_type = "urn:ietf:params:oauth:token-type:access_token" + }); + } + catch (RegistryTokenException ex) + { + return Results.Problem( + detail: ex.Message, + statusCode: StatusCodes.Status403Forbidden); + } +}) +.WithName("GetRegistryToken") +.RequireAuthorization("registry.token.issue") +.Produces(StatusCodes.Status200OK) +.ProducesProblem(StatusCodes.Status400BadRequest) +.ProducesProblem(StatusCodes.Status403Forbidden); + +app.Run(); diff --git a/src/StellaOps.Registry.TokenService/Properties/launchSettings.json b/src/Registry/StellaOps.Registry.TokenService/Properties/launchSettings.json similarity index 96% rename from src/StellaOps.Registry.TokenService/Properties/launchSettings.json rename to src/Registry/StellaOps.Registry.TokenService/Properties/launchSettings.json index 96048292..17c1f83d 100644 --- a/src/StellaOps.Registry.TokenService/Properties/launchSettings.json +++ b/src/Registry/StellaOps.Registry.TokenService/Properties/launchSettings.json @@ -1,14 +1,14 @@ -{ - "$schema": "https://json.schemastore.org/launchsettings.json", - "profiles": { - "http": { - "commandName": "Project", - "dotnetRunMessages": true, - "launchBrowser": true, - "applicationUrl": "http://localhost:5068", - "environmentVariables": { - "ASPNETCORE_ENVIRONMENT": "Development" - } - } - } -} +{ + "$schema": "https://json.schemastore.org/launchsettings.json", + "profiles": { + "http": { + "commandName": "Project", + "dotnetRunMessages": true, + "launchBrowser": true, + "applicationUrl": "http://localhost:5068", + "environmentVariables": { + 
"ASPNETCORE_ENVIRONMENT": "Development" + } + } + } +} diff --git a/src/StellaOps.Registry.TokenService/RegistryAccessModels.cs b/src/Registry/StellaOps.Registry.TokenService/RegistryAccessModels.cs similarity index 97% rename from src/StellaOps.Registry.TokenService/RegistryAccessModels.cs rename to src/Registry/StellaOps.Registry.TokenService/RegistryAccessModels.cs index 63767922..4247f530 100644 --- a/src/StellaOps.Registry.TokenService/RegistryAccessModels.cs +++ b/src/Registry/StellaOps.Registry.TokenService/RegistryAccessModels.cs @@ -1,13 +1,13 @@ -using System.Collections.Generic; - -namespace StellaOps.Registry.TokenService; - -/// <summary> -/// Represents a scope access request parsed from the <c>scope</c> query parameter. -/// </summary> -public sealed record RegistryAccessRequest(string Type, string Name, IReadOnlyList<string> Actions); - -/// <summary> -/// Authorization decision. -/// </summary> -public sealed record RegistryAccessDecision(bool Allowed, string? FailureReason = null); +using System.Collections.Generic; + +namespace StellaOps.Registry.TokenService; + +/// <summary> +/// Represents a scope access request parsed from the <c>scope</c> query parameter. +/// </summary> +public sealed record RegistryAccessRequest(string Type, string Name, IReadOnlyList<string> Actions); + +/// <summary> +/// Authorization decision. +/// </summary> +public sealed record RegistryAccessDecision(bool Allowed, string? FailureReason = null); diff --git a/src/StellaOps.Registry.TokenService/RegistryScopeParser.cs b/src/Registry/StellaOps.Registry.TokenService/RegistryScopeParser.cs similarity index 96% rename from src/StellaOps.Registry.TokenService/RegistryScopeParser.cs rename to src/Registry/StellaOps.Registry.TokenService/RegistryScopeParser.cs index 2770989e..f0efd253 100644 --- a/src/StellaOps.Registry.TokenService/RegistryScopeParser.cs +++ b/src/Registry/StellaOps.Registry.TokenService/RegistryScopeParser.cs @@ -1,93 +1,93 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using Microsoft.AspNetCore.Http; - -namespace StellaOps.Registry.TokenService; - -public static class RegistryScopeParser -{ - public static IReadOnlyList<RegistryAccessRequest> Parse(IQueryCollection query) - { - ArgumentNullException.ThrowIfNull(query); - - var scopes = new List<string>(); - - if (query.TryGetValue("scope", out var scopeValues)) - { - foreach (var scope in scopeValues) - { - if (string.IsNullOrWhiteSpace(scope)) - { - continue; - } - - // Support space-delimited scopes per OAuth2 spec - foreach (var component in scope.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)) - { - scopes.Add(component); - } - } - } - - var requests = new List<RegistryAccessRequest>(scopes.Count); - foreach (var scope in scopes) - { - var request = ParseScope(scope); - requests.Add(request); - } - - return requests; - } - - private static RegistryAccessRequest ParseScope(string scope) - { - var segments = scope.Split(':', StringSplitOptions.TrimEntries); - if (segments.Length < 1) - { - throw new InvalidScopeException(scope, "scope missing resource type"); - } - - var type = segments[0]; - if (!string.Equals(type, "repository", StringComparison.OrdinalIgnoreCase)) - { - throw new InvalidScopeException(scope, $"unsupported resource type '{type}'"); - } - - if (segments.Length < 2 || string.IsNullOrWhiteSpace(segments[1])) - { - throw new InvalidScopeException(scope, "repository scope missing name"); - } - - var name = segments[1]; - var actions = segments.Length 
>= 3 && !string.IsNullOrWhiteSpace(segments[2]) - ? segments[2].Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries) - : Array.Empty<string>(); - - if (actions.Length == 0) - { - actions = new[] { "pull" }; - } - - var normalized = actions - .Select(action => action.ToLowerInvariant()) - .Distinct(StringComparer.OrdinalIgnoreCase) - .ToArray(); - - return new RegistryAccessRequest(type.ToLowerInvariant(), name, normalized); - } -} - -public sealed class InvalidScopeException : Exception -{ - public InvalidScopeException(string scope, string reason) - : base($"Invalid scope '{scope}': {reason}") - { - Scope = scope; - Reason = reason; - } - - public string Scope { get; } - - public string Reason { get; } -} +using System; +using System.Collections.Generic; +using System.Linq; +using Microsoft.AspNetCore.Http; + +namespace StellaOps.Registry.TokenService; + +public static class RegistryScopeParser +{ + public static IReadOnlyList<RegistryAccessRequest> Parse(IQueryCollection query) + { + ArgumentNullException.ThrowIfNull(query); + + var scopes = new List<string>(); + + if (query.TryGetValue("scope", out var scopeValues)) + { + foreach (var scope in scopeValues) + { + if (string.IsNullOrWhiteSpace(scope)) + { + continue; + } + + // Support space-delimited scopes per OAuth2 spec + foreach (var component in scope.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)) + { + scopes.Add(component); + } + } + } + + var requests = new List<RegistryAccessRequest>(scopes.Count); + foreach (var scope in scopes) + { + var request = ParseScope(scope); + requests.Add(request); + } + + return requests; + } + + private static RegistryAccessRequest ParseScope(string scope) + { + var segments = scope.Split(':', StringSplitOptions.TrimEntries); + if (segments.Length < 1) + { + throw new InvalidScopeException(scope, "scope missing resource type"); + } + + var type = segments[0]; + if (!string.Equals(type, "repository", StringComparison.OrdinalIgnoreCase)) + { + throw new InvalidScopeException(scope, $"unsupported resource type '{type}'"); + } + + if (segments.Length < 2 || string.IsNullOrWhiteSpace(segments[1])) + { + throw new InvalidScopeException(scope, "repository scope missing name"); + } + + var name = segments[1]; + var actions = segments.Length >= 3 && !string.IsNullOrWhiteSpace(segments[2]) + ? 
segments[2].Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries) + : Array.Empty<string>(); + + if (actions.Length == 0) + { + actions = new[] { "pull" }; + } + + var normalized = actions + .Select(action => action.ToLowerInvariant()) + .Distinct(StringComparer.OrdinalIgnoreCase) + .ToArray(); + + return new RegistryAccessRequest(type.ToLowerInvariant(), name, normalized); + } +} + +public sealed class InvalidScopeException : Exception +{ + public InvalidScopeException(string scope, string reason) + : base($"Invalid scope '{scope}': {reason}") + { + Scope = scope; + Reason = reason; + } + + public string Scope { get; } + + public string Reason { get; } +} diff --git a/src/StellaOps.Registry.TokenService/RegistryTokenIssuer.cs b/src/Registry/StellaOps.Registry.TokenService/RegistryTokenIssuer.cs similarity index 97% rename from src/StellaOps.Registry.TokenService/RegistryTokenIssuer.cs rename to src/Registry/StellaOps.Registry.TokenService/RegistryTokenIssuer.cs index aa7eaa40..4e06d4e9 100644 --- a/src/StellaOps.Registry.TokenService/RegistryTokenIssuer.cs +++ b/src/Registry/StellaOps.Registry.TokenService/RegistryTokenIssuer.cs @@ -1,129 +1,129 @@ -using System; -using System.Collections.Generic; -using System.IdentityModel.Tokens.Jwt; -using System.Linq; -using System.Security.Claims; -using Microsoft.Extensions.Options; -using Microsoft.IdentityModel.Tokens; -using StellaOps.Registry.TokenService.Observability; -using StellaOps.Registry.TokenService.Security; - -namespace StellaOps.Registry.TokenService; - -public sealed class RegistryTokenIssuer -{ - private readonly RegistryTokenServiceOptions _options; - private readonly PlanRegistry _planRegistry; - private readonly RegistryTokenMetrics _metrics; - private readonly SigningCredentials _signingCredentials; - private readonly JwtSecurityTokenHandler _tokenHandler = new(); - private readonly TimeProvider _timeProvider; - - public RegistryTokenIssuer( - IOptions<RegistryTokenServiceOptions> options, - PlanRegistry planRegistry, - RegistryTokenMetrics metrics, - TimeProvider timeProvider) - { - ArgumentNullException.ThrowIfNull(options); - ArgumentNullException.ThrowIfNull(planRegistry); - ArgumentNullException.ThrowIfNull(metrics); - ArgumentNullException.ThrowIfNull(timeProvider); - - _options = options.Value; - _planRegistry = planRegistry; - _metrics = metrics; - _timeProvider = timeProvider; - _signingCredentials = SigningKeyLoader.Load(_options.Signing); - } - - public RegistryTokenResponse IssueToken( - ClaimsPrincipal principal, - string service, - IReadOnlyList<RegistryAccessRequest> requests) - { - var decision = _planRegistry.Authorize(principal, requests); - if (!decision.Allowed) - { - _metrics.TokensRejected.Add(1, new KeyValuePair<string, object?>("reason", decision.FailureReason ?? "denied")); - throw new RegistryTokenException(decision.FailureReason ?? "denied"); - } - - var now = _timeProvider.GetUtcNow(); - var expires = now + _options.Signing.Lifetime; - var subject = principal.FindFirstValue(ClaimTypes.NameIdentifier) - ?? principal.FindFirstValue("client_id") - ?? principal.FindFirstValue("sub") - ?? "anonymous"; - - var payload = new JwtPayload( - issuer: _options.Signing.Issuer, - audience: _options.Signing.Audience ?? 
service, - claims: null, - notBefore: now.UtcDateTime, - expires: expires.UtcDateTime, - issuedAt: now.UtcDateTime) - { - { JwtRegisteredClaimNames.Sub, subject }, - { JwtRegisteredClaimNames.Jti, Guid.NewGuid().ToString("n") }, - { "service", service }, - { "access", BuildAccessClaim(requests) } - }; - - var licenseId = principal.FindFirstValue("stellaops:license"); - if (!string.IsNullOrWhiteSpace(licenseId)) - { - payload["stellaops:license"] = licenseId; - } - - var token = new JwtSecurityToken(new JwtHeader(_signingCredentials), payload); - var serialized = _tokenHandler.WriteToken(token); - - var plan = principal.FindFirstValue("stellaops:plan") ?? _options.DefaultPlan ?? "unknown"; - _metrics.TokensIssued.Add(1, new KeyValuePair<string, object?>("plan", plan)); - - return new RegistryTokenResponse( - serialized, - (int)_options.Signing.Lifetime.TotalSeconds, - now); - } - - private static object BuildAccessClaim(IReadOnlyList<RegistryAccessRequest> requests) - { - return requests - .Select(request => new Dictionary<string, object> - { - ["type"] = request.Type, - ["name"] = request.Name, - ["actions"] = request.Actions - }) - .ToArray(); - } -} - -public sealed class RegistryTokenResponse -{ - public RegistryTokenResponse(string token, int expiresInSeconds, DateTimeOffset issuedAt) - { - Token = token; - ExpiresIn = expiresInSeconds; - IssuedAt = issuedAt; - } - - public string Token { get; } - - public int ExpiresIn { get; } - - public DateTimeOffset IssuedAt { get; } -} - -public sealed class RegistryTokenException : Exception -{ - public RegistryTokenException(string reason) - : base($"Token request denied: {reason}") - { - Reason = reason; - } - - public string Reason { get; } -} +using System; +using System.Collections.Generic; +using System.IdentityModel.Tokens.Jwt; +using System.Linq; +using System.Security.Claims; +using Microsoft.Extensions.Options; +using Microsoft.IdentityModel.Tokens; +using StellaOps.Registry.TokenService.Observability; +using StellaOps.Registry.TokenService.Security; + +namespace StellaOps.Registry.TokenService; + +public sealed class RegistryTokenIssuer +{ + private readonly RegistryTokenServiceOptions _options; + private readonly PlanRegistry _planRegistry; + private readonly RegistryTokenMetrics _metrics; + private readonly SigningCredentials _signingCredentials; + private readonly JwtSecurityTokenHandler _tokenHandler = new(); + private readonly TimeProvider _timeProvider; + + public RegistryTokenIssuer( + IOptions<RegistryTokenServiceOptions> options, + PlanRegistry planRegistry, + RegistryTokenMetrics metrics, + TimeProvider timeProvider) + { + ArgumentNullException.ThrowIfNull(options); + ArgumentNullException.ThrowIfNull(planRegistry); + ArgumentNullException.ThrowIfNull(metrics); + ArgumentNullException.ThrowIfNull(timeProvider); + + _options = options.Value; + _planRegistry = planRegistry; + _metrics = metrics; + _timeProvider = timeProvider; + _signingCredentials = SigningKeyLoader.Load(_options.Signing); + } + + public RegistryTokenResponse IssueToken( + ClaimsPrincipal principal, + string service, + IReadOnlyList<RegistryAccessRequest> requests) + { + var decision = _planRegistry.Authorize(principal, requests); + if (!decision.Allowed) + { + _metrics.TokensRejected.Add(1, new KeyValuePair<string, object?>("reason", decision.FailureReason ?? "denied")); + throw new RegistryTokenException(decision.FailureReason ?? 
"denied"); + } + + var now = _timeProvider.GetUtcNow(); + var expires = now + _options.Signing.Lifetime; + var subject = principal.FindFirstValue(ClaimTypes.NameIdentifier) + ?? principal.FindFirstValue("client_id") + ?? principal.FindFirstValue("sub") + ?? "anonymous"; + + var payload = new JwtPayload( + issuer: _options.Signing.Issuer, + audience: _options.Signing.Audience ?? service, + claims: null, + notBefore: now.UtcDateTime, + expires: expires.UtcDateTime, + issuedAt: now.UtcDateTime) + { + { JwtRegisteredClaimNames.Sub, subject }, + { JwtRegisteredClaimNames.Jti, Guid.NewGuid().ToString("n") }, + { "service", service }, + { "access", BuildAccessClaim(requests) } + }; + + var licenseId = principal.FindFirstValue("stellaops:license"); + if (!string.IsNullOrWhiteSpace(licenseId)) + { + payload["stellaops:license"] = licenseId; + } + + var token = new JwtSecurityToken(new JwtHeader(_signingCredentials), payload); + var serialized = _tokenHandler.WriteToken(token); + + var plan = principal.FindFirstValue("stellaops:plan") ?? _options.DefaultPlan ?? "unknown"; + _metrics.TokensIssued.Add(1, new KeyValuePair<string, object?>("plan", plan)); + + return new RegistryTokenResponse( + serialized, + (int)_options.Signing.Lifetime.TotalSeconds, + now); + } + + private static object BuildAccessClaim(IReadOnlyList<RegistryAccessRequest> requests) + { + return requests + .Select(request => new Dictionary<string, object> + { + ["type"] = request.Type, + ["name"] = request.Name, + ["actions"] = request.Actions + }) + .ToArray(); + } +} + +public sealed class RegistryTokenResponse +{ + public RegistryTokenResponse(string token, int expiresInSeconds, DateTimeOffset issuedAt) + { + Token = token; + ExpiresIn = expiresInSeconds; + IssuedAt = issuedAt; + } + + public string Token { get; } + + public int ExpiresIn { get; } + + public DateTimeOffset IssuedAt { get; } +} + +public sealed class RegistryTokenException : Exception +{ + public RegistryTokenException(string reason) + : base($"Token request denied: {reason}") + { + Reason = reason; + } + + public string Reason { get; } +} diff --git a/src/StellaOps.Registry.TokenService/RegistryTokenServiceOptions.cs b/src/Registry/StellaOps.Registry.TokenService/RegistryTokenServiceOptions.cs similarity index 96% rename from src/StellaOps.Registry.TokenService/RegistryTokenServiceOptions.cs rename to src/Registry/StellaOps.Registry.TokenService/RegistryTokenServiceOptions.cs index ec9f90dc..94d07316 100644 --- a/src/StellaOps.Registry.TokenService/RegistryTokenServiceOptions.cs +++ b/src/Registry/StellaOps.Registry.TokenService/RegistryTokenServiceOptions.cs @@ -1,321 +1,321 @@ -using System; -using System.Collections.Generic; -using System.IO; -using System.Linq; - -namespace StellaOps.Registry.TokenService; - -/// <summary> -/// Strongly typed options for the registry token service. -/// </summary> -public sealed class RegistryTokenServiceOptions -{ - public const string SectionName = "RegistryTokenService"; - - /// <summary> - /// Authority validation options. - /// </summary> - public AuthorityOptions Authority { get; set; } = new(); - - /// <summary> - /// JWT signing options. - /// </summary> - public SigningOptions Signing { get; set; } = new(); - - /// <summary> - /// Registry-scoped settings. - /// </summary> - public RegistryOptions Registry { get; set; } = new(); - - /// <summary> - /// Plan catalogue. 
- /// </summary> - public IList<PlanRule> Plans { get; set; } = new List<PlanRule>(); - - /// <summary> - /// Identifiers that are revoked (license IDs or customer IDs). - /// </summary> - public IList<string> RevokedLicenses { get; set; } = new List<string>(); - - /// <summary> - /// Optional explicit default plan when no plan claim is supplied. - /// </summary> - public string? DefaultPlan { get; set; } - - public void Validate() - { - Authority.Validate(); - Signing.Validate(); - Registry.Validate(); - - if (Plans.Count == 0) - { - throw new InvalidOperationException("At least one plan rule must be configured."); - } - - foreach (var plan in Plans) - { - plan.Validate(); - } - - NormalizeList(RevokedLicenses, toLower: true); - - if (!string.IsNullOrWhiteSpace(DefaultPlan)) - { - var normalized = DefaultPlan.Trim(); - if (!Plans.Any(plan => string.Equals(plan.Name, normalized, StringComparison.OrdinalIgnoreCase))) - { - throw new InvalidOperationException($"Default plan '{normalized}' is not present in the plan catalogue."); - } - - DefaultPlan = normalized; - } - } - - private static void NormalizeList(IList<string> values, bool toLower) - { - if (values.Count == 0) - { - return; - } - - var seen = new HashSet<string>(StringComparer.OrdinalIgnoreCase); - for (var index = values.Count - 1; index >= 0; index--) - { - var value = values[index]; - if (string.IsNullOrWhiteSpace(value)) - { - values.RemoveAt(index); - continue; - } - - var normalized = value.Trim(); - if (toLower) - { - normalized = normalized.ToLowerInvariant(); - } - - if (!seen.Add(normalized)) - { - values.RemoveAt(index); - continue; - } - - values[index] = normalized; - } - } - - public sealed class AuthorityOptions - { - /// <summary> - /// Issuer/authority URL (e.g. https://authority.stella.internal). - /// </summary> - public string Issuer { get; set; } = string.Empty; - - /// <summary> - /// Optional explicit metadata (JWKS) endpoint. - /// </summary> - public string? MetadataAddress { get; set; } - - /// <summary> - /// Whether HTTPS metadata is required (disabled for dev loops). - /// </summary> - public bool RequireHttpsMetadata { get; set; } = true; - - /// <summary> - /// Audiences that resource server accepts. - /// </summary> - public IList<string> Audiences { get; set; } = new List<string>(); - - /// <summary> - /// Scopes required to hit the token endpoint. - /// </summary> - public IList<string> RequiredScopes { get; set; } = new List<string> { "registry.token.issue" }; - - public void Validate() - { - if (string.IsNullOrWhiteSpace(Issuer)) - { - throw new InvalidOperationException("Authority issuer must be configured."); - } - - if (!Uri.TryCreate(Issuer.Trim(), UriKind.Absolute, out var uri)) - { - throw new InvalidOperationException("Authority issuer must be an absolute URI."); - } - - if (RequireHttpsMetadata && - !uri.IsLoopback && - !string.Equals(uri.Scheme, Uri.UriSchemeHttps, StringComparison.OrdinalIgnoreCase)) - { - throw new InvalidOperationException("Authority issuer must use HTTPS when RequireHttpsMetadata is true."); - } - - NormalizeList(Audiences, toLower: false); - NormalizeList(RequiredScopes, toLower: true); - } - } - - public sealed class SigningOptions - { - /// <summary> - /// Issuer for generated registry tokens. - /// </summary> - public string Issuer { get; set; } = string.Empty; - - /// <summary> - /// Optional audience override. Defaults to the requested registry service. - /// </summary> - public string? 
Audience { get; set; } - - /// <summary> - /// Path to an RSA private key (PEM or PFX). - /// </summary> - public string KeyPath { get; set; } = string.Empty; - - /// <summary> - /// Optional password when loading a PFX. - /// </summary> - public string? KeyPassword { get; set; } - - /// <summary> - /// Optional key identifier (kid) appended to the JWT header. - /// </summary> - public string? KeyId { get; set; } - - /// <summary> - /// Token lifetime. - /// </summary> - public TimeSpan Lifetime { get; set; } = TimeSpan.FromMinutes(5); - - public void Validate() - { - if (string.IsNullOrWhiteSpace(Issuer)) - { - throw new InvalidOperationException("Signing.Issuer must be provided."); - } - - if (Lifetime <= TimeSpan.Zero || Lifetime > TimeSpan.FromHours(1)) - { - throw new InvalidOperationException("Signing.Lifetime must be between 1 second and 1 hour."); - } - - if (string.IsNullOrWhiteSpace(KeyPath)) - { - throw new InvalidOperationException("Signing.KeyPath must be configured."); - } - - var file = KeyPath.Trim(); - if (!Path.IsPathRooted(file)) - { - file = Path.GetFullPath(file); - } - - if (!File.Exists(file)) - { - throw new InvalidOperationException($"Signing.KeyPath '{file}' does not exist."); - } - - KeyPath = file; - if (!string.IsNullOrWhiteSpace(KeyId)) - { - KeyId = KeyId.Trim(); - } - } - } - - public sealed class RegistryOptions - { - /// <summary> - /// Registry service realm (matches Docker registry configuration). - /// </summary> - public string Realm { get; set; } = string.Empty; - - /// <summary> - /// Allowed service identifiers. Empty list permits any service. - /// </summary> - public IList<string> AllowedServices { get; set; } = new List<string>(); - - public void Validate() - { - if (string.IsNullOrWhiteSpace(Realm)) - { - throw new InvalidOperationException("Registry.Realm must be provided."); - } - - if (!Uri.TryCreate(Realm.Trim(), UriKind.Absolute, out _)) - { - throw new InvalidOperationException("Registry.Realm must be an absolute URI."); - } - - NormalizeList(AllowedServices, toLower: false); - } - } - - public sealed class PlanRule - { - /// <summary> - /// Plan identifier (case-insensitive). - /// </summary> - public string Name { get; set; } = string.Empty; - - /// <summary> - /// Repository rules associated to the plan. - /// </summary> - public IList<RepositoryRule> Repositories { get; set; } = new List<RepositoryRule>(); - - public void Validate() - { - if (string.IsNullOrWhiteSpace(Name)) - { - throw new InvalidOperationException("Plan name cannot be empty."); - } - - Name = Name.Trim(); - - if (Repositories.Count == 0) - { - throw new InvalidOperationException($"Plan '{Name}' must specify at least one repository rule."); - } - - foreach (var repo in Repositories) - { - repo.Validate(Name); - } - } - } - - public sealed class RepositoryRule - { - /// <summary> - /// Repository pattern (supports '*' wildcard). - /// </summary> - public string Pattern { get; set; } = string.Empty; - - /// <summary> - /// Allowed actions (pull/push/delete, etc.) 
- /// </summary> - public IList<string> Actions { get; set; } = new List<string> { "pull" }; - - public void Validate(string planName) - { - if (string.IsNullOrWhiteSpace(Pattern)) - { - throw new InvalidOperationException($"Plan '{planName}' contains a repository rule with an empty pattern."); - } - - Pattern = Pattern.Trim(); - if (Pattern.Contains(' ', StringComparison.Ordinal)) - { - throw new InvalidOperationException($"Plan '{planName}' repository pattern '{Pattern}' may not contain spaces."); - } - - if (Actions.Count == 0) - { - throw new InvalidOperationException($"Plan '{planName}' repository '{Pattern}' must define allowed actions."); - } - - NormalizeList(Actions, toLower: true); - } - } -} +using System; +using System.Collections.Generic; +using System.IO; +using System.Linq; + +namespace StellaOps.Registry.TokenService; + +/// <summary> +/// Strongly typed options for the registry token service. +/// </summary> +public sealed class RegistryTokenServiceOptions +{ + public const string SectionName = "RegistryTokenService"; + + /// <summary> + /// Authority validation options. + /// </summary> + public AuthorityOptions Authority { get; set; } = new(); + + /// <summary> + /// JWT signing options. + /// </summary> + public SigningOptions Signing { get; set; } = new(); + + /// <summary> + /// Registry-scoped settings. + /// </summary> + public RegistryOptions Registry { get; set; } = new(); + + /// <summary> + /// Plan catalogue. + /// </summary> + public IList<PlanRule> Plans { get; set; } = new List<PlanRule>(); + + /// <summary> + /// Identifiers that are revoked (license IDs or customer IDs). + /// </summary> + public IList<string> RevokedLicenses { get; set; } = new List<string>(); + + /// <summary> + /// Optional explicit default plan when no plan claim is supplied. + /// </summary> + public string? DefaultPlan { get; set; } + + public void Validate() + { + Authority.Validate(); + Signing.Validate(); + Registry.Validate(); + + if (Plans.Count == 0) + { + throw new InvalidOperationException("At least one plan rule must be configured."); + } + + foreach (var plan in Plans) + { + plan.Validate(); + } + + NormalizeList(RevokedLicenses, toLower: true); + + if (!string.IsNullOrWhiteSpace(DefaultPlan)) + { + var normalized = DefaultPlan.Trim(); + if (!Plans.Any(plan => string.Equals(plan.Name, normalized, StringComparison.OrdinalIgnoreCase))) + { + throw new InvalidOperationException($"Default plan '{normalized}' is not present in the plan catalogue."); + } + + DefaultPlan = normalized; + } + } + + private static void NormalizeList(IList<string> values, bool toLower) + { + if (values.Count == 0) + { + return; + } + + var seen = new HashSet<string>(StringComparer.OrdinalIgnoreCase); + for (var index = values.Count - 1; index >= 0; index--) + { + var value = values[index]; + if (string.IsNullOrWhiteSpace(value)) + { + values.RemoveAt(index); + continue; + } + + var normalized = value.Trim(); + if (toLower) + { + normalized = normalized.ToLowerInvariant(); + } + + if (!seen.Add(normalized)) + { + values.RemoveAt(index); + continue; + } + + values[index] = normalized; + } + } + + public sealed class AuthorityOptions + { + /// <summary> + /// Issuer/authority URL (e.g. https://authority.stella.internal). + /// </summary> + public string Issuer { get; set; } = string.Empty; + + /// <summary> + /// Optional explicit metadata (JWKS) endpoint. + /// </summary> + public string? MetadataAddress { get; set; } + + /// <summary> + /// Whether HTTPS metadata is required (disabled for dev loops). 
+ /// </summary> + public bool RequireHttpsMetadata { get; set; } = true; + + /// <summary> + /// Audiences that resource server accepts. + /// </summary> + public IList<string> Audiences { get; set; } = new List<string>(); + + /// <summary> + /// Scopes required to hit the token endpoint. + /// </summary> + public IList<string> RequiredScopes { get; set; } = new List<string> { "registry.token.issue" }; + + public void Validate() + { + if (string.IsNullOrWhiteSpace(Issuer)) + { + throw new InvalidOperationException("Authority issuer must be configured."); + } + + if (!Uri.TryCreate(Issuer.Trim(), UriKind.Absolute, out var uri)) + { + throw new InvalidOperationException("Authority issuer must be an absolute URI."); + } + + if (RequireHttpsMetadata && + !uri.IsLoopback && + !string.Equals(uri.Scheme, Uri.UriSchemeHttps, StringComparison.OrdinalIgnoreCase)) + { + throw new InvalidOperationException("Authority issuer must use HTTPS when RequireHttpsMetadata is true."); + } + + NormalizeList(Audiences, toLower: false); + NormalizeList(RequiredScopes, toLower: true); + } + } + + public sealed class SigningOptions + { + /// <summary> + /// Issuer for generated registry tokens. + /// </summary> + public string Issuer { get; set; } = string.Empty; + + /// <summary> + /// Optional audience override. Defaults to the requested registry service. + /// </summary> + public string? Audience { get; set; } + + /// <summary> + /// Path to an RSA private key (PEM or PFX). + /// </summary> + public string KeyPath { get; set; } = string.Empty; + + /// <summary> + /// Optional password when loading a PFX. + /// </summary> + public string? KeyPassword { get; set; } + + /// <summary> + /// Optional key identifier (kid) appended to the JWT header. + /// </summary> + public string? KeyId { get; set; } + + /// <summary> + /// Token lifetime. + /// </summary> + public TimeSpan Lifetime { get; set; } = TimeSpan.FromMinutes(5); + + public void Validate() + { + if (string.IsNullOrWhiteSpace(Issuer)) + { + throw new InvalidOperationException("Signing.Issuer must be provided."); + } + + if (Lifetime <= TimeSpan.Zero || Lifetime > TimeSpan.FromHours(1)) + { + throw new InvalidOperationException("Signing.Lifetime must be between 1 second and 1 hour."); + } + + if (string.IsNullOrWhiteSpace(KeyPath)) + { + throw new InvalidOperationException("Signing.KeyPath must be configured."); + } + + var file = KeyPath.Trim(); + if (!Path.IsPathRooted(file)) + { + file = Path.GetFullPath(file); + } + + if (!File.Exists(file)) + { + throw new InvalidOperationException($"Signing.KeyPath '{file}' does not exist."); + } + + KeyPath = file; + if (!string.IsNullOrWhiteSpace(KeyId)) + { + KeyId = KeyId.Trim(); + } + } + } + + public sealed class RegistryOptions + { + /// <summary> + /// Registry service realm (matches Docker registry configuration). + /// </summary> + public string Realm { get; set; } = string.Empty; + + /// <summary> + /// Allowed service identifiers. Empty list permits any service. + /// </summary> + public IList<string> AllowedServices { get; set; } = new List<string>(); + + public void Validate() + { + if (string.IsNullOrWhiteSpace(Realm)) + { + throw new InvalidOperationException("Registry.Realm must be provided."); + } + + if (!Uri.TryCreate(Realm.Trim(), UriKind.Absolute, out _)) + { + throw new InvalidOperationException("Registry.Realm must be an absolute URI."); + } + + NormalizeList(AllowedServices, toLower: false); + } + } + + public sealed class PlanRule + { + /// <summary> + /// Plan identifier (case-insensitive). 
+ /// </summary> + public string Name { get; set; } = string.Empty; + + /// <summary> + /// Repository rules associated to the plan. + /// </summary> + public IList<RepositoryRule> Repositories { get; set; } = new List<RepositoryRule>(); + + public void Validate() + { + if (string.IsNullOrWhiteSpace(Name)) + { + throw new InvalidOperationException("Plan name cannot be empty."); + } + + Name = Name.Trim(); + + if (Repositories.Count == 0) + { + throw new InvalidOperationException($"Plan '{Name}' must specify at least one repository rule."); + } + + foreach (var repo in Repositories) + { + repo.Validate(Name); + } + } + } + + public sealed class RepositoryRule + { + /// <summary> + /// Repository pattern (supports '*' wildcard). + /// </summary> + public string Pattern { get; set; } = string.Empty; + + /// <summary> + /// Allowed actions (pull/push/delete, etc.) + /// </summary> + public IList<string> Actions { get; set; } = new List<string> { "pull" }; + + public void Validate(string planName) + { + if (string.IsNullOrWhiteSpace(Pattern)) + { + throw new InvalidOperationException($"Plan '{planName}' contains a repository rule with an empty pattern."); + } + + Pattern = Pattern.Trim(); + if (Pattern.Contains(' ', StringComparison.Ordinal)) + { + throw new InvalidOperationException($"Plan '{planName}' repository pattern '{Pattern}' may not contain spaces."); + } + + if (Actions.Count == 0) + { + throw new InvalidOperationException($"Plan '{planName}' repository '{Pattern}' must define allowed actions."); + } + + NormalizeList(Actions, toLower: true); + } + } +} diff --git a/src/StellaOps.Registry.TokenService/Security/SigningKeyLoader.cs b/src/Registry/StellaOps.Registry.TokenService/Security/SigningKeyLoader.cs similarity index 96% rename from src/StellaOps.Registry.TokenService/Security/SigningKeyLoader.cs rename to src/Registry/StellaOps.Registry.TokenService/Security/SigningKeyLoader.cs index ee464d5c..d539fcf4 100644 --- a/src/StellaOps.Registry.TokenService/Security/SigningKeyLoader.cs +++ b/src/Registry/StellaOps.Registry.TokenService/Security/SigningKeyLoader.cs @@ -1,66 +1,66 @@ -using System; -using System.IO; -using System.Security.Cryptography; -using System.Security.Cryptography.X509Certificates; -using Microsoft.IdentityModel.Tokens; - -namespace StellaOps.Registry.TokenService.Security; - -internal static class SigningKeyLoader -{ - public static SigningCredentials Load(RegistryTokenServiceOptions.SigningOptions options) - { - ArgumentNullException.ThrowIfNull(options); - - SecurityKey key; - - var extension = Path.GetExtension(options.KeyPath); - if (string.Equals(extension, ".pfx", StringComparison.OrdinalIgnoreCase)) - { - key = LoadFromPfx(options.KeyPath, options.KeyPassword); - } - else - { - key = LoadFromPem(options.KeyPath); - } - - var credentials = new SigningCredentials(key, SecurityAlgorithms.RsaSha256) - { - CryptoProviderFactory = new CryptoProviderFactory { CacheSignatureProviders = true } - }; - - if (!string.IsNullOrWhiteSpace(options.KeyId)) - { - credentials.Key.KeyId = options.KeyId; - } - - return credentials; - } - - private static SecurityKey LoadFromPfx(string path, string? 
password) - { - using var cert = X509CertificateLoader.LoadPkcs12FromFile(path, password, X509KeyStorageFlags.Exportable | X509KeyStorageFlags.EphemeralKeySet); - if (!cert.HasPrivateKey) - { - throw new InvalidOperationException($"Certificate '{path}' does not contain a private key."); - } - - if (cert.GetRSAPrivateKey() is not RSA rsa) - { - throw new InvalidOperationException($"Certificate '{path}' does not contain an RSA private key."); - } - - var parameters = rsa.ExportParameters(true); - rsa.Dispose(); - - return new RsaSecurityKey(parameters) { KeyId = cert.Thumbprint }; - } - - private static SecurityKey LoadFromPem(string path) - { - using var rsa = RSA.Create(); - var pem = File.ReadAllText(path); - rsa.ImportFromPem(pem); - return new RsaSecurityKey(rsa.ExportParameters(includePrivateParameters: true)); - } -} +using System; +using System.IO; +using System.Security.Cryptography; +using System.Security.Cryptography.X509Certificates; +using Microsoft.IdentityModel.Tokens; + +namespace StellaOps.Registry.TokenService.Security; + +internal static class SigningKeyLoader +{ + public static SigningCredentials Load(RegistryTokenServiceOptions.SigningOptions options) + { + ArgumentNullException.ThrowIfNull(options); + + SecurityKey key; + + var extension = Path.GetExtension(options.KeyPath); + if (string.Equals(extension, ".pfx", StringComparison.OrdinalIgnoreCase)) + { + key = LoadFromPfx(options.KeyPath, options.KeyPassword); + } + else + { + key = LoadFromPem(options.KeyPath); + } + + var credentials = new SigningCredentials(key, SecurityAlgorithms.RsaSha256) + { + CryptoProviderFactory = new CryptoProviderFactory { CacheSignatureProviders = true } + }; + + if (!string.IsNullOrWhiteSpace(options.KeyId)) + { + credentials.Key.KeyId = options.KeyId; + } + + return credentials; + } + + private static SecurityKey LoadFromPfx(string path, string? 
password) + { + using var cert = X509CertificateLoader.LoadPkcs12FromFile(path, password, X509KeyStorageFlags.Exportable | X509KeyStorageFlags.EphemeralKeySet); + if (!cert.HasPrivateKey) + { + throw new InvalidOperationException($"Certificate '{path}' does not contain a private key."); + } + + if (cert.GetRSAPrivateKey() is not RSA rsa) + { + throw new InvalidOperationException($"Certificate '{path}' does not contain an RSA private key."); + } + + var parameters = rsa.ExportParameters(true); + rsa.Dispose(); + + return new RsaSecurityKey(parameters) { KeyId = cert.Thumbprint }; + } + + private static SecurityKey LoadFromPem(string path) + { + using var rsa = RSA.Create(); + var pem = File.ReadAllText(path); + rsa.ImportFromPem(pem); + return new RsaSecurityKey(rsa.ExportParameters(includePrivateParameters: true)); + } +} diff --git a/src/StellaOps.Registry.TokenService/StellaOps.Registry.TokenService.csproj b/src/Registry/StellaOps.Registry.TokenService/StellaOps.Registry.TokenService.csproj similarity index 62% rename from src/StellaOps.Registry.TokenService/StellaOps.Registry.TokenService.csproj rename to src/Registry/StellaOps.Registry.TokenService/StellaOps.Registry.TokenService.csproj index f86c30fe..eee6a338 100644 --- a/src/StellaOps.Registry.TokenService/StellaOps.Registry.TokenService.csproj +++ b/src/Registry/StellaOps.Registry.TokenService/StellaOps.Registry.TokenService.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk.Web"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -16,9 +17,9 @@ <PackageReference Include="System.IdentityModel.Tokens.Jwt" Version="8.0.1" /> </ItemGroup> <ItemGroup> - <ProjectReference Include="..\StellaOps.Authority\StellaOps.Auth.Abstractions\StellaOps.Auth.Abstractions.csproj" /> - <ProjectReference Include="..\StellaOps.Authority\StellaOps.Auth.ServerIntegration\StellaOps.Auth.ServerIntegration.csproj" /> - <ProjectReference Include="..\StellaOps.Configuration\StellaOps.Configuration.csproj" /> - <ProjectReference Include="..\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj" /> + <ProjectReference Include="../../Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOps.Auth.Abstractions.csproj" /> + <ProjectReference Include="../../Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOps.Auth.ServerIntegration.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Configuration/StellaOps.Configuration.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/Registry/StellaOps.Registry.TokenService/appsettings.Development.json b/src/Registry/StellaOps.Registry.TokenService/appsettings.Development.json new file mode 100644 index 00000000..ff66ba6b --- /dev/null +++ b/src/Registry/StellaOps.Registry.TokenService/appsettings.Development.json @@ -0,0 +1,8 @@ +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.AspNetCore": "Warning" + } + } +} diff --git a/src/Registry/StellaOps.Registry.TokenService/appsettings.json b/src/Registry/StellaOps.Registry.TokenService/appsettings.json new file mode 100644 index 00000000..4d566948 --- /dev/null +++ b/src/Registry/StellaOps.Registry.TokenService/appsettings.json @@ -0,0 +1,9 @@ +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.AspNetCore": "Warning" + } + }, + "AllowedHosts": "*" +} diff --git 
a/src/Registry/StellaOps.Registry.sln b/src/Registry/StellaOps.Registry.sln new file mode 100644 index 00000000..2fedc5a5 --- /dev/null +++ b/src/Registry/StellaOps.Registry.sln @@ -0,0 +1,137 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.0.31903.59 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Registry.TokenService", "StellaOps.Registry.TokenService\StellaOps.Registry.TokenService.csproj", "{47219E8C-6EF9-4F09-88D0-28E7525824F6}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Abstractions", "..\Authority\StellaOps.Authority\StellaOps.Auth.Abstractions\StellaOps.Auth.Abstractions.csproj", "{5B2C944F-C02D-444E-BF69-6FF06E8BB165}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.ServerIntegration", "..\Authority\StellaOps.Authority\StellaOps.Auth.ServerIntegration\StellaOps.Auth.ServerIntegration.csproj", "{75CE45B4-ACA9-4E96-A7C8-99F05A6B8090}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Configuration", "..\__Libraries\StellaOps.Configuration\StellaOps.Configuration.csproj", "{84F1A536-BA7B-4FF6-82C1-EC324B3BD158}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Authority.Plugins.Abstractions", "..\Authority\StellaOps.Authority\StellaOps.Authority.Plugins.Abstractions\StellaOps.Authority.Plugins.Abstractions.csproj", "{538BEB07-55EB-4AAD-B323-D49984F152F6}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cryptography", "..\__Libraries\StellaOps.Cryptography\StellaOps.Cryptography.csproj", "{41DF0C8A-D826-4398-95F7-7FEDFEFE9053}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.DependencyInjection", "..\__Libraries\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj", "{F97C3CD8-B89D-4E4D-815C-4D799F65A78A}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "__Tests", "__Tests", "{56BCE1BF-7CBA-7CE8-203D-A88051F1D642}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Registry.TokenService.Tests", "__Tests\StellaOps.Registry.TokenService.Tests\StellaOps.Registry.TokenService.Tests.csproj", "{C34D56B3-8B7A-4AF0-8279-80155527235B}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|Any CPU = Release|Any CPU + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {47219E8C-6EF9-4F09-88D0-28E7525824F6}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {47219E8C-6EF9-4F09-88D0-28E7525824F6}.Debug|Any CPU.Build.0 = Debug|Any CPU + {47219E8C-6EF9-4F09-88D0-28E7525824F6}.Debug|x64.ActiveCfg = Debug|Any CPU + {47219E8C-6EF9-4F09-88D0-28E7525824F6}.Debug|x64.Build.0 = Debug|Any CPU + {47219E8C-6EF9-4F09-88D0-28E7525824F6}.Debug|x86.ActiveCfg = Debug|Any CPU + {47219E8C-6EF9-4F09-88D0-28E7525824F6}.Debug|x86.Build.0 = Debug|Any CPU + {47219E8C-6EF9-4F09-88D0-28E7525824F6}.Release|Any CPU.ActiveCfg = Release|Any CPU + {47219E8C-6EF9-4F09-88D0-28E7525824F6}.Release|Any CPU.Build.0 = Release|Any CPU + {47219E8C-6EF9-4F09-88D0-28E7525824F6}.Release|x64.ActiveCfg = Release|Any CPU + {47219E8C-6EF9-4F09-88D0-28E7525824F6}.Release|x64.Build.0 = Release|Any CPU + {47219E8C-6EF9-4F09-88D0-28E7525824F6}.Release|x86.ActiveCfg = Release|Any CPU + 
{47219E8C-6EF9-4F09-88D0-28E7525824F6}.Release|x86.Build.0 = Release|Any CPU + {5B2C944F-C02D-444E-BF69-6FF06E8BB165}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {5B2C944F-C02D-444E-BF69-6FF06E8BB165}.Debug|Any CPU.Build.0 = Debug|Any CPU + {5B2C944F-C02D-444E-BF69-6FF06E8BB165}.Debug|x64.ActiveCfg = Debug|Any CPU + {5B2C944F-C02D-444E-BF69-6FF06E8BB165}.Debug|x64.Build.0 = Debug|Any CPU + {5B2C944F-C02D-444E-BF69-6FF06E8BB165}.Debug|x86.ActiveCfg = Debug|Any CPU + {5B2C944F-C02D-444E-BF69-6FF06E8BB165}.Debug|x86.Build.0 = Debug|Any CPU + {5B2C944F-C02D-444E-BF69-6FF06E8BB165}.Release|Any CPU.ActiveCfg = Release|Any CPU + {5B2C944F-C02D-444E-BF69-6FF06E8BB165}.Release|Any CPU.Build.0 = Release|Any CPU + {5B2C944F-C02D-444E-BF69-6FF06E8BB165}.Release|x64.ActiveCfg = Release|Any CPU + {5B2C944F-C02D-444E-BF69-6FF06E8BB165}.Release|x64.Build.0 = Release|Any CPU + {5B2C944F-C02D-444E-BF69-6FF06E8BB165}.Release|x86.ActiveCfg = Release|Any CPU + {5B2C944F-C02D-444E-BF69-6FF06E8BB165}.Release|x86.Build.0 = Release|Any CPU + {75CE45B4-ACA9-4E96-A7C8-99F05A6B8090}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {75CE45B4-ACA9-4E96-A7C8-99F05A6B8090}.Debug|Any CPU.Build.0 = Debug|Any CPU + {75CE45B4-ACA9-4E96-A7C8-99F05A6B8090}.Debug|x64.ActiveCfg = Debug|Any CPU + {75CE45B4-ACA9-4E96-A7C8-99F05A6B8090}.Debug|x64.Build.0 = Debug|Any CPU + {75CE45B4-ACA9-4E96-A7C8-99F05A6B8090}.Debug|x86.ActiveCfg = Debug|Any CPU + {75CE45B4-ACA9-4E96-A7C8-99F05A6B8090}.Debug|x86.Build.0 = Debug|Any CPU + {75CE45B4-ACA9-4E96-A7C8-99F05A6B8090}.Release|Any CPU.ActiveCfg = Release|Any CPU + {75CE45B4-ACA9-4E96-A7C8-99F05A6B8090}.Release|Any CPU.Build.0 = Release|Any CPU + {75CE45B4-ACA9-4E96-A7C8-99F05A6B8090}.Release|x64.ActiveCfg = Release|Any CPU + {75CE45B4-ACA9-4E96-A7C8-99F05A6B8090}.Release|x64.Build.0 = Release|Any CPU + {75CE45B4-ACA9-4E96-A7C8-99F05A6B8090}.Release|x86.ActiveCfg = Release|Any CPU + {75CE45B4-ACA9-4E96-A7C8-99F05A6B8090}.Release|x86.Build.0 = Release|Any CPU + {84F1A536-BA7B-4FF6-82C1-EC324B3BD158}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {84F1A536-BA7B-4FF6-82C1-EC324B3BD158}.Debug|Any CPU.Build.0 = Debug|Any CPU + {84F1A536-BA7B-4FF6-82C1-EC324B3BD158}.Debug|x64.ActiveCfg = Debug|Any CPU + {84F1A536-BA7B-4FF6-82C1-EC324B3BD158}.Debug|x64.Build.0 = Debug|Any CPU + {84F1A536-BA7B-4FF6-82C1-EC324B3BD158}.Debug|x86.ActiveCfg = Debug|Any CPU + {84F1A536-BA7B-4FF6-82C1-EC324B3BD158}.Debug|x86.Build.0 = Debug|Any CPU + {84F1A536-BA7B-4FF6-82C1-EC324B3BD158}.Release|Any CPU.ActiveCfg = Release|Any CPU + {84F1A536-BA7B-4FF6-82C1-EC324B3BD158}.Release|Any CPU.Build.0 = Release|Any CPU + {84F1A536-BA7B-4FF6-82C1-EC324B3BD158}.Release|x64.ActiveCfg = Release|Any CPU + {84F1A536-BA7B-4FF6-82C1-EC324B3BD158}.Release|x64.Build.0 = Release|Any CPU + {84F1A536-BA7B-4FF6-82C1-EC324B3BD158}.Release|x86.ActiveCfg = Release|Any CPU + {84F1A536-BA7B-4FF6-82C1-EC324B3BD158}.Release|x86.Build.0 = Release|Any CPU + {538BEB07-55EB-4AAD-B323-D49984F152F6}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {538BEB07-55EB-4AAD-B323-D49984F152F6}.Debug|Any CPU.Build.0 = Debug|Any CPU + {538BEB07-55EB-4AAD-B323-D49984F152F6}.Debug|x64.ActiveCfg = Debug|Any CPU + {538BEB07-55EB-4AAD-B323-D49984F152F6}.Debug|x64.Build.0 = Debug|Any CPU + {538BEB07-55EB-4AAD-B323-D49984F152F6}.Debug|x86.ActiveCfg = Debug|Any CPU + {538BEB07-55EB-4AAD-B323-D49984F152F6}.Debug|x86.Build.0 = Debug|Any CPU + {538BEB07-55EB-4AAD-B323-D49984F152F6}.Release|Any CPU.ActiveCfg = Release|Any CPU + {538BEB07-55EB-4AAD-B323-D49984F152F6}.Release|Any CPU.Build.0 = 
Release|Any CPU + {538BEB07-55EB-4AAD-B323-D49984F152F6}.Release|x64.ActiveCfg = Release|Any CPU + {538BEB07-55EB-4AAD-B323-D49984F152F6}.Release|x64.Build.0 = Release|Any CPU + {538BEB07-55EB-4AAD-B323-D49984F152F6}.Release|x86.ActiveCfg = Release|Any CPU + {538BEB07-55EB-4AAD-B323-D49984F152F6}.Release|x86.Build.0 = Release|Any CPU + {41DF0C8A-D826-4398-95F7-7FEDFEFE9053}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {41DF0C8A-D826-4398-95F7-7FEDFEFE9053}.Debug|Any CPU.Build.0 = Debug|Any CPU + {41DF0C8A-D826-4398-95F7-7FEDFEFE9053}.Debug|x64.ActiveCfg = Debug|Any CPU + {41DF0C8A-D826-4398-95F7-7FEDFEFE9053}.Debug|x64.Build.0 = Debug|Any CPU + {41DF0C8A-D826-4398-95F7-7FEDFEFE9053}.Debug|x86.ActiveCfg = Debug|Any CPU + {41DF0C8A-D826-4398-95F7-7FEDFEFE9053}.Debug|x86.Build.0 = Debug|Any CPU + {41DF0C8A-D826-4398-95F7-7FEDFEFE9053}.Release|Any CPU.ActiveCfg = Release|Any CPU + {41DF0C8A-D826-4398-95F7-7FEDFEFE9053}.Release|Any CPU.Build.0 = Release|Any CPU + {41DF0C8A-D826-4398-95F7-7FEDFEFE9053}.Release|x64.ActiveCfg = Release|Any CPU + {41DF0C8A-D826-4398-95F7-7FEDFEFE9053}.Release|x64.Build.0 = Release|Any CPU + {41DF0C8A-D826-4398-95F7-7FEDFEFE9053}.Release|x86.ActiveCfg = Release|Any CPU + {41DF0C8A-D826-4398-95F7-7FEDFEFE9053}.Release|x86.Build.0 = Release|Any CPU + {F97C3CD8-B89D-4E4D-815C-4D799F65A78A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {F97C3CD8-B89D-4E4D-815C-4D799F65A78A}.Debug|Any CPU.Build.0 = Debug|Any CPU + {F97C3CD8-B89D-4E4D-815C-4D799F65A78A}.Debug|x64.ActiveCfg = Debug|Any CPU + {F97C3CD8-B89D-4E4D-815C-4D799F65A78A}.Debug|x64.Build.0 = Debug|Any CPU + {F97C3CD8-B89D-4E4D-815C-4D799F65A78A}.Debug|x86.ActiveCfg = Debug|Any CPU + {F97C3CD8-B89D-4E4D-815C-4D799F65A78A}.Debug|x86.Build.0 = Debug|Any CPU + {F97C3CD8-B89D-4E4D-815C-4D799F65A78A}.Release|Any CPU.ActiveCfg = Release|Any CPU + {F97C3CD8-B89D-4E4D-815C-4D799F65A78A}.Release|Any CPU.Build.0 = Release|Any CPU + {F97C3CD8-B89D-4E4D-815C-4D799F65A78A}.Release|x64.ActiveCfg = Release|Any CPU + {F97C3CD8-B89D-4E4D-815C-4D799F65A78A}.Release|x64.Build.0 = Release|Any CPU + {F97C3CD8-B89D-4E4D-815C-4D799F65A78A}.Release|x86.ActiveCfg = Release|Any CPU + {F97C3CD8-B89D-4E4D-815C-4D799F65A78A}.Release|x86.Build.0 = Release|Any CPU + {C34D56B3-8B7A-4AF0-8279-80155527235B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C34D56B3-8B7A-4AF0-8279-80155527235B}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C34D56B3-8B7A-4AF0-8279-80155527235B}.Debug|x64.ActiveCfg = Debug|Any CPU + {C34D56B3-8B7A-4AF0-8279-80155527235B}.Debug|x64.Build.0 = Debug|Any CPU + {C34D56B3-8B7A-4AF0-8279-80155527235B}.Debug|x86.ActiveCfg = Debug|Any CPU + {C34D56B3-8B7A-4AF0-8279-80155527235B}.Debug|x86.Build.0 = Debug|Any CPU + {C34D56B3-8B7A-4AF0-8279-80155527235B}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C34D56B3-8B7A-4AF0-8279-80155527235B}.Release|Any CPU.Build.0 = Release|Any CPU + {C34D56B3-8B7A-4AF0-8279-80155527235B}.Release|x64.ActiveCfg = Release|Any CPU + {C34D56B3-8B7A-4AF0-8279-80155527235B}.Release|x64.Build.0 = Release|Any CPU + {C34D56B3-8B7A-4AF0-8279-80155527235B}.Release|x86.ActiveCfg = Release|Any CPU + {C34D56B3-8B7A-4AF0-8279-80155527235B}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(NestedProjects) = preSolution + {C34D56B3-8B7A-4AF0-8279-80155527235B} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + EndGlobalSection +EndGlobal diff --git a/src/StellaOps.Registry.TokenService.Tests/PlanRegistryTests.cs 
b/src/Registry/__Tests/StellaOps.Registry.TokenService.Tests/PlanRegistryTests.cs similarity index 96% rename from src/StellaOps.Registry.TokenService.Tests/PlanRegistryTests.cs rename to src/Registry/__Tests/StellaOps.Registry.TokenService.Tests/PlanRegistryTests.cs index e3deccc3..0c488355 100644 --- a/src/StellaOps.Registry.TokenService.Tests/PlanRegistryTests.cs +++ b/src/Registry/__Tests/StellaOps.Registry.TokenService.Tests/PlanRegistryTests.cs @@ -1,109 +1,109 @@ -using System.Security.Claims; -using Microsoft.Extensions.Options; -using StellaOps.Registry.TokenService; - -namespace StellaOps.Registry.TokenService.Tests; - -public sealed class PlanRegistryTests -{ - private static RegistryTokenServiceOptions CreateOptions() - { - return new RegistryTokenServiceOptions - { - Authority = new RegistryTokenServiceOptions.AuthorityOptions - { - Issuer = "https://authority.localhost", - RequireHttpsMetadata = false, - }, - Signing = new RegistryTokenServiceOptions.SigningOptions - { - Issuer = "https://registry.localhost/token", - KeyPath = Path.GetTempFileName(), - }, - Registry = new RegistryTokenServiceOptions.RegistryOptions - { - Realm = "https://registry.localhost/v2/token" - }, - Plans = - { - new RegistryTokenServiceOptions.PlanRule - { - Name = "community", - Repositories = - { - new RegistryTokenServiceOptions.RepositoryRule - { - Pattern = "stella-ops/public/*", - Actions = new [] { "pull" } - } - } - }, - new RegistryTokenServiceOptions.PlanRule - { - Name = "enterprise", - Repositories = - { - new RegistryTokenServiceOptions.RepositoryRule - { - Pattern = "stella-ops/public/*", - Actions = new [] { "pull" } - }, - new RegistryTokenServiceOptions.RepositoryRule - { - Pattern = "stella-ops/enterprise/*", - Actions = new [] { "pull", "push" } - } - } - } - } - }; - } - - [Fact] - public void Authorize_AllowsMatchingPlan() - { - var options = CreateOptions(); - options.Signing.Validate(); - options.Registry.Validate(); - foreach (var plan in options.Plans) - { - plan.Validate(); - } - - var registry = new PlanRegistry(options); - - var principal = new ClaimsPrincipal(new ClaimsIdentity(new[] - { - new Claim("stellaops:plan", "enterprise") - }, "test")); - - var decision = registry.Authorize(principal, new[] - { - new RegistryAccessRequest("repository", "stella-ops/enterprise/cache", new [] { "pull" }) - }); - - Assert.True(decision.Allowed); - } - - [Fact] - public void Authorize_DeniesUnknownPlan() - { - var options = CreateOptions(); - options.Signing.Validate(); - options.Registry.Validate(); - foreach (var plan in options.Plans) - { - plan.Validate(); - } - - var registry = new PlanRegistry(options); - var principal = new ClaimsPrincipal(new ClaimsIdentity(new Claim[] { }, "test")); - - var decision = registry.Authorize(principal, new[] - { - new RegistryAccessRequest("repository", "stella-ops/enterprise/cache", new [] { "pull" }) - }); - - Assert.False(decision.Allowed); - } -} +using System.Security.Claims; +using Microsoft.Extensions.Options; +using StellaOps.Registry.TokenService; + +namespace StellaOps.Registry.TokenService.Tests; + +public sealed class PlanRegistryTests +{ + private static RegistryTokenServiceOptions CreateOptions() + { + return new RegistryTokenServiceOptions + { + Authority = new RegistryTokenServiceOptions.AuthorityOptions + { + Issuer = "https://authority.localhost", + RequireHttpsMetadata = false, + }, + Signing = new RegistryTokenServiceOptions.SigningOptions + { + Issuer = "https://registry.localhost/token", + KeyPath = Path.GetTempFileName(), + 
}, + Registry = new RegistryTokenServiceOptions.RegistryOptions + { + Realm = "https://registry.localhost/v2/token" + }, + Plans = + { + new RegistryTokenServiceOptions.PlanRule + { + Name = "community", + Repositories = + { + new RegistryTokenServiceOptions.RepositoryRule + { + Pattern = "stella-ops/public/*", + Actions = new [] { "pull" } + } + } + }, + new RegistryTokenServiceOptions.PlanRule + { + Name = "enterprise", + Repositories = + { + new RegistryTokenServiceOptions.RepositoryRule + { + Pattern = "stella-ops/public/*", + Actions = new [] { "pull" } + }, + new RegistryTokenServiceOptions.RepositoryRule + { + Pattern = "stella-ops/enterprise/*", + Actions = new [] { "pull", "push" } + } + } + } + } + }; + } + + [Fact] + public void Authorize_AllowsMatchingPlan() + { + var options = CreateOptions(); + options.Signing.Validate(); + options.Registry.Validate(); + foreach (var plan in options.Plans) + { + plan.Validate(); + } + + var registry = new PlanRegistry(options); + + var principal = new ClaimsPrincipal(new ClaimsIdentity(new[] + { + new Claim("stellaops:plan", "enterprise") + }, "test")); + + var decision = registry.Authorize(principal, new[] + { + new RegistryAccessRequest("repository", "stella-ops/enterprise/cache", new [] { "pull" }) + }); + + Assert.True(decision.Allowed); + } + + [Fact] + public void Authorize_DeniesUnknownPlan() + { + var options = CreateOptions(); + options.Signing.Validate(); + options.Registry.Validate(); + foreach (var plan in options.Plans) + { + plan.Validate(); + } + + var registry = new PlanRegistry(options); + var principal = new ClaimsPrincipal(new ClaimsIdentity(new Claim[] { }, "test")); + + var decision = registry.Authorize(principal, new[] + { + new RegistryAccessRequest("repository", "stella-ops/enterprise/cache", new [] { "pull" }) + }); + + Assert.False(decision.Allowed); + } +} diff --git a/src/StellaOps.Registry.TokenService.Tests/RegistryScopeParserTests.cs b/src/Registry/__Tests/StellaOps.Registry.TokenService.Tests/RegistryScopeParserTests.cs similarity index 96% rename from src/StellaOps.Registry.TokenService.Tests/RegistryScopeParserTests.cs rename to src/Registry/__Tests/StellaOps.Registry.TokenService.Tests/RegistryScopeParserTests.cs index 01dee736..94cb3d32 100644 --- a/src/StellaOps.Registry.TokenService.Tests/RegistryScopeParserTests.cs +++ b/src/Registry/__Tests/StellaOps.Registry.TokenService.Tests/RegistryScopeParserTests.cs @@ -1,38 +1,38 @@ -using Microsoft.AspNetCore.Http; -using StellaOps.Registry.TokenService; - -namespace StellaOps.Registry.TokenService.Tests; - -public sealed class RegistryScopeParserTests -{ - [Fact] - public void Parse_SingleScope_DefaultsPull() - { - var query = new QueryCollection(new Dictionary<string, Microsoft.Extensions.Primitives.StringValues> - { - ["scope"] = "repository:stella-ops/public/base" - }); - - var result = RegistryScopeParser.Parse(query); - - Assert.Single(result); - Assert.Equal("repository", result[0].Type); - Assert.Equal("stella-ops/public/base", result[0].Name); - Assert.Equal(new[] { "pull" }, result[0].Actions); - } - - [Fact] - public void Parse_MultipleScopes() - { - var query = new QueryCollection(new Dictionary<string, Microsoft.Extensions.Primitives.StringValues> - { - ["scope"] = new[] { "repository:stella/public/api:pull,push", "repository:stella/private/api:pull" } - }); - - var result = RegistryScopeParser.Parse(query); - - Assert.Equal(2, result.Count); - Assert.Equal(new[] { "pull", "push" }, result[0].Actions); - Assert.Equal(new[] { "pull" }, 
result[1].Actions); - } -} +using Microsoft.AspNetCore.Http; +using StellaOps.Registry.TokenService; + +namespace StellaOps.Registry.TokenService.Tests; + +public sealed class RegistryScopeParserTests +{ + [Fact] + public void Parse_SingleScope_DefaultsPull() + { + var query = new QueryCollection(new Dictionary<string, Microsoft.Extensions.Primitives.StringValues> + { + ["scope"] = "repository:stella-ops/public/base" + }); + + var result = RegistryScopeParser.Parse(query); + + Assert.Single(result); + Assert.Equal("repository", result[0].Type); + Assert.Equal("stella-ops/public/base", result[0].Name); + Assert.Equal(new[] { "pull" }, result[0].Actions); + } + + [Fact] + public void Parse_MultipleScopes() + { + var query = new QueryCollection(new Dictionary<string, Microsoft.Extensions.Primitives.StringValues> + { + ["scope"] = new[] { "repository:stella/public/api:pull,push", "repository:stella/private/api:pull" } + }); + + var result = RegistryScopeParser.Parse(query); + + Assert.Equal(2, result.Count); + Assert.Equal(new[] { "pull", "push" }, result[0].Actions); + Assert.Equal(new[] { "pull" }, result[1].Actions); + } +} diff --git a/src/StellaOps.Registry.TokenService.Tests/RegistryTokenIssuerTests.cs b/src/Registry/__Tests/StellaOps.Registry.TokenService.Tests/RegistryTokenIssuerTests.cs similarity index 96% rename from src/StellaOps.Registry.TokenService.Tests/RegistryTokenIssuerTests.cs rename to src/Registry/__Tests/StellaOps.Registry.TokenService.Tests/RegistryTokenIssuerTests.cs index d7d8f59c..5f8d8182 100644 --- a/src/StellaOps.Registry.TokenService.Tests/RegistryTokenIssuerTests.cs +++ b/src/Registry/__Tests/StellaOps.Registry.TokenService.Tests/RegistryTokenIssuerTests.cs @@ -1,110 +1,110 @@ -using System.IdentityModel.Tokens.Jwt; -using System.Security.Claims; -using System.Security.Cryptography; -using Microsoft.Extensions.Options; -using StellaOps.Registry.TokenService; -using StellaOps.Registry.TokenService.Observability; - -namespace StellaOps.Registry.TokenService.Tests; - -public sealed class RegistryTokenIssuerTests : IDisposable -{ - private readonly List<string> _tempFiles = new(); - - [Fact] - public void IssueToken_GeneratesJwtWithAccessClaim() - { - var pemPath = CreatePemKey(); - var options = new RegistryTokenServiceOptions - { - Authority = new RegistryTokenServiceOptions.AuthorityOptions - { - Issuer = "https://authority.localhost", - RequireHttpsMetadata = false, - }, - Signing = new RegistryTokenServiceOptions.SigningOptions - { - Issuer = "https://registry.localhost/token", - KeyPath = pemPath, - Lifetime = TimeSpan.FromMinutes(5) - }, - Registry = new RegistryTokenServiceOptions.RegistryOptions - { - Realm = "https://registry.localhost/v2/token" - }, - Plans = - { - new RegistryTokenServiceOptions.PlanRule - { - Name = "community", - Repositories = - { - new RegistryTokenServiceOptions.RepositoryRule - { - Pattern = "stella-ops/public/*", - Actions = new [] { "pull" } - } - } - } - } - }; - options.Validate(); - - var issuer = new RegistryTokenIssuer( - Options.Create(options), - new PlanRegistry(options), - new RegistryTokenMetrics(), - TimeProvider.System); - - var principal = new ClaimsPrincipal(new ClaimsIdentity(new[] - { - new Claim("sub", "client-1"), - new Claim("stellaops:plan", "community") - }, "test")); - - var accessRequests = new[] - { - new RegistryAccessRequest("repository", "stella-ops/public/base", new [] { "pull" }) - }; - - var response = issuer.IssueToken(principal, "registry.localhost", accessRequests); - - 
Assert.NotEmpty(response.Token); - - var handler = new JwtSecurityTokenHandler(); - var jwt = handler.ReadJwtToken(response.Token); - - Assert.Equal("https://registry.localhost/token", jwt.Issuer); - Assert.True(jwt.Payload.TryGetValue("access", out var access)); - Assert.NotNull(access); - } - - private string CreatePemKey() - { - using var rsa = RSA.Create(2048); - var builder = new StringWriter(); - builder.WriteLine("-----BEGIN PRIVATE KEY-----"); - builder.WriteLine(Convert.ToBase64String(rsa.ExportPkcs8PrivateKey(), Base64FormattingOptions.InsertLineBreaks)); - builder.WriteLine("-----END PRIVATE KEY-----"); - - var path = Path.GetTempFileName(); - File.WriteAllText(path, builder.ToString()); - _tempFiles.Add(path); - return path; - } - - public void Dispose() - { - foreach (var file in _tempFiles) - { - try - { - File.Delete(file); - } - catch - { - // ignore - } - } - } -} +using System.IdentityModel.Tokens.Jwt; +using System.Security.Claims; +using System.Security.Cryptography; +using Microsoft.Extensions.Options; +using StellaOps.Registry.TokenService; +using StellaOps.Registry.TokenService.Observability; + +namespace StellaOps.Registry.TokenService.Tests; + +public sealed class RegistryTokenIssuerTests : IDisposable +{ + private readonly List<string> _tempFiles = new(); + + [Fact] + public void IssueToken_GeneratesJwtWithAccessClaim() + { + var pemPath = CreatePemKey(); + var options = new RegistryTokenServiceOptions + { + Authority = new RegistryTokenServiceOptions.AuthorityOptions + { + Issuer = "https://authority.localhost", + RequireHttpsMetadata = false, + }, + Signing = new RegistryTokenServiceOptions.SigningOptions + { + Issuer = "https://registry.localhost/token", + KeyPath = pemPath, + Lifetime = TimeSpan.FromMinutes(5) + }, + Registry = new RegistryTokenServiceOptions.RegistryOptions + { + Realm = "https://registry.localhost/v2/token" + }, + Plans = + { + new RegistryTokenServiceOptions.PlanRule + { + Name = "community", + Repositories = + { + new RegistryTokenServiceOptions.RepositoryRule + { + Pattern = "stella-ops/public/*", + Actions = new [] { "pull" } + } + } + } + } + }; + options.Validate(); + + var issuer = new RegistryTokenIssuer( + Options.Create(options), + new PlanRegistry(options), + new RegistryTokenMetrics(), + TimeProvider.System); + + var principal = new ClaimsPrincipal(new ClaimsIdentity(new[] + { + new Claim("sub", "client-1"), + new Claim("stellaops:plan", "community") + }, "test")); + + var accessRequests = new[] + { + new RegistryAccessRequest("repository", "stella-ops/public/base", new [] { "pull" }) + }; + + var response = issuer.IssueToken(principal, "registry.localhost", accessRequests); + + Assert.NotEmpty(response.Token); + + var handler = new JwtSecurityTokenHandler(); + var jwt = handler.ReadJwtToken(response.Token); + + Assert.Equal("https://registry.localhost/token", jwt.Issuer); + Assert.True(jwt.Payload.TryGetValue("access", out var access)); + Assert.NotNull(access); + } + + private string CreatePemKey() + { + using var rsa = RSA.Create(2048); + var builder = new StringWriter(); + builder.WriteLine("-----BEGIN PRIVATE KEY-----"); + builder.WriteLine(Convert.ToBase64String(rsa.ExportPkcs8PrivateKey(), Base64FormattingOptions.InsertLineBreaks)); + builder.WriteLine("-----END PRIVATE KEY-----"); + + var path = Path.GetTempFileName(); + File.WriteAllText(path, builder.ToString()); + _tempFiles.Add(path); + return path; + } + + public void Dispose() + { + foreach (var file in _tempFiles) + { + try + { + File.Delete(file); + } + catch + { 
+ // ignore + } + } + } +} diff --git a/src/Registry/__Tests/StellaOps.Registry.TokenService.Tests/StellaOps.Registry.TokenService.Tests.csproj b/src/Registry/__Tests/StellaOps.Registry.TokenService.Tests/StellaOps.Registry.TokenService.Tests.csproj new file mode 100644 index 00000000..ba44a784 --- /dev/null +++ b/src/Registry/__Tests/StellaOps.Registry.TokenService.Tests/StellaOps.Registry.TokenService.Tests.csproj @@ -0,0 +1,29 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <OutputType>Exe</OutputType> + <IsPackable>false</IsPackable> + + + + + + + + + + </PropertyGroup> + + <ItemGroup> + <Content Include="xunit.runner.json" CopyToOutputDirectory="PreserveNewest" /> + </ItemGroup> + + <ItemGroup> + <ProjectReference Include="../../StellaOps.Registry.TokenService/StellaOps.Registry.TokenService.csproj" /> + </ItemGroup> + +</Project> \ No newline at end of file diff --git a/src/StellaOps.Registry.TokenService.Tests/UnitTest1.cs b/src/Registry/__Tests/StellaOps.Registry.TokenService.Tests/UnitTest1.cs similarity index 92% rename from src/StellaOps.Registry.TokenService.Tests/UnitTest1.cs rename to src/Registry/__Tests/StellaOps.Registry.TokenService.Tests/UnitTest1.cs index 5760479c..d232dcb6 100644 --- a/src/StellaOps.Registry.TokenService.Tests/UnitTest1.cs +++ b/src/Registry/__Tests/StellaOps.Registry.TokenService.Tests/UnitTest1.cs @@ -1,10 +1,10 @@ -namespace StellaOps.Registry.TokenService.Tests; - -public class UnitTest1 -{ - [Fact] - public void Test1() - { - - } -} +namespace StellaOps.Registry.TokenService.Tests; + +public class UnitTest1 +{ + [Fact] + public void Test1() + { + + } +} diff --git a/src/Registry/__Tests/StellaOps.Registry.TokenService.Tests/xunit.runner.json b/src/Registry/__Tests/StellaOps.Registry.TokenService.Tests/xunit.runner.json new file mode 100644 index 00000000..249d815c --- /dev/null +++ b/src/Registry/__Tests/StellaOps.Registry.TokenService.Tests/xunit.runner.json @@ -0,0 +1,3 @@ +{ + "$schema": "https://xunit.net/schema/current/xunit.runner.schema.json" +} diff --git a/src/RiskEngine/StellaOps.RiskEngine.sln b/src/RiskEngine/StellaOps.RiskEngine.sln new file mode 100644 index 00000000..88dd530a --- /dev/null +++ b/src/RiskEngine/StellaOps.RiskEngine.sln @@ -0,0 +1,99 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.0.31903.59 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "StellaOps.RiskEngine", "StellaOps.RiskEngine", "{E9DC9A4E-8D1E-15F0-822A-EE12A198E114}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.RiskEngine.Core", "StellaOps.RiskEngine\StellaOps.RiskEngine.Core\StellaOps.RiskEngine.Core.csproj", "{36C6C658-51E2-4D1C-9C2D-723A6015B0A4}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.RiskEngine.Infrastructure", "StellaOps.RiskEngine\StellaOps.RiskEngine.Infrastructure\StellaOps.RiskEngine.Infrastructure.csproj", "{BBFBBF30-8AC3-45A9-8EC7-35A0B3F50F81}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.RiskEngine.Tests", "StellaOps.RiskEngine\StellaOps.RiskEngine.Tests\StellaOps.RiskEngine.Tests.csproj", "{015C24AC-3CCD-4FE7-BAD6-4A3728D4978A}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.RiskEngine.WebService", 
"StellaOps.RiskEngine\StellaOps.RiskEngine.WebService\StellaOps.RiskEngine.WebService.csproj", "{5B3CBF2B-B913-4A40-8D78-12F68A6828EF}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.RiskEngine.Worker", "StellaOps.RiskEngine\StellaOps.RiskEngine.Worker\StellaOps.RiskEngine.Worker.csproj", "{55F4423C-FDAD-43D4-9A91-EB217EB1BE2B}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|Any CPU = Release|Any CPU + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {36C6C658-51E2-4D1C-9C2D-723A6015B0A4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {36C6C658-51E2-4D1C-9C2D-723A6015B0A4}.Debug|Any CPU.Build.0 = Debug|Any CPU + {36C6C658-51E2-4D1C-9C2D-723A6015B0A4}.Debug|x64.ActiveCfg = Debug|Any CPU + {36C6C658-51E2-4D1C-9C2D-723A6015B0A4}.Debug|x64.Build.0 = Debug|Any CPU + {36C6C658-51E2-4D1C-9C2D-723A6015B0A4}.Debug|x86.ActiveCfg = Debug|Any CPU + {36C6C658-51E2-4D1C-9C2D-723A6015B0A4}.Debug|x86.Build.0 = Debug|Any CPU + {36C6C658-51E2-4D1C-9C2D-723A6015B0A4}.Release|Any CPU.ActiveCfg = Release|Any CPU + {36C6C658-51E2-4D1C-9C2D-723A6015B0A4}.Release|Any CPU.Build.0 = Release|Any CPU + {36C6C658-51E2-4D1C-9C2D-723A6015B0A4}.Release|x64.ActiveCfg = Release|Any CPU + {36C6C658-51E2-4D1C-9C2D-723A6015B0A4}.Release|x64.Build.0 = Release|Any CPU + {36C6C658-51E2-4D1C-9C2D-723A6015B0A4}.Release|x86.ActiveCfg = Release|Any CPU + {36C6C658-51E2-4D1C-9C2D-723A6015B0A4}.Release|x86.Build.0 = Release|Any CPU + {BBFBBF30-8AC3-45A9-8EC7-35A0B3F50F81}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {BBFBBF30-8AC3-45A9-8EC7-35A0B3F50F81}.Debug|Any CPU.Build.0 = Debug|Any CPU + {BBFBBF30-8AC3-45A9-8EC7-35A0B3F50F81}.Debug|x64.ActiveCfg = Debug|Any CPU + {BBFBBF30-8AC3-45A9-8EC7-35A0B3F50F81}.Debug|x64.Build.0 = Debug|Any CPU + {BBFBBF30-8AC3-45A9-8EC7-35A0B3F50F81}.Debug|x86.ActiveCfg = Debug|Any CPU + {BBFBBF30-8AC3-45A9-8EC7-35A0B3F50F81}.Debug|x86.Build.0 = Debug|Any CPU + {BBFBBF30-8AC3-45A9-8EC7-35A0B3F50F81}.Release|Any CPU.ActiveCfg = Release|Any CPU + {BBFBBF30-8AC3-45A9-8EC7-35A0B3F50F81}.Release|Any CPU.Build.0 = Release|Any CPU + {BBFBBF30-8AC3-45A9-8EC7-35A0B3F50F81}.Release|x64.ActiveCfg = Release|Any CPU + {BBFBBF30-8AC3-45A9-8EC7-35A0B3F50F81}.Release|x64.Build.0 = Release|Any CPU + {BBFBBF30-8AC3-45A9-8EC7-35A0B3F50F81}.Release|x86.ActiveCfg = Release|Any CPU + {BBFBBF30-8AC3-45A9-8EC7-35A0B3F50F81}.Release|x86.Build.0 = Release|Any CPU + {015C24AC-3CCD-4FE7-BAD6-4A3728D4978A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {015C24AC-3CCD-4FE7-BAD6-4A3728D4978A}.Debug|Any CPU.Build.0 = Debug|Any CPU + {015C24AC-3CCD-4FE7-BAD6-4A3728D4978A}.Debug|x64.ActiveCfg = Debug|Any CPU + {015C24AC-3CCD-4FE7-BAD6-4A3728D4978A}.Debug|x64.Build.0 = Debug|Any CPU + {015C24AC-3CCD-4FE7-BAD6-4A3728D4978A}.Debug|x86.ActiveCfg = Debug|Any CPU + {015C24AC-3CCD-4FE7-BAD6-4A3728D4978A}.Debug|x86.Build.0 = Debug|Any CPU + {015C24AC-3CCD-4FE7-BAD6-4A3728D4978A}.Release|Any CPU.ActiveCfg = Release|Any CPU + {015C24AC-3CCD-4FE7-BAD6-4A3728D4978A}.Release|Any CPU.Build.0 = Release|Any CPU + {015C24AC-3CCD-4FE7-BAD6-4A3728D4978A}.Release|x64.ActiveCfg = Release|Any CPU + {015C24AC-3CCD-4FE7-BAD6-4A3728D4978A}.Release|x64.Build.0 = Release|Any CPU + {015C24AC-3CCD-4FE7-BAD6-4A3728D4978A}.Release|x86.ActiveCfg = Release|Any CPU + {015C24AC-3CCD-4FE7-BAD6-4A3728D4978A}.Release|x86.Build.0 = Release|Any CPU + 
{5B3CBF2B-B913-4A40-8D78-12F68A6828EF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {5B3CBF2B-B913-4A40-8D78-12F68A6828EF}.Debug|Any CPU.Build.0 = Debug|Any CPU + {5B3CBF2B-B913-4A40-8D78-12F68A6828EF}.Debug|x64.ActiveCfg = Debug|Any CPU + {5B3CBF2B-B913-4A40-8D78-12F68A6828EF}.Debug|x64.Build.0 = Debug|Any CPU + {5B3CBF2B-B913-4A40-8D78-12F68A6828EF}.Debug|x86.ActiveCfg = Debug|Any CPU + {5B3CBF2B-B913-4A40-8D78-12F68A6828EF}.Debug|x86.Build.0 = Debug|Any CPU + {5B3CBF2B-B913-4A40-8D78-12F68A6828EF}.Release|Any CPU.ActiveCfg = Release|Any CPU + {5B3CBF2B-B913-4A40-8D78-12F68A6828EF}.Release|Any CPU.Build.0 = Release|Any CPU + {5B3CBF2B-B913-4A40-8D78-12F68A6828EF}.Release|x64.ActiveCfg = Release|Any CPU + {5B3CBF2B-B913-4A40-8D78-12F68A6828EF}.Release|x64.Build.0 = Release|Any CPU + {5B3CBF2B-B913-4A40-8D78-12F68A6828EF}.Release|x86.ActiveCfg = Release|Any CPU + {5B3CBF2B-B913-4A40-8D78-12F68A6828EF}.Release|x86.Build.0 = Release|Any CPU + {55F4423C-FDAD-43D4-9A91-EB217EB1BE2B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {55F4423C-FDAD-43D4-9A91-EB217EB1BE2B}.Debug|Any CPU.Build.0 = Debug|Any CPU + {55F4423C-FDAD-43D4-9A91-EB217EB1BE2B}.Debug|x64.ActiveCfg = Debug|Any CPU + {55F4423C-FDAD-43D4-9A91-EB217EB1BE2B}.Debug|x64.Build.0 = Debug|Any CPU + {55F4423C-FDAD-43D4-9A91-EB217EB1BE2B}.Debug|x86.ActiveCfg = Debug|Any CPU + {55F4423C-FDAD-43D4-9A91-EB217EB1BE2B}.Debug|x86.Build.0 = Debug|Any CPU + {55F4423C-FDAD-43D4-9A91-EB217EB1BE2B}.Release|Any CPU.ActiveCfg = Release|Any CPU + {55F4423C-FDAD-43D4-9A91-EB217EB1BE2B}.Release|Any CPU.Build.0 = Release|Any CPU + {55F4423C-FDAD-43D4-9A91-EB217EB1BE2B}.Release|x64.ActiveCfg = Release|Any CPU + {55F4423C-FDAD-43D4-9A91-EB217EB1BE2B}.Release|x64.Build.0 = Release|Any CPU + {55F4423C-FDAD-43D4-9A91-EB217EB1BE2B}.Release|x86.ActiveCfg = Release|Any CPU + {55F4423C-FDAD-43D4-9A91-EB217EB1BE2B}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(NestedProjects) = preSolution + {36C6C658-51E2-4D1C-9C2D-723A6015B0A4} = {E9DC9A4E-8D1E-15F0-822A-EE12A198E114} + {BBFBBF30-8AC3-45A9-8EC7-35A0B3F50F81} = {E9DC9A4E-8D1E-15F0-822A-EE12A198E114} + {015C24AC-3CCD-4FE7-BAD6-4A3728D4978A} = {E9DC9A4E-8D1E-15F0-822A-EE12A198E114} + {5B3CBF2B-B913-4A40-8D78-12F68A6828EF} = {E9DC9A4E-8D1E-15F0-822A-EE12A198E114} + {55F4423C-FDAD-43D4-9A91-EB217EB1BE2B} = {E9DC9A4E-8D1E-15F0-822A-EE12A198E114} + EndGlobalSection +EndGlobal diff --git a/src/StellaOps.RiskEngine/AGENTS.md b/src/RiskEngine/StellaOps.RiskEngine/AGENTS.md similarity index 98% rename from src/StellaOps.RiskEngine/AGENTS.md rename to src/RiskEngine/StellaOps.RiskEngine/AGENTS.md index ff92f4be..ff87ddc0 100644 --- a/src/StellaOps.RiskEngine/AGENTS.md +++ b/src/RiskEngine/StellaOps.RiskEngine/AGENTS.md @@ -1,23 +1,23 @@ -# Risk Engine Guild Charter - -## Mission -Design, build, and operate the scoring runtime that computes Risk Scoring Profiles across StellaOps deployments while preserving provenance and explainability. - -## Scope -- Scoring workers, job scheduler, provider registry, caching, and explainability artifacts. -- Integration with Findings Ledger, Conseiller, Excitator, and Policy Engine. -- Performance, determinism, and observability of scoring jobs. -- Air-gapped support through offline factor bundles. - -## Definition of Done -- Scoring jobs execute deterministically with audit trails and explainability payloads. 
-- Providers registered with TTLs and health checks; missing data surfaced explicitly. -- Benchmarks and SLO dashboards in place with incident response runbooks. - -## Module Layout -- `StellaOps.RiskEngine.Core/` — scoring orchestrators, provider contracts, explainability models. -- `StellaOps.RiskEngine.Infrastructure/` — persistence, caching, provider loading, external data connectors. -- `StellaOps.RiskEngine.WebService/` — APIs for jobs, results, explanations. -- `StellaOps.RiskEngine.Worker/` — execution loops, provider refreshers, scoring pipelines. -- `StellaOps.RiskEngine.Tests/` — unit tests for core/infrastructure services. -- `StellaOps.RiskEngine.sln` — solution unifying module projects. +# Risk Engine Guild Charter + +## Mission +Design, build, and operate the scoring runtime that computes Risk Scoring Profiles across StellaOps deployments while preserving provenance and explainability. + +## Scope +- Scoring workers, job scheduler, provider registry, caching, and explainability artifacts. +- Integration with Findings Ledger, Conseiller, Excitator, and Policy Engine. +- Performance, determinism, and observability of scoring jobs. +- Air-gapped support through offline factor bundles. + +## Definition of Done +- Scoring jobs execute deterministically with audit trails and explainability payloads. +- Providers registered with TTLs and health checks; missing data surfaced explicitly. +- Benchmarks and SLO dashboards in place with incident response runbooks. + +## Module Layout +- `StellaOps.RiskEngine.Core/` — scoring orchestrators, provider contracts, explainability models. +- `StellaOps.RiskEngine.Infrastructure/` — persistence, caching, provider loading, external data connectors. +- `StellaOps.RiskEngine.WebService/` — APIs for jobs, results, explanations. +- `StellaOps.RiskEngine.Worker/` — execution loops, provider refreshers, scoring pipelines. +- `StellaOps.RiskEngine.Tests/` — unit tests for core/infrastructure services. +- `StellaOps.RiskEngine.sln` — solution unifying module projects. 
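The charter above names provider contracts, TTL-backed registration, and explainability payloads but nothing in this patch shows their shape. As orientation only — a minimal sketch under assumed names, not code from the repository — a factor provider registered in `StellaOps.RiskEngine.Core` could look roughly like this:

```csharp
// Illustrative sketch only: type and member names are assumptions, not part of this patch.
using System;
using System.Threading;
using System.Threading.Tasks;

namespace StellaOps.RiskEngine.Core;

/// <summary>One normalized factor value plus the provenance and TTL the charter asks for.</summary>
public sealed record FactorResult(
    string FactorId,
    double? NormalizedValue,      // null means missing data, surfaced explicitly rather than defaulted
    string Source,                // where the value came from (feed, offline bundle, cache)
    DateTimeOffset RetrievedAt,
    TimeSpan Ttl);

/// <summary>Contract a provider registers with the worker's provider registry.</summary>
public interface IRiskFactorProvider
{
    string FactorId { get; }
    TimeSpan Ttl { get; }

    /// <summary>Must be deterministic for a given finding/input hash so scoring jobs stay replayable.</summary>
    Task<FactorResult> GetAsync(string findingId, CancellationToken cancellationToken);
}
```

Any real contract would live alongside the scoring orchestrators in `StellaOps.RiskEngine.Core/` and be driven from the Worker execution loop described in the module layout above.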
diff --git a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.Core/Class1.cs b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Core/Class1.cs similarity index 91% rename from src/StellaOps.RiskEngine/StellaOps.RiskEngine.Core/Class1.cs rename to src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Core/Class1.cs index 49a054c7..e92fe0f1 100644 --- a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.Core/Class1.cs +++ b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Core/Class1.cs @@ -1,6 +1,6 @@ -namespace StellaOps.RiskEngine.Core; - -public class Class1 -{ - -} +namespace StellaOps.RiskEngine.Core; + +public class Class1 +{ + +} diff --git a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.Core/StellaOps.RiskEngine.Core.csproj b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Core/StellaOps.RiskEngine.Core.csproj similarity index 95% rename from src/StellaOps.RiskEngine/StellaOps.RiskEngine.Core/StellaOps.RiskEngine.Core.csproj rename to src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Core/StellaOps.RiskEngine.Core.csproj index fe0eef44..e4808f0d 100644 --- a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.Core/StellaOps.RiskEngine.Core.csproj +++ b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Core/StellaOps.RiskEngine.Core.csproj @@ -1,18 +1,18 @@ -<?xml version="1.0" ?> -<Project Sdk="Microsoft.NET.Sdk"> - - - - <PropertyGroup> - - - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <LangVersion>preview</LangVersion> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - - - -</Project> +<?xml version="1.0" ?> +<Project Sdk="Microsoft.NET.Sdk"> + + + + <PropertyGroup> + + + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <LangVersion>preview</LangVersion> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + + + +</Project> diff --git a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.Infrastructure/Class1.cs b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Infrastructure/Class1.cs similarity index 92% rename from src/StellaOps.RiskEngine/StellaOps.RiskEngine.Infrastructure/Class1.cs rename to src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Infrastructure/Class1.cs index 8a826da1..591eb10d 100644 --- a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.Infrastructure/Class1.cs +++ b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Infrastructure/Class1.cs @@ -1,6 +1,6 @@ -namespace StellaOps.RiskEngine.Infrastructure; - -public class Class1 -{ - -} +namespace StellaOps.RiskEngine.Infrastructure; + +public class Class1 +{ + +} diff --git a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.Infrastructure/StellaOps.RiskEngine.Infrastructure.csproj b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Infrastructure/StellaOps.RiskEngine.Infrastructure.csproj similarity index 94% rename from src/StellaOps.RiskEngine/StellaOps.RiskEngine.Infrastructure/StellaOps.RiskEngine.Infrastructure.csproj rename to src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Infrastructure/StellaOps.RiskEngine.Infrastructure.csproj index 3a68070b..90fa18ac 100644 --- a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.Infrastructure/StellaOps.RiskEngine.Infrastructure.csproj +++ b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Infrastructure/StellaOps.RiskEngine.Infrastructure.csproj @@ -1,28 +1,28 @@ -<?xml version="1.0" ?> -<Project Sdk="Microsoft.NET.Sdk"> - - - - 
<ItemGroup> - - - <ProjectReference Include="..\StellaOps.RiskEngine.Core\StellaOps.RiskEngine.Core.csproj"/> - - - </ItemGroup> - - - - <PropertyGroup> - - - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <LangVersion>preview</LangVersion> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - - - -</Project> +<?xml version="1.0" ?> +<Project Sdk="Microsoft.NET.Sdk"> + + + + <ItemGroup> + + + <ProjectReference Include="..\StellaOps.RiskEngine.Core\StellaOps.RiskEngine.Core.csproj"/> + + + </ItemGroup> + + + + <PropertyGroup> + + + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <LangVersion>preview</LangVersion> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + + + +</Project> diff --git a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.Tests/StellaOps.RiskEngine.Tests.csproj b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Tests/StellaOps.RiskEngine.Tests.csproj similarity index 91% rename from src/StellaOps.RiskEngine/StellaOps.RiskEngine.Tests/StellaOps.RiskEngine.Tests.csproj rename to src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Tests/StellaOps.RiskEngine.Tests.csproj index 3a425b95..e4f98896 100644 --- a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.Tests/StellaOps.RiskEngine.Tests.csproj +++ b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Tests/StellaOps.RiskEngine.Tests.csproj @@ -1,135 +1,135 @@ -<?xml version="1.0" ?> -<Project Sdk="Microsoft.NET.Sdk"> - - - - - - <PropertyGroup> - - - - - <OutputType>Exe</OutputType> - - - - - <IsPackable>false</IsPackable> - - - - - - - - - - - - - - <TargetFramework>net10.0</TargetFramework> - - - <ImplicitUsings>enable</ImplicitUsings> - - - <Nullable>enable</Nullable> - - - <UseConcelierTestInfra>false</UseConcelierTestInfra> - - - <LangVersion>preview</LangVersion> - - - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - - - </PropertyGroup> - - - - - - <ItemGroup> - - - - - <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1"/> - - - - - <PackageReference Include="xunit.v3" Version="3.0.0"/> - - - - - <PackageReference Include="xunit.runner.visualstudio" Version="3.1.3"/> - - - - - </ItemGroup> - - - - - - <ItemGroup> - - - - - <Content Include="xunit.runner.json" CopyToOutputDirectory="PreserveNewest"/> - - - - - </ItemGroup> - - - - - - <ItemGroup> - - - - - <Using Include="Xunit"/> - - - - - </ItemGroup> - - - - - - <ItemGroup> - - - - - <ProjectReference Include="..\StellaOps.RiskEngine.Core\StellaOps.RiskEngine.Core.csproj"/> - - - - - <ProjectReference Include="..\StellaOps.RiskEngine.Infrastructure\StellaOps.RiskEngine.Infrastructure.csproj"/> - - - - - </ItemGroup> - - - - - -</Project> +<?xml version="1.0" ?> +<Project Sdk="Microsoft.NET.Sdk"> + + + + + + <PropertyGroup> + + + + + <OutputType>Exe</OutputType> + + + + + <IsPackable>false</IsPackable> + + + + + + + + + + + + + + <TargetFramework>net10.0</TargetFramework> + + + <ImplicitUsings>enable</ImplicitUsings> + + + <Nullable>enable</Nullable> + + + <UseConcelierTestInfra>false</UseConcelierTestInfra> + + + <LangVersion>preview</LangVersion> + + + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + + + </PropertyGroup> + + + + + + <ItemGroup> + + + + + <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1"/> + + + + + <PackageReference Include="xunit.v3" Version="3.0.0"/> + + + + + <PackageReference 
Include="xunit.runner.visualstudio" Version="3.1.3"/> + + + + + </ItemGroup> + + + + + + <ItemGroup> + + + + + <Content Include="xunit.runner.json" CopyToOutputDirectory="PreserveNewest"/> + + + + + </ItemGroup> + + + + + + <ItemGroup> + + + + + <Using Include="Xunit"/> + + + + + </ItemGroup> + + + + + + <ItemGroup> + + + + + <ProjectReference Include="..\StellaOps.RiskEngine.Core\StellaOps.RiskEngine.Core.csproj"/> + + + + + <ProjectReference Include="..\StellaOps.RiskEngine.Infrastructure\StellaOps.RiskEngine.Infrastructure.csproj"/> + + + + + </ItemGroup> + + + + + +</Project> diff --git a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.Tests/UnitTest1.cs b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Tests/UnitTest1.cs similarity index 92% rename from src/StellaOps.RiskEngine/StellaOps.RiskEngine.Tests/UnitTest1.cs rename to src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Tests/UnitTest1.cs index f75d6c9b..561bf259 100644 --- a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.Tests/UnitTest1.cs +++ b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Tests/UnitTest1.cs @@ -1,10 +1,10 @@ -namespace StellaOps.RiskEngine.Tests; - -public class UnitTest1 -{ - [Fact] - public void Test1() - { - - } -} +namespace StellaOps.RiskEngine.Tests; + +public class UnitTest1 +{ + [Fact] + public void Test1() + { + + } +} diff --git a/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Tests/xunit.runner.json b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Tests/xunit.runner.json new file mode 100644 index 00000000..249d815c --- /dev/null +++ b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Tests/xunit.runner.json @@ -0,0 +1,3 @@ +{ + "$schema": "https://xunit.net/schema/current/xunit.runner.schema.json" +} diff --git a/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/Program.cs b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/Program.cs new file mode 100644 index 00000000..3917ef1b --- /dev/null +++ b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/Program.cs @@ -0,0 +1,41 @@ +var builder = WebApplication.CreateBuilder(args); + +// Add services to the container. +// Learn more about configuring OpenAPI at https://aka.ms/aspnet/openapi +builder.Services.AddOpenApi(); + +var app = builder.Build(); + +// Configure the HTTP request pipeline. +if (app.Environment.IsDevelopment()) +{ + app.MapOpenApi(); +} + +app.UseHttpsRedirection(); + +var summaries = new[] +{ + "Freezing", "Bracing", "Chilly", "Cool", "Mild", "Warm", "Balmy", "Hot", "Sweltering", "Scorching" +}; + +app.MapGet("/weatherforecast", () => +{ + var forecast = Enumerable.Range(1, 5).Select(index => + new WeatherForecast + ( + DateOnly.FromDateTime(DateTime.Now.AddDays(index)), + Random.Shared.Next(-20, 55), + summaries[Random.Shared.Next(summaries.Length)] + )) + .ToArray(); + return forecast; +}) +.WithName("GetWeatherForecast"); + +app.Run(); + +record WeatherForecast(DateOnly Date, int TemperatureC, string? 
Summary) +{ + public int TemperatureF => 32 + (int)(TemperatureC / 0.5556); +} diff --git a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/Properties/launchSettings.json b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/Properties/launchSettings.json similarity index 96% rename from src/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/Properties/launchSettings.json rename to src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/Properties/launchSettings.json index 374038e2..14d06883 100644 --- a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/Properties/launchSettings.json +++ b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/Properties/launchSettings.json @@ -1,23 +1,23 @@ -{ - "$schema": "https://json.schemastore.org/launchsettings.json", - "profiles": { - "http": { - "commandName": "Project", - "dotnetRunMessages": true, - "launchBrowser": false, - "applicationUrl": "http://localhost:5115", - "environmentVariables": { - "ASPNETCORE_ENVIRONMENT": "Development" - } - }, - "https": { - "commandName": "Project", - "dotnetRunMessages": true, - "launchBrowser": false, - "applicationUrl": "https://localhost:7103;http://localhost:5115", - "environmentVariables": { - "ASPNETCORE_ENVIRONMENT": "Development" - } - } - } -} +{ + "$schema": "https://json.schemastore.org/launchsettings.json", + "profiles": { + "http": { + "commandName": "Project", + "dotnetRunMessages": true, + "launchBrowser": false, + "applicationUrl": "http://localhost:5115", + "environmentVariables": { + "ASPNETCORE_ENVIRONMENT": "Development" + } + }, + "https": { + "commandName": "Project", + "dotnetRunMessages": true, + "launchBrowser": false, + "applicationUrl": "https://localhost:7103;http://localhost:5115", + "environmentVariables": { + "ASPNETCORE_ENVIRONMENT": "Development" + } + } + } +} diff --git a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/StellaOps.RiskEngine.WebService.csproj b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/StellaOps.RiskEngine.WebService.csproj similarity index 94% rename from src/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/StellaOps.RiskEngine.WebService.csproj rename to src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/StellaOps.RiskEngine.WebService.csproj index b870c717..006f4055 100644 --- a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/StellaOps.RiskEngine.WebService.csproj +++ b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/StellaOps.RiskEngine.WebService.csproj @@ -1,41 +1,41 @@ -<?xml version="1.0" ?> -<Project Sdk="Microsoft.NET.Sdk.Web"> - - - - <PropertyGroup> - - - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <LangVersion>preview</LangVersion> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - - - - <ItemGroup> - - - <PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="10.0.0-rc.2.25502.107"/> - - - </ItemGroup> - - - - <ItemGroup> - - - <ProjectReference Include="..\StellaOps.RiskEngine.Core\StellaOps.RiskEngine.Core.csproj"/> - - - <ProjectReference Include="..\StellaOps.RiskEngine.Infrastructure\StellaOps.RiskEngine.Infrastructure.csproj"/> - - - </ItemGroup> - - - -</Project> +<?xml version="1.0" ?> +<Project Sdk="Microsoft.NET.Sdk.Web"> + + + + <PropertyGroup> + + + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + 
<LangVersion>preview</LangVersion> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + + + + <ItemGroup> + + + <PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="10.0.0-rc.2.25502.107"/> + + + </ItemGroup> + + + + <ItemGroup> + + + <ProjectReference Include="..\StellaOps.RiskEngine.Core\StellaOps.RiskEngine.Core.csproj"/> + + + <ProjectReference Include="..\StellaOps.RiskEngine.Infrastructure\StellaOps.RiskEngine.Infrastructure.csproj"/> + + + </ItemGroup> + + + +</Project> diff --git a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/StellaOps.RiskEngine.WebService.http b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/StellaOps.RiskEngine.WebService.http similarity index 96% rename from src/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/StellaOps.RiskEngine.WebService.http rename to src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/StellaOps.RiskEngine.WebService.http index 3d7121dc..99fe7efe 100644 --- a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/StellaOps.RiskEngine.WebService.http +++ b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/StellaOps.RiskEngine.WebService.http @@ -1,6 +1,6 @@ -@StellaOps.RiskEngine.WebService_HostAddress = http://localhost:5115 - -GET {{StellaOps.RiskEngine.WebService_HostAddress}}/weatherforecast/ -Accept: application/json - -### +@StellaOps.RiskEngine.WebService_HostAddress = http://localhost:5115 + +GET {{StellaOps.RiskEngine.WebService_HostAddress}}/weatherforecast/ +Accept: application/json + +### diff --git a/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/appsettings.Development.json b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/appsettings.Development.json new file mode 100644 index 00000000..ff66ba6b --- /dev/null +++ b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/appsettings.Development.json @@ -0,0 +1,8 @@ +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.AspNetCore": "Warning" + } + } +} diff --git a/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/appsettings.json b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/appsettings.json new file mode 100644 index 00000000..4d566948 --- /dev/null +++ b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/appsettings.json @@ -0,0 +1,9 @@ +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.AspNetCore": "Warning" + } + }, + "AllowedHosts": "*" +} diff --git a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/Program.cs b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/Program.cs similarity index 96% rename from src/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/Program.cs rename to src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/Program.cs index 18b42424..64ad9bc9 100644 --- a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/Program.cs +++ b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/Program.cs @@ -1,7 +1,7 @@ -using StellaOps.RiskEngine.Worker; - -var builder = Host.CreateApplicationBuilder(args); -builder.Services.AddHostedService<Worker>(); - -var host = builder.Build(); -host.Run(); +using StellaOps.RiskEngine.Worker; + +var builder = Host.CreateApplicationBuilder(args); +builder.Services.AddHostedService<Worker>(); + +var host = builder.Build(); +host.Run(); diff --git a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/Properties/launchSettings.json 
b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/Properties/launchSettings.json similarity index 95% rename from src/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/Properties/launchSettings.json rename to src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/Properties/launchSettings.json index c50acadb..6942a306 100644 --- a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/Properties/launchSettings.json +++ b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/Properties/launchSettings.json @@ -1,12 +1,12 @@ -{ - "$schema": "https://json.schemastore.org/launchsettings.json", - "profiles": { - "StellaOps.RiskEngine.Worker": { - "commandName": "Project", - "dotnetRunMessages": true, - "environmentVariables": { - "DOTNET_ENVIRONMENT": "Development" - } - } - } -} +{ + "$schema": "https://json.schemastore.org/launchsettings.json", + "profiles": { + "StellaOps.RiskEngine.Worker": { + "commandName": "Project", + "dotnetRunMessages": true, + "environmentVariables": { + "DOTNET_ENVIRONMENT": "Development" + } + } + } +} diff --git a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/StellaOps.RiskEngine.Worker.csproj b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/StellaOps.RiskEngine.Worker.csproj similarity index 95% rename from src/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/StellaOps.RiskEngine.Worker.csproj rename to src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/StellaOps.RiskEngine.Worker.csproj index 667832a0..590d9d2d 100644 --- a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/StellaOps.RiskEngine.Worker.csproj +++ b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/StellaOps.RiskEngine.Worker.csproj @@ -1,43 +1,43 @@ -<?xml version="1.0" ?> -<Project Sdk="Microsoft.NET.Sdk.Worker"> - - - - <PropertyGroup> - - - <UserSecretsId>dotnet-StellaOps.RiskEngine.Worker-b973483d-c33b-47fb-a20f-e2669c244427</UserSecretsId> - - - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <LangVersion>preview</LangVersion> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - - - - <ItemGroup> - - - <PackageReference Include="Microsoft.Extensions.Hosting" Version="10.0.0-rc.2.25502.107"/> - - - </ItemGroup> - - - - <ItemGroup> - - - <ProjectReference Include="..\StellaOps.RiskEngine.Core\StellaOps.RiskEngine.Core.csproj"/> - - - <ProjectReference Include="..\StellaOps.RiskEngine.Infrastructure\StellaOps.RiskEngine.Infrastructure.csproj"/> - - - </ItemGroup> - - -</Project> +<?xml version="1.0" ?> +<Project Sdk="Microsoft.NET.Sdk.Worker"> + + + + <PropertyGroup> + + + <UserSecretsId>dotnet-StellaOps.RiskEngine.Worker-b973483d-c33b-47fb-a20f-e2669c244427</UserSecretsId> + + + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <LangVersion>preview</LangVersion> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + + + + <ItemGroup> + + + <PackageReference Include="Microsoft.Extensions.Hosting" Version="10.0.0-rc.2.25502.107"/> + + + </ItemGroup> + + + + <ItemGroup> + + + <ProjectReference Include="..\StellaOps.RiskEngine.Core\StellaOps.RiskEngine.Core.csproj"/> + + + <ProjectReference Include="..\StellaOps.RiskEngine.Infrastructure\StellaOps.RiskEngine.Infrastructure.csproj"/> + + + </ItemGroup> + + +</Project> diff --git a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/Worker.cs 
b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/Worker.cs similarity index 96% rename from src/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/Worker.cs rename to src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/Worker.cs index f38be7de..e9018421 100644 --- a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/Worker.cs +++ b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/Worker.cs @@ -1,16 +1,16 @@ -namespace StellaOps.RiskEngine.Worker; - -public class Worker(ILogger<Worker> logger) : BackgroundService -{ - protected override async Task ExecuteAsync(CancellationToken stoppingToken) - { - while (!stoppingToken.IsCancellationRequested) - { - if (logger.IsEnabled(LogLevel.Information)) - { - logger.LogInformation("Worker running at: {time}", DateTimeOffset.Now); - } - await Task.Delay(1000, stoppingToken); - } - } -} +namespace StellaOps.RiskEngine.Worker; + +public class Worker(ILogger<Worker> logger) : BackgroundService +{ + protected override async Task ExecuteAsync(CancellationToken stoppingToken) + { + while (!stoppingToken.IsCancellationRequested) + { + if (logger.IsEnabled(LogLevel.Information)) + { + logger.LogInformation("Worker running at: {time}", DateTimeOffset.Now); + } + await Task.Delay(1000, stoppingToken); + } + } +} diff --git a/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/appsettings.Development.json b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/appsettings.Development.json new file mode 100644 index 00000000..69017646 --- /dev/null +++ b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/appsettings.Development.json @@ -0,0 +1,8 @@ +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.Hosting.Lifetime": "Information" + } + } +} diff --git a/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/appsettings.json b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/appsettings.json new file mode 100644 index 00000000..69017646 --- /dev/null +++ b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/appsettings.json @@ -0,0 +1,8 @@ +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.Hosting.Lifetime": "Information" + } + } +} diff --git a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.sln b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.sln similarity index 98% rename from src/StellaOps.RiskEngine/StellaOps.RiskEngine.sln rename to src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.sln index d0d418d9..f9b55c2b 100644 --- a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.sln +++ b/src/RiskEngine/StellaOps.RiskEngine/StellaOps.RiskEngine.sln @@ -1,90 +1,90 @@ - -Microsoft Visual Studio Solution File, Format Version 12.00 -# Visual Studio Version 17 -VisualStudioVersion = 17.0.31903.59 -MinimumVisualStudioVersion = 10.0.40219.1 -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.RiskEngine.Core", "StellaOps.RiskEngine.Core\StellaOps.RiskEngine.Core.csproj", "{C570DE3F-3510-40EA-ADEF-40852E3B29DC}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.RiskEngine.Infrastructure", "StellaOps.RiskEngine.Infrastructure\StellaOps.RiskEngine.Infrastructure.csproj", "{7686E310-A4CF-40AD-B6D3-F875AC7AF19F}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.RiskEngine.WebService", "StellaOps.RiskEngine.WebService\StellaOps.RiskEngine.WebService.csproj", "{602F3394-15B6-4349-90CE-8E07F5BE58EB}" -EndProject 
-Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.RiskEngine.Worker", "StellaOps.RiskEngine.Worker\StellaOps.RiskEngine.Worker.csproj", "{B05B5581-B31D-4C49-931C-707A9206E12C}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.RiskEngine.Tests", "StellaOps.RiskEngine.Tests\StellaOps.RiskEngine.Tests.csproj", "{FE873E24-9A06-414D-BD25-7A7658D11F22}" -EndProject -Global - GlobalSection(SolutionConfigurationPlatforms) = preSolution - Debug|Any CPU = Debug|Any CPU - Debug|x64 = Debug|x64 - Debug|x86 = Debug|x86 - Release|Any CPU = Release|Any CPU - Release|x64 = Release|x64 - Release|x86 = Release|x86 - EndGlobalSection - GlobalSection(ProjectConfigurationPlatforms) = postSolution - {C570DE3F-3510-40EA-ADEF-40852E3B29DC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {C570DE3F-3510-40EA-ADEF-40852E3B29DC}.Debug|Any CPU.Build.0 = Debug|Any CPU - {C570DE3F-3510-40EA-ADEF-40852E3B29DC}.Debug|x64.ActiveCfg = Debug|Any CPU - {C570DE3F-3510-40EA-ADEF-40852E3B29DC}.Debug|x64.Build.0 = Debug|Any CPU - {C570DE3F-3510-40EA-ADEF-40852E3B29DC}.Debug|x86.ActiveCfg = Debug|Any CPU - {C570DE3F-3510-40EA-ADEF-40852E3B29DC}.Debug|x86.Build.0 = Debug|Any CPU - {C570DE3F-3510-40EA-ADEF-40852E3B29DC}.Release|Any CPU.ActiveCfg = Release|Any CPU - {C570DE3F-3510-40EA-ADEF-40852E3B29DC}.Release|Any CPU.Build.0 = Release|Any CPU - {C570DE3F-3510-40EA-ADEF-40852E3B29DC}.Release|x64.ActiveCfg = Release|Any CPU - {C570DE3F-3510-40EA-ADEF-40852E3B29DC}.Release|x64.Build.0 = Release|Any CPU - {C570DE3F-3510-40EA-ADEF-40852E3B29DC}.Release|x86.ActiveCfg = Release|Any CPU - {C570DE3F-3510-40EA-ADEF-40852E3B29DC}.Release|x86.Build.0 = Release|Any CPU - {7686E310-A4CF-40AD-B6D3-F875AC7AF19F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {7686E310-A4CF-40AD-B6D3-F875AC7AF19F}.Debug|Any CPU.Build.0 = Debug|Any CPU - {7686E310-A4CF-40AD-B6D3-F875AC7AF19F}.Debug|x64.ActiveCfg = Debug|Any CPU - {7686E310-A4CF-40AD-B6D3-F875AC7AF19F}.Debug|x64.Build.0 = Debug|Any CPU - {7686E310-A4CF-40AD-B6D3-F875AC7AF19F}.Debug|x86.ActiveCfg = Debug|Any CPU - {7686E310-A4CF-40AD-B6D3-F875AC7AF19F}.Debug|x86.Build.0 = Debug|Any CPU - {7686E310-A4CF-40AD-B6D3-F875AC7AF19F}.Release|Any CPU.ActiveCfg = Release|Any CPU - {7686E310-A4CF-40AD-B6D3-F875AC7AF19F}.Release|Any CPU.Build.0 = Release|Any CPU - {7686E310-A4CF-40AD-B6D3-F875AC7AF19F}.Release|x64.ActiveCfg = Release|Any CPU - {7686E310-A4CF-40AD-B6D3-F875AC7AF19F}.Release|x64.Build.0 = Release|Any CPU - {7686E310-A4CF-40AD-B6D3-F875AC7AF19F}.Release|x86.ActiveCfg = Release|Any CPU - {7686E310-A4CF-40AD-B6D3-F875AC7AF19F}.Release|x86.Build.0 = Release|Any CPU - {602F3394-15B6-4349-90CE-8E07F5BE58EB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {602F3394-15B6-4349-90CE-8E07F5BE58EB}.Debug|Any CPU.Build.0 = Debug|Any CPU - {602F3394-15B6-4349-90CE-8E07F5BE58EB}.Debug|x64.ActiveCfg = Debug|Any CPU - {602F3394-15B6-4349-90CE-8E07F5BE58EB}.Debug|x64.Build.0 = Debug|Any CPU - {602F3394-15B6-4349-90CE-8E07F5BE58EB}.Debug|x86.ActiveCfg = Debug|Any CPU - {602F3394-15B6-4349-90CE-8E07F5BE58EB}.Debug|x86.Build.0 = Debug|Any CPU - {602F3394-15B6-4349-90CE-8E07F5BE58EB}.Release|Any CPU.ActiveCfg = Release|Any CPU - {602F3394-15B6-4349-90CE-8E07F5BE58EB}.Release|Any CPU.Build.0 = Release|Any CPU - {602F3394-15B6-4349-90CE-8E07F5BE58EB}.Release|x64.ActiveCfg = Release|Any CPU - {602F3394-15B6-4349-90CE-8E07F5BE58EB}.Release|x64.Build.0 = Release|Any CPU - {602F3394-15B6-4349-90CE-8E07F5BE58EB}.Release|x86.ActiveCfg = Release|Any CPU - 
{602F3394-15B6-4349-90CE-8E07F5BE58EB}.Release|x86.Build.0 = Release|Any CPU - {B05B5581-B31D-4C49-931C-707A9206E12C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {B05B5581-B31D-4C49-931C-707A9206E12C}.Debug|Any CPU.Build.0 = Debug|Any CPU - {B05B5581-B31D-4C49-931C-707A9206E12C}.Debug|x64.ActiveCfg = Debug|Any CPU - {B05B5581-B31D-4C49-931C-707A9206E12C}.Debug|x64.Build.0 = Debug|Any CPU - {B05B5581-B31D-4C49-931C-707A9206E12C}.Debug|x86.ActiveCfg = Debug|Any CPU - {B05B5581-B31D-4C49-931C-707A9206E12C}.Debug|x86.Build.0 = Debug|Any CPU - {B05B5581-B31D-4C49-931C-707A9206E12C}.Release|Any CPU.ActiveCfg = Release|Any CPU - {B05B5581-B31D-4C49-931C-707A9206E12C}.Release|Any CPU.Build.0 = Release|Any CPU - {B05B5581-B31D-4C49-931C-707A9206E12C}.Release|x64.ActiveCfg = Release|Any CPU - {B05B5581-B31D-4C49-931C-707A9206E12C}.Release|x64.Build.0 = Release|Any CPU - {B05B5581-B31D-4C49-931C-707A9206E12C}.Release|x86.ActiveCfg = Release|Any CPU - {B05B5581-B31D-4C49-931C-707A9206E12C}.Release|x86.Build.0 = Release|Any CPU - {FE873E24-9A06-414D-BD25-7A7658D11F22}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {FE873E24-9A06-414D-BD25-7A7658D11F22}.Debug|Any CPU.Build.0 = Debug|Any CPU - {FE873E24-9A06-414D-BD25-7A7658D11F22}.Debug|x64.ActiveCfg = Debug|Any CPU - {FE873E24-9A06-414D-BD25-7A7658D11F22}.Debug|x64.Build.0 = Debug|Any CPU - {FE873E24-9A06-414D-BD25-7A7658D11F22}.Debug|x86.ActiveCfg = Debug|Any CPU - {FE873E24-9A06-414D-BD25-7A7658D11F22}.Debug|x86.Build.0 = Debug|Any CPU - {FE873E24-9A06-414D-BD25-7A7658D11F22}.Release|Any CPU.ActiveCfg = Release|Any CPU - {FE873E24-9A06-414D-BD25-7A7658D11F22}.Release|Any CPU.Build.0 = Release|Any CPU - {FE873E24-9A06-414D-BD25-7A7658D11F22}.Release|x64.ActiveCfg = Release|Any CPU - {FE873E24-9A06-414D-BD25-7A7658D11F22}.Release|x64.Build.0 = Release|Any CPU - {FE873E24-9A06-414D-BD25-7A7658D11F22}.Release|x86.ActiveCfg = Release|Any CPU - {FE873E24-9A06-414D-BD25-7A7658D11F22}.Release|x86.Build.0 = Release|Any CPU - EndGlobalSection - GlobalSection(SolutionProperties) = preSolution - HideSolutionNode = FALSE - EndGlobalSection -EndGlobal + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.0.31903.59 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.RiskEngine.Core", "StellaOps.RiskEngine.Core\StellaOps.RiskEngine.Core.csproj", "{C570DE3F-3510-40EA-ADEF-40852E3B29DC}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.RiskEngine.Infrastructure", "StellaOps.RiskEngine.Infrastructure\StellaOps.RiskEngine.Infrastructure.csproj", "{7686E310-A4CF-40AD-B6D3-F875AC7AF19F}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.RiskEngine.WebService", "StellaOps.RiskEngine.WebService\StellaOps.RiskEngine.WebService.csproj", "{602F3394-15B6-4349-90CE-8E07F5BE58EB}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.RiskEngine.Worker", "StellaOps.RiskEngine.Worker\StellaOps.RiskEngine.Worker.csproj", "{B05B5581-B31D-4C49-931C-707A9206E12C}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.RiskEngine.Tests", "StellaOps.RiskEngine.Tests\StellaOps.RiskEngine.Tests.csproj", "{FE873E24-9A06-414D-BD25-7A7658D11F22}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|Any CPU = Release|Any CPU + Release|x64 = Release|x64 + Release|x86 = 
Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {C570DE3F-3510-40EA-ADEF-40852E3B29DC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C570DE3F-3510-40EA-ADEF-40852E3B29DC}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C570DE3F-3510-40EA-ADEF-40852E3B29DC}.Debug|x64.ActiveCfg = Debug|Any CPU + {C570DE3F-3510-40EA-ADEF-40852E3B29DC}.Debug|x64.Build.0 = Debug|Any CPU + {C570DE3F-3510-40EA-ADEF-40852E3B29DC}.Debug|x86.ActiveCfg = Debug|Any CPU + {C570DE3F-3510-40EA-ADEF-40852E3B29DC}.Debug|x86.Build.0 = Debug|Any CPU + {C570DE3F-3510-40EA-ADEF-40852E3B29DC}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C570DE3F-3510-40EA-ADEF-40852E3B29DC}.Release|Any CPU.Build.0 = Release|Any CPU + {C570DE3F-3510-40EA-ADEF-40852E3B29DC}.Release|x64.ActiveCfg = Release|Any CPU + {C570DE3F-3510-40EA-ADEF-40852E3B29DC}.Release|x64.Build.0 = Release|Any CPU + {C570DE3F-3510-40EA-ADEF-40852E3B29DC}.Release|x86.ActiveCfg = Release|Any CPU + {C570DE3F-3510-40EA-ADEF-40852E3B29DC}.Release|x86.Build.0 = Release|Any CPU + {7686E310-A4CF-40AD-B6D3-F875AC7AF19F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7686E310-A4CF-40AD-B6D3-F875AC7AF19F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7686E310-A4CF-40AD-B6D3-F875AC7AF19F}.Debug|x64.ActiveCfg = Debug|Any CPU + {7686E310-A4CF-40AD-B6D3-F875AC7AF19F}.Debug|x64.Build.0 = Debug|Any CPU + {7686E310-A4CF-40AD-B6D3-F875AC7AF19F}.Debug|x86.ActiveCfg = Debug|Any CPU + {7686E310-A4CF-40AD-B6D3-F875AC7AF19F}.Debug|x86.Build.0 = Debug|Any CPU + {7686E310-A4CF-40AD-B6D3-F875AC7AF19F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7686E310-A4CF-40AD-B6D3-F875AC7AF19F}.Release|Any CPU.Build.0 = Release|Any CPU + {7686E310-A4CF-40AD-B6D3-F875AC7AF19F}.Release|x64.ActiveCfg = Release|Any CPU + {7686E310-A4CF-40AD-B6D3-F875AC7AF19F}.Release|x64.Build.0 = Release|Any CPU + {7686E310-A4CF-40AD-B6D3-F875AC7AF19F}.Release|x86.ActiveCfg = Release|Any CPU + {7686E310-A4CF-40AD-B6D3-F875AC7AF19F}.Release|x86.Build.0 = Release|Any CPU + {602F3394-15B6-4349-90CE-8E07F5BE58EB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {602F3394-15B6-4349-90CE-8E07F5BE58EB}.Debug|Any CPU.Build.0 = Debug|Any CPU + {602F3394-15B6-4349-90CE-8E07F5BE58EB}.Debug|x64.ActiveCfg = Debug|Any CPU + {602F3394-15B6-4349-90CE-8E07F5BE58EB}.Debug|x64.Build.0 = Debug|Any CPU + {602F3394-15B6-4349-90CE-8E07F5BE58EB}.Debug|x86.ActiveCfg = Debug|Any CPU + {602F3394-15B6-4349-90CE-8E07F5BE58EB}.Debug|x86.Build.0 = Debug|Any CPU + {602F3394-15B6-4349-90CE-8E07F5BE58EB}.Release|Any CPU.ActiveCfg = Release|Any CPU + {602F3394-15B6-4349-90CE-8E07F5BE58EB}.Release|Any CPU.Build.0 = Release|Any CPU + {602F3394-15B6-4349-90CE-8E07F5BE58EB}.Release|x64.ActiveCfg = Release|Any CPU + {602F3394-15B6-4349-90CE-8E07F5BE58EB}.Release|x64.Build.0 = Release|Any CPU + {602F3394-15B6-4349-90CE-8E07F5BE58EB}.Release|x86.ActiveCfg = Release|Any CPU + {602F3394-15B6-4349-90CE-8E07F5BE58EB}.Release|x86.Build.0 = Release|Any CPU + {B05B5581-B31D-4C49-931C-707A9206E12C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B05B5581-B31D-4C49-931C-707A9206E12C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B05B5581-B31D-4C49-931C-707A9206E12C}.Debug|x64.ActiveCfg = Debug|Any CPU + {B05B5581-B31D-4C49-931C-707A9206E12C}.Debug|x64.Build.0 = Debug|Any CPU + {B05B5581-B31D-4C49-931C-707A9206E12C}.Debug|x86.ActiveCfg = Debug|Any CPU + {B05B5581-B31D-4C49-931C-707A9206E12C}.Debug|x86.Build.0 = Debug|Any CPU + {B05B5581-B31D-4C49-931C-707A9206E12C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B05B5581-B31D-4C49-931C-707A9206E12C}.Release|Any 
CPU.Build.0 = Release|Any CPU + {B05B5581-B31D-4C49-931C-707A9206E12C}.Release|x64.ActiveCfg = Release|Any CPU + {B05B5581-B31D-4C49-931C-707A9206E12C}.Release|x64.Build.0 = Release|Any CPU + {B05B5581-B31D-4C49-931C-707A9206E12C}.Release|x86.ActiveCfg = Release|Any CPU + {B05B5581-B31D-4C49-931C-707A9206E12C}.Release|x86.Build.0 = Release|Any CPU + {FE873E24-9A06-414D-BD25-7A7658D11F22}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {FE873E24-9A06-414D-BD25-7A7658D11F22}.Debug|Any CPU.Build.0 = Debug|Any CPU + {FE873E24-9A06-414D-BD25-7A7658D11F22}.Debug|x64.ActiveCfg = Debug|Any CPU + {FE873E24-9A06-414D-BD25-7A7658D11F22}.Debug|x64.Build.0 = Debug|Any CPU + {FE873E24-9A06-414D-BD25-7A7658D11F22}.Debug|x86.ActiveCfg = Debug|Any CPU + {FE873E24-9A06-414D-BD25-7A7658D11F22}.Debug|x86.Build.0 = Debug|Any CPU + {FE873E24-9A06-414D-BD25-7A7658D11F22}.Release|Any CPU.ActiveCfg = Release|Any CPU + {FE873E24-9A06-414D-BD25-7A7658D11F22}.Release|Any CPU.Build.0 = Release|Any CPU + {FE873E24-9A06-414D-BD25-7A7658D11F22}.Release|x64.ActiveCfg = Release|Any CPU + {FE873E24-9A06-414D-BD25-7A7658D11F22}.Release|x64.Build.0 = Release|Any CPU + {FE873E24-9A06-414D-BD25-7A7658D11F22}.Release|x86.ActiveCfg = Release|Any CPU + {FE873E24-9A06-414D-BD25-7A7658D11F22}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection +EndGlobal diff --git a/src/StellaOps.RiskEngine/TASKS.md b/src/RiskEngine/StellaOps.RiskEngine/TASKS.md similarity index 99% rename from src/StellaOps.RiskEngine/TASKS.md rename to src/RiskEngine/StellaOps.RiskEngine/TASKS.md index cfae00cc..146b667b 100644 --- a/src/StellaOps.RiskEngine/TASKS.md +++ b/src/RiskEngine/StellaOps.RiskEngine/TASKS.md @@ -1,32 +1,32 @@ -# Risk Engine Task Board — Epic 18: Risk Scoring Profiles - -## Sprint 66 – Foundations -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| RISK-ENGINE-66-001 | TODO | Risk Engine Guild | POLICY-RISK-66-001 | Scaffold scoring service (job queue, worker loop, provider registry) with deterministic execution harness. | Service builds/tests; job queue runs sample job; determinism tests pass. | -| RISK-ENGINE-66-002 | TODO | Risk Engine Guild | RISK-ENGINE-66-001 | Implement default transforms (linear, minmax, logistic, piecewise), clamping, gating, and contribution calculator. | Transform/gating unit tests passing; contribution breakdown matches golden fixtures. | - -## Sprint 67 – Provider Integration -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| RISK-ENGINE-67-001 | TODO | Risk Engine Guild, Concelier Guild | RISK-ENGINE-66-002, CONCELIER-RISK-66-001 | Integrate CVSS and KEV providers pulling data from Conseiller; implement reducers (`max`, `any`, `consensus`). | Providers return sample data; reducer tests pass; provenance recorded. | -| RISK-ENGINE-67-002 | TODO | Risk Engine Guild, Excitator Guild | RISK-ENGINE-66-002, EXCITITOR-RISK-66-001 | Integrate VEX gate provider and ensure gating short-circuits scoring as configured. | VEX gate tests pass; explanation indicates gate decision. | -| RISK-ENGINE-67-003 | TODO | Risk Engine Guild, Policy Engine Guild | RISK-ENGINE-66-002 | Add fix availability, asset criticality, and internet exposure providers with caching + TTL enforcement. | Providers deliver normalized values; cache hit metrics exposed. 
| - -## Sprint 68 – Ledger & API Wiring -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| RISK-ENGINE-68-001 | TODO | Risk Engine Guild, Findings Ledger Guild | RISK-ENGINE-66-002, LEDGER-RISK-66-001 | Persist scoring results + explanation pointers to Findings Ledger; handle incremental updates via input hash. | Results stored with hash; updates skip unchanged findings; tests cover dedupe. | -| RISK-ENGINE-68-002 | TODO | Risk Engine Guild, API Guild | RISK-ENGINE-68-001, POLICY-RISK-67-002 | Expose APIs (`/risk/jobs`, `/risk/results`, `/risk/results/{id}/explanation`); include pagination, filtering, error codes. | OpenAPI documented; contract tests pass; endpoints gated by scopes. | - -## Sprint 69 – Simulation & Performance -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| RISK-ENGINE-69-001 | TODO | Risk Engine Guild, Policy Studio Guild | RISK-ENGINE-68-002 | Implement simulation mode producing distributions and top movers without mutating ledger. | Simulation API returns metrics; golden tests cover scenarios. | -| RISK-ENGINE-69-002 | TODO | Risk Engine Guild, Observability Guild | RISK-ENGINE-66-001 | Add telemetry (spans, metrics, logs) for provider latency, job throughput, cache hits; define SLO dashboards. | Metrics visible in Grafana; alerts configured for P95 latency + error rate. | - -## Sprint 70 – Air-Gap & Advanced Providers -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| RISK-ENGINE-70-001 | TODO | Risk Engine Guild, Export Guild | RISK-ENGINE-67-003, RISK-BUNDLE-69-001 | Support offline provider bundles with manifest verification and missing-data reporting. | Engine loads bundle data; missing providers logged with `AIRGAP_MISSING_DATA`. | -| RISK-ENGINE-70-002 | TODO | Risk Engine Guild, Observability Guild | RISK-ENGINE-68-002 | Integrate runtime evidence provider and reachability provider outputs with caching + TTL. | Providers return runtime/reachability signals; explanation includes sources; tests pass. | +# Risk Engine Task Board — Epic 18: Risk Scoring Profiles + +## Sprint 66 – Foundations +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| RISK-ENGINE-66-001 | TODO | Risk Engine Guild | POLICY-RISK-66-001 | Scaffold scoring service (job queue, worker loop, provider registry) with deterministic execution harness. | Service builds/tests; job queue runs sample job; determinism tests pass. | +| RISK-ENGINE-66-002 | TODO | Risk Engine Guild | RISK-ENGINE-66-001 | Implement default transforms (linear, minmax, logistic, piecewise), clamping, gating, and contribution calculator. | Transform/gating unit tests passing; contribution breakdown matches golden fixtures. | + +## Sprint 67 – Provider Integration +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| RISK-ENGINE-67-001 | TODO | Risk Engine Guild, Concelier Guild | RISK-ENGINE-66-002, CONCELIER-RISK-66-001 | Integrate CVSS and KEV providers pulling data from Conseiller; implement reducers (`max`, `any`, `consensus`). | Providers return sample data; reducer tests pass; provenance recorded. 
| +| RISK-ENGINE-67-002 | TODO | Risk Engine Guild, Excitator Guild | RISK-ENGINE-66-002, EXCITITOR-RISK-66-001 | Integrate VEX gate provider and ensure gating short-circuits scoring as configured. | VEX gate tests pass; explanation indicates gate decision. | +| RISK-ENGINE-67-003 | TODO | Risk Engine Guild, Policy Engine Guild | RISK-ENGINE-66-002 | Add fix availability, asset criticality, and internet exposure providers with caching + TTL enforcement. | Providers deliver normalized values; cache hit metrics exposed. | + +## Sprint 68 – Ledger & API Wiring +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| RISK-ENGINE-68-001 | TODO | Risk Engine Guild, Findings Ledger Guild | RISK-ENGINE-66-002, LEDGER-RISK-66-001 | Persist scoring results + explanation pointers to Findings Ledger; handle incremental updates via input hash. | Results stored with hash; updates skip unchanged findings; tests cover dedupe. | +| RISK-ENGINE-68-002 | TODO | Risk Engine Guild, API Guild | RISK-ENGINE-68-001, POLICY-RISK-67-002 | Expose APIs (`/risk/jobs`, `/risk/results`, `/risk/results/{id}/explanation`); include pagination, filtering, error codes. | OpenAPI documented; contract tests pass; endpoints gated by scopes. | + +## Sprint 69 – Simulation & Performance +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| RISK-ENGINE-69-001 | TODO | Risk Engine Guild, Policy Studio Guild | RISK-ENGINE-68-002 | Implement simulation mode producing distributions and top movers without mutating ledger. | Simulation API returns metrics; golden tests cover scenarios. | +| RISK-ENGINE-69-002 | TODO | Risk Engine Guild, Observability Guild | RISK-ENGINE-66-001 | Add telemetry (spans, metrics, logs) for provider latency, job throughput, cache hits; define SLO dashboards. | Metrics visible in Grafana; alerts configured for P95 latency + error rate. | + +## Sprint 70 – Air-Gap & Advanced Providers +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| RISK-ENGINE-70-001 | TODO | Risk Engine Guild, Export Guild | RISK-ENGINE-67-003, RISK-BUNDLE-69-001 | Support offline provider bundles with manifest verification and missing-data reporting. | Engine loads bundle data; missing providers logged with `AIRGAP_MISSING_DATA`. | +| RISK-ENGINE-70-002 | TODO | Risk Engine Guild, Observability Guild | RISK-ENGINE-68-002 | Integrate runtime evidence provider and reachability provider outputs with caching + TTL. | Providers return runtime/reachability signals; explanation includes sources; tests pass. 
| diff --git a/src/SbomService/StellaOps.SbomService.sln b/src/SbomService/StellaOps.SbomService.sln new file mode 100644 index 00000000..8088fc6a --- /dev/null +++ b/src/SbomService/StellaOps.SbomService.sln @@ -0,0 +1,104 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.0.31903.59 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.SbomService", "StellaOps.SbomService\StellaOps.SbomService.csproj", "{0D9049C8-1667-4F98-9295-579AD9F3631C}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Configuration", "..\__Libraries\StellaOps.Configuration\StellaOps.Configuration.csproj", "{AF00CFB3-C548-4272-AE91-21720CCA0F51}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Authority.Plugins.Abstractions", "..\Authority\StellaOps.Authority\StellaOps.Authority.Plugins.Abstractions\StellaOps.Authority.Plugins.Abstractions.csproj", "{1D1D07F0-86EE-45FB-B9FA-6D9F7E49770C}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cryptography", "..\__Libraries\StellaOps.Cryptography\StellaOps.Cryptography.csproj", "{0D5F8F7D-D66D-4415-956F-F4822AB72D31}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Abstractions", "..\Authority\StellaOps.Authority\StellaOps.Auth.Abstractions\StellaOps.Auth.Abstractions.csproj", "{CF8D1B05-BB50-45B9-B956-56380D5B4616}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.DependencyInjection", "..\__Libraries\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj", "{A7F565B4-F79B-471A-BD17-AE6314591345}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|Any CPU = Release|Any CPU + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {0D9049C8-1667-4F98-9295-579AD9F3631C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {0D9049C8-1667-4F98-9295-579AD9F3631C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {0D9049C8-1667-4F98-9295-579AD9F3631C}.Debug|x64.ActiveCfg = Debug|Any CPU + {0D9049C8-1667-4F98-9295-579AD9F3631C}.Debug|x64.Build.0 = Debug|Any CPU + {0D9049C8-1667-4F98-9295-579AD9F3631C}.Debug|x86.ActiveCfg = Debug|Any CPU + {0D9049C8-1667-4F98-9295-579AD9F3631C}.Debug|x86.Build.0 = Debug|Any CPU + {0D9049C8-1667-4F98-9295-579AD9F3631C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {0D9049C8-1667-4F98-9295-579AD9F3631C}.Release|Any CPU.Build.0 = Release|Any CPU + {0D9049C8-1667-4F98-9295-579AD9F3631C}.Release|x64.ActiveCfg = Release|Any CPU + {0D9049C8-1667-4F98-9295-579AD9F3631C}.Release|x64.Build.0 = Release|Any CPU + {0D9049C8-1667-4F98-9295-579AD9F3631C}.Release|x86.ActiveCfg = Release|Any CPU + {0D9049C8-1667-4F98-9295-579AD9F3631C}.Release|x86.Build.0 = Release|Any CPU + {AF00CFB3-C548-4272-AE91-21720CCA0F51}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {AF00CFB3-C548-4272-AE91-21720CCA0F51}.Debug|Any CPU.Build.0 = Debug|Any CPU + {AF00CFB3-C548-4272-AE91-21720CCA0F51}.Debug|x64.ActiveCfg = Debug|Any CPU + {AF00CFB3-C548-4272-AE91-21720CCA0F51}.Debug|x64.Build.0 = Debug|Any CPU + {AF00CFB3-C548-4272-AE91-21720CCA0F51}.Debug|x86.ActiveCfg = Debug|Any CPU + {AF00CFB3-C548-4272-AE91-21720CCA0F51}.Debug|x86.Build.0 = Debug|Any CPU + {AF00CFB3-C548-4272-AE91-21720CCA0F51}.Release|Any CPU.ActiveCfg = Release|Any CPU + 
{AF00CFB3-C548-4272-AE91-21720CCA0F51}.Release|Any CPU.Build.0 = Release|Any CPU + {AF00CFB3-C548-4272-AE91-21720CCA0F51}.Release|x64.ActiveCfg = Release|Any CPU + {AF00CFB3-C548-4272-AE91-21720CCA0F51}.Release|x64.Build.0 = Release|Any CPU + {AF00CFB3-C548-4272-AE91-21720CCA0F51}.Release|x86.ActiveCfg = Release|Any CPU + {AF00CFB3-C548-4272-AE91-21720CCA0F51}.Release|x86.Build.0 = Release|Any CPU + {1D1D07F0-86EE-45FB-B9FA-6D9F7E49770C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {1D1D07F0-86EE-45FB-B9FA-6D9F7E49770C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {1D1D07F0-86EE-45FB-B9FA-6D9F7E49770C}.Debug|x64.ActiveCfg = Debug|Any CPU + {1D1D07F0-86EE-45FB-B9FA-6D9F7E49770C}.Debug|x64.Build.0 = Debug|Any CPU + {1D1D07F0-86EE-45FB-B9FA-6D9F7E49770C}.Debug|x86.ActiveCfg = Debug|Any CPU + {1D1D07F0-86EE-45FB-B9FA-6D9F7E49770C}.Debug|x86.Build.0 = Debug|Any CPU + {1D1D07F0-86EE-45FB-B9FA-6D9F7E49770C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {1D1D07F0-86EE-45FB-B9FA-6D9F7E49770C}.Release|Any CPU.Build.0 = Release|Any CPU + {1D1D07F0-86EE-45FB-B9FA-6D9F7E49770C}.Release|x64.ActiveCfg = Release|Any CPU + {1D1D07F0-86EE-45FB-B9FA-6D9F7E49770C}.Release|x64.Build.0 = Release|Any CPU + {1D1D07F0-86EE-45FB-B9FA-6D9F7E49770C}.Release|x86.ActiveCfg = Release|Any CPU + {1D1D07F0-86EE-45FB-B9FA-6D9F7E49770C}.Release|x86.Build.0 = Release|Any CPU + {0D5F8F7D-D66D-4415-956F-F4822AB72D31}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {0D5F8F7D-D66D-4415-956F-F4822AB72D31}.Debug|Any CPU.Build.0 = Debug|Any CPU + {0D5F8F7D-D66D-4415-956F-F4822AB72D31}.Debug|x64.ActiveCfg = Debug|Any CPU + {0D5F8F7D-D66D-4415-956F-F4822AB72D31}.Debug|x64.Build.0 = Debug|Any CPU + {0D5F8F7D-D66D-4415-956F-F4822AB72D31}.Debug|x86.ActiveCfg = Debug|Any CPU + {0D5F8F7D-D66D-4415-956F-F4822AB72D31}.Debug|x86.Build.0 = Debug|Any CPU + {0D5F8F7D-D66D-4415-956F-F4822AB72D31}.Release|Any CPU.ActiveCfg = Release|Any CPU + {0D5F8F7D-D66D-4415-956F-F4822AB72D31}.Release|Any CPU.Build.0 = Release|Any CPU + {0D5F8F7D-D66D-4415-956F-F4822AB72D31}.Release|x64.ActiveCfg = Release|Any CPU + {0D5F8F7D-D66D-4415-956F-F4822AB72D31}.Release|x64.Build.0 = Release|Any CPU + {0D5F8F7D-D66D-4415-956F-F4822AB72D31}.Release|x86.ActiveCfg = Release|Any CPU + {0D5F8F7D-D66D-4415-956F-F4822AB72D31}.Release|x86.Build.0 = Release|Any CPU + {CF8D1B05-BB50-45B9-B956-56380D5B4616}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {CF8D1B05-BB50-45B9-B956-56380D5B4616}.Debug|Any CPU.Build.0 = Debug|Any CPU + {CF8D1B05-BB50-45B9-B956-56380D5B4616}.Debug|x64.ActiveCfg = Debug|Any CPU + {CF8D1B05-BB50-45B9-B956-56380D5B4616}.Debug|x64.Build.0 = Debug|Any CPU + {CF8D1B05-BB50-45B9-B956-56380D5B4616}.Debug|x86.ActiveCfg = Debug|Any CPU + {CF8D1B05-BB50-45B9-B956-56380D5B4616}.Debug|x86.Build.0 = Debug|Any CPU + {CF8D1B05-BB50-45B9-B956-56380D5B4616}.Release|Any CPU.ActiveCfg = Release|Any CPU + {CF8D1B05-BB50-45B9-B956-56380D5B4616}.Release|Any CPU.Build.0 = Release|Any CPU + {CF8D1B05-BB50-45B9-B956-56380D5B4616}.Release|x64.ActiveCfg = Release|Any CPU + {CF8D1B05-BB50-45B9-B956-56380D5B4616}.Release|x64.Build.0 = Release|Any CPU + {CF8D1B05-BB50-45B9-B956-56380D5B4616}.Release|x86.ActiveCfg = Release|Any CPU + {CF8D1B05-BB50-45B9-B956-56380D5B4616}.Release|x86.Build.0 = Release|Any CPU + {A7F565B4-F79B-471A-BD17-AE6314591345}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {A7F565B4-F79B-471A-BD17-AE6314591345}.Debug|Any CPU.Build.0 = Debug|Any CPU + {A7F565B4-F79B-471A-BD17-AE6314591345}.Debug|x64.ActiveCfg = Debug|Any CPU + {A7F565B4-F79B-471A-BD17-AE6314591345}.Debug|x64.Build.0 = 
Debug|Any CPU + {A7F565B4-F79B-471A-BD17-AE6314591345}.Debug|x86.ActiveCfg = Debug|Any CPU + {A7F565B4-F79B-471A-BD17-AE6314591345}.Debug|x86.Build.0 = Debug|Any CPU + {A7F565B4-F79B-471A-BD17-AE6314591345}.Release|Any CPU.ActiveCfg = Release|Any CPU + {A7F565B4-F79B-471A-BD17-AE6314591345}.Release|Any CPU.Build.0 = Release|Any CPU + {A7F565B4-F79B-471A-BD17-AE6314591345}.Release|x64.ActiveCfg = Release|Any CPU + {A7F565B4-F79B-471A-BD17-AE6314591345}.Release|x64.Build.0 = Release|Any CPU + {A7F565B4-F79B-471A-BD17-AE6314591345}.Release|x86.ActiveCfg = Release|Any CPU + {A7F565B4-F79B-471A-BD17-AE6314591345}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection +EndGlobal diff --git a/src/StellaOps.SbomService/AGENTS.md b/src/SbomService/StellaOps.SbomService/AGENTS.md similarity index 98% rename from src/StellaOps.SbomService/AGENTS.md rename to src/SbomService/StellaOps.SbomService/AGENTS.md index fee2ec58..ca9044fb 100644 --- a/src/StellaOps.SbomService/AGENTS.md +++ b/src/SbomService/StellaOps.SbomService/AGENTS.md @@ -1,15 +1,15 @@ -# StellaOps.SbomService — Agent Charter - -## Mission -Expose normalized SBOM projections (components, relationships, scopes, entrypoints) that downstream systems such as Cartographer, Policy Engine, and Scheduler consume. Maintain deterministic SBOM versioning, change events, and tenant-aware access patterns. - -## Responsibilities -- Normalize ingest from Scanner outputs/CycloneDX/SPDX artifacts into canonical documents. -- Provide APIs for SBOM metadata, projections, entrypoint catalogs, and version history. -- Emit change events when SBOMs are added or updated so Cartographer and overlay workers can react. -- Enforce Authority scopes/tenancy and deliver observability for SBOM projection latency. - -## Expectations -- SBOM documents remain immutable once published; new versions append only. -- Keep projections deterministic and schema-validated; include compliance checklists. -- Update `TASKS.md` whenever status changes and coordinate with Cartographer/Scheduler guilds. +# StellaOps.SbomService — Agent Charter + +## Mission +Expose normalized SBOM projections (components, relationships, scopes, entrypoints) that downstream systems such as Cartographer, Policy Engine, and Scheduler consume. Maintain deterministic SBOM versioning, change events, and tenant-aware access patterns. + +## Responsibilities +- Normalize ingest from Scanner outputs/CycloneDX/SPDX artifacts into canonical documents. +- Provide APIs for SBOM metadata, projections, entrypoint catalogs, and version history. +- Emit change events when SBOMs are added or updated so Cartographer and overlay workers can react. +- Enforce Authority scopes/tenancy and deliver observability for SBOM projection latency. + +## Expectations +- SBOM documents remain immutable once published; new versions append only. +- Keep projections deterministic and schema-validated; include compliance checklists. +- Update `TASKS.md` whenever status changes and coordinate with Cartographer/Scheduler guilds. 
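The charter above commits the SBOM Service to emitting change events when SBOM versions are published, and the task board later in this patch names that event `sbom.version.created` with digest/version metadata. As a sketch only — the field names are assumptions, not defined anywhere in this patch — the payload could look roughly like this:

```csharp
// Illustrative sketch only: fields are assumptions inferred from the charter and task board.
using System;

namespace StellaOps.SbomService.Events;

/// <summary>Emitted when a new immutable SBOM version is published; versions append only.</summary>
public sealed record SbomVersionCreated(
    string Tenant,               // Authority tenant the SBOM belongs to
    string ArtifactDigest,       // digest of the scanned artifact
    string SbomDigest,           // digest of the canonical SBOM document
    int Version,                 // monotonically increasing per artifact
    string Format,               // "cyclonedx" or "spdx"
    DateTimeOffset CreatedAt)
{
    public const string EventType = "sbom.version.created";
}
```

Keying the payload on digests and an append-only version number, rather than mutable state, is what would let Cartographer and overlay workers replay or backfill deterministically, as the charter expects.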
diff --git a/src/StellaOps.SbomService/Program.cs b/src/SbomService/StellaOps.SbomService/Program.cs similarity index 96% rename from src/StellaOps.SbomService/Program.cs rename to src/SbomService/StellaOps.SbomService/Program.cs index 6bff3380..880afc5b 100644 --- a/src/StellaOps.SbomService/Program.cs +++ b/src/SbomService/StellaOps.SbomService/Program.cs @@ -1,17 +1,17 @@ -var builder = WebApplication.CreateBuilder(args); - -builder.Configuration - .AddJsonFile("appsettings.json", optional: true, reloadOnChange: true) - .AddEnvironmentVariables("SBOM_"); - -builder.Services.AddOptions(); -builder.Services.AddLogging(); - -// TODO: register SBOM projection services, repositories, and Authority integration. - -var app = builder.Build(); - -app.MapGet("/healthz", () => Results.Ok(new { status = "ok" })); -app.MapGet("/readyz", () => Results.Ok(new { status = "warming" })); - -app.Run(); +var builder = WebApplication.CreateBuilder(args); + +builder.Configuration + .AddJsonFile("appsettings.json", optional: true, reloadOnChange: true) + .AddEnvironmentVariables("SBOM_"); + +builder.Services.AddOptions(); +builder.Services.AddLogging(); + +// TODO: register SBOM projection services, repositories, and Authority integration. + +var app = builder.Build(); + +app.MapGet("/healthz", () => Results.Ok(new { status = "ok" })); +app.MapGet("/readyz", () => Results.Ok(new { status = "warming" })); + +app.Run(); diff --git a/src/StellaOps.SbomService/StellaOps.SbomService.csproj b/src/SbomService/StellaOps.SbomService/StellaOps.SbomService.csproj similarity index 58% rename from src/StellaOps.SbomService/StellaOps.SbomService.csproj rename to src/SbomService/StellaOps.SbomService/StellaOps.SbomService.csproj index cc5ded2a..c64b68ca 100644 --- a/src/StellaOps.SbomService/StellaOps.SbomService.csproj +++ b/src/SbomService/StellaOps.SbomService/StellaOps.SbomService.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk.Web"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -9,7 +10,7 @@ </PropertyGroup> <ItemGroup> - <ProjectReference Include="..\StellaOps.Configuration\StellaOps.Configuration.csproj" /> - <ProjectReference Include="..\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Configuration/StellaOps.Configuration.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.SbomService/TASKS.md b/src/SbomService/StellaOps.SbomService/TASKS.md similarity index 99% rename from src/StellaOps.SbomService/TASKS.md rename to src/SbomService/StellaOps.SbomService/TASKS.md index bc3d155b..35529d46 100644 --- a/src/StellaOps.SbomService/TASKS.md +++ b/src/SbomService/StellaOps.SbomService/TASKS.md @@ -1,47 +1,47 @@ -# SBOM Service Task Board — Epic 3: Graph Explorer v1 -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| SBOM-SERVICE-21-001 | BLOCKED (2025-10-27) | SBOM Service Guild, Cartographer Guild | CONCELIER-GRAPH-21-001 | Publish normalized SBOM projection schema (components, relationships, scopes, entrypoints) and implement read API with pagination + tenant enforcement. | Schema validated with fixtures; API documented; integration tests cover CycloneDX/SPDX inputs. 
| -> 2025-10-27: Awaiting projection schema from Concelier (`CONCELIER-GRAPH-21-001`) before we can finalize API payloads and fixtures. -| SBOM-SERVICE-21-002 | BLOCKED (2025-10-27) | SBOM Service Guild, Scheduler Guild | SBOM-SERVICE-21-001, SCHED-MODELS-21-001 | Emit change events (`sbom.version.created`) carrying digest/version metadata for Graph Indexer builds; add replay/backfill tooling. | Events published on new SBOMs; consumer harness validated; replay scripts documented. | -> 2025-10-27: Blocked until `SBOM-SERVICE-21-001` defines projection schema and endpoints. -| SBOM-SERVICE-21-003 | BLOCKED (2025-10-27) | SBOM Service Guild | SBOM-SERVICE-21-001 | Provide entrypoint/service node management API (list/update overrides) feeding Cartographer path relevance with deterministic defaults. | Entrypoint API live; overrides persisted; docs updated; tests cover fallback logic. | -> 2025-10-27: Depends on base projection schema (`SBOM-SERVICE-21-001`) which is blocked. -| SBOM-SERVICE-21-004 | BLOCKED (2025-10-27) | SBOM Service Guild, Observability Guild | SBOM-SERVICE-21-001 | Wire observability: metrics (`sbom_projection_seconds`, `sbom_projection_size`), traces, structured logs with tenant info; set alerts for backlog. | Metrics/traces exposed; dashboards updated; alert thresholds defined. | -> 2025-10-27: Projection pipeline not in place yet; will follow once `SBOM-SERVICE-21-001` unblocks. - -## Policy Engine + Editor v1 - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| SBOM-SERVICE-23-001 | TODO | SBOM Service Guild, Policy Guild | SBOM-SERVICE-21-001 | Extend projections to include asset metadata (criticality, owner, environment, exposure flags) required by policy rules; update schema docs. | Projection schema updated; fixtures expanded; policy runtime tests consume new fields. | -| SBOM-SERVICE-23-002 | TODO | SBOM Service Guild, Platform Events Guild | SBOM-SERVICE-23-001 | Emit `sbom.asset.updated` events when metadata changes; ensure idempotent payloads and documentation. | Events published with tests; evaluator receives updates; docs updated. | - -## StellaOps Console (Sprint 23) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| SBOM-CONSOLE-23-001 | TODO | SBOM Service Guild, Cartographer Guild | SBOM-SERVICE-21-001, SBOM-SERVICE-21-003 | Provide Console-focused SBOM catalog API (`/console/sboms`) with filters (artifact, license, scope, asset tags), pagination cursors, evaluation metadata, and immutable JSON projections for raw view drawer. Document schema + determinism guarantees. | API deployed with contract tests, latency ≤ 200 ms P95 on seeded fixtures, docs updated, integration tests confirm parity with underlying projections. | -| SBOM-CONSOLE-23-002 | TODO | SBOM Service Guild | SBOM-CONSOLE-23-001, SBOM-SERVICE-21-002 | Deliver component lookup endpoints powering global search and Graph overlays (component neighborhoods, license overlays, policy deltas) with caching hints and tenant enforcement. | Endpoints documented, caching headers validated, integration tests cover search use cases, telemetry metrics exported. | - -## Graph & Vuln Explorer v1 - -> 2025-10-26 update — Cartographer service (`CARTO-GRAPH-21-001..009`) now owns graph construction/overlays. SBOM Service continues to expose projections and change events via `SBOM-SERVICE-21-00x`. 
- -## Vulnerability Explorer (Sprint 29) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| SBOM-VULN-29-001 | TODO | SBOM Service Guild | SBOM-SERVICE-21-001 | Emit inventory evidence with `scope`, `runtime_flag`, dependency paths, and nearest safe version hints, streaming change events for resolver jobs. | Evidence payloads extended; change events published with tests; documentation updated. | -| SBOM-VULN-29-002 | TODO | SBOM Service Guild, Findings Ledger Guild | SBOM-VULN-29-001, LEDGER-29-002 | Provide resolver feed (artifact, purl, version, paths) via queue/topic for Vuln Explorer candidate generation; ensure idempotent delivery. | Feed operational with dedupe keys; integration tests confirm candidate generation; metrics added. | - -## Advisory AI (Sprint 31) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| SBOM-AIAI-31-001 | TODO | SBOM Service Guild | SBOM-VULN-29-001 | Provide `GET /sbom/paths?purl=...` and version timeline endpoints optimized for Advisory AI (incl. env flags, blast radius metadata). | Endpoints live with caching; perf targets met; tests cover ecosystems. | -| SBOM-AIAI-31-002 | TODO | SBOM Service Guild, Observability Guild | SBOM-AIAI-31-001 | Instrument metrics for path/timeline queries (latency, cache hit rate) and surface dashboards. | Metrics/traces live; dashboards approved. | - -## Orchestrator Dashboard -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| SBOM-ORCH-32-001 | TODO | SBOM Service Guild | ORCH-SVC-32-001, WORKER-GO-32-001, WORKER-PY-32-001 | Register SBOM ingest/index sources with orchestrator, embed worker SDK, and emit artifact hashes + job metadata. | SDK integration tested with orchestrator; artifact hashes persisted; metrics include sbom ingest job lifecycle. | -| SBOM-ORCH-33-001 | TODO | SBOM Service Guild | SBOM-ORCH-32-001, ORCH-SVC-33-001, ORCH-SVC-33-002 | Report backpressure metrics, honor orchestrator pause/throttle signals, and classify error outputs for sbom jobs. | Backpressure metrics exported; pause/resume E2E tests pass; error classes mapped to orchestrator codes. | -| SBOM-ORCH-34-001 | TODO | SBOM Service Guild | SBOM-ORCH-33-001, ORCH-SVC-33-003, ORCH-SVC-34-001 | Implement orchestrator backfill + watermark reconciliation for SBOM ingest/index, ensuring idempotent artifact reuse. | Backfill operations verified with no duplicate artifacts; watermark status persisted; coverage metrics published. | +# SBOM Service Task Board — Epic 3: Graph Explorer v1 +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| SBOM-SERVICE-21-001 | BLOCKED (2025-10-27) | SBOM Service Guild, Cartographer Guild | CONCELIER-GRAPH-21-001 | Publish normalized SBOM projection schema (components, relationships, scopes, entrypoints) and implement read API with pagination + tenant enforcement. | Schema validated with fixtures; API documented; integration tests cover CycloneDX/SPDX inputs. | +> 2025-10-27: Awaiting projection schema from Concelier (`CONCELIER-GRAPH-21-001`) before we can finalize API payloads and fixtures. 
+| SBOM-SERVICE-21-002 | BLOCKED (2025-10-27) | SBOM Service Guild, Scheduler Guild | SBOM-SERVICE-21-001, SCHED-MODELS-21-001 | Emit change events (`sbom.version.created`) carrying digest/version metadata for Graph Indexer builds; add replay/backfill tooling. | Events published on new SBOMs; consumer harness validated; replay scripts documented. | +> 2025-10-27: Blocked until `SBOM-SERVICE-21-001` defines projection schema and endpoints. +| SBOM-SERVICE-21-003 | BLOCKED (2025-10-27) | SBOM Service Guild | SBOM-SERVICE-21-001 | Provide entrypoint/service node management API (list/update overrides) feeding Cartographer path relevance with deterministic defaults. | Entrypoint API live; overrides persisted; docs updated; tests cover fallback logic. | +> 2025-10-27: Depends on base projection schema (`SBOM-SERVICE-21-001`) which is blocked. +| SBOM-SERVICE-21-004 | BLOCKED (2025-10-27) | SBOM Service Guild, Observability Guild | SBOM-SERVICE-21-001 | Wire observability: metrics (`sbom_projection_seconds`, `sbom_projection_size`), traces, structured logs with tenant info; set alerts for backlog. | Metrics/traces exposed; dashboards updated; alert thresholds defined. | +> 2025-10-27: Projection pipeline not in place yet; will follow once `SBOM-SERVICE-21-001` unblocks. + +## Policy Engine + Editor v1 + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| SBOM-SERVICE-23-001 | TODO | SBOM Service Guild, Policy Guild | SBOM-SERVICE-21-001 | Extend projections to include asset metadata (criticality, owner, environment, exposure flags) required by policy rules; update schema docs. | Projection schema updated; fixtures expanded; policy runtime tests consume new fields. | +| SBOM-SERVICE-23-002 | TODO | SBOM Service Guild, Platform Events Guild | SBOM-SERVICE-23-001 | Emit `sbom.asset.updated` events when metadata changes; ensure idempotent payloads and documentation. | Events published with tests; evaluator receives updates; docs updated. | + +## StellaOps Console (Sprint 23) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| SBOM-CONSOLE-23-001 | TODO | SBOM Service Guild, Cartographer Guild | SBOM-SERVICE-21-001, SBOM-SERVICE-21-003 | Provide Console-focused SBOM catalog API (`/console/sboms`) with filters (artifact, license, scope, asset tags), pagination cursors, evaluation metadata, and immutable JSON projections for raw view drawer. Document schema + determinism guarantees. | API deployed with contract tests, latency ≤ 200 ms P95 on seeded fixtures, docs updated, integration tests confirm parity with underlying projections. | +| SBOM-CONSOLE-23-002 | TODO | SBOM Service Guild | SBOM-CONSOLE-23-001, SBOM-SERVICE-21-002 | Deliver component lookup endpoints powering global search and Graph overlays (component neighborhoods, license overlays, policy deltas) with caching hints and tenant enforcement. | Endpoints documented, caching headers validated, integration tests cover search use cases, telemetry metrics exported. | + +## Graph & Vuln Explorer v1 + +> 2025-10-26 update — Cartographer service (`CARTO-GRAPH-21-001..009`) now owns graph construction/overlays. SBOM Service continues to expose projections and change events via `SBOM-SERVICE-21-00x`. 
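SBOM-CONSOLE-23-001 above describes a Console catalog API at `/console/sboms` with filters and pagination cursors. Assuming the minimal-API style used in `Program.cs` and the Web SDK's implicit usings, a hedged sketch of the endpoint shape might look like the following; `ISbomCatalog`, `SbomSummary`, and the filter handling are hypothetical placeholders rather than the agreed contract.

```csharp
// Sketch only: a possible shape for /console/sboms. Filtering, cursors, tenant
// enforcement, and evaluation metadata are omitted or stubbed.
var builder = WebApplication.CreateBuilder(args);
builder.Services.AddSingleton<ISbomCatalog, InMemorySbomCatalog>();
var app = builder.Build();

app.MapGet("/console/sboms", (string? artifact, string? license, string? scope,
    string? cursor, ISbomCatalog catalog) =>
    Results.Ok(catalog.Query(artifact, license, scope, cursor)));

app.Run();

// Hypothetical read model: deterministic, immutable projections keyed by digest.
public interface ISbomCatalog
{
    IReadOnlyList<SbomSummary> Query(string? artifact, string? license, string? scope, string? cursor);
}

public sealed record SbomSummary(string ArtifactDigest, int Version, string? License, string? Scope);

public sealed class InMemorySbomCatalog : ISbomCatalog
{
    public IReadOnlyList<SbomSummary> Query(string? artifact, string? license, string? scope, string? cursor)
        => Array.Empty<SbomSummary>(); // placeholder: real filtering and paging live in the projection store
}
```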
+ +## Vulnerability Explorer (Sprint 29) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| SBOM-VULN-29-001 | TODO | SBOM Service Guild | SBOM-SERVICE-21-001 | Emit inventory evidence with `scope`, `runtime_flag`, dependency paths, and nearest safe version hints, streaming change events for resolver jobs. | Evidence payloads extended; change events published with tests; documentation updated. | +| SBOM-VULN-29-002 | TODO | SBOM Service Guild, Findings Ledger Guild | SBOM-VULN-29-001, LEDGER-29-002 | Provide resolver feed (artifact, purl, version, paths) via queue/topic for Vuln Explorer candidate generation; ensure idempotent delivery. | Feed operational with dedupe keys; integration tests confirm candidate generation; metrics added. | + +## Advisory AI (Sprint 31) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| SBOM-AIAI-31-001 | TODO | SBOM Service Guild | SBOM-VULN-29-001 | Provide `GET /sbom/paths?purl=...` and version timeline endpoints optimized for Advisory AI (incl. env flags, blast radius metadata). | Endpoints live with caching; perf targets met; tests cover ecosystems. | +| SBOM-AIAI-31-002 | TODO | SBOM Service Guild, Observability Guild | SBOM-AIAI-31-001 | Instrument metrics for path/timeline queries (latency, cache hit rate) and surface dashboards. | Metrics/traces live; dashboards approved. | + +## Orchestrator Dashboard +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| SBOM-ORCH-32-001 | TODO | SBOM Service Guild | ORCH-SVC-32-001, WORKER-GO-32-001, WORKER-PY-32-001 | Register SBOM ingest/index sources with orchestrator, embed worker SDK, and emit artifact hashes + job metadata. | SDK integration tested with orchestrator; artifact hashes persisted; metrics include sbom ingest job lifecycle. | +| SBOM-ORCH-33-001 | TODO | SBOM Service Guild | SBOM-ORCH-32-001, ORCH-SVC-33-001, ORCH-SVC-33-002 | Report backpressure metrics, honor orchestrator pause/throttle signals, and classify error outputs for sbom jobs. | Backpressure metrics exported; pause/resume E2E tests pass; error classes mapped to orchestrator codes. | +| SBOM-ORCH-34-001 | TODO | SBOM Service Guild | SBOM-ORCH-33-001, ORCH-SVC-33-003, ORCH-SVC-34-001 | Implement orchestrator backfill + watermark reconciliation for SBOM ingest/index, ensuring idempotent artifact reuse. | Backfill operations verified with no duplicate artifacts; watermark status persisted; coverage metrics published. | diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Deno/TASKS.md b/src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno/TASKS.md similarity index 99% rename from src/StellaOps.Scanner.Analyzers.Lang.Deno/TASKS.md rename to src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno/TASKS.md index 448e55d9..aa5f597c 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Deno/TASKS.md +++ b/src/Scanner/StellaOps.Scanner.Analyzers.Lang.Deno/TASKS.md @@ -1,21 +1,21 @@ -# Deno Analyzer Task Board -> **Imposed rule:** work of this type or tasks of this type on this component — and everywhere else it should be applied. 
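The Deno module-graph tasks below annotate each edge with `form`, `source`, and a `resolution` of `vendor|cache|fs|declared_only`, plus the reason codes required for AOC-compliant observations. A small hedged sketch of such an edge record, with hypothetical names, could be:

```csharp
// Hypothetical shape only; field names mirror the task-board wording
// (form, source, resolution, reason) but the real observation schema may differ.
public enum DenoEdgeResolution { Vendor, Cache, Fs, DeclaredOnly }

public sealed record DenoModuleEdge(
    string FromSpecifier,          // importing module
    string ToSpecifier,            // resolved target (URL, npm:, node:, file path)
    string Form,                   // e.g. static vs dynamic import
    string Source,                 // how the specifier was mapped (import map, lockfile, vendor dir, ...)
    DenoEdgeResolution Resolution, // vendor | cache | fs | declared_only
    string? ReasonCode);           // reason code carried into the observation
```

Making the resolution discriminant explicit is what gives the determinism harness a stable value to compare across runs.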
- -## Deno Entry-Point Analyzer (Sprint 49) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| SCANNER-ANALYZERS-DENO-26-001 | TODO | Deno Analyzer Guild | SCANNER-ANALYZERS-LANG-10-309N | Build input normalizer & VFS for Deno projects: merge `deno.json(c)`, import maps, lockfiles, vendor dirs, `$DENO_DIR` caches, and container layers. Detect runtime/toolchain hints deterministically. | Normalizer ingests fixtures (source+vendor, cache-only, container) without network; outputs config digest, import map, cache locations, and deterministic module root inventory. | -| SCANNER-ANALYZERS-DENO-26-002 | TODO | Deno Analyzer Guild | SCANNER-ANALYZERS-DENO-26-001 | Module graph builder: resolve static/dynamic imports using import map, `deno.lock`, vendor/, cache, npm bridge, node: builtins, WASM/JSON assertions. Annotate edges with resolution source and form. | Graph reconstruction succeeds on fixtures (vendor, cache-only, npm, remote). Edges include `form`, `source`, `resolution` (`vendor|cache|fs|declared_only`). Determinism harness passes. | -| SCANNER-ANALYZERS-DENO-26-003 | TODO | Deno Analyzer Guild | SCANNER-ANALYZERS-DENO-26-002 | NPM/Node compat adapter: map `npm:` specifiers to cached packages or compat `node_modules`, evaluate package `exports`/conditions, record node: builtin usage. | Fixtures with npm bridge resolve to cached/vendor modules; outputs include npm package metadata + node builtin list; unresolved npm deps flagged. | -| SCANNER-ANALYZERS-DENO-26-004 | TODO | Deno Analyzer Guild | SCANNER-ANALYZERS-DENO-26-002 | Static analyzer for permission/capability signals (FS, net, env, process, crypto, FFI, workers). Detect dynamic-import patterns, literal fetch URLs, tasks vs declared permissions. | Capability records emitted with evidence snippets; dynamic import warnings include pattern info; task vs inferred permission diffs reported. | -| SCANNER-ANALYZERS-DENO-26-005 | TODO | Deno Analyzer Guild | SCANNER-ANALYZERS-DENO-26-002 | Bundle/binary inspector: parse eszip bundles and `deno compile` executables (embedded eszip + snapshot) to recover module graph, config, embedded resources. | Bundle and compile fixtures yield recovered module lists, digests, and target metadata; compiled exe scanning <600ms; determinism verified. | -| SCANNER-ANALYZERS-DENO-26-006 | TODO | Deno Analyzer Guild | SCANNER-ANALYZERS-DENO-26-002 | Container adapter: traverse OCI layers for `deno`, caches, vendor directories, compiled binaries; merge module provenance with layer info. | Container fixtures output runtime version, cache roots, vendor mapping, binary metadata with layer provenance; determinism maintained. | - -## Deno Observation & Runtime (Sprint 50) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| SCANNER-ANALYZERS-DENO-26-007 | TODO | Deno Analyzer Guild | SCANNER-ANALYZERS-DENO-26-002 | Produce AOC-compliant observations: entrypoints, modules, edges, permissions, workers, warnings, binaries with reason codes and contexts. | Observation JSON for fixtures deterministic; edges include form/source/reason; capabilities and permission drift recorded; passes AOC lint. | -| SCANNER-ANALYZERS-DENO-26-008 | TODO | Deno Analyzer Guild, QA Guild | SCANNER-ANALYZERS-DENO-26-007 | Fixture suite + performance benchmarks (vendor, npm, FFI, workers, dynamic import, bundle/binary, cache-only, container). 
| Fixture set under `fixtures/lang/deno/ep`; determinism and perf (<1.5s 2k-module graph) CI gates enabled. | -| SCANNER-ANALYZERS-DENO-26-009 | TODO | Deno Analyzer Guild, Signals Guild | SCANNER-ANALYZERS-DENO-26-007 | Optional runtime evidence hooks (loader/require shim) capturing module loads + permissions during harnessed execution with path hashing. | Runtime harness logs module loads for sample app with scrubbed paths; runtime edges merge without altering static precedence; privacy doc updated. | -| SCANNER-ANALYZERS-DENO-26-010 | TODO | Deno Analyzer Guild, DevOps Guild | SCANNER-ANALYZERS-DENO-26-007 | Package analyzer plug-in, add CLI (`stella deno inspect|resolve|trace`) commands, update Offline Kit docs, ensure Worker integration. | Plug-in manifest deployed; CLI commands documented/tested; Offline Kit instructions updated; worker restart verified. | -| SCANNER-ANALYZERS-DENO-26-011 | TODO | Deno Analyzer Guild | SCANNER-ANALYZERS-DENO-26-004 | Policy signal emitter: net/fs/env/ffi/process/crypto capabilities, remote origin list, npm usage, wasm modules, dynamic-import warnings. | Outputs include policy signal section consumed by tests; schema documented; sample policy evaluation validated. | +# Deno Analyzer Task Board +> **Imposed rule:** work of this type or tasks of this type on this component — and everywhere else it should be applied. + +## Deno Entry-Point Analyzer (Sprint 49) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| SCANNER-ANALYZERS-DENO-26-001 | TODO | Deno Analyzer Guild | SCANNER-ANALYZERS-LANG-10-309N | Build input normalizer & VFS for Deno projects: merge `deno.json(c)`, import maps, lockfiles, vendor dirs, `$DENO_DIR` caches, and container layers. Detect runtime/toolchain hints deterministically. | Normalizer ingests fixtures (source+vendor, cache-only, container) without network; outputs config digest, import map, cache locations, and deterministic module root inventory. | +| SCANNER-ANALYZERS-DENO-26-002 | TODO | Deno Analyzer Guild | SCANNER-ANALYZERS-DENO-26-001 | Module graph builder: resolve static/dynamic imports using import map, `deno.lock`, vendor/, cache, npm bridge, node: builtins, WASM/JSON assertions. Annotate edges with resolution source and form. | Graph reconstruction succeeds on fixtures (vendor, cache-only, npm, remote). Edges include `form`, `source`, `resolution` (`vendor|cache|fs|declared_only`). Determinism harness passes. | +| SCANNER-ANALYZERS-DENO-26-003 | TODO | Deno Analyzer Guild | SCANNER-ANALYZERS-DENO-26-002 | NPM/Node compat adapter: map `npm:` specifiers to cached packages or compat `node_modules`, evaluate package `exports`/conditions, record node: builtin usage. | Fixtures with npm bridge resolve to cached/vendor modules; outputs include npm package metadata + node builtin list; unresolved npm deps flagged. | +| SCANNER-ANALYZERS-DENO-26-004 | TODO | Deno Analyzer Guild | SCANNER-ANALYZERS-DENO-26-002 | Static analyzer for permission/capability signals (FS, net, env, process, crypto, FFI, workers). Detect dynamic-import patterns, literal fetch URLs, tasks vs declared permissions. | Capability records emitted with evidence snippets; dynamic import warnings include pattern info; task vs inferred permission diffs reported. 
| +| SCANNER-ANALYZERS-DENO-26-005 | TODO | Deno Analyzer Guild | SCANNER-ANALYZERS-DENO-26-002 | Bundle/binary inspector: parse eszip bundles and `deno compile` executables (embedded eszip + snapshot) to recover module graph, config, embedded resources. | Bundle and compile fixtures yield recovered module lists, digests, and target metadata; compiled exe scanning <600ms; determinism verified. | +| SCANNER-ANALYZERS-DENO-26-006 | TODO | Deno Analyzer Guild | SCANNER-ANALYZERS-DENO-26-002 | Container adapter: traverse OCI layers for `deno`, caches, vendor directories, compiled binaries; merge module provenance with layer info. | Container fixtures output runtime version, cache roots, vendor mapping, binary metadata with layer provenance; determinism maintained. | + +## Deno Observation & Runtime (Sprint 50) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| SCANNER-ANALYZERS-DENO-26-007 | TODO | Deno Analyzer Guild | SCANNER-ANALYZERS-DENO-26-002 | Produce AOC-compliant observations: entrypoints, modules, edges, permissions, workers, warnings, binaries with reason codes and contexts. | Observation JSON for fixtures deterministic; edges include form/source/reason; capabilities and permission drift recorded; passes AOC lint. | +| SCANNER-ANALYZERS-DENO-26-008 | TODO | Deno Analyzer Guild, QA Guild | SCANNER-ANALYZERS-DENO-26-007 | Fixture suite + performance benchmarks (vendor, npm, FFI, workers, dynamic import, bundle/binary, cache-only, container). | Fixture set under `fixtures/lang/deno/ep`; determinism and perf (<1.5s 2k-module graph) CI gates enabled. | +| SCANNER-ANALYZERS-DENO-26-009 | TODO | Deno Analyzer Guild, Signals Guild | SCANNER-ANALYZERS-DENO-26-007 | Optional runtime evidence hooks (loader/require shim) capturing module loads + permissions during harnessed execution with path hashing. | Runtime harness logs module loads for sample app with scrubbed paths; runtime edges merge without altering static precedence; privacy doc updated. | +| SCANNER-ANALYZERS-DENO-26-010 | TODO | Deno Analyzer Guild, DevOps Guild | SCANNER-ANALYZERS-DENO-26-007 | Package analyzer plug-in, add CLI (`stella deno inspect|resolve|trace`) commands, update Offline Kit docs, ensure Worker integration. | Plug-in manifest deployed; CLI commands documented/tested; Offline Kit instructions updated; worker restart verified. | +| SCANNER-ANALYZERS-DENO-26-011 | TODO | Deno Analyzer Guild | SCANNER-ANALYZERS-DENO-26-004 | Policy signal emitter: net/fs/env/ffi/process/crypto capabilities, remote origin list, npm usage, wasm modules, dynamic-import warnings. | Outputs include policy signal section consumed by tests; schema documented; sample policy evaluation validated. | diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Php/TASKS.md b/src/Scanner/StellaOps.Scanner.Analyzers.Lang.Php/TASKS.md similarity index 99% rename from src/StellaOps.Scanner.Analyzers.Lang.Php/TASKS.md rename to src/Scanner/StellaOps.Scanner.Analyzers.Lang.Php/TASKS.md index 45970ef2..4afb3aec 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Php/TASKS.md +++ b/src/Scanner/StellaOps.Scanner.Analyzers.Lang.Php/TASKS.md @@ -1,22 +1,22 @@ -# PHP Analyzer Task Board -> **Imposed rule:** work of this type or tasks of this type on this component — and everywhere else it should be applied. 
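SCANNER-ANALYZERS-PHP-27-002 below calls for turning composer.json/lock entries into package nodes with PURLs. A rough sketch of that mapping, using the `pkg:composer` package-URL type and deliberately skipping dev packages, error handling, and source provenance:

```csharp
// Rough sketch, not the analyzer's real implementation: read composer.lock and
// emit a pkg:composer PURL per locked package. Percent-encoding edge cases omitted.
using System;
using System.Collections.Generic;
using System.IO;
using System.Text.Json;

foreach (var purl in ComposerPurls("composer.lock")) // path is an assumption for the example
    Console.WriteLine(purl);

static IEnumerable<string> ComposerPurls(string composerLockPath)
{
    using var doc = JsonDocument.Parse(File.ReadAllText(composerLockPath));
    if (!doc.RootElement.TryGetProperty("packages", out var packages))
        yield break;

    foreach (var package in packages.EnumerateArray())
    {
        var name = package.GetProperty("name").GetString();       // "vendor/name"
        var version = package.GetProperty("version").GetString(); // as recorded in the lockfile
        if (name is null || version is null) continue;
        yield return $"pkg:composer/{name}@{version}";
    }
}
```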
- -## PHP Entry-Point Analyzer (Sprint 51) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| SCANNER-ANALYZERS-PHP-27-001 | TODO | PHP Analyzer Guild | SCANNER-ANALYZERS-LANG-10-309P | Build input normalizer & VFS for PHP projects: merge source trees, composer manifests, vendor/, php.ini/conf.d, `.htaccess`, FPM configs, container layers. Detect framework/CMS fingerprints deterministically. | Normalizer ingests fixtures (Laravel, Symfony, WordPress, Drupal, container) without network; outputs config inventory, framework tags, and deterministic module/vendor root list. | -| SCANNER-ANALYZERS-PHP-27-002 | TODO | PHP Analyzer Guild | SCANNER-ANALYZERS-PHP-27-001 | Composer/Autoload analyzer: parse composer.json/lock/installed.json, generate package nodes, autoload edges (psr-4/0/classmap/files), bin entrypoints, composer plugins. | Composer fixtures produce package list with PURLs, autoload graph, bin scripts, plugin inventory; determinism harness passes. | -| SCANNER-ANALYZERS-PHP-27-003 | TODO | PHP Analyzer Guild | SCANNER-ANALYZERS-PHP-27-002 | Include/require graph builder: resolve static includes, capture dynamic include patterns, bootstrap chains, merge with autoload edges. | Include graph constructed for fixtures (legacy, WordPress, Laravel); dynamic includes recorded with patterns; deterministic ordering ensured. | -| SCANNER-ANALYZERS-PHP-27-004 | TODO | PHP Analyzer Guild | SCANNER-ANALYZERS-PHP-27-003 | Runtime capability scanner: detect exec/fs/net/env/serialization/crypto/database usage, stream wrappers, uploads; record evidence snippets. | Capability signals generated for fixtures (exec, curl, unserialize); outputs include file/line/evidence hash; determinism validated. | -| SCANNER-ANALYZERS-PHP-27-005 | TODO | PHP Analyzer Guild | SCANNER-ANALYZERS-PHP-27-001 | PHAR/Archive inspector: parse phar manifests/stubs, hash files, detect embedded vendor trees and phar:// usage. | PHAR fixtures yield file inventory, signature metadata, autoload edges; deterministic parse under <800 ms. | -| SCANNER-ANALYZERS-PHP-27-006 | TODO | PHP Analyzer Guild | SCANNER-ANALYZERS-PHP-27-001 | Framework/CMS surface mapper: extract routes, controllers, middleware, CLI/cron entrypoints for Laravel/Symfony/Slim/WordPress/Drupal/Magento. | Framework fixtures produce route/endpoint lists, CLI command inventory, cron hints; tests validate coverage. | -| SCANNER-ANALYZERS-PHP-27-007 | TODO | PHP Analyzer Guild | SCANNER-ANALYZERS-PHP-27-001 | Container & extension detector: parse php.ini/conf.d, map extensions to .so/.dll, collect web server/FPM settings, upload limits, disable_functions. | Container fixture outputs extension list with file paths, php.ini directives, web server front controller data; determinism maintained. | - -## PHP Observation & Runtime (Sprint 52) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| SCANNER-ANALYZERS-PHP-27-008 | TODO | PHP Analyzer Guild | SCANNER-ANALYZERS-PHP-27-002 | Produce AOC-compliant observations: entrypoints, packages, extensions, modules, edges (require/autoload), capabilities, routes, configs. | Observation JSON for fixtures deterministic; edges contain reason/form; capability and route inventories included; passes AOC lint. 
| -| SCANNER-ANALYZERS-PHP-27-009 | TODO | PHP Analyzer Guild, QA Guild | SCANNER-ANALYZERS-PHP-27-008 | Fixture suite + performance benchmarks (Laravel, Symfony, WordPress, legacy, PHAR, container) with golden outputs. | Fixture set under `fixtures/lang/php/ep`; determinism and perf (<4s 50k files) gates active. | -| SCANNER-ANALYZERS-PHP-27-010 | TODO | PHP Analyzer Guild, Signals Guild | SCANNER-ANALYZERS-PHP-27-008 | Optional runtime evidence hooks (if provided) to ingest audit logs or opcode cache stats with path hashing. | Runtime harness (if supplied) integrates without altering static precedence; hashed paths; documentation updated. | -| SCANNER-ANALYZERS-PHP-27-011 | TODO | PHP Analyzer Guild, DevOps Guild | SCANNER-ANALYZERS-PHP-27-008 | Package analyzer plug-in, add CLI (`stella php inspect|resolve`) commands, update Offline Kit docs, ensure Worker integration. | Plug-in manifest deployed; CLI commands documented/tested; Offline Kit instructions updated; worker restart verified. | -| SCANNER-ANALYZERS-PHP-27-012 | TODO | PHP Analyzer Guild | SCANNER-ANALYZERS-PHP-27-004 | Policy signal emitter: extension requirements/presence, dangerous constructs counters, stream wrapper usage, capability summaries. | Policy signal section emitted and validated against fixtures; schema documented; sample policy evaluation added. | +# PHP Analyzer Task Board +> **Imposed rule:** work of this type or tasks of this type on this component — and everywhere else it should be applied. + +## PHP Entry-Point Analyzer (Sprint 51) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| SCANNER-ANALYZERS-PHP-27-001 | TODO | PHP Analyzer Guild | SCANNER-ANALYZERS-LANG-10-309P | Build input normalizer & VFS for PHP projects: merge source trees, composer manifests, vendor/, php.ini/conf.d, `.htaccess`, FPM configs, container layers. Detect framework/CMS fingerprints deterministically. | Normalizer ingests fixtures (Laravel, Symfony, WordPress, Drupal, container) without network; outputs config inventory, framework tags, and deterministic module/vendor root list. | +| SCANNER-ANALYZERS-PHP-27-002 | TODO | PHP Analyzer Guild | SCANNER-ANALYZERS-PHP-27-001 | Composer/Autoload analyzer: parse composer.json/lock/installed.json, generate package nodes, autoload edges (psr-4/0/classmap/files), bin entrypoints, composer plugins. | Composer fixtures produce package list with PURLs, autoload graph, bin scripts, plugin inventory; determinism harness passes. | +| SCANNER-ANALYZERS-PHP-27-003 | TODO | PHP Analyzer Guild | SCANNER-ANALYZERS-PHP-27-002 | Include/require graph builder: resolve static includes, capture dynamic include patterns, bootstrap chains, merge with autoload edges. | Include graph constructed for fixtures (legacy, WordPress, Laravel); dynamic includes recorded with patterns; deterministic ordering ensured. | +| SCANNER-ANALYZERS-PHP-27-004 | TODO | PHP Analyzer Guild | SCANNER-ANALYZERS-PHP-27-003 | Runtime capability scanner: detect exec/fs/net/env/serialization/crypto/database usage, stream wrappers, uploads; record evidence snippets. | Capability signals generated for fixtures (exec, curl, unserialize); outputs include file/line/evidence hash; determinism validated. | +| SCANNER-ANALYZERS-PHP-27-005 | TODO | PHP Analyzer Guild | SCANNER-ANALYZERS-PHP-27-001 | PHAR/Archive inspector: parse phar manifests/stubs, hash files, detect embedded vendor trees and phar:// usage. 
| PHAR fixtures yield file inventory, signature metadata, autoload edges; deterministic parse under <800 ms. | +| SCANNER-ANALYZERS-PHP-27-006 | TODO | PHP Analyzer Guild | SCANNER-ANALYZERS-PHP-27-001 | Framework/CMS surface mapper: extract routes, controllers, middleware, CLI/cron entrypoints for Laravel/Symfony/Slim/WordPress/Drupal/Magento. | Framework fixtures produce route/endpoint lists, CLI command inventory, cron hints; tests validate coverage. | +| SCANNER-ANALYZERS-PHP-27-007 | TODO | PHP Analyzer Guild | SCANNER-ANALYZERS-PHP-27-001 | Container & extension detector: parse php.ini/conf.d, map extensions to .so/.dll, collect web server/FPM settings, upload limits, disable_functions. | Container fixture outputs extension list with file paths, php.ini directives, web server front controller data; determinism maintained. | + +## PHP Observation & Runtime (Sprint 52) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| SCANNER-ANALYZERS-PHP-27-008 | TODO | PHP Analyzer Guild | SCANNER-ANALYZERS-PHP-27-002 | Produce AOC-compliant observations: entrypoints, packages, extensions, modules, edges (require/autoload), capabilities, routes, configs. | Observation JSON for fixtures deterministic; edges contain reason/form; capability and route inventories included; passes AOC lint. | +| SCANNER-ANALYZERS-PHP-27-009 | TODO | PHP Analyzer Guild, QA Guild | SCANNER-ANALYZERS-PHP-27-008 | Fixture suite + performance benchmarks (Laravel, Symfony, WordPress, legacy, PHAR, container) with golden outputs. | Fixture set under `fixtures/lang/php/ep`; determinism and perf (<4s 50k files) gates active. | +| SCANNER-ANALYZERS-PHP-27-010 | TODO | PHP Analyzer Guild, Signals Guild | SCANNER-ANALYZERS-PHP-27-008 | Optional runtime evidence hooks (if provided) to ingest audit logs or opcode cache stats with path hashing. | Runtime harness (if supplied) integrates without altering static precedence; hashed paths; documentation updated. | +| SCANNER-ANALYZERS-PHP-27-011 | TODO | PHP Analyzer Guild, DevOps Guild | SCANNER-ANALYZERS-PHP-27-008 | Package analyzer plug-in, add CLI (`stella php inspect|resolve`) commands, update Offline Kit docs, ensure Worker integration. | Plug-in manifest deployed; CLI commands documented/tested; Offline Kit instructions updated; worker restart verified. | +| SCANNER-ANALYZERS-PHP-27-012 | TODO | PHP Analyzer Guild | SCANNER-ANALYZERS-PHP-27-004 | Policy signal emitter: extension requirements/presence, dangerous constructs counters, stream wrapper usage, capability summaries. | Policy signal section emitted and validated against fixtures; schema documented; sample policy evaluation added. | diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Ruby/TASKS.md b/src/Scanner/StellaOps.Scanner.Analyzers.Lang.Ruby/TASKS.md similarity index 99% rename from src/StellaOps.Scanner.Analyzers.Lang.Ruby/TASKS.md rename to src/Scanner/StellaOps.Scanner.Analyzers.Lang.Ruby/TASKS.md index f2574e13..b511f496 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Ruby/TASKS.md +++ b/src/Scanner/StellaOps.Scanner.Analyzers.Lang.Ruby/TASKS.md @@ -1,22 +1,22 @@ -# Ruby Analyzer Task Board -> **Imposed rule:** work of this type or tasks of this type on this component — and everywhere else it should be applied. 
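Both the PHP capability scanner above (SCANNER-ANALYZERS-PHP-27-004) and the Ruby capability analyzer below (SCANNER-ANALYZERS-RUBY-28-005) record evidence snippets with file/line and a hash so the output stays deterministic without embedding raw source or host paths. One hedged way to model that record, with illustrative names only:

```csharp
// Illustrative only: hash the evidence snippet so the observation is deterministic
// and privacy-safe; paths are expected to be image-relative, not host paths.
using System;
using System.Security.Cryptography;
using System.Text;

public sealed record CapabilityEvidence(
    string Capability,    // e.g. "exec", "net", "serialization"
    string File,          // image-relative path
    int Line,
    string SnippetSha256) // hash of the matched source snippet
{
    public static CapabilityEvidence Create(string capability, string file, int line, string snippet)
    {
        var hash = Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes(snippet))).ToLowerInvariant();
        return new CapabilityEvidence(capability, file, line, hash);
    }
}
```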
- -## Ruby Entry-Point Analyzer (Sprint 53) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| SCANNER-ANALYZERS-RUBY-28-001 | TODO | Ruby Analyzer Guild | SCANNER-ANALYZERS-LANG-10-309R | Build input normalizer & VFS for Ruby projects: merge source trees, Gemfile/Gemfile.lock, vendor/bundle, .gem archives, `.bundle/config`, Rack configs, containers. Detect framework/job fingerprints deterministically. | Normalizer ingests fixtures (Rails, Rack, Sinatra, Sidekiq, container) without network; outputs config inventory, framework tags, ruby version hints, deterministic gem/vendor root list. | -| SCANNER-ANALYZERS-RUBY-28-002 | TODO | Ruby Analyzer Guild | SCANNER-ANALYZERS-RUBY-28-001 | Gem & Bundler analyzer: parse Gemfile/Gemfile.lock, vendor specs, .gem archives, produce package nodes (PURLs), dependency edges, bin scripts, Bundler group metadata. | Fixtures produce package list with version, groups, path/git sources; .gem archives decoded safely; determinism harness passes. | -| SCANNER-ANALYZERS-RUBY-28-003 | TODO | Ruby Analyzer Guild | SCANNER-ANALYZERS-RUBY-28-002 | Require/autoload graph builder: resolve static/dynamic require, require_relative, load; infer Zeitwerk autoload paths and Rack boot chain. | Require graph built for fixtures (Rails, Rack, legacy); dynamic require warnings recorded; zeitwerk edges generated; deterministic ordering ensured. | -| SCANNER-ANALYZERS-RUBY-28-004 | TODO | Ruby Analyzer Guild | SCANNER-ANALYZERS-RUBY-28-001 | Framework surface mapper: extract routes/controllers/middleware for Rails/Rack/Sinatra/Grape/Hanami; inventory jobs/schedulers (Sidekiq, Resque, ActiveJob, whenever, clockwork). | Framework fixtures emit route, controller, middleware, job, scheduler entries with provenance; tests validate coverage. | -| SCANNER-ANALYZERS-RUBY-28-005 | TODO | Ruby Analyzer Guild | SCANNER-ANALYZERS-RUBY-28-003 | Capability analyzer: detect os-exec, filesystem, network, serialization, crypto, DB usage, TLS posture, dynamic eval; record evidence snippets with file/line. | Capability signals generated for fixtures (system, Net::HTTP, YAML.load, exec); outputs deterministic with hashed snippets. | -| SCANNER-ANALYZERS-RUBY-28-006 | TODO | Ruby Analyzer Guild | SCANNER-ANALYZERS-RUBY-28-001 | Rake task & scheduler analyzer: parse Rakefiles/lib/tasks, capture task names/prereqs/shell commands; parse Sidekiq/whenever/clockwork configs into schedules. | Task/scheduler inventory produced for fixtures; includes cron specs, shell commands; determinism confirmed. | -| SCANNER-ANALYZERS-RUBY-28-007 | TODO | Ruby Analyzer Guild | SCANNER-ANALYZERS-RUBY-28-001 | Container/runtime scanner: detect Ruby version, installed gems, native extensions, web server configs in OCI layers. | Container fixtures output ruby version, gem list, native extension paths, server configs; determinism maintained. | - -## Ruby Observation & Runtime (Sprint 54) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| SCANNER-ANALYZERS-RUBY-28-008 | TODO | Ruby Analyzer Guild | SCANNER-ANALYZERS-RUBY-28-002 | Produce AOC-compliant observations: entrypoints, packages, modules, edges (require/autoload), routes, jobs, tasks, capabilities, configs, warnings. | Observation JSON for fixtures deterministic; edges include reason/form; capability/route/task inventories present; passes AOC lint. 
| -| SCANNER-ANALYZERS-RUBY-28-009 | TODO | Ruby Analyzer Guild, QA Guild | SCANNER-ANALYZERS-RUBY-28-008 | Fixture suite + performance benchmarks (Rails, Rack, Sinatra, Sidekiq, legacy, .gem, container) with golden outputs. | Fixture set under `fixtures/lang/ruby/ep`; determinism & perf (<4.5s 40k files) CI guard active. | -| SCANNER-ANALYZERS-RUBY-28-010 | TODO | Ruby Analyzer Guild, Signals Guild | SCANNER-ANALYZERS-RUBY-28-008 | Optional runtime evidence integration (if provided logs/metrics) with path hashing, without altering static precedence. | Runtime harness logs merge cleanly with static graph; hashed paths ensure privacy; documentation updated. | -| SCANNER-ANALYZERS-RUBY-28-011 | TODO | Ruby Analyzer Guild, DevOps Guild | SCANNER-ANALYZERS-RUBY-28-008 | Package analyzer plug-in, add CLI (`stella ruby inspect|resolve`) commands, update Offline Kit docs, ensure Worker integration. | Plugin manifest deployed; CLI commands documented/tested; Offline Kit instructions updated; worker restart verified. | -| SCANNER-ANALYZERS-RUBY-28-012 | TODO | Ruby Analyzer Guild | SCANNER-ANALYZERS-RUBY-28-005 | Policy signal emitter: rubygems drift, native extension flags, dangerous constructs counts, TLS verify posture, dynamic require eval warnings. | Policy signal section emitted and validated against fixtures; schema documented; sample policy evaluation added. | +# Ruby Analyzer Task Board +> **Imposed rule:** work of this type or tasks of this type on this component — and everywhere else it should be applied. + +## Ruby Entry-Point Analyzer (Sprint 53) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| SCANNER-ANALYZERS-RUBY-28-001 | TODO | Ruby Analyzer Guild | SCANNER-ANALYZERS-LANG-10-309R | Build input normalizer & VFS for Ruby projects: merge source trees, Gemfile/Gemfile.lock, vendor/bundle, .gem archives, `.bundle/config`, Rack configs, containers. Detect framework/job fingerprints deterministically. | Normalizer ingests fixtures (Rails, Rack, Sinatra, Sidekiq, container) without network; outputs config inventory, framework tags, ruby version hints, deterministic gem/vendor root list. | +| SCANNER-ANALYZERS-RUBY-28-002 | TODO | Ruby Analyzer Guild | SCANNER-ANALYZERS-RUBY-28-001 | Gem & Bundler analyzer: parse Gemfile/Gemfile.lock, vendor specs, .gem archives, produce package nodes (PURLs), dependency edges, bin scripts, Bundler group metadata. | Fixtures produce package list with version, groups, path/git sources; .gem archives decoded safely; determinism harness passes. | +| SCANNER-ANALYZERS-RUBY-28-003 | TODO | Ruby Analyzer Guild | SCANNER-ANALYZERS-RUBY-28-002 | Require/autoload graph builder: resolve static/dynamic require, require_relative, load; infer Zeitwerk autoload paths and Rack boot chain. | Require graph built for fixtures (Rails, Rack, legacy); dynamic require warnings recorded; zeitwerk edges generated; deterministic ordering ensured. | +| SCANNER-ANALYZERS-RUBY-28-004 | TODO | Ruby Analyzer Guild | SCANNER-ANALYZERS-RUBY-28-001 | Framework surface mapper: extract routes/controllers/middleware for Rails/Rack/Sinatra/Grape/Hanami; inventory jobs/schedulers (Sidekiq, Resque, ActiveJob, whenever, clockwork). | Framework fixtures emit route, controller, middleware, job, scheduler entries with provenance; tests validate coverage. 
| +| SCANNER-ANALYZERS-RUBY-28-005 | TODO | Ruby Analyzer Guild | SCANNER-ANALYZERS-RUBY-28-003 | Capability analyzer: detect os-exec, filesystem, network, serialization, crypto, DB usage, TLS posture, dynamic eval; record evidence snippets with file/line. | Capability signals generated for fixtures (system, Net::HTTP, YAML.load, exec); outputs deterministic with hashed snippets. | +| SCANNER-ANALYZERS-RUBY-28-006 | TODO | Ruby Analyzer Guild | SCANNER-ANALYZERS-RUBY-28-001 | Rake task & scheduler analyzer: parse Rakefiles/lib/tasks, capture task names/prereqs/shell commands; parse Sidekiq/whenever/clockwork configs into schedules. | Task/scheduler inventory produced for fixtures; includes cron specs, shell commands; determinism confirmed. | +| SCANNER-ANALYZERS-RUBY-28-007 | TODO | Ruby Analyzer Guild | SCANNER-ANALYZERS-RUBY-28-001 | Container/runtime scanner: detect Ruby version, installed gems, native extensions, web server configs in OCI layers. | Container fixtures output ruby version, gem list, native extension paths, server configs; determinism maintained. | + +## Ruby Observation & Runtime (Sprint 54) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| SCANNER-ANALYZERS-RUBY-28-008 | TODO | Ruby Analyzer Guild | SCANNER-ANALYZERS-RUBY-28-002 | Produce AOC-compliant observations: entrypoints, packages, modules, edges (require/autoload), routes, jobs, tasks, capabilities, configs, warnings. | Observation JSON for fixtures deterministic; edges include reason/form; capability/route/task inventories present; passes AOC lint. | +| SCANNER-ANALYZERS-RUBY-28-009 | TODO | Ruby Analyzer Guild, QA Guild | SCANNER-ANALYZERS-RUBY-28-008 | Fixture suite + performance benchmarks (Rails, Rack, Sinatra, Sidekiq, legacy, .gem, container) with golden outputs. | Fixture set under `fixtures/lang/ruby/ep`; determinism & perf (<4.5s 40k files) CI guard active. | +| SCANNER-ANALYZERS-RUBY-28-010 | TODO | Ruby Analyzer Guild, Signals Guild | SCANNER-ANALYZERS-RUBY-28-008 | Optional runtime evidence integration (if provided logs/metrics) with path hashing, without altering static precedence. | Runtime harness logs merge cleanly with static graph; hashed paths ensure privacy; documentation updated. | +| SCANNER-ANALYZERS-RUBY-28-011 | TODO | Ruby Analyzer Guild, DevOps Guild | SCANNER-ANALYZERS-RUBY-28-008 | Package analyzer plug-in, add CLI (`stella ruby inspect|resolve`) commands, update Offline Kit docs, ensure Worker integration. | Plugin manifest deployed; CLI commands documented/tested; Offline Kit instructions updated; worker restart verified. | +| SCANNER-ANALYZERS-RUBY-28-012 | TODO | Ruby Analyzer Guild | SCANNER-ANALYZERS-RUBY-28-005 | Policy signal emitter: rubygems drift, native extension flags, dangerous constructs counts, TLS verify posture, dynamic require eval warnings. | Policy signal section emitted and validated against fixtures; schema documented; sample policy evaluation added. 
| diff --git a/src/StellaOps.Scanner.Analyzers.Native/TASKS.md b/src/Scanner/StellaOps.Scanner.Analyzers.Native/TASKS.md similarity index 99% rename from src/StellaOps.Scanner.Analyzers.Native/TASKS.md rename to src/Scanner/StellaOps.Scanner.Analyzers.Native/TASKS.md index 71594882..e796c253 100644 --- a/src/StellaOps.Scanner.Analyzers.Native/TASKS.md +++ b/src/Scanner/StellaOps.Scanner.Analyzers.Native/TASKS.md @@ -1,20 +1,20 @@ -# Native Analyzer Task Board -> **Imposed rule:** work of this type or tasks of this type on this component — and everywhere else it should be applied. - -## Native Static Analyzer (Sprint 37) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| SCANNER-ANALYZERS-NATIVE-20-001 | TODO | Native Analyzer Guild | SCANNER-CORE-09-501 | Implement format detector and binary identity model supporting ELF, PE/COFF, and Mach-O (including fat slices). Capture arch, OS, build-id/UUID, interpreter metadata. | Detector recognises sample binaries across linux/windows/macos; entrypoint identity includes arch+os slice and stable hash; fixtures stored under `fixtures/native/format-detector`. | -| SCANNER-ANALYZERS-NATIVE-20-002 | TODO | Native Analyzer Guild | SCANNER-ANALYZERS-NATIVE-20-001 | Parse ELF dynamic sections: `DT_NEEDED`, `DT_RPATH`, `DT_RUNPATH`, symbol versions, interpreter, and note build-id. Emit declared dependency records with reason `elf-dtneeded` and attach version needs. | ELF fixtures (glibc, musl, Go static) produce deterministic dependency records with runpath/rpath metadata and symbol version needs. | -| SCANNER-ANALYZERS-NATIVE-20-003 | TODO | Native Analyzer Guild | SCANNER-ANALYZERS-NATIVE-20-001 | Parse PE imports, delay-load tables, manifests/SxS metadata, and subsystem flags. Emit edges with reasons `pe-import` and `pe-delayimport`, plus SxS policy metadata. | Windows fixtures (standard, delay-load, SxS) generate dependency edges with policy hashes and delay-load markers; unit tests validate manifest parsing. | -| SCANNER-ANALYZERS-NATIVE-20-004 | TODO | Native Analyzer Guild | SCANNER-ANALYZERS-NATIVE-20-001 | Parse Mach-O load commands (`LC_LOAD_DYLIB`, `LC_REEXPORT_DYLIB`, `LC_RPATH`, `LC_UUID`, fat headers). Handle `@rpath/@loader_path` placeholders and slice separation. | Mach-O fixtures (single + universal) emit dependency edges per slice with expanded paths and UUID metadata; tests confirm `@rpath` expansion order. | -| SCANNER-ANALYZERS-NATIVE-20-005 | TODO | Native Analyzer Guild | SCANNER-ANALYZERS-NATIVE-20-002, SCANNER-ANALYZERS-NATIVE-20-003, SCANNER-ANALYZERS-NATIVE-20-004 | Implement resolver engine modeling loader search order for ELF (rpath/runpath/cache/default), PE (SafeDll search + SxS), and Mach-O (`@rpath` expansion). Works against virtual image roots, producing explain traces. | Resolver passes golden tests across linux/windows/macos fixtures; resolution trace records attempted paths; no host filesystem access in tests. | -| SCANNER-ANALYZERS-NATIVE-20-006 | TODO | Native Analyzer Guild | SCANNER-ANALYZERS-NATIVE-20-005 | Build heuristic scanner for `dlopen`/`LoadLibrary` strings, plugin ecosystem configs, and Go/Rust static hints. Emit edges with `reason_code` (`string-dlopen`, `config-plugin`, `ecosystem-heuristic`) and confidence levels. | Heuristic edges appear in fixtures (nginx modules, dlopen string literals); confidence flags applied; explain metadata references source string/config path. 
| - -## Native Observation Pipeline (Sprint 38) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| SCANNER-ANALYZERS-NATIVE-20-007 | TODO | Native Analyzer Guild, SBOM Service Guild | SCANNER-ANALYZERS-NATIVE-20-005 | Serialize AOC-compliant observations: entrypoints + dependency edges + environment profiles (search paths, interpreter, loader metadata). Integrate with Scanner writer API. | Analyzer emits normalized `entrypoints[]`/`edges[]` JSON for fixtures; SBOM tests consume output; determinism harness updated. | -| SCANNER-ANALYZERS-NATIVE-20-008 | TODO | Native Analyzer Guild, QA Guild | SCANNER-ANALYZERS-NATIVE-20-007 | Author cross-platform fixtures (ELF dynamic/static, PE delay-load/SxS, Mach-O @rpath, plugin configs) and determinism benchmarks (<25 ms / binary, <250 MB). | Fixture suite committed; determinism CI passes; benchmark report documents perf budgets and regression guard rails. | -| SCANNER-ANALYZERS-NATIVE-20-009 | TODO | Native Analyzer Guild, Signals Guild | SCANNER-ANALYZERS-NATIVE-20-007 | Provide optional runtime capture adapters (Linux eBPF `dlopen`, Windows ETW ImageLoad, macOS dyld interpose) writing append-only runtime evidence. Include redaction/sandbox guidance. | Runtime harness emits `runtime-load` edges for sample binaries; data scrubbed to image-relative paths; docs outline sandboxing and privacy. | -| SCANNER-ANALYZERS-NATIVE-20-010 | TODO | Native Analyzer Guild, DevOps Guild | SCANNER-ANALYZERS-NATIVE-20-007 | Package native analyzer as restart-time plug-in with manifest/DI registration; update Offline Kit bundle + documentation. | Plugin manifest copied to `plugins/scanner/analyzers/native/`; Worker loads analyzer on restart; Offline Kit instructions updated; smoke test verifies packaging. | +# Native Analyzer Task Board +> **Imposed rule:** work of this type or tasks of this type on this component — and everywhere else it should be applied. + +## Native Static Analyzer (Sprint 37) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| SCANNER-ANALYZERS-NATIVE-20-001 | TODO | Native Analyzer Guild | SCANNER-CORE-09-501 | Implement format detector and binary identity model supporting ELF, PE/COFF, and Mach-O (including fat slices). Capture arch, OS, build-id/UUID, interpreter metadata. | Detector recognises sample binaries across linux/windows/macos; entrypoint identity includes arch+os slice and stable hash; fixtures stored under `fixtures/native/format-detector`. | +| SCANNER-ANALYZERS-NATIVE-20-002 | TODO | Native Analyzer Guild | SCANNER-ANALYZERS-NATIVE-20-001 | Parse ELF dynamic sections: `DT_NEEDED`, `DT_RPATH`, `DT_RUNPATH`, symbol versions, interpreter, and note build-id. Emit declared dependency records with reason `elf-dtneeded` and attach version needs. | ELF fixtures (glibc, musl, Go static) produce deterministic dependency records with runpath/rpath metadata and symbol version needs. | +| SCANNER-ANALYZERS-NATIVE-20-003 | TODO | Native Analyzer Guild | SCANNER-ANALYZERS-NATIVE-20-001 | Parse PE imports, delay-load tables, manifests/SxS metadata, and subsystem flags. Emit edges with reasons `pe-import` and `pe-delayimport`, plus SxS policy metadata. | Windows fixtures (standard, delay-load, SxS) generate dependency edges with policy hashes and delay-load markers; unit tests validate manifest parsing. 
| +| SCANNER-ANALYZERS-NATIVE-20-004 | TODO | Native Analyzer Guild | SCANNER-ANALYZERS-NATIVE-20-001 | Parse Mach-O load commands (`LC_LOAD_DYLIB`, `LC_REEXPORT_DYLIB`, `LC_RPATH`, `LC_UUID`, fat headers). Handle `@rpath/@loader_path` placeholders and slice separation. | Mach-O fixtures (single + universal) emit dependency edges per slice with expanded paths and UUID metadata; tests confirm `@rpath` expansion order. | +| SCANNER-ANALYZERS-NATIVE-20-005 | TODO | Native Analyzer Guild | SCANNER-ANALYZERS-NATIVE-20-002, SCANNER-ANALYZERS-NATIVE-20-003, SCANNER-ANALYZERS-NATIVE-20-004 | Implement resolver engine modeling loader search order for ELF (rpath/runpath/cache/default), PE (SafeDll search + SxS), and Mach-O (`@rpath` expansion). Works against virtual image roots, producing explain traces. | Resolver passes golden tests across linux/windows/macos fixtures; resolution trace records attempted paths; no host filesystem access in tests. | +| SCANNER-ANALYZERS-NATIVE-20-006 | TODO | Native Analyzer Guild | SCANNER-ANALYZERS-NATIVE-20-005 | Build heuristic scanner for `dlopen`/`LoadLibrary` strings, plugin ecosystem configs, and Go/Rust static hints. Emit edges with `reason_code` (`string-dlopen`, `config-plugin`, `ecosystem-heuristic`) and confidence levels. | Heuristic edges appear in fixtures (nginx modules, dlopen string literals); confidence flags applied; explain metadata references source string/config path. | + +## Native Observation Pipeline (Sprint 38) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| SCANNER-ANALYZERS-NATIVE-20-007 | TODO | Native Analyzer Guild, SBOM Service Guild | SCANNER-ANALYZERS-NATIVE-20-005 | Serialize AOC-compliant observations: entrypoints + dependency edges + environment profiles (search paths, interpreter, loader metadata). Integrate with Scanner writer API. | Analyzer emits normalized `entrypoints[]`/`edges[]` JSON for fixtures; SBOM tests consume output; determinism harness updated. | +| SCANNER-ANALYZERS-NATIVE-20-008 | TODO | Native Analyzer Guild, QA Guild | SCANNER-ANALYZERS-NATIVE-20-007 | Author cross-platform fixtures (ELF dynamic/static, PE delay-load/SxS, Mach-O @rpath, plugin configs) and determinism benchmarks (<25 ms / binary, <250 MB). | Fixture suite committed; determinism CI passes; benchmark report documents perf budgets and regression guard rails. | +| SCANNER-ANALYZERS-NATIVE-20-009 | TODO | Native Analyzer Guild, Signals Guild | SCANNER-ANALYZERS-NATIVE-20-007 | Provide optional runtime capture adapters (Linux eBPF `dlopen`, Windows ETW ImageLoad, macOS dyld interpose) writing append-only runtime evidence. Include redaction/sandbox guidance. | Runtime harness emits `runtime-load` edges for sample binaries; data scrubbed to image-relative paths; docs outline sandboxing and privacy. | +| SCANNER-ANALYZERS-NATIVE-20-010 | TODO | Native Analyzer Guild, DevOps Guild | SCANNER-ANALYZERS-NATIVE-20-007 | Package native analyzer as restart-time plug-in with manifest/DI registration; update Offline Kit bundle + documentation. | Plugin manifest copied to `plugins/scanner/analyzers/native/`; Worker loads analyzer on restart; Offline Kit instructions updated; smoke test verifies packaging. 
| diff --git a/src/StellaOps.Scanner.Sbomer.BuildXPlugin/AGENTS.md b/src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/AGENTS.md similarity index 100% rename from src/StellaOps.Scanner.Sbomer.BuildXPlugin/AGENTS.md rename to src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/AGENTS.md diff --git a/src/StellaOps.Scanner.Sbomer.BuildXPlugin/Attestation/AttestorClient.cs b/src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/Attestation/AttestorClient.cs similarity index 100% rename from src/StellaOps.Scanner.Sbomer.BuildXPlugin/Attestation/AttestorClient.cs rename to src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/Attestation/AttestorClient.cs diff --git a/src/StellaOps.Scanner.Sbomer.BuildXPlugin/Attestation/AttestorProvenanceRequest.cs b/src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/Attestation/AttestorProvenanceRequest.cs similarity index 100% rename from src/StellaOps.Scanner.Sbomer.BuildXPlugin/Attestation/AttestorProvenanceRequest.cs rename to src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/Attestation/AttestorProvenanceRequest.cs diff --git a/src/StellaOps.Scanner.Sbomer.BuildXPlugin/BuildxPluginException.cs b/src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/BuildxPluginException.cs similarity index 100% rename from src/StellaOps.Scanner.Sbomer.BuildXPlugin/BuildxPluginException.cs rename to src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/BuildxPluginException.cs diff --git a/src/StellaOps.Scanner.Sbomer.BuildXPlugin/Cas/CasWriteResult.cs b/src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/Cas/CasWriteResult.cs similarity index 100% rename from src/StellaOps.Scanner.Sbomer.BuildXPlugin/Cas/CasWriteResult.cs rename to src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/Cas/CasWriteResult.cs diff --git a/src/StellaOps.Scanner.Sbomer.BuildXPlugin/Cas/LocalCasClient.cs b/src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/Cas/LocalCasClient.cs similarity index 100% rename from src/StellaOps.Scanner.Sbomer.BuildXPlugin/Cas/LocalCasClient.cs rename to src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/Cas/LocalCasClient.cs diff --git a/src/StellaOps.Scanner.Sbomer.BuildXPlugin/Cas/LocalCasOptions.cs b/src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/Cas/LocalCasOptions.cs similarity index 100% rename from src/StellaOps.Scanner.Sbomer.BuildXPlugin/Cas/LocalCasOptions.cs rename to src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/Cas/LocalCasOptions.cs diff --git a/src/StellaOps.Scanner.Sbomer.BuildXPlugin/Descriptor/DescriptorArtifact.cs b/src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/Descriptor/DescriptorArtifact.cs similarity index 100% rename from src/StellaOps.Scanner.Sbomer.BuildXPlugin/Descriptor/DescriptorArtifact.cs rename to src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/Descriptor/DescriptorArtifact.cs diff --git a/src/StellaOps.Scanner.Sbomer.BuildXPlugin/Descriptor/DescriptorDocument.cs b/src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/Descriptor/DescriptorDocument.cs similarity index 100% rename from src/StellaOps.Scanner.Sbomer.BuildXPlugin/Descriptor/DescriptorDocument.cs rename to src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/Descriptor/DescriptorDocument.cs diff --git a/src/StellaOps.Scanner.Sbomer.BuildXPlugin/Descriptor/DescriptorGenerator.cs b/src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/Descriptor/DescriptorGenerator.cs similarity index 100% rename from src/StellaOps.Scanner.Sbomer.BuildXPlugin/Descriptor/DescriptorGenerator.cs rename to src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/Descriptor/DescriptorGenerator.cs diff --git 
a/src/StellaOps.Scanner.Sbomer.BuildXPlugin/Descriptor/DescriptorGeneratorMetadata.cs b/src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/Descriptor/DescriptorGeneratorMetadata.cs similarity index 100% rename from src/StellaOps.Scanner.Sbomer.BuildXPlugin/Descriptor/DescriptorGeneratorMetadata.cs rename to src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/Descriptor/DescriptorGeneratorMetadata.cs diff --git a/src/StellaOps.Scanner.Sbomer.BuildXPlugin/Descriptor/DescriptorProvenance.cs b/src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/Descriptor/DescriptorProvenance.cs similarity index 100% rename from src/StellaOps.Scanner.Sbomer.BuildXPlugin/Descriptor/DescriptorProvenance.cs rename to src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/Descriptor/DescriptorProvenance.cs diff --git a/src/StellaOps.Scanner.Sbomer.BuildXPlugin/Descriptor/DescriptorRequest.cs b/src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/Descriptor/DescriptorRequest.cs similarity index 100% rename from src/StellaOps.Scanner.Sbomer.BuildXPlugin/Descriptor/DescriptorRequest.cs rename to src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/Descriptor/DescriptorRequest.cs diff --git a/src/StellaOps.Scanner.Sbomer.BuildXPlugin/Descriptor/DescriptorSubject.cs b/src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/Descriptor/DescriptorSubject.cs similarity index 100% rename from src/StellaOps.Scanner.Sbomer.BuildXPlugin/Descriptor/DescriptorSubject.cs rename to src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/Descriptor/DescriptorSubject.cs diff --git a/src/StellaOps.Scanner.Sbomer.BuildXPlugin/Manifest/BuildxPluginCas.cs b/src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/Manifest/BuildxPluginCas.cs similarity index 100% rename from src/StellaOps.Scanner.Sbomer.BuildXPlugin/Manifest/BuildxPluginCas.cs rename to src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/Manifest/BuildxPluginCas.cs diff --git a/src/StellaOps.Scanner.Sbomer.BuildXPlugin/Manifest/BuildxPluginEntryPoint.cs b/src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/Manifest/BuildxPluginEntryPoint.cs similarity index 100% rename from src/StellaOps.Scanner.Sbomer.BuildXPlugin/Manifest/BuildxPluginEntryPoint.cs rename to src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/Manifest/BuildxPluginEntryPoint.cs diff --git a/src/StellaOps.Scanner.Sbomer.BuildXPlugin/Manifest/BuildxPluginImage.cs b/src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/Manifest/BuildxPluginImage.cs similarity index 100% rename from src/StellaOps.Scanner.Sbomer.BuildXPlugin/Manifest/BuildxPluginImage.cs rename to src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/Manifest/BuildxPluginImage.cs diff --git a/src/StellaOps.Scanner.Sbomer.BuildXPlugin/Manifest/BuildxPluginManifest.cs b/src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/Manifest/BuildxPluginManifest.cs similarity index 100% rename from src/StellaOps.Scanner.Sbomer.BuildXPlugin/Manifest/BuildxPluginManifest.cs rename to src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/Manifest/BuildxPluginManifest.cs diff --git a/src/StellaOps.Scanner.Sbomer.BuildXPlugin/Manifest/BuildxPluginManifestLoader.cs b/src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/Manifest/BuildxPluginManifestLoader.cs similarity index 100% rename from src/StellaOps.Scanner.Sbomer.BuildXPlugin/Manifest/BuildxPluginManifestLoader.cs rename to src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/Manifest/BuildxPluginManifestLoader.cs diff --git a/src/StellaOps.Scanner.Sbomer.BuildXPlugin/Program.cs b/src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/Program.cs similarity index 100% 
rename from src/StellaOps.Scanner.Sbomer.BuildXPlugin/Program.cs rename to src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/Program.cs diff --git a/src/StellaOps.Scanner.Sbomer.BuildXPlugin/StellaOps.Scanner.Sbomer.BuildXPlugin.csproj b/src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/StellaOps.Scanner.Sbomer.BuildXPlugin.csproj similarity index 100% rename from src/StellaOps.Scanner.Sbomer.BuildXPlugin/StellaOps.Scanner.Sbomer.BuildXPlugin.csproj rename to src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/StellaOps.Scanner.Sbomer.BuildXPlugin.csproj diff --git a/src/StellaOps.Scanner.Sbomer.BuildXPlugin/TASKS.md b/src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/TASKS.md similarity index 100% rename from src/StellaOps.Scanner.Sbomer.BuildXPlugin/TASKS.md rename to src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/TASKS.md diff --git a/src/StellaOps.Scanner.Sbomer.BuildXPlugin/stellaops.sbom-indexer.manifest.json b/src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/stellaops.sbom-indexer.manifest.json similarity index 100% rename from src/StellaOps.Scanner.Sbomer.BuildXPlugin/stellaops.sbom-indexer.manifest.json rename to src/Scanner/StellaOps.Scanner.Sbomer.BuildXPlugin/stellaops.sbom-indexer.manifest.json diff --git a/src/StellaOps.Scanner.WebService/AssemblyInfo.cs b/src/Scanner/StellaOps.Scanner.WebService/AssemblyInfo.cs similarity index 100% rename from src/StellaOps.Scanner.WebService/AssemblyInfo.cs rename to src/Scanner/StellaOps.Scanner.WebService/AssemblyInfo.cs diff --git a/src/StellaOps.Scanner.WebService/Constants/ProblemTypes.cs b/src/Scanner/StellaOps.Scanner.WebService/Constants/ProblemTypes.cs similarity index 100% rename from src/StellaOps.Scanner.WebService/Constants/ProblemTypes.cs rename to src/Scanner/StellaOps.Scanner.WebService/Constants/ProblemTypes.cs diff --git a/src/StellaOps.Scanner.WebService/Contracts/OrchestratorEventContracts.cs b/src/Scanner/StellaOps.Scanner.WebService/Contracts/OrchestratorEventContracts.cs similarity index 96% rename from src/StellaOps.Scanner.WebService/Contracts/OrchestratorEventContracts.cs rename to src/Scanner/StellaOps.Scanner.WebService/Contracts/OrchestratorEventContracts.cs index 2bbb500d..f61bcd77 100644 --- a/src/StellaOps.Scanner.WebService/Contracts/OrchestratorEventContracts.cs +++ b/src/Scanner/StellaOps.Scanner.WebService/Contracts/OrchestratorEventContracts.cs @@ -1,277 +1,277 @@ -using System; -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Text.Json.Serialization; - -namespace StellaOps.Scanner.WebService.Contracts; - -internal static class OrchestratorEventKinds -{ - public const string ScannerReportReady = "scanner.event.report.ready"; - public const string ScannerScanCompleted = "scanner.event.scan.completed"; -} - -internal sealed record OrchestratorEvent -{ - [JsonPropertyName("eventId")] - [JsonPropertyOrder(0)] - public Guid EventId { get; init; } - - [JsonPropertyName("kind")] - [JsonPropertyOrder(1)] - public string Kind { get; init; } = string.Empty; - - [JsonPropertyName("version")] - [JsonPropertyOrder(2)] - public int Version { get; init; } = 1; - - [JsonPropertyName("tenant")] - [JsonPropertyOrder(3)] - public string Tenant { get; init; } = string.Empty; - - [JsonPropertyName("occurredAt")] - [JsonPropertyOrder(4)] - public DateTimeOffset OccurredAt { get; init; } - - [JsonPropertyName("recordedAt")] - [JsonPropertyOrder(5)] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public DateTimeOffset? 
RecordedAt { get; init; } - - [JsonPropertyName("source")] - [JsonPropertyOrder(6)] - public string Source { get; init; } = string.Empty; - - [JsonPropertyName("idempotencyKey")] - [JsonPropertyOrder(7)] - public string IdempotencyKey { get; init; } = string.Empty; - - [JsonPropertyName("correlationId")] - [JsonPropertyOrder(8)] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? CorrelationId { get; init; } - - [JsonPropertyName("traceId")] - [JsonPropertyOrder(9)] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? TraceId { get; init; } - - [JsonPropertyName("spanId")] - [JsonPropertyOrder(10)] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? SpanId { get; init; } - - [JsonPropertyName("scope")] - [JsonPropertyOrder(11)] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public OrchestratorEventScope? Scope { get; init; } - - [JsonPropertyName("payload")] - [JsonPropertyOrder(12)] - public OrchestratorEventPayload Payload { get; init; } = default!; - - [JsonPropertyName("attributes")] - [JsonPropertyOrder(13)] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public ImmutableSortedDictionary<string, string>? Attributes { get; init; } -} - -internal sealed record OrchestratorEventScope -{ - [JsonPropertyName("namespace")] - [JsonPropertyOrder(0)] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? Namespace { get; init; } - - [JsonPropertyName("repo")] - [JsonPropertyOrder(1)] - public string Repo { get; init; } = string.Empty; - - [JsonPropertyName("digest")] - [JsonPropertyOrder(2)] - public string Digest { get; init; } = string.Empty; - - [JsonPropertyName("component")] - [JsonPropertyOrder(3)] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? Component { get; init; } - - [JsonPropertyName("image")] - [JsonPropertyOrder(4)] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? Image { get; init; } -} - -internal abstract record OrchestratorEventPayload; - -internal sealed record ReportReadyEventPayload : OrchestratorEventPayload -{ - [JsonPropertyName("reportId")] - [JsonPropertyOrder(0)] - public string ReportId { get; init; } = string.Empty; - - [JsonPropertyName("scanId")] - [JsonPropertyOrder(1)] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? ScanId { get; init; } - - [JsonPropertyName("imageDigest")] - [JsonPropertyOrder(2)] - public string ImageDigest { get; init; } = string.Empty; - - [JsonPropertyName("generatedAt")] - [JsonPropertyOrder(3)] - public DateTimeOffset GeneratedAt { get; init; } - - [JsonPropertyName("verdict")] - [JsonPropertyOrder(4)] - public string Verdict { get; init; } = string.Empty; - - [JsonPropertyName("summary")] - [JsonPropertyOrder(5)] - public ReportSummaryDto Summary { get; init; } = new(); - - [JsonPropertyName("delta")] - [JsonPropertyOrder(6)] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public ReportDeltaPayload? 
Delta { get; init; } - - [JsonPropertyName("quietedFindingCount")] - [JsonPropertyOrder(7)] - public int QuietedFindingCount { get; init; } - - [JsonPropertyName("policy")] - [JsonPropertyOrder(8)] - public ReportPolicyDto Policy { get; init; } = new(); - - [JsonPropertyName("links")] - [JsonPropertyOrder(9)] - public ReportLinksPayload Links { get; init; } = new(); - - [JsonPropertyName("dsse")] - [JsonPropertyOrder(10)] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public DsseEnvelopeDto? Dsse { get; init; } - - [JsonPropertyName("report")] - [JsonPropertyOrder(11)] - public ReportDocumentDto Report { get; init; } = new(); -} - -internal sealed record ScanCompletedEventPayload : OrchestratorEventPayload -{ - [JsonPropertyName("reportId")] - [JsonPropertyOrder(0)] - public string ReportId { get; init; } = string.Empty; - - [JsonPropertyName("scanId")] - [JsonPropertyOrder(1)] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? ScanId { get; init; } - - [JsonPropertyName("imageDigest")] - [JsonPropertyOrder(2)] - public string ImageDigest { get; init; } = string.Empty; - - [JsonPropertyName("verdict")] - [JsonPropertyOrder(3)] - public string Verdict { get; init; } = string.Empty; - - [JsonPropertyName("summary")] - [JsonPropertyOrder(4)] - public ReportSummaryDto Summary { get; init; } = new(); - - [JsonPropertyName("delta")] - [JsonPropertyOrder(5)] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public ReportDeltaPayload? Delta { get; init; } - - [JsonPropertyName("policy")] - [JsonPropertyOrder(6)] - public ReportPolicyDto Policy { get; init; } = new(); - - [JsonPropertyName("findings")] - [JsonPropertyOrder(7)] - public IReadOnlyList<FindingSummaryPayload> Findings { get; init; } = Array.Empty<FindingSummaryPayload>(); - - [JsonPropertyName("links")] - [JsonPropertyOrder(8)] - public ReportLinksPayload Links { get; init; } = new(); - - [JsonPropertyName("dsse")] - [JsonPropertyOrder(9)] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public DsseEnvelopeDto? Dsse { get; init; } - - [JsonPropertyName("report")] - [JsonPropertyOrder(10)] - public ReportDocumentDto Report { get; init; } = new(); -} - -internal sealed record ReportDeltaPayload -{ - [JsonPropertyName("newCritical")] - [JsonPropertyOrder(0)] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public int? NewCritical { get; init; } - - [JsonPropertyName("newHigh")] - [JsonPropertyOrder(1)] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public int? NewHigh { get; init; } - - [JsonPropertyName("kev")] - [JsonPropertyOrder(2)] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public IReadOnlyList<string>? Kev { get; init; } -} - -internal sealed record ReportLinksPayload -{ - [JsonPropertyName("ui")] - [JsonPropertyOrder(0)] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? Ui { get; init; } - - [JsonPropertyName("report")] - [JsonPropertyOrder(1)] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? Report { get; init; } - - [JsonPropertyName("policy")] - [JsonPropertyOrder(2)] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? Policy { get; init; } - - [JsonPropertyName("attestation")] - [JsonPropertyOrder(3)] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? 
Attestation { get; init; } -} - -internal sealed record FindingSummaryPayload -{ - [JsonPropertyName("id")] - [JsonPropertyOrder(0)] - public string Id { get; init; } = string.Empty; - - [JsonPropertyName("severity")] - [JsonPropertyOrder(1)] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? Severity { get; init; } - - [JsonPropertyName("cve")] - [JsonPropertyOrder(2)] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? Cve { get; init; } - - [JsonPropertyName("purl")] - [JsonPropertyOrder(3)] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? Purl { get; init; } - - [JsonPropertyName("reachability")] - [JsonPropertyOrder(4)] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? Reachability { get; init; } -} +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Text.Json.Serialization; + +namespace StellaOps.Scanner.WebService.Contracts; + +internal static class OrchestratorEventKinds +{ + public const string ScannerReportReady = "scanner.event.report.ready"; + public const string ScannerScanCompleted = "scanner.event.scan.completed"; +} + +internal sealed record OrchestratorEvent +{ + [JsonPropertyName("eventId")] + [JsonPropertyOrder(0)] + public Guid EventId { get; init; } + + [JsonPropertyName("kind")] + [JsonPropertyOrder(1)] + public string Kind { get; init; } = string.Empty; + + [JsonPropertyName("version")] + [JsonPropertyOrder(2)] + public int Version { get; init; } = 1; + + [JsonPropertyName("tenant")] + [JsonPropertyOrder(3)] + public string Tenant { get; init; } = string.Empty; + + [JsonPropertyName("occurredAt")] + [JsonPropertyOrder(4)] + public DateTimeOffset OccurredAt { get; init; } + + [JsonPropertyName("recordedAt")] + [JsonPropertyOrder(5)] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public DateTimeOffset? RecordedAt { get; init; } + + [JsonPropertyName("source")] + [JsonPropertyOrder(6)] + public string Source { get; init; } = string.Empty; + + [JsonPropertyName("idempotencyKey")] + [JsonPropertyOrder(7)] + public string IdempotencyKey { get; init; } = string.Empty; + + [JsonPropertyName("correlationId")] + [JsonPropertyOrder(8)] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? CorrelationId { get; init; } + + [JsonPropertyName("traceId")] + [JsonPropertyOrder(9)] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? TraceId { get; init; } + + [JsonPropertyName("spanId")] + [JsonPropertyOrder(10)] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? SpanId { get; init; } + + [JsonPropertyName("scope")] + [JsonPropertyOrder(11)] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public OrchestratorEventScope? Scope { get; init; } + + [JsonPropertyName("payload")] + [JsonPropertyOrder(12)] + public OrchestratorEventPayload Payload { get; init; } = default!; + + [JsonPropertyName("attributes")] + [JsonPropertyOrder(13)] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public ImmutableSortedDictionary<string, string>? Attributes { get; init; } +} + +internal sealed record OrchestratorEventScope +{ + [JsonPropertyName("namespace")] + [JsonPropertyOrder(0)] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? 
Namespace { get; init; } + + [JsonPropertyName("repo")] + [JsonPropertyOrder(1)] + public string Repo { get; init; } = string.Empty; + + [JsonPropertyName("digest")] + [JsonPropertyOrder(2)] + public string Digest { get; init; } = string.Empty; + + [JsonPropertyName("component")] + [JsonPropertyOrder(3)] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Component { get; init; } + + [JsonPropertyName("image")] + [JsonPropertyOrder(4)] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Image { get; init; } +} + +internal abstract record OrchestratorEventPayload; + +internal sealed record ReportReadyEventPayload : OrchestratorEventPayload +{ + [JsonPropertyName("reportId")] + [JsonPropertyOrder(0)] + public string ReportId { get; init; } = string.Empty; + + [JsonPropertyName("scanId")] + [JsonPropertyOrder(1)] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? ScanId { get; init; } + + [JsonPropertyName("imageDigest")] + [JsonPropertyOrder(2)] + public string ImageDigest { get; init; } = string.Empty; + + [JsonPropertyName("generatedAt")] + [JsonPropertyOrder(3)] + public DateTimeOffset GeneratedAt { get; init; } + + [JsonPropertyName("verdict")] + [JsonPropertyOrder(4)] + public string Verdict { get; init; } = string.Empty; + + [JsonPropertyName("summary")] + [JsonPropertyOrder(5)] + public ReportSummaryDto Summary { get; init; } = new(); + + [JsonPropertyName("delta")] + [JsonPropertyOrder(6)] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public ReportDeltaPayload? Delta { get; init; } + + [JsonPropertyName("quietedFindingCount")] + [JsonPropertyOrder(7)] + public int QuietedFindingCount { get; init; } + + [JsonPropertyName("policy")] + [JsonPropertyOrder(8)] + public ReportPolicyDto Policy { get; init; } = new(); + + [JsonPropertyName("links")] + [JsonPropertyOrder(9)] + public ReportLinksPayload Links { get; init; } = new(); + + [JsonPropertyName("dsse")] + [JsonPropertyOrder(10)] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public DsseEnvelopeDto? Dsse { get; init; } + + [JsonPropertyName("report")] + [JsonPropertyOrder(11)] + public ReportDocumentDto Report { get; init; } = new(); +} + +internal sealed record ScanCompletedEventPayload : OrchestratorEventPayload +{ + [JsonPropertyName("reportId")] + [JsonPropertyOrder(0)] + public string ReportId { get; init; } = string.Empty; + + [JsonPropertyName("scanId")] + [JsonPropertyOrder(1)] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? ScanId { get; init; } + + [JsonPropertyName("imageDigest")] + [JsonPropertyOrder(2)] + public string ImageDigest { get; init; } = string.Empty; + + [JsonPropertyName("verdict")] + [JsonPropertyOrder(3)] + public string Verdict { get; init; } = string.Empty; + + [JsonPropertyName("summary")] + [JsonPropertyOrder(4)] + public ReportSummaryDto Summary { get; init; } = new(); + + [JsonPropertyName("delta")] + [JsonPropertyOrder(5)] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public ReportDeltaPayload? 
Delta { get; init; } + + [JsonPropertyName("policy")] + [JsonPropertyOrder(6)] + public ReportPolicyDto Policy { get; init; } = new(); + + [JsonPropertyName("findings")] + [JsonPropertyOrder(7)] + public IReadOnlyList<FindingSummaryPayload> Findings { get; init; } = Array.Empty<FindingSummaryPayload>(); + + [JsonPropertyName("links")] + [JsonPropertyOrder(8)] + public ReportLinksPayload Links { get; init; } = new(); + + [JsonPropertyName("dsse")] + [JsonPropertyOrder(9)] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public DsseEnvelopeDto? Dsse { get; init; } + + [JsonPropertyName("report")] + [JsonPropertyOrder(10)] + public ReportDocumentDto Report { get; init; } = new(); +} + +internal sealed record ReportDeltaPayload +{ + [JsonPropertyName("newCritical")] + [JsonPropertyOrder(0)] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public int? NewCritical { get; init; } + + [JsonPropertyName("newHigh")] + [JsonPropertyOrder(1)] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public int? NewHigh { get; init; } + + [JsonPropertyName("kev")] + [JsonPropertyOrder(2)] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public IReadOnlyList<string>? Kev { get; init; } +} + +internal sealed record ReportLinksPayload +{ + [JsonPropertyName("ui")] + [JsonPropertyOrder(0)] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Ui { get; init; } + + [JsonPropertyName("report")] + [JsonPropertyOrder(1)] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Report { get; init; } + + [JsonPropertyName("policy")] + [JsonPropertyOrder(2)] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Policy { get; init; } + + [JsonPropertyName("attestation")] + [JsonPropertyOrder(3)] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Attestation { get; init; } +} + +internal sealed record FindingSummaryPayload +{ + [JsonPropertyName("id")] + [JsonPropertyOrder(0)] + public string Id { get; init; } = string.Empty; + + [JsonPropertyName("severity")] + [JsonPropertyOrder(1)] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Severity { get; init; } + + [JsonPropertyName("cve")] + [JsonPropertyOrder(2)] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Cve { get; init; } + + [JsonPropertyName("purl")] + [JsonPropertyOrder(3)] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Purl { get; init; } + + [JsonPropertyName("reachability")] + [JsonPropertyOrder(4)] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? 
Reachability { get; init; } +} diff --git a/src/StellaOps.Scanner.WebService/Contracts/PolicyDiagnosticsContracts.cs b/src/Scanner/StellaOps.Scanner.WebService/Contracts/PolicyDiagnosticsContracts.cs similarity index 100% rename from src/StellaOps.Scanner.WebService/Contracts/PolicyDiagnosticsContracts.cs rename to src/Scanner/StellaOps.Scanner.WebService/Contracts/PolicyDiagnosticsContracts.cs diff --git a/src/StellaOps.Scanner.WebService/Contracts/PolicyPreviewContracts.cs b/src/Scanner/StellaOps.Scanner.WebService/Contracts/PolicyPreviewContracts.cs similarity index 100% rename from src/StellaOps.Scanner.WebService/Contracts/PolicyPreviewContracts.cs rename to src/Scanner/StellaOps.Scanner.WebService/Contracts/PolicyPreviewContracts.cs diff --git a/src/StellaOps.Scanner.WebService/Contracts/ReportContracts.cs b/src/Scanner/StellaOps.Scanner.WebService/Contracts/ReportContracts.cs similarity index 100% rename from src/StellaOps.Scanner.WebService/Contracts/ReportContracts.cs rename to src/Scanner/StellaOps.Scanner.WebService/Contracts/ReportContracts.cs diff --git a/src/StellaOps.Scanner.WebService/Contracts/RuntimeEventsContracts.cs b/src/Scanner/StellaOps.Scanner.WebService/Contracts/RuntimeEventsContracts.cs similarity index 96% rename from src/StellaOps.Scanner.WebService/Contracts/RuntimeEventsContracts.cs rename to src/Scanner/StellaOps.Scanner.WebService/Contracts/RuntimeEventsContracts.cs index 155f8978..5c698d09 100644 --- a/src/StellaOps.Scanner.WebService/Contracts/RuntimeEventsContracts.cs +++ b/src/Scanner/StellaOps.Scanner.WebService/Contracts/RuntimeEventsContracts.cs @@ -1,22 +1,22 @@ -using System.Text.Json.Serialization; -using StellaOps.Zastava.Core.Contracts; - -namespace StellaOps.Scanner.WebService.Contracts; - -public sealed record RuntimeEventsIngestRequestDto -{ - [JsonPropertyName("batchId")] - public string? BatchId { get; init; } - - [JsonPropertyName("events")] - public IReadOnlyList<RuntimeEventEnvelope> Events { get; init; } = Array.Empty<RuntimeEventEnvelope>(); -} - -public sealed record RuntimeEventsIngestResponseDto -{ - [JsonPropertyName("accepted")] - public int Accepted { get; init; } - - [JsonPropertyName("duplicates")] - public int Duplicates { get; init; } -} +using System.Text.Json.Serialization; +using StellaOps.Zastava.Core.Contracts; + +namespace StellaOps.Scanner.WebService.Contracts; + +public sealed record RuntimeEventsIngestRequestDto +{ + [JsonPropertyName("batchId")] + public string? 
BatchId { get; init; } + + [JsonPropertyName("events")] + public IReadOnlyList<RuntimeEventEnvelope> Events { get; init; } = Array.Empty<RuntimeEventEnvelope>(); +} + +public sealed record RuntimeEventsIngestResponseDto +{ + [JsonPropertyName("accepted")] + public int Accepted { get; init; } + + [JsonPropertyName("duplicates")] + public int Duplicates { get; init; } +} diff --git a/src/StellaOps.Scanner.WebService/Contracts/RuntimePolicyContracts.cs b/src/Scanner/StellaOps.Scanner.WebService/Contracts/RuntimePolicyContracts.cs similarity index 97% rename from src/StellaOps.Scanner.WebService/Contracts/RuntimePolicyContracts.cs rename to src/Scanner/StellaOps.Scanner.WebService/Contracts/RuntimePolicyContracts.cs index 2a2cfe9c..ba647546 100644 --- a/src/StellaOps.Scanner.WebService/Contracts/RuntimePolicyContracts.cs +++ b/src/Scanner/StellaOps.Scanner.WebService/Contracts/RuntimePolicyContracts.cs @@ -1,91 +1,91 @@ -using System.Collections.Generic; -using System.Text.Json.Serialization; - -namespace StellaOps.Scanner.WebService.Contracts; - -public sealed record RuntimePolicyRequestDto -{ - [JsonPropertyName("namespace")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? Namespace { get; init; } - - [JsonPropertyName("labels")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public IDictionary<string, string>? Labels { get; init; } - - [JsonPropertyName("images")] - public IReadOnlyList<string> Images { get; init; } = Array.Empty<string>(); -} - -public sealed record RuntimePolicyResponseDto -{ - [JsonPropertyName("ttlSeconds")] - public int TtlSeconds { get; init; } - - [JsonPropertyName("expiresAtUtc")] - public DateTimeOffset ExpiresAtUtc { get; init; } - - [JsonPropertyName("policyRevision")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? PolicyRevision { get; init; } - - [JsonPropertyName("results")] - public IReadOnlyDictionary<string, RuntimePolicyImageResponseDto> Results { get; init; } = new Dictionary<string, RuntimePolicyImageResponseDto>(StringComparer.Ordinal); -} - -public sealed record RuntimePolicyImageResponseDto -{ - [JsonPropertyName("policyVerdict")] - public string PolicyVerdict { get; init; } = "unknown"; - - [JsonPropertyName("signed")] - public bool Signed { get; init; } - - [JsonPropertyName("hasSbomReferrers")] - public bool HasSbomReferrers { get; init; } - - [JsonPropertyName("hasSbom")] - public bool HasSbomLegacy { get; init; } - - [JsonPropertyName("reasons")] - public IReadOnlyList<string> Reasons { get; init; } = Array.Empty<string>(); - - [JsonPropertyName("rekor")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public RuntimePolicyRekorDto? Rekor { get; init; } - - [JsonPropertyName("confidence")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public double? Confidence { get; init; } - - [JsonPropertyName("quieted")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public bool? Quieted { get; init; } - - [JsonPropertyName("quietedBy")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? QuietedBy { get; init; } - - [JsonPropertyName("metadata")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? Metadata { get; init; } - - [JsonPropertyName("buildIds")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public IReadOnlyList<string>? 
BuildIds { get; init; } -} - -public sealed record RuntimePolicyRekorDto -{ - [JsonPropertyName("uuid")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? Uuid { get; init; } - - [JsonPropertyName("url")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? Url { get; init; } - - [JsonPropertyName("verified")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public bool? Verified { get; init; } -} +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace StellaOps.Scanner.WebService.Contracts; + +public sealed record RuntimePolicyRequestDto +{ + [JsonPropertyName("namespace")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Namespace { get; init; } + + [JsonPropertyName("labels")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public IDictionary<string, string>? Labels { get; init; } + + [JsonPropertyName("images")] + public IReadOnlyList<string> Images { get; init; } = Array.Empty<string>(); +} + +public sealed record RuntimePolicyResponseDto +{ + [JsonPropertyName("ttlSeconds")] + public int TtlSeconds { get; init; } + + [JsonPropertyName("expiresAtUtc")] + public DateTimeOffset ExpiresAtUtc { get; init; } + + [JsonPropertyName("policyRevision")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? PolicyRevision { get; init; } + + [JsonPropertyName("results")] + public IReadOnlyDictionary<string, RuntimePolicyImageResponseDto> Results { get; init; } = new Dictionary<string, RuntimePolicyImageResponseDto>(StringComparer.Ordinal); +} + +public sealed record RuntimePolicyImageResponseDto +{ + [JsonPropertyName("policyVerdict")] + public string PolicyVerdict { get; init; } = "unknown"; + + [JsonPropertyName("signed")] + public bool Signed { get; init; } + + [JsonPropertyName("hasSbomReferrers")] + public bool HasSbomReferrers { get; init; } + + [JsonPropertyName("hasSbom")] + public bool HasSbomLegacy { get; init; } + + [JsonPropertyName("reasons")] + public IReadOnlyList<string> Reasons { get; init; } = Array.Empty<string>(); + + [JsonPropertyName("rekor")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public RuntimePolicyRekorDto? Rekor { get; init; } + + [JsonPropertyName("confidence")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public double? Confidence { get; init; } + + [JsonPropertyName("quieted")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public bool? Quieted { get; init; } + + [JsonPropertyName("quietedBy")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? QuietedBy { get; init; } + + [JsonPropertyName("metadata")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Metadata { get; init; } + + [JsonPropertyName("buildIds")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public IReadOnlyList<string>? BuildIds { get; init; } +} + +public sealed record RuntimePolicyRekorDto +{ + [JsonPropertyName("uuid")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Uuid { get; init; } + + [JsonPropertyName("url")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Url { get; init; } + + [JsonPropertyName("verified")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public bool? 
Verified { get; init; } +} diff --git a/src/StellaOps.Scanner.WebService/Contracts/ScanStatusResponse.cs b/src/Scanner/StellaOps.Scanner.WebService/Contracts/ScanStatusResponse.cs similarity index 100% rename from src/StellaOps.Scanner.WebService/Contracts/ScanStatusResponse.cs rename to src/Scanner/StellaOps.Scanner.WebService/Contracts/ScanStatusResponse.cs diff --git a/src/StellaOps.Scanner.WebService/Contracts/ScanSubmitRequest.cs b/src/Scanner/StellaOps.Scanner.WebService/Contracts/ScanSubmitRequest.cs similarity index 100% rename from src/StellaOps.Scanner.WebService/Contracts/ScanSubmitRequest.cs rename to src/Scanner/StellaOps.Scanner.WebService/Contracts/ScanSubmitRequest.cs diff --git a/src/StellaOps.Scanner.WebService/Contracts/ScanSubmitResponse.cs b/src/Scanner/StellaOps.Scanner.WebService/Contracts/ScanSubmitResponse.cs similarity index 100% rename from src/StellaOps.Scanner.WebService/Contracts/ScanSubmitResponse.cs rename to src/Scanner/StellaOps.Scanner.WebService/Contracts/ScanSubmitResponse.cs diff --git a/src/StellaOps.Scanner.WebService/Diagnostics/ServiceStatus.cs b/src/Scanner/StellaOps.Scanner.WebService/Diagnostics/ServiceStatus.cs similarity index 100% rename from src/StellaOps.Scanner.WebService/Diagnostics/ServiceStatus.cs rename to src/Scanner/StellaOps.Scanner.WebService/Diagnostics/ServiceStatus.cs diff --git a/src/StellaOps.Scanner.WebService/Domain/ScanId.cs b/src/Scanner/StellaOps.Scanner.WebService/Domain/ScanId.cs similarity index 100% rename from src/StellaOps.Scanner.WebService/Domain/ScanId.cs rename to src/Scanner/StellaOps.Scanner.WebService/Domain/ScanId.cs diff --git a/src/StellaOps.Scanner.WebService/Domain/ScanProgressEvent.cs b/src/Scanner/StellaOps.Scanner.WebService/Domain/ScanProgressEvent.cs similarity index 100% rename from src/StellaOps.Scanner.WebService/Domain/ScanProgressEvent.cs rename to src/Scanner/StellaOps.Scanner.WebService/Domain/ScanProgressEvent.cs diff --git a/src/StellaOps.Scanner.WebService/Domain/ScanSnapshot.cs b/src/Scanner/StellaOps.Scanner.WebService/Domain/ScanSnapshot.cs similarity index 100% rename from src/StellaOps.Scanner.WebService/Domain/ScanSnapshot.cs rename to src/Scanner/StellaOps.Scanner.WebService/Domain/ScanSnapshot.cs diff --git a/src/StellaOps.Scanner.WebService/Domain/ScanStatus.cs b/src/Scanner/StellaOps.Scanner.WebService/Domain/ScanStatus.cs similarity index 100% rename from src/StellaOps.Scanner.WebService/Domain/ScanStatus.cs rename to src/Scanner/StellaOps.Scanner.WebService/Domain/ScanStatus.cs diff --git a/src/StellaOps.Scanner.WebService/Domain/ScanSubmission.cs b/src/Scanner/StellaOps.Scanner.WebService/Domain/ScanSubmission.cs similarity index 100% rename from src/StellaOps.Scanner.WebService/Domain/ScanSubmission.cs rename to src/Scanner/StellaOps.Scanner.WebService/Domain/ScanSubmission.cs diff --git a/src/StellaOps.Scanner.WebService/Domain/ScanTarget.cs b/src/Scanner/StellaOps.Scanner.WebService/Domain/ScanTarget.cs similarity index 100% rename from src/StellaOps.Scanner.WebService/Domain/ScanTarget.cs rename to src/Scanner/StellaOps.Scanner.WebService/Domain/ScanTarget.cs diff --git a/src/StellaOps.Scanner.WebService/Endpoints/HealthEndpoints.cs b/src/Scanner/StellaOps.Scanner.WebService/Endpoints/HealthEndpoints.cs similarity index 100% rename from src/StellaOps.Scanner.WebService/Endpoints/HealthEndpoints.cs rename to src/Scanner/StellaOps.Scanner.WebService/Endpoints/HealthEndpoints.cs diff --git a/src/StellaOps.Scanner.WebService/Endpoints/PolicyEndpoints.cs 
b/src/Scanner/StellaOps.Scanner.WebService/Endpoints/PolicyEndpoints.cs similarity index 100% rename from src/StellaOps.Scanner.WebService/Endpoints/PolicyEndpoints.cs rename to src/Scanner/StellaOps.Scanner.WebService/Endpoints/PolicyEndpoints.cs diff --git a/src/StellaOps.Scanner.WebService/Endpoints/ReportEndpoints.cs b/src/Scanner/StellaOps.Scanner.WebService/Endpoints/ReportEndpoints.cs similarity index 100% rename from src/StellaOps.Scanner.WebService/Endpoints/ReportEndpoints.cs rename to src/Scanner/StellaOps.Scanner.WebService/Endpoints/ReportEndpoints.cs diff --git a/src/StellaOps.Scanner.WebService/Endpoints/RuntimeEndpoints.cs b/src/Scanner/StellaOps.Scanner.WebService/Endpoints/RuntimeEndpoints.cs similarity index 97% rename from src/StellaOps.Scanner.WebService/Endpoints/RuntimeEndpoints.cs rename to src/Scanner/StellaOps.Scanner.WebService/Endpoints/RuntimeEndpoints.cs index 06b4ba26..53d2ffe0 100644 --- a/src/StellaOps.Scanner.WebService/Endpoints/RuntimeEndpoints.cs +++ b/src/Scanner/StellaOps.Scanner.WebService/Endpoints/RuntimeEndpoints.cs @@ -1,253 +1,253 @@ -using System.Collections.Generic; -using System.Globalization; -using System.Text; -using System.Text.Json; -using System.Text.Json.Serialization; -using Microsoft.AspNetCore.Http; -using Microsoft.AspNetCore.Routing; -using Microsoft.Extensions.Options; -using StellaOps.Scanner.WebService.Constants; -using StellaOps.Scanner.WebService.Contracts; -using StellaOps.Scanner.WebService.Infrastructure; -using StellaOps.Scanner.WebService.Options; -using StellaOps.Scanner.WebService.Security; -using StellaOps.Scanner.WebService.Services; -using StellaOps.Zastava.Core.Contracts; - -namespace StellaOps.Scanner.WebService.Endpoints; - -internal static class RuntimeEndpoints -{ - private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) - { - DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull - }; - - public static void MapRuntimeEndpoints(this RouteGroupBuilder apiGroup, string runtimeSegment) - { - ArgumentNullException.ThrowIfNull(apiGroup); - - var runtime = apiGroup - .MapGroup(NormalizeSegment(runtimeSegment)) - .WithTags("Runtime"); - - runtime.MapPost("/events", HandleRuntimeEventsAsync) - .WithName("scanner.runtime.events.ingest") - .Produces<RuntimeEventsIngestResponseDto>(StatusCodes.Status202Accepted) - .Produces(StatusCodes.Status400BadRequest) - .Produces(StatusCodes.Status429TooManyRequests) - .RequireAuthorization(ScannerPolicies.RuntimeIngest); - } - - private static async Task<IResult> HandleRuntimeEventsAsync( - RuntimeEventsIngestRequestDto request, - IRuntimeEventIngestionService ingestionService, - IOptions<ScannerWebServiceOptions> options, - HttpContext context, - CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(request); - ArgumentNullException.ThrowIfNull(ingestionService); - ArgumentNullException.ThrowIfNull(options); - - var runtimeOptions = options.Value.Runtime ?? 
new ScannerWebServiceOptions.RuntimeOptions(); - var validationError = ValidateRequest(request, runtimeOptions, context, out var envelopes); - if (validationError is { } problem) - { - return problem; - } - - var result = await ingestionService.IngestAsync(envelopes, request.BatchId, cancellationToken).ConfigureAwait(false); - if (result.IsPayloadTooLarge) - { - var extensions = new Dictionary<string, object?> - { - ["payloadBytes"] = result.PayloadBytes, - ["maxPayloadBytes"] = result.PayloadLimit - }; - - return ProblemResultFactory.Create( - context, - ProblemTypes.Validation, - "Runtime event batch too large", - StatusCodes.Status400BadRequest, - detail: "Runtime batch payload exceeds configured budget.", - extensions: extensions); - } - - if (result.IsRateLimited) - { - var retryAfterSeconds = Math.Max(1, (int)Math.Ceiling(result.RetryAfter.TotalSeconds)); - context.Response.Headers.RetryAfter = retryAfterSeconds.ToString(CultureInfo.InvariantCulture); - - var extensions = new Dictionary<string, object?> - { - ["scope"] = result.RateLimitedScope, - ["key"] = result.RateLimitedKey, - ["retryAfterSeconds"] = retryAfterSeconds - }; - - return ProblemResultFactory.Create( - context, - ProblemTypes.RateLimited, - "Runtime ingestion rate limited", - StatusCodes.Status429TooManyRequests, - detail: "Runtime ingestion exceeded configured rate limits.", - extensions: extensions); - } - - var payload = new RuntimeEventsIngestResponseDto - { - Accepted = result.Accepted, - Duplicates = result.Duplicates - }; - - return Json(payload, StatusCodes.Status202Accepted); - } - - private static IResult? ValidateRequest( - RuntimeEventsIngestRequestDto request, - ScannerWebServiceOptions.RuntimeOptions runtimeOptions, - HttpContext context, - out IReadOnlyList<RuntimeEventEnvelope> envelopes) - { - envelopes = request.Events ?? 
Array.Empty<RuntimeEventEnvelope>(); - if (envelopes.Count == 0) - { - return ProblemResultFactory.Create( - context, - ProblemTypes.Validation, - "Invalid runtime ingest request", - StatusCodes.Status400BadRequest, - detail: "events array must include at least one item."); - } - - if (envelopes.Count > runtimeOptions.MaxBatchSize) - { - var extensions = new Dictionary<string, object?> - { - ["maxBatchSize"] = runtimeOptions.MaxBatchSize, - ["eventCount"] = envelopes.Count - }; - - return ProblemResultFactory.Create( - context, - ProblemTypes.Validation, - "Invalid runtime ingest request", - StatusCodes.Status400BadRequest, - detail: "events array exceeds allowed batch size.", - extensions: extensions); - } - - var seenEventIds = new HashSet<string>(StringComparer.Ordinal); - for (var i = 0; i < envelopes.Count; i++) - { - var envelope = envelopes[i]; - if (envelope is null) - { - return ProblemResultFactory.Create( - context, - ProblemTypes.Validation, - "Invalid runtime ingest request", - StatusCodes.Status400BadRequest, - detail: $"events[{i}] must not be null."); - } - - if (!envelope.IsSupported()) - { - var extensions = new Dictionary<string, object?> - { - ["schemaVersion"] = envelope.SchemaVersion - }; - - return ProblemResultFactory.Create( - context, - ProblemTypes.Validation, - "Unsupported runtime schema version", - StatusCodes.Status400BadRequest, - detail: "Runtime event schemaVersion is not supported.", - extensions: extensions); - } - - var runtimeEvent = envelope.Event; - if (runtimeEvent is null) - { - return ProblemResultFactory.Create( - context, - ProblemTypes.Validation, - "Invalid runtime ingest request", - StatusCodes.Status400BadRequest, - detail: $"events[{i}].event must not be null."); - } - - if (string.IsNullOrWhiteSpace(runtimeEvent.EventId)) - { - return ProblemResultFactory.Create( - context, - ProblemTypes.Validation, - "Invalid runtime ingest request", - StatusCodes.Status400BadRequest, - detail: $"events[{i}].eventId is required."); - } - - if (!seenEventIds.Add(runtimeEvent.EventId)) - { - return ProblemResultFactory.Create( - context, - ProblemTypes.Validation, - "Invalid runtime ingest request", - StatusCodes.Status400BadRequest, - detail: $"Duplicate eventId detected within batch ('{runtimeEvent.EventId}')."); - } - - if (string.IsNullOrWhiteSpace(runtimeEvent.Tenant)) - { - return ProblemResultFactory.Create( - context, - ProblemTypes.Validation, - "Invalid runtime ingest request", - StatusCodes.Status400BadRequest, - detail: $"events[{i}].tenant is required."); - } - - if (string.IsNullOrWhiteSpace(runtimeEvent.Node)) - { - return ProblemResultFactory.Create( - context, - ProblemTypes.Validation, - "Invalid runtime ingest request", - StatusCodes.Status400BadRequest, - detail: $"events[{i}].node is required."); - } - - if (runtimeEvent.Workload is null) - { - return ProblemResultFactory.Create( - context, - ProblemTypes.Validation, - "Invalid runtime ingest request", - StatusCodes.Status400BadRequest, - detail: $"events[{i}].workload is required."); - } - } - - return null; - } - - private static string NormalizeSegment(string segment) - { - if (string.IsNullOrWhiteSpace(segment)) - { - return "/runtime"; - } - - var trimmed = segment.Trim('/'); - return "/" + trimmed; - } - - private static IResult Json<T>(T value, int statusCode) - { - var payload = JsonSerializer.Serialize(value, SerializerOptions); - return Results.Content(payload, "application/json", Encoding.UTF8, statusCode); - } -} +using System.Collections.Generic; +using 
System.Globalization; +using System.Text; +using System.Text.Json; +using System.Text.Json.Serialization; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Routing; +using Microsoft.Extensions.Options; +using StellaOps.Scanner.WebService.Constants; +using StellaOps.Scanner.WebService.Contracts; +using StellaOps.Scanner.WebService.Infrastructure; +using StellaOps.Scanner.WebService.Options; +using StellaOps.Scanner.WebService.Security; +using StellaOps.Scanner.WebService.Services; +using StellaOps.Zastava.Core.Contracts; + +namespace StellaOps.Scanner.WebService.Endpoints; + +internal static class RuntimeEndpoints +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull + }; + + public static void MapRuntimeEndpoints(this RouteGroupBuilder apiGroup, string runtimeSegment) + { + ArgumentNullException.ThrowIfNull(apiGroup); + + var runtime = apiGroup + .MapGroup(NormalizeSegment(runtimeSegment)) + .WithTags("Runtime"); + + runtime.MapPost("/events", HandleRuntimeEventsAsync) + .WithName("scanner.runtime.events.ingest") + .Produces<RuntimeEventsIngestResponseDto>(StatusCodes.Status202Accepted) + .Produces(StatusCodes.Status400BadRequest) + .Produces(StatusCodes.Status429TooManyRequests) + .RequireAuthorization(ScannerPolicies.RuntimeIngest); + } + + private static async Task<IResult> HandleRuntimeEventsAsync( + RuntimeEventsIngestRequestDto request, + IRuntimeEventIngestionService ingestionService, + IOptions<ScannerWebServiceOptions> options, + HttpContext context, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(request); + ArgumentNullException.ThrowIfNull(ingestionService); + ArgumentNullException.ThrowIfNull(options); + + var runtimeOptions = options.Value.Runtime ?? new ScannerWebServiceOptions.RuntimeOptions(); + var validationError = ValidateRequest(request, runtimeOptions, context, out var envelopes); + if (validationError is { } problem) + { + return problem; + } + + var result = await ingestionService.IngestAsync(envelopes, request.BatchId, cancellationToken).ConfigureAwait(false); + if (result.IsPayloadTooLarge) + { + var extensions = new Dictionary<string, object?> + { + ["payloadBytes"] = result.PayloadBytes, + ["maxPayloadBytes"] = result.PayloadLimit + }; + + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Runtime event batch too large", + StatusCodes.Status400BadRequest, + detail: "Runtime batch payload exceeds configured budget.", + extensions: extensions); + } + + if (result.IsRateLimited) + { + var retryAfterSeconds = Math.Max(1, (int)Math.Ceiling(result.RetryAfter.TotalSeconds)); + context.Response.Headers.RetryAfter = retryAfterSeconds.ToString(CultureInfo.InvariantCulture); + + var extensions = new Dictionary<string, object?> + { + ["scope"] = result.RateLimitedScope, + ["key"] = result.RateLimitedKey, + ["retryAfterSeconds"] = retryAfterSeconds + }; + + return ProblemResultFactory.Create( + context, + ProblemTypes.RateLimited, + "Runtime ingestion rate limited", + StatusCodes.Status429TooManyRequests, + detail: "Runtime ingestion exceeded configured rate limits.", + extensions: extensions); + } + + var payload = new RuntimeEventsIngestResponseDto + { + Accepted = result.Accepted, + Duplicates = result.Duplicates + }; + + return Json(payload, StatusCodes.Status202Accepted); + } + + private static IResult? 
ValidateRequest( + RuntimeEventsIngestRequestDto request, + ScannerWebServiceOptions.RuntimeOptions runtimeOptions, + HttpContext context, + out IReadOnlyList<RuntimeEventEnvelope> envelopes) + { + envelopes = request.Events ?? Array.Empty<RuntimeEventEnvelope>(); + if (envelopes.Count == 0) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Invalid runtime ingest request", + StatusCodes.Status400BadRequest, + detail: "events array must include at least one item."); + } + + if (envelopes.Count > runtimeOptions.MaxBatchSize) + { + var extensions = new Dictionary<string, object?> + { + ["maxBatchSize"] = runtimeOptions.MaxBatchSize, + ["eventCount"] = envelopes.Count + }; + + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Invalid runtime ingest request", + StatusCodes.Status400BadRequest, + detail: "events array exceeds allowed batch size.", + extensions: extensions); + } + + var seenEventIds = new HashSet<string>(StringComparer.Ordinal); + for (var i = 0; i < envelopes.Count; i++) + { + var envelope = envelopes[i]; + if (envelope is null) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Invalid runtime ingest request", + StatusCodes.Status400BadRequest, + detail: $"events[{i}] must not be null."); + } + + if (!envelope.IsSupported()) + { + var extensions = new Dictionary<string, object?> + { + ["schemaVersion"] = envelope.SchemaVersion + }; + + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Unsupported runtime schema version", + StatusCodes.Status400BadRequest, + detail: "Runtime event schemaVersion is not supported.", + extensions: extensions); + } + + var runtimeEvent = envelope.Event; + if (runtimeEvent is null) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Invalid runtime ingest request", + StatusCodes.Status400BadRequest, + detail: $"events[{i}].event must not be null."); + } + + if (string.IsNullOrWhiteSpace(runtimeEvent.EventId)) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Invalid runtime ingest request", + StatusCodes.Status400BadRequest, + detail: $"events[{i}].eventId is required."); + } + + if (!seenEventIds.Add(runtimeEvent.EventId)) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Invalid runtime ingest request", + StatusCodes.Status400BadRequest, + detail: $"Duplicate eventId detected within batch ('{runtimeEvent.EventId}')."); + } + + if (string.IsNullOrWhiteSpace(runtimeEvent.Tenant)) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Invalid runtime ingest request", + StatusCodes.Status400BadRequest, + detail: $"events[{i}].tenant is required."); + } + + if (string.IsNullOrWhiteSpace(runtimeEvent.Node)) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Invalid runtime ingest request", + StatusCodes.Status400BadRequest, + detail: $"events[{i}].node is required."); + } + + if (runtimeEvent.Workload is null) + { + return ProblemResultFactory.Create( + context, + ProblemTypes.Validation, + "Invalid runtime ingest request", + StatusCodes.Status400BadRequest, + detail: $"events[{i}].workload is required."); + } + } + + return null; + } + + private static string NormalizeSegment(string segment) + { + if (string.IsNullOrWhiteSpace(segment)) + { + return "/runtime"; + } + + var trimmed = segment.Trim('/'); + return "/" + trimmed; + } + + private static IResult Json<T>(T value, int 
statusCode) + { + var payload = JsonSerializer.Serialize(value, SerializerOptions); + return Results.Content(payload, "application/json", Encoding.UTF8, statusCode); + } +} diff --git a/src/StellaOps.Scanner.WebService/Endpoints/ScanEndpoints.cs b/src/Scanner/StellaOps.Scanner.WebService/Endpoints/ScanEndpoints.cs similarity index 100% rename from src/StellaOps.Scanner.WebService/Endpoints/ScanEndpoints.cs rename to src/Scanner/StellaOps.Scanner.WebService/Endpoints/ScanEndpoints.cs diff --git a/src/StellaOps.Scanner.WebService/Extensions/ConfigurationExtensions.cs b/src/Scanner/StellaOps.Scanner.WebService/Extensions/ConfigurationExtensions.cs similarity index 100% rename from src/StellaOps.Scanner.WebService/Extensions/ConfigurationExtensions.cs rename to src/Scanner/StellaOps.Scanner.WebService/Extensions/ConfigurationExtensions.cs diff --git a/src/StellaOps.Scanner.WebService/Extensions/OpenApiRegistrationExtensions.cs b/src/Scanner/StellaOps.Scanner.WebService/Extensions/OpenApiRegistrationExtensions.cs similarity index 100% rename from src/StellaOps.Scanner.WebService/Extensions/OpenApiRegistrationExtensions.cs rename to src/Scanner/StellaOps.Scanner.WebService/Extensions/OpenApiRegistrationExtensions.cs diff --git a/src/StellaOps.Scanner.WebService/Hosting/ScannerPluginHostFactory.cs b/src/Scanner/StellaOps.Scanner.WebService/Hosting/ScannerPluginHostFactory.cs similarity index 100% rename from src/StellaOps.Scanner.WebService/Hosting/ScannerPluginHostFactory.cs rename to src/Scanner/StellaOps.Scanner.WebService/Hosting/ScannerPluginHostFactory.cs diff --git a/src/StellaOps.Scanner.WebService/Infrastructure/ProblemResultFactory.cs b/src/Scanner/StellaOps.Scanner.WebService/Infrastructure/ProblemResultFactory.cs similarity index 100% rename from src/StellaOps.Scanner.WebService/Infrastructure/ProblemResultFactory.cs rename to src/Scanner/StellaOps.Scanner.WebService/Infrastructure/ProblemResultFactory.cs diff --git a/src/StellaOps.Scanner.WebService/Options/ScannerWebServiceOptions.cs b/src/Scanner/StellaOps.Scanner.WebService/Options/ScannerWebServiceOptions.cs similarity index 100% rename from src/StellaOps.Scanner.WebService/Options/ScannerWebServiceOptions.cs rename to src/Scanner/StellaOps.Scanner.WebService/Options/ScannerWebServiceOptions.cs diff --git a/src/StellaOps.Scanner.WebService/Options/ScannerWebServiceOptionsPostConfigure.cs b/src/Scanner/StellaOps.Scanner.WebService/Options/ScannerWebServiceOptionsPostConfigure.cs similarity index 100% rename from src/StellaOps.Scanner.WebService/Options/ScannerWebServiceOptionsPostConfigure.cs rename to src/Scanner/StellaOps.Scanner.WebService/Options/ScannerWebServiceOptionsPostConfigure.cs diff --git a/src/StellaOps.Scanner.WebService/Options/ScannerWebServiceOptionsValidator.cs b/src/Scanner/StellaOps.Scanner.WebService/Options/ScannerWebServiceOptionsValidator.cs similarity index 100% rename from src/StellaOps.Scanner.WebService/Options/ScannerWebServiceOptionsValidator.cs rename to src/Scanner/StellaOps.Scanner.WebService/Options/ScannerWebServiceOptionsValidator.cs diff --git a/src/StellaOps.Scanner.WebService/Program.cs b/src/Scanner/StellaOps.Scanner.WebService/Program.cs similarity index 100% rename from src/StellaOps.Scanner.WebService/Program.cs rename to src/Scanner/StellaOps.Scanner.WebService/Program.cs diff --git a/src/StellaOps.Scanner.WebService/Security/AnonymousAuthenticationHandler.cs b/src/Scanner/StellaOps.Scanner.WebService/Security/AnonymousAuthenticationHandler.cs similarity index 100% rename from 
src/StellaOps.Scanner.WebService/Security/AnonymousAuthenticationHandler.cs rename to src/Scanner/StellaOps.Scanner.WebService/Security/AnonymousAuthenticationHandler.cs diff --git a/src/StellaOps.Scanner.WebService/Security/ScannerAuthorityScopes.cs b/src/Scanner/StellaOps.Scanner.WebService/Security/ScannerAuthorityScopes.cs similarity index 100% rename from src/StellaOps.Scanner.WebService/Security/ScannerAuthorityScopes.cs rename to src/Scanner/StellaOps.Scanner.WebService/Security/ScannerAuthorityScopes.cs diff --git a/src/StellaOps.Scanner.WebService/Security/ScannerPolicies.cs b/src/Scanner/StellaOps.Scanner.WebService/Security/ScannerPolicies.cs similarity index 100% rename from src/StellaOps.Scanner.WebService/Security/ScannerPolicies.cs rename to src/Scanner/StellaOps.Scanner.WebService/Security/ScannerPolicies.cs diff --git a/src/StellaOps.Scanner.WebService/Serialization/OrchestratorEventSerializer.cs b/src/Scanner/StellaOps.Scanner.WebService/Serialization/OrchestratorEventSerializer.cs similarity index 96% rename from src/StellaOps.Scanner.WebService/Serialization/OrchestratorEventSerializer.cs rename to src/Scanner/StellaOps.Scanner.WebService/Serialization/OrchestratorEventSerializer.cs index 282a2cef..48b13f1c 100644 --- a/src/StellaOps.Scanner.WebService/Serialization/OrchestratorEventSerializer.cs +++ b/src/Scanner/StellaOps.Scanner.WebService/Serialization/OrchestratorEventSerializer.cs @@ -1,198 +1,198 @@ -using System; -using System.Collections.Immutable; -using System.Linq; -using System.Text.Encodings.Web; -using System.Text.Json; -using System.Text.Json.Serialization; -using System.Text.Json.Serialization.Metadata; -using StellaOps.Scanner.WebService.Contracts; - -namespace StellaOps.Scanner.WebService.Serialization; - -internal static class OrchestratorEventSerializer -{ - private static readonly JsonSerializerOptions CompactOptions = CreateOptions(writeIndented: false); - private static readonly JsonSerializerOptions PrettyOptions = CreateOptions(writeIndented: true); - - public static string Serialize(OrchestratorEvent @event) - => JsonSerializer.Serialize(@event, CompactOptions); - - public static string SerializeIndented(OrchestratorEvent @event) - => JsonSerializer.Serialize(@event, PrettyOptions); - - private static JsonSerializerOptions CreateOptions(bool writeIndented) - { - var options = new JsonSerializerOptions(JsonSerializerDefaults.Web) - { - WriteIndented = writeIndented, - DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, - Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping - }; - - var baselineResolver = options.TypeInfoResolver ?? 
new DefaultJsonTypeInfoResolver(); - options.TypeInfoResolver = new DeterministicTypeInfoResolver(baselineResolver); - return options; - } - - private sealed class DeterministicTypeInfoResolver : IJsonTypeInfoResolver - { - private static readonly ImmutableDictionary<Type, string[]> PropertyOrder = new Dictionary<Type, string[]> - { - [typeof(OrchestratorEvent)] = new[] - { - "eventId", - "kind", - "version", - "tenant", - "occurredAt", - "recordedAt", - "source", - "idempotencyKey", - "correlationId", - "traceId", - "spanId", - "scope", - "payload", - "attributes" - }, - [typeof(OrchestratorEventScope)] = new[] - { - "namespace", - "repo", - "digest", - "component", - "image" - }, - [typeof(ReportReadyEventPayload)] = new[] - { - "reportId", - "scanId", - "imageDigest", - "generatedAt", - "verdict", - "summary", - "delta", - "quietedFindingCount", - "policy", - "links", - "dsse", - "report" - }, - [typeof(ScanCompletedEventPayload)] = new[] - { - "reportId", - "scanId", - "imageDigest", - "verdict", - "summary", - "delta", - "policy", - "findings", - "links", - "dsse", - "report" - }, - [typeof(ReportDeltaPayload)] = new[] - { - "newCritical", - "newHigh", - "kev" - }, - [typeof(ReportLinksPayload)] = new[] - { - "ui", - "report", - "policy", - "attestation" - }, - [typeof(FindingSummaryPayload)] = new[] - { - "id", - "severity", - "cve", - "purl", - "reachability" - }, - [typeof(ReportPolicyDto)] = new[] - { - "revisionId", - "digest" - }, - [typeof(ReportSummaryDto)] = new[] - { - "total", - "blocked", - "warned", - "ignored", - "quieted" - }, - [typeof(ReportDocumentDto)] = new[] - { - "reportId", - "imageDigest", - "generatedAt", - "verdict", - "policy", - "summary", - "verdicts", - "issues" - }, - [typeof(DsseEnvelopeDto)] = new[] - { - "payloadType", - "payload", - "signatures" - }, - [typeof(DsseSignatureDto)] = new[] - { - "keyId", - "algorithm", - "signature" - } - }.ToImmutableDictionary(); - - private readonly IJsonTypeInfoResolver _inner; - - public DeterministicTypeInfoResolver(IJsonTypeInfoResolver inner) - { - _inner = inner ?? throw new ArgumentNullException(nameof(inner)); - } - - public JsonTypeInfo GetTypeInfo(Type type, JsonSerializerOptions options) - { - var info = _inner.GetTypeInfo(type, options) - ?? 
throw new InvalidOperationException($"Unable to resolve JsonTypeInfo for '{type}'."); - - if (info.Kind is JsonTypeInfoKind.Object && info.Properties is { Count: > 1 }) - { - var ordered = info.Properties - .OrderBy(property => GetOrder(type, property.Name)) - .ThenBy(property => property.Name, StringComparer.Ordinal) - .ToArray(); - - info.Properties.Clear(); - foreach (var property in ordered) - { - info.Properties.Add(property); - } - } - - return info; - } - - private static int GetOrder(Type type, string propertyName) - { - if (PropertyOrder.TryGetValue(type, out var order) && Array.IndexOf(order, propertyName) is { } index and >= 0) - { - return index; - } - - if (type.BaseType is not null) - { - return GetOrder(type.BaseType, propertyName); - } - - return int.MaxValue; - } - } -} +using System; +using System.Collections.Immutable; +using System.Linq; +using System.Text.Encodings.Web; +using System.Text.Json; +using System.Text.Json.Serialization; +using System.Text.Json.Serialization.Metadata; +using StellaOps.Scanner.WebService.Contracts; + +namespace StellaOps.Scanner.WebService.Serialization; + +internal static class OrchestratorEventSerializer +{ + private static readonly JsonSerializerOptions CompactOptions = CreateOptions(writeIndented: false); + private static readonly JsonSerializerOptions PrettyOptions = CreateOptions(writeIndented: true); + + public static string Serialize(OrchestratorEvent @event) + => JsonSerializer.Serialize(@event, CompactOptions); + + public static string SerializeIndented(OrchestratorEvent @event) + => JsonSerializer.Serialize(@event, PrettyOptions); + + private static JsonSerializerOptions CreateOptions(bool writeIndented) + { + var options = new JsonSerializerOptions(JsonSerializerDefaults.Web) + { + WriteIndented = writeIndented, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping + }; + + var baselineResolver = options.TypeInfoResolver ?? 
new DefaultJsonTypeInfoResolver(); + options.TypeInfoResolver = new DeterministicTypeInfoResolver(baselineResolver); + return options; + } + + private sealed class DeterministicTypeInfoResolver : IJsonTypeInfoResolver + { + private static readonly ImmutableDictionary<Type, string[]> PropertyOrder = new Dictionary<Type, string[]> + { + [typeof(OrchestratorEvent)] = new[] + { + "eventId", + "kind", + "version", + "tenant", + "occurredAt", + "recordedAt", + "source", + "idempotencyKey", + "correlationId", + "traceId", + "spanId", + "scope", + "payload", + "attributes" + }, + [typeof(OrchestratorEventScope)] = new[] + { + "namespace", + "repo", + "digest", + "component", + "image" + }, + [typeof(ReportReadyEventPayload)] = new[] + { + "reportId", + "scanId", + "imageDigest", + "generatedAt", + "verdict", + "summary", + "delta", + "quietedFindingCount", + "policy", + "links", + "dsse", + "report" + }, + [typeof(ScanCompletedEventPayload)] = new[] + { + "reportId", + "scanId", + "imageDigest", + "verdict", + "summary", + "delta", + "policy", + "findings", + "links", + "dsse", + "report" + }, + [typeof(ReportDeltaPayload)] = new[] + { + "newCritical", + "newHigh", + "kev" + }, + [typeof(ReportLinksPayload)] = new[] + { + "ui", + "report", + "policy", + "attestation" + }, + [typeof(FindingSummaryPayload)] = new[] + { + "id", + "severity", + "cve", + "purl", + "reachability" + }, + [typeof(ReportPolicyDto)] = new[] + { + "revisionId", + "digest" + }, + [typeof(ReportSummaryDto)] = new[] + { + "total", + "blocked", + "warned", + "ignored", + "quieted" + }, + [typeof(ReportDocumentDto)] = new[] + { + "reportId", + "imageDigest", + "generatedAt", + "verdict", + "policy", + "summary", + "verdicts", + "issues" + }, + [typeof(DsseEnvelopeDto)] = new[] + { + "payloadType", + "payload", + "signatures" + }, + [typeof(DsseSignatureDto)] = new[] + { + "keyId", + "algorithm", + "signature" + } + }.ToImmutableDictionary(); + + private readonly IJsonTypeInfoResolver _inner; + + public DeterministicTypeInfoResolver(IJsonTypeInfoResolver inner) + { + _inner = inner ?? throw new ArgumentNullException(nameof(inner)); + } + + public JsonTypeInfo GetTypeInfo(Type type, JsonSerializerOptions options) + { + var info = _inner.GetTypeInfo(type, options) + ?? 
throw new InvalidOperationException($"Unable to resolve JsonTypeInfo for '{type}'."); + + if (info.Kind is JsonTypeInfoKind.Object && info.Properties is { Count: > 1 }) + { + var ordered = info.Properties + .OrderBy(property => GetOrder(type, property.Name)) + .ThenBy(property => property.Name, StringComparer.Ordinal) + .ToArray(); + + info.Properties.Clear(); + foreach (var property in ordered) + { + info.Properties.Add(property); + } + } + + return info; + } + + private static int GetOrder(Type type, string propertyName) + { + if (PropertyOrder.TryGetValue(type, out var order) && Array.IndexOf(order, propertyName) is { } index and >= 0) + { + return index; + } + + if (type.BaseType is not null) + { + return GetOrder(type.BaseType, propertyName); + } + + return int.MaxValue; + } + } +} diff --git a/src/StellaOps.Scanner.WebService/Services/IPlatformEventPublisher.cs b/src/Scanner/StellaOps.Scanner.WebService/Services/IPlatformEventPublisher.cs similarity index 100% rename from src/StellaOps.Scanner.WebService/Services/IPlatformEventPublisher.cs rename to src/Scanner/StellaOps.Scanner.WebService/Services/IPlatformEventPublisher.cs diff --git a/src/StellaOps.Scanner.WebService/Services/IRedisConnectionFactory.cs b/src/Scanner/StellaOps.Scanner.WebService/Services/IRedisConnectionFactory.cs similarity index 97% rename from src/StellaOps.Scanner.WebService/Services/IRedisConnectionFactory.cs rename to src/Scanner/StellaOps.Scanner.WebService/Services/IRedisConnectionFactory.cs index 786780d7..39689962 100644 --- a/src/StellaOps.Scanner.WebService/Services/IRedisConnectionFactory.cs +++ b/src/Scanner/StellaOps.Scanner.WebService/Services/IRedisConnectionFactory.cs @@ -1,13 +1,13 @@ -using System.Threading; -using System.Threading.Tasks; -using StackExchange.Redis; - -namespace StellaOps.Scanner.WebService.Services; - -/// <summary> -/// Abstraction for creating Redis connections so publishers can be tested without real infrastructure. -/// </summary> -internal interface IRedisConnectionFactory -{ - ValueTask<IConnectionMultiplexer> ConnectAsync(ConfigurationOptions options, CancellationToken cancellationToken); -} +using System.Threading; +using System.Threading.Tasks; +using StackExchange.Redis; + +namespace StellaOps.Scanner.WebService.Services; + +/// <summary> +/// Abstraction for creating Redis connections so publishers can be tested without real infrastructure. 
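For context on the note above about testing publishers without real infrastructure: a minimal test double for this abstraction could look like the sketch below. The FakeRedisConnectionFactory name and the injected IConnectionMultiplexer instance (typically a mock) are illustrative assumptions, not part of this change.

    using System.Threading;
    using System.Threading.Tasks;
    using StackExchange.Redis;

    // Hands back a pre-built multiplexer so a publisher under test never dials a real Redis endpoint.
    internal sealed class FakeRedisConnectionFactory : IRedisConnectionFactory
    {
        private readonly IConnectionMultiplexer _connection;

        public FakeRedisConnectionFactory(IConnectionMultiplexer connection)
            => _connection = connection;

        public ValueTask<IConnectionMultiplexer> ConnectAsync(ConfigurationOptions options, CancellationToken cancellationToken)
            => ValueTask.FromResult(_connection);
    }

Registered in place of the production factory, the fake lets the publisher exercise its publish path against the supplied multiplexer.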
+/// </summary> +internal interface IRedisConnectionFactory +{ + ValueTask<IConnectionMultiplexer> ConnectAsync(ConfigurationOptions options, CancellationToken cancellationToken); +} diff --git a/src/StellaOps.Scanner.WebService/Services/IReportEventDispatcher.cs b/src/Scanner/StellaOps.Scanner.WebService/Services/IReportEventDispatcher.cs similarity index 100% rename from src/StellaOps.Scanner.WebService/Services/IReportEventDispatcher.cs rename to src/Scanner/StellaOps.Scanner.WebService/Services/IReportEventDispatcher.cs diff --git a/src/StellaOps.Scanner.WebService/Services/IScanCoordinator.cs b/src/Scanner/StellaOps.Scanner.WebService/Services/IScanCoordinator.cs similarity index 100% rename from src/StellaOps.Scanner.WebService/Services/IScanCoordinator.cs rename to src/Scanner/StellaOps.Scanner.WebService/Services/IScanCoordinator.cs diff --git a/src/StellaOps.Scanner.WebService/Services/InMemoryScanCoordinator.cs b/src/Scanner/StellaOps.Scanner.WebService/Services/InMemoryScanCoordinator.cs similarity index 100% rename from src/StellaOps.Scanner.WebService/Services/InMemoryScanCoordinator.cs rename to src/Scanner/StellaOps.Scanner.WebService/Services/InMemoryScanCoordinator.cs diff --git a/src/StellaOps.Scanner.WebService/Services/NullPlatformEventPublisher.cs b/src/Scanner/StellaOps.Scanner.WebService/Services/NullPlatformEventPublisher.cs similarity index 100% rename from src/StellaOps.Scanner.WebService/Services/NullPlatformEventPublisher.cs rename to src/Scanner/StellaOps.Scanner.WebService/Services/NullPlatformEventPublisher.cs diff --git a/src/StellaOps.Scanner.WebService/Services/PolicyDtoMapper.cs b/src/Scanner/StellaOps.Scanner.WebService/Services/PolicyDtoMapper.cs similarity index 100% rename from src/StellaOps.Scanner.WebService/Services/PolicyDtoMapper.cs rename to src/Scanner/StellaOps.Scanner.WebService/Services/PolicyDtoMapper.cs diff --git a/src/StellaOps.Scanner.WebService/Services/RedisConnectionFactory.cs b/src/Scanner/StellaOps.Scanner.WebService/Services/RedisConnectionFactory.cs similarity index 97% rename from src/StellaOps.Scanner.WebService/Services/RedisConnectionFactory.cs rename to src/Scanner/StellaOps.Scanner.WebService/Services/RedisConnectionFactory.cs index 0636f63d..3ac99a3b 100644 --- a/src/StellaOps.Scanner.WebService/Services/RedisConnectionFactory.cs +++ b/src/Scanner/StellaOps.Scanner.WebService/Services/RedisConnectionFactory.cs @@ -1,19 +1,19 @@ -using System.Threading; -using System.Threading.Tasks; -using StackExchange.Redis; - -namespace StellaOps.Scanner.WebService.Services; - -/// <summary> -/// Production Redis connection factory bridging to <see cref="ConnectionMultiplexer"/>. -/// </summary> -internal sealed class RedisConnectionFactory : IRedisConnectionFactory -{ - public async ValueTask<IConnectionMultiplexer> ConnectAsync(ConfigurationOptions options, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(options); - var connectTask = ConnectionMultiplexer.ConnectAsync(options); - var connection = await connectTask.WaitAsync(cancellationToken).ConfigureAwait(false); - return connection; - } -} +using System.Threading; +using System.Threading.Tasks; +using StackExchange.Redis; + +namespace StellaOps.Scanner.WebService.Services; + +/// <summary> +/// Production Redis connection factory bridging to <see cref="ConnectionMultiplexer"/>. 
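As a usage illustration of the production factory shown just below (the endpoint string and the five-second budget are made-up example values), the caller passes a cancellation token so a hung connect attempt is abandoned instead of blocking the publisher:

    using System;
    using System.Threading;
    using System.Threading.Tasks;
    using StackExchange.Redis;

    internal static class RedisConnectExample
    {
        public static async Task<IConnectionMultiplexer> ConnectWithBudgetAsync(IRedisConnectionFactory factory)
        {
            var options = ConfigurationOptions.Parse("redis:6379");               // hypothetical endpoint
            using var cts = new CancellationTokenSource(TimeSpan.FromSeconds(5)); // example connect budget
            // RedisConnectionFactory wraps ConnectionMultiplexer.ConnectAsync in WaitAsync(cancellationToken),
            // so cancelling this token surfaces as an OperationCanceledException to the caller.
            return await factory.ConnectAsync(options, cts.Token).ConfigureAwait(false);
        }
    }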
+/// </summary> +internal sealed class RedisConnectionFactory : IRedisConnectionFactory +{ + public async ValueTask<IConnectionMultiplexer> ConnectAsync(ConfigurationOptions options, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(options); + var connectTask = ConnectionMultiplexer.ConnectAsync(options); + var connection = await connectTask.WaitAsync(cancellationToken).ConfigureAwait(false); + return connection; + } +} diff --git a/src/StellaOps.Scanner.WebService/Services/RedisPlatformEventPublisher.cs b/src/Scanner/StellaOps.Scanner.WebService/Services/RedisPlatformEventPublisher.cs similarity index 100% rename from src/StellaOps.Scanner.WebService/Services/RedisPlatformEventPublisher.cs rename to src/Scanner/StellaOps.Scanner.WebService/Services/RedisPlatformEventPublisher.cs diff --git a/src/StellaOps.Scanner.WebService/Services/ReportEventDispatcher.cs b/src/Scanner/StellaOps.Scanner.WebService/Services/ReportEventDispatcher.cs similarity index 97% rename from src/StellaOps.Scanner.WebService/Services/ReportEventDispatcher.cs rename to src/Scanner/StellaOps.Scanner.WebService/Services/ReportEventDispatcher.cs index 71f48534..50a9ed2b 100644 --- a/src/StellaOps.Scanner.WebService/Services/ReportEventDispatcher.cs +++ b/src/Scanner/StellaOps.Scanner.WebService/Services/ReportEventDispatcher.cs @@ -1,583 +1,583 @@ -using System; -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Diagnostics; -using System.Linq; -using System.Security.Claims; -using Microsoft.AspNetCore.Http; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Auth.Abstractions; -using StellaOps.Policy; -using StellaOps.Scanner.WebService.Contracts; -using StellaOps.Scanner.WebService.Options; - -namespace StellaOps.Scanner.WebService.Services; - -internal sealed class ReportEventDispatcher : IReportEventDispatcher -{ - private const string DefaultTenant = "default"; - private const string Source = "scanner.webservice"; - - private readonly IPlatformEventPublisher _publisher; - private readonly TimeProvider _timeProvider; - private readonly ILogger<ReportEventDispatcher> _logger; - private readonly string[] _apiBaseSegments; - private readonly string _reportsSegment; - private readonly string _policySegment; - - public ReportEventDispatcher( - IPlatformEventPublisher publisher, - IOptions<ScannerWebServiceOptions> options, - TimeProvider timeProvider, - ILogger<ReportEventDispatcher> logger) - { - _publisher = publisher ?? throw new ArgumentNullException(nameof(publisher)); - if (options is null) - { - throw new ArgumentNullException(nameof(options)); - } - - var apiOptions = options.Value.Api ?? new ScannerWebServiceOptions.ApiOptions(); - _apiBaseSegments = SplitSegments(apiOptions.BasePath); - _reportsSegment = string.IsNullOrWhiteSpace(apiOptions.ReportsSegment) - ? "reports" - : apiOptions.ReportsSegment.Trim('/'); - _policySegment = string.IsNullOrWhiteSpace(apiOptions.PolicySegment) - ? "policy" - : apiOptions.PolicySegment.Trim('/'); - _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); - _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - } - - public async Task PublishAsync( - ReportRequestDto request, - PolicyPreviewResponse preview, - ReportDocumentDto document, - DsseEnvelopeDto? 
envelope, - HttpContext httpContext, - CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(request); - ArgumentNullException.ThrowIfNull(preview); - ArgumentNullException.ThrowIfNull(document); - ArgumentNullException.ThrowIfNull(httpContext); - - cancellationToken.ThrowIfCancellationRequested(); - - var now = _timeProvider.GetUtcNow(); - var occurredAt = document.GeneratedAt == default ? now : document.GeneratedAt; - var tenant = ResolveTenant(httpContext); - var scope = BuildScope(request, document); - var attributes = BuildAttributes(document); - var links = BuildLinks(httpContext, document, envelope); - var correlationId = document.ReportId; - var (traceId, spanId) = ResolveTraceContext(); - - var reportEvent = new OrchestratorEvent - { - EventId = Guid.NewGuid(), - Kind = OrchestratorEventKinds.ScannerReportReady, - Version = 1, - Tenant = tenant, - OccurredAt = occurredAt, - RecordedAt = now, - Source = Source, - IdempotencyKey = BuildIdempotencyKey(OrchestratorEventKinds.ScannerReportReady, tenant, document.ReportId), - CorrelationId = correlationId, - TraceId = traceId, - SpanId = spanId, - Scope = scope, - Attributes = attributes, - Payload = BuildReportReadyPayload(request, preview, document, envelope, links, correlationId) - }; - - await PublishSafelyAsync(reportEvent, document.ReportId, cancellationToken).ConfigureAwait(false); - - var scanCompletedEvent = new OrchestratorEvent - { - EventId = Guid.NewGuid(), - Kind = OrchestratorEventKinds.ScannerScanCompleted, - Version = 1, - Tenant = tenant, - OccurredAt = occurredAt, - RecordedAt = now, - Source = Source, - IdempotencyKey = BuildIdempotencyKey(OrchestratorEventKinds.ScannerScanCompleted, tenant, correlationId), - CorrelationId = correlationId, - TraceId = traceId, - SpanId = spanId, - Scope = scope, - Attributes = attributes, - Payload = BuildScanCompletedPayload(request, preview, document, envelope, links, correlationId) - }; - - await PublishSafelyAsync(scanCompletedEvent, document.ReportId, cancellationToken).ConfigureAwait(false); - } - - private async Task PublishSafelyAsync(OrchestratorEvent @event, string reportId, CancellationToken cancellationToken) - { - try - { - await _publisher.PublishAsync(@event, cancellationToken).ConfigureAwait(false); - } - catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) - { - throw; - } - catch (Exception ex) - { - _logger.LogError( - ex, - "Failed to publish orchestrator event {EventKind} for report {ReportId}.", - @event.Kind, - reportId); - } - } - - private static string ResolveTenant(HttpContext context) - { - var tenant = context.User?.FindFirstValue(StellaOpsClaimTypes.Tenant); - if (!string.IsNullOrWhiteSpace(tenant)) - { - return tenant.Trim(); - } - - if (context.Request.Headers.TryGetValue("X-Stella-Tenant", out var headerTenant)) - { - var headerValue = headerTenant.ToString(); - if (!string.IsNullOrWhiteSpace(headerValue)) - { - return headerValue.Trim(); - } - } - - return DefaultTenant; - } - - private static OrchestratorEventScope BuildScope(ReportRequestDto request, ReportDocumentDto document) - { - var repository = ResolveRepository(request); - var (ns, repo) = SplitRepository(repository); - - var digest = string.IsNullOrWhiteSpace(document.ImageDigest) - ? request.ImageDigest ?? string.Empty - : document.ImageDigest; - - return new OrchestratorEventScope - { - Namespace = ns, - Repo = string.IsNullOrWhiteSpace(repo) ? "(unknown)" : repo, - Digest = string.IsNullOrWhiteSpace(digest) ? 
"(unknown)" : digest - }; - } - - private static ImmutableSortedDictionary<string, string> BuildAttributes(ReportDocumentDto document) - { - var builder = ImmutableSortedDictionary.CreateBuilder<string, string>(StringComparer.Ordinal); - builder["reportId"] = document.ReportId; - builder["verdict"] = document.Verdict; - - if (!string.IsNullOrWhiteSpace(document.Policy.RevisionId)) - { - builder["policyRevisionId"] = document.Policy.RevisionId!; - } - - if (!string.IsNullOrWhiteSpace(document.Policy.Digest)) - { - builder["policyDigest"] = document.Policy.Digest!; - } - - return builder.ToImmutable(); - } - - private static ReportReadyEventPayload BuildReportReadyPayload( - ReportRequestDto request, - PolicyPreviewResponse preview, - ReportDocumentDto document, - DsseEnvelopeDto? envelope, - ReportLinksPayload links, - string correlationId) - { - return new ReportReadyEventPayload - { - ReportId = document.ReportId, - ScanId = correlationId, - ImageDigest = document.ImageDigest, - GeneratedAt = document.GeneratedAt, - Verdict = MapVerdict(document.Verdict), - Summary = document.Summary, - Delta = BuildDelta(preview, request), - QuietedFindingCount = document.Summary.Quieted, - Policy = document.Policy, - Links = links, - Dsse = envelope, - Report = document - }; - } - - private static ScanCompletedEventPayload BuildScanCompletedPayload( - ReportRequestDto request, - PolicyPreviewResponse preview, - ReportDocumentDto document, - DsseEnvelopeDto? envelope, - ReportLinksPayload links, - string correlationId) - { - return new ScanCompletedEventPayload - { - ReportId = document.ReportId, - ScanId = correlationId, - ImageDigest = document.ImageDigest, - Verdict = MapVerdict(document.Verdict), - Summary = document.Summary, - Delta = BuildDelta(preview, request), - Policy = document.Policy, - Findings = BuildFindingSummaries(request), - Links = links, - Dsse = envelope, - Report = document - }; - } - - private ReportLinksPayload BuildLinks(HttpContext context, ReportDocumentDto document, DsseEnvelopeDto? envelope) - { - if (!context.Request.Host.HasValue) - { - return new ReportLinksPayload(); - } - - var uiLink = BuildAbsoluteUri(context, "ui", "reports", document.ReportId); - var reportLink = BuildAbsoluteUri(context, ConcatSegments(_apiBaseSegments, _reportsSegment, document.ReportId)); - var policyLink = string.IsNullOrWhiteSpace(document.Policy.RevisionId) - ? null - : BuildAbsoluteUri(context, ConcatSegments(_apiBaseSegments, _policySegment, "revisions", document.Policy.RevisionId)); - var attestationLink = envelope is null - ? null - : BuildAbsoluteUri(context, "ui", "attestations", document.ReportId); - - return new ReportLinksPayload - { - Ui = uiLink, - Report = reportLink, - Policy = policyLink, - Attestation = attestationLink - }; - } - - private static ReportDeltaPayload? 
BuildDelta(PolicyPreviewResponse preview, ReportRequestDto request) - { - if (preview.Diffs.IsDefaultOrEmpty) - { - return null; - } - - var findings = BuildFindingsIndex(request.Findings); - var kevIds = new SortedSet<string>(StringComparer.OrdinalIgnoreCase); - var newCritical = 0; - var newHigh = 0; - - foreach (var diff in preview.Diffs) - { - var projected = diff.Projected; - if (projected is null || string.IsNullOrWhiteSpace(projected.FindingId)) - { - continue; - } - - findings.TryGetValue(projected.FindingId, out var finding); - - if (IsNewlyImportant(diff)) - { - var severity = finding?.Severity; - if (string.Equals(severity, "Critical", StringComparison.OrdinalIgnoreCase)) - { - newCritical++; - } - else if (string.Equals(severity, "High", StringComparison.OrdinalIgnoreCase)) - { - newHigh++; - } - - var kevId = ResolveKevIdentifier(finding); - if (!string.IsNullOrWhiteSpace(kevId)) - { - kevIds.Add(kevId); - } - } - } - - if (newCritical == 0 && newHigh == 0 && kevIds.Count == 0) - { - return null; - } - - return new ReportDeltaPayload - { - NewCritical = newCritical > 0 ? newCritical : null, - NewHigh = newHigh > 0 ? newHigh : null, - Kev = kevIds.Count > 0 ? kevIds.ToArray() : null - }; - } - - private static string BuildAbsoluteUri(HttpContext context, params string[] segments) - => BuildAbsoluteUri(context, segments.AsEnumerable()); - - private static string BuildAbsoluteUri(HttpContext context, IEnumerable<string> segments) - { - var normalized = segments - .Where(segment => !string.IsNullOrWhiteSpace(segment)) - .Select(segment => segment.Trim('/')) - .Where(segment => segment.Length > 0) - .ToArray(); - - if (!context.Request.Host.HasValue || normalized.Length == 0) - { - return string.Empty; - } - - var scheme = string.IsNullOrWhiteSpace(context.Request.Scheme) ? "https" : context.Request.Scheme; - var builder = new UriBuilder(scheme, context.Request.Host.Host) - { - Port = context.Request.Host.Port ?? -1, - Path = "/" + string.Join('/', normalized.Select(Uri.EscapeDataString)), - Query = string.Empty, - Fragment = string.Empty - }; - - return builder.Uri.ToString(); - } - - private string[] ConcatSegments(IEnumerable<string> prefix, params string[] suffix) - { - var segments = new List<string>(); - foreach (var segment in prefix) - { - if (!string.IsNullOrWhiteSpace(segment)) - { - segments.Add(segment.Trim('/')); - } - } - - foreach (var segment in suffix) - { - if (!string.IsNullOrWhiteSpace(segment)) - { - segments.Add(segment.Trim('/')); - } - } - - return segments.ToArray(); - } - - private static string[] SplitSegments(string? path) - { - if (string.IsNullOrWhiteSpace(path)) - { - return Array.Empty<string>(); - } - - return path.Split('/', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); - } - - private static ImmutableDictionary<string, PolicyPreviewFindingDto> BuildFindingsIndex( - IReadOnlyList<PolicyPreviewFindingDto>? 
findings) - { - if (findings is null || findings.Count == 0) - { - return ImmutableDictionary<string, PolicyPreviewFindingDto>.Empty; - } - - var builder = ImmutableDictionary.CreateBuilder<string, PolicyPreviewFindingDto>(StringComparer.Ordinal); - foreach (var finding in findings) - { - if (string.IsNullOrWhiteSpace(finding.Id)) - { - continue; - } - - if (!builder.ContainsKey(finding.Id)) - { - builder.Add(finding.Id, finding); - } - } - - return builder.ToImmutable(); - } - - private static IReadOnlyList<FindingSummaryPayload> BuildFindingSummaries(ReportRequestDto request) - { - if (request.Findings is not { Count: > 0 }) - { - return Array.Empty<FindingSummaryPayload>(); - } - - var summaries = new List<FindingSummaryPayload>(request.Findings.Count); - foreach (var finding in request.Findings) - { - if (string.IsNullOrWhiteSpace(finding.Id)) - { - continue; - } - - summaries.Add(new FindingSummaryPayload - { - Id = finding.Id, - Severity = finding.Severity, - Cve = finding.Cve, - Purl = finding.Purl, - Reachability = ResolveReachability(finding.Tags) - }); - } - - return summaries; - } - - private static string ResolveRepository(ReportRequestDto request) - { - if (request.Findings is { Count: > 0 }) - { - foreach (var finding in request.Findings) - { - if (!string.IsNullOrWhiteSpace(finding.Repository)) - { - return finding.Repository!.Trim(); - } - - if (!string.IsNullOrWhiteSpace(finding.Image)) - { - return finding.Image!.Trim(); - } - } - } - - return string.Empty; - } - - private static (string? Namespace, string Repo) SplitRepository(string repository) - { - if (string.IsNullOrWhiteSpace(repository)) - { - return (null, string.Empty); - } - - var normalized = repository.Trim(); - var segments = normalized.Split('/', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); - if (segments.Length == 0) - { - return (null, normalized); - } - - if (segments.Length == 1) - { - return (null, segments[0]); - } - - var repo = segments[^1]; - var ns = string.Join('/', segments[..^1]); - return (ns, repo); - } - - private static bool IsNewlyImportant(PolicyVerdictDiff diff) - { - var projected = diff.Projected.Status; - var baseline = diff.Baseline.Status; - - return projected switch - { - PolicyVerdictStatus.Blocked or PolicyVerdictStatus.Escalated - => baseline != PolicyVerdictStatus.Blocked && baseline != PolicyVerdictStatus.Escalated, - PolicyVerdictStatus.Warned or PolicyVerdictStatus.Deferred or PolicyVerdictStatus.RequiresVex - => baseline != PolicyVerdictStatus.Warned - && baseline != PolicyVerdictStatus.Deferred - && baseline != PolicyVerdictStatus.RequiresVex - && baseline != PolicyVerdictStatus.Blocked - && baseline != PolicyVerdictStatus.Escalated, - _ => false - }; - } - - private static string? ResolveKevIdentifier(PolicyPreviewFindingDto? finding) - { - if (finding is null) - { - return null; - } - - var tags = finding.Tags; - if (tags is not null) - { - foreach (var tag in tags) - { - if (string.IsNullOrWhiteSpace(tag)) - { - continue; - } - - if (string.Equals(tag, "kev", StringComparison.OrdinalIgnoreCase)) - { - return finding.Cve; - } - - if (tag.StartsWith("kev:", StringComparison.OrdinalIgnoreCase)) - { - var value = tag["kev:".Length..]; - if (!string.IsNullOrWhiteSpace(value)) - { - return value.Trim(); - } - } - } - } - - return finding.Cve; - } - - private static string? ResolveReachability(IReadOnlyList<string>? 
tags) - { - if (tags is null) - { - return null; - } - - foreach (var tag in tags) - { - if (string.IsNullOrWhiteSpace(tag)) - { - continue; - } - - if (tag.StartsWith("reachability:", StringComparison.OrdinalIgnoreCase)) - { - return tag["reachability:".Length..]; - } - } - - return null; - } - - private static string MapVerdict(string verdict) - => verdict.ToLowerInvariant() switch - { - "blocked" or "fail" => "fail", - "escalated" => "fail", - "warn" or "warned" or "deferred" or "requiresvex" => "warn", - _ => "pass" - }; - - private static string BuildIdempotencyKey(string kind, string tenant, string identifier) - => $"{kind}:{tenant}:{identifier}".ToLowerInvariant(); - - private static (string? TraceId, string? SpanId) ResolveTraceContext() - { - var activity = Activity.Current; - if (activity is null) - { - return (null, null); - } - - var traceId = activity.TraceId.ToString(); - var spanId = activity.SpanId.ToString(); - return (traceId, spanId); - } -} +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Diagnostics; +using System.Linq; +using System.Security.Claims; +using Microsoft.AspNetCore.Http; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Auth.Abstractions; +using StellaOps.Policy; +using StellaOps.Scanner.WebService.Contracts; +using StellaOps.Scanner.WebService.Options; + +namespace StellaOps.Scanner.WebService.Services; + +internal sealed class ReportEventDispatcher : IReportEventDispatcher +{ + private const string DefaultTenant = "default"; + private const string Source = "scanner.webservice"; + + private readonly IPlatformEventPublisher _publisher; + private readonly TimeProvider _timeProvider; + private readonly ILogger<ReportEventDispatcher> _logger; + private readonly string[] _apiBaseSegments; + private readonly string _reportsSegment; + private readonly string _policySegment; + + public ReportEventDispatcher( + IPlatformEventPublisher publisher, + IOptions<ScannerWebServiceOptions> options, + TimeProvider timeProvider, + ILogger<ReportEventDispatcher> logger) + { + _publisher = publisher ?? throw new ArgumentNullException(nameof(publisher)); + if (options is null) + { + throw new ArgumentNullException(nameof(options)); + } + + var apiOptions = options.Value.Api ?? new ScannerWebServiceOptions.ApiOptions(); + _apiBaseSegments = SplitSegments(apiOptions.BasePath); + _reportsSegment = string.IsNullOrWhiteSpace(apiOptions.ReportsSegment) + ? "reports" + : apiOptions.ReportsSegment.Trim('/'); + _policySegment = string.IsNullOrWhiteSpace(apiOptions.PolicySegment) + ? "policy" + : apiOptions.PolicySegment.Trim('/'); + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public async Task PublishAsync( + ReportRequestDto request, + PolicyPreviewResponse preview, + ReportDocumentDto document, + DsseEnvelopeDto? envelope, + HttpContext httpContext, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(request); + ArgumentNullException.ThrowIfNull(preview); + ArgumentNullException.ThrowIfNull(document); + ArgumentNullException.ThrowIfNull(httpContext); + + cancellationToken.ThrowIfCancellationRequested(); + + var now = _timeProvider.GetUtcNow(); + var occurredAt = document.GeneratedAt == default ? 
now : document.GeneratedAt; + var tenant = ResolveTenant(httpContext); + var scope = BuildScope(request, document); + var attributes = BuildAttributes(document); + var links = BuildLinks(httpContext, document, envelope); + var correlationId = document.ReportId; + var (traceId, spanId) = ResolveTraceContext(); + + var reportEvent = new OrchestratorEvent + { + EventId = Guid.NewGuid(), + Kind = OrchestratorEventKinds.ScannerReportReady, + Version = 1, + Tenant = tenant, + OccurredAt = occurredAt, + RecordedAt = now, + Source = Source, + IdempotencyKey = BuildIdempotencyKey(OrchestratorEventKinds.ScannerReportReady, tenant, document.ReportId), + CorrelationId = correlationId, + TraceId = traceId, + SpanId = spanId, + Scope = scope, + Attributes = attributes, + Payload = BuildReportReadyPayload(request, preview, document, envelope, links, correlationId) + }; + + await PublishSafelyAsync(reportEvent, document.ReportId, cancellationToken).ConfigureAwait(false); + + var scanCompletedEvent = new OrchestratorEvent + { + EventId = Guid.NewGuid(), + Kind = OrchestratorEventKinds.ScannerScanCompleted, + Version = 1, + Tenant = tenant, + OccurredAt = occurredAt, + RecordedAt = now, + Source = Source, + IdempotencyKey = BuildIdempotencyKey(OrchestratorEventKinds.ScannerScanCompleted, tenant, correlationId), + CorrelationId = correlationId, + TraceId = traceId, + SpanId = spanId, + Scope = scope, + Attributes = attributes, + Payload = BuildScanCompletedPayload(request, preview, document, envelope, links, correlationId) + }; + + await PublishSafelyAsync(scanCompletedEvent, document.ReportId, cancellationToken).ConfigureAwait(false); + } + + private async Task PublishSafelyAsync(OrchestratorEvent @event, string reportId, CancellationToken cancellationToken) + { + try + { + await _publisher.PublishAsync(@event, cancellationToken).ConfigureAwait(false); + } + catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) + { + throw; + } + catch (Exception ex) + { + _logger.LogError( + ex, + "Failed to publish orchestrator event {EventKind} for report {ReportId}.", + @event.Kind, + reportId); + } + } + + private static string ResolveTenant(HttpContext context) + { + var tenant = context.User?.FindFirstValue(StellaOpsClaimTypes.Tenant); + if (!string.IsNullOrWhiteSpace(tenant)) + { + return tenant.Trim(); + } + + if (context.Request.Headers.TryGetValue("X-Stella-Tenant", out var headerTenant)) + { + var headerValue = headerTenant.ToString(); + if (!string.IsNullOrWhiteSpace(headerValue)) + { + return headerValue.Trim(); + } + } + + return DefaultTenant; + } + + private static OrchestratorEventScope BuildScope(ReportRequestDto request, ReportDocumentDto document) + { + var repository = ResolveRepository(request); + var (ns, repo) = SplitRepository(repository); + + var digest = string.IsNullOrWhiteSpace(document.ImageDigest) + ? request.ImageDigest ?? string.Empty + : document.ImageDigest; + + return new OrchestratorEventScope + { + Namespace = ns, + Repo = string.IsNullOrWhiteSpace(repo) ? "(unknown)" : repo, + Digest = string.IsNullOrWhiteSpace(digest) ? 
"(unknown)" : digest + }; + } + + private static ImmutableSortedDictionary<string, string> BuildAttributes(ReportDocumentDto document) + { + var builder = ImmutableSortedDictionary.CreateBuilder<string, string>(StringComparer.Ordinal); + builder["reportId"] = document.ReportId; + builder["verdict"] = document.Verdict; + + if (!string.IsNullOrWhiteSpace(document.Policy.RevisionId)) + { + builder["policyRevisionId"] = document.Policy.RevisionId!; + } + + if (!string.IsNullOrWhiteSpace(document.Policy.Digest)) + { + builder["policyDigest"] = document.Policy.Digest!; + } + + return builder.ToImmutable(); + } + + private static ReportReadyEventPayload BuildReportReadyPayload( + ReportRequestDto request, + PolicyPreviewResponse preview, + ReportDocumentDto document, + DsseEnvelopeDto? envelope, + ReportLinksPayload links, + string correlationId) + { + return new ReportReadyEventPayload + { + ReportId = document.ReportId, + ScanId = correlationId, + ImageDigest = document.ImageDigest, + GeneratedAt = document.GeneratedAt, + Verdict = MapVerdict(document.Verdict), + Summary = document.Summary, + Delta = BuildDelta(preview, request), + QuietedFindingCount = document.Summary.Quieted, + Policy = document.Policy, + Links = links, + Dsse = envelope, + Report = document + }; + } + + private static ScanCompletedEventPayload BuildScanCompletedPayload( + ReportRequestDto request, + PolicyPreviewResponse preview, + ReportDocumentDto document, + DsseEnvelopeDto? envelope, + ReportLinksPayload links, + string correlationId) + { + return new ScanCompletedEventPayload + { + ReportId = document.ReportId, + ScanId = correlationId, + ImageDigest = document.ImageDigest, + Verdict = MapVerdict(document.Verdict), + Summary = document.Summary, + Delta = BuildDelta(preview, request), + Policy = document.Policy, + Findings = BuildFindingSummaries(request), + Links = links, + Dsse = envelope, + Report = document + }; + } + + private ReportLinksPayload BuildLinks(HttpContext context, ReportDocumentDto document, DsseEnvelopeDto? envelope) + { + if (!context.Request.Host.HasValue) + { + return new ReportLinksPayload(); + } + + var uiLink = BuildAbsoluteUri(context, "ui", "reports", document.ReportId); + var reportLink = BuildAbsoluteUri(context, ConcatSegments(_apiBaseSegments, _reportsSegment, document.ReportId)); + var policyLink = string.IsNullOrWhiteSpace(document.Policy.RevisionId) + ? null + : BuildAbsoluteUri(context, ConcatSegments(_apiBaseSegments, _policySegment, "revisions", document.Policy.RevisionId)); + var attestationLink = envelope is null + ? null + : BuildAbsoluteUri(context, "ui", "attestations", document.ReportId); + + return new ReportLinksPayload + { + Ui = uiLink, + Report = reportLink, + Policy = policyLink, + Attestation = attestationLink + }; + } + + private static ReportDeltaPayload? 
BuildDelta(PolicyPreviewResponse preview, ReportRequestDto request) + { + if (preview.Diffs.IsDefaultOrEmpty) + { + return null; + } + + var findings = BuildFindingsIndex(request.Findings); + var kevIds = new SortedSet<string>(StringComparer.OrdinalIgnoreCase); + var newCritical = 0; + var newHigh = 0; + + foreach (var diff in preview.Diffs) + { + var projected = diff.Projected; + if (projected is null || string.IsNullOrWhiteSpace(projected.FindingId)) + { + continue; + } + + findings.TryGetValue(projected.FindingId, out var finding); + + if (IsNewlyImportant(diff)) + { + var severity = finding?.Severity; + if (string.Equals(severity, "Critical", StringComparison.OrdinalIgnoreCase)) + { + newCritical++; + } + else if (string.Equals(severity, "High", StringComparison.OrdinalIgnoreCase)) + { + newHigh++; + } + + var kevId = ResolveKevIdentifier(finding); + if (!string.IsNullOrWhiteSpace(kevId)) + { + kevIds.Add(kevId); + } + } + } + + if (newCritical == 0 && newHigh == 0 && kevIds.Count == 0) + { + return null; + } + + return new ReportDeltaPayload + { + NewCritical = newCritical > 0 ? newCritical : null, + NewHigh = newHigh > 0 ? newHigh : null, + Kev = kevIds.Count > 0 ? kevIds.ToArray() : null + }; + } + + private static string BuildAbsoluteUri(HttpContext context, params string[] segments) + => BuildAbsoluteUri(context, segments.AsEnumerable()); + + private static string BuildAbsoluteUri(HttpContext context, IEnumerable<string> segments) + { + var normalized = segments + .Where(segment => !string.IsNullOrWhiteSpace(segment)) + .Select(segment => segment.Trim('/')) + .Where(segment => segment.Length > 0) + .ToArray(); + + if (!context.Request.Host.HasValue || normalized.Length == 0) + { + return string.Empty; + } + + var scheme = string.IsNullOrWhiteSpace(context.Request.Scheme) ? "https" : context.Request.Scheme; + var builder = new UriBuilder(scheme, context.Request.Host.Host) + { + Port = context.Request.Host.Port ?? -1, + Path = "/" + string.Join('/', normalized.Select(Uri.EscapeDataString)), + Query = string.Empty, + Fragment = string.Empty + }; + + return builder.Uri.ToString(); + } + + private string[] ConcatSegments(IEnumerable<string> prefix, params string[] suffix) + { + var segments = new List<string>(); + foreach (var segment in prefix) + { + if (!string.IsNullOrWhiteSpace(segment)) + { + segments.Add(segment.Trim('/')); + } + } + + foreach (var segment in suffix) + { + if (!string.IsNullOrWhiteSpace(segment)) + { + segments.Add(segment.Trim('/')); + } + } + + return segments.ToArray(); + } + + private static string[] SplitSegments(string? path) + { + if (string.IsNullOrWhiteSpace(path)) + { + return Array.Empty<string>(); + } + + return path.Split('/', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); + } + + private static ImmutableDictionary<string, PolicyPreviewFindingDto> BuildFindingsIndex( + IReadOnlyList<PolicyPreviewFindingDto>? 
findings) + { + if (findings is null || findings.Count == 0) + { + return ImmutableDictionary<string, PolicyPreviewFindingDto>.Empty; + } + + var builder = ImmutableDictionary.CreateBuilder<string, PolicyPreviewFindingDto>(StringComparer.Ordinal); + foreach (var finding in findings) + { + if (string.IsNullOrWhiteSpace(finding.Id)) + { + continue; + } + + if (!builder.ContainsKey(finding.Id)) + { + builder.Add(finding.Id, finding); + } + } + + return builder.ToImmutable(); + } + + private static IReadOnlyList<FindingSummaryPayload> BuildFindingSummaries(ReportRequestDto request) + { + if (request.Findings is not { Count: > 0 }) + { + return Array.Empty<FindingSummaryPayload>(); + } + + var summaries = new List<FindingSummaryPayload>(request.Findings.Count); + foreach (var finding in request.Findings) + { + if (string.IsNullOrWhiteSpace(finding.Id)) + { + continue; + } + + summaries.Add(new FindingSummaryPayload + { + Id = finding.Id, + Severity = finding.Severity, + Cve = finding.Cve, + Purl = finding.Purl, + Reachability = ResolveReachability(finding.Tags) + }); + } + + return summaries; + } + + private static string ResolveRepository(ReportRequestDto request) + { + if (request.Findings is { Count: > 0 }) + { + foreach (var finding in request.Findings) + { + if (!string.IsNullOrWhiteSpace(finding.Repository)) + { + return finding.Repository!.Trim(); + } + + if (!string.IsNullOrWhiteSpace(finding.Image)) + { + return finding.Image!.Trim(); + } + } + } + + return string.Empty; + } + + private static (string? Namespace, string Repo) SplitRepository(string repository) + { + if (string.IsNullOrWhiteSpace(repository)) + { + return (null, string.Empty); + } + + var normalized = repository.Trim(); + var segments = normalized.Split('/', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); + if (segments.Length == 0) + { + return (null, normalized); + } + + if (segments.Length == 1) + { + return (null, segments[0]); + } + + var repo = segments[^1]; + var ns = string.Join('/', segments[..^1]); + return (ns, repo); + } + + private static bool IsNewlyImportant(PolicyVerdictDiff diff) + { + var projected = diff.Projected.Status; + var baseline = diff.Baseline.Status; + + return projected switch + { + PolicyVerdictStatus.Blocked or PolicyVerdictStatus.Escalated + => baseline != PolicyVerdictStatus.Blocked && baseline != PolicyVerdictStatus.Escalated, + PolicyVerdictStatus.Warned or PolicyVerdictStatus.Deferred or PolicyVerdictStatus.RequiresVex + => baseline != PolicyVerdictStatus.Warned + && baseline != PolicyVerdictStatus.Deferred + && baseline != PolicyVerdictStatus.RequiresVex + && baseline != PolicyVerdictStatus.Blocked + && baseline != PolicyVerdictStatus.Escalated, + _ => false + }; + } + + private static string? ResolveKevIdentifier(PolicyPreviewFindingDto? finding) + { + if (finding is null) + { + return null; + } + + var tags = finding.Tags; + if (tags is not null) + { + foreach (var tag in tags) + { + if (string.IsNullOrWhiteSpace(tag)) + { + continue; + } + + if (string.Equals(tag, "kev", StringComparison.OrdinalIgnoreCase)) + { + return finding.Cve; + } + + if (tag.StartsWith("kev:", StringComparison.OrdinalIgnoreCase)) + { + var value = tag["kev:".Length..]; + if (!string.IsNullOrWhiteSpace(value)) + { + return value.Trim(); + } + } + } + } + + return finding.Cve; + } + + private static string? ResolveReachability(IReadOnlyList<string>? 
tags) + { + if (tags is null) + { + return null; + } + + foreach (var tag in tags) + { + if (string.IsNullOrWhiteSpace(tag)) + { + continue; + } + + if (tag.StartsWith("reachability:", StringComparison.OrdinalIgnoreCase)) + { + return tag["reachability:".Length..]; + } + } + + return null; + } + + private static string MapVerdict(string verdict) + => verdict.ToLowerInvariant() switch + { + "blocked" or "fail" => "fail", + "escalated" => "fail", + "warn" or "warned" or "deferred" or "requiresvex" => "warn", + _ => "pass" + }; + + private static string BuildIdempotencyKey(string kind, string tenant, string identifier) + => $"{kind}:{tenant}:{identifier}".ToLowerInvariant(); + + private static (string? TraceId, string? SpanId) ResolveTraceContext() + { + var activity = Activity.Current; + if (activity is null) + { + return (null, null); + } + + var traceId = activity.TraceId.ToString(); + var spanId = activity.SpanId.ToString(); + return (traceId, spanId); + } +} diff --git a/src/StellaOps.Scanner.WebService/Services/ReportSigner.cs b/src/Scanner/StellaOps.Scanner.WebService/Services/ReportSigner.cs similarity index 100% rename from src/StellaOps.Scanner.WebService/Services/ReportSigner.cs rename to src/Scanner/StellaOps.Scanner.WebService/Services/ReportSigner.cs diff --git a/src/StellaOps.Scanner.WebService/Services/RuntimeEventIngestionService.cs b/src/Scanner/StellaOps.Scanner.WebService/Services/RuntimeEventIngestionService.cs similarity index 97% rename from src/StellaOps.Scanner.WebService/Services/RuntimeEventIngestionService.cs rename to src/Scanner/StellaOps.Scanner.WebService/Services/RuntimeEventIngestionService.cs index 17f938ed..6b106787 100644 --- a/src/StellaOps.Scanner.WebService/Services/RuntimeEventIngestionService.cs +++ b/src/Scanner/StellaOps.Scanner.WebService/Services/RuntimeEventIngestionService.cs @@ -1,215 +1,215 @@ -using System.Text.Json; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using System.Text; -using MongoDB.Bson; -using StellaOps.Scanner.Storage.Catalog; -using StellaOps.Scanner.Storage.Repositories; -using StellaOps.Scanner.WebService.Options; -using StellaOps.Zastava.Core.Contracts; - -namespace StellaOps.Scanner.WebService.Services; - -internal interface IRuntimeEventIngestionService -{ - Task<RuntimeEventIngestionResult> IngestAsync( - IReadOnlyList<RuntimeEventEnvelope> envelopes, - string? batchId, - CancellationToken cancellationToken); -} - -internal sealed class RuntimeEventIngestionService : IRuntimeEventIngestionService -{ - private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web); - - private readonly RuntimeEventRepository _repository; - private readonly RuntimeEventRateLimiter _rateLimiter; - private readonly IOptionsMonitor<ScannerWebServiceOptions> _optionsMonitor; - private readonly TimeProvider _timeProvider; - private readonly ILogger<RuntimeEventIngestionService> _logger; - - public RuntimeEventIngestionService( - RuntimeEventRepository repository, - RuntimeEventRateLimiter rateLimiter, - IOptionsMonitor<ScannerWebServiceOptions> optionsMonitor, - TimeProvider timeProvider, - ILogger<RuntimeEventIngestionService> logger) - { - _repository = repository ?? throw new ArgumentNullException(nameof(repository)); - _rateLimiter = rateLimiter ?? throw new ArgumentNullException(nameof(rateLimiter)); - _optionsMonitor = optionsMonitor ?? throw new ArgumentNullException(nameof(optionsMonitor)); - _timeProvider = timeProvider ?? 
throw new ArgumentNullException(nameof(timeProvider)); - _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - } - - public async Task<RuntimeEventIngestionResult> IngestAsync( - IReadOnlyList<RuntimeEventEnvelope> envelopes, - string? batchId, - CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(envelopes); - if (envelopes.Count == 0) - { - return RuntimeEventIngestionResult.Empty; - } - - var rateDecision = _rateLimiter.Evaluate(envelopes); - if (!rateDecision.Allowed) - { - _logger.LogWarning( - "Runtime event batch rejected due to rate limit ({Scope}={Key}, retryAfter={RetryAfter})", - rateDecision.Scope, - rateDecision.Key, - rateDecision.RetryAfter); - - return RuntimeEventIngestionResult.RateLimited(rateDecision.Scope, rateDecision.Key, rateDecision.RetryAfter); - } - - var options = _optionsMonitor.CurrentValue.Runtime ?? new ScannerWebServiceOptions.RuntimeOptions(); - var receivedAt = _timeProvider.GetUtcNow().UtcDateTime; - var expiresAt = receivedAt.AddDays(options.EventTtlDays); - - var documents = new List<RuntimeEventDocument>(envelopes.Count); - var totalPayloadBytes = 0; - - foreach (var envelope in envelopes) - { - var payloadBytes = JsonSerializer.SerializeToUtf8Bytes(envelope, SerializerOptions); - totalPayloadBytes += payloadBytes.Length; - if (totalPayloadBytes > options.MaxPayloadBytes) - { - _logger.LogWarning( - "Runtime event batch exceeds payload budget ({PayloadBytes} > {MaxPayloadBytes})", - totalPayloadBytes, - options.MaxPayloadBytes); - return RuntimeEventIngestionResult.PayloadTooLarge(totalPayloadBytes, options.MaxPayloadBytes); - } - - var payloadDocument = BsonDocument.Parse(Encoding.UTF8.GetString(payloadBytes)); - var runtimeEvent = envelope.Event; - var normalizedDigest = ExtractImageDigest(runtimeEvent); - var normalizedBuildId = NormalizeBuildId(runtimeEvent.Process?.BuildId); - - var document = new RuntimeEventDocument - { - EventId = runtimeEvent.EventId, - SchemaVersion = envelope.SchemaVersion, - Tenant = runtimeEvent.Tenant, - Node = runtimeEvent.Node, - Kind = runtimeEvent.Kind.ToString(), - When = runtimeEvent.When.UtcDateTime, - ReceivedAt = receivedAt, - ExpiresAt = expiresAt, - Platform = runtimeEvent.Workload.Platform, - Namespace = runtimeEvent.Workload.Namespace, - Pod = runtimeEvent.Workload.Pod, - Container = runtimeEvent.Workload.Container, - ContainerId = runtimeEvent.Workload.ContainerId, - ImageRef = runtimeEvent.Workload.ImageRef, - ImageDigest = normalizedDigest, - Engine = runtimeEvent.Runtime.Engine, - EngineVersion = runtimeEvent.Runtime.Version, - BaselineDigest = runtimeEvent.Delta?.BaselineImageDigest, - ImageSigned = runtimeEvent.Posture?.ImageSigned, - SbomReferrer = runtimeEvent.Posture?.SbomReferrer, - BuildId = normalizedBuildId, - Payload = payloadDocument - }; - - documents.Add(document); - } - - var insertResult = await _repository.InsertAsync(documents, cancellationToken).ConfigureAwait(false); - _logger.LogInformation( - "Runtime ingestion batch processed (batchId={BatchId}, accepted={Accepted}, duplicates={Duplicates}, payloadBytes={PayloadBytes})", - batchId, - insertResult.InsertedCount, - insertResult.DuplicateCount, - totalPayloadBytes); - - return RuntimeEventIngestionResult.Success(insertResult.InsertedCount, insertResult.DuplicateCount, totalPayloadBytes); - } - - private static string? 
ExtractImageDigest(RuntimeEvent runtimeEvent) - { - var digest = NormalizeDigest(runtimeEvent.Delta?.BaselineImageDigest); - if (!string.IsNullOrWhiteSpace(digest)) - { - return digest; - } - - var imageRef = runtimeEvent.Workload.ImageRef; - if (string.IsNullOrWhiteSpace(imageRef)) - { - return null; - } - - var trimmed = imageRef.Trim(); - var atIndex = trimmed.LastIndexOf('@'); - if (atIndex >= 0 && atIndex < trimmed.Length - 1) - { - var candidate = trimmed[(atIndex + 1)..]; - var parsed = NormalizeDigest(candidate); - if (!string.IsNullOrWhiteSpace(parsed)) - { - return parsed; - } - } - - if (trimmed.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase)) - { - return NormalizeDigest(trimmed); - } - - return null; - } - - private static string? NormalizeDigest(string? candidate) - { - if (string.IsNullOrWhiteSpace(candidate)) - { - return null; - } - - var trimmed = candidate.Trim(); - if (!trimmed.Contains(':', StringComparison.Ordinal)) - { - return null; - } - - return trimmed.ToLowerInvariant(); - } - - private static string? NormalizeBuildId(string? buildId) - { - if (string.IsNullOrWhiteSpace(buildId)) - { - return null; - } - - return buildId.Trim().ToLowerInvariant(); - } -} - -internal readonly record struct RuntimeEventIngestionResult( - int Accepted, - int Duplicates, - bool IsRateLimited, - string? RateLimitedScope, - string? RateLimitedKey, - TimeSpan RetryAfter, - bool IsPayloadTooLarge, - int PayloadBytes, - int PayloadLimit) -{ - public static RuntimeEventIngestionResult Empty => new(0, 0, false, null, null, TimeSpan.Zero, false, 0, 0); - - public static RuntimeEventIngestionResult RateLimited(string? scope, string? key, TimeSpan retryAfter) - => new(0, 0, true, scope, key, retryAfter, false, 0, 0); - - public static RuntimeEventIngestionResult PayloadTooLarge(int payloadBytes, int payloadLimit) - => new(0, 0, false, null, null, TimeSpan.Zero, true, payloadBytes, payloadLimit); - - public static RuntimeEventIngestionResult Success(int accepted, int duplicates, int payloadBytes) - => new(accepted, duplicates, false, null, null, TimeSpan.Zero, false, payloadBytes, 0); -} +using System.Text.Json; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using System.Text; +using MongoDB.Bson; +using StellaOps.Scanner.Storage.Catalog; +using StellaOps.Scanner.Storage.Repositories; +using StellaOps.Scanner.WebService.Options; +using StellaOps.Zastava.Core.Contracts; + +namespace StellaOps.Scanner.WebService.Services; + +internal interface IRuntimeEventIngestionService +{ + Task<RuntimeEventIngestionResult> IngestAsync( + IReadOnlyList<RuntimeEventEnvelope> envelopes, + string? batchId, + CancellationToken cancellationToken); +} + +internal sealed class RuntimeEventIngestionService : IRuntimeEventIngestionService +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web); + + private readonly RuntimeEventRepository _repository; + private readonly RuntimeEventRateLimiter _rateLimiter; + private readonly IOptionsMonitor<ScannerWebServiceOptions> _optionsMonitor; + private readonly TimeProvider _timeProvider; + private readonly ILogger<RuntimeEventIngestionService> _logger; + + public RuntimeEventIngestionService( + RuntimeEventRepository repository, + RuntimeEventRateLimiter rateLimiter, + IOptionsMonitor<ScannerWebServiceOptions> optionsMonitor, + TimeProvider timeProvider, + ILogger<RuntimeEventIngestionService> logger) + { + _repository = repository ?? 
throw new ArgumentNullException(nameof(repository)); + _rateLimiter = rateLimiter ?? throw new ArgumentNullException(nameof(rateLimiter)); + _optionsMonitor = optionsMonitor ?? throw new ArgumentNullException(nameof(optionsMonitor)); + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public async Task<RuntimeEventIngestionResult> IngestAsync( + IReadOnlyList<RuntimeEventEnvelope> envelopes, + string? batchId, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(envelopes); + if (envelopes.Count == 0) + { + return RuntimeEventIngestionResult.Empty; + } + + var rateDecision = _rateLimiter.Evaluate(envelopes); + if (!rateDecision.Allowed) + { + _logger.LogWarning( + "Runtime event batch rejected due to rate limit ({Scope}={Key}, retryAfter={RetryAfter})", + rateDecision.Scope, + rateDecision.Key, + rateDecision.RetryAfter); + + return RuntimeEventIngestionResult.RateLimited(rateDecision.Scope, rateDecision.Key, rateDecision.RetryAfter); + } + + var options = _optionsMonitor.CurrentValue.Runtime ?? new ScannerWebServiceOptions.RuntimeOptions(); + var receivedAt = _timeProvider.GetUtcNow().UtcDateTime; + var expiresAt = receivedAt.AddDays(options.EventTtlDays); + + var documents = new List<RuntimeEventDocument>(envelopes.Count); + var totalPayloadBytes = 0; + + foreach (var envelope in envelopes) + { + var payloadBytes = JsonSerializer.SerializeToUtf8Bytes(envelope, SerializerOptions); + totalPayloadBytes += payloadBytes.Length; + if (totalPayloadBytes > options.MaxPayloadBytes) + { + _logger.LogWarning( + "Runtime event batch exceeds payload budget ({PayloadBytes} > {MaxPayloadBytes})", + totalPayloadBytes, + options.MaxPayloadBytes); + return RuntimeEventIngestionResult.PayloadTooLarge(totalPayloadBytes, options.MaxPayloadBytes); + } + + var payloadDocument = BsonDocument.Parse(Encoding.UTF8.GetString(payloadBytes)); + var runtimeEvent = envelope.Event; + var normalizedDigest = ExtractImageDigest(runtimeEvent); + var normalizedBuildId = NormalizeBuildId(runtimeEvent.Process?.BuildId); + + var document = new RuntimeEventDocument + { + EventId = runtimeEvent.EventId, + SchemaVersion = envelope.SchemaVersion, + Tenant = runtimeEvent.Tenant, + Node = runtimeEvent.Node, + Kind = runtimeEvent.Kind.ToString(), + When = runtimeEvent.When.UtcDateTime, + ReceivedAt = receivedAt, + ExpiresAt = expiresAt, + Platform = runtimeEvent.Workload.Platform, + Namespace = runtimeEvent.Workload.Namespace, + Pod = runtimeEvent.Workload.Pod, + Container = runtimeEvent.Workload.Container, + ContainerId = runtimeEvent.Workload.ContainerId, + ImageRef = runtimeEvent.Workload.ImageRef, + ImageDigest = normalizedDigest, + Engine = runtimeEvent.Runtime.Engine, + EngineVersion = runtimeEvent.Runtime.Version, + BaselineDigest = runtimeEvent.Delta?.BaselineImageDigest, + ImageSigned = runtimeEvent.Posture?.ImageSigned, + SbomReferrer = runtimeEvent.Posture?.SbomReferrer, + BuildId = normalizedBuildId, + Payload = payloadDocument + }; + + documents.Add(document); + } + + var insertResult = await _repository.InsertAsync(documents, cancellationToken).ConfigureAwait(false); + _logger.LogInformation( + "Runtime ingestion batch processed (batchId={BatchId}, accepted={Accepted}, duplicates={Duplicates}, payloadBytes={PayloadBytes})", + batchId, + insertResult.InsertedCount, + insertResult.DuplicateCount, + totalPayloadBytes); + + return 
RuntimeEventIngestionResult.Success(insertResult.InsertedCount, insertResult.DuplicateCount, totalPayloadBytes); + } + + private static string? ExtractImageDigest(RuntimeEvent runtimeEvent) + { + var digest = NormalizeDigest(runtimeEvent.Delta?.BaselineImageDigest); + if (!string.IsNullOrWhiteSpace(digest)) + { + return digest; + } + + var imageRef = runtimeEvent.Workload.ImageRef; + if (string.IsNullOrWhiteSpace(imageRef)) + { + return null; + } + + var trimmed = imageRef.Trim(); + var atIndex = trimmed.LastIndexOf('@'); + if (atIndex >= 0 && atIndex < trimmed.Length - 1) + { + var candidate = trimmed[(atIndex + 1)..]; + var parsed = NormalizeDigest(candidate); + if (!string.IsNullOrWhiteSpace(parsed)) + { + return parsed; + } + } + + if (trimmed.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase)) + { + return NormalizeDigest(trimmed); + } + + return null; + } + + private static string? NormalizeDigest(string? candidate) + { + if (string.IsNullOrWhiteSpace(candidate)) + { + return null; + } + + var trimmed = candidate.Trim(); + if (!trimmed.Contains(':', StringComparison.Ordinal)) + { + return null; + } + + return trimmed.ToLowerInvariant(); + } + + private static string? NormalizeBuildId(string? buildId) + { + if (string.IsNullOrWhiteSpace(buildId)) + { + return null; + } + + return buildId.Trim().ToLowerInvariant(); + } +} + +internal readonly record struct RuntimeEventIngestionResult( + int Accepted, + int Duplicates, + bool IsRateLimited, + string? RateLimitedScope, + string? RateLimitedKey, + TimeSpan RetryAfter, + bool IsPayloadTooLarge, + int PayloadBytes, + int PayloadLimit) +{ + public static RuntimeEventIngestionResult Empty => new(0, 0, false, null, null, TimeSpan.Zero, false, 0, 0); + + public static RuntimeEventIngestionResult RateLimited(string? scope, string? 
key, TimeSpan retryAfter) + => new(0, 0, true, scope, key, retryAfter, false, 0, 0); + + public static RuntimeEventIngestionResult PayloadTooLarge(int payloadBytes, int payloadLimit) + => new(0, 0, false, null, null, TimeSpan.Zero, true, payloadBytes, payloadLimit); + + public static RuntimeEventIngestionResult Success(int accepted, int duplicates, int payloadBytes) + => new(accepted, duplicates, false, null, null, TimeSpan.Zero, false, payloadBytes, 0); +} diff --git a/src/StellaOps.Scanner.WebService/Services/RuntimeEventRateLimiter.cs b/src/Scanner/StellaOps.Scanner.WebService/Services/RuntimeEventRateLimiter.cs similarity index 97% rename from src/StellaOps.Scanner.WebService/Services/RuntimeEventRateLimiter.cs rename to src/Scanner/StellaOps.Scanner.WebService/Services/RuntimeEventRateLimiter.cs index 963a8d07..b101ec55 100644 --- a/src/StellaOps.Scanner.WebService/Services/RuntimeEventRateLimiter.cs +++ b/src/Scanner/StellaOps.Scanner.WebService/Services/RuntimeEventRateLimiter.cs @@ -1,173 +1,173 @@ -using System.Collections.Concurrent; -using Microsoft.Extensions.Options; -using StellaOps.Scanner.WebService.Options; -using StellaOps.Zastava.Core.Contracts; - -namespace StellaOps.Scanner.WebService.Services; - -internal sealed class RuntimeEventRateLimiter -{ - private readonly ConcurrentDictionary<string, TokenBucket> _tenantBuckets = new(StringComparer.Ordinal); - private readonly ConcurrentDictionary<string, TokenBucket> _nodeBuckets = new(StringComparer.Ordinal); - private readonly TimeProvider _timeProvider; - private readonly IOptionsMonitor<ScannerWebServiceOptions> _optionsMonitor; - - public RuntimeEventRateLimiter(IOptionsMonitor<ScannerWebServiceOptions> optionsMonitor, TimeProvider timeProvider) - { - _optionsMonitor = optionsMonitor ?? throw new ArgumentNullException(nameof(optionsMonitor)); - _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); - } - - public RateLimitDecision Evaluate(IReadOnlyList<RuntimeEventEnvelope> envelopes) - { - ArgumentNullException.ThrowIfNull(envelopes); - if (envelopes.Count == 0) - { - return RateLimitDecision.Success; - } - - var options = _optionsMonitor.CurrentValue.Runtime ?? 
new ScannerWebServiceOptions.RuntimeOptions(); - var now = _timeProvider.GetUtcNow(); - - var tenantCounts = new Dictionary<string, int>(StringComparer.Ordinal); - var nodeCounts = new Dictionary<string, int>(StringComparer.Ordinal); - - foreach (var envelope in envelopes) - { - var tenant = envelope.Event.Tenant; - var node = envelope.Event.Node; - if (tenantCounts.TryGetValue(tenant, out var tenantCount)) - { - tenantCounts[tenant] = tenantCount + 1; - } - else - { - tenantCounts[tenant] = 1; - } - - var nodeKey = $"{tenant}|{node}"; - if (nodeCounts.TryGetValue(nodeKey, out var nodeCount)) - { - nodeCounts[nodeKey] = nodeCount + 1; - } - else - { - nodeCounts[nodeKey] = 1; - } - } - - var tenantDecision = TryAcquire( - _tenantBuckets, - tenantCounts, - options.PerTenantEventsPerSecond, - options.PerTenantBurst, - now, - scope: "tenant"); - - if (!tenantDecision.Allowed) - { - return tenantDecision; - } - - var nodeDecision = TryAcquire( - _nodeBuckets, - nodeCounts, - options.PerNodeEventsPerSecond, - options.PerNodeBurst, - now, - scope: "node"); - - return nodeDecision; - } - - private static RateLimitDecision TryAcquire( - ConcurrentDictionary<string, TokenBucket> buckets, - IReadOnlyDictionary<string, int> counts, - double ratePerSecond, - int burst, - DateTimeOffset now, - string scope) - { - if (counts.Count == 0) - { - return RateLimitDecision.Success; - } - - var acquired = new List<(TokenBucket bucket, double tokens)>(); - - foreach (var pair in counts) - { - var bucket = buckets.GetOrAdd( - pair.Key, - _ => new TokenBucket(burst, ratePerSecond, now)); - - lock (bucket.SyncRoot) - { - bucket.Refill(now); - if (bucket.Tokens + 1e-9 < pair.Value) - { - var deficit = pair.Value - bucket.Tokens; - var retryAfterSeconds = deficit / bucket.RefillRatePerSecond; - var retryAfter = retryAfterSeconds <= 0 - ? TimeSpan.FromSeconds(1) - : TimeSpan.FromSeconds(Math.Min(retryAfterSeconds, 3600)); - - // undo previously acquired tokens - foreach (var (acquiredBucket, tokens) in acquired) - { - lock (acquiredBucket.SyncRoot) - { - acquiredBucket.Tokens = Math.Min(acquiredBucket.Capacity, acquiredBucket.Tokens + tokens); - } - } - - return new RateLimitDecision(false, scope, pair.Key, retryAfter); - } - - bucket.Tokens -= pair.Value; - acquired.Add((bucket, pair.Value)); - } - } - - return RateLimitDecision.Success; - } - - private sealed class TokenBucket - { - public TokenBucket(double capacity, double refillRatePerSecond, DateTimeOffset now) - { - Capacity = capacity; - Tokens = capacity; - RefillRatePerSecond = refillRatePerSecond; - LastRefill = now; - } - - public double Capacity { get; } - public double Tokens { get; set; } - public double RefillRatePerSecond { get; } - public DateTimeOffset LastRefill { get; set; } - public object SyncRoot { get; } = new(); - - public void Refill(DateTimeOffset now) - { - if (now <= LastRefill) - { - return; - } - - var elapsedSeconds = (now - LastRefill).TotalSeconds; - if (elapsedSeconds <= 0) - { - return; - } - - Tokens = Math.Min(Capacity, Tokens + elapsedSeconds * RefillRatePerSecond); - LastRefill = now; - } - } -} - -internal readonly record struct RateLimitDecision(bool Allowed, string? Scope, string? 
Key, TimeSpan RetryAfter) -{ - public static RateLimitDecision Success { get; } = new(true, null, null, TimeSpan.Zero); -} +using System.Collections.Concurrent; +using Microsoft.Extensions.Options; +using StellaOps.Scanner.WebService.Options; +using StellaOps.Zastava.Core.Contracts; + +namespace StellaOps.Scanner.WebService.Services; + +internal sealed class RuntimeEventRateLimiter +{ + private readonly ConcurrentDictionary<string, TokenBucket> _tenantBuckets = new(StringComparer.Ordinal); + private readonly ConcurrentDictionary<string, TokenBucket> _nodeBuckets = new(StringComparer.Ordinal); + private readonly TimeProvider _timeProvider; + private readonly IOptionsMonitor<ScannerWebServiceOptions> _optionsMonitor; + + public RuntimeEventRateLimiter(IOptionsMonitor<ScannerWebServiceOptions> optionsMonitor, TimeProvider timeProvider) + { + _optionsMonitor = optionsMonitor ?? throw new ArgumentNullException(nameof(optionsMonitor)); + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + } + + public RateLimitDecision Evaluate(IReadOnlyList<RuntimeEventEnvelope> envelopes) + { + ArgumentNullException.ThrowIfNull(envelopes); + if (envelopes.Count == 0) + { + return RateLimitDecision.Success; + } + + var options = _optionsMonitor.CurrentValue.Runtime ?? new ScannerWebServiceOptions.RuntimeOptions(); + var now = _timeProvider.GetUtcNow(); + + var tenantCounts = new Dictionary<string, int>(StringComparer.Ordinal); + var nodeCounts = new Dictionary<string, int>(StringComparer.Ordinal); + + foreach (var envelope in envelopes) + { + var tenant = envelope.Event.Tenant; + var node = envelope.Event.Node; + if (tenantCounts.TryGetValue(tenant, out var tenantCount)) + { + tenantCounts[tenant] = tenantCount + 1; + } + else + { + tenantCounts[tenant] = 1; + } + + var nodeKey = $"{tenant}|{node}"; + if (nodeCounts.TryGetValue(nodeKey, out var nodeCount)) + { + nodeCounts[nodeKey] = nodeCount + 1; + } + else + { + nodeCounts[nodeKey] = 1; + } + } + + var tenantDecision = TryAcquire( + _tenantBuckets, + tenantCounts, + options.PerTenantEventsPerSecond, + options.PerTenantBurst, + now, + scope: "tenant"); + + if (!tenantDecision.Allowed) + { + return tenantDecision; + } + + var nodeDecision = TryAcquire( + _nodeBuckets, + nodeCounts, + options.PerNodeEventsPerSecond, + options.PerNodeBurst, + now, + scope: "node"); + + return nodeDecision; + } + + private static RateLimitDecision TryAcquire( + ConcurrentDictionary<string, TokenBucket> buckets, + IReadOnlyDictionary<string, int> counts, + double ratePerSecond, + int burst, + DateTimeOffset now, + string scope) + { + if (counts.Count == 0) + { + return RateLimitDecision.Success; + } + + var acquired = new List<(TokenBucket bucket, double tokens)>(); + + foreach (var pair in counts) + { + var bucket = buckets.GetOrAdd( + pair.Key, + _ => new TokenBucket(burst, ratePerSecond, now)); + + lock (bucket.SyncRoot) + { + bucket.Refill(now); + if (bucket.Tokens + 1e-9 < pair.Value) + { + var deficit = pair.Value - bucket.Tokens; + var retryAfterSeconds = deficit / bucket.RefillRatePerSecond; + var retryAfter = retryAfterSeconds <= 0 + ? 
TimeSpan.FromSeconds(1) + : TimeSpan.FromSeconds(Math.Min(retryAfterSeconds, 3600)); + + // undo previously acquired tokens + foreach (var (acquiredBucket, tokens) in acquired) + { + lock (acquiredBucket.SyncRoot) + { + acquiredBucket.Tokens = Math.Min(acquiredBucket.Capacity, acquiredBucket.Tokens + tokens); + } + } + + return new RateLimitDecision(false, scope, pair.Key, retryAfter); + } + + bucket.Tokens -= pair.Value; + acquired.Add((bucket, pair.Value)); + } + } + + return RateLimitDecision.Success; + } + + private sealed class TokenBucket + { + public TokenBucket(double capacity, double refillRatePerSecond, DateTimeOffset now) + { + Capacity = capacity; + Tokens = capacity; + RefillRatePerSecond = refillRatePerSecond; + LastRefill = now; + } + + public double Capacity { get; } + public double Tokens { get; set; } + public double RefillRatePerSecond { get; } + public DateTimeOffset LastRefill { get; set; } + public object SyncRoot { get; } = new(); + + public void Refill(DateTimeOffset now) + { + if (now <= LastRefill) + { + return; + } + + var elapsedSeconds = (now - LastRefill).TotalSeconds; + if (elapsedSeconds <= 0) + { + return; + } + + Tokens = Math.Min(Capacity, Tokens + elapsedSeconds * RefillRatePerSecond); + LastRefill = now; + } + } +} + +internal readonly record struct RateLimitDecision(bool Allowed, string? Scope, string? Key, TimeSpan RetryAfter) +{ + public static RateLimitDecision Success { get; } = new(true, null, null, TimeSpan.Zero); +} diff --git a/src/StellaOps.Scanner.WebService/Services/RuntimePolicyService.cs b/src/Scanner/StellaOps.Scanner.WebService/Services/RuntimePolicyService.cs similarity index 97% rename from src/StellaOps.Scanner.WebService/Services/RuntimePolicyService.cs rename to src/Scanner/StellaOps.Scanner.WebService/Services/RuntimePolicyService.cs index f3a19c49..6166508a 100644 --- a/src/StellaOps.Scanner.WebService/Services/RuntimePolicyService.cs +++ b/src/Scanner/StellaOps.Scanner.WebService/Services/RuntimePolicyService.cs @@ -1,513 +1,513 @@ -using System.Collections.Immutable; -using System.Collections.ObjectModel; -using System.Diagnostics; -using System.Diagnostics.Metrics; -using System.Linq; -using System.Globalization; -using System.Text.Json; -using System.Text.Json.Serialization; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Policy; -using StellaOps.Scanner.Storage.Catalog; -using StellaOps.Scanner.Storage.Repositories; -using StellaOps.Scanner.WebService.Options; -using StellaOps.Zastava.Core.Contracts; -using RuntimePolicyVerdict = StellaOps.Zastava.Core.Contracts.PolicyVerdict; -using CanonicalPolicyVerdict = StellaOps.Policy.PolicyVerdict; -using CanonicalPolicyVerdictStatus = StellaOps.Policy.PolicyVerdictStatus; - -namespace StellaOps.Scanner.WebService.Services; - -internal interface IRuntimePolicyService -{ - Task<RuntimePolicyEvaluationResult> EvaluateAsync(RuntimePolicyEvaluationRequest request, CancellationToken cancellationToken); -} - -internal sealed class RuntimePolicyService : IRuntimePolicyService -{ - private const int MaxBuildIdsPerImage = 3; - - private static readonly Meter PolicyMeter = new("StellaOps.Scanner.RuntimePolicy", "1.0.0"); - private static readonly Counter<long> PolicyEvaluations = PolicyMeter.CreateCounter<long>("scanner.runtime.policy.requests", unit: "1", description: "Total runtime policy evaluation requests processed."); - private static readonly Histogram<double> PolicyEvaluationLatencyMs = 
PolicyMeter.CreateHistogram<double>("scanner.runtime.policy.latency.ms", unit: "ms", description: "Latency for runtime policy evaluations."); - - private readonly LinkRepository _linkRepository; - private readonly ArtifactRepository _artifactRepository; - private readonly RuntimeEventRepository _runtimeEventRepository; - private readonly PolicySnapshotStore _policySnapshotStore; - private readonly PolicyPreviewService _policyPreviewService; - private readonly IOptionsMonitor<ScannerWebServiceOptions> _optionsMonitor; - private readonly TimeProvider _timeProvider; - private readonly IRuntimeAttestationVerifier _attestationVerifier; - private readonly ILogger<RuntimePolicyService> _logger; - - public RuntimePolicyService( - LinkRepository linkRepository, - ArtifactRepository artifactRepository, - RuntimeEventRepository runtimeEventRepository, - PolicySnapshotStore policySnapshotStore, - PolicyPreviewService policyPreviewService, - IOptionsMonitor<ScannerWebServiceOptions> optionsMonitor, - TimeProvider timeProvider, - IRuntimeAttestationVerifier attestationVerifier, - ILogger<RuntimePolicyService> logger) - { - _linkRepository = linkRepository ?? throw new ArgumentNullException(nameof(linkRepository)); - _artifactRepository = artifactRepository ?? throw new ArgumentNullException(nameof(artifactRepository)); - _runtimeEventRepository = runtimeEventRepository ?? throw new ArgumentNullException(nameof(runtimeEventRepository)); - _policySnapshotStore = policySnapshotStore ?? throw new ArgumentNullException(nameof(policySnapshotStore)); - _policyPreviewService = policyPreviewService ?? throw new ArgumentNullException(nameof(policyPreviewService)); - _optionsMonitor = optionsMonitor ?? throw new ArgumentNullException(nameof(optionsMonitor)); - _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); - _attestationVerifier = attestationVerifier ?? throw new ArgumentNullException(nameof(attestationVerifier)); - _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - } - - public async Task<RuntimePolicyEvaluationResult> EvaluateAsync(RuntimePolicyEvaluationRequest request, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(request); - - var runtimeOptions = _optionsMonitor.CurrentValue.Runtime ?? new ScannerWebServiceOptions.RuntimeOptions(); - var ttlSeconds = Math.Max(1, runtimeOptions.PolicyCacheTtlSeconds); - - var now = _timeProvider.GetUtcNow(); - var expiresAt = now.AddSeconds(ttlSeconds); - - var stopwatch = Stopwatch.StartNew(); - var snapshot = await _policySnapshotStore.GetLatestAsync(cancellationToken).ConfigureAwait(false); - - var policyRevision = snapshot?.RevisionId; - var policyDigest = snapshot?.Digest; - - var results = new Dictionary<string, RuntimePolicyImageDecision>(StringComparer.Ordinal); - var evaluationTags = new KeyValuePair<string, object?>[] - { - new("policy_revision", policyRevision ?? "none"), - new("namespace", request.Namespace ?? 
"unspecified") - }; - - var buildIdObservations = await _runtimeEventRepository - .GetRecentBuildIdsAsync(request.Images, MaxBuildIdsPerImage, cancellationToken) - .ConfigureAwait(false); - - try - { - var evaluated = new HashSet<string>(StringComparer.Ordinal); - foreach (var image in request.Images) - { - if (!evaluated.Add(image)) - { - continue; - } - - var metadata = await ResolveImageMetadataAsync(image, cancellationToken).ConfigureAwait(false); - var (findings, heuristicReasons) = BuildFindings(image, metadata, request.Namespace); - if (snapshot is null) - { - heuristicReasons.Add("policy.snapshot.missing"); - } - - ImmutableArray<CanonicalPolicyVerdict> projectedVerdicts = ImmutableArray<CanonicalPolicyVerdict>.Empty; - ImmutableArray<PolicyIssue> issues = ImmutableArray<PolicyIssue>.Empty; - - try - { - if (!findings.IsDefaultOrEmpty && findings.Length > 0) - { - var previewRequest = new PolicyPreviewRequest( - image, - findings, - ImmutableArray<CanonicalPolicyVerdict>.Empty, - snapshot, - ProposedPolicy: null); - - var preview = await _policyPreviewService.PreviewAsync(previewRequest, cancellationToken).ConfigureAwait(false); - issues = preview.Issues; - if (!preview.Diffs.IsDefaultOrEmpty) - { - projectedVerdicts = preview.Diffs.Select(diff => diff.Projected).ToImmutableArray(); - } - } - } - catch (Exception ex) when (!cancellationToken.IsCancellationRequested) - { - _logger.LogWarning(ex, "Runtime policy preview failed for image {ImageDigest}; falling back to heuristic evaluation.", image); - } - - var normalizedImage = image.Trim().ToLowerInvariant(); - buildIdObservations.TryGetValue(normalizedImage, out var buildIdObservation); - - var decision = await BuildDecisionAsync( - image, - metadata, - heuristicReasons, - projectedVerdicts, - issues, - policyDigest, - buildIdObservation?.BuildIds, - cancellationToken).ConfigureAwait(false); - - results[image] = decision; - - _logger.LogInformation("Runtime policy evaluated image {ImageDigest} with verdict {Verdict} (Signed: {Signed}, HasSbom: {HasSbom}, Reasons: {ReasonsCount})", - image, - decision.PolicyVerdict, - decision.Signed, - decision.HasSbomReferrers, - decision.Reasons.Count); - } - } - finally - { - stopwatch.Stop(); - PolicyEvaluationLatencyMs.Record(stopwatch.Elapsed.TotalMilliseconds, evaluationTags); - } - - PolicyEvaluations.Add(results.Count, evaluationTags); - - var evaluationResult = new RuntimePolicyEvaluationResult( - ttlSeconds, - expiresAt, - policyRevision, - new ReadOnlyDictionary<string, RuntimePolicyImageDecision>(results)); - - return evaluationResult; - } - - private async Task<RuntimeImageMetadata> ResolveImageMetadataAsync(string imageDigest, CancellationToken cancellationToken) - { - var links = await _linkRepository.ListBySourceAsync(LinkSourceType.Image, imageDigest, cancellationToken).ConfigureAwait(false); - if (links.Count == 0) - { - return new RuntimeImageMetadata(imageDigest, false, false, null, MissingMetadata: true); - } - - var hasSbom = false; - var signed = false; - RuntimePolicyRekorReference? 
rekor = null; - - foreach (var link in links) - { - var artifact = await _artifactRepository.GetAsync(link.ArtifactId, cancellationToken).ConfigureAwait(false); - if (artifact is null) - { - continue; - } - - switch (artifact.Type) - { - case ArtifactDocumentType.ImageBom: - hasSbom = true; - break; - case ArtifactDocumentType.Attestation: - signed = true; - if (artifact.Rekor is { } rekorReference) - { - rekor = new RuntimePolicyRekorReference( - Normalize(rekorReference.Uuid), - Normalize(rekorReference.Url), - rekorReference.Index.HasValue); - } - break; - } - } - - return new RuntimeImageMetadata(imageDigest, signed, hasSbom, rekor, MissingMetadata: false); - } - - private (ImmutableArray<PolicyFinding> Findings, List<string> HeuristicReasons) BuildFindings(string imageDigest, RuntimeImageMetadata metadata, string? @namespace) - { - var findings = ImmutableArray.CreateBuilder<PolicyFinding>(); - var heuristics = new List<string>(); - - findings.Add(PolicyFinding.Create( - $"{imageDigest}#baseline", - PolicySeverity.None, - environment: @namespace, - source: "scanner.runtime")); - - if (metadata.MissingMetadata) - { - const string reason = "image.metadata.missing"; - heuristics.Add(reason); - findings.Add(PolicyFinding.Create( - $"{imageDigest}#metadata", - PolicySeverity.Critical, - environment: @namespace, - source: "scanner.runtime", - tags: ImmutableArray.Create(reason))); - } - - if (!metadata.Signed) - { - const string reason = "unsigned"; - heuristics.Add(reason); - findings.Add(PolicyFinding.Create( - $"{imageDigest}#signature", - PolicySeverity.High, - environment: @namespace, - source: "scanner.runtime", - tags: ImmutableArray.Create(reason))); - } - - if (!metadata.HasSbomReferrers) - { - const string reason = "missing SBOM"; - heuristics.Add(reason); - findings.Add(PolicyFinding.Create( - $"{imageDigest}#sbom", - PolicySeverity.High, - environment: @namespace, - source: "scanner.runtime", - tags: ImmutableArray.Create(reason))); - } - - return (findings.ToImmutable(), heuristics); - } - - private async Task<RuntimePolicyImageDecision> BuildDecisionAsync( - string imageDigest, - RuntimeImageMetadata metadata, - List<string> heuristicReasons, - ImmutableArray<CanonicalPolicyVerdict> projectedVerdicts, - ImmutableArray<PolicyIssue> issues, - string? policyDigest, - IReadOnlyList<string>? buildIds, - CancellationToken cancellationToken) - { - var reasons = new List<string>(heuristicReasons); - - var overallVerdict = MapVerdict(projectedVerdicts, heuristicReasons); - - if (!projectedVerdicts.IsDefaultOrEmpty) - { - foreach (var verdict in projectedVerdicts) - { - if (verdict.Status == CanonicalPolicyVerdictStatus.Pass) - { - continue; - } - - if (!string.IsNullOrWhiteSpace(verdict.RuleName)) - { - reasons.Add($"policy.rule.{verdict.RuleName}"); - } - else - { - reasons.Add($"policy.status.{verdict.Status.ToString().ToLowerInvariant()}"); - } - } - } - - var confidence = ComputeConfidence(projectedVerdicts, overallVerdict); - var quieted = !projectedVerdicts.IsDefaultOrEmpty && projectedVerdicts.Any(v => v.Quiet); - var quietedBy = !projectedVerdicts.IsDefaultOrEmpty - ? 
projectedVerdicts.FirstOrDefault(v => !string.IsNullOrWhiteSpace(v.QuietedBy))?.QuietedBy - : null; - - var metadataPayload = BuildMetadataPayload(heuristicReasons, projectedVerdicts, issues, policyDigest); - - var rekor = metadata.Rekor; - var verified = await _attestationVerifier.VerifyAsync(imageDigest, metadata.Rekor, cancellationToken).ConfigureAwait(false); - if (rekor is not null && verified.HasValue) - { - rekor = rekor with { Verified = verified.Value }; - } - - var normalizedReasons = reasons - .Where(reason => !string.IsNullOrWhiteSpace(reason)) - .Distinct(StringComparer.Ordinal) - .ToArray(); - - return new RuntimePolicyImageDecision( - overallVerdict, - metadata.Signed, - metadata.HasSbomReferrers, - normalizedReasons, - rekor, - metadataPayload, - confidence, - quieted, - quietedBy, - buildIds); - } - - private RuntimePolicyVerdict MapVerdict(ImmutableArray<CanonicalPolicyVerdict> projectedVerdicts, IReadOnlyList<string> heuristicReasons) - { - if (!projectedVerdicts.IsDefaultOrEmpty && projectedVerdicts.Length > 0) - { - var statuses = projectedVerdicts.Select(v => v.Status).ToArray(); - if (statuses.Any(status => status == CanonicalPolicyVerdictStatus.Blocked)) - { - return RuntimePolicyVerdict.Fail; - } - - if (statuses.Any(status => - status is CanonicalPolicyVerdictStatus.Warned - or CanonicalPolicyVerdictStatus.Deferred - or CanonicalPolicyVerdictStatus.Escalated - or CanonicalPolicyVerdictStatus.RequiresVex)) - { - return RuntimePolicyVerdict.Warn; - } - - return RuntimePolicyVerdict.Pass; - } - - if (heuristicReasons.Contains("image.metadata.missing", StringComparer.Ordinal) || - heuristicReasons.Contains("unsigned", StringComparer.Ordinal) || - heuristicReasons.Contains("missing SBOM", StringComparer.Ordinal)) - { - return RuntimePolicyVerdict.Fail; - } - - if (heuristicReasons.Contains("policy.snapshot.missing", StringComparer.Ordinal)) - { - return RuntimePolicyVerdict.Warn; - } - - return RuntimePolicyVerdict.Pass; - } - - private IDictionary<string, object?>? BuildMetadataPayload( - IReadOnlyList<string> heuristics, - ImmutableArray<CanonicalPolicyVerdict> projectedVerdicts, - ImmutableArray<PolicyIssue> issues, - string? policyDigest) - { - var payload = new Dictionary<string, object?>(StringComparer.OrdinalIgnoreCase) - { - ["heuristics"] = heuristics, - ["evaluatedAt"] = _timeProvider.GetUtcNow().UtcDateTime - }; - - if (!string.IsNullOrWhiteSpace(policyDigest)) - { - payload["policyDigest"] = policyDigest; - } - - if (!issues.IsDefaultOrEmpty && issues.Length > 0) - { - payload["issues"] = issues.Select(issue => new - { - code = issue.Code, - severity = issue.Severity.ToString(), - message = issue.Message, - path = issue.Path - }).ToArray(); - } - - if (!projectedVerdicts.IsDefaultOrEmpty && projectedVerdicts.Length > 0) - { - payload["findings"] = projectedVerdicts.Select(verdict => new - { - id = verdict.FindingId, - status = verdict.Status.ToString().ToLowerInvariant(), - rule = verdict.RuleName, - action = verdict.RuleAction, - score = verdict.Score, - quiet = verdict.Quiet, - quietedBy = verdict.QuietedBy, - inputs = verdict.GetInputs(), - confidence = verdict.UnknownConfidence, - confidenceBand = verdict.ConfidenceBand, - sourceTrust = verdict.SourceTrust, - reachability = verdict.Reachability - }).ToArray(); - } - - return payload.Count == 0 ? 
null : payload; - } - - private static double ComputeConfidence(ImmutableArray<CanonicalPolicyVerdict> projectedVerdicts, RuntimePolicyVerdict overall) - { - if (!projectedVerdicts.IsDefaultOrEmpty && projectedVerdicts.Length > 0) - { - var confidences = projectedVerdicts - .Select(v => v.UnknownConfidence) - .Where(value => value.HasValue) - .Select(value => value!.Value) - .ToArray(); - - if (confidences.Length > 0) - { - return Math.Clamp(confidences.Average(), 0.0, 1.0); - } - } - - return overall switch - { - RuntimePolicyVerdict.Pass => 0.95, - RuntimePolicyVerdict.Warn => 0.5, - RuntimePolicyVerdict.Fail => 0.1, - _ => 0.25 - }; - } - - private static string? Normalize(string? value) - => string.IsNullOrWhiteSpace(value) ? null : value; -} - -internal interface IRuntimeAttestationVerifier -{ - ValueTask<bool?> VerifyAsync(string imageDigest, RuntimePolicyRekorReference? rekor, CancellationToken cancellationToken); -} - -internal sealed class RuntimeAttestationVerifier : IRuntimeAttestationVerifier -{ - private readonly ILogger<RuntimeAttestationVerifier> _logger; - - public RuntimeAttestationVerifier(ILogger<RuntimeAttestationVerifier> logger) - { - _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - } - - public ValueTask<bool?> VerifyAsync(string imageDigest, RuntimePolicyRekorReference? rekor, CancellationToken cancellationToken) - { - if (rekor is null) - { - return ValueTask.FromResult<bool?>(null); - } - - if (rekor.Verified.HasValue) - { - return ValueTask.FromResult(rekor.Verified); - } - - _logger.LogDebug("No attestation verification metadata available for image {ImageDigest}.", imageDigest); - return ValueTask.FromResult<bool?>(null); - } -} - -internal sealed record RuntimePolicyEvaluationRequest( - string? Namespace, - IReadOnlyDictionary<string, string> Labels, - IReadOnlyList<string> Images); - -internal sealed record RuntimePolicyEvaluationResult( - int TtlSeconds, - DateTimeOffset ExpiresAtUtc, - string? PolicyRevision, - IReadOnlyDictionary<string, RuntimePolicyImageDecision> Results); - -internal sealed record RuntimePolicyImageDecision( - RuntimePolicyVerdict PolicyVerdict, - bool Signed, - bool HasSbomReferrers, - IReadOnlyList<string> Reasons, - RuntimePolicyRekorReference? Rekor, - IDictionary<string, object?>? Metadata, - double Confidence, - bool Quieted, - string? QuietedBy, - IReadOnlyList<string>? BuildIds); - -internal sealed record RuntimePolicyRekorReference(string? Uuid, string? Url, bool? Verified); - -internal sealed record RuntimeImageMetadata( - string ImageDigest, - bool Signed, - bool HasSbomReferrers, - RuntimePolicyRekorReference? 
Rekor, - bool MissingMetadata); +using System.Collections.Immutable; +using System.Collections.ObjectModel; +using System.Diagnostics; +using System.Diagnostics.Metrics; +using System.Linq; +using System.Globalization; +using System.Text.Json; +using System.Text.Json.Serialization; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Policy; +using StellaOps.Scanner.Storage.Catalog; +using StellaOps.Scanner.Storage.Repositories; +using StellaOps.Scanner.WebService.Options; +using StellaOps.Zastava.Core.Contracts; +using RuntimePolicyVerdict = StellaOps.Zastava.Core.Contracts.PolicyVerdict; +using CanonicalPolicyVerdict = StellaOps.Policy.PolicyVerdict; +using CanonicalPolicyVerdictStatus = StellaOps.Policy.PolicyVerdictStatus; + +namespace StellaOps.Scanner.WebService.Services; + +internal interface IRuntimePolicyService +{ + Task<RuntimePolicyEvaluationResult> EvaluateAsync(RuntimePolicyEvaluationRequest request, CancellationToken cancellationToken); +} + +internal sealed class RuntimePolicyService : IRuntimePolicyService +{ + private const int MaxBuildIdsPerImage = 3; + + private static readonly Meter PolicyMeter = new("StellaOps.Scanner.RuntimePolicy", "1.0.0"); + private static readonly Counter<long> PolicyEvaluations = PolicyMeter.CreateCounter<long>("scanner.runtime.policy.requests", unit: "1", description: "Total runtime policy evaluation requests processed."); + private static readonly Histogram<double> PolicyEvaluationLatencyMs = PolicyMeter.CreateHistogram<double>("scanner.runtime.policy.latency.ms", unit: "ms", description: "Latency for runtime policy evaluations."); + + private readonly LinkRepository _linkRepository; + private readonly ArtifactRepository _artifactRepository; + private readonly RuntimeEventRepository _runtimeEventRepository; + private readonly PolicySnapshotStore _policySnapshotStore; + private readonly PolicyPreviewService _policyPreviewService; + private readonly IOptionsMonitor<ScannerWebServiceOptions> _optionsMonitor; + private readonly TimeProvider _timeProvider; + private readonly IRuntimeAttestationVerifier _attestationVerifier; + private readonly ILogger<RuntimePolicyService> _logger; + + public RuntimePolicyService( + LinkRepository linkRepository, + ArtifactRepository artifactRepository, + RuntimeEventRepository runtimeEventRepository, + PolicySnapshotStore policySnapshotStore, + PolicyPreviewService policyPreviewService, + IOptionsMonitor<ScannerWebServiceOptions> optionsMonitor, + TimeProvider timeProvider, + IRuntimeAttestationVerifier attestationVerifier, + ILogger<RuntimePolicyService> logger) + { + _linkRepository = linkRepository ?? throw new ArgumentNullException(nameof(linkRepository)); + _artifactRepository = artifactRepository ?? throw new ArgumentNullException(nameof(artifactRepository)); + _runtimeEventRepository = runtimeEventRepository ?? throw new ArgumentNullException(nameof(runtimeEventRepository)); + _policySnapshotStore = policySnapshotStore ?? throw new ArgumentNullException(nameof(policySnapshotStore)); + _policyPreviewService = policyPreviewService ?? throw new ArgumentNullException(nameof(policyPreviewService)); + _optionsMonitor = optionsMonitor ?? throw new ArgumentNullException(nameof(optionsMonitor)); + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + _attestationVerifier = attestationVerifier ?? throw new ArgumentNullException(nameof(attestationVerifier)); + _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + } + + public async Task<RuntimePolicyEvaluationResult> EvaluateAsync(RuntimePolicyEvaluationRequest request, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(request); + + var runtimeOptions = _optionsMonitor.CurrentValue.Runtime ?? new ScannerWebServiceOptions.RuntimeOptions(); + var ttlSeconds = Math.Max(1, runtimeOptions.PolicyCacheTtlSeconds); + + var now = _timeProvider.GetUtcNow(); + var expiresAt = now.AddSeconds(ttlSeconds); + + var stopwatch = Stopwatch.StartNew(); + var snapshot = await _policySnapshotStore.GetLatestAsync(cancellationToken).ConfigureAwait(false); + + var policyRevision = snapshot?.RevisionId; + var policyDigest = snapshot?.Digest; + + var results = new Dictionary<string, RuntimePolicyImageDecision>(StringComparer.Ordinal); + var evaluationTags = new KeyValuePair<string, object?>[] + { + new("policy_revision", policyRevision ?? "none"), + new("namespace", request.Namespace ?? "unspecified") + }; + + var buildIdObservations = await _runtimeEventRepository + .GetRecentBuildIdsAsync(request.Images, MaxBuildIdsPerImage, cancellationToken) + .ConfigureAwait(false); + + try + { + var evaluated = new HashSet<string>(StringComparer.Ordinal); + foreach (var image in request.Images) + { + if (!evaluated.Add(image)) + { + continue; + } + + var metadata = await ResolveImageMetadataAsync(image, cancellationToken).ConfigureAwait(false); + var (findings, heuristicReasons) = BuildFindings(image, metadata, request.Namespace); + if (snapshot is null) + { + heuristicReasons.Add("policy.snapshot.missing"); + } + + ImmutableArray<CanonicalPolicyVerdict> projectedVerdicts = ImmutableArray<CanonicalPolicyVerdict>.Empty; + ImmutableArray<PolicyIssue> issues = ImmutableArray<PolicyIssue>.Empty; + + try + { + if (!findings.IsDefaultOrEmpty && findings.Length > 0) + { + var previewRequest = new PolicyPreviewRequest( + image, + findings, + ImmutableArray<CanonicalPolicyVerdict>.Empty, + snapshot, + ProposedPolicy: null); + + var preview = await _policyPreviewService.PreviewAsync(previewRequest, cancellationToken).ConfigureAwait(false); + issues = preview.Issues; + if (!preview.Diffs.IsDefaultOrEmpty) + { + projectedVerdicts = preview.Diffs.Select(diff => diff.Projected).ToImmutableArray(); + } + } + } + catch (Exception ex) when (!cancellationToken.IsCancellationRequested) + { + _logger.LogWarning(ex, "Runtime policy preview failed for image {ImageDigest}; falling back to heuristic evaluation.", image); + } + + var normalizedImage = image.Trim().ToLowerInvariant(); + buildIdObservations.TryGetValue(normalizedImage, out var buildIdObservation); + + var decision = await BuildDecisionAsync( + image, + metadata, + heuristicReasons, + projectedVerdicts, + issues, + policyDigest, + buildIdObservation?.BuildIds, + cancellationToken).ConfigureAwait(false); + + results[image] = decision; + + _logger.LogInformation("Runtime policy evaluated image {ImageDigest} with verdict {Verdict} (Signed: {Signed}, HasSbom: {HasSbom}, Reasons: {ReasonsCount})", + image, + decision.PolicyVerdict, + decision.Signed, + decision.HasSbomReferrers, + decision.Reasons.Count); + } + } + finally + { + stopwatch.Stop(); + PolicyEvaluationLatencyMs.Record(stopwatch.Elapsed.TotalMilliseconds, evaluationTags); + } + + PolicyEvaluations.Add(results.Count, evaluationTags); + + var evaluationResult = new RuntimePolicyEvaluationResult( + ttlSeconds, + expiresAt, + policyRevision, + new ReadOnlyDictionary<string, RuntimePolicyImageDecision>(results)); + + 
return evaluationResult; + } + + private async Task<RuntimeImageMetadata> ResolveImageMetadataAsync(string imageDigest, CancellationToken cancellationToken) + { + var links = await _linkRepository.ListBySourceAsync(LinkSourceType.Image, imageDigest, cancellationToken).ConfigureAwait(false); + if (links.Count == 0) + { + return new RuntimeImageMetadata(imageDigest, false, false, null, MissingMetadata: true); + } + + var hasSbom = false; + var signed = false; + RuntimePolicyRekorReference? rekor = null; + + foreach (var link in links) + { + var artifact = await _artifactRepository.GetAsync(link.ArtifactId, cancellationToken).ConfigureAwait(false); + if (artifact is null) + { + continue; + } + + switch (artifact.Type) + { + case ArtifactDocumentType.ImageBom: + hasSbom = true; + break; + case ArtifactDocumentType.Attestation: + signed = true; + if (artifact.Rekor is { } rekorReference) + { + rekor = new RuntimePolicyRekorReference( + Normalize(rekorReference.Uuid), + Normalize(rekorReference.Url), + rekorReference.Index.HasValue); + } + break; + } + } + + return new RuntimeImageMetadata(imageDigest, signed, hasSbom, rekor, MissingMetadata: false); + } + + private (ImmutableArray<PolicyFinding> Findings, List<string> HeuristicReasons) BuildFindings(string imageDigest, RuntimeImageMetadata metadata, string? @namespace) + { + var findings = ImmutableArray.CreateBuilder<PolicyFinding>(); + var heuristics = new List<string>(); + + findings.Add(PolicyFinding.Create( + $"{imageDigest}#baseline", + PolicySeverity.None, + environment: @namespace, + source: "scanner.runtime")); + + if (metadata.MissingMetadata) + { + const string reason = "image.metadata.missing"; + heuristics.Add(reason); + findings.Add(PolicyFinding.Create( + $"{imageDigest}#metadata", + PolicySeverity.Critical, + environment: @namespace, + source: "scanner.runtime", + tags: ImmutableArray.Create(reason))); + } + + if (!metadata.Signed) + { + const string reason = "unsigned"; + heuristics.Add(reason); + findings.Add(PolicyFinding.Create( + $"{imageDigest}#signature", + PolicySeverity.High, + environment: @namespace, + source: "scanner.runtime", + tags: ImmutableArray.Create(reason))); + } + + if (!metadata.HasSbomReferrers) + { + const string reason = "missing SBOM"; + heuristics.Add(reason); + findings.Add(PolicyFinding.Create( + $"{imageDigest}#sbom", + PolicySeverity.High, + environment: @namespace, + source: "scanner.runtime", + tags: ImmutableArray.Create(reason))); + } + + return (findings.ToImmutable(), heuristics); + } + + private async Task<RuntimePolicyImageDecision> BuildDecisionAsync( + string imageDigest, + RuntimeImageMetadata metadata, + List<string> heuristicReasons, + ImmutableArray<CanonicalPolicyVerdict> projectedVerdicts, + ImmutableArray<PolicyIssue> issues, + string? policyDigest, + IReadOnlyList<string>? 
buildIds, + CancellationToken cancellationToken) + { + var reasons = new List<string>(heuristicReasons); + + var overallVerdict = MapVerdict(projectedVerdicts, heuristicReasons); + + if (!projectedVerdicts.IsDefaultOrEmpty) + { + foreach (var verdict in projectedVerdicts) + { + if (verdict.Status == CanonicalPolicyVerdictStatus.Pass) + { + continue; + } + + if (!string.IsNullOrWhiteSpace(verdict.RuleName)) + { + reasons.Add($"policy.rule.{verdict.RuleName}"); + } + else + { + reasons.Add($"policy.status.{verdict.Status.ToString().ToLowerInvariant()}"); + } + } + } + + var confidence = ComputeConfidence(projectedVerdicts, overallVerdict); + var quieted = !projectedVerdicts.IsDefaultOrEmpty && projectedVerdicts.Any(v => v.Quiet); + var quietedBy = !projectedVerdicts.IsDefaultOrEmpty + ? projectedVerdicts.FirstOrDefault(v => !string.IsNullOrWhiteSpace(v.QuietedBy))?.QuietedBy + : null; + + var metadataPayload = BuildMetadataPayload(heuristicReasons, projectedVerdicts, issues, policyDigest); + + var rekor = metadata.Rekor; + var verified = await _attestationVerifier.VerifyAsync(imageDigest, metadata.Rekor, cancellationToken).ConfigureAwait(false); + if (rekor is not null && verified.HasValue) + { + rekor = rekor with { Verified = verified.Value }; + } + + var normalizedReasons = reasons + .Where(reason => !string.IsNullOrWhiteSpace(reason)) + .Distinct(StringComparer.Ordinal) + .ToArray(); + + return new RuntimePolicyImageDecision( + overallVerdict, + metadata.Signed, + metadata.HasSbomReferrers, + normalizedReasons, + rekor, + metadataPayload, + confidence, + quieted, + quietedBy, + buildIds); + } + + private RuntimePolicyVerdict MapVerdict(ImmutableArray<CanonicalPolicyVerdict> projectedVerdicts, IReadOnlyList<string> heuristicReasons) + { + if (!projectedVerdicts.IsDefaultOrEmpty && projectedVerdicts.Length > 0) + { + var statuses = projectedVerdicts.Select(v => v.Status).ToArray(); + if (statuses.Any(status => status == CanonicalPolicyVerdictStatus.Blocked)) + { + return RuntimePolicyVerdict.Fail; + } + + if (statuses.Any(status => + status is CanonicalPolicyVerdictStatus.Warned + or CanonicalPolicyVerdictStatus.Deferred + or CanonicalPolicyVerdictStatus.Escalated + or CanonicalPolicyVerdictStatus.RequiresVex)) + { + return RuntimePolicyVerdict.Warn; + } + + return RuntimePolicyVerdict.Pass; + } + + if (heuristicReasons.Contains("image.metadata.missing", StringComparer.Ordinal) || + heuristicReasons.Contains("unsigned", StringComparer.Ordinal) || + heuristicReasons.Contains("missing SBOM", StringComparer.Ordinal)) + { + return RuntimePolicyVerdict.Fail; + } + + if (heuristicReasons.Contains("policy.snapshot.missing", StringComparer.Ordinal)) + { + return RuntimePolicyVerdict.Warn; + } + + return RuntimePolicyVerdict.Pass; + } + + private IDictionary<string, object?>? BuildMetadataPayload( + IReadOnlyList<string> heuristics, + ImmutableArray<CanonicalPolicyVerdict> projectedVerdicts, + ImmutableArray<PolicyIssue> issues, + string? 
policyDigest) + { + var payload = new Dictionary<string, object?>(StringComparer.OrdinalIgnoreCase) + { + ["heuristics"] = heuristics, + ["evaluatedAt"] = _timeProvider.GetUtcNow().UtcDateTime + }; + + if (!string.IsNullOrWhiteSpace(policyDigest)) + { + payload["policyDigest"] = policyDigest; + } + + if (!issues.IsDefaultOrEmpty && issues.Length > 0) + { + payload["issues"] = issues.Select(issue => new + { + code = issue.Code, + severity = issue.Severity.ToString(), + message = issue.Message, + path = issue.Path + }).ToArray(); + } + + if (!projectedVerdicts.IsDefaultOrEmpty && projectedVerdicts.Length > 0) + { + payload["findings"] = projectedVerdicts.Select(verdict => new + { + id = verdict.FindingId, + status = verdict.Status.ToString().ToLowerInvariant(), + rule = verdict.RuleName, + action = verdict.RuleAction, + score = verdict.Score, + quiet = verdict.Quiet, + quietedBy = verdict.QuietedBy, + inputs = verdict.GetInputs(), + confidence = verdict.UnknownConfidence, + confidenceBand = verdict.ConfidenceBand, + sourceTrust = verdict.SourceTrust, + reachability = verdict.Reachability + }).ToArray(); + } + + return payload.Count == 0 ? null : payload; + } + + private static double ComputeConfidence(ImmutableArray<CanonicalPolicyVerdict> projectedVerdicts, RuntimePolicyVerdict overall) + { + if (!projectedVerdicts.IsDefaultOrEmpty && projectedVerdicts.Length > 0) + { + var confidences = projectedVerdicts + .Select(v => v.UnknownConfidence) + .Where(value => value.HasValue) + .Select(value => value!.Value) + .ToArray(); + + if (confidences.Length > 0) + { + return Math.Clamp(confidences.Average(), 0.0, 1.0); + } + } + + return overall switch + { + RuntimePolicyVerdict.Pass => 0.95, + RuntimePolicyVerdict.Warn => 0.5, + RuntimePolicyVerdict.Fail => 0.1, + _ => 0.25 + }; + } + + private static string? Normalize(string? value) + => string.IsNullOrWhiteSpace(value) ? null : value; +} + +internal interface IRuntimeAttestationVerifier +{ + ValueTask<bool?> VerifyAsync(string imageDigest, RuntimePolicyRekorReference? rekor, CancellationToken cancellationToken); +} + +internal sealed class RuntimeAttestationVerifier : IRuntimeAttestationVerifier +{ + private readonly ILogger<RuntimeAttestationVerifier> _logger; + + public RuntimeAttestationVerifier(ILogger<RuntimeAttestationVerifier> logger) + { + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public ValueTask<bool?> VerifyAsync(string imageDigest, RuntimePolicyRekorReference? rekor, CancellationToken cancellationToken) + { + if (rekor is null) + { + return ValueTask.FromResult<bool?>(null); + } + + if (rekor.Verified.HasValue) + { + return ValueTask.FromResult(rekor.Verified); + } + + _logger.LogDebug("No attestation verification metadata available for image {ImageDigest}.", imageDigest); + return ValueTask.FromResult<bool?>(null); + } +} + +internal sealed record RuntimePolicyEvaluationRequest( + string? Namespace, + IReadOnlyDictionary<string, string> Labels, + IReadOnlyList<string> Images); + +internal sealed record RuntimePolicyEvaluationResult( + int TtlSeconds, + DateTimeOffset ExpiresAtUtc, + string? PolicyRevision, + IReadOnlyDictionary<string, RuntimePolicyImageDecision> Results); + +internal sealed record RuntimePolicyImageDecision( + RuntimePolicyVerdict PolicyVerdict, + bool Signed, + bool HasSbomReferrers, + IReadOnlyList<string> Reasons, + RuntimePolicyRekorReference? Rekor, + IDictionary<string, object?>? Metadata, + double Confidence, + bool Quieted, + string? QuietedBy, + IReadOnlyList<string>? 
BuildIds);
+
+internal sealed record RuntimePolicyRekorReference(string? Uuid, string? Url, bool? Verified);
+
+internal sealed record RuntimeImageMetadata(
+    string ImageDigest,
+    bool Signed,
+    bool HasSbomReferrers,
+    RuntimePolicyRekorReference? Rekor,
+    bool MissingMetadata);
diff --git a/src/StellaOps.Scanner.WebService/Services/ScanProgressStream.cs b/src/Scanner/StellaOps.Scanner.WebService/Services/ScanProgressStream.cs
similarity index 100%
rename from src/StellaOps.Scanner.WebService/Services/ScanProgressStream.cs
rename to src/Scanner/StellaOps.Scanner.WebService/Services/ScanProgressStream.cs
diff --git a/src/Scanner/StellaOps.Scanner.WebService/StellaOps.Scanner.WebService.csproj b/src/Scanner/StellaOps.Scanner.WebService/StellaOps.Scanner.WebService.csproj
new file mode 100644
index 00000000..a1406d0b
--- /dev/null
+++ b/src/Scanner/StellaOps.Scanner.WebService/StellaOps.Scanner.WebService.csproj
@@ -0,0 +1,34 @@
+<?xml version='1.0' encoding='utf-8'?>
+<Project Sdk="Microsoft.NET.Sdk.Web">
+  <PropertyGroup>
+    <TargetFramework>net10.0</TargetFramework>
+    <LangVersion>preview</LangVersion>
+    <Nullable>enable</Nullable>
+    <ImplicitUsings>enable</ImplicitUsings>
+    <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
+    <RootNamespace>StellaOps.Scanner.WebService</RootNamespace>
+  </PropertyGroup>
+  <ItemGroup>
+    <PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="10.0.0-rc.2.25502.107" />
+    <PackageReference Include="Serilog.AspNetCore" Version="8.0.1" />
+    <PackageReference Include="Serilog.Sinks.Console" Version="5.0.1" />
+    <PackageReference Include="YamlDotNet" Version="13.7.1" />
+    <PackageReference Include="StackExchange.Redis" Version="2.8.24" />
+  </ItemGroup>
+  <ItemGroup>
+    <ProjectReference Include="../../__Libraries/StellaOps.Configuration/StellaOps.Configuration.csproj" />
+    <ProjectReference Include="../../__Libraries/StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj" />
+    <ProjectReference Include="../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" />
+    <ProjectReference Include="../../Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOps.Auth.Abstractions.csproj" />
+    <ProjectReference Include="../../Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOps.Auth.Client.csproj" />
+    <ProjectReference Include="../../Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOps.Auth.ServerIntegration.csproj" />
+    <ProjectReference Include="../../Policy/__Libraries/StellaOps.Policy/StellaOps.Policy.csproj" />
+    <ProjectReference Include="../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" />
+    <ProjectReference Include="../../__Libraries/StellaOps.Cryptography.DependencyInjection/StellaOps.Cryptography.DependencyInjection.csproj" />
+    <ProjectReference Include="../../__Libraries/StellaOps.Cryptography.Plugin.BouncyCastle/StellaOps.Cryptography.Plugin.BouncyCastle.csproj" />
+    <ProjectReference Include="../../Notify/__Libraries/StellaOps.Notify.Models/StellaOps.Notify.Models.csproj" />
+    <ProjectReference Include="../__Libraries/StellaOps.Scanner.Cache/StellaOps.Scanner.Cache.csproj" />
+    <ProjectReference Include="../__Libraries/StellaOps.Scanner.Storage/StellaOps.Scanner.Storage.csproj" />
+    <ProjectReference Include="../../Zastava/__Libraries/StellaOps.Zastava.Core/StellaOps.Zastava.Core.csproj" />
+  </ItemGroup>
+</Project>
\ No newline at end of file
diff --git a/src/StellaOps.Scanner.WebService/TASKS.md b/src/Scanner/StellaOps.Scanner.WebService/TASKS.md
similarity index 97%
rename from src/StellaOps.Scanner.WebService/TASKS.md rename to src/Scanner/StellaOps.Scanner.WebService/TASKS.md index 3a5c634e..55afa383 100644 --- a/src/StellaOps.Scanner.WebService/TASKS.md +++ b/src/Scanner/StellaOps.Scanner.WebService/TASKS.md @@ -1,15 +1,15 @@ -# Scanner WebService Task Board - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| SCANNER-WEB-09-101 | DONE (2025-10-18) | Scanner WebService Guild | SCANNER-CORE-09-501 | Stand up minimal API host with Authority OpTok + DPoP enforcement, health/ready endpoints, and restart-time plug-in loader per architecture §1, §4. | Host boots with configuration validation, `/healthz` and `/readyz` return 200, Authority middleware enforced in integration tests. | -| SCANNER-WEB-09-102 | DONE (2025-10-18) | Scanner WebService Guild | SCANNER-WEB-09-101, SCANNER-QUEUE-09-401 | Implement `/api/v1/scans` submission/status endpoints with deterministic IDs, validation, and cancellation tokens. | Contract documented, e2e test posts scan request and retrieves status, cancellation token honoured. | -| SCANNER-WEB-09-103 | DONE (2025-10-19) | Scanner WebService Guild | SCANNER-WEB-09-102, SCANNER-CORE-09-502 | Emit scan progress via SSE/JSONL with correlation IDs and deterministic timestamps; document API reference. | Streaming endpoint verified in tests, timestamps formatted ISO-8601 UTC, docs updated in `docs/09_API_CLI_REFERENCE.md`. | -| SCANNER-WEB-09-104 | DONE (2025-10-19) | Scanner WebService Guild | SCANNER-STORAGE-09-301, SCANNER-QUEUE-09-401 | Bind configuration for Mongo, MinIO, queue, feature flags; add startup diagnostics and fail-fast policy for missing deps. | Misconfiguration fails fast with actionable errors, configuration bound tests pass, diagnostics logged with correlation IDs. | -| SCANNER-POLICY-09-105 | DONE (2025-10-19) | Scanner WebService Guild | POLICY-CORE-09-001 | Integrate policy schema loader + diagnostics + OpenAPI (YAML ignore rules, VEX include/exclude, vendor precedence). | Policy endpoints documented; validation surfaces actionable errors; OpenAPI schema published. | -| SCANNER-POLICY-09-106 | DONE (2025-10-19) | Scanner WebService Guild | POLICY-CORE-09-002, SCANNER-POLICY-09-105 | `/reports` verdict assembly (Feedser/Vexer/Policy merge) + signed response envelope. | Aggregated report includes policy metadata; integration test verifies signed response; docs updated. | -| SCANNER-POLICY-09-107 | DONE (2025-10-19) | Scanner WebService Guild | POLICY-CORE-09-005, SCANNER-POLICY-09-106 | Surface score inputs, config version, and `quietedBy` provenance in `/reports` response and signed payload; document schema changes. | `/reports` JSON + DSSE contain score, reachability, sourceTrust, confidenceBand, quiet provenance; contract tests updated; docs refreshed. | -| SCANNER-WEB-10-201 | DONE (2025-10-19) | Scanner WebService Guild | SCANNER-CACHE-10-101 | Register scanner cache services and maintenance loop within WebService host. | `AddScannerCache` wired for configuration binding; maintenance service skips when disabled; project references updated. 
| +# Scanner WebService Task Board + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| SCANNER-WEB-09-101 | DONE (2025-10-18) | Scanner WebService Guild | SCANNER-CORE-09-501 | Stand up minimal API host with Authority OpTok + DPoP enforcement, health/ready endpoints, and restart-time plug-in loader per architecture §1, §4. | Host boots with configuration validation, `/healthz` and `/readyz` return 200, Authority middleware enforced in integration tests. | +| SCANNER-WEB-09-102 | DONE (2025-10-18) | Scanner WebService Guild | SCANNER-WEB-09-101, SCANNER-QUEUE-09-401 | Implement `/api/v1/scans` submission/status endpoints with deterministic IDs, validation, and cancellation tokens. | Contract documented, e2e test posts scan request and retrieves status, cancellation token honoured. | +| SCANNER-WEB-09-103 | DONE (2025-10-19) | Scanner WebService Guild | SCANNER-WEB-09-102, SCANNER-CORE-09-502 | Emit scan progress via SSE/JSONL with correlation IDs and deterministic timestamps; document API reference. | Streaming endpoint verified in tests, timestamps formatted ISO-8601 UTC, docs updated in `docs/09_API_CLI_REFERENCE.md`. | +| SCANNER-WEB-09-104 | DONE (2025-10-19) | Scanner WebService Guild | SCANNER-STORAGE-09-301, SCANNER-QUEUE-09-401 | Bind configuration for Mongo, MinIO, queue, feature flags; add startup diagnostics and fail-fast policy for missing deps. | Misconfiguration fails fast with actionable errors, configuration bound tests pass, diagnostics logged with correlation IDs. | +| SCANNER-POLICY-09-105 | DONE (2025-10-19) | Scanner WebService Guild | POLICY-CORE-09-001 | Integrate policy schema loader + diagnostics + OpenAPI (YAML ignore rules, VEX include/exclude, vendor precedence). | Policy endpoints documented; validation surfaces actionable errors; OpenAPI schema published. | +| SCANNER-POLICY-09-106 | DONE (2025-10-19) | Scanner WebService Guild | POLICY-CORE-09-002, SCANNER-POLICY-09-105 | `/reports` verdict assembly (Feedser/Vexer/Policy merge) + signed response envelope. | Aggregated report includes policy metadata; integration test verifies signed response; docs updated. | +| SCANNER-POLICY-09-107 | DONE (2025-10-19) | Scanner WebService Guild | POLICY-CORE-09-005, SCANNER-POLICY-09-106 | Surface score inputs, config version, and `quietedBy` provenance in `/reports` response and signed payload; document schema changes. | `/reports` JSON + DSSE contain score, reachability, sourceTrust, confidenceBand, quiet provenance; contract tests updated; docs refreshed. | +| SCANNER-WEB-10-201 | DONE (2025-10-19) | Scanner WebService Guild | SCANNER-CACHE-10-101 | Register scanner cache services and maintenance loop within WebService host. | `AddScannerCache` wired for configuration binding; maintenance service skips when disabled; project references updated. | | SCANNER-RUNTIME-12-301 | DONE (2025-10-20) | Scanner WebService Guild | ZASTAVA-CORE-12-201 | Implement `/runtime/events` ingestion endpoint with validation, batching, and storage hooks per Zastava contract. | Observer fixtures POST events, data persisted and acked; invalid payloads rejected with deterministic errors. | | SCANNER-RUNTIME-12-302 | DONE (2025-10-24) | Scanner WebService Guild | SCANNER-RUNTIME-12-301, ZASTAVA-CORE-12-201 | Implement `/policy/runtime` endpoint joining SBOM baseline + policy verdict, returning admission guidance. Coordinate with CLI (`CLI-RUNTIME-13-008`) before GA to lock response field names/metadata. 
| Webhook integration test passes; responses include verdict, TTL, reasons; metrics/logging added; CLI contract review signed off. | | SCANNER-RUNTIME-12-303 | DONE (2025-10-24) | Scanner WebService Guild | SCANNER-RUNTIME-12-302 | Replace `/policy/runtime` heuristic with canonical policy evaluation (Feedser/Vexer inputs, PolicyPreviewService) so results align with `/reports`. | Runtime policy endpoint now pipes findings through `PolicyPreviewService`, emits canonical verdicts/confidence/quiet metadata, and updated tests cover pass/warn/fail paths + CLI contract fixtures. | @@ -35,7 +35,7 @@ ## Notes - 2025-10-19: Sprint 9 streaming + policy endpoints (SCANNER-WEB-09-103, SCANNER-POLICY-09-105/106/107) landed with SSE/JSONL, OpenAPI, signed report coverage documented in `docs/09_API_CLI_REFERENCE.md`. -- 2025-10-20: Re-ran `dotnet test src/StellaOps.Scanner.WebService.Tests/StellaOps.Scanner.WebService.Tests.csproj --filter FullyQualifiedName~ReportsEndpointsTests` to confirm DSSE/report regressions stay green after backlog sync. +- 2025-10-20: Re-ran `dotnet test src/Scanner/__Tests/StellaOps.Scanner.WebService.Tests/StellaOps.Scanner.WebService.Tests.csproj --filter FullyQualifiedName~ReportsEndpointsTests` to confirm DSSE/report regressions stay green after backlog sync. - 2025-10-20: SCANNER-RUNTIME-12-301 underway – `/runtime/events` ingest hitting Mongo with TTL + token-bucket rate limiting; integration tests (`RuntimeEndpointsTests`) green and docs updated with batch contract. - 2025-10-20: Follow-ups SCANNER-RUNTIME-12-303/304/305 track canonical verdict integration, attestation verification, and cross-guild fixture validation for runtime APIs. - 2025-10-21: Hardened progress streaming determinism by sorting `data` payload keys within `ScanProgressStream`; added regression `ProgressStreamDataKeysAreSortedDeterministically` ensuring JSONL ordering. 
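The determinism note above (sorting the `data` payload keys inside `ScanProgressStream` so JSONL ordering stays stable) can be pictured with the short sketch below. This is not the repository's `ScanProgressStream` implementation; the helper name `SerializeProgressData` and the dictionary-shaped payload are assumptions made purely for illustration of the ordering technique.

    // Illustrative sketch only: serialize a progress event's "data" map with
    // ordinally sorted keys so each emitted JSONL line is byte-stable across runs.
    using System;
    using System.Collections.Generic;
    using System.Linq;
    using System.Text.Json;

    internal static class DeterministicProgressSerializer
    {
        private static readonly JsonSerializerOptions Options = new(JsonSerializerDefaults.Web);

        public static string SerializeProgressData(IReadOnlyDictionary<string, string> data)
        {
            // SortedDictionary with StringComparer.Ordinal enumerates keys in a
            // culture-invariant order regardless of insertion order, so the JSON
            // property order (and therefore the JSONL output) is deterministic.
            var ordered = new SortedDictionary<string, string>(
                data.ToDictionary(pair => pair.Key, pair => pair.Value),
                StringComparer.Ordinal);

            return JsonSerializer.Serialize(ordered, Options);
        }
    }

Serializing through a `SortedDictionary` keyed with `StringComparer.Ordinal` is one way to get the stable key order that the regression `ProgressStreamDataKeysAreSortedDeterministically` mentioned above guards at the JSONL level; any equivalent approach (for example, sorting keys before writing with `Utf8JsonWriter`) achieves the same property.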
diff --git a/src/StellaOps.Scanner.WebService/Utilities/ScanIdGenerator.cs b/src/Scanner/StellaOps.Scanner.WebService/Utilities/ScanIdGenerator.cs similarity index 100% rename from src/StellaOps.Scanner.WebService/Utilities/ScanIdGenerator.cs rename to src/Scanner/StellaOps.Scanner.WebService/Utilities/ScanIdGenerator.cs diff --git a/src/StellaOps.Scanner.Worker/AGENTS.md b/src/Scanner/StellaOps.Scanner.Worker/AGENTS.md similarity index 100% rename from src/StellaOps.Scanner.Worker/AGENTS.md rename to src/Scanner/StellaOps.Scanner.Worker/AGENTS.md diff --git a/src/StellaOps.Scanner.Worker/Diagnostics/ScannerWorkerInstrumentation.cs b/src/Scanner/StellaOps.Scanner.Worker/Diagnostics/ScannerWorkerInstrumentation.cs similarity index 100% rename from src/StellaOps.Scanner.Worker/Diagnostics/ScannerWorkerInstrumentation.cs rename to src/Scanner/StellaOps.Scanner.Worker/Diagnostics/ScannerWorkerInstrumentation.cs diff --git a/src/StellaOps.Scanner.Worker/Diagnostics/ScannerWorkerMetrics.cs b/src/Scanner/StellaOps.Scanner.Worker/Diagnostics/ScannerWorkerMetrics.cs similarity index 100% rename from src/StellaOps.Scanner.Worker/Diagnostics/ScannerWorkerMetrics.cs rename to src/Scanner/StellaOps.Scanner.Worker/Diagnostics/ScannerWorkerMetrics.cs diff --git a/src/StellaOps.Scanner.Worker/Diagnostics/TelemetryExtensions.cs b/src/Scanner/StellaOps.Scanner.Worker/Diagnostics/TelemetryExtensions.cs similarity index 100% rename from src/StellaOps.Scanner.Worker/Diagnostics/TelemetryExtensions.cs rename to src/Scanner/StellaOps.Scanner.Worker/Diagnostics/TelemetryExtensions.cs diff --git a/src/StellaOps.Scanner.Worker/Hosting/ScannerWorkerHostedService.cs b/src/Scanner/StellaOps.Scanner.Worker/Hosting/ScannerWorkerHostedService.cs similarity index 100% rename from src/StellaOps.Scanner.Worker/Hosting/ScannerWorkerHostedService.cs rename to src/Scanner/StellaOps.Scanner.Worker/Hosting/ScannerWorkerHostedService.cs diff --git a/src/StellaOps.Scanner.Worker/Options/ScannerWorkerOptions.cs b/src/Scanner/StellaOps.Scanner.Worker/Options/ScannerWorkerOptions.cs similarity index 100% rename from src/StellaOps.Scanner.Worker/Options/ScannerWorkerOptions.cs rename to src/Scanner/StellaOps.Scanner.Worker/Options/ScannerWorkerOptions.cs diff --git a/src/StellaOps.Scanner.Worker/Options/ScannerWorkerOptionsValidator.cs b/src/Scanner/StellaOps.Scanner.Worker/Options/ScannerWorkerOptionsValidator.cs similarity index 100% rename from src/StellaOps.Scanner.Worker/Options/ScannerWorkerOptionsValidator.cs rename to src/Scanner/StellaOps.Scanner.Worker/Options/ScannerWorkerOptionsValidator.cs diff --git a/src/StellaOps.Scanner.Worker/Processing/AnalyzerStageExecutor.cs b/src/Scanner/StellaOps.Scanner.Worker/Processing/AnalyzerStageExecutor.cs similarity index 100% rename from src/StellaOps.Scanner.Worker/Processing/AnalyzerStageExecutor.cs rename to src/Scanner/StellaOps.Scanner.Worker/Processing/AnalyzerStageExecutor.cs diff --git a/src/StellaOps.Scanner.Worker/Processing/CompositeScanAnalyzerDispatcher.cs b/src/Scanner/StellaOps.Scanner.Worker/Processing/CompositeScanAnalyzerDispatcher.cs similarity index 97% rename from src/StellaOps.Scanner.Worker/Processing/CompositeScanAnalyzerDispatcher.cs rename to src/Scanner/StellaOps.Scanner.Worker/Processing/CompositeScanAnalyzerDispatcher.cs index 08fa05ea..feec02a0 100644 --- a/src/StellaOps.Scanner.Worker/Processing/CompositeScanAnalyzerDispatcher.cs +++ b/src/Scanner/StellaOps.Scanner.Worker/Processing/CompositeScanAnalyzerDispatcher.cs @@ -1,281 +1,281 @@ -using System; 
-using System.Collections.Generic; -using System.Collections.Immutable; -using System.Collections.ObjectModel; -using System.IO; -using System.Linq; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Scanner.Analyzers.Lang; -using StellaOps.Scanner.Analyzers.Lang.Plugin; -using StellaOps.Scanner.Analyzers.OS; -using StellaOps.Scanner.Analyzers.OS.Abstractions; -using StellaOps.Scanner.Analyzers.OS.Mapping; -using StellaOps.Scanner.Analyzers.OS.Plugin; -using StellaOps.Scanner.Core.Contracts; -using StellaOps.Scanner.Worker.Options; - -namespace StellaOps.Scanner.Worker.Processing; - -internal sealed class CompositeScanAnalyzerDispatcher : IScanAnalyzerDispatcher -{ - private readonly IServiceScopeFactory _scopeFactory; - private readonly IOSAnalyzerPluginCatalog _osCatalog; - private readonly ILanguageAnalyzerPluginCatalog _languageCatalog; - private readonly ScannerWorkerOptions _options; - private readonly ILogger<CompositeScanAnalyzerDispatcher> _logger; - private IReadOnlyList<string> _osPluginDirectories = Array.Empty<string>(); - private IReadOnlyList<string> _languagePluginDirectories = Array.Empty<string>(); - - public CompositeScanAnalyzerDispatcher( - IServiceScopeFactory scopeFactory, - IOSAnalyzerPluginCatalog osCatalog, - ILanguageAnalyzerPluginCatalog languageCatalog, - IOptions<ScannerWorkerOptions> options, - ILogger<CompositeScanAnalyzerDispatcher> logger) - { - _scopeFactory = scopeFactory ?? throw new ArgumentNullException(nameof(scopeFactory)); - _osCatalog = osCatalog ?? throw new ArgumentNullException(nameof(osCatalog)); - _languageCatalog = languageCatalog ?? throw new ArgumentNullException(nameof(languageCatalog)); - _options = options?.Value ?? throw new ArgumentNullException(nameof(options)); - _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - - LoadPlugins(); - } - - public async ValueTask ExecuteAsync(ScanJobContext context, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(context); - - using var scope = _scopeFactory.CreateScope(); - var services = scope.ServiceProvider; - - var osAnalyzers = _osCatalog.CreateAnalyzers(services); - var languageAnalyzers = _languageCatalog.CreateAnalyzers(services); - - if (osAnalyzers.Count == 0 && languageAnalyzers.Count == 0) - { - _logger.LogWarning("No analyzer plug-ins available; skipping analyzer stage for job {JobId}.", context.JobId); - return; - } - - var metadata = new Dictionary<string, string>(context.Lease.Metadata, StringComparer.Ordinal); - var rootfsPath = ResolvePath(metadata, _options.Analyzers.RootFilesystemMetadataKey); - var workspacePath = ResolvePath(metadata, _options.Analyzers.WorkspaceMetadataKey) ?? rootfsPath; - - if (osAnalyzers.Count > 0) - { - await ExecuteOsAnalyzersAsync(context, osAnalyzers, services, rootfsPath, workspacePath, cancellationToken) - .ConfigureAwait(false); - } - - if (languageAnalyzers.Count > 0) - { - await ExecuteLanguageAnalyzersAsync(context, languageAnalyzers, services, workspacePath, cancellationToken) - .ConfigureAwait(false); - } - } - - private async Task ExecuteOsAnalyzersAsync( - ScanJobContext context, - IReadOnlyList<IOSPackageAnalyzer> analyzers, - IServiceProvider services, - string? rootfsPath, - string? workspacePath, - CancellationToken cancellationToken) - { - if (rootfsPath is null) - { - _logger.LogWarning( - "Metadata key '{MetadataKey}' missing for job {JobId}; unable to locate root filesystem. 
OS analyzers skipped.", - _options.Analyzers.RootFilesystemMetadataKey, - context.JobId); - return; - } - - var loggerFactory = services.GetRequiredService<ILoggerFactory>(); - var results = new List<OSPackageAnalyzerResult>(analyzers.Count); - - foreach (var analyzer in analyzers) - { - cancellationToken.ThrowIfCancellationRequested(); - - var analyzerLogger = loggerFactory.CreateLogger(analyzer.GetType()); - var analyzerContext = new OSPackageAnalyzerContext(rootfsPath, workspacePath, context.TimeProvider, analyzerLogger, context.Lease.Metadata); - - try - { - var result = await analyzer.AnalyzeAsync(analyzerContext, cancellationToken).ConfigureAwait(false); - results.Add(result); - } - catch (Exception ex) - { - _logger.LogError(ex, "Analyzer {AnalyzerId} failed for job {JobId}.", analyzer.AnalyzerId, context.JobId); - } - } - - if (results.Count == 0) - { - return; - } - - var dictionary = results.ToDictionary(result => result.AnalyzerId, StringComparer.OrdinalIgnoreCase); - context.Analysis.Set(ScanAnalysisKeys.OsPackageAnalyzers, dictionary); - - var fragments = OsComponentMapper.ToLayerFragments(results); - if (!fragments.IsDefaultOrEmpty) - { - context.Analysis.AppendLayerFragments(fragments); - context.Analysis.Set(ScanAnalysisKeys.OsComponentFragments, fragments); - } - } - - private async Task ExecuteLanguageAnalyzersAsync( - ScanJobContext context, - IReadOnlyList<ILanguageAnalyzer> analyzers, - IServiceProvider services, - string? workspacePath, - CancellationToken cancellationToken) - { - if (workspacePath is null) - { - _logger.LogWarning( - "Metadata key '{MetadataKey}' missing for job {JobId}; unable to locate workspace. Language analyzers skipped.", - _options.Analyzers.WorkspaceMetadataKey, - context.JobId); - return; - } - - var usageHints = LanguageUsageHints.Empty; - var analyzerContext = new LanguageAnalyzerContext(workspacePath, context.TimeProvider, usageHints, services); - var results = new Dictionary<string, LanguageAnalyzerResult>(StringComparer.OrdinalIgnoreCase); - var fragments = new List<LayerComponentFragment>(); - - foreach (var analyzer in analyzers) - { - cancellationToken.ThrowIfCancellationRequested(); - - try - { - var engine = new LanguageAnalyzerEngine(new[] { analyzer }); - var result = await engine.AnalyzeAsync(analyzerContext, cancellationToken).ConfigureAwait(false); - results[analyzer.Id] = result; - - var components = result.Components - .Where(component => string.Equals(component.AnalyzerId, analyzer.Id, StringComparison.Ordinal)) - .ToArray(); - - if (components.Length > 0) - { - var fragment = LanguageComponentMapper.ToLayerFragment(analyzer.Id, components); - fragments.Add(fragment); - } - } - catch (Exception ex) - { - _logger.LogError(ex, "Language analyzer {AnalyzerId} failed for job {JobId}.", analyzer.Id, context.JobId); - } - } - - if (results.Count == 0 && fragments.Count == 0) - { - return; - } - - if (results.Count > 0) - { - context.Analysis.Set( - ScanAnalysisKeys.LanguageAnalyzerResults, - new ReadOnlyDictionary<string, LanguageAnalyzerResult>(results)); - } - - if (fragments.Count > 0) - { - var immutableFragments = ImmutableArray.CreateRange(fragments); - context.Analysis.AppendLayerFragments(immutableFragments); - context.Analysis.Set(ScanAnalysisKeys.LanguageComponentFragments, immutableFragments); - } - } - - private void LoadPlugins() - { - _osPluginDirectories = NormalizeDirectories(_options.Analyzers.PluginDirectories, Path.Combine("plugins", "scanner", "analyzers", "os")); - for (var i = 0; i < 
_osPluginDirectories.Count; i++) - { - var directory = _osPluginDirectories[i]; - var seal = i == _osPluginDirectories.Count - 1; - - try - { - _osCatalog.LoadFromDirectory(directory, seal); - } - catch (Exception ex) - { - _logger.LogWarning(ex, "Failed to load OS analyzer plug-ins from {Directory}.", directory); - } - } - - _languagePluginDirectories = NormalizeDirectories(_options.Analyzers.LanguagePluginDirectories, Path.Combine("plugins", "scanner", "analyzers", "lang")); - for (var i = 0; i < _languagePluginDirectories.Count; i++) - { - var directory = _languagePluginDirectories[i]; - var seal = i == _languagePluginDirectories.Count - 1; - - try - { - _languageCatalog.LoadFromDirectory(directory, seal); - } - catch (Exception ex) - { - _logger.LogWarning(ex, "Failed to load language analyzer plug-ins from {Directory}.", directory); - } - } - } - - private static IReadOnlyList<string> NormalizeDirectories(IEnumerable<string> configured, string fallbackRelative) - { - var directories = new List<string>(); - foreach (var configuredPath in configured ?? Array.Empty<string>()) - { - if (string.IsNullOrWhiteSpace(configuredPath)) - { - continue; - } - - var path = configuredPath; - if (!Path.IsPathRooted(path)) - { - path = Path.GetFullPath(Path.Combine(AppContext.BaseDirectory, path)); - } - - directories.Add(path); - } - - if (directories.Count == 0) - { - var fallback = Path.GetFullPath(Path.Combine(AppContext.BaseDirectory, fallbackRelative)); - directories.Add(fallback); - } - - return new ReadOnlyCollection<string>(directories); - } - - private static string? ResolvePath(IReadOnlyDictionary<string, string> metadata, string key) - { - if (string.IsNullOrWhiteSpace(key)) - { - return null; - } - - if (!metadata.TryGetValue(key, out var value) || string.IsNullOrWhiteSpace(value)) - { - return null; - } - - var trimmed = value.Trim(); - return Path.IsPathRooted(trimmed) - ? trimmed - : Path.GetFullPath(trimmed); - } -} +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Collections.ObjectModel; +using System.IO; +using System.Linq; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Scanner.Analyzers.Lang; +using StellaOps.Scanner.Analyzers.Lang.Plugin; +using StellaOps.Scanner.Analyzers.OS; +using StellaOps.Scanner.Analyzers.OS.Abstractions; +using StellaOps.Scanner.Analyzers.OS.Mapping; +using StellaOps.Scanner.Analyzers.OS.Plugin; +using StellaOps.Scanner.Core.Contracts; +using StellaOps.Scanner.Worker.Options; + +namespace StellaOps.Scanner.Worker.Processing; + +internal sealed class CompositeScanAnalyzerDispatcher : IScanAnalyzerDispatcher +{ + private readonly IServiceScopeFactory _scopeFactory; + private readonly IOSAnalyzerPluginCatalog _osCatalog; + private readonly ILanguageAnalyzerPluginCatalog _languageCatalog; + private readonly ScannerWorkerOptions _options; + private readonly ILogger<CompositeScanAnalyzerDispatcher> _logger; + private IReadOnlyList<string> _osPluginDirectories = Array.Empty<string>(); + private IReadOnlyList<string> _languagePluginDirectories = Array.Empty<string>(); + + public CompositeScanAnalyzerDispatcher( + IServiceScopeFactory scopeFactory, + IOSAnalyzerPluginCatalog osCatalog, + ILanguageAnalyzerPluginCatalog languageCatalog, + IOptions<ScannerWorkerOptions> options, + ILogger<CompositeScanAnalyzerDispatcher> logger) + { + _scopeFactory = scopeFactory ?? 
throw new ArgumentNullException(nameof(scopeFactory)); + _osCatalog = osCatalog ?? throw new ArgumentNullException(nameof(osCatalog)); + _languageCatalog = languageCatalog ?? throw new ArgumentNullException(nameof(languageCatalog)); + _options = options?.Value ?? throw new ArgumentNullException(nameof(options)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + + LoadPlugins(); + } + + public async ValueTask ExecuteAsync(ScanJobContext context, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(context); + + using var scope = _scopeFactory.CreateScope(); + var services = scope.ServiceProvider; + + var osAnalyzers = _osCatalog.CreateAnalyzers(services); + var languageAnalyzers = _languageCatalog.CreateAnalyzers(services); + + if (osAnalyzers.Count == 0 && languageAnalyzers.Count == 0) + { + _logger.LogWarning("No analyzer plug-ins available; skipping analyzer stage for job {JobId}.", context.JobId); + return; + } + + var metadata = new Dictionary<string, string>(context.Lease.Metadata, StringComparer.Ordinal); + var rootfsPath = ResolvePath(metadata, _options.Analyzers.RootFilesystemMetadataKey); + var workspacePath = ResolvePath(metadata, _options.Analyzers.WorkspaceMetadataKey) ?? rootfsPath; + + if (osAnalyzers.Count > 0) + { + await ExecuteOsAnalyzersAsync(context, osAnalyzers, services, rootfsPath, workspacePath, cancellationToken) + .ConfigureAwait(false); + } + + if (languageAnalyzers.Count > 0) + { + await ExecuteLanguageAnalyzersAsync(context, languageAnalyzers, services, workspacePath, cancellationToken) + .ConfigureAwait(false); + } + } + + private async Task ExecuteOsAnalyzersAsync( + ScanJobContext context, + IReadOnlyList<IOSPackageAnalyzer> analyzers, + IServiceProvider services, + string? rootfsPath, + string? workspacePath, + CancellationToken cancellationToken) + { + if (rootfsPath is null) + { + _logger.LogWarning( + "Metadata key '{MetadataKey}' missing for job {JobId}; unable to locate root filesystem. OS analyzers skipped.", + _options.Analyzers.RootFilesystemMetadataKey, + context.JobId); + return; + } + + var loggerFactory = services.GetRequiredService<ILoggerFactory>(); + var results = new List<OSPackageAnalyzerResult>(analyzers.Count); + + foreach (var analyzer in analyzers) + { + cancellationToken.ThrowIfCancellationRequested(); + + var analyzerLogger = loggerFactory.CreateLogger(analyzer.GetType()); + var analyzerContext = new OSPackageAnalyzerContext(rootfsPath, workspacePath, context.TimeProvider, analyzerLogger, context.Lease.Metadata); + + try + { + var result = await analyzer.AnalyzeAsync(analyzerContext, cancellationToken).ConfigureAwait(false); + results.Add(result); + } + catch (Exception ex) + { + _logger.LogError(ex, "Analyzer {AnalyzerId} failed for job {JobId}.", analyzer.AnalyzerId, context.JobId); + } + } + + if (results.Count == 0) + { + return; + } + + var dictionary = results.ToDictionary(result => result.AnalyzerId, StringComparer.OrdinalIgnoreCase); + context.Analysis.Set(ScanAnalysisKeys.OsPackageAnalyzers, dictionary); + + var fragments = OsComponentMapper.ToLayerFragments(results); + if (!fragments.IsDefaultOrEmpty) + { + context.Analysis.AppendLayerFragments(fragments); + context.Analysis.Set(ScanAnalysisKeys.OsComponentFragments, fragments); + } + } + + private async Task ExecuteLanguageAnalyzersAsync( + ScanJobContext context, + IReadOnlyList<ILanguageAnalyzer> analyzers, + IServiceProvider services, + string? 
workspacePath, + CancellationToken cancellationToken) + { + if (workspacePath is null) + { + _logger.LogWarning( + "Metadata key '{MetadataKey}' missing for job {JobId}; unable to locate workspace. Language analyzers skipped.", + _options.Analyzers.WorkspaceMetadataKey, + context.JobId); + return; + } + + var usageHints = LanguageUsageHints.Empty; + var analyzerContext = new LanguageAnalyzerContext(workspacePath, context.TimeProvider, usageHints, services); + var results = new Dictionary<string, LanguageAnalyzerResult>(StringComparer.OrdinalIgnoreCase); + var fragments = new List<LayerComponentFragment>(); + + foreach (var analyzer in analyzers) + { + cancellationToken.ThrowIfCancellationRequested(); + + try + { + var engine = new LanguageAnalyzerEngine(new[] { analyzer }); + var result = await engine.AnalyzeAsync(analyzerContext, cancellationToken).ConfigureAwait(false); + results[analyzer.Id] = result; + + var components = result.Components + .Where(component => string.Equals(component.AnalyzerId, analyzer.Id, StringComparison.Ordinal)) + .ToArray(); + + if (components.Length > 0) + { + var fragment = LanguageComponentMapper.ToLayerFragment(analyzer.Id, components); + fragments.Add(fragment); + } + } + catch (Exception ex) + { + _logger.LogError(ex, "Language analyzer {AnalyzerId} failed for job {JobId}.", analyzer.Id, context.JobId); + } + } + + if (results.Count == 0 && fragments.Count == 0) + { + return; + } + + if (results.Count > 0) + { + context.Analysis.Set( + ScanAnalysisKeys.LanguageAnalyzerResults, + new ReadOnlyDictionary<string, LanguageAnalyzerResult>(results)); + } + + if (fragments.Count > 0) + { + var immutableFragments = ImmutableArray.CreateRange(fragments); + context.Analysis.AppendLayerFragments(immutableFragments); + context.Analysis.Set(ScanAnalysisKeys.LanguageComponentFragments, immutableFragments); + } + } + + private void LoadPlugins() + { + _osPluginDirectories = NormalizeDirectories(_options.Analyzers.PluginDirectories, Path.Combine("plugins", "scanner", "analyzers", "os")); + for (var i = 0; i < _osPluginDirectories.Count; i++) + { + var directory = _osPluginDirectories[i]; + var seal = i == _osPluginDirectories.Count - 1; + + try + { + _osCatalog.LoadFromDirectory(directory, seal); + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Failed to load OS analyzer plug-ins from {Directory}.", directory); + } + } + + _languagePluginDirectories = NormalizeDirectories(_options.Analyzers.LanguagePluginDirectories, Path.Combine("plugins", "scanner", "analyzers", "lang")); + for (var i = 0; i < _languagePluginDirectories.Count; i++) + { + var directory = _languagePluginDirectories[i]; + var seal = i == _languagePluginDirectories.Count - 1; + + try + { + _languageCatalog.LoadFromDirectory(directory, seal); + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Failed to load language analyzer plug-ins from {Directory}.", directory); + } + } + } + + private static IReadOnlyList<string> NormalizeDirectories(IEnumerable<string> configured, string fallbackRelative) + { + var directories = new List<string>(); + foreach (var configuredPath in configured ?? 
Array.Empty<string>()) + { + if (string.IsNullOrWhiteSpace(configuredPath)) + { + continue; + } + + var path = configuredPath; + if (!Path.IsPathRooted(path)) + { + path = Path.GetFullPath(Path.Combine(AppContext.BaseDirectory, path)); + } + + directories.Add(path); + } + + if (directories.Count == 0) + { + var fallback = Path.GetFullPath(Path.Combine(AppContext.BaseDirectory, fallbackRelative)); + directories.Add(fallback); + } + + return new ReadOnlyCollection<string>(directories); + } + + private static string? ResolvePath(IReadOnlyDictionary<string, string> metadata, string key) + { + if (string.IsNullOrWhiteSpace(key)) + { + return null; + } + + if (!metadata.TryGetValue(key, out var value) || string.IsNullOrWhiteSpace(value)) + { + return null; + } + + var trimmed = value.Trim(); + return Path.IsPathRooted(trimmed) + ? trimmed + : Path.GetFullPath(trimmed); + } +} diff --git a/src/StellaOps.Scanner.Worker/Processing/EntryTraceExecutionService.cs b/src/Scanner/StellaOps.Scanner.Worker/Processing/EntryTraceExecutionService.cs similarity index 97% rename from src/StellaOps.Scanner.Worker/Processing/EntryTraceExecutionService.cs rename to src/Scanner/StellaOps.Scanner.Worker/Processing/EntryTraceExecutionService.cs index 9ed4419c..7e861701 100644 --- a/src/StellaOps.Scanner.Worker/Processing/EntryTraceExecutionService.cs +++ b/src/Scanner/StellaOps.Scanner.Worker/Processing/EntryTraceExecutionService.cs @@ -1,302 +1,302 @@ -using System; -using System.Collections.Generic; -using System.Collections.Immutable; -using System.IO; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Scanner.Core.Contracts; -using StellaOps.Scanner.EntryTrace; -using StellaOps.Scanner.Worker.Options; -using IOPath = System.IO.Path; - -namespace StellaOps.Scanner.Worker.Processing; - -public sealed class EntryTraceExecutionService : IEntryTraceExecutionService -{ - private readonly IEntryTraceAnalyzer _analyzer; - private readonly EntryTraceAnalyzerOptions _entryTraceOptions; - private readonly ScannerWorkerOptions _workerOptions; - private readonly ILogger<EntryTraceExecutionService> _logger; - private readonly ILoggerFactory _loggerFactory; - - public EntryTraceExecutionService( - IEntryTraceAnalyzer analyzer, - IOptions<EntryTraceAnalyzerOptions> entryTraceOptions, - IOptions<ScannerWorkerOptions> workerOptions, - ILogger<EntryTraceExecutionService> logger, - ILoggerFactory loggerFactory) - { - _analyzer = analyzer ?? throw new ArgumentNullException(nameof(analyzer)); - _entryTraceOptions = (entryTraceOptions ?? throw new ArgumentNullException(nameof(entryTraceOptions))).Value ?? new EntryTraceAnalyzerOptions(); - _workerOptions = (workerOptions ?? throw new ArgumentNullException(nameof(workerOptions))).Value ?? new ScannerWorkerOptions(); - _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - _loggerFactory = loggerFactory ?? throw new ArgumentNullException(nameof(loggerFactory)); - } - - public async ValueTask ExecuteAsync(ScanJobContext context, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(context); - - var metadata = context.Lease.Metadata ?? 
new Dictionary<string, string>(StringComparer.Ordinal); - - var configPath = ResolvePath(metadata, _workerOptions.Analyzers.EntryTraceConfigMetadataKey, ScanMetadataKeys.ImageConfigPath); - if (configPath is null) - { - _logger.LogDebug("EntryTrace config metadata '{MetadataKey}' missing for job {JobId}; skipping entry trace.", _workerOptions.Analyzers.EntryTraceConfigMetadataKey, context.JobId); - return; - } - - if (!File.Exists(configPath)) - { - _logger.LogWarning("EntryTrace config file '{ConfigPath}' not found for job {JobId}; skipping entry trace.", configPath, context.JobId); - return; - } - - OciImageConfig config; - try - { - using var stream = File.OpenRead(configPath); - config = OciImageConfigLoader.Load(stream); - } - catch (Exception ex) - { - _logger.LogWarning(ex, "Failed to parse OCI image config at '{ConfigPath}' for job {JobId}; entry trace skipped.", configPath, context.JobId); - return; - } - - var fileSystem = BuildFileSystem(context.JobId, metadata); - if (fileSystem is null) - { - return; - } - - var imageDigest = ResolveImageDigest(metadata, context); - var entryTraceLogger = _loggerFactory.CreateLogger<EntryTraceExecutionService>(); - EntryTraceImageContext imageContext; - try - { - imageContext = EntryTraceImageContextFactory.Create( - config, - fileSystem, - _entryTraceOptions, - imageDigest, - context.ScanId, - entryTraceLogger); - } - catch (Exception ex) - { - _logger.LogWarning(ex, "Failed to build EntryTrace context for job {JobId}; skipping entry trace.", context.JobId); - return; - } - - EntryTraceGraph graph; - try - { - graph = await _analyzer.ResolveAsync(imageContext.Entrypoint, imageContext.Context, cancellationToken).ConfigureAwait(false); - } - catch (Exception ex) - { - _logger.LogError(ex, "EntryTrace analyzer failed for job {JobId}.", context.JobId); - return; - } - - context.Analysis.Set(ScanAnalysisKeys.EntryTraceGraph, graph); - } - - private LayeredRootFileSystem? BuildFileSystem(string jobId, IReadOnlyDictionary<string, string> metadata) - { - var directoryValues = ResolveList(metadata, _workerOptions.Analyzers.EntryTraceLayerDirectoriesMetadataKey, ScanMetadataKeys.LayerDirectories); - var archiveValues = ResolveList(metadata, _workerOptions.Analyzers.EntryTraceLayerArchivesMetadataKey, ScanMetadataKeys.LayerArchives); - - var directoryLayers = new List<LayeredRootFileSystem.LayerDirectory>(); - foreach (var value in directoryValues) - { - var fullPath = NormalizePath(value); - if (string.IsNullOrWhiteSpace(fullPath)) - { - continue; - } - - if (!Directory.Exists(fullPath)) - { - _logger.LogWarning("EntryTrace layer directory '{Directory}' not found for job {JobId}; skipping layer.", fullPath, jobId); - continue; - } - - directoryLayers.Add(new LayeredRootFileSystem.LayerDirectory(TryDeriveDigest(fullPath) ?? string.Empty, fullPath)); - } - - var archiveLayers = new List<LayeredRootFileSystem.LayerArchive>(); - foreach (var value in archiveValues) - { - var fullPath = NormalizePath(value); - if (string.IsNullOrWhiteSpace(fullPath)) - { - continue; - } - - if (!File.Exists(fullPath)) - { - _logger.LogWarning("EntryTrace layer archive '{Archive}' not found for job {JobId}; skipping layer.", fullPath, jobId); - continue; - } - - archiveLayers.Add(new LayeredRootFileSystem.LayerArchive(TryDeriveDigest(fullPath) ?? 
string.Empty, fullPath)); - } - - try - { - if (archiveLayers.Count > 0) - { - return LayeredRootFileSystem.FromArchives(archiveLayers); - } - - if (directoryLayers.Count > 0) - { - return LayeredRootFileSystem.FromDirectories(directoryLayers); - } - } - catch (Exception ex) - { - _logger.LogWarning(ex, "Failed to construct layered root filesystem for job {JobId}; entry trace skipped.", jobId); - return null; - } - - var rootFsPath = ResolvePath(metadata, _workerOptions.Analyzers.RootFilesystemMetadataKey, ScanMetadataKeys.RootFilesystemPath); - if (!string.IsNullOrWhiteSpace(rootFsPath) && Directory.Exists(rootFsPath)) - { - try - { - return LayeredRootFileSystem.FromDirectories(new[] - { - new LayeredRootFileSystem.LayerDirectory(TryDeriveDigest(rootFsPath) ?? string.Empty, rootFsPath) - }); - } - catch (Exception ex) - { - _logger.LogWarning(ex, "Failed to create layered filesystem from root path '{RootPath}' for job {JobId}; entry trace skipped.", rootFsPath, jobId); - return null; - } - } - - _logger.LogDebug("No EntryTrace layers or root filesystem metadata available for job {JobId}; skipping entry trace.", jobId); - return null; - } - - private static string ResolveImageDigest(IReadOnlyDictionary<string, string> metadata, ScanJobContext context) - { - if (metadata.TryGetValue("image.digest", out var digest) && !string.IsNullOrWhiteSpace(digest)) - { - return digest.Trim(); - } - - if (metadata.TryGetValue("imageDigest", out var altDigest) && !string.IsNullOrWhiteSpace(altDigest)) - { - return altDigest.Trim(); - } - - return context.Lease.Metadata.TryGetValue("scanner.image.digest", out var scopedDigest) && !string.IsNullOrWhiteSpace(scopedDigest) - ? scopedDigest.Trim() - : $"sha256:{context.JobId}"; - } - - private static IReadOnlyCollection<string> ResolveList(IReadOnlyDictionary<string, string> metadata, string key, string fallbackKey) - { - if (metadata.TryGetValue(key, out var value) && !string.IsNullOrWhiteSpace(value)) - { - return SplitList(value); - } - - if (!string.Equals(key, fallbackKey, StringComparison.Ordinal) && - metadata.TryGetValue(fallbackKey, out var fallbackValue) && - !string.IsNullOrWhiteSpace(fallbackValue)) - { - return SplitList(fallbackValue); - } - - return Array.Empty<string>(); - } - - private static string? ResolvePath(IReadOnlyDictionary<string, string> metadata, string key, string fallbackKey) - { - if (metadata.TryGetValue(key, out var value) && !string.IsNullOrWhiteSpace(value)) - { - return NormalizePath(value); - } - - if (!string.Equals(key, fallbackKey, StringComparison.Ordinal) && - metadata.TryGetValue(fallbackKey, out var fallbackValue) && - !string.IsNullOrWhiteSpace(fallbackValue)) - { - return NormalizePath(fallbackValue); - } - - return null; - } - - private static IReadOnlyCollection<string> SplitList(string value) - { - var segments = value.Split(new[] { ';', ',', '\n', '\r', IOPath.PathSeparator }, StringSplitOptions.RemoveEmptyEntries); - return segments - .Select(segment => NormalizePath(segment)) - .Where(segment => !string.IsNullOrWhiteSpace(segment)) - .ToArray(); - } - - private static string NormalizePath(string? value) - { - if (string.IsNullOrWhiteSpace(value)) - { - return string.Empty; - } - - var trimmed = value.Trim().Trim('"'); - return string.IsNullOrWhiteSpace(trimmed) ? string.Empty : trimmed; - } - - private static string? 
TryDeriveDigest(string path) - { - if (string.IsNullOrWhiteSpace(path)) - { - return null; - } - - var candidate = path.TrimEnd(IOPath.DirectorySeparatorChar, IOPath.AltDirectorySeparatorChar); - var name = IOPath.GetFileName(candidate); - if (string.IsNullOrWhiteSpace(name)) - { - return null; - } - - var normalized = name; - if (normalized.EndsWith(".tar.gz", StringComparison.OrdinalIgnoreCase)) - { - normalized = normalized[..^7]; - } - else if (normalized.EndsWith(".tgz", StringComparison.OrdinalIgnoreCase)) - { - normalized = normalized[..^4]; - } - else if (normalized.EndsWith(".tar", StringComparison.OrdinalIgnoreCase)) - { - normalized = normalized[..^4]; - } - - if (normalized.Contains(':', StringComparison.Ordinal)) - { - return normalized; - } - - if (normalized.StartsWith("sha", StringComparison.OrdinalIgnoreCase)) - { - return normalized.Contains('-') - ? normalized.Replace('-', ':') - : $"sha256:{normalized}"; - } - - return null; - } -} +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.IO; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Scanner.Core.Contracts; +using StellaOps.Scanner.EntryTrace; +using StellaOps.Scanner.Worker.Options; +using IOPath = System.IO.Path; + +namespace StellaOps.Scanner.Worker.Processing; + +public sealed class EntryTraceExecutionService : IEntryTraceExecutionService +{ + private readonly IEntryTraceAnalyzer _analyzer; + private readonly EntryTraceAnalyzerOptions _entryTraceOptions; + private readonly ScannerWorkerOptions _workerOptions; + private readonly ILogger<EntryTraceExecutionService> _logger; + private readonly ILoggerFactory _loggerFactory; + + public EntryTraceExecutionService( + IEntryTraceAnalyzer analyzer, + IOptions<EntryTraceAnalyzerOptions> entryTraceOptions, + IOptions<ScannerWorkerOptions> workerOptions, + ILogger<EntryTraceExecutionService> logger, + ILoggerFactory loggerFactory) + { + _analyzer = analyzer ?? throw new ArgumentNullException(nameof(analyzer)); + _entryTraceOptions = (entryTraceOptions ?? throw new ArgumentNullException(nameof(entryTraceOptions))).Value ?? new EntryTraceAnalyzerOptions(); + _workerOptions = (workerOptions ?? throw new ArgumentNullException(nameof(workerOptions))).Value ?? new ScannerWorkerOptions(); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _loggerFactory = loggerFactory ?? throw new ArgumentNullException(nameof(loggerFactory)); + } + + public async ValueTask ExecuteAsync(ScanJobContext context, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(context); + + var metadata = context.Lease.Metadata ?? 
new Dictionary<string, string>(StringComparer.Ordinal); + + var configPath = ResolvePath(metadata, _workerOptions.Analyzers.EntryTraceConfigMetadataKey, ScanMetadataKeys.ImageConfigPath); + if (configPath is null) + { + _logger.LogDebug("EntryTrace config metadata '{MetadataKey}' missing for job {JobId}; skipping entry trace.", _workerOptions.Analyzers.EntryTraceConfigMetadataKey, context.JobId); + return; + } + + if (!File.Exists(configPath)) + { + _logger.LogWarning("EntryTrace config file '{ConfigPath}' not found for job {JobId}; skipping entry trace.", configPath, context.JobId); + return; + } + + OciImageConfig config; + try + { + using var stream = File.OpenRead(configPath); + config = OciImageConfigLoader.Load(stream); + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Failed to parse OCI image config at '{ConfigPath}' for job {JobId}; entry trace skipped.", configPath, context.JobId); + return; + } + + var fileSystem = BuildFileSystem(context.JobId, metadata); + if (fileSystem is null) + { + return; + } + + var imageDigest = ResolveImageDigest(metadata, context); + var entryTraceLogger = _loggerFactory.CreateLogger<EntryTraceExecutionService>(); + EntryTraceImageContext imageContext; + try + { + imageContext = EntryTraceImageContextFactory.Create( + config, + fileSystem, + _entryTraceOptions, + imageDigest, + context.ScanId, + entryTraceLogger); + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Failed to build EntryTrace context for job {JobId}; skipping entry trace.", context.JobId); + return; + } + + EntryTraceGraph graph; + try + { + graph = await _analyzer.ResolveAsync(imageContext.Entrypoint, imageContext.Context, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) + { + _logger.LogError(ex, "EntryTrace analyzer failed for job {JobId}.", context.JobId); + return; + } + + context.Analysis.Set(ScanAnalysisKeys.EntryTraceGraph, graph); + } + + private LayeredRootFileSystem? BuildFileSystem(string jobId, IReadOnlyDictionary<string, string> metadata) + { + var directoryValues = ResolveList(metadata, _workerOptions.Analyzers.EntryTraceLayerDirectoriesMetadataKey, ScanMetadataKeys.LayerDirectories); + var archiveValues = ResolveList(metadata, _workerOptions.Analyzers.EntryTraceLayerArchivesMetadataKey, ScanMetadataKeys.LayerArchives); + + var directoryLayers = new List<LayeredRootFileSystem.LayerDirectory>(); + foreach (var value in directoryValues) + { + var fullPath = NormalizePath(value); + if (string.IsNullOrWhiteSpace(fullPath)) + { + continue; + } + + if (!Directory.Exists(fullPath)) + { + _logger.LogWarning("EntryTrace layer directory '{Directory}' not found for job {JobId}; skipping layer.", fullPath, jobId); + continue; + } + + directoryLayers.Add(new LayeredRootFileSystem.LayerDirectory(TryDeriveDigest(fullPath) ?? string.Empty, fullPath)); + } + + var archiveLayers = new List<LayeredRootFileSystem.LayerArchive>(); + foreach (var value in archiveValues) + { + var fullPath = NormalizePath(value); + if (string.IsNullOrWhiteSpace(fullPath)) + { + continue; + } + + if (!File.Exists(fullPath)) + { + _logger.LogWarning("EntryTrace layer archive '{Archive}' not found for job {JobId}; skipping layer.", fullPath, jobId); + continue; + } + + archiveLayers.Add(new LayeredRootFileSystem.LayerArchive(TryDeriveDigest(fullPath) ?? 
string.Empty, fullPath)); + } + + try + { + if (archiveLayers.Count > 0) + { + return LayeredRootFileSystem.FromArchives(archiveLayers); + } + + if (directoryLayers.Count > 0) + { + return LayeredRootFileSystem.FromDirectories(directoryLayers); + } + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Failed to construct layered root filesystem for job {JobId}; entry trace skipped.", jobId); + return null; + } + + var rootFsPath = ResolvePath(metadata, _workerOptions.Analyzers.RootFilesystemMetadataKey, ScanMetadataKeys.RootFilesystemPath); + if (!string.IsNullOrWhiteSpace(rootFsPath) && Directory.Exists(rootFsPath)) + { + try + { + return LayeredRootFileSystem.FromDirectories(new[] + { + new LayeredRootFileSystem.LayerDirectory(TryDeriveDigest(rootFsPath) ?? string.Empty, rootFsPath) + }); + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Failed to create layered filesystem from root path '{RootPath}' for job {JobId}; entry trace skipped.", rootFsPath, jobId); + return null; + } + } + + _logger.LogDebug("No EntryTrace layers or root filesystem metadata available for job {JobId}; skipping entry trace.", jobId); + return null; + } + + private static string ResolveImageDigest(IReadOnlyDictionary<string, string> metadata, ScanJobContext context) + { + if (metadata.TryGetValue("image.digest", out var digest) && !string.IsNullOrWhiteSpace(digest)) + { + return digest.Trim(); + } + + if (metadata.TryGetValue("imageDigest", out var altDigest) && !string.IsNullOrWhiteSpace(altDigest)) + { + return altDigest.Trim(); + } + + return context.Lease.Metadata.TryGetValue("scanner.image.digest", out var scopedDigest) && !string.IsNullOrWhiteSpace(scopedDigest) + ? scopedDigest.Trim() + : $"sha256:{context.JobId}"; + } + + private static IReadOnlyCollection<string> ResolveList(IReadOnlyDictionary<string, string> metadata, string key, string fallbackKey) + { + if (metadata.TryGetValue(key, out var value) && !string.IsNullOrWhiteSpace(value)) + { + return SplitList(value); + } + + if (!string.Equals(key, fallbackKey, StringComparison.Ordinal) && + metadata.TryGetValue(fallbackKey, out var fallbackValue) && + !string.IsNullOrWhiteSpace(fallbackValue)) + { + return SplitList(fallbackValue); + } + + return Array.Empty<string>(); + } + + private static string? ResolvePath(IReadOnlyDictionary<string, string> metadata, string key, string fallbackKey) + { + if (metadata.TryGetValue(key, out var value) && !string.IsNullOrWhiteSpace(value)) + { + return NormalizePath(value); + } + + if (!string.Equals(key, fallbackKey, StringComparison.Ordinal) && + metadata.TryGetValue(fallbackKey, out var fallbackValue) && + !string.IsNullOrWhiteSpace(fallbackValue)) + { + return NormalizePath(fallbackValue); + } + + return null; + } + + private static IReadOnlyCollection<string> SplitList(string value) + { + var segments = value.Split(new[] { ';', ',', '\n', '\r', IOPath.PathSeparator }, StringSplitOptions.RemoveEmptyEntries); + return segments + .Select(segment => NormalizePath(segment)) + .Where(segment => !string.IsNullOrWhiteSpace(segment)) + .ToArray(); + } + + private static string NormalizePath(string? value) + { + if (string.IsNullOrWhiteSpace(value)) + { + return string.Empty; + } + + var trimmed = value.Trim().Trim('"'); + return string.IsNullOrWhiteSpace(trimmed) ? string.Empty : trimmed; + } + + private static string? 
TryDeriveDigest(string path) + { + if (string.IsNullOrWhiteSpace(path)) + { + return null; + } + + var candidate = path.TrimEnd(IOPath.DirectorySeparatorChar, IOPath.AltDirectorySeparatorChar); + var name = IOPath.GetFileName(candidate); + if (string.IsNullOrWhiteSpace(name)) + { + return null; + } + + var normalized = name; + if (normalized.EndsWith(".tar.gz", StringComparison.OrdinalIgnoreCase)) + { + normalized = normalized[..^7]; + } + else if (normalized.EndsWith(".tgz", StringComparison.OrdinalIgnoreCase)) + { + normalized = normalized[..^4]; + } + else if (normalized.EndsWith(".tar", StringComparison.OrdinalIgnoreCase)) + { + normalized = normalized[..^4]; + } + + if (normalized.Contains(':', StringComparison.Ordinal)) + { + return normalized; + } + + if (normalized.StartsWith("sha", StringComparison.OrdinalIgnoreCase)) + { + return normalized.Contains('-') + ? normalized.Replace('-', ':') + : $"sha256:{normalized}"; + } + + return null; + } +} diff --git a/src/StellaOps.Scanner.Worker/Processing/IDelayScheduler.cs b/src/Scanner/StellaOps.Scanner.Worker/Processing/IDelayScheduler.cs similarity index 100% rename from src/StellaOps.Scanner.Worker/Processing/IDelayScheduler.cs rename to src/Scanner/StellaOps.Scanner.Worker/Processing/IDelayScheduler.cs diff --git a/src/StellaOps.Scanner.Worker/Processing/IEntryTraceExecutionService.cs b/src/Scanner/StellaOps.Scanner.Worker/Processing/IEntryTraceExecutionService.cs similarity index 96% rename from src/StellaOps.Scanner.Worker/Processing/IEntryTraceExecutionService.cs rename to src/Scanner/StellaOps.Scanner.Worker/Processing/IEntryTraceExecutionService.cs index 85fd25a4..42d4ba96 100644 --- a/src/StellaOps.Scanner.Worker/Processing/IEntryTraceExecutionService.cs +++ b/src/Scanner/StellaOps.Scanner.Worker/Processing/IEntryTraceExecutionService.cs @@ -1,9 +1,9 @@ -using System.Threading; -using System.Threading.Tasks; - -namespace StellaOps.Scanner.Worker.Processing; - -public interface IEntryTraceExecutionService -{ - ValueTask ExecuteAsync(ScanJobContext context, CancellationToken cancellationToken); -} +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Scanner.Worker.Processing; + +public interface IEntryTraceExecutionService +{ + ValueTask ExecuteAsync(ScanJobContext context, CancellationToken cancellationToken); +} diff --git a/src/StellaOps.Scanner.Worker/Processing/IScanAnalyzerDispatcher.cs b/src/Scanner/StellaOps.Scanner.Worker/Processing/IScanAnalyzerDispatcher.cs similarity index 100% rename from src/StellaOps.Scanner.Worker/Processing/IScanAnalyzerDispatcher.cs rename to src/Scanner/StellaOps.Scanner.Worker/Processing/IScanAnalyzerDispatcher.cs diff --git a/src/StellaOps.Scanner.Worker/Processing/IScanJobLease.cs b/src/Scanner/StellaOps.Scanner.Worker/Processing/IScanJobLease.cs similarity index 100% rename from src/StellaOps.Scanner.Worker/Processing/IScanJobLease.cs rename to src/Scanner/StellaOps.Scanner.Worker/Processing/IScanJobLease.cs diff --git a/src/StellaOps.Scanner.Worker/Processing/IScanJobSource.cs b/src/Scanner/StellaOps.Scanner.Worker/Processing/IScanJobSource.cs similarity index 100% rename from src/StellaOps.Scanner.Worker/Processing/IScanJobSource.cs rename to src/Scanner/StellaOps.Scanner.Worker/Processing/IScanJobSource.cs diff --git a/src/StellaOps.Scanner.Worker/Processing/IScanStageExecutor.cs b/src/Scanner/StellaOps.Scanner.Worker/Processing/IScanStageExecutor.cs similarity index 100% rename from src/StellaOps.Scanner.Worker/Processing/IScanStageExecutor.cs rename to 
src/Scanner/StellaOps.Scanner.Worker/Processing/IScanStageExecutor.cs diff --git a/src/StellaOps.Scanner.Worker/Processing/LeaseHeartbeatService.cs b/src/Scanner/StellaOps.Scanner.Worker/Processing/LeaseHeartbeatService.cs similarity index 100% rename from src/StellaOps.Scanner.Worker/Processing/LeaseHeartbeatService.cs rename to src/Scanner/StellaOps.Scanner.Worker/Processing/LeaseHeartbeatService.cs diff --git a/src/StellaOps.Scanner.Worker/Processing/NoOpStageExecutor.cs b/src/Scanner/StellaOps.Scanner.Worker/Processing/NoOpStageExecutor.cs similarity index 100% rename from src/StellaOps.Scanner.Worker/Processing/NoOpStageExecutor.cs rename to src/Scanner/StellaOps.Scanner.Worker/Processing/NoOpStageExecutor.cs diff --git a/src/StellaOps.Scanner.Worker/Processing/NullScanJobSource.cs b/src/Scanner/StellaOps.Scanner.Worker/Processing/NullScanJobSource.cs similarity index 100% rename from src/StellaOps.Scanner.Worker/Processing/NullScanJobSource.cs rename to src/Scanner/StellaOps.Scanner.Worker/Processing/NullScanJobSource.cs diff --git a/src/StellaOps.Scanner.Worker/Processing/PollDelayStrategy.cs b/src/Scanner/StellaOps.Scanner.Worker/Processing/PollDelayStrategy.cs similarity index 100% rename from src/StellaOps.Scanner.Worker/Processing/PollDelayStrategy.cs rename to src/Scanner/StellaOps.Scanner.Worker/Processing/PollDelayStrategy.cs diff --git a/src/StellaOps.Scanner.Worker/Processing/ScanJobContext.cs b/src/Scanner/StellaOps.Scanner.Worker/Processing/ScanJobContext.cs similarity index 100% rename from src/StellaOps.Scanner.Worker/Processing/ScanJobContext.cs rename to src/Scanner/StellaOps.Scanner.Worker/Processing/ScanJobContext.cs diff --git a/src/StellaOps.Scanner.Worker/Processing/ScanJobProcessor.cs b/src/Scanner/StellaOps.Scanner.Worker/Processing/ScanJobProcessor.cs similarity index 100% rename from src/StellaOps.Scanner.Worker/Processing/ScanJobProcessor.cs rename to src/Scanner/StellaOps.Scanner.Worker/Processing/ScanJobProcessor.cs diff --git a/src/StellaOps.Scanner.Worker/Processing/ScanProgressReporter.cs b/src/Scanner/StellaOps.Scanner.Worker/Processing/ScanProgressReporter.cs similarity index 100% rename from src/StellaOps.Scanner.Worker/Processing/ScanProgressReporter.cs rename to src/Scanner/StellaOps.Scanner.Worker/Processing/ScanProgressReporter.cs diff --git a/src/StellaOps.Scanner.Worker/Processing/ScanStageNames.cs b/src/Scanner/StellaOps.Scanner.Worker/Processing/ScanStageNames.cs similarity index 100% rename from src/StellaOps.Scanner.Worker/Processing/ScanStageNames.cs rename to src/Scanner/StellaOps.Scanner.Worker/Processing/ScanStageNames.cs diff --git a/src/StellaOps.Scanner.Worker/Processing/SystemDelayScheduler.cs b/src/Scanner/StellaOps.Scanner.Worker/Processing/SystemDelayScheduler.cs similarity index 100% rename from src/StellaOps.Scanner.Worker/Processing/SystemDelayScheduler.cs rename to src/Scanner/StellaOps.Scanner.Worker/Processing/SystemDelayScheduler.cs diff --git a/src/StellaOps.Scanner.Worker/Program.cs b/src/Scanner/StellaOps.Scanner.Worker/Program.cs similarity index 100% rename from src/StellaOps.Scanner.Worker/Program.cs rename to src/Scanner/StellaOps.Scanner.Worker/Program.cs diff --git a/src/StellaOps.Scanner.Worker/Properties/AssemblyInfo.cs b/src/Scanner/StellaOps.Scanner.Worker/Properties/AssemblyInfo.cs similarity index 97% rename from src/StellaOps.Scanner.Worker/Properties/AssemblyInfo.cs rename to src/Scanner/StellaOps.Scanner.Worker/Properties/AssemblyInfo.cs index 29bcd042..4467ba54 100644 --- 
a/src/StellaOps.Scanner.Worker/Properties/AssemblyInfo.cs +++ b/src/Scanner/StellaOps.Scanner.Worker/Properties/AssemblyInfo.cs @@ -1,3 +1,3 @@ -using System.Runtime.CompilerServices; - -[assembly: InternalsVisibleTo("StellaOps.Scanner.Worker.Tests")] +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("StellaOps.Scanner.Worker.Tests")] diff --git a/src/StellaOps.Scanner.Worker/StellaOps.Scanner.Worker.csproj b/src/Scanner/StellaOps.Scanner.Worker/StellaOps.Scanner.Worker.csproj similarity index 52% rename from src/StellaOps.Scanner.Worker/StellaOps.Scanner.Worker.csproj rename to src/Scanner/StellaOps.Scanner.Worker/StellaOps.Scanner.Worker.csproj index 300fd92f..ee0776d7 100644 --- a/src/StellaOps.Scanner.Worker/StellaOps.Scanner.Worker.csproj +++ b/src/Scanner/StellaOps.Scanner.Worker/StellaOps.Scanner.Worker.csproj @@ -1,24 +1,25 @@ -<Project Sdk="Microsoft.NET.Sdk.Worker"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <ItemGroup> - <PackageReference Include="OpenTelemetry.Extensions.Hosting" Version="1.12.0" /> - <PackageReference Include="OpenTelemetry.Exporter.Console" Version="1.12.0" /> - <PackageReference Include="OpenTelemetry.Exporter.OpenTelemetryProtocol" Version="1.12.0" /> - <PackageReference Include="OpenTelemetry.Instrumentation.Runtime" Version="1.12.0" /> - <PackageReference Include="OpenTelemetry.Instrumentation.Process" Version="1.12.0-beta.1" /> - </ItemGroup> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk.Worker"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> <ItemGroup> - <ProjectReference Include="..\StellaOps.Plugin\StellaOps.Plugin.csproj" /> - <ProjectReference Include="..\StellaOps.Authority\StellaOps.Auth.Client\StellaOps.Auth.Client.csproj" /> - <ProjectReference Include="..\StellaOps.Scanner.Analyzers.OS\StellaOps.Scanner.Analyzers.OS.csproj" /> - <ProjectReference Include="..\StellaOps.Scanner.Analyzers.Lang\StellaOps.Scanner.Analyzers.Lang.csproj" /> - <ProjectReference Include="..\StellaOps.Scanner.EntryTrace\StellaOps.Scanner.EntryTrace.csproj" /> - <ProjectReference Include="..\StellaOps.Scanner.Cache\StellaOps.Scanner.Cache.csproj" /> + <PackageReference Include="OpenTelemetry.Extensions.Hosting" Version="1.12.0" /> + <PackageReference Include="OpenTelemetry.Exporter.Console" Version="1.12.0" /> + <PackageReference Include="OpenTelemetry.Exporter.OpenTelemetryProtocol" Version="1.12.0" /> + <PackageReference Include="OpenTelemetry.Instrumentation.Runtime" Version="1.12.0" /> + <PackageReference Include="OpenTelemetry.Instrumentation.Process" Version="1.12.0-beta.1" /> </ItemGroup> -</Project> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" /> + <ProjectReference Include="../../Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOps.Auth.Client.csproj" /> + <ProjectReference Include="../__Libraries/StellaOps.Scanner.Analyzers.OS/StellaOps.Scanner.Analyzers.OS.csproj" /> + <ProjectReference Include="../__Libraries/StellaOps.Scanner.Analyzers.Lang/StellaOps.Scanner.Analyzers.Lang.csproj" /> + <ProjectReference 
Include="../__Libraries/StellaOps.Scanner.EntryTrace/StellaOps.Scanner.EntryTrace.csproj" /> + <ProjectReference Include="../__Libraries/StellaOps.Scanner.Cache/StellaOps.Scanner.Cache.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Scanner.Worker/TASKS.md b/src/Scanner/StellaOps.Scanner.Worker/TASKS.md similarity index 100% rename from src/StellaOps.Scanner.Worker/TASKS.md rename to src/Scanner/StellaOps.Scanner.Worker/TASKS.md diff --git a/src/Scanner/StellaOps.Scanner.sln b/src/Scanner/StellaOps.Scanner.sln new file mode 100644 index 00000000..14214e23 --- /dev/null +++ b/src/Scanner/StellaOps.Scanner.sln @@ -0,0 +1,775 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.0.31903.59 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.WebService", "StellaOps.Scanner.WebService\StellaOps.Scanner.WebService.csproj", "{4E2FF0C3-C801-432F-B85F-A5AD352DE603}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Configuration", "..\__Libraries\StellaOps.Configuration\StellaOps.Configuration.csproj", "{569786B9-DDDD-4F56-A59C-BF963536126B}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Authority.Plugins.Abstractions", "..\Authority\StellaOps.Authority\StellaOps.Authority.Plugins.Abstractions\StellaOps.Authority.Plugins.Abstractions.csproj", "{4635A902-2F4A-4FDA-8675-FD746425AF5A}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cryptography", "..\__Libraries\StellaOps.Cryptography\StellaOps.Cryptography.csproj", "{FEDB9801-7325-4CD6-B655-1C5F6CF03E54}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Abstractions", "..\Authority\StellaOps.Authority\StellaOps.Auth.Abstractions\StellaOps.Auth.Abstractions.csproj", "{ECD7934F-596D-499F-9E34-2486275C3EC2}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.DependencyInjection", "..\__Libraries\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj", "{02C16715-9BF3-43D7-AC97-D6940365907A}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Plugin", "..\__Libraries\StellaOps.Plugin\StellaOps.Plugin.csproj", "{B53FEE71-9EBE-4479-9B07-0C3F8EA2C02E}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Client", "..\Authority\StellaOps.Authority\StellaOps.Auth.Client\StellaOps.Auth.Client.csproj", "{7ECEB818-0FB0-4A1B-A93F-9CC06B73F5FE}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.ServerIntegration", "..\Authority\StellaOps.Authority\StellaOps.Auth.ServerIntegration\StellaOps.Auth.ServerIntegration.csproj", "{CE23FF6E-E3A5-4664-904F-B6D7635A8BA3}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Policy", "..\Policy\__Libraries\StellaOps.Policy\StellaOps.Policy.csproj", "{CC898965-E666-4138-8A38-77775A94B006}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cryptography.DependencyInjection", "..\__Libraries\StellaOps.Cryptography.DependencyInjection\StellaOps.Cryptography.DependencyInjection.csproj", "{8AA1603C-3E75-4D38-941C-85F81F08EE1F}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cryptography.Plugin.BouncyCastle", "..\__Libraries\StellaOps.Cryptography.Plugin.BouncyCastle\StellaOps.Cryptography.Plugin.BouncyCastle.csproj", 
"{F5E3CCCC-1A22-4110-8E4E-429556A7E965}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Notify.Models", "..\Notify\__Libraries\StellaOps.Notify.Models\StellaOps.Notify.Models.csproj", "{C7457C94-CA00-4DE5-9E7D-B570B74A4346}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "__Libraries", "__Libraries", "{41F15E67-7190-CF23-3BC4-77E87134CADD}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Cache", "__Libraries\StellaOps.Scanner.Cache\StellaOps.Scanner.Cache.csproj", "{6150DE8D-E28D-4B87-9825-7ADC37732D46}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Storage", "__Libraries\StellaOps.Scanner.Storage\StellaOps.Scanner.Storage.csproj", "{BA262EB0-B72B-4D61-91F1-F6933A9BF205}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Zastava.Core", "..\Zastava\__Libraries\StellaOps.Zastava.Core\StellaOps.Zastava.Core.csproj", "{C6EA73CC-13DA-4122-AAA1-2C5CA18916AD}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Security", "..\__Libraries\StellaOps.Auth.Security\StellaOps.Auth.Security.csproj", "{E4DDCC41-5257-439B-9C50-136DD3BEE233}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Worker", "StellaOps.Scanner.Worker\StellaOps.Scanner.Worker.csproj", "{D3192CDB-6BF1-466C-9699-86523AC3D79C}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.OS", "__Libraries\StellaOps.Scanner.Analyzers.OS\StellaOps.Scanner.Analyzers.OS.csproj", "{25C9ED1F-22C7-4979-A00D-B3BDFDD238F2}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Core", "__Libraries\StellaOps.Scanner.Core\StellaOps.Scanner.Core.csproj", "{8C0176ED-F21D-43AD-B9F6-4B23CBDF5775}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang", "__Libraries\StellaOps.Scanner.Analyzers.Lang\StellaOps.Scanner.Analyzers.Lang.csproj", "{4AF07D9C-67BF-460C-9E0B-EB6B5D4E5F1C}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.EntryTrace", "__Libraries\StellaOps.Scanner.EntryTrace\StellaOps.Scanner.EntryTrace.csproj", "{C53BE08C-E6AC-492B-8315-F184E645DED7}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.DotNet", "__Libraries\StellaOps.Scanner.Analyzers.Lang.DotNet\StellaOps.Scanner.Analyzers.Lang.DotNet.csproj", "{7471D151-B9D7-46A0-8F8E-601EA8D3B445}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Go", "__Libraries\StellaOps.Scanner.Analyzers.Lang.Go\StellaOps.Scanner.Analyzers.Lang.Go.csproj", "{29EB452F-2FCA-40B7-B087-89289FD64EA9}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Java", "__Libraries\StellaOps.Scanner.Analyzers.Lang.Java\StellaOps.Scanner.Analyzers.Lang.Java.csproj", "{E72D568F-053A-4C7A-9707-9077663CF074}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Node", "__Libraries\StellaOps.Scanner.Analyzers.Lang.Node\StellaOps.Scanner.Analyzers.Lang.Node.csproj", "{EB9006BB-437A-4CA6-ADF4-45347B14298C}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Python", "__Libraries\StellaOps.Scanner.Analyzers.Lang.Python\StellaOps.Scanner.Analyzers.Lang.Python.csproj", "{6625E0A5-728F-4AC7-BAEB-B4244B3E5ED1}" +EndProject 
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Rust", "__Libraries\StellaOps.Scanner.Analyzers.Lang.Rust\StellaOps.Scanner.Analyzers.Lang.Rust.csproj", "{ACED4072-1C6B-4FEC-8CAE-6EE43C796B8F}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.OS.Apk", "__Libraries\StellaOps.Scanner.Analyzers.OS.Apk\StellaOps.Scanner.Analyzers.OS.Apk.csproj", "{A46E73C7-6703-4870-9C80-D7238A693AAF}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.OS.Dpkg", "__Libraries\StellaOps.Scanner.Analyzers.OS.Dpkg\StellaOps.Scanner.Analyzers.OS.Dpkg.csproj", "{1F8BFCCF-8B00-40D3-AFC4-B3DD668BBC19}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.OS.Rpm", "__Libraries\StellaOps.Scanner.Analyzers.OS.Rpm\StellaOps.Scanner.Analyzers.OS.Rpm.csproj", "{2523BF07-0787-404B-B7FD-35BF10810BDD}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Diff", "__Libraries\StellaOps.Scanner.Diff\StellaOps.Scanner.Diff.csproj", "{AF192668-6B1F-4D8C-AFBA-1338080BB3BC}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Emit", "__Libraries\StellaOps.Scanner.Emit\StellaOps.Scanner.Emit.csproj", "{A7E55948-F5A6-4696-8D60-B80A299C2850}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Queue", "__Libraries\StellaOps.Scanner.Queue\StellaOps.Scanner.Queue.csproj", "{CE58DBCD-FE30-4714-A462-758459B21185}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Sbomer.BuildXPlugin", "StellaOps.Scanner.Sbomer.BuildXPlugin\StellaOps.Scanner.Sbomer.BuildXPlugin.csproj", "{A7A2ECB8-5D56-4FB3-81A5-5C22982E7A8C}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "__Tests", "__Tests", "{56BCE1BF-7CBA-7CE8-203D-A88051F1D642}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Go.Tests", "__Tests\StellaOps.Scanner.Analyzers.Lang.Go.Tests\StellaOps.Scanner.Analyzers.Lang.Go.Tests.csproj", "{4496E181-413D-4F31-846C-DA6AF6F8E55C}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Tests", "__Tests\StellaOps.Scanner.Analyzers.Lang.Tests\StellaOps.Scanner.Analyzers.Lang.Tests.csproj", "{C7F55BF1-F20B-4D0E-A5EA-FEDCEF59ECF9}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Java.Tests", "__Tests\StellaOps.Scanner.Analyzers.Lang.Java.Tests\StellaOps.Scanner.Analyzers.Lang.Java.Tests.csproj", "{90196AAE-2C67-4D40-A1E2-2EBBCD035481}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Node.Tests", "__Tests\StellaOps.Scanner.Analyzers.Lang.Node.Tests\StellaOps.Scanner.Analyzers.Lang.Node.Tests.csproj", "{B5A20E00-B2CE-435D-B486-8AB8F831A3A6}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Python.Tests", "__Tests\StellaOps.Scanner.Analyzers.Lang.Python.Tests\StellaOps.Scanner.Analyzers.Lang.Python.Tests.csproj", "{D643E06B-FDAC-4E17-A042-9A60F71CF471}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.OS.Tests", "__Tests\StellaOps.Scanner.Analyzers.OS.Tests\StellaOps.Scanner.Analyzers.OS.Tests.csproj", "{D756A376-1DE0-4E08-B50E-28E38A252C8C}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Cache.Tests", 
"__Tests\StellaOps.Scanner.Cache.Tests\StellaOps.Scanner.Cache.Tests.csproj", "{5C8DC0DB-F6B2-4F59-BD2A-DC6CD5AA6EF1}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Core.Tests", "__Tests\StellaOps.Scanner.Core.Tests\StellaOps.Scanner.Core.Tests.csproj", "{8D7A50A6-7F4B-4754-A150-5C3FFEDA0B13}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Diff.Tests", "__Tests\StellaOps.Scanner.Diff.Tests\StellaOps.Scanner.Diff.Tests.csproj", "{FD376DC0-9638-481D-97AD-29A932CE2560}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Emit.Tests", "__Tests\StellaOps.Scanner.Emit.Tests\StellaOps.Scanner.Emit.Tests.csproj", "{4A9F5BF6-7883-4037-9D60-43B05A4BF4A8}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.EntryTrace.Tests", "__Tests\StellaOps.Scanner.EntryTrace.Tests\StellaOps.Scanner.EntryTrace.Tests.csproj", "{764DB3E2-4A7C-4268-8D54-4CD6C468AF0C}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Queue.Tests", "__Tests\StellaOps.Scanner.Queue.Tests\StellaOps.Scanner.Queue.Tests.csproj", "{A885E136-A737-40EE-814E-1A99FF599369}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Sbomer.BuildXPlugin.Tests", "__Tests\StellaOps.Scanner.Sbomer.BuildXPlugin.Tests\StellaOps.Scanner.Sbomer.BuildXPlugin.Tests.csproj", "{EF65B10E-0A83-4E47-B06A-B80A8E7530F5}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Storage.Tests", "__Tests\StellaOps.Scanner.Storage.Tests\StellaOps.Scanner.Storage.Tests.csproj", "{07D15319-95A0-4C36-B06C-A5C80E0A7752}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.WebService.Tests", "__Tests\StellaOps.Scanner.WebService.Tests\StellaOps.Scanner.WebService.Tests.csproj", "{782652F5-A7C3-4070-8B42-F7DC2C17973E}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Worker.Tests", "__Tests\StellaOps.Scanner.Worker.Tests\StellaOps.Scanner.Worker.Tests.csproj", "{51CAC6CD-ED38-4AFC-AE81-84A4BDD45DB2}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|Any CPU = Release|Any CPU + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {4E2FF0C3-C801-432F-B85F-A5AD352DE603}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {4E2FF0C3-C801-432F-B85F-A5AD352DE603}.Debug|Any CPU.Build.0 = Debug|Any CPU + {4E2FF0C3-C801-432F-B85F-A5AD352DE603}.Debug|x64.ActiveCfg = Debug|Any CPU + {4E2FF0C3-C801-432F-B85F-A5AD352DE603}.Debug|x64.Build.0 = Debug|Any CPU + {4E2FF0C3-C801-432F-B85F-A5AD352DE603}.Debug|x86.ActiveCfg = Debug|Any CPU + {4E2FF0C3-C801-432F-B85F-A5AD352DE603}.Debug|x86.Build.0 = Debug|Any CPU + {4E2FF0C3-C801-432F-B85F-A5AD352DE603}.Release|Any CPU.ActiveCfg = Release|Any CPU + {4E2FF0C3-C801-432F-B85F-A5AD352DE603}.Release|Any CPU.Build.0 = Release|Any CPU + {4E2FF0C3-C801-432F-B85F-A5AD352DE603}.Release|x64.ActiveCfg = Release|Any CPU + {4E2FF0C3-C801-432F-B85F-A5AD352DE603}.Release|x64.Build.0 = Release|Any CPU + {4E2FF0C3-C801-432F-B85F-A5AD352DE603}.Release|x86.ActiveCfg = Release|Any CPU + {4E2FF0C3-C801-432F-B85F-A5AD352DE603}.Release|x86.Build.0 = Release|Any CPU + {569786B9-DDDD-4F56-A59C-BF963536126B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + 
{569786B9-DDDD-4F56-A59C-BF963536126B}.Debug|Any CPU.Build.0 = Debug|Any CPU + {569786B9-DDDD-4F56-A59C-BF963536126B}.Debug|x64.ActiveCfg = Debug|Any CPU + {569786B9-DDDD-4F56-A59C-BF963536126B}.Debug|x64.Build.0 = Debug|Any CPU + {569786B9-DDDD-4F56-A59C-BF963536126B}.Debug|x86.ActiveCfg = Debug|Any CPU + {569786B9-DDDD-4F56-A59C-BF963536126B}.Debug|x86.Build.0 = Debug|Any CPU + {569786B9-DDDD-4F56-A59C-BF963536126B}.Release|Any CPU.ActiveCfg = Release|Any CPU + {569786B9-DDDD-4F56-A59C-BF963536126B}.Release|Any CPU.Build.0 = Release|Any CPU + {569786B9-DDDD-4F56-A59C-BF963536126B}.Release|x64.ActiveCfg = Release|Any CPU + {569786B9-DDDD-4F56-A59C-BF963536126B}.Release|x64.Build.0 = Release|Any CPU + {569786B9-DDDD-4F56-A59C-BF963536126B}.Release|x86.ActiveCfg = Release|Any CPU + {569786B9-DDDD-4F56-A59C-BF963536126B}.Release|x86.Build.0 = Release|Any CPU + {4635A902-2F4A-4FDA-8675-FD746425AF5A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {4635A902-2F4A-4FDA-8675-FD746425AF5A}.Debug|Any CPU.Build.0 = Debug|Any CPU + {4635A902-2F4A-4FDA-8675-FD746425AF5A}.Debug|x64.ActiveCfg = Debug|Any CPU + {4635A902-2F4A-4FDA-8675-FD746425AF5A}.Debug|x64.Build.0 = Debug|Any CPU + {4635A902-2F4A-4FDA-8675-FD746425AF5A}.Debug|x86.ActiveCfg = Debug|Any CPU + {4635A902-2F4A-4FDA-8675-FD746425AF5A}.Debug|x86.Build.0 = Debug|Any CPU + {4635A902-2F4A-4FDA-8675-FD746425AF5A}.Release|Any CPU.ActiveCfg = Release|Any CPU + {4635A902-2F4A-4FDA-8675-FD746425AF5A}.Release|Any CPU.Build.0 = Release|Any CPU + {4635A902-2F4A-4FDA-8675-FD746425AF5A}.Release|x64.ActiveCfg = Release|Any CPU + {4635A902-2F4A-4FDA-8675-FD746425AF5A}.Release|x64.Build.0 = Release|Any CPU + {4635A902-2F4A-4FDA-8675-FD746425AF5A}.Release|x86.ActiveCfg = Release|Any CPU + {4635A902-2F4A-4FDA-8675-FD746425AF5A}.Release|x86.Build.0 = Release|Any CPU + {FEDB9801-7325-4CD6-B655-1C5F6CF03E54}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {FEDB9801-7325-4CD6-B655-1C5F6CF03E54}.Debug|Any CPU.Build.0 = Debug|Any CPU + {FEDB9801-7325-4CD6-B655-1C5F6CF03E54}.Debug|x64.ActiveCfg = Debug|Any CPU + {FEDB9801-7325-4CD6-B655-1C5F6CF03E54}.Debug|x64.Build.0 = Debug|Any CPU + {FEDB9801-7325-4CD6-B655-1C5F6CF03E54}.Debug|x86.ActiveCfg = Debug|Any CPU + {FEDB9801-7325-4CD6-B655-1C5F6CF03E54}.Debug|x86.Build.0 = Debug|Any CPU + {FEDB9801-7325-4CD6-B655-1C5F6CF03E54}.Release|Any CPU.ActiveCfg = Release|Any CPU + {FEDB9801-7325-4CD6-B655-1C5F6CF03E54}.Release|Any CPU.Build.0 = Release|Any CPU + {FEDB9801-7325-4CD6-B655-1C5F6CF03E54}.Release|x64.ActiveCfg = Release|Any CPU + {FEDB9801-7325-4CD6-B655-1C5F6CF03E54}.Release|x64.Build.0 = Release|Any CPU + {FEDB9801-7325-4CD6-B655-1C5F6CF03E54}.Release|x86.ActiveCfg = Release|Any CPU + {FEDB9801-7325-4CD6-B655-1C5F6CF03E54}.Release|x86.Build.0 = Release|Any CPU + {ECD7934F-596D-499F-9E34-2486275C3EC2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {ECD7934F-596D-499F-9E34-2486275C3EC2}.Debug|Any CPU.Build.0 = Debug|Any CPU + {ECD7934F-596D-499F-9E34-2486275C3EC2}.Debug|x64.ActiveCfg = Debug|Any CPU + {ECD7934F-596D-499F-9E34-2486275C3EC2}.Debug|x64.Build.0 = Debug|Any CPU + {ECD7934F-596D-499F-9E34-2486275C3EC2}.Debug|x86.ActiveCfg = Debug|Any CPU + {ECD7934F-596D-499F-9E34-2486275C3EC2}.Debug|x86.Build.0 = Debug|Any CPU + {ECD7934F-596D-499F-9E34-2486275C3EC2}.Release|Any CPU.ActiveCfg = Release|Any CPU + {ECD7934F-596D-499F-9E34-2486275C3EC2}.Release|Any CPU.Build.0 = Release|Any CPU + {ECD7934F-596D-499F-9E34-2486275C3EC2}.Release|x64.ActiveCfg = Release|Any CPU + {ECD7934F-596D-499F-9E34-2486275C3EC2}.Release|x64.Build.0 = 
Release|Any CPU + {ECD7934F-596D-499F-9E34-2486275C3EC2}.Release|x86.ActiveCfg = Release|Any CPU + {ECD7934F-596D-499F-9E34-2486275C3EC2}.Release|x86.Build.0 = Release|Any CPU + {02C16715-9BF3-43D7-AC97-D6940365907A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {02C16715-9BF3-43D7-AC97-D6940365907A}.Debug|Any CPU.Build.0 = Debug|Any CPU + {02C16715-9BF3-43D7-AC97-D6940365907A}.Debug|x64.ActiveCfg = Debug|Any CPU + {02C16715-9BF3-43D7-AC97-D6940365907A}.Debug|x64.Build.0 = Debug|Any CPU + {02C16715-9BF3-43D7-AC97-D6940365907A}.Debug|x86.ActiveCfg = Debug|Any CPU + {02C16715-9BF3-43D7-AC97-D6940365907A}.Debug|x86.Build.0 = Debug|Any CPU + {02C16715-9BF3-43D7-AC97-D6940365907A}.Release|Any CPU.ActiveCfg = Release|Any CPU + {02C16715-9BF3-43D7-AC97-D6940365907A}.Release|Any CPU.Build.0 = Release|Any CPU + {02C16715-9BF3-43D7-AC97-D6940365907A}.Release|x64.ActiveCfg = Release|Any CPU + {02C16715-9BF3-43D7-AC97-D6940365907A}.Release|x64.Build.0 = Release|Any CPU + {02C16715-9BF3-43D7-AC97-D6940365907A}.Release|x86.ActiveCfg = Release|Any CPU + {02C16715-9BF3-43D7-AC97-D6940365907A}.Release|x86.Build.0 = Release|Any CPU + {B53FEE71-9EBE-4479-9B07-0C3F8EA2C02E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B53FEE71-9EBE-4479-9B07-0C3F8EA2C02E}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B53FEE71-9EBE-4479-9B07-0C3F8EA2C02E}.Debug|x64.ActiveCfg = Debug|Any CPU + {B53FEE71-9EBE-4479-9B07-0C3F8EA2C02E}.Debug|x64.Build.0 = Debug|Any CPU + {B53FEE71-9EBE-4479-9B07-0C3F8EA2C02E}.Debug|x86.ActiveCfg = Debug|Any CPU + {B53FEE71-9EBE-4479-9B07-0C3F8EA2C02E}.Debug|x86.Build.0 = Debug|Any CPU + {B53FEE71-9EBE-4479-9B07-0C3F8EA2C02E}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B53FEE71-9EBE-4479-9B07-0C3F8EA2C02E}.Release|Any CPU.Build.0 = Release|Any CPU + {B53FEE71-9EBE-4479-9B07-0C3F8EA2C02E}.Release|x64.ActiveCfg = Release|Any CPU + {B53FEE71-9EBE-4479-9B07-0C3F8EA2C02E}.Release|x64.Build.0 = Release|Any CPU + {B53FEE71-9EBE-4479-9B07-0C3F8EA2C02E}.Release|x86.ActiveCfg = Release|Any CPU + {B53FEE71-9EBE-4479-9B07-0C3F8EA2C02E}.Release|x86.Build.0 = Release|Any CPU + {7ECEB818-0FB0-4A1B-A93F-9CC06B73F5FE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7ECEB818-0FB0-4A1B-A93F-9CC06B73F5FE}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7ECEB818-0FB0-4A1B-A93F-9CC06B73F5FE}.Debug|x64.ActiveCfg = Debug|Any CPU + {7ECEB818-0FB0-4A1B-A93F-9CC06B73F5FE}.Debug|x64.Build.0 = Debug|Any CPU + {7ECEB818-0FB0-4A1B-A93F-9CC06B73F5FE}.Debug|x86.ActiveCfg = Debug|Any CPU + {7ECEB818-0FB0-4A1B-A93F-9CC06B73F5FE}.Debug|x86.Build.0 = Debug|Any CPU + {7ECEB818-0FB0-4A1B-A93F-9CC06B73F5FE}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7ECEB818-0FB0-4A1B-A93F-9CC06B73F5FE}.Release|Any CPU.Build.0 = Release|Any CPU + {7ECEB818-0FB0-4A1B-A93F-9CC06B73F5FE}.Release|x64.ActiveCfg = Release|Any CPU + {7ECEB818-0FB0-4A1B-A93F-9CC06B73F5FE}.Release|x64.Build.0 = Release|Any CPU + {7ECEB818-0FB0-4A1B-A93F-9CC06B73F5FE}.Release|x86.ActiveCfg = Release|Any CPU + {7ECEB818-0FB0-4A1B-A93F-9CC06B73F5FE}.Release|x86.Build.0 = Release|Any CPU + {CE23FF6E-E3A5-4664-904F-B6D7635A8BA3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {CE23FF6E-E3A5-4664-904F-B6D7635A8BA3}.Debug|Any CPU.Build.0 = Debug|Any CPU + {CE23FF6E-E3A5-4664-904F-B6D7635A8BA3}.Debug|x64.ActiveCfg = Debug|Any CPU + {CE23FF6E-E3A5-4664-904F-B6D7635A8BA3}.Debug|x64.Build.0 = Debug|Any CPU + {CE23FF6E-E3A5-4664-904F-B6D7635A8BA3}.Debug|x86.ActiveCfg = Debug|Any CPU + {CE23FF6E-E3A5-4664-904F-B6D7635A8BA3}.Debug|x86.Build.0 = Debug|Any CPU + {CE23FF6E-E3A5-4664-904F-B6D7635A8BA3}.Release|Any 
CPU.ActiveCfg = Release|Any CPU + {CE23FF6E-E3A5-4664-904F-B6D7635A8BA3}.Release|Any CPU.Build.0 = Release|Any CPU + {CE23FF6E-E3A5-4664-904F-B6D7635A8BA3}.Release|x64.ActiveCfg = Release|Any CPU + {CE23FF6E-E3A5-4664-904F-B6D7635A8BA3}.Release|x64.Build.0 = Release|Any CPU + {CE23FF6E-E3A5-4664-904F-B6D7635A8BA3}.Release|x86.ActiveCfg = Release|Any CPU + {CE23FF6E-E3A5-4664-904F-B6D7635A8BA3}.Release|x86.Build.0 = Release|Any CPU + {CC898965-E666-4138-8A38-77775A94B006}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {CC898965-E666-4138-8A38-77775A94B006}.Debug|Any CPU.Build.0 = Debug|Any CPU + {CC898965-E666-4138-8A38-77775A94B006}.Debug|x64.ActiveCfg = Debug|Any CPU + {CC898965-E666-4138-8A38-77775A94B006}.Debug|x64.Build.0 = Debug|Any CPU + {CC898965-E666-4138-8A38-77775A94B006}.Debug|x86.ActiveCfg = Debug|Any CPU + {CC898965-E666-4138-8A38-77775A94B006}.Debug|x86.Build.0 = Debug|Any CPU + {CC898965-E666-4138-8A38-77775A94B006}.Release|Any CPU.ActiveCfg = Release|Any CPU + {CC898965-E666-4138-8A38-77775A94B006}.Release|Any CPU.Build.0 = Release|Any CPU + {CC898965-E666-4138-8A38-77775A94B006}.Release|x64.ActiveCfg = Release|Any CPU + {CC898965-E666-4138-8A38-77775A94B006}.Release|x64.Build.0 = Release|Any CPU + {CC898965-E666-4138-8A38-77775A94B006}.Release|x86.ActiveCfg = Release|Any CPU + {CC898965-E666-4138-8A38-77775A94B006}.Release|x86.Build.0 = Release|Any CPU + {8AA1603C-3E75-4D38-941C-85F81F08EE1F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {8AA1603C-3E75-4D38-941C-85F81F08EE1F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {8AA1603C-3E75-4D38-941C-85F81F08EE1F}.Debug|x64.ActiveCfg = Debug|Any CPU + {8AA1603C-3E75-4D38-941C-85F81F08EE1F}.Debug|x64.Build.0 = Debug|Any CPU + {8AA1603C-3E75-4D38-941C-85F81F08EE1F}.Debug|x86.ActiveCfg = Debug|Any CPU + {8AA1603C-3E75-4D38-941C-85F81F08EE1F}.Debug|x86.Build.0 = Debug|Any CPU + {8AA1603C-3E75-4D38-941C-85F81F08EE1F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {8AA1603C-3E75-4D38-941C-85F81F08EE1F}.Release|Any CPU.Build.0 = Release|Any CPU + {8AA1603C-3E75-4D38-941C-85F81F08EE1F}.Release|x64.ActiveCfg = Release|Any CPU + {8AA1603C-3E75-4D38-941C-85F81F08EE1F}.Release|x64.Build.0 = Release|Any CPU + {8AA1603C-3E75-4D38-941C-85F81F08EE1F}.Release|x86.ActiveCfg = Release|Any CPU + {8AA1603C-3E75-4D38-941C-85F81F08EE1F}.Release|x86.Build.0 = Release|Any CPU + {F5E3CCCC-1A22-4110-8E4E-429556A7E965}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {F5E3CCCC-1A22-4110-8E4E-429556A7E965}.Debug|Any CPU.Build.0 = Debug|Any CPU + {F5E3CCCC-1A22-4110-8E4E-429556A7E965}.Debug|x64.ActiveCfg = Debug|Any CPU + {F5E3CCCC-1A22-4110-8E4E-429556A7E965}.Debug|x64.Build.0 = Debug|Any CPU + {F5E3CCCC-1A22-4110-8E4E-429556A7E965}.Debug|x86.ActiveCfg = Debug|Any CPU + {F5E3CCCC-1A22-4110-8E4E-429556A7E965}.Debug|x86.Build.0 = Debug|Any CPU + {F5E3CCCC-1A22-4110-8E4E-429556A7E965}.Release|Any CPU.ActiveCfg = Release|Any CPU + {F5E3CCCC-1A22-4110-8E4E-429556A7E965}.Release|Any CPU.Build.0 = Release|Any CPU + {F5E3CCCC-1A22-4110-8E4E-429556A7E965}.Release|x64.ActiveCfg = Release|Any CPU + {F5E3CCCC-1A22-4110-8E4E-429556A7E965}.Release|x64.Build.0 = Release|Any CPU + {F5E3CCCC-1A22-4110-8E4E-429556A7E965}.Release|x86.ActiveCfg = Release|Any CPU + {F5E3CCCC-1A22-4110-8E4E-429556A7E965}.Release|x86.Build.0 = Release|Any CPU + {C7457C94-CA00-4DE5-9E7D-B570B74A4346}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C7457C94-CA00-4DE5-9E7D-B570B74A4346}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C7457C94-CA00-4DE5-9E7D-B570B74A4346}.Debug|x64.ActiveCfg = Debug|Any CPU + 
{C7457C94-CA00-4DE5-9E7D-B570B74A4346}.Debug|x64.Build.0 = Debug|Any CPU + {C7457C94-CA00-4DE5-9E7D-B570B74A4346}.Debug|x86.ActiveCfg = Debug|Any CPU + {C7457C94-CA00-4DE5-9E7D-B570B74A4346}.Debug|x86.Build.0 = Debug|Any CPU + {C7457C94-CA00-4DE5-9E7D-B570B74A4346}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C7457C94-CA00-4DE5-9E7D-B570B74A4346}.Release|Any CPU.Build.0 = Release|Any CPU + {C7457C94-CA00-4DE5-9E7D-B570B74A4346}.Release|x64.ActiveCfg = Release|Any CPU + {C7457C94-CA00-4DE5-9E7D-B570B74A4346}.Release|x64.Build.0 = Release|Any CPU + {C7457C94-CA00-4DE5-9E7D-B570B74A4346}.Release|x86.ActiveCfg = Release|Any CPU + {C7457C94-CA00-4DE5-9E7D-B570B74A4346}.Release|x86.Build.0 = Release|Any CPU + {6150DE8D-E28D-4B87-9825-7ADC37732D46}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {6150DE8D-E28D-4B87-9825-7ADC37732D46}.Debug|Any CPU.Build.0 = Debug|Any CPU + {6150DE8D-E28D-4B87-9825-7ADC37732D46}.Debug|x64.ActiveCfg = Debug|Any CPU + {6150DE8D-E28D-4B87-9825-7ADC37732D46}.Debug|x64.Build.0 = Debug|Any CPU + {6150DE8D-E28D-4B87-9825-7ADC37732D46}.Debug|x86.ActiveCfg = Debug|Any CPU + {6150DE8D-E28D-4B87-9825-7ADC37732D46}.Debug|x86.Build.0 = Debug|Any CPU + {6150DE8D-E28D-4B87-9825-7ADC37732D46}.Release|Any CPU.ActiveCfg = Release|Any CPU + {6150DE8D-E28D-4B87-9825-7ADC37732D46}.Release|Any CPU.Build.0 = Release|Any CPU + {6150DE8D-E28D-4B87-9825-7ADC37732D46}.Release|x64.ActiveCfg = Release|Any CPU + {6150DE8D-E28D-4B87-9825-7ADC37732D46}.Release|x64.Build.0 = Release|Any CPU + {6150DE8D-E28D-4B87-9825-7ADC37732D46}.Release|x86.ActiveCfg = Release|Any CPU + {6150DE8D-E28D-4B87-9825-7ADC37732D46}.Release|x86.Build.0 = Release|Any CPU + {BA262EB0-B72B-4D61-91F1-F6933A9BF205}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {BA262EB0-B72B-4D61-91F1-F6933A9BF205}.Debug|Any CPU.Build.0 = Debug|Any CPU + {BA262EB0-B72B-4D61-91F1-F6933A9BF205}.Debug|x64.ActiveCfg = Debug|Any CPU + {BA262EB0-B72B-4D61-91F1-F6933A9BF205}.Debug|x64.Build.0 = Debug|Any CPU + {BA262EB0-B72B-4D61-91F1-F6933A9BF205}.Debug|x86.ActiveCfg = Debug|Any CPU + {BA262EB0-B72B-4D61-91F1-F6933A9BF205}.Debug|x86.Build.0 = Debug|Any CPU + {BA262EB0-B72B-4D61-91F1-F6933A9BF205}.Release|Any CPU.ActiveCfg = Release|Any CPU + {BA262EB0-B72B-4D61-91F1-F6933A9BF205}.Release|Any CPU.Build.0 = Release|Any CPU + {BA262EB0-B72B-4D61-91F1-F6933A9BF205}.Release|x64.ActiveCfg = Release|Any CPU + {BA262EB0-B72B-4D61-91F1-F6933A9BF205}.Release|x64.Build.0 = Release|Any CPU + {BA262EB0-B72B-4D61-91F1-F6933A9BF205}.Release|x86.ActiveCfg = Release|Any CPU + {BA262EB0-B72B-4D61-91F1-F6933A9BF205}.Release|x86.Build.0 = Release|Any CPU + {C6EA73CC-13DA-4122-AAA1-2C5CA18916AD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C6EA73CC-13DA-4122-AAA1-2C5CA18916AD}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C6EA73CC-13DA-4122-AAA1-2C5CA18916AD}.Debug|x64.ActiveCfg = Debug|Any CPU + {C6EA73CC-13DA-4122-AAA1-2C5CA18916AD}.Debug|x64.Build.0 = Debug|Any CPU + {C6EA73CC-13DA-4122-AAA1-2C5CA18916AD}.Debug|x86.ActiveCfg = Debug|Any CPU + {C6EA73CC-13DA-4122-AAA1-2C5CA18916AD}.Debug|x86.Build.0 = Debug|Any CPU + {C6EA73CC-13DA-4122-AAA1-2C5CA18916AD}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C6EA73CC-13DA-4122-AAA1-2C5CA18916AD}.Release|Any CPU.Build.0 = Release|Any CPU + {C6EA73CC-13DA-4122-AAA1-2C5CA18916AD}.Release|x64.ActiveCfg = Release|Any CPU + {C6EA73CC-13DA-4122-AAA1-2C5CA18916AD}.Release|x64.Build.0 = Release|Any CPU + {C6EA73CC-13DA-4122-AAA1-2C5CA18916AD}.Release|x86.ActiveCfg = Release|Any CPU + {C6EA73CC-13DA-4122-AAA1-2C5CA18916AD}.Release|x86.Build.0 = 
Release|Any CPU + {E4DDCC41-5257-439B-9C50-136DD3BEE233}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E4DDCC41-5257-439B-9C50-136DD3BEE233}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E4DDCC41-5257-439B-9C50-136DD3BEE233}.Debug|x64.ActiveCfg = Debug|Any CPU + {E4DDCC41-5257-439B-9C50-136DD3BEE233}.Debug|x64.Build.0 = Debug|Any CPU + {E4DDCC41-5257-439B-9C50-136DD3BEE233}.Debug|x86.ActiveCfg = Debug|Any CPU + {E4DDCC41-5257-439B-9C50-136DD3BEE233}.Debug|x86.Build.0 = Debug|Any CPU + {E4DDCC41-5257-439B-9C50-136DD3BEE233}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E4DDCC41-5257-439B-9C50-136DD3BEE233}.Release|Any CPU.Build.0 = Release|Any CPU + {E4DDCC41-5257-439B-9C50-136DD3BEE233}.Release|x64.ActiveCfg = Release|Any CPU + {E4DDCC41-5257-439B-9C50-136DD3BEE233}.Release|x64.Build.0 = Release|Any CPU + {E4DDCC41-5257-439B-9C50-136DD3BEE233}.Release|x86.ActiveCfg = Release|Any CPU + {E4DDCC41-5257-439B-9C50-136DD3BEE233}.Release|x86.Build.0 = Release|Any CPU + {D3192CDB-6BF1-466C-9699-86523AC3D79C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D3192CDB-6BF1-466C-9699-86523AC3D79C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D3192CDB-6BF1-466C-9699-86523AC3D79C}.Debug|x64.ActiveCfg = Debug|Any CPU + {D3192CDB-6BF1-466C-9699-86523AC3D79C}.Debug|x64.Build.0 = Debug|Any CPU + {D3192CDB-6BF1-466C-9699-86523AC3D79C}.Debug|x86.ActiveCfg = Debug|Any CPU + {D3192CDB-6BF1-466C-9699-86523AC3D79C}.Debug|x86.Build.0 = Debug|Any CPU + {D3192CDB-6BF1-466C-9699-86523AC3D79C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D3192CDB-6BF1-466C-9699-86523AC3D79C}.Release|Any CPU.Build.0 = Release|Any CPU + {D3192CDB-6BF1-466C-9699-86523AC3D79C}.Release|x64.ActiveCfg = Release|Any CPU + {D3192CDB-6BF1-466C-9699-86523AC3D79C}.Release|x64.Build.0 = Release|Any CPU + {D3192CDB-6BF1-466C-9699-86523AC3D79C}.Release|x86.ActiveCfg = Release|Any CPU + {D3192CDB-6BF1-466C-9699-86523AC3D79C}.Release|x86.Build.0 = Release|Any CPU + {25C9ED1F-22C7-4979-A00D-B3BDFDD238F2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {25C9ED1F-22C7-4979-A00D-B3BDFDD238F2}.Debug|Any CPU.Build.0 = Debug|Any CPU + {25C9ED1F-22C7-4979-A00D-B3BDFDD238F2}.Debug|x64.ActiveCfg = Debug|Any CPU + {25C9ED1F-22C7-4979-A00D-B3BDFDD238F2}.Debug|x64.Build.0 = Debug|Any CPU + {25C9ED1F-22C7-4979-A00D-B3BDFDD238F2}.Debug|x86.ActiveCfg = Debug|Any CPU + {25C9ED1F-22C7-4979-A00D-B3BDFDD238F2}.Debug|x86.Build.0 = Debug|Any CPU + {25C9ED1F-22C7-4979-A00D-B3BDFDD238F2}.Release|Any CPU.ActiveCfg = Release|Any CPU + {25C9ED1F-22C7-4979-A00D-B3BDFDD238F2}.Release|Any CPU.Build.0 = Release|Any CPU + {25C9ED1F-22C7-4979-A00D-B3BDFDD238F2}.Release|x64.ActiveCfg = Release|Any CPU + {25C9ED1F-22C7-4979-A00D-B3BDFDD238F2}.Release|x64.Build.0 = Release|Any CPU + {25C9ED1F-22C7-4979-A00D-B3BDFDD238F2}.Release|x86.ActiveCfg = Release|Any CPU + {25C9ED1F-22C7-4979-A00D-B3BDFDD238F2}.Release|x86.Build.0 = Release|Any CPU + {8C0176ED-F21D-43AD-B9F6-4B23CBDF5775}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {8C0176ED-F21D-43AD-B9F6-4B23CBDF5775}.Debug|Any CPU.Build.0 = Debug|Any CPU + {8C0176ED-F21D-43AD-B9F6-4B23CBDF5775}.Debug|x64.ActiveCfg = Debug|Any CPU + {8C0176ED-F21D-43AD-B9F6-4B23CBDF5775}.Debug|x64.Build.0 = Debug|Any CPU + {8C0176ED-F21D-43AD-B9F6-4B23CBDF5775}.Debug|x86.ActiveCfg = Debug|Any CPU + {8C0176ED-F21D-43AD-B9F6-4B23CBDF5775}.Debug|x86.Build.0 = Debug|Any CPU + {8C0176ED-F21D-43AD-B9F6-4B23CBDF5775}.Release|Any CPU.ActiveCfg = Release|Any CPU + {8C0176ED-F21D-43AD-B9F6-4B23CBDF5775}.Release|Any CPU.Build.0 = Release|Any CPU + 
{8C0176ED-F21D-43AD-B9F6-4B23CBDF5775}.Release|x64.ActiveCfg = Release|Any CPU + {8C0176ED-F21D-43AD-B9F6-4B23CBDF5775}.Release|x64.Build.0 = Release|Any CPU + {8C0176ED-F21D-43AD-B9F6-4B23CBDF5775}.Release|x86.ActiveCfg = Release|Any CPU + {8C0176ED-F21D-43AD-B9F6-4B23CBDF5775}.Release|x86.Build.0 = Release|Any CPU + {4AF07D9C-67BF-460C-9E0B-EB6B5D4E5F1C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {4AF07D9C-67BF-460C-9E0B-EB6B5D4E5F1C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {4AF07D9C-67BF-460C-9E0B-EB6B5D4E5F1C}.Debug|x64.ActiveCfg = Debug|Any CPU + {4AF07D9C-67BF-460C-9E0B-EB6B5D4E5F1C}.Debug|x64.Build.0 = Debug|Any CPU + {4AF07D9C-67BF-460C-9E0B-EB6B5D4E5F1C}.Debug|x86.ActiveCfg = Debug|Any CPU + {4AF07D9C-67BF-460C-9E0B-EB6B5D4E5F1C}.Debug|x86.Build.0 = Debug|Any CPU + {4AF07D9C-67BF-460C-9E0B-EB6B5D4E5F1C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {4AF07D9C-67BF-460C-9E0B-EB6B5D4E5F1C}.Release|Any CPU.Build.0 = Release|Any CPU + {4AF07D9C-67BF-460C-9E0B-EB6B5D4E5F1C}.Release|x64.ActiveCfg = Release|Any CPU + {4AF07D9C-67BF-460C-9E0B-EB6B5D4E5F1C}.Release|x64.Build.0 = Release|Any CPU + {4AF07D9C-67BF-460C-9E0B-EB6B5D4E5F1C}.Release|x86.ActiveCfg = Release|Any CPU + {4AF07D9C-67BF-460C-9E0B-EB6B5D4E5F1C}.Release|x86.Build.0 = Release|Any CPU + {C53BE08C-E6AC-492B-8315-F184E645DED7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C53BE08C-E6AC-492B-8315-F184E645DED7}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C53BE08C-E6AC-492B-8315-F184E645DED7}.Debug|x64.ActiveCfg = Debug|Any CPU + {C53BE08C-E6AC-492B-8315-F184E645DED7}.Debug|x64.Build.0 = Debug|Any CPU + {C53BE08C-E6AC-492B-8315-F184E645DED7}.Debug|x86.ActiveCfg = Debug|Any CPU + {C53BE08C-E6AC-492B-8315-F184E645DED7}.Debug|x86.Build.0 = Debug|Any CPU + {C53BE08C-E6AC-492B-8315-F184E645DED7}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C53BE08C-E6AC-492B-8315-F184E645DED7}.Release|Any CPU.Build.0 = Release|Any CPU + {C53BE08C-E6AC-492B-8315-F184E645DED7}.Release|x64.ActiveCfg = Release|Any CPU + {C53BE08C-E6AC-492B-8315-F184E645DED7}.Release|x64.Build.0 = Release|Any CPU + {C53BE08C-E6AC-492B-8315-F184E645DED7}.Release|x86.ActiveCfg = Release|Any CPU + {C53BE08C-E6AC-492B-8315-F184E645DED7}.Release|x86.Build.0 = Release|Any CPU + {7471D151-B9D7-46A0-8F8E-601EA8D3B445}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7471D151-B9D7-46A0-8F8E-601EA8D3B445}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7471D151-B9D7-46A0-8F8E-601EA8D3B445}.Debug|x64.ActiveCfg = Debug|Any CPU + {7471D151-B9D7-46A0-8F8E-601EA8D3B445}.Debug|x64.Build.0 = Debug|Any CPU + {7471D151-B9D7-46A0-8F8E-601EA8D3B445}.Debug|x86.ActiveCfg = Debug|Any CPU + {7471D151-B9D7-46A0-8F8E-601EA8D3B445}.Debug|x86.Build.0 = Debug|Any CPU + {7471D151-B9D7-46A0-8F8E-601EA8D3B445}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7471D151-B9D7-46A0-8F8E-601EA8D3B445}.Release|Any CPU.Build.0 = Release|Any CPU + {7471D151-B9D7-46A0-8F8E-601EA8D3B445}.Release|x64.ActiveCfg = Release|Any CPU + {7471D151-B9D7-46A0-8F8E-601EA8D3B445}.Release|x64.Build.0 = Release|Any CPU + {7471D151-B9D7-46A0-8F8E-601EA8D3B445}.Release|x86.ActiveCfg = Release|Any CPU + {7471D151-B9D7-46A0-8F8E-601EA8D3B445}.Release|x86.Build.0 = Release|Any CPU + {29EB452F-2FCA-40B7-B087-89289FD64EA9}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {29EB452F-2FCA-40B7-B087-89289FD64EA9}.Debug|Any CPU.Build.0 = Debug|Any CPU + {29EB452F-2FCA-40B7-B087-89289FD64EA9}.Debug|x64.ActiveCfg = Debug|Any CPU + {29EB452F-2FCA-40B7-B087-89289FD64EA9}.Debug|x64.Build.0 = Debug|Any CPU + {29EB452F-2FCA-40B7-B087-89289FD64EA9}.Debug|x86.ActiveCfg = 
Debug|Any CPU + {29EB452F-2FCA-40B7-B087-89289FD64EA9}.Debug|x86.Build.0 = Debug|Any CPU + {29EB452F-2FCA-40B7-B087-89289FD64EA9}.Release|Any CPU.ActiveCfg = Release|Any CPU + {29EB452F-2FCA-40B7-B087-89289FD64EA9}.Release|Any CPU.Build.0 = Release|Any CPU + {29EB452F-2FCA-40B7-B087-89289FD64EA9}.Release|x64.ActiveCfg = Release|Any CPU + {29EB452F-2FCA-40B7-B087-89289FD64EA9}.Release|x64.Build.0 = Release|Any CPU + {29EB452F-2FCA-40B7-B087-89289FD64EA9}.Release|x86.ActiveCfg = Release|Any CPU + {29EB452F-2FCA-40B7-B087-89289FD64EA9}.Release|x86.Build.0 = Release|Any CPU + {E72D568F-053A-4C7A-9707-9077663CF074}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E72D568F-053A-4C7A-9707-9077663CF074}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E72D568F-053A-4C7A-9707-9077663CF074}.Debug|x64.ActiveCfg = Debug|Any CPU + {E72D568F-053A-4C7A-9707-9077663CF074}.Debug|x64.Build.0 = Debug|Any CPU + {E72D568F-053A-4C7A-9707-9077663CF074}.Debug|x86.ActiveCfg = Debug|Any CPU + {E72D568F-053A-4C7A-9707-9077663CF074}.Debug|x86.Build.0 = Debug|Any CPU + {E72D568F-053A-4C7A-9707-9077663CF074}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E72D568F-053A-4C7A-9707-9077663CF074}.Release|Any CPU.Build.0 = Release|Any CPU + {E72D568F-053A-4C7A-9707-9077663CF074}.Release|x64.ActiveCfg = Release|Any CPU + {E72D568F-053A-4C7A-9707-9077663CF074}.Release|x64.Build.0 = Release|Any CPU + {E72D568F-053A-4C7A-9707-9077663CF074}.Release|x86.ActiveCfg = Release|Any CPU + {E72D568F-053A-4C7A-9707-9077663CF074}.Release|x86.Build.0 = Release|Any CPU + {EB9006BB-437A-4CA6-ADF4-45347B14298C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {EB9006BB-437A-4CA6-ADF4-45347B14298C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {EB9006BB-437A-4CA6-ADF4-45347B14298C}.Debug|x64.ActiveCfg = Debug|Any CPU + {EB9006BB-437A-4CA6-ADF4-45347B14298C}.Debug|x64.Build.0 = Debug|Any CPU + {EB9006BB-437A-4CA6-ADF4-45347B14298C}.Debug|x86.ActiveCfg = Debug|Any CPU + {EB9006BB-437A-4CA6-ADF4-45347B14298C}.Debug|x86.Build.0 = Debug|Any CPU + {EB9006BB-437A-4CA6-ADF4-45347B14298C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {EB9006BB-437A-4CA6-ADF4-45347B14298C}.Release|Any CPU.Build.0 = Release|Any CPU + {EB9006BB-437A-4CA6-ADF4-45347B14298C}.Release|x64.ActiveCfg = Release|Any CPU + {EB9006BB-437A-4CA6-ADF4-45347B14298C}.Release|x64.Build.0 = Release|Any CPU + {EB9006BB-437A-4CA6-ADF4-45347B14298C}.Release|x86.ActiveCfg = Release|Any CPU + {EB9006BB-437A-4CA6-ADF4-45347B14298C}.Release|x86.Build.0 = Release|Any CPU + {6625E0A5-728F-4AC7-BAEB-B4244B3E5ED1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {6625E0A5-728F-4AC7-BAEB-B4244B3E5ED1}.Debug|Any CPU.Build.0 = Debug|Any CPU + {6625E0A5-728F-4AC7-BAEB-B4244B3E5ED1}.Debug|x64.ActiveCfg = Debug|Any CPU + {6625E0A5-728F-4AC7-BAEB-B4244B3E5ED1}.Debug|x64.Build.0 = Debug|Any CPU + {6625E0A5-728F-4AC7-BAEB-B4244B3E5ED1}.Debug|x86.ActiveCfg = Debug|Any CPU + {6625E0A5-728F-4AC7-BAEB-B4244B3E5ED1}.Debug|x86.Build.0 = Debug|Any CPU + {6625E0A5-728F-4AC7-BAEB-B4244B3E5ED1}.Release|Any CPU.ActiveCfg = Release|Any CPU + {6625E0A5-728F-4AC7-BAEB-B4244B3E5ED1}.Release|Any CPU.Build.0 = Release|Any CPU + {6625E0A5-728F-4AC7-BAEB-B4244B3E5ED1}.Release|x64.ActiveCfg = Release|Any CPU + {6625E0A5-728F-4AC7-BAEB-B4244B3E5ED1}.Release|x64.Build.0 = Release|Any CPU + {6625E0A5-728F-4AC7-BAEB-B4244B3E5ED1}.Release|x86.ActiveCfg = Release|Any CPU + {6625E0A5-728F-4AC7-BAEB-B4244B3E5ED1}.Release|x86.Build.0 = Release|Any CPU + {ACED4072-1C6B-4FEC-8CAE-6EE43C796B8F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + 
{ACED4072-1C6B-4FEC-8CAE-6EE43C796B8F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {ACED4072-1C6B-4FEC-8CAE-6EE43C796B8F}.Debug|x64.ActiveCfg = Debug|Any CPU + {ACED4072-1C6B-4FEC-8CAE-6EE43C796B8F}.Debug|x64.Build.0 = Debug|Any CPU + {ACED4072-1C6B-4FEC-8CAE-6EE43C796B8F}.Debug|x86.ActiveCfg = Debug|Any CPU + {ACED4072-1C6B-4FEC-8CAE-6EE43C796B8F}.Debug|x86.Build.0 = Debug|Any CPU + {ACED4072-1C6B-4FEC-8CAE-6EE43C796B8F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {ACED4072-1C6B-4FEC-8CAE-6EE43C796B8F}.Release|Any CPU.Build.0 = Release|Any CPU + {ACED4072-1C6B-4FEC-8CAE-6EE43C796B8F}.Release|x64.ActiveCfg = Release|Any CPU + {ACED4072-1C6B-4FEC-8CAE-6EE43C796B8F}.Release|x64.Build.0 = Release|Any CPU + {ACED4072-1C6B-4FEC-8CAE-6EE43C796B8F}.Release|x86.ActiveCfg = Release|Any CPU + {ACED4072-1C6B-4FEC-8CAE-6EE43C796B8F}.Release|x86.Build.0 = Release|Any CPU + {A46E73C7-6703-4870-9C80-D7238A693AAF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {A46E73C7-6703-4870-9C80-D7238A693AAF}.Debug|Any CPU.Build.0 = Debug|Any CPU + {A46E73C7-6703-4870-9C80-D7238A693AAF}.Debug|x64.ActiveCfg = Debug|Any CPU + {A46E73C7-6703-4870-9C80-D7238A693AAF}.Debug|x64.Build.0 = Debug|Any CPU + {A46E73C7-6703-4870-9C80-D7238A693AAF}.Debug|x86.ActiveCfg = Debug|Any CPU + {A46E73C7-6703-4870-9C80-D7238A693AAF}.Debug|x86.Build.0 = Debug|Any CPU + {A46E73C7-6703-4870-9C80-D7238A693AAF}.Release|Any CPU.ActiveCfg = Release|Any CPU + {A46E73C7-6703-4870-9C80-D7238A693AAF}.Release|Any CPU.Build.0 = Release|Any CPU + {A46E73C7-6703-4870-9C80-D7238A693AAF}.Release|x64.ActiveCfg = Release|Any CPU + {A46E73C7-6703-4870-9C80-D7238A693AAF}.Release|x64.Build.0 = Release|Any CPU + {A46E73C7-6703-4870-9C80-D7238A693AAF}.Release|x86.ActiveCfg = Release|Any CPU + {A46E73C7-6703-4870-9C80-D7238A693AAF}.Release|x86.Build.0 = Release|Any CPU + {1F8BFCCF-8B00-40D3-AFC4-B3DD668BBC19}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {1F8BFCCF-8B00-40D3-AFC4-B3DD668BBC19}.Debug|Any CPU.Build.0 = Debug|Any CPU + {1F8BFCCF-8B00-40D3-AFC4-B3DD668BBC19}.Debug|x64.ActiveCfg = Debug|Any CPU + {1F8BFCCF-8B00-40D3-AFC4-B3DD668BBC19}.Debug|x64.Build.0 = Debug|Any CPU + {1F8BFCCF-8B00-40D3-AFC4-B3DD668BBC19}.Debug|x86.ActiveCfg = Debug|Any CPU + {1F8BFCCF-8B00-40D3-AFC4-B3DD668BBC19}.Debug|x86.Build.0 = Debug|Any CPU + {1F8BFCCF-8B00-40D3-AFC4-B3DD668BBC19}.Release|Any CPU.ActiveCfg = Release|Any CPU + {1F8BFCCF-8B00-40D3-AFC4-B3DD668BBC19}.Release|Any CPU.Build.0 = Release|Any CPU + {1F8BFCCF-8B00-40D3-AFC4-B3DD668BBC19}.Release|x64.ActiveCfg = Release|Any CPU + {1F8BFCCF-8B00-40D3-AFC4-B3DD668BBC19}.Release|x64.Build.0 = Release|Any CPU + {1F8BFCCF-8B00-40D3-AFC4-B3DD668BBC19}.Release|x86.ActiveCfg = Release|Any CPU + {1F8BFCCF-8B00-40D3-AFC4-B3DD668BBC19}.Release|x86.Build.0 = Release|Any CPU + {2523BF07-0787-404B-B7FD-35BF10810BDD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {2523BF07-0787-404B-B7FD-35BF10810BDD}.Debug|Any CPU.Build.0 = Debug|Any CPU + {2523BF07-0787-404B-B7FD-35BF10810BDD}.Debug|x64.ActiveCfg = Debug|Any CPU + {2523BF07-0787-404B-B7FD-35BF10810BDD}.Debug|x64.Build.0 = Debug|Any CPU + {2523BF07-0787-404B-B7FD-35BF10810BDD}.Debug|x86.ActiveCfg = Debug|Any CPU + {2523BF07-0787-404B-B7FD-35BF10810BDD}.Debug|x86.Build.0 = Debug|Any CPU + {2523BF07-0787-404B-B7FD-35BF10810BDD}.Release|Any CPU.ActiveCfg = Release|Any CPU + {2523BF07-0787-404B-B7FD-35BF10810BDD}.Release|Any CPU.Build.0 = Release|Any CPU + {2523BF07-0787-404B-B7FD-35BF10810BDD}.Release|x64.ActiveCfg = Release|Any CPU + {2523BF07-0787-404B-B7FD-35BF10810BDD}.Release|x64.Build.0 = 
Release|Any CPU + {2523BF07-0787-404B-B7FD-35BF10810BDD}.Release|x86.ActiveCfg = Release|Any CPU + {2523BF07-0787-404B-B7FD-35BF10810BDD}.Release|x86.Build.0 = Release|Any CPU + {AF192668-6B1F-4D8C-AFBA-1338080BB3BC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {AF192668-6B1F-4D8C-AFBA-1338080BB3BC}.Debug|Any CPU.Build.0 = Debug|Any CPU + {AF192668-6B1F-4D8C-AFBA-1338080BB3BC}.Debug|x64.ActiveCfg = Debug|Any CPU + {AF192668-6B1F-4D8C-AFBA-1338080BB3BC}.Debug|x64.Build.0 = Debug|Any CPU + {AF192668-6B1F-4D8C-AFBA-1338080BB3BC}.Debug|x86.ActiveCfg = Debug|Any CPU + {AF192668-6B1F-4D8C-AFBA-1338080BB3BC}.Debug|x86.Build.0 = Debug|Any CPU + {AF192668-6B1F-4D8C-AFBA-1338080BB3BC}.Release|Any CPU.ActiveCfg = Release|Any CPU + {AF192668-6B1F-4D8C-AFBA-1338080BB3BC}.Release|Any CPU.Build.0 = Release|Any CPU + {AF192668-6B1F-4D8C-AFBA-1338080BB3BC}.Release|x64.ActiveCfg = Release|Any CPU + {AF192668-6B1F-4D8C-AFBA-1338080BB3BC}.Release|x64.Build.0 = Release|Any CPU + {AF192668-6B1F-4D8C-AFBA-1338080BB3BC}.Release|x86.ActiveCfg = Release|Any CPU + {AF192668-6B1F-4D8C-AFBA-1338080BB3BC}.Release|x86.Build.0 = Release|Any CPU + {A7E55948-F5A6-4696-8D60-B80A299C2850}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {A7E55948-F5A6-4696-8D60-B80A299C2850}.Debug|Any CPU.Build.0 = Debug|Any CPU + {A7E55948-F5A6-4696-8D60-B80A299C2850}.Debug|x64.ActiveCfg = Debug|Any CPU + {A7E55948-F5A6-4696-8D60-B80A299C2850}.Debug|x64.Build.0 = Debug|Any CPU + {A7E55948-F5A6-4696-8D60-B80A299C2850}.Debug|x86.ActiveCfg = Debug|Any CPU + {A7E55948-F5A6-4696-8D60-B80A299C2850}.Debug|x86.Build.0 = Debug|Any CPU + {A7E55948-F5A6-4696-8D60-B80A299C2850}.Release|Any CPU.ActiveCfg = Release|Any CPU + {A7E55948-F5A6-4696-8D60-B80A299C2850}.Release|Any CPU.Build.0 = Release|Any CPU + {A7E55948-F5A6-4696-8D60-B80A299C2850}.Release|x64.ActiveCfg = Release|Any CPU + {A7E55948-F5A6-4696-8D60-B80A299C2850}.Release|x64.Build.0 = Release|Any CPU + {A7E55948-F5A6-4696-8D60-B80A299C2850}.Release|x86.ActiveCfg = Release|Any CPU + {A7E55948-F5A6-4696-8D60-B80A299C2850}.Release|x86.Build.0 = Release|Any CPU + {CE58DBCD-FE30-4714-A462-758459B21185}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {CE58DBCD-FE30-4714-A462-758459B21185}.Debug|Any CPU.Build.0 = Debug|Any CPU + {CE58DBCD-FE30-4714-A462-758459B21185}.Debug|x64.ActiveCfg = Debug|Any CPU + {CE58DBCD-FE30-4714-A462-758459B21185}.Debug|x64.Build.0 = Debug|Any CPU + {CE58DBCD-FE30-4714-A462-758459B21185}.Debug|x86.ActiveCfg = Debug|Any CPU + {CE58DBCD-FE30-4714-A462-758459B21185}.Debug|x86.Build.0 = Debug|Any CPU + {CE58DBCD-FE30-4714-A462-758459B21185}.Release|Any CPU.ActiveCfg = Release|Any CPU + {CE58DBCD-FE30-4714-A462-758459B21185}.Release|Any CPU.Build.0 = Release|Any CPU + {CE58DBCD-FE30-4714-A462-758459B21185}.Release|x64.ActiveCfg = Release|Any CPU + {CE58DBCD-FE30-4714-A462-758459B21185}.Release|x64.Build.0 = Release|Any CPU + {CE58DBCD-FE30-4714-A462-758459B21185}.Release|x86.ActiveCfg = Release|Any CPU + {CE58DBCD-FE30-4714-A462-758459B21185}.Release|x86.Build.0 = Release|Any CPU + {A7A2ECB8-5D56-4FB3-81A5-5C22982E7A8C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {A7A2ECB8-5D56-4FB3-81A5-5C22982E7A8C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {A7A2ECB8-5D56-4FB3-81A5-5C22982E7A8C}.Debug|x64.ActiveCfg = Debug|Any CPU + {A7A2ECB8-5D56-4FB3-81A5-5C22982E7A8C}.Debug|x64.Build.0 = Debug|Any CPU + {A7A2ECB8-5D56-4FB3-81A5-5C22982E7A8C}.Debug|x86.ActiveCfg = Debug|Any CPU + {A7A2ECB8-5D56-4FB3-81A5-5C22982E7A8C}.Debug|x86.Build.0 = Debug|Any CPU + {A7A2ECB8-5D56-4FB3-81A5-5C22982E7A8C}.Release|Any 
CPU.ActiveCfg = Release|Any CPU + {A7A2ECB8-5D56-4FB3-81A5-5C22982E7A8C}.Release|Any CPU.Build.0 = Release|Any CPU + {A7A2ECB8-5D56-4FB3-81A5-5C22982E7A8C}.Release|x64.ActiveCfg = Release|Any CPU + {A7A2ECB8-5D56-4FB3-81A5-5C22982E7A8C}.Release|x64.Build.0 = Release|Any CPU + {A7A2ECB8-5D56-4FB3-81A5-5C22982E7A8C}.Release|x86.ActiveCfg = Release|Any CPU + {A7A2ECB8-5D56-4FB3-81A5-5C22982E7A8C}.Release|x86.Build.0 = Release|Any CPU + {4496E181-413D-4F31-846C-DA6AF6F8E55C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {4496E181-413D-4F31-846C-DA6AF6F8E55C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {4496E181-413D-4F31-846C-DA6AF6F8E55C}.Debug|x64.ActiveCfg = Debug|Any CPU + {4496E181-413D-4F31-846C-DA6AF6F8E55C}.Debug|x64.Build.0 = Debug|Any CPU + {4496E181-413D-4F31-846C-DA6AF6F8E55C}.Debug|x86.ActiveCfg = Debug|Any CPU + {4496E181-413D-4F31-846C-DA6AF6F8E55C}.Debug|x86.Build.0 = Debug|Any CPU + {4496E181-413D-4F31-846C-DA6AF6F8E55C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {4496E181-413D-4F31-846C-DA6AF6F8E55C}.Release|Any CPU.Build.0 = Release|Any CPU + {4496E181-413D-4F31-846C-DA6AF6F8E55C}.Release|x64.ActiveCfg = Release|Any CPU + {4496E181-413D-4F31-846C-DA6AF6F8E55C}.Release|x64.Build.0 = Release|Any CPU + {4496E181-413D-4F31-846C-DA6AF6F8E55C}.Release|x86.ActiveCfg = Release|Any CPU + {4496E181-413D-4F31-846C-DA6AF6F8E55C}.Release|x86.Build.0 = Release|Any CPU + {C7F55BF1-F20B-4D0E-A5EA-FEDCEF59ECF9}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C7F55BF1-F20B-4D0E-A5EA-FEDCEF59ECF9}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C7F55BF1-F20B-4D0E-A5EA-FEDCEF59ECF9}.Debug|x64.ActiveCfg = Debug|Any CPU + {C7F55BF1-F20B-4D0E-A5EA-FEDCEF59ECF9}.Debug|x64.Build.0 = Debug|Any CPU + {C7F55BF1-F20B-4D0E-A5EA-FEDCEF59ECF9}.Debug|x86.ActiveCfg = Debug|Any CPU + {C7F55BF1-F20B-4D0E-A5EA-FEDCEF59ECF9}.Debug|x86.Build.0 = Debug|Any CPU + {C7F55BF1-F20B-4D0E-A5EA-FEDCEF59ECF9}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C7F55BF1-F20B-4D0E-A5EA-FEDCEF59ECF9}.Release|Any CPU.Build.0 = Release|Any CPU + {C7F55BF1-F20B-4D0E-A5EA-FEDCEF59ECF9}.Release|x64.ActiveCfg = Release|Any CPU + {C7F55BF1-F20B-4D0E-A5EA-FEDCEF59ECF9}.Release|x64.Build.0 = Release|Any CPU + {C7F55BF1-F20B-4D0E-A5EA-FEDCEF59ECF9}.Release|x86.ActiveCfg = Release|Any CPU + {C7F55BF1-F20B-4D0E-A5EA-FEDCEF59ECF9}.Release|x86.Build.0 = Release|Any CPU + {90196AAE-2C67-4D40-A1E2-2EBBCD035481}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {90196AAE-2C67-4D40-A1E2-2EBBCD035481}.Debug|Any CPU.Build.0 = Debug|Any CPU + {90196AAE-2C67-4D40-A1E2-2EBBCD035481}.Debug|x64.ActiveCfg = Debug|Any CPU + {90196AAE-2C67-4D40-A1E2-2EBBCD035481}.Debug|x64.Build.0 = Debug|Any CPU + {90196AAE-2C67-4D40-A1E2-2EBBCD035481}.Debug|x86.ActiveCfg = Debug|Any CPU + {90196AAE-2C67-4D40-A1E2-2EBBCD035481}.Debug|x86.Build.0 = Debug|Any CPU + {90196AAE-2C67-4D40-A1E2-2EBBCD035481}.Release|Any CPU.ActiveCfg = Release|Any CPU + {90196AAE-2C67-4D40-A1E2-2EBBCD035481}.Release|Any CPU.Build.0 = Release|Any CPU + {90196AAE-2C67-4D40-A1E2-2EBBCD035481}.Release|x64.ActiveCfg = Release|Any CPU + {90196AAE-2C67-4D40-A1E2-2EBBCD035481}.Release|x64.Build.0 = Release|Any CPU + {90196AAE-2C67-4D40-A1E2-2EBBCD035481}.Release|x86.ActiveCfg = Release|Any CPU + {90196AAE-2C67-4D40-A1E2-2EBBCD035481}.Release|x86.Build.0 = Release|Any CPU + {B5A20E00-B2CE-435D-B486-8AB8F831A3A6}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B5A20E00-B2CE-435D-B486-8AB8F831A3A6}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B5A20E00-B2CE-435D-B486-8AB8F831A3A6}.Debug|x64.ActiveCfg = Debug|Any CPU + 
{B5A20E00-B2CE-435D-B486-8AB8F831A3A6}.Debug|x64.Build.0 = Debug|Any CPU + {B5A20E00-B2CE-435D-B486-8AB8F831A3A6}.Debug|x86.ActiveCfg = Debug|Any CPU + {B5A20E00-B2CE-435D-B486-8AB8F831A3A6}.Debug|x86.Build.0 = Debug|Any CPU + {B5A20E00-B2CE-435D-B486-8AB8F831A3A6}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B5A20E00-B2CE-435D-B486-8AB8F831A3A6}.Release|Any CPU.Build.0 = Release|Any CPU + {B5A20E00-B2CE-435D-B486-8AB8F831A3A6}.Release|x64.ActiveCfg = Release|Any CPU + {B5A20E00-B2CE-435D-B486-8AB8F831A3A6}.Release|x64.Build.0 = Release|Any CPU + {B5A20E00-B2CE-435D-B486-8AB8F831A3A6}.Release|x86.ActiveCfg = Release|Any CPU + {B5A20E00-B2CE-435D-B486-8AB8F831A3A6}.Release|x86.Build.0 = Release|Any CPU + {D643E06B-FDAC-4E17-A042-9A60F71CF471}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D643E06B-FDAC-4E17-A042-9A60F71CF471}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D643E06B-FDAC-4E17-A042-9A60F71CF471}.Debug|x64.ActiveCfg = Debug|Any CPU + {D643E06B-FDAC-4E17-A042-9A60F71CF471}.Debug|x64.Build.0 = Debug|Any CPU + {D643E06B-FDAC-4E17-A042-9A60F71CF471}.Debug|x86.ActiveCfg = Debug|Any CPU + {D643E06B-FDAC-4E17-A042-9A60F71CF471}.Debug|x86.Build.0 = Debug|Any CPU + {D643E06B-FDAC-4E17-A042-9A60F71CF471}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D643E06B-FDAC-4E17-A042-9A60F71CF471}.Release|Any CPU.Build.0 = Release|Any CPU + {D643E06B-FDAC-4E17-A042-9A60F71CF471}.Release|x64.ActiveCfg = Release|Any CPU + {D643E06B-FDAC-4E17-A042-9A60F71CF471}.Release|x64.Build.0 = Release|Any CPU + {D643E06B-FDAC-4E17-A042-9A60F71CF471}.Release|x86.ActiveCfg = Release|Any CPU + {D643E06B-FDAC-4E17-A042-9A60F71CF471}.Release|x86.Build.0 = Release|Any CPU + {D756A376-1DE0-4E08-B50E-28E38A252C8C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D756A376-1DE0-4E08-B50E-28E38A252C8C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D756A376-1DE0-4E08-B50E-28E38A252C8C}.Debug|x64.ActiveCfg = Debug|Any CPU + {D756A376-1DE0-4E08-B50E-28E38A252C8C}.Debug|x64.Build.0 = Debug|Any CPU + {D756A376-1DE0-4E08-B50E-28E38A252C8C}.Debug|x86.ActiveCfg = Debug|Any CPU + {D756A376-1DE0-4E08-B50E-28E38A252C8C}.Debug|x86.Build.0 = Debug|Any CPU + {D756A376-1DE0-4E08-B50E-28E38A252C8C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D756A376-1DE0-4E08-B50E-28E38A252C8C}.Release|Any CPU.Build.0 = Release|Any CPU + {D756A376-1DE0-4E08-B50E-28E38A252C8C}.Release|x64.ActiveCfg = Release|Any CPU + {D756A376-1DE0-4E08-B50E-28E38A252C8C}.Release|x64.Build.0 = Release|Any CPU + {D756A376-1DE0-4E08-B50E-28E38A252C8C}.Release|x86.ActiveCfg = Release|Any CPU + {D756A376-1DE0-4E08-B50E-28E38A252C8C}.Release|x86.Build.0 = Release|Any CPU + {5C8DC0DB-F6B2-4F59-BD2A-DC6CD5AA6EF1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {5C8DC0DB-F6B2-4F59-BD2A-DC6CD5AA6EF1}.Debug|Any CPU.Build.0 = Debug|Any CPU + {5C8DC0DB-F6B2-4F59-BD2A-DC6CD5AA6EF1}.Debug|x64.ActiveCfg = Debug|Any CPU + {5C8DC0DB-F6B2-4F59-BD2A-DC6CD5AA6EF1}.Debug|x64.Build.0 = Debug|Any CPU + {5C8DC0DB-F6B2-4F59-BD2A-DC6CD5AA6EF1}.Debug|x86.ActiveCfg = Debug|Any CPU + {5C8DC0DB-F6B2-4F59-BD2A-DC6CD5AA6EF1}.Debug|x86.Build.0 = Debug|Any CPU + {5C8DC0DB-F6B2-4F59-BD2A-DC6CD5AA6EF1}.Release|Any CPU.ActiveCfg = Release|Any CPU + {5C8DC0DB-F6B2-4F59-BD2A-DC6CD5AA6EF1}.Release|Any CPU.Build.0 = Release|Any CPU + {5C8DC0DB-F6B2-4F59-BD2A-DC6CD5AA6EF1}.Release|x64.ActiveCfg = Release|Any CPU + {5C8DC0DB-F6B2-4F59-BD2A-DC6CD5AA6EF1}.Release|x64.Build.0 = Release|Any CPU + {5C8DC0DB-F6B2-4F59-BD2A-DC6CD5AA6EF1}.Release|x86.ActiveCfg = Release|Any CPU + {5C8DC0DB-F6B2-4F59-BD2A-DC6CD5AA6EF1}.Release|x86.Build.0 = 
Release|Any CPU + {8D7A50A6-7F4B-4754-A150-5C3FFEDA0B13}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {8D7A50A6-7F4B-4754-A150-5C3FFEDA0B13}.Debug|Any CPU.Build.0 = Debug|Any CPU + {8D7A50A6-7F4B-4754-A150-5C3FFEDA0B13}.Debug|x64.ActiveCfg = Debug|Any CPU + {8D7A50A6-7F4B-4754-A150-5C3FFEDA0B13}.Debug|x64.Build.0 = Debug|Any CPU + {8D7A50A6-7F4B-4754-A150-5C3FFEDA0B13}.Debug|x86.ActiveCfg = Debug|Any CPU + {8D7A50A6-7F4B-4754-A150-5C3FFEDA0B13}.Debug|x86.Build.0 = Debug|Any CPU + {8D7A50A6-7F4B-4754-A150-5C3FFEDA0B13}.Release|Any CPU.ActiveCfg = Release|Any CPU + {8D7A50A6-7F4B-4754-A150-5C3FFEDA0B13}.Release|Any CPU.Build.0 = Release|Any CPU + {8D7A50A6-7F4B-4754-A150-5C3FFEDA0B13}.Release|x64.ActiveCfg = Release|Any CPU + {8D7A50A6-7F4B-4754-A150-5C3FFEDA0B13}.Release|x64.Build.0 = Release|Any CPU + {8D7A50A6-7F4B-4754-A150-5C3FFEDA0B13}.Release|x86.ActiveCfg = Release|Any CPU + {8D7A50A6-7F4B-4754-A150-5C3FFEDA0B13}.Release|x86.Build.0 = Release|Any CPU + {FD376DC0-9638-481D-97AD-29A932CE2560}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {FD376DC0-9638-481D-97AD-29A932CE2560}.Debug|Any CPU.Build.0 = Debug|Any CPU + {FD376DC0-9638-481D-97AD-29A932CE2560}.Debug|x64.ActiveCfg = Debug|Any CPU + {FD376DC0-9638-481D-97AD-29A932CE2560}.Debug|x64.Build.0 = Debug|Any CPU + {FD376DC0-9638-481D-97AD-29A932CE2560}.Debug|x86.ActiveCfg = Debug|Any CPU + {FD376DC0-9638-481D-97AD-29A932CE2560}.Debug|x86.Build.0 = Debug|Any CPU + {FD376DC0-9638-481D-97AD-29A932CE2560}.Release|Any CPU.ActiveCfg = Release|Any CPU + {FD376DC0-9638-481D-97AD-29A932CE2560}.Release|Any CPU.Build.0 = Release|Any CPU + {FD376DC0-9638-481D-97AD-29A932CE2560}.Release|x64.ActiveCfg = Release|Any CPU + {FD376DC0-9638-481D-97AD-29A932CE2560}.Release|x64.Build.0 = Release|Any CPU + {FD376DC0-9638-481D-97AD-29A932CE2560}.Release|x86.ActiveCfg = Release|Any CPU + {FD376DC0-9638-481D-97AD-29A932CE2560}.Release|x86.Build.0 = Release|Any CPU + {4A9F5BF6-7883-4037-9D60-43B05A4BF4A8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {4A9F5BF6-7883-4037-9D60-43B05A4BF4A8}.Debug|Any CPU.Build.0 = Debug|Any CPU + {4A9F5BF6-7883-4037-9D60-43B05A4BF4A8}.Debug|x64.ActiveCfg = Debug|Any CPU + {4A9F5BF6-7883-4037-9D60-43B05A4BF4A8}.Debug|x64.Build.0 = Debug|Any CPU + {4A9F5BF6-7883-4037-9D60-43B05A4BF4A8}.Debug|x86.ActiveCfg = Debug|Any CPU + {4A9F5BF6-7883-4037-9D60-43B05A4BF4A8}.Debug|x86.Build.0 = Debug|Any CPU + {4A9F5BF6-7883-4037-9D60-43B05A4BF4A8}.Release|Any CPU.ActiveCfg = Release|Any CPU + {4A9F5BF6-7883-4037-9D60-43B05A4BF4A8}.Release|Any CPU.Build.0 = Release|Any CPU + {4A9F5BF6-7883-4037-9D60-43B05A4BF4A8}.Release|x64.ActiveCfg = Release|Any CPU + {4A9F5BF6-7883-4037-9D60-43B05A4BF4A8}.Release|x64.Build.0 = Release|Any CPU + {4A9F5BF6-7883-4037-9D60-43B05A4BF4A8}.Release|x86.ActiveCfg = Release|Any CPU + {4A9F5BF6-7883-4037-9D60-43B05A4BF4A8}.Release|x86.Build.0 = Release|Any CPU + {764DB3E2-4A7C-4268-8D54-4CD6C468AF0C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {764DB3E2-4A7C-4268-8D54-4CD6C468AF0C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {764DB3E2-4A7C-4268-8D54-4CD6C468AF0C}.Debug|x64.ActiveCfg = Debug|Any CPU + {764DB3E2-4A7C-4268-8D54-4CD6C468AF0C}.Debug|x64.Build.0 = Debug|Any CPU + {764DB3E2-4A7C-4268-8D54-4CD6C468AF0C}.Debug|x86.ActiveCfg = Debug|Any CPU + {764DB3E2-4A7C-4268-8D54-4CD6C468AF0C}.Debug|x86.Build.0 = Debug|Any CPU + {764DB3E2-4A7C-4268-8D54-4CD6C468AF0C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {764DB3E2-4A7C-4268-8D54-4CD6C468AF0C}.Release|Any CPU.Build.0 = Release|Any CPU + 
{764DB3E2-4A7C-4268-8D54-4CD6C468AF0C}.Release|x64.ActiveCfg = Release|Any CPU + {764DB3E2-4A7C-4268-8D54-4CD6C468AF0C}.Release|x64.Build.0 = Release|Any CPU + {764DB3E2-4A7C-4268-8D54-4CD6C468AF0C}.Release|x86.ActiveCfg = Release|Any CPU + {764DB3E2-4A7C-4268-8D54-4CD6C468AF0C}.Release|x86.Build.0 = Release|Any CPU + {A885E136-A737-40EE-814E-1A99FF599369}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {A885E136-A737-40EE-814E-1A99FF599369}.Debug|Any CPU.Build.0 = Debug|Any CPU + {A885E136-A737-40EE-814E-1A99FF599369}.Debug|x64.ActiveCfg = Debug|Any CPU + {A885E136-A737-40EE-814E-1A99FF599369}.Debug|x64.Build.0 = Debug|Any CPU + {A885E136-A737-40EE-814E-1A99FF599369}.Debug|x86.ActiveCfg = Debug|Any CPU + {A885E136-A737-40EE-814E-1A99FF599369}.Debug|x86.Build.0 = Debug|Any CPU + {A885E136-A737-40EE-814E-1A99FF599369}.Release|Any CPU.ActiveCfg = Release|Any CPU + {A885E136-A737-40EE-814E-1A99FF599369}.Release|Any CPU.Build.0 = Release|Any CPU + {A885E136-A737-40EE-814E-1A99FF599369}.Release|x64.ActiveCfg = Release|Any CPU + {A885E136-A737-40EE-814E-1A99FF599369}.Release|x64.Build.0 = Release|Any CPU + {A885E136-A737-40EE-814E-1A99FF599369}.Release|x86.ActiveCfg = Release|Any CPU + {A885E136-A737-40EE-814E-1A99FF599369}.Release|x86.Build.0 = Release|Any CPU + {EF65B10E-0A83-4E47-B06A-B80A8E7530F5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {EF65B10E-0A83-4E47-B06A-B80A8E7530F5}.Debug|Any CPU.Build.0 = Debug|Any CPU + {EF65B10E-0A83-4E47-B06A-B80A8E7530F5}.Debug|x64.ActiveCfg = Debug|Any CPU + {EF65B10E-0A83-4E47-B06A-B80A8E7530F5}.Debug|x64.Build.0 = Debug|Any CPU + {EF65B10E-0A83-4E47-B06A-B80A8E7530F5}.Debug|x86.ActiveCfg = Debug|Any CPU + {EF65B10E-0A83-4E47-B06A-B80A8E7530F5}.Debug|x86.Build.0 = Debug|Any CPU + {EF65B10E-0A83-4E47-B06A-B80A8E7530F5}.Release|Any CPU.ActiveCfg = Release|Any CPU + {EF65B10E-0A83-4E47-B06A-B80A8E7530F5}.Release|Any CPU.Build.0 = Release|Any CPU + {EF65B10E-0A83-4E47-B06A-B80A8E7530F5}.Release|x64.ActiveCfg = Release|Any CPU + {EF65B10E-0A83-4E47-B06A-B80A8E7530F5}.Release|x64.Build.0 = Release|Any CPU + {EF65B10E-0A83-4E47-B06A-B80A8E7530F5}.Release|x86.ActiveCfg = Release|Any CPU + {EF65B10E-0A83-4E47-B06A-B80A8E7530F5}.Release|x86.Build.0 = Release|Any CPU + {07D15319-95A0-4C36-B06C-A5C80E0A7752}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {07D15319-95A0-4C36-B06C-A5C80E0A7752}.Debug|Any CPU.Build.0 = Debug|Any CPU + {07D15319-95A0-4C36-B06C-A5C80E0A7752}.Debug|x64.ActiveCfg = Debug|Any CPU + {07D15319-95A0-4C36-B06C-A5C80E0A7752}.Debug|x64.Build.0 = Debug|Any CPU + {07D15319-95A0-4C36-B06C-A5C80E0A7752}.Debug|x86.ActiveCfg = Debug|Any CPU + {07D15319-95A0-4C36-B06C-A5C80E0A7752}.Debug|x86.Build.0 = Debug|Any CPU + {07D15319-95A0-4C36-B06C-A5C80E0A7752}.Release|Any CPU.ActiveCfg = Release|Any CPU + {07D15319-95A0-4C36-B06C-A5C80E0A7752}.Release|Any CPU.Build.0 = Release|Any CPU + {07D15319-95A0-4C36-B06C-A5C80E0A7752}.Release|x64.ActiveCfg = Release|Any CPU + {07D15319-95A0-4C36-B06C-A5C80E0A7752}.Release|x64.Build.0 = Release|Any CPU + {07D15319-95A0-4C36-B06C-A5C80E0A7752}.Release|x86.ActiveCfg = Release|Any CPU + {07D15319-95A0-4C36-B06C-A5C80E0A7752}.Release|x86.Build.0 = Release|Any CPU + {782652F5-A7C3-4070-8B42-F7DC2C17973E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {782652F5-A7C3-4070-8B42-F7DC2C17973E}.Debug|Any CPU.Build.0 = Debug|Any CPU + {782652F5-A7C3-4070-8B42-F7DC2C17973E}.Debug|x64.ActiveCfg = Debug|Any CPU + {782652F5-A7C3-4070-8B42-F7DC2C17973E}.Debug|x64.Build.0 = Debug|Any CPU + {782652F5-A7C3-4070-8B42-F7DC2C17973E}.Debug|x86.ActiveCfg = 
Debug|Any CPU + {782652F5-A7C3-4070-8B42-F7DC2C17973E}.Debug|x86.Build.0 = Debug|Any CPU + {782652F5-A7C3-4070-8B42-F7DC2C17973E}.Release|Any CPU.ActiveCfg = Release|Any CPU + {782652F5-A7C3-4070-8B42-F7DC2C17973E}.Release|Any CPU.Build.0 = Release|Any CPU + {782652F5-A7C3-4070-8B42-F7DC2C17973E}.Release|x64.ActiveCfg = Release|Any CPU + {782652F5-A7C3-4070-8B42-F7DC2C17973E}.Release|x64.Build.0 = Release|Any CPU + {782652F5-A7C3-4070-8B42-F7DC2C17973E}.Release|x86.ActiveCfg = Release|Any CPU + {782652F5-A7C3-4070-8B42-F7DC2C17973E}.Release|x86.Build.0 = Release|Any CPU + {51CAC6CD-ED38-4AFC-AE81-84A4BDD45DB2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {51CAC6CD-ED38-4AFC-AE81-84A4BDD45DB2}.Debug|Any CPU.Build.0 = Debug|Any CPU + {51CAC6CD-ED38-4AFC-AE81-84A4BDD45DB2}.Debug|x64.ActiveCfg = Debug|Any CPU + {51CAC6CD-ED38-4AFC-AE81-84A4BDD45DB2}.Debug|x64.Build.0 = Debug|Any CPU + {51CAC6CD-ED38-4AFC-AE81-84A4BDD45DB2}.Debug|x86.ActiveCfg = Debug|Any CPU + {51CAC6CD-ED38-4AFC-AE81-84A4BDD45DB2}.Debug|x86.Build.0 = Debug|Any CPU + {51CAC6CD-ED38-4AFC-AE81-84A4BDD45DB2}.Release|Any CPU.ActiveCfg = Release|Any CPU + {51CAC6CD-ED38-4AFC-AE81-84A4BDD45DB2}.Release|Any CPU.Build.0 = Release|Any CPU + {51CAC6CD-ED38-4AFC-AE81-84A4BDD45DB2}.Release|x64.ActiveCfg = Release|Any CPU + {51CAC6CD-ED38-4AFC-AE81-84A4BDD45DB2}.Release|x64.Build.0 = Release|Any CPU + {51CAC6CD-ED38-4AFC-AE81-84A4BDD45DB2}.Release|x86.ActiveCfg = Release|Any CPU + {51CAC6CD-ED38-4AFC-AE81-84A4BDD45DB2}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(NestedProjects) = preSolution + {6150DE8D-E28D-4B87-9825-7ADC37732D46} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {BA262EB0-B72B-4D61-91F1-F6933A9BF205} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {25C9ED1F-22C7-4979-A00D-B3BDFDD238F2} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {8C0176ED-F21D-43AD-B9F6-4B23CBDF5775} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {4AF07D9C-67BF-460C-9E0B-EB6B5D4E5F1C} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {C53BE08C-E6AC-492B-8315-F184E645DED7} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {7471D151-B9D7-46A0-8F8E-601EA8D3B445} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {29EB452F-2FCA-40B7-B087-89289FD64EA9} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {E72D568F-053A-4C7A-9707-9077663CF074} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {EB9006BB-437A-4CA6-ADF4-45347B14298C} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {6625E0A5-728F-4AC7-BAEB-B4244B3E5ED1} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {ACED4072-1C6B-4FEC-8CAE-6EE43C796B8F} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {A46E73C7-6703-4870-9C80-D7238A693AAF} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {1F8BFCCF-8B00-40D3-AFC4-B3DD668BBC19} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {2523BF07-0787-404B-B7FD-35BF10810BDD} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {AF192668-6B1F-4D8C-AFBA-1338080BB3BC} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {A7E55948-F5A6-4696-8D60-B80A299C2850} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {CE58DBCD-FE30-4714-A462-758459B21185} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {A7A2ECB8-5D56-4FB3-81A5-5C22982E7A8C} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {4496E181-413D-4F31-846C-DA6AF6F8E55C} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {C7F55BF1-F20B-4D0E-A5EA-FEDCEF59ECF9} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {90196AAE-2C67-4D40-A1E2-2EBBCD035481} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {B5A20E00-B2CE-435D-B486-8AB8F831A3A6} = 
{56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {D643E06B-FDAC-4E17-A042-9A60F71CF471} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {D756A376-1DE0-4E08-B50E-28E38A252C8C} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {5C8DC0DB-F6B2-4F59-BD2A-DC6CD5AA6EF1} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {8D7A50A6-7F4B-4754-A150-5C3FFEDA0B13} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {FD376DC0-9638-481D-97AD-29A932CE2560} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {4A9F5BF6-7883-4037-9D60-43B05A4BF4A8} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {764DB3E2-4A7C-4268-8D54-4CD6C468AF0C} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {A885E136-A737-40EE-814E-1A99FF599369} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {EF65B10E-0A83-4E47-B06A-B80A8E7530F5} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {07D15319-95A0-4C36-B06C-A5C80E0A7752} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {782652F5-A7C3-4070-8B42-F7DC2C17973E} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {51CAC6CD-ED38-4AFC-AE81-84A4BDD45DB2} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + EndGlobalSection +EndGlobal diff --git a/src/StellaOps.Scanner.Analyzers.Lang.DotNet/AGENTS.md b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/AGENTS.md similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.DotNet/AGENTS.md rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/AGENTS.md diff --git a/src/StellaOps.Scanner.Analyzers.Lang.DotNet/DotNetAnalyzerPlugin.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/DotNetAnalyzerPlugin.cs similarity index 96% rename from src/StellaOps.Scanner.Analyzers.Lang.DotNet/DotNetAnalyzerPlugin.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/DotNetAnalyzerPlugin.cs index fdca7c76..8858862d 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.DotNet/DotNetAnalyzerPlugin.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/DotNetAnalyzerPlugin.cs @@ -1,17 +1,17 @@ -using System; -using StellaOps.Scanner.Analyzers.Lang.Plugin; - -namespace StellaOps.Scanner.Analyzers.Lang.DotNet; - -public sealed class DotNetAnalyzerPlugin : ILanguageAnalyzerPlugin -{ - public string Name => "StellaOps.Scanner.Analyzers.Lang.DotNet"; - - public bool IsAvailable(IServiceProvider services) => services is not null; - - public ILanguageAnalyzer CreateAnalyzer(IServiceProvider services) - { - ArgumentNullException.ThrowIfNull(services); - return new DotNetLanguageAnalyzer(); - } -} +using System; +using StellaOps.Scanner.Analyzers.Lang.Plugin; + +namespace StellaOps.Scanner.Analyzers.Lang.DotNet; + +public sealed class DotNetAnalyzerPlugin : ILanguageAnalyzerPlugin +{ + public string Name => "StellaOps.Scanner.Analyzers.Lang.DotNet"; + + public bool IsAvailable(IServiceProvider services) => services is not null; + + public ILanguageAnalyzer CreateAnalyzer(IServiceProvider services) + { + ArgumentNullException.ThrowIfNull(services); + return new DotNetLanguageAnalyzer(); + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.DotNet/DotNetLanguageAnalyzer.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/DotNetLanguageAnalyzer.cs similarity index 97% rename from src/StellaOps.Scanner.Analyzers.Lang.DotNet/DotNetLanguageAnalyzer.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/DotNetLanguageAnalyzer.cs index a6412876..d8284b6f 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.DotNet/DotNetLanguageAnalyzer.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/DotNetLanguageAnalyzer.cs @@ 
-1,37 +1,37 @@ -using StellaOps.Scanner.Analyzers.Lang.DotNet.Internal; - -namespace StellaOps.Scanner.Analyzers.Lang.DotNet; - -public sealed class DotNetLanguageAnalyzer : ILanguageAnalyzer -{ - public string Id => "dotnet"; - - public string DisplayName => ".NET Analyzer (preview)"; - - public async ValueTask AnalyzeAsync(LanguageAnalyzerContext context, LanguageComponentWriter writer, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(context); - ArgumentNullException.ThrowIfNull(writer); - - var packages = await DotNetDependencyCollector.CollectAsync(context, cancellationToken).ConfigureAwait(false); - if (packages.Count == 0) - { - return; - } - - foreach (var package in packages) - { - cancellationToken.ThrowIfCancellationRequested(); - - writer.AddFromPurl( - analyzerId: Id, - purl: package.Purl, - name: package.Name, - version: package.Version, - type: "nuget", - metadata: package.Metadata, - evidence: package.Evidence, - usedByEntrypoint: package.UsedByEntrypoint); - } - } -} +using StellaOps.Scanner.Analyzers.Lang.DotNet.Internal; + +namespace StellaOps.Scanner.Analyzers.Lang.DotNet; + +public sealed class DotNetLanguageAnalyzer : ILanguageAnalyzer +{ + public string Id => "dotnet"; + + public string DisplayName => ".NET Analyzer (preview)"; + + public async ValueTask AnalyzeAsync(LanguageAnalyzerContext context, LanguageComponentWriter writer, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(context); + ArgumentNullException.ThrowIfNull(writer); + + var packages = await DotNetDependencyCollector.CollectAsync(context, cancellationToken).ConfigureAwait(false); + if (packages.Count == 0) + { + return; + } + + foreach (var package in packages) + { + cancellationToken.ThrowIfCancellationRequested(); + + writer.AddFromPurl( + analyzerId: Id, + purl: package.Purl, + name: package.Name, + version: package.Version, + type: "nuget", + metadata: package.Metadata, + evidence: package.Evidence, + usedByEntrypoint: package.UsedByEntrypoint); + } + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.DotNet/GlobalUsings.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/GlobalUsings.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.DotNet/GlobalUsings.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/GlobalUsings.cs diff --git a/src/StellaOps.Scanner.Analyzers.Lang.DotNet/IDotNetAuthenticodeInspector.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/IDotNetAuthenticodeInspector.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.DotNet/IDotNetAuthenticodeInspector.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/IDotNetAuthenticodeInspector.cs diff --git a/src/StellaOps.Scanner.Analyzers.Lang.DotNet/Internal/DotNetDependencyCollector.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/Internal/DotNetDependencyCollector.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.DotNet/Internal/DotNetDependencyCollector.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/Internal/DotNetDependencyCollector.cs diff --git a/src/StellaOps.Scanner.Analyzers.Lang.DotNet/Internal/DotNetDepsFile.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/Internal/DotNetDepsFile.cs similarity index 98% rename from src/StellaOps.Scanner.Analyzers.Lang.DotNet/Internal/DotNetDepsFile.cs rename to 
src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/Internal/DotNetDepsFile.cs index 88c38f33..82797bee 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.DotNet/Internal/DotNetDepsFile.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/Internal/DotNetDepsFile.cs @@ -1,172 +1,172 @@ -using System.Diagnostics.CodeAnalysis; -using System.Text.Json; - -namespace StellaOps.Scanner.Analyzers.Lang.DotNet.Internal; - -internal sealed class DotNetDepsFile -{ - private DotNetDepsFile(string relativePath, IReadOnlyDictionary<string, DotNetLibrary> libraries) - { - RelativePath = relativePath; - Libraries = libraries; - } - - public string RelativePath { get; } - - public IReadOnlyDictionary<string, DotNetLibrary> Libraries { get; } - - public static DotNetDepsFile? Load(string absolutePath, string relativePath, CancellationToken cancellationToken) - { - using var stream = File.OpenRead(absolutePath); - using var document = JsonDocument.Parse(stream, new JsonDocumentOptions - { - AllowTrailingCommas = true, - CommentHandling = JsonCommentHandling.Skip - }); - - var root = document.RootElement; - if (root.ValueKind is not JsonValueKind.Object) - { - return null; - } - - var libraries = ParseLibraries(root, cancellationToken); - if (libraries.Count == 0) - { - return null; - } - - PopulateTargets(root, libraries, cancellationToken); - return new DotNetDepsFile(relativePath, libraries); - } - - private static Dictionary<string, DotNetLibrary> ParseLibraries(JsonElement root, CancellationToken cancellationToken) - { - var result = new Dictionary<string, DotNetLibrary>(StringComparer.Ordinal); - - if (!root.TryGetProperty("libraries", out var librariesElement) || librariesElement.ValueKind is not JsonValueKind.Object) - { - return result; - } - - foreach (var property in librariesElement.EnumerateObject()) - { - cancellationToken.ThrowIfCancellationRequested(); - - if (DotNetLibrary.TryCreate(property.Name, property.Value, out var library)) - { - result[property.Name] = library; - } - } - - return result; - } - - private static void PopulateTargets(JsonElement root, IDictionary<string, DotNetLibrary> libraries, CancellationToken cancellationToken) - { - if (!root.TryGetProperty("targets", out var targetsElement) || targetsElement.ValueKind is not JsonValueKind.Object) - { - return; - } - - foreach (var targetProperty in targetsElement.EnumerateObject()) - { - cancellationToken.ThrowIfCancellationRequested(); - - var (tfm, rid) = ParseTargetKey(targetProperty.Name); - if (targetProperty.Value.ValueKind is not JsonValueKind.Object) - { - continue; - } - - foreach (var libraryProperty in targetProperty.Value.EnumerateObject()) - { - cancellationToken.ThrowIfCancellationRequested(); - - if (!libraries.TryGetValue(libraryProperty.Name, out var library)) - { - continue; - } - - if (!string.IsNullOrEmpty(tfm)) - { - library.AddTargetFramework(tfm); - } - - if (!string.IsNullOrEmpty(rid)) - { - library.AddRuntimeIdentifier(rid); - } - +using System.Diagnostics.CodeAnalysis; +using System.Text.Json; + +namespace StellaOps.Scanner.Analyzers.Lang.DotNet.Internal; + +internal sealed class DotNetDepsFile +{ + private DotNetDepsFile(string relativePath, IReadOnlyDictionary<string, DotNetLibrary> libraries) + { + RelativePath = relativePath; + Libraries = libraries; + } + + public string RelativePath { get; } + + public IReadOnlyDictionary<string, DotNetLibrary> Libraries { get; } + + public static DotNetDepsFile? 
Load(string absolutePath, string relativePath, CancellationToken cancellationToken) + { + using var stream = File.OpenRead(absolutePath); + using var document = JsonDocument.Parse(stream, new JsonDocumentOptions + { + AllowTrailingCommas = true, + CommentHandling = JsonCommentHandling.Skip + }); + + var root = document.RootElement; + if (root.ValueKind is not JsonValueKind.Object) + { + return null; + } + + var libraries = ParseLibraries(root, cancellationToken); + if (libraries.Count == 0) + { + return null; + } + + PopulateTargets(root, libraries, cancellationToken); + return new DotNetDepsFile(relativePath, libraries); + } + + private static Dictionary<string, DotNetLibrary> ParseLibraries(JsonElement root, CancellationToken cancellationToken) + { + var result = new Dictionary<string, DotNetLibrary>(StringComparer.Ordinal); + + if (!root.TryGetProperty("libraries", out var librariesElement) || librariesElement.ValueKind is not JsonValueKind.Object) + { + return result; + } + + foreach (var property in librariesElement.EnumerateObject()) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (DotNetLibrary.TryCreate(property.Name, property.Value, out var library)) + { + result[property.Name] = library; + } + } + + return result; + } + + private static void PopulateTargets(JsonElement root, IDictionary<string, DotNetLibrary> libraries, CancellationToken cancellationToken) + { + if (!root.TryGetProperty("targets", out var targetsElement) || targetsElement.ValueKind is not JsonValueKind.Object) + { + return; + } + + foreach (var targetProperty in targetsElement.EnumerateObject()) + { + cancellationToken.ThrowIfCancellationRequested(); + + var (tfm, rid) = ParseTargetKey(targetProperty.Name); + if (targetProperty.Value.ValueKind is not JsonValueKind.Object) + { + continue; + } + + foreach (var libraryProperty in targetProperty.Value.EnumerateObject()) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (!libraries.TryGetValue(libraryProperty.Name, out var library)) + { + continue; + } + + if (!string.IsNullOrEmpty(tfm)) + { + library.AddTargetFramework(tfm); + } + + if (!string.IsNullOrEmpty(rid)) + { + library.AddRuntimeIdentifier(rid); + } + library.MergeTargetMetadata(libraryProperty.Value, tfm, rid); - } - } - } - - private static (string tfm, string? rid) ParseTargetKey(string value) - { - if (string.IsNullOrWhiteSpace(value)) - { - return (string.Empty, null); - } - - var separatorIndex = value.IndexOf('/'); - if (separatorIndex < 0) - { - return (value.Trim(), null); - } - - var tfm = value[..separatorIndex].Trim(); - var rid = value[(separatorIndex + 1)..].Trim(); - return (tfm, string.IsNullOrEmpty(rid) ? null : rid); - } -} - -internal sealed class DotNetLibrary -{ - private readonly HashSet<string> _dependencies = new(StringComparer.OrdinalIgnoreCase); + } + } + } + + private static (string tfm, string? rid) ParseTargetKey(string value) + { + if (string.IsNullOrWhiteSpace(value)) + { + return (string.Empty, null); + } + + var separatorIndex = value.IndexOf('/'); + if (separatorIndex < 0) + { + return (value.Trim(), null); + } + + var tfm = value[..separatorIndex].Trim(); + var rid = value[(separatorIndex + 1)..].Trim(); + return (tfm, string.IsNullOrEmpty(rid) ? 
null : rid); + } +} + +internal sealed class DotNetLibrary +{ + private readonly HashSet<string> _dependencies = new(StringComparer.OrdinalIgnoreCase); private readonly HashSet<string> _runtimeIdentifiers = new(StringComparer.Ordinal); private readonly List<DotNetLibraryAsset> _runtimeAssets = new(); - private readonly HashSet<string> _targetFrameworks = new(StringComparer.Ordinal); - - private DotNetLibrary( - string key, - string id, - string version, - string type, - bool? serviceable, - string? sha512, - string? path, - string? hashPath) - { - Key = key; - Id = id; - Version = version; - Type = type; - Serviceable = serviceable; - Sha512 = NormalizeValue(sha512); - PackagePath = NormalizePath(path); - HashPath = NormalizePath(hashPath); - } - - public string Key { get; } - - public string Id { get; } - - public string Version { get; } - - public string Type { get; } - - public bool? Serviceable { get; } - - public string? Sha512 { get; } - - public string? PackagePath { get; } - - public string? HashPath { get; } - - public bool IsPackage => string.Equals(Type, "package", StringComparison.OrdinalIgnoreCase); - + private readonly HashSet<string> _targetFrameworks = new(StringComparer.Ordinal); + + private DotNetLibrary( + string key, + string id, + string version, + string type, + bool? serviceable, + string? sha512, + string? path, + string? hashPath) + { + Key = key; + Id = id; + Version = version; + Type = type; + Serviceable = serviceable; + Sha512 = NormalizeValue(sha512); + PackagePath = NormalizePath(path); + HashPath = NormalizePath(hashPath); + } + + public string Key { get; } + + public string Id { get; } + + public string Version { get; } + + public string Type { get; } + + public bool? Serviceable { get; } + + public string? Sha512 { get; } + + public string? PackagePath { get; } + + public string? HashPath { get; } + + public bool IsPackage => string.Equals(Type, "package", StringComparison.OrdinalIgnoreCase); + public IReadOnlyCollection<string> Dependencies => _dependencies; public IReadOnlyCollection<string> TargetFrameworks => _targetFrameworks; @@ -174,65 +174,65 @@ internal sealed class DotNetLibrary public IReadOnlyCollection<string> RuntimeIdentifiers => _runtimeIdentifiers; public IReadOnlyCollection<DotNetLibraryAsset> RuntimeAssets => _runtimeAssets; - - public static bool TryCreate(string key, JsonElement element, [NotNullWhen(true)] out DotNetLibrary? library) - { - library = null; - if (!TrySplitNameAndVersion(key, out var id, out var version)) - { - return false; - } - - var type = element.TryGetProperty("type", out var typeElement) && typeElement.ValueKind == JsonValueKind.String - ? typeElement.GetString() ?? string.Empty - : string.Empty; - - bool? serviceable = null; - if (element.TryGetProperty("serviceable", out var serviceableElement)) - { - if (serviceableElement.ValueKind is JsonValueKind.True) - { - serviceable = true; - } - else if (serviceableElement.ValueKind is JsonValueKind.False) - { - serviceable = false; - } - } - - var sha512 = element.TryGetProperty("sha512", out var sha512Element) && sha512Element.ValueKind == JsonValueKind.String - ? sha512Element.GetString() - : null; - - var path = element.TryGetProperty("path", out var pathElement) && pathElement.ValueKind == JsonValueKind.String - ? pathElement.GetString() - : null; - - var hashPath = element.TryGetProperty("hashPath", out var hashElement) && hashElement.ValueKind == JsonValueKind.String - ? 
hashElement.GetString() - : null; - - library = new DotNetLibrary(key, id, version, type, serviceable, sha512, path, hashPath); - library.MergeLibraryMetadata(element); - return true; - } - - public void AddTargetFramework(string tfm) - { - if (!string.IsNullOrWhiteSpace(tfm)) - { - _targetFrameworks.Add(tfm); - } - } - - public void AddRuntimeIdentifier(string rid) - { - if (!string.IsNullOrWhiteSpace(rid)) - { - _runtimeIdentifiers.Add(rid); - } - } - + + public static bool TryCreate(string key, JsonElement element, [NotNullWhen(true)] out DotNetLibrary? library) + { + library = null; + if (!TrySplitNameAndVersion(key, out var id, out var version)) + { + return false; + } + + var type = element.TryGetProperty("type", out var typeElement) && typeElement.ValueKind == JsonValueKind.String + ? typeElement.GetString() ?? string.Empty + : string.Empty; + + bool? serviceable = null; + if (element.TryGetProperty("serviceable", out var serviceableElement)) + { + if (serviceableElement.ValueKind is JsonValueKind.True) + { + serviceable = true; + } + else if (serviceableElement.ValueKind is JsonValueKind.False) + { + serviceable = false; + } + } + + var sha512 = element.TryGetProperty("sha512", out var sha512Element) && sha512Element.ValueKind == JsonValueKind.String + ? sha512Element.GetString() + : null; + + var path = element.TryGetProperty("path", out var pathElement) && pathElement.ValueKind == JsonValueKind.String + ? pathElement.GetString() + : null; + + var hashPath = element.TryGetProperty("hashPath", out var hashElement) && hashElement.ValueKind == JsonValueKind.String + ? hashElement.GetString() + : null; + + library = new DotNetLibrary(key, id, version, type, serviceable, sha512, path, hashPath); + library.MergeLibraryMetadata(element); + return true; + } + + public void AddTargetFramework(string tfm) + { + if (!string.IsNullOrWhiteSpace(tfm)) + { + _targetFrameworks.Add(tfm); + } + } + + public void AddRuntimeIdentifier(string rid) + { + if (!string.IsNullOrWhiteSpace(rid)) + { + _runtimeIdentifiers.Add(rid); + } + } + public void MergeTargetMetadata(JsonElement element, string? tfm, string? rid) { if (element.TryGetProperty("dependencies", out var dependenciesElement) && dependenciesElement.ValueKind is JsonValueKind.Object) @@ -245,7 +245,7 @@ internal sealed class DotNetLibrary MergeRuntimeAssets(element, tfm, rid); } - + public void MergeLibraryMetadata(JsonElement element) { if (element.TryGetProperty("dependencies", out var dependenciesElement) && dependenciesElement.ValueKind is JsonValueKind.Object) @@ -296,66 +296,66 @@ internal sealed class DotNetLibrary } } } - - private void AddDependency(string name) - { - if (string.IsNullOrWhiteSpace(name)) - { - return; - } - - var dependencyId = name; - if (TrySplitNameAndVersion(name, out var parsedName, out _)) - { - dependencyId = parsedName; - } - - if (!string.IsNullOrWhiteSpace(dependencyId)) - { - _dependencies.Add(dependencyId); - } - } - - private static bool TrySplitNameAndVersion(string key, out string name, out string version) - { - name = string.Empty; - version = string.Empty; - - if (string.IsNullOrWhiteSpace(key)) - { - return false; - } - - var separatorIndex = key.LastIndexOf('/'); - if (separatorIndex <= 0 || separatorIndex >= key.Length - 1) - { - return false; - } - - name = key[..separatorIndex].Trim(); - version = key[(separatorIndex + 1)..].Trim(); - return name.Length > 0 && version.Length > 0; - } - - private static string? NormalizePath(string? 
path) - { - if (string.IsNullOrWhiteSpace(path)) - { - return null; - } - - return path.Replace('\\', '/'); - } - - private static string? NormalizeValue(string? value) - { - if (string.IsNullOrWhiteSpace(value)) - { - return null; - } - - return value.Trim(); - } + + private void AddDependency(string name) + { + if (string.IsNullOrWhiteSpace(name)) + { + return; + } + + var dependencyId = name; + if (TrySplitNameAndVersion(name, out var parsedName, out _)) + { + dependencyId = parsedName; + } + + if (!string.IsNullOrWhiteSpace(dependencyId)) + { + _dependencies.Add(dependencyId); + } + } + + private static bool TrySplitNameAndVersion(string key, out string name, out string version) + { + name = string.Empty; + version = string.Empty; + + if (string.IsNullOrWhiteSpace(key)) + { + return false; + } + + var separatorIndex = key.LastIndexOf('/'); + if (separatorIndex <= 0 || separatorIndex >= key.Length - 1) + { + return false; + } + + name = key[..separatorIndex].Trim(); + version = key[(separatorIndex + 1)..].Trim(); + return name.Length > 0 && version.Length > 0; + } + + private static string? NormalizePath(string? path) + { + if (string.IsNullOrWhiteSpace(path)) + { + return null; + } + + return path.Replace('\\', '/'); + } + + private static string? NormalizeValue(string? value) + { + if (string.IsNullOrWhiteSpace(value)) + { + return null; + } + + return value.Trim(); + } } internal enum DotNetLibraryAssetKind diff --git a/src/StellaOps.Scanner.Analyzers.Lang.DotNet/Internal/DotNetFileCaches.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/Internal/DotNetFileCaches.cs similarity index 96% rename from src/StellaOps.Scanner.Analyzers.Lang.DotNet/Internal/DotNetFileCaches.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/Internal/DotNetFileCaches.cs index b3a1b9ad..fcb9a10e 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.DotNet/Internal/DotNetFileCaches.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/Internal/DotNetFileCaches.cs @@ -1,332 +1,332 @@ -using System.Collections.Concurrent; -using System.Collections.Generic; -using System.Diagnostics; -using System.IO; -using System.Linq; -using System.Reflection; -using System.Security; -using System.Xml; - -namespace StellaOps.Scanner.Analyzers.Lang.DotNet.Internal; - -internal static class DotNetFileMetadataCache -{ - private static readonly ConcurrentDictionary<DotNetFileCacheKey, Optional<string>> Sha256Cache = new(); - private static readonly ConcurrentDictionary<DotNetFileCacheKey, Optional<AssemblyName>> AssemblyCache = new(); - private static readonly ConcurrentDictionary<DotNetFileCacheKey, Optional<FileVersionInfo>> VersionCache = new(); - - public static bool TryGetSha256(string path, out string? sha256) - => TryGet(path, Sha256Cache, ComputeSha256, out sha256); - - public static bool TryGetAssemblyName(string path, out AssemblyName? assemblyName) - => TryGet(path, AssemblyCache, TryReadAssemblyName, out assemblyName); - - public static bool TryGetFileVersionInfo(string path, out FileVersionInfo? versionInfo) - => TryGet(path, VersionCache, TryReadFileVersionInfo, out versionInfo); - - private static bool TryGet<T>(string path, ConcurrentDictionary<DotNetFileCacheKey, Optional<T>> cache, Func<string, T?> resolver, out T? 
value) - where T : class - { - value = null; - - DotNetFileCacheKey key; - try - { - var info = new FileInfo(path); - if (!info.Exists) - { - return false; - } - - key = new DotNetFileCacheKey(info.FullName, info.Length, info.LastWriteTimeUtc.Ticks); - } - catch (IOException) - { - return false; - } - catch (UnauthorizedAccessException) - { - return false; - } - catch (SecurityException) - { - return false; - } - catch (ArgumentException) - { - return false; - } - catch (NotSupportedException) - { - return false; - } - - var optional = cache.GetOrAdd(key, static (cacheKey, state) => CreateOptional(cacheKey.Path, state.resolver), (resolver, path)); - if (!optional.HasValue) - { - return false; - } - - value = optional.Value; - return value is not null; - } - - private static Optional<T> CreateOptional<T>(string path, Func<string, T?> resolver) where T : class - { - try - { - var value = resolver(path); - return Optional<T>.From(value); - } - catch (FileNotFoundException) - { - return Optional<T>.None; - } - catch (FileLoadException) - { - return Optional<T>.None; - } - catch (BadImageFormatException) - { - return Optional<T>.None; - } - catch (UnauthorizedAccessException) - { - return Optional<T>.None; - } - catch (SecurityException) - { - return Optional<T>.None; - } - catch (IOException) - { - return Optional<T>.None; - } - } - - private static string? ComputeSha256(string path) - { - using var stream = File.Open(path, FileMode.Open, FileAccess.Read, FileShare.Read); - using var sha = System.Security.Cryptography.SHA256.Create(); - var hash = sha.ComputeHash(stream); - return Convert.ToHexString(hash).ToLowerInvariant(); - } - - private static AssemblyName? TryReadAssemblyName(string path) - { - try - { - return AssemblyName.GetAssemblyName(path); - } - catch (FileNotFoundException) - { - return null; - } - catch (FileLoadException) - { - return null; - } - catch (BadImageFormatException) - { - return null; - } - catch (IOException) - { - return null; - } - } - - private static FileVersionInfo? TryReadFileVersionInfo(string path) - { - try - { - return FileVersionInfo.GetVersionInfo(path); - } - catch (FileNotFoundException) - { - return null; - } - catch (IOException) - { - return null; - } - catch (UnauthorizedAccessException) - { - return null; - } - } -} - -internal static class DotNetLicenseCache -{ - private static readonly ConcurrentDictionary<DotNetFileCacheKey, Optional<DotNetLicenseInfo>> Licenses = new(); - - public static bool TryGetLicenseInfo(string nuspecPath, out DotNetLicenseInfo? 
info) - { - info = null; - - DotNetFileCacheKey key; - try - { - var fileInfo = new FileInfo(nuspecPath); - if (!fileInfo.Exists) - { - return false; - } - - key = new DotNetFileCacheKey(fileInfo.FullName, fileInfo.Length, fileInfo.LastWriteTimeUtc.Ticks); - } - catch (IOException) - { - return false; - } - catch (UnauthorizedAccessException) - { - return false; - } - catch (SecurityException) - { - return false; - } - - var optional = Licenses.GetOrAdd(key, static (cacheKey, path) => CreateOptional(path), nuspecPath); - if (!optional.HasValue) - { - return false; - } - - info = optional.Value; - return info is not null; - } - - private static Optional<DotNetLicenseInfo> CreateOptional(string nuspecPath) - { - try - { - var info = Parse(nuspecPath); - return Optional<DotNetLicenseInfo>.From(info); - } - catch (IOException) - { - return Optional<DotNetLicenseInfo>.None; - } - catch (UnauthorizedAccessException) - { - return Optional<DotNetLicenseInfo>.None; - } - catch (SecurityException) - { - return Optional<DotNetLicenseInfo>.None; - } - catch (XmlException) - { - return Optional<DotNetLicenseInfo>.None; - } - } - - private static DotNetLicenseInfo? Parse(string path) - { - using var stream = File.OpenRead(path); - using var reader = XmlReader.Create(stream, new XmlReaderSettings - { - DtdProcessing = DtdProcessing.Ignore, - IgnoreComments = true, - IgnoreWhitespace = true, - }); - - var expressions = new SortedSet<string>(StringComparer.OrdinalIgnoreCase); - var files = new SortedSet<string>(StringComparer.OrdinalIgnoreCase); - var urls = new SortedSet<string>(StringComparer.OrdinalIgnoreCase); - - while (reader.Read()) - { - if (reader.NodeType != XmlNodeType.Element) - { - continue; - } - - if (string.Equals(reader.LocalName, "license", StringComparison.OrdinalIgnoreCase)) - { - var type = reader.GetAttribute("type"); - var value = reader.ReadElementContentAsString()?.Trim(); - - if (string.IsNullOrWhiteSpace(value)) - { - continue; - } - - if (string.Equals(type, "expression", StringComparison.OrdinalIgnoreCase)) - { - expressions.Add(value); - } - else if (string.Equals(type, "file", StringComparison.OrdinalIgnoreCase)) - { - files.Add(NormalizeLicensePath(value)); - } - else - { - expressions.Add(value); - } - } - else if (string.Equals(reader.LocalName, "licenseUrl", StringComparison.OrdinalIgnoreCase)) - { - var value = reader.ReadElementContentAsString()?.Trim(); - if (!string.IsNullOrWhiteSpace(value)) - { - urls.Add(value); - } - } - } - - if (expressions.Count == 0 && files.Count == 0 && urls.Count == 0) - { - return null; - } - - return new DotNetLicenseInfo( - expressions.ToArray(), - files.ToArray(), - urls.ToArray()); - } - - private static string NormalizeLicensePath(string value) - => value.Replace('\\', '/').Trim(); -} - -internal sealed record DotNetLicenseInfo( - IReadOnlyList<string> Expressions, - IReadOnlyList<string> Files, - IReadOnlyList<string> Urls); - -internal readonly record struct DotNetFileCacheKey(string Path, long Length, long LastWriteTicks) -{ - private readonly string _normalizedPath = OperatingSystem.IsWindows() - ? 
Path.ToLowerInvariant() - : Path; - - public bool Equals(DotNetFileCacheKey other) - => Length == other.Length - && LastWriteTicks == other.LastWriteTicks - && string.Equals(_normalizedPath, other._normalizedPath, StringComparison.Ordinal); - - public override int GetHashCode() - => HashCode.Combine(_normalizedPath, Length, LastWriteTicks); -} - -internal readonly struct Optional<T> where T : class -{ - private Optional(bool hasValue, T? value) - { - HasValue = hasValue; - Value = value; - } - - public bool HasValue { get; } - - public T? Value { get; } - - public static Optional<T> From(T? value) - => value is null ? None : new Optional<T>(true, value); - - public static Optional<T> None => default; -} +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Diagnostics; +using System.IO; +using System.Linq; +using System.Reflection; +using System.Security; +using System.Xml; + +namespace StellaOps.Scanner.Analyzers.Lang.DotNet.Internal; + +internal static class DotNetFileMetadataCache +{ + private static readonly ConcurrentDictionary<DotNetFileCacheKey, Optional<string>> Sha256Cache = new(); + private static readonly ConcurrentDictionary<DotNetFileCacheKey, Optional<AssemblyName>> AssemblyCache = new(); + private static readonly ConcurrentDictionary<DotNetFileCacheKey, Optional<FileVersionInfo>> VersionCache = new(); + + public static bool TryGetSha256(string path, out string? sha256) + => TryGet(path, Sha256Cache, ComputeSha256, out sha256); + + public static bool TryGetAssemblyName(string path, out AssemblyName? assemblyName) + => TryGet(path, AssemblyCache, TryReadAssemblyName, out assemblyName); + + public static bool TryGetFileVersionInfo(string path, out FileVersionInfo? versionInfo) + => TryGet(path, VersionCache, TryReadFileVersionInfo, out versionInfo); + + private static bool TryGet<T>(string path, ConcurrentDictionary<DotNetFileCacheKey, Optional<T>> cache, Func<string, T?> resolver, out T? value) + where T : class + { + value = null; + + DotNetFileCacheKey key; + try + { + var info = new FileInfo(path); + if (!info.Exists) + { + return false; + } + + key = new DotNetFileCacheKey(info.FullName, info.Length, info.LastWriteTimeUtc.Ticks); + } + catch (IOException) + { + return false; + } + catch (UnauthorizedAccessException) + { + return false; + } + catch (SecurityException) + { + return false; + } + catch (ArgumentException) + { + return false; + } + catch (NotSupportedException) + { + return false; + } + + var optional = cache.GetOrAdd(key, static (cacheKey, state) => CreateOptional(cacheKey.Path, state.resolver), (resolver, path)); + if (!optional.HasValue) + { + return false; + } + + value = optional.Value; + return value is not null; + } + + private static Optional<T> CreateOptional<T>(string path, Func<string, T?> resolver) where T : class + { + try + { + var value = resolver(path); + return Optional<T>.From(value); + } + catch (FileNotFoundException) + { + return Optional<T>.None; + } + catch (FileLoadException) + { + return Optional<T>.None; + } + catch (BadImageFormatException) + { + return Optional<T>.None; + } + catch (UnauthorizedAccessException) + { + return Optional<T>.None; + } + catch (SecurityException) + { + return Optional<T>.None; + } + catch (IOException) + { + return Optional<T>.None; + } + } + + private static string? 
ComputeSha256(string path) + { + using var stream = File.Open(path, FileMode.Open, FileAccess.Read, FileShare.Read); + using var sha = System.Security.Cryptography.SHA256.Create(); + var hash = sha.ComputeHash(stream); + return Convert.ToHexString(hash).ToLowerInvariant(); + } + + private static AssemblyName? TryReadAssemblyName(string path) + { + try + { + return AssemblyName.GetAssemblyName(path); + } + catch (FileNotFoundException) + { + return null; + } + catch (FileLoadException) + { + return null; + } + catch (BadImageFormatException) + { + return null; + } + catch (IOException) + { + return null; + } + } + + private static FileVersionInfo? TryReadFileVersionInfo(string path) + { + try + { + return FileVersionInfo.GetVersionInfo(path); + } + catch (FileNotFoundException) + { + return null; + } + catch (IOException) + { + return null; + } + catch (UnauthorizedAccessException) + { + return null; + } + } +} + +internal static class DotNetLicenseCache +{ + private static readonly ConcurrentDictionary<DotNetFileCacheKey, Optional<DotNetLicenseInfo>> Licenses = new(); + + public static bool TryGetLicenseInfo(string nuspecPath, out DotNetLicenseInfo? info) + { + info = null; + + DotNetFileCacheKey key; + try + { + var fileInfo = new FileInfo(nuspecPath); + if (!fileInfo.Exists) + { + return false; + } + + key = new DotNetFileCacheKey(fileInfo.FullName, fileInfo.Length, fileInfo.LastWriteTimeUtc.Ticks); + } + catch (IOException) + { + return false; + } + catch (UnauthorizedAccessException) + { + return false; + } + catch (SecurityException) + { + return false; + } + + var optional = Licenses.GetOrAdd(key, static (cacheKey, path) => CreateOptional(path), nuspecPath); + if (!optional.HasValue) + { + return false; + } + + info = optional.Value; + return info is not null; + } + + private static Optional<DotNetLicenseInfo> CreateOptional(string nuspecPath) + { + try + { + var info = Parse(nuspecPath); + return Optional<DotNetLicenseInfo>.From(info); + } + catch (IOException) + { + return Optional<DotNetLicenseInfo>.None; + } + catch (UnauthorizedAccessException) + { + return Optional<DotNetLicenseInfo>.None; + } + catch (SecurityException) + { + return Optional<DotNetLicenseInfo>.None; + } + catch (XmlException) + { + return Optional<DotNetLicenseInfo>.None; + } + } + + private static DotNetLicenseInfo? 
Parse(string path) + { + using var stream = File.OpenRead(path); + using var reader = XmlReader.Create(stream, new XmlReaderSettings + { + DtdProcessing = DtdProcessing.Ignore, + IgnoreComments = true, + IgnoreWhitespace = true, + }); + + var expressions = new SortedSet<string>(StringComparer.OrdinalIgnoreCase); + var files = new SortedSet<string>(StringComparer.OrdinalIgnoreCase); + var urls = new SortedSet<string>(StringComparer.OrdinalIgnoreCase); + + while (reader.Read()) + { + if (reader.NodeType != XmlNodeType.Element) + { + continue; + } + + if (string.Equals(reader.LocalName, "license", StringComparison.OrdinalIgnoreCase)) + { + var type = reader.GetAttribute("type"); + var value = reader.ReadElementContentAsString()?.Trim(); + + if (string.IsNullOrWhiteSpace(value)) + { + continue; + } + + if (string.Equals(type, "expression", StringComparison.OrdinalIgnoreCase)) + { + expressions.Add(value); + } + else if (string.Equals(type, "file", StringComparison.OrdinalIgnoreCase)) + { + files.Add(NormalizeLicensePath(value)); + } + else + { + expressions.Add(value); + } + } + else if (string.Equals(reader.LocalName, "licenseUrl", StringComparison.OrdinalIgnoreCase)) + { + var value = reader.ReadElementContentAsString()?.Trim(); + if (!string.IsNullOrWhiteSpace(value)) + { + urls.Add(value); + } + } + } + + if (expressions.Count == 0 && files.Count == 0 && urls.Count == 0) + { + return null; + } + + return new DotNetLicenseInfo( + expressions.ToArray(), + files.ToArray(), + urls.ToArray()); + } + + private static string NormalizeLicensePath(string value) + => value.Replace('\\', '/').Trim(); +} + +internal sealed record DotNetLicenseInfo( + IReadOnlyList<string> Expressions, + IReadOnlyList<string> Files, + IReadOnlyList<string> Urls); + +internal readonly record struct DotNetFileCacheKey(string Path, long Length, long LastWriteTicks) +{ + private readonly string _normalizedPath = OperatingSystem.IsWindows() + ? Path.ToLowerInvariant() + : Path; + + public bool Equals(DotNetFileCacheKey other) + => Length == other.Length + && LastWriteTicks == other.LastWriteTicks + && string.Equals(_normalizedPath, other._normalizedPath, StringComparison.Ordinal); + + public override int GetHashCode() + => HashCode.Combine(_normalizedPath, Length, LastWriteTicks); +} + +internal readonly struct Optional<T> where T : class +{ + private Optional(bool hasValue, T? value) + { + HasValue = hasValue; + Value = value; + } + + public bool HasValue { get; } + + public T? Value { get; } + + public static Optional<T> From(T? value) + => value is null ? 
None : new Optional<T>(true, value); + + public static Optional<T> None => default; +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.DotNet/Internal/DotNetRuntimeConfig.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/Internal/DotNetRuntimeConfig.cs similarity index 97% rename from src/StellaOps.Scanner.Analyzers.Lang.DotNet/Internal/DotNetRuntimeConfig.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/Internal/DotNetRuntimeConfig.cs index 91d8ac63..3274b4b6 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.DotNet/Internal/DotNetRuntimeConfig.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/Internal/DotNetRuntimeConfig.cs @@ -1,158 +1,158 @@ -using System.Linq; -using System.Text.Json; - -namespace StellaOps.Scanner.Analyzers.Lang.DotNet.Internal; - -internal sealed class DotNetRuntimeConfig -{ - private DotNetRuntimeConfig( - string relativePath, - IReadOnlyCollection<string> tfms, - IReadOnlyCollection<string> frameworks, - IReadOnlyCollection<RuntimeGraphEntry> runtimeGraph) - { - RelativePath = relativePath; - Tfms = tfms; - Frameworks = frameworks; - RuntimeGraph = runtimeGraph; - } - - public string RelativePath { get; } - - public IReadOnlyCollection<string> Tfms { get; } - - public IReadOnlyCollection<string> Frameworks { get; } - - public IReadOnlyCollection<RuntimeGraphEntry> RuntimeGraph { get; } - - public static DotNetRuntimeConfig? Load(string absolutePath, string relativePath, CancellationToken cancellationToken) - { - using var stream = File.OpenRead(absolutePath); - using var document = JsonDocument.Parse(stream, new JsonDocumentOptions - { - AllowTrailingCommas = true, - CommentHandling = JsonCommentHandling.Skip - }); - - var root = document.RootElement; - if (!root.TryGetProperty("runtimeOptions", out var runtimeOptions) || runtimeOptions.ValueKind is not JsonValueKind.Object) - { - return null; - } - - var tfms = new SortedSet<string>(StringComparer.OrdinalIgnoreCase); - var frameworks = new SortedSet<string>(StringComparer.OrdinalIgnoreCase); - var runtimeGraph = new List<RuntimeGraphEntry>(); - - if (runtimeOptions.TryGetProperty("tfm", out var tfmElement) && tfmElement.ValueKind == JsonValueKind.String) - { - AddIfPresent(tfms, tfmElement.GetString()); - } - - if (runtimeOptions.TryGetProperty("framework", out var frameworkElement) && frameworkElement.ValueKind == JsonValueKind.Object) - { - var frameworkId = FormatFramework(frameworkElement); - AddIfPresent(frameworks, frameworkId); - } - - if (runtimeOptions.TryGetProperty("frameworks", out var frameworksElement) && frameworksElement.ValueKind == JsonValueKind.Array) - { - foreach (var item in frameworksElement.EnumerateArray()) - { - cancellationToken.ThrowIfCancellationRequested(); - var frameworkId = FormatFramework(item); - AddIfPresent(frameworks, frameworkId); - } - } - - if (runtimeOptions.TryGetProperty("includedFrameworks", out var includedElement) && includedElement.ValueKind == JsonValueKind.Array) - { - foreach (var item in includedElement.EnumerateArray()) - { - cancellationToken.ThrowIfCancellationRequested(); - var frameworkId = FormatFramework(item); - AddIfPresent(frameworks, frameworkId); - } - } - - if (runtimeOptions.TryGetProperty("runtimeGraph", out var runtimeGraphElement) && - runtimeGraphElement.ValueKind == JsonValueKind.Object && - runtimeGraphElement.TryGetProperty("runtimes", out var runtimesElement) && - runtimesElement.ValueKind == JsonValueKind.Object) - { - foreach (var ridProperty in 
runtimesElement.EnumerateObject()) - { - cancellationToken.ThrowIfCancellationRequested(); - - if (string.IsNullOrWhiteSpace(ridProperty.Name)) - { - continue; - } - - var fallbacks = new List<string>(); - if (ridProperty.Value.ValueKind == JsonValueKind.Object && - ridProperty.Value.TryGetProperty("fallbacks", out var fallbacksElement) && - fallbacksElement.ValueKind == JsonValueKind.Array) - { - foreach (var fallback in fallbacksElement.EnumerateArray()) - { - if (fallback.ValueKind == JsonValueKind.String) - { - var fallbackValue = fallback.GetString(); - if (!string.IsNullOrWhiteSpace(fallbackValue)) - { - fallbacks.Add(fallbackValue.Trim()); - } - } - } - } - - runtimeGraph.Add(new RuntimeGraphEntry(ridProperty.Name.Trim(), fallbacks)); - } - } - - return new DotNetRuntimeConfig( - relativePath, - tfms.ToArray(), - frameworks.ToArray(), - runtimeGraph); - } - - private static void AddIfPresent(ISet<string> set, string? value) - { - if (!string.IsNullOrWhiteSpace(value)) - { - set.Add(value.Trim()); - } - } - - private static string? FormatFramework(JsonElement element) - { - if (element.ValueKind is not JsonValueKind.Object) - { - return null; - } - - var name = element.TryGetProperty("name", out var nameElement) && nameElement.ValueKind == JsonValueKind.String - ? nameElement.GetString() - : null; - - var version = element.TryGetProperty("version", out var versionElement) && versionElement.ValueKind == JsonValueKind.String - ? versionElement.GetString() - : null; - - if (string.IsNullOrWhiteSpace(name)) - { - return null; - } - - if (string.IsNullOrWhiteSpace(version)) - { - return name.Trim(); - } - - return $"{name.Trim()}@{version.Trim()}"; - } - - internal sealed record RuntimeGraphEntry(string Rid, IReadOnlyList<string> Fallbacks); -} +using System.Linq; +using System.Text.Json; + +namespace StellaOps.Scanner.Analyzers.Lang.DotNet.Internal; + +internal sealed class DotNetRuntimeConfig +{ + private DotNetRuntimeConfig( + string relativePath, + IReadOnlyCollection<string> tfms, + IReadOnlyCollection<string> frameworks, + IReadOnlyCollection<RuntimeGraphEntry> runtimeGraph) + { + RelativePath = relativePath; + Tfms = tfms; + Frameworks = frameworks; + RuntimeGraph = runtimeGraph; + } + + public string RelativePath { get; } + + public IReadOnlyCollection<string> Tfms { get; } + + public IReadOnlyCollection<string> Frameworks { get; } + + public IReadOnlyCollection<RuntimeGraphEntry> RuntimeGraph { get; } + + public static DotNetRuntimeConfig? 
Load(string absolutePath, string relativePath, CancellationToken cancellationToken) + { + using var stream = File.OpenRead(absolutePath); + using var document = JsonDocument.Parse(stream, new JsonDocumentOptions + { + AllowTrailingCommas = true, + CommentHandling = JsonCommentHandling.Skip + }); + + var root = document.RootElement; + if (!root.TryGetProperty("runtimeOptions", out var runtimeOptions) || runtimeOptions.ValueKind is not JsonValueKind.Object) + { + return null; + } + + var tfms = new SortedSet<string>(StringComparer.OrdinalIgnoreCase); + var frameworks = new SortedSet<string>(StringComparer.OrdinalIgnoreCase); + var runtimeGraph = new List<RuntimeGraphEntry>(); + + if (runtimeOptions.TryGetProperty("tfm", out var tfmElement) && tfmElement.ValueKind == JsonValueKind.String) + { + AddIfPresent(tfms, tfmElement.GetString()); + } + + if (runtimeOptions.TryGetProperty("framework", out var frameworkElement) && frameworkElement.ValueKind == JsonValueKind.Object) + { + var frameworkId = FormatFramework(frameworkElement); + AddIfPresent(frameworks, frameworkId); + } + + if (runtimeOptions.TryGetProperty("frameworks", out var frameworksElement) && frameworksElement.ValueKind == JsonValueKind.Array) + { + foreach (var item in frameworksElement.EnumerateArray()) + { + cancellationToken.ThrowIfCancellationRequested(); + var frameworkId = FormatFramework(item); + AddIfPresent(frameworks, frameworkId); + } + } + + if (runtimeOptions.TryGetProperty("includedFrameworks", out var includedElement) && includedElement.ValueKind == JsonValueKind.Array) + { + foreach (var item in includedElement.EnumerateArray()) + { + cancellationToken.ThrowIfCancellationRequested(); + var frameworkId = FormatFramework(item); + AddIfPresent(frameworks, frameworkId); + } + } + + if (runtimeOptions.TryGetProperty("runtimeGraph", out var runtimeGraphElement) && + runtimeGraphElement.ValueKind == JsonValueKind.Object && + runtimeGraphElement.TryGetProperty("runtimes", out var runtimesElement) && + runtimesElement.ValueKind == JsonValueKind.Object) + { + foreach (var ridProperty in runtimesElement.EnumerateObject()) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (string.IsNullOrWhiteSpace(ridProperty.Name)) + { + continue; + } + + var fallbacks = new List<string>(); + if (ridProperty.Value.ValueKind == JsonValueKind.Object && + ridProperty.Value.TryGetProperty("fallbacks", out var fallbacksElement) && + fallbacksElement.ValueKind == JsonValueKind.Array) + { + foreach (var fallback in fallbacksElement.EnumerateArray()) + { + if (fallback.ValueKind == JsonValueKind.String) + { + var fallbackValue = fallback.GetString(); + if (!string.IsNullOrWhiteSpace(fallbackValue)) + { + fallbacks.Add(fallbackValue.Trim()); + } + } + } + } + + runtimeGraph.Add(new RuntimeGraphEntry(ridProperty.Name.Trim(), fallbacks)); + } + } + + return new DotNetRuntimeConfig( + relativePath, + tfms.ToArray(), + frameworks.ToArray(), + runtimeGraph); + } + + private static void AddIfPresent(ISet<string> set, string? value) + { + if (!string.IsNullOrWhiteSpace(value)) + { + set.Add(value.Trim()); + } + } + + private static string? FormatFramework(JsonElement element) + { + if (element.ValueKind is not JsonValueKind.Object) + { + return null; + } + + var name = element.TryGetProperty("name", out var nameElement) && nameElement.ValueKind == JsonValueKind.String + ? nameElement.GetString() + : null; + + var version = element.TryGetProperty("version", out var versionElement) && versionElement.ValueKind == JsonValueKind.String + ? 
versionElement.GetString() + : null; + + if (string.IsNullOrWhiteSpace(name)) + { + return null; + } + + if (string.IsNullOrWhiteSpace(version)) + { + return name.Trim(); + } + + return $"{name.Trim()}@{version.Trim()}"; + } + + internal sealed record RuntimeGraphEntry(string Rid, IReadOnlyList<string> Fallbacks); +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.DotNet/StellaOps.Scanner.Analyzers.Lang.DotNet.csproj b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/StellaOps.Scanner.Analyzers.Lang.DotNet.csproj similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.DotNet/StellaOps.Scanner.Analyzers.Lang.DotNet.csproj rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/StellaOps.Scanner.Analyzers.Lang.DotNet.csproj diff --git a/src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md similarity index 99% rename from src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md index 040d3fee..94e5945c 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/TASKS.md @@ -1,19 +1,19 @@ -# .NET Analyzer Task Flow - -| Seq | ID | Status | Depends on | Description | Exit Criteria | -|-----|----|--------|------------|-------------|---------------| -| 1 | SCANNER-ANALYZERS-LANG-10-305A | DONE (2025-10-22) | SCANNER-ANALYZERS-LANG-10-307 | Parse `*.deps.json` + `runtimeconfig.json`, build RID graph, and normalize to `pkg:nuget` components. | RID graph deterministic; fixtures confirm consistent component ordering; fallback to `bin:{sha256}` documented. | -| 2 | SCANNER-ANALYZERS-LANG-10-305B | DONE (2025-10-22) | SCANNER-ANALYZERS-LANG-10-305A | Extract assembly metadata (strong name, file/product info) and optional Authenticode details when offline cert bundle provided. | Signing metadata captured for signed assemblies; offline trust store documented; hash validations deterministic. | -| 3 | SCANNER-ANALYZERS-LANG-10-305C | DONE (2025-10-22) | SCANNER-ANALYZERS-LANG-10-305B | Handle self-contained apps and native assets; merge with EntryTrace usage hints. | Self-contained fixtures map to components with RID flags; usage hints propagate; tests cover linux/win variants. | -| 4 | SCANNER-ANALYZERS-LANG-10-307D | DONE (2025-10-22) | SCANNER-ANALYZERS-LANG-10-305C | Integrate shared helpers (license mapping, quiet provenance) and concurrency-safe caches. | Shared helpers reused; concurrency tests for parallel layer scans pass; no redundant allocations. | -| 5 | SCANNER-ANALYZERS-LANG-10-308D | DONE (2025-10-23) | SCANNER-ANALYZERS-LANG-10-307D | Determinism fixtures + benchmark harness; compare to competitor scanners for accuracy/perf. | Fixtures in `Fixtures/lang/dotnet/`; determinism CI guard; benchmark demonstrates lower duplication + faster runtime. | -| 6 | SCANNER-ANALYZERS-LANG-10-309D | DONE (2025-10-23) | SCANNER-ANALYZERS-LANG-10-308D | Package plug-in (manifest, DI registration) and update Offline Kit instructions. | Manifest copied to `plugins/scanner/analyzers/lang/`; Worker loads analyzer; Offline Kit doc updated. 
| - -## .NET Entry-Point & Dependency Resolver (Sprint 11) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| SCANNER-ANALYZERS-LANG-11-001 | TODO | StellaOps.Scanner EPDR Guild, Language Analyzer Guild | - | Build entrypoint resolver that maps project/publish artifacts to entrypoint identities (assembly name, MVID, TFM, RID) and environment profiles (publish mode, host kind, probing paths). Output normalized `entrypoints[]` records with deterministic IDs. | Entrypoint records produced for fixtures (framework-dependent, self-contained, single-file, multi-TFM/RID); determinism check passes; docs updated. | -| SCANNER-ANALYZERS-LANG-11-002 | TODO | StellaOps.Scanner EPDR Guild | SCANNER-ANALYZERS-LANG-11-001 | Implement static analyzer (IL + reflection heuristics) capturing AssemblyRef, ModuleRef/PInvoke, DynamicDependency, reflection literals, DI patterns, and custom AssemblyLoadContext probing hints. Emit dependency edges with reason codes and confidence. | Static analysis coverage demonstrated on fixtures; edges carry reason codes (`il-assemblyref`, `il-moduleref`, `reflection-literal`, `alc-probing`); tests cover trimmed/single-file cases. | -| SCANNER-ANALYZERS-LANG-11-003 | TODO | StellaOps.Scanner EPDR Guild, Signals Guild | SCANNER-ANALYZERS-LANG-11-002 | Ingest optional runtime evidence (AssemblyLoad, Resolving, P/Invoke) via event listener harness; merge runtime edges with static/declared ones and attach reason codes/confidence. | Runtime listener service pluggable; fixtures record runtime edges; merged output shows combined reason set with confidence per edge. | -| SCANNER-ANALYZERS-LANG-11-004 | TODO | StellaOps.Scanner EPDR Guild, SBOM Service Guild | SCANNER-ANALYZERS-LANG-11-002 | Produce normalized observation export to Scanner writer: entrypoints + dependency edges + environment profiles (AOC compliant). Wire to SBOM service entrypoint tagging. | Analyzer writes observation records consumed by SBOM service tests; AOC compliance docs updated; determinism checked. | -| SCANNER-ANALYZERS-LANG-11-005 | TODO | StellaOps.Scanner EPDR Guild, QA Guild | SCANNER-ANALYZERS-LANG-11-004 | Add comprehensive fixtures/benchmarks covering framework-dependent, self-contained, single-file, trimmed, NativeAOT, multi-RID scenarios; include explain traces and perf benchmarks vs previous analyzer. | Fixtures stored under `fixtures/lang/dotnet/epdr`; determinism + perf thresholds validated; benchmark results documented. | +# .NET Analyzer Task Flow + +| Seq | ID | Status | Depends on | Description | Exit Criteria | +|-----|----|--------|------------|-------------|---------------| +| 1 | SCANNER-ANALYZERS-LANG-10-305A | DONE (2025-10-22) | SCANNER-ANALYZERS-LANG-10-307 | Parse `*.deps.json` + `runtimeconfig.json`, build RID graph, and normalize to `pkg:nuget` components. | RID graph deterministic; fixtures confirm consistent component ordering; fallback to `bin:{sha256}` documented. | +| 2 | SCANNER-ANALYZERS-LANG-10-305B | DONE (2025-10-22) | SCANNER-ANALYZERS-LANG-10-305A | Extract assembly metadata (strong name, file/product info) and optional Authenticode details when offline cert bundle provided. | Signing metadata captured for signed assemblies; offline trust store documented; hash validations deterministic. | +| 3 | SCANNER-ANALYZERS-LANG-10-305C | DONE (2025-10-22) | SCANNER-ANALYZERS-LANG-10-305B | Handle self-contained apps and native assets; merge with EntryTrace usage hints. 
| Self-contained fixtures map to components with RID flags; usage hints propagate; tests cover linux/win variants. | +| 4 | SCANNER-ANALYZERS-LANG-10-307D | DONE (2025-10-22) | SCANNER-ANALYZERS-LANG-10-305C | Integrate shared helpers (license mapping, quiet provenance) and concurrency-safe caches. | Shared helpers reused; concurrency tests for parallel layer scans pass; no redundant allocations. | +| 5 | SCANNER-ANALYZERS-LANG-10-308D | DONE (2025-10-23) | SCANNER-ANALYZERS-LANG-10-307D | Determinism fixtures + benchmark harness; compare to competitor scanners for accuracy/perf. | Fixtures in `Fixtures/lang/dotnet/`; determinism CI guard; benchmark demonstrates lower duplication + faster runtime. | +| 6 | SCANNER-ANALYZERS-LANG-10-309D | DONE (2025-10-23) | SCANNER-ANALYZERS-LANG-10-308D | Package plug-in (manifest, DI registration) and update Offline Kit instructions. | Manifest copied to `plugins/scanner/analyzers/lang/`; Worker loads analyzer; Offline Kit doc updated. | + +## .NET Entry-Point & Dependency Resolver (Sprint 11) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| SCANNER-ANALYZERS-LANG-11-001 | TODO | StellaOps.Scanner EPDR Guild, Language Analyzer Guild | - | Build entrypoint resolver that maps project/publish artifacts to entrypoint identities (assembly name, MVID, TFM, RID) and environment profiles (publish mode, host kind, probing paths). Output normalized `entrypoints[]` records with deterministic IDs. | Entrypoint records produced for fixtures (framework-dependent, self-contained, single-file, multi-TFM/RID); determinism check passes; docs updated. | +| SCANNER-ANALYZERS-LANG-11-002 | TODO | StellaOps.Scanner EPDR Guild | SCANNER-ANALYZERS-LANG-11-001 | Implement static analyzer (IL + reflection heuristics) capturing AssemblyRef, ModuleRef/PInvoke, DynamicDependency, reflection literals, DI patterns, and custom AssemblyLoadContext probing hints. Emit dependency edges with reason codes and confidence. | Static analysis coverage demonstrated on fixtures; edges carry reason codes (`il-assemblyref`, `il-moduleref`, `reflection-literal`, `alc-probing`); tests cover trimmed/single-file cases. | +| SCANNER-ANALYZERS-LANG-11-003 | TODO | StellaOps.Scanner EPDR Guild, Signals Guild | SCANNER-ANALYZERS-LANG-11-002 | Ingest optional runtime evidence (AssemblyLoad, Resolving, P/Invoke) via event listener harness; merge runtime edges with static/declared ones and attach reason codes/confidence. | Runtime listener service pluggable; fixtures record runtime edges; merged output shows combined reason set with confidence per edge. | +| SCANNER-ANALYZERS-LANG-11-004 | TODO | StellaOps.Scanner EPDR Guild, SBOM Service Guild | SCANNER-ANALYZERS-LANG-11-002 | Produce normalized observation export to Scanner writer: entrypoints + dependency edges + environment profiles (AOC compliant). Wire to SBOM service entrypoint tagging. | Analyzer writes observation records consumed by SBOM service tests; AOC compliance docs updated; determinism checked. | +| SCANNER-ANALYZERS-LANG-11-005 | TODO | StellaOps.Scanner EPDR Guild, QA Guild | SCANNER-ANALYZERS-LANG-11-004 | Add comprehensive fixtures/benchmarks covering framework-dependent, self-contained, single-file, trimmed, NativeAOT, multi-RID scenarios; include explain traces and perf benchmarks vs previous analyzer. | Fixtures stored under `fixtures/lang/dotnet/epdr`; determinism + perf thresholds validated; benchmark results documented. 
| diff --git a/src/StellaOps.Scanner.Analyzers.Lang.DotNet/manifest.json b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/manifest.json similarity index 96% rename from src/StellaOps.Scanner.Analyzers.Lang.DotNet/manifest.json rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/manifest.json index e8c3b8f8..33ba4243 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.DotNet/manifest.json +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/manifest.json @@ -1,23 +1,23 @@ -{ - "schemaVersion": "1.0", - "id": "stellaops.analyzer.lang.dotnet", - "displayName": "StellaOps .NET Analyzer (preview)", - "version": "0.1.0", - "requiresRestart": true, - "entryPoint": { - "type": "dotnet", - "assembly": "StellaOps.Scanner.Analyzers.Lang.DotNet.dll", - "typeName": "StellaOps.Scanner.Analyzers.Lang.DotNet.DotNetAnalyzerPlugin" - }, - "capabilities": [ - "language-analyzer", - "dotnet", - "nuget" - ], - "metadata": { - "org.stellaops.analyzer.language": "dotnet", - "org.stellaops.analyzer.kind": "language", - "org.stellaops.restart.required": "true", - "org.stellaops.analyzer.status": "preview" - } -} +{ + "schemaVersion": "1.0", + "id": "stellaops.analyzer.lang.dotnet", + "displayName": "StellaOps .NET Analyzer (preview)", + "version": "0.1.0", + "requiresRestart": true, + "entryPoint": { + "type": "dotnet", + "assembly": "StellaOps.Scanner.Analyzers.Lang.DotNet.dll", + "typeName": "StellaOps.Scanner.Analyzers.Lang.DotNet.DotNetAnalyzerPlugin" + }, + "capabilities": [ + "language-analyzer", + "dotnet", + "nuget" + ], + "metadata": { + "org.stellaops.analyzer.language": "dotnet", + "org.stellaops.analyzer.kind": "language", + "org.stellaops.restart.required": "true", + "org.stellaops.analyzer.status": "preview" + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Go/AGENTS.md b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/AGENTS.md similarity index 94% rename from src/StellaOps.Scanner.Analyzers.Lang.Go/AGENTS.md rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/AGENTS.md index f186fe9a..164cf928 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Go/AGENTS.md +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/AGENTS.md @@ -27,5 +27,5 @@ Build the Go analyzer plug-in that reads Go build info, module metadata, and DWA ## Testing & Artifacts - Golden fixtures for modules with/without VCS info, stripped binaries, cross-compiled variants. -- Benchmark comparison with competitor scanners to demonstrate speed/fidelity advantages (captured in `src/StellaOps.Bench/Scanner.Analyzers/lang/go/`). +- Benchmark comparison with competitor scanners to demonstrate speed/fidelity advantages (captured in `src/Bench/StellaOps.Bench/Scanner.Analyzers/lang/go/`). - ADR documenting heuristics and risk mitigation. 
diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Go/GlobalUsings.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/GlobalUsings.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Go/GlobalUsings.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/GlobalUsings.cs diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Go/GoAnalyzerPlugin.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/GoAnalyzerPlugin.cs similarity index 96% rename from src/StellaOps.Scanner.Analyzers.Lang.Go/GoAnalyzerPlugin.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/GoAnalyzerPlugin.cs index e5cc39ae..71d1d21a 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Go/GoAnalyzerPlugin.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/GoAnalyzerPlugin.cs @@ -1,17 +1,17 @@ -using System; -using StellaOps.Scanner.Analyzers.Lang.Plugin; - -namespace StellaOps.Scanner.Analyzers.Lang.Go; - -public sealed class GoAnalyzerPlugin : ILanguageAnalyzerPlugin -{ - public string Name => "StellaOps.Scanner.Analyzers.Lang.Go"; - - public bool IsAvailable(IServiceProvider services) => services is not null; - - public ILanguageAnalyzer CreateAnalyzer(IServiceProvider services) - { - ArgumentNullException.ThrowIfNull(services); - return new GoLanguageAnalyzer(); - } -} +using System; +using StellaOps.Scanner.Analyzers.Lang.Plugin; + +namespace StellaOps.Scanner.Analyzers.Lang.Go; + +public sealed class GoAnalyzerPlugin : ILanguageAnalyzerPlugin +{ + public string Name => "StellaOps.Scanner.Analyzers.Lang.Go"; + + public bool IsAvailable(IServiceProvider services) => services is not null; + + public ILanguageAnalyzer CreateAnalyzer(IServiceProvider services) + { + ArgumentNullException.ThrowIfNull(services); + return new GoLanguageAnalyzer(); + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Go/GoLanguageAnalyzer.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/GoLanguageAnalyzer.cs similarity index 97% rename from src/StellaOps.Scanner.Analyzers.Lang.Go/GoLanguageAnalyzer.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/GoLanguageAnalyzer.cs index 16d9a26c..d07c2877 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Go/GoLanguageAnalyzer.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/GoLanguageAnalyzer.cs @@ -1,385 +1,385 @@ -using System; -using System.Collections.Generic; -using System.IO; -using System.Security.Cryptography; -using System.Linq; -using StellaOps.Scanner.Analyzers.Lang.Go.Internal; - -namespace StellaOps.Scanner.Analyzers.Lang.Go; - -public sealed class GoLanguageAnalyzer : ILanguageAnalyzer -{ - public string Id => "golang"; - - public string DisplayName => "Go Analyzer"; - - public ValueTask AnalyzeAsync(LanguageAnalyzerContext context, LanguageComponentWriter writer, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(context); - ArgumentNullException.ThrowIfNull(writer); - - var candidatePaths = new List<string>(GoBinaryScanner.EnumerateCandidateFiles(context.RootPath)); - candidatePaths.Sort(StringComparer.Ordinal); - - var fallbackBinaries = new List<GoStrippedBinaryClassification>(); - - foreach (var absolutePath in candidatePaths) - { - cancellationToken.ThrowIfCancellationRequested(); - - if (!GoBuildInfoProvider.TryGetBuildInfo(absolutePath, out var buildInfo) || buildInfo is null) - { - if (GoBinaryScanner.TryClassifyStrippedBinary(absolutePath, out var classification)) - { - 
fallbackBinaries.Add(classification); - } - - continue; - } - - EmitComponents(buildInfo, context, writer); - } - - foreach (var fallback in fallbackBinaries) - { - cancellationToken.ThrowIfCancellationRequested(); - EmitFallbackComponent(fallback, context, writer); - } - - return ValueTask.CompletedTask; - } - - private void EmitComponents(GoBuildInfo buildInfo, LanguageAnalyzerContext context, LanguageComponentWriter writer) - { - var components = new List<GoModule> { buildInfo.MainModule }; - components.AddRange(buildInfo.Dependencies - .OrderBy(static module => module.Path, StringComparer.Ordinal) - .ThenBy(static module => module.Version, StringComparer.Ordinal)); - - string? binaryHash = null; - var binaryRelativePath = context.GetRelativePath(buildInfo.AbsoluteBinaryPath); - - foreach (var module in components) - { - var metadata = BuildMetadata(buildInfo, module, binaryRelativePath); - var evidence = BuildEvidence(buildInfo, module, binaryRelativePath, context, ref binaryHash); - var usedByEntrypoint = module.IsMain && context.UsageHints.IsPathUsed(buildInfo.AbsoluteBinaryPath); - - var purl = BuildPurl(module.Path, module.Version); - - if (!string.IsNullOrEmpty(purl)) - { - writer.AddFromPurl( - analyzerId: Id, - purl: purl, - name: module.Path, - version: module.Version, - type: "golang", - metadata: metadata, - evidence: evidence, - usedByEntrypoint: usedByEntrypoint); - } - else - { - var componentKey = BuildFallbackComponentKey(module, buildInfo, binaryRelativePath, ref binaryHash); - - writer.AddFromExplicitKey( - analyzerId: Id, - componentKey: componentKey, - purl: null, - name: module.Path, - version: module.Version, - type: "golang", - metadata: metadata, - evidence: evidence, - usedByEntrypoint: usedByEntrypoint); - } - } - } - - private static IEnumerable<KeyValuePair<string, string?>> BuildMetadata(GoBuildInfo buildInfo, GoModule module, string binaryRelativePath) - { - var entries = new List<KeyValuePair<string, string?>>(16) - { - new("modulePath", module.Path), - new("binaryPath", string.IsNullOrEmpty(binaryRelativePath) ? "." 
: binaryRelativePath), - }; - - if (!string.IsNullOrEmpty(module.Version)) - { - entries.Add(new KeyValuePair<string, string?>("moduleVersion", module.Version)); - } - - if (!string.IsNullOrEmpty(module.Sum)) - { - entries.Add(new KeyValuePair<string, string?>("moduleSum", module.Sum)); - } - - if (module.Replacement is not null) - { - entries.Add(new KeyValuePair<string, string?>("replacedBy.path", module.Replacement.Path)); - - if (!string.IsNullOrEmpty(module.Replacement.Version)) - { - entries.Add(new KeyValuePair<string, string?>("replacedBy.version", module.Replacement.Version)); - } - - if (!string.IsNullOrEmpty(module.Replacement.Sum)) - { - entries.Add(new KeyValuePair<string, string?>("replacedBy.sum", module.Replacement.Sum)); - } - } - - if (module.IsMain) - { - entries.Add(new KeyValuePair<string, string?>("go.version", buildInfo.GoVersion)); - entries.Add(new KeyValuePair<string, string?>("modulePath.main", buildInfo.ModulePath)); - - foreach (var setting in buildInfo.Settings) - { - var key = $"build.{setting.Key}"; - if (!entries.Any(pair => string.Equals(pair.Key, key, StringComparison.Ordinal))) - { - entries.Add(new KeyValuePair<string, string?>(key, setting.Value)); - } - } - - if (buildInfo.DwarfMetadata is { } dwarf) - { - AddIfMissing(entries, "build.vcs", dwarf.VcsSystem); - AddIfMissing(entries, "build.vcs.revision", dwarf.Revision); - AddIfMissing(entries, "build.vcs.modified", dwarf.Modified?.ToString()?.ToLowerInvariant()); - AddIfMissing(entries, "build.vcs.time", dwarf.TimestampUtc); - } - } - - entries.Sort(static (left, right) => string.CompareOrdinal(left.Key, right.Key)); - return entries; - } - - private void EmitFallbackComponent(GoStrippedBinaryClassification strippedBinary, LanguageAnalyzerContext context, LanguageComponentWriter writer) - { - var relativePath = context.GetRelativePath(strippedBinary.AbsolutePath); - var normalizedRelative = string.IsNullOrEmpty(relativePath) ? "." : relativePath; - var usedByEntrypoint = context.UsageHints.IsPathUsed(strippedBinary.AbsolutePath); - - var binaryHash = ComputeBinaryHash(strippedBinary.AbsolutePath); - - var metadata = new List<KeyValuePair<string, string?>> - { - new("binaryPath", normalizedRelative), - new("languageHint", "golang"), - new("provenance", "binary"), - }; - - if (!string.IsNullOrEmpty(binaryHash)) - { - metadata.Add(new KeyValuePair<string, string?>("binary.sha256", binaryHash)); - } - - if (!string.IsNullOrEmpty(strippedBinary.GoVersionHint)) - { - metadata.Add(new KeyValuePair<string, string?>("go.version.hint", strippedBinary.GoVersionHint)); - } - - metadata.Sort(static (left, right) => string.CompareOrdinal(left.Key, right.Key)); - - var evidence = new List<LanguageComponentEvidence> - { - new( - LanguageEvidenceKind.File, - "binary", - normalizedRelative, - null, - string.IsNullOrEmpty(binaryHash) ? 
null : binaryHash), - }; - - var detectionSource = strippedBinary.Indicator switch - { - GoStrippedBinaryIndicator.BuildId => "build-id", - GoStrippedBinaryIndicator.GoRuntimeMarkers => "runtime-markers", - _ => null, - }; - - if (!string.IsNullOrEmpty(detectionSource)) - { - evidence.Add(new LanguageComponentEvidence( - LanguageEvidenceKind.Metadata, - "go.heuristic", - "classification", - detectionSource, - null)); - } - - evidence.Sort(static (left, right) => string.CompareOrdinal(left.ComparisonKey, right.ComparisonKey)); - - var componentName = Path.GetFileName(strippedBinary.AbsolutePath); - if (string.IsNullOrWhiteSpace(componentName)) - { - componentName = "golang-binary"; - } - - var componentKey = string.IsNullOrEmpty(binaryHash) - ? $"golang::bin::{normalizedRelative}" - : $"golang::bin::sha256:{binaryHash}"; - - writer.AddFromExplicitKey( - analyzerId: Id, - componentKey: componentKey, - purl: null, - name: componentName, - version: null, - type: "bin", - metadata: metadata, - evidence: evidence, - usedByEntrypoint: usedByEntrypoint); - - GoAnalyzerMetrics.RecordHeuristic(strippedBinary.Indicator, !string.IsNullOrEmpty(strippedBinary.GoVersionHint)); - } - - private static IEnumerable<LanguageComponentEvidence> BuildEvidence(GoBuildInfo buildInfo, GoModule module, string binaryRelativePath, LanguageAnalyzerContext context, ref string? binaryHash) - { - var evidence = new List<LanguageComponentEvidence> - { - new( - LanguageEvidenceKind.Metadata, - "go.buildinfo", - $"module:{module.Path}", - module.Version ?? string.Empty, - module.Sum) - }; - - if (module.IsMain) - { - foreach (var setting in buildInfo.Settings) - { - evidence.Add(new LanguageComponentEvidence( - LanguageEvidenceKind.Metadata, - "go.buildinfo.setting", - setting.Key, - setting.Value, - null)); - } - - if (buildInfo.DwarfMetadata is { } dwarf) - { - if (!string.IsNullOrWhiteSpace(dwarf.VcsSystem)) - { - evidence.Add(new LanguageComponentEvidence( - LanguageEvidenceKind.Metadata, - "go.dwarf", - "vcs", - dwarf.VcsSystem, - null)); - } - - if (!string.IsNullOrWhiteSpace(dwarf.Revision)) - { - evidence.Add(new LanguageComponentEvidence( - LanguageEvidenceKind.Metadata, - "go.dwarf", - "vcs.revision", - dwarf.Revision, - null)); - } - - if (dwarf.Modified.HasValue) - { - evidence.Add(new LanguageComponentEvidence( - LanguageEvidenceKind.Metadata, - "go.dwarf", - "vcs.modified", - dwarf.Modified.Value ? "true" : "false", - null)); - } - - if (!string.IsNullOrWhiteSpace(dwarf.TimestampUtc)) - { - evidence.Add(new LanguageComponentEvidence( - LanguageEvidenceKind.Metadata, - "go.dwarf", - "vcs.time", - dwarf.TimestampUtc, - null)); - } - } - } - - // Attach binary hash evidence for fallback components without purl. - if (string.IsNullOrEmpty(module.Version)) - { - binaryHash ??= ComputeBinaryHash(buildInfo.AbsoluteBinaryPath); - if (!string.IsNullOrEmpty(binaryHash)) - { - evidence.Add(new LanguageComponentEvidence( - LanguageEvidenceKind.File, - "binary", - string.IsNullOrEmpty(binaryRelativePath) ? "." : binaryRelativePath, - null, - binaryHash)); - } - } - - evidence.Sort(static (left, right) => string.CompareOrdinal(left.ComparisonKey, right.ComparisonKey)); - return evidence; - } - - private static string? BuildPurl(string path, string? 
version) - { - if (string.IsNullOrWhiteSpace(path) || string.IsNullOrWhiteSpace(version)) - { - return null; - } - - var cleanedPath = path.Trim(); - var cleanedVersion = version.Trim(); - var encodedVersion = Uri.EscapeDataString(cleanedVersion); - return $"pkg:golang/{cleanedPath}@{encodedVersion}"; - } - - private static string BuildFallbackComponentKey(GoModule module, GoBuildInfo buildInfo, string binaryRelativePath, ref string? binaryHash) - { - var relative = string.IsNullOrEmpty(binaryRelativePath) ? "." : binaryRelativePath; - binaryHash ??= ComputeBinaryHash(buildInfo.AbsoluteBinaryPath); - if (!string.IsNullOrEmpty(binaryHash)) - { - return $"golang::module:{module.Path}::{relative}::{binaryHash}"; - } - - return $"golang::module:{module.Path}::{relative}"; - } - - private static void AddIfMissing(List<KeyValuePair<string, string?>> entries, string key, string? value) - { - if (string.IsNullOrWhiteSpace(key) || string.IsNullOrWhiteSpace(value)) - { - return; - } - - if (entries.Any(entry => string.Equals(entry.Key, key, StringComparison.Ordinal))) - { - return; - } - - entries.Add(new KeyValuePair<string, string?>(key, value)); - } - - private static string? ComputeBinaryHash(string path) - { - try - { - using var stream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read); - using var sha = SHA256.Create(); - var hash = sha.ComputeHash(stream); - return Convert.ToHexString(hash).ToLowerInvariant(); - } - catch (IOException) - { - return null; - } - catch (UnauthorizedAccessException) - { - return null; - } - } -} +using System; +using System.Collections.Generic; +using System.IO; +using System.Security.Cryptography; +using System.Linq; +using StellaOps.Scanner.Analyzers.Lang.Go.Internal; + +namespace StellaOps.Scanner.Analyzers.Lang.Go; + +public sealed class GoLanguageAnalyzer : ILanguageAnalyzer +{ + public string Id => "golang"; + + public string DisplayName => "Go Analyzer"; + + public ValueTask AnalyzeAsync(LanguageAnalyzerContext context, LanguageComponentWriter writer, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(context); + ArgumentNullException.ThrowIfNull(writer); + + var candidatePaths = new List<string>(GoBinaryScanner.EnumerateCandidateFiles(context.RootPath)); + candidatePaths.Sort(StringComparer.Ordinal); + + var fallbackBinaries = new List<GoStrippedBinaryClassification>(); + + foreach (var absolutePath in candidatePaths) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (!GoBuildInfoProvider.TryGetBuildInfo(absolutePath, out var buildInfo) || buildInfo is null) + { + if (GoBinaryScanner.TryClassifyStrippedBinary(absolutePath, out var classification)) + { + fallbackBinaries.Add(classification); + } + + continue; + } + + EmitComponents(buildInfo, context, writer); + } + + foreach (var fallback in fallbackBinaries) + { + cancellationToken.ThrowIfCancellationRequested(); + EmitFallbackComponent(fallback, context, writer); + } + + return ValueTask.CompletedTask; + } + + private void EmitComponents(GoBuildInfo buildInfo, LanguageAnalyzerContext context, LanguageComponentWriter writer) + { + var components = new List<GoModule> { buildInfo.MainModule }; + components.AddRange(buildInfo.Dependencies + .OrderBy(static module => module.Path, StringComparer.Ordinal) + .ThenBy(static module => module.Version, StringComparer.Ordinal)); + + string? 
binaryHash = null; + var binaryRelativePath = context.GetRelativePath(buildInfo.AbsoluteBinaryPath); + + foreach (var module in components) + { + var metadata = BuildMetadata(buildInfo, module, binaryRelativePath); + var evidence = BuildEvidence(buildInfo, module, binaryRelativePath, context, ref binaryHash); + var usedByEntrypoint = module.IsMain && context.UsageHints.IsPathUsed(buildInfo.AbsoluteBinaryPath); + + var purl = BuildPurl(module.Path, module.Version); + + if (!string.IsNullOrEmpty(purl)) + { + writer.AddFromPurl( + analyzerId: Id, + purl: purl, + name: module.Path, + version: module.Version, + type: "golang", + metadata: metadata, + evidence: evidence, + usedByEntrypoint: usedByEntrypoint); + } + else + { + var componentKey = BuildFallbackComponentKey(module, buildInfo, binaryRelativePath, ref binaryHash); + + writer.AddFromExplicitKey( + analyzerId: Id, + componentKey: componentKey, + purl: null, + name: module.Path, + version: module.Version, + type: "golang", + metadata: metadata, + evidence: evidence, + usedByEntrypoint: usedByEntrypoint); + } + } + } + + private static IEnumerable<KeyValuePair<string, string?>> BuildMetadata(GoBuildInfo buildInfo, GoModule module, string binaryRelativePath) + { + var entries = new List<KeyValuePair<string, string?>>(16) + { + new("modulePath", module.Path), + new("binaryPath", string.IsNullOrEmpty(binaryRelativePath) ? "." : binaryRelativePath), + }; + + if (!string.IsNullOrEmpty(module.Version)) + { + entries.Add(new KeyValuePair<string, string?>("moduleVersion", module.Version)); + } + + if (!string.IsNullOrEmpty(module.Sum)) + { + entries.Add(new KeyValuePair<string, string?>("moduleSum", module.Sum)); + } + + if (module.Replacement is not null) + { + entries.Add(new KeyValuePair<string, string?>("replacedBy.path", module.Replacement.Path)); + + if (!string.IsNullOrEmpty(module.Replacement.Version)) + { + entries.Add(new KeyValuePair<string, string?>("replacedBy.version", module.Replacement.Version)); + } + + if (!string.IsNullOrEmpty(module.Replacement.Sum)) + { + entries.Add(new KeyValuePair<string, string?>("replacedBy.sum", module.Replacement.Sum)); + } + } + + if (module.IsMain) + { + entries.Add(new KeyValuePair<string, string?>("go.version", buildInfo.GoVersion)); + entries.Add(new KeyValuePair<string, string?>("modulePath.main", buildInfo.ModulePath)); + + foreach (var setting in buildInfo.Settings) + { + var key = $"build.{setting.Key}"; + if (!entries.Any(pair => string.Equals(pair.Key, key, StringComparison.Ordinal))) + { + entries.Add(new KeyValuePair<string, string?>(key, setting.Value)); + } + } + + if (buildInfo.DwarfMetadata is { } dwarf) + { + AddIfMissing(entries, "build.vcs", dwarf.VcsSystem); + AddIfMissing(entries, "build.vcs.revision", dwarf.Revision); + AddIfMissing(entries, "build.vcs.modified", dwarf.Modified?.ToString()?.ToLowerInvariant()); + AddIfMissing(entries, "build.vcs.time", dwarf.TimestampUtc); + } + } + + entries.Sort(static (left, right) => string.CompareOrdinal(left.Key, right.Key)); + return entries; + } + + private void EmitFallbackComponent(GoStrippedBinaryClassification strippedBinary, LanguageAnalyzerContext context, LanguageComponentWriter writer) + { + var relativePath = context.GetRelativePath(strippedBinary.AbsolutePath); + var normalizedRelative = string.IsNullOrEmpty(relativePath) ? "." 
: relativePath; + var usedByEntrypoint = context.UsageHints.IsPathUsed(strippedBinary.AbsolutePath); + + var binaryHash = ComputeBinaryHash(strippedBinary.AbsolutePath); + + var metadata = new List<KeyValuePair<string, string?>> + { + new("binaryPath", normalizedRelative), + new("languageHint", "golang"), + new("provenance", "binary"), + }; + + if (!string.IsNullOrEmpty(binaryHash)) + { + metadata.Add(new KeyValuePair<string, string?>("binary.sha256", binaryHash)); + } + + if (!string.IsNullOrEmpty(strippedBinary.GoVersionHint)) + { + metadata.Add(new KeyValuePair<string, string?>("go.version.hint", strippedBinary.GoVersionHint)); + } + + metadata.Sort(static (left, right) => string.CompareOrdinal(left.Key, right.Key)); + + var evidence = new List<LanguageComponentEvidence> + { + new( + LanguageEvidenceKind.File, + "binary", + normalizedRelative, + null, + string.IsNullOrEmpty(binaryHash) ? null : binaryHash), + }; + + var detectionSource = strippedBinary.Indicator switch + { + GoStrippedBinaryIndicator.BuildId => "build-id", + GoStrippedBinaryIndicator.GoRuntimeMarkers => "runtime-markers", + _ => null, + }; + + if (!string.IsNullOrEmpty(detectionSource)) + { + evidence.Add(new LanguageComponentEvidence( + LanguageEvidenceKind.Metadata, + "go.heuristic", + "classification", + detectionSource, + null)); + } + + evidence.Sort(static (left, right) => string.CompareOrdinal(left.ComparisonKey, right.ComparisonKey)); + + var componentName = Path.GetFileName(strippedBinary.AbsolutePath); + if (string.IsNullOrWhiteSpace(componentName)) + { + componentName = "golang-binary"; + } + + var componentKey = string.IsNullOrEmpty(binaryHash) + ? $"golang::bin::{normalizedRelative}" + : $"golang::bin::sha256:{binaryHash}"; + + writer.AddFromExplicitKey( + analyzerId: Id, + componentKey: componentKey, + purl: null, + name: componentName, + version: null, + type: "bin", + metadata: metadata, + evidence: evidence, + usedByEntrypoint: usedByEntrypoint); + + GoAnalyzerMetrics.RecordHeuristic(strippedBinary.Indicator, !string.IsNullOrEmpty(strippedBinary.GoVersionHint)); + } + + private static IEnumerable<LanguageComponentEvidence> BuildEvidence(GoBuildInfo buildInfo, GoModule module, string binaryRelativePath, LanguageAnalyzerContext context, ref string? binaryHash) + { + var evidence = new List<LanguageComponentEvidence> + { + new( + LanguageEvidenceKind.Metadata, + "go.buildinfo", + $"module:{module.Path}", + module.Version ?? string.Empty, + module.Sum) + }; + + if (module.IsMain) + { + foreach (var setting in buildInfo.Settings) + { + evidence.Add(new LanguageComponentEvidence( + LanguageEvidenceKind.Metadata, + "go.buildinfo.setting", + setting.Key, + setting.Value, + null)); + } + + if (buildInfo.DwarfMetadata is { } dwarf) + { + if (!string.IsNullOrWhiteSpace(dwarf.VcsSystem)) + { + evidence.Add(new LanguageComponentEvidence( + LanguageEvidenceKind.Metadata, + "go.dwarf", + "vcs", + dwarf.VcsSystem, + null)); + } + + if (!string.IsNullOrWhiteSpace(dwarf.Revision)) + { + evidence.Add(new LanguageComponentEvidence( + LanguageEvidenceKind.Metadata, + "go.dwarf", + "vcs.revision", + dwarf.Revision, + null)); + } + + if (dwarf.Modified.HasValue) + { + evidence.Add(new LanguageComponentEvidence( + LanguageEvidenceKind.Metadata, + "go.dwarf", + "vcs.modified", + dwarf.Modified.Value ? 
"true" : "false", + null)); + } + + if (!string.IsNullOrWhiteSpace(dwarf.TimestampUtc)) + { + evidence.Add(new LanguageComponentEvidence( + LanguageEvidenceKind.Metadata, + "go.dwarf", + "vcs.time", + dwarf.TimestampUtc, + null)); + } + } + } + + // Attach binary hash evidence for fallback components without purl. + if (string.IsNullOrEmpty(module.Version)) + { + binaryHash ??= ComputeBinaryHash(buildInfo.AbsoluteBinaryPath); + if (!string.IsNullOrEmpty(binaryHash)) + { + evidence.Add(new LanguageComponentEvidence( + LanguageEvidenceKind.File, + "binary", + string.IsNullOrEmpty(binaryRelativePath) ? "." : binaryRelativePath, + null, + binaryHash)); + } + } + + evidence.Sort(static (left, right) => string.CompareOrdinal(left.ComparisonKey, right.ComparisonKey)); + return evidence; + } + + private static string? BuildPurl(string path, string? version) + { + if (string.IsNullOrWhiteSpace(path) || string.IsNullOrWhiteSpace(version)) + { + return null; + } + + var cleanedPath = path.Trim(); + var cleanedVersion = version.Trim(); + var encodedVersion = Uri.EscapeDataString(cleanedVersion); + return $"pkg:golang/{cleanedPath}@{encodedVersion}"; + } + + private static string BuildFallbackComponentKey(GoModule module, GoBuildInfo buildInfo, string binaryRelativePath, ref string? binaryHash) + { + var relative = string.IsNullOrEmpty(binaryRelativePath) ? "." : binaryRelativePath; + binaryHash ??= ComputeBinaryHash(buildInfo.AbsoluteBinaryPath); + if (!string.IsNullOrEmpty(binaryHash)) + { + return $"golang::module:{module.Path}::{relative}::{binaryHash}"; + } + + return $"golang::module:{module.Path}::{relative}"; + } + + private static void AddIfMissing(List<KeyValuePair<string, string?>> entries, string key, string? value) + { + if (string.IsNullOrWhiteSpace(key) || string.IsNullOrWhiteSpace(value)) + { + return; + } + + if (entries.Any(entry => string.Equals(entry.Key, key, StringComparison.Ordinal))) + { + return; + } + + entries.Add(new KeyValuePair<string, string?>(key, value)); + } + + private static string? 
ComputeBinaryHash(string path) + { + try + { + using var stream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read); + using var sha = SHA256.Create(); + var hash = sha.ComputeHash(stream); + return Convert.ToHexString(hash).ToLowerInvariant(); + } + catch (IOException) + { + return null; + } + catch (UnauthorizedAccessException) + { + return null; + } + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoAnalyzerMetrics.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoAnalyzerMetrics.cs similarity index 97% rename from src/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoAnalyzerMetrics.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoAnalyzerMetrics.cs index 801c1d6a..aed5d6c5 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoAnalyzerMetrics.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoAnalyzerMetrics.cs @@ -1,30 +1,30 @@ -using System.Collections.Generic; -using System.Diagnostics.Metrics; - -namespace StellaOps.Scanner.Analyzers.Lang.Go.Internal; - -internal static class GoAnalyzerMetrics -{ - private static readonly Meter Meter = new("StellaOps.Scanner.Analyzers.Lang.Go", "1.0.0"); - - private static readonly Counter<long> HeuristicCounter = Meter.CreateCounter<long>( - "scanner_analyzer_golang_heuristic_total", - unit: "components", - description: "Counts Go components emitted via heuristic fallbacks when build metadata is missing."); - - public static void RecordHeuristic(GoStrippedBinaryIndicator indicator, bool hasVersionHint) - { - HeuristicCounter.Add( - 1, - new KeyValuePair<string, object?>("indicator", NormalizeIndicator(indicator)), - new KeyValuePair<string, object?>("version_hint", hasVersionHint ? "present" : "absent")); - } - - private static string NormalizeIndicator(GoStrippedBinaryIndicator indicator) - => indicator switch - { - GoStrippedBinaryIndicator.BuildId => "build-id", - GoStrippedBinaryIndicator.GoRuntimeMarkers => "runtime-markers", - _ => "unknown", - }; -} +using System.Collections.Generic; +using System.Diagnostics.Metrics; + +namespace StellaOps.Scanner.Analyzers.Lang.Go.Internal; + +internal static class GoAnalyzerMetrics +{ + private static readonly Meter Meter = new("StellaOps.Scanner.Analyzers.Lang.Go", "1.0.0"); + + private static readonly Counter<long> HeuristicCounter = Meter.CreateCounter<long>( + "scanner_analyzer_golang_heuristic_total", + unit: "components", + description: "Counts Go components emitted via heuristic fallbacks when build metadata is missing."); + + public static void RecordHeuristic(GoStrippedBinaryIndicator indicator, bool hasVersionHint) + { + HeuristicCounter.Add( + 1, + new KeyValuePair<string, object?>("indicator", NormalizeIndicator(indicator)), + new KeyValuePair<string, object?>("version_hint", hasVersionHint ? 
"present" : "absent")); + } + + private static string NormalizeIndicator(GoStrippedBinaryIndicator indicator) + => indicator switch + { + GoStrippedBinaryIndicator.BuildId => "build-id", + GoStrippedBinaryIndicator.GoRuntimeMarkers => "runtime-markers", + _ => "unknown", + }; +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoBinaryScanner.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoBinaryScanner.cs similarity index 96% rename from src/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoBinaryScanner.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoBinaryScanner.cs index cc1f5774..954a691f 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoBinaryScanner.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoBinaryScanner.cs @@ -1,264 +1,264 @@ -using System; -using System.Collections.Generic; -using System.Buffers; -using System.IO; -using System.Text; - -namespace StellaOps.Scanner.Analyzers.Lang.Go.Internal; - -internal static class GoBinaryScanner -{ - private static readonly ReadOnlyMemory<byte> BuildInfoMagic = new byte[] - { - 0xFF, (byte)' ', (byte)'G', (byte)'o', (byte)' ', (byte)'b', (byte)'u', (byte)'i', (byte)'l', (byte)'d', (byte)'i', (byte)'n', (byte)'f', (byte)':' - }; - - private static readonly ReadOnlyMemory<byte> BuildIdMarker = Encoding.ASCII.GetBytes("Go build ID:"); - private static readonly ReadOnlyMemory<byte> GoPclnTabMarker = Encoding.ASCII.GetBytes(".gopclntab"); - private static readonly ReadOnlyMemory<byte> GoVersionPrefix = Encoding.ASCII.GetBytes("go1."); - - public static IEnumerable<string> EnumerateCandidateFiles(string rootPath) - { - var enumeration = new EnumerationOptions - { - RecurseSubdirectories = true, - IgnoreInaccessible = true, - AttributesToSkip = FileAttributes.Device | FileAttributes.ReparsePoint, - MatchCasing = MatchCasing.CaseSensitive, - }; - - foreach (var path in Directory.EnumerateFiles(rootPath, "*", enumeration)) - { - yield return path; - } - } - - public static bool TryReadBuildInfo(string filePath, out string? goVersion, out string? 
moduleData) - { - goVersion = null; - moduleData = null; - - FileInfo info; - try - { - info = new FileInfo(filePath); - if (!info.Exists || info.Length < 64 || info.Length > 128 * 1024 * 1024) - { - return false; - } - } - catch (IOException) - { - return false; - } - catch (UnauthorizedAccessException) - { - return false; - } - catch (System.Security.SecurityException) - { - return false; - } - - var length = info.Length; - if (length <= 0) - { - return false; - } - - var inspectLength = (int)Math.Min(length, int.MaxValue); - var buffer = ArrayPool<byte>.Shared.Rent(inspectLength); - - try - { - using var stream = new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.Read); - var totalRead = 0; - - while (totalRead < inspectLength) - { - var read = stream.Read(buffer, totalRead, inspectLength - totalRead); - if (read <= 0) - { - break; - } - - totalRead += read; - } - - if (totalRead < 64) - { - return false; - } - - var span = new ReadOnlySpan<byte>(buffer, 0, totalRead); - var offset = span.IndexOf(BuildInfoMagic.Span); - if (offset < 0) - { - return false; - } - - var view = span[offset..]; - return GoBuildInfoDecoder.TryDecode(view, out goVersion, out moduleData); - } - catch (IOException) - { - return false; - } - catch (UnauthorizedAccessException) - { - return false; - } - finally - { - Array.Clear(buffer, 0, inspectLength); - ArrayPool<byte>.Shared.Return(buffer); - } - } - - public static bool TryClassifyStrippedBinary(string filePath, out GoStrippedBinaryClassification classification) - { - classification = default; - - FileInfo fileInfo; - try - { - fileInfo = new FileInfo(filePath); - if (!fileInfo.Exists) - { - return false; - } - } - catch (IOException) - { - return false; - } - catch (UnauthorizedAccessException) - { - return false; - } - catch (System.Security.SecurityException) - { - return false; - } - - var length = fileInfo.Length; - if (length < 128) - { - return false; - } - - const int WindowSize = 128 * 1024; - var readSize = (int)Math.Min(length, WindowSize); - var buffer = ArrayPool<byte>.Shared.Rent(readSize); - - try - { - using var stream = new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.Read); - - var headRead = stream.Read(buffer, 0, readSize); - if (headRead <= 0) - { - return false; - } - - var headSpan = new ReadOnlySpan<byte>(buffer, 0, headRead); - var hasBuildId = headSpan.IndexOf(BuildIdMarker.Span) >= 0; - var hasPcln = headSpan.IndexOf(GoPclnTabMarker.Span) >= 0; - var goVersion = ExtractGoVersion(headSpan); - - if (length > headRead) - { - var tailSize = Math.Min(readSize, (int)length); - if (tailSize > 0) - { - stream.Seek(-tailSize, SeekOrigin.End); - var tailRead = stream.Read(buffer, 0, tailSize); - if (tailRead > 0) - { - var tailSpan = new ReadOnlySpan<byte>(buffer, 0, tailRead); - hasBuildId |= tailSpan.IndexOf(BuildIdMarker.Span) >= 0; - hasPcln |= tailSpan.IndexOf(GoPclnTabMarker.Span) >= 0; - goVersion ??= ExtractGoVersion(tailSpan); - } - } - } - - if (hasBuildId) - { - classification = new GoStrippedBinaryClassification( - filePath, - GoStrippedBinaryIndicator.BuildId, - goVersion); - return true; - } - - if (hasPcln && !string.IsNullOrEmpty(goVersion)) - { - classification = new GoStrippedBinaryClassification( - filePath, - GoStrippedBinaryIndicator.GoRuntimeMarkers, - goVersion); - return true; - } - - return false; - } - finally - { - Array.Clear(buffer, 0, readSize); - ArrayPool<byte>.Shared.Return(buffer); - } - } - - private static string? 
ExtractGoVersion(ReadOnlySpan<byte> data) - { - var prefix = GoVersionPrefix.Span; - var span = data; - - while (!span.IsEmpty) - { - var index = span.IndexOf(prefix); - if (index < 0) - { - return null; - } - - var absoluteIndex = data.Length - span.Length + index; - - if (absoluteIndex > 0) - { - var previous = (char)data[absoluteIndex - 1]; - if (char.IsLetterOrDigit(previous)) - { - span = span[(index + 1)..]; - continue; - } - } - - var start = absoluteIndex; - var end = start + prefix.Length; - - while (end < data.Length && IsVersionCharacter((char)data[end])) - { - end++; - } - - if (end - start <= prefix.Length) - { - span = span[(index + 1)..]; - continue; - } - - var candidate = data[start..end]; - return Encoding.ASCII.GetString(candidate); - } - - return null; - } - - private static bool IsVersionCharacter(char value) - => (value >= '0' && value <= '9') - || (value >= 'a' && value <= 'z') - || (value >= 'A' && value <= 'Z') - || value is '.' or '-' or '+' or '_'; -} +using System; +using System.Collections.Generic; +using System.Buffers; +using System.IO; +using System.Text; + +namespace StellaOps.Scanner.Analyzers.Lang.Go.Internal; + +internal static class GoBinaryScanner +{ + private static readonly ReadOnlyMemory<byte> BuildInfoMagic = new byte[] + { + 0xFF, (byte)' ', (byte)'G', (byte)'o', (byte)' ', (byte)'b', (byte)'u', (byte)'i', (byte)'l', (byte)'d', (byte)'i', (byte)'n', (byte)'f', (byte)':' + }; + + private static readonly ReadOnlyMemory<byte> BuildIdMarker = Encoding.ASCII.GetBytes("Go build ID:"); + private static readonly ReadOnlyMemory<byte> GoPclnTabMarker = Encoding.ASCII.GetBytes(".gopclntab"); + private static readonly ReadOnlyMemory<byte> GoVersionPrefix = Encoding.ASCII.GetBytes("go1."); + + public static IEnumerable<string> EnumerateCandidateFiles(string rootPath) + { + var enumeration = new EnumerationOptions + { + RecurseSubdirectories = true, + IgnoreInaccessible = true, + AttributesToSkip = FileAttributes.Device | FileAttributes.ReparsePoint, + MatchCasing = MatchCasing.CaseSensitive, + }; + + foreach (var path in Directory.EnumerateFiles(rootPath, "*", enumeration)) + { + yield return path; + } + } + + public static bool TryReadBuildInfo(string filePath, out string? goVersion, out string? 
moduleData) + { + goVersion = null; + moduleData = null; + + FileInfo info; + try + { + info = new FileInfo(filePath); + if (!info.Exists || info.Length < 64 || info.Length > 128 * 1024 * 1024) + { + return false; + } + } + catch (IOException) + { + return false; + } + catch (UnauthorizedAccessException) + { + return false; + } + catch (System.Security.SecurityException) + { + return false; + } + + var length = info.Length; + if (length <= 0) + { + return false; + } + + var inspectLength = (int)Math.Min(length, int.MaxValue); + var buffer = ArrayPool<byte>.Shared.Rent(inspectLength); + + try + { + using var stream = new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.Read); + var totalRead = 0; + + while (totalRead < inspectLength) + { + var read = stream.Read(buffer, totalRead, inspectLength - totalRead); + if (read <= 0) + { + break; + } + + totalRead += read; + } + + if (totalRead < 64) + { + return false; + } + + var span = new ReadOnlySpan<byte>(buffer, 0, totalRead); + var offset = span.IndexOf(BuildInfoMagic.Span); + if (offset < 0) + { + return false; + } + + var view = span[offset..]; + return GoBuildInfoDecoder.TryDecode(view, out goVersion, out moduleData); + } + catch (IOException) + { + return false; + } + catch (UnauthorizedAccessException) + { + return false; + } + finally + { + Array.Clear(buffer, 0, inspectLength); + ArrayPool<byte>.Shared.Return(buffer); + } + } + + public static bool TryClassifyStrippedBinary(string filePath, out GoStrippedBinaryClassification classification) + { + classification = default; + + FileInfo fileInfo; + try + { + fileInfo = new FileInfo(filePath); + if (!fileInfo.Exists) + { + return false; + } + } + catch (IOException) + { + return false; + } + catch (UnauthorizedAccessException) + { + return false; + } + catch (System.Security.SecurityException) + { + return false; + } + + var length = fileInfo.Length; + if (length < 128) + { + return false; + } + + const int WindowSize = 128 * 1024; + var readSize = (int)Math.Min(length, WindowSize); + var buffer = ArrayPool<byte>.Shared.Rent(readSize); + + try + { + using var stream = new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.Read); + + var headRead = stream.Read(buffer, 0, readSize); + if (headRead <= 0) + { + return false; + } + + var headSpan = new ReadOnlySpan<byte>(buffer, 0, headRead); + var hasBuildId = headSpan.IndexOf(BuildIdMarker.Span) >= 0; + var hasPcln = headSpan.IndexOf(GoPclnTabMarker.Span) >= 0; + var goVersion = ExtractGoVersion(headSpan); + + if (length > headRead) + { + var tailSize = Math.Min(readSize, (int)length); + if (tailSize > 0) + { + stream.Seek(-tailSize, SeekOrigin.End); + var tailRead = stream.Read(buffer, 0, tailSize); + if (tailRead > 0) + { + var tailSpan = new ReadOnlySpan<byte>(buffer, 0, tailRead); + hasBuildId |= tailSpan.IndexOf(BuildIdMarker.Span) >= 0; + hasPcln |= tailSpan.IndexOf(GoPclnTabMarker.Span) >= 0; + goVersion ??= ExtractGoVersion(tailSpan); + } + } + } + + if (hasBuildId) + { + classification = new GoStrippedBinaryClassification( + filePath, + GoStrippedBinaryIndicator.BuildId, + goVersion); + return true; + } + + if (hasPcln && !string.IsNullOrEmpty(goVersion)) + { + classification = new GoStrippedBinaryClassification( + filePath, + GoStrippedBinaryIndicator.GoRuntimeMarkers, + goVersion); + return true; + } + + return false; + } + finally + { + Array.Clear(buffer, 0, readSize); + ArrayPool<byte>.Shared.Return(buffer); + } + } + + private static string? 
ExtractGoVersion(ReadOnlySpan<byte> data) + { + var prefix = GoVersionPrefix.Span; + var span = data; + + while (!span.IsEmpty) + { + var index = span.IndexOf(prefix); + if (index < 0) + { + return null; + } + + var absoluteIndex = data.Length - span.Length + index; + + if (absoluteIndex > 0) + { + var previous = (char)data[absoluteIndex - 1]; + if (char.IsLetterOrDigit(previous)) + { + span = span[(index + 1)..]; + continue; + } + } + + var start = absoluteIndex; + var end = start + prefix.Length; + + while (end < data.Length && IsVersionCharacter((char)data[end])) + { + end++; + } + + if (end - start <= prefix.Length) + { + span = span[(index + 1)..]; + continue; + } + + var candidate = data[start..end]; + return Encoding.ASCII.GetString(candidate); + } + + return null; + } + + private static bool IsVersionCharacter(char value) + => (value >= '0' && value <= '9') + || (value >= 'a' && value <= 'z') + || (value >= 'A' && value <= 'Z') + || value is '.' or '-' or '+' or '_'; +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoBuildInfo.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoBuildInfo.cs similarity index 96% rename from src/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoBuildInfo.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoBuildInfo.cs index 883c018e..a58fcdcd 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoBuildInfo.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoBuildInfo.cs @@ -1,80 +1,80 @@ -using System; -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Linq; - -namespace StellaOps.Scanner.Analyzers.Lang.Go.Internal; - -internal sealed class GoBuildInfo -{ - public GoBuildInfo( - string goVersion, - string absoluteBinaryPath, - string modulePath, - GoModule mainModule, - IEnumerable<GoModule> dependencies, - IEnumerable<KeyValuePair<string, string?>> settings, - GoDwarfMetadata? dwarfMetadata = null) - : this( - goVersion, - absoluteBinaryPath, - modulePath, - mainModule, - dependencies? - .Where(static module => module is not null) - .ToImmutableArray() - ?? ImmutableArray<GoModule>.Empty, - settings? - .Where(static pair => pair.Key is not null) - .Select(static pair => new KeyValuePair<string, string?>(pair.Key, pair.Value)) - .ToImmutableArray() - ?? ImmutableArray<KeyValuePair<string, string?>>.Empty, - dwarfMetadata) - { - } - - private GoBuildInfo( - string goVersion, - string absoluteBinaryPath, - string modulePath, - GoModule mainModule, - ImmutableArray<GoModule> dependencies, - ImmutableArray<KeyValuePair<string, string?>> settings, - GoDwarfMetadata? dwarfMetadata) - { - GoVersion = goVersion ?? throw new ArgumentNullException(nameof(goVersion)); - AbsoluteBinaryPath = absoluteBinaryPath ?? throw new ArgumentNullException(nameof(absoluteBinaryPath)); - ModulePath = modulePath ?? throw new ArgumentNullException(nameof(modulePath)); - MainModule = mainModule ?? throw new ArgumentNullException(nameof(mainModule)); - Dependencies = dependencies; - Settings = settings; - DwarfMetadata = dwarfMetadata; - } - - public string GoVersion { get; } - - public string AbsoluteBinaryPath { get; } - - public string ModulePath { get; } - - public GoModule MainModule { get; } - - public ImmutableArray<GoModule> Dependencies { get; } - - public ImmutableArray<KeyValuePair<string, string?>> Settings { get; } - - public GoDwarfMetadata? 
DwarfMetadata { get; } - - public GoBuildInfo WithDwarf(GoDwarfMetadata metadata) - { - ArgumentNullException.ThrowIfNull(metadata); - return new GoBuildInfo( - GoVersion, - AbsoluteBinaryPath, - ModulePath, - MainModule, - Dependencies, - Settings, - metadata); - } -} +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Linq; + +namespace StellaOps.Scanner.Analyzers.Lang.Go.Internal; + +internal sealed class GoBuildInfo +{ + public GoBuildInfo( + string goVersion, + string absoluteBinaryPath, + string modulePath, + GoModule mainModule, + IEnumerable<GoModule> dependencies, + IEnumerable<KeyValuePair<string, string?>> settings, + GoDwarfMetadata? dwarfMetadata = null) + : this( + goVersion, + absoluteBinaryPath, + modulePath, + mainModule, + dependencies? + .Where(static module => module is not null) + .ToImmutableArray() + ?? ImmutableArray<GoModule>.Empty, + settings? + .Where(static pair => pair.Key is not null) + .Select(static pair => new KeyValuePair<string, string?>(pair.Key, pair.Value)) + .ToImmutableArray() + ?? ImmutableArray<KeyValuePair<string, string?>>.Empty, + dwarfMetadata) + { + } + + private GoBuildInfo( + string goVersion, + string absoluteBinaryPath, + string modulePath, + GoModule mainModule, + ImmutableArray<GoModule> dependencies, + ImmutableArray<KeyValuePair<string, string?>> settings, + GoDwarfMetadata? dwarfMetadata) + { + GoVersion = goVersion ?? throw new ArgumentNullException(nameof(goVersion)); + AbsoluteBinaryPath = absoluteBinaryPath ?? throw new ArgumentNullException(nameof(absoluteBinaryPath)); + ModulePath = modulePath ?? throw new ArgumentNullException(nameof(modulePath)); + MainModule = mainModule ?? throw new ArgumentNullException(nameof(mainModule)); + Dependencies = dependencies; + Settings = settings; + DwarfMetadata = dwarfMetadata; + } + + public string GoVersion { get; } + + public string AbsoluteBinaryPath { get; } + + public string ModulePath { get; } + + public GoModule MainModule { get; } + + public ImmutableArray<GoModule> Dependencies { get; } + + public ImmutableArray<KeyValuePair<string, string?>> Settings { get; } + + public GoDwarfMetadata? DwarfMetadata { get; } + + public GoBuildInfo WithDwarf(GoDwarfMetadata metadata) + { + ArgumentNullException.ThrowIfNull(metadata); + return new GoBuildInfo( + GoVersion, + AbsoluteBinaryPath, + ModulePath, + MainModule, + Dependencies, + Settings, + metadata); + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoBuildInfoDecoder.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoBuildInfoDecoder.cs similarity index 95% rename from src/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoBuildInfoDecoder.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoBuildInfoDecoder.cs index 1981ec98..02ea7065 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoBuildInfoDecoder.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoBuildInfoDecoder.cs @@ -1,159 +1,159 @@ -using System; -using System.Text; - -namespace StellaOps.Scanner.Analyzers.Lang.Go.Internal; - -internal static class GoBuildInfoDecoder -{ - private const string BuildInfoMagic = "\xff Go buildinf:"; - private const int HeaderSize = 32; - private const byte VarintEncodingFlag = 0x02; - - public static bool TryDecode(ReadOnlySpan<byte> data, out string? goVersion, out string? 
moduleData) - { - goVersion = null; - moduleData = null; - - if (data.Length < HeaderSize) - { - return false; - } - - if (!IsMagicMatch(data)) - { - return false; - } - - var pointerSize = data[14]; - var flags = data[15]; - - if (pointerSize != 4 && pointerSize != 8) - { - return false; - } - - if ((flags & VarintEncodingFlag) == 0) - { - // Older Go toolchains encode pointers to strings instead of inline data. - // The Sprint 10 scope targets Go 1.18+, which always sets the varint flag. - return false; - } - - var payload = data.Slice(HeaderSize); - - if (!TryReadVarString(payload, out var version, out var consumed)) - { - return false; - } - - payload = payload.Slice(consumed); - - if (!TryReadVarString(payload, out var modules, out _)) - { - return false; - } - - if (string.IsNullOrWhiteSpace(version)) - { - return false; - } - - modules = StripSentinel(modules); - - goVersion = version; - moduleData = modules; - return !string.IsNullOrWhiteSpace(moduleData); - } - - private static bool IsMagicMatch(ReadOnlySpan<byte> data) - { - if (data.Length < BuildInfoMagic.Length) - { - return false; - } - - for (var i = 0; i < BuildInfoMagic.Length; i++) - { - if (data[i] != BuildInfoMagic[i]) - { - return false; - } - } - - return true; - } - - private static bool TryReadVarString(ReadOnlySpan<byte> data, out string result, out int consumed) - { - result = string.Empty; - consumed = 0; - - if (!TryReadUVarint(data, out var length, out var lengthBytes)) - { - return false; - } - - if (length > int.MaxValue) - { - return false; - } - - var stringLength = (int)length; - var totalRequired = lengthBytes + stringLength; - if (stringLength <= 0 || totalRequired > data.Length) - { - return false; - } - - var slice = data.Slice(lengthBytes, stringLength); - result = Encoding.UTF8.GetString(slice); - consumed = totalRequired; - return true; - } - - private static bool TryReadUVarint(ReadOnlySpan<byte> data, out ulong value, out int bytesRead) - { - value = 0; - bytesRead = 0; - - ulong x = 0; - var shift = 0; - - for (var i = 0; i < data.Length; i++) - { - var b = data[i]; - if (b < 0x80) - { - if (i > 9 || i == 9 && b > 1) - { - return false; - } - - value = x | (ulong)b << shift; - bytesRead = i + 1; - return true; - } - - x |= (ulong)(b & 0x7F) << shift; - shift += 7; - } - - return false; - } - - private static string StripSentinel(string value) - { - if (string.IsNullOrEmpty(value) || value.Length < 33) - { - return value; - } - - var sentinelIndex = value.Length - 17; - if (value[sentinelIndex] != '\n') - { - return value; - } - - return value[16..^16]; - } -} +using System; +using System.Text; + +namespace StellaOps.Scanner.Analyzers.Lang.Go.Internal; + +internal static class GoBuildInfoDecoder +{ + private const string BuildInfoMagic = "\xff Go buildinf:"; + private const int HeaderSize = 32; + private const byte VarintEncodingFlag = 0x02; + + public static bool TryDecode(ReadOnlySpan<byte> data, out string? goVersion, out string? moduleData) + { + goVersion = null; + moduleData = null; + + if (data.Length < HeaderSize) + { + return false; + } + + if (!IsMagicMatch(data)) + { + return false; + } + + var pointerSize = data[14]; + var flags = data[15]; + + if (pointerSize != 4 && pointerSize != 8) + { + return false; + } + + if ((flags & VarintEncodingFlag) == 0) + { + // Older Go toolchains encode pointers to strings instead of inline data. + // The Sprint 10 scope targets Go 1.18+, which always sets the varint flag. 
+ return false; + } + + var payload = data.Slice(HeaderSize); + + if (!TryReadVarString(payload, out var version, out var consumed)) + { + return false; + } + + payload = payload.Slice(consumed); + + if (!TryReadVarString(payload, out var modules, out _)) + { + return false; + } + + if (string.IsNullOrWhiteSpace(version)) + { + return false; + } + + modules = StripSentinel(modules); + + goVersion = version; + moduleData = modules; + return !string.IsNullOrWhiteSpace(moduleData); + } + + private static bool IsMagicMatch(ReadOnlySpan<byte> data) + { + if (data.Length < BuildInfoMagic.Length) + { + return false; + } + + for (var i = 0; i < BuildInfoMagic.Length; i++) + { + if (data[i] != BuildInfoMagic[i]) + { + return false; + } + } + + return true; + } + + private static bool TryReadVarString(ReadOnlySpan<byte> data, out string result, out int consumed) + { + result = string.Empty; + consumed = 0; + + if (!TryReadUVarint(data, out var length, out var lengthBytes)) + { + return false; + } + + if (length > int.MaxValue) + { + return false; + } + + var stringLength = (int)length; + var totalRequired = lengthBytes + stringLength; + if (stringLength <= 0 || totalRequired > data.Length) + { + return false; + } + + var slice = data.Slice(lengthBytes, stringLength); + result = Encoding.UTF8.GetString(slice); + consumed = totalRequired; + return true; + } + + private static bool TryReadUVarint(ReadOnlySpan<byte> data, out ulong value, out int bytesRead) + { + value = 0; + bytesRead = 0; + + ulong x = 0; + var shift = 0; + + for (var i = 0; i < data.Length; i++) + { + var b = data[i]; + if (b < 0x80) + { + if (i > 9 || i == 9 && b > 1) + { + return false; + } + + value = x | (ulong)b << shift; + bytesRead = i + 1; + return true; + } + + x |= (ulong)(b & 0x7F) << shift; + shift += 7; + } + + return false; + } + + private static string StripSentinel(string value) + { + if (string.IsNullOrEmpty(value) || value.Length < 33) + { + return value; + } + + var sentinelIndex = value.Length - 17; + if (value[sentinelIndex] != '\n') + { + return value; + } + + return value[16..^16]; + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoBuildInfoParser.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoBuildInfoParser.cs similarity index 96% rename from src/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoBuildInfoParser.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoBuildInfoParser.cs index 57299c80..ba2694a4 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoBuildInfoParser.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoBuildInfoParser.cs @@ -1,234 +1,234 @@ -using System; -using System.Collections.Generic; -using System.IO; -using System.Text; -using System.Text.Json; - -namespace StellaOps.Scanner.Analyzers.Lang.Go.Internal; - -internal static class GoBuildInfoParser -{ - private const string PathPrefix = "path\t"; - private const string ModulePrefix = "mod\t"; - private const string DependencyPrefix = "dep\t"; - private const string ReplacementPrefix = "=>\t"; - private const string BuildPrefix = "build\t"; - - public static bool TryParse(string goVersion, string absoluteBinaryPath, string rawModuleData, out GoBuildInfo? info) - { - info = null; - - if (string.IsNullOrWhiteSpace(goVersion) || string.IsNullOrWhiteSpace(rawModuleData)) - { - return false; - } - - string? modulePath = null; - GoModule? 
mainModule = null; - var dependencies = new List<GoModule>(); - var settings = new SortedDictionary<string, string?>(StringComparer.Ordinal); - - GoModule? lastModule = null; - using var reader = new StringReader(rawModuleData); - - while (reader.ReadLine() is { } line) - { - if (string.IsNullOrWhiteSpace(line)) - { - continue; - } - - if (line.StartsWith(PathPrefix, StringComparison.Ordinal)) - { - modulePath = line[PathPrefix.Length..].Trim(); - continue; - } - - if (line.StartsWith(ModulePrefix, StringComparison.Ordinal)) - { - mainModule = ParseModule(line.AsSpan(ModulePrefix.Length), isMain: true); - lastModule = mainModule; - continue; - } - - if (line.StartsWith(DependencyPrefix, StringComparison.Ordinal)) - { - var dependency = ParseModule(line.AsSpan(DependencyPrefix.Length), isMain: false); - if (dependency is not null) - { - dependencies.Add(dependency); - lastModule = dependency; - } - - continue; - } - - if (line.StartsWith(ReplacementPrefix, StringComparison.Ordinal)) - { - if (lastModule is null) - { - continue; - } - - var replacement = ParseReplacement(line.AsSpan(ReplacementPrefix.Length)); - if (replacement is not null) - { - lastModule.SetReplacement(replacement); - } - - continue; - } - - if (line.StartsWith(BuildPrefix, StringComparison.Ordinal)) - { - var pair = ParseBuildSetting(line.AsSpan(BuildPrefix.Length)); - if (!string.IsNullOrEmpty(pair.Key)) - { - settings[pair.Key] = pair.Value; - } - } - } - - if (mainModule is null) - { - return false; - } - - if (string.IsNullOrEmpty(modulePath)) - { - modulePath = mainModule.Path; - } - - info = new GoBuildInfo( - goVersion, - absoluteBinaryPath, - modulePath, - mainModule, - dependencies, - settings); - - return true; - } - - private static GoModule? ParseModule(ReadOnlySpan<char> span, bool isMain) - { - var fields = SplitFields(span, expected: 4); - if (fields.Count == 0) - { - return null; - } - - var path = fields[0]; - if (string.IsNullOrWhiteSpace(path)) - { - return null; - } - - var version = fields.Count > 1 ? fields[1] : null; - var sum = fields.Count > 2 ? fields[2] : null; - - return new GoModule(path, version, sum, isMain); - } - - private static GoModuleReplacement? ParseReplacement(ReadOnlySpan<char> span) - { - var fields = SplitFields(span, expected: 3); - if (fields.Count == 0) - { - return null; - } - - var path = fields[0]; - if (string.IsNullOrWhiteSpace(path)) - { - return null; - } - - var version = fields.Count > 1 ? fields[1] : null; - var sum = fields.Count > 2 ? 
fields[2] : null; - - return new GoModuleReplacement(path, version, sum); - } - - private static KeyValuePair<string, string?> ParseBuildSetting(ReadOnlySpan<char> span) - { - span = span.Trim(); - if (span.IsEmpty) - { - return default; - } - - var separatorIndex = span.IndexOf('='); - if (separatorIndex <= 0) - { - return default; - } - - var rawKey = span[..separatorIndex].Trim(); - var rawValue = span[(separatorIndex + 1)..].Trim(); - - var key = Unquote(rawKey.ToString()); - if (string.IsNullOrWhiteSpace(key)) - { - return default; - } - - var value = Unquote(rawValue.ToString()); - return new KeyValuePair<string, string?>(key, value); - } - - private static List<string> SplitFields(ReadOnlySpan<char> span, int expected) - { - var fields = new List<string>(expected); - var builder = new StringBuilder(); - - for (var i = 0; i < span.Length; i++) - { - var current = span[i]; - if (current == '\t') - { - fields.Add(builder.ToString()); - builder.Clear(); - continue; - } - - builder.Append(current); - } - - fields.Add(builder.ToString()); - return fields; - } - - private static string Unquote(string value) - { - if (string.IsNullOrEmpty(value)) - { - return value; - } - - value = value.Trim(); - if (value.Length < 2) - { - return value; - } - - if (value[0] == '"' && value[^1] == '"') - { - try - { - return JsonSerializer.Deserialize<string>(value) ?? value; - } - catch (JsonException) - { - return value; - } - } - - if (value[0] == '`' && value[^1] == '`') - { - return value[1..^1]; - } - - return value; - } -} +using System; +using System.Collections.Generic; +using System.IO; +using System.Text; +using System.Text.Json; + +namespace StellaOps.Scanner.Analyzers.Lang.Go.Internal; + +internal static class GoBuildInfoParser +{ + private const string PathPrefix = "path\t"; + private const string ModulePrefix = "mod\t"; + private const string DependencyPrefix = "dep\t"; + private const string ReplacementPrefix = "=>\t"; + private const string BuildPrefix = "build\t"; + + public static bool TryParse(string goVersion, string absoluteBinaryPath, string rawModuleData, out GoBuildInfo? info) + { + info = null; + + if (string.IsNullOrWhiteSpace(goVersion) || string.IsNullOrWhiteSpace(rawModuleData)) + { + return false; + } + + string? modulePath = null; + GoModule? mainModule = null; + var dependencies = new List<GoModule>(); + var settings = new SortedDictionary<string, string?>(StringComparer.Ordinal); + + GoModule? 
lastModule = null; + using var reader = new StringReader(rawModuleData); + + while (reader.ReadLine() is { } line) + { + if (string.IsNullOrWhiteSpace(line)) + { + continue; + } + + if (line.StartsWith(PathPrefix, StringComparison.Ordinal)) + { + modulePath = line[PathPrefix.Length..].Trim(); + continue; + } + + if (line.StartsWith(ModulePrefix, StringComparison.Ordinal)) + { + mainModule = ParseModule(line.AsSpan(ModulePrefix.Length), isMain: true); + lastModule = mainModule; + continue; + } + + if (line.StartsWith(DependencyPrefix, StringComparison.Ordinal)) + { + var dependency = ParseModule(line.AsSpan(DependencyPrefix.Length), isMain: false); + if (dependency is not null) + { + dependencies.Add(dependency); + lastModule = dependency; + } + + continue; + } + + if (line.StartsWith(ReplacementPrefix, StringComparison.Ordinal)) + { + if (lastModule is null) + { + continue; + } + + var replacement = ParseReplacement(line.AsSpan(ReplacementPrefix.Length)); + if (replacement is not null) + { + lastModule.SetReplacement(replacement); + } + + continue; + } + + if (line.StartsWith(BuildPrefix, StringComparison.Ordinal)) + { + var pair = ParseBuildSetting(line.AsSpan(BuildPrefix.Length)); + if (!string.IsNullOrEmpty(pair.Key)) + { + settings[pair.Key] = pair.Value; + } + } + } + + if (mainModule is null) + { + return false; + } + + if (string.IsNullOrEmpty(modulePath)) + { + modulePath = mainModule.Path; + } + + info = new GoBuildInfo( + goVersion, + absoluteBinaryPath, + modulePath, + mainModule, + dependencies, + settings); + + return true; + } + + private static GoModule? ParseModule(ReadOnlySpan<char> span, bool isMain) + { + var fields = SplitFields(span, expected: 4); + if (fields.Count == 0) + { + return null; + } + + var path = fields[0]; + if (string.IsNullOrWhiteSpace(path)) + { + return null; + } + + var version = fields.Count > 1 ? fields[1] : null; + var sum = fields.Count > 2 ? fields[2] : null; + + return new GoModule(path, version, sum, isMain); + } + + private static GoModuleReplacement? ParseReplacement(ReadOnlySpan<char> span) + { + var fields = SplitFields(span, expected: 3); + if (fields.Count == 0) + { + return null; + } + + var path = fields[0]; + if (string.IsNullOrWhiteSpace(path)) + { + return null; + } + + var version = fields.Count > 1 ? fields[1] : null; + var sum = fields.Count > 2 ? 
fields[2] : null; + + return new GoModuleReplacement(path, version, sum); + } + + private static KeyValuePair<string, string?> ParseBuildSetting(ReadOnlySpan<char> span) + { + span = span.Trim(); + if (span.IsEmpty) + { + return default; + } + + var separatorIndex = span.IndexOf('='); + if (separatorIndex <= 0) + { + return default; + } + + var rawKey = span[..separatorIndex].Trim(); + var rawValue = span[(separatorIndex + 1)..].Trim(); + + var key = Unquote(rawKey.ToString()); + if (string.IsNullOrWhiteSpace(key)) + { + return default; + } + + var value = Unquote(rawValue.ToString()); + return new KeyValuePair<string, string?>(key, value); + } + + private static List<string> SplitFields(ReadOnlySpan<char> span, int expected) + { + var fields = new List<string>(expected); + var builder = new StringBuilder(); + + for (var i = 0; i < span.Length; i++) + { + var current = span[i]; + if (current == '\t') + { + fields.Add(builder.ToString()); + builder.Clear(); + continue; + } + + builder.Append(current); + } + + fields.Add(builder.ToString()); + return fields; + } + + private static string Unquote(string value) + { + if (string.IsNullOrEmpty(value)) + { + return value; + } + + value = value.Trim(); + if (value.Length < 2) + { + return value; + } + + if (value[0] == '"' && value[^1] == '"') + { + try + { + return JsonSerializer.Deserialize<string>(value) ?? value; + } + catch (JsonException) + { + return value; + } + } + + if (value[0] == '`' && value[^1] == '`') + { + return value[1..^1]; + } + + return value; + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoBuildInfoProvider.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoBuildInfoProvider.cs similarity index 96% rename from src/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoBuildInfoProvider.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoBuildInfoProvider.cs index e74ecc9c..21f1e7fe 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoBuildInfoProvider.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoBuildInfoProvider.cs @@ -1,82 +1,82 @@ -using System; -using System.Collections.Concurrent; -using System.IO; -using System.Security; - -namespace StellaOps.Scanner.Analyzers.Lang.Go.Internal; - -internal static class GoBuildInfoProvider -{ - private static readonly ConcurrentDictionary<GoBinaryCacheKey, GoBuildInfo?> Cache = new(); - - public static bool TryGetBuildInfo(string absolutePath, out GoBuildInfo? info) - { - info = null; - - FileInfo fileInfo; - try - { - fileInfo = new FileInfo(absolutePath); - if (!fileInfo.Exists) - { - return false; - } - } - catch (IOException) - { - return false; - } - catch (UnauthorizedAccessException) - { - return false; - } - catch (System.Security.SecurityException) - { - return false; - } - - var key = new GoBinaryCacheKey(absolutePath, fileInfo.Length, fileInfo.LastWriteTimeUtc.Ticks); - info = Cache.GetOrAdd(key, static (cacheKey, path) => CreateBuildInfo(path), absolutePath); - return info is not null; - } - - private static GoBuildInfo? 
CreateBuildInfo(string absolutePath) - { - if (!GoBinaryScanner.TryReadBuildInfo(absolutePath, out var goVersion, out var moduleData)) - { - return null; - } - - if (string.IsNullOrWhiteSpace(goVersion) || string.IsNullOrWhiteSpace(moduleData)) - { - return null; - } - - if (!GoBuildInfoParser.TryParse(goVersion!, absolutePath, moduleData!, out var buildInfo) || buildInfo is null) - { - return null; - } - - if (GoDwarfReader.TryRead(absolutePath, out var dwarf) && dwarf is not null) - { - buildInfo = buildInfo.WithDwarf(dwarf); - } - - return buildInfo; - } - - private readonly record struct GoBinaryCacheKey(string Path, long Length, long LastWriteTicks) - { - private readonly string _normalizedPath = OperatingSystem.IsWindows() - ? Path.ToLowerInvariant() - : Path; - - public bool Equals(GoBinaryCacheKey other) - => Length == other.Length - && LastWriteTicks == other.LastWriteTicks - && string.Equals(_normalizedPath, other._normalizedPath, StringComparison.Ordinal); - - public override int GetHashCode() - => HashCode.Combine(_normalizedPath, Length, LastWriteTicks); - } -} +using System; +using System.Collections.Concurrent; +using System.IO; +using System.Security; + +namespace StellaOps.Scanner.Analyzers.Lang.Go.Internal; + +internal static class GoBuildInfoProvider +{ + private static readonly ConcurrentDictionary<GoBinaryCacheKey, GoBuildInfo?> Cache = new(); + + public static bool TryGetBuildInfo(string absolutePath, out GoBuildInfo? info) + { + info = null; + + FileInfo fileInfo; + try + { + fileInfo = new FileInfo(absolutePath); + if (!fileInfo.Exists) + { + return false; + } + } + catch (IOException) + { + return false; + } + catch (UnauthorizedAccessException) + { + return false; + } + catch (System.Security.SecurityException) + { + return false; + } + + var key = new GoBinaryCacheKey(absolutePath, fileInfo.Length, fileInfo.LastWriteTimeUtc.Ticks); + info = Cache.GetOrAdd(key, static (cacheKey, path) => CreateBuildInfo(path), absolutePath); + return info is not null; + } + + private static GoBuildInfo? CreateBuildInfo(string absolutePath) + { + if (!GoBinaryScanner.TryReadBuildInfo(absolutePath, out var goVersion, out var moduleData)) + { + return null; + } + + if (string.IsNullOrWhiteSpace(goVersion) || string.IsNullOrWhiteSpace(moduleData)) + { + return null; + } + + if (!GoBuildInfoParser.TryParse(goVersion!, absolutePath, moduleData!, out var buildInfo) || buildInfo is null) + { + return null; + } + + if (GoDwarfReader.TryRead(absolutePath, out var dwarf) && dwarf is not null) + { + buildInfo = buildInfo.WithDwarf(dwarf); + } + + return buildInfo; + } + + private readonly record struct GoBinaryCacheKey(string Path, long Length, long LastWriteTicks) + { + private readonly string _normalizedPath = OperatingSystem.IsWindows() + ? 
Path.ToLowerInvariant() + : Path; + + public bool Equals(GoBinaryCacheKey other) + => Length == other.Length + && LastWriteTicks == other.LastWriteTicks + && string.Equals(_normalizedPath, other._normalizedPath, StringComparison.Ordinal); + + public override int GetHashCode() + => HashCode.Combine(_normalizedPath, Length, LastWriteTicks); + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoDwarfMetadata.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoDwarfMetadata.cs similarity index 95% rename from src/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoDwarfMetadata.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoDwarfMetadata.cs index 83d2755a..0d7c4fc0 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoDwarfMetadata.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoDwarfMetadata.cs @@ -1,33 +1,33 @@ -using System; - -namespace StellaOps.Scanner.Analyzers.Lang.Go.Internal; - -internal sealed class GoDwarfMetadata -{ - public GoDwarfMetadata(string? vcsSystem, string? revision, bool? modified, string? timestampUtc) - { - VcsSystem = Normalize(vcsSystem); - Revision = Normalize(revision); - Modified = modified; - TimestampUtc = Normalize(timestampUtc); - } - - public string? VcsSystem { get; } - - public string? Revision { get; } - - public bool? Modified { get; } - - public string? TimestampUtc { get; } - - private static string? Normalize(string? value) - { - if (string.IsNullOrWhiteSpace(value)) - { - return null; - } - - var trimmed = value.Trim(); - return trimmed.Length == 0 ? null : trimmed; - } -} +using System; + +namespace StellaOps.Scanner.Analyzers.Lang.Go.Internal; + +internal sealed class GoDwarfMetadata +{ + public GoDwarfMetadata(string? vcsSystem, string? revision, bool? modified, string? timestampUtc) + { + VcsSystem = Normalize(vcsSystem); + Revision = Normalize(revision); + Modified = modified; + TimestampUtc = Normalize(timestampUtc); + } + + public string? VcsSystem { get; } + + public string? Revision { get; } + + public bool? Modified { get; } + + public string? TimestampUtc { get; } + + private static string? Normalize(string? value) + { + if (string.IsNullOrWhiteSpace(value)) + { + return null; + } + + var trimmed = value.Trim(); + return trimmed.Length == 0 ? null : trimmed; + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoDwarfReader.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoDwarfReader.cs similarity index 96% rename from src/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoDwarfReader.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoDwarfReader.cs index e1fa5f76..9d114936 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoDwarfReader.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoDwarfReader.cs @@ -1,120 +1,120 @@ -using System; -using System.Buffers; -using System.IO; -using System.Text; - -namespace StellaOps.Scanner.Analyzers.Lang.Go.Internal; - -internal static class GoDwarfReader -{ - private static readonly byte[] VcsSystemToken = Encoding.UTF8.GetBytes("vcs="); - private static readonly byte[] VcsRevisionToken = Encoding.UTF8.GetBytes("vcs.revision="); - private static readonly byte[] VcsModifiedToken = Encoding.UTF8.GetBytes("vcs.modified="); - private static readonly byte[] VcsTimeToken = Encoding.UTF8.GetBytes("vcs.time="); - - public static bool TryRead(string path, out GoDwarfMetadata? 
metadata) - { - metadata = null; - - FileInfo fileInfo; - try - { - fileInfo = new FileInfo(path); - } - catch (IOException) - { - return false; - } - catch (UnauthorizedAccessException) - { - return false; - } - - if (!fileInfo.Exists || fileInfo.Length == 0 || fileInfo.Length > 256 * 1024 * 1024) - { - return false; - } - - var length = fileInfo.Length; - var readLength = (int)Math.Min(length, int.MaxValue); - var buffer = ArrayPool<byte>.Shared.Rent(readLength); - var bytesRead = 0; - - try - { - using var stream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read); - bytesRead = stream.Read(buffer, 0, readLength); - if (bytesRead <= 0) - { - return false; - } - - var data = new ReadOnlySpan<byte>(buffer, 0, bytesRead); - - var revision = ExtractValue(data, VcsRevisionToken); - var modifiedText = ExtractValue(data, VcsModifiedToken); - var timestamp = ExtractValue(data, VcsTimeToken); - var system = ExtractValue(data, VcsSystemToken); - - bool? modified = null; - if (!string.IsNullOrWhiteSpace(modifiedText)) - { - if (bool.TryParse(modifiedText, out var parsed)) - { - modified = parsed; - } - } - - if (string.IsNullOrWhiteSpace(revision) && string.IsNullOrWhiteSpace(system) && modified is null && string.IsNullOrWhiteSpace(timestamp)) - { - return false; - } - - metadata = new GoDwarfMetadata(system, revision, modified, timestamp); - return true; - } - catch (IOException) - { - return false; - } - catch (UnauthorizedAccessException) - { - return false; - } - finally - { - Array.Clear(buffer, 0, bytesRead); - ArrayPool<byte>.Shared.Return(buffer); - } - } - - private static string? ExtractValue(ReadOnlySpan<byte> data, ReadOnlySpan<byte> token) - { - var index = data.IndexOf(token); - if (index < 0) - { - return null; - } - - var start = index + token.Length; - var end = start; - - while (end < data.Length) - { - var current = data[end]; - if (current == 0 || current == (byte)'\n' || current == (byte)'\r') - { - break; - } - - end++; - } - - if (end <= start) - { - return null; - } - - return Encoding.UTF8.GetString(data.Slice(start, end - start)); - } -} +using System; +using System.Buffers; +using System.IO; +using System.Text; + +namespace StellaOps.Scanner.Analyzers.Lang.Go.Internal; + +internal static class GoDwarfReader +{ + private static readonly byte[] VcsSystemToken = Encoding.UTF8.GetBytes("vcs="); + private static readonly byte[] VcsRevisionToken = Encoding.UTF8.GetBytes("vcs.revision="); + private static readonly byte[] VcsModifiedToken = Encoding.UTF8.GetBytes("vcs.modified="); + private static readonly byte[] VcsTimeToken = Encoding.UTF8.GetBytes("vcs.time="); + + public static bool TryRead(string path, out GoDwarfMetadata? 
metadata) + { + metadata = null; + + FileInfo fileInfo; + try + { + fileInfo = new FileInfo(path); + } + catch (IOException) + { + return false; + } + catch (UnauthorizedAccessException) + { + return false; + } + + if (!fileInfo.Exists || fileInfo.Length == 0 || fileInfo.Length > 256 * 1024 * 1024) + { + return false; + } + + var length = fileInfo.Length; + var readLength = (int)Math.Min(length, int.MaxValue); + var buffer = ArrayPool<byte>.Shared.Rent(readLength); + var bytesRead = 0; + + try + { + using var stream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read); + bytesRead = stream.Read(buffer, 0, readLength); + if (bytesRead <= 0) + { + return false; + } + + var data = new ReadOnlySpan<byte>(buffer, 0, bytesRead); + + var revision = ExtractValue(data, VcsRevisionToken); + var modifiedText = ExtractValue(data, VcsModifiedToken); + var timestamp = ExtractValue(data, VcsTimeToken); + var system = ExtractValue(data, VcsSystemToken); + + bool? modified = null; + if (!string.IsNullOrWhiteSpace(modifiedText)) + { + if (bool.TryParse(modifiedText, out var parsed)) + { + modified = parsed; + } + } + + if (string.IsNullOrWhiteSpace(revision) && string.IsNullOrWhiteSpace(system) && modified is null && string.IsNullOrWhiteSpace(timestamp)) + { + return false; + } + + metadata = new GoDwarfMetadata(system, revision, modified, timestamp); + return true; + } + catch (IOException) + { + return false; + } + catch (UnauthorizedAccessException) + { + return false; + } + finally + { + Array.Clear(buffer, 0, bytesRead); + ArrayPool<byte>.Shared.Return(buffer); + } + } + + private static string? ExtractValue(ReadOnlySpan<byte> data, ReadOnlySpan<byte> token) + { + var index = data.IndexOf(token); + if (index < 0) + { + return null; + } + + var start = index + token.Length; + var end = start; + + while (end < data.Length) + { + var current = data[end]; + if (current == 0 || current == (byte)'\n' || current == (byte)'\r') + { + break; + } + + end++; + } + + if (end <= start) + { + return null; + } + + return Encoding.UTF8.GetString(data.Slice(start, end - start)); + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoModule.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoModule.cs similarity index 95% rename from src/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoModule.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoModule.cs index ab47202b..f6b0325d 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoModule.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoModule.cs @@ -1,67 +1,67 @@ -using System; - -namespace StellaOps.Scanner.Analyzers.Lang.Go.Internal; - -internal sealed class GoModule -{ - public GoModule(string path, string? version, string? sum, bool isMain) - { - Path = path ?? throw new ArgumentNullException(nameof(path)); - Version = Normalize(version); - Sum = Normalize(sum); - IsMain = isMain; - } - - public string Path { get; } - - public string? Version { get; } - - public string? Sum { get; } - - public GoModuleReplacement? Replacement { get; private set; } - - public bool IsMain { get; } - - public void SetReplacement(GoModuleReplacement replacement) - { - Replacement = replacement ?? throw new ArgumentNullException(nameof(replacement)); - } - - private static string? Normalize(string? value) - { - if (string.IsNullOrWhiteSpace(value)) - { - return null; - } - - var trimmed = value.Trim(); - return trimmed.Length == 0 ? 
null : trimmed; - } -} - -internal sealed class GoModuleReplacement -{ - public GoModuleReplacement(string path, string? version, string? sum) - { - Path = path ?? throw new ArgumentNullException(nameof(path)); - Version = Normalize(version); - Sum = Normalize(sum); - } - - public string Path { get; } - - public string? Version { get; } - - public string? Sum { get; } - - private static string? Normalize(string? value) - { - if (string.IsNullOrWhiteSpace(value)) - { - return null; - } - - var trimmed = value.Trim(); - return trimmed.Length == 0 ? null : trimmed; - } -} +using System; + +namespace StellaOps.Scanner.Analyzers.Lang.Go.Internal; + +internal sealed class GoModule +{ + public GoModule(string path, string? version, string? sum, bool isMain) + { + Path = path ?? throw new ArgumentNullException(nameof(path)); + Version = Normalize(version); + Sum = Normalize(sum); + IsMain = isMain; + } + + public string Path { get; } + + public string? Version { get; } + + public string? Sum { get; } + + public GoModuleReplacement? Replacement { get; private set; } + + public bool IsMain { get; } + + public void SetReplacement(GoModuleReplacement replacement) + { + Replacement = replacement ?? throw new ArgumentNullException(nameof(replacement)); + } + + private static string? Normalize(string? value) + { + if (string.IsNullOrWhiteSpace(value)) + { + return null; + } + + var trimmed = value.Trim(); + return trimmed.Length == 0 ? null : trimmed; + } +} + +internal sealed class GoModuleReplacement +{ + public GoModuleReplacement(string path, string? version, string? sum) + { + Path = path ?? throw new ArgumentNullException(nameof(path)); + Version = Normalize(version); + Sum = Normalize(sum); + } + + public string Path { get; } + + public string? Version { get; } + + public string? Sum { get; } + + private static string? Normalize(string? value) + { + if (string.IsNullOrWhiteSpace(value)) + { + return null; + } + + var trimmed = value.Trim(); + return trimmed.Length == 0 ? 
null : trimmed; + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoStrippedBinaryClassification.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoStrippedBinaryClassification.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoStrippedBinaryClassification.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/Internal/GoStrippedBinaryClassification.cs diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Go/StellaOps.Scanner.Analyzers.Lang.Go.csproj b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/StellaOps.Scanner.Analyzers.Lang.Go.csproj similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Go/StellaOps.Scanner.Analyzers.Lang.Go.csproj rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/StellaOps.Scanner.Analyzers.Lang.Go.csproj diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/TASKS.md diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Go/manifest.json b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/manifest.json similarity index 96% rename from src/StellaOps.Scanner.Analyzers.Lang.Go/manifest.json rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/manifest.json index d00c941a..3a3998fa 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Go/manifest.json +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/manifest.json @@ -1,23 +1,23 @@ -{ - "schemaVersion": "1.0", - "id": "stellaops.analyzer.lang.go", - "displayName": "StellaOps Go Analyzer (preview)", - "version": "0.1.0", - "requiresRestart": true, - "entryPoint": { - "type": "dotnet", - "assembly": "StellaOps.Scanner.Analyzers.Lang.Go.dll", - "typeName": "StellaOps.Scanner.Analyzers.Lang.Go.GoAnalyzerPlugin" - }, - "capabilities": [ - "language-analyzer", - "golang", - "go" - ], - "metadata": { - "org.stellaops.analyzer.language": "go", - "org.stellaops.analyzer.kind": "language", - "org.stellaops.restart.required": "true", - "org.stellaops.analyzer.status": "preview" - } -} +{ + "schemaVersion": "1.0", + "id": "stellaops.analyzer.lang.go", + "displayName": "StellaOps Go Analyzer (preview)", + "version": "0.1.0", + "requiresRestart": true, + "entryPoint": { + "type": "dotnet", + "assembly": "StellaOps.Scanner.Analyzers.Lang.Go.dll", + "typeName": "StellaOps.Scanner.Analyzers.Lang.Go.GoAnalyzerPlugin" + }, + "capabilities": [ + "language-analyzer", + "golang", + "go" + ], + "metadata": { + "org.stellaops.analyzer.language": "go", + "org.stellaops.analyzer.kind": "language", + "org.stellaops.restart.required": "true", + "org.stellaops.analyzer.status": "preview" + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Java/GlobalUsings.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/GlobalUsings.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Java/GlobalUsings.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/GlobalUsings.cs diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ClassPath/JavaClassLocation.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ClassPath/JavaClassLocation.cs similarity index 97% rename from src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ClassPath/JavaClassLocation.cs rename to 
src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ClassPath/JavaClassLocation.cs index 27567feb..3706585f 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ClassPath/JavaClassLocation.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ClassPath/JavaClassLocation.cs @@ -1,62 +1,62 @@ -using System.IO.Compression; - -namespace StellaOps.Scanner.Analyzers.Lang.Java.Internal.ClassPath; - -internal enum JavaClassLocationKind -{ - ArchiveEntry, - EmbeddedArchiveEntry, -} - -internal sealed record JavaClassLocation( - JavaClassLocationKind Kind, - JavaArchive Archive, - JavaArchiveEntry Entry, - string? NestedClassPath) -{ - public static JavaClassLocation ForArchive(JavaArchive archive, JavaArchiveEntry entry) - => new(JavaClassLocationKind.ArchiveEntry, archive, entry, NestedClassPath: null); - - public static JavaClassLocation ForEmbedded(JavaArchive archive, JavaArchiveEntry entry, string nestedClassPath) - => new(JavaClassLocationKind.EmbeddedArchiveEntry, archive, entry, nestedClassPath); - - public Stream OpenClassStream(CancellationToken cancellationToken = default) - { - cancellationToken.ThrowIfCancellationRequested(); - - return Kind switch - { - JavaClassLocationKind.ArchiveEntry => Archive.OpenEntry(Entry), - JavaClassLocationKind.EmbeddedArchiveEntry => OpenEmbeddedEntryStream(cancellationToken), - _ => throw new InvalidOperationException($"Unsupported class location kind '{Kind}'."), - }; - } - - private Stream OpenEmbeddedEntryStream(CancellationToken cancellationToken) - { - cancellationToken.ThrowIfCancellationRequested(); - - using var embeddedStream = Archive.OpenEntry(Entry); - using var buffer = new MemoryStream(); - embeddedStream.CopyTo(buffer); - buffer.Position = 0; - - using var nestedArchive = new ZipArchive(buffer, ZipArchiveMode.Read, leaveOpen: true); - if (NestedClassPath is null) - { - throw new InvalidOperationException($"Nested class path not specified for embedded entry '{Entry.OriginalPath}'."); - } - - var classEntry = nestedArchive.GetEntry(NestedClassPath); - if (classEntry is null) - { - throw new FileNotFoundException($"Class '{NestedClassPath}' not found inside embedded archive entry '{Entry.OriginalPath}'."); - } - - using var classStream = classEntry.Open(); - var output = new MemoryStream(); - classStream.CopyTo(output); - output.Position = 0; - return output; - } -} +using System.IO.Compression; + +namespace StellaOps.Scanner.Analyzers.Lang.Java.Internal.ClassPath; + +internal enum JavaClassLocationKind +{ + ArchiveEntry, + EmbeddedArchiveEntry, +} + +internal sealed record JavaClassLocation( + JavaClassLocationKind Kind, + JavaArchive Archive, + JavaArchiveEntry Entry, + string? 
NestedClassPath) +{ + public static JavaClassLocation ForArchive(JavaArchive archive, JavaArchiveEntry entry) + => new(JavaClassLocationKind.ArchiveEntry, archive, entry, NestedClassPath: null); + + public static JavaClassLocation ForEmbedded(JavaArchive archive, JavaArchiveEntry entry, string nestedClassPath) + => new(JavaClassLocationKind.EmbeddedArchiveEntry, archive, entry, nestedClassPath); + + public Stream OpenClassStream(CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + + return Kind switch + { + JavaClassLocationKind.ArchiveEntry => Archive.OpenEntry(Entry), + JavaClassLocationKind.EmbeddedArchiveEntry => OpenEmbeddedEntryStream(cancellationToken), + _ => throw new InvalidOperationException($"Unsupported class location kind '{Kind}'."), + }; + } + + private Stream OpenEmbeddedEntryStream(CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + + using var embeddedStream = Archive.OpenEntry(Entry); + using var buffer = new MemoryStream(); + embeddedStream.CopyTo(buffer); + buffer.Position = 0; + + using var nestedArchive = new ZipArchive(buffer, ZipArchiveMode.Read, leaveOpen: true); + if (NestedClassPath is null) + { + throw new InvalidOperationException($"Nested class path not specified for embedded entry '{Entry.OriginalPath}'."); + } + + var classEntry = nestedArchive.GetEntry(NestedClassPath); + if (classEntry is null) + { + throw new FileNotFoundException($"Class '{NestedClassPath}' not found inside embedded archive entry '{Entry.OriginalPath}'."); + } + + using var classStream = classEntry.Open(); + var output = new MemoryStream(); + classStream.CopyTo(output); + output.Position = 0; + return output; + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ClassPath/JavaClassPathAnalysis.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ClassPath/JavaClassPathAnalysis.cs similarity index 97% rename from src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ClassPath/JavaClassPathAnalysis.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ClassPath/JavaClassPathAnalysis.cs index 20a12797..9597a1f6 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ClassPath/JavaClassPathAnalysis.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ClassPath/JavaClassPathAnalysis.cs @@ -1,102 +1,102 @@ -using System.Collections.Immutable; - -namespace StellaOps.Scanner.Analyzers.Lang.Java.Internal.ClassPath; - -internal sealed class JavaClassPathAnalysis -{ - public JavaClassPathAnalysis( - IEnumerable<JavaClassPathSegment> segments, - IEnumerable<JavaModuleDescriptor> modules, - IEnumerable<JavaClassDuplicate> duplicateClasses, - IEnumerable<JavaSplitPackage> splitPackages) - { - Segments = segments - .Where(static segment => segment is not null) - .OrderBy(static segment => segment.Order) - .ThenBy(static segment => segment.Identifier, StringComparer.Ordinal) - .ToImmutableArray(); - - Modules = modules - .Where(static module => module is not null) - .OrderBy(static module => module.Name, StringComparer.Ordinal) - .ThenBy(static module => module.Source, StringComparer.Ordinal) - .ToImmutableArray(); - - DuplicateClasses = duplicateClasses - .Where(static duplicate => duplicate is not null) - .OrderBy(static duplicate => duplicate.ClassName, StringComparer.Ordinal) - .ToImmutableArray(); - - SplitPackages = splitPackages - .Where(static split => split is not null) - .OrderBy(static split => 
split.PackageName, StringComparer.Ordinal) - .ToImmutableArray(); - } - - public ImmutableArray<JavaClassPathSegment> Segments { get; } - - public ImmutableArray<JavaModuleDescriptor> Modules { get; } - - public ImmutableArray<JavaClassDuplicate> DuplicateClasses { get; } - - public ImmutableArray<JavaSplitPackage> SplitPackages { get; } -} - -internal sealed class JavaClassPathSegment -{ - public JavaClassPathSegment( - string identifier, - string displayPath, - JavaClassPathSegmentKind kind, - JavaPackagingKind packaging, - int order, - JavaModuleDescriptor? module, - ImmutableSortedSet<string> classes, - ImmutableDictionary<string, JavaPackageFingerprint> packages, - ImmutableDictionary<string, ImmutableArray<string>> serviceDefinitions, - ImmutableDictionary<string, JavaClassLocation> classLocations) - { - Identifier = identifier ?? throw new ArgumentNullException(nameof(identifier)); - DisplayPath = displayPath ?? throw new ArgumentNullException(nameof(displayPath)); - Kind = kind; - Packaging = packaging; - Order = order; - Module = module; - Classes = classes; - Packages = packages ?? ImmutableDictionary<string, JavaPackageFingerprint>.Empty; - ServiceDefinitions = serviceDefinitions ?? ImmutableDictionary<string, ImmutableArray<string>>.Empty; - ClassLocations = classLocations ?? ImmutableDictionary<string, JavaClassLocation>.Empty; - } - - public string Identifier { get; } - - public string DisplayPath { get; } - - public JavaClassPathSegmentKind Kind { get; } - - public JavaPackagingKind Packaging { get; } - - public int Order { get; } - - public JavaModuleDescriptor? Module { get; } - - public ImmutableSortedSet<string> Classes { get; } - - public ImmutableDictionary<string, JavaPackageFingerprint> Packages { get; } - - public ImmutableDictionary<string, ImmutableArray<string>> ServiceDefinitions { get; } - - public ImmutableDictionary<string, JavaClassLocation> ClassLocations { get; } -} - -internal enum JavaClassPathSegmentKind -{ - Archive, - Directory, -} - -internal sealed record JavaPackageFingerprint(string PackageName, int ClassCount, string Fingerprint); - -internal sealed record JavaClassDuplicate(string ClassName, ImmutableArray<string> SegmentIdentifiers); - -internal sealed record JavaSplitPackage(string PackageName, ImmutableArray<string> SegmentIdentifiers); +using System.Collections.Immutable; + +namespace StellaOps.Scanner.Analyzers.Lang.Java.Internal.ClassPath; + +internal sealed class JavaClassPathAnalysis +{ + public JavaClassPathAnalysis( + IEnumerable<JavaClassPathSegment> segments, + IEnumerable<JavaModuleDescriptor> modules, + IEnumerable<JavaClassDuplicate> duplicateClasses, + IEnumerable<JavaSplitPackage> splitPackages) + { + Segments = segments + .Where(static segment => segment is not null) + .OrderBy(static segment => segment.Order) + .ThenBy(static segment => segment.Identifier, StringComparer.Ordinal) + .ToImmutableArray(); + + Modules = modules + .Where(static module => module is not null) + .OrderBy(static module => module.Name, StringComparer.Ordinal) + .ThenBy(static module => module.Source, StringComparer.Ordinal) + .ToImmutableArray(); + + DuplicateClasses = duplicateClasses + .Where(static duplicate => duplicate is not null) + .OrderBy(static duplicate => duplicate.ClassName, StringComparer.Ordinal) + .ToImmutableArray(); + + SplitPackages = splitPackages + .Where(static split => split is not null) + .OrderBy(static split => split.PackageName, StringComparer.Ordinal) + .ToImmutableArray(); + } + + public ImmutableArray<JavaClassPathSegment> 
Segments { get; } + + public ImmutableArray<JavaModuleDescriptor> Modules { get; } + + public ImmutableArray<JavaClassDuplicate> DuplicateClasses { get; } + + public ImmutableArray<JavaSplitPackage> SplitPackages { get; } +} + +internal sealed class JavaClassPathSegment +{ + public JavaClassPathSegment( + string identifier, + string displayPath, + JavaClassPathSegmentKind kind, + JavaPackagingKind packaging, + int order, + JavaModuleDescriptor? module, + ImmutableSortedSet<string> classes, + ImmutableDictionary<string, JavaPackageFingerprint> packages, + ImmutableDictionary<string, ImmutableArray<string>> serviceDefinitions, + ImmutableDictionary<string, JavaClassLocation> classLocations) + { + Identifier = identifier ?? throw new ArgumentNullException(nameof(identifier)); + DisplayPath = displayPath ?? throw new ArgumentNullException(nameof(displayPath)); + Kind = kind; + Packaging = packaging; + Order = order; + Module = module; + Classes = classes; + Packages = packages ?? ImmutableDictionary<string, JavaPackageFingerprint>.Empty; + ServiceDefinitions = serviceDefinitions ?? ImmutableDictionary<string, ImmutableArray<string>>.Empty; + ClassLocations = classLocations ?? ImmutableDictionary<string, JavaClassLocation>.Empty; + } + + public string Identifier { get; } + + public string DisplayPath { get; } + + public JavaClassPathSegmentKind Kind { get; } + + public JavaPackagingKind Packaging { get; } + + public int Order { get; } + + public JavaModuleDescriptor? Module { get; } + + public ImmutableSortedSet<string> Classes { get; } + + public ImmutableDictionary<string, JavaPackageFingerprint> Packages { get; } + + public ImmutableDictionary<string, ImmutableArray<string>> ServiceDefinitions { get; } + + public ImmutableDictionary<string, JavaClassLocation> ClassLocations { get; } +} + +internal enum JavaClassPathSegmentKind +{ + Archive, + Directory, +} + +internal sealed record JavaPackageFingerprint(string PackageName, int ClassCount, string Fingerprint); + +internal sealed record JavaClassDuplicate(string ClassName, ImmutableArray<string> SegmentIdentifiers); + +internal sealed record JavaSplitPackage(string PackageName, ImmutableArray<string> SegmentIdentifiers); diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ClassPath/JavaClassPathBuilder.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ClassPath/JavaClassPathBuilder.cs similarity index 97% rename from src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ClassPath/JavaClassPathBuilder.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ClassPath/JavaClassPathBuilder.cs index cd1407da..7d3380c7 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ClassPath/JavaClassPathBuilder.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ClassPath/JavaClassPathBuilder.cs @@ -1,660 +1,660 @@ -using System.Collections.Immutable; -using System.Security.Cryptography; -using System.Text; - -namespace StellaOps.Scanner.Analyzers.Lang.Java.Internal.ClassPath; - -internal static class JavaClassPathBuilder -{ - private const string ClassFileSuffix = ".class"; - - public static JavaClassPathAnalysis Build(JavaWorkspace workspace, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(workspace); - - var segments = new List<JavaClassPathSegment>(); - var modules = new List<JavaModuleDescriptor>(); - var duplicateMap = new Dictionary<string, List<string>>(StringComparer.Ordinal); - var packageMap = new Dictionary<string, 
HashSet<string>>(StringComparer.Ordinal); - var order = 0; - - foreach (var archive in workspace.Archives) - { - cancellationToken.ThrowIfCancellationRequested(); - ProcessArchive(archive, segments, modules, duplicateMap, packageMap, ref order, cancellationToken); - } - - var duplicateClasses = duplicateMap - .Where(static pair => pair.Value.Count > 1) - .Select(pair => new JavaClassDuplicate( - pair.Key, - pair.Value - .Distinct(StringComparer.Ordinal) - .OrderBy(static identifier => identifier, StringComparer.Ordinal) - .ToImmutableArray())) - .ToImmutableArray(); - - var splitPackages = packageMap - .Where(static pair => pair.Value.Count > 1) - .Select(pair => new JavaSplitPackage( - pair.Key, - pair.Value - .OrderBy(static identifier => identifier, StringComparer.Ordinal) - .ToImmutableArray())) - .ToImmutableArray(); - - return new JavaClassPathAnalysis(segments, modules, duplicateClasses, splitPackages); - } - - private static void ProcessArchive( - JavaArchive archive, - List<JavaClassPathSegment> segments, - List<JavaModuleDescriptor> modules, - Dictionary<string, List<string>> duplicateMap, - Dictionary<string, HashSet<string>> packageMap, - ref int order, - CancellationToken cancellationToken) - { - var identifier = NormalizeArchivePath(archive.RelativePath); - var baseClasses = new List<JavaClassRecord>(); - var baseServices = new Dictionary<string, List<string>>(StringComparer.Ordinal); - var bootClasses = new List<JavaClassRecord>(); - var bootServices = new Dictionary<string, List<string>>(StringComparer.Ordinal); - var webClasses = new List<JavaClassRecord>(); - var webServices = new Dictionary<string, List<string>>(StringComparer.Ordinal); - var embeddedEntries = new List<JavaArchiveEntry>(); - - JavaModuleDescriptor? moduleDescriptor = ParseModuleDescriptor(archive, identifier, cancellationToken); - - foreach (var entry in archive.Entries) - { - cancellationToken.ThrowIfCancellationRequested(); - - var path = entry.EffectivePath; - if (path.Length == 0) - { - continue; - } - - if (string.Equals(path, "module-info.class", StringComparison.OrdinalIgnoreCase)) - { - continue; - } - - if (path.StartsWith("BOOT-INF/classes/", StringComparison.OrdinalIgnoreCase)) - { - var relative = path["BOOT-INF/classes/".Length..]; - - if (TryHandleServiceDescriptor(archive, entry, relative, bootServices, cancellationToken)) - { - continue; - } - - if (relative.EndsWith(ClassFileSuffix, StringComparison.OrdinalIgnoreCase)) - { - AddClassRecord(bootClasses, archive, entry, relative); - } - - continue; - } - - if (path.StartsWith("WEB-INF/classes/", StringComparison.OrdinalIgnoreCase)) - { - var relative = path["WEB-INF/classes/".Length..]; - - if (TryHandleServiceDescriptor(archive, entry, relative, webServices, cancellationToken)) - { - continue; - } - - if (relative.EndsWith(ClassFileSuffix, StringComparison.OrdinalIgnoreCase)) - { - AddClassRecord(webClasses, archive, entry, relative); - } - - continue; - } - - if (TryHandleServiceDescriptor(archive, entry, path, baseServices, cancellationToken)) - { - continue; - } - - if (path.EndsWith(ClassFileSuffix, StringComparison.OrdinalIgnoreCase)) - { - if (archive.Packaging == JavaPackagingKind.JMod && path.StartsWith("classes/", StringComparison.OrdinalIgnoreCase)) - { - var relative = path["classes/".Length..]; - AddClassRecord(baseClasses, archive, entry, relative); - } - else - { - AddClassRecord(baseClasses, archive, entry, path); - } - } - else if (path.EndsWith(".jar", StringComparison.OrdinalIgnoreCase) && IsEmbeddedLibrary(path)) - { - 
embeddedEntries.Add(entry); - } - } - - // Base archive segment. - if (baseClasses.Count > 0 || moduleDescriptor is not null) - { - AddSegment( - segments, - modules, - duplicateMap, - packageMap, - ref order, - identifier, - identifier, - JavaClassPathSegmentKind.Archive, - archive.Packaging, - moduleDescriptor, - baseClasses, - ToImmutableServiceMap(baseServices)); - } - - if (bootClasses.Count > 0) - { - var bootIdentifier = string.Concat(identifier, "!BOOT-INF/classes/"); - AddSegment( - segments, - modules, - duplicateMap, - packageMap, - ref order, - bootIdentifier, - bootIdentifier, - JavaClassPathSegmentKind.Directory, - JavaPackagingKind.Unknown, - module: null, - bootClasses, - ToImmutableServiceMap(bootServices)); - } - - if (webClasses.Count > 0) - { - var webIdentifier = string.Concat(identifier, "!WEB-INF/classes/"); - AddSegment( - segments, - modules, - duplicateMap, - packageMap, - ref order, - webIdentifier, - webIdentifier, - JavaClassPathSegmentKind.Directory, - JavaPackagingKind.Unknown, - module: null, - webClasses, - ToImmutableServiceMap(webServices)); - } - - foreach (var embedded in embeddedEntries.OrderBy(static entry => entry.EffectivePath, StringComparer.Ordinal)) - { - cancellationToken.ThrowIfCancellationRequested(); - - var childIdentifier = string.Concat(identifier, "!", embedded.EffectivePath.Replace('\\', '/')); - var analysis = AnalyzeEmbeddedArchive(archive, embedded, childIdentifier, cancellationToken); - if (analysis is null) - { - continue; - } - - AddSegment( - segments, - modules, - duplicateMap, - packageMap, - ref order, - childIdentifier, - childIdentifier, - JavaClassPathSegmentKind.Archive, - JavaPackagingKind.Jar, - analysis.Module, - analysis.Classes, - analysis.Services); - } - } - - private static JavaModuleDescriptor? ParseModuleDescriptor(JavaArchive archive, string identifier, CancellationToken cancellationToken) - { - if (!archive.TryGetEntry("module-info.class", out var entry)) - { - return null; - } - - using var stream = archive.OpenEntry(entry); - return JavaModuleInfoParser.TryParse(stream, identifier, cancellationToken); - } - - private static void AddSegment( - List<JavaClassPathSegment> segments, - List<JavaModuleDescriptor> modules, - Dictionary<string, List<string>> duplicateMap, - Dictionary<string, HashSet<string>> packageMap, - ref int order, - string identifier, - string displayPath, - JavaClassPathSegmentKind kind, - JavaPackagingKind packaging, - JavaModuleDescriptor? module, - IReadOnlyCollection<JavaClassRecord> classes, - ImmutableDictionary<string, ImmutableArray<string>> serviceDefinitions) - { - if ((classes is null || classes.Count == 0) && module is null && (serviceDefinitions is null || serviceDefinitions.Count == 0)) - { - return; - } - - var normalizedClasses = classes ?? Array.Empty<JavaClassRecord>(); - - var classSet = normalizedClasses.Count == 0 - ? ImmutableSortedSet<string>.Empty - : ImmutableSortedSet.CreateRange(StringComparer.Ordinal, normalizedClasses.Select(static record => record.ClassName)); - - var packageFingerprints = BuildPackageFingerprints(normalizedClasses); - var classLocations = normalizedClasses.Count == 0 - ? ImmutableDictionary<string, JavaClassLocation>.Empty - : normalizedClasses.ToImmutableDictionary( - static record => record.ClassName, - static record => record.Location, - StringComparer.Ordinal); - - var segment = new JavaClassPathSegment( - identifier, - displayPath, - kind, - packaging, - order++, - module, - classSet, - packageFingerprints, - serviceDefinitions ?? 
ImmutableDictionary<string, ImmutableArray<string>>.Empty, - classLocations); - - segments.Add(segment); - - if (module is not null) - { - modules.Add(module); - } - - foreach (var className in classSet) - { - if (!duplicateMap.TryGetValue(className, out var locations)) - { - locations = new List<string>(); - duplicateMap[className] = locations; - } - - locations.Add(identifier); - } - - foreach (var fingerprint in packageFingerprints.Values) - { - if (!packageMap.TryGetValue(fingerprint.PackageName, out var segmentsSet)) - { - segmentsSet = new HashSet<string>(StringComparer.Ordinal); - packageMap[fingerprint.PackageName] = segmentsSet; - } - - segmentsSet.Add(identifier); - } - } - - private static bool TryHandleServiceDescriptor( - JavaArchive archive, - JavaArchiveEntry entry, - string relativePath, - Dictionary<string, List<string>> target, - CancellationToken cancellationToken) - { - if (!TryGetServiceId(relativePath, out var serviceId)) - { - return false; - } - - try - { - var providers = ReadServiceProviders(() => archive.OpenEntry(entry), cancellationToken); - if (providers.Count == 0) - { - return true; - } - - if (!target.TryGetValue(serviceId, out var list)) - { - list = new List<string>(); - target[serviceId] = list; - } - - list.AddRange(providers); - } - catch (IOException) - { - // Ignore malformed service descriptor. - } - catch (InvalidDataException) - { - // Ignore malformed service descriptor. - } - - return true; - } - - private static bool TryGetServiceId(string relativePath, out string serviceId) - { - const string Prefix = "META-INF/services/"; - if (!relativePath.StartsWith(Prefix, StringComparison.OrdinalIgnoreCase)) - { - serviceId = string.Empty; - return false; - } - - var candidate = relativePath[Prefix.Length..].Trim(); - if (candidate.Length == 0) - { - serviceId = string.Empty; - return false; - } - - serviceId = candidate; - return true; - } - - private static List<string> ReadServiceProviders(Func<Stream> streamFactory, CancellationToken cancellationToken) - { - using var stream = streamFactory(); - using var reader = new StreamReader(stream, Encoding.UTF8, detectEncodingFromByteOrderMarks: true, leaveOpen: false); - - var providers = new List<string>(); - while (reader.ReadLine() is { } line) - { - cancellationToken.ThrowIfCancellationRequested(); - - var commentIndex = line.IndexOf('#'); - if (commentIndex >= 0) - { - line = line[..commentIndex]; - } - - line = line.Trim(); - if (line.Length == 0) - { - continue; - } - - providers.Add(line); - } - - return providers; - } - - private static ImmutableDictionary<string, ImmutableArray<string>> ToImmutableServiceMap(Dictionary<string, List<string>> source) - { - if (source.Count == 0) - { - return ImmutableDictionary<string, ImmutableArray<string>>.Empty; - } - - var builder = ImmutableDictionary.CreateBuilder<string, ImmutableArray<string>>(StringComparer.Ordinal); - - foreach (var pair in source.OrderBy(static entry => entry.Key, StringComparer.Ordinal)) - { - var cleaned = new List<string>(pair.Value.Count); - foreach (var value in pair.Value) - { - var trimmed = value?.Trim(); - if (string.IsNullOrEmpty(trimmed)) - { - continue; - } - - cleaned.Add(trimmed); - } - - if (cleaned.Count == 0) - { - continue; - } - - builder[pair.Key] = ImmutableArray.CreateRange(cleaned); - } - - return builder.ToImmutable(); - } - - private static ImmutableDictionary<string, JavaPackageFingerprint> BuildPackageFingerprints(IReadOnlyCollection<JavaClassRecord> classes) - { - if (classes is null || classes.Count == 0) - { 
- return ImmutableDictionary<string, JavaPackageFingerprint>.Empty; - } - - var packages = classes - .GroupBy(static record => record.PackageName, StringComparer.Ordinal) - .OrderBy(static group => group.Key, StringComparer.Ordinal); - - var builder = ImmutableDictionary.CreateBuilder<string, JavaPackageFingerprint>(StringComparer.Ordinal); - - foreach (var group in packages) - { - var simpleNames = group - .Select(static record => record.SimpleName) - .OrderBy(static name => name, StringComparer.Ordinal) - .ToArray(); - - var fingerprint = ComputeFingerprint(simpleNames); - builder[group.Key] = new JavaPackageFingerprint(group.Key, simpleNames.Length, fingerprint); - } - - return builder.ToImmutable(); - } - - private static string ComputeFingerprint(IEnumerable<string> values) - { - using var sha = SHA256.Create(); - var buffer = string.Join('\n', values); - var bytes = Encoding.UTF8.GetBytes(buffer); - return Convert.ToHexString(sha.ComputeHash(bytes)).ToLowerInvariant(); - } - - private static EmbeddedArchiveAnalysis? AnalyzeEmbeddedArchive(JavaArchive parentArchive, JavaArchiveEntry entry, string identifier, CancellationToken cancellationToken) - { - using var sourceStream = parentArchive.OpenEntry(entry); - using var buffer = new MemoryStream(); - sourceStream.CopyTo(buffer); - buffer.Position = 0; - - using var zip = new ZipArchive(buffer, ZipArchiveMode.Read, leaveOpen: true); - - var candidates = new Dictionary<string, EmbeddedClassCandidate>(StringComparer.Ordinal); - var services = new Dictionary<string, EmbeddedServiceCandidate>(StringComparer.Ordinal); - JavaModuleDescriptor? moduleDescriptor = null; - - foreach (var zipEntry in zip.Entries) - { - cancellationToken.ThrowIfCancellationRequested(); - - var normalized = JavaZipEntryUtilities.NormalizeEntryName(zipEntry.FullName); - if (normalized.Length == 0) - { - continue; - } - - if (string.Equals(normalized, "module-info.class", StringComparison.OrdinalIgnoreCase)) - { - using var moduleStream = zipEntry.Open(); - moduleDescriptor = JavaModuleInfoParser.TryParse(moduleStream, identifier, cancellationToken); - continue; - } - - var effectivePath = normalized; - var version = 0; - if (JavaZipEntryUtilities.TryParseMultiReleasePath(normalized, out var candidatePath, out var candidateVersion)) - { - effectivePath = candidatePath; - version = candidateVersion; - } - - if (string.Equals(effectivePath, "module-info.class", StringComparison.OrdinalIgnoreCase)) - { - using var moduleStream = zipEntry.Open(); - moduleDescriptor = JavaModuleInfoParser.TryParse(moduleStream, identifier, cancellationToken); - continue; - } - - if (TryGetServiceId(effectivePath, out var serviceId)) - { - try - { - var providers = ReadServiceProviders(() => zipEntry.Open(), cancellationToken); - if (providers.Count == 0) - { - continue; - } - - var providerArray = ImmutableArray.CreateRange(providers); - if (!services.TryGetValue(serviceId, out var existingService) - || version > existingService.Version - || (version == existingService.Version && string.CompareOrdinal(zipEntry.FullName, existingService.OriginalPath) < 0)) - { - services[serviceId] = new EmbeddedServiceCandidate(zipEntry.FullName, version, providerArray); - } - } - catch (IOException) - { - // Ignore malformed descriptor. - } - catch (InvalidDataException) - { - // Ignore malformed descriptor. 
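// A minimal sketch of the precedence rule the candidate selection around here applies to both
// class entries and META-INF/services descriptors in embedded archives: a higher multi-release
// version (META-INF/versions/<n>/) always wins, and equal versions fall back to ordinal
// comparison of the original zip entry path so the choice is deterministic. The helper name and
// the example paths below are illustrative assumptions, not part of this patch.
static bool ShouldReplaceCandidate(int newVersion, string newPath, int existingVersion, string existingPath)
    => newVersion > existingVersion
       || (newVersion == existingVersion && string.CompareOrdinal(newPath, existingPath) < 0);

// e.g. ShouldReplaceCandidate(11, "META-INF/versions/11/com/Foo.class", 0, "com/Foo.class") == true,
// so the Java 11 variant of com.Foo replaces the base entry for that effective path.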
- } - - continue; - } - - if (!effectivePath.EndsWith(ClassFileSuffix, StringComparison.OrdinalIgnoreCase)) - { - continue; - } - - if (!candidates.TryGetValue(effectivePath, out var existing) || version > existing.Version || (version == existing.Version && string.CompareOrdinal(zipEntry.FullName, existing.OriginalPath) < 0)) - { - candidates[effectivePath] = new EmbeddedClassCandidate(zipEntry.FullName, version); - } - } - - var classes = new List<JavaClassRecord>(candidates.Count); - foreach (var pair in candidates) - { - AddClassRecord(classes, parentArchive, entry, pair.Key, pair.Value.OriginalPath); - } - - var serviceMap = services.Count == 0 - ? ImmutableDictionary<string, ImmutableArray<string>>.Empty - : services - .OrderBy(static pair => pair.Key, StringComparer.Ordinal) - .ToImmutableDictionary( - static pair => pair.Key, - static pair => pair.Value.Providers, - StringComparer.Ordinal); - - if (classes.Count == 0 && moduleDescriptor is null && serviceMap.Count == 0) - { - return null; - } - - return new EmbeddedArchiveAnalysis(classes, moduleDescriptor, serviceMap); - } - - private static bool IsEmbeddedLibrary(string path) - { - if (path.StartsWith("BOOT-INF/lib/", StringComparison.OrdinalIgnoreCase)) - { - return true; - } - - if (path.StartsWith("WEB-INF/lib/", StringComparison.OrdinalIgnoreCase)) - { - return true; - } - - if (path.StartsWith("lib/", StringComparison.OrdinalIgnoreCase) || path.StartsWith("APP-INF/lib/", StringComparison.OrdinalIgnoreCase)) - { - return true; - } - - return false; - } - - private static void AddClassRecord( - ICollection<JavaClassRecord> target, - JavaArchive archive, - JavaArchiveEntry entry, - string path, - string? nestedClassPath = null) - { - if (string.IsNullOrEmpty(path) || !path.EndsWith(ClassFileSuffix, StringComparison.OrdinalIgnoreCase)) - { - return; - } - - var withoutExtension = path[..^ClassFileSuffix.Length]; - if (withoutExtension.Length == 0) - { - return; - } - - var className = withoutExtension.Replace('/', '.'); - var lastDot = className.LastIndexOf('.'); - string packageName; - string simpleName; - - if (lastDot >= 0) - { - packageName = className[..lastDot]; - simpleName = className[(lastDot + 1)..]; - } - else - { - packageName = string.Empty; - simpleName = className; - } - - var location = nestedClassPath is null - ? JavaClassLocation.ForArchive(archive, entry) - : JavaClassLocation.ForEmbedded(archive, entry, nestedClassPath); - - target.Add(new JavaClassRecord(className, packageName, simpleName, location)); - } - - private static string NormalizeArchivePath(string relativePath) - { - if (string.IsNullOrEmpty(relativePath)) - { - return "."; - } - - var normalized = relativePath.Replace('\\', '/'); - return normalized.Length == 0 ? "." : normalized; - } - - private readonly record struct JavaClassRecord( - string ClassName, - string PackageName, - string SimpleName, - JavaClassLocation Location); - - private sealed record EmbeddedArchiveAnalysis( - IReadOnlyCollection<JavaClassRecord> Classes, - JavaModuleDescriptor? 
Module, - ImmutableDictionary<string, ImmutableArray<string>> Services); - - private readonly record struct EmbeddedClassCandidate(string OriginalPath, int Version); - - private readonly record struct EmbeddedServiceCandidate(string OriginalPath, int Version, ImmutableArray<string> Providers); -} +using System.Collections.Immutable; +using System.Security.Cryptography; +using System.Text; + +namespace StellaOps.Scanner.Analyzers.Lang.Java.Internal.ClassPath; + +internal static class JavaClassPathBuilder +{ + private const string ClassFileSuffix = ".class"; + + public static JavaClassPathAnalysis Build(JavaWorkspace workspace, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(workspace); + + var segments = new List<JavaClassPathSegment>(); + var modules = new List<JavaModuleDescriptor>(); + var duplicateMap = new Dictionary<string, List<string>>(StringComparer.Ordinal); + var packageMap = new Dictionary<string, HashSet<string>>(StringComparer.Ordinal); + var order = 0; + + foreach (var archive in workspace.Archives) + { + cancellationToken.ThrowIfCancellationRequested(); + ProcessArchive(archive, segments, modules, duplicateMap, packageMap, ref order, cancellationToken); + } + + var duplicateClasses = duplicateMap + .Where(static pair => pair.Value.Count > 1) + .Select(pair => new JavaClassDuplicate( + pair.Key, + pair.Value + .Distinct(StringComparer.Ordinal) + .OrderBy(static identifier => identifier, StringComparer.Ordinal) + .ToImmutableArray())) + .ToImmutableArray(); + + var splitPackages = packageMap + .Where(static pair => pair.Value.Count > 1) + .Select(pair => new JavaSplitPackage( + pair.Key, + pair.Value + .OrderBy(static identifier => identifier, StringComparer.Ordinal) + .ToImmutableArray())) + .ToImmutableArray(); + + return new JavaClassPathAnalysis(segments, modules, duplicateClasses, splitPackages); + } + + private static void ProcessArchive( + JavaArchive archive, + List<JavaClassPathSegment> segments, + List<JavaModuleDescriptor> modules, + Dictionary<string, List<string>> duplicateMap, + Dictionary<string, HashSet<string>> packageMap, + ref int order, + CancellationToken cancellationToken) + { + var identifier = NormalizeArchivePath(archive.RelativePath); + var baseClasses = new List<JavaClassRecord>(); + var baseServices = new Dictionary<string, List<string>>(StringComparer.Ordinal); + var bootClasses = new List<JavaClassRecord>(); + var bootServices = new Dictionary<string, List<string>>(StringComparer.Ordinal); + var webClasses = new List<JavaClassRecord>(); + var webServices = new Dictionary<string, List<string>>(StringComparer.Ordinal); + var embeddedEntries = new List<JavaArchiveEntry>(); + + JavaModuleDescriptor? 
moduleDescriptor = ParseModuleDescriptor(archive, identifier, cancellationToken); + + foreach (var entry in archive.Entries) + { + cancellationToken.ThrowIfCancellationRequested(); + + var path = entry.EffectivePath; + if (path.Length == 0) + { + continue; + } + + if (string.Equals(path, "module-info.class", StringComparison.OrdinalIgnoreCase)) + { + continue; + } + + if (path.StartsWith("BOOT-INF/classes/", StringComparison.OrdinalIgnoreCase)) + { + var relative = path["BOOT-INF/classes/".Length..]; + + if (TryHandleServiceDescriptor(archive, entry, relative, bootServices, cancellationToken)) + { + continue; + } + + if (relative.EndsWith(ClassFileSuffix, StringComparison.OrdinalIgnoreCase)) + { + AddClassRecord(bootClasses, archive, entry, relative); + } + + continue; + } + + if (path.StartsWith("WEB-INF/classes/", StringComparison.OrdinalIgnoreCase)) + { + var relative = path["WEB-INF/classes/".Length..]; + + if (TryHandleServiceDescriptor(archive, entry, relative, webServices, cancellationToken)) + { + continue; + } + + if (relative.EndsWith(ClassFileSuffix, StringComparison.OrdinalIgnoreCase)) + { + AddClassRecord(webClasses, archive, entry, relative); + } + + continue; + } + + if (TryHandleServiceDescriptor(archive, entry, path, baseServices, cancellationToken)) + { + continue; + } + + if (path.EndsWith(ClassFileSuffix, StringComparison.OrdinalIgnoreCase)) + { + if (archive.Packaging == JavaPackagingKind.JMod && path.StartsWith("classes/", StringComparison.OrdinalIgnoreCase)) + { + var relative = path["classes/".Length..]; + AddClassRecord(baseClasses, archive, entry, relative); + } + else + { + AddClassRecord(baseClasses, archive, entry, path); + } + } + else if (path.EndsWith(".jar", StringComparison.OrdinalIgnoreCase) && IsEmbeddedLibrary(path)) + { + embeddedEntries.Add(entry); + } + } + + // Base archive segment. 
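// A minimal sketch of the segment identifiers this method emits for a hypothetical fat jar at
// "lib/app.jar" (the jar and dependency names are assumptions for illustration only):
//
//   "lib/app.jar"                         -> base archive segment (JavaClassPathSegmentKind.Archive, archive packaging)
//   "lib/app.jar!BOOT-INF/classes/"       -> Spring Boot application classes (Directory kind, Unknown packaging)
//   "lib/app.jar!WEB-INF/classes/"        -> servlet application classes (Directory kind, Unknown packaging)
//   "lib/app.jar!BOOT-INF/lib/dep.jar"    -> embedded library handled by AnalyzeEmbeddedArchive (Archive kind, Jar packaging)
//
// The "!" separator and the trailing "/" on directory-style segments mirror the string.Concat
// calls below, and the duplicate-class and split-package reports reference these identifiers verbatim.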
+ if (baseClasses.Count > 0 || moduleDescriptor is not null) + { + AddSegment( + segments, + modules, + duplicateMap, + packageMap, + ref order, + identifier, + identifier, + JavaClassPathSegmentKind.Archive, + archive.Packaging, + moduleDescriptor, + baseClasses, + ToImmutableServiceMap(baseServices)); + } + + if (bootClasses.Count > 0) + { + var bootIdentifier = string.Concat(identifier, "!BOOT-INF/classes/"); + AddSegment( + segments, + modules, + duplicateMap, + packageMap, + ref order, + bootIdentifier, + bootIdentifier, + JavaClassPathSegmentKind.Directory, + JavaPackagingKind.Unknown, + module: null, + bootClasses, + ToImmutableServiceMap(bootServices)); + } + + if (webClasses.Count > 0) + { + var webIdentifier = string.Concat(identifier, "!WEB-INF/classes/"); + AddSegment( + segments, + modules, + duplicateMap, + packageMap, + ref order, + webIdentifier, + webIdentifier, + JavaClassPathSegmentKind.Directory, + JavaPackagingKind.Unknown, + module: null, + webClasses, + ToImmutableServiceMap(webServices)); + } + + foreach (var embedded in embeddedEntries.OrderBy(static entry => entry.EffectivePath, StringComparer.Ordinal)) + { + cancellationToken.ThrowIfCancellationRequested(); + + var childIdentifier = string.Concat(identifier, "!", embedded.EffectivePath.Replace('\\', '/')); + var analysis = AnalyzeEmbeddedArchive(archive, embedded, childIdentifier, cancellationToken); + if (analysis is null) + { + continue; + } + + AddSegment( + segments, + modules, + duplicateMap, + packageMap, + ref order, + childIdentifier, + childIdentifier, + JavaClassPathSegmentKind.Archive, + JavaPackagingKind.Jar, + analysis.Module, + analysis.Classes, + analysis.Services); + } + } + + private static JavaModuleDescriptor? ParseModuleDescriptor(JavaArchive archive, string identifier, CancellationToken cancellationToken) + { + if (!archive.TryGetEntry("module-info.class", out var entry)) + { + return null; + } + + using var stream = archive.OpenEntry(entry); + return JavaModuleInfoParser.TryParse(stream, identifier, cancellationToken); + } + + private static void AddSegment( + List<JavaClassPathSegment> segments, + List<JavaModuleDescriptor> modules, + Dictionary<string, List<string>> duplicateMap, + Dictionary<string, HashSet<string>> packageMap, + ref int order, + string identifier, + string displayPath, + JavaClassPathSegmentKind kind, + JavaPackagingKind packaging, + JavaModuleDescriptor? module, + IReadOnlyCollection<JavaClassRecord> classes, + ImmutableDictionary<string, ImmutableArray<string>> serviceDefinitions) + { + if ((classes is null || classes.Count == 0) && module is null && (serviceDefinitions is null || serviceDefinitions.Count == 0)) + { + return; + } + + var normalizedClasses = classes ?? Array.Empty<JavaClassRecord>(); + + var classSet = normalizedClasses.Count == 0 + ? ImmutableSortedSet<string>.Empty + : ImmutableSortedSet.CreateRange(StringComparer.Ordinal, normalizedClasses.Select(static record => record.ClassName)); + + var packageFingerprints = BuildPackageFingerprints(normalizedClasses); + var classLocations = normalizedClasses.Count == 0 + ? ImmutableDictionary<string, JavaClassLocation>.Empty + : normalizedClasses.ToImmutableDictionary( + static record => record.ClassName, + static record => record.Location, + StringComparer.Ordinal); + + var segment = new JavaClassPathSegment( + identifier, + displayPath, + kind, + packaging, + order++, + module, + classSet, + packageFingerprints, + serviceDefinitions ?? 
ImmutableDictionary<string, ImmutableArray<string>>.Empty, + classLocations); + + segments.Add(segment); + + if (module is not null) + { + modules.Add(module); + } + + foreach (var className in classSet) + { + if (!duplicateMap.TryGetValue(className, out var locations)) + { + locations = new List<string>(); + duplicateMap[className] = locations; + } + + locations.Add(identifier); + } + + foreach (var fingerprint in packageFingerprints.Values) + { + if (!packageMap.TryGetValue(fingerprint.PackageName, out var segmentsSet)) + { + segmentsSet = new HashSet<string>(StringComparer.Ordinal); + packageMap[fingerprint.PackageName] = segmentsSet; + } + + segmentsSet.Add(identifier); + } + } + + private static bool TryHandleServiceDescriptor( + JavaArchive archive, + JavaArchiveEntry entry, + string relativePath, + Dictionary<string, List<string>> target, + CancellationToken cancellationToken) + { + if (!TryGetServiceId(relativePath, out var serviceId)) + { + return false; + } + + try + { + var providers = ReadServiceProviders(() => archive.OpenEntry(entry), cancellationToken); + if (providers.Count == 0) + { + return true; + } + + if (!target.TryGetValue(serviceId, out var list)) + { + list = new List<string>(); + target[serviceId] = list; + } + + list.AddRange(providers); + } + catch (IOException) + { + // Ignore malformed service descriptor. + } + catch (InvalidDataException) + { + // Ignore malformed service descriptor. + } + + return true; + } + + private static bool TryGetServiceId(string relativePath, out string serviceId) + { + const string Prefix = "META-INF/services/"; + if (!relativePath.StartsWith(Prefix, StringComparison.OrdinalIgnoreCase)) + { + serviceId = string.Empty; + return false; + } + + var candidate = relativePath[Prefix.Length..].Trim(); + if (candidate.Length == 0) + { + serviceId = string.Empty; + return false; + } + + serviceId = candidate; + return true; + } + + private static List<string> ReadServiceProviders(Func<Stream> streamFactory, CancellationToken cancellationToken) + { + using var stream = streamFactory(); + using var reader = new StreamReader(stream, Encoding.UTF8, detectEncodingFromByteOrderMarks: true, leaveOpen: false); + + var providers = new List<string>(); + while (reader.ReadLine() is { } line) + { + cancellationToken.ThrowIfCancellationRequested(); + + var commentIndex = line.IndexOf('#'); + if (commentIndex >= 0) + { + line = line[..commentIndex]; + } + + line = line.Trim(); + if (line.Length == 0) + { + continue; + } + + providers.Add(line); + } + + return providers; + } + + private static ImmutableDictionary<string, ImmutableArray<string>> ToImmutableServiceMap(Dictionary<string, List<string>> source) + { + if (source.Count == 0) + { + return ImmutableDictionary<string, ImmutableArray<string>>.Empty; + } + + var builder = ImmutableDictionary.CreateBuilder<string, ImmutableArray<string>>(StringComparer.Ordinal); + + foreach (var pair in source.OrderBy(static entry => entry.Key, StringComparer.Ordinal)) + { + var cleaned = new List<string>(pair.Value.Count); + foreach (var value in pair.Value) + { + var trimmed = value?.Trim(); + if (string.IsNullOrEmpty(trimmed)) + { + continue; + } + + cleaned.Add(trimmed); + } + + if (cleaned.Count == 0) + { + continue; + } + + builder[pair.Key] = ImmutableArray.CreateRange(cleaned); + } + + return builder.ToImmutable(); + } + + private static ImmutableDictionary<string, JavaPackageFingerprint> BuildPackageFingerprints(IReadOnlyCollection<JavaClassRecord> classes) + { + if (classes is null || classes.Count == 0) + { 
+ return ImmutableDictionary<string, JavaPackageFingerprint>.Empty; + } + + var packages = classes + .GroupBy(static record => record.PackageName, StringComparer.Ordinal) + .OrderBy(static group => group.Key, StringComparer.Ordinal); + + var builder = ImmutableDictionary.CreateBuilder<string, JavaPackageFingerprint>(StringComparer.Ordinal); + + foreach (var group in packages) + { + var simpleNames = group + .Select(static record => record.SimpleName) + .OrderBy(static name => name, StringComparer.Ordinal) + .ToArray(); + + var fingerprint = ComputeFingerprint(simpleNames); + builder[group.Key] = new JavaPackageFingerprint(group.Key, simpleNames.Length, fingerprint); + } + + return builder.ToImmutable(); + } + + private static string ComputeFingerprint(IEnumerable<string> values) + { + using var sha = SHA256.Create(); + var buffer = string.Join('\n', values); + var bytes = Encoding.UTF8.GetBytes(buffer); + return Convert.ToHexString(sha.ComputeHash(bytes)).ToLowerInvariant(); + } + + private static EmbeddedArchiveAnalysis? AnalyzeEmbeddedArchive(JavaArchive parentArchive, JavaArchiveEntry entry, string identifier, CancellationToken cancellationToken) + { + using var sourceStream = parentArchive.OpenEntry(entry); + using var buffer = new MemoryStream(); + sourceStream.CopyTo(buffer); + buffer.Position = 0; + + using var zip = new ZipArchive(buffer, ZipArchiveMode.Read, leaveOpen: true); + + var candidates = new Dictionary<string, EmbeddedClassCandidate>(StringComparer.Ordinal); + var services = new Dictionary<string, EmbeddedServiceCandidate>(StringComparer.Ordinal); + JavaModuleDescriptor? moduleDescriptor = null; + + foreach (var zipEntry in zip.Entries) + { + cancellationToken.ThrowIfCancellationRequested(); + + var normalized = JavaZipEntryUtilities.NormalizeEntryName(zipEntry.FullName); + if (normalized.Length == 0) + { + continue; + } + + if (string.Equals(normalized, "module-info.class", StringComparison.OrdinalIgnoreCase)) + { + using var moduleStream = zipEntry.Open(); + moduleDescriptor = JavaModuleInfoParser.TryParse(moduleStream, identifier, cancellationToken); + continue; + } + + var effectivePath = normalized; + var version = 0; + if (JavaZipEntryUtilities.TryParseMultiReleasePath(normalized, out var candidatePath, out var candidateVersion)) + { + effectivePath = candidatePath; + version = candidateVersion; + } + + if (string.Equals(effectivePath, "module-info.class", StringComparison.OrdinalIgnoreCase)) + { + using var moduleStream = zipEntry.Open(); + moduleDescriptor = JavaModuleInfoParser.TryParse(moduleStream, identifier, cancellationToken); + continue; + } + + if (TryGetServiceId(effectivePath, out var serviceId)) + { + try + { + var providers = ReadServiceProviders(() => zipEntry.Open(), cancellationToken); + if (providers.Count == 0) + { + continue; + } + + var providerArray = ImmutableArray.CreateRange(providers); + if (!services.TryGetValue(serviceId, out var existingService) + || version > existingService.Version + || (version == existingService.Version && string.CompareOrdinal(zipEntry.FullName, existingService.OriginalPath) < 0)) + { + services[serviceId] = new EmbeddedServiceCandidate(zipEntry.FullName, version, providerArray); + } + } + catch (IOException) + { + // Ignore malformed descriptor. + } + catch (InvalidDataException) + { + // Ignore malformed descriptor. 
+ } + + continue; + } + + if (!effectivePath.EndsWith(ClassFileSuffix, StringComparison.OrdinalIgnoreCase)) + { + continue; + } + + if (!candidates.TryGetValue(effectivePath, out var existing) || version > existing.Version || (version == existing.Version && string.CompareOrdinal(zipEntry.FullName, existing.OriginalPath) < 0)) + { + candidates[effectivePath] = new EmbeddedClassCandidate(zipEntry.FullName, version); + } + } + + var classes = new List<JavaClassRecord>(candidates.Count); + foreach (var pair in candidates) + { + AddClassRecord(classes, parentArchive, entry, pair.Key, pair.Value.OriginalPath); + } + + var serviceMap = services.Count == 0 + ? ImmutableDictionary<string, ImmutableArray<string>>.Empty + : services + .OrderBy(static pair => pair.Key, StringComparer.Ordinal) + .ToImmutableDictionary( + static pair => pair.Key, + static pair => pair.Value.Providers, + StringComparer.Ordinal); + + if (classes.Count == 0 && moduleDescriptor is null && serviceMap.Count == 0) + { + return null; + } + + return new EmbeddedArchiveAnalysis(classes, moduleDescriptor, serviceMap); + } + + private static bool IsEmbeddedLibrary(string path) + { + if (path.StartsWith("BOOT-INF/lib/", StringComparison.OrdinalIgnoreCase)) + { + return true; + } + + if (path.StartsWith("WEB-INF/lib/", StringComparison.OrdinalIgnoreCase)) + { + return true; + } + + if (path.StartsWith("lib/", StringComparison.OrdinalIgnoreCase) || path.StartsWith("APP-INF/lib/", StringComparison.OrdinalIgnoreCase)) + { + return true; + } + + return false; + } + + private static void AddClassRecord( + ICollection<JavaClassRecord> target, + JavaArchive archive, + JavaArchiveEntry entry, + string path, + string? nestedClassPath = null) + { + if (string.IsNullOrEmpty(path) || !path.EndsWith(ClassFileSuffix, StringComparison.OrdinalIgnoreCase)) + { + return; + } + + var withoutExtension = path[..^ClassFileSuffix.Length]; + if (withoutExtension.Length == 0) + { + return; + } + + var className = withoutExtension.Replace('/', '.'); + var lastDot = className.LastIndexOf('.'); + string packageName; + string simpleName; + + if (lastDot >= 0) + { + packageName = className[..lastDot]; + simpleName = className[(lastDot + 1)..]; + } + else + { + packageName = string.Empty; + simpleName = className; + } + + var location = nestedClassPath is null + ? JavaClassLocation.ForArchive(archive, entry) + : JavaClassLocation.ForEmbedded(archive, entry, nestedClassPath); + + target.Add(new JavaClassRecord(className, packageName, simpleName, location)); + } + + private static string NormalizeArchivePath(string relativePath) + { + if (string.IsNullOrEmpty(relativePath)) + { + return "."; + } + + var normalized = relativePath.Replace('\\', '/'); + return normalized.Length == 0 ? "." : normalized; + } + + private readonly record struct JavaClassRecord( + string ClassName, + string PackageName, + string SimpleName, + JavaClassLocation Location); + + private sealed record EmbeddedArchiveAnalysis( + IReadOnlyCollection<JavaClassRecord> Classes, + JavaModuleDescriptor? 
Module,
+        ImmutableDictionary<string, ImmutableArray<string>> Services);
+
+    private readonly record struct EmbeddedClassCandidate(string OriginalPath, int Version);
+
+    private readonly record struct EmbeddedServiceCandidate(string OriginalPath, int Version, ImmutableArray<string> Providers);
+}
diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ClassPath/JavaModuleDescriptor.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ClassPath/JavaModuleDescriptor.cs
similarity index 97%
rename from src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ClassPath/JavaModuleDescriptor.cs
rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ClassPath/JavaModuleDescriptor.cs
index b0464584..b28f09ba 100644
--- a/src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ClassPath/JavaModuleDescriptor.cs
+++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ClassPath/JavaModuleDescriptor.cs
@@ -1,22 +1,22 @@
-using System.Collections.Immutable;
-
-namespace StellaOps.Scanner.Analyzers.Lang.Java.Internal.ClassPath;
-
-internal sealed record JavaModuleDescriptor(
-    string Name,
-    string? Version,
-    ushort Flags,
-    ImmutableArray<JavaModuleRequires> Requires,
-    ImmutableArray<JavaModuleExports> Exports,
-    ImmutableArray<JavaModuleOpens> Opens,
-    ImmutableArray<string> Uses,
-    ImmutableArray<JavaModuleProvides> Provides,
-    string Source);
-
-internal sealed record JavaModuleRequires(string Name, ushort Flags, string? Version);
-
-internal sealed record JavaModuleExports(string Package, ushort Flags, ImmutableArray<string> Targets);
-
-internal sealed record JavaModuleOpens(string Package, ushort Flags, ImmutableArray<string> Targets);
-
-internal sealed record JavaModuleProvides(string Service, ImmutableArray<string> Implementations);
+using System.Collections.Immutable;
+
+namespace StellaOps.Scanner.Analyzers.Lang.Java.Internal.ClassPath;
+
+internal sealed record JavaModuleDescriptor(
+    string Name,
+    string? Version,
+    ushort Flags,
+    ImmutableArray<JavaModuleRequires> Requires,
+    ImmutableArray<JavaModuleExports> Exports,
+    ImmutableArray<JavaModuleOpens> Opens,
+    ImmutableArray<string> Uses,
+    ImmutableArray<JavaModuleProvides> Provides,
+    string Source);
+
+internal sealed record JavaModuleRequires(string Name, ushort Flags, string? Version);
+
+internal sealed record JavaModuleExports(string Package, ushort Flags, ImmutableArray<string> Targets);
+
+internal sealed record JavaModuleOpens(string Package, ushort Flags, ImmutableArray<string> Targets);
+
+internal sealed record JavaModuleProvides(string Service, ImmutableArray<string> Implementations);
diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ClassPath/JavaModuleInfoParser.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ClassPath/JavaModuleInfoParser.cs
similarity index 97%
rename from src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ClassPath/JavaModuleInfoParser.cs
rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ClassPath/JavaModuleInfoParser.cs
index ce6db666..b0dadd02 100644
--- a/src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ClassPath/JavaModuleInfoParser.cs
+++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ClassPath/JavaModuleInfoParser.cs
@@ -1,367 +1,367 @@
-using System.Buffers.Binary;
-using System.Collections.Immutable;
-using System.Text;
-
-namespace StellaOps.Scanner.Analyzers.Lang.Java.Internal.ClassPath;
-
-internal static class JavaModuleInfoParser
-{
-    public static JavaModuleDescriptor? TryParse(Stream stream, string sourceIdentifier, CancellationToken cancellationToken)
-    {
-        ArgumentNullException.ThrowIfNull(stream);
-
-        using var reader = new BinaryReader(stream, Encoding.UTF8, leaveOpen: true);
-
-        if (!TryReadMagic(reader, out var magic) || magic != 0xCAFEBABE)
-        {
-            return null;
-        }
-
-        // Skip minor/major version.
-        _ = ReadUInt16(reader);
-        _ = ReadUInt16(reader);
-
-        var constantPool = ReadConstantPool(reader);
-        cancellationToken.ThrowIfCancellationRequested();
-
-        // access_flags, this_class, super_class
-        _ = ReadUInt16(reader);
-        _ = ReadUInt16(reader);
-        _ = ReadUInt16(reader);
-
-        // interfaces
-        var interfacesCount = ReadUInt16(reader);
-        SkipBytes(reader, interfacesCount * 2);
-
-        // fields
-        var fieldsCount = ReadUInt16(reader);
-        SkipMembers(reader, fieldsCount);
-
-        // methods
-        var methodsCount = ReadUInt16(reader);
-        SkipMembers(reader, methodsCount);
-
-        var attributesCount = ReadUInt16(reader);
-        JavaModuleDescriptor? descriptor = null;
-
-        for (var i = 0; i < attributesCount; i++)
-        {
-            cancellationToken.ThrowIfCancellationRequested();
-
-            var nameIndex = ReadUInt16(reader);
-            var length = ReadUInt32(reader);
-            var attributeName = GetUtf8(constantPool, nameIndex);
-
-            if (string.Equals(attributeName, "Module", StringComparison.Ordinal))
-            {
-                descriptor = ParseModuleAttribute(reader, constantPool, sourceIdentifier);
-            }
-            else
-            {
-                SkipBytes(reader, (int)length);
-            }
-        }
-
-        return descriptor;
-    }
-
-    private static JavaModuleDescriptor ParseModuleAttribute(BinaryReader reader, ConstantPoolEntry[] constantPool, string sourceIdentifier)
-    {
-        var moduleNameIndex = ReadUInt16(reader);
-        var moduleFlags = ReadUInt16(reader);
-        var moduleVersionIndex = ReadUInt16(reader);
-
-        var moduleName = GetModuleName(constantPool, moduleNameIndex);
-        var moduleVersion = moduleVersionIndex != 0 ?
GetUtf8(constantPool, moduleVersionIndex) : null; - - var requiresCount = ReadUInt16(reader); - var requiresBuilder = ImmutableArray.CreateBuilder<JavaModuleRequires>(requiresCount); - for (var i = 0; i < requiresCount; i++) - { - var requiresIndex = ReadUInt16(reader); - var requiresFlags = ReadUInt16(reader); - var requiresVersionIndex = ReadUInt16(reader); - var requiresName = GetModuleName(constantPool, requiresIndex); - var requiresVersion = requiresVersionIndex != 0 ? GetUtf8(constantPool, requiresVersionIndex) : null; - requiresBuilder.Add(new JavaModuleRequires(requiresName, requiresFlags, requiresVersion)); - } - - var exportsCount = ReadUInt16(reader); - var exportsBuilder = ImmutableArray.CreateBuilder<JavaModuleExports>(exportsCount); - for (var i = 0; i < exportsCount; i++) - { - var exportsIndex = ReadUInt16(reader); - var exportsFlags = ReadUInt16(reader); - var exportsToCount = ReadUInt16(reader); - - var targetsBuilder = ImmutableArray.CreateBuilder<string>(exportsToCount); - for (var j = 0; j < exportsToCount; j++) - { - var targetIndex = ReadUInt16(reader); - targetsBuilder.Add(GetModuleName(constantPool, targetIndex)); - } - - var packageName = GetPackageName(constantPool, exportsIndex); - exportsBuilder.Add(new JavaModuleExports(packageName, exportsFlags, targetsBuilder.ToImmutable())); - } - - var opensCount = ReadUInt16(reader); - var opensBuilder = ImmutableArray.CreateBuilder<JavaModuleOpens>(opensCount); - for (var i = 0; i < opensCount; i++) - { - var opensIndex = ReadUInt16(reader); - var opensFlags = ReadUInt16(reader); - var opensToCount = ReadUInt16(reader); - - var targetsBuilder = ImmutableArray.CreateBuilder<string>(opensToCount); - for (var j = 0; j < opensToCount; j++) - { - var targetIndex = ReadUInt16(reader); - targetsBuilder.Add(GetModuleName(constantPool, targetIndex)); - } - - var packageName = GetPackageName(constantPool, opensIndex); - opensBuilder.Add(new JavaModuleOpens(packageName, opensFlags, targetsBuilder.ToImmutable())); - } - - var usesCount = ReadUInt16(reader); - var usesBuilder = ImmutableArray.CreateBuilder<string>(usesCount); - for (var i = 0; i < usesCount; i++) - { - var classIndex = ReadUInt16(reader); - usesBuilder.Add(GetClassName(constantPool, classIndex)); - } - - var providesCount = ReadUInt16(reader); - var providesBuilder = ImmutableArray.CreateBuilder<JavaModuleProvides>(providesCount); - for (var i = 0; i < providesCount; i++) - { - var serviceIndex = ReadUInt16(reader); - var providesWithCount = ReadUInt16(reader); - - var implementationsBuilder = ImmutableArray.CreateBuilder<string>(providesWithCount); - for (var j = 0; j < providesWithCount; j++) - { - var implIndex = ReadUInt16(reader); - implementationsBuilder.Add(GetClassName(constantPool, implIndex)); - } - - var serviceName = GetClassName(constantPool, serviceIndex); - providesBuilder.Add(new JavaModuleProvides(serviceName, implementationsBuilder.ToImmutable())); - } - - return new JavaModuleDescriptor( - moduleName, - moduleVersion, - moduleFlags, - requiresBuilder.ToImmutable(), - exportsBuilder.ToImmutable(), - opensBuilder.ToImmutable(), - usesBuilder.ToImmutable(), - providesBuilder.ToImmutable(), - sourceIdentifier); - } - - private static ConstantPoolEntry[] ReadConstantPool(BinaryReader reader) - { - var count = ReadUInt16(reader); - var pool = new ConstantPoolEntry[count]; - - var index = 1; - while (index < count) - { - var tag = reader.ReadByte(); - switch (tag) - { - case 1: // Utf8 - { - var length = ReadUInt16(reader); - var bytes = 
reader.ReadBytes(length); - var value = Encoding.UTF8.GetString(bytes); - pool[index] = new Utf8Entry(value); - break; - } - case 7: // Class - { - var nameIndex = ReadUInt16(reader); - pool[index] = new ClassEntry(nameIndex); - break; - } - case 8: // String - pool[index] = new SimpleEntry(tag); - reader.BaseStream.Seek(2, SeekOrigin.Current); - break; - case 3: // Integer - case 4: // Float - case 9: // Fieldref - case 10: // Methodref - case 11: // InterfaceMethodref - case 12: // NameAndType - case 17: // Dynamic - case 18: // InvokeDynamic - pool[index] = new SimpleEntry(tag); - reader.BaseStream.Seek(4, SeekOrigin.Current); - break; - case 5: // Long - case 6: // Double - pool[index] = new SimpleEntry(tag); - reader.BaseStream.Seek(8, SeekOrigin.Current); - index++; - break; - case 15: // MethodHandle - pool[index] = new SimpleEntry(tag); - reader.BaseStream.Seek(3, SeekOrigin.Current); - break; - case 16: // MethodType - pool[index] = new SimpleEntry(tag); - reader.BaseStream.Seek(2, SeekOrigin.Current); - break; - case 19: // Module - { - var nameIndex = ReadUInt16(reader); - pool[index] = new ModuleEntry(nameIndex); - break; - } - case 20: // Package - { - var nameIndex = ReadUInt16(reader); - pool[index] = new PackageEntry(nameIndex); - break; - } - default: - throw new InvalidDataException($"Unsupported constant pool tag {tag}."); - } - - index++; - } - - return pool; - } - - private static string GetUtf8(ConstantPoolEntry[] pool, int index) - { - if (index <= 0 || index >= pool.Length) - { - return string.Empty; - } - - if (pool[index] is Utf8Entry utf8) - { - return utf8.Value; - } - - return string.Empty; - } - - private static string GetModuleName(ConstantPoolEntry[] pool, int index) - { - if (index <= 0 || index >= pool.Length) - { - return string.Empty; - } - - if (pool[index] is ModuleEntry module) - { - var utf8 = GetUtf8(pool, module.NameIndex); - return NormalizeBinaryName(utf8); - } - - return string.Empty; - } - - private static string GetPackageName(ConstantPoolEntry[] pool, int index) - { - if (index <= 0 || index >= pool.Length) - { - return string.Empty; - } - - if (pool[index] is PackageEntry package) - { - var utf8 = GetUtf8(pool, package.NameIndex); - return NormalizeBinaryName(utf8); - } - - return string.Empty; - } - - private static string GetClassName(ConstantPoolEntry[] pool, int index) - { - if (index <= 0 || index >= pool.Length) - { - return string.Empty; - } - - if (pool[index] is ClassEntry classEntry) - { - var utf8 = GetUtf8(pool, classEntry.NameIndex); - return NormalizeBinaryName(utf8); - } - - return string.Empty; - } - - private static string NormalizeBinaryName(string value) - => string.IsNullOrEmpty(value) ? 
string.Empty : value.Replace('/', '.'); - - private static bool TryReadMagic(BinaryReader reader, out uint magic) - { - if (reader.BaseStream.Length - reader.BaseStream.Position < 4) - { - magic = 0; - return false; - } - - magic = ReadUInt32(reader); - return true; - } - - private static void SkipMembers(BinaryReader reader, int count) - { - for (var i = 0; i < count; i++) - { - // access_flags, name_index, descriptor_index - reader.BaseStream.Seek(6, SeekOrigin.Current); - var attributesCount = ReadUInt16(reader); - SkipAttributes(reader, attributesCount); - } - } - - private static void SkipAttributes(BinaryReader reader, int count) - { - for (var i = 0; i < count; i++) - { - reader.BaseStream.Seek(2, SeekOrigin.Current); // name_index - var length = ReadUInt32(reader); - SkipBytes(reader, (int)length); - } - } - - private static void SkipBytes(BinaryReader reader, int count) - { - if (count <= 0) - { - return; - } - - reader.BaseStream.Seek(count, SeekOrigin.Current); - } - - private static ushort ReadUInt16(BinaryReader reader) - => BinaryPrimitives.ReadUInt16BigEndian(reader.ReadBytes(sizeof(ushort))); - - private static uint ReadUInt32(BinaryReader reader) - => BinaryPrimitives.ReadUInt32BigEndian(reader.ReadBytes(sizeof(uint))); - - private abstract record ConstantPoolEntry(byte Tag); - - private sealed record Utf8Entry(string Value) : ConstantPoolEntry(1); - - private sealed record ClassEntry(ushort NameIndex) : ConstantPoolEntry(7); - - private sealed record ModuleEntry(ushort NameIndex) : ConstantPoolEntry(19); - - private sealed record PackageEntry(ushort NameIndex) : ConstantPoolEntry(20); - - private sealed record SimpleEntry(byte Tag) : ConstantPoolEntry(Tag); -} +using System.Buffers.Binary; +using System.Collections.Immutable; +using System.Text; + +namespace StellaOps.Scanner.Analyzers.Lang.Java.Internal.ClassPath; + +internal static class JavaModuleInfoParser +{ + public static JavaModuleDescriptor? TryParse(Stream stream, string sourceIdentifier, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(stream); + + using var reader = new BinaryReader(stream, Encoding.UTF8, leaveOpen: true); + + if (!TryReadMagic(reader, out var magic) || magic != 0xCAFEBABE) + { + return null; + } + + // Skip minor/major version. + _ = ReadUInt16(reader); + _ = ReadUInt16(reader); + + var constantPool = ReadConstantPool(reader); + cancellationToken.ThrowIfCancellationRequested(); + + // access_flags, this_class, super_class + _ = ReadUInt16(reader); + _ = ReadUInt16(reader); + _ = ReadUInt16(reader); + + // interfaces + var interfacesCount = ReadUInt16(reader); + SkipBytes(reader, interfacesCount * 2); + + // fields + var fieldsCount = ReadUInt16(reader); + SkipMembers(reader, fieldsCount); + + // methods + var methodsCount = ReadUInt16(reader); + SkipMembers(reader, methodsCount); + + var attributesCount = ReadUInt16(reader); + JavaModuleDescriptor? 
descriptor = null; + + for (var i = 0; i < attributesCount; i++) + { + cancellationToken.ThrowIfCancellationRequested(); + + var nameIndex = ReadUInt16(reader); + var length = ReadUInt32(reader); + var attributeName = GetUtf8(constantPool, nameIndex); + + if (string.Equals(attributeName, "Module", StringComparison.Ordinal)) + { + descriptor = ParseModuleAttribute(reader, constantPool, sourceIdentifier); + } + else + { + SkipBytes(reader, (int)length); + } + } + + return descriptor; + } + + private static JavaModuleDescriptor ParseModuleAttribute(BinaryReader reader, ConstantPoolEntry[] constantPool, string sourceIdentifier) + { + var moduleNameIndex = ReadUInt16(reader); + var moduleFlags = ReadUInt16(reader); + var moduleVersionIndex = ReadUInt16(reader); + + var moduleName = GetModuleName(constantPool, moduleNameIndex); + var moduleVersion = moduleVersionIndex != 0 ? GetUtf8(constantPool, moduleVersionIndex) : null; + + var requiresCount = ReadUInt16(reader); + var requiresBuilder = ImmutableArray.CreateBuilder<JavaModuleRequires>(requiresCount); + for (var i = 0; i < requiresCount; i++) + { + var requiresIndex = ReadUInt16(reader); + var requiresFlags = ReadUInt16(reader); + var requiresVersionIndex = ReadUInt16(reader); + var requiresName = GetModuleName(constantPool, requiresIndex); + var requiresVersion = requiresVersionIndex != 0 ? GetUtf8(constantPool, requiresVersionIndex) : null; + requiresBuilder.Add(new JavaModuleRequires(requiresName, requiresFlags, requiresVersion)); + } + + var exportsCount = ReadUInt16(reader); + var exportsBuilder = ImmutableArray.CreateBuilder<JavaModuleExports>(exportsCount); + for (var i = 0; i < exportsCount; i++) + { + var exportsIndex = ReadUInt16(reader); + var exportsFlags = ReadUInt16(reader); + var exportsToCount = ReadUInt16(reader); + + var targetsBuilder = ImmutableArray.CreateBuilder<string>(exportsToCount); + for (var j = 0; j < exportsToCount; j++) + { + var targetIndex = ReadUInt16(reader); + targetsBuilder.Add(GetModuleName(constantPool, targetIndex)); + } + + var packageName = GetPackageName(constantPool, exportsIndex); + exportsBuilder.Add(new JavaModuleExports(packageName, exportsFlags, targetsBuilder.ToImmutable())); + } + + var opensCount = ReadUInt16(reader); + var opensBuilder = ImmutableArray.CreateBuilder<JavaModuleOpens>(opensCount); + for (var i = 0; i < opensCount; i++) + { + var opensIndex = ReadUInt16(reader); + var opensFlags = ReadUInt16(reader); + var opensToCount = ReadUInt16(reader); + + var targetsBuilder = ImmutableArray.CreateBuilder<string>(opensToCount); + for (var j = 0; j < opensToCount; j++) + { + var targetIndex = ReadUInt16(reader); + targetsBuilder.Add(GetModuleName(constantPool, targetIndex)); + } + + var packageName = GetPackageName(constantPool, opensIndex); + opensBuilder.Add(new JavaModuleOpens(packageName, opensFlags, targetsBuilder.ToImmutable())); + } + + var usesCount = ReadUInt16(reader); + var usesBuilder = ImmutableArray.CreateBuilder<string>(usesCount); + for (var i = 0; i < usesCount; i++) + { + var classIndex = ReadUInt16(reader); + usesBuilder.Add(GetClassName(constantPool, classIndex)); + } + + var providesCount = ReadUInt16(reader); + var providesBuilder = ImmutableArray.CreateBuilder<JavaModuleProvides>(providesCount); + for (var i = 0; i < providesCount; i++) + { + var serviceIndex = ReadUInt16(reader); + var providesWithCount = ReadUInt16(reader); + + var implementationsBuilder = ImmutableArray.CreateBuilder<string>(providesWithCount); + for (var j = 0; j < providesWithCount; j++) + { + 
var implIndex = ReadUInt16(reader); + implementationsBuilder.Add(GetClassName(constantPool, implIndex)); + } + + var serviceName = GetClassName(constantPool, serviceIndex); + providesBuilder.Add(new JavaModuleProvides(serviceName, implementationsBuilder.ToImmutable())); + } + + return new JavaModuleDescriptor( + moduleName, + moduleVersion, + moduleFlags, + requiresBuilder.ToImmutable(), + exportsBuilder.ToImmutable(), + opensBuilder.ToImmutable(), + usesBuilder.ToImmutable(), + providesBuilder.ToImmutable(), + sourceIdentifier); + } + + private static ConstantPoolEntry[] ReadConstantPool(BinaryReader reader) + { + var count = ReadUInt16(reader); + var pool = new ConstantPoolEntry[count]; + + var index = 1; + while (index < count) + { + var tag = reader.ReadByte(); + switch (tag) + { + case 1: // Utf8 + { + var length = ReadUInt16(reader); + var bytes = reader.ReadBytes(length); + var value = Encoding.UTF8.GetString(bytes); + pool[index] = new Utf8Entry(value); + break; + } + case 7: // Class + { + var nameIndex = ReadUInt16(reader); + pool[index] = new ClassEntry(nameIndex); + break; + } + case 8: // String + pool[index] = new SimpleEntry(tag); + reader.BaseStream.Seek(2, SeekOrigin.Current); + break; + case 3: // Integer + case 4: // Float + case 9: // Fieldref + case 10: // Methodref + case 11: // InterfaceMethodref + case 12: // NameAndType + case 17: // Dynamic + case 18: // InvokeDynamic + pool[index] = new SimpleEntry(tag); + reader.BaseStream.Seek(4, SeekOrigin.Current); + break; + case 5: // Long + case 6: // Double + pool[index] = new SimpleEntry(tag); + reader.BaseStream.Seek(8, SeekOrigin.Current); + index++; + break; + case 15: // MethodHandle + pool[index] = new SimpleEntry(tag); + reader.BaseStream.Seek(3, SeekOrigin.Current); + break; + case 16: // MethodType + pool[index] = new SimpleEntry(tag); + reader.BaseStream.Seek(2, SeekOrigin.Current); + break; + case 19: // Module + { + var nameIndex = ReadUInt16(reader); + pool[index] = new ModuleEntry(nameIndex); + break; + } + case 20: // Package + { + var nameIndex = ReadUInt16(reader); + pool[index] = new PackageEntry(nameIndex); + break; + } + default: + throw new InvalidDataException($"Unsupported constant pool tag {tag}."); + } + + index++; + } + + return pool; + } + + private static string GetUtf8(ConstantPoolEntry[] pool, int index) + { + if (index <= 0 || index >= pool.Length) + { + return string.Empty; + } + + if (pool[index] is Utf8Entry utf8) + { + return utf8.Value; + } + + return string.Empty; + } + + private static string GetModuleName(ConstantPoolEntry[] pool, int index) + { + if (index <= 0 || index >= pool.Length) + { + return string.Empty; + } + + if (pool[index] is ModuleEntry module) + { + var utf8 = GetUtf8(pool, module.NameIndex); + return NormalizeBinaryName(utf8); + } + + return string.Empty; + } + + private static string GetPackageName(ConstantPoolEntry[] pool, int index) + { + if (index <= 0 || index >= pool.Length) + { + return string.Empty; + } + + if (pool[index] is PackageEntry package) + { + var utf8 = GetUtf8(pool, package.NameIndex); + return NormalizeBinaryName(utf8); + } + + return string.Empty; + } + + private static string GetClassName(ConstantPoolEntry[] pool, int index) + { + if (index <= 0 || index >= pool.Length) + { + return string.Empty; + } + + if (pool[index] is ClassEntry classEntry) + { + var utf8 = GetUtf8(pool, classEntry.NameIndex); + return NormalizeBinaryName(utf8); + } + + return string.Empty; + } + + private static string NormalizeBinaryName(string value) + => 
string.IsNullOrEmpty(value) ? string.Empty : value.Replace('/', '.'); + + private static bool TryReadMagic(BinaryReader reader, out uint magic) + { + if (reader.BaseStream.Length - reader.BaseStream.Position < 4) + { + magic = 0; + return false; + } + + magic = ReadUInt32(reader); + return true; + } + + private static void SkipMembers(BinaryReader reader, int count) + { + for (var i = 0; i < count; i++) + { + // access_flags, name_index, descriptor_index + reader.BaseStream.Seek(6, SeekOrigin.Current); + var attributesCount = ReadUInt16(reader); + SkipAttributes(reader, attributesCount); + } + } + + private static void SkipAttributes(BinaryReader reader, int count) + { + for (var i = 0; i < count; i++) + { + reader.BaseStream.Seek(2, SeekOrigin.Current); // name_index + var length = ReadUInt32(reader); + SkipBytes(reader, (int)length); + } + } + + private static void SkipBytes(BinaryReader reader, int count) + { + if (count <= 0) + { + return; + } + + reader.BaseStream.Seek(count, SeekOrigin.Current); + } + + private static ushort ReadUInt16(BinaryReader reader) + => BinaryPrimitives.ReadUInt16BigEndian(reader.ReadBytes(sizeof(ushort))); + + private static uint ReadUInt32(BinaryReader reader) + => BinaryPrimitives.ReadUInt32BigEndian(reader.ReadBytes(sizeof(uint))); + + private abstract record ConstantPoolEntry(byte Tag); + + private sealed record Utf8Entry(string Value) : ConstantPoolEntry(1); + + private sealed record ClassEntry(ushort NameIndex) : ConstantPoolEntry(7); + + private sealed record ModuleEntry(ushort NameIndex) : ConstantPoolEntry(19); + + private sealed record PackageEntry(ushort NameIndex) : ConstantPoolEntry(20); + + private sealed record SimpleEntry(byte Tag) : ConstantPoolEntry(Tag); +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaArchive.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaArchive.cs similarity index 97% rename from src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaArchive.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaArchive.cs index 9910e63c..f757040e 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaArchive.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaArchive.cs @@ -1,264 +1,264 @@ -using System.Collections.Immutable; - -namespace StellaOps.Scanner.Analyzers.Lang.Java.Internal; - -internal sealed class JavaArchive -{ - private readonly ImmutableDictionary<string, JavaArchiveEntry> _entryMap; - - private JavaArchive( - string absolutePath, - string relativePath, - JavaPackagingKind packaging, - ImmutableArray<string> layeredDirectories, - bool isMultiRelease, - bool hasModuleInfo, - ImmutableArray<JavaArchiveEntry> entries) - { - AbsolutePath = absolutePath ?? throw new ArgumentNullException(nameof(absolutePath)); - RelativePath = relativePath ?? 
throw new ArgumentNullException(nameof(relativePath)); - Packaging = packaging; - LayeredDirectories = layeredDirectories; - IsMultiRelease = isMultiRelease; - HasModuleInfo = hasModuleInfo; - Entries = entries; - _entryMap = entries.ToImmutableDictionary(static entry => entry.EffectivePath, static entry => entry, StringComparer.Ordinal); - } - - public string AbsolutePath { get; } - - public string RelativePath { get; } - - public JavaPackagingKind Packaging { get; } - - public ImmutableArray<string> LayeredDirectories { get; } - - public bool IsMultiRelease { get; } - - public bool HasModuleInfo { get; } - - public ImmutableArray<JavaArchiveEntry> Entries { get; } - - public static JavaArchive Load(string absolutePath, string relativePath) - { - ArgumentException.ThrowIfNullOrEmpty(absolutePath); - ArgumentException.ThrowIfNullOrEmpty(relativePath); - - using var fileStream = new FileStream(absolutePath, FileMode.Open, FileAccess.Read, FileShare.Read); - using var zip = new ZipArchive(fileStream, ZipArchiveMode.Read, leaveOpen: false); - - var layeredDirectories = new HashSet<string>(StringComparer.Ordinal); - var candidates = new Dictionary<string, List<EntryCandidate>>(StringComparer.Ordinal); - var isMultiRelease = false; - var hasModuleInfo = false; - var hasBootInf = false; - var hasWebInf = false; - - foreach (var entry in zip.Entries) - { - var normalized = JavaZipEntryUtilities.NormalizeEntryName(entry.FullName); - if (string.IsNullOrEmpty(normalized) || normalized.EndsWith('/')) - { - continue; - } - - if (normalized.StartsWith("BOOT-INF/", StringComparison.OrdinalIgnoreCase)) - { - hasBootInf = true; - layeredDirectories.Add("BOOT-INF"); - } - - if (normalized.StartsWith("WEB-INF/", StringComparison.OrdinalIgnoreCase)) - { - hasWebInf = true; - layeredDirectories.Add("WEB-INF"); - } - - var version = 0; - var effectivePath = normalized; - if (JavaZipEntryUtilities.TryParseMultiReleasePath(normalized, out var candidatePath, out var candidateVersion)) - { - effectivePath = candidatePath; - version = candidateVersion; - isMultiRelease = true; - } - - if (string.IsNullOrEmpty(effectivePath)) - { - continue; - } - - if (string.Equals(effectivePath, "module-info.class", StringComparison.Ordinal)) - { - hasModuleInfo = true; - } - - var candidate = new EntryCandidate( - effectivePath, - entry.FullName, - version, - entry.Length, - entry.LastWriteTime.ToUniversalTime()); - - if (!candidates.TryGetValue(effectivePath, out var bucket)) - { - bucket = new List<EntryCandidate>(); - candidates[effectivePath] = bucket; - } - - bucket.Add(candidate); - } - - var entries = new List<JavaArchiveEntry>(candidates.Count); - foreach (var pair in candidates) - { - var selected = pair.Value - .OrderByDescending(static candidate => candidate.Version) - .ThenBy(static candidate => candidate.OriginalPath, StringComparer.Ordinal) - .First(); - - entries.Add(new JavaArchiveEntry( - pair.Key, - selected.OriginalPath, - selected.Version, - selected.Length, - selected.LastWriteTime)); - } - - entries.Sort(static (left, right) => StringComparer.Ordinal.Compare(left.EffectivePath, right.EffectivePath)); - - var packaging = DeterminePackaging(absolutePath, hasBootInf, hasWebInf); - - return new JavaArchive( - absolutePath, - relativePath, - packaging, - layeredDirectories - .OrderBy(static directory => directory, StringComparer.Ordinal) - .ToImmutableArray(), - isMultiRelease, - hasModuleInfo, - entries.ToImmutableArray()); - } - - public bool TryGetEntry(string effectivePath, out JavaArchiveEntry entry) - { - 
ArgumentNullException.ThrowIfNull(effectivePath); - return _entryMap.TryGetValue(effectivePath, out entry!); - } - - public Stream OpenEntry(JavaArchiveEntry entry) - { - ArgumentNullException.ThrowIfNull(entry); - - var fileStream = new FileStream(AbsolutePath, FileMode.Open, FileAccess.Read, FileShare.Read); - ZipArchive? archive = null; - Stream? entryStream = null; - - try - { - archive = new ZipArchive(fileStream, ZipArchiveMode.Read, leaveOpen: false); - var zipEntry = archive.GetEntry(entry.OriginalPath); - if (zipEntry is null) - { - throw new FileNotFoundException($"Entry '{entry.OriginalPath}' not found in archive '{AbsolutePath}'."); - } - - entryStream = zipEntry.Open(); - return new ZipEntryStream(fileStream, archive, entryStream); - } - catch - { - entryStream?.Dispose(); - archive?.Dispose(); - fileStream.Dispose(); - throw; - } - } - - private static JavaPackagingKind DeterminePackaging(string absolutePath, bool hasBootInf, bool hasWebInf) - { - var extension = Path.GetExtension(absolutePath); - return extension switch - { - ".war" => JavaPackagingKind.War, - ".ear" => JavaPackagingKind.Ear, - ".jmod" => JavaPackagingKind.JMod, - ".jimage" => JavaPackagingKind.JImage, - ".jar" => hasBootInf ? JavaPackagingKind.SpringBootFatJar : JavaPackagingKind.Jar, - _ => JavaPackagingKind.Unknown, - }; - } - - private sealed record EntryCandidate( - string EffectivePath, - string OriginalPath, - int Version, - long Length, - DateTimeOffset LastWriteTime); - - private sealed class ZipEntryStream : Stream - { - private readonly Stream _fileStream; - private readonly ZipArchive _archive; - private readonly Stream _entryStream; - - public ZipEntryStream(Stream fileStream, ZipArchive archive, Stream entryStream) - { - _fileStream = fileStream; - _archive = archive; - _entryStream = entryStream; - } - - public override bool CanRead => _entryStream.CanRead; - - public override bool CanSeek => _entryStream.CanSeek; - - public override bool CanWrite => _entryStream.CanWrite; - - public override long Length => _entryStream.Length; - - public override long Position - { - get => _entryStream.Position; - set => _entryStream.Position = value; - } - - public override void Flush() => _entryStream.Flush(); - - public override int Read(byte[] buffer, int offset, int count) - => _entryStream.Read(buffer, offset, count); - - public override ValueTask<int> ReadAsync(Memory<byte> buffer, CancellationToken cancellationToken = default) - => _entryStream.ReadAsync(buffer, cancellationToken); - - public override long Seek(long offset, SeekOrigin origin) - => _entryStream.Seek(offset, origin); - - public override void SetLength(long value) - => _entryStream.SetLength(value); - - public override void Write(byte[] buffer, int offset, int count) - => _entryStream.Write(buffer, offset, count); - - public override ValueTask DisposeAsync() - { - _entryStream.Dispose(); - _archive.Dispose(); - _fileStream.Dispose(); - return ValueTask.CompletedTask; - } - - protected override void Dispose(bool disposing) - { - if (disposing) - { - _entryStream.Dispose(); - _archive.Dispose(); - _fileStream.Dispose(); - } - - base.Dispose(disposing); - } - } -} +using System.Collections.Immutable; + +namespace StellaOps.Scanner.Analyzers.Lang.Java.Internal; + +internal sealed class JavaArchive +{ + private readonly ImmutableDictionary<string, JavaArchiveEntry> _entryMap; + + private JavaArchive( + string absolutePath, + string relativePath, + JavaPackagingKind packaging, + ImmutableArray<string> layeredDirectories, + bool isMultiRelease, 
+ bool hasModuleInfo, + ImmutableArray<JavaArchiveEntry> entries) + { + AbsolutePath = absolutePath ?? throw new ArgumentNullException(nameof(absolutePath)); + RelativePath = relativePath ?? throw new ArgumentNullException(nameof(relativePath)); + Packaging = packaging; + LayeredDirectories = layeredDirectories; + IsMultiRelease = isMultiRelease; + HasModuleInfo = hasModuleInfo; + Entries = entries; + _entryMap = entries.ToImmutableDictionary(static entry => entry.EffectivePath, static entry => entry, StringComparer.Ordinal); + } + + public string AbsolutePath { get; } + + public string RelativePath { get; } + + public JavaPackagingKind Packaging { get; } + + public ImmutableArray<string> LayeredDirectories { get; } + + public bool IsMultiRelease { get; } + + public bool HasModuleInfo { get; } + + public ImmutableArray<JavaArchiveEntry> Entries { get; } + + public static JavaArchive Load(string absolutePath, string relativePath) + { + ArgumentException.ThrowIfNullOrEmpty(absolutePath); + ArgumentException.ThrowIfNullOrEmpty(relativePath); + + using var fileStream = new FileStream(absolutePath, FileMode.Open, FileAccess.Read, FileShare.Read); + using var zip = new ZipArchive(fileStream, ZipArchiveMode.Read, leaveOpen: false); + + var layeredDirectories = new HashSet<string>(StringComparer.Ordinal); + var candidates = new Dictionary<string, List<EntryCandidate>>(StringComparer.Ordinal); + var isMultiRelease = false; + var hasModuleInfo = false; + var hasBootInf = false; + var hasWebInf = false; + + foreach (var entry in zip.Entries) + { + var normalized = JavaZipEntryUtilities.NormalizeEntryName(entry.FullName); + if (string.IsNullOrEmpty(normalized) || normalized.EndsWith('/')) + { + continue; + } + + if (normalized.StartsWith("BOOT-INF/", StringComparison.OrdinalIgnoreCase)) + { + hasBootInf = true; + layeredDirectories.Add("BOOT-INF"); + } + + if (normalized.StartsWith("WEB-INF/", StringComparison.OrdinalIgnoreCase)) + { + hasWebInf = true; + layeredDirectories.Add("WEB-INF"); + } + + var version = 0; + var effectivePath = normalized; + if (JavaZipEntryUtilities.TryParseMultiReleasePath(normalized, out var candidatePath, out var candidateVersion)) + { + effectivePath = candidatePath; + version = candidateVersion; + isMultiRelease = true; + } + + if (string.IsNullOrEmpty(effectivePath)) + { + continue; + } + + if (string.Equals(effectivePath, "module-info.class", StringComparison.Ordinal)) + { + hasModuleInfo = true; + } + + var candidate = new EntryCandidate( + effectivePath, + entry.FullName, + version, + entry.Length, + entry.LastWriteTime.ToUniversalTime()); + + if (!candidates.TryGetValue(effectivePath, out var bucket)) + { + bucket = new List<EntryCandidate>(); + candidates[effectivePath] = bucket; + } + + bucket.Add(candidate); + } + + var entries = new List<JavaArchiveEntry>(candidates.Count); + foreach (var pair in candidates) + { + var selected = pair.Value + .OrderByDescending(static candidate => candidate.Version) + .ThenBy(static candidate => candidate.OriginalPath, StringComparer.Ordinal) + .First(); + + entries.Add(new JavaArchiveEntry( + pair.Key, + selected.OriginalPath, + selected.Version, + selected.Length, + selected.LastWriteTime)); + } + + entries.Sort(static (left, right) => StringComparer.Ordinal.Compare(left.EffectivePath, right.EffectivePath)); + + var packaging = DeterminePackaging(absolutePath, hasBootInf, hasWebInf); + + return new JavaArchive( + absolutePath, + relativePath, + packaging, + layeredDirectories + .OrderBy(static directory => directory, 
StringComparer.Ordinal) + .ToImmutableArray(), + isMultiRelease, + hasModuleInfo, + entries.ToImmutableArray()); + } + + public bool TryGetEntry(string effectivePath, out JavaArchiveEntry entry) + { + ArgumentNullException.ThrowIfNull(effectivePath); + return _entryMap.TryGetValue(effectivePath, out entry!); + } + + public Stream OpenEntry(JavaArchiveEntry entry) + { + ArgumentNullException.ThrowIfNull(entry); + + var fileStream = new FileStream(AbsolutePath, FileMode.Open, FileAccess.Read, FileShare.Read); + ZipArchive? archive = null; + Stream? entryStream = null; + + try + { + archive = new ZipArchive(fileStream, ZipArchiveMode.Read, leaveOpen: false); + var zipEntry = archive.GetEntry(entry.OriginalPath); + if (zipEntry is null) + { + throw new FileNotFoundException($"Entry '{entry.OriginalPath}' not found in archive '{AbsolutePath}'."); + } + + entryStream = zipEntry.Open(); + return new ZipEntryStream(fileStream, archive, entryStream); + } + catch + { + entryStream?.Dispose(); + archive?.Dispose(); + fileStream.Dispose(); + throw; + } + } + + private static JavaPackagingKind DeterminePackaging(string absolutePath, bool hasBootInf, bool hasWebInf) + { + var extension = Path.GetExtension(absolutePath); + return extension switch + { + ".war" => JavaPackagingKind.War, + ".ear" => JavaPackagingKind.Ear, + ".jmod" => JavaPackagingKind.JMod, + ".jimage" => JavaPackagingKind.JImage, + ".jar" => hasBootInf ? JavaPackagingKind.SpringBootFatJar : JavaPackagingKind.Jar, + _ => JavaPackagingKind.Unknown, + }; + } + + private sealed record EntryCandidate( + string EffectivePath, + string OriginalPath, + int Version, + long Length, + DateTimeOffset LastWriteTime); + + private sealed class ZipEntryStream : Stream + { + private readonly Stream _fileStream; + private readonly ZipArchive _archive; + private readonly Stream _entryStream; + + public ZipEntryStream(Stream fileStream, ZipArchive archive, Stream entryStream) + { + _fileStream = fileStream; + _archive = archive; + _entryStream = entryStream; + } + + public override bool CanRead => _entryStream.CanRead; + + public override bool CanSeek => _entryStream.CanSeek; + + public override bool CanWrite => _entryStream.CanWrite; + + public override long Length => _entryStream.Length; + + public override long Position + { + get => _entryStream.Position; + set => _entryStream.Position = value; + } + + public override void Flush() => _entryStream.Flush(); + + public override int Read(byte[] buffer, int offset, int count) + => _entryStream.Read(buffer, offset, count); + + public override ValueTask<int> ReadAsync(Memory<byte> buffer, CancellationToken cancellationToken = default) + => _entryStream.ReadAsync(buffer, cancellationToken); + + public override long Seek(long offset, SeekOrigin origin) + => _entryStream.Seek(offset, origin); + + public override void SetLength(long value) + => _entryStream.SetLength(value); + + public override void Write(byte[] buffer, int offset, int count) + => _entryStream.Write(buffer, offset, count); + + public override ValueTask DisposeAsync() + { + _entryStream.Dispose(); + _archive.Dispose(); + _fileStream.Dispose(); + return ValueTask.CompletedTask; + } + + protected override void Dispose(bool disposing) + { + if (disposing) + { + _entryStream.Dispose(); + _archive.Dispose(); + _fileStream.Dispose(); + } + + base.Dispose(disposing); + } + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaArchiveEntry.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaArchiveEntry.cs 
similarity index 96%
rename from src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaArchiveEntry.cs
rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaArchiveEntry.cs
index 91226ac7..9df3c2fe 100644
--- a/src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaArchiveEntry.cs
+++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaArchiveEntry.cs
@@ -1,8 +1,8 @@
-namespace StellaOps.Scanner.Analyzers.Lang.Java.Internal;
-
-internal sealed record JavaArchiveEntry(
-    string EffectivePath,
-    string OriginalPath,
-    int Version,
-    long Length,
-    DateTimeOffset LastWriteTime);
+namespace StellaOps.Scanner.Analyzers.Lang.Java.Internal;
+
+internal sealed record JavaArchiveEntry(
+    string EffectivePath,
+    string OriginalPath,
+    int Version,
+    long Length,
+    DateTimeOffset LastWriteTime);
diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaPackagingKind.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaPackagingKind.cs
similarity index 93%
rename from src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaPackagingKind.cs
rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaPackagingKind.cs
index f6decbfc..affd5430 100644
--- a/src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaPackagingKind.cs
+++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaPackagingKind.cs
@@ -1,12 +1,12 @@
-namespace StellaOps.Scanner.Analyzers.Lang.Java.Internal;
-
-internal enum JavaPackagingKind
-{
-    Jar,
-    SpringBootFatJar,
-    War,
-    Ear,
-    JMod,
-    JImage,
-    Unknown,
-}
+namespace StellaOps.Scanner.Analyzers.Lang.Java.Internal;
+
+internal enum JavaPackagingKind
+{
+    Jar,
+    SpringBootFatJar,
+    War,
+    Ear,
+    JMod,
+    JImage,
+    Unknown,
+}
diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaReleaseFileParser.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaReleaseFileParser.cs
similarity index 96%
rename from src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaReleaseFileParser.cs
rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaReleaseFileParser.cs
index 9a77e7ec..c981d457 100644
--- a/src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaReleaseFileParser.cs
+++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaReleaseFileParser.cs
@@ -1,68 +1,68 @@
-using System.Text;
-
-namespace StellaOps.Scanner.Analyzers.Lang.Java.Internal;
-
-internal static class JavaReleaseFileParser
-{
-    public static JavaReleaseMetadata Parse(string filePath)
-    {
-        ArgumentException.ThrowIfNullOrEmpty(filePath);
-
-        using var stream = new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.Read);
-        using var reader = new StreamReader(stream, Encoding.UTF8, detectEncodingFromByteOrderMarks: true);
-
-        var map = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
-        string? line;
-        while ((line = reader.ReadLine()) is not null)
-        {
-            line = line.Trim();
-            if (line.Length == 0 || line.StartsWith('#'))
-            {
-                continue;
-            }
-
-            var separatorIndex = line.IndexOf('=');
-            if (separatorIndex <= 0)
-            {
-                continue;
-            }
-
-            var key = line[..separatorIndex].Trim();
-            if (key.Length == 0)
-            {
-                continue;
-            }
-
-            var value = line[(separatorIndex + 1)..].Trim();
-            map[key] = TrimQuotes(value);
-        }
-
-        map.TryGetValue("JAVA_VERSION", out var version);
-        if (string.IsNullOrWhiteSpace(version) && map.TryGetValue("JAVA_RUNTIME_VERSION", out var runtimeVersion))
-        {
-            version = runtimeVersion;
-        }
-
-        map.TryGetValue("IMPLEMENTOR", out var vendor);
-        if (string.IsNullOrWhiteSpace(vendor) && map.TryGetValue("IMPLEMENTOR_VERSION", out var implementorVersion))
-        {
-            vendor = implementorVersion;
-        }
-
-        return new JavaReleaseMetadata(
-            version?.Trim() ?? string.Empty,
-            vendor?.Trim() ?? string.Empty);
-    }
-
-    private static string TrimQuotes(string value)
-    {
-        if (value.Length >= 2 && value[0] == '"' && value[^1] == '"')
-        {
-            return value[1..^1];
-        }
-
-        return value;
-    }
-
-    public sealed record JavaReleaseMetadata(string Version, string Vendor);
-}
+using System.Text;
+
+namespace StellaOps.Scanner.Analyzers.Lang.Java.Internal;
+
+internal static class JavaReleaseFileParser
+{
+    public static JavaReleaseMetadata Parse(string filePath)
+    {
+        ArgumentException.ThrowIfNullOrEmpty(filePath);
+
+        using var stream = new FileStream(filePath, FileMode.Open, FileAccess.Read, FileShare.Read);
+        using var reader = new StreamReader(stream, Encoding.UTF8, detectEncodingFromByteOrderMarks: true);
+
+        var map = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
+        string? line;
+        while ((line = reader.ReadLine()) is not null)
+        {
+            line = line.Trim();
+            if (line.Length == 0 || line.StartsWith('#'))
+            {
+                continue;
+            }
+
+            var separatorIndex = line.IndexOf('=');
+            if (separatorIndex <= 0)
+            {
+                continue;
+            }
+
+            var key = line[..separatorIndex].Trim();
+            if (key.Length == 0)
+            {
+                continue;
+            }
+
+            var value = line[(separatorIndex + 1)..].Trim();
+            map[key] = TrimQuotes(value);
+        }
+
+        map.TryGetValue("JAVA_VERSION", out var version);
+        if (string.IsNullOrWhiteSpace(version) && map.TryGetValue("JAVA_RUNTIME_VERSION", out var runtimeVersion))
+        {
+            version = runtimeVersion;
+        }
+
+        map.TryGetValue("IMPLEMENTOR", out var vendor);
+        if (string.IsNullOrWhiteSpace(vendor) && map.TryGetValue("IMPLEMENTOR_VERSION", out var implementorVersion))
+        {
+            vendor = implementorVersion;
+        }
+
+        return new JavaReleaseMetadata(
+            version?.Trim() ?? string.Empty,
+            vendor?.Trim() ?? string.Empty);
+    }
+
+    private static string TrimQuotes(string value)
+    {
+        if (value.Length >= 2 && value[0] == '"' && value[^1] == '"')
+        {
+            return value[1..^1];
+        }
+
+        return value;
+    }
+
+    public sealed record JavaReleaseMetadata(string Version, string Vendor);
+}
diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaRuntimeImage.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaRuntimeImage.cs
similarity index 96%
rename from src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaRuntimeImage.cs
rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaRuntimeImage.cs
index 5e7980f9..f15deb1e 100644
--- a/src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaRuntimeImage.cs
+++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaRuntimeImage.cs
@@ -1,7 +1,7 @@
-namespace StellaOps.Scanner.Analyzers.Lang.Java.Internal;
-
-internal sealed record JavaRuntimeImage(
-    string AbsolutePath,
-    string RelativePath,
-    string JavaVersion,
-    string Vendor);
+namespace StellaOps.Scanner.Analyzers.Lang.Java.Internal;
+
+internal sealed record JavaRuntimeImage(
+    string AbsolutePath,
+    string RelativePath,
+    string JavaVersion,
+    string Vendor);
diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaWorkspace.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaWorkspace.cs
similarity index 97%
rename from src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaWorkspace.cs
rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaWorkspace.cs
index 3162e344..157207de 100644
--- a/src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaWorkspace.cs
+++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaWorkspace.cs
@@ -1,28 +1,28 @@
-using System.Collections.Immutable;
-
-namespace StellaOps.Scanner.Analyzers.Lang.Java.Internal;
-
-internal sealed class JavaWorkspace
-{
-    public JavaWorkspace(
-        IEnumerable<JavaArchive> archives,
-        IEnumerable<JavaRuntimeImage> runtimeImages)
-    {
-        ArgumentNullException.ThrowIfNull(archives);
-        ArgumentNullException.ThrowIfNull(runtimeImages);
-
-        Archives = archives
-            .Where(static archive => archive is not null)
-            .OrderBy(static archive => archive.RelativePath, StringComparer.Ordinal)
-            .ToImmutableArray();
-
-        RuntimeImages = runtimeImages
-            .Where(static image => image is not null)
-            .OrderBy(static image => image.RelativePath, StringComparer.Ordinal)
-            .ToImmutableArray();
-    }
-
-    public ImmutableArray<JavaArchive> Archives { get; }
-
-    public ImmutableArray<JavaRuntimeImage> RuntimeImages { get; }
-}
+using System.Collections.Immutable;
+
+namespace StellaOps.Scanner.Analyzers.Lang.Java.Internal;
+
+internal sealed class JavaWorkspace
+{
+    public JavaWorkspace(
+        IEnumerable<JavaArchive> archives,
+        IEnumerable<JavaRuntimeImage> runtimeImages)
+    {
+        ArgumentNullException.ThrowIfNull(archives);
+        ArgumentNullException.ThrowIfNull(runtimeImages);
+
+        Archives = archives
+            .Where(static archive => archive is not null)
+            .OrderBy(static archive => archive.RelativePath, StringComparer.Ordinal)
+            .ToImmutableArray();
+
+        RuntimeImages = runtimeImages
+            .Where(static image => image is not null)
+            .OrderBy(static image => image.RelativePath, StringComparer.Ordinal)
+            .ToImmutableArray();
+    }
+
+    public ImmutableArray<JavaArchive> Archives { get; }
+
+    public ImmutableArray<JavaRuntimeImage> RuntimeImages { get; }
+}
diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaWorkspaceNormalizer.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaWorkspaceNormalizer.cs
similarity index 96%
rename from src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaWorkspaceNormalizer.cs
rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaWorkspaceNormalizer.cs
index ec431431..f442491f 100644
--- a/src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaWorkspaceNormalizer.cs
+++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaWorkspaceNormalizer.cs
@@ -1,101 +1,101 @@
-using System.Collections.Immutable;
-
-namespace StellaOps.Scanner.Analyzers.Lang.Java.Internal;
-
-internal static class JavaWorkspaceNormalizer
-{
-    private static readonly HashSet<string> SupportedExtensions = new(StringComparer.OrdinalIgnoreCase)
-    {
-        ".jar",
-        ".war",
-        ".ear",
-        ".jmod",
-        ".jimage",
-    };
-
-    private static readonly EnumerationOptions EnumerationOptions = new()
-    {
-        RecurseSubdirectories = true,
-        IgnoreInaccessible = true,
-        AttributesToSkip = FileAttributes.Device | FileAttributes.ReparsePoint,
-        ReturnSpecialDirectories = false,
-    };
-
-    public static JavaWorkspace Normalize(LanguageAnalyzerContext context, CancellationToken cancellationToken)
-    {
-        ArgumentNullException.ThrowIfNull(context);
-
-        var archives = new List<JavaArchive>();
-        var runtimeImages = new List<JavaRuntimeImage>();
-
-        foreach (var filePath in Directory.EnumerateFiles(context.RootPath, "*", EnumerationOptions))
-        {
-            cancellationToken.ThrowIfCancellationRequested();
-
-            if (!SupportedExtensions.Contains(Path.GetExtension(filePath)))
-            {
-                continue;
-            }
-
-            try
-            {
-                var relative = context.GetRelativePath(filePath);
-                var archive = JavaArchive.Load(filePath, relative);
-                archives.Add(archive);
-            }
-            catch (IOException)
-            {
-                // Corrupt archives should not abort the scan.
-            }
-            catch (InvalidDataException)
-            {
-                // Skip non-zip payloads despite the extension.
-            }
-        }
-
-        foreach (var directory in Directory.EnumerateDirectories(context.RootPath, "*", EnumerationOptions))
-        {
-            cancellationToken.ThrowIfCancellationRequested();
-
-            try
-            {
-                if (!LooksLikeRuntimeImage(directory))
-                {
-                    continue;
-                }
-
-                var releasePath = Path.Combine(directory, "release");
-                if (!File.Exists(releasePath))
-                {
-                    continue;
-                }
-
-                var metadata = JavaReleaseFileParser.Parse(releasePath);
-                runtimeImages.Add(new JavaRuntimeImage(
-                    AbsolutePath: directory,
-                    RelativePath: context.GetRelativePath(directory),
-                    JavaVersion: metadata.Version,
-                    Vendor: metadata.Vendor));
-            }
-            catch (IOException)
-            {
-                // Skip directories we cannot access.
-            }
-        }
-
-        return new JavaWorkspace(archives, runtimeImages);
-    }
-
-    private static bool LooksLikeRuntimeImage(string directory)
-    {
-        if (!Directory.Exists(directory))
-        {
-            return false;
-        }
-
-        var libModules = Path.Combine(directory, "lib", "modules");
-        var binJava = Path.Combine(directory, "bin", OperatingSystem.IsWindows() ?
"java.exe" : "java"); - - return File.Exists(libModules) || File.Exists(binJava); - } -} +using System.Collections.Immutable; + +namespace StellaOps.Scanner.Analyzers.Lang.Java.Internal; + +internal static class JavaWorkspaceNormalizer +{ + private static readonly HashSet<string> SupportedExtensions = new(StringComparer.OrdinalIgnoreCase) + { + ".jar", + ".war", + ".ear", + ".jmod", + ".jimage", + }; + + private static readonly EnumerationOptions EnumerationOptions = new() + { + RecurseSubdirectories = true, + IgnoreInaccessible = true, + AttributesToSkip = FileAttributes.Device | FileAttributes.ReparsePoint, + ReturnSpecialDirectories = false, + }; + + public static JavaWorkspace Normalize(LanguageAnalyzerContext context, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(context); + + var archives = new List<JavaArchive>(); + var runtimeImages = new List<JavaRuntimeImage>(); + + foreach (var filePath in Directory.EnumerateFiles(context.RootPath, "*", EnumerationOptions)) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (!SupportedExtensions.Contains(Path.GetExtension(filePath))) + { + continue; + } + + try + { + var relative = context.GetRelativePath(filePath); + var archive = JavaArchive.Load(filePath, relative); + archives.Add(archive); + } + catch (IOException) + { + // Corrupt archives should not abort the scan. + } + catch (InvalidDataException) + { + // Skip non-zip payloads despite the extension. + } + } + + foreach (var directory in Directory.EnumerateDirectories(context.RootPath, "*", EnumerationOptions)) + { + cancellationToken.ThrowIfCancellationRequested(); + + try + { + if (!LooksLikeRuntimeImage(directory)) + { + continue; + } + + var releasePath = Path.Combine(directory, "release"); + if (!File.Exists(releasePath)) + { + continue; + } + + var metadata = JavaReleaseFileParser.Parse(releasePath); + runtimeImages.Add(new JavaRuntimeImage( + AbsolutePath: directory, + RelativePath: context.GetRelativePath(directory), + JavaVersion: metadata.Version, + Vendor: metadata.Vendor)); + } + catch (IOException) + { + // Skip directories we cannot access. + } + } + + return new JavaWorkspace(archives, runtimeImages); + } + + private static bool LooksLikeRuntimeImage(string directory) + { + if (!Directory.Exists(directory)) + { + return false; + } + + var libModules = Path.Combine(directory, "lib", "modules"); + var binJava = Path.Combine(directory, "bin", OperatingSystem.IsWindows() ? 
"java.exe" : "java"); + + return File.Exists(libModules) || File.Exists(binJava); + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaZipEntryUtilities.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaZipEntryUtilities.cs similarity index 96% rename from src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaZipEntryUtilities.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaZipEntryUtilities.cs index 68ccbf39..56e84cee 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaZipEntryUtilities.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/JavaZipEntryUtilities.cs @@ -1,52 +1,52 @@ -using System.Globalization; - -namespace StellaOps.Scanner.Analyzers.Lang.Java.Internal; - -internal static class JavaZipEntryUtilities -{ - public static string NormalizeEntryName(string entryName) - { - var normalized = entryName.Replace('\\', '/'); - return normalized.TrimStart('/'); - } - - public static bool TryParseMultiReleasePath(string normalizedPath, out string effectivePath, out int version) - { - const string Prefix = "META-INF/versions/"; - if (!normalizedPath.StartsWith(Prefix, StringComparison.OrdinalIgnoreCase)) - { - effectivePath = normalizedPath; - version = 0; - return false; - } - - var remainder = normalizedPath.AsSpan(Prefix.Length); - var separatorIndex = remainder.IndexOf('/'); - if (separatorIndex <= 0) - { - effectivePath = normalizedPath; - version = 0; - return false; - } - - var versionSpan = remainder[..separatorIndex]; - if (!int.TryParse(versionSpan, NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsedVersion)) - { - effectivePath = normalizedPath; - version = 0; - return false; - } - - var relativeSpan = remainder[(separatorIndex + 1)..]; - if (relativeSpan.IsEmpty) - { - effectivePath = normalizedPath; - version = 0; - return false; - } - - effectivePath = relativeSpan.ToString(); - version = parsedVersion; - return true; - } -} +using System.Globalization; + +namespace StellaOps.Scanner.Analyzers.Lang.Java.Internal; + +internal static class JavaZipEntryUtilities +{ + public static string NormalizeEntryName(string entryName) + { + var normalized = entryName.Replace('\\', '/'); + return normalized.TrimStart('/'); + } + + public static bool TryParseMultiReleasePath(string normalizedPath, out string effectivePath, out int version) + { + const string Prefix = "META-INF/versions/"; + if (!normalizedPath.StartsWith(Prefix, StringComparison.OrdinalIgnoreCase)) + { + effectivePath = normalizedPath; + version = 0; + return false; + } + + var remainder = normalizedPath.AsSpan(Prefix.Length); + var separatorIndex = remainder.IndexOf('/'); + if (separatorIndex <= 0) + { + effectivePath = normalizedPath; + version = 0; + return false; + } + + var versionSpan = remainder[..separatorIndex]; + if (!int.TryParse(versionSpan, NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsedVersion)) + { + effectivePath = normalizedPath; + version = 0; + return false; + } + + var relativeSpan = remainder[(separatorIndex + 1)..]; + if (relativeSpan.IsEmpty) + { + effectivePath = normalizedPath; + version = 0; + return false; + } + + effectivePath = relativeSpan.ToString(); + version = parsedVersion; + return true; + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/Reflection/JavaReflectionAnalysis.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/Reflection/JavaReflectionAnalysis.cs similarity index 96% 
rename from src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/Reflection/JavaReflectionAnalysis.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/Reflection/JavaReflectionAnalysis.cs index e68fc4a6..62594cc5 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/Reflection/JavaReflectionAnalysis.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/Reflection/JavaReflectionAnalysis.cs @@ -1,44 +1,44 @@ -using System.Collections.Immutable; - -namespace StellaOps.Scanner.Analyzers.Lang.Java.Internal.Reflection; - -internal sealed record JavaReflectionAnalysis( - ImmutableArray<JavaReflectionEdge> Edges, - ImmutableArray<JavaReflectionWarning> Warnings) -{ - public static readonly JavaReflectionAnalysis Empty = new(ImmutableArray<JavaReflectionEdge>.Empty, ImmutableArray<JavaReflectionWarning>.Empty); -} - -internal sealed record JavaReflectionEdge( - string SourceClass, - string SegmentIdentifier, - string? TargetType, - JavaReflectionReason Reason, - JavaReflectionConfidence Confidence, - string MethodName, - string MethodDescriptor, - int InstructionOffset, - string? Details); - -internal sealed record JavaReflectionWarning( - string SourceClass, - string SegmentIdentifier, - string WarningCode, - string Message, - string MethodName, - string MethodDescriptor); - -internal enum JavaReflectionReason -{ - ClassForName, - ClassLoaderLoadClass, - ServiceLoaderLoad, - ResourceLookup, -} - -internal enum JavaReflectionConfidence -{ - Low = 1, - Medium = 2, - High = 3, -} +using System.Collections.Immutable; + +namespace StellaOps.Scanner.Analyzers.Lang.Java.Internal.Reflection; + +internal sealed record JavaReflectionAnalysis( + ImmutableArray<JavaReflectionEdge> Edges, + ImmutableArray<JavaReflectionWarning> Warnings) +{ + public static readonly JavaReflectionAnalysis Empty = new(ImmutableArray<JavaReflectionEdge>.Empty, ImmutableArray<JavaReflectionWarning>.Empty); +} + +internal sealed record JavaReflectionEdge( + string SourceClass, + string SegmentIdentifier, + string? TargetType, + JavaReflectionReason Reason, + JavaReflectionConfidence Confidence, + string MethodName, + string MethodDescriptor, + int InstructionOffset, + string? 
Details); + +internal sealed record JavaReflectionWarning( + string SourceClass, + string SegmentIdentifier, + string WarningCode, + string Message, + string MethodName, + string MethodDescriptor); + +internal enum JavaReflectionReason +{ + ClassForName, + ClassLoaderLoadClass, + ServiceLoaderLoad, + ResourceLookup, +} + +internal enum JavaReflectionConfidence +{ + Low = 1, + Medium = 2, + High = 3, +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/Reflection/JavaReflectionAnalyzer.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/Reflection/JavaReflectionAnalyzer.cs similarity index 97% rename from src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/Reflection/JavaReflectionAnalyzer.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/Reflection/JavaReflectionAnalyzer.cs index efa3e699..a478ae1d 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/Reflection/JavaReflectionAnalyzer.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/Reflection/JavaReflectionAnalyzer.cs @@ -1,716 +1,716 @@ -using System.Buffers.Binary; -using System.Collections.Immutable; -using System.Text; -using StellaOps.Scanner.Analyzers.Lang.Java.Internal.ClassPath; - -namespace StellaOps.Scanner.Analyzers.Lang.Java.Internal.Reflection; - -internal static class JavaReflectionAnalyzer -{ - public static JavaReflectionAnalysis Analyze(JavaClassPathAnalysis classPath, CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(classPath); - - if (classPath.Segments.IsDefaultOrEmpty) - { - return JavaReflectionAnalysis.Empty; - } - - var edges = new List<JavaReflectionEdge>(); - var warnings = new List<JavaReflectionWarning>(); - - foreach (var segment in classPath.Segments) - { - cancellationToken.ThrowIfCancellationRequested(); - - foreach (var kvp in segment.ClassLocations) - { - var className = kvp.Key; - var location = kvp.Value; - - using var stream = location.OpenClassStream(cancellationToken); - var classFile = JavaClassFile.Parse(stream, cancellationToken); - - foreach (var method in classFile.Methods) - { - cancellationToken.ThrowIfCancellationRequested(); - AnalyzeMethod(classFile, method, segment.Identifier, className, edges, warnings); - } - } - } - - if (edges.Count == 0 && warnings.Count == 0) - { - return JavaReflectionAnalysis.Empty; - } - - return new JavaReflectionAnalysis( - edges.ToImmutableArray(), - warnings.ToImmutableArray()); - } - - private static void AnalyzeMethod( - JavaClassFile classFile, - JavaMethod method, - string segmentIdentifier, - string className, - List<JavaReflectionEdge> edges, - List<JavaReflectionWarning> warnings) - { - var pool = classFile.ConstantPool; - - string? pendingStringLiteral = null; - string? 
pendingClassLiteral = null; - var sawCurrentThread = false; - var emittedTcclWarning = false; - - var code = method.Code; - var offset = 0; - var length = code.Length; - - while (offset < length) - { - var instructionOffset = offset; - var opcode = code[offset++]; - - switch (opcode) - { - case 0x12: // LDC - { - var index = code[offset++]; - HandleLdc(index, pool, ref pendingStringLiteral, ref pendingClassLiteral); - break; - } - case 0x13: // LDC_W - case 0x14: // LDC2_W - { - var index = (code[offset++] << 8) | code[offset++]; - HandleLdc(index, pool, ref pendingStringLiteral, ref pendingClassLiteral); - break; - } - case 0xB8: // invokestatic - case 0xB6: // invokevirtual - case 0xB7: // invokespecial - case 0xB9: // invokeinterface - { - var methodIndex = (code[offset++] << 8) | code[offset++]; - if (opcode == 0xB9) - { - offset += 2; // count and zero - } - - HandleInvocation( - pool, - method, - segmentIdentifier, - className, - instructionOffset, - methodIndex, - opcode, - ref pendingStringLiteral, - ref pendingClassLiteral, - ref sawCurrentThread, - ref emittedTcclWarning, - edges, - warnings); - - pendingStringLiteral = null; - pendingClassLiteral = null; - break; - } - default: - { - if (IsStoreInstruction(opcode)) - { - pendingStringLiteral = null; - pendingClassLiteral = null; - - if (IsStoreWithExplicitIndex(opcode)) - { - offset++; - } - } - else if (IsLoadInstructionWithIndex(opcode)) - { - offset++; - } - else if (IsStackMutation(opcode)) - { - pendingStringLiteral = null; - pendingClassLiteral = null; - } - - break; - } - } - } - - // When the method calls Thread.currentThread without accessing the context loader, we do not emit warnings. - } - - private static void HandleLdc( - int constantIndex, - JavaConstantPool pool, - ref string? pendingString, - ref string? pendingClassLiteral) - { - var constantKind = pool.GetConstantKind(constantIndex); - switch (constantKind) - { - case JavaConstantKind.String: - pendingString = pool.GetString(constantIndex); - pendingClassLiteral = null; - break; - case JavaConstantKind.Class: - pendingClassLiteral = pool.GetClassName(constantIndex); - pendingString = null; - break; - default: - pendingString = null; - pendingClassLiteral = null; - break; - } - } - - private static void HandleInvocation( - JavaConstantPool pool, - JavaMethod method, - string segmentIdentifier, - string className, - int instructionOffset, - int methodIndex, - byte opcode, - ref string? pendingString, - ref string? pendingClassLiteral, - ref bool sawCurrentThread, - ref bool emittedTcclWarning, - List<JavaReflectionEdge> edges, - List<JavaReflectionWarning> warnings) - { - var methodRef = pool.GetMethodReference(methodIndex); - - var owner = methodRef.OwnerInternalName; - var name = methodRef.Name; - var descriptor = methodRef.Descriptor; - - var normalizedOwner = owner ?? string.Empty; - var normalizedSource = NormalizeClassName(className) ?? className ?? string.Empty; - - if (normalizedOwner == "java/lang/Class" && name == "forName") - { - var target = NormalizeClassName(pendingString); - var confidence = pendingString is null ? 
JavaReflectionConfidence.Low : JavaReflectionConfidence.High; - edges.Add(new JavaReflectionEdge( - normalizedSource, - segmentIdentifier, - target, - JavaReflectionReason.ClassForName, - confidence, - method.Name, - method.Descriptor, - instructionOffset, - null)); - } - else if (normalizedOwner == "java/lang/ClassLoader" && name == "loadClass") - { - var target = NormalizeClassName(pendingString); - var confidence = pendingString is null ? JavaReflectionConfidence.Low : JavaReflectionConfidence.High; - edges.Add(new JavaReflectionEdge( - normalizedSource, - segmentIdentifier, - target, - JavaReflectionReason.ClassLoaderLoadClass, - confidence, - method.Name, - method.Descriptor, - instructionOffset, - null)); - } - else if (normalizedOwner == "java/util/ServiceLoader" && name.StartsWith("load", StringComparison.Ordinal)) - { - var target = NormalizeClassName(pendingClassLiteral); - var confidence = pendingClassLiteral is null ? JavaReflectionConfidence.Low : JavaReflectionConfidence.High; - edges.Add(new JavaReflectionEdge( - normalizedSource, - segmentIdentifier, - target, - JavaReflectionReason.ServiceLoaderLoad, - confidence, - method.Name, - method.Descriptor, - instructionOffset, - null)); - } - else if (normalizedOwner == "java/lang/ClassLoader" && (name == "getResource" || name == "getResourceAsStream" || name == "getResources")) - { - var target = pendingString; - var confidence = pendingString is null ? JavaReflectionConfidence.Low : JavaReflectionConfidence.High; - edges.Add(new JavaReflectionEdge( - normalizedSource, - segmentIdentifier, - target, - JavaReflectionReason.ResourceLookup, - confidence, - method.Name, - method.Descriptor, - instructionOffset, - null)); - } - else if (normalizedOwner == "java/lang/Thread" && name == "currentThread") - { - sawCurrentThread = true; - } - else if (normalizedOwner == "java/lang/Thread" && name == "getContextClassLoader") - { - if (sawCurrentThread && !emittedTcclWarning) - { - warnings.Add(new JavaReflectionWarning( - normalizedSource, - segmentIdentifier, - "tccl", - "Thread context class loader access detected.", - method.Name, - method.Descriptor)); - emittedTcclWarning = true; - } - } - - pendingString = null; - pendingClassLiteral = null; - } - - private static string? NormalizeClassName(string? 
internalName) - { - if (string.IsNullOrWhiteSpace(internalName)) - { - return null; - } - - return internalName.Replace('/', '.'); - } - - private static bool IsStoreInstruction(byte opcode) - => (opcode >= 0x3B && opcode <= 0x4E) || (opcode >= 0x4F && opcode <= 0x56) || (opcode >= 0x36 && opcode <= 0x3A); - - private static bool IsStoreWithExplicitIndex(byte opcode) - => opcode >= 0x36 && opcode <= 0x3A; - - private static bool IsLoadInstructionWithIndex(byte opcode) - => opcode >= 0x15 && opcode <= 0x19; - - private static bool IsStackMutation(byte opcode) - => opcode is 0x57 or 0x58 or 0x59 or 0x5A or 0x5B or 0x5C or 0x5D or 0x5E or 0x5F; - - private sealed class JavaClassFile - { - public JavaClassFile(string thisClassName, JavaConstantPool constantPool, ImmutableArray<JavaMethod> methods) - { - ThisClassName = thisClassName; - ConstantPool = constantPool; - Methods = methods; - } - - public string ThisClassName { get; } - - public JavaConstantPool ConstantPool { get; } - - public ImmutableArray<JavaMethod> Methods { get; } - - public static JavaClassFile Parse(Stream stream, CancellationToken cancellationToken) - { - var reader = new BigEndianReader(stream, leaveOpen: true); - if (reader.ReadUInt32() != 0xCAFEBABE) - { - throw new InvalidDataException("Invalid Java class file magic header."); - } - - _ = reader.ReadUInt16(); // minor - _ = reader.ReadUInt16(); // major - - var constantPoolCount = reader.ReadUInt16(); - var pool = new JavaConstantPool(constantPoolCount); - - var index = 1; - while (index < constantPoolCount) - { - cancellationToken.ThrowIfCancellationRequested(); - var tag = reader.ReadByte(); - switch ((JavaConstantTag)tag) - { - case JavaConstantTag.Utf8: - { - pool.Set(index, JavaConstantPoolEntry.Utf8(reader.ReadUtf8())); - index++; - break; - } - case JavaConstantTag.Integer: - reader.Skip(4); - pool.Set(index, JavaConstantPoolEntry.Other(tag)); - index++; - break; - case JavaConstantTag.Float: - reader.Skip(4); - pool.Set(index, JavaConstantPoolEntry.Other(tag)); - index++; - break; - case JavaConstantTag.Long: - case JavaConstantTag.Double: - reader.Skip(8); - pool.Set(index, JavaConstantPoolEntry.Other(tag)); - index += 2; - break; - case JavaConstantTag.Class: - case JavaConstantTag.String: - case JavaConstantTag.MethodType: - pool.Set(index, JavaConstantPoolEntry.Indexed(tag, reader.ReadUInt16())); - index++; - break; - case JavaConstantTag.Fieldref: - case JavaConstantTag.Methodref: - case JavaConstantTag.InterfaceMethodref: - case JavaConstantTag.NameAndType: - case JavaConstantTag.InvokeDynamic: - pool.Set(index, JavaConstantPoolEntry.IndexedPair(tag, reader.ReadUInt16(), reader.ReadUInt16())); - index++; - break; - case JavaConstantTag.MethodHandle: - reader.Skip(1); // reference kind - pool.Set(index, JavaConstantPoolEntry.Indexed(tag, reader.ReadUInt16())); - index++; - break; - default: - throw new InvalidDataException($"Unsupported constant pool tag {tag}."); - } - } - - var accessFlags = reader.ReadUInt16(); - var thisClassIndex = reader.ReadUInt16(); - _ = reader.ReadUInt16(); // super - - var interfacesCount = reader.ReadUInt16(); - reader.Skip(interfacesCount * 2); - - var fieldsCount = reader.ReadUInt16(); - for (var i = 0; i < fieldsCount; i++) - { - SkipMember(reader); - } - - var methodsCount = reader.ReadUInt16(); - var methods = ImmutableArray.CreateBuilder<JavaMethod>(methodsCount); - for (var i = 0; i < methodsCount; i++) - { - cancellationToken.ThrowIfCancellationRequested(); - _ = reader.ReadUInt16(); // method access flags - var 
nameIndex = reader.ReadUInt16(); - var descriptorIndex = reader.ReadUInt16(); - var attributesCount = reader.ReadUInt16(); - - byte[]? code = null; - - for (var attr = 0; attr < attributesCount; attr++) - { - var attributeNameIndex = reader.ReadUInt16(); - var attributeLength = reader.ReadUInt32(); - var attributeName = pool.GetUtf8(attributeNameIndex) ?? string.Empty; - - if (attributeName == "Code") - { - var maxStack = reader.ReadUInt16(); - var maxLocals = reader.ReadUInt16(); - var codeLength = reader.ReadUInt32(); - code = reader.ReadBytes((int)codeLength); - var exceptionTableLength = reader.ReadUInt16(); - reader.Skip(exceptionTableLength * 8); - var codeAttributeCount = reader.ReadUInt16(); - for (var c = 0; c < codeAttributeCount; c++) - { - reader.Skip(2); // name index - var len = reader.ReadUInt32(); - reader.Skip((int)len); - } - } - else - { - reader.Skip((int)attributeLength); - } - } - - if (code is not null) - { - var name = pool.GetUtf8(nameIndex) ?? string.Empty; - var descriptor = pool.GetUtf8(descriptorIndex) ?? string.Empty; - methods.Add(new JavaMethod(name, descriptor, code)); - } - } - - var classAttributesCount = reader.ReadUInt16(); - for (var a = 0; a < classAttributesCount; a++) - { - reader.Skip(2); - var len = reader.ReadUInt32(); - reader.Skip((int)len); - } - - var thisClassName = pool.GetClassName(thisClassIndex) ?? string.Empty; - return new JavaClassFile(thisClassName, pool, methods.ToImmutable()); - } - - private static void SkipMember(BigEndianReader reader) - { - reader.Skip(6); // access, name, descriptor - var attributeCount = reader.ReadUInt16(); - for (var i = 0; i < attributeCount; i++) - { - reader.Skip(2); - var len = reader.ReadUInt32(); - reader.Skip((int)len); - } - } - } - - private sealed class JavaMethod - { - public JavaMethod(string name, string descriptor, byte[] code) - { - Name = name; - Descriptor = descriptor; - Code = code; - } - - public string Name { get; } - - public string Descriptor { get; } - - public byte[] Code { get; } - } - - private sealed class JavaConstantPool - { - private readonly JavaConstantPoolEntry?[] _entries; - - public JavaConstantPool(int count) - { - _entries = new JavaConstantPoolEntry?[count]; - } - - public void Set(int index, JavaConstantPoolEntry entry) - { - _entries[index] = entry; - } - - public JavaConstantKind GetConstantKind(int index) - { - var entry = _entries[index]; - return entry?.Kind ?? JavaConstantKind.Other; - } - - public string? GetUtf8(int index) - { - if (index <= 0 || index >= _entries.Length) - { - return null; - } - - return _entries[index] is JavaConstantPoolEntry.Utf8Entry utf8 ? utf8.Value : null; - } - - public string? GetString(int index) - { - if (_entries[index] is JavaConstantPoolEntry.IndexedEntry { Kind: JavaConstantKind.String, Index: var utf8Index }) - { - return GetUtf8(utf8Index); - } - - return null; - } - - public string? GetClassName(int index) - { - if (_entries[index] is JavaConstantPoolEntry.IndexedEntry { Kind: JavaConstantKind.Class, Index: var nameIndex }) - { - return GetUtf8(nameIndex); - } - - return null; - } - - public JavaMethodReference GetMethodReference(int index) - { - if (_entries[index] is not JavaConstantPoolEntry.IndexedPairEntry pair || pair.Kind is not (JavaConstantKind.Methodref or JavaConstantKind.InterfaceMethodref)) - { - throw new InvalidDataException($"Constant pool entry {index} is not a method reference."); - } - - var owner = GetClassName(pair.FirstIndex) ?? 
string.Empty; - var nameAndType = _entries[pair.SecondIndex] as JavaConstantPoolEntry.IndexedPairEntry; - if (nameAndType is null || nameAndType.Kind != JavaConstantKind.NameAndType) - { - throw new InvalidDataException("Invalid NameAndType entry for method reference."); - } - - var name = GetUtf8(nameAndType.FirstIndex) ?? string.Empty; - var descriptor = GetUtf8(nameAndType.SecondIndex) ?? string.Empty; - return new JavaMethodReference(owner, name, descriptor); - } - } - - private readonly record struct JavaMethodReference(string OwnerInternalName, string Name, string Descriptor); - - private abstract record JavaConstantPoolEntry(JavaConstantKind Kind) - { - public sealed record Utf8Entry(string Value) : JavaConstantPoolEntry(JavaConstantKind.Utf8); - - public sealed record IndexedEntry(JavaConstantKind Kind, ushort Index) : JavaConstantPoolEntry(Kind); - - public sealed record IndexedPairEntry(JavaConstantKind Kind, ushort FirstIndex, ushort SecondIndex) : JavaConstantPoolEntry(Kind); - - public sealed record OtherEntry(byte Tag) : JavaConstantPoolEntry(JavaConstantKind.Other); - - public static JavaConstantPoolEntry Utf8(string value) => new Utf8Entry(value); - - public static JavaConstantPoolEntry Indexed(byte tag, ushort index) - => new IndexedEntry(ToKind(tag), index); - - public static JavaConstantPoolEntry IndexedPair(byte tag, ushort first, ushort second) - => new IndexedPairEntry(ToKind(tag), first, second); - - public static JavaConstantPoolEntry Other(byte tag) => new OtherEntry(tag); - - private static JavaConstantKind ToKind(byte tag) - => tag switch - { - 7 => JavaConstantKind.Class, - 8 => JavaConstantKind.String, - 9 => JavaConstantKind.Fieldref, - 10 => JavaConstantKind.Methodref, - 11 => JavaConstantKind.InterfaceMethodref, - 12 => JavaConstantKind.NameAndType, - 15 => JavaConstantKind.MethodHandle, - 16 => JavaConstantKind.MethodType, - 18 => JavaConstantKind.InvokeDynamic, - _ => JavaConstantKind.Other, - }; - } - - private enum JavaConstantKind - { - Utf8, - Integer, - Float, - Long, - Double, - Class, - String, - Fieldref, - Methodref, - InterfaceMethodref, - NameAndType, - MethodHandle, - MethodType, - InvokeDynamic, - Other, - } - - private enum JavaConstantTag : byte - { - Utf8 = 1, - Integer = 3, - Float = 4, - Long = 5, - Double = 6, - Class = 7, - String = 8, - Fieldref = 9, - Methodref = 10, - InterfaceMethodref = 11, - NameAndType = 12, - MethodHandle = 15, - MethodType = 16, - InvokeDynamic = 18, - } - - private sealed class BigEndianReader - { - private readonly Stream _stream; - private readonly BinaryReader _reader; - - public BigEndianReader(Stream stream, bool leaveOpen) - { - _stream = stream; - _reader = new BinaryReader(stream, Encoding.UTF8, leaveOpen); - } - - public ushort ReadUInt16() - { - Span<byte> buffer = stackalloc byte[2]; - Fill(buffer); - return BinaryPrimitives.ReadUInt16BigEndian(buffer); - } - - public uint ReadUInt32() - { - Span<byte> buffer = stackalloc byte[4]; - Fill(buffer); - return BinaryPrimitives.ReadUInt32BigEndian(buffer); - } - - public int ReadInt32() - { - Span<byte> buffer = stackalloc byte[4]; - Fill(buffer); - return BinaryPrimitives.ReadInt32BigEndian(buffer); - } - - public byte ReadByte() => _reader.ReadByte(); - - public string ReadUtf8() - { - var length = ReadUInt16(); - var bytes = ReadBytes(length); - return Encoding.UTF8.GetString(bytes); - } - - public byte[] ReadBytes(int count) - { - var bytes = _reader.ReadBytes(count); - if (bytes.Length != count) - { - throw new EndOfStreamException(); - } - - return 
bytes; - } - - public void Skip(int count) - { - if (count <= 0) - { - return; - } - - var buffer = new byte[Math.Min(count, 4096)]; - var remaining = count; - - while (remaining > 0) - { - var read = _stream.Read(buffer, 0, Math.Min(buffer.Length, remaining)); - if (read == 0) - { - throw new EndOfStreamException(); - } - - remaining -= read; - } - } - - private void Fill(Span<byte> buffer) - { - var read = _stream.Read(buffer); - if (read != buffer.Length) - { - throw new EndOfStreamException(); - } - } - } -} +using System.Buffers.Binary; +using System.Collections.Immutable; +using System.Text; +using StellaOps.Scanner.Analyzers.Lang.Java.Internal.ClassPath; + +namespace StellaOps.Scanner.Analyzers.Lang.Java.Internal.Reflection; + +internal static class JavaReflectionAnalyzer +{ + public static JavaReflectionAnalysis Analyze(JavaClassPathAnalysis classPath, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(classPath); + + if (classPath.Segments.IsDefaultOrEmpty) + { + return JavaReflectionAnalysis.Empty; + } + + var edges = new List<JavaReflectionEdge>(); + var warnings = new List<JavaReflectionWarning>(); + + foreach (var segment in classPath.Segments) + { + cancellationToken.ThrowIfCancellationRequested(); + + foreach (var kvp in segment.ClassLocations) + { + var className = kvp.Key; + var location = kvp.Value; + + using var stream = location.OpenClassStream(cancellationToken); + var classFile = JavaClassFile.Parse(stream, cancellationToken); + + foreach (var method in classFile.Methods) + { + cancellationToken.ThrowIfCancellationRequested(); + AnalyzeMethod(classFile, method, segment.Identifier, className, edges, warnings); + } + } + } + + if (edges.Count == 0 && warnings.Count == 0) + { + return JavaReflectionAnalysis.Empty; + } + + return new JavaReflectionAnalysis( + edges.ToImmutableArray(), + warnings.ToImmutableArray()); + } + + private static void AnalyzeMethod( + JavaClassFile classFile, + JavaMethod method, + string segmentIdentifier, + string className, + List<JavaReflectionEdge> edges, + List<JavaReflectionWarning> warnings) + { + var pool = classFile.ConstantPool; + + string? pendingStringLiteral = null; + string? 
pendingClassLiteral = null; + var sawCurrentThread = false; + var emittedTcclWarning = false; + + var code = method.Code; + var offset = 0; + var length = code.Length; + + while (offset < length) + { + var instructionOffset = offset; + var opcode = code[offset++]; + + switch (opcode) + { + case 0x12: // LDC + { + var index = code[offset++]; + HandleLdc(index, pool, ref pendingStringLiteral, ref pendingClassLiteral); + break; + } + case 0x13: // LDC_W + case 0x14: // LDC2_W + { + var index = (code[offset++] << 8) | code[offset++]; + HandleLdc(index, pool, ref pendingStringLiteral, ref pendingClassLiteral); + break; + } + case 0xB8: // invokestatic + case 0xB6: // invokevirtual + case 0xB7: // invokespecial + case 0xB9: // invokeinterface + { + var methodIndex = (code[offset++] << 8) | code[offset++]; + if (opcode == 0xB9) + { + offset += 2; // count and zero + } + + HandleInvocation( + pool, + method, + segmentIdentifier, + className, + instructionOffset, + methodIndex, + opcode, + ref pendingStringLiteral, + ref pendingClassLiteral, + ref sawCurrentThread, + ref emittedTcclWarning, + edges, + warnings); + + pendingStringLiteral = null; + pendingClassLiteral = null; + break; + } + default: + { + if (IsStoreInstruction(opcode)) + { + pendingStringLiteral = null; + pendingClassLiteral = null; + + if (IsStoreWithExplicitIndex(opcode)) + { + offset++; + } + } + else if (IsLoadInstructionWithIndex(opcode)) + { + offset++; + } + else if (IsStackMutation(opcode)) + { + pendingStringLiteral = null; + pendingClassLiteral = null; + } + + break; + } + } + } + + // When the method calls Thread.currentThread without accessing the context loader, we do not emit warnings. + } + + private static void HandleLdc( + int constantIndex, + JavaConstantPool pool, + ref string? pendingString, + ref string? pendingClassLiteral) + { + var constantKind = pool.GetConstantKind(constantIndex); + switch (constantKind) + { + case JavaConstantKind.String: + pendingString = pool.GetString(constantIndex); + pendingClassLiteral = null; + break; + case JavaConstantKind.Class: + pendingClassLiteral = pool.GetClassName(constantIndex); + pendingString = null; + break; + default: + pendingString = null; + pendingClassLiteral = null; + break; + } + } + + private static void HandleInvocation( + JavaConstantPool pool, + JavaMethod method, + string segmentIdentifier, + string className, + int instructionOffset, + int methodIndex, + byte opcode, + ref string? pendingString, + ref string? pendingClassLiteral, + ref bool sawCurrentThread, + ref bool emittedTcclWarning, + List<JavaReflectionEdge> edges, + List<JavaReflectionWarning> warnings) + { + var methodRef = pool.GetMethodReference(methodIndex); + + var owner = methodRef.OwnerInternalName; + var name = methodRef.Name; + var descriptor = methodRef.Descriptor; + + var normalizedOwner = owner ?? string.Empty; + var normalizedSource = NormalizeClassName(className) ?? className ?? string.Empty; + + if (normalizedOwner == "java/lang/Class" && name == "forName") + { + var target = NormalizeClassName(pendingString); + var confidence = pendingString is null ? 
JavaReflectionConfidence.Low : JavaReflectionConfidence.High; + edges.Add(new JavaReflectionEdge( + normalizedSource, + segmentIdentifier, + target, + JavaReflectionReason.ClassForName, + confidence, + method.Name, + method.Descriptor, + instructionOffset, + null)); + } + else if (normalizedOwner == "java/lang/ClassLoader" && name == "loadClass") + { + var target = NormalizeClassName(pendingString); + var confidence = pendingString is null ? JavaReflectionConfidence.Low : JavaReflectionConfidence.High; + edges.Add(new JavaReflectionEdge( + normalizedSource, + segmentIdentifier, + target, + JavaReflectionReason.ClassLoaderLoadClass, + confidence, + method.Name, + method.Descriptor, + instructionOffset, + null)); + } + else if (normalizedOwner == "java/util/ServiceLoader" && name.StartsWith("load", StringComparison.Ordinal)) + { + var target = NormalizeClassName(pendingClassLiteral); + var confidence = pendingClassLiteral is null ? JavaReflectionConfidence.Low : JavaReflectionConfidence.High; + edges.Add(new JavaReflectionEdge( + normalizedSource, + segmentIdentifier, + target, + JavaReflectionReason.ServiceLoaderLoad, + confidence, + method.Name, + method.Descriptor, + instructionOffset, + null)); + } + else if (normalizedOwner == "java/lang/ClassLoader" && (name == "getResource" || name == "getResourceAsStream" || name == "getResources")) + { + var target = pendingString; + var confidence = pendingString is null ? JavaReflectionConfidence.Low : JavaReflectionConfidence.High; + edges.Add(new JavaReflectionEdge( + normalizedSource, + segmentIdentifier, + target, + JavaReflectionReason.ResourceLookup, + confidence, + method.Name, + method.Descriptor, + instructionOffset, + null)); + } + else if (normalizedOwner == "java/lang/Thread" && name == "currentThread") + { + sawCurrentThread = true; + } + else if (normalizedOwner == "java/lang/Thread" && name == "getContextClassLoader") + { + if (sawCurrentThread && !emittedTcclWarning) + { + warnings.Add(new JavaReflectionWarning( + normalizedSource, + segmentIdentifier, + "tccl", + "Thread context class loader access detected.", + method.Name, + method.Descriptor)); + emittedTcclWarning = true; + } + } + + pendingString = null; + pendingClassLiteral = null; + } + + private static string? NormalizeClassName(string? 
internalName) + { + if (string.IsNullOrWhiteSpace(internalName)) + { + return null; + } + + return internalName.Replace('/', '.'); + } + + private static bool IsStoreInstruction(byte opcode) + => (opcode >= 0x3B && opcode <= 0x4E) || (opcode >= 0x4F && opcode <= 0x56) || (opcode >= 0x36 && opcode <= 0x3A); + + private static bool IsStoreWithExplicitIndex(byte opcode) + => opcode >= 0x36 && opcode <= 0x3A; + + private static bool IsLoadInstructionWithIndex(byte opcode) + => opcode >= 0x15 && opcode <= 0x19; + + private static bool IsStackMutation(byte opcode) + => opcode is 0x57 or 0x58 or 0x59 or 0x5A or 0x5B or 0x5C or 0x5D or 0x5E or 0x5F; + + private sealed class JavaClassFile + { + public JavaClassFile(string thisClassName, JavaConstantPool constantPool, ImmutableArray<JavaMethod> methods) + { + ThisClassName = thisClassName; + ConstantPool = constantPool; + Methods = methods; + } + + public string ThisClassName { get; } + + public JavaConstantPool ConstantPool { get; } + + public ImmutableArray<JavaMethod> Methods { get; } + + public static JavaClassFile Parse(Stream stream, CancellationToken cancellationToken) + { + var reader = new BigEndianReader(stream, leaveOpen: true); + if (reader.ReadUInt32() != 0xCAFEBABE) + { + throw new InvalidDataException("Invalid Java class file magic header."); + } + + _ = reader.ReadUInt16(); // minor + _ = reader.ReadUInt16(); // major + + var constantPoolCount = reader.ReadUInt16(); + var pool = new JavaConstantPool(constantPoolCount); + + var index = 1; + while (index < constantPoolCount) + { + cancellationToken.ThrowIfCancellationRequested(); + var tag = reader.ReadByte(); + switch ((JavaConstantTag)tag) + { + case JavaConstantTag.Utf8: + { + pool.Set(index, JavaConstantPoolEntry.Utf8(reader.ReadUtf8())); + index++; + break; + } + case JavaConstantTag.Integer: + reader.Skip(4); + pool.Set(index, JavaConstantPoolEntry.Other(tag)); + index++; + break; + case JavaConstantTag.Float: + reader.Skip(4); + pool.Set(index, JavaConstantPoolEntry.Other(tag)); + index++; + break; + case JavaConstantTag.Long: + case JavaConstantTag.Double: + reader.Skip(8); + pool.Set(index, JavaConstantPoolEntry.Other(tag)); + index += 2; + break; + case JavaConstantTag.Class: + case JavaConstantTag.String: + case JavaConstantTag.MethodType: + pool.Set(index, JavaConstantPoolEntry.Indexed(tag, reader.ReadUInt16())); + index++; + break; + case JavaConstantTag.Fieldref: + case JavaConstantTag.Methodref: + case JavaConstantTag.InterfaceMethodref: + case JavaConstantTag.NameAndType: + case JavaConstantTag.InvokeDynamic: + pool.Set(index, JavaConstantPoolEntry.IndexedPair(tag, reader.ReadUInt16(), reader.ReadUInt16())); + index++; + break; + case JavaConstantTag.MethodHandle: + reader.Skip(1); // reference kind + pool.Set(index, JavaConstantPoolEntry.Indexed(tag, reader.ReadUInt16())); + index++; + break; + default: + throw new InvalidDataException($"Unsupported constant pool tag {tag}."); + } + } + + var accessFlags = reader.ReadUInt16(); + var thisClassIndex = reader.ReadUInt16(); + _ = reader.ReadUInt16(); // super + + var interfacesCount = reader.ReadUInt16(); + reader.Skip(interfacesCount * 2); + + var fieldsCount = reader.ReadUInt16(); + for (var i = 0; i < fieldsCount; i++) + { + SkipMember(reader); + } + + var methodsCount = reader.ReadUInt16(); + var methods = ImmutableArray.CreateBuilder<JavaMethod>(methodsCount); + for (var i = 0; i < methodsCount; i++) + { + cancellationToken.ThrowIfCancellationRequested(); + _ = reader.ReadUInt16(); // method access flags + var 
nameIndex = reader.ReadUInt16(); + var descriptorIndex = reader.ReadUInt16(); + var attributesCount = reader.ReadUInt16(); + + byte[]? code = null; + + for (var attr = 0; attr < attributesCount; attr++) + { + var attributeNameIndex = reader.ReadUInt16(); + var attributeLength = reader.ReadUInt32(); + var attributeName = pool.GetUtf8(attributeNameIndex) ?? string.Empty; + + if (attributeName == "Code") + { + var maxStack = reader.ReadUInt16(); + var maxLocals = reader.ReadUInt16(); + var codeLength = reader.ReadUInt32(); + code = reader.ReadBytes((int)codeLength); + var exceptionTableLength = reader.ReadUInt16(); + reader.Skip(exceptionTableLength * 8); + var codeAttributeCount = reader.ReadUInt16(); + for (var c = 0; c < codeAttributeCount; c++) + { + reader.Skip(2); // name index + var len = reader.ReadUInt32(); + reader.Skip((int)len); + } + } + else + { + reader.Skip((int)attributeLength); + } + } + + if (code is not null) + { + var name = pool.GetUtf8(nameIndex) ?? string.Empty; + var descriptor = pool.GetUtf8(descriptorIndex) ?? string.Empty; + methods.Add(new JavaMethod(name, descriptor, code)); + } + } + + var classAttributesCount = reader.ReadUInt16(); + for (var a = 0; a < classAttributesCount; a++) + { + reader.Skip(2); + var len = reader.ReadUInt32(); + reader.Skip((int)len); + } + + var thisClassName = pool.GetClassName(thisClassIndex) ?? string.Empty; + return new JavaClassFile(thisClassName, pool, methods.ToImmutable()); + } + + private static void SkipMember(BigEndianReader reader) + { + reader.Skip(6); // access, name, descriptor + var attributeCount = reader.ReadUInt16(); + for (var i = 0; i < attributeCount; i++) + { + reader.Skip(2); + var len = reader.ReadUInt32(); + reader.Skip((int)len); + } + } + } + + private sealed class JavaMethod + { + public JavaMethod(string name, string descriptor, byte[] code) + { + Name = name; + Descriptor = descriptor; + Code = code; + } + + public string Name { get; } + + public string Descriptor { get; } + + public byte[] Code { get; } + } + + private sealed class JavaConstantPool + { + private readonly JavaConstantPoolEntry?[] _entries; + + public JavaConstantPool(int count) + { + _entries = new JavaConstantPoolEntry?[count]; + } + + public void Set(int index, JavaConstantPoolEntry entry) + { + _entries[index] = entry; + } + + public JavaConstantKind GetConstantKind(int index) + { + var entry = _entries[index]; + return entry?.Kind ?? JavaConstantKind.Other; + } + + public string? GetUtf8(int index) + { + if (index <= 0 || index >= _entries.Length) + { + return null; + } + + return _entries[index] is JavaConstantPoolEntry.Utf8Entry utf8 ? utf8.Value : null; + } + + public string? GetString(int index) + { + if (_entries[index] is JavaConstantPoolEntry.IndexedEntry { Kind: JavaConstantKind.String, Index: var utf8Index }) + { + return GetUtf8(utf8Index); + } + + return null; + } + + public string? GetClassName(int index) + { + if (_entries[index] is JavaConstantPoolEntry.IndexedEntry { Kind: JavaConstantKind.Class, Index: var nameIndex }) + { + return GetUtf8(nameIndex); + } + + return null; + } + + public JavaMethodReference GetMethodReference(int index) + { + if (_entries[index] is not JavaConstantPoolEntry.IndexedPairEntry pair || pair.Kind is not (JavaConstantKind.Methodref or JavaConstantKind.InterfaceMethodref)) + { + throw new InvalidDataException($"Constant pool entry {index} is not a method reference."); + } + + var owner = GetClassName(pair.FirstIndex) ?? 
string.Empty; + var nameAndType = _entries[pair.SecondIndex] as JavaConstantPoolEntry.IndexedPairEntry; + if (nameAndType is null || nameAndType.Kind != JavaConstantKind.NameAndType) + { + throw new InvalidDataException("Invalid NameAndType entry for method reference."); + } + + var name = GetUtf8(nameAndType.FirstIndex) ?? string.Empty; + var descriptor = GetUtf8(nameAndType.SecondIndex) ?? string.Empty; + return new JavaMethodReference(owner, name, descriptor); + } + } + + private readonly record struct JavaMethodReference(string OwnerInternalName, string Name, string Descriptor); + + private abstract record JavaConstantPoolEntry(JavaConstantKind Kind) + { + public sealed record Utf8Entry(string Value) : JavaConstantPoolEntry(JavaConstantKind.Utf8); + + public sealed record IndexedEntry(JavaConstantKind Kind, ushort Index) : JavaConstantPoolEntry(Kind); + + public sealed record IndexedPairEntry(JavaConstantKind Kind, ushort FirstIndex, ushort SecondIndex) : JavaConstantPoolEntry(Kind); + + public sealed record OtherEntry(byte Tag) : JavaConstantPoolEntry(JavaConstantKind.Other); + + public static JavaConstantPoolEntry Utf8(string value) => new Utf8Entry(value); + + public static JavaConstantPoolEntry Indexed(byte tag, ushort index) + => new IndexedEntry(ToKind(tag), index); + + public static JavaConstantPoolEntry IndexedPair(byte tag, ushort first, ushort second) + => new IndexedPairEntry(ToKind(tag), first, second); + + public static JavaConstantPoolEntry Other(byte tag) => new OtherEntry(tag); + + private static JavaConstantKind ToKind(byte tag) + => tag switch + { + 7 => JavaConstantKind.Class, + 8 => JavaConstantKind.String, + 9 => JavaConstantKind.Fieldref, + 10 => JavaConstantKind.Methodref, + 11 => JavaConstantKind.InterfaceMethodref, + 12 => JavaConstantKind.NameAndType, + 15 => JavaConstantKind.MethodHandle, + 16 => JavaConstantKind.MethodType, + 18 => JavaConstantKind.InvokeDynamic, + _ => JavaConstantKind.Other, + }; + } + + private enum JavaConstantKind + { + Utf8, + Integer, + Float, + Long, + Double, + Class, + String, + Fieldref, + Methodref, + InterfaceMethodref, + NameAndType, + MethodHandle, + MethodType, + InvokeDynamic, + Other, + } + + private enum JavaConstantTag : byte + { + Utf8 = 1, + Integer = 3, + Float = 4, + Long = 5, + Double = 6, + Class = 7, + String = 8, + Fieldref = 9, + Methodref = 10, + InterfaceMethodref = 11, + NameAndType = 12, + MethodHandle = 15, + MethodType = 16, + InvokeDynamic = 18, + } + + private sealed class BigEndianReader + { + private readonly Stream _stream; + private readonly BinaryReader _reader; + + public BigEndianReader(Stream stream, bool leaveOpen) + { + _stream = stream; + _reader = new BinaryReader(stream, Encoding.UTF8, leaveOpen); + } + + public ushort ReadUInt16() + { + Span<byte> buffer = stackalloc byte[2]; + Fill(buffer); + return BinaryPrimitives.ReadUInt16BigEndian(buffer); + } + + public uint ReadUInt32() + { + Span<byte> buffer = stackalloc byte[4]; + Fill(buffer); + return BinaryPrimitives.ReadUInt32BigEndian(buffer); + } + + public int ReadInt32() + { + Span<byte> buffer = stackalloc byte[4]; + Fill(buffer); + return BinaryPrimitives.ReadInt32BigEndian(buffer); + } + + public byte ReadByte() => _reader.ReadByte(); + + public string ReadUtf8() + { + var length = ReadUInt16(); + var bytes = ReadBytes(length); + return Encoding.UTF8.GetString(bytes); + } + + public byte[] ReadBytes(int count) + { + var bytes = _reader.ReadBytes(count); + if (bytes.Length != count) + { + throw new EndOfStreamException(); + } + + return 
bytes; + } + + public void Skip(int count) + { + if (count <= 0) + { + return; + } + + var buffer = new byte[Math.Min(count, 4096)]; + var remaining = count; + + while (remaining > 0) + { + var read = _stream.Read(buffer, 0, Math.Min(buffer.Length, remaining)); + if (read == 0) + { + throw new EndOfStreamException(); + } + + remaining -= read; + } + } + + private void Fill(Span<byte> buffer) + { + var read = _stream.Read(buffer); + if (read != buffer.Length) + { + throw new EndOfStreamException(); + } + } + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ServiceProviders/JavaServiceProviderScanner.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ServiceProviders/JavaServiceProviderScanner.cs similarity index 97% rename from src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ServiceProviders/JavaServiceProviderScanner.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ServiceProviders/JavaServiceProviderScanner.cs index 94ccbcd7..33c0e863 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ServiceProviders/JavaServiceProviderScanner.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ServiceProviders/JavaServiceProviderScanner.cs @@ -1,160 +1,160 @@ -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Linq; -using System.Threading; -using StellaOps.Scanner.Analyzers.Lang.Java.Internal.ClassPath; - -namespace StellaOps.Scanner.Analyzers.Lang.Java.Internal.ServiceProviders; - -internal static class JavaServiceProviderScanner -{ - public static JavaServiceProviderAnalysis Scan(JavaClassPathAnalysis classPath, JavaSpiCatalog catalog, CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(classPath); - ArgumentNullException.ThrowIfNull(catalog); - - var services = new Dictionary<string, ServiceAccumulator>(StringComparer.Ordinal); - - foreach (var segment in classPath.Segments.OrderBy(static s => s.Order)) - { - cancellationToken.ThrowIfCancellationRequested(); - - foreach (var kvp in segment.ServiceDefinitions) - { - cancellationToken.ThrowIfCancellationRequested(); - - if (kvp.Value.IsDefaultOrEmpty) - { - continue; - } - - if (!services.TryGetValue(kvp.Key, out var accumulator)) - { - accumulator = new ServiceAccumulator(); - services[kvp.Key] = accumulator; - } - - var providerIndex = 0; - foreach (var provider in kvp.Value) - { - var normalizedProvider = provider?.Trim(); - if (string.IsNullOrEmpty(normalizedProvider)) - { - providerIndex++; - continue; - } - - accumulator.AddCandidate(new JavaServiceProviderCandidateRecord( - ProviderClass: normalizedProvider, - SegmentIdentifier: segment.Identifier, - SegmentOrder: segment.Order, - ProviderIndex: providerIndex++, - IsSelected: false)); - } - } - } - - var records = new List<JavaServiceProviderRecord>(services.Count); - - foreach (var pair in services.OrderBy(static entry => entry.Key, StringComparer.Ordinal)) - { - var descriptor = catalog.Resolve(pair.Key); - var accumulator = pair.Value; - var orderedCandidates = accumulator.GetOrderedCandidates(); - - if (orderedCandidates.Count == 0) - { - continue; - } - - var selectedIndex = accumulator.DetermineSelection(orderedCandidates); - var warnings = accumulator.BuildWarnings(); - - var candidateArray = ImmutableArray.CreateRange(orderedCandidates.Select((candidate, index) => - candidate with { IsSelected = index == selectedIndex })); - - var warningsArray = warnings.Count == 0 - ? 
ImmutableArray<string>.Empty - : ImmutableArray.CreateRange(warnings); - - records.Add(new JavaServiceProviderRecord( - ServiceId: pair.Key, - DisplayName: descriptor.DisplayName, - Category: descriptor.Category, - Candidates: candidateArray, - SelectedIndex: selectedIndex, - Warnings: warningsArray)); - } - - return new JavaServiceProviderAnalysis(records.ToImmutableArray()); - } - - private sealed class ServiceAccumulator - { - private readonly List<JavaServiceProviderCandidateRecord> _candidates = new(); - private readonly Dictionary<string, HashSet<string>> _providerSources = new(StringComparer.Ordinal); - - public void AddCandidate(JavaServiceProviderCandidateRecord candidate) - { - _candidates.Add(candidate); - - if (!_providerSources.TryGetValue(candidate.ProviderClass, out var sources)) - { - sources = new HashSet<string>(StringComparer.Ordinal); - _providerSources[candidate.ProviderClass] = sources; - } - - sources.Add(candidate.SegmentIdentifier); - } - - public IReadOnlyList<JavaServiceProviderCandidateRecord> GetOrderedCandidates() - => _candidates - .OrderBy(static c => c.SegmentOrder) - .ThenBy(static c => c.ProviderIndex) - .ThenBy(static c => c.ProviderClass, StringComparer.Ordinal) - .ToList(); - - public int DetermineSelection(IReadOnlyList<JavaServiceProviderCandidateRecord> orderedCandidates) - => orderedCandidates.Count == 0 ? -1 : 0; - - public List<string> BuildWarnings() - { - var warnings = new List<string>(); - foreach (var pair in _providerSources.OrderBy(static entry => entry.Key, StringComparer.Ordinal)) - { - if (pair.Value.Count <= 1) - { - continue; - } - - var locations = pair.Value - .OrderBy(static value => value, StringComparer.Ordinal) - .ToArray(); - - warnings.Add($"duplicate-provider: {pair.Key} ({string.Join(", ", locations)})"); - } - - return warnings; - } - } -} - -internal sealed record JavaServiceProviderAnalysis(ImmutableArray<JavaServiceProviderRecord> Services) -{ - public static readonly JavaServiceProviderAnalysis Empty = new(ImmutableArray<JavaServiceProviderRecord>.Empty); -} - -internal sealed record JavaServiceProviderRecord( - string ServiceId, - string DisplayName, - string Category, - ImmutableArray<JavaServiceProviderCandidateRecord> Candidates, - int SelectedIndex, - ImmutableArray<string> Warnings); - -internal sealed record JavaServiceProviderCandidateRecord( - string ProviderClass, - string SegmentIdentifier, - int SegmentOrder, - int ProviderIndex, - bool IsSelected); +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Linq; +using System.Threading; +using StellaOps.Scanner.Analyzers.Lang.Java.Internal.ClassPath; + +namespace StellaOps.Scanner.Analyzers.Lang.Java.Internal.ServiceProviders; + +internal static class JavaServiceProviderScanner +{ + public static JavaServiceProviderAnalysis Scan(JavaClassPathAnalysis classPath, JavaSpiCatalog catalog, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(classPath); + ArgumentNullException.ThrowIfNull(catalog); + + var services = new Dictionary<string, ServiceAccumulator>(StringComparer.Ordinal); + + foreach (var segment in classPath.Segments.OrderBy(static s => s.Order)) + { + cancellationToken.ThrowIfCancellationRequested(); + + foreach (var kvp in segment.ServiceDefinitions) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (kvp.Value.IsDefaultOrEmpty) + { + continue; + } + + if (!services.TryGetValue(kvp.Key, out var accumulator)) + { + accumulator = new ServiceAccumulator(); + services[kvp.Key] = 
accumulator; + } + + var providerIndex = 0; + foreach (var provider in kvp.Value) + { + var normalizedProvider = provider?.Trim(); + if (string.IsNullOrEmpty(normalizedProvider)) + { + providerIndex++; + continue; + } + + accumulator.AddCandidate(new JavaServiceProviderCandidateRecord( + ProviderClass: normalizedProvider, + SegmentIdentifier: segment.Identifier, + SegmentOrder: segment.Order, + ProviderIndex: providerIndex++, + IsSelected: false)); + } + } + } + + var records = new List<JavaServiceProviderRecord>(services.Count); + + foreach (var pair in services.OrderBy(static entry => entry.Key, StringComparer.Ordinal)) + { + var descriptor = catalog.Resolve(pair.Key); + var accumulator = pair.Value; + var orderedCandidates = accumulator.GetOrderedCandidates(); + + if (orderedCandidates.Count == 0) + { + continue; + } + + var selectedIndex = accumulator.DetermineSelection(orderedCandidates); + var warnings = accumulator.BuildWarnings(); + + var candidateArray = ImmutableArray.CreateRange(orderedCandidates.Select((candidate, index) => + candidate with { IsSelected = index == selectedIndex })); + + var warningsArray = warnings.Count == 0 + ? ImmutableArray<string>.Empty + : ImmutableArray.CreateRange(warnings); + + records.Add(new JavaServiceProviderRecord( + ServiceId: pair.Key, + DisplayName: descriptor.DisplayName, + Category: descriptor.Category, + Candidates: candidateArray, + SelectedIndex: selectedIndex, + Warnings: warningsArray)); + } + + return new JavaServiceProviderAnalysis(records.ToImmutableArray()); + } + + private sealed class ServiceAccumulator + { + private readonly List<JavaServiceProviderCandidateRecord> _candidates = new(); + private readonly Dictionary<string, HashSet<string>> _providerSources = new(StringComparer.Ordinal); + + public void AddCandidate(JavaServiceProviderCandidateRecord candidate) + { + _candidates.Add(candidate); + + if (!_providerSources.TryGetValue(candidate.ProviderClass, out var sources)) + { + sources = new HashSet<string>(StringComparer.Ordinal); + _providerSources[candidate.ProviderClass] = sources; + } + + sources.Add(candidate.SegmentIdentifier); + } + + public IReadOnlyList<JavaServiceProviderCandidateRecord> GetOrderedCandidates() + => _candidates + .OrderBy(static c => c.SegmentOrder) + .ThenBy(static c => c.ProviderIndex) + .ThenBy(static c => c.ProviderClass, StringComparer.Ordinal) + .ToList(); + + public int DetermineSelection(IReadOnlyList<JavaServiceProviderCandidateRecord> orderedCandidates) + => orderedCandidates.Count == 0 ? 
-1 : 0; + + public List<string> BuildWarnings() + { + var warnings = new List<string>(); + foreach (var pair in _providerSources.OrderBy(static entry => entry.Key, StringComparer.Ordinal)) + { + if (pair.Value.Count <= 1) + { + continue; + } + + var locations = pair.Value + .OrderBy(static value => value, StringComparer.Ordinal) + .ToArray(); + + warnings.Add($"duplicate-provider: {pair.Key} ({string.Join(", ", locations)})"); + } + + return warnings; + } + } +} + +internal sealed record JavaServiceProviderAnalysis(ImmutableArray<JavaServiceProviderRecord> Services) +{ + public static readonly JavaServiceProviderAnalysis Empty = new(ImmutableArray<JavaServiceProviderRecord>.Empty); +} + +internal sealed record JavaServiceProviderRecord( + string ServiceId, + string DisplayName, + string Category, + ImmutableArray<JavaServiceProviderCandidateRecord> Candidates, + int SelectedIndex, + ImmutableArray<string> Warnings); + +internal sealed record JavaServiceProviderCandidateRecord( + string ProviderClass, + string SegmentIdentifier, + int SegmentOrder, + int ProviderIndex, + bool IsSelected); diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ServiceProviders/JavaSpiCatalog.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ServiceProviders/JavaSpiCatalog.cs similarity index 97% rename from src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ServiceProviders/JavaSpiCatalog.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ServiceProviders/JavaSpiCatalog.cs index f38d26bd..e72cc2ae 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ServiceProviders/JavaSpiCatalog.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ServiceProviders/JavaSpiCatalog.cs @@ -1,103 +1,103 @@ -using System.Collections.Immutable; -using System.Reflection; -using System.Text; -using System.Text.Json; -using System.Text.Json.Serialization; - -namespace StellaOps.Scanner.Analyzers.Lang.Java.Internal.ServiceProviders; - -internal sealed class JavaSpiCatalog -{ - private static readonly Lazy<JavaSpiCatalog> LazyDefault = new(LoadDefaultCore); - private readonly ImmutableDictionary<string, JavaSpiDescriptor> _descriptors; - - private JavaSpiCatalog(ImmutableDictionary<string, JavaSpiDescriptor> descriptors) - { - _descriptors = descriptors; - } - - public static JavaSpiCatalog Default => LazyDefault.Value; - - public JavaSpiDescriptor Resolve(string serviceId) - { - if (string.IsNullOrWhiteSpace(serviceId)) - { - return JavaSpiDescriptor.CreateUnknown(string.Empty); - } - - var key = serviceId.Trim(); - if (_descriptors.TryGetValue(key, out var descriptor)) - { - return descriptor; - } - - return JavaSpiDescriptor.CreateUnknown(key); - } - - private static JavaSpiCatalog LoadDefaultCore() - { - var assembly = typeof(JavaSpiCatalog).GetTypeInfo().Assembly; - var resourceName = "StellaOps.Scanner.Analyzers.Lang.Java.Internal.ServiceProviders.java-spi-catalog.json"; - using var stream = assembly.GetManifestResourceStream(resourceName) - ?? throw new InvalidOperationException($"Embedded SPI catalog '{resourceName}' not found."); - using var reader = new StreamReader(stream, Encoding.UTF8, detectEncodingFromByteOrderMarks: true, leaveOpen: false); - var json = reader.ReadToEnd(); - - var items = JsonSerializer.Deserialize<List<JavaSpiDescriptor>>(json, new JsonSerializerOptions - { - PropertyNameCaseInsensitive = true, - ReadCommentHandling = JsonCommentHandling.Skip, - }) ?? 
new List<JavaSpiDescriptor>(); - - var descriptors = items - .Select(Normalize) - .Where(static item => !string.IsNullOrWhiteSpace(item.ServiceId)) - .ToImmutableDictionary( - static item => item.ServiceId, - static item => item, - StringComparer.Ordinal); - - return new JavaSpiCatalog(descriptors); - } - - private static JavaSpiDescriptor Normalize(JavaSpiDescriptor descriptor) - { - var serviceId = descriptor.ServiceId?.Trim() ?? string.Empty; - var category = string.IsNullOrWhiteSpace(descriptor.Category) - ? "unknown" - : descriptor.Category.Trim().ToLowerInvariant(); - var displayName = string.IsNullOrWhiteSpace(descriptor.DisplayName) - ? serviceId - : descriptor.DisplayName.Trim(); - - return descriptor with - { - ServiceId = serviceId, - Category = category, - DisplayName = displayName, - }; - } -} - -internal sealed record class JavaSpiDescriptor -{ - [JsonPropertyName("serviceId")] - public string ServiceId { get; init; } = string.Empty; - - [JsonPropertyName("category")] - public string Category { get; init; } = "unknown"; - - [JsonPropertyName("displayName")] - public string DisplayName { get; init; } = string.Empty; - - [JsonPropertyName("notes")] - public string? Notes { get; init; } - - public static JavaSpiDescriptor CreateUnknown(string serviceId) - => new() - { - ServiceId = serviceId, - Category = "unknown", - DisplayName = serviceId, - }; -} +using System.Collections.Immutable; +using System.Reflection; +using System.Text; +using System.Text.Json; +using System.Text.Json.Serialization; + +namespace StellaOps.Scanner.Analyzers.Lang.Java.Internal.ServiceProviders; + +internal sealed class JavaSpiCatalog +{ + private static readonly Lazy<JavaSpiCatalog> LazyDefault = new(LoadDefaultCore); + private readonly ImmutableDictionary<string, JavaSpiDescriptor> _descriptors; + + private JavaSpiCatalog(ImmutableDictionary<string, JavaSpiDescriptor> descriptors) + { + _descriptors = descriptors; + } + + public static JavaSpiCatalog Default => LazyDefault.Value; + + public JavaSpiDescriptor Resolve(string serviceId) + { + if (string.IsNullOrWhiteSpace(serviceId)) + { + return JavaSpiDescriptor.CreateUnknown(string.Empty); + } + + var key = serviceId.Trim(); + if (_descriptors.TryGetValue(key, out var descriptor)) + { + return descriptor; + } + + return JavaSpiDescriptor.CreateUnknown(key); + } + + private static JavaSpiCatalog LoadDefaultCore() + { + var assembly = typeof(JavaSpiCatalog).GetTypeInfo().Assembly; + var resourceName = "StellaOps.Scanner.Analyzers.Lang.Java.Internal.ServiceProviders.java-spi-catalog.json"; + using var stream = assembly.GetManifestResourceStream(resourceName) + ?? throw new InvalidOperationException($"Embedded SPI catalog '{resourceName}' not found."); + using var reader = new StreamReader(stream, Encoding.UTF8, detectEncodingFromByteOrderMarks: true, leaveOpen: false); + var json = reader.ReadToEnd(); + + var items = JsonSerializer.Deserialize<List<JavaSpiDescriptor>>(json, new JsonSerializerOptions + { + PropertyNameCaseInsensitive = true, + ReadCommentHandling = JsonCommentHandling.Skip, + }) ?? new List<JavaSpiDescriptor>(); + + var descriptors = items + .Select(Normalize) + .Where(static item => !string.IsNullOrWhiteSpace(item.ServiceId)) + .ToImmutableDictionary( + static item => item.ServiceId, + static item => item, + StringComparer.Ordinal); + + return new JavaSpiCatalog(descriptors); + } + + private static JavaSpiDescriptor Normalize(JavaSpiDescriptor descriptor) + { + var serviceId = descriptor.ServiceId?.Trim() ?? 
string.Empty; + var category = string.IsNullOrWhiteSpace(descriptor.Category) + ? "unknown" + : descriptor.Category.Trim().ToLowerInvariant(); + var displayName = string.IsNullOrWhiteSpace(descriptor.DisplayName) + ? serviceId + : descriptor.DisplayName.Trim(); + + return descriptor with + { + ServiceId = serviceId, + Category = category, + DisplayName = displayName, + }; + } +} + +internal sealed record class JavaSpiDescriptor +{ + [JsonPropertyName("serviceId")] + public string ServiceId { get; init; } = string.Empty; + + [JsonPropertyName("category")] + public string Category { get; init; } = "unknown"; + + [JsonPropertyName("displayName")] + public string DisplayName { get; init; } = string.Empty; + + [JsonPropertyName("notes")] + public string? Notes { get; init; } + + public static JavaSpiDescriptor CreateUnknown(string serviceId) + => new() + { + ServiceId = serviceId, + Category = "unknown", + DisplayName = serviceId, + }; +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ServiceProviders/java-spi-catalog.json b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ServiceProviders/java-spi-catalog.json similarity index 96% rename from src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ServiceProviders/java-spi-catalog.json rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ServiceProviders/java-spi-catalog.json index f435e8d6..11cd19f3 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ServiceProviders/java-spi-catalog.json +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Internal/ServiceProviders/java-spi-catalog.json @@ -1,52 +1,52 @@ -[ - { - "serviceId": "java.sql.Driver", - "category": "jdk", - "displayName": "JDBC Driver" - }, - { - "serviceId": "javax.annotation.processing.Processor", - "category": "jdk", - "displayName": "Annotation Processor" - }, - { - "serviceId": "org.slf4j.spi.SLF4JServiceProvider", - "category": "logging", - "displayName": "SLF4J Service Provider" - }, - { - "serviceId": "ch.qos.logback.classic.spi.Configurator", - "category": "logging", - "displayName": "Logback Configurator" - }, - { - "serviceId": "com.fasterxml.jackson.core.TokenStreamFactory", - "category": "jackson", - "displayName": "Jackson Token Stream Factory" - }, - { - "serviceId": "com.fasterxml.jackson.databind.Module", - "category": "jackson", - "displayName": "Jackson Module" - }, - { - "serviceId": "org.springframework.boot.Bootstrapper", - "category": "spring", - "displayName": "Spring Boot Bootstrapper" - }, - { - "serviceId": "org.springframework.boot.SpringApplicationRunListener", - "category": "spring", - "displayName": "Spring Application Run Listener" - }, - { - "serviceId": "org.eclipse.microprofile.config.spi.ConfigSourceProvider", - "category": "microprofile", - "displayName": "MicroProfile Config Source Provider" - }, - { - "serviceId": "org.eclipse.microprofile.config.spi.Converter", - "category": "microprofile", - "displayName": "MicroProfile Converter" - } -] +[ + { + "serviceId": "java.sql.Driver", + "category": "jdk", + "displayName": "JDBC Driver" + }, + { + "serviceId": "javax.annotation.processing.Processor", + "category": "jdk", + "displayName": "Annotation Processor" + }, + { + "serviceId": "org.slf4j.spi.SLF4JServiceProvider", + "category": "logging", + "displayName": "SLF4J Service Provider" + }, + { + "serviceId": "ch.qos.logback.classic.spi.Configurator", + "category": "logging", + "displayName": "Logback Configurator" + }, + { + "serviceId": 
"com.fasterxml.jackson.core.TokenStreamFactory", + "category": "jackson", + "displayName": "Jackson Token Stream Factory" + }, + { + "serviceId": "com.fasterxml.jackson.databind.Module", + "category": "jackson", + "displayName": "Jackson Module" + }, + { + "serviceId": "org.springframework.boot.Bootstrapper", + "category": "spring", + "displayName": "Spring Boot Bootstrapper" + }, + { + "serviceId": "org.springframework.boot.SpringApplicationRunListener", + "category": "spring", + "displayName": "Spring Application Run Listener" + }, + { + "serviceId": "org.eclipse.microprofile.config.spi.ConfigSourceProvider", + "category": "microprofile", + "displayName": "MicroProfile Config Source Provider" + }, + { + "serviceId": "org.eclipse.microprofile.config.spi.Converter", + "category": "microprofile", + "displayName": "MicroProfile Converter" + } +] diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Java/JavaLanguageAnalyzer.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/JavaLanguageAnalyzer.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Java/JavaLanguageAnalyzer.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/JavaLanguageAnalyzer.cs diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Java/Properties/AssemblyInfo.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Properties/AssemblyInfo.cs similarity index 97% rename from src/StellaOps.Scanner.Analyzers.Lang.Java/Properties/AssemblyInfo.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Properties/AssemblyInfo.cs index c29279e2..19b513bd 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Java/Properties/AssemblyInfo.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/Properties/AssemblyInfo.cs @@ -1,3 +1,3 @@ -using System.Runtime.CompilerServices; - -[assembly: InternalsVisibleTo("StellaOps.Scanner.Analyzers.Lang.Java.Tests")] +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("StellaOps.Scanner.Analyzers.Lang.Java.Tests")] diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Java/StellaOps.Scanner.Analyzers.Lang.Java.csproj b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/StellaOps.Scanner.Analyzers.Lang.Java.csproj similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Java/StellaOps.Scanner.Analyzers.Lang.Java.csproj rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/StellaOps.Scanner.Analyzers.Lang.Java.csproj diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md similarity index 99% rename from src/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md index f45ac1e2..656859d9 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/TASKS.md @@ -1,31 +1,31 @@ -# Java Analyzer Task Board -> **Imposed rule:** work of this type or tasks of this type on this component — and everywhere else it should be applied. - -## Java Static Core (Sprint 39) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| SCANNER-ANALYZERS-JAVA-21-001 | DONE (2025-10-27) | Java Analyzer Guild | SCANNER-CORE-09-501 | Build input normalizer and virtual file system for JAR/WAR/EAR/fat-jar/JMOD/jimage/container roots. 
Detect packaging type, layered dirs (BOOT-INF/WEB-INF), multi-release overlays, and jlink runtime metadata. | Normalizer walks fixtures without extraction, classifies packaging, selects MR overlays deterministically, records java version + vendor from runtime images. | -| SCANNER-ANALYZERS-JAVA-21-002 | DONE (2025-10-27) | Java Analyzer Guild | SCANNER-ANALYZERS-JAVA-21-001 | Implement module/classpath builder: JPMS graph parser (`module-info.class`), classpath order rules (fat jar, war, ear), duplicate & split-package detection, package fingerprinting. | Classpath order reproduced for fixtures; module graph serialized; duplicate provider + split-package warnings emitted deterministically. | -| SCANNER-ANALYZERS-JAVA-21-003 | DONE (2025-10-27) | Java Analyzer Guild | SCANNER-ANALYZERS-JAVA-21-002 | SPI scanner covering META-INF/services, provider selection, and warning generation. Include configurable SPI corpus (JDK, Spring, logging, Jackson, MicroProfile). | SPI tables produced with selected provider + candidates; fixtures show first-wins behaviour; warnings recorded for duplicate providers. | -| SCANNER-ANALYZERS-JAVA-21-004 | DOING (2025-10-27) | Java Analyzer Guild | SCANNER-ANALYZERS-JAVA-21-002 | Reflection/dynamic loader heuristics: scan constant pools, bytecode sites (Class.forName, loadClass, TCCL usage), resource-based plugin hints, manifest loader hints. Emit edges with reason codes + confidence. | Reflection edges generated for fixtures (classpath, boot, war); includes call site metadata and confidence scoring; TCCL warning emitted where detected. | -| SCANNER-ANALYZERS-JAVA-21-005 | TODO | Java Analyzer Guild | SCANNER-ANALYZERS-JAVA-21-002 | Framework config extraction: Spring Boot imports, spring.factories, application properties/yaml, Jakarta web.xml & fragments, JAX-RS/JPA/CDI/JAXB configs, logging files, Graal native-image configs. | Framework fixtures parsed; relevant class FQCNs surfaced with reasons (`config-spring`, `config-jaxrs`, etc.); non-class config ignored; determinism guard passes. | -| SCANNER-ANALYZERS-JAVA-21-006 | TODO | Java Analyzer Guild | SCANNER-ANALYZERS-JAVA-21-002 | JNI/native hint scanner: detect native methods, System.load/Library literals, bundled native libs, Graal JNI configs; emit `jni-load` edges for native analyzer correlation. | JNI fixtures produce hint edges pointing at embedded libs; metadata includes candidate paths and reason `jni`. | -| SCANNER-ANALYZERS-JAVA-21-007 | TODO | Java Analyzer Guild | SCANNER-ANALYZERS-JAVA-21-003 | Signature and manifest metadata collector: verify JAR signature structure, capture signers, manifest loader attributes (Main-Class, Agent-Class, Start-Class, Class-Path). | Signed jar fixture reports signer info and structural validation result; manifest metadata attached to entrypoints. | - -> 2025-10-27 — SCANNER-ANALYZERS-JAVA-21-001 implemented `JavaWorkspaceNormalizer` + fixtures covering packaging, layered directories, multi-release overlays, and runtime image metadata. -> -> 2025-10-27 — SCANNER-ANALYZERS-JAVA-21-002 delivered `JavaClassPathBuilder` producing ordered segments (jar/war/boot fat, embedded libs), JPMS descriptors via `JavaModuleInfoParser`, and duplicate/split-package detection with package fingerprints + unit tests. 
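
For orientation while reviewing the SPI pieces earlier in this diff, here is a minimal editorial sketch (not part of this patch) of the catalog fallback behaviour in `JavaSpiCatalog.Resolve`. The `com.example.*` service id is a hypothetical placeholder, and since the catalog types are `internal`, such a snippet would only compile inside the analyzer assembly or a test project covered by `InternalsVisibleTo`.

```csharp
using StellaOps.Scanner.Analyzers.Lang.Java.Internal.ServiceProviders;

// Editorial sketch only: Resolve falls back to an "unknown" descriptor for ids
// missing from the embedded java-spi-catalog.json instead of throwing.
internal static class SpiCatalogResolveSketch
{
    public static void Demonstrate()
    {
        var catalog = JavaSpiCatalog.Default;

        // Present in the embedded catalog shown in this diff.
        var jdbc = catalog.Resolve("java.sql.Driver");
        // jdbc.DisplayName == "JDBC Driver", jdbc.Category == "jdk"

        // Hypothetical id that is not in the catalog.
        var custom = catalog.Resolve("com.example.spi.CustomExtension");
        // custom.DisplayName == "com.example.spi.CustomExtension", custom.Category == "unknown"
    }
}
```
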
-> -> 2025-10-27 — SCANNER-ANALYZERS-JAVA-21-004 in progress: added bytecode-driven `JavaReflectionAnalyzer` covering `Class.forName`, `ClassLoader.loadClass`, `ServiceLoader.load`, resource lookups, and TCCL warnings with unit fixtures (boot jar, embedded jar, synthetic classes). -> -> 2025-10-27 — SCANNER-ANALYZERS-JAVA-21-003 added SPI catalog + `JavaServiceProviderScanner`, capturing META-INF/services across layered jars, selecting first-wins providers, and emitting duplicate warnings with coverage tests (fat-jar, duplicates, simple jars). - -## Java Observation & Runtime (Sprint 40) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| SCANNER-ANALYZERS-JAVA-21-008 | BLOCKED (2025-10-27) | Java Analyzer Guild | SCANNER-ANALYZERS-JAVA-21-003, SCANNER-ANALYZERS-JAVA-21-004, SCANNER-ANALYZERS-JAVA-21-005 | Implement resolver + AOC writer: produce entrypoints (env profiles, warnings), components (jar_id + semantic ids), edges (jpms, cp, spi, reflect, jni) with reason codes/confidence. | Observation JSON for fixtures deterministic; includes entrypoints, edges, warnings; passes AOC compliance lint. | -| SCANNER-ANALYZERS-JAVA-21-009 | TODO | Java Analyzer Guild, QA Guild | SCANNER-ANALYZERS-JAVA-21-008 | Author comprehensive fixtures (modular app, boot fat jar, war, ear, MR-jar, jlink image, JNI, reflection heavy, signed jar, microprofile) with golden outputs and perf benchmarks. | Fixture suite committed under `fixtures/lang/java/ep`; determinism + benchmark gates (<300ms fat jar) configured in CI. | -| SCANNER-ANALYZERS-JAVA-21-010 | TODO | Java Analyzer Guild, Signals Guild | SCANNER-ANALYZERS-JAVA-21-008 | Optional runtime ingestion: Java agent + JFR reader capturing class load, ServiceLoader, and System.load events with path scrubbing. Emit append-only runtime edges `runtime-class`/`runtime-spi`/`runtime-load`. | Runtime harness produces scrubbed events for sample app; edges merge with static output; docs describe sandbox & privacy. | -| SCANNER-ANALYZERS-JAVA-21-011 | TODO | Java Analyzer Guild, DevOps Guild | SCANNER-ANALYZERS-JAVA-21-008 | Package analyzer as restart-time plug-in (manifest/DI), update Offline Kit docs, add CLI/worker hooks for Java inspection commands. | Plugin manifest deployed to `plugins/scanner/analyzers/lang/`; Worker loads new analyzer; Offline Kit + CLI instructions updated; smoke test verifies packaging. | - -> 2025-10-27 — SCANNER-ANALYZERS-JAVA-21-008 blocked pending upstream completion of tasks 003–005 (module/classpath resolver, SPI scanner, reflection/config extraction). Observation writer needs their outputs for components/edges/warnings per exit criteria. +# Java Analyzer Task Board +> **Imposed rule:** work of this type or tasks of this type on this component — and everywhere else it should be applied. + +## Java Static Core (Sprint 39) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| SCANNER-ANALYZERS-JAVA-21-001 | DONE (2025-10-27) | Java Analyzer Guild | SCANNER-CORE-09-501 | Build input normalizer and virtual file system for JAR/WAR/EAR/fat-jar/JMOD/jimage/container roots. Detect packaging type, layered dirs (BOOT-INF/WEB-INF), multi-release overlays, and jlink runtime metadata. | Normalizer walks fixtures without extraction, classifies packaging, selects MR overlays deterministically, records java version + vendor from runtime images. 
| +| SCANNER-ANALYZERS-JAVA-21-002 | DONE (2025-10-27) | Java Analyzer Guild | SCANNER-ANALYZERS-JAVA-21-001 | Implement module/classpath builder: JPMS graph parser (`module-info.class`), classpath order rules (fat jar, war, ear), duplicate & split-package detection, package fingerprinting. | Classpath order reproduced for fixtures; module graph serialized; duplicate provider + split-package warnings emitted deterministically. | +| SCANNER-ANALYZERS-JAVA-21-003 | DONE (2025-10-27) | Java Analyzer Guild | SCANNER-ANALYZERS-JAVA-21-002 | SPI scanner covering META-INF/services, provider selection, and warning generation. Include configurable SPI corpus (JDK, Spring, logging, Jackson, MicroProfile). | SPI tables produced with selected provider + candidates; fixtures show first-wins behaviour; warnings recorded for duplicate providers. | +| SCANNER-ANALYZERS-JAVA-21-004 | DOING (2025-10-27) | Java Analyzer Guild | SCANNER-ANALYZERS-JAVA-21-002 | Reflection/dynamic loader heuristics: scan constant pools, bytecode sites (Class.forName, loadClass, TCCL usage), resource-based plugin hints, manifest loader hints. Emit edges with reason codes + confidence. | Reflection edges generated for fixtures (classpath, boot, war); includes call site metadata and confidence scoring; TCCL warning emitted where detected. | +| SCANNER-ANALYZERS-JAVA-21-005 | TODO | Java Analyzer Guild | SCANNER-ANALYZERS-JAVA-21-002 | Framework config extraction: Spring Boot imports, spring.factories, application properties/yaml, Jakarta web.xml & fragments, JAX-RS/JPA/CDI/JAXB configs, logging files, Graal native-image configs. | Framework fixtures parsed; relevant class FQCNs surfaced with reasons (`config-spring`, `config-jaxrs`, etc.); non-class config ignored; determinism guard passes. | +| SCANNER-ANALYZERS-JAVA-21-006 | TODO | Java Analyzer Guild | SCANNER-ANALYZERS-JAVA-21-002 | JNI/native hint scanner: detect native methods, System.load/Library literals, bundled native libs, Graal JNI configs; emit `jni-load` edges for native analyzer correlation. | JNI fixtures produce hint edges pointing at embedded libs; metadata includes candidate paths and reason `jni`. | +| SCANNER-ANALYZERS-JAVA-21-007 | TODO | Java Analyzer Guild | SCANNER-ANALYZERS-JAVA-21-003 | Signature and manifest metadata collector: verify JAR signature structure, capture signers, manifest loader attributes (Main-Class, Agent-Class, Start-Class, Class-Path). | Signed jar fixture reports signer info and structural validation result; manifest metadata attached to entrypoints. | + +> 2025-10-27 — SCANNER-ANALYZERS-JAVA-21-001 implemented `JavaWorkspaceNormalizer` + fixtures covering packaging, layered directories, multi-release overlays, and runtime image metadata. +> +> 2025-10-27 — SCANNER-ANALYZERS-JAVA-21-002 delivered `JavaClassPathBuilder` producing ordered segments (jar/war/boot fat, embedded libs), JPMS descriptors via `JavaModuleInfoParser`, and duplicate/split-package detection with package fingerprints + unit tests. +> +> 2025-10-27 — SCANNER-ANALYZERS-JAVA-21-004 in progress: added bytecode-driven `JavaReflectionAnalyzer` covering `Class.forName`, `ClassLoader.loadClass`, `ServiceLoader.load`, resource lookups, and TCCL warnings with unit fixtures (boot jar, embedded jar, synthetic classes). 
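
As a companion to the SPI scanner note directly below, a short editorial usage sketch (not part of this commit) of the `JavaServiceProviderScanner` moved earlier in this diff. Only `Scan(...)`, `JavaSpiCatalog.Default`, and the record shapes above are relied on; how the `JavaClassPathAnalysis` is produced by the class-path builder is assumed and not shown here.

```csharp
using System;
using StellaOps.Scanner.Analyzers.Lang.Java.Internal.ClassPath;
using StellaOps.Scanner.Analyzers.Lang.Java.Internal.ServiceProviders;

// Editorial sketch: surface the scanner's first-wins provider selection and
// duplicate-provider warnings for a previously built class-path analysis.
internal static class ServiceProviderReportSketch
{
    public static void Print(JavaClassPathAnalysis classPath)
    {
        var analysis = JavaServiceProviderScanner.Scan(classPath, JavaSpiCatalog.Default);

        foreach (var service in analysis.Services)
        {
            // Candidates are ordered by segment order, then provider index;
            // the scanner marks index 0 as selected (classpath first-wins).
            var selected = service.SelectedIndex >= 0
                ? service.Candidates[service.SelectedIndex].ProviderClass
                : "<none>";

            Console.WriteLine($"{service.ServiceId} ({service.Category}) -> {selected}");

            foreach (var warning in service.Warnings)
            {
                // e.g. "duplicate-provider: com.example.Impl (lib-a.jar, lib-b.jar)"
                // (provider class and segment identifiers above are hypothetical)
                Console.WriteLine($"  {warning}");
            }
        }
    }
}
```

The first-wins selection mirrors the classpath behaviour the task board's exit criteria call out: earlier segments shadow later ones, and every shadowed provider is still retained as a candidate plus a `duplicate-provider` warning.
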
+> +> 2025-10-27 — SCANNER-ANALYZERS-JAVA-21-003 added SPI catalog + `JavaServiceProviderScanner`, capturing META-INF/services across layered jars, selecting first-wins providers, and emitting duplicate warnings with coverage tests (fat-jar, duplicates, simple jars). + +## Java Observation & Runtime (Sprint 40) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| SCANNER-ANALYZERS-JAVA-21-008 | BLOCKED (2025-10-27) | Java Analyzer Guild | SCANNER-ANALYZERS-JAVA-21-003, SCANNER-ANALYZERS-JAVA-21-004, SCANNER-ANALYZERS-JAVA-21-005 | Implement resolver + AOC writer: produce entrypoints (env profiles, warnings), components (jar_id + semantic ids), edges (jpms, cp, spi, reflect, jni) with reason codes/confidence. | Observation JSON for fixtures deterministic; includes entrypoints, edges, warnings; passes AOC compliance lint. | +| SCANNER-ANALYZERS-JAVA-21-009 | TODO | Java Analyzer Guild, QA Guild | SCANNER-ANALYZERS-JAVA-21-008 | Author comprehensive fixtures (modular app, boot fat jar, war, ear, MR-jar, jlink image, JNI, reflection heavy, signed jar, microprofile) with golden outputs and perf benchmarks. | Fixture suite committed under `fixtures/lang/java/ep`; determinism + benchmark gates (<300ms fat jar) configured in CI. | +| SCANNER-ANALYZERS-JAVA-21-010 | TODO | Java Analyzer Guild, Signals Guild | SCANNER-ANALYZERS-JAVA-21-008 | Optional runtime ingestion: Java agent + JFR reader capturing class load, ServiceLoader, and System.load events with path scrubbing. Emit append-only runtime edges `runtime-class`/`runtime-spi`/`runtime-load`. | Runtime harness produces scrubbed events for sample app; edges merge with static output; docs describe sandbox & privacy. | +| SCANNER-ANALYZERS-JAVA-21-011 | TODO | Java Analyzer Guild, DevOps Guild | SCANNER-ANALYZERS-JAVA-21-008 | Package analyzer as restart-time plug-in (manifest/DI), update Offline Kit docs, add CLI/worker hooks for Java inspection commands. | Plugin manifest deployed to `plugins/scanner/analyzers/lang/`; Worker loads new analyzer; Offline Kit + CLI instructions updated; smoke test verifies packaging. | + +> 2025-10-27 — SCANNER-ANALYZERS-JAVA-21-008 blocked pending upstream completion of tasks 003–005 (module/classpath resolver, SPI scanner, reflection/config extraction). Observation writer needs their outputs for components/edges/warnings per exit criteria. diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Java/manifest.json b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/manifest.json similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Java/manifest.json rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/manifest.json diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Node/AGENTS.md b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/AGENTS.md similarity index 96% rename from src/StellaOps.Scanner.Analyzers.Lang.Node/AGENTS.md rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/AGENTS.md index d5d8406a..67d37fab 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Node/AGENTS.md +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/AGENTS.md @@ -28,7 +28,7 @@ Deliver the Node.js / npm / Yarn / PNPM analyzer plug-in that resolves workspace ## Testing & Artifacts - Determinism golden fixtures under `Fixtures/lang/node/`. -- Benchmark CSV + flamegraph stored in `src/StellaOps.Bench/Scanner.Analyzers/`. 
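
Before the Node analyzer files move below, a brief editorial sketch (not from this patch) of how a host could exercise the `NodeAnalyzerPlugin` contract relocated further down in this diff. The DI container wiring is an assumption for illustration; only `Name`, `IsAvailable`, and `CreateAnalyzer` come from the `ILanguageAnalyzerPlugin` implementation being moved, and this is not the Worker's actual plug-in loader.

```csharp
using System;
using Microsoft.Extensions.DependencyInjection; // assumed host-side dependency
using StellaOps.Scanner.Analyzers.Lang.Node;

// Editorial sketch: instantiate the plug-in, check availability, and create the analyzer.
internal static class NodePluginLoadSketch
{
    public static void Load()
    {
        using var services = new ServiceCollection().BuildServiceProvider();
        var plugin = new NodeAnalyzerPlugin();

        if (plugin.IsAvailable(services))
        {
            var analyzer = plugin.CreateAnalyzer(services);
            Console.WriteLine($"Loaded {plugin.Name}"); // "StellaOps.Scanner.Analyzers.Lang.Node"
            _ = analyzer; // in real use, handed off to the language-analyzer pipeline
        }
    }
}
```
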
+- Benchmark CSV + flamegraph stored in `src/Bench/StellaOps.Bench/Scanner.Analyzers/`. - Plug-in manifest + cosign workflow added to Offline Kit instructions once analyzer is production-ready. ## Telemetry & Policy Hints diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Node/GlobalUsings.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/GlobalUsings.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Node/GlobalUsings.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/GlobalUsings.cs diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeAnalyzerMetrics.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeAnalyzerMetrics.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeAnalyzerMetrics.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeAnalyzerMetrics.cs diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeLifecycleScript.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeLifecycleScript.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeLifecycleScript.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeLifecycleScript.cs diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeLockData.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeLockData.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeLockData.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeLockData.cs diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeLockEntry.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeLockEntry.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeLockEntry.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeLockEntry.cs diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodePackage.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodePackage.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodePackage.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodePackage.cs diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodePackageCollector.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodePackageCollector.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodePackageCollector.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodePackageCollector.cs diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeWorkspaceIndex.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeWorkspaceIndex.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeWorkspaceIndex.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/Internal/NodeWorkspaceIndex.cs diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Node/NodeAnalyzerPlugin.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/NodeAnalyzerPlugin.cs similarity index 96% rename from src/StellaOps.Scanner.Analyzers.Lang.Node/NodeAnalyzerPlugin.cs rename to 
src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/NodeAnalyzerPlugin.cs index 1eddfc1f..ab64d924 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Node/NodeAnalyzerPlugin.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/NodeAnalyzerPlugin.cs @@ -1,18 +1,18 @@ -using System; -using StellaOps.Scanner.Analyzers.Lang; -using StellaOps.Scanner.Analyzers.Lang.Plugin; - -namespace StellaOps.Scanner.Analyzers.Lang.Node; - -public sealed class NodeAnalyzerPlugin : ILanguageAnalyzerPlugin -{ - public string Name => "StellaOps.Scanner.Analyzers.Lang.Node"; - - public bool IsAvailable(IServiceProvider services) => services is not null; - - public ILanguageAnalyzer CreateAnalyzer(IServiceProvider services) - { - ArgumentNullException.ThrowIfNull(services); - return new NodeLanguageAnalyzer(); - } -} +using System; +using StellaOps.Scanner.Analyzers.Lang; +using StellaOps.Scanner.Analyzers.Lang.Plugin; + +namespace StellaOps.Scanner.Analyzers.Lang.Node; + +public sealed class NodeAnalyzerPlugin : ILanguageAnalyzerPlugin +{ + public string Name => "StellaOps.Scanner.Analyzers.Lang.Node"; + + public bool IsAvailable(IServiceProvider services) => services is not null; + + public ILanguageAnalyzer CreateAnalyzer(IServiceProvider services) + { + ArgumentNullException.ThrowIfNull(services); + return new NodeLanguageAnalyzer(); + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Node/NodeLanguageAnalyzer.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/NodeLanguageAnalyzer.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Node/NodeLanguageAnalyzer.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/NodeLanguageAnalyzer.cs diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Node/StellaOps.Scanner.Analyzers.Lang.Node.csproj b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/StellaOps.Scanner.Analyzers.Lang.Node.csproj similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Node/StellaOps.Scanner.Analyzers.Lang.Node.csproj rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/StellaOps.Scanner.Analyzers.Lang.Node.csproj diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md similarity index 99% rename from src/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md index 5f1f763d..43f48e8c 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/TASKS.md @@ -1,31 +1,31 @@ -# Node Analyzer Task Flow - -| Seq | ID | Status | Depends on | Description | Exit Criteria | -|-----|----|--------|------------|-------------|---------------| -| 1 | SCANNER-ANALYZERS-LANG-10-302A | DONE (2025-10-19) | SCANNER-ANALYZERS-LANG-10-307 | Build deterministic module graph walker covering npm, Yarn, and PNPM; capture package.json provenance and integrity metadata. | Walker indexes >100 k modules in <1.5 s (hot cache); golden fixtures verify deterministic ordering and path normalization. | -| 2 | SCANNER-ANALYZERS-LANG-10-302B | DONE (2025-10-19) | SCANNER-ANALYZERS-LANG-10-302A | Resolve workspaces/symlinks and attribute components to originating package with usage hints; guard against directory traversal. | Workspace attribution accurate on multi-workspace fixture; symlink resolver proves canonical path; security tests ensure no traversal. 
| -| 3 | SCANNER-ANALYZERS-LANG-10-302C | DONE (2025-10-19) | SCANNER-ANALYZERS-LANG-10-302B | Surface script metadata (postinstall/preinstall) and policy hints; emit telemetry counters and evidence records. | Analyzer output includes script metadata + evidence; metrics `scanner_analyzer_node_scripts_total` recorded; policy hints documented. | -| 4 | SCANNER-ANALYZERS-LANG-10-307N | DONE (2025-10-21) | SCANNER-ANALYZERS-LANG-10-302C | Integrate shared helpers for license/licence evidence, canonical JSON serialization, and usage flag propagation. | Reuse shared helpers without duplication; unit tests confirm stable metadata merge; no analyzer-specific serializer drift. | -| 5 | SCANNER-ANALYZERS-LANG-10-308N | DONE (2025-10-21) | SCANNER-ANALYZERS-LANG-10-307N | Author determinism harness + fixtures for Node analyzer; add benchmark suite. | Fixtures committed under `Fixtures/lang/node/`; determinism CI job compares JSON snapshots; benchmark CSV published. | -| 6 | SCANNER-ANALYZERS-LANG-10-309N | DONE (2025-10-21) | SCANNER-ANALYZERS-LANG-10-308N | Package Node analyzer as restart-time plug-in (manifest, DI registration, Offline Kit notes). | Manifest copied to `plugins/scanner/analyzers/lang/`; Worker loads analyzer after restart; Offline Kit docs updated. | - -## Node Entry-Point Analyzer (Sprint 41) -> **Imposed rule:** work of this type or tasks of this type on this component — and everywhere else it should be applied. -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| SCANNER-ANALYZERS-NODE-22-001 | TODO | Node Analyzer Guild | SCANNER-ANALYZERS-LANG-10-309N | Build input normalizer + VFS for Node projects: dirs, tgz, container layers, pnpm store, Yarn PnP zips; detect Node version targets (`.nvmrc`, `.node-version`, Dockerfile) and workspace roots deterministically. | Normalizer handles fixtures (npm, pnpm, Yarn classic/PnP, container) without extraction; records node_version, workspace list, and symlink mode with golden outputs. | -| SCANNER-ANALYZERS-NODE-22-002 | TODO | Node Analyzer Guild | SCANNER-ANALYZERS-NODE-22-001 | Implement entrypoint discovery (bin/main/module/exports/imports, workers, electron, shebang scripts) and condition set builder per entrypoint. | Entrypoint inventory generated for fixtures (library, CLI, electron, worker); each entrypoint includes kind, start file, conditions; determinism harness updated. | -| SCANNER-ANALYZERS-NODE-22-003 | TODO | Node Analyzer Guild | SCANNER-ANALYZERS-NODE-22-001 | Parse JS/TS sources for static `import`, `require`, `import()` and string concat cases; flag dynamic patterns with confidence levels; support source map de-bundling. | Static edges + dynamic-specifier warnings emitted for fixtures (ESM, CJS, bundles); source map fixture rewrites concatenated modules with `confidence` metadata. | -| SCANNER-ANALYZERS-NODE-22-004 | TODO | Node Analyzer Guild | SCANNER-ANALYZERS-NODE-22-002 | Implement Node resolver engine for CJS + ESM (core modules, exports/imports maps, conditions, extension priorities, self-references) parameterised by node_version. | Resolver reproduces Node 18/20 semantics across fixture matrix; includes explain trace per edge; unit tests cover exports/conditions/extension ordering. | -| SCANNER-ANALYZERS-NODE-22-005 | TODO | Node Analyzer Guild | SCANNER-ANALYZERS-NODE-22-004 | Add package manager adapters: Yarn PnP (.pnp.data/.pnp.cjs), pnpm virtual store, npm/Yarn classic hoists; operate entirely in virtual FS. 
| PnP fixture resolves via `.pnp.data.json`/cache zips; pnpm fixture follows `.pnpm` symlinks; classic hoist fixture maintains first-wins ordering; warnings emitted for unreadable PnP. | -| SCANNER-ANALYZERS-NODE-22-006 | TODO | Node Analyzer Guild | SCANNER-ANALYZERS-NODE-22-004 | Detect bundles + source maps, reconstruct module specifiers, and correlate to original paths; support dual CJS/ESM graphs with conditions. | Bundle fixture using source maps produces `bundle-map` edges with medium confidence and original source paths; dual package fixture yields separate import/require graphs. | -| SCANNER-ANALYZERS-NODE-22-007 | TODO | Node Analyzer Guild | SCANNER-ANALYZERS-NODE-22-002 | Scan for native addons (.node), WASM modules, and core capability signals (child_process, vm, worker_threads); emit hint edges and native metadata. | Fixtures with native addon/WASM produce `native-addon`/`wasm` edges plus capability hints; metadata captures ABI, N-API, OS/arch where available. | - -## Node Observation & Runtime (Sprint 42) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| SCANNER-ANALYZERS-NODE-22-008 | TODO | Node Analyzer Guild | SCANNER-ANALYZERS-NODE-22-004 | Produce AOC-compliant observations: entrypoints, components (pkg/native/wasm), edges (esm-import, cjs-require, exports, json, native-addon, wasm, worker) with reason codes/confidence and resolver traces. | Observation JSON for fixtures deterministic; edges include reason codes + confidence; resolves attach trace data; passes AOC lint. | -| SCANNER-ANALYZERS-NODE-22-009 | TODO | Node Analyzer Guild, QA Guild | SCANNER-ANALYZERS-NODE-22-008 | Author fixture suite + performance benchmarks (npm, pnpm, PnP, bundle, electron, worker) with golden outputs and latency budgets. | Fixtures stored under `fixtures/lang/node/ep`; determinism + perf (<350ms npm, <900ms PnP) enforced via CI benchmarks. | -| SCANNER-ANALYZERS-NODE-22-010 | TODO | Node Analyzer Guild, Signals Guild | SCANNER-ANALYZERS-NODE-22-008 | Implement optional runtime evidence hooks (ESM loader, CJS require hook) with path scrubbing and loader ID hashing; emit runtime-* edges. | Sandbox harness records runtime resolve events for sample app; paths hashed; runtime edges merge with static graph without altering first-wins selection. | -| SCANNER-ANALYZERS-NODE-22-011 | TODO | Node Analyzer Guild, DevOps Guild | SCANNER-ANALYZERS-NODE-22-008 | Package updated analyzer as restart-time plug-in, expose Scanner CLI (`stella node *`) commands, refresh Offline Kit documentation. | Plugins folder updated; worker loads analyzer; CLI commands documented and smoke-tested; Offline Kit instructions include Node analyzer usage. | -| SCANNER-ANALYZERS-NODE-22-012 | TODO | Node Analyzer Guild | SCANNER-ANALYZERS-NODE-22-001 | Integrate container filesystem adapter (OCI layers, Dockerfile hints) and record NODE_OPTIONS/env warnings. | Container fixture parsed: Node base image + NODE_OPTIONS captured; entrypoints resolved relative to image root; warnings emitted for loader flags. | +# Node Analyzer Task Flow + +| Seq | ID | Status | Depends on | Description | Exit Criteria | +|-----|----|--------|------------|-------------|---------------| +| 1 | SCANNER-ANALYZERS-LANG-10-302A | DONE (2025-10-19) | SCANNER-ANALYZERS-LANG-10-307 | Build deterministic module graph walker covering npm, Yarn, and PNPM; capture package.json provenance and integrity metadata. 
| Walker indexes >100 k modules in <1.5 s (hot cache); golden fixtures verify deterministic ordering and path normalization. | +| 2 | SCANNER-ANALYZERS-LANG-10-302B | DONE (2025-10-19) | SCANNER-ANALYZERS-LANG-10-302A | Resolve workspaces/symlinks and attribute components to originating package with usage hints; guard against directory traversal. | Workspace attribution accurate on multi-workspace fixture; symlink resolver proves canonical path; security tests ensure no traversal. | +| 3 | SCANNER-ANALYZERS-LANG-10-302C | DONE (2025-10-19) | SCANNER-ANALYZERS-LANG-10-302B | Surface script metadata (postinstall/preinstall) and policy hints; emit telemetry counters and evidence records. | Analyzer output includes script metadata + evidence; metrics `scanner_analyzer_node_scripts_total` recorded; policy hints documented. | +| 4 | SCANNER-ANALYZERS-LANG-10-307N | DONE (2025-10-21) | SCANNER-ANALYZERS-LANG-10-302C | Integrate shared helpers for license/licence evidence, canonical JSON serialization, and usage flag propagation. | Reuse shared helpers without duplication; unit tests confirm stable metadata merge; no analyzer-specific serializer drift. | +| 5 | SCANNER-ANALYZERS-LANG-10-308N | DONE (2025-10-21) | SCANNER-ANALYZERS-LANG-10-307N | Author determinism harness + fixtures for Node analyzer; add benchmark suite. | Fixtures committed under `Fixtures/lang/node/`; determinism CI job compares JSON snapshots; benchmark CSV published. | +| 6 | SCANNER-ANALYZERS-LANG-10-309N | DONE (2025-10-21) | SCANNER-ANALYZERS-LANG-10-308N | Package Node analyzer as restart-time plug-in (manifest, DI registration, Offline Kit notes). | Manifest copied to `plugins/scanner/analyzers/lang/`; Worker loads analyzer after restart; Offline Kit docs updated. | + +## Node Entry-Point Analyzer (Sprint 41) +> **Imposed rule:** work of this type or tasks of this type on this component — and everywhere else it should be applied. +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| SCANNER-ANALYZERS-NODE-22-001 | TODO | Node Analyzer Guild | SCANNER-ANALYZERS-LANG-10-309N | Build input normalizer + VFS for Node projects: dirs, tgz, container layers, pnpm store, Yarn PnP zips; detect Node version targets (`.nvmrc`, `.node-version`, Dockerfile) and workspace roots deterministically. | Normalizer handles fixtures (npm, pnpm, Yarn classic/PnP, container) without extraction; records node_version, workspace list, and symlink mode with golden outputs. | +| SCANNER-ANALYZERS-NODE-22-002 | TODO | Node Analyzer Guild | SCANNER-ANALYZERS-NODE-22-001 | Implement entrypoint discovery (bin/main/module/exports/imports, workers, electron, shebang scripts) and condition set builder per entrypoint. | Entrypoint inventory generated for fixtures (library, CLI, electron, worker); each entrypoint includes kind, start file, conditions; determinism harness updated. | +| SCANNER-ANALYZERS-NODE-22-003 | TODO | Node Analyzer Guild | SCANNER-ANALYZERS-NODE-22-001 | Parse JS/TS sources for static `import`, `require`, `import()` and string concat cases; flag dynamic patterns with confidence levels; support source map de-bundling. | Static edges + dynamic-specifier warnings emitted for fixtures (ESM, CJS, bundles); source map fixture rewrites concatenated modules with `confidence` metadata. 
| +| SCANNER-ANALYZERS-NODE-22-004 | TODO | Node Analyzer Guild | SCANNER-ANALYZERS-NODE-22-002 | Implement Node resolver engine for CJS + ESM (core modules, exports/imports maps, conditions, extension priorities, self-references) parameterised by node_version. | Resolver reproduces Node 18/20 semantics across fixture matrix; includes explain trace per edge; unit tests cover exports/conditions/extension ordering. | +| SCANNER-ANALYZERS-NODE-22-005 | TODO | Node Analyzer Guild | SCANNER-ANALYZERS-NODE-22-004 | Add package manager adapters: Yarn PnP (.pnp.data/.pnp.cjs), pnpm virtual store, npm/Yarn classic hoists; operate entirely in virtual FS. | PnP fixture resolves via `.pnp.data.json`/cache zips; pnpm fixture follows `.pnpm` symlinks; classic hoist fixture maintains first-wins ordering; warnings emitted for unreadable PnP. | +| SCANNER-ANALYZERS-NODE-22-006 | TODO | Node Analyzer Guild | SCANNER-ANALYZERS-NODE-22-004 | Detect bundles + source maps, reconstruct module specifiers, and correlate to original paths; support dual CJS/ESM graphs with conditions. | Bundle fixture using source maps produces `bundle-map` edges with medium confidence and original source paths; dual package fixture yields separate import/require graphs. | +| SCANNER-ANALYZERS-NODE-22-007 | TODO | Node Analyzer Guild | SCANNER-ANALYZERS-NODE-22-002 | Scan for native addons (.node), WASM modules, and core capability signals (child_process, vm, worker_threads); emit hint edges and native metadata. | Fixtures with native addon/WASM produce `native-addon`/`wasm` edges plus capability hints; metadata captures ABI, N-API, OS/arch where available. | + +## Node Observation & Runtime (Sprint 42) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| SCANNER-ANALYZERS-NODE-22-008 | TODO | Node Analyzer Guild | SCANNER-ANALYZERS-NODE-22-004 | Produce AOC-compliant observations: entrypoints, components (pkg/native/wasm), edges (esm-import, cjs-require, exports, json, native-addon, wasm, worker) with reason codes/confidence and resolver traces. | Observation JSON for fixtures deterministic; edges include reason codes + confidence; resolves attach trace data; passes AOC lint. | +| SCANNER-ANALYZERS-NODE-22-009 | TODO | Node Analyzer Guild, QA Guild | SCANNER-ANALYZERS-NODE-22-008 | Author fixture suite + performance benchmarks (npm, pnpm, PnP, bundle, electron, worker) with golden outputs and latency budgets. | Fixtures stored under `fixtures/lang/node/ep`; determinism + perf (<350ms npm, <900ms PnP) enforced via CI benchmarks. | +| SCANNER-ANALYZERS-NODE-22-010 | TODO | Node Analyzer Guild, Signals Guild | SCANNER-ANALYZERS-NODE-22-008 | Implement optional runtime evidence hooks (ESM loader, CJS require hook) with path scrubbing and loader ID hashing; emit runtime-* edges. | Sandbox harness records runtime resolve events for sample app; paths hashed; runtime edges merge with static graph without altering first-wins selection. | +| SCANNER-ANALYZERS-NODE-22-011 | TODO | Node Analyzer Guild, DevOps Guild | SCANNER-ANALYZERS-NODE-22-008 | Package updated analyzer as restart-time plug-in, expose Scanner CLI (`stella node *`) commands, refresh Offline Kit documentation. | Plugins folder updated; worker loads analyzer; CLI commands documented and smoke-tested; Offline Kit instructions include Node analyzer usage. 
| +| SCANNER-ANALYZERS-NODE-22-012 | TODO | Node Analyzer Guild | SCANNER-ANALYZERS-NODE-22-001 | Integrate container filesystem adapter (OCI layers, Dockerfile hints) and record NODE_OPTIONS/env warnings. | Container fixture parsed: Node base image + NODE_OPTIONS captured; entrypoints resolved relative to image root; warnings emitted for loader flags. | diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Node/manifest.json b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/manifest.json similarity index 96% rename from src/StellaOps.Scanner.Analyzers.Lang.Node/manifest.json rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/manifest.json index 29c9a0d1..a5759e60 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Node/manifest.json +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/manifest.json @@ -1,22 +1,22 @@ -{ - "schemaVersion": "1.0", - "id": "stellaops.analyzer.lang.node", - "displayName": "StellaOps Node.js Analyzer", - "version": "0.1.0", - "requiresRestart": true, - "entryPoint": { - "type": "dotnet", - "assembly": "StellaOps.Scanner.Analyzers.Lang.Node.dll", - "typeName": "StellaOps.Scanner.Analyzers.Lang.Node.NodeAnalyzerPlugin" - }, - "capabilities": [ - "language-analyzer", - "node", - "npm" - ], - "metadata": { - "org.stellaops.analyzer.language": "node", - "org.stellaops.analyzer.kind": "language", - "org.stellaops.restart.required": "true" - } -} +{ + "schemaVersion": "1.0", + "id": "stellaops.analyzer.lang.node", + "displayName": "StellaOps Node.js Analyzer", + "version": "0.1.0", + "requiresRestart": true, + "entryPoint": { + "type": "dotnet", + "assembly": "StellaOps.Scanner.Analyzers.Lang.Node.dll", + "typeName": "StellaOps.Scanner.Analyzers.Lang.Node.NodeAnalyzerPlugin" + }, + "capabilities": [ + "language-analyzer", + "node", + "npm" + ], + "metadata": { + "org.stellaops.analyzer.language": "node", + "org.stellaops.analyzer.kind": "language", + "org.stellaops.restart.required": "true" + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python/AGENTS.md b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/AGENTS.md similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Python/AGENTS.md rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/AGENTS.md diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python/GlobalUsings.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/GlobalUsings.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Python/GlobalUsings.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/GlobalUsings.cs diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python/Internal/PythonDistributionLoader.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/PythonDistributionLoader.cs similarity index 97% rename from src/StellaOps.Scanner.Analyzers.Lang.Python/Internal/PythonDistributionLoader.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/PythonDistributionLoader.cs index 8d5c1b51..6483dd60 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Python/Internal/PythonDistributionLoader.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/Internal/PythonDistributionLoader.cs @@ -1,47 +1,47 @@ -using System.Buffers; -using System.Globalization; -using System.Security.Cryptography; -using System.Text; -using System.Text.Json; - -namespace StellaOps.Scanner.Analyzers.Lang.Python.Internal; - -internal static class 
PythonDistributionLoader -{ - - public static async Task<PythonDistribution?> LoadAsync(LanguageAnalyzerContext context, string distInfoPath, CancellationToken cancellationToken) - { - cancellationToken.ThrowIfCancellationRequested(); - - if (string.IsNullOrWhiteSpace(distInfoPath) || !Directory.Exists(distInfoPath)) - { - return null; - } - - var metadataPath = Path.Combine(distInfoPath, "METADATA"); - var wheelPath = Path.Combine(distInfoPath, "WHEEL"); - var entryPointsPath = Path.Combine(distInfoPath, "entry_points.txt"); - var recordPath = Path.Combine(distInfoPath, "RECORD"); - var installerPath = Path.Combine(distInfoPath, "INSTALLER"); - var directUrlPath = Path.Combine(distInfoPath, "direct_url.json"); - - var metadataDocument = await PythonMetadataDocument.LoadAsync(metadataPath, cancellationToken).ConfigureAwait(false); - var name = metadataDocument.GetFirst("Name") ?? ExtractNameFromDirectory(distInfoPath); - var version = metadataDocument.GetFirst("Version") ?? ExtractVersionFromDirectory(distInfoPath); - - if (string.IsNullOrWhiteSpace(name) || string.IsNullOrWhiteSpace(version)) - { - return null; - } - - var trimmedName = name.Trim(); - var trimmedVersion = version.Trim(); - var normalizedName = NormalizePackageName(trimmedName); - var purl = $"pkg:pypi/{normalizedName}@{trimmedVersion}"; - - var metadataEntries = new List<KeyValuePair<string, string?>>(); - var evidenceEntries = new List<LanguageComponentEvidence>(); - +using System.Buffers; +using System.Globalization; +using System.Security.Cryptography; +using System.Text; +using System.Text.Json; + +namespace StellaOps.Scanner.Analyzers.Lang.Python.Internal; + +internal static class PythonDistributionLoader +{ + + public static async Task<PythonDistribution?> LoadAsync(LanguageAnalyzerContext context, string distInfoPath, CancellationToken cancellationToken) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (string.IsNullOrWhiteSpace(distInfoPath) || !Directory.Exists(distInfoPath)) + { + return null; + } + + var metadataPath = Path.Combine(distInfoPath, "METADATA"); + var wheelPath = Path.Combine(distInfoPath, "WHEEL"); + var entryPointsPath = Path.Combine(distInfoPath, "entry_points.txt"); + var recordPath = Path.Combine(distInfoPath, "RECORD"); + var installerPath = Path.Combine(distInfoPath, "INSTALLER"); + var directUrlPath = Path.Combine(distInfoPath, "direct_url.json"); + + var metadataDocument = await PythonMetadataDocument.LoadAsync(metadataPath, cancellationToken).ConfigureAwait(false); + var name = metadataDocument.GetFirst("Name") ?? ExtractNameFromDirectory(distInfoPath); + var version = metadataDocument.GetFirst("Version") ?? 
ExtractVersionFromDirectory(distInfoPath); + + if (string.IsNullOrWhiteSpace(name) || string.IsNullOrWhiteSpace(version)) + { + return null; + } + + var trimmedName = name.Trim(); + var trimmedVersion = version.Trim(); + var normalizedName = NormalizePackageName(trimmedName); + var purl = $"pkg:pypi/{normalizedName}@{trimmedVersion}"; + + var metadataEntries = new List<KeyValuePair<string, string?>>(); + var evidenceEntries = new List<LanguageComponentEvidence>(); + AddFileEvidence(context, metadataPath, "METADATA", evidenceEntries); AddFileEvidence(context, wheelPath, "WHEEL", evidenceEntries); AddFileEvidence(context, entryPointsPath, "entry_points.txt", evidenceEntries); @@ -109,7 +109,7 @@ internal static class PythonDistributionLoader } } } - + var classifiers = metadataDocument.GetAll("Classifier"); if (classifiers.Count > 0) { @@ -137,62 +137,62 @@ internal static class PythonDistributionLoader } } } - - var requiresDist = metadataDocument.GetAll("Requires-Dist"); - if (requiresDist.Count > 0) - { - AppendMetadata(metadataEntries, "requiresDist", string.Join(';', requiresDist)); - } - - var entryPoints = await PythonEntryPointSet.LoadAsync(entryPointsPath, cancellationToken).ConfigureAwait(false); - foreach (var group in entryPoints.Groups.OrderBy(static g => g.Key, StringComparer.OrdinalIgnoreCase)) - { - AppendMetadata(metadataEntries, $"entryPoints.{group.Key}", string.Join(';', group.Value.Select(static ep => $"{ep.Name}={ep.Target}"))); - } - - var wheelInfo = await PythonWheelInfo.LoadAsync(wheelPath, cancellationToken).ConfigureAwait(false); - if (wheelInfo is not null) - { - foreach (var pair in wheelInfo.ToMetadata()) - { - AppendMetadata(metadataEntries, pair.Key, pair.Value); - } - } - - var installer = await ReadSingleLineAsync(installerPath, cancellationToken).ConfigureAwait(false); - if (!string.IsNullOrWhiteSpace(installer)) - { - AppendMetadata(metadataEntries, "installer", installer); - } - - var directUrl = await PythonDirectUrlInfo.LoadAsync(directUrlPath, cancellationToken).ConfigureAwait(false); - if (directUrl is not null) - { - foreach (var pair in directUrl.ToMetadata()) - { - AppendMetadata(metadataEntries, pair.Key, pair.Value); - } - - if (!string.IsNullOrWhiteSpace(directUrl.Url)) - { - evidenceEntries.Add(new LanguageComponentEvidence( - LanguageEvidenceKind.Metadata, - "direct_url.json", - PythonPathHelper.NormalizeRelative(context, directUrlPath), - directUrl.Url, - Sha256: null)); - } - } - - var recordEntries = await PythonRecordParser.LoadAsync(recordPath, cancellationToken).ConfigureAwait(false); - var verification = await PythonRecordVerifier.VerifyAsync(context, distInfoPath, recordEntries, cancellationToken).ConfigureAwait(false); - - metadataEntries.Add(new KeyValuePair<string, string?>("record.totalEntries", verification.TotalEntries.ToString(CultureInfo.InvariantCulture))); - metadataEntries.Add(new KeyValuePair<string, string?>("record.hashedEntries", verification.HashedEntries.ToString(CultureInfo.InvariantCulture))); - metadataEntries.Add(new KeyValuePair<string, string?>("record.missingFiles", verification.MissingFiles.ToString(CultureInfo.InvariantCulture))); - metadataEntries.Add(new KeyValuePair<string, string?>("record.hashMismatches", verification.HashMismatches.ToString(CultureInfo.InvariantCulture))); - metadataEntries.Add(new KeyValuePair<string, string?>("record.ioErrors", verification.IoErrors.ToString(CultureInfo.InvariantCulture))); - + + var requiresDist = metadataDocument.GetAll("Requires-Dist"); + if (requiresDist.Count > 
0) + { + AppendMetadata(metadataEntries, "requiresDist", string.Join(';', requiresDist)); + } + + var entryPoints = await PythonEntryPointSet.LoadAsync(entryPointsPath, cancellationToken).ConfigureAwait(false); + foreach (var group in entryPoints.Groups.OrderBy(static g => g.Key, StringComparer.OrdinalIgnoreCase)) + { + AppendMetadata(metadataEntries, $"entryPoints.{group.Key}", string.Join(';', group.Value.Select(static ep => $"{ep.Name}={ep.Target}"))); + } + + var wheelInfo = await PythonWheelInfo.LoadAsync(wheelPath, cancellationToken).ConfigureAwait(false); + if (wheelInfo is not null) + { + foreach (var pair in wheelInfo.ToMetadata()) + { + AppendMetadata(metadataEntries, pair.Key, pair.Value); + } + } + + var installer = await ReadSingleLineAsync(installerPath, cancellationToken).ConfigureAwait(false); + if (!string.IsNullOrWhiteSpace(installer)) + { + AppendMetadata(metadataEntries, "installer", installer); + } + + var directUrl = await PythonDirectUrlInfo.LoadAsync(directUrlPath, cancellationToken).ConfigureAwait(false); + if (directUrl is not null) + { + foreach (var pair in directUrl.ToMetadata()) + { + AppendMetadata(metadataEntries, pair.Key, pair.Value); + } + + if (!string.IsNullOrWhiteSpace(directUrl.Url)) + { + evidenceEntries.Add(new LanguageComponentEvidence( + LanguageEvidenceKind.Metadata, + "direct_url.json", + PythonPathHelper.NormalizeRelative(context, directUrlPath), + directUrl.Url, + Sha256: null)); + } + } + + var recordEntries = await PythonRecordParser.LoadAsync(recordPath, cancellationToken).ConfigureAwait(false); + var verification = await PythonRecordVerifier.VerifyAsync(context, distInfoPath, recordEntries, cancellationToken).ConfigureAwait(false); + + metadataEntries.Add(new KeyValuePair<string, string?>("record.totalEntries", verification.TotalEntries.ToString(CultureInfo.InvariantCulture))); + metadataEntries.Add(new KeyValuePair<string, string?>("record.hashedEntries", verification.HashedEntries.ToString(CultureInfo.InvariantCulture))); + metadataEntries.Add(new KeyValuePair<string, string?>("record.missingFiles", verification.MissingFiles.ToString(CultureInfo.InvariantCulture))); + metadataEntries.Add(new KeyValuePair<string, string?>("record.hashMismatches", verification.HashMismatches.ToString(CultureInfo.InvariantCulture))); + metadataEntries.Add(new KeyValuePair<string, string?>("record.ioErrors", verification.IoErrors.ToString(CultureInfo.InvariantCulture))); + if (verification.UnsupportedAlgorithms.Count > 0) { AppendMetadata(metadataEntries, "record.unsupportedAlgorithms", string.Join(';', verification.UnsupportedAlgorithms)); @@ -206,139 +206,139 @@ internal static class PythonDistributionLoader trimmedName, trimmedVersion, purl, - metadataEntries, - evidenceEntries, - usedByEntrypoint); - } - - private static bool EvaluateEntryPointUsage(LanguageAnalyzerContext context, string distInfoPath, PythonEntryPointSet entryPoints) - { - if (entryPoints.Groups.Count == 0) - { - return false; - } - - var parentDirectory = Directory.GetParent(distInfoPath)?.FullName; - if (string.IsNullOrWhiteSpace(parentDirectory)) - { - return false; - } - - foreach (var group in entryPoints.Groups.Values) - { - foreach (var entryPoint in group) - { - var candidatePaths = entryPoint.GetCandidateRelativeScriptPaths(); - foreach (var relative in candidatePaths) - { - var combined = Path.GetFullPath(Path.Combine(parentDirectory, relative)); - if (context.UsageHints.IsPathUsed(combined)) - { - return true; - } - } - } - } - - return false; - } - - private static void 
AddFileEvidence(LanguageAnalyzerContext context, string path, string source, ICollection<LanguageComponentEvidence> evidence) - { - if (!File.Exists(path)) - { - return; - } - - evidence.Add(new LanguageComponentEvidence( - LanguageEvidenceKind.File, - source, - PythonPathHelper.NormalizeRelative(context, path), - Value: null, - Sha256: null)); - } - - private static void AppendMetadata(ICollection<KeyValuePair<string, string?>> metadata, string key, string? value) - { - if (string.IsNullOrWhiteSpace(key)) - { - return; - } - - if (string.IsNullOrWhiteSpace(value)) - { - return; - } - - metadata.Add(new KeyValuePair<string, string?>(key, value.Trim())); - } - - private static string? ExtractNameFromDirectory(string distInfoPath) - { - var directoryName = Path.GetFileName(distInfoPath); - if (string.IsNullOrWhiteSpace(directoryName)) - { - return null; - } - - var suffixIndex = directoryName.IndexOf(".dist-info", StringComparison.OrdinalIgnoreCase); - if (suffixIndex <= 0) - { - return null; - } - - var trimmed = directoryName[..suffixIndex]; - var dashIndex = trimmed.LastIndexOf('-'); - if (dashIndex <= 0) - { - return trimmed; - } - - return trimmed[..dashIndex]; - } - - private static string? ExtractVersionFromDirectory(string distInfoPath) - { - var directoryName = Path.GetFileName(distInfoPath); - if (string.IsNullOrWhiteSpace(directoryName)) - { - return null; - } - - var suffixIndex = directoryName.IndexOf(".dist-info", StringComparison.OrdinalIgnoreCase); - if (suffixIndex <= 0) - { - return null; - } - - var trimmed = directoryName[..suffixIndex]; - var dashIndex = trimmed.LastIndexOf('-'); - if (dashIndex >= 0 && dashIndex < trimmed.Length - 1) - { - return trimmed[(dashIndex + 1)..]; - } - - return null; - } - + metadataEntries, + evidenceEntries, + usedByEntrypoint); + } + + private static bool EvaluateEntryPointUsage(LanguageAnalyzerContext context, string distInfoPath, PythonEntryPointSet entryPoints) + { + if (entryPoints.Groups.Count == 0) + { + return false; + } + + var parentDirectory = Directory.GetParent(distInfoPath)?.FullName; + if (string.IsNullOrWhiteSpace(parentDirectory)) + { + return false; + } + + foreach (var group in entryPoints.Groups.Values) + { + foreach (var entryPoint in group) + { + var candidatePaths = entryPoint.GetCandidateRelativeScriptPaths(); + foreach (var relative in candidatePaths) + { + var combined = Path.GetFullPath(Path.Combine(parentDirectory, relative)); + if (context.UsageHints.IsPathUsed(combined)) + { + return true; + } + } + } + } + + return false; + } + + private static void AddFileEvidence(LanguageAnalyzerContext context, string path, string source, ICollection<LanguageComponentEvidence> evidence) + { + if (!File.Exists(path)) + { + return; + } + + evidence.Add(new LanguageComponentEvidence( + LanguageEvidenceKind.File, + source, + PythonPathHelper.NormalizeRelative(context, path), + Value: null, + Sha256: null)); + } + + private static void AppendMetadata(ICollection<KeyValuePair<string, string?>> metadata, string key, string? value) + { + if (string.IsNullOrWhiteSpace(key)) + { + return; + } + + if (string.IsNullOrWhiteSpace(value)) + { + return; + } + + metadata.Add(new KeyValuePair<string, string?>(key, value.Trim())); + } + + private static string? 
ExtractNameFromDirectory(string distInfoPath) + { + var directoryName = Path.GetFileName(distInfoPath); + if (string.IsNullOrWhiteSpace(directoryName)) + { + return null; + } + + var suffixIndex = directoryName.IndexOf(".dist-info", StringComparison.OrdinalIgnoreCase); + if (suffixIndex <= 0) + { + return null; + } + + var trimmed = directoryName[..suffixIndex]; + var dashIndex = trimmed.LastIndexOf('-'); + if (dashIndex <= 0) + { + return trimmed; + } + + return trimmed[..dashIndex]; + } + + private static string? ExtractVersionFromDirectory(string distInfoPath) + { + var directoryName = Path.GetFileName(distInfoPath); + if (string.IsNullOrWhiteSpace(directoryName)) + { + return null; + } + + var suffixIndex = directoryName.IndexOf(".dist-info", StringComparison.OrdinalIgnoreCase); + if (suffixIndex <= 0) + { + return null; + } + + var trimmed = directoryName[..suffixIndex]; + var dashIndex = trimmed.LastIndexOf('-'); + if (dashIndex >= 0 && dashIndex < trimmed.Length - 1) + { + return trimmed[(dashIndex + 1)..]; + } + + return null; + } + private static string NormalizePackageName(string name) { if (string.IsNullOrWhiteSpace(name)) { return string.Empty; - } - - var builder = new StringBuilder(name.Length); - foreach (var ch in name.Trim().ToLowerInvariant()) - { - builder.Append(ch switch - { - '_' => '-', - '.' => '-', - ' ' => '-', - _ => ch - }); - } + } + + var builder = new StringBuilder(name.Length); + foreach (var ch in name.Trim().ToLowerInvariant()) + { + builder.Append(ch switch + { + '_' => '-', + '.' => '-', + ' ' => '-', + _ => ch + }); + } return builder.ToString(); } @@ -360,509 +360,509 @@ internal static class PythonDistributionLoader return null; } } - - private static async Task<string?> ReadSingleLineAsync(string path, CancellationToken cancellationToken) - { - if (!File.Exists(path)) - { - return null; - } - - await using var stream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read); - using var reader = new StreamReader(stream, PythonEncoding.Utf8, detectEncodingFromByteOrderMarks: true); - var line = await reader.ReadLineAsync(cancellationToken).ConfigureAwait(false); - return line?.Trim(); - } -} - -internal sealed record PythonDistribution( - string Name, - string Version, - string Purl, - IReadOnlyCollection<KeyValuePair<string, string?>> Metadata, - IReadOnlyCollection<LanguageComponentEvidence> Evidence, - bool UsedByEntrypoint) -{ - public IReadOnlyCollection<KeyValuePair<string, string?>> SortedMetadata => - Metadata - .OrderBy(static pair => pair.Key, StringComparer.Ordinal) - .ToArray(); - - public IReadOnlyCollection<LanguageComponentEvidence> SortedEvidence => - Evidence - .OrderBy(static item => item.Locator, StringComparer.Ordinal) - .ToArray(); -} - -internal sealed class PythonMetadataDocument -{ - private readonly Dictionary<string, List<string>> _values; - - private PythonMetadataDocument(Dictionary<string, List<string>> values) - { - _values = values; - } - - public static async Task<PythonMetadataDocument> LoadAsync(string path, CancellationToken cancellationToken) - { - if (!File.Exists(path)) - { - return new PythonMetadataDocument(new Dictionary<string, List<string>>(StringComparer.OrdinalIgnoreCase)); - } - - var values = new Dictionary<string, List<string>>(StringComparer.OrdinalIgnoreCase); - await using var stream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read); - using var reader = new StreamReader(stream, PythonEncoding.Utf8, detectEncodingFromByteOrderMarks: true); - - string? 
currentKey = null; - var builder = new StringBuilder(); - - while (await reader.ReadLineAsync(cancellationToken).ConfigureAwait(false) is { } line) - { - cancellationToken.ThrowIfCancellationRequested(); - - if (line.Length == 0) - { - Commit(); - continue; - } - - if (line.StartsWith(' ') || line.StartsWith('\t')) - { - if (currentKey is not null) - { - if (builder.Length > 0) - { - builder.Append(' '); - } - - builder.Append(line.Trim()); - } - - continue; - } - - Commit(); - - var separator = line.IndexOf(':'); - if (separator <= 0) - { - continue; - } - - currentKey = line[..separator].Trim(); - builder.Clear(); - builder.Append(line[(separator + 1)..].Trim()); - } - - Commit(); - return new PythonMetadataDocument(values); - - void Commit() - { - if (string.IsNullOrWhiteSpace(currentKey)) - { - return; - } - - if (!values.TryGetValue(currentKey, out var list)) - { - list = new List<string>(); - values[currentKey] = list; - } - - var value = builder.ToString().Trim(); - if (value.Length > 0) - { - list.Add(value); - } - - currentKey = null; - builder.Clear(); - } - } - - public string? GetFirst(string key) - { - if (key is null) - { - return null; - } - - return _values.TryGetValue(key, out var list) && list.Count > 0 - ? list[0] - : null; - } - - public IReadOnlyList<string> GetAll(string key) - { - if (key is null) - { - return Array.Empty<string>(); - } - - return _values.TryGetValue(key, out var list) - ? list.AsReadOnly() - : Array.Empty<string>(); - } -} - -internal sealed class PythonWheelInfo -{ - private readonly Dictionary<string, string> _values; - - private PythonWheelInfo(Dictionary<string, string> values) - { - _values = values; - } - - public static async Task<PythonWheelInfo?> LoadAsync(string path, CancellationToken cancellationToken) - { - if (!File.Exists(path)) - { - return null; - } - - var values = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase); - await using var stream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read); - using var reader = new StreamReader(stream, PythonEncoding.Utf8, detectEncodingFromByteOrderMarks: true); - - while (await reader.ReadLineAsync(cancellationToken).ConfigureAwait(false) is { } line) - { - cancellationToken.ThrowIfCancellationRequested(); - - if (string.IsNullOrWhiteSpace(line)) - { - continue; - } - - var separator = line.IndexOf(':'); - if (separator <= 0) - { - continue; - } - - var key = line[..separator].Trim(); - var value = line[(separator + 1)..].Trim(); - if (key.Length == 0 || value.Length == 0) - { - continue; - } - - values[key] = value; - } - - return new PythonWheelInfo(values); - } - - public IReadOnlyCollection<KeyValuePair<string, string?>> ToMetadata() - { - var entries = new List<KeyValuePair<string, string?>>(4); - - if (_values.TryGetValue("Wheel-Version", out var wheelVersion)) - { - entries.Add(new KeyValuePair<string, string?>("wheel.version", wheelVersion)); - } - - if (_values.TryGetValue("Tag", out var tags)) - { - entries.Add(new KeyValuePair<string, string?>("wheel.tags", tags)); - } - - if (_values.TryGetValue("Root-Is-Purelib", out var purelib)) - { - entries.Add(new KeyValuePair<string, string?>("wheel.rootIsPurelib", purelib)); - } - - if (_values.TryGetValue("Generator", out var generator)) - { - entries.Add(new KeyValuePair<string, string?>("wheel.generator", generator)); - } - - return entries; - } -} - -internal sealed class PythonEntryPointSet -{ - public IReadOnlyDictionary<string, IReadOnlyList<PythonEntryPoint>> Groups { get; } - - private 
PythonEntryPointSet(Dictionary<string, IReadOnlyList<PythonEntryPoint>> groups) - { - Groups = groups; - } - - public static async Task<PythonEntryPointSet> LoadAsync(string path, CancellationToken cancellationToken) - { - if (!File.Exists(path)) - { - return new PythonEntryPointSet(new Dictionary<string, IReadOnlyList<PythonEntryPoint>>(StringComparer.OrdinalIgnoreCase)); - } - - var groups = new Dictionary<string, List<PythonEntryPoint>>(StringComparer.OrdinalIgnoreCase); - string? currentGroup = null; - - await using var stream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read); - using var reader = new StreamReader(stream, PythonEncoding.Utf8, detectEncodingFromByteOrderMarks: true); - - while (await reader.ReadLineAsync(cancellationToken).ConfigureAwait(false) is { } line) - { - cancellationToken.ThrowIfCancellationRequested(); - - line = line.Trim(); - if (line.Length == 0 || line.StartsWith('#')) - { - continue; - } - - if (line.StartsWith('[') && line.EndsWith(']')) - { - currentGroup = line[1..^1].Trim(); - if (currentGroup.Length == 0) - { - currentGroup = null; - } - - continue; - } - - if (currentGroup is null) - { - continue; - } - - var separator = line.IndexOf('='); - if (separator <= 0) - { - continue; - } - - var name = line[..separator].Trim(); - var target = line[(separator + 1)..].Trim(); - if (name.Length == 0 || target.Length == 0) - { - continue; - } - - if (!groups.TryGetValue(currentGroup, out var list)) - { - list = new List<PythonEntryPoint>(); - groups[currentGroup] = list; - } - - list.Add(new PythonEntryPoint(name, target)); - } - - return new PythonEntryPointSet(groups.ToDictionary( - static pair => pair.Key, - static pair => (IReadOnlyList<PythonEntryPoint>)pair.Value.AsReadOnly(), - StringComparer.OrdinalIgnoreCase)); - } -} - -internal sealed record PythonEntryPoint(string Name, string Target) -{ - public IReadOnlyCollection<string> GetCandidateRelativeScriptPaths() - { - var list = new List<string>(3) - { - Path.Combine("bin", Name), - Path.Combine("Scripts", $"{Name}.exe"), - Path.Combine("Scripts", Name) - }; - - return list; - } -} - -internal sealed record PythonRecordEntry(string Path, string? HashAlgorithm, string? HashValue, long? Size); - -internal static class PythonRecordParser -{ - public static async Task<IReadOnlyList<PythonRecordEntry>> LoadAsync(string path, CancellationToken cancellationToken) - { - if (!File.Exists(path)) - { - return Array.Empty<PythonRecordEntry>(); - } - - var entries = new List<PythonRecordEntry>(); - - await using var stream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read); - using var reader = new StreamReader(stream, PythonEncoding.Utf8, detectEncodingFromByteOrderMarks: true); - - while (await reader.ReadLineAsync(cancellationToken).ConfigureAwait(false) is { } line) - { - cancellationToken.ThrowIfCancellationRequested(); - - if (line.Length == 0) - { - continue; - } - - var fields = ParseCsvLine(line); - if (fields.Count < 1) - { - continue; - } - - var entryPath = fields[0]; - string? algorithm = null; - string? hashValue = null; - - if (fields.Count > 1 && !string.IsNullOrWhiteSpace(fields[1])) - { - var hashField = fields[1].Trim(); - var separator = hashField.IndexOf('='); - if (separator > 0 && separator < hashField.Length - 1) - { - algorithm = hashField[..separator]; - hashValue = hashField[(separator + 1)..]; - } - } - - long? 
size = null; - if (fields.Count > 2 && long.TryParse(fields[2], NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsedSize)) - { - size = parsedSize; - } - - entries.Add(new PythonRecordEntry(entryPath, algorithm, hashValue, size)); - } - - return entries; - } - - private static List<string> ParseCsvLine(string line) - { - var values = new List<string>(); - var builder = new StringBuilder(); - var inQuotes = false; - - for (var i = 0; i < line.Length; i++) - { - var ch = line[i]; - - if (inQuotes) - { - if (ch == '"') - { - var next = i + 1 < line.Length ? line[i + 1] : '\0'; - if (next == '"') - { - builder.Append('"'); - i++; - } - else - { - inQuotes = false; - } - } - else - { - builder.Append(ch); - } - - continue; - } - - if (ch == ',') - { - values.Add(builder.ToString()); - builder.Clear(); - continue; - } - - if (ch == '"') - { - inQuotes = true; - continue; - } - - builder.Append(ch); - } - - values.Add(builder.ToString()); - return values; - } -} - -internal sealed class PythonRecordVerificationResult -{ - public PythonRecordVerificationResult( - int totalEntries, - int hashedEntries, - int missingFiles, - int hashMismatches, - int ioErrors, - bool usedByEntrypoint, - IReadOnlyCollection<string> unsupportedAlgorithms, - IReadOnlyCollection<LanguageComponentEvidence> evidence) - { - TotalEntries = totalEntries; - HashedEntries = hashedEntries; - MissingFiles = missingFiles; - HashMismatches = hashMismatches; - IoErrors = ioErrors; - UsedByEntrypoint = usedByEntrypoint; - UnsupportedAlgorithms = unsupportedAlgorithms; - Evidence = evidence; - } - - public int TotalEntries { get; } - public int HashedEntries { get; } - public int MissingFiles { get; } - public int HashMismatches { get; } - public int IoErrors { get; } - public bool UsedByEntrypoint { get; } - public IReadOnlyCollection<string> UnsupportedAlgorithms { get; } - public IReadOnlyCollection<LanguageComponentEvidence> Evidence { get; } -} - -internal static class PythonRecordVerifier -{ - private static readonly HashSet<string> SupportedAlgorithms = new(StringComparer.OrdinalIgnoreCase) - { - "sha256" - }; - - public static async Task<PythonRecordVerificationResult> VerifyAsync( - LanguageAnalyzerContext context, - string distInfoPath, - IReadOnlyList<PythonRecordEntry> entries, - CancellationToken cancellationToken) - { - if (entries.Count == 0) - { - return new PythonRecordVerificationResult(0, 0, 0, 0, 0, usedByEntrypoint: false, Array.Empty<string>(), Array.Empty<LanguageComponentEvidence>()); - } - - var evidence = new List<LanguageComponentEvidence>(); - var unsupported = new HashSet<string>(StringComparer.OrdinalIgnoreCase); - - var root = context.RootPath; - if (!root.EndsWith(Path.DirectorySeparatorChar)) - { - root += Path.DirectorySeparatorChar; - } - - var parent = Directory.GetParent(distInfoPath)?.FullName ?? 
distInfoPath; - - var total = 0; - var hashed = 0; - var missing = 0; - var mismatched = 0; - var ioErrors = 0; - var usedByEntrypoint = false; - - foreach (var entry in entries) - { - cancellationToken.ThrowIfCancellationRequested(); - total++; - - var entryPath = entry.Path.Replace('/', Path.DirectorySeparatorChar); - var fullPath = Path.GetFullPath(Path.Combine(parent, entryPath)); - + + private static async Task<string?> ReadSingleLineAsync(string path, CancellationToken cancellationToken) + { + if (!File.Exists(path)) + { + return null; + } + + await using var stream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read); + using var reader = new StreamReader(stream, PythonEncoding.Utf8, detectEncodingFromByteOrderMarks: true); + var line = await reader.ReadLineAsync(cancellationToken).ConfigureAwait(false); + return line?.Trim(); + } +} + +internal sealed record PythonDistribution( + string Name, + string Version, + string Purl, + IReadOnlyCollection<KeyValuePair<string, string?>> Metadata, + IReadOnlyCollection<LanguageComponentEvidence> Evidence, + bool UsedByEntrypoint) +{ + public IReadOnlyCollection<KeyValuePair<string, string?>> SortedMetadata => + Metadata + .OrderBy(static pair => pair.Key, StringComparer.Ordinal) + .ToArray(); + + public IReadOnlyCollection<LanguageComponentEvidence> SortedEvidence => + Evidence + .OrderBy(static item => item.Locator, StringComparer.Ordinal) + .ToArray(); +} + +internal sealed class PythonMetadataDocument +{ + private readonly Dictionary<string, List<string>> _values; + + private PythonMetadataDocument(Dictionary<string, List<string>> values) + { + _values = values; + } + + public static async Task<PythonMetadataDocument> LoadAsync(string path, CancellationToken cancellationToken) + { + if (!File.Exists(path)) + { + return new PythonMetadataDocument(new Dictionary<string, List<string>>(StringComparer.OrdinalIgnoreCase)); + } + + var values = new Dictionary<string, List<string>>(StringComparer.OrdinalIgnoreCase); + await using var stream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read); + using var reader = new StreamReader(stream, PythonEncoding.Utf8, detectEncodingFromByteOrderMarks: true); + + string? currentKey = null; + var builder = new StringBuilder(); + + while (await reader.ReadLineAsync(cancellationToken).ConfigureAwait(false) is { } line) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (line.Length == 0) + { + Commit(); + continue; + } + + if (line.StartsWith(' ') || line.StartsWith('\t')) + { + if (currentKey is not null) + { + if (builder.Length > 0) + { + builder.Append(' '); + } + + builder.Append(line.Trim()); + } + + continue; + } + + Commit(); + + var separator = line.IndexOf(':'); + if (separator <= 0) + { + continue; + } + + currentKey = line[..separator].Trim(); + builder.Clear(); + builder.Append(line[(separator + 1)..].Trim()); + } + + Commit(); + return new PythonMetadataDocument(values); + + void Commit() + { + if (string.IsNullOrWhiteSpace(currentKey)) + { + return; + } + + if (!values.TryGetValue(currentKey, out var list)) + { + list = new List<string>(); + values[currentKey] = list; + } + + var value = builder.ToString().Trim(); + if (value.Length > 0) + { + list.Add(value); + } + + currentKey = null; + builder.Clear(); + } + } + + public string? GetFirst(string key) + { + if (key is null) + { + return null; + } + + return _values.TryGetValue(key, out var list) && list.Count > 0 + ? 
list[0] + : null; + } + + public IReadOnlyList<string> GetAll(string key) + { + if (key is null) + { + return Array.Empty<string>(); + } + + return _values.TryGetValue(key, out var list) + ? list.AsReadOnly() + : Array.Empty<string>(); + } +} + +internal sealed class PythonWheelInfo +{ + private readonly Dictionary<string, string> _values; + + private PythonWheelInfo(Dictionary<string, string> values) + { + _values = values; + } + + public static async Task<PythonWheelInfo?> LoadAsync(string path, CancellationToken cancellationToken) + { + if (!File.Exists(path)) + { + return null; + } + + var values = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase); + await using var stream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read); + using var reader = new StreamReader(stream, PythonEncoding.Utf8, detectEncodingFromByteOrderMarks: true); + + while (await reader.ReadLineAsync(cancellationToken).ConfigureAwait(false) is { } line) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (string.IsNullOrWhiteSpace(line)) + { + continue; + } + + var separator = line.IndexOf(':'); + if (separator <= 0) + { + continue; + } + + var key = line[..separator].Trim(); + var value = line[(separator + 1)..].Trim(); + if (key.Length == 0 || value.Length == 0) + { + continue; + } + + values[key] = value; + } + + return new PythonWheelInfo(values); + } + + public IReadOnlyCollection<KeyValuePair<string, string?>> ToMetadata() + { + var entries = new List<KeyValuePair<string, string?>>(4); + + if (_values.TryGetValue("Wheel-Version", out var wheelVersion)) + { + entries.Add(new KeyValuePair<string, string?>("wheel.version", wheelVersion)); + } + + if (_values.TryGetValue("Tag", out var tags)) + { + entries.Add(new KeyValuePair<string, string?>("wheel.tags", tags)); + } + + if (_values.TryGetValue("Root-Is-Purelib", out var purelib)) + { + entries.Add(new KeyValuePair<string, string?>("wheel.rootIsPurelib", purelib)); + } + + if (_values.TryGetValue("Generator", out var generator)) + { + entries.Add(new KeyValuePair<string, string?>("wheel.generator", generator)); + } + + return entries; + } +} + +internal sealed class PythonEntryPointSet +{ + public IReadOnlyDictionary<string, IReadOnlyList<PythonEntryPoint>> Groups { get; } + + private PythonEntryPointSet(Dictionary<string, IReadOnlyList<PythonEntryPoint>> groups) + { + Groups = groups; + } + + public static async Task<PythonEntryPointSet> LoadAsync(string path, CancellationToken cancellationToken) + { + if (!File.Exists(path)) + { + return new PythonEntryPointSet(new Dictionary<string, IReadOnlyList<PythonEntryPoint>>(StringComparer.OrdinalIgnoreCase)); + } + + var groups = new Dictionary<string, List<PythonEntryPoint>>(StringComparer.OrdinalIgnoreCase); + string? 
currentGroup = null; + + await using var stream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read); + using var reader = new StreamReader(stream, PythonEncoding.Utf8, detectEncodingFromByteOrderMarks: true); + + while (await reader.ReadLineAsync(cancellationToken).ConfigureAwait(false) is { } line) + { + cancellationToken.ThrowIfCancellationRequested(); + + line = line.Trim(); + if (line.Length == 0 || line.StartsWith('#')) + { + continue; + } + + if (line.StartsWith('[') && line.EndsWith(']')) + { + currentGroup = line[1..^1].Trim(); + if (currentGroup.Length == 0) + { + currentGroup = null; + } + + continue; + } + + if (currentGroup is null) + { + continue; + } + + var separator = line.IndexOf('='); + if (separator <= 0) + { + continue; + } + + var name = line[..separator].Trim(); + var target = line[(separator + 1)..].Trim(); + if (name.Length == 0 || target.Length == 0) + { + continue; + } + + if (!groups.TryGetValue(currentGroup, out var list)) + { + list = new List<PythonEntryPoint>(); + groups[currentGroup] = list; + } + + list.Add(new PythonEntryPoint(name, target)); + } + + return new PythonEntryPointSet(groups.ToDictionary( + static pair => pair.Key, + static pair => (IReadOnlyList<PythonEntryPoint>)pair.Value.AsReadOnly(), + StringComparer.OrdinalIgnoreCase)); + } +} + +internal sealed record PythonEntryPoint(string Name, string Target) +{ + public IReadOnlyCollection<string> GetCandidateRelativeScriptPaths() + { + var list = new List<string>(3) + { + Path.Combine("bin", Name), + Path.Combine("Scripts", $"{Name}.exe"), + Path.Combine("Scripts", Name) + }; + + return list; + } +} + +internal sealed record PythonRecordEntry(string Path, string? HashAlgorithm, string? HashValue, long? Size); + +internal static class PythonRecordParser +{ + public static async Task<IReadOnlyList<PythonRecordEntry>> LoadAsync(string path, CancellationToken cancellationToken) + { + if (!File.Exists(path)) + { + return Array.Empty<PythonRecordEntry>(); + } + + var entries = new List<PythonRecordEntry>(); + + await using var stream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read); + using var reader = new StreamReader(stream, PythonEncoding.Utf8, detectEncodingFromByteOrderMarks: true); + + while (await reader.ReadLineAsync(cancellationToken).ConfigureAwait(false) is { } line) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (line.Length == 0) + { + continue; + } + + var fields = ParseCsvLine(line); + if (fields.Count < 1) + { + continue; + } + + var entryPath = fields[0]; + string? algorithm = null; + string? hashValue = null; + + if (fields.Count > 1 && !string.IsNullOrWhiteSpace(fields[1])) + { + var hashField = fields[1].Trim(); + var separator = hashField.IndexOf('='); + if (separator > 0 && separator < hashField.Length - 1) + { + algorithm = hashField[..separator]; + hashValue = hashField[(separator + 1)..]; + } + } + + long? size = null; + if (fields.Count > 2 && long.TryParse(fields[2], NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsedSize)) + { + size = parsedSize; + } + + entries.Add(new PythonRecordEntry(entryPath, algorithm, hashValue, size)); + } + + return entries; + } + + private static List<string> ParseCsvLine(string line) + { + var values = new List<string>(); + var builder = new StringBuilder(); + var inQuotes = false; + + for (var i = 0; i < line.Length; i++) + { + var ch = line[i]; + + if (inQuotes) + { + if (ch == '"') + { + var next = i + 1 < line.Length ? 
line[i + 1] : '\0'; + if (next == '"') + { + builder.Append('"'); + i++; + } + else + { + inQuotes = false; + } + } + else + { + builder.Append(ch); + } + + continue; + } + + if (ch == ',') + { + values.Add(builder.ToString()); + builder.Clear(); + continue; + } + + if (ch == '"') + { + inQuotes = true; + continue; + } + + builder.Append(ch); + } + + values.Add(builder.ToString()); + return values; + } +} + +internal sealed class PythonRecordVerificationResult +{ + public PythonRecordVerificationResult( + int totalEntries, + int hashedEntries, + int missingFiles, + int hashMismatches, + int ioErrors, + bool usedByEntrypoint, + IReadOnlyCollection<string> unsupportedAlgorithms, + IReadOnlyCollection<LanguageComponentEvidence> evidence) + { + TotalEntries = totalEntries; + HashedEntries = hashedEntries; + MissingFiles = missingFiles; + HashMismatches = hashMismatches; + IoErrors = ioErrors; + UsedByEntrypoint = usedByEntrypoint; + UnsupportedAlgorithms = unsupportedAlgorithms; + Evidence = evidence; + } + + public int TotalEntries { get; } + public int HashedEntries { get; } + public int MissingFiles { get; } + public int HashMismatches { get; } + public int IoErrors { get; } + public bool UsedByEntrypoint { get; } + public IReadOnlyCollection<string> UnsupportedAlgorithms { get; } + public IReadOnlyCollection<LanguageComponentEvidence> Evidence { get; } +} + +internal static class PythonRecordVerifier +{ + private static readonly HashSet<string> SupportedAlgorithms = new(StringComparer.OrdinalIgnoreCase) + { + "sha256" + }; + + public static async Task<PythonRecordVerificationResult> VerifyAsync( + LanguageAnalyzerContext context, + string distInfoPath, + IReadOnlyList<PythonRecordEntry> entries, + CancellationToken cancellationToken) + { + if (entries.Count == 0) + { + return new PythonRecordVerificationResult(0, 0, 0, 0, 0, usedByEntrypoint: false, Array.Empty<string>(), Array.Empty<LanguageComponentEvidence>()); + } + + var evidence = new List<LanguageComponentEvidence>(); + var unsupported = new HashSet<string>(StringComparer.OrdinalIgnoreCase); + + var root = context.RootPath; + if (!root.EndsWith(Path.DirectorySeparatorChar)) + { + root += Path.DirectorySeparatorChar; + } + + var parent = Directory.GetParent(distInfoPath)?.FullName ?? distInfoPath; + + var total = 0; + var hashed = 0; + var missing = 0; + var mismatched = 0; + var ioErrors = 0; + var usedByEntrypoint = false; + + foreach (var entry in entries) + { + cancellationToken.ThrowIfCancellationRequested(); + total++; + + var entryPath = entry.Path.Replace('/', Path.DirectorySeparatorChar); + var fullPath = Path.GetFullPath(Path.Combine(parent, entryPath)); + if (context.UsageHints.IsPathUsed(fullPath)) { usedByEntrypoint = true; @@ -874,12 +874,12 @@ internal static class PythonRecordVerifier evidence.Add(new LanguageComponentEvidence( LanguageEvidenceKind.Derived, "RECORD", - PythonPathHelper.NormalizeRelative(context, fullPath), - "outside-root", - Sha256: null)); - continue; - } - + PythonPathHelper.NormalizeRelative(context, fullPath), + "outside-root", + Sha256: null)); + continue; + } + if (!File.Exists(fullPath)) { missing++; @@ -891,193 +891,193 @@ internal static class PythonRecordVerifier Sha256: null)); continue; } - - if (string.IsNullOrWhiteSpace(entry.HashAlgorithm) || string.IsNullOrWhiteSpace(entry.HashValue)) - { - continue; - } - - hashed++; - - if (!SupportedAlgorithms.Contains(entry.HashAlgorithm)) - { - unsupported.Add(entry.HashAlgorithm); - continue; - } - - string? 
actualHash = null; - - try - { - actualHash = await ComputeSha256Base64Async(fullPath, cancellationToken).ConfigureAwait(false); - } - catch (IOException) - { - ioErrors++; - evidence.Add(new LanguageComponentEvidence( - LanguageEvidenceKind.Derived, - "RECORD", - PythonPathHelper.NormalizeRelative(context, fullPath), - "io-error", - Sha256: null)); - continue; - } - catch (UnauthorizedAccessException) - { - ioErrors++; - evidence.Add(new LanguageComponentEvidence( - LanguageEvidenceKind.Derived, - "RECORD", - PythonPathHelper.NormalizeRelative(context, fullPath), - "access-denied", - Sha256: null)); - continue; - } - - if (actualHash is null) - { - continue; - } - - if (!string.Equals(actualHash, entry.HashValue, StringComparison.Ordinal)) - { - mismatched++; - evidence.Add(new LanguageComponentEvidence( - LanguageEvidenceKind.Derived, - "RECORD", - PythonPathHelper.NormalizeRelative(context, fullPath), - $"sha256 mismatch expected={entry.HashValue} actual={actualHash}", - Sha256: actualHash)); - } - } - - return new PythonRecordVerificationResult( - total, - hashed, - missing, - mismatched, - ioErrors, - usedByEntrypoint, - unsupported.ToArray(), - evidence); - } - - private static async Task<string> ComputeSha256Base64Async(string path, CancellationToken cancellationToken) - { - await using var stream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read); - - using var sha = SHA256.Create(); - var buffer = ArrayPool<byte>.Shared.Rent(81920); - try - { - int bytesRead; - while ((bytesRead = await stream.ReadAsync(buffer.AsMemory(0, buffer.Length), cancellationToken).ConfigureAwait(false)) > 0) - { - sha.TransformBlock(buffer, 0, bytesRead, null, 0); - } - - sha.TransformFinalBlock(Array.Empty<byte>(), 0, 0); - return Convert.ToBase64String(sha.Hash ?? Array.Empty<byte>()); - } - finally - { - ArrayPool<byte>.Shared.Return(buffer); - } - } -} - -internal sealed class PythonDirectUrlInfo -{ - public string? Url { get; } - public bool IsEditable { get; } - public string? Subdirectory { get; } - public string? Vcs { get; } - public string? Commit { get; } - - private PythonDirectUrlInfo(string? url, bool isEditable, string? subdirectory, string? vcs, string? commit) - { - Url = url; - IsEditable = isEditable; - Subdirectory = subdirectory; - Vcs = vcs; - Commit = commit; - } - - public static async Task<PythonDirectUrlInfo?> LoadAsync(string path, CancellationToken cancellationToken) - { - if (!File.Exists(path)) - { - return null; - } - - await using var stream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read); - using var document = await JsonDocument.ParseAsync(stream, cancellationToken: cancellationToken).ConfigureAwait(false); - var root = document.RootElement; - - var url = root.TryGetProperty("url", out var urlElement) ? urlElement.GetString() : null; - var isEditable = root.TryGetProperty("dir_info", out var dirInfo) && dirInfo.TryGetProperty("editable", out var editableValue) && editableValue.GetBoolean(); - var subdir = root.TryGetProperty("dir_info", out dirInfo) && dirInfo.TryGetProperty("subdirectory", out var subdirElement) ? subdirElement.GetString() : null; - - string? vcs = null; - string? commit = null; - - if (root.TryGetProperty("vcs_info", out var vcsInfo)) - { - vcs = vcsInfo.TryGetProperty("vcs", out var vcsElement) ? vcsElement.GetString() : null; - commit = vcsInfo.TryGetProperty("commit_id", out var commitElement) ? 
commitElement.GetString() : null; - } - - return new PythonDirectUrlInfo(url, isEditable, subdir, vcs, commit); - } - - public IReadOnlyCollection<KeyValuePair<string, string?>> ToMetadata() - { - var entries = new List<KeyValuePair<string, string?>>(); - - if (IsEditable) - { - entries.Add(new KeyValuePair<string, string?>("editable", "true")); - } - - if (!string.IsNullOrWhiteSpace(Url)) - { - entries.Add(new KeyValuePair<string, string?>("sourceUrl", Url)); - } - - if (!string.IsNullOrWhiteSpace(Subdirectory)) - { - entries.Add(new KeyValuePair<string, string?>("sourceSubdirectory", Subdirectory)); - } - - if (!string.IsNullOrWhiteSpace(Vcs)) - { - entries.Add(new KeyValuePair<string, string?>("sourceVcs", Vcs)); - } - - if (!string.IsNullOrWhiteSpace(Commit)) - { - entries.Add(new KeyValuePair<string, string?>("sourceCommit", Commit)); - } - - return entries; - } -} - -internal static class PythonPathHelper -{ - public static string NormalizeRelative(LanguageAnalyzerContext context, string path) - { - var relative = context.GetRelativePath(path); - if (string.IsNullOrEmpty(relative) || relative == ".") - { - return "."; - } - - return relative; - } -} - -internal static class PythonEncoding -{ - public static readonly UTF8Encoding Utf8 = new(encoderShouldEmitUTF8Identifier: false, throwOnInvalidBytes: true); -} + + if (string.IsNullOrWhiteSpace(entry.HashAlgorithm) || string.IsNullOrWhiteSpace(entry.HashValue)) + { + continue; + } + + hashed++; + + if (!SupportedAlgorithms.Contains(entry.HashAlgorithm)) + { + unsupported.Add(entry.HashAlgorithm); + continue; + } + + string? actualHash = null; + + try + { + actualHash = await ComputeSha256Base64Async(fullPath, cancellationToken).ConfigureAwait(false); + } + catch (IOException) + { + ioErrors++; + evidence.Add(new LanguageComponentEvidence( + LanguageEvidenceKind.Derived, + "RECORD", + PythonPathHelper.NormalizeRelative(context, fullPath), + "io-error", + Sha256: null)); + continue; + } + catch (UnauthorizedAccessException) + { + ioErrors++; + evidence.Add(new LanguageComponentEvidence( + LanguageEvidenceKind.Derived, + "RECORD", + PythonPathHelper.NormalizeRelative(context, fullPath), + "access-denied", + Sha256: null)); + continue; + } + + if (actualHash is null) + { + continue; + } + + if (!string.Equals(actualHash, entry.HashValue, StringComparison.Ordinal)) + { + mismatched++; + evidence.Add(new LanguageComponentEvidence( + LanguageEvidenceKind.Derived, + "RECORD", + PythonPathHelper.NormalizeRelative(context, fullPath), + $"sha256 mismatch expected={entry.HashValue} actual={actualHash}", + Sha256: actualHash)); + } + } + + return new PythonRecordVerificationResult( + total, + hashed, + missing, + mismatched, + ioErrors, + usedByEntrypoint, + unsupported.ToArray(), + evidence); + } + + private static async Task<string> ComputeSha256Base64Async(string path, CancellationToken cancellationToken) + { + await using var stream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read); + + using var sha = SHA256.Create(); + var buffer = ArrayPool<byte>.Shared.Rent(81920); + try + { + int bytesRead; + while ((bytesRead = await stream.ReadAsync(buffer.AsMemory(0, buffer.Length), cancellationToken).ConfigureAwait(false)) > 0) + { + sha.TransformBlock(buffer, 0, bytesRead, null, 0); + } + + sha.TransformFinalBlock(Array.Empty<byte>(), 0, 0); + return Convert.ToBase64String(sha.Hash ?? 
Array.Empty<byte>()); + } + finally + { + ArrayPool<byte>.Shared.Return(buffer); + } + } +} + +internal sealed class PythonDirectUrlInfo +{ + public string? Url { get; } + public bool IsEditable { get; } + public string? Subdirectory { get; } + public string? Vcs { get; } + public string? Commit { get; } + + private PythonDirectUrlInfo(string? url, bool isEditable, string? subdirectory, string? vcs, string? commit) + { + Url = url; + IsEditable = isEditable; + Subdirectory = subdirectory; + Vcs = vcs; + Commit = commit; + } + + public static async Task<PythonDirectUrlInfo?> LoadAsync(string path, CancellationToken cancellationToken) + { + if (!File.Exists(path)) + { + return null; + } + + await using var stream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read); + using var document = await JsonDocument.ParseAsync(stream, cancellationToken: cancellationToken).ConfigureAwait(false); + var root = document.RootElement; + + var url = root.TryGetProperty("url", out var urlElement) ? urlElement.GetString() : null; + var isEditable = root.TryGetProperty("dir_info", out var dirInfo) && dirInfo.TryGetProperty("editable", out var editableValue) && editableValue.GetBoolean(); + var subdir = root.TryGetProperty("dir_info", out dirInfo) && dirInfo.TryGetProperty("subdirectory", out var subdirElement) ? subdirElement.GetString() : null; + + string? vcs = null; + string? commit = null; + + if (root.TryGetProperty("vcs_info", out var vcsInfo)) + { + vcs = vcsInfo.TryGetProperty("vcs", out var vcsElement) ? vcsElement.GetString() : null; + commit = vcsInfo.TryGetProperty("commit_id", out var commitElement) ? commitElement.GetString() : null; + } + + return new PythonDirectUrlInfo(url, isEditable, subdir, vcs, commit); + } + + public IReadOnlyCollection<KeyValuePair<string, string?>> ToMetadata() + { + var entries = new List<KeyValuePair<string, string?>>(); + + if (IsEditable) + { + entries.Add(new KeyValuePair<string, string?>("editable", "true")); + } + + if (!string.IsNullOrWhiteSpace(Url)) + { + entries.Add(new KeyValuePair<string, string?>("sourceUrl", Url)); + } + + if (!string.IsNullOrWhiteSpace(Subdirectory)) + { + entries.Add(new KeyValuePair<string, string?>("sourceSubdirectory", Subdirectory)); + } + + if (!string.IsNullOrWhiteSpace(Vcs)) + { + entries.Add(new KeyValuePair<string, string?>("sourceVcs", Vcs)); + } + + if (!string.IsNullOrWhiteSpace(Commit)) + { + entries.Add(new KeyValuePair<string, string?>("sourceCommit", Commit)); + } + + return entries; + } +} + +internal static class PythonPathHelper +{ + public static string NormalizeRelative(LanguageAnalyzerContext context, string path) + { + var relative = context.GetRelativePath(path); + if (string.IsNullOrEmpty(relative) || relative == ".") + { + return "."; + } + + return relative; + } +} + +internal static class PythonEncoding +{ + public static readonly UTF8Encoding Utf8 = new(encoderShouldEmitUTF8Identifier: false, throwOnInvalidBytes: true); +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python/PythonAnalyzerPlugin.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/PythonAnalyzerPlugin.cs similarity index 96% rename from src/StellaOps.Scanner.Analyzers.Lang.Python/PythonAnalyzerPlugin.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/PythonAnalyzerPlugin.cs index 119b749c..1ef65bf0 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Python/PythonAnalyzerPlugin.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/PythonAnalyzerPlugin.cs @@ -1,17 
+1,17 @@ -using System; -using StellaOps.Scanner.Analyzers.Lang.Plugin; - -namespace StellaOps.Scanner.Analyzers.Lang.Python; - -public sealed class PythonAnalyzerPlugin : ILanguageAnalyzerPlugin -{ - public string Name => "StellaOps.Scanner.Analyzers.Lang.Python"; - - public bool IsAvailable(IServiceProvider services) => services is not null; - - public ILanguageAnalyzer CreateAnalyzer(IServiceProvider services) - { - ArgumentNullException.ThrowIfNull(services); - return new PythonLanguageAnalyzer(); - } -} +using System; +using StellaOps.Scanner.Analyzers.Lang.Plugin; + +namespace StellaOps.Scanner.Analyzers.Lang.Python; + +public sealed class PythonAnalyzerPlugin : ILanguageAnalyzerPlugin +{ + public string Name => "StellaOps.Scanner.Analyzers.Lang.Python"; + + public bool IsAvailable(IServiceProvider services) => services is not null; + + public ILanguageAnalyzer CreateAnalyzer(IServiceProvider services) + { + ArgumentNullException.ThrowIfNull(services); + return new PythonLanguageAnalyzer(); + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python/PythonLanguageAnalyzer.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/PythonLanguageAnalyzer.cs similarity index 97% rename from src/StellaOps.Scanner.Analyzers.Lang.Python/PythonLanguageAnalyzer.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/PythonLanguageAnalyzer.cs index a5be4350..d119d939 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Python/PythonLanguageAnalyzer.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/PythonLanguageAnalyzer.cs @@ -1,72 +1,72 @@ -using System.Text.Json; -using StellaOps.Scanner.Analyzers.Lang.Python.Internal; - -namespace StellaOps.Scanner.Analyzers.Lang.Python; - -public sealed class PythonLanguageAnalyzer : ILanguageAnalyzer -{ - private static readonly EnumerationOptions Enumeration = new() - { - RecurseSubdirectories = true, - IgnoreInaccessible = true, - AttributesToSkip = FileAttributes.Device | FileAttributes.ReparsePoint - }; - - public string Id => "python"; - - public string DisplayName => "Python Analyzer"; - - public ValueTask AnalyzeAsync(LanguageAnalyzerContext context, LanguageComponentWriter writer, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(context); - ArgumentNullException.ThrowIfNull(writer); - - return AnalyzeInternalAsync(context, writer, cancellationToken); - } - - private static async ValueTask AnalyzeInternalAsync(LanguageAnalyzerContext context, LanguageComponentWriter writer, CancellationToken cancellationToken) - { - var distInfoDirectories = Directory - .EnumerateDirectories(context.RootPath, "*.dist-info", Enumeration) - .OrderBy(static path => path, StringComparer.Ordinal) - .ToArray(); - - foreach (var distInfoPath in distInfoDirectories) - { - cancellationToken.ThrowIfCancellationRequested(); - - PythonDistribution? 
distribution; - try - { - distribution = await PythonDistributionLoader.LoadAsync(context, distInfoPath, cancellationToken).ConfigureAwait(false); - } - catch (IOException) - { - continue; - } - catch (JsonException) - { - continue; - } - catch (UnauthorizedAccessException) - { - continue; - } - - if (distribution is null) - { - continue; - } - - writer.AddFromPurl( - analyzerId: "python", - purl: distribution.Purl, - name: distribution.Name, - version: distribution.Version, - type: "pypi", - metadata: distribution.SortedMetadata, - evidence: distribution.SortedEvidence, - usedByEntrypoint: distribution.UsedByEntrypoint); - } - } -} +using System.Text.Json; +using StellaOps.Scanner.Analyzers.Lang.Python.Internal; + +namespace StellaOps.Scanner.Analyzers.Lang.Python; + +public sealed class PythonLanguageAnalyzer : ILanguageAnalyzer +{ + private static readonly EnumerationOptions Enumeration = new() + { + RecurseSubdirectories = true, + IgnoreInaccessible = true, + AttributesToSkip = FileAttributes.Device | FileAttributes.ReparsePoint + }; + + public string Id => "python"; + + public string DisplayName => "Python Analyzer"; + + public ValueTask AnalyzeAsync(LanguageAnalyzerContext context, LanguageComponentWriter writer, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(context); + ArgumentNullException.ThrowIfNull(writer); + + return AnalyzeInternalAsync(context, writer, cancellationToken); + } + + private static async ValueTask AnalyzeInternalAsync(LanguageAnalyzerContext context, LanguageComponentWriter writer, CancellationToken cancellationToken) + { + var distInfoDirectories = Directory + .EnumerateDirectories(context.RootPath, "*.dist-info", Enumeration) + .OrderBy(static path => path, StringComparer.Ordinal) + .ToArray(); + + foreach (var distInfoPath in distInfoDirectories) + { + cancellationToken.ThrowIfCancellationRequested(); + + PythonDistribution? 
distribution; + try + { + distribution = await PythonDistributionLoader.LoadAsync(context, distInfoPath, cancellationToken).ConfigureAwait(false); + } + catch (IOException) + { + continue; + } + catch (JsonException) + { + continue; + } + catch (UnauthorizedAccessException) + { + continue; + } + + if (distribution is null) + { + continue; + } + + writer.AddFromPurl( + analyzerId: "python", + purl: distribution.Purl, + name: distribution.Name, + version: distribution.Version, + type: "pypi", + metadata: distribution.SortedMetadata, + evidence: distribution.SortedEvidence, + usedByEntrypoint: distribution.UsedByEntrypoint); + } + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python/StellaOps.Scanner.Analyzers.Lang.Python.csproj b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/StellaOps.Scanner.Analyzers.Lang.Python.csproj similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Python/StellaOps.Scanner.Analyzers.Lang.Python.csproj rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/StellaOps.Scanner.Analyzers.Lang.Python.csproj diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md similarity index 99% rename from src/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md index f14e0cb1..0714d00b 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/TASKS.md @@ -1,31 +1,31 @@ -# Python Analyzer Task Flow - -| Seq | ID | Status | Depends on | Description | Exit Criteria | -|-----|----|--------|------------|-------------|---------------| -| 1 | SCANNER-ANALYZERS-LANG-10-303A | DONE (2025-10-21) | SCANNER-ANALYZERS-LANG-10-307 | STREAM-based parser for `*.dist-info` (`METADATA`, `WHEEL`, `entry_points.txt`) with normalization + evidence capture. | Parser handles CPython 3.8–3.12 metadata variations; fixtures confirm canonical ordering and UTF-8 handling. | -| 2 | SCANNER-ANALYZERS-LANG-10-303B | DONE (2025-10-21) | SCANNER-ANALYZERS-LANG-10-303A | RECORD hash verifier with chunked hashing, Zip64 support, and mismatch diagnostics. | Verifier processes 5 GB RECORD fixture without allocations >2 MB; mismatches produce deterministic evidence records. | -| 3 | SCANNER-ANALYZERS-LANG-10-303C | DONE (2025-10-21) | SCANNER-ANALYZERS-LANG-10-303B | Editable install + pip cache detection; integrate EntryTrace hints for runtime usage flags. | Editable installs resolved to source path; usage flags propagated; regression tests cover mixed editable + wheel installs. | -| 4 | SCANNER-ANALYZERS-LANG-10-307P | DONE (2025-10-23) | SCANNER-ANALYZERS-LANG-10-303C | Shared helper integration (license metadata, quiet provenance, component merging). | Shared helpers reused; analyzer-specific metadata minimal; deterministic merge tests pass. | -| 5 | SCANNER-ANALYZERS-LANG-10-308P | DONE (2025-10-23) | SCANNER-ANALYZERS-LANG-10-307P | Golden fixtures + determinism harness for Python analyzer; add benchmark and hash throughput reporting. | Fixtures under `Fixtures/lang/python/`; determinism CI guard; benchmark CSV added with threshold alerts. | -| 6 | SCANNER-ANALYZERS-LANG-10-309P | DONE (2025-10-23) | SCANNER-ANALYZERS-LANG-10-308P | Package plug-in (manifest, DI registration) and document Offline Kit bundling of Python stdlib metadata if needed. 
| Manifest copied to `plugins/scanner/analyzers/lang/`; Worker loads analyzer; Offline Kit doc updated. | - -## Python Entry-Point Analyzer (Sprint 43) -> **Imposed rule:** work of this type or tasks of this type on this component — and everywhere else it should be applied. -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| SCANNER-ANALYZERS-PYTHON-23-001 | TODO | Python Analyzer Guild | SCANNER-ANALYZERS-LANG-10-309P | Build input normalizer & virtual filesystem for wheels, sdists, editable installs, zipapps, site-packages trees, and container roots. Detect Python version targets (`pyproject.toml`, `runtime.txt`, Dockerfile) + virtualenv layout deterministically. | Normalizer ingests fixtures (venv, wheel, sdist, zipapp, container layer) without extraction; records python_version, root metadata, and namespace resolution hints; determinism harness updated. | -| SCANNER-ANALYZERS-PYTHON-23-002 | TODO | Python Analyzer Guild | SCANNER-ANALYZERS-PYTHON-23-001 | Entrypoint discovery: module `__main__`, console_scripts entry points, `scripts`, zipapp main, `manage.py`/gunicorn/celery patterns. Capture invocation context (module vs package, argv wrappers). | Fixtures produce entrypoint list with kind (console, module, package, zipapp, framework) and deterministic ordering; warnings for missing targets recorded. | -| SCANNER-ANALYZERS-PYTHON-23-003 | TODO | Python Analyzer Guild | SCANNER-ANALYZERS-PYTHON-23-001 | Static import graph builder using AST and bytecode fallback. Support `import`, `from ... import`, relative imports, `importlib.import_module`, `__import__` with literal args, `pkgutil.extend_path`. | AST scanner emits edges for explicit imports; literal importlib calls covered; unresolved/dynamic patterns yield `dynamic-import` warnings with candidate prefixes; regression fixtures pass. | -| SCANNER-ANALYZERS-PYTHON-23-004 | TODO | Python Analyzer Guild | SCANNER-ANALYZERS-PYTHON-23-003 | Python resolver engine (importlib semantics) handling namespace packages (PEP 420), package discovery order, `.pth` files, `sys.path` composition, zipimport, and site-packages precedence across virtualenv/container roots. | Resolver reproduces importlib behaviour on fixture matrix (namespace pkg, zipimport, multi-site-dir); includes explain traces; determinism tests for path ordering succeed. | -| SCANNER-ANALYZERS-PYTHON-23-005 | TODO | Python Analyzer Guild | SCANNER-ANALYZERS-PYTHON-23-004 | Packaging adapters: pip editable (`.egg-link`), Poetry/Flit layout, Conda prefix, `.dist-info/RECORD` cross-check, container layer overlays. | Adapters resolve editable links, conda pkgs, layered site-packages; edges capture provider path + metadata; warnings emitted for missing RECORD entries. | -| SCANNER-ANALYZERS-PYTHON-23-006 | TODO | Python Analyzer Guild | SCANNER-ANALYZERS-PYTHON-23-003 | Detect native extensions (`*.so`, `*.pyd`), CFFI modules, ctypes loaders, embedded WASM, and runtime capability signals (subprocess, multiprocessing, ctypes, eval). | Fixtures with native/CFFI/ctypes emit `native-extension`, `cffi`, `ctypes` hints; capability flags recorded; metadata captures ABI/platform info. | -| SCANNER-ANALYZERS-PYTHON-23-007 | TODO | Python Analyzer Guild | SCANNER-ANALYZERS-PYTHON-23-002 | Framework/config heuristics: Django, Flask, FastAPI, Celery, AWS Lambda handlers, Gunicorn, Click/Typer CLIs, logging configs, pyproject optional dependencies. Tagged as hints only. 
| Framework fixtures produce hint records with source files (settings.py, pyproject extras, celery app); no resolver impact; determinism maintained. | - -## Python Observation & Runtime (Sprint 44) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| SCANNER-ANALYZERS-PYTHON-23-008 | TODO | Python Analyzer Guild | SCANNER-ANALYZERS-PYTHON-23-004 | Produce AOC-compliant observations: entrypoints, components (modules/packages/native), edges (import, namespace, dynamic-hint, native-extension) with reason codes/confidence and resolver traces. | Observation JSON for fixtures deterministic; includes explain trace per edge and namespace resolution metadata; passes AOC compliance lint. | -| SCANNER-ANALYZERS-PYTHON-23-009 | TODO | Python Analyzer Guild, QA Guild | SCANNER-ANALYZERS-PYTHON-23-008 | Fixture suite + perf benchmarks covering virtualenv, namespace packages, zipapp, editable installs, containers, lambda handler. | Fixture set committed under `fixtures/lang/python/ep`; determinism CI and perf (<250ms medium project) gates enabled. | -| SCANNER-ANALYZERS-PYTHON-23-010 | TODO | Python Analyzer Guild, Signals Guild | SCANNER-ANALYZERS-PYTHON-23-008 | Optional runtime evidence: import hook capturing module load events with path scrubbing, optional bytecode instrumentation for `importlib` hooks, multiprocessing tracer. | Runtime harness records module loads for sample app; paths hashed; runtime edges merge without altering resolver precedence; privacy doc updated. | -| SCANNER-ANALYZERS-PYTHON-23-011 | TODO | Python Analyzer Guild, DevOps Guild | SCANNER-ANALYZERS-PYTHON-23-008 | Package analyzer plug-in, add CLI commands (`stella python inspect|resolve|trace`), update Offline Kit guidance. | Plugin manifest deployed; CLI commands documented & smoke tested; Offline Kit instructions cover Python analyzer usage; worker restart verified. | -| SCANNER-ANALYZERS-PYTHON-23-012 | TODO | Python Analyzer Guild | SCANNER-ANALYZERS-PYTHON-23-001 | Container/zipapp adapter enhancements: parse OCI layers for Python runtime, detect `PYTHONPATH`/`PYTHONHOME` env, record warnings for sitecustomize/startup hooks. | Container fixtures output runtime metadata (python binary, env vars) and warnings for startup hooks; zipapp fixture resolves internal modules; determinism retained. | +# Python Analyzer Task Flow + +| Seq | ID | Status | Depends on | Description | Exit Criteria | +|-----|----|--------|------------|-------------|---------------| +| 1 | SCANNER-ANALYZERS-LANG-10-303A | DONE (2025-10-21) | SCANNER-ANALYZERS-LANG-10-307 | STREAM-based parser for `*.dist-info` (`METADATA`, `WHEEL`, `entry_points.txt`) with normalization + evidence capture. | Parser handles CPython 3.8–3.12 metadata variations; fixtures confirm canonical ordering and UTF-8 handling. | +| 2 | SCANNER-ANALYZERS-LANG-10-303B | DONE (2025-10-21) | SCANNER-ANALYZERS-LANG-10-303A | RECORD hash verifier with chunked hashing, Zip64 support, and mismatch diagnostics. | Verifier processes 5 GB RECORD fixture without allocations >2 MB; mismatches produce deterministic evidence records. | +| 3 | SCANNER-ANALYZERS-LANG-10-303C | DONE (2025-10-21) | SCANNER-ANALYZERS-LANG-10-303B | Editable install + pip cache detection; integrate EntryTrace hints for runtime usage flags. | Editable installs resolved to source path; usage flags propagated; regression tests cover mixed editable + wheel installs. 
| +| 4 | SCANNER-ANALYZERS-LANG-10-307P | DONE (2025-10-23) | SCANNER-ANALYZERS-LANG-10-303C | Shared helper integration (license metadata, quiet provenance, component merging). | Shared helpers reused; analyzer-specific metadata minimal; deterministic merge tests pass. | +| 5 | SCANNER-ANALYZERS-LANG-10-308P | DONE (2025-10-23) | SCANNER-ANALYZERS-LANG-10-307P | Golden fixtures + determinism harness for Python analyzer; add benchmark and hash throughput reporting. | Fixtures under `Fixtures/lang/python/`; determinism CI guard; benchmark CSV added with threshold alerts. | +| 6 | SCANNER-ANALYZERS-LANG-10-309P | DONE (2025-10-23) | SCANNER-ANALYZERS-LANG-10-308P | Package plug-in (manifest, DI registration) and document Offline Kit bundling of Python stdlib metadata if needed. | Manifest copied to `plugins/scanner/analyzers/lang/`; Worker loads analyzer; Offline Kit doc updated. | + +## Python Entry-Point Analyzer (Sprint 43) +> **Imposed rule:** work of this type or tasks of this type on this component — and everywhere else it should be applied. +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| SCANNER-ANALYZERS-PYTHON-23-001 | TODO | Python Analyzer Guild | SCANNER-ANALYZERS-LANG-10-309P | Build input normalizer & virtual filesystem for wheels, sdists, editable installs, zipapps, site-packages trees, and container roots. Detect Python version targets (`pyproject.toml`, `runtime.txt`, Dockerfile) + virtualenv layout deterministically. | Normalizer ingests fixtures (venv, wheel, sdist, zipapp, container layer) without extraction; records python_version, root metadata, and namespace resolution hints; determinism harness updated. | +| SCANNER-ANALYZERS-PYTHON-23-002 | TODO | Python Analyzer Guild | SCANNER-ANALYZERS-PYTHON-23-001 | Entrypoint discovery: module `__main__`, console_scripts entry points, `scripts`, zipapp main, `manage.py`/gunicorn/celery patterns. Capture invocation context (module vs package, argv wrappers). | Fixtures produce entrypoint list with kind (console, module, package, zipapp, framework) and deterministic ordering; warnings for missing targets recorded. | +| SCANNER-ANALYZERS-PYTHON-23-003 | TODO | Python Analyzer Guild | SCANNER-ANALYZERS-PYTHON-23-001 | Static import graph builder using AST and bytecode fallback. Support `import`, `from ... import`, relative imports, `importlib.import_module`, `__import__` with literal args, `pkgutil.extend_path`. | AST scanner emits edges for explicit imports; literal importlib calls covered; unresolved/dynamic patterns yield `dynamic-import` warnings with candidate prefixes; regression fixtures pass. | +| SCANNER-ANALYZERS-PYTHON-23-004 | TODO | Python Analyzer Guild | SCANNER-ANALYZERS-PYTHON-23-003 | Python resolver engine (importlib semantics) handling namespace packages (PEP 420), package discovery order, `.pth` files, `sys.path` composition, zipimport, and site-packages precedence across virtualenv/container roots. | Resolver reproduces importlib behaviour on fixture matrix (namespace pkg, zipimport, multi-site-dir); includes explain traces; determinism tests for path ordering succeed. | +| SCANNER-ANALYZERS-PYTHON-23-005 | TODO | Python Analyzer Guild | SCANNER-ANALYZERS-PYTHON-23-004 | Packaging adapters: pip editable (`.egg-link`), Poetry/Flit layout, Conda prefix, `.dist-info/RECORD` cross-check, container layer overlays. 
| Adapters resolve editable links, conda pkgs, layered site-packages; edges capture provider path + metadata; warnings emitted for missing RECORD entries. | +| SCANNER-ANALYZERS-PYTHON-23-006 | TODO | Python Analyzer Guild | SCANNER-ANALYZERS-PYTHON-23-003 | Detect native extensions (`*.so`, `*.pyd`), CFFI modules, ctypes loaders, embedded WASM, and runtime capability signals (subprocess, multiprocessing, ctypes, eval). | Fixtures with native/CFFI/ctypes emit `native-extension`, `cffi`, `ctypes` hints; capability flags recorded; metadata captures ABI/platform info. | +| SCANNER-ANALYZERS-PYTHON-23-007 | TODO | Python Analyzer Guild | SCANNER-ANALYZERS-PYTHON-23-002 | Framework/config heuristics: Django, Flask, FastAPI, Celery, AWS Lambda handlers, Gunicorn, Click/Typer CLIs, logging configs, pyproject optional dependencies. Tagged as hints only. | Framework fixtures produce hint records with source files (settings.py, pyproject extras, celery app); no resolver impact; determinism maintained. | + +## Python Observation & Runtime (Sprint 44) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| SCANNER-ANALYZERS-PYTHON-23-008 | TODO | Python Analyzer Guild | SCANNER-ANALYZERS-PYTHON-23-004 | Produce AOC-compliant observations: entrypoints, components (modules/packages/native), edges (import, namespace, dynamic-hint, native-extension) with reason codes/confidence and resolver traces. | Observation JSON for fixtures deterministic; includes explain trace per edge and namespace resolution metadata; passes AOC compliance lint. | +| SCANNER-ANALYZERS-PYTHON-23-009 | TODO | Python Analyzer Guild, QA Guild | SCANNER-ANALYZERS-PYTHON-23-008 | Fixture suite + perf benchmarks covering virtualenv, namespace packages, zipapp, editable installs, containers, lambda handler. | Fixture set committed under `fixtures/lang/python/ep`; determinism CI and perf (<250ms medium project) gates enabled. | +| SCANNER-ANALYZERS-PYTHON-23-010 | TODO | Python Analyzer Guild, Signals Guild | SCANNER-ANALYZERS-PYTHON-23-008 | Optional runtime evidence: import hook capturing module load events with path scrubbing, optional bytecode instrumentation for `importlib` hooks, multiprocessing tracer. | Runtime harness records module loads for sample app; paths hashed; runtime edges merge without altering resolver precedence; privacy doc updated. | +| SCANNER-ANALYZERS-PYTHON-23-011 | TODO | Python Analyzer Guild, DevOps Guild | SCANNER-ANALYZERS-PYTHON-23-008 | Package analyzer plug-in, add CLI commands (`stella python inspect|resolve|trace`), update Offline Kit guidance. | Plugin manifest deployed; CLI commands documented & smoke tested; Offline Kit instructions cover Python analyzer usage; worker restart verified. | +| SCANNER-ANALYZERS-PYTHON-23-012 | TODO | Python Analyzer Guild | SCANNER-ANALYZERS-PYTHON-23-001 | Container/zipapp adapter enhancements: parse OCI layers for Python runtime, detect `PYTHONPATH`/`PYTHONHOME` env, record warnings for sitecustomize/startup hooks. | Container fixtures output runtime metadata (python binary, env vars) and warnings for startup hooks; zipapp fixture resolves internal modules; determinism retained. 
| diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python/manifest.json b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/manifest.json similarity index 96% rename from src/StellaOps.Scanner.Analyzers.Lang.Python/manifest.json rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/manifest.json index ada19bcc..36c377ea 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Python/manifest.json +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/manifest.json @@ -1,23 +1,23 @@ -{ - "schemaVersion": "1.0", - "id": "stellaops.analyzer.lang.python", - "displayName": "StellaOps Python Analyzer (preview)", - "version": "0.1.0", - "requiresRestart": true, - "entryPoint": { - "type": "dotnet", - "assembly": "StellaOps.Scanner.Analyzers.Lang.Python.dll", - "typeName": "StellaOps.Scanner.Analyzers.Lang.Python.PythonAnalyzerPlugin" - }, - "capabilities": [ - "language-analyzer", - "python", - "pypi" - ], - "metadata": { - "org.stellaops.analyzer.language": "python", - "org.stellaops.analyzer.kind": "language", - "org.stellaops.restart.required": "true", - "org.stellaops.analyzer.status": "preview" - } -} +{ + "schemaVersion": "1.0", + "id": "stellaops.analyzer.lang.python", + "displayName": "StellaOps Python Analyzer (preview)", + "version": "0.1.0", + "requiresRestart": true, + "entryPoint": { + "type": "dotnet", + "assembly": "StellaOps.Scanner.Analyzers.Lang.Python.dll", + "typeName": "StellaOps.Scanner.Analyzers.Lang.Python.PythonAnalyzerPlugin" + }, + "capabilities": [ + "language-analyzer", + "python", + "pypi" + ], + "metadata": { + "org.stellaops.analyzer.language": "python", + "org.stellaops.analyzer.kind": "language", + "org.stellaops.restart.required": "true", + "org.stellaops.analyzer.status": "preview" + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Rust/AGENTS.md b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/AGENTS.md similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Rust/AGENTS.md rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/AGENTS.md diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Rust/GlobalUsings.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/GlobalUsings.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Rust/GlobalUsings.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/GlobalUsings.cs diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Rust/Internal/RustAnalyzerCollector.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/Internal/RustAnalyzerCollector.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Rust/Internal/RustAnalyzerCollector.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/Internal/RustAnalyzerCollector.cs diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Rust/Internal/RustBinaryClassifier.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/Internal/RustBinaryClassifier.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Rust/Internal/RustBinaryClassifier.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/Internal/RustBinaryClassifier.cs diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Rust/Internal/RustCargoLockParser.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/Internal/RustCargoLockParser.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Rust/Internal/RustCargoLockParser.cs rename to 
src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/Internal/RustCargoLockParser.cs diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Rust/Internal/RustFingerprintScanner.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/Internal/RustFingerprintScanner.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Rust/Internal/RustFingerprintScanner.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/Internal/RustFingerprintScanner.cs diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Rust/RustAnalyzerPlugin.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/RustAnalyzerPlugin.cs similarity index 97% rename from src/StellaOps.Scanner.Analyzers.Lang.Rust/RustAnalyzerPlugin.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/RustAnalyzerPlugin.cs index cef41543..98efca4f 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Rust/RustAnalyzerPlugin.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/RustAnalyzerPlugin.cs @@ -1,17 +1,17 @@ -using System; -using StellaOps.Scanner.Analyzers.Lang.Plugin; - -namespace StellaOps.Scanner.Analyzers.Lang.Rust; - -public sealed class RustAnalyzerPlugin : ILanguageAnalyzerPlugin -{ +using System; +using StellaOps.Scanner.Analyzers.Lang.Plugin; + +namespace StellaOps.Scanner.Analyzers.Lang.Rust; + +public sealed class RustAnalyzerPlugin : ILanguageAnalyzerPlugin +{ public string Name => "StellaOps.Scanner.Analyzers.Lang.Rust"; public bool IsAvailable(IServiceProvider services) => services is not null; - - public ILanguageAnalyzer CreateAnalyzer(IServiceProvider services) - { - ArgumentNullException.ThrowIfNull(services); - return new RustLanguageAnalyzer(); - } -} + + public ILanguageAnalyzer CreateAnalyzer(IServiceProvider services) + { + ArgumentNullException.ThrowIfNull(services); + return new RustLanguageAnalyzer(); + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Rust/RustLanguageAnalyzer.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/RustLanguageAnalyzer.cs similarity index 99% rename from src/StellaOps.Scanner.Analyzers.Lang.Rust/RustLanguageAnalyzer.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/RustLanguageAnalyzer.cs index b9a7fd22..1e295e65 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Rust/RustLanguageAnalyzer.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/RustLanguageAnalyzer.cs @@ -2,9 +2,9 @@ using System; using System.Threading; using System.Threading.Tasks; using StellaOps.Scanner.Analyzers.Lang.Rust.Internal; - -namespace StellaOps.Scanner.Analyzers.Lang.Rust; - + +namespace StellaOps.Scanner.Analyzers.Lang.Rust; + public sealed class RustLanguageAnalyzer : ILanguageAnalyzer { public string Id => "rust"; diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Rust/StellaOps.Scanner.Analyzers.Lang.Rust.csproj b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/StellaOps.Scanner.Analyzers.Lang.Rust.csproj similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Rust/StellaOps.Scanner.Analyzers.Lang.Rust.csproj rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/StellaOps.Scanner.Analyzers.Lang.Rust.csproj diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/TASKS.md 
diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Rust/manifest.json b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/manifest.json similarity index 96% rename from src/StellaOps.Scanner.Analyzers.Lang.Rust/manifest.json rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/manifest.json index 24775978..744bf8ef 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Rust/manifest.json +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/manifest.json @@ -1,23 +1,23 @@ -{ - "schemaVersion": "1.0", - "id": "stellaops.analyzer.lang.rust", - "displayName": "StellaOps Rust Analyzer (preview)", - "version": "0.1.0", - "requiresRestart": true, - "entryPoint": { - "type": "dotnet", - "assembly": "StellaOps.Scanner.Analyzers.Lang.Rust.dll", - "typeName": "StellaOps.Scanner.Analyzers.Lang.Rust.RustAnalyzerPlugin" - }, - "capabilities": [ - "language-analyzer", - "rust", - "cargo" - ], - "metadata": { - "org.stellaops.analyzer.language": "rust", - "org.stellaops.analyzer.kind": "language", - "org.stellaops.restart.required": "true", - "org.stellaops.analyzer.status": "preview" - } -} +{ + "schemaVersion": "1.0", + "id": "stellaops.analyzer.lang.rust", + "displayName": "StellaOps Rust Analyzer (preview)", + "version": "0.1.0", + "requiresRestart": true, + "entryPoint": { + "type": "dotnet", + "assembly": "StellaOps.Scanner.Analyzers.Lang.Rust.dll", + "typeName": "StellaOps.Scanner.Analyzers.Lang.Rust.RustAnalyzerPlugin" + }, + "capabilities": [ + "language-analyzer", + "rust", + "cargo" + ], + "metadata": { + "org.stellaops.analyzer.language": "rust", + "org.stellaops.analyzer.kind": "language", + "org.stellaops.restart.required": "true", + "org.stellaops.analyzer.status": "preview" + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang/AGENTS.md b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/AGENTS.md similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang/AGENTS.md rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/AGENTS.md diff --git a/src/StellaOps.Scanner.Analyzers.Lang/Core/ILanguageAnalyzer.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/Core/ILanguageAnalyzer.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang/Core/ILanguageAnalyzer.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/Core/ILanguageAnalyzer.cs diff --git a/src/StellaOps.Scanner.Analyzers.Lang/Core/Internal/LanguageAnalyzerJson.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/Core/Internal/LanguageAnalyzerJson.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang/Core/Internal/LanguageAnalyzerJson.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/Core/Internal/LanguageAnalyzerJson.cs diff --git a/src/StellaOps.Scanner.Analyzers.Lang/Core/LanguageAnalyzerContext.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/Core/LanguageAnalyzerContext.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang/Core/LanguageAnalyzerContext.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/Core/LanguageAnalyzerContext.cs diff --git a/src/StellaOps.Scanner.Analyzers.Lang/Core/LanguageAnalyzerEngine.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/Core/LanguageAnalyzerEngine.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang/Core/LanguageAnalyzerEngine.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/Core/LanguageAnalyzerEngine.cs diff --git 
a/src/StellaOps.Scanner.Analyzers.Lang/Core/LanguageAnalyzerResult.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/Core/LanguageAnalyzerResult.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang/Core/LanguageAnalyzerResult.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/Core/LanguageAnalyzerResult.cs diff --git a/src/StellaOps.Scanner.Analyzers.Lang/Core/LanguageComponentEvidence.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/Core/LanguageComponentEvidence.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang/Core/LanguageComponentEvidence.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/Core/LanguageComponentEvidence.cs diff --git a/src/StellaOps.Scanner.Analyzers.Lang/Core/LanguageComponentMapper.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/Core/LanguageComponentMapper.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang/Core/LanguageComponentMapper.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/Core/LanguageComponentMapper.cs diff --git a/src/StellaOps.Scanner.Analyzers.Lang/Core/LanguageComponentRecord.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/Core/LanguageComponentRecord.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang/Core/LanguageComponentRecord.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/Core/LanguageComponentRecord.cs diff --git a/src/StellaOps.Scanner.Analyzers.Lang/Core/LanguageUsageHints.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/Core/LanguageUsageHints.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang/Core/LanguageUsageHints.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/Core/LanguageUsageHints.cs diff --git a/src/StellaOps.Scanner.Analyzers.Lang/GlobalUsings.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/GlobalUsings.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang/GlobalUsings.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/GlobalUsings.cs diff --git a/src/StellaOps.Scanner.Analyzers.Lang/Plugin/ILanguageAnalyzerPlugin.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/Plugin/ILanguageAnalyzerPlugin.cs similarity index 96% rename from src/StellaOps.Scanner.Analyzers.Lang/Plugin/ILanguageAnalyzerPlugin.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/Plugin/ILanguageAnalyzerPlugin.cs index cd7f5c5c..88151195 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang/Plugin/ILanguageAnalyzerPlugin.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/Plugin/ILanguageAnalyzerPlugin.cs @@ -1,15 +1,15 @@ -using System; -using StellaOps.Plugin; - -namespace StellaOps.Scanner.Analyzers.Lang.Plugin; - -/// <summary> -/// Represents a restart-time plug-in that exposes a language analyzer. -/// </summary> -public interface ILanguageAnalyzerPlugin : IAvailabilityPlugin -{ - /// <summary> - /// Creates the analyzer instance bound to the service provider. - /// </summary> - ILanguageAnalyzer CreateAnalyzer(IServiceProvider services); -} +using System; +using StellaOps.Plugin; + +namespace StellaOps.Scanner.Analyzers.Lang.Plugin; + +/// <summary> +/// Represents a restart-time plug-in that exposes a language analyzer. +/// </summary> +public interface ILanguageAnalyzerPlugin : IAvailabilityPlugin +{ + /// <summary> + /// Creates the analyzer instance bound to the service provider. 
+ /// </summary> + ILanguageAnalyzer CreateAnalyzer(IServiceProvider services); +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang/Plugin/LanguageAnalyzerPluginCatalog.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/Plugin/LanguageAnalyzerPluginCatalog.cs similarity index 97% rename from src/StellaOps.Scanner.Analyzers.Lang/Plugin/LanguageAnalyzerPluginCatalog.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/Plugin/LanguageAnalyzerPluginCatalog.cs index 627b03dd..f077f308 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang/Plugin/LanguageAnalyzerPluginCatalog.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/Plugin/LanguageAnalyzerPluginCatalog.cs @@ -1,147 +1,147 @@ -using System; -using System.Collections.Concurrent; -using System.Collections.Generic; -using System.Collections.ObjectModel; -using System.Linq; -using System.Reflection; -using Microsoft.Extensions.Logging; -using StellaOps.Plugin; -using StellaOps.Plugin.Hosting; -using StellaOps.Scanner.Core.Security; - -namespace StellaOps.Scanner.Analyzers.Lang.Plugin; - -public interface ILanguageAnalyzerPluginCatalog -{ - IReadOnlyCollection<ILanguageAnalyzerPlugin> Plugins { get; } - - void LoadFromDirectory(string directory, bool seal = true); - - IReadOnlyList<ILanguageAnalyzer> CreateAnalyzers(IServiceProvider services); -} - -public sealed class LanguageAnalyzerPluginCatalog : ILanguageAnalyzerPluginCatalog -{ - private readonly ILogger<LanguageAnalyzerPluginCatalog> _logger; - private readonly IPluginCatalogGuard _guard; - private readonly ConcurrentDictionary<string, Assembly> _assemblies = new(StringComparer.OrdinalIgnoreCase); - private IReadOnlyList<ILanguageAnalyzerPlugin> _plugins = Array.Empty<ILanguageAnalyzerPlugin>(); - - public LanguageAnalyzerPluginCatalog(IPluginCatalogGuard guard, ILogger<LanguageAnalyzerPluginCatalog> logger) - { - _guard = guard ?? throw new ArgumentNullException(nameof(guard)); - _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); - } - - public IReadOnlyCollection<ILanguageAnalyzerPlugin> Plugins => _plugins; - - public void LoadFromDirectory(string directory, bool seal = true) - { - ArgumentException.ThrowIfNullOrWhiteSpace(directory); - var fullDirectory = Path.GetFullPath(directory); - - var options = new PluginHostOptions - { - PluginsDirectory = fullDirectory, - EnsureDirectoryExists = false, - RecursiveSearch = false, - }; - options.SearchPatterns.Add("StellaOps.Scanner.Analyzers.*.dll"); - - var result = PluginHost.LoadPlugins(options, _logger); - if (result.Plugins.Count == 0) - { - _logger.LogWarning("No language analyzer plug-ins discovered under '{Directory}'.", fullDirectory); - } - - foreach (var descriptor in result.Plugins) - { - try - { - _guard.EnsureRegistrationAllowed(descriptor.AssemblyPath); - _assemblies[descriptor.AssemblyPath] = descriptor.Assembly; - _logger.LogInformation( - "Registered language analyzer plug-in assembly '{Assembly}' from '{Path}'.", - descriptor.Assembly.FullName, - descriptor.AssemblyPath); - } - catch (Exception ex) - { - _logger.LogError(ex, "Failed to register language analyzer plug-in '{Path}'.", descriptor.AssemblyPath); - } - } - - RefreshPluginList(); - - if (seal) - { - _guard.Seal(); - } - } - - public IReadOnlyList<ILanguageAnalyzer> CreateAnalyzers(IServiceProvider services) - { - ArgumentNullException.ThrowIfNull(services); - - if (_plugins.Count == 0) - { - _logger.LogWarning("No language analyzer plug-ins available; skipping language analysis."); - return Array.Empty<ILanguageAnalyzer>(); - } - - var analyzers = new List<ILanguageAnalyzer>(_plugins.Count); - - foreach (var plugin in _plugins) - { - if (!IsPluginAvailable(plugin, services)) - { - continue; - } - - try - { - var analyzer = plugin.CreateAnalyzer(services); - if (analyzer is null) - { - continue; - } - - analyzers.Add(analyzer); - } - catch (Exception ex) - { - _logger.LogError(ex, "Language analyzer plug-in '{Plugin}' failed to create analyzer instance.", plugin.Name); - } - } - - if (analyzers.Count == 0) - { - _logger.LogWarning("All language analyzer plug-ins were unavailable."); - return Array.Empty<ILanguageAnalyzer>(); - } - - analyzers.Sort(static (a, b) => string.CompareOrdinal(a.Id, b.Id)); - return new ReadOnlyCollection<ILanguageAnalyzer>(analyzers); - } - - private void RefreshPluginList() - { - var assemblies = _assemblies.Values.ToArray(); - var plugins = PluginLoader.LoadPlugins<ILanguageAnalyzerPlugin>(assemblies); - _plugins = plugins is IReadOnlyList<ILanguageAnalyzerPlugin> list - ? 
list - : new ReadOnlyCollection<ILanguageAnalyzerPlugin>(plugins.ToArray()); - } - - private static bool IsPluginAvailable(ILanguageAnalyzerPlugin plugin, IServiceProvider services) - { - try - { - return plugin.IsAvailable(services); - } - catch - { - return false; - } - } -} +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Linq; +using System.Reflection; +using Microsoft.Extensions.Logging; +using StellaOps.Plugin; +using StellaOps.Plugin.Hosting; +using StellaOps.Scanner.Core.Security; + +namespace StellaOps.Scanner.Analyzers.Lang.Plugin; + +public interface ILanguageAnalyzerPluginCatalog +{ + IReadOnlyCollection<ILanguageAnalyzerPlugin> Plugins { get; } + + void LoadFromDirectory(string directory, bool seal = true); + + IReadOnlyList<ILanguageAnalyzer> CreateAnalyzers(IServiceProvider services); +} + +public sealed class LanguageAnalyzerPluginCatalog : ILanguageAnalyzerPluginCatalog +{ + private readonly ILogger<LanguageAnalyzerPluginCatalog> _logger; + private readonly IPluginCatalogGuard _guard; + private readonly ConcurrentDictionary<string, Assembly> _assemblies = new(StringComparer.OrdinalIgnoreCase); + private IReadOnlyList<ILanguageAnalyzerPlugin> _plugins = Array.Empty<ILanguageAnalyzerPlugin>(); + + public LanguageAnalyzerPluginCatalog(IPluginCatalogGuard guard, ILogger<LanguageAnalyzerPluginCatalog> logger) + { + _guard = guard ?? throw new ArgumentNullException(nameof(guard)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public IReadOnlyCollection<ILanguageAnalyzerPlugin> Plugins => _plugins; + + public void LoadFromDirectory(string directory, bool seal = true) + { + ArgumentException.ThrowIfNullOrWhiteSpace(directory); + var fullDirectory = Path.GetFullPath(directory); + + var options = new PluginHostOptions + { + PluginsDirectory = fullDirectory, + EnsureDirectoryExists = false, + RecursiveSearch = false, + }; + options.SearchPatterns.Add("StellaOps.Scanner.Analyzers.*.dll"); + + var result = PluginHost.LoadPlugins(options, _logger); + if (result.Plugins.Count == 0) + { + _logger.LogWarning("No language analyzer plug-ins discovered under '{Directory}'.", fullDirectory); + } + + foreach (var descriptor in result.Plugins) + { + try + { + _guard.EnsureRegistrationAllowed(descriptor.AssemblyPath); + _assemblies[descriptor.AssemblyPath] = descriptor.Assembly; + _logger.LogInformation( + "Registered language analyzer plug-in assembly '{Assembly}' from '{Path}'.", + descriptor.Assembly.FullName, + descriptor.AssemblyPath); + } + catch (Exception ex) + { + _logger.LogError(ex, "Failed to register language analyzer plug-in '{Path}'.", descriptor.AssemblyPath); + } + } + + RefreshPluginList(); + + if (seal) + { + _guard.Seal(); + } + } + + public IReadOnlyList<ILanguageAnalyzer> CreateAnalyzers(IServiceProvider services) + { + ArgumentNullException.ThrowIfNull(services); + + if (_plugins.Count == 0) + { + _logger.LogWarning("No language analyzer plug-ins available; skipping language analysis."); + return Array.Empty<ILanguageAnalyzer>(); + } + + var analyzers = new List<ILanguageAnalyzer>(_plugins.Count); + + foreach (var plugin in _plugins) + { + if (!IsPluginAvailable(plugin, services)) + { + continue; + } + + try + { + var analyzer = plugin.CreateAnalyzer(services); + if (analyzer is null) + { + continue; + } + + analyzers.Add(analyzer); + } + catch (Exception ex) + { + _logger.LogError(ex, "Language analyzer plug-in '{Plugin}' failed to create analyzer 
instance.", plugin.Name); + } + } + + if (analyzers.Count == 0) + { + _logger.LogWarning("All language analyzer plug-ins were unavailable."); + return Array.Empty<ILanguageAnalyzer>(); + } + + analyzers.Sort(static (a, b) => string.CompareOrdinal(a.Id, b.Id)); + return new ReadOnlyCollection<ILanguageAnalyzer>(analyzers); + } + + private void RefreshPluginList() + { + var assemblies = _assemblies.Values.ToArray(); + var plugins = PluginLoader.LoadPlugins<ILanguageAnalyzerPlugin>(assemblies); + _plugins = plugins is IReadOnlyList<ILanguageAnalyzerPlugin> list + ? list + : new ReadOnlyCollection<ILanguageAnalyzerPlugin>(plugins.ToArray()); + } + + private static bool IsPluginAvailable(ILanguageAnalyzerPlugin plugin, IServiceProvider services) + { + try + { + return plugin.IsAvailable(services); + } + catch + { + return false; + } + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang/SPRINTS_LANG_IMPLEMENTATION_PLAN.md b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/SPRINTS_LANG_IMPLEMENTATION_PLAN.md similarity index 93% rename from src/StellaOps.Scanner.Analyzers.Lang/SPRINTS_LANG_IMPLEMENTATION_PLAN.md rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/SPRINTS_LANG_IMPLEMENTATION_PLAN.md index 89506625..04370e05 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang/SPRINTS_LANG_IMPLEMENTATION_PLAN.md +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/SPRINTS_LANG_IMPLEMENTATION_PLAN.md @@ -18,7 +18,7 @@ All sprints below assume prerequisites from SP10-G2 (core scaffolding + Java ana - All symlink targets canonicalized; path traversal guarded. - **Gate Artifacts:** - `Fixtures/lang/node/**` golden outputs. - - Analyzer benchmark CSV + flamegraph (commit under `src/StellaOps.Bench/Scanner.Analyzers`). + - Analyzer benchmark CSV + flamegraph (commit under `src/Bench/StellaOps.Bench/Scanner.Analyzers`). - Worker integration sample enabling Node analyzer via manifest. - **Progress (2025-10-21):** Module walker with package-lock/yarn/pnpm resolution, workspace attribution, integrity metadata, and deterministic fixture harness committed; Node tasks 10-302A/B remain green. Shared component mapper + canonical result harness landed, closing tasks 10-307/308. Script metadata & telemetry (10-302C) emit policy hints, hashed evidence, and feed `scanner_analyzer_node_scripts_total` into Worker OpenTelemetry pipeline. Restart-time packaging closed (10-309): manifest added, Worker language catalog loads the Node analyzer, integration tests cover dispatch + layer fragments, and Offline Kit docs call out bundled language plug-ins. @@ -52,7 +52,7 @@ All sprints below assume prerequisites from SP10-G2 (core scaffolding + Java ana - **Gate Artifacts:** - Benchmarks vs competitor open-source tool (Trivy or Syft) demonstrating faster metadata extraction. - Documentation snippet explaining VCS metadata fields for Policy team. -- **Progress (2025-10-22):** Build-info decoder shipped with DWARF-string fallback for `vcs.*` markers, plus cached metadata keyed by binary length/timestamp. Added Go test fixtures covering build-info and DWARF-only binaries with deterministic goldens; analyzer now emits `go.dwarf` evidence alongside `go.buildinfo` metadata to feed downstream provenance rules. Completed stripped-binary heuristics with deterministic `golang::bin::sha256` components and a new `stripped` fixture to guard quiet-provenance behaviour. 
Heuristic fallbacks now emit `scanner_analyzer_golang_heuristic_total{indicator,version_hint}` counters, and shared buffer pooling (`ArrayPool<byte>`) keeps concurrent scans allocation-lite. Bench harness (`src/StellaOps.Bench/Scanner.Analyzers/config.json`) gained a dedicated Go scenario with baseline mean 4.02 ms; comparison against Syft v1.29.1 on the same fixture shows a 22 % speed advantage (see `src/StellaOps.Bench/Scanner.Analyzers/lang/go/syft-comparison-20251021.csv`). +- **Progress (2025-10-22):** Build-info decoder shipped with DWARF-string fallback for `vcs.*` markers, plus cached metadata keyed by binary length/timestamp. Added Go test fixtures covering build-info and DWARF-only binaries with deterministic goldens; analyzer now emits `go.dwarf` evidence alongside `go.buildinfo` metadata to feed downstream provenance rules. Completed stripped-binary heuristics with deterministic `golang::bin::sha256` components and a new `stripped` fixture to guard quiet-provenance behaviour. Heuristic fallbacks now emit `scanner_analyzer_golang_heuristic_total{indicator,version_hint}` counters, and shared buffer pooling (`ArrayPool<byte>`) keeps concurrent scans allocation-lite. Bench harness (`src/Bench/StellaOps.Bench/Scanner.Analyzers/config.json`) gained a dedicated Go scenario with baseline mean 4.02 ms; comparison against Syft v1.29.1 on the same fixture shows a 22 % speed advantage (see `src/Bench/StellaOps.Bench/Scanner.Analyzers/lang/go/syft-comparison-20251021.csv`). ## Sprint LA4 — .NET Analyzer & RID Variants (Tasks 10-305, 10-307, 10-308, 10-309 subset) - **Scope:** Parse `*.deps.json`, `runtimeconfig.json`, assembly metadata, and RID-specific assets; correlate with native dependencies. @@ -97,7 +97,7 @@ All sprints below assume prerequisites from SP10-G2 (core scaffolding + Java ana - Telemetry coverage: each analyzer emits timing + component counters. - **Gate Artifacts:** - `SPRINTS_LANG_IMPLEMENTATION_PLAN.md` progress log updated (this file). - - `src/StellaOps.Bench/Scanner.Analyzers/lang-matrix.csv` recorded + referenced in docs. + - `src/Bench/StellaOps.Bench/Scanner.Analyzers/lang-matrix.csv` recorded + referenced in docs. - Ops notes for packaging plug-ins into Offline Kit. --- @@ -106,7 +106,7 @@ All sprints below assume prerequisites from SP10-G2 (core scaffolding + Java ana - **Security:** All analyzers must enforce path canonicalization, guard against zip-slip, and expose provenance classifications (`observed`, `heuristic`, `attested`). - **Offline-first:** No network calls; rely on cached metadata and optional offline bundles (license texts, signature roots). - **Determinism:** Normalise timestamps to `0001-01-01T00:00:00Z` when persisting synthetic data; sort collections by stable keys. -- **Benchmarking:** Extend `src/StellaOps.Bench/Scanner.Analyzers` to compare against open-source scanners (Syft/Trivy) and document performance wins. +- **Benchmarking:** Extend `src/Bench/StellaOps.Bench/Scanner.Analyzers` to compare against open-source scanners (Syft/Trivy) and document performance wins. - **Hand-offs:** Emit guild requires consistent component schemas; Policy needs license + provenance metadata; Scheduler depends on usage flags for ImpactIndex. 
## Tracking & Reporting diff --git a/src/StellaOps.Scanner.Analyzers.Lang/StellaOps.Scanner.Analyzers.Lang.csproj b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/StellaOps.Scanner.Analyzers.Lang.csproj similarity index 67% rename from src/StellaOps.Scanner.Analyzers.Lang/StellaOps.Scanner.Analyzers.Lang.csproj rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/StellaOps.Scanner.Analyzers.Lang.csproj index e2971e48..c7f66edf 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang/StellaOps.Scanner.Analyzers.Lang.csproj +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/StellaOps.Scanner.Analyzers.Lang.csproj @@ -1,22 +1,23 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - <EnableDefaultItems>false</EnableDefaultItems> - </PropertyGroup> - - <ItemGroup> - <Compile Include="**\\*.cs" Exclude="obj\\**;bin\\**" /> - <EmbeddedResource Include="**\\*.json" Exclude="obj\\**;bin\\**" /> - <None Include="**\\*" Exclude="**\\*.cs;**\\*.json;bin\\**;obj\\**" /> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + <EnableDefaultItems>false</EnableDefaultItems> + </PropertyGroup> + + <ItemGroup> + <Compile Include="**\\*.cs" Exclude="obj\\**;bin\\**" /> + <EmbeddedResource Include="**\\*.json" Exclude="obj\\**;bin\\**" /> + <None Include="**\\*" Exclude="**\\*.cs;**\\*.json;bin\\**;obj\\**" /> </ItemGroup> <ItemGroup> - <ProjectReference Include="..\\StellaOps.Scanner.Core\\StellaOps.Scanner.Core.csproj" /> - <ProjectReference Include="..\\StellaOps.Plugin\\StellaOps.Plugin.csproj" /> + <ProjectReference Include="../StellaOps.Scanner.Core/StellaOps.Scanner.Core.csproj" /> + <ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Scanner.Analyzers.Lang/TASKS.md b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/TASKS.md similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang/TASKS.md rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.Lang/TASKS.md diff --git a/src/StellaOps.Scanner.Analyzers.OS.Apk/ApkAnalyzerPlugin.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Apk/ApkAnalyzerPlugin.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS.Apk/ApkAnalyzerPlugin.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Apk/ApkAnalyzerPlugin.cs diff --git a/src/StellaOps.Scanner.Analyzers.OS.Apk/ApkDatabaseParser.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Apk/ApkDatabaseParser.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS.Apk/ApkDatabaseParser.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Apk/ApkDatabaseParser.cs diff --git a/src/StellaOps.Scanner.Analyzers.OS.Apk/ApkPackageAnalyzer.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Apk/ApkPackageAnalyzer.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS.Apk/ApkPackageAnalyzer.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Apk/ApkPackageAnalyzer.cs diff 
--git a/src/StellaOps.Scanner.Analyzers.OS.Apk/Properties/AssemblyInfo.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Apk/Properties/AssemblyInfo.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS.Apk/Properties/AssemblyInfo.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Apk/Properties/AssemblyInfo.cs diff --git a/src/StellaOps.Scanner.Analyzers.OS.Apk/StellaOps.Scanner.Analyzers.OS.Apk.csproj b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Apk/StellaOps.Scanner.Analyzers.OS.Apk.csproj similarity index 97% rename from src/StellaOps.Scanner.Analyzers.OS.Apk/StellaOps.Scanner.Analyzers.OS.Apk.csproj rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Apk/StellaOps.Scanner.Analyzers.OS.Apk.csproj index b7be5c8a..a56e869b 100644 --- a/src/StellaOps.Scanner.Analyzers.OS.Apk/StellaOps.Scanner.Analyzers.OS.Apk.csproj +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Apk/StellaOps.Scanner.Analyzers.OS.Apk.csproj @@ -1,15 +1,15 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <ItemGroup> - <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> - </ItemGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Scanner.Analyzers.OS\StellaOps.Scanner.Analyzers.OS.csproj" /> - </ItemGroup> -</Project> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <ItemGroup> + <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> + </ItemGroup> + <ItemGroup> + <ProjectReference Include="..\StellaOps.Scanner.Analyzers.OS\StellaOps.Scanner.Analyzers.OS.csproj" /> + </ItemGroup> +</Project> diff --git a/src/StellaOps.Scanner.Analyzers.OS.Apk/manifest.json b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Apk/manifest.json similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS.Apk/manifest.json rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Apk/manifest.json diff --git a/src/StellaOps.Scanner.Analyzers.OS.Dpkg/DpkgAnalyzerPlugin.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Dpkg/DpkgAnalyzerPlugin.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS.Dpkg/DpkgAnalyzerPlugin.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Dpkg/DpkgAnalyzerPlugin.cs diff --git a/src/StellaOps.Scanner.Analyzers.OS.Dpkg/DpkgPackageAnalyzer.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Dpkg/DpkgPackageAnalyzer.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS.Dpkg/DpkgPackageAnalyzer.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Dpkg/DpkgPackageAnalyzer.cs diff --git a/src/StellaOps.Scanner.Analyzers.OS.Dpkg/DpkgStatusParser.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Dpkg/DpkgStatusParser.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS.Dpkg/DpkgStatusParser.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Dpkg/DpkgStatusParser.cs diff --git 
a/src/StellaOps.Scanner.Analyzers.OS.Dpkg/Properties/AssemblyInfo.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Dpkg/Properties/AssemblyInfo.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS.Dpkg/Properties/AssemblyInfo.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Dpkg/Properties/AssemblyInfo.cs diff --git a/src/StellaOps.Scanner.Analyzers.OS.Dpkg/StellaOps.Scanner.Analyzers.OS.Dpkg.csproj b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Dpkg/StellaOps.Scanner.Analyzers.OS.Dpkg.csproj similarity index 97% rename from src/StellaOps.Scanner.Analyzers.OS.Dpkg/StellaOps.Scanner.Analyzers.OS.Dpkg.csproj rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Dpkg/StellaOps.Scanner.Analyzers.OS.Dpkg.csproj index b7be5c8a..a56e869b 100644 --- a/src/StellaOps.Scanner.Analyzers.OS.Dpkg/StellaOps.Scanner.Analyzers.OS.Dpkg.csproj +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Dpkg/StellaOps.Scanner.Analyzers.OS.Dpkg.csproj @@ -1,15 +1,15 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <ItemGroup> - <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> - </ItemGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Scanner.Analyzers.OS\StellaOps.Scanner.Analyzers.OS.csproj" /> - </ItemGroup> -</Project> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <ItemGroup> + <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> + </ItemGroup> + <ItemGroup> + <ProjectReference Include="..\StellaOps.Scanner.Analyzers.OS\StellaOps.Scanner.Analyzers.OS.csproj" /> + </ItemGroup> +</Project> diff --git a/src/StellaOps.Scanner.Analyzers.OS.Dpkg/manifest.json b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Dpkg/manifest.json similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS.Dpkg/manifest.json rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Dpkg/manifest.json diff --git a/src/StellaOps.Scanner.Analyzers.OS.Rpm/IRpmDatabaseReader.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Rpm/IRpmDatabaseReader.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS.Rpm/IRpmDatabaseReader.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Rpm/IRpmDatabaseReader.cs diff --git a/src/StellaOps.Scanner.Analyzers.OS.Rpm/Internal/RpmHeader.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Rpm/Internal/RpmHeader.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS.Rpm/Internal/RpmHeader.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Rpm/Internal/RpmHeader.cs diff --git a/src/StellaOps.Scanner.Analyzers.OS.Rpm/Internal/RpmHeaderParser.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Rpm/Internal/RpmHeaderParser.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS.Rpm/Internal/RpmHeaderParser.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Rpm/Internal/RpmHeaderParser.cs diff --git 
a/src/StellaOps.Scanner.Analyzers.OS.Rpm/Internal/RpmTags.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Rpm/Internal/RpmTags.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS.Rpm/Internal/RpmTags.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Rpm/Internal/RpmTags.cs diff --git a/src/StellaOps.Scanner.Analyzers.OS.Rpm/Properties/AssemblyInfo.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Rpm/Properties/AssemblyInfo.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS.Rpm/Properties/AssemblyInfo.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Rpm/Properties/AssemblyInfo.cs diff --git a/src/StellaOps.Scanner.Analyzers.OS.Rpm/RpmAnalyzerPlugin.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Rpm/RpmAnalyzerPlugin.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS.Rpm/RpmAnalyzerPlugin.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Rpm/RpmAnalyzerPlugin.cs diff --git a/src/StellaOps.Scanner.Analyzers.OS.Rpm/RpmDatabaseReader.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Rpm/RpmDatabaseReader.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS.Rpm/RpmDatabaseReader.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Rpm/RpmDatabaseReader.cs diff --git a/src/StellaOps.Scanner.Analyzers.OS.Rpm/RpmPackageAnalyzer.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Rpm/RpmPackageAnalyzer.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS.Rpm/RpmPackageAnalyzer.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Rpm/RpmPackageAnalyzer.cs diff --git a/src/StellaOps.Scanner.Analyzers.OS.Rpm/StellaOps.Scanner.Analyzers.OS.Rpm.csproj b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Rpm/StellaOps.Scanner.Analyzers.OS.Rpm.csproj similarity index 97% rename from src/StellaOps.Scanner.Analyzers.OS.Rpm/StellaOps.Scanner.Analyzers.OS.Rpm.csproj rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Rpm/StellaOps.Scanner.Analyzers.OS.Rpm.csproj index 4fed40b9..50e10e00 100644 --- a/src/StellaOps.Scanner.Analyzers.OS.Rpm/StellaOps.Scanner.Analyzers.OS.Rpm.csproj +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Rpm/StellaOps.Scanner.Analyzers.OS.Rpm.csproj @@ -1,16 +1,16 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <ItemGroup> - <PackageReference Include="Microsoft.Data.Sqlite" Version="9.0.0-rc.1.24451.1" /> - <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> - </ItemGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Scanner.Analyzers.OS\StellaOps.Scanner.Analyzers.OS.csproj" /> - </ItemGroup> -</Project> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <ItemGroup> + <PackageReference Include="Microsoft.Data.Sqlite" Version="9.0.0-rc.1.24451.1" /> + <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> + </ItemGroup> + <ItemGroup> + <ProjectReference 
Include="..\StellaOps.Scanner.Analyzers.OS\StellaOps.Scanner.Analyzers.OS.csproj" /> + </ItemGroup> +</Project> diff --git a/src/StellaOps.Scanner.Analyzers.OS.Rpm/manifest.json b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Rpm/manifest.json similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS.Rpm/manifest.json rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS.Rpm/manifest.json diff --git a/src/StellaOps.Scanner.Analyzers.OS/AGENTS.md b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/AGENTS.md similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS/AGENTS.md rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/AGENTS.md diff --git a/src/StellaOps.Scanner.Analyzers.OS/Abstractions/IOSPackageAnalyzer.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Abstractions/IOSPackageAnalyzer.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS/Abstractions/IOSPackageAnalyzer.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Abstractions/IOSPackageAnalyzer.cs diff --git a/src/StellaOps.Scanner.Analyzers.OS/Analyzers/OsPackageAnalyzerBase.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Analyzers/OsPackageAnalyzerBase.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS/Analyzers/OsPackageAnalyzerBase.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Analyzers/OsPackageAnalyzerBase.cs diff --git a/src/StellaOps.Scanner.Analyzers.OS/Helpers/CveHintExtractor.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Helpers/CveHintExtractor.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS/Helpers/CveHintExtractor.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Helpers/CveHintExtractor.cs diff --git a/src/StellaOps.Scanner.Analyzers.OS/Helpers/PackageUrlBuilder.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Helpers/PackageUrlBuilder.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS/Helpers/PackageUrlBuilder.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Helpers/PackageUrlBuilder.cs diff --git a/src/StellaOps.Scanner.Analyzers.OS/Helpers/PackageVersionParser.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Helpers/PackageVersionParser.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS/Helpers/PackageVersionParser.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Helpers/PackageVersionParser.cs diff --git a/src/StellaOps.Scanner.Analyzers.OS/Mapping/OsComponentMapper.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Mapping/OsComponentMapper.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS/Mapping/OsComponentMapper.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Mapping/OsComponentMapper.cs diff --git a/src/StellaOps.Scanner.Analyzers.OS/Model/AnalyzerWarning.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Model/AnalyzerWarning.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS/Model/AnalyzerWarning.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Model/AnalyzerWarning.cs diff --git a/src/StellaOps.Scanner.Analyzers.OS/Model/OSAnalyzerTelemetry.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Model/OSAnalyzerTelemetry.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS/Model/OSAnalyzerTelemetry.cs rename to 
src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Model/OSAnalyzerTelemetry.cs diff --git a/src/StellaOps.Scanner.Analyzers.OS/Model/OSPackageAnalyzerContext.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Model/OSPackageAnalyzerContext.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS/Model/OSPackageAnalyzerContext.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Model/OSPackageAnalyzerContext.cs diff --git a/src/StellaOps.Scanner.Analyzers.OS/Model/OSPackageAnalyzerResult.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Model/OSPackageAnalyzerResult.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS/Model/OSPackageAnalyzerResult.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Model/OSPackageAnalyzerResult.cs diff --git a/src/StellaOps.Scanner.Analyzers.OS/Model/OSPackageFileEvidence.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Model/OSPackageFileEvidence.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS/Model/OSPackageFileEvidence.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Model/OSPackageFileEvidence.cs diff --git a/src/StellaOps.Scanner.Analyzers.OS/Model/OSPackageRecord.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Model/OSPackageRecord.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS/Model/OSPackageRecord.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Model/OSPackageRecord.cs diff --git a/src/StellaOps.Scanner.Analyzers.OS/Model/PackageEvidenceSource.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Model/PackageEvidenceSource.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS/Model/PackageEvidenceSource.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Model/PackageEvidenceSource.cs diff --git a/src/StellaOps.Scanner.Analyzers.OS/Plugin/IOSAnalyzerPlugin.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Plugin/IOSAnalyzerPlugin.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS/Plugin/IOSAnalyzerPlugin.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Plugin/IOSAnalyzerPlugin.cs diff --git a/src/StellaOps.Scanner.Analyzers.OS/Plugin/OsAnalyzerPluginCatalog.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Plugin/OsAnalyzerPluginCatalog.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS/Plugin/OsAnalyzerPluginCatalog.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Plugin/OsAnalyzerPluginCatalog.cs diff --git a/src/StellaOps.Scanner.Analyzers.OS/Properties/AssemblyInfo.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Properties/AssemblyInfo.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS/Properties/AssemblyInfo.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/Properties/AssemblyInfo.cs diff --git a/src/StellaOps.Scanner.Analyzers.OS/StellaOps.Scanner.Analyzers.OS.csproj b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/StellaOps.Scanner.Analyzers.OS.csproj similarity index 78% rename from src/StellaOps.Scanner.Analyzers.OS/StellaOps.Scanner.Analyzers.OS.csproj rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/StellaOps.Scanner.Analyzers.OS.csproj index 71ce2220..a0bd80af 100644 --- a/src/StellaOps.Scanner.Analyzers.OS/StellaOps.Scanner.Analyzers.OS.csproj +++ 
b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/StellaOps.Scanner.Analyzers.OS.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -10,7 +11,7 @@ <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> </ItemGroup> <ItemGroup> - <ProjectReference Include="..\StellaOps.Plugin\StellaOps.Plugin.csproj" /> + <ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" /> <ProjectReference Include="..\StellaOps.Scanner.Core\StellaOps.Scanner.Core.csproj" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Scanner.Analyzers.OS/TASKS.md b/src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/TASKS.md similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS/TASKS.md rename to src/Scanner/__Libraries/StellaOps.Scanner.Analyzers.OS/TASKS.md diff --git a/src/StellaOps.Scanner.Cache/AGENTS.md b/src/Scanner/__Libraries/StellaOps.Scanner.Cache/AGENTS.md similarity index 100% rename from src/StellaOps.Scanner.Cache/AGENTS.md rename to src/Scanner/__Libraries/StellaOps.Scanner.Cache/AGENTS.md diff --git a/src/StellaOps.Scanner.Cache/Abstractions/IFileContentAddressableStore.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Cache/Abstractions/IFileContentAddressableStore.cs similarity index 100% rename from src/StellaOps.Scanner.Cache/Abstractions/IFileContentAddressableStore.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Cache/Abstractions/IFileContentAddressableStore.cs diff --git a/src/StellaOps.Scanner.Cache/Abstractions/ILayerCacheStore.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Cache/Abstractions/ILayerCacheStore.cs similarity index 100% rename from src/StellaOps.Scanner.Cache/Abstractions/ILayerCacheStore.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Cache/Abstractions/ILayerCacheStore.cs diff --git a/src/StellaOps.Scanner.Cache/Abstractions/LayerCacheEntry.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Cache/Abstractions/LayerCacheEntry.cs similarity index 100% rename from src/StellaOps.Scanner.Cache/Abstractions/LayerCacheEntry.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Cache/Abstractions/LayerCacheEntry.cs diff --git a/src/StellaOps.Scanner.Cache/Abstractions/LayerCachePutRequest.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Cache/Abstractions/LayerCachePutRequest.cs similarity index 100% rename from src/StellaOps.Scanner.Cache/Abstractions/LayerCachePutRequest.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Cache/Abstractions/LayerCachePutRequest.cs diff --git a/src/StellaOps.Scanner.Cache/FileCas/FileContentAddressableStore.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Cache/FileCas/FileContentAddressableStore.cs similarity index 100% rename from src/StellaOps.Scanner.Cache/FileCas/FileContentAddressableStore.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Cache/FileCas/FileContentAddressableStore.cs diff --git a/src/StellaOps.Scanner.Cache/FileCas/NullFileContentAddressableStore.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Cache/FileCas/NullFileContentAddressableStore.cs similarity index 100% rename from src/StellaOps.Scanner.Cache/FileCas/NullFileContentAddressableStore.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Cache/FileCas/NullFileContentAddressableStore.cs diff --git a/src/StellaOps.Scanner.Cache/LayerCache/LayerCacheStore.cs 
b/src/Scanner/__Libraries/StellaOps.Scanner.Cache/LayerCache/LayerCacheStore.cs similarity index 100% rename from src/StellaOps.Scanner.Cache/LayerCache/LayerCacheStore.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Cache/LayerCache/LayerCacheStore.cs diff --git a/src/StellaOps.Scanner.Cache/Maintenance/ScannerCacheMaintenanceService.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Cache/Maintenance/ScannerCacheMaintenanceService.cs similarity index 100% rename from src/StellaOps.Scanner.Cache/Maintenance/ScannerCacheMaintenanceService.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Cache/Maintenance/ScannerCacheMaintenanceService.cs diff --git a/src/StellaOps.Scanner.Cache/ScannerCacheMetrics.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Cache/ScannerCacheMetrics.cs similarity index 100% rename from src/StellaOps.Scanner.Cache/ScannerCacheMetrics.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Cache/ScannerCacheMetrics.cs diff --git a/src/StellaOps.Scanner.Cache/ScannerCacheOptions.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Cache/ScannerCacheOptions.cs similarity index 100% rename from src/StellaOps.Scanner.Cache/ScannerCacheOptions.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Cache/ScannerCacheOptions.cs diff --git a/src/StellaOps.Scanner.Cache/ScannerCacheServiceCollectionExtensions.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Cache/ScannerCacheServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Scanner.Cache/ScannerCacheServiceCollectionExtensions.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Cache/ScannerCacheServiceCollectionExtensions.cs diff --git a/src/StellaOps.Scanner.Cache/StellaOps.Scanner.Cache.csproj b/src/Scanner/__Libraries/StellaOps.Scanner.Cache/StellaOps.Scanner.Cache.csproj similarity index 98% rename from src/StellaOps.Scanner.Cache/StellaOps.Scanner.Cache.csproj rename to src/Scanner/__Libraries/StellaOps.Scanner.Cache/StellaOps.Scanner.Cache.csproj index eff5024a..c6d90749 100644 --- a/src/StellaOps.Scanner.Cache/StellaOps.Scanner.Cache.csproj +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Cache/StellaOps.Scanner.Cache.csproj @@ -1,19 +1,19 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <GenerateAssemblyInfo>false</GenerateAssemblyInfo> - </PropertyGroup> - - <ItemGroup> - <PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Configuration.Binder" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Options.ConfigurationExtensions" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="StackExchange.Redis" Version="2.7.33" /> - </ItemGroup> -</Project> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <GenerateAssemblyInfo>false</GenerateAssemblyInfo> + </PropertyGroup> + + 
<ItemGroup> + <PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Configuration.Binder" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Options.ConfigurationExtensions" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="StackExchange.Redis" Version="2.7.33" /> + </ItemGroup> +</Project> diff --git a/src/StellaOps.Scanner.Cache/TASKS.md b/src/Scanner/__Libraries/StellaOps.Scanner.Cache/TASKS.md similarity index 100% rename from src/StellaOps.Scanner.Cache/TASKS.md rename to src/Scanner/__Libraries/StellaOps.Scanner.Cache/TASKS.md diff --git a/src/StellaOps.Scanner.Core/AGENTS.md b/src/Scanner/__Libraries/StellaOps.Scanner.Core/AGENTS.md similarity index 100% rename from src/StellaOps.Scanner.Core/AGENTS.md rename to src/Scanner/__Libraries/StellaOps.Scanner.Core/AGENTS.md diff --git a/src/StellaOps.Scanner.Core/Contracts/ComponentGraph.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Core/Contracts/ComponentGraph.cs similarity index 100% rename from src/StellaOps.Scanner.Core/Contracts/ComponentGraph.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Core/Contracts/ComponentGraph.cs diff --git a/src/StellaOps.Scanner.Core/Contracts/ComponentModels.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Core/Contracts/ComponentModels.cs similarity index 100% rename from src/StellaOps.Scanner.Core/Contracts/ComponentModels.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Core/Contracts/ComponentModels.cs diff --git a/src/StellaOps.Scanner.Core/Contracts/SbomView.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Core/Contracts/SbomView.cs similarity index 100% rename from src/StellaOps.Scanner.Core/Contracts/SbomView.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Core/Contracts/SbomView.cs diff --git a/src/StellaOps.Scanner.Core/Contracts/ScanAnalysisKeys.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Core/Contracts/ScanAnalysisKeys.cs similarity index 100% rename from src/StellaOps.Scanner.Core/Contracts/ScanAnalysisKeys.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Core/Contracts/ScanAnalysisKeys.cs diff --git a/src/StellaOps.Scanner.Core/Contracts/ScanAnalysisStore.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Core/Contracts/ScanAnalysisStore.cs similarity index 100% rename from src/StellaOps.Scanner.Core/Contracts/ScanAnalysisStore.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Core/Contracts/ScanAnalysisStore.cs diff --git a/src/StellaOps.Scanner.Core/Contracts/ScanAnalysisStoreExtensions.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Core/Contracts/ScanAnalysisStoreExtensions.cs similarity index 100% rename from src/StellaOps.Scanner.Core/Contracts/ScanAnalysisStoreExtensions.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Core/Contracts/ScanAnalysisStoreExtensions.cs diff --git a/src/StellaOps.Scanner.Core/Contracts/ScanJob.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Core/Contracts/ScanJob.cs similarity index 100% rename from src/StellaOps.Scanner.Core/Contracts/ScanJob.cs rename 
to src/Scanner/__Libraries/StellaOps.Scanner.Core/Contracts/ScanJob.cs diff --git a/src/StellaOps.Scanner.Core/Contracts/ScanJobIdJsonConverter.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Core/Contracts/ScanJobIdJsonConverter.cs similarity index 100% rename from src/StellaOps.Scanner.Core/Contracts/ScanJobIdJsonConverter.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Core/Contracts/ScanJobIdJsonConverter.cs diff --git a/src/StellaOps.Scanner.Core/Contracts/ScanMetadataKeys.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Core/Contracts/ScanMetadataKeys.cs similarity index 100% rename from src/StellaOps.Scanner.Core/Contracts/ScanMetadataKeys.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Core/Contracts/ScanMetadataKeys.cs diff --git a/src/StellaOps.Scanner.Core/Contracts/ScanProgressEvent.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Core/Contracts/ScanProgressEvent.cs similarity index 100% rename from src/StellaOps.Scanner.Core/Contracts/ScanProgressEvent.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Core/Contracts/ScanProgressEvent.cs diff --git a/src/StellaOps.Scanner.Core/Contracts/ScannerError.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Core/Contracts/ScannerError.cs similarity index 100% rename from src/StellaOps.Scanner.Core/Contracts/ScannerError.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Core/Contracts/ScannerError.cs diff --git a/src/StellaOps.Scanner.Core/Observability/ScannerCorrelationContext.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Core/Observability/ScannerCorrelationContext.cs similarity index 100% rename from src/StellaOps.Scanner.Core/Observability/ScannerCorrelationContext.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Core/Observability/ScannerCorrelationContext.cs diff --git a/src/StellaOps.Scanner.Core/Observability/ScannerDiagnostics.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Core/Observability/ScannerDiagnostics.cs similarity index 100% rename from src/StellaOps.Scanner.Core/Observability/ScannerDiagnostics.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Core/Observability/ScannerDiagnostics.cs diff --git a/src/StellaOps.Scanner.Core/Observability/ScannerLogExtensions.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Core/Observability/ScannerLogExtensions.cs similarity index 100% rename from src/StellaOps.Scanner.Core/Observability/ScannerLogExtensions.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Core/Observability/ScannerLogExtensions.cs diff --git a/src/StellaOps.Scanner.Core/Observability/ScannerMetricNames.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Core/Observability/ScannerMetricNames.cs similarity index 100% rename from src/StellaOps.Scanner.Core/Observability/ScannerMetricNames.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Core/Observability/ScannerMetricNames.cs diff --git a/src/StellaOps.Scanner.Core/Security/AuthorityTokenSource.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Core/Security/AuthorityTokenSource.cs similarity index 100% rename from src/StellaOps.Scanner.Core/Security/AuthorityTokenSource.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Core/Security/AuthorityTokenSource.cs diff --git a/src/StellaOps.Scanner.Core/Security/IAuthorityTokenSource.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Core/Security/IAuthorityTokenSource.cs similarity index 100% rename from src/StellaOps.Scanner.Core/Security/IAuthorityTokenSource.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Core/Security/IAuthorityTokenSource.cs diff --git 
a/src/StellaOps.Scanner.Core/Security/IPluginCatalogGuard.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Core/Security/IPluginCatalogGuard.cs similarity index 100% rename from src/StellaOps.Scanner.Core/Security/IPluginCatalogGuard.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Core/Security/IPluginCatalogGuard.cs diff --git a/src/StellaOps.Scanner.Core/Security/RestartOnlyPluginGuard.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Core/Security/RestartOnlyPluginGuard.cs similarity index 100% rename from src/StellaOps.Scanner.Core/Security/RestartOnlyPluginGuard.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Core/Security/RestartOnlyPluginGuard.cs diff --git a/src/StellaOps.Scanner.Core/Security/ScannerOperationalToken.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Core/Security/ScannerOperationalToken.cs similarity index 100% rename from src/StellaOps.Scanner.Core/Security/ScannerOperationalToken.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Core/Security/ScannerOperationalToken.cs diff --git a/src/StellaOps.Scanner.Core/Security/ServiceCollectionExtensions.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Core/Security/ServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Scanner.Core/Security/ServiceCollectionExtensions.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Core/Security/ServiceCollectionExtensions.cs diff --git a/src/StellaOps.Scanner.Core/Serialization/ScannerJsonOptions.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Core/Serialization/ScannerJsonOptions.cs similarity index 100% rename from src/StellaOps.Scanner.Core/Serialization/ScannerJsonOptions.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Core/Serialization/ScannerJsonOptions.cs diff --git a/src/StellaOps.Scanner.Core/StellaOps.Scanner.Core.csproj b/src/Scanner/__Libraries/StellaOps.Scanner.Core/StellaOps.Scanner.Core.csproj similarity index 65% rename from src/StellaOps.Scanner.Core/StellaOps.Scanner.Core.csproj rename to src/Scanner/__Libraries/StellaOps.Scanner.Core/StellaOps.Scanner.Core.csproj index a3c89a04..edfb7bf9 100644 --- a/src/StellaOps.Scanner.Core/StellaOps.Scanner.Core.csproj +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Core/StellaOps.Scanner.Core.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -11,7 +12,7 @@ <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" /> </ItemGroup> <ItemGroup> - <ProjectReference Include="..\StellaOps.Authority/StellaOps.Auth.Client/StellaOps.Auth.Client.csproj" /> - <ProjectReference Include="..\StellaOps.Auth.Security/StellaOps.Auth.Security.csproj" /> + <ProjectReference Include="../../../Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOps.Auth.Client.csproj" /> + <ProjectReference Include="../../../__Libraries/StellaOps.Auth.Security/StellaOps.Auth.Security.csproj" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Scanner.Core/TASKS.md b/src/Scanner/__Libraries/StellaOps.Scanner.Core/TASKS.md similarity index 100% rename from src/StellaOps.Scanner.Core/TASKS.md rename to src/Scanner/__Libraries/StellaOps.Scanner.Core/TASKS.md diff --git a/src/StellaOps.Scanner.Core/Utility/ScannerIdentifiers.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Core/Utility/ScannerIdentifiers.cs similarity index 100% rename from src/StellaOps.Scanner.Core/Utility/ScannerIdentifiers.cs rename to 
src/Scanner/__Libraries/StellaOps.Scanner.Core/Utility/ScannerIdentifiers.cs diff --git a/src/StellaOps.Scanner.Core/Utility/ScannerTimestamps.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Core/Utility/ScannerTimestamps.cs similarity index 100% rename from src/StellaOps.Scanner.Core/Utility/ScannerTimestamps.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Core/Utility/ScannerTimestamps.cs diff --git a/src/StellaOps.Scanner.Diff/AGENTS.md b/src/Scanner/__Libraries/StellaOps.Scanner.Diff/AGENTS.md similarity index 100% rename from src/StellaOps.Scanner.Diff/AGENTS.md rename to src/Scanner/__Libraries/StellaOps.Scanner.Diff/AGENTS.md diff --git a/src/StellaOps.Scanner.Diff/ComponentDiffModels.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Diff/ComponentDiffModels.cs similarity index 100% rename from src/StellaOps.Scanner.Diff/ComponentDiffModels.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Diff/ComponentDiffModels.cs diff --git a/src/StellaOps.Scanner.Diff/ComponentDiffer.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Diff/ComponentDiffer.cs similarity index 100% rename from src/StellaOps.Scanner.Diff/ComponentDiffer.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Diff/ComponentDiffer.cs diff --git a/src/StellaOps.Scanner.Diff/DiffJsonSerializer.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Diff/DiffJsonSerializer.cs similarity index 100% rename from src/StellaOps.Scanner.Diff/DiffJsonSerializer.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Diff/DiffJsonSerializer.cs diff --git a/src/StellaOps.Scanner.Diff/StellaOps.Scanner.Diff.csproj b/src/Scanner/__Libraries/StellaOps.Scanner.Diff/StellaOps.Scanner.Diff.csproj similarity index 100% rename from src/StellaOps.Scanner.Diff/StellaOps.Scanner.Diff.csproj rename to src/Scanner/__Libraries/StellaOps.Scanner.Diff/StellaOps.Scanner.Diff.csproj diff --git a/src/StellaOps.Scanner.Diff/TASKS.md b/src/Scanner/__Libraries/StellaOps.Scanner.Diff/TASKS.md similarity index 100% rename from src/StellaOps.Scanner.Diff/TASKS.md rename to src/Scanner/__Libraries/StellaOps.Scanner.Diff/TASKS.md diff --git a/src/StellaOps.Scanner.Emit/AGENTS.md b/src/Scanner/__Libraries/StellaOps.Scanner.Emit/AGENTS.md similarity index 100% rename from src/StellaOps.Scanner.Emit/AGENTS.md rename to src/Scanner/__Libraries/StellaOps.Scanner.Emit/AGENTS.md diff --git a/src/StellaOps.Scanner.Emit/Composition/CycloneDxComposer.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Emit/Composition/CycloneDxComposer.cs similarity index 100% rename from src/StellaOps.Scanner.Emit/Composition/CycloneDxComposer.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Emit/Composition/CycloneDxComposer.cs diff --git a/src/StellaOps.Scanner.Emit/Composition/SbomCompositionRequest.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Emit/Composition/SbomCompositionRequest.cs similarity index 100% rename from src/StellaOps.Scanner.Emit/Composition/SbomCompositionRequest.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Emit/Composition/SbomCompositionRequest.cs diff --git a/src/StellaOps.Scanner.Emit/Composition/SbomCompositionResult.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Emit/Composition/SbomCompositionResult.cs similarity index 100% rename from src/StellaOps.Scanner.Emit/Composition/SbomCompositionResult.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Emit/Composition/SbomCompositionResult.cs diff --git a/src/StellaOps.Scanner.Emit/Composition/SbomPolicyFinding.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Emit/Composition/SbomPolicyFinding.cs 
similarity index 97% rename from src/StellaOps.Scanner.Emit/Composition/SbomPolicyFinding.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Emit/Composition/SbomPolicyFinding.cs index d0d77ea8..f34e163e 100644 --- a/src/StellaOps.Scanner.Emit/Composition/SbomPolicyFinding.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Emit/Composition/SbomPolicyFinding.cs @@ -1,65 +1,65 @@ -using System; -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Linq; - -namespace StellaOps.Scanner.Emit.Composition; - -public sealed record SbomPolicyFinding -{ - public required string FindingId { get; init; } - - public required string ComponentKey { get; init; } - - public string? VulnerabilityId { get; init; } - - public string Status { get; init; } = string.Empty; - - public double Score { get; init; } - - public string ConfigVersion { get; init; } = string.Empty; - - public ImmutableArray<KeyValuePair<string, double>> Inputs { get; init; } = ImmutableArray<KeyValuePair<string, double>>.Empty; - - public string? QuietedBy { get; init; } - - public bool Quiet { get; init; } - - public double? UnknownConfidence { get; init; } - - public string? ConfidenceBand { get; init; } - - public double? UnknownAgeDays { get; init; } - - public string? SourceTrust { get; init; } - - public string? Reachability { get; init; } - - internal SbomPolicyFinding Normalize() - { - ArgumentException.ThrowIfNullOrWhiteSpace(FindingId); - ArgumentException.ThrowIfNullOrWhiteSpace(ComponentKey); - - var normalizedInputs = Inputs.IsDefaultOrEmpty - ? ImmutableArray<KeyValuePair<string, double>>.Empty - : Inputs - .Where(static pair => !string.IsNullOrWhiteSpace(pair.Key)) - .Select(static pair => new KeyValuePair<string, double>(pair.Key.Trim(), pair.Value)) - .OrderBy(static pair => pair.Key, StringComparer.Ordinal) - .ToImmutableArray(); - - return this with - { - FindingId = FindingId.Trim(), - ComponentKey = ComponentKey.Trim(), - VulnerabilityId = string.IsNullOrWhiteSpace(VulnerabilityId) ? null : VulnerabilityId.Trim(), - Status = string.IsNullOrWhiteSpace(Status) ? string.Empty : Status.Trim(), - ConfigVersion = string.IsNullOrWhiteSpace(ConfigVersion) ? string.Empty : ConfigVersion.Trim(), - QuietedBy = string.IsNullOrWhiteSpace(QuietedBy) ? null : QuietedBy.Trim(), - ConfidenceBand = string.IsNullOrWhiteSpace(ConfidenceBand) ? null : ConfidenceBand.Trim(), - SourceTrust = string.IsNullOrWhiteSpace(SourceTrust) ? null : SourceTrust.Trim(), - Reachability = string.IsNullOrWhiteSpace(Reachability) ? null : Reachability.Trim(), - Inputs = normalizedInputs - }; - } -} +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Linq; + +namespace StellaOps.Scanner.Emit.Composition; + +public sealed record SbomPolicyFinding +{ + public required string FindingId { get; init; } + + public required string ComponentKey { get; init; } + + public string? VulnerabilityId { get; init; } + + public string Status { get; init; } = string.Empty; + + public double Score { get; init; } + + public string ConfigVersion { get; init; } = string.Empty; + + public ImmutableArray<KeyValuePair<string, double>> Inputs { get; init; } = ImmutableArray<KeyValuePair<string, double>>.Empty; + + public string? QuietedBy { get; init; } + + public bool Quiet { get; init; } + + public double? UnknownConfidence { get; init; } + + public string? ConfidenceBand { get; init; } + + public double? UnknownAgeDays { get; init; } + + public string? 
SourceTrust { get; init; } + + public string? Reachability { get; init; } + + internal SbomPolicyFinding Normalize() + { + ArgumentException.ThrowIfNullOrWhiteSpace(FindingId); + ArgumentException.ThrowIfNullOrWhiteSpace(ComponentKey); + + var normalizedInputs = Inputs.IsDefaultOrEmpty + ? ImmutableArray<KeyValuePair<string, double>>.Empty + : Inputs + .Where(static pair => !string.IsNullOrWhiteSpace(pair.Key)) + .Select(static pair => new KeyValuePair<string, double>(pair.Key.Trim(), pair.Value)) + .OrderBy(static pair => pair.Key, StringComparer.Ordinal) + .ToImmutableArray(); + + return this with + { + FindingId = FindingId.Trim(), + ComponentKey = ComponentKey.Trim(), + VulnerabilityId = string.IsNullOrWhiteSpace(VulnerabilityId) ? null : VulnerabilityId.Trim(), + Status = string.IsNullOrWhiteSpace(Status) ? string.Empty : Status.Trim(), + ConfigVersion = string.IsNullOrWhiteSpace(ConfigVersion) ? string.Empty : ConfigVersion.Trim(), + QuietedBy = string.IsNullOrWhiteSpace(QuietedBy) ? null : QuietedBy.Trim(), + ConfidenceBand = string.IsNullOrWhiteSpace(ConfidenceBand) ? null : ConfidenceBand.Trim(), + SourceTrust = string.IsNullOrWhiteSpace(SourceTrust) ? null : SourceTrust.Trim(), + Reachability = string.IsNullOrWhiteSpace(Reachability) ? null : Reachability.Trim(), + Inputs = normalizedInputs + }; + } +} diff --git a/src/StellaOps.Scanner.Emit/Composition/ScanAnalysisCompositionBuilder.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Emit/Composition/ScanAnalysisCompositionBuilder.cs similarity index 100% rename from src/StellaOps.Scanner.Emit/Composition/ScanAnalysisCompositionBuilder.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Emit/Composition/ScanAnalysisCompositionBuilder.cs diff --git a/src/StellaOps.Scanner.Emit/Index/BomIndexBuilder.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Emit/Index/BomIndexBuilder.cs similarity index 100% rename from src/StellaOps.Scanner.Emit/Index/BomIndexBuilder.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Emit/Index/BomIndexBuilder.cs diff --git a/src/StellaOps.Scanner.Emit/Packaging/ScannerArtifactPackageBuilder.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Emit/Packaging/ScannerArtifactPackageBuilder.cs similarity index 100% rename from src/StellaOps.Scanner.Emit/Packaging/ScannerArtifactPackageBuilder.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Emit/Packaging/ScannerArtifactPackageBuilder.cs diff --git a/src/StellaOps.Scanner.Emit/StellaOps.Scanner.Emit.csproj b/src/Scanner/__Libraries/StellaOps.Scanner.Emit/StellaOps.Scanner.Emit.csproj similarity index 100% rename from src/StellaOps.Scanner.Emit/StellaOps.Scanner.Emit.csproj rename to src/Scanner/__Libraries/StellaOps.Scanner.Emit/StellaOps.Scanner.Emit.csproj diff --git a/src/StellaOps.Scanner.Emit/TASKS.md b/src/Scanner/__Libraries/StellaOps.Scanner.Emit/TASKS.md similarity index 100% rename from src/StellaOps.Scanner.Emit/TASKS.md rename to src/Scanner/__Libraries/StellaOps.Scanner.Emit/TASKS.md diff --git a/src/StellaOps.Scanner.EntryTrace/AGENTS.md b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/AGENTS.md similarity index 100% rename from src/StellaOps.Scanner.EntryTrace/AGENTS.md rename to src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/AGENTS.md diff --git a/src/StellaOps.Scanner.EntryTrace/Diagnostics/EntryTraceMetrics.cs b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Diagnostics/EntryTraceMetrics.cs similarity index 100% rename from src/StellaOps.Scanner.EntryTrace/Diagnostics/EntryTraceMetrics.cs rename to 
src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Diagnostics/EntryTraceMetrics.cs diff --git a/src/StellaOps.Scanner.EntryTrace/EntryTraceAnalyzer.cs b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntryTraceAnalyzer.cs similarity index 100% rename from src/StellaOps.Scanner.EntryTrace/EntryTraceAnalyzer.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntryTraceAnalyzer.cs diff --git a/src/StellaOps.Scanner.EntryTrace/EntryTraceAnalyzerOptions.cs b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntryTraceAnalyzerOptions.cs similarity index 100% rename from src/StellaOps.Scanner.EntryTrace/EntryTraceAnalyzerOptions.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntryTraceAnalyzerOptions.cs diff --git a/src/StellaOps.Scanner.EntryTrace/EntryTraceContext.cs b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntryTraceContext.cs similarity index 100% rename from src/StellaOps.Scanner.EntryTrace/EntryTraceContext.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntryTraceContext.cs diff --git a/src/StellaOps.Scanner.EntryTrace/EntryTraceImageContextFactory.cs b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntryTraceImageContextFactory.cs similarity index 96% rename from src/StellaOps.Scanner.EntryTrace/EntryTraceImageContextFactory.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntryTraceImageContextFactory.cs index 361232cb..a5054ef0 100644 --- a/src/StellaOps.Scanner.EntryTrace/EntryTraceImageContextFactory.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntryTraceImageContextFactory.cs @@ -1,178 +1,178 @@ -using System; -using System.Collections.Generic; -using System.Collections.Immutable; -using Microsoft.Extensions.Logging; - -namespace StellaOps.Scanner.EntryTrace; - -/// <summary> -/// Combines OCI configuration and root filesystem data into the context required by the EntryTrace analyzer. -/// </summary> -public static class EntryTraceImageContextFactory -{ - private const string DefaultUser = "root"; - - public static EntryTraceImageContext Create( - OciImageConfig config, - IRootFileSystem fileSystem, - EntryTraceAnalyzerOptions options, - string imageDigest, - string scanId, - ILogger? logger = null) - { - ArgumentNullException.ThrowIfNull(config); - ArgumentNullException.ThrowIfNull(fileSystem); - ArgumentNullException.ThrowIfNull(options); - ArgumentException.ThrowIfNullOrWhiteSpace(imageDigest); - ArgumentException.ThrowIfNullOrWhiteSpace(scanId); - - var environment = BuildEnvironment(config.Environment); - var path = DeterminePath(environment, options); - var workingDir = NormalizeWorkingDirectory(config.WorkingDirectory); - var user = NormalizeUser(config.User); - - var context = new EntryTraceContext( - fileSystem, - environment, - path, - workingDir, - user, - imageDigest, - scanId, - logger); - - var entrypoint = EntrypointSpecification.FromExecForm( - config.Entrypoint.IsDefaultOrEmpty ? null : config.Entrypoint, - config.Command.IsDefaultOrEmpty ? 
null : config.Command); - - return new EntryTraceImageContext(entrypoint, context); - } - - private static ImmutableDictionary<string, string> BuildEnvironment(ImmutableArray<string> raw) - { - if (raw.IsDefaultOrEmpty) - { - return ImmutableDictionary<string, string>.Empty; - } - - var dictionary = new Dictionary<string, string>(StringComparer.Ordinal); - foreach (var entry in raw) - { - if (string.IsNullOrWhiteSpace(entry)) - { - continue; - } - - var separatorIndex = entry.IndexOf('='); - if (separatorIndex < 0) - { - var key = entry.Trim(); - if (key.Length > 0) - { - dictionary[key] = string.Empty; - } - continue; - } - - var keyPart = entry[..separatorIndex].Trim(); - if (keyPart.Length == 0) - { - continue; - } - - var valuePart = entry[(separatorIndex + 1)..]; - dictionary[keyPart] = valuePart; - } - - return ImmutableDictionary.CreateRange(StringComparer.Ordinal, dictionary); - } - - private static ImmutableArray<string> DeterminePath(ImmutableDictionary<string, string> env, EntryTraceAnalyzerOptions options) - { - if (env.TryGetValue("PATH", out var pathValue) && !string.IsNullOrWhiteSpace(pathValue)) - { - return SplitPath(pathValue); - } - - var fallback = string.IsNullOrWhiteSpace(options.DefaultPath) - ? EntryTraceDefaults.DefaultPath - : options.DefaultPath; - - return SplitPath(fallback); - } - - private static string NormalizeWorkingDirectory(string? workingDir) - { - if (string.IsNullOrWhiteSpace(workingDir)) - { - return "/"; - } - - var text = workingDir.Replace('\\', '/').Trim(); - if (!text.StartsWith("/", StringComparison.Ordinal)) - { - text = "/" + text; - } - - if (text.Length > 1 && text.EndsWith("/", StringComparison.Ordinal)) - { - text = text.TrimEnd('/'); - } - - return text.Length == 0 ? "/" : text; - } - - private static string NormalizeUser(string? user) - { - if (string.IsNullOrWhiteSpace(user)) - { - return DefaultUser; - } - - return user.Trim(); - } - - private static ImmutableArray<string> SplitPath(string value) - { - if (string.IsNullOrWhiteSpace(value)) - { - return ImmutableArray<string>.Empty; - } - - var builder = ImmutableArray.CreateBuilder<string>(); - foreach (var segment in value.Split(':', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)) - { - if (segment.Length == 0) - { - continue; - } - - var normalized = segment.Replace('\\', '/'); - if (!normalized.StartsWith("/", StringComparison.Ordinal)) - { - normalized = "/" + normalized; - } - - if (normalized.EndsWith("/", StringComparison.Ordinal) && normalized.Length > 1) - { - normalized = normalized.TrimEnd('/'); - } - - builder.Add(normalized); - } - - return builder.ToImmutable(); - } -} - -/// <summary> -/// Bundles the resolved entrypoint and context required for the analyzer to operate. -/// </summary> -public sealed record EntryTraceImageContext( - EntrypointSpecification Entrypoint, - EntryTraceContext Context); - -internal static class EntryTraceDefaults -{ - public const string DefaultPath = "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"; -} +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using Microsoft.Extensions.Logging; + +namespace StellaOps.Scanner.EntryTrace; + +/// <summary> +/// Combines OCI configuration and root filesystem data into the context required by the EntryTrace analyzer. 
+/// </summary> +public static class EntryTraceImageContextFactory +{ + private const string DefaultUser = "root"; + + public static EntryTraceImageContext Create( + OciImageConfig config, + IRootFileSystem fileSystem, + EntryTraceAnalyzerOptions options, + string imageDigest, + string scanId, + ILogger? logger = null) + { + ArgumentNullException.ThrowIfNull(config); + ArgumentNullException.ThrowIfNull(fileSystem); + ArgumentNullException.ThrowIfNull(options); + ArgumentException.ThrowIfNullOrWhiteSpace(imageDigest); + ArgumentException.ThrowIfNullOrWhiteSpace(scanId); + + var environment = BuildEnvironment(config.Environment); + var path = DeterminePath(environment, options); + var workingDir = NormalizeWorkingDirectory(config.WorkingDirectory); + var user = NormalizeUser(config.User); + + var context = new EntryTraceContext( + fileSystem, + environment, + path, + workingDir, + user, + imageDigest, + scanId, + logger); + + var entrypoint = EntrypointSpecification.FromExecForm( + config.Entrypoint.IsDefaultOrEmpty ? null : config.Entrypoint, + config.Command.IsDefaultOrEmpty ? null : config.Command); + + return new EntryTraceImageContext(entrypoint, context); + } + + private static ImmutableDictionary<string, string> BuildEnvironment(ImmutableArray<string> raw) + { + if (raw.IsDefaultOrEmpty) + { + return ImmutableDictionary<string, string>.Empty; + } + + var dictionary = new Dictionary<string, string>(StringComparer.Ordinal); + foreach (var entry in raw) + { + if (string.IsNullOrWhiteSpace(entry)) + { + continue; + } + + var separatorIndex = entry.IndexOf('='); + if (separatorIndex < 0) + { + var key = entry.Trim(); + if (key.Length > 0) + { + dictionary[key] = string.Empty; + } + continue; + } + + var keyPart = entry[..separatorIndex].Trim(); + if (keyPart.Length == 0) + { + continue; + } + + var valuePart = entry[(separatorIndex + 1)..]; + dictionary[keyPart] = valuePart; + } + + return ImmutableDictionary.CreateRange(StringComparer.Ordinal, dictionary); + } + + private static ImmutableArray<string> DeterminePath(ImmutableDictionary<string, string> env, EntryTraceAnalyzerOptions options) + { + if (env.TryGetValue("PATH", out var pathValue) && !string.IsNullOrWhiteSpace(pathValue)) + { + return SplitPath(pathValue); + } + + var fallback = string.IsNullOrWhiteSpace(options.DefaultPath) + ? EntryTraceDefaults.DefaultPath + : options.DefaultPath; + + return SplitPath(fallback); + } + + private static string NormalizeWorkingDirectory(string? workingDir) + { + if (string.IsNullOrWhiteSpace(workingDir)) + { + return "/"; + } + + var text = workingDir.Replace('\\', '/').Trim(); + if (!text.StartsWith("/", StringComparison.Ordinal)) + { + text = "/" + text; + } + + if (text.Length > 1 && text.EndsWith("/", StringComparison.Ordinal)) + { + text = text.TrimEnd('/'); + } + + return text.Length == 0 ? "/" : text; + } + + private static string NormalizeUser(string? 
user) + { + if (string.IsNullOrWhiteSpace(user)) + { + return DefaultUser; + } + + return user.Trim(); + } + + private static ImmutableArray<string> SplitPath(string value) + { + if (string.IsNullOrWhiteSpace(value)) + { + return ImmutableArray<string>.Empty; + } + + var builder = ImmutableArray.CreateBuilder<string>(); + foreach (var segment in value.Split(':', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)) + { + if (segment.Length == 0) + { + continue; + } + + var normalized = segment.Replace('\\', '/'); + if (!normalized.StartsWith("/", StringComparison.Ordinal)) + { + normalized = "/" + normalized; + } + + if (normalized.EndsWith("/", StringComparison.Ordinal) && normalized.Length > 1) + { + normalized = normalized.TrimEnd('/'); + } + + builder.Add(normalized); + } + + return builder.ToImmutable(); + } +} + +/// <summary> +/// Bundles the resolved entrypoint and context required for the analyzer to operate. +/// </summary> +public sealed record EntryTraceImageContext( + EntrypointSpecification Entrypoint, + EntryTraceContext Context); + +internal static class EntryTraceDefaults +{ + public const string DefaultPath = "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"; +} diff --git a/src/StellaOps.Scanner.EntryTrace/EntryTraceTypes.cs b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntryTraceTypes.cs similarity index 100% rename from src/StellaOps.Scanner.EntryTrace/EntryTraceTypes.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntryTraceTypes.cs diff --git a/src/StellaOps.Scanner.EntryTrace/EntrypointSpecification.cs b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntrypointSpecification.cs similarity index 100% rename from src/StellaOps.Scanner.EntryTrace/EntrypointSpecification.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/EntrypointSpecification.cs diff --git a/src/StellaOps.Scanner.EntryTrace/FileSystem/IRootFileSystem.cs b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/FileSystem/IRootFileSystem.cs similarity index 100% rename from src/StellaOps.Scanner.EntryTrace/FileSystem/IRootFileSystem.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/FileSystem/IRootFileSystem.cs diff --git a/src/StellaOps.Scanner.EntryTrace/FileSystem/LayeredRootFileSystem.cs b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/FileSystem/LayeredRootFileSystem.cs similarity index 96% rename from src/StellaOps.Scanner.EntryTrace/FileSystem/LayeredRootFileSystem.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/FileSystem/LayeredRootFileSystem.cs index e1c58299..e86144bf 100644 --- a/src/StellaOps.Scanner.EntryTrace/FileSystem/LayeredRootFileSystem.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/FileSystem/LayeredRootFileSystem.cs @@ -1,771 +1,771 @@ -using System.Collections.Immutable; -using System.Formats.Tar; -using System.IO; -using System.IO.Compression; -using System.Linq; -using System.Text; -using System.Threading; -using IOPath = System.IO.Path; - -namespace StellaOps.Scanner.EntryTrace; - -/// <summary> -/// Represents an <see cref="IRootFileSystem"/> backed by OCI image layers. 
-/// </summary> -public sealed class LayeredRootFileSystem : IRootFileSystem -{ - private const int MaxSymlinkDepth = 32; - private const int MaxCachedTextBytes = 1_048_576; // 1 MiB - - private readonly ImmutableDictionary<string, FileEntry> _entries; - - private LayeredRootFileSystem(IDictionary<string, FileEntry> entries) - { - _entries = entries.ToImmutableDictionary(StringComparer.Ordinal); - } - - /// <summary> - /// Describes a directory on disk containing a single layer's contents. - /// </summary> - public sealed record LayerDirectory(string Digest, string Path); - - /// <summary> - /// Describes a tar archive on disk containing a single layer's contents. - /// </summary> - public sealed record LayerArchive(string Digest, string Path); - - /// <summary> - /// Builds a root filesystem by applying the provided directory layers in order. - /// </summary> - public static LayeredRootFileSystem FromDirectories(IEnumerable<LayerDirectory> layers) - { - if (layers is null) - { - throw new ArgumentNullException(nameof(layers)); - } - - var builder = new Builder(); - foreach (var layer in layers) - { - builder.ApplyDirectoryLayer(layer); - } - - return new LayeredRootFileSystem(builder.Build()); - } - - /// <summary> - /// Builds a root filesystem by applying the provided tar archive layers in order. - /// </summary> - public static LayeredRootFileSystem FromArchives(IEnumerable<LayerArchive> layers) - { - if (layers is null) - { - throw new ArgumentNullException(nameof(layers)); - } - - var builder = new Builder(); - foreach (var layer in layers) - { - builder.ApplyArchiveLayer(layer); - } - - return new LayeredRootFileSystem(builder.Build()); - } - - public bool TryResolveExecutable(string name, IReadOnlyList<string> searchPaths, out RootFileDescriptor descriptor) - { - descriptor = null!; - - if (string.IsNullOrWhiteSpace(name)) - { - return false; - } - - if (name.Contains('/', StringComparison.Ordinal)) - { - return TryResolveExecutableByPath(name, out descriptor); - } - - foreach (var searchPath in searchPaths) - { - if (string.IsNullOrWhiteSpace(searchPath)) - { - continue; - } - - var candidate = NormalizePath(searchPath, name); - if (TryResolveExecutableByPath(candidate, out descriptor)) - { - return true; - } - } - - return false; - } - - public bool TryReadAllText(string path, out RootFileDescriptor descriptor, out string content) - { - descriptor = null!; - content = string.Empty; - - if (!TryResolveFile(path, out var entry, out var resolvedPath)) - { - return false; - } - - if (!entry.TryReadText(out content)) - { - return false; - } - - descriptor = entry.ToDescriptor(resolvedPath); - return true; - } - - public ImmutableArray<RootFileDescriptor> EnumerateDirectory(string path) - { - var normalizedDirectory = NormalizeDirectory(path); - var results = ImmutableArray.CreateBuilder<RootFileDescriptor>(); - - foreach (var entry in _entries.Values) - { - if (!string.Equals(entry.Parent, normalizedDirectory, StringComparison.Ordinal)) - { - continue; - } - - if (entry.Kind == FileEntryKind.Symlink) - { - if (TryResolveFile(entry.Path, out var resolved, out var resolvedPath)) - { - results.Add(resolved.ToDescriptor(resolvedPath)); - } - continue; - } - - results.Add(entry.ToDescriptor(entry.Path)); - } - - return results.ToImmutable().Sort(static (left, right) => string.CompareOrdinal(left.Path, right.Path)); - } - - public bool DirectoryExists(string path) - { - var normalized = NormalizeDirectory(path); - if (_entries.TryGetValue(normalized, out var entry)) - { - return entry.Kind 
== FileEntryKind.Directory; - } - - return _entries.Keys.Any(k => k.StartsWith(normalized + "/", StringComparison.Ordinal)); - } - - private bool TryResolveExecutableByPath(string path, out RootFileDescriptor descriptor) - { - descriptor = null!; - - if (!TryResolveFile(path, out var entry, out var resolvedPath)) - { - return false; - } - - if (!entry.IsExecutable) - { - return false; - } - - descriptor = entry.ToDescriptor(resolvedPath); - return true; - } - - private bool TryResolveFile(string path, out FileEntry entry, out string resolvedPath) - { - var normalized = NormalizePath(path); - var visited = new HashSet<string>(StringComparer.Ordinal); - var depth = 0; - - while (true) - { - if (++depth > MaxSymlinkDepth) - { - break; - } - - if (!visited.Add(normalized)) - { - break; - } - - if (!_entries.TryGetValue(normalized, out var current)) - { - break; - } - - switch (current.Kind) - { - case FileEntryKind.File: - entry = current; - resolvedPath = normalized; - return true; - case FileEntryKind.Symlink: - normalized = ResolveSymlink(normalized, current.SymlinkTarget); - continue; - case FileEntryKind.Directory: - // cannot resolve to directory - entry = null!; - resolvedPath = string.Empty; - return false; - default: - entry = null!; - resolvedPath = string.Empty; - return false; - } - } - - entry = null!; - resolvedPath = string.Empty; - return false; - } - - private static string ResolveSymlink(string sourcePath, string? target) - { - if (string.IsNullOrWhiteSpace(target)) - { - return sourcePath; - } - - if (target.StartsWith("/", StringComparison.Ordinal)) - { - return NormalizePath(target); - } - - var directory = NormalizeDirectory(IOPath.GetDirectoryName(sourcePath)); - return NormalizePath(directory, target); - } - - private static string NormalizeDirectory(string? path) - { - var normalized = NormalizePath(path); - if (normalized.Length > 1 && normalized.EndsWith("/", StringComparison.Ordinal)) - { - normalized = normalized[..^1]; - } - - return normalized; - } - - private static string NormalizePath(string? path) - => NormalizePath("/", path); - - private static string NormalizePath(string basePath, string? relative) - { - var combined = string.IsNullOrWhiteSpace(relative) - ? basePath - : relative.StartsWith("/", StringComparison.Ordinal) - ? 
relative - : $"{basePath}/{relative}"; - - var text = combined.Replace('\\', '/'); - if (!text.StartsWith("/", StringComparison.Ordinal)) - { - text = "/" + text; - } - - var parts = new Stack<string>(); - foreach (var segment in text.Split('/', StringSplitOptions.RemoveEmptyEntries)) - { - if (segment == ".") - { - continue; - } - - if (segment == "..") - { - if (parts.Count > 0) - { - parts.Pop(); - } - continue; - } - - parts.Push(segment); - } - - if (parts.Count == 0) - { - return "/"; - } - - var builder = new StringBuilder(); - foreach (var segment in parts.Reverse()) - { - builder.Append('/').Append(segment); - } - - return builder.ToString(); - } - - private sealed class Builder - { - private readonly Dictionary<string, FileEntry> _entries = new(StringComparer.Ordinal); - - public Builder() - { - _entries["/"] = FileEntry.Directory("/", null); - } - - public void ApplyDirectoryLayer(LayerDirectory layer) - { - ArgumentNullException.ThrowIfNull(layer); - var root = layer.Path; - if (!Directory.Exists(root)) - { - throw new DirectoryNotFoundException($"Layer directory '{root}' was not found."); - } - - ApplyDirectoryEntry("/", layer.Digest); - - var stack = new Stack<string>(); - stack.Push(root); - - while (stack.Count > 0) - { - var current = stack.Pop(); - foreach (var entryPath in Directory.EnumerateFileSystemEntries(current, "*", SearchOption.TopDirectoryOnly)) - { - var relative = IOPath.GetRelativePath(root, entryPath); - var normalized = NormalizePath(relative); - var fileName = IOPath.GetFileName(normalized); - - if (IsWhiteoutEntry(fileName)) - { - HandleWhiteout(normalized); - continue; - } - - var attributes = File.GetAttributes(entryPath); - var isSymlink = attributes.HasFlag(FileAttributes.ReparsePoint); - - if (Directory.Exists(entryPath) && !isSymlink) - { - ApplyDirectoryEntry(normalized, layer.Digest); - stack.Push(entryPath); - continue; - } - - if (isSymlink) - { - var linkTarget = GetLinkTarget(entryPath); - _entries[normalized] = FileEntry.Symlink(normalized, layer.Digest, linkTarget, parent: GetParent(normalized)); - continue; - } - - var isExecutable = InferExecutable(entryPath, attributes); - var contentProvider = FileContentProvider.FromFile(entryPath); - var shebang = ExtractShebang(contentProvider.Peek(MaxCachedTextBytes)); - - EnsureAncestry(normalized, layer.Digest); - _entries[normalized] = FileEntry.File( - normalized, - layer.Digest, - isExecutable, - shebang, - contentProvider, - parent: GetParent(normalized)); - } - } - } - - public void ApplyArchiveLayer(LayerArchive layer) - { - ArgumentNullException.ThrowIfNull(layer); - if (!File.Exists(layer.Path)) - { - throw new FileNotFoundException("Layer archive not found.", layer.Path); - } - - using var archiveStream = File.OpenRead(layer.Path); - using var reader = new TarReader(OpenPossiblyCompressedStream(archiveStream, layer.Path), leaveOpen: false); - - TarEntry? 
entry; - while ((entry = reader.GetNextEntry()) is not null) - { - var normalized = NormalizePath(entry.Name); - var fileName = IOPath.GetFileName(normalized); - - if (IsWhiteoutEntry(fileName)) - { - HandleWhiteout(normalized); - continue; - } - - switch (entry.EntryType) - { - case TarEntryType.Directory: - ApplyDirectoryEntry(normalized, layer.Digest); - break; - case TarEntryType.RegularFile: - case TarEntryType.V7RegularFile: - case TarEntryType.ContiguousFile: - { - var contentProvider = FileContentProvider.FromTarEntry(entry); - var preview = contentProvider.Peek(MaxCachedTextBytes); - var shebang = ExtractShebang(preview); - var isExecutable = InferExecutable(entry); - - EnsureAncestry(normalized, layer.Digest); - _entries[normalized] = FileEntry.File( - normalized, - layer.Digest, - isExecutable, - shebang, - contentProvider, - parent: GetParent(normalized)); - break; - } - case TarEntryType.SymbolicLink: - case TarEntryType.HardLink: - { - EnsureAncestry(normalized, layer.Digest); - var target = string.IsNullOrWhiteSpace(entry.LinkName) - ? null - : entry.LinkName; - _entries[normalized] = FileEntry.Symlink( - normalized, - layer.Digest, - target, - parent: GetParent(normalized)); - break; - } - default: - // Ignore other entry types for now. - break; - } - } - } - - public IDictionary<string, FileEntry> Build() - { - return _entries; - } - - private void ApplyDirectoryEntry(string path, string? digest) - { - var normalized = NormalizeDirectory(path); - EnsureAncestry(normalized, digest); - _entries[normalized] = FileEntry.Directory(normalized, digest); - } - - private void EnsureAncestry(string path, string? digest) - { - var current = GetParent(path); - while (!string.Equals(current, "/", StringComparison.Ordinal)) - { - if (_entries.TryGetValue(current, out var existing) && existing.Kind == FileEntryKind.Directory) - { - break; - } - - _entries[current] = FileEntry.Directory(current, digest); - current = GetParent(current); - } - - if (!_entries.ContainsKey("/")) - { - _entries["/"] = FileEntry.Directory("/", digest); - } - } - - private void HandleWhiteout(string path) - { - var fileName = IOPath.GetFileName(path); - if (string.Equals(fileName, ".wh..wh..opq", StringComparison.Ordinal)) - { - var directory = NormalizeDirectory(IOPath.GetDirectoryName(path)); - var keys = _entries.Keys - .Where(k => k.StartsWith(directory + "/", StringComparison.Ordinal)) - .ToArray(); - - foreach (var key in keys) - { - _entries.Remove(key); - } - - return; - } - - if (!fileName.StartsWith(".wh.", StringComparison.Ordinal)) - { - return; - } - - var targetName = fileName[4..]; - var directoryPath = NormalizeDirectory(IOPath.GetDirectoryName(path)); - var targetPath = directoryPath == "/" - ? "/" + targetName - : directoryPath + "/" + targetName; - - var toRemove = _entries.Keys - .Where(k => string.Equals(k, targetPath, StringComparison.Ordinal) || - k.StartsWith(targetPath + "/", StringComparison.Ordinal)) - .ToArray(); - - foreach (var key in toRemove) - { - _entries.Remove(key); - } - } - - private static bool IsWhiteoutEntry(string? 
fileName) - { - if (string.IsNullOrEmpty(fileName)) - { - return false; - } - - return fileName == ".wh..wh..opq" || fileName.StartsWith(".wh.", StringComparison.Ordinal); - } - - private static Stream OpenPossiblyCompressedStream(Stream source, string path) - { - if (path.EndsWith(".gz", StringComparison.OrdinalIgnoreCase) || - path.EndsWith(".tgz", StringComparison.OrdinalIgnoreCase)) - { - return new GZipStream(source, CompressionMode.Decompress, leaveOpen: false); - } - - return source; - } - - private static string? GetLinkTarget(string entryPath) - { - try - { - var fileInfo = new FileInfo(entryPath); - if (!string.IsNullOrEmpty(fileInfo.LinkTarget)) - { - return fileInfo.LinkTarget; - } - - var directoryInfo = new DirectoryInfo(entryPath); - return directoryInfo.LinkTarget; - } - catch - { - return null; - } - } - - private static string GetParent(string path) - { - var directory = IOPath.GetDirectoryName(path); - return NormalizeDirectory(directory); - } - - private static bool InferExecutable(string path, FileAttributes attributes) - { - if (OperatingSystem.IsWindows()) - { - var extension = IOPath.GetExtension(path); - return extension is ".exe" or ".bat" or ".cmd" or ".ps1" or ".com" or ".sh" - or ".py" or ".pl" or ".rb" or ".js"; - } - - try - { -#if NET8_0_OR_GREATER - var mode = File.GetUnixFileMode(path); - return mode.HasFlag(UnixFileMode.UserExecute) || - mode.HasFlag(UnixFileMode.GroupExecute) || - mode.HasFlag(UnixFileMode.OtherExecute); -#else - return true; -#endif - } - catch - { - return true; - } - } - - private static bool InferExecutable(TarEntry entry) - { - var mode = (UnixFileMode)entry.Mode; - return mode.HasFlag(UnixFileMode.UserExecute) || - mode.HasFlag(UnixFileMode.GroupExecute) || - mode.HasFlag(UnixFileMode.OtherExecute); - } - - private static string? ExtractShebang(string? contentPreview) - { - if (string.IsNullOrEmpty(contentPreview)) - { - return null; - } - - using var reader = new StringReader(contentPreview); - var firstLine = reader.ReadLine(); - - if (firstLine is null || !firstLine.StartsWith("#!", StringComparison.Ordinal)) - { - return null; - } - - return firstLine[2..].Trim(); - } - } - - private sealed class FileEntry - { - private readonly FileContentProvider? _content; - - private FileEntry( - string path, - string? layerDigest, - FileEntryKind kind, - bool isExecutable, - string? shebang, - FileContentProvider? content, - string parent, - string? symlinkTarget) - { - Path = path; - LayerDigest = layerDigest; - Kind = kind; - IsExecutable = isExecutable; - Shebang = shebang; - _content = content; - Parent = parent; - SymlinkTarget = symlinkTarget; - } - - public string Path { get; } - - public string? LayerDigest { get; } - - public FileEntryKind Kind { get; } - - public bool IsExecutable { get; } - - public string? Shebang { get; } - - public string Parent { get; } - - public string? SymlinkTarget { get; } - - public RootFileDescriptor ToDescriptor(string resolvedPath) - => new( - resolvedPath, - LayerDigest, - IsExecutable, - Kind == FileEntryKind.Directory, - Shebang); - - public bool TryReadText(out string content) - { - if (Kind != FileEntryKind.File || _content is null) - { - content = string.Empty; - return false; - } - - return _content.TryRead(out content); - } - - public static FileEntry File( - string path, - string? digest, - bool isExecutable, - string? 
shebang, - FileContentProvider content, - string parent) - => new(path, digest, FileEntryKind.File, isExecutable, shebang, content, parent, symlinkTarget: null); - - public static FileEntry Directory(string path, string? digest) - => new(path, digest, FileEntryKind.Directory, isExecutable: false, shebang: null, content: null, parent: GetParent(path), symlinkTarget: null); - - public static FileEntry Symlink(string path, string? digest, string? target, string parent) - => new(path, digest, FileEntryKind.Symlink, isExecutable: false, shebang: null, content: null, parent, target); - - private static string GetParent(string path) - => NormalizeDirectory(IOPath.GetDirectoryName(path)); - } - - private enum FileEntryKind - { - File, - Directory, - Symlink - } - - private sealed class FileContentProvider - { - private readonly Func<string?> _factory; - private readonly Lazy<string?> _cached; - - private FileContentProvider(Func<string?> factory) - { - _factory = factory; - _cached = new Lazy<string?>(() => _factory(), LazyThreadSafetyMode.ExecutionAndPublication); - } - - public static FileContentProvider FromFile(string path) - => new(() => - { - try - { - return File.ReadAllText(path); - } - catch - { - return null; - } - }); - - public static FileContentProvider FromTarEntry(TarEntry entry) - { - return new FileContentProvider(() => - { - using var stream = new MemoryStream(); - entry.DataStream?.CopyTo(stream); - if (stream.Length > MaxCachedTextBytes) - { - return null; - } - - stream.Position = 0; - using var reader = new StreamReader(stream, Encoding.UTF8, detectEncodingFromByteOrderMarks: true, leaveOpen: true); - return reader.ReadToEnd(); - }); - } - - public string? Peek(int maxBytes) - { - var content = _cached.Value; - if (content is null) - { - return null; - } - - if (content.Length * sizeof(char) <= maxBytes) - { - return content; - } - - return content[..Math.Min(content.Length, maxBytes / sizeof(char))]; - } - - public bool TryRead(out string content) - { - var value = _cached.Value; - if (value is null) - { - content = string.Empty; - return false; - } - - content = value; - return true; - } - } -} +using System.Collections.Immutable; +using System.Formats.Tar; +using System.IO; +using System.IO.Compression; +using System.Linq; +using System.Text; +using System.Threading; +using IOPath = System.IO.Path; + +namespace StellaOps.Scanner.EntryTrace; + +/// <summary> +/// Represents an <see cref="IRootFileSystem"/> backed by OCI image layers. +/// </summary> +public sealed class LayeredRootFileSystem : IRootFileSystem +{ + private const int MaxSymlinkDepth = 32; + private const int MaxCachedTextBytes = 1_048_576; // 1 MiB + + private readonly ImmutableDictionary<string, FileEntry> _entries; + + private LayeredRootFileSystem(IDictionary<string, FileEntry> entries) + { + _entries = entries.ToImmutableDictionary(StringComparer.Ordinal); + } + + /// <summary> + /// Describes a directory on disk containing a single layer's contents. + /// </summary> + public sealed record LayerDirectory(string Digest, string Path); + + /// <summary> + /// Describes a tar archive on disk containing a single layer's contents. + /// </summary> + public sealed record LayerArchive(string Digest, string Path); + + /// <summary> + /// Builds a root filesystem by applying the provided directory layers in order. 
+ /// </summary> + public static LayeredRootFileSystem FromDirectories(IEnumerable<LayerDirectory> layers) + { + if (layers is null) + { + throw new ArgumentNullException(nameof(layers)); + } + + var builder = new Builder(); + foreach (var layer in layers) + { + builder.ApplyDirectoryLayer(layer); + } + + return new LayeredRootFileSystem(builder.Build()); + } + + /// <summary> + /// Builds a root filesystem by applying the provided tar archive layers in order. + /// </summary> + public static LayeredRootFileSystem FromArchives(IEnumerable<LayerArchive> layers) + { + if (layers is null) + { + throw new ArgumentNullException(nameof(layers)); + } + + var builder = new Builder(); + foreach (var layer in layers) + { + builder.ApplyArchiveLayer(layer); + } + + return new LayeredRootFileSystem(builder.Build()); + } + + public bool TryResolveExecutable(string name, IReadOnlyList<string> searchPaths, out RootFileDescriptor descriptor) + { + descriptor = null!; + + if (string.IsNullOrWhiteSpace(name)) + { + return false; + } + + if (name.Contains('/', StringComparison.Ordinal)) + { + return TryResolveExecutableByPath(name, out descriptor); + } + + foreach (var searchPath in searchPaths) + { + if (string.IsNullOrWhiteSpace(searchPath)) + { + continue; + } + + var candidate = NormalizePath(searchPath, name); + if (TryResolveExecutableByPath(candidate, out descriptor)) + { + return true; + } + } + + return false; + } + + public bool TryReadAllText(string path, out RootFileDescriptor descriptor, out string content) + { + descriptor = null!; + content = string.Empty; + + if (!TryResolveFile(path, out var entry, out var resolvedPath)) + { + return false; + } + + if (!entry.TryReadText(out content)) + { + return false; + } + + descriptor = entry.ToDescriptor(resolvedPath); + return true; + } + + public ImmutableArray<RootFileDescriptor> EnumerateDirectory(string path) + { + var normalizedDirectory = NormalizeDirectory(path); + var results = ImmutableArray.CreateBuilder<RootFileDescriptor>(); + + foreach (var entry in _entries.Values) + { + if (!string.Equals(entry.Parent, normalizedDirectory, StringComparison.Ordinal)) + { + continue; + } + + if (entry.Kind == FileEntryKind.Symlink) + { + if (TryResolveFile(entry.Path, out var resolved, out var resolvedPath)) + { + results.Add(resolved.ToDescriptor(resolvedPath)); + } + continue; + } + + results.Add(entry.ToDescriptor(entry.Path)); + } + + return results.ToImmutable().Sort(static (left, right) => string.CompareOrdinal(left.Path, right.Path)); + } + + public bool DirectoryExists(string path) + { + var normalized = NormalizeDirectory(path); + if (_entries.TryGetValue(normalized, out var entry)) + { + return entry.Kind == FileEntryKind.Directory; + } + + return _entries.Keys.Any(k => k.StartsWith(normalized + "/", StringComparison.Ordinal)); + } + + private bool TryResolveExecutableByPath(string path, out RootFileDescriptor descriptor) + { + descriptor = null!; + + if (!TryResolveFile(path, out var entry, out var resolvedPath)) + { + return false; + } + + if (!entry.IsExecutable) + { + return false; + } + + descriptor = entry.ToDescriptor(resolvedPath); + return true; + } + + private bool TryResolveFile(string path, out FileEntry entry, out string resolvedPath) + { + var normalized = NormalizePath(path); + var visited = new HashSet<string>(StringComparer.Ordinal); + var depth = 0; + + while (true) + { + if (++depth > MaxSymlinkDepth) + { + break; + } + + if (!visited.Add(normalized)) + { + break; + } + + if (!_entries.TryGetValue(normalized, out var current)) 
+ { + break; + } + + switch (current.Kind) + { + case FileEntryKind.File: + entry = current; + resolvedPath = normalized; + return true; + case FileEntryKind.Symlink: + normalized = ResolveSymlink(normalized, current.SymlinkTarget); + continue; + case FileEntryKind.Directory: + // cannot resolve to directory + entry = null!; + resolvedPath = string.Empty; + return false; + default: + entry = null!; + resolvedPath = string.Empty; + return false; + } + } + + entry = null!; + resolvedPath = string.Empty; + return false; + } + + private static string ResolveSymlink(string sourcePath, string? target) + { + if (string.IsNullOrWhiteSpace(target)) + { + return sourcePath; + } + + if (target.StartsWith("/", StringComparison.Ordinal)) + { + return NormalizePath(target); + } + + var directory = NormalizeDirectory(IOPath.GetDirectoryName(sourcePath)); + return NormalizePath(directory, target); + } + + private static string NormalizeDirectory(string? path) + { + var normalized = NormalizePath(path); + if (normalized.Length > 1 && normalized.EndsWith("/", StringComparison.Ordinal)) + { + normalized = normalized[..^1]; + } + + return normalized; + } + + private static string NormalizePath(string? path) + => NormalizePath("/", path); + + private static string NormalizePath(string basePath, string? relative) + { + var combined = string.IsNullOrWhiteSpace(relative) + ? basePath + : relative.StartsWith("/", StringComparison.Ordinal) + ? relative + : $"{basePath}/{relative}"; + + var text = combined.Replace('\\', '/'); + if (!text.StartsWith("/", StringComparison.Ordinal)) + { + text = "/" + text; + } + + var parts = new Stack<string>(); + foreach (var segment in text.Split('/', StringSplitOptions.RemoveEmptyEntries)) + { + if (segment == ".") + { + continue; + } + + if (segment == "..") + { + if (parts.Count > 0) + { + parts.Pop(); + } + continue; + } + + parts.Push(segment); + } + + if (parts.Count == 0) + { + return "/"; + } + + var builder = new StringBuilder(); + foreach (var segment in parts.Reverse()) + { + builder.Append('/').Append(segment); + } + + return builder.ToString(); + } + + private sealed class Builder + { + private readonly Dictionary<string, FileEntry> _entries = new(StringComparer.Ordinal); + + public Builder() + { + _entries["/"] = FileEntry.Directory("/", null); + } + + public void ApplyDirectoryLayer(LayerDirectory layer) + { + ArgumentNullException.ThrowIfNull(layer); + var root = layer.Path; + if (!Directory.Exists(root)) + { + throw new DirectoryNotFoundException($"Layer directory '{root}' was not found."); + } + + ApplyDirectoryEntry("/", layer.Digest); + + var stack = new Stack<string>(); + stack.Push(root); + + while (stack.Count > 0) + { + var current = stack.Pop(); + foreach (var entryPath in Directory.EnumerateFileSystemEntries(current, "*", SearchOption.TopDirectoryOnly)) + { + var relative = IOPath.GetRelativePath(root, entryPath); + var normalized = NormalizePath(relative); + var fileName = IOPath.GetFileName(normalized); + + if (IsWhiteoutEntry(fileName)) + { + HandleWhiteout(normalized); + continue; + } + + var attributes = File.GetAttributes(entryPath); + var isSymlink = attributes.HasFlag(FileAttributes.ReparsePoint); + + if (Directory.Exists(entryPath) && !isSymlink) + { + ApplyDirectoryEntry(normalized, layer.Digest); + stack.Push(entryPath); + continue; + } + + if (isSymlink) + { + var linkTarget = GetLinkTarget(entryPath); + _entries[normalized] = FileEntry.Symlink(normalized, layer.Digest, linkTarget, parent: GetParent(normalized)); + continue; + } + + var 
isExecutable = InferExecutable(entryPath, attributes); + var contentProvider = FileContentProvider.FromFile(entryPath); + var shebang = ExtractShebang(contentProvider.Peek(MaxCachedTextBytes)); + + EnsureAncestry(normalized, layer.Digest); + _entries[normalized] = FileEntry.File( + normalized, + layer.Digest, + isExecutable, + shebang, + contentProvider, + parent: GetParent(normalized)); + } + } + } + + public void ApplyArchiveLayer(LayerArchive layer) + { + ArgumentNullException.ThrowIfNull(layer); + if (!File.Exists(layer.Path)) + { + throw new FileNotFoundException("Layer archive not found.", layer.Path); + } + + using var archiveStream = File.OpenRead(layer.Path); + using var reader = new TarReader(OpenPossiblyCompressedStream(archiveStream, layer.Path), leaveOpen: false); + + TarEntry? entry; + while ((entry = reader.GetNextEntry()) is not null) + { + var normalized = NormalizePath(entry.Name); + var fileName = IOPath.GetFileName(normalized); + + if (IsWhiteoutEntry(fileName)) + { + HandleWhiteout(normalized); + continue; + } + + switch (entry.EntryType) + { + case TarEntryType.Directory: + ApplyDirectoryEntry(normalized, layer.Digest); + break; + case TarEntryType.RegularFile: + case TarEntryType.V7RegularFile: + case TarEntryType.ContiguousFile: + { + var contentProvider = FileContentProvider.FromTarEntry(entry); + var preview = contentProvider.Peek(MaxCachedTextBytes); + var shebang = ExtractShebang(preview); + var isExecutable = InferExecutable(entry); + + EnsureAncestry(normalized, layer.Digest); + _entries[normalized] = FileEntry.File( + normalized, + layer.Digest, + isExecutable, + shebang, + contentProvider, + parent: GetParent(normalized)); + break; + } + case TarEntryType.SymbolicLink: + case TarEntryType.HardLink: + { + EnsureAncestry(normalized, layer.Digest); + var target = string.IsNullOrWhiteSpace(entry.LinkName) + ? null + : entry.LinkName; + _entries[normalized] = FileEntry.Symlink( + normalized, + layer.Digest, + target, + parent: GetParent(normalized)); + break; + } + default: + // Ignore other entry types for now. + break; + } + } + } + + public IDictionary<string, FileEntry> Build() + { + return _entries; + } + + private void ApplyDirectoryEntry(string path, string? digest) + { + var normalized = NormalizeDirectory(path); + EnsureAncestry(normalized, digest); + _entries[normalized] = FileEntry.Directory(normalized, digest); + } + + private void EnsureAncestry(string path, string? digest) + { + var current = GetParent(path); + while (!string.Equals(current, "/", StringComparison.Ordinal)) + { + if (_entries.TryGetValue(current, out var existing) && existing.Kind == FileEntryKind.Directory) + { + break; + } + + _entries[current] = FileEntry.Directory(current, digest); + current = GetParent(current); + } + + if (!_entries.ContainsKey("/")) + { + _entries["/"] = FileEntry.Directory("/", digest); + } + } + + private void HandleWhiteout(string path) + { + var fileName = IOPath.GetFileName(path); + if (string.Equals(fileName, ".wh..wh..opq", StringComparison.Ordinal)) + { + var directory = NormalizeDirectory(IOPath.GetDirectoryName(path)); + var keys = _entries.Keys + .Where(k => k.StartsWith(directory + "/", StringComparison.Ordinal)) + .ToArray(); + + foreach (var key in keys) + { + _entries.Remove(key); + } + + return; + } + + if (!fileName.StartsWith(".wh.", StringComparison.Ordinal)) + { + return; + } + + var targetName = fileName[4..]; + var directoryPath = NormalizeDirectory(IOPath.GetDirectoryName(path)); + var targetPath = directoryPath == "/" + ? 
"/" + targetName + : directoryPath + "/" + targetName; + + var toRemove = _entries.Keys + .Where(k => string.Equals(k, targetPath, StringComparison.Ordinal) || + k.StartsWith(targetPath + "/", StringComparison.Ordinal)) + .ToArray(); + + foreach (var key in toRemove) + { + _entries.Remove(key); + } + } + + private static bool IsWhiteoutEntry(string? fileName) + { + if (string.IsNullOrEmpty(fileName)) + { + return false; + } + + return fileName == ".wh..wh..opq" || fileName.StartsWith(".wh.", StringComparison.Ordinal); + } + + private static Stream OpenPossiblyCompressedStream(Stream source, string path) + { + if (path.EndsWith(".gz", StringComparison.OrdinalIgnoreCase) || + path.EndsWith(".tgz", StringComparison.OrdinalIgnoreCase)) + { + return new GZipStream(source, CompressionMode.Decompress, leaveOpen: false); + } + + return source; + } + + private static string? GetLinkTarget(string entryPath) + { + try + { + var fileInfo = new FileInfo(entryPath); + if (!string.IsNullOrEmpty(fileInfo.LinkTarget)) + { + return fileInfo.LinkTarget; + } + + var directoryInfo = new DirectoryInfo(entryPath); + return directoryInfo.LinkTarget; + } + catch + { + return null; + } + } + + private static string GetParent(string path) + { + var directory = IOPath.GetDirectoryName(path); + return NormalizeDirectory(directory); + } + + private static bool InferExecutable(string path, FileAttributes attributes) + { + if (OperatingSystem.IsWindows()) + { + var extension = IOPath.GetExtension(path); + return extension is ".exe" or ".bat" or ".cmd" or ".ps1" or ".com" or ".sh" + or ".py" or ".pl" or ".rb" or ".js"; + } + + try + { +#if NET8_0_OR_GREATER + var mode = File.GetUnixFileMode(path); + return mode.HasFlag(UnixFileMode.UserExecute) || + mode.HasFlag(UnixFileMode.GroupExecute) || + mode.HasFlag(UnixFileMode.OtherExecute); +#else + return true; +#endif + } + catch + { + return true; + } + } + + private static bool InferExecutable(TarEntry entry) + { + var mode = (UnixFileMode)entry.Mode; + return mode.HasFlag(UnixFileMode.UserExecute) || + mode.HasFlag(UnixFileMode.GroupExecute) || + mode.HasFlag(UnixFileMode.OtherExecute); + } + + private static string? ExtractShebang(string? contentPreview) + { + if (string.IsNullOrEmpty(contentPreview)) + { + return null; + } + + using var reader = new StringReader(contentPreview); + var firstLine = reader.ReadLine(); + + if (firstLine is null || !firstLine.StartsWith("#!", StringComparison.Ordinal)) + { + return null; + } + + return firstLine[2..].Trim(); + } + } + + private sealed class FileEntry + { + private readonly FileContentProvider? _content; + + private FileEntry( + string path, + string? layerDigest, + FileEntryKind kind, + bool isExecutable, + string? shebang, + FileContentProvider? content, + string parent, + string? symlinkTarget) + { + Path = path; + LayerDigest = layerDigest; + Kind = kind; + IsExecutable = isExecutable; + Shebang = shebang; + _content = content; + Parent = parent; + SymlinkTarget = symlinkTarget; + } + + public string Path { get; } + + public string? LayerDigest { get; } + + public FileEntryKind Kind { get; } + + public bool IsExecutable { get; } + + public string? Shebang { get; } + + public string Parent { get; } + + public string? 
SymlinkTarget { get; } + + public RootFileDescriptor ToDescriptor(string resolvedPath) + => new( + resolvedPath, + LayerDigest, + IsExecutable, + Kind == FileEntryKind.Directory, + Shebang); + + public bool TryReadText(out string content) + { + if (Kind != FileEntryKind.File || _content is null) + { + content = string.Empty; + return false; + } + + return _content.TryRead(out content); + } + + public static FileEntry File( + string path, + string? digest, + bool isExecutable, + string? shebang, + FileContentProvider content, + string parent) + => new(path, digest, FileEntryKind.File, isExecutable, shebang, content, parent, symlinkTarget: null); + + public static FileEntry Directory(string path, string? digest) + => new(path, digest, FileEntryKind.Directory, isExecutable: false, shebang: null, content: null, parent: GetParent(path), symlinkTarget: null); + + public static FileEntry Symlink(string path, string? digest, string? target, string parent) + => new(path, digest, FileEntryKind.Symlink, isExecutable: false, shebang: null, content: null, parent, target); + + private static string GetParent(string path) + => NormalizeDirectory(IOPath.GetDirectoryName(path)); + } + + private enum FileEntryKind + { + File, + Directory, + Symlink + } + + private sealed class FileContentProvider + { + private readonly Func<string?> _factory; + private readonly Lazy<string?> _cached; + + private FileContentProvider(Func<string?> factory) + { + _factory = factory; + _cached = new Lazy<string?>(() => _factory(), LazyThreadSafetyMode.ExecutionAndPublication); + } + + public static FileContentProvider FromFile(string path) + => new(() => + { + try + { + return File.ReadAllText(path); + } + catch + { + return null; + } + }); + + public static FileContentProvider FromTarEntry(TarEntry entry) + { + return new FileContentProvider(() => + { + using var stream = new MemoryStream(); + entry.DataStream?.CopyTo(stream); + if (stream.Length > MaxCachedTextBytes) + { + return null; + } + + stream.Position = 0; + using var reader = new StreamReader(stream, Encoding.UTF8, detectEncodingFromByteOrderMarks: true, leaveOpen: true); + return reader.ReadToEnd(); + }); + } + + public string? 
Peek(int maxBytes) + { + var content = _cached.Value; + if (content is null) + { + return null; + } + + if (content.Length * sizeof(char) <= maxBytes) + { + return content; + } + + return content[..Math.Min(content.Length, maxBytes / sizeof(char))]; + } + + public bool TryRead(out string content) + { + var value = _cached.Value; + if (value is null) + { + content = string.Empty; + return false; + } + + content = value; + return true; + } + } +} diff --git a/src/StellaOps.Scanner.EntryTrace/IEntryTraceAnalyzer.cs b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/IEntryTraceAnalyzer.cs similarity index 100% rename from src/StellaOps.Scanner.EntryTrace/IEntryTraceAnalyzer.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/IEntryTraceAnalyzer.cs diff --git a/src/StellaOps.Scanner.EntryTrace/Oci/OciImageConfig.cs b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Oci/OciImageConfig.cs similarity index 96% rename from src/StellaOps.Scanner.EntryTrace/Oci/OciImageConfig.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Oci/OciImageConfig.cs index bb7a22b6..7f964f13 100644 --- a/src/StellaOps.Scanner.EntryTrace/Oci/OciImageConfig.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Oci/OciImageConfig.cs @@ -1,129 +1,129 @@ -using System.Collections.Immutable; -using System.IO; -using System.Text.Json; -using System.Text.Json.Serialization; - -namespace StellaOps.Scanner.EntryTrace; - -/// <summary> -/// Represents the deserialized OCI image config document. -/// </summary> -internal sealed class OciImageConfiguration -{ - [JsonPropertyName("config")] - public OciImageConfig? Config { get; init; } - - [JsonPropertyName("container_config")] - public OciImageConfig? ContainerConfig { get; init; } -} - -/// <summary> -/// Logical representation of the OCI image config fields used by EntryTrace. -/// </summary> -public sealed class OciImageConfig -{ - [JsonPropertyName("Env")] - [JsonConverter(typeof(FlexibleStringListConverter))] - public ImmutableArray<string> Environment { get; init; } = ImmutableArray<string>.Empty; - - [JsonPropertyName("Entrypoint")] - [JsonConverter(typeof(FlexibleStringListConverter))] - public ImmutableArray<string> Entrypoint { get; init; } = ImmutableArray<string>.Empty; - - [JsonPropertyName("Cmd")] - [JsonConverter(typeof(FlexibleStringListConverter))] - public ImmutableArray<string> Command { get; init; } = ImmutableArray<string>.Empty; - - [JsonPropertyName("WorkingDir")] - public string? WorkingDirectory { get; init; } - - [JsonPropertyName("User")] - public string? User { get; init; } -} - -/// <summary> -/// Loads <see cref="OciImageConfig"/> instances from OCI config JSON. -/// </summary> -public static class OciImageConfigLoader -{ - private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) - { - PropertyNameCaseInsensitive = true - }; - - public static OciImageConfig Load(string filePath) - { - ArgumentException.ThrowIfNullOrWhiteSpace(filePath); - using var stream = File.OpenRead(filePath); - return Load(stream); - } - - public static OciImageConfig Load(Stream stream) - { - ArgumentNullException.ThrowIfNull(stream); - - var configuration = JsonSerializer.Deserialize<OciImageConfiguration>(stream, SerializerOptions) - ?? 
throw new InvalidDataException("OCI image config is empty or invalid."); - - if (configuration.Config is not null) - { - return configuration.Config; - } - - if (configuration.ContainerConfig is not null) - { - return configuration.ContainerConfig; - } - - throw new InvalidDataException("OCI image config does not include a config section."); - } -} - -internal sealed class FlexibleStringListConverter : JsonConverter<ImmutableArray<string>> -{ - public override ImmutableArray<string> Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) - { - if (reader.TokenType == JsonTokenType.Null) - { - return ImmutableArray<string>.Empty; - } - - if (reader.TokenType == JsonTokenType.StartArray) - { - var builder = ImmutableArray.CreateBuilder<string>(); - while (reader.Read()) - { - if (reader.TokenType == JsonTokenType.EndArray) - { - return builder.ToImmutable(); - } - - if (reader.TokenType == JsonTokenType.String) - { - builder.Add(reader.GetString() ?? string.Empty); - continue; - } - - throw new JsonException($"Expected string elements in array but found {reader.TokenType}."); - } - } - - if (reader.TokenType == JsonTokenType.String) - { - return ImmutableArray.Create(reader.GetString() ?? string.Empty); - } - - throw new JsonException($"Unsupported JSON token {reader.TokenType} for string array."); - } - - public override void Write(Utf8JsonWriter writer, ImmutableArray<string> value, JsonSerializerOptions options) - { - writer.WriteStartArray(); - foreach (var entry in value) - { - writer.WriteStringValue(entry); - } - - writer.WriteEndArray(); - } -} +using System.Collections.Immutable; +using System.IO; +using System.Text.Json; +using System.Text.Json.Serialization; + +namespace StellaOps.Scanner.EntryTrace; + +/// <summary> +/// Represents the deserialized OCI image config document. +/// </summary> +internal sealed class OciImageConfiguration +{ + [JsonPropertyName("config")] + public OciImageConfig? Config { get; init; } + + [JsonPropertyName("container_config")] + public OciImageConfig? ContainerConfig { get; init; } +} + +/// <summary> +/// Logical representation of the OCI image config fields used by EntryTrace. +/// </summary> +public sealed class OciImageConfig +{ + [JsonPropertyName("Env")] + [JsonConverter(typeof(FlexibleStringListConverter))] + public ImmutableArray<string> Environment { get; init; } = ImmutableArray<string>.Empty; + + [JsonPropertyName("Entrypoint")] + [JsonConverter(typeof(FlexibleStringListConverter))] + public ImmutableArray<string> Entrypoint { get; init; } = ImmutableArray<string>.Empty; + + [JsonPropertyName("Cmd")] + [JsonConverter(typeof(FlexibleStringListConverter))] + public ImmutableArray<string> Command { get; init; } = ImmutableArray<string>.Empty; + + [JsonPropertyName("WorkingDir")] + public string? WorkingDirectory { get; init; } + + [JsonPropertyName("User")] + public string? User { get; init; } +} + +/// <summary> +/// Loads <see cref="OciImageConfig"/> instances from OCI config JSON. 
+/// </summary> +public static class OciImageConfigLoader +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + PropertyNameCaseInsensitive = true + }; + + public static OciImageConfig Load(string filePath) + { + ArgumentException.ThrowIfNullOrWhiteSpace(filePath); + using var stream = File.OpenRead(filePath); + return Load(stream); + } + + public static OciImageConfig Load(Stream stream) + { + ArgumentNullException.ThrowIfNull(stream); + + var configuration = JsonSerializer.Deserialize<OciImageConfiguration>(stream, SerializerOptions) + ?? throw new InvalidDataException("OCI image config is empty or invalid."); + + if (configuration.Config is not null) + { + return configuration.Config; + } + + if (configuration.ContainerConfig is not null) + { + return configuration.ContainerConfig; + } + + throw new InvalidDataException("OCI image config does not include a config section."); + } +} + +internal sealed class FlexibleStringListConverter : JsonConverter<ImmutableArray<string>> +{ + public override ImmutableArray<string> Read(ref Utf8JsonReader reader, Type typeToConvert, JsonSerializerOptions options) + { + if (reader.TokenType == JsonTokenType.Null) + { + return ImmutableArray<string>.Empty; + } + + if (reader.TokenType == JsonTokenType.StartArray) + { + var builder = ImmutableArray.CreateBuilder<string>(); + while (reader.Read()) + { + if (reader.TokenType == JsonTokenType.EndArray) + { + return builder.ToImmutable(); + } + + if (reader.TokenType == JsonTokenType.String) + { + builder.Add(reader.GetString() ?? string.Empty); + continue; + } + + throw new JsonException($"Expected string elements in array but found {reader.TokenType}."); + } + } + + if (reader.TokenType == JsonTokenType.String) + { + return ImmutableArray.Create(reader.GetString() ?? 
string.Empty); + } + + throw new JsonException($"Unsupported JSON token {reader.TokenType} for string array."); + } + + public override void Write(Utf8JsonWriter writer, ImmutableArray<string> value, JsonSerializerOptions options) + { + writer.WriteStartArray(); + foreach (var entry in value) + { + writer.WriteStringValue(entry); + } + + writer.WriteEndArray(); + } +} diff --git a/src/StellaOps.Scanner.EntryTrace/Parsing/ShellNodes.cs b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Parsing/ShellNodes.cs similarity index 100% rename from src/StellaOps.Scanner.EntryTrace/Parsing/ShellNodes.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Parsing/ShellNodes.cs diff --git a/src/StellaOps.Scanner.EntryTrace/Parsing/ShellParser.cs b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Parsing/ShellParser.cs similarity index 100% rename from src/StellaOps.Scanner.EntryTrace/Parsing/ShellParser.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Parsing/ShellParser.cs diff --git a/src/StellaOps.Scanner.EntryTrace/Parsing/ShellToken.cs b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Parsing/ShellToken.cs similarity index 100% rename from src/StellaOps.Scanner.EntryTrace/Parsing/ShellToken.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Parsing/ShellToken.cs diff --git a/src/StellaOps.Scanner.EntryTrace/Parsing/ShellTokenizer.cs b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Parsing/ShellTokenizer.cs similarity index 100% rename from src/StellaOps.Scanner.EntryTrace/Parsing/ShellTokenizer.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/Parsing/ShellTokenizer.cs diff --git a/src/StellaOps.Scanner.EntryTrace/ServiceCollectionExtensions.cs b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/ServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Scanner.EntryTrace/ServiceCollectionExtensions.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/ServiceCollectionExtensions.cs diff --git a/src/StellaOps.Scanner.EntryTrace/StellaOps.Scanner.EntryTrace.csproj b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/StellaOps.Scanner.EntryTrace.csproj similarity index 84% rename from src/StellaOps.Scanner.EntryTrace/StellaOps.Scanner.EntryTrace.csproj rename to src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/StellaOps.Scanner.EntryTrace.csproj index b2c49e6f..479a42e4 100644 --- a/src/StellaOps.Scanner.EntryTrace/StellaOps.Scanner.EntryTrace.csproj +++ b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/StellaOps.Scanner.EntryTrace.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -13,6 +14,6 @@ <PackageReference Include="Microsoft.Extensions.Options.ConfigurationExtensions" Version="10.0.0-rc.2.25502.107" /> </ItemGroup> <ItemGroup> - <ProjectReference Include="..\StellaOps.Plugin\StellaOps.Plugin.csproj" /> + <ProjectReference Include="../../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Scanner.EntryTrace/TASKS.md b/src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/TASKS.md similarity index 100% rename from src/StellaOps.Scanner.EntryTrace/TASKS.md rename to src/Scanner/__Libraries/StellaOps.Scanner.EntryTrace/TASKS.md diff --git a/src/StellaOps.Scanner.Queue/AGENTS.md b/src/Scanner/__Libraries/StellaOps.Scanner.Queue/AGENTS.md similarity index 100% rename from 
src/StellaOps.Scanner.Queue/AGENTS.md rename to src/Scanner/__Libraries/StellaOps.Scanner.Queue/AGENTS.md diff --git a/src/StellaOps.Scanner.Queue/IScanQueue.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Queue/IScanQueue.cs similarity index 100% rename from src/StellaOps.Scanner.Queue/IScanQueue.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Queue/IScanQueue.cs diff --git a/src/StellaOps.Scanner.Queue/IScanQueueLease.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Queue/IScanQueueLease.cs similarity index 100% rename from src/StellaOps.Scanner.Queue/IScanQueueLease.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Queue/IScanQueueLease.cs diff --git a/src/StellaOps.Scanner.Queue/Nats/NatsScanQueue.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Queue/Nats/NatsScanQueue.cs similarity index 100% rename from src/StellaOps.Scanner.Queue/Nats/NatsScanQueue.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Queue/Nats/NatsScanQueue.cs diff --git a/src/StellaOps.Scanner.Queue/Nats/NatsScanQueueLease.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Queue/Nats/NatsScanQueueLease.cs similarity index 100% rename from src/StellaOps.Scanner.Queue/Nats/NatsScanQueueLease.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Queue/Nats/NatsScanQueueLease.cs diff --git a/src/StellaOps.Scanner.Queue/QueueEnvelopeFields.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Queue/QueueEnvelopeFields.cs similarity index 100% rename from src/StellaOps.Scanner.Queue/QueueEnvelopeFields.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Queue/QueueEnvelopeFields.cs diff --git a/src/StellaOps.Scanner.Queue/QueueMetrics.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Queue/QueueMetrics.cs similarity index 100% rename from src/StellaOps.Scanner.Queue/QueueMetrics.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Queue/QueueMetrics.cs diff --git a/src/StellaOps.Scanner.Queue/QueueTransportKind.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Queue/QueueTransportKind.cs similarity index 100% rename from src/StellaOps.Scanner.Queue/QueueTransportKind.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Queue/QueueTransportKind.cs diff --git a/src/StellaOps.Scanner.Queue/Redis/RedisScanQueue.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Queue/Redis/RedisScanQueue.cs similarity index 100% rename from src/StellaOps.Scanner.Queue/Redis/RedisScanQueue.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Queue/Redis/RedisScanQueue.cs diff --git a/src/StellaOps.Scanner.Queue/Redis/RedisScanQueueLease.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Queue/Redis/RedisScanQueueLease.cs similarity index 100% rename from src/StellaOps.Scanner.Queue/Redis/RedisScanQueueLease.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Queue/Redis/RedisScanQueueLease.cs diff --git a/src/StellaOps.Scanner.Queue/ScanQueueContracts.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Queue/ScanQueueContracts.cs similarity index 100% rename from src/StellaOps.Scanner.Queue/ScanQueueContracts.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Queue/ScanQueueContracts.cs diff --git a/src/StellaOps.Scanner.Queue/ScannerQueueHealthCheck.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Queue/ScannerQueueHealthCheck.cs similarity index 100% rename from src/StellaOps.Scanner.Queue/ScannerQueueHealthCheck.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Queue/ScannerQueueHealthCheck.cs diff --git a/src/StellaOps.Scanner.Queue/ScannerQueueOptions.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Queue/ScannerQueueOptions.cs similarity 
index 100% rename from src/StellaOps.Scanner.Queue/ScannerQueueOptions.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Queue/ScannerQueueOptions.cs diff --git a/src/StellaOps.Scanner.Queue/ScannerQueueServiceCollectionExtensions.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Queue/ScannerQueueServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Scanner.Queue/ScannerQueueServiceCollectionExtensions.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Queue/ScannerQueueServiceCollectionExtensions.cs diff --git a/src/StellaOps.Scanner.Queue/StellaOps.Scanner.Queue.csproj b/src/Scanner/__Libraries/StellaOps.Scanner.Queue/StellaOps.Scanner.Queue.csproj similarity index 98% rename from src/StellaOps.Scanner.Queue/StellaOps.Scanner.Queue.csproj rename to src/Scanner/__Libraries/StellaOps.Scanner.Queue/StellaOps.Scanner.Queue.csproj index 3a90aefb..7cbdfd26 100644 --- a/src/StellaOps.Scanner.Queue/StellaOps.Scanner.Queue.csproj +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Queue/StellaOps.Scanner.Queue.csproj @@ -1,21 +1,21 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <GenerateAssemblyInfo>false</GenerateAssemblyInfo> - </PropertyGroup> - - <ItemGroup> - <PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Configuration.Binder" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Diagnostics.HealthChecks" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Diagnostics.HealthChecks.Abstractions" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="StackExchange.Redis" Version="2.7.33" /> - <PackageReference Include="NATS.Client.Core" Version="2.0.0" /> - <PackageReference Include="NATS.Client.JetStream" Version="2.0.0" /> - </ItemGroup> -</Project> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <GenerateAssemblyInfo>false</GenerateAssemblyInfo> + </PropertyGroup> + + <ItemGroup> + <PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Configuration.Binder" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Diagnostics.HealthChecks" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Diagnostics.HealthChecks.Abstractions" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="StackExchange.Redis" Version="2.7.33" /> + <PackageReference Include="NATS.Client.Core" Version="2.0.0" /> + <PackageReference 
Include="NATS.Client.JetStream" Version="2.0.0" /> + </ItemGroup> +</Project> diff --git a/src/StellaOps.Scanner.Queue/TASKS.md b/src/Scanner/__Libraries/StellaOps.Scanner.Queue/TASKS.md similarity index 100% rename from src/StellaOps.Scanner.Queue/TASKS.md rename to src/Scanner/__Libraries/StellaOps.Scanner.Queue/TASKS.md diff --git a/src/StellaOps.Scanner.Storage/AGENTS.md b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/AGENTS.md similarity index 100% rename from src/StellaOps.Scanner.Storage/AGENTS.md rename to src/Scanner/__Libraries/StellaOps.Scanner.Storage/AGENTS.md diff --git a/src/StellaOps.Scanner.Storage/Catalog/ArtifactDocument.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Catalog/ArtifactDocument.cs similarity index 100% rename from src/StellaOps.Scanner.Storage/Catalog/ArtifactDocument.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Storage/Catalog/ArtifactDocument.cs diff --git a/src/StellaOps.Scanner.Storage/Catalog/CatalogIdFactory.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Catalog/CatalogIdFactory.cs similarity index 100% rename from src/StellaOps.Scanner.Storage/Catalog/CatalogIdFactory.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Storage/Catalog/CatalogIdFactory.cs diff --git a/src/StellaOps.Scanner.Storage/Catalog/ImageDocument.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Catalog/ImageDocument.cs similarity index 100% rename from src/StellaOps.Scanner.Storage/Catalog/ImageDocument.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Storage/Catalog/ImageDocument.cs diff --git a/src/StellaOps.Scanner.Storage/Catalog/JobDocument.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Catalog/JobDocument.cs similarity index 100% rename from src/StellaOps.Scanner.Storage/Catalog/JobDocument.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Storage/Catalog/JobDocument.cs diff --git a/src/StellaOps.Scanner.Storage/Catalog/LayerDocument.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Catalog/LayerDocument.cs similarity index 100% rename from src/StellaOps.Scanner.Storage/Catalog/LayerDocument.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Storage/Catalog/LayerDocument.cs diff --git a/src/StellaOps.Scanner.Storage/Catalog/LifecycleRuleDocument.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Catalog/LifecycleRuleDocument.cs similarity index 100% rename from src/StellaOps.Scanner.Storage/Catalog/LifecycleRuleDocument.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Storage/Catalog/LifecycleRuleDocument.cs diff --git a/src/StellaOps.Scanner.Storage/Catalog/LinkDocument.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Catalog/LinkDocument.cs similarity index 100% rename from src/StellaOps.Scanner.Storage/Catalog/LinkDocument.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Storage/Catalog/LinkDocument.cs diff --git a/src/StellaOps.Scanner.Storage/Catalog/RuntimeEventDocument.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Catalog/RuntimeEventDocument.cs similarity index 96% rename from src/StellaOps.Scanner.Storage/Catalog/RuntimeEventDocument.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Storage/Catalog/RuntimeEventDocument.cs index b143cdb2..d82347ba 100644 --- a/src/StellaOps.Scanner.Storage/Catalog/RuntimeEventDocument.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Catalog/RuntimeEventDocument.cs @@ -1,89 +1,89 @@ -using MongoDB.Bson; -using MongoDB.Bson.Serialization.Attributes; - -namespace StellaOps.Scanner.Storage.Catalog; - -/// <summary> -/// 
MongoDB persistence model for runtime events emitted by the Zastava observer. -/// </summary> -public sealed class RuntimeEventDocument -{ - [BsonId] - [BsonRepresentation(BsonType.ObjectId)] - public string? Id { get; set; } - - [BsonElement("eventId")] - [BsonRequired] - public string EventId { get; set; } = string.Empty; - - [BsonElement("schemaVersion")] - [BsonRequired] - public string SchemaVersion { get; set; } = string.Empty; - - [BsonElement("tenant")] - [BsonRequired] - public string Tenant { get; set; } = string.Empty; - - [BsonElement("node")] - [BsonRequired] - public string Node { get; set; } = string.Empty; - - [BsonElement("kind")] - [BsonRepresentation(BsonType.String)] - [BsonRequired] - public string Kind { get; set; } = string.Empty; - - [BsonElement("when")] - [BsonDateTimeOptions(Kind = DateTimeKind.Utc)] - public DateTime When { get; set; } - - [BsonElement("receivedAt")] - [BsonDateTimeOptions(Kind = DateTimeKind.Utc)] - public DateTime ReceivedAt { get; set; } - - [BsonElement("expiresAt")] - [BsonDateTimeOptions(Kind = DateTimeKind.Utc)] - public DateTime ExpiresAt { get; set; } - - [BsonElement("platform")] - public string? Platform { get; set; } - - [BsonElement("namespace")] - public string? Namespace { get; set; } - - [BsonElement("pod")] - public string? Pod { get; set; } - - [BsonElement("container")] - public string? Container { get; set; } - - [BsonElement("containerId")] - public string? ContainerId { get; set; } - - [BsonElement("imageRef")] - public string? ImageRef { get; set; } - - [BsonElement("imageDigest")] - public string? ImageDigest { get; set; } - - [BsonElement("engine")] - public string? Engine { get; set; } - - [BsonElement("engineVersion")] - public string? EngineVersion { get; set; } - - [BsonElement("baselineDigest")] - public string? BaselineDigest { get; set; } - - [BsonElement("imageSigned")] - public bool? ImageSigned { get; set; } - - [BsonElement("sbomReferrer")] - public string? SbomReferrer { get; set; } - - [BsonElement("buildId")] - public string? BuildId { get; set; } - - [BsonElement("payload")] - public BsonDocument Payload { get; set; } = new(); -} +using MongoDB.Bson; +using MongoDB.Bson.Serialization.Attributes; + +namespace StellaOps.Scanner.Storage.Catalog; + +/// <summary> +/// MongoDB persistence model for runtime events emitted by the Zastava observer. +/// </summary> +public sealed class RuntimeEventDocument +{ + [BsonId] + [BsonRepresentation(BsonType.ObjectId)] + public string? Id { get; set; } + + [BsonElement("eventId")] + [BsonRequired] + public string EventId { get; set; } = string.Empty; + + [BsonElement("schemaVersion")] + [BsonRequired] + public string SchemaVersion { get; set; } = string.Empty; + + [BsonElement("tenant")] + [BsonRequired] + public string Tenant { get; set; } = string.Empty; + + [BsonElement("node")] + [BsonRequired] + public string Node { get; set; } = string.Empty; + + [BsonElement("kind")] + [BsonRepresentation(BsonType.String)] + [BsonRequired] + public string Kind { get; set; } = string.Empty; + + [BsonElement("when")] + [BsonDateTimeOptions(Kind = DateTimeKind.Utc)] + public DateTime When { get; set; } + + [BsonElement("receivedAt")] + [BsonDateTimeOptions(Kind = DateTimeKind.Utc)] + public DateTime ReceivedAt { get; set; } + + [BsonElement("expiresAt")] + [BsonDateTimeOptions(Kind = DateTimeKind.Utc)] + public DateTime ExpiresAt { get; set; } + + [BsonElement("platform")] + public string? Platform { get; set; } + + [BsonElement("namespace")] + public string? 
Namespace { get; set; } + + [BsonElement("pod")] + public string? Pod { get; set; } + + [BsonElement("container")] + public string? Container { get; set; } + + [BsonElement("containerId")] + public string? ContainerId { get; set; } + + [BsonElement("imageRef")] + public string? ImageRef { get; set; } + + [BsonElement("imageDigest")] + public string? ImageDigest { get; set; } + + [BsonElement("engine")] + public string? Engine { get; set; } + + [BsonElement("engineVersion")] + public string? EngineVersion { get; set; } + + [BsonElement("baselineDigest")] + public string? BaselineDigest { get; set; } + + [BsonElement("imageSigned")] + public bool? ImageSigned { get; set; } + + [BsonElement("sbomReferrer")] + public string? SbomReferrer { get; set; } + + [BsonElement("buildId")] + public string? BuildId { get; set; } + + [BsonElement("payload")] + public BsonDocument Payload { get; set; } = new(); +} diff --git a/src/StellaOps.Scanner.Storage/Extensions/ServiceCollectionExtensions.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Extensions/ServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Scanner.Storage/Extensions/ServiceCollectionExtensions.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Storage/Extensions/ServiceCollectionExtensions.cs diff --git a/src/StellaOps.Scanner.Storage/Migrations/EnsureLifecycleRuleTtlMigration.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Migrations/EnsureLifecycleRuleTtlMigration.cs similarity index 100% rename from src/StellaOps.Scanner.Storage/Migrations/EnsureLifecycleRuleTtlMigration.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Storage/Migrations/EnsureLifecycleRuleTtlMigration.cs diff --git a/src/StellaOps.Scanner.Storage/Migrations/IMongoMigration.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Migrations/IMongoMigration.cs similarity index 100% rename from src/StellaOps.Scanner.Storage/Migrations/IMongoMigration.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Storage/Migrations/IMongoMigration.cs diff --git a/src/StellaOps.Scanner.Storage/Migrations/MongoMigrationDocument.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Migrations/MongoMigrationDocument.cs similarity index 100% rename from src/StellaOps.Scanner.Storage/Migrations/MongoMigrationDocument.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Storage/Migrations/MongoMigrationDocument.cs diff --git a/src/StellaOps.Scanner.Storage/Migrations/MongoMigrationRunner.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Migrations/MongoMigrationRunner.cs similarity index 100% rename from src/StellaOps.Scanner.Storage/Migrations/MongoMigrationRunner.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Storage/Migrations/MongoMigrationRunner.cs diff --git a/src/StellaOps.Scanner.Storage/Mongo/MongoBootstrapper.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Mongo/MongoBootstrapper.cs similarity index 100% rename from src/StellaOps.Scanner.Storage/Mongo/MongoBootstrapper.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Storage/Mongo/MongoBootstrapper.cs diff --git a/src/StellaOps.Scanner.Storage/Mongo/MongoCollectionProvider.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Mongo/MongoCollectionProvider.cs similarity index 100% rename from src/StellaOps.Scanner.Storage/Mongo/MongoCollectionProvider.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Storage/Mongo/MongoCollectionProvider.cs diff --git a/src/StellaOps.Scanner.Storage/ObjectStore/IArtifactObjectStore.cs 
b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/ObjectStore/IArtifactObjectStore.cs similarity index 100% rename from src/StellaOps.Scanner.Storage/ObjectStore/IArtifactObjectStore.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Storage/ObjectStore/IArtifactObjectStore.cs diff --git a/src/StellaOps.Scanner.Storage/ObjectStore/RustFsArtifactObjectStore.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/ObjectStore/RustFsArtifactObjectStore.cs similarity index 100% rename from src/StellaOps.Scanner.Storage/ObjectStore/RustFsArtifactObjectStore.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Storage/ObjectStore/RustFsArtifactObjectStore.cs diff --git a/src/StellaOps.Scanner.Storage/ObjectStore/S3ArtifactObjectStore.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/ObjectStore/S3ArtifactObjectStore.cs similarity index 100% rename from src/StellaOps.Scanner.Storage/ObjectStore/S3ArtifactObjectStore.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Storage/ObjectStore/S3ArtifactObjectStore.cs diff --git a/src/StellaOps.Scanner.Storage/Repositories/ArtifactRepository.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Repositories/ArtifactRepository.cs similarity index 100% rename from src/StellaOps.Scanner.Storage/Repositories/ArtifactRepository.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Storage/Repositories/ArtifactRepository.cs diff --git a/src/StellaOps.Scanner.Storage/Repositories/ImageRepository.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Repositories/ImageRepository.cs similarity index 100% rename from src/StellaOps.Scanner.Storage/Repositories/ImageRepository.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Storage/Repositories/ImageRepository.cs diff --git a/src/StellaOps.Scanner.Storage/Repositories/JobRepository.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Repositories/JobRepository.cs similarity index 100% rename from src/StellaOps.Scanner.Storage/Repositories/JobRepository.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Storage/Repositories/JobRepository.cs diff --git a/src/StellaOps.Scanner.Storage/Repositories/LayerRepository.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Repositories/LayerRepository.cs similarity index 100% rename from src/StellaOps.Scanner.Storage/Repositories/LayerRepository.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Storage/Repositories/LayerRepository.cs diff --git a/src/StellaOps.Scanner.Storage/Repositories/LifecycleRuleRepository.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Repositories/LifecycleRuleRepository.cs similarity index 100% rename from src/StellaOps.Scanner.Storage/Repositories/LifecycleRuleRepository.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Storage/Repositories/LifecycleRuleRepository.cs diff --git a/src/StellaOps.Scanner.Storage/Repositories/LinkRepository.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Repositories/LinkRepository.cs similarity index 100% rename from src/StellaOps.Scanner.Storage/Repositories/LinkRepository.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Storage/Repositories/LinkRepository.cs diff --git a/src/StellaOps.Scanner.Storage/Repositories/RuntimeEventRepository.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Repositories/RuntimeEventRepository.cs similarity index 97% rename from src/StellaOps.Scanner.Storage/Repositories/RuntimeEventRepository.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Storage/Repositories/RuntimeEventRepository.cs index cdd05d37..4091abbe 100644 
--- a/src/StellaOps.Scanner.Storage/Repositories/RuntimeEventRepository.cs +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Repositories/RuntimeEventRepository.cs @@ -1,132 +1,132 @@ -using System.Collections.Generic; -using System.Linq; -using MongoDB.Driver; -using StellaOps.Scanner.Storage.Catalog; -using StellaOps.Scanner.Storage.Mongo; - -namespace StellaOps.Scanner.Storage.Repositories; - -/// <summary> -/// Repository responsible for persisting runtime events. -/// </summary> -public sealed class RuntimeEventRepository -{ - private readonly MongoCollectionProvider _collections; - - public RuntimeEventRepository(MongoCollectionProvider collections) - { - _collections = collections ?? throw new ArgumentNullException(nameof(collections)); - } - - public async Task<RuntimeEventInsertResult> InsertAsync( - IReadOnlyCollection<RuntimeEventDocument> documents, - CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(documents); - if (documents.Count == 0) - { - return RuntimeEventInsertResult.Empty; - } - - try - { - await _collections.RuntimeEvents.InsertManyAsync( - documents, - new InsertManyOptions { IsOrdered = false }, - cancellationToken).ConfigureAwait(false); - - return new RuntimeEventInsertResult(documents.Count, 0); - } - catch (MongoBulkWriteException<RuntimeEventDocument> ex) - { - var duplicates = ex.WriteErrors - .Count(error => error.Category == ServerErrorCategory.DuplicateKey); - var inserted = documents.Count - duplicates; - if (inserted < 0) - { - inserted = 0; - } - - return new RuntimeEventInsertResult(inserted, duplicates); - } - } - - public async Task<IReadOnlyDictionary<string, RuntimeBuildIdObservation>> GetRecentBuildIdsAsync( - IReadOnlyCollection<string> imageDigests, - int maxPerImage, - CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(imageDigests); - if (imageDigests.Count == 0 || maxPerImage <= 0) - { - return new Dictionary<string, RuntimeBuildIdObservation>(StringComparer.Ordinal); - } - - var normalized = imageDigests - .Where(digest => !string.IsNullOrWhiteSpace(digest)) - .Select(digest => digest.Trim().ToLowerInvariant()) - .Distinct(StringComparer.Ordinal) - .ToArray(); - - if (normalized.Length == 0) - { - return new Dictionary<string, RuntimeBuildIdObservation>(StringComparer.Ordinal); - } - - var results = new Dictionary<string, RuntimeBuildIdObservation>(StringComparer.Ordinal); - var limit = Math.Max(1, maxPerImage); - - foreach (var digest in normalized) - { - var filter = Builders<RuntimeEventDocument>.Filter.And( - Builders<RuntimeEventDocument>.Filter.Eq(doc => doc.ImageDigest, digest), - Builders<RuntimeEventDocument>.Filter.Ne(doc => doc.BuildId, null), - Builders<RuntimeEventDocument>.Filter.Ne(doc => doc.BuildId, string.Empty)); - - var documents = await _collections.RuntimeEvents - .Find(filter) - .SortByDescending(doc => doc.When) - .Limit(limit * 4) - .Project(doc => new { doc.BuildId, doc.When }) - .ToListAsync(cancellationToken) - .ConfigureAwait(false); - - if (documents.Count == 0) - { - continue; - } - - var buildIds = documents - .Select(doc => doc.BuildId) - .Where(id => !string.IsNullOrWhiteSpace(id)) - .Distinct(StringComparer.OrdinalIgnoreCase) - .Take(limit) - .Select(id => id!.Trim().ToLowerInvariant()) - .ToArray(); - - if (buildIds.Length == 0) - { - continue; - } - - var observedAt = documents - .Where(doc => !string.IsNullOrWhiteSpace(doc.BuildId)) - .Select(doc => doc.When) - .FirstOrDefault(); - - results[digest] = new RuntimeBuildIdObservation(digest, 
buildIds, observedAt); - } - - return results; - } -} - -public readonly record struct RuntimeEventInsertResult(int InsertedCount, int DuplicateCount) -{ - public static RuntimeEventInsertResult Empty => new(0, 0); -} - -public sealed record RuntimeBuildIdObservation( - string ImageDigest, - IReadOnlyList<string> BuildIds, - DateTime ObservedAtUtc); +using System.Collections.Generic; +using System.Linq; +using MongoDB.Driver; +using StellaOps.Scanner.Storage.Catalog; +using StellaOps.Scanner.Storage.Mongo; + +namespace StellaOps.Scanner.Storage.Repositories; + +/// <summary> +/// Repository responsible for persisting runtime events. +/// </summary> +public sealed class RuntimeEventRepository +{ + private readonly MongoCollectionProvider _collections; + + public RuntimeEventRepository(MongoCollectionProvider collections) + { + _collections = collections ?? throw new ArgumentNullException(nameof(collections)); + } + + public async Task<RuntimeEventInsertResult> InsertAsync( + IReadOnlyCollection<RuntimeEventDocument> documents, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(documents); + if (documents.Count == 0) + { + return RuntimeEventInsertResult.Empty; + } + + try + { + await _collections.RuntimeEvents.InsertManyAsync( + documents, + new InsertManyOptions { IsOrdered = false }, + cancellationToken).ConfigureAwait(false); + + return new RuntimeEventInsertResult(documents.Count, 0); + } + catch (MongoBulkWriteException<RuntimeEventDocument> ex) + { + var duplicates = ex.WriteErrors + .Count(error => error.Category == ServerErrorCategory.DuplicateKey); + var inserted = documents.Count - duplicates; + if (inserted < 0) + { + inserted = 0; + } + + return new RuntimeEventInsertResult(inserted, duplicates); + } + } + + public async Task<IReadOnlyDictionary<string, RuntimeBuildIdObservation>> GetRecentBuildIdsAsync( + IReadOnlyCollection<string> imageDigests, + int maxPerImage, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(imageDigests); + if (imageDigests.Count == 0 || maxPerImage <= 0) + { + return new Dictionary<string, RuntimeBuildIdObservation>(StringComparer.Ordinal); + } + + var normalized = imageDigests + .Where(digest => !string.IsNullOrWhiteSpace(digest)) + .Select(digest => digest.Trim().ToLowerInvariant()) + .Distinct(StringComparer.Ordinal) + .ToArray(); + + if (normalized.Length == 0) + { + return new Dictionary<string, RuntimeBuildIdObservation>(StringComparer.Ordinal); + } + + var results = new Dictionary<string, RuntimeBuildIdObservation>(StringComparer.Ordinal); + var limit = Math.Max(1, maxPerImage); + + foreach (var digest in normalized) + { + var filter = Builders<RuntimeEventDocument>.Filter.And( + Builders<RuntimeEventDocument>.Filter.Eq(doc => doc.ImageDigest, digest), + Builders<RuntimeEventDocument>.Filter.Ne(doc => doc.BuildId, null), + Builders<RuntimeEventDocument>.Filter.Ne(doc => doc.BuildId, string.Empty)); + + var documents = await _collections.RuntimeEvents + .Find(filter) + .SortByDescending(doc => doc.When) + .Limit(limit * 4) + .Project(doc => new { doc.BuildId, doc.When }) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + + if (documents.Count == 0) + { + continue; + } + + var buildIds = documents + .Select(doc => doc.BuildId) + .Where(id => !string.IsNullOrWhiteSpace(id)) + .Distinct(StringComparer.OrdinalIgnoreCase) + .Take(limit) + .Select(id => id!.Trim().ToLowerInvariant()) + .ToArray(); + + if (buildIds.Length == 0) + { + continue; + } + + var observedAt = documents + 
.Where(doc => !string.IsNullOrWhiteSpace(doc.BuildId)) + .Select(doc => doc.When) + .FirstOrDefault(); + + results[digest] = new RuntimeBuildIdObservation(digest, buildIds, observedAt); + } + + return results; + } +} + +public readonly record struct RuntimeEventInsertResult(int InsertedCount, int DuplicateCount) +{ + public static RuntimeEventInsertResult Empty => new(0, 0); +} + +public sealed record RuntimeBuildIdObservation( + string ImageDigest, + IReadOnlyList<string> BuildIds, + DateTime ObservedAtUtc); diff --git a/src/StellaOps.Scanner.Storage/ScannerStorageDefaults.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/ScannerStorageDefaults.cs similarity index 100% rename from src/StellaOps.Scanner.Storage/ScannerStorageDefaults.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Storage/ScannerStorageDefaults.cs diff --git a/src/StellaOps.Scanner.Storage/ScannerStorageOptions.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/ScannerStorageOptions.cs similarity index 100% rename from src/StellaOps.Scanner.Storage/ScannerStorageOptions.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Storage/ScannerStorageOptions.cs diff --git a/src/StellaOps.Scanner.Storage/Services/ArtifactStorageService.cs b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/Services/ArtifactStorageService.cs similarity index 100% rename from src/StellaOps.Scanner.Storage/Services/ArtifactStorageService.cs rename to src/Scanner/__Libraries/StellaOps.Scanner.Storage/Services/ArtifactStorageService.cs diff --git a/src/StellaOps.Scanner.Storage/StellaOps.Scanner.Storage.csproj b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/StellaOps.Scanner.Storage.csproj similarity index 98% rename from src/StellaOps.Scanner.Storage/StellaOps.Scanner.Storage.csproj rename to src/Scanner/__Libraries/StellaOps.Scanner.Storage/StellaOps.Scanner.Storage.csproj index eb780ae7..9fdda755 100644 --- a/src/StellaOps.Scanner.Storage/StellaOps.Scanner.Storage.csproj +++ b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/StellaOps.Scanner.Storage.csproj @@ -1,18 +1,18 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <ItemGroup> - <PackageReference Include="MongoDB.Driver" Version="3.5.0" /> - <PackageReference Include="AWSSDK.S3" Version="3.7.305.6" /> - <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Options.ConfigurationExtensions" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0-rc.2.25502.107" /> - </ItemGroup> -</Project> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <ItemGroup> + <PackageReference Include="MongoDB.Driver" Version="3.5.0" /> + <PackageReference Include="AWSSDK.S3" Version="3.7.305.6" /> + <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" 
Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Options.ConfigurationExtensions" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0-rc.2.25502.107" /> + </ItemGroup> +</Project> diff --git a/src/StellaOps.Scanner.Storage/TASKS.md b/src/Scanner/__Libraries/StellaOps.Scanner.Storage/TASKS.md similarity index 100% rename from src/StellaOps.Scanner.Storage/TASKS.md rename to src/Scanner/__Libraries/StellaOps.Scanner.Storage/TASKS.md diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Fixtures/lang/go/basic/app b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Fixtures/lang/go/basic/app similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Fixtures/lang/go/basic/app rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Fixtures/lang/go/basic/app diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Fixtures/lang/go/basic/expected.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Fixtures/lang/go/basic/expected.json similarity index 96% rename from src/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Fixtures/lang/go/basic/expected.json rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Fixtures/lang/go/basic/expected.json index 21217f9e..b6b25b18 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Fixtures/lang/go/basic/expected.json +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Fixtures/lang/go/basic/expected.json @@ -1,118 +1,118 @@ -[ - { - "analyzerId": "golang", - "componentKey": "purl::pkg:golang/example.com/app@v1.2.3", - "purl": "pkg:golang/example.com/app@v1.2.3", - "name": "example.com/app", - "version": "v1.2.3", - "type": "golang", - "usedByEntrypoint": false, - "metadata": { - "binaryPath": "app", - "build.GOARCH": "amd64", - "build.GOOS": "linux", - "build.vcs": "git", - "build.vcs.modified": "false", - "build.vcs.revision": "1234567890abcdef1234567890abcdef12345678", - "build.vcs.time": "2025-09-14T12:34:56Z", - "go.version": "go1.22.5", - "modulePath": "example.com/app", - "modulePath.main": "example.com/app", - "moduleSum": "h1:mainchecksum", - "moduleVersion": "v1.2.3" - }, - "evidence": [ - { - "kind": "metadata", - "source": "go.buildinfo.setting", - "locator": "GOARCH", - "value": "amd64" - }, - { - "kind": "metadata", - "source": "go.buildinfo.setting", - "locator": "GOOS", - "value": "linux" - }, - { - "kind": "metadata", - "source": "go.buildinfo.setting", - "locator": "vcs.modified", - "value": "false" - }, - { - "kind": "metadata", - "source": "go.buildinfo.setting", - "locator": "vcs.revision", - "value": "1234567890abcdef1234567890abcdef12345678" - }, - { - "kind": "metadata", - "source": "go.buildinfo.setting", - "locator": "vcs.time", - "value": "2025-09-14T12:34:56Z" - }, - { - "kind": "metadata", - "source": "go.buildinfo.setting", - "locator": "vcs", - "value": "git" - }, - { - "kind": "metadata", - "source": "go.buildinfo", - "locator": "module:example.com/app", - "value": "v1.2.3", - "sha256": "h1:mainchecksum" - }, - { - "kind": "metadata", - "source": "go.dwarf", - "locator": "vcs.modified", - "value": "false" - }, - { - "kind": "metadata", - "source": "go.dwarf", - "locator": "vcs.revision", - "value": 
"1234567890abcdef1234567890abcdef12345678" - }, - { - "kind": "metadata", - "source": "go.dwarf", - "locator": "vcs.time", - "value": "2025-09-14T12:34:56Z" - }, - { - "kind": "metadata", - "source": "go.dwarf", - "locator": "vcs", - "value": "git" - } - ] - }, - { - "analyzerId": "golang", - "componentKey": "purl::pkg:golang/example.com/lib@v1.0.0", - "purl": "pkg:golang/example.com/lib@v1.0.0", - "name": "example.com/lib", - "version": "v1.0.0", - "type": "golang", - "usedByEntrypoint": false, - "metadata": { - "binaryPath": "app", - "modulePath": "example.com/lib", - "moduleSum": "h1:depchecksum", - "moduleVersion": "v1.0.0" - }, - "evidence": [ - { - "kind": "metadata", - "source": "go.buildinfo", - "locator": "module:example.com/lib", - "value": "v1.0.0", - "sha256": "h1:depchecksum" - } - ] - } +[ + { + "analyzerId": "golang", + "componentKey": "purl::pkg:golang/example.com/app@v1.2.3", + "purl": "pkg:golang/example.com/app@v1.2.3", + "name": "example.com/app", + "version": "v1.2.3", + "type": "golang", + "usedByEntrypoint": false, + "metadata": { + "binaryPath": "app", + "build.GOARCH": "amd64", + "build.GOOS": "linux", + "build.vcs": "git", + "build.vcs.modified": "false", + "build.vcs.revision": "1234567890abcdef1234567890abcdef12345678", + "build.vcs.time": "2025-09-14T12:34:56Z", + "go.version": "go1.22.5", + "modulePath": "example.com/app", + "modulePath.main": "example.com/app", + "moduleSum": "h1:mainchecksum", + "moduleVersion": "v1.2.3" + }, + "evidence": [ + { + "kind": "metadata", + "source": "go.buildinfo.setting", + "locator": "GOARCH", + "value": "amd64" + }, + { + "kind": "metadata", + "source": "go.buildinfo.setting", + "locator": "GOOS", + "value": "linux" + }, + { + "kind": "metadata", + "source": "go.buildinfo.setting", + "locator": "vcs.modified", + "value": "false" + }, + { + "kind": "metadata", + "source": "go.buildinfo.setting", + "locator": "vcs.revision", + "value": "1234567890abcdef1234567890abcdef12345678" + }, + { + "kind": "metadata", + "source": "go.buildinfo.setting", + "locator": "vcs.time", + "value": "2025-09-14T12:34:56Z" + }, + { + "kind": "metadata", + "source": "go.buildinfo.setting", + "locator": "vcs", + "value": "git" + }, + { + "kind": "metadata", + "source": "go.buildinfo", + "locator": "module:example.com/app", + "value": "v1.2.3", + "sha256": "h1:mainchecksum" + }, + { + "kind": "metadata", + "source": "go.dwarf", + "locator": "vcs.modified", + "value": "false" + }, + { + "kind": "metadata", + "source": "go.dwarf", + "locator": "vcs.revision", + "value": "1234567890abcdef1234567890abcdef12345678" + }, + { + "kind": "metadata", + "source": "go.dwarf", + "locator": "vcs.time", + "value": "2025-09-14T12:34:56Z" + }, + { + "kind": "metadata", + "source": "go.dwarf", + "locator": "vcs", + "value": "git" + } + ] + }, + { + "analyzerId": "golang", + "componentKey": "purl::pkg:golang/example.com/lib@v1.0.0", + "purl": "pkg:golang/example.com/lib@v1.0.0", + "name": "example.com/lib", + "version": "v1.0.0", + "type": "golang", + "usedByEntrypoint": false, + "metadata": { + "binaryPath": "app", + "modulePath": "example.com/lib", + "moduleSum": "h1:depchecksum", + "moduleVersion": "v1.0.0" + }, + "evidence": [ + { + "kind": "metadata", + "source": "go.buildinfo", + "locator": "module:example.com/lib", + "value": "v1.0.0", + "sha256": "h1:depchecksum" + } + ] + } ] \ No newline at end of file diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Fixtures/lang/go/dwarf-only/app 
b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Fixtures/lang/go/dwarf-only/app similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Fixtures/lang/go/dwarf-only/app rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Fixtures/lang/go/dwarf-only/app diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Fixtures/lang/go/dwarf-only/expected.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Fixtures/lang/go/dwarf-only/expected.json similarity index 96% rename from src/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Fixtures/lang/go/dwarf-only/expected.json rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Fixtures/lang/go/dwarf-only/expected.json index 78899372..0a1de34e 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Fixtures/lang/go/dwarf-only/expected.json +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Fixtures/lang/go/dwarf-only/expected.json @@ -1,80 +1,80 @@ -[ - { - "analyzerId": "golang", - "componentKey": "purl::pkg:golang/example.com/app@v0.0.0", - "purl": "pkg:golang/example.com/app@v0.0.0", - "name": "example.com/app", - "version": "v0.0.0", - "type": "golang", - "usedByEntrypoint": false, - "metadata": { - "binaryPath": "app", - "build.vcs": "git", - "build.vcs.modified": "true", - "build.vcs.revision": "abcdef0123456789abcdef0123456789abcdef01", - "build.vcs.time": "2025-01-02T03:04:05Z", - "go.version": "go1.20.3", - "modulePath": "example.com/app", - "modulePath.main": "example.com/app", - "moduleSum": "h1:dwarfchecksum", - "moduleVersion": "v0.0.0" - }, - "evidence": [ - { - "kind": "metadata", - "source": "go.buildinfo", - "locator": "module:example.com/app", - "value": "v0.0.0", - "sha256": "h1:dwarfchecksum" - }, - { - "kind": "metadata", - "source": "go.dwarf", - "locator": "vcs.modified", - "value": "true" - }, - { - "kind": "metadata", - "source": "go.dwarf", - "locator": "vcs.revision", - "value": "abcdef0123456789abcdef0123456789abcdef01" - }, - { - "kind": "metadata", - "source": "go.dwarf", - "locator": "vcs.time", - "value": "2025-01-02T03:04:05Z" - }, - { - "kind": "metadata", - "source": "go.dwarf", - "locator": "vcs", - "value": "git" - } - ] - }, - { - "analyzerId": "golang", - "componentKey": "purl::pkg:golang/example.com/lib@v0.1.0", - "purl": "pkg:golang/example.com/lib@v0.1.0", - "name": "example.com/lib", - "version": "v0.1.0", - "type": "golang", - "usedByEntrypoint": false, - "metadata": { - "binaryPath": "app", - "modulePath": "example.com/lib", - "moduleSum": "h1:libchecksum", - "moduleVersion": "v0.1.0" - }, - "evidence": [ - { - "kind": "metadata", - "source": "go.buildinfo", - "locator": "module:example.com/lib", - "value": "v0.1.0", - "sha256": "h1:libchecksum" - } - ] - } +[ + { + "analyzerId": "golang", + "componentKey": "purl::pkg:golang/example.com/app@v0.0.0", + "purl": "pkg:golang/example.com/app@v0.0.0", + "name": "example.com/app", + "version": "v0.0.0", + "type": "golang", + "usedByEntrypoint": false, + "metadata": { + "binaryPath": "app", + "build.vcs": "git", + "build.vcs.modified": "true", + "build.vcs.revision": "abcdef0123456789abcdef0123456789abcdef01", + "build.vcs.time": "2025-01-02T03:04:05Z", + "go.version": "go1.20.3", + "modulePath": "example.com/app", + "modulePath.main": "example.com/app", + "moduleSum": "h1:dwarfchecksum", + "moduleVersion": "v0.0.0" + }, + "evidence": [ + { + "kind": "metadata", + "source": "go.buildinfo", + "locator": "module:example.com/app", + "value": "v0.0.0", + 
"sha256": "h1:dwarfchecksum" + }, + { + "kind": "metadata", + "source": "go.dwarf", + "locator": "vcs.modified", + "value": "true" + }, + { + "kind": "metadata", + "source": "go.dwarf", + "locator": "vcs.revision", + "value": "abcdef0123456789abcdef0123456789abcdef01" + }, + { + "kind": "metadata", + "source": "go.dwarf", + "locator": "vcs.time", + "value": "2025-01-02T03:04:05Z" + }, + { + "kind": "metadata", + "source": "go.dwarf", + "locator": "vcs", + "value": "git" + } + ] + }, + { + "analyzerId": "golang", + "componentKey": "purl::pkg:golang/example.com/lib@v0.1.0", + "purl": "pkg:golang/example.com/lib@v0.1.0", + "name": "example.com/lib", + "version": "v0.1.0", + "type": "golang", + "usedByEntrypoint": false, + "metadata": { + "binaryPath": "app", + "modulePath": "example.com/lib", + "moduleSum": "h1:libchecksum", + "moduleVersion": "v0.1.0" + }, + "evidence": [ + { + "kind": "metadata", + "source": "go.buildinfo", + "locator": "module:example.com/lib", + "value": "v0.1.0", + "sha256": "h1:libchecksum" + } + ] + } ] \ No newline at end of file diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Fixtures/lang/go/stripped/app b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Fixtures/lang/go/stripped/app similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Fixtures/lang/go/stripped/app rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Fixtures/lang/go/stripped/app diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Fixtures/lang/go/stripped/expected.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Fixtures/lang/go/stripped/expected.json similarity index 96% rename from src/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Fixtures/lang/go/stripped/expected.json rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Fixtures/lang/go/stripped/expected.json index bdd1a771..d9375984 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Fixtures/lang/go/stripped/expected.json +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Fixtures/lang/go/stripped/expected.json @@ -1,30 +1,30 @@ -[ - { - "analyzerId": "golang", - "componentKey": "golang::bin::sha256:7125d65230b913faa744a33acd884899c81a1dbc6d88cbf251a74b19621cde99", - "name": "app", - "type": "bin", - "usedByEntrypoint": false, - "metadata": { - "binary.sha256": "7125d65230b913faa744a33acd884899c81a1dbc6d88cbf251a74b19621cde99", - "binaryPath": "app", - "go.version.hint": "go1.22.8", - "languageHint": "golang", - "provenance": "binary" - }, - "evidence": [ - { - "kind": "file", - "source": "binary", - "locator": "app", - "sha256": "7125d65230b913faa744a33acd884899c81a1dbc6d88cbf251a74b19621cde99" - }, - { - "kind": "metadata", - "source": "go.heuristic", - "locator": "classification", - "value": "build-id" - } - ] - } -] +[ + { + "analyzerId": "golang", + "componentKey": "golang::bin::sha256:7125d65230b913faa744a33acd884899c81a1dbc6d88cbf251a74b19621cde99", + "name": "app", + "type": "bin", + "usedByEntrypoint": false, + "metadata": { + "binary.sha256": "7125d65230b913faa744a33acd884899c81a1dbc6d88cbf251a74b19621cde99", + "binaryPath": "app", + "go.version.hint": "go1.22.8", + "languageHint": "golang", + "provenance": "binary" + }, + "evidence": [ + { + "kind": "file", + "source": "binary", + "locator": "app", + "sha256": "7125d65230b913faa744a33acd884899c81a1dbc6d88cbf251a74b19621cde99" + }, + { + "kind": "metadata", + "source": "go.heuristic", + "locator": "classification", + "value": "build-id" + } + ] + } 
+] diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Go/GoLanguageAnalyzerTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Go/GoLanguageAnalyzerTests.cs similarity index 96% rename from src/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Go/GoLanguageAnalyzerTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Go/GoLanguageAnalyzerTests.cs index 373f7cde..bfe7a0c9 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Go/GoLanguageAnalyzerTests.cs +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Go.Tests/Go/GoLanguageAnalyzerTests.cs @@ -1,134 +1,134 @@ -using System; -using System.Diagnostics.Metrics; -using System.IO; -using System.Linq; -using StellaOps.Scanner.Analyzers.Lang.Go; -using StellaOps.Scanner.Analyzers.Lang.Tests.Harness; -using StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities; - -namespace StellaOps.Scanner.Analyzers.Lang.Go.Tests; - -public sealed class GoLanguageAnalyzerTests -{ - [Fact] - public async Task BuildInfoFixtureProducesDeterministicOutputAsync() - { - var cancellationToken = TestContext.Current.CancellationToken; - var fixturePath = TestPaths.ResolveFixture("lang", "go", "basic"); - var goldenPath = Path.Combine(fixturePath, "expected.json"); - - var analyzers = new ILanguageAnalyzer[] - { - new GoLanguageAnalyzer(), - }; - - await LanguageAnalyzerTestHarness.AssertDeterministicAsync( - fixturePath, - goldenPath, - analyzers, - cancellationToken); - } - - [Fact] - public async Task DwarfOnlyFixtureFallsBackToMetadataAsync() - { - var cancellationToken = TestContext.Current.CancellationToken; - var fixturePath = TestPaths.ResolveFixture("lang", "go", "dwarf-only"); - var goldenPath = Path.Combine(fixturePath, "expected.json"); - - var analyzers = new ILanguageAnalyzer[] - { - new GoLanguageAnalyzer(), - }; - - await LanguageAnalyzerTestHarness.AssertDeterministicAsync( - fixturePath, - goldenPath, - analyzers, - cancellationToken); - } - - [Fact] - public async Task StrippedBinaryFallsBackToHeuristicBinHashAsync() - { - var cancellationToken = TestContext.Current.CancellationToken; - var fixturePath = TestPaths.ResolveFixture("lang", "go", "stripped"); - var goldenPath = Path.Combine(fixturePath, "expected.json"); - - var analyzers = new ILanguageAnalyzer[] - { - new GoLanguageAnalyzer(), - }; - - await LanguageAnalyzerTestHarness.AssertDeterministicAsync( - fixturePath, - goldenPath, - analyzers, - cancellationToken); - } - - [Fact] - public async Task ParallelRunsRemainDeterministicAsync() - { - var cancellationToken = TestContext.Current.CancellationToken; - var fixturePath = TestPaths.ResolveFixture("lang", "go", "basic"); - var goldenPath = Path.Combine(fixturePath, "expected.json"); - - var analyzers = new ILanguageAnalyzer[] - { - new GoLanguageAnalyzer(), - }; - - var tasks = Enumerable - .Range(0, Environment.ProcessorCount) - .Select(_ => LanguageAnalyzerTestHarness.AssertDeterministicAsync( - fixturePath, - goldenPath, - analyzers, - cancellationToken)); - - await Task.WhenAll(tasks); - } - - [Fact] - public async Task HeuristicMetricCounterIncrementsAsync() - { - var cancellationToken = TestContext.Current.CancellationToken; - var fixturePath = TestPaths.ResolveFixture("lang", "go", "stripped"); - - var analyzers = new ILanguageAnalyzer[] - { - new GoLanguageAnalyzer(), - }; - - var total = 0L; - - using var listener = new MeterListener - { - InstrumentPublished = (instrument, meterListener) => - { - if (instrument.Meter.Name == "StellaOps.Scanner.Analyzers.Lang.Go" - && 
instrument.Name == "scanner_analyzer_golang_heuristic_total") - { - meterListener.EnableMeasurementEvents(instrument); - } - } - }; - - listener.SetMeasurementEventCallback<long>((_, measurement, _, _) => - { - Interlocked.Add(ref total, measurement); - }); - - listener.Start(); - - await LanguageAnalyzerTestHarness.RunToJsonAsync( - fixturePath, - analyzers, - cancellationToken: cancellationToken).ConfigureAwait(false); - - listener.Dispose(); - - Assert.Equal(1, Interlocked.Read(ref total)); - } -} +using System; +using System.Diagnostics.Metrics; +using System.IO; +using System.Linq; +using StellaOps.Scanner.Analyzers.Lang.Go; +using StellaOps.Scanner.Analyzers.Lang.Tests.Harness; +using StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities; + +namespace StellaOps.Scanner.Analyzers.Lang.Go.Tests; + +public sealed class GoLanguageAnalyzerTests +{ + [Fact] + public async Task BuildInfoFixtureProducesDeterministicOutputAsync() + { + var cancellationToken = TestContext.Current.CancellationToken; + var fixturePath = TestPaths.ResolveFixture("lang", "go", "basic"); + var goldenPath = Path.Combine(fixturePath, "expected.json"); + + var analyzers = new ILanguageAnalyzer[] + { + new GoLanguageAnalyzer(), + }; + + await LanguageAnalyzerTestHarness.AssertDeterministicAsync( + fixturePath, + goldenPath, + analyzers, + cancellationToken); + } + + [Fact] + public async Task DwarfOnlyFixtureFallsBackToMetadataAsync() + { + var cancellationToken = TestContext.Current.CancellationToken; + var fixturePath = TestPaths.ResolveFixture("lang", "go", "dwarf-only"); + var goldenPath = Path.Combine(fixturePath, "expected.json"); + + var analyzers = new ILanguageAnalyzer[] + { + new GoLanguageAnalyzer(), + }; + + await LanguageAnalyzerTestHarness.AssertDeterministicAsync( + fixturePath, + goldenPath, + analyzers, + cancellationToken); + } + + [Fact] + public async Task StrippedBinaryFallsBackToHeuristicBinHashAsync() + { + var cancellationToken = TestContext.Current.CancellationToken; + var fixturePath = TestPaths.ResolveFixture("lang", "go", "stripped"); + var goldenPath = Path.Combine(fixturePath, "expected.json"); + + var analyzers = new ILanguageAnalyzer[] + { + new GoLanguageAnalyzer(), + }; + + await LanguageAnalyzerTestHarness.AssertDeterministicAsync( + fixturePath, + goldenPath, + analyzers, + cancellationToken); + } + + [Fact] + public async Task ParallelRunsRemainDeterministicAsync() + { + var cancellationToken = TestContext.Current.CancellationToken; + var fixturePath = TestPaths.ResolveFixture("lang", "go", "basic"); + var goldenPath = Path.Combine(fixturePath, "expected.json"); + + var analyzers = new ILanguageAnalyzer[] + { + new GoLanguageAnalyzer(), + }; + + var tasks = Enumerable + .Range(0, Environment.ProcessorCount) + .Select(_ => LanguageAnalyzerTestHarness.AssertDeterministicAsync( + fixturePath, + goldenPath, + analyzers, + cancellationToken)); + + await Task.WhenAll(tasks); + } + + [Fact] + public async Task HeuristicMetricCounterIncrementsAsync() + { + var cancellationToken = TestContext.Current.CancellationToken; + var fixturePath = TestPaths.ResolveFixture("lang", "go", "stripped"); + + var analyzers = new ILanguageAnalyzer[] + { + new GoLanguageAnalyzer(), + }; + + var total = 0L; + + using var listener = new MeterListener + { + InstrumentPublished = (instrument, meterListener) => + { + if (instrument.Meter.Name == "StellaOps.Scanner.Analyzers.Lang.Go" + && instrument.Name == "scanner_analyzer_golang_heuristic_total") + { + meterListener.EnableMeasurementEvents(instrument); + } + } + 
}; + + listener.SetMeasurementEventCallback<long>((_, measurement, _, _) => + { + Interlocked.Add(ref total, measurement); + }); + + listener.Start(); + + await LanguageAnalyzerTestHarness.RunToJsonAsync( + fixturePath, + analyzers, + cancellationToken: cancellationToken).ConfigureAwait(false); + + listener.Dispose(); + + Assert.Equal(1, Interlocked.Read(ref total)); + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Go.Tests/StellaOps.Scanner.Analyzers.Lang.Go.Tests.csproj b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Go.Tests/StellaOps.Scanner.Analyzers.Lang.Go.Tests.csproj similarity index 80% rename from src/StellaOps.Scanner.Analyzers.Lang.Go.Tests/StellaOps.Scanner.Analyzers.Lang.Go.Tests.csproj rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Go.Tests/StellaOps.Scanner.Analyzers.Lang.Go.Tests.csproj index 0704c4fb..a8ea452a 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Go.Tests/StellaOps.Scanner.Analyzers.Lang.Go.Tests.csproj +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Go.Tests/StellaOps.Scanner.Analyzers.Lang.Go.Tests.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -30,9 +31,9 @@ <ItemGroup> <ProjectReference Include="..\StellaOps.Scanner.Analyzers.Lang.Tests\StellaOps.Scanner.Analyzers.Lang.Tests.csproj" /> - <ProjectReference Include="..\StellaOps.Scanner.Analyzers.Lang\StellaOps.Scanner.Analyzers.Lang.csproj" /> - <ProjectReference Include="..\StellaOps.Scanner.Analyzers.Lang.Go\StellaOps.Scanner.Analyzers.Lang.Go.csproj" /> - <ProjectReference Include="..\StellaOps.Scanner.Core\StellaOps.Scanner.Core.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Scanner.Analyzers.Lang/StellaOps.Scanner.Analyzers.Lang.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Scanner.Analyzers.Lang.Go/StellaOps.Scanner.Analyzers.Lang.Go.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Scanner.Core/StellaOps.Scanner.Core.csproj" /> </ItemGroup> <ItemGroup> @@ -42,4 +43,4 @@ <ItemGroup> <Using Include="Xunit" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Fixtures/java/basic/expected.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Fixtures/java/basic/expected.json similarity index 96% rename from src/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Fixtures/java/basic/expected.json rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Fixtures/java/basic/expected.json index df18806d..4f78fe2e 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Fixtures/java/basic/expected.json +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Fixtures/java/basic/expected.json @@ -1,35 +1,35 @@ -[ - { - "analyzerId": "java", - "componentKey": "purl::pkg:maven/com/example/demo@1.0.0", - "purl": "pkg:maven/com/example/demo@1.0.0", - "name": "demo", - "version": "1.0.0", - "type": "maven", - "usedByEntrypoint": true, - "metadata": { - "artifactId": "demo", - "displayName": "Demo Library", - "groupId": "com.example", - "jarPath": "libs/demo.jar", - "manifestTitle": "Demo", - "manifestVendor": "Example Corp", - "manifestVersion": "1.0.0", - "packaging": "jar" - }, - "evidence": [ - { - "kind": "file", - "source": "MANIFEST.MF", - "locator": "libs/demo.jar!META-INF/MANIFEST.MF", - "value": "title=Demo;version=1.0.0;vendor=Example Corp" - }, - { - "kind": "file", - 
"source": "pom.properties", - "locator": "libs/demo.jar!META-INF/maven/com.example/demo/pom.properties", - "sha256": "c20f36aa1b9d89d28cf9ed131519ffd6287a4dac0c7cb926130496f3f8157bf1" - } - ] - } -] +[ + { + "analyzerId": "java", + "componentKey": "purl::pkg:maven/com/example/demo@1.0.0", + "purl": "pkg:maven/com/example/demo@1.0.0", + "name": "demo", + "version": "1.0.0", + "type": "maven", + "usedByEntrypoint": true, + "metadata": { + "artifactId": "demo", + "displayName": "Demo Library", + "groupId": "com.example", + "jarPath": "libs/demo.jar", + "manifestTitle": "Demo", + "manifestVendor": "Example Corp", + "manifestVersion": "1.0.0", + "packaging": "jar" + }, + "evidence": [ + { + "kind": "file", + "source": "MANIFEST.MF", + "locator": "libs/demo.jar!META-INF/MANIFEST.MF", + "value": "title=Demo;version=1.0.0;vendor=Example Corp" + }, + { + "kind": "file", + "source": "pom.properties", + "locator": "libs/demo.jar!META-INF/maven/com.example/demo/pom.properties", + "sha256": "c20f36aa1b9d89d28cf9ed131519ffd6287a4dac0c7cb926130496f3f8157bf1" + } + ] + } +] diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Java/JavaClassPathBuilderTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Java/JavaClassPathBuilderTests.cs similarity index 97% rename from src/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Java/JavaClassPathBuilderTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Java/JavaClassPathBuilderTests.cs index 3322acee..a6920ffb 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Java/JavaClassPathBuilderTests.cs +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Java/JavaClassPathBuilderTests.cs @@ -1,172 +1,172 @@ -using System.Collections.Generic; -using System.IO.Compression; -using System.Linq; -using System.Text; -using System.Threading; -using StellaOps.Scanner.Analyzers.Lang.Java.Internal; -using StellaOps.Scanner.Analyzers.Lang.Java.Internal.ClassPath; -using StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities; - -namespace StellaOps.Scanner.Analyzers.Lang.Java.Tests; - -public sealed class JavaClassPathBuilderTests -{ - [Fact] - public void Build_ClassPathForSimpleJar() - { - var root = TestPaths.CreateTemporaryDirectory(); - try - { - JavaFixtureBuilder.CreateSampleJar(root, "libs/simple.jar"); - - var context = new LanguageAnalyzerContext(root, TimeProvider.System); - var workspace = JavaWorkspaceNormalizer.Normalize(context, CancellationToken.None); - var analysis = JavaClassPathBuilder.Build(workspace, CancellationToken.None); - - var segment = Assert.Single(analysis.Segments); - Assert.Equal("libs/simple.jar", segment.Identifier.Replace('\\', '/')); - Assert.Contains("com.example.Demo", segment.Classes); - - var package = Assert.Single(segment.Packages); - Assert.Equal("com.example", package.Key); - Assert.Equal(1, package.Value.ClassCount); - - Assert.Empty(analysis.DuplicateClasses); - Assert.Empty(analysis.SplitPackages); - } - finally - { - TestPaths.SafeDelete(root); - } - } - - [Fact] - public void Build_CapturesServiceDefinitions() - { - var root = TestPaths.CreateTemporaryDirectory(); - try - { - var services = new Dictionary<string, string[]> - { - ["java.sql.Driver"] = new[] { "com.example.DriverImpl" }, - }; - - CreateJarWithClasses(root, "libs/spi.jar", new[] { "com.example.DriverImpl" }, services); - - var context = new LanguageAnalyzerContext(root, TimeProvider.System); - var workspace = JavaWorkspaceNormalizer.Normalize(context, CancellationToken.None); - var analysis 
= JavaClassPathBuilder.Build(workspace, CancellationToken.None); - - var segment = Assert.Single(analysis.Segments); - var providers = Assert.Single(segment.ServiceDefinitions); - Assert.Equal("java.sql.Driver", providers.Key); - Assert.Contains("com.example.DriverImpl", providers.Value); - } - finally - { - TestPaths.SafeDelete(root); - } - } - - [Fact] - public void Build_FatJarIncludesNestedLibraries() - { - var root = TestPaths.CreateTemporaryDirectory(); - try - { - JavaFixtureBuilder.CreateSpringBootFatJar(root, "apps/app-fat.jar"); - - var context = new LanguageAnalyzerContext(root, TimeProvider.System); - var workspace = JavaWorkspaceNormalizer.Normalize(context, CancellationToken.None); - var analysis = JavaClassPathBuilder.Build(workspace, CancellationToken.None); - - Assert.Equal(2, analysis.Segments.Length); - - var classesSegment = analysis.Segments[0]; - Assert.Equal("apps/app-fat.jar!BOOT-INF/classes/", classesSegment.Identifier.Replace('\\', '/')); - Assert.Contains("com.example.App", classesSegment.Classes); - - var librarySegment = analysis.Segments[1]; - Assert.Equal("apps/app-fat.jar!BOOT-INF/lib/library.jar", librarySegment.Identifier.Replace('\\', '/')); - Assert.Contains("com.example.Lib", librarySegment.Classes); - } - finally - { - TestPaths.SafeDelete(root); - } - } - - [Fact] - public void Build_ReportsDuplicateClassesAndSplitPackages() - { - var root = TestPaths.CreateTemporaryDirectory(); - try - { - CreateJarWithClasses(root, "libs/a.jar", "com.example.Demo"); - CreateJarWithClasses(root, "libs/b.jar", "com.example.Demo", "com.example.Other"); - - var context = new LanguageAnalyzerContext(root, TimeProvider.System); - var workspace = JavaWorkspaceNormalizer.Normalize(context, CancellationToken.None); - var analysis = JavaClassPathBuilder.Build(workspace, CancellationToken.None); - - Assert.Equal(2, analysis.Segments.Length); - - var duplicate = Assert.Single(analysis.DuplicateClasses); - Assert.Equal("com.example.Demo", duplicate.ClassName); - Assert.Equal(2, duplicate.SegmentIdentifiers.Length); - - var split = Assert.Single(analysis.SplitPackages); - Assert.Equal("com.example", split.PackageName); - Assert.Equal(2, split.SegmentIdentifiers.Length); - } - finally - { - TestPaths.SafeDelete(root); - } - } - - private static void CreateJarWithClasses(string rootDirectory, string relativePath, params string[] classNames) - => CreateJarWithClasses(rootDirectory, relativePath, classNames.AsEnumerable(), serviceDefinitions: null); - - private static void CreateJarWithClasses( - string rootDirectory, - string relativePath, - IEnumerable<string> classNames, - IDictionary<string, string[]>? 
serviceDefinitions) - { - ArgumentNullException.ThrowIfNull(rootDirectory); - ArgumentException.ThrowIfNullOrEmpty(relativePath); - - var jarPath = Path.Combine(rootDirectory, relativePath.Replace('/', Path.DirectorySeparatorChar)); - Directory.CreateDirectory(Path.GetDirectoryName(jarPath)!); - - using var fileStream = new FileStream(jarPath, FileMode.Create, FileAccess.ReadWrite, FileShare.None); - using var archive = new ZipArchive(fileStream, ZipArchiveMode.Create, leaveOpen: false); - - var timestamp = new DateTimeOffset(2024, 01, 01, 0, 0, 0, TimeSpan.Zero); - - foreach (var className in classNames) - { - var entryPath = className.Replace('.', '/') + ".class"; - var entry = archive.CreateEntry(entryPath, CompressionLevel.NoCompression); - entry.LastWriteTime = timestamp; - using var writer = new BinaryWriter(entry.Open(), Encoding.UTF8, leaveOpen: false); - writer.Write(new byte[] { 0xCA, 0xFE, 0xBA, 0xBE }); - } - - if (serviceDefinitions is not null) - { - foreach (var pair in serviceDefinitions) - { - var entryPath = "META-INF/services/" + pair.Key; - var entry = archive.CreateEntry(entryPath, CompressionLevel.NoCompression); - entry.LastWriteTime = timestamp; - using var writer = new StreamWriter(entry.Open(), Encoding.UTF8, leaveOpen: false); - foreach (var provider in pair.Value) - { - writer.WriteLine(provider); - } - } - } - } -} +using System.Collections.Generic; +using System.IO.Compression; +using System.Linq; +using System.Text; +using System.Threading; +using StellaOps.Scanner.Analyzers.Lang.Java.Internal; +using StellaOps.Scanner.Analyzers.Lang.Java.Internal.ClassPath; +using StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities; + +namespace StellaOps.Scanner.Analyzers.Lang.Java.Tests; + +public sealed class JavaClassPathBuilderTests +{ + [Fact] + public void Build_ClassPathForSimpleJar() + { + var root = TestPaths.CreateTemporaryDirectory(); + try + { + JavaFixtureBuilder.CreateSampleJar(root, "libs/simple.jar"); + + var context = new LanguageAnalyzerContext(root, TimeProvider.System); + var workspace = JavaWorkspaceNormalizer.Normalize(context, CancellationToken.None); + var analysis = JavaClassPathBuilder.Build(workspace, CancellationToken.None); + + var segment = Assert.Single(analysis.Segments); + Assert.Equal("libs/simple.jar", segment.Identifier.Replace('\\', '/')); + Assert.Contains("com.example.Demo", segment.Classes); + + var package = Assert.Single(segment.Packages); + Assert.Equal("com.example", package.Key); + Assert.Equal(1, package.Value.ClassCount); + + Assert.Empty(analysis.DuplicateClasses); + Assert.Empty(analysis.SplitPackages); + } + finally + { + TestPaths.SafeDelete(root); + } + } + + [Fact] + public void Build_CapturesServiceDefinitions() + { + var root = TestPaths.CreateTemporaryDirectory(); + try + { + var services = new Dictionary<string, string[]> + { + ["java.sql.Driver"] = new[] { "com.example.DriverImpl" }, + }; + + CreateJarWithClasses(root, "libs/spi.jar", new[] { "com.example.DriverImpl" }, services); + + var context = new LanguageAnalyzerContext(root, TimeProvider.System); + var workspace = JavaWorkspaceNormalizer.Normalize(context, CancellationToken.None); + var analysis = JavaClassPathBuilder.Build(workspace, CancellationToken.None); + + var segment = Assert.Single(analysis.Segments); + var providers = Assert.Single(segment.ServiceDefinitions); + Assert.Equal("java.sql.Driver", providers.Key); + Assert.Contains("com.example.DriverImpl", providers.Value); + } + finally + { + TestPaths.SafeDelete(root); + } + } + + [Fact] + public void 
Build_FatJarIncludesNestedLibraries() + { + var root = TestPaths.CreateTemporaryDirectory(); + try + { + JavaFixtureBuilder.CreateSpringBootFatJar(root, "apps/app-fat.jar"); + + var context = new LanguageAnalyzerContext(root, TimeProvider.System); + var workspace = JavaWorkspaceNormalizer.Normalize(context, CancellationToken.None); + var analysis = JavaClassPathBuilder.Build(workspace, CancellationToken.None); + + Assert.Equal(2, analysis.Segments.Length); + + var classesSegment = analysis.Segments[0]; + Assert.Equal("apps/app-fat.jar!BOOT-INF/classes/", classesSegment.Identifier.Replace('\\', '/')); + Assert.Contains("com.example.App", classesSegment.Classes); + + var librarySegment = analysis.Segments[1]; + Assert.Equal("apps/app-fat.jar!BOOT-INF/lib/library.jar", librarySegment.Identifier.Replace('\\', '/')); + Assert.Contains("com.example.Lib", librarySegment.Classes); + } + finally + { + TestPaths.SafeDelete(root); + } + } + + [Fact] + public void Build_ReportsDuplicateClassesAndSplitPackages() + { + var root = TestPaths.CreateTemporaryDirectory(); + try + { + CreateJarWithClasses(root, "libs/a.jar", "com.example.Demo"); + CreateJarWithClasses(root, "libs/b.jar", "com.example.Demo", "com.example.Other"); + + var context = new LanguageAnalyzerContext(root, TimeProvider.System); + var workspace = JavaWorkspaceNormalizer.Normalize(context, CancellationToken.None); + var analysis = JavaClassPathBuilder.Build(workspace, CancellationToken.None); + + Assert.Equal(2, analysis.Segments.Length); + + var duplicate = Assert.Single(analysis.DuplicateClasses); + Assert.Equal("com.example.Demo", duplicate.ClassName); + Assert.Equal(2, duplicate.SegmentIdentifiers.Length); + + var split = Assert.Single(analysis.SplitPackages); + Assert.Equal("com.example", split.PackageName); + Assert.Equal(2, split.SegmentIdentifiers.Length); + } + finally + { + TestPaths.SafeDelete(root); + } + } + + private static void CreateJarWithClasses(string rootDirectory, string relativePath, params string[] classNames) + => CreateJarWithClasses(rootDirectory, relativePath, classNames.AsEnumerable(), serviceDefinitions: null); + + private static void CreateJarWithClasses( + string rootDirectory, + string relativePath, + IEnumerable<string> classNames, + IDictionary<string, string[]>? 
serviceDefinitions) + { + ArgumentNullException.ThrowIfNull(rootDirectory); + ArgumentException.ThrowIfNullOrEmpty(relativePath); + + var jarPath = Path.Combine(rootDirectory, relativePath.Replace('/', Path.DirectorySeparatorChar)); + Directory.CreateDirectory(Path.GetDirectoryName(jarPath)!); + + using var fileStream = new FileStream(jarPath, FileMode.Create, FileAccess.ReadWrite, FileShare.None); + using var archive = new ZipArchive(fileStream, ZipArchiveMode.Create, leaveOpen: false); + + var timestamp = new DateTimeOffset(2024, 01, 01, 0, 0, 0, TimeSpan.Zero); + + foreach (var className in classNames) + { + var entryPath = className.Replace('.', '/') + ".class"; + var entry = archive.CreateEntry(entryPath, CompressionLevel.NoCompression); + entry.LastWriteTime = timestamp; + using var writer = new BinaryWriter(entry.Open(), Encoding.UTF8, leaveOpen: false); + writer.Write(new byte[] { 0xCA, 0xFE, 0xBA, 0xBE }); + } + + if (serviceDefinitions is not null) + { + foreach (var pair in serviceDefinitions) + { + var entryPath = "META-INF/services/" + pair.Key; + var entry = archive.CreateEntry(entryPath, CompressionLevel.NoCompression); + entry.LastWriteTime = timestamp; + using var writer = new StreamWriter(entry.Open(), Encoding.UTF8, leaveOpen: false); + foreach (var provider in pair.Value) + { + writer.WriteLine(provider); + } + } + } + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Java/JavaLanguageAnalyzerTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Java/JavaLanguageAnalyzerTests.cs similarity index 97% rename from src/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Java/JavaLanguageAnalyzerTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Java/JavaLanguageAnalyzerTests.cs index d1bc27fb..c0f295f2 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Java/JavaLanguageAnalyzerTests.cs +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Java/JavaLanguageAnalyzerTests.cs @@ -1,33 +1,33 @@ -using StellaOps.Scanner.Analyzers.Lang.Java; -using StellaOps.Scanner.Analyzers.Lang.Tests.Harness; -using StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities; - -namespace StellaOps.Scanner.Analyzers.Lang.Java.Tests; - -public sealed class JavaLanguageAnalyzerTests -{ - [Fact] - public async Task ExtractsMavenArtifactFromJarAsync() - { - var cancellationToken = TestContext.Current.CancellationToken; - var root = TestPaths.CreateTemporaryDirectory(); - try - { - var jarPath = JavaFixtureBuilder.CreateSampleJar(root); - var usageHints = new LanguageUsageHints(new[] { jarPath }); - var analyzers = new ILanguageAnalyzer[] { new JavaLanguageAnalyzer() }; - var goldenPath = TestPaths.ResolveFixture("java", "basic", "expected.json"); - - await LanguageAnalyzerTestHarness.AssertDeterministicAsync( - fixturePath: root, - goldenPath: goldenPath, - analyzers: analyzers, - cancellationToken: cancellationToken, - usageHints: usageHints); - } - finally - { - TestPaths.SafeDelete(root); - } - } -} +using StellaOps.Scanner.Analyzers.Lang.Java; +using StellaOps.Scanner.Analyzers.Lang.Tests.Harness; +using StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities; + +namespace StellaOps.Scanner.Analyzers.Lang.Java.Tests; + +public sealed class JavaLanguageAnalyzerTests +{ + [Fact] + public async Task ExtractsMavenArtifactFromJarAsync() + { + var cancellationToken = TestContext.Current.CancellationToken; + var root = TestPaths.CreateTemporaryDirectory(); + try + { + var jarPath = JavaFixtureBuilder.CreateSampleJar(root); + 
var usageHints = new LanguageUsageHints(new[] { jarPath }); + var analyzers = new ILanguageAnalyzer[] { new JavaLanguageAnalyzer() }; + var goldenPath = TestPaths.ResolveFixture("java", "basic", "expected.json"); + + await LanguageAnalyzerTestHarness.AssertDeterministicAsync( + fixturePath: root, + goldenPath: goldenPath, + analyzers: analyzers, + cancellationToken: cancellationToken, + usageHints: usageHints); + } + finally + { + TestPaths.SafeDelete(root); + } + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Java/JavaReflectionAnalyzerTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Java/JavaReflectionAnalyzerTests.cs similarity index 97% rename from src/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Java/JavaReflectionAnalyzerTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Java/JavaReflectionAnalyzerTests.cs index 7ff84e73..eafe3c64 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Java/JavaReflectionAnalyzerTests.cs +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Java/JavaReflectionAnalyzerTests.cs @@ -1,102 +1,102 @@ -using System.IO.Compression; -using System.Threading; -using StellaOps.Scanner.Analyzers.Lang.Java.Internal; -using StellaOps.Scanner.Analyzers.Lang.Java.Internal.ClassPath; -using StellaOps.Scanner.Analyzers.Lang.Java.Internal.Reflection; -using StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities; - -namespace StellaOps.Scanner.Analyzers.Lang.Java.Tests; - -public sealed class JavaReflectionAnalyzerTests -{ - [Fact] - public void Analyze_ClassForNameLiteral_ProducesEdge() - { - var root = TestPaths.CreateTemporaryDirectory(); - try - { - var jarPath = Path.Combine(root, "libs", "reflect.jar"); - Directory.CreateDirectory(Path.GetDirectoryName(jarPath)!); - using (var archive = new ZipArchive(new FileStream(jarPath, FileMode.Create, FileAccess.ReadWrite, FileShare.None), ZipArchiveMode.Create, leaveOpen: false)) - { - var entry = archive.CreateEntry("com/example/Reflective.class"); - var bytes = JavaClassFileFactory.CreateClassForNameInvoker("com/example/Reflective", "com.example.Plugin"); - using var stream = entry.Open(); - stream.Write(bytes); - } - - var cancellationToken = TestContext.Current.CancellationToken; - var context = new LanguageAnalyzerContext(root, TimeProvider.System); - var workspace = JavaWorkspaceNormalizer.Normalize(context, cancellationToken); - var classPath = JavaClassPathBuilder.Build(workspace, cancellationToken); - var analysis = JavaReflectionAnalyzer.Analyze(classPath, cancellationToken); - - var edge = Assert.Single(analysis.Edges); - Assert.Equal("com.example.Reflective", edge.SourceClass); - Assert.Equal("com.example.Plugin", edge.TargetType); - Assert.Equal(JavaReflectionReason.ClassForName, edge.Reason); - Assert.Equal(JavaReflectionConfidence.High, edge.Confidence); - } - finally - { - TestPaths.SafeDelete(root); - } - } - - [Fact] - public void Analyze_TcclUsage_ProducesWarning() - { - var root = TestPaths.CreateTemporaryDirectory(); - try - { - var jarPath = Path.Combine(root, "libs", "tccl.jar"); - Directory.CreateDirectory(Path.GetDirectoryName(jarPath)!); - using (var archive = new ZipArchive(new FileStream(jarPath, FileMode.Create, FileAccess.ReadWrite, FileShare.None), ZipArchiveMode.Create, leaveOpen: false)) - { - var entry = archive.CreateEntry("com/example/Tccl.class"); - var bytes = JavaClassFileFactory.CreateTcclChecker("com/example/Tccl"); - using var stream = entry.Open(); - stream.Write(bytes); - } - - var 
cancellationToken = TestContext.Current.CancellationToken; - var context = new LanguageAnalyzerContext(root, TimeProvider.System); - var workspace = JavaWorkspaceNormalizer.Normalize(context, cancellationToken); - var classPath = JavaClassPathBuilder.Build(workspace, cancellationToken); - var analysis = JavaReflectionAnalyzer.Analyze(classPath, cancellationToken); - - Assert.Empty(analysis.Edges); - var warning = Assert.Single(analysis.Warnings); - Assert.Equal("tccl", warning.WarningCode); - Assert.Equal("com.example.Tccl", warning.SourceClass); - } - finally - { - TestPaths.SafeDelete(root); - } - } - - [Fact] - public void Analyze_SpringBootFatJar_ScansEmbeddedAndBootSegments() - { - var root = TestPaths.CreateTemporaryDirectory(); - try - { - JavaFixtureBuilder.CreateSpringBootFatJar(root, "apps/app-fat.jar"); - - var cancellationToken = TestContext.Current.CancellationToken; - var context = new LanguageAnalyzerContext(root, TimeProvider.System); - var workspace = JavaWorkspaceNormalizer.Normalize(context, cancellationToken); - var classPath = JavaClassPathBuilder.Build(workspace, cancellationToken); - var analysis = JavaReflectionAnalyzer.Analyze(classPath, cancellationToken); - - // Expect at least one edge originating from BOOT-INF classes - Assert.Contains(analysis.Edges, edge => edge.SourceClass == "com.example.App" && edge.Reason == JavaReflectionReason.ClassForName); - Assert.Contains(analysis.Edges, edge => edge.SourceClass == "com.example.Lib" && edge.Reason == JavaReflectionReason.ClassForName); - } - finally - { - TestPaths.SafeDelete(root); - } - } -} +using System.IO.Compression; +using System.Threading; +using StellaOps.Scanner.Analyzers.Lang.Java.Internal; +using StellaOps.Scanner.Analyzers.Lang.Java.Internal.ClassPath; +using StellaOps.Scanner.Analyzers.Lang.Java.Internal.Reflection; +using StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities; + +namespace StellaOps.Scanner.Analyzers.Lang.Java.Tests; + +public sealed class JavaReflectionAnalyzerTests +{ + [Fact] + public void Analyze_ClassForNameLiteral_ProducesEdge() + { + var root = TestPaths.CreateTemporaryDirectory(); + try + { + var jarPath = Path.Combine(root, "libs", "reflect.jar"); + Directory.CreateDirectory(Path.GetDirectoryName(jarPath)!); + using (var archive = new ZipArchive(new FileStream(jarPath, FileMode.Create, FileAccess.ReadWrite, FileShare.None), ZipArchiveMode.Create, leaveOpen: false)) + { + var entry = archive.CreateEntry("com/example/Reflective.class"); + var bytes = JavaClassFileFactory.CreateClassForNameInvoker("com/example/Reflective", "com.example.Plugin"); + using var stream = entry.Open(); + stream.Write(bytes); + } + + var cancellationToken = TestContext.Current.CancellationToken; + var context = new LanguageAnalyzerContext(root, TimeProvider.System); + var workspace = JavaWorkspaceNormalizer.Normalize(context, cancellationToken); + var classPath = JavaClassPathBuilder.Build(workspace, cancellationToken); + var analysis = JavaReflectionAnalyzer.Analyze(classPath, cancellationToken); + + var edge = Assert.Single(analysis.Edges); + Assert.Equal("com.example.Reflective", edge.SourceClass); + Assert.Equal("com.example.Plugin", edge.TargetType); + Assert.Equal(JavaReflectionReason.ClassForName, edge.Reason); + Assert.Equal(JavaReflectionConfidence.High, edge.Confidence); + } + finally + { + TestPaths.SafeDelete(root); + } + } + + [Fact] + public void Analyze_TcclUsage_ProducesWarning() + { + var root = TestPaths.CreateTemporaryDirectory(); + try + { + var jarPath = Path.Combine(root, "libs", 
"tccl.jar"); + Directory.CreateDirectory(Path.GetDirectoryName(jarPath)!); + using (var archive = new ZipArchive(new FileStream(jarPath, FileMode.Create, FileAccess.ReadWrite, FileShare.None), ZipArchiveMode.Create, leaveOpen: false)) + { + var entry = archive.CreateEntry("com/example/Tccl.class"); + var bytes = JavaClassFileFactory.CreateTcclChecker("com/example/Tccl"); + using var stream = entry.Open(); + stream.Write(bytes); + } + + var cancellationToken = TestContext.Current.CancellationToken; + var context = new LanguageAnalyzerContext(root, TimeProvider.System); + var workspace = JavaWorkspaceNormalizer.Normalize(context, cancellationToken); + var classPath = JavaClassPathBuilder.Build(workspace, cancellationToken); + var analysis = JavaReflectionAnalyzer.Analyze(classPath, cancellationToken); + + Assert.Empty(analysis.Edges); + var warning = Assert.Single(analysis.Warnings); + Assert.Equal("tccl", warning.WarningCode); + Assert.Equal("com.example.Tccl", warning.SourceClass); + } + finally + { + TestPaths.SafeDelete(root); + } + } + + [Fact] + public void Analyze_SpringBootFatJar_ScansEmbeddedAndBootSegments() + { + var root = TestPaths.CreateTemporaryDirectory(); + try + { + JavaFixtureBuilder.CreateSpringBootFatJar(root, "apps/app-fat.jar"); + + var cancellationToken = TestContext.Current.CancellationToken; + var context = new LanguageAnalyzerContext(root, TimeProvider.System); + var workspace = JavaWorkspaceNormalizer.Normalize(context, cancellationToken); + var classPath = JavaClassPathBuilder.Build(workspace, cancellationToken); + var analysis = JavaReflectionAnalyzer.Analyze(classPath, cancellationToken); + + // Expect at least one edge originating from BOOT-INF classes + Assert.Contains(analysis.Edges, edge => edge.SourceClass == "com.example.App" && edge.Reason == JavaReflectionReason.ClassForName); + Assert.Contains(analysis.Edges, edge => edge.SourceClass == "com.example.Lib" && edge.Reason == JavaReflectionReason.ClassForName); + } + finally + { + TestPaths.SafeDelete(root); + } + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Java/JavaServiceProviderScannerTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Java/JavaServiceProviderScannerTests.cs similarity index 97% rename from src/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Java/JavaServiceProviderScannerTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Java/JavaServiceProviderScannerTests.cs index 5e4dd533..b8bc88a4 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Java/JavaServiceProviderScannerTests.cs +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Java/JavaServiceProviderScannerTests.cs @@ -1,147 +1,147 @@ -using System.Collections.Generic; -using System.IO.Compression; -using System.Linq; -using System.Text; -using System.Threading; -using StellaOps.Scanner.Analyzers.Lang.Java.Internal; -using StellaOps.Scanner.Analyzers.Lang.Java.Internal.ClassPath; -using StellaOps.Scanner.Analyzers.Lang.Java.Internal.ServiceProviders; -using StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities; -using Xunit; - -namespace StellaOps.Scanner.Analyzers.Lang.Java.Tests; - -public sealed class JavaServiceProviderScannerTests -{ - [Fact] - public void Scan_SelectsFirstProviderByClasspathOrder() - { - var root = TestPaths.CreateTemporaryDirectory(); - try - { - var servicesA = new Dictionary<string, string[]> - { - ["java.sql.Driver"] = new[] { "com.example.ADriver" }, - }; - - var servicesB = new Dictionary<string, string[]> - { 
-                ["java.sql.Driver"] = new[] { "com.example.BDriver" },
-            };
-
-            CreateJarWithClasses(root, "libs/a.jar", new[] { "com.example.ADriver" }, servicesA);
-            CreateJarWithClasses(root, "libs/b.jar", new[] { "com.example.BDriver" }, servicesB);
-
-            var cancellationToken = TestContext.Current.CancellationToken;
-            var context = new LanguageAnalyzerContext(root, TimeProvider.System);
-            var workspace = JavaWorkspaceNormalizer.Normalize(context, cancellationToken);
-            var classPath = JavaClassPathBuilder.Build(workspace, cancellationToken);
-            var analysis = JavaServiceProviderScanner.Scan(classPath, JavaSpiCatalog.Default, cancellationToken);
-
-            var service = Assert.Single(analysis.Services, record => record.ServiceId == "java.sql.Driver");
-            Assert.Equal("jdk", service.Category);
-
-            var selected = Assert.Single(service.Candidates.Where(candidate => candidate.IsSelected));
-            Assert.Equal("com.example.ADriver", selected.ProviderClass);
-            Assert.Empty(service.Warnings);
-        }
-        finally
-        {
-            TestPaths.SafeDelete(root);
-        }
-    }
-
-    [Fact]
-    public void Scan_FlagsDuplicateProviders()
-    {
-        var root = TestPaths.CreateTemporaryDirectory();
-        try
-        {
-            var services = new Dictionary<string, string[]>
-            {
-                ["java.sql.Driver"] = new[] { "com.example.DuplicateDriver" },
-            };
-
-            CreateJarWithClasses(root, "libs/a.jar", new[] { "com.example.DuplicateDriver" }, services);
-            CreateJarWithClasses(root, "libs/b.jar", new[] { "com.example.Other" }, services);
-
-            var cancellationToken = TestContext.Current.CancellationToken;
-            var context = new LanguageAnalyzerContext(root, TimeProvider.System);
-            var workspace = JavaWorkspaceNormalizer.Normalize(context, cancellationToken);
-            var classPath = JavaClassPathBuilder.Build(workspace, cancellationToken);
-            var analysis = JavaServiceProviderScanner.Scan(classPath, JavaSpiCatalog.Default, cancellationToken);
-
-            var service = Assert.Single(analysis.Services, record => record.ServiceId == "java.sql.Driver");
-            Assert.NotEmpty(service.Warnings);
-            Assert.Contains(service.Warnings, warning => warning.Contains("duplicate-provider", StringComparison.OrdinalIgnoreCase));
-        }
-        finally
-        {
-            TestPaths.SafeDelete(root);
-        }
-    }
-
-    [Fact]
-    public void Scan_RespectsBootFatJarOrdering()
-    {
-        var root = TestPaths.CreateTemporaryDirectory();
-        try
-        {
-            JavaFixtureBuilder.CreateSpringBootFatJar(root, "apps/app-fat.jar");
-
-            var cancellationToken = TestContext.Current.CancellationToken;
-            var context = new LanguageAnalyzerContext(root, TimeProvider.System);
-            var workspace = JavaWorkspaceNormalizer.Normalize(context, cancellationToken);
-            var classPath = JavaClassPathBuilder.Build(workspace, cancellationToken);
-            var analysis = JavaServiceProviderScanner.Scan(classPath, JavaSpiCatalog.Default, cancellationToken);
-
-            var service = Assert.Single(analysis.Services, record => record.ServiceId == "java.sql.Driver");
-            var selected = Assert.Single(service.Candidates.Where(candidate => candidate.IsSelected));
-            Assert.Equal("com.example.AppDriver", selected.ProviderClass);
-            Assert.Contains(service.Candidates.Select(candidate => candidate.ProviderClass), provider => provider == "com.example.LibDriver");
-        }
-        finally
-        {
-            TestPaths.SafeDelete(root);
-        }
-    }
-
-    private static void CreateJarWithClasses(
-        string rootDirectory,
-        string relativePath,
-        IEnumerable<string> classNames,
-        IDictionary<string, string[]> serviceDefinitions)
-    {
-        ArgumentNullException.ThrowIfNull(rootDirectory);
-        ArgumentException.ThrowIfNullOrEmpty(relativePath);
-
-        var jarPath = Path.Combine(rootDirectory, relativePath.Replace('/', Path.DirectorySeparatorChar));
-        Directory.CreateDirectory(Path.GetDirectoryName(jarPath)!);
-
-        using var fileStream = new FileStream(jarPath, FileMode.Create, FileAccess.ReadWrite, FileShare.None);
-        using var archive = new ZipArchive(fileStream, ZipArchiveMode.Create, leaveOpen: false);
-
-        var timestamp = new DateTimeOffset(2024, 01, 01, 0, 0, 0, TimeSpan.Zero);
-
-        foreach (var className in classNames)
-        {
-            var entryPath = className.Replace('.', '/') + ".class";
-            var entry = archive.CreateEntry(entryPath, CompressionLevel.NoCompression);
-            entry.LastWriteTime = timestamp;
-            using var writer = new BinaryWriter(entry.Open(), Encoding.UTF8, leaveOpen: false);
-            writer.Write(new byte[] { 0xCA, 0xFE, 0xBA, 0xBE });
-        }
-
-        foreach (var pair in serviceDefinitions)
-        {
-            var entryPath = "META-INF/services/" + pair.Key;
-            var entry = archive.CreateEntry(entryPath, CompressionLevel.NoCompression);
-            entry.LastWriteTime = timestamp;
-            using var writer = new StreamWriter(entry.Open(), Encoding.UTF8, leaveOpen: false);
-            foreach (var provider in pair.Value)
-            {
-                writer.WriteLine(provider);
-            }
-        }
-    }
-}
+using System.Collections.Generic;
+using System.IO.Compression;
+using System.Linq;
+using System.Text;
+using System.Threading;
+using StellaOps.Scanner.Analyzers.Lang.Java.Internal;
+using StellaOps.Scanner.Analyzers.Lang.Java.Internal.ClassPath;
+using StellaOps.Scanner.Analyzers.Lang.Java.Internal.ServiceProviders;
+using StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities;
+using Xunit;
+
+namespace StellaOps.Scanner.Analyzers.Lang.Java.Tests;
+
+public sealed class JavaServiceProviderScannerTests
+{
+    [Fact]
+    public void Scan_SelectsFirstProviderByClasspathOrder()
+    {
+        var root = TestPaths.CreateTemporaryDirectory();
+        try
+        {
+            var servicesA = new Dictionary<string, string[]>
+            {
+                ["java.sql.Driver"] = new[] { "com.example.ADriver" },
+            };
+
+            var servicesB = new Dictionary<string, string[]>
+            {
+                ["java.sql.Driver"] = new[] { "com.example.BDriver" },
+            };
+
+            CreateJarWithClasses(root, "libs/a.jar", new[] { "com.example.ADriver" }, servicesA);
+            CreateJarWithClasses(root, "libs/b.jar", new[] { "com.example.BDriver" }, servicesB);
+
+            var cancellationToken = TestContext.Current.CancellationToken;
+            var context = new LanguageAnalyzerContext(root, TimeProvider.System);
+            var workspace = JavaWorkspaceNormalizer.Normalize(context, cancellationToken);
+            var classPath = JavaClassPathBuilder.Build(workspace, cancellationToken);
+            var analysis = JavaServiceProviderScanner.Scan(classPath, JavaSpiCatalog.Default, cancellationToken);
+
+            var service = Assert.Single(analysis.Services, record => record.ServiceId == "java.sql.Driver");
+            Assert.Equal("jdk", service.Category);
+
+            var selected = Assert.Single(service.Candidates.Where(candidate => candidate.IsSelected));
+            Assert.Equal("com.example.ADriver", selected.ProviderClass);
+            Assert.Empty(service.Warnings);
+        }
+        finally
+        {
+            TestPaths.SafeDelete(root);
+        }
+    }
+
+    [Fact]
+    public void Scan_FlagsDuplicateProviders()
+    {
+        var root = TestPaths.CreateTemporaryDirectory();
+        try
+        {
+            var services = new Dictionary<string, string[]>
+            {
+                ["java.sql.Driver"] = new[] { "com.example.DuplicateDriver" },
+            };
+
+            CreateJarWithClasses(root, "libs/a.jar", new[] { "com.example.DuplicateDriver" }, services);
+            CreateJarWithClasses(root, "libs/b.jar", new[] { "com.example.Other" }, services);
+
+            var cancellationToken = TestContext.Current.CancellationToken;
+            var context = new LanguageAnalyzerContext(root, TimeProvider.System);
+            var workspace = JavaWorkspaceNormalizer.Normalize(context, cancellationToken);
+            var classPath = JavaClassPathBuilder.Build(workspace, cancellationToken);
+            var analysis = JavaServiceProviderScanner.Scan(classPath, JavaSpiCatalog.Default, cancellationToken);
+
+            var service = Assert.Single(analysis.Services, record => record.ServiceId == "java.sql.Driver");
+            Assert.NotEmpty(service.Warnings);
+            Assert.Contains(service.Warnings, warning => warning.Contains("duplicate-provider", StringComparison.OrdinalIgnoreCase));
+        }
+        finally
+        {
+            TestPaths.SafeDelete(root);
+        }
+    }
+
+    [Fact]
+    public void Scan_RespectsBootFatJarOrdering()
+    {
+        var root = TestPaths.CreateTemporaryDirectory();
+        try
+        {
+            JavaFixtureBuilder.CreateSpringBootFatJar(root, "apps/app-fat.jar");
+
+            var cancellationToken = TestContext.Current.CancellationToken;
+            var context = new LanguageAnalyzerContext(root, TimeProvider.System);
+            var workspace = JavaWorkspaceNormalizer.Normalize(context, cancellationToken);
+            var classPath = JavaClassPathBuilder.Build(workspace, cancellationToken);
+            var analysis = JavaServiceProviderScanner.Scan(classPath, JavaSpiCatalog.Default, cancellationToken);
+
+            var service = Assert.Single(analysis.Services, record => record.ServiceId == "java.sql.Driver");
+            var selected = Assert.Single(service.Candidates.Where(candidate => candidate.IsSelected));
+            Assert.Equal("com.example.AppDriver", selected.ProviderClass);
+            Assert.Contains(service.Candidates.Select(candidate => candidate.ProviderClass), provider => provider == "com.example.LibDriver");
+        }
+        finally
+        {
+            TestPaths.SafeDelete(root);
+        }
+    }
+
+    private static void CreateJarWithClasses(
+        string rootDirectory,
+        string relativePath,
+        IEnumerable<string> classNames,
+        IDictionary<string, string[]> serviceDefinitions)
+    {
+        ArgumentNullException.ThrowIfNull(rootDirectory);
+        ArgumentException.ThrowIfNullOrEmpty(relativePath);
+
+        var jarPath = Path.Combine(rootDirectory, relativePath.Replace('/', Path.DirectorySeparatorChar));
+        Directory.CreateDirectory(Path.GetDirectoryName(jarPath)!);
+
+        using var fileStream = new FileStream(jarPath, FileMode.Create, FileAccess.ReadWrite, FileShare.None);
+        using var archive = new ZipArchive(fileStream, ZipArchiveMode.Create, leaveOpen: false);
+
+        var timestamp = new DateTimeOffset(2024, 01, 01, 0, 0, 0, TimeSpan.Zero);
+
+        foreach (var className in classNames)
+        {
+            var entryPath = className.Replace('.', '/') + ".class";
+            var entry = archive.CreateEntry(entryPath, CompressionLevel.NoCompression);
+            entry.LastWriteTime = timestamp;
+            using var writer = new BinaryWriter(entry.Open(), Encoding.UTF8, leaveOpen: false);
+            writer.Write(new byte[] { 0xCA, 0xFE, 0xBA, 0xBE });
+        }
+
+        foreach (var pair in serviceDefinitions)
+        {
+            var entryPath = "META-INF/services/" + pair.Key;
+            var entry = archive.CreateEntry(entryPath, CompressionLevel.NoCompression);
+            entry.LastWriteTime = timestamp;
+            using var writer = new StreamWriter(entry.Open(), Encoding.UTF8, leaveOpen: false);
+            foreach (var provider in pair.Value)
+            {
+                writer.WriteLine(provider);
+            }
+        }
+    }
+}
diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Java/JavaWorkspaceNormalizerTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Java/JavaWorkspaceNormalizerTests.cs
similarity index 97%
rename from src/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Java/JavaWorkspaceNormalizerTests.cs
rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Java/JavaWorkspaceNormalizerTests.cs
index b84b1672..23dd410b 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Java/JavaWorkspaceNormalizerTests.cs +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Java.Tests/Java/JavaWorkspaceNormalizerTests.cs @@ -1,93 +1,93 @@ -using System.Linq; -using System.Threading; -using StellaOps.Scanner.Analyzers.Lang.Java.Internal; -using StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities; - -namespace StellaOps.Scanner.Analyzers.Lang.Java.Tests; - -public sealed class JavaWorkspaceNormalizerTests -{ - [Fact] - public void Normalize_ClassifiesPackagingAndLayers() - { - var root = TestPaths.CreateTemporaryDirectory(); - try - { - JavaFixtureBuilder.CreateSampleJar(root, "libs/simple.jar"); - JavaFixtureBuilder.CreateSpringBootFatJar(root, "libs/app-fat.jar"); - JavaFixtureBuilder.CreateWarArchive(root, "apps/sample.war"); - - var context = new LanguageAnalyzerContext(root, TimeProvider.System); - var workspace = JavaWorkspaceNormalizer.Normalize(context, CancellationToken.None); - - var archivesByPath = workspace.Archives.ToDictionary( - archive => archive.RelativePath.Replace('\\', '/'), - archive => archive, - StringComparer.Ordinal); - - var simpleJar = Assert.Contains("libs/simple.jar", archivesByPath); - Assert.Equal(JavaPackagingKind.Jar, simpleJar.Packaging); - Assert.Empty(simpleJar.LayeredDirectories); - - var fatJar = Assert.Contains("libs/app-fat.jar", archivesByPath); - Assert.Equal(JavaPackagingKind.SpringBootFatJar, fatJar.Packaging); - Assert.Contains("BOOT-INF", fatJar.LayeredDirectories); - - var war = Assert.Contains("apps/sample.war", archivesByPath); - Assert.Equal(JavaPackagingKind.War, war.Packaging); - Assert.Contains("WEB-INF", war.LayeredDirectories); - } - finally - { - TestPaths.SafeDelete(root); - } - } - - [Fact] - public void Normalize_SelectsMultiReleaseOverlay() - { - var root = TestPaths.CreateTemporaryDirectory(); - try - { - JavaFixtureBuilder.CreateMultiReleaseJar(root, "libs/mr.jar"); - - var context = new LanguageAnalyzerContext(root, TimeProvider.System); - var workspace = JavaWorkspaceNormalizer.Normalize(context, CancellationToken.None); - - var archive = Assert.Single(workspace.Archives); - - Assert.True(archive.IsMultiRelease); - Assert.False(archive.HasModuleInfo); - - Assert.True(archive.TryGetEntry("com/example/App.class", out var entry)); - Assert.Equal(11, entry.Version); - Assert.Equal("META-INF/versions/11/com/example/App.class", entry.OriginalPath.Replace('\\', '/')); - } - finally - { - TestPaths.SafeDelete(root); - } - } - - [Fact] - public void Normalize_DetectsRuntimeImageMetadata() - { - var root = TestPaths.CreateTemporaryDirectory(); - try - { - JavaFixtureBuilder.CreateRuntimeImage(root, "runtime/jre"); - - var context = new LanguageAnalyzerContext(root, TimeProvider.System); - var workspace = JavaWorkspaceNormalizer.Normalize(context, CancellationToken.None); - - var runtime = Assert.Single(workspace.RuntimeImages); - Assert.Equal("17.0.8", runtime.JavaVersion); - Assert.Equal("Eclipse Adoptium", runtime.Vendor); - Assert.Equal("runtime/jre", runtime.RelativePath.Replace('\\', '/')); - } - finally - { - TestPaths.SafeDelete(root); - } - } -} +using System.Linq; +using System.Threading; +using StellaOps.Scanner.Analyzers.Lang.Java.Internal; +using StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities; + +namespace StellaOps.Scanner.Analyzers.Lang.Java.Tests; + +public sealed class JavaWorkspaceNormalizerTests +{ + [Fact] + public void Normalize_ClassifiesPackagingAndLayers() + { + var root = 
TestPaths.CreateTemporaryDirectory(); + try + { + JavaFixtureBuilder.CreateSampleJar(root, "libs/simple.jar"); + JavaFixtureBuilder.CreateSpringBootFatJar(root, "libs/app-fat.jar"); + JavaFixtureBuilder.CreateWarArchive(root, "apps/sample.war"); + + var context = new LanguageAnalyzerContext(root, TimeProvider.System); + var workspace = JavaWorkspaceNormalizer.Normalize(context, CancellationToken.None); + + var archivesByPath = workspace.Archives.ToDictionary( + archive => archive.RelativePath.Replace('\\', '/'), + archive => archive, + StringComparer.Ordinal); + + var simpleJar = Assert.Contains("libs/simple.jar", archivesByPath); + Assert.Equal(JavaPackagingKind.Jar, simpleJar.Packaging); + Assert.Empty(simpleJar.LayeredDirectories); + + var fatJar = Assert.Contains("libs/app-fat.jar", archivesByPath); + Assert.Equal(JavaPackagingKind.SpringBootFatJar, fatJar.Packaging); + Assert.Contains("BOOT-INF", fatJar.LayeredDirectories); + + var war = Assert.Contains("apps/sample.war", archivesByPath); + Assert.Equal(JavaPackagingKind.War, war.Packaging); + Assert.Contains("WEB-INF", war.LayeredDirectories); + } + finally + { + TestPaths.SafeDelete(root); + } + } + + [Fact] + public void Normalize_SelectsMultiReleaseOverlay() + { + var root = TestPaths.CreateTemporaryDirectory(); + try + { + JavaFixtureBuilder.CreateMultiReleaseJar(root, "libs/mr.jar"); + + var context = new LanguageAnalyzerContext(root, TimeProvider.System); + var workspace = JavaWorkspaceNormalizer.Normalize(context, CancellationToken.None); + + var archive = Assert.Single(workspace.Archives); + + Assert.True(archive.IsMultiRelease); + Assert.False(archive.HasModuleInfo); + + Assert.True(archive.TryGetEntry("com/example/App.class", out var entry)); + Assert.Equal(11, entry.Version); + Assert.Equal("META-INF/versions/11/com/example/App.class", entry.OriginalPath.Replace('\\', '/')); + } + finally + { + TestPaths.SafeDelete(root); + } + } + + [Fact] + public void Normalize_DetectsRuntimeImageMetadata() + { + var root = TestPaths.CreateTemporaryDirectory(); + try + { + JavaFixtureBuilder.CreateRuntimeImage(root, "runtime/jre"); + + var context = new LanguageAnalyzerContext(root, TimeProvider.System); + var workspace = JavaWorkspaceNormalizer.Normalize(context, CancellationToken.None); + + var runtime = Assert.Single(workspace.RuntimeImages); + Assert.Equal("17.0.8", runtime.JavaVersion); + Assert.Equal("Eclipse Adoptium", runtime.Vendor); + Assert.Equal("runtime/jre", runtime.RelativePath.Replace('\\', '/')); + } + finally + { + TestPaths.SafeDelete(root); + } + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Java.Tests/StellaOps.Scanner.Analyzers.Lang.Java.Tests.csproj b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Java.Tests/StellaOps.Scanner.Analyzers.Lang.Java.Tests.csproj similarity index 80% rename from src/StellaOps.Scanner.Analyzers.Lang.Java.Tests/StellaOps.Scanner.Analyzers.Lang.Java.Tests.csproj rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Java.Tests/StellaOps.Scanner.Analyzers.Lang.Java.Tests.csproj index 1c3eee38..10ef9574 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Java.Tests/StellaOps.Scanner.Analyzers.Lang.Java.Tests.csproj +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Java.Tests/StellaOps.Scanner.Analyzers.Lang.Java.Tests.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -30,9 +31,9 @@ <ItemGroup> <ProjectReference 
Include="..\StellaOps.Scanner.Analyzers.Lang.Tests\StellaOps.Scanner.Analyzers.Lang.Tests.csproj" /> - <ProjectReference Include="..\StellaOps.Scanner.Analyzers.Lang\StellaOps.Scanner.Analyzers.Lang.csproj" /> - <ProjectReference Include="..\StellaOps.Scanner.Analyzers.Lang.Java\StellaOps.Scanner.Analyzers.Lang.Java.csproj" /> - <ProjectReference Include="..\StellaOps.Scanner.Core\StellaOps.Scanner.Core.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Scanner.Analyzers.Lang/StellaOps.Scanner.Analyzers.Lang.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Scanner.Analyzers.Lang.Java/StellaOps.Scanner.Analyzers.Lang.Java.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Scanner.Core/StellaOps.Scanner.Core.csproj" /> </ItemGroup> <ItemGroup> @@ -42,4 +43,4 @@ <ItemGroup> <Using Include="Xunit" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/expected.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/expected.json similarity index 96% rename from src/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/expected.json rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/expected.json index 15ff8223..e7335182 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/expected.json +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/expected.json @@ -1,134 +1,134 @@ -[ - { - "analyzerId": "node", - "componentKey": "purl::pkg:npm/left-pad@1.3.0", - "purl": "pkg:npm/left-pad@1.3.0", - "name": "left-pad", - "version": "1.3.0", - "type": "npm", - "usedByEntrypoint": false, - "metadata": { - "integrity": "sha512-LEFTPAD", - "path": "packages/app/node_modules/left-pad", - "resolved": "https://registry.example/left-pad-1.3.0.tgz" - }, - "evidence": [ - { - "kind": "file", - "source": "package.json", - "locator": "packages/app/node_modules/left-pad/package.json" - } - ] - }, - { - "analyzerId": "node", - "componentKey": "purl::pkg:npm/lib@2.0.1", - "purl": "pkg:npm/lib@2.0.1", - "name": "lib", - "version": "2.0.1", - "type": "npm", - "usedByEntrypoint": false, - "metadata": { - "integrity": "sha512-LIB", - "path": "packages/lib", - "resolved": "https://registry.example/lib-2.0.1.tgz", - "workspaceLink": "packages/app/node_modules/lib", - "workspaceMember": "true", - "workspaceRoot": "packages/lib" - }, - "evidence": [ - { - "kind": "file", - "source": "package.json", - "locator": "packages/app/node_modules/lib/package.json" - }, - { - "kind": "file", - "source": "package.json", - "locator": "packages/lib/package.json" - } - ] - }, - { - "analyzerId": "node", - "componentKey": "purl::pkg:npm/root-workspace@1.0.0", - "purl": "pkg:npm/root-workspace@1.0.0", - "name": "root-workspace", - "version": "1.0.0", - "type": "npm", - "usedByEntrypoint": false, - "metadata": { - "path": ".", - "private": "true" - }, - "evidence": [ - { - "kind": "file", - "source": "package.json", - "locator": "package.json" - } - ] - }, - { - "analyzerId": "node", - "componentKey": "purl::pkg:npm/shared@3.1.4", - "purl": "pkg:npm/shared@3.1.4", - "name": "shared", - "version": "3.1.4", - "type": "npm", - "usedByEntrypoint": false, - "metadata": { - "integrity": "sha512-SHARED", - "path": "packages/shared", - "resolved": "https://registry.example/shared-3.1.4.tgz", - 
"workspaceLink": "packages/app/node_modules/shared", - "workspaceMember": "true", - "workspaceRoot": "packages/shared", - "workspaceTargets": "packages/lib" - }, - "evidence": [ - { - "kind": "file", - "source": "package.json", - "locator": "packages/app/node_modules/shared/package.json" - }, - { - "kind": "file", - "source": "package.json", - "locator": "packages/shared/package.json" - } - ] - }, - { - "analyzerId": "node", - "componentKey": "purl::pkg:npm/workspace-app@1.0.0", - "purl": "pkg:npm/workspace-app@1.0.0", - "name": "workspace-app", - "version": "1.0.0", - "type": "npm", - "usedByEntrypoint": false, - "metadata": { - "installScripts": "true", - "path": "packages/app", - "policyHint.installLifecycle": "postinstall", - "script.postinstall": "node scripts/setup.js", - "workspaceMember": "true", - "workspaceRoot": "packages/app", - "workspaceTargets": "packages/lib;packages/shared" - }, - "evidence": [ - { - "kind": "file", - "source": "package.json", - "locator": "packages/app/package.json" - }, - { - "kind": "metadata", - "source": "package.json:scripts", - "locator": "packages/app/package.json#scripts.postinstall", - "value": "node scripts/setup.js", - "sha256": "f9ae4e4c9313857d1acc31947cee9984232cbefe93c8a56c718804744992728a" - } - ] - } -] +[ + { + "analyzerId": "node", + "componentKey": "purl::pkg:npm/left-pad@1.3.0", + "purl": "pkg:npm/left-pad@1.3.0", + "name": "left-pad", + "version": "1.3.0", + "type": "npm", + "usedByEntrypoint": false, + "metadata": { + "integrity": "sha512-LEFTPAD", + "path": "packages/app/node_modules/left-pad", + "resolved": "https://registry.example/left-pad-1.3.0.tgz" + }, + "evidence": [ + { + "kind": "file", + "source": "package.json", + "locator": "packages/app/node_modules/left-pad/package.json" + } + ] + }, + { + "analyzerId": "node", + "componentKey": "purl::pkg:npm/lib@2.0.1", + "purl": "pkg:npm/lib@2.0.1", + "name": "lib", + "version": "2.0.1", + "type": "npm", + "usedByEntrypoint": false, + "metadata": { + "integrity": "sha512-LIB", + "path": "packages/lib", + "resolved": "https://registry.example/lib-2.0.1.tgz", + "workspaceLink": "packages/app/node_modules/lib", + "workspaceMember": "true", + "workspaceRoot": "packages/lib" + }, + "evidence": [ + { + "kind": "file", + "source": "package.json", + "locator": "packages/app/node_modules/lib/package.json" + }, + { + "kind": "file", + "source": "package.json", + "locator": "packages/lib/package.json" + } + ] + }, + { + "analyzerId": "node", + "componentKey": "purl::pkg:npm/root-workspace@1.0.0", + "purl": "pkg:npm/root-workspace@1.0.0", + "name": "root-workspace", + "version": "1.0.0", + "type": "npm", + "usedByEntrypoint": false, + "metadata": { + "path": ".", + "private": "true" + }, + "evidence": [ + { + "kind": "file", + "source": "package.json", + "locator": "package.json" + } + ] + }, + { + "analyzerId": "node", + "componentKey": "purl::pkg:npm/shared@3.1.4", + "purl": "pkg:npm/shared@3.1.4", + "name": "shared", + "version": "3.1.4", + "type": "npm", + "usedByEntrypoint": false, + "metadata": { + "integrity": "sha512-SHARED", + "path": "packages/shared", + "resolved": "https://registry.example/shared-3.1.4.tgz", + "workspaceLink": "packages/app/node_modules/shared", + "workspaceMember": "true", + "workspaceRoot": "packages/shared", + "workspaceTargets": "packages/lib" + }, + "evidence": [ + { + "kind": "file", + "source": "package.json", + "locator": "packages/app/node_modules/shared/package.json" + }, + { + "kind": "file", + "source": "package.json", + "locator": 
"packages/shared/package.json" + } + ] + }, + { + "analyzerId": "node", + "componentKey": "purl::pkg:npm/workspace-app@1.0.0", + "purl": "pkg:npm/workspace-app@1.0.0", + "name": "workspace-app", + "version": "1.0.0", + "type": "npm", + "usedByEntrypoint": false, + "metadata": { + "installScripts": "true", + "path": "packages/app", + "policyHint.installLifecycle": "postinstall", + "script.postinstall": "node scripts/setup.js", + "workspaceMember": "true", + "workspaceRoot": "packages/app", + "workspaceTargets": "packages/lib;packages/shared" + }, + "evidence": [ + { + "kind": "file", + "source": "package.json", + "locator": "packages/app/package.json" + }, + { + "kind": "metadata", + "source": "package.json:scripts", + "locator": "packages/app/package.json#scripts.postinstall", + "value": "node scripts/setup.js", + "sha256": "f9ae4e4c9313857d1acc31947cee9984232cbefe93c8a56c718804744992728a" + } + ] + } +] diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/package-lock.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/package-lock.json similarity index 96% rename from src/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/package-lock.json rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/package-lock.json index 435ce6bb..84abb8da 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/package-lock.json +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/package-lock.json @@ -1,49 +1,49 @@ -{ - "name": "root-workspace", - "version": "1.0.0", - "lockfileVersion": 3, - "packages": { - "": { - "name": "root-workspace", - "version": "1.0.0", - "private": true, - "workspaces": [ - "packages/*" - ] - }, - "packages/app": { - "name": "workspace-app", - "version": "1.0.0" - }, - "packages/lib": { - "name": "lib", - "version": "2.0.1", - "resolved": "https://registry.example/lib-2.0.1.tgz", - "integrity": "sha512-LIB" - }, - "packages/shared": { - "name": "shared", - "version": "3.1.4", - "resolved": "https://registry.example/shared-3.1.4.tgz", - "integrity": "sha512-SHARED" - }, - "packages/app/node_modules/lib": { - "name": "lib", - "version": "2.0.1", - "resolved": "https://registry.example/lib-2.0.1.tgz", - "integrity": "sha512-LIB" - }, - "packages/app/node_modules/shared": { - "name": "shared", - "version": "3.1.4", - "resolved": "https://registry.example/shared-3.1.4.tgz", - "integrity": "sha512-SHARED" - }, - "packages/app/node_modules/left-pad": { - "name": "left-pad", - "version": "1.3.0", - "resolved": "https://registry.example/left-pad-1.3.0.tgz", - "integrity": "sha512-LEFTPAD" - } - } -} +{ + "name": "root-workspace", + "version": "1.0.0", + "lockfileVersion": 3, + "packages": { + "": { + "name": "root-workspace", + "version": "1.0.0", + "private": true, + "workspaces": [ + "packages/*" + ] + }, + "packages/app": { + "name": "workspace-app", + "version": "1.0.0" + }, + "packages/lib": { + "name": "lib", + "version": "2.0.1", + "resolved": "https://registry.example/lib-2.0.1.tgz", + "integrity": "sha512-LIB" + }, + "packages/shared": { + "name": "shared", + "version": "3.1.4", + "resolved": "https://registry.example/shared-3.1.4.tgz", + "integrity": "sha512-SHARED" + }, + "packages/app/node_modules/lib": { + "name": "lib", + "version": "2.0.1", + "resolved": "https://registry.example/lib-2.0.1.tgz", + "integrity": "sha512-LIB" + }, + 
"packages/app/node_modules/shared": { + "name": "shared", + "version": "3.1.4", + "resolved": "https://registry.example/shared-3.1.4.tgz", + "integrity": "sha512-SHARED" + }, + "packages/app/node_modules/left-pad": { + "name": "left-pad", + "version": "1.3.0", + "resolved": "https://registry.example/left-pad-1.3.0.tgz", + "integrity": "sha512-LEFTPAD" + } + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/package.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/package.json similarity index 94% rename from src/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/package.json rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/package.json index 9fef5fcd..03fded30 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/package.json +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/package.json @@ -1,10 +1,10 @@ -{ - "name": "root-workspace", - "version": "1.0.0", - "private": true, - "workspaces": [ - "packages/app", - "packages/lib", - "packages/shared" - ] -} +{ + "name": "root-workspace", + "version": "1.0.0", + "private": true, + "workspaces": [ + "packages/app", + "packages/lib", + "packages/shared" + ] +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/app/package.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/app/package.json similarity index 94% rename from src/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/app/package.json rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/app/package.json index 42c39700..191c13ae 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/app/package.json +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/app/package.json @@ -1,11 +1,11 @@ -{ - "name": "workspace-app", - "version": "1.0.0", - "dependencies": { - "lib": "workspace:../lib", - "shared": "workspace:../shared" - }, - "scripts": { - "postinstall": "node scripts/setup.js" - } -} +{ + "name": "workspace-app", + "version": "1.0.0", + "dependencies": { + "lib": "workspace:../lib", + "shared": "workspace:../shared" + }, + "scripts": { + "postinstall": "node scripts/setup.js" + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/app/scripts/setup.js b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/app/scripts/setup.js similarity index 95% rename from src/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/app/scripts/setup.js rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/app/scripts/setup.js index 55078d87..13f1a33c 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/app/scripts/setup.js +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/app/scripts/setup.js @@ -1 +1 @@ -console.log('setup'); +console.log('setup'); diff --git 
a/src/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/lib/package.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/lib/package.json similarity index 92% rename from src/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/lib/package.json rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/lib/package.json index 29b7eb9b..9fba5e27 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/lib/package.json +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/lib/package.json @@ -1,7 +1,7 @@ -{ - "name": "lib", - "version": "2.0.1", - "dependencies": { - "left-pad": "1.3.0" - } -} +{ + "name": "lib", + "version": "2.0.1", + "dependencies": { + "left-pad": "1.3.0" + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/shared/package.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/shared/package.json similarity index 93% rename from src/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/shared/package.json rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/shared/package.json index c72da176..0141611d 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/shared/package.json +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/shared/package.json @@ -1,7 +1,7 @@ -{ - "name": "shared", - "version": "3.1.4", - "dependencies": { - "lib": "workspace:../lib" - } -} +{ + "name": "shared", + "version": "3.1.4", + "dependencies": { + "lib": "workspace:../lib" + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Node/NodeLanguageAnalyzerTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Node/NodeLanguageAnalyzerTests.cs similarity index 96% rename from src/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Node/NodeLanguageAnalyzerTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Node/NodeLanguageAnalyzerTests.cs index 2798223f..db2dfde1 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Node/NodeLanguageAnalyzerTests.cs +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Node/NodeLanguageAnalyzerTests.cs @@ -1,27 +1,27 @@ -using StellaOps.Scanner.Analyzers.Lang.Node; -using StellaOps.Scanner.Analyzers.Lang.Tests.Harness; -using StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities; - -namespace StellaOps.Scanner.Analyzers.Lang.Node.Tests; - -public sealed class NodeLanguageAnalyzerTests -{ - [Fact] - public async Task WorkspaceFixtureProducesDeterministicOutputAsync() - { - var cancellationToken = TestContext.Current.CancellationToken; - var fixturePath = TestPaths.ResolveFixture("lang", "node", "workspaces"); - var goldenPath = Path.Combine(fixturePath, "expected.json"); - - var analyzers = new ILanguageAnalyzer[] - { - new NodeLanguageAnalyzer() - }; - - await LanguageAnalyzerTestHarness.AssertDeterministicAsync( - fixturePath, - goldenPath, - analyzers, - cancellationToken); - } -} +using StellaOps.Scanner.Analyzers.Lang.Node; +using StellaOps.Scanner.Analyzers.Lang.Tests.Harness; +using 
StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities; + +namespace StellaOps.Scanner.Analyzers.Lang.Node.Tests; + +public sealed class NodeLanguageAnalyzerTests +{ + [Fact] + public async Task WorkspaceFixtureProducesDeterministicOutputAsync() + { + var cancellationToken = TestContext.Current.CancellationToken; + var fixturePath = TestPaths.ResolveFixture("lang", "node", "workspaces"); + var goldenPath = Path.Combine(fixturePath, "expected.json"); + + var analyzers = new ILanguageAnalyzer[] + { + new NodeLanguageAnalyzer() + }; + + await LanguageAnalyzerTestHarness.AssertDeterministicAsync( + fixturePath, + goldenPath, + analyzers, + cancellationToken); + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Node.Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests.csproj b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests.csproj similarity index 80% rename from src/StellaOps.Scanner.Analyzers.Lang.Node.Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests.csproj rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests.csproj index 972155b7..b79e37f1 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Node.Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests.csproj +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests/StellaOps.Scanner.Analyzers.Lang.Node.Tests.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -30,9 +31,9 @@ <ItemGroup> <ProjectReference Include="..\StellaOps.Scanner.Analyzers.Lang.Tests\StellaOps.Scanner.Analyzers.Lang.Tests.csproj" /> - <ProjectReference Include="..\StellaOps.Scanner.Analyzers.Lang\StellaOps.Scanner.Analyzers.Lang.csproj" /> - <ProjectReference Include="..\StellaOps.Scanner.Analyzers.Lang.Node\StellaOps.Scanner.Analyzers.Lang.Node.csproj" /> - <ProjectReference Include="..\StellaOps.Scanner.Core\StellaOps.Scanner.Core.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Scanner.Analyzers.Lang/StellaOps.Scanner.Analyzers.Lang.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Scanner.Analyzers.Lang.Node/StellaOps.Scanner.Analyzers.Lang.Node.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Scanner.Core/StellaOps.Scanner.Core.csproj" /> </ItemGroup> <ItemGroup> @@ -42,4 +43,4 @@ <ItemGroup> <Using Include="Xunit" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/expected.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/expected.json similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/expected.json rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/expected.json diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered-2.0.dist-info/INSTALLER b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered-2.0.dist-info/INSTALLER similarity index 100% rename from 
src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered-2.0.dist-info/INSTALLER rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered-2.0.dist-info/INSTALLER diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered-2.0.dist-info/METADATA b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered-2.0.dist-info/METADATA similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered-2.0.dist-info/METADATA rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered-2.0.dist-info/METADATA diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered-2.0.dist-info/RECORD b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered-2.0.dist-info/RECORD similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered-2.0.dist-info/RECORD rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered-2.0.dist-info/RECORD diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered-2.0.dist-info/WHEEL b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered-2.0.dist-info/WHEEL similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered-2.0.dist-info/WHEEL rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered-2.0.dist-info/WHEEL diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered-2.0.dist-info/entry_points.txt b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered-2.0.dist-info/entry_points.txt similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered-2.0.dist-info/entry_points.txt rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered-2.0.dist-info/entry_points.txt diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered/__init__.py 
b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered/__init__.py similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered/__init__.py rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered/__init__.py diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered/cli.py b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered/cli.py similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered/cli.py rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered/cli.py diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered/core.py b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered/core.py similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered/core.py rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer1/usr/lib/python3.11/site-packages/layered/core.py diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/LICENSE b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/LICENSE similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/LICENSE rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/LICENSE diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered-2.0.dist-info/INSTALLER b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered-2.0.dist-info/INSTALLER similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered-2.0.dist-info/INSTALLER rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered-2.0.dist-info/INSTALLER diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered-2.0.dist-info/METADATA 
b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered-2.0.dist-info/METADATA similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered-2.0.dist-info/METADATA rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered-2.0.dist-info/METADATA diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered-2.0.dist-info/RECORD b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered-2.0.dist-info/RECORD similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered-2.0.dist-info/RECORD rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered-2.0.dist-info/RECORD diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered-2.0.dist-info/WHEEL b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered-2.0.dist-info/WHEEL similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered-2.0.dist-info/WHEEL rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered-2.0.dist-info/WHEEL diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered-2.0.dist-info/direct_url.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered-2.0.dist-info/direct_url.json similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered-2.0.dist-info/direct_url.json rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered-2.0.dist-info/direct_url.json diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered-2.0.dist-info/entry_points.txt b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered-2.0.dist-info/entry_points.txt similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered-2.0.dist-info/entry_points.txt rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered-2.0.dist-info/entry_points.txt diff --git 
a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered/plugins/__init__.py b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered/plugins/__init__.py similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered/plugins/__init__.py rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered/plugins/__init__.py diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered/plugins/plugin.py b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered/plugins/plugin.py similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered/plugins/plugin.py rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/layered-editable/layer2/usr/lib/python3.11/site-packages/layered/plugins/plugin.py diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/expected.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/expected.json similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/expected.json rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/expected.json diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg-1.2.3.data/scripts/cache-tool b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg-1.2.3.data/scripts/cache-tool similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg-1.2.3.data/scripts/cache-tool rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg-1.2.3.data/scripts/cache-tool diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg-1.2.3.dist-info/INSTALLER b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg-1.2.3.dist-info/INSTALLER similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg-1.2.3.dist-info/INSTALLER rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg-1.2.3.dist-info/INSTALLER diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg-1.2.3.dist-info/METADATA 
b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg-1.2.3.dist-info/METADATA similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg-1.2.3.dist-info/METADATA rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg-1.2.3.dist-info/METADATA diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg-1.2.3.dist-info/RECORD b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg-1.2.3.dist-info/RECORD similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg-1.2.3.dist-info/RECORD rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg-1.2.3.dist-info/RECORD diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg-1.2.3.dist-info/WHEEL b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg-1.2.3.dist-info/WHEEL similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg-1.2.3.dist-info/WHEEL rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg-1.2.3.dist-info/WHEEL diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg-1.2.3.dist-info/entry_points.txt b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg-1.2.3.dist-info/entry_points.txt similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg-1.2.3.dist-info/entry_points.txt rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg-1.2.3.dist-info/entry_points.txt diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg/LICENSE b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg/LICENSE similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg/LICENSE rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg/LICENSE diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg/__init__.py b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg/__init__.py similarity index 100% rename from 
src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg/__init__.py rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg/__init__.py diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg/data/config.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg/data/config.json similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg/data/config.json rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg/data/config.json diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg/md5only.txt b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg/md5only.txt similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg/md5only.txt rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/pip-cache/lib/python3.11/site-packages/cache_pkg/md5only.txt diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/expected.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/expected.json similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/expected.json rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/expected.json diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple-1.0.0.dist-info/INSTALLER b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple-1.0.0.dist-info/INSTALLER similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple-1.0.0.dist-info/INSTALLER rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple-1.0.0.dist-info/INSTALLER diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple-1.0.0.dist-info/METADATA b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple-1.0.0.dist-info/METADATA similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple-1.0.0.dist-info/METADATA rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple-1.0.0.dist-info/METADATA diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple-1.0.0.dist-info/RECORD 
b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple-1.0.0.dist-info/RECORD similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple-1.0.0.dist-info/RECORD rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple-1.0.0.dist-info/RECORD diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple-1.0.0.dist-info/WHEEL b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple-1.0.0.dist-info/WHEEL similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple-1.0.0.dist-info/WHEEL rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple-1.0.0.dist-info/WHEEL diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple-1.0.0.dist-info/direct_url.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple-1.0.0.dist-info/direct_url.json similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple-1.0.0.dist-info/direct_url.json rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple-1.0.0.dist-info/direct_url.json diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple-1.0.0.dist-info/entry_points.txt b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple-1.0.0.dist-info/entry_points.txt similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple-1.0.0.dist-info/entry_points.txt rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple-1.0.0.dist-info/entry_points.txt diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple/__init__.py b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple/__init__.py similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple/__init__.py rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple/__init__.py diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple/__main__.py b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple/__main__.py similarity index 100% rename from 
src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple/__main__.py rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple/__main__.py diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple/core.py b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple/core.py similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple/core.py rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python/simple-venv/lib/python3.11/site-packages/simple/core.py diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Python/PythonLanguageAnalyzerTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Python/PythonLanguageAnalyzerTests.cs similarity index 99% rename from src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Python/PythonLanguageAnalyzerTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Python/PythonLanguageAnalyzerTests.cs index 45eb6407..b7879311 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Python/PythonLanguageAnalyzerTests.cs +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Python/PythonLanguageAnalyzerTests.cs @@ -1,32 +1,32 @@ -using StellaOps.Scanner.Analyzers.Lang.Python; -using StellaOps.Scanner.Analyzers.Lang.Tests.Harness; -using StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities; - -namespace StellaOps.Scanner.Analyzers.Lang.Python.Tests; - -public sealed class PythonLanguageAnalyzerTests -{ - [Fact] +using StellaOps.Scanner.Analyzers.Lang.Python; +using StellaOps.Scanner.Analyzers.Lang.Tests.Harness; +using StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities; + +namespace StellaOps.Scanner.Analyzers.Lang.Python.Tests; + +public sealed class PythonLanguageAnalyzerTests +{ + [Fact] public async Task SimpleVenvFixtureProducesDeterministicOutputAsync() { var cancellationToken = TestContext.Current.CancellationToken; var fixturePath = TestPaths.ResolveFixture("lang", "python", "simple-venv"); var goldenPath = Path.Combine(fixturePath, "expected.json"); - - var usageHints = new LanguageUsageHints(new[] - { - Path.Combine(fixturePath, "bin", "simple-tool") - }); - - var analyzers = new ILanguageAnalyzer[] - { - new PythonLanguageAnalyzer() - }; - - await LanguageAnalyzerTestHarness.AssertDeterministicAsync( - fixturePath, - goldenPath, - analyzers, + + var usageHints = new LanguageUsageHints(new[] + { + Path.Combine(fixturePath, "bin", "simple-tool") + }); + + var analyzers = new ILanguageAnalyzer[] + { + new PythonLanguageAnalyzer() + }; + + await LanguageAnalyzerTestHarness.AssertDeterministicAsync( + fixturePath, + goldenPath, + analyzers, cancellationToken, usageHints); } diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests.csproj b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests.csproj similarity index 79% rename from src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests.csproj rename to 
src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests.csproj index bf6c603e..d83c5200 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests.csproj +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -30,9 +31,9 @@ <ItemGroup> <ProjectReference Include="..\StellaOps.Scanner.Analyzers.Lang.Tests\StellaOps.Scanner.Analyzers.Lang.Tests.csproj" /> - <ProjectReference Include="..\StellaOps.Scanner.Analyzers.Lang\StellaOps.Scanner.Analyzers.Lang.csproj" /> - <ProjectReference Include="..\StellaOps.Scanner.Analyzers.Lang.Python\StellaOps.Scanner.Analyzers.Lang.Python.csproj" /> - <ProjectReference Include="..\StellaOps.Scanner.Core\StellaOps.Scanner.Core.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Scanner.Analyzers.Lang/StellaOps.Scanner.Analyzers.Lang.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Scanner.Analyzers.Lang.Python/StellaOps.Scanner.Analyzers.Lang.Python.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Scanner.Core/StellaOps.Scanner.Core.csproj" /> </ItemGroup> <ItemGroup> @@ -42,4 +43,4 @@ <ItemGroup> <Using Include="Xunit" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Core/LanguageAnalyzerResultTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Core/LanguageAnalyzerResultTests.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/Core/LanguageAnalyzerResultTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Core/LanguageAnalyzerResultTests.cs diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Core/LanguageComponentMapperTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Core/LanguageComponentMapperTests.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/Core/LanguageComponentMapperTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Core/LanguageComponentMapperTests.cs diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Determinism/LanguageAnalyzerHarnessTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Determinism/LanguageAnalyzerHarnessTests.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/Determinism/LanguageAnalyzerHarnessTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Determinism/LanguageAnalyzerHarnessTests.cs diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/DotNet/DotNetLanguageAnalyzerTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/DotNet/DotNetLanguageAnalyzerTests.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/DotNet/DotNetLanguageAnalyzerTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/DotNet/DotNetLanguageAnalyzerTests.cs diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/determinism/basic/expected.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/determinism/basic/expected.json similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/determinism/basic/expected.json rename to 
src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/determinism/basic/expected.json diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/determinism/basic/input/placeholder.txt b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/determinism/basic/input/placeholder.txt similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/determinism/basic/input/placeholder.txt rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/determinism/basic/input/placeholder.txt diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/AppA.deps.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/AppA.deps.json similarity index 96% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/AppA.deps.json rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/AppA.deps.json index b5412459..bb11f18b 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/AppA.deps.json +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/AppA.deps.json @@ -1,84 +1,84 @@ -{ - "runtimeTarget": { - "name": ".NETCoreApp,Version=v10.0/osx-arm64" - }, - "targets": { - ".NETCoreApp,Version=v10.0": { - "AppA/2.0.0": { - "dependencies": { - "StellaOps.Toolkit": "1.2.3", - "StellaOps.Logging": "2.5.1" - } - }, - "StellaOps.Toolkit/1.2.3": { - "dependencies": { - "StellaOps.Logging": "2.5.1" - }, - "runtime": { - "lib/net10.0/StellaOps.Toolkit.dll": { - "assemblyVersion": "1.2.3.0", - "fileVersion": "1.2.3.0" - } - } - }, - "StellaOps.Logging/2.5.1": { - "runtime": { - "lib/net10.0/StellaOps.Logging.dll": { - "assemblyVersion": "2.5.1.0", - "fileVersion": "2.5.1.12345" - } - } - } - }, - ".NETCoreApp,Version=v10.0/linux-x64": { - "StellaOps.Toolkit/1.2.3": { - "runtimeTargets": { - "runtimes/linux-x64/native/libstellaops.toolkit.so": { - "rid": "linux-x64", - "assetType": "native" - } - } - }, - "StellaOps.Logging/2.5.1": { - "runtime": { - "runtimes/linux-x64/lib/net10.0/StellaOps.Logging.dll": {} - } - } - }, - ".NETCoreApp,Version=v10.0/osx-arm64": { - "StellaOps.Toolkit/1.2.3": { - "runtimeTargets": { - "runtimes/osx-arm64/native/libstellaops.toolkit.dylib": { - "rid": "osx-arm64", - "assetType": "native" - } - } - }, - "StellaOps.Logging/2.5.1": { - "runtime": { - "runtimes/osx-arm64/lib/net10.0/StellaOps.Logging.dll": {} - } - } - } - }, - "libraries": { - "AppA/2.0.0": { - "type": "project", - "serviceable": false - }, - "StellaOps.Toolkit/1.2.3": { - "type": "package", - "serviceable": true, - "sha512": "sha512-FAKE_TOOLKIT_SHA==", - "path": "stellaops.toolkit/1.2.3", - "hashPath": "stellaops.toolkit.1.2.3.nupkg.sha512" - }, - "StellaOps.Logging/2.5.1": { - "type": "package", - "serviceable": true, - "sha512": "sha512-FAKE_LOGGING_SHA==", - "path": "stellaops.logging/2.5.1", - "hashPath": "stellaops.logging.2.5.1.nupkg.sha512" - } - } -} +{ + "runtimeTarget": { + "name": ".NETCoreApp,Version=v10.0/osx-arm64" + }, + "targets": { + ".NETCoreApp,Version=v10.0": { + "AppA/2.0.0": { + "dependencies": { + "StellaOps.Toolkit": "1.2.3", + "StellaOps.Logging": "2.5.1" + } + }, + "StellaOps.Toolkit/1.2.3": { + "dependencies": { + "StellaOps.Logging": "2.5.1" + }, + "runtime": { + "lib/net10.0/StellaOps.Toolkit.dll": { + "assemblyVersion": "1.2.3.0", + "fileVersion": "1.2.3.0" + } + } + }, + "StellaOps.Logging/2.5.1": { + "runtime": { + 
"lib/net10.0/StellaOps.Logging.dll": { + "assemblyVersion": "2.5.1.0", + "fileVersion": "2.5.1.12345" + } + } + } + }, + ".NETCoreApp,Version=v10.0/linux-x64": { + "StellaOps.Toolkit/1.2.3": { + "runtimeTargets": { + "runtimes/linux-x64/native/libstellaops.toolkit.so": { + "rid": "linux-x64", + "assetType": "native" + } + } + }, + "StellaOps.Logging/2.5.1": { + "runtime": { + "runtimes/linux-x64/lib/net10.0/StellaOps.Logging.dll": {} + } + } + }, + ".NETCoreApp,Version=v10.0/osx-arm64": { + "StellaOps.Toolkit/1.2.3": { + "runtimeTargets": { + "runtimes/osx-arm64/native/libstellaops.toolkit.dylib": { + "rid": "osx-arm64", + "assetType": "native" + } + } + }, + "StellaOps.Logging/2.5.1": { + "runtime": { + "runtimes/osx-arm64/lib/net10.0/StellaOps.Logging.dll": {} + } + } + } + }, + "libraries": { + "AppA/2.0.0": { + "type": "project", + "serviceable": false + }, + "StellaOps.Toolkit/1.2.3": { + "type": "package", + "serviceable": true, + "sha512": "sha512-FAKE_TOOLKIT_SHA==", + "path": "stellaops.toolkit/1.2.3", + "hashPath": "stellaops.toolkit.1.2.3.nupkg.sha512" + }, + "StellaOps.Logging/2.5.1": { + "type": "package", + "serviceable": true, + "sha512": "sha512-FAKE_LOGGING_SHA==", + "path": "stellaops.logging/2.5.1", + "hashPath": "stellaops.logging.2.5.1.nupkg.sha512" + } + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/AppA.runtimeconfig.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/AppA.runtimeconfig.json similarity index 94% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/AppA.runtimeconfig.json rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/AppA.runtimeconfig.json index 36677e13..a1b0d897 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/AppA.runtimeconfig.json +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/AppA.runtimeconfig.json @@ -1,39 +1,39 @@ -{ - "runtimeOptions": { - "tfm": "net10.0", - "framework": { - "name": "Microsoft.NETCore.App", - "version": "10.0.1" - }, - "frameworks": [ - { - "name": "Microsoft.NETCore.App", - "version": "10.0.1" - }, - { - "name": "Microsoft.AspNetCore.App", - "version": "10.0.0" - }, - { - "name": "StellaOps.Hosting", - "version": "2.0.0" - } - ], - "runtimeGraph": { - "runtimes": { - "osx-arm64": { - "fallbacks": [ - "osx", - "unix" - ] - }, - "linux-x64": { - "fallbacks": [ - "linux", - "unix" - ] - } - } - } - } -} +{ + "runtimeOptions": { + "tfm": "net10.0", + "framework": { + "name": "Microsoft.NETCore.App", + "version": "10.0.1" + }, + "frameworks": [ + { + "name": "Microsoft.NETCore.App", + "version": "10.0.1" + }, + { + "name": "Microsoft.AspNetCore.App", + "version": "10.0.0" + }, + { + "name": "StellaOps.Hosting", + "version": "2.0.0" + } + ], + "runtimeGraph": { + "runtimes": { + "osx-arm64": { + "fallbacks": [ + "osx", + "unix" + ] + }, + "linux-x64": { + "fallbacks": [ + "linux", + "unix" + ] + } + } + } + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/AppB.deps.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/AppB.deps.json similarity index 96% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/AppB.deps.json rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/AppB.deps.json index 0a5b09f1..bd18123c 100644 --- 
a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/AppB.deps.json +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/AppB.deps.json @@ -1,76 +1,76 @@ -{ - "runtimeTarget": { - "name": ".NETCoreApp,Version=v10.0/win-arm64" - }, - "targets": { - ".NETCoreApp,Version=v10.0": { - "AppB/3.1.0": { - "dependencies": { - "StellaOps.Toolkit": "1.2.3", - "StellaOps.Logging": "2.5.1" - } - }, - "StellaOps.Toolkit/1.2.3": { - "runtime": { - "lib/net10.0/StellaOps.Toolkit.dll": { - "assemblyVersion": "1.2.3.0", - "fileVersion": "1.2.3.0" - } - } - }, - "StellaOps.Logging/2.5.1": { - "runtime": { - "lib/net10.0/StellaOps.Logging.dll": { - "assemblyVersion": "2.5.1.0", - "fileVersion": "2.5.1.12345" - } - } - } - }, - ".NETCoreApp,Version=v10.0/win-arm64": { - "StellaOps.Toolkit/1.2.3": { - "runtimeTargets": { - "runtimes/win-arm64/native/stellaops.toolkit.dll": { - "rid": "win-arm64", - "assetType": "native" - } - } - }, - "StellaOps.Logging/2.5.1": { - "runtimeTargets": { - "runtimes/win-arm64/native/stellaops.logging.dll": { - "rid": "win-arm64", - "assetType": "native" - } - } - } - }, - ".NETCoreApp,Version=v10.0/linux-arm64": { - "StellaOps.Logging/2.5.1": { - "runtime": { - "runtimes/linux-arm64/lib/net10.0/StellaOps.Logging.dll": {} - } - } - } - }, - "libraries": { - "AppB/3.1.0": { - "type": "project", - "serviceable": false - }, - "StellaOps.Toolkit/1.2.3": { - "type": "package", - "serviceable": true, - "sha512": "sha512-FAKE_TOOLKIT_SHA==", - "path": "stellaops.toolkit/1.2.3", - "hashPath": "stellaops.toolkit.1.2.3.nupkg.sha512" - }, - "StellaOps.Logging/2.5.1": { - "type": "package", - "serviceable": true, - "sha512": "sha512-FAKE_LOGGING_SHA==", - "path": "stellaops.logging/2.5.1", - "hashPath": "stellaops.logging.2.5.1.nupkg.sha512" - } - } -} +{ + "runtimeTarget": { + "name": ".NETCoreApp,Version=v10.0/win-arm64" + }, + "targets": { + ".NETCoreApp,Version=v10.0": { + "AppB/3.1.0": { + "dependencies": { + "StellaOps.Toolkit": "1.2.3", + "StellaOps.Logging": "2.5.1" + } + }, + "StellaOps.Toolkit/1.2.3": { + "runtime": { + "lib/net10.0/StellaOps.Toolkit.dll": { + "assemblyVersion": "1.2.3.0", + "fileVersion": "1.2.3.0" + } + } + }, + "StellaOps.Logging/2.5.1": { + "runtime": { + "lib/net10.0/StellaOps.Logging.dll": { + "assemblyVersion": "2.5.1.0", + "fileVersion": "2.5.1.12345" + } + } + } + }, + ".NETCoreApp,Version=v10.0/win-arm64": { + "StellaOps.Toolkit/1.2.3": { + "runtimeTargets": { + "runtimes/win-arm64/native/stellaops.toolkit.dll": { + "rid": "win-arm64", + "assetType": "native" + } + } + }, + "StellaOps.Logging/2.5.1": { + "runtimeTargets": { + "runtimes/win-arm64/native/stellaops.logging.dll": { + "rid": "win-arm64", + "assetType": "native" + } + } + } + }, + ".NETCoreApp,Version=v10.0/linux-arm64": { + "StellaOps.Logging/2.5.1": { + "runtime": { + "runtimes/linux-arm64/lib/net10.0/StellaOps.Logging.dll": {} + } + } + } + }, + "libraries": { + "AppB/3.1.0": { + "type": "project", + "serviceable": false + }, + "StellaOps.Toolkit/1.2.3": { + "type": "package", + "serviceable": true, + "sha512": "sha512-FAKE_TOOLKIT_SHA==", + "path": "stellaops.toolkit/1.2.3", + "hashPath": "stellaops.toolkit.1.2.3.nupkg.sha512" + }, + "StellaOps.Logging/2.5.1": { + "type": "package", + "serviceable": true, + "sha512": "sha512-FAKE_LOGGING_SHA==", + "path": "stellaops.logging/2.5.1", + "hashPath": "stellaops.logging.2.5.1.nupkg.sha512" + } + } +} diff --git 
a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/AppB.runtimeconfig.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/AppB.runtimeconfig.json similarity index 94% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/AppB.runtimeconfig.json rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/AppB.runtimeconfig.json index 049280f8..9a7241ca 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/AppB.runtimeconfig.json +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/AppB.runtimeconfig.json @@ -1,38 +1,38 @@ -{ - "runtimeOptions": { - "tfm": "net10.0", - "framework": { - "name": "Microsoft.NETCore.App", - "version": "10.0.0" - }, - "frameworks": [ - { - "name": "Microsoft.NETCore.App", - "version": "10.0.0" - }, - { - "name": "Microsoft.WindowsDesktop.App", - "version": "10.0.0" - } - ], - "additionalProbingPaths": [ - "C:/Users/runner/.nuget/packages" - ], - "runtimeGraph": { - "runtimes": { - "win-arm64": { - "fallbacks": [ - "win", - "any" - ] - }, - "linux-arm64": { - "fallbacks": [ - "linux", - "unix" - ] - } - } - } - } -} +{ + "runtimeOptions": { + "tfm": "net10.0", + "framework": { + "name": "Microsoft.NETCore.App", + "version": "10.0.0" + }, + "frameworks": [ + { + "name": "Microsoft.NETCore.App", + "version": "10.0.0" + }, + { + "name": "Microsoft.WindowsDesktop.App", + "version": "10.0.0" + } + ], + "additionalProbingPaths": [ + "C:/Users/runner/.nuget/packages" + ], + "runtimeGraph": { + "runtimes": { + "win-arm64": { + "fallbacks": [ + "win", + "any" + ] + }, + "linux-arm64": { + "fallbacks": [ + "linux", + "unix" + ] + } + } + } + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/expected.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/expected.json similarity index 97% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/expected.json rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/expected.json index 181578b8..475576cd 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/expected.json +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/expected.json @@ -1,120 +1,120 @@ -[ - { - "analyzerId": "dotnet", - "componentKey": "purl::pkg:nuget/stellaops.logging@2.5.1", - "purl": "pkg:nuget/stellaops.logging@2.5.1", - "name": "StellaOps.Logging", - "version": "2.5.1", - "type": "nuget", - "usedByEntrypoint": false, - "metadata": { - "assembly[0].assetPath": "lib/net10.0/StellaOps.Logging.dll", - "assembly[0].fileVersion": "2.5.1.12345", - "assembly[0].tfm[0]": ".NETCoreApp,Version=v10.0", - "assembly[0].version": "2.5.1.0", - "assembly[1].assetPath": "runtimes/linux-arm64/lib/net10.0/StellaOps.Logging.dll", - "assembly[1].rid[0]": "linux-arm64", - "assembly[1].tfm[0]": ".NETCoreApp,Version=v10.0", - "assembly[2].assetPath": "runtimes/linux-x64/lib/net10.0/StellaOps.Logging.dll", - "assembly[2].rid[0]": "linux-x64", - "assembly[2].tfm[0]": ".NETCoreApp,Version=v10.0", - "assembly[3].assetPath": "runtimes/osx-arm64/lib/net10.0/StellaOps.Logging.dll", - "assembly[3].rid[0]": "osx-arm64", - "assembly[3].tfm[0]": ".NETCoreApp,Version=v10.0", - "deps.path[0]": "AppA.deps.json", - "deps.path[1]": "AppB.deps.json", - "deps.rid[0]": "linux-arm64", - 
"deps.rid[1]": "linux-x64", - "deps.rid[2]": "osx-arm64", - "deps.rid[3]": "win-arm64", - "deps.tfm[0]": ".NETCoreApp,Version=v10.0", - "license.expression[0]": "Apache-2.0", - "native[0].assetPath": "runtimes/win-arm64/native/stellaops.logging.dll", - "native[0].rid[0]": "win-arm64", - "native[0].tfm[0]": ".NETCoreApp,Version=v10.0", - "package.hashPath[0]": "stellaops.logging.2.5.1.nupkg.sha512", - "package.id": "StellaOps.Logging", - "package.id.normalized": "stellaops.logging", - "package.path[0]": "stellaops.logging/2.5.1", - "package.serviceable": "true", - "package.sha512[0]": "sha512-FAKE_LOGGING_SHA==", - "package.version": "2.5.1", - "provenance": "manifest" - }, - "evidence": [ - { - "kind": "file", - "source": "deps.json", - "locator": "AppA.deps.json", - "value": "StellaOps.Logging/2.5.1" - }, - { - "kind": "file", - "source": "deps.json", - "locator": "AppB.deps.json", - "value": "StellaOps.Logging/2.5.1" - } - ] - }, - { - "analyzerId": "dotnet", - "componentKey": "purl::pkg:nuget/stellaops.toolkit@1.2.3", - "purl": "pkg:nuget/stellaops.toolkit@1.2.3", - "name": "StellaOps.Toolkit", - "version": "1.2.3", - "type": "nuget", - "usedByEntrypoint": false, - "metadata": { - "assembly[0].assetPath": "lib/net10.0/StellaOps.Toolkit.dll", - "assembly[0].fileVersion": "1.2.3.0", - "assembly[0].tfm[0]": ".NETCoreApp,Version=v10.0", - "assembly[0].version": "1.2.3.0", - "deps.dependency[0]": "stellaops.logging", - "deps.path[0]": "AppA.deps.json", - "deps.path[1]": "AppB.deps.json", - "deps.rid[0]": "linux-x64", - "deps.rid[1]": "osx-arm64", - "deps.rid[2]": "win-arm64", - "deps.tfm[0]": ".NETCoreApp,Version=v10.0", - "license.file.sha256[0]": "604e182900b0ecb1ffb911c817bcbd148a31b8f55ad392a3b770be8005048c5c", - "license.file[0]": "packages/stellaops.toolkit/1.2.3/LICENSE.txt", - "native[0].assetPath": "runtimes/linux-x64/native/libstellaops.toolkit.so", - "native[0].rid[0]": "linux-x64", - "native[0].tfm[0]": ".NETCoreApp,Version=v10.0", - "native[1].assetPath": "runtimes/osx-arm64/native/libstellaops.toolkit.dylib", - "native[1].rid[0]": "osx-arm64", - "native[1].tfm[0]": ".NETCoreApp,Version=v10.0", - "native[2].assetPath": "runtimes/win-arm64/native/stellaops.toolkit.dll", - "native[2].rid[0]": "win-arm64", - "native[2].tfm[0]": ".NETCoreApp,Version=v10.0", - "package.hashPath[0]": "stellaops.toolkit.1.2.3.nupkg.sha512", - "package.id": "StellaOps.Toolkit", - "package.id.normalized": "stellaops.toolkit", - "package.path[0]": "stellaops.toolkit/1.2.3", - "package.serviceable": "true", - "package.sha512[0]": "sha512-FAKE_TOOLKIT_SHA==", - "package.version": "1.2.3", - "provenance": "manifest" - }, - "evidence": [ - { - "kind": "file", - "source": "deps.json", - "locator": "AppA.deps.json", - "value": "StellaOps.Toolkit/1.2.3" - }, - { - "kind": "file", - "source": "deps.json", - "locator": "AppB.deps.json", - "value": "StellaOps.Toolkit/1.2.3" - }, - { - "kind": "file", - "source": "license", - "locator": "packages/stellaops.toolkit/1.2.3/LICENSE.txt", - "sha256": "604e182900b0ecb1ffb911c817bcbd148a31b8f55ad392a3b770be8005048c5c" - } - ] - } +[ + { + "analyzerId": "dotnet", + "componentKey": "purl::pkg:nuget/stellaops.logging@2.5.1", + "purl": "pkg:nuget/stellaops.logging@2.5.1", + "name": "StellaOps.Logging", + "version": "2.5.1", + "type": "nuget", + "usedByEntrypoint": false, + "metadata": { + "assembly[0].assetPath": "lib/net10.0/StellaOps.Logging.dll", + "assembly[0].fileVersion": "2.5.1.12345", + "assembly[0].tfm[0]": ".NETCoreApp,Version=v10.0", + "assembly[0].version": 
"2.5.1.0", + "assembly[1].assetPath": "runtimes/linux-arm64/lib/net10.0/StellaOps.Logging.dll", + "assembly[1].rid[0]": "linux-arm64", + "assembly[1].tfm[0]": ".NETCoreApp,Version=v10.0", + "assembly[2].assetPath": "runtimes/linux-x64/lib/net10.0/StellaOps.Logging.dll", + "assembly[2].rid[0]": "linux-x64", + "assembly[2].tfm[0]": ".NETCoreApp,Version=v10.0", + "assembly[3].assetPath": "runtimes/osx-arm64/lib/net10.0/StellaOps.Logging.dll", + "assembly[3].rid[0]": "osx-arm64", + "assembly[3].tfm[0]": ".NETCoreApp,Version=v10.0", + "deps.path[0]": "AppA.deps.json", + "deps.path[1]": "AppB.deps.json", + "deps.rid[0]": "linux-arm64", + "deps.rid[1]": "linux-x64", + "deps.rid[2]": "osx-arm64", + "deps.rid[3]": "win-arm64", + "deps.tfm[0]": ".NETCoreApp,Version=v10.0", + "license.expression[0]": "Apache-2.0", + "native[0].assetPath": "runtimes/win-arm64/native/stellaops.logging.dll", + "native[0].rid[0]": "win-arm64", + "native[0].tfm[0]": ".NETCoreApp,Version=v10.0", + "package.hashPath[0]": "stellaops.logging.2.5.1.nupkg.sha512", + "package.id": "StellaOps.Logging", + "package.id.normalized": "stellaops.logging", + "package.path[0]": "stellaops.logging/2.5.1", + "package.serviceable": "true", + "package.sha512[0]": "sha512-FAKE_LOGGING_SHA==", + "package.version": "2.5.1", + "provenance": "manifest" + }, + "evidence": [ + { + "kind": "file", + "source": "deps.json", + "locator": "AppA.deps.json", + "value": "StellaOps.Logging/2.5.1" + }, + { + "kind": "file", + "source": "deps.json", + "locator": "AppB.deps.json", + "value": "StellaOps.Logging/2.5.1" + } + ] + }, + { + "analyzerId": "dotnet", + "componentKey": "purl::pkg:nuget/stellaops.toolkit@1.2.3", + "purl": "pkg:nuget/stellaops.toolkit@1.2.3", + "name": "StellaOps.Toolkit", + "version": "1.2.3", + "type": "nuget", + "usedByEntrypoint": false, + "metadata": { + "assembly[0].assetPath": "lib/net10.0/StellaOps.Toolkit.dll", + "assembly[0].fileVersion": "1.2.3.0", + "assembly[0].tfm[0]": ".NETCoreApp,Version=v10.0", + "assembly[0].version": "1.2.3.0", + "deps.dependency[0]": "stellaops.logging", + "deps.path[0]": "AppA.deps.json", + "deps.path[1]": "AppB.deps.json", + "deps.rid[0]": "linux-x64", + "deps.rid[1]": "osx-arm64", + "deps.rid[2]": "win-arm64", + "deps.tfm[0]": ".NETCoreApp,Version=v10.0", + "license.file.sha256[0]": "604e182900b0ecb1ffb911c817bcbd148a31b8f55ad392a3b770be8005048c5c", + "license.file[0]": "packages/stellaops.toolkit/1.2.3/LICENSE.txt", + "native[0].assetPath": "runtimes/linux-x64/native/libstellaops.toolkit.so", + "native[0].rid[0]": "linux-x64", + "native[0].tfm[0]": ".NETCoreApp,Version=v10.0", + "native[1].assetPath": "runtimes/osx-arm64/native/libstellaops.toolkit.dylib", + "native[1].rid[0]": "osx-arm64", + "native[1].tfm[0]": ".NETCoreApp,Version=v10.0", + "native[2].assetPath": "runtimes/win-arm64/native/stellaops.toolkit.dll", + "native[2].rid[0]": "win-arm64", + "native[2].tfm[0]": ".NETCoreApp,Version=v10.0", + "package.hashPath[0]": "stellaops.toolkit.1.2.3.nupkg.sha512", + "package.id": "StellaOps.Toolkit", + "package.id.normalized": "stellaops.toolkit", + "package.path[0]": "stellaops.toolkit/1.2.3", + "package.serviceable": "true", + "package.sha512[0]": "sha512-FAKE_TOOLKIT_SHA==", + "package.version": "1.2.3", + "provenance": "manifest" + }, + "evidence": [ + { + "kind": "file", + "source": "deps.json", + "locator": "AppA.deps.json", + "value": "StellaOps.Toolkit/1.2.3" + }, + { + "kind": "file", + "source": "deps.json", + "locator": "AppB.deps.json", + "value": "StellaOps.Toolkit/1.2.3" + }, + { + 
"kind": "file", + "source": "license", + "locator": "packages/stellaops.toolkit/1.2.3/LICENSE.txt", + "sha256": "604e182900b0ecb1ffb911c817bcbd148a31b8f55ad392a3b770be8005048c5c" + } + ] + } ] \ No newline at end of file diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/packages/stellaops.logging/2.5.1/LICENSE.txt b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/packages/stellaops.logging/2.5.1/LICENSE.txt similarity index 97% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/packages/stellaops.logging/2.5.1/LICENSE.txt rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/packages/stellaops.logging/2.5.1/LICENSE.txt index a2662911..e03ce1f9 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/packages/stellaops.logging/2.5.1/LICENSE.txt +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/packages/stellaops.logging/2.5.1/LICENSE.txt @@ -1,15 +1,15 @@ -StellaOps Logging - -Copyright (c) 2025 StellaOps. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - https://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. +StellaOps Logging + +Copyright (c) 2025 StellaOps. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + https://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. 
diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/packages/stellaops.logging/2.5.1/stellaops.logging.nuspec b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/packages/stellaops.logging/2.5.1/stellaops.logging.nuspec similarity index 97% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/packages/stellaops.logging/2.5.1/stellaops.logging.nuspec rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/packages/stellaops.logging/2.5.1/stellaops.logging.nuspec index e8fe924b..75b1c2ec 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/packages/stellaops.logging/2.5.1/stellaops.logging.nuspec +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/packages/stellaops.logging/2.5.1/stellaops.logging.nuspec @@ -1,12 +1,12 @@ -<?xml version="1.0" encoding="utf-8"?> -<package xmlns="http://schemas.microsoft.com/packaging/2013/05/nuspec.xsd"> - <metadata> - <id>StellaOps.Logging</id> - <version>2.5.1</version> - <authors>StellaOps</authors> - <description>Logging sample package for analyzer fixtures.</description> - <license type="expression">Apache-2.0</license> - <licenseUrl>https://stella-ops.example/licenses/logging</licenseUrl> - <projectUrl>https://stella-ops.example/projects/logging</projectUrl> - </metadata> -</package> +<?xml version="1.0" encoding="utf-8"?> +<package xmlns="http://schemas.microsoft.com/packaging/2013/05/nuspec.xsd"> + <metadata> + <id>StellaOps.Logging</id> + <version>2.5.1</version> + <authors>StellaOps</authors> + <description>Logging sample package for analyzer fixtures.</description> + <license type="expression">Apache-2.0</license> + <licenseUrl>https://stella-ops.example/licenses/logging</licenseUrl> + <projectUrl>https://stella-ops.example/projects/logging</projectUrl> + </metadata> +</package> diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/packages/stellaops.toolkit/1.2.3/LICENSE.txt b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/packages/stellaops.toolkit/1.2.3/LICENSE.txt similarity index 96% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/packages/stellaops.toolkit/1.2.3/LICENSE.txt rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/packages/stellaops.toolkit/1.2.3/LICENSE.txt index 0b18126c..5ebf8c52 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/packages/stellaops.toolkit/1.2.3/LICENSE.txt +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/packages/stellaops.toolkit/1.2.3/LICENSE.txt @@ -1,7 +1,7 @@ -StellaOps Toolkit License -========================= - -This sample license is provided for test fixtures only. - -Permission is granted to use, copy, modify, and distribute this fixture -for the purpose of automated testing. +StellaOps Toolkit License +========================= + +This sample license is provided for test fixtures only. + +Permission is granted to use, copy, modify, and distribute this fixture +for the purpose of automated testing. 
diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/packages/stellaops.toolkit/1.2.3/stellaops.toolkit.nuspec b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/packages/stellaops.toolkit/1.2.3/stellaops.toolkit.nuspec similarity index 97% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/packages/stellaops.toolkit/1.2.3/stellaops.toolkit.nuspec rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/packages/stellaops.toolkit/1.2.3/stellaops.toolkit.nuspec index 0889b15f..cac0966d 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/packages/stellaops.toolkit/1.2.3/stellaops.toolkit.nuspec +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/packages/stellaops.toolkit/1.2.3/stellaops.toolkit.nuspec @@ -1,11 +1,11 @@ -<?xml version="1.0" encoding="utf-8"?> -<package xmlns="http://schemas.microsoft.com/packaging/2013/05/nuspec.xsd"> - <metadata> - <id>StellaOps.Toolkit</id> - <version>1.2.3</version> - <authors>StellaOps</authors> - <description>Toolkit sample package for analyzer fixtures.</description> - <license type="file">LICENSE.txt</license> - <licenseUrl>https://stella-ops.example/licenses/toolkit</licenseUrl> - </metadata> -</package> +<?xml version="1.0" encoding="utf-8"?> +<package xmlns="http://schemas.microsoft.com/packaging/2013/05/nuspec.xsd"> + <metadata> + <id>StellaOps.Toolkit</id> + <version>1.2.3</version> + <authors>StellaOps</authors> + <description>Toolkit sample package for analyzer fixtures.</description> + <license type="file">LICENSE.txt</license> + <licenseUrl>https://stella-ops.example/licenses/toolkit</licenseUrl> + </metadata> +</package> diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/MyApp.deps.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/MyApp.deps.json similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/MyApp.deps.json rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/MyApp.deps.json diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/MyApp.runtimeconfig.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/MyApp.runtimeconfig.json similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/MyApp.runtimeconfig.json rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/MyApp.runtimeconfig.json diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/expected.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/expected.json similarity index 97% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/expected.json rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/expected.json index 47b363c4..e9eb1141 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/expected.json +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/expected.json @@ -1,94 +1,94 @@ -[ - { - "analyzerId": "dotnet", - "componentKey": 
"purl::pkg:nuget/stellaops.runtime.selfcontained@2.1.0", - "purl": "pkg:nuget/stellaops.runtime.selfcontained@2.1.0", - "name": "StellaOps.Runtime.SelfContained", - "version": "2.1.0", - "type": "nuget", - "usedByEntrypoint": true, - "metadata": { - "deps.path[0]": "MyApp.deps.json", - "deps.rid[0]": "linux-x64", - "deps.rid[1]": "win-x64", - "deps.tfm[0]": ".NETCoreApp,Version=v10.0", - "license.expression[0]": "Apache-2.0", - "native[0].assetPath": "runtimes/linux-x64/native/libstellaopsnative.so", - "native[0].path": "runtimes/linux-x64/native/libstellaopsnative.so", - "native[0].rid[0]": "linux-x64", - "native[0].sha256": "6cf3d2a487d6a42fc7c3e2edbc452224e99a3656287a534f1164ee6ec9daadf0", - "native[0].tfm[0]": ".NETCoreApp,Version=v10.0", - "native[1].assetPath": "runtimes/win-x64/native/stellaopsnative.dll", - "native[1].rid[0]": "win-x64", - "native[1].tfm[0]": ".NETCoreApp,Version=v10.0", - "package.hashPath[0]": "stellaops.runtime.selfcontained.2.1.0.nupkg.sha512", - "package.id": "StellaOps.Runtime.SelfContained", - "package.id.normalized": "stellaops.runtime.selfcontained", - "package.path[0]": "stellaops.runtime.selfcontained/2.1.0", - "package.serviceable": "true", - "package.sha512[0]": "sha512-FAKE_RUNTIME_SHA==", - "package.version": "2.1.0", - "provenance": "manifest" - }, - "evidence": [ - { - "kind": "file", - "source": "deps.json", - "locator": "MyApp.deps.json", - "value": "StellaOps.Runtime.SelfContained/2.1.0" - }, - { - "kind": "file", - "source": "native", - "locator": "runtimes/linux-x64/native/libstellaopsnative.so", - "value": "runtimes/linux-x64/native/libstellaopsnative.so", - "sha256": "6cf3d2a487d6a42fc7c3e2edbc452224e99a3656287a534f1164ee6ec9daadf0" - } - ] - }, - { - "analyzerId": "dotnet", - "componentKey": "purl::pkg:nuget/stellaops.toolkit@1.2.3", - "purl": "pkg:nuget/stellaops.toolkit@1.2.3", - "name": "StellaOps.Toolkit", - "version": "1.2.3", - "type": "nuget", - "usedByEntrypoint": false, - "metadata": { - "assembly[0].assetPath": "lib/net10.0/StellaOps.Toolkit.dll", - "assembly[0].fileVersion": "1.2.3.0", - "assembly[0].rid[0]": "linux-x64", - "assembly[0].rid[1]": "win-x64", - "assembly[0].tfm[0]": ".NETCoreApp,Version=v10.0", - "assembly[0].version": "1.2.3.0", - "deps.path[0]": "MyApp.deps.json", - "deps.rid[0]": "linux-x64", - "deps.rid[1]": "win-x64", - "deps.tfm[0]": ".NETCoreApp,Version=v10.0", - "license.file.sha256[0]": "f94d89a576c63e8ba6ee01760c52fa7861ba609491d7c6e6c01ead5ca66b6048", - "license.file[0]": "packages/stellaops.toolkit/1.2.3/LICENSE.txt", - "package.hashPath[0]": "stellaops.toolkit.1.2.3.nupkg.sha512", - "package.id": "StellaOps.Toolkit", - "package.id.normalized": "stellaops.toolkit", - "package.path[0]": "stellaops.toolkit/1.2.3", - "package.serviceable": "true", - "package.sha512[0]": "sha512-FAKE_TOOLKIT_SHA==", - "package.version": "1.2.3", - "provenance": "manifest" - }, - "evidence": [ - { - "kind": "file", - "source": "deps.json", - "locator": "MyApp.deps.json", - "value": "StellaOps.Toolkit/1.2.3" - }, - { - "kind": "file", - "source": "license", - "locator": "packages/stellaops.toolkit/1.2.3/LICENSE.txt", - "sha256": "f94d89a576c63e8ba6ee01760c52fa7861ba609491d7c6e6c01ead5ca66b6048" - } - ] - } +[ + { + "analyzerId": "dotnet", + "componentKey": "purl::pkg:nuget/stellaops.runtime.selfcontained@2.1.0", + "purl": "pkg:nuget/stellaops.runtime.selfcontained@2.1.0", + "name": "StellaOps.Runtime.SelfContained", + "version": "2.1.0", + "type": "nuget", + "usedByEntrypoint": true, + "metadata": { + "deps.path[0]": 
"MyApp.deps.json", + "deps.rid[0]": "linux-x64", + "deps.rid[1]": "win-x64", + "deps.tfm[0]": ".NETCoreApp,Version=v10.0", + "license.expression[0]": "Apache-2.0", + "native[0].assetPath": "runtimes/linux-x64/native/libstellaopsnative.so", + "native[0].path": "runtimes/linux-x64/native/libstellaopsnative.so", + "native[0].rid[0]": "linux-x64", + "native[0].sha256": "6cf3d2a487d6a42fc7c3e2edbc452224e99a3656287a534f1164ee6ec9daadf0", + "native[0].tfm[0]": ".NETCoreApp,Version=v10.0", + "native[1].assetPath": "runtimes/win-x64/native/stellaopsnative.dll", + "native[1].rid[0]": "win-x64", + "native[1].tfm[0]": ".NETCoreApp,Version=v10.0", + "package.hashPath[0]": "stellaops.runtime.selfcontained.2.1.0.nupkg.sha512", + "package.id": "StellaOps.Runtime.SelfContained", + "package.id.normalized": "stellaops.runtime.selfcontained", + "package.path[0]": "stellaops.runtime.selfcontained/2.1.0", + "package.serviceable": "true", + "package.sha512[0]": "sha512-FAKE_RUNTIME_SHA==", + "package.version": "2.1.0", + "provenance": "manifest" + }, + "evidence": [ + { + "kind": "file", + "source": "deps.json", + "locator": "MyApp.deps.json", + "value": "StellaOps.Runtime.SelfContained/2.1.0" + }, + { + "kind": "file", + "source": "native", + "locator": "runtimes/linux-x64/native/libstellaopsnative.so", + "value": "runtimes/linux-x64/native/libstellaopsnative.so", + "sha256": "6cf3d2a487d6a42fc7c3e2edbc452224e99a3656287a534f1164ee6ec9daadf0" + } + ] + }, + { + "analyzerId": "dotnet", + "componentKey": "purl::pkg:nuget/stellaops.toolkit@1.2.3", + "purl": "pkg:nuget/stellaops.toolkit@1.2.3", + "name": "StellaOps.Toolkit", + "version": "1.2.3", + "type": "nuget", + "usedByEntrypoint": false, + "metadata": { + "assembly[0].assetPath": "lib/net10.0/StellaOps.Toolkit.dll", + "assembly[0].fileVersion": "1.2.3.0", + "assembly[0].rid[0]": "linux-x64", + "assembly[0].rid[1]": "win-x64", + "assembly[0].tfm[0]": ".NETCoreApp,Version=v10.0", + "assembly[0].version": "1.2.3.0", + "deps.path[0]": "MyApp.deps.json", + "deps.rid[0]": "linux-x64", + "deps.rid[1]": "win-x64", + "deps.tfm[0]": ".NETCoreApp,Version=v10.0", + "license.file.sha256[0]": "f94d89a576c63e8ba6ee01760c52fa7861ba609491d7c6e6c01ead5ca66b6048", + "license.file[0]": "packages/stellaops.toolkit/1.2.3/LICENSE.txt", + "package.hashPath[0]": "stellaops.toolkit.1.2.3.nupkg.sha512", + "package.id": "StellaOps.Toolkit", + "package.id.normalized": "stellaops.toolkit", + "package.path[0]": "stellaops.toolkit/1.2.3", + "package.serviceable": "true", + "package.sha512[0]": "sha512-FAKE_TOOLKIT_SHA==", + "package.version": "1.2.3", + "provenance": "manifest" + }, + "evidence": [ + { + "kind": "file", + "source": "deps.json", + "locator": "MyApp.deps.json", + "value": "StellaOps.Toolkit/1.2.3" + }, + { + "kind": "file", + "source": "license", + "locator": "packages/stellaops.toolkit/1.2.3/LICENSE.txt", + "sha256": "f94d89a576c63e8ba6ee01760c52fa7861ba609491d7c6e6c01ead5ca66b6048" + } + ] + } ] \ No newline at end of file diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/packages/stellaops.runtime.selfcontained/2.1.0/stellaops.runtime.selfcontained.nuspec b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/packages/stellaops.runtime.selfcontained/2.1.0/stellaops.runtime.selfcontained.nuspec similarity index 97% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/packages/stellaops.runtime.selfcontained/2.1.0/stellaops.runtime.selfcontained.nuspec rename 
to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/packages/stellaops.runtime.selfcontained/2.1.0/stellaops.runtime.selfcontained.nuspec index 9bee72bb..1a158ed0 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/packages/stellaops.runtime.selfcontained/2.1.0/stellaops.runtime.selfcontained.nuspec +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/packages/stellaops.runtime.selfcontained/2.1.0/stellaops.runtime.selfcontained.nuspec @@ -1,11 +1,11 @@ -<?xml version="1.0" encoding="utf-8"?> -<package xmlns="http://schemas.microsoft.com/packaging/2013/05/nuspec.xsd"> - <metadata> - <id>StellaOps.Runtime.SelfContained</id> - <version>2.1.0</version> - <authors>StellaOps</authors> - <description>Runtime bundle used for self-contained analyzer fixtures.</description> - <license type="expression">Apache-2.0</license> - <licenseUrl>https://stella-ops.example/licenses/runtime</licenseUrl> - </metadata> -</package> +<?xml version="1.0" encoding="utf-8"?> +<package xmlns="http://schemas.microsoft.com/packaging/2013/05/nuspec.xsd"> + <metadata> + <id>StellaOps.Runtime.SelfContained</id> + <version>2.1.0</version> + <authors>StellaOps</authors> + <description>Runtime bundle used for self-contained analyzer fixtures.</description> + <license type="expression">Apache-2.0</license> + <licenseUrl>https://stella-ops.example/licenses/runtime</licenseUrl> + </metadata> +</package> diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/packages/stellaops.toolkit/1.2.3/LICENSE.txt b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/packages/stellaops.toolkit/1.2.3/LICENSE.txt similarity index 96% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/packages/stellaops.toolkit/1.2.3/LICENSE.txt rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/packages/stellaops.toolkit/1.2.3/LICENSE.txt index 53d386d5..b2a95459 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/packages/stellaops.toolkit/1.2.3/LICENSE.txt +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/packages/stellaops.toolkit/1.2.3/LICENSE.txt @@ -1,6 +1,6 @@ -StellaOps Toolkit License -========================= - -Reusable toolkit licensing terms for analyzer fixtures. - -This document is intentionally short for deterministic hashing tests. +StellaOps Toolkit License +========================= + +Reusable toolkit licensing terms for analyzer fixtures. + +This document is intentionally short for deterministic hashing tests. 
diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/packages/stellaops.toolkit/1.2.3/stellaops.toolkit.nuspec b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/packages/stellaops.toolkit/1.2.3/stellaops.toolkit.nuspec similarity index 97% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/packages/stellaops.toolkit/1.2.3/stellaops.toolkit.nuspec rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/packages/stellaops.toolkit/1.2.3/stellaops.toolkit.nuspec index 8e536b46..954feac9 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/packages/stellaops.toolkit/1.2.3/stellaops.toolkit.nuspec +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/packages/stellaops.toolkit/1.2.3/stellaops.toolkit.nuspec @@ -1,11 +1,11 @@ -<?xml version="1.0" encoding="utf-8"?> -<package xmlns="http://schemas.microsoft.com/packaging/2013/05/nuspec.xsd"> - <metadata> - <id>StellaOps.Toolkit</id> - <version>1.2.3</version> - <authors>StellaOps</authors> - <description>Toolkit package for self-contained analyzer fixtures.</description> - <license type="file">LICENSE.txt</license> - <licenseUrl>https://stella-ops.example/licenses/toolkit</licenseUrl> - </metadata> -</package> +<?xml version="1.0" encoding="utf-8"?> +<package xmlns="http://schemas.microsoft.com/packaging/2013/05/nuspec.xsd"> + <metadata> + <id>StellaOps.Toolkit</id> + <version>1.2.3</version> + <authors>StellaOps</authors> + <description>Toolkit package for self-contained analyzer fixtures.</description> + <license type="file">LICENSE.txt</license> + <licenseUrl>https://stella-ops.example/licenses/toolkit</licenseUrl> + </metadata> +</package> diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/runtimes/linux-x64/native/libstellaopsnative.so b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/runtimes/linux-x64/native/libstellaopsnative.so similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/runtimes/linux-x64/native/libstellaopsnative.so rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/selfcontained/runtimes/linux-x64/native/libstellaopsnative.so diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/signed/Signed.App.deps.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/signed/Signed.App.deps.json similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/signed/Signed.App.deps.json rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/signed/Signed.App.deps.json diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/signed/Signed.App.runtimeconfig.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/signed/Signed.App.runtimeconfig.json similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/signed/Signed.App.runtimeconfig.json rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/signed/Signed.App.runtimeconfig.json diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/signed/expected.json 
b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/signed/expected.json similarity index 97% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/signed/expected.json rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/signed/expected.json index 6ac5526f..e84ed169 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/signed/expected.json +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/signed/expected.json @@ -1,40 +1,40 @@ -[ - { - "analyzerId": "dotnet", - "componentKey": "purl::pkg:nuget/microsoft.extensions.logging@9.0.0", - "purl": "pkg:nuget/microsoft.extensions.logging@9.0.0", - "name": "Microsoft.Extensions.Logging", - "version": "9.0.0", - "type": "nuget", - "usedByEntrypoint": false, - "metadata": { - "assembly[0].assetPath": "lib/net9.0/Microsoft.Extensions.Logging.dll", - "assembly[0].fileVersion": "9.0.24.52809", - "assembly[0].tfm[0]": ".NETCoreApp,Version=v10.0", - "assembly[0].version": "9.0.0.0", - "assembly[1].assetPath": "runtimes/linux-x64/lib/net9.0/Microsoft.Extensions.Logging.dll", - "assembly[1].rid[0]": "linux-x64", - "assembly[1].tfm[0]": ".NETCoreApp,Version=v10.0", - "deps.path[0]": "Signed.App.deps.json", - "deps.rid[0]": "linux-x64", - "deps.tfm[0]": ".NETCoreApp,Version=v10.0", - "license.expression[0]": "MIT", - "package.hashPath[0]": "microsoft.extensions.logging.9.0.0.nupkg.sha512", - "package.id": "Microsoft.Extensions.Logging", - "package.id.normalized": "microsoft.extensions.logging", - "package.path[0]": "microsoft.extensions.logging/9.0.0", - "package.serviceable": "true", - "package.sha512[0]": "sha512-FAKE_LOGGING_SHA==", - "package.version": "9.0.0", - "provenance": "manifest" - }, - "evidence": [ - { - "kind": "file", - "source": "deps.json", - "locator": "Signed.App.deps.json", - "value": "Microsoft.Extensions.Logging/9.0.0" - } - ] - } +[ + { + "analyzerId": "dotnet", + "componentKey": "purl::pkg:nuget/microsoft.extensions.logging@9.0.0", + "purl": "pkg:nuget/microsoft.extensions.logging@9.0.0", + "name": "Microsoft.Extensions.Logging", + "version": "9.0.0", + "type": "nuget", + "usedByEntrypoint": false, + "metadata": { + "assembly[0].assetPath": "lib/net9.0/Microsoft.Extensions.Logging.dll", + "assembly[0].fileVersion": "9.0.24.52809", + "assembly[0].tfm[0]": ".NETCoreApp,Version=v10.0", + "assembly[0].version": "9.0.0.0", + "assembly[1].assetPath": "runtimes/linux-x64/lib/net9.0/Microsoft.Extensions.Logging.dll", + "assembly[1].rid[0]": "linux-x64", + "assembly[1].tfm[0]": ".NETCoreApp,Version=v10.0", + "deps.path[0]": "Signed.App.deps.json", + "deps.rid[0]": "linux-x64", + "deps.tfm[0]": ".NETCoreApp,Version=v10.0", + "license.expression[0]": "MIT", + "package.hashPath[0]": "microsoft.extensions.logging.9.0.0.nupkg.sha512", + "package.id": "Microsoft.Extensions.Logging", + "package.id.normalized": "microsoft.extensions.logging", + "package.path[0]": "microsoft.extensions.logging/9.0.0", + "package.serviceable": "true", + "package.sha512[0]": "sha512-FAKE_LOGGING_SHA==", + "package.version": "9.0.0", + "provenance": "manifest" + }, + "evidence": [ + { + "kind": "file", + "source": "deps.json", + "locator": "Signed.App.deps.json", + "value": "Microsoft.Extensions.Logging/9.0.0" + } + ] + } ] \ No newline at end of file diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/signed/packages/microsoft.extensions.logging/9.0.0/microsoft.extensions.logging.nuspec 
b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/signed/packages/microsoft.extensions.logging/9.0.0/microsoft.extensions.logging.nuspec similarity index 97% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/signed/packages/microsoft.extensions.logging/9.0.0/microsoft.extensions.logging.nuspec rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/signed/packages/microsoft.extensions.logging/9.0.0/microsoft.extensions.logging.nuspec index 66876d16..e1e64e6a 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/signed/packages/microsoft.extensions.logging/9.0.0/microsoft.extensions.logging.nuspec +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/signed/packages/microsoft.extensions.logging/9.0.0/microsoft.extensions.logging.nuspec @@ -1,11 +1,11 @@ -<?xml version="1.0" encoding="utf-8"?> -<package xmlns="http://schemas.microsoft.com/packaging/2013/05/nuspec.xsd"> - <metadata> - <id>Microsoft.Extensions.Logging</id> - <version>9.0.0</version> - <authors>Microsoft</authors> - <description>Signed logging package fixture.</description> - <license type="expression">MIT</license> - <licenseUrl>https://licenses.nuget.org/MIT</licenseUrl> - </metadata> -</package> +<?xml version="1.0" encoding="utf-8"?> +<package xmlns="http://schemas.microsoft.com/packaging/2013/05/nuspec.xsd"> + <metadata> + <id>Microsoft.Extensions.Logging</id> + <version>9.0.0</version> + <authors>Microsoft</authors> + <description>Signed logging package fixture.</description> + <license type="expression">MIT</license> + <licenseUrl>https://licenses.nuget.org/MIT</licenseUrl> + </metadata> +</package> diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/Sample.App.deps.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/Sample.App.deps.json similarity index 96% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/Sample.App.deps.json rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/Sample.App.deps.json index 23599343..a64a54f8 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/Sample.App.deps.json +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/Sample.App.deps.json @@ -1,73 +1,73 @@ -{ - "runtimeTarget": { - "name": ".NETCoreApp,Version=v10.0/linux-x64" - }, - "targets": { - ".NETCoreApp,Version=v10.0": { - "Sample.App/1.0.0": { - "dependencies": { - "StellaOps.Toolkit": "1.2.3", - "Microsoft.Extensions.Logging": "9.0.0" - } - }, - "StellaOps.Toolkit/1.2.3": { - "dependencies": { - "Microsoft.Extensions.Logging": "9.0.0" - }, - "runtime": { - "lib/net10.0/StellaOps.Toolkit.dll": { - "assemblyVersion": "1.2.3.0", - "fileVersion": "1.2.3.0" - } - } - }, - "Microsoft.Extensions.Logging/9.0.0": { - "runtime": { - "lib/net9.0/Microsoft.Extensions.Logging.dll": { - "assemblyVersion": "9.0.0.0", - "fileVersion": "9.0.24.52809" - } - } - } - }, - ".NETCoreApp,Version=v10.0/linux-x64": { - "StellaOps.Toolkit/1.2.3": { - "runtime": { - "runtimes/linux-x64/native/libstellaops.toolkit.so": {} - } - }, - "Microsoft.Extensions.Logging/9.0.0": { - "runtime": { - "runtimes/linux-x64/lib/net9.0/Microsoft.Extensions.Logging.dll": {} - } - } - }, - ".NETCoreApp,Version=v10.0/win-x86": { - "Microsoft.Extensions.Logging/9.0.0": { - "runtime": { - 
"runtimes/win-x86/lib/net9.0/Microsoft.Extensions.Logging.dll": {} - } - } - } - }, - "libraries": { - "Sample.App/1.0.0": { - "type": "project", - "serviceable": false - }, - "StellaOps.Toolkit/1.2.3": { - "type": "package", - "serviceable": true, - "sha512": "sha512-FAKE_TOOLKIT_SHA==", - "path": "stellaops.toolkit/1.2.3", - "hashPath": "stellaops.toolkit.1.2.3.nupkg.sha512" - }, - "Microsoft.Extensions.Logging/9.0.0": { - "type": "package", - "serviceable": true, - "sha512": "sha512-FAKE_LOGGING_SHA==", - "path": "microsoft.extensions.logging/9.0.0", - "hashPath": "microsoft.extensions.logging.9.0.0.nupkg.sha512" - } - } -} +{ + "runtimeTarget": { + "name": ".NETCoreApp,Version=v10.0/linux-x64" + }, + "targets": { + ".NETCoreApp,Version=v10.0": { + "Sample.App/1.0.0": { + "dependencies": { + "StellaOps.Toolkit": "1.2.3", + "Microsoft.Extensions.Logging": "9.0.0" + } + }, + "StellaOps.Toolkit/1.2.3": { + "dependencies": { + "Microsoft.Extensions.Logging": "9.0.0" + }, + "runtime": { + "lib/net10.0/StellaOps.Toolkit.dll": { + "assemblyVersion": "1.2.3.0", + "fileVersion": "1.2.3.0" + } + } + }, + "Microsoft.Extensions.Logging/9.0.0": { + "runtime": { + "lib/net9.0/Microsoft.Extensions.Logging.dll": { + "assemblyVersion": "9.0.0.0", + "fileVersion": "9.0.24.52809" + } + } + } + }, + ".NETCoreApp,Version=v10.0/linux-x64": { + "StellaOps.Toolkit/1.2.3": { + "runtime": { + "runtimes/linux-x64/native/libstellaops.toolkit.so": {} + } + }, + "Microsoft.Extensions.Logging/9.0.0": { + "runtime": { + "runtimes/linux-x64/lib/net9.0/Microsoft.Extensions.Logging.dll": {} + } + } + }, + ".NETCoreApp,Version=v10.0/win-x86": { + "Microsoft.Extensions.Logging/9.0.0": { + "runtime": { + "runtimes/win-x86/lib/net9.0/Microsoft.Extensions.Logging.dll": {} + } + } + } + }, + "libraries": { + "Sample.App/1.0.0": { + "type": "project", + "serviceable": false + }, + "StellaOps.Toolkit/1.2.3": { + "type": "package", + "serviceable": true, + "sha512": "sha512-FAKE_TOOLKIT_SHA==", + "path": "stellaops.toolkit/1.2.3", + "hashPath": "stellaops.toolkit.1.2.3.nupkg.sha512" + }, + "Microsoft.Extensions.Logging/9.0.0": { + "type": "package", + "serviceable": true, + "sha512": "sha512-FAKE_LOGGING_SHA==", + "path": "microsoft.extensions.logging/9.0.0", + "hashPath": "microsoft.extensions.logging.9.0.0.nupkg.sha512" + } + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/Sample.App.runtimeconfig.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/Sample.App.runtimeconfig.json similarity index 94% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/Sample.App.runtimeconfig.json rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/Sample.App.runtimeconfig.json index d750a40c..6b469911 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/Sample.App.runtimeconfig.json +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/Sample.App.runtimeconfig.json @@ -1,35 +1,35 @@ -{ - "runtimeOptions": { - "tfm": "net10.0", - "framework": { - "name": "Microsoft.NETCore.App", - "version": "10.0.0" - }, - "frameworks": [ - { - "name": "Microsoft.NETCore.App", - "version": "10.0.0" - }, - { - "name": "Microsoft.AspNetCore.App", - "version": "10.0.0" - } - ], - "runtimeGraph": { - "runtimes": { - "linux-x64": { - "fallbacks": [ - "linux", - "unix" - ] - }, - "win-x86": { - "fallbacks": [ - "win", - "any" - ] - } - } - } - 
} -} +{ + "runtimeOptions": { + "tfm": "net10.0", + "framework": { + "name": "Microsoft.NETCore.App", + "version": "10.0.0" + }, + "frameworks": [ + { + "name": "Microsoft.NETCore.App", + "version": "10.0.0" + }, + { + "name": "Microsoft.AspNetCore.App", + "version": "10.0.0" + } + ], + "runtimeGraph": { + "runtimes": { + "linux-x64": { + "fallbacks": [ + "linux", + "unix" + ] + }, + "win-x86": { + "fallbacks": [ + "win", + "any" + ] + } + } + } + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/expected.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/expected.json similarity index 97% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/expected.json rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/expected.json index 979f01f5..6c3fc270 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/expected.json +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/expected.json @@ -1,87 +1,87 @@ -[ - { - "analyzerId": "dotnet", - "componentKey": "purl::pkg:nuget/microsoft.extensions.logging@9.0.0", - "purl": "pkg:nuget/microsoft.extensions.logging@9.0.0", - "name": "Microsoft.Extensions.Logging", - "version": "9.0.0", - "type": "nuget", - "usedByEntrypoint": false, - "metadata": { - "assembly[0].assetPath": "lib/net9.0/Microsoft.Extensions.Logging.dll", - "assembly[0].fileVersion": "9.0.24.52809", - "assembly[0].tfm[0]": ".NETCoreApp,Version=v10.0", - "assembly[0].version": "9.0.0.0", - "assembly[1].assetPath": "runtimes/linux-x64/lib/net9.0/Microsoft.Extensions.Logging.dll", - "assembly[1].rid[0]": "linux-x64", - "assembly[1].tfm[0]": ".NETCoreApp,Version=v10.0", - "assembly[2].assetPath": "runtimes/win-x86/lib/net9.0/Microsoft.Extensions.Logging.dll", - "assembly[2].rid[0]": "win-x86", - "assembly[2].tfm[0]": ".NETCoreApp,Version=v10.0", - "deps.path[0]": "Sample.App.deps.json", - "deps.rid[0]": "linux-x64", - "deps.rid[1]": "win-x86", - "deps.tfm[0]": ".NETCoreApp,Version=v10.0", - "license.expression[0]": "MIT", - "package.hashPath[0]": "microsoft.extensions.logging.9.0.0.nupkg.sha512", - "package.id": "Microsoft.Extensions.Logging", - "package.id.normalized": "microsoft.extensions.logging", - "package.path[0]": "microsoft.extensions.logging/9.0.0", - "package.serviceable": "true", - "package.sha512[0]": "sha512-FAKE_LOGGING_SHA==", - "package.version": "9.0.0", - "provenance": "manifest" - }, - "evidence": [ - { - "kind": "file", - "source": "deps.json", - "locator": "Sample.App.deps.json", - "value": "Microsoft.Extensions.Logging/9.0.0" - } - ] - }, - { - "analyzerId": "dotnet", - "componentKey": "purl::pkg:nuget/stellaops.toolkit@1.2.3", - "purl": "pkg:nuget/stellaops.toolkit@1.2.3", - "name": "StellaOps.Toolkit", - "version": "1.2.3", - "type": "nuget", - "usedByEntrypoint": false, - "metadata": { - "assembly[0].assetPath": "lib/net10.0/StellaOps.Toolkit.dll", - "assembly[0].fileVersion": "1.2.3.0", - "assembly[0].tfm[0]": ".NETCoreApp,Version=v10.0", - "assembly[0].version": "1.2.3.0", - "deps.dependency[0]": "microsoft.extensions.logging", - "deps.path[0]": "Sample.App.deps.json", - "deps.rid[0]": "linux-x64", - "deps.tfm[0]": ".NETCoreApp,Version=v10.0", - "license.file.sha256[0]": "604e182900b0ecb1ffb911c817bcbd148a31b8f55ad392a3b770be8005048c5c", - "license.file[0]": "packages/stellaops.toolkit/1.2.3/LICENSE.txt", - "package.hashPath[0]": 
"stellaops.toolkit.1.2.3.nupkg.sha512", - "package.id": "StellaOps.Toolkit", - "package.id.normalized": "stellaops.toolkit", - "package.path[0]": "stellaops.toolkit/1.2.3", - "package.serviceable": "true", - "package.sha512[0]": "sha512-FAKE_TOOLKIT_SHA==", - "package.version": "1.2.3", - "provenance": "manifest" - }, - "evidence": [ - { - "kind": "file", - "source": "deps.json", - "locator": "Sample.App.deps.json", - "value": "StellaOps.Toolkit/1.2.3" - }, - { - "kind": "file", - "source": "license", - "locator": "packages/stellaops.toolkit/1.2.3/LICENSE.txt", - "sha256": "604e182900b0ecb1ffb911c817bcbd148a31b8f55ad392a3b770be8005048c5c" - } - ] - } +[ + { + "analyzerId": "dotnet", + "componentKey": "purl::pkg:nuget/microsoft.extensions.logging@9.0.0", + "purl": "pkg:nuget/microsoft.extensions.logging@9.0.0", + "name": "Microsoft.Extensions.Logging", + "version": "9.0.0", + "type": "nuget", + "usedByEntrypoint": false, + "metadata": { + "assembly[0].assetPath": "lib/net9.0/Microsoft.Extensions.Logging.dll", + "assembly[0].fileVersion": "9.0.24.52809", + "assembly[0].tfm[0]": ".NETCoreApp,Version=v10.0", + "assembly[0].version": "9.0.0.0", + "assembly[1].assetPath": "runtimes/linux-x64/lib/net9.0/Microsoft.Extensions.Logging.dll", + "assembly[1].rid[0]": "linux-x64", + "assembly[1].tfm[0]": ".NETCoreApp,Version=v10.0", + "assembly[2].assetPath": "runtimes/win-x86/lib/net9.0/Microsoft.Extensions.Logging.dll", + "assembly[2].rid[0]": "win-x86", + "assembly[2].tfm[0]": ".NETCoreApp,Version=v10.0", + "deps.path[0]": "Sample.App.deps.json", + "deps.rid[0]": "linux-x64", + "deps.rid[1]": "win-x86", + "deps.tfm[0]": ".NETCoreApp,Version=v10.0", + "license.expression[0]": "MIT", + "package.hashPath[0]": "microsoft.extensions.logging.9.0.0.nupkg.sha512", + "package.id": "Microsoft.Extensions.Logging", + "package.id.normalized": "microsoft.extensions.logging", + "package.path[0]": "microsoft.extensions.logging/9.0.0", + "package.serviceable": "true", + "package.sha512[0]": "sha512-FAKE_LOGGING_SHA==", + "package.version": "9.0.0", + "provenance": "manifest" + }, + "evidence": [ + { + "kind": "file", + "source": "deps.json", + "locator": "Sample.App.deps.json", + "value": "Microsoft.Extensions.Logging/9.0.0" + } + ] + }, + { + "analyzerId": "dotnet", + "componentKey": "purl::pkg:nuget/stellaops.toolkit@1.2.3", + "purl": "pkg:nuget/stellaops.toolkit@1.2.3", + "name": "StellaOps.Toolkit", + "version": "1.2.3", + "type": "nuget", + "usedByEntrypoint": false, + "metadata": { + "assembly[0].assetPath": "lib/net10.0/StellaOps.Toolkit.dll", + "assembly[0].fileVersion": "1.2.3.0", + "assembly[0].tfm[0]": ".NETCoreApp,Version=v10.0", + "assembly[0].version": "1.2.3.0", + "deps.dependency[0]": "microsoft.extensions.logging", + "deps.path[0]": "Sample.App.deps.json", + "deps.rid[0]": "linux-x64", + "deps.tfm[0]": ".NETCoreApp,Version=v10.0", + "license.file.sha256[0]": "604e182900b0ecb1ffb911c817bcbd148a31b8f55ad392a3b770be8005048c5c", + "license.file[0]": "packages/stellaops.toolkit/1.2.3/LICENSE.txt", + "package.hashPath[0]": "stellaops.toolkit.1.2.3.nupkg.sha512", + "package.id": "StellaOps.Toolkit", + "package.id.normalized": "stellaops.toolkit", + "package.path[0]": "stellaops.toolkit/1.2.3", + "package.serviceable": "true", + "package.sha512[0]": "sha512-FAKE_TOOLKIT_SHA==", + "package.version": "1.2.3", + "provenance": "manifest" + }, + "evidence": [ + { + "kind": "file", + "source": "deps.json", + "locator": "Sample.App.deps.json", + "value": "StellaOps.Toolkit/1.2.3" + }, + { + "kind": "file", + 
"source": "license", + "locator": "packages/stellaops.toolkit/1.2.3/LICENSE.txt", + "sha256": "604e182900b0ecb1ffb911c817bcbd148a31b8f55ad392a3b770be8005048c5c" + } + ] + } ] \ No newline at end of file diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/packages/microsoft.extensions.logging/9.0.0/microsoft.extensions.logging.nuspec b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/packages/microsoft.extensions.logging/9.0.0/microsoft.extensions.logging.nuspec similarity index 97% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/packages/microsoft.extensions.logging/9.0.0/microsoft.extensions.logging.nuspec rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/packages/microsoft.extensions.logging/9.0.0/microsoft.extensions.logging.nuspec index 2aa3ca72..b5241510 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/packages/microsoft.extensions.logging/9.0.0/microsoft.extensions.logging.nuspec +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/packages/microsoft.extensions.logging/9.0.0/microsoft.extensions.logging.nuspec @@ -1,11 +1,11 @@ -<?xml version="1.0" encoding="utf-8"?> -<package xmlns="http://schemas.microsoft.com/packaging/2013/05/nuspec.xsd"> - <metadata> - <id>Microsoft.Extensions.Logging</id> - <version>9.0.0</version> - <authors>Microsoft</authors> - <description>Logging abstractions for StellaOps test fixture.</description> - <license type="expression">MIT</license> - <licenseUrl>https://licenses.nuget.org/MIT</licenseUrl> - </metadata> -</package> +<?xml version="1.0" encoding="utf-8"?> +<package xmlns="http://schemas.microsoft.com/packaging/2013/05/nuspec.xsd"> + <metadata> + <id>Microsoft.Extensions.Logging</id> + <version>9.0.0</version> + <authors>Microsoft</authors> + <description>Logging abstractions for StellaOps test fixture.</description> + <license type="expression">MIT</license> + <licenseUrl>https://licenses.nuget.org/MIT</licenseUrl> + </metadata> +</package> diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/packages/stellaops.toolkit/1.2.3/LICENSE.txt b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/packages/stellaops.toolkit/1.2.3/LICENSE.txt similarity index 96% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/packages/stellaops.toolkit/1.2.3/LICENSE.txt rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/packages/stellaops.toolkit/1.2.3/LICENSE.txt index 0b18126c..5ebf8c52 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/packages/stellaops.toolkit/1.2.3/LICENSE.txt +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/packages/stellaops.toolkit/1.2.3/LICENSE.txt @@ -1,7 +1,7 @@ -StellaOps Toolkit License -========================= - -This sample license is provided for test fixtures only. - -Permission is granted to use, copy, modify, and distribute this fixture -for the purpose of automated testing. +StellaOps Toolkit License +========================= + +This sample license is provided for test fixtures only. + +Permission is granted to use, copy, modify, and distribute this fixture +for the purpose of automated testing. 
diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/packages/stellaops.toolkit/1.2.3/stellaops.toolkit.nuspec b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/packages/stellaops.toolkit/1.2.3/stellaops.toolkit.nuspec similarity index 97% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/packages/stellaops.toolkit/1.2.3/stellaops.toolkit.nuspec rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/packages/stellaops.toolkit/1.2.3/stellaops.toolkit.nuspec index 0889b15f..cac0966d 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/multi/packages/stellaops.toolkit/1.2.3/stellaops.toolkit.nuspec +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/dotnet/simple/packages/stellaops.toolkit/1.2.3/stellaops.toolkit.nuspec @@ -1,11 +1,11 @@ -<?xml version="1.0" encoding="utf-8"?> -<package xmlns="http://schemas.microsoft.com/packaging/2013/05/nuspec.xsd"> - <metadata> - <id>StellaOps.Toolkit</id> - <version>1.2.3</version> - <authors>StellaOps</authors> - <description>Toolkit sample package for analyzer fixtures.</description> - <license type="file">LICENSE.txt</license> - <licenseUrl>https://stella-ops.example/licenses/toolkit</licenseUrl> - </metadata> -</package> +<?xml version="1.0" encoding="utf-8"?> +<package xmlns="http://schemas.microsoft.com/packaging/2013/05/nuspec.xsd"> + <metadata> + <id>StellaOps.Toolkit</id> + <version>1.2.3</version> + <authors>StellaOps</authors> + <description>Toolkit sample package for analyzer fixtures.</description> + <license type="file">LICENSE.txt</license> + <licenseUrl>https://stella-ops.example/licenses/toolkit</licenseUrl> + </metadata> +</package> diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/rust/simple/Cargo.lock b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/rust/simple/Cargo.lock similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/rust/simple/Cargo.lock rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/rust/simple/Cargo.lock diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/rust/simple/expected.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/rust/simple/expected.json similarity index 96% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/rust/simple/expected.json rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/rust/simple/expected.json index ca23d06f..fb2c2360 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/rust/simple/expected.json +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/rust/simple/expected.json @@ -1,62 +1,62 @@ -[ - { - "analyzerId": "rust", - "componentKey": "purl::pkg:cargo/my_app@0.1.0", - "purl": "pkg:cargo/my_app@0.1.0", - "name": "my_app", - "version": "0.1.0", - "type": "cargo", - "usedByEntrypoint": false, - "metadata": { - "cargo.lock.path": "Cargo.lock", - "fingerprint.profile": "debug", - "fingerprint.targetKind": "bin", - "source": "registry\u002Bhttps://github.com/rust-lang/crates.io-index" - }, - "evidence": [ - { - "kind": "file", - "source": "cargo.fingerprint", - "locator": "target/debug/.fingerprint/my_app-1234567890abcdef/bin-my_app-1234567890abcdef.json", - "value": "bin" - }, - { - "kind": "file", - "source": "cargo.lock", - "locator": 
"Cargo.lock", - "value": "my_app 0.1.0" - } - ] - }, - { - "analyzerId": "rust", - "componentKey": "purl::pkg:cargo/serde@1.0.188", - "purl": "pkg:cargo/serde@1.0.188", - "name": "serde", - "version": "1.0.188", - "type": "cargo", - "usedByEntrypoint": false, - "metadata": { - "cargo.lock.path": "Cargo.lock", - "checksum": "abc123", - "fingerprint.profile": "release", - "fingerprint.targetKind": "lib", - "source": "registry\u002Bhttps://github.com/rust-lang/crates.io-index" - }, - "evidence": [ - { - "kind": "file", - "source": "cargo.fingerprint", - "locator": "target/debug/.fingerprint/serde-abcdef1234567890/libserde-abcdef1234567890.json", - "value": "lib" - }, - { - "kind": "file", - "source": "cargo.lock", - "locator": "Cargo.lock", - "value": "serde 1.0.188", - "sha256": "abc123" - } - ] - } +[ + { + "analyzerId": "rust", + "componentKey": "purl::pkg:cargo/my_app@0.1.0", + "purl": "pkg:cargo/my_app@0.1.0", + "name": "my_app", + "version": "0.1.0", + "type": "cargo", + "usedByEntrypoint": false, + "metadata": { + "cargo.lock.path": "Cargo.lock", + "fingerprint.profile": "debug", + "fingerprint.targetKind": "bin", + "source": "registry\u002Bhttps://github.com/rust-lang/crates.io-index" + }, + "evidence": [ + { + "kind": "file", + "source": "cargo.fingerprint", + "locator": "target/debug/.fingerprint/my_app-1234567890abcdef/bin-my_app-1234567890abcdef.json", + "value": "bin" + }, + { + "kind": "file", + "source": "cargo.lock", + "locator": "Cargo.lock", + "value": "my_app 0.1.0" + } + ] + }, + { + "analyzerId": "rust", + "componentKey": "purl::pkg:cargo/serde@1.0.188", + "purl": "pkg:cargo/serde@1.0.188", + "name": "serde", + "version": "1.0.188", + "type": "cargo", + "usedByEntrypoint": false, + "metadata": { + "cargo.lock.path": "Cargo.lock", + "checksum": "abc123", + "fingerprint.profile": "release", + "fingerprint.targetKind": "lib", + "source": "registry\u002Bhttps://github.com/rust-lang/crates.io-index" + }, + "evidence": [ + { + "kind": "file", + "source": "cargo.fingerprint", + "locator": "target/debug/.fingerprint/serde-abcdef1234567890/libserde-abcdef1234567890.json", + "value": "lib" + }, + { + "kind": "file", + "source": "cargo.lock", + "locator": "Cargo.lock", + "value": "serde 1.0.188", + "sha256": "abc123" + } + ] + } ] \ No newline at end of file diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/rust/simple/target/debug/.fingerprint/my_app-1234567890abcdef/bin-my_app-1234567890abcdef.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/rust/simple/target/debug/.fingerprint/my_app-1234567890abcdef/bin-my_app-1234567890abcdef.json similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/rust/simple/target/debug/.fingerprint/my_app-1234567890abcdef/bin-my_app-1234567890abcdef.json rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/rust/simple/target/debug/.fingerprint/my_app-1234567890abcdef/bin-my_app-1234567890abcdef.json diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/rust/simple/target/debug/.fingerprint/serde-abcdef1234567890/libserde-abcdef1234567890.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/rust/simple/target/debug/.fingerprint/serde-abcdef1234567890/libserde-abcdef1234567890.json similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/rust/simple/target/debug/.fingerprint/serde-abcdef1234567890/libserde-abcdef1234567890.json rename to 
src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Fixtures/lang/rust/simple/target/debug/.fingerprint/serde-abcdef1234567890/libserde-abcdef1234567890.json diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Harness/LanguageAnalyzerTestHarness.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Harness/LanguageAnalyzerTestHarness.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/Harness/LanguageAnalyzerTestHarness.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Harness/LanguageAnalyzerTestHarness.cs diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/Rust/RustLanguageAnalyzerTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Rust/RustLanguageAnalyzerTests.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/Rust/RustLanguageAnalyzerTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/Rust/RustLanguageAnalyzerTests.cs diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/StellaOps.Scanner.Analyzers.Lang.Tests.csproj b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/StellaOps.Scanner.Analyzers.Lang.Tests.csproj similarity index 72% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/StellaOps.Scanner.Analyzers.Lang.Tests.csproj rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/StellaOps.Scanner.Analyzers.Lang.Tests.csproj index 593ea4ab..2a12c489 100644 --- a/src/StellaOps.Scanner.Analyzers.Lang.Tests/StellaOps.Scanner.Analyzers.Lang.Tests.csproj +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/StellaOps.Scanner.Analyzers.Lang.Tests.csproj @@ -1,4 +1,5 @@ -<Project Sdk="Microsoft.NET.Sdk"> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> <LangVersion>preview</LangVersion> @@ -30,10 +31,10 @@ </ItemGroup> <ItemGroup> - <ProjectReference Include="..\StellaOps.Scanner.Analyzers.Lang\StellaOps.Scanner.Analyzers.Lang.csproj" /> - <ProjectReference Include="..\StellaOps.Scanner.Analyzers.Lang.DotNet\StellaOps.Scanner.Analyzers.Lang.DotNet.csproj" /> - <ProjectReference Include="..\StellaOps.Scanner.Analyzers.Lang.Rust\StellaOps.Scanner.Analyzers.Lang.Rust.csproj" /> - <ProjectReference Include="..\StellaOps.Scanner.Core\StellaOps.Scanner.Core.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Scanner.Analyzers.Lang/StellaOps.Scanner.Analyzers.Lang.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Scanner.Analyzers.Lang.DotNet/StellaOps.Scanner.Analyzers.Lang.DotNet.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Scanner.Analyzers.Lang.Rust/StellaOps.Scanner.Analyzers.Lang.Rust.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Scanner.Core/StellaOps.Scanner.Core.csproj" /> </ItemGroup> <ItemGroup> @@ -43,4 +44,4 @@ <ItemGroup> <None Include="Fixtures\**\*" CopyToOutputDirectory="PreserveNewest" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/TestUtilities/JavaClassFileFactory.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/TestUtilities/JavaClassFileFactory.cs similarity index 97% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/TestUtilities/JavaClassFileFactory.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/TestUtilities/JavaClassFileFactory.cs index c134a6c4..2f8fc10f 100644 --- 
a/src/StellaOps.Scanner.Analyzers.Lang.Tests/TestUtilities/JavaClassFileFactory.cs +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/TestUtilities/JavaClassFileFactory.cs @@ -1,202 +1,202 @@ -using System.Buffers.Binary; -using System.Text; - -namespace StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities; - -public static class JavaClassFileFactory -{ - public static byte[] CreateClassForNameInvoker(string internalClassName, string targetClassName) - { - using var buffer = new MemoryStream(); - using var writer = new BigEndianWriter(buffer); - - WriteClassFileHeader(writer, constantPoolCount: 16); - - writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8(internalClassName); // #1 - writer.WriteByte((byte)ConstantTag.Class); writer.WriteUInt16(1); // #2 - writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("java/lang/Object"); // #3 - writer.WriteByte((byte)ConstantTag.Class); writer.WriteUInt16(3); // #4 - writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("invoke"); // #5 - writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("()V"); // #6 - writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("Code"); // #7 - writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8(targetClassName); // #8 - writer.WriteByte((byte)ConstantTag.String); writer.WriteUInt16(8); // #9 - writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("java/lang/Class"); // #10 - writer.WriteByte((byte)ConstantTag.Class); writer.WriteUInt16(10); // #11 - writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("forName"); // #12 - writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("(Ljava/lang/String;)Ljava/lang/Class;"); // #13 - writer.WriteByte((byte)ConstantTag.NameAndType); writer.WriteUInt16(12); writer.WriteUInt16(13); // #14 - writer.WriteByte((byte)ConstantTag.Methodref); writer.WriteUInt16(11); writer.WriteUInt16(14); // #15 - - writer.WriteUInt16(0x0001); // public - writer.WriteUInt16(2); // this class - writer.WriteUInt16(4); // super class - - writer.WriteUInt16(0); // interfaces - writer.WriteUInt16(0); // fields - writer.WriteUInt16(1); // methods - - WriteInvokeMethod(writer, methodNameIndex: 5, descriptorIndex: 6, ldcIndex: 9, methodRefIndex: 15); - - writer.WriteUInt16(0); // class attributes - - return buffer.ToArray(); - } - - public static byte[] CreateTcclChecker(string internalClassName) - { - using var buffer = new MemoryStream(); - using var writer = new BigEndianWriter(buffer); - - WriteClassFileHeader(writer, constantPoolCount: 18); - - writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8(internalClassName); // #1 - writer.WriteByte((byte)ConstantTag.Class); writer.WriteUInt16(1); // #2 - writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("java/lang/Object"); // #3 - writer.WriteByte((byte)ConstantTag.Class); writer.WriteUInt16(3); // #4 - writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("check"); // #5 - writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("()V"); // #6 - writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("Code"); // #7 - writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("java/lang/Thread"); // #8 - writer.WriteByte((byte)ConstantTag.Class); writer.WriteUInt16(8); // #9 - writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("currentThread"); // #10 - writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("()Ljava/lang/Thread;"); // #11 - writer.WriteByte((byte)ConstantTag.NameAndType); writer.WriteUInt16(10); writer.WriteUInt16(11); // #12 - 
writer.WriteByte((byte)ConstantTag.Methodref); writer.WriteUInt16(9); writer.WriteUInt16(12); // #13 - writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("getContextClassLoader"); // #14 - writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("()Ljava/lang/ClassLoader;"); // #15 - writer.WriteByte((byte)ConstantTag.NameAndType); writer.WriteUInt16(14); writer.WriteUInt16(15); // #16 - writer.WriteByte((byte)ConstantTag.Methodref); writer.WriteUInt16(9); writer.WriteUInt16(16); // #17 - - writer.WriteUInt16(0x0001); // public - writer.WriteUInt16(2); // this - writer.WriteUInt16(4); // super - - writer.WriteUInt16(0); // interfaces - writer.WriteUInt16(0); // fields - writer.WriteUInt16(1); // methods - - WriteTcclMethod(writer, methodNameIndex: 5, descriptorIndex: 6, currentThreadMethodRefIndex: 13, getContextMethodRefIndex: 17); - - writer.WriteUInt16(0); // class attributes - - return buffer.ToArray(); - } - - private static void WriteClassFileHeader(BigEndianWriter writer, ushort constantPoolCount) - { - writer.WriteUInt32(0xCAFEBABE); - writer.WriteUInt16(0); - writer.WriteUInt16(52); - writer.WriteUInt16(constantPoolCount); - } - - private static void WriteInvokeMethod(BigEndianWriter writer, ushort methodNameIndex, ushort descriptorIndex, ushort ldcIndex, ushort methodRefIndex) - { - writer.WriteUInt16(0x0009); // public static - writer.WriteUInt16(methodNameIndex); - writer.WriteUInt16(descriptorIndex); - writer.WriteUInt16(1); // attributes_count - - writer.WriteUInt16(7); // "Code" - using var codeBuffer = new MemoryStream(); - using (var codeWriter = new BigEndianWriter(codeBuffer)) - { - codeWriter.WriteUInt16(1); // max_stack - codeWriter.WriteUInt16(0); // max_locals - codeWriter.WriteUInt32(6); // code_length - codeWriter.WriteByte(0x12); - codeWriter.WriteByte((byte)ldcIndex); - codeWriter.WriteByte(0xB8); - codeWriter.WriteUInt16(methodRefIndex); - codeWriter.WriteByte(0xB1); - codeWriter.WriteUInt16(0); // exception table length - codeWriter.WriteUInt16(0); // code attributes - } - - var codeBytes = codeBuffer.ToArray(); - writer.WriteUInt32((uint)codeBytes.Length); - writer.WriteBytes(codeBytes); - } - - private static void WriteTcclMethod(BigEndianWriter writer, ushort methodNameIndex, ushort descriptorIndex, ushort currentThreadMethodRefIndex, ushort getContextMethodRefIndex) - { - writer.WriteUInt16(0x0009); - writer.WriteUInt16(methodNameIndex); - writer.WriteUInt16(descriptorIndex); - writer.WriteUInt16(1); - - writer.WriteUInt16(7); - using var codeBuffer = new MemoryStream(); - using (var codeWriter = new BigEndianWriter(codeBuffer)) - { - codeWriter.WriteUInt16(2); - codeWriter.WriteUInt16(0); - codeWriter.WriteUInt32(8); - codeWriter.WriteByte(0xB8); - codeWriter.WriteUInt16(currentThreadMethodRefIndex); - codeWriter.WriteByte(0xB6); - codeWriter.WriteUInt16(getContextMethodRefIndex); - codeWriter.WriteByte(0x57); - codeWriter.WriteByte(0xB1); - codeWriter.WriteUInt16(0); - codeWriter.WriteUInt16(0); - } - - var codeBytes = codeBuffer.ToArray(); - writer.WriteUInt32((uint)codeBytes.Length); - writer.WriteBytes(codeBytes); - } - - private sealed class BigEndianWriter : IDisposable - { - private readonly BinaryWriter _writer; - - public BigEndianWriter(Stream stream) - { - _writer = new BinaryWriter(stream, Encoding.UTF8, leaveOpen: true); - } - - public void WriteByte(byte value) => _writer.Write(value); - - public void WriteBytes(byte[] data) => _writer.Write(data); - - public void WriteUInt16(ushort value) - { - Span<byte> buffer = stackalloc 
byte[2]; - BinaryPrimitives.WriteUInt16BigEndian(buffer, value); - _writer.Write(buffer); - } - - public void WriteUInt32(uint value) - { - Span<byte> buffer = stackalloc byte[4]; - BinaryPrimitives.WriteUInt32BigEndian(buffer, value); - _writer.Write(buffer); - } - - public void WriteUtf8(string value) - { - var bytes = Encoding.UTF8.GetBytes(value); - WriteUInt16((ushort)bytes.Length); - _writer.Write(bytes); - } - - public void Dispose() => _writer.Dispose(); - } - - private enum ConstantTag : byte - { - Utf8 = 1, - Integer = 3, - Float = 4, - Long = 5, - Double = 6, - Class = 7, - String = 8, - Fieldref = 9, - Methodref = 10, - InterfaceMethodref = 11, - NameAndType = 12, - } -} +using System.Buffers.Binary; +using System.Text; + +namespace StellaOps.Scanner.Analyzers.Lang.Tests.TestUtilities; + +public static class JavaClassFileFactory +{ + public static byte[] CreateClassForNameInvoker(string internalClassName, string targetClassName) + { + using var buffer = new MemoryStream(); + using var writer = new BigEndianWriter(buffer); + + WriteClassFileHeader(writer, constantPoolCount: 16); + + writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8(internalClassName); // #1 + writer.WriteByte((byte)ConstantTag.Class); writer.WriteUInt16(1); // #2 + writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("java/lang/Object"); // #3 + writer.WriteByte((byte)ConstantTag.Class); writer.WriteUInt16(3); // #4 + writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("invoke"); // #5 + writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("()V"); // #6 + writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("Code"); // #7 + writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8(targetClassName); // #8 + writer.WriteByte((byte)ConstantTag.String); writer.WriteUInt16(8); // #9 + writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("java/lang/Class"); // #10 + writer.WriteByte((byte)ConstantTag.Class); writer.WriteUInt16(10); // #11 + writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("forName"); // #12 + writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("(Ljava/lang/String;)Ljava/lang/Class;"); // #13 + writer.WriteByte((byte)ConstantTag.NameAndType); writer.WriteUInt16(12); writer.WriteUInt16(13); // #14 + writer.WriteByte((byte)ConstantTag.Methodref); writer.WriteUInt16(11); writer.WriteUInt16(14); // #15 + + writer.WriteUInt16(0x0001); // public + writer.WriteUInt16(2); // this class + writer.WriteUInt16(4); // super class + + writer.WriteUInt16(0); // interfaces + writer.WriteUInt16(0); // fields + writer.WriteUInt16(1); // methods + + WriteInvokeMethod(writer, methodNameIndex: 5, descriptorIndex: 6, ldcIndex: 9, methodRefIndex: 15); + + writer.WriteUInt16(0); // class attributes + + return buffer.ToArray(); + } + + public static byte[] CreateTcclChecker(string internalClassName) + { + using var buffer = new MemoryStream(); + using var writer = new BigEndianWriter(buffer); + + WriteClassFileHeader(writer, constantPoolCount: 18); + + writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8(internalClassName); // #1 + writer.WriteByte((byte)ConstantTag.Class); writer.WriteUInt16(1); // #2 + writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("java/lang/Object"); // #3 + writer.WriteByte((byte)ConstantTag.Class); writer.WriteUInt16(3); // #4 + writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("check"); // #5 + writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("()V"); // #6 + writer.WriteByte((byte)ConstantTag.Utf8); 
writer.WriteUtf8("Code"); // #7 + writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("java/lang/Thread"); // #8 + writer.WriteByte((byte)ConstantTag.Class); writer.WriteUInt16(8); // #9 + writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("currentThread"); // #10 + writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("()Ljava/lang/Thread;"); // #11 + writer.WriteByte((byte)ConstantTag.NameAndType); writer.WriteUInt16(10); writer.WriteUInt16(11); // #12 + writer.WriteByte((byte)ConstantTag.Methodref); writer.WriteUInt16(9); writer.WriteUInt16(12); // #13 + writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("getContextClassLoader"); // #14 + writer.WriteByte((byte)ConstantTag.Utf8); writer.WriteUtf8("()Ljava/lang/ClassLoader;"); // #15 + writer.WriteByte((byte)ConstantTag.NameAndType); writer.WriteUInt16(14); writer.WriteUInt16(15); // #16 + writer.WriteByte((byte)ConstantTag.Methodref); writer.WriteUInt16(9); writer.WriteUInt16(16); // #17 + + writer.WriteUInt16(0x0001); // public + writer.WriteUInt16(2); // this + writer.WriteUInt16(4); // super + + writer.WriteUInt16(0); // interfaces + writer.WriteUInt16(0); // fields + writer.WriteUInt16(1); // methods + + WriteTcclMethod(writer, methodNameIndex: 5, descriptorIndex: 6, currentThreadMethodRefIndex: 13, getContextMethodRefIndex: 17); + + writer.WriteUInt16(0); // class attributes + + return buffer.ToArray(); + } + + private static void WriteClassFileHeader(BigEndianWriter writer, ushort constantPoolCount) + { + writer.WriteUInt32(0xCAFEBABE); + writer.WriteUInt16(0); + writer.WriteUInt16(52); + writer.WriteUInt16(constantPoolCount); + } + + private static void WriteInvokeMethod(BigEndianWriter writer, ushort methodNameIndex, ushort descriptorIndex, ushort ldcIndex, ushort methodRefIndex) + { + writer.WriteUInt16(0x0009); // public static + writer.WriteUInt16(methodNameIndex); + writer.WriteUInt16(descriptorIndex); + writer.WriteUInt16(1); // attributes_count + + writer.WriteUInt16(7); // "Code" + using var codeBuffer = new MemoryStream(); + using (var codeWriter = new BigEndianWriter(codeBuffer)) + { + codeWriter.WriteUInt16(1); // max_stack + codeWriter.WriteUInt16(0); // max_locals + codeWriter.WriteUInt32(6); // code_length + codeWriter.WriteByte(0x12); + codeWriter.WriteByte((byte)ldcIndex); + codeWriter.WriteByte(0xB8); + codeWriter.WriteUInt16(methodRefIndex); + codeWriter.WriteByte(0xB1); + codeWriter.WriteUInt16(0); // exception table length + codeWriter.WriteUInt16(0); // code attributes + } + + var codeBytes = codeBuffer.ToArray(); + writer.WriteUInt32((uint)codeBytes.Length); + writer.WriteBytes(codeBytes); + } + + private static void WriteTcclMethod(BigEndianWriter writer, ushort methodNameIndex, ushort descriptorIndex, ushort currentThreadMethodRefIndex, ushort getContextMethodRefIndex) + { + writer.WriteUInt16(0x0009); + writer.WriteUInt16(methodNameIndex); + writer.WriteUInt16(descriptorIndex); + writer.WriteUInt16(1); + + writer.WriteUInt16(7); + using var codeBuffer = new MemoryStream(); + using (var codeWriter = new BigEndianWriter(codeBuffer)) + { + codeWriter.WriteUInt16(2); + codeWriter.WriteUInt16(0); + codeWriter.WriteUInt32(8); + codeWriter.WriteByte(0xB8); + codeWriter.WriteUInt16(currentThreadMethodRefIndex); + codeWriter.WriteByte(0xB6); + codeWriter.WriteUInt16(getContextMethodRefIndex); + codeWriter.WriteByte(0x57); + codeWriter.WriteByte(0xB1); + codeWriter.WriteUInt16(0); + codeWriter.WriteUInt16(0); + } + + var codeBytes = codeBuffer.ToArray(); + 
writer.WriteUInt32((uint)codeBytes.Length); + writer.WriteBytes(codeBytes); + } + + private sealed class BigEndianWriter : IDisposable + { + private readonly BinaryWriter _writer; + + public BigEndianWriter(Stream stream) + { + _writer = new BinaryWriter(stream, Encoding.UTF8, leaveOpen: true); + } + + public void WriteByte(byte value) => _writer.Write(value); + + public void WriteBytes(byte[] data) => _writer.Write(data); + + public void WriteUInt16(ushort value) + { + Span<byte> buffer = stackalloc byte[2]; + BinaryPrimitives.WriteUInt16BigEndian(buffer, value); + _writer.Write(buffer); + } + + public void WriteUInt32(uint value) + { + Span<byte> buffer = stackalloc byte[4]; + BinaryPrimitives.WriteUInt32BigEndian(buffer, value); + _writer.Write(buffer); + } + + public void WriteUtf8(string value) + { + var bytes = Encoding.UTF8.GetBytes(value); + WriteUInt16((ushort)bytes.Length); + _writer.Write(bytes); + } + + public void Dispose() => _writer.Dispose(); + } + + private enum ConstantTag : byte + { + Utf8 = 1, + Integer = 3, + Float = 4, + Long = 5, + Double = 6, + Class = 7, + String = 8, + Fieldref = 9, + Methodref = 10, + InterfaceMethodref = 11, + NameAndType = 12, + } +} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/TestUtilities/JavaFixtureBuilder.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/TestUtilities/JavaFixtureBuilder.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/TestUtilities/JavaFixtureBuilder.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/TestUtilities/JavaFixtureBuilder.cs diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Tests/TestUtilities/TestPaths.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/TestUtilities/TestPaths.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.Lang.Tests/TestUtilities/TestPaths.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/TestUtilities/TestPaths.cs diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/xunit.runner.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/xunit.runner.json new file mode 100644 index 00000000..249d815c --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Tests/xunit.runner.json @@ -0,0 +1,3 @@ +{ + "$schema": "https://xunit.net/schema/current/xunit.runner.schema.json" +} diff --git a/src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/apk/lib/apk/db/installed b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/apk/lib/apk/db/installed similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/apk/lib/apk/db/installed rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/apk/lib/apk/db/installed diff --git a/src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/dpkg/var/lib/dpkg/info/bash.conffiles b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/dpkg/var/lib/dpkg/info/bash.conffiles similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/dpkg/var/lib/dpkg/info/bash.conffiles rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/dpkg/var/lib/dpkg/info/bash.conffiles diff --git a/src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/dpkg/var/lib/dpkg/info/bash.list b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/dpkg/var/lib/dpkg/info/bash.list similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/dpkg/var/lib/dpkg/info/bash.list rename to 
src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/dpkg/var/lib/dpkg/info/bash.list diff --git a/src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/dpkg/var/lib/dpkg/info/bash.md5sums b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/dpkg/var/lib/dpkg/info/bash.md5sums similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/dpkg/var/lib/dpkg/info/bash.md5sums rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/dpkg/var/lib/dpkg/info/bash.md5sums diff --git a/src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/dpkg/var/lib/dpkg/status b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/dpkg/var/lib/dpkg/status similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/dpkg/var/lib/dpkg/status rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/dpkg/var/lib/dpkg/status diff --git a/src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/goldens/apk.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/goldens/apk.json similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/goldens/apk.json rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/goldens/apk.json diff --git a/src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/goldens/dpkg.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/goldens/dpkg.json similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/goldens/dpkg.json rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/goldens/dpkg.json diff --git a/src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/goldens/rpm.json b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/goldens/rpm.json similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/goldens/rpm.json rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Tests/Fixtures/goldens/rpm.json diff --git a/src/StellaOps.Scanner.Analyzers.OS.Tests/Mapping/OsComponentMapperTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Tests/Mapping/OsComponentMapperTests.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS.Tests/Mapping/OsComponentMapperTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Tests/Mapping/OsComponentMapperTests.cs diff --git a/src/StellaOps.Scanner.Analyzers.OS.Tests/OsAnalyzerDeterminismTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Tests/OsAnalyzerDeterminismTests.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS.Tests/OsAnalyzerDeterminismTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Tests/OsAnalyzerDeterminismTests.cs diff --git a/src/StellaOps.Scanner.Analyzers.OS.Tests/StellaOps.Scanner.Analyzers.OS.Tests.csproj b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Tests/StellaOps.Scanner.Analyzers.OS.Tests.csproj similarity index 62% rename from src/StellaOps.Scanner.Analyzers.OS.Tests/StellaOps.Scanner.Analyzers.OS.Tests.csproj rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Tests/StellaOps.Scanner.Analyzers.OS.Tests.csproj index 389cfd5f..0c337de5 100644 --- a/src/StellaOps.Scanner.Analyzers.OS.Tests/StellaOps.Scanner.Analyzers.OS.Tests.csproj +++ b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Tests/StellaOps.Scanner.Analyzers.OS.Tests.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> 
<TargetFramework>net10.0</TargetFramework> @@ -17,12 +18,12 @@ <PackageReference Include="coverlet.collector" Version="6.0.4" /> </ItemGroup> <ItemGroup> - <ProjectReference Include="..\StellaOps.Scanner.Analyzers.OS\StellaOps.Scanner.Analyzers.OS.csproj" /> - <ProjectReference Include="..\StellaOps.Scanner.Analyzers.OS.Apk\StellaOps.Scanner.Analyzers.OS.Apk.csproj" /> - <ProjectReference Include="..\StellaOps.Scanner.Analyzers.OS.Dpkg\StellaOps.Scanner.Analyzers.OS.Dpkg.csproj" /> - <ProjectReference Include="..\StellaOps.Scanner.Analyzers.OS.Rpm\StellaOps.Scanner.Analyzers.OS.Rpm.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Scanner.Analyzers.OS/StellaOps.Scanner.Analyzers.OS.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Scanner.Analyzers.OS.Apk/StellaOps.Scanner.Analyzers.OS.Apk.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Scanner.Analyzers.OS.Dpkg/StellaOps.Scanner.Analyzers.OS.Dpkg.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Scanner.Analyzers.OS.Rpm/StellaOps.Scanner.Analyzers.OS.Rpm.csproj" /> </ItemGroup> <ItemGroup> <None Include="Fixtures\**\*" CopyToOutputDirectory="PreserveNewest" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Scanner.Analyzers.OS.Tests/TestUtilities/FixtureManager.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Tests/TestUtilities/FixtureManager.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS.Tests/TestUtilities/FixtureManager.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Tests/TestUtilities/FixtureManager.cs diff --git a/src/StellaOps.Scanner.Analyzers.OS.Tests/TestUtilities/GoldenAssert.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Tests/TestUtilities/GoldenAssert.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS.Tests/TestUtilities/GoldenAssert.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Tests/TestUtilities/GoldenAssert.cs diff --git a/src/StellaOps.Scanner.Analyzers.OS.Tests/TestUtilities/SnapshotSerializer.cs b/src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Tests/TestUtilities/SnapshotSerializer.cs similarity index 100% rename from src/StellaOps.Scanner.Analyzers.OS.Tests/TestUtilities/SnapshotSerializer.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.OS.Tests/TestUtilities/SnapshotSerializer.cs diff --git a/src/StellaOps.Scanner.Cache.Tests/LayerCacheRoundTripTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Cache.Tests/LayerCacheRoundTripTests.cs similarity index 100% rename from src/StellaOps.Scanner.Cache.Tests/LayerCacheRoundTripTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Cache.Tests/LayerCacheRoundTripTests.cs diff --git a/src/StellaOps.Scanner.Cache.Tests/StellaOps.Scanner.Cache.Tests.csproj b/src/Scanner/__Tests/StellaOps.Scanner.Cache.Tests/StellaOps.Scanner.Cache.Tests.csproj similarity index 84% rename from src/StellaOps.Scanner.Cache.Tests/StellaOps.Scanner.Cache.Tests.csproj rename to src/Scanner/__Tests/StellaOps.Scanner.Cache.Tests/StellaOps.Scanner.Cache.Tests.csproj index 525d94aa..50feae0a 100644 --- a/src/StellaOps.Scanner.Cache.Tests/StellaOps.Scanner.Cache.Tests.csproj +++ b/src/Scanner/__Tests/StellaOps.Scanner.Cache.Tests/StellaOps.Scanner.Cache.Tests.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -20,6 +21,6 @@ <Using Remove="StellaOps.Concelier.Testing" /> 
</ItemGroup> <ItemGroup> - <ProjectReference Include="../StellaOps.Scanner.Cache/StellaOps.Scanner.Cache.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Scanner.Cache/StellaOps.Scanner.Cache.csproj" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Scanner.Core.Tests/Contracts/ComponentGraphBuilderTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Core.Tests/Contracts/ComponentGraphBuilderTests.cs similarity index 100% rename from src/StellaOps.Scanner.Core.Tests/Contracts/ComponentGraphBuilderTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Core.Tests/Contracts/ComponentGraphBuilderTests.cs diff --git a/src/StellaOps.Scanner.Core.Tests/Contracts/ComponentModelsTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Core.Tests/Contracts/ComponentModelsTests.cs similarity index 100% rename from src/StellaOps.Scanner.Core.Tests/Contracts/ComponentModelsTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Core.Tests/Contracts/ComponentModelsTests.cs diff --git a/src/StellaOps.Scanner.Core.Tests/Contracts/ScanJobTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Core.Tests/Contracts/ScanJobTests.cs similarity index 100% rename from src/StellaOps.Scanner.Core.Tests/Contracts/ScanJobTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Core.Tests/Contracts/ScanJobTests.cs diff --git a/src/StellaOps.Scanner.Core.Tests/Contracts/ScannerCoreContractsTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Core.Tests/Contracts/ScannerCoreContractsTests.cs similarity index 100% rename from src/StellaOps.Scanner.Core.Tests/Contracts/ScannerCoreContractsTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Core.Tests/Contracts/ScannerCoreContractsTests.cs diff --git a/src/StellaOps.Scanner.Core.Tests/Fixtures/scan-job.json b/src/Scanner/__Tests/StellaOps.Scanner.Core.Tests/Fixtures/scan-job.json similarity index 100% rename from src/StellaOps.Scanner.Core.Tests/Fixtures/scan-job.json rename to src/Scanner/__Tests/StellaOps.Scanner.Core.Tests/Fixtures/scan-job.json diff --git a/src/StellaOps.Scanner.Core.Tests/Fixtures/scan-progress-event.json b/src/Scanner/__Tests/StellaOps.Scanner.Core.Tests/Fixtures/scan-progress-event.json similarity index 100% rename from src/StellaOps.Scanner.Core.Tests/Fixtures/scan-progress-event.json rename to src/Scanner/__Tests/StellaOps.Scanner.Core.Tests/Fixtures/scan-progress-event.json diff --git a/src/StellaOps.Scanner.Core.Tests/Fixtures/scanner-error.json b/src/Scanner/__Tests/StellaOps.Scanner.Core.Tests/Fixtures/scanner-error.json similarity index 100% rename from src/StellaOps.Scanner.Core.Tests/Fixtures/scanner-error.json rename to src/Scanner/__Tests/StellaOps.Scanner.Core.Tests/Fixtures/scanner-error.json diff --git a/src/StellaOps.Scanner.Core.Tests/Observability/ScannerLogExtensionsPerformanceTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Core.Tests/Observability/ScannerLogExtensionsPerformanceTests.cs similarity index 100% rename from src/StellaOps.Scanner.Core.Tests/Observability/ScannerLogExtensionsPerformanceTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Core.Tests/Observability/ScannerLogExtensionsPerformanceTests.cs diff --git a/src/StellaOps.Scanner.Core.Tests/Observability/ScannerLogExtensionsTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Core.Tests/Observability/ScannerLogExtensionsTests.cs similarity index 100% rename from src/StellaOps.Scanner.Core.Tests/Observability/ScannerLogExtensionsTests.cs rename to 
src/Scanner/__Tests/StellaOps.Scanner.Core.Tests/Observability/ScannerLogExtensionsTests.cs diff --git a/src/StellaOps.Scanner.Core.Tests/Security/AuthorityTokenSourceTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Core.Tests/Security/AuthorityTokenSourceTests.cs similarity index 97% rename from src/StellaOps.Scanner.Core.Tests/Security/AuthorityTokenSourceTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Core.Tests/Security/AuthorityTokenSourceTests.cs index 71cfadc3..927f229b 100644 --- a/src/StellaOps.Scanner.Core.Tests/Security/AuthorityTokenSourceTests.cs +++ b/src/Scanner/__Tests/StellaOps.Scanner.Core.Tests/Security/AuthorityTokenSourceTests.cs @@ -1,95 +1,95 @@ -using System.Collections.Generic; -using System; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.Extensions.Time.Testing; -using Microsoft.IdentityModel.Tokens; -using StellaOps.Auth.Client; -using StellaOps.Scanner.Core.Security; -using Xunit; - -namespace StellaOps.Scanner.Core.Tests.Security; - -public sealed class AuthorityTokenSourceTests -{ - [Fact] - public async Task GetAsync_ReusesCachedTokenUntilRefreshSkew() - { - var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 10, 18, 12, 0, 0, TimeSpan.Zero)); - var client = new FakeTokenClient(timeProvider); - var source = new AuthorityTokenSource(client, TimeSpan.FromSeconds(30), timeProvider, NullLogger<AuthorityTokenSource>.Instance); - - var token1 = await source.GetAsync("scanner", new[] { "scanner.read" }); - Assert.Equal(1, client.RequestCount); - Assert.Null(client.LastAdditionalParameters); - - var token2 = await source.GetAsync("scanner", new[] { "scanner.read" }); - Assert.Equal(1, client.RequestCount); - Assert.Equal(token1.AccessToken, token2.AccessToken); - - timeProvider.Advance(TimeSpan.FromMinutes(3)); - var token3 = await source.GetAsync("scanner", new[] { "scanner.read" }); - Assert.Equal(2, client.RequestCount); - Assert.NotEqual(token1.AccessToken, token3.AccessToken); - } - - [Fact] - public async Task InvalidateAsync_RemovesCachedToken() - { - var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 10, 18, 12, 0, 0, TimeSpan.Zero)); - var client = new FakeTokenClient(timeProvider); - var source = new AuthorityTokenSource(client, TimeSpan.FromSeconds(30), timeProvider, NullLogger<AuthorityTokenSource>.Instance); - - _ = await source.GetAsync("scanner", new[] { "scanner.read" }); - Assert.Equal(1, client.RequestCount); - Assert.Null(client.LastAdditionalParameters); - - await source.InvalidateAsync("scanner", new[] { "scanner.read" }); - _ = await source.GetAsync("scanner", new[] { "scanner.read" }); - - Assert.Equal(2, client.RequestCount); - } - - private sealed class FakeTokenClient : IStellaOpsTokenClient - { - private readonly FakeTimeProvider timeProvider; - private int counter; - - public FakeTokenClient(FakeTimeProvider timeProvider) - { - this.timeProvider = timeProvider; - } - - public int RequestCount => counter; - - public IReadOnlyDictionary<string, string>? LastAdditionalParameters { get; private set; } - - public Task<StellaOpsTokenResult> RequestClientCredentialsTokenAsync(string? scope = null, IReadOnlyDictionary<string, string>? additionalParameters = null, CancellationToken cancellationToken = default) - { - LastAdditionalParameters = additionalParameters; - var access = $"token-{Interlocked.Increment(ref counter)}"; - var expires = timeProvider.GetUtcNow().AddMinutes(2); - var scopes = scope is null - ? 
Array.Empty<string>() - : scope.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); - - return Task.FromResult(new StellaOpsTokenResult(access, "Bearer", expires, scopes)); - } - - public Task<StellaOpsTokenResult> RequestPasswordTokenAsync(string username, string password, string? scope = null, IReadOnlyDictionary<string, string>? additionalParameters = null, CancellationToken cancellationToken = default) - => throw new NotSupportedException(); - - public Task<JsonWebKeySet> GetJsonWebKeySetAsync(CancellationToken cancellationToken = default) - => throw new NotSupportedException(); - - public ValueTask<StellaOpsTokenCacheEntry?> GetCachedTokenAsync(string key, CancellationToken cancellationToken = default) - => ValueTask.FromResult<StellaOpsTokenCacheEntry?>(null); - - public ValueTask CacheTokenAsync(string key, StellaOpsTokenCacheEntry entry, CancellationToken cancellationToken = default) - => ValueTask.CompletedTask; - - public ValueTask ClearCachedTokenAsync(string key, CancellationToken cancellationToken = default) - => ValueTask.CompletedTask; - } -} +using System.Collections.Generic; +using System; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Time.Testing; +using Microsoft.IdentityModel.Tokens; +using StellaOps.Auth.Client; +using StellaOps.Scanner.Core.Security; +using Xunit; + +namespace StellaOps.Scanner.Core.Tests.Security; + +public sealed class AuthorityTokenSourceTests +{ + [Fact] + public async Task GetAsync_ReusesCachedTokenUntilRefreshSkew() + { + var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 10, 18, 12, 0, 0, TimeSpan.Zero)); + var client = new FakeTokenClient(timeProvider); + var source = new AuthorityTokenSource(client, TimeSpan.FromSeconds(30), timeProvider, NullLogger<AuthorityTokenSource>.Instance); + + var token1 = await source.GetAsync("scanner", new[] { "scanner.read" }); + Assert.Equal(1, client.RequestCount); + Assert.Null(client.LastAdditionalParameters); + + var token2 = await source.GetAsync("scanner", new[] { "scanner.read" }); + Assert.Equal(1, client.RequestCount); + Assert.Equal(token1.AccessToken, token2.AccessToken); + + timeProvider.Advance(TimeSpan.FromMinutes(3)); + var token3 = await source.GetAsync("scanner", new[] { "scanner.read" }); + Assert.Equal(2, client.RequestCount); + Assert.NotEqual(token1.AccessToken, token3.AccessToken); + } + + [Fact] + public async Task InvalidateAsync_RemovesCachedToken() + { + var timeProvider = new FakeTimeProvider(new DateTimeOffset(2025, 10, 18, 12, 0, 0, TimeSpan.Zero)); + var client = new FakeTokenClient(timeProvider); + var source = new AuthorityTokenSource(client, TimeSpan.FromSeconds(30), timeProvider, NullLogger<AuthorityTokenSource>.Instance); + + _ = await source.GetAsync("scanner", new[] { "scanner.read" }); + Assert.Equal(1, client.RequestCount); + Assert.Null(client.LastAdditionalParameters); + + await source.InvalidateAsync("scanner", new[] { "scanner.read" }); + _ = await source.GetAsync("scanner", new[] { "scanner.read" }); + + Assert.Equal(2, client.RequestCount); + } + + private sealed class FakeTokenClient : IStellaOpsTokenClient + { + private readonly FakeTimeProvider timeProvider; + private int counter; + + public FakeTokenClient(FakeTimeProvider timeProvider) + { + this.timeProvider = timeProvider; + } + + public int RequestCount => counter; + + public IReadOnlyDictionary<string, string>? 
LastAdditionalParameters { get; private set; } + + public Task<StellaOpsTokenResult> RequestClientCredentialsTokenAsync(string? scope = null, IReadOnlyDictionary<string, string>? additionalParameters = null, CancellationToken cancellationToken = default) + { + LastAdditionalParameters = additionalParameters; + var access = $"token-{Interlocked.Increment(ref counter)}"; + var expires = timeProvider.GetUtcNow().AddMinutes(2); + var scopes = scope is null + ? Array.Empty<string>() + : scope.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); + + return Task.FromResult(new StellaOpsTokenResult(access, "Bearer", expires, scopes)); + } + + public Task<StellaOpsTokenResult> RequestPasswordTokenAsync(string username, string password, string? scope = null, IReadOnlyDictionary<string, string>? additionalParameters = null, CancellationToken cancellationToken = default) + => throw new NotSupportedException(); + + public Task<JsonWebKeySet> GetJsonWebKeySetAsync(CancellationToken cancellationToken = default) + => throw new NotSupportedException(); + + public ValueTask<StellaOpsTokenCacheEntry?> GetCachedTokenAsync(string key, CancellationToken cancellationToken = default) + => ValueTask.FromResult<StellaOpsTokenCacheEntry?>(null); + + public ValueTask CacheTokenAsync(string key, StellaOpsTokenCacheEntry entry, CancellationToken cancellationToken = default) + => ValueTask.CompletedTask; + + public ValueTask ClearCachedTokenAsync(string key, CancellationToken cancellationToken = default) + => ValueTask.CompletedTask; + } +} diff --git a/src/StellaOps.Scanner.Core.Tests/Security/DpopProofValidatorTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Core.Tests/Security/DpopProofValidatorTests.cs similarity index 100% rename from src/StellaOps.Scanner.Core.Tests/Security/DpopProofValidatorTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Core.Tests/Security/DpopProofValidatorTests.cs diff --git a/src/StellaOps.Scanner.Core.Tests/Security/RestartOnlyPluginGuardTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Core.Tests/Security/RestartOnlyPluginGuardTests.cs similarity index 100% rename from src/StellaOps.Scanner.Core.Tests/Security/RestartOnlyPluginGuardTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Core.Tests/Security/RestartOnlyPluginGuardTests.cs diff --git a/src/Scanner/__Tests/StellaOps.Scanner.Core.Tests/StellaOps.Scanner.Core.Tests.csproj b/src/Scanner/__Tests/StellaOps.Scanner.Core.Tests/StellaOps.Scanner.Core.Tests.csproj new file mode 100644 index 00000000..81f5181f --- /dev/null +++ b/src/Scanner/__Tests/StellaOps.Scanner.Core.Tests/StellaOps.Scanner.Core.Tests.csproj @@ -0,0 +1,16 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Scanner.Core/StellaOps.Scanner.Core.csproj" /> + <ProjectReference Include="../../../Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOps.Auth.Abstractions.csproj" /> + <ProjectReference Include="../../../Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOps.Auth.Client.csproj" /> + </ItemGroup> + <ItemGroup> + <None Update="Fixtures\*.json" CopyToOutputDirectory="PreserveNewest" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Scanner.Core.Tests/Utility/ScannerIdentifiersTests.cs 
b/src/Scanner/__Tests/StellaOps.Scanner.Core.Tests/Utility/ScannerIdentifiersTests.cs similarity index 100% rename from src/StellaOps.Scanner.Core.Tests/Utility/ScannerIdentifiersTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Core.Tests/Utility/ScannerIdentifiersTests.cs diff --git a/src/StellaOps.Scanner.Core.Tests/Utility/ScannerTimestampsTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Core.Tests/Utility/ScannerTimestampsTests.cs similarity index 100% rename from src/StellaOps.Scanner.Core.Tests/Utility/ScannerTimestampsTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Core.Tests/Utility/ScannerTimestampsTests.cs diff --git a/src/StellaOps.Scanner.Diff.Tests/ComponentDifferTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Diff.Tests/ComponentDifferTests.cs similarity index 100% rename from src/StellaOps.Scanner.Diff.Tests/ComponentDifferTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Diff.Tests/ComponentDifferTests.cs diff --git a/src/StellaOps.Scanner.Diff.Tests/StellaOps.Scanner.Diff.Tests.csproj b/src/Scanner/__Tests/StellaOps.Scanner.Diff.Tests/StellaOps.Scanner.Diff.Tests.csproj similarity index 59% rename from src/StellaOps.Scanner.Diff.Tests/StellaOps.Scanner.Diff.Tests.csproj rename to src/Scanner/__Tests/StellaOps.Scanner.Diff.Tests/StellaOps.Scanner.Diff.Tests.csproj index 9b78c0fa..6a495526 100644 --- a/src/StellaOps.Scanner.Diff.Tests/StellaOps.Scanner.Diff.Tests.csproj +++ b/src/Scanner/__Tests/StellaOps.Scanner.Diff.Tests/StellaOps.Scanner.Diff.Tests.csproj @@ -1,11 +1,12 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - - <ItemGroup> - <ProjectReference Include="..\StellaOps.Scanner.Diff\StellaOps.Scanner.Diff.csproj" /> - </ItemGroup> -</Project> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Scanner.Diff/StellaOps.Scanner.Diff.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Scanner.Emit.Tests/Composition/CycloneDxComposerTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Emit.Tests/Composition/CycloneDxComposerTests.cs similarity index 100% rename from src/StellaOps.Scanner.Emit.Tests/Composition/CycloneDxComposerTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Emit.Tests/Composition/CycloneDxComposerTests.cs diff --git a/src/StellaOps.Scanner.Emit.Tests/Composition/ScanAnalysisCompositionBuilderTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Emit.Tests/Composition/ScanAnalysisCompositionBuilderTests.cs similarity index 100% rename from src/StellaOps.Scanner.Emit.Tests/Composition/ScanAnalysisCompositionBuilderTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Emit.Tests/Composition/ScanAnalysisCompositionBuilderTests.cs diff --git a/src/StellaOps.Scanner.Emit.Tests/Index/BomIndexBuilderTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Emit.Tests/Index/BomIndexBuilderTests.cs similarity index 100% rename from src/StellaOps.Scanner.Emit.Tests/Index/BomIndexBuilderTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Emit.Tests/Index/BomIndexBuilderTests.cs diff --git a/src/StellaOps.Scanner.Emit.Tests/Packaging/ScannerArtifactPackageBuilderTests.cs 
b/src/Scanner/__Tests/StellaOps.Scanner.Emit.Tests/Packaging/ScannerArtifactPackageBuilderTests.cs similarity index 100% rename from src/StellaOps.Scanner.Emit.Tests/Packaging/ScannerArtifactPackageBuilderTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Emit.Tests/Packaging/ScannerArtifactPackageBuilderTests.cs diff --git a/src/StellaOps.Scanner.Emit.Tests/StellaOps.Scanner.Emit.Tests.csproj b/src/Scanner/__Tests/StellaOps.Scanner.Emit.Tests/StellaOps.Scanner.Emit.Tests.csproj similarity index 59% rename from src/StellaOps.Scanner.Emit.Tests/StellaOps.Scanner.Emit.Tests.csproj rename to src/Scanner/__Tests/StellaOps.Scanner.Emit.Tests/StellaOps.Scanner.Emit.Tests.csproj index 8f20afaf..23824068 100644 --- a/src/StellaOps.Scanner.Emit.Tests/StellaOps.Scanner.Emit.Tests.csproj +++ b/src/Scanner/__Tests/StellaOps.Scanner.Emit.Tests/StellaOps.Scanner.Emit.Tests.csproj @@ -1,11 +1,12 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - - <ItemGroup> - <ProjectReference Include="..\StellaOps.Scanner.Emit\StellaOps.Scanner.Emit.csproj" /> - </ItemGroup> -</Project> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Scanner.Emit/StellaOps.Scanner.Emit.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Scanner.EntryTrace.Tests/EntryTraceAnalyzerTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/EntryTraceAnalyzerTests.cs similarity index 100% rename from src/StellaOps.Scanner.EntryTrace.Tests/EntryTraceAnalyzerTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/EntryTraceAnalyzerTests.cs diff --git a/src/StellaOps.Scanner.EntryTrace.Tests/EntryTraceImageContextFactoryTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/EntryTraceImageContextFactoryTests.cs similarity index 96% rename from src/StellaOps.Scanner.EntryTrace.Tests/EntryTraceImageContextFactoryTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/EntryTraceImageContextFactoryTests.cs index 3905c6a9..cae92fe6 100644 --- a/src/StellaOps.Scanner.EntryTrace.Tests/EntryTraceImageContextFactoryTests.cs +++ b/src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/EntryTraceImageContextFactoryTests.cs @@ -1,86 +1,86 @@ -using System.Collections.Immutable; -using System.IO; -using System.Text; -using Microsoft.Extensions.Logging.Abstractions; -using Xunit; - -namespace StellaOps.Scanner.EntryTrace.Tests; - -public sealed class EntryTraceImageContextFactoryTests -{ - [Fact] - public void Create_UsesEnvironmentAndEntrypointFromConfig() - { - var json = """ - { - "config": { - "Env": ["PATH=/custom/bin:/usr/bin", "FOO=bar"], - "Entrypoint": ["/bin/sh", "-c"], - "Cmd": ["./start.sh"], - "WorkingDir": "/srv/app", - "User": "1000:1000" - } - } - """; - - var config = OciImageConfigLoader.Load(new MemoryStream(Encoding.UTF8.GetBytes(json))); - var options = new EntryTraceAnalyzerOptions - { - DefaultPath = "/default/bin" - }; - - var fs = new TestRootFileSystem(); - var imageContext = EntryTraceImageContextFactory.Create( - config, - fs, - options, - "sha256:testimage", - "scan-001", - NullLogger.Instance); - - Assert.Equal("/bin/sh", 
imageContext.Entrypoint.Entrypoint[0]); - Assert.Equal("./start.sh", imageContext.Entrypoint.Command[0]); - - Assert.Equal("/srv/app", imageContext.Context.WorkingDirectory); - Assert.Equal("1000:1000", imageContext.Context.User); - Assert.Equal("sha256:testimage", imageContext.Context.ImageDigest); - Assert.Equal("scan-001", imageContext.Context.ScanId); - - Assert.True(imageContext.Context.Environment.ContainsKey("FOO")); - Assert.Equal("bar", imageContext.Context.Environment["FOO"]); - - Assert.Equal("/custom/bin:/usr/bin", string.Join(":", imageContext.Context.Path)); - } - - [Fact] - public void Create_FallsBackToDefaultPathWhenMissing() - { - var json = """ - { - "config": { - "Env": ["FOO=bar"], - "Cmd": ["node", "server.js"] - } - } - """; - - var config = OciImageConfigLoader.Load(new MemoryStream(Encoding.UTF8.GetBytes(json))); - var options = new EntryTraceAnalyzerOptions - { - DefaultPath = "/usr/local/sbin:/usr/local/bin" - }; - - var fs = new TestRootFileSystem(); - var imageContext = EntryTraceImageContextFactory.Create( - config, - fs, - options, - "sha256:abc", - "scan-xyz", - NullLogger.Instance); - - Assert.Equal("/usr/local/sbin:/usr/local/bin", string.Join(":", imageContext.Context.Path)); - Assert.Equal("root", imageContext.Context.User); - Assert.Equal("/", imageContext.Context.WorkingDirectory); - } -} +using System.Collections.Immutable; +using System.IO; +using System.Text; +using Microsoft.Extensions.Logging.Abstractions; +using Xunit; + +namespace StellaOps.Scanner.EntryTrace.Tests; + +public sealed class EntryTraceImageContextFactoryTests +{ + [Fact] + public void Create_UsesEnvironmentAndEntrypointFromConfig() + { + var json = """ + { + "config": { + "Env": ["PATH=/custom/bin:/usr/bin", "FOO=bar"], + "Entrypoint": ["/bin/sh", "-c"], + "Cmd": ["./start.sh"], + "WorkingDir": "/srv/app", + "User": "1000:1000" + } + } + """; + + var config = OciImageConfigLoader.Load(new MemoryStream(Encoding.UTF8.GetBytes(json))); + var options = new EntryTraceAnalyzerOptions + { + DefaultPath = "/default/bin" + }; + + var fs = new TestRootFileSystem(); + var imageContext = EntryTraceImageContextFactory.Create( + config, + fs, + options, + "sha256:testimage", + "scan-001", + NullLogger.Instance); + + Assert.Equal("/bin/sh", imageContext.Entrypoint.Entrypoint[0]); + Assert.Equal("./start.sh", imageContext.Entrypoint.Command[0]); + + Assert.Equal("/srv/app", imageContext.Context.WorkingDirectory); + Assert.Equal("1000:1000", imageContext.Context.User); + Assert.Equal("sha256:testimage", imageContext.Context.ImageDigest); + Assert.Equal("scan-001", imageContext.Context.ScanId); + + Assert.True(imageContext.Context.Environment.ContainsKey("FOO")); + Assert.Equal("bar", imageContext.Context.Environment["FOO"]); + + Assert.Equal("/custom/bin:/usr/bin", string.Join(":", imageContext.Context.Path)); + } + + [Fact] + public void Create_FallsBackToDefaultPathWhenMissing() + { + var json = """ + { + "config": { + "Env": ["FOO=bar"], + "Cmd": ["node", "server.js"] + } + } + """; + + var config = OciImageConfigLoader.Load(new MemoryStream(Encoding.UTF8.GetBytes(json))); + var options = new EntryTraceAnalyzerOptions + { + DefaultPath = "/usr/local/sbin:/usr/local/bin" + }; + + var fs = new TestRootFileSystem(); + var imageContext = EntryTraceImageContextFactory.Create( + config, + fs, + options, + "sha256:abc", + "scan-xyz", + NullLogger.Instance); + + Assert.Equal("/usr/local/sbin:/usr/local/bin", string.Join(":", imageContext.Context.Path)); + Assert.Equal("root", imageContext.Context.User); 
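+        // WorkingDir is likewise absent from this config, so the context is expected to fall back to the root directory.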
+ Assert.Equal("/", imageContext.Context.WorkingDirectory); + } +} diff --git a/src/StellaOps.Scanner.EntryTrace.Tests/LayeredRootFileSystemTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/LayeredRootFileSystemTests.cs similarity index 97% rename from src/StellaOps.Scanner.EntryTrace.Tests/LayeredRootFileSystemTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/LayeredRootFileSystemTests.cs index 771eaab9..257712b6 100644 --- a/src/StellaOps.Scanner.EntryTrace.Tests/LayeredRootFileSystemTests.cs +++ b/src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/LayeredRootFileSystemTests.cs @@ -1,176 +1,176 @@ -using System; -using System.Formats.Tar; -using System.IO; -using System.Text; -using Xunit; - -namespace StellaOps.Scanner.EntryTrace.Tests; - -public sealed class LayeredRootFileSystemTests : IDisposable -{ - private readonly string _tempRoot; - - public LayeredRootFileSystemTests() - { - _tempRoot = Path.Combine(Path.GetTempPath(), $"entrytrace-layerfs-{Guid.NewGuid():n}"); - Directory.CreateDirectory(_tempRoot); - } - - [Fact] - public void FromDirectories_HandlesWhiteoutsAndResolution() - { - var layer1 = CreateLayerDirectory("layer1"); - var layer2 = CreateLayerDirectory("layer2"); - - var usrBin1 = Path.Combine(layer1, "usr", "bin"); - Directory.CreateDirectory(usrBin1); - var entrypointPath = Path.Combine(usrBin1, "entrypoint.sh"); - File.WriteAllText(entrypointPath, "#!/bin/sh\necho layer1\n"); - - var optDirectory1 = Path.Combine(layer1, "opt"); - Directory.CreateDirectory(optDirectory1); - File.WriteAllText(Path.Combine(optDirectory1, "setup.sh"), "echo setup\n"); - - var optDirectory2 = Path.Combine(layer2, "opt"); - Directory.CreateDirectory(optDirectory2); - File.WriteAllText(Path.Combine(optDirectory2, ".wh.setup.sh"), string.Empty); - - var fs = LayeredRootFileSystem.FromDirectories(new[] - { - new LayeredRootFileSystem.LayerDirectory("sha256:layer1", layer1), - new LayeredRootFileSystem.LayerDirectory("sha256:layer2", layer2) - }); - - Assert.True(fs.TryResolveExecutable("entrypoint.sh", new[] { "/usr/bin" }, out var descriptor)); - Assert.Equal("/usr/bin/entrypoint.sh", descriptor.Path); - Assert.Equal("sha256:layer1", descriptor.LayerDigest); - - Assert.True(fs.TryReadAllText("/usr/bin/entrypoint.sh", out var textDescriptor, out var content)); - Assert.Equal(descriptor.Path, textDescriptor.Path); - Assert.Contains("echo layer1", content); - - Assert.False(fs.TryReadAllText("/opt/setup.sh", out _, out _)); - - var optEntries = fs.EnumerateDirectory("/opt"); - Assert.DoesNotContain(optEntries, entry => entry.Path.EndsWith("setup.sh", StringComparison.Ordinal)); - } - - [Fact] - public void FromArchives_ResolvesSymlinkAndWhiteout() - { - var layer1Path = Path.Combine(_tempRoot, "layer1.tar"); - var layer2Path = Path.Combine(_tempRoot, "layer2.tar"); - - CreateArchive(layer1Path, writer => - { - var scriptEntry = new PaxTarEntry(TarEntryType.RegularFile, "usr/local/bin/start.sh"); - scriptEntry.Mode = UnixFileMode.UserRead | UnixFileMode.UserExecute | - UnixFileMode.GroupRead | UnixFileMode.GroupExecute | - UnixFileMode.OtherRead | UnixFileMode.OtherExecute; - scriptEntry.DataStream = new MemoryStream(Encoding.UTF8.GetBytes("#!/bin/sh\necho start\n")); - writer.WriteEntry(scriptEntry); - - var oldScript = new PaxTarEntry(TarEntryType.RegularFile, "opt/old.sh"); - oldScript.Mode = UnixFileMode.UserRead | UnixFileMode.UserExecute | - UnixFileMode.GroupRead | UnixFileMode.GroupExecute | - UnixFileMode.OtherRead | 
UnixFileMode.OtherExecute; - oldScript.DataStream = new MemoryStream(Encoding.UTF8.GetBytes("echo old\n")); - writer.WriteEntry(oldScript); - }); - - CreateArchive(layer2Path, writer => - { - var symlinkEntry = new PaxTarEntry(TarEntryType.SymbolicLink, "usr/bin/start.sh"); - symlinkEntry.LinkName = "/usr/local/bin/start.sh"; - writer.WriteEntry(symlinkEntry); - - var whiteout = new PaxTarEntry(TarEntryType.RegularFile, "opt/.wh.old.sh"); - whiteout.DataStream = new MemoryStream(Array.Empty<byte>()); - writer.WriteEntry(whiteout); - }); - - var fs = LayeredRootFileSystem.FromArchives(new[] - { - new LayeredRootFileSystem.LayerArchive("sha256:base", layer1Path), - new LayeredRootFileSystem.LayerArchive("sha256:update", layer2Path) - }); - - Assert.True(fs.TryResolveExecutable("start.sh", new[] { "/usr/bin" }, out var descriptor)); - Assert.Equal("/usr/local/bin/start.sh", descriptor.Path); - Assert.Equal("sha256:base", descriptor.LayerDigest); - - Assert.True(fs.TryReadAllText("/usr/bin/start.sh", out var resolvedDescriptor, out var content)); - Assert.Equal(descriptor.Path, resolvedDescriptor.Path); - Assert.Contains("echo start", content); - - Assert.False(fs.TryReadAllText("/opt/old.sh", out _, out _)); - } - - [Fact] - public void FromArchives_ResolvesHardLinkContent() - { - var baseLayer = Path.Combine(_tempRoot, "base.tar"); - var hardLinkLayer = Path.Combine(_tempRoot, "hardlink.tar"); - - CreateArchive(baseLayer, writer => - { - var baseEntry = new PaxTarEntry(TarEntryType.RegularFile, "usr/bin/tool.sh"); - baseEntry.Mode = UnixFileMode.UserRead | UnixFileMode.UserExecute | - UnixFileMode.GroupRead | UnixFileMode.GroupExecute | - UnixFileMode.OtherRead | UnixFileMode.OtherExecute; - baseEntry.DataStream = new MemoryStream(Encoding.UTF8.GetBytes("#!/bin/sh\necho tool\n")); - writer.WriteEntry(baseEntry); - }); - - CreateArchive(hardLinkLayer, writer => - { - var hardLink = new PaxTarEntry(TarEntryType.HardLink, "bin/tool.sh") - { - LinkName = "/usr/bin/tool.sh", - Mode = UnixFileMode.UserRead | UnixFileMode.UserExecute | - UnixFileMode.GroupRead | UnixFileMode.GroupExecute | - UnixFileMode.OtherRead | UnixFileMode.OtherExecute - }; - writer.WriteEntry(hardLink); - }); - - var fs = LayeredRootFileSystem.FromArchives(new[] - { - new LayeredRootFileSystem.LayerArchive("sha256:base", baseLayer), - new LayeredRootFileSystem.LayerArchive("sha256:hardlink", hardLinkLayer) - }); - - Assert.True(fs.TryReadAllText("/bin/tool.sh", out var descriptor, out var content)); - Assert.Equal("/usr/bin/tool.sh", descriptor.Path); - Assert.Contains("echo tool", content); - } - - private string CreateLayerDirectory(string name) - { - var path = Path.Combine(_tempRoot, name); - Directory.CreateDirectory(path); - return path; - } - - private static void CreateArchive(string path, Action<TarWriter> writerAction) - { - using var stream = File.Create(path); - using var writer = new TarWriter(stream, leaveOpen: false); - writerAction(writer); - } - - public void Dispose() - { - try - { - if (Directory.Exists(_tempRoot)) - { - Directory.Delete(_tempRoot, recursive: true); - } - } - catch - { - // ignore cleanup failures - } - } -} +using System; +using System.Formats.Tar; +using System.IO; +using System.Text; +using Xunit; + +namespace StellaOps.Scanner.EntryTrace.Tests; + +public sealed class LayeredRootFileSystemTests : IDisposable +{ + private readonly string _tempRoot; + + public LayeredRootFileSystemTests() + { + _tempRoot = Path.Combine(Path.GetTempPath(), $"entrytrace-layerfs-{Guid.NewGuid():n}"); + 
Directory.CreateDirectory(_tempRoot); + } + + [Fact] + public void FromDirectories_HandlesWhiteoutsAndResolution() + { + var layer1 = CreateLayerDirectory("layer1"); + var layer2 = CreateLayerDirectory("layer2"); + + var usrBin1 = Path.Combine(layer1, "usr", "bin"); + Directory.CreateDirectory(usrBin1); + var entrypointPath = Path.Combine(usrBin1, "entrypoint.sh"); + File.WriteAllText(entrypointPath, "#!/bin/sh\necho layer1\n"); + + var optDirectory1 = Path.Combine(layer1, "opt"); + Directory.CreateDirectory(optDirectory1); + File.WriteAllText(Path.Combine(optDirectory1, "setup.sh"), "echo setup\n"); + + var optDirectory2 = Path.Combine(layer2, "opt"); + Directory.CreateDirectory(optDirectory2); + File.WriteAllText(Path.Combine(optDirectory2, ".wh.setup.sh"), string.Empty); + + var fs = LayeredRootFileSystem.FromDirectories(new[] + { + new LayeredRootFileSystem.LayerDirectory("sha256:layer1", layer1), + new LayeredRootFileSystem.LayerDirectory("sha256:layer2", layer2) + }); + + Assert.True(fs.TryResolveExecutable("entrypoint.sh", new[] { "/usr/bin" }, out var descriptor)); + Assert.Equal("/usr/bin/entrypoint.sh", descriptor.Path); + Assert.Equal("sha256:layer1", descriptor.LayerDigest); + + Assert.True(fs.TryReadAllText("/usr/bin/entrypoint.sh", out var textDescriptor, out var content)); + Assert.Equal(descriptor.Path, textDescriptor.Path); + Assert.Contains("echo layer1", content); + + Assert.False(fs.TryReadAllText("/opt/setup.sh", out _, out _)); + + var optEntries = fs.EnumerateDirectory("/opt"); + Assert.DoesNotContain(optEntries, entry => entry.Path.EndsWith("setup.sh", StringComparison.Ordinal)); + } + + [Fact] + public void FromArchives_ResolvesSymlinkAndWhiteout() + { + var layer1Path = Path.Combine(_tempRoot, "layer1.tar"); + var layer2Path = Path.Combine(_tempRoot, "layer2.tar"); + + CreateArchive(layer1Path, writer => + { + var scriptEntry = new PaxTarEntry(TarEntryType.RegularFile, "usr/local/bin/start.sh"); + scriptEntry.Mode = UnixFileMode.UserRead | UnixFileMode.UserExecute | + UnixFileMode.GroupRead | UnixFileMode.GroupExecute | + UnixFileMode.OtherRead | UnixFileMode.OtherExecute; + scriptEntry.DataStream = new MemoryStream(Encoding.UTF8.GetBytes("#!/bin/sh\necho start\n")); + writer.WriteEntry(scriptEntry); + + var oldScript = new PaxTarEntry(TarEntryType.RegularFile, "opt/old.sh"); + oldScript.Mode = UnixFileMode.UserRead | UnixFileMode.UserExecute | + UnixFileMode.GroupRead | UnixFileMode.GroupExecute | + UnixFileMode.OtherRead | UnixFileMode.OtherExecute; + oldScript.DataStream = new MemoryStream(Encoding.UTF8.GetBytes("echo old\n")); + writer.WriteEntry(oldScript); + }); + + CreateArchive(layer2Path, writer => + { + var symlinkEntry = new PaxTarEntry(TarEntryType.SymbolicLink, "usr/bin/start.sh"); + symlinkEntry.LinkName = "/usr/local/bin/start.sh"; + writer.WriteEntry(symlinkEntry); + + var whiteout = new PaxTarEntry(TarEntryType.RegularFile, "opt/.wh.old.sh"); + whiteout.DataStream = new MemoryStream(Array.Empty<byte>()); + writer.WriteEntry(whiteout); + }); + + var fs = LayeredRootFileSystem.FromArchives(new[] + { + new LayeredRootFileSystem.LayerArchive("sha256:base", layer1Path), + new LayeredRootFileSystem.LayerArchive("sha256:update", layer2Path) + }); + + Assert.True(fs.TryResolveExecutable("start.sh", new[] { "/usr/bin" }, out var descriptor)); + Assert.Equal("/usr/local/bin/start.sh", descriptor.Path); + Assert.Equal("sha256:base", descriptor.LayerDigest); + + Assert.True(fs.TryReadAllText("/usr/bin/start.sh", out var resolvedDescriptor, out var content)); 
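+        // Reading through the layer2 symlink should resolve to the layer1 script descriptor and return its content.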
+ Assert.Equal(descriptor.Path, resolvedDescriptor.Path); + Assert.Contains("echo start", content); + + Assert.False(fs.TryReadAllText("/opt/old.sh", out _, out _)); + } + + [Fact] + public void FromArchives_ResolvesHardLinkContent() + { + var baseLayer = Path.Combine(_tempRoot, "base.tar"); + var hardLinkLayer = Path.Combine(_tempRoot, "hardlink.tar"); + + CreateArchive(baseLayer, writer => + { + var baseEntry = new PaxTarEntry(TarEntryType.RegularFile, "usr/bin/tool.sh"); + baseEntry.Mode = UnixFileMode.UserRead | UnixFileMode.UserExecute | + UnixFileMode.GroupRead | UnixFileMode.GroupExecute | + UnixFileMode.OtherRead | UnixFileMode.OtherExecute; + baseEntry.DataStream = new MemoryStream(Encoding.UTF8.GetBytes("#!/bin/sh\necho tool\n")); + writer.WriteEntry(baseEntry); + }); + + CreateArchive(hardLinkLayer, writer => + { + var hardLink = new PaxTarEntry(TarEntryType.HardLink, "bin/tool.sh") + { + LinkName = "/usr/bin/tool.sh", + Mode = UnixFileMode.UserRead | UnixFileMode.UserExecute | + UnixFileMode.GroupRead | UnixFileMode.GroupExecute | + UnixFileMode.OtherRead | UnixFileMode.OtherExecute + }; + writer.WriteEntry(hardLink); + }); + + var fs = LayeredRootFileSystem.FromArchives(new[] + { + new LayeredRootFileSystem.LayerArchive("sha256:base", baseLayer), + new LayeredRootFileSystem.LayerArchive("sha256:hardlink", hardLinkLayer) + }); + + Assert.True(fs.TryReadAllText("/bin/tool.sh", out var descriptor, out var content)); + Assert.Equal("/usr/bin/tool.sh", descriptor.Path); + Assert.Contains("echo tool", content); + } + + private string CreateLayerDirectory(string name) + { + var path = Path.Combine(_tempRoot, name); + Directory.CreateDirectory(path); + return path; + } + + private static void CreateArchive(string path, Action<TarWriter> writerAction) + { + using var stream = File.Create(path); + using var writer = new TarWriter(stream, leaveOpen: false); + writerAction(writer); + } + + public void Dispose() + { + try + { + if (Directory.Exists(_tempRoot)) + { + Directory.Delete(_tempRoot, recursive: true); + } + } + catch + { + // ignore cleanup failures + } + } +} diff --git a/src/StellaOps.Scanner.EntryTrace.Tests/ShellParserTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/ShellParserTests.cs similarity index 100% rename from src/StellaOps.Scanner.EntryTrace.Tests/ShellParserTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/ShellParserTests.cs diff --git a/src/StellaOps.Scanner.EntryTrace.Tests/StellaOps.Scanner.EntryTrace.Tests.csproj b/src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/StellaOps.Scanner.EntryTrace.Tests.csproj similarity index 66% rename from src/StellaOps.Scanner.EntryTrace.Tests/StellaOps.Scanner.EntryTrace.Tests.csproj rename to src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/StellaOps.Scanner.EntryTrace.Tests.csproj index e0257e8c..d0ea4877 100644 --- a/src/StellaOps.Scanner.EntryTrace.Tests/StellaOps.Scanner.EntryTrace.Tests.csproj +++ b/src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/StellaOps.Scanner.EntryTrace.Tests.csproj @@ -1,13 +1,14 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="../StellaOps.Scanner.EntryTrace/StellaOps.Scanner.EntryTrace.csproj" /> - </ItemGroup> - <ItemGroup> - <None Update="Fixtures\**\*" CopyToOutputDirectory="PreserveNewest" /> - </ItemGroup> -</Project> +<?xml version='1.0' 
encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Scanner.EntryTrace/StellaOps.Scanner.EntryTrace.csproj" /> + </ItemGroup> + <ItemGroup> + <None Update="Fixtures\**\*" CopyToOutputDirectory="PreserveNewest" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Scanner.EntryTrace.Tests/TestRootFileSystem.cs b/src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/TestRootFileSystem.cs similarity index 100% rename from src/StellaOps.Scanner.EntryTrace.Tests/TestRootFileSystem.cs rename to src/Scanner/__Tests/StellaOps.Scanner.EntryTrace.Tests/TestRootFileSystem.cs diff --git a/src/StellaOps.Scanner.Queue.Tests/QueueLeaseIntegrationTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Queue.Tests/QueueLeaseIntegrationTests.cs similarity index 100% rename from src/StellaOps.Scanner.Queue.Tests/QueueLeaseIntegrationTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Queue.Tests/QueueLeaseIntegrationTests.cs diff --git a/src/StellaOps.Scanner.Queue.Tests/StellaOps.Scanner.Queue.Tests.csproj b/src/Scanner/__Tests/StellaOps.Scanner.Queue.Tests/StellaOps.Scanner.Queue.Tests.csproj similarity index 69% rename from src/StellaOps.Scanner.Queue.Tests/StellaOps.Scanner.Queue.Tests.csproj rename to src/Scanner/__Tests/StellaOps.Scanner.Queue.Tests/StellaOps.Scanner.Queue.Tests.csproj index 5dac25ed..691d58f2 100644 --- a/src/StellaOps.Scanner.Queue.Tests/StellaOps.Scanner.Queue.Tests.csproj +++ b/src/Scanner/__Tests/StellaOps.Scanner.Queue.Tests/StellaOps.Scanner.Queue.Tests.csproj @@ -1,14 +1,15 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <IsPackable>false</IsPackable> - </PropertyGroup> - <ItemGroup> - <PackageReference Include="FluentAssertions" Version="6.12.0" /> - </ItemGroup> - <ItemGroup> - <ProjectReference Include="../StellaOps.Scanner.Queue/StellaOps.Scanner.Queue.csproj" /> - </ItemGroup> -</Project> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <IsPackable>false</IsPackable> + </PropertyGroup> + <ItemGroup> + <PackageReference Include="FluentAssertions" Version="6.12.0" /> + </ItemGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Scanner.Queue/StellaOps.Scanner.Queue.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/Attestation/AttestorClientTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/Attestation/AttestorClientTests.cs similarity index 100% rename from src/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/Attestation/AttestorClientTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/Attestation/AttestorClientTests.cs diff --git a/src/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/Cas/LocalCasClientTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/Cas/LocalCasClientTests.cs similarity index 100% rename from src/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/Cas/LocalCasClientTests.cs rename to 
src/Scanner/__Tests/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/Cas/LocalCasClientTests.cs diff --git a/src/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/Descriptor/DescriptorGeneratorTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/Descriptor/DescriptorGeneratorTests.cs similarity index 100% rename from src/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/Descriptor/DescriptorGeneratorTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/Descriptor/DescriptorGeneratorTests.cs diff --git a/src/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/Descriptor/DescriptorGoldenTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/Descriptor/DescriptorGoldenTests.cs similarity index 100% rename from src/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/Descriptor/DescriptorGoldenTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/Descriptor/DescriptorGoldenTests.cs diff --git a/src/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/Fixtures/descriptor.baseline.json b/src/Scanner/__Tests/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/Fixtures/descriptor.baseline.json similarity index 100% rename from src/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/Fixtures/descriptor.baseline.json rename to src/Scanner/__Tests/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/Fixtures/descriptor.baseline.json diff --git a/src/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/Manifest/BuildxPluginManifestLoaderTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/Manifest/BuildxPluginManifestLoaderTests.cs similarity index 100% rename from src/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/Manifest/BuildxPluginManifestLoaderTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/Manifest/BuildxPluginManifestLoaderTests.cs diff --git a/src/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests.csproj b/src/Scanner/__Tests/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests.csproj similarity index 69% rename from src/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests.csproj rename to src/Scanner/__Tests/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests.csproj index 55c965cb..f7577ac4 100644 --- a/src/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests.csproj +++ b/src/Scanner/__Tests/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests.csproj @@ -1,12 +1,13 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> - <ProjectReference Include="..\StellaOps.Scanner.Sbomer.BuildXPlugin\StellaOps.Scanner.Sbomer.BuildXPlugin.csproj" /> + <ProjectReference Include="../../StellaOps.Scanner.Sbomer.BuildXPlugin/StellaOps.Scanner.Sbomer.BuildXPlugin.csproj" /> </ItemGroup> <ItemGroup> @@ -14,4 +15,4 @@ <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> </None> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/TestUtilities/TempDirectory.cs 
b/src/Scanner/__Tests/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/TestUtilities/TempDirectory.cs similarity index 100% rename from src/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/TestUtilities/TempDirectory.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Sbomer.BuildXPlugin.Tests/TestUtilities/TempDirectory.cs diff --git a/src/StellaOps.Scanner.Storage.Tests/InMemoryArtifactObjectStore.cs b/src/Scanner/__Tests/StellaOps.Scanner.Storage.Tests/InMemoryArtifactObjectStore.cs similarity index 100% rename from src/StellaOps.Scanner.Storage.Tests/InMemoryArtifactObjectStore.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Storage.Tests/InMemoryArtifactObjectStore.cs diff --git a/src/StellaOps.Scanner.Storage.Tests/RustFsArtifactObjectStoreTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Storage.Tests/RustFsArtifactObjectStoreTests.cs similarity index 100% rename from src/StellaOps.Scanner.Storage.Tests/RustFsArtifactObjectStoreTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Storage.Tests/RustFsArtifactObjectStoreTests.cs diff --git a/src/StellaOps.Scanner.Storage.Tests/ScannerMongoFixture.cs b/src/Scanner/__Tests/StellaOps.Scanner.Storage.Tests/ScannerMongoFixture.cs similarity index 100% rename from src/StellaOps.Scanner.Storage.Tests/ScannerMongoFixture.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Storage.Tests/ScannerMongoFixture.cs diff --git a/src/StellaOps.Scanner.Storage.Tests/StellaOps.Scanner.Storage.Tests.csproj b/src/Scanner/__Tests/StellaOps.Scanner.Storage.Tests/StellaOps.Scanner.Storage.Tests.csproj similarity index 58% rename from src/StellaOps.Scanner.Storage.Tests/StellaOps.Scanner.Storage.Tests.csproj rename to src/Scanner/__Tests/StellaOps.Scanner.Storage.Tests/StellaOps.Scanner.Storage.Tests.csproj index 2b133e25..4d645ab2 100644 --- a/src/StellaOps.Scanner.Storage.Tests/StellaOps.Scanner.Storage.Tests.csproj +++ b/src/Scanner/__Tests/StellaOps.Scanner.Storage.Tests/StellaOps.Scanner.Storage.Tests.csproj @@ -1,10 +1,11 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="../StellaOps.Scanner.Storage/StellaOps.Scanner.Storage.csproj" /> - </ItemGroup> -</Project> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Scanner.Storage/StellaOps.Scanner.Storage.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Scanner.Storage.Tests/StorageDualWriteFixture.cs b/src/Scanner/__Tests/StellaOps.Scanner.Storage.Tests/StorageDualWriteFixture.cs similarity index 100% rename from src/StellaOps.Scanner.Storage.Tests/StorageDualWriteFixture.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Storage.Tests/StorageDualWriteFixture.cs diff --git a/src/StellaOps.Scanner.WebService.Tests/AuthorizationTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.WebService.Tests/AuthorizationTests.cs similarity index 100% rename from src/StellaOps.Scanner.WebService.Tests/AuthorizationTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.WebService.Tests/AuthorizationTests.cs diff --git a/src/StellaOps.Scanner.WebService.Tests/HealthEndpointsTests.cs 
b/src/Scanner/__Tests/StellaOps.Scanner.WebService.Tests/HealthEndpointsTests.cs similarity index 100% rename from src/StellaOps.Scanner.WebService.Tests/HealthEndpointsTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.WebService.Tests/HealthEndpointsTests.cs diff --git a/src/StellaOps.Scanner.WebService.Tests/PlatformEventPublisherRegistrationTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.WebService.Tests/PlatformEventPublisherRegistrationTests.cs similarity index 100% rename from src/StellaOps.Scanner.WebService.Tests/PlatformEventPublisherRegistrationTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.WebService.Tests/PlatformEventPublisherRegistrationTests.cs diff --git a/src/StellaOps.Scanner.WebService.Tests/PlatformEventSamplesTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.WebService.Tests/PlatformEventSamplesTests.cs similarity index 100% rename from src/StellaOps.Scanner.WebService.Tests/PlatformEventSamplesTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.WebService.Tests/PlatformEventSamplesTests.cs diff --git a/src/StellaOps.Scanner.WebService.Tests/PolicyEndpointsTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.WebService.Tests/PolicyEndpointsTests.cs similarity index 100% rename from src/StellaOps.Scanner.WebService.Tests/PolicyEndpointsTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.WebService.Tests/PolicyEndpointsTests.cs diff --git a/src/StellaOps.Scanner.WebService.Tests/ReportEventDispatcherTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.WebService.Tests/ReportEventDispatcherTests.cs similarity index 100% rename from src/StellaOps.Scanner.WebService.Tests/ReportEventDispatcherTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.WebService.Tests/ReportEventDispatcherTests.cs diff --git a/src/StellaOps.Scanner.WebService.Tests/ReportSamplesTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.WebService.Tests/ReportSamplesTests.cs similarity index 100% rename from src/StellaOps.Scanner.WebService.Tests/ReportSamplesTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.WebService.Tests/ReportSamplesTests.cs diff --git a/src/StellaOps.Scanner.WebService.Tests/ReportsEndpointsTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.WebService.Tests/ReportsEndpointsTests.cs similarity index 100% rename from src/StellaOps.Scanner.WebService.Tests/ReportsEndpointsTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.WebService.Tests/ReportsEndpointsTests.cs diff --git a/src/StellaOps.Scanner.WebService.Tests/RuntimeEndpointsTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.WebService.Tests/RuntimeEndpointsTests.cs similarity index 97% rename from src/StellaOps.Scanner.WebService.Tests/RuntimeEndpointsTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.WebService.Tests/RuntimeEndpointsTests.cs index 4a480d7f..14a63bb7 100644 --- a/src/StellaOps.Scanner.WebService.Tests/RuntimeEndpointsTests.cs +++ b/src/Scanner/__Tests/StellaOps.Scanner.WebService.Tests/RuntimeEndpointsTests.cs @@ -1,363 +1,363 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Net; -using System.Net.Http.Json; -using System.Text.Json; -using Microsoft.AspNetCore.Http; -using Microsoft.Extensions.DependencyInjection; -using MongoDB.Driver; -using StellaOps.Policy; -using StellaOps.Scanner.Storage.Catalog; -using StellaOps.Scanner.Storage.Mongo; -using StellaOps.Scanner.WebService.Contracts; -using StellaOps.Zastava.Core.Contracts; - -namespace StellaOps.Scanner.WebService.Tests; - -public sealed class RuntimeEndpointsTests -{ - [Fact] - public 
async Task RuntimeEventsEndpointPersistsEvents() - { - using var factory = new ScannerApplicationFactory(); - using var client = factory.CreateClient(); - - var request = new RuntimeEventsIngestRequestDto - { - BatchId = "batch-1", - Events = new[] - { - CreateEnvelope("evt-001", buildId: "ABCDEF1234567890ABCDEF1234567890ABCDEF12"), - CreateEnvelope("evt-002", buildId: "abcdef1234567890abcdef1234567890abcdef12") - } - }; - - var response = await client.PostAsJsonAsync("/api/v1/runtime/events", request); - Assert.Equal(HttpStatusCode.Accepted, response.StatusCode); - - var payload = await response.Content.ReadFromJsonAsync<RuntimeEventsIngestResponseDto>(); - Assert.NotNull(payload); - Assert.Equal(2, payload!.Accepted); - Assert.Equal(0, payload.Duplicates); - - using var scope = factory.Services.CreateScope(); - var collections = scope.ServiceProvider.GetRequiredService<MongoCollectionProvider>(); - var stored = await collections.RuntimeEvents.Find(FilterDefinition<RuntimeEventDocument>.Empty).ToListAsync(); - Assert.Equal(2, stored.Count); - Assert.Contains(stored, doc => doc.EventId == "evt-001"); - Assert.All(stored, doc => - { - Assert.Equal("tenant-alpha", doc.Tenant); - Assert.True(doc.ExpiresAt > doc.ReceivedAt); - Assert.Equal("sha256:deadbeef", doc.ImageDigest); - Assert.Equal("abcdef1234567890abcdef1234567890abcdef12", doc.BuildId); - }); - } - - [Fact] - public async Task RuntimeEventsEndpointRejectsUnsupportedSchema() - { - using var factory = new ScannerApplicationFactory(); - using var client = factory.CreateClient(); - - var envelope = CreateEnvelope("evt-100", schemaVersion: "zastava.runtime.event@v2.0"); - - var request = new RuntimeEventsIngestRequestDto - { - Events = new[] { envelope } - }; - - var response = await client.PostAsJsonAsync("/api/v1/runtime/events", request); - Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode); - } - - [Fact] - public async Task RuntimeEventsEndpointEnforcesRateLimit() - { - using var factory = new ScannerApplicationFactory(configuration => - { - configuration["scanner:runtime:perNodeBurst"] = "1"; - configuration["scanner:runtime:perNodeEventsPerSecond"] = "1"; - configuration["scanner:runtime:perTenantBurst"] = "1"; - configuration["scanner:runtime:perTenantEventsPerSecond"] = "1"; - }); - using var client = factory.CreateClient(); - - var request = new RuntimeEventsIngestRequestDto - { - Events = new[] - { - CreateEnvelope("evt-500"), - CreateEnvelope("evt-501") - } - }; - - var response = await client.PostAsJsonAsync("/api/v1/runtime/events", request); - Assert.Equal((HttpStatusCode)StatusCodes.Status429TooManyRequests, response.StatusCode); - Assert.NotNull(response.Headers.RetryAfter); - - using var scope = factory.Services.CreateScope(); - var collections = scope.ServiceProvider.GetRequiredService<MongoCollectionProvider>(); - var count = await collections.RuntimeEvents.CountDocumentsAsync(FilterDefinition<RuntimeEventDocument>.Empty); - Assert.Equal(0, count); - } - - [Fact] - public async Task RuntimePolicyEndpointReturnsDecisions() - { - using var factory = new ScannerApplicationFactory(configuration => - { - configuration["scanner:runtime:policyCacheTtlSeconds"] = "600"; - }); - - const string imageDigest = "sha256:deadbeef"; - - using var client = factory.CreateClient(); - - using (var scope = factory.Services.CreateScope()) - { - var collections = scope.ServiceProvider.GetRequiredService<MongoCollectionProvider>(); - var policyStore = scope.ServiceProvider.GetRequiredService<PolicySnapshotStore>(); - - const string 
policyYaml = """ -version: "1.0" -rules: - - name: Block Critical - severity: [Critical] - action: block -"""; - var saveResult = await policyStore.SaveAsync( - new PolicySnapshotContent(policyYaml, PolicyDocumentFormat.Yaml, "tester", "tests", "seed"), - CancellationToken.None); - Assert.True(saveResult.Success); - - var snapshot = await policyStore.GetLatestAsync(CancellationToken.None); - Assert.NotNull(snapshot); - - var sbomArtifactId = CatalogIdFactory.CreateArtifactId(ArtifactDocumentType.ImageBom, "sha256:sbomdigest"); - var attestationArtifactId = CatalogIdFactory.CreateArtifactId(ArtifactDocumentType.Attestation, "sha256:attdigest"); - - await collections.Artifacts.InsertManyAsync(new[] - { - new ArtifactDocument - { - Id = sbomArtifactId, - Type = ArtifactDocumentType.ImageBom, - Format = ArtifactDocumentFormat.CycloneDxJson, - MediaType = "application/json", - BytesSha256 = "sha256:sbomdigest", - RefCount = 1, - CreatedAtUtc = DateTime.UtcNow, - UpdatedAtUtc = DateTime.UtcNow - }, - new ArtifactDocument - { - Id = attestationArtifactId, - Type = ArtifactDocumentType.Attestation, - Format = ArtifactDocumentFormat.DsseJson, - MediaType = "application/vnd.dsse.envelope+json", - BytesSha256 = "sha256:attdigest", - RefCount = 1, - CreatedAtUtc = DateTime.UtcNow, - UpdatedAtUtc = DateTime.UtcNow, - Rekor = new RekorReference { Uuid = "rekor-uuid", Url = "https://rekor.example/uuid/rekor-uuid", Index = 7 } - } - }); - - await collections.Links.InsertManyAsync(new[] - { - new LinkDocument - { - Id = Guid.NewGuid().ToString("N"), - FromType = LinkSourceType.Image, - FromDigest = imageDigest, - ArtifactId = sbomArtifactId, - CreatedAtUtc = DateTime.UtcNow - }, - new LinkDocument - { - Id = Guid.NewGuid().ToString("N"), - FromType = LinkSourceType.Image, - FromDigest = imageDigest, - ArtifactId = attestationArtifactId, - CreatedAtUtc = DateTime.UtcNow - } - }); - } - - var ingestRequest = new RuntimeEventsIngestRequestDto - { - Events = new[] - { - CreateEnvelope("evt-210", imageDigest: imageDigest, buildId: "1122aabbccddeeff00112233445566778899aabb"), - CreateEnvelope("evt-211", imageDigest: imageDigest, buildId: "1122AABBCCDDEEFF00112233445566778899AABB") - } - }; - var ingestResponse = await client.PostAsJsonAsync("/api/v1/runtime/events", ingestRequest); - Assert.Equal(HttpStatusCode.Accepted, ingestResponse.StatusCode); - - var request = new RuntimePolicyRequestDto - { - Namespace = "payments", - Images = new[] { imageDigest, imageDigest }, - Labels = new Dictionary<string, string> { ["app"] = "api" } - }; - - var response = await client.PostAsJsonAsync("/api/v1/policy/runtime", request); - Assert.Equal(HttpStatusCode.OK, response.StatusCode); - - var raw = await response.Content.ReadAsStringAsync(); - Assert.False(string.IsNullOrWhiteSpace(raw), "Runtime policy response body was empty."); - var payload = JsonSerializer.Deserialize<RuntimePolicyResponseDto>(raw); - Assert.True(payload is not null, $"Runtime policy response: {raw}"); - Assert.Equal(600, payload!.TtlSeconds); - Assert.NotNull(payload.PolicyRevision); - Assert.True(payload.ExpiresAtUtc > DateTimeOffset.UtcNow); - - var decision = payload.Results[imageDigest]; - Assert.Equal("pass", decision.PolicyVerdict); - Assert.True(decision.Signed); - Assert.True(decision.HasSbomReferrers); - Assert.True(decision.HasSbomLegacy); - Assert.Empty(decision.Reasons); - Assert.NotNull(decision.Rekor); - Assert.Equal("rekor-uuid", decision.Rekor!.Uuid); - Assert.True(decision.Rekor.Verified); - Assert.NotNull(decision.Confidence); - 
Assert.InRange(decision.Confidence!.Value, 0.0, 1.0); - Assert.False(decision.Quieted.GetValueOrDefault()); - Assert.Null(decision.QuietedBy); - Assert.NotNull(decision.BuildIds); - Assert.Contains("1122aabbccddeeff00112233445566778899aabb", decision.BuildIds!); - var metadataString = decision.Metadata; - Console.WriteLine($"Runtime policy metadata: {metadataString ?? "<null>"}"); - Assert.False(string.IsNullOrWhiteSpace(metadataString)); - using var metadataDocument = JsonDocument.Parse(decision.Metadata!); - Assert.True(metadataDocument.RootElement.TryGetProperty("heuristics", out _)); - } - - [Fact] - public async Task RuntimePolicyEndpointFlagsUnsignedAndMissingSbom() - { - using var factory = new ScannerApplicationFactory(); - using var client = factory.CreateClient(); - - const string imageDigest = "sha256:feedface"; - - using (var scope = factory.Services.CreateScope()) - { - var collections = scope.ServiceProvider.GetRequiredService<MongoCollectionProvider>(); - var policyStore = scope.ServiceProvider.GetRequiredService<PolicySnapshotStore>(); - - const string policyYaml = """ -version: "1.0" -rules: [] -"""; - await policyStore.SaveAsync( - new PolicySnapshotContent(policyYaml, PolicyDocumentFormat.Yaml, "tester", "tests", "baseline"), - CancellationToken.None); - - // Intentionally skip artifacts/links to simulate missing metadata. - await collections.RuntimeEvents.DeleteManyAsync(Builders<RuntimeEventDocument>.Filter.Empty); - } - - var response = await client.PostAsJsonAsync("/api/v1/policy/runtime", new RuntimePolicyRequestDto - { - Namespace = "payments", - Images = new[] { imageDigest } - }); - - Assert.Equal(HttpStatusCode.OK, response.StatusCode); - var payload = await response.Content.ReadFromJsonAsync<RuntimePolicyResponseDto>(); - Assert.NotNull(payload); - var decision = payload!.Results[imageDigest]; - - Assert.Equal("fail", decision.PolicyVerdict); - Assert.False(decision.Signed); - Assert.False(decision.HasSbomReferrers); - Assert.Contains("image.metadata.missing", decision.Reasons); - Assert.Contains("unsigned", decision.Reasons); - Assert.Contains("missing SBOM", decision.Reasons); - Assert.NotNull(decision.Confidence); - Assert.InRange(decision.Confidence!.Value, 0.0, 1.0); - if (!string.IsNullOrWhiteSpace(decision.Metadata)) - { - using var failureMetadata = JsonDocument.Parse(decision.Metadata!); - if (failureMetadata.RootElement.TryGetProperty("heuristics", out var heuristicsElement)) - { - var heuristics = heuristicsElement.EnumerateArray().Select(item => item.GetString()).ToArray(); - Assert.Contains("image.metadata.missing", heuristics); - Assert.Contains("unsigned", heuristics); - } - } - } - - [Fact] - public async Task RuntimePolicyEndpointValidatesRequest() - { - using var factory = new ScannerApplicationFactory(); - using var client = factory.CreateClient(); - - var request = new RuntimePolicyRequestDto - { - Images = Array.Empty<string>() - }; - - var response = await client.PostAsJsonAsync("/api/v1/policy/runtime", request); - Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode); - } - - private static RuntimeEventEnvelope CreateEnvelope( - string eventId, - string? schemaVersion = null, - string? imageDigest = null, - string? buildId = null) - { - var digest = string.IsNullOrWhiteSpace(imageDigest) ? 
"sha256:deadbeef" : imageDigest; - var runtimeEvent = new RuntimeEvent - { - EventId = eventId, - When = DateTimeOffset.UtcNow, - Kind = RuntimeEventKind.ContainerStart, - Tenant = "tenant-alpha", - Node = "node-a", - Runtime = new RuntimeEngine - { - Engine = "containerd", - Version = "1.7.0" - }, - Workload = new RuntimeWorkload - { - Platform = "kubernetes", - Namespace = "default", - Pod = "api-123", - Container = "api", - ContainerId = "containerd://abc", - ImageRef = $"ghcr.io/example/api@{digest}" - }, - Delta = new RuntimeDelta - { - BaselineImageDigest = digest - }, - Process = new RuntimeProcess - { - Pid = 123, - Entrypoint = new[] { "/bin/start" }, - EntryTrace = Array.Empty<RuntimeEntryTrace>(), - BuildId = buildId - } - }; - - if (schemaVersion is null) - { - return RuntimeEventEnvelope.Create(runtimeEvent, ZastavaContractVersions.RuntimeEvent); - } - - return new RuntimeEventEnvelope - { - SchemaVersion = schemaVersion, - Event = runtimeEvent - }; - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Net; +using System.Net.Http.Json; +using System.Text.Json; +using Microsoft.AspNetCore.Http; +using Microsoft.Extensions.DependencyInjection; +using MongoDB.Driver; +using StellaOps.Policy; +using StellaOps.Scanner.Storage.Catalog; +using StellaOps.Scanner.Storage.Mongo; +using StellaOps.Scanner.WebService.Contracts; +using StellaOps.Zastava.Core.Contracts; + +namespace StellaOps.Scanner.WebService.Tests; + +public sealed class RuntimeEndpointsTests +{ + [Fact] + public async Task RuntimeEventsEndpointPersistsEvents() + { + using var factory = new ScannerApplicationFactory(); + using var client = factory.CreateClient(); + + var request = new RuntimeEventsIngestRequestDto + { + BatchId = "batch-1", + Events = new[] + { + CreateEnvelope("evt-001", buildId: "ABCDEF1234567890ABCDEF1234567890ABCDEF12"), + CreateEnvelope("evt-002", buildId: "abcdef1234567890abcdef1234567890abcdef12") + } + }; + + var response = await client.PostAsJsonAsync("/api/v1/runtime/events", request); + Assert.Equal(HttpStatusCode.Accepted, response.StatusCode); + + var payload = await response.Content.ReadFromJsonAsync<RuntimeEventsIngestResponseDto>(); + Assert.NotNull(payload); + Assert.Equal(2, payload!.Accepted); + Assert.Equal(0, payload.Duplicates); + + using var scope = factory.Services.CreateScope(); + var collections = scope.ServiceProvider.GetRequiredService<MongoCollectionProvider>(); + var stored = await collections.RuntimeEvents.Find(FilterDefinition<RuntimeEventDocument>.Empty).ToListAsync(); + Assert.Equal(2, stored.Count); + Assert.Contains(stored, doc => doc.EventId == "evt-001"); + Assert.All(stored, doc => + { + Assert.Equal("tenant-alpha", doc.Tenant); + Assert.True(doc.ExpiresAt > doc.ReceivedAt); + Assert.Equal("sha256:deadbeef", doc.ImageDigest); + Assert.Equal("abcdef1234567890abcdef1234567890abcdef12", doc.BuildId); + }); + } + + [Fact] + public async Task RuntimeEventsEndpointRejectsUnsupportedSchema() + { + using var factory = new ScannerApplicationFactory(); + using var client = factory.CreateClient(); + + var envelope = CreateEnvelope("evt-100", schemaVersion: "zastava.runtime.event@v2.0"); + + var request = new RuntimeEventsIngestRequestDto + { + Events = new[] { envelope } + }; + + var response = await client.PostAsJsonAsync("/api/v1/runtime/events", request); + Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode); + } + + [Fact] + public async Task RuntimeEventsEndpointEnforcesRateLimit() + { + using var factory = new 
ScannerApplicationFactory(configuration => + { + configuration["scanner:runtime:perNodeBurst"] = "1"; + configuration["scanner:runtime:perNodeEventsPerSecond"] = "1"; + configuration["scanner:runtime:perTenantBurst"] = "1"; + configuration["scanner:runtime:perTenantEventsPerSecond"] = "1"; + }); + using var client = factory.CreateClient(); + + var request = new RuntimeEventsIngestRequestDto + { + Events = new[] + { + CreateEnvelope("evt-500"), + CreateEnvelope("evt-501") + } + }; + + var response = await client.PostAsJsonAsync("/api/v1/runtime/events", request); + Assert.Equal((HttpStatusCode)StatusCodes.Status429TooManyRequests, response.StatusCode); + Assert.NotNull(response.Headers.RetryAfter); + + using var scope = factory.Services.CreateScope(); + var collections = scope.ServiceProvider.GetRequiredService<MongoCollectionProvider>(); + var count = await collections.RuntimeEvents.CountDocumentsAsync(FilterDefinition<RuntimeEventDocument>.Empty); + Assert.Equal(0, count); + } + + [Fact] + public async Task RuntimePolicyEndpointReturnsDecisions() + { + using var factory = new ScannerApplicationFactory(configuration => + { + configuration["scanner:runtime:policyCacheTtlSeconds"] = "600"; + }); + + const string imageDigest = "sha256:deadbeef"; + + using var client = factory.CreateClient(); + + using (var scope = factory.Services.CreateScope()) + { + var collections = scope.ServiceProvider.GetRequiredService<MongoCollectionProvider>(); + var policyStore = scope.ServiceProvider.GetRequiredService<PolicySnapshotStore>(); + + const string policyYaml = """ +version: "1.0" +rules: + - name: Block Critical + severity: [Critical] + action: block +"""; + var saveResult = await policyStore.SaveAsync( + new PolicySnapshotContent(policyYaml, PolicyDocumentFormat.Yaml, "tester", "tests", "seed"), + CancellationToken.None); + Assert.True(saveResult.Success); + + var snapshot = await policyStore.GetLatestAsync(CancellationToken.None); + Assert.NotNull(snapshot); + + var sbomArtifactId = CatalogIdFactory.CreateArtifactId(ArtifactDocumentType.ImageBom, "sha256:sbomdigest"); + var attestationArtifactId = CatalogIdFactory.CreateArtifactId(ArtifactDocumentType.Attestation, "sha256:attdigest"); + + await collections.Artifacts.InsertManyAsync(new[] + { + new ArtifactDocument + { + Id = sbomArtifactId, + Type = ArtifactDocumentType.ImageBom, + Format = ArtifactDocumentFormat.CycloneDxJson, + MediaType = "application/json", + BytesSha256 = "sha256:sbomdigest", + RefCount = 1, + CreatedAtUtc = DateTime.UtcNow, + UpdatedAtUtc = DateTime.UtcNow + }, + new ArtifactDocument + { + Id = attestationArtifactId, + Type = ArtifactDocumentType.Attestation, + Format = ArtifactDocumentFormat.DsseJson, + MediaType = "application/vnd.dsse.envelope+json", + BytesSha256 = "sha256:attdigest", + RefCount = 1, + CreatedAtUtc = DateTime.UtcNow, + UpdatedAtUtc = DateTime.UtcNow, + Rekor = new RekorReference { Uuid = "rekor-uuid", Url = "https://rekor.example/uuid/rekor-uuid", Index = 7 } + } + }); + + await collections.Links.InsertManyAsync(new[] + { + new LinkDocument + { + Id = Guid.NewGuid().ToString("N"), + FromType = LinkSourceType.Image, + FromDigest = imageDigest, + ArtifactId = sbomArtifactId, + CreatedAtUtc = DateTime.UtcNow + }, + new LinkDocument + { + Id = Guid.NewGuid().ToString("N"), + FromType = LinkSourceType.Image, + FromDigest = imageDigest, + ArtifactId = attestationArtifactId, + CreatedAtUtc = DateTime.UtcNow + } + }); + } + + var ingestRequest = new RuntimeEventsIngestRequestDto + { + Events = new[] + { + 
CreateEnvelope("evt-210", imageDigest: imageDigest, buildId: "1122aabbccddeeff00112233445566778899aabb"), + CreateEnvelope("evt-211", imageDigest: imageDigest, buildId: "1122AABBCCDDEEFF00112233445566778899AABB") + } + }; + var ingestResponse = await client.PostAsJsonAsync("/api/v1/runtime/events", ingestRequest); + Assert.Equal(HttpStatusCode.Accepted, ingestResponse.StatusCode); + + var request = new RuntimePolicyRequestDto + { + Namespace = "payments", + Images = new[] { imageDigest, imageDigest }, + Labels = new Dictionary<string, string> { ["app"] = "api" } + }; + + var response = await client.PostAsJsonAsync("/api/v1/policy/runtime", request); + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + + var raw = await response.Content.ReadAsStringAsync(); + Assert.False(string.IsNullOrWhiteSpace(raw), "Runtime policy response body was empty."); + var payload = JsonSerializer.Deserialize<RuntimePolicyResponseDto>(raw); + Assert.True(payload is not null, $"Runtime policy response: {raw}"); + Assert.Equal(600, payload!.TtlSeconds); + Assert.NotNull(payload.PolicyRevision); + Assert.True(payload.ExpiresAtUtc > DateTimeOffset.UtcNow); + + var decision = payload.Results[imageDigest]; + Assert.Equal("pass", decision.PolicyVerdict); + Assert.True(decision.Signed); + Assert.True(decision.HasSbomReferrers); + Assert.True(decision.HasSbomLegacy); + Assert.Empty(decision.Reasons); + Assert.NotNull(decision.Rekor); + Assert.Equal("rekor-uuid", decision.Rekor!.Uuid); + Assert.True(decision.Rekor.Verified); + Assert.NotNull(decision.Confidence); + Assert.InRange(decision.Confidence!.Value, 0.0, 1.0); + Assert.False(decision.Quieted.GetValueOrDefault()); + Assert.Null(decision.QuietedBy); + Assert.NotNull(decision.BuildIds); + Assert.Contains("1122aabbccddeeff00112233445566778899aabb", decision.BuildIds!); + var metadataString = decision.Metadata; + Console.WriteLine($"Runtime policy metadata: {metadataString ?? "<null>"}"); + Assert.False(string.IsNullOrWhiteSpace(metadataString)); + using var metadataDocument = JsonDocument.Parse(decision.Metadata!); + Assert.True(metadataDocument.RootElement.TryGetProperty("heuristics", out _)); + } + + [Fact] + public async Task RuntimePolicyEndpointFlagsUnsignedAndMissingSbom() + { + using var factory = new ScannerApplicationFactory(); + using var client = factory.CreateClient(); + + const string imageDigest = "sha256:feedface"; + + using (var scope = factory.Services.CreateScope()) + { + var collections = scope.ServiceProvider.GetRequiredService<MongoCollectionProvider>(); + var policyStore = scope.ServiceProvider.GetRequiredService<PolicySnapshotStore>(); + + const string policyYaml = """ +version: "1.0" +rules: [] +"""; + await policyStore.SaveAsync( + new PolicySnapshotContent(policyYaml, PolicyDocumentFormat.Yaml, "tester", "tests", "baseline"), + CancellationToken.None); + + // Intentionally skip artifacts/links to simulate missing metadata. 
+ await collections.RuntimeEvents.DeleteManyAsync(Builders<RuntimeEventDocument>.Filter.Empty); + } + + var response = await client.PostAsJsonAsync("/api/v1/policy/runtime", new RuntimePolicyRequestDto + { + Namespace = "payments", + Images = new[] { imageDigest } + }); + + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + var payload = await response.Content.ReadFromJsonAsync<RuntimePolicyResponseDto>(); + Assert.NotNull(payload); + var decision = payload!.Results[imageDigest]; + + Assert.Equal("fail", decision.PolicyVerdict); + Assert.False(decision.Signed); + Assert.False(decision.HasSbomReferrers); + Assert.Contains("image.metadata.missing", decision.Reasons); + Assert.Contains("unsigned", decision.Reasons); + Assert.Contains("missing SBOM", decision.Reasons); + Assert.NotNull(decision.Confidence); + Assert.InRange(decision.Confidence!.Value, 0.0, 1.0); + if (!string.IsNullOrWhiteSpace(decision.Metadata)) + { + using var failureMetadata = JsonDocument.Parse(decision.Metadata!); + if (failureMetadata.RootElement.TryGetProperty("heuristics", out var heuristicsElement)) + { + var heuristics = heuristicsElement.EnumerateArray().Select(item => item.GetString()).ToArray(); + Assert.Contains("image.metadata.missing", heuristics); + Assert.Contains("unsigned", heuristics); + } + } + } + + [Fact] + public async Task RuntimePolicyEndpointValidatesRequest() + { + using var factory = new ScannerApplicationFactory(); + using var client = factory.CreateClient(); + + var request = new RuntimePolicyRequestDto + { + Images = Array.Empty<string>() + }; + + var response = await client.PostAsJsonAsync("/api/v1/policy/runtime", request); + Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode); + } + + private static RuntimeEventEnvelope CreateEnvelope( + string eventId, + string? schemaVersion = null, + string? imageDigest = null, + string? buildId = null) + { + var digest = string.IsNullOrWhiteSpace(imageDigest) ? 
"sha256:deadbeef" : imageDigest; + var runtimeEvent = new RuntimeEvent + { + EventId = eventId, + When = DateTimeOffset.UtcNow, + Kind = RuntimeEventKind.ContainerStart, + Tenant = "tenant-alpha", + Node = "node-a", + Runtime = new RuntimeEngine + { + Engine = "containerd", + Version = "1.7.0" + }, + Workload = new RuntimeWorkload + { + Platform = "kubernetes", + Namespace = "default", + Pod = "api-123", + Container = "api", + ContainerId = "containerd://abc", + ImageRef = $"ghcr.io/example/api@{digest}" + }, + Delta = new RuntimeDelta + { + BaselineImageDigest = digest + }, + Process = new RuntimeProcess + { + Pid = 123, + Entrypoint = new[] { "/bin/start" }, + EntryTrace = Array.Empty<RuntimeEntryTrace>(), + BuildId = buildId + } + }; + + if (schemaVersion is null) + { + return RuntimeEventEnvelope.Create(runtimeEvent, ZastavaContractVersions.RuntimeEvent); + } + + return new RuntimeEventEnvelope + { + SchemaVersion = schemaVersion, + Event = runtimeEvent + }; + } +} diff --git a/src/StellaOps.Scanner.WebService.Tests/ScannerApplicationFactory.cs b/src/Scanner/__Tests/StellaOps.Scanner.WebService.Tests/ScannerApplicationFactory.cs similarity index 100% rename from src/StellaOps.Scanner.WebService.Tests/ScannerApplicationFactory.cs rename to src/Scanner/__Tests/StellaOps.Scanner.WebService.Tests/ScannerApplicationFactory.cs diff --git a/src/StellaOps.Scanner.WebService.Tests/ScansEndpointsTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.WebService.Tests/ScansEndpointsTests.cs similarity index 100% rename from src/StellaOps.Scanner.WebService.Tests/ScansEndpointsTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.WebService.Tests/ScansEndpointsTests.cs diff --git a/src/StellaOps.Scanner.WebService.Tests/StellaOps.Scanner.WebService.Tests.csproj b/src/Scanner/__Tests/StellaOps.Scanner.WebService.Tests/StellaOps.Scanner.WebService.Tests.csproj similarity index 81% rename from src/StellaOps.Scanner.WebService.Tests/StellaOps.Scanner.WebService.Tests.csproj rename to src/Scanner/__Tests/StellaOps.Scanner.WebService.Tests/StellaOps.Scanner.WebService.Tests.csproj index e60f64d2..a3736047 100644 --- a/src/StellaOps.Scanner.WebService.Tests/StellaOps.Scanner.WebService.Tests.csproj +++ b/src/Scanner/__Tests/StellaOps.Scanner.WebService.Tests/StellaOps.Scanner.WebService.Tests.csproj @@ -1,13 +1,14 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - <IsPackable>false</IsPackable> - <RootNamespace>StellaOps.Scanner.WebService.Tests</RootNamespace> - </PropertyGroup> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + <IsPackable>false</IsPackable> + <RootNamespace>StellaOps.Scanner.WebService.Tests</RootNamespace> + </PropertyGroup> <ItemGroup> - <ProjectReference Include="..\StellaOps.Scanner.WebService\StellaOps.Scanner.WebService.csproj" /> + <ProjectReference Include="../../StellaOps.Scanner.WebService/StellaOps.Scanner.WebService.csproj" /> </ItemGroup> <ItemGroup> <None Include="..\..\docs\events\samples\scanner.event.report.ready@1.sample.json"> @@ -17,4 +18,4 @@ <CopyToOutputDirectory>Always</CopyToOutputDirectory> </None> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Scanner.Worker.Tests/CompositeScanAnalyzerDispatcherTests.cs 
b/src/Scanner/__Tests/StellaOps.Scanner.Worker.Tests/CompositeScanAnalyzerDispatcherTests.cs similarity index 97% rename from src/StellaOps.Scanner.Worker.Tests/CompositeScanAnalyzerDispatcherTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Worker.Tests/CompositeScanAnalyzerDispatcherTests.cs index 03221a9e..7ae6a775 100644 --- a/src/StellaOps.Scanner.Worker.Tests/CompositeScanAnalyzerDispatcherTests.cs +++ b/src/Scanner/__Tests/StellaOps.Scanner.Worker.Tests/CompositeScanAnalyzerDispatcherTests.cs @@ -1,173 +1,173 @@ -using System; -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Collections.ObjectModel; -using System.IO; -using System.Linq; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Scanner.Analyzers.Lang; -using StellaOps.Scanner.Analyzers.Lang.Plugin; -using StellaOps.Scanner.Analyzers.OS.Abstractions; -using StellaOps.Scanner.Analyzers.OS.Plugin; -using StellaOps.Scanner.Core.Contracts; -using StellaOps.Scanner.Worker.Processing; -using Xunit; -using WorkerOptions = StellaOps.Scanner.Worker.Options.ScannerWorkerOptions; - -namespace StellaOps.Scanner.Worker.Tests; - -public sealed class CompositeScanAnalyzerDispatcherTests -{ - [Fact] - public async Task ExecuteAsync_RunsLanguageAnalyzers_StoresResults() - { - using var workspace = new TempDirectory(); - var metadata = new Dictionary<string, string>(StringComparer.Ordinal) - { - { ScanMetadataKeys.RootFilesystemPath, workspace.Path }, - { ScanMetadataKeys.WorkspacePath, workspace.Path }, - }; - - var osCatalog = new FakeOsCatalog(); - var languageCatalog = new FakeLanguageCatalog(new FakeLanguageAnalyzer()); - - var services = new ServiceCollection() - .AddLogging(builder => builder.SetMinimumLevel(LogLevel.Debug)) - .BuildServiceProvider(); - - var scopeFactory = services.GetRequiredService<IServiceScopeFactory>(); - var loggerFactory = services.GetRequiredService<ILoggerFactory>(); - var options = Microsoft.Extensions.Options.Options.Create(new WorkerOptions()); - var dispatcher = new CompositeScanAnalyzerDispatcher( - scopeFactory, - osCatalog, - languageCatalog, - options, - loggerFactory.CreateLogger<CompositeScanAnalyzerDispatcher>()); - - var lease = new TestJobLease(metadata); - var context = new ScanJobContext(lease, TimeProvider.System, TimeProvider.System.GetUtcNow(), CancellationToken.None); - - await dispatcher.ExecuteAsync(context, CancellationToken.None); - - Assert.True(context.Analysis.TryGet<ReadOnlyDictionary<string, LanguageAnalyzerResult>>(ScanAnalysisKeys.LanguageAnalyzerResults, out var results)); - Assert.Single(results); - Assert.True(context.Analysis.TryGet<ImmutableArray<LayerComponentFragment>>(ScanAnalysisKeys.LanguageComponentFragments, out var fragments)); - Assert.False(fragments.IsDefaultOrEmpty); - Assert.True(context.Analysis.GetLayerFragments().Any(fragment => fragment.Components.Any(component => component.Identity.Name == "demo-package"))); - } - - private sealed class FakeOsCatalog : IOSAnalyzerPluginCatalog - { - public IReadOnlyCollection<IOSAnalyzerPlugin> Plugins => Array.Empty<IOSAnalyzerPlugin>(); - - public void LoadFromDirectory(string directory, bool seal = true) - { - } - - public IReadOnlyList<IOSPackageAnalyzer> CreateAnalyzers(IServiceProvider services) => Array.Empty<IOSPackageAnalyzer>(); - } - - private sealed class FakeLanguageCatalog : ILanguageAnalyzerPluginCatalog - { - private readonly IReadOnlyList<ILanguageAnalyzer> _analyzers; - - 
public FakeLanguageCatalog(params ILanguageAnalyzer[] analyzers) - { - _analyzers = analyzers ?? Array.Empty<ILanguageAnalyzer>(); - } - - public IReadOnlyCollection<ILanguageAnalyzerPlugin> Plugins => Array.Empty<ILanguageAnalyzerPlugin>(); - - public void LoadFromDirectory(string directory, bool seal = true) - { - } - - public IReadOnlyList<ILanguageAnalyzer> CreateAnalyzers(IServiceProvider services) => _analyzers; - } - - private sealed class FakeLanguageAnalyzer : ILanguageAnalyzer - { - public string Id => "lang.fake"; - - public string DisplayName => "Fake Language Analyzer"; - - public ValueTask AnalyzeAsync(LanguageAnalyzerContext context, LanguageComponentWriter writer, CancellationToken cancellationToken) - { - writer.AddFromPurl( - analyzerId: Id, - purl: "pkg:npm/demo-package@1.0.0", - name: "demo-package", - version: "1.0.0", - type: "npm"); - return ValueTask.CompletedTask; - } - } - - private sealed class TestJobLease : IScanJobLease - { - private readonly Dictionary<string, string> _metadata; - - public TestJobLease(Dictionary<string, string> metadata) - { - _metadata = metadata; - JobId = Guid.NewGuid().ToString("n"); - ScanId = $"scan-{Guid.NewGuid():n}"; - Attempt = 1; - EnqueuedAtUtc = DateTimeOffset.UtcNow.AddSeconds(-1); - LeasedAtUtc = DateTimeOffset.UtcNow; - LeaseDuration = TimeSpan.FromMinutes(5); - } - - public string JobId { get; } - - public string ScanId { get; } - - public int Attempt { get; } - - public DateTimeOffset EnqueuedAtUtc { get; } - - public DateTimeOffset LeasedAtUtc { get; } - - public TimeSpan LeaseDuration { get; } - - public IReadOnlyDictionary<string, string> Metadata => _metadata; - - public ValueTask RenewAsync(CancellationToken cancellationToken) => ValueTask.CompletedTask; - - public ValueTask CompleteAsync(CancellationToken cancellationToken) => ValueTask.CompletedTask; - - public ValueTask AbandonAsync(string reason, CancellationToken cancellationToken) => ValueTask.CompletedTask; - - public ValueTask PoisonAsync(string reason, CancellationToken cancellationToken) => ValueTask.CompletedTask; - - public ValueTask DisposeAsync() => ValueTask.CompletedTask; - } - - private sealed class TempDirectory : IDisposable - { - public TempDirectory() - { - Path = System.IO.Path.Combine(System.IO.Path.GetTempPath(), $"stellaops-test-{Guid.NewGuid():n}"); - Directory.CreateDirectory(Path); - } - - public string Path { get; } - - public void Dispose() - { - try - { - if (Directory.Exists(Path)) - { - Directory.Delete(Path, recursive: true); - } - } - catch - { - } - } - } -} +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Collections.ObjectModel; +using System.IO; +using System.Linq; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Scanner.Analyzers.Lang; +using StellaOps.Scanner.Analyzers.Lang.Plugin; +using StellaOps.Scanner.Analyzers.OS.Abstractions; +using StellaOps.Scanner.Analyzers.OS.Plugin; +using StellaOps.Scanner.Core.Contracts; +using StellaOps.Scanner.Worker.Processing; +using Xunit; +using WorkerOptions = StellaOps.Scanner.Worker.Options.ScannerWorkerOptions; + +namespace StellaOps.Scanner.Worker.Tests; + +public sealed class CompositeScanAnalyzerDispatcherTests +{ + [Fact] + public async Task ExecuteAsync_RunsLanguageAnalyzers_StoresResults() + { + using var workspace = new TempDirectory(); + var metadata = new Dictionary<string, string>(StringComparer.Ordinal) + { + { 
ScanMetadataKeys.RootFilesystemPath, workspace.Path }, + { ScanMetadataKeys.WorkspacePath, workspace.Path }, + }; + + var osCatalog = new FakeOsCatalog(); + var languageCatalog = new FakeLanguageCatalog(new FakeLanguageAnalyzer()); + + var services = new ServiceCollection() + .AddLogging(builder => builder.SetMinimumLevel(LogLevel.Debug)) + .BuildServiceProvider(); + + var scopeFactory = services.GetRequiredService<IServiceScopeFactory>(); + var loggerFactory = services.GetRequiredService<ILoggerFactory>(); + var options = Microsoft.Extensions.Options.Options.Create(new WorkerOptions()); + var dispatcher = new CompositeScanAnalyzerDispatcher( + scopeFactory, + osCatalog, + languageCatalog, + options, + loggerFactory.CreateLogger<CompositeScanAnalyzerDispatcher>()); + + var lease = new TestJobLease(metadata); + var context = new ScanJobContext(lease, TimeProvider.System, TimeProvider.System.GetUtcNow(), CancellationToken.None); + + await dispatcher.ExecuteAsync(context, CancellationToken.None); + + Assert.True(context.Analysis.TryGet<ReadOnlyDictionary<string, LanguageAnalyzerResult>>(ScanAnalysisKeys.LanguageAnalyzerResults, out var results)); + Assert.Single(results); + Assert.True(context.Analysis.TryGet<ImmutableArray<LayerComponentFragment>>(ScanAnalysisKeys.LanguageComponentFragments, out var fragments)); + Assert.False(fragments.IsDefaultOrEmpty); + Assert.True(context.Analysis.GetLayerFragments().Any(fragment => fragment.Components.Any(component => component.Identity.Name == "demo-package"))); + } + + private sealed class FakeOsCatalog : IOSAnalyzerPluginCatalog + { + public IReadOnlyCollection<IOSAnalyzerPlugin> Plugins => Array.Empty<IOSAnalyzerPlugin>(); + + public void LoadFromDirectory(string directory, bool seal = true) + { + } + + public IReadOnlyList<IOSPackageAnalyzer> CreateAnalyzers(IServiceProvider services) => Array.Empty<IOSPackageAnalyzer>(); + } + + private sealed class FakeLanguageCatalog : ILanguageAnalyzerPluginCatalog + { + private readonly IReadOnlyList<ILanguageAnalyzer> _analyzers; + + public FakeLanguageCatalog(params ILanguageAnalyzer[] analyzers) + { + _analyzers = analyzers ?? 
Array.Empty<ILanguageAnalyzer>(); + } + + public IReadOnlyCollection<ILanguageAnalyzerPlugin> Plugins => Array.Empty<ILanguageAnalyzerPlugin>(); + + public void LoadFromDirectory(string directory, bool seal = true) + { + } + + public IReadOnlyList<ILanguageAnalyzer> CreateAnalyzers(IServiceProvider services) => _analyzers; + } + + private sealed class FakeLanguageAnalyzer : ILanguageAnalyzer + { + public string Id => "lang.fake"; + + public string DisplayName => "Fake Language Analyzer"; + + public ValueTask AnalyzeAsync(LanguageAnalyzerContext context, LanguageComponentWriter writer, CancellationToken cancellationToken) + { + writer.AddFromPurl( + analyzerId: Id, + purl: "pkg:npm/demo-package@1.0.0", + name: "demo-package", + version: "1.0.0", + type: "npm"); + return ValueTask.CompletedTask; + } + } + + private sealed class TestJobLease : IScanJobLease + { + private readonly Dictionary<string, string> _metadata; + + public TestJobLease(Dictionary<string, string> metadata) + { + _metadata = metadata; + JobId = Guid.NewGuid().ToString("n"); + ScanId = $"scan-{Guid.NewGuid():n}"; + Attempt = 1; + EnqueuedAtUtc = DateTimeOffset.UtcNow.AddSeconds(-1); + LeasedAtUtc = DateTimeOffset.UtcNow; + LeaseDuration = TimeSpan.FromMinutes(5); + } + + public string JobId { get; } + + public string ScanId { get; } + + public int Attempt { get; } + + public DateTimeOffset EnqueuedAtUtc { get; } + + public DateTimeOffset LeasedAtUtc { get; } + + public TimeSpan LeaseDuration { get; } + + public IReadOnlyDictionary<string, string> Metadata => _metadata; + + public ValueTask RenewAsync(CancellationToken cancellationToken) => ValueTask.CompletedTask; + + public ValueTask CompleteAsync(CancellationToken cancellationToken) => ValueTask.CompletedTask; + + public ValueTask AbandonAsync(string reason, CancellationToken cancellationToken) => ValueTask.CompletedTask; + + public ValueTask PoisonAsync(string reason, CancellationToken cancellationToken) => ValueTask.CompletedTask; + + public ValueTask DisposeAsync() => ValueTask.CompletedTask; + } + + private sealed class TempDirectory : IDisposable + { + public TempDirectory() + { + Path = System.IO.Path.Combine(System.IO.Path.GetTempPath(), $"stellaops-test-{Guid.NewGuid():n}"); + Directory.CreateDirectory(Path); + } + + public string Path { get; } + + public void Dispose() + { + try + { + if (Directory.Exists(Path)) + { + Directory.Delete(Path, recursive: true); + } + } + catch + { + } + } + } +} diff --git a/src/StellaOps.Scanner.Worker.Tests/EntryTraceExecutionServiceTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Worker.Tests/EntryTraceExecutionServiceTests.cs similarity index 97% rename from src/StellaOps.Scanner.Worker.Tests/EntryTraceExecutionServiceTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Worker.Tests/EntryTraceExecutionServiceTests.cs index 1c35f164..841311a6 100644 --- a/src/StellaOps.Scanner.Worker.Tests/EntryTraceExecutionServiceTests.cs +++ b/src/Scanner/__Tests/StellaOps.Scanner.Worker.Tests/EntryTraceExecutionServiceTests.cs @@ -1,179 +1,179 @@ -using System; -using System.Collections.Generic; -using System.Collections.Immutable; -using System.IO; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Scanner.Core.Contracts; -using StellaOps.Scanner.EntryTrace; -using StellaOps.Scanner.Worker.Options; -using StellaOps.Scanner.Worker.Processing; -using Xunit; - -namespace StellaOps.Scanner.Worker.Tests; - -public sealed class 
EntryTraceExecutionServiceTests : IDisposable -{ - private readonly string _tempRoot; - - public EntryTraceExecutionServiceTests() - { - _tempRoot = Path.Combine(Path.GetTempPath(), $"entrytrace-service-{Guid.NewGuid():n}"); - Directory.CreateDirectory(_tempRoot); - } - - [Fact] - public async Task ExecuteAsync_Skips_When_ConfigMetadataMissing() - { - var analyzer = new CapturingEntryTraceAnalyzer(); - var service = CreateService(analyzer); - - var context = CreateContext(new Dictionary<string, string>()); - - await service.ExecuteAsync(context, CancellationToken.None); - - Assert.False(analyzer.Invoked); - Assert.False(context.Analysis.TryGet<EntryTraceGraph>(ScanAnalysisKeys.EntryTraceGraph, out _)); - } - - [Fact] - public async Task ExecuteAsync_BuildsContext_AndStoresGraph() - { - var configPath = Path.Combine(_tempRoot, "config.json"); - File.WriteAllText(configPath, """ - { - "config": { - "Env": ["PATH=/bin:/usr/bin"], - "Entrypoint": ["/entrypoint.sh"], - "WorkingDir": "/workspace", - "User": "scanner" - } - } - """); - - var layerDirectory = Path.Combine(_tempRoot, "layer-1"); - Directory.CreateDirectory(layerDirectory); - File.WriteAllText(Path.Combine(layerDirectory, "entrypoint.sh"), "#!/bin/sh\necho hello\n"); - - var metadata = new Dictionary<string, string> - { - [ScanMetadataKeys.ImageConfigPath] = configPath, - [ScanMetadataKeys.LayerDirectories] = layerDirectory, - ["image.digest"] = "sha256:test-digest" - }; - - var analyzer = new CapturingEntryTraceAnalyzer(); - var service = CreateService(analyzer); - - var context = CreateContext(metadata); - - await service.ExecuteAsync(context, CancellationToken.None); - - Assert.True(analyzer.Invoked); - Assert.NotNull(analyzer.LastEntrypoint); - Assert.Equal("/entrypoint.sh", analyzer.LastEntrypoint!.Entrypoint[0]); - Assert.NotNull(analyzer.LastContext); - Assert.Equal("scanner", analyzer.LastContext!.User); - Assert.Equal("/workspace", analyzer.LastContext.WorkingDirectory); - Assert.Contains("/bin", analyzer.LastContext.Path); - - Assert.True(context.Analysis.TryGet(ScanAnalysisKeys.EntryTraceGraph, out EntryTraceGraph stored)); - Assert.Same(analyzer.Graph, stored); - } - - private EntryTraceExecutionService CreateService(IEntryTraceAnalyzer analyzer) - { - var workerOptions = new ScannerWorkerOptions(); - var entryTraceOptions = new EntryTraceAnalyzerOptions(); - - var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Trace)); - return new EntryTraceExecutionService( - analyzer, - Options.Create(entryTraceOptions), - Options.Create(workerOptions), - loggerFactory.CreateLogger<EntryTraceExecutionService>(), - loggerFactory); - } - - private static ScanJobContext CreateContext(IReadOnlyDictionary<string, string> metadata) - { - var lease = new TestLease(metadata); - return new ScanJobContext(lease, TimeProvider.System, DateTimeOffset.UtcNow, CancellationToken.None); - } - - public void Dispose() - { - try - { - if (Directory.Exists(_tempRoot)) - { - Directory.Delete(_tempRoot, recursive: true); - } - } - catch - { - // ignore cleanup failures - } - } - - private sealed class CapturingEntryTraceAnalyzer : IEntryTraceAnalyzer - { - public bool Invoked { get; private set; } - - public EntrypointSpecification? LastEntrypoint { get; private set; } - - public EntryTraceContext? 
LastContext { get; private set; } - - public EntryTraceGraph Graph { get; } = new( - EntryTraceOutcome.Resolved, - ImmutableArray<EntryTraceNode>.Empty, - ImmutableArray<EntryTraceEdge>.Empty, - ImmutableArray<EntryTraceDiagnostic>.Empty); - - public ValueTask<EntryTraceGraph> ResolveAsync(EntrypointSpecification entrypoint, EntryTraceContext context, CancellationToken cancellationToken = default) - { - Invoked = true; - LastEntrypoint = entrypoint; - LastContext = context; - return ValueTask.FromResult(Graph); - } - } - - private sealed class TestLease : IScanJobLease - { - private readonly IReadOnlyDictionary<string, string> _metadata; - - public TestLease(IReadOnlyDictionary<string, string> metadata) - { - _metadata = metadata; - EnqueuedAtUtc = DateTimeOffset.UtcNow; - LeasedAtUtc = EnqueuedAtUtc; - } - - public string JobId { get; } = $"job-{Guid.NewGuid():n}"; - - public string ScanId { get; } = $"scan-{Guid.NewGuid():n}"; - - public int Attempt => 1; - - public DateTimeOffset EnqueuedAtUtc { get; } - - public DateTimeOffset LeasedAtUtc { get; } - - public TimeSpan LeaseDuration => TimeSpan.FromMinutes(5); - - public IReadOnlyDictionary<string, string> Metadata => _metadata; - - public ValueTask RenewAsync(CancellationToken cancellationToken) => ValueTask.CompletedTask; - - public ValueTask CompleteAsync(CancellationToken cancellationToken) => ValueTask.CompletedTask; - - public ValueTask AbandonAsync(string reason, CancellationToken cancellationToken) => ValueTask.CompletedTask; - - public ValueTask PoisonAsync(string reason, CancellationToken cancellationToken) => ValueTask.CompletedTask; - - public ValueTask DisposeAsync() => ValueTask.CompletedTask; - } -} +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.IO; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Scanner.Core.Contracts; +using StellaOps.Scanner.EntryTrace; +using StellaOps.Scanner.Worker.Options; +using StellaOps.Scanner.Worker.Processing; +using Xunit; + +namespace StellaOps.Scanner.Worker.Tests; + +public sealed class EntryTraceExecutionServiceTests : IDisposable +{ + private readonly string _tempRoot; + + public EntryTraceExecutionServiceTests() + { + _tempRoot = Path.Combine(Path.GetTempPath(), $"entrytrace-service-{Guid.NewGuid():n}"); + Directory.CreateDirectory(_tempRoot); + } + + [Fact] + public async Task ExecuteAsync_Skips_When_ConfigMetadataMissing() + { + var analyzer = new CapturingEntryTraceAnalyzer(); + var service = CreateService(analyzer); + + var context = CreateContext(new Dictionary<string, string>()); + + await service.ExecuteAsync(context, CancellationToken.None); + + Assert.False(analyzer.Invoked); + Assert.False(context.Analysis.TryGet<EntryTraceGraph>(ScanAnalysisKeys.EntryTraceGraph, out _)); + } + + [Fact] + public async Task ExecuteAsync_BuildsContext_AndStoresGraph() + { + var configPath = Path.Combine(_tempRoot, "config.json"); + File.WriteAllText(configPath, """ + { + "config": { + "Env": ["PATH=/bin:/usr/bin"], + "Entrypoint": ["/entrypoint.sh"], + "WorkingDir": "/workspace", + "User": "scanner" + } + } + """); + + var layerDirectory = Path.Combine(_tempRoot, "layer-1"); + Directory.CreateDirectory(layerDirectory); + File.WriteAllText(Path.Combine(layerDirectory, "entrypoint.sh"), "#!/bin/sh\necho hello\n"); + + var metadata = new Dictionary<string, string> + { + [ScanMetadataKeys.ImageConfigPath] = configPath, + 
[ScanMetadataKeys.LayerDirectories] = layerDirectory, + ["image.digest"] = "sha256:test-digest" + }; + + var analyzer = new CapturingEntryTraceAnalyzer(); + var service = CreateService(analyzer); + + var context = CreateContext(metadata); + + await service.ExecuteAsync(context, CancellationToken.None); + + Assert.True(analyzer.Invoked); + Assert.NotNull(analyzer.LastEntrypoint); + Assert.Equal("/entrypoint.sh", analyzer.LastEntrypoint!.Entrypoint[0]); + Assert.NotNull(analyzer.LastContext); + Assert.Equal("scanner", analyzer.LastContext!.User); + Assert.Equal("/workspace", analyzer.LastContext.WorkingDirectory); + Assert.Contains("/bin", analyzer.LastContext.Path); + + Assert.True(context.Analysis.TryGet(ScanAnalysisKeys.EntryTraceGraph, out EntryTraceGraph stored)); + Assert.Same(analyzer.Graph, stored); + } + + private EntryTraceExecutionService CreateService(IEntryTraceAnalyzer analyzer) + { + var workerOptions = new ScannerWorkerOptions(); + var entryTraceOptions = new EntryTraceAnalyzerOptions(); + + var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Trace)); + return new EntryTraceExecutionService( + analyzer, + Options.Create(entryTraceOptions), + Options.Create(workerOptions), + loggerFactory.CreateLogger<EntryTraceExecutionService>(), + loggerFactory); + } + + private static ScanJobContext CreateContext(IReadOnlyDictionary<string, string> metadata) + { + var lease = new TestLease(metadata); + return new ScanJobContext(lease, TimeProvider.System, DateTimeOffset.UtcNow, CancellationToken.None); + } + + public void Dispose() + { + try + { + if (Directory.Exists(_tempRoot)) + { + Directory.Delete(_tempRoot, recursive: true); + } + } + catch + { + // ignore cleanup failures + } + } + + private sealed class CapturingEntryTraceAnalyzer : IEntryTraceAnalyzer + { + public bool Invoked { get; private set; } + + public EntrypointSpecification? LastEntrypoint { get; private set; } + + public EntryTraceContext? 
LastContext { get; private set; } + + public EntryTraceGraph Graph { get; } = new( + EntryTraceOutcome.Resolved, + ImmutableArray<EntryTraceNode>.Empty, + ImmutableArray<EntryTraceEdge>.Empty, + ImmutableArray<EntryTraceDiagnostic>.Empty); + + public ValueTask<EntryTraceGraph> ResolveAsync(EntrypointSpecification entrypoint, EntryTraceContext context, CancellationToken cancellationToken = default) + { + Invoked = true; + LastEntrypoint = entrypoint; + LastContext = context; + return ValueTask.FromResult(Graph); + } + } + + private sealed class TestLease : IScanJobLease + { + private readonly IReadOnlyDictionary<string, string> _metadata; + + public TestLease(IReadOnlyDictionary<string, string> metadata) + { + _metadata = metadata; + EnqueuedAtUtc = DateTimeOffset.UtcNow; + LeasedAtUtc = EnqueuedAtUtc; + } + + public string JobId { get; } = $"job-{Guid.NewGuid():n}"; + + public string ScanId { get; } = $"scan-{Guid.NewGuid():n}"; + + public int Attempt => 1; + + public DateTimeOffset EnqueuedAtUtc { get; } + + public DateTimeOffset LeasedAtUtc { get; } + + public TimeSpan LeaseDuration => TimeSpan.FromMinutes(5); + + public IReadOnlyDictionary<string, string> Metadata => _metadata; + + public ValueTask RenewAsync(CancellationToken cancellationToken) => ValueTask.CompletedTask; + + public ValueTask CompleteAsync(CancellationToken cancellationToken) => ValueTask.CompletedTask; + + public ValueTask AbandonAsync(string reason, CancellationToken cancellationToken) => ValueTask.CompletedTask; + + public ValueTask PoisonAsync(string reason, CancellationToken cancellationToken) => ValueTask.CompletedTask; + + public ValueTask DisposeAsync() => ValueTask.CompletedTask; + } +} diff --git a/src/StellaOps.Scanner.Worker.Tests/LeaseHeartbeatServiceTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Worker.Tests/LeaseHeartbeatServiceTests.cs similarity index 100% rename from src/StellaOps.Scanner.Worker.Tests/LeaseHeartbeatServiceTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Worker.Tests/LeaseHeartbeatServiceTests.cs diff --git a/src/StellaOps.Scanner.Worker.Tests/RedisWorkerSmokeTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Worker.Tests/RedisWorkerSmokeTests.cs similarity index 100% rename from src/StellaOps.Scanner.Worker.Tests/RedisWorkerSmokeTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Worker.Tests/RedisWorkerSmokeTests.cs diff --git a/src/StellaOps.Scanner.Worker.Tests/ScannerWorkerOptionsValidatorTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Worker.Tests/ScannerWorkerOptionsValidatorTests.cs similarity index 100% rename from src/StellaOps.Scanner.Worker.Tests/ScannerWorkerOptionsValidatorTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Worker.Tests/ScannerWorkerOptionsValidatorTests.cs diff --git a/src/StellaOps.Scanner.Worker.Tests/StellaOps.Scanner.Worker.Tests.csproj b/src/Scanner/__Tests/StellaOps.Scanner.Worker.Tests/StellaOps.Scanner.Worker.Tests.csproj similarity index 53% rename from src/StellaOps.Scanner.Worker.Tests/StellaOps.Scanner.Worker.Tests.csproj rename to src/Scanner/__Tests/StellaOps.Scanner.Worker.Tests/StellaOps.Scanner.Worker.Tests.csproj index 265caf0c..e7fe053b 100644 --- a/src/StellaOps.Scanner.Worker.Tests/StellaOps.Scanner.Worker.Tests.csproj +++ b/src/Scanner/__Tests/StellaOps.Scanner.Worker.Tests/StellaOps.Scanner.Worker.Tests.csproj @@ -1,13 +1,14 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <Nullable>enable</Nullable> - 
<ImplicitUsings>enable</ImplicitUsings> - <IsPackable>false</IsPackable> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Scanner.Worker\StellaOps.Scanner.Worker.csproj" /> - <ProjectReference Include="..\StellaOps.Scanner.Queue\StellaOps.Scanner.Queue.csproj" /> - </ItemGroup> -</Project> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + <IsPackable>false</IsPackable> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../StellaOps.Scanner.Worker/StellaOps.Scanner.Worker.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Scanner.Queue/StellaOps.Scanner.Queue.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Scanner.Worker.Tests/TestInfrastructure/StaticOptionsMonitor.cs b/src/Scanner/__Tests/StellaOps.Scanner.Worker.Tests/TestInfrastructure/StaticOptionsMonitor.cs similarity index 100% rename from src/StellaOps.Scanner.Worker.Tests/TestInfrastructure/StaticOptionsMonitor.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Worker.Tests/TestInfrastructure/StaticOptionsMonitor.cs diff --git a/src/StellaOps.Scanner.Worker.Tests/WorkerBasicScanScenarioTests.cs b/src/Scanner/__Tests/StellaOps.Scanner.Worker.Tests/WorkerBasicScanScenarioTests.cs similarity index 100% rename from src/StellaOps.Scanner.Worker.Tests/WorkerBasicScanScenarioTests.cs rename to src/Scanner/__Tests/StellaOps.Scanner.Worker.Tests/WorkerBasicScanScenarioTests.cs diff --git a/src/StellaOps.Scheduler.WebService/AGENTS.md b/src/Scheduler/StellaOps.Scheduler.WebService/AGENTS.md similarity index 100% rename from src/StellaOps.Scheduler.WebService/AGENTS.md rename to src/Scheduler/StellaOps.Scheduler.WebService/AGENTS.md diff --git a/src/StellaOps.Scheduler.WebService/Auth/AnonymousAuthenticationHandler.cs b/src/Scheduler/StellaOps.Scheduler.WebService/Auth/AnonymousAuthenticationHandler.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService/Auth/AnonymousAuthenticationHandler.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/Auth/AnonymousAuthenticationHandler.cs index 3209ffc8..0af49ba0 100644 --- a/src/StellaOps.Scheduler.WebService/Auth/AnonymousAuthenticationHandler.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/Auth/AnonymousAuthenticationHandler.cs @@ -1,26 +1,26 @@ -using System.Security.Claims; -using System.Text.Encodings.Web; -using Microsoft.AspNetCore.Authentication; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; - -namespace StellaOps.Scheduler.WebService.Auth; - -internal sealed class AnonymousAuthenticationHandler : AuthenticationHandler<AuthenticationSchemeOptions> -{ - public AnonymousAuthenticationHandler( - IOptionsMonitor<AuthenticationSchemeOptions> options, - ILoggerFactory logger, - UrlEncoder encoder) - : base(options, logger, encoder) - { - } - - protected override Task<AuthenticateResult> HandleAuthenticateAsync() - { - var identity = new ClaimsIdentity(Scheme.Name); - var principal = new ClaimsPrincipal(identity); - var ticket = new AuthenticationTicket(principal, Scheme.Name); - return Task.FromResult(AuthenticateResult.Success(ticket)); - } -} +using System.Security.Claims; +using System.Text.Encodings.Web; +using Microsoft.AspNetCore.Authentication; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; + +namespace 
StellaOps.Scheduler.WebService.Auth; + +internal sealed class AnonymousAuthenticationHandler : AuthenticationHandler<AuthenticationSchemeOptions> +{ + public AnonymousAuthenticationHandler( + IOptionsMonitor<AuthenticationSchemeOptions> options, + ILoggerFactory logger, + UrlEncoder encoder) + : base(options, logger, encoder) + { + } + + protected override Task<AuthenticateResult> HandleAuthenticateAsync() + { + var identity = new ClaimsIdentity(Scheme.Name); + var principal = new ClaimsPrincipal(identity); + var ticket = new AuthenticationTicket(principal, Scheme.Name); + return Task.FromResult(AuthenticateResult.Success(ticket)); + } +} diff --git a/src/StellaOps.Scheduler.WebService/Auth/ClaimsTenantContextAccessor.cs b/src/Scheduler/StellaOps.Scheduler.WebService/Auth/ClaimsTenantContextAccessor.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService/Auth/ClaimsTenantContextAccessor.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/Auth/ClaimsTenantContextAccessor.cs index 1b0d50ee..d92a8289 100644 --- a/src/StellaOps.Scheduler.WebService/Auth/ClaimsTenantContextAccessor.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/Auth/ClaimsTenantContextAccessor.cs @@ -1,27 +1,27 @@ -using System.Security.Claims; -using Microsoft.AspNetCore.Http; -using StellaOps.Auth.Abstractions; - -namespace StellaOps.Scheduler.WebService.Auth; - -internal sealed class ClaimsTenantContextAccessor : ITenantContextAccessor -{ - public TenantContext GetTenant(HttpContext context) - { - ArgumentNullException.ThrowIfNull(context); - - var principal = context.User ?? throw new UnauthorizedAccessException("Authentication required."); - if (principal.Identity?.IsAuthenticated != true) - { - throw new UnauthorizedAccessException("Authentication required."); - } - - var tenant = principal.FindFirstValue(StellaOpsClaimTypes.Tenant); - if (string.IsNullOrWhiteSpace(tenant)) - { - throw new InvalidOperationException("Authenticated principal is missing required tenant claim."); - } - - return new TenantContext(tenant.Trim()); - } -} +using System.Security.Claims; +using Microsoft.AspNetCore.Http; +using StellaOps.Auth.Abstractions; + +namespace StellaOps.Scheduler.WebService.Auth; + +internal sealed class ClaimsTenantContextAccessor : ITenantContextAccessor +{ + public TenantContext GetTenant(HttpContext context) + { + ArgumentNullException.ThrowIfNull(context); + + var principal = context.User ?? 
throw new UnauthorizedAccessException("Authentication required."); + if (principal.Identity?.IsAuthenticated != true) + { + throw new UnauthorizedAccessException("Authentication required."); + } + + var tenant = principal.FindFirstValue(StellaOpsClaimTypes.Tenant); + if (string.IsNullOrWhiteSpace(tenant)) + { + throw new InvalidOperationException("Authenticated principal is missing required tenant claim."); + } + + return new TenantContext(tenant.Trim()); + } +} diff --git a/src/StellaOps.Scheduler.WebService/Auth/HeaderScopeAuthorizer.cs b/src/Scheduler/StellaOps.Scheduler.WebService/Auth/HeaderScopeAuthorizer.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService/Auth/HeaderScopeAuthorizer.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/Auth/HeaderScopeAuthorizer.cs index 819cd4a8..2a186acf 100644 --- a/src/StellaOps.Scheduler.WebService/Auth/HeaderScopeAuthorizer.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/Auth/HeaderScopeAuthorizer.cs @@ -1,31 +1,31 @@ -using Microsoft.AspNetCore.Http; - -namespace StellaOps.Scheduler.WebService.Auth; - -internal sealed class HeaderScopeAuthorizer : IScopeAuthorizer -{ - private const string ScopeHeader = "X-Scopes"; - - public void EnsureScope(HttpContext context, string requiredScope) - { - if (!context.Request.Headers.TryGetValue(ScopeHeader, out var values)) - { - throw new UnauthorizedAccessException($"Missing required header '{ScopeHeader}'."); - } - - var scopeBuffer = string.Join(' ', values.ToArray()); - if (string.IsNullOrWhiteSpace(scopeBuffer)) - { - throw new UnauthorizedAccessException($"Header '{ScopeHeader}' cannot be empty."); - } - - var scopes = scopeBuffer - .Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries) - .ToHashSet(StringComparer.OrdinalIgnoreCase); - - if (!scopes.Contains(requiredScope)) - { - throw new InvalidOperationException($"Missing required scope '{requiredScope}'."); - } - } -} +using Microsoft.AspNetCore.Http; + +namespace StellaOps.Scheduler.WebService.Auth; + +internal sealed class HeaderScopeAuthorizer : IScopeAuthorizer +{ + private const string ScopeHeader = "X-Scopes"; + + public void EnsureScope(HttpContext context, string requiredScope) + { + if (!context.Request.Headers.TryGetValue(ScopeHeader, out var values)) + { + throw new UnauthorizedAccessException($"Missing required header '{ScopeHeader}'."); + } + + var scopeBuffer = string.Join(' ', values.ToArray()); + if (string.IsNullOrWhiteSpace(scopeBuffer)) + { + throw new UnauthorizedAccessException($"Header '{ScopeHeader}' cannot be empty."); + } + + var scopes = scopeBuffer + .Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries) + .ToHashSet(StringComparer.OrdinalIgnoreCase); + + if (!scopes.Contains(requiredScope)) + { + throw new InvalidOperationException($"Missing required scope '{requiredScope}'."); + } + } +} diff --git a/src/StellaOps.Scheduler.WebService/Auth/HeaderTenantContextAccessor.cs b/src/Scheduler/StellaOps.Scheduler.WebService/Auth/HeaderTenantContextAccessor.cs similarity index 96% rename from src/StellaOps.Scheduler.WebService/Auth/HeaderTenantContextAccessor.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/Auth/HeaderTenantContextAccessor.cs index 2b431322..8989259e 100644 --- a/src/StellaOps.Scheduler.WebService/Auth/HeaderTenantContextAccessor.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/Auth/HeaderTenantContextAccessor.cs @@ -1,24 +1,24 @@ -using Microsoft.AspNetCore.Http; - -namespace 
StellaOps.Scheduler.WebService.Auth; - -internal sealed class HeaderTenantContextAccessor : ITenantContextAccessor -{ - private const string TenantHeader = "X-Tenant-Id"; - - public TenantContext GetTenant(HttpContext context) - { - if (!context.Request.Headers.TryGetValue(TenantHeader, out var values)) - { - throw new UnauthorizedAccessException($"Missing required header '{TenantHeader}'."); - } - - var tenantId = values.ToString().Trim(); - if (string.IsNullOrWhiteSpace(tenantId)) - { - throw new UnauthorizedAccessException($"Header '{TenantHeader}' cannot be empty."); - } - - return new TenantContext(tenantId); - } -} +using Microsoft.AspNetCore.Http; + +namespace StellaOps.Scheduler.WebService.Auth; + +internal sealed class HeaderTenantContextAccessor : ITenantContextAccessor +{ + private const string TenantHeader = "X-Tenant-Id"; + + public TenantContext GetTenant(HttpContext context) + { + if (!context.Request.Headers.TryGetValue(TenantHeader, out var values)) + { + throw new UnauthorizedAccessException($"Missing required header '{TenantHeader}'."); + } + + var tenantId = values.ToString().Trim(); + if (string.IsNullOrWhiteSpace(tenantId)) + { + throw new UnauthorizedAccessException($"Header '{TenantHeader}' cannot be empty."); + } + + return new TenantContext(tenantId); + } +} diff --git a/src/StellaOps.Scheduler.WebService/Auth/IScopeAuthorizer.cs b/src/Scheduler/StellaOps.Scheduler.WebService/Auth/IScopeAuthorizer.cs similarity index 95% rename from src/StellaOps.Scheduler.WebService/Auth/IScopeAuthorizer.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/Auth/IScopeAuthorizer.cs index bf4d7edd..224e32c1 100644 --- a/src/StellaOps.Scheduler.WebService/Auth/IScopeAuthorizer.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/Auth/IScopeAuthorizer.cs @@ -1,8 +1,8 @@ -using Microsoft.AspNetCore.Http; - -namespace StellaOps.Scheduler.WebService.Auth; - -public interface IScopeAuthorizer -{ - void EnsureScope(HttpContext context, string requiredScope); -} +using Microsoft.AspNetCore.Http; + +namespace StellaOps.Scheduler.WebService.Auth; + +public interface IScopeAuthorizer +{ + void EnsureScope(HttpContext context, string requiredScope); +} diff --git a/src/StellaOps.Scheduler.WebService/Auth/ITenantContextAccessor.cs b/src/Scheduler/StellaOps.Scheduler.WebService/Auth/ITenantContextAccessor.cs similarity index 95% rename from src/StellaOps.Scheduler.WebService/Auth/ITenantContextAccessor.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/Auth/ITenantContextAccessor.cs index 180045cc..31a72ef6 100644 --- a/src/StellaOps.Scheduler.WebService/Auth/ITenantContextAccessor.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/Auth/ITenantContextAccessor.cs @@ -1,10 +1,10 @@ -using Microsoft.AspNetCore.Http; - -namespace StellaOps.Scheduler.WebService.Auth; - -public interface ITenantContextAccessor -{ - TenantContext GetTenant(HttpContext context); -} - -public sealed record TenantContext(string TenantId); +using Microsoft.AspNetCore.Http; + +namespace StellaOps.Scheduler.WebService.Auth; + +public interface ITenantContextAccessor +{ + TenantContext GetTenant(HttpContext context); +} + +public sealed record TenantContext(string TenantId); diff --git a/src/StellaOps.Scheduler.WebService/Auth/TokenScopeAuthorizer.cs b/src/Scheduler/StellaOps.Scheduler.WebService/Auth/TokenScopeAuthorizer.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService/Auth/TokenScopeAuthorizer.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/Auth/TokenScopeAuthorizer.cs index 
3397cc7d..3fc611b0 100644 --- a/src/StellaOps.Scheduler.WebService/Auth/TokenScopeAuthorizer.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/Auth/TokenScopeAuthorizer.cs @@ -1,61 +1,61 @@ -using System.Security.Claims; -using Microsoft.AspNetCore.Http; -using StellaOps.Auth.Abstractions; - -namespace StellaOps.Scheduler.WebService.Auth; - -internal sealed class TokenScopeAuthorizer : IScopeAuthorizer -{ - public void EnsureScope(HttpContext context, string requiredScope) - { - ArgumentNullException.ThrowIfNull(context); - - if (string.IsNullOrWhiteSpace(requiredScope)) - { - return; - } - - var principal = context.User ?? throw new UnauthorizedAccessException("Authentication required."); - if (principal.Identity?.IsAuthenticated != true) - { - throw new UnauthorizedAccessException("Authentication required."); - } - - var normalizedRequired = StellaOpsScopes.Normalize(requiredScope) ?? requiredScope.Trim().ToLowerInvariant(); - if (!HasScope(principal, normalizedRequired)) - { - throw new InvalidOperationException($"Missing required scope '{normalizedRequired}'."); - } - } - - private static bool HasScope(ClaimsPrincipal principal, string requiredScope) - { - foreach (var claim in principal.FindAll(StellaOpsClaimTypes.ScopeItem)) - { - if (string.Equals(claim.Value, requiredScope, StringComparison.OrdinalIgnoreCase)) - { - return true; - } - } - - foreach (var claim in principal.FindAll(StellaOpsClaimTypes.Scope)) - { - if (string.IsNullOrWhiteSpace(claim.Value)) - { - continue; - } - - var parts = claim.Value.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); - foreach (var part in parts) - { - var normalized = StellaOpsScopes.Normalize(part); - if (normalized is not null && string.Equals(normalized, requiredScope, StringComparison.Ordinal)) - { - return true; - } - } - } - - return false; - } -} +using System.Security.Claims; +using Microsoft.AspNetCore.Http; +using StellaOps.Auth.Abstractions; + +namespace StellaOps.Scheduler.WebService.Auth; + +internal sealed class TokenScopeAuthorizer : IScopeAuthorizer +{ + public void EnsureScope(HttpContext context, string requiredScope) + { + ArgumentNullException.ThrowIfNull(context); + + if (string.IsNullOrWhiteSpace(requiredScope)) + { + return; + } + + var principal = context.User ?? throw new UnauthorizedAccessException("Authentication required."); + if (principal.Identity?.IsAuthenticated != true) + { + throw new UnauthorizedAccessException("Authentication required."); + } + + var normalizedRequired = StellaOpsScopes.Normalize(requiredScope) ?? 
requiredScope.Trim().ToLowerInvariant(); + if (!HasScope(principal, normalizedRequired)) + { + throw new InvalidOperationException($"Missing required scope '{normalizedRequired}'."); + } + } + + private static bool HasScope(ClaimsPrincipal principal, string requiredScope) + { + foreach (var claim in principal.FindAll(StellaOpsClaimTypes.ScopeItem)) + { + if (string.Equals(claim.Value, requiredScope, StringComparison.OrdinalIgnoreCase)) + { + return true; + } + } + + foreach (var claim in principal.FindAll(StellaOpsClaimTypes.Scope)) + { + if (string.IsNullOrWhiteSpace(claim.Value)) + { + continue; + } + + var parts = claim.Value.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); + foreach (var part in parts) + { + var normalized = StellaOpsScopes.Normalize(part); + if (normalized is not null && string.Equals(normalized, requiredScope, StringComparison.Ordinal)) + { + return true; + } + } + } + + return false; + } +} diff --git a/src/StellaOps.Scheduler.WebService/EventWebhooks/EventWebhookEndpointExtensions.cs b/src/Scheduler/StellaOps.Scheduler.WebService/EventWebhooks/EventWebhookEndpointExtensions.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService/EventWebhooks/EventWebhookEndpointExtensions.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/EventWebhooks/EventWebhookEndpointExtensions.cs index f4e256ea..dfac9585 100644 --- a/src/StellaOps.Scheduler.WebService/EventWebhooks/EventWebhookEndpointExtensions.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/EventWebhooks/EventWebhookEndpointExtensions.cs @@ -1,173 +1,173 @@ -using System.ComponentModel.DataAnnotations; -using System.IO; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.AspNetCore.Http; -using Microsoft.AspNetCore.Mvc; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Scheduler.WebService.Options; - -namespace StellaOps.Scheduler.WebService.EventWebhooks; - -public static class EventWebhookEndpointExtensions -{ - public static void MapSchedulerEventWebhookEndpoints(this IEndpointRouteBuilder builder) - { - var group = builder.MapGroup("/events"); - - group.MapPost("/feedser-export", HandleFeedserExportAsync); - group.MapPost("/vexer-export", HandleVexerExportAsync); - } - - private static async Task<IResult> HandleFeedserExportAsync( - HttpContext httpContext, - [FromServices] IOptionsMonitor<SchedulerEventsOptions> options, - [FromServices] IWebhookRequestAuthenticator authenticator, - [FromServices] IWebhookRateLimiter rateLimiter, - [FromServices] IInboundExportEventSink sink, - CancellationToken cancellationToken) - { - var webhookOptions = options.CurrentValue.Webhooks.Feedser; - if (!webhookOptions.Enabled) - { - return Results.StatusCode(StatusCodes.Status503ServiceUnavailable); - } - - var readResult = await ReadPayloadAsync<FeedserExportEventRequest>(httpContext, cancellationToken).ConfigureAwait(false); - if (!readResult.Succeeded) - { - return readResult.ErrorResult!; - } - - if (!rateLimiter.TryAcquire("feedser", webhookOptions.RateLimitRequests, webhookOptions.GetRateLimitWindow(), out var retryAfter)) - { - var response = Results.StatusCode(StatusCodes.Status429TooManyRequests); - if (retryAfter > TimeSpan.Zero) - { - httpContext.Response.Headers.RetryAfter = ((int)Math.Ceiling(retryAfter.TotalSeconds)).ToString(); - } - - return response; - } - - var authResult = await authenticator.AuthenticateAsync(httpContext, readResult.RawBody, webhookOptions, 
cancellationToken).ConfigureAwait(false); - if (!authResult.Succeeded) - { - return authResult.ToResult(); - } - - try - { - await sink.HandleFeedserAsync(readResult.Payload!, cancellationToken).ConfigureAwait(false); - return Results.Accepted(value: new { status = "accepted" }); - } - catch (ValidationException ex) - { - return Results.BadRequest(new { error = ex.Message }); - } - } - - private static async Task<IResult> HandleVexerExportAsync( - HttpContext httpContext, - [FromServices] IOptionsMonitor<SchedulerEventsOptions> options, - [FromServices] IWebhookRequestAuthenticator authenticator, - [FromServices] IWebhookRateLimiter rateLimiter, - [FromServices] IInboundExportEventSink sink, - CancellationToken cancellationToken) - { - var webhookOptions = options.CurrentValue.Webhooks.Vexer; - if (!webhookOptions.Enabled) - { - return Results.StatusCode(StatusCodes.Status503ServiceUnavailable); - } - - var readResult = await ReadPayloadAsync<VexerExportEventRequest>(httpContext, cancellationToken).ConfigureAwait(false); - if (!readResult.Succeeded) - { - return readResult.ErrorResult!; - } - - if (!rateLimiter.TryAcquire("vexer", webhookOptions.RateLimitRequests, webhookOptions.GetRateLimitWindow(), out var retryAfter)) - { - var response = Results.StatusCode(StatusCodes.Status429TooManyRequests); - if (retryAfter > TimeSpan.Zero) - { - httpContext.Response.Headers.RetryAfter = ((int)Math.Ceiling(retryAfter.TotalSeconds)).ToString(); - } - - return response; - } - - var authResult = await authenticator.AuthenticateAsync(httpContext, readResult.RawBody, webhookOptions, cancellationToken).ConfigureAwait(false); - if (!authResult.Succeeded) - { - return authResult.ToResult(); - } - - try - { - await sink.HandleVexerAsync(readResult.Payload!, cancellationToken).ConfigureAwait(false); - return Results.Accepted(value: new { status = "accepted" }); - } - catch (ValidationException ex) - { - return Results.BadRequest(new { error = ex.Message }); - } - } - - private static async Task<RequestPayload<T>> ReadPayloadAsync<T>(HttpContext context, CancellationToken cancellationToken) - { - context.Request.EnableBuffering(); - - await using var buffer = new MemoryStream(); - await context.Request.Body.CopyToAsync(buffer, cancellationToken).ConfigureAwait(false); - var bodyBytes = buffer.ToArray(); - context.Request.Body.Position = 0; - - try - { - var payload = JsonSerializer.Deserialize<T>(bodyBytes, new JsonSerializerOptions(JsonSerializerDefaults.Web)); - if (payload is null) - { - return RequestPayload<T>.Failed(Results.BadRequest(new { error = "Request payload cannot be empty." })); - } - - return RequestPayload<T>.Success(payload, bodyBytes); - } - catch (JsonException ex) - { - return RequestPayload<T>.Failed(Results.BadRequest(new { error = ex.Message })); - } - catch (ValidationException ex) - { - return RequestPayload<T>.Failed(Results.BadRequest(new { error = ex.Message })); - } - } - - private readonly struct RequestPayload<T> - { - private RequestPayload(T? payload, byte[] rawBody, IResult? error, bool succeeded) - { - Payload = payload; - RawBody = rawBody; - ErrorResult = error; - Succeeded = succeeded; - } - - public T? Payload { get; } - - public byte[] RawBody { get; } - - public IResult? 
ErrorResult { get; } - - public bool Succeeded { get; } - - public static RequestPayload<T> Success(T payload, byte[] rawBody) - => new(payload, rawBody, null, true); - - public static RequestPayload<T> Failed(IResult error) - => new(default, Array.Empty<byte>(), error, false); - } -} +using System.ComponentModel.DataAnnotations; +using System.IO; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Mvc; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Scheduler.WebService.Options; + +namespace StellaOps.Scheduler.WebService.EventWebhooks; + +public static class EventWebhookEndpointExtensions +{ + public static void MapSchedulerEventWebhookEndpoints(this IEndpointRouteBuilder builder) + { + var group = builder.MapGroup("/events"); + + group.MapPost("/feedser-export", HandleFeedserExportAsync); + group.MapPost("/vexer-export", HandleVexerExportAsync); + } + + private static async Task<IResult> HandleFeedserExportAsync( + HttpContext httpContext, + [FromServices] IOptionsMonitor<SchedulerEventsOptions> options, + [FromServices] IWebhookRequestAuthenticator authenticator, + [FromServices] IWebhookRateLimiter rateLimiter, + [FromServices] IInboundExportEventSink sink, + CancellationToken cancellationToken) + { + var webhookOptions = options.CurrentValue.Webhooks.Feedser; + if (!webhookOptions.Enabled) + { + return Results.StatusCode(StatusCodes.Status503ServiceUnavailable); + } + + var readResult = await ReadPayloadAsync<FeedserExportEventRequest>(httpContext, cancellationToken).ConfigureAwait(false); + if (!readResult.Succeeded) + { + return readResult.ErrorResult!; + } + + if (!rateLimiter.TryAcquire("feedser", webhookOptions.RateLimitRequests, webhookOptions.GetRateLimitWindow(), out var retryAfter)) + { + var response = Results.StatusCode(StatusCodes.Status429TooManyRequests); + if (retryAfter > TimeSpan.Zero) + { + httpContext.Response.Headers.RetryAfter = ((int)Math.Ceiling(retryAfter.TotalSeconds)).ToString(); + } + + return response; + } + + var authResult = await authenticator.AuthenticateAsync(httpContext, readResult.RawBody, webhookOptions, cancellationToken).ConfigureAwait(false); + if (!authResult.Succeeded) + { + return authResult.ToResult(); + } + + try + { + await sink.HandleFeedserAsync(readResult.Payload!, cancellationToken).ConfigureAwait(false); + return Results.Accepted(value: new { status = "accepted" }); + } + catch (ValidationException ex) + { + return Results.BadRequest(new { error = ex.Message }); + } + } + + private static async Task<IResult> HandleVexerExportAsync( + HttpContext httpContext, + [FromServices] IOptionsMonitor<SchedulerEventsOptions> options, + [FromServices] IWebhookRequestAuthenticator authenticator, + [FromServices] IWebhookRateLimiter rateLimiter, + [FromServices] IInboundExportEventSink sink, + CancellationToken cancellationToken) + { + var webhookOptions = options.CurrentValue.Webhooks.Vexer; + if (!webhookOptions.Enabled) + { + return Results.StatusCode(StatusCodes.Status503ServiceUnavailable); + } + + var readResult = await ReadPayloadAsync<VexerExportEventRequest>(httpContext, cancellationToken).ConfigureAwait(false); + if (!readResult.Succeeded) + { + return readResult.ErrorResult!; + } + + if (!rateLimiter.TryAcquire("vexer", webhookOptions.RateLimitRequests, webhookOptions.GetRateLimitWindow(), out var retryAfter)) + { + var response = Results.StatusCode(StatusCodes.Status429TooManyRequests); + if (retryAfter > 
TimeSpan.Zero) + { + httpContext.Response.Headers.RetryAfter = ((int)Math.Ceiling(retryAfter.TotalSeconds)).ToString(); + } + + return response; + } + + var authResult = await authenticator.AuthenticateAsync(httpContext, readResult.RawBody, webhookOptions, cancellationToken).ConfigureAwait(false); + if (!authResult.Succeeded) + { + return authResult.ToResult(); + } + + try + { + await sink.HandleVexerAsync(readResult.Payload!, cancellationToken).ConfigureAwait(false); + return Results.Accepted(value: new { status = "accepted" }); + } + catch (ValidationException ex) + { + return Results.BadRequest(new { error = ex.Message }); + } + } + + private static async Task<RequestPayload<T>> ReadPayloadAsync<T>(HttpContext context, CancellationToken cancellationToken) + { + context.Request.EnableBuffering(); + + await using var buffer = new MemoryStream(); + await context.Request.Body.CopyToAsync(buffer, cancellationToken).ConfigureAwait(false); + var bodyBytes = buffer.ToArray(); + context.Request.Body.Position = 0; + + try + { + var payload = JsonSerializer.Deserialize<T>(bodyBytes, new JsonSerializerOptions(JsonSerializerDefaults.Web)); + if (payload is null) + { + return RequestPayload<T>.Failed(Results.BadRequest(new { error = "Request payload cannot be empty." })); + } + + return RequestPayload<T>.Success(payload, bodyBytes); + } + catch (JsonException ex) + { + return RequestPayload<T>.Failed(Results.BadRequest(new { error = ex.Message })); + } + catch (ValidationException ex) + { + return RequestPayload<T>.Failed(Results.BadRequest(new { error = ex.Message })); + } + } + + private readonly struct RequestPayload<T> + { + private RequestPayload(T? payload, byte[] rawBody, IResult? error, bool succeeded) + { + Payload = payload; + RawBody = rawBody; + ErrorResult = error; + Succeeded = succeeded; + } + + public T? Payload { get; } + + public byte[] RawBody { get; } + + public IResult? 
ErrorResult { get; } + + public bool Succeeded { get; } + + public static RequestPayload<T> Success(T payload, byte[] rawBody) + => new(payload, rawBody, null, true); + + public static RequestPayload<T> Failed(IResult error) + => new(default, Array.Empty<byte>(), error, false); + } +} diff --git a/src/StellaOps.Scheduler.WebService/EventWebhooks/IInboundExportEventSink.cs b/src/Scheduler/StellaOps.Scheduler.WebService/EventWebhooks/IInboundExportEventSink.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService/EventWebhooks/IInboundExportEventSink.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/EventWebhooks/IInboundExportEventSink.cs index 66c9df60..d902c7d9 100644 --- a/src/StellaOps.Scheduler.WebService/EventWebhooks/IInboundExportEventSink.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/EventWebhooks/IInboundExportEventSink.cs @@ -1,11 +1,11 @@ -using System.Threading; -using System.Threading.Tasks; - -namespace StellaOps.Scheduler.WebService.EventWebhooks; - -public interface IInboundExportEventSink -{ - Task HandleFeedserAsync(FeedserExportEventRequest request, CancellationToken cancellationToken); - - Task HandleVexerAsync(VexerExportEventRequest request, CancellationToken cancellationToken); -} +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Scheduler.WebService.EventWebhooks; + +public interface IInboundExportEventSink +{ + Task HandleFeedserAsync(FeedserExportEventRequest request, CancellationToken cancellationToken); + + Task HandleVexerAsync(VexerExportEventRequest request, CancellationToken cancellationToken); +} diff --git a/src/StellaOps.Scheduler.WebService/EventWebhooks/IWebhookRateLimiter.cs b/src/Scheduler/StellaOps.Scheduler.WebService/EventWebhooks/IWebhookRateLimiter.cs similarity index 96% rename from src/StellaOps.Scheduler.WebService/EventWebhooks/IWebhookRateLimiter.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/EventWebhooks/IWebhookRateLimiter.cs index 0213f36c..e80dc43c 100644 --- a/src/StellaOps.Scheduler.WebService/EventWebhooks/IWebhookRateLimiter.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/EventWebhooks/IWebhookRateLimiter.cs @@ -1,8 +1,8 @@ -using System; - -namespace StellaOps.Scheduler.WebService.EventWebhooks; - -public interface IWebhookRateLimiter -{ - bool TryAcquire(string key, int limit, TimeSpan window, out TimeSpan retryAfter); -} +using System; + +namespace StellaOps.Scheduler.WebService.EventWebhooks; + +public interface IWebhookRateLimiter +{ + bool TryAcquire(string key, int limit, TimeSpan window, out TimeSpan retryAfter); +} diff --git a/src/StellaOps.Scheduler.WebService/EventWebhooks/IWebhookRequestAuthenticator.cs b/src/Scheduler/StellaOps.Scheduler.WebService/EventWebhooks/IWebhookRequestAuthenticator.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService/EventWebhooks/IWebhookRequestAuthenticator.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/EventWebhooks/IWebhookRequestAuthenticator.cs index 11ea5ee2..ca7d0b66 100644 --- a/src/StellaOps.Scheduler.WebService/EventWebhooks/IWebhookRequestAuthenticator.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/EventWebhooks/IWebhookRequestAuthenticator.cs @@ -1,107 +1,107 @@ -using System; -using System.Security.Cryptography; -using System.Text; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.AspNetCore.Http; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Scheduler.WebService.Options; - -namespace 
StellaOps.Scheduler.WebService.EventWebhooks; - -public interface IWebhookRequestAuthenticator -{ - Task<WebhookAuthenticationResult> AuthenticateAsync(HttpContext context, ReadOnlyMemory<byte> body, SchedulerWebhookOptions options, CancellationToken cancellationToken); -} - -internal sealed class WebhookRequestAuthenticator : IWebhookRequestAuthenticator -{ - private readonly ILogger<WebhookRequestAuthenticator> _logger; - - public WebhookRequestAuthenticator( - ILogger<WebhookRequestAuthenticator> logger) - { - _logger = logger; - } - - public async Task<WebhookAuthenticationResult> AuthenticateAsync(HttpContext context, ReadOnlyMemory<byte> body, SchedulerWebhookOptions options, CancellationToken cancellationToken) - { - if (!options.Enabled) - { - return WebhookAuthenticationResult.Success(); - } - - if (options.RequireClientCertificate) - { - var certificate = context.Connection.ClientCertificate ?? await context.Connection.GetClientCertificateAsync(cancellationToken).ConfigureAwait(false); - if (certificate is null) - { - _logger.LogWarning("Webhook {Name} rejected request without client certificate.", options.Name); - return WebhookAuthenticationResult.Fail(StatusCodes.Status401Unauthorized, "Client certificate required."); - } - } - - if (!string.IsNullOrWhiteSpace(options.HmacSecret)) - { - var headerName = string.IsNullOrWhiteSpace(options.SignatureHeader) ? "X-Scheduler-Signature" : options.SignatureHeader; - if (!context.Request.Headers.TryGetValue(headerName, out var signatureValues)) - { - _logger.LogWarning("Webhook {Name} rejected request missing signature header {Header}.", options.Name, headerName); - return WebhookAuthenticationResult.Fail(StatusCodes.Status401Unauthorized, "Missing signature header."); - } - - var providedSignature = signatureValues.ToString(); - if (string.IsNullOrWhiteSpace(providedSignature)) - { - return WebhookAuthenticationResult.Fail(StatusCodes.Status401Unauthorized, "Signature header is empty."); - } - - if (!VerifySignature(body.Span, options.HmacSecret!, providedSignature)) - { - _logger.LogWarning("Webhook {Name} rejected request with invalid signature.", options.Name); - return WebhookAuthenticationResult.Fail(StatusCodes.Status401Unauthorized, "Invalid signature."); - } - } - - return WebhookAuthenticationResult.Success(); - } - - private static bool VerifySignature(ReadOnlySpan<byte> payload, string secret, string providedSignature) - { - byte[] secretBytes; - try - { - secretBytes = Convert.FromBase64String(secret); - } - catch (FormatException) - { - try - { - secretBytes = Convert.FromHexString(secret); - } - catch (FormatException) - { - secretBytes = Encoding.UTF8.GetBytes(secret); - } - } - - using var hmac = new HMACSHA256(secretBytes); - var hash = hmac.ComputeHash(payload.ToArray()); - var computedSignature = "sha256=" + Convert.ToHexString(hash).ToLowerInvariant(); - - return CryptographicOperations.FixedTimeEquals( - Encoding.UTF8.GetBytes(computedSignature), - Encoding.UTF8.GetBytes(providedSignature.Trim())); - } -} - -public readonly record struct WebhookAuthenticationResult(bool Succeeded, int StatusCode, string? Message) -{ - public static WebhookAuthenticationResult Success() => new(true, StatusCodes.Status200OK, null); - - public static WebhookAuthenticationResult Fail(int statusCode, string message) => new(false, statusCode, message); - - public IResult ToResult() - => Succeeded ? Results.Ok() : Results.Json(new { error = Message ?? 
"Unauthorized" }, statusCode: StatusCode); -} +using System; +using System.Security.Cryptography; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.AspNetCore.Http; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Scheduler.WebService.Options; + +namespace StellaOps.Scheduler.WebService.EventWebhooks; + +public interface IWebhookRequestAuthenticator +{ + Task<WebhookAuthenticationResult> AuthenticateAsync(HttpContext context, ReadOnlyMemory<byte> body, SchedulerWebhookOptions options, CancellationToken cancellationToken); +} + +internal sealed class WebhookRequestAuthenticator : IWebhookRequestAuthenticator +{ + private readonly ILogger<WebhookRequestAuthenticator> _logger; + + public WebhookRequestAuthenticator( + ILogger<WebhookRequestAuthenticator> logger) + { + _logger = logger; + } + + public async Task<WebhookAuthenticationResult> AuthenticateAsync(HttpContext context, ReadOnlyMemory<byte> body, SchedulerWebhookOptions options, CancellationToken cancellationToken) + { + if (!options.Enabled) + { + return WebhookAuthenticationResult.Success(); + } + + if (options.RequireClientCertificate) + { + var certificate = context.Connection.ClientCertificate ?? await context.Connection.GetClientCertificateAsync(cancellationToken).ConfigureAwait(false); + if (certificate is null) + { + _logger.LogWarning("Webhook {Name} rejected request without client certificate.", options.Name); + return WebhookAuthenticationResult.Fail(StatusCodes.Status401Unauthorized, "Client certificate required."); + } + } + + if (!string.IsNullOrWhiteSpace(options.HmacSecret)) + { + var headerName = string.IsNullOrWhiteSpace(options.SignatureHeader) ? "X-Scheduler-Signature" : options.SignatureHeader; + if (!context.Request.Headers.TryGetValue(headerName, out var signatureValues)) + { + _logger.LogWarning("Webhook {Name} rejected request missing signature header {Header}.", options.Name, headerName); + return WebhookAuthenticationResult.Fail(StatusCodes.Status401Unauthorized, "Missing signature header."); + } + + var providedSignature = signatureValues.ToString(); + if (string.IsNullOrWhiteSpace(providedSignature)) + { + return WebhookAuthenticationResult.Fail(StatusCodes.Status401Unauthorized, "Signature header is empty."); + } + + if (!VerifySignature(body.Span, options.HmacSecret!, providedSignature)) + { + _logger.LogWarning("Webhook {Name} rejected request with invalid signature.", options.Name); + return WebhookAuthenticationResult.Fail(StatusCodes.Status401Unauthorized, "Invalid signature."); + } + } + + return WebhookAuthenticationResult.Success(); + } + + private static bool VerifySignature(ReadOnlySpan<byte> payload, string secret, string providedSignature) + { + byte[] secretBytes; + try + { + secretBytes = Convert.FromBase64String(secret); + } + catch (FormatException) + { + try + { + secretBytes = Convert.FromHexString(secret); + } + catch (FormatException) + { + secretBytes = Encoding.UTF8.GetBytes(secret); + } + } + + using var hmac = new HMACSHA256(secretBytes); + var hash = hmac.ComputeHash(payload.ToArray()); + var computedSignature = "sha256=" + Convert.ToHexString(hash).ToLowerInvariant(); + + return CryptographicOperations.FixedTimeEquals( + Encoding.UTF8.GetBytes(computedSignature), + Encoding.UTF8.GetBytes(providedSignature.Trim())); + } +} + +public readonly record struct WebhookAuthenticationResult(bool Succeeded, int StatusCode, string? 
Message) +{ + public static WebhookAuthenticationResult Success() => new(true, StatusCodes.Status200OK, null); + + public static WebhookAuthenticationResult Fail(int statusCode, string message) => new(false, statusCode, message); + + public IResult ToResult() + => Succeeded ? Results.Ok() : Results.Json(new { error = Message ?? "Unauthorized" }, statusCode: StatusCode); +} diff --git a/src/StellaOps.Scheduler.WebService/EventWebhooks/InMemoryWebhookRateLimiter.cs b/src/Scheduler/StellaOps.Scheduler.WebService/EventWebhooks/InMemoryWebhookRateLimiter.cs similarity index 96% rename from src/StellaOps.Scheduler.WebService/EventWebhooks/InMemoryWebhookRateLimiter.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/EventWebhooks/InMemoryWebhookRateLimiter.cs index 4643ccbc..f76bb4e8 100644 --- a/src/StellaOps.Scheduler.WebService/EventWebhooks/InMemoryWebhookRateLimiter.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/EventWebhooks/InMemoryWebhookRateLimiter.cs @@ -1,63 +1,63 @@ -using System; -using System.Collections.Generic; -using Microsoft.Extensions.Caching.Memory; - -namespace StellaOps.Scheduler.WebService.EventWebhooks; - -internal sealed class InMemoryWebhookRateLimiter : IWebhookRateLimiter, IDisposable -{ - private readonly MemoryCache _cache = new(new MemoryCacheOptions()); - - private readonly object _mutex = new(); - - public bool TryAcquire(string key, int limit, TimeSpan window, out TimeSpan retryAfter) - { - if (limit <= 0) - { - retryAfter = TimeSpan.Zero; - return true; - } - - retryAfter = TimeSpan.Zero; - var now = DateTimeOffset.UtcNow; - - lock (_mutex) - { - if (!_cache.TryGetValue(key, out Queue<DateTimeOffset>? hits)) - { - hits = new Queue<DateTimeOffset>(); - _cache.Set(key, hits, new MemoryCacheEntryOptions - { - SlidingExpiration = window.Add(window) - }); - } - - hits ??= new Queue<DateTimeOffset>(); - - while (hits.Count > 0 && now - hits.Peek() > window) - { - hits.Dequeue(); - } - - if (hits.Count >= limit) - { - var oldest = hits.Peek(); - retryAfter = (oldest + window) - now; - if (retryAfter < TimeSpan.Zero) - { - retryAfter = TimeSpan.Zero; - } - - return false; - } - - hits.Enqueue(now); - return true; - } - } - - public void Dispose() - { - _cache.Dispose(); - } -} +using System; +using System.Collections.Generic; +using Microsoft.Extensions.Caching.Memory; + +namespace StellaOps.Scheduler.WebService.EventWebhooks; + +internal sealed class InMemoryWebhookRateLimiter : IWebhookRateLimiter, IDisposable +{ + private readonly MemoryCache _cache = new(new MemoryCacheOptions()); + + private readonly object _mutex = new(); + + public bool TryAcquire(string key, int limit, TimeSpan window, out TimeSpan retryAfter) + { + if (limit <= 0) + { + retryAfter = TimeSpan.Zero; + return true; + } + + retryAfter = TimeSpan.Zero; + var now = DateTimeOffset.UtcNow; + + lock (_mutex) + { + if (!_cache.TryGetValue(key, out Queue<DateTimeOffset>? 
hits)) + { + hits = new Queue<DateTimeOffset>(); + _cache.Set(key, hits, new MemoryCacheEntryOptions + { + SlidingExpiration = window.Add(window) + }); + } + + hits ??= new Queue<DateTimeOffset>(); + + while (hits.Count > 0 && now - hits.Peek() > window) + { + hits.Dequeue(); + } + + if (hits.Count >= limit) + { + var oldest = hits.Peek(); + retryAfter = (oldest + window) - now; + if (retryAfter < TimeSpan.Zero) + { + retryAfter = TimeSpan.Zero; + } + + return false; + } + + hits.Enqueue(now); + return true; + } + } + + public void Dispose() + { + _cache.Dispose(); + } +} diff --git a/src/StellaOps.Scheduler.WebService/EventWebhooks/LoggingExportEventSink.cs b/src/Scheduler/StellaOps.Scheduler.WebService/EventWebhooks/LoggingExportEventSink.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService/EventWebhooks/LoggingExportEventSink.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/EventWebhooks/LoggingExportEventSink.cs index 2bc2994c..22352504 100644 --- a/src/StellaOps.Scheduler.WebService/EventWebhooks/LoggingExportEventSink.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/EventWebhooks/LoggingExportEventSink.cs @@ -1,33 +1,33 @@ -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; - -namespace StellaOps.Scheduler.WebService.EventWebhooks; - -internal sealed class LoggingExportEventSink : IInboundExportEventSink -{ - private readonly ILogger<LoggingExportEventSink> _logger; - - public LoggingExportEventSink(ILogger<LoggingExportEventSink> logger) - { - _logger = logger; - } - - public Task HandleFeedserAsync(FeedserExportEventRequest request, CancellationToken cancellationToken) - { - _logger.LogInformation( - "Received Feedser export webhook {ExportId} with {ChangedProducts} product keys.", - request.ExportId, - request.ChangedProductKeys.Count); - return Task.CompletedTask; - } - - public Task HandleVexerAsync(VexerExportEventRequest request, CancellationToken cancellationToken) - { - _logger.LogInformation( - "Received Vexer export webhook {ExportId} with {ChangedClaims} claim changes.", - request.ExportId, - request.ChangedClaims.Count); - return Task.CompletedTask; - } -} +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; + +namespace StellaOps.Scheduler.WebService.EventWebhooks; + +internal sealed class LoggingExportEventSink : IInboundExportEventSink +{ + private readonly ILogger<LoggingExportEventSink> _logger; + + public LoggingExportEventSink(ILogger<LoggingExportEventSink> logger) + { + _logger = logger; + } + + public Task HandleFeedserAsync(FeedserExportEventRequest request, CancellationToken cancellationToken) + { + _logger.LogInformation( + "Received Feedser export webhook {ExportId} with {ChangedProducts} product keys.", + request.ExportId, + request.ChangedProductKeys.Count); + return Task.CompletedTask; + } + + public Task HandleVexerAsync(VexerExportEventRequest request, CancellationToken cancellationToken) + { + _logger.LogInformation( + "Received Vexer export webhook {ExportId} with {ChangedClaims} claim changes.", + request.ExportId, + request.ChangedClaims.Count); + return Task.CompletedTask; + } +} diff --git a/src/StellaOps.Scheduler.WebService/EventWebhooks/WebhookPayloads.cs b/src/Scheduler/StellaOps.Scheduler.WebService/EventWebhooks/WebhookPayloads.cs similarity index 96% rename from src/StellaOps.Scheduler.WebService/EventWebhooks/WebhookPayloads.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/EventWebhooks/WebhookPayloads.cs index 
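Illustrative sketch, not part of the committed code: the handlers above accept a JSON body and, when an HMAC secret is configured, require a lowercase-hex signature of the exact request bytes, prefixed with "sha256=", in the signature header (default X-Scheduler-Signature). A minimal producer-side call might look like the following; the base address and shared secret are placeholders, and the field names follow the FeedserExportEventRequest record in WebhookPayloads.cs further down in this patch.

    using System;
    using System.Net.Http;
    using System.Net.Http.Headers;
    using System.Security.Cryptography;
    using System.Text;
    using System.Text.Json;
    using System.Threading.Tasks;

    internal static class FeedserExportWebhookSample
    {
        // Posts a signed Feedser export event; assumes client.BaseAddress points at the scheduler.
        public static async Task SendAsync(HttpClient client, string sharedSecret)
        {
            var payload = new
            {
                exportId = "feedser-export-0001",                       // sample identifier
                changedProductKeys = new[] { "pkg:npm/lodash@4.17.21" } // at least one key is required
            };

            var body = JsonSerializer.SerializeToUtf8Bytes(payload, new JsonSerializerOptions(JsonSerializerDefaults.Web));

            // Mirror WebhookRequestAuthenticator.VerifySignature: HMAC-SHA256 over the raw body,
            // rendered as lowercase hex behind a "sha256=" prefix. If the secret is distributed as
            // base64 or hex, decode it the same way the authenticator does instead of using UTF-8 bytes.
            using var hmac = new HMACSHA256(Encoding.UTF8.GetBytes(sharedSecret));
            var signature = "sha256=" + Convert.ToHexString(hmac.ComputeHash(body)).ToLowerInvariant();

            using var request = new HttpRequestMessage(HttpMethod.Post, "/events/feedser-export")
            {
                Content = new ByteArrayContent(body)
            };
            request.Content.Headers.ContentType = new MediaTypeHeaderValue("application/json");
            request.Headers.TryAddWithoutValidation("X-Scheduler-Signature", signature);

            using var response = await client.SendAsync(request);
            response.EnsureSuccessStatusCode(); // the handler answers 202 Accepted; 429 responses carry Retry-After when rate limited
        }
    }

The /events/vexer-export endpoint behaves the same way with the VexerExportEventRequest shape.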
a9627df4..cd51b7b5 100644 --- a/src/StellaOps.Scheduler.WebService/EventWebhooks/WebhookPayloads.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/EventWebhooks/WebhookPayloads.cs @@ -1,106 +1,106 @@ -using System; -using System.Collections.Generic; -using System.Collections.Immutable; -using System.ComponentModel.DataAnnotations; -using System.Linq; - -namespace StellaOps.Scheduler.WebService.EventWebhooks; - -public sealed record FeedserExportEventRequest( - string ExportId, - IReadOnlyList<string> ChangedProductKeys, - IReadOnlyList<string>? Kev, - WebhookEventWindow? Window) -{ - public string ExportId { get; } = ExportId?.Trim() ?? throw new ArgumentNullException(nameof(ExportId)); - - public IReadOnlyList<string> ChangedProductKeys { get; } = NormalizeList(ChangedProductKeys, nameof(ChangedProductKeys)); - - public IReadOnlyList<string> Kev { get; } = NormalizeList(Kev, nameof(Kev), allowEmpty: true); - - public WebhookEventWindow? Window { get; } = Window; - - private static IReadOnlyList<string> NormalizeList(IReadOnlyList<string>? source, string propertyName, bool allowEmpty = false) - { - if (source is null) - { - if (allowEmpty) - { - return ImmutableArray<string>.Empty; - } - - throw new ValidationException($"{propertyName} must be specified."); - } - - var cleaned = source - .Where(item => !string.IsNullOrWhiteSpace(item)) - .Select(item => item.Trim()) - .Distinct(StringComparer.OrdinalIgnoreCase) - .ToArray(); - - if (!allowEmpty && cleaned.Length == 0) - { - throw new ValidationException($"{propertyName} must contain at least one value."); - } - - return cleaned; - } -} - -public sealed record VexerExportEventRequest( - string ExportId, - IReadOnlyList<VexerClaimChange> ChangedClaims, - WebhookEventWindow? Window) -{ - public string ExportId { get; } = ExportId?.Trim() ?? throw new ArgumentNullException(nameof(ExportId)); - - public IReadOnlyList<VexerClaimChange> ChangedClaims { get; } = NormalizeClaims(ChangedClaims); - - public WebhookEventWindow? Window { get; } = Window; - - private static IReadOnlyList<VexerClaimChange> NormalizeClaims(IReadOnlyList<VexerClaimChange>? claims) - { - if (claims is null || claims.Count == 0) - { - throw new ValidationException("changedClaims must contain at least one entry."); - } - - foreach (var claim in claims) - { - claim.Validate(); - } - - return claims; - } -} - -public sealed record VexerClaimChange( - string ProductKey, - string VulnerabilityId, - string Status) -{ - public string ProductKey { get; } = Normalize(ProductKey, nameof(ProductKey)); - - public string VulnerabilityId { get; } = Normalize(VulnerabilityId, nameof(VulnerabilityId)); - - public string Status { get; } = Normalize(Status, nameof(Status)); - - internal void Validate() - { - _ = ProductKey; - _ = VulnerabilityId; - _ = Status; - } - - private static string Normalize(string value, string propertyName) - { - if (string.IsNullOrWhiteSpace(value)) - { - throw new ValidationException($"{propertyName} must be provided."); - } - - return value.Trim(); - } -} - -public sealed record WebhookEventWindow(DateTimeOffset? From, DateTimeOffset? To); +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.ComponentModel.DataAnnotations; +using System.Linq; + +namespace StellaOps.Scheduler.WebService.EventWebhooks; + +public sealed record FeedserExportEventRequest( + string ExportId, + IReadOnlyList<string> ChangedProductKeys, + IReadOnlyList<string>? Kev, + WebhookEventWindow? 
Window) +{ + public string ExportId { get; } = ExportId?.Trim() ?? throw new ArgumentNullException(nameof(ExportId)); + + public IReadOnlyList<string> ChangedProductKeys { get; } = NormalizeList(ChangedProductKeys, nameof(ChangedProductKeys)); + + public IReadOnlyList<string> Kev { get; } = NormalizeList(Kev, nameof(Kev), allowEmpty: true); + + public WebhookEventWindow? Window { get; } = Window; + + private static IReadOnlyList<string> NormalizeList(IReadOnlyList<string>? source, string propertyName, bool allowEmpty = false) + { + if (source is null) + { + if (allowEmpty) + { + return ImmutableArray<string>.Empty; + } + + throw new ValidationException($"{propertyName} must be specified."); + } + + var cleaned = source + .Where(item => !string.IsNullOrWhiteSpace(item)) + .Select(item => item.Trim()) + .Distinct(StringComparer.OrdinalIgnoreCase) + .ToArray(); + + if (!allowEmpty && cleaned.Length == 0) + { + throw new ValidationException($"{propertyName} must contain at least one value."); + } + + return cleaned; + } +} + +public sealed record VexerExportEventRequest( + string ExportId, + IReadOnlyList<VexerClaimChange> ChangedClaims, + WebhookEventWindow? Window) +{ + public string ExportId { get; } = ExportId?.Trim() ?? throw new ArgumentNullException(nameof(ExportId)); + + public IReadOnlyList<VexerClaimChange> ChangedClaims { get; } = NormalizeClaims(ChangedClaims); + + public WebhookEventWindow? Window { get; } = Window; + + private static IReadOnlyList<VexerClaimChange> NormalizeClaims(IReadOnlyList<VexerClaimChange>? claims) + { + if (claims is null || claims.Count == 0) + { + throw new ValidationException("changedClaims must contain at least one entry."); + } + + foreach (var claim in claims) + { + claim.Validate(); + } + + return claims; + } +} + +public sealed record VexerClaimChange( + string ProductKey, + string VulnerabilityId, + string Status) +{ + public string ProductKey { get; } = Normalize(ProductKey, nameof(ProductKey)); + + public string VulnerabilityId { get; } = Normalize(VulnerabilityId, nameof(VulnerabilityId)); + + public string Status { get; } = Normalize(Status, nameof(Status)); + + internal void Validate() + { + _ = ProductKey; + _ = VulnerabilityId; + _ = Status; + } + + private static string Normalize(string value, string propertyName) + { + if (string.IsNullOrWhiteSpace(value)) + { + throw new ValidationException($"{propertyName} must be provided."); + } + + return value.Trim(); + } +} + +public sealed record WebhookEventWindow(DateTimeOffset? From, DateTimeOffset? 
To); diff --git a/src/StellaOps.Scheduler.WebService/GraphJobs/CartographerWebhookClient.cs b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/CartographerWebhookClient.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService/GraphJobs/CartographerWebhookClient.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/CartographerWebhookClient.cs index 69eeffb7..a5025123 100644 --- a/src/StellaOps.Scheduler.WebService/GraphJobs/CartographerWebhookClient.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/CartographerWebhookClient.cs @@ -1,102 +1,102 @@ -using System.Net.Http.Headers; -using System.Net.Mime; -using System.Text; -using System.Text.Json; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Scheduler.WebService.Options; - -namespace StellaOps.Scheduler.WebService.GraphJobs; - -internal sealed class CartographerWebhookClient : ICartographerWebhookClient -{ - private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) - { - DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull - }; - - private readonly HttpClient _httpClient; - private readonly IOptionsMonitor<SchedulerCartographerOptions> _options; - private readonly ILogger<CartographerWebhookClient> _logger; - - public CartographerWebhookClient( - HttpClient httpClient, - IOptionsMonitor<SchedulerCartographerOptions> options, - ILogger<CartographerWebhookClient> logger) - { - _httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient)); - _options = options ?? throw new ArgumentNullException(nameof(options)); - _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - } - - public async Task NotifyAsync(GraphJobCompletionNotification notification, CancellationToken cancellationToken) - { - var snapshot = _options.CurrentValue; - var webhook = snapshot.Webhook; - - if (!webhook.Enabled) - { - _logger.LogDebug("Cartographer webhook disabled; skipping notification for job {JobId}.", notification.Job.Id); - return; - } - - if (string.IsNullOrWhiteSpace(webhook.Endpoint)) - { - _logger.LogWarning("Cartographer webhook endpoint not configured; unable to notify for job {JobId}.", notification.Job.Id); - return; - } - - Uri endpointUri; - try - { - endpointUri = new Uri(webhook.Endpoint, UriKind.Absolute); - } - catch (Exception ex) - { - _logger.LogError(ex, "Invalid Cartographer webhook endpoint '{Endpoint}'.", webhook.Endpoint); - return; - } - - var payload = new - { - tenantId = notification.TenantId, - jobId = notification.Job.Id, - jobType = notification.Job.Kind, - status = notification.Status.ToString().ToLowerInvariant(), - occurredAt = notification.OccurredAt, - resultUri = notification.ResultUri, - correlationId = notification.CorrelationId, - job = notification.Job, - error = notification.Error - }; - - using var request = new HttpRequestMessage(HttpMethod.Post, endpointUri) - { - Content = new StringContent(JsonSerializer.Serialize(payload, SerializerOptions), Encoding.UTF8, MediaTypeNames.Application.Json) - }; - - if (!string.IsNullOrWhiteSpace(webhook.ApiKey) && !string.IsNullOrWhiteSpace(webhook.ApiKeyHeader)) - { - request.Headers.TryAddWithoutValidation(webhook.ApiKeyHeader!, webhook.ApiKey); - } - - try - { - using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); - - if (!response.IsSuccessStatusCode) - { - var body = await 
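Another illustrative sketch under stated assumptions: LoggingExportEventSink only logs inbound export events, so a deployment that wants to react to them can register its own IInboundExportEventSink. A hypothetical channel-backed variant is shown below; the registration line is an assumed pattern, since the actual DI wiring is outside this hunk.

    using System.Threading;
    using System.Threading.Channels;
    using System.Threading.Tasks;
    using StellaOps.Scheduler.WebService.EventWebhooks;

    // Hypothetical sink: hands inbound export events to a bounded channel so a
    // background worker can react without blocking the webhook request.
    internal sealed class ChannelExportEventSink : IInboundExportEventSink
    {
        private readonly Channel<object> _events = Channel.CreateBounded<object>(1024);

        public ChannelReader<object> Reader => _events.Reader;

        public Task HandleFeedserAsync(FeedserExportEventRequest request, CancellationToken cancellationToken)
            => _events.Writer.WriteAsync(request, cancellationToken).AsTask();

        public Task HandleVexerAsync(VexerExportEventRequest request, CancellationToken cancellationToken)
            => _events.Writer.WriteAsync(request, cancellationToken).AsTask();
    }

    // Assumed registration pattern, replacing the logging sink:
    // services.AddSingleton<IInboundExportEventSink, ChannelExportEventSink>();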
response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - _logger.LogWarning("Cartographer webhook responded {StatusCode} for job {JobId}: {Body}", (int)response.StatusCode, notification.Job.Id, body); - } - } - catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) - { - throw; - } - catch (Exception ex) - { - _logger.LogError(ex, "Failed to invoke Cartographer webhook for job {JobId}.", notification.Job.Id); - } - } -} +using System.Net.Http.Headers; +using System.Net.Mime; +using System.Text; +using System.Text.Json; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Scheduler.WebService.Options; + +namespace StellaOps.Scheduler.WebService.GraphJobs; + +internal sealed class CartographerWebhookClient : ICartographerWebhookClient +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull + }; + + private readonly HttpClient _httpClient; + private readonly IOptionsMonitor<SchedulerCartographerOptions> _options; + private readonly ILogger<CartographerWebhookClient> _logger; + + public CartographerWebhookClient( + HttpClient httpClient, + IOptionsMonitor<SchedulerCartographerOptions> options, + ILogger<CartographerWebhookClient> logger) + { + _httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient)); + _options = options ?? throw new ArgumentNullException(nameof(options)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public async Task NotifyAsync(GraphJobCompletionNotification notification, CancellationToken cancellationToken) + { + var snapshot = _options.CurrentValue; + var webhook = snapshot.Webhook; + + if (!webhook.Enabled) + { + _logger.LogDebug("Cartographer webhook disabled; skipping notification for job {JobId}.", notification.Job.Id); + return; + } + + if (string.IsNullOrWhiteSpace(webhook.Endpoint)) + { + _logger.LogWarning("Cartographer webhook endpoint not configured; unable to notify for job {JobId}.", notification.Job.Id); + return; + } + + Uri endpointUri; + try + { + endpointUri = new Uri(webhook.Endpoint, UriKind.Absolute); + } + catch (Exception ex) + { + _logger.LogError(ex, "Invalid Cartographer webhook endpoint '{Endpoint}'.", webhook.Endpoint); + return; + } + + var payload = new + { + tenantId = notification.TenantId, + jobId = notification.Job.Id, + jobType = notification.Job.Kind, + status = notification.Status.ToString().ToLowerInvariant(), + occurredAt = notification.OccurredAt, + resultUri = notification.ResultUri, + correlationId = notification.CorrelationId, + job = notification.Job, + error = notification.Error + }; + + using var request = new HttpRequestMessage(HttpMethod.Post, endpointUri) + { + Content = new StringContent(JsonSerializer.Serialize(payload, SerializerOptions), Encoding.UTF8, MediaTypeNames.Application.Json) + }; + + if (!string.IsNullOrWhiteSpace(webhook.ApiKey) && !string.IsNullOrWhiteSpace(webhook.ApiKeyHeader)) + { + request.Headers.TryAddWithoutValidation(webhook.ApiKeyHeader!, webhook.ApiKey); + } + + try + { + using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); + + if (!response.IsSuccessStatusCode) + { + var body = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + _logger.LogWarning("Cartographer webhook responded {StatusCode} for job {JobId}: {Body}", 
(int)response.StatusCode, notification.Job.Id, body); + } + } + catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) + { + throw; + } + catch (Exception ex) + { + _logger.LogError(ex, "Failed to invoke Cartographer webhook for job {JobId}.", notification.Job.Id); + } + } +} diff --git a/src/StellaOps.Scheduler.WebService/GraphJobs/Events/GraphJobCompletedEvent.cs b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/Events/GraphJobCompletedEvent.cs similarity index 96% rename from src/StellaOps.Scheduler.WebService/GraphJobs/Events/GraphJobCompletedEvent.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/Events/GraphJobCompletedEvent.cs index f24f1e94..c44e3b72 100644 --- a/src/StellaOps.Scheduler.WebService/GraphJobs/Events/GraphJobCompletedEvent.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/Events/GraphJobCompletedEvent.cs @@ -1,46 +1,46 @@ -using System.Collections.Generic; -using System.Text.Json.Serialization; -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.WebService.GraphJobs.Events; - -internal sealed record GraphJobCompletedEvent -{ - [JsonPropertyName("eventId")] - public required string EventId { get; init; } - - [JsonPropertyName("kind")] - public required string Kind { get; init; } - - [JsonPropertyName("tenant")] - public required string Tenant { get; init; } - - [JsonPropertyName("ts")] - public required DateTimeOffset Timestamp { get; init; } - - [JsonPropertyName("payload")] - public required GraphJobCompletedPayload Payload { get; init; } - - [JsonPropertyName("attributes")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public IReadOnlyDictionary<string, string>? Attributes { get; init; } -} - -internal sealed record GraphJobCompletedPayload -{ - [JsonPropertyName("jobType")] - public required string JobType { get; init; } - - [JsonPropertyName("status")] - public required GraphJobStatus Status { get; init; } - - [JsonPropertyName("occurredAt")] - public required DateTimeOffset OccurredAt { get; init; } - - [JsonPropertyName("job")] - public required GraphJobResponse Job { get; init; } - - [JsonPropertyName("resultUri")] - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? ResultUri { get; init; } -} +using System.Collections.Generic; +using System.Text.Json.Serialization; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.WebService.GraphJobs.Events; + +internal sealed record GraphJobCompletedEvent +{ + [JsonPropertyName("eventId")] + public required string EventId { get; init; } + + [JsonPropertyName("kind")] + public required string Kind { get; init; } + + [JsonPropertyName("tenant")] + public required string Tenant { get; init; } + + [JsonPropertyName("ts")] + public required DateTimeOffset Timestamp { get; init; } + + [JsonPropertyName("payload")] + public required GraphJobCompletedPayload Payload { get; init; } + + [JsonPropertyName("attributes")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public IReadOnlyDictionary<string, string>? 
Attributes { get; init; } +} + +internal sealed record GraphJobCompletedPayload +{ + [JsonPropertyName("jobType")] + public required string JobType { get; init; } + + [JsonPropertyName("status")] + public required GraphJobStatus Status { get; init; } + + [JsonPropertyName("occurredAt")] + public required DateTimeOffset OccurredAt { get; init; } + + [JsonPropertyName("job")] + public required GraphJobResponse Job { get; init; } + + [JsonPropertyName("resultUri")] + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? ResultUri { get; init; } +} diff --git a/src/StellaOps.Scheduler.WebService/GraphJobs/Events/GraphJobEventFactory.cs b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/Events/GraphJobEventFactory.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService/GraphJobs/Events/GraphJobEventFactory.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/Events/GraphJobEventFactory.cs index 5814b148..3a3e35e4 100644 --- a/src/StellaOps.Scheduler.WebService/GraphJobs/Events/GraphJobEventFactory.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/Events/GraphJobEventFactory.cs @@ -1,43 +1,43 @@ -using System; -using System.Collections.Generic; -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.WebService.GraphJobs.Events; - -internal static class GraphJobEventFactory -{ - public static GraphJobCompletedEvent Create(GraphJobCompletionNotification notification) - { - var eventId = Guid.CreateVersion7().ToString("n"); - var attributes = new Dictionary<string, string>(StringComparer.Ordinal); - - if (!string.IsNullOrWhiteSpace(notification.CorrelationId)) - { - attributes["correlationId"] = notification.CorrelationId!; - } - - if (!string.IsNullOrWhiteSpace(notification.Error)) - { - attributes["error"] = notification.Error!; - } - - var payload = new GraphJobCompletedPayload - { - JobType = notification.JobType.ToString().ToLowerInvariant(), - Status = notification.Status, - OccurredAt = notification.OccurredAt, - Job = notification.Job, - ResultUri = notification.ResultUri - }; - - return new GraphJobCompletedEvent - { - EventId = eventId, - Kind = GraphJobEventKinds.GraphJobCompleted, - Tenant = notification.TenantId, - Timestamp = notification.OccurredAt, - Payload = payload, - Attributes = attributes.Count == 0 ? null : attributes - }; - } -} +using System; +using System.Collections.Generic; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.WebService.GraphJobs.Events; + +internal static class GraphJobEventFactory +{ + public static GraphJobCompletedEvent Create(GraphJobCompletionNotification notification) + { + var eventId = Guid.CreateVersion7().ToString("n"); + var attributes = new Dictionary<string, string>(StringComparer.Ordinal); + + if (!string.IsNullOrWhiteSpace(notification.CorrelationId)) + { + attributes["correlationId"] = notification.CorrelationId!; + } + + if (!string.IsNullOrWhiteSpace(notification.Error)) + { + attributes["error"] = notification.Error!; + } + + var payload = new GraphJobCompletedPayload + { + JobType = notification.JobType.ToString().ToLowerInvariant(), + Status = notification.Status, + OccurredAt = notification.OccurredAt, + Job = notification.Job, + ResultUri = notification.ResultUri + }; + + return new GraphJobCompletedEvent + { + EventId = eventId, + Kind = GraphJobEventKinds.GraphJobCompleted, + Tenant = notification.TenantId, + Timestamp = notification.OccurredAt, + Payload = payload, + Attributes = attributes.Count == 0 ? 
null : attributes + }; + } +} diff --git a/src/StellaOps.Scheduler.WebService/GraphJobs/Events/GraphJobEventKinds.cs b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/Events/GraphJobEventKinds.cs similarity index 96% rename from src/StellaOps.Scheduler.WebService/GraphJobs/Events/GraphJobEventKinds.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/Events/GraphJobEventKinds.cs index 12b26478..733f3cdc 100644 --- a/src/StellaOps.Scheduler.WebService/GraphJobs/Events/GraphJobEventKinds.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/Events/GraphJobEventKinds.cs @@ -1,6 +1,6 @@ -namespace StellaOps.Scheduler.WebService.GraphJobs.Events; - -internal static class GraphJobEventKinds -{ - public const string GraphJobCompleted = "scheduler.graph.job.completed"; -} +namespace StellaOps.Scheduler.WebService.GraphJobs.Events; + +internal static class GraphJobEventKinds +{ + public const string GraphJobCompleted = "scheduler.graph.job.completed"; +} diff --git a/src/StellaOps.Scheduler.WebService/GraphJobs/Events/GraphJobEventPublisher.cs b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/Events/GraphJobEventPublisher.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService/GraphJobs/Events/GraphJobEventPublisher.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/Events/GraphJobEventPublisher.cs index 649ebca7..df6b17cc 100644 --- a/src/StellaOps.Scheduler.WebService/GraphJobs/Events/GraphJobEventPublisher.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/Events/GraphJobEventPublisher.cs @@ -1,41 +1,41 @@ -using System.Text.Json; -using System.Text.Json.Serialization; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Scheduler.WebService.Options; - -namespace StellaOps.Scheduler.WebService.GraphJobs.Events; - -internal sealed class GraphJobEventPublisher : IGraphJobCompletionPublisher -{ - private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) - { - DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull - }; - - private readonly IOptionsMonitor<SchedulerEventsOptions> _options; - private readonly ILogger<GraphJobEventPublisher> _logger; - - public GraphJobEventPublisher( - IOptionsMonitor<SchedulerEventsOptions> options, - ILogger<GraphJobEventPublisher> logger) - { - _options = options ?? throw new ArgumentNullException(nameof(options)); - _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); - } - - public Task PublishAsync(GraphJobCompletionNotification notification, CancellationToken cancellationToken) - { - var options = _options.CurrentValue; - if (!options.GraphJobs.Enabled) - { - _logger.LogDebug("Graph job events disabled; skipping emission for {JobId}.", notification.Job.Id); - return Task.CompletedTask; - } - - var envelope = GraphJobEventFactory.Create(notification); - var json = JsonSerializer.Serialize(envelope, SerializerOptions); - _logger.LogInformation("{EventJson}", json); - return Task.CompletedTask; - } -} +using System.Text.Json; +using System.Text.Json.Serialization; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Scheduler.WebService.Options; + +namespace StellaOps.Scheduler.WebService.GraphJobs.Events; + +internal sealed class GraphJobEventPublisher : IGraphJobCompletionPublisher +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull + }; + + private readonly IOptionsMonitor<SchedulerEventsOptions> _options; + private readonly ILogger<GraphJobEventPublisher> _logger; + + public GraphJobEventPublisher( + IOptionsMonitor<SchedulerEventsOptions> options, + ILogger<GraphJobEventPublisher> logger) + { + _options = options ?? throw new ArgumentNullException(nameof(options)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public Task PublishAsync(GraphJobCompletionNotification notification, CancellationToken cancellationToken) + { + var options = _options.CurrentValue; + if (!options.GraphJobs.Enabled) + { + _logger.LogDebug("Graph job events disabled; skipping emission for {JobId}.", notification.Job.Id); + return Task.CompletedTask; + } + + var envelope = GraphJobEventFactory.Create(notification); + var json = JsonSerializer.Serialize(envelope, SerializerOptions); + _logger.LogInformation("{EventJson}", json); + return Task.CompletedTask; + } +} diff --git a/src/StellaOps.Scheduler.WebService/GraphJobs/GraphBuildJobRequest.cs b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/GraphBuildJobRequest.cs similarity index 96% rename from src/StellaOps.Scheduler.WebService/GraphJobs/GraphBuildJobRequest.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/GraphBuildJobRequest.cs index 8d8cdc52..7fa66939 100644 --- a/src/StellaOps.Scheduler.WebService/GraphJobs/GraphBuildJobRequest.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/GraphBuildJobRequest.cs @@ -1,26 +1,26 @@ -using System.ComponentModel.DataAnnotations; -using System.Text.Json.Serialization; -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.WebService.GraphJobs; - -public sealed record GraphBuildJobRequest -{ - [Required] - public string SbomId { get; init; } = string.Empty; - - [Required] - public string SbomVersionId { get; init; } = string.Empty; - - [Required] - public string SbomDigest { get; init; } = string.Empty; - - public string? GraphSnapshotId { get; init; } - - [JsonConverter(typeof(JsonStringEnumConverter))] - public GraphBuildJobTrigger? Trigger { get; init; } - - public string? CorrelationId { get; init; } - - public IDictionary<string, string>? 
Metadata { get; init; } -} +using System.ComponentModel.DataAnnotations; +using System.Text.Json.Serialization; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.WebService.GraphJobs; + +public sealed record GraphBuildJobRequest +{ + [Required] + public string SbomId { get; init; } = string.Empty; + + [Required] + public string SbomVersionId { get; init; } = string.Empty; + + [Required] + public string SbomDigest { get; init; } = string.Empty; + + public string? GraphSnapshotId { get; init; } + + [JsonConverter(typeof(JsonStringEnumConverter))] + public GraphBuildJobTrigger? Trigger { get; init; } + + public string? CorrelationId { get; init; } + + public IDictionary<string, string>? Metadata { get; init; } +} diff --git a/src/StellaOps.Scheduler.WebService/GraphJobs/GraphJobCompletionNotification.cs b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/GraphJobCompletionNotification.cs similarity index 96% rename from src/StellaOps.Scheduler.WebService/GraphJobs/GraphJobCompletionNotification.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/GraphJobCompletionNotification.cs index b4666a33..18884727 100644 --- a/src/StellaOps.Scheduler.WebService/GraphJobs/GraphJobCompletionNotification.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/GraphJobCompletionNotification.cs @@ -1,13 +1,13 @@ -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.WebService.GraphJobs; - -public sealed record GraphJobCompletionNotification( - string TenantId, - GraphJobQueryType JobType, - GraphJobStatus Status, - DateTimeOffset OccurredAt, - GraphJobResponse Job, - string? ResultUri, - string? CorrelationId, - string? Error); +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.WebService.GraphJobs; + +public sealed record GraphJobCompletionNotification( + string TenantId, + GraphJobQueryType JobType, + GraphJobStatus Status, + DateTimeOffset OccurredAt, + GraphJobResponse Job, + string? ResultUri, + string? CorrelationId, + string? Error); diff --git a/src/StellaOps.Scheduler.WebService/GraphJobs/GraphJobCompletionRequest.cs b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/GraphJobCompletionRequest.cs similarity index 96% rename from src/StellaOps.Scheduler.WebService/GraphJobs/GraphJobCompletionRequest.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/GraphJobCompletionRequest.cs index 800ebf37..c037505f 100644 --- a/src/StellaOps.Scheduler.WebService/GraphJobs/GraphJobCompletionRequest.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/GraphJobCompletionRequest.cs @@ -1,30 +1,30 @@ -using System.ComponentModel.DataAnnotations; -using System.Text.Json.Serialization; -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.WebService.GraphJobs; - -public sealed record GraphJobCompletionRequest -{ - [Required] - public string JobId { get; init; } = string.Empty; - - [Required] - [JsonConverter(typeof(JsonStringEnumConverter))] - public GraphJobQueryType JobType { get; init; } - - [Required] - [JsonConverter(typeof(JsonStringEnumConverter))] - public GraphJobStatus Status { get; init; } - - [Required] - public DateTimeOffset OccurredAt { get; init; } - - public string? GraphSnapshotId { get; init; } - - public string? ResultUri { get; init; } - - public string? CorrelationId { get; init; } - - public string? 
Error { get; init; } -} +using System.ComponentModel.DataAnnotations; +using System.Text.Json.Serialization; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.WebService.GraphJobs; + +public sealed record GraphJobCompletionRequest +{ + [Required] + public string JobId { get; init; } = string.Empty; + + [Required] + [JsonConverter(typeof(JsonStringEnumConverter))] + public GraphJobQueryType JobType { get; init; } + + [Required] + [JsonConverter(typeof(JsonStringEnumConverter))] + public GraphJobStatus Status { get; init; } + + [Required] + public DateTimeOffset OccurredAt { get; init; } + + public string? GraphSnapshotId { get; init; } + + public string? ResultUri { get; init; } + + public string? CorrelationId { get; init; } + + public string? Error { get; init; } +} diff --git a/src/StellaOps.Scheduler.WebService/GraphJobs/GraphJobEndpointExtensions.cs b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/GraphJobEndpointExtensions.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService/GraphJobs/GraphJobEndpointExtensions.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/GraphJobEndpointExtensions.cs index b9b261ad..9b6c1323 100644 --- a/src/StellaOps.Scheduler.WebService/GraphJobs/GraphJobEndpointExtensions.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/GraphJobEndpointExtensions.cs @@ -1,161 +1,161 @@ -using Microsoft.AspNetCore.Mvc; -using System.ComponentModel.DataAnnotations; -using StellaOps.Auth.Abstractions; -using StellaOps.Scheduler.Models; -using StellaOps.Scheduler.WebService.Auth; - -namespace StellaOps.Scheduler.WebService.GraphJobs; - -public static class GraphJobEndpointExtensions -{ - public static void MapGraphJobEndpoints(this IEndpointRouteBuilder builder) - { - var group = builder.MapGroup("/graphs"); - - group.MapPost("/build", CreateGraphBuildJob); - group.MapPost("/overlays", CreateGraphOverlayJob); - group.MapGet("/jobs", GetGraphJobs); - group.MapPost("/hooks/completed", CompleteGraphJob); - group.MapGet("/overlays/lag", GetOverlayLagMetrics); - } - - internal static async Task<IResult> CreateGraphBuildJob( - [FromBody] GraphBuildJobRequest request, - HttpContext httpContext, - [FromServices] ITenantContextAccessor tenantAccessor, - [FromServices] IScopeAuthorizer authorizer, - [FromServices] IGraphJobService jobService, - CancellationToken cancellationToken) - { - try - { - authorizer.EnsureScope(httpContext, StellaOpsScopes.GraphWrite); - var tenant = tenantAccessor.GetTenant(httpContext); - var job = await jobService.CreateBuildJobAsync(tenant.TenantId, request, cancellationToken); - return Results.Created($"/graphs/jobs/{job.Id}", GraphJobResponse.From(job)); - } - catch (UnauthorizedAccessException ex) - { - return Results.Json(new { error = ex.Message }, statusCode: StatusCodes.Status401Unauthorized); - } - catch (InvalidOperationException ex) - { - return Results.Json(new { error = ex.Message }, statusCode: StatusCodes.Status403Forbidden); - } - catch (ValidationException ex) - { - return Results.Json(new { error = ex.Message }, statusCode: StatusCodes.Status400BadRequest); - } - catch (KeyNotFoundException ex) - { - return Results.Json(new { error = ex.Message }, statusCode: StatusCodes.Status404NotFound); - } - } - - internal static async Task<IResult> CreateGraphOverlayJob( - [FromBody] GraphOverlayJobRequest request, - HttpContext httpContext, - [FromServices] ITenantContextAccessor tenantAccessor, - [FromServices] IScopeAuthorizer authorizer, - [FromServices] IGraphJobService 
jobService, - CancellationToken cancellationToken) - { - try - { - authorizer.EnsureScope(httpContext, StellaOpsScopes.GraphWrite); - var tenant = tenantAccessor.GetTenant(httpContext); - var job = await jobService.CreateOverlayJobAsync(tenant.TenantId, request, cancellationToken); - return Results.Created($"/graphs/jobs/{job.Id}", GraphJobResponse.From(job)); - } - catch (UnauthorizedAccessException ex) - { - return Results.Json(new { error = ex.Message }, statusCode: StatusCodes.Status401Unauthorized); - } - catch (InvalidOperationException ex) - { - return Results.Json(new { error = ex.Message }, statusCode: StatusCodes.Status403Forbidden); - } - catch (ValidationException ex) - { - return Results.Json(new { error = ex.Message }, statusCode: StatusCodes.Status400BadRequest); - } - } - - internal static async Task<IResult> GetGraphJobs( - [AsParameters] GraphJobQuery query, - HttpContext httpContext, - [FromServices] ITenantContextAccessor tenantAccessor, - [FromServices] IScopeAuthorizer authorizer, - [FromServices] IGraphJobService jobService, - CancellationToken cancellationToken) - { - try - { - authorizer.EnsureScope(httpContext, StellaOpsScopes.GraphRead); - var tenant = tenantAccessor.GetTenant(httpContext); - var jobs = await jobService.GetJobsAsync(tenant.TenantId, query, cancellationToken); - return Results.Ok(jobs); - } - catch (UnauthorizedAccessException ex) - { - return Results.Json(new { error = ex.Message }, statusCode: StatusCodes.Status401Unauthorized); - } - catch (InvalidOperationException ex) - { - return Results.Json(new { error = ex.Message }, statusCode: StatusCodes.Status403Forbidden); - } - } - - internal static async Task<IResult> CompleteGraphJob( - [FromBody] GraphJobCompletionRequest request, - HttpContext httpContext, - [FromServices] ITenantContextAccessor tenantAccessor, - [FromServices] IScopeAuthorizer authorizer, - [FromServices] IGraphJobService jobService, - CancellationToken cancellationToken) - { - try - { - authorizer.EnsureScope(httpContext, StellaOpsScopes.GraphWrite); - var tenant = tenantAccessor.GetTenant(httpContext); - var response = await jobService.CompleteJobAsync(tenant.TenantId, request, cancellationToken); - return Results.Ok(response); - } - catch (UnauthorizedAccessException ex) - { - return Results.Json(new { error = ex.Message }, statusCode: StatusCodes.Status401Unauthorized); - } - catch (KeyNotFoundException ex) - { - return Results.Json(new { error = ex.Message }, statusCode: StatusCodes.Status404NotFound); - } - catch (InvalidOperationException ex) - { - return Results.Json(new { error = ex.Message }, statusCode: StatusCodes.Status403Forbidden); - } - catch (ValidationException ex) - { - return Results.Json(new { error = ex.Message }, statusCode: StatusCodes.Status400BadRequest); - } - } - - internal static async Task<IResult> GetOverlayLagMetrics( - HttpContext httpContext, - [FromServices] ITenantContextAccessor tenantAccessor, - [FromServices] IScopeAuthorizer authorizer, - [FromServices] IGraphJobService jobService, - CancellationToken cancellationToken) - { - try - { - authorizer.EnsureScope(httpContext, StellaOpsScopes.GraphRead); - var tenant = tenantAccessor.GetTenant(httpContext); - var metrics = await jobService.GetOverlayLagMetricsAsync(tenant.TenantId, cancellationToken); - return Results.Ok(metrics); - } - catch (UnauthorizedAccessException ex) - { - return Results.Json(new { error = ex.Message }, statusCode: StatusCodes.Status401Unauthorized); - } - } -} +using Microsoft.AspNetCore.Mvc; +using 
System.ComponentModel.DataAnnotations; +using StellaOps.Auth.Abstractions; +using StellaOps.Scheduler.Models; +using StellaOps.Scheduler.WebService.Auth; + +namespace StellaOps.Scheduler.WebService.GraphJobs; + +public static class GraphJobEndpointExtensions +{ + public static void MapGraphJobEndpoints(this IEndpointRouteBuilder builder) + { + var group = builder.MapGroup("/graphs"); + + group.MapPost("/build", CreateGraphBuildJob); + group.MapPost("/overlays", CreateGraphOverlayJob); + group.MapGet("/jobs", GetGraphJobs); + group.MapPost("/hooks/completed", CompleteGraphJob); + group.MapGet("/overlays/lag", GetOverlayLagMetrics); + } + + internal static async Task<IResult> CreateGraphBuildJob( + [FromBody] GraphBuildJobRequest request, + HttpContext httpContext, + [FromServices] ITenantContextAccessor tenantAccessor, + [FromServices] IScopeAuthorizer authorizer, + [FromServices] IGraphJobService jobService, + CancellationToken cancellationToken) + { + try + { + authorizer.EnsureScope(httpContext, StellaOpsScopes.GraphWrite); + var tenant = tenantAccessor.GetTenant(httpContext); + var job = await jobService.CreateBuildJobAsync(tenant.TenantId, request, cancellationToken); + return Results.Created($"/graphs/jobs/{job.Id}", GraphJobResponse.From(job)); + } + catch (UnauthorizedAccessException ex) + { + return Results.Json(new { error = ex.Message }, statusCode: StatusCodes.Status401Unauthorized); + } + catch (InvalidOperationException ex) + { + return Results.Json(new { error = ex.Message }, statusCode: StatusCodes.Status403Forbidden); + } + catch (ValidationException ex) + { + return Results.Json(new { error = ex.Message }, statusCode: StatusCodes.Status400BadRequest); + } + catch (KeyNotFoundException ex) + { + return Results.Json(new { error = ex.Message }, statusCode: StatusCodes.Status404NotFound); + } + } + + internal static async Task<IResult> CreateGraphOverlayJob( + [FromBody] GraphOverlayJobRequest request, + HttpContext httpContext, + [FromServices] ITenantContextAccessor tenantAccessor, + [FromServices] IScopeAuthorizer authorizer, + [FromServices] IGraphJobService jobService, + CancellationToken cancellationToken) + { + try + { + authorizer.EnsureScope(httpContext, StellaOpsScopes.GraphWrite); + var tenant = tenantAccessor.GetTenant(httpContext); + var job = await jobService.CreateOverlayJobAsync(tenant.TenantId, request, cancellationToken); + return Results.Created($"/graphs/jobs/{job.Id}", GraphJobResponse.From(job)); + } + catch (UnauthorizedAccessException ex) + { + return Results.Json(new { error = ex.Message }, statusCode: StatusCodes.Status401Unauthorized); + } + catch (InvalidOperationException ex) + { + return Results.Json(new { error = ex.Message }, statusCode: StatusCodes.Status403Forbidden); + } + catch (ValidationException ex) + { + return Results.Json(new { error = ex.Message }, statusCode: StatusCodes.Status400BadRequest); + } + } + + internal static async Task<IResult> GetGraphJobs( + [AsParameters] GraphJobQuery query, + HttpContext httpContext, + [FromServices] ITenantContextAccessor tenantAccessor, + [FromServices] IScopeAuthorizer authorizer, + [FromServices] IGraphJobService jobService, + CancellationToken cancellationToken) + { + try + { + authorizer.EnsureScope(httpContext, StellaOpsScopes.GraphRead); + var tenant = tenantAccessor.GetTenant(httpContext); + var jobs = await jobService.GetJobsAsync(tenant.TenantId, query, cancellationToken); + return Results.Ok(jobs); + } + catch (UnauthorizedAccessException ex) + { + return Results.Json(new { error = 
ex.Message }, statusCode: StatusCodes.Status401Unauthorized); + } + catch (InvalidOperationException ex) + { + return Results.Json(new { error = ex.Message }, statusCode: StatusCodes.Status403Forbidden); + } + } + + internal static async Task<IResult> CompleteGraphJob( + [FromBody] GraphJobCompletionRequest request, + HttpContext httpContext, + [FromServices] ITenantContextAccessor tenantAccessor, + [FromServices] IScopeAuthorizer authorizer, + [FromServices] IGraphJobService jobService, + CancellationToken cancellationToken) + { + try + { + authorizer.EnsureScope(httpContext, StellaOpsScopes.GraphWrite); + var tenant = tenantAccessor.GetTenant(httpContext); + var response = await jobService.CompleteJobAsync(tenant.TenantId, request, cancellationToken); + return Results.Ok(response); + } + catch (UnauthorizedAccessException ex) + { + return Results.Json(new { error = ex.Message }, statusCode: StatusCodes.Status401Unauthorized); + } + catch (KeyNotFoundException ex) + { + return Results.Json(new { error = ex.Message }, statusCode: StatusCodes.Status404NotFound); + } + catch (InvalidOperationException ex) + { + return Results.Json(new { error = ex.Message }, statusCode: StatusCodes.Status403Forbidden); + } + catch (ValidationException ex) + { + return Results.Json(new { error = ex.Message }, statusCode: StatusCodes.Status400BadRequest); + } + } + + internal static async Task<IResult> GetOverlayLagMetrics( + HttpContext httpContext, + [FromServices] ITenantContextAccessor tenantAccessor, + [FromServices] IScopeAuthorizer authorizer, + [FromServices] IGraphJobService jobService, + CancellationToken cancellationToken) + { + try + { + authorizer.EnsureScope(httpContext, StellaOpsScopes.GraphRead); + var tenant = tenantAccessor.GetTenant(httpContext); + var metrics = await jobService.GetOverlayLagMetricsAsync(tenant.TenantId, cancellationToken); + return Results.Ok(metrics); + } + catch (UnauthorizedAccessException ex) + { + return Results.Json(new { error = ex.Message }, statusCode: StatusCodes.Status401Unauthorized); + } + } +} diff --git a/src/StellaOps.Scheduler.WebService/GraphJobs/GraphJobQuery.cs b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/GraphJobQuery.cs similarity index 95% rename from src/StellaOps.Scheduler.WebService/GraphJobs/GraphJobQuery.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/GraphJobQuery.cs index 8d03e2a6..b92e7f7a 100644 --- a/src/StellaOps.Scheduler.WebService/GraphJobs/GraphJobQuery.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/GraphJobQuery.cs @@ -1,27 +1,27 @@ -using System.Text.Json.Serialization; -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.WebService.GraphJobs; - -public sealed record GraphJobQuery -{ - [JsonConverter(typeof(JsonStringEnumConverter))] - public GraphJobQueryType? Type { get; init; } - - [JsonConverter(typeof(JsonStringEnumConverter))] - public GraphJobStatus? Status { get; init; } - - public int? Limit { get; init; } - - internal GraphJobQuery Normalize() - => this with - { - Limit = Limit is null or <= 0 or > 200 ? 50 : Limit - }; -} - -public enum GraphJobQueryType -{ - Build, - Overlay -} +using System.Text.Json.Serialization; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.WebService.GraphJobs; + +public sealed record GraphJobQuery +{ + [JsonConverter(typeof(JsonStringEnumConverter))] + public GraphJobQueryType? Type { get; init; } + + [JsonConverter(typeof(JsonStringEnumConverter))] + public GraphJobStatus? Status { get; init; } + + public int? 
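A brief sketch of the request body the new POST /graphs/build route binds to, with sample values only: the handler enforces the graph write scope through IScopeAuthorizer and resolves the tenant via ITenantContextAccessor, so the concrete authentication and tenant headers depend on deployment configuration and are omitted here.

    using System.Text.Json;
    using StellaOps.Scheduler.WebService.GraphJobs;

    // Only SbomId, SbomVersionId and SbomDigest are [Required]; when Trigger is
    // omitted, GraphJobService defaults it to GraphBuildJobTrigger.SbomVersion.
    var buildRequest = new GraphBuildJobRequest
    {
        SbomId = "sbom-123",             // sample values throughout
        SbomVersionId = "sbom-123-v7",
        SbomDigest = "sha256:d34db33f",  // placeholder digest
        CorrelationId = "run-42"
    };

    var json = JsonSerializer.Serialize(buildRequest, new JsonSerializerOptions(JsonSerializerDefaults.Web));
    // POST the JSON to /graphs/build; a successful call returns 201 Created with a GraphJobResponse body.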
Limit { get; init; } + + internal GraphJobQuery Normalize() + => this with + { + Limit = Limit is null or <= 0 or > 200 ? 50 : Limit + }; +} + +public enum GraphJobQueryType +{ + Build, + Overlay +} diff --git a/src/StellaOps.Scheduler.WebService/GraphJobs/GraphJobResponse.cs b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/GraphJobResponse.cs similarity index 96% rename from src/StellaOps.Scheduler.WebService/GraphJobs/GraphJobResponse.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/GraphJobResponse.cs index d692bb39..a67bcfb7 100644 --- a/src/StellaOps.Scheduler.WebService/GraphJobs/GraphJobResponse.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/GraphJobResponse.cs @@ -1,45 +1,45 @@ -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.WebService.GraphJobs; - -public sealed record GraphJobResponse -{ - public required string Id { get; init; } - public required string TenantId { get; init; } - public required string Kind { get; init; } - public required GraphJobStatus Status { get; init; } - public required object Payload { get; init; } - - public static GraphJobResponse From(GraphBuildJob job) - => new() - { - Id = job.Id, - TenantId = job.TenantId, - Kind = "build", - Status = job.Status, - Payload = job - }; - - public static GraphJobResponse From(GraphOverlayJob job) - => new() - { - Id = job.Id, - TenantId = job.TenantId, - Kind = "overlay", - Status = job.Status, - Payload = job - }; -} - -public sealed record GraphJobCollection(IReadOnlyList<GraphJobResponse> Jobs) -{ - public static GraphJobCollection From(IEnumerable<GraphBuildJob> builds, IEnumerable<GraphOverlayJob> overlays) - { - var responses = builds.Select(GraphJobResponse.From) - .Concat(overlays.Select(GraphJobResponse.From)) - .OrderBy(response => response.Id, StringComparer.Ordinal) - .ToArray(); - - return new GraphJobCollection(responses); - } -} +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.WebService.GraphJobs; + +public sealed record GraphJobResponse +{ + public required string Id { get; init; } + public required string TenantId { get; init; } + public required string Kind { get; init; } + public required GraphJobStatus Status { get; init; } + public required object Payload { get; init; } + + public static GraphJobResponse From(GraphBuildJob job) + => new() + { + Id = job.Id, + TenantId = job.TenantId, + Kind = "build", + Status = job.Status, + Payload = job + }; + + public static GraphJobResponse From(GraphOverlayJob job) + => new() + { + Id = job.Id, + TenantId = job.TenantId, + Kind = "overlay", + Status = job.Status, + Payload = job + }; +} + +public sealed record GraphJobCollection(IReadOnlyList<GraphJobResponse> Jobs) +{ + public static GraphJobCollection From(IEnumerable<GraphBuildJob> builds, IEnumerable<GraphOverlayJob> overlays) + { + var responses = builds.Select(GraphJobResponse.From) + .Concat(overlays.Select(GraphJobResponse.From)) + .OrderBy(response => response.Id, StringComparer.Ordinal) + .ToArray(); + + return new GraphJobCollection(responses); + } +} diff --git a/src/StellaOps.Scheduler.WebService/GraphJobs/GraphJobService.cs b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/GraphJobService.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService/GraphJobs/GraphJobService.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/GraphJobService.cs index 6e4733b7..fda8271c 100644 --- a/src/StellaOps.Scheduler.WebService/GraphJobs/GraphJobService.cs +++ 
b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/GraphJobService.cs @@ -1,338 +1,338 @@ -using System.Collections.Immutable; -using System.ComponentModel.DataAnnotations; -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.WebService.GraphJobs; - -internal sealed class GraphJobService : IGraphJobService -{ - private readonly IGraphJobStore _store; - private readonly ISystemClock _clock; - private readonly IGraphJobCompletionPublisher _completionPublisher; - private readonly ICartographerWebhookClient _cartographerWebhook; - - public GraphJobService( - IGraphJobStore store, - ISystemClock clock, - IGraphJobCompletionPublisher completionPublisher, - ICartographerWebhookClient cartographerWebhook) - { - _store = store ?? throw new ArgumentNullException(nameof(store)); - _clock = clock ?? throw new ArgumentNullException(nameof(clock)); - _completionPublisher = completionPublisher ?? throw new ArgumentNullException(nameof(completionPublisher)); - _cartographerWebhook = cartographerWebhook ?? throw new ArgumentNullException(nameof(cartographerWebhook)); - } - - public async Task<GraphBuildJob> CreateBuildJobAsync(string tenantId, GraphBuildJobRequest request, CancellationToken cancellationToken) - { - Validate(request); - - var trigger = request.Trigger ?? GraphBuildJobTrigger.SbomVersion; - var metadata = request.Metadata ?? new Dictionary<string, string>(StringComparer.Ordinal); - - var now = _clock.UtcNow; - var id = GenerateIdentifier("gbj"); - var job = new GraphBuildJob( - id, - tenantId, - request.SbomId.Trim(), - request.SbomVersionId.Trim(), - NormalizeDigest(request.SbomDigest), - GraphJobStatus.Pending, - trigger, - now, - request.GraphSnapshotId, - attempts: 0, - cartographerJobId: null, - correlationId: request.CorrelationId?.Trim(), - startedAt: null, - completedAt: null, - error: null, - metadata: metadata.Select(pair => new KeyValuePair<string, string>(pair.Key, pair.Value))); - - return await _store.AddAsync(job, cancellationToken); - } - - public async Task<GraphOverlayJob> CreateOverlayJobAsync(string tenantId, GraphOverlayJobRequest request, CancellationToken cancellationToken) - { - Validate(request); - - var subjects = (request.Subjects ?? Array.Empty<string>()) - .Where(subject => !string.IsNullOrWhiteSpace(subject)) - .Select(subject => subject.Trim()) - .ToArray(); - var metadata = request.Metadata ?? new Dictionary<string, string>(StringComparer.Ordinal); - var trigger = request.Trigger ?? 
GraphOverlayJobTrigger.Policy; - - var now = _clock.UtcNow; - var id = GenerateIdentifier("goj"); - - var job = new GraphOverlayJob( - id: id, - tenantId: tenantId, - graphSnapshotId: request.GraphSnapshotId.Trim(), - overlayKind: request.OverlayKind, - overlayKey: request.OverlayKey.Trim(), - status: GraphJobStatus.Pending, - trigger: trigger, - createdAt: now, - subjects: subjects, - attempts: 0, - buildJobId: request.BuildJobId?.Trim(), - correlationId: request.CorrelationId?.Trim(), - metadata: metadata.Select(pair => new KeyValuePair<string, string>(pair.Key, pair.Value))); - - return await _store.AddAsync(job, cancellationToken); - } - - public async Task<GraphJobCollection> GetJobsAsync(string tenantId, GraphJobQuery query, CancellationToken cancellationToken) - { - return await _store.GetJobsAsync(tenantId, query, cancellationToken); - } - - public async Task<GraphJobResponse> CompleteJobAsync(string tenantId, GraphJobCompletionRequest request, CancellationToken cancellationToken) - { - if (request.Status is not (GraphJobStatus.Completed or GraphJobStatus.Failed or GraphJobStatus.Cancelled)) - { - throw new ValidationException("Completion requires status completed, failed, or cancelled."); - } - - var occurredAt = request.OccurredAt == default ? _clock.UtcNow : request.OccurredAt.ToUniversalTime(); - - switch (request.JobType) - { - case GraphJobQueryType.Build: - { - var existing = await _store.GetBuildJobAsync(tenantId, request.JobId, cancellationToken); - if (existing is null) - { - throw new KeyNotFoundException($"Graph build job '{request.JobId}' not found."); - } - - var current = existing; - if (current.Status is GraphJobStatus.Pending or GraphJobStatus.Queued) - { - current = GraphJobStateMachine.EnsureTransition(current, GraphJobStatus.Running, occurredAt, attempts: current.Attempts); - } - - var updated = GraphJobStateMachine.EnsureTransition(current, request.Status, occurredAt, attempts: current.Attempts + 1, errorMessage: request.Error); - var metadata = MergeMetadata(updated.Metadata, request.ResultUri); - var normalized = new GraphBuildJob( - id: updated.Id, - tenantId: updated.TenantId, - sbomId: updated.SbomId, - sbomVersionId: updated.SbomVersionId, - sbomDigest: updated.SbomDigest, - graphSnapshotId: request.GraphSnapshotId?.Trim() ?? updated.GraphSnapshotId, - status: updated.Status, - trigger: updated.Trigger, - attempts: updated.Attempts, - cartographerJobId: updated.CartographerJobId, - correlationId: request.CorrelationId?.Trim() ?? 
updated.CorrelationId, - createdAt: updated.CreatedAt, - startedAt: updated.StartedAt, - completedAt: updated.CompletedAt, - error: updated.Error, - metadata: metadata, - schemaVersion: updated.SchemaVersion); - - var stored = await _store.UpdateAsync(normalized, cancellationToken); - var response = GraphJobResponse.From(stored); - await PublishCompletionAsync(tenantId, GraphJobQueryType.Build, request.Status, occurredAt, response, request.ResultUri, request.CorrelationId, request.Error, cancellationToken); - return response; - } - - case GraphJobQueryType.Overlay: - { - var existing = await _store.GetOverlayJobAsync(tenantId, request.JobId, cancellationToken); - if (existing is null) - { - throw new KeyNotFoundException($"Graph overlay job '{request.JobId}' not found."); - } - - var current = existing; - if (current.Status is GraphJobStatus.Pending or GraphJobStatus.Queued) - { - current = GraphJobStateMachine.EnsureTransition(current, GraphJobStatus.Running, occurredAt, attempts: current.Attempts); - } - - var updated = GraphJobStateMachine.EnsureTransition(current, request.Status, occurredAt, attempts: current.Attempts + 1, errorMessage: request.Error); - var metadata = MergeMetadata(updated.Metadata, request.ResultUri); - var normalized = new GraphOverlayJob( - id: updated.Id, - tenantId: updated.TenantId, - graphSnapshotId: updated.GraphSnapshotId, - buildJobId: updated.BuildJobId, - overlayKind: updated.OverlayKind, - overlayKey: updated.OverlayKey, - subjects: updated.Subjects, - status: updated.Status, - trigger: updated.Trigger, - attempts: updated.Attempts, - correlationId: request.CorrelationId?.Trim() ?? updated.CorrelationId, - createdAt: updated.CreatedAt, - startedAt: updated.StartedAt, - completedAt: updated.CompletedAt, - error: updated.Error, - metadata: metadata, - schemaVersion: updated.SchemaVersion); - - var stored = await _store.UpdateAsync(normalized, cancellationToken); - var response = GraphJobResponse.From(stored); - await PublishCompletionAsync(tenantId, GraphJobQueryType.Overlay, request.Status, occurredAt, response, request.ResultUri, request.CorrelationId, request.Error, cancellationToken); - return response; - } - - default: - throw new ValidationException("Unsupported job type."); - } - } - - public async Task<OverlayLagMetricsResponse> GetOverlayLagMetricsAsync(string tenantId, CancellationToken cancellationToken) - { - var now = _clock.UtcNow; - var overlayJobs = await _store.GetOverlayJobsAsync(tenantId, cancellationToken); - - var pending = overlayJobs.Count(job => job.Status == GraphJobStatus.Pending); - var running = overlayJobs.Count(job => job.Status == GraphJobStatus.Running || job.Status == GraphJobStatus.Queued); - var completed = overlayJobs.Count(job => job.Status == GraphJobStatus.Completed); - var failed = overlayJobs.Count(job => job.Status == GraphJobStatus.Failed); - var cancelled = overlayJobs.Count(job => job.Status == GraphJobStatus.Cancelled); - - var completedJobs = overlayJobs - .Where(job => job.Status == GraphJobStatus.Completed && job.CompletedAt is not null) - .OrderByDescending(job => job.CompletedAt) - .ToArray(); - - double? minLag = null; - double? maxLag = null; - double? 
avgLag = null; - List<OverlayLagEntry> recent = new(); - - if (completedJobs.Length > 0) - { - var lags = completedJobs - .Select(job => (now - job.CompletedAt!.Value).TotalSeconds) - .ToArray(); - - minLag = lags.Min(); - maxLag = lags.Max(); - avgLag = lags.Average(); - - recent = completedJobs - .Take(5) - .Select(job => new OverlayLagEntry( - JobId: job.Id, - CompletedAt: job.CompletedAt!.Value, - LagSeconds: (now - job.CompletedAt!.Value).TotalSeconds, - CorrelationId: job.CorrelationId, - ResultUri: job.Metadata.TryGetValue("resultUri", out var value) ? value : null)) - .ToList(); - } - - return new OverlayLagMetricsResponse( - TenantId: tenantId, - Pending: pending, - Running: running, - Completed: completed, - Failed: failed, - Cancelled: cancelled, - MinLagSeconds: minLag, - MaxLagSeconds: maxLag, - AverageLagSeconds: avgLag, - RecentCompleted: recent); - } - - private static void Validate(GraphBuildJobRequest request) - { - if (string.IsNullOrWhiteSpace(request.SbomId)) - { - throw new ValidationException("sbomId is required."); - } - - if (string.IsNullOrWhiteSpace(request.SbomVersionId)) - { - throw new ValidationException("sbomVersionId is required."); - } - - if (string.IsNullOrWhiteSpace(request.SbomDigest)) - { - throw new ValidationException("sbomDigest is required."); - } - } - - private static void Validate(GraphOverlayJobRequest request) - { - if (string.IsNullOrWhiteSpace(request.GraphSnapshotId)) - { - throw new ValidationException("graphSnapshotId is required."); - } - - if (string.IsNullOrWhiteSpace(request.OverlayKey)) - { - throw new ValidationException("overlayKey is required."); - } - } - - private static string GenerateIdentifier(string prefix) - => $"{prefix}_{Guid.CreateVersion7().ToString("n")}"; - - private static string NormalizeDigest(string value) - { - var text = value.Trim(); - if (!text.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase)) - { - throw new ValidationException("sbomDigest must start with 'sha256:'."); - } - - var digest = text[7..]; - if (digest.Length != 64 || !digest.All(IsHex)) - { - throw new ValidationException("sbomDigest must contain 64 hexadecimal characters."); - } - - return $"sha256:{digest.ToLowerInvariant()}"; - } - - private static bool IsHex(char c) - => (c >= '0' && c <= '9') || - (c >= 'a' && c <= 'f') || - (c >= 'A' && c <= 'F'); - - private static ImmutableSortedDictionary<string, string> MergeMetadata(ImmutableSortedDictionary<string, string> existing, string? resultUri) - { - if (string.IsNullOrWhiteSpace(resultUri)) - { - return existing; - } - - var builder = existing.ToBuilder(); - builder["resultUri"] = resultUri.Trim(); - return builder.ToImmutableSortedDictionary(StringComparer.Ordinal); - } - - private async Task PublishCompletionAsync( - string tenantId, - GraphJobQueryType jobType, - GraphJobStatus status, - DateTimeOffset occurredAt, - GraphJobResponse response, - string? resultUri, - string? correlationId, - string? 
error, - CancellationToken cancellationToken) - { - var notification = new GraphJobCompletionNotification( - tenantId, - jobType, - status, - occurredAt, - response, - resultUri, - correlationId, - error); - - await _completionPublisher.PublishAsync(notification, cancellationToken).ConfigureAwait(false); - await _cartographerWebhook.NotifyAsync(notification, cancellationToken).ConfigureAwait(false); - } -} +using System.Collections.Immutable; +using System.ComponentModel.DataAnnotations; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.WebService.GraphJobs; + +internal sealed class GraphJobService : IGraphJobService +{ + private readonly IGraphJobStore _store; + private readonly ISystemClock _clock; + private readonly IGraphJobCompletionPublisher _completionPublisher; + private readonly ICartographerWebhookClient _cartographerWebhook; + + public GraphJobService( + IGraphJobStore store, + ISystemClock clock, + IGraphJobCompletionPublisher completionPublisher, + ICartographerWebhookClient cartographerWebhook) + { + _store = store ?? throw new ArgumentNullException(nameof(store)); + _clock = clock ?? throw new ArgumentNullException(nameof(clock)); + _completionPublisher = completionPublisher ?? throw new ArgumentNullException(nameof(completionPublisher)); + _cartographerWebhook = cartographerWebhook ?? throw new ArgumentNullException(nameof(cartographerWebhook)); + } + + public async Task<GraphBuildJob> CreateBuildJobAsync(string tenantId, GraphBuildJobRequest request, CancellationToken cancellationToken) + { + Validate(request); + + var trigger = request.Trigger ?? GraphBuildJobTrigger.SbomVersion; + var metadata = request.Metadata ?? new Dictionary<string, string>(StringComparer.Ordinal); + + var now = _clock.UtcNow; + var id = GenerateIdentifier("gbj"); + var job = new GraphBuildJob( + id, + tenantId, + request.SbomId.Trim(), + request.SbomVersionId.Trim(), + NormalizeDigest(request.SbomDigest), + GraphJobStatus.Pending, + trigger, + now, + request.GraphSnapshotId, + attempts: 0, + cartographerJobId: null, + correlationId: request.CorrelationId?.Trim(), + startedAt: null, + completedAt: null, + error: null, + metadata: metadata.Select(pair => new KeyValuePair<string, string>(pair.Key, pair.Value))); + + return await _store.AddAsync(job, cancellationToken); + } + + public async Task<GraphOverlayJob> CreateOverlayJobAsync(string tenantId, GraphOverlayJobRequest request, CancellationToken cancellationToken) + { + Validate(request); + + var subjects = (request.Subjects ?? Array.Empty<string>()) + .Where(subject => !string.IsNullOrWhiteSpace(subject)) + .Select(subject => subject.Trim()) + .ToArray(); + var metadata = request.Metadata ?? new Dictionary<string, string>(StringComparer.Ordinal); + var trigger = request.Trigger ?? 
GraphOverlayJobTrigger.Policy; + + var now = _clock.UtcNow; + var id = GenerateIdentifier("goj"); + + var job = new GraphOverlayJob( + id: id, + tenantId: tenantId, + graphSnapshotId: request.GraphSnapshotId.Trim(), + overlayKind: request.OverlayKind, + overlayKey: request.OverlayKey.Trim(), + status: GraphJobStatus.Pending, + trigger: trigger, + createdAt: now, + subjects: subjects, + attempts: 0, + buildJobId: request.BuildJobId?.Trim(), + correlationId: request.CorrelationId?.Trim(), + metadata: metadata.Select(pair => new KeyValuePair<string, string>(pair.Key, pair.Value))); + + return await _store.AddAsync(job, cancellationToken); + } + + public async Task<GraphJobCollection> GetJobsAsync(string tenantId, GraphJobQuery query, CancellationToken cancellationToken) + { + return await _store.GetJobsAsync(tenantId, query, cancellationToken); + } + + public async Task<GraphJobResponse> CompleteJobAsync(string tenantId, GraphJobCompletionRequest request, CancellationToken cancellationToken) + { + if (request.Status is not (GraphJobStatus.Completed or GraphJobStatus.Failed or GraphJobStatus.Cancelled)) + { + throw new ValidationException("Completion requires status completed, failed, or cancelled."); + } + + var occurredAt = request.OccurredAt == default ? _clock.UtcNow : request.OccurredAt.ToUniversalTime(); + + switch (request.JobType) + { + case GraphJobQueryType.Build: + { + var existing = await _store.GetBuildJobAsync(tenantId, request.JobId, cancellationToken); + if (existing is null) + { + throw new KeyNotFoundException($"Graph build job '{request.JobId}' not found."); + } + + var current = existing; + if (current.Status is GraphJobStatus.Pending or GraphJobStatus.Queued) + { + current = GraphJobStateMachine.EnsureTransition(current, GraphJobStatus.Running, occurredAt, attempts: current.Attempts); + } + + var updated = GraphJobStateMachine.EnsureTransition(current, request.Status, occurredAt, attempts: current.Attempts + 1, errorMessage: request.Error); + var metadata = MergeMetadata(updated.Metadata, request.ResultUri); + var normalized = new GraphBuildJob( + id: updated.Id, + tenantId: updated.TenantId, + sbomId: updated.SbomId, + sbomVersionId: updated.SbomVersionId, + sbomDigest: updated.SbomDigest, + graphSnapshotId: request.GraphSnapshotId?.Trim() ?? updated.GraphSnapshotId, + status: updated.Status, + trigger: updated.Trigger, + attempts: updated.Attempts, + cartographerJobId: updated.CartographerJobId, + correlationId: request.CorrelationId?.Trim() ?? 
updated.CorrelationId, + createdAt: updated.CreatedAt, + startedAt: updated.StartedAt, + completedAt: updated.CompletedAt, + error: updated.Error, + metadata: metadata, + schemaVersion: updated.SchemaVersion); + + var stored = await _store.UpdateAsync(normalized, cancellationToken); + var response = GraphJobResponse.From(stored); + await PublishCompletionAsync(tenantId, GraphJobQueryType.Build, request.Status, occurredAt, response, request.ResultUri, request.CorrelationId, request.Error, cancellationToken); + return response; + } + + case GraphJobQueryType.Overlay: + { + var existing = await _store.GetOverlayJobAsync(tenantId, request.JobId, cancellationToken); + if (existing is null) + { + throw new KeyNotFoundException($"Graph overlay job '{request.JobId}' not found."); + } + + var current = existing; + if (current.Status is GraphJobStatus.Pending or GraphJobStatus.Queued) + { + current = GraphJobStateMachine.EnsureTransition(current, GraphJobStatus.Running, occurredAt, attempts: current.Attempts); + } + + var updated = GraphJobStateMachine.EnsureTransition(current, request.Status, occurredAt, attempts: current.Attempts + 1, errorMessage: request.Error); + var metadata = MergeMetadata(updated.Metadata, request.ResultUri); + var normalized = new GraphOverlayJob( + id: updated.Id, + tenantId: updated.TenantId, + graphSnapshotId: updated.GraphSnapshotId, + buildJobId: updated.BuildJobId, + overlayKind: updated.OverlayKind, + overlayKey: updated.OverlayKey, + subjects: updated.Subjects, + status: updated.Status, + trigger: updated.Trigger, + attempts: updated.Attempts, + correlationId: request.CorrelationId?.Trim() ?? updated.CorrelationId, + createdAt: updated.CreatedAt, + startedAt: updated.StartedAt, + completedAt: updated.CompletedAt, + error: updated.Error, + metadata: metadata, + schemaVersion: updated.SchemaVersion); + + var stored = await _store.UpdateAsync(normalized, cancellationToken); + var response = GraphJobResponse.From(stored); + await PublishCompletionAsync(tenantId, GraphJobQueryType.Overlay, request.Status, occurredAt, response, request.ResultUri, request.CorrelationId, request.Error, cancellationToken); + return response; + } + + default: + throw new ValidationException("Unsupported job type."); + } + } + + public async Task<OverlayLagMetricsResponse> GetOverlayLagMetricsAsync(string tenantId, CancellationToken cancellationToken) + { + var now = _clock.UtcNow; + var overlayJobs = await _store.GetOverlayJobsAsync(tenantId, cancellationToken); + + var pending = overlayJobs.Count(job => job.Status == GraphJobStatus.Pending); + var running = overlayJobs.Count(job => job.Status == GraphJobStatus.Running || job.Status == GraphJobStatus.Queued); + var completed = overlayJobs.Count(job => job.Status == GraphJobStatus.Completed); + var failed = overlayJobs.Count(job => job.Status == GraphJobStatus.Failed); + var cancelled = overlayJobs.Count(job => job.Status == GraphJobStatus.Cancelled); + + var completedJobs = overlayJobs + .Where(job => job.Status == GraphJobStatus.Completed && job.CompletedAt is not null) + .OrderByDescending(job => job.CompletedAt) + .ToArray(); + + double? minLag = null; + double? maxLag = null; + double? 
avgLag = null; + List<OverlayLagEntry> recent = new(); + + if (completedJobs.Length > 0) + { + var lags = completedJobs + .Select(job => (now - job.CompletedAt!.Value).TotalSeconds) + .ToArray(); + + minLag = lags.Min(); + maxLag = lags.Max(); + avgLag = lags.Average(); + + recent = completedJobs + .Take(5) + .Select(job => new OverlayLagEntry( + JobId: job.Id, + CompletedAt: job.CompletedAt!.Value, + LagSeconds: (now - job.CompletedAt!.Value).TotalSeconds, + CorrelationId: job.CorrelationId, + ResultUri: job.Metadata.TryGetValue("resultUri", out var value) ? value : null)) + .ToList(); + } + + return new OverlayLagMetricsResponse( + TenantId: tenantId, + Pending: pending, + Running: running, + Completed: completed, + Failed: failed, + Cancelled: cancelled, + MinLagSeconds: minLag, + MaxLagSeconds: maxLag, + AverageLagSeconds: avgLag, + RecentCompleted: recent); + } + + private static void Validate(GraphBuildJobRequest request) + { + if (string.IsNullOrWhiteSpace(request.SbomId)) + { + throw new ValidationException("sbomId is required."); + } + + if (string.IsNullOrWhiteSpace(request.SbomVersionId)) + { + throw new ValidationException("sbomVersionId is required."); + } + + if (string.IsNullOrWhiteSpace(request.SbomDigest)) + { + throw new ValidationException("sbomDigest is required."); + } + } + + private static void Validate(GraphOverlayJobRequest request) + { + if (string.IsNullOrWhiteSpace(request.GraphSnapshotId)) + { + throw new ValidationException("graphSnapshotId is required."); + } + + if (string.IsNullOrWhiteSpace(request.OverlayKey)) + { + throw new ValidationException("overlayKey is required."); + } + } + + private static string GenerateIdentifier(string prefix) + => $"{prefix}_{Guid.CreateVersion7().ToString("n")}"; + + private static string NormalizeDigest(string value) + { + var text = value.Trim(); + if (!text.StartsWith("sha256:", StringComparison.OrdinalIgnoreCase)) + { + throw new ValidationException("sbomDigest must start with 'sha256:'."); + } + + var digest = text[7..]; + if (digest.Length != 64 || !digest.All(IsHex)) + { + throw new ValidationException("sbomDigest must contain 64 hexadecimal characters."); + } + + return $"sha256:{digest.ToLowerInvariant()}"; + } + + private static bool IsHex(char c) + => (c >= '0' && c <= '9') || + (c >= 'a' && c <= 'f') || + (c >= 'A' && c <= 'F'); + + private static ImmutableSortedDictionary<string, string> MergeMetadata(ImmutableSortedDictionary<string, string> existing, string? resultUri) + { + if (string.IsNullOrWhiteSpace(resultUri)) + { + return existing; + } + + var builder = existing.ToBuilder(); + builder["resultUri"] = resultUri.Trim(); + return builder.ToImmutableSortedDictionary(StringComparer.Ordinal); + } + + private async Task PublishCompletionAsync( + string tenantId, + GraphJobQueryType jobType, + GraphJobStatus status, + DateTimeOffset occurredAt, + GraphJobResponse response, + string? resultUri, + string? correlationId, + string? 
error, + CancellationToken cancellationToken) + { + var notification = new GraphJobCompletionNotification( + tenantId, + jobType, + status, + occurredAt, + response, + resultUri, + correlationId, + error); + + await _completionPublisher.PublishAsync(notification, cancellationToken).ConfigureAwait(false); + await _cartographerWebhook.NotifyAsync(notification, cancellationToken).ConfigureAwait(false); + } +} diff --git a/src/StellaOps.Scheduler.WebService/GraphJobs/GraphOverlayJobRequest.cs b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/GraphOverlayJobRequest.cs similarity index 96% rename from src/StellaOps.Scheduler.WebService/GraphJobs/GraphOverlayJobRequest.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/GraphOverlayJobRequest.cs index 76ce1764..b0eec980 100644 --- a/src/StellaOps.Scheduler.WebService/GraphJobs/GraphOverlayJobRequest.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/GraphOverlayJobRequest.cs @@ -1,29 +1,29 @@ -using System.ComponentModel.DataAnnotations; -using System.Text.Json.Serialization; -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.WebService.GraphJobs; - -public sealed record GraphOverlayJobRequest -{ - [Required] - public string GraphSnapshotId { get; init; } = string.Empty; - - public string? BuildJobId { get; init; } - - [Required] - [JsonConverter(typeof(JsonStringEnumConverter))] - public GraphOverlayKind OverlayKind { get; init; } - - [Required] - public string OverlayKey { get; init; } = string.Empty; - - public IReadOnlyList<string>? Subjects { get; init; } - - [JsonConverter(typeof(JsonStringEnumConverter))] - public GraphOverlayJobTrigger? Trigger { get; init; } - - public string? CorrelationId { get; init; } - - public IDictionary<string, string>? Metadata { get; init; } -} +using System.ComponentModel.DataAnnotations; +using System.Text.Json.Serialization; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.WebService.GraphJobs; + +public sealed record GraphOverlayJobRequest +{ + [Required] + public string GraphSnapshotId { get; init; } = string.Empty; + + public string? BuildJobId { get; init; } + + [Required] + [JsonConverter(typeof(JsonStringEnumConverter))] + public GraphOverlayKind OverlayKind { get; init; } + + [Required] + public string OverlayKey { get; init; } = string.Empty; + + public IReadOnlyList<string>? Subjects { get; init; } + + [JsonConverter(typeof(JsonStringEnumConverter))] + public GraphOverlayJobTrigger? Trigger { get; init; } + + public string? CorrelationId { get; init; } + + public IDictionary<string, string>? 
Metadata { get; init; } +} diff --git a/src/StellaOps.Scheduler.WebService/GraphJobs/ICartographerWebhookClient.cs b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/ICartographerWebhookClient.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService/GraphJobs/ICartographerWebhookClient.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/ICartographerWebhookClient.cs index bc953d3d..a15578b9 100644 --- a/src/StellaOps.Scheduler.WebService/GraphJobs/ICartographerWebhookClient.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/ICartographerWebhookClient.cs @@ -1,6 +1,6 @@ -namespace StellaOps.Scheduler.WebService.GraphJobs; - -public interface ICartographerWebhookClient -{ - Task NotifyAsync(GraphJobCompletionNotification notification, CancellationToken cancellationToken); -} +namespace StellaOps.Scheduler.WebService.GraphJobs; + +public interface ICartographerWebhookClient +{ + Task NotifyAsync(GraphJobCompletionNotification notification, CancellationToken cancellationToken); +} diff --git a/src/StellaOps.Scheduler.WebService/GraphJobs/IGraphJobCompletionPublisher.cs b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/IGraphJobCompletionPublisher.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService/GraphJobs/IGraphJobCompletionPublisher.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/IGraphJobCompletionPublisher.cs index 8f036672..2858d2a0 100644 --- a/src/StellaOps.Scheduler.WebService/GraphJobs/IGraphJobCompletionPublisher.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/IGraphJobCompletionPublisher.cs @@ -1,6 +1,6 @@ -namespace StellaOps.Scheduler.WebService.GraphJobs; - -public interface IGraphJobCompletionPublisher -{ - Task PublishAsync(GraphJobCompletionNotification notification, CancellationToken cancellationToken); -} +namespace StellaOps.Scheduler.WebService.GraphJobs; + +public interface IGraphJobCompletionPublisher +{ + Task PublishAsync(GraphJobCompletionNotification notification, CancellationToken cancellationToken); +} diff --git a/src/StellaOps.Scheduler.WebService/GraphJobs/IGraphJobService.cs b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/IGraphJobService.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService/GraphJobs/IGraphJobService.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/IGraphJobService.cs index 708d83a5..635f9c21 100644 --- a/src/StellaOps.Scheduler.WebService/GraphJobs/IGraphJobService.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/IGraphJobService.cs @@ -1,16 +1,16 @@ -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.WebService.GraphJobs; - -public interface IGraphJobService -{ - Task<GraphBuildJob> CreateBuildJobAsync(string tenantId, GraphBuildJobRequest request, CancellationToken cancellationToken); - - Task<GraphOverlayJob> CreateOverlayJobAsync(string tenantId, GraphOverlayJobRequest request, CancellationToken cancellationToken); - - Task<GraphJobCollection> GetJobsAsync(string tenantId, GraphJobQuery query, CancellationToken cancellationToken); - - Task<GraphJobResponse> CompleteJobAsync(string tenantId, GraphJobCompletionRequest request, CancellationToken cancellationToken); - - Task<OverlayLagMetricsResponse> GetOverlayLagMetricsAsync(string tenantId, CancellationToken cancellationToken); -} +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.WebService.GraphJobs; + +public interface IGraphJobService +{ + Task<GraphBuildJob> 
CreateBuildJobAsync(string tenantId, GraphBuildJobRequest request, CancellationToken cancellationToken); + + Task<GraphOverlayJob> CreateOverlayJobAsync(string tenantId, GraphOverlayJobRequest request, CancellationToken cancellationToken); + + Task<GraphJobCollection> GetJobsAsync(string tenantId, GraphJobQuery query, CancellationToken cancellationToken); + + Task<GraphJobResponse> CompleteJobAsync(string tenantId, GraphJobCompletionRequest request, CancellationToken cancellationToken); + + Task<OverlayLagMetricsResponse> GetOverlayLagMetricsAsync(string tenantId, CancellationToken cancellationToken); +} diff --git a/src/StellaOps.Scheduler.WebService/GraphJobs/IGraphJobStore.cs b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/IGraphJobStore.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService/GraphJobs/IGraphJobStore.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/IGraphJobStore.cs index 6f01dd3c..3f271a58 100644 --- a/src/StellaOps.Scheduler.WebService/GraphJobs/IGraphJobStore.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/IGraphJobStore.cs @@ -1,22 +1,22 @@ -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.WebService.GraphJobs; - -public interface IGraphJobStore -{ - ValueTask<GraphBuildJob> AddAsync(GraphBuildJob job, CancellationToken cancellationToken); - - ValueTask<GraphOverlayJob> AddAsync(GraphOverlayJob job, CancellationToken cancellationToken); - - ValueTask<GraphJobCollection> GetJobsAsync(string tenantId, GraphJobQuery query, CancellationToken cancellationToken); - - ValueTask<GraphBuildJob?> GetBuildJobAsync(string tenantId, string jobId, CancellationToken cancellationToken); - - ValueTask<GraphOverlayJob?> GetOverlayJobAsync(string tenantId, string jobId, CancellationToken cancellationToken); - - ValueTask<GraphBuildJob> UpdateAsync(GraphBuildJob job, CancellationToken cancellationToken); - - ValueTask<GraphOverlayJob> UpdateAsync(GraphOverlayJob job, CancellationToken cancellationToken); - - ValueTask<IReadOnlyCollection<GraphOverlayJob>> GetOverlayJobsAsync(string tenantId, CancellationToken cancellationToken); -} +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.WebService.GraphJobs; + +public interface IGraphJobStore +{ + ValueTask<GraphBuildJob> AddAsync(GraphBuildJob job, CancellationToken cancellationToken); + + ValueTask<GraphOverlayJob> AddAsync(GraphOverlayJob job, CancellationToken cancellationToken); + + ValueTask<GraphJobCollection> GetJobsAsync(string tenantId, GraphJobQuery query, CancellationToken cancellationToken); + + ValueTask<GraphBuildJob?> GetBuildJobAsync(string tenantId, string jobId, CancellationToken cancellationToken); + + ValueTask<GraphOverlayJob?> GetOverlayJobAsync(string tenantId, string jobId, CancellationToken cancellationToken); + + ValueTask<GraphBuildJob> UpdateAsync(GraphBuildJob job, CancellationToken cancellationToken); + + ValueTask<GraphOverlayJob> UpdateAsync(GraphOverlayJob job, CancellationToken cancellationToken); + + ValueTask<IReadOnlyCollection<GraphOverlayJob>> GetOverlayJobsAsync(string tenantId, CancellationToken cancellationToken); +} diff --git a/src/StellaOps.Scheduler.WebService/GraphJobs/InMemoryGraphJobStore.cs b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/InMemoryGraphJobStore.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService/GraphJobs/InMemoryGraphJobStore.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/InMemoryGraphJobStore.cs index 00f74232..3995069b 100644 --- 
a/src/StellaOps.Scheduler.WebService/GraphJobs/InMemoryGraphJobStore.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/InMemoryGraphJobStore.cs @@ -1,83 +1,83 @@ -using System.Collections.Concurrent; -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.WebService.GraphJobs; - -internal sealed class InMemoryGraphJobStore : IGraphJobStore -{ - private readonly ConcurrentDictionary<string, GraphBuildJob> _buildJobs = new(StringComparer.Ordinal); - private readonly ConcurrentDictionary<string, GraphOverlayJob> _overlayJobs = new(StringComparer.Ordinal); - - public ValueTask<GraphBuildJob> AddAsync(GraphBuildJob job, CancellationToken cancellationToken) - { - _buildJobs[job.Id] = job; - return ValueTask.FromResult(job); - } - - public ValueTask<GraphOverlayJob> AddAsync(GraphOverlayJob job, CancellationToken cancellationToken) - { - _overlayJobs[job.Id] = job; - return ValueTask.FromResult(job); - } - - public ValueTask<GraphJobCollection> GetJobsAsync(string tenantId, GraphJobQuery query, CancellationToken cancellationToken) - { - var normalized = query.Normalize(); - var buildJobs = _buildJobs.Values - .Where(job => string.Equals(job.TenantId, tenantId, StringComparison.Ordinal)) - .Where(job => normalized.Status is null || job.Status == normalized.Status) - .OrderByDescending(job => job.CreatedAt) - .Take(normalized.Limit ?? 50) - .ToArray(); - - var overlayJobs = _overlayJobs.Values - .Where(job => string.Equals(job.TenantId, tenantId, StringComparison.Ordinal)) - .Where(job => normalized.Status is null || job.Status == normalized.Status) - .OrderByDescending(job => job.CreatedAt) - .Take(normalized.Limit ?? 50) - .ToArray(); - - return ValueTask.FromResult(GraphJobCollection.From(buildJobs, overlayJobs)); - } - - public ValueTask<GraphBuildJob?> GetBuildJobAsync(string tenantId, string jobId, CancellationToken cancellationToken) - { - if (_buildJobs.TryGetValue(jobId, out var job) && string.Equals(job.TenantId, tenantId, StringComparison.Ordinal)) - { - return ValueTask.FromResult<GraphBuildJob?>(job); - } - - return ValueTask.FromResult<GraphBuildJob?>(null); - } - - public ValueTask<GraphOverlayJob?> GetOverlayJobAsync(string tenantId, string jobId, CancellationToken cancellationToken) - { - if (_overlayJobs.TryGetValue(jobId, out var job) && string.Equals(job.TenantId, tenantId, StringComparison.Ordinal)) - { - return ValueTask.FromResult<GraphOverlayJob?>(job); - } - - return ValueTask.FromResult<GraphOverlayJob?>(null); - } - - public ValueTask<GraphBuildJob> UpdateAsync(GraphBuildJob job, CancellationToken cancellationToken) - { - _buildJobs[job.Id] = job; - return ValueTask.FromResult(job); - } - - public ValueTask<GraphOverlayJob> UpdateAsync(GraphOverlayJob job, CancellationToken cancellationToken) - { - _overlayJobs[job.Id] = job; - return ValueTask.FromResult(job); - } - - public ValueTask<IReadOnlyCollection<GraphOverlayJob>> GetOverlayJobsAsync(string tenantId, CancellationToken cancellationToken) - { - var jobs = _overlayJobs.Values - .Where(job => string.Equals(job.TenantId, tenantId, StringComparison.Ordinal)) - .ToArray(); - - return ValueTask.FromResult<IReadOnlyCollection<GraphOverlayJob>>(jobs); - } -} +using System.Collections.Concurrent; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.WebService.GraphJobs; + +internal sealed class InMemoryGraphJobStore : IGraphJobStore +{ + private readonly ConcurrentDictionary<string, GraphBuildJob> _buildJobs = new(StringComparer.Ordinal); + private readonly 
ConcurrentDictionary<string, GraphOverlayJob> _overlayJobs = new(StringComparer.Ordinal); + + public ValueTask<GraphBuildJob> AddAsync(GraphBuildJob job, CancellationToken cancellationToken) + { + _buildJobs[job.Id] = job; + return ValueTask.FromResult(job); + } + + public ValueTask<GraphOverlayJob> AddAsync(GraphOverlayJob job, CancellationToken cancellationToken) + { + _overlayJobs[job.Id] = job; + return ValueTask.FromResult(job); + } + + public ValueTask<GraphJobCollection> GetJobsAsync(string tenantId, GraphJobQuery query, CancellationToken cancellationToken) + { + var normalized = query.Normalize(); + var buildJobs = _buildJobs.Values + .Where(job => string.Equals(job.TenantId, tenantId, StringComparison.Ordinal)) + .Where(job => normalized.Status is null || job.Status == normalized.Status) + .OrderByDescending(job => job.CreatedAt) + .Take(normalized.Limit ?? 50) + .ToArray(); + + var overlayJobs = _overlayJobs.Values + .Where(job => string.Equals(job.TenantId, tenantId, StringComparison.Ordinal)) + .Where(job => normalized.Status is null || job.Status == normalized.Status) + .OrderByDescending(job => job.CreatedAt) + .Take(normalized.Limit ?? 50) + .ToArray(); + + return ValueTask.FromResult(GraphJobCollection.From(buildJobs, overlayJobs)); + } + + public ValueTask<GraphBuildJob?> GetBuildJobAsync(string tenantId, string jobId, CancellationToken cancellationToken) + { + if (_buildJobs.TryGetValue(jobId, out var job) && string.Equals(job.TenantId, tenantId, StringComparison.Ordinal)) + { + return ValueTask.FromResult<GraphBuildJob?>(job); + } + + return ValueTask.FromResult<GraphBuildJob?>(null); + } + + public ValueTask<GraphOverlayJob?> GetOverlayJobAsync(string tenantId, string jobId, CancellationToken cancellationToken) + { + if (_overlayJobs.TryGetValue(jobId, out var job) && string.Equals(job.TenantId, tenantId, StringComparison.Ordinal)) + { + return ValueTask.FromResult<GraphOverlayJob?>(job); + } + + return ValueTask.FromResult<GraphOverlayJob?>(null); + } + + public ValueTask<GraphBuildJob> UpdateAsync(GraphBuildJob job, CancellationToken cancellationToken) + { + _buildJobs[job.Id] = job; + return ValueTask.FromResult(job); + } + + public ValueTask<GraphOverlayJob> UpdateAsync(GraphOverlayJob job, CancellationToken cancellationToken) + { + _overlayJobs[job.Id] = job; + return ValueTask.FromResult(job); + } + + public ValueTask<IReadOnlyCollection<GraphOverlayJob>> GetOverlayJobsAsync(string tenantId, CancellationToken cancellationToken) + { + var jobs = _overlayJobs.Values + .Where(job => string.Equals(job.TenantId, tenantId, StringComparison.Ordinal)) + .ToArray(); + + return ValueTask.FromResult<IReadOnlyCollection<GraphOverlayJob>>(jobs); + } +} diff --git a/src/StellaOps.Scheduler.WebService/GraphJobs/MongoGraphJobStore.cs b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/MongoGraphJobStore.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService/GraphJobs/MongoGraphJobStore.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/MongoGraphJobStore.cs index 952c8b52..652db3b7 100644 --- a/src/StellaOps.Scheduler.WebService/GraphJobs/MongoGraphJobStore.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/MongoGraphJobStore.cs @@ -1,55 +1,55 @@ -using StellaOps.Scheduler.Models; -using StellaOps.Scheduler.Storage.Mongo.Repositories; - -namespace StellaOps.Scheduler.WebService.GraphJobs; - -internal sealed class MongoGraphJobStore : IGraphJobStore -{ - private readonly IGraphJobRepository _repository; - - public 
MongoGraphJobStore(IGraphJobRepository repository) - { - _repository = repository ?? throw new ArgumentNullException(nameof(repository)); - } - - public async ValueTask<GraphBuildJob> AddAsync(GraphBuildJob job, CancellationToken cancellationToken) - { - await _repository.InsertAsync(job, cancellationToken); - return job; - } - - public async ValueTask<GraphOverlayJob> AddAsync(GraphOverlayJob job, CancellationToken cancellationToken) - { - await _repository.InsertAsync(job, cancellationToken); - return job; - } - - public async ValueTask<GraphJobCollection> GetJobsAsync(string tenantId, GraphJobQuery query, CancellationToken cancellationToken) - { - var normalized = query.Normalize(); - var builds = normalized.Type is null or GraphJobQueryType.Build - ? await _repository.ListBuildJobsAsync(tenantId, normalized.Status, normalized.Limit ?? 50, cancellationToken) - : Array.Empty<GraphBuildJob>(); - - var overlays = normalized.Type is null or GraphJobQueryType.Overlay - ? await _repository.ListOverlayJobsAsync(tenantId, normalized.Status, normalized.Limit ?? 50, cancellationToken) - : Array.Empty<GraphOverlayJob>(); - - return GraphJobCollection.From(builds, overlays); - } - - public async ValueTask<GraphBuildJob?> GetBuildJobAsync(string tenantId, string jobId, CancellationToken cancellationToken) - => await _repository.GetBuildJobAsync(tenantId, jobId, cancellationToken); - - public async ValueTask<GraphOverlayJob?> GetOverlayJobAsync(string tenantId, string jobId, CancellationToken cancellationToken) - => await _repository.GetOverlayJobAsync(tenantId, jobId, cancellationToken); - - public async ValueTask<GraphBuildJob> UpdateAsync(GraphBuildJob job, CancellationToken cancellationToken) - => await _repository.ReplaceAsync(job, cancellationToken); - - public async ValueTask<GraphOverlayJob> UpdateAsync(GraphOverlayJob job, CancellationToken cancellationToken) - => await _repository.ReplaceAsync(job, cancellationToken); - - public async ValueTask<IReadOnlyCollection<GraphOverlayJob>> GetOverlayJobsAsync(string tenantId, CancellationToken cancellationToken) - => await _repository.ListOverlayJobsAsync(tenantId, cancellationToken); -} +using StellaOps.Scheduler.Models; +using StellaOps.Scheduler.Storage.Mongo.Repositories; + +namespace StellaOps.Scheduler.WebService.GraphJobs; + +internal sealed class MongoGraphJobStore : IGraphJobStore +{ + private readonly IGraphJobRepository _repository; + + public MongoGraphJobStore(IGraphJobRepository repository) + { + _repository = repository ?? throw new ArgumentNullException(nameof(repository)); + } + + public async ValueTask<GraphBuildJob> AddAsync(GraphBuildJob job, CancellationToken cancellationToken) + { + await _repository.InsertAsync(job, cancellationToken); + return job; + } + + public async ValueTask<GraphOverlayJob> AddAsync(GraphOverlayJob job, CancellationToken cancellationToken) + { + await _repository.InsertAsync(job, cancellationToken); + return job; + } + + public async ValueTask<GraphJobCollection> GetJobsAsync(string tenantId, GraphJobQuery query, CancellationToken cancellationToken) + { + var normalized = query.Normalize(); + var builds = normalized.Type is null or GraphJobQueryType.Build + ? await _repository.ListBuildJobsAsync(tenantId, normalized.Status, normalized.Limit ?? 50, cancellationToken) + : Array.Empty<GraphBuildJob>(); + + var overlays = normalized.Type is null or GraphJobQueryType.Overlay + ? await _repository.ListOverlayJobsAsync(tenantId, normalized.Status, normalized.Limit ?? 
50, cancellationToken) + : Array.Empty<GraphOverlayJob>(); + + return GraphJobCollection.From(builds, overlays); + } + + public async ValueTask<GraphBuildJob?> GetBuildJobAsync(string tenantId, string jobId, CancellationToken cancellationToken) + => await _repository.GetBuildJobAsync(tenantId, jobId, cancellationToken); + + public async ValueTask<GraphOverlayJob?> GetOverlayJobAsync(string tenantId, string jobId, CancellationToken cancellationToken) + => await _repository.GetOverlayJobAsync(tenantId, jobId, cancellationToken); + + public async ValueTask<GraphBuildJob> UpdateAsync(GraphBuildJob job, CancellationToken cancellationToken) + => await _repository.ReplaceAsync(job, cancellationToken); + + public async ValueTask<GraphOverlayJob> UpdateAsync(GraphOverlayJob job, CancellationToken cancellationToken) + => await _repository.ReplaceAsync(job, cancellationToken); + + public async ValueTask<IReadOnlyCollection<GraphOverlayJob>> GetOverlayJobsAsync(string tenantId, CancellationToken cancellationToken) + => await _repository.ListOverlayJobsAsync(tenantId, cancellationToken); +} diff --git a/src/StellaOps.Scheduler.WebService/GraphJobs/NullCartographerWebhookClient.cs b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/NullCartographerWebhookClient.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService/GraphJobs/NullCartographerWebhookClient.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/NullCartographerWebhookClient.cs index 86226432..a39207dd 100644 --- a/src/StellaOps.Scheduler.WebService/GraphJobs/NullCartographerWebhookClient.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/NullCartographerWebhookClient.cs @@ -1,17 +1,17 @@ -using Microsoft.Extensions.Logging; - -namespace StellaOps.Scheduler.WebService.GraphJobs; - -internal sealed class NullCartographerWebhookClient : ICartographerWebhookClient -{ - private readonly ILogger<NullCartographerWebhookClient> _logger; - - public NullCartographerWebhookClient(ILogger<NullCartographerWebhookClient> logger) - => _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - - public Task NotifyAsync(GraphJobCompletionNotification notification, CancellationToken cancellationToken) - { - _logger.LogDebug("Cartographer webhook suppressed for tenant {TenantId}, job {JobId} ({Status}).", notification.TenantId, notification.Job.Id, notification.Status); - return Task.CompletedTask; - } -} +using Microsoft.Extensions.Logging; + +namespace StellaOps.Scheduler.WebService.GraphJobs; + +internal sealed class NullCartographerWebhookClient : ICartographerWebhookClient +{ + private readonly ILogger<NullCartographerWebhookClient> _logger; + + public NullCartographerWebhookClient(ILogger<NullCartographerWebhookClient> logger) + => _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + + public Task NotifyAsync(GraphJobCompletionNotification notification, CancellationToken cancellationToken) + { + _logger.LogDebug("Cartographer webhook suppressed for tenant {TenantId}, job {JobId} ({Status}).", notification.TenantId, notification.Job.Id, notification.Status); + return Task.CompletedTask; + } +} diff --git a/src/StellaOps.Scheduler.WebService/GraphJobs/NullGraphJobCompletionPublisher.cs b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/NullGraphJobCompletionPublisher.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService/GraphJobs/NullGraphJobCompletionPublisher.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/NullGraphJobCompletionPublisher.cs index e87b8744..30257020 100644 --- a/src/StellaOps.Scheduler.WebService/GraphJobs/NullGraphJobCompletionPublisher.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/NullGraphJobCompletionPublisher.cs @@ -1,17 +1,17 @@ -using Microsoft.Extensions.Logging; - -namespace StellaOps.Scheduler.WebService.GraphJobs; - -internal sealed class NullGraphJobCompletionPublisher : IGraphJobCompletionPublisher -{ - private readonly ILogger<NullGraphJobCompletionPublisher> _logger; - - public NullGraphJobCompletionPublisher(ILogger<NullGraphJobCompletionPublisher> logger) - => _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - - public Task PublishAsync(GraphJobCompletionNotification notification, CancellationToken cancellationToken) - { - _logger.LogDebug("Graph job completion notification suppressed for tenant {TenantId}, job {JobId} ({Status}).", notification.TenantId, notification.Job.Id, notification.Status); - return Task.CompletedTask; - } -} +using Microsoft.Extensions.Logging; + +namespace StellaOps.Scheduler.WebService.GraphJobs; + +internal sealed class NullGraphJobCompletionPublisher : IGraphJobCompletionPublisher +{ + private readonly ILogger<NullGraphJobCompletionPublisher> _logger; + + public NullGraphJobCompletionPublisher(ILogger<NullGraphJobCompletionPublisher> logger) + => _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + + public Task PublishAsync(GraphJobCompletionNotification notification, CancellationToken cancellationToken) + { + _logger.LogDebug("Graph job completion notification suppressed for tenant {TenantId}, job {JobId} ({Status}).", notification.TenantId, notification.Job.Id, notification.Status); + return Task.CompletedTask; + } +} diff --git a/src/StellaOps.Scheduler.WebService/GraphJobs/OverlayLagMetricsResponse.cs b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/OverlayLagMetricsResponse.cs similarity index 96% rename from src/StellaOps.Scheduler.WebService/GraphJobs/OverlayLagMetricsResponse.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/OverlayLagMetricsResponse.cs index 3c2d543f..830363c7 100644 --- a/src/StellaOps.Scheduler.WebService/GraphJobs/OverlayLagMetricsResponse.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/GraphJobs/OverlayLagMetricsResponse.cs @@ -1,20 +1,20 @@ -namespace StellaOps.Scheduler.WebService.GraphJobs; - -public sealed record OverlayLagMetricsResponse( - string TenantId, - int Pending, - int Running, - int Completed, - int Failed, - int Cancelled, - double? MinLagSeconds, - double? MaxLagSeconds, - double? AverageLagSeconds, - IReadOnlyList<OverlayLagEntry> RecentCompleted); - -public sealed record OverlayLagEntry( - string JobId, - DateTimeOffset CompletedAt, - double LagSeconds, - string? 
CorrelationId, - string? ResultUri); +namespace StellaOps.Scheduler.WebService.GraphJobs; + +public sealed record OverlayLagMetricsResponse( + string TenantId, + int Pending, + int Running, + int Completed, + int Failed, + int Cancelled, + double? MinLagSeconds, + double? MaxLagSeconds, + double? AverageLagSeconds, + IReadOnlyList<OverlayLagEntry> RecentCompleted); + +public sealed record OverlayLagEntry( + string JobId, + DateTimeOffset CompletedAt, + double LagSeconds, + string? CorrelationId, + string? ResultUri); diff --git a/src/StellaOps.Scheduler.WebService/Hosting/SchedulerPluginHostFactory.cs b/src/Scheduler/StellaOps.Scheduler.WebService/Hosting/SchedulerPluginHostFactory.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService/Hosting/SchedulerPluginHostFactory.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/Hosting/SchedulerPluginHostFactory.cs index baeb22a0..a3b8a930 100644 --- a/src/StellaOps.Scheduler.WebService/Hosting/SchedulerPluginHostFactory.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/Hosting/SchedulerPluginHostFactory.cs @@ -1,76 +1,76 @@ -using System; -using System.IO; -using StellaOps.Plugin.Hosting; -using StellaOps.Scheduler.WebService.Options; - -namespace StellaOps.Scheduler.WebService.Hosting; - -internal static class SchedulerPluginHostFactory -{ - public static PluginHostOptions Build(SchedulerOptions.PluginOptions options, string contentRootPath) - { - ArgumentNullException.ThrowIfNull(options); - - if (string.IsNullOrWhiteSpace(contentRootPath)) - { - throw new ArgumentException("Content root path must be provided for plug-in discovery.", nameof(contentRootPath)); - } - - var baseDirectory = ResolveBaseDirectory(options.BaseDirectory, contentRootPath); - var pluginsDirectory = ResolvePluginsDirectory(options.Directory, baseDirectory); - - var hostOptions = new PluginHostOptions - { - BaseDirectory = baseDirectory, - PluginsDirectory = pluginsDirectory, - PrimaryPrefix = "StellaOps.Scheduler", - RecursiveSearch = options.RecursiveSearch, - EnsureDirectoryExists = options.EnsureDirectoryExists - }; - - if (options.OrderedPlugins.Count > 0) - { - foreach (var pluginName in options.OrderedPlugins) - { - hostOptions.PluginOrder.Add(pluginName); - } - } - - if (options.SearchPatterns.Count > 0) - { - foreach (var pattern in options.SearchPatterns) - { - hostOptions.SearchPatterns.Add(pattern); - } - } - else - { - hostOptions.SearchPatterns.Add("StellaOps.Scheduler.Plugin.*.dll"); - } - - return hostOptions; - } - - private static string ResolveBaseDirectory(string? configuredBaseDirectory, string contentRootPath) - { - if (string.IsNullOrWhiteSpace(configuredBaseDirectory)) - { - return Path.GetFullPath(Path.Combine(contentRootPath, "..")); - } - - return Path.IsPathRooted(configuredBaseDirectory) - ? configuredBaseDirectory - : Path.GetFullPath(Path.Combine(contentRootPath, configuredBaseDirectory)); - } - - private static string ResolvePluginsDirectory(string? configuredDirectory, string baseDirectory) - { - var pluginsDirectory = string.IsNullOrWhiteSpace(configuredDirectory) - ? Path.Combine("plugins", "scheduler") - : configuredDirectory; - - return Path.IsPathRooted(pluginsDirectory) - ? 
pluginsDirectory - : Path.GetFullPath(Path.Combine(baseDirectory, pluginsDirectory)); - } -} +using System; +using System.IO; +using StellaOps.Plugin.Hosting; +using StellaOps.Scheduler.WebService.Options; + +namespace StellaOps.Scheduler.WebService.Hosting; + +internal static class SchedulerPluginHostFactory +{ + public static PluginHostOptions Build(SchedulerOptions.PluginOptions options, string contentRootPath) + { + ArgumentNullException.ThrowIfNull(options); + + if (string.IsNullOrWhiteSpace(contentRootPath)) + { + throw new ArgumentException("Content root path must be provided for plug-in discovery.", nameof(contentRootPath)); + } + + var baseDirectory = ResolveBaseDirectory(options.BaseDirectory, contentRootPath); + var pluginsDirectory = ResolvePluginsDirectory(options.Directory, baseDirectory); + + var hostOptions = new PluginHostOptions + { + BaseDirectory = baseDirectory, + PluginsDirectory = pluginsDirectory, + PrimaryPrefix = "StellaOps.Scheduler", + RecursiveSearch = options.RecursiveSearch, + EnsureDirectoryExists = options.EnsureDirectoryExists + }; + + if (options.OrderedPlugins.Count > 0) + { + foreach (var pluginName in options.OrderedPlugins) + { + hostOptions.PluginOrder.Add(pluginName); + } + } + + if (options.SearchPatterns.Count > 0) + { + foreach (var pattern in options.SearchPatterns) + { + hostOptions.SearchPatterns.Add(pattern); + } + } + else + { + hostOptions.SearchPatterns.Add("StellaOps.Scheduler.Plugin.*.dll"); + } + + return hostOptions; + } + + private static string ResolveBaseDirectory(string? configuredBaseDirectory, string contentRootPath) + { + if (string.IsNullOrWhiteSpace(configuredBaseDirectory)) + { + return Path.GetFullPath(Path.Combine(contentRootPath, "..")); + } + + return Path.IsPathRooted(configuredBaseDirectory) + ? configuredBaseDirectory + : Path.GetFullPath(Path.Combine(contentRootPath, configuredBaseDirectory)); + } + + private static string ResolvePluginsDirectory(string? configuredDirectory, string baseDirectory) + { + var pluginsDirectory = string.IsNullOrWhiteSpace(configuredDirectory) + ? Path.Combine("plugins", "scheduler") + : configuredDirectory; + + return Path.IsPathRooted(pluginsDirectory) + ? 
pluginsDirectory + : Path.GetFullPath(Path.Combine(baseDirectory, pluginsDirectory)); + } +} diff --git a/src/StellaOps.Scheduler.WebService/ISystemClock.cs b/src/Scheduler/StellaOps.Scheduler.WebService/ISystemClock.cs similarity index 95% rename from src/StellaOps.Scheduler.WebService/ISystemClock.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/ISystemClock.cs index 76adff77..f0474b46 100644 --- a/src/StellaOps.Scheduler.WebService/ISystemClock.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/ISystemClock.cs @@ -1,11 +1,11 @@ -namespace StellaOps.Scheduler.WebService; - -public interface ISystemClock -{ - DateTimeOffset UtcNow { get; } -} - -public sealed class SystemClock : ISystemClock -{ - public DateTimeOffset UtcNow => DateTimeOffset.UtcNow; -} +namespace StellaOps.Scheduler.WebService; + +public interface ISystemClock +{ + DateTimeOffset UtcNow { get; } +} + +public sealed class SystemClock : ISystemClock +{ + public DateTimeOffset UtcNow => DateTimeOffset.UtcNow; +} diff --git a/src/StellaOps.Scheduler.WebService/Options/SchedulerAuthorityOptions.cs b/src/Scheduler/StellaOps.Scheduler.WebService/Options/SchedulerAuthorityOptions.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService/Options/SchedulerAuthorityOptions.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/Options/SchedulerAuthorityOptions.cs index 415ad995..88a30228 100644 --- a/src/StellaOps.Scheduler.WebService/Options/SchedulerAuthorityOptions.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/Options/SchedulerAuthorityOptions.cs @@ -1,71 +1,71 @@ -using System; -using System.Collections.Generic; - -namespace StellaOps.Scheduler.WebService.Options; - -/// <summary> -/// Configuration controlling Authority-backed authentication for the Scheduler WebService. -/// </summary> -public sealed class SchedulerAuthorityOptions -{ - public bool Enabled { get; set; } = false; - - /// <summary> - /// Allows the service to run without enforcing Authority authentication (development/tests only). - /// </summary> - public bool AllowAnonymousFallback { get; set; } - - /// <summary> - /// Authority issuer URL exposed via OpenID discovery. - /// </summary> - public string Issuer { get; set; } = string.Empty; - - public bool RequireHttpsMetadata { get; set; } = true; - - public string? 
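A usage sketch for the SchedulerPluginHostFactory shown above (the type is internal, so this only compiles inside the web service or a friend assembly). The content-root path is an assumed example; the resolved values follow directly from the default ResolveBaseDirectory/ResolvePluginsDirectory behaviour.

using StellaOps.Scheduler.WebService.Hosting;
using StellaOps.Scheduler.WebService.Options;

var pluginOptions = new SchedulerOptions.PluginOptions();            // all defaults
var hostOptions = SchedulerPluginHostFactory.Build(
    pluginOptions,
    contentRootPath: "/opt/stellaops/scheduler");                    // assumed path, for illustration

// With defaults the factory resolves:
//   BaseDirectory    -> /opt/stellaops                 (parent of the content root)
//   PluginsDirectory -> /opt/stellaops/plugins/scheduler
//   SearchPatterns   -> ["StellaOps.Scheduler.Plugin.*.dll"]
//   PrimaryPrefix    -> "StellaOps.Scheduler"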
MetadataAddress { get; set; } - - public int BackchannelTimeoutSeconds { get; set; } = 30; - - public int TokenClockSkewSeconds { get; set; } = 60; - - public IList<string> Audiences { get; } = new List<string>(); - - public IList<string> RequiredScopes { get; } = new List<string>(); - - public IList<string> RequiredTenants { get; } = new List<string>(); - - public IList<string> BypassNetworks { get; } = new List<string>(); - - public void Validate() - { - if (!Enabled) - { - return; - } - - if (string.IsNullOrWhiteSpace(Issuer)) - { - throw new InvalidOperationException("Scheduler Authority issuer must be configured when Authority is enabled."); - } - - if (!Uri.TryCreate(Issuer.Trim(), UriKind.Absolute, out var uri)) - { - throw new InvalidOperationException("Scheduler Authority issuer must be an absolute URI."); - } - - if (RequireHttpsMetadata && !uri.IsLoopback && !string.Equals(uri.Scheme, Uri.UriSchemeHttps, StringComparison.OrdinalIgnoreCase)) - { - throw new InvalidOperationException("Scheduler Authority issuer must use HTTPS unless targeting loopback development."); - } - - if (BackchannelTimeoutSeconds <= 0) - { - throw new InvalidOperationException("Scheduler Authority back-channel timeout must be greater than zero seconds."); - } - - if (TokenClockSkewSeconds < 0 || TokenClockSkewSeconds > 300) - { - throw new InvalidOperationException("Scheduler Authority token clock skew must be between 0 and 300 seconds."); - } - } -} +using System; +using System.Collections.Generic; + +namespace StellaOps.Scheduler.WebService.Options; + +/// <summary> +/// Configuration controlling Authority-backed authentication for the Scheduler WebService. +/// </summary> +public sealed class SchedulerAuthorityOptions +{ + public bool Enabled { get; set; } = false; + + /// <summary> + /// Allows the service to run without enforcing Authority authentication (development/tests only). + /// </summary> + public bool AllowAnonymousFallback { get; set; } + + /// <summary> + /// Authority issuer URL exposed via OpenID discovery. + /// </summary> + public string Issuer { get; set; } = string.Empty; + + public bool RequireHttpsMetadata { get; set; } = true; + + public string? 
MetadataAddress { get; set; } + + public int BackchannelTimeoutSeconds { get; set; } = 30; + + public int TokenClockSkewSeconds { get; set; } = 60; + + public IList<string> Audiences { get; } = new List<string>(); + + public IList<string> RequiredScopes { get; } = new List<string>(); + + public IList<string> RequiredTenants { get; } = new List<string>(); + + public IList<string> BypassNetworks { get; } = new List<string>(); + + public void Validate() + { + if (!Enabled) + { + return; + } + + if (string.IsNullOrWhiteSpace(Issuer)) + { + throw new InvalidOperationException("Scheduler Authority issuer must be configured when Authority is enabled."); + } + + if (!Uri.TryCreate(Issuer.Trim(), UriKind.Absolute, out var uri)) + { + throw new InvalidOperationException("Scheduler Authority issuer must be an absolute URI."); + } + + if (RequireHttpsMetadata && !uri.IsLoopback && !string.Equals(uri.Scheme, Uri.UriSchemeHttps, StringComparison.OrdinalIgnoreCase)) + { + throw new InvalidOperationException("Scheduler Authority issuer must use HTTPS unless targeting loopback development."); + } + + if (BackchannelTimeoutSeconds <= 0) + { + throw new InvalidOperationException("Scheduler Authority back-channel timeout must be greater than zero seconds."); + } + + if (TokenClockSkewSeconds < 0 || TokenClockSkewSeconds > 300) + { + throw new InvalidOperationException("Scheduler Authority token clock skew must be between 0 and 300 seconds."); + } + } +} diff --git a/src/StellaOps.Scheduler.WebService/Options/SchedulerCartographerOptions.cs b/src/Scheduler/StellaOps.Scheduler.WebService/Options/SchedulerCartographerOptions.cs similarity index 95% rename from src/StellaOps.Scheduler.WebService/Options/SchedulerCartographerOptions.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/Options/SchedulerCartographerOptions.cs index b13d5c2d..dd77a4cd 100644 --- a/src/StellaOps.Scheduler.WebService/Options/SchedulerCartographerOptions.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/Options/SchedulerCartographerOptions.cs @@ -1,19 +1,19 @@ -namespace StellaOps.Scheduler.WebService.Options; - -public sealed class SchedulerCartographerOptions -{ - public CartographerWebhookOptions Webhook { get; set; } = new(); -} - -public sealed class CartographerWebhookOptions -{ - public bool Enabled { get; set; } - - public string? Endpoint { get; set; } - - public string? ApiKeyHeader { get; set; } - - public string? ApiKey { get; set; } - - public int TimeoutSeconds { get; set; } = 10; -} +namespace StellaOps.Scheduler.WebService.Options; + +public sealed class SchedulerCartographerOptions +{ + public CartographerWebhookOptions Webhook { get; set; } = new(); +} + +public sealed class CartographerWebhookOptions +{ + public bool Enabled { get; set; } + + public string? Endpoint { get; set; } + + public string? ApiKeyHeader { get; set; } + + public string? 
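A binding-and-validation sketch for the SchedulerAuthorityOptions above. The "Scheduler:Authority" section name and the sample values are assumptions for illustration; only the Validate() behaviour is taken from the code itself.

using Microsoft.Extensions.Configuration;
using StellaOps.Scheduler.WebService.Options;

var configuration = new ConfigurationBuilder()
    .AddInMemoryCollection(new Dictionary<string, string?>
    {
        ["Scheduler:Authority:Enabled"] = "true",
        ["Scheduler:Authority:Issuer"] = "https://authority.internal.example",   // must be absolute HTTPS (or loopback)
        ["Scheduler:Authority:Audiences:0"] = "scheduler",
        ["Scheduler:Authority:TokenClockSkewSeconds"] = "120"                    // must stay within 0-300
    })
    .Build();

var authority = new SchedulerAuthorityOptions();
configuration.GetSection("Scheduler:Authority").Bind(authority);
authority.Validate();   // throws InvalidOperationException on a missing/non-HTTPS issuer or out-of-range timeout/skew values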
ApiKey { get; set; } + + public int TimeoutSeconds { get; set; } = 10; +} diff --git a/src/StellaOps.Scheduler.WebService/Options/SchedulerEventsOptions.cs b/src/Scheduler/StellaOps.Scheduler.WebService/Options/SchedulerEventsOptions.cs similarity index 96% rename from src/StellaOps.Scheduler.WebService/Options/SchedulerEventsOptions.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/Options/SchedulerEventsOptions.cs index 1c067e9a..d27b26f3 100644 --- a/src/StellaOps.Scheduler.WebService/Options/SchedulerEventsOptions.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/Options/SchedulerEventsOptions.cs @@ -1,109 +1,109 @@ -namespace StellaOps.Scheduler.WebService.Options; - -/// <summary> -/// Scheduler WebService event options (outbound + inbound). -/// </summary> -using System; - -public sealed class SchedulerEventsOptions -{ - public GraphJobEventsOptions GraphJobs { get; set; } = new(); - - public SchedulerInboundWebhooksOptions Webhooks { get; set; } = new(); -} - -public sealed class GraphJobEventsOptions -{ - /// <summary> - /// Enables emission of legacy <c>scheduler.graph.job.completed@1</c> events. - /// </summary> - public bool Enabled { get; set; } -} - -public sealed class SchedulerInboundWebhooksOptions -{ - public SchedulerWebhookOptions Feedser { get; set; } = SchedulerWebhookOptions.CreateDefault("feedser"); - - public SchedulerWebhookOptions Vexer { get; set; } = SchedulerWebhookOptions.CreateDefault("vexer"); -} - -public sealed class SchedulerWebhookOptions -{ - private const string DefaultSignatureHeader = "X-Scheduler-Signature"; - - public SchedulerWebhookOptions() - { - SignatureHeader = DefaultSignatureHeader; - } - - public bool Enabled { get; set; } = true; - - /// <summary> - /// Require a client certificate to be presented (mTLS). Optional when HMAC is configured. - /// </summary> - public bool RequireClientCertificate { get; set; } - - /// <summary> - /// Shared secret (Base64 or raw text) for HMAC-SHA256 signatures. Required if <see cref="RequireClientCertificate"/> is false. - /// </summary> - public string? HmacSecret { get; set; } - - /// <summary> - /// Header name carrying the webhook signature (defaults to <c>X-Scheduler-Signature</c>). - /// </summary> - public string SignatureHeader { get; set; } - - /// <summary> - /// Maximum number of accepted requests per sliding window. - /// </summary> - public int RateLimitRequests { get; set; } = 60; - - /// <summary> - /// Sliding window duration in seconds for the rate limiter. - /// </summary> - public int RateLimitWindowSeconds { get; set; } = 60; - - /// <summary> - /// Optional label used for logging/diagnostics; populated via <see cref="CreateDefault"/>. 
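A rough outbound-call sketch built only from the CartographerWebhookOptions shape above; the endpoint, header name, and payload are placeholders, and the real webhook sender lives elsewhere in the service.

using System.Net.Http.Json;
using StellaOps.Scheduler.WebService.Options;

var webhook = new CartographerWebhookOptions
{
    Enabled = true,
    Endpoint = "https://cartographer.internal.example/hooks/graph-jobs",   // placeholder
    ApiKeyHeader = "X-Cartographer-Key",                                   // placeholder header name
    ApiKey = "<secret>",
    TimeoutSeconds = 10
};

if (webhook.Enabled && !string.IsNullOrWhiteSpace(webhook.Endpoint))
{
    using var client = new HttpClient { Timeout = TimeSpan.FromSeconds(webhook.TimeoutSeconds) };
    if (!string.IsNullOrWhiteSpace(webhook.ApiKeyHeader) && !string.IsNullOrWhiteSpace(webhook.ApiKey))
    {
        client.DefaultRequestHeaders.Add(webhook.ApiKeyHeader, webhook.ApiKey);
    }

    // Payload shape is illustrative only.
    using var response = await client.PostAsJsonAsync(webhook.Endpoint, new { jobId = "gj-0001", status = "completed" });
    response.EnsureSuccessStatusCode();
}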
- /// </summary> - public string Name { get; set; } = string.Empty; - - public static SchedulerWebhookOptions CreateDefault(string name) - => new() - { - Name = name, - SignatureHeader = DefaultSignatureHeader, - RateLimitRequests = 120, - RateLimitWindowSeconds = 60 - }; - - public void Validate() - { - if (!Enabled) - { - return; - } - - if (string.IsNullOrWhiteSpace(SignatureHeader)) - { - throw new InvalidOperationException($"Scheduler webhook '{Name}' must specify a signature header when enabled."); - } - - if (!RequireClientCertificate && string.IsNullOrWhiteSpace(HmacSecret)) - { - throw new InvalidOperationException($"Scheduler webhook '{Name}' must configure either HMAC secret or mTLS enforcement."); - } - - if (RateLimitRequests <= 0) - { - throw new InvalidOperationException($"Scheduler webhook '{Name}' must configure a positive rate limit."); - } - - if (RateLimitWindowSeconds <= 0) - { - throw new InvalidOperationException($"Scheduler webhook '{Name}' must configure a rate limit window greater than zero seconds."); - } - } - - public TimeSpan GetRateLimitWindow() => TimeSpan.FromSeconds(RateLimitWindowSeconds <= 0 ? 60 : RateLimitWindowSeconds); -} +namespace StellaOps.Scheduler.WebService.Options; + +/// <summary> +/// Scheduler WebService event options (outbound + inbound). +/// </summary> +using System; + +public sealed class SchedulerEventsOptions +{ + public GraphJobEventsOptions GraphJobs { get; set; } = new(); + + public SchedulerInboundWebhooksOptions Webhooks { get; set; } = new(); +} + +public sealed class GraphJobEventsOptions +{ + /// <summary> + /// Enables emission of legacy <c>scheduler.graph.job.completed@1</c> events. + /// </summary> + public bool Enabled { get; set; } +} + +public sealed class SchedulerInboundWebhooksOptions +{ + public SchedulerWebhookOptions Feedser { get; set; } = SchedulerWebhookOptions.CreateDefault("feedser"); + + public SchedulerWebhookOptions Vexer { get; set; } = SchedulerWebhookOptions.CreateDefault("vexer"); +} + +public sealed class SchedulerWebhookOptions +{ + private const string DefaultSignatureHeader = "X-Scheduler-Signature"; + + public SchedulerWebhookOptions() + { + SignatureHeader = DefaultSignatureHeader; + } + + public bool Enabled { get; set; } = true; + + /// <summary> + /// Require a client certificate to be presented (mTLS). Optional when HMAC is configured. + /// </summary> + public bool RequireClientCertificate { get; set; } + + /// <summary> + /// Shared secret (Base64 or raw text) for HMAC-SHA256 signatures. Required if <see cref="RequireClientCertificate"/> is false. + /// </summary> + public string? HmacSecret { get; set; } + + /// <summary> + /// Header name carrying the webhook signature (defaults to <c>X-Scheduler-Signature</c>). + /// </summary> + public string SignatureHeader { get; set; } + + /// <summary> + /// Maximum number of accepted requests per sliding window. + /// </summary> + public int RateLimitRequests { get; set; } = 60; + + /// <summary> + /// Sliding window duration in seconds for the rate limiter. + /// </summary> + public int RateLimitWindowSeconds { get; set; } = 60; + + /// <summary> + /// Optional label used for logging/diagnostics; populated via <see cref="CreateDefault"/>. 
+ /// </summary> + public string Name { get; set; } = string.Empty; + + public static SchedulerWebhookOptions CreateDefault(string name) + => new() + { + Name = name, + SignatureHeader = DefaultSignatureHeader, + RateLimitRequests = 120, + RateLimitWindowSeconds = 60 + }; + + public void Validate() + { + if (!Enabled) + { + return; + } + + if (string.IsNullOrWhiteSpace(SignatureHeader)) + { + throw new InvalidOperationException($"Scheduler webhook '{Name}' must specify a signature header when enabled."); + } + + if (!RequireClientCertificate && string.IsNullOrWhiteSpace(HmacSecret)) + { + throw new InvalidOperationException($"Scheduler webhook '{Name}' must configure either HMAC secret or mTLS enforcement."); + } + + if (RateLimitRequests <= 0) + { + throw new InvalidOperationException($"Scheduler webhook '{Name}' must configure a positive rate limit."); + } + + if (RateLimitWindowSeconds <= 0) + { + throw new InvalidOperationException($"Scheduler webhook '{Name}' must configure a rate limit window greater than zero seconds."); + } + } + + public TimeSpan GetRateLimitWindow() => TimeSpan.FromSeconds(RateLimitWindowSeconds <= 0 ? 60 : RateLimitWindowSeconds); +} diff --git a/src/StellaOps.Scheduler.WebService/Options/SchedulerOptions.cs b/src/Scheduler/StellaOps.Scheduler.WebService/Options/SchedulerOptions.cs similarity index 96% rename from src/StellaOps.Scheduler.WebService/Options/SchedulerOptions.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/Options/SchedulerOptions.cs index 5a76661e..8312e240 100644 --- a/src/StellaOps.Scheduler.WebService/Options/SchedulerOptions.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/Options/SchedulerOptions.cs @@ -1,70 +1,70 @@ -using System; -using System.Collections.Generic; - -namespace StellaOps.Scheduler.WebService.Options; - -/// <summary> -/// Scheduler host configuration defaults consumed at startup for cross-cutting concerns -/// such as plug-in discovery. -/// </summary> -public sealed class SchedulerOptions -{ - public PluginOptions Plugins { get; set; } = new(); - - public void Validate() - { - Plugins.Validate(); - } - - public sealed class PluginOptions - { - /// <summary> - /// Base directory resolving relative plug-in paths. Defaults to solution root. - /// </summary> - public string? BaseDirectory { get; set; } - - /// <summary> - /// Directory containing plug-in binaries. Defaults to <c>plugins/scheduler</c>. - /// </summary> - public string? Directory { get; set; } - - /// <summary> - /// Controls whether sub-directories are scanned for plug-ins. - /// </summary> - public bool RecursiveSearch { get; set; } = false; - - /// <summary> - /// Ensures the plug-in directory exists on startup. - /// </summary> - public bool EnsureDirectoryExists { get; set; } = true; - - /// <summary> - /// Explicit plug-in discovery patterns (supports globbing). - /// </summary> - public IList<string> SearchPatterns { get; } = new List<string>(); - - /// <summary> - /// Optional ordered plug-in assembly names (without extension). 
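A verification sketch showing how the SchedulerWebhookOptions above might be consumed. It assumes a hex-encoded HMAC-SHA256 over the raw request body and treats HmacSecret as raw text; the service's actual signature encoding and Base64-secret handling are defined by its webhook middleware, not here.

using System.Security.Cryptography;
using System.Text;
using StellaOps.Scheduler.WebService.Options;

static bool VerifyWebhookSignature(SchedulerWebhookOptions options, byte[] requestBody, string presentedSignature)
{
    if (string.IsNullOrWhiteSpace(options.HmacSecret))
    {
        return false;   // mTLS-only configuration; nothing to verify against.
    }

    using var hmac = new HMACSHA256(Encoding.UTF8.GetBytes(options.HmacSecret));
    var expectedHex = Convert.ToHexString(hmac.ComputeHash(requestBody));

    // Constant-time comparison; FixedTimeEquals returns false when lengths differ.
    return CryptographicOperations.FixedTimeEquals(
        Encoding.ASCII.GetBytes(expectedHex),
        Encoding.ASCII.GetBytes(presentedSignature.Trim().ToUpperInvariant()));
}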
- /// </summary> - public IList<string> OrderedPlugins { get; } = new List<string>(); - - public void Validate() - { - foreach (var pattern in SearchPatterns) - { - if (string.IsNullOrWhiteSpace(pattern)) - { - throw new InvalidOperationException("Scheduler plug-in search patterns cannot contain null or whitespace entries."); - } - } - - foreach (var assemblyName in OrderedPlugins) - { - if (string.IsNullOrWhiteSpace(assemblyName)) - { - throw new InvalidOperationException("Scheduler ordered plug-in entries cannot contain null or whitespace values."); - } - } - } - } -} +using System; +using System.Collections.Generic; + +namespace StellaOps.Scheduler.WebService.Options; + +/// <summary> +/// Scheduler host configuration defaults consumed at startup for cross-cutting concerns +/// such as plug-in discovery. +/// </summary> +public sealed class SchedulerOptions +{ + public PluginOptions Plugins { get; set; } = new(); + + public void Validate() + { + Plugins.Validate(); + } + + public sealed class PluginOptions + { + /// <summary> + /// Base directory resolving relative plug-in paths. Defaults to solution root. + /// </summary> + public string? BaseDirectory { get; set; } + + /// <summary> + /// Directory containing plug-in binaries. Defaults to <c>plugins/scheduler</c>. + /// </summary> + public string? Directory { get; set; } + + /// <summary> + /// Controls whether sub-directories are scanned for plug-ins. + /// </summary> + public bool RecursiveSearch { get; set; } = false; + + /// <summary> + /// Ensures the plug-in directory exists on startup. + /// </summary> + public bool EnsureDirectoryExists { get; set; } = true; + + /// <summary> + /// Explicit plug-in discovery patterns (supports globbing). + /// </summary> + public IList<string> SearchPatterns { get; } = new List<string>(); + + /// <summary> + /// Optional ordered plug-in assembly names (without extension). 
+ /// </summary> + public IList<string> OrderedPlugins { get; } = new List<string>(); + + public void Validate() + { + foreach (var pattern in SearchPatterns) + { + if (string.IsNullOrWhiteSpace(pattern)) + { + throw new InvalidOperationException("Scheduler plug-in search patterns cannot contain null or whitespace entries."); + } + } + + foreach (var assemblyName in OrderedPlugins) + { + if (string.IsNullOrWhiteSpace(assemblyName)) + { + throw new InvalidOperationException("Scheduler ordered plug-in entries cannot contain null or whitespace values."); + } + } + } + } +} diff --git a/src/StellaOps.Scheduler.WebService/PolicyRuns/IPolicyRunService.cs b/src/Scheduler/StellaOps.Scheduler.WebService/PolicyRuns/IPolicyRunService.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService/PolicyRuns/IPolicyRunService.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/PolicyRuns/IPolicyRunService.cs index 36841ef6..4c6d53c1 100644 --- a/src/StellaOps.Scheduler.WebService/PolicyRuns/IPolicyRunService.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/PolicyRuns/IPolicyRunService.cs @@ -1,12 +1,12 @@ -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.WebService.PolicyRuns; - -internal interface IPolicyRunService -{ - Task<PolicyRunStatus> EnqueueAsync(string tenantId, PolicyRunRequest request, CancellationToken cancellationToken); - - Task<IReadOnlyList<PolicyRunStatus>> ListAsync(string tenantId, PolicyRunQueryOptions options, CancellationToken cancellationToken); - - Task<PolicyRunStatus?> GetAsync(string tenantId, string runId, CancellationToken cancellationToken); -} +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.WebService.PolicyRuns; + +internal interface IPolicyRunService +{ + Task<PolicyRunStatus> EnqueueAsync(string tenantId, PolicyRunRequest request, CancellationToken cancellationToken); + + Task<IReadOnlyList<PolicyRunStatus>> ListAsync(string tenantId, PolicyRunQueryOptions options, CancellationToken cancellationToken); + + Task<PolicyRunStatus?> GetAsync(string tenantId, string runId, CancellationToken cancellationToken); +} diff --git a/src/StellaOps.Scheduler.WebService/PolicyRuns/InMemoryPolicyRunService.cs b/src/Scheduler/StellaOps.Scheduler.WebService/PolicyRuns/InMemoryPolicyRunService.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService/PolicyRuns/InMemoryPolicyRunService.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/PolicyRuns/InMemoryPolicyRunService.cs index d305a112..0be788bf 100644 --- a/src/StellaOps.Scheduler.WebService/PolicyRuns/InMemoryPolicyRunService.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/PolicyRuns/InMemoryPolicyRunService.cs @@ -1,138 +1,138 @@ -using System.Collections.Concurrent; -using System.Collections.Immutable; -using System.ComponentModel.DataAnnotations; -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.WebService.PolicyRuns; - -internal sealed class InMemoryPolicyRunService : IPolicyRunService -{ - private readonly ConcurrentDictionary<string, PolicyRunStatus> _runs = new(StringComparer.Ordinal); - private readonly List<PolicyRunStatus> _orderedRuns = new(); - private readonly object _gate = new(); - - public Task<PolicyRunStatus> EnqueueAsync(string tenantId, PolicyRunRequest request, CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); - ArgumentNullException.ThrowIfNull(request); - cancellationToken.ThrowIfCancellationRequested(); - - var runId = string.IsNullOrWhiteSpace(request.RunId) - ? 
GenerateRunId(request.PolicyId, request.QueuedAt ?? DateTimeOffset.UtcNow) - : request.RunId; - - var queuedAt = request.QueuedAt ?? DateTimeOffset.UtcNow; - - var status = new PolicyRunStatus( - runId, - tenantId, - request.PolicyId ?? throw new ValidationException("policyId must be provided."), - request.PolicyVersion ?? throw new ValidationException("policyVersion must be provided."), - request.Mode, - PolicyRunExecutionStatus.Queued, - request.Priority, - queuedAt, - PolicyRunStats.Empty, - request.Inputs ?? PolicyRunInputs.Empty, - null, - null, - null, - null, - null, - 0, - null, - null, - request.Metadata ?? ImmutableSortedDictionary<string, string>.Empty, - SchedulerSchemaVersions.PolicyRunStatus); - - lock (_gate) - { - if (_runs.TryGetValue(runId, out var existing)) - { - return Task.FromResult(existing); - } - - _runs[runId] = status; - _orderedRuns.Add(status); - } - - return Task.FromResult(status); - } - - public Task<IReadOnlyList<PolicyRunStatus>> ListAsync(string tenantId, PolicyRunQueryOptions options, CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); - ArgumentNullException.ThrowIfNull(options); - cancellationToken.ThrowIfCancellationRequested(); - - List<PolicyRunStatus> snapshot; - lock (_gate) - { - snapshot = _orderedRuns - .Where(run => string.Equals(run.TenantId, tenantId, StringComparison.Ordinal)) - .ToList(); - } - - if (options.PolicyId is { Length: > 0 } policyId) - { - snapshot = snapshot - .Where(run => string.Equals(run.PolicyId, policyId, StringComparison.OrdinalIgnoreCase)) - .ToList(); - } - - if (options.Mode is { } mode) - { - snapshot = snapshot - .Where(run => run.Mode == mode) - .ToList(); - } - - if (options.Status is { } status) - { - snapshot = snapshot - .Where(run => run.Status == status) - .ToList(); - } - - if (options.QueuedAfter is { } since) - { - snapshot = snapshot - .Where(run => run.QueuedAt >= since) - .ToList(); - } - - var result = snapshot - .OrderByDescending(run => run.QueuedAt) - .ThenBy(run => run.RunId, StringComparer.Ordinal) - .Take(options.Limit) - .ToList(); - - return Task.FromResult<IReadOnlyList<PolicyRunStatus>>(result); - } - - public Task<PolicyRunStatus?> GetAsync(string tenantId, string runId, CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); - ArgumentException.ThrowIfNullOrWhiteSpace(runId); - cancellationToken.ThrowIfCancellationRequested(); - - if (!_runs.TryGetValue(runId, out var run)) - { - return Task.FromResult<PolicyRunStatus?>(null); - } - - if (!string.Equals(run.TenantId, tenantId, StringComparison.Ordinal)) - { - return Task.FromResult<PolicyRunStatus?>(null); - } - - return Task.FromResult<PolicyRunStatus?>(run); - } - - private static string GenerateRunId(string policyId, DateTimeOffset timestamp) - { - var normalizedPolicyId = string.IsNullOrWhiteSpace(policyId) ? 
"policy" : policyId.Trim(); - var suffix = Guid.NewGuid().ToString("N")[..8]; - return $"run:{normalizedPolicyId}:{timestamp:yyyyMMddTHHmmssZ}:{suffix}"; - } -} +using System.Collections.Concurrent; +using System.Collections.Immutable; +using System.ComponentModel.DataAnnotations; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.WebService.PolicyRuns; + +internal sealed class InMemoryPolicyRunService : IPolicyRunService +{ + private readonly ConcurrentDictionary<string, PolicyRunStatus> _runs = new(StringComparer.Ordinal); + private readonly List<PolicyRunStatus> _orderedRuns = new(); + private readonly object _gate = new(); + + public Task<PolicyRunStatus> EnqueueAsync(string tenantId, PolicyRunRequest request, CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentNullException.ThrowIfNull(request); + cancellationToken.ThrowIfCancellationRequested(); + + var runId = string.IsNullOrWhiteSpace(request.RunId) + ? GenerateRunId(request.PolicyId, request.QueuedAt ?? DateTimeOffset.UtcNow) + : request.RunId; + + var queuedAt = request.QueuedAt ?? DateTimeOffset.UtcNow; + + var status = new PolicyRunStatus( + runId, + tenantId, + request.PolicyId ?? throw new ValidationException("policyId must be provided."), + request.PolicyVersion ?? throw new ValidationException("policyVersion must be provided."), + request.Mode, + PolicyRunExecutionStatus.Queued, + request.Priority, + queuedAt, + PolicyRunStats.Empty, + request.Inputs ?? PolicyRunInputs.Empty, + null, + null, + null, + null, + null, + 0, + null, + null, + request.Metadata ?? ImmutableSortedDictionary<string, string>.Empty, + SchedulerSchemaVersions.PolicyRunStatus); + + lock (_gate) + { + if (_runs.TryGetValue(runId, out var existing)) + { + return Task.FromResult(existing); + } + + _runs[runId] = status; + _orderedRuns.Add(status); + } + + return Task.FromResult(status); + } + + public Task<IReadOnlyList<PolicyRunStatus>> ListAsync(string tenantId, PolicyRunQueryOptions options, CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentNullException.ThrowIfNull(options); + cancellationToken.ThrowIfCancellationRequested(); + + List<PolicyRunStatus> snapshot; + lock (_gate) + { + snapshot = _orderedRuns + .Where(run => string.Equals(run.TenantId, tenantId, StringComparison.Ordinal)) + .ToList(); + } + + if (options.PolicyId is { Length: > 0 } policyId) + { + snapshot = snapshot + .Where(run => string.Equals(run.PolicyId, policyId, StringComparison.OrdinalIgnoreCase)) + .ToList(); + } + + if (options.Mode is { } mode) + { + snapshot = snapshot + .Where(run => run.Mode == mode) + .ToList(); + } + + if (options.Status is { } status) + { + snapshot = snapshot + .Where(run => run.Status == status) + .ToList(); + } + + if (options.QueuedAfter is { } since) + { + snapshot = snapshot + .Where(run => run.QueuedAt >= since) + .ToList(); + } + + var result = snapshot + .OrderByDescending(run => run.QueuedAt) + .ThenBy(run => run.RunId, StringComparer.Ordinal) + .Take(options.Limit) + .ToList(); + + return Task.FromResult<IReadOnlyList<PolicyRunStatus>>(result); + } + + public Task<PolicyRunStatus?> GetAsync(string tenantId, string runId, CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentException.ThrowIfNullOrWhiteSpace(runId); + cancellationToken.ThrowIfCancellationRequested(); + + if (!_runs.TryGetValue(runId, out var run)) + { + return Task.FromResult<PolicyRunStatus?>(null); + 
} + + if (!string.Equals(run.TenantId, tenantId, StringComparison.Ordinal)) + { + return Task.FromResult<PolicyRunStatus?>(null); + } + + return Task.FromResult<PolicyRunStatus?>(run); + } + + private static string GenerateRunId(string policyId, DateTimeOffset timestamp) + { + var normalizedPolicyId = string.IsNullOrWhiteSpace(policyId) ? "policy" : policyId.Trim(); + var suffix = Guid.NewGuid().ToString("N")[..8]; + return $"run:{normalizedPolicyId}:{timestamp:yyyyMMddTHHmmssZ}:{suffix}"; + } +} diff --git a/src/StellaOps.Scheduler.WebService/PolicyRuns/PolicyRunEndpointExtensions.cs b/src/Scheduler/StellaOps.Scheduler.WebService/PolicyRuns/PolicyRunEndpointExtensions.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService/PolicyRuns/PolicyRunEndpointExtensions.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/PolicyRuns/PolicyRunEndpointExtensions.cs index 009d8112..fc66aaa7 100644 --- a/src/StellaOps.Scheduler.WebService/PolicyRuns/PolicyRunEndpointExtensions.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/PolicyRuns/PolicyRunEndpointExtensions.cs @@ -1,197 +1,197 @@ -using System.Collections.Immutable; -using System.ComponentModel.DataAnnotations; -using System.Text.Json.Serialization; -using Microsoft.AspNetCore.Http; -using Microsoft.AspNetCore.Mvc; -using StellaOps.Auth.Abstractions; -using StellaOps.Scheduler.Models; -using StellaOps.Scheduler.WebService.Auth; - -namespace StellaOps.Scheduler.WebService.PolicyRuns; - -internal static class PolicyRunEndpointExtensions -{ - private const string Scope = StellaOpsScopes.PolicyRun; - - public static void MapPolicyRunEndpoints(this IEndpointRouteBuilder builder) - { - var group = builder.MapGroup("/api/v1/scheduler/policy/runs"); - - group.MapGet("/", ListPolicyRunsAsync); - group.MapGet("/{runId}", GetPolicyRunAsync); - group.MapPost("/", CreatePolicyRunAsync); - } - - internal static async Task<IResult> ListPolicyRunsAsync( - HttpContext httpContext, - [FromServices] ITenantContextAccessor tenantAccessor, - [FromServices] IScopeAuthorizer scopeAuthorizer, - [FromServices] IPolicyRunService policyRunService, - CancellationToken cancellationToken) - { - try - { - scopeAuthorizer.EnsureScope(httpContext, Scope); - var tenant = tenantAccessor.GetTenant(httpContext); - var options = PolicyRunQueryOptions.FromRequest(httpContext.Request); - - var runs = await policyRunService - .ListAsync(tenant.TenantId, options, cancellationToken) - .ConfigureAwait(false); - - return Results.Ok(new PolicyRunCollectionResponse(runs)); - } - catch (UnauthorizedAccessException ex) - { - return Results.Json(new { error = ex.Message }, statusCode: StatusCodes.Status401Unauthorized); - } - catch (InvalidOperationException ex) - { - return Results.Json(new { error = ex.Message }, statusCode: StatusCodes.Status403Forbidden); - } - catch (ValidationException ex) - { - return Results.BadRequest(new { error = ex.Message }); - } - } - - internal static async Task<IResult> GetPolicyRunAsync( - HttpContext httpContext, - string runId, - [FromServices] ITenantContextAccessor tenantAccessor, - [FromServices] IScopeAuthorizer scopeAuthorizer, - [FromServices] IPolicyRunService policyRunService, - CancellationToken cancellationToken) - { - try - { - scopeAuthorizer.EnsureScope(httpContext, Scope); - var tenant = tenantAccessor.GetTenant(httpContext); - var run = await policyRunService - .GetAsync(tenant.TenantId, runId, cancellationToken) - .ConfigureAwait(false); - - return run is null - ? 
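A hypothetical composition sketch for the MapPolicyRunEndpoints extension above; the real Program.cs is not part of this hunk, and the registrations shown as comments (tenant accessor, scope authorizer, run service, TimeProvider) are assumptions about what the host supplies.

using StellaOps.Scheduler.WebService.PolicyRuns;

var builder = WebApplication.CreateBuilder(args);
// builder.Services.AddSingleton<ITenantContextAccessor, ...>();                   // supplied elsewhere
// builder.Services.AddSingleton<IScopeAuthorizer, ...>();                         // supplied elsewhere
// builder.Services.AddSingleton<IPolicyRunService, InMemoryPolicyRunService>();   // e.g. for local testing
// builder.Services.AddSingleton(TimeProvider.System);

var app = builder.Build();
app.MapPolicyRunEndpoints();   // GET /api/v1/scheduler/policy/runs, GET .../{runId}, POST .../
app.Run();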
Results.NotFound() - : Results.Ok(new PolicyRunResponse(run)); - } - catch (UnauthorizedAccessException ex) - { - return Results.Json(new { error = ex.Message }, statusCode: StatusCodes.Status401Unauthorized); - } - catch (InvalidOperationException ex) - { - return Results.Json(new { error = ex.Message }, statusCode: StatusCodes.Status403Forbidden); - } - catch (ValidationException ex) - { - return Results.BadRequest(new { error = ex.Message }); - } - } - - internal static async Task<IResult> CreatePolicyRunAsync( - HttpContext httpContext, - PolicyRunCreateRequest request, - [FromServices] ITenantContextAccessor tenantAccessor, - [FromServices] IScopeAuthorizer scopeAuthorizer, - [FromServices] IPolicyRunService policyRunService, - [FromServices] TimeProvider timeProvider, - CancellationToken cancellationToken) - { - try - { - scopeAuthorizer.EnsureScope(httpContext, Scope); - var tenant = tenantAccessor.GetTenant(httpContext); - var actorId = SchedulerEndpointHelpers.ResolveActorId(httpContext); - var now = timeProvider.GetUtcNow(); - - if (request.PolicyVersion is null || request.PolicyVersion <= 0) - { - throw new ValidationException("policyVersion must be provided and greater than zero."); - } - - if (string.IsNullOrWhiteSpace(request.PolicyId)) - { - throw new ValidationException("policyId must be provided."); - } - - var normalizedMetadata = NormalizeMetadata(request.Metadata); - - var policyRunRequest = new PolicyRunRequest( - tenant.TenantId, - request.PolicyId, - request.PolicyVersion, - request.Mode, - request.Priority, - request.RunId, - now, - actorId, - request.CorrelationId, - normalizedMetadata, - request.Inputs ?? PolicyRunInputs.Empty, - request.SchemaVersion); - - var status = await policyRunService - .EnqueueAsync(tenant.TenantId, policyRunRequest, cancellationToken) - .ConfigureAwait(false); - - return Results.Created($"/api/v1/scheduler/policy/runs/{status.RunId}", new PolicyRunResponse(status)); - } - catch (UnauthorizedAccessException ex) - { - return Results.Json(new { error = ex.Message }, statusCode: StatusCodes.Status401Unauthorized); - } - catch (InvalidOperationException ex) - { - return Results.Json(new { error = ex.Message }, statusCode: StatusCodes.Status403Forbidden); - } - catch (ValidationException ex) - { - return Results.BadRequest(new { error = ex.Message }); - } - } - - internal sealed record PolicyRunCollectionResponse( - [property: JsonPropertyName("runs")] IReadOnlyList<PolicyRunStatus> Runs); - - internal sealed record PolicyRunResponse( - [property: JsonPropertyName("run")] PolicyRunStatus Run); - - internal sealed record PolicyRunCreateRequest( - [property: JsonPropertyName("schemaVersion")] string? SchemaVersion, - [property: JsonPropertyName("policyId")] string PolicyId, - [property: JsonPropertyName("policyVersion")] int? PolicyVersion, - [property: JsonPropertyName("mode")] PolicyRunMode Mode = PolicyRunMode.Incremental, - [property: JsonPropertyName("priority")] PolicyRunPriority Priority = PolicyRunPriority.Normal, - [property: JsonPropertyName("runId")] string? RunId = null, - [property: JsonPropertyName("correlationId")] string? CorrelationId = null, - [property: JsonPropertyName("metadata")] IReadOnlyDictionary<string, string>? Metadata = null, - [property: JsonPropertyName("inputs")] PolicyRunInputs? Inputs = null); - - private static ImmutableSortedDictionary<string, string> NormalizeMetadata(IReadOnlyDictionary<string, string>? 
metadata) - { - if (metadata is null || metadata.Count == 0) - { - return ImmutableSortedDictionary<string, string>.Empty; - } - - var builder = ImmutableSortedDictionary.CreateBuilder<string, string>(StringComparer.Ordinal); - foreach (var pair in metadata) - { - var key = pair.Key?.Trim(); - var value = pair.Value?.Trim(); - if (string.IsNullOrEmpty(key) || string.IsNullOrEmpty(value)) - { - continue; - } - - var normalizedKey = key.ToLowerInvariant(); - if (!builder.ContainsKey(normalizedKey)) - { - builder[normalizedKey] = value; - } - } - - return builder.ToImmutable(); - } -} +using System.Collections.Immutable; +using System.ComponentModel.DataAnnotations; +using System.Text.Json.Serialization; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Mvc; +using StellaOps.Auth.Abstractions; +using StellaOps.Scheduler.Models; +using StellaOps.Scheduler.WebService.Auth; + +namespace StellaOps.Scheduler.WebService.PolicyRuns; + +internal static class PolicyRunEndpointExtensions +{ + private const string Scope = StellaOpsScopes.PolicyRun; + + public static void MapPolicyRunEndpoints(this IEndpointRouteBuilder builder) + { + var group = builder.MapGroup("/api/v1/scheduler/policy/runs"); + + group.MapGet("/", ListPolicyRunsAsync); + group.MapGet("/{runId}", GetPolicyRunAsync); + group.MapPost("/", CreatePolicyRunAsync); + } + + internal static async Task<IResult> ListPolicyRunsAsync( + HttpContext httpContext, + [FromServices] ITenantContextAccessor tenantAccessor, + [FromServices] IScopeAuthorizer scopeAuthorizer, + [FromServices] IPolicyRunService policyRunService, + CancellationToken cancellationToken) + { + try + { + scopeAuthorizer.EnsureScope(httpContext, Scope); + var tenant = tenantAccessor.GetTenant(httpContext); + var options = PolicyRunQueryOptions.FromRequest(httpContext.Request); + + var runs = await policyRunService + .ListAsync(tenant.TenantId, options, cancellationToken) + .ConfigureAwait(false); + + return Results.Ok(new PolicyRunCollectionResponse(runs)); + } + catch (UnauthorizedAccessException ex) + { + return Results.Json(new { error = ex.Message }, statusCode: StatusCodes.Status401Unauthorized); + } + catch (InvalidOperationException ex) + { + return Results.Json(new { error = ex.Message }, statusCode: StatusCodes.Status403Forbidden); + } + catch (ValidationException ex) + { + return Results.BadRequest(new { error = ex.Message }); + } + } + + internal static async Task<IResult> GetPolicyRunAsync( + HttpContext httpContext, + string runId, + [FromServices] ITenantContextAccessor tenantAccessor, + [FromServices] IScopeAuthorizer scopeAuthorizer, + [FromServices] IPolicyRunService policyRunService, + CancellationToken cancellationToken) + { + try + { + scopeAuthorizer.EnsureScope(httpContext, Scope); + var tenant = tenantAccessor.GetTenant(httpContext); + var run = await policyRunService + .GetAsync(tenant.TenantId, runId, cancellationToken) + .ConfigureAwait(false); + + return run is null + ? 
Results.NotFound() + : Results.Ok(new PolicyRunResponse(run)); + } + catch (UnauthorizedAccessException ex) + { + return Results.Json(new { error = ex.Message }, statusCode: StatusCodes.Status401Unauthorized); + } + catch (InvalidOperationException ex) + { + return Results.Json(new { error = ex.Message }, statusCode: StatusCodes.Status403Forbidden); + } + catch (ValidationException ex) + { + return Results.BadRequest(new { error = ex.Message }); + } + } + + internal static async Task<IResult> CreatePolicyRunAsync( + HttpContext httpContext, + PolicyRunCreateRequest request, + [FromServices] ITenantContextAccessor tenantAccessor, + [FromServices] IScopeAuthorizer scopeAuthorizer, + [FromServices] IPolicyRunService policyRunService, + [FromServices] TimeProvider timeProvider, + CancellationToken cancellationToken) + { + try + { + scopeAuthorizer.EnsureScope(httpContext, Scope); + var tenant = tenantAccessor.GetTenant(httpContext); + var actorId = SchedulerEndpointHelpers.ResolveActorId(httpContext); + var now = timeProvider.GetUtcNow(); + + if (request.PolicyVersion is null || request.PolicyVersion <= 0) + { + throw new ValidationException("policyVersion must be provided and greater than zero."); + } + + if (string.IsNullOrWhiteSpace(request.PolicyId)) + { + throw new ValidationException("policyId must be provided."); + } + + var normalizedMetadata = NormalizeMetadata(request.Metadata); + + var policyRunRequest = new PolicyRunRequest( + tenant.TenantId, + request.PolicyId, + request.PolicyVersion, + request.Mode, + request.Priority, + request.RunId, + now, + actorId, + request.CorrelationId, + normalizedMetadata, + request.Inputs ?? PolicyRunInputs.Empty, + request.SchemaVersion); + + var status = await policyRunService + .EnqueueAsync(tenant.TenantId, policyRunRequest, cancellationToken) + .ConfigureAwait(false); + + return Results.Created($"/api/v1/scheduler/policy/runs/{status.RunId}", new PolicyRunResponse(status)); + } + catch (UnauthorizedAccessException ex) + { + return Results.Json(new { error = ex.Message }, statusCode: StatusCodes.Status401Unauthorized); + } + catch (InvalidOperationException ex) + { + return Results.Json(new { error = ex.Message }, statusCode: StatusCodes.Status403Forbidden); + } + catch (ValidationException ex) + { + return Results.BadRequest(new { error = ex.Message }); + } + } + + internal sealed record PolicyRunCollectionResponse( + [property: JsonPropertyName("runs")] IReadOnlyList<PolicyRunStatus> Runs); + + internal sealed record PolicyRunResponse( + [property: JsonPropertyName("run")] PolicyRunStatus Run); + + internal sealed record PolicyRunCreateRequest( + [property: JsonPropertyName("schemaVersion")] string? SchemaVersion, + [property: JsonPropertyName("policyId")] string PolicyId, + [property: JsonPropertyName("policyVersion")] int? PolicyVersion, + [property: JsonPropertyName("mode")] PolicyRunMode Mode = PolicyRunMode.Incremental, + [property: JsonPropertyName("priority")] PolicyRunPriority Priority = PolicyRunPriority.Normal, + [property: JsonPropertyName("runId")] string? RunId = null, + [property: JsonPropertyName("correlationId")] string? CorrelationId = null, + [property: JsonPropertyName("metadata")] IReadOnlyDictionary<string, string>? Metadata = null, + [property: JsonPropertyName("inputs")] PolicyRunInputs? Inputs = null); + + private static ImmutableSortedDictionary<string, string> NormalizeMetadata(IReadOnlyDictionary<string, string>? 
metadata) + { + if (metadata is null || metadata.Count == 0) + { + return ImmutableSortedDictionary<string, string>.Empty; + } + + var builder = ImmutableSortedDictionary.CreateBuilder<string, string>(StringComparer.Ordinal); + foreach (var pair in metadata) + { + var key = pair.Key?.Trim(); + var value = pair.Value?.Trim(); + if (string.IsNullOrEmpty(key) || string.IsNullOrEmpty(value)) + { + continue; + } + + var normalizedKey = key.ToLowerInvariant(); + if (!builder.ContainsKey(normalizedKey)) + { + builder[normalizedKey] = value; + } + } + + return builder.ToImmutable(); + } +} diff --git a/src/StellaOps.Scheduler.WebService/PolicyRuns/PolicyRunQueryOptions.cs b/src/Scheduler/StellaOps.Scheduler.WebService/PolicyRuns/PolicyRunQueryOptions.cs similarity index 96% rename from src/StellaOps.Scheduler.WebService/PolicyRuns/PolicyRunQueryOptions.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/PolicyRuns/PolicyRunQueryOptions.cs index 5eba4060..1cd648c7 100644 --- a/src/StellaOps.Scheduler.WebService/PolicyRuns/PolicyRunQueryOptions.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/PolicyRuns/PolicyRunQueryOptions.cs @@ -1,120 +1,120 @@ -using System.ComponentModel.DataAnnotations; -using System.Globalization; -using Microsoft.AspNetCore.Http; -using Microsoft.Extensions.Primitives; -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.WebService.PolicyRuns; - -internal sealed class PolicyRunQueryOptions -{ - private const int DefaultLimit = 50; - private const int MaxLimit = 200; - - private PolicyRunQueryOptions() - { - } - - public string? PolicyId { get; private set; } - - public PolicyRunMode? Mode { get; private set; } - - public PolicyRunExecutionStatus? Status { get; private set; } - - public DateTimeOffset? QueuedAfter { get; private set; } - - public int Limit { get; private set; } = DefaultLimit; - - public static PolicyRunQueryOptions FromRequest(HttpRequest request) - { - ArgumentNullException.ThrowIfNull(request); - - var options = new PolicyRunQueryOptions(); - var query = request.Query; - - if (query.TryGetValue("policyId", out var policyValues)) - { - var policyId = policyValues.ToString().Trim(); - if (!string.IsNullOrEmpty(policyId)) - { - options.PolicyId = policyId; - } - } - - options.Mode = ParseEnum<PolicyRunMode>(query, "mode"); - options.Status = ParseEnum<PolicyRunExecutionStatus>(query, "status"); - options.QueuedAfter = ParseTimestamp(query); - options.Limit = ParseLimit(query); - - return options; - } - - private static TEnum? ParseEnum<TEnum>(IQueryCollection query, string key) - where TEnum : struct, Enum - { - if (!query.TryGetValue(key, out var values) || values == StringValues.Empty) - { - return null; - } - - var value = values.ToString().Trim(); - if (string.IsNullOrEmpty(value)) - { - return null; - } - - if (Enum.TryParse<TEnum>(value, ignoreCase: true, out var parsed)) - { - return parsed; - } - - throw new ValidationException($"Value '{value}' is not valid for parameter '{key}'."); - } - - private static DateTimeOffset? 
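A client-side sketch of the POST contract accepted by CreatePolicyRunAsync above; property names mirror the JsonPropertyName attributes on PolicyRunCreateRequest. The base address, bearer token, and enum-as-string values are assumptions (the host's JSON options must accept enum names), and tenant resolution via ITenantContextAccessor (for example a tenant header) is not shown.

using System.Net.Http.Headers;
using System.Net.Http.Json;

using var http = new HttpClient { BaseAddress = new Uri("https://scheduler.internal.example") };   // placeholder
// The token must carry the StellaOpsScopes.PolicyRun scope enforced by the endpoint.
http.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", "<access-token>");

var response = await http.PostAsJsonAsync("/api/v1/scheduler/policy/runs", new
{
    policyId = "P-7",
    policyVersion = 4,
    mode = "incremental",          // PolicyRunMode, assuming string enum conversion
    priority = "normal",           // PolicyRunPriority, assuming string enum conversion
    correlationId = "req-42",
    metadata = new Dictionary<string, string> { ["trigger"] = "manual" }
});
// 201 Created with { "run": { ... } } on success; 400 on validation errors, 401/403 on auth failures.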
ParseTimestamp(IQueryCollection query) - { - if (!query.TryGetValue("since", out var values) || values == StringValues.Empty) - { - return null; - } - - var candidate = values.ToString().Trim(); - if (string.IsNullOrEmpty(candidate)) - { - return null; - } - - if (DateTimeOffset.TryParse(candidate, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal, out var timestamp)) - { - return timestamp.ToUniversalTime(); - } - - throw new ValidationException($"Value '{candidate}' is not a valid ISO-8601 timestamp."); - } - - private static int ParseLimit(IQueryCollection query) - { - if (!query.TryGetValue("limit", out var values) || values == StringValues.Empty) - { - return DefaultLimit; - } - - var candidate = values.ToString().Trim(); - if (string.IsNullOrEmpty(candidate)) - { - return DefaultLimit; - } - - if (!int.TryParse(candidate, NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsed) || parsed <= 0) - { - throw new ValidationException("Parameter 'limit' must be a positive integer."); - } - - if (parsed > MaxLimit) - { - throw new ValidationException($"Parameter 'limit' must not exceed {MaxLimit}."); - } - - return parsed; - } -} +using System.ComponentModel.DataAnnotations; +using System.Globalization; +using Microsoft.AspNetCore.Http; +using Microsoft.Extensions.Primitives; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.WebService.PolicyRuns; + +internal sealed class PolicyRunQueryOptions +{ + private const int DefaultLimit = 50; + private const int MaxLimit = 200; + + private PolicyRunQueryOptions() + { + } + + public string? PolicyId { get; private set; } + + public PolicyRunMode? Mode { get; private set; } + + public PolicyRunExecutionStatus? Status { get; private set; } + + public DateTimeOffset? QueuedAfter { get; private set; } + + public int Limit { get; private set; } = DefaultLimit; + + public static PolicyRunQueryOptions FromRequest(HttpRequest request) + { + ArgumentNullException.ThrowIfNull(request); + + var options = new PolicyRunQueryOptions(); + var query = request.Query; + + if (query.TryGetValue("policyId", out var policyValues)) + { + var policyId = policyValues.ToString().Trim(); + if (!string.IsNullOrEmpty(policyId)) + { + options.PolicyId = policyId; + } + } + + options.Mode = ParseEnum<PolicyRunMode>(query, "mode"); + options.Status = ParseEnum<PolicyRunExecutionStatus>(query, "status"); + options.QueuedAfter = ParseTimestamp(query); + options.Limit = ParseLimit(query); + + return options; + } + + private static TEnum? ParseEnum<TEnum>(IQueryCollection query, string key) + where TEnum : struct, Enum + { + if (!query.TryGetValue(key, out var values) || values == StringValues.Empty) + { + return null; + } + + var value = values.ToString().Trim(); + if (string.IsNullOrEmpty(value)) + { + return null; + } + + if (Enum.TryParse<TEnum>(value, ignoreCase: true, out var parsed)) + { + return parsed; + } + + throw new ValidationException($"Value '{value}' is not valid for parameter '{key}'."); + } + + private static DateTimeOffset? 
ParseTimestamp(IQueryCollection query) + { + if (!query.TryGetValue("since", out var values) || values == StringValues.Empty) + { + return null; + } + + var candidate = values.ToString().Trim(); + if (string.IsNullOrEmpty(candidate)) + { + return null; + } + + if (DateTimeOffset.TryParse(candidate, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal, out var timestamp)) + { + return timestamp.ToUniversalTime(); + } + + throw new ValidationException($"Value '{candidate}' is not a valid ISO-8601 timestamp."); + } + + private static int ParseLimit(IQueryCollection query) + { + if (!query.TryGetValue("limit", out var values) || values == StringValues.Empty) + { + return DefaultLimit; + } + + var candidate = values.ToString().Trim(); + if (string.IsNullOrEmpty(candidate)) + { + return DefaultLimit; + } + + if (!int.TryParse(candidate, NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsed) || parsed <= 0) + { + throw new ValidationException("Parameter 'limit' must be a positive integer."); + } + + if (parsed > MaxLimit) + { + throw new ValidationException($"Parameter 'limit' must not exceed {MaxLimit}."); + } + + return parsed; + } +} diff --git a/src/StellaOps.Scheduler.WebService/PolicyRuns/PolicyRunService.cs b/src/Scheduler/StellaOps.Scheduler.WebService/PolicyRuns/PolicyRunService.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService/PolicyRuns/PolicyRunService.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/PolicyRuns/PolicyRunService.cs index a17d27eb..2d04da4a 100644 --- a/src/StellaOps.Scheduler.WebService/PolicyRuns/PolicyRunService.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/PolicyRuns/PolicyRunService.cs @@ -1,213 +1,213 @@ -using System; -using System.Collections.Generic; -using System.Collections.Immutable; -using System.ComponentModel.DataAnnotations; -using System.Linq; -using Microsoft.Extensions.Logging; -using StellaOps.Scheduler.Models; -using StellaOps.Scheduler.Storage.Mongo.Repositories; -using StellaOps.Scheduler.WebService; - -namespace StellaOps.Scheduler.WebService.PolicyRuns; - -internal sealed class PolicyRunService : IPolicyRunService -{ - private readonly IPolicyRunJobRepository _repository; - private readonly TimeProvider _timeProvider; - private readonly ILogger<PolicyRunService> _logger; - - public PolicyRunService( - IPolicyRunJobRepository repository, - TimeProvider timeProvider, - ILogger<PolicyRunService> logger) - { - _repository = repository ?? throw new ArgumentNullException(nameof(repository)); - _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); - _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - } - - public async Task<PolicyRunStatus> EnqueueAsync(string tenantId, PolicyRunRequest request, CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); - ArgumentNullException.ThrowIfNull(request); - cancellationToken.ThrowIfCancellationRequested(); - - var now = _timeProvider.GetUtcNow(); - var runId = string.IsNullOrWhiteSpace(request.RunId) - ? GenerateRunId(request.PolicyId, now) - : request.RunId!; - - // Idempotency: return existing job if present when a runId was supplied. 
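An illustrative list query assembled from the parameters PolicyRunQueryOptions.FromRequest (above) accepts: policyId, mode, status, since (ISO-8601), and limit (positive, at most 200, default 50). The use of QueryHelpers and the sample values are for demonstration only.

using Microsoft.AspNetCore.WebUtilities;

var url = QueryHelpers.AddQueryString("/api/v1/scheduler/policy/runs", new Dictionary<string, string>
{
    ["policyId"] = "P-7",
    ["mode"] = "incremental",
    ["status"] = "queued",
    ["since"] = "2025-10-28T00:00:00Z",
    ["limit"] = "100"
});
// e.g. /api/v1/scheduler/policy/runs?policyId=P-7&mode=incremental&status=queued&since=2025-10-28T00%3A00%3A00Z&limit=100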
- if (!string.IsNullOrWhiteSpace(request.RunId)) - { - var existing = await _repository - .GetByRunIdAsync(tenantId, runId, cancellationToken: cancellationToken) - .ConfigureAwait(false); - - if (existing is not null) - { - _logger.LogDebug("Policy run job already exists for tenant {TenantId} and run {RunId}.", tenantId, runId); - return ToStatus(existing, now); - } - } - - var jobId = SchedulerEndpointHelpers.GenerateIdentifier("policyjob"); - var queuedAt = request.QueuedAt ?? now; - var metadata = request.Metadata ?? ImmutableSortedDictionary<string, string>.Empty; - var job = new PolicyRunJob( - SchemaVersion: SchedulerSchemaVersions.PolicyRunJob, - Id: jobId, - TenantId: tenantId, - PolicyId: request.PolicyId, - PolicyVersion: request.PolicyVersion, - Mode: request.Mode, - Priority: request.Priority, - PriorityRank: -1, - RunId: runId, - RequestedBy: request.RequestedBy, - CorrelationId: request.CorrelationId, - Metadata: metadata, - Inputs: request.Inputs ?? PolicyRunInputs.Empty, - QueuedAt: queuedAt, - Status: PolicyRunJobStatus.Pending, - AttemptCount: 0, - LastAttemptAt: null, - LastError: null, - CreatedAt: now, - UpdatedAt: now, - AvailableAt: now, - SubmittedAt: null, - CompletedAt: null, - LeaseOwner: null, - LeaseExpiresAt: null, - CancellationRequested: false, - CancellationRequestedAt: null, - CancellationReason: null, - CancelledAt: null); - - await _repository.InsertAsync(job, cancellationToken: cancellationToken).ConfigureAwait(false); - _logger.LogInformation( - "Enqueued policy run job {JobId} for tenant {TenantId} policy {PolicyId} (runId={RunId}, mode={Mode}).", - job.Id, - tenantId, - job.PolicyId, - job.RunId, - job.Mode); - - return ToStatus(job, now); - } - - public async Task<IReadOnlyList<PolicyRunStatus>> ListAsync( - string tenantId, - PolicyRunQueryOptions options, - CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); - ArgumentNullException.ThrowIfNull(options); - cancellationToken.ThrowIfCancellationRequested(); - - var statuses = options.Status is null - ? null - : MapExecutionStatus(options.Status.Value); - - var jobs = await _repository - .ListAsync( - tenantId, - options.PolicyId, - options.Mode, - statuses, - options.QueuedAfter, - options.Limit, - cancellationToken: cancellationToken) - .ConfigureAwait(false); - - var now = _timeProvider.GetUtcNow(); - return jobs - .Select(job => ToStatus(job, now)) - .ToList(); - } - - public async Task<PolicyRunStatus?> GetAsync(string tenantId, string runId, CancellationToken cancellationToken) - { - ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); - ArgumentException.ThrowIfNullOrWhiteSpace(runId); - cancellationToken.ThrowIfCancellationRequested(); - - var job = await _repository - .GetByRunIdAsync(tenantId, runId, cancellationToken: cancellationToken) - .ConfigureAwait(false); - - if (job is null) - { - return null; - } - - var now = _timeProvider.GetUtcNow(); - return ToStatus(job, now); - } - - private static PolicyRunStatus ToStatus(PolicyRunJob job, DateTimeOffset now) - { - var status = MapExecutionStatus(job.Status); - var queuedAt = job.QueuedAt ?? job.CreatedAt; - var startedAt = job.SubmittedAt; - var finishedAt = job.CompletedAt ?? job.CancelledAt; - var metadata = job.Metadata ?? ImmutableSortedDictionary<string, string>.Empty; - var inputs = job.Inputs ?? PolicyRunInputs.Empty; - var policyVersion = job.PolicyVersion - ?? 
throw new InvalidOperationException($"Policy run job '{job.Id}' is missing policyVersion."); - - return new PolicyRunStatus( - job.RunId ?? job.Id, - job.TenantId, - job.PolicyId, - policyVersion, - job.Mode, - status, - job.Priority, - queuedAt, - job.Status == PolicyRunJobStatus.Pending ? null : startedAt, - finishedAt, - PolicyRunStats.Empty, - inputs, - determinismHash: null, - errorCode: null, - error: job.Status == PolicyRunJobStatus.Failed ? job.LastError : null, - attempts: job.AttemptCount, - traceId: null, - explainUri: null, - metadata, - SchedulerSchemaVersions.PolicyRunStatus); - } - - private static PolicyRunExecutionStatus MapExecutionStatus(PolicyRunJobStatus status) - => status switch - { - PolicyRunJobStatus.Pending => PolicyRunExecutionStatus.Queued, - PolicyRunJobStatus.Dispatching => PolicyRunExecutionStatus.Running, - PolicyRunJobStatus.Submitted => PolicyRunExecutionStatus.Running, - PolicyRunJobStatus.Completed => PolicyRunExecutionStatus.Succeeded, - PolicyRunJobStatus.Failed => PolicyRunExecutionStatus.Failed, - PolicyRunJobStatus.Cancelled => PolicyRunExecutionStatus.Cancelled, - _ => PolicyRunExecutionStatus.Queued - }; - - private static IReadOnlyCollection<PolicyRunJobStatus>? MapExecutionStatus(PolicyRunExecutionStatus status) - => status switch - { - PolicyRunExecutionStatus.Queued => new[] { PolicyRunJobStatus.Pending }, - PolicyRunExecutionStatus.Running => new[] { PolicyRunJobStatus.Dispatching, PolicyRunJobStatus.Submitted }, - PolicyRunExecutionStatus.Succeeded => new[] { PolicyRunJobStatus.Completed }, - PolicyRunExecutionStatus.Failed => new[] { PolicyRunJobStatus.Failed }, - PolicyRunExecutionStatus.Cancelled => new[] { PolicyRunJobStatus.Cancelled }, - PolicyRunExecutionStatus.ReplayPending => Array.Empty<PolicyRunJobStatus>(), - _ => null - }; - - private static string GenerateRunId(string policyId, DateTimeOffset timestamp) - { - var normalizedPolicyId = string.IsNullOrWhiteSpace(policyId) ? "policy" : policyId.Trim(); - var suffix = Guid.NewGuid().ToString("N")[..8]; - return $"run:{normalizedPolicyId}:{timestamp:yyyyMMddTHHmmssZ}:{suffix}"; - } -} +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.ComponentModel.DataAnnotations; +using System.Linq; +using Microsoft.Extensions.Logging; +using StellaOps.Scheduler.Models; +using StellaOps.Scheduler.Storage.Mongo.Repositories; +using StellaOps.Scheduler.WebService; + +namespace StellaOps.Scheduler.WebService.PolicyRuns; + +internal sealed class PolicyRunService : IPolicyRunService +{ + private readonly IPolicyRunJobRepository _repository; + private readonly TimeProvider _timeProvider; + private readonly ILogger<PolicyRunService> _logger; + + public PolicyRunService( + IPolicyRunJobRepository repository, + TimeProvider timeProvider, + ILogger<PolicyRunService> logger) + { + _repository = repository ?? throw new ArgumentNullException(nameof(repository)); + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public async Task<PolicyRunStatus> EnqueueAsync(string tenantId, PolicyRunRequest request, CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentNullException.ThrowIfNull(request); + cancellationToken.ThrowIfCancellationRequested(); + + var now = _timeProvider.GetUtcNow(); + var runId = string.IsNullOrWhiteSpace(request.RunId) + ? 
GenerateRunId(request.PolicyId, now) + : request.RunId!; + + // Idempotency: return existing job if present when a runId was supplied. + if (!string.IsNullOrWhiteSpace(request.RunId)) + { + var existing = await _repository + .GetByRunIdAsync(tenantId, runId, cancellationToken: cancellationToken) + .ConfigureAwait(false); + + if (existing is not null) + { + _logger.LogDebug("Policy run job already exists for tenant {TenantId} and run {RunId}.", tenantId, runId); + return ToStatus(existing, now); + } + } + + var jobId = SchedulerEndpointHelpers.GenerateIdentifier("policyjob"); + var queuedAt = request.QueuedAt ?? now; + var metadata = request.Metadata ?? ImmutableSortedDictionary<string, string>.Empty; + var job = new PolicyRunJob( + SchemaVersion: SchedulerSchemaVersions.PolicyRunJob, + Id: jobId, + TenantId: tenantId, + PolicyId: request.PolicyId, + PolicyVersion: request.PolicyVersion, + Mode: request.Mode, + Priority: request.Priority, + PriorityRank: -1, + RunId: runId, + RequestedBy: request.RequestedBy, + CorrelationId: request.CorrelationId, + Metadata: metadata, + Inputs: request.Inputs ?? PolicyRunInputs.Empty, + QueuedAt: queuedAt, + Status: PolicyRunJobStatus.Pending, + AttemptCount: 0, + LastAttemptAt: null, + LastError: null, + CreatedAt: now, + UpdatedAt: now, + AvailableAt: now, + SubmittedAt: null, + CompletedAt: null, + LeaseOwner: null, + LeaseExpiresAt: null, + CancellationRequested: false, + CancellationRequestedAt: null, + CancellationReason: null, + CancelledAt: null); + + await _repository.InsertAsync(job, cancellationToken: cancellationToken).ConfigureAwait(false); + _logger.LogInformation( + "Enqueued policy run job {JobId} for tenant {TenantId} policy {PolicyId} (runId={RunId}, mode={Mode}).", + job.Id, + tenantId, + job.PolicyId, + job.RunId, + job.Mode); + + return ToStatus(job, now); + } + + public async Task<IReadOnlyList<PolicyRunStatus>> ListAsync( + string tenantId, + PolicyRunQueryOptions options, + CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentNullException.ThrowIfNull(options); + cancellationToken.ThrowIfCancellationRequested(); + + var statuses = options.Status is null + ? null + : MapExecutionStatus(options.Status.Value); + + var jobs = await _repository + .ListAsync( + tenantId, + options.PolicyId, + options.Mode, + statuses, + options.QueuedAfter, + options.Limit, + cancellationToken: cancellationToken) + .ConfigureAwait(false); + + var now = _timeProvider.GetUtcNow(); + return jobs + .Select(job => ToStatus(job, now)) + .ToList(); + } + + public async Task<PolicyRunStatus?> GetAsync(string tenantId, string runId, CancellationToken cancellationToken) + { + ArgumentException.ThrowIfNullOrWhiteSpace(tenantId); + ArgumentException.ThrowIfNullOrWhiteSpace(runId); + cancellationToken.ThrowIfCancellationRequested(); + + var job = await _repository + .GetByRunIdAsync(tenantId, runId, cancellationToken: cancellationToken) + .ConfigureAwait(false); + + if (job is null) + { + return null; + } + + var now = _timeProvider.GetUtcNow(); + return ToStatus(job, now); + } + + private static PolicyRunStatus ToStatus(PolicyRunJob job, DateTimeOffset now) + { + var status = MapExecutionStatus(job.Status); + var queuedAt = job.QueuedAt ?? job.CreatedAt; + var startedAt = job.SubmittedAt; + var finishedAt = job.CompletedAt ?? job.CancelledAt; + var metadata = job.Metadata ?? ImmutableSortedDictionary<string, string>.Empty; + var inputs = job.Inputs ?? 
PolicyRunInputs.Empty; + var policyVersion = job.PolicyVersion + ?? throw new InvalidOperationException($"Policy run job '{job.Id}' is missing policyVersion."); + + return new PolicyRunStatus( + job.RunId ?? job.Id, + job.TenantId, + job.PolicyId, + policyVersion, + job.Mode, + status, + job.Priority, + queuedAt, + job.Status == PolicyRunJobStatus.Pending ? null : startedAt, + finishedAt, + PolicyRunStats.Empty, + inputs, + determinismHash: null, + errorCode: null, + error: job.Status == PolicyRunJobStatus.Failed ? job.LastError : null, + attempts: job.AttemptCount, + traceId: null, + explainUri: null, + metadata, + SchedulerSchemaVersions.PolicyRunStatus); + } + + private static PolicyRunExecutionStatus MapExecutionStatus(PolicyRunJobStatus status) + => status switch + { + PolicyRunJobStatus.Pending => PolicyRunExecutionStatus.Queued, + PolicyRunJobStatus.Dispatching => PolicyRunExecutionStatus.Running, + PolicyRunJobStatus.Submitted => PolicyRunExecutionStatus.Running, + PolicyRunJobStatus.Completed => PolicyRunExecutionStatus.Succeeded, + PolicyRunJobStatus.Failed => PolicyRunExecutionStatus.Failed, + PolicyRunJobStatus.Cancelled => PolicyRunExecutionStatus.Cancelled, + _ => PolicyRunExecutionStatus.Queued + }; + + private static IReadOnlyCollection<PolicyRunJobStatus>? MapExecutionStatus(PolicyRunExecutionStatus status) + => status switch + { + PolicyRunExecutionStatus.Queued => new[] { PolicyRunJobStatus.Pending }, + PolicyRunExecutionStatus.Running => new[] { PolicyRunJobStatus.Dispatching, PolicyRunJobStatus.Submitted }, + PolicyRunExecutionStatus.Succeeded => new[] { PolicyRunJobStatus.Completed }, + PolicyRunExecutionStatus.Failed => new[] { PolicyRunJobStatus.Failed }, + PolicyRunExecutionStatus.Cancelled => new[] { PolicyRunJobStatus.Cancelled }, + PolicyRunExecutionStatus.ReplayPending => Array.Empty<PolicyRunJobStatus>(), + _ => null + }; + + private static string GenerateRunId(string policyId, DateTimeOffset timestamp) + { + var normalizedPolicyId = string.IsNullOrWhiteSpace(policyId) ? 
"policy" : policyId.Trim(); + var suffix = Guid.NewGuid().ToString("N")[..8]; + return $"run:{normalizedPolicyId}:{timestamp:yyyyMMddTHHmmssZ}:{suffix}"; + } +} diff --git a/src/StellaOps.Scheduler.WebService/Program.cs b/src/Scheduler/StellaOps.Scheduler.WebService/Program.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService/Program.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/Program.cs index cb617d40..60d5d268 100644 --- a/src/StellaOps.Scheduler.WebService/Program.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/Program.cs @@ -1,202 +1,202 @@ -using System.Linq; -using Microsoft.AspNetCore.Authentication; -using Microsoft.Extensions.DependencyInjection.Extensions; -using Microsoft.Extensions.Options; -using StellaOps.Auth.Abstractions; -using StellaOps.Auth.ServerIntegration; -using StellaOps.Plugin.DependencyInjection; -using StellaOps.Plugin.Hosting; -using StellaOps.Scheduler.WebService.Hosting; -using StellaOps.Scheduler.ImpactIndex; -using StellaOps.Scheduler.Storage.Mongo; -using StellaOps.Scheduler.Storage.Mongo.Repositories; -using StellaOps.Scheduler.Storage.Mongo.Services; -using StellaOps.Scheduler.WebService; -using StellaOps.Scheduler.WebService.Auth; -using StellaOps.Scheduler.WebService.EventWebhooks; -using StellaOps.Scheduler.WebService.GraphJobs; -using StellaOps.Scheduler.WebService.GraphJobs.Events; -using StellaOps.Scheduler.WebService.Schedules; -using StellaOps.Scheduler.WebService.Options; -using StellaOps.Scheduler.WebService.Runs; -using StellaOps.Scheduler.WebService.PolicyRuns; - -var builder = WebApplication.CreateBuilder(args); - -builder.Services.AddRouting(options => options.LowercaseUrls = true); -builder.Services.AddSingleton<StellaOps.Scheduler.WebService.ISystemClock, StellaOps.Scheduler.WebService.SystemClock>(); -builder.Services.TryAddSingleton(TimeProvider.System); - -var authorityOptions = new SchedulerAuthorityOptions(); -builder.Configuration.GetSection("Scheduler:Authority").Bind(authorityOptions); - -if (!authorityOptions.RequiredScopes.Any(scope => string.Equals(scope, StellaOpsScopes.GraphRead, StringComparison.OrdinalIgnoreCase))) -{ - authorityOptions.RequiredScopes.Add(StellaOpsScopes.GraphRead); -} - -if (!authorityOptions.RequiredScopes.Any(scope => string.Equals(scope, StellaOpsScopes.GraphWrite, StringComparison.OrdinalIgnoreCase))) -{ - authorityOptions.RequiredScopes.Add(StellaOpsScopes.GraphWrite); -} - -if (authorityOptions.Audiences.Count == 0) -{ - authorityOptions.Audiences.Add("api://scheduler"); -} - -authorityOptions.Validate(); -builder.Services.AddSingleton(authorityOptions); - -builder.Services.AddOptions<SchedulerEventsOptions>() - .Bind(builder.Configuration.GetSection("Scheduler:Events")) - .PostConfigure(options => - { - options.Webhooks ??= new SchedulerInboundWebhooksOptions(); - options.Webhooks.Feedser ??= SchedulerWebhookOptions.CreateDefault("feedser"); - options.Webhooks.Vexer ??= SchedulerWebhookOptions.CreateDefault("vexer"); - - options.Webhooks.Feedser.Name = string.IsNullOrWhiteSpace(options.Webhooks.Feedser.Name) - ? "feedser" - : options.Webhooks.Feedser.Name; - options.Webhooks.Vexer.Name = string.IsNullOrWhiteSpace(options.Webhooks.Vexer.Name) - ? 
"vexer" - : options.Webhooks.Vexer.Name; - - options.Webhooks.Feedser.Validate(); - options.Webhooks.Vexer.Validate(); - }); - -builder.Services.AddMemoryCache(); -builder.Services.AddSingleton<IWebhookRateLimiter, InMemoryWebhookRateLimiter>(); -builder.Services.AddSingleton<IWebhookRequestAuthenticator, WebhookRequestAuthenticator>(); -builder.Services.AddSingleton<IInboundExportEventSink, LoggingExportEventSink>(); - -var cartographerOptions = builder.Configuration.GetSection("Scheduler:Cartographer").Get<SchedulerCartographerOptions>() ?? new SchedulerCartographerOptions(); -builder.Services.AddSingleton(cartographerOptions); -builder.Services.AddOptions<SchedulerCartographerOptions>() - .Bind(builder.Configuration.GetSection("Scheduler:Cartographer")); - -var storageSection = builder.Configuration.GetSection("Scheduler:Storage"); -if (storageSection.Exists()) -{ - builder.Services.AddSchedulerMongoStorage(storageSection); - builder.Services.AddSingleton<IGraphJobStore, MongoGraphJobStore>(); - builder.Services.AddSingleton<IPolicyRunService, PolicyRunService>(); -} -else -{ - builder.Services.AddSingleton<IGraphJobStore, InMemoryGraphJobStore>(); - builder.Services.AddSingleton<IScheduleRepository, InMemoryScheduleRepository>(); - builder.Services.AddSingleton<IRunRepository, InMemoryRunRepository>(); - builder.Services.AddSingleton<IRunSummaryService, InMemoryRunSummaryService>(); - builder.Services.AddSingleton<ISchedulerAuditService, InMemorySchedulerAuditService>(); - builder.Services.AddSingleton<IPolicyRunService, InMemoryPolicyRunService>(); -} -builder.Services.AddSingleton<IGraphJobCompletionPublisher, GraphJobEventPublisher>(); -if (cartographerOptions.Webhook.Enabled) -{ - builder.Services.AddHttpClient<ICartographerWebhookClient, CartographerWebhookClient>((serviceProvider, client) => - { - var options = serviceProvider.GetRequiredService<IOptionsMonitor<SchedulerCartographerOptions>>().CurrentValue; - client.Timeout = TimeSpan.FromSeconds(options.Webhook.TimeoutSeconds <= 0 ? 10 : options.Webhook.TimeoutSeconds); - }); -} -else -{ - builder.Services.AddSingleton<ICartographerWebhookClient, NullCartographerWebhookClient>(); -} -builder.Services.AddScoped<IGraphJobService, GraphJobService>(); -builder.Services.AddImpactIndexStub(); - -var schedulerOptions = builder.Configuration.GetSection("Scheduler").Get<SchedulerOptions>() ?? 
new SchedulerOptions(); -schedulerOptions.Validate(); -builder.Services.AddSingleton(schedulerOptions); -builder.Services.AddOptions<SchedulerOptions>() - .Bind(builder.Configuration.GetSection("Scheduler")) - .PostConfigure(options => options.Validate()); - -var pluginHostOptions = SchedulerPluginHostFactory.Build(schedulerOptions.Plugins, builder.Environment.ContentRootPath); -builder.Services.AddSingleton(pluginHostOptions); -builder.Services.RegisterPluginRoutines(builder.Configuration, pluginHostOptions); - -if (authorityOptions.Enabled) -{ - builder.Services.AddHttpContextAccessor(); - builder.Services.AddStellaOpsResourceServerAuthentication( - builder.Configuration, - configurationSection: null, - configure: resourceOptions => - { - resourceOptions.Authority = authorityOptions.Issuer; - resourceOptions.RequireHttpsMetadata = authorityOptions.RequireHttpsMetadata; - resourceOptions.MetadataAddress = authorityOptions.MetadataAddress; - resourceOptions.BackchannelTimeout = TimeSpan.FromSeconds(authorityOptions.BackchannelTimeoutSeconds); - resourceOptions.TokenClockSkew = TimeSpan.FromSeconds(authorityOptions.TokenClockSkewSeconds); - - foreach (var audience in authorityOptions.Audiences) - { - resourceOptions.Audiences.Add(audience); - } - - foreach (var scope in authorityOptions.RequiredScopes) - { - resourceOptions.RequiredScopes.Add(scope); - } - - foreach (var tenant in authorityOptions.RequiredTenants) - { - resourceOptions.RequiredTenants.Add(tenant); - } - - foreach (var network in authorityOptions.BypassNetworks) - { - resourceOptions.BypassNetworks.Add(network); - } - }); - - builder.Services.AddAuthorization(); - builder.Services.AddScoped<ITenantContextAccessor, ClaimsTenantContextAccessor>(); - builder.Services.AddScoped<IScopeAuthorizer, TokenScopeAuthorizer>(); -} -else -{ - builder.Services.AddAuthentication(options => - { - options.DefaultAuthenticateScheme = "Anonymous"; - options.DefaultChallengeScheme = "Anonymous"; - }).AddScheme<AuthenticationSchemeOptions, AnonymousAuthenticationHandler>("Anonymous", static _ => { }); - - builder.Services.AddAuthorization(); - builder.Services.AddScoped<ITenantContextAccessor, HeaderTenantContextAccessor>(); - builder.Services.AddScoped<IScopeAuthorizer, HeaderScopeAuthorizer>(); -} - -builder.Services.AddEndpointsApiExplorer(); - -var app = builder.Build(); - -app.UseAuthentication(); -app.UseAuthorization(); - -if (!authorityOptions.Enabled) -{ - app.Logger.LogWarning("Scheduler Authority authentication is disabled; relying on header-based development fallback."); -} -else if (authorityOptions.AllowAnonymousFallback) -{ - app.Logger.LogWarning("Scheduler Authority authentication is enabled but anonymous fallback remains allowed. 
Disable fallback before production rollout."); -} - -app.MapGet("/healthz", () => Results.Json(new { status = "ok" })); -app.MapGet("/readyz", () => Results.Json(new { status = "ready" })); - -app.MapGraphJobEndpoints(); -app.MapScheduleEndpoints(); -app.MapRunEndpoints(); -app.MapPolicyRunEndpoints(); -app.MapSchedulerEventWebhookEndpoints(); - -app.Run(); - -public partial class Program; +using System.Linq; +using Microsoft.AspNetCore.Authentication; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Microsoft.Extensions.Options; +using StellaOps.Auth.Abstractions; +using StellaOps.Auth.ServerIntegration; +using StellaOps.Plugin.DependencyInjection; +using StellaOps.Plugin.Hosting; +using StellaOps.Scheduler.WebService.Hosting; +using StellaOps.Scheduler.ImpactIndex; +using StellaOps.Scheduler.Storage.Mongo; +using StellaOps.Scheduler.Storage.Mongo.Repositories; +using StellaOps.Scheduler.Storage.Mongo.Services; +using StellaOps.Scheduler.WebService; +using StellaOps.Scheduler.WebService.Auth; +using StellaOps.Scheduler.WebService.EventWebhooks; +using StellaOps.Scheduler.WebService.GraphJobs; +using StellaOps.Scheduler.WebService.GraphJobs.Events; +using StellaOps.Scheduler.WebService.Schedules; +using StellaOps.Scheduler.WebService.Options; +using StellaOps.Scheduler.WebService.Runs; +using StellaOps.Scheduler.WebService.PolicyRuns; + +var builder = WebApplication.CreateBuilder(args); + +builder.Services.AddRouting(options => options.LowercaseUrls = true); +builder.Services.AddSingleton<StellaOps.Scheduler.WebService.ISystemClock, StellaOps.Scheduler.WebService.SystemClock>(); +builder.Services.TryAddSingleton(TimeProvider.System); + +var authorityOptions = new SchedulerAuthorityOptions(); +builder.Configuration.GetSection("Scheduler:Authority").Bind(authorityOptions); + +if (!authorityOptions.RequiredScopes.Any(scope => string.Equals(scope, StellaOpsScopes.GraphRead, StringComparison.OrdinalIgnoreCase))) +{ + authorityOptions.RequiredScopes.Add(StellaOpsScopes.GraphRead); +} + +if (!authorityOptions.RequiredScopes.Any(scope => string.Equals(scope, StellaOpsScopes.GraphWrite, StringComparison.OrdinalIgnoreCase))) +{ + authorityOptions.RequiredScopes.Add(StellaOpsScopes.GraphWrite); +} + +if (authorityOptions.Audiences.Count == 0) +{ + authorityOptions.Audiences.Add("api://scheduler"); +} + +authorityOptions.Validate(); +builder.Services.AddSingleton(authorityOptions); + +builder.Services.AddOptions<SchedulerEventsOptions>() + .Bind(builder.Configuration.GetSection("Scheduler:Events")) + .PostConfigure(options => + { + options.Webhooks ??= new SchedulerInboundWebhooksOptions(); + options.Webhooks.Feedser ??= SchedulerWebhookOptions.CreateDefault("feedser"); + options.Webhooks.Vexer ??= SchedulerWebhookOptions.CreateDefault("vexer"); + + options.Webhooks.Feedser.Name = string.IsNullOrWhiteSpace(options.Webhooks.Feedser.Name) + ? "feedser" + : options.Webhooks.Feedser.Name; + options.Webhooks.Vexer.Name = string.IsNullOrWhiteSpace(options.Webhooks.Vexer.Name) + ? 
"vexer" + : options.Webhooks.Vexer.Name; + + options.Webhooks.Feedser.Validate(); + options.Webhooks.Vexer.Validate(); + }); + +builder.Services.AddMemoryCache(); +builder.Services.AddSingleton<IWebhookRateLimiter, InMemoryWebhookRateLimiter>(); +builder.Services.AddSingleton<IWebhookRequestAuthenticator, WebhookRequestAuthenticator>(); +builder.Services.AddSingleton<IInboundExportEventSink, LoggingExportEventSink>(); + +var cartographerOptions = builder.Configuration.GetSection("Scheduler:Cartographer").Get<SchedulerCartographerOptions>() ?? new SchedulerCartographerOptions(); +builder.Services.AddSingleton(cartographerOptions); +builder.Services.AddOptions<SchedulerCartographerOptions>() + .Bind(builder.Configuration.GetSection("Scheduler:Cartographer")); + +var storageSection = builder.Configuration.GetSection("Scheduler:Storage"); +if (storageSection.Exists()) +{ + builder.Services.AddSchedulerMongoStorage(storageSection); + builder.Services.AddSingleton<IGraphJobStore, MongoGraphJobStore>(); + builder.Services.AddSingleton<IPolicyRunService, PolicyRunService>(); +} +else +{ + builder.Services.AddSingleton<IGraphJobStore, InMemoryGraphJobStore>(); + builder.Services.AddSingleton<IScheduleRepository, InMemoryScheduleRepository>(); + builder.Services.AddSingleton<IRunRepository, InMemoryRunRepository>(); + builder.Services.AddSingleton<IRunSummaryService, InMemoryRunSummaryService>(); + builder.Services.AddSingleton<ISchedulerAuditService, InMemorySchedulerAuditService>(); + builder.Services.AddSingleton<IPolicyRunService, InMemoryPolicyRunService>(); +} +builder.Services.AddSingleton<IGraphJobCompletionPublisher, GraphJobEventPublisher>(); +if (cartographerOptions.Webhook.Enabled) +{ + builder.Services.AddHttpClient<ICartographerWebhookClient, CartographerWebhookClient>((serviceProvider, client) => + { + var options = serviceProvider.GetRequiredService<IOptionsMonitor<SchedulerCartographerOptions>>().CurrentValue; + client.Timeout = TimeSpan.FromSeconds(options.Webhook.TimeoutSeconds <= 0 ? 10 : options.Webhook.TimeoutSeconds); + }); +} +else +{ + builder.Services.AddSingleton<ICartographerWebhookClient, NullCartographerWebhookClient>(); +} +builder.Services.AddScoped<IGraphJobService, GraphJobService>(); +builder.Services.AddImpactIndexStub(); + +var schedulerOptions = builder.Configuration.GetSection("Scheduler").Get<SchedulerOptions>() ?? 
new SchedulerOptions(); +schedulerOptions.Validate(); +builder.Services.AddSingleton(schedulerOptions); +builder.Services.AddOptions<SchedulerOptions>() + .Bind(builder.Configuration.GetSection("Scheduler")) + .PostConfigure(options => options.Validate()); + +var pluginHostOptions = SchedulerPluginHostFactory.Build(schedulerOptions.Plugins, builder.Environment.ContentRootPath); +builder.Services.AddSingleton(pluginHostOptions); +builder.Services.RegisterPluginRoutines(builder.Configuration, pluginHostOptions); + +if (authorityOptions.Enabled) +{ + builder.Services.AddHttpContextAccessor(); + builder.Services.AddStellaOpsResourceServerAuthentication( + builder.Configuration, + configurationSection: null, + configure: resourceOptions => + { + resourceOptions.Authority = authorityOptions.Issuer; + resourceOptions.RequireHttpsMetadata = authorityOptions.RequireHttpsMetadata; + resourceOptions.MetadataAddress = authorityOptions.MetadataAddress; + resourceOptions.BackchannelTimeout = TimeSpan.FromSeconds(authorityOptions.BackchannelTimeoutSeconds); + resourceOptions.TokenClockSkew = TimeSpan.FromSeconds(authorityOptions.TokenClockSkewSeconds); + + foreach (var audience in authorityOptions.Audiences) + { + resourceOptions.Audiences.Add(audience); + } + + foreach (var scope in authorityOptions.RequiredScopes) + { + resourceOptions.RequiredScopes.Add(scope); + } + + foreach (var tenant in authorityOptions.RequiredTenants) + { + resourceOptions.RequiredTenants.Add(tenant); + } + + foreach (var network in authorityOptions.BypassNetworks) + { + resourceOptions.BypassNetworks.Add(network); + } + }); + + builder.Services.AddAuthorization(); + builder.Services.AddScoped<ITenantContextAccessor, ClaimsTenantContextAccessor>(); + builder.Services.AddScoped<IScopeAuthorizer, TokenScopeAuthorizer>(); +} +else +{ + builder.Services.AddAuthentication(options => + { + options.DefaultAuthenticateScheme = "Anonymous"; + options.DefaultChallengeScheme = "Anonymous"; + }).AddScheme<AuthenticationSchemeOptions, AnonymousAuthenticationHandler>("Anonymous", static _ => { }); + + builder.Services.AddAuthorization(); + builder.Services.AddScoped<ITenantContextAccessor, HeaderTenantContextAccessor>(); + builder.Services.AddScoped<IScopeAuthorizer, HeaderScopeAuthorizer>(); +} + +builder.Services.AddEndpointsApiExplorer(); + +var app = builder.Build(); + +app.UseAuthentication(); +app.UseAuthorization(); + +if (!authorityOptions.Enabled) +{ + app.Logger.LogWarning("Scheduler Authority authentication is disabled; relying on header-based development fallback."); +} +else if (authorityOptions.AllowAnonymousFallback) +{ + app.Logger.LogWarning("Scheduler Authority authentication is enabled but anonymous fallback remains allowed. 
Disable fallback before production rollout."); +} + +app.MapGet("/healthz", () => Results.Json(new { status = "ok" })); +app.MapGet("/readyz", () => Results.Json(new { status = "ready" })); + +app.MapGraphJobEndpoints(); +app.MapScheduleEndpoints(); +app.MapRunEndpoints(); +app.MapPolicyRunEndpoints(); +app.MapSchedulerEventWebhookEndpoints(); + +app.Run(); + +public partial class Program; diff --git a/src/StellaOps.Scheduler.WebService/Properties/AssemblyInfo.cs b/src/Scheduler/StellaOps.Scheduler.WebService/Properties/AssemblyInfo.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService/Properties/AssemblyInfo.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/Properties/AssemblyInfo.cs index 21e0ad8f..fc099ce8 100644 --- a/src/StellaOps.Scheduler.WebService/Properties/AssemblyInfo.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/Properties/AssemblyInfo.cs @@ -1,3 +1,3 @@ -using System.Runtime.CompilerServices; - -[assembly: InternalsVisibleTo("StellaOps.Scheduler.WebService.Tests")] +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("StellaOps.Scheduler.WebService.Tests")] diff --git a/src/StellaOps.Scheduler.WebService/Runs/InMemoryRunRepository.cs b/src/Scheduler/StellaOps.Scheduler.WebService/Runs/InMemoryRunRepository.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService/Runs/InMemoryRunRepository.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/Runs/InMemoryRunRepository.cs index d7c202cf..33a3e4b7 100644 --- a/src/StellaOps.Scheduler.WebService/Runs/InMemoryRunRepository.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/Runs/InMemoryRunRepository.cs @@ -1,130 +1,130 @@ -using System.Collections.Concurrent; -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Linq; -using StellaOps.Scheduler.Models; -using StellaOps.Scheduler.Storage.Mongo.Repositories; - -namespace StellaOps.Scheduler.WebService.Runs; - -internal sealed class InMemoryRunRepository : IRunRepository -{ - private readonly ConcurrentDictionary<string, Run> _runs = new(StringComparer.Ordinal); - - public Task InsertAsync( - Run run, - MongoDB.Driver.IClientSessionHandle? session = null, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(run); - _runs[run.Id] = run; - return Task.CompletedTask; - } - - public Task<bool> UpdateAsync( - Run run, - MongoDB.Driver.IClientSessionHandle? session = null, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(run); - - if (!_runs.TryGetValue(run.Id, out var existing)) - { - return Task.FromResult(false); - } - - if (!string.Equals(existing.TenantId, run.TenantId, StringComparison.Ordinal)) - { - return Task.FromResult(false); - } - - _runs[run.Id] = run; - return Task.FromResult(true); - } - - public Task<Run?> GetAsync( - string tenantId, - string runId, - MongoDB.Driver.IClientSessionHandle? session = null, - CancellationToken cancellationToken = default) - { - if (string.IsNullOrWhiteSpace(tenantId)) - { - throw new ArgumentException("Tenant id must be provided.", nameof(tenantId)); - } - - if (string.IsNullOrWhiteSpace(runId)) - { - throw new ArgumentException("Run id must be provided.", nameof(runId)); - } - - if (_runs.TryGetValue(runId, out var run) && string.Equals(run.TenantId, tenantId, StringComparison.Ordinal)) - { - return Task.FromResult<Run?>(run); - } - - return Task.FromResult<Run?>(null); - } - - public Task<IReadOnlyList<Run>> ListAsync( - string tenantId, - RunQueryOptions? 
options = null, - MongoDB.Driver.IClientSessionHandle? session = null, - CancellationToken cancellationToken = default) - { - if (string.IsNullOrWhiteSpace(tenantId)) - { - throw new ArgumentException("Tenant id must be provided.", nameof(tenantId)); - } - - options ??= new RunQueryOptions(); - - IEnumerable<Run> query = _runs.Values - .Where(run => string.Equals(run.TenantId, tenantId, StringComparison.Ordinal)); - - if (!string.IsNullOrWhiteSpace(options.ScheduleId)) - { - query = query.Where(run => string.Equals(run.ScheduleId, options.ScheduleId, StringComparison.Ordinal)); - } - - if (!options.States.IsDefaultOrEmpty) - { - var allowed = options.States.ToImmutableHashSet(); - query = query.Where(run => allowed.Contains(run.State)); - } - - if (options.CreatedAfter is { } createdAfter) - { - query = query.Where(run => run.CreatedAt > createdAfter); - } - - query = options.SortAscending - ? query.OrderBy(run => run.CreatedAt).ThenBy(run => run.Id, StringComparer.Ordinal) - : query.OrderByDescending(run => run.CreatedAt).ThenByDescending(run => run.Id, StringComparer.Ordinal); - - var limit = options.Limit is { } specified && specified > 0 ? specified : 50; - var result = query.Take(limit).ToArray(); - return Task.FromResult<IReadOnlyList<Run>>(result); - } - - public Task<IReadOnlyList<Run>> ListByStateAsync( - RunState state, - int limit = 50, - MongoDB.Driver.IClientSessionHandle? session = null, - CancellationToken cancellationToken = default) - { - if (limit <= 0) - { - throw new ArgumentOutOfRangeException(nameof(limit), limit, "Limit must be greater than zero."); - } - - var result = _runs.Values - .Where(run => run.State == state) - .OrderBy(run => run.CreatedAt) - .ThenBy(run => run.Id, StringComparer.Ordinal) - .Take(limit) - .ToArray(); - - return Task.FromResult<IReadOnlyList<Run>>(result); - } -} +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Linq; +using StellaOps.Scheduler.Models; +using StellaOps.Scheduler.Storage.Mongo.Repositories; + +namespace StellaOps.Scheduler.WebService.Runs; + +internal sealed class InMemoryRunRepository : IRunRepository +{ + private readonly ConcurrentDictionary<string, Run> _runs = new(StringComparer.Ordinal); + + public Task InsertAsync( + Run run, + MongoDB.Driver.IClientSessionHandle? session = null, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(run); + _runs[run.Id] = run; + return Task.CompletedTask; + } + + public Task<bool> UpdateAsync( + Run run, + MongoDB.Driver.IClientSessionHandle? session = null, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(run); + + if (!_runs.TryGetValue(run.Id, out var existing)) + { + return Task.FromResult(false); + } + + if (!string.Equals(existing.TenantId, run.TenantId, StringComparison.Ordinal)) + { + return Task.FromResult(false); + } + + _runs[run.Id] = run; + return Task.FromResult(true); + } + + public Task<Run?> GetAsync( + string tenantId, + string runId, + MongoDB.Driver.IClientSessionHandle? 
session = null, + CancellationToken cancellationToken = default) + { + if (string.IsNullOrWhiteSpace(tenantId)) + { + throw new ArgumentException("Tenant id must be provided.", nameof(tenantId)); + } + + if (string.IsNullOrWhiteSpace(runId)) + { + throw new ArgumentException("Run id must be provided.", nameof(runId)); + } + + if (_runs.TryGetValue(runId, out var run) && string.Equals(run.TenantId, tenantId, StringComparison.Ordinal)) + { + return Task.FromResult<Run?>(run); + } + + return Task.FromResult<Run?>(null); + } + + public Task<IReadOnlyList<Run>> ListAsync( + string tenantId, + RunQueryOptions? options = null, + MongoDB.Driver.IClientSessionHandle? session = null, + CancellationToken cancellationToken = default) + { + if (string.IsNullOrWhiteSpace(tenantId)) + { + throw new ArgumentException("Tenant id must be provided.", nameof(tenantId)); + } + + options ??= new RunQueryOptions(); + + IEnumerable<Run> query = _runs.Values + .Where(run => string.Equals(run.TenantId, tenantId, StringComparison.Ordinal)); + + if (!string.IsNullOrWhiteSpace(options.ScheduleId)) + { + query = query.Where(run => string.Equals(run.ScheduleId, options.ScheduleId, StringComparison.Ordinal)); + } + + if (!options.States.IsDefaultOrEmpty) + { + var allowed = options.States.ToImmutableHashSet(); + query = query.Where(run => allowed.Contains(run.State)); + } + + if (options.CreatedAfter is { } createdAfter) + { + query = query.Where(run => run.CreatedAt > createdAfter); + } + + query = options.SortAscending + ? query.OrderBy(run => run.CreatedAt).ThenBy(run => run.Id, StringComparer.Ordinal) + : query.OrderByDescending(run => run.CreatedAt).ThenByDescending(run => run.Id, StringComparer.Ordinal); + + var limit = options.Limit is { } specified && specified > 0 ? specified : 50; + var result = query.Take(limit).ToArray(); + return Task.FromResult<IReadOnlyList<Run>>(result); + } + + public Task<IReadOnlyList<Run>> ListByStateAsync( + RunState state, + int limit = 50, + MongoDB.Driver.IClientSessionHandle? session = null, + CancellationToken cancellationToken = default) + { + if (limit <= 0) + { + throw new ArgumentOutOfRangeException(nameof(limit), limit, "Limit must be greater than zero."); + } + + var result = _runs.Values + .Where(run => run.State == state) + .OrderBy(run => run.CreatedAt) + .ThenBy(run => run.Id, StringComparer.Ordinal) + .Take(limit) + .ToArray(); + + return Task.FromResult<IReadOnlyList<Run>>(result); + } +} diff --git a/src/StellaOps.Scheduler.WebService/Runs/RunContracts.cs b/src/Scheduler/StellaOps.Scheduler.WebService/Runs/RunContracts.cs similarity index 98% rename from src/StellaOps.Scheduler.WebService/Runs/RunContracts.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/Runs/RunContracts.cs index a9313b86..ae9a2e14 100644 --- a/src/StellaOps.Scheduler.WebService/Runs/RunContracts.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/Runs/RunContracts.cs @@ -1,40 +1,40 @@ -using System.Collections.Immutable; -using System.Text.Json.Serialization; -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.WebService.Runs; - -internal sealed record RunCreateRequest( - [property: JsonPropertyName("scheduleId")] string? ScheduleId, - [property: JsonPropertyName("trigger")] RunTrigger Trigger = RunTrigger.Manual, - [property: JsonPropertyName("reason")] RunReason? Reason = null, - [property: JsonPropertyName("correlationId")] string? 
CorrelationId = null); - -internal sealed record RunCollectionResponse( - [property: JsonPropertyName("runs")] IReadOnlyList<Run> Runs); - -internal sealed record RunResponse( - [property: JsonPropertyName("run")] Run Run); - -internal sealed record ImpactPreviewRequest( - [property: JsonPropertyName("scheduleId")] string? ScheduleId, - [property: JsonPropertyName("selector")] Selector? Selector, - [property: JsonPropertyName("productKeys")] ImmutableArray<string>? ProductKeys, - [property: JsonPropertyName("vulnerabilityIds")] ImmutableArray<string>? VulnerabilityIds, - [property: JsonPropertyName("usageOnly")] bool UsageOnly = true, - [property: JsonPropertyName("sampleSize")] int SampleSize = 10); - -internal sealed record ImpactPreviewResponse( - [property: JsonPropertyName("total")] int Total, - [property: JsonPropertyName("usageOnly")] bool UsageOnly, - [property: JsonPropertyName("generatedAt")] DateTimeOffset GeneratedAt, - [property: JsonPropertyName("snapshotId")] string? SnapshotId, - [property: JsonPropertyName("sample")] ImmutableArray<ImpactPreviewSample> Sample); - -internal sealed record ImpactPreviewSample( - [property: JsonPropertyName("imageDigest")] string ImageDigest, - [property: JsonPropertyName("registry")] string Registry, - [property: JsonPropertyName("repository")] string Repository, - [property: JsonPropertyName("namespaces")] ImmutableArray<string> Namespaces, - [property: JsonPropertyName("tags")] ImmutableArray<string> Tags, - [property: JsonPropertyName("usedByEntrypoint")] bool UsedByEntrypoint); +using System.Collections.Immutable; +using System.Text.Json.Serialization; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.WebService.Runs; + +internal sealed record RunCreateRequest( + [property: JsonPropertyName("scheduleId")] string? ScheduleId, + [property: JsonPropertyName("trigger")] RunTrigger Trigger = RunTrigger.Manual, + [property: JsonPropertyName("reason")] RunReason? Reason = null, + [property: JsonPropertyName("correlationId")] string? CorrelationId = null); + +internal sealed record RunCollectionResponse( + [property: JsonPropertyName("runs")] IReadOnlyList<Run> Runs); + +internal sealed record RunResponse( + [property: JsonPropertyName("run")] Run Run); + +internal sealed record ImpactPreviewRequest( + [property: JsonPropertyName("scheduleId")] string? ScheduleId, + [property: JsonPropertyName("selector")] Selector? Selector, + [property: JsonPropertyName("productKeys")] ImmutableArray<string>? ProductKeys, + [property: JsonPropertyName("vulnerabilityIds")] ImmutableArray<string>? VulnerabilityIds, + [property: JsonPropertyName("usageOnly")] bool UsageOnly = true, + [property: JsonPropertyName("sampleSize")] int SampleSize = 10); + +internal sealed record ImpactPreviewResponse( + [property: JsonPropertyName("total")] int Total, + [property: JsonPropertyName("usageOnly")] bool UsageOnly, + [property: JsonPropertyName("generatedAt")] DateTimeOffset GeneratedAt, + [property: JsonPropertyName("snapshotId")] string? 
SnapshotId, + [property: JsonPropertyName("sample")] ImmutableArray<ImpactPreviewSample> Sample); + +internal sealed record ImpactPreviewSample( + [property: JsonPropertyName("imageDigest")] string ImageDigest, + [property: JsonPropertyName("registry")] string Registry, + [property: JsonPropertyName("repository")] string Repository, + [property: JsonPropertyName("namespaces")] ImmutableArray<string> Namespaces, + [property: JsonPropertyName("tags")] ImmutableArray<string> Tags, + [property: JsonPropertyName("usedByEntrypoint")] bool UsedByEntrypoint); diff --git a/src/StellaOps.Scheduler.WebService/Runs/RunEndpoints.cs b/src/Scheduler/StellaOps.Scheduler.WebService/Runs/RunEndpoints.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService/Runs/RunEndpoints.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/Runs/RunEndpoints.cs index 232a2492..306ffcce 100644 --- a/src/StellaOps.Scheduler.WebService/Runs/RunEndpoints.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/Runs/RunEndpoints.cs @@ -1,419 +1,419 @@ -using System; -using System.Collections.Generic; -using System.Collections.Immutable; -using System.ComponentModel.DataAnnotations; -using System.Linq; -using Microsoft.AspNetCore.Http; -using Microsoft.AspNetCore.Mvc; -using Microsoft.AspNetCore.Routing; -using Microsoft.Extensions.Primitives; -using StellaOps.Scheduler.ImpactIndex; -using StellaOps.Scheduler.Models; -using StellaOps.Scheduler.Storage.Mongo.Repositories; -using StellaOps.Scheduler.Storage.Mongo.Services; -using StellaOps.Scheduler.WebService.Auth; - -namespace StellaOps.Scheduler.WebService.Runs; - -internal static class RunEndpoints -{ - private const string ReadScope = "scheduler.runs.read"; - private const string WriteScope = "scheduler.runs.write"; - private const string PreviewScope = "scheduler.runs.preview"; - - public static IEndpointRouteBuilder MapRunEndpoints(this IEndpointRouteBuilder routes) - { - var group = routes.MapGroup("/api/v1/scheduler/runs"); - - group.MapGet("/", ListRunsAsync); - group.MapGet("/{runId}", GetRunAsync); - group.MapPost("/", CreateRunAsync); - group.MapPost("/{runId}/cancel", CancelRunAsync); - group.MapPost("/preview", PreviewImpactAsync); - - return routes; - } - - private static async Task<IResult> ListRunsAsync( - HttpContext httpContext, - [FromServices] ITenantContextAccessor tenantAccessor, - [FromServices] IScopeAuthorizer scopeAuthorizer, - [FromServices] IRunRepository repository, - CancellationToken cancellationToken) - { - try - { - scopeAuthorizer.EnsureScope(httpContext, ReadScope); - var tenant = tenantAccessor.GetTenant(httpContext); - - var scheduleId = httpContext.Request.Query.TryGetValue("scheduleId", out var scheduleValues) - ? scheduleValues.ToString().Trim() - : null; - - var states = ParseRunStates(httpContext.Request.Query.TryGetValue("state", out var stateValues) ? stateValues : StringValues.Empty); - var createdAfter = SchedulerEndpointHelpers.TryParseDateTimeOffset(httpContext.Request.Query.TryGetValue("createdAfter", out var createdAfterValues) ? createdAfterValues.ToString() : null); - var limit = SchedulerEndpointHelpers.TryParsePositiveInt(httpContext.Request.Query.TryGetValue("limit", out var limitValues) ? limitValues.ToString() : null); - - var sortAscending = httpContext.Request.Query.TryGetValue("sort", out var sortValues) && - sortValues.Any(value => string.Equals(value, "asc", StringComparison.OrdinalIgnoreCase)); - - var options = new RunQueryOptions - { - ScheduleId = string.IsNullOrWhiteSpace(scheduleId) ? 
null : scheduleId, - States = states, - CreatedAfter = createdAfter, - Limit = limit, - SortAscending = sortAscending, - }; - - var runs = await repository.ListAsync(tenant.TenantId, options, cancellationToken: cancellationToken).ConfigureAwait(false); - return Results.Ok(new RunCollectionResponse(runs)); - } - catch (Exception ex) when (ex is ArgumentException or ValidationException) - { - return Results.BadRequest(new { error = ex.Message }); - } - } - - private static async Task<IResult> GetRunAsync( - HttpContext httpContext, - string runId, - [FromServices] ITenantContextAccessor tenantAccessor, - [FromServices] IScopeAuthorizer scopeAuthorizer, - [FromServices] IRunRepository repository, - CancellationToken cancellationToken) - { - try - { - scopeAuthorizer.EnsureScope(httpContext, ReadScope); - var tenant = tenantAccessor.GetTenant(httpContext); - - var run = await repository.GetAsync(tenant.TenantId, runId, cancellationToken: cancellationToken).ConfigureAwait(false); - if (run is null) - { - return Results.NotFound(); - } - - return Results.Ok(new RunResponse(run)); - } - catch (Exception ex) when (ex is ArgumentException or ValidationException) - { - return Results.BadRequest(new { error = ex.Message }); - } - } - - private static async Task<IResult> CreateRunAsync( - HttpContext httpContext, - RunCreateRequest request, - [FromServices] ITenantContextAccessor tenantAccessor, - [FromServices] IScopeAuthorizer scopeAuthorizer, - [FromServices] IScheduleRepository scheduleRepository, - [FromServices] IRunRepository runRepository, - [FromServices] IRunSummaryService runSummaryService, - [FromServices] ISchedulerAuditService auditService, - [FromServices] TimeProvider timeProvider, - CancellationToken cancellationToken) - { - try - { - scopeAuthorizer.EnsureScope(httpContext, WriteScope); - var tenant = tenantAccessor.GetTenant(httpContext); - - if (string.IsNullOrWhiteSpace(request.ScheduleId)) - { - throw new ValidationException("scheduleId must be provided when creating a run."); - } - - var scheduleId = request.ScheduleId!.Trim(); - if (scheduleId.Length == 0) - { - throw new ValidationException("scheduleId must contain a value."); - } - - var schedule = await scheduleRepository.GetAsync(tenant.TenantId, scheduleId, cancellationToken: cancellationToken).ConfigureAwait(false); - if (schedule is null) - { - return Results.NotFound(); - } - - if (request.Trigger != RunTrigger.Manual) - { - throw new ValidationException("Only manual runs can be created via this endpoint."); - } - - var now = timeProvider.GetUtcNow(); - var runId = SchedulerEndpointHelpers.GenerateIdentifier("run"); - var reason = request.Reason ?? 
RunReason.Empty; - - var run = new Run( - runId, - tenant.TenantId, - request.Trigger, - RunState.Planning, - RunStats.Empty, - now, - reason, - schedule.Id); - - await runRepository.InsertAsync(run, cancellationToken: cancellationToken).ConfigureAwait(false); - - if (!string.IsNullOrWhiteSpace(run.ScheduleId)) - { - await runSummaryService.ProjectAsync(run, cancellationToken).ConfigureAwait(false); - } - - await auditService.WriteAsync( - new SchedulerAuditEvent( - tenant.TenantId, - "scheduler.run", - "create", - SchedulerEndpointHelpers.ResolveAuditActor(httpContext), - RunId: run.Id, - ScheduleId: schedule.Id, - Metadata: BuildMetadata( - ("state", run.State.ToString().ToLowerInvariant()), - ("trigger", run.Trigger.ToString().ToLowerInvariant()), - ("correlationId", request.CorrelationId?.Trim()))), - cancellationToken).ConfigureAwait(false); - - return Results.Created($"/api/v1/scheduler/runs/{run.Id}", new RunResponse(run)); - } - catch (Exception ex) when (ex is ArgumentException or ValidationException) - { - return Results.BadRequest(new { error = ex.Message }); - } - } - - private static async Task<IResult> CancelRunAsync( - HttpContext httpContext, - string runId, - [FromServices] ITenantContextAccessor tenantAccessor, - [FromServices] IScopeAuthorizer scopeAuthorizer, - [FromServices] IRunRepository repository, - [FromServices] IRunSummaryService runSummaryService, - [FromServices] ISchedulerAuditService auditService, - [FromServices] TimeProvider timeProvider, - CancellationToken cancellationToken) - { - try - { - scopeAuthorizer.EnsureScope(httpContext, WriteScope); - var tenant = tenantAccessor.GetTenant(httpContext); - - var run = await repository.GetAsync(tenant.TenantId, runId, cancellationToken: cancellationToken).ConfigureAwait(false); - if (run is null) - { - return Results.NotFound(); - } - - if (RunStateMachine.IsTerminal(run.State)) - { - return Results.Conflict(new { error = "Run is already in a terminal state." }); - } - - var now = timeProvider.GetUtcNow(); - var cancelled = RunStateMachine.EnsureTransition(run, RunState.Cancelled, now); - var updated = await repository.UpdateAsync(cancelled, cancellationToken: cancellationToken).ConfigureAwait(false); - if (!updated) - { - return Results.Conflict(new { error = "Run could not be updated because it changed concurrently." 
}); - } - - if (!string.IsNullOrWhiteSpace(cancelled.ScheduleId)) - { - await runSummaryService.ProjectAsync(cancelled, cancellationToken).ConfigureAwait(false); - } - - await auditService.WriteAsync( - new SchedulerAuditEvent( - tenant.TenantId, - "scheduler.run", - "cancel", - SchedulerEndpointHelpers.ResolveAuditActor(httpContext), - RunId: cancelled.Id, - ScheduleId: cancelled.ScheduleId, - Metadata: BuildMetadata(("state", cancelled.State.ToString().ToLowerInvariant()))), - cancellationToken).ConfigureAwait(false); - - return Results.Ok(new RunResponse(cancelled)); - } - catch (InvalidOperationException ex) - { - return Results.BadRequest(new { error = ex.Message }); - } - catch (Exception ex) when (ex is ArgumentException or ValidationException) - { - return Results.BadRequest(new { error = ex.Message }); - } - } - - private static async Task<IResult> PreviewImpactAsync( - HttpContext httpContext, - ImpactPreviewRequest request, - [FromServices] ITenantContextAccessor tenantAccessor, - [FromServices] IScopeAuthorizer scopeAuthorizer, - [FromServices] IScheduleRepository scheduleRepository, - [FromServices] IImpactIndex impactIndex, - CancellationToken cancellationToken) - { - try - { - scopeAuthorizer.EnsureScope(httpContext, PreviewScope); - var tenant = tenantAccessor.GetTenant(httpContext); - - var selector = await ResolveSelectorAsync(request, tenant.TenantId, scheduleRepository, cancellationToken).ConfigureAwait(false); - - var normalizedProductKeys = NormalizeStringInputs(request.ProductKeys); - var normalizedVulnerabilityIds = NormalizeStringInputs(request.VulnerabilityIds); - - ImpactSet impactSet; - if (!normalizedProductKeys.IsDefaultOrEmpty) - { - impactSet = await impactIndex.ResolveByPurlsAsync(normalizedProductKeys, request.UsageOnly, selector, cancellationToken).ConfigureAwait(false); - } - else if (!normalizedVulnerabilityIds.IsDefaultOrEmpty) - { - impactSet = await impactIndex.ResolveByVulnerabilitiesAsync(normalizedVulnerabilityIds, request.UsageOnly, selector, cancellationToken).ConfigureAwait(false); - } - else - { - impactSet = await impactIndex.ResolveAllAsync(selector, request.UsageOnly, cancellationToken).ConfigureAwait(false); - } - - var sampleSize = Math.Clamp(request.SampleSize, 1, 50); - var sampleBuilder = ImmutableArray.CreateBuilder<ImpactPreviewSample>(); - foreach (var image in impactSet.Images.Take(sampleSize)) - { - sampleBuilder.Add(new ImpactPreviewSample( - image.ImageDigest, - image.Registry, - image.Repository, - image.Namespaces.IsDefault ? ImmutableArray<string>.Empty : image.Namespaces, - image.Tags.IsDefault ? 
ImmutableArray<string>.Empty : image.Tags, - image.UsedByEntrypoint)); - } - - var response = new ImpactPreviewResponse( - impactSet.Total, - impactSet.UsageOnly, - impactSet.GeneratedAt, - impactSet.SnapshotId, - sampleBuilder.ToImmutable()); - - return Results.Ok(response); - } - catch (KeyNotFoundException) - { - return Results.NotFound(); - } - catch (Exception ex) when (ex is ArgumentException or ValidationException) - { - return Results.BadRequest(new { error = ex.Message }); - } - } - - private static ImmutableArray<RunState> ParseRunStates(StringValues values) - { - if (values.Count == 0) - { - return ImmutableArray<RunState>.Empty; - } - - var builder = ImmutableArray.CreateBuilder<RunState>(); - foreach (var value in values) - { - if (string.IsNullOrWhiteSpace(value)) - { - continue; - } - - if (!Enum.TryParse<RunState>(value, ignoreCase: true, out var parsed)) - { - throw new ValidationException($"State '{value}' is not a valid run state."); - } - - builder.Add(parsed); - } - - return builder.ToImmutable(); - } - - private static async Task<Selector> ResolveSelectorAsync( - ImpactPreviewRequest request, - string tenantId, - IScheduleRepository scheduleRepository, - CancellationToken cancellationToken) - { - Selector? selector = null; - - if (!string.IsNullOrWhiteSpace(request.ScheduleId)) - { - var schedule = await scheduleRepository.GetAsync(tenantId, request.ScheduleId!, cancellationToken: cancellationToken).ConfigureAwait(false); - if (schedule is null) - { - throw new KeyNotFoundException($"Schedule '{request.ScheduleId}' was not found for tenant '{tenantId}'."); - } - - selector = schedule.Selection; - } - - if (request.Selector is not null) - { - if (selector is not null && request.ScheduleId is not null) - { - throw new ValidationException("selector cannot be combined with scheduleId in the same request."); - } - - selector = request.Selector; - } - - if (selector is null) - { - throw new ValidationException("Either scheduleId or selector must be provided."); - } - - return SchedulerEndpointHelpers.NormalizeSelector(selector, tenantId); - } - - private static ImmutableArray<string> NormalizeStringInputs(ImmutableArray<string>? values) - { - if (values is null || values.Value.IsDefaultOrEmpty) - { - return ImmutableArray<string>.Empty; - } - - var builder = ImmutableArray.CreateBuilder<string>(); - var seen = new HashSet<string>(StringComparer.OrdinalIgnoreCase); - foreach (var value in values.Value) - { - if (string.IsNullOrWhiteSpace(value)) - { - continue; - } - - var trimmed = value.Trim(); - if (seen.Add(trimmed)) - { - builder.Add(trimmed); - } - } - - return builder.ToImmutable(); - } - - private static IReadOnlyDictionary<string, string> BuildMetadata(params (string Key, string? 
Value)[] pairs) - { - var metadata = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase); - foreach (var (key, value) in pairs) - { - if (string.IsNullOrWhiteSpace(key) || string.IsNullOrWhiteSpace(value)) - { - continue; - } - - metadata[key] = value!; - } - - return metadata; - } -} +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.ComponentModel.DataAnnotations; +using System.Linq; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Mvc; +using Microsoft.AspNetCore.Routing; +using Microsoft.Extensions.Primitives; +using StellaOps.Scheduler.ImpactIndex; +using StellaOps.Scheduler.Models; +using StellaOps.Scheduler.Storage.Mongo.Repositories; +using StellaOps.Scheduler.Storage.Mongo.Services; +using StellaOps.Scheduler.WebService.Auth; + +namespace StellaOps.Scheduler.WebService.Runs; + +internal static class RunEndpoints +{ + private const string ReadScope = "scheduler.runs.read"; + private const string WriteScope = "scheduler.runs.write"; + private const string PreviewScope = "scheduler.runs.preview"; + + public static IEndpointRouteBuilder MapRunEndpoints(this IEndpointRouteBuilder routes) + { + var group = routes.MapGroup("/api/v1/scheduler/runs"); + + group.MapGet("/", ListRunsAsync); + group.MapGet("/{runId}", GetRunAsync); + group.MapPost("/", CreateRunAsync); + group.MapPost("/{runId}/cancel", CancelRunAsync); + group.MapPost("/preview", PreviewImpactAsync); + + return routes; + } + + private static async Task<IResult> ListRunsAsync( + HttpContext httpContext, + [FromServices] ITenantContextAccessor tenantAccessor, + [FromServices] IScopeAuthorizer scopeAuthorizer, + [FromServices] IRunRepository repository, + CancellationToken cancellationToken) + { + try + { + scopeAuthorizer.EnsureScope(httpContext, ReadScope); + var tenant = tenantAccessor.GetTenant(httpContext); + + var scheduleId = httpContext.Request.Query.TryGetValue("scheduleId", out var scheduleValues) + ? scheduleValues.ToString().Trim() + : null; + + var states = ParseRunStates(httpContext.Request.Query.TryGetValue("state", out var stateValues) ? stateValues : StringValues.Empty); + var createdAfter = SchedulerEndpointHelpers.TryParseDateTimeOffset(httpContext.Request.Query.TryGetValue("createdAfter", out var createdAfterValues) ? createdAfterValues.ToString() : null); + var limit = SchedulerEndpointHelpers.TryParsePositiveInt(httpContext.Request.Query.TryGetValue("limit", out var limitValues) ? limitValues.ToString() : null); + + var sortAscending = httpContext.Request.Query.TryGetValue("sort", out var sortValues) && + sortValues.Any(value => string.Equals(value, "asc", StringComparison.OrdinalIgnoreCase)); + + var options = new RunQueryOptions + { + ScheduleId = string.IsNullOrWhiteSpace(scheduleId) ? 
null : scheduleId, + States = states, + CreatedAfter = createdAfter, + Limit = limit, + SortAscending = sortAscending, + }; + + var runs = await repository.ListAsync(tenant.TenantId, options, cancellationToken: cancellationToken).ConfigureAwait(false); + return Results.Ok(new RunCollectionResponse(runs)); + } + catch (Exception ex) when (ex is ArgumentException or ValidationException) + { + return Results.BadRequest(new { error = ex.Message }); + } + } + + private static async Task<IResult> GetRunAsync( + HttpContext httpContext, + string runId, + [FromServices] ITenantContextAccessor tenantAccessor, + [FromServices] IScopeAuthorizer scopeAuthorizer, + [FromServices] IRunRepository repository, + CancellationToken cancellationToken) + { + try + { + scopeAuthorizer.EnsureScope(httpContext, ReadScope); + var tenant = tenantAccessor.GetTenant(httpContext); + + var run = await repository.GetAsync(tenant.TenantId, runId, cancellationToken: cancellationToken).ConfigureAwait(false); + if (run is null) + { + return Results.NotFound(); + } + + return Results.Ok(new RunResponse(run)); + } + catch (Exception ex) when (ex is ArgumentException or ValidationException) + { + return Results.BadRequest(new { error = ex.Message }); + } + } + + private static async Task<IResult> CreateRunAsync( + HttpContext httpContext, + RunCreateRequest request, + [FromServices] ITenantContextAccessor tenantAccessor, + [FromServices] IScopeAuthorizer scopeAuthorizer, + [FromServices] IScheduleRepository scheduleRepository, + [FromServices] IRunRepository runRepository, + [FromServices] IRunSummaryService runSummaryService, + [FromServices] ISchedulerAuditService auditService, + [FromServices] TimeProvider timeProvider, + CancellationToken cancellationToken) + { + try + { + scopeAuthorizer.EnsureScope(httpContext, WriteScope); + var tenant = tenantAccessor.GetTenant(httpContext); + + if (string.IsNullOrWhiteSpace(request.ScheduleId)) + { + throw new ValidationException("scheduleId must be provided when creating a run."); + } + + var scheduleId = request.ScheduleId!.Trim(); + if (scheduleId.Length == 0) + { + throw new ValidationException("scheduleId must contain a value."); + } + + var schedule = await scheduleRepository.GetAsync(tenant.TenantId, scheduleId, cancellationToken: cancellationToken).ConfigureAwait(false); + if (schedule is null) + { + return Results.NotFound(); + } + + if (request.Trigger != RunTrigger.Manual) + { + throw new ValidationException("Only manual runs can be created via this endpoint."); + } + + var now = timeProvider.GetUtcNow(); + var runId = SchedulerEndpointHelpers.GenerateIdentifier("run"); + var reason = request.Reason ?? 
RunReason.Empty; + + var run = new Run( + runId, + tenant.TenantId, + request.Trigger, + RunState.Planning, + RunStats.Empty, + now, + reason, + schedule.Id); + + await runRepository.InsertAsync(run, cancellationToken: cancellationToken).ConfigureAwait(false); + + if (!string.IsNullOrWhiteSpace(run.ScheduleId)) + { + await runSummaryService.ProjectAsync(run, cancellationToken).ConfigureAwait(false); + } + + await auditService.WriteAsync( + new SchedulerAuditEvent( + tenant.TenantId, + "scheduler.run", + "create", + SchedulerEndpointHelpers.ResolveAuditActor(httpContext), + RunId: run.Id, + ScheduleId: schedule.Id, + Metadata: BuildMetadata( + ("state", run.State.ToString().ToLowerInvariant()), + ("trigger", run.Trigger.ToString().ToLowerInvariant()), + ("correlationId", request.CorrelationId?.Trim()))), + cancellationToken).ConfigureAwait(false); + + return Results.Created($"/api/v1/scheduler/runs/{run.Id}", new RunResponse(run)); + } + catch (Exception ex) when (ex is ArgumentException or ValidationException) + { + return Results.BadRequest(new { error = ex.Message }); + } + } + + private static async Task<IResult> CancelRunAsync( + HttpContext httpContext, + string runId, + [FromServices] ITenantContextAccessor tenantAccessor, + [FromServices] IScopeAuthorizer scopeAuthorizer, + [FromServices] IRunRepository repository, + [FromServices] IRunSummaryService runSummaryService, + [FromServices] ISchedulerAuditService auditService, + [FromServices] TimeProvider timeProvider, + CancellationToken cancellationToken) + { + try + { + scopeAuthorizer.EnsureScope(httpContext, WriteScope); + var tenant = tenantAccessor.GetTenant(httpContext); + + var run = await repository.GetAsync(tenant.TenantId, runId, cancellationToken: cancellationToken).ConfigureAwait(false); + if (run is null) + { + return Results.NotFound(); + } + + if (RunStateMachine.IsTerminal(run.State)) + { + return Results.Conflict(new { error = "Run is already in a terminal state." }); + } + + var now = timeProvider.GetUtcNow(); + var cancelled = RunStateMachine.EnsureTransition(run, RunState.Cancelled, now); + var updated = await repository.UpdateAsync(cancelled, cancellationToken: cancellationToken).ConfigureAwait(false); + if (!updated) + { + return Results.Conflict(new { error = "Run could not be updated because it changed concurrently." 
}); + } + + if (!string.IsNullOrWhiteSpace(cancelled.ScheduleId)) + { + await runSummaryService.ProjectAsync(cancelled, cancellationToken).ConfigureAwait(false); + } + + await auditService.WriteAsync( + new SchedulerAuditEvent( + tenant.TenantId, + "scheduler.run", + "cancel", + SchedulerEndpointHelpers.ResolveAuditActor(httpContext), + RunId: cancelled.Id, + ScheduleId: cancelled.ScheduleId, + Metadata: BuildMetadata(("state", cancelled.State.ToString().ToLowerInvariant()))), + cancellationToken).ConfigureAwait(false); + + return Results.Ok(new RunResponse(cancelled)); + } + catch (InvalidOperationException ex) + { + return Results.BadRequest(new { error = ex.Message }); + } + catch (Exception ex) when (ex is ArgumentException or ValidationException) + { + return Results.BadRequest(new { error = ex.Message }); + } + } + + private static async Task<IResult> PreviewImpactAsync( + HttpContext httpContext, + ImpactPreviewRequest request, + [FromServices] ITenantContextAccessor tenantAccessor, + [FromServices] IScopeAuthorizer scopeAuthorizer, + [FromServices] IScheduleRepository scheduleRepository, + [FromServices] IImpactIndex impactIndex, + CancellationToken cancellationToken) + { + try + { + scopeAuthorizer.EnsureScope(httpContext, PreviewScope); + var tenant = tenantAccessor.GetTenant(httpContext); + + var selector = await ResolveSelectorAsync(request, tenant.TenantId, scheduleRepository, cancellationToken).ConfigureAwait(false); + + var normalizedProductKeys = NormalizeStringInputs(request.ProductKeys); + var normalizedVulnerabilityIds = NormalizeStringInputs(request.VulnerabilityIds); + + ImpactSet impactSet; + if (!normalizedProductKeys.IsDefaultOrEmpty) + { + impactSet = await impactIndex.ResolveByPurlsAsync(normalizedProductKeys, request.UsageOnly, selector, cancellationToken).ConfigureAwait(false); + } + else if (!normalizedVulnerabilityIds.IsDefaultOrEmpty) + { + impactSet = await impactIndex.ResolveByVulnerabilitiesAsync(normalizedVulnerabilityIds, request.UsageOnly, selector, cancellationToken).ConfigureAwait(false); + } + else + { + impactSet = await impactIndex.ResolveAllAsync(selector, request.UsageOnly, cancellationToken).ConfigureAwait(false); + } + + var sampleSize = Math.Clamp(request.SampleSize, 1, 50); + var sampleBuilder = ImmutableArray.CreateBuilder<ImpactPreviewSample>(); + foreach (var image in impactSet.Images.Take(sampleSize)) + { + sampleBuilder.Add(new ImpactPreviewSample( + image.ImageDigest, + image.Registry, + image.Repository, + image.Namespaces.IsDefault ? ImmutableArray<string>.Empty : image.Namespaces, + image.Tags.IsDefault ? 
ImmutableArray<string>.Empty : image.Tags, + image.UsedByEntrypoint)); + } + + var response = new ImpactPreviewResponse( + impactSet.Total, + impactSet.UsageOnly, + impactSet.GeneratedAt, + impactSet.SnapshotId, + sampleBuilder.ToImmutable()); + + return Results.Ok(response); + } + catch (KeyNotFoundException) + { + return Results.NotFound(); + } + catch (Exception ex) when (ex is ArgumentException or ValidationException) + { + return Results.BadRequest(new { error = ex.Message }); + } + } + + private static ImmutableArray<RunState> ParseRunStates(StringValues values) + { + if (values.Count == 0) + { + return ImmutableArray<RunState>.Empty; + } + + var builder = ImmutableArray.CreateBuilder<RunState>(); + foreach (var value in values) + { + if (string.IsNullOrWhiteSpace(value)) + { + continue; + } + + if (!Enum.TryParse<RunState>(value, ignoreCase: true, out var parsed)) + { + throw new ValidationException($"State '{value}' is not a valid run state."); + } + + builder.Add(parsed); + } + + return builder.ToImmutable(); + } + + private static async Task<Selector> ResolveSelectorAsync( + ImpactPreviewRequest request, + string tenantId, + IScheduleRepository scheduleRepository, + CancellationToken cancellationToken) + { + Selector? selector = null; + + if (!string.IsNullOrWhiteSpace(request.ScheduleId)) + { + var schedule = await scheduleRepository.GetAsync(tenantId, request.ScheduleId!, cancellationToken: cancellationToken).ConfigureAwait(false); + if (schedule is null) + { + throw new KeyNotFoundException($"Schedule '{request.ScheduleId}' was not found for tenant '{tenantId}'."); + } + + selector = schedule.Selection; + } + + if (request.Selector is not null) + { + if (selector is not null && request.ScheduleId is not null) + { + throw new ValidationException("selector cannot be combined with scheduleId in the same request."); + } + + selector = request.Selector; + } + + if (selector is null) + { + throw new ValidationException("Either scheduleId or selector must be provided."); + } + + return SchedulerEndpointHelpers.NormalizeSelector(selector, tenantId); + } + + private static ImmutableArray<string> NormalizeStringInputs(ImmutableArray<string>? values) + { + if (values is null || values.Value.IsDefaultOrEmpty) + { + return ImmutableArray<string>.Empty; + } + + var builder = ImmutableArray.CreateBuilder<string>(); + var seen = new HashSet<string>(StringComparer.OrdinalIgnoreCase); + foreach (var value in values.Value) + { + if (string.IsNullOrWhiteSpace(value)) + { + continue; + } + + var trimmed = value.Trim(); + if (seen.Add(trimmed)) + { + builder.Add(trimmed); + } + } + + return builder.ToImmutable(); + } + + private static IReadOnlyDictionary<string, string> BuildMetadata(params (string Key, string? 
Value)[] pairs) + { + var metadata = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase); + foreach (var (key, value) in pairs) + { + if (string.IsNullOrWhiteSpace(key) || string.IsNullOrWhiteSpace(value)) + { + continue; + } + + metadata[key] = value!; + } + + return metadata; + } +} diff --git a/src/StellaOps.Scheduler.WebService/SchedulerEndpointHelpers.cs b/src/Scheduler/StellaOps.Scheduler.WebService/SchedulerEndpointHelpers.cs similarity index 96% rename from src/StellaOps.Scheduler.WebService/SchedulerEndpointHelpers.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/SchedulerEndpointHelpers.cs index cccbd571..136031c8 100644 --- a/src/StellaOps.Scheduler.WebService/SchedulerEndpointHelpers.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/SchedulerEndpointHelpers.cs @@ -1,127 +1,127 @@ -using System.ComponentModel.DataAnnotations; -using System.Globalization; -using StellaOps.Scheduler.Models; -using StellaOps.Scheduler.Storage.Mongo.Services; - -namespace StellaOps.Scheduler.WebService; - -internal static class SchedulerEndpointHelpers -{ - private const string ActorHeader = "X-Actor-Id"; - private const string ActorNameHeader = "X-Actor-Name"; - private const string ActorKindHeader = "X-Actor-Kind"; - private const string TenantHeader = "X-Tenant-Id"; - - public static string GenerateIdentifier(string prefix) - { - if (string.IsNullOrWhiteSpace(prefix)) - { - throw new ArgumentException("Prefix must be provided.", nameof(prefix)); - } - - return $"{prefix.Trim()}_{Guid.NewGuid():N}"; - } - - public static string ResolveActorId(HttpContext context) - { - ArgumentNullException.ThrowIfNull(context); - - if (context.Request.Headers.TryGetValue(ActorHeader, out var values)) - { - var actor = values.ToString().Trim(); - if (!string.IsNullOrEmpty(actor)) - { - return actor; - } - } - - if (context.Request.Headers.TryGetValue(TenantHeader, out var tenant)) - { - var tenantId = tenant.ToString().Trim(); - if (!string.IsNullOrEmpty(tenantId)) - { - return tenantId; - } - } - - return "system"; - } - - public static AuditActor ResolveAuditActor(HttpContext context) - { - ArgumentNullException.ThrowIfNull(context); - - var actorId = context.Request.Headers.TryGetValue(ActorHeader, out var idHeader) - ? idHeader.ToString().Trim() - : null; - - var displayName = context.Request.Headers.TryGetValue(ActorNameHeader, out var nameHeader) - ? nameHeader.ToString().Trim() - : null; - - var kind = context.Request.Headers.TryGetValue(ActorKindHeader, out var kindHeader) - ? kindHeader.ToString().Trim() - : null; - - if (string.IsNullOrWhiteSpace(actorId)) - { - actorId = context.Request.Headers.TryGetValue(TenantHeader, out var tenantHeader) - ? tenantHeader.ToString().Trim() - : "system"; - } - - displayName = string.IsNullOrWhiteSpace(displayName) ? actorId : displayName; - kind = string.IsNullOrWhiteSpace(kind) ? "user" : kind; - - return new AuditActor(actorId!, displayName!, kind!); - } - - public static bool TryParseBoolean(string? value) - => !string.IsNullOrWhiteSpace(value) && - (string.Equals(value, "true", StringComparison.OrdinalIgnoreCase) || - string.Equals(value, "1", StringComparison.OrdinalIgnoreCase)); - - public static int? TryParsePositiveInt(string? value) - { - if (int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsed) && parsed > 0) - { - return parsed; - } - - return null; - } - - public static DateTimeOffset? TryParseDateTimeOffset(string? 
value) - { - if (string.IsNullOrWhiteSpace(value)) - { - return null; - } - - if (DateTimeOffset.TryParse(value, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal, out var parsed)) - { - return parsed.ToUniversalTime(); - } - - throw new ValidationException($"Value '{value}' is not a valid ISO-8601 timestamp."); - } - - public static Selector NormalizeSelector(Selector selection, string tenantId) - { - ArgumentNullException.ThrowIfNull(selection); - if (string.IsNullOrWhiteSpace(tenantId)) - { - throw new ArgumentException("Tenant identifier must be provided.", nameof(tenantId)); - } - - return new Selector( - selection.Scope, - tenantId, - selection.Namespaces, - selection.Repositories, - selection.Digests, - selection.IncludeTags, - selection.Labels, - selection.ResolvesTags); - } -} +using System.ComponentModel.DataAnnotations; +using System.Globalization; +using StellaOps.Scheduler.Models; +using StellaOps.Scheduler.Storage.Mongo.Services; + +namespace StellaOps.Scheduler.WebService; + +internal static class SchedulerEndpointHelpers +{ + private const string ActorHeader = "X-Actor-Id"; + private const string ActorNameHeader = "X-Actor-Name"; + private const string ActorKindHeader = "X-Actor-Kind"; + private const string TenantHeader = "X-Tenant-Id"; + + public static string GenerateIdentifier(string prefix) + { + if (string.IsNullOrWhiteSpace(prefix)) + { + throw new ArgumentException("Prefix must be provided.", nameof(prefix)); + } + + return $"{prefix.Trim()}_{Guid.NewGuid():N}"; + } + + public static string ResolveActorId(HttpContext context) + { + ArgumentNullException.ThrowIfNull(context); + + if (context.Request.Headers.TryGetValue(ActorHeader, out var values)) + { + var actor = values.ToString().Trim(); + if (!string.IsNullOrEmpty(actor)) + { + return actor; + } + } + + if (context.Request.Headers.TryGetValue(TenantHeader, out var tenant)) + { + var tenantId = tenant.ToString().Trim(); + if (!string.IsNullOrEmpty(tenantId)) + { + return tenantId; + } + } + + return "system"; + } + + public static AuditActor ResolveAuditActor(HttpContext context) + { + ArgumentNullException.ThrowIfNull(context); + + var actorId = context.Request.Headers.TryGetValue(ActorHeader, out var idHeader) + ? idHeader.ToString().Trim() + : null; + + var displayName = context.Request.Headers.TryGetValue(ActorNameHeader, out var nameHeader) + ? nameHeader.ToString().Trim() + : null; + + var kind = context.Request.Headers.TryGetValue(ActorKindHeader, out var kindHeader) + ? kindHeader.ToString().Trim() + : null; + + if (string.IsNullOrWhiteSpace(actorId)) + { + actorId = context.Request.Headers.TryGetValue(TenantHeader, out var tenantHeader) + ? tenantHeader.ToString().Trim() + : "system"; + } + + displayName = string.IsNullOrWhiteSpace(displayName) ? actorId : displayName; + kind = string.IsNullOrWhiteSpace(kind) ? "user" : kind; + + return new AuditActor(actorId!, displayName!, kind!); + } + + public static bool TryParseBoolean(string? value) + => !string.IsNullOrWhiteSpace(value) && + (string.Equals(value, "true", StringComparison.OrdinalIgnoreCase) || + string.Equals(value, "1", StringComparison.OrdinalIgnoreCase)); + + public static int? TryParsePositiveInt(string? value) + { + if (int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsed) && parsed > 0) + { + return parsed; + } + + return null; + } + + public static DateTimeOffset? TryParseDateTimeOffset(string? 
value) + { + if (string.IsNullOrWhiteSpace(value)) + { + return null; + } + + if (DateTimeOffset.TryParse(value, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal, out var parsed)) + { + return parsed.ToUniversalTime(); + } + + throw new ValidationException($"Value '{value}' is not a valid ISO-8601 timestamp."); + } + + public static Selector NormalizeSelector(Selector selection, string tenantId) + { + ArgumentNullException.ThrowIfNull(selection); + if (string.IsNullOrWhiteSpace(tenantId)) + { + throw new ArgumentException("Tenant identifier must be provided.", nameof(tenantId)); + } + + return new Selector( + selection.Scope, + tenantId, + selection.Namespaces, + selection.Repositories, + selection.Digests, + selection.IncludeTags, + selection.Labels, + selection.ResolvesTags); + } +} diff --git a/src/StellaOps.Scheduler.WebService/Schedules/InMemorySchedulerServices.cs b/src/Scheduler/StellaOps.Scheduler.WebService/Schedules/InMemorySchedulerServices.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService/Schedules/InMemorySchedulerServices.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/Schedules/InMemorySchedulerServices.cs index e0dadcc0..b29038a3 100644 --- a/src/StellaOps.Scheduler.WebService/Schedules/InMemorySchedulerServices.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/Schedules/InMemorySchedulerServices.cs @@ -1,153 +1,153 @@ -using System.Collections.Concurrent; -using System.Collections.Immutable; -using MongoDB.Driver; -using StellaOps.Scheduler.Models; -using StellaOps.Scheduler.Storage.Mongo.Projections; -using StellaOps.Scheduler.Storage.Mongo.Repositories; -using StellaOps.Scheduler.Storage.Mongo.Services; - -namespace StellaOps.Scheduler.WebService.Schedules; - -internal sealed class InMemoryScheduleRepository : IScheduleRepository -{ - private readonly ConcurrentDictionary<string, Schedule> _schedules = new(StringComparer.Ordinal); - - public Task UpsertAsync( - Schedule schedule, - IClientSessionHandle? session = null, - CancellationToken cancellationToken = default) - { - _schedules[schedule.Id] = schedule; - return Task.CompletedTask; - } - - public Task<Schedule?> GetAsync( - string tenantId, - string scheduleId, - IClientSessionHandle? session = null, - CancellationToken cancellationToken = default) - { - if (_schedules.TryGetValue(scheduleId, out var schedule) && - string.Equals(schedule.TenantId, tenantId, StringComparison.Ordinal)) - { - return Task.FromResult<Schedule?>(schedule); - } - - return Task.FromResult<Schedule?>(null); - } - - public Task<IReadOnlyList<Schedule>> ListAsync( - string tenantId, - ScheduleQueryOptions? options = null, - MongoDB.Driver.IClientSessionHandle? session = null, - CancellationToken cancellationToken = default) - { - options ??= new ScheduleQueryOptions(); - - var query = _schedules.Values - .Where(schedule => string.Equals(schedule.TenantId, tenantId, StringComparison.Ordinal)); - - if (!options.IncludeDisabled) - { - query = query.Where(schedule => schedule.Enabled); - } - - var result = query - .OrderBy(schedule => schedule.Name, StringComparer.Ordinal) - .Take(options.Limit ?? int.MaxValue) - .ToArray(); - - return Task.FromResult<IReadOnlyList<Schedule>>(result); - } - - public Task<bool> SoftDeleteAsync( - string tenantId, - string scheduleId, - string deletedBy, - DateTimeOffset deletedAt, - MongoDB.Driver.IClientSessionHandle? 
session = null, - CancellationToken cancellationToken = default) - { - if (_schedules.TryGetValue(scheduleId, out var schedule) && - string.Equals(schedule.TenantId, tenantId, StringComparison.Ordinal)) - { - _schedules.TryRemove(scheduleId, out _); - return Task.FromResult(true); - } - - return Task.FromResult(false); - } -} - -internal sealed class InMemoryRunSummaryService : IRunSummaryService -{ - private readonly ConcurrentDictionary<(string TenantId, string ScheduleId), RunSummaryProjection> _summaries = new(); - - public Task<RunSummaryProjection> ProjectAsync(Run run, CancellationToken cancellationToken = default) - { - var scheduleId = run.ScheduleId ?? string.Empty; - var updatedAt = run.FinishedAt ?? run.StartedAt ?? run.CreatedAt; - - var counters = new RunSummaryCounters( - Total: 0, - Planning: 0, - Queued: 0, - Running: 0, - Completed: 0, - Error: 0, - Cancelled: 0, - TotalDeltas: 0, - TotalNewCriticals: 0, - TotalNewHigh: 0, - TotalNewMedium: 0, - TotalNewLow: 0); - - var projection = new RunSummaryProjection( - run.TenantId, - scheduleId, - updatedAt, - null, - ImmutableArray<RunSummarySnapshot>.Empty, - counters); - - _summaries[(run.TenantId, scheduleId)] = projection; - return Task.FromResult(projection); - } - - public Task<RunSummaryProjection?> GetAsync(string tenantId, string scheduleId, CancellationToken cancellationToken = default) - { - _summaries.TryGetValue((tenantId, scheduleId), out var projection); - return Task.FromResult<RunSummaryProjection?>(projection); - } - - public Task<IReadOnlyList<RunSummaryProjection>> ListAsync(string tenantId, CancellationToken cancellationToken = default) - { - var projections = _summaries.Values - .Where(summary => string.Equals(summary.TenantId, tenantId, StringComparison.Ordinal)) - .ToArray(); - return Task.FromResult<IReadOnlyList<RunSummaryProjection>>(projections); - } -} - -internal sealed class InMemorySchedulerAuditService : ISchedulerAuditService -{ - public Task<AuditRecord> WriteAsync(SchedulerAuditEvent auditEvent, CancellationToken cancellationToken = default) - { - var occurredAt = auditEvent.OccurredAt ?? DateTimeOffset.UtcNow; - var record = new AuditRecord( - auditEvent.AuditId ?? $"audit_{Guid.NewGuid():N}", - auditEvent.TenantId, - auditEvent.Category, - auditEvent.Action, - occurredAt, - auditEvent.Actor, - auditEvent.EntityId, - auditEvent.ScheduleId, - auditEvent.RunId, - auditEvent.CorrelationId, - auditEvent.Metadata?.ToImmutableSortedDictionary(StringComparer.OrdinalIgnoreCase) ?? ImmutableSortedDictionary<string, string>.Empty, - auditEvent.Message); - - return Task.FromResult(record); - } -} +using System.Collections.Concurrent; +using System.Collections.Immutable; +using MongoDB.Driver; +using StellaOps.Scheduler.Models; +using StellaOps.Scheduler.Storage.Mongo.Projections; +using StellaOps.Scheduler.Storage.Mongo.Repositories; +using StellaOps.Scheduler.Storage.Mongo.Services; + +namespace StellaOps.Scheduler.WebService.Schedules; + +internal sealed class InMemoryScheduleRepository : IScheduleRepository +{ + private readonly ConcurrentDictionary<string, Schedule> _schedules = new(StringComparer.Ordinal); + + public Task UpsertAsync( + Schedule schedule, + IClientSessionHandle? session = null, + CancellationToken cancellationToken = default) + { + _schedules[schedule.Id] = schedule; + return Task.CompletedTask; + } + + public Task<Schedule?> GetAsync( + string tenantId, + string scheduleId, + IClientSessionHandle? 
session = null, + CancellationToken cancellationToken = default) + { + if (_schedules.TryGetValue(scheduleId, out var schedule) && + string.Equals(schedule.TenantId, tenantId, StringComparison.Ordinal)) + { + return Task.FromResult<Schedule?>(schedule); + } + + return Task.FromResult<Schedule?>(null); + } + + public Task<IReadOnlyList<Schedule>> ListAsync( + string tenantId, + ScheduleQueryOptions? options = null, + MongoDB.Driver.IClientSessionHandle? session = null, + CancellationToken cancellationToken = default) + { + options ??= new ScheduleQueryOptions(); + + var query = _schedules.Values + .Where(schedule => string.Equals(schedule.TenantId, tenantId, StringComparison.Ordinal)); + + if (!options.IncludeDisabled) + { + query = query.Where(schedule => schedule.Enabled); + } + + var result = query + .OrderBy(schedule => schedule.Name, StringComparer.Ordinal) + .Take(options.Limit ?? int.MaxValue) + .ToArray(); + + return Task.FromResult<IReadOnlyList<Schedule>>(result); + } + + public Task<bool> SoftDeleteAsync( + string tenantId, + string scheduleId, + string deletedBy, + DateTimeOffset deletedAt, + MongoDB.Driver.IClientSessionHandle? session = null, + CancellationToken cancellationToken = default) + { + if (_schedules.TryGetValue(scheduleId, out var schedule) && + string.Equals(schedule.TenantId, tenantId, StringComparison.Ordinal)) + { + _schedules.TryRemove(scheduleId, out _); + return Task.FromResult(true); + } + + return Task.FromResult(false); + } +} + +internal sealed class InMemoryRunSummaryService : IRunSummaryService +{ + private readonly ConcurrentDictionary<(string TenantId, string ScheduleId), RunSummaryProjection> _summaries = new(); + + public Task<RunSummaryProjection> ProjectAsync(Run run, CancellationToken cancellationToken = default) + { + var scheduleId = run.ScheduleId ?? string.Empty; + var updatedAt = run.FinishedAt ?? run.StartedAt ?? run.CreatedAt; + + var counters = new RunSummaryCounters( + Total: 0, + Planning: 0, + Queued: 0, + Running: 0, + Completed: 0, + Error: 0, + Cancelled: 0, + TotalDeltas: 0, + TotalNewCriticals: 0, + TotalNewHigh: 0, + TotalNewMedium: 0, + TotalNewLow: 0); + + var projection = new RunSummaryProjection( + run.TenantId, + scheduleId, + updatedAt, + null, + ImmutableArray<RunSummarySnapshot>.Empty, + counters); + + _summaries[(run.TenantId, scheduleId)] = projection; + return Task.FromResult(projection); + } + + public Task<RunSummaryProjection?> GetAsync(string tenantId, string scheduleId, CancellationToken cancellationToken = default) + { + _summaries.TryGetValue((tenantId, scheduleId), out var projection); + return Task.FromResult<RunSummaryProjection?>(projection); + } + + public Task<IReadOnlyList<RunSummaryProjection>> ListAsync(string tenantId, CancellationToken cancellationToken = default) + { + var projections = _summaries.Values + .Where(summary => string.Equals(summary.TenantId, tenantId, StringComparison.Ordinal)) + .ToArray(); + return Task.FromResult<IReadOnlyList<RunSummaryProjection>>(projections); + } +} + +internal sealed class InMemorySchedulerAuditService : ISchedulerAuditService +{ + public Task<AuditRecord> WriteAsync(SchedulerAuditEvent auditEvent, CancellationToken cancellationToken = default) + { + var occurredAt = auditEvent.OccurredAt ?? DateTimeOffset.UtcNow; + var record = new AuditRecord( + auditEvent.AuditId ?? 
$"audit_{Guid.NewGuid():N}", + auditEvent.TenantId, + auditEvent.Category, + auditEvent.Action, + occurredAt, + auditEvent.Actor, + auditEvent.EntityId, + auditEvent.ScheduleId, + auditEvent.RunId, + auditEvent.CorrelationId, + auditEvent.Metadata?.ToImmutableSortedDictionary(StringComparer.OrdinalIgnoreCase) ?? ImmutableSortedDictionary<string, string>.Empty, + auditEvent.Message); + + return Task.FromResult(record); + } +} diff --git a/src/StellaOps.Scheduler.WebService/Schedules/ScheduleContracts.cs b/src/Scheduler/StellaOps.Scheduler.WebService/Schedules/ScheduleContracts.cs similarity index 98% rename from src/StellaOps.Scheduler.WebService/Schedules/ScheduleContracts.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/Schedules/ScheduleContracts.cs index efd12c5a..6af961ec 100644 --- a/src/StellaOps.Scheduler.WebService/Schedules/ScheduleContracts.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/Schedules/ScheduleContracts.cs @@ -1,34 +1,34 @@ -using System.Collections.Immutable; -using System.ComponentModel.DataAnnotations; -using System.Text.Json.Serialization; -using StellaOps.Scheduler.Models; -using StellaOps.Scheduler.Storage.Mongo.Projections; - -namespace StellaOps.Scheduler.WebService.Schedules; - -internal sealed record ScheduleCreateRequest( - [property: JsonPropertyName("name"), Required] string Name, - [property: JsonPropertyName("cronExpression"), Required] string CronExpression, - [property: JsonPropertyName("timezone"), Required] string Timezone, - [property: JsonPropertyName("mode")] ScheduleMode Mode = ScheduleMode.AnalysisOnly, - [property: JsonPropertyName("selection"), Required] Selector Selection = null!, - [property: JsonPropertyName("onlyIf")] ScheduleOnlyIf? OnlyIf = null, - [property: JsonPropertyName("notify")] ScheduleNotify? Notify = null, - [property: JsonPropertyName("limits")] ScheduleLimits? Limits = null, - [property: JsonPropertyName("subscribers")] ImmutableArray<string>? Subscribers = null, - [property: JsonPropertyName("enabled")] bool Enabled = true); - -internal sealed record ScheduleUpdateRequest( - [property: JsonPropertyName("name")] string? Name, - [property: JsonPropertyName("cronExpression")] string? CronExpression, - [property: JsonPropertyName("timezone")] string? Timezone, - [property: JsonPropertyName("mode")] ScheduleMode? Mode, - [property: JsonPropertyName("selection")] Selector? Selection, - [property: JsonPropertyName("onlyIf")] ScheduleOnlyIf? OnlyIf, - [property: JsonPropertyName("notify")] ScheduleNotify? Notify, - [property: JsonPropertyName("limits")] ScheduleLimits? Limits, - [property: JsonPropertyName("subscribers")] ImmutableArray<string>? Subscribers); - -internal sealed record ScheduleCollectionResponse(IReadOnlyList<ScheduleResponse> Schedules); - -internal sealed record ScheduleResponse(Schedule Schedule, RunSummaryProjection? 
Summary); +using System.Collections.Immutable; +using System.ComponentModel.DataAnnotations; +using System.Text.Json.Serialization; +using StellaOps.Scheduler.Models; +using StellaOps.Scheduler.Storage.Mongo.Projections; + +namespace StellaOps.Scheduler.WebService.Schedules; + +internal sealed record ScheduleCreateRequest( + [property: JsonPropertyName("name"), Required] string Name, + [property: JsonPropertyName("cronExpression"), Required] string CronExpression, + [property: JsonPropertyName("timezone"), Required] string Timezone, + [property: JsonPropertyName("mode")] ScheduleMode Mode = ScheduleMode.AnalysisOnly, + [property: JsonPropertyName("selection"), Required] Selector Selection = null!, + [property: JsonPropertyName("onlyIf")] ScheduleOnlyIf? OnlyIf = null, + [property: JsonPropertyName("notify")] ScheduleNotify? Notify = null, + [property: JsonPropertyName("limits")] ScheduleLimits? Limits = null, + [property: JsonPropertyName("subscribers")] ImmutableArray<string>? Subscribers = null, + [property: JsonPropertyName("enabled")] bool Enabled = true); + +internal sealed record ScheduleUpdateRequest( + [property: JsonPropertyName("name")] string? Name, + [property: JsonPropertyName("cronExpression")] string? CronExpression, + [property: JsonPropertyName("timezone")] string? Timezone, + [property: JsonPropertyName("mode")] ScheduleMode? Mode, + [property: JsonPropertyName("selection")] Selector? Selection, + [property: JsonPropertyName("onlyIf")] ScheduleOnlyIf? OnlyIf, + [property: JsonPropertyName("notify")] ScheduleNotify? Notify, + [property: JsonPropertyName("limits")] ScheduleLimits? Limits, + [property: JsonPropertyName("subscribers")] ImmutableArray<string>? Subscribers); + +internal sealed record ScheduleCollectionResponse(IReadOnlyList<ScheduleResponse> Schedules); + +internal sealed record ScheduleResponse(Schedule Schedule, RunSummaryProjection? 
Summary); diff --git a/src/StellaOps.Scheduler.WebService/Schedules/ScheduleEndpoints.cs b/src/Scheduler/StellaOps.Scheduler.WebService/Schedules/ScheduleEndpoints.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService/Schedules/ScheduleEndpoints.cs rename to src/Scheduler/StellaOps.Scheduler.WebService/Schedules/ScheduleEndpoints.cs index 2cee563d..cb443492 100644 --- a/src/StellaOps.Scheduler.WebService/Schedules/ScheduleEndpoints.cs +++ b/src/Scheduler/StellaOps.Scheduler.WebService/Schedules/ScheduleEndpoints.cs @@ -1,397 +1,397 @@ -using System.Collections.Immutable; -using System.ComponentModel.DataAnnotations; -using System.Linq; -using Microsoft.AspNetCore.Http; -using Microsoft.AspNetCore.Mvc; -using Microsoft.AspNetCore.Routing; -using Microsoft.Extensions.Logging; -using StellaOps.Scheduler.Models; -using StellaOps.Scheduler.Storage.Mongo.Repositories; -using StellaOps.Scheduler.Storage.Mongo.Services; -using StellaOps.Scheduler.WebService.Auth; - -namespace StellaOps.Scheduler.WebService.Schedules; - -internal static class ScheduleEndpoints -{ - private const string ReadScope = "scheduler.schedules.read"; - private const string WriteScope = "scheduler.schedules.write"; - - public static IEndpointRouteBuilder MapScheduleEndpoints(this IEndpointRouteBuilder routes) - { - var group = routes.MapGroup("/api/v1/scheduler/schedules"); - - group.MapGet("/", ListSchedulesAsync); - group.MapGet("/{scheduleId}", GetScheduleAsync); - group.MapPost("/", CreateScheduleAsync); - group.MapPatch("/{scheduleId}", UpdateScheduleAsync); - group.MapPost("/{scheduleId}/pause", PauseScheduleAsync); - group.MapPost("/{scheduleId}/resume", ResumeScheduleAsync); - - return routes; - } - - private static async Task<IResult> ListSchedulesAsync( - HttpContext httpContext, - [FromServices] ITenantContextAccessor tenantAccessor, - [FromServices] IScopeAuthorizer scopeAuthorizer, - [FromServices] IScheduleRepository repository, - [FromServices] IRunSummaryService runSummaryService, - CancellationToken cancellationToken) - { - try - { - scopeAuthorizer.EnsureScope(httpContext, ReadScope); - var tenant = tenantAccessor.GetTenant(httpContext); - - var includeDisabled = SchedulerEndpointHelpers.TryParseBoolean(httpContext.Request.Query.TryGetValue("includeDisabled", out var includeDisabledValues) ? includeDisabledValues.ToString() : null); - var includeDeleted = SchedulerEndpointHelpers.TryParseBoolean(httpContext.Request.Query.TryGetValue("includeDeleted", out var includeDeletedValues) ? includeDeletedValues.ToString() : null); - var limit = SchedulerEndpointHelpers.TryParsePositiveInt(httpContext.Request.Query.TryGetValue("limit", out var limitValues) ? 
limitValues.ToString() : null); - - var options = new ScheduleQueryOptions - { - IncludeDisabled = includeDisabled, - IncludeDeleted = includeDeleted, - Limit = limit - }; - - var schedules = await repository.ListAsync(tenant.TenantId, options, cancellationToken: cancellationToken).ConfigureAwait(false); - var summaries = await runSummaryService.ListAsync(tenant.TenantId, cancellationToken).ConfigureAwait(false); - var summaryLookup = summaries.ToDictionary(summary => summary.ScheduleId, summary => summary, StringComparer.Ordinal); - - var response = new ScheduleCollectionResponse( - schedules.Select(schedule => new ScheduleResponse(schedule, summaryLookup.GetValueOrDefault(schedule.Id))).ToArray()); - - return Results.Ok(response); - } - catch (Exception ex) when (ex is ArgumentException or ValidationException) - { - return Results.BadRequest(new { error = ex.Message }); - } - } - - private static async Task<IResult> GetScheduleAsync( - HttpContext httpContext, - string scheduleId, - [FromServices] ITenantContextAccessor tenantAccessor, - [FromServices] IScopeAuthorizer scopeAuthorizer, - [FromServices] IScheduleRepository repository, - [FromServices] IRunSummaryService runSummaryService, - CancellationToken cancellationToken) - { - try - { - scopeAuthorizer.EnsureScope(httpContext, ReadScope); - var tenant = tenantAccessor.GetTenant(httpContext); - - var schedule = await repository.GetAsync(tenant.TenantId, scheduleId, cancellationToken: cancellationToken).ConfigureAwait(false); - if (schedule is null) - { - return Results.NotFound(); - } - - var summary = await runSummaryService.GetAsync(tenant.TenantId, scheduleId, cancellationToken).ConfigureAwait(false); - return Results.Ok(new ScheduleResponse(schedule, summary)); - } - catch (Exception ex) when (ex is ArgumentException or ValidationException) - { - return Results.BadRequest(new { error = ex.Message }); - } - } - - private static async Task<IResult> CreateScheduleAsync( - HttpContext httpContext, - ScheduleCreateRequest request, - [FromServices] ITenantContextAccessor tenantAccessor, - [FromServices] IScopeAuthorizer scopeAuthorizer, - [FromServices] IScheduleRepository repository, - [FromServices] ISchedulerAuditService auditService, - [FromServices] TimeProvider timeProvider, - [FromServices] ILoggerFactory loggerFactory, - CancellationToken cancellationToken) - { - try - { - scopeAuthorizer.EnsureScope(httpContext, WriteScope); - ValidateRequest(request); - - var tenant = tenantAccessor.GetTenant(httpContext); - var now = timeProvider.GetUtcNow(); - - var selection = SchedulerEndpointHelpers.NormalizeSelector(request.Selection, tenant.TenantId); - var scheduleId = SchedulerEndpointHelpers.GenerateIdentifier("sch"); - - var subscribers = request.Subscribers ?? ImmutableArray<string>.Empty; - var schedule = new Schedule( - scheduleId, - tenant.TenantId, - request.Name.Trim(), - request.Enabled, - request.CronExpression.Trim(), - request.Timezone.Trim(), - request.Mode, - selection, - request.OnlyIf ?? ScheduleOnlyIf.Default, - request.Notify ?? ScheduleNotify.Default, - request.Limits ?? ScheduleLimits.Default, - subscribers.IsDefault ? 
ImmutableArray<string>.Empty : subscribers, - now, - SchedulerEndpointHelpers.ResolveActorId(httpContext), - now, - SchedulerEndpointHelpers.ResolveActorId(httpContext), - SchedulerSchemaVersions.Schedule); - - await repository.UpsertAsync(schedule, cancellationToken: cancellationToken).ConfigureAwait(false); - await auditService.WriteAsync( - new SchedulerAuditEvent( - tenant.TenantId, - "scheduler", - "create", - SchedulerEndpointHelpers.ResolveAuditActor(httpContext), - ScheduleId: schedule.Id, - Metadata: new Dictionary<string, string> - { - ["cronExpression"] = schedule.CronExpression, - ["timezone"] = schedule.Timezone - }), - cancellationToken).ConfigureAwait(false); - - var response = new ScheduleResponse(schedule, null); - return Results.Created($"/api/v1/scheduler/schedules/{schedule.Id}", response); - } - catch (Exception ex) when (ex is ArgumentException or ValidationException) - { - return Results.BadRequest(new { error = ex.Message }); - } - } - - private static async Task<IResult> UpdateScheduleAsync( - HttpContext httpContext, - string scheduleId, - ScheduleUpdateRequest request, - [FromServices] ITenantContextAccessor tenantAccessor, - [FromServices] IScopeAuthorizer scopeAuthorizer, - [FromServices] IScheduleRepository repository, - [FromServices] ISchedulerAuditService auditService, - [FromServices] TimeProvider timeProvider, - CancellationToken cancellationToken) - { - try - { - scopeAuthorizer.EnsureScope(httpContext, WriteScope); - var tenant = tenantAccessor.GetTenant(httpContext); - - var existing = await repository.GetAsync(tenant.TenantId, scheduleId, cancellationToken: cancellationToken).ConfigureAwait(false); - if (existing is null) - { - return Results.NotFound(); - } - - var updated = UpdateSchedule(existing, request, tenant.TenantId, timeProvider.GetUtcNow(), SchedulerEndpointHelpers.ResolveActorId(httpContext)); - await repository.UpsertAsync(updated, cancellationToken: cancellationToken).ConfigureAwait(false); - - await auditService.WriteAsync( - new SchedulerAuditEvent( - tenant.TenantId, - "scheduler", - "update", - SchedulerEndpointHelpers.ResolveAuditActor(httpContext), - ScheduleId: updated.Id, - Metadata: new Dictionary<string, string> - { - ["updatedAt"] = updated.UpdatedAt.ToString("O") - }), - cancellationToken).ConfigureAwait(false); - - return Results.Ok(new ScheduleResponse(updated, null)); - } - catch (Exception ex) when (ex is ArgumentException or ValidationException) - { - return Results.BadRequest(new { error = ex.Message }); - } - } - - private static async Task<IResult> PauseScheduleAsync( - HttpContext httpContext, - string scheduleId, - [FromServices] ITenantContextAccessor tenantAccessor, - [FromServices] IScopeAuthorizer scopeAuthorizer, - [FromServices] IScheduleRepository repository, - [FromServices] ISchedulerAuditService auditService, - [FromServices] TimeProvider timeProvider, - CancellationToken cancellationToken) - { - try - { - scopeAuthorizer.EnsureScope(httpContext, WriteScope); - var tenant = tenantAccessor.GetTenant(httpContext); - - var existing = await repository.GetAsync(tenant.TenantId, scheduleId, cancellationToken: cancellationToken).ConfigureAwait(false); - if (existing is null) - { - return Results.NotFound(); - } - - if (!existing.Enabled) - { - return Results.Ok(new ScheduleResponse(existing, null)); - } - - var now = timeProvider.GetUtcNow(); - var updated = new Schedule( - existing.Id, - existing.TenantId, - existing.Name, - enabled: false, - existing.CronExpression, - existing.Timezone, - existing.Mode, - 
existing.Selection, - existing.OnlyIf, - existing.Notify, - existing.Limits, - existing.Subscribers, - existing.CreatedAt, - existing.CreatedBy, - now, - SchedulerEndpointHelpers.ResolveActorId(httpContext), - existing.SchemaVersion); - - await repository.UpsertAsync(updated, cancellationToken: cancellationToken).ConfigureAwait(false); - await auditService.WriteAsync( - new SchedulerAuditEvent( - tenant.TenantId, - "scheduler", - "pause", - SchedulerEndpointHelpers.ResolveAuditActor(httpContext), - ScheduleId: scheduleId, - Metadata: new Dictionary<string, string> - { - ["enabled"] = "false" - }), - cancellationToken).ConfigureAwait(false); - - return Results.Ok(new ScheduleResponse(updated, null)); - } - catch (Exception ex) when (ex is ArgumentException or ValidationException) - { - return Results.BadRequest(new { error = ex.Message }); - } - } - - private static async Task<IResult> ResumeScheduleAsync( - HttpContext httpContext, - string scheduleId, - [FromServices] ITenantContextAccessor tenantAccessor, - [FromServices] IScopeAuthorizer scopeAuthorizer, - [FromServices] IScheduleRepository repository, - [FromServices] ISchedulerAuditService auditService, - [FromServices] TimeProvider timeProvider, - CancellationToken cancellationToken) - { - try - { - scopeAuthorizer.EnsureScope(httpContext, WriteScope); - var tenant = tenantAccessor.GetTenant(httpContext); - - var existing = await repository.GetAsync(tenant.TenantId, scheduleId, cancellationToken: cancellationToken).ConfigureAwait(false); - if (existing is null) - { - return Results.NotFound(); - } - - if (existing.Enabled) - { - return Results.Ok(new ScheduleResponse(existing, null)); - } - - var now = timeProvider.GetUtcNow(); - var updated = new Schedule( - existing.Id, - existing.TenantId, - existing.Name, - enabled: true, - existing.CronExpression, - existing.Timezone, - existing.Mode, - existing.Selection, - existing.OnlyIf, - existing.Notify, - existing.Limits, - existing.Subscribers, - existing.CreatedAt, - existing.CreatedBy, - now, - SchedulerEndpointHelpers.ResolveActorId(httpContext), - existing.SchemaVersion); - - await repository.UpsertAsync(updated, cancellationToken: cancellationToken).ConfigureAwait(false); - await auditService.WriteAsync( - new SchedulerAuditEvent( - tenant.TenantId, - "scheduler", - "resume", - SchedulerEndpointHelpers.ResolveAuditActor(httpContext), - ScheduleId: scheduleId, - Metadata: new Dictionary<string, string> - { - ["enabled"] = "true" - }), - cancellationToken).ConfigureAwait(false); - - return Results.Ok(new ScheduleResponse(updated, null)); - } - catch (Exception ex) when (ex is ArgumentException or ValidationException) - { - return Results.BadRequest(new { error = ex.Message }); - } - } - - private static void ValidateRequest(ScheduleCreateRequest request) - { - if (request.Selection is null) - { - throw new ValidationException("Selection is required."); - } - } - - private static Schedule UpdateSchedule( - Schedule existing, - ScheduleUpdateRequest request, - string tenantId, - DateTimeOffset updatedAt, - string actor) - { - var name = request.Name?.Trim() ?? existing.Name; - var cronExpression = request.CronExpression?.Trim() ?? existing.CronExpression; - var timezone = request.Timezone?.Trim() ?? existing.Timezone; - var mode = request.Mode ?? existing.Mode; - var selection = request.Selection is null - ? existing.Selection - : SchedulerEndpointHelpers.NormalizeSelector(request.Selection, tenantId); - var onlyIf = request.OnlyIf ?? existing.OnlyIf; - var notify = request.Notify ?? 
existing.Notify; - var limits = request.Limits ?? existing.Limits; - var subscribers = request.Subscribers ?? existing.Subscribers; - - return new Schedule( - existing.Id, - existing.TenantId, - name, - existing.Enabled, - cronExpression, - timezone, - mode, - selection, - onlyIf, - notify, - limits, - subscribers.IsDefault ? ImmutableArray<string>.Empty : subscribers, - existing.CreatedAt, - existing.CreatedBy, - updatedAt, - actor, - existing.SchemaVersion); - } - -} +using System.Collections.Immutable; +using System.ComponentModel.DataAnnotations; +using System.Linq; +using Microsoft.AspNetCore.Http; +using Microsoft.AspNetCore.Mvc; +using Microsoft.AspNetCore.Routing; +using Microsoft.Extensions.Logging; +using StellaOps.Scheduler.Models; +using StellaOps.Scheduler.Storage.Mongo.Repositories; +using StellaOps.Scheduler.Storage.Mongo.Services; +using StellaOps.Scheduler.WebService.Auth; + +namespace StellaOps.Scheduler.WebService.Schedules; + +internal static class ScheduleEndpoints +{ + private const string ReadScope = "scheduler.schedules.read"; + private const string WriteScope = "scheduler.schedules.write"; + + public static IEndpointRouteBuilder MapScheduleEndpoints(this IEndpointRouteBuilder routes) + { + var group = routes.MapGroup("/api/v1/scheduler/schedules"); + + group.MapGet("/", ListSchedulesAsync); + group.MapGet("/{scheduleId}", GetScheduleAsync); + group.MapPost("/", CreateScheduleAsync); + group.MapPatch("/{scheduleId}", UpdateScheduleAsync); + group.MapPost("/{scheduleId}/pause", PauseScheduleAsync); + group.MapPost("/{scheduleId}/resume", ResumeScheduleAsync); + + return routes; + } + + private static async Task<IResult> ListSchedulesAsync( + HttpContext httpContext, + [FromServices] ITenantContextAccessor tenantAccessor, + [FromServices] IScopeAuthorizer scopeAuthorizer, + [FromServices] IScheduleRepository repository, + [FromServices] IRunSummaryService runSummaryService, + CancellationToken cancellationToken) + { + try + { + scopeAuthorizer.EnsureScope(httpContext, ReadScope); + var tenant = tenantAccessor.GetTenant(httpContext); + + var includeDisabled = SchedulerEndpointHelpers.TryParseBoolean(httpContext.Request.Query.TryGetValue("includeDisabled", out var includeDisabledValues) ? includeDisabledValues.ToString() : null); + var includeDeleted = SchedulerEndpointHelpers.TryParseBoolean(httpContext.Request.Query.TryGetValue("includeDeleted", out var includeDeletedValues) ? includeDeletedValues.ToString() : null); + var limit = SchedulerEndpointHelpers.TryParsePositiveInt(httpContext.Request.Query.TryGetValue("limit", out var limitValues) ? 
limitValues.ToString() : null); + + var options = new ScheduleQueryOptions + { + IncludeDisabled = includeDisabled, + IncludeDeleted = includeDeleted, + Limit = limit + }; + + var schedules = await repository.ListAsync(tenant.TenantId, options, cancellationToken: cancellationToken).ConfigureAwait(false); + var summaries = await runSummaryService.ListAsync(tenant.TenantId, cancellationToken).ConfigureAwait(false); + var summaryLookup = summaries.ToDictionary(summary => summary.ScheduleId, summary => summary, StringComparer.Ordinal); + + var response = new ScheduleCollectionResponse( + schedules.Select(schedule => new ScheduleResponse(schedule, summaryLookup.GetValueOrDefault(schedule.Id))).ToArray()); + + return Results.Ok(response); + } + catch (Exception ex) when (ex is ArgumentException or ValidationException) + { + return Results.BadRequest(new { error = ex.Message }); + } + } + + private static async Task<IResult> GetScheduleAsync( + HttpContext httpContext, + string scheduleId, + [FromServices] ITenantContextAccessor tenantAccessor, + [FromServices] IScopeAuthorizer scopeAuthorizer, + [FromServices] IScheduleRepository repository, + [FromServices] IRunSummaryService runSummaryService, + CancellationToken cancellationToken) + { + try + { + scopeAuthorizer.EnsureScope(httpContext, ReadScope); + var tenant = tenantAccessor.GetTenant(httpContext); + + var schedule = await repository.GetAsync(tenant.TenantId, scheduleId, cancellationToken: cancellationToken).ConfigureAwait(false); + if (schedule is null) + { + return Results.NotFound(); + } + + var summary = await runSummaryService.GetAsync(tenant.TenantId, scheduleId, cancellationToken).ConfigureAwait(false); + return Results.Ok(new ScheduleResponse(schedule, summary)); + } + catch (Exception ex) when (ex is ArgumentException or ValidationException) + { + return Results.BadRequest(new { error = ex.Message }); + } + } + + private static async Task<IResult> CreateScheduleAsync( + HttpContext httpContext, + ScheduleCreateRequest request, + [FromServices] ITenantContextAccessor tenantAccessor, + [FromServices] IScopeAuthorizer scopeAuthorizer, + [FromServices] IScheduleRepository repository, + [FromServices] ISchedulerAuditService auditService, + [FromServices] TimeProvider timeProvider, + [FromServices] ILoggerFactory loggerFactory, + CancellationToken cancellationToken) + { + try + { + scopeAuthorizer.EnsureScope(httpContext, WriteScope); + ValidateRequest(request); + + var tenant = tenantAccessor.GetTenant(httpContext); + var now = timeProvider.GetUtcNow(); + + var selection = SchedulerEndpointHelpers.NormalizeSelector(request.Selection, tenant.TenantId); + var scheduleId = SchedulerEndpointHelpers.GenerateIdentifier("sch"); + + var subscribers = request.Subscribers ?? ImmutableArray<string>.Empty; + var schedule = new Schedule( + scheduleId, + tenant.TenantId, + request.Name.Trim(), + request.Enabled, + request.CronExpression.Trim(), + request.Timezone.Trim(), + request.Mode, + selection, + request.OnlyIf ?? ScheduleOnlyIf.Default, + request.Notify ?? ScheduleNotify.Default, + request.Limits ?? ScheduleLimits.Default, + subscribers.IsDefault ? 
ImmutableArray<string>.Empty : subscribers, + now, + SchedulerEndpointHelpers.ResolveActorId(httpContext), + now, + SchedulerEndpointHelpers.ResolveActorId(httpContext), + SchedulerSchemaVersions.Schedule); + + await repository.UpsertAsync(schedule, cancellationToken: cancellationToken).ConfigureAwait(false); + await auditService.WriteAsync( + new SchedulerAuditEvent( + tenant.TenantId, + "scheduler", + "create", + SchedulerEndpointHelpers.ResolveAuditActor(httpContext), + ScheduleId: schedule.Id, + Metadata: new Dictionary<string, string> + { + ["cronExpression"] = schedule.CronExpression, + ["timezone"] = schedule.Timezone + }), + cancellationToken).ConfigureAwait(false); + + var response = new ScheduleResponse(schedule, null); + return Results.Created($"/api/v1/scheduler/schedules/{schedule.Id}", response); + } + catch (Exception ex) when (ex is ArgumentException or ValidationException) + { + return Results.BadRequest(new { error = ex.Message }); + } + } + + private static async Task<IResult> UpdateScheduleAsync( + HttpContext httpContext, + string scheduleId, + ScheduleUpdateRequest request, + [FromServices] ITenantContextAccessor tenantAccessor, + [FromServices] IScopeAuthorizer scopeAuthorizer, + [FromServices] IScheduleRepository repository, + [FromServices] ISchedulerAuditService auditService, + [FromServices] TimeProvider timeProvider, + CancellationToken cancellationToken) + { + try + { + scopeAuthorizer.EnsureScope(httpContext, WriteScope); + var tenant = tenantAccessor.GetTenant(httpContext); + + var existing = await repository.GetAsync(tenant.TenantId, scheduleId, cancellationToken: cancellationToken).ConfigureAwait(false); + if (existing is null) + { + return Results.NotFound(); + } + + var updated = UpdateSchedule(existing, request, tenant.TenantId, timeProvider.GetUtcNow(), SchedulerEndpointHelpers.ResolveActorId(httpContext)); + await repository.UpsertAsync(updated, cancellationToken: cancellationToken).ConfigureAwait(false); + + await auditService.WriteAsync( + new SchedulerAuditEvent( + tenant.TenantId, + "scheduler", + "update", + SchedulerEndpointHelpers.ResolveAuditActor(httpContext), + ScheduleId: updated.Id, + Metadata: new Dictionary<string, string> + { + ["updatedAt"] = updated.UpdatedAt.ToString("O") + }), + cancellationToken).ConfigureAwait(false); + + return Results.Ok(new ScheduleResponse(updated, null)); + } + catch (Exception ex) when (ex is ArgumentException or ValidationException) + { + return Results.BadRequest(new { error = ex.Message }); + } + } + + private static async Task<IResult> PauseScheduleAsync( + HttpContext httpContext, + string scheduleId, + [FromServices] ITenantContextAccessor tenantAccessor, + [FromServices] IScopeAuthorizer scopeAuthorizer, + [FromServices] IScheduleRepository repository, + [FromServices] ISchedulerAuditService auditService, + [FromServices] TimeProvider timeProvider, + CancellationToken cancellationToken) + { + try + { + scopeAuthorizer.EnsureScope(httpContext, WriteScope); + var tenant = tenantAccessor.GetTenant(httpContext); + + var existing = await repository.GetAsync(tenant.TenantId, scheduleId, cancellationToken: cancellationToken).ConfigureAwait(false); + if (existing is null) + { + return Results.NotFound(); + } + + if (!existing.Enabled) + { + return Results.Ok(new ScheduleResponse(existing, null)); + } + + var now = timeProvider.GetUtcNow(); + var updated = new Schedule( + existing.Id, + existing.TenantId, + existing.Name, + enabled: false, + existing.CronExpression, + existing.Timezone, + existing.Mode, + 
existing.Selection, + existing.OnlyIf, + existing.Notify, + existing.Limits, + existing.Subscribers, + existing.CreatedAt, + existing.CreatedBy, + now, + SchedulerEndpointHelpers.ResolveActorId(httpContext), + existing.SchemaVersion); + + await repository.UpsertAsync(updated, cancellationToken: cancellationToken).ConfigureAwait(false); + await auditService.WriteAsync( + new SchedulerAuditEvent( + tenant.TenantId, + "scheduler", + "pause", + SchedulerEndpointHelpers.ResolveAuditActor(httpContext), + ScheduleId: scheduleId, + Metadata: new Dictionary<string, string> + { + ["enabled"] = "false" + }), + cancellationToken).ConfigureAwait(false); + + return Results.Ok(new ScheduleResponse(updated, null)); + } + catch (Exception ex) when (ex is ArgumentException or ValidationException) + { + return Results.BadRequest(new { error = ex.Message }); + } + } + + private static async Task<IResult> ResumeScheduleAsync( + HttpContext httpContext, + string scheduleId, + [FromServices] ITenantContextAccessor tenantAccessor, + [FromServices] IScopeAuthorizer scopeAuthorizer, + [FromServices] IScheduleRepository repository, + [FromServices] ISchedulerAuditService auditService, + [FromServices] TimeProvider timeProvider, + CancellationToken cancellationToken) + { + try + { + scopeAuthorizer.EnsureScope(httpContext, WriteScope); + var tenant = tenantAccessor.GetTenant(httpContext); + + var existing = await repository.GetAsync(tenant.TenantId, scheduleId, cancellationToken: cancellationToken).ConfigureAwait(false); + if (existing is null) + { + return Results.NotFound(); + } + + if (existing.Enabled) + { + return Results.Ok(new ScheduleResponse(existing, null)); + } + + var now = timeProvider.GetUtcNow(); + var updated = new Schedule( + existing.Id, + existing.TenantId, + existing.Name, + enabled: true, + existing.CronExpression, + existing.Timezone, + existing.Mode, + existing.Selection, + existing.OnlyIf, + existing.Notify, + existing.Limits, + existing.Subscribers, + existing.CreatedAt, + existing.CreatedBy, + now, + SchedulerEndpointHelpers.ResolveActorId(httpContext), + existing.SchemaVersion); + + await repository.UpsertAsync(updated, cancellationToken: cancellationToken).ConfigureAwait(false); + await auditService.WriteAsync( + new SchedulerAuditEvent( + tenant.TenantId, + "scheduler", + "resume", + SchedulerEndpointHelpers.ResolveAuditActor(httpContext), + ScheduleId: scheduleId, + Metadata: new Dictionary<string, string> + { + ["enabled"] = "true" + }), + cancellationToken).ConfigureAwait(false); + + return Results.Ok(new ScheduleResponse(updated, null)); + } + catch (Exception ex) when (ex is ArgumentException or ValidationException) + { + return Results.BadRequest(new { error = ex.Message }); + } + } + + private static void ValidateRequest(ScheduleCreateRequest request) + { + if (request.Selection is null) + { + throw new ValidationException("Selection is required."); + } + } + + private static Schedule UpdateSchedule( + Schedule existing, + ScheduleUpdateRequest request, + string tenantId, + DateTimeOffset updatedAt, + string actor) + { + var name = request.Name?.Trim() ?? existing.Name; + var cronExpression = request.CronExpression?.Trim() ?? existing.CronExpression; + var timezone = request.Timezone?.Trim() ?? existing.Timezone; + var mode = request.Mode ?? existing.Mode; + var selection = request.Selection is null + ? existing.Selection + : SchedulerEndpointHelpers.NormalizeSelector(request.Selection, tenantId); + var onlyIf = request.OnlyIf ?? existing.OnlyIf; + var notify = request.Notify ?? 
existing.Notify; + var limits = request.Limits ?? existing.Limits; + var subscribers = request.Subscribers ?? existing.Subscribers; + + return new Schedule( + existing.Id, + existing.TenantId, + name, + existing.Enabled, + cronExpression, + timezone, + mode, + selection, + onlyIf, + notify, + limits, + subscribers.IsDefault ? ImmutableArray<string>.Empty : subscribers, + existing.CreatedAt, + existing.CreatedBy, + updatedAt, + actor, + existing.SchemaVersion); + } + +} diff --git a/src/Scheduler/StellaOps.Scheduler.WebService/StellaOps.Scheduler.WebService.csproj b/src/Scheduler/StellaOps.Scheduler.WebService/StellaOps.Scheduler.WebService.csproj new file mode 100644 index 00000000..420267be --- /dev/null +++ b/src/Scheduler/StellaOps.Scheduler.WebService/StellaOps.Scheduler.WebService.csproj @@ -0,0 +1,16 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk.Web"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../__Libraries/StellaOps.Scheduler.Models/StellaOps.Scheduler.Models.csproj" /> + <ProjectReference Include="../__Libraries/StellaOps.Scheduler.Storage.Mongo/StellaOps.Scheduler.Storage.Mongo.csproj" /> + <ProjectReference Include="../__Libraries/StellaOps.Scheduler.ImpactIndex/StellaOps.Scheduler.ImpactIndex.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj" /> + <ProjectReference Include="../../Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOps.Auth.Abstractions.csproj" /> + <ProjectReference Include="../../Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOps.Auth.ServerIntegration.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Scheduler.WebService/TASKS.md b/src/Scheduler/StellaOps.Scheduler.WebService/TASKS.md similarity index 98% rename from src/StellaOps.Scheduler.WebService/TASKS.md rename to src/Scheduler/StellaOps.Scheduler.WebService/TASKS.md index 06d3bfc4..9823336f 100644 --- a/src/StellaOps.Scheduler.WebService/TASKS.md +++ b/src/Scheduler/StellaOps.Scheduler.WebService/TASKS.md @@ -13,7 +13,7 @@ |----|--------|----------|------------|-------------|---------------| | SCHED-WEB-20-001 | DONE (2025-10-29) | Scheduler WebService Guild, Policy Guild | SCHED-WEB-16-101, POLICY-ENGINE-20-000 | Expose policy run scheduling APIs (`POST /policy/runs`, `GET /policy/runs`) with tenant scoping and RBAC enforcement for `policy:run`. | Endpoints documented; integration tests cover run creation/status; unauthorized access blocked. | > 2025-10-29: Added `/api/v1/scheduler/policy/runs` create/list/get endpoints with in-memory queue, scope/tenant enforcement, and contract docs (`docs/SCHED-WEB-20-001-POLICY-RUNS.md`). Tests cover happy path + auth failures. -> 2025-10-26: Use canonical request/response samples from `samples/api/scheduler/policy-*.json`; serializer contract defined in `src/StellaOps.Scheduler.Models/docs/SCHED-MODELS-20-001-POLICY-RUNS.md`. +> 2025-10-26: Use canonical request/response samples from `samples/api/scheduler/policy-*.json`; serializer contract defined in `src/Scheduler/__Libraries/StellaOps.Scheduler.Models/docs/SCHED-MODELS-20-001-POLICY-RUNS.md`. 
| SCHED-WEB-20-002 | BLOCKED (waiting on SCHED-WORKER-20-301) | Scheduler WebService Guild | SCHED-WEB-20-001, SCHED-WORKER-20-301 | Provide simulation trigger endpoint returning diff preview metadata and job state for UI/CLI consumption. | Simulation endpoint returns deterministic diffs metadata; rate limits enforced; tests cover concurrency. | > 2025-10-29: WebService requires Worker policy job orchestration + Policy Engine diff callbacks (POLICY-ENGINE-20-003/006) to provide simulation previews. Awaiting completion of SCHED-WORKER-20-301 before wiring API. diff --git a/src/StellaOps.Scheduler.WebService/docs/SCHED-WEB-16-103-RUN-APIS.md b/src/Scheduler/StellaOps.Scheduler.WebService/docs/SCHED-WEB-16-103-RUN-APIS.md similarity index 96% rename from src/StellaOps.Scheduler.WebService/docs/SCHED-WEB-16-103-RUN-APIS.md rename to src/Scheduler/StellaOps.Scheduler.WebService/docs/SCHED-WEB-16-103-RUN-APIS.md index 8ef59c59..b1f392e7 100644 --- a/src/StellaOps.Scheduler.WebService/docs/SCHED-WEB-16-103-RUN-APIS.md +++ b/src/Scheduler/StellaOps.Scheduler.WebService/docs/SCHED-WEB-16-103-RUN-APIS.md @@ -1,183 +1,183 @@ -# SCHED-WEB-16-103 — Scheduler Run APIs - -> Status: 2025-10-26 — **Developer preview** (schema solid, planner/worker integration pending). - -## Endpoints - -| Method | Path | Description | Scopes | -| ------ | ---- | ----------- | ------ | -| `GET` | `/api/v1/scheduler/runs` | List runs for the current tenant (filter by schedule, state, createdAfter). | `scheduler.runs.read` | -| `GET` | `/api/v1/scheduler/runs/{runId}` | Retrieve run details. | `scheduler.runs.read` | -| `POST` | `/api/v1/scheduler/runs` | Create an ad-hoc run bound to an existing schedule. | `scheduler.runs.write` | -| `POST` | `/api/v1/scheduler/runs/{runId}/cancel` | Transition a run to `cancelled` when still in a non-terminal state. | `scheduler.runs.write` | -| `POST` | `/api/v1/scheduler/runs/preview` | Resolve impacted images using the ImpactIndex without enqueuing work. | `scheduler.runs.preview` | - -All endpoints require a tenant context (`X-Tenant-Id`) and the appropriate scheduler scopes. Development mode allows header-based auth; production deployments must rely on Authority-issued tokens (OpTok + DPoP). 
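For programmatic callers, the raw HTTP examples in this document translate directly to an `HttpClient` call. A minimal sketch, assuming development-mode header auth as described above, an already-acquired Authority token, and an illustrative base address (the URL, the `opTok` variable, and the anonymous request type are placeholders, not part of the service contract):

```csharp
using System.Net.Http;
using System.Net.Http.Headers;
using System.Net.Http.Json;

// Illustrative values only: real deployments resolve the base address and
// token from configuration / Authority, not hard-coded literals.
var opTok = "<OpTok>";
var client = new HttpClient { BaseAddress = new Uri("https://scheduler.internal/") };
client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", opTok);
client.DefaultRequestHeaders.Add("X-Tenant-Id", "tenant-alpha");

// Create an ad-hoc (manual) run bound to an existing schedule, mirroring the
// "Create Run" HTTP example below.
var response = await client.PostAsJsonAsync("api/v1/scheduler/runs", new
{
    scheduleId = "sch_4f2c7d9e0a2b4c64a0e7b5f9d65c1234",
    trigger = "manual",
    reason = new { manualReason = "Nightly backfill" },
    correlationId = "backfill-2025-10-26"
});

response.EnsureSuccessStatusCode(); // expect 201 Created with the run payload in the body
```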
- -## Create Run (manual trigger) - -```http -POST /api/v1/scheduler/runs -X-Tenant-Id: tenant-alpha -Authorization: Bearer <OpTok> -``` - -```json -{ - "scheduleId": "sch_4f2c7d9e0a2b4c64a0e7b5f9d65c1234", - "trigger": "manual", - "reason": { - "manualReason": "Nightly backfill" - }, - "correlationId": "backfill-2025-10-26" -} -``` - -```json -HTTP/1.1 201 Created -Location: /api/v1/scheduler/runs/run_c7b4e9d2f6a04f8784a40476d8a2f771 -{ - "run": { - "schemaVersion": "scheduler.run@1", - "id": "run_c7b4e9d2f6a04f8784a40476d8a2f771", - "tenantId": "tenant-alpha", - "scheduleId": "sch_4f2c7d9e0a2b4c64a0e7b5f9d65c1234", - "trigger": "manual", - "state": "planning", - "stats": { - "candidates": 0, - "deduped": 0, - "queued": 0, - "completed": 0, - "deltas": 0, - "newCriticals": 0, - "newHigh": 0, - "newMedium": 0, - "newLow": 0 - }, - "reason": { - "manualReason": "Nightly backfill" - }, - "createdAt": "2025-10-26T03:12:45Z" - } -} -``` - -## List Runs - -```http -GET /api/v1/scheduler/runs?scheduleId=sch_4f2c7d9e0a2b4c64a0e7b5f9d65c1234&state=planning&limit=10 -``` - -```json -{ - "runs": [ - { - "schemaVersion": "scheduler.run@1", - "id": "run_c7b4e9d2f6a04f8784a40476d8a2f771", - "tenantId": "tenant-alpha", - "scheduleId": "sch_4f2c7d9e0a2b4c64a0e7b5f9d65c1234", - "trigger": "manual", - "state": "planning", - "stats": { - "candidates": 0, - "deduped": 0, - "queued": 0, - "completed": 0, - "deltas": 0, - "newCriticals": 0, - "newHigh": 0, - "newMedium": 0, - "newLow": 0 - }, - "reason": { - "manualReason": "Nightly backfill" - }, - "createdAt": "2025-10-26T03:12:45Z" - } - ] -} -``` - -## Cancel Run - -```http -POST /api/v1/scheduler/runs/run_c7b4e9d2f6a04f8784a40476d8a2f771/cancel -``` - -```json -{ - "run": { - "schemaVersion": "scheduler.run@1", - "id": "run_c7b4e9d2f6a04f8784a40476d8a2f771", - "tenantId": "tenant-alpha", - "scheduleId": "sch_4f2c7d9e0a2b4c64a0e7b5f9d65c1234", - "trigger": "manual", - "state": "cancelled", - "stats": { - "candidates": 0, - "deduped": 0, - "queued": 0, - "completed": 0, - "deltas": 0, - "newCriticals": 0, - "newHigh": 0, - "newMedium": 0, - "newLow": 0 - }, - "reason": { - "manualReason": "Nightly backfill" - }, - "createdAt": "2025-10-26T03:12:45Z", - "finishedAt": "2025-10-26T03:13:02Z" - } -} -``` - -## Impact Preview - -`/api/v1/scheduler/runs/preview` resolves impacted images via the ImpactIndex without mutating state. When `scheduleId` is provided the schedule selector is reused; callers may alternatively supply an explicit selector. - -```http -POST /api/v1/scheduler/runs/preview -``` - -```json -{ - "scheduleId": "sch_4f2c7d9e0a2b4c64a0e7b5f9d65c1234", - "usageOnly": true, - "sampleSize": 5 -} -``` - -```json -{ - "total": 128, - "usageOnly": true, - "generatedAt": "2025-10-26T03:12:47Z", - "snapshotId": "impact-snapshot-20251026", - "sample": [ - { - "imageDigest": "sha256:0b1f...", - "registry": "internal", - "repository": "service-api", - "namespaces": ["prod"], - "tags": ["prod-2025-10-01"], - "usedByEntrypoint": true - } - ] -} -``` - -### Validation rules - -* `scheduleId` is mandatory for run creation; ad-hoc selectors will be added alongside planner support. -* Cancelling a run already in a terminal state returns `409 Conflict`. -* Preview requires either `scheduleId` or `selector` (mutually exclusive). -* `sampleSize` is clamped to `1..50` to keep responses deterministic and lightweight. - -### Integration notes - -* Run creation and cancellation produce audit entries under category `scheduler.run` with correlation metadata when provided. 
-* The preview endpoint relies on the ImpactIndex stub in development. Production deployments must register the concrete index implementation before use. -* Planner/worker orchestration tasks will wire run creation to queueing in SCHED-WORKER-16-201/202. +# SCHED-WEB-16-103 — Scheduler Run APIs + +> Status: 2025-10-26 — **Developer preview** (schema solid, planner/worker integration pending). + +## Endpoints + +| Method | Path | Description | Scopes | +| ------ | ---- | ----------- | ------ | +| `GET` | `/api/v1/scheduler/runs` | List runs for the current tenant (filter by schedule, state, createdAfter). | `scheduler.runs.read` | +| `GET` | `/api/v1/scheduler/runs/{runId}` | Retrieve run details. | `scheduler.runs.read` | +| `POST` | `/api/v1/scheduler/runs` | Create an ad-hoc run bound to an existing schedule. | `scheduler.runs.write` | +| `POST` | `/api/v1/scheduler/runs/{runId}/cancel` | Transition a run to `cancelled` when still in a non-terminal state. | `scheduler.runs.write` | +| `POST` | `/api/v1/scheduler/runs/preview` | Resolve impacted images using the ImpactIndex without enqueuing work. | `scheduler.runs.preview` | + +All endpoints require a tenant context (`X-Tenant-Id`) and the appropriate scheduler scopes. Development mode allows header-based auth; production deployments must rely on Authority-issued tokens (OpTok + DPoP). + +## Create Run (manual trigger) + +```http +POST /api/v1/scheduler/runs +X-Tenant-Id: tenant-alpha +Authorization: Bearer <OpTok> +``` + +```json +{ + "scheduleId": "sch_4f2c7d9e0a2b4c64a0e7b5f9d65c1234", + "trigger": "manual", + "reason": { + "manualReason": "Nightly backfill" + }, + "correlationId": "backfill-2025-10-26" +} +``` + +```json +HTTP/1.1 201 Created +Location: /api/v1/scheduler/runs/run_c7b4e9d2f6a04f8784a40476d8a2f771 +{ + "run": { + "schemaVersion": "scheduler.run@1", + "id": "run_c7b4e9d2f6a04f8784a40476d8a2f771", + "tenantId": "tenant-alpha", + "scheduleId": "sch_4f2c7d9e0a2b4c64a0e7b5f9d65c1234", + "trigger": "manual", + "state": "planning", + "stats": { + "candidates": 0, + "deduped": 0, + "queued": 0, + "completed": 0, + "deltas": 0, + "newCriticals": 0, + "newHigh": 0, + "newMedium": 0, + "newLow": 0 + }, + "reason": { + "manualReason": "Nightly backfill" + }, + "createdAt": "2025-10-26T03:12:45Z" + } +} +``` + +## List Runs + +```http +GET /api/v1/scheduler/runs?scheduleId=sch_4f2c7d9e0a2b4c64a0e7b5f9d65c1234&state=planning&limit=10 +``` + +```json +{ + "runs": [ + { + "schemaVersion": "scheduler.run@1", + "id": "run_c7b4e9d2f6a04f8784a40476d8a2f771", + "tenantId": "tenant-alpha", + "scheduleId": "sch_4f2c7d9e0a2b4c64a0e7b5f9d65c1234", + "trigger": "manual", + "state": "planning", + "stats": { + "candidates": 0, + "deduped": 0, + "queued": 0, + "completed": 0, + "deltas": 0, + "newCriticals": 0, + "newHigh": 0, + "newMedium": 0, + "newLow": 0 + }, + "reason": { + "manualReason": "Nightly backfill" + }, + "createdAt": "2025-10-26T03:12:45Z" + } + ] +} +``` + +## Cancel Run + +```http +POST /api/v1/scheduler/runs/run_c7b4e9d2f6a04f8784a40476d8a2f771/cancel +``` + +```json +{ + "run": { + "schemaVersion": "scheduler.run@1", + "id": "run_c7b4e9d2f6a04f8784a40476d8a2f771", + "tenantId": "tenant-alpha", + "scheduleId": "sch_4f2c7d9e0a2b4c64a0e7b5f9d65c1234", + "trigger": "manual", + "state": "cancelled", + "stats": { + "candidates": 0, + "deduped": 0, + "queued": 0, + "completed": 0, + "deltas": 0, + "newCriticals": 0, + "newHigh": 0, + "newMedium": 0, + "newLow": 0 + }, + "reason": { + "manualReason": "Nightly backfill" + }, + 
"createdAt": "2025-10-26T03:12:45Z", + "finishedAt": "2025-10-26T03:13:02Z" + } +} +``` + +## Impact Preview + +`/api/v1/scheduler/runs/preview` resolves impacted images via the ImpactIndex without mutating state. When `scheduleId` is provided the schedule selector is reused; callers may alternatively supply an explicit selector. + +```http +POST /api/v1/scheduler/runs/preview +``` + +```json +{ + "scheduleId": "sch_4f2c7d9e0a2b4c64a0e7b5f9d65c1234", + "usageOnly": true, + "sampleSize": 5 +} +``` + +```json +{ + "total": 128, + "usageOnly": true, + "generatedAt": "2025-10-26T03:12:47Z", + "snapshotId": "impact-snapshot-20251026", + "sample": [ + { + "imageDigest": "sha256:0b1f...", + "registry": "internal", + "repository": "service-api", + "namespaces": ["prod"], + "tags": ["prod-2025-10-01"], + "usedByEntrypoint": true + } + ] +} +``` + +### Validation rules + +* `scheduleId` is mandatory for run creation; ad-hoc selectors will be added alongside planner support. +* Cancelling a run already in a terminal state returns `409 Conflict`. +* Preview requires either `scheduleId` or `selector` (mutually exclusive). +* `sampleSize` is clamped to `1..50` to keep responses deterministic and lightweight. + +### Integration notes + +* Run creation and cancellation produce audit entries under category `scheduler.run` with correlation metadata when provided. +* The preview endpoint relies on the ImpactIndex stub in development. Production deployments must register the concrete index implementation before use. +* Planner/worker orchestration tasks will wire run creation to queueing in SCHED-WORKER-16-201/202. diff --git a/src/StellaOps.Scheduler.WebService/docs/SCHED-WEB-16-104-WEBHOOKS.md b/src/Scheduler/StellaOps.Scheduler.WebService/docs/SCHED-WEB-16-104-WEBHOOKS.md similarity index 97% rename from src/StellaOps.Scheduler.WebService/docs/SCHED-WEB-16-104-WEBHOOKS.md rename to src/Scheduler/StellaOps.Scheduler.WebService/docs/SCHED-WEB-16-104-WEBHOOKS.md index aa1f893c..8dd6507d 100644 --- a/src/StellaOps.Scheduler.WebService/docs/SCHED-WEB-16-104-WEBHOOKS.md +++ b/src/Scheduler/StellaOps.Scheduler.WebService/docs/SCHED-WEB-16-104-WEBHOOKS.md @@ -1,58 +1,58 @@ -# SCHED-WEB-16-104 · Feedser/Vexer Webhook Endpoints - -## Overview - -Scheduler.WebService exposes inbound webhooks that allow Feedser and Vexer to -notify the planner when new exports are available. Each webhook validates the -payload, enforces signature requirements, and applies a per-endpoint rate -limit before queuing downstream processing. - -| Endpoint | Description | AuthZ | -|----------|-------------|-------| -| `POST /events/feedser-export` | Ingest Feedser export metadata (`exportId`, `changedProductKeys`, optional KEV & window). | HMAC `X-Scheduler-Signature` and/or mTLS client certificate | -| `POST /events/vexer-export` | Ingest Vexer export delta summary (`changedClaims`). | HMAC `X-Scheduler-Signature` and/or mTLS client certificate | - -## Security - -* Webhooks require either: - * mTLS with trusted client certificates; **or** - * an HMAC-SHA256 signature in the `X-Scheduler-Signature` header. The - signature must be computed as `sha256=<hex>` over the raw request body. -* Requests without the required signature/certificate return `401`. -* Secrets are configured under `Scheduler:Events:Webhooks:{Feedser|Vexer}:HmacSecret`. - -## Rate limiting - -* Each webhook enforces a sliding-window limit (`RateLimitRequests` over - `RateLimitWindowSeconds`). -* Requests over the limit return `429` and include a `Retry-After` header. 
-* Defaults: 120 requests / 60 seconds. Adjust via configuration. - -## Configuration - -``` -Scheduler: - Events: - Webhooks: - Feedser: - Enabled: true - HmacSecret: feedser-secret - RequireClientCertificate: false - RateLimitRequests: 120 - RateLimitWindowSeconds: 60 - Vexer: - Enabled: true - HmacSecret: vexer-secret - RequireClientCertificate: false -``` - -## Response envelope - -On success the webhook returns `202 Accepted` and a JSON body: - -``` -{ "status": "accepted" } -``` - -Failures return problem JSON with `error` describing the violation. - +# SCHED-WEB-16-104 · Feedser/Vexer Webhook Endpoints + +## Overview + +Scheduler.WebService exposes inbound webhooks that allow Feedser and Vexer to +notify the planner when new exports are available. Each webhook validates the +payload, enforces signature requirements, and applies a per-endpoint rate +limit before queuing downstream processing. + +| Endpoint | Description | AuthZ | +|----------|-------------|-------| +| `POST /events/feedser-export` | Ingest Feedser export metadata (`exportId`, `changedProductKeys`, optional KEV & window). | HMAC `X-Scheduler-Signature` and/or mTLS client certificate | +| `POST /events/vexer-export` | Ingest Vexer export delta summary (`changedClaims`). | HMAC `X-Scheduler-Signature` and/or mTLS client certificate | + +## Security + +* Webhooks require either: + * mTLS with trusted client certificates; **or** + * an HMAC-SHA256 signature in the `X-Scheduler-Signature` header. The + signature must be computed as `sha256=<hex>` over the raw request body. +* Requests without the required signature/certificate return `401`. +* Secrets are configured under `Scheduler:Events:Webhooks:{Feedser|Vexer}:HmacSecret`. + +## Rate limiting + +* Each webhook enforces a sliding-window limit (`RateLimitRequests` over + `RateLimitWindowSeconds`). +* Requests over the limit return `429` and include a `Retry-After` header. +* Defaults: 120 requests / 60 seconds. Adjust via configuration. + +## Configuration + +``` +Scheduler: + Events: + Webhooks: + Feedser: + Enabled: true + HmacSecret: feedser-secret + RequireClientCertificate: false + RateLimitRequests: 120 + RateLimitWindowSeconds: 60 + Vexer: + Enabled: true + HmacSecret: vexer-secret + RequireClientCertificate: false +``` + +## Response envelope + +On success the webhook returns `202 Accepted` and a JSON body: + +``` +{ "status": "accepted" } +``` + +Failures return problem JSON with `error` describing the violation. + diff --git a/src/StellaOps.Scheduler.WebService/docs/SCHED-WEB-20-001-POLICY-RUNS.md b/src/Scheduler/StellaOps.Scheduler.WebService/docs/SCHED-WEB-20-001-POLICY-RUNS.md similarity index 97% rename from src/StellaOps.Scheduler.WebService/docs/SCHED-WEB-20-001-POLICY-RUNS.md rename to src/Scheduler/StellaOps.Scheduler.WebService/docs/SCHED-WEB-20-001-POLICY-RUNS.md index 6d73a571..4452911f 100644 --- a/src/StellaOps.Scheduler.WebService/docs/SCHED-WEB-20-001-POLICY-RUNS.md +++ b/src/Scheduler/StellaOps.Scheduler.WebService/docs/SCHED-WEB-20-001-POLICY-RUNS.md @@ -156,6 +156,6 @@ GET /api/v1/scheduler/policy/runs/run:P-7:20251026T140500Z:1b2c3d4e ## Notes - The developer-preview implementation keeps run state in-memory; Policy Engine workers will replace this with durable storage + orchestration (`SCHED-WORKER-20-301/302`). -- Responses align with the canonical DTOs described in `src/StellaOps.Scheduler.Models/docs/SCHED-MODELS-20-001-POLICY-RUNS.md`. 
+- Responses align with the canonical DTOs described in `src/Scheduler/__Libraries/StellaOps.Scheduler.Models/docs/SCHED-MODELS-20-001-POLICY-RUNS.md`. - `runId` generation follows the `run:{policyId}:{timestamp}:{suffix}` convention to support deterministic replay logs. - Additional filters (policy version, metadata) will be introduced once the Policy Engine incremental orchestrator lands. diff --git a/src/StellaOps.Scheduler.WebService/docs/SCHED-WEB-21-001-GRAPH-APIS.md b/src/Scheduler/StellaOps.Scheduler.WebService/docs/SCHED-WEB-21-001-GRAPH-APIS.md similarity index 96% rename from src/StellaOps.Scheduler.WebService/docs/SCHED-WEB-21-001-GRAPH-APIS.md rename to src/Scheduler/StellaOps.Scheduler.WebService/docs/SCHED-WEB-21-001-GRAPH-APIS.md index ccf7eb8b..42289713 100644 --- a/src/StellaOps.Scheduler.WebService/docs/SCHED-WEB-21-001-GRAPH-APIS.md +++ b/src/Scheduler/StellaOps.Scheduler.WebService/docs/SCHED-WEB-21-001-GRAPH-APIS.md @@ -1,137 +1,137 @@ -# SCHED-WEB-21-001 — Graph Job APIs - -> Status: 2025-10-26 — **Complete** (developer preview) - -Minimal API endpoints for Cartographer orchestration live under `/graphs`. Authentication now relies on Authority-issued bearer tokens carrying `graph:*` scopes. For development scenarios you can disable `Scheduler:Authority:Enabled` and continue using legacy headers: - -- `X-Tenant-Id`: tenant identifier (matches Scheduler Models `tenantId`). -- `X-Scopes`: space-delimited scopes. `graph:write` is required for write operations, `graph:read` for queries. - -Example configuration (`appsettings.json` or environment overrides): - -```jsonc -{ - "Scheduler": { - "Authority": { - "Enabled": true, - "Issuer": "https://authority.stella-ops.local", - "Audiences": [ "api://scheduler" ], - "RequiredScopes": [ "graph:read", "graph:write" ] - }, - "Events": { - "GraphJobs": { - "Enabled": true - } - }, - "Cartographer": { - "Webhook": { - "Enabled": true, - "Endpoint": "https://cartographer.stella-ops.local/hooks/graph/completed", - "ApiKeyHeader": "X-StellaOps-Webhook-Key", - "ApiKey": "change-me", - "TimeoutSeconds": 10 - } - } - } -} -``` - -## Endpoints - -### `POST /graphs/build` -Creates a `GraphBuildJob` in `pending` state. - -Request body: - -```jsonc -{ - "sbomId": "sbom_alpha", - "sbomVersionId": "sbom_alpha_v1", - "sbomDigest": "sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef", - "trigger": "sbom-version", - "metadata": { - "sbomEventId": "sbom_evt_20251026" - } -} -``` - -Response: `201 Created` - -```jsonc -{ - "id": "gbj_018dc2f5902147e2b7f2ea05f5de1f3f", - "tenantId": "tenant-alpha", - "kind": "build", - "status": "pending", - "payload": { - "schemaVersion": "scheduler.graph-build-job@1", - "id": "gbj_018dc2f5902147e2b7f2ea05f5de1f3f", - "tenantId": "tenant-alpha", - "sbomId": "sbom_alpha", - "sbomVersionId": "sbom_alpha_v1", - "sbomDigest": "sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef", - "status": "pending", - "trigger": "sbom-version", - "createdAt": "2025-10-26T12:00:00Z", - "metadata": { - "sbomeventid": "sbom_evt_20251026" - } - } -} -``` - -### `POST /graphs/overlays` -Creates a `GraphOverlayJob` in `pending` state. Include optional `buildJobId` and `subjects` filters. - -### `POST /graphs/hooks/completed` -Webhook invoked by Scheduler Worker once Cartographer finishes a build/overlay job. Requires `graph:write`. 
- -```jsonc -{ - "jobId": "goj_018dc2f5929b4f5c88ad1e43d0ab3b90", - "jobType": "Overlay", - "status": "Completed", // Completed | Failed | Cancelled - "occurredAt": "2025-10-26T12:02:45Z", - "correlationId": "corr-123", - "resultUri": "oras://cartographer/offline/tenant-alpha/graph_snap_20251026" -} -``` - -The endpoint advances the job through `running → terminal` transitions via `GraphJobStateMachine`, captures the latest correlation identifier, and stores the optional `resultUri` in metadata for downstream exports. - -### `GET /graphs/overlays/lag` -Returns per-tenant overlay lag metrics (counts, min/max/average lag seconds, and last five completions with correlation IDs + result URIs). Requires `graph:read`. - -### `GET /graphs/jobs` -Returns a combined `GraphJobCollection`. Query parameters: - -| Parameter | Description | -|-----------|-------------| -| `type` | Optional filter (`build` or `overlay`). | -| `status` | Optional filter using `GraphJobStatus`. | -| `limit` | Maximum number of results (default 50, max 200). | - -Response example: - -```jsonc -{ - "jobs": [ - { - "id": "gbj_018dc2f5902147e2b7f2ea05f5de1f3f", - "tenantId": "tenant-alpha", - "kind": "build", - "status": "pending", - "payload": { /* graph build job */ } - } - ] -} -``` - -## Integration tests - -`StellaOps.Scheduler.WebService.Tests/GraphJobEndpointTests.cs` covers scope enforcement and the build-list happy path using the in-memory store. Future work should add overlay coverage once Cartographer adapters are available. - -## Known gaps / TODO - -- Persist jobs to Scheduler storage and publish `scheduler.graph.job.completed@1` events + outbound webhook to Cartographer (see new `SCHED-WEB-21-004`). -- Extend `GET /graphs/jobs` with pagination cursors shared with Cartographer/Console. +# SCHED-WEB-21-001 — Graph Job APIs + +> Status: 2025-10-26 — **Complete** (developer preview) + +Minimal API endpoints for Cartographer orchestration live under `/graphs`. Authentication now relies on Authority-issued bearer tokens carrying `graph:*` scopes. For development scenarios you can disable `Scheduler:Authority:Enabled` and continue using legacy headers: + +- `X-Tenant-Id`: tenant identifier (matches Scheduler Models `tenantId`). +- `X-Scopes`: space-delimited scopes. `graph:write` is required for write operations, `graph:read` for queries. + +Example configuration (`appsettings.json` or environment overrides): + +```jsonc +{ + "Scheduler": { + "Authority": { + "Enabled": true, + "Issuer": "https://authority.stella-ops.local", + "Audiences": [ "api://scheduler" ], + "RequiredScopes": [ "graph:read", "graph:write" ] + }, + "Events": { + "GraphJobs": { + "Enabled": true + } + }, + "Cartographer": { + "Webhook": { + "Enabled": true, + "Endpoint": "https://cartographer.stella-ops.local/hooks/graph/completed", + "ApiKeyHeader": "X-StellaOps-Webhook-Key", + "ApiKey": "change-me", + "TimeoutSeconds": 10 + } + } + } +} +``` + +## Endpoints + +### `POST /graphs/build` +Creates a `GraphBuildJob` in `pending` state. 
+ +Request body: + +```jsonc +{ + "sbomId": "sbom_alpha", + "sbomVersionId": "sbom_alpha_v1", + "sbomDigest": "sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef", + "trigger": "sbom-version", + "metadata": { + "sbomEventId": "sbom_evt_20251026" + } +} +``` + +Response: `201 Created` + +```jsonc +{ + "id": "gbj_018dc2f5902147e2b7f2ea05f5de1f3f", + "tenantId": "tenant-alpha", + "kind": "build", + "status": "pending", + "payload": { + "schemaVersion": "scheduler.graph-build-job@1", + "id": "gbj_018dc2f5902147e2b7f2ea05f5de1f3f", + "tenantId": "tenant-alpha", + "sbomId": "sbom_alpha", + "sbomVersionId": "sbom_alpha_v1", + "sbomDigest": "sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef", + "status": "pending", + "trigger": "sbom-version", + "createdAt": "2025-10-26T12:00:00Z", + "metadata": { + "sbomeventid": "sbom_evt_20251026" + } + } +} +``` + +### `POST /graphs/overlays` +Creates a `GraphOverlayJob` in `pending` state. Include optional `buildJobId` and `subjects` filters. + +### `POST /graphs/hooks/completed` +Webhook invoked by Scheduler Worker once Cartographer finishes a build/overlay job. Requires `graph:write`. + +```jsonc +{ + "jobId": "goj_018dc2f5929b4f5c88ad1e43d0ab3b90", + "jobType": "Overlay", + "status": "Completed", // Completed | Failed | Cancelled + "occurredAt": "2025-10-26T12:02:45Z", + "correlationId": "corr-123", + "resultUri": "oras://cartographer/offline/tenant-alpha/graph_snap_20251026" +} +``` + +The endpoint advances the job through `running → terminal` transitions via `GraphJobStateMachine`, captures the latest correlation identifier, and stores the optional `resultUri` in metadata for downstream exports. + +### `GET /graphs/overlays/lag` +Returns per-tenant overlay lag metrics (counts, min/max/average lag seconds, and last five completions with correlation IDs + result URIs). Requires `graph:read`. + +### `GET /graphs/jobs` +Returns a combined `GraphJobCollection`. Query parameters: + +| Parameter | Description | +|-----------|-------------| +| `type` | Optional filter (`build` or `overlay`). | +| `status` | Optional filter using `GraphJobStatus`. | +| `limit` | Maximum number of results (default 50, max 200). | + +Response example: + +```jsonc +{ + "jobs": [ + { + "id": "gbj_018dc2f5902147e2b7f2ea05f5de1f3f", + "tenantId": "tenant-alpha", + "kind": "build", + "status": "pending", + "payload": { /* graph build job */ } + } + ] +} +``` + +## Integration tests + +`StellaOps.Scheduler.WebService.Tests/GraphJobEndpointTests.cs` covers scope enforcement and the build-list happy path using the in-memory store. Future work should add overlay coverage once Cartographer adapters are available. + +## Known gaps / TODO + +- Persist jobs to Scheduler storage and publish `scheduler.graph.job.completed@1` events + outbound webhook to Cartographer (see new `SCHED-WEB-21-004`). +- Extend `GET /graphs/jobs` with pagination cursors shared with Cartographer/Console. 
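
Tying back to the webhook contract in SCHED-WEB-16-104 above, the following is a minimal sketch of how a sender could compute the `X-Scheduler-Signature` value (HMAC-SHA256 over the raw request body, rendered as `sha256=<hex>`); the secret and the JSON payload shape are placeholder assumptions, not values lifted from the Scheduler codebase:

```csharp
// Illustrative sketch of the webhook signature scheme described in SCHED-WEB-16-104:
// HMAC-SHA256 over the raw request body, sent as "sha256=<hex>" in X-Scheduler-Signature.
// The secret and payload below are placeholders.
using System;
using System.Security.Cryptography;
using System.Text;

static string ComputeSchedulerSignature(byte[] rawBody, string hmacSecret)
{
    using var hmac = new HMACSHA256(Encoding.UTF8.GetBytes(hmacSecret));
    var hash = hmac.ComputeHash(rawBody);
    return "sha256=" + Convert.ToHexString(hash).ToLowerInvariant();
}

var body = Encoding.UTF8.GetBytes("{\"exportId\":\"export-20251026\",\"changedProductKeys\":[]}");
var signature = ComputeSchedulerSignature(body, "feedser-secret");
// Attach as: request.Headers.Add("X-Scheduler-Signature", signature);
Console.WriteLine(signature);
```

The receiving webhook would typically recompute the same value over the raw body it received and compare it to the header before accepting the event, rejecting mismatches with `401` as described above.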
diff --git a/src/StellaOps.Scheduler.Worker.Host/Program.cs b/src/Scheduler/StellaOps.Scheduler.Worker.Host/Program.cs similarity index 100% rename from src/StellaOps.Scheduler.Worker.Host/Program.cs rename to src/Scheduler/StellaOps.Scheduler.Worker.Host/Program.cs diff --git a/src/StellaOps.Scheduler.Worker.Host/StellaOps.Scheduler.Worker.Host.csproj b/src/Scheduler/StellaOps.Scheduler.Worker.Host/StellaOps.Scheduler.Worker.Host.csproj similarity index 100% rename from src/StellaOps.Scheduler.Worker.Host/StellaOps.Scheduler.Worker.Host.csproj rename to src/Scheduler/StellaOps.Scheduler.Worker.Host/StellaOps.Scheduler.Worker.Host.csproj diff --git a/src/Scheduler/StellaOps.Scheduler.sln b/src/Scheduler/StellaOps.Scheduler.sln new file mode 100644 index 00000000..895f9ade --- /dev/null +++ b/src/Scheduler/StellaOps.Scheduler.sln @@ -0,0 +1,416 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.0.31903.59 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.WebService", "StellaOps.Scheduler.WebService\StellaOps.Scheduler.WebService.csproj", "{888F7FD1-820A-4784-B169-18A7E4629F77}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "__Libraries", "__Libraries", "{41F15E67-7190-CF23-3BC4-77E87134CADD}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.Models", "__Libraries\StellaOps.Scheduler.Models\StellaOps.Scheduler.Models.csproj", "{382FA1C0-5F5F-424A-8485-7FED0ADE9F6B}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.Storage.Mongo", "__Libraries\StellaOps.Scheduler.Storage.Mongo\StellaOps.Scheduler.Storage.Mongo.csproj", "{33770BC5-6802-45AD-A866-10027DD360E2}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.ImpactIndex", "__Libraries\StellaOps.Scheduler.ImpactIndex\StellaOps.Scheduler.ImpactIndex.csproj", "{56209C24-3CE7-4F8E-8B8C-F052CB919DE2}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Plugin", "..\__Libraries\StellaOps.Plugin\StellaOps.Plugin.csproj", "{2F9CDB3D-7BB5-46B6-A51B-49AB498CC959}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.DependencyInjection", "..\__Libraries\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj", "{214ED54A-FA25-4189-9F58-50D11F079ACF}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Abstractions", "..\Authority\StellaOps.Authority\StellaOps.Auth.Abstractions\StellaOps.Auth.Abstractions.csproj", "{923FB293-CBBD-48DD-8FC1-74ED840935C9}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.ServerIntegration", "..\Authority\StellaOps.Authority\StellaOps.Auth.ServerIntegration\StellaOps.Auth.ServerIntegration.csproj", "{44FAF98E-B0AD-4AA6-9017-551B5A100F01}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Configuration", "..\__Libraries\StellaOps.Configuration\StellaOps.Configuration.csproj", "{60EFD52D-AA51-4DE5-BC40-ED9DCC30802C}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Authority.Plugins.Abstractions", "..\Authority\StellaOps.Authority\StellaOps.Authority.Plugins.Abstractions\StellaOps.Authority.Plugins.Abstractions.csproj", "{66B32B7D-0A46-4353-A3C2-0B6238238887}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cryptography", 
"..\__Libraries\StellaOps.Cryptography\StellaOps.Cryptography.csproj", "{E4F0FC41-BD88-4D5D-ADEE-5C74FDFACA4D}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.Queue", "__Libraries\StellaOps.Scheduler.Queue\StellaOps.Scheduler.Queue.csproj", "{6A62C12A-8742-4D1E-AEA7-8DDC3C722AC4}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.Worker", "__Libraries\StellaOps.Scheduler.Worker\StellaOps.Scheduler.Worker.csproj", "{C48F2207-8974-43A4-B3D6-6A1761C37605}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Notify.Models", "..\Notify\__Libraries\StellaOps.Notify.Models\StellaOps.Notify.Models.csproj", "{F9395736-7220-409E-9C6F-DE083E00EBFF}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Notify.Queue", "..\Notify\__Libraries\StellaOps.Notify.Queue\StellaOps.Notify.Queue.csproj", "{827D179C-A229-439E-A878-4028F30CA670}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.Worker.Host", "StellaOps.Scheduler.Worker.Host\StellaOps.Scheduler.Worker.Host.csproj", "{37FA8A12-E96E-4F23-AB72-8FA9DD9DA082}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "__Tests", "__Tests", "{56BCE1BF-7CBA-7CE8-203D-A88051F1D642}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.ImpactIndex.Tests", "__Tests\StellaOps.Scheduler.ImpactIndex.Tests\StellaOps.Scheduler.ImpactIndex.Tests.csproj", "{5ED2BF16-72CE-4DF1-917C-6D832427AE6F}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Emit", "..\Scanner\__Libraries\StellaOps.Scanner.Emit\StellaOps.Scanner.Emit.csproj", "{11D72DD3-3752-4A6A-AA4A-5298D4FD6FA0}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Core", "..\Scanner\__Libraries\StellaOps.Scanner.Core\StellaOps.Scanner.Core.csproj", "{8D4FF2D1-D387-4F85-9A2E-A952B2AFAE7B}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Client", "..\Authority\StellaOps.Authority\StellaOps.Auth.Client\StellaOps.Auth.Client.csproj", "{4CF2A8EB-CB97-481D-843E-7F47D5979121}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Security", "..\__Libraries\StellaOps.Auth.Security\StellaOps.Auth.Security.csproj", "{FBBEE020-A7B1-41A8-AEC9-711A4F8B2097}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Storage", "..\Scanner\__Libraries\StellaOps.Scanner.Storage\StellaOps.Scanner.Storage.csproj", "{46839CB8-AB2A-4048-BC09-B837B1221F7D}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.Models.Tests", "__Tests\StellaOps.Scheduler.Models.Tests\StellaOps.Scheduler.Models.Tests.csproj", "{2F097B4B-8F38-45C3-8A42-90250E912F0C}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.Queue.Tests", "__Tests\StellaOps.Scheduler.Queue.Tests\StellaOps.Scheduler.Queue.Tests.csproj", "{7C22F6B7-095E-459B-BCCF-87098EA9F192}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.Storage.Mongo.Tests", "__Tests\StellaOps.Scheduler.Storage.Mongo.Tests\StellaOps.Scheduler.Storage.Mongo.Tests.csproj", "{972CEB4D-510B-4701-B4A2-F14A85F11CC7}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.WebService.Tests", "__Tests\StellaOps.Scheduler.WebService.Tests\StellaOps.Scheduler.WebService.Tests.csproj", "{7B4C9EAC-316E-4890-A715-7BB9C1577F96}" +EndProject 
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.Worker.Tests", "__Tests\StellaOps.Scheduler.Worker.Tests\StellaOps.Scheduler.Worker.Tests.csproj", "{D640DBB2-4251-44B3-B949-75FC6BF02B71}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|Any CPU = Release|Any CPU + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {888F7FD1-820A-4784-B169-18A7E4629F77}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {888F7FD1-820A-4784-B169-18A7E4629F77}.Debug|Any CPU.Build.0 = Debug|Any CPU + {888F7FD1-820A-4784-B169-18A7E4629F77}.Debug|x64.ActiveCfg = Debug|Any CPU + {888F7FD1-820A-4784-B169-18A7E4629F77}.Debug|x64.Build.0 = Debug|Any CPU + {888F7FD1-820A-4784-B169-18A7E4629F77}.Debug|x86.ActiveCfg = Debug|Any CPU + {888F7FD1-820A-4784-B169-18A7E4629F77}.Debug|x86.Build.0 = Debug|Any CPU + {888F7FD1-820A-4784-B169-18A7E4629F77}.Release|Any CPU.ActiveCfg = Release|Any CPU + {888F7FD1-820A-4784-B169-18A7E4629F77}.Release|Any CPU.Build.0 = Release|Any CPU + {888F7FD1-820A-4784-B169-18A7E4629F77}.Release|x64.ActiveCfg = Release|Any CPU + {888F7FD1-820A-4784-B169-18A7E4629F77}.Release|x64.Build.0 = Release|Any CPU + {888F7FD1-820A-4784-B169-18A7E4629F77}.Release|x86.ActiveCfg = Release|Any CPU + {888F7FD1-820A-4784-B169-18A7E4629F77}.Release|x86.Build.0 = Release|Any CPU + {382FA1C0-5F5F-424A-8485-7FED0ADE9F6B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {382FA1C0-5F5F-424A-8485-7FED0ADE9F6B}.Debug|Any CPU.Build.0 = Debug|Any CPU + {382FA1C0-5F5F-424A-8485-7FED0ADE9F6B}.Debug|x64.ActiveCfg = Debug|Any CPU + {382FA1C0-5F5F-424A-8485-7FED0ADE9F6B}.Debug|x64.Build.0 = Debug|Any CPU + {382FA1C0-5F5F-424A-8485-7FED0ADE9F6B}.Debug|x86.ActiveCfg = Debug|Any CPU + {382FA1C0-5F5F-424A-8485-7FED0ADE9F6B}.Debug|x86.Build.0 = Debug|Any CPU + {382FA1C0-5F5F-424A-8485-7FED0ADE9F6B}.Release|Any CPU.ActiveCfg = Release|Any CPU + {382FA1C0-5F5F-424A-8485-7FED0ADE9F6B}.Release|Any CPU.Build.0 = Release|Any CPU + {382FA1C0-5F5F-424A-8485-7FED0ADE9F6B}.Release|x64.ActiveCfg = Release|Any CPU + {382FA1C0-5F5F-424A-8485-7FED0ADE9F6B}.Release|x64.Build.0 = Release|Any CPU + {382FA1C0-5F5F-424A-8485-7FED0ADE9F6B}.Release|x86.ActiveCfg = Release|Any CPU + {382FA1C0-5F5F-424A-8485-7FED0ADE9F6B}.Release|x86.Build.0 = Release|Any CPU + {33770BC5-6802-45AD-A866-10027DD360E2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {33770BC5-6802-45AD-A866-10027DD360E2}.Debug|Any CPU.Build.0 = Debug|Any CPU + {33770BC5-6802-45AD-A866-10027DD360E2}.Debug|x64.ActiveCfg = Debug|Any CPU + {33770BC5-6802-45AD-A866-10027DD360E2}.Debug|x64.Build.0 = Debug|Any CPU + {33770BC5-6802-45AD-A866-10027DD360E2}.Debug|x86.ActiveCfg = Debug|Any CPU + {33770BC5-6802-45AD-A866-10027DD360E2}.Debug|x86.Build.0 = Debug|Any CPU + {33770BC5-6802-45AD-A866-10027DD360E2}.Release|Any CPU.ActiveCfg = Release|Any CPU + {33770BC5-6802-45AD-A866-10027DD360E2}.Release|Any CPU.Build.0 = Release|Any CPU + {33770BC5-6802-45AD-A866-10027DD360E2}.Release|x64.ActiveCfg = Release|Any CPU + {33770BC5-6802-45AD-A866-10027DD360E2}.Release|x64.Build.0 = Release|Any CPU + {33770BC5-6802-45AD-A866-10027DD360E2}.Release|x86.ActiveCfg = Release|Any CPU + {33770BC5-6802-45AD-A866-10027DD360E2}.Release|x86.Build.0 = Release|Any CPU + {56209C24-3CE7-4F8E-8B8C-F052CB919DE2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {56209C24-3CE7-4F8E-8B8C-F052CB919DE2}.Debug|Any CPU.Build.0 = 
Debug|Any CPU + {56209C24-3CE7-4F8E-8B8C-F052CB919DE2}.Debug|x64.ActiveCfg = Debug|Any CPU + {56209C24-3CE7-4F8E-8B8C-F052CB919DE2}.Debug|x64.Build.0 = Debug|Any CPU + {56209C24-3CE7-4F8E-8B8C-F052CB919DE2}.Debug|x86.ActiveCfg = Debug|Any CPU + {56209C24-3CE7-4F8E-8B8C-F052CB919DE2}.Debug|x86.Build.0 = Debug|Any CPU + {56209C24-3CE7-4F8E-8B8C-F052CB919DE2}.Release|Any CPU.ActiveCfg = Release|Any CPU + {56209C24-3CE7-4F8E-8B8C-F052CB919DE2}.Release|Any CPU.Build.0 = Release|Any CPU + {56209C24-3CE7-4F8E-8B8C-F052CB919DE2}.Release|x64.ActiveCfg = Release|Any CPU + {56209C24-3CE7-4F8E-8B8C-F052CB919DE2}.Release|x64.Build.0 = Release|Any CPU + {56209C24-3CE7-4F8E-8B8C-F052CB919DE2}.Release|x86.ActiveCfg = Release|Any CPU + {56209C24-3CE7-4F8E-8B8C-F052CB919DE2}.Release|x86.Build.0 = Release|Any CPU + {2F9CDB3D-7BB5-46B6-A51B-49AB498CC959}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {2F9CDB3D-7BB5-46B6-A51B-49AB498CC959}.Debug|Any CPU.Build.0 = Debug|Any CPU + {2F9CDB3D-7BB5-46B6-A51B-49AB498CC959}.Debug|x64.ActiveCfg = Debug|Any CPU + {2F9CDB3D-7BB5-46B6-A51B-49AB498CC959}.Debug|x64.Build.0 = Debug|Any CPU + {2F9CDB3D-7BB5-46B6-A51B-49AB498CC959}.Debug|x86.ActiveCfg = Debug|Any CPU + {2F9CDB3D-7BB5-46B6-A51B-49AB498CC959}.Debug|x86.Build.0 = Debug|Any CPU + {2F9CDB3D-7BB5-46B6-A51B-49AB498CC959}.Release|Any CPU.ActiveCfg = Release|Any CPU + {2F9CDB3D-7BB5-46B6-A51B-49AB498CC959}.Release|Any CPU.Build.0 = Release|Any CPU + {2F9CDB3D-7BB5-46B6-A51B-49AB498CC959}.Release|x64.ActiveCfg = Release|Any CPU + {2F9CDB3D-7BB5-46B6-A51B-49AB498CC959}.Release|x64.Build.0 = Release|Any CPU + {2F9CDB3D-7BB5-46B6-A51B-49AB498CC959}.Release|x86.ActiveCfg = Release|Any CPU + {2F9CDB3D-7BB5-46B6-A51B-49AB498CC959}.Release|x86.Build.0 = Release|Any CPU + {214ED54A-FA25-4189-9F58-50D11F079ACF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {214ED54A-FA25-4189-9F58-50D11F079ACF}.Debug|Any CPU.Build.0 = Debug|Any CPU + {214ED54A-FA25-4189-9F58-50D11F079ACF}.Debug|x64.ActiveCfg = Debug|Any CPU + {214ED54A-FA25-4189-9F58-50D11F079ACF}.Debug|x64.Build.0 = Debug|Any CPU + {214ED54A-FA25-4189-9F58-50D11F079ACF}.Debug|x86.ActiveCfg = Debug|Any CPU + {214ED54A-FA25-4189-9F58-50D11F079ACF}.Debug|x86.Build.0 = Debug|Any CPU + {214ED54A-FA25-4189-9F58-50D11F079ACF}.Release|Any CPU.ActiveCfg = Release|Any CPU + {214ED54A-FA25-4189-9F58-50D11F079ACF}.Release|Any CPU.Build.0 = Release|Any CPU + {214ED54A-FA25-4189-9F58-50D11F079ACF}.Release|x64.ActiveCfg = Release|Any CPU + {214ED54A-FA25-4189-9F58-50D11F079ACF}.Release|x64.Build.0 = Release|Any CPU + {214ED54A-FA25-4189-9F58-50D11F079ACF}.Release|x86.ActiveCfg = Release|Any CPU + {214ED54A-FA25-4189-9F58-50D11F079ACF}.Release|x86.Build.0 = Release|Any CPU + {923FB293-CBBD-48DD-8FC1-74ED840935C9}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {923FB293-CBBD-48DD-8FC1-74ED840935C9}.Debug|Any CPU.Build.0 = Debug|Any CPU + {923FB293-CBBD-48DD-8FC1-74ED840935C9}.Debug|x64.ActiveCfg = Debug|Any CPU + {923FB293-CBBD-48DD-8FC1-74ED840935C9}.Debug|x64.Build.0 = Debug|Any CPU + {923FB293-CBBD-48DD-8FC1-74ED840935C9}.Debug|x86.ActiveCfg = Debug|Any CPU + {923FB293-CBBD-48DD-8FC1-74ED840935C9}.Debug|x86.Build.0 = Debug|Any CPU + {923FB293-CBBD-48DD-8FC1-74ED840935C9}.Release|Any CPU.ActiveCfg = Release|Any CPU + {923FB293-CBBD-48DD-8FC1-74ED840935C9}.Release|Any CPU.Build.0 = Release|Any CPU + {923FB293-CBBD-48DD-8FC1-74ED840935C9}.Release|x64.ActiveCfg = Release|Any CPU + {923FB293-CBBD-48DD-8FC1-74ED840935C9}.Release|x64.Build.0 = Release|Any CPU + 
{923FB293-CBBD-48DD-8FC1-74ED840935C9}.Release|x86.ActiveCfg = Release|Any CPU + {923FB293-CBBD-48DD-8FC1-74ED840935C9}.Release|x86.Build.0 = Release|Any CPU + {44FAF98E-B0AD-4AA6-9017-551B5A100F01}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {44FAF98E-B0AD-4AA6-9017-551B5A100F01}.Debug|Any CPU.Build.0 = Debug|Any CPU + {44FAF98E-B0AD-4AA6-9017-551B5A100F01}.Debug|x64.ActiveCfg = Debug|Any CPU + {44FAF98E-B0AD-4AA6-9017-551B5A100F01}.Debug|x64.Build.0 = Debug|Any CPU + {44FAF98E-B0AD-4AA6-9017-551B5A100F01}.Debug|x86.ActiveCfg = Debug|Any CPU + {44FAF98E-B0AD-4AA6-9017-551B5A100F01}.Debug|x86.Build.0 = Debug|Any CPU + {44FAF98E-B0AD-4AA6-9017-551B5A100F01}.Release|Any CPU.ActiveCfg = Release|Any CPU + {44FAF98E-B0AD-4AA6-9017-551B5A100F01}.Release|Any CPU.Build.0 = Release|Any CPU + {44FAF98E-B0AD-4AA6-9017-551B5A100F01}.Release|x64.ActiveCfg = Release|Any CPU + {44FAF98E-B0AD-4AA6-9017-551B5A100F01}.Release|x64.Build.0 = Release|Any CPU + {44FAF98E-B0AD-4AA6-9017-551B5A100F01}.Release|x86.ActiveCfg = Release|Any CPU + {44FAF98E-B0AD-4AA6-9017-551B5A100F01}.Release|x86.Build.0 = Release|Any CPU + {60EFD52D-AA51-4DE5-BC40-ED9DCC30802C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {60EFD52D-AA51-4DE5-BC40-ED9DCC30802C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {60EFD52D-AA51-4DE5-BC40-ED9DCC30802C}.Debug|x64.ActiveCfg = Debug|Any CPU + {60EFD52D-AA51-4DE5-BC40-ED9DCC30802C}.Debug|x64.Build.0 = Debug|Any CPU + {60EFD52D-AA51-4DE5-BC40-ED9DCC30802C}.Debug|x86.ActiveCfg = Debug|Any CPU + {60EFD52D-AA51-4DE5-BC40-ED9DCC30802C}.Debug|x86.Build.0 = Debug|Any CPU + {60EFD52D-AA51-4DE5-BC40-ED9DCC30802C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {60EFD52D-AA51-4DE5-BC40-ED9DCC30802C}.Release|Any CPU.Build.0 = Release|Any CPU + {60EFD52D-AA51-4DE5-BC40-ED9DCC30802C}.Release|x64.ActiveCfg = Release|Any CPU + {60EFD52D-AA51-4DE5-BC40-ED9DCC30802C}.Release|x64.Build.0 = Release|Any CPU + {60EFD52D-AA51-4DE5-BC40-ED9DCC30802C}.Release|x86.ActiveCfg = Release|Any CPU + {60EFD52D-AA51-4DE5-BC40-ED9DCC30802C}.Release|x86.Build.0 = Release|Any CPU + {66B32B7D-0A46-4353-A3C2-0B6238238887}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {66B32B7D-0A46-4353-A3C2-0B6238238887}.Debug|Any CPU.Build.0 = Debug|Any CPU + {66B32B7D-0A46-4353-A3C2-0B6238238887}.Debug|x64.ActiveCfg = Debug|Any CPU + {66B32B7D-0A46-4353-A3C2-0B6238238887}.Debug|x64.Build.0 = Debug|Any CPU + {66B32B7D-0A46-4353-A3C2-0B6238238887}.Debug|x86.ActiveCfg = Debug|Any CPU + {66B32B7D-0A46-4353-A3C2-0B6238238887}.Debug|x86.Build.0 = Debug|Any CPU + {66B32B7D-0A46-4353-A3C2-0B6238238887}.Release|Any CPU.ActiveCfg = Release|Any CPU + {66B32B7D-0A46-4353-A3C2-0B6238238887}.Release|Any CPU.Build.0 = Release|Any CPU + {66B32B7D-0A46-4353-A3C2-0B6238238887}.Release|x64.ActiveCfg = Release|Any CPU + {66B32B7D-0A46-4353-A3C2-0B6238238887}.Release|x64.Build.0 = Release|Any CPU + {66B32B7D-0A46-4353-A3C2-0B6238238887}.Release|x86.ActiveCfg = Release|Any CPU + {66B32B7D-0A46-4353-A3C2-0B6238238887}.Release|x86.Build.0 = Release|Any CPU + {E4F0FC41-BD88-4D5D-ADEE-5C74FDFACA4D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E4F0FC41-BD88-4D5D-ADEE-5C74FDFACA4D}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E4F0FC41-BD88-4D5D-ADEE-5C74FDFACA4D}.Debug|x64.ActiveCfg = Debug|Any CPU + {E4F0FC41-BD88-4D5D-ADEE-5C74FDFACA4D}.Debug|x64.Build.0 = Debug|Any CPU + {E4F0FC41-BD88-4D5D-ADEE-5C74FDFACA4D}.Debug|x86.ActiveCfg = Debug|Any CPU + {E4F0FC41-BD88-4D5D-ADEE-5C74FDFACA4D}.Debug|x86.Build.0 = Debug|Any CPU + {E4F0FC41-BD88-4D5D-ADEE-5C74FDFACA4D}.Release|Any CPU.ActiveCfg = 
Release|Any CPU + {E4F0FC41-BD88-4D5D-ADEE-5C74FDFACA4D}.Release|Any CPU.Build.0 = Release|Any CPU + {E4F0FC41-BD88-4D5D-ADEE-5C74FDFACA4D}.Release|x64.ActiveCfg = Release|Any CPU + {E4F0FC41-BD88-4D5D-ADEE-5C74FDFACA4D}.Release|x64.Build.0 = Release|Any CPU + {E4F0FC41-BD88-4D5D-ADEE-5C74FDFACA4D}.Release|x86.ActiveCfg = Release|Any CPU + {E4F0FC41-BD88-4D5D-ADEE-5C74FDFACA4D}.Release|x86.Build.0 = Release|Any CPU + {6A62C12A-8742-4D1E-AEA7-8DDC3C722AC4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {6A62C12A-8742-4D1E-AEA7-8DDC3C722AC4}.Debug|Any CPU.Build.0 = Debug|Any CPU + {6A62C12A-8742-4D1E-AEA7-8DDC3C722AC4}.Debug|x64.ActiveCfg = Debug|Any CPU + {6A62C12A-8742-4D1E-AEA7-8DDC3C722AC4}.Debug|x64.Build.0 = Debug|Any CPU + {6A62C12A-8742-4D1E-AEA7-8DDC3C722AC4}.Debug|x86.ActiveCfg = Debug|Any CPU + {6A62C12A-8742-4D1E-AEA7-8DDC3C722AC4}.Debug|x86.Build.0 = Debug|Any CPU + {6A62C12A-8742-4D1E-AEA7-8DDC3C722AC4}.Release|Any CPU.ActiveCfg = Release|Any CPU + {6A62C12A-8742-4D1E-AEA7-8DDC3C722AC4}.Release|Any CPU.Build.0 = Release|Any CPU + {6A62C12A-8742-4D1E-AEA7-8DDC3C722AC4}.Release|x64.ActiveCfg = Release|Any CPU + {6A62C12A-8742-4D1E-AEA7-8DDC3C722AC4}.Release|x64.Build.0 = Release|Any CPU + {6A62C12A-8742-4D1E-AEA7-8DDC3C722AC4}.Release|x86.ActiveCfg = Release|Any CPU + {6A62C12A-8742-4D1E-AEA7-8DDC3C722AC4}.Release|x86.Build.0 = Release|Any CPU + {C48F2207-8974-43A4-B3D6-6A1761C37605}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C48F2207-8974-43A4-B3D6-6A1761C37605}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C48F2207-8974-43A4-B3D6-6A1761C37605}.Debug|x64.ActiveCfg = Debug|Any CPU + {C48F2207-8974-43A4-B3D6-6A1761C37605}.Debug|x64.Build.0 = Debug|Any CPU + {C48F2207-8974-43A4-B3D6-6A1761C37605}.Debug|x86.ActiveCfg = Debug|Any CPU + {C48F2207-8974-43A4-B3D6-6A1761C37605}.Debug|x86.Build.0 = Debug|Any CPU + {C48F2207-8974-43A4-B3D6-6A1761C37605}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C48F2207-8974-43A4-B3D6-6A1761C37605}.Release|Any CPU.Build.0 = Release|Any CPU + {C48F2207-8974-43A4-B3D6-6A1761C37605}.Release|x64.ActiveCfg = Release|Any CPU + {C48F2207-8974-43A4-B3D6-6A1761C37605}.Release|x64.Build.0 = Release|Any CPU + {C48F2207-8974-43A4-B3D6-6A1761C37605}.Release|x86.ActiveCfg = Release|Any CPU + {C48F2207-8974-43A4-B3D6-6A1761C37605}.Release|x86.Build.0 = Release|Any CPU + {F9395736-7220-409E-9C6F-DE083E00EBFF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {F9395736-7220-409E-9C6F-DE083E00EBFF}.Debug|Any CPU.Build.0 = Debug|Any CPU + {F9395736-7220-409E-9C6F-DE083E00EBFF}.Debug|x64.ActiveCfg = Debug|Any CPU + {F9395736-7220-409E-9C6F-DE083E00EBFF}.Debug|x64.Build.0 = Debug|Any CPU + {F9395736-7220-409E-9C6F-DE083E00EBFF}.Debug|x86.ActiveCfg = Debug|Any CPU + {F9395736-7220-409E-9C6F-DE083E00EBFF}.Debug|x86.Build.0 = Debug|Any CPU + {F9395736-7220-409E-9C6F-DE083E00EBFF}.Release|Any CPU.ActiveCfg = Release|Any CPU + {F9395736-7220-409E-9C6F-DE083E00EBFF}.Release|Any CPU.Build.0 = Release|Any CPU + {F9395736-7220-409E-9C6F-DE083E00EBFF}.Release|x64.ActiveCfg = Release|Any CPU + {F9395736-7220-409E-9C6F-DE083E00EBFF}.Release|x64.Build.0 = Release|Any CPU + {F9395736-7220-409E-9C6F-DE083E00EBFF}.Release|x86.ActiveCfg = Release|Any CPU + {F9395736-7220-409E-9C6F-DE083E00EBFF}.Release|x86.Build.0 = Release|Any CPU + {827D179C-A229-439E-A878-4028F30CA670}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {827D179C-A229-439E-A878-4028F30CA670}.Debug|Any CPU.Build.0 = Debug|Any CPU + {827D179C-A229-439E-A878-4028F30CA670}.Debug|x64.ActiveCfg = Debug|Any CPU + 
{827D179C-A229-439E-A878-4028F30CA670}.Debug|x64.Build.0 = Debug|Any CPU + {827D179C-A229-439E-A878-4028F30CA670}.Debug|x86.ActiveCfg = Debug|Any CPU + {827D179C-A229-439E-A878-4028F30CA670}.Debug|x86.Build.0 = Debug|Any CPU + {827D179C-A229-439E-A878-4028F30CA670}.Release|Any CPU.ActiveCfg = Release|Any CPU + {827D179C-A229-439E-A878-4028F30CA670}.Release|Any CPU.Build.0 = Release|Any CPU + {827D179C-A229-439E-A878-4028F30CA670}.Release|x64.ActiveCfg = Release|Any CPU + {827D179C-A229-439E-A878-4028F30CA670}.Release|x64.Build.0 = Release|Any CPU + {827D179C-A229-439E-A878-4028F30CA670}.Release|x86.ActiveCfg = Release|Any CPU + {827D179C-A229-439E-A878-4028F30CA670}.Release|x86.Build.0 = Release|Any CPU + {37FA8A12-E96E-4F23-AB72-8FA9DD9DA082}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {37FA8A12-E96E-4F23-AB72-8FA9DD9DA082}.Debug|Any CPU.Build.0 = Debug|Any CPU + {37FA8A12-E96E-4F23-AB72-8FA9DD9DA082}.Debug|x64.ActiveCfg = Debug|Any CPU + {37FA8A12-E96E-4F23-AB72-8FA9DD9DA082}.Debug|x64.Build.0 = Debug|Any CPU + {37FA8A12-E96E-4F23-AB72-8FA9DD9DA082}.Debug|x86.ActiveCfg = Debug|Any CPU + {37FA8A12-E96E-4F23-AB72-8FA9DD9DA082}.Debug|x86.Build.0 = Debug|Any CPU + {37FA8A12-E96E-4F23-AB72-8FA9DD9DA082}.Release|Any CPU.ActiveCfg = Release|Any CPU + {37FA8A12-E96E-4F23-AB72-8FA9DD9DA082}.Release|Any CPU.Build.0 = Release|Any CPU + {37FA8A12-E96E-4F23-AB72-8FA9DD9DA082}.Release|x64.ActiveCfg = Release|Any CPU + {37FA8A12-E96E-4F23-AB72-8FA9DD9DA082}.Release|x64.Build.0 = Release|Any CPU + {37FA8A12-E96E-4F23-AB72-8FA9DD9DA082}.Release|x86.ActiveCfg = Release|Any CPU + {37FA8A12-E96E-4F23-AB72-8FA9DD9DA082}.Release|x86.Build.0 = Release|Any CPU + {5ED2BF16-72CE-4DF1-917C-6D832427AE6F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {5ED2BF16-72CE-4DF1-917C-6D832427AE6F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {5ED2BF16-72CE-4DF1-917C-6D832427AE6F}.Debug|x64.ActiveCfg = Debug|Any CPU + {5ED2BF16-72CE-4DF1-917C-6D832427AE6F}.Debug|x64.Build.0 = Debug|Any CPU + {5ED2BF16-72CE-4DF1-917C-6D832427AE6F}.Debug|x86.ActiveCfg = Debug|Any CPU + {5ED2BF16-72CE-4DF1-917C-6D832427AE6F}.Debug|x86.Build.0 = Debug|Any CPU + {5ED2BF16-72CE-4DF1-917C-6D832427AE6F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {5ED2BF16-72CE-4DF1-917C-6D832427AE6F}.Release|Any CPU.Build.0 = Release|Any CPU + {5ED2BF16-72CE-4DF1-917C-6D832427AE6F}.Release|x64.ActiveCfg = Release|Any CPU + {5ED2BF16-72CE-4DF1-917C-6D832427AE6F}.Release|x64.Build.0 = Release|Any CPU + {5ED2BF16-72CE-4DF1-917C-6D832427AE6F}.Release|x86.ActiveCfg = Release|Any CPU + {5ED2BF16-72CE-4DF1-917C-6D832427AE6F}.Release|x86.Build.0 = Release|Any CPU + {11D72DD3-3752-4A6A-AA4A-5298D4FD6FA0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {11D72DD3-3752-4A6A-AA4A-5298D4FD6FA0}.Debug|Any CPU.Build.0 = Debug|Any CPU + {11D72DD3-3752-4A6A-AA4A-5298D4FD6FA0}.Debug|x64.ActiveCfg = Debug|Any CPU + {11D72DD3-3752-4A6A-AA4A-5298D4FD6FA0}.Debug|x64.Build.0 = Debug|Any CPU + {11D72DD3-3752-4A6A-AA4A-5298D4FD6FA0}.Debug|x86.ActiveCfg = Debug|Any CPU + {11D72DD3-3752-4A6A-AA4A-5298D4FD6FA0}.Debug|x86.Build.0 = Debug|Any CPU + {11D72DD3-3752-4A6A-AA4A-5298D4FD6FA0}.Release|Any CPU.ActiveCfg = Release|Any CPU + {11D72DD3-3752-4A6A-AA4A-5298D4FD6FA0}.Release|Any CPU.Build.0 = Release|Any CPU + {11D72DD3-3752-4A6A-AA4A-5298D4FD6FA0}.Release|x64.ActiveCfg = Release|Any CPU + {11D72DD3-3752-4A6A-AA4A-5298D4FD6FA0}.Release|x64.Build.0 = Release|Any CPU + {11D72DD3-3752-4A6A-AA4A-5298D4FD6FA0}.Release|x86.ActiveCfg = Release|Any CPU + {11D72DD3-3752-4A6A-AA4A-5298D4FD6FA0}.Release|x86.Build.0 = 
Release|Any CPU + {8D4FF2D1-D387-4F85-9A2E-A952B2AFAE7B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {8D4FF2D1-D387-4F85-9A2E-A952B2AFAE7B}.Debug|Any CPU.Build.0 = Debug|Any CPU + {8D4FF2D1-D387-4F85-9A2E-A952B2AFAE7B}.Debug|x64.ActiveCfg = Debug|Any CPU + {8D4FF2D1-D387-4F85-9A2E-A952B2AFAE7B}.Debug|x64.Build.0 = Debug|Any CPU + {8D4FF2D1-D387-4F85-9A2E-A952B2AFAE7B}.Debug|x86.ActiveCfg = Debug|Any CPU + {8D4FF2D1-D387-4F85-9A2E-A952B2AFAE7B}.Debug|x86.Build.0 = Debug|Any CPU + {8D4FF2D1-D387-4F85-9A2E-A952B2AFAE7B}.Release|Any CPU.ActiveCfg = Release|Any CPU + {8D4FF2D1-D387-4F85-9A2E-A952B2AFAE7B}.Release|Any CPU.Build.0 = Release|Any CPU + {8D4FF2D1-D387-4F85-9A2E-A952B2AFAE7B}.Release|x64.ActiveCfg = Release|Any CPU + {8D4FF2D1-D387-4F85-9A2E-A952B2AFAE7B}.Release|x64.Build.0 = Release|Any CPU + {8D4FF2D1-D387-4F85-9A2E-A952B2AFAE7B}.Release|x86.ActiveCfg = Release|Any CPU + {8D4FF2D1-D387-4F85-9A2E-A952B2AFAE7B}.Release|x86.Build.0 = Release|Any CPU + {4CF2A8EB-CB97-481D-843E-7F47D5979121}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {4CF2A8EB-CB97-481D-843E-7F47D5979121}.Debug|Any CPU.Build.0 = Debug|Any CPU + {4CF2A8EB-CB97-481D-843E-7F47D5979121}.Debug|x64.ActiveCfg = Debug|Any CPU + {4CF2A8EB-CB97-481D-843E-7F47D5979121}.Debug|x64.Build.0 = Debug|Any CPU + {4CF2A8EB-CB97-481D-843E-7F47D5979121}.Debug|x86.ActiveCfg = Debug|Any CPU + {4CF2A8EB-CB97-481D-843E-7F47D5979121}.Debug|x86.Build.0 = Debug|Any CPU + {4CF2A8EB-CB97-481D-843E-7F47D5979121}.Release|Any CPU.ActiveCfg = Release|Any CPU + {4CF2A8EB-CB97-481D-843E-7F47D5979121}.Release|Any CPU.Build.0 = Release|Any CPU + {4CF2A8EB-CB97-481D-843E-7F47D5979121}.Release|x64.ActiveCfg = Release|Any CPU + {4CF2A8EB-CB97-481D-843E-7F47D5979121}.Release|x64.Build.0 = Release|Any CPU + {4CF2A8EB-CB97-481D-843E-7F47D5979121}.Release|x86.ActiveCfg = Release|Any CPU + {4CF2A8EB-CB97-481D-843E-7F47D5979121}.Release|x86.Build.0 = Release|Any CPU + {FBBEE020-A7B1-41A8-AEC9-711A4F8B2097}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {FBBEE020-A7B1-41A8-AEC9-711A4F8B2097}.Debug|Any CPU.Build.0 = Debug|Any CPU + {FBBEE020-A7B1-41A8-AEC9-711A4F8B2097}.Debug|x64.ActiveCfg = Debug|Any CPU + {FBBEE020-A7B1-41A8-AEC9-711A4F8B2097}.Debug|x64.Build.0 = Debug|Any CPU + {FBBEE020-A7B1-41A8-AEC9-711A4F8B2097}.Debug|x86.ActiveCfg = Debug|Any CPU + {FBBEE020-A7B1-41A8-AEC9-711A4F8B2097}.Debug|x86.Build.0 = Debug|Any CPU + {FBBEE020-A7B1-41A8-AEC9-711A4F8B2097}.Release|Any CPU.ActiveCfg = Release|Any CPU + {FBBEE020-A7B1-41A8-AEC9-711A4F8B2097}.Release|Any CPU.Build.0 = Release|Any CPU + {FBBEE020-A7B1-41A8-AEC9-711A4F8B2097}.Release|x64.ActiveCfg = Release|Any CPU + {FBBEE020-A7B1-41A8-AEC9-711A4F8B2097}.Release|x64.Build.0 = Release|Any CPU + {FBBEE020-A7B1-41A8-AEC9-711A4F8B2097}.Release|x86.ActiveCfg = Release|Any CPU + {FBBEE020-A7B1-41A8-AEC9-711A4F8B2097}.Release|x86.Build.0 = Release|Any CPU + {46839CB8-AB2A-4048-BC09-B837B1221F7D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {46839CB8-AB2A-4048-BC09-B837B1221F7D}.Debug|Any CPU.Build.0 = Debug|Any CPU + {46839CB8-AB2A-4048-BC09-B837B1221F7D}.Debug|x64.ActiveCfg = Debug|Any CPU + {46839CB8-AB2A-4048-BC09-B837B1221F7D}.Debug|x64.Build.0 = Debug|Any CPU + {46839CB8-AB2A-4048-BC09-B837B1221F7D}.Debug|x86.ActiveCfg = Debug|Any CPU + {46839CB8-AB2A-4048-BC09-B837B1221F7D}.Debug|x86.Build.0 = Debug|Any CPU + {46839CB8-AB2A-4048-BC09-B837B1221F7D}.Release|Any CPU.ActiveCfg = Release|Any CPU + {46839CB8-AB2A-4048-BC09-B837B1221F7D}.Release|Any CPU.Build.0 = Release|Any CPU + 
{46839CB8-AB2A-4048-BC09-B837B1221F7D}.Release|x64.ActiveCfg = Release|Any CPU + {46839CB8-AB2A-4048-BC09-B837B1221F7D}.Release|x64.Build.0 = Release|Any CPU + {46839CB8-AB2A-4048-BC09-B837B1221F7D}.Release|x86.ActiveCfg = Release|Any CPU + {46839CB8-AB2A-4048-BC09-B837B1221F7D}.Release|x86.Build.0 = Release|Any CPU + {2F097B4B-8F38-45C3-8A42-90250E912F0C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {2F097B4B-8F38-45C3-8A42-90250E912F0C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {2F097B4B-8F38-45C3-8A42-90250E912F0C}.Debug|x64.ActiveCfg = Debug|Any CPU + {2F097B4B-8F38-45C3-8A42-90250E912F0C}.Debug|x64.Build.0 = Debug|Any CPU + {2F097B4B-8F38-45C3-8A42-90250E912F0C}.Debug|x86.ActiveCfg = Debug|Any CPU + {2F097B4B-8F38-45C3-8A42-90250E912F0C}.Debug|x86.Build.0 = Debug|Any CPU + {2F097B4B-8F38-45C3-8A42-90250E912F0C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {2F097B4B-8F38-45C3-8A42-90250E912F0C}.Release|Any CPU.Build.0 = Release|Any CPU + {2F097B4B-8F38-45C3-8A42-90250E912F0C}.Release|x64.ActiveCfg = Release|Any CPU + {2F097B4B-8F38-45C3-8A42-90250E912F0C}.Release|x64.Build.0 = Release|Any CPU + {2F097B4B-8F38-45C3-8A42-90250E912F0C}.Release|x86.ActiveCfg = Release|Any CPU + {2F097B4B-8F38-45C3-8A42-90250E912F0C}.Release|x86.Build.0 = Release|Any CPU + {7C22F6B7-095E-459B-BCCF-87098EA9F192}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7C22F6B7-095E-459B-BCCF-87098EA9F192}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7C22F6B7-095E-459B-BCCF-87098EA9F192}.Debug|x64.ActiveCfg = Debug|Any CPU + {7C22F6B7-095E-459B-BCCF-87098EA9F192}.Debug|x64.Build.0 = Debug|Any CPU + {7C22F6B7-095E-459B-BCCF-87098EA9F192}.Debug|x86.ActiveCfg = Debug|Any CPU + {7C22F6B7-095E-459B-BCCF-87098EA9F192}.Debug|x86.Build.0 = Debug|Any CPU + {7C22F6B7-095E-459B-BCCF-87098EA9F192}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7C22F6B7-095E-459B-BCCF-87098EA9F192}.Release|Any CPU.Build.0 = Release|Any CPU + {7C22F6B7-095E-459B-BCCF-87098EA9F192}.Release|x64.ActiveCfg = Release|Any CPU + {7C22F6B7-095E-459B-BCCF-87098EA9F192}.Release|x64.Build.0 = Release|Any CPU + {7C22F6B7-095E-459B-BCCF-87098EA9F192}.Release|x86.ActiveCfg = Release|Any CPU + {7C22F6B7-095E-459B-BCCF-87098EA9F192}.Release|x86.Build.0 = Release|Any CPU + {972CEB4D-510B-4701-B4A2-F14A85F11CC7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {972CEB4D-510B-4701-B4A2-F14A85F11CC7}.Debug|Any CPU.Build.0 = Debug|Any CPU + {972CEB4D-510B-4701-B4A2-F14A85F11CC7}.Debug|x64.ActiveCfg = Debug|Any CPU + {972CEB4D-510B-4701-B4A2-F14A85F11CC7}.Debug|x64.Build.0 = Debug|Any CPU + {972CEB4D-510B-4701-B4A2-F14A85F11CC7}.Debug|x86.ActiveCfg = Debug|Any CPU + {972CEB4D-510B-4701-B4A2-F14A85F11CC7}.Debug|x86.Build.0 = Debug|Any CPU + {972CEB4D-510B-4701-B4A2-F14A85F11CC7}.Release|Any CPU.ActiveCfg = Release|Any CPU + {972CEB4D-510B-4701-B4A2-F14A85F11CC7}.Release|Any CPU.Build.0 = Release|Any CPU + {972CEB4D-510B-4701-B4A2-F14A85F11CC7}.Release|x64.ActiveCfg = Release|Any CPU + {972CEB4D-510B-4701-B4A2-F14A85F11CC7}.Release|x64.Build.0 = Release|Any CPU + {972CEB4D-510B-4701-B4A2-F14A85F11CC7}.Release|x86.ActiveCfg = Release|Any CPU + {972CEB4D-510B-4701-B4A2-F14A85F11CC7}.Release|x86.Build.0 = Release|Any CPU + {7B4C9EAC-316E-4890-A715-7BB9C1577F96}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7B4C9EAC-316E-4890-A715-7BB9C1577F96}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7B4C9EAC-316E-4890-A715-7BB9C1577F96}.Debug|x64.ActiveCfg = Debug|Any CPU + {7B4C9EAC-316E-4890-A715-7BB9C1577F96}.Debug|x64.Build.0 = Debug|Any CPU + {7B4C9EAC-316E-4890-A715-7BB9C1577F96}.Debug|x86.ActiveCfg = 
Debug|Any CPU + {7B4C9EAC-316E-4890-A715-7BB9C1577F96}.Debug|x86.Build.0 = Debug|Any CPU + {7B4C9EAC-316E-4890-A715-7BB9C1577F96}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7B4C9EAC-316E-4890-A715-7BB9C1577F96}.Release|Any CPU.Build.0 = Release|Any CPU + {7B4C9EAC-316E-4890-A715-7BB9C1577F96}.Release|x64.ActiveCfg = Release|Any CPU + {7B4C9EAC-316E-4890-A715-7BB9C1577F96}.Release|x64.Build.0 = Release|Any CPU + {7B4C9EAC-316E-4890-A715-7BB9C1577F96}.Release|x86.ActiveCfg = Release|Any CPU + {7B4C9EAC-316E-4890-A715-7BB9C1577F96}.Release|x86.Build.0 = Release|Any CPU + {D640DBB2-4251-44B3-B949-75FC6BF02B71}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D640DBB2-4251-44B3-B949-75FC6BF02B71}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D640DBB2-4251-44B3-B949-75FC6BF02B71}.Debug|x64.ActiveCfg = Debug|Any CPU + {D640DBB2-4251-44B3-B949-75FC6BF02B71}.Debug|x64.Build.0 = Debug|Any CPU + {D640DBB2-4251-44B3-B949-75FC6BF02B71}.Debug|x86.ActiveCfg = Debug|Any CPU + {D640DBB2-4251-44B3-B949-75FC6BF02B71}.Debug|x86.Build.0 = Debug|Any CPU + {D640DBB2-4251-44B3-B949-75FC6BF02B71}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D640DBB2-4251-44B3-B949-75FC6BF02B71}.Release|Any CPU.Build.0 = Release|Any CPU + {D640DBB2-4251-44B3-B949-75FC6BF02B71}.Release|x64.ActiveCfg = Release|Any CPU + {D640DBB2-4251-44B3-B949-75FC6BF02B71}.Release|x64.Build.0 = Release|Any CPU + {D640DBB2-4251-44B3-B949-75FC6BF02B71}.Release|x86.ActiveCfg = Release|Any CPU + {D640DBB2-4251-44B3-B949-75FC6BF02B71}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(NestedProjects) = preSolution + {382FA1C0-5F5F-424A-8485-7FED0ADE9F6B} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {33770BC5-6802-45AD-A866-10027DD360E2} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {56209C24-3CE7-4F8E-8B8C-F052CB919DE2} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {6A62C12A-8742-4D1E-AEA7-8DDC3C722AC4} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {C48F2207-8974-43A4-B3D6-6A1761C37605} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {37FA8A12-E96E-4F23-AB72-8FA9DD9DA082} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {5ED2BF16-72CE-4DF1-917C-6D832427AE6F} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {2F097B4B-8F38-45C3-8A42-90250E912F0C} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {7C22F6B7-095E-459B-BCCF-87098EA9F192} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {972CEB4D-510B-4701-B4A2-F14A85F11CC7} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {7B4C9EAC-316E-4890-A715-7BB9C1577F96} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {D640DBB2-4251-44B3-B949-75FC6BF02B71} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + EndGlobalSection +EndGlobal diff --git a/src/StellaOps.Scheduler.ImpactIndex/AGENTS.md b/src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/AGENTS.md similarity index 100% rename from src/StellaOps.Scheduler.ImpactIndex/AGENTS.md rename to src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/AGENTS.md diff --git a/src/StellaOps.Scheduler.ImpactIndex/FixtureImpactIndex.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/FixtureImpactIndex.cs similarity index 97% rename from src/StellaOps.Scheduler.ImpactIndex/FixtureImpactIndex.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/FixtureImpactIndex.cs index 436cac46..1eb4baa7 100644 --- a/src/StellaOps.Scheduler.ImpactIndex/FixtureImpactIndex.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/FixtureImpactIndex.cs @@ -1,615 +1,615 @@ -using 
System.Collections.Immutable; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; -using System.IO; -using System.IO.Enumeration; -using System.Reflection; -using System.Text.Json; -using System.Text.Json.Serialization; -using Microsoft.Extensions.Logging; -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.ImpactIndex; - -/// <summary> -/// Fixture-backed implementation of <see cref="IImpactIndex"/> used while the real index is under construction. -/// </summary> -public sealed class FixtureImpactIndex : IImpactIndex -{ - private static readonly JsonSerializerOptions SerializerOptions = new() - { - PropertyNameCaseInsensitive = true, - ReadCommentHandling = JsonCommentHandling.Skip, - }; - - private readonly ImpactIndexStubOptions _options; - private readonly TimeProvider _timeProvider; - private readonly ILogger<FixtureImpactIndex> _logger; - private readonly SemaphoreSlim _initializationLock = new(1, 1); - private FixtureIndexState? _state; - - public FixtureImpactIndex( - ImpactIndexStubOptions options, - TimeProvider? timeProvider, - ILogger<FixtureImpactIndex> logger) - { - _options = options ?? throw new ArgumentNullException(nameof(options)); - _timeProvider = timeProvider ?? TimeProvider.System; - _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - } - - public async ValueTask<ImpactSet> ResolveByPurlsAsync( - IEnumerable<string> purls, - bool usageOnly, - Selector selector, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(purls); - ArgumentNullException.ThrowIfNull(selector); - - var state = await EnsureInitializedAsync(cancellationToken).ConfigureAwait(false); - var normalizedPurls = NormalizeKeys(purls); - - if (normalizedPurls.Length == 0) - { - return CreateImpactSet(state, selector, Enumerable.Empty<FixtureMatch>(), usageOnly); - } - - var matches = new List<FixtureMatch>(); - foreach (var purl in normalizedPurls) - { - cancellationToken.ThrowIfCancellationRequested(); - - if (!state.PurlIndex.TryGetValue(purl, out var componentMatches)) - { - continue; - } - - foreach (var component in componentMatches) - { - var usedByEntrypoint = component.Component.UsedByEntrypoint; - if (usageOnly && !usedByEntrypoint) - { - continue; - } - - matches.Add(new FixtureMatch(component.Image, usedByEntrypoint)); - } - } - - return CreateImpactSet(state, selector, matches, usageOnly); - } - - public async ValueTask<ImpactSet> ResolveByVulnerabilitiesAsync( - IEnumerable<string> vulnerabilityIds, - bool usageOnly, - Selector selector, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(vulnerabilityIds); - ArgumentNullException.ThrowIfNull(selector); - - var state = await EnsureInitializedAsync(cancellationToken).ConfigureAwait(false); - - // The stub does not maintain a vulnerability → purl projection, so we return an empty result. 
- if (_logger.IsEnabled(LogLevel.Debug)) - { - var first = vulnerabilityIds.FirstOrDefault(static id => !string.IsNullOrWhiteSpace(id)); - if (first is not null) - { - _logger.LogDebug( - "ImpactIndex stub received ResolveByVulnerabilitiesAsync for '{VulnerabilityId}' but mappings are not available.", - first); - } - } - - return CreateImpactSet(state, selector, Enumerable.Empty<FixtureMatch>(), usageOnly); - } - - public async ValueTask<ImpactSet> ResolveAllAsync( - Selector selector, - bool usageOnly, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(selector); - - var state = await EnsureInitializedAsync(cancellationToken).ConfigureAwait(false); - - var matches = state.ImagesByDigest.Values - .Select(image => new FixtureMatch(image, image.UsedByEntrypoint)) - .Where(match => !usageOnly || match.UsedByEntrypoint); - - return CreateImpactSet(state, selector, matches, usageOnly); - } - - private async Task<FixtureIndexState> EnsureInitializedAsync(CancellationToken cancellationToken) - { - if (_state is not null) - { - return _state; - } - - await _initializationLock.WaitAsync(cancellationToken).ConfigureAwait(false); - try - { - if (_state is not null) - { - return _state; - } - - var state = await LoadAsync(cancellationToken).ConfigureAwait(false); - _state = state; - _logger.LogInformation( - "ImpactIndex stub loaded {ImageCount} fixture images from {SourceDescription}.", - state.ImagesByDigest.Count, - state.SourceDescription); - return state; - } - finally - { - _initializationLock.Release(); - } - } - - private async Task<FixtureIndexState> LoadAsync(CancellationToken cancellationToken) - { - var images = new List<FixtureImage>(); - string? sourceDescription = null; - - if (!string.IsNullOrWhiteSpace(_options.FixtureDirectory)) - { - var directory = ResolveDirectoryPath(_options.FixtureDirectory!); - if (Directory.Exists(directory)) - { - images.AddRange(await LoadFromDirectoryAsync(directory, cancellationToken).ConfigureAwait(false)); - sourceDescription = directory; - } - else - { - _logger.LogWarning( - "ImpactIndex stub fixture directory '{Directory}' was not found. 
Falling back to embedded fixtures.", - directory); - } - } - - if (images.Count == 0) - { - images.AddRange(await LoadFromResourcesAsync(cancellationToken).ConfigureAwait(false)); - sourceDescription ??= "embedded:scheduler-impact-index-fixtures"; - } - - if (images.Count == 0) - { - throw new InvalidOperationException("No BOM-Index fixtures were found for the ImpactIndex stub."); - } - - return BuildState(images, sourceDescription!, _options.SnapshotId); - } - - private static string ResolveDirectoryPath(string path) - { - if (Path.IsPathRooted(path)) - { - return path; - } - - var basePath = AppContext.BaseDirectory; - return Path.GetFullPath(Path.Combine(basePath, path)); - } - - private static async Task<IReadOnlyList<FixtureImage>> LoadFromDirectoryAsync( - string directory, - CancellationToken cancellationToken) - { - var results = new List<FixtureImage>(); - - foreach (var file in Directory.EnumerateFiles(directory, "bom-index.json", SearchOption.AllDirectories) - .OrderBy(static file => file, StringComparer.Ordinal)) - { - cancellationToken.ThrowIfCancellationRequested(); - - await using var stream = File.OpenRead(file); - var document = await JsonSerializer.DeserializeAsync<BomIndexDocument>(stream, SerializerOptions, cancellationToken) - .ConfigureAwait(false); - if (document is null) - { - continue; - } - - results.Add(CreateFixtureImage(document)); - } - - return results; - } - - private static async Task<IReadOnlyList<FixtureImage>> LoadFromResourcesAsync(CancellationToken cancellationToken) - { - var assembly = typeof(FixtureImpactIndex).Assembly; - var resourceNames = assembly - .GetManifestResourceNames() - .Where(static name => name.EndsWith(".bom-index.json", StringComparison.OrdinalIgnoreCase)) - .OrderBy(static name => name, StringComparer.Ordinal) - .ToArray(); - - var results = new List<FixtureImage>(resourceNames.Length); - - foreach (var resourceName in resourceNames) - { - cancellationToken.ThrowIfCancellationRequested(); - - await using var stream = assembly.GetManifestResourceStream(resourceName); - if (stream is null) - { - continue; - } - - var document = await JsonSerializer.DeserializeAsync<BomIndexDocument>(stream, SerializerOptions, cancellationToken) - .ConfigureAwait(false); - - if (document is null) - { - continue; - } - - results.Add(CreateFixtureImage(document)); - } - - return results; - } - - private static FixtureIndexState BuildState( - IReadOnlyList<FixtureImage> images, - string sourceDescription, - string snapshotId) - { - var imagesByDigest = images - .GroupBy(static image => image.Digest, StringComparer.OrdinalIgnoreCase) - .ToImmutableDictionary( - static group => group.Key, - static group => group - .OrderBy(static image => image.Repository, StringComparer.Ordinal) - .ThenBy(static image => image.Registry, StringComparer.Ordinal) - .ThenBy(static image => image.Tags.Length, Comparer<int>.Default) - .First(), - StringComparer.OrdinalIgnoreCase); - - var purlIndexBuilder = new Dictionary<string, List<FixtureComponentMatch>>(StringComparer.OrdinalIgnoreCase); - foreach (var image in images) - { - foreach (var component in image.Components) - { - if (!purlIndexBuilder.TryGetValue(component.Purl, out var list)) - { - list = new List<FixtureComponentMatch>(); - purlIndexBuilder[component.Purl] = list; - } - - list.Add(new FixtureComponentMatch(image, component)); - } - } - - var purlIndex = purlIndexBuilder.ToImmutableDictionary( - static entry => entry.Key, - static entry => entry.Value - .OrderBy(static item => item.Image.Digest, 
StringComparer.Ordinal) - .Select(static item => new FixtureComponentMatch(item.Image, item.Component)) - .ToImmutableArray(), - StringComparer.OrdinalIgnoreCase); - - var generatedAt = images.Count == 0 - ? DateTimeOffset.UnixEpoch - : images.Max(static image => image.GeneratedAt); - - return new FixtureIndexState(imagesByDigest, purlIndex, generatedAt, sourceDescription, snapshotId); - } - - private ImpactSet CreateImpactSet( - FixtureIndexState state, - Selector selector, - IEnumerable<FixtureMatch> matches, - bool usageOnly) - { - var aggregated = new Dictionary<string, ImpactImageBuilder>(StringComparer.OrdinalIgnoreCase); - - foreach (var match in matches) - { - if (!ImageMatchesSelector(match.Image, selector)) - { - continue; - } - - if (!aggregated.TryGetValue(match.Image.Digest, out var builder)) - { - builder = new ImpactImageBuilder(match.Image); - aggregated[match.Image.Digest] = builder; - } - - builder.MarkUsedByEntrypoint(match.UsedByEntrypoint); - } - - var images = aggregated.Values - .Select(static builder => builder.Build()) - .OrderBy(static image => image.ImageDigest, StringComparer.Ordinal) - .ToImmutableArray(); - - return new ImpactSet( - selector, - images, - usageOnly, - state.GeneratedAt == DateTimeOffset.UnixEpoch - ? _timeProvider.GetUtcNow() - : state.GeneratedAt, - images.Length, - state.SnapshotId, - SchedulerSchemaVersions.ImpactSet); - } - - private static bool ImageMatchesSelector(FixtureImage image, Selector selector) - { - if (selector is null) - { - return true; - } - - if (selector.Digests.Length > 0 && - !selector.Digests.Contains(image.Digest, StringComparer.OrdinalIgnoreCase)) - { - return false; - } - - if (selector.Repositories.Length > 0) - { - var repositoryMatch = selector.Repositories.Any(repo => - string.Equals(repo, image.Repository, StringComparison.OrdinalIgnoreCase) || - string.Equals(repo, $"{image.Registry}/{image.Repository}", StringComparison.OrdinalIgnoreCase)); - - if (!repositoryMatch) - { - return false; - } - } - - if (selector.Namespaces.Length > 0) - { - if (image.Namespaces.IsDefaultOrEmpty) - { - return false; - } - - var namespaceMatch = selector.Namespaces.Any(namespaceId => - image.Namespaces.Contains(namespaceId, StringComparer.OrdinalIgnoreCase)); - - if (!namespaceMatch) - { - return false; - } - } - - if (selector.IncludeTags.Length > 0) - { - if (image.Tags.IsDefaultOrEmpty) - { - return false; - } - - var tagMatch = selector.IncludeTags.Any(pattern => - MatchesAnyTag(image.Tags, pattern)); - - if (!tagMatch) - { - return false; - } - } - - if (selector.Labels.Length > 0) - { - if (image.Labels.Count == 0) - { - return false; - } - - foreach (var labelSelector in selector.Labels) - { - if (!image.Labels.TryGetValue(labelSelector.Key, out var value)) - { - return false; - } - - if (labelSelector.Values.Length > 0 && - !labelSelector.Values.Contains(value, StringComparer.OrdinalIgnoreCase)) - { - return false; - } - } - } - - return selector.Scope switch - { - SelectorScope.ByDigest => selector.Digests.Length == 0 - ? true - : selector.Digests.Contains(image.Digest, StringComparer.OrdinalIgnoreCase), - SelectorScope.ByRepository => selector.Repositories.Length == 0 - ? true - : selector.Repositories.Any(repo => - string.Equals(repo, image.Repository, StringComparison.OrdinalIgnoreCase) || - string.Equals(repo, $"{image.Registry}/{image.Repository}", StringComparison.OrdinalIgnoreCase)), - SelectorScope.ByNamespace => selector.Namespaces.Length == 0 - ? 
true - : !image.Namespaces.IsDefaultOrEmpty && - selector.Namespaces.Any(namespaceId => - image.Namespaces.Contains(namespaceId, StringComparer.OrdinalIgnoreCase)), - SelectorScope.ByLabels => selector.Labels.Length == 0 - ? true - : selector.Labels.All(label => - image.Labels.TryGetValue(label.Key, out var value) && - (label.Values.Length == 0 || label.Values.Contains(value, StringComparer.OrdinalIgnoreCase))), - _ => true, - }; - } - - private static bool MatchesAnyTag(ImmutableArray<string> tags, string pattern) - { - foreach (var tag in tags) - { - if (FileSystemName.MatchesSimpleExpression(pattern, tag, ignoreCase: true)) - { - return true; - } - } - - return false; - } - - private static FixtureImage CreateFixtureImage(BomIndexDocument document) - { - if (document.Image is null) - { - throw new InvalidOperationException("BOM-Index image metadata is required."); - } - - var digest = Validation.EnsureDigestFormat(document.Image.Digest, "image.digest"); - var (registry, repository) = SplitRepository(document.Image.Repository); - - var tags = string.IsNullOrWhiteSpace(document.Image.Tag) - ? ImmutableArray<string>.Empty - : ImmutableArray.Create(document.Image.Tag.Trim()); - - var components = (document.Components ?? Array.Empty<BomIndexComponent>()) - .Where(static component => !string.IsNullOrWhiteSpace(component.Purl)) - .Select(component => new FixtureComponent( - component.Purl!.Trim(), - component.Usage?.Any(static usage => - usage.Equals("runtime", StringComparison.OrdinalIgnoreCase) || - usage.Equals("usedByEntrypoint", StringComparison.OrdinalIgnoreCase)) == true)) - .OrderBy(static component => component.Purl, StringComparer.OrdinalIgnoreCase) - .ToImmutableArray(); - - return new FixtureImage( - digest, - registry, - repository, - ImmutableArray<string>.Empty, - tags, - ImmutableSortedDictionary<string, string>.Empty.WithComparers(StringComparer.OrdinalIgnoreCase), - components, - document.GeneratedAt == default ? 
DateTimeOffset.UnixEpoch : document.GeneratedAt.ToUniversalTime(), - components.Any(static component => component.UsedByEntrypoint)); - } - - private static (string Registry, string Repository) SplitRepository(string repository) - { - var normalized = Validation.EnsureNotNullOrWhiteSpace(repository, nameof(repository)); - var separatorIndex = normalized.IndexOf('/'); - if (separatorIndex < 0) - { - return ("docker.io", normalized); - } - - var registry = normalized[..separatorIndex]; - var repo = normalized[(separatorIndex + 1)..]; - if (string.IsNullOrWhiteSpace(repo)) - { - throw new ArgumentException("Repository segment is required after registry.", nameof(repository)); - } - - return (registry.Trim(), repo.Trim()); - } - - private static string[] NormalizeKeys(IEnumerable<string> values) - { - return values - .Where(static value => !string.IsNullOrWhiteSpace(value)) - .Select(static value => value.Trim()) - .Distinct(StringComparer.OrdinalIgnoreCase) - .ToArray(); - } - - private readonly record struct FixtureMatch(FixtureImage Image, bool UsedByEntrypoint); - - private sealed record FixtureImage( - string Digest, - string Registry, - string Repository, - ImmutableArray<string> Namespaces, - ImmutableArray<string> Tags, - ImmutableSortedDictionary<string, string> Labels, - ImmutableArray<FixtureComponent> Components, - DateTimeOffset GeneratedAt, - bool UsedByEntrypoint); - - private sealed record FixtureComponent(string Purl, bool UsedByEntrypoint); - - private sealed record FixtureComponentMatch(FixtureImage Image, FixtureComponent Component); - - private sealed record FixtureIndexState( - ImmutableDictionary<string, FixtureImage> ImagesByDigest, - ImmutableDictionary<string, ImmutableArray<FixtureComponentMatch>> PurlIndex, - DateTimeOffset GeneratedAt, - string SourceDescription, - string SnapshotId); - - private sealed class ImpactImageBuilder - { - private readonly FixtureImage _image; - private bool _usedByEntrypoint; - - public ImpactImageBuilder(FixtureImage image) - { - _image = image; - } - - public void MarkUsedByEntrypoint(bool usedByEntrypoint) - { - _usedByEntrypoint |= usedByEntrypoint; - } - - public ImpactImage Build() - { - return new ImpactImage( - _image.Digest, - _image.Registry, - _image.Repository, - _image.Namespaces, - _image.Tags, - _usedByEntrypoint, - _image.Labels); - } - } - - private sealed record BomIndexDocument - { - [JsonPropertyName("schema")] - public string? Schema { get; init; } - - [JsonPropertyName("image")] - public BomIndexImage? Image { get; init; } - - [JsonPropertyName("generatedAt")] - public DateTimeOffset GeneratedAt { get; init; } - - [JsonPropertyName("components")] - public IReadOnlyList<BomIndexComponent>? Components { get; init; } - } - - private sealed record BomIndexImage - { - [JsonPropertyName("repository")] - public string Repository { get; init; } = string.Empty; - - [JsonPropertyName("digest")] - public string Digest { get; init; } = string.Empty; - - [JsonPropertyName("tag")] - public string? Tag { get; init; } - } - - private sealed record BomIndexComponent - { - [JsonPropertyName("purl")] - public string? Purl { get; init; } - - [JsonPropertyName("usage")] - public IReadOnlyList<string>? 
Usage { get; init; } - } -} +using System.Collections.Immutable; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using System.IO; +using System.IO.Enumeration; +using System.Reflection; +using System.Text.Json; +using System.Text.Json.Serialization; +using Microsoft.Extensions.Logging; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.ImpactIndex; + +/// <summary> +/// Fixture-backed implementation of <see cref="IImpactIndex"/> used while the real index is under construction. +/// </summary> +public sealed class FixtureImpactIndex : IImpactIndex +{ + private static readonly JsonSerializerOptions SerializerOptions = new() + { + PropertyNameCaseInsensitive = true, + ReadCommentHandling = JsonCommentHandling.Skip, + }; + + private readonly ImpactIndexStubOptions _options; + private readonly TimeProvider _timeProvider; + private readonly ILogger<FixtureImpactIndex> _logger; + private readonly SemaphoreSlim _initializationLock = new(1, 1); + private FixtureIndexState? _state; + + public FixtureImpactIndex( + ImpactIndexStubOptions options, + TimeProvider? timeProvider, + ILogger<FixtureImpactIndex> logger) + { + _options = options ?? throw new ArgumentNullException(nameof(options)); + _timeProvider = timeProvider ?? TimeProvider.System; + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public async ValueTask<ImpactSet> ResolveByPurlsAsync( + IEnumerable<string> purls, + bool usageOnly, + Selector selector, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(purls); + ArgumentNullException.ThrowIfNull(selector); + + var state = await EnsureInitializedAsync(cancellationToken).ConfigureAwait(false); + var normalizedPurls = NormalizeKeys(purls); + + if (normalizedPurls.Length == 0) + { + return CreateImpactSet(state, selector, Enumerable.Empty<FixtureMatch>(), usageOnly); + } + + var matches = new List<FixtureMatch>(); + foreach (var purl in normalizedPurls) + { + cancellationToken.ThrowIfCancellationRequested(); + + if (!state.PurlIndex.TryGetValue(purl, out var componentMatches)) + { + continue; + } + + foreach (var component in componentMatches) + { + var usedByEntrypoint = component.Component.UsedByEntrypoint; + if (usageOnly && !usedByEntrypoint) + { + continue; + } + + matches.Add(new FixtureMatch(component.Image, usedByEntrypoint)); + } + } + + return CreateImpactSet(state, selector, matches, usageOnly); + } + + public async ValueTask<ImpactSet> ResolveByVulnerabilitiesAsync( + IEnumerable<string> vulnerabilityIds, + bool usageOnly, + Selector selector, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(vulnerabilityIds); + ArgumentNullException.ThrowIfNull(selector); + + var state = await EnsureInitializedAsync(cancellationToken).ConfigureAwait(false); + + // The stub does not maintain a vulnerability → purl projection, so we return an empty result. 
+ if (_logger.IsEnabled(LogLevel.Debug)) + { + var first = vulnerabilityIds.FirstOrDefault(static id => !string.IsNullOrWhiteSpace(id)); + if (first is not null) + { + _logger.LogDebug( + "ImpactIndex stub received ResolveByVulnerabilitiesAsync for '{VulnerabilityId}' but mappings are not available.", + first); + } + } + + return CreateImpactSet(state, selector, Enumerable.Empty<FixtureMatch>(), usageOnly); + } + + public async ValueTask<ImpactSet> ResolveAllAsync( + Selector selector, + bool usageOnly, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(selector); + + var state = await EnsureInitializedAsync(cancellationToken).ConfigureAwait(false); + + var matches = state.ImagesByDigest.Values + .Select(image => new FixtureMatch(image, image.UsedByEntrypoint)) + .Where(match => !usageOnly || match.UsedByEntrypoint); + + return CreateImpactSet(state, selector, matches, usageOnly); + } + + private async Task<FixtureIndexState> EnsureInitializedAsync(CancellationToken cancellationToken) + { + if (_state is not null) + { + return _state; + } + + await _initializationLock.WaitAsync(cancellationToken).ConfigureAwait(false); + try + { + if (_state is not null) + { + return _state; + } + + var state = await LoadAsync(cancellationToken).ConfigureAwait(false); + _state = state; + _logger.LogInformation( + "ImpactIndex stub loaded {ImageCount} fixture images from {SourceDescription}.", + state.ImagesByDigest.Count, + state.SourceDescription); + return state; + } + finally + { + _initializationLock.Release(); + } + } + + private async Task<FixtureIndexState> LoadAsync(CancellationToken cancellationToken) + { + var images = new List<FixtureImage>(); + string? sourceDescription = null; + + if (!string.IsNullOrWhiteSpace(_options.FixtureDirectory)) + { + var directory = ResolveDirectoryPath(_options.FixtureDirectory!); + if (Directory.Exists(directory)) + { + images.AddRange(await LoadFromDirectoryAsync(directory, cancellationToken).ConfigureAwait(false)); + sourceDescription = directory; + } + else + { + _logger.LogWarning( + "ImpactIndex stub fixture directory '{Directory}' was not found. 
Falling back to embedded fixtures.", + directory); + } + } + + if (images.Count == 0) + { + images.AddRange(await LoadFromResourcesAsync(cancellationToken).ConfigureAwait(false)); + sourceDescription ??= "embedded:scheduler-impact-index-fixtures"; + } + + if (images.Count == 0) + { + throw new InvalidOperationException("No BOM-Index fixtures were found for the ImpactIndex stub."); + } + + return BuildState(images, sourceDescription!, _options.SnapshotId); + } + + private static string ResolveDirectoryPath(string path) + { + if (Path.IsPathRooted(path)) + { + return path; + } + + var basePath = AppContext.BaseDirectory; + return Path.GetFullPath(Path.Combine(basePath, path)); + } + + private static async Task<IReadOnlyList<FixtureImage>> LoadFromDirectoryAsync( + string directory, + CancellationToken cancellationToken) + { + var results = new List<FixtureImage>(); + + foreach (var file in Directory.EnumerateFiles(directory, "bom-index.json", SearchOption.AllDirectories) + .OrderBy(static file => file, StringComparer.Ordinal)) + { + cancellationToken.ThrowIfCancellationRequested(); + + await using var stream = File.OpenRead(file); + var document = await JsonSerializer.DeserializeAsync<BomIndexDocument>(stream, SerializerOptions, cancellationToken) + .ConfigureAwait(false); + if (document is null) + { + continue; + } + + results.Add(CreateFixtureImage(document)); + } + + return results; + } + + private static async Task<IReadOnlyList<FixtureImage>> LoadFromResourcesAsync(CancellationToken cancellationToken) + { + var assembly = typeof(FixtureImpactIndex).Assembly; + var resourceNames = assembly + .GetManifestResourceNames() + .Where(static name => name.EndsWith(".bom-index.json", StringComparison.OrdinalIgnoreCase)) + .OrderBy(static name => name, StringComparer.Ordinal) + .ToArray(); + + var results = new List<FixtureImage>(resourceNames.Length); + + foreach (var resourceName in resourceNames) + { + cancellationToken.ThrowIfCancellationRequested(); + + await using var stream = assembly.GetManifestResourceStream(resourceName); + if (stream is null) + { + continue; + } + + var document = await JsonSerializer.DeserializeAsync<BomIndexDocument>(stream, SerializerOptions, cancellationToken) + .ConfigureAwait(false); + + if (document is null) + { + continue; + } + + results.Add(CreateFixtureImage(document)); + } + + return results; + } + + private static FixtureIndexState BuildState( + IReadOnlyList<FixtureImage> images, + string sourceDescription, + string snapshotId) + { + var imagesByDigest = images + .GroupBy(static image => image.Digest, StringComparer.OrdinalIgnoreCase) + .ToImmutableDictionary( + static group => group.Key, + static group => group + .OrderBy(static image => image.Repository, StringComparer.Ordinal) + .ThenBy(static image => image.Registry, StringComparer.Ordinal) + .ThenBy(static image => image.Tags.Length, Comparer<int>.Default) + .First(), + StringComparer.OrdinalIgnoreCase); + + var purlIndexBuilder = new Dictionary<string, List<FixtureComponentMatch>>(StringComparer.OrdinalIgnoreCase); + foreach (var image in images) + { + foreach (var component in image.Components) + { + if (!purlIndexBuilder.TryGetValue(component.Purl, out var list)) + { + list = new List<FixtureComponentMatch>(); + purlIndexBuilder[component.Purl] = list; + } + + list.Add(new FixtureComponentMatch(image, component)); + } + } + + var purlIndex = purlIndexBuilder.ToImmutableDictionary( + static entry => entry.Key, + static entry => entry.Value + .OrderBy(static item => item.Image.Digest, 
StringComparer.Ordinal) + .Select(static item => new FixtureComponentMatch(item.Image, item.Component)) + .ToImmutableArray(), + StringComparer.OrdinalIgnoreCase); + + var generatedAt = images.Count == 0 + ? DateTimeOffset.UnixEpoch + : images.Max(static image => image.GeneratedAt); + + return new FixtureIndexState(imagesByDigest, purlIndex, generatedAt, sourceDescription, snapshotId); + } + + private ImpactSet CreateImpactSet( + FixtureIndexState state, + Selector selector, + IEnumerable<FixtureMatch> matches, + bool usageOnly) + { + var aggregated = new Dictionary<string, ImpactImageBuilder>(StringComparer.OrdinalIgnoreCase); + + foreach (var match in matches) + { + if (!ImageMatchesSelector(match.Image, selector)) + { + continue; + } + + if (!aggregated.TryGetValue(match.Image.Digest, out var builder)) + { + builder = new ImpactImageBuilder(match.Image); + aggregated[match.Image.Digest] = builder; + } + + builder.MarkUsedByEntrypoint(match.UsedByEntrypoint); + } + + var images = aggregated.Values + .Select(static builder => builder.Build()) + .OrderBy(static image => image.ImageDigest, StringComparer.Ordinal) + .ToImmutableArray(); + + return new ImpactSet( + selector, + images, + usageOnly, + state.GeneratedAt == DateTimeOffset.UnixEpoch + ? _timeProvider.GetUtcNow() + : state.GeneratedAt, + images.Length, + state.SnapshotId, + SchedulerSchemaVersions.ImpactSet); + } + + private static bool ImageMatchesSelector(FixtureImage image, Selector selector) + { + if (selector is null) + { + return true; + } + + if (selector.Digests.Length > 0 && + !selector.Digests.Contains(image.Digest, StringComparer.OrdinalIgnoreCase)) + { + return false; + } + + if (selector.Repositories.Length > 0) + { + var repositoryMatch = selector.Repositories.Any(repo => + string.Equals(repo, image.Repository, StringComparison.OrdinalIgnoreCase) || + string.Equals(repo, $"{image.Registry}/{image.Repository}", StringComparison.OrdinalIgnoreCase)); + + if (!repositoryMatch) + { + return false; + } + } + + if (selector.Namespaces.Length > 0) + { + if (image.Namespaces.IsDefaultOrEmpty) + { + return false; + } + + var namespaceMatch = selector.Namespaces.Any(namespaceId => + image.Namespaces.Contains(namespaceId, StringComparer.OrdinalIgnoreCase)); + + if (!namespaceMatch) + { + return false; + } + } + + if (selector.IncludeTags.Length > 0) + { + if (image.Tags.IsDefaultOrEmpty) + { + return false; + } + + var tagMatch = selector.IncludeTags.Any(pattern => + MatchesAnyTag(image.Tags, pattern)); + + if (!tagMatch) + { + return false; + } + } + + if (selector.Labels.Length > 0) + { + if (image.Labels.Count == 0) + { + return false; + } + + foreach (var labelSelector in selector.Labels) + { + if (!image.Labels.TryGetValue(labelSelector.Key, out var value)) + { + return false; + } + + if (labelSelector.Values.Length > 0 && + !labelSelector.Values.Contains(value, StringComparer.OrdinalIgnoreCase)) + { + return false; + } + } + } + + return selector.Scope switch + { + SelectorScope.ByDigest => selector.Digests.Length == 0 + ? true + : selector.Digests.Contains(image.Digest, StringComparer.OrdinalIgnoreCase), + SelectorScope.ByRepository => selector.Repositories.Length == 0 + ? true + : selector.Repositories.Any(repo => + string.Equals(repo, image.Repository, StringComparison.OrdinalIgnoreCase) || + string.Equals(repo, $"{image.Registry}/{image.Repository}", StringComparison.OrdinalIgnoreCase)), + SelectorScope.ByNamespace => selector.Namespaces.Length == 0 + ? 
true + : !image.Namespaces.IsDefaultOrEmpty && + selector.Namespaces.Any(namespaceId => + image.Namespaces.Contains(namespaceId, StringComparer.OrdinalIgnoreCase)), + SelectorScope.ByLabels => selector.Labels.Length == 0 + ? true + : selector.Labels.All(label => + image.Labels.TryGetValue(label.Key, out var value) && + (label.Values.Length == 0 || label.Values.Contains(value, StringComparer.OrdinalIgnoreCase))), + _ => true, + }; + } + + private static bool MatchesAnyTag(ImmutableArray<string> tags, string pattern) + { + foreach (var tag in tags) + { + if (FileSystemName.MatchesSimpleExpression(pattern, tag, ignoreCase: true)) + { + return true; + } + } + + return false; + } + + private static FixtureImage CreateFixtureImage(BomIndexDocument document) + { + if (document.Image is null) + { + throw new InvalidOperationException("BOM-Index image metadata is required."); + } + + var digest = Validation.EnsureDigestFormat(document.Image.Digest, "image.digest"); + var (registry, repository) = SplitRepository(document.Image.Repository); + + var tags = string.IsNullOrWhiteSpace(document.Image.Tag) + ? ImmutableArray<string>.Empty + : ImmutableArray.Create(document.Image.Tag.Trim()); + + var components = (document.Components ?? Array.Empty<BomIndexComponent>()) + .Where(static component => !string.IsNullOrWhiteSpace(component.Purl)) + .Select(component => new FixtureComponent( + component.Purl!.Trim(), + component.Usage?.Any(static usage => + usage.Equals("runtime", StringComparison.OrdinalIgnoreCase) || + usage.Equals("usedByEntrypoint", StringComparison.OrdinalIgnoreCase)) == true)) + .OrderBy(static component => component.Purl, StringComparer.OrdinalIgnoreCase) + .ToImmutableArray(); + + return new FixtureImage( + digest, + registry, + repository, + ImmutableArray<string>.Empty, + tags, + ImmutableSortedDictionary<string, string>.Empty.WithComparers(StringComparer.OrdinalIgnoreCase), + components, + document.GeneratedAt == default ? 
DateTimeOffset.UnixEpoch : document.GeneratedAt.ToUniversalTime(), + components.Any(static component => component.UsedByEntrypoint)); + } + + private static (string Registry, string Repository) SplitRepository(string repository) + { + var normalized = Validation.EnsureNotNullOrWhiteSpace(repository, nameof(repository)); + var separatorIndex = normalized.IndexOf('/'); + if (separatorIndex < 0) + { + return ("docker.io", normalized); + } + + var registry = normalized[..separatorIndex]; + var repo = normalized[(separatorIndex + 1)..]; + if (string.IsNullOrWhiteSpace(repo)) + { + throw new ArgumentException("Repository segment is required after registry.", nameof(repository)); + } + + return (registry.Trim(), repo.Trim()); + } + + private static string[] NormalizeKeys(IEnumerable<string> values) + { + return values + .Where(static value => !string.IsNullOrWhiteSpace(value)) + .Select(static value => value.Trim()) + .Distinct(StringComparer.OrdinalIgnoreCase) + .ToArray(); + } + + private readonly record struct FixtureMatch(FixtureImage Image, bool UsedByEntrypoint); + + private sealed record FixtureImage( + string Digest, + string Registry, + string Repository, + ImmutableArray<string> Namespaces, + ImmutableArray<string> Tags, + ImmutableSortedDictionary<string, string> Labels, + ImmutableArray<FixtureComponent> Components, + DateTimeOffset GeneratedAt, + bool UsedByEntrypoint); + + private sealed record FixtureComponent(string Purl, bool UsedByEntrypoint); + + private sealed record FixtureComponentMatch(FixtureImage Image, FixtureComponent Component); + + private sealed record FixtureIndexState( + ImmutableDictionary<string, FixtureImage> ImagesByDigest, + ImmutableDictionary<string, ImmutableArray<FixtureComponentMatch>> PurlIndex, + DateTimeOffset GeneratedAt, + string SourceDescription, + string SnapshotId); + + private sealed class ImpactImageBuilder + { + private readonly FixtureImage _image; + private bool _usedByEntrypoint; + + public ImpactImageBuilder(FixtureImage image) + { + _image = image; + } + + public void MarkUsedByEntrypoint(bool usedByEntrypoint) + { + _usedByEntrypoint |= usedByEntrypoint; + } + + public ImpactImage Build() + { + return new ImpactImage( + _image.Digest, + _image.Registry, + _image.Repository, + _image.Namespaces, + _image.Tags, + _usedByEntrypoint, + _image.Labels); + } + } + + private sealed record BomIndexDocument + { + [JsonPropertyName("schema")] + public string? Schema { get; init; } + + [JsonPropertyName("image")] + public BomIndexImage? Image { get; init; } + + [JsonPropertyName("generatedAt")] + public DateTimeOffset GeneratedAt { get; init; } + + [JsonPropertyName("components")] + public IReadOnlyList<BomIndexComponent>? Components { get; init; } + } + + private sealed record BomIndexImage + { + [JsonPropertyName("repository")] + public string Repository { get; init; } = string.Empty; + + [JsonPropertyName("digest")] + public string Digest { get; init; } = string.Empty; + + [JsonPropertyName("tag")] + public string? Tag { get; init; } + } + + private sealed record BomIndexComponent + { + [JsonPropertyName("purl")] + public string? Purl { get; init; } + + [JsonPropertyName("usage")] + public IReadOnlyList<string>? 
Usage { get; init; } + } +} diff --git a/src/StellaOps.Scheduler.ImpactIndex/IImpactIndex.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/IImpactIndex.cs similarity index 97% rename from src/StellaOps.Scheduler.ImpactIndex/IImpactIndex.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/IImpactIndex.cs index 3730b100..8520db45 100644 --- a/src/StellaOps.Scheduler.ImpactIndex/IImpactIndex.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/IImpactIndex.cs @@ -1,46 +1,46 @@ -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.ImpactIndex; - -/// <summary> -/// Provides read access to the scheduler impact index. -/// </summary> -public interface IImpactIndex -{ - /// <summary> - /// Resolves the impacted image set for the provided package URLs. - /// </summary> - /// <param name="purls">Package URLs to look up.</param> - /// <param name="usageOnly">When true, restricts results to components marked as runtime/entrypoint usage.</param> - /// <param name="selector">Selector scoping the query.</param> - /// <param name="cancellationToken">Cancellation token.</param> - ValueTask<ImpactSet> ResolveByPurlsAsync( - IEnumerable<string> purls, - bool usageOnly, - Selector selector, - CancellationToken cancellationToken = default); - - /// <summary> - /// Resolves impacted images by vulnerability identifiers if the index has the mapping available. - /// </summary> - /// <param name="vulnerabilityIds">Vulnerability identifiers to look up.</param> - /// <param name="usageOnly">When true, restricts results to components marked as runtime/entrypoint usage.</param> - /// <param name="selector">Selector scoping the query.</param> - /// <param name="cancellationToken">Cancellation token.</param> - ValueTask<ImpactSet> ResolveByVulnerabilitiesAsync( - IEnumerable<string> vulnerabilityIds, - bool usageOnly, - Selector selector, - CancellationToken cancellationToken = default); - - /// <summary> - /// Resolves all tracked images for the provided selector. - /// </summary> - /// <param name="selector">Selector scoping the query.</param> - /// <param name="usageOnly">When true, restricts results to images with entrypoint usage.</param> - /// <param name="cancellationToken">Cancellation token.</param> - ValueTask<ImpactSet> ResolveAllAsync( - Selector selector, - bool usageOnly, - CancellationToken cancellationToken = default); -} +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.ImpactIndex; + +/// <summary> +/// Provides read access to the scheduler impact index. +/// </summary> +public interface IImpactIndex +{ + /// <summary> + /// Resolves the impacted image set for the provided package URLs. + /// </summary> + /// <param name="purls">Package URLs to look up.</param> + /// <param name="usageOnly">When true, restricts results to components marked as runtime/entrypoint usage.</param> + /// <param name="selector">Selector scoping the query.</param> + /// <param name="cancellationToken">Cancellation token.</param> + ValueTask<ImpactSet> ResolveByPurlsAsync( + IEnumerable<string> purls, + bool usageOnly, + Selector selector, + CancellationToken cancellationToken = default); + + /// <summary> + /// Resolves impacted images by vulnerability identifiers if the index has the mapping available. 
+ /// </summary> + /// <param name="vulnerabilityIds">Vulnerability identifiers to look up.</param> + /// <param name="usageOnly">When true, restricts results to components marked as runtime/entrypoint usage.</param> + /// <param name="selector">Selector scoping the query.</param> + /// <param name="cancellationToken">Cancellation token.</param> + ValueTask<ImpactSet> ResolveByVulnerabilitiesAsync( + IEnumerable<string> vulnerabilityIds, + bool usageOnly, + Selector selector, + CancellationToken cancellationToken = default); + + /// <summary> + /// Resolves all tracked images for the provided selector. + /// </summary> + /// <param name="selector">Selector scoping the query.</param> + /// <param name="usageOnly">When true, restricts results to images with entrypoint usage.</param> + /// <param name="cancellationToken">Cancellation token.</param> + ValueTask<ImpactSet> ResolveAllAsync( + Selector selector, + bool usageOnly, + CancellationToken cancellationToken = default); +} diff --git a/src/StellaOps.Scheduler.ImpactIndex/ImpactImageRecord.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/ImpactImageRecord.cs similarity index 96% rename from src/StellaOps.Scheduler.ImpactIndex/ImpactImageRecord.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/ImpactImageRecord.cs index 395ee54e..9ff2739a 100644 --- a/src/StellaOps.Scheduler.ImpactIndex/ImpactImageRecord.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/ImpactImageRecord.cs @@ -1,17 +1,17 @@ -using System; -using System.Collections.Immutable; - -namespace StellaOps.Scheduler.ImpactIndex; - -internal sealed record ImpactImageRecord( - int ImageId, - string TenantId, - string Digest, - string Registry, - string Repository, - ImmutableArray<string> Namespaces, - ImmutableArray<string> Tags, - ImmutableSortedDictionary<string, string> Labels, - DateTimeOffset GeneratedAt, - ImmutableArray<string> Components, - ImmutableArray<string> EntrypointComponents); +using System; +using System.Collections.Immutable; + +namespace StellaOps.Scheduler.ImpactIndex; + +internal sealed record ImpactImageRecord( + int ImageId, + string TenantId, + string Digest, + string Registry, + string Repository, + ImmutableArray<string> Namespaces, + ImmutableArray<string> Tags, + ImmutableSortedDictionary<string, string> Labels, + DateTimeOffset GeneratedAt, + ImmutableArray<string> Components, + ImmutableArray<string> EntrypointComponents); diff --git a/src/StellaOps.Scheduler.ImpactIndex/ImpactIndexServiceCollectionExtensions.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/ImpactIndexServiceCollectionExtensions.cs similarity index 96% rename from src/StellaOps.Scheduler.ImpactIndex/ImpactIndexServiceCollectionExtensions.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/ImpactIndexServiceCollectionExtensions.cs index 94d8eb8e..a67851ed 100644 --- a/src/StellaOps.Scheduler.ImpactIndex/ImpactIndexServiceCollectionExtensions.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/ImpactIndexServiceCollectionExtensions.cs @@ -1,26 +1,26 @@ -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.DependencyInjection.Extensions; - -namespace StellaOps.Scheduler.ImpactIndex; - -/// <summary> -/// ServiceCollection helpers for wiring the fixture-backed impact index. 
-/// </summary> -public static class ImpactIndexServiceCollectionExtensions -{ - public static IServiceCollection AddImpactIndexStub( - this IServiceCollection services, - Action<ImpactIndexStubOptions>? configure = null) - { - ArgumentNullException.ThrowIfNull(services); - - var options = new ImpactIndexStubOptions(); - configure?.Invoke(options); - - services.TryAddSingleton(TimeProvider.System); - services.AddSingleton(options); - services.TryAddSingleton<IImpactIndex, FixtureImpactIndex>(); - - return services; - } -} +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; + +namespace StellaOps.Scheduler.ImpactIndex; + +/// <summary> +/// ServiceCollection helpers for wiring the fixture-backed impact index. +/// </summary> +public static class ImpactIndexServiceCollectionExtensions +{ + public static IServiceCollection AddImpactIndexStub( + this IServiceCollection services, + Action<ImpactIndexStubOptions>? configure = null) + { + ArgumentNullException.ThrowIfNull(services); + + var options = new ImpactIndexStubOptions(); + configure?.Invoke(options); + + services.TryAddSingleton(TimeProvider.System); + services.AddSingleton(options); + services.TryAddSingleton<IImpactIndex, FixtureImpactIndex>(); + + return services; + } +} diff --git a/src/StellaOps.Scheduler.ImpactIndex/ImpactIndexStubOptions.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/ImpactIndexStubOptions.cs similarity index 97% rename from src/StellaOps.Scheduler.ImpactIndex/ImpactIndexStubOptions.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/ImpactIndexStubOptions.cs index 344b77d5..c0acd735 100644 --- a/src/StellaOps.Scheduler.ImpactIndex/ImpactIndexStubOptions.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/ImpactIndexStubOptions.cs @@ -1,19 +1,19 @@ -namespace StellaOps.Scheduler.ImpactIndex; - -/// <summary> -/// Options controlling the fixture-backed impact index stub. -/// </summary> -public sealed class ImpactIndexStubOptions -{ - /// <summary> - /// Optional absolute or relative directory containing BOM-Index JSON fixtures. - /// When not supplied or not found, embedded fixtures ship with the assembly are used instead. - /// </summary> - public string? FixtureDirectory { get; set; } - - /// <summary> - /// Snapshot identifier reported in the generated <see cref="StellaOps.Scheduler.Models.ImpactSet"/>. - /// Defaults to <c>samples/impact-index-stub</c>. - /// </summary> - public string SnapshotId { get; set; } = "samples/impact-index-stub"; -} +namespace StellaOps.Scheduler.ImpactIndex; + +/// <summary> +/// Options controlling the fixture-backed impact index stub. +/// </summary> +public sealed class ImpactIndexStubOptions +{ + /// <summary> + /// Optional absolute or relative directory containing BOM-Index JSON fixtures. + /// When not supplied or not found, embedded fixtures ship with the assembly are used instead. + /// </summary> + public string? FixtureDirectory { get; set; } + + /// <summary> + /// Snapshot identifier reported in the generated <see cref="StellaOps.Scheduler.Models.ImpactSet"/>. + /// Defaults to <c>samples/impact-index-stub</c>. 
+ /// </summary> + public string SnapshotId { get; set; } = "samples/impact-index-stub"; +} diff --git a/src/StellaOps.Scheduler.ImpactIndex/Ingestion/BomIndexReader.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/Ingestion/BomIndexReader.cs similarity index 97% rename from src/StellaOps.Scheduler.ImpactIndex/Ingestion/BomIndexReader.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/Ingestion/BomIndexReader.cs index 4764196c..b4ac3e17 100644 --- a/src/StellaOps.Scheduler.ImpactIndex/Ingestion/BomIndexReader.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/Ingestion/BomIndexReader.cs @@ -1,119 +1,119 @@ -using System.Buffers.Binary; -using System.Collections.Immutable; -using System.Globalization; -using System.Text; -using Collections.Special; - -namespace StellaOps.Scheduler.ImpactIndex.Ingestion; - -internal sealed record BomIndexComponent(string Key, bool UsedByEntrypoint); - -internal sealed record BomIndexDocument(string ImageDigest, DateTimeOffset GeneratedAt, ImmutableArray<BomIndexComponent> Components); - -internal static class BomIndexReader -{ - private const int HeaderMagicLength = 7; - private static readonly byte[] Magic = Encoding.ASCII.GetBytes("BOMIDX1"); - - public static BomIndexDocument Read(Stream stream) - { - ArgumentNullException.ThrowIfNull(stream); - - using var reader = new BinaryReader(stream, Encoding.UTF8, leaveOpen: true); - Span<byte> magicBuffer = stackalloc byte[HeaderMagicLength]; - if (reader.Read(magicBuffer) != HeaderMagicLength || !magicBuffer.SequenceEqual(Magic)) - { - throw new InvalidOperationException("Invalid BOM index header magic."); - } - - var version = reader.ReadUInt16(); - if (version != 1) - { - throw new NotSupportedException($"Unsupported BOM index version '{version}'."); - } - - var flags = reader.ReadUInt16(); - var hasEntrypoints = (flags & 0x1) == 1; - - var digestLength = reader.ReadUInt16(); - var digestBytes = reader.ReadBytes(digestLength); - var imageDigest = Encoding.UTF8.GetString(digestBytes); - - var generatedAtMicros = reader.ReadInt64(); - var generatedAt = DateTimeOffset.FromUnixTimeMilliseconds(generatedAtMicros / 1000) - .AddTicks((generatedAtMicros % 1000) * TimeSpan.TicksPerMillisecond / 1000); - - var layerCount = checked((int)reader.ReadUInt32()); - var componentCount = checked((int)reader.ReadUInt32()); - var entrypointCount = checked((int)reader.ReadUInt32()); - - // Layer table (we only need to skip entries but validate length) - for (var i = 0; i < layerCount; i++) - { - _ = ReadUtf8String(reader); - } - - var componentKeys = new string[componentCount]; - for (var i = 0; i < componentCount; i++) - { - componentKeys[i] = ReadUtf8String(reader); - } - - for (var i = 0; i < componentCount; i++) - { - var length = reader.ReadUInt32(); - if (length > 0) - { - var payload = reader.ReadBytes(checked((int)length)); - using var bitmapStream = new MemoryStream(payload, writable: false); - _ = RoaringBitmap.Deserialize(bitmapStream); - } - } - - var entrypointPresence = new bool[componentCount]; - if (hasEntrypoints && entrypointCount > 0) - { - // Entrypoint table (skip strings) - for (var i = 0; i < entrypointCount; i++) - { - _ = ReadUtf8String(reader); - } - - for (var i = 0; i < componentCount; i++) - { - var length = reader.ReadUInt32(); - if (length == 0) - { - entrypointPresence[i] = false; - continue; - } - - var payload = reader.ReadBytes(checked((int)length)); - using var bitmapStream = new MemoryStream(payload, writable: false); - var bitmap = 
RoaringBitmap.Deserialize(bitmapStream); - entrypointPresence[i] = bitmap.Any(); - } - } - - var builder = ImmutableArray.CreateBuilder<BomIndexComponent>(componentCount); - for (var i = 0; i < componentCount; i++) - { - var key = componentKeys[i]; - builder.Add(new BomIndexComponent(key, entrypointPresence[i])); - } - - return new BomIndexDocument(imageDigest, generatedAt, builder.MoveToImmutable()); - } - - private static string ReadUtf8String(BinaryReader reader) - { - var length = reader.ReadUInt16(); - if (length == 0) - { - return string.Empty; - } - - var bytes = reader.ReadBytes(length); - return Encoding.UTF8.GetString(bytes); - } -} +using System.Buffers.Binary; +using System.Collections.Immutable; +using System.Globalization; +using System.Text; +using Collections.Special; + +namespace StellaOps.Scheduler.ImpactIndex.Ingestion; + +internal sealed record BomIndexComponent(string Key, bool UsedByEntrypoint); + +internal sealed record BomIndexDocument(string ImageDigest, DateTimeOffset GeneratedAt, ImmutableArray<BomIndexComponent> Components); + +internal static class BomIndexReader +{ + private const int HeaderMagicLength = 7; + private static readonly byte[] Magic = Encoding.ASCII.GetBytes("BOMIDX1"); + + public static BomIndexDocument Read(Stream stream) + { + ArgumentNullException.ThrowIfNull(stream); + + using var reader = new BinaryReader(stream, Encoding.UTF8, leaveOpen: true); + Span<byte> magicBuffer = stackalloc byte[HeaderMagicLength]; + if (reader.Read(magicBuffer) != HeaderMagicLength || !magicBuffer.SequenceEqual(Magic)) + { + throw new InvalidOperationException("Invalid BOM index header magic."); + } + + var version = reader.ReadUInt16(); + if (version != 1) + { + throw new NotSupportedException($"Unsupported BOM index version '{version}'."); + } + + var flags = reader.ReadUInt16(); + var hasEntrypoints = (flags & 0x1) == 1; + + var digestLength = reader.ReadUInt16(); + var digestBytes = reader.ReadBytes(digestLength); + var imageDigest = Encoding.UTF8.GetString(digestBytes); + + var generatedAtMicros = reader.ReadInt64(); + var generatedAt = DateTimeOffset.FromUnixTimeMilliseconds(generatedAtMicros / 1000) + .AddTicks((generatedAtMicros % 1000) * TimeSpan.TicksPerMillisecond / 1000); + + var layerCount = checked((int)reader.ReadUInt32()); + var componentCount = checked((int)reader.ReadUInt32()); + var entrypointCount = checked((int)reader.ReadUInt32()); + + // Layer table (we only need to skip entries but validate length) + for (var i = 0; i < layerCount; i++) + { + _ = ReadUtf8String(reader); + } + + var componentKeys = new string[componentCount]; + for (var i = 0; i < componentCount; i++) + { + componentKeys[i] = ReadUtf8String(reader); + } + + for (var i = 0; i < componentCount; i++) + { + var length = reader.ReadUInt32(); + if (length > 0) + { + var payload = reader.ReadBytes(checked((int)length)); + using var bitmapStream = new MemoryStream(payload, writable: false); + _ = RoaringBitmap.Deserialize(bitmapStream); + } + } + + var entrypointPresence = new bool[componentCount]; + if (hasEntrypoints && entrypointCount > 0) + { + // Entrypoint table (skip strings) + for (var i = 0; i < entrypointCount; i++) + { + _ = ReadUtf8String(reader); + } + + for (var i = 0; i < componentCount; i++) + { + var length = reader.ReadUInt32(); + if (length == 0) + { + entrypointPresence[i] = false; + continue; + } + + var payload = reader.ReadBytes(checked((int)length)); + using var bitmapStream = new MemoryStream(payload, writable: false); + var bitmap = 
RoaringBitmap.Deserialize(bitmapStream); + entrypointPresence[i] = bitmap.Any(); + } + } + + var builder = ImmutableArray.CreateBuilder<BomIndexComponent>(componentCount); + for (var i = 0; i < componentCount; i++) + { + var key = componentKeys[i]; + builder.Add(new BomIndexComponent(key, entrypointPresence[i])); + } + + return new BomIndexDocument(imageDigest, generatedAt, builder.MoveToImmutable()); + } + + private static string ReadUtf8String(BinaryReader reader) + { + var length = reader.ReadUInt16(); + if (length == 0) + { + return string.Empty; + } + + var bytes = reader.ReadBytes(length); + return Encoding.UTF8.GetString(bytes); + } +} diff --git a/src/StellaOps.Scheduler.ImpactIndex/Ingestion/ImpactIndexIngestionRequest.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/Ingestion/ImpactIndexIngestionRequest.cs similarity index 97% rename from src/StellaOps.Scheduler.ImpactIndex/Ingestion/ImpactIndexIngestionRequest.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/Ingestion/ImpactIndexIngestionRequest.cs index 3bf02262..175d8856 100644 --- a/src/StellaOps.Scheduler.ImpactIndex/Ingestion/ImpactIndexIngestionRequest.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/Ingestion/ImpactIndexIngestionRequest.cs @@ -1,28 +1,28 @@ -using System; -using System.Collections.Immutable; -using System.IO; - -namespace StellaOps.Scheduler.ImpactIndex.Ingestion; - -/// <summary> -/// Describes a BOM-Index ingestion payload for the scheduler impact index. -/// </summary> -public sealed record ImpactIndexIngestionRequest -{ - public required string TenantId { get; init; } - - public required string ImageDigest { get; init; } - - public required string Registry { get; init; } - - public required string Repository { get; init; } - - public ImmutableArray<string> Namespaces { get; init; } = ImmutableArray<string>.Empty; - - public ImmutableArray<string> Tags { get; init; } = ImmutableArray<string>.Empty; - - public ImmutableSortedDictionary<string, string> Labels { get; init; } = ImmutableSortedDictionary<string, string>.Empty.WithComparers(StringComparer.OrdinalIgnoreCase); - - public required Stream BomIndexStream { get; init; } - = Stream.Null; -} +using System; +using System.Collections.Immutable; +using System.IO; + +namespace StellaOps.Scheduler.ImpactIndex.Ingestion; + +/// <summary> +/// Describes a BOM-Index ingestion payload for the scheduler impact index. 
+/// </summary> +public sealed record ImpactIndexIngestionRequest +{ + public required string TenantId { get; init; } + + public required string ImageDigest { get; init; } + + public required string Registry { get; init; } + + public required string Repository { get; init; } + + public ImmutableArray<string> Namespaces { get; init; } = ImmutableArray<string>.Empty; + + public ImmutableArray<string> Tags { get; init; } = ImmutableArray<string>.Empty; + + public ImmutableSortedDictionary<string, string> Labels { get; init; } = ImmutableSortedDictionary<string, string>.Empty.WithComparers(StringComparer.OrdinalIgnoreCase); + + public required Stream BomIndexStream { get; init; } + = Stream.Null; +} diff --git a/src/StellaOps.Scheduler.ImpactIndex/REMOVAL_NOTE.md b/src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/REMOVAL_NOTE.md similarity index 97% rename from src/StellaOps.Scheduler.ImpactIndex/REMOVAL_NOTE.md rename to src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/REMOVAL_NOTE.md index 57ebcfec..b84ef108 100644 --- a/src/StellaOps.Scheduler.ImpactIndex/REMOVAL_NOTE.md +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/REMOVAL_NOTE.md @@ -1,15 +1,15 @@ -# ImpactIndex Stub Removal Tracker - -- **Created:** 2025-10-20 -- **Owner:** Scheduler ImpactIndex Guild -- **Reference Task:** SCHED-IMPACT-16-300 (fixture-backed stub) - -## Exit Reminder - -Replace `FixtureImpactIndex` with the roaring bitmap-backed implementation once SCHED-IMPACT-16-301/302 are completed, then delete: - -1. Stub classes (`FixtureImpactIndex`, `ImpactIndexStubOptions`, `ImpactIndexServiceCollectionExtensions`). -2. Embedded sample fixture wiring in `StellaOps.Scheduler.ImpactIndex.csproj`. -3. Temporary unit tests in `StellaOps.Scheduler.ImpactIndex.Tests`. - -Remove this file when the production ImpactIndex replaces the stub. +# ImpactIndex Stub Removal Tracker + +- **Created:** 2025-10-20 +- **Owner:** Scheduler ImpactIndex Guild +- **Reference Task:** SCHED-IMPACT-16-300 (fixture-backed stub) + +## Exit Reminder + +Replace `FixtureImpactIndex` with the roaring bitmap-backed implementation once SCHED-IMPACT-16-301/302 are completed, then delete: + +1. Stub classes (`FixtureImpactIndex`, `ImpactIndexStubOptions`, `ImpactIndexServiceCollectionExtensions`). +2. Embedded sample fixture wiring in `StellaOps.Scheduler.ImpactIndex.csproj`. +3. Temporary unit tests in `StellaOps.Scheduler.ImpactIndex.Tests`. + +Remove this file when the production ImpactIndex replaces the stub. 
diff --git a/src/StellaOps.Scheduler.ImpactIndex/RoaringImpactIndex.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/RoaringImpactIndex.cs similarity index 97% rename from src/StellaOps.Scheduler.ImpactIndex/RoaringImpactIndex.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/RoaringImpactIndex.cs index d1466b0d..256b1031 100644 --- a/src/StellaOps.Scheduler.ImpactIndex/RoaringImpactIndex.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/RoaringImpactIndex.cs @@ -1,481 +1,481 @@ -using System; -using System.Buffers.Binary; -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Linq; -using System.Security.Cryptography; -using System.Text; -using System.Text.RegularExpressions; -using Collections.Special; -using Microsoft.Extensions.Logging; -using StellaOps.Scheduler.ImpactIndex.Ingestion; -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.ImpactIndex; - -/// <summary> -/// Roaring bitmap-backed implementation of the scheduler impact index. -/// </summary> -public sealed class RoaringImpactIndex : IImpactIndex -{ - private readonly object _gate = new(); - - private readonly Dictionary<string, int> _imageIds = new(StringComparer.OrdinalIgnoreCase); - private readonly Dictionary<int, ImpactImageRecord> _images = new(); - private readonly Dictionary<string, RoaringBitmap> _containsByPurl = new(StringComparer.OrdinalIgnoreCase); - private readonly Dictionary<string, RoaringBitmap> _usedByEntrypointByPurl = new(StringComparer.OrdinalIgnoreCase); - - private readonly ILogger<RoaringImpactIndex> _logger; - private readonly TimeProvider _timeProvider; - - public RoaringImpactIndex(ILogger<RoaringImpactIndex> logger, TimeProvider? timeProvider = null) - { - _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - _timeProvider = timeProvider ?? TimeProvider.System; - } - - public async Task IngestAsync(ImpactIndexIngestionRequest request, CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(request); - ArgumentNullException.ThrowIfNull(request.BomIndexStream); - - using var buffer = new MemoryStream(); - await request.BomIndexStream.CopyToAsync(buffer, cancellationToken).ConfigureAwait(false); - buffer.Position = 0; - - var document = BomIndexReader.Read(buffer); - if (!string.Equals(document.ImageDigest, request.ImageDigest, StringComparison.OrdinalIgnoreCase)) - { - throw new InvalidOperationException($"BOM-Index digest mismatch. Header '{document.ImageDigest}', request '{request.ImageDigest}'."); - } - - var tenantId = request.TenantId ?? throw new ArgumentNullException(nameof(request.TenantId)); - var registry = request.Registry ?? throw new ArgumentNullException(nameof(request.Registry)); - var repository = request.Repository ?? throw new ArgumentNullException(nameof(request.Repository)); - - var namespaces = request.Namespaces.IsDefault ? ImmutableArray<string>.Empty : request.Namespaces; - var tags = request.Tags.IsDefault ? ImmutableArray<string>.Empty : request.Tags; - var labels = request.Labels.Count == 0 - ? 
ImmutableSortedDictionary<string, string>.Empty.WithComparers(StringComparer.OrdinalIgnoreCase) - : request.Labels; - - var componentKeys = document.Components - .Select(component => component.Key) - .Distinct(StringComparer.OrdinalIgnoreCase) - .ToImmutableArray(); - var entrypointComponents = document.Components - .Where(component => component.UsedByEntrypoint) - .Select(component => component.Key) - .Distinct(StringComparer.OrdinalIgnoreCase) - .ToImmutableArray(); - - lock (_gate) - { - var imageId = EnsureImageId(request.ImageDigest); - - if (_images.TryGetValue(imageId, out var existing)) - { - RemoveImageComponents(existing); - } - - var metadata = new ImpactImageRecord( - imageId, - tenantId, - request.ImageDigest, - registry, - repository, - namespaces, - tags, - labels, - document.GeneratedAt, - componentKeys, - entrypointComponents); - - _images[imageId] = metadata; - _imageIds[request.ImageDigest] = imageId; - - foreach (var key in componentKeys) - { - var bitmap = _containsByPurl.GetValueOrDefault(key); - _containsByPurl[key] = AddImageToBitmap(bitmap, imageId); - } - - foreach (var key in entrypointComponents) - { - var bitmap = _usedByEntrypointByPurl.GetValueOrDefault(key); - _usedByEntrypointByPurl[key] = AddImageToBitmap(bitmap, imageId); - } - } - - _logger.LogInformation( - "ImpactIndex ingested BOM-Index for {Digest} ({TenantId}/{Repository}). Components={ComponentCount} EntrypointComponents={EntrypointCount}", - request.ImageDigest, - tenantId, - repository, - componentKeys.Length, - entrypointComponents.Length); - } - - public ValueTask<ImpactSet> ResolveByPurlsAsync( - IEnumerable<string> purls, - bool usageOnly, - Selector selector, - CancellationToken cancellationToken = default) - => ValueTask.FromResult(ResolveByPurlsCore(purls, usageOnly, selector)); - - public ValueTask<ImpactSet> ResolveByVulnerabilitiesAsync( - IEnumerable<string> vulnerabilityIds, - bool usageOnly, - Selector selector, - CancellationToken cancellationToken = default) - => ValueTask.FromResult(CreateEmptyImpactSet(selector, usageOnly)); - - public ValueTask<ImpactSet> ResolveAllAsync( - Selector selector, - bool usageOnly, - CancellationToken cancellationToken = default) - => ValueTask.FromResult(ResolveAllCore(selector, usageOnly)); - - private ImpactSet ResolveByPurlsCore(IEnumerable<string> purls, bool usageOnly, Selector selector) - { - ArgumentNullException.ThrowIfNull(purls); - ArgumentNullException.ThrowIfNull(selector); - - var normalized = purls - .Where(static purl => !string.IsNullOrWhiteSpace(purl)) - .Select(static purl => purl.Trim()) - .Distinct(StringComparer.OrdinalIgnoreCase) - .ToArray(); - - if (normalized.Length == 0) - { - return CreateEmptyImpactSet(selector, usageOnly); - } - - RoaringBitmap imageIds; - lock (_gate) - { - imageIds = RoaringBitmap.Create(Array.Empty<int>()); - foreach (var purl in normalized) - { - if (_containsByPurl.TryGetValue(purl, out var bitmap)) - { - imageIds = imageIds | bitmap; - } - } - } - - return BuildImpactSet(imageIds, selector, usageOnly); - } - - private ImpactSet ResolveAllCore(Selector selector, bool usageOnly) - { - ArgumentNullException.ThrowIfNull(selector); - - RoaringBitmap bitmap; - lock (_gate) - { - var ids = _images.Keys.OrderBy(id => id).ToArray(); - bitmap = RoaringBitmap.Create(ids); - } - - return BuildImpactSet(bitmap, selector, usageOnly); - } - - private ImpactSet BuildImpactSet(RoaringBitmap imageIds, Selector selector, bool usageOnly) - { - var images = new List<ImpactImage>(); - var latestGeneratedAt = 
DateTimeOffset.MinValue; - - lock (_gate) - { - foreach (var imageId in imageIds) - { - if (!_images.TryGetValue(imageId, out var metadata)) - { - continue; - } - - if (!ImageMatchesSelector(metadata, selector)) - { - continue; - } - - if (usageOnly && metadata.EntrypointComponents.Length == 0) - { - continue; - } - - if (metadata.GeneratedAt > latestGeneratedAt) - { - latestGeneratedAt = metadata.GeneratedAt; - } - - images.Add(new ImpactImage( - metadata.Digest, - metadata.Registry, - metadata.Repository, - metadata.Namespaces, - metadata.Tags, - metadata.EntrypointComponents.Length > 0, - metadata.Labels)); - } - } - - if (images.Count == 0) - { - return CreateEmptyImpactSet(selector, usageOnly); - } - - images.Sort(static (left, right) => string.Compare(left.ImageDigest, right.ImageDigest, StringComparison.Ordinal)); - - var generatedAt = latestGeneratedAt == DateTimeOffset.MinValue ? _timeProvider.GetUtcNow() : latestGeneratedAt; - - return new ImpactSet( - selector, - images.ToImmutableArray(), - usageOnly, - generatedAt, - images.Count, - snapshotId: null, - schemaVersion: SchedulerSchemaVersions.ImpactSet); - } - - private ImpactSet CreateEmptyImpactSet(Selector selector, bool usageOnly) - { - return new ImpactSet( - selector, - ImmutableArray<ImpactImage>.Empty, - usageOnly, - _timeProvider.GetUtcNow(), - 0, - snapshotId: null, - schemaVersion: SchedulerSchemaVersions.ImpactSet); - } - - private static bool ImageMatchesSelector(ImpactImageRecord image, Selector selector) - { - if (selector.TenantId is not null && !string.Equals(selector.TenantId, image.TenantId, StringComparison.Ordinal)) - { - return false; - } - - if (!MatchesScope(image, selector)) - { - return false; - } - - if (selector.Digests.Length > 0 && !selector.Digests.Contains(image.Digest, StringComparer.OrdinalIgnoreCase)) - { - return false; - } - - if (selector.Repositories.Length > 0) - { - var repoMatch = selector.Repositories.Any(repo => - string.Equals(repo, image.Repository, StringComparison.OrdinalIgnoreCase) || - string.Equals(repo, $"{image.Registry}/{image.Repository}", StringComparison.OrdinalIgnoreCase)); - if (!repoMatch) - { - return false; - } - } - - if (selector.Namespaces.Length > 0) - { - if (image.Namespaces.IsDefaultOrEmpty) - { - return false; - } - - var namespaceMatch = selector.Namespaces.Any(ns => image.Namespaces.Contains(ns, StringComparer.OrdinalIgnoreCase)); - if (!namespaceMatch) - { - return false; - } - } - - if (selector.IncludeTags.Length > 0) - { - if (image.Tags.IsDefaultOrEmpty) - { - return false; - } - - var tagMatch = selector.IncludeTags.Any(pattern => image.Tags.Any(tag => MatchesTagPattern(tag, pattern))); - if (!tagMatch) - { - return false; - } - } - - if (selector.Labels.Length > 0) - { - if (image.Labels.Count == 0) - { - return false; - } - - foreach (var label in selector.Labels) - { - if (!image.Labels.TryGetValue(label.Key, out var value)) - { - return false; - } - - if (label.Values.Length > 0 && !label.Values.Contains(value, StringComparer.OrdinalIgnoreCase)) - { - return false; - } - } - } - - return true; - } - - private void RemoveImageComponents(ImpactImageRecord record) - { - foreach (var key in record.Components) - { - if (_containsByPurl.TryGetValue(key, out var bitmap)) - { - var updated = RemoveImageFromBitmap(bitmap, record.ImageId); - if (updated is null) - { - _containsByPurl.Remove(key); - } - else - { - _containsByPurl[key] = updated; - } - } - } - - foreach (var key in record.EntrypointComponents) - { - if (_usedByEntrypointByPurl.TryGetValue(key, 
out var bitmap)) - { - var updated = RemoveImageFromBitmap(bitmap, record.ImageId); - if (updated is null) - { - _usedByEntrypointByPurl.Remove(key); - } - else - { - _usedByEntrypointByPurl[key] = updated; - } - } - } - } - - private static RoaringBitmap AddImageToBitmap(RoaringBitmap? bitmap, int imageId) - { - if (bitmap is null) - { - return RoaringBitmap.Create(new[] { imageId }); - } - - if (bitmap.Any(id => id == imageId)) - { - return bitmap; - } - - var merged = bitmap - .Concat(new[] { imageId }) - .Distinct() - .OrderBy(id => id) - .ToArray(); - - return RoaringBitmap.Create(merged); - } - - private static RoaringBitmap? RemoveImageFromBitmap(RoaringBitmap bitmap, int imageId) - { - var remaining = bitmap - .Where(id => id != imageId) - .OrderBy(id => id) - .ToArray(); - if (remaining.Length == 0) - { - return null; - } - - return RoaringBitmap.Create(remaining); - } - - private static bool MatchesScope(ImpactImageRecord image, Selector selector) - { - return selector.Scope switch - { - SelectorScope.AllImages => true, - SelectorScope.ByDigest => selector.Digests.Contains(image.Digest, StringComparer.OrdinalIgnoreCase), - SelectorScope.ByRepository => selector.Repositories.Any(repo => - string.Equals(repo, image.Repository, StringComparison.OrdinalIgnoreCase) || - string.Equals(repo, $"{image.Registry}/{image.Repository}", StringComparison.OrdinalIgnoreCase)), - SelectorScope.ByNamespace => !image.Namespaces.IsDefaultOrEmpty && selector.Namespaces.Any(ns => image.Namespaces.Contains(ns, StringComparer.OrdinalIgnoreCase)), - SelectorScope.ByLabels => selector.Labels.All(label => - image.Labels.TryGetValue(label.Key, out var value) && - (label.Values.Length == 0 || label.Values.Contains(value, StringComparer.OrdinalIgnoreCase))), - _ => true, - }; - } - - private static bool MatchesTagPattern(string tag, string pattern) - { - if (string.IsNullOrWhiteSpace(pattern)) - { - return false; - } - - if (pattern == "*") - { - return true; - } - - if (!pattern.Contains('*') && !pattern.Contains('?')) - { - return string.Equals(tag, pattern, StringComparison.OrdinalIgnoreCase); - } - - var escaped = Regex.Escape(pattern) - .Replace("\\*", ".*") - .Replace("\\?", "."); - return Regex.IsMatch(tag, $"^{escaped}$", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant); - } - - private int EnsureImageId(string digest) - { - if (_imageIds.TryGetValue(digest, out var existing)) - { - return existing; - } - - var candidate = ComputeDeterministicId(digest); - while (_images.ContainsKey(candidate)) - { - candidate = (candidate + 1) & int.MaxValue; - if (candidate == 0) - { - candidate = 1; - } - } - - _imageIds[digest] = candidate; - return candidate; - } - - private static int ComputeDeterministicId(string digest) - { - var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(digest)); - for (var offset = 0; offset <= bytes.Length - sizeof(int); offset += sizeof(int)) - { - var value = BinaryPrimitives.ReadInt32LittleEndian(bytes.AsSpan(offset, sizeof(int))) & int.MaxValue; - if (value != 0) - { - return value; - } - } - - return digest.GetHashCode(StringComparison.OrdinalIgnoreCase) & int.MaxValue; - } -} +using System; +using System.Buffers.Binary; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Linq; +using System.Security.Cryptography; +using System.Text; +using System.Text.RegularExpressions; +using Collections.Special; +using Microsoft.Extensions.Logging; +using StellaOps.Scheduler.ImpactIndex.Ingestion; +using StellaOps.Scheduler.Models; + +namespace 
StellaOps.Scheduler.ImpactIndex; + +/// <summary> +/// Roaring bitmap-backed implementation of the scheduler impact index. +/// </summary> +public sealed class RoaringImpactIndex : IImpactIndex +{ + private readonly object _gate = new(); + + private readonly Dictionary<string, int> _imageIds = new(StringComparer.OrdinalIgnoreCase); + private readonly Dictionary<int, ImpactImageRecord> _images = new(); + private readonly Dictionary<string, RoaringBitmap> _containsByPurl = new(StringComparer.OrdinalIgnoreCase); + private readonly Dictionary<string, RoaringBitmap> _usedByEntrypointByPurl = new(StringComparer.OrdinalIgnoreCase); + + private readonly ILogger<RoaringImpactIndex> _logger; + private readonly TimeProvider _timeProvider; + + public RoaringImpactIndex(ILogger<RoaringImpactIndex> logger, TimeProvider? timeProvider = null) + { + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _timeProvider = timeProvider ?? TimeProvider.System; + } + + public async Task IngestAsync(ImpactIndexIngestionRequest request, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(request); + ArgumentNullException.ThrowIfNull(request.BomIndexStream); + + using var buffer = new MemoryStream(); + await request.BomIndexStream.CopyToAsync(buffer, cancellationToken).ConfigureAwait(false); + buffer.Position = 0; + + var document = BomIndexReader.Read(buffer); + if (!string.Equals(document.ImageDigest, request.ImageDigest, StringComparison.OrdinalIgnoreCase)) + { + throw new InvalidOperationException($"BOM-Index digest mismatch. Header '{document.ImageDigest}', request '{request.ImageDigest}'."); + } + + var tenantId = request.TenantId ?? throw new ArgumentNullException(nameof(request.TenantId)); + var registry = request.Registry ?? throw new ArgumentNullException(nameof(request.Registry)); + var repository = request.Repository ?? throw new ArgumentNullException(nameof(request.Repository)); + + var namespaces = request.Namespaces.IsDefault ? ImmutableArray<string>.Empty : request.Namespaces; + var tags = request.Tags.IsDefault ? ImmutableArray<string>.Empty : request.Tags; + var labels = request.Labels.Count == 0 + ? 
ImmutableSortedDictionary<string, string>.Empty.WithComparers(StringComparer.OrdinalIgnoreCase) + : request.Labels; + + var componentKeys = document.Components + .Select(component => component.Key) + .Distinct(StringComparer.OrdinalIgnoreCase) + .ToImmutableArray(); + var entrypointComponents = document.Components + .Where(component => component.UsedByEntrypoint) + .Select(component => component.Key) + .Distinct(StringComparer.OrdinalIgnoreCase) + .ToImmutableArray(); + + lock (_gate) + { + var imageId = EnsureImageId(request.ImageDigest); + + if (_images.TryGetValue(imageId, out var existing)) + { + RemoveImageComponents(existing); + } + + var metadata = new ImpactImageRecord( + imageId, + tenantId, + request.ImageDigest, + registry, + repository, + namespaces, + tags, + labels, + document.GeneratedAt, + componentKeys, + entrypointComponents); + + _images[imageId] = metadata; + _imageIds[request.ImageDigest] = imageId; + + foreach (var key in componentKeys) + { + var bitmap = _containsByPurl.GetValueOrDefault(key); + _containsByPurl[key] = AddImageToBitmap(bitmap, imageId); + } + + foreach (var key in entrypointComponents) + { + var bitmap = _usedByEntrypointByPurl.GetValueOrDefault(key); + _usedByEntrypointByPurl[key] = AddImageToBitmap(bitmap, imageId); + } + } + + _logger.LogInformation( + "ImpactIndex ingested BOM-Index for {Digest} ({TenantId}/{Repository}). Components={ComponentCount} EntrypointComponents={EntrypointCount}", + request.ImageDigest, + tenantId, + repository, + componentKeys.Length, + entrypointComponents.Length); + } + + public ValueTask<ImpactSet> ResolveByPurlsAsync( + IEnumerable<string> purls, + bool usageOnly, + Selector selector, + CancellationToken cancellationToken = default) + => ValueTask.FromResult(ResolveByPurlsCore(purls, usageOnly, selector)); + + public ValueTask<ImpactSet> ResolveByVulnerabilitiesAsync( + IEnumerable<string> vulnerabilityIds, + bool usageOnly, + Selector selector, + CancellationToken cancellationToken = default) + => ValueTask.FromResult(CreateEmptyImpactSet(selector, usageOnly)); + + public ValueTask<ImpactSet> ResolveAllAsync( + Selector selector, + bool usageOnly, + CancellationToken cancellationToken = default) + => ValueTask.FromResult(ResolveAllCore(selector, usageOnly)); + + private ImpactSet ResolveByPurlsCore(IEnumerable<string> purls, bool usageOnly, Selector selector) + { + ArgumentNullException.ThrowIfNull(purls); + ArgumentNullException.ThrowIfNull(selector); + + var normalized = purls + .Where(static purl => !string.IsNullOrWhiteSpace(purl)) + .Select(static purl => purl.Trim()) + .Distinct(StringComparer.OrdinalIgnoreCase) + .ToArray(); + + if (normalized.Length == 0) + { + return CreateEmptyImpactSet(selector, usageOnly); + } + + RoaringBitmap imageIds; + lock (_gate) + { + imageIds = RoaringBitmap.Create(Array.Empty<int>()); + foreach (var purl in normalized) + { + if (_containsByPurl.TryGetValue(purl, out var bitmap)) + { + imageIds = imageIds | bitmap; + } + } + } + + return BuildImpactSet(imageIds, selector, usageOnly); + } + + private ImpactSet ResolveAllCore(Selector selector, bool usageOnly) + { + ArgumentNullException.ThrowIfNull(selector); + + RoaringBitmap bitmap; + lock (_gate) + { + var ids = _images.Keys.OrderBy(id => id).ToArray(); + bitmap = RoaringBitmap.Create(ids); + } + + return BuildImpactSet(bitmap, selector, usageOnly); + } + + private ImpactSet BuildImpactSet(RoaringBitmap imageIds, Selector selector, bool usageOnly) + { + var images = new List<ImpactImage>(); + var latestGeneratedAt = 
DateTimeOffset.MinValue; + + lock (_gate) + { + foreach (var imageId in imageIds) + { + if (!_images.TryGetValue(imageId, out var metadata)) + { + continue; + } + + if (!ImageMatchesSelector(metadata, selector)) + { + continue; + } + + if (usageOnly && metadata.EntrypointComponents.Length == 0) + { + continue; + } + + if (metadata.GeneratedAt > latestGeneratedAt) + { + latestGeneratedAt = metadata.GeneratedAt; + } + + images.Add(new ImpactImage( + metadata.Digest, + metadata.Registry, + metadata.Repository, + metadata.Namespaces, + metadata.Tags, + metadata.EntrypointComponents.Length > 0, + metadata.Labels)); + } + } + + if (images.Count == 0) + { + return CreateEmptyImpactSet(selector, usageOnly); + } + + images.Sort(static (left, right) => string.Compare(left.ImageDigest, right.ImageDigest, StringComparison.Ordinal)); + + var generatedAt = latestGeneratedAt == DateTimeOffset.MinValue ? _timeProvider.GetUtcNow() : latestGeneratedAt; + + return new ImpactSet( + selector, + images.ToImmutableArray(), + usageOnly, + generatedAt, + images.Count, + snapshotId: null, + schemaVersion: SchedulerSchemaVersions.ImpactSet); + } + + private ImpactSet CreateEmptyImpactSet(Selector selector, bool usageOnly) + { + return new ImpactSet( + selector, + ImmutableArray<ImpactImage>.Empty, + usageOnly, + _timeProvider.GetUtcNow(), + 0, + snapshotId: null, + schemaVersion: SchedulerSchemaVersions.ImpactSet); + } + + private static bool ImageMatchesSelector(ImpactImageRecord image, Selector selector) + { + if (selector.TenantId is not null && !string.Equals(selector.TenantId, image.TenantId, StringComparison.Ordinal)) + { + return false; + } + + if (!MatchesScope(image, selector)) + { + return false; + } + + if (selector.Digests.Length > 0 && !selector.Digests.Contains(image.Digest, StringComparer.OrdinalIgnoreCase)) + { + return false; + } + + if (selector.Repositories.Length > 0) + { + var repoMatch = selector.Repositories.Any(repo => + string.Equals(repo, image.Repository, StringComparison.OrdinalIgnoreCase) || + string.Equals(repo, $"{image.Registry}/{image.Repository}", StringComparison.OrdinalIgnoreCase)); + if (!repoMatch) + { + return false; + } + } + + if (selector.Namespaces.Length > 0) + { + if (image.Namespaces.IsDefaultOrEmpty) + { + return false; + } + + var namespaceMatch = selector.Namespaces.Any(ns => image.Namespaces.Contains(ns, StringComparer.OrdinalIgnoreCase)); + if (!namespaceMatch) + { + return false; + } + } + + if (selector.IncludeTags.Length > 0) + { + if (image.Tags.IsDefaultOrEmpty) + { + return false; + } + + var tagMatch = selector.IncludeTags.Any(pattern => image.Tags.Any(tag => MatchesTagPattern(tag, pattern))); + if (!tagMatch) + { + return false; + } + } + + if (selector.Labels.Length > 0) + { + if (image.Labels.Count == 0) + { + return false; + } + + foreach (var label in selector.Labels) + { + if (!image.Labels.TryGetValue(label.Key, out var value)) + { + return false; + } + + if (label.Values.Length > 0 && !label.Values.Contains(value, StringComparer.OrdinalIgnoreCase)) + { + return false; + } + } + } + + return true; + } + + private void RemoveImageComponents(ImpactImageRecord record) + { + foreach (var key in record.Components) + { + if (_containsByPurl.TryGetValue(key, out var bitmap)) + { + var updated = RemoveImageFromBitmap(bitmap, record.ImageId); + if (updated is null) + { + _containsByPurl.Remove(key); + } + else + { + _containsByPurl[key] = updated; + } + } + } + + foreach (var key in record.EntrypointComponents) + { + if (_usedByEntrypointByPurl.TryGetValue(key, 
out var bitmap)) + { + var updated = RemoveImageFromBitmap(bitmap, record.ImageId); + if (updated is null) + { + _usedByEntrypointByPurl.Remove(key); + } + else + { + _usedByEntrypointByPurl[key] = updated; + } + } + } + } + + private static RoaringBitmap AddImageToBitmap(RoaringBitmap? bitmap, int imageId) + { + if (bitmap is null) + { + return RoaringBitmap.Create(new[] { imageId }); + } + + if (bitmap.Any(id => id == imageId)) + { + return bitmap; + } + + var merged = bitmap + .Concat(new[] { imageId }) + .Distinct() + .OrderBy(id => id) + .ToArray(); + + return RoaringBitmap.Create(merged); + } + + private static RoaringBitmap? RemoveImageFromBitmap(RoaringBitmap bitmap, int imageId) + { + var remaining = bitmap + .Where(id => id != imageId) + .OrderBy(id => id) + .ToArray(); + if (remaining.Length == 0) + { + return null; + } + + return RoaringBitmap.Create(remaining); + } + + private static bool MatchesScope(ImpactImageRecord image, Selector selector) + { + return selector.Scope switch + { + SelectorScope.AllImages => true, + SelectorScope.ByDigest => selector.Digests.Contains(image.Digest, StringComparer.OrdinalIgnoreCase), + SelectorScope.ByRepository => selector.Repositories.Any(repo => + string.Equals(repo, image.Repository, StringComparison.OrdinalIgnoreCase) || + string.Equals(repo, $"{image.Registry}/{image.Repository}", StringComparison.OrdinalIgnoreCase)), + SelectorScope.ByNamespace => !image.Namespaces.IsDefaultOrEmpty && selector.Namespaces.Any(ns => image.Namespaces.Contains(ns, StringComparer.OrdinalIgnoreCase)), + SelectorScope.ByLabels => selector.Labels.All(label => + image.Labels.TryGetValue(label.Key, out var value) && + (label.Values.Length == 0 || label.Values.Contains(value, StringComparer.OrdinalIgnoreCase))), + _ => true, + }; + } + + private static bool MatchesTagPattern(string tag, string pattern) + { + if (string.IsNullOrWhiteSpace(pattern)) + { + return false; + } + + if (pattern == "*") + { + return true; + } + + if (!pattern.Contains('*') && !pattern.Contains('?')) + { + return string.Equals(tag, pattern, StringComparison.OrdinalIgnoreCase); + } + + var escaped = Regex.Escape(pattern) + .Replace("\\*", ".*") + .Replace("\\?", "."); + return Regex.IsMatch(tag, $"^{escaped}$", RegexOptions.IgnoreCase | RegexOptions.CultureInvariant); + } + + private int EnsureImageId(string digest) + { + if (_imageIds.TryGetValue(digest, out var existing)) + { + return existing; + } + + var candidate = ComputeDeterministicId(digest); + while (_images.ContainsKey(candidate)) + { + candidate = (candidate + 1) & int.MaxValue; + if (candidate == 0) + { + candidate = 1; + } + } + + _imageIds[digest] = candidate; + return candidate; + } + + private static int ComputeDeterministicId(string digest) + { + var bytes = SHA256.HashData(Encoding.UTF8.GetBytes(digest)); + for (var offset = 0; offset <= bytes.Length - sizeof(int); offset += sizeof(int)) + { + var value = BinaryPrimitives.ReadInt32LittleEndian(bytes.AsSpan(offset, sizeof(int))) & int.MaxValue; + if (value != 0) + { + return value; + } + } + + return digest.GetHashCode(StringComparison.OrdinalIgnoreCase) & int.MaxValue; + } +} diff --git a/src/StellaOps.Scheduler.ImpactIndex/StellaOps.Scheduler.ImpactIndex.csproj b/src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/StellaOps.Scheduler.ImpactIndex.csproj similarity index 100% rename from src/StellaOps.Scheduler.ImpactIndex/StellaOps.Scheduler.ImpactIndex.csproj rename to src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/StellaOps.Scheduler.ImpactIndex.csproj 
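For context, a minimal consumer sketch of the query surface relocated above, assuming a RoaringImpactIndex instance and a Selector are obtained elsewhere (their construction is outside this diff); the Samples namespace and method names are illustrative only, not part of the patch:

using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Scheduler.Models;

namespace StellaOps.Scheduler.ImpactIndex.Samples; // illustrative namespace, not in this patch

public static class ImpactIndexQuerySketch
{
    // Resolves the images that contain any of the supplied purls for the given selector scope.
    // usageOnly: true skips images whose BOM-Index reports no entrypoint components.
    public static async Task<ImpactSet> ResolveImpactedImagesAsync(
        RoaringImpactIndex index,
        Selector selector,
        IEnumerable<string> purls,
        CancellationToken cancellationToken = default)
        => await index.ResolveByPurlsAsync(purls, usageOnly: true, selector, cancellationToken);
}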
diff --git a/src/StellaOps.Scheduler.ImpactIndex/TASKS.md b/src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/TASKS.md similarity index 88% rename from src/StellaOps.Scheduler.ImpactIndex/TASKS.md rename to src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/TASKS.md index 951e9f24..c2ca1c09 100644 --- a/src/StellaOps.Scheduler.ImpactIndex/TASKS.md +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/TASKS.md @@ -1,10 +1,10 @@ -# Scheduler ImpactIndex Task Board (Sprint 16) - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| +# Scheduler ImpactIndex Task Board (Sprint 16) + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| | SCHED-IMPACT-16-300 | DONE (2025-10-20) | Scheduler ImpactIndex Guild | SAMPLES-10-001 | **STUB** ingest/query using fixtures to unblock Scheduler planning (remove by SP16 end). | Stub merges fixture BOM-Index, query API returns deterministic results, removal note tracked. | | SCHED-IMPACT-16-301 | DONE (2025-10-26) | Scheduler ImpactIndex Guild | SCANNER-EMIT-10-605 | Implement ingestion of per-image BOM-Index sidecars into roaring bitmap store (contains/usedBy). | Ingestion tests process sample SBOM index; bitmaps persisted; deterministic IDs assigned. | | SCHED-IMPACT-16-302 | DONE (2025-10-26) | Scheduler ImpactIndex Guild | SCHED-IMPACT-16-301 | Provide query APIs (ResolveByPurls, ResolveByVulns, ResolveAll, selectors) with tenant/namespace filters. | Query functions tested; performance benchmarks documented; selectors enforce filters. | | SCHED-IMPACT-16-303 | TODO | Scheduler ImpactIndex Guild | SCHED-IMPACT-16-301 | Snapshot/compaction + invalidation for removed images; persistence to RocksDB/Redis per architecture. | Snapshot routine implemented; invalidation tests pass; docs describe recovery. | -> Removal tracking note: see `src/StellaOps.Scheduler.ImpactIndex/REMOVAL_NOTE.md` for follow-up actions once the roaring bitmap implementation lands. +> Removal tracking note: see `src/Scheduler/__Libraries/StellaOps.Scheduler.ImpactIndex/REMOVAL_NOTE.md` for follow-up actions once the roaring bitmap implementation lands. 
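For SCHED-IMPACT-16-301/302 context, a minimal ingestion-plus-query sketch, assuming an ImpactIndexIngestionRequest has already been built elsewhere (its shape and construction are not shown in this diff) and reusing the index type above; names here are illustrative only:

using System.Threading;
using System.Threading.Tasks;
using StellaOps.Scheduler.ImpactIndex.Ingestion;
using StellaOps.Scheduler.Models;

namespace StellaOps.Scheduler.ImpactIndex.Samples; // illustrative namespace, not in this patch

public static class ImpactIndexIngestionSketch
{
    // Ingests one per-image BOM-Index sidecar, then resolves every image matching the selector.
    public static async Task<ImpactSet> IngestAndResolveAsync(
        RoaringImpactIndex index,
        ImpactIndexIngestionRequest request,
        Selector selector,
        CancellationToken cancellationToken = default)
    {
        // Parses the BOM-Index stream and updates the contains/usedByEntrypoint bitmaps.
        await index.IngestAsync(request, cancellationToken);

        // usageOnly: false returns all matching images, not only those with entrypoint usage.
        return await index.ResolveAllAsync(selector, usageOnly: false, cancellationToken);
    }
}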
diff --git a/src/StellaOps.Scheduler.Models/AGENTS.md b/src/Scheduler/__Libraries/StellaOps.Scheduler.Models/AGENTS.md similarity index 100% rename from src/StellaOps.Scheduler.Models/AGENTS.md rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Models/AGENTS.md diff --git a/src/StellaOps.Scheduler.Models/AssemblyInfo.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Models/AssemblyInfo.cs similarity index 97% rename from src/StellaOps.Scheduler.Models/AssemblyInfo.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Models/AssemblyInfo.cs index 1752efa5..519b19b0 100644 --- a/src/StellaOps.Scheduler.Models/AssemblyInfo.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Models/AssemblyInfo.cs @@ -1,3 +1,3 @@ -using System.Runtime.CompilerServices; - -[assembly: InternalsVisibleTo("StellaOps.Scheduler.ImpactIndex")] +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("StellaOps.Scheduler.ImpactIndex")] diff --git a/src/StellaOps.Scheduler.Models/AuditRecord.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Models/AuditRecord.cs similarity index 100% rename from src/StellaOps.Scheduler.Models/AuditRecord.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Models/AuditRecord.cs diff --git a/src/StellaOps.Scheduler.Models/CanonicalJsonSerializer.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Models/CanonicalJsonSerializer.cs similarity index 100% rename from src/StellaOps.Scheduler.Models/CanonicalJsonSerializer.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Models/CanonicalJsonSerializer.cs diff --git a/src/StellaOps.Scheduler.Models/EnumConverters.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Models/EnumConverters.cs similarity index 100% rename from src/StellaOps.Scheduler.Models/EnumConverters.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Models/EnumConverters.cs diff --git a/src/StellaOps.Scheduler.Models/Enums.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Models/Enums.cs similarity index 100% rename from src/StellaOps.Scheduler.Models/Enums.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Models/Enums.cs diff --git a/src/StellaOps.Scheduler.Models/GraphBuildJob.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Models/GraphBuildJob.cs similarity index 97% rename from src/StellaOps.Scheduler.Models/GraphBuildJob.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Models/GraphBuildJob.cs index 82ed6b62..f87a16c3 100644 --- a/src/StellaOps.Scheduler.Models/GraphBuildJob.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Models/GraphBuildJob.cs @@ -1,132 +1,132 @@ -using System.Collections.Immutable; -using System.Text.Json.Serialization; - -namespace StellaOps.Scheduler.Models; - -/// <summary> -/// Job instructing Cartographer to materialize a graph snapshot for an SBOM version. -/// </summary> -public sealed record GraphBuildJob -{ - public GraphBuildJob( - string id, - string tenantId, - string sbomId, - string sbomVersionId, - string sbomDigest, - GraphJobStatus status, - GraphBuildJobTrigger trigger, - DateTimeOffset createdAt, - string? graphSnapshotId = null, - int attempts = 0, - string? cartographerJobId = null, - string? correlationId = null, - DateTimeOffset? startedAt = null, - DateTimeOffset? completedAt = null, - string? error = null, - IEnumerable<KeyValuePair<string, string>>? metadata = null, - string? 
schemaVersion = null) - : this( - id, - tenantId, - sbomId, - sbomVersionId, - sbomDigest, - Validation.TrimToNull(graphSnapshotId), - status, - trigger, - Validation.EnsureNonNegative(attempts, nameof(attempts)), - Validation.TrimToNull(cartographerJobId), - Validation.TrimToNull(correlationId), - Validation.NormalizeTimestamp(createdAt), - Validation.NormalizeTimestamp(startedAt), - Validation.NormalizeTimestamp(completedAt), - Validation.TrimToNull(error), - Validation.NormalizeMetadata(metadata), - schemaVersion) - { - } - - [JsonConstructor] - public GraphBuildJob( - string id, - string tenantId, - string sbomId, - string sbomVersionId, - string sbomDigest, - string? graphSnapshotId, - GraphJobStatus status, - GraphBuildJobTrigger trigger, - int attempts, - string? cartographerJobId, - string? correlationId, - DateTimeOffset createdAt, - DateTimeOffset? startedAt, - DateTimeOffset? completedAt, - string? error, - ImmutableSortedDictionary<string, string> metadata, - string? schemaVersion = null) - { - Id = Validation.EnsureId(id, nameof(id)); - TenantId = Validation.EnsureTenantId(tenantId, nameof(tenantId)); - SbomId = Validation.EnsureId(sbomId, nameof(sbomId)); - SbomVersionId = Validation.EnsureId(sbomVersionId, nameof(sbomVersionId)); - SbomDigest = Validation.EnsureDigestFormat(sbomDigest, nameof(sbomDigest)); - GraphSnapshotId = Validation.TrimToNull(graphSnapshotId); - Status = status; - Trigger = trigger; - Attempts = Validation.EnsureNonNegative(attempts, nameof(attempts)); - CartographerJobId = Validation.TrimToNull(cartographerJobId); - CorrelationId = Validation.TrimToNull(correlationId); - CreatedAt = Validation.NormalizeTimestamp(createdAt); - StartedAt = Validation.NormalizeTimestamp(startedAt); - CompletedAt = Validation.NormalizeTimestamp(completedAt); - Error = Validation.TrimToNull(error); - var materializedMetadata = metadata ?? ImmutableSortedDictionary<string, string>.Empty; - Metadata = materializedMetadata.Count > 0 - ? materializedMetadata.WithComparers(StringComparer.Ordinal) - : ImmutableSortedDictionary<string, string>.Empty; - SchemaVersion = SchedulerSchemaVersions.EnsureGraphBuildJob(schemaVersion); - } - - public string SchemaVersion { get; } - - public string Id { get; } - - public string TenantId { get; } - - public string SbomId { get; } - - public string SbomVersionId { get; } - - public string SbomDigest { get; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? GraphSnapshotId { get; init; } - - public GraphJobStatus Status { get; init; } - - public GraphBuildJobTrigger Trigger { get; } - - public int Attempts { get; init; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? CartographerJobId { get; init; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? CorrelationId { get; init; } - - public DateTimeOffset CreatedAt { get; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public DateTimeOffset? StartedAt { get; init; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public DateTimeOffset? CompletedAt { get; init; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? 
Error { get; init; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] - public ImmutableSortedDictionary<string, string> Metadata { get; } = ImmutableSortedDictionary<string, string>.Empty; -} +using System.Collections.Immutable; +using System.Text.Json.Serialization; + +namespace StellaOps.Scheduler.Models; + +/// <summary> +/// Job instructing Cartographer to materialize a graph snapshot for an SBOM version. +/// </summary> +public sealed record GraphBuildJob +{ + public GraphBuildJob( + string id, + string tenantId, + string sbomId, + string sbomVersionId, + string sbomDigest, + GraphJobStatus status, + GraphBuildJobTrigger trigger, + DateTimeOffset createdAt, + string? graphSnapshotId = null, + int attempts = 0, + string? cartographerJobId = null, + string? correlationId = null, + DateTimeOffset? startedAt = null, + DateTimeOffset? completedAt = null, + string? error = null, + IEnumerable<KeyValuePair<string, string>>? metadata = null, + string? schemaVersion = null) + : this( + id, + tenantId, + sbomId, + sbomVersionId, + sbomDigest, + Validation.TrimToNull(graphSnapshotId), + status, + trigger, + Validation.EnsureNonNegative(attempts, nameof(attempts)), + Validation.TrimToNull(cartographerJobId), + Validation.TrimToNull(correlationId), + Validation.NormalizeTimestamp(createdAt), + Validation.NormalizeTimestamp(startedAt), + Validation.NormalizeTimestamp(completedAt), + Validation.TrimToNull(error), + Validation.NormalizeMetadata(metadata), + schemaVersion) + { + } + + [JsonConstructor] + public GraphBuildJob( + string id, + string tenantId, + string sbomId, + string sbomVersionId, + string sbomDigest, + string? graphSnapshotId, + GraphJobStatus status, + GraphBuildJobTrigger trigger, + int attempts, + string? cartographerJobId, + string? correlationId, + DateTimeOffset createdAt, + DateTimeOffset? startedAt, + DateTimeOffset? completedAt, + string? error, + ImmutableSortedDictionary<string, string> metadata, + string? schemaVersion = null) + { + Id = Validation.EnsureId(id, nameof(id)); + TenantId = Validation.EnsureTenantId(tenantId, nameof(tenantId)); + SbomId = Validation.EnsureId(sbomId, nameof(sbomId)); + SbomVersionId = Validation.EnsureId(sbomVersionId, nameof(sbomVersionId)); + SbomDigest = Validation.EnsureDigestFormat(sbomDigest, nameof(sbomDigest)); + GraphSnapshotId = Validation.TrimToNull(graphSnapshotId); + Status = status; + Trigger = trigger; + Attempts = Validation.EnsureNonNegative(attempts, nameof(attempts)); + CartographerJobId = Validation.TrimToNull(cartographerJobId); + CorrelationId = Validation.TrimToNull(correlationId); + CreatedAt = Validation.NormalizeTimestamp(createdAt); + StartedAt = Validation.NormalizeTimestamp(startedAt); + CompletedAt = Validation.NormalizeTimestamp(completedAt); + Error = Validation.TrimToNull(error); + var materializedMetadata = metadata ?? ImmutableSortedDictionary<string, string>.Empty; + Metadata = materializedMetadata.Count > 0 + ? materializedMetadata.WithComparers(StringComparer.Ordinal) + : ImmutableSortedDictionary<string, string>.Empty; + SchemaVersion = SchedulerSchemaVersions.EnsureGraphBuildJob(schemaVersion); + } + + public string SchemaVersion { get; } + + public string Id { get; } + + public string TenantId { get; } + + public string SbomId { get; } + + public string SbomVersionId { get; } + + public string SbomDigest { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? 
GraphSnapshotId { get; init; } + + public GraphJobStatus Status { get; init; } + + public GraphBuildJobTrigger Trigger { get; } + + public int Attempts { get; init; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? CartographerJobId { get; init; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? CorrelationId { get; init; } + + public DateTimeOffset CreatedAt { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public DateTimeOffset? StartedAt { get; init; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public DateTimeOffset? CompletedAt { get; init; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Error { get; init; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public ImmutableSortedDictionary<string, string> Metadata { get; } = ImmutableSortedDictionary<string, string>.Empty; +} diff --git a/src/StellaOps.Scheduler.Models/GraphJobStateMachine.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Models/GraphJobStateMachine.cs similarity index 97% rename from src/StellaOps.Scheduler.Models/GraphJobStateMachine.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Models/GraphJobStateMachine.cs index c529b9b8..ef73194e 100644 --- a/src/StellaOps.Scheduler.Models/GraphJobStateMachine.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Models/GraphJobStateMachine.cs @@ -1,241 +1,241 @@ -using System.Collections.Generic; -using System.Linq; - -namespace StellaOps.Scheduler.Models; - -/// <summary> -/// Encapsulates allowed status transitions and invariants for graph jobs. -/// </summary> -public static class GraphJobStateMachine -{ - private static readonly IReadOnlyDictionary<GraphJobStatus, GraphJobStatus[]> Adjacency = new Dictionary<GraphJobStatus, GraphJobStatus[]> - { - [GraphJobStatus.Pending] = new[] { GraphJobStatus.Pending, GraphJobStatus.Queued, GraphJobStatus.Running, GraphJobStatus.Failed, GraphJobStatus.Cancelled }, - [GraphJobStatus.Queued] = new[] { GraphJobStatus.Queued, GraphJobStatus.Running, GraphJobStatus.Failed, GraphJobStatus.Cancelled }, - [GraphJobStatus.Running] = new[] { GraphJobStatus.Running, GraphJobStatus.Completed, GraphJobStatus.Failed, GraphJobStatus.Cancelled }, - [GraphJobStatus.Completed] = new[] { GraphJobStatus.Completed }, - [GraphJobStatus.Failed] = new[] { GraphJobStatus.Failed }, - [GraphJobStatus.Cancelled] = new[] { GraphJobStatus.Cancelled }, - }; - - public static bool CanTransition(GraphJobStatus from, GraphJobStatus to) - { - if (!Adjacency.TryGetValue(from, out var allowed)) - { - return false; - } - - return allowed.Contains(to); - } - - public static bool IsTerminal(GraphJobStatus status) - => status is GraphJobStatus.Completed or GraphJobStatus.Failed or GraphJobStatus.Cancelled; - - public static GraphBuildJob EnsureTransition( - GraphBuildJob job, - GraphJobStatus next, - DateTimeOffset timestamp, - int? attempts = null, - string? errorMessage = null) - { - ArgumentNullException.ThrowIfNull(job); - - var normalizedTimestamp = Validation.NormalizeTimestamp(timestamp); - var current = job.Status; - - if (!CanTransition(current, next)) - { - throw new InvalidOperationException($"Graph build job transition from '{current}' to '{next}' is not allowed."); - } - - var nextAttempts = attempts ?? 
job.Attempts; - if (nextAttempts < job.Attempts) - { - throw new InvalidOperationException("Graph job attempts cannot decrease."); - } - - var startedAt = job.StartedAt; - var completedAt = job.CompletedAt; - - if (current != GraphJobStatus.Running && next == GraphJobStatus.Running && startedAt is null) - { - startedAt = normalizedTimestamp; - } - - if (IsTerminal(next)) - { - completedAt ??= normalizedTimestamp; - } - - string? nextError = null; - if (next == GraphJobStatus.Failed) - { - var effectiveError = string.IsNullOrWhiteSpace(errorMessage) ? job.Error : errorMessage.Trim(); - if (string.IsNullOrWhiteSpace(effectiveError)) - { - throw new InvalidOperationException("Transitioning to Failed requires a non-empty error message."); - } - - nextError = effectiveError; - } - else if (!string.IsNullOrWhiteSpace(errorMessage)) - { - throw new InvalidOperationException("Error message can only be provided when transitioning to Failed state."); - } - - var updated = job with - { - Status = next, - Attempts = nextAttempts, - StartedAt = startedAt, - CompletedAt = completedAt, - Error = nextError, - }; - - Validate(updated); - return updated; - } - - public static GraphOverlayJob EnsureTransition( - GraphOverlayJob job, - GraphJobStatus next, - DateTimeOffset timestamp, - int? attempts = null, - string? errorMessage = null) - { - ArgumentNullException.ThrowIfNull(job); - - var normalizedTimestamp = Validation.NormalizeTimestamp(timestamp); - var current = job.Status; - - if (!CanTransition(current, next)) - { - throw new InvalidOperationException($"Graph overlay job transition from '{current}' to '{next}' is not allowed."); - } - - var nextAttempts = attempts ?? job.Attempts; - if (nextAttempts < job.Attempts) - { - throw new InvalidOperationException("Graph job attempts cannot decrease."); - } - - var startedAt = job.StartedAt; - var completedAt = job.CompletedAt; - - if (current != GraphJobStatus.Running && next == GraphJobStatus.Running && startedAt is null) - { - startedAt = normalizedTimestamp; - } - - if (IsTerminal(next)) - { - completedAt ??= normalizedTimestamp; - } - - string? nextError = null; - if (next == GraphJobStatus.Failed) - { - var effectiveError = string.IsNullOrWhiteSpace(errorMessage) ? 
job.Error : errorMessage.Trim(); - if (string.IsNullOrWhiteSpace(effectiveError)) - { - throw new InvalidOperationException("Transitioning to Failed requires a non-empty error message."); - } - - nextError = effectiveError; - } - else if (!string.IsNullOrWhiteSpace(errorMessage)) - { - throw new InvalidOperationException("Error message can only be provided when transitioning to Failed state."); - } - - var updated = job with - { - Status = next, - Attempts = nextAttempts, - StartedAt = startedAt, - CompletedAt = completedAt, - Error = nextError, - }; - - Validate(updated); - return updated; - } - - public static void Validate(GraphBuildJob job) - { - ArgumentNullException.ThrowIfNull(job); - - if (job.StartedAt is { } started && started < job.CreatedAt) - { - throw new InvalidOperationException("GraphBuildJob.StartedAt cannot be earlier than CreatedAt."); - } - - if (job.CompletedAt is { } completed) - { - if (job.StartedAt is { } start && completed < start) - { - throw new InvalidOperationException("GraphBuildJob.CompletedAt cannot be earlier than StartedAt."); - } - - if (!IsTerminal(job.Status)) - { - throw new InvalidOperationException("GraphBuildJob.CompletedAt set while status is not terminal."); - } - } - else if (IsTerminal(job.Status)) - { - throw new InvalidOperationException("Terminal graph build job states must include CompletedAt."); - } - - if (job.Status == GraphJobStatus.Failed) - { - if (string.IsNullOrWhiteSpace(job.Error)) - { - throw new InvalidOperationException("GraphBuildJob.Error must be populated when status is Failed."); - } - } - else if (!string.IsNullOrWhiteSpace(job.Error)) - { - throw new InvalidOperationException("GraphBuildJob.Error must be null for non-failed states."); - } - } - - public static void Validate(GraphOverlayJob job) - { - ArgumentNullException.ThrowIfNull(job); - - if (job.StartedAt is { } started && started < job.CreatedAt) - { - throw new InvalidOperationException("GraphOverlayJob.StartedAt cannot be earlier than CreatedAt."); - } - - if (job.CompletedAt is { } completed) - { - if (job.StartedAt is { } start && completed < start) - { - throw new InvalidOperationException("GraphOverlayJob.CompletedAt cannot be earlier than StartedAt."); - } - - if (!IsTerminal(job.Status)) - { - throw new InvalidOperationException("GraphOverlayJob.CompletedAt set while status is not terminal."); - } - } - else if (IsTerminal(job.Status)) - { - throw new InvalidOperationException("Terminal graph overlay job states must include CompletedAt."); - } - - if (job.Status == GraphJobStatus.Failed) - { - if (string.IsNullOrWhiteSpace(job.Error)) - { - throw new InvalidOperationException("GraphOverlayJob.Error must be populated when status is Failed."); - } - } - else if (!string.IsNullOrWhiteSpace(job.Error)) - { - throw new InvalidOperationException("GraphOverlayJob.Error must be null for non-failed states."); - } - } -} +using System.Collections.Generic; +using System.Linq; + +namespace StellaOps.Scheduler.Models; + +/// <summary> +/// Encapsulates allowed status transitions and invariants for graph jobs. 
+/// </summary> +public static class GraphJobStateMachine +{ + private static readonly IReadOnlyDictionary<GraphJobStatus, GraphJobStatus[]> Adjacency = new Dictionary<GraphJobStatus, GraphJobStatus[]> + { + [GraphJobStatus.Pending] = new[] { GraphJobStatus.Pending, GraphJobStatus.Queued, GraphJobStatus.Running, GraphJobStatus.Failed, GraphJobStatus.Cancelled }, + [GraphJobStatus.Queued] = new[] { GraphJobStatus.Queued, GraphJobStatus.Running, GraphJobStatus.Failed, GraphJobStatus.Cancelled }, + [GraphJobStatus.Running] = new[] { GraphJobStatus.Running, GraphJobStatus.Completed, GraphJobStatus.Failed, GraphJobStatus.Cancelled }, + [GraphJobStatus.Completed] = new[] { GraphJobStatus.Completed }, + [GraphJobStatus.Failed] = new[] { GraphJobStatus.Failed }, + [GraphJobStatus.Cancelled] = new[] { GraphJobStatus.Cancelled }, + }; + + public static bool CanTransition(GraphJobStatus from, GraphJobStatus to) + { + if (!Adjacency.TryGetValue(from, out var allowed)) + { + return false; + } + + return allowed.Contains(to); + } + + public static bool IsTerminal(GraphJobStatus status) + => status is GraphJobStatus.Completed or GraphJobStatus.Failed or GraphJobStatus.Cancelled; + + public static GraphBuildJob EnsureTransition( + GraphBuildJob job, + GraphJobStatus next, + DateTimeOffset timestamp, + int? attempts = null, + string? errorMessage = null) + { + ArgumentNullException.ThrowIfNull(job); + + var normalizedTimestamp = Validation.NormalizeTimestamp(timestamp); + var current = job.Status; + + if (!CanTransition(current, next)) + { + throw new InvalidOperationException($"Graph build job transition from '{current}' to '{next}' is not allowed."); + } + + var nextAttempts = attempts ?? job.Attempts; + if (nextAttempts < job.Attempts) + { + throw new InvalidOperationException("Graph job attempts cannot decrease."); + } + + var startedAt = job.StartedAt; + var completedAt = job.CompletedAt; + + if (current != GraphJobStatus.Running && next == GraphJobStatus.Running && startedAt is null) + { + startedAt = normalizedTimestamp; + } + + if (IsTerminal(next)) + { + completedAt ??= normalizedTimestamp; + } + + string? nextError = null; + if (next == GraphJobStatus.Failed) + { + var effectiveError = string.IsNullOrWhiteSpace(errorMessage) ? job.Error : errorMessage.Trim(); + if (string.IsNullOrWhiteSpace(effectiveError)) + { + throw new InvalidOperationException("Transitioning to Failed requires a non-empty error message."); + } + + nextError = effectiveError; + } + else if (!string.IsNullOrWhiteSpace(errorMessage)) + { + throw new InvalidOperationException("Error message can only be provided when transitioning to Failed state."); + } + + var updated = job with + { + Status = next, + Attempts = nextAttempts, + StartedAt = startedAt, + CompletedAt = completedAt, + Error = nextError, + }; + + Validate(updated); + return updated; + } + + public static GraphOverlayJob EnsureTransition( + GraphOverlayJob job, + GraphJobStatus next, + DateTimeOffset timestamp, + int? attempts = null, + string? errorMessage = null) + { + ArgumentNullException.ThrowIfNull(job); + + var normalizedTimestamp = Validation.NormalizeTimestamp(timestamp); + var current = job.Status; + + if (!CanTransition(current, next)) + { + throw new InvalidOperationException($"Graph overlay job transition from '{current}' to '{next}' is not allowed."); + } + + var nextAttempts = attempts ?? 
job.Attempts; + if (nextAttempts < job.Attempts) + { + throw new InvalidOperationException("Graph job attempts cannot decrease."); + } + + var startedAt = job.StartedAt; + var completedAt = job.CompletedAt; + + if (current != GraphJobStatus.Running && next == GraphJobStatus.Running && startedAt is null) + { + startedAt = normalizedTimestamp; + } + + if (IsTerminal(next)) + { + completedAt ??= normalizedTimestamp; + } + + string? nextError = null; + if (next == GraphJobStatus.Failed) + { + var effectiveError = string.IsNullOrWhiteSpace(errorMessage) ? job.Error : errorMessage.Trim(); + if (string.IsNullOrWhiteSpace(effectiveError)) + { + throw new InvalidOperationException("Transitioning to Failed requires a non-empty error message."); + } + + nextError = effectiveError; + } + else if (!string.IsNullOrWhiteSpace(errorMessage)) + { + throw new InvalidOperationException("Error message can only be provided when transitioning to Failed state."); + } + + var updated = job with + { + Status = next, + Attempts = nextAttempts, + StartedAt = startedAt, + CompletedAt = completedAt, + Error = nextError, + }; + + Validate(updated); + return updated; + } + + public static void Validate(GraphBuildJob job) + { + ArgumentNullException.ThrowIfNull(job); + + if (job.StartedAt is { } started && started < job.CreatedAt) + { + throw new InvalidOperationException("GraphBuildJob.StartedAt cannot be earlier than CreatedAt."); + } + + if (job.CompletedAt is { } completed) + { + if (job.StartedAt is { } start && completed < start) + { + throw new InvalidOperationException("GraphBuildJob.CompletedAt cannot be earlier than StartedAt."); + } + + if (!IsTerminal(job.Status)) + { + throw new InvalidOperationException("GraphBuildJob.CompletedAt set while status is not terminal."); + } + } + else if (IsTerminal(job.Status)) + { + throw new InvalidOperationException("Terminal graph build job states must include CompletedAt."); + } + + if (job.Status == GraphJobStatus.Failed) + { + if (string.IsNullOrWhiteSpace(job.Error)) + { + throw new InvalidOperationException("GraphBuildJob.Error must be populated when status is Failed."); + } + } + else if (!string.IsNullOrWhiteSpace(job.Error)) + { + throw new InvalidOperationException("GraphBuildJob.Error must be null for non-failed states."); + } + } + + public static void Validate(GraphOverlayJob job) + { + ArgumentNullException.ThrowIfNull(job); + + if (job.StartedAt is { } started && started < job.CreatedAt) + { + throw new InvalidOperationException("GraphOverlayJob.StartedAt cannot be earlier than CreatedAt."); + } + + if (job.CompletedAt is { } completed) + { + if (job.StartedAt is { } start && completed < start) + { + throw new InvalidOperationException("GraphOverlayJob.CompletedAt cannot be earlier than StartedAt."); + } + + if (!IsTerminal(job.Status)) + { + throw new InvalidOperationException("GraphOverlayJob.CompletedAt set while status is not terminal."); + } + } + else if (IsTerminal(job.Status)) + { + throw new InvalidOperationException("Terminal graph overlay job states must include CompletedAt."); + } + + if (job.Status == GraphJobStatus.Failed) + { + if (string.IsNullOrWhiteSpace(job.Error)) + { + throw new InvalidOperationException("GraphOverlayJob.Error must be populated when status is Failed."); + } + } + else if (!string.IsNullOrWhiteSpace(job.Error)) + { + throw new InvalidOperationException("GraphOverlayJob.Error must be null for non-failed states."); + } + } +} diff --git a/src/StellaOps.Scheduler.Models/GraphOverlayJob.cs 
b/src/Scheduler/__Libraries/StellaOps.Scheduler.Models/GraphOverlayJob.cs similarity index 97% rename from src/StellaOps.Scheduler.Models/GraphOverlayJob.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Models/GraphOverlayJob.cs index b7e67b98..be77feb8 100644 --- a/src/StellaOps.Scheduler.Models/GraphOverlayJob.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Models/GraphOverlayJob.cs @@ -1,132 +1,132 @@ -using System.Collections.Immutable; -using System.Text.Json.Serialization; - -namespace StellaOps.Scheduler.Models; - -/// <summary> -/// Job that materializes or refreshes an overlay on top of an existing graph snapshot. -/// </summary> -public sealed record GraphOverlayJob -{ - public GraphOverlayJob( - string id, - string tenantId, - string graphSnapshotId, - GraphOverlayKind overlayKind, - string overlayKey, - GraphJobStatus status, - GraphOverlayJobTrigger trigger, - DateTimeOffset createdAt, - IEnumerable<string>? subjects = null, - int attempts = 0, - string? buildJobId = null, - string? correlationId = null, - DateTimeOffset? startedAt = null, - DateTimeOffset? completedAt = null, - string? error = null, - IEnumerable<KeyValuePair<string, string>>? metadata = null, - string? schemaVersion = null) - : this( - id, - tenantId, - graphSnapshotId, - Validation.TrimToNull(buildJobId), - overlayKind, - Validation.EnsureNotNullOrWhiteSpace(overlayKey, nameof(overlayKey)), - Validation.NormalizeStringSet(subjects, nameof(subjects)), - status, - trigger, - Validation.EnsureNonNegative(attempts, nameof(attempts)), - Validation.TrimToNull(correlationId), - Validation.NormalizeTimestamp(createdAt), - Validation.NormalizeTimestamp(startedAt), - Validation.NormalizeTimestamp(completedAt), - Validation.TrimToNull(error), - Validation.NormalizeMetadata(metadata), - schemaVersion) - { - } - - [JsonConstructor] - public GraphOverlayJob( - string id, - string tenantId, - string graphSnapshotId, - string? buildJobId, - GraphOverlayKind overlayKind, - string overlayKey, - ImmutableArray<string> subjects, - GraphJobStatus status, - GraphOverlayJobTrigger trigger, - int attempts, - string? correlationId, - DateTimeOffset createdAt, - DateTimeOffset? startedAt, - DateTimeOffset? completedAt, - string? error, - ImmutableSortedDictionary<string, string> metadata, - string? schemaVersion = null) - { - Id = Validation.EnsureId(id, nameof(id)); - TenantId = Validation.EnsureTenantId(tenantId, nameof(tenantId)); - GraphSnapshotId = Validation.EnsureId(graphSnapshotId, nameof(graphSnapshotId)); - BuildJobId = Validation.TrimToNull(buildJobId); - OverlayKind = overlayKind; - OverlayKey = Validation.EnsureNotNullOrWhiteSpace(overlayKey, nameof(overlayKey)); - Subjects = subjects.IsDefault ? ImmutableArray<string>.Empty : subjects; - Status = status; - Trigger = trigger; - Attempts = Validation.EnsureNonNegative(attempts, nameof(attempts)); - CorrelationId = Validation.TrimToNull(correlationId); - CreatedAt = Validation.NormalizeTimestamp(createdAt); - StartedAt = Validation.NormalizeTimestamp(startedAt); - CompletedAt = Validation.NormalizeTimestamp(completedAt); - Error = Validation.TrimToNull(error); - var materializedMetadata = metadata ?? ImmutableSortedDictionary<string, string>.Empty; - Metadata = materializedMetadata.Count > 0 - ? 
materializedMetadata.WithComparers(StringComparer.Ordinal) - : ImmutableSortedDictionary<string, string>.Empty; - SchemaVersion = SchedulerSchemaVersions.EnsureGraphOverlayJob(schemaVersion); - } - - public string SchemaVersion { get; } - - public string Id { get; } - - public string TenantId { get; } - - public string GraphSnapshotId { get; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? BuildJobId { get; init; } - - public GraphOverlayKind OverlayKind { get; } - - public string OverlayKey { get; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] - public ImmutableArray<string> Subjects { get; } = ImmutableArray<string>.Empty; - - public GraphJobStatus Status { get; init; } - - public GraphOverlayJobTrigger Trigger { get; } - - public int Attempts { get; init; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? CorrelationId { get; init; } - - public DateTimeOffset CreatedAt { get; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public DateTimeOffset? StartedAt { get; init; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public DateTimeOffset? CompletedAt { get; init; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? Error { get; init; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] - public ImmutableSortedDictionary<string, string> Metadata { get; } = ImmutableSortedDictionary<string, string>.Empty; -} +using System.Collections.Immutable; +using System.Text.Json.Serialization; + +namespace StellaOps.Scheduler.Models; + +/// <summary> +/// Job that materializes or refreshes an overlay on top of an existing graph snapshot. +/// </summary> +public sealed record GraphOverlayJob +{ + public GraphOverlayJob( + string id, + string tenantId, + string graphSnapshotId, + GraphOverlayKind overlayKind, + string overlayKey, + GraphJobStatus status, + GraphOverlayJobTrigger trigger, + DateTimeOffset createdAt, + IEnumerable<string>? subjects = null, + int attempts = 0, + string? buildJobId = null, + string? correlationId = null, + DateTimeOffset? startedAt = null, + DateTimeOffset? completedAt = null, + string? error = null, + IEnumerable<KeyValuePair<string, string>>? metadata = null, + string? schemaVersion = null) + : this( + id, + tenantId, + graphSnapshotId, + Validation.TrimToNull(buildJobId), + overlayKind, + Validation.EnsureNotNullOrWhiteSpace(overlayKey, nameof(overlayKey)), + Validation.NormalizeStringSet(subjects, nameof(subjects)), + status, + trigger, + Validation.EnsureNonNegative(attempts, nameof(attempts)), + Validation.TrimToNull(correlationId), + Validation.NormalizeTimestamp(createdAt), + Validation.NormalizeTimestamp(startedAt), + Validation.NormalizeTimestamp(completedAt), + Validation.TrimToNull(error), + Validation.NormalizeMetadata(metadata), + schemaVersion) + { + } + + [JsonConstructor] + public GraphOverlayJob( + string id, + string tenantId, + string graphSnapshotId, + string? buildJobId, + GraphOverlayKind overlayKind, + string overlayKey, + ImmutableArray<string> subjects, + GraphJobStatus status, + GraphOverlayJobTrigger trigger, + int attempts, + string? correlationId, + DateTimeOffset createdAt, + DateTimeOffset? startedAt, + DateTimeOffset? completedAt, + string? error, + ImmutableSortedDictionary<string, string> metadata, + string? 
schemaVersion = null) + { + Id = Validation.EnsureId(id, nameof(id)); + TenantId = Validation.EnsureTenantId(tenantId, nameof(tenantId)); + GraphSnapshotId = Validation.EnsureId(graphSnapshotId, nameof(graphSnapshotId)); + BuildJobId = Validation.TrimToNull(buildJobId); + OverlayKind = overlayKind; + OverlayKey = Validation.EnsureNotNullOrWhiteSpace(overlayKey, nameof(overlayKey)); + Subjects = subjects.IsDefault ? ImmutableArray<string>.Empty : subjects; + Status = status; + Trigger = trigger; + Attempts = Validation.EnsureNonNegative(attempts, nameof(attempts)); + CorrelationId = Validation.TrimToNull(correlationId); + CreatedAt = Validation.NormalizeTimestamp(createdAt); + StartedAt = Validation.NormalizeTimestamp(startedAt); + CompletedAt = Validation.NormalizeTimestamp(completedAt); + Error = Validation.TrimToNull(error); + var materializedMetadata = metadata ?? ImmutableSortedDictionary<string, string>.Empty; + Metadata = materializedMetadata.Count > 0 + ? materializedMetadata.WithComparers(StringComparer.Ordinal) + : ImmutableSortedDictionary<string, string>.Empty; + SchemaVersion = SchedulerSchemaVersions.EnsureGraphOverlayJob(schemaVersion); + } + + public string SchemaVersion { get; } + + public string Id { get; } + + public string TenantId { get; } + + public string GraphSnapshotId { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? BuildJobId { get; init; } + + public GraphOverlayKind OverlayKind { get; } + + public string OverlayKey { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public ImmutableArray<string> Subjects { get; } = ImmutableArray<string>.Empty; + + public GraphJobStatus Status { get; init; } + + public GraphOverlayJobTrigger Trigger { get; } + + public int Attempts { get; init; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? CorrelationId { get; init; } + + public DateTimeOffset CreatedAt { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public DateTimeOffset? StartedAt { get; init; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public DateTimeOffset? CompletedAt { get; init; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Error { get; init; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public ImmutableSortedDictionary<string, string> Metadata { get; } = ImmutableSortedDictionary<string, string>.Empty; +} diff --git a/src/StellaOps.Scheduler.Models/ImpactSet.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Models/ImpactSet.cs similarity index 100% rename from src/StellaOps.Scheduler.Models/ImpactSet.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Models/ImpactSet.cs diff --git a/src/StellaOps.Scheduler.Models/PolicyRunJob.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Models/PolicyRunJob.cs similarity index 97% rename from src/StellaOps.Scheduler.Models/PolicyRunJob.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Models/PolicyRunJob.cs index ad6ddcfa..40b9ada7 100644 --- a/src/StellaOps.Scheduler.Models/PolicyRunJob.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Models/PolicyRunJob.cs @@ -1,185 +1,185 @@ -using System; -using System.Collections.Immutable; -using System.Text.Json.Serialization; - -namespace StellaOps.Scheduler.Models; - -public sealed record PolicyRunJob( - string SchemaVersion, - string Id, - string TenantId, - string PolicyId, - int? 
PolicyVersion, - PolicyRunMode Mode, - PolicyRunPriority Priority, - int PriorityRank, - string? RunId, - string? RequestedBy, - string? CorrelationId, - ImmutableSortedDictionary<string, string>? Metadata, - PolicyRunInputs Inputs, - DateTimeOffset? QueuedAt, - PolicyRunJobStatus Status, - int AttemptCount, - DateTimeOffset? LastAttemptAt, - string? LastError, - DateTimeOffset CreatedAt, - DateTimeOffset UpdatedAt, - DateTimeOffset AvailableAt, - DateTimeOffset? SubmittedAt, - DateTimeOffset? CompletedAt, - string? LeaseOwner, - DateTimeOffset? LeaseExpiresAt, - bool CancellationRequested, - DateTimeOffset? CancellationRequestedAt, - string? CancellationReason, - DateTimeOffset? CancelledAt) -{ - public string SchemaVersion { get; init; } = SchedulerSchemaVersions.EnsurePolicyRunJob(SchemaVersion); - - public string Id { get; init; } = Validation.EnsureId(Id, nameof(Id)); - - public string TenantId { get; init; } = Validation.EnsureTenantId(TenantId, nameof(TenantId)); - - public string PolicyId { get; init; } = Validation.EnsureSimpleIdentifier(PolicyId, nameof(PolicyId)); - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public int? PolicyVersion { get; init; } = EnsurePolicyVersion(PolicyVersion); - - public PolicyRunMode Mode { get; init; } = Mode; - - public PolicyRunPriority Priority { get; init; } = Priority; - - public int PriorityRank { get; init; } = PriorityRank >= 0 ? PriorityRank : GetPriorityRank(Priority); - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? RunId { get; init; } = NormalizeRunId(RunId); - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? RequestedBy { get; init; } = Validation.TrimToNull(RequestedBy); - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? CorrelationId { get; init; } = Validation.TrimToNull(CorrelationId); - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public ImmutableSortedDictionary<string, string>? Metadata { get; init; } = NormalizeMetadata(Metadata); - - public PolicyRunInputs Inputs { get; init; } = Inputs ?? throw new ArgumentNullException(nameof(Inputs)); - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public DateTimeOffset? QueuedAt { get; init; } = Validation.NormalizeTimestamp(QueuedAt); - - public PolicyRunJobStatus Status { get; init; } = Status; - - public int AttemptCount { get; init; } = Validation.EnsureNonNegative(AttemptCount, nameof(AttemptCount)); - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public DateTimeOffset? LastAttemptAt { get; init; } = Validation.NormalizeTimestamp(LastAttemptAt); - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? LastError { get; init; } = Validation.TrimToNull(LastError); - - public DateTimeOffset CreatedAt { get; init; } = NormalizeTimestamp(CreatedAt, nameof(CreatedAt)); - - public DateTimeOffset UpdatedAt { get; init; } = NormalizeTimestamp(UpdatedAt, nameof(UpdatedAt)); - - public DateTimeOffset AvailableAt { get; init; } = NormalizeTimestamp(AvailableAt, nameof(AvailableAt)); - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public DateTimeOffset? SubmittedAt { get; init; } = Validation.NormalizeTimestamp(SubmittedAt); - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public DateTimeOffset? CompletedAt { get; init; } = Validation.NormalizeTimestamp(CompletedAt); - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? 
LeaseOwner { get; init; } = Validation.TrimToNull(LeaseOwner); - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public DateTimeOffset? LeaseExpiresAt { get; init; } = Validation.NormalizeTimestamp(LeaseExpiresAt); - - public bool CancellationRequested { get; init; } = CancellationRequested; - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public DateTimeOffset? CancellationRequestedAt { get; init; } = Validation.NormalizeTimestamp(CancellationRequestedAt); - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? CancellationReason { get; init; } = Validation.TrimToNull(CancellationReason); - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public DateTimeOffset? CancelledAt { get; init; } = Validation.NormalizeTimestamp(CancelledAt); - - public PolicyRunRequest ToPolicyRunRequest(DateTimeOffset fallbackQueuedAt) - { - var queuedAt = QueuedAt ?? fallbackQueuedAt; - return new PolicyRunRequest( - TenantId, - PolicyId, - Mode, - Inputs, - Priority, - RunId, - PolicyVersion, - RequestedBy, - queuedAt, - CorrelationId, - Metadata); - } - - private static int? EnsurePolicyVersion(int? value) - { - if (value is not null && value <= 0) - { - throw new ArgumentOutOfRangeException(nameof(PolicyVersion), value, "Policy version must be positive."); - } - - return value; - } - - private static string? NormalizeRunId(string? runId) - { - var trimmed = Validation.TrimToNull(runId); - return trimmed is null ? null : Validation.EnsureId(trimmed, nameof(runId)); - } - - private static ImmutableSortedDictionary<string, string>? NormalizeMetadata(ImmutableSortedDictionary<string, string>? metadata) - { - if (metadata is null || metadata.Count == 0) - { - return null; - } - - var builder = ImmutableSortedDictionary.CreateBuilder<string, string>(StringComparer.Ordinal); - foreach (var (key, value) in metadata) - { - var normalizedKey = Validation.TrimToNull(key); - var normalizedValue = Validation.TrimToNull(value); - if (normalizedKey is null || normalizedValue is null) - { - continue; - } - - builder[normalizedKey.ToLowerInvariant()] = normalizedValue; - } - - return builder.Count == 0 ? null : builder.ToImmutable(); - } - - private static int GetPriorityRank(PolicyRunPriority priority) - => priority switch - { - PolicyRunPriority.Emergency => 2, - PolicyRunPriority.High => 1, - _ => 0 - }; - - private static DateTimeOffset NormalizeTimestamp(DateTimeOffset value, string propertyName) - { - var normalized = Validation.NormalizeTimestamp(value); - if (normalized == default) - { - throw new ArgumentException($"{propertyName} must be a valid timestamp.", propertyName); - } - - return normalized; - } -} +using System; +using System.Collections.Immutable; +using System.Text.Json.Serialization; + +namespace StellaOps.Scheduler.Models; + +public sealed record PolicyRunJob( + string SchemaVersion, + string Id, + string TenantId, + string PolicyId, + int? PolicyVersion, + PolicyRunMode Mode, + PolicyRunPriority Priority, + int PriorityRank, + string? RunId, + string? RequestedBy, + string? CorrelationId, + ImmutableSortedDictionary<string, string>? Metadata, + PolicyRunInputs Inputs, + DateTimeOffset? QueuedAt, + PolicyRunJobStatus Status, + int AttemptCount, + DateTimeOffset? LastAttemptAt, + string? LastError, + DateTimeOffset CreatedAt, + DateTimeOffset UpdatedAt, + DateTimeOffset AvailableAt, + DateTimeOffset? SubmittedAt, + DateTimeOffset? CompletedAt, + string? LeaseOwner, + DateTimeOffset? 
LeaseExpiresAt, + bool CancellationRequested, + DateTimeOffset? CancellationRequestedAt, + string? CancellationReason, + DateTimeOffset? CancelledAt) +{ + public string SchemaVersion { get; init; } = SchedulerSchemaVersions.EnsurePolicyRunJob(SchemaVersion); + + public string Id { get; init; } = Validation.EnsureId(Id, nameof(Id)); + + public string TenantId { get; init; } = Validation.EnsureTenantId(TenantId, nameof(TenantId)); + + public string PolicyId { get; init; } = Validation.EnsureSimpleIdentifier(PolicyId, nameof(PolicyId)); + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public int? PolicyVersion { get; init; } = EnsurePolicyVersion(PolicyVersion); + + public PolicyRunMode Mode { get; init; } = Mode; + + public PolicyRunPriority Priority { get; init; } = Priority; + + public int PriorityRank { get; init; } = PriorityRank >= 0 ? PriorityRank : GetPriorityRank(Priority); + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? RunId { get; init; } = NormalizeRunId(RunId); + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? RequestedBy { get; init; } = Validation.TrimToNull(RequestedBy); + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? CorrelationId { get; init; } = Validation.TrimToNull(CorrelationId); + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public ImmutableSortedDictionary<string, string>? Metadata { get; init; } = NormalizeMetadata(Metadata); + + public PolicyRunInputs Inputs { get; init; } = Inputs ?? throw new ArgumentNullException(nameof(Inputs)); + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public DateTimeOffset? QueuedAt { get; init; } = Validation.NormalizeTimestamp(QueuedAt); + + public PolicyRunJobStatus Status { get; init; } = Status; + + public int AttemptCount { get; init; } = Validation.EnsureNonNegative(AttemptCount, nameof(AttemptCount)); + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public DateTimeOffset? LastAttemptAt { get; init; } = Validation.NormalizeTimestamp(LastAttemptAt); + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? LastError { get; init; } = Validation.TrimToNull(LastError); + + public DateTimeOffset CreatedAt { get; init; } = NormalizeTimestamp(CreatedAt, nameof(CreatedAt)); + + public DateTimeOffset UpdatedAt { get; init; } = NormalizeTimestamp(UpdatedAt, nameof(UpdatedAt)); + + public DateTimeOffset AvailableAt { get; init; } = NormalizeTimestamp(AvailableAt, nameof(AvailableAt)); + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public DateTimeOffset? SubmittedAt { get; init; } = Validation.NormalizeTimestamp(SubmittedAt); + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public DateTimeOffset? CompletedAt { get; init; } = Validation.NormalizeTimestamp(CompletedAt); + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? LeaseOwner { get; init; } = Validation.TrimToNull(LeaseOwner); + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public DateTimeOffset? LeaseExpiresAt { get; init; } = Validation.NormalizeTimestamp(LeaseExpiresAt); + + public bool CancellationRequested { get; init; } = CancellationRequested; + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public DateTimeOffset? 
CancellationRequestedAt { get; init; } = Validation.NormalizeTimestamp(CancellationRequestedAt);
+
+    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+    public string? CancellationReason { get; init; } = Validation.TrimToNull(CancellationReason);
+
+    [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
+    public DateTimeOffset? CancelledAt { get; init; } = Validation.NormalizeTimestamp(CancelledAt);
+
+    public PolicyRunRequest ToPolicyRunRequest(DateTimeOffset fallbackQueuedAt)
+    {
+        var queuedAt = QueuedAt ?? fallbackQueuedAt;
+        return new PolicyRunRequest(
+            TenantId,
+            PolicyId,
+            Mode,
+            Inputs,
+            Priority,
+            RunId,
+            PolicyVersion,
+            RequestedBy,
+            queuedAt,
+            CorrelationId,
+            Metadata);
+    }
+
+    private static int? EnsurePolicyVersion(int? value)
+    {
+        if (value is not null && value <= 0)
+        {
+            throw new ArgumentOutOfRangeException(nameof(PolicyVersion), value, "Policy version must be positive.");
+        }
+
+        return value;
+    }
+
+    private static string? NormalizeRunId(string? runId)
+    {
+        var trimmed = Validation.TrimToNull(runId);
+        return trimmed is null ? null : Validation.EnsureId(trimmed, nameof(runId));
+    }
+
+    private static ImmutableSortedDictionary<string, string>? NormalizeMetadata(ImmutableSortedDictionary<string, string>? metadata)
+    {
+        if (metadata is null || metadata.Count == 0)
+        {
+            return null;
+        }
+
+        var builder = ImmutableSortedDictionary.CreateBuilder<string, string>(StringComparer.Ordinal);
+        foreach (var (key, value) in metadata)
+        {
+            var normalizedKey = Validation.TrimToNull(key);
+            var normalizedValue = Validation.TrimToNull(value);
+            if (normalizedKey is null || normalizedValue is null)
+            {
+                continue;
+            }
+
+            builder[normalizedKey.ToLowerInvariant()] = normalizedValue;
+        }
+
+        return builder.Count == 0 ? null : builder.ToImmutable();
+    }
+
+    private static int GetPriorityRank(PolicyRunPriority priority)
+        => priority switch
+        {
+            PolicyRunPriority.Emergency => 2,
+            PolicyRunPriority.High => 1,
+            _ => 0
+        };
+
+    private static DateTimeOffset NormalizeTimestamp(DateTimeOffset value, string propertyName)
+    {
+        var normalized = Validation.NormalizeTimestamp(value);
+        if (normalized == default)
+        {
+            throw new ArgumentException($"{propertyName} must be a valid timestamp.", propertyName);
+        }
+
+        return normalized;
+    }
+}
diff --git a/src/StellaOps.Scheduler.Models/PolicyRunModels.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Models/PolicyRunModels.cs
similarity index 97%
rename from src/StellaOps.Scheduler.Models/PolicyRunModels.cs
rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Models/PolicyRunModels.cs
index 2f5d8249..3217126f 100644
--- a/src/StellaOps.Scheduler.Models/PolicyRunModels.cs
+++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Models/PolicyRunModels.cs
@@ -1,930 +1,930 @@
-using System.Collections.Immutable;
-using System.Linq;
-using System.Text.Json;
-using System.Text.Json.Serialization;
-
-namespace StellaOps.Scheduler.Models;
-
-/// <summary>
-/// Request payload enqueued by the policy orchestrator/clients.
-/// </summary>
-public sealed record PolicyRunRequest
-{
-    public PolicyRunRequest(
-        string tenantId,
-        string policyId,
-        PolicyRunMode mode,
-        PolicyRunInputs? inputs = null,
-        PolicyRunPriority priority = PolicyRunPriority.Normal,
-        string? runId = null,
-        int? policyVersion = null,
-        string? requestedBy = null,
-        DateTimeOffset? queuedAt = null,
-        string? correlationId = null,
-        ImmutableSortedDictionary<string, string>? metadata = null,
-        string?
schemaVersion = null) - : this( - tenantId, - policyId, - policyVersion, - mode, - priority, - runId, - Validation.NormalizeTimestamp(queuedAt), - Validation.TrimToNull(requestedBy), - Validation.TrimToNull(correlationId), - metadata ?? ImmutableSortedDictionary<string, string>.Empty, - inputs ?? PolicyRunInputs.Empty, - schemaVersion) - { - } - - [JsonConstructor] - public PolicyRunRequest( - string tenantId, - string policyId, - int? policyVersion, - PolicyRunMode mode, - PolicyRunPriority priority, - string? runId, - DateTimeOffset? queuedAt, - string? requestedBy, - string? correlationId, - ImmutableSortedDictionary<string, string> metadata, - PolicyRunInputs inputs, - string? schemaVersion = null) - { - SchemaVersion = SchedulerSchemaVersions.EnsurePolicyRunRequest(schemaVersion); - TenantId = Validation.EnsureTenantId(tenantId, nameof(tenantId)); - PolicyId = Validation.EnsureSimpleIdentifier(policyId, nameof(policyId)); - if (policyVersion is not null && policyVersion <= 0) - { - throw new ArgumentOutOfRangeException(nameof(policyVersion), policyVersion, "Policy version must be positive."); - } - - PolicyVersion = policyVersion; - Mode = mode; - Priority = priority; - RunId = Validation.TrimToNull(runId) is { Length: > 0 } normalizedRunId - ? Validation.EnsureId(normalizedRunId, nameof(runId)) - : null; - QueuedAt = Validation.NormalizeTimestamp(queuedAt); - RequestedBy = Validation.TrimToNull(requestedBy); - CorrelationId = Validation.TrimToNull(correlationId); - var normalizedMetadata = (metadata ?? ImmutableSortedDictionary<string, string>.Empty) - .Select(static pair => new KeyValuePair<string, string>( - Validation.TrimToNull(pair.Key)?.ToLowerInvariant() ?? string.Empty, - Validation.TrimToNull(pair.Value) ?? string.Empty)) - .Where(static pair => !string.IsNullOrEmpty(pair.Key) && !string.IsNullOrEmpty(pair.Value)) - .DistinctBy(static pair => pair.Key, StringComparer.Ordinal) - .OrderBy(static pair => pair.Key, StringComparer.Ordinal) - .ToImmutableSortedDictionary(static pair => pair.Key, static pair => pair.Value, StringComparer.Ordinal); - Metadata = normalizedMetadata.Count == 0 ? null : normalizedMetadata; - Inputs = inputs ?? PolicyRunInputs.Empty; - } - - public string SchemaVersion { get; } - - public string TenantId { get; } - - public string PolicyId { get; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public int? PolicyVersion { get; } - - public PolicyRunMode Mode { get; } - - public PolicyRunPriority Priority { get; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? RunId { get; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public DateTimeOffset? QueuedAt { get; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? RequestedBy { get; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? CorrelationId { get; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public ImmutableSortedDictionary<string, string>? Metadata { get; } - - public PolicyRunInputs Inputs { get; } = PolicyRunInputs.Empty; -} - -/// <summary> -/// Scoped inputs for policy runs (SBOM set, cursors, environment). -/// </summary> -public sealed record PolicyRunInputs -{ - public static PolicyRunInputs Empty { get; } = new(); - - public PolicyRunInputs( - IEnumerable<string>? sbomSet = null, - DateTimeOffset? advisoryCursor = null, - DateTimeOffset? vexCursor = null, - IEnumerable<KeyValuePair<string, object?>>? 
env = null, - bool captureExplain = false) - { - _sbomSet = NormalizeSbomSet(sbomSet); - _advisoryCursor = Validation.NormalizeTimestamp(advisoryCursor); - _vexCursor = Validation.NormalizeTimestamp(vexCursor); - _environment = NormalizeEnvironment(env); - CaptureExplain = captureExplain; - } - - public PolicyRunInputs() - { - } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] - public ImmutableArray<string> SbomSet - { - get => _sbomSet; - init => _sbomSet = NormalizeSbomSet(value); - } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public DateTimeOffset? AdvisoryCursor - { - get => _advisoryCursor; - init => _advisoryCursor = Validation.NormalizeTimestamp(value); - } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public DateTimeOffset? VexCursor - { - get => _vexCursor; - init => _vexCursor = Validation.NormalizeTimestamp(value); - } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] - public IReadOnlyDictionary<string, JsonElement> Environment - { - get => _environment; - init => _environment = NormalizeEnvironment(value); - } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] - public bool CaptureExplain { get; init; } - - private ImmutableArray<string> _sbomSet = ImmutableArray<string>.Empty; - private DateTimeOffset? _advisoryCursor; - private DateTimeOffset? _vexCursor; - private IReadOnlyDictionary<string, JsonElement> _environment = ImmutableSortedDictionary<string, JsonElement>.Empty; - - private static ImmutableArray<string> NormalizeSbomSet(IEnumerable<string>? values) - => Validation.NormalizeStringSet(values, nameof(SbomSet)); - - private static ImmutableArray<string> NormalizeSbomSet(ImmutableArray<string> values) - => values.IsDefaultOrEmpty ? ImmutableArray<string>.Empty : NormalizeSbomSet(values.AsEnumerable()); - - private static IReadOnlyDictionary<string, JsonElement> NormalizeEnvironment(IEnumerable<KeyValuePair<string, object?>>? entries) - { - if (entries is null) - { - return ImmutableSortedDictionary<string, JsonElement>.Empty; - } - - var builder = ImmutableSortedDictionary.CreateBuilder<string, JsonElement>(StringComparer.Ordinal); - foreach (var entry in entries) - { - var key = Validation.TrimToNull(entry.Key); - if (key is null) - { - continue; - } - - var normalizedKey = key.ToLowerInvariant(); - var element = entry.Value switch - { - JsonElement jsonElement => jsonElement.Clone(), - JsonDocument jsonDocument => jsonDocument.RootElement.Clone(), - string text => JsonSerializer.SerializeToElement(text).Clone(), - bool boolean => JsonSerializer.SerializeToElement(boolean).Clone(), - int integer => JsonSerializer.SerializeToElement(integer).Clone(), - long longValue => JsonSerializer.SerializeToElement(longValue).Clone(), - double doubleValue => JsonSerializer.SerializeToElement(doubleValue).Clone(), - decimal decimalValue => JsonSerializer.SerializeToElement(decimalValue).Clone(), - null => JsonSerializer.SerializeToElement<object?>(null).Clone(), - _ => JsonSerializer.SerializeToElement(entry.Value, entry.Value.GetType()).Clone(), - }; - - builder[normalizedKey] = element; - } - - return builder.ToImmutable(); - } - - private static IReadOnlyDictionary<string, JsonElement> NormalizeEnvironment(IReadOnlyDictionary<string, JsonElement>? 
environment) - { - if (environment is null || environment.Count == 0) - { - return ImmutableSortedDictionary<string, JsonElement>.Empty; - } - - var builder = ImmutableSortedDictionary.CreateBuilder<string, JsonElement>(StringComparer.Ordinal); - foreach (var entry in environment) - { - var key = Validation.TrimToNull(entry.Key); - if (key is null) - { - continue; - } - - builder[key.ToLowerInvariant()] = entry.Value.Clone(); - } - - return builder.ToImmutable(); - } -} - -/// <summary> -/// Stored status for a policy run (policy_runs collection). -/// </summary> -public sealed record PolicyRunStatus -{ - public PolicyRunStatus( - string runId, - string tenantId, - string policyId, - int policyVersion, - PolicyRunMode mode, - PolicyRunExecutionStatus status, - PolicyRunPriority priority, - DateTimeOffset queuedAt, - PolicyRunStats? stats = null, - PolicyRunInputs? inputs = null, - DateTimeOffset? startedAt = null, - DateTimeOffset? finishedAt = null, - string? determinismHash = null, - string? errorCode = null, - string? error = null, - int attempts = 0, - string? traceId = null, - string? explainUri = null, - ImmutableSortedDictionary<string, string>? metadata = null, - string? schemaVersion = null) - : this( - runId, - tenantId, - policyId, - policyVersion, - mode, - status, - priority, - Validation.NormalizeTimestamp(queuedAt), - Validation.NormalizeTimestamp(startedAt), - Validation.NormalizeTimestamp(finishedAt), - stats ?? PolicyRunStats.Empty, - inputs ?? PolicyRunInputs.Empty, - determinismHash, - Validation.TrimToNull(errorCode), - Validation.TrimToNull(error), - attempts, - Validation.TrimToNull(traceId), - Validation.TrimToNull(explainUri), - metadata ?? ImmutableSortedDictionary<string, string>.Empty, - schemaVersion) - { - } - - [JsonConstructor] - public PolicyRunStatus( - string runId, - string tenantId, - string policyId, - int policyVersion, - PolicyRunMode mode, - PolicyRunExecutionStatus status, - PolicyRunPriority priority, - DateTimeOffset queuedAt, - DateTimeOffset? startedAt, - DateTimeOffset? finishedAt, - PolicyRunStats stats, - PolicyRunInputs inputs, - string? determinismHash, - string? errorCode, - string? error, - int attempts, - string? traceId, - string? explainUri, - ImmutableSortedDictionary<string, string> metadata, - string? schemaVersion = null) - { - SchemaVersion = SchedulerSchemaVersions.EnsurePolicyRunStatus(schemaVersion); - RunId = Validation.EnsureId(runId, nameof(runId)); - TenantId = Validation.EnsureTenantId(tenantId, nameof(tenantId)); - PolicyId = Validation.EnsureSimpleIdentifier(policyId, nameof(policyId)); - if (policyVersion <= 0) - { - throw new ArgumentOutOfRangeException(nameof(policyVersion), policyVersion, "Policy version must be positive."); - } - - PolicyVersion = policyVersion; - Mode = mode; - Status = status; - Priority = priority; - QueuedAt = Validation.NormalizeTimestamp(queuedAt); - StartedAt = Validation.NormalizeTimestamp(startedAt); - FinishedAt = Validation.NormalizeTimestamp(finishedAt); - Stats = stats ?? PolicyRunStats.Empty; - Inputs = inputs ?? PolicyRunInputs.Empty; - DeterminismHash = Validation.TrimToNull(determinismHash); - ErrorCode = Validation.TrimToNull(errorCode); - Error = Validation.TrimToNull(error); - Attempts = attempts < 0 - ? throw new ArgumentOutOfRangeException(nameof(attempts), attempts, "Attempts must be non-negative.") - : attempts; - TraceId = Validation.TrimToNull(traceId); - ExplainUri = Validation.TrimToNull(explainUri); - Metadata = (metadata ?? 
ImmutableSortedDictionary<string, string>.Empty) - .Select(static pair => new KeyValuePair<string, string>( - Validation.TrimToNull(pair.Key)?.ToLowerInvariant() ?? string.Empty, - Validation.TrimToNull(pair.Value) ?? string.Empty)) - .Where(static pair => !string.IsNullOrEmpty(pair.Key) && !string.IsNullOrEmpty(pair.Value)) - .DistinctBy(static pair => pair.Key, StringComparer.Ordinal) - .OrderBy(static pair => pair.Key, StringComparer.Ordinal) - .ToImmutableSortedDictionary(static pair => pair.Key, static pair => pair.Value, StringComparer.Ordinal); - } - - public string SchemaVersion { get; } - - public string RunId { get; } - - public string TenantId { get; } - - public string PolicyId { get; } - - public int PolicyVersion { get; } - - public PolicyRunMode Mode { get; } - - public PolicyRunExecutionStatus Status { get; init; } - - public PolicyRunPriority Priority { get; init; } - - public DateTimeOffset QueuedAt { get; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public DateTimeOffset? StartedAt { get; init; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public DateTimeOffset? FinishedAt { get; init; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? DeterminismHash { get; init; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? ErrorCode { get; init; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? Error { get; init; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] - public int Attempts { get; init; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? TraceId { get; init; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? ExplainUri { get; init; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] - public ImmutableSortedDictionary<string, string> Metadata { get; init; } = ImmutableSortedDictionary<string, string>.Empty; - - public PolicyRunStats Stats { get; init; } = PolicyRunStats.Empty; - - public PolicyRunInputs Inputs { get; init; } = PolicyRunInputs.Empty; -} - -/// <summary> -/// Aggregated metrics captured for a policy run. -/// </summary> -public sealed record PolicyRunStats -{ - public static PolicyRunStats Empty { get; } = new(); - - public PolicyRunStats( - int components = 0, - int rulesFired = 0, - int findingsWritten = 0, - int vexOverrides = 0, - int quieted = 0, - int suppressed = 0, - double? durationSeconds = null) - { - Components = Validation.EnsureNonNegative(components, nameof(components)); - RulesFired = Validation.EnsureNonNegative(rulesFired, nameof(rulesFired)); - FindingsWritten = Validation.EnsureNonNegative(findingsWritten, nameof(findingsWritten)); - VexOverrides = Validation.EnsureNonNegative(vexOverrides, nameof(vexOverrides)); - Quieted = Validation.EnsureNonNegative(quieted, nameof(quieted)); - Suppressed = Validation.EnsureNonNegative(suppressed, nameof(suppressed)); - DurationSeconds = durationSeconds is { } seconds && seconds < 0 - ? 
throw new ArgumentOutOfRangeException(nameof(durationSeconds), durationSeconds, "Duration must be non-negative.") - : durationSeconds; - } - - public int Components { get; } = 0; - - public int RulesFired { get; } = 0; - - public int FindingsWritten { get; } = 0; - - public int VexOverrides { get; } = 0; - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] - public int Quieted { get; } = 0; - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] - public int Suppressed { get; } = 0; - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public double? DurationSeconds { get; } -} - -/// <summary> -/// Summary payload returned by simulations and run diffs. -/// </summary> -public sealed record PolicyDiffSummary -{ - public PolicyDiffSummary( - int added, - int removed, - int unchanged, - IEnumerable<KeyValuePair<string, PolicyDiffSeverityDelta>>? bySeverity = null, - IEnumerable<PolicyDiffRuleDelta>? ruleHits = null, - string? schemaVersion = null) - : this( - Validation.EnsureNonNegative(added, nameof(added)), - Validation.EnsureNonNegative(removed, nameof(removed)), - Validation.EnsureNonNegative(unchanged, nameof(unchanged)), - NormalizeSeverity(bySeverity), - NormalizeRuleHits(ruleHits), - schemaVersion) - { - } - - [JsonConstructor] - public PolicyDiffSummary( - int added, - int removed, - int unchanged, - ImmutableSortedDictionary<string, PolicyDiffSeverityDelta> bySeverity, - ImmutableArray<PolicyDiffRuleDelta> ruleHits, - string? schemaVersion = null) - { - Added = Validation.EnsureNonNegative(added, nameof(added)); - Removed = Validation.EnsureNonNegative(removed, nameof(removed)); - Unchanged = Validation.EnsureNonNegative(unchanged, nameof(unchanged)); - BySeverity = NormalizeSeverity(bySeverity); - RuleHits = ruleHits.IsDefault ? ImmutableArray<PolicyDiffRuleDelta>.Empty : ruleHits; - SchemaVersion = SchedulerSchemaVersions.EnsurePolicyDiffSummary(schemaVersion); - } - - public string SchemaVersion { get; } - - public int Added { get; } - - public int Removed { get; } - - public int Unchanged { get; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] - public ImmutableSortedDictionary<string, PolicyDiffSeverityDelta> BySeverity { get; } = ImmutableSortedDictionary<string, PolicyDiffSeverityDelta>.Empty; - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] - public ImmutableArray<PolicyDiffRuleDelta> RuleHits { get; } = ImmutableArray<PolicyDiffRuleDelta>.Empty; - - private static ImmutableSortedDictionary<string, PolicyDiffSeverityDelta> NormalizeSeverity(IEnumerable<KeyValuePair<string, PolicyDiffSeverityDelta>>? buckets) - { - if (buckets is null) - { - return ImmutableSortedDictionary<string, PolicyDiffSeverityDelta>.Empty; - } - - var builder = ImmutableSortedDictionary.CreateBuilder<string, PolicyDiffSeverityDelta>(StringComparer.OrdinalIgnoreCase); - foreach (var bucket in buckets) - { - var key = Validation.TrimToNull(bucket.Key); - if (key is null) - { - continue; - } - - var normalizedKey = char.ToUpperInvariant(key[0]) + key[1..].ToLowerInvariant(); - builder[normalizedKey] = bucket.Value ?? PolicyDiffSeverityDelta.Empty; - } - - return builder.ToImmutable(); - } - - private static ImmutableArray<PolicyDiffRuleDelta> NormalizeRuleHits(IEnumerable<PolicyDiffRuleDelta>? ruleHits) - { - if (ruleHits is null) - { - return ImmutableArray<PolicyDiffRuleDelta>.Empty; - } - - return ruleHits - .Where(static hit => hit is not null) - .Select(static hit => hit!) 
- .OrderBy(static hit => hit.RuleId, StringComparer.Ordinal) - .ThenBy(static hit => hit.RuleName, StringComparer.Ordinal) - .ToImmutableArray(); - } -} - -/// <summary> -/// Delta counts for a single severity bucket. -/// </summary> -public sealed record PolicyDiffSeverityDelta -{ - public static PolicyDiffSeverityDelta Empty { get; } = new(); - - public PolicyDiffSeverityDelta(int up = 0, int down = 0) - { - Up = Validation.EnsureNonNegative(up, nameof(up)); - Down = Validation.EnsureNonNegative(down, nameof(down)); - } - - public int Up { get; } = 0; - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] - public int Down { get; } = 0; -} - -/// <summary> -/// Delta counts per rule for simulation reporting. -/// </summary> -public sealed record PolicyDiffRuleDelta -{ - public PolicyDiffRuleDelta(string ruleId, string ruleName, int up = 0, int down = 0) - { - RuleId = Validation.EnsureSimpleIdentifier(ruleId, nameof(ruleId)); - RuleName = Validation.EnsureName(ruleName, nameof(ruleName)); - Up = Validation.EnsureNonNegative(up, nameof(up)); - Down = Validation.EnsureNonNegative(down, nameof(down)); - } - - public string RuleId { get; } - - public string RuleName { get; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] - public int Up { get; } = 0; - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] - public int Down { get; } = 0; -} - -/// <summary> -/// Canonical explain trace for a policy finding. -/// </summary> -public sealed record PolicyExplainTrace -{ - public PolicyExplainTrace( - string findingId, - string policyId, - int policyVersion, - string tenantId, - string runId, - PolicyExplainVerdict verdict, - DateTimeOffset evaluatedAt, - IEnumerable<PolicyExplainRule>? ruleChain = null, - IEnumerable<PolicyExplainEvidence>? evidence = null, - IEnumerable<PolicyExplainVexImpact>? vexImpacts = null, - IEnumerable<PolicyExplainHistoryEvent>? history = null, - ImmutableSortedDictionary<string, string>? metadata = null, - string? schemaVersion = null) - : this( - findingId, - policyId, - policyVersion, - tenantId, - runId, - Validation.NormalizeTimestamp(evaluatedAt), - verdict, - NormalizeRuleChain(ruleChain), - NormalizeEvidence(evidence), - NormalizeVexImpacts(vexImpacts), - NormalizeHistory(history), - metadata ?? ImmutableSortedDictionary<string, string>.Empty, - schemaVersion) - { - } - - [JsonConstructor] - public PolicyExplainTrace( - string findingId, - string policyId, - int policyVersion, - string tenantId, - string runId, - DateTimeOffset evaluatedAt, - PolicyExplainVerdict verdict, - ImmutableArray<PolicyExplainRule> ruleChain, - ImmutableArray<PolicyExplainEvidence> evidence, - ImmutableArray<PolicyExplainVexImpact> vexImpacts, - ImmutableArray<PolicyExplainHistoryEvent> history, - ImmutableSortedDictionary<string, string> metadata, - string? schemaVersion = null) - { - SchemaVersion = SchedulerSchemaVersions.EnsurePolicyExplainTrace(schemaVersion); - FindingId = Validation.EnsureSimpleIdentifier(findingId, nameof(findingId)); - PolicyId = Validation.EnsureSimpleIdentifier(policyId, nameof(policyId)); - if (policyVersion <= 0) - { - throw new ArgumentOutOfRangeException(nameof(policyVersion), policyVersion, "Policy version must be positive."); - } - - PolicyVersion = policyVersion; - TenantId = Validation.EnsureTenantId(tenantId, nameof(tenantId)); - RunId = Validation.EnsureId(runId, nameof(runId)); - EvaluatedAt = Validation.NormalizeTimestamp(evaluatedAt); - Verdict = verdict ?? 
throw new ArgumentNullException(nameof(verdict)); - RuleChain = ruleChain.IsDefault ? ImmutableArray<PolicyExplainRule>.Empty : ruleChain; - Evidence = evidence.IsDefault ? ImmutableArray<PolicyExplainEvidence>.Empty : evidence; - VexImpacts = vexImpacts.IsDefault ? ImmutableArray<PolicyExplainVexImpact>.Empty : vexImpacts; - History = history.IsDefault ? ImmutableArray<PolicyExplainHistoryEvent>.Empty : history; - Metadata = (metadata ?? ImmutableSortedDictionary<string, string>.Empty) - .Select(static pair => new KeyValuePair<string, string>( - Validation.TrimToNull(pair.Key)?.ToLowerInvariant() ?? string.Empty, - Validation.TrimToNull(pair.Value) ?? string.Empty)) - .Where(static pair => !string.IsNullOrEmpty(pair.Key) && !string.IsNullOrEmpty(pair.Value)) - .DistinctBy(static pair => pair.Key, StringComparer.Ordinal) - .OrderBy(static pair => pair.Key, StringComparer.Ordinal) - .ToImmutableSortedDictionary(static pair => pair.Key, static pair => pair.Value, StringComparer.Ordinal); - } - - public string SchemaVersion { get; } - - public string FindingId { get; } - - public string PolicyId { get; } - - public int PolicyVersion { get; } - - public string TenantId { get; } - - public string RunId { get; } - - public DateTimeOffset EvaluatedAt { get; } - - public PolicyExplainVerdict Verdict { get; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] - public ImmutableArray<PolicyExplainRule> RuleChain { get; } = ImmutableArray<PolicyExplainRule>.Empty; - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] - public ImmutableArray<PolicyExplainEvidence> Evidence { get; } = ImmutableArray<PolicyExplainEvidence>.Empty; - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] - public ImmutableArray<PolicyExplainVexImpact> VexImpacts { get; } = ImmutableArray<PolicyExplainVexImpact>.Empty; - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] - public ImmutableArray<PolicyExplainHistoryEvent> History { get; } = ImmutableArray<PolicyExplainHistoryEvent>.Empty; - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] - public ImmutableSortedDictionary<string, string> Metadata { get; } = ImmutableSortedDictionary<string, string>.Empty; - - private static ImmutableArray<PolicyExplainRule> NormalizeRuleChain(IEnumerable<PolicyExplainRule>? rules) - { - if (rules is null) - { - return ImmutableArray<PolicyExplainRule>.Empty; - } - - return rules - .Where(static rule => rule is not null) - .Select(static rule => rule!) - .ToImmutableArray(); - } - - private static ImmutableArray<PolicyExplainEvidence> NormalizeEvidence(IEnumerable<PolicyExplainEvidence>? evidence) - { - if (evidence is null) - { - return ImmutableArray<PolicyExplainEvidence>.Empty; - } - - return evidence - .Where(static item => item is not null) - .Select(static item => item!) - .OrderBy(static item => item.Type, StringComparer.Ordinal) - .ThenBy(static item => item.Reference, StringComparer.Ordinal) - .ToImmutableArray(); - } - - private static ImmutableArray<PolicyExplainVexImpact> NormalizeVexImpacts(IEnumerable<PolicyExplainVexImpact>? impacts) - { - if (impacts is null) - { - return ImmutableArray<PolicyExplainVexImpact>.Empty; - } - - return impacts - .Where(static impact => impact is not null) - .Select(static impact => impact!) - .OrderBy(static impact => impact.StatementId, StringComparer.Ordinal) - .ToImmutableArray(); - } - - private static ImmutableArray<PolicyExplainHistoryEvent> NormalizeHistory(IEnumerable<PolicyExplainHistoryEvent>? 
history) - { - if (history is null) - { - return ImmutableArray<PolicyExplainHistoryEvent>.Empty; - } - - return history - .Where(static entry => entry is not null) - .Select(static entry => entry!) - .OrderBy(static entry => entry.OccurredAt) - .ToImmutableArray(); - } -} - -/// <summary> -/// Verdict metadata for explain traces. -/// </summary> -public sealed record PolicyExplainVerdict -{ - public PolicyExplainVerdict( - PolicyVerdictStatus status, - SeverityRank? severity = null, - bool quiet = false, - double? score = null, - string? rationale = null) - { - Status = status; - Severity = severity; - Quiet = quiet; - Score = score; - Rationale = Validation.TrimToNull(rationale); - } - - public PolicyVerdictStatus Status { get; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public SeverityRank? Severity { get; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] - public bool Quiet { get; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public double? Score { get; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? Rationale { get; } -} - -/// <summary> -/// Rule evaluation entry captured in explain traces. -/// </summary> -public sealed record PolicyExplainRule -{ - public PolicyExplainRule( - string ruleId, - string ruleName, - string action, - string decision, - double score, - string? condition = null) - { - RuleId = Validation.EnsureSimpleIdentifier(ruleId, nameof(ruleId)); - RuleName = Validation.EnsureName(ruleName, nameof(ruleName)); - Action = Validation.TrimToNull(action) ?? throw new ArgumentNullException(nameof(action)); - Decision = Validation.TrimToNull(decision) ?? throw new ArgumentNullException(nameof(decision)); - Score = score; - Condition = Validation.TrimToNull(condition); - } - - public string RuleId { get; } - - public string RuleName { get; } - - public string Action { get; } - - public string Decision { get; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] - public double Score { get; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? Condition { get; } -} - -/// <summary> -/// Evidence entry considered during policy evaluation. -/// </summary> -public sealed record PolicyExplainEvidence -{ - public PolicyExplainEvidence( - string type, - string reference, - string source, - string status, - double weight = 0, - string? justification = null, - ImmutableSortedDictionary<string, string>? metadata = null) - { - Type = Validation.TrimToNull(type) ?? throw new ArgumentNullException(nameof(type)); - Reference = Validation.TrimToNull(reference) ?? throw new ArgumentNullException(nameof(reference)); - Source = Validation.TrimToNull(source) ?? throw new ArgumentNullException(nameof(source)); - Status = Validation.TrimToNull(status) ?? throw new ArgumentNullException(nameof(status)); - Weight = weight; - Justification = Validation.TrimToNull(justification); - Metadata = (metadata ?? ImmutableSortedDictionary<string, string>.Empty) - .Select(static pair => new KeyValuePair<string, string>( - Validation.TrimToNull(pair.Key)?.ToLowerInvariant() ?? string.Empty, - Validation.TrimToNull(pair.Value) ?? 
string.Empty)) - .Where(static pair => !string.IsNullOrEmpty(pair.Key) && !string.IsNullOrEmpty(pair.Value)) - .DistinctBy(static pair => pair.Key, StringComparer.Ordinal) - .OrderBy(static pair => pair.Key, StringComparer.Ordinal) - .ToImmutableSortedDictionary(static pair => pair.Key, static pair => pair.Value, StringComparer.Ordinal); - } - - public string Type { get; } - - public string Reference { get; } - - public string Source { get; } - - public string Status { get; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] - public double Weight { get; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? Justification { get; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] - public ImmutableSortedDictionary<string, string> Metadata { get; } = ImmutableSortedDictionary<string, string>.Empty; -} - -/// <summary> -/// VEX statement impact summary captured in explain traces. -/// </summary> -public sealed record PolicyExplainVexImpact -{ - public PolicyExplainVexImpact( - string statementId, - string provider, - string status, - bool accepted, - string? justification = null, - string? confidence = null) - { - StatementId = Validation.TrimToNull(statementId) ?? throw new ArgumentNullException(nameof(statementId)); - Provider = Validation.TrimToNull(provider) ?? throw new ArgumentNullException(nameof(provider)); - Status = Validation.TrimToNull(status) ?? throw new ArgumentNullException(nameof(status)); - Accepted = accepted; - Justification = Validation.TrimToNull(justification); - Confidence = Validation.TrimToNull(confidence); - } - - public string StatementId { get; } - - public string Provider { get; } - - public string Status { get; } - - public bool Accepted { get; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? Justification { get; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? Confidence { get; } -} - -/// <summary> -/// History entry for a finding's policy lifecycle. -/// </summary> -public sealed record PolicyExplainHistoryEvent -{ - public PolicyExplainHistoryEvent( - string status, - DateTimeOffset occurredAt, - string? actor = null, - string? note = null) - { - Status = Validation.TrimToNull(status) ?? throw new ArgumentNullException(nameof(status)); - OccurredAt = Validation.NormalizeTimestamp(occurredAt); - Actor = Validation.TrimToNull(actor); - Note = Validation.TrimToNull(note); - } - - public string Status { get; } - - public DateTimeOffset OccurredAt { get; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? Actor { get; } - - [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] - public string? Note { get; } -} +using System.Collections.Immutable; +using System.Linq; +using System.Text.Json; +using System.Text.Json.Serialization; + +namespace StellaOps.Scheduler.Models; + +/// <summary> +/// Request payload enqueued by the policy orchestrator/clients. +/// </summary> +public sealed record PolicyRunRequest +{ + public PolicyRunRequest( + string tenantId, + string policyId, + PolicyRunMode mode, + PolicyRunInputs? inputs = null, + PolicyRunPriority priority = PolicyRunPriority.Normal, + string? runId = null, + int? policyVersion = null, + string? requestedBy = null, + DateTimeOffset? queuedAt = null, + string? correlationId = null, + ImmutableSortedDictionary<string, string>? metadata = null, + string? 
schemaVersion = null) + : this( + tenantId, + policyId, + policyVersion, + mode, + priority, + runId, + Validation.NormalizeTimestamp(queuedAt), + Validation.TrimToNull(requestedBy), + Validation.TrimToNull(correlationId), + metadata ?? ImmutableSortedDictionary<string, string>.Empty, + inputs ?? PolicyRunInputs.Empty, + schemaVersion) + { + } + + [JsonConstructor] + public PolicyRunRequest( + string tenantId, + string policyId, + int? policyVersion, + PolicyRunMode mode, + PolicyRunPriority priority, + string? runId, + DateTimeOffset? queuedAt, + string? requestedBy, + string? correlationId, + ImmutableSortedDictionary<string, string> metadata, + PolicyRunInputs inputs, + string? schemaVersion = null) + { + SchemaVersion = SchedulerSchemaVersions.EnsurePolicyRunRequest(schemaVersion); + TenantId = Validation.EnsureTenantId(tenantId, nameof(tenantId)); + PolicyId = Validation.EnsureSimpleIdentifier(policyId, nameof(policyId)); + if (policyVersion is not null && policyVersion <= 0) + { + throw new ArgumentOutOfRangeException(nameof(policyVersion), policyVersion, "Policy version must be positive."); + } + + PolicyVersion = policyVersion; + Mode = mode; + Priority = priority; + RunId = Validation.TrimToNull(runId) is { Length: > 0 } normalizedRunId + ? Validation.EnsureId(normalizedRunId, nameof(runId)) + : null; + QueuedAt = Validation.NormalizeTimestamp(queuedAt); + RequestedBy = Validation.TrimToNull(requestedBy); + CorrelationId = Validation.TrimToNull(correlationId); + var normalizedMetadata = (metadata ?? ImmutableSortedDictionary<string, string>.Empty) + .Select(static pair => new KeyValuePair<string, string>( + Validation.TrimToNull(pair.Key)?.ToLowerInvariant() ?? string.Empty, + Validation.TrimToNull(pair.Value) ?? string.Empty)) + .Where(static pair => !string.IsNullOrEmpty(pair.Key) && !string.IsNullOrEmpty(pair.Value)) + .DistinctBy(static pair => pair.Key, StringComparer.Ordinal) + .OrderBy(static pair => pair.Key, StringComparer.Ordinal) + .ToImmutableSortedDictionary(static pair => pair.Key, static pair => pair.Value, StringComparer.Ordinal); + Metadata = normalizedMetadata.Count == 0 ? null : normalizedMetadata; + Inputs = inputs ?? PolicyRunInputs.Empty; + } + + public string SchemaVersion { get; } + + public string TenantId { get; } + + public string PolicyId { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public int? PolicyVersion { get; } + + public PolicyRunMode Mode { get; } + + public PolicyRunPriority Priority { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? RunId { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public DateTimeOffset? QueuedAt { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? RequestedBy { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? CorrelationId { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public ImmutableSortedDictionary<string, string>? Metadata { get; } + + public PolicyRunInputs Inputs { get; } = PolicyRunInputs.Empty; +} + +/// <summary> +/// Scoped inputs for policy runs (SBOM set, cursors, environment). +/// </summary> +public sealed record PolicyRunInputs +{ + public static PolicyRunInputs Empty { get; } = new(); + + public PolicyRunInputs( + IEnumerable<string>? sbomSet = null, + DateTimeOffset? advisoryCursor = null, + DateTimeOffset? vexCursor = null, + IEnumerable<KeyValuePair<string, object?>>? 
env = null, + bool captureExplain = false) + { + _sbomSet = NormalizeSbomSet(sbomSet); + _advisoryCursor = Validation.NormalizeTimestamp(advisoryCursor); + _vexCursor = Validation.NormalizeTimestamp(vexCursor); + _environment = NormalizeEnvironment(env); + CaptureExplain = captureExplain; + } + + public PolicyRunInputs() + { + } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public ImmutableArray<string> SbomSet + { + get => _sbomSet; + init => _sbomSet = NormalizeSbomSet(value); + } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public DateTimeOffset? AdvisoryCursor + { + get => _advisoryCursor; + init => _advisoryCursor = Validation.NormalizeTimestamp(value); + } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public DateTimeOffset? VexCursor + { + get => _vexCursor; + init => _vexCursor = Validation.NormalizeTimestamp(value); + } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public IReadOnlyDictionary<string, JsonElement> Environment + { + get => _environment; + init => _environment = NormalizeEnvironment(value); + } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public bool CaptureExplain { get; init; } + + private ImmutableArray<string> _sbomSet = ImmutableArray<string>.Empty; + private DateTimeOffset? _advisoryCursor; + private DateTimeOffset? _vexCursor; + private IReadOnlyDictionary<string, JsonElement> _environment = ImmutableSortedDictionary<string, JsonElement>.Empty; + + private static ImmutableArray<string> NormalizeSbomSet(IEnumerable<string>? values) + => Validation.NormalizeStringSet(values, nameof(SbomSet)); + + private static ImmutableArray<string> NormalizeSbomSet(ImmutableArray<string> values) + => values.IsDefaultOrEmpty ? ImmutableArray<string>.Empty : NormalizeSbomSet(values.AsEnumerable()); + + private static IReadOnlyDictionary<string, JsonElement> NormalizeEnvironment(IEnumerable<KeyValuePair<string, object?>>? entries) + { + if (entries is null) + { + return ImmutableSortedDictionary<string, JsonElement>.Empty; + } + + var builder = ImmutableSortedDictionary.CreateBuilder<string, JsonElement>(StringComparer.Ordinal); + foreach (var entry in entries) + { + var key = Validation.TrimToNull(entry.Key); + if (key is null) + { + continue; + } + + var normalizedKey = key.ToLowerInvariant(); + var element = entry.Value switch + { + JsonElement jsonElement => jsonElement.Clone(), + JsonDocument jsonDocument => jsonDocument.RootElement.Clone(), + string text => JsonSerializer.SerializeToElement(text).Clone(), + bool boolean => JsonSerializer.SerializeToElement(boolean).Clone(), + int integer => JsonSerializer.SerializeToElement(integer).Clone(), + long longValue => JsonSerializer.SerializeToElement(longValue).Clone(), + double doubleValue => JsonSerializer.SerializeToElement(doubleValue).Clone(), + decimal decimalValue => JsonSerializer.SerializeToElement(decimalValue).Clone(), + null => JsonSerializer.SerializeToElement<object?>(null).Clone(), + _ => JsonSerializer.SerializeToElement(entry.Value, entry.Value.GetType()).Clone(), + }; + + builder[normalizedKey] = element; + } + + return builder.ToImmutable(); + } + + private static IReadOnlyDictionary<string, JsonElement> NormalizeEnvironment(IReadOnlyDictionary<string, JsonElement>? 
environment) + { + if (environment is null || environment.Count == 0) + { + return ImmutableSortedDictionary<string, JsonElement>.Empty; + } + + var builder = ImmutableSortedDictionary.CreateBuilder<string, JsonElement>(StringComparer.Ordinal); + foreach (var entry in environment) + { + var key = Validation.TrimToNull(entry.Key); + if (key is null) + { + continue; + } + + builder[key.ToLowerInvariant()] = entry.Value.Clone(); + } + + return builder.ToImmutable(); + } +} + +/// <summary> +/// Stored status for a policy run (policy_runs collection). +/// </summary> +public sealed record PolicyRunStatus +{ + public PolicyRunStatus( + string runId, + string tenantId, + string policyId, + int policyVersion, + PolicyRunMode mode, + PolicyRunExecutionStatus status, + PolicyRunPriority priority, + DateTimeOffset queuedAt, + PolicyRunStats? stats = null, + PolicyRunInputs? inputs = null, + DateTimeOffset? startedAt = null, + DateTimeOffset? finishedAt = null, + string? determinismHash = null, + string? errorCode = null, + string? error = null, + int attempts = 0, + string? traceId = null, + string? explainUri = null, + ImmutableSortedDictionary<string, string>? metadata = null, + string? schemaVersion = null) + : this( + runId, + tenantId, + policyId, + policyVersion, + mode, + status, + priority, + Validation.NormalizeTimestamp(queuedAt), + Validation.NormalizeTimestamp(startedAt), + Validation.NormalizeTimestamp(finishedAt), + stats ?? PolicyRunStats.Empty, + inputs ?? PolicyRunInputs.Empty, + determinismHash, + Validation.TrimToNull(errorCode), + Validation.TrimToNull(error), + attempts, + Validation.TrimToNull(traceId), + Validation.TrimToNull(explainUri), + metadata ?? ImmutableSortedDictionary<string, string>.Empty, + schemaVersion) + { + } + + [JsonConstructor] + public PolicyRunStatus( + string runId, + string tenantId, + string policyId, + int policyVersion, + PolicyRunMode mode, + PolicyRunExecutionStatus status, + PolicyRunPriority priority, + DateTimeOffset queuedAt, + DateTimeOffset? startedAt, + DateTimeOffset? finishedAt, + PolicyRunStats stats, + PolicyRunInputs inputs, + string? determinismHash, + string? errorCode, + string? error, + int attempts, + string? traceId, + string? explainUri, + ImmutableSortedDictionary<string, string> metadata, + string? schemaVersion = null) + { + SchemaVersion = SchedulerSchemaVersions.EnsurePolicyRunStatus(schemaVersion); + RunId = Validation.EnsureId(runId, nameof(runId)); + TenantId = Validation.EnsureTenantId(tenantId, nameof(tenantId)); + PolicyId = Validation.EnsureSimpleIdentifier(policyId, nameof(policyId)); + if (policyVersion <= 0) + { + throw new ArgumentOutOfRangeException(nameof(policyVersion), policyVersion, "Policy version must be positive."); + } + + PolicyVersion = policyVersion; + Mode = mode; + Status = status; + Priority = priority; + QueuedAt = Validation.NormalizeTimestamp(queuedAt); + StartedAt = Validation.NormalizeTimestamp(startedAt); + FinishedAt = Validation.NormalizeTimestamp(finishedAt); + Stats = stats ?? PolicyRunStats.Empty; + Inputs = inputs ?? PolicyRunInputs.Empty; + DeterminismHash = Validation.TrimToNull(determinismHash); + ErrorCode = Validation.TrimToNull(errorCode); + Error = Validation.TrimToNull(error); + Attempts = attempts < 0 + ? throw new ArgumentOutOfRangeException(nameof(attempts), attempts, "Attempts must be non-negative.") + : attempts; + TraceId = Validation.TrimToNull(traceId); + ExplainUri = Validation.TrimToNull(explainUri); + Metadata = (metadata ?? 
ImmutableSortedDictionary<string, string>.Empty) + .Select(static pair => new KeyValuePair<string, string>( + Validation.TrimToNull(pair.Key)?.ToLowerInvariant() ?? string.Empty, + Validation.TrimToNull(pair.Value) ?? string.Empty)) + .Where(static pair => !string.IsNullOrEmpty(pair.Key) && !string.IsNullOrEmpty(pair.Value)) + .DistinctBy(static pair => pair.Key, StringComparer.Ordinal) + .OrderBy(static pair => pair.Key, StringComparer.Ordinal) + .ToImmutableSortedDictionary(static pair => pair.Key, static pair => pair.Value, StringComparer.Ordinal); + } + + public string SchemaVersion { get; } + + public string RunId { get; } + + public string TenantId { get; } + + public string PolicyId { get; } + + public int PolicyVersion { get; } + + public PolicyRunMode Mode { get; } + + public PolicyRunExecutionStatus Status { get; init; } + + public PolicyRunPriority Priority { get; init; } + + public DateTimeOffset QueuedAt { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public DateTimeOffset? StartedAt { get; init; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public DateTimeOffset? FinishedAt { get; init; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? DeterminismHash { get; init; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? ErrorCode { get; init; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Error { get; init; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public int Attempts { get; init; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? TraceId { get; init; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? ExplainUri { get; init; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public ImmutableSortedDictionary<string, string> Metadata { get; init; } = ImmutableSortedDictionary<string, string>.Empty; + + public PolicyRunStats Stats { get; init; } = PolicyRunStats.Empty; + + public PolicyRunInputs Inputs { get; init; } = PolicyRunInputs.Empty; +} + +/// <summary> +/// Aggregated metrics captured for a policy run. +/// </summary> +public sealed record PolicyRunStats +{ + public static PolicyRunStats Empty { get; } = new(); + + public PolicyRunStats( + int components = 0, + int rulesFired = 0, + int findingsWritten = 0, + int vexOverrides = 0, + int quieted = 0, + int suppressed = 0, + double? durationSeconds = null) + { + Components = Validation.EnsureNonNegative(components, nameof(components)); + RulesFired = Validation.EnsureNonNegative(rulesFired, nameof(rulesFired)); + FindingsWritten = Validation.EnsureNonNegative(findingsWritten, nameof(findingsWritten)); + VexOverrides = Validation.EnsureNonNegative(vexOverrides, nameof(vexOverrides)); + Quieted = Validation.EnsureNonNegative(quieted, nameof(quieted)); + Suppressed = Validation.EnsureNonNegative(suppressed, nameof(suppressed)); + DurationSeconds = durationSeconds is { } seconds && seconds < 0 + ? 
throw new ArgumentOutOfRangeException(nameof(durationSeconds), durationSeconds, "Duration must be non-negative.") + : durationSeconds; + } + + public int Components { get; } = 0; + + public int RulesFired { get; } = 0; + + public int FindingsWritten { get; } = 0; + + public int VexOverrides { get; } = 0; + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public int Quieted { get; } = 0; + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public int Suppressed { get; } = 0; + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public double? DurationSeconds { get; } +} + +/// <summary> +/// Summary payload returned by simulations and run diffs. +/// </summary> +public sealed record PolicyDiffSummary +{ + public PolicyDiffSummary( + int added, + int removed, + int unchanged, + IEnumerable<KeyValuePair<string, PolicyDiffSeverityDelta>>? bySeverity = null, + IEnumerable<PolicyDiffRuleDelta>? ruleHits = null, + string? schemaVersion = null) + : this( + Validation.EnsureNonNegative(added, nameof(added)), + Validation.EnsureNonNegative(removed, nameof(removed)), + Validation.EnsureNonNegative(unchanged, nameof(unchanged)), + NormalizeSeverity(bySeverity), + NormalizeRuleHits(ruleHits), + schemaVersion) + { + } + + [JsonConstructor] + public PolicyDiffSummary( + int added, + int removed, + int unchanged, + ImmutableSortedDictionary<string, PolicyDiffSeverityDelta> bySeverity, + ImmutableArray<PolicyDiffRuleDelta> ruleHits, + string? schemaVersion = null) + { + Added = Validation.EnsureNonNegative(added, nameof(added)); + Removed = Validation.EnsureNonNegative(removed, nameof(removed)); + Unchanged = Validation.EnsureNonNegative(unchanged, nameof(unchanged)); + BySeverity = NormalizeSeverity(bySeverity); + RuleHits = ruleHits.IsDefault ? ImmutableArray<PolicyDiffRuleDelta>.Empty : ruleHits; + SchemaVersion = SchedulerSchemaVersions.EnsurePolicyDiffSummary(schemaVersion); + } + + public string SchemaVersion { get; } + + public int Added { get; } + + public int Removed { get; } + + public int Unchanged { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public ImmutableSortedDictionary<string, PolicyDiffSeverityDelta> BySeverity { get; } = ImmutableSortedDictionary<string, PolicyDiffSeverityDelta>.Empty; + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public ImmutableArray<PolicyDiffRuleDelta> RuleHits { get; } = ImmutableArray<PolicyDiffRuleDelta>.Empty; + + private static ImmutableSortedDictionary<string, PolicyDiffSeverityDelta> NormalizeSeverity(IEnumerable<KeyValuePair<string, PolicyDiffSeverityDelta>>? buckets) + { + if (buckets is null) + { + return ImmutableSortedDictionary<string, PolicyDiffSeverityDelta>.Empty; + } + + var builder = ImmutableSortedDictionary.CreateBuilder<string, PolicyDiffSeverityDelta>(StringComparer.OrdinalIgnoreCase); + foreach (var bucket in buckets) + { + var key = Validation.TrimToNull(bucket.Key); + if (key is null) + { + continue; + } + + var normalizedKey = char.ToUpperInvariant(key[0]) + key[1..].ToLowerInvariant(); + builder[normalizedKey] = bucket.Value ?? PolicyDiffSeverityDelta.Empty; + } + + return builder.ToImmutable(); + } + + private static ImmutableArray<PolicyDiffRuleDelta> NormalizeRuleHits(IEnumerable<PolicyDiffRuleDelta>? ruleHits) + { + if (ruleHits is null) + { + return ImmutableArray<PolicyDiffRuleDelta>.Empty; + } + + return ruleHits + .Where(static hit => hit is not null) + .Select(static hit => hit!) 
+ .OrderBy(static hit => hit.RuleId, StringComparer.Ordinal) + .ThenBy(static hit => hit.RuleName, StringComparer.Ordinal) + .ToImmutableArray(); + } +} + +/// <summary> +/// Delta counts for a single severity bucket. +/// </summary> +public sealed record PolicyDiffSeverityDelta +{ + public static PolicyDiffSeverityDelta Empty { get; } = new(); + + public PolicyDiffSeverityDelta(int up = 0, int down = 0) + { + Up = Validation.EnsureNonNegative(up, nameof(up)); + Down = Validation.EnsureNonNegative(down, nameof(down)); + } + + public int Up { get; } = 0; + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public int Down { get; } = 0; +} + +/// <summary> +/// Delta counts per rule for simulation reporting. +/// </summary> +public sealed record PolicyDiffRuleDelta +{ + public PolicyDiffRuleDelta(string ruleId, string ruleName, int up = 0, int down = 0) + { + RuleId = Validation.EnsureSimpleIdentifier(ruleId, nameof(ruleId)); + RuleName = Validation.EnsureName(ruleName, nameof(ruleName)); + Up = Validation.EnsureNonNegative(up, nameof(up)); + Down = Validation.EnsureNonNegative(down, nameof(down)); + } + + public string RuleId { get; } + + public string RuleName { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public int Up { get; } = 0; + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public int Down { get; } = 0; +} + +/// <summary> +/// Canonical explain trace for a policy finding. +/// </summary> +public sealed record PolicyExplainTrace +{ + public PolicyExplainTrace( + string findingId, + string policyId, + int policyVersion, + string tenantId, + string runId, + PolicyExplainVerdict verdict, + DateTimeOffset evaluatedAt, + IEnumerable<PolicyExplainRule>? ruleChain = null, + IEnumerable<PolicyExplainEvidence>? evidence = null, + IEnumerable<PolicyExplainVexImpact>? vexImpacts = null, + IEnumerable<PolicyExplainHistoryEvent>? history = null, + ImmutableSortedDictionary<string, string>? metadata = null, + string? schemaVersion = null) + : this( + findingId, + policyId, + policyVersion, + tenantId, + runId, + Validation.NormalizeTimestamp(evaluatedAt), + verdict, + NormalizeRuleChain(ruleChain), + NormalizeEvidence(evidence), + NormalizeVexImpacts(vexImpacts), + NormalizeHistory(history), + metadata ?? ImmutableSortedDictionary<string, string>.Empty, + schemaVersion) + { + } + + [JsonConstructor] + public PolicyExplainTrace( + string findingId, + string policyId, + int policyVersion, + string tenantId, + string runId, + DateTimeOffset evaluatedAt, + PolicyExplainVerdict verdict, + ImmutableArray<PolicyExplainRule> ruleChain, + ImmutableArray<PolicyExplainEvidence> evidence, + ImmutableArray<PolicyExplainVexImpact> vexImpacts, + ImmutableArray<PolicyExplainHistoryEvent> history, + ImmutableSortedDictionary<string, string> metadata, + string? schemaVersion = null) + { + SchemaVersion = SchedulerSchemaVersions.EnsurePolicyExplainTrace(schemaVersion); + FindingId = Validation.EnsureSimpleIdentifier(findingId, nameof(findingId)); + PolicyId = Validation.EnsureSimpleIdentifier(policyId, nameof(policyId)); + if (policyVersion <= 0) + { + throw new ArgumentOutOfRangeException(nameof(policyVersion), policyVersion, "Policy version must be positive."); + } + + PolicyVersion = policyVersion; + TenantId = Validation.EnsureTenantId(tenantId, nameof(tenantId)); + RunId = Validation.EnsureId(runId, nameof(runId)); + EvaluatedAt = Validation.NormalizeTimestamp(evaluatedAt); + Verdict = verdict ?? 
throw new ArgumentNullException(nameof(verdict)); + RuleChain = ruleChain.IsDefault ? ImmutableArray<PolicyExplainRule>.Empty : ruleChain; + Evidence = evidence.IsDefault ? ImmutableArray<PolicyExplainEvidence>.Empty : evidence; + VexImpacts = vexImpacts.IsDefault ? ImmutableArray<PolicyExplainVexImpact>.Empty : vexImpacts; + History = history.IsDefault ? ImmutableArray<PolicyExplainHistoryEvent>.Empty : history; + Metadata = (metadata ?? ImmutableSortedDictionary<string, string>.Empty) + .Select(static pair => new KeyValuePair<string, string>( + Validation.TrimToNull(pair.Key)?.ToLowerInvariant() ?? string.Empty, + Validation.TrimToNull(pair.Value) ?? string.Empty)) + .Where(static pair => !string.IsNullOrEmpty(pair.Key) && !string.IsNullOrEmpty(pair.Value)) + .DistinctBy(static pair => pair.Key, StringComparer.Ordinal) + .OrderBy(static pair => pair.Key, StringComparer.Ordinal) + .ToImmutableSortedDictionary(static pair => pair.Key, static pair => pair.Value, StringComparer.Ordinal); + } + + public string SchemaVersion { get; } + + public string FindingId { get; } + + public string PolicyId { get; } + + public int PolicyVersion { get; } + + public string TenantId { get; } + + public string RunId { get; } + + public DateTimeOffset EvaluatedAt { get; } + + public PolicyExplainVerdict Verdict { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public ImmutableArray<PolicyExplainRule> RuleChain { get; } = ImmutableArray<PolicyExplainRule>.Empty; + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public ImmutableArray<PolicyExplainEvidence> Evidence { get; } = ImmutableArray<PolicyExplainEvidence>.Empty; + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public ImmutableArray<PolicyExplainVexImpact> VexImpacts { get; } = ImmutableArray<PolicyExplainVexImpact>.Empty; + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public ImmutableArray<PolicyExplainHistoryEvent> History { get; } = ImmutableArray<PolicyExplainHistoryEvent>.Empty; + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public ImmutableSortedDictionary<string, string> Metadata { get; } = ImmutableSortedDictionary<string, string>.Empty; + + private static ImmutableArray<PolicyExplainRule> NormalizeRuleChain(IEnumerable<PolicyExplainRule>? rules) + { + if (rules is null) + { + return ImmutableArray<PolicyExplainRule>.Empty; + } + + return rules + .Where(static rule => rule is not null) + .Select(static rule => rule!) + .ToImmutableArray(); + } + + private static ImmutableArray<PolicyExplainEvidence> NormalizeEvidence(IEnumerable<PolicyExplainEvidence>? evidence) + { + if (evidence is null) + { + return ImmutableArray<PolicyExplainEvidence>.Empty; + } + + return evidence + .Where(static item => item is not null) + .Select(static item => item!) + .OrderBy(static item => item.Type, StringComparer.Ordinal) + .ThenBy(static item => item.Reference, StringComparer.Ordinal) + .ToImmutableArray(); + } + + private static ImmutableArray<PolicyExplainVexImpact> NormalizeVexImpacts(IEnumerable<PolicyExplainVexImpact>? impacts) + { + if (impacts is null) + { + return ImmutableArray<PolicyExplainVexImpact>.Empty; + } + + return impacts + .Where(static impact => impact is not null) + .Select(static impact => impact!) + .OrderBy(static impact => impact.StatementId, StringComparer.Ordinal) + .ToImmutableArray(); + } + + private static ImmutableArray<PolicyExplainHistoryEvent> NormalizeHistory(IEnumerable<PolicyExplainHistoryEvent>? 
history) + { + if (history is null) + { + return ImmutableArray<PolicyExplainHistoryEvent>.Empty; + } + + return history + .Where(static entry => entry is not null) + .Select(static entry => entry!) + .OrderBy(static entry => entry.OccurredAt) + .ToImmutableArray(); + } +} + +/// <summary> +/// Verdict metadata for explain traces. +/// </summary> +public sealed record PolicyExplainVerdict +{ + public PolicyExplainVerdict( + PolicyVerdictStatus status, + SeverityRank? severity = null, + bool quiet = false, + double? score = null, + string? rationale = null) + { + Status = status; + Severity = severity; + Quiet = quiet; + Score = score; + Rationale = Validation.TrimToNull(rationale); + } + + public PolicyVerdictStatus Status { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public SeverityRank? Severity { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public bool Quiet { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public double? Score { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Rationale { get; } +} + +/// <summary> +/// Rule evaluation entry captured in explain traces. +/// </summary> +public sealed record PolicyExplainRule +{ + public PolicyExplainRule( + string ruleId, + string ruleName, + string action, + string decision, + double score, + string? condition = null) + { + RuleId = Validation.EnsureSimpleIdentifier(ruleId, nameof(ruleId)); + RuleName = Validation.EnsureName(ruleName, nameof(ruleName)); + Action = Validation.TrimToNull(action) ?? throw new ArgumentNullException(nameof(action)); + Decision = Validation.TrimToNull(decision) ?? throw new ArgumentNullException(nameof(decision)); + Score = score; + Condition = Validation.TrimToNull(condition); + } + + public string RuleId { get; } + + public string RuleName { get; } + + public string Action { get; } + + public string Decision { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public double Score { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Condition { get; } +} + +/// <summary> +/// Evidence entry considered during policy evaluation. +/// </summary> +public sealed record PolicyExplainEvidence +{ + public PolicyExplainEvidence( + string type, + string reference, + string source, + string status, + double weight = 0, + string? justification = null, + ImmutableSortedDictionary<string, string>? metadata = null) + { + Type = Validation.TrimToNull(type) ?? throw new ArgumentNullException(nameof(type)); + Reference = Validation.TrimToNull(reference) ?? throw new ArgumentNullException(nameof(reference)); + Source = Validation.TrimToNull(source) ?? throw new ArgumentNullException(nameof(source)); + Status = Validation.TrimToNull(status) ?? throw new ArgumentNullException(nameof(status)); + Weight = weight; + Justification = Validation.TrimToNull(justification); + Metadata = (metadata ?? ImmutableSortedDictionary<string, string>.Empty) + .Select(static pair => new KeyValuePair<string, string>( + Validation.TrimToNull(pair.Key)?.ToLowerInvariant() ?? string.Empty, + Validation.TrimToNull(pair.Value) ?? 
string.Empty)) + .Where(static pair => !string.IsNullOrEmpty(pair.Key) && !string.IsNullOrEmpty(pair.Value)) + .DistinctBy(static pair => pair.Key, StringComparer.Ordinal) + .OrderBy(static pair => pair.Key, StringComparer.Ordinal) + .ToImmutableSortedDictionary(static pair => pair.Key, static pair => pair.Value, StringComparer.Ordinal); + } + + public string Type { get; } + + public string Reference { get; } + + public string Source { get; } + + public string Status { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public double Weight { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Justification { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingDefault)] + public ImmutableSortedDictionary<string, string> Metadata { get; } = ImmutableSortedDictionary<string, string>.Empty; +} + +/// <summary> +/// VEX statement impact summary captured in explain traces. +/// </summary> +public sealed record PolicyExplainVexImpact +{ + public PolicyExplainVexImpact( + string statementId, + string provider, + string status, + bool accepted, + string? justification = null, + string? confidence = null) + { + StatementId = Validation.TrimToNull(statementId) ?? throw new ArgumentNullException(nameof(statementId)); + Provider = Validation.TrimToNull(provider) ?? throw new ArgumentNullException(nameof(provider)); + Status = Validation.TrimToNull(status) ?? throw new ArgumentNullException(nameof(status)); + Accepted = accepted; + Justification = Validation.TrimToNull(justification); + Confidence = Validation.TrimToNull(confidence); + } + + public string StatementId { get; } + + public string Provider { get; } + + public string Status { get; } + + public bool Accepted { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Justification { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Confidence { get; } +} + +/// <summary> +/// History entry for a finding's policy lifecycle. +/// </summary> +public sealed record PolicyExplainHistoryEvent +{ + public PolicyExplainHistoryEvent( + string status, + DateTimeOffset occurredAt, + string? actor = null, + string? note = null) + { + Status = Validation.TrimToNull(status) ?? throw new ArgumentNullException(nameof(status)); + OccurredAt = Validation.NormalizeTimestamp(occurredAt); + Actor = Validation.TrimToNull(actor); + Note = Validation.TrimToNull(note); + } + + public string Status { get; } + + public DateTimeOffset OccurredAt { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? Actor { get; } + + [JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)] + public string? 
Note { get; } +} diff --git a/src/StellaOps.Scheduler.Models/Run.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Models/Run.cs similarity index 100% rename from src/StellaOps.Scheduler.Models/Run.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Models/Run.cs diff --git a/src/StellaOps.Scheduler.Models/RunReasonExtensions.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Models/RunReasonExtensions.cs similarity index 100% rename from src/StellaOps.Scheduler.Models/RunReasonExtensions.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Models/RunReasonExtensions.cs diff --git a/src/StellaOps.Scheduler.Models/RunStateMachine.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Models/RunStateMachine.cs similarity index 100% rename from src/StellaOps.Scheduler.Models/RunStateMachine.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Models/RunStateMachine.cs diff --git a/src/StellaOps.Scheduler.Models/RunStatsBuilder.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Models/RunStatsBuilder.cs similarity index 100% rename from src/StellaOps.Scheduler.Models/RunStatsBuilder.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Models/RunStatsBuilder.cs diff --git a/src/StellaOps.Scheduler.Models/Schedule.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Models/Schedule.cs similarity index 100% rename from src/StellaOps.Scheduler.Models/Schedule.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Models/Schedule.cs diff --git a/src/StellaOps.Scheduler.Models/SchedulerSchemaMigration.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Models/SchedulerSchemaMigration.cs similarity index 100% rename from src/StellaOps.Scheduler.Models/SchedulerSchemaMigration.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Models/SchedulerSchemaMigration.cs diff --git a/src/StellaOps.Scheduler.Models/SchedulerSchemaMigrationResult.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Models/SchedulerSchemaMigrationResult.cs similarity index 100% rename from src/StellaOps.Scheduler.Models/SchedulerSchemaMigrationResult.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Models/SchedulerSchemaMigrationResult.cs diff --git a/src/StellaOps.Scheduler.Models/SchedulerSchemaVersions.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Models/SchedulerSchemaVersions.cs similarity index 100% rename from src/StellaOps.Scheduler.Models/SchedulerSchemaVersions.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Models/SchedulerSchemaVersions.cs diff --git a/src/StellaOps.Scheduler.Models/Selector.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Models/Selector.cs similarity index 100% rename from src/StellaOps.Scheduler.Models/Selector.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Models/Selector.cs diff --git a/src/StellaOps.Scheduler.Models/StellaOps.Scheduler.Models.csproj b/src/Scheduler/__Libraries/StellaOps.Scheduler.Models/StellaOps.Scheduler.Models.csproj similarity index 100% rename from src/StellaOps.Scheduler.Models/StellaOps.Scheduler.Models.csproj rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Models/StellaOps.Scheduler.Models.csproj diff --git a/src/StellaOps.Scheduler.Models/TASKS.md b/src/Scheduler/__Libraries/StellaOps.Scheduler.Models/TASKS.md similarity index 95% rename from src/StellaOps.Scheduler.Models/TASKS.md rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Models/TASKS.md index 43555dfb..391be273 100644 --- a/src/StellaOps.Scheduler.Models/TASKS.md +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Models/TASKS.md 
@@ -1,9 +1,9 @@ -# Scheduler Models Task Board (Sprint 16) - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| SCHED-MODELS-16-101 | DONE (2025-10-19) | Scheduler Models Guild | — | Define DTOs (Schedule, Run, ImpactSet, Selector, DeltaSummary, AuditRecord) with validation + canonical JSON. | DTOs merged with tests; documentation snippet added; serialization deterministic. | -| SCHED-MODELS-16-102 | DONE (2025-10-19) | Scheduler Models Guild | SCHED-MODELS-16-101 | Publish schema docs & sample payloads for UI/Notify integration. | Samples committed; docs referenced; contract tests pass. | +# Scheduler Models Task Board (Sprint 16) + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| SCHED-MODELS-16-101 | DONE (2025-10-19) | Scheduler Models Guild | — | Define DTOs (Schedule, Run, ImpactSet, Selector, DeltaSummary, AuditRecord) with validation + canonical JSON. | DTOs merged with tests; documentation snippet added; serialization deterministic. | +| SCHED-MODELS-16-102 | DONE (2025-10-19) | Scheduler Models Guild | SCHED-MODELS-16-101 | Publish schema docs & sample payloads for UI/Notify integration. | Samples committed; docs referenced; contract tests pass. | | SCHED-MODELS-16-103 | DONE (2025-10-20) | Scheduler Models Guild | SCHED-MODELS-16-101 | Versioning/migration helpers (schedule evolution, run state transitions). | Migration helpers implemented; tests cover upgrade/downgrade; guidelines documented. | ## Policy Engine v2 (Sprint 20) @@ -18,5 +18,5 @@ | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| -| SCHED-MODELS-21-001 | DONE (2025-10-26) | Scheduler Models Guild, Cartographer Guild | CARTO-GRAPH-21-007 | Define job DTOs for graph builds/overlay refresh (`GraphBuildJob`, `GraphOverlayJob`) with deterministic serialization and status enums. | DTOs serialized deterministically; schema snippets documented in `src/StellaOps.Scheduler.Models/docs/SCHED-MODELS-21-001-GRAPH-JOBS.md`; tests cover transitions. | +| SCHED-MODELS-21-001 | DONE (2025-10-26) | Scheduler Models Guild, Cartographer Guild | CARTO-GRAPH-21-007 | Define job DTOs for graph builds/overlay refresh (`GraphBuildJob`, `GraphOverlayJob`) with deterministic serialization and status enums. | DTOs serialized deterministically; schema snippets documented in `src/Scheduler/__Libraries/StellaOps.Scheduler.Models/docs/SCHED-MODELS-21-001-GRAPH-JOBS.md`; tests cover transitions. | | SCHED-MODELS-21-002 | DONE (2025-10-26) | Scheduler Models Guild | SCHED-MODELS-21-001 | Publish schema docs/sample payloads for graph jobs and overlay events for downstream workers/UI. | Docs updated with compliance checklist; samples validated; notifications sent to guilds. 
| diff --git a/src/StellaOps.Scheduler.Models/Validation.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Models/Validation.cs similarity index 100% rename from src/StellaOps.Scheduler.Models/Validation.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Models/Validation.cs diff --git a/src/StellaOps.Scheduler.Models/docs/SCHED-MODELS-16-103-DESIGN.md b/src/Scheduler/__Libraries/StellaOps.Scheduler.Models/docs/SCHED-MODELS-16-103-DESIGN.md similarity index 100% rename from src/StellaOps.Scheduler.Models/docs/SCHED-MODELS-16-103-DESIGN.md rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Models/docs/SCHED-MODELS-16-103-DESIGN.md diff --git a/src/StellaOps.Scheduler.Models/docs/SCHED-MODELS-20-001-POLICY-RUNS.md b/src/Scheduler/__Libraries/StellaOps.Scheduler.Models/docs/SCHED-MODELS-20-001-POLICY-RUNS.md similarity index 97% rename from src/StellaOps.Scheduler.Models/docs/SCHED-MODELS-20-001-POLICY-RUNS.md rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Models/docs/SCHED-MODELS-20-001-POLICY-RUNS.md index cc2ce2ce..cef32e30 100644 --- a/src/StellaOps.Scheduler.Models/docs/SCHED-MODELS-20-001-POLICY-RUNS.md +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Models/docs/SCHED-MODELS-20-001-POLICY-RUNS.md @@ -1,148 +1,148 @@ -# SCHED-MODELS-20-001 — Policy Engine Run DTOs - -> Status: 2025-10-26 — **Complete** - -Defines the scheduler contracts that Policy Engine (Epic 2) relies on for orchestration, simulation, and explainability. DTOs serialize with `CanonicalJsonSerializer` to guarantee deterministic ordering, enabling replay and signed artefacts. - -## PolicyRunRequest — `scheduler.policy-run-request@1` - -Posted by CLI/UI or the orchestrator to enqueue a run. Canonical sample lives at `samples/api/scheduler/policy-run-request.json`. - -```jsonc -{ - "schemaVersion": "scheduler.policy-run-request@1", - "tenantId": "default", - "policyId": "P-7", - "policyVersion": 4, - "mode": "incremental", // full | incremental | simulate - "priority": "normal", // normal | high | emergency - "runId": "run:P-7:2025-10-26:auto", // optional idempotency key - "queuedAt": "2025-10-26T14:05:00+00:00", - "requestedBy": "user:cli", - "correlationId": "req-...", - "metadata": {"source": "stella policy run", "trigger": "cli"}, - "inputs": { - "sbomSet": ["sbom:S-318", "sbom:S-42"], // sorted uniques - "advisoryCursor": "2025-10-26T13:59:00+00:00", - "vexCursor": "2025-10-26T13:58:30+00:00", - "environment": {"exposure": "internet", "sealed": false}, - "captureExplain": true - } -} -``` - -* Environment values accept any JSON primitive/object; keys normalise to lowercase for deterministic hashing. -* `metadata` is optional contextual breadcrumbs (lowercased keys). Use it for orchestrator provenance or offline bundle identifiers. - -## PolicyRunStatus — `scheduler.policy-run-status@1` - -Captured in `policy_runs` collection and returned by run status APIs. Sample: `samples/api/scheduler/policy-run-status.json`. 
- -```jsonc -{ - "schemaVersion": "scheduler.policy-run-status@1", - "runId": "run:P-7:2025-10-26:auto", - "tenantId": "default", - "policyId": "P-7", - "policyVersion": 4, - "mode": "incremental", - "status": "succeeded", // queued|running|succeeded|failed|canceled|replay_pending - "priority": "normal", - "queuedAt": "2025-10-26T14:05:00+00:00", - "startedAt": "2025-10-26T14:05:11+00:00", - "finishedAt": "2025-10-26T14:06:01+00:00", - "determinismHash": "sha256:...", // optional until run completes - "traceId": "01HE0BJX5S4T9YCN6ZT0", - "metadata": {"orchestrator": "scheduler", "sbombatchhash": "sha256:..."}, - "stats": { - "components": 1742, - "rulesFired": 68023, - "findingsWritten": 4321, - "vexOverrides": 210, - "quieted": 12, - "durationSeconds": 50.8 - }, - "inputs": { ... } // same schema as request -} -``` - -* `determinismHash` must be a `sha256:` digest combining ordered input digests + policy digest. -* `attempts` (not shown) increases per retry. -* Error responses populate `errorCode` (`ERR_POL_00x`) and `error` message; omitted when successful. - -## PolicyDiffSummary — `scheduler.policy-diff-summary@1` - -Returned by simulation APIs; referenced by CLI/UI diff visualisations. Sample: `samples/api/scheduler/policy-diff-summary.json`. - -```jsonc -{ - "schemaVersion": "scheduler.policy-diff-summary@1", - "added": 12, - "removed": 8, - "unchanged": 657, - "bySeverity": { - "critical": {"up": 1}, - "high": {"up": 3, "down": 4}, - "medium": {"up": 2, "down": 1} - }, - "ruleHits": [ - {"ruleId": "rule-block-critical", "ruleName": "Block Critical Findings", "up": 1}, - {"ruleId": "rule-quiet-low", "ruleName": "Quiet Low Risk", "down": 2} - ] -} -``` - -* Severity bucket keys normalise to camelCase for JSON determinism across CLI/UI consumers. -* Zero-valued counts (`down`/`up`) are omitted to keep payloads compact. -* `ruleHits` sorts by `ruleId` to keep diff heatmaps deterministic. - -## PolicyExplainTrace — `scheduler.policy-explain-trace@1` - -Canonical explain tree embedded in findings explainers and exported bundles. Sample: `samples/api/scheduler/policy-explain-trace.json`. - -```jsonc -{ - "schemaVersion": "scheduler.policy-explain-trace@1", - "findingId": "finding:sbom:S-42/pkg:npm/lodash@4.17.21", - "policyId": "P-7", - "policyVersion": 4, - "tenantId": "default", - "runId": "run:P-7:2025-10-26:auto", - "evaluatedAt": "2025-10-26T14:06:01+00:00", - "verdict": {"status": "blocked", "severity": "critical", "score": 19.5}, - "ruleChain": [ - {"ruleId": "rule-allow-known", "action": "allow", "decision": "skipped"}, - {"ruleId": "rule-block-critical", "action": "block", "decision": "matched", "score": 19.5} - ], - "evidence": [ - {"type": "advisory", "reference": "CVE-2025-12345", "source": "nvd", "status": "affected", "weight": 1, "metadata": {}}, - {"type": "vex", "reference": "vex:ghsa-2025-0001", "source": "vendor", "status": "not_affected", "weight": 0.5} - ], - "vexImpacts": [ - {"statementId": "vex:ghsa-2025-0001", "provider": "vendor", "status": "not_affected", "accepted": true} - ], - "history": [ - {"status": "blocked", "occurredAt": "2025-10-26T14:06:01+00:00", "actor": "policy-engine"} - ], - "metadata": {"componentpurl": "pkg:npm/lodash@4.17.21", "sbomid": "sbom:S-42", "traceid": "01HE0BJX5S4T9YCN6ZT0"} -} -``` - -* Rule chain preserves execution order; evidence & VEX arrays sort for deterministic outputs. -* Evidence metadata is always emitted (empty object when no attributes) so clients can merge annotations deterministically. 
-* Metadata keys lower-case for consistent lookups (`componentpurl`, `traceid`, etc.). -* `verdict.status` uses `passed|warned|blocked|quieted|ignored` reflecting final policy decision. - -## Compliance Checklist - -| Item | Owner | Status | Notes | -| --- | --- | --- | --- | -| Canonical samples committed (`policy-run-request|status|diff-summary|explain-trace`) | Scheduler Models Guild | ☑ 2025-10-26 | Round-trip tests enforce schema stability. | -| DTOs documented here and linked from `/docs/policy/runs.md` checklist | Scheduler Models Guild | ☑ 2025-10-26 | Added Run DTO schema section. | -| Serializer ensures deterministic ordering for new types | Scheduler Models Guild | ☑ 2025-10-26 | `CanonicalJsonSerializer` updated with property order + converters. | -| Tests cover DTO validation and sample fixtures | Scheduler Models Guild | ☑ 2025-10-26 | `PolicyRunModelsTests` + extended `SamplePayloadTests`. | -| Scheduler guilds notified (Models, Worker, WebService) | Scheduler Models Guild | ☑ 2025-10-26 | Posted in `#scheduler-guild` with sample links. | - ---- - -*Last updated: 2025-10-26.* +# SCHED-MODELS-20-001 — Policy Engine Run DTOs + +> Status: 2025-10-26 — **Complete** + +Defines the scheduler contracts that Policy Engine (Epic 2) relies on for orchestration, simulation, and explainability. DTOs serialize with `CanonicalJsonSerializer` to guarantee deterministic ordering, enabling replay and signed artefacts. + +## PolicyRunRequest — `scheduler.policy-run-request@1` + +Posted by CLI/UI or the orchestrator to enqueue a run. Canonical sample lives at `samples/api/scheduler/policy-run-request.json`. + +```jsonc +{ + "schemaVersion": "scheduler.policy-run-request@1", + "tenantId": "default", + "policyId": "P-7", + "policyVersion": 4, + "mode": "incremental", // full | incremental | simulate + "priority": "normal", // normal | high | emergency + "runId": "run:P-7:2025-10-26:auto", // optional idempotency key + "queuedAt": "2025-10-26T14:05:00+00:00", + "requestedBy": "user:cli", + "correlationId": "req-...", + "metadata": {"source": "stella policy run", "trigger": "cli"}, + "inputs": { + "sbomSet": ["sbom:S-318", "sbom:S-42"], // sorted uniques + "advisoryCursor": "2025-10-26T13:59:00+00:00", + "vexCursor": "2025-10-26T13:58:30+00:00", + "environment": {"exposure": "internet", "sealed": false}, + "captureExplain": true + } +} +``` + +* Environment values accept any JSON primitive/object; keys normalise to lowercase for deterministic hashing. +* `metadata` is optional contextual breadcrumbs (lowercased keys). Use it for orchestrator provenance or offline bundle identifiers. + +## PolicyRunStatus — `scheduler.policy-run-status@1` + +Captured in `policy_runs` collection and returned by run status APIs. Sample: `samples/api/scheduler/policy-run-status.json`. 
+ +```jsonc +{ + "schemaVersion": "scheduler.policy-run-status@1", + "runId": "run:P-7:2025-10-26:auto", + "tenantId": "default", + "policyId": "P-7", + "policyVersion": 4, + "mode": "incremental", + "status": "succeeded", // queued|running|succeeded|failed|canceled|replay_pending + "priority": "normal", + "queuedAt": "2025-10-26T14:05:00+00:00", + "startedAt": "2025-10-26T14:05:11+00:00", + "finishedAt": "2025-10-26T14:06:01+00:00", + "determinismHash": "sha256:...", // optional until run completes + "traceId": "01HE0BJX5S4T9YCN6ZT0", + "metadata": {"orchestrator": "scheduler", "sbombatchhash": "sha256:..."}, + "stats": { + "components": 1742, + "rulesFired": 68023, + "findingsWritten": 4321, + "vexOverrides": 210, + "quieted": 12, + "durationSeconds": 50.8 + }, + "inputs": { ... } // same schema as request +} +``` + +* `determinismHash` must be a `sha256:` digest combining ordered input digests + policy digest. +* `attempts` (not shown) increases per retry. +* Error responses populate `errorCode` (`ERR_POL_00x`) and `error` message; omitted when successful. + +## PolicyDiffSummary — `scheduler.policy-diff-summary@1` + +Returned by simulation APIs; referenced by CLI/UI diff visualisations. Sample: `samples/api/scheduler/policy-diff-summary.json`. + +```jsonc +{ + "schemaVersion": "scheduler.policy-diff-summary@1", + "added": 12, + "removed": 8, + "unchanged": 657, + "bySeverity": { + "critical": {"up": 1}, + "high": {"up": 3, "down": 4}, + "medium": {"up": 2, "down": 1} + }, + "ruleHits": [ + {"ruleId": "rule-block-critical", "ruleName": "Block Critical Findings", "up": 1}, + {"ruleId": "rule-quiet-low", "ruleName": "Quiet Low Risk", "down": 2} + ] +} +``` + +* Severity bucket keys normalise to camelCase for JSON determinism across CLI/UI consumers. +* Zero-valued counts (`down`/`up`) are omitted to keep payloads compact. +* `ruleHits` sorts by `ruleId` to keep diff heatmaps deterministic. + +## PolicyExplainTrace — `scheduler.policy-explain-trace@1` + +Canonical explain tree embedded in findings explainers and exported bundles. Sample: `samples/api/scheduler/policy-explain-trace.json`. + +```jsonc +{ + "schemaVersion": "scheduler.policy-explain-trace@1", + "findingId": "finding:sbom:S-42/pkg:npm/lodash@4.17.21", + "policyId": "P-7", + "policyVersion": 4, + "tenantId": "default", + "runId": "run:P-7:2025-10-26:auto", + "evaluatedAt": "2025-10-26T14:06:01+00:00", + "verdict": {"status": "blocked", "severity": "critical", "score": 19.5}, + "ruleChain": [ + {"ruleId": "rule-allow-known", "action": "allow", "decision": "skipped"}, + {"ruleId": "rule-block-critical", "action": "block", "decision": "matched", "score": 19.5} + ], + "evidence": [ + {"type": "advisory", "reference": "CVE-2025-12345", "source": "nvd", "status": "affected", "weight": 1, "metadata": {}}, + {"type": "vex", "reference": "vex:ghsa-2025-0001", "source": "vendor", "status": "not_affected", "weight": 0.5} + ], + "vexImpacts": [ + {"statementId": "vex:ghsa-2025-0001", "provider": "vendor", "status": "not_affected", "accepted": true} + ], + "history": [ + {"status": "blocked", "occurredAt": "2025-10-26T14:06:01+00:00", "actor": "policy-engine"} + ], + "metadata": {"componentpurl": "pkg:npm/lodash@4.17.21", "sbomid": "sbom:S-42", "traceid": "01HE0BJX5S4T9YCN6ZT0"} +} +``` + +* Rule chain preserves execution order; evidence & VEX arrays sort for deterministic outputs. +* Evidence metadata is always emitted (empty object when no attributes) so clients can merge annotations deterministically. 
+* Metadata keys lower-case for consistent lookups (`componentpurl`, `traceid`, etc.). +* `verdict.status` uses `passed|warned|blocked|quieted|ignored` reflecting final policy decision. + +## Compliance Checklist + +| Item | Owner | Status | Notes | +| --- | --- | --- | --- | +| Canonical samples committed (`policy-run-request|status|diff-summary|explain-trace`) | Scheduler Models Guild | ☑ 2025-10-26 | Round-trip tests enforce schema stability. | +| DTOs documented here and linked from `/docs/policy/runs.md` checklist | Scheduler Models Guild | ☑ 2025-10-26 | Added Run DTO schema section. | +| Serializer ensures deterministic ordering for new types | Scheduler Models Guild | ☑ 2025-10-26 | `CanonicalJsonSerializer` updated with property order + converters. | +| Tests cover DTO validation and sample fixtures | Scheduler Models Guild | ☑ 2025-10-26 | `PolicyRunModelsTests` + extended `SamplePayloadTests`. | +| Scheduler guilds notified (Models, Worker, WebService) | Scheduler Models Guild | ☑ 2025-10-26 | Posted in `#scheduler-guild` with sample links. | + +--- + +*Last updated: 2025-10-26.* diff --git a/src/StellaOps.Scheduler.Models/docs/SCHED-MODELS-21-001-GRAPH-JOBS.md b/src/Scheduler/__Libraries/StellaOps.Scheduler.Models/docs/SCHED-MODELS-21-001-GRAPH-JOBS.md similarity index 98% rename from src/StellaOps.Scheduler.Models/docs/SCHED-MODELS-21-001-GRAPH-JOBS.md rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Models/docs/SCHED-MODELS-21-001-GRAPH-JOBS.md index abd927f4..27274b8b 100644 --- a/src/StellaOps.Scheduler.Models/docs/SCHED-MODELS-21-001-GRAPH-JOBS.md +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Models/docs/SCHED-MODELS-21-001-GRAPH-JOBS.md @@ -1,107 +1,107 @@ -# SCHED-MODELS-21-001 — Graph Job DTOs - -> Status: 2025-10-26 — **Complete** - -Defines the scheduler-facing contracts for Cartographer orchestration. Both DTOs serialize with `CanonicalJsonSerializer` and share the `GraphJobStatus` lifecycle guarded by `GraphJobStateMachine`. - -## GraphBuildJob — `scheduler.graph-build-job@1` - -```jsonc -{ - "schemaVersion": "scheduler.graph-build-job@1", - "id": "gbj_...", - "tenantId": "tenant-id", - "sbomId": "sbom-id", - "sbomVersionId": "sbom-version-id", - "sbomDigest": "sha256:<64-hex>", - "graphSnapshotId": "graph-snapshot-id?", // optional until Cartographer returns id - "status": "pending|queued|running|completed|failed|cancelled", - "trigger": "sbom-version|backfill|manual", - "attempts": 0, - "cartographerJobId": "external-id?", // optional identifier returned by Cartographer - "correlationId": "evt-...", // optional event correlation key - "createdAt": "2025-10-26T12:00:00+00:00", - "startedAt": "2025-10-26T12:00:05+00:00?", - "completedAt": "2025-10-26T12:00:35+00:00?", - "error": "cartographer timeout?", // populated only for failed state - "metadata": { // extra provenance (sorted, case-insensitive keys) - "sbomEventId": "sbom_evt_123" - } -} -``` - -* `sbomDigest` must be a lowercase `sha256:<hex>` string. -* `attempts` is monotonic across retries; `GraphJobStateMachine.EnsureTransition` enforces non-decreasing values and timestamps. -* Terminal states (`completed|failed|cancelled`) require `completedAt` to be set; failures require `error`. 
- -## GraphOverlayJob — `scheduler.graph-overlay-job@1` - -```jsonc -{ - "schemaVersion": "scheduler.graph-overlay-job@1", - "id": "goj_...", - "tenantId": "tenant-id", - "graphSnapshotId": "graph-snapshot-id", - "buildJobId": "gbj_...?", - "overlayKind": "policy|advisory|vex", - "overlayKey": "policy@2025-10-01", - "subjects": [ - "artifact/service-api", - "artifact/service-worker" - ], - "status": "pending|queued|running|completed|failed|cancelled", - "trigger": "policy|advisory|vex|sbom-version|manual", - "attempts": 0, - "correlationId": "policy_run_321?", - "createdAt": "2025-10-26T12:05:00+00:00", - "startedAt": "2025-10-26T12:05:05+00:00?", - "completedAt": "2025-10-26T12:05:15+00:00?", - "error": "overlay build failed?", - "metadata": { - "policyRunId": "policy_run_321" - } -} -``` - -* `overlayKey` is free-form but trimmed; `subjects` are deduplicated and lexicographically ordered. -* `GraphOverlayJobTrigger` strings (`policy`, `advisory`, `vex`, `sbom-version`, `manual`) align with upstream events (Policy Engine, Conseiller, Excititor, SBOM Service, or manual enqueue). -* State invariants mirror build jobs: timestamps advance monotonically, terminal states require `completedAt`, failures require `error`. - -## Status & trigger matrix - -| Enum | JSON values | -| --- | --- | -| `GraphJobStatus` | `pending`, `queued`, `running`, `completed`, `failed`, `cancelled` | -| `GraphBuildJobTrigger` | `sbom-version`, `backfill`, `manual` | -| `GraphOverlayJobTrigger` | `policy`, `advisory`, `vex`, `sbom-version`, `manual` | -| `GraphOverlayKind` | `policy`, `advisory`, `vex` | - -`GraphJobStateMachine` exposes `CanTransition` and `EnsureTransition(...)` helpers to keep scheduler workers deterministic and to centralize validation logic. Callers must provide an error message when moving to `failed`; other states clear the error automatically. - ---- - -## Published samples - -- `samples/api/scheduler/graph-build-job.json` – canonical Cartographer build request snapshot (status `running`, one retry). -- `samples/api/scheduler/graph-overlay-job.json` – queued policy overlay job with deduplicated `subjects`. -- `docs/events/samples/scheduler.graph.job.completed@1.sample.json` – legacy completion event embedding the canonical job payload for downstream caches/UI. - -Tests in `StellaOps.Scheduler.Models.Tests/SamplePayloadTests.cs` validate the job fixtures against the canonical serializer. - ---- - -## Events - -Scheduler emits `scheduler.graph.job.completed@1` when a graph build or overlay job reaches `completed`, `failed`, or `cancelled`. Schema lives at `docs/events/scheduler.graph.job.completed@1.json` (legacy envelope) and the sample above illustrates the canonical payload. Downstream services should validate their consumers against the schema and budget for eventual migration to the orchestrator envelope once Cartographer hooks are promoted. - ---- - -## Compliance checklist - -| Item | Owner | Status | Notes | -| --- | --- | --- | --- | -| Canonical graph job samples committed under `samples/api/scheduler` | Scheduler Models Guild | ☑ 2025-10-26 | Round-trip tests cover both payloads. | -| Schema doc published with trigger/status matrix and sample references | Scheduler Models Guild | ☑ 2025-10-26 | This document. | -| Event schema + sample published under `docs/events/` | Scheduler Models Guild | ☑ 2025-10-26 | `scheduler.graph.job.completed@1` covers terminal job events. 
| -| Notify Scheduler WebService & Worker guilds about new DTO availability | Scheduler Models Guild | ☑ 2025-10-26 | Announcement posted (see `docs/updates/2025-10-26-scheduler-graph-jobs.md`). | -| Notify Cartographer Guild about expected job metadata (`graphSnapshotId`, `cartographerJobId`) | Scheduler Models Guild | ☑ 2025-10-26 | Included in Cartographer sync note (`docs/updates/2025-10-26-scheduler-graph-jobs.md`). | +# SCHED-MODELS-21-001 — Graph Job DTOs + +> Status: 2025-10-26 — **Complete** + +Defines the scheduler-facing contracts for Cartographer orchestration. Both DTOs serialize with `CanonicalJsonSerializer` and share the `GraphJobStatus` lifecycle guarded by `GraphJobStateMachine`. + +## GraphBuildJob — `scheduler.graph-build-job@1` + +```jsonc +{ + "schemaVersion": "scheduler.graph-build-job@1", + "id": "gbj_...", + "tenantId": "tenant-id", + "sbomId": "sbom-id", + "sbomVersionId": "sbom-version-id", + "sbomDigest": "sha256:<64-hex>", + "graphSnapshotId": "graph-snapshot-id?", // optional until Cartographer returns id + "status": "pending|queued|running|completed|failed|cancelled", + "trigger": "sbom-version|backfill|manual", + "attempts": 0, + "cartographerJobId": "external-id?", // optional identifier returned by Cartographer + "correlationId": "evt-...", // optional event correlation key + "createdAt": "2025-10-26T12:00:00+00:00", + "startedAt": "2025-10-26T12:00:05+00:00?", + "completedAt": "2025-10-26T12:00:35+00:00?", + "error": "cartographer timeout?", // populated only for failed state + "metadata": { // extra provenance (sorted, case-insensitive keys) + "sbomEventId": "sbom_evt_123" + } +} +``` + +* `sbomDigest` must be a lowercase `sha256:<hex>` string. +* `attempts` is monotonic across retries; `GraphJobStateMachine.EnsureTransition` enforces non-decreasing values and timestamps. +* Terminal states (`completed|failed|cancelled`) require `completedAt` to be set; failures require `error`. + +## GraphOverlayJob — `scheduler.graph-overlay-job@1` + +```jsonc +{ + "schemaVersion": "scheduler.graph-overlay-job@1", + "id": "goj_...", + "tenantId": "tenant-id", + "graphSnapshotId": "graph-snapshot-id", + "buildJobId": "gbj_...?", + "overlayKind": "policy|advisory|vex", + "overlayKey": "policy@2025-10-01", + "subjects": [ + "artifact/service-api", + "artifact/service-worker" + ], + "status": "pending|queued|running|completed|failed|cancelled", + "trigger": "policy|advisory|vex|sbom-version|manual", + "attempts": 0, + "correlationId": "policy_run_321?", + "createdAt": "2025-10-26T12:05:00+00:00", + "startedAt": "2025-10-26T12:05:05+00:00?", + "completedAt": "2025-10-26T12:05:15+00:00?", + "error": "overlay build failed?", + "metadata": { + "policyRunId": "policy_run_321" + } +} +``` + +* `overlayKey` is free-form but trimmed; `subjects` are deduplicated and lexicographically ordered. +* `GraphOverlayJobTrigger` strings (`policy`, `advisory`, `vex`, `sbom-version`, `manual`) align with upstream events (Policy Engine, Conseiller, Excititor, SBOM Service, or manual enqueue). +* State invariants mirror build jobs: timestamps advance monotonically, terminal states require `completedAt`, failures require `error`. 
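+
+For orientation, the lifecycle invariants above can be restated as a small standalone check. The sketch below is illustrative only (a simplified parameter shape and a hypothetical `GraphJobInvariants` helper, not the production API); the authoritative rules are enforced by `GraphJobStateMachine`, summarised in the matrix that follows.
+
+```csharp
+// Illustrative sketch: restates the documented invariants; not the GraphJobStateMachine surface.
+using System;
+using System.Collections.Generic;
+
+public enum GraphJobStatus { Pending, Queued, Running, Completed, Failed, Cancelled }
+
+public static class GraphJobInvariants
+{
+    private static readonly HashSet<GraphJobStatus> Terminal = new()
+    {
+        GraphJobStatus.Completed,
+        GraphJobStatus.Failed,
+        GraphJobStatus.Cancelled,
+    };
+
+    public static void Check(
+        GraphJobStatus nextStatus,
+        int previousAttempts,
+        int nextAttempts,
+        DateTimeOffset? completedAt,
+        string? error)
+    {
+        // Attempts are monotonic across retries.
+        if (nextAttempts < previousAttempts)
+            throw new InvalidOperationException("Attempts must not decrease.");
+
+        // Terminal states (completed|failed|cancelled) require completedAt.
+        if (Terminal.Contains(nextStatus) && completedAt is null)
+            throw new InvalidOperationException("Terminal states require completedAt.");
+
+        // Failures require an error message.
+        if (nextStatus == GraphJobStatus.Failed && string.IsNullOrWhiteSpace(error))
+            throw new InvalidOperationException("Failed jobs must carry an error.");
+    }
+}
+```
+
+Real callers should rely on `GraphJobStateMachine.CanTransition` / `EnsureTransition(...)` rather than re-implementing these checks.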
+ +## Status & trigger matrix + +| Enum | JSON values | +| --- | --- | +| `GraphJobStatus` | `pending`, `queued`, `running`, `completed`, `failed`, `cancelled` | +| `GraphBuildJobTrigger` | `sbom-version`, `backfill`, `manual` | +| `GraphOverlayJobTrigger` | `policy`, `advisory`, `vex`, `sbom-version`, `manual` | +| `GraphOverlayKind` | `policy`, `advisory`, `vex` | + +`GraphJobStateMachine` exposes `CanTransition` and `EnsureTransition(...)` helpers to keep scheduler workers deterministic and to centralize validation logic. Callers must provide an error message when moving to `failed`; other states clear the error automatically. + +--- + +## Published samples + +- `samples/api/scheduler/graph-build-job.json` – canonical Cartographer build request snapshot (status `running`, one retry). +- `samples/api/scheduler/graph-overlay-job.json` – queued policy overlay job with deduplicated `subjects`. +- `docs/events/samples/scheduler.graph.job.completed@1.sample.json` – legacy completion event embedding the canonical job payload for downstream caches/UI. + +Tests in `StellaOps.Scheduler.Models.Tests/SamplePayloadTests.cs` validate the job fixtures against the canonical serializer. + +--- + +## Events + +Scheduler emits `scheduler.graph.job.completed@1` when a graph build or overlay job reaches `completed`, `failed`, or `cancelled`. Schema lives at `docs/events/scheduler.graph.job.completed@1.json` (legacy envelope) and the sample above illustrates the canonical payload. Downstream services should validate their consumers against the schema and budget for eventual migration to the orchestrator envelope once Cartographer hooks are promoted. + +--- + +## Compliance checklist + +| Item | Owner | Status | Notes | +| --- | --- | --- | --- | +| Canonical graph job samples committed under `samples/api/scheduler` | Scheduler Models Guild | ☑ 2025-10-26 | Round-trip tests cover both payloads. | +| Schema doc published with trigger/status matrix and sample references | Scheduler Models Guild | ☑ 2025-10-26 | This document. | +| Event schema + sample published under `docs/events/` | Scheduler Models Guild | ☑ 2025-10-26 | `scheduler.graph.job.completed@1` covers terminal job events. | +| Notify Scheduler WebService & Worker guilds about new DTO availability | Scheduler Models Guild | ☑ 2025-10-26 | Announcement posted (see `docs/updates/2025-10-26-scheduler-graph-jobs.md`). | +| Notify Cartographer Guild about expected job metadata (`graphSnapshotId`, `cartographerJobId`) | Scheduler Models Guild | ☑ 2025-10-26 | Included in Cartographer sync note (`docs/updates/2025-10-26-scheduler-graph-jobs.md`). 
| diff --git a/src/StellaOps.Scheduler.Queue/AGENTS.md b/src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/AGENTS.md similarity index 100% rename from src/StellaOps.Scheduler.Queue/AGENTS.md rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/AGENTS.md diff --git a/src/StellaOps.Scheduler.Queue/AssemblyInfo.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/AssemblyInfo.cs similarity index 100% rename from src/StellaOps.Scheduler.Queue/AssemblyInfo.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/AssemblyInfo.cs diff --git a/src/StellaOps.Scheduler.Queue/ISchedulerQueueTransportDiagnostics.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/ISchedulerQueueTransportDiagnostics.cs similarity index 95% rename from src/StellaOps.Scheduler.Queue/ISchedulerQueueTransportDiagnostics.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/ISchedulerQueueTransportDiagnostics.cs index f7fdbcd6..34013ca3 100644 --- a/src/StellaOps.Scheduler.Queue/ISchedulerQueueTransportDiagnostics.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/ISchedulerQueueTransportDiagnostics.cs @@ -1,9 +1,9 @@ -using System.Threading; -using System.Threading.Tasks; - -namespace StellaOps.Scheduler.Queue; - -internal interface ISchedulerQueueTransportDiagnostics -{ - ValueTask PingAsync(CancellationToken cancellationToken); -} +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Scheduler.Queue; + +internal interface ISchedulerQueueTransportDiagnostics +{ + ValueTask PingAsync(CancellationToken cancellationToken); +} diff --git a/src/StellaOps.Scheduler.Queue/Nats/INatsSchedulerQueuePayload.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/Nats/INatsSchedulerQueuePayload.cs similarity index 95% rename from src/StellaOps.Scheduler.Queue/Nats/INatsSchedulerQueuePayload.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/Nats/INatsSchedulerQueuePayload.cs index 567fd733..c11a1381 100644 --- a/src/StellaOps.Scheduler.Queue/Nats/INatsSchedulerQueuePayload.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/Nats/INatsSchedulerQueuePayload.cs @@ -1,26 +1,26 @@ -using System.Collections.Generic; - -namespace StellaOps.Scheduler.Queue.Nats; - -internal interface INatsSchedulerQueuePayload<TMessage> -{ - string QueueName { get; } - - string GetIdempotencyKey(TMessage message); - - byte[] Serialize(TMessage message); - - TMessage Deserialize(byte[] payload); - - string GetRunId(TMessage message); - - string GetTenantId(TMessage message); - - string? GetScheduleId(TMessage message); - - string? GetSegmentId(TMessage message); - - string? GetCorrelationId(TMessage message); - - IReadOnlyDictionary<string, string>? GetAttributes(TMessage message); -} +using System.Collections.Generic; + +namespace StellaOps.Scheduler.Queue.Nats; + +internal interface INatsSchedulerQueuePayload<TMessage> +{ + string QueueName { get; } + + string GetIdempotencyKey(TMessage message); + + byte[] Serialize(TMessage message); + + TMessage Deserialize(byte[] payload); + + string GetRunId(TMessage message); + + string GetTenantId(TMessage message); + + string? GetScheduleId(TMessage message); + + string? GetSegmentId(TMessage message); + + string? GetCorrelationId(TMessage message); + + IReadOnlyDictionary<string, string>? 
GetAttributes(TMessage message); +} diff --git a/src/StellaOps.Scheduler.Queue/Nats/NatsSchedulerPlannerQueue.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/Nats/NatsSchedulerPlannerQueue.cs similarity index 97% rename from src/StellaOps.Scheduler.Queue/Nats/NatsSchedulerPlannerQueue.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/Nats/NatsSchedulerPlannerQueue.cs index 37416b7d..16c7b2a0 100644 --- a/src/StellaOps.Scheduler.Queue/Nats/NatsSchedulerPlannerQueue.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/Nats/NatsSchedulerPlannerQueue.cs @@ -1,66 +1,66 @@ -using System; -using System.Collections.Generic; -using System.Text; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using NATS.Client.Core; -using NATS.Client.JetStream; -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.Queue.Nats; - -internal sealed class NatsSchedulerPlannerQueue - : NatsSchedulerQueueBase<PlannerQueueMessage>, ISchedulerPlannerQueue -{ - public NatsSchedulerPlannerQueue( - SchedulerQueueOptions queueOptions, - SchedulerNatsQueueOptions natsOptions, - ILogger<NatsSchedulerPlannerQueue> logger, - TimeProvider timeProvider, - Func<NatsOpts, CancellationToken, ValueTask<NatsConnection>>? connectionFactory = null) - : base( - queueOptions, - natsOptions, - natsOptions.Planner, - PlannerPayload.Instance, - logger, - timeProvider, - connectionFactory) - { - } - - private sealed class PlannerPayload : INatsSchedulerQueuePayload<PlannerQueueMessage> - { - public static PlannerPayload Instance { get; } = new(); - - public string QueueName => "planner"; - - public string GetIdempotencyKey(PlannerQueueMessage message) - => message.IdempotencyKey; - - public byte[] Serialize(PlannerQueueMessage message) - => Encoding.UTF8.GetBytes(CanonicalJsonSerializer.Serialize(message)); - - public PlannerQueueMessage Deserialize(byte[] payload) - => CanonicalJsonSerializer.Deserialize<PlannerQueueMessage>(Encoding.UTF8.GetString(payload)); - - public string GetRunId(PlannerQueueMessage message) - => message.Run.Id; - - public string GetTenantId(PlannerQueueMessage message) - => message.Run.TenantId; - - public string? GetScheduleId(PlannerQueueMessage message) - => message.ScheduleId; - - public string? GetSegmentId(PlannerQueueMessage message) - => null; - - public string? GetCorrelationId(PlannerQueueMessage message) - => message.CorrelationId; - - public IReadOnlyDictionary<string, string>? GetAttributes(PlannerQueueMessage message) - => null; - } -} +using System; +using System.Collections.Generic; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using NATS.Client.Core; +using NATS.Client.JetStream; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.Queue.Nats; + +internal sealed class NatsSchedulerPlannerQueue + : NatsSchedulerQueueBase<PlannerQueueMessage>, ISchedulerPlannerQueue +{ + public NatsSchedulerPlannerQueue( + SchedulerQueueOptions queueOptions, + SchedulerNatsQueueOptions natsOptions, + ILogger<NatsSchedulerPlannerQueue> logger, + TimeProvider timeProvider, + Func<NatsOpts, CancellationToken, ValueTask<NatsConnection>>? 
connectionFactory = null) + : base( + queueOptions, + natsOptions, + natsOptions.Planner, + PlannerPayload.Instance, + logger, + timeProvider, + connectionFactory) + { + } + + private sealed class PlannerPayload : INatsSchedulerQueuePayload<PlannerQueueMessage> + { + public static PlannerPayload Instance { get; } = new(); + + public string QueueName => "planner"; + + public string GetIdempotencyKey(PlannerQueueMessage message) + => message.IdempotencyKey; + + public byte[] Serialize(PlannerQueueMessage message) + => Encoding.UTF8.GetBytes(CanonicalJsonSerializer.Serialize(message)); + + public PlannerQueueMessage Deserialize(byte[] payload) + => CanonicalJsonSerializer.Deserialize<PlannerQueueMessage>(Encoding.UTF8.GetString(payload)); + + public string GetRunId(PlannerQueueMessage message) + => message.Run.Id; + + public string GetTenantId(PlannerQueueMessage message) + => message.Run.TenantId; + + public string? GetScheduleId(PlannerQueueMessage message) + => message.ScheduleId; + + public string? GetSegmentId(PlannerQueueMessage message) + => null; + + public string? GetCorrelationId(PlannerQueueMessage message) + => message.CorrelationId; + + public IReadOnlyDictionary<string, string>? GetAttributes(PlannerQueueMessage message) + => null; + } +} diff --git a/src/StellaOps.Scheduler.Queue/Nats/NatsSchedulerQueueBase.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/Nats/NatsSchedulerQueueBase.cs similarity index 97% rename from src/StellaOps.Scheduler.Queue/Nats/NatsSchedulerQueueBase.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/Nats/NatsSchedulerQueueBase.cs index c14514a3..2024203a 100644 --- a/src/StellaOps.Scheduler.Queue/Nats/NatsSchedulerQueueBase.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/Nats/NatsSchedulerQueueBase.cs @@ -1,692 +1,692 @@ -using System; -using System.Collections.Generic; -using System.Collections.ObjectModel; -using System.Linq; -using System.Text; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using NATS.Client.Core; -using NATS.Client.JetStream; -using NATS.Client.JetStream.Models; - -namespace StellaOps.Scheduler.Queue.Nats; - -internal abstract class NatsSchedulerQueueBase<TMessage> : ISchedulerQueue<TMessage>, IAsyncDisposable, ISchedulerQueueTransportDiagnostics -{ - private const string TransportName = "nats"; - - private static readonly INatsSerializer<byte[]> PayloadSerializer = NatsRawSerializer<byte[]>.Default; - - private readonly SchedulerQueueOptions _queueOptions; - private readonly SchedulerNatsQueueOptions _natsOptions; - private readonly SchedulerNatsStreamOptions _streamOptions; - private readonly INatsSchedulerQueuePayload<TMessage> _payload; - private readonly ILogger _logger; - private readonly TimeProvider _timeProvider; - private readonly SemaphoreSlim _connectionGate = new(1, 1); - private readonly Func<NatsOpts, CancellationToken, ValueTask<NatsConnection>> _connectionFactory; - - private NatsConnection? _connection; - private NatsJSContext? _jsContext; - private INatsJSConsumer? _consumer; - private bool _disposed; - private long _approximateDepth; - - protected NatsSchedulerQueueBase( - SchedulerQueueOptions queueOptions, - SchedulerNatsQueueOptions natsOptions, - SchedulerNatsStreamOptions streamOptions, - INatsSchedulerQueuePayload<TMessage> payload, - ILogger logger, - TimeProvider timeProvider, - Func<NatsOpts, CancellationToken, ValueTask<NatsConnection>>? connectionFactory = null) - { - _queueOptions = queueOptions ?? 
throw new ArgumentNullException(nameof(queueOptions)); - _natsOptions = natsOptions ?? throw new ArgumentNullException(nameof(natsOptions)); - _streamOptions = streamOptions ?? throw new ArgumentNullException(nameof(streamOptions)); - _payload = payload ?? throw new ArgumentNullException(nameof(payload)); - _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - _timeProvider = timeProvider ?? TimeProvider.System; - _connectionFactory = connectionFactory ?? ((opts, cancellationToken) => new ValueTask<NatsConnection>(new NatsConnection(opts))); - - if (string.IsNullOrWhiteSpace(_natsOptions.Url)) - { - throw new InvalidOperationException("NATS connection URL must be configured for the scheduler queue."); - } - } - - public async ValueTask<SchedulerQueueEnqueueResult> EnqueueAsync( - TMessage message, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(message); - - var js = await GetJetStreamAsync(cancellationToken).ConfigureAwait(false); - await EnsureStreamAndConsumerAsync(js, cancellationToken).ConfigureAwait(false); - - var payloadBytes = _payload.Serialize(message); - var idempotencyKey = _payload.GetIdempotencyKey(message); - var headers = BuildHeaders(message, idempotencyKey); - - var publishOptions = new NatsJSPubOpts - { - MsgId = idempotencyKey, - RetryAttempts = 0 - }; - - var ack = await js.PublishAsync( - _streamOptions.Subject, - payloadBytes, - PayloadSerializer, - publishOptions, - headers, - cancellationToken) - .ConfigureAwait(false); - - if (ack.Duplicate) - { - SchedulerQueueMetrics.RecordDeduplicated(TransportName, _payload.QueueName); - _logger.LogDebug( - "Duplicate enqueue detected for scheduler {Queue} message idempotency key {Key}; sequence {Sequence} reused.", - _payload.QueueName, - idempotencyKey, - ack.Seq); - - PublishDepth(); - return new SchedulerQueueEnqueueResult(ack.Seq.ToString(), true); - } - - SchedulerQueueMetrics.RecordEnqueued(TransportName, _payload.QueueName); - _logger.LogDebug( - "Enqueued scheduler {Queue} message into stream {Stream} with sequence {Sequence}.", - _payload.QueueName, - ack.Stream, - ack.Seq); - - IncrementDepth(); - return new SchedulerQueueEnqueueResult(ack.Seq.ToString(), false); - } - - public async ValueTask<IReadOnlyList<ISchedulerQueueLease<TMessage>>> LeaseAsync( - SchedulerQueueLeaseRequest request, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(request); - - var js = await GetJetStreamAsync(cancellationToken).ConfigureAwait(false); - var consumer = await EnsureStreamAndConsumerAsync(js, cancellationToken).ConfigureAwait(false); - - var fetchOpts = new NatsJSFetchOpts - { - MaxMsgs = request.BatchSize, - Expires = request.LeaseDuration, - IdleHeartbeat = _natsOptions.IdleHeartbeat - }; - - var now = _timeProvider.GetUtcNow(); - var leases = new List<ISchedulerQueueLease<TMessage>>(request.BatchSize); - - await foreach (var message in consumer.FetchAsync(PayloadSerializer, fetchOpts, cancellationToken).ConfigureAwait(false)) - { - var lease = CreateLease(message, request.Consumer, now, request.LeaseDuration); - if (lease is not null) - { - leases.Add(lease); - } - } - - PublishDepth(); - return leases; - } - - public async ValueTask<IReadOnlyList<ISchedulerQueueLease<TMessage>>> ClaimExpiredAsync( - SchedulerQueueClaimOptions options, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(options); - - var js = await GetJetStreamAsync(cancellationToken).ConfigureAwait(false); - var consumer = 
await EnsureStreamAndConsumerAsync(js, cancellationToken).ConfigureAwait(false); - - var fetchOpts = new NatsJSFetchOpts - { - MaxMsgs = options.BatchSize, - Expires = options.MinIdleTime, - IdleHeartbeat = _natsOptions.IdleHeartbeat - }; - - var now = _timeProvider.GetUtcNow(); - var leases = new List<ISchedulerQueueLease<TMessage>>(options.BatchSize); - - await foreach (var message in consumer.FetchAsync(PayloadSerializer, fetchOpts, cancellationToken).ConfigureAwait(false)) - { - var deliveries = (int)(message.Metadata?.NumDelivered ?? 1); - if (deliveries <= 1) - { - await message.NakAsync(new AckOpts(), TimeSpan.Zero, cancellationToken).ConfigureAwait(false); - continue; - } - - var lease = CreateLease(message, options.ClaimantConsumer, now, _queueOptions.DefaultLeaseDuration); - if (lease is not null) - { - leases.Add(lease); - } - } - - PublishDepth(); - return leases; - } - - public async ValueTask DisposeAsync() - { - if (_disposed) - { - return; - } - - _disposed = true; - - if (_connection is not null) - { - await _connection.DisposeAsync().ConfigureAwait(false); - } - - _connectionGate.Dispose(); - SchedulerQueueMetrics.RemoveDepth(TransportName, _payload.QueueName); - GC.SuppressFinalize(this); - } - - public async ValueTask PingAsync(CancellationToken cancellationToken) - { - var connection = await EnsureConnectionAsync(cancellationToken).ConfigureAwait(false); - await connection.PingAsync(cancellationToken).ConfigureAwait(false); - } - - internal async Task AcknowledgeAsync(NatsSchedulerQueueLease<TMessage> lease, CancellationToken cancellationToken) - { - if (!lease.TryBeginCompletion()) - { - return; - } - - await lease.RawMessage.AckAsync(new AckOpts(), cancellationToken).ConfigureAwait(false); - SchedulerQueueMetrics.RecordAck(TransportName, _payload.QueueName); - DecrementDepth(); - } - - internal async Task RenewAsync(NatsSchedulerQueueLease<TMessage> lease, TimeSpan leaseDuration, CancellationToken cancellationToken) - { - await lease.RawMessage.AckProgressAsync(new AckOpts(), cancellationToken).ConfigureAwait(false); - lease.RefreshLease(_timeProvider.GetUtcNow().Add(leaseDuration)); - } - - internal async Task ReleaseAsync(NatsSchedulerQueueLease<TMessage> lease, SchedulerQueueReleaseDisposition disposition, CancellationToken cancellationToken) - { - if (disposition == SchedulerQueueReleaseDisposition.Retry && lease.Attempt >= _queueOptions.MaxDeliveryAttempts) - { - await DeadLetterAsync(lease, $"max-delivery-attempts:{lease.Attempt}", cancellationToken).ConfigureAwait(false); - return; - } - - if (!lease.TryBeginCompletion()) - { - return; - } - - if (disposition == SchedulerQueueReleaseDisposition.Retry) - { - SchedulerQueueMetrics.RecordRetry(TransportName, _payload.QueueName); - var delay = CalculateBackoff(lease.Attempt + 1); - lease.IncrementAttempt(); - await lease.RawMessage.NakAsync(new AckOpts(), delay, cancellationToken).ConfigureAwait(false); - _logger.LogWarning( - "Requeued scheduler {Queue} message {RunId} with delay {Delay} (attempt {Attempt}).", - _payload.QueueName, - lease.RunId, - delay, - lease.Attempt); - } - else - { - await lease.RawMessage.AckTerminateAsync(new AckOpts(), cancellationToken).ConfigureAwait(false); - SchedulerQueueMetrics.RecordAck(TransportName, _payload.QueueName); - DecrementDepth(); - _logger.LogInformation( - "Abandoned scheduler {Queue} message {RunId} after {Attempt} attempt(s).", - _payload.QueueName, - lease.RunId, - lease.Attempt); - } - - PublishDepth(); - } - - internal async Task 
DeadLetterAsync(NatsSchedulerQueueLease<TMessage> lease, string reason, CancellationToken cancellationToken) - { - if (!lease.TryBeginCompletion()) - { - return; - } - - await lease.RawMessage.AckAsync(new AckOpts(), cancellationToken).ConfigureAwait(false); - DecrementDepth(); - - var js = await GetJetStreamAsync(cancellationToken).ConfigureAwait(false); - - if (!_queueOptions.DeadLetterEnabled) - { - _logger.LogWarning( - "Dropped scheduler {Queue} message {RunId} after {Attempt} attempt(s); dead-letter disabled. Reason: {Reason}", - _payload.QueueName, - lease.RunId, - lease.Attempt, - reason); - PublishDepth(); - return; - } - - await EnsureDeadLetterStreamAsync(js, cancellationToken).ConfigureAwait(false); - - var headers = BuildDeadLetterHeaders(lease, reason); - await js.PublishAsync( - _streamOptions.DeadLetterSubject, - lease.Payload, - PayloadSerializer, - new NatsJSPubOpts(), - headers, - cancellationToken) - .ConfigureAwait(false); - - SchedulerQueueMetrics.RecordDeadLetter(TransportName, _payload.QueueName); - _logger.LogError( - "Dead-lettered scheduler {Queue} message {RunId} after {Attempt} attempt(s): {Reason}", - _payload.QueueName, - lease.RunId, - lease.Attempt, - reason); - PublishDepth(); - } - - private async Task<NatsJSContext> GetJetStreamAsync(CancellationToken cancellationToken) - { - if (_jsContext is not null) - { - return _jsContext; - } - - var connection = await EnsureConnectionAsync(cancellationToken).ConfigureAwait(false); - - await _connectionGate.WaitAsync(cancellationToken).ConfigureAwait(false); - try - { - _jsContext ??= new NatsJSContext(connection); - return _jsContext; - } - finally - { - _connectionGate.Release(); - } - } - - private async ValueTask<INatsJSConsumer> EnsureStreamAndConsumerAsync(NatsJSContext js, CancellationToken cancellationToken) - { - if (_consumer is not null) - { - return _consumer; - } - - await _connectionGate.WaitAsync(cancellationToken).ConfigureAwait(false); - try - { - if (_consumer is not null) - { - return _consumer; - } - - await EnsureStreamAsync(js, cancellationToken).ConfigureAwait(false); - await EnsureDeadLetterStreamAsync(js, cancellationToken).ConfigureAwait(false); - - var consumerConfig = new ConsumerConfig - { - DurableName = _streamOptions.DurableConsumer, - AckPolicy = ConsumerConfigAckPolicy.Explicit, - ReplayPolicy = ConsumerConfigReplayPolicy.Instant, - DeliverPolicy = ConsumerConfigDeliverPolicy.All, - AckWait = ToNanoseconds(_streamOptions.AckWait), - MaxAckPending = Math.Max(1, _streamOptions.MaxAckPending), - MaxDeliver = Math.Max(1, _queueOptions.MaxDeliveryAttempts), - FilterSubjects = new[] { _streamOptions.Subject } - }; - - try - { - _consumer = await js.CreateConsumerAsync( - _streamOptions.Stream, - consumerConfig, - cancellationToken) - .ConfigureAwait(false); - } - catch (NatsJSApiException apiEx) - { - _logger.LogDebug(apiEx, - "CreateConsumerAsync failed with code {Code}; attempting to reuse durable {Durable}.", - apiEx.Error?.Code, - _streamOptions.DurableConsumer); - - _consumer = await js.GetConsumerAsync( - _streamOptions.Stream, - _streamOptions.DurableConsumer, - cancellationToken) - .ConfigureAwait(false); - } - - return _consumer; - } - finally - { - _connectionGate.Release(); - } - } - - private async Task EnsureStreamAsync(NatsJSContext js, CancellationToken cancellationToken) - { - try - { - await js.GetStreamAsync( - _streamOptions.Stream, - new StreamInfoRequest(), - cancellationToken) - .ConfigureAwait(false); - } - catch (NatsJSApiException) - { - var config = new 
StreamConfig( - name: _streamOptions.Stream, - subjects: new[] { _streamOptions.Subject }) - { - Retention = StreamConfigRetention.Workqueue, - Storage = StreamConfigStorage.File, - MaxConsumers = -1, - MaxMsgs = -1, - MaxBytes = -1, - MaxAge = 0 - }; - - await js.CreateStreamAsync(config, cancellationToken).ConfigureAwait(false); - _logger.LogInformation( - "Created NATS JetStream stream {Stream} ({Subject}) for scheduler {Queue} queue.", - _streamOptions.Stream, - _streamOptions.Subject, - _payload.QueueName); - } - } - - private async Task EnsureDeadLetterStreamAsync(NatsJSContext js, CancellationToken cancellationToken) - { - if (string.IsNullOrWhiteSpace(_streamOptions.DeadLetterStream) || string.IsNullOrWhiteSpace(_streamOptions.DeadLetterSubject)) - { - return; - } - - try - { - await js.GetStreamAsync( - _streamOptions.DeadLetterStream, - new StreamInfoRequest(), - cancellationToken) - .ConfigureAwait(false); - } - catch (NatsJSApiException) - { - var config = new StreamConfig( - name: _streamOptions.DeadLetterStream, - subjects: new[] { _streamOptions.DeadLetterSubject }) - { - Retention = StreamConfigRetention.Workqueue, - Storage = StreamConfigStorage.File, - MaxConsumers = -1, - MaxMsgs = -1, - MaxBytes = -1, - MaxAge = 0 - }; - - await js.CreateStreamAsync(config, cancellationToken).ConfigureAwait(false); - _logger.LogInformation( - "Created NATS JetStream dead-letter stream {Stream} ({Subject}) for scheduler {Queue} queue.", - _streamOptions.DeadLetterStream, - _streamOptions.DeadLetterSubject, - _payload.QueueName); - } - } - - private async Task<NatsConnection> EnsureConnectionAsync(CancellationToken cancellationToken) - { - if (_connection is not null) - { - return _connection; - } - - await _connectionGate.WaitAsync(cancellationToken).ConfigureAwait(false); - try - { - if (_connection is not null) - { - return _connection; - } - - var options = new NatsOpts - { - Url = _natsOptions.Url!, - Name = $"stellaops-scheduler-{_payload.QueueName}-queue", - CommandTimeout = TimeSpan.FromSeconds(10), - RequestTimeout = TimeSpan.FromSeconds(20), - PingInterval = TimeSpan.FromSeconds(30) - }; - - _connection = await _connectionFactory(options, cancellationToken).ConfigureAwait(false); - await _connection.ConnectAsync().ConfigureAwait(false); - return _connection; - } - finally - { - _connectionGate.Release(); - } - } - - private NatsSchedulerQueueLease<TMessage>? CreateLease( - NatsJSMsg<byte[]> message, - string consumer, - DateTimeOffset now, - TimeSpan leaseDuration) - { - var payload = message.Data ?? ReadOnlyMemory<byte>.Empty; - if (payload.IsEmpty) - { - return null; - } - - TMessage deserialized; - try - { - deserialized = _payload.Deserialize(payload.ToArray()); - } - catch (Exception ex) - { - _logger.LogError(ex, "Failed to deserialize scheduler {Queue} payload from NATS sequence {Sequence}.", _payload.QueueName, message.Metadata?.Sequence); - return null; - } - - var attempt = (int)(message.Metadata?.NumDelivered ?? 1); - if (attempt <= 0) - { - attempt = 1; - } - - var headers = message.Headers ?? new NatsHeaders(); - - var enqueuedAt = headers.TryGetValue(SchedulerQueueFields.EnqueuedAt, out var enqueuedValues) && enqueuedValues.Count > 0 - && long.TryParse(enqueuedValues[0], out var unix) - ? 
DateTimeOffset.FromUnixTimeMilliseconds(unix) - : now; - - var leaseExpires = now.Add(leaseDuration); - var runId = _payload.GetRunId(deserialized); - var tenantId = _payload.GetTenantId(deserialized); - var scheduleId = _payload.GetScheduleId(deserialized); - var segmentId = _payload.GetSegmentId(deserialized); - var correlationId = _payload.GetCorrelationId(deserialized); - var attributes = _payload.GetAttributes(deserialized) ?? new Dictionary<string, string>(); - - var attributeView = attributes.Count == 0 - ? EmptyReadOnlyDictionary<string, string>.Instance - : new ReadOnlyDictionary<string, string>(new Dictionary<string, string>(attributes, StringComparer.Ordinal)); - - return new NatsSchedulerQueueLease<TMessage>( - this, - message, - payload.ToArray(), - _payload.GetIdempotencyKey(deserialized), - runId, - tenantId, - scheduleId, - segmentId, - correlationId, - attributeView, - deserialized, - attempt, - enqueuedAt, - leaseExpires, - consumer); - } - - private NatsHeaders BuildHeaders(TMessage message, string idempotencyKey) - { - var headers = new NatsHeaders - { - { SchedulerQueueFields.IdempotencyKey, idempotencyKey }, - { SchedulerQueueFields.RunId, _payload.GetRunId(message) }, - { SchedulerQueueFields.TenantId, _payload.GetTenantId(message) }, - { SchedulerQueueFields.QueueKind, _payload.QueueName }, - { SchedulerQueueFields.EnqueuedAt, _timeProvider.GetUtcNow().ToUnixTimeMilliseconds().ToString() } - }; - - var scheduleId = _payload.GetScheduleId(message); - if (!string.IsNullOrWhiteSpace(scheduleId)) - { - headers.Add(SchedulerQueueFields.ScheduleId, scheduleId); - } - - var segmentId = _payload.GetSegmentId(message); - if (!string.IsNullOrWhiteSpace(segmentId)) - { - headers.Add(SchedulerQueueFields.SegmentId, segmentId); - } - - var correlationId = _payload.GetCorrelationId(message); - if (!string.IsNullOrWhiteSpace(correlationId)) - { - headers.Add(SchedulerQueueFields.CorrelationId, correlationId); - } - - var attributes = _payload.GetAttributes(message); - if (attributes is not null) - { - foreach (var kvp in attributes) - { - headers.Add(SchedulerQueueFields.AttributePrefix + kvp.Key, kvp.Value); - } - } - - return headers; - } - - private NatsHeaders BuildDeadLetterHeaders(NatsSchedulerQueueLease<TMessage> lease, string reason) - { - var headers = new NatsHeaders - { - { SchedulerQueueFields.RunId, lease.RunId }, - { SchedulerQueueFields.TenantId, lease.TenantId }, - { SchedulerQueueFields.QueueKind, _payload.QueueName }, - { "reason", reason } - }; - - if (!string.IsNullOrWhiteSpace(lease.ScheduleId)) - { - headers.Add(SchedulerQueueFields.ScheduleId, lease.ScheduleId); - } - - if (!string.IsNullOrWhiteSpace(lease.CorrelationId)) - { - headers.Add(SchedulerQueueFields.CorrelationId, lease.CorrelationId); - } - - if (!string.IsNullOrWhiteSpace(lease.SegmentId)) - { - headers.Add(SchedulerQueueFields.SegmentId, lease.SegmentId); - } - - return headers; - } - - private TimeSpan CalculateBackoff(int attempt) - { - var initial = _queueOptions.RetryInitialBackoff > TimeSpan.Zero - ? _queueOptions.RetryInitialBackoff - : _streamOptions.RetryDelay; - - if (initial <= TimeSpan.Zero) - { - return TimeSpan.Zero; - } - - if (attempt <= 1) - { - return initial; - } - - var max = _queueOptions.RetryMaxBackoff > TimeSpan.Zero - ? 
_queueOptions.RetryMaxBackoff - : initial; - - var exponent = attempt - 1; - var scaledTicks = initial.Ticks * Math.Pow(2, exponent - 1); - var cappedTicks = Math.Min(max.Ticks, scaledTicks); - - return TimeSpan.FromTicks((long)Math.Max(initial.Ticks, cappedTicks)); - } - - private static long ToNanoseconds(TimeSpan value) - => value <= TimeSpan.Zero ? 0 : (long)(value.TotalMilliseconds * 1_000_000.0); - - private sealed class EmptyReadOnlyDictionary<TKey, TValue> - where TKey : notnull - { - public static readonly IReadOnlyDictionary<TKey, TValue> Instance = - new ReadOnlyDictionary<TKey, TValue>(new Dictionary<TKey, TValue>(0, EqualityComparer<TKey>.Default)); - } - - private void IncrementDepth() - { - var depth = Interlocked.Increment(ref _approximateDepth); - SchedulerQueueMetrics.RecordDepth(TransportName, _payload.QueueName, depth); - } - - private void DecrementDepth() - { - var depth = Interlocked.Decrement(ref _approximateDepth); - if (depth < 0) - { - depth = Interlocked.Exchange(ref _approximateDepth, 0); - } - - SchedulerQueueMetrics.RecordDepth(TransportName, _payload.QueueName, depth); - } - - private void PublishDepth() - { - var depth = Volatile.Read(ref _approximateDepth); - SchedulerQueueMetrics.RecordDepth(TransportName, _payload.QueueName, depth); - } -} +using System; +using System.Collections.Generic; +using System.Collections.ObjectModel; +using System.Linq; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using NATS.Client.Core; +using NATS.Client.JetStream; +using NATS.Client.JetStream.Models; + +namespace StellaOps.Scheduler.Queue.Nats; + +internal abstract class NatsSchedulerQueueBase<TMessage> : ISchedulerQueue<TMessage>, IAsyncDisposable, ISchedulerQueueTransportDiagnostics +{ + private const string TransportName = "nats"; + + private static readonly INatsSerializer<byte[]> PayloadSerializer = NatsRawSerializer<byte[]>.Default; + + private readonly SchedulerQueueOptions _queueOptions; + private readonly SchedulerNatsQueueOptions _natsOptions; + private readonly SchedulerNatsStreamOptions _streamOptions; + private readonly INatsSchedulerQueuePayload<TMessage> _payload; + private readonly ILogger _logger; + private readonly TimeProvider _timeProvider; + private readonly SemaphoreSlim _connectionGate = new(1, 1); + private readonly Func<NatsOpts, CancellationToken, ValueTask<NatsConnection>> _connectionFactory; + + private NatsConnection? _connection; + private NatsJSContext? _jsContext; + private INatsJSConsumer? _consumer; + private bool _disposed; + private long _approximateDepth; + + protected NatsSchedulerQueueBase( + SchedulerQueueOptions queueOptions, + SchedulerNatsQueueOptions natsOptions, + SchedulerNatsStreamOptions streamOptions, + INatsSchedulerQueuePayload<TMessage> payload, + ILogger logger, + TimeProvider timeProvider, + Func<NatsOpts, CancellationToken, ValueTask<NatsConnection>>? connectionFactory = null) + { + _queueOptions = queueOptions ?? throw new ArgumentNullException(nameof(queueOptions)); + _natsOptions = natsOptions ?? throw new ArgumentNullException(nameof(natsOptions)); + _streamOptions = streamOptions ?? throw new ArgumentNullException(nameof(streamOptions)); + _payload = payload ?? throw new ArgumentNullException(nameof(payload)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _timeProvider = timeProvider ?? TimeProvider.System; + _connectionFactory = connectionFactory ?? 
((opts, cancellationToken) => new ValueTask<NatsConnection>(new NatsConnection(opts))); + + if (string.IsNullOrWhiteSpace(_natsOptions.Url)) + { + throw new InvalidOperationException("NATS connection URL must be configured for the scheduler queue."); + } + } + + public async ValueTask<SchedulerQueueEnqueueResult> EnqueueAsync( + TMessage message, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(message); + + var js = await GetJetStreamAsync(cancellationToken).ConfigureAwait(false); + await EnsureStreamAndConsumerAsync(js, cancellationToken).ConfigureAwait(false); + + var payloadBytes = _payload.Serialize(message); + var idempotencyKey = _payload.GetIdempotencyKey(message); + var headers = BuildHeaders(message, idempotencyKey); + + var publishOptions = new NatsJSPubOpts + { + MsgId = idempotencyKey, + RetryAttempts = 0 + }; + + var ack = await js.PublishAsync( + _streamOptions.Subject, + payloadBytes, + PayloadSerializer, + publishOptions, + headers, + cancellationToken) + .ConfigureAwait(false); + + if (ack.Duplicate) + { + SchedulerQueueMetrics.RecordDeduplicated(TransportName, _payload.QueueName); + _logger.LogDebug( + "Duplicate enqueue detected for scheduler {Queue} message idempotency key {Key}; sequence {Sequence} reused.", + _payload.QueueName, + idempotencyKey, + ack.Seq); + + PublishDepth(); + return new SchedulerQueueEnqueueResult(ack.Seq.ToString(), true); + } + + SchedulerQueueMetrics.RecordEnqueued(TransportName, _payload.QueueName); + _logger.LogDebug( + "Enqueued scheduler {Queue} message into stream {Stream} with sequence {Sequence}.", + _payload.QueueName, + ack.Stream, + ack.Seq); + + IncrementDepth(); + return new SchedulerQueueEnqueueResult(ack.Seq.ToString(), false); + } + + public async ValueTask<IReadOnlyList<ISchedulerQueueLease<TMessage>>> LeaseAsync( + SchedulerQueueLeaseRequest request, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(request); + + var js = await GetJetStreamAsync(cancellationToken).ConfigureAwait(false); + var consumer = await EnsureStreamAndConsumerAsync(js, cancellationToken).ConfigureAwait(false); + + var fetchOpts = new NatsJSFetchOpts + { + MaxMsgs = request.BatchSize, + Expires = request.LeaseDuration, + IdleHeartbeat = _natsOptions.IdleHeartbeat + }; + + var now = _timeProvider.GetUtcNow(); + var leases = new List<ISchedulerQueueLease<TMessage>>(request.BatchSize); + + await foreach (var message in consumer.FetchAsync(PayloadSerializer, fetchOpts, cancellationToken).ConfigureAwait(false)) + { + var lease = CreateLease(message, request.Consumer, now, request.LeaseDuration); + if (lease is not null) + { + leases.Add(lease); + } + } + + PublishDepth(); + return leases; + } + + public async ValueTask<IReadOnlyList<ISchedulerQueueLease<TMessage>>> ClaimExpiredAsync( + SchedulerQueueClaimOptions options, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(options); + + var js = await GetJetStreamAsync(cancellationToken).ConfigureAwait(false); + var consumer = await EnsureStreamAndConsumerAsync(js, cancellationToken).ConfigureAwait(false); + + var fetchOpts = new NatsJSFetchOpts + { + MaxMsgs = options.BatchSize, + Expires = options.MinIdleTime, + IdleHeartbeat = _natsOptions.IdleHeartbeat + }; + + var now = _timeProvider.GetUtcNow(); + var leases = new List<ISchedulerQueueLease<TMessage>>(options.BatchSize); + + await foreach (var message in consumer.FetchAsync(PayloadSerializer, fetchOpts, 
cancellationToken).ConfigureAwait(false)) + { + var deliveries = (int)(message.Metadata?.NumDelivered ?? 1); + if (deliveries <= 1) + { + await message.NakAsync(new AckOpts(), TimeSpan.Zero, cancellationToken).ConfigureAwait(false); + continue; + } + + var lease = CreateLease(message, options.ClaimantConsumer, now, _queueOptions.DefaultLeaseDuration); + if (lease is not null) + { + leases.Add(lease); + } + } + + PublishDepth(); + return leases; + } + + public async ValueTask DisposeAsync() + { + if (_disposed) + { + return; + } + + _disposed = true; + + if (_connection is not null) + { + await _connection.DisposeAsync().ConfigureAwait(false); + } + + _connectionGate.Dispose(); + SchedulerQueueMetrics.RemoveDepth(TransportName, _payload.QueueName); + GC.SuppressFinalize(this); + } + + public async ValueTask PingAsync(CancellationToken cancellationToken) + { + var connection = await EnsureConnectionAsync(cancellationToken).ConfigureAwait(false); + await connection.PingAsync(cancellationToken).ConfigureAwait(false); + } + + internal async Task AcknowledgeAsync(NatsSchedulerQueueLease<TMessage> lease, CancellationToken cancellationToken) + { + if (!lease.TryBeginCompletion()) + { + return; + } + + await lease.RawMessage.AckAsync(new AckOpts(), cancellationToken).ConfigureAwait(false); + SchedulerQueueMetrics.RecordAck(TransportName, _payload.QueueName); + DecrementDepth(); + } + + internal async Task RenewAsync(NatsSchedulerQueueLease<TMessage> lease, TimeSpan leaseDuration, CancellationToken cancellationToken) + { + await lease.RawMessage.AckProgressAsync(new AckOpts(), cancellationToken).ConfigureAwait(false); + lease.RefreshLease(_timeProvider.GetUtcNow().Add(leaseDuration)); + } + + internal async Task ReleaseAsync(NatsSchedulerQueueLease<TMessage> lease, SchedulerQueueReleaseDisposition disposition, CancellationToken cancellationToken) + { + if (disposition == SchedulerQueueReleaseDisposition.Retry && lease.Attempt >= _queueOptions.MaxDeliveryAttempts) + { + await DeadLetterAsync(lease, $"max-delivery-attempts:{lease.Attempt}", cancellationToken).ConfigureAwait(false); + return; + } + + if (!lease.TryBeginCompletion()) + { + return; + } + + if (disposition == SchedulerQueueReleaseDisposition.Retry) + { + SchedulerQueueMetrics.RecordRetry(TransportName, _payload.QueueName); + var delay = CalculateBackoff(lease.Attempt + 1); + lease.IncrementAttempt(); + await lease.RawMessage.NakAsync(new AckOpts(), delay, cancellationToken).ConfigureAwait(false); + _logger.LogWarning( + "Requeued scheduler {Queue} message {RunId} with delay {Delay} (attempt {Attempt}).", + _payload.QueueName, + lease.RunId, + delay, + lease.Attempt); + } + else + { + await lease.RawMessage.AckTerminateAsync(new AckOpts(), cancellationToken).ConfigureAwait(false); + SchedulerQueueMetrics.RecordAck(TransportName, _payload.QueueName); + DecrementDepth(); + _logger.LogInformation( + "Abandoned scheduler {Queue} message {RunId} after {Attempt} attempt(s).", + _payload.QueueName, + lease.RunId, + lease.Attempt); + } + + PublishDepth(); + } + + internal async Task DeadLetterAsync(NatsSchedulerQueueLease<TMessage> lease, string reason, CancellationToken cancellationToken) + { + if (!lease.TryBeginCompletion()) + { + return; + } + + await lease.RawMessage.AckAsync(new AckOpts(), cancellationToken).ConfigureAwait(false); + DecrementDepth(); + + var js = await GetJetStreamAsync(cancellationToken).ConfigureAwait(false); + + if (!_queueOptions.DeadLetterEnabled) + { + _logger.LogWarning( + "Dropped scheduler {Queue} message {RunId} 
after {Attempt} attempt(s); dead-letter disabled. Reason: {Reason}", + _payload.QueueName, + lease.RunId, + lease.Attempt, + reason); + PublishDepth(); + return; + } + + await EnsureDeadLetterStreamAsync(js, cancellationToken).ConfigureAwait(false); + + var headers = BuildDeadLetterHeaders(lease, reason); + await js.PublishAsync( + _streamOptions.DeadLetterSubject, + lease.Payload, + PayloadSerializer, + new NatsJSPubOpts(), + headers, + cancellationToken) + .ConfigureAwait(false); + + SchedulerQueueMetrics.RecordDeadLetter(TransportName, _payload.QueueName); + _logger.LogError( + "Dead-lettered scheduler {Queue} message {RunId} after {Attempt} attempt(s): {Reason}", + _payload.QueueName, + lease.RunId, + lease.Attempt, + reason); + PublishDepth(); + } + + private async Task<NatsJSContext> GetJetStreamAsync(CancellationToken cancellationToken) + { + if (_jsContext is not null) + { + return _jsContext; + } + + var connection = await EnsureConnectionAsync(cancellationToken).ConfigureAwait(false); + + await _connectionGate.WaitAsync(cancellationToken).ConfigureAwait(false); + try + { + _jsContext ??= new NatsJSContext(connection); + return _jsContext; + } + finally + { + _connectionGate.Release(); + } + } + + private async ValueTask<INatsJSConsumer> EnsureStreamAndConsumerAsync(NatsJSContext js, CancellationToken cancellationToken) + { + if (_consumer is not null) + { + return _consumer; + } + + await _connectionGate.WaitAsync(cancellationToken).ConfigureAwait(false); + try + { + if (_consumer is not null) + { + return _consumer; + } + + await EnsureStreamAsync(js, cancellationToken).ConfigureAwait(false); + await EnsureDeadLetterStreamAsync(js, cancellationToken).ConfigureAwait(false); + + var consumerConfig = new ConsumerConfig + { + DurableName = _streamOptions.DurableConsumer, + AckPolicy = ConsumerConfigAckPolicy.Explicit, + ReplayPolicy = ConsumerConfigReplayPolicy.Instant, + DeliverPolicy = ConsumerConfigDeliverPolicy.All, + AckWait = ToNanoseconds(_streamOptions.AckWait), + MaxAckPending = Math.Max(1, _streamOptions.MaxAckPending), + MaxDeliver = Math.Max(1, _queueOptions.MaxDeliveryAttempts), + FilterSubjects = new[] { _streamOptions.Subject } + }; + + try + { + _consumer = await js.CreateConsumerAsync( + _streamOptions.Stream, + consumerConfig, + cancellationToken) + .ConfigureAwait(false); + } + catch (NatsJSApiException apiEx) + { + _logger.LogDebug(apiEx, + "CreateConsumerAsync failed with code {Code}; attempting to reuse durable {Durable}.", + apiEx.Error?.Code, + _streamOptions.DurableConsumer); + + _consumer = await js.GetConsumerAsync( + _streamOptions.Stream, + _streamOptions.DurableConsumer, + cancellationToken) + .ConfigureAwait(false); + } + + return _consumer; + } + finally + { + _connectionGate.Release(); + } + } + + private async Task EnsureStreamAsync(NatsJSContext js, CancellationToken cancellationToken) + { + try + { + await js.GetStreamAsync( + _streamOptions.Stream, + new StreamInfoRequest(), + cancellationToken) + .ConfigureAwait(false); + } + catch (NatsJSApiException) + { + var config = new StreamConfig( + name: _streamOptions.Stream, + subjects: new[] { _streamOptions.Subject }) + { + Retention = StreamConfigRetention.Workqueue, + Storage = StreamConfigStorage.File, + MaxConsumers = -1, + MaxMsgs = -1, + MaxBytes = -1, + MaxAge = 0 + }; + + await js.CreateStreamAsync(config, cancellationToken).ConfigureAwait(false); + _logger.LogInformation( + "Created NATS JetStream stream {Stream} ({Subject}) for scheduler {Queue} queue.", + _streamOptions.Stream, + 
_streamOptions.Subject, + _payload.QueueName); + } + } + + private async Task EnsureDeadLetterStreamAsync(NatsJSContext js, CancellationToken cancellationToken) + { + if (string.IsNullOrWhiteSpace(_streamOptions.DeadLetterStream) || string.IsNullOrWhiteSpace(_streamOptions.DeadLetterSubject)) + { + return; + } + + try + { + await js.GetStreamAsync( + _streamOptions.DeadLetterStream, + new StreamInfoRequest(), + cancellationToken) + .ConfigureAwait(false); + } + catch (NatsJSApiException) + { + var config = new StreamConfig( + name: _streamOptions.DeadLetterStream, + subjects: new[] { _streamOptions.DeadLetterSubject }) + { + Retention = StreamConfigRetention.Workqueue, + Storage = StreamConfigStorage.File, + MaxConsumers = -1, + MaxMsgs = -1, + MaxBytes = -1, + MaxAge = 0 + }; + + await js.CreateStreamAsync(config, cancellationToken).ConfigureAwait(false); + _logger.LogInformation( + "Created NATS JetStream dead-letter stream {Stream} ({Subject}) for scheduler {Queue} queue.", + _streamOptions.DeadLetterStream, + _streamOptions.DeadLetterSubject, + _payload.QueueName); + } + } + + private async Task<NatsConnection> EnsureConnectionAsync(CancellationToken cancellationToken) + { + if (_connection is not null) + { + return _connection; + } + + await _connectionGate.WaitAsync(cancellationToken).ConfigureAwait(false); + try + { + if (_connection is not null) + { + return _connection; + } + + var options = new NatsOpts + { + Url = _natsOptions.Url!, + Name = $"stellaops-scheduler-{_payload.QueueName}-queue", + CommandTimeout = TimeSpan.FromSeconds(10), + RequestTimeout = TimeSpan.FromSeconds(20), + PingInterval = TimeSpan.FromSeconds(30) + }; + + _connection = await _connectionFactory(options, cancellationToken).ConfigureAwait(false); + await _connection.ConnectAsync().ConfigureAwait(false); + return _connection; + } + finally + { + _connectionGate.Release(); + } + } + + private NatsSchedulerQueueLease<TMessage>? CreateLease( + NatsJSMsg<byte[]> message, + string consumer, + DateTimeOffset now, + TimeSpan leaseDuration) + { + var payload = message.Data ?? ReadOnlyMemory<byte>.Empty; + if (payload.IsEmpty) + { + return null; + } + + TMessage deserialized; + try + { + deserialized = _payload.Deserialize(payload.ToArray()); + } + catch (Exception ex) + { + _logger.LogError(ex, "Failed to deserialize scheduler {Queue} payload from NATS sequence {Sequence}.", _payload.QueueName, message.Metadata?.Sequence); + return null; + } + + var attempt = (int)(message.Metadata?.NumDelivered ?? 1); + if (attempt <= 0) + { + attempt = 1; + } + + var headers = message.Headers ?? new NatsHeaders(); + + var enqueuedAt = headers.TryGetValue(SchedulerQueueFields.EnqueuedAt, out var enqueuedValues) && enqueuedValues.Count > 0 + && long.TryParse(enqueuedValues[0], out var unix) + ? DateTimeOffset.FromUnixTimeMilliseconds(unix) + : now; + + var leaseExpires = now.Add(leaseDuration); + var runId = _payload.GetRunId(deserialized); + var tenantId = _payload.GetTenantId(deserialized); + var scheduleId = _payload.GetScheduleId(deserialized); + var segmentId = _payload.GetSegmentId(deserialized); + var correlationId = _payload.GetCorrelationId(deserialized); + var attributes = _payload.GetAttributes(deserialized) ?? new Dictionary<string, string>(); + + var attributeView = attributes.Count == 0 + ? 
EmptyReadOnlyDictionary<string, string>.Instance + : new ReadOnlyDictionary<string, string>(new Dictionary<string, string>(attributes, StringComparer.Ordinal)); + + return new NatsSchedulerQueueLease<TMessage>( + this, + message, + payload.ToArray(), + _payload.GetIdempotencyKey(deserialized), + runId, + tenantId, + scheduleId, + segmentId, + correlationId, + attributeView, + deserialized, + attempt, + enqueuedAt, + leaseExpires, + consumer); + } + + private NatsHeaders BuildHeaders(TMessage message, string idempotencyKey) + { + var headers = new NatsHeaders + { + { SchedulerQueueFields.IdempotencyKey, idempotencyKey }, + { SchedulerQueueFields.RunId, _payload.GetRunId(message) }, + { SchedulerQueueFields.TenantId, _payload.GetTenantId(message) }, + { SchedulerQueueFields.QueueKind, _payload.QueueName }, + { SchedulerQueueFields.EnqueuedAt, _timeProvider.GetUtcNow().ToUnixTimeMilliseconds().ToString() } + }; + + var scheduleId = _payload.GetScheduleId(message); + if (!string.IsNullOrWhiteSpace(scheduleId)) + { + headers.Add(SchedulerQueueFields.ScheduleId, scheduleId); + } + + var segmentId = _payload.GetSegmentId(message); + if (!string.IsNullOrWhiteSpace(segmentId)) + { + headers.Add(SchedulerQueueFields.SegmentId, segmentId); + } + + var correlationId = _payload.GetCorrelationId(message); + if (!string.IsNullOrWhiteSpace(correlationId)) + { + headers.Add(SchedulerQueueFields.CorrelationId, correlationId); + } + + var attributes = _payload.GetAttributes(message); + if (attributes is not null) + { + foreach (var kvp in attributes) + { + headers.Add(SchedulerQueueFields.AttributePrefix + kvp.Key, kvp.Value); + } + } + + return headers; + } + + private NatsHeaders BuildDeadLetterHeaders(NatsSchedulerQueueLease<TMessage> lease, string reason) + { + var headers = new NatsHeaders + { + { SchedulerQueueFields.RunId, lease.RunId }, + { SchedulerQueueFields.TenantId, lease.TenantId }, + { SchedulerQueueFields.QueueKind, _payload.QueueName }, + { "reason", reason } + }; + + if (!string.IsNullOrWhiteSpace(lease.ScheduleId)) + { + headers.Add(SchedulerQueueFields.ScheduleId, lease.ScheduleId); + } + + if (!string.IsNullOrWhiteSpace(lease.CorrelationId)) + { + headers.Add(SchedulerQueueFields.CorrelationId, lease.CorrelationId); + } + + if (!string.IsNullOrWhiteSpace(lease.SegmentId)) + { + headers.Add(SchedulerQueueFields.SegmentId, lease.SegmentId); + } + + return headers; + } + + private TimeSpan CalculateBackoff(int attempt) + { + var initial = _queueOptions.RetryInitialBackoff > TimeSpan.Zero + ? _queueOptions.RetryInitialBackoff + : _streamOptions.RetryDelay; + + if (initial <= TimeSpan.Zero) + { + return TimeSpan.Zero; + } + + if (attempt <= 1) + { + return initial; + } + + var max = _queueOptions.RetryMaxBackoff > TimeSpan.Zero + ? _queueOptions.RetryMaxBackoff + : initial; + + var exponent = attempt - 1; + var scaledTicks = initial.Ticks * Math.Pow(2, exponent - 1); + var cappedTicks = Math.Min(max.Ticks, scaledTicks); + + return TimeSpan.FromTicks((long)Math.Max(initial.Ticks, cappedTicks)); + } + + private static long ToNanoseconds(TimeSpan value) + => value <= TimeSpan.Zero ? 
0 : (long)(value.TotalMilliseconds * 1_000_000.0); + + private sealed class EmptyReadOnlyDictionary<TKey, TValue> + where TKey : notnull + { + public static readonly IReadOnlyDictionary<TKey, TValue> Instance = + new ReadOnlyDictionary<TKey, TValue>(new Dictionary<TKey, TValue>(0, EqualityComparer<TKey>.Default)); + } + + private void IncrementDepth() + { + var depth = Interlocked.Increment(ref _approximateDepth); + SchedulerQueueMetrics.RecordDepth(TransportName, _payload.QueueName, depth); + } + + private void DecrementDepth() + { + var depth = Interlocked.Decrement(ref _approximateDepth); + if (depth < 0) + { + depth = Interlocked.Exchange(ref _approximateDepth, 0); + } + + SchedulerQueueMetrics.RecordDepth(TransportName, _payload.QueueName, depth); + } + + private void PublishDepth() + { + var depth = Volatile.Read(ref _approximateDepth); + SchedulerQueueMetrics.RecordDepth(TransportName, _payload.QueueName, depth); + } +} diff --git a/src/StellaOps.Scheduler.Queue/Nats/NatsSchedulerQueueLease.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/Nats/NatsSchedulerQueueLease.cs similarity index 96% rename from src/StellaOps.Scheduler.Queue/Nats/NatsSchedulerQueueLease.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/Nats/NatsSchedulerQueueLease.cs index ecd8c566..38a15448 100644 --- a/src/StellaOps.Scheduler.Queue/Nats/NatsSchedulerQueueLease.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/Nats/NatsSchedulerQueueLease.cs @@ -1,101 +1,101 @@ -using System; -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; -using NATS.Client.JetStream; - -namespace StellaOps.Scheduler.Queue.Nats; - -internal sealed class NatsSchedulerQueueLease<TMessage> : ISchedulerQueueLease<TMessage> -{ - private readonly NatsSchedulerQueueBase<TMessage> _queue; - private int _completed; - - internal NatsSchedulerQueueLease( - NatsSchedulerQueueBase<TMessage> queue, - NatsJSMsg<byte[]> message, - byte[] payload, - string idempotencyKey, - string runId, - string tenantId, - string? scheduleId, - string? segmentId, - string? correlationId, - IReadOnlyDictionary<string, string> attributes, - TMessage deserialized, - int attempt, - DateTimeOffset enqueuedAt, - DateTimeOffset leaseExpiresAt, - string consumer) - { - _queue = queue; - MessageId = message.Metadata?.Sequence.ToString() ?? idempotencyKey; - RunId = runId; - TenantId = tenantId; - ScheduleId = scheduleId; - SegmentId = segmentId; - CorrelationId = correlationId; - Attributes = attributes; - Attempt = attempt; - EnqueuedAt = enqueuedAt; - LeaseExpiresAt = leaseExpiresAt; - Consumer = consumer; - IdempotencyKey = idempotencyKey; - Message = deserialized; - _message = message; - Payload = payload; - } - - private readonly NatsJSMsg<byte[]> _message; - - internal NatsJSMsg<byte[]> RawMessage => _message; - - internal byte[] Payload { get; } - - public string MessageId { get; } - - public string IdempotencyKey { get; } - - public string RunId { get; } - - public string TenantId { get; } - - public string? ScheduleId { get; } - - public string? SegmentId { get; } - - public string? 
CorrelationId { get; } - - public IReadOnlyDictionary<string, string> Attributes { get; } - - public TMessage Message { get; } - - public int Attempt { get; private set; } - - public DateTimeOffset EnqueuedAt { get; } - - public DateTimeOffset LeaseExpiresAt { get; private set; } - - public string Consumer { get; } - - public Task AcknowledgeAsync(CancellationToken cancellationToken = default) - => _queue.AcknowledgeAsync(this, cancellationToken); - - public Task RenewAsync(TimeSpan leaseDuration, CancellationToken cancellationToken = default) - => _queue.RenewAsync(this, leaseDuration, cancellationToken); - - public Task ReleaseAsync(SchedulerQueueReleaseDisposition disposition, CancellationToken cancellationToken = default) - => _queue.ReleaseAsync(this, disposition, cancellationToken); - - public Task DeadLetterAsync(string reason, CancellationToken cancellationToken = default) - => _queue.DeadLetterAsync(this, reason, cancellationToken); - - internal bool TryBeginCompletion() - => Interlocked.CompareExchange(ref _completed, 1, 0) == 0; - - internal void RefreshLease(DateTimeOffset expiresAt) - => LeaseExpiresAt = expiresAt; - - internal void IncrementAttempt() - => Attempt++; -} +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using NATS.Client.JetStream; + +namespace StellaOps.Scheduler.Queue.Nats; + +internal sealed class NatsSchedulerQueueLease<TMessage> : ISchedulerQueueLease<TMessage> +{ + private readonly NatsSchedulerQueueBase<TMessage> _queue; + private int _completed; + + internal NatsSchedulerQueueLease( + NatsSchedulerQueueBase<TMessage> queue, + NatsJSMsg<byte[]> message, + byte[] payload, + string idempotencyKey, + string runId, + string tenantId, + string? scheduleId, + string? segmentId, + string? correlationId, + IReadOnlyDictionary<string, string> attributes, + TMessage deserialized, + int attempt, + DateTimeOffset enqueuedAt, + DateTimeOffset leaseExpiresAt, + string consumer) + { + _queue = queue; + MessageId = message.Metadata?.Sequence.ToString() ?? idempotencyKey; + RunId = runId; + TenantId = tenantId; + ScheduleId = scheduleId; + SegmentId = segmentId; + CorrelationId = correlationId; + Attributes = attributes; + Attempt = attempt; + EnqueuedAt = enqueuedAt; + LeaseExpiresAt = leaseExpiresAt; + Consumer = consumer; + IdempotencyKey = idempotencyKey; + Message = deserialized; + _message = message; + Payload = payload; + } + + private readonly NatsJSMsg<byte[]> _message; + + internal NatsJSMsg<byte[]> RawMessage => _message; + + internal byte[] Payload { get; } + + public string MessageId { get; } + + public string IdempotencyKey { get; } + + public string RunId { get; } + + public string TenantId { get; } + + public string? ScheduleId { get; } + + public string? SegmentId { get; } + + public string? 
CorrelationId { get; } + + public IReadOnlyDictionary<string, string> Attributes { get; } + + public TMessage Message { get; } + + public int Attempt { get; private set; } + + public DateTimeOffset EnqueuedAt { get; } + + public DateTimeOffset LeaseExpiresAt { get; private set; } + + public string Consumer { get; } + + public Task AcknowledgeAsync(CancellationToken cancellationToken = default) + => _queue.AcknowledgeAsync(this, cancellationToken); + + public Task RenewAsync(TimeSpan leaseDuration, CancellationToken cancellationToken = default) + => _queue.RenewAsync(this, leaseDuration, cancellationToken); + + public Task ReleaseAsync(SchedulerQueueReleaseDisposition disposition, CancellationToken cancellationToken = default) + => _queue.ReleaseAsync(this, disposition, cancellationToken); + + public Task DeadLetterAsync(string reason, CancellationToken cancellationToken = default) + => _queue.DeadLetterAsync(this, reason, cancellationToken); + + internal bool TryBeginCompletion() + => Interlocked.CompareExchange(ref _completed, 1, 0) == 0; + + internal void RefreshLease(DateTimeOffset expiresAt) + => LeaseExpiresAt = expiresAt; + + internal void IncrementAttempt() + => Attempt++; +} diff --git a/src/StellaOps.Scheduler.Queue/Nats/NatsSchedulerRunnerQueue.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/Nats/NatsSchedulerRunnerQueue.cs similarity index 97% rename from src/StellaOps.Scheduler.Queue/Nats/NatsSchedulerRunnerQueue.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/Nats/NatsSchedulerRunnerQueue.cs index e47fd21e..a192a5b4 100644 --- a/src/StellaOps.Scheduler.Queue/Nats/NatsSchedulerRunnerQueue.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/Nats/NatsSchedulerRunnerQueue.cs @@ -1,74 +1,74 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Text; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using NATS.Client.Core; -using NATS.Client.JetStream; -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.Queue.Nats; - -internal sealed class NatsSchedulerRunnerQueue - : NatsSchedulerQueueBase<RunnerSegmentQueueMessage>, ISchedulerRunnerQueue -{ - public NatsSchedulerRunnerQueue( - SchedulerQueueOptions queueOptions, - SchedulerNatsQueueOptions natsOptions, - ILogger<NatsSchedulerRunnerQueue> logger, - TimeProvider timeProvider, - Func<NatsOpts, CancellationToken, ValueTask<NatsConnection>>? connectionFactory = null) - : base( - queueOptions, - natsOptions, - natsOptions.Runner, - RunnerPayload.Instance, - logger, - timeProvider, - connectionFactory) - { - } - - private sealed class RunnerPayload : INatsSchedulerQueuePayload<RunnerSegmentQueueMessage> - { - public static RunnerPayload Instance { get; } = new(); - - public string QueueName => "runner"; - - public string GetIdempotencyKey(RunnerSegmentQueueMessage message) - => message.IdempotencyKey; - - public byte[] Serialize(RunnerSegmentQueueMessage message) - => Encoding.UTF8.GetBytes(CanonicalJsonSerializer.Serialize(message)); - - public RunnerSegmentQueueMessage Deserialize(byte[] payload) - => CanonicalJsonSerializer.Deserialize<RunnerSegmentQueueMessage>(Encoding.UTF8.GetString(payload)); - - public string GetRunId(RunnerSegmentQueueMessage message) - => message.RunId; - - public string GetTenantId(RunnerSegmentQueueMessage message) - => message.TenantId; - - public string? GetScheduleId(RunnerSegmentQueueMessage message) - => message.ScheduleId; - - public string? 
GetSegmentId(RunnerSegmentQueueMessage message) - => message.SegmentId; - - public string? GetCorrelationId(RunnerSegmentQueueMessage message) - => message.CorrelationId; - - public IReadOnlyDictionary<string, string>? GetAttributes(RunnerSegmentQueueMessage message) - { - if (message.Attributes is null || message.Attributes.Count == 0) - { - return null; - } - - return message.Attributes.ToDictionary(kvp => kvp.Key, kvp => kvp.Value, StringComparer.Ordinal); - } - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using NATS.Client.Core; +using NATS.Client.JetStream; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.Queue.Nats; + +internal sealed class NatsSchedulerRunnerQueue + : NatsSchedulerQueueBase<RunnerSegmentQueueMessage>, ISchedulerRunnerQueue +{ + public NatsSchedulerRunnerQueue( + SchedulerQueueOptions queueOptions, + SchedulerNatsQueueOptions natsOptions, + ILogger<NatsSchedulerRunnerQueue> logger, + TimeProvider timeProvider, + Func<NatsOpts, CancellationToken, ValueTask<NatsConnection>>? connectionFactory = null) + : base( + queueOptions, + natsOptions, + natsOptions.Runner, + RunnerPayload.Instance, + logger, + timeProvider, + connectionFactory) + { + } + + private sealed class RunnerPayload : INatsSchedulerQueuePayload<RunnerSegmentQueueMessage> + { + public static RunnerPayload Instance { get; } = new(); + + public string QueueName => "runner"; + + public string GetIdempotencyKey(RunnerSegmentQueueMessage message) + => message.IdempotencyKey; + + public byte[] Serialize(RunnerSegmentQueueMessage message) + => Encoding.UTF8.GetBytes(CanonicalJsonSerializer.Serialize(message)); + + public RunnerSegmentQueueMessage Deserialize(byte[] payload) + => CanonicalJsonSerializer.Deserialize<RunnerSegmentQueueMessage>(Encoding.UTF8.GetString(payload)); + + public string GetRunId(RunnerSegmentQueueMessage message) + => message.RunId; + + public string GetTenantId(RunnerSegmentQueueMessage message) + => message.TenantId; + + public string? GetScheduleId(RunnerSegmentQueueMessage message) + => message.ScheduleId; + + public string? GetSegmentId(RunnerSegmentQueueMessage message) + => message.SegmentId; + + public string? GetCorrelationId(RunnerSegmentQueueMessage message) + => message.CorrelationId; + + public IReadOnlyDictionary<string, string>? 
GetAttributes(RunnerSegmentQueueMessage message) + { + if (message.Attributes is null || message.Attributes.Count == 0) + { + return null; + } + + return message.Attributes.ToDictionary(kvp => kvp.Key, kvp => kvp.Value, StringComparer.Ordinal); + } + } +} diff --git a/src/StellaOps.Scheduler.Queue/README.md b/src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/README.md similarity index 100% rename from src/StellaOps.Scheduler.Queue/README.md rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/README.md diff --git a/src/StellaOps.Scheduler.Queue/Redis/IRedisSchedulerQueuePayload.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/Redis/IRedisSchedulerQueuePayload.cs similarity index 100% rename from src/StellaOps.Scheduler.Queue/Redis/IRedisSchedulerQueuePayload.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/Redis/IRedisSchedulerQueuePayload.cs diff --git a/src/StellaOps.Scheduler.Queue/Redis/RedisSchedulerPlannerQueue.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/Redis/RedisSchedulerPlannerQueue.cs similarity index 100% rename from src/StellaOps.Scheduler.Queue/Redis/RedisSchedulerPlannerQueue.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/Redis/RedisSchedulerPlannerQueue.cs diff --git a/src/StellaOps.Scheduler.Queue/Redis/RedisSchedulerQueueBase.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/Redis/RedisSchedulerQueueBase.cs similarity index 100% rename from src/StellaOps.Scheduler.Queue/Redis/RedisSchedulerQueueBase.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/Redis/RedisSchedulerQueueBase.cs diff --git a/src/StellaOps.Scheduler.Queue/Redis/RedisSchedulerQueueLease.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/Redis/RedisSchedulerQueueLease.cs similarity index 100% rename from src/StellaOps.Scheduler.Queue/Redis/RedisSchedulerQueueLease.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/Redis/RedisSchedulerQueueLease.cs diff --git a/src/StellaOps.Scheduler.Queue/Redis/RedisSchedulerRunnerQueue.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/Redis/RedisSchedulerRunnerQueue.cs similarity index 100% rename from src/StellaOps.Scheduler.Queue/Redis/RedisSchedulerRunnerQueue.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/Redis/RedisSchedulerRunnerQueue.cs diff --git a/src/StellaOps.Scheduler.Queue/SchedulerQueueContracts.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/SchedulerQueueContracts.cs similarity index 100% rename from src/StellaOps.Scheduler.Queue/SchedulerQueueContracts.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/SchedulerQueueContracts.cs diff --git a/src/StellaOps.Scheduler.Queue/SchedulerQueueFields.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/SchedulerQueueFields.cs similarity index 100% rename from src/StellaOps.Scheduler.Queue/SchedulerQueueFields.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/SchedulerQueueFields.cs diff --git a/src/StellaOps.Scheduler.Queue/SchedulerQueueHealthCheck.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/SchedulerQueueHealthCheck.cs similarity index 97% rename from src/StellaOps.Scheduler.Queue/SchedulerQueueHealthCheck.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/SchedulerQueueHealthCheck.cs index 4763fc9d..b430b025 100644 --- a/src/StellaOps.Scheduler.Queue/SchedulerQueueHealthCheck.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/SchedulerQueueHealthCheck.cs @@ -1,72 +1,72 @@ -using System; -using 
System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Diagnostics.HealthChecks; -using Microsoft.Extensions.Logging; - -namespace StellaOps.Scheduler.Queue; - -public sealed class SchedulerQueueHealthCheck : IHealthCheck -{ - private readonly ISchedulerPlannerQueue _plannerQueue; - private readonly ISchedulerRunnerQueue _runnerQueue; - private readonly ILogger<SchedulerQueueHealthCheck> _logger; - - public SchedulerQueueHealthCheck( - ISchedulerPlannerQueue plannerQueue, - ISchedulerRunnerQueue runnerQueue, - ILogger<SchedulerQueueHealthCheck> logger) - { - _plannerQueue = plannerQueue ?? throw new ArgumentNullException(nameof(plannerQueue)); - _runnerQueue = runnerQueue ?? throw new ArgumentNullException(nameof(runnerQueue)); - _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - } - - public async Task<HealthCheckResult> CheckHealthAsync( - HealthCheckContext context, - CancellationToken cancellationToken = default) - { - cancellationToken.ThrowIfCancellationRequested(); - - var failures = new List<string>(); - - if (!await ProbeAsync(_plannerQueue, "planner", cancellationToken).ConfigureAwait(false)) - { - failures.Add("planner transport unreachable"); - } - - if (!await ProbeAsync(_runnerQueue, "runner", cancellationToken).ConfigureAwait(false)) - { - failures.Add("runner transport unreachable"); - } - - if (failures.Count == 0) - { - return HealthCheckResult.Healthy("Scheduler queues reachable."); - } - - var description = string.Join("; ", failures); - return new HealthCheckResult( - context.Registration.FailureStatus, - description); - } - - private async Task<bool> ProbeAsync(object queue, string label, CancellationToken cancellationToken) - { - try - { - if (queue is ISchedulerQueueTransportDiagnostics diagnostics) - { - await diagnostics.PingAsync(cancellationToken).ConfigureAwait(false); - } - - return true; - } - catch (Exception ex) - { - _logger.LogError(ex, "Scheduler {Label} queue transport ping failed.", label); - return false; - } - } -} +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Diagnostics.HealthChecks; +using Microsoft.Extensions.Logging; + +namespace StellaOps.Scheduler.Queue; + +public sealed class SchedulerQueueHealthCheck : IHealthCheck +{ + private readonly ISchedulerPlannerQueue _plannerQueue; + private readonly ISchedulerRunnerQueue _runnerQueue; + private readonly ILogger<SchedulerQueueHealthCheck> _logger; + + public SchedulerQueueHealthCheck( + ISchedulerPlannerQueue plannerQueue, + ISchedulerRunnerQueue runnerQueue, + ILogger<SchedulerQueueHealthCheck> logger) + { + _plannerQueue = plannerQueue ?? throw new ArgumentNullException(nameof(plannerQueue)); + _runnerQueue = runnerQueue ?? throw new ArgumentNullException(nameof(runnerQueue)); + _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + } + + public async Task<HealthCheckResult> CheckHealthAsync( + HealthCheckContext context, + CancellationToken cancellationToken = default) + { + cancellationToken.ThrowIfCancellationRequested(); + + var failures = new List<string>(); + + if (!await ProbeAsync(_plannerQueue, "planner", cancellationToken).ConfigureAwait(false)) + { + failures.Add("planner transport unreachable"); + } + + if (!await ProbeAsync(_runnerQueue, "runner", cancellationToken).ConfigureAwait(false)) + { + failures.Add("runner transport unreachable"); + } + + if (failures.Count == 0) + { + return HealthCheckResult.Healthy("Scheduler queues reachable."); + } + + var description = string.Join("; ", failures); + return new HealthCheckResult( + context.Registration.FailureStatus, + description); + } + + private async Task<bool> ProbeAsync(object queue, string label, CancellationToken cancellationToken) + { + try + { + if (queue is ISchedulerQueueTransportDiagnostics diagnostics) + { + await diagnostics.PingAsync(cancellationToken).ConfigureAwait(false); + } + + return true; + } + catch (Exception ex) + { + _logger.LogError(ex, "Scheduler {Label} queue transport ping failed.", label); + return false; + } + } +} diff --git a/src/StellaOps.Scheduler.Queue/SchedulerQueueMetrics.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/SchedulerQueueMetrics.cs similarity index 100% rename from src/StellaOps.Scheduler.Queue/SchedulerQueueMetrics.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/SchedulerQueueMetrics.cs diff --git a/src/StellaOps.Scheduler.Queue/SchedulerQueueOptions.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/SchedulerQueueOptions.cs similarity index 100% rename from src/StellaOps.Scheduler.Queue/SchedulerQueueOptions.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/SchedulerQueueOptions.cs diff --git a/src/StellaOps.Scheduler.Queue/SchedulerQueueServiceCollectionExtensions.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/SchedulerQueueServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Scheduler.Queue/SchedulerQueueServiceCollectionExtensions.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/SchedulerQueueServiceCollectionExtensions.cs diff --git a/src/StellaOps.Scheduler.Queue/SchedulerQueueTransportKind.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/SchedulerQueueTransportKind.cs similarity index 100% rename from src/StellaOps.Scheduler.Queue/SchedulerQueueTransportKind.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/SchedulerQueueTransportKind.cs diff --git a/src/StellaOps.Scheduler.Queue/StellaOps.Scheduler.Queue.csproj b/src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/StellaOps.Scheduler.Queue.csproj similarity index 98% rename from src/StellaOps.Scheduler.Queue/StellaOps.Scheduler.Queue.csproj rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/StellaOps.Scheduler.Queue.csproj index 5b94daa2..7a99594f 100644 --- a/src/StellaOps.Scheduler.Queue/StellaOps.Scheduler.Queue.csproj +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/StellaOps.Scheduler.Queue.csproj @@ -1,21 +1,21 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - <ItemGroup> - <PackageReference Include="StackExchange.Redis" Version="2.8.24" /> - <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" 
Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Configuration.Binder" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Diagnostics.HealthChecks" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Diagnostics.HealthChecks.Abstractions" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="NATS.Client.Core" Version="2.0.0" /> - <PackageReference Include="NATS.Client.JetStream" Version="2.0.0" /> - </ItemGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Scheduler.Models\StellaOps.Scheduler.Models.csproj" /> - </ItemGroup> -</Project> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> + <PackageReference Include="StackExchange.Redis" Version="2.8.24" /> + <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Configuration.Binder" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Diagnostics.HealthChecks" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Diagnostics.HealthChecks.Abstractions" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="NATS.Client.Core" Version="2.0.0" /> + <PackageReference Include="NATS.Client.JetStream" Version="2.0.0" /> + </ItemGroup> + <ItemGroup> + <ProjectReference Include="..\StellaOps.Scheduler.Models\StellaOps.Scheduler.Models.csproj" /> + </ItemGroup> +</Project> diff --git a/src/StellaOps.Scheduler.Queue/TASKS.md b/src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/TASKS.md similarity index 100% rename from src/StellaOps.Scheduler.Queue/TASKS.md rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Queue/TASKS.md diff --git a/src/StellaOps.Scheduler.Storage.Mongo/AGENTS.md b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/AGENTS.md similarity index 100% rename from src/StellaOps.Scheduler.Storage.Mongo/AGENTS.md rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/AGENTS.md diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Documents/RunSummaryDocument.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Documents/RunSummaryDocument.cs similarity index 95% rename from src/StellaOps.Scheduler.Storage.Mongo/Documents/RunSummaryDocument.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Documents/RunSummaryDocument.cs index 20ee8089..c0e7c4bd 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo/Documents/RunSummaryDocument.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Documents/RunSummaryDocument.cs @@ -1,88 +1,88 @@ -using MongoDB.Bson; -using MongoDB.Bson.Serialization.Attributes; -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.Storage.Mongo.Documents; - -[BsonIgnoreExtraElements] -internal sealed class RunSummaryDocument -{ - public string 
Id { get; set; } = string.Empty; - - public string TenantId { get; set; } = string.Empty; - - public string ScheduleId { get; set; } = string.Empty; - - public DateTime UpdatedAt { get; set; } = DateTime.SpecifyKind(DateTime.UnixEpoch, DateTimeKind.Utc); - - public RunSummaryEntryDocument? LastRun { get; set; } - = null; - - public List<RunSummaryEntryDocument> Recent { get; set; } = new(); - - public RunSummaryCountersDocument Counters { get; set; } = new(); -} - -internal sealed class RunSummaryEntryDocument -{ - public string RunId { get; set; } = string.Empty; - - [BsonRepresentation(BsonType.String)] - public RunTrigger Trigger { get; set; } = RunTrigger.Cron; - - [BsonRepresentation(BsonType.String)] - public RunState State { get; set; } = RunState.Planning; - - public DateTime CreatedAt { get; set; } = DateTime.SpecifyKind(DateTime.UnixEpoch, DateTimeKind.Utc); - - public DateTime? StartedAt { get; set; } - = null; - - public DateTime? FinishedAt { get; set; } - = null; - - public RunStats Stats { get; set; } = RunStats.Empty; - - [BsonIgnoreIfNull] - public string? Error { get; set; } - = null; -} - -internal sealed class RunSummaryCountersDocument -{ - public int Total { get; set; } - = 0; - - public int Planning { get; set; } - = 0; - - public int Queued { get; set; } - = 0; - - public int Running { get; set; } - = 0; - - public int Completed { get; set; } - = 0; - - public int Error { get; set; } - = 0; - - public int Cancelled { get; set; } - = 0; - - public int TotalDeltas { get; set; } - = 0; - - public int TotalNewCriticals { get; set; } - = 0; - - public int TotalNewHigh { get; set; } - = 0; - - public int TotalNewMedium { get; set; } - = 0; - - public int TotalNewLow { get; set; } - = 0; -} +using MongoDB.Bson; +using MongoDB.Bson.Serialization.Attributes; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.Storage.Mongo.Documents; + +[BsonIgnoreExtraElements] +internal sealed class RunSummaryDocument +{ + public string Id { get; set; } = string.Empty; + + public string TenantId { get; set; } = string.Empty; + + public string ScheduleId { get; set; } = string.Empty; + + public DateTime UpdatedAt { get; set; } = DateTime.SpecifyKind(DateTime.UnixEpoch, DateTimeKind.Utc); + + public RunSummaryEntryDocument? LastRun { get; set; } + = null; + + public List<RunSummaryEntryDocument> Recent { get; set; } = new(); + + public RunSummaryCountersDocument Counters { get; set; } = new(); +} + +internal sealed class RunSummaryEntryDocument +{ + public string RunId { get; set; } = string.Empty; + + [BsonRepresentation(BsonType.String)] + public RunTrigger Trigger { get; set; } = RunTrigger.Cron; + + [BsonRepresentation(BsonType.String)] + public RunState State { get; set; } = RunState.Planning; + + public DateTime CreatedAt { get; set; } = DateTime.SpecifyKind(DateTime.UnixEpoch, DateTimeKind.Utc); + + public DateTime? StartedAt { get; set; } + = null; + + public DateTime? FinishedAt { get; set; } + = null; + + public RunStats Stats { get; set; } = RunStats.Empty; + + [BsonIgnoreIfNull] + public string? 
Error { get; set; } + = null; +} + +internal sealed class RunSummaryCountersDocument +{ + public int Total { get; set; } + = 0; + + public int Planning { get; set; } + = 0; + + public int Queued { get; set; } + = 0; + + public int Running { get; set; } + = 0; + + public int Completed { get; set; } + = 0; + + public int Error { get; set; } + = 0; + + public int Cancelled { get; set; } + = 0; + + public int TotalDeltas { get; set; } + = 0; + + public int TotalNewCriticals { get; set; } + = 0; + + public int TotalNewHigh { get; set; } + = 0; + + public int TotalNewMedium { get; set; } + = 0; + + public int TotalNewLow { get; set; } + = 0; +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Internal/SchedulerMongoContext.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Internal/SchedulerMongoContext.cs similarity index 100% rename from src/StellaOps.Scheduler.Storage.Mongo/Internal/SchedulerMongoContext.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Internal/SchedulerMongoContext.cs diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Internal/SchedulerMongoInitializer.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Internal/SchedulerMongoInitializer.cs similarity index 100% rename from src/StellaOps.Scheduler.Storage.Mongo/Internal/SchedulerMongoInitializer.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Internal/SchedulerMongoInitializer.cs diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Internal/SchedulerMongoInitializerHostedService.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Internal/SchedulerMongoInitializerHostedService.cs similarity index 100% rename from src/StellaOps.Scheduler.Storage.Mongo/Internal/SchedulerMongoInitializerHostedService.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Internal/SchedulerMongoInitializerHostedService.cs diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Migrations/EnsureSchedulerCollectionsMigration.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Migrations/EnsureSchedulerCollectionsMigration.cs similarity index 100% rename from src/StellaOps.Scheduler.Storage.Mongo/Migrations/EnsureSchedulerCollectionsMigration.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Migrations/EnsureSchedulerCollectionsMigration.cs diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Migrations/EnsureSchedulerIndexesMigration.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Migrations/EnsureSchedulerIndexesMigration.cs similarity index 100% rename from src/StellaOps.Scheduler.Storage.Mongo/Migrations/EnsureSchedulerIndexesMigration.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Migrations/EnsureSchedulerIndexesMigration.cs diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Migrations/ISchedulerMongoMigration.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Migrations/ISchedulerMongoMigration.cs similarity index 100% rename from src/StellaOps.Scheduler.Storage.Mongo/Migrations/ISchedulerMongoMigration.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Migrations/ISchedulerMongoMigration.cs diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Migrations/SchedulerMongoMigrationRecord.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Migrations/SchedulerMongoMigrationRecord.cs similarity index 100% rename from src/StellaOps.Scheduler.Storage.Mongo/Migrations/SchedulerMongoMigrationRecord.cs rename to 
src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Migrations/SchedulerMongoMigrationRecord.cs diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Migrations/SchedulerMongoMigrationRunner.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Migrations/SchedulerMongoMigrationRunner.cs similarity index 100% rename from src/StellaOps.Scheduler.Storage.Mongo/Migrations/SchedulerMongoMigrationRunner.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Migrations/SchedulerMongoMigrationRunner.cs diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Options/SchedulerMongoOptions.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Options/SchedulerMongoOptions.cs similarity index 100% rename from src/StellaOps.Scheduler.Storage.Mongo/Options/SchedulerMongoOptions.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Options/SchedulerMongoOptions.cs diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Projections/RunSummaryProjection.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Projections/RunSummaryProjection.cs similarity index 96% rename from src/StellaOps.Scheduler.Storage.Mongo/Projections/RunSummaryProjection.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Projections/RunSummaryProjection.cs index f71a1317..c02d9e00 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo/Projections/RunSummaryProjection.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Projections/RunSummaryProjection.cs @@ -1,36 +1,36 @@ -using System.Collections.Immutable; -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.Storage.Mongo.Projections; - -public sealed record RunSummaryProjection( - string TenantId, - string ScheduleId, - DateTimeOffset UpdatedAt, - RunSummarySnapshot? LastRun, - ImmutableArray<RunSummarySnapshot> Recent, - RunSummaryCounters Counters); - -public sealed record RunSummarySnapshot( - string RunId, - RunTrigger Trigger, - RunState State, - DateTimeOffset CreatedAt, - DateTimeOffset? StartedAt, - DateTimeOffset? FinishedAt, - RunStats Stats, - string? Error); - -public sealed record RunSummaryCounters( - int Total, - int Planning, - int Queued, - int Running, - int Completed, - int Error, - int Cancelled, - int TotalDeltas, - int TotalNewCriticals, - int TotalNewHigh, - int TotalNewMedium, - int TotalNewLow); +using System.Collections.Immutable; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.Storage.Mongo.Projections; + +public sealed record RunSummaryProjection( + string TenantId, + string ScheduleId, + DateTimeOffset UpdatedAt, + RunSummarySnapshot? LastRun, + ImmutableArray<RunSummarySnapshot> Recent, + RunSummaryCounters Counters); + +public sealed record RunSummarySnapshot( + string RunId, + RunTrigger Trigger, + RunState State, + DateTimeOffset CreatedAt, + DateTimeOffset? StartedAt, + DateTimeOffset? FinishedAt, + RunStats Stats, + string? 
Error); + +public sealed record RunSummaryCounters( + int Total, + int Planning, + int Queued, + int Running, + int Completed, + int Error, + int Cancelled, + int TotalDeltas, + int TotalNewCriticals, + int TotalNewHigh, + int TotalNewMedium, + int TotalNewLow); diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Properties/AssemblyInfo.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Properties/AssemblyInfo.cs similarity index 100% rename from src/StellaOps.Scheduler.Storage.Mongo/Properties/AssemblyInfo.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Properties/AssemblyInfo.cs diff --git a/src/StellaOps.Scheduler.Storage.Mongo/README.md b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/README.md similarity index 100% rename from src/StellaOps.Scheduler.Storage.Mongo/README.md rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/README.md diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Repositories/AuditQueryOptions.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/AuditQueryOptions.cs similarity index 96% rename from src/StellaOps.Scheduler.Storage.Mongo/Repositories/AuditQueryOptions.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/AuditQueryOptions.cs index 67a22fc2..21e17e70 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo/Repositories/AuditQueryOptions.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/AuditQueryOptions.cs @@ -1,32 +1,32 @@ -namespace StellaOps.Scheduler.Storage.Mongo.Repositories; - -/// <summary> -/// Filters applied when querying scheduler audit records. -/// </summary> -public sealed class AuditQueryOptions -{ - /// <summary> - /// Optional audit category filter (e.g., "scheduler"). - /// </summary> - public string? Category { get; init; } - - /// <summary> - /// Optional schedule identifier filter. - /// </summary> - public string? ScheduleId { get; init; } - - /// <summary> - /// Optional run identifier filter. - /// </summary> - public string? RunId { get; init; } - - /// <summary> - /// Lower bound for audit occurrence timestamp. - /// </summary> - public DateTimeOffset? Since { get; init; } - - /// <summary> - /// Maximum number of records to return. - /// </summary> - public int? Limit { get; init; } -} +namespace StellaOps.Scheduler.Storage.Mongo.Repositories; + +/// <summary> +/// Filters applied when querying scheduler audit records. +/// </summary> +public sealed class AuditQueryOptions +{ + /// <summary> + /// Optional audit category filter (e.g., "scheduler"). + /// </summary> + public string? Category { get; init; } + + /// <summary> + /// Optional schedule identifier filter. + /// </summary> + public string? ScheduleId { get; init; } + + /// <summary> + /// Optional run identifier filter. + /// </summary> + public string? RunId { get; init; } + + /// <summary> + /// Lower bound for audit occurrence timestamp. + /// </summary> + public DateTimeOffset? Since { get; init; } + + /// <summary> + /// Maximum number of records to return. + /// </summary> + public int? 
Limit { get; init; } +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Repositories/AuditRepository.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/AuditRepository.cs similarity index 97% rename from src/StellaOps.Scheduler.Storage.Mongo/Repositories/AuditRepository.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/AuditRepository.cs index 68927f7c..6967cf91 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo/Repositories/AuditRepository.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/AuditRepository.cs @@ -1,99 +1,99 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using MongoDB.Bson; -using MongoDB.Driver; -using StellaOps.Scheduler.Models; -using StellaOps.Scheduler.Storage.Mongo.Internal; -using StellaOps.Scheduler.Storage.Mongo.Serialization; - -namespace StellaOps.Scheduler.Storage.Mongo.Repositories; - -internal sealed class AuditRepository : IAuditRepository -{ - private static readonly FilterDefinitionBuilder<BsonDocument> Filter = Builders<BsonDocument>.Filter; - private static readonly SortDefinitionBuilder<BsonDocument> Sort = Builders<BsonDocument>.Sort; - - private readonly IMongoCollection<BsonDocument> _collection; - - public AuditRepository(SchedulerMongoContext context) - { - if (context is null) - { - throw new ArgumentNullException(nameof(context)); - } - - _collection = context.Database.GetCollection<BsonDocument>(context.Options.AuditCollection); - } - - public async Task InsertAsync( - AuditRecord record, - IClientSessionHandle? session = null, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(record); - - var document = AuditRecordDocumentMapper.ToBsonDocument(record); - if (session is null) - { - await _collection.InsertOneAsync(document, cancellationToken: cancellationToken).ConfigureAwait(false); - } - else - { - await _collection.InsertOneAsync(session, document, cancellationToken: cancellationToken).ConfigureAwait(false); - } - } - - public async Task<IReadOnlyList<AuditRecord>> ListAsync( - string tenantId, - AuditQueryOptions? options = null, - IClientSessionHandle? session = null, - CancellationToken cancellationToken = default) - { - if (string.IsNullOrWhiteSpace(tenantId)) - { - throw new ArgumentException("Tenant id must be provided.", nameof(tenantId)); - } - - options ??= new AuditQueryOptions(); - - var filters = new List<FilterDefinition<BsonDocument>> - { - Filter.Eq("tenantId", tenantId) - }; - - if (!string.IsNullOrWhiteSpace(options.Category)) - { - filters.Add(Filter.Eq("category", options.Category)); - } - - if (!string.IsNullOrWhiteSpace(options.ScheduleId)) - { - filters.Add(Filter.Eq("scheduleId", options.ScheduleId)); - } - - if (!string.IsNullOrWhiteSpace(options.RunId)) - { - filters.Add(Filter.Eq("runId", options.RunId)); - } - - if (options.Since is { } since) - { - filters.Add(Filter.Gte("occurredAt", since.ToUniversalTime().ToString("O"))); - } - - var combined = Filter.And(filters); - var find = session is null - ? _collection.Find(combined) - : _collection.Find(session, combined); - - var limit = options.Limit is { } specified && specified > 0 ? 
specified : 100; - var documents = await find - .Sort(Sort.Descending("occurredAt")) - .Limit(limit) - .ToListAsync(cancellationToken) - .ConfigureAwait(false); - - return documents.Select(AuditRecordDocumentMapper.FromBsonDocument).ToArray(); - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using MongoDB.Bson; +using MongoDB.Driver; +using StellaOps.Scheduler.Models; +using StellaOps.Scheduler.Storage.Mongo.Internal; +using StellaOps.Scheduler.Storage.Mongo.Serialization; + +namespace StellaOps.Scheduler.Storage.Mongo.Repositories; + +internal sealed class AuditRepository : IAuditRepository +{ + private static readonly FilterDefinitionBuilder<BsonDocument> Filter = Builders<BsonDocument>.Filter; + private static readonly SortDefinitionBuilder<BsonDocument> Sort = Builders<BsonDocument>.Sort; + + private readonly IMongoCollection<BsonDocument> _collection; + + public AuditRepository(SchedulerMongoContext context) + { + if (context is null) + { + throw new ArgumentNullException(nameof(context)); + } + + _collection = context.Database.GetCollection<BsonDocument>(context.Options.AuditCollection); + } + + public async Task InsertAsync( + AuditRecord record, + IClientSessionHandle? session = null, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(record); + + var document = AuditRecordDocumentMapper.ToBsonDocument(record); + if (session is null) + { + await _collection.InsertOneAsync(document, cancellationToken: cancellationToken).ConfigureAwait(false); + } + else + { + await _collection.InsertOneAsync(session, document, cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + + public async Task<IReadOnlyList<AuditRecord>> ListAsync( + string tenantId, + AuditQueryOptions? options = null, + IClientSessionHandle? session = null, + CancellationToken cancellationToken = default) + { + if (string.IsNullOrWhiteSpace(tenantId)) + { + throw new ArgumentException("Tenant id must be provided.", nameof(tenantId)); + } + + options ??= new AuditQueryOptions(); + + var filters = new List<FilterDefinition<BsonDocument>> + { + Filter.Eq("tenantId", tenantId) + }; + + if (!string.IsNullOrWhiteSpace(options.Category)) + { + filters.Add(Filter.Eq("category", options.Category)); + } + + if (!string.IsNullOrWhiteSpace(options.ScheduleId)) + { + filters.Add(Filter.Eq("scheduleId", options.ScheduleId)); + } + + if (!string.IsNullOrWhiteSpace(options.RunId)) + { + filters.Add(Filter.Eq("runId", options.RunId)); + } + + if (options.Since is { } since) + { + filters.Add(Filter.Gte("occurredAt", since.ToUniversalTime().ToString("O"))); + } + + var combined = Filter.And(filters); + var find = session is null + ? _collection.Find(combined) + : _collection.Find(session, combined); + + var limit = options.Limit is { } specified && specified > 0 ? 
specified : 100; + var documents = await find + .Sort(Sort.Descending("occurredAt")) + .Limit(limit) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + + return documents.Select(AuditRecordDocumentMapper.FromBsonDocument).ToArray(); + } +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Repositories/GraphJobRepository.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/GraphJobRepository.cs similarity index 97% rename from src/StellaOps.Scheduler.Storage.Mongo/Repositories/GraphJobRepository.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/GraphJobRepository.cs index f542af3c..026a1825 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo/Repositories/GraphJobRepository.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/GraphJobRepository.cs @@ -1,200 +1,200 @@ -using MongoDB.Bson; -using MongoDB.Driver; -using StellaOps.Scheduler.Models; -using StellaOps.Scheduler.Storage.Mongo.Internal; -using StellaOps.Scheduler.Storage.Mongo.Serialization; - -namespace StellaOps.Scheduler.Storage.Mongo.Repositories; - -internal sealed class GraphJobRepository : IGraphJobRepository -{ - private static readonly FilterDefinitionBuilder<BsonDocument> Filter = Builders<BsonDocument>.Filter; - private readonly IMongoCollection<BsonDocument> _collection; - - public GraphJobRepository(SchedulerMongoContext context) - { - ArgumentNullException.ThrowIfNull(context); - _collection = context.Database.GetCollection<BsonDocument>(context.Options.GraphJobsCollection); - } - - public async Task InsertAsync(GraphBuildJob job, CancellationToken cancellationToken = default) - { - var document = GraphJobDocumentMapper.ToBsonDocument(job); - await _collection.InsertOneAsync(document, cancellationToken: cancellationToken).ConfigureAwait(false); - } - - public async Task InsertAsync(GraphOverlayJob job, CancellationToken cancellationToken = default) - { - var document = GraphJobDocumentMapper.ToBsonDocument(job); - await _collection.InsertOneAsync(document, cancellationToken: cancellationToken).ConfigureAwait(false); - } - - public async Task<GraphBuildJob?> GetBuildJobAsync(string tenantId, string jobId, CancellationToken cancellationToken = default) - { - var filter = BuildIdFilter(tenantId, jobId, "build"); - var document = await _collection.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); - return document is null ? null : GraphJobDocumentMapper.ToGraphBuildJob(document); - } - - public async Task<GraphOverlayJob?> GetOverlayJobAsync(string tenantId, string jobId, CancellationToken cancellationToken = default) - { - var filter = BuildIdFilter(tenantId, jobId, "overlay"); - var document = await _collection.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); - return document is null ? 
null : GraphJobDocumentMapper.ToGraphOverlayJob(document); - } - - public async Task<GraphBuildJob> ReplaceAsync(GraphBuildJob job, CancellationToken cancellationToken = default) - { - var document = GraphJobDocumentMapper.ToBsonDocument(job); - var filter = BuildIdFilter(job.TenantId, job.Id, "build"); - await _collection.ReplaceOneAsync(filter, document, new ReplaceOptions { IsUpsert = false }, cancellationToken).ConfigureAwait(false); - return job; - } - - public async Task<GraphOverlayJob> ReplaceAsync(GraphOverlayJob job, CancellationToken cancellationToken = default) - { - var document = GraphJobDocumentMapper.ToBsonDocument(job); - var filter = BuildIdFilter(job.TenantId, job.Id, "overlay"); - await _collection.ReplaceOneAsync(filter, document, new ReplaceOptions { IsUpsert = false }, cancellationToken).ConfigureAwait(false); - return job; - } - - public async Task<IReadOnlyList<GraphBuildJob>> ListBuildJobsAsync(string tenantId, GraphJobStatus? status, int limit, CancellationToken cancellationToken = default) - { - var filters = new List<FilterDefinition<BsonDocument>> - { - Filter.Eq("tenantId", tenantId), - Filter.Eq("kind", "build") - }; - - if (status is { } s) - { - filters.Add(Filter.Eq("status", s.ToString().ToLowerInvariant())); - } - - var filter = Filter.And(filters); - var cursor = await _collection.Find(filter) - .Sort(Builders<BsonDocument>.Sort.Descending("createdAt")) - .Limit(limit) - .ToListAsync(cancellationToken) - .ConfigureAwait(false); - - return cursor.Select(GraphJobDocumentMapper.ToGraphBuildJob).ToArray(); - } - - public async Task<IReadOnlyList<GraphBuildJob>> ListBuildJobsAsync(GraphJobStatus? status, int limit, CancellationToken cancellationToken = default) - { - var filters = new List<FilterDefinition<BsonDocument>> - { - Filter.Eq("kind", "build") - }; - - if (status is { } s) - { - filters.Add(Filter.Eq("status", s.ToString().ToLowerInvariant())); - } - - var filter = Filter.And(filters); - var cursor = await _collection.Find(filter) - .Sort(Builders<BsonDocument>.Sort.Ascending("createdAt")) - .Limit(limit) - .ToListAsync(cancellationToken) - .ConfigureAwait(false); - - return cursor.Select(GraphJobDocumentMapper.ToGraphBuildJob).ToArray(); - } - - public async Task<IReadOnlyList<GraphOverlayJob>> ListOverlayJobsAsync(string tenantId, GraphJobStatus? status, int limit, CancellationToken cancellationToken = default) - { - var filters = new List<FilterDefinition<BsonDocument>> - { - Filter.Eq("tenantId", tenantId), - Filter.Eq("kind", "overlay") - }; - - if (status is { } s) - { - filters.Add(Filter.Eq("status", s.ToString().ToLowerInvariant())); - } - - var filter = Filter.And(filters); - var cursor = await _collection.Find(filter) - .Sort(Builders<BsonDocument>.Sort.Descending("createdAt")) - .Limit(limit) - .ToListAsync(cancellationToken) - .ConfigureAwait(false); - - return cursor.Select(GraphJobDocumentMapper.ToGraphOverlayJob).ToArray(); - } - - public async Task<IReadOnlyList<GraphOverlayJob>> ListOverlayJobsAsync(GraphJobStatus? 
status, int limit, CancellationToken cancellationToken = default) - { - var filters = new List<FilterDefinition<BsonDocument>> - { - Filter.Eq("kind", "overlay") - }; - - if (status is { } s) - { - filters.Add(Filter.Eq("status", s.ToString().ToLowerInvariant())); - } - - var filter = Filter.And(filters); - var cursor = await _collection.Find(filter) - .Sort(Builders<BsonDocument>.Sort.Ascending("createdAt")) - .Limit(limit) - .ToListAsync(cancellationToken) - .ConfigureAwait(false); - - return cursor.Select(GraphJobDocumentMapper.ToGraphOverlayJob).ToArray(); - } - - public async Task<IReadOnlyCollection<GraphOverlayJob>> ListOverlayJobsAsync(string tenantId, CancellationToken cancellationToken = default) - { - var filter = Filter.And( - Filter.Eq("tenantId", tenantId), - Filter.Eq("kind", "overlay")); - - var cursor = await _collection.Find(filter) - .ToListAsync(cancellationToken) - .ConfigureAwait(false); - - return cursor.Select(GraphJobDocumentMapper.ToGraphOverlayJob).ToArray(); - } - - public async Task<bool> TryReplaceAsync(GraphBuildJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(job); - - var filter = Filter.And( - Filter.Eq("_id", job.Id), - Filter.Eq("tenantId", job.TenantId), - Filter.Eq("kind", "build"), - Filter.Eq("status", expectedStatus.ToString().ToLowerInvariant())); - - var document = GraphJobDocumentMapper.ToBsonDocument(job); - var result = await _collection.ReplaceOneAsync(filter, document, new ReplaceOptions { IsUpsert = false }, cancellationToken).ConfigureAwait(false); - return result.MatchedCount > 0; - } - - public async Task<bool> TryReplaceOverlayAsync(GraphOverlayJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(job); - - var filter = Filter.And( - Filter.Eq("_id", job.Id), - Filter.Eq("tenantId", job.TenantId), - Filter.Eq("kind", "overlay"), - Filter.Eq("status", expectedStatus.ToString().ToLowerInvariant())); - - var document = GraphJobDocumentMapper.ToBsonDocument(job); - var result = await _collection.ReplaceOneAsync(filter, document, new ReplaceOptions { IsUpsert = false }, cancellationToken).ConfigureAwait(false); - return result.MatchedCount > 0; - } - - private static FilterDefinition<BsonDocument> BuildIdFilter(string tenantId, string jobId, string kind) - => Filter.And( - Filter.Eq("_id", jobId), - Filter.Eq("tenantId", tenantId), - Filter.Eq("kind", kind)); -} +using MongoDB.Bson; +using MongoDB.Driver; +using StellaOps.Scheduler.Models; +using StellaOps.Scheduler.Storage.Mongo.Internal; +using StellaOps.Scheduler.Storage.Mongo.Serialization; + +namespace StellaOps.Scheduler.Storage.Mongo.Repositories; + +internal sealed class GraphJobRepository : IGraphJobRepository +{ + private static readonly FilterDefinitionBuilder<BsonDocument> Filter = Builders<BsonDocument>.Filter; + private readonly IMongoCollection<BsonDocument> _collection; + + public GraphJobRepository(SchedulerMongoContext context) + { + ArgumentNullException.ThrowIfNull(context); + _collection = context.Database.GetCollection<BsonDocument>(context.Options.GraphJobsCollection); + } + + public async Task InsertAsync(GraphBuildJob job, CancellationToken cancellationToken = default) + { + var document = GraphJobDocumentMapper.ToBsonDocument(job); + await _collection.InsertOneAsync(document, cancellationToken: cancellationToken).ConfigureAwait(false); + } + + public async Task InsertAsync(GraphOverlayJob job, CancellationToken 
cancellationToken = default) + { + var document = GraphJobDocumentMapper.ToBsonDocument(job); + await _collection.InsertOneAsync(document, cancellationToken: cancellationToken).ConfigureAwait(false); + } + + public async Task<GraphBuildJob?> GetBuildJobAsync(string tenantId, string jobId, CancellationToken cancellationToken = default) + { + var filter = BuildIdFilter(tenantId, jobId, "build"); + var document = await _collection.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); + return document is null ? null : GraphJobDocumentMapper.ToGraphBuildJob(document); + } + + public async Task<GraphOverlayJob?> GetOverlayJobAsync(string tenantId, string jobId, CancellationToken cancellationToken = default) + { + var filter = BuildIdFilter(tenantId, jobId, "overlay"); + var document = await _collection.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); + return document is null ? null : GraphJobDocumentMapper.ToGraphOverlayJob(document); + } + + public async Task<GraphBuildJob> ReplaceAsync(GraphBuildJob job, CancellationToken cancellationToken = default) + { + var document = GraphJobDocumentMapper.ToBsonDocument(job); + var filter = BuildIdFilter(job.TenantId, job.Id, "build"); + await _collection.ReplaceOneAsync(filter, document, new ReplaceOptions { IsUpsert = false }, cancellationToken).ConfigureAwait(false); + return job; + } + + public async Task<GraphOverlayJob> ReplaceAsync(GraphOverlayJob job, CancellationToken cancellationToken = default) + { + var document = GraphJobDocumentMapper.ToBsonDocument(job); + var filter = BuildIdFilter(job.TenantId, job.Id, "overlay"); + await _collection.ReplaceOneAsync(filter, document, new ReplaceOptions { IsUpsert = false }, cancellationToken).ConfigureAwait(false); + return job; + } + + public async Task<IReadOnlyList<GraphBuildJob>> ListBuildJobsAsync(string tenantId, GraphJobStatus? status, int limit, CancellationToken cancellationToken = default) + { + var filters = new List<FilterDefinition<BsonDocument>> + { + Filter.Eq("tenantId", tenantId), + Filter.Eq("kind", "build") + }; + + if (status is { } s) + { + filters.Add(Filter.Eq("status", s.ToString().ToLowerInvariant())); + } + + var filter = Filter.And(filters); + var cursor = await _collection.Find(filter) + .Sort(Builders<BsonDocument>.Sort.Descending("createdAt")) + .Limit(limit) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + + return cursor.Select(GraphJobDocumentMapper.ToGraphBuildJob).ToArray(); + } + + public async Task<IReadOnlyList<GraphBuildJob>> ListBuildJobsAsync(GraphJobStatus? status, int limit, CancellationToken cancellationToken = default) + { + var filters = new List<FilterDefinition<BsonDocument>> + { + Filter.Eq("kind", "build") + }; + + if (status is { } s) + { + filters.Add(Filter.Eq("status", s.ToString().ToLowerInvariant())); + } + + var filter = Filter.And(filters); + var cursor = await _collection.Find(filter) + .Sort(Builders<BsonDocument>.Sort.Ascending("createdAt")) + .Limit(limit) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + + return cursor.Select(GraphJobDocumentMapper.ToGraphBuildJob).ToArray(); + } + + public async Task<IReadOnlyList<GraphOverlayJob>> ListOverlayJobsAsync(string tenantId, GraphJobStatus? 
status, int limit, CancellationToken cancellationToken = default) + { + var filters = new List<FilterDefinition<BsonDocument>> + { + Filter.Eq("tenantId", tenantId), + Filter.Eq("kind", "overlay") + }; + + if (status is { } s) + { + filters.Add(Filter.Eq("status", s.ToString().ToLowerInvariant())); + } + + var filter = Filter.And(filters); + var cursor = await _collection.Find(filter) + .Sort(Builders<BsonDocument>.Sort.Descending("createdAt")) + .Limit(limit) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + + return cursor.Select(GraphJobDocumentMapper.ToGraphOverlayJob).ToArray(); + } + + public async Task<IReadOnlyList<GraphOverlayJob>> ListOverlayJobsAsync(GraphJobStatus? status, int limit, CancellationToken cancellationToken = default) + { + var filters = new List<FilterDefinition<BsonDocument>> + { + Filter.Eq("kind", "overlay") + }; + + if (status is { } s) + { + filters.Add(Filter.Eq("status", s.ToString().ToLowerInvariant())); + } + + var filter = Filter.And(filters); + var cursor = await _collection.Find(filter) + .Sort(Builders<BsonDocument>.Sort.Ascending("createdAt")) + .Limit(limit) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + + return cursor.Select(GraphJobDocumentMapper.ToGraphOverlayJob).ToArray(); + } + + public async Task<IReadOnlyCollection<GraphOverlayJob>> ListOverlayJobsAsync(string tenantId, CancellationToken cancellationToken = default) + { + var filter = Filter.And( + Filter.Eq("tenantId", tenantId), + Filter.Eq("kind", "overlay")); + + var cursor = await _collection.Find(filter) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + + return cursor.Select(GraphJobDocumentMapper.ToGraphOverlayJob).ToArray(); + } + + public async Task<bool> TryReplaceAsync(GraphBuildJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(job); + + var filter = Filter.And( + Filter.Eq("_id", job.Id), + Filter.Eq("tenantId", job.TenantId), + Filter.Eq("kind", "build"), + Filter.Eq("status", expectedStatus.ToString().ToLowerInvariant())); + + var document = GraphJobDocumentMapper.ToBsonDocument(job); + var result = await _collection.ReplaceOneAsync(filter, document, new ReplaceOptions { IsUpsert = false }, cancellationToken).ConfigureAwait(false); + return result.MatchedCount > 0; + } + + public async Task<bool> TryReplaceOverlayAsync(GraphOverlayJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(job); + + var filter = Filter.And( + Filter.Eq("_id", job.Id), + Filter.Eq("tenantId", job.TenantId), + Filter.Eq("kind", "overlay"), + Filter.Eq("status", expectedStatus.ToString().ToLowerInvariant())); + + var document = GraphJobDocumentMapper.ToBsonDocument(job); + var result = await _collection.ReplaceOneAsync(filter, document, new ReplaceOptions { IsUpsert = false }, cancellationToken).ConfigureAwait(false); + return result.MatchedCount > 0; + } + + private static FilterDefinition<BsonDocument> BuildIdFilter(string tenantId, string jobId, string kind) + => Filter.And( + Filter.Eq("_id", jobId), + Filter.Eq("tenantId", tenantId), + Filter.Eq("kind", kind)); +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Repositories/IAuditRepository.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/IAuditRepository.cs similarity index 96% rename from src/StellaOps.Scheduler.Storage.Mongo/Repositories/IAuditRepository.cs rename to 
src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/IAuditRepository.cs index 847e5f40..bb9aaaf8 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo/Repositories/IAuditRepository.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/IAuditRepository.cs @@ -1,18 +1,18 @@ -using MongoDB.Driver; -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.Storage.Mongo.Repositories; - -public interface IAuditRepository -{ - Task InsertAsync( - AuditRecord record, - IClientSessionHandle? session = null, - CancellationToken cancellationToken = default); - - Task<IReadOnlyList<AuditRecord>> ListAsync( - string tenantId, - AuditQueryOptions? options = null, - IClientSessionHandle? session = null, - CancellationToken cancellationToken = default); -} +using MongoDB.Driver; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.Storage.Mongo.Repositories; + +public interface IAuditRepository +{ + Task InsertAsync( + AuditRecord record, + IClientSessionHandle? session = null, + CancellationToken cancellationToken = default); + + Task<IReadOnlyList<AuditRecord>> ListAsync( + string tenantId, + AuditQueryOptions? options = null, + IClientSessionHandle? session = null, + CancellationToken cancellationToken = default); +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Repositories/IGraphJobRepository.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/IGraphJobRepository.cs similarity index 98% rename from src/StellaOps.Scheduler.Storage.Mongo/Repositories/IGraphJobRepository.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/IGraphJobRepository.cs index 443f955e..20413750 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo/Repositories/IGraphJobRepository.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/IGraphJobRepository.cs @@ -1,32 +1,32 @@ -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.Storage.Mongo.Repositories; - -public interface IGraphJobRepository -{ - Task InsertAsync(GraphBuildJob job, CancellationToken cancellationToken = default); - - Task InsertAsync(GraphOverlayJob job, CancellationToken cancellationToken = default); - - Task<GraphBuildJob?> GetBuildJobAsync(string tenantId, string jobId, CancellationToken cancellationToken = default); - - Task<GraphOverlayJob?> GetOverlayJobAsync(string tenantId, string jobId, CancellationToken cancellationToken = default); - - Task<GraphBuildJob> ReplaceAsync(GraphBuildJob job, CancellationToken cancellationToken = default); - - Task<GraphOverlayJob> ReplaceAsync(GraphOverlayJob job, CancellationToken cancellationToken = default); - - Task<IReadOnlyList<GraphBuildJob>> ListBuildJobsAsync(string tenantId, GraphJobStatus? status, int limit, CancellationToken cancellationToken = default); - - Task<IReadOnlyList<GraphBuildJob>> ListBuildJobsAsync(GraphJobStatus? status, int limit, CancellationToken cancellationToken = default); - - Task<bool> TryReplaceAsync(GraphBuildJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken = default); - - Task<IReadOnlyList<GraphOverlayJob>> ListOverlayJobsAsync(string tenantId, GraphJobStatus? status, int limit, CancellationToken cancellationToken = default); - - Task<IReadOnlyList<GraphOverlayJob>> ListOverlayJobsAsync(GraphJobStatus? 
status, int limit, CancellationToken cancellationToken = default); - - Task<IReadOnlyCollection<GraphOverlayJob>> ListOverlayJobsAsync(string tenantId, CancellationToken cancellationToken = default); - - Task<bool> TryReplaceOverlayAsync(GraphOverlayJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken = default); -} +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.Storage.Mongo.Repositories; + +public interface IGraphJobRepository +{ + Task InsertAsync(GraphBuildJob job, CancellationToken cancellationToken = default); + + Task InsertAsync(GraphOverlayJob job, CancellationToken cancellationToken = default); + + Task<GraphBuildJob?> GetBuildJobAsync(string tenantId, string jobId, CancellationToken cancellationToken = default); + + Task<GraphOverlayJob?> GetOverlayJobAsync(string tenantId, string jobId, CancellationToken cancellationToken = default); + + Task<GraphBuildJob> ReplaceAsync(GraphBuildJob job, CancellationToken cancellationToken = default); + + Task<GraphOverlayJob> ReplaceAsync(GraphOverlayJob job, CancellationToken cancellationToken = default); + + Task<IReadOnlyList<GraphBuildJob>> ListBuildJobsAsync(string tenantId, GraphJobStatus? status, int limit, CancellationToken cancellationToken = default); + + Task<IReadOnlyList<GraphBuildJob>> ListBuildJobsAsync(GraphJobStatus? status, int limit, CancellationToken cancellationToken = default); + + Task<bool> TryReplaceAsync(GraphBuildJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken = default); + + Task<IReadOnlyList<GraphOverlayJob>> ListOverlayJobsAsync(string tenantId, GraphJobStatus? status, int limit, CancellationToken cancellationToken = default); + + Task<IReadOnlyList<GraphOverlayJob>> ListOverlayJobsAsync(GraphJobStatus? status, int limit, CancellationToken cancellationToken = default); + + Task<IReadOnlyCollection<GraphOverlayJob>> ListOverlayJobsAsync(string tenantId, CancellationToken cancellationToken = default); + + Task<bool> TryReplaceOverlayAsync(GraphOverlayJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken = default); +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Repositories/IImpactSnapshotRepository.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/IImpactSnapshotRepository.cs similarity index 96% rename from src/StellaOps.Scheduler.Storage.Mongo/Repositories/IImpactSnapshotRepository.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/IImpactSnapshotRepository.cs index 831b9c00..8311b048 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo/Repositories/IImpactSnapshotRepository.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/IImpactSnapshotRepository.cs @@ -1,22 +1,22 @@ -using MongoDB.Driver; -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.Storage.Mongo.Repositories; - -public interface IImpactSnapshotRepository -{ - Task UpsertAsync( - ImpactSet snapshot, - IClientSessionHandle? session = null, - CancellationToken cancellationToken = default); - - Task<ImpactSet?> GetBySnapshotIdAsync( - string snapshotId, - IClientSessionHandle? session = null, - CancellationToken cancellationToken = default); - - Task<ImpactSet?> GetLatestBySelectorAsync( - Selector selector, - IClientSessionHandle? 
session = null, - CancellationToken cancellationToken = default); -} +using MongoDB.Driver; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.Storage.Mongo.Repositories; + +public interface IImpactSnapshotRepository +{ + Task UpsertAsync( + ImpactSet snapshot, + IClientSessionHandle? session = null, + CancellationToken cancellationToken = default); + + Task<ImpactSet?> GetBySnapshotIdAsync( + string snapshotId, + IClientSessionHandle? session = null, + CancellationToken cancellationToken = default); + + Task<ImpactSet?> GetLatestBySelectorAsync( + Selector selector, + IClientSessionHandle? session = null, + CancellationToken cancellationToken = default); +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Repositories/IPolicyRunJobRepository.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/IPolicyRunJobRepository.cs similarity index 96% rename from src/StellaOps.Scheduler.Storage.Mongo/Repositories/IPolicyRunJobRepository.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/IPolicyRunJobRepository.cs index b72f94cb..b3cbf503 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo/Repositories/IPolicyRunJobRepository.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/IPolicyRunJobRepository.cs @@ -1,48 +1,48 @@ -using MongoDB.Driver; -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.Storage.Mongo.Repositories; - -public interface IPolicyRunJobRepository -{ - Task InsertAsync( - PolicyRunJob job, - IClientSessionHandle? session = null, - CancellationToken cancellationToken = default); - - Task<PolicyRunJob?> GetAsync( - string tenantId, - string jobId, - IClientSessionHandle? session = null, - CancellationToken cancellationToken = default); - - Task<PolicyRunJob?> GetByRunIdAsync( - string tenantId, - string runId, - IClientSessionHandle? session = null, - CancellationToken cancellationToken = default); - - Task<PolicyRunJob?> LeaseAsync( - string leaseOwner, - DateTimeOffset now, - TimeSpan leaseDuration, - int maxAttempts, - IClientSessionHandle? session = null, - CancellationToken cancellationToken = default); - - Task<IReadOnlyList<PolicyRunJob>> ListAsync( - string tenantId, - string? policyId = null, - PolicyRunMode? mode = null, - IReadOnlyCollection<PolicyRunJobStatus>? statuses = null, - DateTimeOffset? queuedAfter = null, - int limit = 50, - IClientSessionHandle? session = null, - CancellationToken cancellationToken = default); - - Task<bool> ReplaceAsync( - PolicyRunJob job, - string? expectedLeaseOwner = null, - IClientSessionHandle? session = null, - CancellationToken cancellationToken = default); -} +using MongoDB.Driver; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.Storage.Mongo.Repositories; + +public interface IPolicyRunJobRepository +{ + Task InsertAsync( + PolicyRunJob job, + IClientSessionHandle? session = null, + CancellationToken cancellationToken = default); + + Task<PolicyRunJob?> GetAsync( + string tenantId, + string jobId, + IClientSessionHandle? session = null, + CancellationToken cancellationToken = default); + + Task<PolicyRunJob?> GetByRunIdAsync( + string tenantId, + string runId, + IClientSessionHandle? session = null, + CancellationToken cancellationToken = default); + + Task<PolicyRunJob?> LeaseAsync( + string leaseOwner, + DateTimeOffset now, + TimeSpan leaseDuration, + int maxAttempts, + IClientSessionHandle? 
session = null, + CancellationToken cancellationToken = default); + + Task<IReadOnlyList<PolicyRunJob>> ListAsync( + string tenantId, + string? policyId = null, + PolicyRunMode? mode = null, + IReadOnlyCollection<PolicyRunJobStatus>? statuses = null, + DateTimeOffset? queuedAfter = null, + int limit = 50, + IClientSessionHandle? session = null, + CancellationToken cancellationToken = default); + + Task<bool> ReplaceAsync( + PolicyRunJob job, + string? expectedLeaseOwner = null, + IClientSessionHandle? session = null, + CancellationToken cancellationToken = default); +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Repositories/IRunRepository.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/IRunRepository.cs similarity index 96% rename from src/StellaOps.Scheduler.Storage.Mongo/Repositories/IRunRepository.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/IRunRepository.cs index 84a881d6..9771a58f 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo/Repositories/IRunRepository.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/IRunRepository.cs @@ -1,35 +1,35 @@ -using MongoDB.Driver; -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.Storage.Mongo.Repositories; - -public interface IRunRepository -{ - Task InsertAsync( - Run run, - IClientSessionHandle? session = null, - CancellationToken cancellationToken = default); - - Task<bool> UpdateAsync( - Run run, - IClientSessionHandle? session = null, - CancellationToken cancellationToken = default); - - Task<Run?> GetAsync( - string tenantId, - string runId, - IClientSessionHandle? session = null, - CancellationToken cancellationToken = default); - - Task<IReadOnlyList<Run>> ListAsync( - string tenantId, - RunQueryOptions? options = null, - IClientSessionHandle? session = null, - CancellationToken cancellationToken = default); - - Task<IReadOnlyList<Run>> ListByStateAsync( - RunState state, - int limit = 50, - IClientSessionHandle? session = null, - CancellationToken cancellationToken = default); -} +using MongoDB.Driver; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.Storage.Mongo.Repositories; + +public interface IRunRepository +{ + Task InsertAsync( + Run run, + IClientSessionHandle? session = null, + CancellationToken cancellationToken = default); + + Task<bool> UpdateAsync( + Run run, + IClientSessionHandle? session = null, + CancellationToken cancellationToken = default); + + Task<Run?> GetAsync( + string tenantId, + string runId, + IClientSessionHandle? session = null, + CancellationToken cancellationToken = default); + + Task<IReadOnlyList<Run>> ListAsync( + string tenantId, + RunQueryOptions? options = null, + IClientSessionHandle? session = null, + CancellationToken cancellationToken = default); + + Task<IReadOnlyList<Run>> ListByStateAsync( + RunState state, + int limit = 50, + IClientSessionHandle? 
session = null, + CancellationToken cancellationToken = default); +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Repositories/IRunSummaryRepository.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/IRunSummaryRepository.cs similarity index 96% rename from src/StellaOps.Scheduler.Storage.Mongo/Repositories/IRunSummaryRepository.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/IRunSummaryRepository.cs index 8c3f291e..1e829b78 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo/Repositories/IRunSummaryRepository.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/IRunSummaryRepository.cs @@ -1,19 +1,19 @@ -using StellaOps.Scheduler.Storage.Mongo.Documents; - -namespace StellaOps.Scheduler.Storage.Mongo.Repositories; - -internal interface IRunSummaryRepository -{ - Task<RunSummaryDocument?> GetAsync( - string tenantId, - string scheduleId, - CancellationToken cancellationToken = default); - - Task<IReadOnlyList<RunSummaryDocument>> ListAsync( - string tenantId, - CancellationToken cancellationToken = default); - - Task UpsertAsync( - RunSummaryDocument document, - CancellationToken cancellationToken = default); -} +using StellaOps.Scheduler.Storage.Mongo.Documents; + +namespace StellaOps.Scheduler.Storage.Mongo.Repositories; + +internal interface IRunSummaryRepository +{ + Task<RunSummaryDocument?> GetAsync( + string tenantId, + string scheduleId, + CancellationToken cancellationToken = default); + + Task<IReadOnlyList<RunSummaryDocument>> ListAsync( + string tenantId, + CancellationToken cancellationToken = default); + + Task UpsertAsync( + RunSummaryDocument document, + CancellationToken cancellationToken = default); +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Repositories/IScheduleRepository.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/IScheduleRepository.cs similarity index 96% rename from src/StellaOps.Scheduler.Storage.Mongo/Repositories/IScheduleRepository.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/IScheduleRepository.cs index 80d358ef..11ed839c 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo/Repositories/IScheduleRepository.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/IScheduleRepository.cs @@ -1,32 +1,32 @@ -using MongoDB.Driver; -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.Storage.Mongo.Repositories; - -public interface IScheduleRepository -{ - Task UpsertAsync( - Schedule schedule, - IClientSessionHandle? session = null, - CancellationToken cancellationToken = default); - - Task<Schedule?> GetAsync( - string tenantId, - string scheduleId, - IClientSessionHandle? session = null, - CancellationToken cancellationToken = default); - - Task<IReadOnlyList<Schedule>> ListAsync( - string tenantId, - ScheduleQueryOptions? options = null, - IClientSessionHandle? session = null, - CancellationToken cancellationToken = default); - - Task<bool> SoftDeleteAsync( - string tenantId, - string scheduleId, - string deletedBy, - DateTimeOffset deletedAt, - IClientSessionHandle? session = null, - CancellationToken cancellationToken = default); -} +using MongoDB.Driver; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.Storage.Mongo.Repositories; + +public interface IScheduleRepository +{ + Task UpsertAsync( + Schedule schedule, + IClientSessionHandle? 
session = null, + CancellationToken cancellationToken = default); + + Task<Schedule?> GetAsync( + string tenantId, + string scheduleId, + IClientSessionHandle? session = null, + CancellationToken cancellationToken = default); + + Task<IReadOnlyList<Schedule>> ListAsync( + string tenantId, + ScheduleQueryOptions? options = null, + IClientSessionHandle? session = null, + CancellationToken cancellationToken = default); + + Task<bool> SoftDeleteAsync( + string tenantId, + string scheduleId, + string deletedBy, + DateTimeOffset deletedAt, + IClientSessionHandle? session = null, + CancellationToken cancellationToken = default); +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Repositories/ImpactSnapshotRepository.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/ImpactSnapshotRepository.cs similarity index 97% rename from src/StellaOps.Scheduler.Storage.Mongo/Repositories/ImpactSnapshotRepository.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/ImpactSnapshotRepository.cs index 80181961..cd0c8a9a 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo/Repositories/ImpactSnapshotRepository.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/ImpactSnapshotRepository.cs @@ -1,94 +1,94 @@ -using System; -using MongoDB.Bson; -using MongoDB.Driver; -using StellaOps.Scheduler.Models; -using StellaOps.Scheduler.Storage.Mongo.Internal; -using StellaOps.Scheduler.Storage.Mongo.Serialization; - -namespace StellaOps.Scheduler.Storage.Mongo.Repositories; - -internal sealed class ImpactSnapshotRepository : IImpactSnapshotRepository -{ - private static readonly FilterDefinitionBuilder<BsonDocument> Filter = Builders<BsonDocument>.Filter; - private static readonly SortDefinitionBuilder<BsonDocument> Sort = Builders<BsonDocument>.Sort; - - private readonly IMongoCollection<BsonDocument> _collection; - - public ImpactSnapshotRepository(SchedulerMongoContext context) - { - if (context is null) - { - throw new ArgumentNullException(nameof(context)); - } - - _collection = context.Database.GetCollection<BsonDocument>(context.Options.ImpactSnapshotsCollection); - } - - public async Task UpsertAsync( - ImpactSet snapshot, - IClientSessionHandle? session = null, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(snapshot); - - var document = ImpactSetDocumentMapper.ToBsonDocument(snapshot); - var filter = Filter.Eq("_id", document["_id"]); - var options = new ReplaceOptions { IsUpsert = true }; - - if (session is null) - { - await _collection.ReplaceOneAsync(filter, document, options, cancellationToken).ConfigureAwait(false); - } - else - { - await _collection.ReplaceOneAsync(session, filter, document, options, cancellationToken).ConfigureAwait(false); - } - } - - public async Task<ImpactSet?> GetBySnapshotIdAsync( - string snapshotId, - IClientSessionHandle? session = null, - CancellationToken cancellationToken = default) - { - if (string.IsNullOrWhiteSpace(snapshotId)) - { - throw new ArgumentException("Snapshot id must be provided.", nameof(snapshotId)); - } - - var filter = Filter.Eq("_id", snapshotId); - var query = session is null - ? _collection.Find(filter) - : _collection.Find(session, filter); - - var document = await query.FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); - return document is null ? null : ImpactSetDocumentMapper.FromBsonDocument(document); - } - - public async Task<ImpactSet?> GetLatestBySelectorAsync( - Selector selector, - IClientSessionHandle? 
session = null, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(selector); - if (string.IsNullOrWhiteSpace(selector.TenantId)) - { - throw new ArgumentException("Selector tenantId is required to resolve impact snapshots.", nameof(selector)); - } - - var digest = ImpactSetDocumentMapper.ComputeSelectorDigest(selector); - var filters = Filter.And( - Filter.Eq("selectorDigest", digest), - Filter.Eq("selector.tenantId", selector.TenantId)); - - var find = session is null - ? _collection.Find(filters) - : _collection.Find(session, filters); - - var document = await find - .Sort(Sort.Descending("generatedAt")) - .FirstOrDefaultAsync(cancellationToken) - .ConfigureAwait(false); - - return document is null ? null : ImpactSetDocumentMapper.FromBsonDocument(document); - } -} +using System; +using MongoDB.Bson; +using MongoDB.Driver; +using StellaOps.Scheduler.Models; +using StellaOps.Scheduler.Storage.Mongo.Internal; +using StellaOps.Scheduler.Storage.Mongo.Serialization; + +namespace StellaOps.Scheduler.Storage.Mongo.Repositories; + +internal sealed class ImpactSnapshotRepository : IImpactSnapshotRepository +{ + private static readonly FilterDefinitionBuilder<BsonDocument> Filter = Builders<BsonDocument>.Filter; + private static readonly SortDefinitionBuilder<BsonDocument> Sort = Builders<BsonDocument>.Sort; + + private readonly IMongoCollection<BsonDocument> _collection; + + public ImpactSnapshotRepository(SchedulerMongoContext context) + { + if (context is null) + { + throw new ArgumentNullException(nameof(context)); + } + + _collection = context.Database.GetCollection<BsonDocument>(context.Options.ImpactSnapshotsCollection); + } + + public async Task UpsertAsync( + ImpactSet snapshot, + IClientSessionHandle? session = null, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(snapshot); + + var document = ImpactSetDocumentMapper.ToBsonDocument(snapshot); + var filter = Filter.Eq("_id", document["_id"]); + var options = new ReplaceOptions { IsUpsert = true }; + + if (session is null) + { + await _collection.ReplaceOneAsync(filter, document, options, cancellationToken).ConfigureAwait(false); + } + else + { + await _collection.ReplaceOneAsync(session, filter, document, options, cancellationToken).ConfigureAwait(false); + } + } + + public async Task<ImpactSet?> GetBySnapshotIdAsync( + string snapshotId, + IClientSessionHandle? session = null, + CancellationToken cancellationToken = default) + { + if (string.IsNullOrWhiteSpace(snapshotId)) + { + throw new ArgumentException("Snapshot id must be provided.", nameof(snapshotId)); + } + + var filter = Filter.Eq("_id", snapshotId); + var query = session is null + ? _collection.Find(filter) + : _collection.Find(session, filter); + + var document = await query.FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); + return document is null ? null : ImpactSetDocumentMapper.FromBsonDocument(document); + } + + public async Task<ImpactSet?> GetLatestBySelectorAsync( + Selector selector, + IClientSessionHandle? 
session = null, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(selector); + if (string.IsNullOrWhiteSpace(selector.TenantId)) + { + throw new ArgumentException("Selector tenantId is required to resolve impact snapshots.", nameof(selector)); + } + + var digest = ImpactSetDocumentMapper.ComputeSelectorDigest(selector); + var filters = Filter.And( + Filter.Eq("selectorDigest", digest), + Filter.Eq("selector.tenantId", selector.TenantId)); + + var find = session is null + ? _collection.Find(filters) + : _collection.Find(session, filters); + + var document = await find + .Sort(Sort.Descending("generatedAt")) + .FirstOrDefaultAsync(cancellationToken) + .ConfigureAwait(false); + + return document is null ? null : ImpactSetDocumentMapper.FromBsonDocument(document); + } +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Repositories/PolicyRunJobRepository.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/PolicyRunJobRepository.cs similarity index 97% rename from src/StellaOps.Scheduler.Storage.Mongo/Repositories/PolicyRunJobRepository.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/PolicyRunJobRepository.cs index fd6f8d32..6bc2bfcb 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo/Repositories/PolicyRunJobRepository.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/PolicyRunJobRepository.cs @@ -1,249 +1,249 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using MongoDB.Bson; -using MongoDB.Driver; -using StellaOps.Scheduler.Models; -using StellaOps.Scheduler.Storage.Mongo.Internal; -using StellaOps.Scheduler.Storage.Mongo.Serialization; - -namespace StellaOps.Scheduler.Storage.Mongo.Repositories; - -internal sealed class PolicyRunJobRepository : IPolicyRunJobRepository -{ - private static readonly FilterDefinitionBuilder<BsonDocument> Filter = Builders<BsonDocument>.Filter; - private static readonly SortDefinitionBuilder<BsonDocument> Sort = Builders<BsonDocument>.Sort; - - private readonly IMongoCollection<BsonDocument> _collection; - - public PolicyRunJobRepository(SchedulerMongoContext context) - { - if (context is null) - { - throw new ArgumentNullException(nameof(context)); - } - - _collection = context.Database.GetCollection<BsonDocument>(context.Options.PolicyJobsCollection); - } - - public async Task InsertAsync( - PolicyRunJob job, - IClientSessionHandle? session = null, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(job); - var document = PolicyRunJobDocumentMapper.ToBsonDocument(job); - - if (session is null) - { - await _collection.InsertOneAsync(document, cancellationToken: cancellationToken).ConfigureAwait(false); - } - else - { - await _collection.InsertOneAsync(session, document, cancellationToken: cancellationToken).ConfigureAwait(false); - } - } - - public async Task<PolicyRunJob?> GetAsync( - string tenantId, - string jobId, - IClientSessionHandle? session = null, - CancellationToken cancellationToken = default) - { - if (string.IsNullOrWhiteSpace(tenantId)) - { - throw new ArgumentException("Tenant id must be provided.", nameof(tenantId)); - } - - if (string.IsNullOrWhiteSpace(jobId)) - { - throw new ArgumentException("Job id must be provided.", nameof(jobId)); - } - - var filter = Filter.And( - Filter.Eq("_id", jobId), - Filter.Eq("tenantId", tenantId)); - - var query = session is null - ? 
_collection.Find(filter) - : _collection.Find(session, filter); - - var document = await query.FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); - return document is null ? null : PolicyRunJobDocumentMapper.FromBsonDocument(document); - } - - public async Task<PolicyRunJob?> GetByRunIdAsync( - string tenantId, - string runId, - IClientSessionHandle? session = null, - CancellationToken cancellationToken = default) - { - if (string.IsNullOrWhiteSpace(tenantId)) - { - throw new ArgumentException("Tenant id must be provided.", nameof(tenantId)); - } - - if (string.IsNullOrWhiteSpace(runId)) - { - throw new ArgumentException("Run id must be provided.", nameof(runId)); - } - - var filter = Filter.And( - Filter.Eq("tenantId", tenantId), - Filter.Eq("runId", runId)); - - var query = session is null - ? _collection.Find(filter) - : _collection.Find(session, filter); - - var document = await query.FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); - return document is null ? null : PolicyRunJobDocumentMapper.FromBsonDocument(document); - } - - public async Task<PolicyRunJob?> LeaseAsync( - string leaseOwner, - DateTimeOffset now, - TimeSpan leaseDuration, - int maxAttempts, - IClientSessionHandle? session = null, - CancellationToken cancellationToken = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(leaseOwner); - if (leaseDuration <= TimeSpan.Zero) - { - throw new ArgumentOutOfRangeException(nameof(leaseDuration), leaseDuration, "Lease duration must be positive."); - } - - if (maxAttempts <= 0) - { - throw new ArgumentOutOfRangeException(nameof(maxAttempts), maxAttempts, "Max attempts must be greater than zero."); - } - - var statusFilter = Filter.In("status", new BsonArray(new[] { "pending", "failed" })); - var availabilityFilter = Filter.Lte("availableAt", now.UtcDateTime); - var leaseFilter = Filter.Or( - Filter.Exists("leaseOwner", false), - Filter.Eq("leaseOwner", BsonNull.Value), - Filter.Lt("leaseExpiresAt", now.UtcDateTime)); - var attemptsFilter = Filter.Lt("attemptCount", maxAttempts); - - var filter = Filter.And(statusFilter, availabilityFilter, leaseFilter, attemptsFilter); - var update = Builders<BsonDocument>.Update - .Set("status", "dispatching") - .Set("leaseOwner", leaseOwner) - .Set("leaseExpiresAt", now.Add(leaseDuration).UtcDateTime) - .Set("updatedAt", now.UtcDateTime); - - var options = new FindOneAndUpdateOptions<BsonDocument> - { - ReturnDocument = ReturnDocument.After, - Sort = Sort.Descending("priorityRank").Ascending("createdAt") - }; - - var document = session is null - ? await _collection.FindOneAndUpdateAsync(filter, update, options, cancellationToken).ConfigureAwait(false) - : await _collection.FindOneAndUpdateAsync(session, filter, update, options, cancellationToken).ConfigureAwait(false); - - return document is null ? null : PolicyRunJobDocumentMapper.FromBsonDocument(document); - } - - public async Task<IReadOnlyList<PolicyRunJob>> ListAsync( - string tenantId, - string? policyId = null, - PolicyRunMode? mode = null, - IReadOnlyCollection<PolicyRunJobStatus>? statuses = null, - DateTimeOffset? queuedAfter = null, - int limit = 50, - IClientSessionHandle? 
session = null, - CancellationToken cancellationToken = default) - { - if (string.IsNullOrWhiteSpace(tenantId)) - { - throw new ArgumentException("Tenant id must be provided.", nameof(tenantId)); - } - - if (limit <= 0) - { - throw new ArgumentOutOfRangeException(nameof(limit), limit, "Limit must be greater than zero."); - } - - var filters = new List<FilterDefinition<BsonDocument>> - { - Filter.Eq("tenantId", tenantId) - }; - - if (!string.IsNullOrWhiteSpace(policyId)) - { - filters.Add(Filter.Eq("policyId", policyId)); - } - - if (mode is not null) - { - filters.Add(Filter.Eq("mode", mode.ToString()!.ToLowerInvariant())); - } - - if (statuses is { Count: > 0 }) - { - var array = new BsonArray(statuses.Select(static status => status.ToString().ToLowerInvariant())); - filters.Add(Filter.In("status", array)); - } - - if (queuedAfter is { } since) - { - filters.Add(Filter.Gte("queuedAt", since.UtcDateTime)); - } - - var filter = Filter.And(filters); - var sort = Sort.Descending("queuedAt").Descending("createdAt"); - - var query = session is null - ? _collection.Find(filter) - : _collection.Find(session, filter); - - var documents = await query - .Sort(sort) - .Limit(limit) - .ToListAsync(cancellationToken) - .ConfigureAwait(false); - - return documents - .Select(PolicyRunJobDocumentMapper.FromBsonDocument) - .ToList(); - } - - public async Task<bool> ReplaceAsync( - PolicyRunJob job, - string? expectedLeaseOwner = null, - IClientSessionHandle? session = null, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(job); - - var filters = new List<FilterDefinition<BsonDocument>> - { - Filter.Eq("_id", job.Id), - Filter.Eq("tenantId", job.TenantId) - }; - - if (!string.IsNullOrEmpty(expectedLeaseOwner)) - { - filters.Add(Filter.Eq("leaseOwner", expectedLeaseOwner)); - } - - var filter = Filter.And(filters); - var document = PolicyRunJobDocumentMapper.ToBsonDocument(job); - var options = new ReplaceOptions { IsUpsert = false }; - - ReplaceOneResult result; - if (session is null) - { - result = await _collection.ReplaceOneAsync(filter, document, options, cancellationToken).ConfigureAwait(false); - } - else - { - result = await _collection.ReplaceOneAsync(session, filter, document, options, cancellationToken).ConfigureAwait(false); - } - - return result.MatchedCount > 0; - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using MongoDB.Bson; +using MongoDB.Driver; +using StellaOps.Scheduler.Models; +using StellaOps.Scheduler.Storage.Mongo.Internal; +using StellaOps.Scheduler.Storage.Mongo.Serialization; + +namespace StellaOps.Scheduler.Storage.Mongo.Repositories; + +internal sealed class PolicyRunJobRepository : IPolicyRunJobRepository +{ + private static readonly FilterDefinitionBuilder<BsonDocument> Filter = Builders<BsonDocument>.Filter; + private static readonly SortDefinitionBuilder<BsonDocument> Sort = Builders<BsonDocument>.Sort; + + private readonly IMongoCollection<BsonDocument> _collection; + + public PolicyRunJobRepository(SchedulerMongoContext context) + { + if (context is null) + { + throw new ArgumentNullException(nameof(context)); + } + + _collection = context.Database.GetCollection<BsonDocument>(context.Options.PolicyJobsCollection); + } + + public async Task InsertAsync( + PolicyRunJob job, + IClientSessionHandle? 
session = null, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(job); + var document = PolicyRunJobDocumentMapper.ToBsonDocument(job); + + if (session is null) + { + await _collection.InsertOneAsync(document, cancellationToken: cancellationToken).ConfigureAwait(false); + } + else + { + await _collection.InsertOneAsync(session, document, cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + + public async Task<PolicyRunJob?> GetAsync( + string tenantId, + string jobId, + IClientSessionHandle? session = null, + CancellationToken cancellationToken = default) + { + if (string.IsNullOrWhiteSpace(tenantId)) + { + throw new ArgumentException("Tenant id must be provided.", nameof(tenantId)); + } + + if (string.IsNullOrWhiteSpace(jobId)) + { + throw new ArgumentException("Job id must be provided.", nameof(jobId)); + } + + var filter = Filter.And( + Filter.Eq("_id", jobId), + Filter.Eq("tenantId", tenantId)); + + var query = session is null + ? _collection.Find(filter) + : _collection.Find(session, filter); + + var document = await query.FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); + return document is null ? null : PolicyRunJobDocumentMapper.FromBsonDocument(document); + } + + public async Task<PolicyRunJob?> GetByRunIdAsync( + string tenantId, + string runId, + IClientSessionHandle? session = null, + CancellationToken cancellationToken = default) + { + if (string.IsNullOrWhiteSpace(tenantId)) + { + throw new ArgumentException("Tenant id must be provided.", nameof(tenantId)); + } + + if (string.IsNullOrWhiteSpace(runId)) + { + throw new ArgumentException("Run id must be provided.", nameof(runId)); + } + + var filter = Filter.And( + Filter.Eq("tenantId", tenantId), + Filter.Eq("runId", runId)); + + var query = session is null + ? _collection.Find(filter) + : _collection.Find(session, filter); + + var document = await query.FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); + return document is null ? null : PolicyRunJobDocumentMapper.FromBsonDocument(document); + } + + public async Task<PolicyRunJob?> LeaseAsync( + string leaseOwner, + DateTimeOffset now, + TimeSpan leaseDuration, + int maxAttempts, + IClientSessionHandle? 
session = null, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(leaseOwner); + if (leaseDuration <= TimeSpan.Zero) + { + throw new ArgumentOutOfRangeException(nameof(leaseDuration), leaseDuration, "Lease duration must be positive."); + } + + if (maxAttempts <= 0) + { + throw new ArgumentOutOfRangeException(nameof(maxAttempts), maxAttempts, "Max attempts must be greater than zero."); + } + + var statusFilter = Filter.In("status", new BsonArray(new[] { "pending", "failed" })); + var availabilityFilter = Filter.Lte("availableAt", now.UtcDateTime); + var leaseFilter = Filter.Or( + Filter.Exists("leaseOwner", false), + Filter.Eq("leaseOwner", BsonNull.Value), + Filter.Lt("leaseExpiresAt", now.UtcDateTime)); + var attemptsFilter = Filter.Lt("attemptCount", maxAttempts); + + var filter = Filter.And(statusFilter, availabilityFilter, leaseFilter, attemptsFilter); + var update = Builders<BsonDocument>.Update + .Set("status", "dispatching") + .Set("leaseOwner", leaseOwner) + .Set("leaseExpiresAt", now.Add(leaseDuration).UtcDateTime) + .Set("updatedAt", now.UtcDateTime); + + var options = new FindOneAndUpdateOptions<BsonDocument> + { + ReturnDocument = ReturnDocument.After, + Sort = Sort.Descending("priorityRank").Ascending("createdAt") + }; + + var document = session is null + ? await _collection.FindOneAndUpdateAsync(filter, update, options, cancellationToken).ConfigureAwait(false) + : await _collection.FindOneAndUpdateAsync(session, filter, update, options, cancellationToken).ConfigureAwait(false); + + return document is null ? null : PolicyRunJobDocumentMapper.FromBsonDocument(document); + } + + public async Task<IReadOnlyList<PolicyRunJob>> ListAsync( + string tenantId, + string? policyId = null, + PolicyRunMode? mode = null, + IReadOnlyCollection<PolicyRunJobStatus>? statuses = null, + DateTimeOffset? queuedAfter = null, + int limit = 50, + IClientSessionHandle? session = null, + CancellationToken cancellationToken = default) + { + if (string.IsNullOrWhiteSpace(tenantId)) + { + throw new ArgumentException("Tenant id must be provided.", nameof(tenantId)); + } + + if (limit <= 0) + { + throw new ArgumentOutOfRangeException(nameof(limit), limit, "Limit must be greater than zero."); + } + + var filters = new List<FilterDefinition<BsonDocument>> + { + Filter.Eq("tenantId", tenantId) + }; + + if (!string.IsNullOrWhiteSpace(policyId)) + { + filters.Add(Filter.Eq("policyId", policyId)); + } + + if (mode is not null) + { + filters.Add(Filter.Eq("mode", mode.ToString()!.ToLowerInvariant())); + } + + if (statuses is { Count: > 0 }) + { + var array = new BsonArray(statuses.Select(static status => status.ToString().ToLowerInvariant())); + filters.Add(Filter.In("status", array)); + } + + if (queuedAfter is { } since) + { + filters.Add(Filter.Gte("queuedAt", since.UtcDateTime)); + } + + var filter = Filter.And(filters); + var sort = Sort.Descending("queuedAt").Descending("createdAt"); + + var query = session is null + ? _collection.Find(filter) + : _collection.Find(session, filter); + + var documents = await query + .Sort(sort) + .Limit(limit) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + + return documents + .Select(PolicyRunJobDocumentMapper.FromBsonDocument) + .ToList(); + } + + public async Task<bool> ReplaceAsync( + PolicyRunJob job, + string? expectedLeaseOwner = null, + IClientSessionHandle? 
session = null, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(job); + + var filters = new List<FilterDefinition<BsonDocument>> + { + Filter.Eq("_id", job.Id), + Filter.Eq("tenantId", job.TenantId) + }; + + if (!string.IsNullOrEmpty(expectedLeaseOwner)) + { + filters.Add(Filter.Eq("leaseOwner", expectedLeaseOwner)); + } + + var filter = Filter.And(filters); + var document = PolicyRunJobDocumentMapper.ToBsonDocument(job); + var options = new ReplaceOptions { IsUpsert = false }; + + ReplaceOneResult result; + if (session is null) + { + result = await _collection.ReplaceOneAsync(filter, document, options, cancellationToken).ConfigureAwait(false); + } + else + { + result = await _collection.ReplaceOneAsync(session, filter, document, options, cancellationToken).ConfigureAwait(false); + } + + return result.MatchedCount > 0; + } +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Repositories/RunQueryOptions.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/RunQueryOptions.cs similarity index 96% rename from src/StellaOps.Scheduler.Storage.Mongo/Repositories/RunQueryOptions.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/RunQueryOptions.cs index 6ffe0ca1..dc36a743 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo/Repositories/RunQueryOptions.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/RunQueryOptions.cs @@ -1,35 +1,35 @@ -using System.Collections.Immutable; -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.Storage.Mongo.Repositories; - -/// <summary> -/// Filters applied when listing scheduler runs. -/// </summary> -public sealed class RunQueryOptions -{ - /// <summary> - /// Optional schedule identifier to scope the list. - /// </summary> - public string? ScheduleId { get; init; } - - /// <summary> - /// Optional set of run states to include. When empty all states are returned. - /// </summary> - public ImmutableArray<RunState> States { get; init; } = ImmutableArray<RunState>.Empty; - - /// <summary> - /// Optional lower bound for creation timestamp (UTC). - /// </summary> - public DateTimeOffset? CreatedAfter { get; init; } - - /// <summary> - /// Maximum number of runs to return (default 50 when unspecified). - /// </summary> - public int? Limit { get; init; } - - /// <summary> - /// Sort order flag. Defaults to descending by createdAt. - /// </summary> - public bool SortAscending { get; init; } -} +using System.Collections.Immutable; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.Storage.Mongo.Repositories; + +/// <summary> +/// Filters applied when listing scheduler runs. +/// </summary> +public sealed class RunQueryOptions +{ + /// <summary> + /// Optional schedule identifier to scope the list. + /// </summary> + public string? ScheduleId { get; init; } + + /// <summary> + /// Optional set of run states to include. When empty all states are returned. + /// </summary> + public ImmutableArray<RunState> States { get; init; } = ImmutableArray<RunState>.Empty; + + /// <summary> + /// Optional lower bound for creation timestamp (UTC). + /// </summary> + public DateTimeOffset? CreatedAfter { get; init; } + + /// <summary> + /// Maximum number of runs to return (default 50 when unspecified). + /// </summary> + public int? Limit { get; init; } + + /// <summary> + /// Sort order flag. Defaults to descending by createdAt. 
+ /// </summary> + public bool SortAscending { get; init; } +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Repositories/RunRepository.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/RunRepository.cs similarity index 97% rename from src/StellaOps.Scheduler.Storage.Mongo/Repositories/RunRepository.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/RunRepository.cs index cc9e7224..8cf7189f 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo/Repositories/RunRepository.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/RunRepository.cs @@ -1,176 +1,176 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using MongoDB.Bson; -using MongoDB.Driver; -using StellaOps.Scheduler.Models; -using StellaOps.Scheduler.Storage.Mongo.Internal; -using StellaOps.Scheduler.Storage.Mongo.Serialization; - -namespace StellaOps.Scheduler.Storage.Mongo.Repositories; - -internal sealed class RunRepository : IRunRepository -{ - private const int DefaultListLimit = 50; - - private static readonly FilterDefinitionBuilder<BsonDocument> Filter = Builders<BsonDocument>.Filter; - private static readonly SortDefinitionBuilder<BsonDocument> Sort = Builders<BsonDocument>.Sort; - - private readonly IMongoCollection<BsonDocument> _collection; - - public RunRepository(SchedulerMongoContext context) - { - if (context is null) - { - throw new ArgumentNullException(nameof(context)); - } - - _collection = context.Database.GetCollection<BsonDocument>(context.Options.RunsCollection); - } - - public async Task InsertAsync( - Run run, - IClientSessionHandle? session = null, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(run); - - var document = RunDocumentMapper.ToBsonDocument(run); - if (session is null) - { - await _collection.InsertOneAsync(document, cancellationToken: cancellationToken).ConfigureAwait(false); - } - else - { - await _collection.InsertOneAsync(session, document, cancellationToken: cancellationToken).ConfigureAwait(false); - } - } - - public async Task<bool> UpdateAsync( - Run run, - IClientSessionHandle? session = null, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(run); - - var document = RunDocumentMapper.ToBsonDocument(run); - var filter = Filter.And( - Filter.Eq("_id", run.Id), - Filter.Eq("tenantId", run.TenantId)); - - var options = new ReplaceOptions { IsUpsert = false }; - ReplaceOneResult result; - if (session is null) - { - result = await _collection.ReplaceOneAsync(filter, document, options, cancellationToken).ConfigureAwait(false); - } - else - { - result = await _collection.ReplaceOneAsync(session, filter, document, options, cancellationToken).ConfigureAwait(false); - } - - return result.MatchedCount > 0; - } - - public async Task<Run?> GetAsync( - string tenantId, - string runId, - IClientSessionHandle? session = null, - CancellationToken cancellationToken = default) - { - if (string.IsNullOrWhiteSpace(tenantId)) - { - throw new ArgumentException("Tenant id must be provided.", nameof(tenantId)); - } - - if (string.IsNullOrWhiteSpace(runId)) - { - throw new ArgumentException("Run id must be provided.", nameof(runId)); - } - - var filter = Filter.And( - Filter.Eq("_id", runId), - Filter.Eq("tenantId", tenantId)); - - var query = session is null - ? 
_collection.Find(filter) - : _collection.Find(session, filter); - - var document = await query.FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); - return document is null ? null : RunDocumentMapper.FromBsonDocument(document); - } - - public async Task<IReadOnlyList<Run>> ListAsync( - string tenantId, - RunQueryOptions? options = null, - IClientSessionHandle? session = null, - CancellationToken cancellationToken = default) - { - if (string.IsNullOrWhiteSpace(tenantId)) - { - throw new ArgumentException("Tenant id must be provided.", nameof(tenantId)); - } - - options ??= new RunQueryOptions(); - var filters = new List<FilterDefinition<BsonDocument>> - { - Filter.Eq("tenantId", tenantId) - }; - - if (!string.IsNullOrWhiteSpace(options.ScheduleId)) - { - filters.Add(Filter.Eq("scheduleId", options.ScheduleId)); - } - - if (options.States.Length > 0) - { - filters.Add(Filter.In("state", options.States.Select(state => state.ToString().ToLowerInvariant()))); - } - - if (options.CreatedAfter is { } createdAfter) - { - filters.Add(Filter.Gt("createdAt", createdAfter.ToUniversalTime().UtcDateTime)); - } - - var combined = Filter.And(filters); - - var find = session is null - ? _collection.Find(combined) - : _collection.Find(session, combined); - - var limit = options.Limit is { } specified && specified > 0 ? specified : DefaultListLimit; - find = find.Limit(limit); - - var sortDefinition = options.SortAscending - ? Sort.Ascending("createdAt") - : Sort.Descending("createdAt"); - - find = find.Sort(sortDefinition); - - var documents = await find.ToListAsync(cancellationToken).ConfigureAwait(false); - return documents.Select(RunDocumentMapper.FromBsonDocument).ToArray(); - } - - public async Task<IReadOnlyList<Run>> ListByStateAsync( - RunState state, - int limit = 50, - IClientSessionHandle? session = null, - CancellationToken cancellationToken = default) - { - if (limit <= 0) - { - throw new ArgumentOutOfRangeException(nameof(limit), limit, "Limit must be greater than zero."); - } - - var filter = Filter.Eq("state", state.ToString().ToLowerInvariant()); - var find = session is null - ? _collection.Find(filter) - : _collection.Find(session, filter); - - find = find.Sort(Sort.Ascending("createdAt")); - find = find.Limit(limit); - - var documents = await find.ToListAsync(cancellationToken).ConfigureAwait(false); - return documents.Select(RunDocumentMapper.FromBsonDocument).ToArray(); - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using MongoDB.Bson; +using MongoDB.Driver; +using StellaOps.Scheduler.Models; +using StellaOps.Scheduler.Storage.Mongo.Internal; +using StellaOps.Scheduler.Storage.Mongo.Serialization; + +namespace StellaOps.Scheduler.Storage.Mongo.Repositories; + +internal sealed class RunRepository : IRunRepository +{ + private const int DefaultListLimit = 50; + + private static readonly FilterDefinitionBuilder<BsonDocument> Filter = Builders<BsonDocument>.Filter; + private static readonly SortDefinitionBuilder<BsonDocument> Sort = Builders<BsonDocument>.Sort; + + private readonly IMongoCollection<BsonDocument> _collection; + + public RunRepository(SchedulerMongoContext context) + { + if (context is null) + { + throw new ArgumentNullException(nameof(context)); + } + + _collection = context.Database.GetCollection<BsonDocument>(context.Options.RunsCollection); + } + + public async Task InsertAsync( + Run run, + IClientSessionHandle? 
session = null, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(run); + + var document = RunDocumentMapper.ToBsonDocument(run); + if (session is null) + { + await _collection.InsertOneAsync(document, cancellationToken: cancellationToken).ConfigureAwait(false); + } + else + { + await _collection.InsertOneAsync(session, document, cancellationToken: cancellationToken).ConfigureAwait(false); + } + } + + public async Task<bool> UpdateAsync( + Run run, + IClientSessionHandle? session = null, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(run); + + var document = RunDocumentMapper.ToBsonDocument(run); + var filter = Filter.And( + Filter.Eq("_id", run.Id), + Filter.Eq("tenantId", run.TenantId)); + + var options = new ReplaceOptions { IsUpsert = false }; + ReplaceOneResult result; + if (session is null) + { + result = await _collection.ReplaceOneAsync(filter, document, options, cancellationToken).ConfigureAwait(false); + } + else + { + result = await _collection.ReplaceOneAsync(session, filter, document, options, cancellationToken).ConfigureAwait(false); + } + + return result.MatchedCount > 0; + } + + public async Task<Run?> GetAsync( + string tenantId, + string runId, + IClientSessionHandle? session = null, + CancellationToken cancellationToken = default) + { + if (string.IsNullOrWhiteSpace(tenantId)) + { + throw new ArgumentException("Tenant id must be provided.", nameof(tenantId)); + } + + if (string.IsNullOrWhiteSpace(runId)) + { + throw new ArgumentException("Run id must be provided.", nameof(runId)); + } + + var filter = Filter.And( + Filter.Eq("_id", runId), + Filter.Eq("tenantId", tenantId)); + + var query = session is null + ? _collection.Find(filter) + : _collection.Find(session, filter); + + var document = await query.FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); + return document is null ? null : RunDocumentMapper.FromBsonDocument(document); + } + + public async Task<IReadOnlyList<Run>> ListAsync( + string tenantId, + RunQueryOptions? options = null, + IClientSessionHandle? session = null, + CancellationToken cancellationToken = default) + { + if (string.IsNullOrWhiteSpace(tenantId)) + { + throw new ArgumentException("Tenant id must be provided.", nameof(tenantId)); + } + + options ??= new RunQueryOptions(); + var filters = new List<FilterDefinition<BsonDocument>> + { + Filter.Eq("tenantId", tenantId) + }; + + if (!string.IsNullOrWhiteSpace(options.ScheduleId)) + { + filters.Add(Filter.Eq("scheduleId", options.ScheduleId)); + } + + if (options.States.Length > 0) + { + filters.Add(Filter.In("state", options.States.Select(state => state.ToString().ToLowerInvariant()))); + } + + if (options.CreatedAfter is { } createdAfter) + { + filters.Add(Filter.Gt("createdAt", createdAfter.ToUniversalTime().UtcDateTime)); + } + + var combined = Filter.And(filters); + + var find = session is null + ? _collection.Find(combined) + : _collection.Find(session, combined); + + var limit = options.Limit is { } specified && specified > 0 ? specified : DefaultListLimit; + find = find.Limit(limit); + + var sortDefinition = options.SortAscending + ? 
Sort.Ascending("createdAt") + : Sort.Descending("createdAt"); + + find = find.Sort(sortDefinition); + + var documents = await find.ToListAsync(cancellationToken).ConfigureAwait(false); + return documents.Select(RunDocumentMapper.FromBsonDocument).ToArray(); + } + + public async Task<IReadOnlyList<Run>> ListByStateAsync( + RunState state, + int limit = 50, + IClientSessionHandle? session = null, + CancellationToken cancellationToken = default) + { + if (limit <= 0) + { + throw new ArgumentOutOfRangeException(nameof(limit), limit, "Limit must be greater than zero."); + } + + var filter = Filter.Eq("state", state.ToString().ToLowerInvariant()); + var find = session is null + ? _collection.Find(filter) + : _collection.Find(session, filter); + + find = find.Sort(Sort.Ascending("createdAt")); + find = find.Limit(limit); + + var documents = await find.ToListAsync(cancellationToken).ConfigureAwait(false); + return documents.Select(RunDocumentMapper.FromBsonDocument).ToArray(); + } +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Repositories/RunSummaryRepository.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/RunSummaryRepository.cs similarity index 97% rename from src/StellaOps.Scheduler.Storage.Mongo/Repositories/RunSummaryRepository.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/RunSummaryRepository.cs index 324de7b1..ab7bae4a 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo/Repositories/RunSummaryRepository.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/RunSummaryRepository.cs @@ -1,79 +1,79 @@ -using MongoDB.Driver; -using StellaOps.Scheduler.Storage.Mongo.Documents; -using StellaOps.Scheduler.Storage.Mongo.Internal; - -namespace StellaOps.Scheduler.Storage.Mongo.Repositories; - -internal sealed class RunSummaryRepository : IRunSummaryRepository -{ - private readonly IMongoCollection<RunSummaryDocument> _collection; - - public RunSummaryRepository(SchedulerMongoContext context) - { - ArgumentNullException.ThrowIfNull(context); - _collection = context.Database.GetCollection<RunSummaryDocument>(context.Options.RunSummariesCollection); - } - - public async Task<RunSummaryDocument?> GetAsync( - string tenantId, - string scheduleId, - CancellationToken cancellationToken = default) - { - if (string.IsNullOrWhiteSpace(tenantId)) - { - throw new ArgumentException("Tenant id must be provided.", nameof(tenantId)); - } - - if (string.IsNullOrWhiteSpace(scheduleId)) - { - throw new ArgumentException("Schedule id must be provided.", nameof(scheduleId)); - } - - var filter = Builders<RunSummaryDocument>.Filter.Eq(document => document.Id, CreateDocumentId(tenantId, scheduleId)); - var document = await _collection.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); - return document; - } - - public async Task<IReadOnlyList<RunSummaryDocument>> ListAsync( - string tenantId, - CancellationToken cancellationToken = default) - { - if (string.IsNullOrWhiteSpace(tenantId)) - { - throw new ArgumentException("Tenant id must be provided.", nameof(tenantId)); - } - - var filter = Builders<RunSummaryDocument>.Filter.Eq(document => document.TenantId, tenantId); - var sort = Builders<RunSummaryDocument>.Sort.Descending(document => document.UpdatedAt); - var documents = await _collection.Find(filter).Sort(sort).ToListAsync(cancellationToken).ConfigureAwait(false); - return documents; - } - - public Task UpsertAsync( - RunSummaryDocument document, - CancellationToken cancellationToken = 
default) - { - ArgumentNullException.ThrowIfNull(document); - if (string.IsNullOrWhiteSpace(document.TenantId)) - { - throw new ArgumentException("Tenant id must be provided.", nameof(document.TenantId)); - } - - if (string.IsNullOrWhiteSpace(document.ScheduleId)) - { - throw new ArgumentException("Schedule id must be provided.", nameof(document.ScheduleId)); - } - - document.Id = CreateDocumentId(document.TenantId, document.ScheduleId); - var filter = Builders<RunSummaryDocument>.Filter.Eq(x => x.Id, document.Id); - return _collection.ReplaceOneAsync(filter, document, new ReplaceOptions { IsUpsert = true }, cancellationToken); - } - - private static string CreateDocumentId(string tenantId, string scheduleId) - => string.Create(tenantId.Length + scheduleId.Length + 1, (tenantId, scheduleId), static (span, value) => - { - value.tenantId.AsSpan().CopyTo(span); - span[value.tenantId.Length] = ':'; - value.scheduleId.AsSpan().CopyTo(span[(value.tenantId.Length + 1)..]); - }); -} +using MongoDB.Driver; +using StellaOps.Scheduler.Storage.Mongo.Documents; +using StellaOps.Scheduler.Storage.Mongo.Internal; + +namespace StellaOps.Scheduler.Storage.Mongo.Repositories; + +internal sealed class RunSummaryRepository : IRunSummaryRepository +{ + private readonly IMongoCollection<RunSummaryDocument> _collection; + + public RunSummaryRepository(SchedulerMongoContext context) + { + ArgumentNullException.ThrowIfNull(context); + _collection = context.Database.GetCollection<RunSummaryDocument>(context.Options.RunSummariesCollection); + } + + public async Task<RunSummaryDocument?> GetAsync( + string tenantId, + string scheduleId, + CancellationToken cancellationToken = default) + { + if (string.IsNullOrWhiteSpace(tenantId)) + { + throw new ArgumentException("Tenant id must be provided.", nameof(tenantId)); + } + + if (string.IsNullOrWhiteSpace(scheduleId)) + { + throw new ArgumentException("Schedule id must be provided.", nameof(scheduleId)); + } + + var filter = Builders<RunSummaryDocument>.Filter.Eq(document => document.Id, CreateDocumentId(tenantId, scheduleId)); + var document = await _collection.Find(filter).FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); + return document; + } + + public async Task<IReadOnlyList<RunSummaryDocument>> ListAsync( + string tenantId, + CancellationToken cancellationToken = default) + { + if (string.IsNullOrWhiteSpace(tenantId)) + { + throw new ArgumentException("Tenant id must be provided.", nameof(tenantId)); + } + + var filter = Builders<RunSummaryDocument>.Filter.Eq(document => document.TenantId, tenantId); + var sort = Builders<RunSummaryDocument>.Sort.Descending(document => document.UpdatedAt); + var documents = await _collection.Find(filter).Sort(sort).ToListAsync(cancellationToken).ConfigureAwait(false); + return documents; + } + + public Task UpsertAsync( + RunSummaryDocument document, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(document); + if (string.IsNullOrWhiteSpace(document.TenantId)) + { + throw new ArgumentException("Tenant id must be provided.", nameof(document.TenantId)); + } + + if (string.IsNullOrWhiteSpace(document.ScheduleId)) + { + throw new ArgumentException("Schedule id must be provided.", nameof(document.ScheduleId)); + } + + document.Id = CreateDocumentId(document.TenantId, document.ScheduleId); + var filter = Builders<RunSummaryDocument>.Filter.Eq(x => x.Id, document.Id); + return _collection.ReplaceOneAsync(filter, document, new ReplaceOptions { IsUpsert = true }, cancellationToken); + } + 
+ private static string CreateDocumentId(string tenantId, string scheduleId) + => string.Create(tenantId.Length + scheduleId.Length + 1, (tenantId, scheduleId), static (span, value) => + { + value.tenantId.AsSpan().CopyTo(span); + span[value.tenantId.Length] = ':'; + value.scheduleId.AsSpan().CopyTo(span[(value.tenantId.Length + 1)..]); + }); +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Repositories/ScheduleQueryOptions.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/ScheduleQueryOptions.cs similarity index 96% rename from src/StellaOps.Scheduler.Storage.Mongo/Repositories/ScheduleQueryOptions.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/ScheduleQueryOptions.cs index d4f31aa8..54ad4a28 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo/Repositories/ScheduleQueryOptions.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/ScheduleQueryOptions.cs @@ -1,22 +1,22 @@ -namespace StellaOps.Scheduler.Storage.Mongo.Repositories; - -/// <summary> -/// Filters applied when listing scheduler schedules. -/// </summary> -public sealed class ScheduleQueryOptions -{ - /// <summary> - /// When true, returns disabled schedules; otherwise disabled entries are excluded. - /// </summary> - public bool IncludeDisabled { get; init; } - - /// <summary> - /// When true, includes soft-deleted schedules; by default deleted entries are excluded. - /// </summary> - public bool IncludeDeleted { get; init; } - - /// <summary> - /// Optional maximum number of schedules to return. - /// </summary> - public int? Limit { get; init; } -} +namespace StellaOps.Scheduler.Storage.Mongo.Repositories; + +/// <summary> +/// Filters applied when listing scheduler schedules. +/// </summary> +public sealed class ScheduleQueryOptions +{ + /// <summary> + /// When true, returns disabled schedules; otherwise disabled entries are excluded. + /// </summary> + public bool IncludeDisabled { get; init; } + + /// <summary> + /// When true, includes soft-deleted schedules; by default deleted entries are excluded. + /// </summary> + public bool IncludeDeleted { get; init; } + + /// <summary> + /// Optional maximum number of schedules to return. + /// </summary> + public int? 
Limit { get; init; } +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Repositories/ScheduleRepository.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/ScheduleRepository.cs similarity index 97% rename from src/StellaOps.Scheduler.Storage.Mongo/Repositories/ScheduleRepository.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/ScheduleRepository.cs index af3b043a..59c84a10 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo/Repositories/ScheduleRepository.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Repositories/ScheduleRepository.cs @@ -1,180 +1,180 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using MongoDB.Bson; -using MongoDB.Driver; -using StellaOps.Scheduler.Models; -using StellaOps.Scheduler.Storage.Mongo.Internal; -using StellaOps.Scheduler.Storage.Mongo.Serialization; - -namespace StellaOps.Scheduler.Storage.Mongo.Repositories; - -internal sealed class ScheduleRepository : IScheduleRepository -{ - private static readonly FilterDefinitionBuilder<BsonDocument> Filter = Builders<BsonDocument>.Filter; - private static readonly UpdateDefinitionBuilder<BsonDocument> Update = Builders<BsonDocument>.Update; - - private readonly IMongoCollection<BsonDocument> _collection; - - public ScheduleRepository(SchedulerMongoContext context) - { - if (context is null) - { - throw new ArgumentNullException(nameof(context)); - } - - _collection = context.Database.GetCollection<BsonDocument>(context.Options.SchedulesCollection); - } - - public async Task UpsertAsync( - Schedule schedule, - IClientSessionHandle? session = null, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(schedule); - - var document = ScheduleDocumentMapper.ToBsonDocument(schedule); - document.Remove("deletedAt"); - document.Remove("deletedBy"); - - var filter = Filter.And( - Filter.Eq("_id", schedule.Id), - Filter.Eq("tenantId", schedule.TenantId)); - - var options = new ReplaceOptions { IsUpsert = true }; - - if (session is null) - { - await _collection.ReplaceOneAsync(filter, document, options, cancellationToken).ConfigureAwait(false); - } - else - { - await _collection.ReplaceOneAsync(session, filter, document, options, cancellationToken).ConfigureAwait(false); - } - } - - public async Task<Schedule?> GetAsync( - string tenantId, - string scheduleId, - IClientSessionHandle? session = null, - CancellationToken cancellationToken = default) - { - if (string.IsNullOrWhiteSpace(tenantId)) - { - throw new ArgumentException("Tenant id must be provided.", nameof(tenantId)); - } - - if (string.IsNullOrWhiteSpace(scheduleId)) - { - throw new ArgumentException("Schedule id must be provided.", nameof(scheduleId)); - } - - var filter = Filter.And( - Filter.Eq("_id", scheduleId), - Filter.Eq("tenantId", tenantId), - Filter.Or( - Filter.Exists("deletedAt", false), - Filter.Eq("deletedAt", BsonNull.Value))); - - var query = session is null - ? _collection.Find(filter) - : _collection.Find(session, filter); - - var document = await query.FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); - return document is null ? null : ScheduleDocumentMapper.FromBsonDocument(document); - } - - public async Task<IReadOnlyList<Schedule>> ListAsync( - string tenantId, - ScheduleQueryOptions? options = null, - IClientSessionHandle? 
session = null, - CancellationToken cancellationToken = default) - { - if (string.IsNullOrWhiteSpace(tenantId)) - { - throw new ArgumentException("Tenant id must be provided.", nameof(tenantId)); - } - options ??= new ScheduleQueryOptions(); - - var filters = new List<FilterDefinition<BsonDocument>> - { - Filter.Eq("tenantId", tenantId) - }; - - if (!options.IncludeDeleted) - { - filters.Add(Filter.Or( - Filter.Exists("deletedAt", false), - Filter.Eq("deletedAt", BsonNull.Value))); - } - - if (!options.IncludeDisabled) - { - filters.Add(Filter.Eq("enabled", true)); - } - - var combined = Filter.And(filters); - - var find = session is null - ? _collection.Find(combined) - : _collection.Find(session, combined); - - if (options.Limit is { } limit && limit > 0) - { - find = find.Limit(limit); - } - - var documents = await find.Sort(Builders<BsonDocument>.Sort.Ascending("name")) - .ToListAsync(cancellationToken) - .ConfigureAwait(false); - - return documents.Select(ScheduleDocumentMapper.FromBsonDocument).ToArray(); - } - - public async Task<bool> SoftDeleteAsync( - string tenantId, - string scheduleId, - string deletedBy, - DateTimeOffset deletedAt, - IClientSessionHandle? session = null, - CancellationToken cancellationToken = default) - { - if (string.IsNullOrWhiteSpace(tenantId)) - { - throw new ArgumentException("Tenant id must be provided.", nameof(tenantId)); - } - - if (string.IsNullOrWhiteSpace(scheduleId)) - { - throw new ArgumentException("Schedule id must be provided.", nameof(scheduleId)); - } - if (string.IsNullOrWhiteSpace(deletedBy)) - { - throw new ArgumentException("Deleted by must be provided.", nameof(deletedBy)); - } - - var filter = Filter.And( - Filter.Eq("_id", scheduleId), - Filter.Eq("tenantId", tenantId)); - - var utc = deletedAt.ToUniversalTime(); - var update = Update - .Set("deletedAt", utc.UtcDateTime) - .Set("deletedBy", deletedBy) - .Set("enabled", false) - .Set("updatedAt", utc.UtcDateTime) - .Set("updatedBy", deletedBy); - - UpdateResult result; - if (session is null) - { - result = await _collection.UpdateOneAsync(filter, update, cancellationToken: cancellationToken).ConfigureAwait(false); - } - else - { - result = await _collection.UpdateOneAsync(session, filter, update, cancellationToken: cancellationToken).ConfigureAwait(false); - } - - return result.ModifiedCount > 0; - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using MongoDB.Bson; +using MongoDB.Driver; +using StellaOps.Scheduler.Models; +using StellaOps.Scheduler.Storage.Mongo.Internal; +using StellaOps.Scheduler.Storage.Mongo.Serialization; + +namespace StellaOps.Scheduler.Storage.Mongo.Repositories; + +internal sealed class ScheduleRepository : IScheduleRepository +{ + private static readonly FilterDefinitionBuilder<BsonDocument> Filter = Builders<BsonDocument>.Filter; + private static readonly UpdateDefinitionBuilder<BsonDocument> Update = Builders<BsonDocument>.Update; + + private readonly IMongoCollection<BsonDocument> _collection; + + public ScheduleRepository(SchedulerMongoContext context) + { + if (context is null) + { + throw new ArgumentNullException(nameof(context)); + } + + _collection = context.Database.GetCollection<BsonDocument>(context.Options.SchedulesCollection); + } + + public async Task UpsertAsync( + Schedule schedule, + IClientSessionHandle? 
session = null, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(schedule); + + var document = ScheduleDocumentMapper.ToBsonDocument(schedule); + document.Remove("deletedAt"); + document.Remove("deletedBy"); + + var filter = Filter.And( + Filter.Eq("_id", schedule.Id), + Filter.Eq("tenantId", schedule.TenantId)); + + var options = new ReplaceOptions { IsUpsert = true }; + + if (session is null) + { + await _collection.ReplaceOneAsync(filter, document, options, cancellationToken).ConfigureAwait(false); + } + else + { + await _collection.ReplaceOneAsync(session, filter, document, options, cancellationToken).ConfigureAwait(false); + } + } + + public async Task<Schedule?> GetAsync( + string tenantId, + string scheduleId, + IClientSessionHandle? session = null, + CancellationToken cancellationToken = default) + { + if (string.IsNullOrWhiteSpace(tenantId)) + { + throw new ArgumentException("Tenant id must be provided.", nameof(tenantId)); + } + + if (string.IsNullOrWhiteSpace(scheduleId)) + { + throw new ArgumentException("Schedule id must be provided.", nameof(scheduleId)); + } + + var filter = Filter.And( + Filter.Eq("_id", scheduleId), + Filter.Eq("tenantId", tenantId), + Filter.Or( + Filter.Exists("deletedAt", false), + Filter.Eq("deletedAt", BsonNull.Value))); + + var query = session is null + ? _collection.Find(filter) + : _collection.Find(session, filter); + + var document = await query.FirstOrDefaultAsync(cancellationToken).ConfigureAwait(false); + return document is null ? null : ScheduleDocumentMapper.FromBsonDocument(document); + } + + public async Task<IReadOnlyList<Schedule>> ListAsync( + string tenantId, + ScheduleQueryOptions? options = null, + IClientSessionHandle? session = null, + CancellationToken cancellationToken = default) + { + if (string.IsNullOrWhiteSpace(tenantId)) + { + throw new ArgumentException("Tenant id must be provided.", nameof(tenantId)); + } + options ??= new ScheduleQueryOptions(); + + var filters = new List<FilterDefinition<BsonDocument>> + { + Filter.Eq("tenantId", tenantId) + }; + + if (!options.IncludeDeleted) + { + filters.Add(Filter.Or( + Filter.Exists("deletedAt", false), + Filter.Eq("deletedAt", BsonNull.Value))); + } + + if (!options.IncludeDisabled) + { + filters.Add(Filter.Eq("enabled", true)); + } + + var combined = Filter.And(filters); + + var find = session is null + ? _collection.Find(combined) + : _collection.Find(session, combined); + + if (options.Limit is { } limit && limit > 0) + { + find = find.Limit(limit); + } + + var documents = await find.Sort(Builders<BsonDocument>.Sort.Ascending("name")) + .ToListAsync(cancellationToken) + .ConfigureAwait(false); + + return documents.Select(ScheduleDocumentMapper.FromBsonDocument).ToArray(); + } + + public async Task<bool> SoftDeleteAsync( + string tenantId, + string scheduleId, + string deletedBy, + DateTimeOffset deletedAt, + IClientSessionHandle? 
session = null, + CancellationToken cancellationToken = default) + { + if (string.IsNullOrWhiteSpace(tenantId)) + { + throw new ArgumentException("Tenant id must be provided.", nameof(tenantId)); + } + + if (string.IsNullOrWhiteSpace(scheduleId)) + { + throw new ArgumentException("Schedule id must be provided.", nameof(scheduleId)); + } + if (string.IsNullOrWhiteSpace(deletedBy)) + { + throw new ArgumentException("Deleted by must be provided.", nameof(deletedBy)); + } + + var filter = Filter.And( + Filter.Eq("_id", scheduleId), + Filter.Eq("tenantId", tenantId)); + + var utc = deletedAt.ToUniversalTime(); + var update = Update + .Set("deletedAt", utc.UtcDateTime) + .Set("deletedBy", deletedBy) + .Set("enabled", false) + .Set("updatedAt", utc.UtcDateTime) + .Set("updatedBy", deletedBy); + + UpdateResult result; + if (session is null) + { + result = await _collection.UpdateOneAsync(filter, update, cancellationToken: cancellationToken).ConfigureAwait(false); + } + else + { + result = await _collection.UpdateOneAsync(session, filter, update, cancellationToken: cancellationToken).ConfigureAwait(false); + } + + return result.ModifiedCount > 0; + } +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Serialization/AuditRecordDocumentMapper.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Serialization/AuditRecordDocumentMapper.cs similarity index 97% rename from src/StellaOps.Scheduler.Storage.Mongo/Serialization/AuditRecordDocumentMapper.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Serialization/AuditRecordDocumentMapper.cs index d7b30db9..6c3e2042 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo/Serialization/AuditRecordDocumentMapper.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Serialization/AuditRecordDocumentMapper.cs @@ -1,23 +1,23 @@ -using MongoDB.Bson; -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.Storage.Mongo.Serialization; - -internal static class AuditRecordDocumentMapper -{ - public static BsonDocument ToBsonDocument(AuditRecord record) - { - ArgumentNullException.ThrowIfNull(record); - var json = CanonicalJsonSerializer.Serialize(record); - var document = BsonDocument.Parse(json); - document["_id"] = record.Id; - return document; - } - - public static AuditRecord FromBsonDocument(BsonDocument document) - { - ArgumentNullException.ThrowIfNull(document); - var node = document.ToCanonicalJsonNode(); - return CanonicalJsonSerializer.Deserialize<AuditRecord>(node.ToCanonicalJson()); - } -} +using MongoDB.Bson; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.Storage.Mongo.Serialization; + +internal static class AuditRecordDocumentMapper +{ + public static BsonDocument ToBsonDocument(AuditRecord record) + { + ArgumentNullException.ThrowIfNull(record); + var json = CanonicalJsonSerializer.Serialize(record); + var document = BsonDocument.Parse(json); + document["_id"] = record.Id; + return document; + } + + public static AuditRecord FromBsonDocument(BsonDocument document) + { + ArgumentNullException.ThrowIfNull(document); + var node = document.ToCanonicalJsonNode(); + return CanonicalJsonSerializer.Deserialize<AuditRecord>(node.ToCanonicalJson()); + } +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Serialization/BsonDocumentJsonExtensions.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Serialization/BsonDocumentJsonExtensions.cs similarity index 96% rename from src/StellaOps.Scheduler.Storage.Mongo/Serialization/BsonDocumentJsonExtensions.cs rename to 
src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Serialization/BsonDocumentJsonExtensions.cs index 6fe8970a..4ee1dd2c 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo/Serialization/BsonDocumentJsonExtensions.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Serialization/BsonDocumentJsonExtensions.cs @@ -1,144 +1,144 @@ -using System.Globalization; -using System.Text.Json; -using System.Text.Json.Nodes; -using MongoDB.Bson; -using MongoDB.Bson.IO; - -namespace StellaOps.Scheduler.Storage.Mongo.Serialization; - -internal static class BsonDocumentJsonExtensions -{ - public static JsonNode ToCanonicalJsonNode(this BsonDocument document, params string[] fieldsToRemove) - { - ArgumentNullException.ThrowIfNull(document); - - var clone = document.DeepClone().AsBsonDocument; - clone.Remove("_id"); - - if (fieldsToRemove is { Length: > 0 }) - { - foreach (var field in fieldsToRemove) - { - clone.Remove(field); - } - } - - var json = clone.ToJson(new JsonWriterSettings - { - OutputMode = JsonOutputMode.RelaxedExtendedJson, - Indent = false, - }); - - var node = JsonNode.Parse(json) ?? throw new InvalidOperationException("Unable to parse BSON document JSON."); - return NormalizeExtendedJson(node); - } - - private static JsonNode NormalizeExtendedJson(JsonNode node) - { - if (node is JsonObject obj) - { - if (TryConvertExtendedDate(obj, out var replacement)) - { - return replacement; - } - - foreach (var property in obj.ToList()) - { - if (property.Value is null) - { - continue; - } - - var normalized = NormalizeExtendedJson(property.Value); - if (!ReferenceEquals(normalized, property.Value)) - { - obj[property.Key] = normalized; - } - } - - return obj; - } - - if (node is JsonArray array) - { - for (var i = 0; i < array.Count; i++) - { - if (array[i] is null) - { - continue; - } - - var normalized = NormalizeExtendedJson(array[i]!); - if (!ReferenceEquals(normalized, array[i])) - { - array[i] = normalized; - } - } - - return array; - } - - return node; - } - - private static bool TryConvertExtendedDate(JsonObject obj, out JsonNode replacement) - { - replacement = obj; - if (obj.Count != 1 || !obj.TryGetPropertyValue("$date", out var value) || value is null) - { - return false; - } - - if (value is JsonValue directValue) - { - if (directValue.TryGetValue(out string? dateString) && TryParseIso(dateString, out var iso)) - { - replacement = JsonValue.Create(iso); - return true; - } - - if (directValue.TryGetValue(out long epochMilliseconds)) - { - replacement = JsonValue.Create(DateTimeOffset.FromUnixTimeMilliseconds(epochMilliseconds).ToString("O")); - return true; - } - } - else if (value is JsonObject nested && - nested.TryGetPropertyValue("$numberLong", out var numberNode) && - numberNode is JsonValue numberValue && - numberValue.TryGetValue(out string? numberString) && - long.TryParse(numberString, NumberStyles.Integer, CultureInfo.InvariantCulture, out var ms)) - { - replacement = JsonValue.Create(DateTimeOffset.FromUnixTimeMilliseconds(ms).ToString("O")); - return true; - } - - return false; - } - - private static bool TryParseIso(string? 
value, out string iso) - { - iso = string.Empty; - if (string.IsNullOrWhiteSpace(value)) - { - return false; - } - - if (DateTimeOffset.TryParse(value, CultureInfo.InvariantCulture, DateTimeStyles.RoundtripKind, out var parsed)) - { - iso = parsed.ToUniversalTime().ToString("O"); - return true; - } - - return false; - } - - public static string ToCanonicalJson(this JsonNode node) - { - ArgumentNullException.ThrowIfNull(node); - return node.ToJsonString(new JsonSerializerOptions - { - WriteIndented = false - }); - } -} +using System.Globalization; +using System.Text.Json; +using System.Text.Json.Nodes; +using MongoDB.Bson; +using MongoDB.Bson.IO; + +namespace StellaOps.Scheduler.Storage.Mongo.Serialization; + +internal static class BsonDocumentJsonExtensions +{ + public static JsonNode ToCanonicalJsonNode(this BsonDocument document, params string[] fieldsToRemove) + { + ArgumentNullException.ThrowIfNull(document); + + var clone = document.DeepClone().AsBsonDocument; + clone.Remove("_id"); + + if (fieldsToRemove is { Length: > 0 }) + { + foreach (var field in fieldsToRemove) + { + clone.Remove(field); + } + } + + var json = clone.ToJson(new JsonWriterSettings + { + OutputMode = JsonOutputMode.RelaxedExtendedJson, + Indent = false, + }); + + var node = JsonNode.Parse(json) ?? throw new InvalidOperationException("Unable to parse BSON document JSON."); + return NormalizeExtendedJson(node); + } + + private static JsonNode NormalizeExtendedJson(JsonNode node) + { + if (node is JsonObject obj) + { + if (TryConvertExtendedDate(obj, out var replacement)) + { + return replacement; + } + + foreach (var property in obj.ToList()) + { + if (property.Value is null) + { + continue; + } + + var normalized = NormalizeExtendedJson(property.Value); + if (!ReferenceEquals(normalized, property.Value)) + { + obj[property.Key] = normalized; + } + } + + return obj; + } + + if (node is JsonArray array) + { + for (var i = 0; i < array.Count; i++) + { + if (array[i] is null) + { + continue; + } + + var normalized = NormalizeExtendedJson(array[i]!); + if (!ReferenceEquals(normalized, array[i])) + { + array[i] = normalized; + } + } + + return array; + } + + return node; + } + + private static bool TryConvertExtendedDate(JsonObject obj, out JsonNode replacement) + { + replacement = obj; + if (obj.Count != 1 || !obj.TryGetPropertyValue("$date", out var value) || value is null) + { + return false; + } + + if (value is JsonValue directValue) + { + if (directValue.TryGetValue(out string? dateString) && TryParseIso(dateString, out var iso)) + { + replacement = JsonValue.Create(iso); + return true; + } + + if (directValue.TryGetValue(out long epochMilliseconds)) + { + replacement = JsonValue.Create(DateTimeOffset.FromUnixTimeMilliseconds(epochMilliseconds).ToString("O")); + return true; + } + } + else if (value is JsonObject nested && + nested.TryGetPropertyValue("$numberLong", out var numberNode) && + numberNode is JsonValue numberValue && + numberValue.TryGetValue(out string? numberString) && + long.TryParse(numberString, NumberStyles.Integer, CultureInfo.InvariantCulture, out var ms)) + { + replacement = JsonValue.Create(DateTimeOffset.FromUnixTimeMilliseconds(ms).ToString("O")); + return true; + } + + return false; + } + + private static bool TryParseIso(string? 
value, out string iso) + { + iso = string.Empty; + if (string.IsNullOrWhiteSpace(value)) + { + return false; + } + + if (DateTimeOffset.TryParse(value, CultureInfo.InvariantCulture, DateTimeStyles.RoundtripKind, out var parsed)) + { + iso = parsed.ToUniversalTime().ToString("O"); + return true; + } + + return false; + } + + public static string ToCanonicalJson(this JsonNode node) + { + ArgumentNullException.ThrowIfNull(node); + return node.ToJsonString(new JsonSerializerOptions + { + WriteIndented = false + }); + } +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Serialization/GraphJobDocumentMapper.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Serialization/GraphJobDocumentMapper.cs similarity index 96% rename from src/StellaOps.Scheduler.Storage.Mongo/Serialization/GraphJobDocumentMapper.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Serialization/GraphJobDocumentMapper.cs index 965d56d3..63e47280 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo/Serialization/GraphJobDocumentMapper.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Serialization/GraphJobDocumentMapper.cs @@ -1,125 +1,125 @@ -using MongoDB.Bson; -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.Storage.Mongo.Serialization; - -internal static class GraphJobDocumentMapper -{ - private const string PayloadField = "payload"; - - public static BsonDocument ToBsonDocument(GraphBuildJob job) - { - ArgumentNullException.ThrowIfNull(job); - - var payloadJson = CanonicalJsonSerializer.Serialize(job); - var payloadDocument = BsonDocument.Parse(payloadJson); - - var document = new BsonDocument - { - ["_id"] = job.Id, - ["tenantId"] = job.TenantId, - ["kind"] = "build", - ["status"] = job.Status.ToString().ToLowerInvariant(), - ["createdAt"] = job.CreatedAt.UtcDateTime, - ["attempts"] = job.Attempts, - [PayloadField] = payloadDocument - }; - - if (!string.IsNullOrWhiteSpace(job.GraphSnapshotId)) - { - document["graphSnapshotId"] = job.GraphSnapshotId; - } - - if (!string.IsNullOrWhiteSpace(job.CorrelationId)) - { - document["correlationId"] = job.CorrelationId; - } - - if (job.StartedAt is { } startedAt) - { - document["startedAt"] = startedAt.UtcDateTime; - } - - if (job.CompletedAt is { } completedAt) - { - document["completedAt"] = completedAt.UtcDateTime; - } - - if (!string.IsNullOrWhiteSpace(job.Error)) - { - document["error"] = job.Error; - } - - return document; - } - - public static BsonDocument ToBsonDocument(GraphOverlayJob job) - { - ArgumentNullException.ThrowIfNull(job); - - var payloadJson = CanonicalJsonSerializer.Serialize(job); - var payloadDocument = BsonDocument.Parse(payloadJson); - - var document = new BsonDocument - { - ["_id"] = job.Id, - ["tenantId"] = job.TenantId, - ["kind"] = "overlay", - ["status"] = job.Status.ToString().ToLowerInvariant(), - ["createdAt"] = job.CreatedAt.UtcDateTime, - ["attempts"] = job.Attempts, - [PayloadField] = payloadDocument - }; - - document["graphSnapshotId"] = job.GraphSnapshotId; - document["overlayKind"] = job.OverlayKind.ToString().ToLowerInvariant(); - document["overlayKey"] = job.OverlayKey; - - if (!string.IsNullOrWhiteSpace(job.BuildJobId)) - { - document["buildJobId"] = job.BuildJobId; - } - - if (!string.IsNullOrWhiteSpace(job.CorrelationId)) - { - document["correlationId"] = job.CorrelationId; - } - - if (job.StartedAt is { } startedAt) - { - document["startedAt"] = startedAt.UtcDateTime; - } - - if (job.CompletedAt is { } completedAt) - { - document["completedAt"] = 
completedAt.UtcDateTime; - } - - if (!string.IsNullOrWhiteSpace(job.Error)) - { - document["error"] = job.Error; - } - - return document; - } - - public static GraphBuildJob ToGraphBuildJob(BsonDocument document) - { - ArgumentNullException.ThrowIfNull(document); - - var payloadDocument = document[PayloadField].AsBsonDocument; - var json = payloadDocument.ToJson(); - var job = CanonicalJsonSerializer.Deserialize<GraphBuildJob>(json); - return job; - } - - public static GraphOverlayJob ToGraphOverlayJob(BsonDocument document) - { - ArgumentNullException.ThrowIfNull(document); - - var payloadDocument = document[PayloadField].AsBsonDocument; - var json = payloadDocument.ToJson(); - var job = CanonicalJsonSerializer.Deserialize<GraphOverlayJob>(json); - return job; - } -} +using MongoDB.Bson; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.Storage.Mongo.Serialization; + +internal static class GraphJobDocumentMapper +{ + private const string PayloadField = "payload"; + + public static BsonDocument ToBsonDocument(GraphBuildJob job) + { + ArgumentNullException.ThrowIfNull(job); + + var payloadJson = CanonicalJsonSerializer.Serialize(job); + var payloadDocument = BsonDocument.Parse(payloadJson); + + var document = new BsonDocument + { + ["_id"] = job.Id, + ["tenantId"] = job.TenantId, + ["kind"] = "build", + ["status"] = job.Status.ToString().ToLowerInvariant(), + ["createdAt"] = job.CreatedAt.UtcDateTime, + ["attempts"] = job.Attempts, + [PayloadField] = payloadDocument + }; + + if (!string.IsNullOrWhiteSpace(job.GraphSnapshotId)) + { + document["graphSnapshotId"] = job.GraphSnapshotId; + } + + if (!string.IsNullOrWhiteSpace(job.CorrelationId)) + { + document["correlationId"] = job.CorrelationId; + } + + if (job.StartedAt is { } startedAt) + { + document["startedAt"] = startedAt.UtcDateTime; + } + + if (job.CompletedAt is { } completedAt) + { + document["completedAt"] = completedAt.UtcDateTime; + } + + if (!string.IsNullOrWhiteSpace(job.Error)) + { + document["error"] = job.Error; + } + + return document; + } + + public static BsonDocument ToBsonDocument(GraphOverlayJob job) + { + ArgumentNullException.ThrowIfNull(job); + + var payloadJson = CanonicalJsonSerializer.Serialize(job); + var payloadDocument = BsonDocument.Parse(payloadJson); + + var document = new BsonDocument + { + ["_id"] = job.Id, + ["tenantId"] = job.TenantId, + ["kind"] = "overlay", + ["status"] = job.Status.ToString().ToLowerInvariant(), + ["createdAt"] = job.CreatedAt.UtcDateTime, + ["attempts"] = job.Attempts, + [PayloadField] = payloadDocument + }; + + document["graphSnapshotId"] = job.GraphSnapshotId; + document["overlayKind"] = job.OverlayKind.ToString().ToLowerInvariant(); + document["overlayKey"] = job.OverlayKey; + + if (!string.IsNullOrWhiteSpace(job.BuildJobId)) + { + document["buildJobId"] = job.BuildJobId; + } + + if (!string.IsNullOrWhiteSpace(job.CorrelationId)) + { + document["correlationId"] = job.CorrelationId; + } + + if (job.StartedAt is { } startedAt) + { + document["startedAt"] = startedAt.UtcDateTime; + } + + if (job.CompletedAt is { } completedAt) + { + document["completedAt"] = completedAt.UtcDateTime; + } + + if (!string.IsNullOrWhiteSpace(job.Error)) + { + document["error"] = job.Error; + } + + return document; + } + + public static GraphBuildJob ToGraphBuildJob(BsonDocument document) + { + ArgumentNullException.ThrowIfNull(document); + + var payloadDocument = document[PayloadField].AsBsonDocument; + var json = payloadDocument.ToJson(); + var job = 
CanonicalJsonSerializer.Deserialize<GraphBuildJob>(json); + return job; + } + + public static GraphOverlayJob ToGraphOverlayJob(BsonDocument document) + { + ArgumentNullException.ThrowIfNull(document); + + var payloadDocument = document[PayloadField].AsBsonDocument; + var json = payloadDocument.ToJson(); + var job = CanonicalJsonSerializer.Deserialize<GraphOverlayJob>(json); + return job; + } +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Serialization/ImpactSetDocumentMapper.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Serialization/ImpactSetDocumentMapper.cs similarity index 97% rename from src/StellaOps.Scheduler.Storage.Mongo/Serialization/ImpactSetDocumentMapper.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Serialization/ImpactSetDocumentMapper.cs index 3586bfc2..644e798b 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo/Serialization/ImpactSetDocumentMapper.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Serialization/ImpactSetDocumentMapper.cs @@ -1,57 +1,57 @@ -using System; -using System.Security.Cryptography; -using System.Text; -using MongoDB.Bson; -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.Storage.Mongo.Serialization; - -internal static class ImpactSetDocumentMapper -{ - private const string SelectorHashPrefix = "selector::"; - - public static BsonDocument ToBsonDocument(ImpactSet impactSet) - { - ArgumentNullException.ThrowIfNull(impactSet); - - var json = CanonicalJsonSerializer.Serialize(impactSet); - var document = BsonDocument.Parse(json); - document["_id"] = ComputeDocumentId(impactSet); - document["selectorDigest"] = ComputeSelectorDigest(impactSet); - return document; - } - - public static ImpactSet FromBsonDocument(BsonDocument document) - { - ArgumentNullException.ThrowIfNull(document); - var node = document.ToCanonicalJsonNode(); - return CanonicalJsonSerializer.Deserialize<ImpactSet>(node.ToCanonicalJson()); - } - - private static string ComputeDocumentId(ImpactSet impactSet) - { - if (!string.IsNullOrWhiteSpace(impactSet.SnapshotId)) - { - return impactSet.SnapshotId!; - } - - var selectorJson = CanonicalJsonSerializer.Serialize(impactSet.Selector); - using var sha256 = SHA256.Create(); - var hash = sha256.ComputeHash(Encoding.UTF8.GetBytes(selectorJson)); - return SelectorHashPrefix + Convert.ToHexString(hash).ToLowerInvariant(); - } - - private static string ComputeSelectorDigest(ImpactSet impactSet) - { - return ComputeSelectorDigest(impactSet.Selector); - } - - public static string ComputeSelectorDigest(Selector selector) - { - ArgumentNullException.ThrowIfNull(selector); - var selectorJson = CanonicalJsonSerializer.Serialize(selector); - using var sha256 = SHA256.Create(); - var hash = sha256.ComputeHash(Encoding.UTF8.GetBytes(selectorJson)); - return Convert.ToHexString(hash).ToLowerInvariant(); - } -} +using System; +using System.Security.Cryptography; +using System.Text; +using MongoDB.Bson; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.Storage.Mongo.Serialization; + +internal static class ImpactSetDocumentMapper +{ + private const string SelectorHashPrefix = "selector::"; + + public static BsonDocument ToBsonDocument(ImpactSet impactSet) + { + ArgumentNullException.ThrowIfNull(impactSet); + + var json = CanonicalJsonSerializer.Serialize(impactSet); + var document = BsonDocument.Parse(json); + document["_id"] = ComputeDocumentId(impactSet); + document["selectorDigest"] = ComputeSelectorDigest(impactSet); + return document; + } + + 
public static ImpactSet FromBsonDocument(BsonDocument document) + { + ArgumentNullException.ThrowIfNull(document); + var node = document.ToCanonicalJsonNode(); + return CanonicalJsonSerializer.Deserialize<ImpactSet>(node.ToCanonicalJson()); + } + + private static string ComputeDocumentId(ImpactSet impactSet) + { + if (!string.IsNullOrWhiteSpace(impactSet.SnapshotId)) + { + return impactSet.SnapshotId!; + } + + var selectorJson = CanonicalJsonSerializer.Serialize(impactSet.Selector); + using var sha256 = SHA256.Create(); + var hash = sha256.ComputeHash(Encoding.UTF8.GetBytes(selectorJson)); + return SelectorHashPrefix + Convert.ToHexString(hash).ToLowerInvariant(); + } + + private static string ComputeSelectorDigest(ImpactSet impactSet) + { + return ComputeSelectorDigest(impactSet.Selector); + } + + public static string ComputeSelectorDigest(Selector selector) + { + ArgumentNullException.ThrowIfNull(selector); + var selectorJson = CanonicalJsonSerializer.Serialize(selector); + using var sha256 = SHA256.Create(); + var hash = sha256.ComputeHash(Encoding.UTF8.GetBytes(selectorJson)); + return Convert.ToHexString(hash).ToLowerInvariant(); + } +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Serialization/PolicyRunJobDocumentMapper.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Serialization/PolicyRunJobDocumentMapper.cs similarity index 96% rename from src/StellaOps.Scheduler.Storage.Mongo/Serialization/PolicyRunJobDocumentMapper.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Serialization/PolicyRunJobDocumentMapper.cs index ee81abb0..1bde8813 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo/Serialization/PolicyRunJobDocumentMapper.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Serialization/PolicyRunJobDocumentMapper.cs @@ -1,23 +1,23 @@ -using MongoDB.Bson; -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.Storage.Mongo.Serialization; - -internal static class PolicyRunJobDocumentMapper -{ - public static BsonDocument ToBsonDocument(PolicyRunJob job) - { - ArgumentNullException.ThrowIfNull(job); - var json = CanonicalJsonSerializer.Serialize(job); - var document = BsonDocument.Parse(json); - document["_id"] = job.Id; - return document; - } - - public static PolicyRunJob FromBsonDocument(BsonDocument document) - { - ArgumentNullException.ThrowIfNull(document); - var node = document.ToCanonicalJsonNode(); - return CanonicalJsonSerializer.Deserialize<PolicyRunJob>(node.ToCanonicalJson()); - } -} +using MongoDB.Bson; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.Storage.Mongo.Serialization; + +internal static class PolicyRunJobDocumentMapper +{ + public static BsonDocument ToBsonDocument(PolicyRunJob job) + { + ArgumentNullException.ThrowIfNull(job); + var json = CanonicalJsonSerializer.Serialize(job); + var document = BsonDocument.Parse(json); + document["_id"] = job.Id; + return document; + } + + public static PolicyRunJob FromBsonDocument(BsonDocument document) + { + ArgumentNullException.ThrowIfNull(document); + var node = document.ToCanonicalJsonNode(); + return CanonicalJsonSerializer.Deserialize<PolicyRunJob>(node.ToCanonicalJson()); + } +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Serialization/RunDocumentMapper.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Serialization/RunDocumentMapper.cs similarity index 96% rename from src/StellaOps.Scheduler.Storage.Mongo/Serialization/RunDocumentMapper.cs rename to 
src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Serialization/RunDocumentMapper.cs index 393d2524..8ee7e2e7 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo/Serialization/RunDocumentMapper.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Serialization/RunDocumentMapper.cs @@ -1,23 +1,23 @@ -using MongoDB.Bson; -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.Storage.Mongo.Serialization; - -internal static class RunDocumentMapper -{ - public static BsonDocument ToBsonDocument(Run run) - { - ArgumentNullException.ThrowIfNull(run); - var json = CanonicalJsonSerializer.Serialize(run); - var document = BsonDocument.Parse(json); - document["_id"] = run.Id; - return document; - } - - public static Run FromBsonDocument(BsonDocument document) - { - ArgumentNullException.ThrowIfNull(document); - var node = document.ToCanonicalJsonNode(); - return CanonicalJsonSerializer.Deserialize<Run>(node.ToCanonicalJson()); - } -} +using MongoDB.Bson; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.Storage.Mongo.Serialization; + +internal static class RunDocumentMapper +{ + public static BsonDocument ToBsonDocument(Run run) + { + ArgumentNullException.ThrowIfNull(run); + var json = CanonicalJsonSerializer.Serialize(run); + var document = BsonDocument.Parse(json); + document["_id"] = run.Id; + return document; + } + + public static Run FromBsonDocument(BsonDocument document) + { + ArgumentNullException.ThrowIfNull(document); + var node = document.ToCanonicalJsonNode(); + return CanonicalJsonSerializer.Deserialize<Run>(node.ToCanonicalJson()); + } +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Serialization/ScheduleDocumentMapper.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Serialization/ScheduleDocumentMapper.cs similarity index 97% rename from src/StellaOps.Scheduler.Storage.Mongo/Serialization/ScheduleDocumentMapper.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Serialization/ScheduleDocumentMapper.cs index 35f98313..aa156370 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo/Serialization/ScheduleDocumentMapper.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Serialization/ScheduleDocumentMapper.cs @@ -1,25 +1,25 @@ -using MongoDB.Bson; -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.Storage.Mongo.Serialization; - -internal static class ScheduleDocumentMapper -{ - private static readonly string[] IgnoredFields = { "deletedAt", "deletedBy" }; - - public static BsonDocument ToBsonDocument(Schedule schedule) - { - ArgumentNullException.ThrowIfNull(schedule); - var json = CanonicalJsonSerializer.Serialize(schedule); - var document = BsonDocument.Parse(json); - document["_id"] = schedule.Id; - return document; - } - - public static Schedule FromBsonDocument(BsonDocument document) - { - ArgumentNullException.ThrowIfNull(document); - var node = document.ToCanonicalJsonNode(IgnoredFields); - return CanonicalJsonSerializer.Deserialize<Schedule>(node.ToCanonicalJson()); - } -} +using MongoDB.Bson; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.Storage.Mongo.Serialization; + +internal static class ScheduleDocumentMapper +{ + private static readonly string[] IgnoredFields = { "deletedAt", "deletedBy" }; + + public static BsonDocument ToBsonDocument(Schedule schedule) + { + ArgumentNullException.ThrowIfNull(schedule); + var json = CanonicalJsonSerializer.Serialize(schedule); + var document = BsonDocument.Parse(json); + document["_id"] = 
schedule.Id;
+        return document;
+    }
+
+    public static Schedule FromBsonDocument(BsonDocument document)
+    {
+        ArgumentNullException.ThrowIfNull(document);
+        var node = document.ToCanonicalJsonNode(IgnoredFields);
+        return CanonicalJsonSerializer.Deserialize<Schedule>(node.ToCanonicalJson());
+    }
+}
diff --git a/src/StellaOps.Scheduler.Storage.Mongo/ServiceCollectionExtensions.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/ServiceCollectionExtensions.cs
similarity index 100%
rename from src/StellaOps.Scheduler.Storage.Mongo/ServiceCollectionExtensions.cs
rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/ServiceCollectionExtensions.cs
diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Services/IRunSummaryService.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Services/IRunSummaryService.cs
similarity index 96%
rename from src/StellaOps.Scheduler.Storage.Mongo/Services/IRunSummaryService.cs
rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Services/IRunSummaryService.cs
index de10d16e..2a068a7c 100644
--- a/src/StellaOps.Scheduler.Storage.Mongo/Services/IRunSummaryService.cs
+++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Services/IRunSummaryService.cs
@@ -1,20 +1,20 @@
-using StellaOps.Scheduler.Models;
-using StellaOps.Scheduler.Storage.Mongo.Projections;
-
-namespace StellaOps.Scheduler.Storage.Mongo.Services;
-
-public interface IRunSummaryService
-{
-    Task<RunSummaryProjection> ProjectAsync(
-        Run run,
-        CancellationToken cancellationToken = default);
-
-    Task<RunSummaryProjection?> GetAsync(
-        string tenantId,
-        string scheduleId,
-        CancellationToken cancellationToken = default);
-
-    Task<IReadOnlyList<RunSummaryProjection>> ListAsync(
-        string tenantId,
-        CancellationToken cancellationToken = default);
-}
+using StellaOps.Scheduler.Models;
+using StellaOps.Scheduler.Storage.Mongo.Projections;
+
+namespace StellaOps.Scheduler.Storage.Mongo.Services;
+
+public interface IRunSummaryService
+{
+    Task<RunSummaryProjection> ProjectAsync(
+        Run run,
+        CancellationToken cancellationToken = default);
+
+    Task<RunSummaryProjection?> GetAsync(
+        string tenantId,
+        string scheduleId,
+        CancellationToken cancellationToken = default);
+
+    Task<IReadOnlyList<RunSummaryProjection>> ListAsync(
+        string tenantId,
+        CancellationToken cancellationToken = default);
+}
diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Services/ISchedulerAuditService.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Services/ISchedulerAuditService.cs
similarity index 96%
rename from src/StellaOps.Scheduler.Storage.Mongo/Services/ISchedulerAuditService.cs
rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Services/ISchedulerAuditService.cs
index 1943108b..3b0c98c7 100644
--- a/src/StellaOps.Scheduler.Storage.Mongo/Services/ISchedulerAuditService.cs
+++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Services/ISchedulerAuditService.cs
@@ -1,10 +1,10 @@
-using StellaOps.Scheduler.Models;
-
-namespace StellaOps.Scheduler.Storage.Mongo.Services;
-
-public interface ISchedulerAuditService
-{
-    Task<AuditRecord> WriteAsync(
-        SchedulerAuditEvent auditEvent,
-        CancellationToken cancellationToken = default);
-}
+using StellaOps.Scheduler.Models;
+
+namespace StellaOps.Scheduler.Storage.Mongo.Services;
+
+public interface ISchedulerAuditService
+{
+    Task<AuditRecord> WriteAsync(
+        SchedulerAuditEvent auditEvent,
+        CancellationToken cancellationToken = default);
+}
diff --git
a/src/StellaOps.Scheduler.Storage.Mongo/Services/RunSummaryService.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Services/RunSummaryService.cs similarity index 97% rename from src/StellaOps.Scheduler.Storage.Mongo/Services/RunSummaryService.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Services/RunSummaryService.cs index a14dab43..acc9ffe7 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo/Services/RunSummaryService.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Services/RunSummaryService.cs @@ -1,204 +1,204 @@ -using System.Collections.Immutable; -using Microsoft.Extensions.Logging; -using StellaOps.Scheduler.Models; -using StellaOps.Scheduler.Storage.Mongo.Documents; -using StellaOps.Scheduler.Storage.Mongo.Projections; -using StellaOps.Scheduler.Storage.Mongo.Repositories; - -namespace StellaOps.Scheduler.Storage.Mongo.Services; - -internal sealed class RunSummaryService : IRunSummaryService -{ - private const int RecentLimit = 20; - - private readonly IRunSummaryRepository _repository; - private readonly TimeProvider _timeProvider; - private readonly ILogger<RunSummaryService> _logger; - - public RunSummaryService( - IRunSummaryRepository repository, - TimeProvider? timeProvider, - ILogger<RunSummaryService> logger) - { - _repository = repository ?? throw new ArgumentNullException(nameof(repository)); - _timeProvider = timeProvider ?? TimeProvider.System; - _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - } - - public async Task<RunSummaryProjection> ProjectAsync( - Run run, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(run); - if (string.IsNullOrWhiteSpace(run.ScheduleId)) - { - throw new ArgumentException("Run must contain a scheduleId to project summary data.", nameof(run)); - } - - var document = await _repository - .GetAsync(run.TenantId, run.ScheduleId!, cancellationToken) - .ConfigureAwait(false) - ?? new RunSummaryDocument - { - TenantId = run.TenantId, - ScheduleId = run.ScheduleId!, - }; - - UpdateDocument(document, run); - document.UpdatedAt = _timeProvider.GetUtcNow().UtcDateTime; - - await _repository.UpsertAsync(document, cancellationToken).ConfigureAwait(false); - _logger.LogDebug( - "Projected run summary for tenant {TenantId} schedule {ScheduleId} using run {RunId}.", - run.TenantId, - run.ScheduleId, - run.Id); - return ToProjection(document); - } - - public async Task<RunSummaryProjection?> GetAsync( - string tenantId, - string scheduleId, - CancellationToken cancellationToken = default) - { - var document = await _repository - .GetAsync(tenantId, scheduleId, cancellationToken) - .ConfigureAwait(false); - - return document is null ? 
null : ToProjection(document); - } - - public async Task<IReadOnlyList<RunSummaryProjection>> ListAsync( - string tenantId, - CancellationToken cancellationToken = default) - { - var documents = await _repository.ListAsync(tenantId, cancellationToken).ConfigureAwait(false); - return documents.Select(ToProjection).ToArray(); - } - - private static void UpdateDocument(RunSummaryDocument document, Run run) - { - var entry = document.Recent.FirstOrDefault(item => string.Equals(item.RunId, run.Id, StringComparison.Ordinal)); - if (entry is null) - { - entry = new RunSummaryEntryDocument - { - RunId = run.Id, - }; - document.Recent.Add(entry); - } - - entry.Trigger = run.Trigger; - entry.State = run.State; - entry.CreatedAt = run.CreatedAt.UtcDateTime; - entry.StartedAt = run.StartedAt?.UtcDateTime; - entry.FinishedAt = run.FinishedAt?.UtcDateTime; - entry.Error = run.Error; - entry.Stats = run.Stats; - - document.Recent = document.Recent - .OrderByDescending(item => item.CreatedAt) - .ThenByDescending(item => item.RunId, StringComparer.Ordinal) - .Take(RecentLimit) - .ToList(); - - document.LastRun = document.Recent.FirstOrDefault(); - document.Counters = ComputeCounters(document.Recent); - } - - private static RunSummaryCountersDocument ComputeCounters(IEnumerable<RunSummaryEntryDocument> entries) - { - var counters = new RunSummaryCountersDocument(); - - foreach (var entry in entries) - { - counters.Total++; - switch (entry.State) - { - case RunState.Planning: - counters.Planning++; - break; - case RunState.Queued: - counters.Queued++; - break; - case RunState.Running: - counters.Running++; - break; - case RunState.Completed: - counters.Completed++; - break; - case RunState.Error: - counters.Error++; - break; - case RunState.Cancelled: - counters.Cancelled++; - break; - default: - break; - } - - counters.TotalDeltas += entry.Stats.Deltas; - counters.TotalNewCriticals += entry.Stats.NewCriticals; - counters.TotalNewHigh += entry.Stats.NewHigh; - counters.TotalNewMedium += entry.Stats.NewMedium; - counters.TotalNewLow += entry.Stats.NewLow; - } - - return counters; - } - - private static RunSummaryProjection ToProjection(RunSummaryDocument document) - { - var updatedAt = new DateTimeOffset(DateTime.SpecifyKind(document.UpdatedAt, DateTimeKind.Utc)); - var lastRun = document.LastRun is null - ? null - : ToSnapshot(document.LastRun); - - var recent = document.Recent - .Select(ToSnapshot) - .ToImmutableArray(); - - var counters = new RunSummaryCounters( - document.Counters.Total, - document.Counters.Planning, - document.Counters.Queued, - document.Counters.Running, - document.Counters.Completed, - document.Counters.Error, - document.Counters.Cancelled, - document.Counters.TotalDeltas, - document.Counters.TotalNewCriticals, - document.Counters.TotalNewHigh, - document.Counters.TotalNewMedium, - document.Counters.TotalNewLow); - - return new RunSummaryProjection( - document.TenantId, - document.ScheduleId, - updatedAt, - lastRun, - recent, - counters); - } - - private static RunSummarySnapshot ToSnapshot(RunSummaryEntryDocument entry) - { - var createdAt = new DateTimeOffset(DateTime.SpecifyKind(entry.CreatedAt, DateTimeKind.Utc)); - DateTimeOffset? startedAt = entry.StartedAt is null - ? null - : new DateTimeOffset(DateTime.SpecifyKind(entry.StartedAt.Value, DateTimeKind.Utc)); - DateTimeOffset? finishedAt = entry.FinishedAt is null - ? 
null - : new DateTimeOffset(DateTime.SpecifyKind(entry.FinishedAt.Value, DateTimeKind.Utc)); - - return new RunSummarySnapshot( - entry.RunId, - entry.Trigger, - entry.State, - createdAt, - startedAt, - finishedAt, - entry.Stats, - entry.Error); - } -} +using System.Collections.Immutable; +using Microsoft.Extensions.Logging; +using StellaOps.Scheduler.Models; +using StellaOps.Scheduler.Storage.Mongo.Documents; +using StellaOps.Scheduler.Storage.Mongo.Projections; +using StellaOps.Scheduler.Storage.Mongo.Repositories; + +namespace StellaOps.Scheduler.Storage.Mongo.Services; + +internal sealed class RunSummaryService : IRunSummaryService +{ + private const int RecentLimit = 20; + + private readonly IRunSummaryRepository _repository; + private readonly TimeProvider _timeProvider; + private readonly ILogger<RunSummaryService> _logger; + + public RunSummaryService( + IRunSummaryRepository repository, + TimeProvider? timeProvider, + ILogger<RunSummaryService> logger) + { + _repository = repository ?? throw new ArgumentNullException(nameof(repository)); + _timeProvider = timeProvider ?? TimeProvider.System; + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public async Task<RunSummaryProjection> ProjectAsync( + Run run, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(run); + if (string.IsNullOrWhiteSpace(run.ScheduleId)) + { + throw new ArgumentException("Run must contain a scheduleId to project summary data.", nameof(run)); + } + + var document = await _repository + .GetAsync(run.TenantId, run.ScheduleId!, cancellationToken) + .ConfigureAwait(false) + ?? new RunSummaryDocument + { + TenantId = run.TenantId, + ScheduleId = run.ScheduleId!, + }; + + UpdateDocument(document, run); + document.UpdatedAt = _timeProvider.GetUtcNow().UtcDateTime; + + await _repository.UpsertAsync(document, cancellationToken).ConfigureAwait(false); + _logger.LogDebug( + "Projected run summary for tenant {TenantId} schedule {ScheduleId} using run {RunId}.", + run.TenantId, + run.ScheduleId, + run.Id); + return ToProjection(document); + } + + public async Task<RunSummaryProjection?> GetAsync( + string tenantId, + string scheduleId, + CancellationToken cancellationToken = default) + { + var document = await _repository + .GetAsync(tenantId, scheduleId, cancellationToken) + .ConfigureAwait(false); + + return document is null ? 
null : ToProjection(document); + } + + public async Task<IReadOnlyList<RunSummaryProjection>> ListAsync( + string tenantId, + CancellationToken cancellationToken = default) + { + var documents = await _repository.ListAsync(tenantId, cancellationToken).ConfigureAwait(false); + return documents.Select(ToProjection).ToArray(); + } + + private static void UpdateDocument(RunSummaryDocument document, Run run) + { + var entry = document.Recent.FirstOrDefault(item => string.Equals(item.RunId, run.Id, StringComparison.Ordinal)); + if (entry is null) + { + entry = new RunSummaryEntryDocument + { + RunId = run.Id, + }; + document.Recent.Add(entry); + } + + entry.Trigger = run.Trigger; + entry.State = run.State; + entry.CreatedAt = run.CreatedAt.UtcDateTime; + entry.StartedAt = run.StartedAt?.UtcDateTime; + entry.FinishedAt = run.FinishedAt?.UtcDateTime; + entry.Error = run.Error; + entry.Stats = run.Stats; + + document.Recent = document.Recent + .OrderByDescending(item => item.CreatedAt) + .ThenByDescending(item => item.RunId, StringComparer.Ordinal) + .Take(RecentLimit) + .ToList(); + + document.LastRun = document.Recent.FirstOrDefault(); + document.Counters = ComputeCounters(document.Recent); + } + + private static RunSummaryCountersDocument ComputeCounters(IEnumerable<RunSummaryEntryDocument> entries) + { + var counters = new RunSummaryCountersDocument(); + + foreach (var entry in entries) + { + counters.Total++; + switch (entry.State) + { + case RunState.Planning: + counters.Planning++; + break; + case RunState.Queued: + counters.Queued++; + break; + case RunState.Running: + counters.Running++; + break; + case RunState.Completed: + counters.Completed++; + break; + case RunState.Error: + counters.Error++; + break; + case RunState.Cancelled: + counters.Cancelled++; + break; + default: + break; + } + + counters.TotalDeltas += entry.Stats.Deltas; + counters.TotalNewCriticals += entry.Stats.NewCriticals; + counters.TotalNewHigh += entry.Stats.NewHigh; + counters.TotalNewMedium += entry.Stats.NewMedium; + counters.TotalNewLow += entry.Stats.NewLow; + } + + return counters; + } + + private static RunSummaryProjection ToProjection(RunSummaryDocument document) + { + var updatedAt = new DateTimeOffset(DateTime.SpecifyKind(document.UpdatedAt, DateTimeKind.Utc)); + var lastRun = document.LastRun is null + ? null + : ToSnapshot(document.LastRun); + + var recent = document.Recent + .Select(ToSnapshot) + .ToImmutableArray(); + + var counters = new RunSummaryCounters( + document.Counters.Total, + document.Counters.Planning, + document.Counters.Queued, + document.Counters.Running, + document.Counters.Completed, + document.Counters.Error, + document.Counters.Cancelled, + document.Counters.TotalDeltas, + document.Counters.TotalNewCriticals, + document.Counters.TotalNewHigh, + document.Counters.TotalNewMedium, + document.Counters.TotalNewLow); + + return new RunSummaryProjection( + document.TenantId, + document.ScheduleId, + updatedAt, + lastRun, + recent, + counters); + } + + private static RunSummarySnapshot ToSnapshot(RunSummaryEntryDocument entry) + { + var createdAt = new DateTimeOffset(DateTime.SpecifyKind(entry.CreatedAt, DateTimeKind.Utc)); + DateTimeOffset? startedAt = entry.StartedAt is null + ? null + : new DateTimeOffset(DateTime.SpecifyKind(entry.StartedAt.Value, DateTimeKind.Utc)); + DateTimeOffset? finishedAt = entry.FinishedAt is null + ? 
null + : new DateTimeOffset(DateTime.SpecifyKind(entry.FinishedAt.Value, DateTimeKind.Utc)); + + return new RunSummarySnapshot( + entry.RunId, + entry.Trigger, + entry.State, + createdAt, + startedAt, + finishedAt, + entry.Stats, + entry.Error); + } +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Services/SchedulerAuditEvent.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Services/SchedulerAuditEvent.cs similarity index 96% rename from src/StellaOps.Scheduler.Storage.Mongo/Services/SchedulerAuditEvent.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Services/SchedulerAuditEvent.cs index 145dda56..51e7e276 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo/Services/SchedulerAuditEvent.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Services/SchedulerAuditEvent.cs @@ -1,18 +1,18 @@ -using System.Collections.Generic; -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.Storage.Mongo.Services; - -public sealed record SchedulerAuditEvent( - string TenantId, - string Category, - string Action, - AuditActor Actor, - string? EntityId = null, - string? ScheduleId = null, - string? RunId = null, - string? CorrelationId = null, - IReadOnlyDictionary<string, string>? Metadata = null, - string? Message = null, - DateTimeOffset? OccurredAt = null, - string? AuditId = null); +using System.Collections.Generic; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.Storage.Mongo.Services; + +public sealed record SchedulerAuditEvent( + string TenantId, + string Category, + string Action, + AuditActor Actor, + string? EntityId = null, + string? ScheduleId = null, + string? RunId = null, + string? CorrelationId = null, + IReadOnlyDictionary<string, string>? Metadata = null, + string? Message = null, + DateTimeOffset? OccurredAt = null, + string? AuditId = null); diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Services/SchedulerAuditService.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Services/SchedulerAuditService.cs similarity index 97% rename from src/StellaOps.Scheduler.Storage.Mongo/Services/SchedulerAuditService.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Services/SchedulerAuditService.cs index 8497ba97..b6985939 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo/Services/SchedulerAuditService.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Services/SchedulerAuditService.cs @@ -1,62 +1,62 @@ -using Microsoft.Extensions.Logging; -using StellaOps.Scheduler.Models; -using StellaOps.Scheduler.Storage.Mongo.Repositories; - -namespace StellaOps.Scheduler.Storage.Mongo.Services; - -internal sealed class SchedulerAuditService : ISchedulerAuditService -{ - private readonly IAuditRepository _repository; - private readonly TimeProvider _timeProvider; - private readonly ILogger<SchedulerAuditService> _logger; - - public SchedulerAuditService( - IAuditRepository repository, - TimeProvider? timeProvider, - ILogger<SchedulerAuditService> logger) - { - _repository = repository ?? throw new ArgumentNullException(nameof(repository)); - _timeProvider = timeProvider ?? TimeProvider.System; - _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - } - - public async Task<AuditRecord> WriteAsync( - SchedulerAuditEvent auditEvent, - CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(auditEvent); - - var occurredAt = auditEvent.OccurredAt ?? 
_timeProvider.GetUtcNow(); - var identifier = string.IsNullOrWhiteSpace(auditEvent.AuditId) - ? $"audit_{Guid.NewGuid():N}" - : auditEvent.AuditId!.Trim(); - - var metadata = auditEvent.Metadata is null - ? Enumerable.Empty<KeyValuePair<string, string>>() - : auditEvent.Metadata.Where(pair => !string.IsNullOrWhiteSpace(pair.Key) && !string.IsNullOrWhiteSpace(pair.Value)); - - var record = new AuditRecord( - identifier, - auditEvent.TenantId, - auditEvent.Category, - auditEvent.Action, - occurredAt, - auditEvent.Actor, - auditEvent.EntityId, - auditEvent.ScheduleId, - auditEvent.RunId, - auditEvent.CorrelationId, - metadata, - auditEvent.Message); - - await _repository.InsertAsync(record, session: null, cancellationToken).ConfigureAwait(false); - - _logger.LogDebug( - "Scheduler audit record persisted with id {AuditId} for tenant {TenantId} action {Action}.", - record.Id, - record.TenantId, - record.Action); - - return record; - } -} +using Microsoft.Extensions.Logging; +using StellaOps.Scheduler.Models; +using StellaOps.Scheduler.Storage.Mongo.Repositories; + +namespace StellaOps.Scheduler.Storage.Mongo.Services; + +internal sealed class SchedulerAuditService : ISchedulerAuditService +{ + private readonly IAuditRepository _repository; + private readonly TimeProvider _timeProvider; + private readonly ILogger<SchedulerAuditService> _logger; + + public SchedulerAuditService( + IAuditRepository repository, + TimeProvider? timeProvider, + ILogger<SchedulerAuditService> logger) + { + _repository = repository ?? throw new ArgumentNullException(nameof(repository)); + _timeProvider = timeProvider ?? TimeProvider.System; + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public async Task<AuditRecord> WriteAsync( + SchedulerAuditEvent auditEvent, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(auditEvent); + + var occurredAt = auditEvent.OccurredAt ?? _timeProvider.GetUtcNow(); + var identifier = string.IsNullOrWhiteSpace(auditEvent.AuditId) + ? $"audit_{Guid.NewGuid():N}" + : auditEvent.AuditId!.Trim(); + + var metadata = auditEvent.Metadata is null + ? 
Enumerable.Empty<KeyValuePair<string, string>>() + : auditEvent.Metadata.Where(pair => !string.IsNullOrWhiteSpace(pair.Key) && !string.IsNullOrWhiteSpace(pair.Value)); + + var record = new AuditRecord( + identifier, + auditEvent.TenantId, + auditEvent.Category, + auditEvent.Action, + occurredAt, + auditEvent.Actor, + auditEvent.EntityId, + auditEvent.ScheduleId, + auditEvent.RunId, + auditEvent.CorrelationId, + metadata, + auditEvent.Message); + + await _repository.InsertAsync(record, session: null, cancellationToken).ConfigureAwait(false); + + _logger.LogDebug( + "Scheduler audit record persisted with id {AuditId} for tenant {TenantId} action {Action}.", + record.Id, + record.TenantId, + record.Action); + + return record; + } +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Sessions/ISchedulerMongoSessionFactory.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Sessions/ISchedulerMongoSessionFactory.cs similarity index 97% rename from src/StellaOps.Scheduler.Storage.Mongo/Sessions/ISchedulerMongoSessionFactory.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Sessions/ISchedulerMongoSessionFactory.cs index 809d5197..6c9deec2 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo/Sessions/ISchedulerMongoSessionFactory.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Sessions/ISchedulerMongoSessionFactory.cs @@ -1,18 +1,18 @@ -using MongoDB.Driver; - -namespace StellaOps.Scheduler.Storage.Mongo.Sessions; - -/// <summary> -/// Provides helper methods for creating MongoDB sessions used by scheduler storage. -/// </summary> -public interface ISchedulerMongoSessionFactory -{ - /// <summary> - /// Starts a new client session applying the provided options. - /// </summary> - /// <param name="options">Session options (optional).</param> - /// <param name="cancellationToken">Cancellation token.</param> - Task<IClientSessionHandle> StartSessionAsync( - SchedulerMongoSessionOptions? options = null, - CancellationToken cancellationToken = default); -} +using MongoDB.Driver; + +namespace StellaOps.Scheduler.Storage.Mongo.Sessions; + +/// <summary> +/// Provides helper methods for creating MongoDB sessions used by scheduler storage. +/// </summary> +public interface ISchedulerMongoSessionFactory +{ + /// <summary> + /// Starts a new client session applying the provided options. + /// </summary> + /// <param name="options">Session options (optional).</param> + /// <param name="cancellationToken">Cancellation token.</param> + Task<IClientSessionHandle> StartSessionAsync( + SchedulerMongoSessionOptions? 
options = null, + CancellationToken cancellationToken = default); +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Sessions/SchedulerMongoSessionFactory.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Sessions/SchedulerMongoSessionFactory.cs similarity index 97% rename from src/StellaOps.Scheduler.Storage.Mongo/Sessions/SchedulerMongoSessionFactory.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Sessions/SchedulerMongoSessionFactory.cs index ade8c7b1..1b234232 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo/Sessions/SchedulerMongoSessionFactory.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Sessions/SchedulerMongoSessionFactory.cs @@ -1,32 +1,32 @@ -using MongoDB.Driver; -using StellaOps.Scheduler.Storage.Mongo.Internal; - -namespace StellaOps.Scheduler.Storage.Mongo.Sessions; - -internal sealed class SchedulerMongoSessionFactory : ISchedulerMongoSessionFactory -{ - private readonly SchedulerMongoContext _context; - - public SchedulerMongoSessionFactory(SchedulerMongoContext context) - { - _context = context ?? throw new ArgumentNullException(nameof(context)); - } - - public Task<IClientSessionHandle> StartSessionAsync( - SchedulerMongoSessionOptions? options = null, - CancellationToken cancellationToken = default) - { - var clientOptions = new ClientSessionOptions - { - CausalConsistency = options?.CausalConsistency ?? true, - }; - - if (options?.ReadPreference is not null) - { - clientOptions.DefaultTransactionOptions = new TransactionOptions( - readPreference: options.ReadPreference); - } - - return _context.Client.StartSessionAsync(clientOptions, cancellationToken); - } -} +using MongoDB.Driver; +using StellaOps.Scheduler.Storage.Mongo.Internal; + +namespace StellaOps.Scheduler.Storage.Mongo.Sessions; + +internal sealed class SchedulerMongoSessionFactory : ISchedulerMongoSessionFactory +{ + private readonly SchedulerMongoContext _context; + + public SchedulerMongoSessionFactory(SchedulerMongoContext context) + { + _context = context ?? throw new ArgumentNullException(nameof(context)); + } + + public Task<IClientSessionHandle> StartSessionAsync( + SchedulerMongoSessionOptions? options = null, + CancellationToken cancellationToken = default) + { + var clientOptions = new ClientSessionOptions + { + CausalConsistency = options?.CausalConsistency ?? true, + }; + + if (options?.ReadPreference is not null) + { + clientOptions.DefaultTransactionOptions = new TransactionOptions( + readPreference: options.ReadPreference); + } + + return _context.Client.StartSessionAsync(clientOptions, cancellationToken); + } +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo/Sessions/SchedulerMongoSessionOptions.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Sessions/SchedulerMongoSessionOptions.cs similarity index 97% rename from src/StellaOps.Scheduler.Storage.Mongo/Sessions/SchedulerMongoSessionOptions.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Sessions/SchedulerMongoSessionOptions.cs index c65a6c90..1f03f980 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo/Sessions/SchedulerMongoSessionOptions.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/Sessions/SchedulerMongoSessionOptions.cs @@ -1,19 +1,19 @@ -using MongoDB.Driver; - -namespace StellaOps.Scheduler.Storage.Mongo.Sessions; - -/// <summary> -/// Options controlling MongoDB client sessions for scheduler storage operations. 
-/// </summary>
-public sealed class SchedulerMongoSessionOptions
-{
-    /// <summary>
-    /// When true (default), the session enables causal consistency for read operations following writes.
-    /// </summary>
-    public bool CausalConsistency { get; init; } = true;
-
-    /// <summary>
-    /// Optional read preference override applied to the session when specified.
-    /// </summary>
-    public ReadPreference? ReadPreference { get; init; }
-}
+using MongoDB.Driver;
+
+namespace StellaOps.Scheduler.Storage.Mongo.Sessions;
+
+/// <summary>
+/// Options controlling MongoDB client sessions for scheduler storage operations.
+/// </summary>
+public sealed class SchedulerMongoSessionOptions
+{
+    /// <summary>
+    /// When true (default), the session enables causal consistency for read operations following writes.
+    /// </summary>
+    public bool CausalConsistency { get; init; } = true;
+
+    /// <summary>
+    /// Optional read preference override applied to the session when specified.
+    /// </summary>
+    public ReadPreference? ReadPreference { get; init; }
+}
diff --git a/src/StellaOps.Scheduler.Storage.Mongo/StellaOps.Scheduler.Storage.Mongo.csproj b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/StellaOps.Scheduler.Storage.Mongo.csproj
similarity index 98%
rename from src/StellaOps.Scheduler.Storage.Mongo/StellaOps.Scheduler.Storage.Mongo.csproj
rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/StellaOps.Scheduler.Storage.Mongo.csproj
index 9445065c..985c4940 100644
--- a/src/StellaOps.Scheduler.Storage.Mongo/StellaOps.Scheduler.Storage.Mongo.csproj
+++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/StellaOps.Scheduler.Storage.Mongo.csproj
@@ -1,19 +1,19 @@
-<Project Sdk="Microsoft.NET.Sdk">
-  <PropertyGroup>
-    <TargetFramework>net10.0</TargetFramework>
-    <ImplicitUsings>enable</ImplicitUsings>
-    <Nullable>enable</Nullable>
-  </PropertyGroup>
-  <ItemGroup>
-    <PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="10.0.0-rc.2.25502.107" />
-    <PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" Version="10.0.0-rc.2.25502.107" />
-    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" />
-    <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" />
-    <PackageReference Include="Microsoft.Extensions.Options.ConfigurationExtensions" Version="10.0.0-rc.2.25502.107" />
-    <PackageReference Include="MongoDB.Bson" Version="3.5.0" />
-    <PackageReference Include="MongoDB.Driver" Version="3.5.0" />
-  </ItemGroup>
-  <ItemGroup>
-    <ProjectReference Include="../StellaOps.Scheduler.Models/StellaOps.Scheduler.Models.csproj" />
-  </ItemGroup>
-</Project>
+<Project Sdk="Microsoft.NET.Sdk">
+  <PropertyGroup>
+    <TargetFramework>net10.0</TargetFramework>
+    <ImplicitUsings>enable</ImplicitUsings>
+    <Nullable>enable</Nullable>
+  </PropertyGroup>
+  <ItemGroup>
+    <PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="10.0.0-rc.2.25502.107" />
+    <PackageReference Include="Microsoft.Extensions.Hosting.Abstractions" Version="10.0.0-rc.2.25502.107" />
+    <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" />
+    <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" />
+    <PackageReference Include="Microsoft.Extensions.Options.ConfigurationExtensions" Version="10.0.0-rc.2.25502.107" />
+    <PackageReference Include="MongoDB.Bson" Version="3.5.0" />
+    <PackageReference
Include="MongoDB.Driver" Version="3.5.0" /> + </ItemGroup> + <ItemGroup> + <ProjectReference Include="../StellaOps.Scheduler.Models/StellaOps.Scheduler.Models.csproj" /> + </ItemGroup> +</Project> diff --git a/src/StellaOps.Scheduler.Storage.Mongo/TASKS.md b/src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/TASKS.md similarity index 100% rename from src/StellaOps.Scheduler.Storage.Mongo/TASKS.md rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Storage.Mongo/TASKS.md diff --git a/src/StellaOps.Scheduler.Worker/AGENTS.md b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/AGENTS.md similarity index 100% rename from src/StellaOps.Scheduler.Worker/AGENTS.md rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/AGENTS.md diff --git a/src/StellaOps.Scheduler.Worker/DependencyInjection/SchedulerWorkerServiceCollectionExtensions.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/DependencyInjection/SchedulerWorkerServiceCollectionExtensions.cs similarity index 97% rename from src/StellaOps.Scheduler.Worker/DependencyInjection/SchedulerWorkerServiceCollectionExtensions.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/DependencyInjection/SchedulerWorkerServiceCollectionExtensions.cs index abaab934..eaf636a4 100644 --- a/src/StellaOps.Scheduler.Worker/DependencyInjection/SchedulerWorkerServiceCollectionExtensions.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/DependencyInjection/SchedulerWorkerServiceCollectionExtensions.cs @@ -1,102 +1,102 @@ -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Notify.Queue; -using StellaOps.Scheduler.Worker.Events; -using StellaOps.Scheduler.Worker.Execution; -using StellaOps.Scheduler.Worker.Options; -using StellaOps.Scheduler.Worker.Observability; -using StellaOps.Scheduler.Worker.Planning; -using StellaOps.Scheduler.Worker.Policy; -using StellaOps.Scheduler.Worker.Graph; -using StellaOps.Scheduler.Worker.Graph.Cartographer; -using StellaOps.Scheduler.Worker.Graph.Scheduler; - -namespace StellaOps.Scheduler.Worker.DependencyInjection; - -public static class SchedulerWorkerServiceCollectionExtensions -{ - public static IServiceCollection AddSchedulerWorker(this IServiceCollection services, IConfiguration configuration) - { - ArgumentNullException.ThrowIfNull(services); - ArgumentNullException.ThrowIfNull(configuration); - - services - .AddOptions<SchedulerWorkerOptions>() - .Bind(configuration) - .PostConfigure(options => options.Validate()); - - services.AddSingleton(TimeProvider.System); - services.AddSingleton<SchedulerWorkerMetrics>(); - services.AddSingleton<IImpactTargetingService, ImpactTargetingService>(); - services.AddSingleton<IImpactShardPlanner, ImpactShardPlanner>(); - services.AddSingleton<IPlannerQueueDispatchService, PlannerQueueDispatchService>(); - services.AddSingleton<PlannerExecutionService>(); - services.AddSingleton<IRunnerExecutionService, RunnerExecutionService>(); - services.AddSingleton<IPolicyRunTargetingService, PolicyRunTargetingService>(); - services.AddSingleton<PolicyRunExecutionService>(); - services.AddSingleton<GraphBuildExecutionService>(); - services.AddSingleton<GraphOverlayExecutionService>(); - services.AddSingleton<ISchedulerEventPublisher>(sp => - { - var loggerFactory = sp.GetRequiredService<ILoggerFactory>(); - var queue = sp.GetService<INotifyEventQueue>(); - var queueOptions = sp.GetService<NotifyEventQueueOptions>(); - 
var timeProvider = sp.GetRequiredService<TimeProvider>(); - - if (queue is null || queueOptions is null) - { - return new NullSchedulerEventPublisher(loggerFactory.CreateLogger<NullSchedulerEventPublisher>()); - } - - return new SchedulerEventPublisher( - queue, - queueOptions, - timeProvider, - loggerFactory.CreateLogger<SchedulerEventPublisher>()); - }); - - services.AddHttpClient<IScannerReportClient, HttpScannerReportClient>(); - services.AddHttpClient<IPolicyRunClient, HttpPolicyRunClient>(); - services.AddHttpClient<ICartographerBuildClient, HttpCartographerBuildClient>((sp, client) => - { - var options = sp.GetRequiredService<IOptions<SchedulerWorkerOptions>>().Value.Graph; - client.Timeout = options.CartographerTimeout; - - if (options.Cartographer.BaseAddress is { } baseAddress) - { - client.BaseAddress = baseAddress; - } - }); - services.AddHttpClient<ICartographerOverlayClient, HttpCartographerOverlayClient>((sp, client) => - { - var options = sp.GetRequiredService<IOptions<SchedulerWorkerOptions>>().Value.Graph; - client.Timeout = options.CartographerTimeout; - - if (options.Cartographer.BaseAddress is { } baseAddress) - { - client.BaseAddress = baseAddress; - } - }); - services.AddHttpClient<IGraphJobCompletionClient, HttpGraphJobCompletionClient>((sp, client) => - { - var options = sp.GetRequiredService<IOptions<SchedulerWorkerOptions>>().Value.Graph; - client.Timeout = options.CartographerTimeout; - - if (options.SchedulerApi.BaseAddress is { } baseAddress) - { - client.BaseAddress = baseAddress; - } - }); - - services.AddHostedService<PlannerBackgroundService>(); - services.AddHostedService<PlannerQueueDispatcherBackgroundService>(); - services.AddHostedService<RunnerBackgroundService>(); - services.AddHostedService<PolicyRunDispatchBackgroundService>(); - services.AddHostedService<GraphBuildBackgroundService>(); - services.AddHostedService<GraphOverlayBackgroundService>(); - - return services; - } -} +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Notify.Queue; +using StellaOps.Scheduler.Worker.Events; +using StellaOps.Scheduler.Worker.Execution; +using StellaOps.Scheduler.Worker.Options; +using StellaOps.Scheduler.Worker.Observability; +using StellaOps.Scheduler.Worker.Planning; +using StellaOps.Scheduler.Worker.Policy; +using StellaOps.Scheduler.Worker.Graph; +using StellaOps.Scheduler.Worker.Graph.Cartographer; +using StellaOps.Scheduler.Worker.Graph.Scheduler; + +namespace StellaOps.Scheduler.Worker.DependencyInjection; + +public static class SchedulerWorkerServiceCollectionExtensions +{ + public static IServiceCollection AddSchedulerWorker(this IServiceCollection services, IConfiguration configuration) + { + ArgumentNullException.ThrowIfNull(services); + ArgumentNullException.ThrowIfNull(configuration); + + services + .AddOptions<SchedulerWorkerOptions>() + .Bind(configuration) + .PostConfigure(options => options.Validate()); + + services.AddSingleton(TimeProvider.System); + services.AddSingleton<SchedulerWorkerMetrics>(); + services.AddSingleton<IImpactTargetingService, ImpactTargetingService>(); + services.AddSingleton<IImpactShardPlanner, ImpactShardPlanner>(); + services.AddSingleton<IPlannerQueueDispatchService, PlannerQueueDispatchService>(); + services.AddSingleton<PlannerExecutionService>(); + services.AddSingleton<IRunnerExecutionService, RunnerExecutionService>(); + services.AddSingleton<IPolicyRunTargetingService, 
PolicyRunTargetingService>(); + services.AddSingleton<PolicyRunExecutionService>(); + services.AddSingleton<GraphBuildExecutionService>(); + services.AddSingleton<GraphOverlayExecutionService>(); + services.AddSingleton<ISchedulerEventPublisher>(sp => + { + var loggerFactory = sp.GetRequiredService<ILoggerFactory>(); + var queue = sp.GetService<INotifyEventQueue>(); + var queueOptions = sp.GetService<NotifyEventQueueOptions>(); + var timeProvider = sp.GetRequiredService<TimeProvider>(); + + if (queue is null || queueOptions is null) + { + return new NullSchedulerEventPublisher(loggerFactory.CreateLogger<NullSchedulerEventPublisher>()); + } + + return new SchedulerEventPublisher( + queue, + queueOptions, + timeProvider, + loggerFactory.CreateLogger<SchedulerEventPublisher>()); + }); + + services.AddHttpClient<IScannerReportClient, HttpScannerReportClient>(); + services.AddHttpClient<IPolicyRunClient, HttpPolicyRunClient>(); + services.AddHttpClient<ICartographerBuildClient, HttpCartographerBuildClient>((sp, client) => + { + var options = sp.GetRequiredService<IOptions<SchedulerWorkerOptions>>().Value.Graph; + client.Timeout = options.CartographerTimeout; + + if (options.Cartographer.BaseAddress is { } baseAddress) + { + client.BaseAddress = baseAddress; + } + }); + services.AddHttpClient<ICartographerOverlayClient, HttpCartographerOverlayClient>((sp, client) => + { + var options = sp.GetRequiredService<IOptions<SchedulerWorkerOptions>>().Value.Graph; + client.Timeout = options.CartographerTimeout; + + if (options.Cartographer.BaseAddress is { } baseAddress) + { + client.BaseAddress = baseAddress; + } + }); + services.AddHttpClient<IGraphJobCompletionClient, HttpGraphJobCompletionClient>((sp, client) => + { + var options = sp.GetRequiredService<IOptions<SchedulerWorkerOptions>>().Value.Graph; + client.Timeout = options.CartographerTimeout; + + if (options.SchedulerApi.BaseAddress is { } baseAddress) + { + client.BaseAddress = baseAddress; + } + }); + + services.AddHostedService<PlannerBackgroundService>(); + services.AddHostedService<PlannerQueueDispatcherBackgroundService>(); + services.AddHostedService<RunnerBackgroundService>(); + services.AddHostedService<PolicyRunDispatchBackgroundService>(); + services.AddHostedService<GraphBuildBackgroundService>(); + services.AddHostedService<GraphOverlayBackgroundService>(); + + return services; + } +} diff --git a/src/StellaOps.Scheduler.Worker/Events/SchedulerEventPublisher.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Events/SchedulerEventPublisher.cs similarity index 100% rename from src/StellaOps.Scheduler.Worker/Events/SchedulerEventPublisher.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Events/SchedulerEventPublisher.cs diff --git a/src/StellaOps.Scheduler.Worker/Execution/HttpScannerReportClient.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Execution/HttpScannerReportClient.cs similarity index 100% rename from src/StellaOps.Scheduler.Worker/Execution/HttpScannerReportClient.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Execution/HttpScannerReportClient.cs diff --git a/src/StellaOps.Scheduler.Worker/Execution/RunnerBackgroundService.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Execution/RunnerBackgroundService.cs similarity index 100% rename from src/StellaOps.Scheduler.Worker/Execution/RunnerBackgroundService.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Execution/RunnerBackgroundService.cs diff --git 
a/src/StellaOps.Scheduler.Worker/Execution/RunnerExecutionService.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Execution/RunnerExecutionService.cs similarity index 100% rename from src/StellaOps.Scheduler.Worker/Execution/RunnerExecutionService.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Execution/RunnerExecutionService.cs diff --git a/src/StellaOps.Scheduler.Worker/Execution/ScannerReportClient.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Execution/ScannerReportClient.cs similarity index 100% rename from src/StellaOps.Scheduler.Worker/Execution/ScannerReportClient.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Execution/ScannerReportClient.cs diff --git a/src/StellaOps.Scheduler.Worker/Graph/Cartographer/HttpCartographerBuildClient.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Graph/Cartographer/HttpCartographerBuildClient.cs similarity index 97% rename from src/StellaOps.Scheduler.Worker/Graph/Cartographer/HttpCartographerBuildClient.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Graph/Cartographer/HttpCartographerBuildClient.cs index 788a409f..7b6d1489 100644 --- a/src/StellaOps.Scheduler.Worker/Graph/Cartographer/HttpCartographerBuildClient.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Graph/Cartographer/HttpCartographerBuildClient.cs @@ -1,234 +1,234 @@ -using System; -using System.Collections.Generic; -using System.Net.Http; -using System.Net.Http.Json; -using System.Text.Json; -using System.Text.Json.Serialization; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Scheduler.Models; -using StellaOps.Scheduler.Worker.Options; - -namespace StellaOps.Scheduler.Worker.Graph.Cartographer; - -internal sealed class HttpCartographerBuildClient : ICartographerBuildClient -{ - private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) - { - DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull - }; - - private readonly HttpClient _httpClient; - private readonly IOptions<SchedulerWorkerOptions> _options; - private readonly ILogger<HttpCartographerBuildClient> _logger; - - public HttpCartographerBuildClient( - HttpClient httpClient, - IOptions<SchedulerWorkerOptions> options, - ILogger<HttpCartographerBuildClient> logger) - { - _httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient)); - _options = options ?? throw new ArgumentNullException(nameof(options)); - _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); - } - - public async Task<CartographerBuildResult> StartBuildAsync(GraphBuildJob job, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(job); - - var graphOptions = _options.Value.Graph; - var apiOptions = graphOptions.Cartographer; - - if (apiOptions.BaseAddress is null) - { - throw new InvalidOperationException("Cartographer base address must be configured before starting graph builds."); - } - - if (_httpClient.BaseAddress != apiOptions.BaseAddress) - { - _httpClient.BaseAddress = apiOptions.BaseAddress; - } - - var payload = new CartographerBuildRequest - { - TenantId = job.TenantId, - SbomId = job.SbomId, - SbomVersionId = job.SbomVersionId, - SbomDigest = job.SbomDigest, - GraphSnapshotId = job.GraphSnapshotId, - CorrelationId = job.CorrelationId, - Metadata = job.Metadata - }; - - using var request = new HttpRequestMessage(HttpMethod.Post, apiOptions.BuildPath) - { - Content = JsonContent.Create(payload, options: SerializerOptions) - }; - - if (!string.IsNullOrWhiteSpace(apiOptions.ApiKeyHeader) && !string.IsNullOrWhiteSpace(apiOptions.ApiKey)) - { - request.Headers.TryAddWithoutValidation(apiOptions.ApiKeyHeader!, apiOptions.ApiKey); - } - - using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); - - if (!response.IsSuccessStatusCode) - { - var body = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - throw new CartographerBuildClientException($"Cartographer build submission failed with status {(int)response.StatusCode}: {body}"); - } - - CartographerBuildResponseModel? model = null; - try - { - if (response.Content.Headers.ContentLength is > 0) - { - model = await response.Content.ReadFromJsonAsync<CartographerBuildResponseModel>(SerializerOptions, cancellationToken).ConfigureAwait(false); - } - } - catch (Exception ex) - { - _logger.LogWarning(ex, "Failed to parse Cartographer build response for job {JobId}.", job.Id); - } - - var status = ParseStatus(model?.Status); - - if ((status == GraphJobStatus.Pending || status == GraphJobStatus.Queued || status == GraphJobStatus.Running) && !string.IsNullOrWhiteSpace(model?.CartographerJobId)) - { - return await PollBuildStatusAsync(model.CartographerJobId!, cancellationToken).ConfigureAwait(false); - } - - return new CartographerBuildResult( - status, - model?.CartographerJobId, - model?.GraphSnapshotId, - model?.ResultUri, - model?.Error); - } - - private static GraphJobStatus ParseStatus(string? value) - { - if (string.IsNullOrWhiteSpace(value)) - { - return GraphJobStatus.Completed; - } - - return value.Trim().ToLowerInvariant() switch - { - "pending" => GraphJobStatus.Pending, - "queued" => GraphJobStatus.Queued, - "running" => GraphJobStatus.Running, - "failed" => GraphJobStatus.Failed, - "cancelled" => GraphJobStatus.Cancelled, - _ => GraphJobStatus.Completed - }; - } - - private async Task<CartographerBuildResult> PollBuildStatusAsync(string cartographerJobId, CancellationToken cancellationToken) - { - var graphOptions = _options.Value.Graph; - var apiOptions = graphOptions.Cartographer; - if (string.IsNullOrWhiteSpace(apiOptions.StatusPath)) - { - return new CartographerBuildResult(GraphJobStatus.Running, cartographerJobId, null, null, "status path not configured"); - } - - var statusPath = apiOptions.StatusPath.Replace("{jobId}", Uri.EscapeDataString(cartographerJobId), StringComparison.Ordinal); - var attempt = 0; - CartographerBuildResponseModel? 
model = null; - - while (attempt < graphOptions.MaxAttempts) - { - cancellationToken.ThrowIfCancellationRequested(); - attempt++; - - try - { - using var statusResponse = await _httpClient.GetAsync(statusPath, cancellationToken).ConfigureAwait(false); - if (!statusResponse.IsSuccessStatusCode) - { - var body = await statusResponse.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - _logger.LogWarning("Cartographer build status request failed ({StatusCode}) for job {JobId}: {Body}", (int)statusResponse.StatusCode, cartographerJobId, body); - break; - } - - model = await statusResponse.Content.ReadFromJsonAsync<CartographerBuildResponseModel>(SerializerOptions, cancellationToken).ConfigureAwait(false); - var status = ParseStatus(model?.Status); - - if (status is GraphJobStatus.Completed or GraphJobStatus.Cancelled or GraphJobStatus.Failed) - { - return new CartographerBuildResult( - status, - cartographerJobId, - model?.GraphSnapshotId, - model?.ResultUri, - model?.Error); - } - } - catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) - { - throw; - } - catch (Exception ex) - { - _logger.LogWarning(ex, "Polling Cartographer build status failed for job {JobId}.", cartographerJobId); - break; - } - - await Task.Delay(graphOptions.StatusPollInterval, cancellationToken).ConfigureAwait(false); - } - - var fallbackStatus = ParseStatus(model?.Status); - return new CartographerBuildResult( - fallbackStatus, - cartographerJobId, - model?.GraphSnapshotId, - model?.ResultUri, - model?.Error); - } - - private sealed record CartographerBuildRequest - { - public string TenantId { get; init; } = string.Empty; - - public string SbomId { get; init; } = string.Empty; - - public string SbomVersionId { get; init; } = string.Empty; - - public string SbomDigest { get; init; } = string.Empty; - - public string? GraphSnapshotId { get; init; } - - public string? CorrelationId { get; init; } - - public IReadOnlyDictionary<string, string> Metadata { get; init; } = new Dictionary<string, string>(StringComparer.Ordinal); - } - - private sealed record CartographerBuildResponseModel - { - [JsonPropertyName("status")] - public string? Status { get; init; } - - [JsonPropertyName("cartographerJobId")] - public string? CartographerJobId { get; init; } - - [JsonPropertyName("graphSnapshotId")] - public string? GraphSnapshotId { get; init; } - - [JsonPropertyName("resultUri")] - public string? ResultUri { get; init; } - - [JsonPropertyName("error")] - public string? 
Error { get; init; } - } -} - -internal sealed class CartographerBuildClientException : Exception -{ - public CartographerBuildClientException(string message) - : base(message) - { - } -} +using System; +using System.Collections.Generic; +using System.Net.Http; +using System.Net.Http.Json; +using System.Text.Json; +using System.Text.Json.Serialization; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Scheduler.Models; +using StellaOps.Scheduler.Worker.Options; + +namespace StellaOps.Scheduler.Worker.Graph.Cartographer; + +internal sealed class HttpCartographerBuildClient : ICartographerBuildClient +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull + }; + + private readonly HttpClient _httpClient; + private readonly IOptions<SchedulerWorkerOptions> _options; + private readonly ILogger<HttpCartographerBuildClient> _logger; + + public HttpCartographerBuildClient( + HttpClient httpClient, + IOptions<SchedulerWorkerOptions> options, + ILogger<HttpCartographerBuildClient> logger) + { + _httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient)); + _options = options ?? throw new ArgumentNullException(nameof(options)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public async Task<CartographerBuildResult> StartBuildAsync(GraphBuildJob job, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(job); + + var graphOptions = _options.Value.Graph; + var apiOptions = graphOptions.Cartographer; + + if (apiOptions.BaseAddress is null) + { + throw new InvalidOperationException("Cartographer base address must be configured before starting graph builds."); + } + + if (_httpClient.BaseAddress != apiOptions.BaseAddress) + { + _httpClient.BaseAddress = apiOptions.BaseAddress; + } + + var payload = new CartographerBuildRequest + { + TenantId = job.TenantId, + SbomId = job.SbomId, + SbomVersionId = job.SbomVersionId, + SbomDigest = job.SbomDigest, + GraphSnapshotId = job.GraphSnapshotId, + CorrelationId = job.CorrelationId, + Metadata = job.Metadata + }; + + using var request = new HttpRequestMessage(HttpMethod.Post, apiOptions.BuildPath) + { + Content = JsonContent.Create(payload, options: SerializerOptions) + }; + + if (!string.IsNullOrWhiteSpace(apiOptions.ApiKeyHeader) && !string.IsNullOrWhiteSpace(apiOptions.ApiKey)) + { + request.Headers.TryAddWithoutValidation(apiOptions.ApiKeyHeader!, apiOptions.ApiKey); + } + + using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); + + if (!response.IsSuccessStatusCode) + { + var body = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + throw new CartographerBuildClientException($"Cartographer build submission failed with status {(int)response.StatusCode}: {body}"); + } + + CartographerBuildResponseModel? 
model = null; + try + { + if (response.Content.Headers.ContentLength is > 0) + { + model = await response.Content.ReadFromJsonAsync<CartographerBuildResponseModel>(SerializerOptions, cancellationToken).ConfigureAwait(false); + } + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Failed to parse Cartographer build response for job {JobId}.", job.Id); + } + + var status = ParseStatus(model?.Status); + + if ((status == GraphJobStatus.Pending || status == GraphJobStatus.Queued || status == GraphJobStatus.Running) && !string.IsNullOrWhiteSpace(model?.CartographerJobId)) + { + return await PollBuildStatusAsync(model.CartographerJobId!, cancellationToken).ConfigureAwait(false); + } + + return new CartographerBuildResult( + status, + model?.CartographerJobId, + model?.GraphSnapshotId, + model?.ResultUri, + model?.Error); + } + + private static GraphJobStatus ParseStatus(string? value) + { + if (string.IsNullOrWhiteSpace(value)) + { + return GraphJobStatus.Completed; + } + + return value.Trim().ToLowerInvariant() switch + { + "pending" => GraphJobStatus.Pending, + "queued" => GraphJobStatus.Queued, + "running" => GraphJobStatus.Running, + "failed" => GraphJobStatus.Failed, + "cancelled" => GraphJobStatus.Cancelled, + _ => GraphJobStatus.Completed + }; + } + + private async Task<CartographerBuildResult> PollBuildStatusAsync(string cartographerJobId, CancellationToken cancellationToken) + { + var graphOptions = _options.Value.Graph; + var apiOptions = graphOptions.Cartographer; + if (string.IsNullOrWhiteSpace(apiOptions.StatusPath)) + { + return new CartographerBuildResult(GraphJobStatus.Running, cartographerJobId, null, null, "status path not configured"); + } + + var statusPath = apiOptions.StatusPath.Replace("{jobId}", Uri.EscapeDataString(cartographerJobId), StringComparison.Ordinal); + var attempt = 0; + CartographerBuildResponseModel? 
model = null; + + while (attempt < graphOptions.MaxAttempts) + { + cancellationToken.ThrowIfCancellationRequested(); + attempt++; + + try + { + using var statusResponse = await _httpClient.GetAsync(statusPath, cancellationToken).ConfigureAwait(false); + if (!statusResponse.IsSuccessStatusCode) + { + var body = await statusResponse.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + _logger.LogWarning("Cartographer build status request failed ({StatusCode}) for job {JobId}: {Body}", (int)statusResponse.StatusCode, cartographerJobId, body); + break; + } + + model = await statusResponse.Content.ReadFromJsonAsync<CartographerBuildResponseModel>(SerializerOptions, cancellationToken).ConfigureAwait(false); + var status = ParseStatus(model?.Status); + + if (status is GraphJobStatus.Completed or GraphJobStatus.Cancelled or GraphJobStatus.Failed) + { + return new CartographerBuildResult( + status, + cartographerJobId, + model?.GraphSnapshotId, + model?.ResultUri, + model?.Error); + } + } + catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) + { + throw; + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Polling Cartographer build status failed for job {JobId}.", cartographerJobId); + break; + } + + await Task.Delay(graphOptions.StatusPollInterval, cancellationToken).ConfigureAwait(false); + } + + var fallbackStatus = ParseStatus(model?.Status); + return new CartographerBuildResult( + fallbackStatus, + cartographerJobId, + model?.GraphSnapshotId, + model?.ResultUri, + model?.Error); + } + + private sealed record CartographerBuildRequest + { + public string TenantId { get; init; } = string.Empty; + + public string SbomId { get; init; } = string.Empty; + + public string SbomVersionId { get; init; } = string.Empty; + + public string SbomDigest { get; init; } = string.Empty; + + public string? GraphSnapshotId { get; init; } + + public string? CorrelationId { get; init; } + + public IReadOnlyDictionary<string, string> Metadata { get; init; } = new Dictionary<string, string>(StringComparer.Ordinal); + } + + private sealed record CartographerBuildResponseModel + { + [JsonPropertyName("status")] + public string? Status { get; init; } + + [JsonPropertyName("cartographerJobId")] + public string? CartographerJobId { get; init; } + + [JsonPropertyName("graphSnapshotId")] + public string? GraphSnapshotId { get; init; } + + [JsonPropertyName("resultUri")] + public string? ResultUri { get; init; } + + [JsonPropertyName("error")] + public string? 
Error { get; init; } + } +} + +internal sealed class CartographerBuildClientException : Exception +{ + public CartographerBuildClientException(string message) + : base(message) + { + } +} diff --git a/src/StellaOps.Scheduler.Worker/Graph/Cartographer/HttpCartographerOverlayClient.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Graph/Cartographer/HttpCartographerOverlayClient.cs similarity index 97% rename from src/StellaOps.Scheduler.Worker/Graph/Cartographer/HttpCartographerOverlayClient.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Graph/Cartographer/HttpCartographerOverlayClient.cs index ec71dcdf..8168baad 100644 --- a/src/StellaOps.Scheduler.Worker/Graph/Cartographer/HttpCartographerOverlayClient.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Graph/Cartographer/HttpCartographerOverlayClient.cs @@ -1,227 +1,227 @@ -using System; -using System.Collections.Generic; -using System.Net.Http; -using System.Net.Http.Json; -using System.Text.Json; -using System.Text.Json.Serialization; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Scheduler.Models; -using StellaOps.Scheduler.Worker.Options; - -namespace StellaOps.Scheduler.Worker.Graph.Cartographer; - -internal sealed class HttpCartographerOverlayClient : ICartographerOverlayClient -{ - private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) - { - DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull - }; - - private readonly HttpClient _httpClient; - private readonly IOptions<SchedulerWorkerOptions> _options; - private readonly ILogger<HttpCartographerOverlayClient> _logger; - - public HttpCartographerOverlayClient( - HttpClient httpClient, - IOptions<SchedulerWorkerOptions> options, - ILogger<HttpCartographerOverlayClient> logger) - { - _httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient)); - _options = options ?? throw new ArgumentNullException(nameof(options)); - _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); - } - - public async Task<CartographerOverlayResult> StartOverlayAsync(GraphOverlayJob job, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(job); - - var graphOptions = _options.Value.Graph; - var apiOptions = graphOptions.Cartographer; - - if (apiOptions.BaseAddress is null) - { - throw new InvalidOperationException("Cartographer base address must be configured before starting graph overlays."); - } - - if (_httpClient.BaseAddress != apiOptions.BaseAddress) - { - _httpClient.BaseAddress = apiOptions.BaseAddress; - } - - var payload = new CartographerOverlayRequest - { - TenantId = job.TenantId, - GraphSnapshotId = job.GraphSnapshotId, - OverlayKind = job.OverlayKind.ToString().ToLowerInvariant(), - OverlayKey = job.OverlayKey, - Subjects = job.Subjects, - CorrelationId = job.CorrelationId, - Metadata = job.Metadata - }; - - using var request = new HttpRequestMessage(HttpMethod.Post, apiOptions.OverlayPath) - { - Content = JsonContent.Create(payload, options: SerializerOptions) - }; - - if (!string.IsNullOrWhiteSpace(apiOptions.ApiKeyHeader) && !string.IsNullOrWhiteSpace(apiOptions.ApiKey)) - { - request.Headers.TryAddWithoutValidation(apiOptions.ApiKeyHeader!, apiOptions.ApiKey); - } - - using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); - if (!response.IsSuccessStatusCode) - { - var body = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - throw new CartographerOverlayClientException($"Cartographer overlay submission failed with status {(int)response.StatusCode}: {body}"); - } - - CartographerOverlayResponseModel? model = null; - try - { - if (response.Content.Headers.ContentLength is > 0) - { - model = await response.Content.ReadFromJsonAsync<CartographerOverlayResponseModel>(SerializerOptions, cancellationToken).ConfigureAwait(false); - } - } - catch (Exception ex) - { - _logger.LogWarning(ex, "Failed to parse Cartographer overlay response for job {JobId}.", job.Id); - } - - var status = ParseStatus(model?.Status); - - if ((status == GraphJobStatus.Pending || status == GraphJobStatus.Queued || status == GraphJobStatus.Running)) - { - return await PollOverlayStatusAsync(job.Id, cancellationToken).ConfigureAwait(false); - } - - return new CartographerOverlayResult( - status, - model?.GraphSnapshotId, - model?.ResultUri, - model?.Error); - } - - private static GraphJobStatus ParseStatus(string? value) - { - if (string.IsNullOrWhiteSpace(value)) - { - return GraphJobStatus.Completed; - } - - return value.Trim().ToLowerInvariant() switch - { - "pending" => GraphJobStatus.Pending, - "queued" => GraphJobStatus.Queued, - "running" => GraphJobStatus.Running, - "failed" => GraphJobStatus.Failed, - "cancelled" => GraphJobStatus.Cancelled, - _ => GraphJobStatus.Completed - }; - } - - private async Task<CartographerOverlayResult> PollOverlayStatusAsync(string overlayJobId, CancellationToken cancellationToken) - { - var graphOptions = _options.Value.Graph; - var apiOptions = graphOptions.Cartographer; - if (string.IsNullOrWhiteSpace(apiOptions.OverlayStatusPath)) - { - return new CartographerOverlayResult(GraphJobStatus.Running, null, null, "overlay status path not configured"); - } - - var path = apiOptions.OverlayStatusPath.Replace("{jobId}", Uri.EscapeDataString(overlayJobId), StringComparison.Ordinal); - var attempt = 0; - CartographerOverlayResponseModel? 
model = null; - - while (attempt < graphOptions.MaxAttempts) - { - cancellationToken.ThrowIfCancellationRequested(); - attempt++; - - try - { - using var response = await _httpClient.GetAsync(path, cancellationToken).ConfigureAwait(false); - if (!response.IsSuccessStatusCode) - { - var body = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - _logger.LogWarning("Cartographer overlay status request failed ({StatusCode}) for job {JobId}: {Body}", (int)response.StatusCode, overlayJobId, body); - break; - } - - model = await response.Content.ReadFromJsonAsync<CartographerOverlayResponseModel>(SerializerOptions, cancellationToken).ConfigureAwait(false); - var status = ParseStatus(model?.Status); - - if (status is GraphJobStatus.Completed or GraphJobStatus.Cancelled or GraphJobStatus.Failed) - { - return new CartographerOverlayResult( - status, - model?.GraphSnapshotId, - model?.ResultUri, - model?.Error); - } - } - catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) - { - throw; - } - catch (Exception ex) - { - _logger.LogWarning(ex, "Polling Cartographer overlay status failed for job {JobId}.", overlayJobId); - break; - } - - await Task.Delay(graphOptions.StatusPollInterval, cancellationToken).ConfigureAwait(false); - } - - var fallbackStatus = ParseStatus(model?.Status); - return new CartographerOverlayResult( - fallbackStatus, - model?.GraphSnapshotId, - model?.ResultUri, - model?.Error); - } - - private sealed record CartographerOverlayRequest - { - public string TenantId { get; init; } = string.Empty; - - public string? GraphSnapshotId { get; init; } - - public string OverlayKind { get; init; } = string.Empty; - - public string OverlayKey { get; init; } = string.Empty; - - public IReadOnlyList<string> Subjects { get; init; } = Array.Empty<string>(); - - public string? CorrelationId { get; init; } - - public IReadOnlyDictionary<string, string> Metadata { get; init; } = new Dictionary<string, string>(StringComparer.Ordinal); - } - - private sealed record CartographerOverlayResponseModel - { - [JsonPropertyName("status")] - public string? Status { get; init; } - - [JsonPropertyName("graphSnapshotId")] - public string? GraphSnapshotId { get; init; } - - [JsonPropertyName("resultUri")] - public string? ResultUri { get; init; } - - [JsonPropertyName("error")] - public string? 
Error { get; init; } - } -} - -internal sealed class CartographerOverlayClientException : Exception -{ - public CartographerOverlayClientException(string message) - : base(message) - { - } -} +using System; +using System.Collections.Generic; +using System.Net.Http; +using System.Net.Http.Json; +using System.Text.Json; +using System.Text.Json.Serialization; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Scheduler.Models; +using StellaOps.Scheduler.Worker.Options; + +namespace StellaOps.Scheduler.Worker.Graph.Cartographer; + +internal sealed class HttpCartographerOverlayClient : ICartographerOverlayClient +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull + }; + + private readonly HttpClient _httpClient; + private readonly IOptions<SchedulerWorkerOptions> _options; + private readonly ILogger<HttpCartographerOverlayClient> _logger; + + public HttpCartographerOverlayClient( + HttpClient httpClient, + IOptions<SchedulerWorkerOptions> options, + ILogger<HttpCartographerOverlayClient> logger) + { + _httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient)); + _options = options ?? throw new ArgumentNullException(nameof(options)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public async Task<CartographerOverlayResult> StartOverlayAsync(GraphOverlayJob job, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(job); + + var graphOptions = _options.Value.Graph; + var apiOptions = graphOptions.Cartographer; + + if (apiOptions.BaseAddress is null) + { + throw new InvalidOperationException("Cartographer base address must be configured before starting graph overlays."); + } + + if (_httpClient.BaseAddress != apiOptions.BaseAddress) + { + _httpClient.BaseAddress = apiOptions.BaseAddress; + } + + var payload = new CartographerOverlayRequest + { + TenantId = job.TenantId, + GraphSnapshotId = job.GraphSnapshotId, + OverlayKind = job.OverlayKind.ToString().ToLowerInvariant(), + OverlayKey = job.OverlayKey, + Subjects = job.Subjects, + CorrelationId = job.CorrelationId, + Metadata = job.Metadata + }; + + using var request = new HttpRequestMessage(HttpMethod.Post, apiOptions.OverlayPath) + { + Content = JsonContent.Create(payload, options: SerializerOptions) + }; + + if (!string.IsNullOrWhiteSpace(apiOptions.ApiKeyHeader) && !string.IsNullOrWhiteSpace(apiOptions.ApiKey)) + { + request.Headers.TryAddWithoutValidation(apiOptions.ApiKeyHeader!, apiOptions.ApiKey); + } + + using var response = await _httpClient.SendAsync(request, cancellationToken).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + var body = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + throw new CartographerOverlayClientException($"Cartographer overlay submission failed with status {(int)response.StatusCode}: {body}"); + } + + CartographerOverlayResponseModel? 
model = null; + try + { + if (response.Content.Headers.ContentLength is > 0) + { + model = await response.Content.ReadFromJsonAsync<CartographerOverlayResponseModel>(SerializerOptions, cancellationToken).ConfigureAwait(false); + } + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Failed to parse Cartographer overlay response for job {JobId}.", job.Id); + } + + var status = ParseStatus(model?.Status); + + if ((status == GraphJobStatus.Pending || status == GraphJobStatus.Queued || status == GraphJobStatus.Running)) + { + return await PollOverlayStatusAsync(job.Id, cancellationToken).ConfigureAwait(false); + } + + return new CartographerOverlayResult( + status, + model?.GraphSnapshotId, + model?.ResultUri, + model?.Error); + } + + private static GraphJobStatus ParseStatus(string? value) + { + if (string.IsNullOrWhiteSpace(value)) + { + return GraphJobStatus.Completed; + } + + return value.Trim().ToLowerInvariant() switch + { + "pending" => GraphJobStatus.Pending, + "queued" => GraphJobStatus.Queued, + "running" => GraphJobStatus.Running, + "failed" => GraphJobStatus.Failed, + "cancelled" => GraphJobStatus.Cancelled, + _ => GraphJobStatus.Completed + }; + } + + private async Task<CartographerOverlayResult> PollOverlayStatusAsync(string overlayJobId, CancellationToken cancellationToken) + { + var graphOptions = _options.Value.Graph; + var apiOptions = graphOptions.Cartographer; + if (string.IsNullOrWhiteSpace(apiOptions.OverlayStatusPath)) + { + return new CartographerOverlayResult(GraphJobStatus.Running, null, null, "overlay status path not configured"); + } + + var path = apiOptions.OverlayStatusPath.Replace("{jobId}", Uri.EscapeDataString(overlayJobId), StringComparison.Ordinal); + var attempt = 0; + CartographerOverlayResponseModel? model = null; + + while (attempt < graphOptions.MaxAttempts) + { + cancellationToken.ThrowIfCancellationRequested(); + attempt++; + + try + { + using var response = await _httpClient.GetAsync(path, cancellationToken).ConfigureAwait(false); + if (!response.IsSuccessStatusCode) + { + var body = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + _logger.LogWarning("Cartographer overlay status request failed ({StatusCode}) for job {JobId}: {Body}", (int)response.StatusCode, overlayJobId, body); + break; + } + + model = await response.Content.ReadFromJsonAsync<CartographerOverlayResponseModel>(SerializerOptions, cancellationToken).ConfigureAwait(false); + var status = ParseStatus(model?.Status); + + if (status is GraphJobStatus.Completed or GraphJobStatus.Cancelled or GraphJobStatus.Failed) + { + return new CartographerOverlayResult( + status, + model?.GraphSnapshotId, + model?.ResultUri, + model?.Error); + } + } + catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) + { + throw; + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Polling Cartographer overlay status failed for job {JobId}.", overlayJobId); + break; + } + + await Task.Delay(graphOptions.StatusPollInterval, cancellationToken).ConfigureAwait(false); + } + + var fallbackStatus = ParseStatus(model?.Status); + return new CartographerOverlayResult( + fallbackStatus, + model?.GraphSnapshotId, + model?.ResultUri, + model?.Error); + } + + private sealed record CartographerOverlayRequest + { + public string TenantId { get; init; } = string.Empty; + + public string? 
GraphSnapshotId { get; init; } + + public string OverlayKind { get; init; } = string.Empty; + + public string OverlayKey { get; init; } = string.Empty; + + public IReadOnlyList<string> Subjects { get; init; } = Array.Empty<string>(); + + public string? CorrelationId { get; init; } + + public IReadOnlyDictionary<string, string> Metadata { get; init; } = new Dictionary<string, string>(StringComparer.Ordinal); + } + + private sealed record CartographerOverlayResponseModel + { + [JsonPropertyName("status")] + public string? Status { get; init; } + + [JsonPropertyName("graphSnapshotId")] + public string? GraphSnapshotId { get; init; } + + [JsonPropertyName("resultUri")] + public string? ResultUri { get; init; } + + [JsonPropertyName("error")] + public string? Error { get; init; } + } +} + +internal sealed class CartographerOverlayClientException : Exception +{ + public CartographerOverlayClientException(string message) + : base(message) + { + } +} diff --git a/src/StellaOps.Scheduler.Worker/Graph/Cartographer/ICartographerBuildClient.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Graph/Cartographer/ICartographerBuildClient.cs similarity index 96% rename from src/StellaOps.Scheduler.Worker/Graph/Cartographer/ICartographerBuildClient.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Graph/Cartographer/ICartographerBuildClient.cs index 877508a4..4485f9ec 100644 --- a/src/StellaOps.Scheduler.Worker/Graph/Cartographer/ICartographerBuildClient.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Graph/Cartographer/ICartographerBuildClient.cs @@ -1,17 +1,17 @@ -using System.Threading; -using System.Threading.Tasks; -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.Worker.Graph.Cartographer; - -internal interface ICartographerBuildClient -{ - Task<CartographerBuildResult> StartBuildAsync(GraphBuildJob job, CancellationToken cancellationToken); -} - -internal sealed record CartographerBuildResult( - GraphJobStatus Status, - string? CartographerJobId, - string? GraphSnapshotId, - string? ResultUri, - string? Error); +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.Worker.Graph.Cartographer; + +internal interface ICartographerBuildClient +{ + Task<CartographerBuildResult> StartBuildAsync(GraphBuildJob job, CancellationToken cancellationToken); +} + +internal sealed record CartographerBuildResult( + GraphJobStatus Status, + string? CartographerJobId, + string? GraphSnapshotId, + string? ResultUri, + string? 
Error); diff --git a/src/StellaOps.Scheduler.Worker/Graph/Cartographer/ICartographerOverlayClient.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Graph/Cartographer/ICartographerOverlayClient.cs similarity index 96% rename from src/StellaOps.Scheduler.Worker/Graph/Cartographer/ICartographerOverlayClient.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Graph/Cartographer/ICartographerOverlayClient.cs index 89ed2162..f55c3d42 100644 --- a/src/StellaOps.Scheduler.Worker/Graph/Cartographer/ICartographerOverlayClient.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Graph/Cartographer/ICartographerOverlayClient.cs @@ -1,16 +1,16 @@ -using System.Threading; -using System.Threading.Tasks; -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.Worker.Graph.Cartographer; - -internal interface ICartographerOverlayClient -{ - Task<CartographerOverlayResult> StartOverlayAsync(GraphOverlayJob job, CancellationToken cancellationToken); -} - -internal sealed record CartographerOverlayResult( - GraphJobStatus Status, - string? GraphSnapshotId, - string? ResultUri, - string? Error); +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.Worker.Graph.Cartographer; + +internal interface ICartographerOverlayClient +{ + Task<CartographerOverlayResult> StartOverlayAsync(GraphOverlayJob job, CancellationToken cancellationToken); +} + +internal sealed record CartographerOverlayResult( + GraphJobStatus Status, + string? GraphSnapshotId, + string? ResultUri, + string? Error); diff --git a/src/StellaOps.Scheduler.Worker/Graph/GraphBuildBackgroundService.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Graph/GraphBuildBackgroundService.cs similarity index 97% rename from src/StellaOps.Scheduler.Worker/Graph/GraphBuildBackgroundService.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Graph/GraphBuildBackgroundService.cs index f5442177..b86aa24b 100644 --- a/src/StellaOps.Scheduler.Worker/Graph/GraphBuildBackgroundService.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Graph/GraphBuildBackgroundService.cs @@ -1,129 +1,129 @@ -using System; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Hosting; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Scheduler.Models; -using StellaOps.Scheduler.Storage.Mongo.Repositories; -using StellaOps.Scheduler.Worker.Options; - -namespace StellaOps.Scheduler.Worker.Graph; - -internal sealed class GraphBuildBackgroundService : BackgroundService -{ - private readonly IGraphJobRepository _repository; - private readonly GraphBuildExecutionService _executionService; - private readonly IOptions<SchedulerWorkerOptions> _options; - private readonly ILogger<GraphBuildBackgroundService> _logger; - - public GraphBuildBackgroundService( - IGraphJobRepository repository, - GraphBuildExecutionService executionService, - IOptions<SchedulerWorkerOptions> options, - ILogger<GraphBuildBackgroundService> logger) - { - _repository = repository ?? throw new ArgumentNullException(nameof(repository)); - _executionService = executionService ?? throw new ArgumentNullException(nameof(executionService)); - _options = options ?? throw new ArgumentNullException(nameof(options)); - _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); - } - - protected override async Task ExecuteAsync(CancellationToken stoppingToken) - { - _logger.LogInformation("Graph build worker started."); - - while (!stoppingToken.IsCancellationRequested) - { - try - { - var graphOptions = _options.Value.Graph; - if (!graphOptions.Enabled) - { - await DelayAsync(graphOptions.IdleDelay, stoppingToken).ConfigureAwait(false); - continue; - } - - var jobs = await _repository.ListBuildJobsAsync(GraphJobStatus.Pending, graphOptions.BatchSize, stoppingToken).ConfigureAwait(false); - - if (jobs.Count == 0) - { - await DelayAsync(graphOptions.IdleDelay, stoppingToken).ConfigureAwait(false); - continue; - } - - foreach (var job in jobs) - { - try - { - var result = await _executionService.ExecuteAsync(job, stoppingToken).ConfigureAwait(false); - LogResult(result); - } - catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested) - { - break; - } - catch (Exception ex) - { - _logger.LogError(ex, "Unhandled exception while processing graph build job {JobId}.", job.Id); - } - } - - await DelayAsync(graphOptions.PollInterval, stoppingToken).ConfigureAwait(false); - } - catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested) - { - break; - } - catch (Exception ex) - { - _logger.LogError(ex, "Graph build worker encountered an error; backing off."); - await DelayAsync(TimeSpan.FromSeconds(5), stoppingToken).ConfigureAwait(false); - } - } - - _logger.LogInformation("Graph build worker stopping."); - } - - private async Task DelayAsync(TimeSpan delay, CancellationToken cancellationToken) - { - if (delay <= TimeSpan.Zero) - { - return; - } - - try - { - await Task.Delay(delay, cancellationToken).ConfigureAwait(false); - } - catch (TaskCanceledException) - { - } - } - - private void LogResult(GraphBuildExecutionResult result) - { - switch (result.Type) - { - case GraphBuildExecutionResultType.Completed: - _logger.LogInformation( - "Graph build job {JobId} completed (tenant={TenantId}).", - result.Job.Id, - result.Job.TenantId); - break; - case GraphBuildExecutionResultType.Failed: - _logger.LogWarning( - "Graph build job {JobId} failed (tenant={TenantId}): {Reason}.", - result.Job.Id, - result.Job.TenantId, - result.Reason ?? "unknown error"); - break; - case GraphBuildExecutionResultType.Skipped: - _logger.LogDebug( - "Graph build job {JobId} skipped: {Reason}.", - result.Job.Id, - result.Reason ?? "no reason"); - break; - } - } -} +using System; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Scheduler.Models; +using StellaOps.Scheduler.Storage.Mongo.Repositories; +using StellaOps.Scheduler.Worker.Options; + +namespace StellaOps.Scheduler.Worker.Graph; + +internal sealed class GraphBuildBackgroundService : BackgroundService +{ + private readonly IGraphJobRepository _repository; + private readonly GraphBuildExecutionService _executionService; + private readonly IOptions<SchedulerWorkerOptions> _options; + private readonly ILogger<GraphBuildBackgroundService> _logger; + + public GraphBuildBackgroundService( + IGraphJobRepository repository, + GraphBuildExecutionService executionService, + IOptions<SchedulerWorkerOptions> options, + ILogger<GraphBuildBackgroundService> logger) + { + _repository = repository ?? throw new ArgumentNullException(nameof(repository)); + _executionService = executionService ?? 
throw new ArgumentNullException(nameof(executionService)); + _options = options ?? throw new ArgumentNullException(nameof(options)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + protected override async Task ExecuteAsync(CancellationToken stoppingToken) + { + _logger.LogInformation("Graph build worker started."); + + while (!stoppingToken.IsCancellationRequested) + { + try + { + var graphOptions = _options.Value.Graph; + if (!graphOptions.Enabled) + { + await DelayAsync(graphOptions.IdleDelay, stoppingToken).ConfigureAwait(false); + continue; + } + + var jobs = await _repository.ListBuildJobsAsync(GraphJobStatus.Pending, graphOptions.BatchSize, stoppingToken).ConfigureAwait(false); + + if (jobs.Count == 0) + { + await DelayAsync(graphOptions.IdleDelay, stoppingToken).ConfigureAwait(false); + continue; + } + + foreach (var job in jobs) + { + try + { + var result = await _executionService.ExecuteAsync(job, stoppingToken).ConfigureAwait(false); + LogResult(result); + } + catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested) + { + break; + } + catch (Exception ex) + { + _logger.LogError(ex, "Unhandled exception while processing graph build job {JobId}.", job.Id); + } + } + + await DelayAsync(graphOptions.PollInterval, stoppingToken).ConfigureAwait(false); + } + catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested) + { + break; + } + catch (Exception ex) + { + _logger.LogError(ex, "Graph build worker encountered an error; backing off."); + await DelayAsync(TimeSpan.FromSeconds(5), stoppingToken).ConfigureAwait(false); + } + } + + _logger.LogInformation("Graph build worker stopping."); + } + + private async Task DelayAsync(TimeSpan delay, CancellationToken cancellationToken) + { + if (delay <= TimeSpan.Zero) + { + return; + } + + try + { + await Task.Delay(delay, cancellationToken).ConfigureAwait(false); + } + catch (TaskCanceledException) + { + } + } + + private void LogResult(GraphBuildExecutionResult result) + { + switch (result.Type) + { + case GraphBuildExecutionResultType.Completed: + _logger.LogInformation( + "Graph build job {JobId} completed (tenant={TenantId}).", + result.Job.Id, + result.Job.TenantId); + break; + case GraphBuildExecutionResultType.Failed: + _logger.LogWarning( + "Graph build job {JobId} failed (tenant={TenantId}): {Reason}.", + result.Job.Id, + result.Job.TenantId, + result.Reason ?? "unknown error"); + break; + case GraphBuildExecutionResultType.Skipped: + _logger.LogDebug( + "Graph build job {JobId} skipped: {Reason}.", + result.Job.Id, + result.Reason ?? 
"no reason"); + break; + } + } +} diff --git a/src/StellaOps.Scheduler.Worker/Graph/GraphBuildExecutionService.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Graph/GraphBuildExecutionService.cs similarity index 97% rename from src/StellaOps.Scheduler.Worker/Graph/GraphBuildExecutionService.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Graph/GraphBuildExecutionService.cs index efb1aabf..3b1ccdf3 100644 --- a/src/StellaOps.Scheduler.Worker/Graph/GraphBuildExecutionService.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Graph/GraphBuildExecutionService.cs @@ -1,227 +1,227 @@ -using System; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Scheduler.Models; -using StellaOps.Scheduler.Storage.Mongo.Repositories; -using StellaOps.Scheduler.Worker.Graph.Cartographer; -using StellaOps.Scheduler.Worker.Graph.Scheduler; -using StellaOps.Scheduler.Worker.Options; - -namespace StellaOps.Scheduler.Worker.Graph; - -internal sealed class GraphBuildExecutionService -{ - private readonly IGraphJobRepository _repository; - private readonly ICartographerBuildClient _cartographerClient; - private readonly IGraphJobCompletionClient _completionClient; - private readonly IOptions<SchedulerWorkerOptions> _options; - private readonly SchedulerWorkerMetrics _metrics; - private readonly TimeProvider _timeProvider; - private readonly ILogger<GraphBuildExecutionService> _logger; - - public GraphBuildExecutionService( - IGraphJobRepository repository, - ICartographerBuildClient cartographerClient, - IGraphJobCompletionClient completionClient, - IOptions<SchedulerWorkerOptions> options, - SchedulerWorkerMetrics metrics, - TimeProvider? timeProvider, - ILogger<GraphBuildExecutionService> logger) - { - _repository = repository ?? throw new ArgumentNullException(nameof(repository)); - _cartographerClient = cartographerClient ?? throw new ArgumentNullException(nameof(cartographerClient)); - _completionClient = completionClient ?? throw new ArgumentNullException(nameof(completionClient)); - _options = options ?? throw new ArgumentNullException(nameof(options)); - _metrics = metrics ?? throw new ArgumentNullException(nameof(metrics)); - _timeProvider = timeProvider ?? TimeProvider.System; - _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); - } - - public async Task<GraphBuildExecutionResult> ExecuteAsync(GraphBuildJob job, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(job); - - var graphOptions = _options.Value.Graph; - if (!graphOptions.Enabled) - { - _metrics.RecordGraphJobResult("build", "skipped"); - return GraphBuildExecutionResult.Skipped(job, "graph_processing_disabled"); - } - - if (job.Status != GraphJobStatus.Pending) - { - _metrics.RecordGraphJobResult("build", "skipped"); - return GraphBuildExecutionResult.Skipped(job, "status_not_pending"); - } - - var now = _timeProvider.GetUtcNow(); - GraphBuildJob running; - - try - { - running = GraphJobStateMachine.EnsureTransition(job, GraphJobStatus.Running, now, attempts: job.Attempts + 1); - } - catch (Exception ex) - { - _logger.LogWarning(ex, "Failed to transition graph job {JobId} to running state.", job.Id); - _metrics.RecordGraphJobResult("build", "skipped"); - return GraphBuildExecutionResult.Skipped(job, "transition_invalid"); - } - - if (!await _repository.TryReplaceAsync(running, job.Status, cancellationToken).ConfigureAwait(false)) - { - _metrics.RecordGraphJobResult("build", "skipped"); - return GraphBuildExecutionResult.Skipped(job, "concurrency_conflict"); - } - - var attempt = 0; - CartographerBuildResult? lastResult = null; - Exception? lastException = null; - var backoff = graphOptions.RetryBackoff; - - while (attempt < graphOptions.MaxAttempts) - { - cancellationToken.ThrowIfCancellationRequested(); - attempt++; - - try - { - var response = await _cartographerClient.StartBuildAsync(running, cancellationToken).ConfigureAwait(false); - lastResult = response; - - if (!string.IsNullOrWhiteSpace(response.CartographerJobId) && response.CartographerJobId != running.CartographerJobId) - { - var updated = running with { CartographerJobId = response.CartographerJobId }; - if (await _repository.TryReplaceAsync(updated, GraphJobStatus.Running, cancellationToken).ConfigureAwait(false)) - { - running = updated; - } - } - - if (!string.IsNullOrWhiteSpace(response.GraphSnapshotId) && response.GraphSnapshotId != running.GraphSnapshotId) - { - var updated = running with { GraphSnapshotId = response.GraphSnapshotId }; - if (await _repository.TryReplaceAsync(updated, GraphJobStatus.Running, cancellationToken).ConfigureAwait(false)) - { - running = updated; - } - } - - if (response.Status == GraphJobStatus.Completed || response.Status == GraphJobStatus.Cancelled || response.Status == GraphJobStatus.Running) - { - var completionTime = _timeProvider.GetUtcNow(); - await NotifyCompletionAsync(running, GraphJobStatus.Completed, completionTime, response.GraphSnapshotId, response.ResultUri, response.Error, cancellationToken).ConfigureAwait(false); - _metrics.RecordGraphJobResult("build", "completed", completionTime - running.CreatedAt); - return GraphBuildExecutionResult.Completed(running, response.ResultUri); - } - - if (response.Status == GraphJobStatus.Failed) - { - if (attempt >= graphOptions.MaxAttempts) - { - var completionTime = _timeProvider.GetUtcNow(); - await NotifyCompletionAsync(running, GraphJobStatus.Failed, completionTime, response.GraphSnapshotId, response.ResultUri, response.Error, cancellationToken).ConfigureAwait(false); - _metrics.RecordGraphJobResult("build", "failed", completionTime - running.CreatedAt); - return GraphBuildExecutionResult.Failed(running, response.Error); - } - - _logger.LogWarning( - "Cartographer build attempt {Attempt} failed for job {JobId}; retrying in 
{Delay} (reason: {Reason}).", - attempt, - job.Id, - backoff, - response.Error ?? "unknown"); - - await Task.Delay(backoff, cancellationToken).ConfigureAwait(false); - continue; - } - - // If Cartographer reports pending/queued we wait and retry. - if (attempt >= graphOptions.MaxAttempts) - { - var completionTime = _timeProvider.GetUtcNow(); - await NotifyCompletionAsync(running, GraphJobStatus.Failed, completionTime, response.GraphSnapshotId, response.ResultUri, response.Error ?? "Cartographer did not complete the build.", cancellationToken).ConfigureAwait(false); - _metrics.RecordGraphJobResult("build", "failed", completionTime - running.CreatedAt); - return GraphBuildExecutionResult.Failed(running, response.Error); - } - - await Task.Delay(backoff, cancellationToken).ConfigureAwait(false); - } - catch (Exception ex) when (!cancellationToken.IsCancellationRequested) - { - lastException = ex; - - if (attempt >= graphOptions.MaxAttempts) - { - var completionTime = _timeProvider.GetUtcNow(); - await NotifyCompletionAsync(running, GraphJobStatus.Failed, completionTime, running.GraphSnapshotId, null, ex.Message, cancellationToken).ConfigureAwait(false); - _metrics.RecordGraphJobResult("build", "failed", completionTime - running.CreatedAt); - return GraphBuildExecutionResult.Failed(running, ex.Message); - } - - _logger.LogWarning(ex, "Cartographer build attempt {Attempt} failed for job {JobId}; retrying in {Delay}.", attempt, job.Id, backoff); - await Task.Delay(backoff, cancellationToken).ConfigureAwait(false); - } - } - - var error = lastResult?.Error ?? lastException?.Message ?? "Cartographer build failed"; - var finalTime = _timeProvider.GetUtcNow(); - await NotifyCompletionAsync(running, GraphJobStatus.Failed, finalTime, lastResult?.GraphSnapshotId ?? running.GraphSnapshotId, lastResult?.ResultUri, error, cancellationToken).ConfigureAwait(false); - _metrics.RecordGraphJobResult("build", "failed", finalTime - running.CreatedAt); - return GraphBuildExecutionResult.Failed(running, error); - } - - private async Task NotifyCompletionAsync( - GraphBuildJob job, - GraphJobStatus status, - DateTimeOffset occurredAt, - string? graphSnapshotId, - string? resultUri, - string? error, - CancellationToken cancellationToken) - { - var dto = new GraphJobCompletionRequestDto( - job.Id, - "Build", - status, - occurredAt, - graphSnapshotId ?? job.GraphSnapshotId, - resultUri, - job.CorrelationId, - status == GraphJobStatus.Failed ? (error ?? "Cartographer build failed.") : null); - - try - { - await _completionClient.NotifyAsync(dto, cancellationToken).ConfigureAwait(false); - } - catch (Exception ex) when (!cancellationToken.IsCancellationRequested) - { - _logger.LogError(ex, "Failed notifying Scheduler completion for graph job {JobId}.", job.Id); - } - } -} - -internal enum GraphBuildExecutionResultType -{ - Completed, - Failed, - Skipped -} - -internal readonly record struct GraphBuildExecutionResult( - GraphBuildExecutionResultType Type, - GraphBuildJob Job, - string? Reason = null, - string? ResultUri = null) -{ - public static GraphBuildExecutionResult Completed(GraphBuildJob job, string? resultUri) - => new(GraphBuildExecutionResultType.Completed, job, ResultUri: resultUri); - - public static GraphBuildExecutionResult Failed(GraphBuildJob job, string? 
error) - => new(GraphBuildExecutionResultType.Failed, job, error); - - public static GraphBuildExecutionResult Skipped(GraphBuildJob job, string reason) - => new(GraphBuildExecutionResultType.Skipped, job, reason); -} +using System; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Scheduler.Models; +using StellaOps.Scheduler.Storage.Mongo.Repositories; +using StellaOps.Scheduler.Worker.Graph.Cartographer; +using StellaOps.Scheduler.Worker.Graph.Scheduler; +using StellaOps.Scheduler.Worker.Options; + +namespace StellaOps.Scheduler.Worker.Graph; + +internal sealed class GraphBuildExecutionService +{ + private readonly IGraphJobRepository _repository; + private readonly ICartographerBuildClient _cartographerClient; + private readonly IGraphJobCompletionClient _completionClient; + private readonly IOptions<SchedulerWorkerOptions> _options; + private readonly SchedulerWorkerMetrics _metrics; + private readonly TimeProvider _timeProvider; + private readonly ILogger<GraphBuildExecutionService> _logger; + + public GraphBuildExecutionService( + IGraphJobRepository repository, + ICartographerBuildClient cartographerClient, + IGraphJobCompletionClient completionClient, + IOptions<SchedulerWorkerOptions> options, + SchedulerWorkerMetrics metrics, + TimeProvider? timeProvider, + ILogger<GraphBuildExecutionService> logger) + { + _repository = repository ?? throw new ArgumentNullException(nameof(repository)); + _cartographerClient = cartographerClient ?? throw new ArgumentNullException(nameof(cartographerClient)); + _completionClient = completionClient ?? throw new ArgumentNullException(nameof(completionClient)); + _options = options ?? throw new ArgumentNullException(nameof(options)); + _metrics = metrics ?? throw new ArgumentNullException(nameof(metrics)); + _timeProvider = timeProvider ?? TimeProvider.System; + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public async Task<GraphBuildExecutionResult> ExecuteAsync(GraphBuildJob job, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(job); + + var graphOptions = _options.Value.Graph; + if (!graphOptions.Enabled) + { + _metrics.RecordGraphJobResult("build", "skipped"); + return GraphBuildExecutionResult.Skipped(job, "graph_processing_disabled"); + } + + if (job.Status != GraphJobStatus.Pending) + { + _metrics.RecordGraphJobResult("build", "skipped"); + return GraphBuildExecutionResult.Skipped(job, "status_not_pending"); + } + + var now = _timeProvider.GetUtcNow(); + GraphBuildJob running; + + try + { + running = GraphJobStateMachine.EnsureTransition(job, GraphJobStatus.Running, now, attempts: job.Attempts + 1); + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Failed to transition graph job {JobId} to running state.", job.Id); + _metrics.RecordGraphJobResult("build", "skipped"); + return GraphBuildExecutionResult.Skipped(job, "transition_invalid"); + } + + if (!await _repository.TryReplaceAsync(running, job.Status, cancellationToken).ConfigureAwait(false)) + { + _metrics.RecordGraphJobResult("build", "skipped"); + return GraphBuildExecutionResult.Skipped(job, "concurrency_conflict"); + } + + var attempt = 0; + CartographerBuildResult? lastResult = null; + Exception? 
lastException = null; + var backoff = graphOptions.RetryBackoff; + + while (attempt < graphOptions.MaxAttempts) + { + cancellationToken.ThrowIfCancellationRequested(); + attempt++; + + try + { + var response = await _cartographerClient.StartBuildAsync(running, cancellationToken).ConfigureAwait(false); + lastResult = response; + + if (!string.IsNullOrWhiteSpace(response.CartographerJobId) && response.CartographerJobId != running.CartographerJobId) + { + var updated = running with { CartographerJobId = response.CartographerJobId }; + if (await _repository.TryReplaceAsync(updated, GraphJobStatus.Running, cancellationToken).ConfigureAwait(false)) + { + running = updated; + } + } + + if (!string.IsNullOrWhiteSpace(response.GraphSnapshotId) && response.GraphSnapshotId != running.GraphSnapshotId) + { + var updated = running with { GraphSnapshotId = response.GraphSnapshotId }; + if (await _repository.TryReplaceAsync(updated, GraphJobStatus.Running, cancellationToken).ConfigureAwait(false)) + { + running = updated; + } + } + + if (response.Status == GraphJobStatus.Completed || response.Status == GraphJobStatus.Cancelled || response.Status == GraphJobStatus.Running) + { + var completionTime = _timeProvider.GetUtcNow(); + await NotifyCompletionAsync(running, GraphJobStatus.Completed, completionTime, response.GraphSnapshotId, response.ResultUri, response.Error, cancellationToken).ConfigureAwait(false); + _metrics.RecordGraphJobResult("build", "completed", completionTime - running.CreatedAt); + return GraphBuildExecutionResult.Completed(running, response.ResultUri); + } + + if (response.Status == GraphJobStatus.Failed) + { + if (attempt >= graphOptions.MaxAttempts) + { + var completionTime = _timeProvider.GetUtcNow(); + await NotifyCompletionAsync(running, GraphJobStatus.Failed, completionTime, response.GraphSnapshotId, response.ResultUri, response.Error, cancellationToken).ConfigureAwait(false); + _metrics.RecordGraphJobResult("build", "failed", completionTime - running.CreatedAt); + return GraphBuildExecutionResult.Failed(running, response.Error); + } + + _logger.LogWarning( + "Cartographer build attempt {Attempt} failed for job {JobId}; retrying in {Delay} (reason: {Reason}).", + attempt, + job.Id, + backoff, + response.Error ?? "unknown"); + + await Task.Delay(backoff, cancellationToken).ConfigureAwait(false); + continue; + } + + // If Cartographer reports pending/queued we wait and retry. + if (attempt >= graphOptions.MaxAttempts) + { + var completionTime = _timeProvider.GetUtcNow(); + await NotifyCompletionAsync(running, GraphJobStatus.Failed, completionTime, response.GraphSnapshotId, response.ResultUri, response.Error ?? 
"Cartographer did not complete the build.", cancellationToken).ConfigureAwait(false); + _metrics.RecordGraphJobResult("build", "failed", completionTime - running.CreatedAt); + return GraphBuildExecutionResult.Failed(running, response.Error); + } + + await Task.Delay(backoff, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) when (!cancellationToken.IsCancellationRequested) + { + lastException = ex; + + if (attempt >= graphOptions.MaxAttempts) + { + var completionTime = _timeProvider.GetUtcNow(); + await NotifyCompletionAsync(running, GraphJobStatus.Failed, completionTime, running.GraphSnapshotId, null, ex.Message, cancellationToken).ConfigureAwait(false); + _metrics.RecordGraphJobResult("build", "failed", completionTime - running.CreatedAt); + return GraphBuildExecutionResult.Failed(running, ex.Message); + } + + _logger.LogWarning(ex, "Cartographer build attempt {Attempt} failed for job {JobId}; retrying in {Delay}.", attempt, job.Id, backoff); + await Task.Delay(backoff, cancellationToken).ConfigureAwait(false); + } + } + + var error = lastResult?.Error ?? lastException?.Message ?? "Cartographer build failed"; + var finalTime = _timeProvider.GetUtcNow(); + await NotifyCompletionAsync(running, GraphJobStatus.Failed, finalTime, lastResult?.GraphSnapshotId ?? running.GraphSnapshotId, lastResult?.ResultUri, error, cancellationToken).ConfigureAwait(false); + _metrics.RecordGraphJobResult("build", "failed", finalTime - running.CreatedAt); + return GraphBuildExecutionResult.Failed(running, error); + } + + private async Task NotifyCompletionAsync( + GraphBuildJob job, + GraphJobStatus status, + DateTimeOffset occurredAt, + string? graphSnapshotId, + string? resultUri, + string? error, + CancellationToken cancellationToken) + { + var dto = new GraphJobCompletionRequestDto( + job.Id, + "Build", + status, + occurredAt, + graphSnapshotId ?? job.GraphSnapshotId, + resultUri, + job.CorrelationId, + status == GraphJobStatus.Failed ? (error ?? "Cartographer build failed.") : null); + + try + { + await _completionClient.NotifyAsync(dto, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) when (!cancellationToken.IsCancellationRequested) + { + _logger.LogError(ex, "Failed notifying Scheduler completion for graph job {JobId}.", job.Id); + } + } +} + +internal enum GraphBuildExecutionResultType +{ + Completed, + Failed, + Skipped +} + +internal readonly record struct GraphBuildExecutionResult( + GraphBuildExecutionResultType Type, + GraphBuildJob Job, + string? Reason = null, + string? ResultUri = null) +{ + public static GraphBuildExecutionResult Completed(GraphBuildJob job, string? resultUri) + => new(GraphBuildExecutionResultType.Completed, job, ResultUri: resultUri); + + public static GraphBuildExecutionResult Failed(GraphBuildJob job, string? 
error) + => new(GraphBuildExecutionResultType.Failed, job, error); + + public static GraphBuildExecutionResult Skipped(GraphBuildJob job, string reason) + => new(GraphBuildExecutionResultType.Skipped, job, reason); +} diff --git a/src/StellaOps.Scheduler.Worker/Graph/GraphOverlayBackgroundService.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Graph/GraphOverlayBackgroundService.cs similarity index 97% rename from src/StellaOps.Scheduler.Worker/Graph/GraphOverlayBackgroundService.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Graph/GraphOverlayBackgroundService.cs index fae8e666..53f1c3fc 100644 --- a/src/StellaOps.Scheduler.Worker/Graph/GraphOverlayBackgroundService.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Graph/GraphOverlayBackgroundService.cs @@ -1,128 +1,128 @@ -using System; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Hosting; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Scheduler.Models; -using StellaOps.Scheduler.Storage.Mongo.Repositories; -using StellaOps.Scheduler.Worker.Options; - -namespace StellaOps.Scheduler.Worker.Graph; - -internal sealed class GraphOverlayBackgroundService : BackgroundService -{ - private readonly IGraphJobRepository _repository; - private readonly GraphOverlayExecutionService _executionService; - private readonly IOptions<SchedulerWorkerOptions> _options; - private readonly ILogger<GraphOverlayBackgroundService> _logger; - - public GraphOverlayBackgroundService( - IGraphJobRepository repository, - GraphOverlayExecutionService executionService, - IOptions<SchedulerWorkerOptions> options, - ILogger<GraphOverlayBackgroundService> logger) - { - _repository = repository ?? throw new ArgumentNullException(nameof(repository)); - _executionService = executionService ?? throw new ArgumentNullException(nameof(executionService)); - _options = options ?? throw new ArgumentNullException(nameof(options)); - _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); - } - - protected override async Task ExecuteAsync(CancellationToken stoppingToken) - { - _logger.LogInformation("Graph overlay worker started."); - - while (!stoppingToken.IsCancellationRequested) - { - try - { - var graphOptions = _options.Value.Graph; - if (!graphOptions.Enabled) - { - await DelayAsync(graphOptions.IdleDelay, stoppingToken).ConfigureAwait(false); - continue; - } - - var jobs = await _repository.ListOverlayJobsAsync(GraphJobStatus.Pending, graphOptions.BatchSize, stoppingToken).ConfigureAwait(false); - if (jobs.Count == 0) - { - await DelayAsync(graphOptions.IdleDelay, stoppingToken).ConfigureAwait(false); - continue; - } - - foreach (var job in jobs) - { - try - { - var result = await _executionService.ExecuteAsync(job, stoppingToken).ConfigureAwait(false); - LogResult(result); - } - catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested) - { - break; - } - catch (Exception ex) - { - _logger.LogError(ex, "Unhandled exception while processing graph overlay job {JobId}.", job.Id); - } - } - - await DelayAsync(graphOptions.PollInterval, stoppingToken).ConfigureAwait(false); - } - catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested) - { - break; - } - catch (Exception ex) - { - _logger.LogError(ex, "Graph overlay worker encountered an error; backing off."); - await DelayAsync(TimeSpan.FromSeconds(5), stoppingToken).ConfigureAwait(false); - } - } - - _logger.LogInformation("Graph overlay worker stopping."); - } - - private async Task DelayAsync(TimeSpan delay, CancellationToken cancellationToken) - { - if (delay <= TimeSpan.Zero) - { - return; - } - - try - { - await Task.Delay(delay, cancellationToken).ConfigureAwait(false); - } - catch (TaskCanceledException) - { - } - } - - private void LogResult(GraphOverlayExecutionResult result) - { - switch (result.Type) - { - case GraphOverlayExecutionResultType.Completed: - _logger.LogInformation( - "Graph overlay job {JobId} completed (tenant={TenantId}).", - result.Job.Id, - result.Job.TenantId); - break; - case GraphOverlayExecutionResultType.Failed: - _logger.LogWarning( - "Graph overlay job {JobId} failed (tenant={TenantId}): {Reason}.", - result.Job.Id, - result.Job.TenantId, - result.Reason ?? "unknown error"); - break; - case GraphOverlayExecutionResultType.Skipped: - _logger.LogDebug( - "Graph overlay job {JobId} skipped: {Reason}.", - result.Job.Id, - result.Reason ?? "no reason"); - break; - } - } -} +using System; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Scheduler.Models; +using StellaOps.Scheduler.Storage.Mongo.Repositories; +using StellaOps.Scheduler.Worker.Options; + +namespace StellaOps.Scheduler.Worker.Graph; + +internal sealed class GraphOverlayBackgroundService : BackgroundService +{ + private readonly IGraphJobRepository _repository; + private readonly GraphOverlayExecutionService _executionService; + private readonly IOptions<SchedulerWorkerOptions> _options; + private readonly ILogger<GraphOverlayBackgroundService> _logger; + + public GraphOverlayBackgroundService( + IGraphJobRepository repository, + GraphOverlayExecutionService executionService, + IOptions<SchedulerWorkerOptions> options, + ILogger<GraphOverlayBackgroundService> logger) + { + _repository = repository ?? throw new ArgumentNullException(nameof(repository)); + _executionService = executionService ?? 
throw new ArgumentNullException(nameof(executionService)); + _options = options ?? throw new ArgumentNullException(nameof(options)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + protected override async Task ExecuteAsync(CancellationToken stoppingToken) + { + _logger.LogInformation("Graph overlay worker started."); + + while (!stoppingToken.IsCancellationRequested) + { + try + { + var graphOptions = _options.Value.Graph; + if (!graphOptions.Enabled) + { + await DelayAsync(graphOptions.IdleDelay, stoppingToken).ConfigureAwait(false); + continue; + } + + var jobs = await _repository.ListOverlayJobsAsync(GraphJobStatus.Pending, graphOptions.BatchSize, stoppingToken).ConfigureAwait(false); + if (jobs.Count == 0) + { + await DelayAsync(graphOptions.IdleDelay, stoppingToken).ConfigureAwait(false); + continue; + } + + foreach (var job in jobs) + { + try + { + var result = await _executionService.ExecuteAsync(job, stoppingToken).ConfigureAwait(false); + LogResult(result); + } + catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested) + { + break; + } + catch (Exception ex) + { + _logger.LogError(ex, "Unhandled exception while processing graph overlay job {JobId}.", job.Id); + } + } + + await DelayAsync(graphOptions.PollInterval, stoppingToken).ConfigureAwait(false); + } + catch (OperationCanceledException) when (stoppingToken.IsCancellationRequested) + { + break; + } + catch (Exception ex) + { + _logger.LogError(ex, "Graph overlay worker encountered an error; backing off."); + await DelayAsync(TimeSpan.FromSeconds(5), stoppingToken).ConfigureAwait(false); + } + } + + _logger.LogInformation("Graph overlay worker stopping."); + } + + private async Task DelayAsync(TimeSpan delay, CancellationToken cancellationToken) + { + if (delay <= TimeSpan.Zero) + { + return; + } + + try + { + await Task.Delay(delay, cancellationToken).ConfigureAwait(false); + } + catch (TaskCanceledException) + { + } + } + + private void LogResult(GraphOverlayExecutionResult result) + { + switch (result.Type) + { + case GraphOverlayExecutionResultType.Completed: + _logger.LogInformation( + "Graph overlay job {JobId} completed (tenant={TenantId}).", + result.Job.Id, + result.Job.TenantId); + break; + case GraphOverlayExecutionResultType.Failed: + _logger.LogWarning( + "Graph overlay job {JobId} failed (tenant={TenantId}): {Reason}.", + result.Job.Id, + result.Job.TenantId, + result.Reason ?? "unknown error"); + break; + case GraphOverlayExecutionResultType.Skipped: + _logger.LogDebug( + "Graph overlay job {JobId} skipped: {Reason}.", + result.Job.Id, + result.Reason ?? 
"no reason"); + break; + } + } +} diff --git a/src/StellaOps.Scheduler.Worker/Graph/GraphOverlayExecutionService.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Graph/GraphOverlayExecutionService.cs similarity index 97% rename from src/StellaOps.Scheduler.Worker/Graph/GraphOverlayExecutionService.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Graph/GraphOverlayExecutionService.cs index 298fda90..72a5f141 100644 --- a/src/StellaOps.Scheduler.Worker/Graph/GraphOverlayExecutionService.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Graph/GraphOverlayExecutionService.cs @@ -1,208 +1,208 @@ -using System; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Scheduler.Models; -using StellaOps.Scheduler.Storage.Mongo.Repositories; -using StellaOps.Scheduler.Worker.Graph.Cartographer; -using StellaOps.Scheduler.Worker.Graph.Scheduler; -using StellaOps.Scheduler.Worker.Options; - -namespace StellaOps.Scheduler.Worker.Graph; - -internal sealed class GraphOverlayExecutionService -{ - private readonly IGraphJobRepository _repository; - private readonly ICartographerOverlayClient _overlayClient; - private readonly IGraphJobCompletionClient _completionClient; - private readonly IOptions<SchedulerWorkerOptions> _options; - private readonly SchedulerWorkerMetrics _metrics; - private readonly TimeProvider _timeProvider; - private readonly ILogger<GraphOverlayExecutionService> _logger; - - public GraphOverlayExecutionService( - IGraphJobRepository repository, - ICartographerOverlayClient overlayClient, - IGraphJobCompletionClient completionClient, - IOptions<SchedulerWorkerOptions> options, - SchedulerWorkerMetrics metrics, - TimeProvider? timeProvider, - ILogger<GraphOverlayExecutionService> logger) - { - _repository = repository ?? throw new ArgumentNullException(nameof(repository)); - _overlayClient = overlayClient ?? throw new ArgumentNullException(nameof(overlayClient)); - _completionClient = completionClient ?? throw new ArgumentNullException(nameof(completionClient)); - _options = options ?? throw new ArgumentNullException(nameof(options)); - _metrics = metrics ?? throw new ArgumentNullException(nameof(metrics)); - _timeProvider = timeProvider ?? TimeProvider.System; - _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); - } - - public async Task<GraphOverlayExecutionResult> ExecuteAsync(GraphOverlayJob job, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(job); - - var graphOptions = _options.Value.Graph; - if (!graphOptions.Enabled) - { - _metrics.RecordGraphJobResult("overlay", "skipped"); - return GraphOverlayExecutionResult.Skipped(job, "graph_processing_disabled"); - } - - if (job.Status != GraphJobStatus.Pending) - { - _metrics.RecordGraphJobResult("overlay", "skipped"); - return GraphOverlayExecutionResult.Skipped(job, "status_not_pending"); - } - - var now = _timeProvider.GetUtcNow(); - GraphOverlayJob running; - - try - { - running = GraphJobStateMachine.EnsureTransition(job, GraphJobStatus.Running, now, attempts: job.Attempts + 1); - } - catch (Exception ex) - { - _logger.LogWarning(ex, "Failed to transition graph overlay job {JobId} to running state.", job.Id); - _metrics.RecordGraphJobResult("overlay", "skipped"); - return GraphOverlayExecutionResult.Skipped(job, "transition_invalid"); - } - - if (!await _repository.TryReplaceOverlayAsync(running, job.Status, cancellationToken).ConfigureAwait(false)) - { - _metrics.RecordGraphJobResult("overlay", "skipped"); - return GraphOverlayExecutionResult.Skipped(job, "concurrency_conflict"); - } - - var attempt = 0; - CartographerOverlayResult? lastResult = null; - Exception? lastException = null; - var backoff = graphOptions.RetryBackoff; - - while (attempt < graphOptions.MaxAttempts) - { - cancellationToken.ThrowIfCancellationRequested(); - attempt++; - - try - { - var response = await _overlayClient.StartOverlayAsync(running, cancellationToken).ConfigureAwait(false); - lastResult = response; - - if (response.Status == GraphJobStatus.Completed || response.Status == GraphJobStatus.Cancelled || response.Status == GraphJobStatus.Running) - { - var completionTime = _timeProvider.GetUtcNow(); - await NotifyCompletionAsync(running, GraphJobStatus.Completed, completionTime, response.GraphSnapshotId ?? running.GraphSnapshotId, response.ResultUri, response.Error, cancellationToken).ConfigureAwait(false); - _metrics.RecordGraphJobResult("overlay", "completed", completionTime - running.CreatedAt); - return GraphOverlayExecutionResult.Completed(running, response.ResultUri); - } - - if (response.Status == GraphJobStatus.Failed) - { - if (attempt >= graphOptions.MaxAttempts) - { - var completionTime = _timeProvider.GetUtcNow(); - await NotifyCompletionAsync(running, GraphJobStatus.Failed, completionTime, response.GraphSnapshotId ?? running.GraphSnapshotId, response.ResultUri, response.Error, cancellationToken).ConfigureAwait(false); - _metrics.RecordGraphJobResult("overlay", "failed", completionTime - running.CreatedAt); - return GraphOverlayExecutionResult.Failed(running, response.Error); - } - - _logger.LogWarning( - "Cartographer overlay attempt {Attempt} failed for job {JobId}; retrying in {Delay} (reason: {Reason}).", - attempt, - job.Id, - backoff, - response.Error ?? "unknown"); - - await Task.Delay(backoff, cancellationToken).ConfigureAwait(false); - continue; - } - - if (attempt >= graphOptions.MaxAttempts) - { - var completionTime = _timeProvider.GetUtcNow(); - await NotifyCompletionAsync(running, GraphJobStatus.Failed, completionTime, response.GraphSnapshotId ?? running.GraphSnapshotId, response.ResultUri, response.Error ?? 
"Cartographer did not complete the overlay.", cancellationToken).ConfigureAwait(false); - _metrics.RecordGraphJobResult("overlay", "failed", completionTime - running.CreatedAt); - return GraphOverlayExecutionResult.Failed(running, response.Error); - } - - await Task.Delay(backoff, cancellationToken).ConfigureAwait(false); - } - catch (Exception ex) when (!cancellationToken.IsCancellationRequested) - { - lastException = ex; - - if (attempt >= graphOptions.MaxAttempts) - { - var completionTime = _timeProvider.GetUtcNow(); - await NotifyCompletionAsync(running, GraphJobStatus.Failed, completionTime, running.GraphSnapshotId, null, ex.Message, cancellationToken).ConfigureAwait(false); - _metrics.RecordGraphJobResult("overlay", "failed", completionTime - running.CreatedAt); - return GraphOverlayExecutionResult.Failed(running, ex.Message); - } - - _logger.LogWarning(ex, "Cartographer overlay attempt {Attempt} failed for job {JobId}; retrying in {Delay}.", attempt, job.Id, backoff); - await Task.Delay(backoff, cancellationToken).ConfigureAwait(false); - } - } - - var error = lastResult?.Error ?? lastException?.Message ?? "Cartographer overlay failed"; - var finalTime = _timeProvider.GetUtcNow(); - await NotifyCompletionAsync(running, GraphJobStatus.Failed, finalTime, lastResult?.GraphSnapshotId ?? running.GraphSnapshotId, lastResult?.ResultUri, error, cancellationToken).ConfigureAwait(false); - _metrics.RecordGraphJobResult("overlay", "failed", finalTime - running.CreatedAt); - return GraphOverlayExecutionResult.Failed(running, error); - } - - private async Task NotifyCompletionAsync( - GraphOverlayJob job, - GraphJobStatus status, - DateTimeOffset occurredAt, - string? graphSnapshotId, - string? resultUri, - string? error, - CancellationToken cancellationToken) - { - var dto = new GraphJobCompletionRequestDto( - job.Id, - "Overlay", - status, - occurredAt, - graphSnapshotId ?? job.GraphSnapshotId, - resultUri, - job.CorrelationId, - status == GraphJobStatus.Failed ? (error ?? "Cartographer overlay failed.") : null); - - try - { - await _completionClient.NotifyAsync(dto, cancellationToken).ConfigureAwait(false); - } - catch (Exception ex) when (!cancellationToken.IsCancellationRequested) - { - _logger.LogError(ex, "Failed notifying Scheduler completion for graph overlay job {JobId}.", job.Id); - } - } -} - -internal enum GraphOverlayExecutionResultType -{ - Completed, - Failed, - Skipped -} - -internal readonly record struct GraphOverlayExecutionResult( - GraphOverlayExecutionResultType Type, - GraphOverlayJob Job, - string? Reason = null, - string? ResultUri = null) -{ - public static GraphOverlayExecutionResult Completed(GraphOverlayJob job, string? resultUri) - => new(GraphOverlayExecutionResultType.Completed, job, ResultUri: resultUri); - - public static GraphOverlayExecutionResult Failed(GraphOverlayJob job, string? 
error) - => new(GraphOverlayExecutionResultType.Failed, job, error); - - public static GraphOverlayExecutionResult Skipped(GraphOverlayJob job, string reason) - => new(GraphOverlayExecutionResultType.Skipped, job, reason); -} +using System; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Scheduler.Models; +using StellaOps.Scheduler.Storage.Mongo.Repositories; +using StellaOps.Scheduler.Worker.Graph.Cartographer; +using StellaOps.Scheduler.Worker.Graph.Scheduler; +using StellaOps.Scheduler.Worker.Options; + +namespace StellaOps.Scheduler.Worker.Graph; + +internal sealed class GraphOverlayExecutionService +{ + private readonly IGraphJobRepository _repository; + private readonly ICartographerOverlayClient _overlayClient; + private readonly IGraphJobCompletionClient _completionClient; + private readonly IOptions<SchedulerWorkerOptions> _options; + private readonly SchedulerWorkerMetrics _metrics; + private readonly TimeProvider _timeProvider; + private readonly ILogger<GraphOverlayExecutionService> _logger; + + public GraphOverlayExecutionService( + IGraphJobRepository repository, + ICartographerOverlayClient overlayClient, + IGraphJobCompletionClient completionClient, + IOptions<SchedulerWorkerOptions> options, + SchedulerWorkerMetrics metrics, + TimeProvider? timeProvider, + ILogger<GraphOverlayExecutionService> logger) + { + _repository = repository ?? throw new ArgumentNullException(nameof(repository)); + _overlayClient = overlayClient ?? throw new ArgumentNullException(nameof(overlayClient)); + _completionClient = completionClient ?? throw new ArgumentNullException(nameof(completionClient)); + _options = options ?? throw new ArgumentNullException(nameof(options)); + _metrics = metrics ?? throw new ArgumentNullException(nameof(metrics)); + _timeProvider = timeProvider ?? TimeProvider.System; + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public async Task<GraphOverlayExecutionResult> ExecuteAsync(GraphOverlayJob job, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(job); + + var graphOptions = _options.Value.Graph; + if (!graphOptions.Enabled) + { + _metrics.RecordGraphJobResult("overlay", "skipped"); + return GraphOverlayExecutionResult.Skipped(job, "graph_processing_disabled"); + } + + if (job.Status != GraphJobStatus.Pending) + { + _metrics.RecordGraphJobResult("overlay", "skipped"); + return GraphOverlayExecutionResult.Skipped(job, "status_not_pending"); + } + + var now = _timeProvider.GetUtcNow(); + GraphOverlayJob running; + + try + { + running = GraphJobStateMachine.EnsureTransition(job, GraphJobStatus.Running, now, attempts: job.Attempts + 1); + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Failed to transition graph overlay job {JobId} to running state.", job.Id); + _metrics.RecordGraphJobResult("overlay", "skipped"); + return GraphOverlayExecutionResult.Skipped(job, "transition_invalid"); + } + + if (!await _repository.TryReplaceOverlayAsync(running, job.Status, cancellationToken).ConfigureAwait(false)) + { + _metrics.RecordGraphJobResult("overlay", "skipped"); + return GraphOverlayExecutionResult.Skipped(job, "concurrency_conflict"); + } + + var attempt = 0; + CartographerOverlayResult? lastResult = null; + Exception? 
lastException = null; + var backoff = graphOptions.RetryBackoff; + + while (attempt < graphOptions.MaxAttempts) + { + cancellationToken.ThrowIfCancellationRequested(); + attempt++; + + try + { + var response = await _overlayClient.StartOverlayAsync(running, cancellationToken).ConfigureAwait(false); + lastResult = response; + + if (response.Status == GraphJobStatus.Completed || response.Status == GraphJobStatus.Cancelled || response.Status == GraphJobStatus.Running) + { + var completionTime = _timeProvider.GetUtcNow(); + await NotifyCompletionAsync(running, GraphJobStatus.Completed, completionTime, response.GraphSnapshotId ?? running.GraphSnapshotId, response.ResultUri, response.Error, cancellationToken).ConfigureAwait(false); + _metrics.RecordGraphJobResult("overlay", "completed", completionTime - running.CreatedAt); + return GraphOverlayExecutionResult.Completed(running, response.ResultUri); + } + + if (response.Status == GraphJobStatus.Failed) + { + if (attempt >= graphOptions.MaxAttempts) + { + var completionTime = _timeProvider.GetUtcNow(); + await NotifyCompletionAsync(running, GraphJobStatus.Failed, completionTime, response.GraphSnapshotId ?? running.GraphSnapshotId, response.ResultUri, response.Error, cancellationToken).ConfigureAwait(false); + _metrics.RecordGraphJobResult("overlay", "failed", completionTime - running.CreatedAt); + return GraphOverlayExecutionResult.Failed(running, response.Error); + } + + _logger.LogWarning( + "Cartographer overlay attempt {Attempt} failed for job {JobId}; retrying in {Delay} (reason: {Reason}).", + attempt, + job.Id, + backoff, + response.Error ?? "unknown"); + + await Task.Delay(backoff, cancellationToken).ConfigureAwait(false); + continue; + } + + if (attempt >= graphOptions.MaxAttempts) + { + var completionTime = _timeProvider.GetUtcNow(); + await NotifyCompletionAsync(running, GraphJobStatus.Failed, completionTime, response.GraphSnapshotId ?? running.GraphSnapshotId, response.ResultUri, response.Error ?? "Cartographer did not complete the overlay.", cancellationToken).ConfigureAwait(false); + _metrics.RecordGraphJobResult("overlay", "failed", completionTime - running.CreatedAt); + return GraphOverlayExecutionResult.Failed(running, response.Error); + } + + await Task.Delay(backoff, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) when (!cancellationToken.IsCancellationRequested) + { + lastException = ex; + + if (attempt >= graphOptions.MaxAttempts) + { + var completionTime = _timeProvider.GetUtcNow(); + await NotifyCompletionAsync(running, GraphJobStatus.Failed, completionTime, running.GraphSnapshotId, null, ex.Message, cancellationToken).ConfigureAwait(false); + _metrics.RecordGraphJobResult("overlay", "failed", completionTime - running.CreatedAt); + return GraphOverlayExecutionResult.Failed(running, ex.Message); + } + + _logger.LogWarning(ex, "Cartographer overlay attempt {Attempt} failed for job {JobId}; retrying in {Delay}.", attempt, job.Id, backoff); + await Task.Delay(backoff, cancellationToken).ConfigureAwait(false); + } + } + + var error = lastResult?.Error ?? lastException?.Message ?? "Cartographer overlay failed"; + var finalTime = _timeProvider.GetUtcNow(); + await NotifyCompletionAsync(running, GraphJobStatus.Failed, finalTime, lastResult?.GraphSnapshotId ?? 
running.GraphSnapshotId, lastResult?.ResultUri, error, cancellationToken).ConfigureAwait(false); + _metrics.RecordGraphJobResult("overlay", "failed", finalTime - running.CreatedAt); + return GraphOverlayExecutionResult.Failed(running, error); + } + + private async Task NotifyCompletionAsync( + GraphOverlayJob job, + GraphJobStatus status, + DateTimeOffset occurredAt, + string? graphSnapshotId, + string? resultUri, + string? error, + CancellationToken cancellationToken) + { + var dto = new GraphJobCompletionRequestDto( + job.Id, + "Overlay", + status, + occurredAt, + graphSnapshotId ?? job.GraphSnapshotId, + resultUri, + job.CorrelationId, + status == GraphJobStatus.Failed ? (error ?? "Cartographer overlay failed.") : null); + + try + { + await _completionClient.NotifyAsync(dto, cancellationToken).ConfigureAwait(false); + } + catch (Exception ex) when (!cancellationToken.IsCancellationRequested) + { + _logger.LogError(ex, "Failed notifying Scheduler completion for graph overlay job {JobId}.", job.Id); + } + } +} + +internal enum GraphOverlayExecutionResultType +{ + Completed, + Failed, + Skipped +} + +internal readonly record struct GraphOverlayExecutionResult( + GraphOverlayExecutionResultType Type, + GraphOverlayJob Job, + string? Reason = null, + string? ResultUri = null) +{ + public static GraphOverlayExecutionResult Completed(GraphOverlayJob job, string? resultUri) + => new(GraphOverlayExecutionResultType.Completed, job, ResultUri: resultUri); + + public static GraphOverlayExecutionResult Failed(GraphOverlayJob job, string? error) + => new(GraphOverlayExecutionResultType.Failed, job, error); + + public static GraphOverlayExecutionResult Skipped(GraphOverlayJob job, string reason) + => new(GraphOverlayExecutionResultType.Skipped, job, reason); +} diff --git a/src/StellaOps.Scheduler.Worker/Graph/Scheduler/HttpGraphJobCompletionClient.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Graph/Scheduler/HttpGraphJobCompletionClient.cs similarity index 97% rename from src/StellaOps.Scheduler.Worker/Graph/Scheduler/HttpGraphJobCompletionClient.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Graph/Scheduler/HttpGraphJobCompletionClient.cs index fab7f497..eefe9d5b 100644 --- a/src/StellaOps.Scheduler.Worker/Graph/Scheduler/HttpGraphJobCompletionClient.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Graph/Scheduler/HttpGraphJobCompletionClient.cs @@ -1,99 +1,99 @@ -using System; -using System.Net.Http; -using System.Net.Http.Json; -using System.Text.Json; -using System.Text.Json.Serialization; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Scheduler.Models; -using StellaOps.Scheduler.Worker.Options; - -namespace StellaOps.Scheduler.Worker.Graph.Scheduler; - -internal sealed class HttpGraphJobCompletionClient : IGraphJobCompletionClient -{ - private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) - { - DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull - }; - - private readonly HttpClient _httpClient; - private readonly IOptions<SchedulerWorkerOptions> _options; - private readonly ILogger<HttpGraphJobCompletionClient> _logger; - - public HttpGraphJobCompletionClient( - HttpClient httpClient, - IOptions<SchedulerWorkerOptions> options, - ILogger<HttpGraphJobCompletionClient> logger) - { - _httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient)); - _options = options ?? 
throw new ArgumentNullException(nameof(options)); - _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - } - - public async Task NotifyAsync(GraphJobCompletionRequestDto request, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(request); - - var graphOptions = _options.Value.Graph; - var api = graphOptions.SchedulerApi; - - if (api.BaseAddress is null) - { - throw new InvalidOperationException("Scheduler API base address must be configured before notifying graph job completion."); - } - - if (_httpClient.BaseAddress != api.BaseAddress) - { - _httpClient.BaseAddress = api.BaseAddress; - } - - using var message = new HttpRequestMessage(HttpMethod.Post, api.CompletionPath) - { - Content = JsonContent.Create(new SchedulerCompletionRequest(request), options: SerializerOptions) - }; - - if (!string.IsNullOrWhiteSpace(api.ApiKeyHeader) && !string.IsNullOrWhiteSpace(api.ApiKey)) - { - message.Headers.TryAddWithoutValidation(api.ApiKeyHeader!, api.ApiKey); - } - - using var response = await _httpClient.SendAsync(message, cancellationToken).ConfigureAwait(false); - - if (!response.IsSuccessStatusCode) - { - var body = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - _logger.LogWarning( - "Scheduler API returned status {StatusCode} while completing graph job {JobId}: {Body}", - (int)response.StatusCode, - request.JobId, - body); - } - } - - private sealed record SchedulerCompletionRequest( - string JobId, - string JobType, - StellaOps.Scheduler.Models.GraphJobStatus Status, - DateTimeOffset OccurredAt, - string? GraphSnapshotId, - string? ResultUri, - string? CorrelationId, - string? Error) - { - public SchedulerCompletionRequest(GraphJobCompletionRequestDto dto) - : this( - dto.JobId, - dto.JobType, - dto.Status, - dto.OccurredAt, - dto.GraphSnapshotId, - dto.ResultUri, - dto.CorrelationId, - dto.Error) - { - } - } -} +using System; +using System.Net.Http; +using System.Net.Http.Json; +using System.Text.Json; +using System.Text.Json.Serialization; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Scheduler.Models; +using StellaOps.Scheduler.Worker.Options; + +namespace StellaOps.Scheduler.Worker.Graph.Scheduler; + +internal sealed class HttpGraphJobCompletionClient : IGraphJobCompletionClient +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull + }; + + private readonly HttpClient _httpClient; + private readonly IOptions<SchedulerWorkerOptions> _options; + private readonly ILogger<HttpGraphJobCompletionClient> _logger; + + public HttpGraphJobCompletionClient( + HttpClient httpClient, + IOptions<SchedulerWorkerOptions> options, + ILogger<HttpGraphJobCompletionClient> logger) + { + _httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient)); + _options = options ?? throw new ArgumentNullException(nameof(options)); + _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + } + + public async Task NotifyAsync(GraphJobCompletionRequestDto request, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(request); + + var graphOptions = _options.Value.Graph; + var api = graphOptions.SchedulerApi; + + if (api.BaseAddress is null) + { + throw new InvalidOperationException("Scheduler API base address must be configured before notifying graph job completion."); + } + + if (_httpClient.BaseAddress != api.BaseAddress) + { + _httpClient.BaseAddress = api.BaseAddress; + } + + using var message = new HttpRequestMessage(HttpMethod.Post, api.CompletionPath) + { + Content = JsonContent.Create(new SchedulerCompletionRequest(request), options: SerializerOptions) + }; + + if (!string.IsNullOrWhiteSpace(api.ApiKeyHeader) && !string.IsNullOrWhiteSpace(api.ApiKey)) + { + message.Headers.TryAddWithoutValidation(api.ApiKeyHeader!, api.ApiKey); + } + + using var response = await _httpClient.SendAsync(message, cancellationToken).ConfigureAwait(false); + + if (!response.IsSuccessStatusCode) + { + var body = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + _logger.LogWarning( + "Scheduler API returned status {StatusCode} while completing graph job {JobId}: {Body}", + (int)response.StatusCode, + request.JobId, + body); + } + } + + private sealed record SchedulerCompletionRequest( + string JobId, + string JobType, + StellaOps.Scheduler.Models.GraphJobStatus Status, + DateTimeOffset OccurredAt, + string? GraphSnapshotId, + string? ResultUri, + string? CorrelationId, + string? Error) + { + public SchedulerCompletionRequest(GraphJobCompletionRequestDto dto) + : this( + dto.JobId, + dto.JobType, + dto.Status, + dto.OccurredAt, + dto.GraphSnapshotId, + dto.ResultUri, + dto.CorrelationId, + dto.Error) + { + } + } +} diff --git a/src/StellaOps.Scheduler.Worker/Graph/Scheduler/IGraphJobCompletionClient.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Graph/Scheduler/IGraphJobCompletionClient.cs similarity index 96% rename from src/StellaOps.Scheduler.Worker/Graph/Scheduler/IGraphJobCompletionClient.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Graph/Scheduler/IGraphJobCompletionClient.cs index 134b9a2c..720c71a7 100644 --- a/src/StellaOps.Scheduler.Worker/Graph/Scheduler/IGraphJobCompletionClient.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Graph/Scheduler/IGraphJobCompletionClient.cs @@ -1,21 +1,21 @@ -using System; -using System.Threading; -using System.Threading.Tasks; -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.Worker.Graph.Scheduler; - -internal interface IGraphJobCompletionClient -{ - Task NotifyAsync(GraphJobCompletionRequestDto request, CancellationToken cancellationToken); -} - -internal sealed record GraphJobCompletionRequestDto( - string JobId, - string JobType, - StellaOps.Scheduler.Models.GraphJobStatus Status, - DateTimeOffset OccurredAt, - string? GraphSnapshotId, - string? ResultUri, - string? CorrelationId, - string? 
Error); +using System; +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.Worker.Graph.Scheduler; + +internal interface IGraphJobCompletionClient +{ + Task NotifyAsync(GraphJobCompletionRequestDto request, CancellationToken cancellationToken); +} + +internal sealed record GraphJobCompletionRequestDto( + string JobId, + string JobType, + StellaOps.Scheduler.Models.GraphJobStatus Status, + DateTimeOffset OccurredAt, + string? GraphSnapshotId, + string? ResultUri, + string? CorrelationId, + string? Error); diff --git a/src/StellaOps.Scheduler.Worker/ImpactShard.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/ImpactShard.cs similarity index 100% rename from src/StellaOps.Scheduler.Worker/ImpactShard.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/ImpactShard.cs diff --git a/src/StellaOps.Scheduler.Worker/ImpactShardPlanner.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/ImpactShardPlanner.cs similarity index 100% rename from src/StellaOps.Scheduler.Worker/ImpactShardPlanner.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/ImpactShardPlanner.cs diff --git a/src/StellaOps.Scheduler.Worker/ImpactTargetingService.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/ImpactTargetingService.cs similarity index 100% rename from src/StellaOps.Scheduler.Worker/ImpactTargetingService.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/ImpactTargetingService.cs diff --git a/src/StellaOps.Scheduler.Worker/Observability/SchedulerWorkerMetrics.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Observability/SchedulerWorkerMetrics.cs similarity index 97% rename from src/StellaOps.Scheduler.Worker/Observability/SchedulerWorkerMetrics.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Observability/SchedulerWorkerMetrics.cs index 07e8fe69..e96abe7a 100644 --- a/src/StellaOps.Scheduler.Worker/Observability/SchedulerWorkerMetrics.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Observability/SchedulerWorkerMetrics.cs @@ -1,236 +1,236 @@ -using System; -using System.Collections.Concurrent; -using System.Collections.Generic; -using System.Diagnostics.Metrics; -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.Worker.Observability; - -public sealed class SchedulerWorkerMetrics : IDisposable -{ - public const string MeterName = "StellaOps.Scheduler.Worker"; - - private readonly Meter _meter; - private readonly Counter<long> _plannerRunsTotal; - private readonly Histogram<double> _plannerLatencySeconds; - private readonly Counter<long> _runnerSegmentsTotal; - private readonly Counter<long> _runnerImagesTotal; - private readonly Counter<long> _runnerDeltaCriticalTotal; - private readonly Counter<long> _runnerDeltaHighTotal; - private readonly Counter<long> _runnerDeltaFindingsTotal; - private readonly Counter<long> _runnerKevHitsTotal; - private readonly Histogram<double> _runDurationSeconds; - private readonly UpDownCounter<long> _runsActive; - private readonly Counter<long> _graphJobsTotal; - private readonly Histogram<double> _graphJobDurationSeconds; - private readonly ConcurrentDictionary<string, long> _backlog = new(StringComparer.Ordinal); - private readonly ObservableGauge<long> _backlogGauge; - private bool _disposed; - - public SchedulerWorkerMetrics() - { - _meter = new Meter(MeterName); - _plannerRunsTotal = _meter.CreateCounter<long>( - "scheduler_planner_runs_total", - unit: "count", - description: "Planner 
runs grouped by status and mode."); - _plannerLatencySeconds = _meter.CreateHistogram<double>( - "scheduler_planner_latency_seconds", - unit: "s", - description: "Latency between run creation and planner processing grouped by mode and status."); - _runnerSegmentsTotal = _meter.CreateCounter<long>( - "scheduler_runner_segments_total", - unit: "count", - description: "Runner segments processed grouped by status and mode."); - _runnerImagesTotal = _meter.CreateCounter<long>( - "scheduler_runner_images_total", - unit: "count", - description: "Images processed by runner grouped by mode and delta outcome."); - _runnerDeltaCriticalTotal = _meter.CreateCounter<long>( - "scheduler_runner_delta_critical_total", - unit: "count", - description: "Critical findings observed by runner grouped by mode."); - _runnerDeltaHighTotal = _meter.CreateCounter<long>( - "scheduler_runner_delta_high_total", - unit: "count", - description: "High findings observed by runner grouped by mode."); - _runnerDeltaFindingsTotal = _meter.CreateCounter<long>( - "scheduler_runner_delta_total", - unit: "count", - description: "Total findings observed by runner grouped by mode."); - _runnerKevHitsTotal = _meter.CreateCounter<long>( - "scheduler_runner_delta_kev_total", - unit: "count", - description: "KEV hits observed by runner grouped by mode."); - _runDurationSeconds = _meter.CreateHistogram<double>( - "scheduler_run_duration_seconds", - unit: "s", - description: "End-to-end run durations grouped by mode and result."); - _runsActive = _meter.CreateUpDownCounter<long>( - "scheduler_runs_active", - unit: "count", - description: "Active scheduler runs grouped by mode."); - _graphJobsTotal = _meter.CreateCounter<long>( - "scheduler_graph_jobs_total", - unit: "count", - description: "Graph jobs processed by the worker grouped by type and result."); - _graphJobDurationSeconds = _meter.CreateHistogram<double>( - "scheduler_graph_job_duration_seconds", - unit: "s", - description: "Graph job durations grouped by type and result."); - _backlogGauge = _meter.CreateObservableGauge<long>( - "scheduler_runner_backlog", - ObserveBacklog, - unit: "images", - description: "Remaining images queued for runner processing grouped by mode and schedule."); - } - - public void RecordGraphJobResult(string type, string result, TimeSpan? 
duration = null) - { - var tags = new[] - { - new KeyValuePair<string, object?>("type", type), - new KeyValuePair<string, object?>("result", result) - }; - - _graphJobsTotal.Add(1, tags); - - if (duration is { } jobDuration) - { - _graphJobDurationSeconds.Record(Math.Max(jobDuration.TotalSeconds, 0d), tags); - } - } - - public void RecordPlannerResult(string mode, string status, TimeSpan latency, int imageCount) - { - var tags = new[] - { - new KeyValuePair<string, object?>("mode", mode), - new KeyValuePair<string, object?>("status", status) - }; - _plannerRunsTotal.Add(1, tags); - _plannerLatencySeconds.Record(Math.Max(latency.TotalSeconds, 0d), tags); - - if (status.Equals("enqueued", StringComparison.OrdinalIgnoreCase) && imageCount > 0) - { - _runsActive.Add(1, new[] { new KeyValuePair<string, object?>("mode", mode) }); - } - } - - public void RecordRunnerSegment(string mode, string status, int processedImages, int deltaImages) - { - var tags = new[] - { - new KeyValuePair<string, object?>("mode", mode), - new KeyValuePair<string, object?>("status", status) - }; - - _runnerSegmentsTotal.Add(1, tags); - - var imageTags = new[] - { - new KeyValuePair<string, object?>("mode", mode), - new KeyValuePair<string, object?>("delta", deltaImages > 0 ? "true" : "false") - }; - _runnerImagesTotal.Add(processedImages, imageTags); - } - - public void RecordDeltaSummaries(string mode, IReadOnlyList<DeltaSummary> deltas) - { - if (deltas.Count == 0) - { - return; - } - - var tags = new[] { new KeyValuePair<string, object?>("mode", mode) }; - - foreach (var delta in deltas) - { - if (delta.NewCriticals > 0) - { - _runnerDeltaCriticalTotal.Add(delta.NewCriticals, tags); - } - - if (delta.NewHigh > 0) - { - _runnerDeltaHighTotal.Add(delta.NewHigh, tags); - } - - if (delta.NewFindings > 0) - { - _runnerDeltaFindingsTotal.Add(delta.NewFindings, tags); - } - - if (!delta.KevHits.IsDefaultOrEmpty) - { - _runnerKevHitsTotal.Add(delta.KevHits.Length, tags); - } - } - } - - public void RecordRunCompletion(string mode, string result, TimeSpan? duration, bool decrementActive = true) - { - var tags = new[] - { - new KeyValuePair<string, object?>("mode", mode), - new KeyValuePair<string, object?>("result", result) - }; - - if (duration is { } runDuration) - { - _runDurationSeconds.Record(Math.Max(runDuration.TotalSeconds, 0d), tags); - } - - if (decrementActive) - { - _runsActive.Add(-1, new[] { new KeyValuePair<string, object?>("mode", mode) }); - } - } - - public void UpdateBacklog(string mode, string? scheduleId, long backlog) - { - var key = BuildBacklogKey(mode, scheduleId); - if (backlog <= 0) - { - _backlog.TryRemove(key, out _); - } - else - { - _backlog[key] = backlog; - } - } - - private IEnumerable<Measurement<long>> ObserveBacklog() - { - foreach (var entry in _backlog) - { - var (mode, scheduleId) = SplitBacklogKey(entry.Key); - yield return new Measurement<long>( - entry.Value, - new KeyValuePair<string, object?>("mode", mode), - new KeyValuePair<string, object?>("scheduleId", scheduleId ?? string.Empty)); - } - } - - private static string BuildBacklogKey(string mode, string? scheduleId) - => $"{mode}|{scheduleId ?? string.Empty}"; - - private static (string Mode, string? ScheduleId) SplitBacklogKey(string key) - { - var parts = key.Split('|', 2); - return parts.Length == 2 - ? (parts[0], string.IsNullOrEmpty(parts[1]) ? 
null : parts[1]) - : (key, null); - } - - public void Dispose() - { - if (_disposed) - { - return; - } - - _meter.Dispose(); - _disposed = true; - } -} +using System; +using System.Collections.Concurrent; +using System.Collections.Generic; +using System.Diagnostics.Metrics; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.Worker.Observability; + +public sealed class SchedulerWorkerMetrics : IDisposable +{ + public const string MeterName = "StellaOps.Scheduler.Worker"; + + private readonly Meter _meter; + private readonly Counter<long> _plannerRunsTotal; + private readonly Histogram<double> _plannerLatencySeconds; + private readonly Counter<long> _runnerSegmentsTotal; + private readonly Counter<long> _runnerImagesTotal; + private readonly Counter<long> _runnerDeltaCriticalTotal; + private readonly Counter<long> _runnerDeltaHighTotal; + private readonly Counter<long> _runnerDeltaFindingsTotal; + private readonly Counter<long> _runnerKevHitsTotal; + private readonly Histogram<double> _runDurationSeconds; + private readonly UpDownCounter<long> _runsActive; + private readonly Counter<long> _graphJobsTotal; + private readonly Histogram<double> _graphJobDurationSeconds; + private readonly ConcurrentDictionary<string, long> _backlog = new(StringComparer.Ordinal); + private readonly ObservableGauge<long> _backlogGauge; + private bool _disposed; + + public SchedulerWorkerMetrics() + { + _meter = new Meter(MeterName); + _plannerRunsTotal = _meter.CreateCounter<long>( + "scheduler_planner_runs_total", + unit: "count", + description: "Planner runs grouped by status and mode."); + _plannerLatencySeconds = _meter.CreateHistogram<double>( + "scheduler_planner_latency_seconds", + unit: "s", + description: "Latency between run creation and planner processing grouped by mode and status."); + _runnerSegmentsTotal = _meter.CreateCounter<long>( + "scheduler_runner_segments_total", + unit: "count", + description: "Runner segments processed grouped by status and mode."); + _runnerImagesTotal = _meter.CreateCounter<long>( + "scheduler_runner_images_total", + unit: "count", + description: "Images processed by runner grouped by mode and delta outcome."); + _runnerDeltaCriticalTotal = _meter.CreateCounter<long>( + "scheduler_runner_delta_critical_total", + unit: "count", + description: "Critical findings observed by runner grouped by mode."); + _runnerDeltaHighTotal = _meter.CreateCounter<long>( + "scheduler_runner_delta_high_total", + unit: "count", + description: "High findings observed by runner grouped by mode."); + _runnerDeltaFindingsTotal = _meter.CreateCounter<long>( + "scheduler_runner_delta_total", + unit: "count", + description: "Total findings observed by runner grouped by mode."); + _runnerKevHitsTotal = _meter.CreateCounter<long>( + "scheduler_runner_delta_kev_total", + unit: "count", + description: "KEV hits observed by runner grouped by mode."); + _runDurationSeconds = _meter.CreateHistogram<double>( + "scheduler_run_duration_seconds", + unit: "s", + description: "End-to-end run durations grouped by mode and result."); + _runsActive = _meter.CreateUpDownCounter<long>( + "scheduler_runs_active", + unit: "count", + description: "Active scheduler runs grouped by mode."); + _graphJobsTotal = _meter.CreateCounter<long>( + "scheduler_graph_jobs_total", + unit: "count", + description: "Graph jobs processed by the worker grouped by type and result."); + _graphJobDurationSeconds = _meter.CreateHistogram<double>( + "scheduler_graph_job_duration_seconds", + unit: "s", + description: "Graph job 
durations grouped by type and result."); + _backlogGauge = _meter.CreateObservableGauge<long>( + "scheduler_runner_backlog", + ObserveBacklog, + unit: "images", + description: "Remaining images queued for runner processing grouped by mode and schedule."); + } + + public void RecordGraphJobResult(string type, string result, TimeSpan? duration = null) + { + var tags = new[] + { + new KeyValuePair<string, object?>("type", type), + new KeyValuePair<string, object?>("result", result) + }; + + _graphJobsTotal.Add(1, tags); + + if (duration is { } jobDuration) + { + _graphJobDurationSeconds.Record(Math.Max(jobDuration.TotalSeconds, 0d), tags); + } + } + + public void RecordPlannerResult(string mode, string status, TimeSpan latency, int imageCount) + { + var tags = new[] + { + new KeyValuePair<string, object?>("mode", mode), + new KeyValuePair<string, object?>("status", status) + }; + _plannerRunsTotal.Add(1, tags); + _plannerLatencySeconds.Record(Math.Max(latency.TotalSeconds, 0d), tags); + + if (status.Equals("enqueued", StringComparison.OrdinalIgnoreCase) && imageCount > 0) + { + _runsActive.Add(1, new[] { new KeyValuePair<string, object?>("mode", mode) }); + } + } + + public void RecordRunnerSegment(string mode, string status, int processedImages, int deltaImages) + { + var tags = new[] + { + new KeyValuePair<string, object?>("mode", mode), + new KeyValuePair<string, object?>("status", status) + }; + + _runnerSegmentsTotal.Add(1, tags); + + var imageTags = new[] + { + new KeyValuePair<string, object?>("mode", mode), + new KeyValuePair<string, object?>("delta", deltaImages > 0 ? "true" : "false") + }; + _runnerImagesTotal.Add(processedImages, imageTags); + } + + public void RecordDeltaSummaries(string mode, IReadOnlyList<DeltaSummary> deltas) + { + if (deltas.Count == 0) + { + return; + } + + var tags = new[] { new KeyValuePair<string, object?>("mode", mode) }; + + foreach (var delta in deltas) + { + if (delta.NewCriticals > 0) + { + _runnerDeltaCriticalTotal.Add(delta.NewCriticals, tags); + } + + if (delta.NewHigh > 0) + { + _runnerDeltaHighTotal.Add(delta.NewHigh, tags); + } + + if (delta.NewFindings > 0) + { + _runnerDeltaFindingsTotal.Add(delta.NewFindings, tags); + } + + if (!delta.KevHits.IsDefaultOrEmpty) + { + _runnerKevHitsTotal.Add(delta.KevHits.Length, tags); + } + } + } + + public void RecordRunCompletion(string mode, string result, TimeSpan? duration, bool decrementActive = true) + { + var tags = new[] + { + new KeyValuePair<string, object?>("mode", mode), + new KeyValuePair<string, object?>("result", result) + }; + + if (duration is { } runDuration) + { + _runDurationSeconds.Record(Math.Max(runDuration.TotalSeconds, 0d), tags); + } + + if (decrementActive) + { + _runsActive.Add(-1, new[] { new KeyValuePair<string, object?>("mode", mode) }); + } + } + + public void UpdateBacklog(string mode, string? scheduleId, long backlog) + { + var key = BuildBacklogKey(mode, scheduleId); + if (backlog <= 0) + { + _backlog.TryRemove(key, out _); + } + else + { + _backlog[key] = backlog; + } + } + + private IEnumerable<Measurement<long>> ObserveBacklog() + { + foreach (var entry in _backlog) + { + var (mode, scheduleId) = SplitBacklogKey(entry.Key); + yield return new Measurement<long>( + entry.Value, + new KeyValuePair<string, object?>("mode", mode), + new KeyValuePair<string, object?>("scheduleId", scheduleId ?? string.Empty)); + } + } + + private static string BuildBacklogKey(string mode, string? scheduleId) + => $"{mode}|{scheduleId ?? string.Empty}"; + + private static (string Mode, string? 
ScheduleId) SplitBacklogKey(string key) + { + var parts = key.Split('|', 2); + return parts.Length == 2 + ? (parts[0], string.IsNullOrEmpty(parts[1]) ? null : parts[1]) + : (key, null); + } + + public void Dispose() + { + if (_disposed) + { + return; + } + + _meter.Dispose(); + _disposed = true; + } +} diff --git a/src/StellaOps.Scheduler.Worker/Options/SchedulerWorkerOptions.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Options/SchedulerWorkerOptions.cs similarity index 97% rename from src/StellaOps.Scheduler.Worker/Options/SchedulerWorkerOptions.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Options/SchedulerWorkerOptions.cs index 2bbc68a0..a1fd8133 100644 --- a/src/StellaOps.Scheduler.Worker/Options/SchedulerWorkerOptions.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Options/SchedulerWorkerOptions.cs @@ -1,649 +1,649 @@ -using System; - -namespace StellaOps.Scheduler.Worker.Options; - -/// <summary> -/// Strongly typed options for the scheduler worker host. -/// </summary> -public sealed class SchedulerWorkerOptions -{ - public PlannerOptions Planner { get; set; } = new(); - - public RunnerOptions Runner { get; set; } = new(); - - public PolicyOptions Policy { get; set; } = new(); - - public GraphOptions Graph { get; set; } = new(); - - public void Validate() - { - Planner.Validate(); - Runner.Validate(); - Policy.Validate(); - Graph.Validate(); - } - - public sealed class PlannerOptions - { - /// <summary> - /// Maximum number of planning runs to fetch per polling iteration. - /// </summary> - public int BatchSize { get; set; } = 20; - - /// <summary> - /// Polling cadence for the planner loop when work is available. - /// </summary> - public TimeSpan PollInterval { get; set; } = TimeSpan.FromSeconds(5); - - /// <summary> - /// Delay applied between polls when no work is available. - /// </summary> - public TimeSpan IdleDelay { get; set; } = TimeSpan.FromSeconds(15); - - /// <summary> - /// Maximum number of tenants that can be processed concurrently. - /// </summary> - public int MaxConcurrentTenants { get; set; } = Environment.ProcessorCount; - - /// <summary> - /// Maximum number of planning runs allowed per minute (global throttle). - /// </summary> - public int MaxRunsPerMinute { get; set; } = 120; - - /// <summary> - /// Lease duration requested from the planner queue transport for deduplication. 
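
The nested Validate() methods in SchedulerWorkerOptions throw InvalidOperationException on misconfiguration, but this patch does not show where they are invoked. A minimal sketch of binding the options in a host and running those guards at startup is given below; the "Scheduler:Worker" section name and the extension-method name are assumptions for illustration, not part of this change.

using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using StellaOps.Scheduler.Worker.Options;

internal static class SchedulerWorkerOptionsSetup
{
    // Hypothetical wiring: binds SchedulerWorkerOptions and reuses its Validate()
    // guards so invalid values fail the host at startup rather than at runtime.
    public static IServiceCollection AddSchedulerWorkerOptions(
        this IServiceCollection services,
        IConfiguration configuration)
    {
        services.AddOptions<SchedulerWorkerOptions>()
            .Bind(configuration.GetSection("Scheduler:Worker")) // section name assumed
            .Validate(options =>
            {
                options.Validate(); // throws InvalidOperationException for invalid bound values
                return true;
            })
            .ValidateOnStart();

        return services;
    }
}
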
- /// </summary> - public TimeSpan QueueLeaseDuration { get; set; } = TimeSpan.FromMinutes(5); - - public void Validate() - { - if (BatchSize <= 0) - { - throw new InvalidOperationException("Planner batch size must be greater than zero."); - } - - if (PollInterval <= TimeSpan.Zero) - { - throw new InvalidOperationException("Planner poll interval must be greater than zero."); - } - - if (IdleDelay <= TimeSpan.Zero) - { - throw new InvalidOperationException("Planner idle delay must be greater than zero."); - } - - if (MaxConcurrentTenants <= 0) - { - throw new InvalidOperationException("Planner max concurrent tenants must be greater than zero."); - } - - if (MaxRunsPerMinute <= 0) - { - throw new InvalidOperationException("Planner max runs per minute must be greater than zero."); - } - - if (QueueLeaseDuration <= TimeSpan.Zero) - { - throw new InvalidOperationException("Planner queue lease duration must be greater than zero."); - } - } - } - - public sealed class RunnerOptions - { - public DispatchOptions Dispatch { get; set; } = new(); - - public ExecutionOptions Execution { get; set; } = new(); - - public ScannerOptions Scanner { get; set; } = new(); - - public void Validate() - { - Dispatch.Validate(); - Execution.Validate(); - Scanner.Validate(); - } - - public sealed class DispatchOptions - { - /// <summary> - /// Consumer name used when leasing planner queue messages to dispatch runner segments. - /// </summary> - public string ConsumerName { get; set; } = "scheduler-runner-dispatch"; - - /// <summary> - /// Maximum number of planner messages claimed per lease. - /// </summary> - public int BatchSize { get; set; } = 5; - - /// <summary> - /// Duration of the lease held while dispatching runner segments. - /// </summary> - public TimeSpan LeaseDuration { get; set; } = TimeSpan.FromMinutes(5); - - /// <summary> - /// Delay applied between polls when no planner messages are available. - /// </summary> - public TimeSpan IdleDelay { get; set; } = TimeSpan.FromSeconds(10); - - public void Validate() - { - if (string.IsNullOrWhiteSpace(ConsumerName)) - { - throw new InvalidOperationException("Runner dispatch consumer name must be configured."); - } - - if (BatchSize <= 0) - { - throw new InvalidOperationException("Runner dispatch batch size must be greater than zero."); - } - - if (LeaseDuration <= TimeSpan.Zero) - { - throw new InvalidOperationException("Runner dispatch lease duration must be greater than zero."); - } - - if (IdleDelay < TimeSpan.Zero) - { - throw new InvalidOperationException("Runner dispatch idle delay cannot be negative."); - } - } - } - - public sealed class ExecutionOptions - { - /// <summary> - /// Consumer name used when leasing runner segment messages. - /// </summary> - public string ConsumerName { get; set; } = "scheduler-runner"; - - /// <summary> - /// Maximum number of runner segments leased per poll. - /// </summary> - public int BatchSize { get; set; } = 5; - - /// <summary> - /// Lease duration granted while processing a runner segment. - /// </summary> - public TimeSpan LeaseDuration { get; set; } = TimeSpan.FromMinutes(5); - - /// <summary> - /// Delay applied between polls when no runner segments are available. - /// </summary> - public TimeSpan IdleDelay { get; set; } = TimeSpan.FromSeconds(5); - - /// <summary> - /// Maximum number of runner segments processed concurrently. - /// </summary> - public int MaxConcurrentSegments { get; set; } = Environment.ProcessorCount; - - /// <summary> - /// Timeout applied to scanner requests per image digest. 
- /// </summary> - public TimeSpan ReportTimeout { get; set; } = TimeSpan.FromSeconds(60); - - public void Validate() - { - if (string.IsNullOrWhiteSpace(ConsumerName)) - { - throw new InvalidOperationException("Runner execution consumer name must be configured."); - } - - if (BatchSize <= 0) - { - throw new InvalidOperationException("Runner execution batch size must be greater than zero."); - } - - if (LeaseDuration <= TimeSpan.Zero) - { - throw new InvalidOperationException("Runner execution lease duration must be greater than zero."); - } - - if (IdleDelay < TimeSpan.Zero) - { - throw new InvalidOperationException("Runner execution idle delay cannot be negative."); - } - - if (MaxConcurrentSegments <= 0) - { - throw new InvalidOperationException("Runner execution max concurrent segments must be greater than zero."); - } - - if (ReportTimeout <= TimeSpan.Zero) - { - throw new InvalidOperationException("Runner execution report timeout must be greater than zero."); - } - } - } - - public sealed class ScannerOptions - { - /// <summary> - /// Base address for Scanner WebService API calls. - /// </summary> - public Uri? BaseAddress { get; set; } - - /// <summary> - /// Relative path to the reports endpoint. - /// </summary> - public string ReportsPath { get; set; } = "/api/v1/reports"; - - /// <summary> - /// Relative path to the scans endpoint (content refresh). - /// </summary> - public string ScansPath { get; set; } = "/api/v1/scans"; - - /// <summary> - /// Whether runner should attempt content refresh before requesting report in content refresh mode. - /// </summary> - public bool EnableContentRefresh { get; set; } = true; - - /// <summary> - /// Maximum number of scanner retries for transient failures. - /// </summary> - public int MaxRetryAttempts { get; set; } = 3; - - /// <summary> - /// Base delay applied between retries for transient failures. - /// </summary> - public TimeSpan RetryBaseDelay { get; set; } = TimeSpan.FromSeconds(2); - - public void Validate() - { - if (string.IsNullOrWhiteSpace(ReportsPath)) - { - throw new InvalidOperationException("Runner scanner reports path must be configured."); - } - - if (string.IsNullOrWhiteSpace(ScansPath)) - { - throw new InvalidOperationException("Runner scanner scans path must be configured."); - } - - if (MaxRetryAttempts < 0) - { - throw new InvalidOperationException("Runner scanner retry attempts cannot be negative."); - } - - if (RetryBaseDelay < TimeSpan.Zero) - { - throw new InvalidOperationException("Runner scanner retry delay cannot be negative."); - } - } - } - } - - public sealed class PolicyOptions - { - /// <summary> - /// When disabled the worker skips policy run dispatch entirely. - /// </summary> - public bool Enabled { get; set; } = true; - - public DispatchOptions Dispatch { get; set; } = new(); - - public ApiOptions Api { get; set; } = new(); - - public TargetingOptions Targeting { get; set; } = new(); - - public void Validate() - { - Dispatch.Validate(); - Api.Validate(); - Targeting.Validate(); - } - - public sealed class DispatchOptions - { - /// <summary> - /// Identifier used when leasing policy run jobs. - /// </summary> - public string LeaseOwner { get; set; } = "scheduler-policy-dispatch"; - - /// <summary> - /// Number of jobs leased per polling iteration. - /// </summary> - public int BatchSize { get; set; } = 5; - - /// <summary> - /// Duration of the lease while dispatching a policy run job. 
- /// </summary> - public TimeSpan LeaseDuration { get; set; } = TimeSpan.FromMinutes(2); - - /// <summary> - /// Delay applied when no policy jobs are available. - /// </summary> - public TimeSpan IdleDelay { get; set; } = TimeSpan.FromSeconds(5); - - /// <summary> - /// Maximum number of submission attempts before a job is marked failed. - /// </summary> - public int MaxAttempts { get; set; } = 3; - - /// <summary> - /// Base retry delay applied after a failed submission attempt. - /// </summary> - public TimeSpan RetryBackoff { get; set; } = TimeSpan.FromSeconds(15); - - public void Validate() - { - if (string.IsNullOrWhiteSpace(LeaseOwner)) - { - throw new InvalidOperationException("Policy dispatch lease owner must be configured."); - } - - if (BatchSize <= 0) - { - throw new InvalidOperationException("Policy dispatch batch size must be greater than zero."); - } - - if (LeaseDuration <= TimeSpan.Zero) - { - throw new InvalidOperationException("Policy dispatch lease duration must be greater than zero."); - } - - if (IdleDelay < TimeSpan.Zero) - { - throw new InvalidOperationException("Policy dispatch idle delay cannot be negative."); - } - - if (MaxAttempts <= 0) - { - throw new InvalidOperationException("Policy dispatch max attempts must be greater than zero."); - } - - if (RetryBackoff <= TimeSpan.Zero) - { - throw new InvalidOperationException("Policy dispatch retry backoff must be greater than zero."); - } - } - } - - public sealed class ApiOptions - { - /// <summary> - /// Base address for the Policy Engine REST API. - /// </summary> - public Uri? BaseAddress { get; set; } - - /// <summary> - /// Relative path used to trigger policy runs. Must contain the token "{policyId}". - /// </summary> - public string RunsPath { get; set; } = "/api/policy/policies/{policyId}/runs"; - - /// <summary> - /// Relative path used to trigger policy simulations. - /// </summary> - public string SimulatePath { get; set; } = "/api/policy/policies/{policyId}/simulate"; - - /// <summary> - /// Header conveying tenant context for Policy Engine requests. - /// </summary> - public string TenantHeader { get; set; } = "X-Stella-Tenant"; - - /// <summary> - /// Header used to supply idempotency keys for run submissions. - /// </summary> - public string IdempotencyHeader { get; set; } = "Idempotency-Key"; - - /// <summary> - /// Timeout for HTTP requests dispatched to the Policy Engine. 
- /// </summary> - public TimeSpan RequestTimeout { get; set; } = TimeSpan.FromSeconds(30); - - public void Validate() - { - if (BaseAddress is null) - { - throw new InvalidOperationException("Policy API base address must be configured."); - } - - if (!BaseAddress.IsAbsoluteUri) - { - throw new InvalidOperationException("Policy API base address must be an absolute URI."); - } - - if (string.IsNullOrWhiteSpace(RunsPath)) - { - throw new InvalidOperationException("Policy API runs path must be configured."); - } - - if (string.IsNullOrWhiteSpace(SimulatePath)) - { - throw new InvalidOperationException("Policy API simulate path must be configured."); - } - - if (string.IsNullOrWhiteSpace(TenantHeader)) - { - throw new InvalidOperationException("Policy API tenant header must be configured."); - } - - if (string.IsNullOrWhiteSpace(IdempotencyHeader)) - { - throw new InvalidOperationException("Policy API idempotency header must be configured."); - } - - if (RequestTimeout <= TimeSpan.Zero) - { - throw new InvalidOperationException("Policy API request timeout must be greater than zero."); - } - } - } - - public sealed class TargetingOptions - { - /// <summary> - /// When disabled the worker skips policy delta targeting. - /// </summary> - public bool Enabled { get; set; } = true; - - /// <summary> - /// Maximum number of SBOM identifiers allowed for targeted runs before falling back to a full run. - /// </summary> - public int MaxSboms { get; set; } = 10_000; - - /// <summary> - /// Default behaviour for usage-only targeting when metadata does not specify a preference. - /// </summary> - public bool DefaultUsageOnly { get; set; } = false; - - public void Validate() - { - if (MaxSboms <= 0) - { - throw new InvalidOperationException("Policy targeting MaxSboms must be greater than zero."); - } - } - } - } - - public sealed class GraphOptions - { - /// <summary> - /// When disabled the worker skips graph job processing entirely. - /// </summary> - public bool Enabled { get; set; } = true; - - /// <summary> - /// Maximum number of graph build jobs processed per polling iteration. - /// </summary> - public int BatchSize { get; set; } = 5; - - /// <summary> - /// Polling interval applied when jobs were processed in the last iteration. - /// </summary> - public TimeSpan PollInterval { get; set; } = TimeSpan.FromSeconds(5); - - /// <summary> - /// Delay applied when no graph jobs are available. - /// </summary> - public TimeSpan IdleDelay { get; set; } = TimeSpan.FromSeconds(20); - - /// <summary> - /// Maximum number of attempts before a graph build job is marked failed. - /// </summary> - public int MaxAttempts { get; set; } = 3; - - /// <summary> - /// Backoff duration applied between retries for transient failures. - /// </summary> - public TimeSpan RetryBackoff { get; set; } = TimeSpan.FromSeconds(30); - - /// <summary> - /// Timeout applied when waiting for Cartographer responses. - /// </summary> - public TimeSpan CartographerTimeout { get; set; } = TimeSpan.FromSeconds(60); - - /// <summary> - /// Base delay between polling Cartographer for job status when asynchronous responses are returned. 
- /// </summary> - public TimeSpan StatusPollInterval { get; set; } = TimeSpan.FromSeconds(10); - - public CartographerOptions Cartographer { get; set; } = new(); - - public SchedulerApiOptions SchedulerApi { get; set; } = new(); - - public void Validate() - { - if (BatchSize <= 0) - { - throw new InvalidOperationException("Graph batch size must be greater than zero."); - } - - if (PollInterval <= TimeSpan.Zero) - { - throw new InvalidOperationException("Graph poll interval must be greater than zero."); - } - - if (IdleDelay < TimeSpan.Zero) - { - throw new InvalidOperationException("Graph idle delay cannot be negative."); - } - - if (MaxAttempts <= 0) - { - throw new InvalidOperationException("Graph max attempts must be greater than zero."); - } - - if (RetryBackoff <= TimeSpan.Zero) - { - throw new InvalidOperationException("Graph retry backoff must be greater than zero."); - } - - if (CartographerTimeout <= TimeSpan.Zero) - { - throw new InvalidOperationException("Graph Cartographer timeout must be greater than zero."); - } - - if (StatusPollInterval <= TimeSpan.Zero) - { - throw new InvalidOperationException("Graph status poll interval must be greater than zero."); - } - - Cartographer.Validate(); - SchedulerApi.Validate(); - } - - public sealed class CartographerOptions - { - /// <summary> - /// Base address for Cartographer API requests. - /// </summary> - public Uri? BaseAddress { get; set; } - - /// <summary> - /// Relative path used to trigger graph builds. - /// </summary> - public string BuildPath { get; set; } = "/api/graphs/builds"; - - /// <summary> - /// Optional relative path used to query graph build status. Must contain the placeholder "{jobId}" when provided. - /// </summary> - public string StatusPath { get; set; } = "/api/graphs/builds/{jobId}"; - - /// <summary> - /// Relative path used to trigger graph overlay refreshes. - /// </summary> - public string OverlayPath { get; set; } = "/api/graphs/overlays"; - - /// <summary> - /// Optional relative path used to query graph overlay status. Must contain the placeholder "{jobId}" when provided. - /// </summary> - public string OverlayStatusPath { get; set; } = "/api/graphs/overlays/{jobId}"; - - /// <summary> - /// Optional header name for static API key authentication. - /// </summary> - public string? ApiKeyHeader { get; set; } - - /// <summary> - /// Optional API key value when using header-based authentication. - /// </summary> - public string? 
ApiKey { get; set; } - - public void Validate() - { - if (BuildPath is null || string.IsNullOrWhiteSpace(BuildPath)) - { - throw new InvalidOperationException("Cartographer build path must be configured."); - } - - if (string.IsNullOrWhiteSpace(StatusPath)) - { - throw new InvalidOperationException("Cartographer status path must be configured."); - } - - if (StatusPath.Contains("{jobId}", StringComparison.Ordinal) == false) - { - throw new InvalidOperationException("Cartographer status path must include '{jobId}' placeholder."); - } - - if (OverlayPath is null || string.IsNullOrWhiteSpace(OverlayPath)) - { - throw new InvalidOperationException("Cartographer overlay path must be configured."); - } - - if (string.IsNullOrWhiteSpace(OverlayStatusPath)) - { - throw new InvalidOperationException("Cartographer overlay status path must be configured."); - } - - if (OverlayStatusPath.Contains("{jobId}", StringComparison.Ordinal) == false) - { - throw new InvalidOperationException("Cartographer overlay status path must include '{jobId}' placeholder."); - } - } - } - - public sealed class SchedulerApiOptions - { - /// <summary> - /// Base address for Scheduler WebService graph endpoints. - /// </summary> - public Uri? BaseAddress { get; set; } - - /// <summary> - /// Relative path to the graph completion webhook. - /// </summary> - public string CompletionPath { get; set; } = "/graphs/hooks/completed"; - - /// <summary> - /// Optional API key header name when invoking Scheduler WebService. - /// </summary> - public string? ApiKeyHeader { get; set; } - - /// <summary> - /// Optional API key value. - /// </summary> - public string? ApiKey { get; set; } - - public void Validate() - { - if (string.IsNullOrWhiteSpace(CompletionPath)) - { - throw new InvalidOperationException("Scheduler graph completion path must be configured."); - } - } - } - } -} +using System; + +namespace StellaOps.Scheduler.Worker.Options; + +/// <summary> +/// Strongly typed options for the scheduler worker host. +/// </summary> +public sealed class SchedulerWorkerOptions +{ + public PlannerOptions Planner { get; set; } = new(); + + public RunnerOptions Runner { get; set; } = new(); + + public PolicyOptions Policy { get; set; } = new(); + + public GraphOptions Graph { get; set; } = new(); + + public void Validate() + { + Planner.Validate(); + Runner.Validate(); + Policy.Validate(); + Graph.Validate(); + } + + public sealed class PlannerOptions + { + /// <summary> + /// Maximum number of planning runs to fetch per polling iteration. + /// </summary> + public int BatchSize { get; set; } = 20; + + /// <summary> + /// Polling cadence for the planner loop when work is available. + /// </summary> + public TimeSpan PollInterval { get; set; } = TimeSpan.FromSeconds(5); + + /// <summary> + /// Delay applied between polls when no work is available. + /// </summary> + public TimeSpan IdleDelay { get; set; } = TimeSpan.FromSeconds(15); + + /// <summary> + /// Maximum number of tenants that can be processed concurrently. + /// </summary> + public int MaxConcurrentTenants { get; set; } = Environment.ProcessorCount; + + /// <summary> + /// Maximum number of planning runs allowed per minute (global throttle). + /// </summary> + public int MaxRunsPerMinute { get; set; } = 120; + + /// <summary> + /// Lease duration requested from the planner queue transport for deduplication. 
+ /// </summary> + public TimeSpan QueueLeaseDuration { get; set; } = TimeSpan.FromMinutes(5); + + public void Validate() + { + if (BatchSize <= 0) + { + throw new InvalidOperationException("Planner batch size must be greater than zero."); + } + + if (PollInterval <= TimeSpan.Zero) + { + throw new InvalidOperationException("Planner poll interval must be greater than zero."); + } + + if (IdleDelay <= TimeSpan.Zero) + { + throw new InvalidOperationException("Planner idle delay must be greater than zero."); + } + + if (MaxConcurrentTenants <= 0) + { + throw new InvalidOperationException("Planner max concurrent tenants must be greater than zero."); + } + + if (MaxRunsPerMinute <= 0) + { + throw new InvalidOperationException("Planner max runs per minute must be greater than zero."); + } + + if (QueueLeaseDuration <= TimeSpan.Zero) + { + throw new InvalidOperationException("Planner queue lease duration must be greater than zero."); + } + } + } + + public sealed class RunnerOptions + { + public DispatchOptions Dispatch { get; set; } = new(); + + public ExecutionOptions Execution { get; set; } = new(); + + public ScannerOptions Scanner { get; set; } = new(); + + public void Validate() + { + Dispatch.Validate(); + Execution.Validate(); + Scanner.Validate(); + } + + public sealed class DispatchOptions + { + /// <summary> + /// Consumer name used when leasing planner queue messages to dispatch runner segments. + /// </summary> + public string ConsumerName { get; set; } = "scheduler-runner-dispatch"; + + /// <summary> + /// Maximum number of planner messages claimed per lease. + /// </summary> + public int BatchSize { get; set; } = 5; + + /// <summary> + /// Duration of the lease held while dispatching runner segments. + /// </summary> + public TimeSpan LeaseDuration { get; set; } = TimeSpan.FromMinutes(5); + + /// <summary> + /// Delay applied between polls when no planner messages are available. + /// </summary> + public TimeSpan IdleDelay { get; set; } = TimeSpan.FromSeconds(10); + + public void Validate() + { + if (string.IsNullOrWhiteSpace(ConsumerName)) + { + throw new InvalidOperationException("Runner dispatch consumer name must be configured."); + } + + if (BatchSize <= 0) + { + throw new InvalidOperationException("Runner dispatch batch size must be greater than zero."); + } + + if (LeaseDuration <= TimeSpan.Zero) + { + throw new InvalidOperationException("Runner dispatch lease duration must be greater than zero."); + } + + if (IdleDelay < TimeSpan.Zero) + { + throw new InvalidOperationException("Runner dispatch idle delay cannot be negative."); + } + } + } + + public sealed class ExecutionOptions + { + /// <summary> + /// Consumer name used when leasing runner segment messages. + /// </summary> + public string ConsumerName { get; set; } = "scheduler-runner"; + + /// <summary> + /// Maximum number of runner segments leased per poll. + /// </summary> + public int BatchSize { get; set; } = 5; + + /// <summary> + /// Lease duration granted while processing a runner segment. + /// </summary> + public TimeSpan LeaseDuration { get; set; } = TimeSpan.FromMinutes(5); + + /// <summary> + /// Delay applied between polls when no runner segments are available. + /// </summary> + public TimeSpan IdleDelay { get; set; } = TimeSpan.FromSeconds(5); + + /// <summary> + /// Maximum number of runner segments processed concurrently. + /// </summary> + public int MaxConcurrentSegments { get; set; } = Environment.ProcessorCount; + + /// <summary> + /// Timeout applied to scanner requests per image digest. 
+ /// </summary> + public TimeSpan ReportTimeout { get; set; } = TimeSpan.FromSeconds(60); + + public void Validate() + { + if (string.IsNullOrWhiteSpace(ConsumerName)) + { + throw new InvalidOperationException("Runner execution consumer name must be configured."); + } + + if (BatchSize <= 0) + { + throw new InvalidOperationException("Runner execution batch size must be greater than zero."); + } + + if (LeaseDuration <= TimeSpan.Zero) + { + throw new InvalidOperationException("Runner execution lease duration must be greater than zero."); + } + + if (IdleDelay < TimeSpan.Zero) + { + throw new InvalidOperationException("Runner execution idle delay cannot be negative."); + } + + if (MaxConcurrentSegments <= 0) + { + throw new InvalidOperationException("Runner execution max concurrent segments must be greater than zero."); + } + + if (ReportTimeout <= TimeSpan.Zero) + { + throw new InvalidOperationException("Runner execution report timeout must be greater than zero."); + } + } + } + + public sealed class ScannerOptions + { + /// <summary> + /// Base address for Scanner WebService API calls. + /// </summary> + public Uri? BaseAddress { get; set; } + + /// <summary> + /// Relative path to the reports endpoint. + /// </summary> + public string ReportsPath { get; set; } = "/api/v1/reports"; + + /// <summary> + /// Relative path to the scans endpoint (content refresh). + /// </summary> + public string ScansPath { get; set; } = "/api/v1/scans"; + + /// <summary> + /// Whether runner should attempt content refresh before requesting report in content refresh mode. + /// </summary> + public bool EnableContentRefresh { get; set; } = true; + + /// <summary> + /// Maximum number of scanner retries for transient failures. + /// </summary> + public int MaxRetryAttempts { get; set; } = 3; + + /// <summary> + /// Base delay applied between retries for transient failures. + /// </summary> + public TimeSpan RetryBaseDelay { get; set; } = TimeSpan.FromSeconds(2); + + public void Validate() + { + if (string.IsNullOrWhiteSpace(ReportsPath)) + { + throw new InvalidOperationException("Runner scanner reports path must be configured."); + } + + if (string.IsNullOrWhiteSpace(ScansPath)) + { + throw new InvalidOperationException("Runner scanner scans path must be configured."); + } + + if (MaxRetryAttempts < 0) + { + throw new InvalidOperationException("Runner scanner retry attempts cannot be negative."); + } + + if (RetryBaseDelay < TimeSpan.Zero) + { + throw new InvalidOperationException("Runner scanner retry delay cannot be negative."); + } + } + } + } + + public sealed class PolicyOptions + { + /// <summary> + /// When disabled the worker skips policy run dispatch entirely. + /// </summary> + public bool Enabled { get; set; } = true; + + public DispatchOptions Dispatch { get; set; } = new(); + + public ApiOptions Api { get; set; } = new(); + + public TargetingOptions Targeting { get; set; } = new(); + + public void Validate() + { + Dispatch.Validate(); + Api.Validate(); + Targeting.Validate(); + } + + public sealed class DispatchOptions + { + /// <summary> + /// Identifier used when leasing policy run jobs. + /// </summary> + public string LeaseOwner { get; set; } = "scheduler-policy-dispatch"; + + /// <summary> + /// Number of jobs leased per polling iteration. + /// </summary> + public int BatchSize { get; set; } = 5; + + /// <summary> + /// Duration of the lease while dispatching a policy run job. 
+ /// </summary> + public TimeSpan LeaseDuration { get; set; } = TimeSpan.FromMinutes(2); + + /// <summary> + /// Delay applied when no policy jobs are available. + /// </summary> + public TimeSpan IdleDelay { get; set; } = TimeSpan.FromSeconds(5); + + /// <summary> + /// Maximum number of submission attempts before a job is marked failed. + /// </summary> + public int MaxAttempts { get; set; } = 3; + + /// <summary> + /// Base retry delay applied after a failed submission attempt. + /// </summary> + public TimeSpan RetryBackoff { get; set; } = TimeSpan.FromSeconds(15); + + public void Validate() + { + if (string.IsNullOrWhiteSpace(LeaseOwner)) + { + throw new InvalidOperationException("Policy dispatch lease owner must be configured."); + } + + if (BatchSize <= 0) + { + throw new InvalidOperationException("Policy dispatch batch size must be greater than zero."); + } + + if (LeaseDuration <= TimeSpan.Zero) + { + throw new InvalidOperationException("Policy dispatch lease duration must be greater than zero."); + } + + if (IdleDelay < TimeSpan.Zero) + { + throw new InvalidOperationException("Policy dispatch idle delay cannot be negative."); + } + + if (MaxAttempts <= 0) + { + throw new InvalidOperationException("Policy dispatch max attempts must be greater than zero."); + } + + if (RetryBackoff <= TimeSpan.Zero) + { + throw new InvalidOperationException("Policy dispatch retry backoff must be greater than zero."); + } + } + } + + public sealed class ApiOptions + { + /// <summary> + /// Base address for the Policy Engine REST API. + /// </summary> + public Uri? BaseAddress { get; set; } + + /// <summary> + /// Relative path used to trigger policy runs. Must contain the token "{policyId}". + /// </summary> + public string RunsPath { get; set; } = "/api/policy/policies/{policyId}/runs"; + + /// <summary> + /// Relative path used to trigger policy simulations. + /// </summary> + public string SimulatePath { get; set; } = "/api/policy/policies/{policyId}/simulate"; + + /// <summary> + /// Header conveying tenant context for Policy Engine requests. + /// </summary> + public string TenantHeader { get; set; } = "X-Stella-Tenant"; + + /// <summary> + /// Header used to supply idempotency keys for run submissions. + /// </summary> + public string IdempotencyHeader { get; set; } = "Idempotency-Key"; + + /// <summary> + /// Timeout for HTTP requests dispatched to the Policy Engine. 
+ /// </summary> + public TimeSpan RequestTimeout { get; set; } = TimeSpan.FromSeconds(30); + + public void Validate() + { + if (BaseAddress is null) + { + throw new InvalidOperationException("Policy API base address must be configured."); + } + + if (!BaseAddress.IsAbsoluteUri) + { + throw new InvalidOperationException("Policy API base address must be an absolute URI."); + } + + if (string.IsNullOrWhiteSpace(RunsPath)) + { + throw new InvalidOperationException("Policy API runs path must be configured."); + } + + if (string.IsNullOrWhiteSpace(SimulatePath)) + { + throw new InvalidOperationException("Policy API simulate path must be configured."); + } + + if (string.IsNullOrWhiteSpace(TenantHeader)) + { + throw new InvalidOperationException("Policy API tenant header must be configured."); + } + + if (string.IsNullOrWhiteSpace(IdempotencyHeader)) + { + throw new InvalidOperationException("Policy API idempotency header must be configured."); + } + + if (RequestTimeout <= TimeSpan.Zero) + { + throw new InvalidOperationException("Policy API request timeout must be greater than zero."); + } + } + } + + public sealed class TargetingOptions + { + /// <summary> + /// When disabled the worker skips policy delta targeting. + /// </summary> + public bool Enabled { get; set; } = true; + + /// <summary> + /// Maximum number of SBOM identifiers allowed for targeted runs before falling back to a full run. + /// </summary> + public int MaxSboms { get; set; } = 10_000; + + /// <summary> + /// Default behaviour for usage-only targeting when metadata does not specify a preference. + /// </summary> + public bool DefaultUsageOnly { get; set; } = false; + + public void Validate() + { + if (MaxSboms <= 0) + { + throw new InvalidOperationException("Policy targeting MaxSboms must be greater than zero."); + } + } + } + } + + public sealed class GraphOptions + { + /// <summary> + /// When disabled the worker skips graph job processing entirely. + /// </summary> + public bool Enabled { get; set; } = true; + + /// <summary> + /// Maximum number of graph build jobs processed per polling iteration. + /// </summary> + public int BatchSize { get; set; } = 5; + + /// <summary> + /// Polling interval applied when jobs were processed in the last iteration. + /// </summary> + public TimeSpan PollInterval { get; set; } = TimeSpan.FromSeconds(5); + + /// <summary> + /// Delay applied when no graph jobs are available. + /// </summary> + public TimeSpan IdleDelay { get; set; } = TimeSpan.FromSeconds(20); + + /// <summary> + /// Maximum number of attempts before a graph build job is marked failed. + /// </summary> + public int MaxAttempts { get; set; } = 3; + + /// <summary> + /// Backoff duration applied between retries for transient failures. + /// </summary> + public TimeSpan RetryBackoff { get; set; } = TimeSpan.FromSeconds(30); + + /// <summary> + /// Timeout applied when waiting for Cartographer responses. + /// </summary> + public TimeSpan CartographerTimeout { get; set; } = TimeSpan.FromSeconds(60); + + /// <summary> + /// Base delay between polling Cartographer for job status when asynchronous responses are returned. 
+ /// </summary> + public TimeSpan StatusPollInterval { get; set; } = TimeSpan.FromSeconds(10); + + public CartographerOptions Cartographer { get; set; } = new(); + + public SchedulerApiOptions SchedulerApi { get; set; } = new(); + + public void Validate() + { + if (BatchSize <= 0) + { + throw new InvalidOperationException("Graph batch size must be greater than zero."); + } + + if (PollInterval <= TimeSpan.Zero) + { + throw new InvalidOperationException("Graph poll interval must be greater than zero."); + } + + if (IdleDelay < TimeSpan.Zero) + { + throw new InvalidOperationException("Graph idle delay cannot be negative."); + } + + if (MaxAttempts <= 0) + { + throw new InvalidOperationException("Graph max attempts must be greater than zero."); + } + + if (RetryBackoff <= TimeSpan.Zero) + { + throw new InvalidOperationException("Graph retry backoff must be greater than zero."); + } + + if (CartographerTimeout <= TimeSpan.Zero) + { + throw new InvalidOperationException("Graph Cartographer timeout must be greater than zero."); + } + + if (StatusPollInterval <= TimeSpan.Zero) + { + throw new InvalidOperationException("Graph status poll interval must be greater than zero."); + } + + Cartographer.Validate(); + SchedulerApi.Validate(); + } + + public sealed class CartographerOptions + { + /// <summary> + /// Base address for Cartographer API requests. + /// </summary> + public Uri? BaseAddress { get; set; } + + /// <summary> + /// Relative path used to trigger graph builds. + /// </summary> + public string BuildPath { get; set; } = "/api/graphs/builds"; + + /// <summary> + /// Optional relative path used to query graph build status. Must contain the placeholder "{jobId}" when provided. + /// </summary> + public string StatusPath { get; set; } = "/api/graphs/builds/{jobId}"; + + /// <summary> + /// Relative path used to trigger graph overlay refreshes. + /// </summary> + public string OverlayPath { get; set; } = "/api/graphs/overlays"; + + /// <summary> + /// Optional relative path used to query graph overlay status. Must contain the placeholder "{jobId}" when provided. + /// </summary> + public string OverlayStatusPath { get; set; } = "/api/graphs/overlays/{jobId}"; + + /// <summary> + /// Optional header name for static API key authentication. + /// </summary> + public string? ApiKeyHeader { get; set; } + + /// <summary> + /// Optional API key value when using header-based authentication. + /// </summary> + public string? 
ApiKey { get; set; } + + public void Validate() + { + if (BuildPath is null || string.IsNullOrWhiteSpace(BuildPath)) + { + throw new InvalidOperationException("Cartographer build path must be configured."); + } + + if (string.IsNullOrWhiteSpace(StatusPath)) + { + throw new InvalidOperationException("Cartographer status path must be configured."); + } + + if (StatusPath.Contains("{jobId}", StringComparison.Ordinal) == false) + { + throw new InvalidOperationException("Cartographer status path must include '{jobId}' placeholder."); + } + + if (OverlayPath is null || string.IsNullOrWhiteSpace(OverlayPath)) + { + throw new InvalidOperationException("Cartographer overlay path must be configured."); + } + + if (string.IsNullOrWhiteSpace(OverlayStatusPath)) + { + throw new InvalidOperationException("Cartographer overlay status path must be configured."); + } + + if (OverlayStatusPath.Contains("{jobId}", StringComparison.Ordinal) == false) + { + throw new InvalidOperationException("Cartographer overlay status path must include '{jobId}' placeholder."); + } + } + } + + public sealed class SchedulerApiOptions + { + /// <summary> + /// Base address for Scheduler WebService graph endpoints. + /// </summary> + public Uri? BaseAddress { get; set; } + + /// <summary> + /// Relative path to the graph completion webhook. + /// </summary> + public string CompletionPath { get; set; } = "/graphs/hooks/completed"; + + /// <summary> + /// Optional API key header name when invoking Scheduler WebService. + /// </summary> + public string? ApiKeyHeader { get; set; } + + /// <summary> + /// Optional API key value. + /// </summary> + public string? ApiKey { get; set; } + + public void Validate() + { + if (string.IsNullOrWhiteSpace(CompletionPath)) + { + throw new InvalidOperationException("Scheduler graph completion path must be configured."); + } + } + } + } +} diff --git a/src/StellaOps.Scheduler.Worker/Planning/PlannerBackgroundService.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Planning/PlannerBackgroundService.cs similarity index 100% rename from src/StellaOps.Scheduler.Worker/Planning/PlannerBackgroundService.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Planning/PlannerBackgroundService.cs diff --git a/src/StellaOps.Scheduler.Worker/Planning/PlannerExecutionResult.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Planning/PlannerExecutionResult.cs similarity index 100% rename from src/StellaOps.Scheduler.Worker/Planning/PlannerExecutionResult.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Planning/PlannerExecutionResult.cs diff --git a/src/StellaOps.Scheduler.Worker/Planning/PlannerExecutionService.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Planning/PlannerExecutionService.cs similarity index 100% rename from src/StellaOps.Scheduler.Worker/Planning/PlannerExecutionService.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Planning/PlannerExecutionService.cs diff --git a/src/StellaOps.Scheduler.Worker/Planning/PlannerQueueDispatchService.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Planning/PlannerQueueDispatchService.cs similarity index 100% rename from src/StellaOps.Scheduler.Worker/Planning/PlannerQueueDispatchService.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Planning/PlannerQueueDispatchService.cs diff --git a/src/StellaOps.Scheduler.Worker/Planning/PlannerQueueDispatcherBackgroundService.cs 
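// --- Illustrative aside (editor's sketch, not part of this patch) ---
// SchedulerWorkerOptions above is validated eagerly via Validate(); this is a minimal sketch of
// constructing and validating the options tree. The URIs are placeholder assumptions, and the
// worker's real configuration binding and registration code is not shown in this hunk.
using System;
using StellaOps.Scheduler.Worker.Options;

internal static class SchedulerWorkerOptionsSketch
{
    public static SchedulerWorkerOptions BuildSample()
    {
        var options = new SchedulerWorkerOptions
        {
            // Nested option instances are pre-created, so member initializers suffice.
            Policy = { Api = { BaseAddress = new Uri("https://policy-engine.example.internal") } },
            Runner = { Scanner = { BaseAddress = new Uri("https://scanner.example.internal") } }
        };

        // Throws InvalidOperationException when a required value is missing or invalid,
        // e.g. an unset Policy.Api.BaseAddress, a zero Planner.BatchSize, or a Cartographer
        // status path without the "{jobId}" placeholder (the defaults above already satisfy it).
        options.Validate();
        return options;
    }
}
// --- End of illustrative aside ---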
b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Planning/PlannerQueueDispatcherBackgroundService.cs similarity index 100% rename from src/StellaOps.Scheduler.Worker/Planning/PlannerQueueDispatcherBackgroundService.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Planning/PlannerQueueDispatcherBackgroundService.cs diff --git a/src/StellaOps.Scheduler.Worker/Policy/HttpPolicyRunClient.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Policy/HttpPolicyRunClient.cs similarity index 97% rename from src/StellaOps.Scheduler.Worker/Policy/HttpPolicyRunClient.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Policy/HttpPolicyRunClient.cs index 1ce1d217..1a8bc54a 100644 --- a/src/StellaOps.Scheduler.Worker/Policy/HttpPolicyRunClient.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Policy/HttpPolicyRunClient.cs @@ -1,154 +1,154 @@ -using System; -using System.Net.Http; -using System.Net.Http.Json; -using System.Text.Json; -using System.Text.Json.Serialization; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Scheduler.Models; -using StellaOps.Scheduler.Worker.Options; - -namespace StellaOps.Scheduler.Worker.Policy; - -internal sealed class HttpPolicyRunClient : IPolicyRunClient -{ - private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) - { - DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull - }; - - private readonly HttpClient _httpClient; - private readonly IOptions<SchedulerWorkerOptions> _options; - private readonly ILogger<HttpPolicyRunClient> _logger; - - public HttpPolicyRunClient( - HttpClient httpClient, - IOptions<SchedulerWorkerOptions> options, - ILogger<HttpPolicyRunClient> logger) - { - _httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient)); - _options = options ?? throw new ArgumentNullException(nameof(options)); - _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - } - - public async Task<PolicyRunSubmissionResult> SubmitAsync(PolicyRunJob job, PolicyRunRequest request, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(job); - ArgumentNullException.ThrowIfNull(request); - - var apiOptions = _options.Value.Policy.Api; - ConfigureHttpClient(apiOptions); - - var path = ResolvePath(apiOptions, job.PolicyId, request.Mode); - using var message = new HttpRequestMessage(HttpMethod.Post, path) - { - Content = JsonContent.Create(request, options: SerializerOptions) - }; - - AddHeaders(message, apiOptions, job, request); - - using var timeoutCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken); - timeoutCts.CancelAfter(apiOptions.RequestTimeout); - - try - { - using var response = await _httpClient.SendAsync(message, HttpCompletionOption.ResponseHeadersRead, timeoutCts.Token) - .ConfigureAwait(false); - - if (!response.IsSuccessStatusCode) - { - var errorPayload = await SafeReadAsync(response, timeoutCts.Token).ConfigureAwait(false); - _logger.LogWarning( - "Policy run submission for policy {PolicyId} failed with status {Status}.", - job.PolicyId, - (int)response.StatusCode); - return PolicyRunSubmissionResult.Failed(errorPayload); - } - - if (request.Mode == PolicyRunMode.Simulate) - { - // Response body contains diff summary; callers handle separately if needed. - return PolicyRunSubmissionResult.Succeeded(job.RunId ?? 
request.RunId, request.QueuedAt); - } - - var payload = await response.Content.ReadFromJsonAsync<PolicyRunSubmitResponse>(SerializerOptions, timeoutCts.Token) - .ConfigureAwait(false); - - var runId = payload?.RunId ?? request.RunId ?? job.RunId; - var queuedAt = payload?.QueuedAt ?? request.QueuedAt; - return PolicyRunSubmissionResult.Succeeded(runId, queuedAt); - } - catch (OperationCanceledException) when (!cancellationToken.IsCancellationRequested) - { - _logger.LogWarning( - "Policy run submission for policy {PolicyId} timed out after {Timeout}.", - job.PolicyId, - apiOptions.RequestTimeout); - return PolicyRunSubmissionResult.Failed("Request timed out."); - } - catch (Exception ex) - { - _logger.LogWarning(ex, "Policy run submission for policy {PolicyId} failed with exception.", job.PolicyId); - return PolicyRunSubmissionResult.Failed(ex.Message); - } - } - - private void ConfigureHttpClient(SchedulerWorkerOptions.PolicyOptions.ApiOptions apiOptions) - { - if (apiOptions.BaseAddress is not null && _httpClient.BaseAddress != apiOptions.BaseAddress) - { - _httpClient.BaseAddress = apiOptions.BaseAddress; - } - - _httpClient.Timeout = Timeout.InfiniteTimeSpan; - } - - private static string ResolvePath(SchedulerWorkerOptions.PolicyOptions.ApiOptions apiOptions, string policyId, PolicyRunMode mode) - { - var placeholder = "{policyId}"; - var template = mode == PolicyRunMode.Simulate ? apiOptions.SimulatePath : apiOptions.RunsPath; - if (!template.Contains(placeholder, StringComparison.Ordinal)) - { - throw new InvalidOperationException($"Policy API path '{template}' does not contain required placeholder '{placeholder}'."); - } - - return template.Replace(placeholder, Uri.EscapeDataString(policyId), StringComparison.Ordinal); - } - - private static void AddHeaders( - HttpRequestMessage message, - SchedulerWorkerOptions.PolicyOptions.ApiOptions apiOptions, - PolicyRunJob job, - PolicyRunRequest request) - { - if (!string.IsNullOrWhiteSpace(apiOptions.TenantHeader)) - { - message.Headers.TryAddWithoutValidation(apiOptions.TenantHeader, job.TenantId); - } - - if (!string.IsNullOrWhiteSpace(apiOptions.IdempotencyHeader)) - { - var key = request.RunId ?? job.RunId ?? job.Id; - message.Headers.TryAddWithoutValidation(apiOptions.IdempotencyHeader, key); - } - } - - private static async Task<string?> SafeReadAsync(HttpResponseMessage response, CancellationToken cancellationToken) - { - try - { - return await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - } - catch - { - return null; - } - } - - private sealed record PolicyRunSubmitResponse( - string? RunId, - DateTimeOffset? QueuedAt, - string? 
Status); -} +using System; +using System.Net.Http; +using System.Net.Http.Json; +using System.Text.Json; +using System.Text.Json.Serialization; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Scheduler.Models; +using StellaOps.Scheduler.Worker.Options; + +namespace StellaOps.Scheduler.Worker.Policy; + +internal sealed class HttpPolicyRunClient : IPolicyRunClient +{ + private static readonly JsonSerializerOptions SerializerOptions = new(JsonSerializerDefaults.Web) + { + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull + }; + + private readonly HttpClient _httpClient; + private readonly IOptions<SchedulerWorkerOptions> _options; + private readonly ILogger<HttpPolicyRunClient> _logger; + + public HttpPolicyRunClient( + HttpClient httpClient, + IOptions<SchedulerWorkerOptions> options, + ILogger<HttpPolicyRunClient> logger) + { + _httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient)); + _options = options ?? throw new ArgumentNullException(nameof(options)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public async Task<PolicyRunSubmissionResult> SubmitAsync(PolicyRunJob job, PolicyRunRequest request, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(job); + ArgumentNullException.ThrowIfNull(request); + + var apiOptions = _options.Value.Policy.Api; + ConfigureHttpClient(apiOptions); + + var path = ResolvePath(apiOptions, job.PolicyId, request.Mode); + using var message = new HttpRequestMessage(HttpMethod.Post, path) + { + Content = JsonContent.Create(request, options: SerializerOptions) + }; + + AddHeaders(message, apiOptions, job, request); + + using var timeoutCts = CancellationTokenSource.CreateLinkedTokenSource(cancellationToken); + timeoutCts.CancelAfter(apiOptions.RequestTimeout); + + try + { + using var response = await _httpClient.SendAsync(message, HttpCompletionOption.ResponseHeadersRead, timeoutCts.Token) + .ConfigureAwait(false); + + if (!response.IsSuccessStatusCode) + { + var errorPayload = await SafeReadAsync(response, timeoutCts.Token).ConfigureAwait(false); + _logger.LogWarning( + "Policy run submission for policy {PolicyId} failed with status {Status}.", + job.PolicyId, + (int)response.StatusCode); + return PolicyRunSubmissionResult.Failed(errorPayload); + } + + if (request.Mode == PolicyRunMode.Simulate) + { + // Response body contains diff summary; callers handle separately if needed. + return PolicyRunSubmissionResult.Succeeded(job.RunId ?? request.RunId, request.QueuedAt); + } + + var payload = await response.Content.ReadFromJsonAsync<PolicyRunSubmitResponse>(SerializerOptions, timeoutCts.Token) + .ConfigureAwait(false); + + var runId = payload?.RunId ?? request.RunId ?? job.RunId; + var queuedAt = payload?.QueuedAt ?? 
request.QueuedAt; + return PolicyRunSubmissionResult.Succeeded(runId, queuedAt); + } + catch (OperationCanceledException) when (!cancellationToken.IsCancellationRequested) + { + _logger.LogWarning( + "Policy run submission for policy {PolicyId} timed out after {Timeout}.", + job.PolicyId, + apiOptions.RequestTimeout); + return PolicyRunSubmissionResult.Failed("Request timed out."); + } + catch (Exception ex) + { + _logger.LogWarning(ex, "Policy run submission for policy {PolicyId} failed with exception.", job.PolicyId); + return PolicyRunSubmissionResult.Failed(ex.Message); + } + } + + private void ConfigureHttpClient(SchedulerWorkerOptions.PolicyOptions.ApiOptions apiOptions) + { + if (apiOptions.BaseAddress is not null && _httpClient.BaseAddress != apiOptions.BaseAddress) + { + _httpClient.BaseAddress = apiOptions.BaseAddress; + } + + _httpClient.Timeout = Timeout.InfiniteTimeSpan; + } + + private static string ResolvePath(SchedulerWorkerOptions.PolicyOptions.ApiOptions apiOptions, string policyId, PolicyRunMode mode) + { + var placeholder = "{policyId}"; + var template = mode == PolicyRunMode.Simulate ? apiOptions.SimulatePath : apiOptions.RunsPath; + if (!template.Contains(placeholder, StringComparison.Ordinal)) + { + throw new InvalidOperationException($"Policy API path '{template}' does not contain required placeholder '{placeholder}'."); + } + + return template.Replace(placeholder, Uri.EscapeDataString(policyId), StringComparison.Ordinal); + } + + private static void AddHeaders( + HttpRequestMessage message, + SchedulerWorkerOptions.PolicyOptions.ApiOptions apiOptions, + PolicyRunJob job, + PolicyRunRequest request) + { + if (!string.IsNullOrWhiteSpace(apiOptions.TenantHeader)) + { + message.Headers.TryAddWithoutValidation(apiOptions.TenantHeader, job.TenantId); + } + + if (!string.IsNullOrWhiteSpace(apiOptions.IdempotencyHeader)) + { + var key = request.RunId ?? job.RunId ?? job.Id; + message.Headers.TryAddWithoutValidation(apiOptions.IdempotencyHeader, key); + } + } + + private static async Task<string?> SafeReadAsync(HttpResponseMessage response, CancellationToken cancellationToken) + { + try + { + return await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + } + catch + { + return null; + } + } + + private sealed record PolicyRunSubmitResponse( + string? RunId, + DateTimeOffset? QueuedAt, + string? 
Status); +} diff --git a/src/StellaOps.Scheduler.Worker/Policy/IPolicyRunClient.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Policy/IPolicyRunClient.cs similarity index 96% rename from src/StellaOps.Scheduler.Worker/Policy/IPolicyRunClient.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Policy/IPolicyRunClient.cs index eb0a8188..e635bab4 100644 --- a/src/StellaOps.Scheduler.Worker/Policy/IPolicyRunClient.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Policy/IPolicyRunClient.cs @@ -1,10 +1,10 @@ -using System.Threading; -using System.Threading.Tasks; -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.Worker.Policy; - -internal interface IPolicyRunClient -{ - Task<PolicyRunSubmissionResult> SubmitAsync(PolicyRunJob job, PolicyRunRequest request, CancellationToken cancellationToken); -} +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.Worker.Policy; + +internal interface IPolicyRunClient +{ + Task<PolicyRunSubmissionResult> SubmitAsync(PolicyRunJob job, PolicyRunRequest request, CancellationToken cancellationToken); +} diff --git a/src/StellaOps.Scheduler.Worker/Policy/IPolicyRunTargetingService.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Policy/IPolicyRunTargetingService.cs similarity index 96% rename from src/StellaOps.Scheduler.Worker/Policy/IPolicyRunTargetingService.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Policy/IPolicyRunTargetingService.cs index f1824f28..aa38dc84 100644 --- a/src/StellaOps.Scheduler.Worker/Policy/IPolicyRunTargetingService.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Policy/IPolicyRunTargetingService.cs @@ -1,10 +1,10 @@ -using System.Threading; -using System.Threading.Tasks; -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.Worker.Policy; - -internal interface IPolicyRunTargetingService -{ - Task<PolicyRunTargetingResult> EnsureTargetsAsync(PolicyRunJob job, CancellationToken cancellationToken); -} +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.Worker.Policy; + +internal interface IPolicyRunTargetingService +{ + Task<PolicyRunTargetingResult> EnsureTargetsAsync(PolicyRunJob job, CancellationToken cancellationToken); +} diff --git a/src/StellaOps.Scheduler.Worker/Policy/PolicyRunDispatchBackgroundService.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Policy/PolicyRunDispatchBackgroundService.cs similarity index 97% rename from src/StellaOps.Scheduler.Worker/Policy/PolicyRunDispatchBackgroundService.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Policy/PolicyRunDispatchBackgroundService.cs index 742e9fdf..6a263534 100644 --- a/src/StellaOps.Scheduler.Worker/Policy/PolicyRunDispatchBackgroundService.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Policy/PolicyRunDispatchBackgroundService.cs @@ -1,188 +1,188 @@ -using System; -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Hosting; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Scheduler.Models; -using StellaOps.Scheduler.Storage.Mongo.Repositories; -using StellaOps.Scheduler.Worker.Options; - -namespace StellaOps.Scheduler.Worker.Policy; - -internal sealed class PolicyRunDispatchBackgroundService : BackgroundService -{ - private readonly IPolicyRunJobRepository _repository; - 
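// --- Illustrative aside (editor's sketch, not part of this patch) ---
// HttpPolicyRunClient above follows the typed-HttpClient pattern: base address, tenant and
// idempotency headers, and the request timeout are all applied per request from
// SchedulerWorkerOptions.Policy.Api. A registration sketch, assuming DI wiring happens inside
// this assembly (both types are internal); the actual registration code is not in this hunk.
using Microsoft.Extensions.DependencyInjection;

namespace StellaOps.Scheduler.Worker.Policy
{
    internal static class PolicyRunClientRegistrationSketch
    {
        public static IServiceCollection AddPolicyRunClientSketch(this IServiceCollection services)
        {
            // HttpClient.Timeout is left at Timeout.InfiniteTimeSpan by the client itself;
            // per-request cancellation is handled via the linked CancellationTokenSource in SubmitAsync.
            services.AddHttpClient<IPolicyRunClient, HttpPolicyRunClient>();
            return services;
        }
    }
}
// --- End of illustrative aside ---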
private readonly PolicyRunExecutionService _executionService; - private readonly IOptions<SchedulerWorkerOptions> _options; - private readonly TimeProvider _timeProvider; - private readonly ILogger<PolicyRunDispatchBackgroundService> _logger; - private readonly string _leaseOwner; - - public PolicyRunDispatchBackgroundService( - IPolicyRunJobRepository repository, - PolicyRunExecutionService executionService, - IOptions<SchedulerWorkerOptions> options, - TimeProvider? timeProvider, - ILogger<PolicyRunDispatchBackgroundService> logger) - { - _repository = repository ?? throw new ArgumentNullException(nameof(repository)); - _executionService = executionService ?? throw new ArgumentNullException(nameof(executionService)); - _options = options ?? throw new ArgumentNullException(nameof(options)); - _timeProvider = timeProvider ?? TimeProvider.System; - _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - _leaseOwner = options.Value.Policy.Dispatch.LeaseOwner; - } - - protected override async Task ExecuteAsync(CancellationToken stoppingToken) - { - _logger.LogInformation("Policy run dispatcher loop started with lease owner {LeaseOwner}.", _leaseOwner); - - while (!stoppingToken.IsCancellationRequested) - { - try - { - var policyOptions = _options.Value.Policy; - if (!policyOptions.Enabled) - { - await DelayAsync(policyOptions.Dispatch.IdleDelay, stoppingToken).ConfigureAwait(false); - continue; - } - - var batch = await LeaseBatchAsync(policyOptions.Dispatch, stoppingToken).ConfigureAwait(false); - if (batch.Count == 0) - { - await DelayAsync(policyOptions.Dispatch.IdleDelay, stoppingToken).ConfigureAwait(false); - continue; - } - - foreach (var job in batch) - { - try - { - var result = await _executionService.ExecuteAsync(job, stoppingToken).ConfigureAwait(false); - LogResult(result); - } - catch (OperationCanceledException) - { - throw; - } - catch (Exception ex) - { - _logger.LogError(ex, "Unhandled exception while processing policy run job {JobId}.", job.Id); - } - } - } - catch (OperationCanceledException) - { - break; - } - catch (Exception ex) - { - _logger.LogError(ex, "Policy run dispatcher encountered an error; backing off."); - await DelayAsync(TimeSpan.FromSeconds(5), stoppingToken).ConfigureAwait(false); - } - } - - _logger.LogInformation("Policy run dispatcher loop stopping."); - } - - private async Task<IReadOnlyList<PolicyRunJob>> LeaseBatchAsync( - SchedulerWorkerOptions.PolicyOptions.DispatchOptions dispatchOptions, - CancellationToken cancellationToken) - { - var jobs = new List<PolicyRunJob>(dispatchOptions.BatchSize); - for (var i = 0; i < dispatchOptions.BatchSize; i++) - { - var now = _timeProvider.GetUtcNow(); - PolicyRunJob? 
leased; - try - { - leased = await _repository - .LeaseAsync(_leaseOwner, now, dispatchOptions.LeaseDuration, dispatchOptions.MaxAttempts, cancellationToken: cancellationToken) - .ConfigureAwait(false); - } - catch (OperationCanceledException) - { - throw; - } - catch (Exception ex) - { - _logger.LogError(ex, "Failed to lease policy run job on attempt {Attempt}.", i + 1); - break; - } - - if (leased is null) - { - break; - } - - jobs.Add(leased); - } - - return jobs; - } - - private void LogResult(PolicyRunExecutionResult result) - { - switch (result.Type) - { - case PolicyRunExecutionResultType.Submitted: - _logger.LogInformation( - "Policy run job {JobId} submitted for tenant {TenantId} policy {PolicyId} (runId={RunId}).", - result.UpdatedJob.Id, - result.UpdatedJob.TenantId, - result.UpdatedJob.PolicyId, - result.UpdatedJob.RunId); - break; - case PolicyRunExecutionResultType.Retrying: - _logger.LogWarning( - "Policy run job {JobId} will retry for tenant {TenantId} policy {PolicyId}: {Error}.", - result.UpdatedJob.Id, - result.UpdatedJob.TenantId, - result.UpdatedJob.PolicyId, - result.Error); - break; - case PolicyRunExecutionResultType.Failed: - _logger.LogError( - "Policy run job {JobId} failed permanently for tenant {TenantId} policy {PolicyId}: {Error}.", - result.UpdatedJob.Id, - result.UpdatedJob.TenantId, - result.UpdatedJob.PolicyId, - result.Error); - break; - case PolicyRunExecutionResultType.Cancelled: - _logger.LogInformation( - "Policy run job {JobId} cancelled for tenant {TenantId} policy {PolicyId}.", - result.UpdatedJob.Id, - result.UpdatedJob.TenantId, - result.UpdatedJob.PolicyId); - break; - case PolicyRunExecutionResultType.NoOp: - _logger.LogInformation( - "Policy run job {JobId} completed without submission for tenant {TenantId} policy {PolicyId} (reason={Reason}).", - result.UpdatedJob.Id, - result.UpdatedJob.TenantId, - result.UpdatedJob.PolicyId, - result.Error ?? "none"); - break; - } - } - - private async Task DelayAsync(TimeSpan delay, CancellationToken cancellationToken) - { - if (delay <= TimeSpan.Zero) - { - return; - } - - try - { - await Task.Delay(delay, cancellationToken).ConfigureAwait(false); - } - catch (TaskCanceledException) - { - } - } -} +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Scheduler.Models; +using StellaOps.Scheduler.Storage.Mongo.Repositories; +using StellaOps.Scheduler.Worker.Options; + +namespace StellaOps.Scheduler.Worker.Policy; + +internal sealed class PolicyRunDispatchBackgroundService : BackgroundService +{ + private readonly IPolicyRunJobRepository _repository; + private readonly PolicyRunExecutionService _executionService; + private readonly IOptions<SchedulerWorkerOptions> _options; + private readonly TimeProvider _timeProvider; + private readonly ILogger<PolicyRunDispatchBackgroundService> _logger; + private readonly string _leaseOwner; + + public PolicyRunDispatchBackgroundService( + IPolicyRunJobRepository repository, + PolicyRunExecutionService executionService, + IOptions<SchedulerWorkerOptions> options, + TimeProvider? timeProvider, + ILogger<PolicyRunDispatchBackgroundService> logger) + { + _repository = repository ?? throw new ArgumentNullException(nameof(repository)); + _executionService = executionService ?? throw new ArgumentNullException(nameof(executionService)); + _options = options ?? 
throw new ArgumentNullException(nameof(options)); + _timeProvider = timeProvider ?? TimeProvider.System; + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + _leaseOwner = options.Value.Policy.Dispatch.LeaseOwner; + } + + protected override async Task ExecuteAsync(CancellationToken stoppingToken) + { + _logger.LogInformation("Policy run dispatcher loop started with lease owner {LeaseOwner}.", _leaseOwner); + + while (!stoppingToken.IsCancellationRequested) + { + try + { + var policyOptions = _options.Value.Policy; + if (!policyOptions.Enabled) + { + await DelayAsync(policyOptions.Dispatch.IdleDelay, stoppingToken).ConfigureAwait(false); + continue; + } + + var batch = await LeaseBatchAsync(policyOptions.Dispatch, stoppingToken).ConfigureAwait(false); + if (batch.Count == 0) + { + await DelayAsync(policyOptions.Dispatch.IdleDelay, stoppingToken).ConfigureAwait(false); + continue; + } + + foreach (var job in batch) + { + try + { + var result = await _executionService.ExecuteAsync(job, stoppingToken).ConfigureAwait(false); + LogResult(result); + } + catch (OperationCanceledException) + { + throw; + } + catch (Exception ex) + { + _logger.LogError(ex, "Unhandled exception while processing policy run job {JobId}.", job.Id); + } + } + } + catch (OperationCanceledException) + { + break; + } + catch (Exception ex) + { + _logger.LogError(ex, "Policy run dispatcher encountered an error; backing off."); + await DelayAsync(TimeSpan.FromSeconds(5), stoppingToken).ConfigureAwait(false); + } + } + + _logger.LogInformation("Policy run dispatcher loop stopping."); + } + + private async Task<IReadOnlyList<PolicyRunJob>> LeaseBatchAsync( + SchedulerWorkerOptions.PolicyOptions.DispatchOptions dispatchOptions, + CancellationToken cancellationToken) + { + var jobs = new List<PolicyRunJob>(dispatchOptions.BatchSize); + for (var i = 0; i < dispatchOptions.BatchSize; i++) + { + var now = _timeProvider.GetUtcNow(); + PolicyRunJob? 
leased; + try + { + leased = await _repository + .LeaseAsync(_leaseOwner, now, dispatchOptions.LeaseDuration, dispatchOptions.MaxAttempts, cancellationToken: cancellationToken) + .ConfigureAwait(false); + } + catch (OperationCanceledException) + { + throw; + } + catch (Exception ex) + { + _logger.LogError(ex, "Failed to lease policy run job on attempt {Attempt}.", i + 1); + break; + } + + if (leased is null) + { + break; + } + + jobs.Add(leased); + } + + return jobs; + } + + private void LogResult(PolicyRunExecutionResult result) + { + switch (result.Type) + { + case PolicyRunExecutionResultType.Submitted: + _logger.LogInformation( + "Policy run job {JobId} submitted for tenant {TenantId} policy {PolicyId} (runId={RunId}).", + result.UpdatedJob.Id, + result.UpdatedJob.TenantId, + result.UpdatedJob.PolicyId, + result.UpdatedJob.RunId); + break; + case PolicyRunExecutionResultType.Retrying: + _logger.LogWarning( + "Policy run job {JobId} will retry for tenant {TenantId} policy {PolicyId}: {Error}.", + result.UpdatedJob.Id, + result.UpdatedJob.TenantId, + result.UpdatedJob.PolicyId, + result.Error); + break; + case PolicyRunExecutionResultType.Failed: + _logger.LogError( + "Policy run job {JobId} failed permanently for tenant {TenantId} policy {PolicyId}: {Error}.", + result.UpdatedJob.Id, + result.UpdatedJob.TenantId, + result.UpdatedJob.PolicyId, + result.Error); + break; + case PolicyRunExecutionResultType.Cancelled: + _logger.LogInformation( + "Policy run job {JobId} cancelled for tenant {TenantId} policy {PolicyId}.", + result.UpdatedJob.Id, + result.UpdatedJob.TenantId, + result.UpdatedJob.PolicyId); + break; + case PolicyRunExecutionResultType.NoOp: + _logger.LogInformation( + "Policy run job {JobId} completed without submission for tenant {TenantId} policy {PolicyId} (reason={Reason}).", + result.UpdatedJob.Id, + result.UpdatedJob.TenantId, + result.UpdatedJob.PolicyId, + result.Error ?? "none"); + break; + } + } + + private async Task DelayAsync(TimeSpan delay, CancellationToken cancellationToken) + { + if (delay <= TimeSpan.Zero) + { + return; + } + + try + { + await Task.Delay(delay, cancellationToken).ConfigureAwait(false); + } + catch (TaskCanceledException) + { + } + } +} diff --git a/src/StellaOps.Scheduler.Worker/Policy/PolicyRunExecutionResult.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Policy/PolicyRunExecutionResult.cs similarity index 97% rename from src/StellaOps.Scheduler.Worker/Policy/PolicyRunExecutionResult.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Policy/PolicyRunExecutionResult.cs index eacfcc86..436674a7 100644 --- a/src/StellaOps.Scheduler.Worker/Policy/PolicyRunExecutionResult.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Policy/PolicyRunExecutionResult.cs @@ -1,33 +1,33 @@ -namespace StellaOps.Scheduler.Worker.Policy; - -using StellaOps.Scheduler.Models; - -internal enum PolicyRunExecutionResultType -{ - Submitted, - Retrying, - Failed, - Cancelled, - NoOp -} - -internal readonly record struct PolicyRunExecutionResult( - PolicyRunExecutionResultType Type, - PolicyRunJob UpdatedJob, - string? Error) -{ - public static PolicyRunExecutionResult Submitted(PolicyRunJob job) - => new(PolicyRunExecutionResultType.Submitted, job, null); - - public static PolicyRunExecutionResult Retrying(PolicyRunJob job, string? error) - => new(PolicyRunExecutionResultType.Retrying, job, error); - - public static PolicyRunExecutionResult Failed(PolicyRunJob job, string? 
error) - => new(PolicyRunExecutionResultType.Failed, job, error); - - public static PolicyRunExecutionResult Cancelled(PolicyRunJob job) - => new(PolicyRunExecutionResultType.Cancelled, job, null); - - public static PolicyRunExecutionResult NoOp(PolicyRunJob job, string? reason = null) - => new(PolicyRunExecutionResultType.NoOp, job, reason); -} +namespace StellaOps.Scheduler.Worker.Policy; + +using StellaOps.Scheduler.Models; + +internal enum PolicyRunExecutionResultType +{ + Submitted, + Retrying, + Failed, + Cancelled, + NoOp +} + +internal readonly record struct PolicyRunExecutionResult( + PolicyRunExecutionResultType Type, + PolicyRunJob UpdatedJob, + string? Error) +{ + public static PolicyRunExecutionResult Submitted(PolicyRunJob job) + => new(PolicyRunExecutionResultType.Submitted, job, null); + + public static PolicyRunExecutionResult Retrying(PolicyRunJob job, string? error) + => new(PolicyRunExecutionResultType.Retrying, job, error); + + public static PolicyRunExecutionResult Failed(PolicyRunJob job, string? error) + => new(PolicyRunExecutionResultType.Failed, job, error); + + public static PolicyRunExecutionResult Cancelled(PolicyRunJob job) + => new(PolicyRunExecutionResultType.Cancelled, job, null); + + public static PolicyRunExecutionResult NoOp(PolicyRunJob job, string? reason = null) + => new(PolicyRunExecutionResultType.NoOp, job, reason); +} diff --git a/src/StellaOps.Scheduler.Worker/Policy/PolicyRunExecutionService.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Policy/PolicyRunExecutionService.cs similarity index 97% rename from src/StellaOps.Scheduler.Worker/Policy/PolicyRunExecutionService.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Policy/PolicyRunExecutionService.cs index ce487553..7378906a 100644 --- a/src/StellaOps.Scheduler.Worker/Policy/PolicyRunExecutionService.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Policy/PolicyRunExecutionService.cs @@ -1,248 +1,248 @@ -using System; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Scheduler.Models; -using StellaOps.Scheduler.Storage.Mongo.Repositories; -using StellaOps.Scheduler.Worker.Observability; -using StellaOps.Scheduler.Worker.Options; - -namespace StellaOps.Scheduler.Worker.Policy; - -internal sealed class PolicyRunExecutionService -{ - private readonly IPolicyRunJobRepository _repository; - private readonly IPolicyRunClient _client; - private readonly IOptions<SchedulerWorkerOptions> _options; - private readonly TimeProvider _timeProvider; - private readonly SchedulerWorkerMetrics _metrics; - private readonly IPolicyRunTargetingService _targetingService; - private readonly ILogger<PolicyRunExecutionService> _logger; - - public PolicyRunExecutionService( - IPolicyRunJobRepository repository, - IPolicyRunClient client, - IOptions<SchedulerWorkerOptions> options, - TimeProvider? timeProvider, - SchedulerWorkerMetrics metrics, - IPolicyRunTargetingService targetingService, - ILogger<PolicyRunExecutionService> logger) - { - _repository = repository ?? throw new ArgumentNullException(nameof(repository)); - _client = client ?? throw new ArgumentNullException(nameof(client)); - _options = options ?? throw new ArgumentNullException(nameof(options)); - _timeProvider = timeProvider ?? TimeProvider.System; - _metrics = metrics ?? throw new ArgumentNullException(nameof(metrics)); - _targetingService = targetingService ?? 
throw new ArgumentNullException(nameof(targetingService)); - _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - } - - public async Task<PolicyRunExecutionResult> ExecuteAsync(PolicyRunJob job, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(job); - cancellationToken.ThrowIfCancellationRequested(); - - if (job.CancellationRequested) - { - var cancelledAt = _timeProvider.GetUtcNow(); - var cancelled = job with - { - Status = PolicyRunJobStatus.Cancelled, - CancelledAt = cancelledAt, - UpdatedAt = cancelledAt, - LeaseOwner = null, - LeaseExpiresAt = null, - AvailableAt = cancelledAt - }; - - var replaced = await _repository.ReplaceAsync(cancelled, job.LeaseOwner, cancellationToken: cancellationToken).ConfigureAwait(false); - if (!replaced) - { - _logger.LogWarning("Failed to update cancelled policy run job {JobId}.", job.Id); - } - - _metrics.RecordPolicyRunEvent( - cancelled.TenantId, - cancelled.PolicyId, - cancelled.Mode, - "cancelled", - reason: cancelled.CancellationReason); - _logger.LogInformation( - "Policy run job {JobId} cancelled (tenant={TenantId}, policy={PolicyId}, runId={RunId}).", - cancelled.Id, - cancelled.TenantId, - cancelled.PolicyId, - cancelled.RunId ?? "(pending)"); - - return PolicyRunExecutionResult.Cancelled(cancelled); - } - - var targeting = await _targetingService - .EnsureTargetsAsync(job, cancellationToken) - .ConfigureAwait(false); - - if (targeting.Status == PolicyRunTargetingStatus.NoWork) - { - var completionTime = _timeProvider.GetUtcNow(); - var completed = targeting.Job with - { - Status = PolicyRunJobStatus.Completed, - CompletedAt = completionTime, - UpdatedAt = completionTime, - LeaseOwner = null, - LeaseExpiresAt = null, - AvailableAt = completionTime, - LastError = null - }; - - var replaced = await _repository.ReplaceAsync( - completed, - job.LeaseOwner, - cancellationToken: cancellationToken) - .ConfigureAwait(false); - - if (!replaced) - { - _logger.LogWarning("Failed to persist no-work completion for policy run job {JobId}.", job.Id); - } - - var latency = CalculateLatency(job, completionTime); - _metrics.RecordPolicyRunEvent( - completed.TenantId, - completed.PolicyId, - completed.Mode, - "no_work", - latency, - targeting.Reason); - _logger.LogInformation( - "Policy run job {JobId} completed without submission (reason={Reason}).", - completed.Id, - targeting.Reason ?? "none"); - - return PolicyRunExecutionResult.NoOp(completed, targeting.Reason); - } - - job = targeting.Job; - var now = _timeProvider.GetUtcNow(); - var request = job.ToPolicyRunRequest(now); - var submission = await _client.SubmitAsync(job, request, cancellationToken).ConfigureAwait(false); - var dispatchOptions = _options.Value.Policy.Dispatch; - var attemptCount = job.AttemptCount + 1; - - if (submission.Success) - { - var updated = job with - { - Status = PolicyRunJobStatus.Submitted, - RunId = submission.RunId ?? job.RunId, - SubmittedAt = submission.QueuedAt ?? 
now, - UpdatedAt = now, - AttemptCount = attemptCount, - LastAttemptAt = now, - LastError = null, - LeaseOwner = null, - LeaseExpiresAt = null, - AvailableAt = now - }; - - var replaced = await _repository.ReplaceAsync(updated, job.LeaseOwner, cancellationToken: cancellationToken).ConfigureAwait(false); - if (!replaced) - { - _logger.LogWarning("Failed to persist submitted policy run job {JobId}.", job.Id); - } - - var latency = CalculateLatency(job, now); - _metrics.RecordPolicyRunEvent( - updated.TenantId, - updated.PolicyId, - updated.Mode, - "submitted", - latency); - _logger.LogInformation( - "Policy run job {JobId} submitted (tenant={TenantId}, policy={PolicyId}, runId={RunId}, attempts={Attempts}).", - updated.Id, - updated.TenantId, - updated.PolicyId, - updated.RunId ?? "(pending)", - attemptCount); - - return PolicyRunExecutionResult.Submitted(updated); - } - - var nextStatus = attemptCount >= dispatchOptions.MaxAttempts - ? PolicyRunJobStatus.Failed - : PolicyRunJobStatus.Pending; - var nextAvailable = nextStatus == PolicyRunJobStatus.Pending - ? now.Add(dispatchOptions.RetryBackoff) - : now; - - var failedJob = job with - { - Status = nextStatus, - AttemptCount = attemptCount, - LastAttemptAt = now, - LastError = submission.Error, - LeaseOwner = null, - LeaseExpiresAt = null, - UpdatedAt = now, - AvailableAt = nextAvailable - }; - - var updateSuccess = await _repository.ReplaceAsync(failedJob, job.LeaseOwner, cancellationToken: cancellationToken).ConfigureAwait(false); - if (!updateSuccess) - { - _logger.LogWarning("Failed to update policy run job {JobId} after submission failure.", job.Id); - } - - var latencyForFailure = CalculateLatency(job, now); - var reason = string.IsNullOrWhiteSpace(submission.Error) ? null : submission.Error; - - if (nextStatus == PolicyRunJobStatus.Failed) - { - _metrics.RecordPolicyRunEvent( - failedJob.TenantId, - failedJob.PolicyId, - failedJob.Mode, - "failed", - latencyForFailure, - reason); - - _logger.LogError( - "Policy run job {JobId} failed after {Attempts} attempts (tenant={TenantId}, policy={PolicyId}, runId={RunId}). Error: {Error}", - failedJob.Id, - attemptCount, - failedJob.TenantId, - failedJob.PolicyId, - failedJob.RunId ?? "(pending)", - submission.Error ?? "unknown"); - - return PolicyRunExecutionResult.Failed(failedJob, submission.Error); - } - - _metrics.RecordPolicyRunEvent( - failedJob.TenantId, - failedJob.PolicyId, - failedJob.Mode, - "retry", - latencyForFailure, - reason); - _logger.LogWarning( - "Policy run job {JobId} retry scheduled (tenant={TenantId}, policy={PolicyId}, runId={RunId}, attempt={Attempt}). Error: {Error}", - failedJob.Id, - failedJob.TenantId, - failedJob.PolicyId, - failedJob.RunId ?? "(pending)", - attemptCount, - submission.Error ?? "unknown"); - - return PolicyRunExecutionResult.Retrying(failedJob, submission.Error); - } - - private static TimeSpan CalculateLatency(PolicyRunJob job, DateTimeOffset now) - { - var origin = job.QueuedAt ?? job.CreatedAt; - var latency = now - origin; - return latency < TimeSpan.Zero ? 
TimeSpan.Zero : latency; - } -} +using System; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Scheduler.Models; +using StellaOps.Scheduler.Storage.Mongo.Repositories; +using StellaOps.Scheduler.Worker.Observability; +using StellaOps.Scheduler.Worker.Options; + +namespace StellaOps.Scheduler.Worker.Policy; + +internal sealed class PolicyRunExecutionService +{ + private readonly IPolicyRunJobRepository _repository; + private readonly IPolicyRunClient _client; + private readonly IOptions<SchedulerWorkerOptions> _options; + private readonly TimeProvider _timeProvider; + private readonly SchedulerWorkerMetrics _metrics; + private readonly IPolicyRunTargetingService _targetingService; + private readonly ILogger<PolicyRunExecutionService> _logger; + + public PolicyRunExecutionService( + IPolicyRunJobRepository repository, + IPolicyRunClient client, + IOptions<SchedulerWorkerOptions> options, + TimeProvider? timeProvider, + SchedulerWorkerMetrics metrics, + IPolicyRunTargetingService targetingService, + ILogger<PolicyRunExecutionService> logger) + { + _repository = repository ?? throw new ArgumentNullException(nameof(repository)); + _client = client ?? throw new ArgumentNullException(nameof(client)); + _options = options ?? throw new ArgumentNullException(nameof(options)); + _timeProvider = timeProvider ?? TimeProvider.System; + _metrics = metrics ?? throw new ArgumentNullException(nameof(metrics)); + _targetingService = targetingService ?? throw new ArgumentNullException(nameof(targetingService)); + _logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public async Task<PolicyRunExecutionResult> ExecuteAsync(PolicyRunJob job, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(job); + cancellationToken.ThrowIfCancellationRequested(); + + if (job.CancellationRequested) + { + var cancelledAt = _timeProvider.GetUtcNow(); + var cancelled = job with + { + Status = PolicyRunJobStatus.Cancelled, + CancelledAt = cancelledAt, + UpdatedAt = cancelledAt, + LeaseOwner = null, + LeaseExpiresAt = null, + AvailableAt = cancelledAt + }; + + var replaced = await _repository.ReplaceAsync(cancelled, job.LeaseOwner, cancellationToken: cancellationToken).ConfigureAwait(false); + if (!replaced) + { + _logger.LogWarning("Failed to update cancelled policy run job {JobId}.", job.Id); + } + + _metrics.RecordPolicyRunEvent( + cancelled.TenantId, + cancelled.PolicyId, + cancelled.Mode, + "cancelled", + reason: cancelled.CancellationReason); + _logger.LogInformation( + "Policy run job {JobId} cancelled (tenant={TenantId}, policy={PolicyId}, runId={RunId}).", + cancelled.Id, + cancelled.TenantId, + cancelled.PolicyId, + cancelled.RunId ?? 
"(pending)"); + + return PolicyRunExecutionResult.Cancelled(cancelled); + } + + var targeting = await _targetingService + .EnsureTargetsAsync(job, cancellationToken) + .ConfigureAwait(false); + + if (targeting.Status == PolicyRunTargetingStatus.NoWork) + { + var completionTime = _timeProvider.GetUtcNow(); + var completed = targeting.Job with + { + Status = PolicyRunJobStatus.Completed, + CompletedAt = completionTime, + UpdatedAt = completionTime, + LeaseOwner = null, + LeaseExpiresAt = null, + AvailableAt = completionTime, + LastError = null + }; + + var replaced = await _repository.ReplaceAsync( + completed, + job.LeaseOwner, + cancellationToken: cancellationToken) + .ConfigureAwait(false); + + if (!replaced) + { + _logger.LogWarning("Failed to persist no-work completion for policy run job {JobId}.", job.Id); + } + + var latency = CalculateLatency(job, completionTime); + _metrics.RecordPolicyRunEvent( + completed.TenantId, + completed.PolicyId, + completed.Mode, + "no_work", + latency, + targeting.Reason); + _logger.LogInformation( + "Policy run job {JobId} completed without submission (reason={Reason}).", + completed.Id, + targeting.Reason ?? "none"); + + return PolicyRunExecutionResult.NoOp(completed, targeting.Reason); + } + + job = targeting.Job; + var now = _timeProvider.GetUtcNow(); + var request = job.ToPolicyRunRequest(now); + var submission = await _client.SubmitAsync(job, request, cancellationToken).ConfigureAwait(false); + var dispatchOptions = _options.Value.Policy.Dispatch; + var attemptCount = job.AttemptCount + 1; + + if (submission.Success) + { + var updated = job with + { + Status = PolicyRunJobStatus.Submitted, + RunId = submission.RunId ?? job.RunId, + SubmittedAt = submission.QueuedAt ?? now, + UpdatedAt = now, + AttemptCount = attemptCount, + LastAttemptAt = now, + LastError = null, + LeaseOwner = null, + LeaseExpiresAt = null, + AvailableAt = now + }; + + var replaced = await _repository.ReplaceAsync(updated, job.LeaseOwner, cancellationToken: cancellationToken).ConfigureAwait(false); + if (!replaced) + { + _logger.LogWarning("Failed to persist submitted policy run job {JobId}.", job.Id); + } + + var latency = CalculateLatency(job, now); + _metrics.RecordPolicyRunEvent( + updated.TenantId, + updated.PolicyId, + updated.Mode, + "submitted", + latency); + _logger.LogInformation( + "Policy run job {JobId} submitted (tenant={TenantId}, policy={PolicyId}, runId={RunId}, attempts={Attempts}).", + updated.Id, + updated.TenantId, + updated.PolicyId, + updated.RunId ?? "(pending)", + attemptCount); + + return PolicyRunExecutionResult.Submitted(updated); + } + + var nextStatus = attemptCount >= dispatchOptions.MaxAttempts + ? PolicyRunJobStatus.Failed + : PolicyRunJobStatus.Pending; + var nextAvailable = nextStatus == PolicyRunJobStatus.Pending + ? now.Add(dispatchOptions.RetryBackoff) + : now; + + var failedJob = job with + { + Status = nextStatus, + AttemptCount = attemptCount, + LastAttemptAt = now, + LastError = submission.Error, + LeaseOwner = null, + LeaseExpiresAt = null, + UpdatedAt = now, + AvailableAt = nextAvailable + }; + + var updateSuccess = await _repository.ReplaceAsync(failedJob, job.LeaseOwner, cancellationToken: cancellationToken).ConfigureAwait(false); + if (!updateSuccess) + { + _logger.LogWarning("Failed to update policy run job {JobId} after submission failure.", job.Id); + } + + var latencyForFailure = CalculateLatency(job, now); + var reason = string.IsNullOrWhiteSpace(submission.Error) ? 
null : submission.Error; + + if (nextStatus == PolicyRunJobStatus.Failed) + { + _metrics.RecordPolicyRunEvent( + failedJob.TenantId, + failedJob.PolicyId, + failedJob.Mode, + "failed", + latencyForFailure, + reason); + + _logger.LogError( + "Policy run job {JobId} failed after {Attempts} attempts (tenant={TenantId}, policy={PolicyId}, runId={RunId}). Error: {Error}", + failedJob.Id, + attemptCount, + failedJob.TenantId, + failedJob.PolicyId, + failedJob.RunId ?? "(pending)", + submission.Error ?? "unknown"); + + return PolicyRunExecutionResult.Failed(failedJob, submission.Error); + } + + _metrics.RecordPolicyRunEvent( + failedJob.TenantId, + failedJob.PolicyId, + failedJob.Mode, + "retry", + latencyForFailure, + reason); + _logger.LogWarning( + "Policy run job {JobId} retry scheduled (tenant={TenantId}, policy={PolicyId}, runId={RunId}, attempt={Attempt}). Error: {Error}", + failedJob.Id, + failedJob.TenantId, + failedJob.PolicyId, + failedJob.RunId ?? "(pending)", + attemptCount, + submission.Error ?? "unknown"); + + return PolicyRunExecutionResult.Retrying(failedJob, submission.Error); + } + + private static TimeSpan CalculateLatency(PolicyRunJob job, DateTimeOffset now) + { + var origin = job.QueuedAt ?? job.CreatedAt; + var latency = now - origin; + return latency < TimeSpan.Zero ? TimeSpan.Zero : latency; + } +} diff --git a/src/StellaOps.Scheduler.Worker/Policy/PolicyRunSubmissionResult.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Policy/PolicyRunSubmissionResult.cs similarity index 96% rename from src/StellaOps.Scheduler.Worker/Policy/PolicyRunSubmissionResult.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Policy/PolicyRunSubmissionResult.cs index f61e49ab..6c3d336d 100644 --- a/src/StellaOps.Scheduler.Worker/Policy/PolicyRunSubmissionResult.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Policy/PolicyRunSubmissionResult.cs @@ -1,28 +1,28 @@ -using System; - -namespace StellaOps.Scheduler.Worker.Policy; - -internal readonly record struct PolicyRunSubmissionResult -{ - private PolicyRunSubmissionResult(bool success, string? runId, DateTimeOffset? queuedAt, string? error) - { - Success = success; - RunId = runId; - QueuedAt = queuedAt; - Error = error; - } - - public bool Success { get; } - - public string? RunId { get; } - - public DateTimeOffset? QueuedAt { get; } - - public string? Error { get; } - - public static PolicyRunSubmissionResult Succeeded(string? runId, DateTimeOffset? queuedAt) - => new(success: true, runId, queuedAt, error: null); - - public static PolicyRunSubmissionResult Failed(string? error) - => new(success: false, runId: null, queuedAt: null, error); -} +using System; + +namespace StellaOps.Scheduler.Worker.Policy; + +internal readonly record struct PolicyRunSubmissionResult +{ + private PolicyRunSubmissionResult(bool success, string? runId, DateTimeOffset? queuedAt, string? error) + { + Success = success; + RunId = runId; + QueuedAt = queuedAt; + Error = error; + } + + public bool Success { get; } + + public string? RunId { get; } + + public DateTimeOffset? QueuedAt { get; } + + public string? Error { get; } + + public static PolicyRunSubmissionResult Succeeded(string? runId, DateTimeOffset? queuedAt) + => new(success: true, runId, queuedAt, error: null); + + public static PolicyRunSubmissionResult Failed(string? 
error) + => new(success: false, runId: null, queuedAt: null, error); +} diff --git a/src/StellaOps.Scheduler.Worker/Policy/PolicyRunTargetingResult.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Policy/PolicyRunTargetingResult.cs similarity index 96% rename from src/StellaOps.Scheduler.Worker/Policy/PolicyRunTargetingResult.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Policy/PolicyRunTargetingResult.cs index ea1ef38a..bd3a7681 100644 --- a/src/StellaOps.Scheduler.Worker/Policy/PolicyRunTargetingResult.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Policy/PolicyRunTargetingResult.cs @@ -1,25 +1,25 @@ -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.Worker.Policy; - -internal enum PolicyRunTargetingStatus -{ - Unchanged, - Targeted, - NoWork -} - -internal readonly record struct PolicyRunTargetingResult( - PolicyRunTargetingStatus Status, - PolicyRunJob Job, - string? Reason) -{ - public static PolicyRunTargetingResult Unchanged(PolicyRunJob job) - => new(PolicyRunTargetingStatus.Unchanged, job, null); - - public static PolicyRunTargetingResult Targeted(PolicyRunJob job) - => new(PolicyRunTargetingStatus.Targeted, job, null); - - public static PolicyRunTargetingResult NoWork(PolicyRunJob job, string? reason) - => new(PolicyRunTargetingStatus.NoWork, job, reason); -} +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.Worker.Policy; + +internal enum PolicyRunTargetingStatus +{ + Unchanged, + Targeted, + NoWork +} + +internal readonly record struct PolicyRunTargetingResult( + PolicyRunTargetingStatus Status, + PolicyRunJob Job, + string? Reason) +{ + public static PolicyRunTargetingResult Unchanged(PolicyRunJob job) + => new(PolicyRunTargetingStatus.Unchanged, job, null); + + public static PolicyRunTargetingResult Targeted(PolicyRunJob job) + => new(PolicyRunTargetingStatus.Targeted, job, null); + + public static PolicyRunTargetingResult NoWork(PolicyRunJob job, string? 
reason) + => new(PolicyRunTargetingStatus.NoWork, job, reason); +} diff --git a/src/StellaOps.Scheduler.Worker/Policy/PolicyRunTargetingService.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Policy/PolicyRunTargetingService.cs similarity index 97% rename from src/StellaOps.Scheduler.Worker/Policy/PolicyRunTargetingService.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Policy/PolicyRunTargetingService.cs index a9b2d6c4..3b6a4717 100644 --- a/src/StellaOps.Scheduler.Worker/Policy/PolicyRunTargetingService.cs +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Policy/PolicyRunTargetingService.cs @@ -1,455 +1,455 @@ -using System; -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Globalization; -using System.Linq; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Scheduler.Models; -using StellaOps.Scheduler.Worker; -using StellaOps.Scheduler.Worker.Options; - -namespace StellaOps.Scheduler.Worker.Policy; - -internal sealed class PolicyRunTargetingService : IPolicyRunTargetingService -{ - private static readonly string[] DirectSbomMetadataKeys = - { - "delta.sboms", - "delta.sbomset", - "delta:sboms", - "delta_sbomset" - }; - - private static readonly string[] ProductKeyMetadataKeys = - { - "delta.purls", - "delta.productkeys", - "delta.components", - "delta:product_keys" - }; - - private static readonly string[] VulnerabilityMetadataKeys = - { - "delta.vulns", - "delta.vulnerabilities", - "delta.cves", - "delta:vulnerability_ids" - }; - - private readonly IImpactTargetingService _impactTargetingService; - private readonly IOptions<SchedulerWorkerOptions> _options; - private readonly TimeProvider _timeProvider; - private readonly ILogger<PolicyRunTargetingService> _logger; - - public PolicyRunTargetingService( - IImpactTargetingService impactTargetingService, - IOptions<SchedulerWorkerOptions> options, - TimeProvider? timeProvider, - ILogger<PolicyRunTargetingService> logger) - { - _impactTargetingService = impactTargetingService ?? throw new ArgumentNullException(nameof(impactTargetingService)); - _options = options ?? throw new ArgumentNullException(nameof(options)); - _timeProvider = timeProvider ?? TimeProvider.System; - _logger = logger ?? throw new ArgumentNullException(nameof(logger)); - } - - public async Task<PolicyRunTargetingResult> EnsureTargetsAsync(PolicyRunJob job, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(job); - - var policyOptions = _options.Value.Policy; - var targetingOptions = policyOptions.Targeting; - - if (!targetingOptions.Enabled) - { - return PolicyRunTargetingResult.Unchanged(job); - } - - if (job.Mode != PolicyRunMode.Incremental) - { - return PolicyRunTargetingResult.Unchanged(job); - } - - var inputs = job.Inputs ?? PolicyRunInputs.Empty; - if (!inputs.SbomSet.IsDefaultOrEmpty && inputs.SbomSet.Length > 0) - { - return PolicyRunTargetingResult.Unchanged(job); - } - - var metadata = job.Metadata ?? 
ImmutableSortedDictionary<string, string>.Empty; - var directSboms = ParseList(metadata, DirectSbomMetadataKeys); - var productKeys = ParseList(metadata, ProductKeyMetadataKeys); - var vulnerabilityIds = ParseList(metadata, VulnerabilityMetadataKeys); - - if (directSboms.Count == 0 && productKeys.Count == 0 && vulnerabilityIds.Count == 0) - { - _logger.LogDebug( - "Policy run job {JobId} has no delta metadata; skipping targeting.", - job.Id); - return PolicyRunTargetingResult.Unchanged(job); - } - - var candidates = new HashSet<string>(StringComparer.OrdinalIgnoreCase); - AddIdentifiers(candidates, directSboms); - - var selector = BuildSelector(job, metadata); - var usageOnly = DetermineUsageOnly(metadata, targetingOptions.DefaultUsageOnly); - - if (productKeys.Count > 0) - { - try - { - var impactSet = await _impactTargetingService - .ResolveByPurlsAsync(productKeys, usageOnly, selector, cancellationToken) - .ConfigureAwait(false); - AddFromImpactSet(candidates, impactSet); - } - catch (OperationCanceledException) - { - throw; - } - catch (Exception ex) - { - _logger.LogWarning( - ex, - "Policy run job {JobId} failed resolving delta by product keys; falling back to full run.", - job.Id); - return PolicyRunTargetingResult.Unchanged(job); - } - } - - if (vulnerabilityIds.Count > 0) - { - try - { - var impactSet = await _impactTargetingService - .ResolveByVulnerabilitiesAsync(vulnerabilityIds, usageOnly, selector, cancellationToken) - .ConfigureAwait(false); - AddFromImpactSet(candidates, impactSet); - } - catch (OperationCanceledException) - { - throw; - } - catch (Exception ex) - { - _logger.LogWarning( - ex, - "Policy run job {JobId} failed resolving delta by vulnerability ids; falling back to full run.", - job.Id); - return PolicyRunTargetingResult.Unchanged(job); - } - } - - if (candidates.Count == 0) - { - _logger.LogInformation( - "Policy run job {JobId} produced no SBOM targets (policy={PolicyId}).", - job.Id, - job.PolicyId); - return PolicyRunTargetingResult.NoWork(job, "no_matches"); - } - - if (candidates.Count > targetingOptions.MaxSboms) - { - _logger.LogWarning( - "Policy run job {JobId} resolved {Count} SBOMs exceeding limit {Limit}; falling back to full run.", - job.Id, - candidates.Count, - targetingOptions.MaxSboms); - return PolicyRunTargetingResult.Unchanged(job); - } - - var normalized = candidates - .Select(NormalizeSbomId) - .Where(static value => !string.IsNullOrWhiteSpace(value)) - .Distinct(StringComparer.Ordinal) - .OrderBy(static value => value, StringComparer.Ordinal) - .ToImmutableArray(); - - if (normalized.Length == 0) - { - _logger.LogInformation( - "Policy run job {JobId} resulted in empty SBOM set after normalization; marking as no-work.", - job.Id); - return PolicyRunTargetingResult.NoWork(job, "normalized_empty"); - } - - var updatedInputs = inputs with { SbomSet = normalized }; - var updatedJob = job with - { - Inputs = updatedInputs, - UpdatedAt = _timeProvider.GetUtcNow() - }; - - _logger.LogInformation( - "Policy run job {JobId} targeted {Count} SBOMs for policy {PolicyId}.", - job.Id, - normalized.Length, - job.PolicyId); - - return PolicyRunTargetingResult.Targeted(updatedJob); - } - - private static void AddIdentifiers(HashSet<string> destination, IReadOnlyList<string> values) - { - foreach (var value in values) - { - var trimmed = value?.Trim(); - if (!string.IsNullOrEmpty(trimmed)) - { - destination.Add(trimmed); - } - } - } - - private static void AddFromImpactSet(HashSet<string> destination, ImpactSet impactSet) - { - foreach (var image in 
impactSet.Images) - { - var sbomId = ExtractSbomId(image); - if (!string.IsNullOrEmpty(sbomId)) - { - destination.Add(sbomId); - } - } - } - - private static string? ExtractSbomId(ImpactImage image) - { - if (TryGetLabel(image.Labels, "sbom", out var value) || - TryGetLabel(image.Labels, "sbomid", out value) || - TryGetLabel(image.Labels, "sbom_id", out value) || - TryGetLabel(image.Labels, "sbomId", out value)) - { - return value; - } - - return string.IsNullOrWhiteSpace(image.ImageDigest) - ? null - : $"sbom:{image.ImageDigest}"; - } - - private static bool TryGetLabel(ImmutableSortedDictionary<string, string> labels, string key, out string? value) - { - foreach (var pair in labels) - { - if (string.Equals(pair.Key, key, StringComparison.OrdinalIgnoreCase)) - { - value = pair.Value; - return true; - } - } - - value = null; - return false; - } - - private static string NormalizeSbomId(string candidate) - { - var trimmed = candidate.Trim(); - if (trimmed.Length == 0) - { - return string.Empty; - } - - if (trimmed.StartsWith("sbom:", StringComparison.OrdinalIgnoreCase)) - { - return trimmed; - } - - return $"sbom:{trimmed}"; - } - - private static Selector BuildSelector(PolicyRunJob job, ImmutableSortedDictionary<string, string> metadata) - { - var scope = SelectorScope.AllImages; - if (TryGetMetadataValue(metadata, "policy.selector.scope", out var scopeValue)) - { - scope = scopeValue.Trim().ToLowerInvariant() switch - { - "namespace" or "bynamespace" => SelectorScope.ByNamespace, - "repository" or "byrepository" => SelectorScope.ByRepository, - "digest" or "bydigest" => SelectorScope.ByDigest, - "labels" or "bylabels" => SelectorScope.ByLabels, - _ => SelectorScope.AllImages - }; - } - - var namespaces = scope == SelectorScope.ByNamespace - ? ParseList(metadata, "policy.selector.namespaces") - : Array.Empty<string>(); - var repositories = scope == SelectorScope.ByRepository - ? ParseList(metadata, "policy.selector.repositories") - : Array.Empty<string>(); - var digests = scope == SelectorScope.ByDigest - ? ParseList(metadata, "policy.selector.digests") - : Array.Empty<string>(); - var includeTags = ParseList(metadata, "policy.selector.includeTags", "policy.selector.tags"); - var labelSelectors = scope == SelectorScope.ByLabels - ? 
ParseLabelSelectors(metadata) - : ImmutableArray<LabelSelector>.Empty; - - try - { - return new Selector( - scope, - job.TenantId, - namespaces, - repositories, - digests, - includeTags, - labelSelectors, - resolvesTags: TryGetMetadataValue(metadata, "policy.selector.resolvesTags", out var resolvesTagsValue) && ParseBoolean(resolvesTagsValue)); - } - catch (Exception) - { - return new Selector(SelectorScope.AllImages, tenantId: job.TenantId); - } - } - - private static ImmutableArray<LabelSelector> ParseLabelSelectors(ImmutableSortedDictionary<string, string> metadata) - { - if (!TryGetMetadataValue(metadata, "policy.selector.labels", out var raw) || string.IsNullOrWhiteSpace(raw)) - { - return ImmutableArray<LabelSelector>.Empty; - } - - var segments = Split(raw); - if (segments.Count == 0) - { - return ImmutableArray<LabelSelector>.Empty; - } - - var selectors = new List<LabelSelector>(segments.Count); - foreach (var segment in segments) - { - var index = segment.IndexOf('='); - if (index <= 0 || index == segment.Length - 1) - { - continue; - } - - var key = segment[..index].Trim(); - var values = segment[(index + 1)..].Split('|', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); - if (string.IsNullOrEmpty(key)) - { - continue; - } - - selectors.Add(new LabelSelector(key, values)); - } - - return selectors.Count == 0 - ? ImmutableArray<LabelSelector>.Empty - : selectors.OrderBy(static selector => selector.Key, StringComparer.Ordinal).ToImmutableArray(); - } - - private static IReadOnlyList<string> ParseList(ImmutableSortedDictionary<string, string> metadata, params string[] keys) - { - foreach (var key in keys) - { - if (TryGetMetadataValue(metadata, key, out var raw) && !string.IsNullOrWhiteSpace(raw)) - { - return ParseList(raw); - } - } - - return Array.Empty<string>(); - } - - private static IReadOnlyList<string> ParseList(string raw) - { - var trimmed = raw.Trim(); - - if (trimmed.Length == 0) - { - return Array.Empty<string>(); - } - - if (trimmed.StartsWith("[", StringComparison.Ordinal)) - { - try - { - using var document = JsonDocument.Parse(trimmed); - if (document.RootElement.ValueKind == JsonValueKind.Array) - { - return document.RootElement - .EnumerateArray() - .Where(static element => element.ValueKind == JsonValueKind.String) - .Select(static element => element.GetString() ?? 
string.Empty) - .Where(static value => !string.IsNullOrWhiteSpace(value)) - .ToArray(); - } - } - catch (JsonException) - { - } - } - - return Split(trimmed); - } - - private static List<string> Split(string value) - { - return value - .Split(new[] { ',', ';', '\n', '\r', '\t' }, StringSplitOptions.RemoveEmptyEntries) - .Select(static item => item.Trim()) - .Where(static item => item.Length > 0) - .ToList(); - } - - private static bool DetermineUsageOnly(ImmutableSortedDictionary<string, string> metadata, bool defaultValue) - { - if (TryGetMetadataValue(metadata, "policy.selector.usageOnly", out var raw)) - { - return ParseBoolean(raw); - } - - if (TryGetMetadataValue(metadata, "delta.usageOnly", out raw)) - { - return ParseBoolean(raw); - } - - return defaultValue; - } - - private static bool ParseBoolean(string value) - { - if (bool.TryParse(value, out var parsed)) - { - return parsed; - } - - if (int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var numeric)) - { - return numeric != 0; - } - - return value.Equals("yes", StringComparison.OrdinalIgnoreCase) || - value.Equals("y", StringComparison.OrdinalIgnoreCase) || - value.Equals("true", StringComparison.OrdinalIgnoreCase); - } - - private static bool TryGetMetadataValue( - ImmutableSortedDictionary<string, string> metadata, - string key, - out string value) - { - foreach (var pair in metadata) - { - if (string.Equals(pair.Key, key, StringComparison.OrdinalIgnoreCase)) - { - value = pair.Value; - return true; - } - } - - value = string.Empty; - return false; - } -} +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Globalization; +using System.Linq; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Scheduler.Models; +using StellaOps.Scheduler.Worker; +using StellaOps.Scheduler.Worker.Options; + +namespace StellaOps.Scheduler.Worker.Policy; + +internal sealed class PolicyRunTargetingService : IPolicyRunTargetingService +{ + private static readonly string[] DirectSbomMetadataKeys = + { + "delta.sboms", + "delta.sbomset", + "delta:sboms", + "delta_sbomset" + }; + + private static readonly string[] ProductKeyMetadataKeys = + { + "delta.purls", + "delta.productkeys", + "delta.components", + "delta:product_keys" + }; + + private static readonly string[] VulnerabilityMetadataKeys = + { + "delta.vulns", + "delta.vulnerabilities", + "delta.cves", + "delta:vulnerability_ids" + }; + + private readonly IImpactTargetingService _impactTargetingService; + private readonly IOptions<SchedulerWorkerOptions> _options; + private readonly TimeProvider _timeProvider; + private readonly ILogger<PolicyRunTargetingService> _logger; + + public PolicyRunTargetingService( + IImpactTargetingService impactTargetingService, + IOptions<SchedulerWorkerOptions> options, + TimeProvider? timeProvider, + ILogger<PolicyRunTargetingService> logger) + { + _impactTargetingService = impactTargetingService ?? throw new ArgumentNullException(nameof(impactTargetingService)); + _options = options ?? throw new ArgumentNullException(nameof(options)); + _timeProvider = timeProvider ?? TimeProvider.System; + _logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + } + + public async Task<PolicyRunTargetingResult> EnsureTargetsAsync(PolicyRunJob job, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(job); + + var policyOptions = _options.Value.Policy; + var targetingOptions = policyOptions.Targeting; + + if (!targetingOptions.Enabled) + { + return PolicyRunTargetingResult.Unchanged(job); + } + + if (job.Mode != PolicyRunMode.Incremental) + { + return PolicyRunTargetingResult.Unchanged(job); + } + + var inputs = job.Inputs ?? PolicyRunInputs.Empty; + if (!inputs.SbomSet.IsDefaultOrEmpty && inputs.SbomSet.Length > 0) + { + return PolicyRunTargetingResult.Unchanged(job); + } + + var metadata = job.Metadata ?? ImmutableSortedDictionary<string, string>.Empty; + var directSboms = ParseList(metadata, DirectSbomMetadataKeys); + var productKeys = ParseList(metadata, ProductKeyMetadataKeys); + var vulnerabilityIds = ParseList(metadata, VulnerabilityMetadataKeys); + + if (directSboms.Count == 0 && productKeys.Count == 0 && vulnerabilityIds.Count == 0) + { + _logger.LogDebug( + "Policy run job {JobId} has no delta metadata; skipping targeting.", + job.Id); + return PolicyRunTargetingResult.Unchanged(job); + } + + var candidates = new HashSet<string>(StringComparer.OrdinalIgnoreCase); + AddIdentifiers(candidates, directSboms); + + var selector = BuildSelector(job, metadata); + var usageOnly = DetermineUsageOnly(metadata, targetingOptions.DefaultUsageOnly); + + if (productKeys.Count > 0) + { + try + { + var impactSet = await _impactTargetingService + .ResolveByPurlsAsync(productKeys, usageOnly, selector, cancellationToken) + .ConfigureAwait(false); + AddFromImpactSet(candidates, impactSet); + } + catch (OperationCanceledException) + { + throw; + } + catch (Exception ex) + { + _logger.LogWarning( + ex, + "Policy run job {JobId} failed resolving delta by product keys; falling back to full run.", + job.Id); + return PolicyRunTargetingResult.Unchanged(job); + } + } + + if (vulnerabilityIds.Count > 0) + { + try + { + var impactSet = await _impactTargetingService + .ResolveByVulnerabilitiesAsync(vulnerabilityIds, usageOnly, selector, cancellationToken) + .ConfigureAwait(false); + AddFromImpactSet(candidates, impactSet); + } + catch (OperationCanceledException) + { + throw; + } + catch (Exception ex) + { + _logger.LogWarning( + ex, + "Policy run job {JobId} failed resolving delta by vulnerability ids; falling back to full run.", + job.Id); + return PolicyRunTargetingResult.Unchanged(job); + } + } + + if (candidates.Count == 0) + { + _logger.LogInformation( + "Policy run job {JobId} produced no SBOM targets (policy={PolicyId}).", + job.Id, + job.PolicyId); + return PolicyRunTargetingResult.NoWork(job, "no_matches"); + } + + if (candidates.Count > targetingOptions.MaxSboms) + { + _logger.LogWarning( + "Policy run job {JobId} resolved {Count} SBOMs exceeding limit {Limit}; falling back to full run.", + job.Id, + candidates.Count, + targetingOptions.MaxSboms); + return PolicyRunTargetingResult.Unchanged(job); + } + + var normalized = candidates + .Select(NormalizeSbomId) + .Where(static value => !string.IsNullOrWhiteSpace(value)) + .Distinct(StringComparer.Ordinal) + .OrderBy(static value => value, StringComparer.Ordinal) + .ToImmutableArray(); + + if (normalized.Length == 0) + { + _logger.LogInformation( + "Policy run job {JobId} resulted in empty SBOM set after normalization; marking as no-work.", + job.Id); + return PolicyRunTargetingResult.NoWork(job, "normalized_empty"); + } + + var 
updatedInputs = inputs with { SbomSet = normalized }; + var updatedJob = job with + { + Inputs = updatedInputs, + UpdatedAt = _timeProvider.GetUtcNow() + }; + + _logger.LogInformation( + "Policy run job {JobId} targeted {Count} SBOMs for policy {PolicyId}.", + job.Id, + normalized.Length, + job.PolicyId); + + return PolicyRunTargetingResult.Targeted(updatedJob); + } + + private static void AddIdentifiers(HashSet<string> destination, IReadOnlyList<string> values) + { + foreach (var value in values) + { + var trimmed = value?.Trim(); + if (!string.IsNullOrEmpty(trimmed)) + { + destination.Add(trimmed); + } + } + } + + private static void AddFromImpactSet(HashSet<string> destination, ImpactSet impactSet) + { + foreach (var image in impactSet.Images) + { + var sbomId = ExtractSbomId(image); + if (!string.IsNullOrEmpty(sbomId)) + { + destination.Add(sbomId); + } + } + } + + private static string? ExtractSbomId(ImpactImage image) + { + if (TryGetLabel(image.Labels, "sbom", out var value) || + TryGetLabel(image.Labels, "sbomid", out value) || + TryGetLabel(image.Labels, "sbom_id", out value) || + TryGetLabel(image.Labels, "sbomId", out value)) + { + return value; + } + + return string.IsNullOrWhiteSpace(image.ImageDigest) + ? null + : $"sbom:{image.ImageDigest}"; + } + + private static bool TryGetLabel(ImmutableSortedDictionary<string, string> labels, string key, out string? value) + { + foreach (var pair in labels) + { + if (string.Equals(pair.Key, key, StringComparison.OrdinalIgnoreCase)) + { + value = pair.Value; + return true; + } + } + + value = null; + return false; + } + + private static string NormalizeSbomId(string candidate) + { + var trimmed = candidate.Trim(); + if (trimmed.Length == 0) + { + return string.Empty; + } + + if (trimmed.StartsWith("sbom:", StringComparison.OrdinalIgnoreCase)) + { + return trimmed; + } + + return $"sbom:{trimmed}"; + } + + private static Selector BuildSelector(PolicyRunJob job, ImmutableSortedDictionary<string, string> metadata) + { + var scope = SelectorScope.AllImages; + if (TryGetMetadataValue(metadata, "policy.selector.scope", out var scopeValue)) + { + scope = scopeValue.Trim().ToLowerInvariant() switch + { + "namespace" or "bynamespace" => SelectorScope.ByNamespace, + "repository" or "byrepository" => SelectorScope.ByRepository, + "digest" or "bydigest" => SelectorScope.ByDigest, + "labels" or "bylabels" => SelectorScope.ByLabels, + _ => SelectorScope.AllImages + }; + } + + var namespaces = scope == SelectorScope.ByNamespace + ? ParseList(metadata, "policy.selector.namespaces") + : Array.Empty<string>(); + var repositories = scope == SelectorScope.ByRepository + ? ParseList(metadata, "policy.selector.repositories") + : Array.Empty<string>(); + var digests = scope == SelectorScope.ByDigest + ? ParseList(metadata, "policy.selector.digests") + : Array.Empty<string>(); + var includeTags = ParseList(metadata, "policy.selector.includeTags", "policy.selector.tags"); + var labelSelectors = scope == SelectorScope.ByLabels + ? 
ParseLabelSelectors(metadata) + : ImmutableArray<LabelSelector>.Empty; + + try + { + return new Selector( + scope, + job.TenantId, + namespaces, + repositories, + digests, + includeTags, + labelSelectors, + resolvesTags: TryGetMetadataValue(metadata, "policy.selector.resolvesTags", out var resolvesTagsValue) && ParseBoolean(resolvesTagsValue)); + } + catch (Exception) + { + return new Selector(SelectorScope.AllImages, tenantId: job.TenantId); + } + } + + private static ImmutableArray<LabelSelector> ParseLabelSelectors(ImmutableSortedDictionary<string, string> metadata) + { + if (!TryGetMetadataValue(metadata, "policy.selector.labels", out var raw) || string.IsNullOrWhiteSpace(raw)) + { + return ImmutableArray<LabelSelector>.Empty; + } + + var segments = Split(raw); + if (segments.Count == 0) + { + return ImmutableArray<LabelSelector>.Empty; + } + + var selectors = new List<LabelSelector>(segments.Count); + foreach (var segment in segments) + { + var index = segment.IndexOf('='); + if (index <= 0 || index == segment.Length - 1) + { + continue; + } + + var key = segment[..index].Trim(); + var values = segment[(index + 1)..].Split('|', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); + if (string.IsNullOrEmpty(key)) + { + continue; + } + + selectors.Add(new LabelSelector(key, values)); + } + + return selectors.Count == 0 + ? ImmutableArray<LabelSelector>.Empty + : selectors.OrderBy(static selector => selector.Key, StringComparer.Ordinal).ToImmutableArray(); + } + + private static IReadOnlyList<string> ParseList(ImmutableSortedDictionary<string, string> metadata, params string[] keys) + { + foreach (var key in keys) + { + if (TryGetMetadataValue(metadata, key, out var raw) && !string.IsNullOrWhiteSpace(raw)) + { + return ParseList(raw); + } + } + + return Array.Empty<string>(); + } + + private static IReadOnlyList<string> ParseList(string raw) + { + var trimmed = raw.Trim(); + + if (trimmed.Length == 0) + { + return Array.Empty<string>(); + } + + if (trimmed.StartsWith("[", StringComparison.Ordinal)) + { + try + { + using var document = JsonDocument.Parse(trimmed); + if (document.RootElement.ValueKind == JsonValueKind.Array) + { + return document.RootElement + .EnumerateArray() + .Where(static element => element.ValueKind == JsonValueKind.String) + .Select(static element => element.GetString() ?? 
string.Empty) + .Where(static value => !string.IsNullOrWhiteSpace(value)) + .ToArray(); + } + } + catch (JsonException) + { + } + } + + return Split(trimmed); + } + + private static List<string> Split(string value) + { + return value + .Split(new[] { ',', ';', '\n', '\r', '\t' }, StringSplitOptions.RemoveEmptyEntries) + .Select(static item => item.Trim()) + .Where(static item => item.Length > 0) + .ToList(); + } + + private static bool DetermineUsageOnly(ImmutableSortedDictionary<string, string> metadata, bool defaultValue) + { + if (TryGetMetadataValue(metadata, "policy.selector.usageOnly", out var raw)) + { + return ParseBoolean(raw); + } + + if (TryGetMetadataValue(metadata, "delta.usageOnly", out raw)) + { + return ParseBoolean(raw); + } + + return defaultValue; + } + + private static bool ParseBoolean(string value) + { + if (bool.TryParse(value, out var parsed)) + { + return parsed; + } + + if (int.TryParse(value, NumberStyles.Integer, CultureInfo.InvariantCulture, out var numeric)) + { + return numeric != 0; + } + + return value.Equals("yes", StringComparison.OrdinalIgnoreCase) || + value.Equals("y", StringComparison.OrdinalIgnoreCase) || + value.Equals("true", StringComparison.OrdinalIgnoreCase); + } + + private static bool TryGetMetadataValue( + ImmutableSortedDictionary<string, string> metadata, + string key, + out string value) + { + foreach (var pair in metadata) + { + if (string.Equals(pair.Key, key, StringComparison.OrdinalIgnoreCase)) + { + value = pair.Value; + return true; + } + } + + value = string.Empty; + return false; + } +} diff --git a/src/StellaOps.Scheduler.Worker/Properties/AssemblyInfo.cs b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Properties/AssemblyInfo.cs similarity index 100% rename from src/StellaOps.Scheduler.Worker/Properties/AssemblyInfo.cs rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/Properties/AssemblyInfo.cs diff --git a/src/StellaOps.Scheduler.Worker/StellaOps.Scheduler.Worker.csproj b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/StellaOps.Scheduler.Worker.csproj similarity index 75% rename from src/StellaOps.Scheduler.Worker/StellaOps.Scheduler.Worker.csproj rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/StellaOps.Scheduler.Worker.csproj index c274bd30..dd54e9f4 100644 --- a/src/StellaOps.Scheduler.Worker/StellaOps.Scheduler.Worker.csproj +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/StellaOps.Scheduler.Worker.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -9,10 +10,10 @@ <ProjectReference Include="../StellaOps.Scheduler.Models/StellaOps.Scheduler.Models.csproj" /> <ProjectReference Include="../StellaOps.Scheduler.Storage.Mongo/StellaOps.Scheduler.Storage.Mongo.csproj" /> <ProjectReference Include="../StellaOps.Scheduler.Queue/StellaOps.Scheduler.Queue.csproj" /> - <ProjectReference Include="../StellaOps.Notify.Models/StellaOps.Notify.Models.csproj" /> - <ProjectReference Include="../StellaOps.Notify.Queue/StellaOps.Notify.Queue.csproj" /> + <ProjectReference Include="../../../Notify/__Libraries/StellaOps.Notify.Models/StellaOps.Notify.Models.csproj" /> + <ProjectReference Include="../../../Notify/__Libraries/StellaOps.Notify.Queue/StellaOps.Notify.Queue.csproj" /> <PackageReference Include="Cronos" Version="0.10.0" /> <PackageReference Include="System.Threading.RateLimiting" Version="8.0.0" /> <PackageReference Include="Microsoft.Extensions.Http" 
Version="10.0.0-rc.2.25502.107" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Scheduler.Worker/TASKS.md b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md similarity index 98% rename from src/StellaOps.Scheduler.Worker/TASKS.md rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md index c7f5c483..b68b086b 100644 --- a/src/StellaOps.Scheduler.Worker/TASKS.md +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/TASKS.md @@ -18,7 +18,7 @@ | ID | Status | Owner(s) | Depends on | Description | Exit Criteria | |----|--------|----------|------------|-------------|---------------| | SCHED-WORKER-20-301 | DONE (2025-10-28) | Scheduler Worker Guild, Policy Guild | SCHED-WORKER-16-201, POLICY-ENGINE-20-000 | Extend scheduler worker to trigger policy runs (full/incremental/simulate) via Policy Engine API, with idempotent job tracking and tenant scoping. | Worker schedules policy jobs deterministically; job records persisted; integration tests cover modes + cancellation. | -> 2025-10-26: Align worker serializers with `PolicyRunRequest/Status/DiffSummary` contracts from `src/StellaOps.Scheduler.Models`. Reference fixtures in `samples/api/scheduler/` for expected payload ordering. +> 2025-10-26: Align worker serializers with `PolicyRunRequest/Status/DiffSummary` contracts from `src/Scheduler/__Libraries/StellaOps.Scheduler.Models`. Reference fixtures in `samples/api/scheduler/` for expected payload ordering. | SCHED-WORKER-20-302 | DONE (2025-10-29) | Scheduler Worker Guild | SCHED-WORKER-20-301, POLICY-ENGINE-20-006 | Implement policy delta targeting to re-evaluate only affected SBOM sets based on change streams and policy metadata. | Targeting reduces workload per design; tests simulate advisory/vex updates; metrics show delta counts. | > 2025-10-29: `PolicyRunTargetingService` translates change-stream metadata into SBOM sets, marks no-work jobs completed, and surfaces targeting options (`Policy.Targeting`). See `docs/SCHED-WORKER-20-302-POLICY-DELTA-TARGETING.md` for supported metadata keys and behaviour. | SCHED-WORKER-20-303 | DONE (2025-10-29) | Scheduler Worker Guild, Observability Guild | SCHED-WORKER-20-301 | Expose metrics (`policy_runs_scheduled`, `policy_runs_failed`, planner latency) and structured logs with policy/run identifiers. | Metrics registered; dashboards updated; logs validated in integration tests. 
| diff --git a/src/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-16-201-PLANNER.md b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-16-201-PLANNER.md similarity index 100% rename from src/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-16-201-PLANNER.md rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-16-201-PLANNER.md diff --git a/src/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-16-202-IMPACT-TARGETING.md b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-16-202-IMPACT-TARGETING.md similarity index 100% rename from src/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-16-202-IMPACT-TARGETING.md rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-16-202-IMPACT-TARGETING.md diff --git a/src/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-16-203-RUNNER.md b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-16-203-RUNNER.md similarity index 100% rename from src/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-16-203-RUNNER.md rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-16-203-RUNNER.md diff --git a/src/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-16-204-EVENTS.md b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-16-204-EVENTS.md similarity index 100% rename from src/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-16-204-EVENTS.md rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-16-204-EVENTS.md diff --git a/src/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-16-205-OBSERVABILITY.md b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-16-205-OBSERVABILITY.md similarity index 100% rename from src/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-16-205-OBSERVABILITY.md rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-16-205-OBSERVABILITY.md diff --git a/src/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-20-301-POLICY-RUNS.md b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-20-301-POLICY-RUNS.md similarity index 98% rename from src/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-20-301-POLICY-RUNS.md rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-20-301-POLICY-RUNS.md index 138edb1d..857342cb 100644 --- a/src/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-20-301-POLICY-RUNS.md +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-20-301-POLICY-RUNS.md @@ -1,39 +1,39 @@ -# SCHED-WORKER-20-301 — Policy Run Dispatch - -_Sprint 20 · Scheduler Worker Guild_ - -This milestone introduces the worker-side plumbing required to trigger Policy Engine -runs from scheduler-managed jobs. The worker now leases policy run jobs from Mongo, -submits them to the Policy Engine REST API, and tracks submission state deterministically. - -## Highlights - -- New `PolicyRunJob` DTO (stored in `policy_jobs`) captures run metadata, attempts, - lease ownership, and cancellation markers. Schema version `scheduler.policy-run-job@1` - added to `SchedulerSchemaVersions` with canonical serializer coverage. -- Mongo storage gains `policy_jobs` collection with indexes on `{tenantId, status, availableAt}` - and `runId` uniqueness for idempotency. Repository `IPolicyRunJobRepository` exposes - leasing and replace semantics guarded by lease owner checks. -- Worker options now include `Policy` dispatch/API subsections covering lease cadence, - retry backoff, idempotency headers, and base URL validation. 
-- HTTP client (`HttpPolicyRunClient`) submits `PolicyRunRequest` payloads to - `/api/policy/policies/{policyId}/runs`, applying tenant + idempotency headers and honouring - configurable timeouts. Simulation mode reuses the same endpoint for now. -- `PolicyRunExecutionService` handles lease-aware execution, cancellation, retry backoff, - and permanent failure tracking with attempt counters. -- Background service `PolicyRunDispatchBackgroundService` leases batches, processes them - sequentially, and logs outcomes. Dispatch respects the global enable flag and idle delay. -- Unit tests cover success, retry, cancellation, and terminal failure paths. - -## Observability - -- Metrics: - - `scheduler_policy_run_events_total` — counter tagged by `tenant`, `policyId`, `mode`, `result`, and optional `reason`. - - `scheduler_policy_run_latency_seconds` — histogram capturing queue-to-submission latency with the same tag set. -- Structured logs now include tenant, policy, run identifier, and attempt counts for submit/retry/failure/cancellation paths. - -## Follow-ups - -- Integrate Authority token acquisition once Policy Engine authentication scopes are available. -- Extend jobs to capture simulation-specific inputs when the web service surfaces them. -- Wire policy run metrics/observability (`SCHED-WORKER-20-303`) atop the new execution service. +# SCHED-WORKER-20-301 — Policy Run Dispatch + +_Sprint 20 · Scheduler Worker Guild_ + +This milestone introduces the worker-side plumbing required to trigger Policy Engine +runs from scheduler-managed jobs. The worker now leases policy run jobs from Mongo, +submits them to the Policy Engine REST API, and tracks submission state deterministically. + +## Highlights + +- New `PolicyRunJob` DTO (stored in `policy_jobs`) captures run metadata, attempts, + lease ownership, and cancellation markers. Schema version `scheduler.policy-run-job@1` + added to `SchedulerSchemaVersions` with canonical serializer coverage. +- Mongo storage gains `policy_jobs` collection with indexes on `{tenantId, status, availableAt}` + and `runId` uniqueness for idempotency. Repository `IPolicyRunJobRepository` exposes + leasing and replace semantics guarded by lease owner checks. +- Worker options now include `Policy` dispatch/API subsections covering lease cadence, + retry backoff, idempotency headers, and base URL validation. +- HTTP client (`HttpPolicyRunClient`) submits `PolicyRunRequest` payloads to + `/api/policy/policies/{policyId}/runs`, applying tenant + idempotency headers and honouring + configurable timeouts. Simulation mode reuses the same endpoint for now. +- `PolicyRunExecutionService` handles lease-aware execution, cancellation, retry backoff, + and permanent failure tracking with attempt counters. +- Background service `PolicyRunDispatchBackgroundService` leases batches, processes them + sequentially, and logs outcomes. Dispatch respects the global enable flag and idle delay. +- Unit tests cover success, retry, cancellation, and terminal failure paths. + +## Observability + +- Metrics: + - `scheduler_policy_run_events_total` — counter tagged by `tenant`, `policyId`, `mode`, `result`, and optional `reason`. + - `scheduler_policy_run_latency_seconds` — histogram capturing queue-to-submission latency with the same tag set. +- Structured logs now include tenant, policy, run identifier, and attempt counts for submit/retry/failure/cancellation paths. + +## Follow-ups + +- Integrate Authority token acquisition once Policy Engine authentication scopes are available. 
+- Extend jobs to capture simulation-specific inputs when the web service surfaces them. +- Wire policy run metrics/observability (`SCHED-WORKER-20-303`) atop the new execution service. diff --git a/src/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-20-302-POLICY-DELTA-TARGETING.md b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-20-302-POLICY-DELTA-TARGETING.md similarity index 98% rename from src/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-20-302-POLICY-DELTA-TARGETING.md rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-20-302-POLICY-DELTA-TARGETING.md index 91d9546f..7114bac8 100644 --- a/src/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-20-302-POLICY-DELTA-TARGETING.md +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-20-302-POLICY-DELTA-TARGETING.md @@ -1,77 +1,77 @@ -# SCHED-WORKER-20-302 — Policy Delta Targeting - -_Sprint 20 · Scheduler Worker Guild_ - -This milestone wires policy delta targeting into the scheduler worker so incremental -policy jobs only re-evaluate SBOMs impacted by recent change-stream events. - -## Highlights - -- New `PolicyRunTargetingService` resolves pending incremental jobs before - submission, using metadata assembled by the policy orchestrator. -- Supports delta metadata for: - - Explicit SBOM lists (`delta.sboms`, `delta.sbomset`). - - Component product keys (`delta.purls`, `delta.productkeys`). - - Vulnerability identifiers (`delta.vulns`, `delta.vulnerabilities`, `delta.cves`). -- Metadata-derived selectors (scope, namespaces, repositories, digests, labels) - and usage-only hints constrain impact index lookups per policy. -- Impact index results capture SBOM identifiers from labels (`sbom`, `sbomId`, - `sbom_id`). When absent, the worker falls back to `sbom:{digest}` so the Policy - Engine still receives deterministic request payloads. -- No-work detections mark jobs `Completed` without contacting Policy Engine and - emit `scheduler_policy_run_events_total{result="no_work"}` with the normalized - reason (`no_matches` / `normalized_empty`). -- Guardrails: configurable maximum SBOM count (default 10 000) avoids sending - overly large incremental runs; excess falls back to full run behaviour. - -## Configuration - -`SchedulerWorkerOptions.Policy.Targeting` - -| Option | Default | Description | -| --- | --- | --- | -| `Enabled` | `true` | Toggle delta targeting (falls back to legacy behaviour when false). | -| `MaxSboms` | `10000` | Upper bound for targeted SBOM identifiers before the worker submits the job unchanged. | -| `DefaultUsageOnly` | `false` | Default `usageOnly` flag when metadata omits a preference. 
| - -## Metadata Contract (current support) - -| Key | Purpose | Example | -| --- | --- | --- | -| `delta.sboms`, `delta.sbomset` | Direct SBOM identifiers | `sbom:S-42,sbom:S-318` | -| `delta.purls`, `delta.productkeys`, `delta.components` | Component product keys | `pkg:npm/lodash@4.17.21` | -| `delta.vulns`, `delta.vulnerabilities`, `delta.cves` | Vulnerability identifiers | `CVE-2025-1234` | -| `policy.selector.scope` | Selector scope (`all-images`, `namespace`, `repository`, `digest`, `labels`) | `namespace` | -| `policy.selector.namespaces` / `repositories` / `digests` | Selector values (comma/semicolon separated or JSON array) | `prod-team` | -| `policy.selector.labels` | Label filters (`key=value1|value2`) | `env=prod|staging` | -| `policy.selector.includeTags` / `policy.selector.tags` | Tag glob filters | `prod-*;stable` | -| `policy.selector.resolvesTags` | Whether to resolve tags at runtime (`true`/`false`) | `true` | -| `policy.selector.usageOnly` / `delta.usageOnly` | Override default usage-only flag | `true` | - -Unrecognised metadata is ignored; malformed selectors fall back to -`SelectorScope.AllImages` for safety. - -## Behaviour Summary - -1. Lease policy job (`dispatching` state). -2. Targeting service inspects metadata and resolves impacted SBOMs via direct - lists and/or the impact index (purls/vulns). -3. If the resulting set is empty, the job is marked completed without - submission and metrics log `result=no_work`. -4. When non-empty (and within `MaxSboms`), the job inputs are updated with a - sorted, deduplicated SBOM set before invoking `PolicyRunExecutionService`. -5. Failures during impact resolution (e.g., index unavailability) produce a - warning and fall back to unchanged behaviour so the orchestrator can retry. - -## Testing - -- `PolicyRunTargetingServiceTests` cover metadata parsing, impact index - integration, limit enforcement, and fallback paths. -- `PolicyRunExecutionServiceTests.ExecuteAsync_NoWork_CompletesJob` validates - repository updates, metrics, and dispatch logs when a delta resolves to no work. -- Existing execution tests were updated to run through the targeting pipeline - using a stubbed service. - ---- - -_Last updated: 2025-10-29._ +# SCHED-WORKER-20-302 — Policy Delta Targeting + +_Sprint 20 · Scheduler Worker Guild_ + +This milestone wires policy delta targeting into the scheduler worker so incremental +policy jobs only re-evaluate SBOMs impacted by recent change-stream events. + +## Highlights + +- New `PolicyRunTargetingService` resolves pending incremental jobs before + submission, using metadata assembled by the policy orchestrator. +- Supports delta metadata for: + - Explicit SBOM lists (`delta.sboms`, `delta.sbomset`). + - Component product keys (`delta.purls`, `delta.productkeys`). + - Vulnerability identifiers (`delta.vulns`, `delta.vulnerabilities`, `delta.cves`). +- Metadata-derived selectors (scope, namespaces, repositories, digests, labels) + and usage-only hints constrain impact index lookups per policy. +- Impact index results capture SBOM identifiers from labels (`sbom`, `sbomId`, + `sbom_id`). When absent, the worker falls back to `sbom:{digest}` so the Policy + Engine still receives deterministic request payloads. +- No-work detections mark jobs `Completed` without contacting Policy Engine and + emit `scheduler_policy_run_events_total{result="no_work"}` with the normalized + reason (`no_matches` / `normalized_empty`). 
+- Guardrails: configurable maximum SBOM count (default 10 000) avoids sending + overly large incremental runs; excess falls back to full run behaviour. + +## Configuration + +`SchedulerWorkerOptions.Policy.Targeting` + +| Option | Default | Description | +| --- | --- | --- | +| `Enabled` | `true` | Toggle delta targeting (falls back to legacy behaviour when false). | +| `MaxSboms` | `10000` | Upper bound for targeted SBOM identifiers before the worker submits the job unchanged. | +| `DefaultUsageOnly` | `false` | Default `usageOnly` flag when metadata omits a preference. | + +## Metadata Contract (current support) + +| Key | Purpose | Example | +| --- | --- | --- | +| `delta.sboms`, `delta.sbomset` | Direct SBOM identifiers | `sbom:S-42,sbom:S-318` | +| `delta.purls`, `delta.productkeys`, `delta.components` | Component product keys | `pkg:npm/lodash@4.17.21` | +| `delta.vulns`, `delta.vulnerabilities`, `delta.cves` | Vulnerability identifiers | `CVE-2025-1234` | +| `policy.selector.scope` | Selector scope (`all-images`, `namespace`, `repository`, `digest`, `labels`) | `namespace` | +| `policy.selector.namespaces` / `repositories` / `digests` | Selector values (comma/semicolon separated or JSON array) | `prod-team` | +| `policy.selector.labels` | Label filters (`key=value1|value2`) | `env=prod|staging` | +| `policy.selector.includeTags` / `policy.selector.tags` | Tag glob filters | `prod-*;stable` | +| `policy.selector.resolvesTags` | Whether to resolve tags at runtime (`true`/`false`) | `true` | +| `policy.selector.usageOnly` / `delta.usageOnly` | Override default usage-only flag | `true` | + +Unrecognised metadata is ignored; malformed selectors fall back to +`SelectorScope.AllImages` for safety. + +## Behaviour Summary + +1. Lease policy job (`dispatching` state). +2. Targeting service inspects metadata and resolves impacted SBOMs via direct + lists and/or the impact index (purls/vulns). +3. If the resulting set is empty, the job is marked completed without + submission and metrics log `result=no_work`. +4. When non-empty (and within `MaxSboms`), the job inputs are updated with a + sorted, deduplicated SBOM set before invoking `PolicyRunExecutionService`. +5. Failures during impact resolution (e.g., index unavailability) produce a + warning and fall back to unchanged behaviour so the orchestrator can retry. + +## Testing + +- `PolicyRunTargetingServiceTests` cover metadata parsing, impact index + integration, limit enforcement, and fallback paths. +- `PolicyRunExecutionServiceTests.ExecuteAsync_NoWork_CompletesJob` validates + repository updates, metrics, and dispatch logs when a delta resolves to no work. +- Existing execution tests were updated to run through the targeting pipeline + using a stubbed service. + +--- + +_Last updated: 2025-10-29._ diff --git a/src/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-21-201-GRAPH-BUILD.md b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-21-201-GRAPH-BUILD.md similarity index 97% rename from src/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-21-201-GRAPH-BUILD.md rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-21-201-GRAPH-BUILD.md index 3acea22f..a9dc623f 100644 --- a/src/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-21-201-GRAPH-BUILD.md +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-21-201-GRAPH-BUILD.md @@ -71,7 +71,7 @@ Scheduler WebService webhook so downstream systems observe completion events. 
- Added `GraphBuildExecutionServiceTests` covering success, retry/failure, and concurrency-skip paths using stubs for the repository and HTTP clients. -- `dotnet test src/StellaOps.Scheduler.Worker.Tests` +- `dotnet test src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests` --- diff --git a/src/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-21-202-GRAPH-OVERLAY.md b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-21-202-GRAPH-OVERLAY.md similarity index 97% rename from src/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-21-202-GRAPH-OVERLAY.md rename to src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-21-202-GRAPH-OVERLAY.md index 43ff3c16..1561f94d 100644 --- a/src/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-21-202-GRAPH-OVERLAY.md +++ b/src/Scheduler/__Libraries/StellaOps.Scheduler.Worker/docs/SCHED-WORKER-21-202-GRAPH-OVERLAY.md @@ -69,7 +69,7 @@ snapshots. - `GraphOverlayExecutionServiceTests` validate success, retry, failure, and concurrency-skip behaviour using stubbed repository/clients. -- `dotnet test src/StellaOps.Scheduler.Worker.Tests` +- `dotnet test src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests` --- diff --git a/src/StellaOps.Scheduler.ImpactIndex.Tests/FixtureImpactIndexTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.ImpactIndex.Tests/FixtureImpactIndexTests.cs similarity index 97% rename from src/StellaOps.Scheduler.ImpactIndex.Tests/FixtureImpactIndexTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.ImpactIndex.Tests/FixtureImpactIndexTests.cs index e0764638..1cff0f0d 100644 --- a/src/StellaOps.Scheduler.ImpactIndex.Tests/FixtureImpactIndexTests.cs +++ b/src/Scheduler/__Tests/StellaOps.Scheduler.ImpactIndex.Tests/FixtureImpactIndexTests.cs @@ -1,142 +1,142 @@ -using System; -using System.IO; -using System.Linq; -using System.Threading.Tasks; -using FluentAssertions; -using Microsoft.Extensions.Logging; -using StellaOps.Scheduler.ImpactIndex; -using StellaOps.Scheduler.Models; -using Xunit; - -namespace StellaOps.Scheduler.ImpactIndex.Tests; - -public sealed class FixtureImpactIndexTests -{ - [Fact] - public async Task ResolveByPurls_UsesEmbeddedFixtures() - { - var selector = new Selector(SelectorScope.AllImages); - var (impactIndex, loggerFactory) = CreateImpactIndex(); - using var _ = loggerFactory; - - var result = await impactIndex.ResolveByPurlsAsync( - new[] { "pkg:apk/alpine/openssl@3.2.2-r0?arch=x86_64" }, - usageOnly: false, - selector); - - result.UsageOnly.Should().BeFalse(); - result.Images.Should().ContainSingle(); - - var image = result.Images.Single(); - image.ImageDigest.Should().Be("sha256:8f47d7c6b538c0d9533b78913cba3d5e671e7c4b4e7c6a2bb9a1a1c4d4f8e123"); - image.Registry.Should().Be("docker.io"); - image.Repository.Should().Be("library/nginx"); - image.Tags.Should().ContainSingle(tag => tag == "1.25.4"); - image.UsedByEntrypoint.Should().BeTrue(); - - result.GeneratedAt.Should().Be(DateTimeOffset.Parse("2025-10-19T00:00:00Z")); - result.SchemaVersion.Should().Be(SchedulerSchemaVersions.ImpactSet); - } - - [Fact] - public async Task ResolveByPurls_UsageOnlyFiltersInventoryOnlyComponents() - { - var selector = new Selector(SelectorScope.AllImages); - var (impactIndex, loggerFactory) = CreateImpactIndex(); - using var _ = loggerFactory; - - var inventoryOnlyPurl = "pkg:apk/alpine/pcre2@10.42-r1?arch=x86_64"; - - var runtimeResult = await impactIndex.ResolveByPurlsAsync( - new[] { inventoryOnlyPurl }, - usageOnly: true, - selector); - - runtimeResult.Images.Should().BeEmpty(); - - var 
inventoryResult = await impactIndex.ResolveByPurlsAsync( - new[] { inventoryOnlyPurl }, - usageOnly: false, - selector); - - inventoryResult.Images.Should().ContainSingle(); - inventoryResult.Images.Single().UsedByEntrypoint.Should().BeFalse(); - } - - [Fact] - public async Task ResolveAll_ReturnsDeterministicFixtureSet() - { - var selector = new Selector(SelectorScope.AllImages); - var (impactIndex, loggerFactory) = CreateImpactIndex(); - using var _ = loggerFactory; - - var first = await impactIndex.ResolveAllAsync(selector, usageOnly: false); - first.Images.Should().HaveCount(6); - - var second = await impactIndex.ResolveAllAsync(selector, usageOnly: false); - second.Images.Should().HaveCount(6); - second.Images.Should().Equal(first.Images); - } - - [Fact] - public async Task ResolveByVulnerabilities_ReturnsEmptySet() - { - var selector = new Selector(SelectorScope.AllImages); - var (impactIndex, loggerFactory) = CreateImpactIndex(); - using var _ = loggerFactory; - - var result = await impactIndex.ResolveByVulnerabilitiesAsync( - new[] { "CVE-2025-0001" }, - usageOnly: false, - selector); - - result.Images.Should().BeEmpty(); - } - - [Fact] - public async Task FixtureDirectoryOption_LoadsFromFileSystem() - { - var selector = new Selector(SelectorScope.AllImages); - var samplesDirectory = LocateSamplesDirectory(); - var (impactIndex, loggerFactory) = CreateImpactIndex(options => - { - options.FixtureDirectory = samplesDirectory; - }); - using var _ = loggerFactory; - - var result = await impactIndex.ResolveAllAsync(selector, usageOnly: false); - - result.Images.Should().HaveCount(6); - } - - private static (FixtureImpactIndex ImpactIndex, ILoggerFactory LoggerFactory) CreateImpactIndex( - Action<ImpactIndexStubOptions>? configure = null) - { - var options = new ImpactIndexStubOptions(); - configure?.Invoke(options); - - var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); - var logger = loggerFactory.CreateLogger<FixtureImpactIndex>(); - - var impactIndex = new FixtureImpactIndex(options, TimeProvider.System, logger); - return (impactIndex, loggerFactory); - } - - private static string LocateSamplesDirectory() - { - var current = AppContext.BaseDirectory; - - while (!string.IsNullOrWhiteSpace(current)) - { - var candidate = Path.Combine(current, "samples", "scanner", "images"); - if (Directory.Exists(candidate)) - { - return candidate; - } - - current = Directory.GetParent(current)?.FullName; - } - - throw new InvalidOperationException("Unable to locate 'samples/scanner/images'."); - } -} +using System; +using System.IO; +using System.Linq; +using System.Threading.Tasks; +using FluentAssertions; +using Microsoft.Extensions.Logging; +using StellaOps.Scheduler.ImpactIndex; +using StellaOps.Scheduler.Models; +using Xunit; + +namespace StellaOps.Scheduler.ImpactIndex.Tests; + +public sealed class FixtureImpactIndexTests +{ + [Fact] + public async Task ResolveByPurls_UsesEmbeddedFixtures() + { + var selector = new Selector(SelectorScope.AllImages); + var (impactIndex, loggerFactory) = CreateImpactIndex(); + using var _ = loggerFactory; + + var result = await impactIndex.ResolveByPurlsAsync( + new[] { "pkg:apk/alpine/openssl@3.2.2-r0?arch=x86_64" }, + usageOnly: false, + selector); + + result.UsageOnly.Should().BeFalse(); + result.Images.Should().ContainSingle(); + + var image = result.Images.Single(); + image.ImageDigest.Should().Be("sha256:8f47d7c6b538c0d9533b78913cba3d5e671e7c4b4e7c6a2bb9a1a1c4d4f8e123"); + image.Registry.Should().Be("docker.io"); + 
image.Repository.Should().Be("library/nginx"); + image.Tags.Should().ContainSingle(tag => tag == "1.25.4"); + image.UsedByEntrypoint.Should().BeTrue(); + + result.GeneratedAt.Should().Be(DateTimeOffset.Parse("2025-10-19T00:00:00Z")); + result.SchemaVersion.Should().Be(SchedulerSchemaVersions.ImpactSet); + } + + [Fact] + public async Task ResolveByPurls_UsageOnlyFiltersInventoryOnlyComponents() + { + var selector = new Selector(SelectorScope.AllImages); + var (impactIndex, loggerFactory) = CreateImpactIndex(); + using var _ = loggerFactory; + + var inventoryOnlyPurl = "pkg:apk/alpine/pcre2@10.42-r1?arch=x86_64"; + + var runtimeResult = await impactIndex.ResolveByPurlsAsync( + new[] { inventoryOnlyPurl }, + usageOnly: true, + selector); + + runtimeResult.Images.Should().BeEmpty(); + + var inventoryResult = await impactIndex.ResolveByPurlsAsync( + new[] { inventoryOnlyPurl }, + usageOnly: false, + selector); + + inventoryResult.Images.Should().ContainSingle(); + inventoryResult.Images.Single().UsedByEntrypoint.Should().BeFalse(); + } + + [Fact] + public async Task ResolveAll_ReturnsDeterministicFixtureSet() + { + var selector = new Selector(SelectorScope.AllImages); + var (impactIndex, loggerFactory) = CreateImpactIndex(); + using var _ = loggerFactory; + + var first = await impactIndex.ResolveAllAsync(selector, usageOnly: false); + first.Images.Should().HaveCount(6); + + var second = await impactIndex.ResolveAllAsync(selector, usageOnly: false); + second.Images.Should().HaveCount(6); + second.Images.Should().Equal(first.Images); + } + + [Fact] + public async Task ResolveByVulnerabilities_ReturnsEmptySet() + { + var selector = new Selector(SelectorScope.AllImages); + var (impactIndex, loggerFactory) = CreateImpactIndex(); + using var _ = loggerFactory; + + var result = await impactIndex.ResolveByVulnerabilitiesAsync( + new[] { "CVE-2025-0001" }, + usageOnly: false, + selector); + + result.Images.Should().BeEmpty(); + } + + [Fact] + public async Task FixtureDirectoryOption_LoadsFromFileSystem() + { + var selector = new Selector(SelectorScope.AllImages); + var samplesDirectory = LocateSamplesDirectory(); + var (impactIndex, loggerFactory) = CreateImpactIndex(options => + { + options.FixtureDirectory = samplesDirectory; + }); + using var _ = loggerFactory; + + var result = await impactIndex.ResolveAllAsync(selector, usageOnly: false); + + result.Images.Should().HaveCount(6); + } + + private static (FixtureImpactIndex ImpactIndex, ILoggerFactory LoggerFactory) CreateImpactIndex( + Action<ImpactIndexStubOptions>? 
configure = null) + { + var options = new ImpactIndexStubOptions(); + configure?.Invoke(options); + + var loggerFactory = LoggerFactory.Create(builder => builder.SetMinimumLevel(LogLevel.Debug)); + var logger = loggerFactory.CreateLogger<FixtureImpactIndex>(); + + var impactIndex = new FixtureImpactIndex(options, TimeProvider.System, logger); + return (impactIndex, loggerFactory); + } + + private static string LocateSamplesDirectory() + { + var current = AppContext.BaseDirectory; + + while (!string.IsNullOrWhiteSpace(current)) + { + var candidate = Path.Combine(current, "samples", "scanner", "images"); + if (Directory.Exists(candidate)) + { + return candidate; + } + + current = Directory.GetParent(current)?.FullName; + } + + throw new InvalidOperationException("Unable to locate 'samples/scanner/images'."); + } +} diff --git a/src/StellaOps.Scheduler.ImpactIndex.Tests/RoaringImpactIndexTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.ImpactIndex.Tests/RoaringImpactIndexTests.cs similarity index 97% rename from src/StellaOps.Scheduler.ImpactIndex.Tests/RoaringImpactIndexTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.ImpactIndex.Tests/RoaringImpactIndexTests.cs index 9bea4c9e..a0a9d73d 100644 --- a/src/StellaOps.Scheduler.ImpactIndex.Tests/RoaringImpactIndexTests.cs +++ b/src/Scheduler/__Tests/StellaOps.Scheduler.ImpactIndex.Tests/RoaringImpactIndexTests.cs @@ -1,195 +1,195 @@ -using System; -using System.Collections.Immutable; -using System.IO; -using FluentAssertions; -using Microsoft.Extensions.Logging.Abstractions; -using StellaOps.Scheduler.ImpactIndex.Ingestion; -using StellaOps.Scheduler.Models; -using StellaOps.Scanner.Core.Contracts; -using StellaOps.Scanner.Emit.Index; -using Xunit; - -namespace StellaOps.Scheduler.ImpactIndex.Tests; - -public sealed class RoaringImpactIndexTests -{ - [Fact] - public async Task IngestAsync_RegistersComponentsAndUsage() - { - var (stream, digest) = CreateBomIndex( - ComponentIdentity.Create("pkg:npm/a@1.0.0", "a", "1.0.0", "pkg:npm/a@1.0.0"), - ComponentUsage.Create(true, new[] { "/app/start.sh" })); - - var index = new RoaringImpactIndex(NullLogger<RoaringImpactIndex>.Instance); - var request = new ImpactIndexIngestionRequest - { - TenantId = "tenant-alpha", - ImageDigest = digest, - Registry = "docker.io", - Repository = "library/alpine", - Namespaces = ImmutableArray.Create("team-a"), - Tags = ImmutableArray.Create("3.20"), - Labels = ImmutableSortedDictionary.CreateRange(StringComparer.OrdinalIgnoreCase, new[] - { - new KeyValuePair<string, string>("env", "prod") - }), - BomIndexStream = stream, - }; - - await index.IngestAsync(request, CancellationToken.None); - - var selector = new Selector(SelectorScope.AllImages, tenantId: "tenant-alpha"); - var impactSet = await index.ResolveByPurlsAsync(new[] { "pkg:npm/a@1.0.0" }, usageOnly: false, selector); - - impactSet.Images.Should().HaveCount(1); - impactSet.Images[0].ImageDigest.Should().Be(digest); - impactSet.Images[0].Tags.Should().ContainSingle(tag => tag == "3.20"); - impactSet.Images[0].UsedByEntrypoint.Should().BeTrue(); - - var usageOnly = await index.ResolveByPurlsAsync(new[] { "pkg:npm/a@1.0.0" }, usageOnly: true, selector); - usageOnly.Images.Should().HaveCount(1); - } - - [Fact] - public async Task IngestAsync_ReplacesExistingImageData() - { - var component = ComponentIdentity.Create("pkg:npm/a@1.0.0", "a", "1.0.0", "pkg:npm/a@1.0.0"); - var (initialStream, digest) = CreateBomIndex(component, ComponentUsage.Create(false)); - var index = new 
RoaringImpactIndex(NullLogger<RoaringImpactIndex>.Instance); - - await index.IngestAsync(new ImpactIndexIngestionRequest - { - TenantId = "tenant-alpha", - ImageDigest = digest, - Registry = "docker.io", - Repository = "library/alpine", - Tags = ImmutableArray.Create("v1"), - BomIndexStream = initialStream, - }); - - var (updatedStream, _) = CreateBomIndex(component, ComponentUsage.Create(true, new[] { "/start.sh" }), digest); - await index.IngestAsync(new ImpactIndexIngestionRequest - { - TenantId = "tenant-alpha", - ImageDigest = digest, - Registry = "docker.io", - Repository = "library/alpine", - Tags = ImmutableArray.Create("v2"), - BomIndexStream = updatedStream, - }); - - var selector = new Selector(SelectorScope.AllImages, tenantId: "tenant-alpha"); - var impactSet = await index.ResolveByPurlsAsync(new[] { "pkg:npm/a@1.0.0" }, usageOnly: true, selector); - - impactSet.Images.Should().HaveCount(1); - impactSet.Images[0].Tags.Should().ContainSingle(tag => tag == "v2"); - impactSet.Images[0].UsedByEntrypoint.Should().BeTrue(); - } - - [Fact] - public async Task ResolveByPurlsAsync_RespectsTenantNamespaceAndTagFilters() - { - var component = ComponentIdentity.Create("pkg:npm/a@1.0.0", "a", "1.0.0", "pkg:npm/a@1.0.0"); - var (tenantStream, tenantDigest) = CreateBomIndex(component, ComponentUsage.Create(true, new[] { "/start.sh" })); - var (otherStream, otherDigest) = CreateBomIndex(component, ComponentUsage.Create(false)); - - var index = new RoaringImpactIndex(NullLogger<RoaringImpactIndex>.Instance); - - await index.IngestAsync(new ImpactIndexIngestionRequest - { - TenantId = "tenant-alpha", - ImageDigest = tenantDigest, - Registry = "docker.io", - Repository = "library/service", - Namespaces = ImmutableArray.Create("team-alpha"), - Tags = ImmutableArray.Create("prod-eu"), - BomIndexStream = tenantStream, - }); - - await index.IngestAsync(new ImpactIndexIngestionRequest - { - TenantId = "tenant-beta", - ImageDigest = otherDigest, - Registry = "docker.io", - Repository = "library/service", - Namespaces = ImmutableArray.Create("team-beta"), - Tags = ImmutableArray.Create("staging-us"), - BomIndexStream = otherStream, - }); - - var selector = new Selector( - SelectorScope.AllImages, - tenantId: "tenant-alpha", - namespaces: new[] { "team-alpha" }, - includeTags: new[] { "prod-*" }); - - var result = await index.ResolveByPurlsAsync(new[] { "pkg:npm/a@1.0.0" }, usageOnly: true, selector); - - result.Images.Should().ContainSingle(image => image.ImageDigest == tenantDigest); - result.Images[0].Tags.Should().Contain("prod-eu"); - } - - [Fact] - public async Task ResolveAllAsync_UsageOnlyFiltersEntrypointImages() - { - var component = ComponentIdentity.Create("pkg:npm/a@1.0.0", "a", "1.0.0", "pkg:npm/a@1.0.0"); - var (entryStream, entryDigest) = CreateBomIndex(component, ComponentUsage.Create(true, new[] { "/start.sh" })); - var nonEntryDigestValue = "sha256:" + new string('1', 64); - var (nonEntryStream, nonEntryDigest) = CreateBomIndex(component, ComponentUsage.Create(false), nonEntryDigestValue); - - var index = new RoaringImpactIndex(NullLogger<RoaringImpactIndex>.Instance); - - await index.IngestAsync(new ImpactIndexIngestionRequest - { - TenantId = "tenant-alpha", - ImageDigest = entryDigest, - Registry = "docker.io", - Repository = "library/service", - BomIndexStream = entryStream, - }); - - await index.IngestAsync(new ImpactIndexIngestionRequest - { - TenantId = "tenant-alpha", - ImageDigest = nonEntryDigest, - Registry = "docker.io", - Repository = "library/service", - BomIndexStream = 
nonEntryStream, - }); - - var selector = new Selector(SelectorScope.AllImages, tenantId: "tenant-alpha"); - - var usageOnly = await index.ResolveAllAsync(selector, usageOnly: true); - usageOnly.Images.Should().ContainSingle(image => image.ImageDigest == entryDigest); - - var allImages = await index.ResolveAllAsync(selector, usageOnly: false); - allImages.Images.Should().HaveCount(2); - } - - private static (Stream Stream, string Digest) CreateBomIndex(ComponentIdentity identity, ComponentUsage usage, string? digest = null) - { - var layer = LayerComponentFragment.Create( - "sha256:layer1", - new[] - { - new ComponentRecord - { - Identity = identity, - LayerDigest = "sha256:layer1", - Usage = usage, - } - }); - - var graph = ComponentGraphBuilder.Build(new[] { layer }); - var effectiveDigest = digest ?? "sha256:" + Guid.NewGuid().ToString("N"); - var builder = new BomIndexBuilder(); - var artifact = builder.Build(new BomIndexBuildRequest - { - ImageDigest = effectiveDigest, - Graph = graph, - GeneratedAt = DateTimeOffset.UtcNow, - }); - - return (new MemoryStream(artifact.Bytes, writable: false), effectiveDigest); - } -} +using System; +using System.Collections.Immutable; +using System.IO; +using FluentAssertions; +using Microsoft.Extensions.Logging.Abstractions; +using StellaOps.Scheduler.ImpactIndex.Ingestion; +using StellaOps.Scheduler.Models; +using StellaOps.Scanner.Core.Contracts; +using StellaOps.Scanner.Emit.Index; +using Xunit; + +namespace StellaOps.Scheduler.ImpactIndex.Tests; + +public sealed class RoaringImpactIndexTests +{ + [Fact] + public async Task IngestAsync_RegistersComponentsAndUsage() + { + var (stream, digest) = CreateBomIndex( + ComponentIdentity.Create("pkg:npm/a@1.0.0", "a", "1.0.0", "pkg:npm/a@1.0.0"), + ComponentUsage.Create(true, new[] { "/app/start.sh" })); + + var index = new RoaringImpactIndex(NullLogger<RoaringImpactIndex>.Instance); + var request = new ImpactIndexIngestionRequest + { + TenantId = "tenant-alpha", + ImageDigest = digest, + Registry = "docker.io", + Repository = "library/alpine", + Namespaces = ImmutableArray.Create("team-a"), + Tags = ImmutableArray.Create("3.20"), + Labels = ImmutableSortedDictionary.CreateRange(StringComparer.OrdinalIgnoreCase, new[] + { + new KeyValuePair<string, string>("env", "prod") + }), + BomIndexStream = stream, + }; + + await index.IngestAsync(request, CancellationToken.None); + + var selector = new Selector(SelectorScope.AllImages, tenantId: "tenant-alpha"); + var impactSet = await index.ResolveByPurlsAsync(new[] { "pkg:npm/a@1.0.0" }, usageOnly: false, selector); + + impactSet.Images.Should().HaveCount(1); + impactSet.Images[0].ImageDigest.Should().Be(digest); + impactSet.Images[0].Tags.Should().ContainSingle(tag => tag == "3.20"); + impactSet.Images[0].UsedByEntrypoint.Should().BeTrue(); + + var usageOnly = await index.ResolveByPurlsAsync(new[] { "pkg:npm/a@1.0.0" }, usageOnly: true, selector); + usageOnly.Images.Should().HaveCount(1); + } + + [Fact] + public async Task IngestAsync_ReplacesExistingImageData() + { + var component = ComponentIdentity.Create("pkg:npm/a@1.0.0", "a", "1.0.0", "pkg:npm/a@1.0.0"); + var (initialStream, digest) = CreateBomIndex(component, ComponentUsage.Create(false)); + var index = new RoaringImpactIndex(NullLogger<RoaringImpactIndex>.Instance); + + await index.IngestAsync(new ImpactIndexIngestionRequest + { + TenantId = "tenant-alpha", + ImageDigest = digest, + Registry = "docker.io", + Repository = "library/alpine", + Tags = ImmutableArray.Create("v1"), + BomIndexStream = 
initialStream, + }); + + var (updatedStream, _) = CreateBomIndex(component, ComponentUsage.Create(true, new[] { "/start.sh" }), digest); + await index.IngestAsync(new ImpactIndexIngestionRequest + { + TenantId = "tenant-alpha", + ImageDigest = digest, + Registry = "docker.io", + Repository = "library/alpine", + Tags = ImmutableArray.Create("v2"), + BomIndexStream = updatedStream, + }); + + var selector = new Selector(SelectorScope.AllImages, tenantId: "tenant-alpha"); + var impactSet = await index.ResolveByPurlsAsync(new[] { "pkg:npm/a@1.0.0" }, usageOnly: true, selector); + + impactSet.Images.Should().HaveCount(1); + impactSet.Images[0].Tags.Should().ContainSingle(tag => tag == "v2"); + impactSet.Images[0].UsedByEntrypoint.Should().BeTrue(); + } + + [Fact] + public async Task ResolveByPurlsAsync_RespectsTenantNamespaceAndTagFilters() + { + var component = ComponentIdentity.Create("pkg:npm/a@1.0.0", "a", "1.0.0", "pkg:npm/a@1.0.0"); + var (tenantStream, tenantDigest) = CreateBomIndex(component, ComponentUsage.Create(true, new[] { "/start.sh" })); + var (otherStream, otherDigest) = CreateBomIndex(component, ComponentUsage.Create(false)); + + var index = new RoaringImpactIndex(NullLogger<RoaringImpactIndex>.Instance); + + await index.IngestAsync(new ImpactIndexIngestionRequest + { + TenantId = "tenant-alpha", + ImageDigest = tenantDigest, + Registry = "docker.io", + Repository = "library/service", + Namespaces = ImmutableArray.Create("team-alpha"), + Tags = ImmutableArray.Create("prod-eu"), + BomIndexStream = tenantStream, + }); + + await index.IngestAsync(new ImpactIndexIngestionRequest + { + TenantId = "tenant-beta", + ImageDigest = otherDigest, + Registry = "docker.io", + Repository = "library/service", + Namespaces = ImmutableArray.Create("team-beta"), + Tags = ImmutableArray.Create("staging-us"), + BomIndexStream = otherStream, + }); + + var selector = new Selector( + SelectorScope.AllImages, + tenantId: "tenant-alpha", + namespaces: new[] { "team-alpha" }, + includeTags: new[] { "prod-*" }); + + var result = await index.ResolveByPurlsAsync(new[] { "pkg:npm/a@1.0.0" }, usageOnly: true, selector); + + result.Images.Should().ContainSingle(image => image.ImageDigest == tenantDigest); + result.Images[0].Tags.Should().Contain("prod-eu"); + } + + [Fact] + public async Task ResolveAllAsync_UsageOnlyFiltersEntrypointImages() + { + var component = ComponentIdentity.Create("pkg:npm/a@1.0.0", "a", "1.0.0", "pkg:npm/a@1.0.0"); + var (entryStream, entryDigest) = CreateBomIndex(component, ComponentUsage.Create(true, new[] { "/start.sh" })); + var nonEntryDigestValue = "sha256:" + new string('1', 64); + var (nonEntryStream, nonEntryDigest) = CreateBomIndex(component, ComponentUsage.Create(false), nonEntryDigestValue); + + var index = new RoaringImpactIndex(NullLogger<RoaringImpactIndex>.Instance); + + await index.IngestAsync(new ImpactIndexIngestionRequest + { + TenantId = "tenant-alpha", + ImageDigest = entryDigest, + Registry = "docker.io", + Repository = "library/service", + BomIndexStream = entryStream, + }); + + await index.IngestAsync(new ImpactIndexIngestionRequest + { + TenantId = "tenant-alpha", + ImageDigest = nonEntryDigest, + Registry = "docker.io", + Repository = "library/service", + BomIndexStream = nonEntryStream, + }); + + var selector = new Selector(SelectorScope.AllImages, tenantId: "tenant-alpha"); + + var usageOnly = await index.ResolveAllAsync(selector, usageOnly: true); + usageOnly.Images.Should().ContainSingle(image => image.ImageDigest == entryDigest); + + var allImages = await 
index.ResolveAllAsync(selector, usageOnly: false); + allImages.Images.Should().HaveCount(2); + } + + private static (Stream Stream, string Digest) CreateBomIndex(ComponentIdentity identity, ComponentUsage usage, string? digest = null) + { + var layer = LayerComponentFragment.Create( + "sha256:layer1", + new[] + { + new ComponentRecord + { + Identity = identity, + LayerDigest = "sha256:layer1", + Usage = usage, + } + }); + + var graph = ComponentGraphBuilder.Build(new[] { layer }); + var effectiveDigest = digest ?? "sha256:" + Guid.NewGuid().ToString("N"); + var builder = new BomIndexBuilder(); + var artifact = builder.Build(new BomIndexBuildRequest + { + ImageDigest = effectiveDigest, + Graph = graph, + GeneratedAt = DateTimeOffset.UtcNow, + }); + + return (new MemoryStream(artifact.Bytes, writable: false), effectiveDigest); + } +} diff --git a/src/StellaOps.Scheduler.ImpactIndex.Tests/StellaOps.Scheduler.ImpactIndex.Tests.csproj b/src/Scheduler/__Tests/StellaOps.Scheduler.ImpactIndex.Tests/StellaOps.Scheduler.ImpactIndex.Tests.csproj similarity index 68% rename from src/StellaOps.Scheduler.ImpactIndex.Tests/StellaOps.Scheduler.ImpactIndex.Tests.csproj rename to src/Scheduler/__Tests/StellaOps.Scheduler.ImpactIndex.Tests/StellaOps.Scheduler.ImpactIndex.Tests.csproj index 5652955b..0a8ebc5c 100644 --- a/src/StellaOps.Scheduler.ImpactIndex.Tests/StellaOps.Scheduler.ImpactIndex.Tests.csproj +++ b/src/Scheduler/__Tests/StellaOps.Scheduler.ImpactIndex.Tests/StellaOps.Scheduler.ImpactIndex.Tests.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -19,8 +20,8 @@ </PackageReference> </ItemGroup> <ItemGroup> - <ProjectReference Include="..\StellaOps.Scheduler.ImpactIndex\StellaOps.Scheduler.ImpactIndex.csproj" /> - <ProjectReference Include="..\StellaOps.Scheduler.Models\StellaOps.Scheduler.Models.csproj" /> - <ProjectReference Include="..\StellaOps.Scanner.Emit\StellaOps.Scanner.Emit.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Scheduler.ImpactIndex/StellaOps.Scheduler.ImpactIndex.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Scheduler.Models/StellaOps.Scheduler.Models.csproj" /> + <ProjectReference Include="../../../Scanner/__Libraries/StellaOps.Scanner.Emit/StellaOps.Scanner.Emit.csproj" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Scheduler.Models.Tests/AuditRecordTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.Models.Tests/AuditRecordTests.cs similarity index 100% rename from src/StellaOps.Scheduler.Models.Tests/AuditRecordTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.Models.Tests/AuditRecordTests.cs diff --git a/src/StellaOps.Scheduler.Models.Tests/GraphJobStateMachineTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.Models.Tests/GraphJobStateMachineTests.cs similarity index 97% rename from src/StellaOps.Scheduler.Models.Tests/GraphJobStateMachineTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.Models.Tests/GraphJobStateMachineTests.cs index ae8c7391..5d1f9f2d 100644 --- a/src/StellaOps.Scheduler.Models.Tests/GraphJobStateMachineTests.cs +++ b/src/Scheduler/__Tests/StellaOps.Scheduler.Models.Tests/GraphJobStateMachineTests.cs @@ -1,171 +1,171 @@ -namespace StellaOps.Scheduler.Models.Tests; - -public sealed class GraphJobStateMachineTests -{ - [Theory] - [InlineData(GraphJobStatus.Pending, GraphJobStatus.Pending, true)] - [InlineData(GraphJobStatus.Pending, 
GraphJobStatus.Queued, true)] - [InlineData(GraphJobStatus.Pending, GraphJobStatus.Running, true)] - [InlineData(GraphJobStatus.Pending, GraphJobStatus.Completed, false)] - [InlineData(GraphJobStatus.Queued, GraphJobStatus.Running, true)] - [InlineData(GraphJobStatus.Queued, GraphJobStatus.Completed, false)] - [InlineData(GraphJobStatus.Running, GraphJobStatus.Completed, true)] - [InlineData(GraphJobStatus.Running, GraphJobStatus.Pending, false)] - [InlineData(GraphJobStatus.Completed, GraphJobStatus.Failed, false)] - public void CanTransition_ReturnsExpectedResult(GraphJobStatus from, GraphJobStatus to, bool expected) - { - Assert.Equal(expected, GraphJobStateMachine.CanTransition(from, to)); - } - - [Fact] - public void EnsureTransition_UpdatesBuildJobLifecycle() - { - var createdAt = new DateTimeOffset(2025, 10, 26, 12, 0, 0, TimeSpan.Zero); - var job = new GraphBuildJob( - id: "gbj_1", - tenantId: "tenant-alpha", - sbomId: "sbom_1", - sbomVersionId: "sbom_ver_1", - sbomDigest: "sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef", - status: GraphJobStatus.Pending, - trigger: GraphBuildJobTrigger.SbomVersion, - createdAt: createdAt); - - var queuedAt = createdAt.AddSeconds(5); - job = GraphJobStateMachine.EnsureTransition(job, GraphJobStatus.Queued, queuedAt); - Assert.Equal(GraphJobStatus.Queued, job.Status); - Assert.Null(job.StartedAt); - Assert.Null(job.CompletedAt); - - var runningAt = queuedAt.AddSeconds(5); - job = GraphJobStateMachine.EnsureTransition(job, GraphJobStatus.Running, runningAt, attempts: job.Attempts + 1); - Assert.Equal(GraphJobStatus.Running, job.Status); - Assert.Equal(runningAt, job.StartedAt); - Assert.Null(job.CompletedAt); - - var completedAt = runningAt.AddSeconds(30); - job = GraphJobStateMachine.EnsureTransition(job, GraphJobStatus.Completed, completedAt); - Assert.Equal(GraphJobStatus.Completed, job.Status); - Assert.Equal(runningAt, job.StartedAt); - Assert.Equal(completedAt, job.CompletedAt); - Assert.Null(job.Error); - } - - [Fact] - public void EnsureTransition_ToFailedRequiresError() - { - var job = new GraphBuildJob( - id: "gbj_1", - tenantId: "tenant-alpha", - sbomId: "sbom_1", - sbomVersionId: "sbom_ver_1", - sbomDigest: "sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef", - status: GraphJobStatus.Running, - trigger: GraphBuildJobTrigger.SbomVersion, - createdAt: DateTimeOffset.UtcNow, - startedAt: DateTimeOffset.UtcNow); - - Assert.Throws<InvalidOperationException>(() => GraphJobStateMachine.EnsureTransition( - job, - GraphJobStatus.Failed, - DateTimeOffset.UtcNow)); - } - - [Fact] - public void EnsureTransition_ToFailedSetsError() - { - var job = new GraphOverlayJob( - id: "goj_1", - tenantId: "tenant-alpha", - graphSnapshotId: "graph_snap_1", - overlayKind: GraphOverlayKind.Policy, - overlayKey: "policy@latest", - status: GraphJobStatus.Running, - trigger: GraphOverlayJobTrigger.Policy, - createdAt: DateTimeOffset.UtcNow, - startedAt: DateTimeOffset.UtcNow); - - var failed = GraphJobStateMachine.EnsureTransition( - job, - GraphJobStatus.Failed, - DateTimeOffset.UtcNow, - errorMessage: "cartographer timeout"); - - Assert.Equal(GraphJobStatus.Failed, failed.Status); - Assert.NotNull(failed.CompletedAt); - Assert.Equal("cartographer timeout", failed.Error); - } - - [Fact] - public void Validate_RequiresCompletedAtForTerminalState() - { - var job = new GraphOverlayJob( - id: "goj_1", - tenantId: "tenant-alpha", - graphSnapshotId: "graph_snap_1", - overlayKind: GraphOverlayKind.Policy, - overlayKey: 
"policy@latest", - status: GraphJobStatus.Completed, - trigger: GraphOverlayJobTrigger.Policy, - createdAt: DateTimeOffset.UtcNow); - - Assert.Throws<InvalidOperationException>(() => GraphJobStateMachine.Validate(job)); - } - - [Fact] - public void GraphOverlayJob_NormalizesSubjectsAndMetadata() - { - var createdAt = DateTimeOffset.UtcNow; - var job = new GraphOverlayJob( - id: "goj_norm", - tenantId: "tenant-alpha", - graphSnapshotId: "graph_snap_norm", - overlayKind: GraphOverlayKind.Policy, - overlayKey: "policy@norm", - status: GraphJobStatus.Pending, - trigger: GraphOverlayJobTrigger.Policy, - createdAt: createdAt, - subjects: new[] - { - "artifact/service-api", - "artifact/service-ui", - "artifact/service-api" - }, - metadata: new[] - { - new KeyValuePair<string, string>("PolicyRunId", "run-123"), - new KeyValuePair<string, string>("policyRunId", "run-123") - }); - - Assert.Equal(2, job.Subjects.Length); - Assert.Collection( - job.Subjects, - subject => Assert.Equal("artifact/service-api", subject), - subject => Assert.Equal("artifact/service-ui", subject)); - - Assert.Single(job.Metadata); - Assert.Equal("run-123", job.Metadata["policyrunid"]); - } - - [Fact] - public void GraphBuildJob_NormalizesDigestAndMetadata() - { - var job = new GraphBuildJob( - id: "gbj_norm", - tenantId: "tenant-alpha", - sbomId: "sbom_norm", - sbomVersionId: "sbom_ver_norm", - sbomDigest: "SHA256:ABCDEF1234567890ABCDEF1234567890ABCDEF1234567890ABCDEF1234567890", - status: GraphJobStatus.Pending, - trigger: GraphBuildJobTrigger.Manual, - createdAt: DateTimeOffset.UtcNow, - metadata: new[] - { - new KeyValuePair<string, string>("SBoMEventId", "evt-42") - }); - - Assert.Equal("sha256:abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890", job.SbomDigest); - Assert.Single(job.Metadata); - Assert.Equal("evt-42", job.Metadata["sbomeventid"]); - } -} +namespace StellaOps.Scheduler.Models.Tests; + +public sealed class GraphJobStateMachineTests +{ + [Theory] + [InlineData(GraphJobStatus.Pending, GraphJobStatus.Pending, true)] + [InlineData(GraphJobStatus.Pending, GraphJobStatus.Queued, true)] + [InlineData(GraphJobStatus.Pending, GraphJobStatus.Running, true)] + [InlineData(GraphJobStatus.Pending, GraphJobStatus.Completed, false)] + [InlineData(GraphJobStatus.Queued, GraphJobStatus.Running, true)] + [InlineData(GraphJobStatus.Queued, GraphJobStatus.Completed, false)] + [InlineData(GraphJobStatus.Running, GraphJobStatus.Completed, true)] + [InlineData(GraphJobStatus.Running, GraphJobStatus.Pending, false)] + [InlineData(GraphJobStatus.Completed, GraphJobStatus.Failed, false)] + public void CanTransition_ReturnsExpectedResult(GraphJobStatus from, GraphJobStatus to, bool expected) + { + Assert.Equal(expected, GraphJobStateMachine.CanTransition(from, to)); + } + + [Fact] + public void EnsureTransition_UpdatesBuildJobLifecycle() + { + var createdAt = new DateTimeOffset(2025, 10, 26, 12, 0, 0, TimeSpan.Zero); + var job = new GraphBuildJob( + id: "gbj_1", + tenantId: "tenant-alpha", + sbomId: "sbom_1", + sbomVersionId: "sbom_ver_1", + sbomDigest: "sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef", + status: GraphJobStatus.Pending, + trigger: GraphBuildJobTrigger.SbomVersion, + createdAt: createdAt); + + var queuedAt = createdAt.AddSeconds(5); + job = GraphJobStateMachine.EnsureTransition(job, GraphJobStatus.Queued, queuedAt); + Assert.Equal(GraphJobStatus.Queued, job.Status); + Assert.Null(job.StartedAt); + Assert.Null(job.CompletedAt); + + var runningAt = queuedAt.AddSeconds(5); + job = 
GraphJobStateMachine.EnsureTransition(job, GraphJobStatus.Running, runningAt, attempts: job.Attempts + 1); + Assert.Equal(GraphJobStatus.Running, job.Status); + Assert.Equal(runningAt, job.StartedAt); + Assert.Null(job.CompletedAt); + + var completedAt = runningAt.AddSeconds(30); + job = GraphJobStateMachine.EnsureTransition(job, GraphJobStatus.Completed, completedAt); + Assert.Equal(GraphJobStatus.Completed, job.Status); + Assert.Equal(runningAt, job.StartedAt); + Assert.Equal(completedAt, job.CompletedAt); + Assert.Null(job.Error); + } + + [Fact] + public void EnsureTransition_ToFailedRequiresError() + { + var job = new GraphBuildJob( + id: "gbj_1", + tenantId: "tenant-alpha", + sbomId: "sbom_1", + sbomVersionId: "sbom_ver_1", + sbomDigest: "sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef", + status: GraphJobStatus.Running, + trigger: GraphBuildJobTrigger.SbomVersion, + createdAt: DateTimeOffset.UtcNow, + startedAt: DateTimeOffset.UtcNow); + + Assert.Throws<InvalidOperationException>(() => GraphJobStateMachine.EnsureTransition( + job, + GraphJobStatus.Failed, + DateTimeOffset.UtcNow)); + } + + [Fact] + public void EnsureTransition_ToFailedSetsError() + { + var job = new GraphOverlayJob( + id: "goj_1", + tenantId: "tenant-alpha", + graphSnapshotId: "graph_snap_1", + overlayKind: GraphOverlayKind.Policy, + overlayKey: "policy@latest", + status: GraphJobStatus.Running, + trigger: GraphOverlayJobTrigger.Policy, + createdAt: DateTimeOffset.UtcNow, + startedAt: DateTimeOffset.UtcNow); + + var failed = GraphJobStateMachine.EnsureTransition( + job, + GraphJobStatus.Failed, + DateTimeOffset.UtcNow, + errorMessage: "cartographer timeout"); + + Assert.Equal(GraphJobStatus.Failed, failed.Status); + Assert.NotNull(failed.CompletedAt); + Assert.Equal("cartographer timeout", failed.Error); + } + + [Fact] + public void Validate_RequiresCompletedAtForTerminalState() + { + var job = new GraphOverlayJob( + id: "goj_1", + tenantId: "tenant-alpha", + graphSnapshotId: "graph_snap_1", + overlayKind: GraphOverlayKind.Policy, + overlayKey: "policy@latest", + status: GraphJobStatus.Completed, + trigger: GraphOverlayJobTrigger.Policy, + createdAt: DateTimeOffset.UtcNow); + + Assert.Throws<InvalidOperationException>(() => GraphJobStateMachine.Validate(job)); + } + + [Fact] + public void GraphOverlayJob_NormalizesSubjectsAndMetadata() + { + var createdAt = DateTimeOffset.UtcNow; + var job = new GraphOverlayJob( + id: "goj_norm", + tenantId: "tenant-alpha", + graphSnapshotId: "graph_snap_norm", + overlayKind: GraphOverlayKind.Policy, + overlayKey: "policy@norm", + status: GraphJobStatus.Pending, + trigger: GraphOverlayJobTrigger.Policy, + createdAt: createdAt, + subjects: new[] + { + "artifact/service-api", + "artifact/service-ui", + "artifact/service-api" + }, + metadata: new[] + { + new KeyValuePair<string, string>("PolicyRunId", "run-123"), + new KeyValuePair<string, string>("policyRunId", "run-123") + }); + + Assert.Equal(2, job.Subjects.Length); + Assert.Collection( + job.Subjects, + subject => Assert.Equal("artifact/service-api", subject), + subject => Assert.Equal("artifact/service-ui", subject)); + + Assert.Single(job.Metadata); + Assert.Equal("run-123", job.Metadata["policyrunid"]); + } + + [Fact] + public void GraphBuildJob_NormalizesDigestAndMetadata() + { + var job = new GraphBuildJob( + id: "gbj_norm", + tenantId: "tenant-alpha", + sbomId: "sbom_norm", + sbomVersionId: "sbom_ver_norm", + sbomDigest: "SHA256:ABCDEF1234567890ABCDEF1234567890ABCDEF1234567890ABCDEF1234567890", + 
status: GraphJobStatus.Pending, + trigger: GraphBuildJobTrigger.Manual, + createdAt: DateTimeOffset.UtcNow, + metadata: new[] + { + new KeyValuePair<string, string>("SBoMEventId", "evt-42") + }); + + Assert.Equal("sha256:abcdef1234567890abcdef1234567890abcdef1234567890abcdef1234567890", job.SbomDigest); + Assert.Single(job.Metadata); + Assert.Equal("evt-42", job.Metadata["sbomeventid"]); + } +} diff --git a/src/StellaOps.Scheduler.Models.Tests/ImpactSetTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.Models.Tests/ImpactSetTests.cs similarity index 100% rename from src/StellaOps.Scheduler.Models.Tests/ImpactSetTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.Models.Tests/ImpactSetTests.cs diff --git a/src/StellaOps.Scheduler.Models.Tests/PolicyRunModelsTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.Models.Tests/PolicyRunModelsTests.cs similarity index 97% rename from src/StellaOps.Scheduler.Models.Tests/PolicyRunModelsTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.Models.Tests/PolicyRunModelsTests.cs index 241b490d..fbb33c38 100644 --- a/src/StellaOps.Scheduler.Models.Tests/PolicyRunModelsTests.cs +++ b/src/Scheduler/__Tests/StellaOps.Scheduler.Models.Tests/PolicyRunModelsTests.cs @@ -1,83 +1,83 @@ -using System.Collections.Immutable; -using System.Text.Json; -using StellaOps.Scheduler.Models; - -namespace StellaOps.Scheduler.Models.Tests; - -public sealed class PolicyRunModelsTests -{ - [Fact] - public void PolicyRunInputs_NormalizesEnvironmentKeys() - { - var inputs = new PolicyRunInputs( - sbomSet: new[] { "sbom:two", "sbom:one" }, - env: new[] - { - new KeyValuePair<string, object?>("Sealed", true), - new KeyValuePair<string, object?>("Exposure", "internet"), - new KeyValuePair<string, object?>("region", JsonSerializer.SerializeToElement("global")) - }, - captureExplain: true); - - Assert.Equal(new[] { "sbom:one", "sbom:two" }, inputs.SbomSet); - Assert.True(inputs.CaptureExplain); - Assert.Equal(3, inputs.Environment.Count); - Assert.True(inputs.Environment.ContainsKey("sealed")); - Assert.Equal(JsonValueKind.True, inputs.Environment["sealed"].ValueKind); - Assert.Equal("internet", inputs.Environment["exposure"].GetString()); - Assert.Equal("global", inputs.Environment["region"].GetString()); - } - - [Fact] - public void PolicyRunStatus_ThrowsOnNegativeAttempts() - { - Assert.Throws<ArgumentOutOfRangeException>(() => new PolicyRunStatus( - runId: "run:test", - tenantId: "tenant-alpha", - policyId: "P-1", - policyVersion: 1, - mode: PolicyRunMode.Full, - status: PolicyRunExecutionStatus.Queued, - priority: PolicyRunPriority.Normal, - queuedAt: DateTimeOffset.UtcNow, - attempts: -1)); - } - - [Fact] - public void PolicyDiffSummary_NormalizesSeverityKeys() - { - var summary = new PolicyDiffSummary( - added: 1, - removed: 2, - unchanged: 3, - bySeverity: new[] - { - new KeyValuePair<string, PolicyDiffSeverityDelta>("critical", new PolicyDiffSeverityDelta(1, 0)), - new KeyValuePair<string, PolicyDiffSeverityDelta>("HIGH", new PolicyDiffSeverityDelta(0, 1)) - }); - - Assert.True(summary.BySeverity.ContainsKey("Critical")); - Assert.True(summary.BySeverity.ContainsKey("High")); - } - - [Fact] - public void PolicyExplainTrace_LowercasesMetadataKeys() - { - var trace = new PolicyExplainTrace( - findingId: "finding:alpha", - policyId: "P-1", - policyVersion: 1, - tenantId: "tenant-alpha", - runId: "run:test", - verdict: new PolicyExplainVerdict(PolicyVerdictStatus.Passed, SeverityRank.Low, quiet: false, score: 0, rationale: "ok"), - evaluatedAt: 
DateTimeOffset.UtcNow, - metadata: ImmutableSortedDictionary.CreateRange(new[] - { - new KeyValuePair<string, string>("TraceId", "trace-1"), - new KeyValuePair<string, string>("ComponentPurl", "pkg:npm/a@1.0.0") - })); - - Assert.Equal("trace-1", trace.Metadata["traceid"]); - Assert.Equal("pkg:npm/a@1.0.0", trace.Metadata["componentpurl"]); - } -} +using System.Collections.Immutable; +using System.Text.Json; +using StellaOps.Scheduler.Models; + +namespace StellaOps.Scheduler.Models.Tests; + +public sealed class PolicyRunModelsTests +{ + [Fact] + public void PolicyRunInputs_NormalizesEnvironmentKeys() + { + var inputs = new PolicyRunInputs( + sbomSet: new[] { "sbom:two", "sbom:one" }, + env: new[] + { + new KeyValuePair<string, object?>("Sealed", true), + new KeyValuePair<string, object?>("Exposure", "internet"), + new KeyValuePair<string, object?>("region", JsonSerializer.SerializeToElement("global")) + }, + captureExplain: true); + + Assert.Equal(new[] { "sbom:one", "sbom:two" }, inputs.SbomSet); + Assert.True(inputs.CaptureExplain); + Assert.Equal(3, inputs.Environment.Count); + Assert.True(inputs.Environment.ContainsKey("sealed")); + Assert.Equal(JsonValueKind.True, inputs.Environment["sealed"].ValueKind); + Assert.Equal("internet", inputs.Environment["exposure"].GetString()); + Assert.Equal("global", inputs.Environment["region"].GetString()); + } + + [Fact] + public void PolicyRunStatus_ThrowsOnNegativeAttempts() + { + Assert.Throws<ArgumentOutOfRangeException>(() => new PolicyRunStatus( + runId: "run:test", + tenantId: "tenant-alpha", + policyId: "P-1", + policyVersion: 1, + mode: PolicyRunMode.Full, + status: PolicyRunExecutionStatus.Queued, + priority: PolicyRunPriority.Normal, + queuedAt: DateTimeOffset.UtcNow, + attempts: -1)); + } + + [Fact] + public void PolicyDiffSummary_NormalizesSeverityKeys() + { + var summary = new PolicyDiffSummary( + added: 1, + removed: 2, + unchanged: 3, + bySeverity: new[] + { + new KeyValuePair<string, PolicyDiffSeverityDelta>("critical", new PolicyDiffSeverityDelta(1, 0)), + new KeyValuePair<string, PolicyDiffSeverityDelta>("HIGH", new PolicyDiffSeverityDelta(0, 1)) + }); + + Assert.True(summary.BySeverity.ContainsKey("Critical")); + Assert.True(summary.BySeverity.ContainsKey("High")); + } + + [Fact] + public void PolicyExplainTrace_LowercasesMetadataKeys() + { + var trace = new PolicyExplainTrace( + findingId: "finding:alpha", + policyId: "P-1", + policyVersion: 1, + tenantId: "tenant-alpha", + runId: "run:test", + verdict: new PolicyExplainVerdict(PolicyVerdictStatus.Passed, SeverityRank.Low, quiet: false, score: 0, rationale: "ok"), + evaluatedAt: DateTimeOffset.UtcNow, + metadata: ImmutableSortedDictionary.CreateRange(new[] + { + new KeyValuePair<string, string>("TraceId", "trace-1"), + new KeyValuePair<string, string>("ComponentPurl", "pkg:npm/a@1.0.0") + })); + + Assert.Equal("trace-1", trace.Metadata["traceid"]); + Assert.Equal("pkg:npm/a@1.0.0", trace.Metadata["componentpurl"]); + } +} diff --git a/src/StellaOps.Scheduler.Models.Tests/RescanDeltaEventSampleTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.Models.Tests/RescanDeltaEventSampleTests.cs similarity index 100% rename from src/StellaOps.Scheduler.Models.Tests/RescanDeltaEventSampleTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.Models.Tests/RescanDeltaEventSampleTests.cs diff --git a/src/StellaOps.Scheduler.Models.Tests/RunStateMachineTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.Models.Tests/RunStateMachineTests.cs similarity index 100% rename from 
src/StellaOps.Scheduler.Models.Tests/RunStateMachineTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.Models.Tests/RunStateMachineTests.cs diff --git a/src/StellaOps.Scheduler.Models.Tests/RunValidationTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.Models.Tests/RunValidationTests.cs similarity index 100% rename from src/StellaOps.Scheduler.Models.Tests/RunValidationTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.Models.Tests/RunValidationTests.cs diff --git a/src/StellaOps.Scheduler.Models.Tests/SamplePayloadTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.Models.Tests/SamplePayloadTests.cs similarity index 100% rename from src/StellaOps.Scheduler.Models.Tests/SamplePayloadTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.Models.Tests/SamplePayloadTests.cs diff --git a/src/StellaOps.Scheduler.Models.Tests/ScheduleSerializationTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.Models.Tests/ScheduleSerializationTests.cs similarity index 100% rename from src/StellaOps.Scheduler.Models.Tests/ScheduleSerializationTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.Models.Tests/ScheduleSerializationTests.cs diff --git a/src/StellaOps.Scheduler.Models.Tests/SchedulerSchemaMigrationTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.Models.Tests/SchedulerSchemaMigrationTests.cs similarity index 100% rename from src/StellaOps.Scheduler.Models.Tests/SchedulerSchemaMigrationTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.Models.Tests/SchedulerSchemaMigrationTests.cs diff --git a/src/StellaOps.Scheduler.Models.Tests/StellaOps.Scheduler.Models.Tests.csproj b/src/Scheduler/__Tests/StellaOps.Scheduler.Models.Tests/StellaOps.Scheduler.Models.Tests.csproj similarity index 64% rename from src/StellaOps.Scheduler.Models.Tests/StellaOps.Scheduler.Models.Tests.csproj rename to src/Scheduler/__Tests/StellaOps.Scheduler.Models.Tests/StellaOps.Scheduler.Models.Tests.csproj index 39536098..d7acba76 100644 --- a/src/StellaOps.Scheduler.Models.Tests/StellaOps.Scheduler.Models.Tests.csproj +++ b/src/Scheduler/__Tests/StellaOps.Scheduler.Models.Tests/StellaOps.Scheduler.Models.Tests.csproj @@ -1,18 +1,19 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Scheduler.Models\StellaOps.Scheduler.Models.csproj" /> - <ProjectReference Include="..\StellaOps.Notify.Models\StellaOps.Notify.Models.csproj" /> - </ItemGroup> - <ItemGroup> - <None Include="..\..\docs\events\samples\scheduler.rescan.delta@1.sample.json"> - <CopyToOutputDirectory>Always</CopyToOutputDirectory> - </None> - </ItemGroup> -</Project> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Scheduler.Models/StellaOps.Scheduler.Models.csproj" /> + <ProjectReference Include="../../../Notify/__Libraries/StellaOps.Notify.Models/StellaOps.Notify.Models.csproj" /> + </ItemGroup> + <ItemGroup> + <None Include="..\..\docs\events\samples\scheduler.rescan.delta@1.sample.json"> + 
<CopyToOutputDirectory>Always</CopyToOutputDirectory> + </None> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Scheduler.Queue.Tests/PlannerAndRunnerMessageTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.Queue.Tests/PlannerAndRunnerMessageTests.cs similarity index 100% rename from src/StellaOps.Scheduler.Queue.Tests/PlannerAndRunnerMessageTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.Queue.Tests/PlannerAndRunnerMessageTests.cs diff --git a/src/StellaOps.Scheduler.Queue.Tests/RedisSchedulerQueueTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.Queue.Tests/RedisSchedulerQueueTests.cs similarity index 100% rename from src/StellaOps.Scheduler.Queue.Tests/RedisSchedulerQueueTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.Queue.Tests/RedisSchedulerQueueTests.cs diff --git a/src/StellaOps.Scheduler.Queue.Tests/SchedulerQueueServiceCollectionExtensionsTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.Queue.Tests/SchedulerQueueServiceCollectionExtensionsTests.cs similarity index 97% rename from src/StellaOps.Scheduler.Queue.Tests/SchedulerQueueServiceCollectionExtensionsTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.Queue.Tests/SchedulerQueueServiceCollectionExtensionsTests.cs index 56eeccaa..7e988a1f 100644 --- a/src/StellaOps.Scheduler.Queue.Tests/SchedulerQueueServiceCollectionExtensionsTests.cs +++ b/src/Scheduler/__Tests/StellaOps.Scheduler.Queue.Tests/SchedulerQueueServiceCollectionExtensionsTests.cs @@ -1,115 +1,115 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; -using FluentAssertions; -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Diagnostics.HealthChecks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using StellaOps.Scheduler.Models; -using StellaOps.Scheduler.Queue; -using StellaOps.Scheduler.Queue.Nats; -using Xunit; - -namespace StellaOps.Scheduler.Queue.Tests; - -public sealed class SchedulerQueueServiceCollectionExtensionsTests -{ - [Fact] - public async Task AddSchedulerQueues_RegistersNatsTransport() - { - var services = new ServiceCollection(); - services.AddSingleton<ILoggerFactory>(_ => NullLoggerFactory.Instance); - services.AddSchedulerQueues(new ConfigurationBuilder().Build()); - - var optionsDescriptor = services.First(descriptor => descriptor.ServiceType == typeof(SchedulerQueueOptions)); - var options = (SchedulerQueueOptions)optionsDescriptor.ImplementationInstance!; - options.Kind = SchedulerQueueTransportKind.Nats; - options.Nats.Url = "nats://localhost:4222"; - - await using var provider = services.BuildServiceProvider(); - - var plannerQueue = provider.GetRequiredService<ISchedulerPlannerQueue>(); - var runnerQueue = provider.GetRequiredService<ISchedulerRunnerQueue>(); - - plannerQueue.Should().BeOfType<NatsSchedulerPlannerQueue>(); - runnerQueue.Should().BeOfType<NatsSchedulerRunnerQueue>(); - } - - [Fact] - public async Task SchedulerQueueHealthCheck_ReturnsHealthy_WhenTransportsReachable() - { - var healthCheck = new SchedulerQueueHealthCheck( - new FakePlannerQueue(failPing: false), - new FakeRunnerQueue(failPing: false), - NullLogger<SchedulerQueueHealthCheck>.Instance); - - var context = new HealthCheckContext - { - Registration = new HealthCheckRegistration("scheduler-queue", healthCheck, HealthStatus.Unhealthy, Array.Empty<string>()) - }; - - var result = await 
healthCheck.CheckHealthAsync(context); - - result.Status.Should().Be(HealthStatus.Healthy); - } - - [Fact] - public async Task SchedulerQueueHealthCheck_ReturnsUnhealthy_WhenRunnerPingFails() - { - var healthCheck = new SchedulerQueueHealthCheck( - new FakePlannerQueue(failPing: false), - new FakeRunnerQueue(failPing: true), - NullLogger<SchedulerQueueHealthCheck>.Instance); - - var context = new HealthCheckContext - { - Registration = new HealthCheckRegistration("scheduler-queue", healthCheck, HealthStatus.Unhealthy, Array.Empty<string>()) - }; - - var result = await healthCheck.CheckHealthAsync(context); - - result.Status.Should().Be(HealthStatus.Unhealthy); - result.Description.Should().Contain("runner transport unreachable"); - } - private abstract class FakeQueue<TMessage> : ISchedulerQueue<TMessage>, ISchedulerQueueTransportDiagnostics - { - private readonly bool _failPing; - - protected FakeQueue(bool failPing) - { - _failPing = failPing; - } - - public ValueTask<SchedulerQueueEnqueueResult> EnqueueAsync(TMessage message, CancellationToken cancellationToken = default) - => ValueTask.FromResult(new SchedulerQueueEnqueueResult("stub", false)); - - public ValueTask<IReadOnlyList<ISchedulerQueueLease<TMessage>>> LeaseAsync(SchedulerQueueLeaseRequest request, CancellationToken cancellationToken = default) - => ValueTask.FromResult<IReadOnlyList<ISchedulerQueueLease<TMessage>>>(Array.Empty<ISchedulerQueueLease<TMessage>>()); - - public ValueTask<IReadOnlyList<ISchedulerQueueLease<TMessage>>> ClaimExpiredAsync(SchedulerQueueClaimOptions options, CancellationToken cancellationToken = default) - => ValueTask.FromResult<IReadOnlyList<ISchedulerQueueLease<TMessage>>>(Array.Empty<ISchedulerQueueLease<TMessage>>()); - - public ValueTask PingAsync(CancellationToken cancellationToken) - => _failPing - ? 
ValueTask.FromException(new InvalidOperationException("ping failed")) - : ValueTask.CompletedTask; - } - - private sealed class FakePlannerQueue : FakeQueue<PlannerQueueMessage>, ISchedulerPlannerQueue - { - public FakePlannerQueue(bool failPing) : base(failPing) - { - } - } - - private sealed class FakeRunnerQueue : FakeQueue<RunnerSegmentQueueMessage>, ISchedulerRunnerQueue - { - public FakeRunnerQueue(bool failPing) : base(failPing) - { - } - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using System.Threading; +using System.Threading.Tasks; +using FluentAssertions; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Diagnostics.HealthChecks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using StellaOps.Scheduler.Models; +using StellaOps.Scheduler.Queue; +using StellaOps.Scheduler.Queue.Nats; +using Xunit; + +namespace StellaOps.Scheduler.Queue.Tests; + +public sealed class SchedulerQueueServiceCollectionExtensionsTests +{ + [Fact] + public async Task AddSchedulerQueues_RegistersNatsTransport() + { + var services = new ServiceCollection(); + services.AddSingleton<ILoggerFactory>(_ => NullLoggerFactory.Instance); + services.AddSchedulerQueues(new ConfigurationBuilder().Build()); + + var optionsDescriptor = services.First(descriptor => descriptor.ServiceType == typeof(SchedulerQueueOptions)); + var options = (SchedulerQueueOptions)optionsDescriptor.ImplementationInstance!; + options.Kind = SchedulerQueueTransportKind.Nats; + options.Nats.Url = "nats://localhost:4222"; + + await using var provider = services.BuildServiceProvider(); + + var plannerQueue = provider.GetRequiredService<ISchedulerPlannerQueue>(); + var runnerQueue = provider.GetRequiredService<ISchedulerRunnerQueue>(); + + plannerQueue.Should().BeOfType<NatsSchedulerPlannerQueue>(); + runnerQueue.Should().BeOfType<NatsSchedulerRunnerQueue>(); + } + + [Fact] + public async Task SchedulerQueueHealthCheck_ReturnsHealthy_WhenTransportsReachable() + { + var healthCheck = new SchedulerQueueHealthCheck( + new FakePlannerQueue(failPing: false), + new FakeRunnerQueue(failPing: false), + NullLogger<SchedulerQueueHealthCheck>.Instance); + + var context = new HealthCheckContext + { + Registration = new HealthCheckRegistration("scheduler-queue", healthCheck, HealthStatus.Unhealthy, Array.Empty<string>()) + }; + + var result = await healthCheck.CheckHealthAsync(context); + + result.Status.Should().Be(HealthStatus.Healthy); + } + + [Fact] + public async Task SchedulerQueueHealthCheck_ReturnsUnhealthy_WhenRunnerPingFails() + { + var healthCheck = new SchedulerQueueHealthCheck( + new FakePlannerQueue(failPing: false), + new FakeRunnerQueue(failPing: true), + NullLogger<SchedulerQueueHealthCheck>.Instance); + + var context = new HealthCheckContext + { + Registration = new HealthCheckRegistration("scheduler-queue", healthCheck, HealthStatus.Unhealthy, Array.Empty<string>()) + }; + + var result = await healthCheck.CheckHealthAsync(context); + + result.Status.Should().Be(HealthStatus.Unhealthy); + result.Description.Should().Contain("runner transport unreachable"); + } + private abstract class FakeQueue<TMessage> : ISchedulerQueue<TMessage>, ISchedulerQueueTransportDiagnostics + { + private readonly bool _failPing; + + protected FakeQueue(bool failPing) + { + _failPing = failPing; + } + + public ValueTask<SchedulerQueueEnqueueResult> EnqueueAsync(TMessage message, CancellationToken cancellationToken = default) 
+ => ValueTask.FromResult(new SchedulerQueueEnqueueResult("stub", false)); + + public ValueTask<IReadOnlyList<ISchedulerQueueLease<TMessage>>> LeaseAsync(SchedulerQueueLeaseRequest request, CancellationToken cancellationToken = default) + => ValueTask.FromResult<IReadOnlyList<ISchedulerQueueLease<TMessage>>>(Array.Empty<ISchedulerQueueLease<TMessage>>()); + + public ValueTask<IReadOnlyList<ISchedulerQueueLease<TMessage>>> ClaimExpiredAsync(SchedulerQueueClaimOptions options, CancellationToken cancellationToken = default) + => ValueTask.FromResult<IReadOnlyList<ISchedulerQueueLease<TMessage>>>(Array.Empty<ISchedulerQueueLease<TMessage>>()); + + public ValueTask PingAsync(CancellationToken cancellationToken) + => _failPing + ? ValueTask.FromException(new InvalidOperationException("ping failed")) + : ValueTask.CompletedTask; + } + + private sealed class FakePlannerQueue : FakeQueue<PlannerQueueMessage>, ISchedulerPlannerQueue + { + public FakePlannerQueue(bool failPing) : base(failPing) + { + } + } + + private sealed class FakeRunnerQueue : FakeQueue<RunnerSegmentQueueMessage>, ISchedulerRunnerQueue + { + public FakeRunnerQueue(bool failPing) : base(failPing) + { + } + } +} diff --git a/src/StellaOps.Scheduler.Queue.Tests/StellaOps.Scheduler.Queue.Tests.csproj b/src/Scheduler/__Tests/StellaOps.Scheduler.Queue.Tests/StellaOps.Scheduler.Queue.Tests.csproj similarity index 80% rename from src/StellaOps.Scheduler.Queue.Tests/StellaOps.Scheduler.Queue.Tests.csproj rename to src/Scheduler/__Tests/StellaOps.Scheduler.Queue.Tests/StellaOps.Scheduler.Queue.Tests.csproj index d5994e7a..6eb863db 100644 --- a/src/StellaOps.Scheduler.Queue.Tests/StellaOps.Scheduler.Queue.Tests.csproj +++ b/src/Scheduler/__Tests/StellaOps.Scheduler.Queue.Tests/StellaOps.Scheduler.Queue.Tests.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -21,7 +22,7 @@ </PackageReference> </ItemGroup> <ItemGroup> - <ProjectReference Include="..\StellaOps.Scheduler.Models\StellaOps.Scheduler.Models.csproj" /> - <ProjectReference Include="..\StellaOps.Scheduler.Queue\StellaOps.Scheduler.Queue.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Scheduler.Models/StellaOps.Scheduler.Models.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Scheduler.Queue/StellaOps.Scheduler.Queue.csproj" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Scheduler.Storage.Mongo.Tests/GlobalUsings.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.Storage.Mongo.Tests/GlobalUsings.cs similarity index 100% rename from src/StellaOps.Scheduler.Storage.Mongo.Tests/GlobalUsings.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.Storage.Mongo.Tests/GlobalUsings.cs diff --git a/src/StellaOps.Scheduler.Storage.Mongo.Tests/Integration/SchedulerMongoRoundTripTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.Storage.Mongo.Tests/Integration/SchedulerMongoRoundTripTests.cs similarity index 100% rename from src/StellaOps.Scheduler.Storage.Mongo.Tests/Integration/SchedulerMongoRoundTripTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.Storage.Mongo.Tests/Integration/SchedulerMongoRoundTripTests.cs diff --git a/src/StellaOps.Scheduler.Storage.Mongo.Tests/Migrations/SchedulerMongoMigrationTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.Storage.Mongo.Tests/Migrations/SchedulerMongoMigrationTests.cs similarity index 100% rename from 
src/StellaOps.Scheduler.Storage.Mongo.Tests/Migrations/SchedulerMongoMigrationTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.Storage.Mongo.Tests/Migrations/SchedulerMongoMigrationTests.cs diff --git a/src/StellaOps.Scheduler.Storage.Mongo.Tests/Repositories/AuditRepositoryTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.Storage.Mongo.Tests/Repositories/AuditRepositoryTests.cs similarity index 97% rename from src/StellaOps.Scheduler.Storage.Mongo.Tests/Repositories/AuditRepositoryTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.Storage.Mongo.Tests/Repositories/AuditRepositoryTests.cs index 6aed9f4e..e5f4a68d 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo.Tests/Repositories/AuditRepositoryTests.cs +++ b/src/Scheduler/__Tests/StellaOps.Scheduler.Storage.Mongo.Tests/Repositories/AuditRepositoryTests.cs @@ -1,60 +1,60 @@ -using System; -using System.Linq; -using System.Threading; -using StellaOps.Scheduler.Storage.Mongo.Repositories; - -namespace StellaOps.Scheduler.Storage.Mongo.Tests.Repositories; - -public sealed class AuditRepositoryTests -{ - [Fact] - public async Task InsertAndListAsync_ReturnsTenantScopedEntries() - { - using var harness = new SchedulerMongoTestHarness(); - var repository = new AuditRepository(harness.Context); - - var record1 = TestDataFactory.CreateAuditRecord("tenant-alpha", "1"); - var record2 = TestDataFactory.CreateAuditRecord("tenant-alpha", "2"); - var otherTenant = TestDataFactory.CreateAuditRecord("tenant-beta", "3"); - - await repository.InsertAsync(record1); - await repository.InsertAsync(record2); - await repository.InsertAsync(otherTenant); - - var results = await repository.ListAsync("tenant-alpha"); - Assert.Equal(2, results.Count); - Assert.DoesNotContain(results, record => record.TenantId == "tenant-beta"); - } - - [Fact] - public async Task ListAsync_AppliesFilters() - { - using var harness = new SchedulerMongoTestHarness(); - var repository = new AuditRepository(harness.Context); - - var older = TestDataFactory.CreateAuditRecord( - "tenant-alpha", - "old", - occurredAt: DateTimeOffset.UtcNow.AddMinutes(-30), - scheduleId: "sch-a"); - var newer = TestDataFactory.CreateAuditRecord( - "tenant-alpha", - "new", - occurredAt: DateTimeOffset.UtcNow, - scheduleId: "sch-a"); - - await repository.InsertAsync(older); - await repository.InsertAsync(newer); - - var options = new AuditQueryOptions - { - Since = DateTimeOffset.UtcNow.AddMinutes(-5), - ScheduleId = "sch-a", - Limit = 5 - }; - - var results = await repository.ListAsync("tenant-alpha", options); - Assert.Single(results); - Assert.Equal("audit_new", results.Single().Id); - } -} +using System; +using System.Linq; +using System.Threading; +using StellaOps.Scheduler.Storage.Mongo.Repositories; + +namespace StellaOps.Scheduler.Storage.Mongo.Tests.Repositories; + +public sealed class AuditRepositoryTests +{ + [Fact] + public async Task InsertAndListAsync_ReturnsTenantScopedEntries() + { + using var harness = new SchedulerMongoTestHarness(); + var repository = new AuditRepository(harness.Context); + + var record1 = TestDataFactory.CreateAuditRecord("tenant-alpha", "1"); + var record2 = TestDataFactory.CreateAuditRecord("tenant-alpha", "2"); + var otherTenant = TestDataFactory.CreateAuditRecord("tenant-beta", "3"); + + await repository.InsertAsync(record1); + await repository.InsertAsync(record2); + await repository.InsertAsync(otherTenant); + + var results = await repository.ListAsync("tenant-alpha"); + Assert.Equal(2, results.Count); + Assert.DoesNotContain(results, record 
=> record.TenantId == "tenant-beta"); + } + + [Fact] + public async Task ListAsync_AppliesFilters() + { + using var harness = new SchedulerMongoTestHarness(); + var repository = new AuditRepository(harness.Context); + + var older = TestDataFactory.CreateAuditRecord( + "tenant-alpha", + "old", + occurredAt: DateTimeOffset.UtcNow.AddMinutes(-30), + scheduleId: "sch-a"); + var newer = TestDataFactory.CreateAuditRecord( + "tenant-alpha", + "new", + occurredAt: DateTimeOffset.UtcNow, + scheduleId: "sch-a"); + + await repository.InsertAsync(older); + await repository.InsertAsync(newer); + + var options = new AuditQueryOptions + { + Since = DateTimeOffset.UtcNow.AddMinutes(-5), + ScheduleId = "sch-a", + Limit = 5 + }; + + var results = await repository.ListAsync("tenant-alpha", options); + Assert.Single(results); + Assert.Equal("audit_new", results.Single().Id); + } +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo.Tests/Repositories/ImpactSnapshotRepositoryTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.Storage.Mongo.Tests/Repositories/ImpactSnapshotRepositoryTests.cs similarity index 97% rename from src/StellaOps.Scheduler.Storage.Mongo.Tests/Repositories/ImpactSnapshotRepositoryTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.Storage.Mongo.Tests/Repositories/ImpactSnapshotRepositoryTests.cs index c9b7077e..15d7339c 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo.Tests/Repositories/ImpactSnapshotRepositoryTests.cs +++ b/src/Scheduler/__Tests/StellaOps.Scheduler.Storage.Mongo.Tests/Repositories/ImpactSnapshotRepositoryTests.cs @@ -1,41 +1,41 @@ -using System; -using System.Threading; -using StellaOps.Scheduler.Storage.Mongo.Repositories; - -namespace StellaOps.Scheduler.Storage.Mongo.Tests.Repositories; - -public sealed class ImpactSnapshotRepositoryTests -{ - [Fact] - public async Task UpsertAndGetAsync_RoundTripsSnapshot() - { - using var harness = new SchedulerMongoTestHarness(); - var repository = new ImpactSnapshotRepository(harness.Context); - - var snapshot = TestDataFactory.CreateImpactSet("tenant-alpha", "impact-1", DateTimeOffset.UtcNow.AddMinutes(-5)); - await repository.UpsertAsync(snapshot, cancellationToken: CancellationToken.None); - - var stored = await repository.GetBySnapshotIdAsync("impact-1", cancellationToken: CancellationToken.None); - Assert.NotNull(stored); - Assert.Equal(snapshot.SnapshotId, stored!.SnapshotId); - Assert.Equal(snapshot.Images[0].ImageDigest, stored.Images[0].ImageDigest); - } - - [Fact] - public async Task GetLatestBySelectorAsync_ReturnsMostRecent() - { - using var harness = new SchedulerMongoTestHarness(); - var repository = new ImpactSnapshotRepository(harness.Context); - - var selectorTenant = "tenant-alpha"; - var first = TestDataFactory.CreateImpactSet(selectorTenant, "impact-old", DateTimeOffset.UtcNow.AddMinutes(-10)); - var latest = TestDataFactory.CreateImpactSet(selectorTenant, "impact-new", DateTimeOffset.UtcNow); - - await repository.UpsertAsync(first); - await repository.UpsertAsync(latest); - - var resolved = await repository.GetLatestBySelectorAsync(latest.Selector); - Assert.NotNull(resolved); - Assert.Equal("impact-new", resolved!.SnapshotId); - } -} +using System; +using System.Threading; +using StellaOps.Scheduler.Storage.Mongo.Repositories; + +namespace StellaOps.Scheduler.Storage.Mongo.Tests.Repositories; + +public sealed class ImpactSnapshotRepositoryTests +{ + [Fact] + public async Task UpsertAndGetAsync_RoundTripsSnapshot() + { + using var harness = new SchedulerMongoTestHarness(); + var repository = new 
ImpactSnapshotRepository(harness.Context); + + var snapshot = TestDataFactory.CreateImpactSet("tenant-alpha", "impact-1", DateTimeOffset.UtcNow.AddMinutes(-5)); + await repository.UpsertAsync(snapshot, cancellationToken: CancellationToken.None); + + var stored = await repository.GetBySnapshotIdAsync("impact-1", cancellationToken: CancellationToken.None); + Assert.NotNull(stored); + Assert.Equal(snapshot.SnapshotId, stored!.SnapshotId); + Assert.Equal(snapshot.Images[0].ImageDigest, stored.Images[0].ImageDigest); + } + + [Fact] + public async Task GetLatestBySelectorAsync_ReturnsMostRecent() + { + using var harness = new SchedulerMongoTestHarness(); + var repository = new ImpactSnapshotRepository(harness.Context); + + var selectorTenant = "tenant-alpha"; + var first = TestDataFactory.CreateImpactSet(selectorTenant, "impact-old", DateTimeOffset.UtcNow.AddMinutes(-10)); + var latest = TestDataFactory.CreateImpactSet(selectorTenant, "impact-new", DateTimeOffset.UtcNow); + + await repository.UpsertAsync(first); + await repository.UpsertAsync(latest); + + var resolved = await repository.GetLatestBySelectorAsync(latest.Selector); + Assert.NotNull(resolved); + Assert.Equal("impact-new", resolved!.SnapshotId); + } +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo.Tests/Repositories/RunRepositoryTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.Storage.Mongo.Tests/Repositories/RunRepositoryTests.cs similarity index 97% rename from src/StellaOps.Scheduler.Storage.Mongo.Tests/Repositories/RunRepositoryTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.Storage.Mongo.Tests/Repositories/RunRepositoryTests.cs index 3178666e..2b4e6238 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo.Tests/Repositories/RunRepositoryTests.cs +++ b/src/Scheduler/__Tests/StellaOps.Scheduler.Storage.Mongo.Tests/Repositories/RunRepositoryTests.cs @@ -1,76 +1,76 @@ -using System; -using System.Collections.Immutable; -using System.Linq; -using System.Threading; -using StellaOps.Scheduler.Storage.Mongo.Repositories; - -namespace StellaOps.Scheduler.Storage.Mongo.Tests.Repositories; - -public sealed class RunRepositoryTests -{ - [Fact] - public async Task InsertAndGetAsync_RoundTripsRun() - { - using var harness = new SchedulerMongoTestHarness(); - var repository = new RunRepository(harness.Context); - - var run = TestDataFactory.CreateRun("run_1", "tenant-alpha", RunState.Planning); - await repository.InsertAsync(run, cancellationToken: CancellationToken.None); - - var stored = await repository.GetAsync(run.TenantId, run.Id, cancellationToken: CancellationToken.None); - Assert.NotNull(stored); - Assert.Equal(run.State, stored!.State); - Assert.Equal(run.Trigger, stored.Trigger); - } - - [Fact] - public async Task UpdateAsync_ChangesStateAndStats() - { - using var harness = new SchedulerMongoTestHarness(); - var repository = new RunRepository(harness.Context); - - var run = TestDataFactory.CreateRun("run_update", "tenant-alpha", RunState.Planning); - await repository.InsertAsync(run); - - var updated = run with - { - State = RunState.Completed, - FinishedAt = DateTimeOffset.UtcNow, - Stats = new RunStats(candidates: 10, deduped: 10, queued: 10, completed: 10, deltas: 2) - }; - - var result = await repository.UpdateAsync(updated); - Assert.True(result); - - var stored = await repository.GetAsync(updated.TenantId, updated.Id); - Assert.NotNull(stored); - Assert.Equal(RunState.Completed, stored!.State); - Assert.Equal(10, stored.Stats.Completed); - } - - [Fact] - public async Task 
ListAsync_FiltersByStateAndSchedule() - { - using var harness = new SchedulerMongoTestHarness(); - var repository = new RunRepository(harness.Context); - - var run1 = TestDataFactory.CreateRun("run_state_1", "tenant-alpha", RunState.Planning, scheduleId: "sch_a"); - var run2 = TestDataFactory.CreateRun("run_state_2", "tenant-alpha", RunState.Running, scheduleId: "sch_a"); - var run3 = TestDataFactory.CreateRun("run_state_3", "tenant-alpha", RunState.Completed, scheduleId: "sch_b"); - - await repository.InsertAsync(run1); - await repository.InsertAsync(run2); - await repository.InsertAsync(run3); - - var options = new RunQueryOptions - { - ScheduleId = "sch_a", - States = new[] { RunState.Running }.ToImmutableArray(), - Limit = 10 - }; - - var results = await repository.ListAsync("tenant-alpha", options); - Assert.Single(results); - Assert.Equal("run_state_2", results.Single().Id); - } -} +using System; +using System.Collections.Immutable; +using System.Linq; +using System.Threading; +using StellaOps.Scheduler.Storage.Mongo.Repositories; + +namespace StellaOps.Scheduler.Storage.Mongo.Tests.Repositories; + +public sealed class RunRepositoryTests +{ + [Fact] + public async Task InsertAndGetAsync_RoundTripsRun() + { + using var harness = new SchedulerMongoTestHarness(); + var repository = new RunRepository(harness.Context); + + var run = TestDataFactory.CreateRun("run_1", "tenant-alpha", RunState.Planning); + await repository.InsertAsync(run, cancellationToken: CancellationToken.None); + + var stored = await repository.GetAsync(run.TenantId, run.Id, cancellationToken: CancellationToken.None); + Assert.NotNull(stored); + Assert.Equal(run.State, stored!.State); + Assert.Equal(run.Trigger, stored.Trigger); + } + + [Fact] + public async Task UpdateAsync_ChangesStateAndStats() + { + using var harness = new SchedulerMongoTestHarness(); + var repository = new RunRepository(harness.Context); + + var run = TestDataFactory.CreateRun("run_update", "tenant-alpha", RunState.Planning); + await repository.InsertAsync(run); + + var updated = run with + { + State = RunState.Completed, + FinishedAt = DateTimeOffset.UtcNow, + Stats = new RunStats(candidates: 10, deduped: 10, queued: 10, completed: 10, deltas: 2) + }; + + var result = await repository.UpdateAsync(updated); + Assert.True(result); + + var stored = await repository.GetAsync(updated.TenantId, updated.Id); + Assert.NotNull(stored); + Assert.Equal(RunState.Completed, stored!.State); + Assert.Equal(10, stored.Stats.Completed); + } + + [Fact] + public async Task ListAsync_FiltersByStateAndSchedule() + { + using var harness = new SchedulerMongoTestHarness(); + var repository = new RunRepository(harness.Context); + + var run1 = TestDataFactory.CreateRun("run_state_1", "tenant-alpha", RunState.Planning, scheduleId: "sch_a"); + var run2 = TestDataFactory.CreateRun("run_state_2", "tenant-alpha", RunState.Running, scheduleId: "sch_a"); + var run3 = TestDataFactory.CreateRun("run_state_3", "tenant-alpha", RunState.Completed, scheduleId: "sch_b"); + + await repository.InsertAsync(run1); + await repository.InsertAsync(run2); + await repository.InsertAsync(run3); + + var options = new RunQueryOptions + { + ScheduleId = "sch_a", + States = new[] { RunState.Running }.ToImmutableArray(), + Limit = 10 + }; + + var results = await repository.ListAsync("tenant-alpha", options); + Assert.Single(results); + Assert.Equal("run_state_2", results.Single().Id); + } +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo.Tests/Repositories/ScheduleRepositoryTests.cs 
b/src/Scheduler/__Tests/StellaOps.Scheduler.Storage.Mongo.Tests/Repositories/ScheduleRepositoryTests.cs similarity index 97% rename from src/StellaOps.Scheduler.Storage.Mongo.Tests/Repositories/ScheduleRepositoryTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.Storage.Mongo.Tests/Repositories/ScheduleRepositoryTests.cs index 8309ff95..f4d4c4ad 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo.Tests/Repositories/ScheduleRepositoryTests.cs +++ b/src/Scheduler/__Tests/StellaOps.Scheduler.Storage.Mongo.Tests/Repositories/ScheduleRepositoryTests.cs @@ -1,74 +1,74 @@ -using System; -using System.Threading; -using StellaOps.Scheduler.Storage.Mongo.Repositories; - -namespace StellaOps.Scheduler.Storage.Mongo.Tests.Repositories; - -public sealed class ScheduleRepositoryTests -{ - [Fact] - public async Task UpsertAsync_PersistsScheduleWithCanonicalShape() - { - using var harness = new SchedulerMongoTestHarness(); - var repository = new ScheduleRepository(harness.Context); - - var schedule = TestDataFactory.CreateSchedule("sch_unit_1", "tenant-alpha"); - await repository.UpsertAsync(schedule, cancellationToken: CancellationToken.None); - - var stored = await repository.GetAsync(schedule.TenantId, schedule.Id, cancellationToken: CancellationToken.None); - Assert.NotNull(stored); - Assert.Equal(schedule.Id, stored!.Id); - Assert.Equal(schedule.Name, stored.Name); - Assert.Equal(schedule.Selection.Scope, stored.Selection.Scope); - } - - [Fact] - public async Task ListAsync_ExcludesDisabledAndDeletedByDefault() - { - using var harness = new SchedulerMongoTestHarness(); - var repository = new ScheduleRepository(harness.Context); - var tenantId = "tenant-alpha"; - - var enabled = TestDataFactory.CreateSchedule("sch_enabled", tenantId, enabled: true, name: "Enabled"); - var disabled = TestDataFactory.CreateSchedule("sch_disabled", tenantId, enabled: false, name: "Disabled"); - - await repository.UpsertAsync(enabled); - await repository.UpsertAsync(disabled); - await repository.SoftDeleteAsync(tenantId, enabled.Id, "svc_scheduler", DateTimeOffset.UtcNow); - - var results = await repository.ListAsync(tenantId); - Assert.Empty(results); - - var includeDisabled = await repository.ListAsync( - tenantId, - new ScheduleQueryOptions { IncludeDisabled = true, IncludeDeleted = true }); - - Assert.Equal(2, includeDisabled.Count); - Assert.Contains(includeDisabled, schedule => schedule.Id == enabled.Id); - Assert.Contains(includeDisabled, schedule => schedule.Id == disabled.Id); - } - - [Fact] - public async Task SoftDeleteAsync_SetsMetadataAndExcludesFromQueries() - { - using var harness = new SchedulerMongoTestHarness(); - var repository = new ScheduleRepository(harness.Context); - - var schedule = TestDataFactory.CreateSchedule("sch_delete", "tenant-beta"); - await repository.UpsertAsync(schedule); - - var deletedAt = DateTimeOffset.UtcNow; - var deleted = await repository.SoftDeleteAsync(schedule.TenantId, schedule.Id, "svc_delete", deletedAt); - Assert.True(deleted); - - var retrieved = await repository.GetAsync(schedule.TenantId, schedule.Id); - Assert.Null(retrieved); - - var includeDeleted = await repository.ListAsync( - schedule.TenantId, - new ScheduleQueryOptions { IncludeDeleted = true, IncludeDisabled = true }); - - Assert.Single(includeDeleted); - Assert.Equal("sch_delete", includeDeleted[0].Id); - } -} +using System; +using System.Threading; +using StellaOps.Scheduler.Storage.Mongo.Repositories; + +namespace StellaOps.Scheduler.Storage.Mongo.Tests.Repositories; + +public sealed class 
ScheduleRepositoryTests +{ + [Fact] + public async Task UpsertAsync_PersistsScheduleWithCanonicalShape() + { + using var harness = new SchedulerMongoTestHarness(); + var repository = new ScheduleRepository(harness.Context); + + var schedule = TestDataFactory.CreateSchedule("sch_unit_1", "tenant-alpha"); + await repository.UpsertAsync(schedule, cancellationToken: CancellationToken.None); + + var stored = await repository.GetAsync(schedule.TenantId, schedule.Id, cancellationToken: CancellationToken.None); + Assert.NotNull(stored); + Assert.Equal(schedule.Id, stored!.Id); + Assert.Equal(schedule.Name, stored.Name); + Assert.Equal(schedule.Selection.Scope, stored.Selection.Scope); + } + + [Fact] + public async Task ListAsync_ExcludesDisabledAndDeletedByDefault() + { + using var harness = new SchedulerMongoTestHarness(); + var repository = new ScheduleRepository(harness.Context); + var tenantId = "tenant-alpha"; + + var enabled = TestDataFactory.CreateSchedule("sch_enabled", tenantId, enabled: true, name: "Enabled"); + var disabled = TestDataFactory.CreateSchedule("sch_disabled", tenantId, enabled: false, name: "Disabled"); + + await repository.UpsertAsync(enabled); + await repository.UpsertAsync(disabled); + await repository.SoftDeleteAsync(tenantId, enabled.Id, "svc_scheduler", DateTimeOffset.UtcNow); + + var results = await repository.ListAsync(tenantId); + Assert.Empty(results); + + var includeDisabled = await repository.ListAsync( + tenantId, + new ScheduleQueryOptions { IncludeDisabled = true, IncludeDeleted = true }); + + Assert.Equal(2, includeDisabled.Count); + Assert.Contains(includeDisabled, schedule => schedule.Id == enabled.Id); + Assert.Contains(includeDisabled, schedule => schedule.Id == disabled.Id); + } + + [Fact] + public async Task SoftDeleteAsync_SetsMetadataAndExcludesFromQueries() + { + using var harness = new SchedulerMongoTestHarness(); + var repository = new ScheduleRepository(harness.Context); + + var schedule = TestDataFactory.CreateSchedule("sch_delete", "tenant-beta"); + await repository.UpsertAsync(schedule); + + var deletedAt = DateTimeOffset.UtcNow; + var deleted = await repository.SoftDeleteAsync(schedule.TenantId, schedule.Id, "svc_delete", deletedAt); + Assert.True(deleted); + + var retrieved = await repository.GetAsync(schedule.TenantId, schedule.Id); + Assert.Null(retrieved); + + var includeDeleted = await repository.ListAsync( + schedule.TenantId, + new ScheduleQueryOptions { IncludeDeleted = true, IncludeDisabled = true }); + + Assert.Single(includeDeleted); + Assert.Equal("sch_delete", includeDeleted[0].Id); + } +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo.Tests/SchedulerMongoTestHarness.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.Storage.Mongo.Tests/SchedulerMongoTestHarness.cs similarity index 97% rename from src/StellaOps.Scheduler.Storage.Mongo.Tests/SchedulerMongoTestHarness.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.Storage.Mongo.Tests/SchedulerMongoTestHarness.cs index 250870ad..3194c219 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo.Tests/SchedulerMongoTestHarness.cs +++ b/src/Scheduler/__Tests/StellaOps.Scheduler.Storage.Mongo.Tests/SchedulerMongoTestHarness.cs @@ -1,36 +1,36 @@ -using System; -using System.Threading; -using Microsoft.Extensions.Logging.Abstractions; - -namespace StellaOps.Scheduler.Storage.Mongo.Tests; - -internal sealed class SchedulerMongoTestHarness : IDisposable -{ - private readonly MongoDbRunner _runner; - - public SchedulerMongoTestHarness() - { - _runner = 
MongoDbRunner.Start(additionalMongodArguments: "--quiet"); - var options = new SchedulerMongoOptions - { - ConnectionString = _runner.ConnectionString, - Database = $"scheduler_tests_{Guid.NewGuid():N}" - }; - - Context = new SchedulerMongoContext(Microsoft.Extensions.Options.Options.Create(options), NullLogger<SchedulerMongoContext>.Instance); - var migrations = new ISchedulerMongoMigration[] - { - new EnsureSchedulerCollectionsMigration(NullLogger<EnsureSchedulerCollectionsMigration>.Instance), - new EnsureSchedulerIndexesMigration() - }; - var runner = new SchedulerMongoMigrationRunner(Context, migrations, NullLogger<SchedulerMongoMigrationRunner>.Instance); - runner.RunAsync(CancellationToken.None).GetAwaiter().GetResult(); - } - - public SchedulerMongoContext Context { get; } - - public void Dispose() - { - _runner.Dispose(); - } -} +using System; +using System.Threading; +using Microsoft.Extensions.Logging.Abstractions; + +namespace StellaOps.Scheduler.Storage.Mongo.Tests; + +internal sealed class SchedulerMongoTestHarness : IDisposable +{ + private readonly MongoDbRunner _runner; + + public SchedulerMongoTestHarness() + { + _runner = MongoDbRunner.Start(additionalMongodArguments: "--quiet"); + var options = new SchedulerMongoOptions + { + ConnectionString = _runner.ConnectionString, + Database = $"scheduler_tests_{Guid.NewGuid():N}" + }; + + Context = new SchedulerMongoContext(Microsoft.Extensions.Options.Options.Create(options), NullLogger<SchedulerMongoContext>.Instance); + var migrations = new ISchedulerMongoMigration[] + { + new EnsureSchedulerCollectionsMigration(NullLogger<EnsureSchedulerCollectionsMigration>.Instance), + new EnsureSchedulerIndexesMigration() + }; + var runner = new SchedulerMongoMigrationRunner(Context, migrations, NullLogger<SchedulerMongoMigrationRunner>.Instance); + runner.RunAsync(CancellationToken.None).GetAwaiter().GetResult(); + } + + public SchedulerMongoContext Context { get; } + + public void Dispose() + { + _runner.Dispose(); + } +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo.Tests/Services/RunSummaryServiceTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.Storage.Mongo.Tests/Services/RunSummaryServiceTests.cs similarity index 97% rename from src/StellaOps.Scheduler.Storage.Mongo.Tests/Services/RunSummaryServiceTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.Storage.Mongo.Tests/Services/RunSummaryServiceTests.cs index 6e96a767..a4db007d 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo.Tests/Services/RunSummaryServiceTests.cs +++ b/src/Scheduler/__Tests/StellaOps.Scheduler.Storage.Mongo.Tests/Services/RunSummaryServiceTests.cs @@ -1,116 +1,116 @@ -using Microsoft.Extensions.Logging.Abstractions; -using StellaOps.Scheduler.Models; -using StellaOps.Scheduler.Storage.Mongo.Repositories; -using StellaOps.Scheduler.Storage.Mongo.Services; - -namespace StellaOps.Scheduler.Storage.Mongo.Tests.Services; - -public sealed class RunSummaryServiceTests : IDisposable -{ - private readonly SchedulerMongoTestHarness _harness; - private readonly RunSummaryRepository _repository; - private readonly StubTimeProvider _timeProvider; - private readonly RunSummaryService _service; - - public RunSummaryServiceTests() - { - _harness = new SchedulerMongoTestHarness(); - _repository = new RunSummaryRepository(_harness.Context); - _timeProvider = new StubTimeProvider(DateTimeOffset.Parse("2025-10-26T10:00:00Z")); - _service = new RunSummaryService(_repository, _timeProvider, NullLogger<RunSummaryService>.Instance); - } - - [Fact] - public async Task 
ProjectAsync_FirstRunCreatesProjection() - { - var run = TestDataFactory.CreateRun("run-1", "tenant-alpha", RunState.Planning, "sch-alpha"); - - var projection = await _service.ProjectAsync(run, CancellationToken.None); - - Assert.Equal("tenant-alpha", projection.TenantId); - Assert.Equal("sch-alpha", projection.ScheduleId); - Assert.NotNull(projection.LastRun); - Assert.Equal(RunState.Planning, projection.LastRun!.State); - Assert.Equal(1, projection.Counters.Total); - Assert.Equal(1, projection.Counters.Planning); - Assert.Equal(0, projection.Counters.Completed); - Assert.Single(projection.Recent); - Assert.Equal(run.Id, projection.Recent[0].RunId); - } - - [Fact] - public async Task ProjectAsync_UpdateRunReplacesExistingEntry() - { - var createdAt = DateTimeOffset.Parse("2025-10-26T09:55:00Z"); - var run = TestDataFactory.CreateRun( - "run-update", - "tenant-alpha", - RunState.Planning, - "sch-alpha", - createdAt: createdAt, - startedAt: createdAt.AddMinutes(1)); - await _service.ProjectAsync(run, CancellationToken.None); - - var updated = run with - { - State = RunState.Completed, - StartedAt = run.StartedAt, - FinishedAt = run.CreatedAt.AddMinutes(5), - Stats = new RunStats(candidates: 10, deduped: 8, queued: 5, completed: 10, deltas: 2, newCriticals: 1) - }; - - _timeProvider.Advance(TimeSpan.FromMinutes(10)); - var projection = await _service.ProjectAsync(updated, CancellationToken.None); - - Assert.NotNull(projection.LastRun); - Assert.Equal(RunState.Completed, projection.LastRun!.State); - Assert.Equal(1, projection.Counters.Completed); - Assert.Equal(0, projection.Counters.Planning); - Assert.Single(projection.Recent); - Assert.Equal(updated.Stats.Completed, projection.LastRun!.Stats.Completed); - Assert.True(projection.UpdatedAt > run.CreatedAt); - } - - [Fact] - public async Task ProjectAsync_TrimsRecentEntriesBeyondLimit() - { - var baseTime = DateTimeOffset.Parse("2025-10-26T00:00:00Z"); - - for (var i = 0; i < 25; i++) - { - var run = TestDataFactory.CreateRun( - $"run-{i}", - "tenant-alpha", - RunState.Completed, - "sch-alpha", - stats: new RunStats(candidates: 5, deduped: 4, queued: 3, completed: 5, deltas: 1), - createdAt: baseTime.AddMinutes(i)); - - await _service.ProjectAsync(run, CancellationToken.None); - } - - var projections = await _service.ListAsync("tenant-alpha", CancellationToken.None); - Assert.Single(projections); - var projection = projections[0]; - Assert.Equal(20, projection.Recent.Length); - Assert.Equal(20, projection.Counters.Total); - Assert.Equal("run-24", projection.Recent[0].RunId); - } - - public void Dispose() - { - _harness.Dispose(); - } - - private sealed class StubTimeProvider : TimeProvider - { - private DateTimeOffset _utcNow; - - public StubTimeProvider(DateTimeOffset initial) - => _utcNow = initial; - - public override DateTimeOffset GetUtcNow() => _utcNow; - - public void Advance(TimeSpan delta) => _utcNow = _utcNow.Add(delta); - } -} +using Microsoft.Extensions.Logging.Abstractions; +using StellaOps.Scheduler.Models; +using StellaOps.Scheduler.Storage.Mongo.Repositories; +using StellaOps.Scheduler.Storage.Mongo.Services; + +namespace StellaOps.Scheduler.Storage.Mongo.Tests.Services; + +public sealed class RunSummaryServiceTests : IDisposable +{ + private readonly SchedulerMongoTestHarness _harness; + private readonly RunSummaryRepository _repository; + private readonly StubTimeProvider _timeProvider; + private readonly RunSummaryService _service; + + public RunSummaryServiceTests() + { + _harness = new SchedulerMongoTestHarness(); + 
_repository = new RunSummaryRepository(_harness.Context); + _timeProvider = new StubTimeProvider(DateTimeOffset.Parse("2025-10-26T10:00:00Z")); + _service = new RunSummaryService(_repository, _timeProvider, NullLogger<RunSummaryService>.Instance); + } + + [Fact] + public async Task ProjectAsync_FirstRunCreatesProjection() + { + var run = TestDataFactory.CreateRun("run-1", "tenant-alpha", RunState.Planning, "sch-alpha"); + + var projection = await _service.ProjectAsync(run, CancellationToken.None); + + Assert.Equal("tenant-alpha", projection.TenantId); + Assert.Equal("sch-alpha", projection.ScheduleId); + Assert.NotNull(projection.LastRun); + Assert.Equal(RunState.Planning, projection.LastRun!.State); + Assert.Equal(1, projection.Counters.Total); + Assert.Equal(1, projection.Counters.Planning); + Assert.Equal(0, projection.Counters.Completed); + Assert.Single(projection.Recent); + Assert.Equal(run.Id, projection.Recent[0].RunId); + } + + [Fact] + public async Task ProjectAsync_UpdateRunReplacesExistingEntry() + { + var createdAt = DateTimeOffset.Parse("2025-10-26T09:55:00Z"); + var run = TestDataFactory.CreateRun( + "run-update", + "tenant-alpha", + RunState.Planning, + "sch-alpha", + createdAt: createdAt, + startedAt: createdAt.AddMinutes(1)); + await _service.ProjectAsync(run, CancellationToken.None); + + var updated = run with + { + State = RunState.Completed, + StartedAt = run.StartedAt, + FinishedAt = run.CreatedAt.AddMinutes(5), + Stats = new RunStats(candidates: 10, deduped: 8, queued: 5, completed: 10, deltas: 2, newCriticals: 1) + }; + + _timeProvider.Advance(TimeSpan.FromMinutes(10)); + var projection = await _service.ProjectAsync(updated, CancellationToken.None); + + Assert.NotNull(projection.LastRun); + Assert.Equal(RunState.Completed, projection.LastRun!.State); + Assert.Equal(1, projection.Counters.Completed); + Assert.Equal(0, projection.Counters.Planning); + Assert.Single(projection.Recent); + Assert.Equal(updated.Stats.Completed, projection.LastRun!.Stats.Completed); + Assert.True(projection.UpdatedAt > run.CreatedAt); + } + + [Fact] + public async Task ProjectAsync_TrimsRecentEntriesBeyondLimit() + { + var baseTime = DateTimeOffset.Parse("2025-10-26T00:00:00Z"); + + for (var i = 0; i < 25; i++) + { + var run = TestDataFactory.CreateRun( + $"run-{i}", + "tenant-alpha", + RunState.Completed, + "sch-alpha", + stats: new RunStats(candidates: 5, deduped: 4, queued: 3, completed: 5, deltas: 1), + createdAt: baseTime.AddMinutes(i)); + + await _service.ProjectAsync(run, CancellationToken.None); + } + + var projections = await _service.ListAsync("tenant-alpha", CancellationToken.None); + Assert.Single(projections); + var projection = projections[0]; + Assert.Equal(20, projection.Recent.Length); + Assert.Equal(20, projection.Counters.Total); + Assert.Equal("run-24", projection.Recent[0].RunId); + } + + public void Dispose() + { + _harness.Dispose(); + } + + private sealed class StubTimeProvider : TimeProvider + { + private DateTimeOffset _utcNow; + + public StubTimeProvider(DateTimeOffset initial) + => _utcNow = initial; + + public override DateTimeOffset GetUtcNow() => _utcNow; + + public void Advance(TimeSpan delta) => _utcNow = _utcNow.Add(delta); + } +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo.Tests/Services/SchedulerAuditServiceTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.Storage.Mongo.Tests/Services/SchedulerAuditServiceTests.cs similarity index 97% rename from src/StellaOps.Scheduler.Storage.Mongo.Tests/Services/SchedulerAuditServiceTests.cs rename to 
src/Scheduler/__Tests/StellaOps.Scheduler.Storage.Mongo.Tests/Services/SchedulerAuditServiceTests.cs index 54c16689..f7be3a5f 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo.Tests/Services/SchedulerAuditServiceTests.cs +++ b/src/Scheduler/__Tests/StellaOps.Scheduler.Storage.Mongo.Tests/Services/SchedulerAuditServiceTests.cs @@ -1,82 +1,82 @@ -using Microsoft.Extensions.Logging.Abstractions; -using StellaOps.Scheduler.Models; -using StellaOps.Scheduler.Storage.Mongo.Repositories; -using StellaOps.Scheduler.Storage.Mongo.Services; - -namespace StellaOps.Scheduler.Storage.Mongo.Tests.Services; - -public sealed class SchedulerAuditServiceTests : IDisposable -{ - private readonly SchedulerMongoTestHarness _harness; - private readonly AuditRepository _repository; - private readonly StubTimeProvider _timeProvider; - private readonly SchedulerAuditService _service; - - public SchedulerAuditServiceTests() - { - _harness = new SchedulerMongoTestHarness(); - _repository = new AuditRepository(_harness.Context); - _timeProvider = new StubTimeProvider(DateTimeOffset.Parse("2025-10-26T11:30:00Z")); - _service = new SchedulerAuditService(_repository, _timeProvider, NullLogger<SchedulerAuditService>.Instance); - } - - [Fact] - public async Task WriteAsync_PersistsRecordWithGeneratedId() - { - var auditEvent = new SchedulerAuditEvent( - TenantId: "tenant-alpha", - Category: "scheduler", - Action: "create", - Actor: new AuditActor("user_admin", "Admin", "user"), - ScheduleId: "sch-alpha", - CorrelationId: "corr-1", - Metadata: new Dictionary<string, string> - { - ["Reason"] = "initial", - }, - Message: "created schedule"); - - var record = await _service.WriteAsync(auditEvent, CancellationToken.None); - - Assert.StartsWith("audit_", record.Id, StringComparison.Ordinal); - Assert.Equal(_timeProvider.GetUtcNow(), record.OccurredAt); - - var stored = await _repository.ListAsync("tenant-alpha", new AuditQueryOptions { ScheduleId = "sch-alpha" }, session: null, CancellationToken.None); - Assert.Single(stored); - Assert.Equal(record.Id, stored[0].Id); - Assert.Equal("created schedule", stored[0].Message); - Assert.Contains(stored[0].Metadata, pair => pair.Key == "reason" && pair.Value == "initial"); - } - - [Fact] - public async Task WriteAsync_HonoursProvidedAuditId() - { - var auditEvent = new SchedulerAuditEvent( - TenantId: "tenant-alpha", - Category: "scheduler", - Action: "update", - Actor: new AuditActor("user_admin", "Admin", "user"), - ScheduleId: "sch-alpha", - AuditId: "audit_custom_1", - OccurredAt: DateTimeOffset.Parse("2025-10-26T12:00:00Z")); - - var record = await _service.WriteAsync(auditEvent, CancellationToken.None); - Assert.Equal("audit_custom_1", record.Id); - Assert.Equal(DateTimeOffset.Parse("2025-10-26T12:00:00Z"), record.OccurredAt); - } - - public void Dispose() - { - _harness.Dispose(); - } - - private sealed class StubTimeProvider : TimeProvider - { - private DateTimeOffset _utcNow; - - public StubTimeProvider(DateTimeOffset initial) - => _utcNow = initial; - - public override DateTimeOffset GetUtcNow() => _utcNow; - } -} +using Microsoft.Extensions.Logging.Abstractions; +using StellaOps.Scheduler.Models; +using StellaOps.Scheduler.Storage.Mongo.Repositories; +using StellaOps.Scheduler.Storage.Mongo.Services; + +namespace StellaOps.Scheduler.Storage.Mongo.Tests.Services; + +public sealed class SchedulerAuditServiceTests : IDisposable +{ + private readonly SchedulerMongoTestHarness _harness; + private readonly AuditRepository _repository; + private readonly StubTimeProvider 
_timeProvider; + private readonly SchedulerAuditService _service; + + public SchedulerAuditServiceTests() + { + _harness = new SchedulerMongoTestHarness(); + _repository = new AuditRepository(_harness.Context); + _timeProvider = new StubTimeProvider(DateTimeOffset.Parse("2025-10-26T11:30:00Z")); + _service = new SchedulerAuditService(_repository, _timeProvider, NullLogger<SchedulerAuditService>.Instance); + } + + [Fact] + public async Task WriteAsync_PersistsRecordWithGeneratedId() + { + var auditEvent = new SchedulerAuditEvent( + TenantId: "tenant-alpha", + Category: "scheduler", + Action: "create", + Actor: new AuditActor("user_admin", "Admin", "user"), + ScheduleId: "sch-alpha", + CorrelationId: "corr-1", + Metadata: new Dictionary<string, string> + { + ["Reason"] = "initial", + }, + Message: "created schedule"); + + var record = await _service.WriteAsync(auditEvent, CancellationToken.None); + + Assert.StartsWith("audit_", record.Id, StringComparison.Ordinal); + Assert.Equal(_timeProvider.GetUtcNow(), record.OccurredAt); + + var stored = await _repository.ListAsync("tenant-alpha", new AuditQueryOptions { ScheduleId = "sch-alpha" }, session: null, CancellationToken.None); + Assert.Single(stored); + Assert.Equal(record.Id, stored[0].Id); + Assert.Equal("created schedule", stored[0].Message); + Assert.Contains(stored[0].Metadata, pair => pair.Key == "reason" && pair.Value == "initial"); + } + + [Fact] + public async Task WriteAsync_HonoursProvidedAuditId() + { + var auditEvent = new SchedulerAuditEvent( + TenantId: "tenant-alpha", + Category: "scheduler", + Action: "update", + Actor: new AuditActor("user_admin", "Admin", "user"), + ScheduleId: "sch-alpha", + AuditId: "audit_custom_1", + OccurredAt: DateTimeOffset.Parse("2025-10-26T12:00:00Z")); + + var record = await _service.WriteAsync(auditEvent, CancellationToken.None); + Assert.Equal("audit_custom_1", record.Id); + Assert.Equal(DateTimeOffset.Parse("2025-10-26T12:00:00Z"), record.OccurredAt); + } + + public void Dispose() + { + _harness.Dispose(); + } + + private sealed class StubTimeProvider : TimeProvider + { + private DateTimeOffset _utcNow; + + public StubTimeProvider(DateTimeOffset initial) + => _utcNow = initial; + + public override DateTimeOffset GetUtcNow() => _utcNow; + } +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo.Tests/Sessions/SchedulerMongoSessionFactoryTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.Storage.Mongo.Tests/Sessions/SchedulerMongoSessionFactoryTests.cs similarity index 97% rename from src/StellaOps.Scheduler.Storage.Mongo.Tests/Sessions/SchedulerMongoSessionFactoryTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.Storage.Mongo.Tests/Sessions/SchedulerMongoSessionFactoryTests.cs index 9e3da828..0dbf75ec 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo.Tests/Sessions/SchedulerMongoSessionFactoryTests.cs +++ b/src/Scheduler/__Tests/StellaOps.Scheduler.Storage.Mongo.Tests/Sessions/SchedulerMongoSessionFactoryTests.cs @@ -1,35 +1,35 @@ -using System.Threading; -using MongoDB.Driver; -using StellaOps.Scheduler.Storage.Mongo.Sessions; - -namespace StellaOps.Scheduler.Storage.Mongo.Tests.Sessions; - -public sealed class SchedulerMongoSessionFactoryTests -{ - [Fact] - public async Task StartSessionAsync_UsesCausalConsistencyByDefault() - { - using var harness = new SchedulerMongoTestHarness(); - var factory = new SchedulerMongoSessionFactory(harness.Context); - - using var session = await factory.StartSessionAsync(cancellationToken: CancellationToken.None); - 
Assert.True(session.Options.CausalConsistency.GetValueOrDefault()); - } - - [Fact] - public async Task StartSessionAsync_AllowsOverridingOptions() - { - using var harness = new SchedulerMongoTestHarness(); - var factory = new SchedulerMongoSessionFactory(harness.Context); - - var options = new SchedulerMongoSessionOptions - { - CausalConsistency = false, - ReadPreference = ReadPreference.PrimaryPreferred - }; - - using var session = await factory.StartSessionAsync(options); - Assert.False(session.Options.CausalConsistency.GetValueOrDefault(true)); - Assert.Equal(ReadPreference.PrimaryPreferred, session.Options.DefaultTransactionOptions?.ReadPreference); - } -} +using System.Threading; +using MongoDB.Driver; +using StellaOps.Scheduler.Storage.Mongo.Sessions; + +namespace StellaOps.Scheduler.Storage.Mongo.Tests.Sessions; + +public sealed class SchedulerMongoSessionFactoryTests +{ + [Fact] + public async Task StartSessionAsync_UsesCausalConsistencyByDefault() + { + using var harness = new SchedulerMongoTestHarness(); + var factory = new SchedulerMongoSessionFactory(harness.Context); + + using var session = await factory.StartSessionAsync(cancellationToken: CancellationToken.None); + Assert.True(session.Options.CausalConsistency.GetValueOrDefault()); + } + + [Fact] + public async Task StartSessionAsync_AllowsOverridingOptions() + { + using var harness = new SchedulerMongoTestHarness(); + var factory = new SchedulerMongoSessionFactory(harness.Context); + + var options = new SchedulerMongoSessionOptions + { + CausalConsistency = false, + ReadPreference = ReadPreference.PrimaryPreferred + }; + + using var session = await factory.StartSessionAsync(options); + Assert.False(session.Options.CausalConsistency.GetValueOrDefault(true)); + Assert.Equal(ReadPreference.PrimaryPreferred, session.Options.DefaultTransactionOptions?.ReadPreference); + } +} diff --git a/src/StellaOps.Scheduler.Storage.Mongo.Tests/StellaOps.Scheduler.Storage.Mongo.Tests.csproj b/src/Scheduler/__Tests/StellaOps.Scheduler.Storage.Mongo.Tests/StellaOps.Scheduler.Storage.Mongo.Tests.csproj similarity index 71% rename from src/StellaOps.Scheduler.Storage.Mongo.Tests/StellaOps.Scheduler.Storage.Mongo.Tests.csproj rename to src/Scheduler/__Tests/StellaOps.Scheduler.Storage.Mongo.Tests/StellaOps.Scheduler.Storage.Mongo.Tests.csproj index 60e78365..22064978 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo.Tests/StellaOps.Scheduler.Storage.Mongo.Tests.csproj +++ b/src/Scheduler/__Tests/StellaOps.Scheduler.Storage.Mongo.Tests/StellaOps.Scheduler.Storage.Mongo.Tests.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -6,8 +7,8 @@ <UseConcelierTestInfra>false</UseConcelierTestInfra> </PropertyGroup> <ItemGroup> - <ProjectReference Include="../StellaOps.Scheduler.Storage.Mongo/StellaOps.Scheduler.Storage.Mongo.csproj" /> - <ProjectReference Include="../StellaOps.Scheduler.Models/StellaOps.Scheduler.Models.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Scheduler.Storage.Mongo/StellaOps.Scheduler.Storage.Mongo.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Scheduler.Models/StellaOps.Scheduler.Models.csproj" /> </ItemGroup> <ItemGroup> <PackageReference Include="Mongo2Go" Version="4.1.0" /> @@ -20,4 +21,4 @@ <CopyToOutputDirectory>Always</CopyToOutputDirectory> </None> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git 
a/src/StellaOps.Scheduler.Storage.Mongo.Tests/TestDataFactory.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.Storage.Mongo.Tests/TestDataFactory.cs similarity index 96% rename from src/StellaOps.Scheduler.Storage.Mongo.Tests/TestDataFactory.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.Storage.Mongo.Tests/TestDataFactory.cs index 2736368e..520312aa 100644 --- a/src/StellaOps.Scheduler.Storage.Mongo.Tests/TestDataFactory.cs +++ b/src/Scheduler/__Tests/StellaOps.Scheduler.Storage.Mongo.Tests/TestDataFactory.cs @@ -1,98 +1,98 @@ -using System; -using System.Collections.Immutable; - -namespace StellaOps.Scheduler.Storage.Mongo.Tests; - -internal static class TestDataFactory -{ - public static Schedule CreateSchedule( - string id, - string tenantId, - bool enabled = true, - string name = "Nightly Prod") - { - var now = DateTimeOffset.UtcNow; - return new Schedule( - id, - tenantId, - name, - enabled, - "0 2 * * *", - "UTC", - ScheduleMode.AnalysisOnly, - new Selector(SelectorScope.AllImages, tenantId), - ScheduleOnlyIf.Default, - ScheduleNotify.Default, - ScheduleLimits.Default, - now, - "svc_scheduler", - now, - "svc_scheduler", - ImmutableArray<string>.Empty, - SchedulerSchemaVersions.Schedule); - } - - public static Run CreateRun( - string id, - string tenantId, - RunState state, - string? scheduleId = null, - RunTrigger trigger = RunTrigger.Manual, - RunStats? stats = null, - DateTimeOffset? createdAt = null, - DateTimeOffset? startedAt = null) - { - var resolvedStats = stats ?? new RunStats(candidates: 10, deduped: 8, queued: 5, completed: 0, deltas: 2); - var created = createdAt ?? DateTimeOffset.UtcNow; - return new Run( - id, - tenantId, - trigger, - state, - resolvedStats, - created, - scheduleId: scheduleId, - reason: new RunReason(manualReason: "test"), - startedAt: startedAt ?? created); - } - - public static ImpactSet CreateImpactSet(string tenantId, string snapshotId, DateTimeOffset? generatedAt = null, bool usageOnly = true) - { - var selector = new Selector(SelectorScope.AllImages, tenantId); - var image = new ImpactImage( - "sha256:" + Guid.NewGuid().ToString("N"), - "registry", - "repo/app", - namespaces: new[] { "team-a" }, - tags: new[] { "prod" }, - usedByEntrypoint: true); - - return new ImpactSet( - selector, - new[] { image }, - usageOnly: usageOnly, - generatedAt ?? DateTimeOffset.UtcNow, - total: 1, - snapshotId: snapshotId, - schemaVersion: SchedulerSchemaVersions.ImpactSet); - } - - public static AuditRecord CreateAuditRecord( - string tenantId, - string idSuffix, - DateTimeOffset? occurredAt = null, - string? scheduleId = null, - string? category = null, - string? action = null) - { - return new AuditRecord( - $"audit_{idSuffix}", - tenantId, - category ?? "scheduler", - action ?? "create", - occurredAt ?? DateTimeOffset.UtcNow, - new AuditActor("user_admin", "Admin", "user"), - scheduleId: scheduleId ?? 
$"sch_{idSuffix}", - message: "created"); - } -} +using System; +using System.Collections.Immutable; + +namespace StellaOps.Scheduler.Storage.Mongo.Tests; + +internal static class TestDataFactory +{ + public static Schedule CreateSchedule( + string id, + string tenantId, + bool enabled = true, + string name = "Nightly Prod") + { + var now = DateTimeOffset.UtcNow; + return new Schedule( + id, + tenantId, + name, + enabled, + "0 2 * * *", + "UTC", + ScheduleMode.AnalysisOnly, + new Selector(SelectorScope.AllImages, tenantId), + ScheduleOnlyIf.Default, + ScheduleNotify.Default, + ScheduleLimits.Default, + now, + "svc_scheduler", + now, + "svc_scheduler", + ImmutableArray<string>.Empty, + SchedulerSchemaVersions.Schedule); + } + + public static Run CreateRun( + string id, + string tenantId, + RunState state, + string? scheduleId = null, + RunTrigger trigger = RunTrigger.Manual, + RunStats? stats = null, + DateTimeOffset? createdAt = null, + DateTimeOffset? startedAt = null) + { + var resolvedStats = stats ?? new RunStats(candidates: 10, deduped: 8, queued: 5, completed: 0, deltas: 2); + var created = createdAt ?? DateTimeOffset.UtcNow; + return new Run( + id, + tenantId, + trigger, + state, + resolvedStats, + created, + scheduleId: scheduleId, + reason: new RunReason(manualReason: "test"), + startedAt: startedAt ?? created); + } + + public static ImpactSet CreateImpactSet(string tenantId, string snapshotId, DateTimeOffset? generatedAt = null, bool usageOnly = true) + { + var selector = new Selector(SelectorScope.AllImages, tenantId); + var image = new ImpactImage( + "sha256:" + Guid.NewGuid().ToString("N"), + "registry", + "repo/app", + namespaces: new[] { "team-a" }, + tags: new[] { "prod" }, + usedByEntrypoint: true); + + return new ImpactSet( + selector, + new[] { image }, + usageOnly: usageOnly, + generatedAt ?? DateTimeOffset.UtcNow, + total: 1, + snapshotId: snapshotId, + schemaVersion: SchedulerSchemaVersions.ImpactSet); + } + + public static AuditRecord CreateAuditRecord( + string tenantId, + string idSuffix, + DateTimeOffset? occurredAt = null, + string? scheduleId = null, + string? category = null, + string? action = null) + { + return new AuditRecord( + $"audit_{idSuffix}", + tenantId, + category ?? "scheduler", + action ?? "create", + occurredAt ?? DateTimeOffset.UtcNow, + new AuditActor("user_admin", "Admin", "user"), + scheduleId: scheduleId ?? 
$"sch_{idSuffix}", + message: "created"); + } +} diff --git a/src/StellaOps.Scheduler.WebService.Tests/CartographerWebhookClientTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.WebService.Tests/CartographerWebhookClientTests.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService.Tests/CartographerWebhookClientTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.WebService.Tests/CartographerWebhookClientTests.cs index 5711528e..c03d773d 100644 --- a/src/StellaOps.Scheduler.WebService.Tests/CartographerWebhookClientTests.cs +++ b/src/Scheduler/__Tests/StellaOps.Scheduler.WebService.Tests/CartographerWebhookClientTests.cs @@ -1,140 +1,140 @@ -using System.Net; -using System.Net.Http; -using System.Net.Http.Json; -using System.Text.Json; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Scheduler.Models; -using StellaOps.Scheduler.WebService.GraphJobs; -using StellaOps.Scheduler.WebService.Options; - -namespace StellaOps.Scheduler.WebService.Tests; - -public sealed class CartographerWebhookClientTests -{ - [Fact] - public async Task NotifyAsync_PostsPayload_WhenEnabled() - { - var handler = new RecordingHandler(); - var httpClient = new HttpClient(handler); - var options = Microsoft.Extensions.Options.Options.Create(new SchedulerCartographerOptions - { - Webhook = - { - Enabled = true, - Endpoint = "https://cartographer.local/hooks/graph-completed", - ApiKeyHeader = "X-Api-Key", - ApiKey = "secret" - } - }); - using var loggerFactory = LoggerFactory.Create(builder => builder.AddDebug()); - var client = new CartographerWebhookClient(httpClient, new OptionsMonitorStub<SchedulerCartographerOptions>(options), loggerFactory.CreateLogger<CartographerWebhookClient>()); - - var job = new GraphBuildJob( - id: "gbj_test", - tenantId: "tenant-alpha", - sbomId: "sbom", - sbomVersionId: "sbom_v1", - sbomDigest: "sha256:" + new string('a', 64), - status: GraphJobStatus.Completed, - trigger: GraphBuildJobTrigger.Backfill, - createdAt: DateTimeOffset.UtcNow, - graphSnapshotId: "snap", - attempts: 1, - cartographerJobId: "carto-123", - correlationId: "corr-1", - startedAt: null, - completedAt: DateTimeOffset.UtcNow, - error: null, - metadata: Array.Empty<KeyValuePair<string, string>>()); - - var notification = new GraphJobCompletionNotification( - job.TenantId, - GraphJobQueryType.Build, - GraphJobStatus.Completed, - DateTimeOffset.UtcNow, - GraphJobResponse.From(job), - "oras://snap/result", - "corr-1", - null); - - await client.NotifyAsync(notification, CancellationToken.None); - - Assert.NotNull(handler.LastRequest); - Assert.Equal("https://cartographer.local/hooks/graph-completed", handler.LastRequest.RequestUri!.ToString()); - Assert.True(handler.LastRequest.Headers.TryGetValues("X-Api-Key", out var values) && values!.Single() == "secret"); - var json = JsonSerializer.Deserialize<JsonElement>(handler.LastPayload!); - Assert.Equal("gbj_test", json.GetProperty("jobId").GetString()); - Assert.Equal("tenant-alpha", json.GetProperty("tenantId").GetString()); - } - - [Fact] - public async Task NotifyAsync_Skips_WhenDisabled() - { - var handler = new RecordingHandler(); - var httpClient = new HttpClient(handler); - var options = Microsoft.Extensions.Options.Options.Create(new SchedulerCartographerOptions()); - using var loggerFactory = LoggerFactory.Create(builder => builder.AddDebug()); - var client = new CartographerWebhookClient(httpClient, new OptionsMonitorStub<SchedulerCartographerOptions>(options), 
loggerFactory.CreateLogger<CartographerWebhookClient>()); - - var job = new GraphOverlayJob( - id: "goj-test", - tenantId: "tenant-alpha", - graphSnapshotId: "snap", - overlayKind: GraphOverlayKind.Policy, - overlayKey: "policy@1", - status: GraphJobStatus.Completed, - trigger: GraphOverlayJobTrigger.Manual, - createdAt: DateTimeOffset.UtcNow, - subjects: Array.Empty<string>(), - attempts: 1, - correlationId: null, - startedAt: null, - completedAt: DateTimeOffset.UtcNow, - error: null, - metadata: Array.Empty<KeyValuePair<string, string>>()); - - var notification = new GraphJobCompletionNotification( - job.TenantId, - GraphJobQueryType.Overlay, - GraphJobStatus.Completed, - DateTimeOffset.UtcNow, - GraphJobResponse.From(job), - null, - null, - null); - - await client.NotifyAsync(notification, CancellationToken.None); - - Assert.Null(handler.LastRequest); - } - - private sealed class RecordingHandler : HttpMessageHandler - { - public HttpRequestMessage? LastRequest { get; private set; } - public string? LastPayload { get; private set; } - - protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) - { - LastRequest = request; - LastPayload = request.Content is null ? null : request.Content.ReadAsStringAsync(cancellationToken).Result; - return Task.FromResult(new HttpResponseMessage(HttpStatusCode.OK)); - } - } - - private sealed class OptionsMonitorStub<T> : IOptionsMonitor<T> where T : class - { - private readonly IOptions<T> _options; - - public OptionsMonitorStub(IOptions<T> options) - { - _options = options; - } - - public T CurrentValue => _options.Value; - - public T Get(string? name) => _options.Value; - - public IDisposable? OnChange(Action<T, string?> listener) => null; - } -} +using System.Net; +using System.Net.Http; +using System.Net.Http.Json; +using System.Text.Json; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Scheduler.Models; +using StellaOps.Scheduler.WebService.GraphJobs; +using StellaOps.Scheduler.WebService.Options; + +namespace StellaOps.Scheduler.WebService.Tests; + +public sealed class CartographerWebhookClientTests +{ + [Fact] + public async Task NotifyAsync_PostsPayload_WhenEnabled() + { + var handler = new RecordingHandler(); + var httpClient = new HttpClient(handler); + var options = Microsoft.Extensions.Options.Options.Create(new SchedulerCartographerOptions + { + Webhook = + { + Enabled = true, + Endpoint = "https://cartographer.local/hooks/graph-completed", + ApiKeyHeader = "X-Api-Key", + ApiKey = "secret" + } + }); + using var loggerFactory = LoggerFactory.Create(builder => builder.AddDebug()); + var client = new CartographerWebhookClient(httpClient, new OptionsMonitorStub<SchedulerCartographerOptions>(options), loggerFactory.CreateLogger<CartographerWebhookClient>()); + + var job = new GraphBuildJob( + id: "gbj_test", + tenantId: "tenant-alpha", + sbomId: "sbom", + sbomVersionId: "sbom_v1", + sbomDigest: "sha256:" + new string('a', 64), + status: GraphJobStatus.Completed, + trigger: GraphBuildJobTrigger.Backfill, + createdAt: DateTimeOffset.UtcNow, + graphSnapshotId: "snap", + attempts: 1, + cartographerJobId: "carto-123", + correlationId: "corr-1", + startedAt: null, + completedAt: DateTimeOffset.UtcNow, + error: null, + metadata: Array.Empty<KeyValuePair<string, string>>()); + + var notification = new GraphJobCompletionNotification( + job.TenantId, + GraphJobQueryType.Build, + GraphJobStatus.Completed, + DateTimeOffset.UtcNow, + 
GraphJobResponse.From(job), + "oras://snap/result", + "corr-1", + null); + + await client.NotifyAsync(notification, CancellationToken.None); + + Assert.NotNull(handler.LastRequest); + Assert.Equal("https://cartographer.local/hooks/graph-completed", handler.LastRequest.RequestUri!.ToString()); + Assert.True(handler.LastRequest.Headers.TryGetValues("X-Api-Key", out var values) && values!.Single() == "secret"); + var json = JsonSerializer.Deserialize<JsonElement>(handler.LastPayload!); + Assert.Equal("gbj_test", json.GetProperty("jobId").GetString()); + Assert.Equal("tenant-alpha", json.GetProperty("tenantId").GetString()); + } + + [Fact] + public async Task NotifyAsync_Skips_WhenDisabled() + { + var handler = new RecordingHandler(); + var httpClient = new HttpClient(handler); + var options = Microsoft.Extensions.Options.Options.Create(new SchedulerCartographerOptions()); + using var loggerFactory = LoggerFactory.Create(builder => builder.AddDebug()); + var client = new CartographerWebhookClient(httpClient, new OptionsMonitorStub<SchedulerCartographerOptions>(options), loggerFactory.CreateLogger<CartographerWebhookClient>()); + + var job = new GraphOverlayJob( + id: "goj-test", + tenantId: "tenant-alpha", + graphSnapshotId: "snap", + overlayKind: GraphOverlayKind.Policy, + overlayKey: "policy@1", + status: GraphJobStatus.Completed, + trigger: GraphOverlayJobTrigger.Manual, + createdAt: DateTimeOffset.UtcNow, + subjects: Array.Empty<string>(), + attempts: 1, + correlationId: null, + startedAt: null, + completedAt: DateTimeOffset.UtcNow, + error: null, + metadata: Array.Empty<KeyValuePair<string, string>>()); + + var notification = new GraphJobCompletionNotification( + job.TenantId, + GraphJobQueryType.Overlay, + GraphJobStatus.Completed, + DateTimeOffset.UtcNow, + GraphJobResponse.From(job), + null, + null, + null); + + await client.NotifyAsync(notification, CancellationToken.None); + + Assert.Null(handler.LastRequest); + } + + private sealed class RecordingHandler : HttpMessageHandler + { + public HttpRequestMessage? LastRequest { get; private set; } + public string? LastPayload { get; private set; } + + protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) + { + LastRequest = request; + LastPayload = request.Content is null ? null : request.Content.ReadAsStringAsync(cancellationToken).Result; + return Task.FromResult(new HttpResponseMessage(HttpStatusCode.OK)); + } + } + + private sealed class OptionsMonitorStub<T> : IOptionsMonitor<T> where T : class + { + private readonly IOptions<T> _options; + + public OptionsMonitorStub(IOptions<T> options) + { + _options = options; + } + + public T CurrentValue => _options.Value; + + public T Get(string? name) => _options.Value; + + public IDisposable? 
OnChange(Action<T, string?> listener) => null; + } +} diff --git a/src/StellaOps.Scheduler.WebService.Tests/EventWebhookEndpointTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.WebService.Tests/EventWebhookEndpointTests.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService.Tests/EventWebhookEndpointTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.WebService.Tests/EventWebhookEndpointTests.cs index e1ec21cc..dae86c41 100644 --- a/src/StellaOps.Scheduler.WebService.Tests/EventWebhookEndpointTests.cs +++ b/src/Scheduler/__Tests/StellaOps.Scheduler.WebService.Tests/EventWebhookEndpointTests.cs @@ -1,128 +1,128 @@ -using System; -using System.Collections.Generic; -using System.Net; -using System.Security.Cryptography; -using System.Text; -using System.Text.Json; -using System.Threading.Tasks; -using Microsoft.AspNetCore.Mvc.Testing; -using Microsoft.Extensions.Configuration; - -namespace StellaOps.Scheduler.WebService.Tests; - -public sealed class EventWebhookEndpointTests : IClassFixture<WebApplicationFactory<Program>> -{ - static EventWebhookEndpointTests() - { - Environment.SetEnvironmentVariable("Scheduler__Events__Webhooks__Feedser__HmacSecret", FeedserSecret); - Environment.SetEnvironmentVariable("Scheduler__Events__Webhooks__Feedser__Enabled", "true"); - Environment.SetEnvironmentVariable("Scheduler__Events__Webhooks__Vexer__HmacSecret", VexerSecret); - Environment.SetEnvironmentVariable("Scheduler__Events__Webhooks__Vexer__Enabled", "true"); - } - - private const string FeedserSecret = "feedser-secret"; - private const string VexerSecret = "vexer-secret"; - - private readonly WebApplicationFactory<Program> _factory; - - public EventWebhookEndpointTests(WebApplicationFactory<Program> factory) - { - _factory = factory; - } - - [Fact] - public async Task FeedserWebhook_AcceptsValidSignature() - { - using var client = _factory.CreateClient(); - var payload = new - { - exportId = "feedser-exp-1", - changedProductKeys = new[] { "pkg:rpm/openssl", "pkg:deb/nginx" }, - kev = new[] { "CVE-2024-0001" }, - window = new { from = DateTimeOffset.UtcNow.AddHours(-1), to = DateTimeOffset.UtcNow } - }; - - var json = JsonSerializer.Serialize(payload, new JsonSerializerOptions(JsonSerializerDefaults.Web)); - using var request = new HttpRequestMessage(HttpMethod.Post, "/events/feedser-export") - { - Content = new StringContent(json, Encoding.UTF8, "application/json") - }; - request.Headers.TryAddWithoutValidation("X-Scheduler-Signature", ComputeSignature(FeedserSecret, json)); - - var response = await client.SendAsync(request); - Assert.Equal(HttpStatusCode.Accepted, response.StatusCode); - } - - [Fact] - public async Task FeedserWebhook_RejectsInvalidSignature() - { - using var client = _factory.CreateClient(); - var payload = new - { - exportId = "feedser-exp-2", - changedProductKeys = new[] { "pkg:nuget/log4net" } - }; - - var json = JsonSerializer.Serialize(payload, new JsonSerializerOptions(JsonSerializerDefaults.Web)); - using var request = new HttpRequestMessage(HttpMethod.Post, "/events/feedser-export") - { - Content = new StringContent(json, Encoding.UTF8, "application/json") - }; - request.Headers.TryAddWithoutValidation("X-Scheduler-Signature", "sha256=invalid"); - - var response = await client.SendAsync(request); - Assert.Equal(HttpStatusCode.Unauthorized, response.StatusCode); - } - - [Fact] - public async Task VexerWebhook_HonoursRateLimit() - { - using var restrictedFactory = _factory.WithWebHostBuilder(builder => - { - builder.ConfigureAppConfiguration((_, 
configuration) => - { - configuration.AddInMemoryCollection(new Dictionary<string, string?> - { - ["Scheduler:Events:Webhooks:Vexer:RateLimitRequests"] = "1", - ["Scheduler:Events:Webhooks:Vexer:RateLimitWindowSeconds"] = "60" - }); - }); - }); - - using var client = restrictedFactory.CreateClient(); - var payload = new - { - exportId = "vexer-exp-1", - changedClaims = new[] - { - new { productKey = "pkg:deb/openssl", vulnerabilityId = "CVE-2024-1234", status = "affected" } - } - }; - - var json = JsonSerializer.Serialize(payload, new JsonSerializerOptions(JsonSerializerDefaults.Web)); - - using var first = new HttpRequestMessage(HttpMethod.Post, "/events/vexer-export") - { - Content = new StringContent(json, Encoding.UTF8, "application/json") - }; - first.Headers.TryAddWithoutValidation("X-Scheduler-Signature", ComputeSignature(VexerSecret, json)); - var firstResponse = await client.SendAsync(first); - Assert.Equal(HttpStatusCode.Accepted, firstResponse.StatusCode); - - using var second = new HttpRequestMessage(HttpMethod.Post, "/events/vexer-export") - { - Content = new StringContent(json, Encoding.UTF8, "application/json") - }; - second.Headers.TryAddWithoutValidation("X-Scheduler-Signature", ComputeSignature(VexerSecret, json)); - var secondResponse = await client.SendAsync(second); - Assert.Equal((HttpStatusCode)429, secondResponse.StatusCode); - Assert.True(secondResponse.Headers.Contains("Retry-After")); - } - - private static string ComputeSignature(string secret, string payload) - { - using var hmac = new HMACSHA256(Encoding.UTF8.GetBytes(secret)); - var hash = hmac.ComputeHash(Encoding.UTF8.GetBytes(payload)); - return "sha256=" + Convert.ToHexString(hash).ToLowerInvariant(); - } -} +using System; +using System.Collections.Generic; +using System.Net; +using System.Security.Cryptography; +using System.Text; +using System.Text.Json; +using System.Threading.Tasks; +using Microsoft.AspNetCore.Mvc.Testing; +using Microsoft.Extensions.Configuration; + +namespace StellaOps.Scheduler.WebService.Tests; + +public sealed class EventWebhookEndpointTests : IClassFixture<WebApplicationFactory<Program>> +{ + static EventWebhookEndpointTests() + { + Environment.SetEnvironmentVariable("Scheduler__Events__Webhooks__Feedser__HmacSecret", FeedserSecret); + Environment.SetEnvironmentVariable("Scheduler__Events__Webhooks__Feedser__Enabled", "true"); + Environment.SetEnvironmentVariable("Scheduler__Events__Webhooks__Vexer__HmacSecret", VexerSecret); + Environment.SetEnvironmentVariable("Scheduler__Events__Webhooks__Vexer__Enabled", "true"); + } + + private const string FeedserSecret = "feedser-secret"; + private const string VexerSecret = "vexer-secret"; + + private readonly WebApplicationFactory<Program> _factory; + + public EventWebhookEndpointTests(WebApplicationFactory<Program> factory) + { + _factory = factory; + } + + [Fact] + public async Task FeedserWebhook_AcceptsValidSignature() + { + using var client = _factory.CreateClient(); + var payload = new + { + exportId = "feedser-exp-1", + changedProductKeys = new[] { "pkg:rpm/openssl", "pkg:deb/nginx" }, + kev = new[] { "CVE-2024-0001" }, + window = new { from = DateTimeOffset.UtcNow.AddHours(-1), to = DateTimeOffset.UtcNow } + }; + + var json = JsonSerializer.Serialize(payload, new JsonSerializerOptions(JsonSerializerDefaults.Web)); + using var request = new HttpRequestMessage(HttpMethod.Post, "/events/feedser-export") + { + Content = new StringContent(json, Encoding.UTF8, "application/json") + }; + 
request.Headers.TryAddWithoutValidation("X-Scheduler-Signature", ComputeSignature(FeedserSecret, json)); + + var response = await client.SendAsync(request); + Assert.Equal(HttpStatusCode.Accepted, response.StatusCode); + } + + [Fact] + public async Task FeedserWebhook_RejectsInvalidSignature() + { + using var client = _factory.CreateClient(); + var payload = new + { + exportId = "feedser-exp-2", + changedProductKeys = new[] { "pkg:nuget/log4net" } + }; + + var json = JsonSerializer.Serialize(payload, new JsonSerializerOptions(JsonSerializerDefaults.Web)); + using var request = new HttpRequestMessage(HttpMethod.Post, "/events/feedser-export") + { + Content = new StringContent(json, Encoding.UTF8, "application/json") + }; + request.Headers.TryAddWithoutValidation("X-Scheduler-Signature", "sha256=invalid"); + + var response = await client.SendAsync(request); + Assert.Equal(HttpStatusCode.Unauthorized, response.StatusCode); + } + + [Fact] + public async Task VexerWebhook_HonoursRateLimit() + { + using var restrictedFactory = _factory.WithWebHostBuilder(builder => + { + builder.ConfigureAppConfiguration((_, configuration) => + { + configuration.AddInMemoryCollection(new Dictionary<string, string?> + { + ["Scheduler:Events:Webhooks:Vexer:RateLimitRequests"] = "1", + ["Scheduler:Events:Webhooks:Vexer:RateLimitWindowSeconds"] = "60" + }); + }); + }); + + using var client = restrictedFactory.CreateClient(); + var payload = new + { + exportId = "vexer-exp-1", + changedClaims = new[] + { + new { productKey = "pkg:deb/openssl", vulnerabilityId = "CVE-2024-1234", status = "affected" } + } + }; + + var json = JsonSerializer.Serialize(payload, new JsonSerializerOptions(JsonSerializerDefaults.Web)); + + using var first = new HttpRequestMessage(HttpMethod.Post, "/events/vexer-export") + { + Content = new StringContent(json, Encoding.UTF8, "application/json") + }; + first.Headers.TryAddWithoutValidation("X-Scheduler-Signature", ComputeSignature(VexerSecret, json)); + var firstResponse = await client.SendAsync(first); + Assert.Equal(HttpStatusCode.Accepted, firstResponse.StatusCode); + + using var second = new HttpRequestMessage(HttpMethod.Post, "/events/vexer-export") + { + Content = new StringContent(json, Encoding.UTF8, "application/json") + }; + second.Headers.TryAddWithoutValidation("X-Scheduler-Signature", ComputeSignature(VexerSecret, json)); + var secondResponse = await client.SendAsync(second); + Assert.Equal((HttpStatusCode)429, secondResponse.StatusCode); + Assert.True(secondResponse.Headers.Contains("Retry-After")); + } + + private static string ComputeSignature(string secret, string payload) + { + using var hmac = new HMACSHA256(Encoding.UTF8.GetBytes(secret)); + var hash = hmac.ComputeHash(Encoding.UTF8.GetBytes(payload)); + return "sha256=" + Convert.ToHexString(hash).ToLowerInvariant(); + } +} diff --git a/src/StellaOps.Scheduler.WebService.Tests/GlobalUsings.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.WebService.Tests/GlobalUsings.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService.Tests/GlobalUsings.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.WebService.Tests/GlobalUsings.cs index fb6a9c39..4a5ef335 100644 --- a/src/StellaOps.Scheduler.WebService.Tests/GlobalUsings.cs +++ b/src/Scheduler/__Tests/StellaOps.Scheduler.WebService.Tests/GlobalUsings.cs @@ -1,6 +1,6 @@ -global using System.Net.Http.Json; -global using System.Text.Json; -global using System.Text.Json.Serialization; -global using System.Threading.Tasks; -global using 
Microsoft.AspNetCore.Mvc.Testing; -global using Xunit; +global using System.Net.Http.Json; +global using System.Text.Json; +global using System.Text.Json.Serialization; +global using System.Threading.Tasks; +global using Microsoft.AspNetCore.Mvc.Testing; +global using Xunit; diff --git a/src/StellaOps.Scheduler.WebService.Tests/GraphJobEndpointTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.WebService.Tests/GraphJobEndpointTests.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService.Tests/GraphJobEndpointTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.WebService.Tests/GraphJobEndpointTests.cs index 4790f025..09163d6f 100644 --- a/src/StellaOps.Scheduler.WebService.Tests/GraphJobEndpointTests.cs +++ b/src/Scheduler/__Tests/StellaOps.Scheduler.WebService.Tests/GraphJobEndpointTests.cs @@ -1,110 +1,110 @@ -using StellaOps.Auth.Abstractions; - -namespace StellaOps.Scheduler.WebService.Tests; - -public sealed class GraphJobEndpointTests : IClassFixture<SchedulerWebApplicationFactory> -{ - private readonly SchedulerWebApplicationFactory _factory; - - public GraphJobEndpointTests(SchedulerWebApplicationFactory factory) - { - _factory = factory; - } - - [Fact] - public async Task CreateGraphBuildJob_RequiresGraphWriteScope() - { - using var client = _factory.CreateClient(); - client.DefaultRequestHeaders.Add("X-Tenant-Id", "tenant-alpha"); - - var response = await client.PostAsJsonAsync("/graphs/build", new - { - sbomId = "sbom-test", - sbomVersionId = "sbom-ver", - sbomDigest = "sha256:" + new string('a', 64) - }); - - Assert.Equal(System.Net.HttpStatusCode.Unauthorized, response.StatusCode); - } - - [Fact] - public async Task CreateGraphBuildJob_AndList() - { - using var client = _factory.CreateClient(); - client.DefaultRequestHeaders.Add("X-Tenant-Id", "tenant-alpha"); - client.DefaultRequestHeaders.Add("X-Scopes", string.Join(' ', StellaOpsScopes.GraphWrite, StellaOpsScopes.GraphRead)); - - var createResponse = await client.PostAsJsonAsync("/graphs/build", new - { - sbomId = "sbom-alpha", - sbomVersionId = "sbom-alpha-v1", - sbomDigest = "sha256:" + new string('b', 64), - metadata = new { source = "test" } - }); - - createResponse.EnsureSuccessStatusCode(); - - var listResponse = await client.GetAsync("/graphs/jobs"); - listResponse.EnsureSuccessStatusCode(); - - var json = await listResponse.Content.ReadAsStringAsync(); - using var document = JsonDocument.Parse(json); - var root = document.RootElement; - Assert.True(root.TryGetProperty("jobs", out var jobs)); - Assert.True(jobs.GetArrayLength() >= 1); - var first = jobs[0]; - Assert.Equal("build", first.GetProperty("kind").GetString()); - Assert.Equal("tenant-alpha", first.GetProperty("tenantId").GetString()); - Assert.Equal("pending", first.GetProperty("status").GetString()); - } - - [Fact] - public async Task CompleteOverlayJob_UpdatesStatusAndMetrics() - { - using var client = _factory.CreateClient(); - client.DefaultRequestHeaders.Add("X-Tenant-Id", "tenant-bravo"); - client.DefaultRequestHeaders.Add("X-Scopes", string.Join(' ', StellaOpsScopes.GraphWrite, StellaOpsScopes.GraphRead)); - - var createOverlay = await client.PostAsJsonAsync("/graphs/overlays", new - { - graphSnapshotId = "graph_snap_20251026", - overlayKind = "policy", - overlayKey = "policy@2025-10-01", - subjects = new[] { "artifact/service-api" } - }); - - createOverlay.EnsureSuccessStatusCode(); - var createdJson = await createOverlay.Content.ReadAsStringAsync(); - using var createdDoc = JsonDocument.Parse(createdJson); - var jobId = 
createdDoc.RootElement.GetProperty("id").GetString(); - Assert.False(string.IsNullOrEmpty(jobId)); - - var completeResponse = await client.PostAsJsonAsync("/graphs/hooks/completed", new - { - jobId = jobId, - jobType = "Overlay", - status = "Completed", - occurredAt = DateTimeOffset.UtcNow, - correlationId = "corr-123", - resultUri = "oras://cartographer/snapshots/graph_snap_20251026" - }); - - completeResponse.EnsureSuccessStatusCode(); - var completedJson = await completeResponse.Content.ReadAsStringAsync(); - using var completedDoc = JsonDocument.Parse(completedJson); - Assert.Equal("completed", completedDoc.RootElement.GetProperty("status").GetString()); - - var metricsResponse = await client.GetAsync("/graphs/overlays/lag"); - metricsResponse.EnsureSuccessStatusCode(); - var metricsJson = await metricsResponse.Content.ReadAsStringAsync(); - using var metricsDoc = JsonDocument.Parse(metricsJson); - var metricsRoot = metricsDoc.RootElement; - Assert.Equal("tenant-bravo", metricsRoot.GetProperty("tenantId").GetString()); - Assert.True(metricsRoot.GetProperty("completed").GetInt32() >= 1); - var recent = metricsRoot.GetProperty("recentCompleted"); - Assert.True(recent.GetArrayLength() >= 1); - var entry = recent[0]; - Assert.Equal(jobId, entry.GetProperty("jobId").GetString()); - Assert.Equal("corr-123", entry.GetProperty("correlationId").GetString()); - } -} +using StellaOps.Auth.Abstractions; + +namespace StellaOps.Scheduler.WebService.Tests; + +public sealed class GraphJobEndpointTests : IClassFixture<SchedulerWebApplicationFactory> +{ + private readonly SchedulerWebApplicationFactory _factory; + + public GraphJobEndpointTests(SchedulerWebApplicationFactory factory) + { + _factory = factory; + } + + [Fact] + public async Task CreateGraphBuildJob_RequiresGraphWriteScope() + { + using var client = _factory.CreateClient(); + client.DefaultRequestHeaders.Add("X-Tenant-Id", "tenant-alpha"); + + var response = await client.PostAsJsonAsync("/graphs/build", new + { + sbomId = "sbom-test", + sbomVersionId = "sbom-ver", + sbomDigest = "sha256:" + new string('a', 64) + }); + + Assert.Equal(System.Net.HttpStatusCode.Unauthorized, response.StatusCode); + } + + [Fact] + public async Task CreateGraphBuildJob_AndList() + { + using var client = _factory.CreateClient(); + client.DefaultRequestHeaders.Add("X-Tenant-Id", "tenant-alpha"); + client.DefaultRequestHeaders.Add("X-Scopes", string.Join(' ', StellaOpsScopes.GraphWrite, StellaOpsScopes.GraphRead)); + + var createResponse = await client.PostAsJsonAsync("/graphs/build", new + { + sbomId = "sbom-alpha", + sbomVersionId = "sbom-alpha-v1", + sbomDigest = "sha256:" + new string('b', 64), + metadata = new { source = "test" } + }); + + createResponse.EnsureSuccessStatusCode(); + + var listResponse = await client.GetAsync("/graphs/jobs"); + listResponse.EnsureSuccessStatusCode(); + + var json = await listResponse.Content.ReadAsStringAsync(); + using var document = JsonDocument.Parse(json); + var root = document.RootElement; + Assert.True(root.TryGetProperty("jobs", out var jobs)); + Assert.True(jobs.GetArrayLength() >= 1); + var first = jobs[0]; + Assert.Equal("build", first.GetProperty("kind").GetString()); + Assert.Equal("tenant-alpha", first.GetProperty("tenantId").GetString()); + Assert.Equal("pending", first.GetProperty("status").GetString()); + } + + [Fact] + public async Task CompleteOverlayJob_UpdatesStatusAndMetrics() + { + using var client = _factory.CreateClient(); + client.DefaultRequestHeaders.Add("X-Tenant-Id", "tenant-bravo"); + 
client.DefaultRequestHeaders.Add("X-Scopes", string.Join(' ', StellaOpsScopes.GraphWrite, StellaOpsScopes.GraphRead)); + + var createOverlay = await client.PostAsJsonAsync("/graphs/overlays", new + { + graphSnapshotId = "graph_snap_20251026", + overlayKind = "policy", + overlayKey = "policy@2025-10-01", + subjects = new[] { "artifact/service-api" } + }); + + createOverlay.EnsureSuccessStatusCode(); + var createdJson = await createOverlay.Content.ReadAsStringAsync(); + using var createdDoc = JsonDocument.Parse(createdJson); + var jobId = createdDoc.RootElement.GetProperty("id").GetString(); + Assert.False(string.IsNullOrEmpty(jobId)); + + var completeResponse = await client.PostAsJsonAsync("/graphs/hooks/completed", new + { + jobId = jobId, + jobType = "Overlay", + status = "Completed", + occurredAt = DateTimeOffset.UtcNow, + correlationId = "corr-123", + resultUri = "oras://cartographer/snapshots/graph_snap_20251026" + }); + + completeResponse.EnsureSuccessStatusCode(); + var completedJson = await completeResponse.Content.ReadAsStringAsync(); + using var completedDoc = JsonDocument.Parse(completedJson); + Assert.Equal("completed", completedDoc.RootElement.GetProperty("status").GetString()); + + var metricsResponse = await client.GetAsync("/graphs/overlays/lag"); + metricsResponse.EnsureSuccessStatusCode(); + var metricsJson = await metricsResponse.Content.ReadAsStringAsync(); + using var metricsDoc = JsonDocument.Parse(metricsJson); + var metricsRoot = metricsDoc.RootElement; + Assert.Equal("tenant-bravo", metricsRoot.GetProperty("tenantId").GetString()); + Assert.True(metricsRoot.GetProperty("completed").GetInt32() >= 1); + var recent = metricsRoot.GetProperty("recentCompleted"); + Assert.True(recent.GetArrayLength() >= 1); + var entry = recent[0]; + Assert.Equal(jobId, entry.GetProperty("jobId").GetString()); + Assert.Equal("corr-123", entry.GetProperty("correlationId").GetString()); + } +} diff --git a/src/StellaOps.Scheduler.WebService.Tests/GraphJobEventPublisherTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.WebService.Tests/GraphJobEventPublisherTests.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService.Tests/GraphJobEventPublisherTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.WebService.Tests/GraphJobEventPublisherTests.cs index 60cc5a68..a42751d1 100644 --- a/src/StellaOps.Scheduler.WebService.Tests/GraphJobEventPublisherTests.cs +++ b/src/Scheduler/__Tests/StellaOps.Scheduler.WebService.Tests/GraphJobEventPublisherTests.cs @@ -1,151 +1,151 @@ -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Auth.Abstractions; -using StellaOps.Scheduler.Models; -using StellaOps.Scheduler.WebService.GraphJobs; -using StellaOps.Scheduler.WebService.GraphJobs.Events; -using StellaOps.Scheduler.WebService.Options; - -namespace StellaOps.Scheduler.WebService.Tests; - -public sealed class GraphJobEventPublisherTests -{ - [Fact] - public async Task PublishAsync_WritesEventJson_WhenEnabled() - { - var options = Microsoft.Extensions.Options.Options.Create(new SchedulerEventsOptions - { - GraphJobs = { Enabled = true } - }); - var loggerProvider = new ListLoggerProvider(); - using var loggerFactory = LoggerFactory.Create(builder => builder.AddProvider(loggerProvider)); - var publisher = new GraphJobEventPublisher(new OptionsMonitorStub<SchedulerEventsOptions>(options), loggerFactory.CreateLogger<GraphJobEventPublisher>()); - - var buildJob = new GraphBuildJob( - id: "gbj_test", - tenantId: "tenant-alpha", - sbomId: "sbom", - 
sbomVersionId: "sbom_v1", - sbomDigest: "sha256:" + new string('a', 64), - status: GraphJobStatus.Completed, - trigger: GraphBuildJobTrigger.SbomVersion, - createdAt: DateTimeOffset.UtcNow, - graphSnapshotId: "graph_snap", - attempts: 1, - cartographerJobId: "carto", - correlationId: "corr", - startedAt: DateTimeOffset.UtcNow.AddSeconds(-30), - completedAt: DateTimeOffset.UtcNow, - error: null, - metadata: Array.Empty<KeyValuePair<string, string>>()); - - var notification = new GraphJobCompletionNotification( - buildJob.TenantId, - GraphJobQueryType.Build, - GraphJobStatus.Completed, - DateTimeOffset.UtcNow, - GraphJobResponse.From(buildJob), - "oras://result", - "corr", - null); - - await publisher.PublishAsync(notification, CancellationToken.None); - - var message = Assert.Single(loggerProvider.Messages); - Assert.Contains("\"kind\":\"scheduler.graph.job.completed\"", message); - Assert.Contains("\"tenant\":\"tenant-alpha\"", message); - Assert.Contains("\"resultUri\":\"oras://result\"", message); - } - - [Fact] - public async Task PublishAsync_Suppressed_WhenDisabled() - { - var options = Microsoft.Extensions.Options.Options.Create(new SchedulerEventsOptions()); - var loggerProvider = new ListLoggerProvider(); - using var loggerFactory = LoggerFactory.Create(builder => builder.AddProvider(loggerProvider)); - var publisher = new GraphJobEventPublisher(new OptionsMonitorStub<SchedulerEventsOptions>(options), loggerFactory.CreateLogger<GraphJobEventPublisher>()); - - var overlayJob = new GraphOverlayJob( - id: "goj_test", - tenantId: "tenant-alpha", - graphSnapshotId: "graph_snap", - buildJobId: null, - overlayKind: GraphOverlayKind.Policy, - overlayKey: "policy@1", - subjects: Array.Empty<string>(), - status: GraphJobStatus.Completed, - trigger: GraphOverlayJobTrigger.Policy, - createdAt: DateTimeOffset.UtcNow, - attempts: 1, - correlationId: null, - startedAt: DateTimeOffset.UtcNow.AddSeconds(-10), - completedAt: DateTimeOffset.UtcNow, - error: null, - metadata: Array.Empty<KeyValuePair<string, string>>()); - - var notification = new GraphJobCompletionNotification( - overlayJob.TenantId, - GraphJobQueryType.Overlay, - GraphJobStatus.Completed, - DateTimeOffset.UtcNow, - GraphJobResponse.From(overlayJob), - null, - null, - null); - - await publisher.PublishAsync(notification, CancellationToken.None); - - Assert.DoesNotContain(loggerProvider.Messages, message => message.Contains(GraphJobEventKinds.GraphJobCompleted, StringComparison.Ordinal)); - } - -private sealed class OptionsMonitorStub<T> : IOptionsMonitor<T> where T : class - { - private readonly IOptions<T> _options; - - public OptionsMonitorStub(IOptions<T> options) - { - _options = options; - } - - public T CurrentValue => _options.Value; - - public T Get(string? name) => _options.Value; - - public IDisposable? OnChange(Action<T, string?> listener) => null; - } - - private sealed class ListLoggerProvider : ILoggerProvider - { - private readonly ListLogger _logger = new(); - - public IList<string> Messages => _logger.Messages; - - public ILogger CreateLogger(string categoryName) => _logger; - - public void Dispose() - { - } - - private sealed class ListLogger : ILogger - { - public IList<string> Messages { get; } = new List<string>(); - - public IDisposable BeginScope<TState>(TState state) where TState : notnull => NullDisposable.Instance; - - public bool IsEnabled(LogLevel logLevel) => true; - - public void Log<TState>(LogLevel logLevel, EventId eventId, TState state, Exception? 
exception, Func<TState, Exception?, string> formatter) - { - Messages.Add(formatter(state, exception)); - } - - private sealed class NullDisposable : IDisposable - { - public static readonly NullDisposable Instance = new(); - public void Dispose() - { - } - } - } - } -} +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Auth.Abstractions; +using StellaOps.Scheduler.Models; +using StellaOps.Scheduler.WebService.GraphJobs; +using StellaOps.Scheduler.WebService.GraphJobs.Events; +using StellaOps.Scheduler.WebService.Options; + +namespace StellaOps.Scheduler.WebService.Tests; + +public sealed class GraphJobEventPublisherTests +{ + [Fact] + public async Task PublishAsync_WritesEventJson_WhenEnabled() + { + var options = Microsoft.Extensions.Options.Options.Create(new SchedulerEventsOptions + { + GraphJobs = { Enabled = true } + }); + var loggerProvider = new ListLoggerProvider(); + using var loggerFactory = LoggerFactory.Create(builder => builder.AddProvider(loggerProvider)); + var publisher = new GraphJobEventPublisher(new OptionsMonitorStub<SchedulerEventsOptions>(options), loggerFactory.CreateLogger<GraphJobEventPublisher>()); + + var buildJob = new GraphBuildJob( + id: "gbj_test", + tenantId: "tenant-alpha", + sbomId: "sbom", + sbomVersionId: "sbom_v1", + sbomDigest: "sha256:" + new string('a', 64), + status: GraphJobStatus.Completed, + trigger: GraphBuildJobTrigger.SbomVersion, + createdAt: DateTimeOffset.UtcNow, + graphSnapshotId: "graph_snap", + attempts: 1, + cartographerJobId: "carto", + correlationId: "corr", + startedAt: DateTimeOffset.UtcNow.AddSeconds(-30), + completedAt: DateTimeOffset.UtcNow, + error: null, + metadata: Array.Empty<KeyValuePair<string, string>>()); + + var notification = new GraphJobCompletionNotification( + buildJob.TenantId, + GraphJobQueryType.Build, + GraphJobStatus.Completed, + DateTimeOffset.UtcNow, + GraphJobResponse.From(buildJob), + "oras://result", + "corr", + null); + + await publisher.PublishAsync(notification, CancellationToken.None); + + var message = Assert.Single(loggerProvider.Messages); + Assert.Contains("\"kind\":\"scheduler.graph.job.completed\"", message); + Assert.Contains("\"tenant\":\"tenant-alpha\"", message); + Assert.Contains("\"resultUri\":\"oras://result\"", message); + } + + [Fact] + public async Task PublishAsync_Suppressed_WhenDisabled() + { + var options = Microsoft.Extensions.Options.Options.Create(new SchedulerEventsOptions()); + var loggerProvider = new ListLoggerProvider(); + using var loggerFactory = LoggerFactory.Create(builder => builder.AddProvider(loggerProvider)); + var publisher = new GraphJobEventPublisher(new OptionsMonitorStub<SchedulerEventsOptions>(options), loggerFactory.CreateLogger<GraphJobEventPublisher>()); + + var overlayJob = new GraphOverlayJob( + id: "goj_test", + tenantId: "tenant-alpha", + graphSnapshotId: "graph_snap", + buildJobId: null, + overlayKind: GraphOverlayKind.Policy, + overlayKey: "policy@1", + subjects: Array.Empty<string>(), + status: GraphJobStatus.Completed, + trigger: GraphOverlayJobTrigger.Policy, + createdAt: DateTimeOffset.UtcNow, + attempts: 1, + correlationId: null, + startedAt: DateTimeOffset.UtcNow.AddSeconds(-10), + completedAt: DateTimeOffset.UtcNow, + error: null, + metadata: Array.Empty<KeyValuePair<string, string>>()); + + var notification = new GraphJobCompletionNotification( + overlayJob.TenantId, + GraphJobQueryType.Overlay, + GraphJobStatus.Completed, + DateTimeOffset.UtcNow, + GraphJobResponse.From(overlayJob), + null, + null, + 
null); + + await publisher.PublishAsync(notification, CancellationToken.None); + + Assert.DoesNotContain(loggerProvider.Messages, message => message.Contains(GraphJobEventKinds.GraphJobCompleted, StringComparison.Ordinal)); + } + +private sealed class OptionsMonitorStub<T> : IOptionsMonitor<T> where T : class + { + private readonly IOptions<T> _options; + + public OptionsMonitorStub(IOptions<T> options) + { + _options = options; + } + + public T CurrentValue => _options.Value; + + public T Get(string? name) => _options.Value; + + public IDisposable? OnChange(Action<T, string?> listener) => null; + } + + private sealed class ListLoggerProvider : ILoggerProvider + { + private readonly ListLogger _logger = new(); + + public IList<string> Messages => _logger.Messages; + + public ILogger CreateLogger(string categoryName) => _logger; + + public void Dispose() + { + } + + private sealed class ListLogger : ILogger + { + public IList<string> Messages { get; } = new List<string>(); + + public IDisposable BeginScope<TState>(TState state) where TState : notnull => NullDisposable.Instance; + + public bool IsEnabled(LogLevel logLevel) => true; + + public void Log<TState>(LogLevel logLevel, EventId eventId, TState state, Exception? exception, Func<TState, Exception?, string> formatter) + { + Messages.Add(formatter(state, exception)); + } + + private sealed class NullDisposable : IDisposable + { + public static readonly NullDisposable Instance = new(); + public void Dispose() + { + } + } + } + } +} diff --git a/src/StellaOps.Scheduler.WebService.Tests/PolicyRunEndpointTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.WebService.Tests/PolicyRunEndpointTests.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService.Tests/PolicyRunEndpointTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.WebService.Tests/PolicyRunEndpointTests.cs index d47c6736..2cbc40ce 100644 --- a/src/StellaOps.Scheduler.WebService.Tests/PolicyRunEndpointTests.cs +++ b/src/Scheduler/__Tests/StellaOps.Scheduler.WebService.Tests/PolicyRunEndpointTests.cs @@ -1,71 +1,71 @@ -using System.Text.Json; - -namespace StellaOps.Scheduler.WebService.Tests; - -public sealed class PolicyRunEndpointTests : IClassFixture<WebApplicationFactory<Program>> -{ - private readonly WebApplicationFactory<Program> _factory; - - public PolicyRunEndpointTests(WebApplicationFactory<Program> factory) - { - _factory = factory; - } - - [Fact] - public async Task CreateListGetPolicyRun() - { - using var client = _factory.CreateClient(); - client.DefaultRequestHeaders.Add("X-Tenant-Id", "tenant-policy"); - client.DefaultRequestHeaders.Add("X-Scopes", "policy:run"); - - var createResponse = await client.PostAsJsonAsync("/api/v1/scheduler/policy/runs", new - { - policyId = "P-7", - policyVersion = 4, - mode = "incremental", - priority = "normal", - metadata = new { source = "cli" }, - inputs = new - { - sbomSet = new[] { "sbom:S-42", "sbom:S-99" }, - advisoryCursor = "2025-10-26T13:59:00+00:00", - vexCursor = "2025-10-26T13:58:30+00:00", - environment = new { @sealed = false, exposure = "internet" }, - captureExplain = true - } - }); - - createResponse.EnsureSuccessStatusCode(); - Assert.Equal(System.Net.HttpStatusCode.Created, createResponse.StatusCode); - - var created = await createResponse.Content.ReadFromJsonAsync<JsonElement>(); - var runJson = created.GetProperty("run"); - var runId = runJson.GetProperty("runId").GetString(); - Assert.False(string.IsNullOrEmpty(runId)); - Assert.Equal("queued", runJson.GetProperty("status").GetString()); - 
Assert.Equal("P-7", runJson.GetProperty("policyId").GetString()); - Assert.True(runJson.GetProperty("inputs").GetProperty("captureExplain").GetBoolean()); - - var listResponse = await client.GetAsync("/api/v1/scheduler/policy/runs?policyId=P-7"); - listResponse.EnsureSuccessStatusCode(); - var list = await listResponse.Content.ReadFromJsonAsync<JsonElement>(); - var runsArray = list.GetProperty("runs"); - Assert.True(runsArray.GetArrayLength() >= 1); - - var getResponse = await client.GetAsync($"/api/v1/scheduler/policy/runs/{runId}"); - getResponse.EnsureSuccessStatusCode(); - var retrieved = await getResponse.Content.ReadFromJsonAsync<JsonElement>(); - Assert.Equal(runId, retrieved.GetProperty("run").GetProperty("runId").GetString()); - } - - [Fact] - public async Task MissingScopeReturnsForbidden() - { - using var client = _factory.CreateClient(); - client.DefaultRequestHeaders.Add("X-Tenant-Id", "tenant-policy"); - - var response = await client.GetAsync("/api/v1/scheduler/policy/runs"); - - Assert.Equal(System.Net.HttpStatusCode.Unauthorized, response.StatusCode); - } -} +using System.Text.Json; + +namespace StellaOps.Scheduler.WebService.Tests; + +public sealed class PolicyRunEndpointTests : IClassFixture<WebApplicationFactory<Program>> +{ + private readonly WebApplicationFactory<Program> _factory; + + public PolicyRunEndpointTests(WebApplicationFactory<Program> factory) + { + _factory = factory; + } + + [Fact] + public async Task CreateListGetPolicyRun() + { + using var client = _factory.CreateClient(); + client.DefaultRequestHeaders.Add("X-Tenant-Id", "tenant-policy"); + client.DefaultRequestHeaders.Add("X-Scopes", "policy:run"); + + var createResponse = await client.PostAsJsonAsync("/api/v1/scheduler/policy/runs", new + { + policyId = "P-7", + policyVersion = 4, + mode = "incremental", + priority = "normal", + metadata = new { source = "cli" }, + inputs = new + { + sbomSet = new[] { "sbom:S-42", "sbom:S-99" }, + advisoryCursor = "2025-10-26T13:59:00+00:00", + vexCursor = "2025-10-26T13:58:30+00:00", + environment = new { @sealed = false, exposure = "internet" }, + captureExplain = true + } + }); + + createResponse.EnsureSuccessStatusCode(); + Assert.Equal(System.Net.HttpStatusCode.Created, createResponse.StatusCode); + + var created = await createResponse.Content.ReadFromJsonAsync<JsonElement>(); + var runJson = created.GetProperty("run"); + var runId = runJson.GetProperty("runId").GetString(); + Assert.False(string.IsNullOrEmpty(runId)); + Assert.Equal("queued", runJson.GetProperty("status").GetString()); + Assert.Equal("P-7", runJson.GetProperty("policyId").GetString()); + Assert.True(runJson.GetProperty("inputs").GetProperty("captureExplain").GetBoolean()); + + var listResponse = await client.GetAsync("/api/v1/scheduler/policy/runs?policyId=P-7"); + listResponse.EnsureSuccessStatusCode(); + var list = await listResponse.Content.ReadFromJsonAsync<JsonElement>(); + var runsArray = list.GetProperty("runs"); + Assert.True(runsArray.GetArrayLength() >= 1); + + var getResponse = await client.GetAsync($"/api/v1/scheduler/policy/runs/{runId}"); + getResponse.EnsureSuccessStatusCode(); + var retrieved = await getResponse.Content.ReadFromJsonAsync<JsonElement>(); + Assert.Equal(runId, retrieved.GetProperty("run").GetProperty("runId").GetString()); + } + + [Fact] + public async Task MissingScopeReturnsForbidden() + { + using var client = _factory.CreateClient(); + client.DefaultRequestHeaders.Add("X-Tenant-Id", "tenant-policy"); + + var response = await 
client.GetAsync("/api/v1/scheduler/policy/runs"); + + Assert.Equal(System.Net.HttpStatusCode.Unauthorized, response.StatusCode); + } +} diff --git a/src/StellaOps.Scheduler.WebService.Tests/RunEndpointTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.WebService.Tests/RunEndpointTests.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService.Tests/RunEndpointTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.WebService.Tests/RunEndpointTests.cs index 6059019a..702f029f 100644 --- a/src/StellaOps.Scheduler.WebService.Tests/RunEndpointTests.cs +++ b/src/Scheduler/__Tests/StellaOps.Scheduler.WebService.Tests/RunEndpointTests.cs @@ -1,104 +1,104 @@ -using System.Linq; -using System.Text.Json; - -namespace StellaOps.Scheduler.WebService.Tests; - -public sealed class RunEndpointTests : IClassFixture<WebApplicationFactory<Program>> -{ - private readonly WebApplicationFactory<Program> _factory; - - public RunEndpointTests(WebApplicationFactory<Program> factory) - { - _factory = factory; - } - - [Fact] - public async Task CreateListCancelRun() - { - using var client = _factory.CreateClient(); - client.DefaultRequestHeaders.Add("X-Tenant-Id", "tenant-runs"); - client.DefaultRequestHeaders.Add("X-Scopes", "scheduler.schedules.write scheduler.schedules.read scheduler.runs.write scheduler.runs.read scheduler.runs.preview"); - - var scheduleResponse = await client.PostAsJsonAsync("/api/v1/scheduler/schedules", new - { - name = "RunSchedule", - cronExpression = "0 3 * * *", - timezone = "UTC", - mode = "analysis-only", - selection = new - { - scope = "all-images" - } - }); - - scheduleResponse.EnsureSuccessStatusCode(); - var scheduleJson = await scheduleResponse.Content.ReadFromJsonAsync<JsonElement>(); - var scheduleId = scheduleJson.GetProperty("schedule").GetProperty("id").GetString(); - Assert.False(string.IsNullOrEmpty(scheduleId)); - - var createRun = await client.PostAsJsonAsync("/api/v1/scheduler/runs", new - { - scheduleId, - trigger = "manual" - }); - - createRun.EnsureSuccessStatusCode(); - Assert.Equal(System.Net.HttpStatusCode.Created, createRun.StatusCode); - var runJson = await createRun.Content.ReadFromJsonAsync<JsonElement>(); - var runId = runJson.GetProperty("run").GetProperty("id").GetString(); - Assert.False(string.IsNullOrEmpty(runId)); - Assert.Equal("planning", runJson.GetProperty("run").GetProperty("state").GetString()); - - var listResponse = await client.GetAsync("/api/v1/scheduler/runs"); - listResponse.EnsureSuccessStatusCode(); - var listJson = await listResponse.Content.ReadFromJsonAsync<JsonElement>(); - Assert.True(listJson.GetProperty("runs").EnumerateArray().Any()); - - var cancelResponse = await client.PostAsync($"/api/v1/scheduler/runs/{runId}/cancel", null); - cancelResponse.EnsureSuccessStatusCode(); - var cancelled = await cancelResponse.Content.ReadFromJsonAsync<JsonElement>(); - Assert.Equal("cancelled", cancelled.GetProperty("run").GetProperty("state").GetString()); - - var getResponse = await client.GetAsync($"/api/v1/scheduler/runs/{runId}"); - getResponse.EnsureSuccessStatusCode(); - var runDetail = await getResponse.Content.ReadFromJsonAsync<JsonElement>(); - Assert.Equal("cancelled", runDetail.GetProperty("run").GetProperty("state").GetString()); - } - - [Fact] - public async Task PreviewImpactForSchedule() - { - using var client = _factory.CreateClient(); - client.DefaultRequestHeaders.Add("X-Tenant-Id", "tenant-preview"); - client.DefaultRequestHeaders.Add("X-Scopes", "scheduler.schedules.write scheduler.schedules.read 
scheduler.runs.write scheduler.runs.read scheduler.runs.preview"); - - var scheduleResponse = await client.PostAsJsonAsync("/api/v1/scheduler/schedules", new - { - name = "PreviewSchedule", - cronExpression = "0 5 * * *", - timezone = "UTC", - mode = "analysis-only", - selection = new - { - scope = "all-images" - } - }); - - scheduleResponse.EnsureSuccessStatusCode(); - var scheduleJson = await scheduleResponse.Content.ReadFromJsonAsync<JsonElement>(); - var scheduleId = scheduleJson.GetProperty("schedule").GetProperty("id").GetString(); - Assert.False(string.IsNullOrEmpty(scheduleId)); - - var previewResponse = await client.PostAsJsonAsync("/api/v1/scheduler/runs/preview", new - { - scheduleId, - usageOnly = true, - sampleSize = 3 - }); - - previewResponse.EnsureSuccessStatusCode(); - var preview = await previewResponse.Content.ReadFromJsonAsync<JsonElement>(); - Assert.True(preview.GetProperty("total").GetInt32() >= 0); - Assert.True(preview.GetProperty("sample").GetArrayLength() <= 3); - } -} +using System.Linq; +using System.Text.Json; + +namespace StellaOps.Scheduler.WebService.Tests; + +public sealed class RunEndpointTests : IClassFixture<WebApplicationFactory<Program>> +{ + private readonly WebApplicationFactory<Program> _factory; + + public RunEndpointTests(WebApplicationFactory<Program> factory) + { + _factory = factory; + } + + [Fact] + public async Task CreateListCancelRun() + { + using var client = _factory.CreateClient(); + client.DefaultRequestHeaders.Add("X-Tenant-Id", "tenant-runs"); + client.DefaultRequestHeaders.Add("X-Scopes", "scheduler.schedules.write scheduler.schedules.read scheduler.runs.write scheduler.runs.read scheduler.runs.preview"); + + var scheduleResponse = await client.PostAsJsonAsync("/api/v1/scheduler/schedules", new + { + name = "RunSchedule", + cronExpression = "0 3 * * *", + timezone = "UTC", + mode = "analysis-only", + selection = new + { + scope = "all-images" + } + }); + + scheduleResponse.EnsureSuccessStatusCode(); + var scheduleJson = await scheduleResponse.Content.ReadFromJsonAsync<JsonElement>(); + var scheduleId = scheduleJson.GetProperty("schedule").GetProperty("id").GetString(); + Assert.False(string.IsNullOrEmpty(scheduleId)); + + var createRun = await client.PostAsJsonAsync("/api/v1/scheduler/runs", new + { + scheduleId, + trigger = "manual" + }); + + createRun.EnsureSuccessStatusCode(); + Assert.Equal(System.Net.HttpStatusCode.Created, createRun.StatusCode); + var runJson = await createRun.Content.ReadFromJsonAsync<JsonElement>(); + var runId = runJson.GetProperty("run").GetProperty("id").GetString(); + Assert.False(string.IsNullOrEmpty(runId)); + Assert.Equal("planning", runJson.GetProperty("run").GetProperty("state").GetString()); + + var listResponse = await client.GetAsync("/api/v1/scheduler/runs"); + listResponse.EnsureSuccessStatusCode(); + var listJson = await listResponse.Content.ReadFromJsonAsync<JsonElement>(); + Assert.True(listJson.GetProperty("runs").EnumerateArray().Any()); + + var cancelResponse = await client.PostAsync($"/api/v1/scheduler/runs/{runId}/cancel", null); + cancelResponse.EnsureSuccessStatusCode(); + var cancelled = await cancelResponse.Content.ReadFromJsonAsync<JsonElement>(); + Assert.Equal("cancelled", cancelled.GetProperty("run").GetProperty("state").GetString()); + + var getResponse = await client.GetAsync($"/api/v1/scheduler/runs/{runId}"); + getResponse.EnsureSuccessStatusCode(); + var runDetail = await getResponse.Content.ReadFromJsonAsync<JsonElement>(); + Assert.Equal("cancelled", 
runDetail.GetProperty("run").GetProperty("state").GetString()); + } + + [Fact] + public async Task PreviewImpactForSchedule() + { + using var client = _factory.CreateClient(); + client.DefaultRequestHeaders.Add("X-Tenant-Id", "tenant-preview"); + client.DefaultRequestHeaders.Add("X-Scopes", "scheduler.schedules.write scheduler.schedules.read scheduler.runs.write scheduler.runs.read scheduler.runs.preview"); + + var scheduleResponse = await client.PostAsJsonAsync("/api/v1/scheduler/schedules", new + { + name = "PreviewSchedule", + cronExpression = "0 5 * * *", + timezone = "UTC", + mode = "analysis-only", + selection = new + { + scope = "all-images" + } + }); + + scheduleResponse.EnsureSuccessStatusCode(); + var scheduleJson = await scheduleResponse.Content.ReadFromJsonAsync<JsonElement>(); + var scheduleId = scheduleJson.GetProperty("schedule").GetProperty("id").GetString(); + Assert.False(string.IsNullOrEmpty(scheduleId)); + + var previewResponse = await client.PostAsJsonAsync("/api/v1/scheduler/runs/preview", new + { + scheduleId, + usageOnly = true, + sampleSize = 3 + }); + + previewResponse.EnsureSuccessStatusCode(); + var preview = await previewResponse.Content.ReadFromJsonAsync<JsonElement>(); + Assert.True(preview.GetProperty("total").GetInt32() >= 0); + Assert.True(preview.GetProperty("sample").GetArrayLength() <= 3); + } +} diff --git a/src/StellaOps.Scheduler.WebService.Tests/ScheduleEndpointTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.WebService.Tests/ScheduleEndpointTests.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService.Tests/ScheduleEndpointTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.WebService.Tests/ScheduleEndpointTests.cs index ee0b67cc..13392654 100644 --- a/src/StellaOps.Scheduler.WebService.Tests/ScheduleEndpointTests.cs +++ b/src/Scheduler/__Tests/StellaOps.Scheduler.WebService.Tests/ScheduleEndpointTests.cs @@ -1,88 +1,88 @@ -namespace StellaOps.Scheduler.WebService.Tests; - -public sealed class ScheduleEndpointTests : IClassFixture<WebApplicationFactory<Program>> -{ - private readonly WebApplicationFactory<Program> _factory; - - public ScheduleEndpointTests(WebApplicationFactory<Program> factory) - { - _factory = factory; - } - - [Fact] - public async Task CreateListAndRetrieveSchedule() - { - using var client = _factory.CreateClient(); - client.DefaultRequestHeaders.Add("X-Tenant-Id", "tenant-schedules"); - client.DefaultRequestHeaders.Add("X-Scopes", "scheduler.schedules.write scheduler.schedules.read"); - - var createResponse = await client.PostAsJsonAsync("/api/v1/scheduler/schedules", new - { - name = "Nightly", - cronExpression = "0 2 * * *", - timezone = "UTC", - mode = "analysis-only", - selection = new - { - scope = "all-images" - }, - notify = new - { - onNewFindings = true, - minSeverity = "medium", - includeKev = true - } - }); - - createResponse.EnsureSuccessStatusCode(); - var created = await createResponse.Content.ReadFromJsonAsync<JsonElement>(); - var scheduleId = created.GetProperty("schedule").GetProperty("id").GetString(); - Assert.False(string.IsNullOrEmpty(scheduleId)); - - var listResponse = await client.GetAsync("/api/v1/scheduler/schedules"); - listResponse.EnsureSuccessStatusCode(); - var listJson = await listResponse.Content.ReadFromJsonAsync<JsonElement>(); - var schedules = listJson.GetProperty("schedules"); - Assert.True(schedules.GetArrayLength() >= 1); - - var getResponse = await client.GetAsync($"/api/v1/scheduler/schedules/{scheduleId}"); - getResponse.EnsureSuccessStatusCode(); - var 
scheduleJson = await getResponse.Content.ReadFromJsonAsync<JsonElement>(); - Assert.Equal("Nightly", scheduleJson.GetProperty("schedule").GetProperty("name").GetString()); - } - - [Fact] - public async Task PauseAndResumeSchedule() - { - using var client = _factory.CreateClient(); - client.DefaultRequestHeaders.Add("X-Tenant-Id", "tenant-controls"); - client.DefaultRequestHeaders.Add("X-Scopes", "scheduler.schedules.write scheduler.schedules.read"); - - var create = await client.PostAsJsonAsync("/api/v1/scheduler/schedules", new - { - name = "PauseResume", - cronExpression = "*/5 * * * *", - timezone = "UTC", - mode = "analysis-only", - selection = new - { - scope = "all-images" - } - }); - - create.EnsureSuccessStatusCode(); - var created = await create.Content.ReadFromJsonAsync<JsonElement>(); - var scheduleId = created.GetProperty("schedule").GetProperty("id").GetString(); - Assert.False(string.IsNullOrEmpty(scheduleId)); - - var pauseResponse = await client.PostAsync($"/api/v1/scheduler/schedules/{scheduleId}/pause", null); - pauseResponse.EnsureSuccessStatusCode(); - var paused = await pauseResponse.Content.ReadFromJsonAsync<JsonElement>(); - Assert.False(paused.GetProperty("schedule").GetProperty("enabled").GetBoolean()); - - var resumeResponse = await client.PostAsync($"/api/v1/scheduler/schedules/{scheduleId}/resume", null); - resumeResponse.EnsureSuccessStatusCode(); - var resumed = await resumeResponse.Content.ReadFromJsonAsync<JsonElement>(); - Assert.True(resumed.GetProperty("schedule").GetProperty("enabled").GetBoolean()); - } -} +namespace StellaOps.Scheduler.WebService.Tests; + +public sealed class ScheduleEndpointTests : IClassFixture<WebApplicationFactory<Program>> +{ + private readonly WebApplicationFactory<Program> _factory; + + public ScheduleEndpointTests(WebApplicationFactory<Program> factory) + { + _factory = factory; + } + + [Fact] + public async Task CreateListAndRetrieveSchedule() + { + using var client = _factory.CreateClient(); + client.DefaultRequestHeaders.Add("X-Tenant-Id", "tenant-schedules"); + client.DefaultRequestHeaders.Add("X-Scopes", "scheduler.schedules.write scheduler.schedules.read"); + + var createResponse = await client.PostAsJsonAsync("/api/v1/scheduler/schedules", new + { + name = "Nightly", + cronExpression = "0 2 * * *", + timezone = "UTC", + mode = "analysis-only", + selection = new + { + scope = "all-images" + }, + notify = new + { + onNewFindings = true, + minSeverity = "medium", + includeKev = true + } + }); + + createResponse.EnsureSuccessStatusCode(); + var created = await createResponse.Content.ReadFromJsonAsync<JsonElement>(); + var scheduleId = created.GetProperty("schedule").GetProperty("id").GetString(); + Assert.False(string.IsNullOrEmpty(scheduleId)); + + var listResponse = await client.GetAsync("/api/v1/scheduler/schedules"); + listResponse.EnsureSuccessStatusCode(); + var listJson = await listResponse.Content.ReadFromJsonAsync<JsonElement>(); + var schedules = listJson.GetProperty("schedules"); + Assert.True(schedules.GetArrayLength() >= 1); + + var getResponse = await client.GetAsync($"/api/v1/scheduler/schedules/{scheduleId}"); + getResponse.EnsureSuccessStatusCode(); + var scheduleJson = await getResponse.Content.ReadFromJsonAsync<JsonElement>(); + Assert.Equal("Nightly", scheduleJson.GetProperty("schedule").GetProperty("name").GetString()); + } + + [Fact] + public async Task PauseAndResumeSchedule() + { + using var client = _factory.CreateClient(); + client.DefaultRequestHeaders.Add("X-Tenant-Id", "tenant-controls"); + 
client.DefaultRequestHeaders.Add("X-Scopes", "scheduler.schedules.write scheduler.schedules.read"); + + var create = await client.PostAsJsonAsync("/api/v1/scheduler/schedules", new + { + name = "PauseResume", + cronExpression = "*/5 * * * *", + timezone = "UTC", + mode = "analysis-only", + selection = new + { + scope = "all-images" + } + }); + + create.EnsureSuccessStatusCode(); + var created = await create.Content.ReadFromJsonAsync<JsonElement>(); + var scheduleId = created.GetProperty("schedule").GetProperty("id").GetString(); + Assert.False(string.IsNullOrEmpty(scheduleId)); + + var pauseResponse = await client.PostAsync($"/api/v1/scheduler/schedules/{scheduleId}/pause", null); + pauseResponse.EnsureSuccessStatusCode(); + var paused = await pauseResponse.Content.ReadFromJsonAsync<JsonElement>(); + Assert.False(paused.GetProperty("schedule").GetProperty("enabled").GetBoolean()); + + var resumeResponse = await client.PostAsync($"/api/v1/scheduler/schedules/{scheduleId}/resume", null); + resumeResponse.EnsureSuccessStatusCode(); + var resumed = await resumeResponse.Content.ReadFromJsonAsync<JsonElement>(); + Assert.True(resumed.GetProperty("schedule").GetProperty("enabled").GetBoolean()); + } +} diff --git a/src/StellaOps.Scheduler.WebService.Tests/SchedulerPluginHostFactoryTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.WebService.Tests/SchedulerPluginHostFactoryTests.cs similarity index 97% rename from src/StellaOps.Scheduler.WebService.Tests/SchedulerPluginHostFactoryTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.WebService.Tests/SchedulerPluginHostFactoryTests.cs index 424ff30a..b6c87dbb 100644 --- a/src/StellaOps.Scheduler.WebService.Tests/SchedulerPluginHostFactoryTests.cs +++ b/src/Scheduler/__Tests/StellaOps.Scheduler.WebService.Tests/SchedulerPluginHostFactoryTests.cs @@ -1,73 +1,73 @@ -using System; -using System.IO; -using StellaOps.Plugin.Hosting; -using StellaOps.Scheduler.WebService.Hosting; -using StellaOps.Scheduler.WebService.Options; -using Xunit; - -namespace StellaOps.Scheduler.WebService.Tests; - -public class SchedulerPluginHostFactoryTests -{ - [Fact] - public void Build_usesDefaults_whenOptionsEmpty() - { - var options = new SchedulerOptions.PluginOptions(); - var contentRoot = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString("N")); - Directory.CreateDirectory(contentRoot); - - try - { - var hostOptions = SchedulerPluginHostFactory.Build(options, contentRoot); - - var expectedBase = Path.GetFullPath(Path.Combine(contentRoot, "..")); - var expectedPlugins = Path.Combine(expectedBase, "plugins", "scheduler"); - - Assert.Equal(expectedBase, hostOptions.BaseDirectory); - Assert.Equal(expectedPlugins, hostOptions.PluginsDirectory); - Assert.Single(hostOptions.SearchPatterns, "StellaOps.Scheduler.Plugin.*.dll"); - Assert.True(hostOptions.EnsureDirectoryExists); - Assert.False(hostOptions.RecursiveSearch); - Assert.Empty(hostOptions.PluginOrder); - } - finally - { - Directory.Delete(contentRoot, recursive: true); - } - } - - [Fact] - public void Build_respectsConfiguredValues() - { - var options = new SchedulerOptions.PluginOptions - { - BaseDirectory = Path.Combine(Path.GetTempPath(), "scheduler-options", Guid.NewGuid().ToString("N")), - Directory = Path.Combine("custom", "plugins"), - RecursiveSearch = true, - EnsureDirectoryExists = false - }; - - options.SearchPatterns.Add("Custom.Plugin.*.dll"); - options.OrderedPlugins.Add("StellaOps.Scheduler.Plugin.Alpha"); - - Directory.CreateDirectory(options.BaseDirectory!); - - try - { - var 
hostOptions = SchedulerPluginHostFactory.Build(options, contentRootPath: Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString("N"))); - - var expectedPlugins = Path.GetFullPath(Path.Combine(options.BaseDirectory!, options.Directory!)); - - Assert.Equal(options.BaseDirectory, hostOptions.BaseDirectory); - Assert.Equal(expectedPlugins, hostOptions.PluginsDirectory); - Assert.Single(hostOptions.SearchPatterns, "Custom.Plugin.*.dll"); - Assert.Single(hostOptions.PluginOrder, "StellaOps.Scheduler.Plugin.Alpha"); - Assert.True(hostOptions.RecursiveSearch); - Assert.False(hostOptions.EnsureDirectoryExists); - } - finally - { - Directory.Delete(options.BaseDirectory!, recursive: true); - } - } -} +using System; +using System.IO; +using StellaOps.Plugin.Hosting; +using StellaOps.Scheduler.WebService.Hosting; +using StellaOps.Scheduler.WebService.Options; +using Xunit; + +namespace StellaOps.Scheduler.WebService.Tests; + +public class SchedulerPluginHostFactoryTests +{ + [Fact] + public void Build_usesDefaults_whenOptionsEmpty() + { + var options = new SchedulerOptions.PluginOptions(); + var contentRoot = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString("N")); + Directory.CreateDirectory(contentRoot); + + try + { + var hostOptions = SchedulerPluginHostFactory.Build(options, contentRoot); + + var expectedBase = Path.GetFullPath(Path.Combine(contentRoot, "..")); + var expectedPlugins = Path.Combine(expectedBase, "plugins", "scheduler"); + + Assert.Equal(expectedBase, hostOptions.BaseDirectory); + Assert.Equal(expectedPlugins, hostOptions.PluginsDirectory); + Assert.Single(hostOptions.SearchPatterns, "StellaOps.Scheduler.Plugin.*.dll"); + Assert.True(hostOptions.EnsureDirectoryExists); + Assert.False(hostOptions.RecursiveSearch); + Assert.Empty(hostOptions.PluginOrder); + } + finally + { + Directory.Delete(contentRoot, recursive: true); + } + } + + [Fact] + public void Build_respectsConfiguredValues() + { + var options = new SchedulerOptions.PluginOptions + { + BaseDirectory = Path.Combine(Path.GetTempPath(), "scheduler-options", Guid.NewGuid().ToString("N")), + Directory = Path.Combine("custom", "plugins"), + RecursiveSearch = true, + EnsureDirectoryExists = false + }; + + options.SearchPatterns.Add("Custom.Plugin.*.dll"); + options.OrderedPlugins.Add("StellaOps.Scheduler.Plugin.Alpha"); + + Directory.CreateDirectory(options.BaseDirectory!); + + try + { + var hostOptions = SchedulerPluginHostFactory.Build(options, contentRootPath: Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString("N"))); + + var expectedPlugins = Path.GetFullPath(Path.Combine(options.BaseDirectory!, options.Directory!)); + + Assert.Equal(options.BaseDirectory, hostOptions.BaseDirectory); + Assert.Equal(expectedPlugins, hostOptions.PluginsDirectory); + Assert.Single(hostOptions.SearchPatterns, "Custom.Plugin.*.dll"); + Assert.Single(hostOptions.PluginOrder, "StellaOps.Scheduler.Plugin.Alpha"); + Assert.True(hostOptions.RecursiveSearch); + Assert.False(hostOptions.EnsureDirectoryExists); + } + finally + { + Directory.Delete(options.BaseDirectory!, recursive: true); + } + } +} diff --git a/src/StellaOps.Scheduler.WebService.Tests/SchedulerWebApplicationFactory.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.WebService.Tests/SchedulerWebApplicationFactory.cs similarity index 98% rename from src/StellaOps.Scheduler.WebService.Tests/SchedulerWebApplicationFactory.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.WebService.Tests/SchedulerWebApplicationFactory.cs index ccaf06b2..aaed847f 100644 --- 
a/src/StellaOps.Scheduler.WebService.Tests/SchedulerWebApplicationFactory.cs +++ b/src/Scheduler/__Tests/StellaOps.Scheduler.WebService.Tests/SchedulerWebApplicationFactory.cs @@ -1,46 +1,46 @@ -using System.Collections.Generic; -using Microsoft.AspNetCore.Hosting; -using Microsoft.AspNetCore.Mvc.Testing; -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.DependencyInjection; -using StellaOps.Scheduler.WebService.Options; - -namespace StellaOps.Scheduler.WebService.Tests; - -public sealed class SchedulerWebApplicationFactory : WebApplicationFactory<Program> -{ - protected override void ConfigureWebHost(IWebHostBuilder builder) - { - builder.ConfigureAppConfiguration((_, configuration) => - { - configuration.AddInMemoryCollection(new[] - { - new KeyValuePair<string, string?>("Scheduler:Authority:Enabled", "false"), - new KeyValuePair<string, string?>("Scheduler:Cartographer:Webhook:Enabled", "false"), - new KeyValuePair<string, string?>("Scheduler:Events:GraphJobs:Enabled", "false"), - new KeyValuePair<string, string?>("Scheduler:Events:Webhooks:Feedser:Enabled", "true"), - new KeyValuePair<string, string?>("Scheduler:Events:Webhooks:Feedser:HmacSecret", "feedser-secret"), - new KeyValuePair<string, string?>("Scheduler:Events:Webhooks:Feedser:RateLimitRequests", "20"), - new KeyValuePair<string, string?>("Scheduler:Events:Webhooks:Feedser:RateLimitWindowSeconds", "60"), - new KeyValuePair<string, string?>("Scheduler:Events:Webhooks:Vexer:Enabled", "true"), - new KeyValuePair<string, string?>("Scheduler:Events:Webhooks:Vexer:HmacSecret", "vexer-secret"), - new KeyValuePair<string, string?>("Scheduler:Events:Webhooks:Vexer:RateLimitRequests", "20"), - new KeyValuePair<string, string?>("Scheduler:Events:Webhooks:Vexer:RateLimitWindowSeconds", "60") - }); - }); - - builder.ConfigureServices(services => - { - services.Configure<SchedulerEventsOptions>(options => - { - options.Webhooks ??= new SchedulerInboundWebhooksOptions(); - options.Webhooks.Feedser ??= SchedulerWebhookOptions.CreateDefault("feedser"); - options.Webhooks.Vexer ??= SchedulerWebhookOptions.CreateDefault("vexer"); - options.Webhooks.Feedser.HmacSecret = "feedser-secret"; - options.Webhooks.Feedser.Enabled = true; - options.Webhooks.Vexer.HmacSecret = "vexer-secret"; - options.Webhooks.Vexer.Enabled = true; - }); - }); - } -} +using System.Collections.Generic; +using Microsoft.AspNetCore.Hosting; +using Microsoft.AspNetCore.Mvc.Testing; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using StellaOps.Scheduler.WebService.Options; + +namespace StellaOps.Scheduler.WebService.Tests; + +public sealed class SchedulerWebApplicationFactory : WebApplicationFactory<Program> +{ + protected override void ConfigureWebHost(IWebHostBuilder builder) + { + builder.ConfigureAppConfiguration((_, configuration) => + { + configuration.AddInMemoryCollection(new[] + { + new KeyValuePair<string, string?>("Scheduler:Authority:Enabled", "false"), + new KeyValuePair<string, string?>("Scheduler:Cartographer:Webhook:Enabled", "false"), + new KeyValuePair<string, string?>("Scheduler:Events:GraphJobs:Enabled", "false"), + new KeyValuePair<string, string?>("Scheduler:Events:Webhooks:Feedser:Enabled", "true"), + new KeyValuePair<string, string?>("Scheduler:Events:Webhooks:Feedser:HmacSecret", "feedser-secret"), + new KeyValuePair<string, string?>("Scheduler:Events:Webhooks:Feedser:RateLimitRequests", "20"), + new KeyValuePair<string, 
string?>("Scheduler:Events:Webhooks:Feedser:RateLimitWindowSeconds", "60"), + new KeyValuePair<string, string?>("Scheduler:Events:Webhooks:Vexer:Enabled", "true"), + new KeyValuePair<string, string?>("Scheduler:Events:Webhooks:Vexer:HmacSecret", "vexer-secret"), + new KeyValuePair<string, string?>("Scheduler:Events:Webhooks:Vexer:RateLimitRequests", "20"), + new KeyValuePair<string, string?>("Scheduler:Events:Webhooks:Vexer:RateLimitWindowSeconds", "60") + }); + }); + + builder.ConfigureServices(services => + { + services.Configure<SchedulerEventsOptions>(options => + { + options.Webhooks ??= new SchedulerInboundWebhooksOptions(); + options.Webhooks.Feedser ??= SchedulerWebhookOptions.CreateDefault("feedser"); + options.Webhooks.Vexer ??= SchedulerWebhookOptions.CreateDefault("vexer"); + options.Webhooks.Feedser.HmacSecret = "feedser-secret"; + options.Webhooks.Feedser.Enabled = true; + options.Webhooks.Vexer.HmacSecret = "vexer-secret"; + options.Webhooks.Vexer.Enabled = true; + }); + }); + } +} diff --git a/src/StellaOps.Scheduler.WebService.Tests/StellaOps.Scheduler.WebService.Tests.csproj b/src/Scheduler/__Tests/StellaOps.Scheduler.WebService.Tests/StellaOps.Scheduler.WebService.Tests.csproj similarity index 81% rename from src/StellaOps.Scheduler.WebService.Tests/StellaOps.Scheduler.WebService.Tests.csproj rename to src/Scheduler/__Tests/StellaOps.Scheduler.WebService.Tests/StellaOps.Scheduler.WebService.Tests.csproj index f9818719..18f471ae 100644 --- a/src/StellaOps.Scheduler.WebService.Tests/StellaOps.Scheduler.WebService.Tests.csproj +++ b/src/Scheduler/__Tests/StellaOps.Scheduler.WebService.Tests/StellaOps.Scheduler.WebService.Tests.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -14,6 +15,6 @@ <PackageReference Include="coverlet.collector" Version="6.0.4" /> </ItemGroup> <ItemGroup> - <ProjectReference Include="../StellaOps.Scheduler.WebService/StellaOps.Scheduler.WebService.csproj" /> + <ProjectReference Include="../../StellaOps.Scheduler.WebService/StellaOps.Scheduler.WebService.csproj" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Scheduler.Worker.Tests/GlobalUsings.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests/GlobalUsings.cs similarity index 97% rename from src/StellaOps.Scheduler.Worker.Tests/GlobalUsings.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests/GlobalUsings.cs index 71e5486e..c1723fbc 100644 --- a/src/StellaOps.Scheduler.Worker.Tests/GlobalUsings.cs +++ b/src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests/GlobalUsings.cs @@ -1,5 +1,5 @@ -global using System.Collections.Immutable; -global using StellaOps.Scheduler.ImpactIndex; -global using StellaOps.Scheduler.Models; -global using StellaOps.Scheduler.Worker; -global using Xunit; +global using System.Collections.Immutable; +global using StellaOps.Scheduler.ImpactIndex; +global using StellaOps.Scheduler.Models; +global using StellaOps.Scheduler.Worker; +global using Xunit; diff --git a/src/StellaOps.Scheduler.Worker.Tests/GraphBuildExecutionServiceTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests/GraphBuildExecutionServiceTests.cs similarity index 97% rename from src/StellaOps.Scheduler.Worker.Tests/GraphBuildExecutionServiceTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests/GraphBuildExecutionServiceTests.cs index 116aaeec..590dff79 100644 --- 
a/src/StellaOps.Scheduler.Worker.Tests/GraphBuildExecutionServiceTests.cs +++ b/src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests/GraphBuildExecutionServiceTests.cs @@ -1,243 +1,243 @@ -using System; -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.Extensions.Options; -using StellaOps.Scheduler.Models; -using StellaOps.Scheduler.Storage.Mongo.Repositories; -using StellaOps.Scheduler.Worker.Graph; -using StellaOps.Scheduler.Worker.Graph.Cartographer; -using StellaOps.Scheduler.Worker.Graph.Scheduler; -using StellaOps.Scheduler.Worker.Options; -using Xunit; - -namespace StellaOps.Scheduler.Worker.Tests; - -public sealed class GraphBuildExecutionServiceTests -{ - [Fact] - public async Task ExecuteAsync_Skips_WhenGraphDisabled() - { - var repository = new RecordingGraphJobRepository(); - var cartographer = new StubCartographerBuildClient(); - var completion = new RecordingCompletionClient(); - using var metrics = new SchedulerWorkerMetrics(); - var options = Microsoft.Extensions.Options.Options.Create(new SchedulerWorkerOptions - { - Graph = new SchedulerWorkerOptions.GraphOptions - { - Enabled = false - } - }); - var service = new GraphBuildExecutionService(repository, cartographer, completion, options, metrics, TimeProvider.System, NullLogger<GraphBuildExecutionService>.Instance); - - var job = CreateGraphJob(); - var result = await service.ExecuteAsync(job, CancellationToken.None); - - Assert.Equal(GraphBuildExecutionResultType.Skipped, result.Type); - Assert.Equal("graph_processing_disabled", result.Reason); - Assert.Equal(0, repository.ReplaceCalls); - Assert.Equal(0, cartographer.CallCount); - Assert.Empty(completion.Notifications); - } - - [Fact] - public async Task ExecuteAsync_CompletesJob_OnSuccess() - { - var repository = new RecordingGraphJobRepository(); - var cartographer = new StubCartographerBuildClient - { - Result = new CartographerBuildResult( - GraphJobStatus.Completed, - CartographerJobId: "carto-1", - GraphSnapshotId: "graph_snap", - ResultUri: "oras://graph/result", - Error: null) - }; - var completion = new RecordingCompletionClient(); - using var metrics = new SchedulerWorkerMetrics(); - var options = Microsoft.Extensions.Options.Options.Create(new SchedulerWorkerOptions - { - Graph = new SchedulerWorkerOptions.GraphOptions - { - Enabled = true, - MaxAttempts = 2, - RetryBackoff = TimeSpan.FromMilliseconds(10) - } - }); - var service = new GraphBuildExecutionService(repository, cartographer, completion, options, metrics, TimeProvider.System, NullLogger<GraphBuildExecutionService>.Instance); - - var job = CreateGraphJob(); - var result = await service.ExecuteAsync(job, CancellationToken.None); - - Assert.Equal(GraphBuildExecutionResultType.Completed, result.Type); - Assert.Single(completion.Notifications); - var notification = completion.Notifications[0]; - Assert.Equal(job.Id, notification.JobId); - Assert.Equal("Build", notification.JobType); - Assert.Equal(GraphJobStatus.Completed, notification.Status); - Assert.Equal("oras://graph/result", notification.ResultUri); - Assert.Equal("graph_snap", notification.GraphSnapshotId); - Assert.Null(notification.Error); - Assert.Equal(1, cartographer.CallCount); - Assert.True(repository.ReplaceCalls >= 1); - } - - [Fact] - public async Task ExecuteAsync_Fails_AfterMaxAttempts() - { - var repository = new RecordingGraphJobRepository(); - var cartographer = new StubCartographerBuildClient - { - ExceptionToThrow = new 
InvalidOperationException("network") - }; - var completion = new RecordingCompletionClient(); - using var metrics = new SchedulerWorkerMetrics(); - var options = Microsoft.Extensions.Options.Options.Create(new SchedulerWorkerOptions - { - Graph = new SchedulerWorkerOptions.GraphOptions - { - Enabled = true, - MaxAttempts = 2, - RetryBackoff = TimeSpan.FromMilliseconds(1) - } - }); - var service = new GraphBuildExecutionService(repository, cartographer, completion, options, metrics, TimeProvider.System, NullLogger<GraphBuildExecutionService>.Instance); - - var job = CreateGraphJob(); - var result = await service.ExecuteAsync(job, CancellationToken.None); - - Assert.Equal(GraphBuildExecutionResultType.Failed, result.Type); - Assert.Equal(2, cartographer.CallCount); - Assert.Single(completion.Notifications); - Assert.Equal(GraphJobStatus.Failed, completion.Notifications[0].Status); - Assert.Equal("network", completion.Notifications[0].Error); - } - - [Fact] - public async Task ExecuteAsync_Skips_WhenConcurrencyConflict() - { - var repository = new RecordingGraphJobRepository - { - ShouldReplaceSucceed = false - }; - var cartographer = new StubCartographerBuildClient(); - var completion = new RecordingCompletionClient(); - using var metrics = new SchedulerWorkerMetrics(); - var options = Microsoft.Extensions.Options.Options.Create(new SchedulerWorkerOptions - { - Graph = new SchedulerWorkerOptions.GraphOptions - { - Enabled = true - } - }); - var service = new GraphBuildExecutionService(repository, cartographer, completion, options, metrics, TimeProvider.System, NullLogger<GraphBuildExecutionService>.Instance); - - var job = CreateGraphJob(); - var result = await service.ExecuteAsync(job, CancellationToken.None); - - Assert.Equal(GraphBuildExecutionResultType.Skipped, result.Type); - Assert.Equal("concurrency_conflict", result.Reason); - Assert.Equal(0, cartographer.CallCount); - Assert.Empty(completion.Notifications); - } - - private static GraphBuildJob CreateGraphJob() => new( - id: "gbj_1", - tenantId: "tenant-alpha", - sbomId: "sbom-1", - sbomVersionId: "sbom-1-v1", - sbomDigest: "sha256:" + new string('a', 64), - status: GraphJobStatus.Pending, - trigger: GraphBuildJobTrigger.SbomVersion, - createdAt: DateTimeOffset.UtcNow, - attempts: 0, - metadata: Array.Empty<KeyValuePair<string, string>>()); - - private sealed class RecordingGraphJobRepository : IGraphJobRepository - { - public int ReplaceCalls { get; private set; } - - public bool ShouldReplaceSucceed { get; set; } = true; - - public Task<bool> TryReplaceAsync(GraphBuildJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken = default) - { - if (!ShouldReplaceSucceed) - { - return Task.FromResult(false); - } - - ReplaceCalls++; - return Task.FromResult(true); - } - - public Task<GraphBuildJob> ReplaceAsync(GraphBuildJob job, CancellationToken cancellationToken = default) - => throw new NotImplementedException(); - - public Task<GraphOverlayJob> ReplaceAsync(GraphOverlayJob job, CancellationToken cancellationToken = default) - => throw new NotImplementedException(); - - public Task InsertAsync(GraphBuildJob job, CancellationToken cancellationToken = default) - => throw new NotImplementedException(); - - public Task InsertAsync(GraphOverlayJob job, CancellationToken cancellationToken = default) - => throw new NotImplementedException(); - - public Task<GraphBuildJob?> GetBuildJobAsync(string tenantId, string jobId, CancellationToken cancellationToken = default) - => throw new NotImplementedException(); - - public 
Task<GraphOverlayJob?> GetOverlayJobAsync(string tenantId, string jobId, CancellationToken cancellationToken = default) - => throw new NotImplementedException(); - - public Task<IReadOnlyList<GraphBuildJob>> ListBuildJobsAsync(string tenantId, GraphJobStatus? status, int limit, CancellationToken cancellationToken = default) - => throw new NotImplementedException(); - - public Task<IReadOnlyList<GraphBuildJob>> ListBuildJobsAsync(GraphJobStatus? status, int limit, CancellationToken cancellationToken = default) - => throw new NotImplementedException(); - - public Task<IReadOnlyList<GraphOverlayJob>> ListOverlayJobsAsync(string tenantId, GraphJobStatus? status, int limit, CancellationToken cancellationToken = default) - => throw new NotImplementedException(); - - public Task<IReadOnlyList<GraphOverlayJob>> ListOverlayJobsAsync(GraphJobStatus? status, int limit, CancellationToken cancellationToken = default) - => throw new NotImplementedException(); - - public Task<IReadOnlyCollection<GraphOverlayJob>> ListOverlayJobsAsync(string tenantId, CancellationToken cancellationToken = default) - => throw new NotImplementedException(); - - public Task<bool> TryReplaceOverlayAsync(GraphOverlayJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken = default) - => throw new NotImplementedException(); - } - - private sealed class StubCartographerBuildClient : ICartographerBuildClient - { - public CartographerBuildResult Result { get; set; } = new(GraphJobStatus.Completed, null, null, null, null); - - public Exception? ExceptionToThrow { get; set; } - - public int CallCount { get; private set; } - - public Task<CartographerBuildResult> StartBuildAsync(GraphBuildJob job, CancellationToken cancellationToken) - { - CallCount++; - - if (ExceptionToThrow is not null) - { - throw ExceptionToThrow; - } - - return Task.FromResult(Result); - } - } - - private sealed class RecordingCompletionClient : IGraphJobCompletionClient - { - public List<GraphJobCompletionRequestDto> Notifications { get; } = new(); - - public Task NotifyAsync(GraphJobCompletionRequestDto request, CancellationToken cancellationToken) - { - Notifications.Add(request); - return Task.CompletedTask; - } - } -} +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using StellaOps.Scheduler.Models; +using StellaOps.Scheduler.Storage.Mongo.Repositories; +using StellaOps.Scheduler.Worker.Graph; +using StellaOps.Scheduler.Worker.Graph.Cartographer; +using StellaOps.Scheduler.Worker.Graph.Scheduler; +using StellaOps.Scheduler.Worker.Options; +using Xunit; + +namespace StellaOps.Scheduler.Worker.Tests; + +public sealed class GraphBuildExecutionServiceTests +{ + [Fact] + public async Task ExecuteAsync_Skips_WhenGraphDisabled() + { + var repository = new RecordingGraphJobRepository(); + var cartographer = new StubCartographerBuildClient(); + var completion = new RecordingCompletionClient(); + using var metrics = new SchedulerWorkerMetrics(); + var options = Microsoft.Extensions.Options.Options.Create(new SchedulerWorkerOptions + { + Graph = new SchedulerWorkerOptions.GraphOptions + { + Enabled = false + } + }); + var service = new GraphBuildExecutionService(repository, cartographer, completion, options, metrics, TimeProvider.System, NullLogger<GraphBuildExecutionService>.Instance); + + var job = CreateGraphJob(); + var result = await service.ExecuteAsync(job, CancellationToken.None); + + 
Assert.Equal(GraphBuildExecutionResultType.Skipped, result.Type); + Assert.Equal("graph_processing_disabled", result.Reason); + Assert.Equal(0, repository.ReplaceCalls); + Assert.Equal(0, cartographer.CallCount); + Assert.Empty(completion.Notifications); + } + + [Fact] + public async Task ExecuteAsync_CompletesJob_OnSuccess() + { + var repository = new RecordingGraphJobRepository(); + var cartographer = new StubCartographerBuildClient + { + Result = new CartographerBuildResult( + GraphJobStatus.Completed, + CartographerJobId: "carto-1", + GraphSnapshotId: "graph_snap", + ResultUri: "oras://graph/result", + Error: null) + }; + var completion = new RecordingCompletionClient(); + using var metrics = new SchedulerWorkerMetrics(); + var options = Microsoft.Extensions.Options.Options.Create(new SchedulerWorkerOptions + { + Graph = new SchedulerWorkerOptions.GraphOptions + { + Enabled = true, + MaxAttempts = 2, + RetryBackoff = TimeSpan.FromMilliseconds(10) + } + }); + var service = new GraphBuildExecutionService(repository, cartographer, completion, options, metrics, TimeProvider.System, NullLogger<GraphBuildExecutionService>.Instance); + + var job = CreateGraphJob(); + var result = await service.ExecuteAsync(job, CancellationToken.None); + + Assert.Equal(GraphBuildExecutionResultType.Completed, result.Type); + Assert.Single(completion.Notifications); + var notification = completion.Notifications[0]; + Assert.Equal(job.Id, notification.JobId); + Assert.Equal("Build", notification.JobType); + Assert.Equal(GraphJobStatus.Completed, notification.Status); + Assert.Equal("oras://graph/result", notification.ResultUri); + Assert.Equal("graph_snap", notification.GraphSnapshotId); + Assert.Null(notification.Error); + Assert.Equal(1, cartographer.CallCount); + Assert.True(repository.ReplaceCalls >= 1); + } + + [Fact] + public async Task ExecuteAsync_Fails_AfterMaxAttempts() + { + var repository = new RecordingGraphJobRepository(); + var cartographer = new StubCartographerBuildClient + { + ExceptionToThrow = new InvalidOperationException("network") + }; + var completion = new RecordingCompletionClient(); + using var metrics = new SchedulerWorkerMetrics(); + var options = Microsoft.Extensions.Options.Options.Create(new SchedulerWorkerOptions + { + Graph = new SchedulerWorkerOptions.GraphOptions + { + Enabled = true, + MaxAttempts = 2, + RetryBackoff = TimeSpan.FromMilliseconds(1) + } + }); + var service = new GraphBuildExecutionService(repository, cartographer, completion, options, metrics, TimeProvider.System, NullLogger<GraphBuildExecutionService>.Instance); + + var job = CreateGraphJob(); + var result = await service.ExecuteAsync(job, CancellationToken.None); + + Assert.Equal(GraphBuildExecutionResultType.Failed, result.Type); + Assert.Equal(2, cartographer.CallCount); + Assert.Single(completion.Notifications); + Assert.Equal(GraphJobStatus.Failed, completion.Notifications[0].Status); + Assert.Equal("network", completion.Notifications[0].Error); + } + + [Fact] + public async Task ExecuteAsync_Skips_WhenConcurrencyConflict() + { + var repository = new RecordingGraphJobRepository + { + ShouldReplaceSucceed = false + }; + var cartographer = new StubCartographerBuildClient(); + var completion = new RecordingCompletionClient(); + using var metrics = new SchedulerWorkerMetrics(); + var options = Microsoft.Extensions.Options.Options.Create(new SchedulerWorkerOptions + { + Graph = new SchedulerWorkerOptions.GraphOptions + { + Enabled = true + } + }); + var service = new GraphBuildExecutionService(repository, 
cartographer, completion, options, metrics, TimeProvider.System, NullLogger<GraphBuildExecutionService>.Instance); + + var job = CreateGraphJob(); + var result = await service.ExecuteAsync(job, CancellationToken.None); + + Assert.Equal(GraphBuildExecutionResultType.Skipped, result.Type); + Assert.Equal("concurrency_conflict", result.Reason); + Assert.Equal(0, cartographer.CallCount); + Assert.Empty(completion.Notifications); + } + + private static GraphBuildJob CreateGraphJob() => new( + id: "gbj_1", + tenantId: "tenant-alpha", + sbomId: "sbom-1", + sbomVersionId: "sbom-1-v1", + sbomDigest: "sha256:" + new string('a', 64), + status: GraphJobStatus.Pending, + trigger: GraphBuildJobTrigger.SbomVersion, + createdAt: DateTimeOffset.UtcNow, + attempts: 0, + metadata: Array.Empty<KeyValuePair<string, string>>()); + + private sealed class RecordingGraphJobRepository : IGraphJobRepository + { + public int ReplaceCalls { get; private set; } + + public bool ShouldReplaceSucceed { get; set; } = true; + + public Task<bool> TryReplaceAsync(GraphBuildJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken = default) + { + if (!ShouldReplaceSucceed) + { + return Task.FromResult(false); + } + + ReplaceCalls++; + return Task.FromResult(true); + } + + public Task<GraphBuildJob> ReplaceAsync(GraphBuildJob job, CancellationToken cancellationToken = default) + => throw new NotImplementedException(); + + public Task<GraphOverlayJob> ReplaceAsync(GraphOverlayJob job, CancellationToken cancellationToken = default) + => throw new NotImplementedException(); + + public Task InsertAsync(GraphBuildJob job, CancellationToken cancellationToken = default) + => throw new NotImplementedException(); + + public Task InsertAsync(GraphOverlayJob job, CancellationToken cancellationToken = default) + => throw new NotImplementedException(); + + public Task<GraphBuildJob?> GetBuildJobAsync(string tenantId, string jobId, CancellationToken cancellationToken = default) + => throw new NotImplementedException(); + + public Task<GraphOverlayJob?> GetOverlayJobAsync(string tenantId, string jobId, CancellationToken cancellationToken = default) + => throw new NotImplementedException(); + + public Task<IReadOnlyList<GraphBuildJob>> ListBuildJobsAsync(string tenantId, GraphJobStatus? status, int limit, CancellationToken cancellationToken = default) + => throw new NotImplementedException(); + + public Task<IReadOnlyList<GraphBuildJob>> ListBuildJobsAsync(GraphJobStatus? status, int limit, CancellationToken cancellationToken = default) + => throw new NotImplementedException(); + + public Task<IReadOnlyList<GraphOverlayJob>> ListOverlayJobsAsync(string tenantId, GraphJobStatus? status, int limit, CancellationToken cancellationToken = default) + => throw new NotImplementedException(); + + public Task<IReadOnlyList<GraphOverlayJob>> ListOverlayJobsAsync(GraphJobStatus? 
status, int limit, CancellationToken cancellationToken = default) + => throw new NotImplementedException(); + + public Task<IReadOnlyCollection<GraphOverlayJob>> ListOverlayJobsAsync(string tenantId, CancellationToken cancellationToken = default) + => throw new NotImplementedException(); + + public Task<bool> TryReplaceOverlayAsync(GraphOverlayJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken = default) + => throw new NotImplementedException(); + } + + private sealed class StubCartographerBuildClient : ICartographerBuildClient + { + public CartographerBuildResult Result { get; set; } = new(GraphJobStatus.Completed, null, null, null, null); + + public Exception? ExceptionToThrow { get; set; } + + public int CallCount { get; private set; } + + public Task<CartographerBuildResult> StartBuildAsync(GraphBuildJob job, CancellationToken cancellationToken) + { + CallCount++; + + if (ExceptionToThrow is not null) + { + throw ExceptionToThrow; + } + + return Task.FromResult(Result); + } + } + + private sealed class RecordingCompletionClient : IGraphJobCompletionClient + { + public List<GraphJobCompletionRequestDto> Notifications { get; } = new(); + + public Task NotifyAsync(GraphJobCompletionRequestDto request, CancellationToken cancellationToken) + { + Notifications.Add(request); + return Task.CompletedTask; + } + } +} diff --git a/src/StellaOps.Scheduler.Worker.Tests/GraphOverlayExecutionServiceTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests/GraphOverlayExecutionServiceTests.cs similarity index 97% rename from src/StellaOps.Scheduler.Worker.Tests/GraphOverlayExecutionServiceTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests/GraphOverlayExecutionServiceTests.cs index 0ded8e85..036c6ae7 100644 --- a/src/StellaOps.Scheduler.Worker.Tests/GraphOverlayExecutionServiceTests.cs +++ b/src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests/GraphOverlayExecutionServiceTests.cs @@ -1,237 +1,237 @@ -using System; -using System.Collections.Generic; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.Extensions.Options; -using StellaOps.Scheduler.Models; -using StellaOps.Scheduler.Storage.Mongo.Repositories; -using StellaOps.Scheduler.Worker.Graph; -using StellaOps.Scheduler.Worker.Graph.Cartographer; -using StellaOps.Scheduler.Worker.Graph.Scheduler; -using StellaOps.Scheduler.Worker.Options; -using Xunit; - -namespace StellaOps.Scheduler.Worker.Tests; - -public sealed class GraphOverlayExecutionServiceTests -{ - [Fact] - public async Task ExecuteAsync_Skips_WhenGraphDisabled() - { - var repository = new RecordingGraphJobRepository(); - var cartographer = new StubCartographerOverlayClient(); - var completion = new RecordingCompletionClient(); - using var metrics = new SchedulerWorkerMetrics(); - var options = Microsoft.Extensions.Options.Options.Create(new SchedulerWorkerOptions - { - Graph = new SchedulerWorkerOptions.GraphOptions - { - Enabled = false - } - }); - var service = new GraphOverlayExecutionService(repository, cartographer, completion, options, metrics, TimeProvider.System, NullLogger<GraphOverlayExecutionService>.Instance); - - var job = CreateOverlayJob(); - var result = await service.ExecuteAsync(job, CancellationToken.None); - - Assert.Equal(GraphOverlayExecutionResultType.Skipped, result.Type); - Assert.Equal("graph_processing_disabled", result.Reason); - Assert.Empty(completion.Notifications); - Assert.Equal(0, cartographer.CallCount); - } - - [Fact] - 
public async Task ExecuteAsync_CompletesJob_OnSuccess() - { - var repository = new RecordingGraphJobRepository(); - var cartographer = new StubCartographerOverlayClient - { - Result = new CartographerOverlayResult( - GraphJobStatus.Completed, - GraphSnapshotId: "graph_snap_2", - ResultUri: "oras://graph/overlay", - Error: null) - }; - var completion = new RecordingCompletionClient(); - using var metrics = new SchedulerWorkerMetrics(); - var options = Microsoft.Extensions.Options.Options.Create(new SchedulerWorkerOptions - { - Graph = new SchedulerWorkerOptions.GraphOptions - { - Enabled = true, - MaxAttempts = 2, - RetryBackoff = TimeSpan.FromMilliseconds(5) - } - }); - var service = new GraphOverlayExecutionService(repository, cartographer, completion, options, metrics, TimeProvider.System, NullLogger<GraphOverlayExecutionService>.Instance); - - var job = CreateOverlayJob(); - var result = await service.ExecuteAsync(job, CancellationToken.None); - - Assert.Equal(GraphOverlayExecutionResultType.Completed, result.Type); - Assert.Single(completion.Notifications); - var notification = completion.Notifications[0]; - Assert.Equal("Overlay", notification.JobType); - Assert.Equal(GraphJobStatus.Completed, notification.Status); - Assert.Equal("oras://graph/overlay", notification.ResultUri); - Assert.Equal("graph_snap_2", notification.GraphSnapshotId); - } - - [Fact] - public async Task ExecuteAsync_Fails_AfterRetries() - { - var repository = new RecordingGraphJobRepository(); - var cartographer = new StubCartographerOverlayClient - { - ExceptionToThrow = new InvalidOperationException("overlay failed") - }; - var completion = new RecordingCompletionClient(); - using var metrics = new SchedulerWorkerMetrics(); - var options = Microsoft.Extensions.Options.Options.Create(new SchedulerWorkerOptions - { - Graph = new SchedulerWorkerOptions.GraphOptions - { - Enabled = true, - MaxAttempts = 2, - RetryBackoff = TimeSpan.FromMilliseconds(1) - } - }); - var service = new GraphOverlayExecutionService(repository, cartographer, completion, options, metrics, TimeProvider.System, NullLogger<GraphOverlayExecutionService>.Instance); - - var job = CreateOverlayJob(); - var result = await service.ExecuteAsync(job, CancellationToken.None); - - Assert.Equal(GraphOverlayExecutionResultType.Failed, result.Type); - Assert.Single(completion.Notifications); - Assert.Equal(GraphJobStatus.Failed, completion.Notifications[0].Status); - Assert.Equal("overlay failed", completion.Notifications[0].Error); - } - - [Fact] - public async Task ExecuteAsync_Skips_WhenConcurrencyConflict() - { - var repository = new RecordingGraphJobRepository - { - ShouldReplaceSucceed = false - }; - var cartographer = new StubCartographerOverlayClient(); - var completion = new RecordingCompletionClient(); - using var metrics = new SchedulerWorkerMetrics(); - var options = Microsoft.Extensions.Options.Options.Create(new SchedulerWorkerOptions - { - Graph = new SchedulerWorkerOptions.GraphOptions - { - Enabled = true - } - }); - var service = new GraphOverlayExecutionService(repository, cartographer, completion, options, metrics, TimeProvider.System, NullLogger<GraphOverlayExecutionService>.Instance); - - var job = CreateOverlayJob(); - var result = await service.ExecuteAsync(job, CancellationToken.None); - - Assert.Equal(GraphOverlayExecutionResultType.Skipped, result.Type); - Assert.Equal("concurrency_conflict", result.Reason); - Assert.Empty(completion.Notifications); - Assert.Equal(0, cartographer.CallCount); - } - - private static GraphOverlayJob 
CreateOverlayJob() => new( - id: "goj_1", - tenantId: "tenant-alpha", - graphSnapshotId: "snap-1", - overlayKind: GraphOverlayKind.Policy, - overlayKey: "policy@1", - status: GraphJobStatus.Pending, - trigger: GraphOverlayJobTrigger.Policy, - createdAt: DateTimeOffset.UtcNow, - subjects: Array.Empty<string>(), - attempts: 0, - metadata: Array.Empty<KeyValuePair<string, string>>()); - - private sealed class RecordingGraphJobRepository : IGraphJobRepository - { - public bool ShouldReplaceSucceed { get; set; } = true; - - public int RunningReplacements { get; private set; } - - public Task<bool> TryReplaceOverlayAsync(GraphOverlayJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken = default) - { - if (!ShouldReplaceSucceed) - { - return Task.FromResult(false); - } - - RunningReplacements++; - return Task.FromResult(true); - } - - public Task<bool> TryReplaceAsync(GraphBuildJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken = default) - => throw new NotImplementedException(); - - public Task<GraphBuildJob> ReplaceAsync(GraphBuildJob job, CancellationToken cancellationToken = default) - => throw new NotImplementedException(); - - public Task<GraphOverlayJob> ReplaceAsync(GraphOverlayJob job, CancellationToken cancellationToken = default) - => throw new NotImplementedException(); - - public Task InsertAsync(GraphBuildJob job, CancellationToken cancellationToken = default) - => throw new NotImplementedException(); - - public Task InsertAsync(GraphOverlayJob job, CancellationToken cancellationToken = default) - => throw new NotImplementedException(); - - public Task<GraphBuildJob?> GetBuildJobAsync(string tenantId, string jobId, CancellationToken cancellationToken = default) - => throw new NotImplementedException(); - - public Task<GraphOverlayJob?> GetOverlayJobAsync(string tenantId, string jobId, CancellationToken cancellationToken = default) - => throw new NotImplementedException(); - - public Task<IReadOnlyList<GraphBuildJob>> ListBuildJobsAsync(string tenantId, GraphJobStatus? status, int limit, CancellationToken cancellationToken = default) - => throw new NotImplementedException(); - - public Task<IReadOnlyList<GraphBuildJob>> ListBuildJobsAsync(GraphJobStatus? status, int limit, CancellationToken cancellationToken = default) - => throw new NotImplementedException(); - - public Task<IReadOnlyList<GraphOverlayJob>> ListOverlayJobsAsync(string tenantId, GraphJobStatus? status, int limit, CancellationToken cancellationToken = default) - => throw new NotImplementedException(); - - public Task<IReadOnlyList<GraphOverlayJob>> ListOverlayJobsAsync(GraphJobStatus? status, int limit, CancellationToken cancellationToken = default) - => throw new NotImplementedException(); - - public Task<IReadOnlyCollection<GraphOverlayJob>> ListOverlayJobsAsync(string tenantId, CancellationToken cancellationToken = default) - => throw new NotImplementedException(); - } - - private sealed class StubCartographerOverlayClient : ICartographerOverlayClient - { - public CartographerOverlayResult Result { get; set; } = new(GraphJobStatus.Completed, null, null, null); - - public Exception? 
ExceptionToThrow { get; set; } - - public int CallCount { get; private set; } - - public Task<CartographerOverlayResult> StartOverlayAsync(GraphOverlayJob job, CancellationToken cancellationToken) - { - CallCount++; - - if (ExceptionToThrow is not null) - { - throw ExceptionToThrow; - } - - return Task.FromResult(Result); - } - } - - private sealed class RecordingCompletionClient : IGraphJobCompletionClient - { - public List<GraphJobCompletionRequestDto> Notifications { get; } = new(); - - public Task NotifyAsync(GraphJobCompletionRequestDto request, CancellationToken cancellationToken) - { - Notifications.Add(request); - return Task.CompletedTask; - } - } -} +using System; +using System.Collections.Generic; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using StellaOps.Scheduler.Models; +using StellaOps.Scheduler.Storage.Mongo.Repositories; +using StellaOps.Scheduler.Worker.Graph; +using StellaOps.Scheduler.Worker.Graph.Cartographer; +using StellaOps.Scheduler.Worker.Graph.Scheduler; +using StellaOps.Scheduler.Worker.Options; +using Xunit; + +namespace StellaOps.Scheduler.Worker.Tests; + +public sealed class GraphOverlayExecutionServiceTests +{ + [Fact] + public async Task ExecuteAsync_Skips_WhenGraphDisabled() + { + var repository = new RecordingGraphJobRepository(); + var cartographer = new StubCartographerOverlayClient(); + var completion = new RecordingCompletionClient(); + using var metrics = new SchedulerWorkerMetrics(); + var options = Microsoft.Extensions.Options.Options.Create(new SchedulerWorkerOptions + { + Graph = new SchedulerWorkerOptions.GraphOptions + { + Enabled = false + } + }); + var service = new GraphOverlayExecutionService(repository, cartographer, completion, options, metrics, TimeProvider.System, NullLogger<GraphOverlayExecutionService>.Instance); + + var job = CreateOverlayJob(); + var result = await service.ExecuteAsync(job, CancellationToken.None); + + Assert.Equal(GraphOverlayExecutionResultType.Skipped, result.Type); + Assert.Equal("graph_processing_disabled", result.Reason); + Assert.Empty(completion.Notifications); + Assert.Equal(0, cartographer.CallCount); + } + + [Fact] + public async Task ExecuteAsync_CompletesJob_OnSuccess() + { + var repository = new RecordingGraphJobRepository(); + var cartographer = new StubCartographerOverlayClient + { + Result = new CartographerOverlayResult( + GraphJobStatus.Completed, + GraphSnapshotId: "graph_snap_2", + ResultUri: "oras://graph/overlay", + Error: null) + }; + var completion = new RecordingCompletionClient(); + using var metrics = new SchedulerWorkerMetrics(); + var options = Microsoft.Extensions.Options.Options.Create(new SchedulerWorkerOptions + { + Graph = new SchedulerWorkerOptions.GraphOptions + { + Enabled = true, + MaxAttempts = 2, + RetryBackoff = TimeSpan.FromMilliseconds(5) + } + }); + var service = new GraphOverlayExecutionService(repository, cartographer, completion, options, metrics, TimeProvider.System, NullLogger<GraphOverlayExecutionService>.Instance); + + var job = CreateOverlayJob(); + var result = await service.ExecuteAsync(job, CancellationToken.None); + + Assert.Equal(GraphOverlayExecutionResultType.Completed, result.Type); + Assert.Single(completion.Notifications); + var notification = completion.Notifications[0]; + Assert.Equal("Overlay", notification.JobType); + Assert.Equal(GraphJobStatus.Completed, notification.Status); + Assert.Equal("oras://graph/overlay", notification.ResultUri); + 
Assert.Equal("graph_snap_2", notification.GraphSnapshotId); + } + + [Fact] + public async Task ExecuteAsync_Fails_AfterRetries() + { + var repository = new RecordingGraphJobRepository(); + var cartographer = new StubCartographerOverlayClient + { + ExceptionToThrow = new InvalidOperationException("overlay failed") + }; + var completion = new RecordingCompletionClient(); + using var metrics = new SchedulerWorkerMetrics(); + var options = Microsoft.Extensions.Options.Options.Create(new SchedulerWorkerOptions + { + Graph = new SchedulerWorkerOptions.GraphOptions + { + Enabled = true, + MaxAttempts = 2, + RetryBackoff = TimeSpan.FromMilliseconds(1) + } + }); + var service = new GraphOverlayExecutionService(repository, cartographer, completion, options, metrics, TimeProvider.System, NullLogger<GraphOverlayExecutionService>.Instance); + + var job = CreateOverlayJob(); + var result = await service.ExecuteAsync(job, CancellationToken.None); + + Assert.Equal(GraphOverlayExecutionResultType.Failed, result.Type); + Assert.Single(completion.Notifications); + Assert.Equal(GraphJobStatus.Failed, completion.Notifications[0].Status); + Assert.Equal("overlay failed", completion.Notifications[0].Error); + } + + [Fact] + public async Task ExecuteAsync_Skips_WhenConcurrencyConflict() + { + var repository = new RecordingGraphJobRepository + { + ShouldReplaceSucceed = false + }; + var cartographer = new StubCartographerOverlayClient(); + var completion = new RecordingCompletionClient(); + using var metrics = new SchedulerWorkerMetrics(); + var options = Microsoft.Extensions.Options.Options.Create(new SchedulerWorkerOptions + { + Graph = new SchedulerWorkerOptions.GraphOptions + { + Enabled = true + } + }); + var service = new GraphOverlayExecutionService(repository, cartographer, completion, options, metrics, TimeProvider.System, NullLogger<GraphOverlayExecutionService>.Instance); + + var job = CreateOverlayJob(); + var result = await service.ExecuteAsync(job, CancellationToken.None); + + Assert.Equal(GraphOverlayExecutionResultType.Skipped, result.Type); + Assert.Equal("concurrency_conflict", result.Reason); + Assert.Empty(completion.Notifications); + Assert.Equal(0, cartographer.CallCount); + } + + private static GraphOverlayJob CreateOverlayJob() => new( + id: "goj_1", + tenantId: "tenant-alpha", + graphSnapshotId: "snap-1", + overlayKind: GraphOverlayKind.Policy, + overlayKey: "policy@1", + status: GraphJobStatus.Pending, + trigger: GraphOverlayJobTrigger.Policy, + createdAt: DateTimeOffset.UtcNow, + subjects: Array.Empty<string>(), + attempts: 0, + metadata: Array.Empty<KeyValuePair<string, string>>()); + + private sealed class RecordingGraphJobRepository : IGraphJobRepository + { + public bool ShouldReplaceSucceed { get; set; } = true; + + public int RunningReplacements { get; private set; } + + public Task<bool> TryReplaceOverlayAsync(GraphOverlayJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken = default) + { + if (!ShouldReplaceSucceed) + { + return Task.FromResult(false); + } + + RunningReplacements++; + return Task.FromResult(true); + } + + public Task<bool> TryReplaceAsync(GraphBuildJob job, GraphJobStatus expectedStatus, CancellationToken cancellationToken = default) + => throw new NotImplementedException(); + + public Task<GraphBuildJob> ReplaceAsync(GraphBuildJob job, CancellationToken cancellationToken = default) + => throw new NotImplementedException(); + + public Task<GraphOverlayJob> ReplaceAsync(GraphOverlayJob job, CancellationToken cancellationToken = default) + => 
throw new NotImplementedException(); + + public Task InsertAsync(GraphBuildJob job, CancellationToken cancellationToken = default) + => throw new NotImplementedException(); + + public Task InsertAsync(GraphOverlayJob job, CancellationToken cancellationToken = default) + => throw new NotImplementedException(); + + public Task<GraphBuildJob?> GetBuildJobAsync(string tenantId, string jobId, CancellationToken cancellationToken = default) + => throw new NotImplementedException(); + + public Task<GraphOverlayJob?> GetOverlayJobAsync(string tenantId, string jobId, CancellationToken cancellationToken = default) + => throw new NotImplementedException(); + + public Task<IReadOnlyList<GraphBuildJob>> ListBuildJobsAsync(string tenantId, GraphJobStatus? status, int limit, CancellationToken cancellationToken = default) + => throw new NotImplementedException(); + + public Task<IReadOnlyList<GraphBuildJob>> ListBuildJobsAsync(GraphJobStatus? status, int limit, CancellationToken cancellationToken = default) + => throw new NotImplementedException(); + + public Task<IReadOnlyList<GraphOverlayJob>> ListOverlayJobsAsync(string tenantId, GraphJobStatus? status, int limit, CancellationToken cancellationToken = default) + => throw new NotImplementedException(); + + public Task<IReadOnlyList<GraphOverlayJob>> ListOverlayJobsAsync(GraphJobStatus? status, int limit, CancellationToken cancellationToken = default) + => throw new NotImplementedException(); + + public Task<IReadOnlyCollection<GraphOverlayJob>> ListOverlayJobsAsync(string tenantId, CancellationToken cancellationToken = default) + => throw new NotImplementedException(); + } + + private sealed class StubCartographerOverlayClient : ICartographerOverlayClient + { + public CartographerOverlayResult Result { get; set; } = new(GraphJobStatus.Completed, null, null, null); + + public Exception? 
ExceptionToThrow { get; set; } + + public int CallCount { get; private set; } + + public Task<CartographerOverlayResult> StartOverlayAsync(GraphOverlayJob job, CancellationToken cancellationToken) + { + CallCount++; + + if (ExceptionToThrow is not null) + { + throw ExceptionToThrow; + } + + return Task.FromResult(Result); + } + } + + private sealed class RecordingCompletionClient : IGraphJobCompletionClient + { + public List<GraphJobCompletionRequestDto> Notifications { get; } = new(); + + public Task NotifyAsync(GraphJobCompletionRequestDto request, CancellationToken cancellationToken) + { + Notifications.Add(request); + return Task.CompletedTask; + } + } +} diff --git a/src/StellaOps.Scheduler.Worker.Tests/HttpScannerReportClientTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests/HttpScannerReportClientTests.cs similarity index 100% rename from src/StellaOps.Scheduler.Worker.Tests/HttpScannerReportClientTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests/HttpScannerReportClientTests.cs diff --git a/src/StellaOps.Scheduler.Worker.Tests/ImpactShardPlannerTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests/ImpactShardPlannerTests.cs similarity index 100% rename from src/StellaOps.Scheduler.Worker.Tests/ImpactShardPlannerTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests/ImpactShardPlannerTests.cs diff --git a/src/StellaOps.Scheduler.Worker.Tests/ImpactTargetingServiceTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests/ImpactTargetingServiceTests.cs similarity index 100% rename from src/StellaOps.Scheduler.Worker.Tests/ImpactTargetingServiceTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests/ImpactTargetingServiceTests.cs diff --git a/src/StellaOps.Scheduler.Worker.Tests/PlannerBackgroundServiceTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests/PlannerBackgroundServiceTests.cs similarity index 97% rename from src/StellaOps.Scheduler.Worker.Tests/PlannerBackgroundServiceTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests/PlannerBackgroundServiceTests.cs index c278e53d..beb0bf6a 100644 --- a/src/StellaOps.Scheduler.Worker.Tests/PlannerBackgroundServiceTests.cs +++ b/src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests/PlannerBackgroundServiceTests.cs @@ -1,411 +1,411 @@ -using System.Collections.Concurrent; -using System.Collections.Immutable; -using System.Linq; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging.Abstractions; -using MongoDB.Driver; -using StellaOps.Scheduler.Queue; -using StellaOps.Scheduler.Storage.Mongo.Projections; -using StellaOps.Scheduler.Storage.Mongo.Repositories; -using StellaOps.Scheduler.Storage.Mongo.Services; -using StellaOps.Scheduler.Worker.Options; -using StellaOps.Scheduler.Worker.Observability; -using StellaOps.Scheduler.Worker.Planning; - -namespace StellaOps.Scheduler.Worker.Tests; - -public sealed class PlannerBackgroundServiceTests -{ - [Fact] - public async Task ExecuteAsync_RespectsTenantFairnessCap() - { - var timeProvider = new TestTimeProvider(DateTimeOffset.Parse("2025-10-27T12:00:00Z")); - - var runs = new[] - { - CreateRun("run-a1", "tenant-a", RunTrigger.Manual, timeProvider.GetUtcNow().AddMinutes(1), "schedule-a"), - CreateRun("run-a2", "tenant-a", RunTrigger.Cron, timeProvider.GetUtcNow().AddMinutes(2), "schedule-a"), - CreateRun("run-b1", "tenant-b", RunTrigger.Feedser, timeProvider.GetUtcNow().AddMinutes(3), "schedule-b"), - CreateRun("run-c1", "tenant-c", 
RunTrigger.Cron, timeProvider.GetUtcNow().AddMinutes(4), "schedule-c"), - }; - - var repository = new TestRunRepository(runs, Array.Empty<Run>()); - var options = CreateOptions(maxConcurrentTenants: 2); - var scheduleRepository = new TestScheduleRepository(runs.Select(run => CreateSchedule(run.ScheduleId!, run.TenantId, timeProvider.GetUtcNow()))); - var snapshotRepository = new StubImpactSnapshotRepository(); - var runSummaryService = new StubRunSummaryService(timeProvider); - var plannerQueue = new RecordingPlannerQueue(); - var targetingService = new StubImpactTargetingService(timeProvider); - - using var metrics = new SchedulerWorkerMetrics(); - var executionService = new PlannerExecutionService( - scheduleRepository, - repository, - snapshotRepository, - runSummaryService, - targetingService, - plannerQueue, - options, - timeProvider, - metrics, - NullLogger<PlannerExecutionService>.Instance); - - var service = new PlannerBackgroundService( - repository, - executionService, - options, - timeProvider, - NullLogger<PlannerBackgroundService>.Instance); - - await service.StartAsync(CancellationToken.None); - try - { - await WaitForConditionAsync(() => repository.UpdateCount >= 2); - } - finally - { - await service.StopAsync(CancellationToken.None); - } - - var processedIds = repository.UpdatedRuns.Select(run => run.Id).ToArray(); - Assert.Equal(new[] { "run-a1", "run-b1" }, processedIds); - } - - [Fact] - public async Task ExecuteAsync_PrioritizesManualAndEventTriggers() - { - var timeProvider = new TestTimeProvider(DateTimeOffset.Parse("2025-10-27T18:00:00Z")); - - var runs = new[] - { - CreateRun("run-cron", "tenant-alpha", RunTrigger.Cron, timeProvider.GetUtcNow().AddMinutes(1), "schedule-cron"), - CreateRun("run-feedser", "tenant-bravo", RunTrigger.Feedser, timeProvider.GetUtcNow().AddMinutes(2), "schedule-feedser"), - CreateRun("run-manual", "tenant-charlie", RunTrigger.Manual, timeProvider.GetUtcNow().AddMinutes(3), "schedule-manual"), - CreateRun("run-vexer", "tenant-delta", RunTrigger.Vexer, timeProvider.GetUtcNow().AddMinutes(4), "schedule-vexer"), - }; - - var repository = new TestRunRepository(runs, Array.Empty<Run>()); - var options = CreateOptions(maxConcurrentTenants: 4); - var scheduleRepository = new TestScheduleRepository(runs.Select(run => CreateSchedule(run.ScheduleId!, run.TenantId, timeProvider.GetUtcNow()))); - var snapshotRepository = new StubImpactSnapshotRepository(); - var runSummaryService = new StubRunSummaryService(timeProvider); - var plannerQueue = new RecordingPlannerQueue(); - var targetingService = new StubImpactTargetingService(timeProvider); - - using var metrics = new SchedulerWorkerMetrics(); - var executionService = new PlannerExecutionService( - scheduleRepository, - repository, - snapshotRepository, - runSummaryService, - targetingService, - plannerQueue, - options, - timeProvider, - metrics, - NullLogger<PlannerExecutionService>.Instance); - - var service = new PlannerBackgroundService( - repository, - executionService, - options, - timeProvider, - NullLogger<PlannerBackgroundService>.Instance); - - await service.StartAsync(CancellationToken.None); - try - { - await WaitForConditionAsync(() => repository.UpdateCount >= runs.Length); - } - finally - { - await service.StopAsync(CancellationToken.None); - } - - var processedIds = repository.UpdatedRuns.Select(run => run.Id).ToArray(); - Assert.Equal(new[] { "run-manual", "run-feedser", "run-vexer", "run-cron" }, processedIds); - } - - private static SchedulerWorkerOptions CreateOptions(int 
maxConcurrentTenants) - { - return new SchedulerWorkerOptions - { - Planner = - { - BatchSize = 20, - PollInterval = TimeSpan.FromMilliseconds(1), - IdleDelay = TimeSpan.FromMilliseconds(1), - MaxConcurrentTenants = maxConcurrentTenants, - MaxRunsPerMinute = int.MaxValue, - QueueLeaseDuration = TimeSpan.FromMinutes(5) - } - }; - } - - private static Run CreateRun( - string id, - string tenantId, - RunTrigger trigger, - DateTimeOffset createdAt, - string scheduleId) - => new( - id: id, - tenantId: tenantId, - trigger: trigger, - state: RunState.Planning, - stats: RunStats.Empty, - createdAt: createdAt, - reason: RunReason.Empty, - scheduleId: scheduleId); - - private static Schedule CreateSchedule(string scheduleId, string tenantId, DateTimeOffset now) - => new( - id: scheduleId, - tenantId: tenantId, - name: $"Schedule-{scheduleId}", - enabled: true, - cronExpression: "0 2 * * *", - timezone: "UTC", - mode: ScheduleMode.AnalysisOnly, - selection: new Selector(SelectorScope.AllImages, tenantId), - onlyIf: ScheduleOnlyIf.Default, - notify: ScheduleNotify.Default, - limits: ScheduleLimits.Default, - createdAt: now, - createdBy: "system", - updatedAt: now, - updatedBy: "system", - subscribers: ImmutableArray<string>.Empty); - - private static async Task WaitForConditionAsync(Func<bool> predicate, TimeSpan? timeout = null) - { - var deadline = DateTime.UtcNow + (timeout ?? TimeSpan.FromSeconds(1)); - while (!predicate()) - { - if (DateTime.UtcNow > deadline) - { - throw new TimeoutException("Planner background service did not reach expected state within the allotted time."); - } - - await Task.Delay(10); - } - } - - private sealed class TestRunRepository : IRunRepository - { - private readonly Queue<IReadOnlyList<Run>> _responses; - private readonly ConcurrentQueue<Run> _updates = new(); - private int _updateCount; - - public TestRunRepository(params IReadOnlyList<Run>[] responses) - { - if (responses is null) - { - throw new ArgumentNullException(nameof(responses)); - } - - _responses = new Queue<IReadOnlyList<Run>>(responses.Select(static runs => (IReadOnlyList<Run>)runs.ToArray())); - } - - public int UpdateCount => Volatile.Read(ref _updateCount); - - public IReadOnlyList<Run> UpdatedRuns => _updates.ToArray(); - - public Task InsertAsync(Run run, IClientSessionHandle? session = null, CancellationToken cancellationToken = default) - => throw new NotSupportedException(); - - public Task<bool> UpdateAsync(Run run, IClientSessionHandle? session = null, CancellationToken cancellationToken = default) - { - _updates.Enqueue(run); - Interlocked.Increment(ref _updateCount); - return Task.FromResult(true); - } - - public Task<Run?> GetAsync(string tenantId, string runId, IClientSessionHandle? session = null, CancellationToken cancellationToken = default) - => Task.FromResult<Run?>(null); - - public Task<IReadOnlyList<Run>> ListAsync(string tenantId, RunQueryOptions? options = null, IClientSessionHandle? session = null, CancellationToken cancellationToken = default) - => Task.FromResult<IReadOnlyList<Run>>(Array.Empty<Run>()); - - public Task<IReadOnlyList<Run>> ListByStateAsync(RunState state, int limit = 50, IClientSessionHandle? session = null, CancellationToken cancellationToken = default) - { - if (state != RunState.Planning) - { - return Task.FromResult<IReadOnlyList<Run>>(Array.Empty<Run>()); - } - - var next = _responses.Count > 0 ? 
_responses.Dequeue() : Array.Empty<Run>(); - - if (next.Count > limit) - { - next = next.Take(limit).ToArray(); - } - - return Task.FromResult(next); - } - } - - private sealed class TestScheduleRepository : IScheduleRepository - { - public TestScheduleRepository(IEnumerable<Schedule> schedules) - { - ArgumentNullException.ThrowIfNull(schedules); - - _schedules = new Dictionary<(string TenantId, string ScheduleId), Schedule>(); - foreach (var schedule in schedules) - { - if (schedule is null) - { - continue; - } - - _schedules[(schedule.TenantId, schedule.Id)] = schedule; - } - } - - private readonly Dictionary<(string TenantId, string ScheduleId), Schedule> _schedules; - - public Task UpsertAsync(Schedule schedule, IClientSessionHandle? session = null, CancellationToken cancellationToken = default) - { - _schedules[(schedule.TenantId, schedule.Id)] = schedule; - return Task.CompletedTask; - } - - public Task<Schedule?> GetAsync(string tenantId, string scheduleId, IClientSessionHandle? session = null, CancellationToken cancellationToken = default) - { - _schedules.TryGetValue((tenantId, scheduleId), out var schedule); - return Task.FromResult(schedule); - } - - public Task<IReadOnlyList<Schedule>> ListAsync(string tenantId, ScheduleQueryOptions? options = null, IClientSessionHandle? session = null, CancellationToken cancellationToken = default) - { - var results = _schedules.Values.Where(schedule => schedule.TenantId == tenantId).ToArray(); - return Task.FromResult<IReadOnlyList<Schedule>>(results); - } - - public Task<bool> SoftDeleteAsync(string tenantId, string scheduleId, string deletedBy, DateTimeOffset deletedAt, IClientSessionHandle? session = null, CancellationToken cancellationToken = default) - { - var removed = _schedules.Remove((tenantId, scheduleId)); - return Task.FromResult(removed); - } - } - - private sealed class StubImpactSnapshotRepository : IImpactSnapshotRepository - { - public ImpactSet? LastSnapshot { get; private set; } - - public Task UpsertAsync(ImpactSet snapshot, IClientSessionHandle? session = null, CancellationToken cancellationToken = default) - { - LastSnapshot = snapshot; - return Task.CompletedTask; - } - - public Task<ImpactSet?> GetBySnapshotIdAsync(string snapshotId, IClientSessionHandle? session = null, CancellationToken cancellationToken = default) - => Task.FromResult<ImpactSet?>(null); - - public Task<ImpactSet?> GetLatestBySelectorAsync(Selector selector, IClientSessionHandle? session = null, CancellationToken cancellationToken = default) - => Task.FromResult<ImpactSet?>(null); - } - - private sealed class StubRunSummaryService : IRunSummaryService - { - private readonly TimeProvider _timeProvider; - - public StubRunSummaryService(TimeProvider timeProvider) - { - _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); - } - - public Task<RunSummaryProjection> ProjectAsync(Run run, CancellationToken cancellationToken = default) - { - var projection = new RunSummaryProjection( - run.TenantId, - run.ScheduleId ?? 
string.Empty, - _timeProvider.GetUtcNow(), - null, - ImmutableArray<RunSummarySnapshot>.Empty, - new RunSummaryCounters(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)); - - return Task.FromResult(projection); - } - - public Task<RunSummaryProjection?> GetAsync(string tenantId, string scheduleId, CancellationToken cancellationToken = default) - => Task.FromResult<RunSummaryProjection?>(null); - - public Task<IReadOnlyList<RunSummaryProjection>> ListAsync(string tenantId, CancellationToken cancellationToken = default) - => Task.FromResult<IReadOnlyList<RunSummaryProjection>>(Array.Empty<RunSummaryProjection>()); - } - - private sealed class StubImpactTargetingService : IImpactTargetingService - { - private static readonly string DefaultDigest = "sha256:" + new string('a', 64); - private readonly TimeProvider _timeProvider; - - public StubImpactTargetingService(TimeProvider timeProvider) - { - _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); - } - - public ValueTask<ImpactSet> ResolveByPurlsAsync(IEnumerable<string> productKeys, bool usageOnly, Selector selector, CancellationToken cancellationToken = default) - => throw new NotSupportedException(); - - public ValueTask<ImpactSet> ResolveByVulnerabilitiesAsync(IEnumerable<string> vulnerabilityIds, bool usageOnly, Selector selector, CancellationToken cancellationToken = default) - => throw new NotSupportedException(); - - public ValueTask<ImpactSet> ResolveAllAsync(Selector selector, bool usageOnly, CancellationToken cancellationToken = default) - { - var image = new ImpactImage( - DefaultDigest, - registry: "registry.test", - repository: "repo/sample", - namespaces: new[] { selector.TenantId ?? "unknown" }, - tags: new[] { "latest" }, - usedByEntrypoint: true); - - var impactSet = new ImpactSet( - selector, - ImmutableArray.Create(image), - usageOnly, - _timeProvider.GetUtcNow(), - total: 1, - snapshotId: null, - schemaVersion: SchedulerSchemaVersions.ImpactSet); - - return ValueTask.FromResult(impactSet); - } - } - - private sealed class RecordingPlannerQueue : ISchedulerPlannerQueue - { - private readonly ConcurrentQueue<PlannerQueueMessage> _messages = new(); - - public IReadOnlyList<PlannerQueueMessage> Messages => _messages.ToArray(); - - public ValueTask<SchedulerQueueEnqueueResult> EnqueueAsync(PlannerQueueMessage message, CancellationToken cancellationToken = default) - { - _messages.Enqueue(message); - return ValueTask.FromResult(new SchedulerQueueEnqueueResult(message.Run.Id, Deduplicated: false)); - } - - public ValueTask<IReadOnlyList<ISchedulerQueueLease<PlannerQueueMessage>>> LeaseAsync(SchedulerQueueLeaseRequest request, CancellationToken cancellationToken = default) - => ValueTask.FromResult<IReadOnlyList<ISchedulerQueueLease<PlannerQueueMessage>>>(Array.Empty<ISchedulerQueueLease<PlannerQueueMessage>>()); - - public ValueTask<IReadOnlyList<ISchedulerQueueLease<PlannerQueueMessage>>> ClaimExpiredAsync(SchedulerQueueClaimOptions options, CancellationToken cancellationToken = default) - => ValueTask.FromResult<IReadOnlyList<ISchedulerQueueLease<PlannerQueueMessage>>>(Array.Empty<ISchedulerQueueLease<PlannerQueueMessage>>()); - } - - private sealed class TestTimeProvider : TimeProvider - { - private DateTimeOffset _now; - - public TestTimeProvider(DateTimeOffset initial) - { - _now = initial; - } - - public override DateTimeOffset GetUtcNow() => _now; - - public void Advance(TimeSpan delta) => _now = _now.Add(delta); - } -} +using System.Collections.Concurrent; +using System.Collections.Immutable; +using 
System.Linq; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging.Abstractions; +using MongoDB.Driver; +using StellaOps.Scheduler.Queue; +using StellaOps.Scheduler.Storage.Mongo.Projections; +using StellaOps.Scheduler.Storage.Mongo.Repositories; +using StellaOps.Scheduler.Storage.Mongo.Services; +using StellaOps.Scheduler.Worker.Options; +using StellaOps.Scheduler.Worker.Observability; +using StellaOps.Scheduler.Worker.Planning; + +namespace StellaOps.Scheduler.Worker.Tests; + +public sealed class PlannerBackgroundServiceTests +{ + [Fact] + public async Task ExecuteAsync_RespectsTenantFairnessCap() + { + var timeProvider = new TestTimeProvider(DateTimeOffset.Parse("2025-10-27T12:00:00Z")); + + var runs = new[] + { + CreateRun("run-a1", "tenant-a", RunTrigger.Manual, timeProvider.GetUtcNow().AddMinutes(1), "schedule-a"), + CreateRun("run-a2", "tenant-a", RunTrigger.Cron, timeProvider.GetUtcNow().AddMinutes(2), "schedule-a"), + CreateRun("run-b1", "tenant-b", RunTrigger.Feedser, timeProvider.GetUtcNow().AddMinutes(3), "schedule-b"), + CreateRun("run-c1", "tenant-c", RunTrigger.Cron, timeProvider.GetUtcNow().AddMinutes(4), "schedule-c"), + }; + + var repository = new TestRunRepository(runs, Array.Empty<Run>()); + var options = CreateOptions(maxConcurrentTenants: 2); + var scheduleRepository = new TestScheduleRepository(runs.Select(run => CreateSchedule(run.ScheduleId!, run.TenantId, timeProvider.GetUtcNow()))); + var snapshotRepository = new StubImpactSnapshotRepository(); + var runSummaryService = new StubRunSummaryService(timeProvider); + var plannerQueue = new RecordingPlannerQueue(); + var targetingService = new StubImpactTargetingService(timeProvider); + + using var metrics = new SchedulerWorkerMetrics(); + var executionService = new PlannerExecutionService( + scheduleRepository, + repository, + snapshotRepository, + runSummaryService, + targetingService, + plannerQueue, + options, + timeProvider, + metrics, + NullLogger<PlannerExecutionService>.Instance); + + var service = new PlannerBackgroundService( + repository, + executionService, + options, + timeProvider, + NullLogger<PlannerBackgroundService>.Instance); + + await service.StartAsync(CancellationToken.None); + try + { + await WaitForConditionAsync(() => repository.UpdateCount >= 2); + } + finally + { + await service.StopAsync(CancellationToken.None); + } + + var processedIds = repository.UpdatedRuns.Select(run => run.Id).ToArray(); + Assert.Equal(new[] { "run-a1", "run-b1" }, processedIds); + } + + [Fact] + public async Task ExecuteAsync_PrioritizesManualAndEventTriggers() + { + var timeProvider = new TestTimeProvider(DateTimeOffset.Parse("2025-10-27T18:00:00Z")); + + var runs = new[] + { + CreateRun("run-cron", "tenant-alpha", RunTrigger.Cron, timeProvider.GetUtcNow().AddMinutes(1), "schedule-cron"), + CreateRun("run-feedser", "tenant-bravo", RunTrigger.Feedser, timeProvider.GetUtcNow().AddMinutes(2), "schedule-feedser"), + CreateRun("run-manual", "tenant-charlie", RunTrigger.Manual, timeProvider.GetUtcNow().AddMinutes(3), "schedule-manual"), + CreateRun("run-vexer", "tenant-delta", RunTrigger.Vexer, timeProvider.GetUtcNow().AddMinutes(4), "schedule-vexer"), + }; + + var repository = new TestRunRepository(runs, Array.Empty<Run>()); + var options = CreateOptions(maxConcurrentTenants: 4); + var scheduleRepository = new TestScheduleRepository(runs.Select(run => CreateSchedule(run.ScheduleId!, run.TenantId, timeProvider.GetUtcNow()))); + var snapshotRepository = new 
StubImpactSnapshotRepository(); + var runSummaryService = new StubRunSummaryService(timeProvider); + var plannerQueue = new RecordingPlannerQueue(); + var targetingService = new StubImpactTargetingService(timeProvider); + + using var metrics = new SchedulerWorkerMetrics(); + var executionService = new PlannerExecutionService( + scheduleRepository, + repository, + snapshotRepository, + runSummaryService, + targetingService, + plannerQueue, + options, + timeProvider, + metrics, + NullLogger<PlannerExecutionService>.Instance); + + var service = new PlannerBackgroundService( + repository, + executionService, + options, + timeProvider, + NullLogger<PlannerBackgroundService>.Instance); + + await service.StartAsync(CancellationToken.None); + try + { + await WaitForConditionAsync(() => repository.UpdateCount >= runs.Length); + } + finally + { + await service.StopAsync(CancellationToken.None); + } + + var processedIds = repository.UpdatedRuns.Select(run => run.Id).ToArray(); + Assert.Equal(new[] { "run-manual", "run-feedser", "run-vexer", "run-cron" }, processedIds); + } + + private static SchedulerWorkerOptions CreateOptions(int maxConcurrentTenants) + { + return new SchedulerWorkerOptions + { + Planner = + { + BatchSize = 20, + PollInterval = TimeSpan.FromMilliseconds(1), + IdleDelay = TimeSpan.FromMilliseconds(1), + MaxConcurrentTenants = maxConcurrentTenants, + MaxRunsPerMinute = int.MaxValue, + QueueLeaseDuration = TimeSpan.FromMinutes(5) + } + }; + } + + private static Run CreateRun( + string id, + string tenantId, + RunTrigger trigger, + DateTimeOffset createdAt, + string scheduleId) + => new( + id: id, + tenantId: tenantId, + trigger: trigger, + state: RunState.Planning, + stats: RunStats.Empty, + createdAt: createdAt, + reason: RunReason.Empty, + scheduleId: scheduleId); + + private static Schedule CreateSchedule(string scheduleId, string tenantId, DateTimeOffset now) + => new( + id: scheduleId, + tenantId: tenantId, + name: $"Schedule-{scheduleId}", + enabled: true, + cronExpression: "0 2 * * *", + timezone: "UTC", + mode: ScheduleMode.AnalysisOnly, + selection: new Selector(SelectorScope.AllImages, tenantId), + onlyIf: ScheduleOnlyIf.Default, + notify: ScheduleNotify.Default, + limits: ScheduleLimits.Default, + createdAt: now, + createdBy: "system", + updatedAt: now, + updatedBy: "system", + subscribers: ImmutableArray<string>.Empty); + + private static async Task WaitForConditionAsync(Func<bool> predicate, TimeSpan? timeout = null) + { + var deadline = DateTime.UtcNow + (timeout ?? TimeSpan.FromSeconds(1)); + while (!predicate()) + { + if (DateTime.UtcNow > deadline) + { + throw new TimeoutException("Planner background service did not reach expected state within the allotted time."); + } + + await Task.Delay(10); + } + } + + private sealed class TestRunRepository : IRunRepository + { + private readonly Queue<IReadOnlyList<Run>> _responses; + private readonly ConcurrentQueue<Run> _updates = new(); + private int _updateCount; + + public TestRunRepository(params IReadOnlyList<Run>[] responses) + { + if (responses is null) + { + throw new ArgumentNullException(nameof(responses)); + } + + _responses = new Queue<IReadOnlyList<Run>>(responses.Select(static runs => (IReadOnlyList<Run>)runs.ToArray())); + } + + public int UpdateCount => Volatile.Read(ref _updateCount); + + public IReadOnlyList<Run> UpdatedRuns => _updates.ToArray(); + + public Task InsertAsync(Run run, IClientSessionHandle? 
session = null, CancellationToken cancellationToken = default) + => throw new NotSupportedException(); + + public Task<bool> UpdateAsync(Run run, IClientSessionHandle? session = null, CancellationToken cancellationToken = default) + { + _updates.Enqueue(run); + Interlocked.Increment(ref _updateCount); + return Task.FromResult(true); + } + + public Task<Run?> GetAsync(string tenantId, string runId, IClientSessionHandle? session = null, CancellationToken cancellationToken = default) + => Task.FromResult<Run?>(null); + + public Task<IReadOnlyList<Run>> ListAsync(string tenantId, RunQueryOptions? options = null, IClientSessionHandle? session = null, CancellationToken cancellationToken = default) + => Task.FromResult<IReadOnlyList<Run>>(Array.Empty<Run>()); + + public Task<IReadOnlyList<Run>> ListByStateAsync(RunState state, int limit = 50, IClientSessionHandle? session = null, CancellationToken cancellationToken = default) + { + if (state != RunState.Planning) + { + return Task.FromResult<IReadOnlyList<Run>>(Array.Empty<Run>()); + } + + var next = _responses.Count > 0 ? _responses.Dequeue() : Array.Empty<Run>(); + + if (next.Count > limit) + { + next = next.Take(limit).ToArray(); + } + + return Task.FromResult(next); + } + } + + private sealed class TestScheduleRepository : IScheduleRepository + { + public TestScheduleRepository(IEnumerable<Schedule> schedules) + { + ArgumentNullException.ThrowIfNull(schedules); + + _schedules = new Dictionary<(string TenantId, string ScheduleId), Schedule>(); + foreach (var schedule in schedules) + { + if (schedule is null) + { + continue; + } + + _schedules[(schedule.TenantId, schedule.Id)] = schedule; + } + } + + private readonly Dictionary<(string TenantId, string ScheduleId), Schedule> _schedules; + + public Task UpsertAsync(Schedule schedule, IClientSessionHandle? session = null, CancellationToken cancellationToken = default) + { + _schedules[(schedule.TenantId, schedule.Id)] = schedule; + return Task.CompletedTask; + } + + public Task<Schedule?> GetAsync(string tenantId, string scheduleId, IClientSessionHandle? session = null, CancellationToken cancellationToken = default) + { + _schedules.TryGetValue((tenantId, scheduleId), out var schedule); + return Task.FromResult(schedule); + } + + public Task<IReadOnlyList<Schedule>> ListAsync(string tenantId, ScheduleQueryOptions? options = null, IClientSessionHandle? session = null, CancellationToken cancellationToken = default) + { + var results = _schedules.Values.Where(schedule => schedule.TenantId == tenantId).ToArray(); + return Task.FromResult<IReadOnlyList<Schedule>>(results); + } + + public Task<bool> SoftDeleteAsync(string tenantId, string scheduleId, string deletedBy, DateTimeOffset deletedAt, IClientSessionHandle? session = null, CancellationToken cancellationToken = default) + { + var removed = _schedules.Remove((tenantId, scheduleId)); + return Task.FromResult(removed); + } + } + + private sealed class StubImpactSnapshotRepository : IImpactSnapshotRepository + { + public ImpactSet? LastSnapshot { get; private set; } + + public Task UpsertAsync(ImpactSet snapshot, IClientSessionHandle? session = null, CancellationToken cancellationToken = default) + { + LastSnapshot = snapshot; + return Task.CompletedTask; + } + + public Task<ImpactSet?> GetBySnapshotIdAsync(string snapshotId, IClientSessionHandle? session = null, CancellationToken cancellationToken = default) + => Task.FromResult<ImpactSet?>(null); + + public Task<ImpactSet?> GetLatestBySelectorAsync(Selector selector, IClientSessionHandle? 
session = null, CancellationToken cancellationToken = default) + => Task.FromResult<ImpactSet?>(null); + } + + private sealed class StubRunSummaryService : IRunSummaryService + { + private readonly TimeProvider _timeProvider; + + public StubRunSummaryService(TimeProvider timeProvider) + { + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + } + + public Task<RunSummaryProjection> ProjectAsync(Run run, CancellationToken cancellationToken = default) + { + var projection = new RunSummaryProjection( + run.TenantId, + run.ScheduleId ?? string.Empty, + _timeProvider.GetUtcNow(), + null, + ImmutableArray<RunSummarySnapshot>.Empty, + new RunSummaryCounters(0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0)); + + return Task.FromResult(projection); + } + + public Task<RunSummaryProjection?> GetAsync(string tenantId, string scheduleId, CancellationToken cancellationToken = default) + => Task.FromResult<RunSummaryProjection?>(null); + + public Task<IReadOnlyList<RunSummaryProjection>> ListAsync(string tenantId, CancellationToken cancellationToken = default) + => Task.FromResult<IReadOnlyList<RunSummaryProjection>>(Array.Empty<RunSummaryProjection>()); + } + + private sealed class StubImpactTargetingService : IImpactTargetingService + { + private static readonly string DefaultDigest = "sha256:" + new string('a', 64); + private readonly TimeProvider _timeProvider; + + public StubImpactTargetingService(TimeProvider timeProvider) + { + _timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + } + + public ValueTask<ImpactSet> ResolveByPurlsAsync(IEnumerable<string> productKeys, bool usageOnly, Selector selector, CancellationToken cancellationToken = default) + => throw new NotSupportedException(); + + public ValueTask<ImpactSet> ResolveByVulnerabilitiesAsync(IEnumerable<string> vulnerabilityIds, bool usageOnly, Selector selector, CancellationToken cancellationToken = default) + => throw new NotSupportedException(); + + public ValueTask<ImpactSet> ResolveAllAsync(Selector selector, bool usageOnly, CancellationToken cancellationToken = default) + { + var image = new ImpactImage( + DefaultDigest, + registry: "registry.test", + repository: "repo/sample", + namespaces: new[] { selector.TenantId ?? 
"unknown" }, + tags: new[] { "latest" }, + usedByEntrypoint: true); + + var impactSet = new ImpactSet( + selector, + ImmutableArray.Create(image), + usageOnly, + _timeProvider.GetUtcNow(), + total: 1, + snapshotId: null, + schemaVersion: SchedulerSchemaVersions.ImpactSet); + + return ValueTask.FromResult(impactSet); + } + } + + private sealed class RecordingPlannerQueue : ISchedulerPlannerQueue + { + private readonly ConcurrentQueue<PlannerQueueMessage> _messages = new(); + + public IReadOnlyList<PlannerQueueMessage> Messages => _messages.ToArray(); + + public ValueTask<SchedulerQueueEnqueueResult> EnqueueAsync(PlannerQueueMessage message, CancellationToken cancellationToken = default) + { + _messages.Enqueue(message); + return ValueTask.FromResult(new SchedulerQueueEnqueueResult(message.Run.Id, Deduplicated: false)); + } + + public ValueTask<IReadOnlyList<ISchedulerQueueLease<PlannerQueueMessage>>> LeaseAsync(SchedulerQueueLeaseRequest request, CancellationToken cancellationToken = default) + => ValueTask.FromResult<IReadOnlyList<ISchedulerQueueLease<PlannerQueueMessage>>>(Array.Empty<ISchedulerQueueLease<PlannerQueueMessage>>()); + + public ValueTask<IReadOnlyList<ISchedulerQueueLease<PlannerQueueMessage>>> ClaimExpiredAsync(SchedulerQueueClaimOptions options, CancellationToken cancellationToken = default) + => ValueTask.FromResult<IReadOnlyList<ISchedulerQueueLease<PlannerQueueMessage>>>(Array.Empty<ISchedulerQueueLease<PlannerQueueMessage>>()); + } + + private sealed class TestTimeProvider : TimeProvider + { + private DateTimeOffset _now; + + public TestTimeProvider(DateTimeOffset initial) + { + _now = initial; + } + + public override DateTimeOffset GetUtcNow() => _now; + + public void Advance(TimeSpan delta) => _now = _now.Add(delta); + } +} diff --git a/src/StellaOps.Scheduler.Worker.Tests/PlannerExecutionServiceTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests/PlannerExecutionServiceTests.cs similarity index 100% rename from src/StellaOps.Scheduler.Worker.Tests/PlannerExecutionServiceTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests/PlannerExecutionServiceTests.cs diff --git a/src/StellaOps.Scheduler.Worker.Tests/PlannerQueueDispatchServiceTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests/PlannerQueueDispatchServiceTests.cs similarity index 100% rename from src/StellaOps.Scheduler.Worker.Tests/PlannerQueueDispatchServiceTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests/PlannerQueueDispatchServiceTests.cs diff --git a/src/StellaOps.Scheduler.Worker.Tests/PolicyRunExecutionServiceTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests/PolicyRunExecutionServiceTests.cs similarity index 97% rename from src/StellaOps.Scheduler.Worker.Tests/PolicyRunExecutionServiceTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests/PolicyRunExecutionServiceTests.cs index 46a9c195..2db7c7d3 100644 --- a/src/StellaOps.Scheduler.Worker.Tests/PolicyRunExecutionServiceTests.cs +++ b/src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests/PolicyRunExecutionServiceTests.cs @@ -1,328 +1,328 @@ -using System; -using System.Collections.Immutable; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.Extensions.Options; -using MongoDB.Driver; -using StellaOps.Scheduler.Models; -using StellaOps.Scheduler.Storage.Mongo.Repositories; -using StellaOps.Scheduler.Worker.Options; -using StellaOps.Scheduler.Worker.Observability; -using 
StellaOps.Scheduler.Worker.Policy; -using Xunit; - -namespace StellaOps.Scheduler.Worker.Tests; - -public sealed class PolicyRunExecutionServiceTests -{ - private static readonly SchedulerWorkerOptions WorkerOptions = new() - { - Policy = - { - Dispatch = - { - LeaseOwner = "test-dispatch", - BatchSize = 1, - LeaseDuration = TimeSpan.FromMinutes(1), - IdleDelay = TimeSpan.FromMilliseconds(10), - MaxAttempts = 2, - RetryBackoff = TimeSpan.FromSeconds(30) - }, - Api = - { - BaseAddress = new Uri("https://policy.example.com"), - RunsPath = "/api/policy/policies/{policyId}/runs", - SimulatePath = "/api/policy/policies/{policyId}/simulate" - } - } - }; - - [Fact] - public async Task ExecuteAsync_CancelsJob_WhenCancellationRequested() - { - var repository = new RecordingPolicyRunJobRepository(); - var client = new StubPolicyRunClient(); - var options = Microsoft.Extensions.Options.Options.Create(CloneOptions()); - var timeProvider = new TestTimeProvider(DateTimeOffset.Parse("2025-10-28T10:00:00Z")); - using var metrics = new SchedulerWorkerMetrics(); - var targeting = new StubPolicyRunTargetingService - { - OnEnsureTargets = job => PolicyRunTargetingResult.Unchanged(job) - }; - var service = new PolicyRunExecutionService(repository, client, options, timeProvider, metrics, targeting, NullLogger<PolicyRunExecutionService>.Instance); - - var job = CreateJob(status: PolicyRunJobStatus.Dispatching) with - { - CancellationRequested = true, - LeaseOwner = "test-dispatch", - LeaseExpiresAt = timeProvider.GetUtcNow().AddMinutes(1) - }; - - var result = await service.ExecuteAsync(job, CancellationToken.None); - - Assert.Equal(PolicyRunExecutionResultType.Cancelled, result.Type); - Assert.Equal(PolicyRunJobStatus.Cancelled, result.UpdatedJob.Status); - Assert.True(repository.ReplaceCalled); - Assert.Equal("test-dispatch", repository.ExpectedLeaseOwner); - } - - [Fact] - public async Task ExecuteAsync_SubmitsJob_OnSuccess() - { - var repository = new RecordingPolicyRunJobRepository(); - var client = new StubPolicyRunClient - { - Result = PolicyRunSubmissionResult.Succeeded("run:P-7:2025", DateTimeOffset.Parse("2025-10-28T10:01:00Z")) - }; - var options = Microsoft.Extensions.Options.Options.Create(CloneOptions()); - var timeProvider = new TestTimeProvider(DateTimeOffset.Parse("2025-10-28T10:00:00Z")); - using var metrics = new SchedulerWorkerMetrics(); - var targeting = new StubPolicyRunTargetingService - { - OnEnsureTargets = job => PolicyRunTargetingResult.Unchanged(job) - }; - var service = new PolicyRunExecutionService(repository, client, options, timeProvider, metrics, targeting, NullLogger<PolicyRunExecutionService>.Instance); - - var job = CreateJob(status: PolicyRunJobStatus.Dispatching) with - { - LeaseOwner = "test-dispatch", - LeaseExpiresAt = timeProvider.GetUtcNow().AddMinutes(1) - }; - - var result = await service.ExecuteAsync(job, CancellationToken.None); - - Assert.Equal(PolicyRunExecutionResultType.Submitted, result.Type); - Assert.Equal(PolicyRunJobStatus.Submitted, result.UpdatedJob.Status); - Assert.Equal("run:P-7:2025", result.UpdatedJob.RunId); - Assert.Equal(job.AttemptCount + 1, result.UpdatedJob.AttemptCount); - Assert.Null(result.UpdatedJob.LastError); - Assert.True(repository.ReplaceCalled); - } - - [Fact] - public async Task ExecuteAsync_RetriesJob_OnFailure() - { - var repository = new RecordingPolicyRunJobRepository(); - var client = new StubPolicyRunClient - { - Result = PolicyRunSubmissionResult.Failed("timeout") - }; - var options = 
Microsoft.Extensions.Options.Options.Create(CloneOptions()); - var timeProvider = new TestTimeProvider(DateTimeOffset.Parse("2025-10-28T10:00:00Z")); - using var metrics = new SchedulerWorkerMetrics(); - var targeting = new StubPolicyRunTargetingService - { - OnEnsureTargets = job => PolicyRunTargetingResult.Unchanged(job) - }; - var service = new PolicyRunExecutionService(repository, client, options, timeProvider, metrics, targeting, NullLogger<PolicyRunExecutionService>.Instance); - - var job = CreateJob(status: PolicyRunJobStatus.Dispatching) with - { - LeaseOwner = "test-dispatch", - LeaseExpiresAt = timeProvider.GetUtcNow().AddMinutes(1) - }; - - var result = await service.ExecuteAsync(job, CancellationToken.None); - - Assert.Equal(PolicyRunExecutionResultType.Retrying, result.Type); - Assert.Equal(PolicyRunJobStatus.Pending, result.UpdatedJob.Status); - Assert.Equal(job.AttemptCount + 1, result.UpdatedJob.AttemptCount); - Assert.Equal("timeout", result.UpdatedJob.LastError); - Assert.True(result.UpdatedJob.AvailableAt > job.AvailableAt); - } - - [Fact] - public async Task ExecuteAsync_MarksJobFailed_WhenAttemptsExceeded() - { - var repository = new RecordingPolicyRunJobRepository(); - var client = new StubPolicyRunClient - { - Result = PolicyRunSubmissionResult.Failed("bad request") - }; - var optionsValue = CloneOptions(); - optionsValue.Policy.Dispatch.MaxAttempts = 1; - var options = Microsoft.Extensions.Options.Options.Create(optionsValue); - var timeProvider = new TestTimeProvider(DateTimeOffset.Parse("2025-10-28T10:00:00Z")); - using var metrics = new SchedulerWorkerMetrics(); - var targeting = new StubPolicyRunTargetingService - { - OnEnsureTargets = job => PolicyRunTargetingResult.Unchanged(job) - }; - var service = new PolicyRunExecutionService(repository, client, options, timeProvider, metrics, targeting, NullLogger<PolicyRunExecutionService>.Instance); - - var job = CreateJob(status: PolicyRunJobStatus.Dispatching, attemptCount: 0) with - { - LeaseOwner = "test-dispatch", - LeaseExpiresAt = timeProvider.GetUtcNow().AddMinutes(1) - }; - - var result = await service.ExecuteAsync(job, CancellationToken.None); - - Assert.Equal(PolicyRunExecutionResultType.Failed, result.Type); - Assert.Equal(PolicyRunJobStatus.Failed, result.UpdatedJob.Status); - Assert.Equal("bad request", result.UpdatedJob.LastError); - } - - [Fact] - public async Task ExecuteAsync_NoWork_CompletesJob() - { - var repository = new RecordingPolicyRunJobRepository(); - var client = new StubPolicyRunClient(); - var options = Microsoft.Extensions.Options.Options.Create(CloneOptions()); - var timeProvider = new TestTimeProvider(DateTimeOffset.Parse("2025-10-28T10:00:00Z")); - using var metrics = new SchedulerWorkerMetrics(); - var targeting = new StubPolicyRunTargetingService - { - OnEnsureTargets = job => PolicyRunTargetingResult.NoWork(job, "empty") - }; - var service = new PolicyRunExecutionService(repository, client, options, timeProvider, metrics, targeting, NullLogger<PolicyRunExecutionService>.Instance); - - var job = CreateJob(status: PolicyRunJobStatus.Dispatching, inputs: PolicyRunInputs.Empty) with - { - LeaseOwner = "test-dispatch", - LeaseExpiresAt = timeProvider.GetUtcNow().AddMinutes(1) - }; - - var result = await service.ExecuteAsync(job, CancellationToken.None); - - Assert.Equal(PolicyRunExecutionResultType.NoOp, result.Type); - Assert.Equal(PolicyRunJobStatus.Completed, result.UpdatedJob.Status); - Assert.True(repository.ReplaceCalled); - Assert.Equal("test-dispatch", 
repository.ExpectedLeaseOwner); - } - - private static PolicyRunJob CreateJob(PolicyRunJobStatus status, int attemptCount = 0, PolicyRunInputs? inputs = null) - { - var resolvedInputs = inputs ?? new PolicyRunInputs(sbomSet: new[] { "sbom:S-42" }, captureExplain: true); - var metadata = ImmutableSortedDictionary.Create<string, string>(StringComparer.Ordinal); - return new PolicyRunJob( - SchemaVersion: SchedulerSchemaVersions.PolicyRunJob, - Id: "job_1", - TenantId: "tenant-alpha", - PolicyId: "P-7", - PolicyVersion: 4, - Mode: PolicyRunMode.Incremental, - Priority: PolicyRunPriority.Normal, - PriorityRank: -1, - RunId: "run:P-7:2025", - RequestedBy: "user:cli", - CorrelationId: "corr-1", - Metadata: metadata, - Inputs: resolvedInputs, - QueuedAt: DateTimeOffset.Parse("2025-10-28T09:59:00Z"), - Status: status, - AttemptCount: attemptCount, - LastAttemptAt: null, - LastError: null, - CreatedAt: DateTimeOffset.Parse("2025-10-28T09:58:00Z"), - UpdatedAt: DateTimeOffset.Parse("2025-10-28T09:58:00Z"), - AvailableAt: DateTimeOffset.Parse("2025-10-28T09:59:00Z"), - SubmittedAt: null, - CompletedAt: null, - LeaseOwner: null, - LeaseExpiresAt: null, - CancellationRequested: false, - CancellationRequestedAt: null, - CancellationReason: null, - CancelledAt: null); - } - - private static SchedulerWorkerOptions CloneOptions() - { - return new SchedulerWorkerOptions - { - Policy = new SchedulerWorkerOptions.PolicyOptions - { - Enabled = WorkerOptions.Policy.Enabled, - Dispatch = new SchedulerWorkerOptions.PolicyOptions.DispatchOptions - { - LeaseOwner = WorkerOptions.Policy.Dispatch.LeaseOwner, - BatchSize = WorkerOptions.Policy.Dispatch.BatchSize, - LeaseDuration = WorkerOptions.Policy.Dispatch.LeaseDuration, - IdleDelay = WorkerOptions.Policy.Dispatch.IdleDelay, - MaxAttempts = WorkerOptions.Policy.Dispatch.MaxAttempts, - RetryBackoff = WorkerOptions.Policy.Dispatch.RetryBackoff - }, - Api = new SchedulerWorkerOptions.PolicyOptions.ApiOptions - { - BaseAddress = WorkerOptions.Policy.Api.BaseAddress, - RunsPath = WorkerOptions.Policy.Api.RunsPath, - SimulatePath = WorkerOptions.Policy.Api.SimulatePath, - TenantHeader = WorkerOptions.Policy.Api.TenantHeader, - IdempotencyHeader = WorkerOptions.Policy.Api.IdempotencyHeader, - RequestTimeout = WorkerOptions.Policy.Api.RequestTimeout - }, - Targeting = new SchedulerWorkerOptions.PolicyOptions.TargetingOptions - { - Enabled = WorkerOptions.Policy.Targeting.Enabled, - MaxSboms = WorkerOptions.Policy.Targeting.MaxSboms, - DefaultUsageOnly = WorkerOptions.Policy.Targeting.DefaultUsageOnly - } - } - }; - } - - private sealed class StubPolicyRunTargetingService : IPolicyRunTargetingService - { - public Func<PolicyRunJob, PolicyRunTargetingResult>? OnEnsureTargets { get; set; } - - public Task<PolicyRunTargetingResult> EnsureTargetsAsync(PolicyRunJob job, CancellationToken cancellationToken) - => Task.FromResult(OnEnsureTargets?.Invoke(job) ?? PolicyRunTargetingResult.Unchanged(job)); - } - - private sealed class RecordingPolicyRunJobRepository : IPolicyRunJobRepository - { - public bool ReplaceCalled { get; private set; } - public string? ExpectedLeaseOwner { get; private set; } - public PolicyRunJob? LastJob { get; private set; } - - public Task<PolicyRunJob?> GetAsync(string tenantId, string jobId, IClientSessionHandle? session = null, CancellationToken cancellationToken = default) - => Task.FromResult<PolicyRunJob?>(null); - - public Task<PolicyRunJob?> GetByRunIdAsync(string tenantId, string runId, IClientSessionHandle? 
session = null, CancellationToken cancellationToken = default) - => Task.FromResult<PolicyRunJob?>(null); - - public Task InsertAsync(PolicyRunJob job, IClientSessionHandle? session = null, CancellationToken cancellationToken = default) - { - LastJob = job; - return Task.CompletedTask; - } - - public Task<PolicyRunJob?> LeaseAsync(string leaseOwner, DateTimeOffset now, TimeSpan leaseDuration, int maxAttempts, IClientSessionHandle? session = null, CancellationToken cancellationToken = default) - => Task.FromResult<PolicyRunJob?>(null); - - public Task<bool> ReplaceAsync(PolicyRunJob job, string? expectedLeaseOwner = null, IClientSessionHandle? session = null, CancellationToken cancellationToken = default) - { - ReplaceCalled = true; - ExpectedLeaseOwner = expectedLeaseOwner; - LastJob = job; - return Task.FromResult(true); - } - - public Task<IReadOnlyList<PolicyRunJob>> ListAsync(string tenantId, string? policyId = null, PolicyRunMode? mode = null, IReadOnlyCollection<PolicyRunJobStatus>? statuses = null, DateTimeOffset? queuedAfter = null, int limit = 50, IClientSessionHandle? session = null, CancellationToken cancellationToken = default) - => Task.FromResult<IReadOnlyList<PolicyRunJob>>(Array.Empty<PolicyRunJob>()); - } - - private sealed class StubPolicyRunClient : IPolicyRunClient - { - public PolicyRunSubmissionResult Result { get; set; } = PolicyRunSubmissionResult.Succeeded(null, null); - - public Task<PolicyRunSubmissionResult> SubmitAsync(PolicyRunJob job, PolicyRunRequest request, CancellationToken cancellationToken) - => Task.FromResult(Result); - } - - private sealed class TestTimeProvider : TimeProvider - { - private DateTimeOffset _now; - - public TestTimeProvider(DateTimeOffset now) - { - _now = now; - } - - public override DateTimeOffset GetUtcNow() => _now; - - public void Advance(TimeSpan delta) => _now = _now.Add(delta); - } -} +using System; +using System.Collections.Immutable; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using MongoDB.Driver; +using StellaOps.Scheduler.Models; +using StellaOps.Scheduler.Storage.Mongo.Repositories; +using StellaOps.Scheduler.Worker.Options; +using StellaOps.Scheduler.Worker.Observability; +using StellaOps.Scheduler.Worker.Policy; +using Xunit; + +namespace StellaOps.Scheduler.Worker.Tests; + +public sealed class PolicyRunExecutionServiceTests +{ + private static readonly SchedulerWorkerOptions WorkerOptions = new() + { + Policy = + { + Dispatch = + { + LeaseOwner = "test-dispatch", + BatchSize = 1, + LeaseDuration = TimeSpan.FromMinutes(1), + IdleDelay = TimeSpan.FromMilliseconds(10), + MaxAttempts = 2, + RetryBackoff = TimeSpan.FromSeconds(30) + }, + Api = + { + BaseAddress = new Uri("https://policy.example.com"), + RunsPath = "/api/policy/policies/{policyId}/runs", + SimulatePath = "/api/policy/policies/{policyId}/simulate" + } + } + }; + + [Fact] + public async Task ExecuteAsync_CancelsJob_WhenCancellationRequested() + { + var repository = new RecordingPolicyRunJobRepository(); + var client = new StubPolicyRunClient(); + var options = Microsoft.Extensions.Options.Options.Create(CloneOptions()); + var timeProvider = new TestTimeProvider(DateTimeOffset.Parse("2025-10-28T10:00:00Z")); + using var metrics = new SchedulerWorkerMetrics(); + var targeting = new StubPolicyRunTargetingService + { + OnEnsureTargets = job => PolicyRunTargetingResult.Unchanged(job) + }; + var service = new PolicyRunExecutionService(repository, client, options, 
timeProvider, metrics, targeting, NullLogger<PolicyRunExecutionService>.Instance); + + var job = CreateJob(status: PolicyRunJobStatus.Dispatching) with + { + CancellationRequested = true, + LeaseOwner = "test-dispatch", + LeaseExpiresAt = timeProvider.GetUtcNow().AddMinutes(1) + }; + + var result = await service.ExecuteAsync(job, CancellationToken.None); + + Assert.Equal(PolicyRunExecutionResultType.Cancelled, result.Type); + Assert.Equal(PolicyRunJobStatus.Cancelled, result.UpdatedJob.Status); + Assert.True(repository.ReplaceCalled); + Assert.Equal("test-dispatch", repository.ExpectedLeaseOwner); + } + + [Fact] + public async Task ExecuteAsync_SubmitsJob_OnSuccess() + { + var repository = new RecordingPolicyRunJobRepository(); + var client = new StubPolicyRunClient + { + Result = PolicyRunSubmissionResult.Succeeded("run:P-7:2025", DateTimeOffset.Parse("2025-10-28T10:01:00Z")) + }; + var options = Microsoft.Extensions.Options.Options.Create(CloneOptions()); + var timeProvider = new TestTimeProvider(DateTimeOffset.Parse("2025-10-28T10:00:00Z")); + using var metrics = new SchedulerWorkerMetrics(); + var targeting = new StubPolicyRunTargetingService + { + OnEnsureTargets = job => PolicyRunTargetingResult.Unchanged(job) + }; + var service = new PolicyRunExecutionService(repository, client, options, timeProvider, metrics, targeting, NullLogger<PolicyRunExecutionService>.Instance); + + var job = CreateJob(status: PolicyRunJobStatus.Dispatching) with + { + LeaseOwner = "test-dispatch", + LeaseExpiresAt = timeProvider.GetUtcNow().AddMinutes(1) + }; + + var result = await service.ExecuteAsync(job, CancellationToken.None); + + Assert.Equal(PolicyRunExecutionResultType.Submitted, result.Type); + Assert.Equal(PolicyRunJobStatus.Submitted, result.UpdatedJob.Status); + Assert.Equal("run:P-7:2025", result.UpdatedJob.RunId); + Assert.Equal(job.AttemptCount + 1, result.UpdatedJob.AttemptCount); + Assert.Null(result.UpdatedJob.LastError); + Assert.True(repository.ReplaceCalled); + } + + [Fact] + public async Task ExecuteAsync_RetriesJob_OnFailure() + { + var repository = new RecordingPolicyRunJobRepository(); + var client = new StubPolicyRunClient + { + Result = PolicyRunSubmissionResult.Failed("timeout") + }; + var options = Microsoft.Extensions.Options.Options.Create(CloneOptions()); + var timeProvider = new TestTimeProvider(DateTimeOffset.Parse("2025-10-28T10:00:00Z")); + using var metrics = new SchedulerWorkerMetrics(); + var targeting = new StubPolicyRunTargetingService + { + OnEnsureTargets = job => PolicyRunTargetingResult.Unchanged(job) + }; + var service = new PolicyRunExecutionService(repository, client, options, timeProvider, metrics, targeting, NullLogger<PolicyRunExecutionService>.Instance); + + var job = CreateJob(status: PolicyRunJobStatus.Dispatching) with + { + LeaseOwner = "test-dispatch", + LeaseExpiresAt = timeProvider.GetUtcNow().AddMinutes(1) + }; + + var result = await service.ExecuteAsync(job, CancellationToken.None); + + Assert.Equal(PolicyRunExecutionResultType.Retrying, result.Type); + Assert.Equal(PolicyRunJobStatus.Pending, result.UpdatedJob.Status); + Assert.Equal(job.AttemptCount + 1, result.UpdatedJob.AttemptCount); + Assert.Equal("timeout", result.UpdatedJob.LastError); + Assert.True(result.UpdatedJob.AvailableAt > job.AvailableAt); + } + + [Fact] + public async Task ExecuteAsync_MarksJobFailed_WhenAttemptsExceeded() + { + var repository = new RecordingPolicyRunJobRepository(); + var client = new StubPolicyRunClient + { + Result = PolicyRunSubmissionResult.Failed("bad 
request") + }; + var optionsValue = CloneOptions(); + optionsValue.Policy.Dispatch.MaxAttempts = 1; + var options = Microsoft.Extensions.Options.Options.Create(optionsValue); + var timeProvider = new TestTimeProvider(DateTimeOffset.Parse("2025-10-28T10:00:00Z")); + using var metrics = new SchedulerWorkerMetrics(); + var targeting = new StubPolicyRunTargetingService + { + OnEnsureTargets = job => PolicyRunTargetingResult.Unchanged(job) + }; + var service = new PolicyRunExecutionService(repository, client, options, timeProvider, metrics, targeting, NullLogger<PolicyRunExecutionService>.Instance); + + var job = CreateJob(status: PolicyRunJobStatus.Dispatching, attemptCount: 0) with + { + LeaseOwner = "test-dispatch", + LeaseExpiresAt = timeProvider.GetUtcNow().AddMinutes(1) + }; + + var result = await service.ExecuteAsync(job, CancellationToken.None); + + Assert.Equal(PolicyRunExecutionResultType.Failed, result.Type); + Assert.Equal(PolicyRunJobStatus.Failed, result.UpdatedJob.Status); + Assert.Equal("bad request", result.UpdatedJob.LastError); + } + + [Fact] + public async Task ExecuteAsync_NoWork_CompletesJob() + { + var repository = new RecordingPolicyRunJobRepository(); + var client = new StubPolicyRunClient(); + var options = Microsoft.Extensions.Options.Options.Create(CloneOptions()); + var timeProvider = new TestTimeProvider(DateTimeOffset.Parse("2025-10-28T10:00:00Z")); + using var metrics = new SchedulerWorkerMetrics(); + var targeting = new StubPolicyRunTargetingService + { + OnEnsureTargets = job => PolicyRunTargetingResult.NoWork(job, "empty") + }; + var service = new PolicyRunExecutionService(repository, client, options, timeProvider, metrics, targeting, NullLogger<PolicyRunExecutionService>.Instance); + + var job = CreateJob(status: PolicyRunJobStatus.Dispatching, inputs: PolicyRunInputs.Empty) with + { + LeaseOwner = "test-dispatch", + LeaseExpiresAt = timeProvider.GetUtcNow().AddMinutes(1) + }; + + var result = await service.ExecuteAsync(job, CancellationToken.None); + + Assert.Equal(PolicyRunExecutionResultType.NoOp, result.Type); + Assert.Equal(PolicyRunJobStatus.Completed, result.UpdatedJob.Status); + Assert.True(repository.ReplaceCalled); + Assert.Equal("test-dispatch", repository.ExpectedLeaseOwner); + } + + private static PolicyRunJob CreateJob(PolicyRunJobStatus status, int attemptCount = 0, PolicyRunInputs? inputs = null) + { + var resolvedInputs = inputs ?? 
new PolicyRunInputs(sbomSet: new[] { "sbom:S-42" }, captureExplain: true); + var metadata = ImmutableSortedDictionary.Create<string, string>(StringComparer.Ordinal); + return new PolicyRunJob( + SchemaVersion: SchedulerSchemaVersions.PolicyRunJob, + Id: "job_1", + TenantId: "tenant-alpha", + PolicyId: "P-7", + PolicyVersion: 4, + Mode: PolicyRunMode.Incremental, + Priority: PolicyRunPriority.Normal, + PriorityRank: -1, + RunId: "run:P-7:2025", + RequestedBy: "user:cli", + CorrelationId: "corr-1", + Metadata: metadata, + Inputs: resolvedInputs, + QueuedAt: DateTimeOffset.Parse("2025-10-28T09:59:00Z"), + Status: status, + AttemptCount: attemptCount, + LastAttemptAt: null, + LastError: null, + CreatedAt: DateTimeOffset.Parse("2025-10-28T09:58:00Z"), + UpdatedAt: DateTimeOffset.Parse("2025-10-28T09:58:00Z"), + AvailableAt: DateTimeOffset.Parse("2025-10-28T09:59:00Z"), + SubmittedAt: null, + CompletedAt: null, + LeaseOwner: null, + LeaseExpiresAt: null, + CancellationRequested: false, + CancellationRequestedAt: null, + CancellationReason: null, + CancelledAt: null); + } + + private static SchedulerWorkerOptions CloneOptions() + { + return new SchedulerWorkerOptions + { + Policy = new SchedulerWorkerOptions.PolicyOptions + { + Enabled = WorkerOptions.Policy.Enabled, + Dispatch = new SchedulerWorkerOptions.PolicyOptions.DispatchOptions + { + LeaseOwner = WorkerOptions.Policy.Dispatch.LeaseOwner, + BatchSize = WorkerOptions.Policy.Dispatch.BatchSize, + LeaseDuration = WorkerOptions.Policy.Dispatch.LeaseDuration, + IdleDelay = WorkerOptions.Policy.Dispatch.IdleDelay, + MaxAttempts = WorkerOptions.Policy.Dispatch.MaxAttempts, + RetryBackoff = WorkerOptions.Policy.Dispatch.RetryBackoff + }, + Api = new SchedulerWorkerOptions.PolicyOptions.ApiOptions + { + BaseAddress = WorkerOptions.Policy.Api.BaseAddress, + RunsPath = WorkerOptions.Policy.Api.RunsPath, + SimulatePath = WorkerOptions.Policy.Api.SimulatePath, + TenantHeader = WorkerOptions.Policy.Api.TenantHeader, + IdempotencyHeader = WorkerOptions.Policy.Api.IdempotencyHeader, + RequestTimeout = WorkerOptions.Policy.Api.RequestTimeout + }, + Targeting = new SchedulerWorkerOptions.PolicyOptions.TargetingOptions + { + Enabled = WorkerOptions.Policy.Targeting.Enabled, + MaxSboms = WorkerOptions.Policy.Targeting.MaxSboms, + DefaultUsageOnly = WorkerOptions.Policy.Targeting.DefaultUsageOnly + } + } + }; + } + + private sealed class StubPolicyRunTargetingService : IPolicyRunTargetingService + { + public Func<PolicyRunJob, PolicyRunTargetingResult>? OnEnsureTargets { get; set; } + + public Task<PolicyRunTargetingResult> EnsureTargetsAsync(PolicyRunJob job, CancellationToken cancellationToken) + => Task.FromResult(OnEnsureTargets?.Invoke(job) ?? PolicyRunTargetingResult.Unchanged(job)); + } + + private sealed class RecordingPolicyRunJobRepository : IPolicyRunJobRepository + { + public bool ReplaceCalled { get; private set; } + public string? ExpectedLeaseOwner { get; private set; } + public PolicyRunJob? LastJob { get; private set; } + + public Task<PolicyRunJob?> GetAsync(string tenantId, string jobId, IClientSessionHandle? session = null, CancellationToken cancellationToken = default) + => Task.FromResult<PolicyRunJob?>(null); + + public Task<PolicyRunJob?> GetByRunIdAsync(string tenantId, string runId, IClientSessionHandle? session = null, CancellationToken cancellationToken = default) + => Task.FromResult<PolicyRunJob?>(null); + + public Task InsertAsync(PolicyRunJob job, IClientSessionHandle? 
session = null, CancellationToken cancellationToken = default) + { + LastJob = job; + return Task.CompletedTask; + } + + public Task<PolicyRunJob?> LeaseAsync(string leaseOwner, DateTimeOffset now, TimeSpan leaseDuration, int maxAttempts, IClientSessionHandle? session = null, CancellationToken cancellationToken = default) + => Task.FromResult<PolicyRunJob?>(null); + + public Task<bool> ReplaceAsync(PolicyRunJob job, string? expectedLeaseOwner = null, IClientSessionHandle? session = null, CancellationToken cancellationToken = default) + { + ReplaceCalled = true; + ExpectedLeaseOwner = expectedLeaseOwner; + LastJob = job; + return Task.FromResult(true); + } + + public Task<IReadOnlyList<PolicyRunJob>> ListAsync(string tenantId, string? policyId = null, PolicyRunMode? mode = null, IReadOnlyCollection<PolicyRunJobStatus>? statuses = null, DateTimeOffset? queuedAfter = null, int limit = 50, IClientSessionHandle? session = null, CancellationToken cancellationToken = default) + => Task.FromResult<IReadOnlyList<PolicyRunJob>>(Array.Empty<PolicyRunJob>()); + } + + private sealed class StubPolicyRunClient : IPolicyRunClient + { + public PolicyRunSubmissionResult Result { get; set; } = PolicyRunSubmissionResult.Succeeded(null, null); + + public Task<PolicyRunSubmissionResult> SubmitAsync(PolicyRunJob job, PolicyRunRequest request, CancellationToken cancellationToken) + => Task.FromResult(Result); + } + + private sealed class TestTimeProvider : TimeProvider + { + private DateTimeOffset _now; + + public TestTimeProvider(DateTimeOffset now) + { + _now = now; + } + + public override DateTimeOffset GetUtcNow() => _now; + + public void Advance(TimeSpan delta) => _now = _now.Add(delta); + } +} diff --git a/src/StellaOps.Scheduler.Worker.Tests/PolicyRunTargetingServiceTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests/PolicyRunTargetingServiceTests.cs similarity index 97% rename from src/StellaOps.Scheduler.Worker.Tests/PolicyRunTargetingServiceTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests/PolicyRunTargetingServiceTests.cs index 7c2095c8..e5b78546 100644 --- a/src/StellaOps.Scheduler.Worker.Tests/PolicyRunTargetingServiceTests.cs +++ b/src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests/PolicyRunTargetingServiceTests.cs @@ -1,255 +1,255 @@ -using System; -using System.Collections.Generic; -using System.Collections.Immutable; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.Extensions.Options; -using StellaOps.Scheduler.Models; -using StellaOps.Scheduler.Worker; -using StellaOps.Scheduler.Worker.Options; -using StellaOps.Scheduler.Worker.Policy; -using Xunit; - -namespace StellaOps.Scheduler.Worker.Tests; - -public sealed class PolicyRunTargetingServiceTests -{ - [Fact] - public async Task EnsureTargetsAsync_ReturnsUnchanged_ForNonIncrementalJob() - { - var service = CreateService(); - var job = CreateJob(mode: PolicyRunMode.Full); - - var result = await service.EnsureTargetsAsync(job, CancellationToken.None); - - Assert.Equal(PolicyRunTargetingStatus.Unchanged, result.Status); - Assert.Equal(job, result.Job); - } - - [Fact] - public async Task EnsureTargetsAsync_ReturnsUnchanged_WhenSbomSetAlreadyPresent() - { - var service = CreateService(); - var inputs = new PolicyRunInputs(sbomSet: new[] { "sbom:S-1" }); - var job = CreateJob(inputs: inputs); - - var result = await service.EnsureTargetsAsync(job, CancellationToken.None); - - Assert.Equal(PolicyRunTargetingStatus.Unchanged, 
result.Status); - } - - [Fact] - public async Task EnsureTargetsAsync_ReturnsNoWork_WhenNoCandidatesResolved() - { - var impact = new StubImpactTargetingService(); - var service = CreateService(impact); - var metadata = ImmutableSortedDictionary<string, string>.Empty.Add("delta.purls", "pkg:npm/leftpad"); - var job = CreateJob(metadata: metadata, inputs: PolicyRunInputs.Empty); - - var result = await service.EnsureTargetsAsync(job, CancellationToken.None); - - Assert.Equal(PolicyRunTargetingStatus.NoWork, result.Status); - Assert.Equal("no_matches", result.Reason); - } - - [Fact] - public async Task EnsureTargetsAsync_TargetsDirectSboms() - { - var service = CreateService(); - var metadata = ImmutableSortedDictionary<string, string>.Empty.Add("delta.sboms", "sbom:S-2, sbom:S-1, sbom:S-2"); - var job = CreateJob(metadata: metadata, inputs: PolicyRunInputs.Empty); - - var result = await service.EnsureTargetsAsync(job, CancellationToken.None); - - Assert.Equal(PolicyRunTargetingStatus.Targeted, result.Status); - Assert.Equal(new[] { "sbom:S-1", "sbom:S-2" }, result.Job.Inputs.SbomSet); - } - - [Fact] - public async Task EnsureTargetsAsync_TargetsUsingImpactIndex() - { - var impact = new StubImpactTargetingService - { - OnResolveByPurls = (keys, usageOnly, selector, _) => - { - var image = new ImpactImage( - "sha256:111", - "registry", - "repo", - labels: ImmutableSortedDictionary.Create<string, string>(StringComparer.Ordinal).Add("sbomId", "sbom:S-42")); - var impactSet = new ImpactSet( - selector, - new[] { image }, - usageOnly, - DateTimeOffset.UtcNow, - total: 1, - snapshotId: null, - schemaVersion: SchedulerSchemaVersions.ImpactSet); - return ValueTask.FromResult(impactSet); - } - }; - - var service = CreateService(impact); - var metadata = ImmutableSortedDictionary<string, string>.Empty.Add("delta.purls", "pkg:npm/example"); - var job = CreateJob(metadata: metadata, inputs: PolicyRunInputs.Empty); - - var result = await service.EnsureTargetsAsync(job, CancellationToken.None); - - Assert.Equal(PolicyRunTargetingStatus.Targeted, result.Status); - Assert.Equal(new[] { "sbom:S-42" }, result.Job.Inputs.SbomSet); - } - - [Fact] - public async Task EnsureTargetsAsync_FallsBack_WhenLimitExceeded() - { - var service = CreateService(configure: options => options.MaxSboms = 1); - var metadata = ImmutableSortedDictionary<string, string>.Empty.Add("delta.sboms", "sbom:S-1,sbom:S-2"); - var job = CreateJob(metadata: metadata, inputs: PolicyRunInputs.Empty); - - var result = await service.EnsureTargetsAsync(job, CancellationToken.None); - - Assert.Equal(PolicyRunTargetingStatus.Unchanged, result.Status); - } - - [Fact] - public async Task EnsureTargetsAsync_FallbacksToDigest_WhenLabelMissing() - { - var impact = new StubImpactTargetingService - { - OnResolveByVulnerabilities = (ids, usageOnly, selector, _) => - { - var image = new ImpactImage("sha256:aaa", "registry", "repo"); - var impactSet = new ImpactSet( - selector, - new[] { image }, - usageOnly, - DateTimeOffset.UtcNow, - total: 1, - snapshotId: null, - schemaVersion: SchedulerSchemaVersions.ImpactSet); - return ValueTask.FromResult(impactSet); - } - }; - - var service = CreateService(impact); - var metadata = ImmutableSortedDictionary<string, string>.Empty.Add("delta.vulns", "CVE-2025-1234"); - var job = CreateJob(metadata: metadata, inputs: PolicyRunInputs.Empty); - - var result = await service.EnsureTargetsAsync(job, CancellationToken.None); - - Assert.Equal(PolicyRunTargetingStatus.Targeted, result.Status); - Assert.Equal(new[] { 
"sbom:sha256:aaa" }, result.Job.Inputs.SbomSet); - } - - private static PolicyRunTargetingService CreateService( - IImpactTargetingService? impact = null, - Action<SchedulerWorkerOptions.PolicyOptions.TargetingOptions>? configure = null) - { - impact ??= new StubImpactTargetingService(); - var options = CreateOptions(configure); - return new PolicyRunTargetingService( - impact, - Microsoft.Extensions.Options.Options.Create(options), - timeProvider: null, - NullLogger<PolicyRunTargetingService>.Instance); - } - - private static SchedulerWorkerOptions CreateOptions(Action<SchedulerWorkerOptions.PolicyOptions.TargetingOptions>? configure) - { - var options = new SchedulerWorkerOptions - { - Policy = - { - Api = - { - BaseAddress = new Uri("https://policy.example.com"), - RunsPath = "/runs", - SimulatePath = "/simulate" - } - } - }; - - configure?.Invoke(options.Policy.Targeting); - return options; - } - - private static PolicyRunJob CreateJob( - PolicyRunMode mode = PolicyRunMode.Incremental, - ImmutableSortedDictionary<string, string>? metadata = null, - PolicyRunInputs? inputs = null) - { - return new PolicyRunJob( - SchemaVersion: SchedulerSchemaVersions.PolicyRunJob, - Id: "job-1", - TenantId: "tenant-alpha", - PolicyId: "P-7", - PolicyVersion: 4, - Mode: mode, - Priority: PolicyRunPriority.Normal, - PriorityRank: 0, - RunId: null, - RequestedBy: null, - CorrelationId: null, - Metadata: metadata ?? ImmutableSortedDictionary<string, string>.Empty, - Inputs: inputs ?? PolicyRunInputs.Empty, - QueuedAt: DateTimeOffset.UtcNow, - Status: PolicyRunJobStatus.Dispatching, - AttemptCount: 0, - LastAttemptAt: null, - LastError: null, - CreatedAt: DateTimeOffset.UtcNow, - UpdatedAt: DateTimeOffset.UtcNow, - AvailableAt: DateTimeOffset.UtcNow, - SubmittedAt: null, - CompletedAt: null, - LeaseOwner: "lease", - LeaseExpiresAt: DateTimeOffset.UtcNow.AddMinutes(1), - CancellationRequested: false, - CancellationRequestedAt: null, - CancellationReason: null, - CancelledAt: null); - } - - private sealed class StubImpactTargetingService : IImpactTargetingService - { - public Func<IEnumerable<string>, bool, Selector, CancellationToken, ValueTask<ImpactSet>>? OnResolveByPurls { get; set; } - - public Func<IEnumerable<string>, bool, Selector, CancellationToken, ValueTask<ImpactSet>>? 
OnResolveByVulnerabilities { get; set; } - - public ValueTask<ImpactSet> ResolveByPurlsAsync(IEnumerable<string> productKeys, bool usageOnly, Selector selector, CancellationToken cancellationToken = default) - { - if (OnResolveByPurls is null) - { - return ValueTask.FromResult(CreateEmptyImpactSet(selector, usageOnly)); - } - - return OnResolveByPurls(productKeys, usageOnly, selector, cancellationToken); - } - - public ValueTask<ImpactSet> ResolveByVulnerabilitiesAsync(IEnumerable<string> vulnerabilityIds, bool usageOnly, Selector selector, CancellationToken cancellationToken = default) - { - if (OnResolveByVulnerabilities is null) - { - return ValueTask.FromResult(CreateEmptyImpactSet(selector, usageOnly)); - } - - return OnResolveByVulnerabilities(vulnerabilityIds, usageOnly, selector, cancellationToken); - } - - public ValueTask<ImpactSet> ResolveAllAsync(Selector selector, bool usageOnly, CancellationToken cancellationToken = default) - => ValueTask.FromResult(CreateEmptyImpactSet(selector, usageOnly)); - - private static ImpactSet CreateEmptyImpactSet(Selector selector, bool usageOnly) - { - return new ImpactSet( - selector, - ImmutableArray<ImpactImage>.Empty, - usageOnly, - DateTimeOffset.UtcNow, - total: 0, - snapshotId: null, - schemaVersion: SchedulerSchemaVersions.ImpactSet); - } - } -} +using System; +using System.Collections.Generic; +using System.Collections.Immutable; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using StellaOps.Scheduler.Models; +using StellaOps.Scheduler.Worker; +using StellaOps.Scheduler.Worker.Options; +using StellaOps.Scheduler.Worker.Policy; +using Xunit; + +namespace StellaOps.Scheduler.Worker.Tests; + +public sealed class PolicyRunTargetingServiceTests +{ + [Fact] + public async Task EnsureTargetsAsync_ReturnsUnchanged_ForNonIncrementalJob() + { + var service = CreateService(); + var job = CreateJob(mode: PolicyRunMode.Full); + + var result = await service.EnsureTargetsAsync(job, CancellationToken.None); + + Assert.Equal(PolicyRunTargetingStatus.Unchanged, result.Status); + Assert.Equal(job, result.Job); + } + + [Fact] + public async Task EnsureTargetsAsync_ReturnsUnchanged_WhenSbomSetAlreadyPresent() + { + var service = CreateService(); + var inputs = new PolicyRunInputs(sbomSet: new[] { "sbom:S-1" }); + var job = CreateJob(inputs: inputs); + + var result = await service.EnsureTargetsAsync(job, CancellationToken.None); + + Assert.Equal(PolicyRunTargetingStatus.Unchanged, result.Status); + } + + [Fact] + public async Task EnsureTargetsAsync_ReturnsNoWork_WhenNoCandidatesResolved() + { + var impact = new StubImpactTargetingService(); + var service = CreateService(impact); + var metadata = ImmutableSortedDictionary<string, string>.Empty.Add("delta.purls", "pkg:npm/leftpad"); + var job = CreateJob(metadata: metadata, inputs: PolicyRunInputs.Empty); + + var result = await service.EnsureTargetsAsync(job, CancellationToken.None); + + Assert.Equal(PolicyRunTargetingStatus.NoWork, result.Status); + Assert.Equal("no_matches", result.Reason); + } + + [Fact] + public async Task EnsureTargetsAsync_TargetsDirectSboms() + { + var service = CreateService(); + var metadata = ImmutableSortedDictionary<string, string>.Empty.Add("delta.sboms", "sbom:S-2, sbom:S-1, sbom:S-2"); + var job = CreateJob(metadata: metadata, inputs: PolicyRunInputs.Empty); + + var result = await service.EnsureTargetsAsync(job, CancellationToken.None); + + 
Assert.Equal(PolicyRunTargetingStatus.Targeted, result.Status); + Assert.Equal(new[] { "sbom:S-1", "sbom:S-2" }, result.Job.Inputs.SbomSet); + } + + [Fact] + public async Task EnsureTargetsAsync_TargetsUsingImpactIndex() + { + var impact = new StubImpactTargetingService + { + OnResolveByPurls = (keys, usageOnly, selector, _) => + { + var image = new ImpactImage( + "sha256:111", + "registry", + "repo", + labels: ImmutableSortedDictionary.Create<string, string>(StringComparer.Ordinal).Add("sbomId", "sbom:S-42")); + var impactSet = new ImpactSet( + selector, + new[] { image }, + usageOnly, + DateTimeOffset.UtcNow, + total: 1, + snapshotId: null, + schemaVersion: SchedulerSchemaVersions.ImpactSet); + return ValueTask.FromResult(impactSet); + } + }; + + var service = CreateService(impact); + var metadata = ImmutableSortedDictionary<string, string>.Empty.Add("delta.purls", "pkg:npm/example"); + var job = CreateJob(metadata: metadata, inputs: PolicyRunInputs.Empty); + + var result = await service.EnsureTargetsAsync(job, CancellationToken.None); + + Assert.Equal(PolicyRunTargetingStatus.Targeted, result.Status); + Assert.Equal(new[] { "sbom:S-42" }, result.Job.Inputs.SbomSet); + } + + [Fact] + public async Task EnsureTargetsAsync_FallsBack_WhenLimitExceeded() + { + var service = CreateService(configure: options => options.MaxSboms = 1); + var metadata = ImmutableSortedDictionary<string, string>.Empty.Add("delta.sboms", "sbom:S-1,sbom:S-2"); + var job = CreateJob(metadata: metadata, inputs: PolicyRunInputs.Empty); + + var result = await service.EnsureTargetsAsync(job, CancellationToken.None); + + Assert.Equal(PolicyRunTargetingStatus.Unchanged, result.Status); + } + + [Fact] + public async Task EnsureTargetsAsync_FallbacksToDigest_WhenLabelMissing() + { + var impact = new StubImpactTargetingService + { + OnResolveByVulnerabilities = (ids, usageOnly, selector, _) => + { + var image = new ImpactImage("sha256:aaa", "registry", "repo"); + var impactSet = new ImpactSet( + selector, + new[] { image }, + usageOnly, + DateTimeOffset.UtcNow, + total: 1, + snapshotId: null, + schemaVersion: SchedulerSchemaVersions.ImpactSet); + return ValueTask.FromResult(impactSet); + } + }; + + var service = CreateService(impact); + var metadata = ImmutableSortedDictionary<string, string>.Empty.Add("delta.vulns", "CVE-2025-1234"); + var job = CreateJob(metadata: metadata, inputs: PolicyRunInputs.Empty); + + var result = await service.EnsureTargetsAsync(job, CancellationToken.None); + + Assert.Equal(PolicyRunTargetingStatus.Targeted, result.Status); + Assert.Equal(new[] { "sbom:sha256:aaa" }, result.Job.Inputs.SbomSet); + } + + private static PolicyRunTargetingService CreateService( + IImpactTargetingService? impact = null, + Action<SchedulerWorkerOptions.PolicyOptions.TargetingOptions>? configure = null) + { + impact ??= new StubImpactTargetingService(); + var options = CreateOptions(configure); + return new PolicyRunTargetingService( + impact, + Microsoft.Extensions.Options.Options.Create(options), + timeProvider: null, + NullLogger<PolicyRunTargetingService>.Instance); + } + + private static SchedulerWorkerOptions CreateOptions(Action<SchedulerWorkerOptions.PolicyOptions.TargetingOptions>? 
configure) + { + var options = new SchedulerWorkerOptions + { + Policy = + { + Api = + { + BaseAddress = new Uri("https://policy.example.com"), + RunsPath = "/runs", + SimulatePath = "/simulate" + } + } + }; + + configure?.Invoke(options.Policy.Targeting); + return options; + } + + private static PolicyRunJob CreateJob( + PolicyRunMode mode = PolicyRunMode.Incremental, + ImmutableSortedDictionary<string, string>? metadata = null, + PolicyRunInputs? inputs = null) + { + return new PolicyRunJob( + SchemaVersion: SchedulerSchemaVersions.PolicyRunJob, + Id: "job-1", + TenantId: "tenant-alpha", + PolicyId: "P-7", + PolicyVersion: 4, + Mode: mode, + Priority: PolicyRunPriority.Normal, + PriorityRank: 0, + RunId: null, + RequestedBy: null, + CorrelationId: null, + Metadata: metadata ?? ImmutableSortedDictionary<string, string>.Empty, + Inputs: inputs ?? PolicyRunInputs.Empty, + QueuedAt: DateTimeOffset.UtcNow, + Status: PolicyRunJobStatus.Dispatching, + AttemptCount: 0, + LastAttemptAt: null, + LastError: null, + CreatedAt: DateTimeOffset.UtcNow, + UpdatedAt: DateTimeOffset.UtcNow, + AvailableAt: DateTimeOffset.UtcNow, + SubmittedAt: null, + CompletedAt: null, + LeaseOwner: "lease", + LeaseExpiresAt: DateTimeOffset.UtcNow.AddMinutes(1), + CancellationRequested: false, + CancellationRequestedAt: null, + CancellationReason: null, + CancelledAt: null); + } + + private sealed class StubImpactTargetingService : IImpactTargetingService + { + public Func<IEnumerable<string>, bool, Selector, CancellationToken, ValueTask<ImpactSet>>? OnResolveByPurls { get; set; } + + public Func<IEnumerable<string>, bool, Selector, CancellationToken, ValueTask<ImpactSet>>? OnResolveByVulnerabilities { get; set; } + + public ValueTask<ImpactSet> ResolveByPurlsAsync(IEnumerable<string> productKeys, bool usageOnly, Selector selector, CancellationToken cancellationToken = default) + { + if (OnResolveByPurls is null) + { + return ValueTask.FromResult(CreateEmptyImpactSet(selector, usageOnly)); + } + + return OnResolveByPurls(productKeys, usageOnly, selector, cancellationToken); + } + + public ValueTask<ImpactSet> ResolveByVulnerabilitiesAsync(IEnumerable<string> vulnerabilityIds, bool usageOnly, Selector selector, CancellationToken cancellationToken = default) + { + if (OnResolveByVulnerabilities is null) + { + return ValueTask.FromResult(CreateEmptyImpactSet(selector, usageOnly)); + } + + return OnResolveByVulnerabilities(vulnerabilityIds, usageOnly, selector, cancellationToken); + } + + public ValueTask<ImpactSet> ResolveAllAsync(Selector selector, bool usageOnly, CancellationToken cancellationToken = default) + => ValueTask.FromResult(CreateEmptyImpactSet(selector, usageOnly)); + + private static ImpactSet CreateEmptyImpactSet(Selector selector, bool usageOnly) + { + return new ImpactSet( + selector, + ImmutableArray<ImpactImage>.Empty, + usageOnly, + DateTimeOffset.UtcNow, + total: 0, + snapshotId: null, + schemaVersion: SchedulerSchemaVersions.ImpactSet); + } + } +} diff --git a/src/StellaOps.Scheduler.Worker.Tests/RunnerExecutionServiceTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests/RunnerExecutionServiceTests.cs similarity index 100% rename from src/StellaOps.Scheduler.Worker.Tests/RunnerExecutionServiceTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests/RunnerExecutionServiceTests.cs diff --git a/src/StellaOps.Scheduler.Worker.Tests/SchedulerEventPublisherTests.cs b/src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests/SchedulerEventPublisherTests.cs similarity index 100% 
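The scheduler worker tests above pin time with a private `TestTimeProvider` (a `TimeProvider` subclass) so lease-expiry math stays deterministic instead of reading the wall clock. A minimal standalone sketch of that pattern follows; the helper mirrors the one in the test file, the one-minute lease window mirrors the tests' `AddMinutes(1)`, and the rest is illustrative rather than taken from the patch.

```csharp
// Standalone sketch of the fake-clock pattern used by the worker tests above;
// TestTimeProvider mirrors the private test helper, everything else is illustrative.
using System;

var clock = new TestTimeProvider(DateTimeOffset.Parse("2025-10-28T10:00:00Z"));

// Lease expiry is computed against the injected clock, matching the tests' AddMinutes(1).
var leaseExpiresAt = clock.GetUtcNow().AddMinutes(1);

// Advancing the fake clock past the lease window is explicit and repeatable.
clock.Advance(TimeSpan.FromMinutes(2));
Console.WriteLine(clock.GetUtcNow() > leaseExpiresAt); // True

sealed class TestTimeProvider : TimeProvider
{
    private DateTimeOffset _now;

    public TestTimeProvider(DateTimeOffset now) => _now = now;

    public override DateTimeOffset GetUtcNow() => _now;

    public void Advance(TimeSpan delta) => _now = _now.Add(delta);
}
```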
rename from src/StellaOps.Scheduler.Worker.Tests/SchedulerEventPublisherTests.cs rename to src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests/SchedulerEventPublisherTests.cs diff --git a/src/StellaOps.Scheduler.Worker.Tests/StellaOps.Scheduler.Worker.Tests.csproj b/src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests/StellaOps.Scheduler.Worker.Tests.csproj similarity index 57% rename from src/StellaOps.Scheduler.Worker.Tests/StellaOps.Scheduler.Worker.Tests.csproj rename to src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests/StellaOps.Scheduler.Worker.Tests.csproj index a0e77a0d..43a6b078 100644 --- a/src/StellaOps.Scheduler.Worker.Tests/StellaOps.Scheduler.Worker.Tests.csproj +++ b/src/Scheduler/__Tests/StellaOps.Scheduler.Worker.Tests/StellaOps.Scheduler.Worker.Tests.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -13,9 +14,9 @@ <PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" /> </ItemGroup> <ItemGroup> - <ProjectReference Include="../StellaOps.Scheduler.Worker/StellaOps.Scheduler.Worker.csproj" /> - <ProjectReference Include="../StellaOps.Scheduler.Models/StellaOps.Scheduler.Models.csproj" /> - <ProjectReference Include="../StellaOps.Notify.Models/StellaOps.Notify.Models.csproj" /> - <ProjectReference Include="../StellaOps.Notify.Queue/StellaOps.Notify.Queue.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Scheduler.Worker/StellaOps.Scheduler.Worker.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Scheduler.Models/StellaOps.Scheduler.Models.csproj" /> + <ProjectReference Include="../../../Notify/__Libraries/StellaOps.Notify.Models/StellaOps.Notify.Models.csproj" /> + <ProjectReference Include="../../../Notify/__Libraries/StellaOps.Notify.Queue/StellaOps.Notify.Queue.csproj" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Sdk.Generator/AGENTS.md b/src/Sdk/StellaOps.Sdk.Generator/AGENTS.md similarity index 98% rename from src/StellaOps.Sdk.Generator/AGENTS.md rename to src/Sdk/StellaOps.Sdk.Generator/AGENTS.md index f608eec2..5d1b1428 100644 --- a/src/StellaOps.Sdk.Generator/AGENTS.md +++ b/src/Sdk/StellaOps.Sdk.Generator/AGENTS.md @@ -1,15 +1,15 @@ -# SDK Generator Guild Charter - -## Mission -Generate and maintain official StellaOps SDKs across supported languages using reproducible code generation pipelines. - -## Scope -- Manage code generation templates and tooling for TS, Python, Go, Java (C#/Rust follow-ons). -- Implement post-processing hooks for auth helpers, retries, paginators, error mapping, and telemetry. -- Provide language-specific smoke tests, example snippets, and continuous integration. -- Coordinate with Release Guild for publishing and version bumps. - -## Definition of Done -- SDKs regenerate deterministically from `stella.yaml` without manual edits. -- Smoke tests and integration suites run per language in CI. -- Generated code adheres to language-specific style guides and passes lint/format checks. +# SDK Generator Guild Charter + +## Mission +Generate and maintain official StellaOps SDKs across supported languages using reproducible code generation pipelines. + +## Scope +- Manage code generation templates and tooling for TS, Python, Go, Java (C#/Rust follow-ons). +- Implement post-processing hooks for auth helpers, retries, paginators, error mapping, and telemetry. 
+- Provide language-specific smoke tests, example snippets, and continuous integration. +- Coordinate with Release Guild for publishing and version bumps. + +## Definition of Done +- SDKs regenerate deterministically from `stella.yaml` without manual edits. +- Smoke tests and integration suites run per language in CI. +- Generated code adheres to language-specific style guides and passes lint/format checks. diff --git a/src/StellaOps.Sdk.Generator/TASKS.md b/src/Sdk/StellaOps.Sdk.Generator/TASKS.md similarity index 99% rename from src/StellaOps.Sdk.Generator/TASKS.md rename to src/Sdk/StellaOps.Sdk.Generator/TASKS.md index 379b9f3c..203f5d02 100644 --- a/src/StellaOps.Sdk.Generator/TASKS.md +++ b/src/Sdk/StellaOps.Sdk.Generator/TASKS.md @@ -1,21 +1,21 @@ -# SDK Generator Task Board — Epic 17: SDKs & OpenAPI Docs - -## Sprint 62 – Generator Framework -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| SDKGEN-62-001 | TODO | SDK Generator Guild | OAS-61-002 | Choose/pin generator toolchain, set up language template pipeline, and enforce reproducible builds. | Generator outputs deterministic code for sample spec; pipelines documented; lint passes. | -| SDKGEN-62-002 | TODO | SDK Generator Guild | SDKGEN-62-001 | Implement shared post-processing (auth helpers, retries, pagination utilities, telemetry hooks) applied to all languages. | Shared library integrated; unit tests cover helpers; docs updated. | - -## Sprint 63 – Language Alpha Releases -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| SDKGEN-63-001 | TODO | SDK Generator Guild | SDKGEN-62-002 | Ship TypeScript SDK alpha with ESM/CJS builds, typed errors, paginator, streaming helpers. | TS package published to internal registry; smoke tests pass; README generated. | -| SDKGEN-63-002 | TODO | SDK Generator Guild | SDKGEN-62-002 | Ship Python SDK alpha (sync/async clients, type hints, upload/download helpers). | PyPI internal feed updated; mypy/pytest suites pass; docs generated. | -| SDKGEN-63-003 | TODO | SDK Generator Guild | SDKGEN-62-002 | Ship Go SDK alpha with context-first API and streaming helpers. | Go module published; gofmt/govet pass; integration tests run. | -| SDKGEN-63-004 | TODO | SDK Generator Guild | SDKGEN-62-002 | Ship Java SDK alpha (builder pattern, HTTP client abstraction). | Maven package staged; integration tests run; javadoc generated. | - -## Sprint 64 – Harden & Dogfood -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| SDKGEN-64-001 | TODO | SDK Generator Guild, CLI Guild | SDKGEN-63-001 | Switch CLI to consume TS or Go SDK; ensure parity. | CLI builds/tests using SDK; regression suite passes. | -| SDKGEN-64-002 | TODO | SDK Generator Guild, Console Guild | SDKGEN-63-001..4 | Integrate SDKs into Console data providers where feasible. | Console builds with SDK; telemetry recorded; manual QA sign-off. | +# SDK Generator Task Board — Epic 17: SDKs & OpenAPI Docs + +## Sprint 62 – Generator Framework +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| SDKGEN-62-001 | TODO | SDK Generator Guild | OAS-61-002 | Choose/pin generator toolchain, set up language template pipeline, and enforce reproducible builds. 
| Generator outputs deterministic code for sample spec; pipelines documented; lint passes. | +| SDKGEN-62-002 | TODO | SDK Generator Guild | SDKGEN-62-001 | Implement shared post-processing (auth helpers, retries, pagination utilities, telemetry hooks) applied to all languages. | Shared library integrated; unit tests cover helpers; docs updated. | + +## Sprint 63 – Language Alpha Releases +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| SDKGEN-63-001 | TODO | SDK Generator Guild | SDKGEN-62-002 | Ship TypeScript SDK alpha with ESM/CJS builds, typed errors, paginator, streaming helpers. | TS package published to internal registry; smoke tests pass; README generated. | +| SDKGEN-63-002 | TODO | SDK Generator Guild | SDKGEN-62-002 | Ship Python SDK alpha (sync/async clients, type hints, upload/download helpers). | PyPI internal feed updated; mypy/pytest suites pass; docs generated. | +| SDKGEN-63-003 | TODO | SDK Generator Guild | SDKGEN-62-002 | Ship Go SDK alpha with context-first API and streaming helpers. | Go module published; gofmt/govet pass; integration tests run. | +| SDKGEN-63-004 | TODO | SDK Generator Guild | SDKGEN-62-002 | Ship Java SDK alpha (builder pattern, HTTP client abstraction). | Maven package staged; integration tests run; javadoc generated. | + +## Sprint 64 – Harden & Dogfood +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| SDKGEN-64-001 | TODO | SDK Generator Guild, CLI Guild | SDKGEN-63-001 | Switch CLI to consume TS or Go SDK; ensure parity. | CLI builds/tests using SDK; regression suite passes. | +| SDKGEN-64-002 | TODO | SDK Generator Guild, Console Guild | SDKGEN-63-001..4 | Integrate SDKs into Console data providers where feasible. | Console builds with SDK; telemetry recorded; manual QA sign-off. | diff --git a/src/StellaOps.Sdk.Release/AGENTS.md b/src/Sdk/StellaOps.Sdk.Release/AGENTS.md similarity index 98% rename from src/StellaOps.Sdk.Release/AGENTS.md rename to src/Sdk/StellaOps.Sdk.Release/AGENTS.md index 72dc4749..ab68fd5f 100644 --- a/src/StellaOps.Sdk.Release/AGENTS.md +++ b/src/Sdk/StellaOps.Sdk.Release/AGENTS.md @@ -1,15 +1,15 @@ -# SDK Release Guild Charter - -## Mission -Own packaging, signing, publishing, and changelog automation for official StellaOps SDKs and dev portal bundles. - -## Scope -- Manage language-specific release pipelines (npm, PyPI, Maven, Go modules) with provenance signing. -- Automate changelog generation and SemVer version bumps aligned with API releases. -- Coordinate publication of offline bundles for air-gapped environments. -- Operate release dashboards and notification hooks for SDK updates. - -## Definition of Done -- Every SDK release is reproducible, signed, and accompanied by changelog + provenance. -- Registries updated via automated pipeline with rollback strategy. -- Offline bundle creation integrated with Export Center workflows. +# SDK Release Guild Charter + +## Mission +Own packaging, signing, publishing, and changelog automation for official StellaOps SDKs and dev portal bundles. + +## Scope +- Manage language-specific release pipelines (npm, PyPI, Maven, Go modules) with provenance signing. +- Automate changelog generation and SemVer version bumps aligned with API releases. +- Coordinate publication of offline bundles for air-gapped environments. +- Operate release dashboards and notification hooks for SDK updates. 
+ +## Definition of Done +- Every SDK release is reproducible, signed, and accompanied by changelog + provenance. +- Registries updated via automated pipeline with rollback strategy. +- Offline bundle creation integrated with Export Center workflows. diff --git a/src/StellaOps.Sdk.Release/TASKS.md b/src/Sdk/StellaOps.Sdk.Release/TASKS.md similarity index 99% rename from src/StellaOps.Sdk.Release/TASKS.md rename to src/Sdk/StellaOps.Sdk.Release/TASKS.md index b7dc9706..9571e2d3 100644 --- a/src/StellaOps.Sdk.Release/TASKS.md +++ b/src/Sdk/StellaOps.Sdk.Release/TASKS.md @@ -1,13 +1,13 @@ -# SDK Release Task Board — Epic 17: SDKs & OpenAPI Docs - -## Sprint 63 – Pipeline Setup -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| SDKREL-63-001 | TODO | SDK Release Guild | SDKGEN-63-001..4 | Configure CI pipelines for npm, PyPI, Maven Central staging, and Go proxies with signing and provenance attestations. | Pipelines publish to staging registries; provenance artifacts stored; rollback plan documented. | -| SDKREL-63-002 | TODO | SDK Release Guild, API Governance Guild | SDKREL-63-001 | Integrate changelog automation pulling from OAS diffs and generator metadata. | Changelogs generated per release; included in packages; verification tests pass. | - -## Sprint 64 – Release Automation & Notifications -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| SDKREL-64-001 | TODO | SDK Release Guild, Notifications Guild | SDKREL-63-002 | Hook SDK releases into Notifications Studio with scoped announcements and RSS/Atom feeds. | Notification templates live; staging release triggers announcement; docs updated. | -| SDKREL-64-002 | TODO | SDK Release Guild, Export Center Guild | SDKREL-63-001 | Add `devportal --offline` bundle job packaging docs, specs, SDK artifacts for air-gapped users. | Offline bundle generated and verified; Export Center docs updated. | +# SDK Release Task Board — Epic 17: SDKs & OpenAPI Docs + +## Sprint 63 – Pipeline Setup +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| SDKREL-63-001 | TODO | SDK Release Guild | SDKGEN-63-001..4 | Configure CI pipelines for npm, PyPI, Maven Central staging, and Go proxies with signing and provenance attestations. | Pipelines publish to staging registries; provenance artifacts stored; rollback plan documented. | +| SDKREL-63-002 | TODO | SDK Release Guild, API Governance Guild | SDKREL-63-001 | Integrate changelog automation pulling from OAS diffs and generator metadata. | Changelogs generated per release; included in packages; verification tests pass. | + +## Sprint 64 – Release Automation & Notifications +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| SDKREL-64-001 | TODO | SDK Release Guild, Notifications Guild | SDKREL-63-002 | Hook SDK releases into Notifications Studio with scoped announcements and RSS/Atom feeds. | Notification templates live; staging release triggers announcement; docs updated. | +| SDKREL-64-002 | TODO | SDK Release Guild, Export Center Guild | SDKREL-63-001 | Add `devportal --offline` bundle job packaging docs, specs, SDK artifacts for air-gapped users. | Offline bundle generated and verified; Export Center docs updated. 
| diff --git a/src/Signals/StellaOps.Signals.sln b/src/Signals/StellaOps.Signals.sln new file mode 100644 index 00000000..262476b9 --- /dev/null +++ b/src/Signals/StellaOps.Signals.sln @@ -0,0 +1,118 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.0.31903.59 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Signals", "StellaOps.Signals\StellaOps.Signals.csproj", "{DF8EEADB-1C60-46D6-B271-5742FE8F33EC}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Configuration", "..\__Libraries\StellaOps.Configuration\StellaOps.Configuration.csproj", "{4933EA43-D891-4080-A644-5D14F680F6F1}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Authority.Plugins.Abstractions", "..\Authority\StellaOps.Authority\StellaOps.Authority.Plugins.Abstractions\StellaOps.Authority.Plugins.Abstractions.csproj", "{4B663300-18DB-44DA-95FB-7C2B02D7BF69}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cryptography", "..\__Libraries\StellaOps.Cryptography\StellaOps.Cryptography.csproj", "{16E1FBA9-18D0-4912-A36E-691C7BAE0CF7}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Abstractions", "..\Authority\StellaOps.Authority\StellaOps.Auth.Abstractions\StellaOps.Auth.Abstractions.csproj", "{60D01EF6-9E65-447D-86DC-B140731B5513}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.ServerIntegration", "..\Authority\StellaOps.Authority\StellaOps.Auth.ServerIntegration\StellaOps.Auth.ServerIntegration.csproj", "{2283F9AD-83C5-473E-BE71-FAD3A98FB0FE}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.DependencyInjection", "..\__Libraries\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj", "{F7541F2C-CA8E-4D8E-A5DF-06E2E8F87F42}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|Any CPU = Release|Any CPU + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {DF8EEADB-1C60-46D6-B271-5742FE8F33EC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {DF8EEADB-1C60-46D6-B271-5742FE8F33EC}.Debug|Any CPU.Build.0 = Debug|Any CPU + {DF8EEADB-1C60-46D6-B271-5742FE8F33EC}.Debug|x64.ActiveCfg = Debug|Any CPU + {DF8EEADB-1C60-46D6-B271-5742FE8F33EC}.Debug|x64.Build.0 = Debug|Any CPU + {DF8EEADB-1C60-46D6-B271-5742FE8F33EC}.Debug|x86.ActiveCfg = Debug|Any CPU + {DF8EEADB-1C60-46D6-B271-5742FE8F33EC}.Debug|x86.Build.0 = Debug|Any CPU + {DF8EEADB-1C60-46D6-B271-5742FE8F33EC}.Release|Any CPU.ActiveCfg = Release|Any CPU + {DF8EEADB-1C60-46D6-B271-5742FE8F33EC}.Release|Any CPU.Build.0 = Release|Any CPU + {DF8EEADB-1C60-46D6-B271-5742FE8F33EC}.Release|x64.ActiveCfg = Release|Any CPU + {DF8EEADB-1C60-46D6-B271-5742FE8F33EC}.Release|x64.Build.0 = Release|Any CPU + {DF8EEADB-1C60-46D6-B271-5742FE8F33EC}.Release|x86.ActiveCfg = Release|Any CPU + {DF8EEADB-1C60-46D6-B271-5742FE8F33EC}.Release|x86.Build.0 = Release|Any CPU + {4933EA43-D891-4080-A644-5D14F680F6F1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {4933EA43-D891-4080-A644-5D14F680F6F1}.Debug|Any CPU.Build.0 = Debug|Any CPU + {4933EA43-D891-4080-A644-5D14F680F6F1}.Debug|x64.ActiveCfg = Debug|Any CPU + {4933EA43-D891-4080-A644-5D14F680F6F1}.Debug|x64.Build.0 = Debug|Any CPU + 
{4933EA43-D891-4080-A644-5D14F680F6F1}.Debug|x86.ActiveCfg = Debug|Any CPU + {4933EA43-D891-4080-A644-5D14F680F6F1}.Debug|x86.Build.0 = Debug|Any CPU + {4933EA43-D891-4080-A644-5D14F680F6F1}.Release|Any CPU.ActiveCfg = Release|Any CPU + {4933EA43-D891-4080-A644-5D14F680F6F1}.Release|Any CPU.Build.0 = Release|Any CPU + {4933EA43-D891-4080-A644-5D14F680F6F1}.Release|x64.ActiveCfg = Release|Any CPU + {4933EA43-D891-4080-A644-5D14F680F6F1}.Release|x64.Build.0 = Release|Any CPU + {4933EA43-D891-4080-A644-5D14F680F6F1}.Release|x86.ActiveCfg = Release|Any CPU + {4933EA43-D891-4080-A644-5D14F680F6F1}.Release|x86.Build.0 = Release|Any CPU + {4B663300-18DB-44DA-95FB-7C2B02D7BF69}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {4B663300-18DB-44DA-95FB-7C2B02D7BF69}.Debug|Any CPU.Build.0 = Debug|Any CPU + {4B663300-18DB-44DA-95FB-7C2B02D7BF69}.Debug|x64.ActiveCfg = Debug|Any CPU + {4B663300-18DB-44DA-95FB-7C2B02D7BF69}.Debug|x64.Build.0 = Debug|Any CPU + {4B663300-18DB-44DA-95FB-7C2B02D7BF69}.Debug|x86.ActiveCfg = Debug|Any CPU + {4B663300-18DB-44DA-95FB-7C2B02D7BF69}.Debug|x86.Build.0 = Debug|Any CPU + {4B663300-18DB-44DA-95FB-7C2B02D7BF69}.Release|Any CPU.ActiveCfg = Release|Any CPU + {4B663300-18DB-44DA-95FB-7C2B02D7BF69}.Release|Any CPU.Build.0 = Release|Any CPU + {4B663300-18DB-44DA-95FB-7C2B02D7BF69}.Release|x64.ActiveCfg = Release|Any CPU + {4B663300-18DB-44DA-95FB-7C2B02D7BF69}.Release|x64.Build.0 = Release|Any CPU + {4B663300-18DB-44DA-95FB-7C2B02D7BF69}.Release|x86.ActiveCfg = Release|Any CPU + {4B663300-18DB-44DA-95FB-7C2B02D7BF69}.Release|x86.Build.0 = Release|Any CPU + {16E1FBA9-18D0-4912-A36E-691C7BAE0CF7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {16E1FBA9-18D0-4912-A36E-691C7BAE0CF7}.Debug|Any CPU.Build.0 = Debug|Any CPU + {16E1FBA9-18D0-4912-A36E-691C7BAE0CF7}.Debug|x64.ActiveCfg = Debug|Any CPU + {16E1FBA9-18D0-4912-A36E-691C7BAE0CF7}.Debug|x64.Build.0 = Debug|Any CPU + {16E1FBA9-18D0-4912-A36E-691C7BAE0CF7}.Debug|x86.ActiveCfg = Debug|Any CPU + {16E1FBA9-18D0-4912-A36E-691C7BAE0CF7}.Debug|x86.Build.0 = Debug|Any CPU + {16E1FBA9-18D0-4912-A36E-691C7BAE0CF7}.Release|Any CPU.ActiveCfg = Release|Any CPU + {16E1FBA9-18D0-4912-A36E-691C7BAE0CF7}.Release|Any CPU.Build.0 = Release|Any CPU + {16E1FBA9-18D0-4912-A36E-691C7BAE0CF7}.Release|x64.ActiveCfg = Release|Any CPU + {16E1FBA9-18D0-4912-A36E-691C7BAE0CF7}.Release|x64.Build.0 = Release|Any CPU + {16E1FBA9-18D0-4912-A36E-691C7BAE0CF7}.Release|x86.ActiveCfg = Release|Any CPU + {16E1FBA9-18D0-4912-A36E-691C7BAE0CF7}.Release|x86.Build.0 = Release|Any CPU + {60D01EF6-9E65-447D-86DC-B140731B5513}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {60D01EF6-9E65-447D-86DC-B140731B5513}.Debug|Any CPU.Build.0 = Debug|Any CPU + {60D01EF6-9E65-447D-86DC-B140731B5513}.Debug|x64.ActiveCfg = Debug|Any CPU + {60D01EF6-9E65-447D-86DC-B140731B5513}.Debug|x64.Build.0 = Debug|Any CPU + {60D01EF6-9E65-447D-86DC-B140731B5513}.Debug|x86.ActiveCfg = Debug|Any CPU + {60D01EF6-9E65-447D-86DC-B140731B5513}.Debug|x86.Build.0 = Debug|Any CPU + {60D01EF6-9E65-447D-86DC-B140731B5513}.Release|Any CPU.ActiveCfg = Release|Any CPU + {60D01EF6-9E65-447D-86DC-B140731B5513}.Release|Any CPU.Build.0 = Release|Any CPU + {60D01EF6-9E65-447D-86DC-B140731B5513}.Release|x64.ActiveCfg = Release|Any CPU + {60D01EF6-9E65-447D-86DC-B140731B5513}.Release|x64.Build.0 = Release|Any CPU + {60D01EF6-9E65-447D-86DC-B140731B5513}.Release|x86.ActiveCfg = Release|Any CPU + {60D01EF6-9E65-447D-86DC-B140731B5513}.Release|x86.Build.0 = Release|Any CPU + {2283F9AD-83C5-473E-BE71-FAD3A98FB0FE}.Debug|Any 
CPU.ActiveCfg = Debug|Any CPU + {2283F9AD-83C5-473E-BE71-FAD3A98FB0FE}.Debug|Any CPU.Build.0 = Debug|Any CPU + {2283F9AD-83C5-473E-BE71-FAD3A98FB0FE}.Debug|x64.ActiveCfg = Debug|Any CPU + {2283F9AD-83C5-473E-BE71-FAD3A98FB0FE}.Debug|x64.Build.0 = Debug|Any CPU + {2283F9AD-83C5-473E-BE71-FAD3A98FB0FE}.Debug|x86.ActiveCfg = Debug|Any CPU + {2283F9AD-83C5-473E-BE71-FAD3A98FB0FE}.Debug|x86.Build.0 = Debug|Any CPU + {2283F9AD-83C5-473E-BE71-FAD3A98FB0FE}.Release|Any CPU.ActiveCfg = Release|Any CPU + {2283F9AD-83C5-473E-BE71-FAD3A98FB0FE}.Release|Any CPU.Build.0 = Release|Any CPU + {2283F9AD-83C5-473E-BE71-FAD3A98FB0FE}.Release|x64.ActiveCfg = Release|Any CPU + {2283F9AD-83C5-473E-BE71-FAD3A98FB0FE}.Release|x64.Build.0 = Release|Any CPU + {2283F9AD-83C5-473E-BE71-FAD3A98FB0FE}.Release|x86.ActiveCfg = Release|Any CPU + {2283F9AD-83C5-473E-BE71-FAD3A98FB0FE}.Release|x86.Build.0 = Release|Any CPU + {F7541F2C-CA8E-4D8E-A5DF-06E2E8F87F42}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {F7541F2C-CA8E-4D8E-A5DF-06E2E8F87F42}.Debug|Any CPU.Build.0 = Debug|Any CPU + {F7541F2C-CA8E-4D8E-A5DF-06E2E8F87F42}.Debug|x64.ActiveCfg = Debug|Any CPU + {F7541F2C-CA8E-4D8E-A5DF-06E2E8F87F42}.Debug|x64.Build.0 = Debug|Any CPU + {F7541F2C-CA8E-4D8E-A5DF-06E2E8F87F42}.Debug|x86.ActiveCfg = Debug|Any CPU + {F7541F2C-CA8E-4D8E-A5DF-06E2E8F87F42}.Debug|x86.Build.0 = Debug|Any CPU + {F7541F2C-CA8E-4D8E-A5DF-06E2E8F87F42}.Release|Any CPU.ActiveCfg = Release|Any CPU + {F7541F2C-CA8E-4D8E-A5DF-06E2E8F87F42}.Release|Any CPU.Build.0 = Release|Any CPU + {F7541F2C-CA8E-4D8E-A5DF-06E2E8F87F42}.Release|x64.ActiveCfg = Release|Any CPU + {F7541F2C-CA8E-4D8E-A5DF-06E2E8F87F42}.Release|x64.Build.0 = Release|Any CPU + {F7541F2C-CA8E-4D8E-A5DF-06E2E8F87F42}.Release|x86.ActiveCfg = Release|Any CPU + {F7541F2C-CA8E-4D8E-A5DF-06E2E8F87F42}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection +EndGlobal diff --git a/src/StellaOps.Signals/AGENTS.md b/src/Signals/StellaOps.Signals/AGENTS.md similarity index 89% rename from src/StellaOps.Signals/AGENTS.md rename to src/Signals/StellaOps.Signals/AGENTS.md index 1f2f67dc..a6046f03 100644 --- a/src/StellaOps.Signals/AGENTS.md +++ b/src/Signals/StellaOps.Signals/AGENTS.md @@ -1,11 +1,11 @@ -# StellaOps.Signals — Agent Charter - -## Mission -Provide language-agnostic collection, normalization, and scoring of reachability and exploitability signals for Stella Ops. Accept static artifacts (call graphs, symbol references) and runtime context facts, derive normalized reachability states/scores, and expose them to Policy Engine, Web API, and Console without mutating advisory evidence. - -## Expectations -- Maintain deterministic scoring with full provenance (AOC chains). -- Support incremental ingestion (per asset + snapshot) and expose caches for fast policy evaluation. -- Coordinate with SBOM/Policy/Console guilds on schema changes and UI expectations. -- Implement guardrails for large artifacts, authentication, and privacy (no PII). -- Update `TASKS.md`, `SPRINTS.md` as work progresses. +# StellaOps.Signals — Agent Charter + +## Mission +Provide language-agnostic collection, normalization, and scoring of reachability and exploitability signals for Stella Ops. Accept static artifacts (call graphs, symbol references) and runtime context facts, derive normalized reachability states/scores, and expose them to Policy Engine, Web API, and Console without mutating advisory evidence. 
+ +## Expectations +- Maintain deterministic scoring with full provenance (AOC chains). +- Support incremental ingestion (per asset + snapshot) and expose caches for fast policy evaluation. +- Coordinate with SBOM/Policy/Console guilds on schema changes and UI expectations. +- Implement guardrails for large artifacts, authentication, and privacy (no PII). +- Update `TASKS.md`, `../../docs/implplan/SPRINTS.md` as work progresses. diff --git a/src/StellaOps.Signals/Authentication/AnonymousAuthenticationHandler.cs b/src/Signals/StellaOps.Signals/Authentication/AnonymousAuthenticationHandler.cs similarity index 97% rename from src/StellaOps.Signals/Authentication/AnonymousAuthenticationHandler.cs rename to src/Signals/StellaOps.Signals/Authentication/AnonymousAuthenticationHandler.cs index efca2e7b..07879df3 100644 --- a/src/StellaOps.Signals/Authentication/AnonymousAuthenticationHandler.cs +++ b/src/Signals/StellaOps.Signals/Authentication/AnonymousAuthenticationHandler.cs @@ -1,29 +1,29 @@ -using System.Security.Claims; -using System.Text.Encodings.Web; -using Microsoft.AspNetCore.Authentication; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; - -namespace StellaOps.Signals.Authentication; - -/// <summary> -/// Authentication handler used during development fallback. -/// </summary> -internal sealed class AnonymousAuthenticationHandler : AuthenticationHandler<AuthenticationSchemeOptions> -{ - public AnonymousAuthenticationHandler( - IOptionsMonitor<AuthenticationSchemeOptions> options, - ILoggerFactory logger, - UrlEncoder encoder) - : base(options, logger, encoder) - { - } - - protected override Task<AuthenticateResult> HandleAuthenticateAsync() - { - var identity = new ClaimsIdentity(); - var principal = new ClaimsPrincipal(identity); - var ticket = new AuthenticationTicket(principal, Scheme.Name); - return Task.FromResult(AuthenticateResult.Success(ticket)); - } -} +using System.Security.Claims; +using System.Text.Encodings.Web; +using Microsoft.AspNetCore.Authentication; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; + +namespace StellaOps.Signals.Authentication; + +/// <summary> +/// Authentication handler used during development fallback. 
+/// </summary> +internal sealed class AnonymousAuthenticationHandler : AuthenticationHandler<AuthenticationSchemeOptions> +{ + public AnonymousAuthenticationHandler( + IOptionsMonitor<AuthenticationSchemeOptions> options, + ILoggerFactory logger, + UrlEncoder encoder) + : base(options, logger, encoder) + { + } + + protected override Task<AuthenticateResult> HandleAuthenticateAsync() + { + var identity = new ClaimsIdentity(); + var principal = new ClaimsPrincipal(identity); + var ticket = new AuthenticationTicket(principal, Scheme.Name); + return Task.FromResult(AuthenticateResult.Success(ticket)); + } +} diff --git a/src/StellaOps.Signals/Authentication/HeaderScopeAuthorizer.cs b/src/Signals/StellaOps.Signals/Authentication/HeaderScopeAuthorizer.cs similarity index 96% rename from src/StellaOps.Signals/Authentication/HeaderScopeAuthorizer.cs rename to src/Signals/StellaOps.Signals/Authentication/HeaderScopeAuthorizer.cs index 77a71d89..3b83adfb 100644 --- a/src/StellaOps.Signals/Authentication/HeaderScopeAuthorizer.cs +++ b/src/Signals/StellaOps.Signals/Authentication/HeaderScopeAuthorizer.cs @@ -1,61 +1,61 @@ -using System.Security.Claims; -using StellaOps.Auth.Abstractions; - -namespace StellaOps.Signals.Authentication; - -/// <summary> -/// Header-based scope authorizer for development environments. -/// </summary> -internal static class HeaderScopeAuthorizer -{ - internal static bool HasScope(ClaimsPrincipal principal, string requiredScope) - { - if (principal is null || string.IsNullOrWhiteSpace(requiredScope)) - { - return false; - } - - foreach (var claim in principal.FindAll(StellaOpsClaimTypes.Scope)) - { - if (string.IsNullOrWhiteSpace(claim.Value)) - { - continue; - } - - var scopes = claim.Value.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); - foreach (var scope in scopes) - { - if (string.Equals(scope, requiredScope, StringComparison.OrdinalIgnoreCase)) - { - return true; - } - } - } - - foreach (var claim in principal.FindAll(StellaOpsClaimTypes.ScopeItem)) - { - if (string.Equals(claim.Value, requiredScope, StringComparison.OrdinalIgnoreCase)) - { - return true; - } - } - - return false; - } - - internal static ClaimsPrincipal CreatePrincipal(string scopeBuffer) - { - var claims = new List<Claim> - { - new(StellaOpsClaimTypes.Scope, scopeBuffer) - }; - - foreach (var value in scopeBuffer.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)) - { - claims.Add(new Claim(StellaOpsClaimTypes.ScopeItem, value)); - } - - var identity = new ClaimsIdentity(claims, authenticationType: "Header"); - return new ClaimsPrincipal(identity); - } -} +using System.Security.Claims; +using StellaOps.Auth.Abstractions; + +namespace StellaOps.Signals.Authentication; + +/// <summary> +/// Header-based scope authorizer for development environments. 
+/// </summary> +internal static class HeaderScopeAuthorizer +{ + internal static bool HasScope(ClaimsPrincipal principal, string requiredScope) + { + if (principal is null || string.IsNullOrWhiteSpace(requiredScope)) + { + return false; + } + + foreach (var claim in principal.FindAll(StellaOpsClaimTypes.Scope)) + { + if (string.IsNullOrWhiteSpace(claim.Value)) + { + continue; + } + + var scopes = claim.Value.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); + foreach (var scope in scopes) + { + if (string.Equals(scope, requiredScope, StringComparison.OrdinalIgnoreCase)) + { + return true; + } + } + } + + foreach (var claim in principal.FindAll(StellaOpsClaimTypes.ScopeItem)) + { + if (string.Equals(claim.Value, requiredScope, StringComparison.OrdinalIgnoreCase)) + { + return true; + } + } + + return false; + } + + internal static ClaimsPrincipal CreatePrincipal(string scopeBuffer) + { + var claims = new List<Claim> + { + new(StellaOpsClaimTypes.Scope, scopeBuffer) + }; + + foreach (var value in scopeBuffer.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)) + { + claims.Add(new Claim(StellaOpsClaimTypes.ScopeItem, value)); + } + + var identity = new ClaimsIdentity(claims, authenticationType: "Header"); + return new ClaimsPrincipal(identity); + } +} diff --git a/src/StellaOps.Signals/Authentication/TokenScopeAuthorizer.cs b/src/Signals/StellaOps.Signals/Authentication/TokenScopeAuthorizer.cs similarity index 96% rename from src/StellaOps.Signals/Authentication/TokenScopeAuthorizer.cs rename to src/Signals/StellaOps.Signals/Authentication/TokenScopeAuthorizer.cs index 794ad024..62ef30db 100644 --- a/src/StellaOps.Signals/Authentication/TokenScopeAuthorizer.cs +++ b/src/Signals/StellaOps.Signals/Authentication/TokenScopeAuthorizer.cs @@ -1,41 +1,41 @@ -using System.Security.Claims; -using StellaOps.Auth.Abstractions; - -namespace StellaOps.Signals.Authentication; - -/// <summary> -/// Helpers for evaluating token scopes. -/// </summary> -internal static class TokenScopeAuthorizer -{ - internal static bool HasScope(ClaimsPrincipal principal, string requiredScope) - { - foreach (var claim in principal.FindAll(StellaOpsClaimTypes.ScopeItem)) - { - if (string.Equals(claim.Value, requiredScope, StringComparison.OrdinalIgnoreCase)) - { - return true; - } - } - - foreach (var claim in principal.FindAll(StellaOpsClaimTypes.Scope)) - { - if (string.IsNullOrWhiteSpace(claim.Value)) - { - continue; - } - - var parts = claim.Value.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); - foreach (var part in parts) - { - var normalized = StellaOpsScopes.Normalize(part); - if (normalized is not null && string.Equals(normalized, requiredScope, StringComparison.Ordinal)) - { - return true; - } - } - } - - return false; - } -} +using System.Security.Claims; +using StellaOps.Auth.Abstractions; + +namespace StellaOps.Signals.Authentication; + +/// <summary> +/// Helpers for evaluating token scopes. 
+/// </summary> +internal static class TokenScopeAuthorizer +{ + internal static bool HasScope(ClaimsPrincipal principal, string requiredScope) + { + foreach (var claim in principal.FindAll(StellaOpsClaimTypes.ScopeItem)) + { + if (string.Equals(claim.Value, requiredScope, StringComparison.OrdinalIgnoreCase)) + { + return true; + } + } + + foreach (var claim in principal.FindAll(StellaOpsClaimTypes.Scope)) + { + if (string.IsNullOrWhiteSpace(claim.Value)) + { + continue; + } + + var parts = claim.Value.Split(' ', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); + foreach (var part in parts) + { + var normalized = StellaOpsScopes.Normalize(part); + if (normalized is not null && string.Equals(normalized, requiredScope, StringComparison.Ordinal)) + { + return true; + } + } + } + + return false; + } +} diff --git a/src/StellaOps.Signals/Hosting/SignalsStartupState.cs b/src/Signals/StellaOps.Signals/Hosting/SignalsStartupState.cs similarity index 96% rename from src/StellaOps.Signals/Hosting/SignalsStartupState.cs rename to src/Signals/StellaOps.Signals/Hosting/SignalsStartupState.cs index bdafeb6b..64c53ce6 100644 --- a/src/StellaOps.Signals/Hosting/SignalsStartupState.cs +++ b/src/Signals/StellaOps.Signals/Hosting/SignalsStartupState.cs @@ -1,12 +1,12 @@ -namespace StellaOps.Signals.Hosting; - -/// <summary> -/// Tracks Signals service readiness state. -/// </summary> -public sealed class SignalsStartupState -{ - /// <summary> - /// Indicates whether the service is ready to accept requests. - /// </summary> - public bool IsReady { get; set; } = true; -} +namespace StellaOps.Signals.Hosting; + +/// <summary> +/// Tracks Signals service readiness state. +/// </summary> +public sealed class SignalsStartupState +{ + /// <summary> + /// Indicates whether the service is ready to accept requests. + /// </summary> + public bool IsReady { get; set; } = true; +} diff --git a/src/StellaOps.Signals/Models/CallgraphArtifactMetadata.cs b/src/Signals/StellaOps.Signals/Models/CallgraphArtifactMetadata.cs similarity index 96% rename from src/StellaOps.Signals/Models/CallgraphArtifactMetadata.cs rename to src/Signals/StellaOps.Signals/Models/CallgraphArtifactMetadata.cs index d37fe158..d93fb1af 100644 --- a/src/StellaOps.Signals/Models/CallgraphArtifactMetadata.cs +++ b/src/Signals/StellaOps.Signals/Models/CallgraphArtifactMetadata.cs @@ -1,21 +1,21 @@ -using MongoDB.Bson.Serialization.Attributes; - -namespace StellaOps.Signals.Models; - -/// <summary> -/// Metadata describing the stored raw callgraph artifact. -/// </summary> -public sealed class CallgraphArtifactMetadata -{ - [BsonElement("path")] - public string Path { get; set; } = string.Empty; - - [BsonElement("hash")] - public string Hash { get; set; } = string.Empty; - - [BsonElement("contentType")] - public string ContentType { get; set; } = string.Empty; - - [BsonElement("length")] - public long Length { get; set; } -} +using MongoDB.Bson.Serialization.Attributes; + +namespace StellaOps.Signals.Models; + +/// <summary> +/// Metadata describing the stored raw callgraph artifact. 
+/// </summary> +public sealed class CallgraphArtifactMetadata +{ + [BsonElement("path")] + public string Path { get; set; } = string.Empty; + + [BsonElement("hash")] + public string Hash { get; set; } = string.Empty; + + [BsonElement("contentType")] + public string ContentType { get; set; } = string.Empty; + + [BsonElement("length")] + public long Length { get; set; } +} diff --git a/src/StellaOps.Signals/Models/CallgraphDocument.cs b/src/Signals/StellaOps.Signals/Models/CallgraphDocument.cs similarity index 96% rename from src/StellaOps.Signals/Models/CallgraphDocument.cs rename to src/Signals/StellaOps.Signals/Models/CallgraphDocument.cs index 6aee6813..22a296fd 100644 --- a/src/StellaOps.Signals/Models/CallgraphDocument.cs +++ b/src/Signals/StellaOps.Signals/Models/CallgraphDocument.cs @@ -1,41 +1,41 @@ -using System; -using System.Collections.Generic; -using MongoDB.Bson; -using MongoDB.Bson.Serialization.Attributes; - -namespace StellaOps.Signals.Models; - -/// <summary> -/// MongoDB document representing an ingested callgraph. -/// </summary> -public sealed class CallgraphDocument -{ - [BsonId] - [BsonRepresentation(BsonType.ObjectId)] - public string Id { get; set; } = ObjectId.GenerateNewId().ToString(); - - [BsonElement("language")] - public string Language { get; set; } = string.Empty; - - [BsonElement("component")] - public string Component { get; set; } = string.Empty; - - [BsonElement("version")] - public string Version { get; set; } = string.Empty; - - [BsonElement("ingestedAt")] - public DateTimeOffset IngestedAt { get; set; } - - [BsonElement("artifact")] - public CallgraphArtifactMetadata Artifact { get; set; } = new(); - - [BsonElement("nodes")] - public List<CallgraphNode> Nodes { get; set; } = new(); - - [BsonElement("edges")] - public List<CallgraphEdge> Edges { get; set; } = new(); - - [BsonElement("metadata")] - [BsonIgnoreIfNull] - public Dictionary<string, string?>? Metadata { get; set; } -} +using System; +using System.Collections.Generic; +using MongoDB.Bson; +using MongoDB.Bson.Serialization.Attributes; + +namespace StellaOps.Signals.Models; + +/// <summary> +/// MongoDB document representing an ingested callgraph. +/// </summary> +public sealed class CallgraphDocument +{ + [BsonId] + [BsonRepresentation(BsonType.ObjectId)] + public string Id { get; set; } = ObjectId.GenerateNewId().ToString(); + + [BsonElement("language")] + public string Language { get; set; } = string.Empty; + + [BsonElement("component")] + public string Component { get; set; } = string.Empty; + + [BsonElement("version")] + public string Version { get; set; } = string.Empty; + + [BsonElement("ingestedAt")] + public DateTimeOffset IngestedAt { get; set; } + + [BsonElement("artifact")] + public CallgraphArtifactMetadata Artifact { get; set; } = new(); + + [BsonElement("nodes")] + public List<CallgraphNode> Nodes { get; set; } = new(); + + [BsonElement("edges")] + public List<CallgraphEdge> Edges { get; set; } = new(); + + [BsonElement("metadata")] + [BsonIgnoreIfNull] + public Dictionary<string, string?>? 
Metadata { get; set; } +} diff --git a/src/StellaOps.Signals/Models/CallgraphEdge.cs b/src/Signals/StellaOps.Signals/Models/CallgraphEdge.cs similarity index 95% rename from src/StellaOps.Signals/Models/CallgraphEdge.cs rename to src/Signals/StellaOps.Signals/Models/CallgraphEdge.cs index f237ebba..50c411d0 100644 --- a/src/StellaOps.Signals/Models/CallgraphEdge.cs +++ b/src/Signals/StellaOps.Signals/Models/CallgraphEdge.cs @@ -1,9 +1,9 @@ -namespace StellaOps.Signals.Models; - -/// <summary> -/// Normalized callgraph edge. -/// </summary> -public sealed record CallgraphEdge( - string SourceId, - string TargetId, - string Type); +namespace StellaOps.Signals.Models; + +/// <summary> +/// Normalized callgraph edge. +/// </summary> +public sealed record CallgraphEdge( + string SourceId, + string TargetId, + string Type); diff --git a/src/StellaOps.Signals/Models/CallgraphIngestRequest.cs b/src/Signals/StellaOps.Signals/Models/CallgraphIngestRequest.cs similarity index 97% rename from src/StellaOps.Signals/Models/CallgraphIngestRequest.cs rename to src/Signals/StellaOps.Signals/Models/CallgraphIngestRequest.cs index 13b7ce7c..95f4fa34 100644 --- a/src/StellaOps.Signals/Models/CallgraphIngestRequest.cs +++ b/src/Signals/StellaOps.Signals/Models/CallgraphIngestRequest.cs @@ -1,16 +1,16 @@ -using System.Collections.Generic; -using System.ComponentModel.DataAnnotations; - -namespace StellaOps.Signals.Models; - -/// <summary> -/// API request payload for callgraph ingestion. -/// </summary> -public sealed record CallgraphIngestRequest( - [property: Required] string Language, - [property: Required] string Component, - [property: Required] string Version, - [property: Required] string ArtifactContentType, - [property: Required] string ArtifactFileName, - [property: Required] string ArtifactContentBase64, - IReadOnlyDictionary<string, string?>? Metadata); +using System.Collections.Generic; +using System.ComponentModel.DataAnnotations; + +namespace StellaOps.Signals.Models; + +/// <summary> +/// API request payload for callgraph ingestion. +/// </summary> +public sealed record CallgraphIngestRequest( + [property: Required] string Language, + [property: Required] string Component, + [property: Required] string Version, + [property: Required] string ArtifactContentType, + [property: Required] string ArtifactFileName, + [property: Required] string ArtifactContentBase64, + IReadOnlyDictionary<string, string?>? Metadata); diff --git a/src/StellaOps.Signals/Models/CallgraphIngestResponse.cs b/src/Signals/StellaOps.Signals/Models/CallgraphIngestResponse.cs similarity index 96% rename from src/StellaOps.Signals/Models/CallgraphIngestResponse.cs rename to src/Signals/StellaOps.Signals/Models/CallgraphIngestResponse.cs index 7846100e..d8037920 100644 --- a/src/StellaOps.Signals/Models/CallgraphIngestResponse.cs +++ b/src/Signals/StellaOps.Signals/Models/CallgraphIngestResponse.cs @@ -1,9 +1,9 @@ -namespace StellaOps.Signals.Models; - -/// <summary> -/// Response returned after callgraph ingestion. -/// </summary> -public sealed record CallgraphIngestResponse( - string CallgraphId, - string ArtifactPath, - string ArtifactHash); +namespace StellaOps.Signals.Models; + +/// <summary> +/// Response returned after callgraph ingestion. 
+/// </summary> +public sealed record CallgraphIngestResponse( + string CallgraphId, + string ArtifactPath, + string ArtifactHash); diff --git a/src/StellaOps.Signals/Models/CallgraphNode.cs b/src/Signals/StellaOps.Signals/Models/CallgraphNode.cs similarity index 95% rename from src/StellaOps.Signals/Models/CallgraphNode.cs rename to src/Signals/StellaOps.Signals/Models/CallgraphNode.cs index 5f15331e..5de05fdb 100644 --- a/src/StellaOps.Signals/Models/CallgraphNode.cs +++ b/src/Signals/StellaOps.Signals/Models/CallgraphNode.cs @@ -1,12 +1,12 @@ -namespace StellaOps.Signals.Models; - -/// <summary> -/// Normalized callgraph node. -/// </summary> -public sealed record CallgraphNode( - string Id, - string Name, - string Kind, - string? Namespace, - string? File, - int? Line); +namespace StellaOps.Signals.Models; + +/// <summary> +/// Normalized callgraph node. +/// </summary> +public sealed record CallgraphNode( + string Id, + string Name, + string Kind, + string? Namespace, + string? File, + int? Line); diff --git a/src/StellaOps.Signals/Options/SignalsArtifactStorageOptions.cs b/src/Signals/StellaOps.Signals/Options/SignalsArtifactStorageOptions.cs similarity index 96% rename from src/StellaOps.Signals/Options/SignalsArtifactStorageOptions.cs rename to src/Signals/StellaOps.Signals/Options/SignalsArtifactStorageOptions.cs index 8d3634f8..d27bc009 100644 --- a/src/StellaOps.Signals/Options/SignalsArtifactStorageOptions.cs +++ b/src/Signals/StellaOps.Signals/Options/SignalsArtifactStorageOptions.cs @@ -1,26 +1,26 @@ -using System; -using System.IO; - -namespace StellaOps.Signals.Options; - -/// <summary> -/// Artifact storage configuration for Signals callgraph ingestion. -/// </summary> -public sealed class SignalsArtifactStorageOptions -{ - /// <summary> - /// Root directory used to persist raw callgraph artifacts. - /// </summary> - public string RootPath { get; set; } = Path.Combine(AppContext.BaseDirectory, "callgraph-artifacts"); - - /// <summary> - /// Validates the configured values. - /// </summary> - public void Validate() - { - if (string.IsNullOrWhiteSpace(RootPath)) - { - throw new InvalidOperationException("Signals artifact storage path must be configured."); - } - } -} +using System; +using System.IO; + +namespace StellaOps.Signals.Options; + +/// <summary> +/// Artifact storage configuration for Signals callgraph ingestion. +/// </summary> +public sealed class SignalsArtifactStorageOptions +{ + /// <summary> + /// Root directory used to persist raw callgraph artifacts. + /// </summary> + public string RootPath { get; set; } = Path.Combine(AppContext.BaseDirectory, "callgraph-artifacts"); + + /// <summary> + /// Validates the configured values. + /// </summary> + public void Validate() + { + if (string.IsNullOrWhiteSpace(RootPath)) + { + throw new InvalidOperationException("Signals artifact storage path must be configured."); + } + } +} diff --git a/src/StellaOps.Signals/Options/SignalsAuthorityOptions.cs b/src/Signals/StellaOps.Signals/Options/SignalsAuthorityOptions.cs similarity index 96% rename from src/StellaOps.Signals/Options/SignalsAuthorityOptions.cs rename to src/Signals/StellaOps.Signals/Options/SignalsAuthorityOptions.cs index d3f78377..81b1abae 100644 --- a/src/StellaOps.Signals/Options/SignalsAuthorityOptions.cs +++ b/src/Signals/StellaOps.Signals/Options/SignalsAuthorityOptions.cs @@ -1,101 +1,101 @@ -using System; -using System.Collections.Generic; - -namespace StellaOps.Signals.Options; - -/// <summary> -/// Authority configuration for the Signals service. 
-/// </summary> -public sealed class SignalsAuthorityOptions -{ - /// <summary> - /// Enables Authority-backed authentication. - /// </summary> - public bool Enabled { get; set; } - - /// <summary> - /// Allows header-based development fallback when Authority is disabled. - /// </summary> - public bool AllowAnonymousFallback { get; set; } = true; - - /// <summary> - /// Authority issuer URL. - /// </summary> - public string Issuer { get; set; } = string.Empty; - - /// <summary> - /// Indicates whether HTTPS metadata is required. - /// </summary> - public bool RequireHttpsMetadata { get; set; } = true; - - /// <summary> - /// Optional metadata address override. - /// </summary> - public string? MetadataAddress { get; set; } - - /// <summary> - /// Back-channel timeout (seconds). - /// </summary> - public int BackchannelTimeoutSeconds { get; set; } = 30; - - /// <summary> - /// Token clock skew allowance (seconds). - /// </summary> - public int TokenClockSkewSeconds { get; set; } = 60; - - /// <summary> - /// Accepted token audiences. - /// </summary> - public IList<string> Audiences { get; } = new List<string>(); - - /// <summary> - /// Required scopes. - /// </summary> - public IList<string> RequiredScopes { get; } = new List<string>(); - - /// <summary> - /// Required tenants. - /// </summary> - public IList<string> RequiredTenants { get; } = new List<string>(); - - /// <summary> - /// Networks allowed to bypass scope enforcement. - /// </summary> - public IList<string> BypassNetworks { get; } = new List<string>(); - - /// <summary> - /// Validates the configured options. - /// </summary> - public void Validate() - { - if (!Enabled) - { - return; - } - - if (string.IsNullOrWhiteSpace(Issuer)) - { - throw new InvalidOperationException("Signals Authority issuer must be configured when Authority integration is enabled."); - } - - if (!Uri.TryCreate(Issuer.Trim(), UriKind.Absolute, out var issuerUri)) - { - throw new InvalidOperationException("Signals Authority issuer must be an absolute URI."); - } - - if (RequireHttpsMetadata && !issuerUri.IsLoopback && !string.Equals(issuerUri.Scheme, Uri.UriSchemeHttps, StringComparison.OrdinalIgnoreCase)) - { - throw new InvalidOperationException("Signals Authority issuer must use HTTPS unless running on loopback."); - } - - if (BackchannelTimeoutSeconds <= 0) - { - throw new InvalidOperationException("Signals Authority back-channel timeout must be greater than zero seconds."); - } - - if (TokenClockSkewSeconds < 0 || TokenClockSkewSeconds > 300) - { - throw new InvalidOperationException("Signals Authority token clock skew must be between 0 and 300 seconds."); - } - } -} +using System; +using System.Collections.Generic; + +namespace StellaOps.Signals.Options; + +/// <summary> +/// Authority configuration for the Signals service. +/// </summary> +public sealed class SignalsAuthorityOptions +{ + /// <summary> + /// Enables Authority-backed authentication. + /// </summary> + public bool Enabled { get; set; } + + /// <summary> + /// Allows header-based development fallback when Authority is disabled. + /// </summary> + public bool AllowAnonymousFallback { get; set; } = true; + + /// <summary> + /// Authority issuer URL. + /// </summary> + public string Issuer { get; set; } = string.Empty; + + /// <summary> + /// Indicates whether HTTPS metadata is required. + /// </summary> + public bool RequireHttpsMetadata { get; set; } = true; + + /// <summary> + /// Optional metadata address override. + /// </summary> + public string? 
MetadataAddress { get; set; } + + /// <summary> + /// Back-channel timeout (seconds). + /// </summary> + public int BackchannelTimeoutSeconds { get; set; } = 30; + + /// <summary> + /// Token clock skew allowance (seconds). + /// </summary> + public int TokenClockSkewSeconds { get; set; } = 60; + + /// <summary> + /// Accepted token audiences. + /// </summary> + public IList<string> Audiences { get; } = new List<string>(); + + /// <summary> + /// Required scopes. + /// </summary> + public IList<string> RequiredScopes { get; } = new List<string>(); + + /// <summary> + /// Required tenants. + /// </summary> + public IList<string> RequiredTenants { get; } = new List<string>(); + + /// <summary> + /// Networks allowed to bypass scope enforcement. + /// </summary> + public IList<string> BypassNetworks { get; } = new List<string>(); + + /// <summary> + /// Validates the configured options. + /// </summary> + public void Validate() + { + if (!Enabled) + { + return; + } + + if (string.IsNullOrWhiteSpace(Issuer)) + { + throw new InvalidOperationException("Signals Authority issuer must be configured when Authority integration is enabled."); + } + + if (!Uri.TryCreate(Issuer.Trim(), UriKind.Absolute, out var issuerUri)) + { + throw new InvalidOperationException("Signals Authority issuer must be an absolute URI."); + } + + if (RequireHttpsMetadata && !issuerUri.IsLoopback && !string.Equals(issuerUri.Scheme, Uri.UriSchemeHttps, StringComparison.OrdinalIgnoreCase)) + { + throw new InvalidOperationException("Signals Authority issuer must use HTTPS unless running on loopback."); + } + + if (BackchannelTimeoutSeconds <= 0) + { + throw new InvalidOperationException("Signals Authority back-channel timeout must be greater than zero seconds."); + } + + if (TokenClockSkewSeconds < 0 || TokenClockSkewSeconds > 300) + { + throw new InvalidOperationException("Signals Authority token clock skew must be between 0 and 300 seconds."); + } + } +} diff --git a/src/StellaOps.Signals/Options/SignalsAuthorityOptionsConfigurator.cs b/src/Signals/StellaOps.Signals/Options/SignalsAuthorityOptionsConfigurator.cs similarity index 96% rename from src/StellaOps.Signals/Options/SignalsAuthorityOptionsConfigurator.cs rename to src/Signals/StellaOps.Signals/Options/SignalsAuthorityOptionsConfigurator.cs index a0c3c321..d268b826 100644 --- a/src/StellaOps.Signals/Options/SignalsAuthorityOptionsConfigurator.cs +++ b/src/Signals/StellaOps.Signals/Options/SignalsAuthorityOptionsConfigurator.cs @@ -1,38 +1,38 @@ -using System; -using System.Linq; -using StellaOps.Signals.Routing; - -namespace StellaOps.Signals.Options; - -/// <summary> -/// Applies Signals-specific defaults to <see cref="SignalsAuthorityOptions"/>. -/// </summary> -internal static class SignalsAuthorityOptionsConfigurator -{ - /// <summary> - /// Ensures required defaults are populated. 
- /// </summary> - public static void ApplyDefaults(SignalsAuthorityOptions options) - { - ArgumentNullException.ThrowIfNull(options); - - if (!options.Audiences.Any()) - { - options.Audiences.Add("api://signals"); - } - - EnsureScope(options, SignalsPolicies.Read); - EnsureScope(options, SignalsPolicies.Write); - EnsureScope(options, SignalsPolicies.Admin); - } - - private static void EnsureScope(SignalsAuthorityOptions options, string scope) - { - if (options.RequiredScopes.Any(existing => string.Equals(existing, scope, StringComparison.OrdinalIgnoreCase))) - { - return; - } - - options.RequiredScopes.Add(scope); - } -} +using System; +using System.Linq; +using StellaOps.Signals.Routing; + +namespace StellaOps.Signals.Options; + +/// <summary> +/// Applies Signals-specific defaults to <see cref="SignalsAuthorityOptions"/>. +/// </summary> +internal static class SignalsAuthorityOptionsConfigurator +{ + /// <summary> + /// Ensures required defaults are populated. + /// </summary> + public static void ApplyDefaults(SignalsAuthorityOptions options) + { + ArgumentNullException.ThrowIfNull(options); + + if (!options.Audiences.Any()) + { + options.Audiences.Add("api://signals"); + } + + EnsureScope(options, SignalsPolicies.Read); + EnsureScope(options, SignalsPolicies.Write); + EnsureScope(options, SignalsPolicies.Admin); + } + + private static void EnsureScope(SignalsAuthorityOptions options, string scope) + { + if (options.RequiredScopes.Any(existing => string.Equals(existing, scope, StringComparison.OrdinalIgnoreCase))) + { + return; + } + + options.RequiredScopes.Add(scope); + } +} diff --git a/src/StellaOps.Signals/Options/SignalsMongoOptions.cs b/src/Signals/StellaOps.Signals/Options/SignalsMongoOptions.cs similarity index 96% rename from src/StellaOps.Signals/Options/SignalsMongoOptions.cs rename to src/Signals/StellaOps.Signals/Options/SignalsMongoOptions.cs index 120d2478..90461c02 100644 --- a/src/StellaOps.Signals/Options/SignalsMongoOptions.cs +++ b/src/Signals/StellaOps.Signals/Options/SignalsMongoOptions.cs @@ -1,45 +1,45 @@ -using System; - -namespace StellaOps.Signals.Options; - -/// <summary> -/// MongoDB configuration for Signals. -/// </summary> -public sealed class SignalsMongoOptions -{ - /// <summary> - /// MongoDB connection string. - /// </summary> - public string ConnectionString { get; set; } = string.Empty; - - /// <summary> - /// Database name to use when the connection string omits one. - /// </summary> - public string Database { get; set; } = "signals"; - - /// <summary> - /// Collection name storing normalized callgraphs. - /// </summary> - public string CallgraphsCollection { get; set; } = "callgraphs"; - - /// <summary> - /// Validates the configured values. - /// </summary> - public void Validate() - { - if (string.IsNullOrWhiteSpace(ConnectionString)) - { - throw new InvalidOperationException("Signals Mongo connection string must be configured."); - } - - if (string.IsNullOrWhiteSpace(Database)) - { - throw new InvalidOperationException("Signals Mongo database name must be configured."); - } - - if (string.IsNullOrWhiteSpace(CallgraphsCollection)) - { - throw new InvalidOperationException("Signals callgraph collection name must be configured."); - } - } -} +using System; + +namespace StellaOps.Signals.Options; + +/// <summary> +/// MongoDB configuration for Signals. +/// </summary> +public sealed class SignalsMongoOptions +{ + /// <summary> + /// MongoDB connection string. 
+ /// </summary> + public string ConnectionString { get; set; } = string.Empty; + + /// <summary> + /// Database name to use when the connection string omits one. + /// </summary> + public string Database { get; set; } = "signals"; + + /// <summary> + /// Collection name storing normalized callgraphs. + /// </summary> + public string CallgraphsCollection { get; set; } = "callgraphs"; + + /// <summary> + /// Validates the configured values. + /// </summary> + public void Validate() + { + if (string.IsNullOrWhiteSpace(ConnectionString)) + { + throw new InvalidOperationException("Signals Mongo connection string must be configured."); + } + + if (string.IsNullOrWhiteSpace(Database)) + { + throw new InvalidOperationException("Signals Mongo database name must be configured."); + } + + if (string.IsNullOrWhiteSpace(CallgraphsCollection)) + { + throw new InvalidOperationException("Signals callgraph collection name must be configured."); + } + } +} diff --git a/src/StellaOps.Signals/Options/SignalsOptions.cs b/src/Signals/StellaOps.Signals/Options/SignalsOptions.cs similarity index 95% rename from src/StellaOps.Signals/Options/SignalsOptions.cs rename to src/Signals/StellaOps.Signals/Options/SignalsOptions.cs index be16dab1..34a9574e 100644 --- a/src/StellaOps.Signals/Options/SignalsOptions.cs +++ b/src/Signals/StellaOps.Signals/Options/SignalsOptions.cs @@ -1,37 +1,37 @@ -namespace StellaOps.Signals.Options; - -/// <summary> -/// Root configuration for the Signals service. -/// </summary> -public sealed class SignalsOptions -{ - /// <summary> - /// Configuration section name. - /// </summary> - public const string SectionName = "Signals"; - - /// <summary> - /// Authority integration settings. - /// </summary> - public SignalsAuthorityOptions Authority { get; } = new(); - - /// <summary> - /// MongoDB configuration. - /// </summary> - public SignalsMongoOptions Mongo { get; } = new(); - - /// <summary> - /// Artifact storage configuration. - /// </summary> - public SignalsArtifactStorageOptions Storage { get; } = new(); - - /// <summary> - /// Validates configured options. - /// </summary> - public void Validate() - { - Authority.Validate(); - Mongo.Validate(); - Storage.Validate(); - } -} +namespace StellaOps.Signals.Options; + +/// <summary> +/// Root configuration for the Signals service. +/// </summary> +public sealed class SignalsOptions +{ + /// <summary> + /// Configuration section name. + /// </summary> + public const string SectionName = "Signals"; + + /// <summary> + /// Authority integration settings. + /// </summary> + public SignalsAuthorityOptions Authority { get; } = new(); + + /// <summary> + /// MongoDB configuration. + /// </summary> + public SignalsMongoOptions Mongo { get; } = new(); + + /// <summary> + /// Artifact storage configuration. + /// </summary> + public SignalsArtifactStorageOptions Storage { get; } = new(); + + /// <summary> + /// Validates configured options. 
+ /// </summary> + public void Validate() + { + Authority.Validate(); + Mongo.Validate(); + Storage.Validate(); + } +} diff --git a/src/StellaOps.Signals/Parsing/CallgraphParseResult.cs b/src/Signals/StellaOps.Signals/Parsing/CallgraphParseResult.cs similarity index 96% rename from src/StellaOps.Signals/Parsing/CallgraphParseResult.cs rename to src/Signals/StellaOps.Signals/Parsing/CallgraphParseResult.cs index c12aec94..4ffeaa09 100644 --- a/src/StellaOps.Signals/Parsing/CallgraphParseResult.cs +++ b/src/Signals/StellaOps.Signals/Parsing/CallgraphParseResult.cs @@ -1,12 +1,12 @@ -using System.Collections.Generic; -using StellaOps.Signals.Models; - -namespace StellaOps.Signals.Parsing; - -/// <summary> -/// Result produced by a callgraph parser. -/// </summary> -public sealed record CallgraphParseResult( - IReadOnlyList<CallgraphNode> Nodes, - IReadOnlyList<CallgraphEdge> Edges, - string FormatVersion); +using System.Collections.Generic; +using StellaOps.Signals.Models; + +namespace StellaOps.Signals.Parsing; + +/// <summary> +/// Result produced by a callgraph parser. +/// </summary> +public sealed record CallgraphParseResult( + IReadOnlyList<CallgraphNode> Nodes, + IReadOnlyList<CallgraphEdge> Edges, + string FormatVersion); diff --git a/src/StellaOps.Signals/Parsing/CallgraphParserNotFoundException.cs b/src/Signals/StellaOps.Signals/Parsing/CallgraphParserNotFoundException.cs similarity index 96% rename from src/StellaOps.Signals/Parsing/CallgraphParserNotFoundException.cs rename to src/Signals/StellaOps.Signals/Parsing/CallgraphParserNotFoundException.cs index 6a58b7ca..7035f528 100644 --- a/src/StellaOps.Signals/Parsing/CallgraphParserNotFoundException.cs +++ b/src/Signals/StellaOps.Signals/Parsing/CallgraphParserNotFoundException.cs @@ -1,17 +1,17 @@ -using System; - -namespace StellaOps.Signals.Parsing; - -/// <summary> -/// Exception thrown when a parser is not registered for the requested language. -/// </summary> -public sealed class CallgraphParserNotFoundException : Exception -{ - public CallgraphParserNotFoundException(string language) - : base($"No callgraph parser registered for language '{language}'.") - { - Language = language; - } - - public string Language { get; } -} +using System; + +namespace StellaOps.Signals.Parsing; + +/// <summary> +/// Exception thrown when a parser is not registered for the requested language. +/// </summary> +public sealed class CallgraphParserNotFoundException : Exception +{ + public CallgraphParserNotFoundException(string language) + : base($"No callgraph parser registered for language '{language}'.") + { + Language = language; + } + + public string Language { get; } +} diff --git a/src/StellaOps.Signals/Parsing/CallgraphParserValidationException.cs b/src/Signals/StellaOps.Signals/Parsing/CallgraphParserValidationException.cs similarity index 95% rename from src/StellaOps.Signals/Parsing/CallgraphParserValidationException.cs rename to src/Signals/StellaOps.Signals/Parsing/CallgraphParserValidationException.cs index 8fb6be7c..16f36c99 100644 --- a/src/StellaOps.Signals/Parsing/CallgraphParserValidationException.cs +++ b/src/Signals/StellaOps.Signals/Parsing/CallgraphParserValidationException.cs @@ -1,14 +1,14 @@ -using System; - -namespace StellaOps.Signals.Parsing; - -/// <summary> -/// Exception thrown when a callgraph artifact is invalid. 
-/// </summary> -public sealed class CallgraphParserValidationException : Exception -{ - public CallgraphParserValidationException(string message) - : base(message) - { - } -} +using System; + +namespace StellaOps.Signals.Parsing; + +/// <summary> +/// Exception thrown when a callgraph artifact is invalid. +/// </summary> +public sealed class CallgraphParserValidationException : Exception +{ + public CallgraphParserValidationException(string message) + : base(message) + { + } +} diff --git a/src/StellaOps.Signals/Parsing/ICallgraphParser.cs b/src/Signals/StellaOps.Signals/Parsing/ICallgraphParser.cs similarity index 96% rename from src/StellaOps.Signals/Parsing/ICallgraphParser.cs rename to src/Signals/StellaOps.Signals/Parsing/ICallgraphParser.cs index 60ec4e99..95b6ac03 100644 --- a/src/StellaOps.Signals/Parsing/ICallgraphParser.cs +++ b/src/Signals/StellaOps.Signals/Parsing/ICallgraphParser.cs @@ -1,21 +1,21 @@ -using System.IO; -using System.Threading; -using System.Threading.Tasks; - -namespace StellaOps.Signals.Parsing; - -/// <summary> -/// Parses raw callgraph artifacts into normalized structures. -/// </summary> -public interface ICallgraphParser -{ - /// <summary> - /// Language identifier handled by the parser (e.g., java, nodejs). - /// </summary> - string Language { get; } - - /// <summary> - /// Parses the supplied artifact stream. - /// </summary> - Task<CallgraphParseResult> ParseAsync(Stream artifactStream, CancellationToken cancellationToken); -} +using System.IO; +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Signals.Parsing; + +/// <summary> +/// Parses raw callgraph artifacts into normalized structures. +/// </summary> +public interface ICallgraphParser +{ + /// <summary> + /// Language identifier handled by the parser (e.g., java, nodejs). + /// </summary> + string Language { get; } + + /// <summary> + /// Parses the supplied artifact stream. + /// </summary> + Task<CallgraphParseResult> ParseAsync(Stream artifactStream, CancellationToken cancellationToken); +} diff --git a/src/StellaOps.Signals/Parsing/ICallgraphParserResolver.cs b/src/Signals/StellaOps.Signals/Parsing/ICallgraphParserResolver.cs similarity index 96% rename from src/StellaOps.Signals/Parsing/ICallgraphParserResolver.cs rename to src/Signals/StellaOps.Signals/Parsing/ICallgraphParserResolver.cs index d234d81a..bd8c9514 100644 --- a/src/StellaOps.Signals/Parsing/ICallgraphParserResolver.cs +++ b/src/Signals/StellaOps.Signals/Parsing/ICallgraphParserResolver.cs @@ -1,45 +1,45 @@ -using System; -using System.Collections.Generic; - -namespace StellaOps.Signals.Parsing; - -/// <summary> -/// Resolves callgraph parsers for specific languages. -/// </summary> -public interface ICallgraphParserResolver -{ - /// <summary> - /// Resolves a parser for the supplied language. 
- /// </summary> - ICallgraphParser Resolve(string language); -} - -internal sealed class CallgraphParserResolver : ICallgraphParserResolver -{ - private readonly IReadOnlyDictionary<string, ICallgraphParser> parsersByLanguage; - - public CallgraphParserResolver(IEnumerable<ICallgraphParser> parsers) - { - ArgumentNullException.ThrowIfNull(parsers); - - var map = new Dictionary<string, ICallgraphParser>(StringComparer.OrdinalIgnoreCase); - foreach (var parser in parsers) - { - map[parser.Language] = parser; - } - - parsersByLanguage = map; - } - - public ICallgraphParser Resolve(string language) - { - ArgumentException.ThrowIfNullOrWhiteSpace(language); - - if (parsersByLanguage.TryGetValue(language, out var parser)) - { - return parser; - } - - throw new CallgraphParserNotFoundException(language); - } -} +using System; +using System.Collections.Generic; + +namespace StellaOps.Signals.Parsing; + +/// <summary> +/// Resolves callgraph parsers for specific languages. +/// </summary> +public interface ICallgraphParserResolver +{ + /// <summary> + /// Resolves a parser for the supplied language. + /// </summary> + ICallgraphParser Resolve(string language); +} + +internal sealed class CallgraphParserResolver : ICallgraphParserResolver +{ + private readonly IReadOnlyDictionary<string, ICallgraphParser> parsersByLanguage; + + public CallgraphParserResolver(IEnumerable<ICallgraphParser> parsers) + { + ArgumentNullException.ThrowIfNull(parsers); + + var map = new Dictionary<string, ICallgraphParser>(StringComparer.OrdinalIgnoreCase); + foreach (var parser in parsers) + { + map[parser.Language] = parser; + } + + parsersByLanguage = map; + } + + public ICallgraphParser Resolve(string language) + { + ArgumentException.ThrowIfNullOrWhiteSpace(language); + + if (parsersByLanguage.TryGetValue(language, out var parser)) + { + return parser; + } + + throw new CallgraphParserNotFoundException(language); + } +} diff --git a/src/StellaOps.Signals/Parsing/SimpleJsonCallgraphParser.cs b/src/Signals/StellaOps.Signals/Parsing/SimpleJsonCallgraphParser.cs similarity index 97% rename from src/StellaOps.Signals/Parsing/SimpleJsonCallgraphParser.cs rename to src/Signals/StellaOps.Signals/Parsing/SimpleJsonCallgraphParser.cs index 6aa50ea5..e416f9c5 100644 --- a/src/StellaOps.Signals/Parsing/SimpleJsonCallgraphParser.cs +++ b/src/Signals/StellaOps.Signals/Parsing/SimpleJsonCallgraphParser.cs @@ -1,119 +1,119 @@ -using System; -using System.Collections.Generic; -using System.IO; -using System.Text.Json; -using System.Threading; -using System.Threading.Tasks; -using StellaOps.Signals.Models; - -namespace StellaOps.Signals.Parsing; - -/// <summary> -/// Simple JSON-based callgraph parser used for initial language coverage. 
-/// </summary> -internal sealed class SimpleJsonCallgraphParser : ICallgraphParser -{ - private readonly JsonSerializerOptions serializerOptions; - - public SimpleJsonCallgraphParser(string language) - { - ArgumentException.ThrowIfNullOrWhiteSpace(language); - Language = language; - serializerOptions = new JsonSerializerOptions - { - PropertyNameCaseInsensitive = true - }; - } - - public string Language { get; } - - public async Task<CallgraphParseResult> ParseAsync(Stream artifactStream, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(artifactStream); - - var payload = await JsonSerializer.DeserializeAsync<RawCallgraphPayload>( - artifactStream, - serializerOptions, - cancellationToken).ConfigureAwait(false); - - if (payload is null) - { - throw new CallgraphParserValidationException("Callgraph artifact payload is empty."); - } - - if (payload.Graph is null) - { - throw new CallgraphParserValidationException("Callgraph artifact is missing 'graph' section."); - } - - if (payload.Graph.Nodes is null || payload.Graph.Nodes.Count == 0) - { - throw new CallgraphParserValidationException("Callgraph artifact must include at least one node."); - } - - if (payload.Graph.Edges is null) - { - payload.Graph.Edges = new List<RawCallgraphEdge>(); - } - - var nodes = new List<CallgraphNode>(payload.Graph.Nodes.Count); - foreach (var node in payload.Graph.Nodes) - { - if (string.IsNullOrWhiteSpace(node.Id)) - { - throw new CallgraphParserValidationException("Callgraph node is missing an id."); - } - - nodes.Add(new CallgraphNode( - Id: node.Id.Trim(), - Name: node.Name ?? node.Id.Trim(), - Kind: node.Kind ?? "function", - Namespace: node.Namespace, - File: node.File, - Line: node.Line)); - } - - var edges = new List<CallgraphEdge>(payload.Graph.Edges.Count); - foreach (var edge in payload.Graph.Edges) - { - if (string.IsNullOrWhiteSpace(edge.Source) || string.IsNullOrWhiteSpace(edge.Target)) - { - throw new CallgraphParserValidationException("Callgraph edge requires both source and target."); - } - - edges.Add(new CallgraphEdge(edge.Source.Trim(), edge.Target.Trim(), edge.Type ?? "call")); - } - - var formatVersion = string.IsNullOrWhiteSpace(payload.FormatVersion) ? "1.0" : payload.FormatVersion.Trim(); - return new CallgraphParseResult(nodes, edges, formatVersion); - } - - private sealed class RawCallgraphPayload - { - public string? FormatVersion { get; set; } - public RawCallgraphGraph? Graph { get; set; } - } - - private sealed class RawCallgraphGraph - { - public List<RawCallgraphNode>? Nodes { get; set; } - public List<RawCallgraphEdge>? Edges { get; set; } - } - - private sealed class RawCallgraphNode - { - public string? Id { get; set; } - public string? Name { get; set; } - public string? Kind { get; set; } - public string? Namespace { get; set; } - public string? File { get; set; } - public int? Line { get; set; } - } - - private sealed class RawCallgraphEdge - { - public string? Source { get; set; } - public string? Target { get; set; } - public string? Type { get; set; } - } -} +using System; +using System.Collections.Generic; +using System.IO; +using System.Text.Json; +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Signals.Models; + +namespace StellaOps.Signals.Parsing; + +/// <summary> +/// Simple JSON-based callgraph parser used for initial language coverage. 
+/// </summary> +internal sealed class SimpleJsonCallgraphParser : ICallgraphParser +{ + private readonly JsonSerializerOptions serializerOptions; + + public SimpleJsonCallgraphParser(string language) + { + ArgumentException.ThrowIfNullOrWhiteSpace(language); + Language = language; + serializerOptions = new JsonSerializerOptions + { + PropertyNameCaseInsensitive = true + }; + } + + public string Language { get; } + + public async Task<CallgraphParseResult> ParseAsync(Stream artifactStream, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(artifactStream); + + var payload = await JsonSerializer.DeserializeAsync<RawCallgraphPayload>( + artifactStream, + serializerOptions, + cancellationToken).ConfigureAwait(false); + + if (payload is null) + { + throw new CallgraphParserValidationException("Callgraph artifact payload is empty."); + } + + if (payload.Graph is null) + { + throw new CallgraphParserValidationException("Callgraph artifact is missing 'graph' section."); + } + + if (payload.Graph.Nodes is null || payload.Graph.Nodes.Count == 0) + { + throw new CallgraphParserValidationException("Callgraph artifact must include at least one node."); + } + + if (payload.Graph.Edges is null) + { + payload.Graph.Edges = new List<RawCallgraphEdge>(); + } + + var nodes = new List<CallgraphNode>(payload.Graph.Nodes.Count); + foreach (var node in payload.Graph.Nodes) + { + if (string.IsNullOrWhiteSpace(node.Id)) + { + throw new CallgraphParserValidationException("Callgraph node is missing an id."); + } + + nodes.Add(new CallgraphNode( + Id: node.Id.Trim(), + Name: node.Name ?? node.Id.Trim(), + Kind: node.Kind ?? "function", + Namespace: node.Namespace, + File: node.File, + Line: node.Line)); + } + + var edges = new List<CallgraphEdge>(payload.Graph.Edges.Count); + foreach (var edge in payload.Graph.Edges) + { + if (string.IsNullOrWhiteSpace(edge.Source) || string.IsNullOrWhiteSpace(edge.Target)) + { + throw new CallgraphParserValidationException("Callgraph edge requires both source and target."); + } + + edges.Add(new CallgraphEdge(edge.Source.Trim(), edge.Target.Trim(), edge.Type ?? "call")); + } + + var formatVersion = string.IsNullOrWhiteSpace(payload.FormatVersion) ? "1.0" : payload.FormatVersion.Trim(); + return new CallgraphParseResult(nodes, edges, formatVersion); + } + + private sealed class RawCallgraphPayload + { + public string? FormatVersion { get; set; } + public RawCallgraphGraph? Graph { get; set; } + } + + private sealed class RawCallgraphGraph + { + public List<RawCallgraphNode>? Nodes { get; set; } + public List<RawCallgraphEdge>? Edges { get; set; } + } + + private sealed class RawCallgraphNode + { + public string? Id { get; set; } + public string? Name { get; set; } + public string? Kind { get; set; } + public string? Namespace { get; set; } + public string? File { get; set; } + public int? Line { get; set; } + } + + private sealed class RawCallgraphEdge + { + public string? Source { get; set; } + public string? Target { get; set; } + public string? 
Type { get; set; } + } +} diff --git a/src/StellaOps.Signals/Persistence/ICallgraphRepository.cs b/src/Signals/StellaOps.Signals/Persistence/ICallgraphRepository.cs similarity index 96% rename from src/StellaOps.Signals/Persistence/ICallgraphRepository.cs rename to src/Signals/StellaOps.Signals/Persistence/ICallgraphRepository.cs index 7d2ae128..21b28721 100644 --- a/src/StellaOps.Signals/Persistence/ICallgraphRepository.cs +++ b/src/Signals/StellaOps.Signals/Persistence/ICallgraphRepository.cs @@ -1,13 +1,13 @@ -using System.Threading; -using System.Threading.Tasks; -using StellaOps.Signals.Models; - -namespace StellaOps.Signals.Persistence; - -/// <summary> -/// Persists normalized callgraphs. -/// </summary> -public interface ICallgraphRepository -{ - Task<CallgraphDocument> UpsertAsync(CallgraphDocument document, CancellationToken cancellationToken); -} +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Signals.Models; + +namespace StellaOps.Signals.Persistence; + +/// <summary> +/// Persists normalized callgraphs. +/// </summary> +public interface ICallgraphRepository +{ + Task<CallgraphDocument> UpsertAsync(CallgraphDocument document, CancellationToken cancellationToken); +} diff --git a/src/StellaOps.Signals/Persistence/MongoCallgraphRepository.cs b/src/Signals/StellaOps.Signals/Persistence/MongoCallgraphRepository.cs similarity index 97% rename from src/StellaOps.Signals/Persistence/MongoCallgraphRepository.cs rename to src/Signals/StellaOps.Signals/Persistence/MongoCallgraphRepository.cs index 9124d6aa..a4b15bc8 100644 --- a/src/StellaOps.Signals/Persistence/MongoCallgraphRepository.cs +++ b/src/Signals/StellaOps.Signals/Persistence/MongoCallgraphRepository.cs @@ -1,48 +1,48 @@ -using System; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using MongoDB.Bson; -using MongoDB.Driver; -using StellaOps.Signals.Models; - -namespace StellaOps.Signals.Persistence; - -internal sealed class MongoCallgraphRepository : ICallgraphRepository -{ - private readonly IMongoCollection<CallgraphDocument> collection; - private readonly ILogger<MongoCallgraphRepository> logger; - - public MongoCallgraphRepository(IMongoCollection<CallgraphDocument> collection, ILogger<MongoCallgraphRepository> logger) - { - this.collection = collection ?? throw new ArgumentNullException(nameof(collection)); - this.logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); - } - - public async Task<CallgraphDocument> UpsertAsync(CallgraphDocument document, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(document); - - var filter = Builders<CallgraphDocument>.Filter.Eq(d => d.Component, document.Component) - & Builders<CallgraphDocument>.Filter.Eq(d => d.Version, document.Version) - & Builders<CallgraphDocument>.Filter.Eq(d => d.Language, document.Language); - - if (string.IsNullOrWhiteSpace(document.Id)) - { - document.Id = ObjectId.GenerateNewId().ToString(); - } - - document.IngestedAt = DateTimeOffset.UtcNow; - - var options = new ReplaceOptions { IsUpsert = true }; - var result = await collection.ReplaceOneAsync(filter, document, options, cancellationToken).ConfigureAwait(false); - - if (result.UpsertedId != null) - { - document.Id = result.UpsertedId.AsObjectId.ToString(); - } - - logger.LogInformation("Upserted callgraph {Language}:{Component}:{Version} (id={Id}).", document.Language, document.Component, document.Version, document.Id); - return document; - } -} +using System; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using MongoDB.Bson; +using MongoDB.Driver; +using StellaOps.Signals.Models; + +namespace StellaOps.Signals.Persistence; + +internal sealed class MongoCallgraphRepository : ICallgraphRepository +{ + private readonly IMongoCollection<CallgraphDocument> collection; + private readonly ILogger<MongoCallgraphRepository> logger; + + public MongoCallgraphRepository(IMongoCollection<CallgraphDocument> collection, ILogger<MongoCallgraphRepository> logger) + { + this.collection = collection ?? throw new ArgumentNullException(nameof(collection)); + this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public async Task<CallgraphDocument> UpsertAsync(CallgraphDocument document, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(document); + + var filter = Builders<CallgraphDocument>.Filter.Eq(d => d.Component, document.Component) + & Builders<CallgraphDocument>.Filter.Eq(d => d.Version, document.Version) + & Builders<CallgraphDocument>.Filter.Eq(d => d.Language, document.Language); + + if (string.IsNullOrWhiteSpace(document.Id)) + { + document.Id = ObjectId.GenerateNewId().ToString(); + } + + document.IngestedAt = DateTimeOffset.UtcNow; + + var options = new ReplaceOptions { IsUpsert = true }; + var result = await collection.ReplaceOneAsync(filter, document, options, cancellationToken).ConfigureAwait(false); + + if (result.UpsertedId != null) + { + document.Id = result.UpsertedId.AsObjectId.ToString(); + } + + logger.LogInformation("Upserted callgraph {Language}:{Component}:{Version} (id={Id}).", document.Language, document.Component, document.Version, document.Id); + return document; + } +} diff --git a/src/StellaOps.Signals/Program.cs b/src/Signals/StellaOps.Signals/Program.cs similarity index 97% rename from src/StellaOps.Signals/Program.cs rename to src/Signals/StellaOps.Signals/Program.cs index 4cb960bd..eb7a5bd2 100644 --- a/src/StellaOps.Signals/Program.cs +++ b/src/Signals/StellaOps.Signals/Program.cs @@ -1,313 +1,313 @@ -using System.IO; -using Microsoft.AspNetCore.Authentication; -using Microsoft.Extensions.Options; -using MongoDB.Driver; -using NetEscapades.Configuration.Yaml; -using StellaOps.Auth.Abstractions; -using StellaOps.Auth.ServerIntegration; -using StellaOps.Configuration; -using StellaOps.Signals.Authentication; -using StellaOps.Signals.Hosting; 
-using StellaOps.Signals.Models; -using StellaOps.Signals.Options; -using StellaOps.Signals.Parsing; -using StellaOps.Signals.Persistence; -using StellaOps.Signals.Routing; -using StellaOps.Signals.Services; -using StellaOps.Signals.Storage; - -var builder = WebApplication.CreateBuilder(args); - -builder.Configuration.AddStellaOpsDefaults(options => -{ - options.BasePath = builder.Environment.ContentRootPath; - options.EnvironmentPrefix = "SIGNALS_"; - options.ConfigureBuilder = configurationBuilder => - { - var contentRoot = builder.Environment.ContentRootPath; - foreach (var relative in new[] - { - "../etc/signals.yaml", - "../etc/signals.local.yaml", - "signals.yaml", - "signals.local.yaml" - }) - { - var path = Path.Combine(contentRoot, relative); - configurationBuilder.AddYamlFile(path, optional: true); - } - }; -}); - -var bootstrap = builder.Configuration.BindOptions<SignalsOptions>( - SignalsOptions.SectionName, - static (options, _) => - { - SignalsAuthorityOptionsConfigurator.ApplyDefaults(options.Authority); - options.Validate(); - }); - -builder.Services.AddOptions<SignalsOptions>() - .Bind(builder.Configuration.GetSection(SignalsOptions.SectionName)) - .PostConfigure(static options => - { - SignalsAuthorityOptionsConfigurator.ApplyDefaults(options.Authority); - options.Validate(); - }) - .Validate(static options => - { - try - { - options.Validate(); - return true; - } - catch (Exception ex) - { - throw new OptionsValidationException( - SignalsOptions.SectionName, - typeof(SignalsOptions), - new[] { ex.Message }); - } - }) - .ValidateOnStart(); - -builder.Services.AddSingleton(sp => sp.GetRequiredService<IOptions<SignalsOptions>>().Value); -builder.Services.AddSingleton<SignalsStartupState>(); -builder.Services.AddSingleton(TimeProvider.System); -builder.Services.AddProblemDetails(); -builder.Services.AddHealthChecks(); -builder.Services.AddRouting(options => options.LowercaseUrls = true); - -builder.Services.AddSingleton<IMongoClient>(sp => -{ - var opts = sp.GetRequiredService<IOptions<SignalsOptions>>().Value; - return new MongoClient(opts.Mongo.ConnectionString); -}); - -builder.Services.AddSingleton<IMongoDatabase>(sp => -{ - var opts = sp.GetRequiredService<IOptions<SignalsOptions>>().Value; - var mongoClient = sp.GetRequiredService<IMongoClient>(); - var mongoUrl = MongoUrl.Create(opts.Mongo.ConnectionString); - var databaseName = string.IsNullOrWhiteSpace(mongoUrl.DatabaseName) ? 
opts.Mongo.Database : mongoUrl.DatabaseName; - return mongoClient.GetDatabase(databaseName); -}); - -builder.Services.AddSingleton<IMongoCollection<CallgraphDocument>>(sp => -{ - var opts = sp.GetRequiredService<IOptions<SignalsOptions>>().Value; - var database = sp.GetRequiredService<IMongoDatabase>(); - var collection = database.GetCollection<CallgraphDocument>(opts.Mongo.CallgraphsCollection); - EnsureCallgraphIndexes(collection); - return collection; -}); - -builder.Services.AddSingleton<ICallgraphRepository, MongoCallgraphRepository>(); -builder.Services.AddSingleton<ICallgraphArtifactStore, FileSystemCallgraphArtifactStore>(); -builder.Services.AddSingleton<ICallgraphParser>(new SimpleJsonCallgraphParser("java")); -builder.Services.AddSingleton<ICallgraphParser>(new SimpleJsonCallgraphParser("nodejs")); -builder.Services.AddSingleton<ICallgraphParser>(new SimpleJsonCallgraphParser("python")); -builder.Services.AddSingleton<ICallgraphParser>(new SimpleJsonCallgraphParser("go")); -builder.Services.AddSingleton<ICallgraphParserResolver, CallgraphParserResolver>(); -builder.Services.AddSingleton<ICallgraphIngestionService, CallgraphIngestionService>(); - -if (bootstrap.Authority.Enabled) -{ - builder.Services.AddHttpContextAccessor(); - builder.Services.AddStellaOpsScopeHandler(); - builder.Services.AddAuthorization(options => - { - options.AddStellaOpsScopePolicy(SignalsPolicies.Read, SignalsPolicies.Read); - options.AddStellaOpsScopePolicy(SignalsPolicies.Write, SignalsPolicies.Write); - options.AddStellaOpsScopePolicy(SignalsPolicies.Admin, SignalsPolicies.Admin); - }); - builder.Services.AddStellaOpsResourceServerAuthentication( - builder.Configuration, - configurationSection: $"{SignalsOptions.SectionName}:Authority", - configure: resourceOptions => - { - resourceOptions.Authority = bootstrap.Authority.Issuer; - resourceOptions.RequireHttpsMetadata = bootstrap.Authority.RequireHttpsMetadata; - resourceOptions.MetadataAddress = bootstrap.Authority.MetadataAddress; - resourceOptions.BackchannelTimeout = TimeSpan.FromSeconds(bootstrap.Authority.BackchannelTimeoutSeconds); - resourceOptions.TokenClockSkew = TimeSpan.FromSeconds(bootstrap.Authority.TokenClockSkewSeconds); - - resourceOptions.Audiences.Clear(); - foreach (var audience in bootstrap.Authority.Audiences) - { - resourceOptions.Audiences.Add(audience); - } - - resourceOptions.RequiredScopes.Clear(); - foreach (var scope in bootstrap.Authority.RequiredScopes) - { - resourceOptions.RequiredScopes.Add(scope); - } - - foreach (var tenant in bootstrap.Authority.RequiredTenants) - { - resourceOptions.RequiredTenants.Add(tenant); - } - - foreach (var network in bootstrap.Authority.BypassNetworks) - { - resourceOptions.BypassNetworks.Add(network); - } - }); - -} -else -{ - builder.Services.AddAuthorization(); - builder.Services.AddAuthentication(options => - { - options.DefaultAuthenticateScheme = "Anonymous"; - options.DefaultChallengeScheme = "Anonymous"; - }).AddScheme<AuthenticationSchemeOptions, AnonymousAuthenticationHandler>("Anonymous", static _ => { }); -} - -var app = builder.Build(); - -if (!bootstrap.Authority.Enabled) -{ - app.Logger.LogWarning("Signals Authority authentication is disabled; relying on header-based development fallback."); -} - -app.UseAuthentication(); -app.UseAuthorization(); - -app.MapHealthChecks("/healthz").AllowAnonymous(); -app.MapGet("/readyz", static (SignalsStartupState state) => - state.IsReady ? 
Results.Ok(new { status = "ready" }) : Results.StatusCode(StatusCodes.Status503ServiceUnavailable)) - .AllowAnonymous(); - -var fallbackAllowed = !bootstrap.Authority.Enabled || bootstrap.Authority.AllowAnonymousFallback; - -var signalsGroup = app.MapGroup("/signals"); - -signalsGroup.MapGet("/ping", (HttpContext context, SignalsOptions options) => - Program.TryAuthorize(context, requiredScope: SignalsPolicies.Read, fallbackAllowed: options.Authority.AllowAnonymousFallback, out var failure) - ? Results.NoContent() - : failure ?? Results.Unauthorized()).WithName("SignalsPing"); - -signalsGroup.MapGet("/status", (HttpContext context, SignalsOptions options) => - Program.TryAuthorize(context, SignalsPolicies.Read, options.Authority.AllowAnonymousFallback, out var failure) - ? Results.Ok(new - { - service = "signals", - version = typeof(Program).Assembly.GetName().Version?.ToString() ?? "unknown" - }) - : failure ?? Results.Unauthorized()).WithName("SignalsStatus"); - -signalsGroup.MapPost("/callgraphs", async Task<IResult> ( - HttpContext context, - SignalsOptions options, - CallgraphIngestRequest request, - ICallgraphIngestionService ingestionService, - CancellationToken cancellationToken) => -{ - if (!Program.TryAuthorize(context, SignalsPolicies.Write, options.Authority.AllowAnonymousFallback, out var failure)) - { - return failure ?? Results.Unauthorized(); - } - - try - { - var result = await ingestionService.IngestAsync(request, cancellationToken).ConfigureAwait(false); - return Results.Accepted($"/signals/callgraphs/{result.CallgraphId}", result); - } - catch (CallgraphIngestionValidationException ex) - { - return Results.BadRequest(new { error = ex.Message }); - } - catch (CallgraphParserNotFoundException ex) - { - return Results.BadRequest(new { error = ex.Message }); - } - catch (CallgraphParserValidationException ex) - { - return Results.UnprocessableEntity(new { error = ex.Message }); - } - catch (FormatException ex) - { - return Results.BadRequest(new { error = ex.Message }); - } -}).WithName("SignalsCallgraphIngest"); - -signalsGroup.MapPost("/runtime-facts", (HttpContext context, SignalsOptions options) => - Program.TryAuthorize(context, SignalsPolicies.Write, options.Authority.AllowAnonymousFallback, out var failure) - ? Results.StatusCode(StatusCodes.Status501NotImplemented) - : failure ?? Results.Unauthorized()).WithName("SignalsRuntimeIngest"); - -signalsGroup.MapPost("/reachability/recompute", (HttpContext context, SignalsOptions options) => - Program.TryAuthorize(context, SignalsPolicies.Admin, options.Authority.AllowAnonymousFallback, out var failure) - ? Results.StatusCode(StatusCodes.Status501NotImplemented) - : failure ?? Results.Unauthorized()).WithName("SignalsReachabilityRecompute"); - -app.Run(); - -public partial class Program -{ - internal static bool TryAuthorize(HttpContext httpContext, string requiredScope, bool fallbackAllowed, out IResult? 
failure) - { - if (httpContext.User?.Identity?.IsAuthenticated == true) - { - if (TokenScopeAuthorizer.HasScope(httpContext.User, requiredScope)) - { - failure = null; - return true; - } - - failure = Results.StatusCode(StatusCodes.Status403Forbidden); - return false; - } - - if (!fallbackAllowed) - { - failure = Results.Unauthorized(); - return false; - } - - if (!httpContext.Request.Headers.TryGetValue("X-Scopes", out var scopesHeader) || - string.IsNullOrWhiteSpace(scopesHeader.ToString())) - { - failure = Results.Unauthorized(); - return false; - } - - var principal = HeaderScopeAuthorizer.CreatePrincipal(scopesHeader.ToString()); - if (HeaderScopeAuthorizer.HasScope(principal, requiredScope)) - { - failure = null; - return true; - } - - failure = Results.StatusCode(StatusCodes.Status403Forbidden); - return false; - } - - internal static void EnsureCallgraphIndexes(IMongoCollection<CallgraphDocument> collection) - { - ArgumentNullException.ThrowIfNull(collection); - - try - { - var indexKeys = Builders<CallgraphDocument>.IndexKeys - .Ascending(document => document.Component) - .Ascending(document => document.Version) - .Ascending(document => document.Language); - - var model = new CreateIndexModel<CallgraphDocument>(indexKeys, new CreateIndexOptions - { - Name = "callgraphs_component_version_language_unique", - Unique = true - }); - - collection.Indexes.CreateOne(model); - } - catch (MongoCommandException ex) when (string.Equals(ex.CodeName, "IndexOptionsConflict", StringComparison.Ordinal)) - { - // Index already exists with different options – ignore to keep startup idempotent. - } - } -} +using System.IO; +using Microsoft.AspNetCore.Authentication; +using Microsoft.Extensions.Options; +using MongoDB.Driver; +using NetEscapades.Configuration.Yaml; +using StellaOps.Auth.Abstractions; +using StellaOps.Auth.ServerIntegration; +using StellaOps.Configuration; +using StellaOps.Signals.Authentication; +using StellaOps.Signals.Hosting; +using StellaOps.Signals.Models; +using StellaOps.Signals.Options; +using StellaOps.Signals.Parsing; +using StellaOps.Signals.Persistence; +using StellaOps.Signals.Routing; +using StellaOps.Signals.Services; +using StellaOps.Signals.Storage; + +var builder = WebApplication.CreateBuilder(args); + +builder.Configuration.AddStellaOpsDefaults(options => +{ + options.BasePath = builder.Environment.ContentRootPath; + options.EnvironmentPrefix = "SIGNALS_"; + options.ConfigureBuilder = configurationBuilder => + { + var contentRoot = builder.Environment.ContentRootPath; + foreach (var relative in new[] + { + "../etc/signals.yaml", + "../etc/signals.local.yaml", + "signals.yaml", + "signals.local.yaml" + }) + { + var path = Path.Combine(contentRoot, relative); + configurationBuilder.AddYamlFile(path, optional: true); + } + }; +}); + +var bootstrap = builder.Configuration.BindOptions<SignalsOptions>( + SignalsOptions.SectionName, + static (options, _) => + { + SignalsAuthorityOptionsConfigurator.ApplyDefaults(options.Authority); + options.Validate(); + }); + +builder.Services.AddOptions<SignalsOptions>() + .Bind(builder.Configuration.GetSection(SignalsOptions.SectionName)) + .PostConfigure(static options => + { + SignalsAuthorityOptionsConfigurator.ApplyDefaults(options.Authority); + options.Validate(); + }) + .Validate(static options => + { + try + { + options.Validate(); + return true; + } + catch (Exception ex) + { + throw new OptionsValidationException( + SignalsOptions.SectionName, + typeof(SignalsOptions), + new[] { ex.Message }); + } + }) + .ValidateOnStart(); + 
+builder.Services.AddSingleton(sp => sp.GetRequiredService<IOptions<SignalsOptions>>().Value); +builder.Services.AddSingleton<SignalsStartupState>(); +builder.Services.AddSingleton(TimeProvider.System); +builder.Services.AddProblemDetails(); +builder.Services.AddHealthChecks(); +builder.Services.AddRouting(options => options.LowercaseUrls = true); + +builder.Services.AddSingleton<IMongoClient>(sp => +{ + var opts = sp.GetRequiredService<IOptions<SignalsOptions>>().Value; + return new MongoClient(opts.Mongo.ConnectionString); +}); + +builder.Services.AddSingleton<IMongoDatabase>(sp => +{ + var opts = sp.GetRequiredService<IOptions<SignalsOptions>>().Value; + var mongoClient = sp.GetRequiredService<IMongoClient>(); + var mongoUrl = MongoUrl.Create(opts.Mongo.ConnectionString); + var databaseName = string.IsNullOrWhiteSpace(mongoUrl.DatabaseName) ? opts.Mongo.Database : mongoUrl.DatabaseName; + return mongoClient.GetDatabase(databaseName); +}); + +builder.Services.AddSingleton<IMongoCollection<CallgraphDocument>>(sp => +{ + var opts = sp.GetRequiredService<IOptions<SignalsOptions>>().Value; + var database = sp.GetRequiredService<IMongoDatabase>(); + var collection = database.GetCollection<CallgraphDocument>(opts.Mongo.CallgraphsCollection); + EnsureCallgraphIndexes(collection); + return collection; +}); + +builder.Services.AddSingleton<ICallgraphRepository, MongoCallgraphRepository>(); +builder.Services.AddSingleton<ICallgraphArtifactStore, FileSystemCallgraphArtifactStore>(); +builder.Services.AddSingleton<ICallgraphParser>(new SimpleJsonCallgraphParser("java")); +builder.Services.AddSingleton<ICallgraphParser>(new SimpleJsonCallgraphParser("nodejs")); +builder.Services.AddSingleton<ICallgraphParser>(new SimpleJsonCallgraphParser("python")); +builder.Services.AddSingleton<ICallgraphParser>(new SimpleJsonCallgraphParser("go")); +builder.Services.AddSingleton<ICallgraphParserResolver, CallgraphParserResolver>(); +builder.Services.AddSingleton<ICallgraphIngestionService, CallgraphIngestionService>(); + +if (bootstrap.Authority.Enabled) +{ + builder.Services.AddHttpContextAccessor(); + builder.Services.AddStellaOpsScopeHandler(); + builder.Services.AddAuthorization(options => + { + options.AddStellaOpsScopePolicy(SignalsPolicies.Read, SignalsPolicies.Read); + options.AddStellaOpsScopePolicy(SignalsPolicies.Write, SignalsPolicies.Write); + options.AddStellaOpsScopePolicy(SignalsPolicies.Admin, SignalsPolicies.Admin); + }); + builder.Services.AddStellaOpsResourceServerAuthentication( + builder.Configuration, + configurationSection: $"{SignalsOptions.SectionName}:Authority", + configure: resourceOptions => + { + resourceOptions.Authority = bootstrap.Authority.Issuer; + resourceOptions.RequireHttpsMetadata = bootstrap.Authority.RequireHttpsMetadata; + resourceOptions.MetadataAddress = bootstrap.Authority.MetadataAddress; + resourceOptions.BackchannelTimeout = TimeSpan.FromSeconds(bootstrap.Authority.BackchannelTimeoutSeconds); + resourceOptions.TokenClockSkew = TimeSpan.FromSeconds(bootstrap.Authority.TokenClockSkewSeconds); + + resourceOptions.Audiences.Clear(); + foreach (var audience in bootstrap.Authority.Audiences) + { + resourceOptions.Audiences.Add(audience); + } + + resourceOptions.RequiredScopes.Clear(); + foreach (var scope in bootstrap.Authority.RequiredScopes) + { + resourceOptions.RequiredScopes.Add(scope); + } + + foreach (var tenant in bootstrap.Authority.RequiredTenants) + { + resourceOptions.RequiredTenants.Add(tenant); + } + + foreach (var network in 
bootstrap.Authority.BypassNetworks) + { + resourceOptions.BypassNetworks.Add(network); + } + }); + +} +else +{ + builder.Services.AddAuthorization(); + builder.Services.AddAuthentication(options => + { + options.DefaultAuthenticateScheme = "Anonymous"; + options.DefaultChallengeScheme = "Anonymous"; + }).AddScheme<AuthenticationSchemeOptions, AnonymousAuthenticationHandler>("Anonymous", static _ => { }); +} + +var app = builder.Build(); + +if (!bootstrap.Authority.Enabled) +{ + app.Logger.LogWarning("Signals Authority authentication is disabled; relying on header-based development fallback."); +} + +app.UseAuthentication(); +app.UseAuthorization(); + +app.MapHealthChecks("/healthz").AllowAnonymous(); +app.MapGet("/readyz", static (SignalsStartupState state) => + state.IsReady ? Results.Ok(new { status = "ready" }) : Results.StatusCode(StatusCodes.Status503ServiceUnavailable)) + .AllowAnonymous(); + +var fallbackAllowed = !bootstrap.Authority.Enabled || bootstrap.Authority.AllowAnonymousFallback; + +var signalsGroup = app.MapGroup("/signals"); + +signalsGroup.MapGet("/ping", (HttpContext context, SignalsOptions options) => + Program.TryAuthorize(context, requiredScope: SignalsPolicies.Read, fallbackAllowed: options.Authority.AllowAnonymousFallback, out var failure) + ? Results.NoContent() + : failure ?? Results.Unauthorized()).WithName("SignalsPing"); + +signalsGroup.MapGet("/status", (HttpContext context, SignalsOptions options) => + Program.TryAuthorize(context, SignalsPolicies.Read, options.Authority.AllowAnonymousFallback, out var failure) + ? Results.Ok(new + { + service = "signals", + version = typeof(Program).Assembly.GetName().Version?.ToString() ?? "unknown" + }) + : failure ?? Results.Unauthorized()).WithName("SignalsStatus"); + +signalsGroup.MapPost("/callgraphs", async Task<IResult> ( + HttpContext context, + SignalsOptions options, + CallgraphIngestRequest request, + ICallgraphIngestionService ingestionService, + CancellationToken cancellationToken) => +{ + if (!Program.TryAuthorize(context, SignalsPolicies.Write, options.Authority.AllowAnonymousFallback, out var failure)) + { + return failure ?? Results.Unauthorized(); + } + + try + { + var result = await ingestionService.IngestAsync(request, cancellationToken).ConfigureAwait(false); + return Results.Accepted($"/signals/callgraphs/{result.CallgraphId}", result); + } + catch (CallgraphIngestionValidationException ex) + { + return Results.BadRequest(new { error = ex.Message }); + } + catch (CallgraphParserNotFoundException ex) + { + return Results.BadRequest(new { error = ex.Message }); + } + catch (CallgraphParserValidationException ex) + { + return Results.UnprocessableEntity(new { error = ex.Message }); + } + catch (FormatException ex) + { + return Results.BadRequest(new { error = ex.Message }); + } +}).WithName("SignalsCallgraphIngest"); + +signalsGroup.MapPost("/runtime-facts", (HttpContext context, SignalsOptions options) => + Program.TryAuthorize(context, SignalsPolicies.Write, options.Authority.AllowAnonymousFallback, out var failure) + ? Results.StatusCode(StatusCodes.Status501NotImplemented) + : failure ?? Results.Unauthorized()).WithName("SignalsRuntimeIngest"); + +signalsGroup.MapPost("/reachability/recompute", (HttpContext context, SignalsOptions options) => + Program.TryAuthorize(context, SignalsPolicies.Admin, options.Authority.AllowAnonymousFallback, out var failure) + ? Results.StatusCode(StatusCodes.Status501NotImplemented) + : failure ?? 
Results.Unauthorized()).WithName("SignalsReachabilityRecompute"); + +app.Run(); + +public partial class Program +{ + internal static bool TryAuthorize(HttpContext httpContext, string requiredScope, bool fallbackAllowed, out IResult? failure) + { + if (httpContext.User?.Identity?.IsAuthenticated == true) + { + if (TokenScopeAuthorizer.HasScope(httpContext.User, requiredScope)) + { + failure = null; + return true; + } + + failure = Results.StatusCode(StatusCodes.Status403Forbidden); + return false; + } + + if (!fallbackAllowed) + { + failure = Results.Unauthorized(); + return false; + } + + if (!httpContext.Request.Headers.TryGetValue("X-Scopes", out var scopesHeader) || + string.IsNullOrWhiteSpace(scopesHeader.ToString())) + { + failure = Results.Unauthorized(); + return false; + } + + var principal = HeaderScopeAuthorizer.CreatePrincipal(scopesHeader.ToString()); + if (HeaderScopeAuthorizer.HasScope(principal, requiredScope)) + { + failure = null; + return true; + } + + failure = Results.StatusCode(StatusCodes.Status403Forbidden); + return false; + } + + internal static void EnsureCallgraphIndexes(IMongoCollection<CallgraphDocument> collection) + { + ArgumentNullException.ThrowIfNull(collection); + + try + { + var indexKeys = Builders<CallgraphDocument>.IndexKeys + .Ascending(document => document.Component) + .Ascending(document => document.Version) + .Ascending(document => document.Language); + + var model = new CreateIndexModel<CallgraphDocument>(indexKeys, new CreateIndexOptions + { + Name = "callgraphs_component_version_language_unique", + Unique = true + }); + + collection.Indexes.CreateOne(model); + } + catch (MongoCommandException ex) when (string.Equals(ex.CodeName, "IndexOptionsConflict", StringComparison.Ordinal)) + { + // Index already exists with different options – ignore to keep startup idempotent. + } + } +} diff --git a/src/StellaOps.Signals/Routing/SignalsPolicies.cs b/src/Signals/StellaOps.Signals/Routing/SignalsPolicies.cs similarity index 96% rename from src/StellaOps.Signals/Routing/SignalsPolicies.cs rename to src/Signals/StellaOps.Signals/Routing/SignalsPolicies.cs index 945d1b2e..e8f5a958 100644 --- a/src/StellaOps.Signals/Routing/SignalsPolicies.cs +++ b/src/Signals/StellaOps.Signals/Routing/SignalsPolicies.cs @@ -1,22 +1,22 @@ -namespace StellaOps.Signals.Routing; - -/// <summary> -/// Signals service authorization policy names and scope constants. -/// </summary> -public static class SignalsPolicies -{ - /// <summary> - /// Scope required for read operations. - /// </summary> - public const string Read = "signals:read"; - - /// <summary> - /// Scope required for write operations. - /// </summary> - public const string Write = "signals:write"; - - /// <summary> - /// Scope required for administrative operations. - /// </summary> - public const string Admin = "signals:admin"; -} +namespace StellaOps.Signals.Routing; + +/// <summary> +/// Signals service authorization policy names and scope constants. +/// </summary> +public static class SignalsPolicies +{ + /// <summary> + /// Scope required for read operations. + /// </summary> + public const string Read = "signals:read"; + + /// <summary> + /// Scope required for write operations. + /// </summary> + public const string Write = "signals:write"; + + /// <summary> + /// Scope required for administrative operations. 
+ /// </summary> + public const string Admin = "signals:admin"; +} diff --git a/src/StellaOps.Signals/Services/CallgraphIngestionService.cs b/src/Signals/StellaOps.Signals/Services/CallgraphIngestionService.cs similarity index 97% rename from src/StellaOps.Signals/Services/CallgraphIngestionService.cs rename to src/Signals/StellaOps.Signals/Services/CallgraphIngestionService.cs index ad8af40e..8c47227a 100644 --- a/src/StellaOps.Signals/Services/CallgraphIngestionService.cs +++ b/src/Signals/StellaOps.Signals/Services/CallgraphIngestionService.cs @@ -1,162 +1,162 @@ -using System; -using System.Collections.Generic; -using System.IO; -using System.Security.Cryptography; -using System.Text; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Signals.Models; -using StellaOps.Signals.Options; -using StellaOps.Signals.Parsing; -using StellaOps.Signals.Persistence; -using StellaOps.Signals.Storage; -using StellaOps.Signals.Storage.Models; - -namespace StellaOps.Signals.Services; - -internal sealed class CallgraphIngestionService : ICallgraphIngestionService -{ - private static readonly HashSet<string> AllowedContentTypes = new(StringComparer.OrdinalIgnoreCase) - { - "application/json", - "application/vnd.stellaops.callgraph+json" - }; - - private readonly ICallgraphParserResolver parserResolver; - private readonly ICallgraphArtifactStore artifactStore; - private readonly ICallgraphRepository repository; - private readonly ILogger<CallgraphIngestionService> logger; - private readonly SignalsOptions options; - private readonly TimeProvider timeProvider; - - public CallgraphIngestionService( - ICallgraphParserResolver parserResolver, - ICallgraphArtifactStore artifactStore, - ICallgraphRepository repository, - IOptions<SignalsOptions> options, - TimeProvider timeProvider, - ILogger<CallgraphIngestionService> logger) - { - this.parserResolver = parserResolver ?? throw new ArgumentNullException(nameof(parserResolver)); - this.artifactStore = artifactStore ?? throw new ArgumentNullException(nameof(artifactStore)); - this.repository = repository ?? throw new ArgumentNullException(nameof(repository)); - this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); - this.timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); - this.options = options?.Value ?? 
throw new ArgumentNullException(nameof(options)); - } - - public async Task<CallgraphIngestResponse> IngestAsync(CallgraphIngestRequest request, CancellationToken cancellationToken) - { - ValidateRequest(request); - - var parser = parserResolver.Resolve(request.Language); - - var artifactBytes = Convert.FromBase64String(request.ArtifactContentBase64); - await using var parseStream = new MemoryStream(artifactBytes, writable: false); - var parseResult = await parser.ParseAsync(parseStream, cancellationToken).ConfigureAwait(false); - - parseStream.Position = 0; - var hash = ComputeSha256(artifactBytes); - - var artifactMetadata = await artifactStore.SaveAsync( - new CallgraphArtifactSaveRequest( - request.Language, - request.Component, - request.Version, - request.ArtifactFileName, - request.ArtifactContentType, - hash), - parseStream, - cancellationToken).ConfigureAwait(false); - - var document = new CallgraphDocument - { - Language = parser.Language, - Component = request.Component, - Version = request.Version, - Nodes = new List<CallgraphNode>(parseResult.Nodes), - Edges = new List<CallgraphEdge>(parseResult.Edges), - Metadata = request.Metadata is null - ? null - : new Dictionary<string, string?>(request.Metadata, StringComparer.OrdinalIgnoreCase), - Artifact = new CallgraphArtifactMetadata - { - Path = artifactMetadata.Path, - Hash = artifactMetadata.Hash, - ContentType = artifactMetadata.ContentType, - Length = artifactMetadata.Length - }, - IngestedAt = timeProvider.GetUtcNow() - }; - - document.Metadata ??= new Dictionary<string, string?>(StringComparer.OrdinalIgnoreCase); - document.Metadata["formatVersion"] = parseResult.FormatVersion; - - document = await repository.UpsertAsync(document, cancellationToken).ConfigureAwait(false); - - logger.LogInformation( - "Ingested callgraph {Language}:{Component}:{Version} (id={Id}) with {NodeCount} nodes and {EdgeCount} edges.", - document.Language, - document.Component, - document.Version, - document.Id, - document.Nodes.Count, - document.Edges.Count); - - return new CallgraphIngestResponse(document.Id, document.Artifact.Path, document.Artifact.Hash); - } - - private static void ValidateRequest(CallgraphIngestRequest request) - { - ArgumentNullException.ThrowIfNull(request); - - if (string.IsNullOrWhiteSpace(request.Language)) - { - throw new CallgraphIngestionValidationException("Language is required."); - } - - if (string.IsNullOrWhiteSpace(request.Component)) - { - throw new CallgraphIngestionValidationException("Component is required."); - } - - if (string.IsNullOrWhiteSpace(request.Version)) - { - throw new CallgraphIngestionValidationException("Version is required."); - } - - if (string.IsNullOrWhiteSpace(request.ArtifactContentBase64)) - { - throw new CallgraphIngestionValidationException("Artifact content is required."); - } - - if (string.IsNullOrWhiteSpace(request.ArtifactFileName)) - { - throw new CallgraphIngestionValidationException("Artifact file name is required."); - } - - if (string.IsNullOrWhiteSpace(request.ArtifactContentType) || !AllowedContentTypes.Contains(request.ArtifactContentType)) - { - throw new CallgraphIngestionValidationException($"Unsupported artifact content type '{request.ArtifactContentType}'."); - } - } - - private static string ComputeSha256(ReadOnlySpan<byte> buffer) - { - Span<byte> hash = stackalloc byte[SHA256.HashSizeInBytes]; - SHA256.HashData(buffer, hash); - return Convert.ToHexString(hash); - } -} - -/// <summary> -/// Exception thrown when the ingestion request is invalid. 
-/// </summary> -public sealed class CallgraphIngestionValidationException : Exception -{ - public CallgraphIngestionValidationException(string message) : base(message) - { - } -} +using System; +using System.Collections.Generic; +using System.IO; +using System.Security.Cryptography; +using System.Text; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Signals.Models; +using StellaOps.Signals.Options; +using StellaOps.Signals.Parsing; +using StellaOps.Signals.Persistence; +using StellaOps.Signals.Storage; +using StellaOps.Signals.Storage.Models; + +namespace StellaOps.Signals.Services; + +internal sealed class CallgraphIngestionService : ICallgraphIngestionService +{ + private static readonly HashSet<string> AllowedContentTypes = new(StringComparer.OrdinalIgnoreCase) + { + "application/json", + "application/vnd.stellaops.callgraph+json" + }; + + private readonly ICallgraphParserResolver parserResolver; + private readonly ICallgraphArtifactStore artifactStore; + private readonly ICallgraphRepository repository; + private readonly ILogger<CallgraphIngestionService> logger; + private readonly SignalsOptions options; + private readonly TimeProvider timeProvider; + + public CallgraphIngestionService( + ICallgraphParserResolver parserResolver, + ICallgraphArtifactStore artifactStore, + ICallgraphRepository repository, + IOptions<SignalsOptions> options, + TimeProvider timeProvider, + ILogger<CallgraphIngestionService> logger) + { + this.parserResolver = parserResolver ?? throw new ArgumentNullException(nameof(parserResolver)); + this.artifactStore = artifactStore ?? throw new ArgumentNullException(nameof(artifactStore)); + this.repository = repository ?? throw new ArgumentNullException(nameof(repository)); + this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); + this.timeProvider = timeProvider ?? throw new ArgumentNullException(nameof(timeProvider)); + this.options = options?.Value ?? throw new ArgumentNullException(nameof(options)); + } + + public async Task<CallgraphIngestResponse> IngestAsync(CallgraphIngestRequest request, CancellationToken cancellationToken) + { + ValidateRequest(request); + + var parser = parserResolver.Resolve(request.Language); + + var artifactBytes = Convert.FromBase64String(request.ArtifactContentBase64); + await using var parseStream = new MemoryStream(artifactBytes, writable: false); + var parseResult = await parser.ParseAsync(parseStream, cancellationToken).ConfigureAwait(false); + + parseStream.Position = 0; + var hash = ComputeSha256(artifactBytes); + + var artifactMetadata = await artifactStore.SaveAsync( + new CallgraphArtifactSaveRequest( + request.Language, + request.Component, + request.Version, + request.ArtifactFileName, + request.ArtifactContentType, + hash), + parseStream, + cancellationToken).ConfigureAwait(false); + + var document = new CallgraphDocument + { + Language = parser.Language, + Component = request.Component, + Version = request.Version, + Nodes = new List<CallgraphNode>(parseResult.Nodes), + Edges = new List<CallgraphEdge>(parseResult.Edges), + Metadata = request.Metadata is null + ? 
null + : new Dictionary<string, string?>(request.Metadata, StringComparer.OrdinalIgnoreCase), + Artifact = new CallgraphArtifactMetadata + { + Path = artifactMetadata.Path, + Hash = artifactMetadata.Hash, + ContentType = artifactMetadata.ContentType, + Length = artifactMetadata.Length + }, + IngestedAt = timeProvider.GetUtcNow() + }; + + document.Metadata ??= new Dictionary<string, string?>(StringComparer.OrdinalIgnoreCase); + document.Metadata["formatVersion"] = parseResult.FormatVersion; + + document = await repository.UpsertAsync(document, cancellationToken).ConfigureAwait(false); + + logger.LogInformation( + "Ingested callgraph {Language}:{Component}:{Version} (id={Id}) with {NodeCount} nodes and {EdgeCount} edges.", + document.Language, + document.Component, + document.Version, + document.Id, + document.Nodes.Count, + document.Edges.Count); + + return new CallgraphIngestResponse(document.Id, document.Artifact.Path, document.Artifact.Hash); + } + + private static void ValidateRequest(CallgraphIngestRequest request) + { + ArgumentNullException.ThrowIfNull(request); + + if (string.IsNullOrWhiteSpace(request.Language)) + { + throw new CallgraphIngestionValidationException("Language is required."); + } + + if (string.IsNullOrWhiteSpace(request.Component)) + { + throw new CallgraphIngestionValidationException("Component is required."); + } + + if (string.IsNullOrWhiteSpace(request.Version)) + { + throw new CallgraphIngestionValidationException("Version is required."); + } + + if (string.IsNullOrWhiteSpace(request.ArtifactContentBase64)) + { + throw new CallgraphIngestionValidationException("Artifact content is required."); + } + + if (string.IsNullOrWhiteSpace(request.ArtifactFileName)) + { + throw new CallgraphIngestionValidationException("Artifact file name is required."); + } + + if (string.IsNullOrWhiteSpace(request.ArtifactContentType) || !AllowedContentTypes.Contains(request.ArtifactContentType)) + { + throw new CallgraphIngestionValidationException($"Unsupported artifact content type '{request.ArtifactContentType}'."); + } + } + + private static string ComputeSha256(ReadOnlySpan<byte> buffer) + { + Span<byte> hash = stackalloc byte[SHA256.HashSizeInBytes]; + SHA256.HashData(buffer, hash); + return Convert.ToHexString(hash); + } +} + +/// <summary> +/// Exception thrown when the ingestion request is invalid. +/// </summary> +public sealed class CallgraphIngestionValidationException : Exception +{ + public CallgraphIngestionValidationException(string message) : base(message) + { + } +} diff --git a/src/StellaOps.Signals/Services/ICallgraphIngestionService.cs b/src/Signals/StellaOps.Signals/Services/ICallgraphIngestionService.cs similarity index 96% rename from src/StellaOps.Signals/Services/ICallgraphIngestionService.cs rename to src/Signals/StellaOps.Signals/Services/ICallgraphIngestionService.cs index f2ed4133..e3c35092 100644 --- a/src/StellaOps.Signals/Services/ICallgraphIngestionService.cs +++ b/src/Signals/StellaOps.Signals/Services/ICallgraphIngestionService.cs @@ -1,16 +1,16 @@ -using System.Threading; -using System.Threading.Tasks; -using StellaOps.Signals.Models; - -namespace StellaOps.Signals.Services; - -/// <summary> -/// Handles ingestion of callgraph artifacts. -/// </summary> -public interface ICallgraphIngestionService -{ - /// <summary> - /// Ingests the supplied callgraph request. 
- /// </summary> - Task<CallgraphIngestResponse> IngestAsync(CallgraphIngestRequest request, CancellationToken cancellationToken); -} +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Signals.Models; + +namespace StellaOps.Signals.Services; + +/// <summary> +/// Handles ingestion of callgraph artifacts. +/// </summary> +public interface ICallgraphIngestionService +{ + /// <summary> + /// Ingests the supplied callgraph request. + /// </summary> + Task<CallgraphIngestResponse> IngestAsync(CallgraphIngestRequest request, CancellationToken cancellationToken); +} diff --git a/src/StellaOps.Signals/StellaOps.Signals.csproj b/src/Signals/StellaOps.Signals/StellaOps.Signals.csproj similarity index 52% rename from src/StellaOps.Signals/StellaOps.Signals.csproj rename to src/Signals/StellaOps.Signals/StellaOps.Signals.csproj index 222f9533..bb8d77ef 100644 --- a/src/StellaOps.Signals/StellaOps.Signals.csproj +++ b/src/Signals/StellaOps.Signals/StellaOps.Signals.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk.Web"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -13,8 +14,8 @@ </ItemGroup> <ItemGroup> - <ProjectReference Include="..\StellaOps.Configuration\StellaOps.Configuration.csproj" /> - <ProjectReference Include="..\StellaOps.Authority\StellaOps.Auth.Abstractions\StellaOps.Auth.Abstractions.csproj" /> - <ProjectReference Include="..\StellaOps.Authority\StellaOps.Auth.ServerIntegration\StellaOps.Auth.ServerIntegration.csproj" /> + <ProjectReference Include="../../__Libraries/StellaOps.Configuration/StellaOps.Configuration.csproj" /> + <ProjectReference Include="../../Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOps.Auth.Abstractions.csproj" /> + <ProjectReference Include="../../Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOps.Auth.ServerIntegration.csproj" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Signals/Storage/FileSystemCallgraphArtifactStore.cs b/src/Signals/StellaOps.Signals/Storage/FileSystemCallgraphArtifactStore.cs similarity index 97% rename from src/StellaOps.Signals/Storage/FileSystemCallgraphArtifactStore.cs rename to src/Signals/StellaOps.Signals/Storage/FileSystemCallgraphArtifactStore.cs index 33cf55d9..082e7e08 100644 --- a/src/StellaOps.Signals/Storage/FileSystemCallgraphArtifactStore.cs +++ b/src/Signals/StellaOps.Signals/Storage/FileSystemCallgraphArtifactStore.cs @@ -1,60 +1,60 @@ -using System; -using System.Globalization; -using System.IO; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Signals.Options; -using StellaOps.Signals.Storage.Models; - -namespace StellaOps.Signals.Storage; - -/// <summary> -/// Stores callgraph artifacts on the local filesystem. -/// </summary> -internal sealed class FileSystemCallgraphArtifactStore : ICallgraphArtifactStore -{ - private readonly SignalsArtifactStorageOptions storageOptions; - private readonly ILogger<FileSystemCallgraphArtifactStore> logger; - - public FileSystemCallgraphArtifactStore(IOptions<SignalsOptions> options, ILogger<FileSystemCallgraphArtifactStore> logger) - { - ArgumentNullException.ThrowIfNull(options); - storageOptions = options.Value.Storage; - this.logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); - } - - public async Task<StoredCallgraphArtifact> SaveAsync(CallgraphArtifactSaveRequest request, Stream content, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(request); - ArgumentNullException.ThrowIfNull(content); - - var root = storageOptions.RootPath; - var directory = Path.Combine(root, Sanitize(request.Language), Sanitize(request.Component), Sanitize(request.Version)); - Directory.CreateDirectory(directory); - - var fileName = string.IsNullOrWhiteSpace(request.FileName) - ? FormattableString.Invariant($"artifact-{DateTimeOffset.UtcNow.ToUnixTimeMilliseconds()}.bin") - : request.FileName.Trim(); - - var destinationPath = Path.Combine(directory, fileName); - - await using (var fileStream = File.Create(destinationPath)) - { - await content.CopyToAsync(fileStream, cancellationToken).ConfigureAwait(false); - } - - var fileInfo = new FileInfo(destinationPath); - logger.LogInformation("Stored callgraph artifact at {Path} (length={Length}).", destinationPath, fileInfo.Length); - - return new StoredCallgraphArtifact( - Path.GetRelativePath(root, destinationPath), - fileInfo.Length, - request.Hash, - request.ContentType); - } - - private static string Sanitize(string value) - => string.Join('_', value.Split(Path.GetInvalidFileNameChars(), StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)).ToLowerInvariant(); -} +using System; +using System.Globalization; +using System.IO; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Signals.Options; +using StellaOps.Signals.Storage.Models; + +namespace StellaOps.Signals.Storage; + +/// <summary> +/// Stores callgraph artifacts on the local filesystem. +/// </summary> +internal sealed class FileSystemCallgraphArtifactStore : ICallgraphArtifactStore +{ + private readonly SignalsArtifactStorageOptions storageOptions; + private readonly ILogger<FileSystemCallgraphArtifactStore> logger; + + public FileSystemCallgraphArtifactStore(IOptions<SignalsOptions> options, ILogger<FileSystemCallgraphArtifactStore> logger) + { + ArgumentNullException.ThrowIfNull(options); + storageOptions = options.Value.Storage; + this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public async Task<StoredCallgraphArtifact> SaveAsync(CallgraphArtifactSaveRequest request, Stream content, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(request); + ArgumentNullException.ThrowIfNull(content); + + var root = storageOptions.RootPath; + var directory = Path.Combine(root, Sanitize(request.Language), Sanitize(request.Component), Sanitize(request.Version)); + Directory.CreateDirectory(directory); + + var fileName = string.IsNullOrWhiteSpace(request.FileName) + ? 
FormattableString.Invariant($"artifact-{DateTimeOffset.UtcNow.ToUnixTimeMilliseconds()}.bin") + : request.FileName.Trim(); + + var destinationPath = Path.Combine(directory, fileName); + + await using (var fileStream = File.Create(destinationPath)) + { + await content.CopyToAsync(fileStream, cancellationToken).ConfigureAwait(false); + } + + var fileInfo = new FileInfo(destinationPath); + logger.LogInformation("Stored callgraph artifact at {Path} (length={Length}).", destinationPath, fileInfo.Length); + + return new StoredCallgraphArtifact( + Path.GetRelativePath(root, destinationPath), + fileInfo.Length, + request.Hash, + request.ContentType); + } + + private static string Sanitize(string value) + => string.Join('_', value.Split(Path.GetInvalidFileNameChars(), StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries)).ToLowerInvariant(); +} diff --git a/src/StellaOps.Signals/Storage/ICallgraphArtifactStore.cs b/src/Signals/StellaOps.Signals/Storage/ICallgraphArtifactStore.cs similarity index 96% rename from src/StellaOps.Signals/Storage/ICallgraphArtifactStore.cs rename to src/Signals/StellaOps.Signals/Storage/ICallgraphArtifactStore.cs index 1c8cdf06..d38c2423 100644 --- a/src/StellaOps.Signals/Storage/ICallgraphArtifactStore.cs +++ b/src/Signals/StellaOps.Signals/Storage/ICallgraphArtifactStore.cs @@ -1,14 +1,14 @@ -using System.IO; -using System.Threading; -using System.Threading.Tasks; -using StellaOps.Signals.Storage.Models; - -namespace StellaOps.Signals.Storage; - -/// <summary> -/// Persists raw callgraph artifacts. -/// </summary> -public interface ICallgraphArtifactStore -{ - Task<StoredCallgraphArtifact> SaveAsync(CallgraphArtifactSaveRequest request, Stream content, CancellationToken cancellationToken); -} +using System.IO; +using System.Threading; +using System.Threading.Tasks; +using StellaOps.Signals.Storage.Models; + +namespace StellaOps.Signals.Storage; + +/// <summary> +/// Persists raw callgraph artifacts. +/// </summary> +public interface ICallgraphArtifactStore +{ + Task<StoredCallgraphArtifact> SaveAsync(CallgraphArtifactSaveRequest request, Stream content, CancellationToken cancellationToken); +} diff --git a/src/StellaOps.Signals/Storage/Models/CallgraphArtifactSaveRequest.cs b/src/Signals/StellaOps.Signals/Storage/Models/CallgraphArtifactSaveRequest.cs similarity index 96% rename from src/StellaOps.Signals/Storage/Models/CallgraphArtifactSaveRequest.cs rename to src/Signals/StellaOps.Signals/Storage/Models/CallgraphArtifactSaveRequest.cs index bed65ad4..672a13b4 100644 --- a/src/StellaOps.Signals/Storage/Models/CallgraphArtifactSaveRequest.cs +++ b/src/Signals/StellaOps.Signals/Storage/Models/CallgraphArtifactSaveRequest.cs @@ -1,12 +1,12 @@ -namespace StellaOps.Signals.Storage.Models; - -/// <summary> -/// Context required to persist a callgraph artifact. -/// </summary> -public sealed record CallgraphArtifactSaveRequest( - string Language, - string Component, - string Version, - string FileName, - string ContentType, - string Hash); +namespace StellaOps.Signals.Storage.Models; + +/// <summary> +/// Context required to persist a callgraph artifact. 
+/// </summary> +public sealed record CallgraphArtifactSaveRequest( + string Language, + string Component, + string Version, + string FileName, + string ContentType, + string Hash); diff --git a/src/StellaOps.Signals/Storage/Models/StoredCallgraphArtifact.cs b/src/Signals/StellaOps.Signals/Storage/Models/StoredCallgraphArtifact.cs similarity index 96% rename from src/StellaOps.Signals/Storage/Models/StoredCallgraphArtifact.cs rename to src/Signals/StellaOps.Signals/Storage/Models/StoredCallgraphArtifact.cs index e3fbe3b8..04dd23ea 100644 --- a/src/StellaOps.Signals/Storage/Models/StoredCallgraphArtifact.cs +++ b/src/Signals/StellaOps.Signals/Storage/Models/StoredCallgraphArtifact.cs @@ -1,10 +1,10 @@ -namespace StellaOps.Signals.Storage.Models; - -/// <summary> -/// Result returned after storing an artifact. -/// </summary> -public sealed record StoredCallgraphArtifact( - string Path, - long Length, - string Hash, - string ContentType); +namespace StellaOps.Signals.Storage.Models; + +/// <summary> +/// Result returned after storing an artifact. +/// </summary> +public sealed record StoredCallgraphArtifact( + string Path, + long Length, + string Hash, + string ContentType); diff --git a/src/StellaOps.Signals/TASKS.md b/src/Signals/StellaOps.Signals/TASKS.md similarity index 99% rename from src/StellaOps.Signals/TASKS.md rename to src/Signals/StellaOps.Signals/TASKS.md index 8db66bbe..ee25e32b 100644 --- a/src/StellaOps.Signals/TASKS.md +++ b/src/Signals/StellaOps.Signals/TASKS.md @@ -1,13 +1,13 @@ -# Signals Service Task Board — Reachability v1 -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| SIGNALS-24-001 | DONE (2025-10-29) | Signals Guild, Architecture Guild | SBOM-GRAPH-24-002 | Implement Signals API skeleton (ASP.NET Minimal API) with auth middleware, health checks, and configuration binding. | Service boots with configuration validation, `/healthz`/`/readyz` return 200, RBAC enforced in integration tests. | -> 2025-10-29: Skeleton live with scope policies, stub endpoints, integration tests. Sample config added under `etc/signals.yaml.sample`. -| SIGNALS-24-002 | DONE (2025-10-29) | Signals Guild, Language Specialists | SIGNALS-24-001 | Build callgraph ingestion pipeline (Java/Node/Python/Go parsers) normalizing into `callgraphs` collection and storing artifact metadata in object storage. | Parsers accept sample artifacts; data persisted with schema validation; unit tests cover malformed inputs. | -> 2025-10-29: JSON parsers for java/nodejs/python/go implemented; artifacts stored on filesystem with SHA-256, callgraphs upserted into Mongo with unique index; integration tests cover success + malformed requests. -| SIGNALS-24-003 | BLOCKED (2025-10-27) | Signals Guild, Runtime Guild | SIGNALS-24-001 | Implement runtime facts ingestion endpoint and normalizer (process, sockets, container metadata) populating `context_facts` with AOC provenance. | Endpoint ingests fixture batches; duplicates deduped; schema enforced; tests cover privacy filters. | -> 2025-10-27: Depends on SIGNALS-24-001 for base API host + authentication plumbing. -| SIGNALS-24-004 | BLOCKED (2025-10-27) | Signals Guild, Data Science | SIGNALS-24-002, SIGNALS-24-003 | Deliver reachability scoring engine producing states/scores and writing to `reachability_facts`; expose configuration for weights. | Scoring engine deterministic; tests cover state transitions; metrics emitted. 
| -> 2025-10-27: Upstream ingestion pipelines (SIGNALS-24-002/003) blocked; scoring engine cannot proceed. -| SIGNALS-24-005 | BLOCKED (2025-10-27) | Signals Guild, Platform Events Guild | SIGNALS-24-004 | Implement Redis caches (`reachability_cache:*`), invalidation on new facts, and publish `signals.fact.updated` events. | Cache hit rate tracked; invalidations working; events delivered with idempotent ids; integration tests pass. | -> 2025-10-27: Awaiting scoring engine and ingestion layers before wiring cache/events. +# Signals Service Task Board — Reachability v1 +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| SIGNALS-24-001 | DONE (2025-10-29) | Signals Guild, Architecture Guild | SBOM-GRAPH-24-002 | Implement Signals API skeleton (ASP.NET Minimal API) with auth middleware, health checks, and configuration binding. | Service boots with configuration validation, `/healthz`/`/readyz` return 200, RBAC enforced in integration tests. | +> 2025-10-29: Skeleton live with scope policies, stub endpoints, integration tests. Sample config added under `etc/signals.yaml.sample`. +| SIGNALS-24-002 | DONE (2025-10-29) | Signals Guild, Language Specialists | SIGNALS-24-001 | Build callgraph ingestion pipeline (Java/Node/Python/Go parsers) normalizing into `callgraphs` collection and storing artifact metadata in object storage. | Parsers accept sample artifacts; data persisted with schema validation; unit tests cover malformed inputs. | +> 2025-10-29: JSON parsers for java/nodejs/python/go implemented; artifacts stored on filesystem with SHA-256, callgraphs upserted into Mongo with unique index; integration tests cover success + malformed requests. +| SIGNALS-24-003 | BLOCKED (2025-10-27) | Signals Guild, Runtime Guild | SIGNALS-24-001 | Implement runtime facts ingestion endpoint and normalizer (process, sockets, container metadata) populating `context_facts` with AOC provenance. | Endpoint ingests fixture batches; duplicates deduped; schema enforced; tests cover privacy filters. | +> 2025-10-27: Depends on SIGNALS-24-001 for base API host + authentication plumbing. +| SIGNALS-24-004 | BLOCKED (2025-10-27) | Signals Guild, Data Science | SIGNALS-24-002, SIGNALS-24-003 | Deliver reachability scoring engine producing states/scores and writing to `reachability_facts`; expose configuration for weights. | Scoring engine deterministic; tests cover state transitions; metrics emitted. | +> 2025-10-27: Upstream ingestion pipelines (SIGNALS-24-002/003) blocked; scoring engine cannot proceed. +| SIGNALS-24-005 | BLOCKED (2025-10-27) | Signals Guild, Platform Events Guild | SIGNALS-24-004 | Implement Redis caches (`reachability_cache:*`), invalidation on new facts, and publish `signals.fact.updated` events. | Cache hit rate tracked; invalidations working; events delivered with idempotent ids; integration tests pass. | +> 2025-10-27: Awaiting scoring engine and ingestion layers before wiring cache/events. 
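For reviewers who want to exercise the ingestion pipeline wired up in Program.cs and CallgraphIngestionService above, the sketch below drives POST /signals/callgraphs from a small top-level C# client. It is a minimal sketch, not part of the patch: the base URL is a placeholder, the camelCase JSON property names assume default ASP.NET serialization, and the X-Scopes header is only honoured when Authority authentication is disabled and AllowAnonymousFallback is set, as implemented in Program.TryAuthorize.

    // Minimal sketch: development header fallback only, not production auth.
    using System;
    using System.Collections.Generic;
    using System.IO;
    using System.Net.Http;
    using System.Net.Http.Json;

    using var client = new HttpClient { BaseAddress = new Uri("http://localhost:5000") }; // placeholder host
    client.DefaultRequestHeaders.Add("X-Scopes", "signals:write");

    var payload = new
    {
        language = "nodejs",                       // must match a registered parser: java, nodejs, python, go
        component = "pkg:npm/example",
        version = "1.2.3",
        artifactFileName = "callgraph.json",
        artifactContentType = "application/json",  // or application/vnd.stellaops.callgraph+json
        artifactContentBase64 = Convert.ToBase64String(await File.ReadAllBytesAsync("callgraph.json")),
        metadata = new Dictionary<string, string?> { ["source"] = "ci" },
    };

    // Expect 202 Accepted with Location /signals/callgraphs/{id}; 400/422 map to the
    // validation and parser exceptions handled by the endpoint above.
    var response = await client.PostAsJsonAsync("/signals/callgraphs", payload);
    Console.WriteLine($"{(int)response.StatusCode} {response.Headers.Location}");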
diff --git a/src/Signer/StellaOps.Signer.sln b/src/Signer/StellaOps.Signer.sln new file mode 100644 index 00000000..bd062f39 --- /dev/null +++ b/src/Signer/StellaOps.Signer.sln @@ -0,0 +1,182 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.0.31903.59 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "StellaOps.Signer", "StellaOps.Signer", "{93E67595-BF90-642A-D1B1-E56DFA9E06DF}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Signer.Core", "StellaOps.Signer\StellaOps.Signer.Core\StellaOps.Signer.Core.csproj", "{B568E49A-91CF-4579-A8AD-C1214D27E088}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Signer.Infrastructure", "StellaOps.Signer\StellaOps.Signer.Infrastructure\StellaOps.Signer.Infrastructure.csproj", "{B4A54B6C-998B-4D8D-833F-44932500AF1B}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Signer.Tests", "StellaOps.Signer\StellaOps.Signer.Tests\StellaOps.Signer.Tests.csproj", "{A30EA34C-0595-4399-AD6A-4D240F87C258}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Signer.WebService", "StellaOps.Signer\StellaOps.Signer.WebService\StellaOps.Signer.WebService.csproj", "{0AAA68F5-D148-4B53-83D3-E486D3BAE5A0}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Configuration", "..\__Libraries\StellaOps.Configuration\StellaOps.Configuration.csproj", "{3B8F2A6A-EB4E-459D-855B-445CCD23422E}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Authority.Plugins.Abstractions", "..\Authority\StellaOps.Authority\StellaOps.Authority.Plugins.Abstractions\StellaOps.Authority.Plugins.Abstractions.csproj", "{40C38CD7-2DFD-40BC-9F72-C15CB7C405E5}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cryptography", "..\__Libraries\StellaOps.Cryptography\StellaOps.Cryptography.csproj", "{9F1B7315-0521-4266-8E51-C5FA92BF33D3}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Abstractions", "..\Authority\StellaOps.Authority\StellaOps.Auth.Abstractions\StellaOps.Auth.Abstractions.csproj", "{7D9D3603-0138-4E65-877A-CCE4BB7705DC}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.DependencyInjection", "..\__Libraries\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj", "{B49713E0-CB65-414A-BE39-3F60585F5C41}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Client", "..\Authority\StellaOps.Authority\StellaOps.Auth.Client\StellaOps.Auth.Client.csproj", "{3C959A09-8E09-4E2E-B9E8-7723007445EE}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.ServerIntegration", "..\Authority\StellaOps.Authority\StellaOps.Auth.ServerIntegration\StellaOps.Auth.ServerIntegration.csproj", "{D4E2E052-9CD5-4683-AF12-041662DEC782}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|Any CPU = Release|Any CPU + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {B568E49A-91CF-4579-A8AD-C1214D27E088}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B568E49A-91CF-4579-A8AD-C1214D27E088}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B568E49A-91CF-4579-A8AD-C1214D27E088}.Debug|x64.ActiveCfg = Debug|Any CPU + 
{B568E49A-91CF-4579-A8AD-C1214D27E088}.Debug|x64.Build.0 = Debug|Any CPU + {B568E49A-91CF-4579-A8AD-C1214D27E088}.Debug|x86.ActiveCfg = Debug|Any CPU + {B568E49A-91CF-4579-A8AD-C1214D27E088}.Debug|x86.Build.0 = Debug|Any CPU + {B568E49A-91CF-4579-A8AD-C1214D27E088}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B568E49A-91CF-4579-A8AD-C1214D27E088}.Release|Any CPU.Build.0 = Release|Any CPU + {B568E49A-91CF-4579-A8AD-C1214D27E088}.Release|x64.ActiveCfg = Release|Any CPU + {B568E49A-91CF-4579-A8AD-C1214D27E088}.Release|x64.Build.0 = Release|Any CPU + {B568E49A-91CF-4579-A8AD-C1214D27E088}.Release|x86.ActiveCfg = Release|Any CPU + {B568E49A-91CF-4579-A8AD-C1214D27E088}.Release|x86.Build.0 = Release|Any CPU + {B4A54B6C-998B-4D8D-833F-44932500AF1B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B4A54B6C-998B-4D8D-833F-44932500AF1B}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B4A54B6C-998B-4D8D-833F-44932500AF1B}.Debug|x64.ActiveCfg = Debug|Any CPU + {B4A54B6C-998B-4D8D-833F-44932500AF1B}.Debug|x64.Build.0 = Debug|Any CPU + {B4A54B6C-998B-4D8D-833F-44932500AF1B}.Debug|x86.ActiveCfg = Debug|Any CPU + {B4A54B6C-998B-4D8D-833F-44932500AF1B}.Debug|x86.Build.0 = Debug|Any CPU + {B4A54B6C-998B-4D8D-833F-44932500AF1B}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B4A54B6C-998B-4D8D-833F-44932500AF1B}.Release|Any CPU.Build.0 = Release|Any CPU + {B4A54B6C-998B-4D8D-833F-44932500AF1B}.Release|x64.ActiveCfg = Release|Any CPU + {B4A54B6C-998B-4D8D-833F-44932500AF1B}.Release|x64.Build.0 = Release|Any CPU + {B4A54B6C-998B-4D8D-833F-44932500AF1B}.Release|x86.ActiveCfg = Release|Any CPU + {B4A54B6C-998B-4D8D-833F-44932500AF1B}.Release|x86.Build.0 = Release|Any CPU + {A30EA34C-0595-4399-AD6A-4D240F87C258}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {A30EA34C-0595-4399-AD6A-4D240F87C258}.Debug|Any CPU.Build.0 = Debug|Any CPU + {A30EA34C-0595-4399-AD6A-4D240F87C258}.Debug|x64.ActiveCfg = Debug|Any CPU + {A30EA34C-0595-4399-AD6A-4D240F87C258}.Debug|x64.Build.0 = Debug|Any CPU + {A30EA34C-0595-4399-AD6A-4D240F87C258}.Debug|x86.ActiveCfg = Debug|Any CPU + {A30EA34C-0595-4399-AD6A-4D240F87C258}.Debug|x86.Build.0 = Debug|Any CPU + {A30EA34C-0595-4399-AD6A-4D240F87C258}.Release|Any CPU.ActiveCfg = Release|Any CPU + {A30EA34C-0595-4399-AD6A-4D240F87C258}.Release|Any CPU.Build.0 = Release|Any CPU + {A30EA34C-0595-4399-AD6A-4D240F87C258}.Release|x64.ActiveCfg = Release|Any CPU + {A30EA34C-0595-4399-AD6A-4D240F87C258}.Release|x64.Build.0 = Release|Any CPU + {A30EA34C-0595-4399-AD6A-4D240F87C258}.Release|x86.ActiveCfg = Release|Any CPU + {A30EA34C-0595-4399-AD6A-4D240F87C258}.Release|x86.Build.0 = Release|Any CPU + {0AAA68F5-D148-4B53-83D3-E486D3BAE5A0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {0AAA68F5-D148-4B53-83D3-E486D3BAE5A0}.Debug|Any CPU.Build.0 = Debug|Any CPU + {0AAA68F5-D148-4B53-83D3-E486D3BAE5A0}.Debug|x64.ActiveCfg = Debug|Any CPU + {0AAA68F5-D148-4B53-83D3-E486D3BAE5A0}.Debug|x64.Build.0 = Debug|Any CPU + {0AAA68F5-D148-4B53-83D3-E486D3BAE5A0}.Debug|x86.ActiveCfg = Debug|Any CPU + {0AAA68F5-D148-4B53-83D3-E486D3BAE5A0}.Debug|x86.Build.0 = Debug|Any CPU + {0AAA68F5-D148-4B53-83D3-E486D3BAE5A0}.Release|Any CPU.ActiveCfg = Release|Any CPU + {0AAA68F5-D148-4B53-83D3-E486D3BAE5A0}.Release|Any CPU.Build.0 = Release|Any CPU + {0AAA68F5-D148-4B53-83D3-E486D3BAE5A0}.Release|x64.ActiveCfg = Release|Any CPU + {0AAA68F5-D148-4B53-83D3-E486D3BAE5A0}.Release|x64.Build.0 = Release|Any CPU + {0AAA68F5-D148-4B53-83D3-E486D3BAE5A0}.Release|x86.ActiveCfg = Release|Any CPU + {0AAA68F5-D148-4B53-83D3-E486D3BAE5A0}.Release|x86.Build.0 = 
Release|Any CPU + {3B8F2A6A-EB4E-459D-855B-445CCD23422E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {3B8F2A6A-EB4E-459D-855B-445CCD23422E}.Debug|Any CPU.Build.0 = Debug|Any CPU + {3B8F2A6A-EB4E-459D-855B-445CCD23422E}.Debug|x64.ActiveCfg = Debug|Any CPU + {3B8F2A6A-EB4E-459D-855B-445CCD23422E}.Debug|x64.Build.0 = Debug|Any CPU + {3B8F2A6A-EB4E-459D-855B-445CCD23422E}.Debug|x86.ActiveCfg = Debug|Any CPU + {3B8F2A6A-EB4E-459D-855B-445CCD23422E}.Debug|x86.Build.0 = Debug|Any CPU + {3B8F2A6A-EB4E-459D-855B-445CCD23422E}.Release|Any CPU.ActiveCfg = Release|Any CPU + {3B8F2A6A-EB4E-459D-855B-445CCD23422E}.Release|Any CPU.Build.0 = Release|Any CPU + {3B8F2A6A-EB4E-459D-855B-445CCD23422E}.Release|x64.ActiveCfg = Release|Any CPU + {3B8F2A6A-EB4E-459D-855B-445CCD23422E}.Release|x64.Build.0 = Release|Any CPU + {3B8F2A6A-EB4E-459D-855B-445CCD23422E}.Release|x86.ActiveCfg = Release|Any CPU + {3B8F2A6A-EB4E-459D-855B-445CCD23422E}.Release|x86.Build.0 = Release|Any CPU + {40C38CD7-2DFD-40BC-9F72-C15CB7C405E5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {40C38CD7-2DFD-40BC-9F72-C15CB7C405E5}.Debug|Any CPU.Build.0 = Debug|Any CPU + {40C38CD7-2DFD-40BC-9F72-C15CB7C405E5}.Debug|x64.ActiveCfg = Debug|Any CPU + {40C38CD7-2DFD-40BC-9F72-C15CB7C405E5}.Debug|x64.Build.0 = Debug|Any CPU + {40C38CD7-2DFD-40BC-9F72-C15CB7C405E5}.Debug|x86.ActiveCfg = Debug|Any CPU + {40C38CD7-2DFD-40BC-9F72-C15CB7C405E5}.Debug|x86.Build.0 = Debug|Any CPU + {40C38CD7-2DFD-40BC-9F72-C15CB7C405E5}.Release|Any CPU.ActiveCfg = Release|Any CPU + {40C38CD7-2DFD-40BC-9F72-C15CB7C405E5}.Release|Any CPU.Build.0 = Release|Any CPU + {40C38CD7-2DFD-40BC-9F72-C15CB7C405E5}.Release|x64.ActiveCfg = Release|Any CPU + {40C38CD7-2DFD-40BC-9F72-C15CB7C405E5}.Release|x64.Build.0 = Release|Any CPU + {40C38CD7-2DFD-40BC-9F72-C15CB7C405E5}.Release|x86.ActiveCfg = Release|Any CPU + {40C38CD7-2DFD-40BC-9F72-C15CB7C405E5}.Release|x86.Build.0 = Release|Any CPU + {9F1B7315-0521-4266-8E51-C5FA92BF33D3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {9F1B7315-0521-4266-8E51-C5FA92BF33D3}.Debug|Any CPU.Build.0 = Debug|Any CPU + {9F1B7315-0521-4266-8E51-C5FA92BF33D3}.Debug|x64.ActiveCfg = Debug|Any CPU + {9F1B7315-0521-4266-8E51-C5FA92BF33D3}.Debug|x64.Build.0 = Debug|Any CPU + {9F1B7315-0521-4266-8E51-C5FA92BF33D3}.Debug|x86.ActiveCfg = Debug|Any CPU + {9F1B7315-0521-4266-8E51-C5FA92BF33D3}.Debug|x86.Build.0 = Debug|Any CPU + {9F1B7315-0521-4266-8E51-C5FA92BF33D3}.Release|Any CPU.ActiveCfg = Release|Any CPU + {9F1B7315-0521-4266-8E51-C5FA92BF33D3}.Release|Any CPU.Build.0 = Release|Any CPU + {9F1B7315-0521-4266-8E51-C5FA92BF33D3}.Release|x64.ActiveCfg = Release|Any CPU + {9F1B7315-0521-4266-8E51-C5FA92BF33D3}.Release|x64.Build.0 = Release|Any CPU + {9F1B7315-0521-4266-8E51-C5FA92BF33D3}.Release|x86.ActiveCfg = Release|Any CPU + {9F1B7315-0521-4266-8E51-C5FA92BF33D3}.Release|x86.Build.0 = Release|Any CPU + {7D9D3603-0138-4E65-877A-CCE4BB7705DC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7D9D3603-0138-4E65-877A-CCE4BB7705DC}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7D9D3603-0138-4E65-877A-CCE4BB7705DC}.Debug|x64.ActiveCfg = Debug|Any CPU + {7D9D3603-0138-4E65-877A-CCE4BB7705DC}.Debug|x64.Build.0 = Debug|Any CPU + {7D9D3603-0138-4E65-877A-CCE4BB7705DC}.Debug|x86.ActiveCfg = Debug|Any CPU + {7D9D3603-0138-4E65-877A-CCE4BB7705DC}.Debug|x86.Build.0 = Debug|Any CPU + {7D9D3603-0138-4E65-877A-CCE4BB7705DC}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7D9D3603-0138-4E65-877A-CCE4BB7705DC}.Release|Any CPU.Build.0 = Release|Any CPU + 
{7D9D3603-0138-4E65-877A-CCE4BB7705DC}.Release|x64.ActiveCfg = Release|Any CPU + {7D9D3603-0138-4E65-877A-CCE4BB7705DC}.Release|x64.Build.0 = Release|Any CPU + {7D9D3603-0138-4E65-877A-CCE4BB7705DC}.Release|x86.ActiveCfg = Release|Any CPU + {7D9D3603-0138-4E65-877A-CCE4BB7705DC}.Release|x86.Build.0 = Release|Any CPU + {B49713E0-CB65-414A-BE39-3F60585F5C41}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B49713E0-CB65-414A-BE39-3F60585F5C41}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B49713E0-CB65-414A-BE39-3F60585F5C41}.Debug|x64.ActiveCfg = Debug|Any CPU + {B49713E0-CB65-414A-BE39-3F60585F5C41}.Debug|x64.Build.0 = Debug|Any CPU + {B49713E0-CB65-414A-BE39-3F60585F5C41}.Debug|x86.ActiveCfg = Debug|Any CPU + {B49713E0-CB65-414A-BE39-3F60585F5C41}.Debug|x86.Build.0 = Debug|Any CPU + {B49713E0-CB65-414A-BE39-3F60585F5C41}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B49713E0-CB65-414A-BE39-3F60585F5C41}.Release|Any CPU.Build.0 = Release|Any CPU + {B49713E0-CB65-414A-BE39-3F60585F5C41}.Release|x64.ActiveCfg = Release|Any CPU + {B49713E0-CB65-414A-BE39-3F60585F5C41}.Release|x64.Build.0 = Release|Any CPU + {B49713E0-CB65-414A-BE39-3F60585F5C41}.Release|x86.ActiveCfg = Release|Any CPU + {B49713E0-CB65-414A-BE39-3F60585F5C41}.Release|x86.Build.0 = Release|Any CPU + {3C959A09-8E09-4E2E-B9E8-7723007445EE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {3C959A09-8E09-4E2E-B9E8-7723007445EE}.Debug|Any CPU.Build.0 = Debug|Any CPU + {3C959A09-8E09-4E2E-B9E8-7723007445EE}.Debug|x64.ActiveCfg = Debug|Any CPU + {3C959A09-8E09-4E2E-B9E8-7723007445EE}.Debug|x64.Build.0 = Debug|Any CPU + {3C959A09-8E09-4E2E-B9E8-7723007445EE}.Debug|x86.ActiveCfg = Debug|Any CPU + {3C959A09-8E09-4E2E-B9E8-7723007445EE}.Debug|x86.Build.0 = Debug|Any CPU + {3C959A09-8E09-4E2E-B9E8-7723007445EE}.Release|Any CPU.ActiveCfg = Release|Any CPU + {3C959A09-8E09-4E2E-B9E8-7723007445EE}.Release|Any CPU.Build.0 = Release|Any CPU + {3C959A09-8E09-4E2E-B9E8-7723007445EE}.Release|x64.ActiveCfg = Release|Any CPU + {3C959A09-8E09-4E2E-B9E8-7723007445EE}.Release|x64.Build.0 = Release|Any CPU + {3C959A09-8E09-4E2E-B9E8-7723007445EE}.Release|x86.ActiveCfg = Release|Any CPU + {3C959A09-8E09-4E2E-B9E8-7723007445EE}.Release|x86.Build.0 = Release|Any CPU + {D4E2E052-9CD5-4683-AF12-041662DEC782}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D4E2E052-9CD5-4683-AF12-041662DEC782}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D4E2E052-9CD5-4683-AF12-041662DEC782}.Debug|x64.ActiveCfg = Debug|Any CPU + {D4E2E052-9CD5-4683-AF12-041662DEC782}.Debug|x64.Build.0 = Debug|Any CPU + {D4E2E052-9CD5-4683-AF12-041662DEC782}.Debug|x86.ActiveCfg = Debug|Any CPU + {D4E2E052-9CD5-4683-AF12-041662DEC782}.Debug|x86.Build.0 = Debug|Any CPU + {D4E2E052-9CD5-4683-AF12-041662DEC782}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D4E2E052-9CD5-4683-AF12-041662DEC782}.Release|Any CPU.Build.0 = Release|Any CPU + {D4E2E052-9CD5-4683-AF12-041662DEC782}.Release|x64.ActiveCfg = Release|Any CPU + {D4E2E052-9CD5-4683-AF12-041662DEC782}.Release|x64.Build.0 = Release|Any CPU + {D4E2E052-9CD5-4683-AF12-041662DEC782}.Release|x86.ActiveCfg = Release|Any CPU + {D4E2E052-9CD5-4683-AF12-041662DEC782}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(NestedProjects) = preSolution + {B568E49A-91CF-4579-A8AD-C1214D27E088} = {93E67595-BF90-642A-D1B1-E56DFA9E06DF} + {B4A54B6C-998B-4D8D-833F-44932500AF1B} = {93E67595-BF90-642A-D1B1-E56DFA9E06DF} + {A30EA34C-0595-4399-AD6A-4D240F87C258} = 
{93E67595-BF90-642A-D1B1-E56DFA9E06DF} + {0AAA68F5-D148-4B53-83D3-E486D3BAE5A0} = {93E67595-BF90-642A-D1B1-E56DFA9E06DF} + EndGlobalSection +EndGlobal diff --git a/src/StellaOps.Signer/AGENTS.md b/src/Signer/StellaOps.Signer/AGENTS.md similarity index 64% rename from src/StellaOps.Signer/AGENTS.md rename to src/Signer/StellaOps.Signer/AGENTS.md index 0f7b37d7..cc2a7861 100644 --- a/src/StellaOps.Signer/AGENTS.md +++ b/src/Signer/StellaOps.Signer/AGENTS.md @@ -1,21 +1,21 @@ -# Signer Guild - -## Mission -Operate the Stella Ops Signer service: authenticate trusted callers, enforce proof‑of‑entitlement and release integrity policy, and mint verifiable DSSE bundles (keyless or KMS-backed) for downstream attestation. - -## Teams On Call -- Team 11 (Signer API) -- Team 12 (Signer Reliability & Quotas) - -## Operating Principles -- Accept requests only with Authority-issued OpToks plus DPoP or mTLS sender binding; reject unsigned/cross-tenant traffic. -- Treat PoE claims as hard gates for quota, version windows, and license validity; cache results deterministically with bounded TTLs. -- Verify scanner image release signatures via OCI Referrers before signing; fail closed on ambiguity. -- Keep the hot path stateless and deterministic; persist audit trails with structured logging, metrics, and correlation IDs. -- Update `TASKS.md`, architecture notes, and tests whenever behaviour or contracts evolve. - -## Key Directories -- `src/StellaOps.Signer/StellaOps.Signer.WebService/` — Minimal API host and HTTP surface (to be scaffolded). -- `src/StellaOps.Signer/StellaOps.Signer.Core/` — Domain contracts, signing pipeline, quota enforcement (to be scaffolded). -- `src/StellaOps.Signer/StellaOps.Signer.Infrastructure/` — External clients (Authority, Licensing, Fulcio/KMS, OCI) and persistence (to be scaffolded). -- `src/StellaOps.Signer/StellaOps.Signer.Tests/` — Unit/integration test suites (to be scaffolded). +# Signer Guild + +## Mission +Operate the Stella Ops Signer service: authenticate trusted callers, enforce proof‑of‑entitlement and release integrity policy, and mint verifiable DSSE bundles (keyless or KMS-backed) for downstream attestation. + +## Teams On Call +- Team 11 (Signer API) +- Team 12 (Signer Reliability & Quotas) + +## Operating Principles +- Accept requests only with Authority-issued OpToks plus DPoP or mTLS sender binding; reject unsigned/cross-tenant traffic. +- Treat PoE claims as hard gates for quota, version windows, and license validity; cache results deterministically with bounded TTLs. +- Verify scanner image release signatures via OCI Referrers before signing; fail closed on ambiguity. +- Keep the hot path stateless and deterministic; persist audit trails with structured logging, metrics, and correlation IDs. +- Update `TASKS.md`, architecture notes, and tests whenever behaviour or contracts evolve. + +## Key Directories +- `src/Signer/StellaOps.Signer/StellaOps.Signer.WebService/` — Minimal API host and HTTP surface (to be scaffolded). +- `src/Signer/StellaOps.Signer/StellaOps.Signer.Core/` — Domain contracts, signing pipeline, quota enforcement (to be scaffolded). +- `src/Signer/StellaOps.Signer/StellaOps.Signer.Infrastructure/` — External clients (Authority, Licensing, Fulcio/KMS, OCI) and persistence (to be scaffolded). +- `src/Signer/StellaOps.Signer/StellaOps.Signer.Tests/` — Unit/integration test suites (to be scaffolded). 
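The fail-closed posture described in the operating principles above is exercised by the endpoint tests later in this patch: an untrusted scannerImageDigest causes /api/v1/signer/sign/dsse to return 403 with problem type release_untrusted, and /api/v1/signer/verify/referrers reports whether a digest is trusted. The sketch below shows a client honouring that contract; it is illustrative only, the host URL and Bearer/DPoP values are placeholders mirroring the stub tokens in SignerEndpointsTests rather than real Authority-issued OpToks, and the camelCase "trusted" property name is an assumed serialization of VerifyReferrersResponseDto.

    // Minimal sketch; host, token, and DPoP proof are placeholders.
    using System;
    using System.Net.Http;
    using System.Net.Http.Headers;
    using System.Text.Json;

    using var client = new HttpClient { BaseAddress = new Uri("https://signer.internal.example") };
    client.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", "<op-token>");
    client.DefaultRequestHeaders.Add("DPoP", "<dpop-proof>");

    var scannerDigest = "sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef";

    // Pre-flight: ask the Signer whether the scanner image digest is backed by trusted OCI referrers.
    var referrers = await client.GetAsync($"/api/v1/signer/verify/referrers?digest={scannerDigest}");

    var trusted = false;
    if (referrers.IsSuccessStatusCode)
    {
        using var doc = JsonDocument.Parse(await referrers.Content.ReadAsStringAsync());
        // "trusted" casing is an assumption (default camelCase serialization of VerifyReferrersResponseDto.Trusted).
        trusted = doc.RootElement.TryGetProperty("trusted", out var flag) && flag.GetBoolean();
    }

    if (!trusted)
    {
        // Fail closed, mirroring the 403 release_untrusted behaviour of the sign/dsse endpoint.
        throw new InvalidOperationException("Scanner release not trusted; refusing to request a DSSE bundle.");
    }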
diff --git a/src/StellaOps.Signer/StellaOps.Signer.Core/SignerAbstractions.cs b/src/Signer/StellaOps.Signer/StellaOps.Signer.Core/SignerAbstractions.cs similarity index 100% rename from src/StellaOps.Signer/StellaOps.Signer.Core/SignerAbstractions.cs rename to src/Signer/StellaOps.Signer/StellaOps.Signer.Core/SignerAbstractions.cs diff --git a/src/StellaOps.Signer/StellaOps.Signer.Core/SignerContracts.cs b/src/Signer/StellaOps.Signer/StellaOps.Signer.Core/SignerContracts.cs similarity index 100% rename from src/StellaOps.Signer/StellaOps.Signer.Core/SignerContracts.cs rename to src/Signer/StellaOps.Signer/StellaOps.Signer.Core/SignerContracts.cs diff --git a/src/StellaOps.Signer/StellaOps.Signer.Core/SignerExceptions.cs b/src/Signer/StellaOps.Signer/StellaOps.Signer.Core/SignerExceptions.cs similarity index 100% rename from src/StellaOps.Signer/StellaOps.Signer.Core/SignerExceptions.cs rename to src/Signer/StellaOps.Signer/StellaOps.Signer.Core/SignerExceptions.cs diff --git a/src/StellaOps.Signer/StellaOps.Signer.Core/SignerPipeline.cs b/src/Signer/StellaOps.Signer/StellaOps.Signer.Core/SignerPipeline.cs similarity index 100% rename from src/StellaOps.Signer/StellaOps.Signer.Core/SignerPipeline.cs rename to src/Signer/StellaOps.Signer/StellaOps.Signer.Core/SignerPipeline.cs diff --git a/src/StellaOps.Signer/StellaOps.Signer.Core/SignerStatementBuilder.cs b/src/Signer/StellaOps.Signer/StellaOps.Signer.Core/SignerStatementBuilder.cs similarity index 100% rename from src/StellaOps.Signer/StellaOps.Signer.Core/SignerStatementBuilder.cs rename to src/Signer/StellaOps.Signer/StellaOps.Signer.Core/SignerStatementBuilder.cs diff --git a/src/StellaOps.Signer/StellaOps.Signer.Core/StellaOps.Signer.Core.csproj b/src/Signer/StellaOps.Signer/StellaOps.Signer.Core/StellaOps.Signer.Core.csproj similarity index 100% rename from src/StellaOps.Signer/StellaOps.Signer.Core/StellaOps.Signer.Core.csproj rename to src/Signer/StellaOps.Signer/StellaOps.Signer.Core/StellaOps.Signer.Core.csproj diff --git a/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/Auditing/InMemorySignerAuditSink.cs b/src/Signer/StellaOps.Signer/StellaOps.Signer.Infrastructure/Auditing/InMemorySignerAuditSink.cs similarity index 100% rename from src/StellaOps.Signer/StellaOps.Signer.Infrastructure/Auditing/InMemorySignerAuditSink.cs rename to src/Signer/StellaOps.Signer/StellaOps.Signer.Infrastructure/Auditing/InMemorySignerAuditSink.cs diff --git a/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/Options/SignerCryptoOptions.cs b/src/Signer/StellaOps.Signer/StellaOps.Signer.Infrastructure/Options/SignerCryptoOptions.cs similarity index 100% rename from src/StellaOps.Signer/StellaOps.Signer.Infrastructure/Options/SignerCryptoOptions.cs rename to src/Signer/StellaOps.Signer/StellaOps.Signer.Infrastructure/Options/SignerCryptoOptions.cs diff --git a/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/Options/SignerEntitlementOptions.cs b/src/Signer/StellaOps.Signer/StellaOps.Signer.Infrastructure/Options/SignerEntitlementOptions.cs similarity index 100% rename from src/StellaOps.Signer/StellaOps.Signer.Infrastructure/Options/SignerEntitlementOptions.cs rename to src/Signer/StellaOps.Signer/StellaOps.Signer.Infrastructure/Options/SignerEntitlementOptions.cs diff --git a/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/Options/SignerReleaseVerificationOptions.cs b/src/Signer/StellaOps.Signer/StellaOps.Signer.Infrastructure/Options/SignerReleaseVerificationOptions.cs similarity index 100% rename from 
src/StellaOps.Signer/StellaOps.Signer.Infrastructure/Options/SignerReleaseVerificationOptions.cs rename to src/Signer/StellaOps.Signer/StellaOps.Signer.Infrastructure/Options/SignerReleaseVerificationOptions.cs diff --git a/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/ProofOfEntitlement/InMemoryProofOfEntitlementIntrospector.cs b/src/Signer/StellaOps.Signer/StellaOps.Signer.Infrastructure/ProofOfEntitlement/InMemoryProofOfEntitlementIntrospector.cs similarity index 100% rename from src/StellaOps.Signer/StellaOps.Signer.Infrastructure/ProofOfEntitlement/InMemoryProofOfEntitlementIntrospector.cs rename to src/Signer/StellaOps.Signer/StellaOps.Signer.Infrastructure/ProofOfEntitlement/InMemoryProofOfEntitlementIntrospector.cs diff --git a/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/Quotas/InMemoryQuotaService.cs b/src/Signer/StellaOps.Signer/StellaOps.Signer.Infrastructure/Quotas/InMemoryQuotaService.cs similarity index 100% rename from src/StellaOps.Signer/StellaOps.Signer.Infrastructure/Quotas/InMemoryQuotaService.cs rename to src/Signer/StellaOps.Signer/StellaOps.Signer.Infrastructure/Quotas/InMemoryQuotaService.cs diff --git a/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/ReleaseVerification/DefaultReleaseIntegrityVerifier.cs b/src/Signer/StellaOps.Signer/StellaOps.Signer.Infrastructure/ReleaseVerification/DefaultReleaseIntegrityVerifier.cs similarity index 100% rename from src/StellaOps.Signer/StellaOps.Signer.Infrastructure/ReleaseVerification/DefaultReleaseIntegrityVerifier.cs rename to src/Signer/StellaOps.Signer/StellaOps.Signer.Infrastructure/ReleaseVerification/DefaultReleaseIntegrityVerifier.cs diff --git a/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/ServiceCollectionExtensions.cs b/src/Signer/StellaOps.Signer/StellaOps.Signer.Infrastructure/ServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Signer/StellaOps.Signer.Infrastructure/ServiceCollectionExtensions.cs rename to src/Signer/StellaOps.Signer/StellaOps.Signer.Infrastructure/ServiceCollectionExtensions.cs diff --git a/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/Signing/HmacDsseSigner.cs b/src/Signer/StellaOps.Signer/StellaOps.Signer.Infrastructure/Signing/HmacDsseSigner.cs similarity index 100% rename from src/StellaOps.Signer/StellaOps.Signer.Infrastructure/Signing/HmacDsseSigner.cs rename to src/Signer/StellaOps.Signer/StellaOps.Signer.Infrastructure/Signing/HmacDsseSigner.cs diff --git a/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/StellaOps.Signer.Infrastructure.csproj b/src/Signer/StellaOps.Signer/StellaOps.Signer.Infrastructure/StellaOps.Signer.Infrastructure.csproj similarity index 98% rename from src/StellaOps.Signer/StellaOps.Signer.Infrastructure/StellaOps.Signer.Infrastructure.csproj rename to src/Signer/StellaOps.Signer/StellaOps.Signer.Infrastructure/StellaOps.Signer.Infrastructure.csproj index 18e62f79..fd33b048 100644 --- a/src/StellaOps.Signer/StellaOps.Signer.Infrastructure/StellaOps.Signer.Infrastructure.csproj +++ b/src/Signer/StellaOps.Signer/StellaOps.Signer.Infrastructure/StellaOps.Signer.Infrastructure.csproj @@ -1,20 +1,20 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Signer.Core\StellaOps.Signer.Core.csproj" /> - </ItemGroup> - <ItemGroup> 
- <PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="MongoDB.Driver" Version="3.5.0" /> - <PackageReference Include="StackExchange.Redis" Version="2.8.24" /> - </ItemGroup> -</Project> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="..\StellaOps.Signer.Core\StellaOps.Signer.Core.csproj" /> + </ItemGroup> + <ItemGroup> + <PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Http" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="MongoDB.Driver" Version="3.5.0" /> + <PackageReference Include="StackExchange.Redis" Version="2.8.24" /> + </ItemGroup> +</Project> diff --git a/src/StellaOps.Signer/StellaOps.Signer.Tests/SignerEndpointsTests.cs b/src/Signer/StellaOps.Signer/StellaOps.Signer.Tests/SignerEndpointsTests.cs similarity index 97% rename from src/StellaOps.Signer/StellaOps.Signer.Tests/SignerEndpointsTests.cs rename to src/Signer/StellaOps.Signer/StellaOps.Signer.Tests/SignerEndpointsTests.cs index fa24bda0..4fb59497 100644 --- a/src/StellaOps.Signer/StellaOps.Signer.Tests/SignerEndpointsTests.cs +++ b/src/Signer/StellaOps.Signer/StellaOps.Signer.Tests/SignerEndpointsTests.cs @@ -1,127 +1,127 @@ -using System.Collections.Generic; -using System.Net; -using System.Net.Http; -using System.Net.Http.Headers; -using System.Net.Http.Json; -using System.Threading.Tasks; -using Microsoft.AspNetCore.Mvc; -using Microsoft.AspNetCore.Mvc.Testing; -using StellaOps.Signer.WebService.Contracts; -using Xunit; - -namespace StellaOps.Signer.Tests; - -public sealed class SignerEndpointsTests : IClassFixture<WebApplicationFactory<Program>> -{ - private readonly WebApplicationFactory<Program> _factory; - private const string TrustedDigest = "sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"; - - public SignerEndpointsTests(WebApplicationFactory<Program> factory) - { - _factory = factory; - } - - [Fact] - public async Task SignDsse_ReturnsBundle_WhenRequestValid() - { - var client = CreateClient(); - var request = new HttpRequestMessage(HttpMethod.Post, "/api/v1/signer/sign/dsse") - { - Content = JsonContent.Create(new - { - subject = new[] - { - new - { - name = "pkg:npm/example", - digest = new Dictionary<string, string> { ["sha256"] = "4d5f" }, - }, - }, - predicateType = "https://in-toto.io/Statement/v0.1", - predicate = new { result = "pass" }, - scannerImageDigest = TrustedDigest, - poe = new { format = "jwt", value = "valid-poe" }, - options = new { signingMode = "kms", expirySeconds = 600, returnBundle = "dsse+cert" }, - }) - }; - - request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", 
"stub-token"); - request.Headers.Add("DPoP", "stub-proof"); - - var response = await client.SendAsync(request); - var responseBody = await response.Content.ReadAsStringAsync(); - Assert.True(response.IsSuccessStatusCode, $"Expected success but got {(int)response.StatusCode}: {responseBody}"); - - var body = await response.Content.ReadFromJsonAsync<SignDsseResponseDto>(); - Assert.NotNull(body); - Assert.Equal("stub-subject", body!.Bundle.SigningIdentity.Subject); - Assert.Equal("stub-subject", body.Bundle.SigningIdentity.Issuer); - } - - [Fact] - public async Task SignDsse_ReturnsForbidden_WhenDigestUntrusted() - { - var client = CreateClient(); - var request = new HttpRequestMessage(HttpMethod.Post, "/api/v1/signer/sign/dsse") - { - Content = JsonContent.Create(new - { - subject = new[] - { - new - { - name = "pkg:npm/example", - digest = new Dictionary<string, string> { ["sha256"] = "4d5f" }, - }, - }, - predicateType = "https://in-toto.io/Statement/v0.1", - predicate = new { result = "pass" }, - scannerImageDigest = "sha256:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", - poe = new { format = "jwt", value = "valid-poe" }, - options = new { signingMode = "kms", expirySeconds = 600, returnBundle = "dsse+cert" }, - }) - }; - request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", "stub-token"); - request.Headers.Add("DPoP", "stub-proof"); - - var response = await client.SendAsync(request); - var problemJson = await response.Content.ReadAsStringAsync(); - Assert.Equal(HttpStatusCode.Forbidden, response.StatusCode); - - var problem = System.Text.Json.JsonSerializer.Deserialize<ProblemDetails>(problemJson, new System.Text.Json.JsonSerializerOptions - { - PropertyNameCaseInsensitive = true, - }); - Assert.NotNull(problem); - Assert.Equal("release_untrusted", problem!.Type); - } - - [Fact] - public async Task VerifyReferrers_ReturnsTrustedResult_WhenDigestIsKnown() - { - var client = CreateClient(); - var request = new HttpRequestMessage(HttpMethod.Get, $"/api/v1/signer/verify/referrers?digest={TrustedDigest}"); - request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", "stub-token"); - - var response = await client.SendAsync(request); - var responseBody = await response.Content.ReadAsStringAsync(); - Assert.True(response.IsSuccessStatusCode, $"Expected success but got {(int)response.StatusCode}: {responseBody}"); - - var body = await response.Content.ReadFromJsonAsync<VerifyReferrersResponseDto>(); - Assert.NotNull(body); - Assert.True(body!.Trusted); - } - - [Fact] - public async Task VerifyReferrers_ReturnsProblem_WhenDigestMissing() - { - var client = CreateClient(); - var request = new HttpRequestMessage(HttpMethod.Get, "/api/v1/signer/verify/referrers"); - request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", "stub-token"); - - var response = await client.SendAsync(request); - Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode); - } - - private HttpClient CreateClient() => _factory.CreateClient(); -} +using System.Collections.Generic; +using System.Net; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Net.Http.Json; +using System.Threading.Tasks; +using Microsoft.AspNetCore.Mvc; +using Microsoft.AspNetCore.Mvc.Testing; +using StellaOps.Signer.WebService.Contracts; +using Xunit; + +namespace StellaOps.Signer.Tests; + +public sealed class SignerEndpointsTests : IClassFixture<WebApplicationFactory<Program>> +{ + private readonly WebApplicationFactory<Program> _factory; + private const string 
TrustedDigest = "sha256:0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"; + + public SignerEndpointsTests(WebApplicationFactory<Program> factory) + { + _factory = factory; + } + + [Fact] + public async Task SignDsse_ReturnsBundle_WhenRequestValid() + { + var client = CreateClient(); + var request = new HttpRequestMessage(HttpMethod.Post, "/api/v1/signer/sign/dsse") + { + Content = JsonContent.Create(new + { + subject = new[] + { + new + { + name = "pkg:npm/example", + digest = new Dictionary<string, string> { ["sha256"] = "4d5f" }, + }, + }, + predicateType = "https://in-toto.io/Statement/v0.1", + predicate = new { result = "pass" }, + scannerImageDigest = TrustedDigest, + poe = new { format = "jwt", value = "valid-poe" }, + options = new { signingMode = "kms", expirySeconds = 600, returnBundle = "dsse+cert" }, + }) + }; + + request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", "stub-token"); + request.Headers.Add("DPoP", "stub-proof"); + + var response = await client.SendAsync(request); + var responseBody = await response.Content.ReadAsStringAsync(); + Assert.True(response.IsSuccessStatusCode, $"Expected success but got {(int)response.StatusCode}: {responseBody}"); + + var body = await response.Content.ReadFromJsonAsync<SignDsseResponseDto>(); + Assert.NotNull(body); + Assert.Equal("stub-subject", body!.Bundle.SigningIdentity.Subject); + Assert.Equal("stub-subject", body.Bundle.SigningIdentity.Issuer); + } + + [Fact] + public async Task SignDsse_ReturnsForbidden_WhenDigestUntrusted() + { + var client = CreateClient(); + var request = new HttpRequestMessage(HttpMethod.Post, "/api/v1/signer/sign/dsse") + { + Content = JsonContent.Create(new + { + subject = new[] + { + new + { + name = "pkg:npm/example", + digest = new Dictionary<string, string> { ["sha256"] = "4d5f" }, + }, + }, + predicateType = "https://in-toto.io/Statement/v0.1", + predicate = new { result = "pass" }, + scannerImageDigest = "sha256:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", + poe = new { format = "jwt", value = "valid-poe" }, + options = new { signingMode = "kms", expirySeconds = 600, returnBundle = "dsse+cert" }, + }) + }; + request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", "stub-token"); + request.Headers.Add("DPoP", "stub-proof"); + + var response = await client.SendAsync(request); + var problemJson = await response.Content.ReadAsStringAsync(); + Assert.Equal(HttpStatusCode.Forbidden, response.StatusCode); + + var problem = System.Text.Json.JsonSerializer.Deserialize<ProblemDetails>(problemJson, new System.Text.Json.JsonSerializerOptions + { + PropertyNameCaseInsensitive = true, + }); + Assert.NotNull(problem); + Assert.Equal("release_untrusted", problem!.Type); + } + + [Fact] + public async Task VerifyReferrers_ReturnsTrustedResult_WhenDigestIsKnown() + { + var client = CreateClient(); + var request = new HttpRequestMessage(HttpMethod.Get, $"/api/v1/signer/verify/referrers?digest={TrustedDigest}"); + request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", "stub-token"); + + var response = await client.SendAsync(request); + var responseBody = await response.Content.ReadAsStringAsync(); + Assert.True(response.IsSuccessStatusCode, $"Expected success but got {(int)response.StatusCode}: {responseBody}"); + + var body = await response.Content.ReadFromJsonAsync<VerifyReferrersResponseDto>(); + Assert.NotNull(body); + Assert.True(body!.Trusted); + } + + [Fact] + public async Task VerifyReferrers_ReturnsProblem_WhenDigestMissing() 
+ { + var client = CreateClient(); + var request = new HttpRequestMessage(HttpMethod.Get, "/api/v1/signer/verify/referrers"); + request.Headers.Authorization = new AuthenticationHeaderValue("Bearer", "stub-token"); + + var response = await client.SendAsync(request); + Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode); + } + + private HttpClient CreateClient() => _factory.CreateClient(); +} diff --git a/src/StellaOps.Signer/StellaOps.Signer.Tests/StellaOps.Signer.Tests.csproj b/src/Signer/StellaOps.Signer/StellaOps.Signer.Tests/StellaOps.Signer.Tests.csproj similarity index 74% rename from src/StellaOps.Signer/StellaOps.Signer.Tests/StellaOps.Signer.Tests.csproj rename to src/Signer/StellaOps.Signer/StellaOps.Signer.Tests/StellaOps.Signer.Tests.csproj index 7262701f..e1fc8cc5 100644 --- a/src/StellaOps.Signer/StellaOps.Signer.Tests/StellaOps.Signer.Tests.csproj +++ b/src/Signer/StellaOps.Signer/StellaOps.Signer.Tests/StellaOps.Signer.Tests.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -19,8 +20,8 @@ <ProjectReference Include="..\StellaOps.Signer.WebService\StellaOps.Signer.WebService.csproj" /> <ProjectReference Include="..\StellaOps.Signer.Infrastructure\StellaOps.Signer.Infrastructure.csproj" /> <ProjectReference Include="..\StellaOps.Signer.Core\StellaOps.Signer.Core.csproj" /> - <ProjectReference Include="..\..\StellaOps.Configuration\StellaOps.Configuration.csproj" /> - <ProjectReference Include="..\..\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj" /> - <ProjectReference Include="..\..\StellaOps.Cryptography\StellaOps.Cryptography.csproj" /> + <ProjectReference Include="../../../__Libraries/StellaOps.Configuration/StellaOps.Configuration.csproj" /> + <ProjectReference Include="../../../__Libraries/StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj" /> + <ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Signer/StellaOps.Signer.WebService/Contracts/SignDsseContracts.cs b/src/Signer/StellaOps.Signer/StellaOps.Signer.WebService/Contracts/SignDsseContracts.cs similarity index 100% rename from src/StellaOps.Signer/StellaOps.Signer.WebService/Contracts/SignDsseContracts.cs rename to src/Signer/StellaOps.Signer/StellaOps.Signer.WebService/Contracts/SignDsseContracts.cs diff --git a/src/StellaOps.Signer/StellaOps.Signer.WebService/Endpoints/SignerEndpoints.cs b/src/Signer/StellaOps.Signer/StellaOps.Signer.WebService/Endpoints/SignerEndpoints.cs similarity index 100% rename from src/StellaOps.Signer/StellaOps.Signer.WebService/Endpoints/SignerEndpoints.cs rename to src/Signer/StellaOps.Signer/StellaOps.Signer.WebService/Endpoints/SignerEndpoints.cs diff --git a/src/StellaOps.Signer/StellaOps.Signer.WebService/Program.cs b/src/Signer/StellaOps.Signer/StellaOps.Signer.WebService/Program.cs similarity index 100% rename from src/StellaOps.Signer/StellaOps.Signer.WebService/Program.cs rename to src/Signer/StellaOps.Signer/StellaOps.Signer.WebService/Program.cs diff --git a/src/StellaOps.Signer/StellaOps.Signer.WebService/Security/StubBearerAuthenticationDefaults.cs b/src/Signer/StellaOps.Signer/StellaOps.Signer.WebService/Security/StubBearerAuthenticationDefaults.cs similarity index 96% rename from src/StellaOps.Signer/StellaOps.Signer.WebService/Security/StubBearerAuthenticationDefaults.cs rename 
to src/Signer/StellaOps.Signer/StellaOps.Signer.WebService/Security/StubBearerAuthenticationDefaults.cs index 560272ef..5c0e4ee9 100644 --- a/src/StellaOps.Signer/StellaOps.Signer.WebService/Security/StubBearerAuthenticationDefaults.cs +++ b/src/Signer/StellaOps.Signer/StellaOps.Signer.WebService/Security/StubBearerAuthenticationDefaults.cs @@ -1,6 +1,6 @@ -namespace StellaOps.Signer.WebService.Security; - -public static class StubBearerAuthenticationDefaults -{ - public const string AuthenticationScheme = "StubBearer"; -} +namespace StellaOps.Signer.WebService.Security; + +public static class StubBearerAuthenticationDefaults +{ + public const string AuthenticationScheme = "StubBearer"; +} diff --git a/src/StellaOps.Signer/StellaOps.Signer.WebService/Security/StubBearerAuthenticationHandler.cs b/src/Signer/StellaOps.Signer/StellaOps.Signer.WebService/Security/StubBearerAuthenticationHandler.cs similarity index 97% rename from src/StellaOps.Signer/StellaOps.Signer.WebService/Security/StubBearerAuthenticationHandler.cs rename to src/Signer/StellaOps.Signer/StellaOps.Signer.WebService/Security/StubBearerAuthenticationHandler.cs index 82d339f9..f2e2b92f 100644 --- a/src/StellaOps.Signer/StellaOps.Signer.WebService/Security/StubBearerAuthenticationHandler.cs +++ b/src/Signer/StellaOps.Signer/StellaOps.Signer.WebService/Security/StubBearerAuthenticationHandler.cs @@ -1,55 +1,55 @@ -using System; -using System.Collections.Generic; -using System.Security.Claims; -using System.Text.Encodings.Web; -using System.Threading.Tasks; -using Microsoft.AspNetCore.Authentication; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Auth.Abstractions; - -namespace StellaOps.Signer.WebService.Security; - -public sealed class StubBearerAuthenticationHandler - : AuthenticationHandler<AuthenticationSchemeOptions> -{ - public StubBearerAuthenticationHandler( - IOptionsMonitor<AuthenticationSchemeOptions> options, - ILoggerFactory logger, - UrlEncoder encoder) - : base(options, logger, encoder) - { - } - - protected override Task<AuthenticateResult> HandleAuthenticateAsync() - { - var authorization = Request.Headers.Authorization.ToString(); - - if (string.IsNullOrWhiteSpace(authorization) || - !authorization.StartsWith("Bearer ", StringComparison.OrdinalIgnoreCase)) - { - return Task.FromResult(AuthenticateResult.Fail("Missing bearer token.")); - } - - var token = authorization.Substring("Bearer ".Length).Trim(); - if (token.Length == 0) - { - return Task.FromResult(AuthenticateResult.Fail("Bearer token is empty.")); - } - - var claims = new List<Claim> - { - new(ClaimTypes.NameIdentifier, "stub-subject"), - new(StellaOpsClaimTypes.Subject, "stub-subject"), - new(StellaOpsClaimTypes.Tenant, "stub-tenant"), - new(StellaOpsClaimTypes.Scope, "signer.sign"), - new(StellaOpsClaimTypes.ScopeItem, "signer.sign"), - new(StellaOpsClaimTypes.Audience, "signer"), - }; - - var identity = new ClaimsIdentity(claims, Scheme.Name); - var principal = new ClaimsPrincipal(identity); - var ticket = new AuthenticationTicket(principal, Scheme.Name); - return Task.FromResult(AuthenticateResult.Success(ticket)); - } -} +using System; +using System.Collections.Generic; +using System.Security.Claims; +using System.Text.Encodings.Web; +using System.Threading.Tasks; +using Microsoft.AspNetCore.Authentication; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Auth.Abstractions; + +namespace StellaOps.Signer.WebService.Security; + +public sealed class 
StubBearerAuthenticationHandler + : AuthenticationHandler<AuthenticationSchemeOptions> +{ + public StubBearerAuthenticationHandler( + IOptionsMonitor<AuthenticationSchemeOptions> options, + ILoggerFactory logger, + UrlEncoder encoder) + : base(options, logger, encoder) + { + } + + protected override Task<AuthenticateResult> HandleAuthenticateAsync() + { + var authorization = Request.Headers.Authorization.ToString(); + + if (string.IsNullOrWhiteSpace(authorization) || + !authorization.StartsWith("Bearer ", StringComparison.OrdinalIgnoreCase)) + { + return Task.FromResult(AuthenticateResult.Fail("Missing bearer token.")); + } + + var token = authorization.Substring("Bearer ".Length).Trim(); + if (token.Length == 0) + { + return Task.FromResult(AuthenticateResult.Fail("Bearer token is empty.")); + } + + var claims = new List<Claim> + { + new(ClaimTypes.NameIdentifier, "stub-subject"), + new(StellaOpsClaimTypes.Subject, "stub-subject"), + new(StellaOpsClaimTypes.Tenant, "stub-tenant"), + new(StellaOpsClaimTypes.Scope, "signer.sign"), + new(StellaOpsClaimTypes.ScopeItem, "signer.sign"), + new(StellaOpsClaimTypes.Audience, "signer"), + }; + + var identity = new ClaimsIdentity(claims, Scheme.Name); + var principal = new ClaimsPrincipal(identity); + var ticket = new AuthenticationTicket(principal, Scheme.Name); + return Task.FromResult(AuthenticateResult.Success(ticket)); + } +} diff --git a/src/StellaOps.Signer/StellaOps.Signer.WebService/StellaOps.Signer.WebService.csproj b/src/Signer/StellaOps.Signer/StellaOps.Signer.WebService/StellaOps.Signer.WebService.csproj similarity index 60% rename from src/StellaOps.Signer/StellaOps.Signer.WebService/StellaOps.Signer.WebService.csproj rename to src/Signer/StellaOps.Signer/StellaOps.Signer.WebService/StellaOps.Signer.WebService.csproj index b3d7759d..bc7c36ee 100644 --- a/src/StellaOps.Signer/StellaOps.Signer.WebService/StellaOps.Signer.WebService.csproj +++ b/src/Signer/StellaOps.Signer/StellaOps.Signer.WebService/StellaOps.Signer.WebService.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk.Web"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -19,11 +20,11 @@ <ItemGroup> <ProjectReference Include="..\StellaOps.Signer.Core\StellaOps.Signer.Core.csproj" /> <ProjectReference Include="..\StellaOps.Signer.Infrastructure\StellaOps.Signer.Infrastructure.csproj" /> - <ProjectReference Include="..\..\StellaOps.Configuration\StellaOps.Configuration.csproj" /> - <ProjectReference Include="..\..\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj" /> - <ProjectReference Include="..\..\StellaOps.Cryptography\StellaOps.Cryptography.csproj" /> - <ProjectReference Include="..\..\StellaOps.Authority\StellaOps.Auth.Abstractions\StellaOps.Auth.Abstractions.csproj" /> - <ProjectReference Include="..\..\StellaOps.Authority\StellaOps.Auth.Client\StellaOps.Auth.Client.csproj" /> - <ProjectReference Include="..\..\StellaOps.Authority\StellaOps.Auth.ServerIntegration\StellaOps.Auth.ServerIntegration.csproj" /> + <ProjectReference Include="../../../__Libraries/StellaOps.Configuration/StellaOps.Configuration.csproj" /> + <ProjectReference Include="../../../__Libraries/StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj" /> + <ProjectReference Include="../../../__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj" /> + <ProjectReference Include="../../../Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOps.Auth.Abstractions.csproj" /> + <ProjectReference 
Include="../../../Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOps.Auth.Client.csproj" /> + <ProjectReference Include="../../../Authority/StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOps.Auth.ServerIntegration.csproj" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Signer/StellaOps.Signer.sln b/src/Signer/StellaOps.Signer/StellaOps.Signer.sln similarity index 100% rename from src/StellaOps.Signer/StellaOps.Signer.sln rename to src/Signer/StellaOps.Signer/StellaOps.Signer.sln diff --git a/src/StellaOps.Signer/TASKS.md b/src/Signer/StellaOps.Signer/TASKS.md similarity index 94% rename from src/StellaOps.Signer/TASKS.md rename to src/Signer/StellaOps.Signer/TASKS.md index 67dbb780..2b3e03de 100644 --- a/src/StellaOps.Signer/TASKS.md +++ b/src/Signer/StellaOps.Signer/TASKS.md @@ -1,10 +1,10 @@ -# Signer Guild Task Board (UTC 2025-10-19) - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| SIGNER-API-11-101 | DONE (2025-10-21) | Signer Guild | — | `/sign/dsse` pipeline with Authority auth, PoE introspection, release verification, DSSE signing. | ✅ `POST /api/v1/signer/sign/dsse` enforces OpTok audience/scope, DPoP/mTLS binding, PoE introspection, and rejects untrusted scanner digests.<br>✅ Signing pipeline supports keyless (Fulcio) plus optional KMS modes, returning DSSE bundles + cert metadata; deterministic audits persisted.<br>✅ Regression coverage in `SignerEndpointsTests` (`dotnet test src/StellaOps.Signer/StellaOps.Signer.Tests/StellaOps.Signer.Tests.csproj`). | -| SIGNER-REF-11-102 | DONE (2025-10-21) | Signer Guild | — | `/verify/referrers` endpoint with OCI lookup, caching, and policy enforcement. | ✅ `GET /api/v1/signer/verify/referrers` validates trusted scanner digests via release verifier and surfaces signer metadata; JSON responses served deterministically.<br>✅ Integration tests cover trusted/untrusted digests and validation failures (`SignerEndpointsTests`). | -| SIGNER-QUOTA-11-103 | DONE (2025-10-21) | Signer Guild | — | Enforce plan quotas, concurrency/QPS limits, artifact size caps with metrics/audit logs. | ✅ Quota middleware derives plan limits from PoE claims, applies per-tenant concurrency/QPS/size caps, and surfaces remaining capacity in responses.<br>✅ Unit coverage exercises throttled/artifact-too-large paths via in-memory quota service. | - - -> Update status columns (TODO / DOING / DONE / BLOCKED) in tandem with code changes and associated tests. +# Signer Guild Task Board (UTC 2025-10-19) + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| SIGNER-API-11-101 | DONE (2025-10-21) | Signer Guild | — | `/sign/dsse` pipeline with Authority auth, PoE introspection, release verification, DSSE signing. | ✅ `POST /api/v1/signer/sign/dsse` enforces OpTok audience/scope, DPoP/mTLS binding, PoE introspection, and rejects untrusted scanner digests.<br>✅ Signing pipeline supports keyless (Fulcio) plus optional KMS modes, returning DSSE bundles + cert metadata; deterministic audits persisted.<br>✅ Regression coverage in `SignerEndpointsTests` (`dotnet test src/Signer/StellaOps.Signer/StellaOps.Signer.Tests/StellaOps.Signer.Tests.csproj`). | +| SIGNER-REF-11-102 | DONE (2025-10-21) | Signer Guild | — | `/verify/referrers` endpoint with OCI lookup, caching, and policy enforcement. 
| ✅ `GET /api/v1/signer/verify/referrers` validates trusted scanner digests via release verifier and surfaces signer metadata; JSON responses served deterministically.<br>✅ Integration tests cover trusted/untrusted digests and validation failures (`SignerEndpointsTests`). | +| SIGNER-QUOTA-11-103 | DONE (2025-10-21) | Signer Guild | — | Enforce plan quotas, concurrency/QPS limits, artifact size caps with metrics/audit logs. | ✅ Quota middleware derives plan limits from PoE claims, applies per-tenant concurrency/QPS/size caps, and surfaces remaining capacity in responses.<br>✅ Unit coverage exercises throttled/artifact-too-large paths via in-memory quota service. | + + +> Update status columns (TODO / DOING / DONE / BLOCKED) in tandem with code changes and associated tests. diff --git a/src/StellaOps.Aoc.Tests/StellaOps.Aoc.Tests.csproj b/src/StellaOps.Aoc.Tests/StellaOps.Aoc.Tests.csproj deleted file mode 100644 index 1c8328a3..00000000 --- a/src/StellaOps.Aoc.Tests/StellaOps.Aoc.Tests.csproj +++ /dev/null @@ -1,41 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - <OutputType>Exe</OutputType> - <IsPackable>false</IsPackable> - <UseConcelierTestInfra>false</UseConcelierTestInfra> - - <!-- To enable Microsoft.Testing.Platform, uncomment the following line. --> - <!-- <UseMicrosoftTestingPlatformRunner>true</UseMicrosoftTestingPlatformRunner> --> - <!-- Note: to use Microsoft.Testing.Platform correctly with dotnet test: --> - <!-- 1. You must add dotnet.config specifying the test runner to be Microsoft.Testing.Platform --> - <!-- 2. 
You must use .NET 10 SDK or later --> - <!-- For more information, see https://aka.ms/dotnet-test/mtp and https://xunit.net/docs/getting-started/v3/microsoft-testing-platform --> - <!-- To enable code coverage with Microsoft.Testing.Platform, add a package reference to Microsoft.Testing.Extensions.CodeCoverage --> - <!-- https://learn.microsoft.comdotnet/core/testing/microsoft-testing-platform-extensions-code-coverage --> - </PropertyGroup> - - <ItemGroup> - <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1" /> - <PackageReference Include="xunit.v3" Version="3.0.0" /> - <PackageReference Include="xunit.runner.visualstudio" Version="3.1.3" /> - </ItemGroup> - - <ItemGroup> - <Content Include="xunit.runner.json" CopyToOutputDirectory="PreserveNewest" /> - </ItemGroup> - - <ItemGroup> - <Using Include="Xunit" /> - </ItemGroup> - - <ItemGroup> - <ProjectReference Include="..\StellaOps.Aoc\StellaOps.Aoc.csproj" /> - </ItemGroup> - -</Project> diff --git a/src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/StellaOps.Bench.ScannerAnalyzers.csproj b/src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/StellaOps.Bench.ScannerAnalyzers.csproj deleted file mode 100644 index 0870250e..00000000 --- a/src/StellaOps.Bench/Scanner.Analyzers/StellaOps.Bench.ScannerAnalyzers/StellaOps.Bench.ScannerAnalyzers.csproj +++ /dev/null @@ -1,23 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <OutputType>Exe</OutputType> - <TargetFramework>net10.0</TargetFramework> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - <LangVersion>preview</LangVersion> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - - <ItemGroup> - <ProjectReference Include="..\..\..\src\StellaOps.Scanner.Analyzers.Lang\StellaOps.Scanner.Analyzers.Lang.csproj" /> - <ProjectReference Include="..\..\..\src\StellaOps.Scanner.Analyzers.Lang.Go\StellaOps.Scanner.Analyzers.Lang.Go.csproj" /> - <ProjectReference Include="..\..\..\src\StellaOps.Scanner.Analyzers.Lang.Node\StellaOps.Scanner.Analyzers.Lang.Node.csproj" /> - <ProjectReference Include="..\..\..\src\StellaOps.Scanner.Analyzers.Lang.Java\StellaOps.Scanner.Analyzers.Lang.Java.csproj" /> - <ProjectReference Include="..\..\..\src\StellaOps.Scanner.Analyzers.Lang.DotNet\StellaOps.Scanner.Analyzers.Lang.DotNet.csproj" /> - <ProjectReference Include="..\..\..\src\StellaOps.Scanner.Analyzers.Lang.Python\StellaOps.Scanner.Analyzers.Lang.Python.csproj" /> - </ItemGroup> - - <ItemGroup> - <InternalsVisibleTo Include="StellaOps.Bench.ScannerAnalyzers.Tests" /> - </ItemGroup> -</Project> diff --git a/src/StellaOps.Cartographer/StellaOps.Cartographer.csproj b/src/StellaOps.Cartographer/StellaOps.Cartographer.csproj deleted file mode 100644 index 79f19749..00000000 --- a/src/StellaOps.Cartographer/StellaOps.Cartographer.csproj +++ /dev/null @@ -1,17 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk.Web"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <LangVersion>preview</LangVersion> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - <AspNetCoreHostingModel>InProcess</AspNetCoreHostingModel> - </PropertyGroup> - - <ItemGroup> - <ProjectReference Include="..\StellaOps.Configuration\StellaOps.Configuration.csproj" /> - <ProjectReference Include="..\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj" /> - <ProjectReference Include="..\StellaOps.Policy.Engine\StellaOps.Policy.Engine.csproj" /> - 
<ProjectReference Include="..\StellaOps.Authority\StellaOps.Auth.Abstractions\StellaOps.Auth.Abstractions.csproj" /> - </ItemGroup> -</Project> diff --git a/src/StellaOps.Cli.Tests/StellaOps.Cli.Tests.csproj b/src/StellaOps.Cli.Tests/StellaOps.Cli.Tests.csproj deleted file mode 100644 index ba54eb41..00000000 --- a/src/StellaOps.Cli.Tests/StellaOps.Cli.Tests.csproj +++ /dev/null @@ -1,29 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <IsPackable>false</IsPackable> - - <!-- To enable Microsoft.Testing.Platform, uncomment the following line. --> - <!-- <UseMicrosoftTestingPlatformRunner>true</UseMicrosoftTestingPlatformRunner> --> - <!-- Note: to use Microsoft.Testing.Platform correctly with dotnet test: --> - <!-- 1. You must add dotnet.config specifying the test runner to be Microsoft.Testing.Platform --> - <!-- 2. You must use .NET 10 SDK or later --> - <!-- For more information, see https://aka.ms/dotnet-test/mtp and https://xunit.net/docs/getting-started/v3/microsoft-testing-platform --> - <!-- To enable code coverage with Microsoft.Testing.Platform, add a package reference to Microsoft.Testing.Extensions.CodeCoverage --> - <!-- https://learn.microsoft.comdotnet/core/testing/microsoft-testing-platform-extensions-code-coverage --> - </PropertyGroup> - - <ItemGroup> - <Using Include="Xunit" /> - </ItemGroup> - - <ItemGroup> - <PackageReference Include="Spectre.Console.Testing" Version="0.48.0" /> - <ProjectReference Include="..\StellaOps.Cli\StellaOps.Cli.csproj" /> - <ProjectReference Include="..\StellaOps.Configuration\StellaOps.Configuration.csproj" /> - </ItemGroup> - -</Project> diff --git a/src/StellaOps.Concelier.Connector.Acsc.Tests/StellaOps.Concelier.Connector.Acsc.Tests.csproj b/src/StellaOps.Concelier.Connector.Acsc.Tests/StellaOps.Concelier.Connector.Acsc.Tests.csproj deleted file mode 100644 index d843b146..00000000 --- a/src/StellaOps.Concelier.Connector.Acsc.Tests/StellaOps.Concelier.Connector.Acsc.Tests.csproj +++ /dev/null @@ -1,19 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - - <ItemGroup> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Acsc/StellaOps.Concelier.Connector.Acsc.csproj" /> - </ItemGroup> - - <ItemGroup> - <None Include="Acsc/Fixtures/**" CopyToOutputDirectory="Always" /> - </ItemGroup> -</Project> diff --git a/src/StellaOps.Concelier.Connector.CertFr.Tests/StellaOps.Concelier.Connector.CertFr.Tests.csproj b/src/StellaOps.Concelier.Connector.CertFr.Tests/StellaOps.Concelier.Connector.CertFr.Tests.csproj deleted file mode 100644 index bc08c36d..00000000 --- a/src/StellaOps.Concelier.Connector.CertFr.Tests/StellaOps.Concelier.Connector.CertFr.Tests.csproj +++ /dev/null @@ -1,16 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - 
<Nullable>enable</Nullable> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.CertFr/StellaOps.Concelier.Connector.CertFr.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - </ItemGroup> - <ItemGroup> - <None Include="CertFr/Fixtures/**" CopyToOutputDirectory="Always" /> - </ItemGroup> -</Project> diff --git a/src/StellaOps.Concelier.Connector.CertIn.Tests/StellaOps.Concelier.Connector.CertIn.Tests.csproj b/src/StellaOps.Concelier.Connector.CertIn.Tests/StellaOps.Concelier.Connector.CertIn.Tests.csproj deleted file mode 100644 index 8a9c9b42..00000000 --- a/src/StellaOps.Concelier.Connector.CertIn.Tests/StellaOps.Concelier.Connector.CertIn.Tests.csproj +++ /dev/null @@ -1,16 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.CertIn/StellaOps.Concelier.Connector.CertIn.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - </ItemGroup> - <ItemGroup> - <None Include="CertIn/Fixtures/**" CopyToOutputDirectory="Always" /> - </ItemGroup> -</Project> diff --git a/src/StellaOps.Concelier.Connector.Cve.Tests/StellaOps.Concelier.Connector.Cve.Tests.csproj b/src/StellaOps.Concelier.Connector.Cve.Tests/StellaOps.Concelier.Connector.Cve.Tests.csproj deleted file mode 100644 index 069ddfc0..00000000 --- a/src/StellaOps.Concelier.Connector.Cve.Tests/StellaOps.Concelier.Connector.Cve.Tests.csproj +++ /dev/null @@ -1,17 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Cve/StellaOps.Concelier.Connector.Cve.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" /> - </ItemGroup> - <ItemGroup> - <None Include="Fixtures/*.json" CopyToOutputDirectory="Always" /> - </ItemGroup> -</Project> diff --git a/src/StellaOps.Concelier.Connector.Distro.Debian.Tests/StellaOps.Concelier.Connector.Distro.Debian.Tests.csproj b/src/StellaOps.Concelier.Connector.Distro.Debian.Tests/StellaOps.Concelier.Connector.Distro.Debian.Tests.csproj deleted file mode 100644 index 373f2b25..00000000 --- a/src/StellaOps.Concelier.Connector.Distro.Debian.Tests/StellaOps.Concelier.Connector.Distro.Debian.Tests.csproj +++ /dev/null @@ -1,13 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - 
<TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Distro.Debian/StellaOps.Concelier.Connector.Distro.Debian.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - </ItemGroup> -</Project> diff --git a/src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests.csproj b/src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests.csproj deleted file mode 100644 index 591ae8de..00000000 --- a/src/StellaOps.Concelier.Connector.Distro.RedHat.Tests/StellaOps.Concelier.Connector.Distro.RedHat.Tests.csproj +++ /dev/null @@ -1,18 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Distro.RedHat/StellaOps.Concelier.Connector.Distro.RedHat.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - </ItemGroup> - <ItemGroup> - <None Include="RedHat/Fixtures/*.json" - CopyToOutputDirectory="Always" - TargetPath="Source/Distro/RedHat/Fixtures/%(Filename)%(Extension)" /> - </ItemGroup> -</Project> diff --git a/src/StellaOps.Concelier.Connector.Distro.Suse.Tests/StellaOps.Concelier.Connector.Distro.Suse.Tests.csproj b/src/StellaOps.Concelier.Connector.Distro.Suse.Tests/StellaOps.Concelier.Connector.Distro.Suse.Tests.csproj deleted file mode 100644 index 34253211..00000000 --- a/src/StellaOps.Concelier.Connector.Distro.Suse.Tests/StellaOps.Concelier.Connector.Distro.Suse.Tests.csproj +++ /dev/null @@ -1,18 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="../StellaOps.Concelier.Connector.Distro.Suse/StellaOps.Concelier.Connector.Distro.Suse.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - </ItemGroup> - <ItemGroup> - <None Update="Source\Distro\Suse\Fixtures\**\*"> - <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> - </None> - </ItemGroup> -</Project> diff --git a/src/StellaOps.Concelier.Connector.Distro.Ubuntu.Tests/StellaOps.Concelier.Connector.Distro.Ubuntu.Tests.csproj b/src/StellaOps.Concelier.Connector.Distro.Ubuntu.Tests/StellaOps.Concelier.Connector.Distro.Ubuntu.Tests.csproj deleted file mode 100644 index db8d3994..00000000 --- 
a/src/StellaOps.Concelier.Connector.Distro.Ubuntu.Tests/StellaOps.Concelier.Connector.Distro.Ubuntu.Tests.csproj +++ /dev/null @@ -1,18 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="../StellaOps.Concelier.Connector.Distro.Ubuntu/StellaOps.Concelier.Connector.Distro.Ubuntu.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - </ItemGroup> - <ItemGroup> - <None Update="Fixtures\**\*"> - <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> - </None> - </ItemGroup> -</Project> diff --git a/src/StellaOps.Concelier.Connector.Ghsa.Tests/StellaOps.Concelier.Connector.Ghsa.Tests.csproj b/src/StellaOps.Concelier.Connector.Ghsa.Tests/StellaOps.Concelier.Connector.Ghsa.Tests.csproj deleted file mode 100644 index dfb9dd10..00000000 --- a/src/StellaOps.Concelier.Connector.Ghsa.Tests/StellaOps.Concelier.Connector.Ghsa.Tests.csproj +++ /dev/null @@ -1,17 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="../StellaOps.Concelier.Connector.Ghsa/StellaOps.Concelier.Connector.Ghsa.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" /> - </ItemGroup> - <ItemGroup> - <None Include="Fixtures/*.json" CopyToOutputDirectory="Always" /> - </ItemGroup> -</Project> diff --git a/src/StellaOps.Concelier.Connector.Ics.Cisa.Tests/StellaOps.Concelier.Connector.Ics.Cisa.Tests.csproj b/src/StellaOps.Concelier.Connector.Ics.Cisa.Tests/StellaOps.Concelier.Connector.Ics.Cisa.Tests.csproj deleted file mode 100644 index 96934e35..00000000 --- a/src/StellaOps.Concelier.Connector.Ics.Cisa.Tests/StellaOps.Concelier.Connector.Ics.Cisa.Tests.csproj +++ /dev/null @@ -1,16 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Ics.Cisa/StellaOps.Concelier.Connector.Ics.Cisa.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - </ItemGroup> - <ItemGroup> - <None Include="IcsCisa/Fixtures/**" CopyToOutputDirectory="Always" /> - </ItemGroup> -</Project> diff --git a/src/StellaOps.Concelier.Connector.Ics.Kaspersky.Tests/StellaOps.Concelier.Connector.Ics.Kaspersky.Tests.csproj 
b/src/StellaOps.Concelier.Connector.Ics.Kaspersky.Tests/StellaOps.Concelier.Connector.Ics.Kaspersky.Tests.csproj deleted file mode 100644 index fcb1a72c..00000000 --- a/src/StellaOps.Concelier.Connector.Ics.Kaspersky.Tests/StellaOps.Concelier.Connector.Ics.Kaspersky.Tests.csproj +++ /dev/null @@ -1,16 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Ics.Kaspersky/StellaOps.Concelier.Connector.Ics.Kaspersky.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - </ItemGroup> - <ItemGroup> - <None Include="Kaspersky/Fixtures/**" CopyToOutputDirectory="Always" /> - </ItemGroup> -</Project> diff --git a/src/StellaOps.Concelier.Connector.Jvn.Tests/StellaOps.Concelier.Connector.Jvn.Tests.csproj b/src/StellaOps.Concelier.Connector.Jvn.Tests/StellaOps.Concelier.Connector.Jvn.Tests.csproj deleted file mode 100644 index 69a9f212..00000000 --- a/src/StellaOps.Concelier.Connector.Jvn.Tests/StellaOps.Concelier.Connector.Jvn.Tests.csproj +++ /dev/null @@ -1,16 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Jvn/StellaOps.Concelier.Connector.Jvn.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - </ItemGroup> - <ItemGroup> - <None Include="Jvn/Fixtures/**" CopyToOutputDirectory="Always" /> - </ItemGroup> -</Project> diff --git a/src/StellaOps.Concelier.Connector.Kev.Tests/StellaOps.Concelier.Connector.Kev.Tests.csproj b/src/StellaOps.Concelier.Connector.Kev.Tests/StellaOps.Concelier.Connector.Kev.Tests.csproj deleted file mode 100644 index 1fbd9bbc..00000000 --- a/src/StellaOps.Concelier.Connector.Kev.Tests/StellaOps.Concelier.Connector.Kev.Tests.csproj +++ /dev/null @@ -1,19 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - - <ItemGroup> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Kev/StellaOps.Concelier.Connector.Kev.csproj" /> - </ItemGroup> - - <ItemGroup> - <None Include="Kev/Fixtures/**" CopyToOutputDirectory="Always" /> - </ItemGroup> -</Project> diff --git 
a/src/StellaOps.Concelier.Connector.Nvd.Tests/StellaOps.Concelier.Connector.Nvd.Tests.csproj b/src/StellaOps.Concelier.Connector.Nvd.Tests/StellaOps.Concelier.Connector.Nvd.Tests.csproj deleted file mode 100644 index 960ed780..00000000 --- a/src/StellaOps.Concelier.Connector.Nvd.Tests/StellaOps.Concelier.Connector.Nvd.Tests.csproj +++ /dev/null @@ -1,18 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="../StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Exporter.Json/StellaOps.Concelier.Exporter.Json.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Nvd/StellaOps.Concelier.Connector.Nvd.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - </ItemGroup> - <ItemGroup> - <None Include="Nvd/Fixtures/*.json" CopyToOutputDirectory="Always" /> - </ItemGroup> -</Project> diff --git a/src/StellaOps.Concelier.Connector.Osv.Tests/StellaOps.Concelier.Connector.Osv.Tests.csproj b/src/StellaOps.Concelier.Connector.Osv.Tests/StellaOps.Concelier.Connector.Osv.Tests.csproj deleted file mode 100644 index 6d8232bd..00000000 --- a/src/StellaOps.Concelier.Connector.Osv.Tests/StellaOps.Concelier.Connector.Osv.Tests.csproj +++ /dev/null @@ -1,18 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Osv/StellaOps.Concelier.Connector.Osv.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - </ItemGroup> - <ItemGroup> - <None Update="Fixtures\*.json"> - <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> - </None> - </ItemGroup> -</Project> diff --git a/src/StellaOps.Concelier.Connector.Ru.Bdu.Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests.csproj b/src/StellaOps.Concelier.Connector.Ru.Bdu.Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests.csproj deleted file mode 100644 index 8cb9bc6d..00000000 --- a/src/StellaOps.Concelier.Connector.Ru.Bdu.Tests/StellaOps.Concelier.Connector.Ru.Bdu.Tests.csproj +++ /dev/null @@ -1,13 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - - <ItemGroup> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Ru.Bdu/StellaOps.Concelier.Connector.Ru.Bdu.csproj" /> - </ItemGroup> -</Project> diff --git 
a/src/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/StellaOps.Concelier.Connector.Ru.Nkcki.Tests.csproj b/src/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/StellaOps.Concelier.Connector.Ru.Nkcki.Tests.csproj deleted file mode 100644 index ba7c8b8c..00000000 --- a/src/StellaOps.Concelier.Connector.Ru.Nkcki.Tests/StellaOps.Concelier.Connector.Ru.Nkcki.Tests.csproj +++ /dev/null @@ -1,13 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - - <ItemGroup> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Ru.Nkcki/StellaOps.Concelier.Connector.Ru.Nkcki.csproj" /> - </ItemGroup> -</Project> diff --git a/src/StellaOps.Concelier.Connector.Vndr.Adobe.Tests/StellaOps.Concelier.Connector.Vndr.Adobe.Tests.csproj b/src/StellaOps.Concelier.Connector.Vndr.Adobe.Tests/StellaOps.Concelier.Connector.Vndr.Adobe.Tests.csproj deleted file mode 100644 index 801f1a05..00000000 --- a/src/StellaOps.Concelier.Connector.Vndr.Adobe.Tests/StellaOps.Concelier.Connector.Vndr.Adobe.Tests.csproj +++ /dev/null @@ -1,17 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Vndr.Adobe/StellaOps.Concelier.Connector.Vndr.Adobe.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - </ItemGroup> - <ItemGroup> - <None Include="Adobe/Fixtures/*.html" CopyToOutputDirectory="Always" TargetPath="Source/Vndr/Adobe/Fixtures/%(Filename)%(Extension)" /> - <None Include="Adobe/Fixtures/*.json" CopyToOutputDirectory="Always" TargetPath="Source/Vndr/Adobe/Fixtures/%(Filename)%(Extension)" /> - </ItemGroup> -</Project> diff --git a/src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests.csproj b/src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests.csproj deleted file mode 100644 index 629b25a2..00000000 --- a/src/StellaOps.Concelier.Connector.Vndr.Apple.Tests/StellaOps.Concelier.Connector.Vndr.Apple.Tests.csproj +++ /dev/null @@ -1,18 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Vndr.Apple/StellaOps.Concelier.Connector.Vndr.Apple.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" /> - </ItemGroup> - <ItemGroup> 
- <None Include="Apple/Fixtures/*.html" CopyToOutputDirectory="Always" TargetPath="Source/Vndr/Apple/Fixtures/%(Filename)%(Extension)" /> - <None Include="Apple/Fixtures/*.json" CopyToOutputDirectory="Always" TargetPath="Source/Vndr/Apple/Fixtures/%(Filename)%(Extension)" /> - </ItemGroup> -</Project> diff --git a/src/StellaOps.Concelier.Connector.Vndr.Chromium.Tests/StellaOps.Concelier.Connector.Vndr.Chromium.Tests.csproj b/src/StellaOps.Concelier.Connector.Vndr.Chromium.Tests/StellaOps.Concelier.Connector.Vndr.Chromium.Tests.csproj deleted file mode 100644 index 2a091c7a..00000000 --- a/src/StellaOps.Concelier.Connector.Vndr.Chromium.Tests/StellaOps.Concelier.Connector.Vndr.Chromium.Tests.csproj +++ /dev/null @@ -1,18 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Vndr.Chromium/StellaOps.Concelier.Connector.Vndr.Chromium.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - </ItemGroup> - <ItemGroup> - <None Include="Chromium/Fixtures/*.html" CopyToOutputDirectory="Always" /> - <None Include="Chromium/Fixtures/*.xml" CopyToOutputDirectory="Always" /> - <None Include="Chromium/Fixtures/*.json" CopyToOutputDirectory="Always" /> - </ItemGroup> -</Project> diff --git a/src/StellaOps.Concelier.Connector.Vndr.Cisco.Tests/StellaOps.Concelier.Connector.Vndr.Cisco.Tests.csproj b/src/StellaOps.Concelier.Connector.Vndr.Cisco.Tests/StellaOps.Concelier.Connector.Vndr.Cisco.Tests.csproj deleted file mode 100644 index a45633fe..00000000 --- a/src/StellaOps.Concelier.Connector.Vndr.Cisco.Tests/StellaOps.Concelier.Connector.Vndr.Cisco.Tests.csproj +++ /dev/null @@ -1,17 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - - <ItemGroup> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Vndr.Cisco/StellaOps.Concelier.Connector.Vndr.Cisco.csproj" /> - </ItemGroup> - - <ItemGroup> - <PackageReference Include="FluentAssertions" Version="6.12.0" /> - </ItemGroup> -</Project> diff --git a/src/StellaOps.Concelier.Connector.Vndr.Msrc.Tests/StellaOps.Concelier.Connector.Vndr.Msrc.Tests.csproj b/src/StellaOps.Concelier.Connector.Vndr.Msrc.Tests/StellaOps.Concelier.Connector.Vndr.Msrc.Tests.csproj deleted file mode 100644 index 78191fde..00000000 --- a/src/StellaOps.Concelier.Connector.Vndr.Msrc.Tests/StellaOps.Concelier.Connector.Vndr.Msrc.Tests.csproj +++ /dev/null @@ -1,24 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - - <ItemGroup> - <ProjectReference Include="../StellaOps.Concelier.Connector.Vndr.Msrc/StellaOps.Concelier.Connector.Vndr.Msrc.csproj" /> - <ProjectReference 
Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" /> - </ItemGroup> - - <ItemGroup> - <PackageReference Include="FluentAssertions" Version="6.12.0" /> - </ItemGroup> - - <ItemGroup> - <None Update="Fixtures\*.json"> - <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> - </None> - </ItemGroup> -</Project> diff --git a/src/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/StellaOps.Concelier.Connector.Vndr.Oracle.Tests.csproj b/src/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/StellaOps.Concelier.Connector.Vndr.Oracle.Tests.csproj deleted file mode 100644 index 0487a6c6..00000000 --- a/src/StellaOps.Concelier.Connector.Vndr.Oracle.Tests/StellaOps.Concelier.Connector.Vndr.Oracle.Tests.csproj +++ /dev/null @@ -1,17 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Vndr.Oracle/StellaOps.Concelier.Connector.Vndr.Oracle.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - </ItemGroup> - <ItemGroup> - <None Include="Oracle/Fixtures/**/*.json" CopyToOutputDirectory="Always" /> - <None Include="Oracle/Fixtures/**/*.html" CopyToOutputDirectory="Always" /> - </ItemGroup> -</Project> diff --git a/src/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/StellaOps.Concelier.Connector.Vndr.Vmware.Tests.csproj b/src/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/StellaOps.Concelier.Connector.Vndr.Vmware.Tests.csproj deleted file mode 100644 index da4707d3..00000000 --- a/src/StellaOps.Concelier.Connector.Vndr.Vmware.Tests/StellaOps.Concelier.Connector.Vndr.Vmware.Tests.csproj +++ /dev/null @@ -1,18 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Connector.Vndr.Vmware/StellaOps.Concelier.Connector.Vndr.Vmware.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - </ItemGroup> - <ItemGroup> - <None Update="Vmware/Fixtures/*.json"> - <CopyToOutputDirectory>PreserveNewest</CopyToOutputDirectory> - </None> - </ItemGroup> -</Project> diff --git a/src/StellaOps.Concelier.Core.Tests/StellaOps.Concelier.Core.Tests.csproj b/src/StellaOps.Concelier.Core.Tests/StellaOps.Concelier.Core.Tests.csproj deleted file mode 100644 index ffff2055..00000000 --- a/src/StellaOps.Concelier.Core.Tests/StellaOps.Concelier.Core.Tests.csproj +++ /dev/null @@ -1,12 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - 
<ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="../StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.RawModels/StellaOps.Concelier.RawModels.csproj" /> - <ProjectReference Include="../StellaOps.Aoc/StellaOps.Aoc.csproj" /> - </ItemGroup> -</Project> diff --git a/src/StellaOps.Concelier.Exporter.Json.Tests/StellaOps.Concelier.Exporter.Json.Tests.csproj b/src/StellaOps.Concelier.Exporter.Json.Tests/StellaOps.Concelier.Exporter.Json.Tests.csproj deleted file mode 100644 index 12cef41d..00000000 --- a/src/StellaOps.Concelier.Exporter.Json.Tests/StellaOps.Concelier.Exporter.Json.Tests.csproj +++ /dev/null @@ -1,13 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="../StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Exporter.Json/StellaOps.Concelier.Exporter.Json.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - </ItemGroup> -</Project> diff --git a/src/StellaOps.Concelier.Exporter.TrivyDb.Tests/StellaOps.Concelier.Exporter.TrivyDb.Tests.csproj b/src/StellaOps.Concelier.Exporter.TrivyDb.Tests/StellaOps.Concelier.Exporter.TrivyDb.Tests.csproj deleted file mode 100644 index a138b6ef..00000000 --- a/src/StellaOps.Concelier.Exporter.TrivyDb.Tests/StellaOps.Concelier.Exporter.TrivyDb.Tests.csproj +++ /dev/null @@ -1,13 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="../StellaOps.Concelier.Exporter.Json/StellaOps.Concelier.Exporter.Json.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Exporter.TrivyDb/StellaOps.Concelier.Exporter.TrivyDb.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - </ItemGroup> -</Project> diff --git a/src/StellaOps.Concelier.Merge.Tests/StellaOps.Concelier.Merge.Tests.csproj b/src/StellaOps.Concelier.Merge.Tests/StellaOps.Concelier.Merge.Tests.csproj deleted file mode 100644 index fb330b60..00000000 --- a/src/StellaOps.Concelier.Merge.Tests/StellaOps.Concelier.Merge.Tests.csproj +++ /dev/null @@ -1,13 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="../StellaOps.Concelier.Merge/StellaOps.Concelier.Merge.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - </ItemGroup> -</Project> diff --git a/src/StellaOps.Concelier.Normalization.Tests/StellaOps.Concelier.Normalization.Tests.csproj 
b/src/StellaOps.Concelier.Normalization.Tests/StellaOps.Concelier.Normalization.Tests.csproj deleted file mode 100644 index 3b6886da..00000000 --- a/src/StellaOps.Concelier.Normalization.Tests/StellaOps.Concelier.Normalization.Tests.csproj +++ /dev/null @@ -1,11 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Normalization/StellaOps.Concelier.Normalization.csproj" /> - </ItemGroup> -</Project> diff --git a/src/StellaOps.Concelier.Storage.Mongo.Tests/StellaOps.Concelier.Storage.Mongo.Tests.csproj b/src/StellaOps.Concelier.Storage.Mongo.Tests/StellaOps.Concelier.Storage.Mongo.Tests.csproj deleted file mode 100644 index aca49cb6..00000000 --- a/src/StellaOps.Concelier.Storage.Mongo.Tests/StellaOps.Concelier.Storage.Mongo.Tests.csproj +++ /dev/null @@ -1,15 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - <ItemGroup> - <PackageReference Update="Microsoft.Extensions.TimeProvider.Testing" Version="9.10.0" /> - </ItemGroup> - <ItemGroup> - <ProjectReference Include="../StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - </ItemGroup> -</Project> diff --git a/src/StellaOps.Concelier.WebService.Tests/StellaOps.Concelier.WebService.Tests.csproj b/src/StellaOps.Concelier.WebService.Tests/StellaOps.Concelier.WebService.Tests.csproj deleted file mode 100644 index 796eac59..00000000 --- a/src/StellaOps.Concelier.WebService.Tests/StellaOps.Concelier.WebService.Tests.csproj +++ /dev/null @@ -1,13 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="../StellaOps.Concelier.Core/StellaOps.Concelier.Core.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - <ProjectReference Include="../StellaOps.Concelier.WebService/StellaOps.Concelier.WebService.csproj" /> - <ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" /> - </ItemGroup> -</Project> diff --git a/src/StellaOps.Concelier.WebService/StellaOps.Concelier.WebService.csproj b/src/StellaOps.Concelier.WebService/StellaOps.Concelier.WebService.csproj deleted file mode 100644 index fe134f86..00000000 --- a/src/StellaOps.Concelier.WebService/StellaOps.Concelier.WebService.csproj +++ /dev/null @@ -1,37 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk.Web"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - <RootNamespace>StellaOps.Concelier.WebService</RootNamespace> - </PropertyGroup> - <ItemGroup> - <PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="OpenTelemetry.Exporter.Console" 
Version="1.12.0" /> - <PackageReference Include="OpenTelemetry.Exporter.OpenTelemetryProtocol" Version="1.12.0" /> - <PackageReference Include="OpenTelemetry.Extensions.Hosting" Version="1.12.0" /> - <PackageReference Include="OpenTelemetry.Instrumentation.AspNetCore" Version="1.12.0" /> - <PackageReference Include="OpenTelemetry.Instrumentation.Http" Version="1.12.0" /> - <PackageReference Include="OpenTelemetry.Instrumentation.Process" Version="1.12.0-beta.1" /> - <PackageReference Include="OpenTelemetry.Instrumentation.Runtime" Version="1.12.0" /> - <PackageReference Include="Serilog.AspNetCore" Version="8.0.1" /> - <PackageReference Include="Serilog.Sinks.Console" Version="5.0.1" /> - <PackageReference Include="YamlDotNet" Version="13.7.1" /> - </ItemGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Concelier.Core\StellaOps.Concelier.Core.csproj" /> - <ProjectReference Include="..\StellaOps.Concelier.Storage.Mongo\StellaOps.Concelier.Storage.Mongo.csproj" /> - <ProjectReference Include="..\StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="..\StellaOps.Concelier.Connector.Common\StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="..\StellaOps.Concelier.Merge\StellaOps.Concelier.Merge.csproj" /> - <ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" /> - <ProjectReference Include="../StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj" /> - <ProjectReference Include="..\StellaOps.Configuration\StellaOps.Configuration.csproj" /> - <ProjectReference Include="..\StellaOps.Authority\StellaOps.Auth.Abstractions\StellaOps.Auth.Abstractions.csproj" /> - <ProjectReference Include="..\StellaOps.Authority\StellaOps.Auth.Client\StellaOps.Auth.Client.csproj" /> - <ProjectReference Include="..\StellaOps.Authority\StellaOps.Auth.ServerIntegration\StellaOps.Auth.ServerIntegration.csproj" /> - <ProjectReference Include="..\StellaOps.Aoc\StellaOps.Aoc.csproj" /> - </ItemGroup> -</Project> diff --git a/src/StellaOps.Concelier.sln b/src/StellaOps.Concelier.sln deleted file mode 100644 index ae427cc7..00000000 --- a/src/StellaOps.Concelier.sln +++ /dev/null @@ -1,1000 +0,0 @@ - -Microsoft Visual Studio Solution File, Format Version 12.00 -# Visual Studio Version 17 -VisualStudioVersion = 17.0.31903.59 -MinimumVisualStudioVersion = 10.0.40219.1 -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Acsc", "StellaOps.Concelier.Connector.Acsc\StellaOps.Concelier.Connector.Acsc.csproj", "{CFD7B267-46B7-4C73-A33A-3E82AD2CFABC}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Common", "StellaOps.Concelier.Connector.Common\StellaOps.Concelier.Connector.Common.csproj", "{E9DE840D-0760-4324-98E2-7F2CBE06DC1A}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Models", "StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj", "{061B0042-9A6C-4CFD-9E48-4D3F3B924442}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Ics.Cisa", "StellaOps.Concelier.Connector.Ics.Cisa\StellaOps.Concelier.Connector.Ics.Cisa.csproj", "{6A301F32-2EEE-491B-9DB9-3BF26D032F07}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Core", "StellaOps.Concelier.Core\StellaOps.Concelier.Core.csproj", "{AFCCC916-58E8-4676-AABB-54B04CEA3392}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = 
"StellaOps.Concelier.Storage.Mongo", "StellaOps.Concelier.Storage.Mongo\StellaOps.Concelier.Storage.Mongo.csproj", "{BF3DAB2F-E46E-49C1-9BA5-AA389763A632}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Normalization", "StellaOps.Concelier.Normalization\StellaOps.Concelier.Normalization.csproj", "{429BAA6A-706D-489A-846F-4B0EF1B15121}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Merge", "StellaOps.Concelier.Merge\StellaOps.Concelier.Merge.csproj", "{085CEC8E-0E10-48E8-89E2-9452CD2E7FA0}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Exporter.Json", "StellaOps.Concelier.Exporter.Json\StellaOps.Concelier.Exporter.Json.csproj", "{1C5506B8-C01B-4419-B888-A48F441E0C69}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Exporter.TrivyDb", "StellaOps.Concelier.Exporter.TrivyDb\StellaOps.Concelier.Exporter.TrivyDb.csproj", "{4D936BC4-5520-4642-A237-4106E97BC7A0}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Plugin", "StellaOps.Plugin\StellaOps.Plugin.csproj", "{B85C1C0E-B245-44FB-877E-C112DE29041A}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.WebService", "StellaOps.Concelier.WebService\StellaOps.Concelier.WebService.csproj", "{2C970A0F-FE3D-425B-B1B3-A008B194F5C2}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Cccs", "StellaOps.Concelier.Connector.Cccs\StellaOps.Concelier.Connector.Cccs.csproj", "{A7035381-6D20-4A07-817B-A324ED735EB3}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Distro.Debian", "StellaOps.Concelier.Connector.Distro.Debian\StellaOps.Concelier.Connector.Distro.Debian.csproj", "{404F5F6E-37E4-4EF9-B09D-6634366B5D44}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Distro.Ubuntu", "StellaOps.Concelier.Connector.Distro.Ubuntu\StellaOps.Concelier.Connector.Distro.Ubuntu.csproj", "{1BEF4D9D-9EA4-4BE9-9664-F16DC1CA8EEB}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Kisa", "StellaOps.Concelier.Connector.Kisa\StellaOps.Concelier.Connector.Kisa.csproj", "{23055A20-7079-4336-AD30-EFAA2FA11665}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.CertCc", "StellaOps.Concelier.Connector.CertCc\StellaOps.Concelier.Connector.CertCc.csproj", "{C2304954-9B15-4776-8DB6-22E293D311E4}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.CertFr", "StellaOps.Concelier.Connector.CertFr\StellaOps.Concelier.Connector.CertFr.csproj", "{E6895821-ED23-46D2-A5DC-06D61F90EC27}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Nvd", "StellaOps.Concelier.Connector.Nvd\StellaOps.Concelier.Connector.Nvd.csproj", "{378CB675-D70B-4A95-B324-62B67D79AAB7}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Oracle", "StellaOps.Concelier.Connector.Vndr.Oracle\StellaOps.Concelier.Connector.Vndr.Oracle.csproj", "{53AD2E55-B0F5-46AD-BFE5-82F486371872}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Ru.Nkcki", "StellaOps.Concelier.Connector.Ru.Nkcki\StellaOps.Concelier.Connector.Ru.Nkcki.csproj", "{B880C99C-C0BD-4953-95AD-2C76BC43F760}" -EndProject 
-Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Distro.Suse", "StellaOps.Concelier.Connector.Distro.Suse\StellaOps.Concelier.Connector.Distro.Suse.csproj", "{23422F67-C1FB-4FF4-899C-706BCD63D9FD}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Ru.Bdu", "StellaOps.Concelier.Connector.Ru.Bdu\StellaOps.Concelier.Connector.Ru.Bdu.csproj", "{16AD4AB9-2A80-4CFD-91A7-36CC1FEF439F}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Kev", "StellaOps.Concelier.Connector.Kev\StellaOps.Concelier.Connector.Kev.csproj", "{20DB9837-715B-4515-98C6-14B50060B765}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Ics.Kaspersky", "StellaOps.Concelier.Connector.Ics.Kaspersky\StellaOps.Concelier.Connector.Ics.Kaspersky.csproj", "{10849EE2-9F34-4C23-BBB4-916A59CDB7F4}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Osv", "StellaOps.Concelier.Connector.Osv\StellaOps.Concelier.Connector.Osv.csproj", "{EFB16EDB-78D4-4601-852E-F4B37655FA13}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Jvn", "StellaOps.Concelier.Connector.Jvn\StellaOps.Concelier.Connector.Jvn.csproj", "{02289F61-0173-42CC-B8F2-25CC53F8E066}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.CertBund", "StellaOps.Concelier.Connector.CertBund\StellaOps.Concelier.Connector.CertBund.csproj", "{4CE0B67B-2B6D-4D48-9D38-2F1165FD6BF4}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Cve", "StellaOps.Concelier.Connector.Cve\StellaOps.Concelier.Connector.Cve.csproj", "{EB037D9A-EF9C-439D-8A79-4B7D12F9C9D0}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Cisco", "StellaOps.Concelier.Connector.Vndr.Cisco\StellaOps.Concelier.Connector.Vndr.Cisco.csproj", "{19957518-A422-4622-9FD1-621DF3E31869}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Msrc", "StellaOps.Concelier.Connector.Vndr.Msrc\StellaOps.Concelier.Connector.Vndr.Msrc.csproj", "{69C4C061-F5A0-4EAA-A4CD-9A513523952A}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Chromium", "StellaOps.Concelier.Connector.Vndr.Chromium\StellaOps.Concelier.Connector.Vndr.Chromium.csproj", "{C7F7DE6F-A369-4F43-9864-286DCEC615F8}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Apple", "StellaOps.Concelier.Connector.Vndr.Apple\StellaOps.Concelier.Connector.Vndr.Apple.csproj", "{1C1593FE-73A4-47E8-A45B-5FC3B0BA7698}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Vmware", "StellaOps.Concelier.Connector.Vndr.Vmware\StellaOps.Concelier.Connector.Vndr.Vmware.csproj", "{7255C38D-5A16-4A4D-98CE-CF0FD516B68E}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Adobe", "StellaOps.Concelier.Connector.Vndr.Adobe\StellaOps.Concelier.Connector.Vndr.Adobe.csproj", "{C3A42AA3-800D-4398-A077-5560EE6451EF}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.CertIn", "StellaOps.Concelier.Connector.CertIn\StellaOps.Concelier.Connector.CertIn.csproj", "{5016963A-6FC9-4063-AB83-2D1F9A2BC627}" -EndProject 
-Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Ghsa", "StellaOps.Concelier.Connector.Ghsa\StellaOps.Concelier.Connector.Ghsa.csproj", "{72F43F43-F852-487F-8334-91D438CE2F7C}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Distro.RedHat", "StellaOps.Concelier.Connector.Distro.RedHat\StellaOps.Concelier.Connector.Distro.RedHat.csproj", "{A4DBF88F-34D0-4A05-ACCE-DE080F912FDB}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.DependencyInjection", "StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj", "{F622D38D-DA49-473E-B724-E706F8113CF2}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Core.Tests", "StellaOps.Concelier.Core.Tests\StellaOps.Concelier.Core.Tests.csproj", "{3A3D7610-C864-4413-B07E-9E8C2A49A90E}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Merge.Tests", "StellaOps.Concelier.Merge.Tests\StellaOps.Concelier.Merge.Tests.csproj", "{9C4DEE96-CD7D-4AE3-A811-0B48B477003B}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Models.Tests", "StellaOps.Concelier.Models.Tests\StellaOps.Concelier.Models.Tests.csproj", "{437B2667-9461-47D2-B75B-4D2E03D69B94}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Normalization.Tests", "StellaOps.Concelier.Normalization.Tests\StellaOps.Concelier.Normalization.Tests.csproj", "{8249DF28-CDAF-4DEF-A912-C27F57B67FD5}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Storage.Mongo.Tests", "StellaOps.Concelier.Storage.Mongo.Tests\StellaOps.Concelier.Storage.Mongo.Tests.csproj", "{CBFB015B-C069-475F-A476-D52222729804}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Exporter.Json.Tests", "StellaOps.Concelier.Exporter.Json.Tests\StellaOps.Concelier.Exporter.Json.Tests.csproj", "{2A41D9D2-3218-4F12-9C2B-3DB18A8E732E}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Exporter.TrivyDb.Tests", "StellaOps.Concelier.Exporter.TrivyDb.Tests\StellaOps.Concelier.Exporter.TrivyDb.Tests.csproj", "{3EB22234-642E-4533-BCC3-93E8ED443B1D}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.WebService.Tests", "StellaOps.Concelier.WebService.Tests\StellaOps.Concelier.WebService.Tests.csproj", "{84A5DE81-4444-499A-93BF-6DC4CA72F8D4}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Common.Tests", "StellaOps.Concelier.Connector.Common.Tests\StellaOps.Concelier.Connector.Common.Tests.csproj", "{42E21E1D-C3DE-4765-93E9-39391BB5C802}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Nvd.Tests", "StellaOps.Concelier.Connector.Nvd.Tests\StellaOps.Concelier.Connector.Nvd.Tests.csproj", "{B6E2EE26-B297-4AB9-A47E-A227F5EAE108}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Distro.RedHat.Tests", "StellaOps.Concelier.Connector.Distro.RedHat.Tests\StellaOps.Concelier.Connector.Distro.RedHat.Tests.csproj", "{CDB2D636-C82F-43F1-BB30-FFC6258FBAB4}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Chromium.Tests", "StellaOps.Concelier.Connector.Vndr.Chromium.Tests\StellaOps.Concelier.Connector.Vndr.Chromium.Tests.csproj", "{2891FCDE-BB89-46F0-A40C-368EF804DB44}" -EndProject 
-Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Adobe.Tests", "StellaOps.Concelier.Connector.Vndr.Adobe.Tests\StellaOps.Concelier.Connector.Vndr.Adobe.Tests.csproj", "{B91C60FB-926F-47C3-BFD0-6DD145308344}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Oracle.Tests", "StellaOps.Concelier.Connector.Vndr.Oracle.Tests\StellaOps.Concelier.Connector.Vndr.Oracle.Tests.csproj", "{30DF89D1-D66D-4078-8A3B-951637A42265}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Vmware.Tests", "StellaOps.Concelier.Connector.Vndr.Vmware.Tests\StellaOps.Concelier.Connector.Vndr.Vmware.Tests.csproj", "{6E98C770-72FF-41FA-8C42-30AABAAF5B4E}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.CertIn.Tests", "StellaOps.Concelier.Connector.CertIn.Tests\StellaOps.Concelier.Connector.CertIn.Tests.csproj", "{79B36C92-BA93-4406-AB75-6F2282DDFF01}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.CertFr.Tests", "StellaOps.Concelier.Connector.CertFr.Tests\StellaOps.Concelier.Connector.CertFr.Tests.csproj", "{4B60FA53-81F6-4AB6-BE9F-DE0992E11977}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Ics.Kaspersky.Tests", "StellaOps.Concelier.Connector.Ics.Kaspersky.Tests\StellaOps.Concelier.Connector.Ics.Kaspersky.Tests.csproj", "{6BBA820B-8443-4832-91C3-3AB002006494}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Jvn.Tests", "StellaOps.Concelier.Connector.Jvn.Tests\StellaOps.Concelier.Connector.Jvn.Tests.csproj", "{7845AE1C-FBD7-4177-A06F-D7AAE8315DB2}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Osv.Tests", "StellaOps.Concelier.Connector.Osv.Tests\StellaOps.Concelier.Connector.Osv.Tests.csproj", "{F892BFFD-9101-4D59-B6FD-C532EB04D51F}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Testing", "StellaOps.Concelier.Testing\StellaOps.Concelier.Testing.csproj", "{EAE910FC-188C-41C3-822A-623964CABE48}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Distro.Debian.Tests", "StellaOps.Concelier.Connector.Distro.Debian.Tests\StellaOps.Concelier.Connector.Distro.Debian.Tests.csproj", "{BBA5C780-6348-427D-9600-726EAA8963B3}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Configuration", "StellaOps.Configuration\StellaOps.Configuration.csproj", "{5F44A429-816A-4560-A5AA-61CD23FD8A19}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cli", "StellaOps.Cli\StellaOps.Cli.csproj", "{20FDC3B4-9908-4ABF-BA1D-50E0B4A64F4B}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cli.Tests", "StellaOps.Cli.Tests\StellaOps.Cli.Tests.csproj", "{544DBB82-4639-4856-A5F2-76828F7A8396}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Ru.Bdu.Tests", "StellaOps.Concelier.Connector.Ru.Bdu.Tests\StellaOps.Concelier.Connector.Ru.Bdu.Tests.csproj", "{C4B189FA-4268-4B3C-A6B0-C2BB5B96D11A}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Ru.Nkcki.Tests", "StellaOps.Concelier.Connector.Ru.Nkcki.Tests\StellaOps.Concelier.Connector.Ru.Nkcki.Tests.csproj", "{461D4A58-3816-4737-B209-2D1F08B1F4DF}" -EndProject 
-Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Aoc", "StellaOps.Aoc\StellaOps.Aoc.csproj", "{FB026A42-9B11-4926-9918-249A672DB879}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Aoc.Tests", "StellaOps.Aoc.Tests\StellaOps.Aoc.Tests.csproj", "{2983C054-33F1-49A3-BB6A-FC4737B2A510}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.RawModels", "StellaOps.Concelier.RawModels\StellaOps.Concelier.RawModels.csproj", "{E8A8FDFC-DACF-4F7B-AECC-3D1C59341DF9}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.RawModels.Tests", "StellaOps.Concelier.RawModels.Tests\StellaOps.Concelier.RawModels.Tests.csproj", "{38E1D9C6-F396-46EF-9A44-B7CC473B3D50}" -EndProject -Global - GlobalSection(SolutionConfigurationPlatforms) = preSolution - Debug|Any CPU = Debug|Any CPU - Debug|x64 = Debug|x64 - Debug|x86 = Debug|x86 - Release|Any CPU = Release|Any CPU - Release|x64 = Release|x64 - Release|x86 = Release|x86 - EndGlobalSection - GlobalSection(ProjectConfigurationPlatforms) = postSolution - {CFD7B267-46B7-4C73-A33A-3E82AD2CFABC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {CFD7B267-46B7-4C73-A33A-3E82AD2CFABC}.Debug|Any CPU.Build.0 = Debug|Any CPU - {CFD7B267-46B7-4C73-A33A-3E82AD2CFABC}.Debug|x64.ActiveCfg = Debug|Any CPU - {CFD7B267-46B7-4C73-A33A-3E82AD2CFABC}.Debug|x64.Build.0 = Debug|Any CPU - {CFD7B267-46B7-4C73-A33A-3E82AD2CFABC}.Debug|x86.ActiveCfg = Debug|Any CPU - {CFD7B267-46B7-4C73-A33A-3E82AD2CFABC}.Debug|x86.Build.0 = Debug|Any CPU - {CFD7B267-46B7-4C73-A33A-3E82AD2CFABC}.Release|Any CPU.ActiveCfg = Release|Any CPU - {CFD7B267-46B7-4C73-A33A-3E82AD2CFABC}.Release|Any CPU.Build.0 = Release|Any CPU - {CFD7B267-46B7-4C73-A33A-3E82AD2CFABC}.Release|x64.ActiveCfg = Release|Any CPU - {CFD7B267-46B7-4C73-A33A-3E82AD2CFABC}.Release|x64.Build.0 = Release|Any CPU - {CFD7B267-46B7-4C73-A33A-3E82AD2CFABC}.Release|x86.ActiveCfg = Release|Any CPU - {CFD7B267-46B7-4C73-A33A-3E82AD2CFABC}.Release|x86.Build.0 = Release|Any CPU - {E9DE840D-0760-4324-98E2-7F2CBE06DC1A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {E9DE840D-0760-4324-98E2-7F2CBE06DC1A}.Debug|Any CPU.Build.0 = Debug|Any CPU - {E9DE840D-0760-4324-98E2-7F2CBE06DC1A}.Debug|x64.ActiveCfg = Debug|Any CPU - {E9DE840D-0760-4324-98E2-7F2CBE06DC1A}.Debug|x64.Build.0 = Debug|Any CPU - {E9DE840D-0760-4324-98E2-7F2CBE06DC1A}.Debug|x86.ActiveCfg = Debug|Any CPU - {E9DE840D-0760-4324-98E2-7F2CBE06DC1A}.Debug|x86.Build.0 = Debug|Any CPU - {E9DE840D-0760-4324-98E2-7F2CBE06DC1A}.Release|Any CPU.ActiveCfg = Release|Any CPU - {E9DE840D-0760-4324-98E2-7F2CBE06DC1A}.Release|Any CPU.Build.0 = Release|Any CPU - {E9DE840D-0760-4324-98E2-7F2CBE06DC1A}.Release|x64.ActiveCfg = Release|Any CPU - {E9DE840D-0760-4324-98E2-7F2CBE06DC1A}.Release|x64.Build.0 = Release|Any CPU - {E9DE840D-0760-4324-98E2-7F2CBE06DC1A}.Release|x86.ActiveCfg = Release|Any CPU - {E9DE840D-0760-4324-98E2-7F2CBE06DC1A}.Release|x86.Build.0 = Release|Any CPU - {061B0042-9A6C-4CFD-9E48-4D3F3B924442}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {061B0042-9A6C-4CFD-9E48-4D3F3B924442}.Debug|Any CPU.Build.0 = Debug|Any CPU - {061B0042-9A6C-4CFD-9E48-4D3F3B924442}.Debug|x64.ActiveCfg = Debug|Any CPU - {061B0042-9A6C-4CFD-9E48-4D3F3B924442}.Debug|x64.Build.0 = Debug|Any CPU - {061B0042-9A6C-4CFD-9E48-4D3F3B924442}.Debug|x86.ActiveCfg = Debug|Any CPU - {061B0042-9A6C-4CFD-9E48-4D3F3B924442}.Debug|x86.Build.0 = Debug|Any CPU - {061B0042-9A6C-4CFD-9E48-4D3F3B924442}.Release|Any CPU.ActiveCfg = Release|Any CPU - 
{061B0042-9A6C-4CFD-9E48-4D3F3B924442}.Release|Any CPU.Build.0 = Release|Any CPU - {061B0042-9A6C-4CFD-9E48-4D3F3B924442}.Release|x64.ActiveCfg = Release|Any CPU - {061B0042-9A6C-4CFD-9E48-4D3F3B924442}.Release|x64.Build.0 = Release|Any CPU - {061B0042-9A6C-4CFD-9E48-4D3F3B924442}.Release|x86.ActiveCfg = Release|Any CPU - {061B0042-9A6C-4CFD-9E48-4D3F3B924442}.Release|x86.Build.0 = Release|Any CPU - {6A301F32-2EEE-491B-9DB9-3BF26D032F07}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {6A301F32-2EEE-491B-9DB9-3BF26D032F07}.Debug|Any CPU.Build.0 = Debug|Any CPU - {6A301F32-2EEE-491B-9DB9-3BF26D032F07}.Debug|x64.ActiveCfg = Debug|Any CPU - {6A301F32-2EEE-491B-9DB9-3BF26D032F07}.Debug|x64.Build.0 = Debug|Any CPU - {6A301F32-2EEE-491B-9DB9-3BF26D032F07}.Debug|x86.ActiveCfg = Debug|Any CPU - {6A301F32-2EEE-491B-9DB9-3BF26D032F07}.Debug|x86.Build.0 = Debug|Any CPU - {6A301F32-2EEE-491B-9DB9-3BF26D032F07}.Release|Any CPU.ActiveCfg = Release|Any CPU - {6A301F32-2EEE-491B-9DB9-3BF26D032F07}.Release|Any CPU.Build.0 = Release|Any CPU - {6A301F32-2EEE-491B-9DB9-3BF26D032F07}.Release|x64.ActiveCfg = Release|Any CPU - {6A301F32-2EEE-491B-9DB9-3BF26D032F07}.Release|x64.Build.0 = Release|Any CPU - {6A301F32-2EEE-491B-9DB9-3BF26D032F07}.Release|x86.ActiveCfg = Release|Any CPU - {6A301F32-2EEE-491B-9DB9-3BF26D032F07}.Release|x86.Build.0 = Release|Any CPU - {AFCCC916-58E8-4676-AABB-54B04CEA3392}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {AFCCC916-58E8-4676-AABB-54B04CEA3392}.Debug|Any CPU.Build.0 = Debug|Any CPU - {AFCCC916-58E8-4676-AABB-54B04CEA3392}.Debug|x64.ActiveCfg = Debug|Any CPU - {AFCCC916-58E8-4676-AABB-54B04CEA3392}.Debug|x64.Build.0 = Debug|Any CPU - {AFCCC916-58E8-4676-AABB-54B04CEA3392}.Debug|x86.ActiveCfg = Debug|Any CPU - {AFCCC916-58E8-4676-AABB-54B04CEA3392}.Debug|x86.Build.0 = Debug|Any CPU - {AFCCC916-58E8-4676-AABB-54B04CEA3392}.Release|Any CPU.ActiveCfg = Release|Any CPU - {AFCCC916-58E8-4676-AABB-54B04CEA3392}.Release|Any CPU.Build.0 = Release|Any CPU - {AFCCC916-58E8-4676-AABB-54B04CEA3392}.Release|x64.ActiveCfg = Release|Any CPU - {AFCCC916-58E8-4676-AABB-54B04CEA3392}.Release|x64.Build.0 = Release|Any CPU - {AFCCC916-58E8-4676-AABB-54B04CEA3392}.Release|x86.ActiveCfg = Release|Any CPU - {AFCCC916-58E8-4676-AABB-54B04CEA3392}.Release|x86.Build.0 = Release|Any CPU - {BF3DAB2F-E46E-49C1-9BA5-AA389763A632}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {BF3DAB2F-E46E-49C1-9BA5-AA389763A632}.Debug|Any CPU.Build.0 = Debug|Any CPU - {BF3DAB2F-E46E-49C1-9BA5-AA389763A632}.Debug|x64.ActiveCfg = Debug|Any CPU - {BF3DAB2F-E46E-49C1-9BA5-AA389763A632}.Debug|x64.Build.0 = Debug|Any CPU - {BF3DAB2F-E46E-49C1-9BA5-AA389763A632}.Debug|x86.ActiveCfg = Debug|Any CPU - {BF3DAB2F-E46E-49C1-9BA5-AA389763A632}.Debug|x86.Build.0 = Debug|Any CPU - {BF3DAB2F-E46E-49C1-9BA5-AA389763A632}.Release|Any CPU.ActiveCfg = Release|Any CPU - {BF3DAB2F-E46E-49C1-9BA5-AA389763A632}.Release|Any CPU.Build.0 = Release|Any CPU - {BF3DAB2F-E46E-49C1-9BA5-AA389763A632}.Release|x64.ActiveCfg = Release|Any CPU - {BF3DAB2F-E46E-49C1-9BA5-AA389763A632}.Release|x64.Build.0 = Release|Any CPU - {BF3DAB2F-E46E-49C1-9BA5-AA389763A632}.Release|x86.ActiveCfg = Release|Any CPU - {BF3DAB2F-E46E-49C1-9BA5-AA389763A632}.Release|x86.Build.0 = Release|Any CPU - {429BAA6A-706D-489A-846F-4B0EF1B15121}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {429BAA6A-706D-489A-846F-4B0EF1B15121}.Debug|Any CPU.Build.0 = Debug|Any CPU - {429BAA6A-706D-489A-846F-4B0EF1B15121}.Debug|x64.ActiveCfg = Debug|Any CPU - {429BAA6A-706D-489A-846F-4B0EF1B15121}.Debug|x64.Build.0 = 
Debug|Any CPU - {429BAA6A-706D-489A-846F-4B0EF1B15121}.Debug|x86.ActiveCfg = Debug|Any CPU - {429BAA6A-706D-489A-846F-4B0EF1B15121}.Debug|x86.Build.0 = Debug|Any CPU - {429BAA6A-706D-489A-846F-4B0EF1B15121}.Release|Any CPU.ActiveCfg = Release|Any CPU - {429BAA6A-706D-489A-846F-4B0EF1B15121}.Release|Any CPU.Build.0 = Release|Any CPU - {429BAA6A-706D-489A-846F-4B0EF1B15121}.Release|x64.ActiveCfg = Release|Any CPU - {429BAA6A-706D-489A-846F-4B0EF1B15121}.Release|x64.Build.0 = Release|Any CPU - {429BAA6A-706D-489A-846F-4B0EF1B15121}.Release|x86.ActiveCfg = Release|Any CPU - {429BAA6A-706D-489A-846F-4B0EF1B15121}.Release|x86.Build.0 = Release|Any CPU - {085CEC8E-0E10-48E8-89E2-9452CD2E7FA0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {085CEC8E-0E10-48E8-89E2-9452CD2E7FA0}.Debug|Any CPU.Build.0 = Debug|Any CPU - {085CEC8E-0E10-48E8-89E2-9452CD2E7FA0}.Debug|x64.ActiveCfg = Debug|Any CPU - {085CEC8E-0E10-48E8-89E2-9452CD2E7FA0}.Debug|x64.Build.0 = Debug|Any CPU - {085CEC8E-0E10-48E8-89E2-9452CD2E7FA0}.Debug|x86.ActiveCfg = Debug|Any CPU - {085CEC8E-0E10-48E8-89E2-9452CD2E7FA0}.Debug|x86.Build.0 = Debug|Any CPU - {085CEC8E-0E10-48E8-89E2-9452CD2E7FA0}.Release|Any CPU.ActiveCfg = Release|Any CPU - {085CEC8E-0E10-48E8-89E2-9452CD2E7FA0}.Release|Any CPU.Build.0 = Release|Any CPU - {085CEC8E-0E10-48E8-89E2-9452CD2E7FA0}.Release|x64.ActiveCfg = Release|Any CPU - {085CEC8E-0E10-48E8-89E2-9452CD2E7FA0}.Release|x64.Build.0 = Release|Any CPU - {085CEC8E-0E10-48E8-89E2-9452CD2E7FA0}.Release|x86.ActiveCfg = Release|Any CPU - {085CEC8E-0E10-48E8-89E2-9452CD2E7FA0}.Release|x86.Build.0 = Release|Any CPU - {1C5506B8-C01B-4419-B888-A48F441E0C69}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {1C5506B8-C01B-4419-B888-A48F441E0C69}.Debug|Any CPU.Build.0 = Debug|Any CPU - {1C5506B8-C01B-4419-B888-A48F441E0C69}.Debug|x64.ActiveCfg = Debug|Any CPU - {1C5506B8-C01B-4419-B888-A48F441E0C69}.Debug|x64.Build.0 = Debug|Any CPU - {1C5506B8-C01B-4419-B888-A48F441E0C69}.Debug|x86.ActiveCfg = Debug|Any CPU - {1C5506B8-C01B-4419-B888-A48F441E0C69}.Debug|x86.Build.0 = Debug|Any CPU - {1C5506B8-C01B-4419-B888-A48F441E0C69}.Release|Any CPU.ActiveCfg = Release|Any CPU - {1C5506B8-C01B-4419-B888-A48F441E0C69}.Release|Any CPU.Build.0 = Release|Any CPU - {1C5506B8-C01B-4419-B888-A48F441E0C69}.Release|x64.ActiveCfg = Release|Any CPU - {1C5506B8-C01B-4419-B888-A48F441E0C69}.Release|x64.Build.0 = Release|Any CPU - {1C5506B8-C01B-4419-B888-A48F441E0C69}.Release|x86.ActiveCfg = Release|Any CPU - {1C5506B8-C01B-4419-B888-A48F441E0C69}.Release|x86.Build.0 = Release|Any CPU - {4D936BC4-5520-4642-A237-4106E97BC7A0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {4D936BC4-5520-4642-A237-4106E97BC7A0}.Debug|Any CPU.Build.0 = Debug|Any CPU - {4D936BC4-5520-4642-A237-4106E97BC7A0}.Debug|x64.ActiveCfg = Debug|Any CPU - {4D936BC4-5520-4642-A237-4106E97BC7A0}.Debug|x64.Build.0 = Debug|Any CPU - {4D936BC4-5520-4642-A237-4106E97BC7A0}.Debug|x86.ActiveCfg = Debug|Any CPU - {4D936BC4-5520-4642-A237-4106E97BC7A0}.Debug|x86.Build.0 = Debug|Any CPU - {4D936BC4-5520-4642-A237-4106E97BC7A0}.Release|Any CPU.ActiveCfg = Release|Any CPU - {4D936BC4-5520-4642-A237-4106E97BC7A0}.Release|Any CPU.Build.0 = Release|Any CPU - {4D936BC4-5520-4642-A237-4106E97BC7A0}.Release|x64.ActiveCfg = Release|Any CPU - {4D936BC4-5520-4642-A237-4106E97BC7A0}.Release|x64.Build.0 = Release|Any CPU - {4D936BC4-5520-4642-A237-4106E97BC7A0}.Release|x86.ActiveCfg = Release|Any CPU - {4D936BC4-5520-4642-A237-4106E97BC7A0}.Release|x86.Build.0 = Release|Any CPU - 
{B85C1C0E-B245-44FB-877E-C112DE29041A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {B85C1C0E-B245-44FB-877E-C112DE29041A}.Debug|Any CPU.Build.0 = Debug|Any CPU - {B85C1C0E-B245-44FB-877E-C112DE29041A}.Debug|x64.ActiveCfg = Debug|Any CPU - {B85C1C0E-B245-44FB-877E-C112DE29041A}.Debug|x64.Build.0 = Debug|Any CPU - {B85C1C0E-B245-44FB-877E-C112DE29041A}.Debug|x86.ActiveCfg = Debug|Any CPU - {B85C1C0E-B245-44FB-877E-C112DE29041A}.Debug|x86.Build.0 = Debug|Any CPU - {B85C1C0E-B245-44FB-877E-C112DE29041A}.Release|Any CPU.ActiveCfg = Release|Any CPU - {B85C1C0E-B245-44FB-877E-C112DE29041A}.Release|Any CPU.Build.0 = Release|Any CPU - {B85C1C0E-B245-44FB-877E-C112DE29041A}.Release|x64.ActiveCfg = Release|Any CPU - {B85C1C0E-B245-44FB-877E-C112DE29041A}.Release|x64.Build.0 = Release|Any CPU - {B85C1C0E-B245-44FB-877E-C112DE29041A}.Release|x86.ActiveCfg = Release|Any CPU - {B85C1C0E-B245-44FB-877E-C112DE29041A}.Release|x86.Build.0 = Release|Any CPU - {2C970A0F-FE3D-425B-B1B3-A008B194F5C2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {2C970A0F-FE3D-425B-B1B3-A008B194F5C2}.Debug|Any CPU.Build.0 = Debug|Any CPU - {2C970A0F-FE3D-425B-B1B3-A008B194F5C2}.Debug|x64.ActiveCfg = Debug|Any CPU - {2C970A0F-FE3D-425B-B1B3-A008B194F5C2}.Debug|x64.Build.0 = Debug|Any CPU - {2C970A0F-FE3D-425B-B1B3-A008B194F5C2}.Debug|x86.ActiveCfg = Debug|Any CPU - {2C970A0F-FE3D-425B-B1B3-A008B194F5C2}.Debug|x86.Build.0 = Debug|Any CPU - {2C970A0F-FE3D-425B-B1B3-A008B194F5C2}.Release|Any CPU.ActiveCfg = Release|Any CPU - {2C970A0F-FE3D-425B-B1B3-A008B194F5C2}.Release|Any CPU.Build.0 = Release|Any CPU - {2C970A0F-FE3D-425B-B1B3-A008B194F5C2}.Release|x64.ActiveCfg = Release|Any CPU - {2C970A0F-FE3D-425B-B1B3-A008B194F5C2}.Release|x64.Build.0 = Release|Any CPU - {2C970A0F-FE3D-425B-B1B3-A008B194F5C2}.Release|x86.ActiveCfg = Release|Any CPU - {2C970A0F-FE3D-425B-B1B3-A008B194F5C2}.Release|x86.Build.0 = Release|Any CPU - {A7035381-6D20-4A07-817B-A324ED735EB3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {A7035381-6D20-4A07-817B-A324ED735EB3}.Debug|Any CPU.Build.0 = Debug|Any CPU - {A7035381-6D20-4A07-817B-A324ED735EB3}.Debug|x64.ActiveCfg = Debug|Any CPU - {A7035381-6D20-4A07-817B-A324ED735EB3}.Debug|x64.Build.0 = Debug|Any CPU - {A7035381-6D20-4A07-817B-A324ED735EB3}.Debug|x86.ActiveCfg = Debug|Any CPU - {A7035381-6D20-4A07-817B-A324ED735EB3}.Debug|x86.Build.0 = Debug|Any CPU - {A7035381-6D20-4A07-817B-A324ED735EB3}.Release|Any CPU.ActiveCfg = Release|Any CPU - {A7035381-6D20-4A07-817B-A324ED735EB3}.Release|Any CPU.Build.0 = Release|Any CPU - {A7035381-6D20-4A07-817B-A324ED735EB3}.Release|x64.ActiveCfg = Release|Any CPU - {A7035381-6D20-4A07-817B-A324ED735EB3}.Release|x64.Build.0 = Release|Any CPU - {A7035381-6D20-4A07-817B-A324ED735EB3}.Release|x86.ActiveCfg = Release|Any CPU - {A7035381-6D20-4A07-817B-A324ED735EB3}.Release|x86.Build.0 = Release|Any CPU - {404F5F6E-37E4-4EF9-B09D-6634366B5D44}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {404F5F6E-37E4-4EF9-B09D-6634366B5D44}.Debug|Any CPU.Build.0 = Debug|Any CPU - {404F5F6E-37E4-4EF9-B09D-6634366B5D44}.Debug|x64.ActiveCfg = Debug|Any CPU - {404F5F6E-37E4-4EF9-B09D-6634366B5D44}.Debug|x64.Build.0 = Debug|Any CPU - {404F5F6E-37E4-4EF9-B09D-6634366B5D44}.Debug|x86.ActiveCfg = Debug|Any CPU - {404F5F6E-37E4-4EF9-B09D-6634366B5D44}.Debug|x86.Build.0 = Debug|Any CPU - {404F5F6E-37E4-4EF9-B09D-6634366B5D44}.Release|Any CPU.ActiveCfg = Release|Any CPU - {404F5F6E-37E4-4EF9-B09D-6634366B5D44}.Release|Any CPU.Build.0 = Release|Any CPU - {404F5F6E-37E4-4EF9-B09D-6634366B5D44}.Release|x64.ActiveCfg = 
Release|Any CPU - {404F5F6E-37E4-4EF9-B09D-6634366B5D44}.Release|x64.Build.0 = Release|Any CPU - {404F5F6E-37E4-4EF9-B09D-6634366B5D44}.Release|x86.ActiveCfg = Release|Any CPU - {404F5F6E-37E4-4EF9-B09D-6634366B5D44}.Release|x86.Build.0 = Release|Any CPU - {1BEF4D9D-9EA4-4BE9-9664-F16DC1CA8EEB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {1BEF4D9D-9EA4-4BE9-9664-F16DC1CA8EEB}.Debug|Any CPU.Build.0 = Debug|Any CPU - {1BEF4D9D-9EA4-4BE9-9664-F16DC1CA8EEB}.Debug|x64.ActiveCfg = Debug|Any CPU - {1BEF4D9D-9EA4-4BE9-9664-F16DC1CA8EEB}.Debug|x64.Build.0 = Debug|Any CPU - {1BEF4D9D-9EA4-4BE9-9664-F16DC1CA8EEB}.Debug|x86.ActiveCfg = Debug|Any CPU - {1BEF4D9D-9EA4-4BE9-9664-F16DC1CA8EEB}.Debug|x86.Build.0 = Debug|Any CPU - {1BEF4D9D-9EA4-4BE9-9664-F16DC1CA8EEB}.Release|Any CPU.ActiveCfg = Release|Any CPU - {1BEF4D9D-9EA4-4BE9-9664-F16DC1CA8EEB}.Release|Any CPU.Build.0 = Release|Any CPU - {1BEF4D9D-9EA4-4BE9-9664-F16DC1CA8EEB}.Release|x64.ActiveCfg = Release|Any CPU - {1BEF4D9D-9EA4-4BE9-9664-F16DC1CA8EEB}.Release|x64.Build.0 = Release|Any CPU - {1BEF4D9D-9EA4-4BE9-9664-F16DC1CA8EEB}.Release|x86.ActiveCfg = Release|Any CPU - {1BEF4D9D-9EA4-4BE9-9664-F16DC1CA8EEB}.Release|x86.Build.0 = Release|Any CPU - {23055A20-7079-4336-AD30-EFAA2FA11665}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {23055A20-7079-4336-AD30-EFAA2FA11665}.Debug|Any CPU.Build.0 = Debug|Any CPU - {23055A20-7079-4336-AD30-EFAA2FA11665}.Debug|x64.ActiveCfg = Debug|Any CPU - {23055A20-7079-4336-AD30-EFAA2FA11665}.Debug|x64.Build.0 = Debug|Any CPU - {23055A20-7079-4336-AD30-EFAA2FA11665}.Debug|x86.ActiveCfg = Debug|Any CPU - {23055A20-7079-4336-AD30-EFAA2FA11665}.Debug|x86.Build.0 = Debug|Any CPU - {23055A20-7079-4336-AD30-EFAA2FA11665}.Release|Any CPU.ActiveCfg = Release|Any CPU - {23055A20-7079-4336-AD30-EFAA2FA11665}.Release|Any CPU.Build.0 = Release|Any CPU - {23055A20-7079-4336-AD30-EFAA2FA11665}.Release|x64.ActiveCfg = Release|Any CPU - {23055A20-7079-4336-AD30-EFAA2FA11665}.Release|x64.Build.0 = Release|Any CPU - {23055A20-7079-4336-AD30-EFAA2FA11665}.Release|x86.ActiveCfg = Release|Any CPU - {23055A20-7079-4336-AD30-EFAA2FA11665}.Release|x86.Build.0 = Release|Any CPU - {C2304954-9B15-4776-8DB6-22E293D311E4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {C2304954-9B15-4776-8DB6-22E293D311E4}.Debug|Any CPU.Build.0 = Debug|Any CPU - {C2304954-9B15-4776-8DB6-22E293D311E4}.Debug|x64.ActiveCfg = Debug|Any CPU - {C2304954-9B15-4776-8DB6-22E293D311E4}.Debug|x64.Build.0 = Debug|Any CPU - {C2304954-9B15-4776-8DB6-22E293D311E4}.Debug|x86.ActiveCfg = Debug|Any CPU - {C2304954-9B15-4776-8DB6-22E293D311E4}.Debug|x86.Build.0 = Debug|Any CPU - {C2304954-9B15-4776-8DB6-22E293D311E4}.Release|Any CPU.ActiveCfg = Release|Any CPU - {C2304954-9B15-4776-8DB6-22E293D311E4}.Release|Any CPU.Build.0 = Release|Any CPU - {C2304954-9B15-4776-8DB6-22E293D311E4}.Release|x64.ActiveCfg = Release|Any CPU - {C2304954-9B15-4776-8DB6-22E293D311E4}.Release|x64.Build.0 = Release|Any CPU - {C2304954-9B15-4776-8DB6-22E293D311E4}.Release|x86.ActiveCfg = Release|Any CPU - {C2304954-9B15-4776-8DB6-22E293D311E4}.Release|x86.Build.0 = Release|Any CPU - {E6895821-ED23-46D2-A5DC-06D61F90EC27}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {E6895821-ED23-46D2-A5DC-06D61F90EC27}.Debug|Any CPU.Build.0 = Debug|Any CPU - {E6895821-ED23-46D2-A5DC-06D61F90EC27}.Debug|x64.ActiveCfg = Debug|Any CPU - {E6895821-ED23-46D2-A5DC-06D61F90EC27}.Debug|x64.Build.0 = Debug|Any CPU - {E6895821-ED23-46D2-A5DC-06D61F90EC27}.Debug|x86.ActiveCfg = Debug|Any CPU - 
{E6895821-ED23-46D2-A5DC-06D61F90EC27}.Debug|x86.Build.0 = Debug|Any CPU - {E6895821-ED23-46D2-A5DC-06D61F90EC27}.Release|Any CPU.ActiveCfg = Release|Any CPU - {E6895821-ED23-46D2-A5DC-06D61F90EC27}.Release|Any CPU.Build.0 = Release|Any CPU - {E6895821-ED23-46D2-A5DC-06D61F90EC27}.Release|x64.ActiveCfg = Release|Any CPU - {E6895821-ED23-46D2-A5DC-06D61F90EC27}.Release|x64.Build.0 = Release|Any CPU - {E6895821-ED23-46D2-A5DC-06D61F90EC27}.Release|x86.ActiveCfg = Release|Any CPU - {E6895821-ED23-46D2-A5DC-06D61F90EC27}.Release|x86.Build.0 = Release|Any CPU - {378CB675-D70B-4A95-B324-62B67D79AAB7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {378CB675-D70B-4A95-B324-62B67D79AAB7}.Debug|Any CPU.Build.0 = Debug|Any CPU - {378CB675-D70B-4A95-B324-62B67D79AAB7}.Debug|x64.ActiveCfg = Debug|Any CPU - {378CB675-D70B-4A95-B324-62B67D79AAB7}.Debug|x64.Build.0 = Debug|Any CPU - {378CB675-D70B-4A95-B324-62B67D79AAB7}.Debug|x86.ActiveCfg = Debug|Any CPU - {378CB675-D70B-4A95-B324-62B67D79AAB7}.Debug|x86.Build.0 = Debug|Any CPU - {378CB675-D70B-4A95-B324-62B67D79AAB7}.Release|Any CPU.ActiveCfg = Release|Any CPU - {378CB675-D70B-4A95-B324-62B67D79AAB7}.Release|Any CPU.Build.0 = Release|Any CPU - {378CB675-D70B-4A95-B324-62B67D79AAB7}.Release|x64.ActiveCfg = Release|Any CPU - {378CB675-D70B-4A95-B324-62B67D79AAB7}.Release|x64.Build.0 = Release|Any CPU - {378CB675-D70B-4A95-B324-62B67D79AAB7}.Release|x86.ActiveCfg = Release|Any CPU - {378CB675-D70B-4A95-B324-62B67D79AAB7}.Release|x86.Build.0 = Release|Any CPU - {53AD2E55-B0F5-46AD-BFE5-82F486371872}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {53AD2E55-B0F5-46AD-BFE5-82F486371872}.Debug|Any CPU.Build.0 = Debug|Any CPU - {53AD2E55-B0F5-46AD-BFE5-82F486371872}.Debug|x64.ActiveCfg = Debug|Any CPU - {53AD2E55-B0F5-46AD-BFE5-82F486371872}.Debug|x64.Build.0 = Debug|Any CPU - {53AD2E55-B0F5-46AD-BFE5-82F486371872}.Debug|x86.ActiveCfg = Debug|Any CPU - {53AD2E55-B0F5-46AD-BFE5-82F486371872}.Debug|x86.Build.0 = Debug|Any CPU - {53AD2E55-B0F5-46AD-BFE5-82F486371872}.Release|Any CPU.ActiveCfg = Release|Any CPU - {53AD2E55-B0F5-46AD-BFE5-82F486371872}.Release|Any CPU.Build.0 = Release|Any CPU - {53AD2E55-B0F5-46AD-BFE5-82F486371872}.Release|x64.ActiveCfg = Release|Any CPU - {53AD2E55-B0F5-46AD-BFE5-82F486371872}.Release|x64.Build.0 = Release|Any CPU - {53AD2E55-B0F5-46AD-BFE5-82F486371872}.Release|x86.ActiveCfg = Release|Any CPU - {53AD2E55-B0F5-46AD-BFE5-82F486371872}.Release|x86.Build.0 = Release|Any CPU - {B880C99C-C0BD-4953-95AD-2C76BC43F760}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {B880C99C-C0BD-4953-95AD-2C76BC43F760}.Debug|Any CPU.Build.0 = Debug|Any CPU - {B880C99C-C0BD-4953-95AD-2C76BC43F760}.Debug|x64.ActiveCfg = Debug|Any CPU - {B880C99C-C0BD-4953-95AD-2C76BC43F760}.Debug|x64.Build.0 = Debug|Any CPU - {B880C99C-C0BD-4953-95AD-2C76BC43F760}.Debug|x86.ActiveCfg = Debug|Any CPU - {B880C99C-C0BD-4953-95AD-2C76BC43F760}.Debug|x86.Build.0 = Debug|Any CPU - {B880C99C-C0BD-4953-95AD-2C76BC43F760}.Release|Any CPU.ActiveCfg = Release|Any CPU - {B880C99C-C0BD-4953-95AD-2C76BC43F760}.Release|Any CPU.Build.0 = Release|Any CPU - {B880C99C-C0BD-4953-95AD-2C76BC43F760}.Release|x64.ActiveCfg = Release|Any CPU - {B880C99C-C0BD-4953-95AD-2C76BC43F760}.Release|x64.Build.0 = Release|Any CPU - {B880C99C-C0BD-4953-95AD-2C76BC43F760}.Release|x86.ActiveCfg = Release|Any CPU - {B880C99C-C0BD-4953-95AD-2C76BC43F760}.Release|x86.Build.0 = Release|Any CPU - {23422F67-C1FB-4FF4-899C-706BCD63D9FD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {23422F67-C1FB-4FF4-899C-706BCD63D9FD}.Debug|Any 
CPU.Build.0 = Debug|Any CPU - {23422F67-C1FB-4FF4-899C-706BCD63D9FD}.Debug|x64.ActiveCfg = Debug|Any CPU - {23422F67-C1FB-4FF4-899C-706BCD63D9FD}.Debug|x64.Build.0 = Debug|Any CPU - {23422F67-C1FB-4FF4-899C-706BCD63D9FD}.Debug|x86.ActiveCfg = Debug|Any CPU - {23422F67-C1FB-4FF4-899C-706BCD63D9FD}.Debug|x86.Build.0 = Debug|Any CPU - {23422F67-C1FB-4FF4-899C-706BCD63D9FD}.Release|Any CPU.ActiveCfg = Release|Any CPU - {23422F67-C1FB-4FF4-899C-706BCD63D9FD}.Release|Any CPU.Build.0 = Release|Any CPU - {23422F67-C1FB-4FF4-899C-706BCD63D9FD}.Release|x64.ActiveCfg = Release|Any CPU - {23422F67-C1FB-4FF4-899C-706BCD63D9FD}.Release|x64.Build.0 = Release|Any CPU - {23422F67-C1FB-4FF4-899C-706BCD63D9FD}.Release|x86.ActiveCfg = Release|Any CPU - {23422F67-C1FB-4FF4-899C-706BCD63D9FD}.Release|x86.Build.0 = Release|Any CPU - {16AD4AB9-2A80-4CFD-91A7-36CC1FEF439F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {16AD4AB9-2A80-4CFD-91A7-36CC1FEF439F}.Debug|Any CPU.Build.0 = Debug|Any CPU - {16AD4AB9-2A80-4CFD-91A7-36CC1FEF439F}.Debug|x64.ActiveCfg = Debug|Any CPU - {16AD4AB9-2A80-4CFD-91A7-36CC1FEF439F}.Debug|x64.Build.0 = Debug|Any CPU - {16AD4AB9-2A80-4CFD-91A7-36CC1FEF439F}.Debug|x86.ActiveCfg = Debug|Any CPU - {16AD4AB9-2A80-4CFD-91A7-36CC1FEF439F}.Debug|x86.Build.0 = Debug|Any CPU - {16AD4AB9-2A80-4CFD-91A7-36CC1FEF439F}.Release|Any CPU.ActiveCfg = Release|Any CPU - {16AD4AB9-2A80-4CFD-91A7-36CC1FEF439F}.Release|Any CPU.Build.0 = Release|Any CPU - {16AD4AB9-2A80-4CFD-91A7-36CC1FEF439F}.Release|x64.ActiveCfg = Release|Any CPU - {16AD4AB9-2A80-4CFD-91A7-36CC1FEF439F}.Release|x64.Build.0 = Release|Any CPU - {16AD4AB9-2A80-4CFD-91A7-36CC1FEF439F}.Release|x86.ActiveCfg = Release|Any CPU - {16AD4AB9-2A80-4CFD-91A7-36CC1FEF439F}.Release|x86.Build.0 = Release|Any CPU - {20DB9837-715B-4515-98C6-14B50060B765}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {20DB9837-715B-4515-98C6-14B50060B765}.Debug|Any CPU.Build.0 = Debug|Any CPU - {20DB9837-715B-4515-98C6-14B50060B765}.Debug|x64.ActiveCfg = Debug|Any CPU - {20DB9837-715B-4515-98C6-14B50060B765}.Debug|x64.Build.0 = Debug|Any CPU - {20DB9837-715B-4515-98C6-14B50060B765}.Debug|x86.ActiveCfg = Debug|Any CPU - {20DB9837-715B-4515-98C6-14B50060B765}.Debug|x86.Build.0 = Debug|Any CPU - {20DB9837-715B-4515-98C6-14B50060B765}.Release|Any CPU.ActiveCfg = Release|Any CPU - {20DB9837-715B-4515-98C6-14B50060B765}.Release|Any CPU.Build.0 = Release|Any CPU - {20DB9837-715B-4515-98C6-14B50060B765}.Release|x64.ActiveCfg = Release|Any CPU - {20DB9837-715B-4515-98C6-14B50060B765}.Release|x64.Build.0 = Release|Any CPU - {20DB9837-715B-4515-98C6-14B50060B765}.Release|x86.ActiveCfg = Release|Any CPU - {20DB9837-715B-4515-98C6-14B50060B765}.Release|x86.Build.0 = Release|Any CPU - {10849EE2-9F34-4C23-BBB4-916A59CDB7F4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {10849EE2-9F34-4C23-BBB4-916A59CDB7F4}.Debug|Any CPU.Build.0 = Debug|Any CPU - {10849EE2-9F34-4C23-BBB4-916A59CDB7F4}.Debug|x64.ActiveCfg = Debug|Any CPU - {10849EE2-9F34-4C23-BBB4-916A59CDB7F4}.Debug|x64.Build.0 = Debug|Any CPU - {10849EE2-9F34-4C23-BBB4-916A59CDB7F4}.Debug|x86.ActiveCfg = Debug|Any CPU - {10849EE2-9F34-4C23-BBB4-916A59CDB7F4}.Debug|x86.Build.0 = Debug|Any CPU - {10849EE2-9F34-4C23-BBB4-916A59CDB7F4}.Release|Any CPU.ActiveCfg = Release|Any CPU - {10849EE2-9F34-4C23-BBB4-916A59CDB7F4}.Release|Any CPU.Build.0 = Release|Any CPU - {10849EE2-9F34-4C23-BBB4-916A59CDB7F4}.Release|x64.ActiveCfg = Release|Any CPU - {10849EE2-9F34-4C23-BBB4-916A59CDB7F4}.Release|x64.Build.0 = Release|Any CPU - 
{10849EE2-9F34-4C23-BBB4-916A59CDB7F4}.Release|x86.ActiveCfg = Release|Any CPU - {10849EE2-9F34-4C23-BBB4-916A59CDB7F4}.Release|x86.Build.0 = Release|Any CPU - {EFB16EDB-78D4-4601-852E-F4B37655FA13}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {EFB16EDB-78D4-4601-852E-F4B37655FA13}.Debug|Any CPU.Build.0 = Debug|Any CPU - {EFB16EDB-78D4-4601-852E-F4B37655FA13}.Debug|x64.ActiveCfg = Debug|Any CPU - {EFB16EDB-78D4-4601-852E-F4B37655FA13}.Debug|x64.Build.0 = Debug|Any CPU - {EFB16EDB-78D4-4601-852E-F4B37655FA13}.Debug|x86.ActiveCfg = Debug|Any CPU - {EFB16EDB-78D4-4601-852E-F4B37655FA13}.Debug|x86.Build.0 = Debug|Any CPU - {EFB16EDB-78D4-4601-852E-F4B37655FA13}.Release|Any CPU.ActiveCfg = Release|Any CPU - {EFB16EDB-78D4-4601-852E-F4B37655FA13}.Release|Any CPU.Build.0 = Release|Any CPU - {EFB16EDB-78D4-4601-852E-F4B37655FA13}.Release|x64.ActiveCfg = Release|Any CPU - {EFB16EDB-78D4-4601-852E-F4B37655FA13}.Release|x64.Build.0 = Release|Any CPU - {EFB16EDB-78D4-4601-852E-F4B37655FA13}.Release|x86.ActiveCfg = Release|Any CPU - {EFB16EDB-78D4-4601-852E-F4B37655FA13}.Release|x86.Build.0 = Release|Any CPU - {02289F61-0173-42CC-B8F2-25CC53F8E066}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {02289F61-0173-42CC-B8F2-25CC53F8E066}.Debug|Any CPU.Build.0 = Debug|Any CPU - {02289F61-0173-42CC-B8F2-25CC53F8E066}.Debug|x64.ActiveCfg = Debug|Any CPU - {02289F61-0173-42CC-B8F2-25CC53F8E066}.Debug|x64.Build.0 = Debug|Any CPU - {02289F61-0173-42CC-B8F2-25CC53F8E066}.Debug|x86.ActiveCfg = Debug|Any CPU - {02289F61-0173-42CC-B8F2-25CC53F8E066}.Debug|x86.Build.0 = Debug|Any CPU - {02289F61-0173-42CC-B8F2-25CC53F8E066}.Release|Any CPU.ActiveCfg = Release|Any CPU - {02289F61-0173-42CC-B8F2-25CC53F8E066}.Release|Any CPU.Build.0 = Release|Any CPU - {02289F61-0173-42CC-B8F2-25CC53F8E066}.Release|x64.ActiveCfg = Release|Any CPU - {02289F61-0173-42CC-B8F2-25CC53F8E066}.Release|x64.Build.0 = Release|Any CPU - {02289F61-0173-42CC-B8F2-25CC53F8E066}.Release|x86.ActiveCfg = Release|Any CPU - {02289F61-0173-42CC-B8F2-25CC53F8E066}.Release|x86.Build.0 = Release|Any CPU - {4CE0B67B-2B6D-4D48-9D38-2F1165FD6BF4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {4CE0B67B-2B6D-4D48-9D38-2F1165FD6BF4}.Debug|Any CPU.Build.0 = Debug|Any CPU - {4CE0B67B-2B6D-4D48-9D38-2F1165FD6BF4}.Debug|x64.ActiveCfg = Debug|Any CPU - {4CE0B67B-2B6D-4D48-9D38-2F1165FD6BF4}.Debug|x64.Build.0 = Debug|Any CPU - {4CE0B67B-2B6D-4D48-9D38-2F1165FD6BF4}.Debug|x86.ActiveCfg = Debug|Any CPU - {4CE0B67B-2B6D-4D48-9D38-2F1165FD6BF4}.Debug|x86.Build.0 = Debug|Any CPU - {4CE0B67B-2B6D-4D48-9D38-2F1165FD6BF4}.Release|Any CPU.ActiveCfg = Release|Any CPU - {4CE0B67B-2B6D-4D48-9D38-2F1165FD6BF4}.Release|Any CPU.Build.0 = Release|Any CPU - {4CE0B67B-2B6D-4D48-9D38-2F1165FD6BF4}.Release|x64.ActiveCfg = Release|Any CPU - {4CE0B67B-2B6D-4D48-9D38-2F1165FD6BF4}.Release|x64.Build.0 = Release|Any CPU - {4CE0B67B-2B6D-4D48-9D38-2F1165FD6BF4}.Release|x86.ActiveCfg = Release|Any CPU - {4CE0B67B-2B6D-4D48-9D38-2F1165FD6BF4}.Release|x86.Build.0 = Release|Any CPU - {EB037D9A-EF9C-439D-8A79-4B7D12F9C9D0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {EB037D9A-EF9C-439D-8A79-4B7D12F9C9D0}.Debug|Any CPU.Build.0 = Debug|Any CPU - {EB037D9A-EF9C-439D-8A79-4B7D12F9C9D0}.Debug|x64.ActiveCfg = Debug|Any CPU - {EB037D9A-EF9C-439D-8A79-4B7D12F9C9D0}.Debug|x64.Build.0 = Debug|Any CPU - {EB037D9A-EF9C-439D-8A79-4B7D12F9C9D0}.Debug|x86.ActiveCfg = Debug|Any CPU - {EB037D9A-EF9C-439D-8A79-4B7D12F9C9D0}.Debug|x86.Build.0 = Debug|Any CPU - {EB037D9A-EF9C-439D-8A79-4B7D12F9C9D0}.Release|Any CPU.ActiveCfg = 
Release|Any CPU - {EB037D9A-EF9C-439D-8A79-4B7D12F9C9D0}.Release|Any CPU.Build.0 = Release|Any CPU - {EB037D9A-EF9C-439D-8A79-4B7D12F9C9D0}.Release|x64.ActiveCfg = Release|Any CPU - {EB037D9A-EF9C-439D-8A79-4B7D12F9C9D0}.Release|x64.Build.0 = Release|Any CPU - {EB037D9A-EF9C-439D-8A79-4B7D12F9C9D0}.Release|x86.ActiveCfg = Release|Any CPU - {EB037D9A-EF9C-439D-8A79-4B7D12F9C9D0}.Release|x86.Build.0 = Release|Any CPU - {19957518-A422-4622-9FD1-621DF3E31869}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {19957518-A422-4622-9FD1-621DF3E31869}.Debug|Any CPU.Build.0 = Debug|Any CPU - {19957518-A422-4622-9FD1-621DF3E31869}.Debug|x64.ActiveCfg = Debug|Any CPU - {19957518-A422-4622-9FD1-621DF3E31869}.Debug|x64.Build.0 = Debug|Any CPU - {19957518-A422-4622-9FD1-621DF3E31869}.Debug|x86.ActiveCfg = Debug|Any CPU - {19957518-A422-4622-9FD1-621DF3E31869}.Debug|x86.Build.0 = Debug|Any CPU - {19957518-A422-4622-9FD1-621DF3E31869}.Release|Any CPU.ActiveCfg = Release|Any CPU - {19957518-A422-4622-9FD1-621DF3E31869}.Release|Any CPU.Build.0 = Release|Any CPU - {19957518-A422-4622-9FD1-621DF3E31869}.Release|x64.ActiveCfg = Release|Any CPU - {19957518-A422-4622-9FD1-621DF3E31869}.Release|x64.Build.0 = Release|Any CPU - {19957518-A422-4622-9FD1-621DF3E31869}.Release|x86.ActiveCfg = Release|Any CPU - {19957518-A422-4622-9FD1-621DF3E31869}.Release|x86.Build.0 = Release|Any CPU - {69C4C061-F5A0-4EAA-A4CD-9A513523952A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {69C4C061-F5A0-4EAA-A4CD-9A513523952A}.Debug|Any CPU.Build.0 = Debug|Any CPU - {69C4C061-F5A0-4EAA-A4CD-9A513523952A}.Debug|x64.ActiveCfg = Debug|Any CPU - {69C4C061-F5A0-4EAA-A4CD-9A513523952A}.Debug|x64.Build.0 = Debug|Any CPU - {69C4C061-F5A0-4EAA-A4CD-9A513523952A}.Debug|x86.ActiveCfg = Debug|Any CPU - {69C4C061-F5A0-4EAA-A4CD-9A513523952A}.Debug|x86.Build.0 = Debug|Any CPU - {69C4C061-F5A0-4EAA-A4CD-9A513523952A}.Release|Any CPU.ActiveCfg = Release|Any CPU - {69C4C061-F5A0-4EAA-A4CD-9A513523952A}.Release|Any CPU.Build.0 = Release|Any CPU - {69C4C061-F5A0-4EAA-A4CD-9A513523952A}.Release|x64.ActiveCfg = Release|Any CPU - {69C4C061-F5A0-4EAA-A4CD-9A513523952A}.Release|x64.Build.0 = Release|Any CPU - {69C4C061-F5A0-4EAA-A4CD-9A513523952A}.Release|x86.ActiveCfg = Release|Any CPU - {69C4C061-F5A0-4EAA-A4CD-9A513523952A}.Release|x86.Build.0 = Release|Any CPU - {C7F7DE6F-A369-4F43-9864-286DCEC615F8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {C7F7DE6F-A369-4F43-9864-286DCEC615F8}.Debug|Any CPU.Build.0 = Debug|Any CPU - {C7F7DE6F-A369-4F43-9864-286DCEC615F8}.Debug|x64.ActiveCfg = Debug|Any CPU - {C7F7DE6F-A369-4F43-9864-286DCEC615F8}.Debug|x64.Build.0 = Debug|Any CPU - {C7F7DE6F-A369-4F43-9864-286DCEC615F8}.Debug|x86.ActiveCfg = Debug|Any CPU - {C7F7DE6F-A369-4F43-9864-286DCEC615F8}.Debug|x86.Build.0 = Debug|Any CPU - {C7F7DE6F-A369-4F43-9864-286DCEC615F8}.Release|Any CPU.ActiveCfg = Release|Any CPU - {C7F7DE6F-A369-4F43-9864-286DCEC615F8}.Release|Any CPU.Build.0 = Release|Any CPU - {C7F7DE6F-A369-4F43-9864-286DCEC615F8}.Release|x64.ActiveCfg = Release|Any CPU - {C7F7DE6F-A369-4F43-9864-286DCEC615F8}.Release|x64.Build.0 = Release|Any CPU - {C7F7DE6F-A369-4F43-9864-286DCEC615F8}.Release|x86.ActiveCfg = Release|Any CPU - {C7F7DE6F-A369-4F43-9864-286DCEC615F8}.Release|x86.Build.0 = Release|Any CPU - {1C1593FE-73A4-47E8-A45B-5FC3B0BA7698}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {1C1593FE-73A4-47E8-A45B-5FC3B0BA7698}.Debug|Any CPU.Build.0 = Debug|Any CPU - {1C1593FE-73A4-47E8-A45B-5FC3B0BA7698}.Debug|x64.ActiveCfg = Debug|Any CPU - 
{1C1593FE-73A4-47E8-A45B-5FC3B0BA7698}.Debug|x64.Build.0 = Debug|Any CPU - {1C1593FE-73A4-47E8-A45B-5FC3B0BA7698}.Debug|x86.ActiveCfg = Debug|Any CPU - {1C1593FE-73A4-47E8-A45B-5FC3B0BA7698}.Debug|x86.Build.0 = Debug|Any CPU - {1C1593FE-73A4-47E8-A45B-5FC3B0BA7698}.Release|Any CPU.ActiveCfg = Release|Any CPU - {1C1593FE-73A4-47E8-A45B-5FC3B0BA7698}.Release|Any CPU.Build.0 = Release|Any CPU - {1C1593FE-73A4-47E8-A45B-5FC3B0BA7698}.Release|x64.ActiveCfg = Release|Any CPU - {1C1593FE-73A4-47E8-A45B-5FC3B0BA7698}.Release|x64.Build.0 = Release|Any CPU - {1C1593FE-73A4-47E8-A45B-5FC3B0BA7698}.Release|x86.ActiveCfg = Release|Any CPU - {1C1593FE-73A4-47E8-A45B-5FC3B0BA7698}.Release|x86.Build.0 = Release|Any CPU - {7255C38D-5A16-4A4D-98CE-CF0FD516B68E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {7255C38D-5A16-4A4D-98CE-CF0FD516B68E}.Debug|Any CPU.Build.0 = Debug|Any CPU - {7255C38D-5A16-4A4D-98CE-CF0FD516B68E}.Debug|x64.ActiveCfg = Debug|Any CPU - {7255C38D-5A16-4A4D-98CE-CF0FD516B68E}.Debug|x64.Build.0 = Debug|Any CPU - {7255C38D-5A16-4A4D-98CE-CF0FD516B68E}.Debug|x86.ActiveCfg = Debug|Any CPU - {7255C38D-5A16-4A4D-98CE-CF0FD516B68E}.Debug|x86.Build.0 = Debug|Any CPU - {7255C38D-5A16-4A4D-98CE-CF0FD516B68E}.Release|Any CPU.ActiveCfg = Release|Any CPU - {7255C38D-5A16-4A4D-98CE-CF0FD516B68E}.Release|Any CPU.Build.0 = Release|Any CPU - {7255C38D-5A16-4A4D-98CE-CF0FD516B68E}.Release|x64.ActiveCfg = Release|Any CPU - {7255C38D-5A16-4A4D-98CE-CF0FD516B68E}.Release|x64.Build.0 = Release|Any CPU - {7255C38D-5A16-4A4D-98CE-CF0FD516B68E}.Release|x86.ActiveCfg = Release|Any CPU - {7255C38D-5A16-4A4D-98CE-CF0FD516B68E}.Release|x86.Build.0 = Release|Any CPU - {C3A42AA3-800D-4398-A077-5560EE6451EF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {C3A42AA3-800D-4398-A077-5560EE6451EF}.Debug|Any CPU.Build.0 = Debug|Any CPU - {C3A42AA3-800D-4398-A077-5560EE6451EF}.Debug|x64.ActiveCfg = Debug|Any CPU - {C3A42AA3-800D-4398-A077-5560EE6451EF}.Debug|x64.Build.0 = Debug|Any CPU - {C3A42AA3-800D-4398-A077-5560EE6451EF}.Debug|x86.ActiveCfg = Debug|Any CPU - {C3A42AA3-800D-4398-A077-5560EE6451EF}.Debug|x86.Build.0 = Debug|Any CPU - {C3A42AA3-800D-4398-A077-5560EE6451EF}.Release|Any CPU.ActiveCfg = Release|Any CPU - {C3A42AA3-800D-4398-A077-5560EE6451EF}.Release|Any CPU.Build.0 = Release|Any CPU - {C3A42AA3-800D-4398-A077-5560EE6451EF}.Release|x64.ActiveCfg = Release|Any CPU - {C3A42AA3-800D-4398-A077-5560EE6451EF}.Release|x64.Build.0 = Release|Any CPU - {C3A42AA3-800D-4398-A077-5560EE6451EF}.Release|x86.ActiveCfg = Release|Any CPU - {C3A42AA3-800D-4398-A077-5560EE6451EF}.Release|x86.Build.0 = Release|Any CPU - {5016963A-6FC9-4063-AB83-2D1F9A2BC627}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {5016963A-6FC9-4063-AB83-2D1F9A2BC627}.Debug|Any CPU.Build.0 = Debug|Any CPU - {5016963A-6FC9-4063-AB83-2D1F9A2BC627}.Debug|x64.ActiveCfg = Debug|Any CPU - {5016963A-6FC9-4063-AB83-2D1F9A2BC627}.Debug|x64.Build.0 = Debug|Any CPU - {5016963A-6FC9-4063-AB83-2D1F9A2BC627}.Debug|x86.ActiveCfg = Debug|Any CPU - {5016963A-6FC9-4063-AB83-2D1F9A2BC627}.Debug|x86.Build.0 = Debug|Any CPU - {5016963A-6FC9-4063-AB83-2D1F9A2BC627}.Release|Any CPU.ActiveCfg = Release|Any CPU - {5016963A-6FC9-4063-AB83-2D1F9A2BC627}.Release|Any CPU.Build.0 = Release|Any CPU - {5016963A-6FC9-4063-AB83-2D1F9A2BC627}.Release|x64.ActiveCfg = Release|Any CPU - {5016963A-6FC9-4063-AB83-2D1F9A2BC627}.Release|x64.Build.0 = Release|Any CPU - {5016963A-6FC9-4063-AB83-2D1F9A2BC627}.Release|x86.ActiveCfg = Release|Any CPU - {5016963A-6FC9-4063-AB83-2D1F9A2BC627}.Release|x86.Build.0 = 
Release|Any CPU - {72F43F43-F852-487F-8334-91D438CE2F7C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {72F43F43-F852-487F-8334-91D438CE2F7C}.Debug|Any CPU.Build.0 = Debug|Any CPU - {72F43F43-F852-487F-8334-91D438CE2F7C}.Debug|x64.ActiveCfg = Debug|Any CPU - {72F43F43-F852-487F-8334-91D438CE2F7C}.Debug|x64.Build.0 = Debug|Any CPU - {72F43F43-F852-487F-8334-91D438CE2F7C}.Debug|x86.ActiveCfg = Debug|Any CPU - {72F43F43-F852-487F-8334-91D438CE2F7C}.Debug|x86.Build.0 = Debug|Any CPU - {72F43F43-F852-487F-8334-91D438CE2F7C}.Release|Any CPU.ActiveCfg = Release|Any CPU - {72F43F43-F852-487F-8334-91D438CE2F7C}.Release|Any CPU.Build.0 = Release|Any CPU - {72F43F43-F852-487F-8334-91D438CE2F7C}.Release|x64.ActiveCfg = Release|Any CPU - {72F43F43-F852-487F-8334-91D438CE2F7C}.Release|x64.Build.0 = Release|Any CPU - {72F43F43-F852-487F-8334-91D438CE2F7C}.Release|x86.ActiveCfg = Release|Any CPU - {72F43F43-F852-487F-8334-91D438CE2F7C}.Release|x86.Build.0 = Release|Any CPU - {A4DBF88F-34D0-4A05-ACCE-DE080F912FDB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {A4DBF88F-34D0-4A05-ACCE-DE080F912FDB}.Debug|Any CPU.Build.0 = Debug|Any CPU - {A4DBF88F-34D0-4A05-ACCE-DE080F912FDB}.Debug|x64.ActiveCfg = Debug|Any CPU - {A4DBF88F-34D0-4A05-ACCE-DE080F912FDB}.Debug|x64.Build.0 = Debug|Any CPU - {A4DBF88F-34D0-4A05-ACCE-DE080F912FDB}.Debug|x86.ActiveCfg = Debug|Any CPU - {A4DBF88F-34D0-4A05-ACCE-DE080F912FDB}.Debug|x86.Build.0 = Debug|Any CPU - {A4DBF88F-34D0-4A05-ACCE-DE080F912FDB}.Release|Any CPU.ActiveCfg = Release|Any CPU - {A4DBF88F-34D0-4A05-ACCE-DE080F912FDB}.Release|Any CPU.Build.0 = Release|Any CPU - {A4DBF88F-34D0-4A05-ACCE-DE080F912FDB}.Release|x64.ActiveCfg = Release|Any CPU - {A4DBF88F-34D0-4A05-ACCE-DE080F912FDB}.Release|x64.Build.0 = Release|Any CPU - {A4DBF88F-34D0-4A05-ACCE-DE080F912FDB}.Release|x86.ActiveCfg = Release|Any CPU - {A4DBF88F-34D0-4A05-ACCE-DE080F912FDB}.Release|x86.Build.0 = Release|Any CPU - {F622D38D-DA49-473E-B724-E706F8113CF2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {F622D38D-DA49-473E-B724-E706F8113CF2}.Debug|Any CPU.Build.0 = Debug|Any CPU - {F622D38D-DA49-473E-B724-E706F8113CF2}.Debug|x64.ActiveCfg = Debug|Any CPU - {F622D38D-DA49-473E-B724-E706F8113CF2}.Debug|x64.Build.0 = Debug|Any CPU - {F622D38D-DA49-473E-B724-E706F8113CF2}.Debug|x86.ActiveCfg = Debug|Any CPU - {F622D38D-DA49-473E-B724-E706F8113CF2}.Debug|x86.Build.0 = Debug|Any CPU - {F622D38D-DA49-473E-B724-E706F8113CF2}.Release|Any CPU.ActiveCfg = Release|Any CPU - {F622D38D-DA49-473E-B724-E706F8113CF2}.Release|Any CPU.Build.0 = Release|Any CPU - {F622D38D-DA49-473E-B724-E706F8113CF2}.Release|x64.ActiveCfg = Release|Any CPU - {F622D38D-DA49-473E-B724-E706F8113CF2}.Release|x64.Build.0 = Release|Any CPU - {F622D38D-DA49-473E-B724-E706F8113CF2}.Release|x86.ActiveCfg = Release|Any CPU - {F622D38D-DA49-473E-B724-E706F8113CF2}.Release|x86.Build.0 = Release|Any CPU - {3A3D7610-C864-4413-B07E-9E8C2A49A90E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {3A3D7610-C864-4413-B07E-9E8C2A49A90E}.Debug|Any CPU.Build.0 = Debug|Any CPU - {3A3D7610-C864-4413-B07E-9E8C2A49A90E}.Debug|x64.ActiveCfg = Debug|Any CPU - {3A3D7610-C864-4413-B07E-9E8C2A49A90E}.Debug|x64.Build.0 = Debug|Any CPU - {3A3D7610-C864-4413-B07E-9E8C2A49A90E}.Debug|x86.ActiveCfg = Debug|Any CPU - {3A3D7610-C864-4413-B07E-9E8C2A49A90E}.Debug|x86.Build.0 = Debug|Any CPU - {3A3D7610-C864-4413-B07E-9E8C2A49A90E}.Release|Any CPU.ActiveCfg = Release|Any CPU - {3A3D7610-C864-4413-B07E-9E8C2A49A90E}.Release|Any CPU.Build.0 = Release|Any CPU - 
{3A3D7610-C864-4413-B07E-9E8C2A49A90E}.Release|x64.ActiveCfg = Release|Any CPU - {3A3D7610-C864-4413-B07E-9E8C2A49A90E}.Release|x64.Build.0 = Release|Any CPU - {3A3D7610-C864-4413-B07E-9E8C2A49A90E}.Release|x86.ActiveCfg = Release|Any CPU - {3A3D7610-C864-4413-B07E-9E8C2A49A90E}.Release|x86.Build.0 = Release|Any CPU - {9C4DEE96-CD7D-4AE3-A811-0B48B477003B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {9C4DEE96-CD7D-4AE3-A811-0B48B477003B}.Debug|Any CPU.Build.0 = Debug|Any CPU - {9C4DEE96-CD7D-4AE3-A811-0B48B477003B}.Debug|x64.ActiveCfg = Debug|Any CPU - {9C4DEE96-CD7D-4AE3-A811-0B48B477003B}.Debug|x64.Build.0 = Debug|Any CPU - {9C4DEE96-CD7D-4AE3-A811-0B48B477003B}.Debug|x86.ActiveCfg = Debug|Any CPU - {9C4DEE96-CD7D-4AE3-A811-0B48B477003B}.Debug|x86.Build.0 = Debug|Any CPU - {9C4DEE96-CD7D-4AE3-A811-0B48B477003B}.Release|Any CPU.ActiveCfg = Release|Any CPU - {9C4DEE96-CD7D-4AE3-A811-0B48B477003B}.Release|Any CPU.Build.0 = Release|Any CPU - {9C4DEE96-CD7D-4AE3-A811-0B48B477003B}.Release|x64.ActiveCfg = Release|Any CPU - {9C4DEE96-CD7D-4AE3-A811-0B48B477003B}.Release|x64.Build.0 = Release|Any CPU - {9C4DEE96-CD7D-4AE3-A811-0B48B477003B}.Release|x86.ActiveCfg = Release|Any CPU - {9C4DEE96-CD7D-4AE3-A811-0B48B477003B}.Release|x86.Build.0 = Release|Any CPU - {437B2667-9461-47D2-B75B-4D2E03D69B94}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {437B2667-9461-47D2-B75B-4D2E03D69B94}.Debug|Any CPU.Build.0 = Debug|Any CPU - {437B2667-9461-47D2-B75B-4D2E03D69B94}.Debug|x64.ActiveCfg = Debug|Any CPU - {437B2667-9461-47D2-B75B-4D2E03D69B94}.Debug|x64.Build.0 = Debug|Any CPU - {437B2667-9461-47D2-B75B-4D2E03D69B94}.Debug|x86.ActiveCfg = Debug|Any CPU - {437B2667-9461-47D2-B75B-4D2E03D69B94}.Debug|x86.Build.0 = Debug|Any CPU - {437B2667-9461-47D2-B75B-4D2E03D69B94}.Release|Any CPU.ActiveCfg = Release|Any CPU - {437B2667-9461-47D2-B75B-4D2E03D69B94}.Release|Any CPU.Build.0 = Release|Any CPU - {437B2667-9461-47D2-B75B-4D2E03D69B94}.Release|x64.ActiveCfg = Release|Any CPU - {437B2667-9461-47D2-B75B-4D2E03D69B94}.Release|x64.Build.0 = Release|Any CPU - {437B2667-9461-47D2-B75B-4D2E03D69B94}.Release|x86.ActiveCfg = Release|Any CPU - {437B2667-9461-47D2-B75B-4D2E03D69B94}.Release|x86.Build.0 = Release|Any CPU - {8249DF28-CDAF-4DEF-A912-C27F57B67FD5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {8249DF28-CDAF-4DEF-A912-C27F57B67FD5}.Debug|Any CPU.Build.0 = Debug|Any CPU - {8249DF28-CDAF-4DEF-A912-C27F57B67FD5}.Debug|x64.ActiveCfg = Debug|Any CPU - {8249DF28-CDAF-4DEF-A912-C27F57B67FD5}.Debug|x64.Build.0 = Debug|Any CPU - {8249DF28-CDAF-4DEF-A912-C27F57B67FD5}.Debug|x86.ActiveCfg = Debug|Any CPU - {8249DF28-CDAF-4DEF-A912-C27F57B67FD5}.Debug|x86.Build.0 = Debug|Any CPU - {8249DF28-CDAF-4DEF-A912-C27F57B67FD5}.Release|Any CPU.ActiveCfg = Release|Any CPU - {8249DF28-CDAF-4DEF-A912-C27F57B67FD5}.Release|Any CPU.Build.0 = Release|Any CPU - {8249DF28-CDAF-4DEF-A912-C27F57B67FD5}.Release|x64.ActiveCfg = Release|Any CPU - {8249DF28-CDAF-4DEF-A912-C27F57B67FD5}.Release|x64.Build.0 = Release|Any CPU - {8249DF28-CDAF-4DEF-A912-C27F57B67FD5}.Release|x86.ActiveCfg = Release|Any CPU - {8249DF28-CDAF-4DEF-A912-C27F57B67FD5}.Release|x86.Build.0 = Release|Any CPU - {CBFB015B-C069-475F-A476-D52222729804}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {CBFB015B-C069-475F-A476-D52222729804}.Debug|Any CPU.Build.0 = Debug|Any CPU - {CBFB015B-C069-475F-A476-D52222729804}.Debug|x64.ActiveCfg = Debug|Any CPU - {CBFB015B-C069-475F-A476-D52222729804}.Debug|x64.Build.0 = Debug|Any CPU - {CBFB015B-C069-475F-A476-D52222729804}.Debug|x86.ActiveCfg = 
Debug|Any CPU - {CBFB015B-C069-475F-A476-D52222729804}.Debug|x86.Build.0 = Debug|Any CPU - {CBFB015B-C069-475F-A476-D52222729804}.Release|Any CPU.ActiveCfg = Release|Any CPU - {CBFB015B-C069-475F-A476-D52222729804}.Release|Any CPU.Build.0 = Release|Any CPU - {CBFB015B-C069-475F-A476-D52222729804}.Release|x64.ActiveCfg = Release|Any CPU - {CBFB015B-C069-475F-A476-D52222729804}.Release|x64.Build.0 = Release|Any CPU - {CBFB015B-C069-475F-A476-D52222729804}.Release|x86.ActiveCfg = Release|Any CPU - {CBFB015B-C069-475F-A476-D52222729804}.Release|x86.Build.0 = Release|Any CPU - {2A41D9D2-3218-4F12-9C2B-3DB18A8E732E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {2A41D9D2-3218-4F12-9C2B-3DB18A8E732E}.Debug|Any CPU.Build.0 = Debug|Any CPU - {2A41D9D2-3218-4F12-9C2B-3DB18A8E732E}.Debug|x64.ActiveCfg = Debug|Any CPU - {2A41D9D2-3218-4F12-9C2B-3DB18A8E732E}.Debug|x64.Build.0 = Debug|Any CPU - {2A41D9D2-3218-4F12-9C2B-3DB18A8E732E}.Debug|x86.ActiveCfg = Debug|Any CPU - {2A41D9D2-3218-4F12-9C2B-3DB18A8E732E}.Debug|x86.Build.0 = Debug|Any CPU - {2A41D9D2-3218-4F12-9C2B-3DB18A8E732E}.Release|Any CPU.ActiveCfg = Release|Any CPU - {2A41D9D2-3218-4F12-9C2B-3DB18A8E732E}.Release|Any CPU.Build.0 = Release|Any CPU - {2A41D9D2-3218-4F12-9C2B-3DB18A8E732E}.Release|x64.ActiveCfg = Release|Any CPU - {2A41D9D2-3218-4F12-9C2B-3DB18A8E732E}.Release|x64.Build.0 = Release|Any CPU - {2A41D9D2-3218-4F12-9C2B-3DB18A8E732E}.Release|x86.ActiveCfg = Release|Any CPU - {2A41D9D2-3218-4F12-9C2B-3DB18A8E732E}.Release|x86.Build.0 = Release|Any CPU - {3EB22234-642E-4533-BCC3-93E8ED443B1D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {3EB22234-642E-4533-BCC3-93E8ED443B1D}.Debug|Any CPU.Build.0 = Debug|Any CPU - {3EB22234-642E-4533-BCC3-93E8ED443B1D}.Debug|x64.ActiveCfg = Debug|Any CPU - {3EB22234-642E-4533-BCC3-93E8ED443B1D}.Debug|x64.Build.0 = Debug|Any CPU - {3EB22234-642E-4533-BCC3-93E8ED443B1D}.Debug|x86.ActiveCfg = Debug|Any CPU - {3EB22234-642E-4533-BCC3-93E8ED443B1D}.Debug|x86.Build.0 = Debug|Any CPU - {3EB22234-642E-4533-BCC3-93E8ED443B1D}.Release|Any CPU.ActiveCfg = Release|Any CPU - {3EB22234-642E-4533-BCC3-93E8ED443B1D}.Release|Any CPU.Build.0 = Release|Any CPU - {3EB22234-642E-4533-BCC3-93E8ED443B1D}.Release|x64.ActiveCfg = Release|Any CPU - {3EB22234-642E-4533-BCC3-93E8ED443B1D}.Release|x64.Build.0 = Release|Any CPU - {3EB22234-642E-4533-BCC3-93E8ED443B1D}.Release|x86.ActiveCfg = Release|Any CPU - {3EB22234-642E-4533-BCC3-93E8ED443B1D}.Release|x86.Build.0 = Release|Any CPU - {84A5DE81-4444-499A-93BF-6DC4CA72F8D4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {84A5DE81-4444-499A-93BF-6DC4CA72F8D4}.Debug|Any CPU.Build.0 = Debug|Any CPU - {84A5DE81-4444-499A-93BF-6DC4CA72F8D4}.Debug|x64.ActiveCfg = Debug|Any CPU - {84A5DE81-4444-499A-93BF-6DC4CA72F8D4}.Debug|x64.Build.0 = Debug|Any CPU - {84A5DE81-4444-499A-93BF-6DC4CA72F8D4}.Debug|x86.ActiveCfg = Debug|Any CPU - {84A5DE81-4444-499A-93BF-6DC4CA72F8D4}.Debug|x86.Build.0 = Debug|Any CPU - {84A5DE81-4444-499A-93BF-6DC4CA72F8D4}.Release|Any CPU.ActiveCfg = Release|Any CPU - {84A5DE81-4444-499A-93BF-6DC4CA72F8D4}.Release|Any CPU.Build.0 = Release|Any CPU - {84A5DE81-4444-499A-93BF-6DC4CA72F8D4}.Release|x64.ActiveCfg = Release|Any CPU - {84A5DE81-4444-499A-93BF-6DC4CA72F8D4}.Release|x64.Build.0 = Release|Any CPU - {84A5DE81-4444-499A-93BF-6DC4CA72F8D4}.Release|x86.ActiveCfg = Release|Any CPU - {84A5DE81-4444-499A-93BF-6DC4CA72F8D4}.Release|x86.Build.0 = Release|Any CPU - {42E21E1D-C3DE-4765-93E9-39391BB5C802}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - 
{42E21E1D-C3DE-4765-93E9-39391BB5C802}.Debug|Any CPU.Build.0 = Debug|Any CPU - {42E21E1D-C3DE-4765-93E9-39391BB5C802}.Debug|x64.ActiveCfg = Debug|Any CPU - {42E21E1D-C3DE-4765-93E9-39391BB5C802}.Debug|x64.Build.0 = Debug|Any CPU - {42E21E1D-C3DE-4765-93E9-39391BB5C802}.Debug|x86.ActiveCfg = Debug|Any CPU - {42E21E1D-C3DE-4765-93E9-39391BB5C802}.Debug|x86.Build.0 = Debug|Any CPU - {42E21E1D-C3DE-4765-93E9-39391BB5C802}.Release|Any CPU.ActiveCfg = Release|Any CPU - {42E21E1D-C3DE-4765-93E9-39391BB5C802}.Release|Any CPU.Build.0 = Release|Any CPU - {42E21E1D-C3DE-4765-93E9-39391BB5C802}.Release|x64.ActiveCfg = Release|Any CPU - {42E21E1D-C3DE-4765-93E9-39391BB5C802}.Release|x64.Build.0 = Release|Any CPU - {42E21E1D-C3DE-4765-93E9-39391BB5C802}.Release|x86.ActiveCfg = Release|Any CPU - {42E21E1D-C3DE-4765-93E9-39391BB5C802}.Release|x86.Build.0 = Release|Any CPU - {B6E2EE26-B297-4AB9-A47E-A227F5EAE108}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {B6E2EE26-B297-4AB9-A47E-A227F5EAE108}.Debug|Any CPU.Build.0 = Debug|Any CPU - {B6E2EE26-B297-4AB9-A47E-A227F5EAE108}.Debug|x64.ActiveCfg = Debug|Any CPU - {B6E2EE26-B297-4AB9-A47E-A227F5EAE108}.Debug|x64.Build.0 = Debug|Any CPU - {B6E2EE26-B297-4AB9-A47E-A227F5EAE108}.Debug|x86.ActiveCfg = Debug|Any CPU - {B6E2EE26-B297-4AB9-A47E-A227F5EAE108}.Debug|x86.Build.0 = Debug|Any CPU - {B6E2EE26-B297-4AB9-A47E-A227F5EAE108}.Release|Any CPU.ActiveCfg = Release|Any CPU - {B6E2EE26-B297-4AB9-A47E-A227F5EAE108}.Release|Any CPU.Build.0 = Release|Any CPU - {B6E2EE26-B297-4AB9-A47E-A227F5EAE108}.Release|x64.ActiveCfg = Release|Any CPU - {B6E2EE26-B297-4AB9-A47E-A227F5EAE108}.Release|x64.Build.0 = Release|Any CPU - {B6E2EE26-B297-4AB9-A47E-A227F5EAE108}.Release|x86.ActiveCfg = Release|Any CPU - {B6E2EE26-B297-4AB9-A47E-A227F5EAE108}.Release|x86.Build.0 = Release|Any CPU - {CDB2D636-C82F-43F1-BB30-FFC6258FBAB4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {CDB2D636-C82F-43F1-BB30-FFC6258FBAB4}.Debug|Any CPU.Build.0 = Debug|Any CPU - {CDB2D636-C82F-43F1-BB30-FFC6258FBAB4}.Debug|x64.ActiveCfg = Debug|Any CPU - {CDB2D636-C82F-43F1-BB30-FFC6258FBAB4}.Debug|x64.Build.0 = Debug|Any CPU - {CDB2D636-C82F-43F1-BB30-FFC6258FBAB4}.Debug|x86.ActiveCfg = Debug|Any CPU - {CDB2D636-C82F-43F1-BB30-FFC6258FBAB4}.Debug|x86.Build.0 = Debug|Any CPU - {CDB2D636-C82F-43F1-BB30-FFC6258FBAB4}.Release|Any CPU.ActiveCfg = Release|Any CPU - {CDB2D636-C82F-43F1-BB30-FFC6258FBAB4}.Release|Any CPU.Build.0 = Release|Any CPU - {CDB2D636-C82F-43F1-BB30-FFC6258FBAB4}.Release|x64.ActiveCfg = Release|Any CPU - {CDB2D636-C82F-43F1-BB30-FFC6258FBAB4}.Release|x64.Build.0 = Release|Any CPU - {CDB2D636-C82F-43F1-BB30-FFC6258FBAB4}.Release|x86.ActiveCfg = Release|Any CPU - {CDB2D636-C82F-43F1-BB30-FFC6258FBAB4}.Release|x86.Build.0 = Release|Any CPU - {2891FCDE-BB89-46F0-A40C-368EF804DB44}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {2891FCDE-BB89-46F0-A40C-368EF804DB44}.Debug|Any CPU.Build.0 = Debug|Any CPU - {2891FCDE-BB89-46F0-A40C-368EF804DB44}.Debug|x64.ActiveCfg = Debug|Any CPU - {2891FCDE-BB89-46F0-A40C-368EF804DB44}.Debug|x64.Build.0 = Debug|Any CPU - {2891FCDE-BB89-46F0-A40C-368EF804DB44}.Debug|x86.ActiveCfg = Debug|Any CPU - {2891FCDE-BB89-46F0-A40C-368EF804DB44}.Debug|x86.Build.0 = Debug|Any CPU - {2891FCDE-BB89-46F0-A40C-368EF804DB44}.Release|Any CPU.ActiveCfg = Release|Any CPU - {2891FCDE-BB89-46F0-A40C-368EF804DB44}.Release|Any CPU.Build.0 = Release|Any CPU - {2891FCDE-BB89-46F0-A40C-368EF804DB44}.Release|x64.ActiveCfg = Release|Any CPU - {2891FCDE-BB89-46F0-A40C-368EF804DB44}.Release|x64.Build.0 = 
Release|Any CPU - {2891FCDE-BB89-46F0-A40C-368EF804DB44}.Release|x86.ActiveCfg = Release|Any CPU - {2891FCDE-BB89-46F0-A40C-368EF804DB44}.Release|x86.Build.0 = Release|Any CPU - {B91C60FB-926F-47C3-BFD0-6DD145308344}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {B91C60FB-926F-47C3-BFD0-6DD145308344}.Debug|Any CPU.Build.0 = Debug|Any CPU - {B91C60FB-926F-47C3-BFD0-6DD145308344}.Debug|x64.ActiveCfg = Debug|Any CPU - {B91C60FB-926F-47C3-BFD0-6DD145308344}.Debug|x64.Build.0 = Debug|Any CPU - {B91C60FB-926F-47C3-BFD0-6DD145308344}.Debug|x86.ActiveCfg = Debug|Any CPU - {B91C60FB-926F-47C3-BFD0-6DD145308344}.Debug|x86.Build.0 = Debug|Any CPU - {B91C60FB-926F-47C3-BFD0-6DD145308344}.Release|Any CPU.ActiveCfg = Release|Any CPU - {B91C60FB-926F-47C3-BFD0-6DD145308344}.Release|Any CPU.Build.0 = Release|Any CPU - {B91C60FB-926F-47C3-BFD0-6DD145308344}.Release|x64.ActiveCfg = Release|Any CPU - {B91C60FB-926F-47C3-BFD0-6DD145308344}.Release|x64.Build.0 = Release|Any CPU - {B91C60FB-926F-47C3-BFD0-6DD145308344}.Release|x86.ActiveCfg = Release|Any CPU - {B91C60FB-926F-47C3-BFD0-6DD145308344}.Release|x86.Build.0 = Release|Any CPU - {30DF89D1-D66D-4078-8A3B-951637A42265}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {30DF89D1-D66D-4078-8A3B-951637A42265}.Debug|Any CPU.Build.0 = Debug|Any CPU - {30DF89D1-D66D-4078-8A3B-951637A42265}.Debug|x64.ActiveCfg = Debug|Any CPU - {30DF89D1-D66D-4078-8A3B-951637A42265}.Debug|x64.Build.0 = Debug|Any CPU - {30DF89D1-D66D-4078-8A3B-951637A42265}.Debug|x86.ActiveCfg = Debug|Any CPU - {30DF89D1-D66D-4078-8A3B-951637A42265}.Debug|x86.Build.0 = Debug|Any CPU - {30DF89D1-D66D-4078-8A3B-951637A42265}.Release|Any CPU.ActiveCfg = Release|Any CPU - {30DF89D1-D66D-4078-8A3B-951637A42265}.Release|Any CPU.Build.0 = Release|Any CPU - {30DF89D1-D66D-4078-8A3B-951637A42265}.Release|x64.ActiveCfg = Release|Any CPU - {30DF89D1-D66D-4078-8A3B-951637A42265}.Release|x64.Build.0 = Release|Any CPU - {30DF89D1-D66D-4078-8A3B-951637A42265}.Release|x86.ActiveCfg = Release|Any CPU - {30DF89D1-D66D-4078-8A3B-951637A42265}.Release|x86.Build.0 = Release|Any CPU - {6E98C770-72FF-41FA-8C42-30AABAAF5B4E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {6E98C770-72FF-41FA-8C42-30AABAAF5B4E}.Debug|Any CPU.Build.0 = Debug|Any CPU - {6E98C770-72FF-41FA-8C42-30AABAAF5B4E}.Debug|x64.ActiveCfg = Debug|Any CPU - {6E98C770-72FF-41FA-8C42-30AABAAF5B4E}.Debug|x64.Build.0 = Debug|Any CPU - {6E98C770-72FF-41FA-8C42-30AABAAF5B4E}.Debug|x86.ActiveCfg = Debug|Any CPU - {6E98C770-72FF-41FA-8C42-30AABAAF5B4E}.Debug|x86.Build.0 = Debug|Any CPU - {6E98C770-72FF-41FA-8C42-30AABAAF5B4E}.Release|Any CPU.ActiveCfg = Release|Any CPU - {6E98C770-72FF-41FA-8C42-30AABAAF5B4E}.Release|Any CPU.Build.0 = Release|Any CPU - {6E98C770-72FF-41FA-8C42-30AABAAF5B4E}.Release|x64.ActiveCfg = Release|Any CPU - {6E98C770-72FF-41FA-8C42-30AABAAF5B4E}.Release|x64.Build.0 = Release|Any CPU - {6E98C770-72FF-41FA-8C42-30AABAAF5B4E}.Release|x86.ActiveCfg = Release|Any CPU - {6E98C770-72FF-41FA-8C42-30AABAAF5B4E}.Release|x86.Build.0 = Release|Any CPU - {79B36C92-BA93-4406-AB75-6F2282DDFF01}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {79B36C92-BA93-4406-AB75-6F2282DDFF01}.Debug|Any CPU.Build.0 = Debug|Any CPU - {79B36C92-BA93-4406-AB75-6F2282DDFF01}.Debug|x64.ActiveCfg = Debug|Any CPU - {79B36C92-BA93-4406-AB75-6F2282DDFF01}.Debug|x64.Build.0 = Debug|Any CPU - {79B36C92-BA93-4406-AB75-6F2282DDFF01}.Debug|x86.ActiveCfg = Debug|Any CPU - {79B36C92-BA93-4406-AB75-6F2282DDFF01}.Debug|x86.Build.0 = Debug|Any CPU - {79B36C92-BA93-4406-AB75-6F2282DDFF01}.Release|Any 
CPU.ActiveCfg = Release|Any CPU - {79B36C92-BA93-4406-AB75-6F2282DDFF01}.Release|Any CPU.Build.0 = Release|Any CPU - {79B36C92-BA93-4406-AB75-6F2282DDFF01}.Release|x64.ActiveCfg = Release|Any CPU - {79B36C92-BA93-4406-AB75-6F2282DDFF01}.Release|x64.Build.0 = Release|Any CPU - {79B36C92-BA93-4406-AB75-6F2282DDFF01}.Release|x86.ActiveCfg = Release|Any CPU - {79B36C92-BA93-4406-AB75-6F2282DDFF01}.Release|x86.Build.0 = Release|Any CPU - {4B60FA53-81F6-4AB6-BE9F-DE0992E11977}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {4B60FA53-81F6-4AB6-BE9F-DE0992E11977}.Debug|Any CPU.Build.0 = Debug|Any CPU - {4B60FA53-81F6-4AB6-BE9F-DE0992E11977}.Debug|x64.ActiveCfg = Debug|Any CPU - {4B60FA53-81F6-4AB6-BE9F-DE0992E11977}.Debug|x64.Build.0 = Debug|Any CPU - {4B60FA53-81F6-4AB6-BE9F-DE0992E11977}.Debug|x86.ActiveCfg = Debug|Any CPU - {4B60FA53-81F6-4AB6-BE9F-DE0992E11977}.Debug|x86.Build.0 = Debug|Any CPU - {4B60FA53-81F6-4AB6-BE9F-DE0992E11977}.Release|Any CPU.ActiveCfg = Release|Any CPU - {4B60FA53-81F6-4AB6-BE9F-DE0992E11977}.Release|Any CPU.Build.0 = Release|Any CPU - {4B60FA53-81F6-4AB6-BE9F-DE0992E11977}.Release|x64.ActiveCfg = Release|Any CPU - {4B60FA53-81F6-4AB6-BE9F-DE0992E11977}.Release|x64.Build.0 = Release|Any CPU - {4B60FA53-81F6-4AB6-BE9F-DE0992E11977}.Release|x86.ActiveCfg = Release|Any CPU - {4B60FA53-81F6-4AB6-BE9F-DE0992E11977}.Release|x86.Build.0 = Release|Any CPU - {6BBA820B-8443-4832-91C3-3AB002006494}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {6BBA820B-8443-4832-91C3-3AB002006494}.Debug|Any CPU.Build.0 = Debug|Any CPU - {6BBA820B-8443-4832-91C3-3AB002006494}.Debug|x64.ActiveCfg = Debug|Any CPU - {6BBA820B-8443-4832-91C3-3AB002006494}.Debug|x64.Build.0 = Debug|Any CPU - {6BBA820B-8443-4832-91C3-3AB002006494}.Debug|x86.ActiveCfg = Debug|Any CPU - {6BBA820B-8443-4832-91C3-3AB002006494}.Debug|x86.Build.0 = Debug|Any CPU - {6BBA820B-8443-4832-91C3-3AB002006494}.Release|Any CPU.ActiveCfg = Release|Any CPU - {6BBA820B-8443-4832-91C3-3AB002006494}.Release|Any CPU.Build.0 = Release|Any CPU - {6BBA820B-8443-4832-91C3-3AB002006494}.Release|x64.ActiveCfg = Release|Any CPU - {6BBA820B-8443-4832-91C3-3AB002006494}.Release|x64.Build.0 = Release|Any CPU - {6BBA820B-8443-4832-91C3-3AB002006494}.Release|x86.ActiveCfg = Release|Any CPU - {6BBA820B-8443-4832-91C3-3AB002006494}.Release|x86.Build.0 = Release|Any CPU - {7845AE1C-FBD7-4177-A06F-D7AAE8315DB2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {7845AE1C-FBD7-4177-A06F-D7AAE8315DB2}.Debug|Any CPU.Build.0 = Debug|Any CPU - {7845AE1C-FBD7-4177-A06F-D7AAE8315DB2}.Debug|x64.ActiveCfg = Debug|Any CPU - {7845AE1C-FBD7-4177-A06F-D7AAE8315DB2}.Debug|x64.Build.0 = Debug|Any CPU - {7845AE1C-FBD7-4177-A06F-D7AAE8315DB2}.Debug|x86.ActiveCfg = Debug|Any CPU - {7845AE1C-FBD7-4177-A06F-D7AAE8315DB2}.Debug|x86.Build.0 = Debug|Any CPU - {7845AE1C-FBD7-4177-A06F-D7AAE8315DB2}.Release|Any CPU.ActiveCfg = Release|Any CPU - {7845AE1C-FBD7-4177-A06F-D7AAE8315DB2}.Release|Any CPU.Build.0 = Release|Any CPU - {7845AE1C-FBD7-4177-A06F-D7AAE8315DB2}.Release|x64.ActiveCfg = Release|Any CPU - {7845AE1C-FBD7-4177-A06F-D7AAE8315DB2}.Release|x64.Build.0 = Release|Any CPU - {7845AE1C-FBD7-4177-A06F-D7AAE8315DB2}.Release|x86.ActiveCfg = Release|Any CPU - {7845AE1C-FBD7-4177-A06F-D7AAE8315DB2}.Release|x86.Build.0 = Release|Any CPU - {F892BFFD-9101-4D59-B6FD-C532EB04D51F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {F892BFFD-9101-4D59-B6FD-C532EB04D51F}.Debug|Any CPU.Build.0 = Debug|Any CPU - {F892BFFD-9101-4D59-B6FD-C532EB04D51F}.Debug|x64.ActiveCfg = Debug|Any CPU - 
{F892BFFD-9101-4D59-B6FD-C532EB04D51F}.Debug|x64.Build.0 = Debug|Any CPU - {F892BFFD-9101-4D59-B6FD-C532EB04D51F}.Debug|x86.ActiveCfg = Debug|Any CPU - {F892BFFD-9101-4D59-B6FD-C532EB04D51F}.Debug|x86.Build.0 = Debug|Any CPU - {F892BFFD-9101-4D59-B6FD-C532EB04D51F}.Release|Any CPU.ActiveCfg = Release|Any CPU - {F892BFFD-9101-4D59-B6FD-C532EB04D51F}.Release|Any CPU.Build.0 = Release|Any CPU - {F892BFFD-9101-4D59-B6FD-C532EB04D51F}.Release|x64.ActiveCfg = Release|Any CPU - {F892BFFD-9101-4D59-B6FD-C532EB04D51F}.Release|x64.Build.0 = Release|Any CPU - {F892BFFD-9101-4D59-B6FD-C532EB04D51F}.Release|x86.ActiveCfg = Release|Any CPU - {F892BFFD-9101-4D59-B6FD-C532EB04D51F}.Release|x86.Build.0 = Release|Any CPU - {EAE910FC-188C-41C3-822A-623964CABE48}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {EAE910FC-188C-41C3-822A-623964CABE48}.Debug|Any CPU.Build.0 = Debug|Any CPU - {EAE910FC-188C-41C3-822A-623964CABE48}.Debug|x64.ActiveCfg = Debug|Any CPU - {EAE910FC-188C-41C3-822A-623964CABE48}.Debug|x64.Build.0 = Debug|Any CPU - {EAE910FC-188C-41C3-822A-623964CABE48}.Debug|x86.ActiveCfg = Debug|Any CPU - {EAE910FC-188C-41C3-822A-623964CABE48}.Debug|x86.Build.0 = Debug|Any CPU - {EAE910FC-188C-41C3-822A-623964CABE48}.Release|Any CPU.ActiveCfg = Release|Any CPU - {EAE910FC-188C-41C3-822A-623964CABE48}.Release|Any CPU.Build.0 = Release|Any CPU - {EAE910FC-188C-41C3-822A-623964CABE48}.Release|x64.ActiveCfg = Release|Any CPU - {EAE910FC-188C-41C3-822A-623964CABE48}.Release|x64.Build.0 = Release|Any CPU - {EAE910FC-188C-41C3-822A-623964CABE48}.Release|x86.ActiveCfg = Release|Any CPU - {EAE910FC-188C-41C3-822A-623964CABE48}.Release|x86.Build.0 = Release|Any CPU - {BBA5C780-6348-427D-9600-726EAA8963B3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {BBA5C780-6348-427D-9600-726EAA8963B3}.Debug|Any CPU.Build.0 = Debug|Any CPU - {BBA5C780-6348-427D-9600-726EAA8963B3}.Debug|x64.ActiveCfg = Debug|Any CPU - {BBA5C780-6348-427D-9600-726EAA8963B3}.Debug|x64.Build.0 = Debug|Any CPU - {BBA5C780-6348-427D-9600-726EAA8963B3}.Debug|x86.ActiveCfg = Debug|Any CPU - {BBA5C780-6348-427D-9600-726EAA8963B3}.Debug|x86.Build.0 = Debug|Any CPU - {BBA5C780-6348-427D-9600-726EAA8963B3}.Release|Any CPU.ActiveCfg = Release|Any CPU - {BBA5C780-6348-427D-9600-726EAA8963B3}.Release|Any CPU.Build.0 = Release|Any CPU - {BBA5C780-6348-427D-9600-726EAA8963B3}.Release|x64.ActiveCfg = Release|Any CPU - {BBA5C780-6348-427D-9600-726EAA8963B3}.Release|x64.Build.0 = Release|Any CPU - {BBA5C780-6348-427D-9600-726EAA8963B3}.Release|x86.ActiveCfg = Release|Any CPU - {BBA5C780-6348-427D-9600-726EAA8963B3}.Release|x86.Build.0 = Release|Any CPU - {5F44A429-816A-4560-A5AA-61CD23FD8A19}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {5F44A429-816A-4560-A5AA-61CD23FD8A19}.Debug|Any CPU.Build.0 = Debug|Any CPU - {5F44A429-816A-4560-A5AA-61CD23FD8A19}.Debug|x64.ActiveCfg = Debug|Any CPU - {5F44A429-816A-4560-A5AA-61CD23FD8A19}.Debug|x64.Build.0 = Debug|Any CPU - {5F44A429-816A-4560-A5AA-61CD23FD8A19}.Debug|x86.ActiveCfg = Debug|Any CPU - {5F44A429-816A-4560-A5AA-61CD23FD8A19}.Debug|x86.Build.0 = Debug|Any CPU - {5F44A429-816A-4560-A5AA-61CD23FD8A19}.Release|Any CPU.ActiveCfg = Release|Any CPU - {5F44A429-816A-4560-A5AA-61CD23FD8A19}.Release|Any CPU.Build.0 = Release|Any CPU - {5F44A429-816A-4560-A5AA-61CD23FD8A19}.Release|x64.ActiveCfg = Release|Any CPU - {5F44A429-816A-4560-A5AA-61CD23FD8A19}.Release|x64.Build.0 = Release|Any CPU - {5F44A429-816A-4560-A5AA-61CD23FD8A19}.Release|x86.ActiveCfg = Release|Any CPU - {5F44A429-816A-4560-A5AA-61CD23FD8A19}.Release|x86.Build.0 = 
Release|Any CPU - {20FDC3B4-9908-4ABF-BA1D-50E0B4A64F4B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {20FDC3B4-9908-4ABF-BA1D-50E0B4A64F4B}.Debug|Any CPU.Build.0 = Debug|Any CPU - {20FDC3B4-9908-4ABF-BA1D-50E0B4A64F4B}.Debug|x64.ActiveCfg = Debug|Any CPU - {20FDC3B4-9908-4ABF-BA1D-50E0B4A64F4B}.Debug|x64.Build.0 = Debug|Any CPU - {20FDC3B4-9908-4ABF-BA1D-50E0B4A64F4B}.Debug|x86.ActiveCfg = Debug|Any CPU - {20FDC3B4-9908-4ABF-BA1D-50E0B4A64F4B}.Debug|x86.Build.0 = Debug|Any CPU - {20FDC3B4-9908-4ABF-BA1D-50E0B4A64F4B}.Release|Any CPU.ActiveCfg = Release|Any CPU - {20FDC3B4-9908-4ABF-BA1D-50E0B4A64F4B}.Release|Any CPU.Build.0 = Release|Any CPU - {20FDC3B4-9908-4ABF-BA1D-50E0B4A64F4B}.Release|x64.ActiveCfg = Release|Any CPU - {20FDC3B4-9908-4ABF-BA1D-50E0B4A64F4B}.Release|x64.Build.0 = Release|Any CPU - {20FDC3B4-9908-4ABF-BA1D-50E0B4A64F4B}.Release|x86.ActiveCfg = Release|Any CPU - {20FDC3B4-9908-4ABF-BA1D-50E0B4A64F4B}.Release|x86.Build.0 = Release|Any CPU - {544DBB82-4639-4856-A5F2-76828F7A8396}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {544DBB82-4639-4856-A5F2-76828F7A8396}.Debug|Any CPU.Build.0 = Debug|Any CPU - {544DBB82-4639-4856-A5F2-76828F7A8396}.Debug|x64.ActiveCfg = Debug|Any CPU - {544DBB82-4639-4856-A5F2-76828F7A8396}.Debug|x64.Build.0 = Debug|Any CPU - {544DBB82-4639-4856-A5F2-76828F7A8396}.Debug|x86.ActiveCfg = Debug|Any CPU - {544DBB82-4639-4856-A5F2-76828F7A8396}.Debug|x86.Build.0 = Debug|Any CPU - {544DBB82-4639-4856-A5F2-76828F7A8396}.Release|Any CPU.ActiveCfg = Release|Any CPU - {544DBB82-4639-4856-A5F2-76828F7A8396}.Release|Any CPU.Build.0 = Release|Any CPU - {544DBB82-4639-4856-A5F2-76828F7A8396}.Release|x64.ActiveCfg = Release|Any CPU - {544DBB82-4639-4856-A5F2-76828F7A8396}.Release|x64.Build.0 = Release|Any CPU - {544DBB82-4639-4856-A5F2-76828F7A8396}.Release|x86.ActiveCfg = Release|Any CPU - {544DBB82-4639-4856-A5F2-76828F7A8396}.Release|x86.Build.0 = Release|Any CPU - {C4B189FA-4268-4B3C-A6B0-C2BB5B96D11A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {C4B189FA-4268-4B3C-A6B0-C2BB5B96D11A}.Debug|Any CPU.Build.0 = Debug|Any CPU - {C4B189FA-4268-4B3C-A6B0-C2BB5B96D11A}.Debug|x64.ActiveCfg = Debug|Any CPU - {C4B189FA-4268-4B3C-A6B0-C2BB5B96D11A}.Debug|x64.Build.0 = Debug|Any CPU - {C4B189FA-4268-4B3C-A6B0-C2BB5B96D11A}.Debug|x86.ActiveCfg = Debug|Any CPU - {C4B189FA-4268-4B3C-A6B0-C2BB5B96D11A}.Debug|x86.Build.0 = Debug|Any CPU - {C4B189FA-4268-4B3C-A6B0-C2BB5B96D11A}.Release|Any CPU.ActiveCfg = Release|Any CPU - {C4B189FA-4268-4B3C-A6B0-C2BB5B96D11A}.Release|Any CPU.Build.0 = Release|Any CPU - {C4B189FA-4268-4B3C-A6B0-C2BB5B96D11A}.Release|x64.ActiveCfg = Release|Any CPU - {C4B189FA-4268-4B3C-A6B0-C2BB5B96D11A}.Release|x64.Build.0 = Release|Any CPU - {C4B189FA-4268-4B3C-A6B0-C2BB5B96D11A}.Release|x86.ActiveCfg = Release|Any CPU - {C4B189FA-4268-4B3C-A6B0-C2BB5B96D11A}.Release|x86.Build.0 = Release|Any CPU - {461D4A58-3816-4737-B209-2D1F08B1F4DF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {461D4A58-3816-4737-B209-2D1F08B1F4DF}.Debug|Any CPU.Build.0 = Debug|Any CPU - {461D4A58-3816-4737-B209-2D1F08B1F4DF}.Debug|x64.ActiveCfg = Debug|Any CPU - {461D4A58-3816-4737-B209-2D1F08B1F4DF}.Debug|x64.Build.0 = Debug|Any CPU - {461D4A58-3816-4737-B209-2D1F08B1F4DF}.Debug|x86.ActiveCfg = Debug|Any CPU - {461D4A58-3816-4737-B209-2D1F08B1F4DF}.Debug|x86.Build.0 = Debug|Any CPU - {461D4A58-3816-4737-B209-2D1F08B1F4DF}.Release|Any CPU.ActiveCfg = Release|Any CPU - {461D4A58-3816-4737-B209-2D1F08B1F4DF}.Release|Any CPU.Build.0 = Release|Any CPU - 
{461D4A58-3816-4737-B209-2D1F08B1F4DF}.Release|x64.ActiveCfg = Release|Any CPU - {461D4A58-3816-4737-B209-2D1F08B1F4DF}.Release|x64.Build.0 = Release|Any CPU - {461D4A58-3816-4737-B209-2D1F08B1F4DF}.Release|x86.ActiveCfg = Release|Any CPU - {461D4A58-3816-4737-B209-2D1F08B1F4DF}.Release|x86.Build.0 = Release|Any CPU - {FB026A42-9B11-4926-9918-249A672DB879}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {FB026A42-9B11-4926-9918-249A672DB879}.Debug|Any CPU.Build.0 = Debug|Any CPU - {FB026A42-9B11-4926-9918-249A672DB879}.Debug|x64.ActiveCfg = Debug|Any CPU - {FB026A42-9B11-4926-9918-249A672DB879}.Debug|x64.Build.0 = Debug|Any CPU - {FB026A42-9B11-4926-9918-249A672DB879}.Debug|x86.ActiveCfg = Debug|Any CPU - {FB026A42-9B11-4926-9918-249A672DB879}.Debug|x86.Build.0 = Debug|Any CPU - {FB026A42-9B11-4926-9918-249A672DB879}.Release|Any CPU.ActiveCfg = Release|Any CPU - {FB026A42-9B11-4926-9918-249A672DB879}.Release|Any CPU.Build.0 = Release|Any CPU - {FB026A42-9B11-4926-9918-249A672DB879}.Release|x64.ActiveCfg = Release|Any CPU - {FB026A42-9B11-4926-9918-249A672DB879}.Release|x64.Build.0 = Release|Any CPU - {FB026A42-9B11-4926-9918-249A672DB879}.Release|x86.ActiveCfg = Release|Any CPU - {FB026A42-9B11-4926-9918-249A672DB879}.Release|x86.Build.0 = Release|Any CPU - {2983C054-33F1-49A3-BB6A-FC4737B2A510}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {2983C054-33F1-49A3-BB6A-FC4737B2A510}.Debug|Any CPU.Build.0 = Debug|Any CPU - {2983C054-33F1-49A3-BB6A-FC4737B2A510}.Debug|x64.ActiveCfg = Debug|Any CPU - {2983C054-33F1-49A3-BB6A-FC4737B2A510}.Debug|x64.Build.0 = Debug|Any CPU - {2983C054-33F1-49A3-BB6A-FC4737B2A510}.Debug|x86.ActiveCfg = Debug|Any CPU - {2983C054-33F1-49A3-BB6A-FC4737B2A510}.Debug|x86.Build.0 = Debug|Any CPU - {2983C054-33F1-49A3-BB6A-FC4737B2A510}.Release|Any CPU.ActiveCfg = Release|Any CPU - {2983C054-33F1-49A3-BB6A-FC4737B2A510}.Release|Any CPU.Build.0 = Release|Any CPU - {2983C054-33F1-49A3-BB6A-FC4737B2A510}.Release|x64.ActiveCfg = Release|Any CPU - {2983C054-33F1-49A3-BB6A-FC4737B2A510}.Release|x64.Build.0 = Release|Any CPU - {2983C054-33F1-49A3-BB6A-FC4737B2A510}.Release|x86.ActiveCfg = Release|Any CPU - {2983C054-33F1-49A3-BB6A-FC4737B2A510}.Release|x86.Build.0 = Release|Any CPU - {E8A8FDFC-DACF-4F7B-AECC-3D1C59341DF9}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {E8A8FDFC-DACF-4F7B-AECC-3D1C59341DF9}.Debug|Any CPU.Build.0 = Debug|Any CPU - {E8A8FDFC-DACF-4F7B-AECC-3D1C59341DF9}.Debug|x64.ActiveCfg = Debug|Any CPU - {E8A8FDFC-DACF-4F7B-AECC-3D1C59341DF9}.Debug|x64.Build.0 = Debug|Any CPU - {E8A8FDFC-DACF-4F7B-AECC-3D1C59341DF9}.Debug|x86.ActiveCfg = Debug|Any CPU - {E8A8FDFC-DACF-4F7B-AECC-3D1C59341DF9}.Debug|x86.Build.0 = Debug|Any CPU - {E8A8FDFC-DACF-4F7B-AECC-3D1C59341DF9}.Release|Any CPU.ActiveCfg = Release|Any CPU - {E8A8FDFC-DACF-4F7B-AECC-3D1C59341DF9}.Release|Any CPU.Build.0 = Release|Any CPU - {E8A8FDFC-DACF-4F7B-AECC-3D1C59341DF9}.Release|x64.ActiveCfg = Release|Any CPU - {E8A8FDFC-DACF-4F7B-AECC-3D1C59341DF9}.Release|x64.Build.0 = Release|Any CPU - {E8A8FDFC-DACF-4F7B-AECC-3D1C59341DF9}.Release|x86.ActiveCfg = Release|Any CPU - {E8A8FDFC-DACF-4F7B-AECC-3D1C59341DF9}.Release|x86.Build.0 = Release|Any CPU - {38E1D9C6-F396-46EF-9A44-B7CC473B3D50}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {38E1D9C6-F396-46EF-9A44-B7CC473B3D50}.Debug|Any CPU.Build.0 = Debug|Any CPU - {38E1D9C6-F396-46EF-9A44-B7CC473B3D50}.Debug|x64.ActiveCfg = Debug|Any CPU - {38E1D9C6-F396-46EF-9A44-B7CC473B3D50}.Debug|x64.Build.0 = Debug|Any CPU - {38E1D9C6-F396-46EF-9A44-B7CC473B3D50}.Debug|x86.ActiveCfg = 
Debug|Any CPU - {38E1D9C6-F396-46EF-9A44-B7CC473B3D50}.Debug|x86.Build.0 = Debug|Any CPU - {38E1D9C6-F396-46EF-9A44-B7CC473B3D50}.Release|Any CPU.ActiveCfg = Release|Any CPU - {38E1D9C6-F396-46EF-9A44-B7CC473B3D50}.Release|Any CPU.Build.0 = Release|Any CPU - {38E1D9C6-F396-46EF-9A44-B7CC473B3D50}.Release|x64.ActiveCfg = Release|Any CPU - {38E1D9C6-F396-46EF-9A44-B7CC473B3D50}.Release|x64.Build.0 = Release|Any CPU - {38E1D9C6-F396-46EF-9A44-B7CC473B3D50}.Release|x86.ActiveCfg = Release|Any CPU - {38E1D9C6-F396-46EF-9A44-B7CC473B3D50}.Release|x86.Build.0 = Release|Any CPU - EndGlobalSection - GlobalSection(SolutionProperties) = preSolution - HideSolutionNode = FALSE - EndGlobalSection -EndGlobal diff --git a/src/StellaOps.Configuration.Tests/StellaOps.Configuration.Tests.csproj b/src/StellaOps.Configuration.Tests/StellaOps.Configuration.Tests.csproj deleted file mode 100644 index 346491bf..00000000 --- a/src/StellaOps.Configuration.Tests/StellaOps.Configuration.Tests.csproj +++ /dev/null @@ -1,11 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="../StellaOps.Configuration/StellaOps.Configuration.csproj" /> - <ProjectReference Include="../StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOps.Auth.Abstractions.csproj" /> - </ItemGroup> -</Project> diff --git a/src/StellaOps.Excititor.Core.Tests/StellaOps.Excititor.Core.Tests.csproj b/src/StellaOps.Excititor.Core.Tests/StellaOps.Excititor.Core.Tests.csproj deleted file mode 100644 index 14a02ea6..00000000 --- a/src/StellaOps.Excititor.Core.Tests/StellaOps.Excititor.Core.Tests.csproj +++ /dev/null @@ -1,15 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" /> - <ProjectReference Include="..\StellaOps.Excititor.Policy\StellaOps.Excititor.Policy.csproj" /> - <ProjectReference Include="..\StellaOps.Aoc\StellaOps.Aoc.csproj" /> - <ProjectReference Include="..\StellaOps.Concelier.RawModels\StellaOps.Concelier.RawModels.csproj" /> - </ItemGroup> -</Project> diff --git a/src/StellaOps.Excititor.Storage.Mongo.Tests/StellaOps.Excititor.Storage.Mongo.Tests.csproj b/src/StellaOps.Excititor.Storage.Mongo.Tests/StellaOps.Excititor.Storage.Mongo.Tests.csproj deleted file mode 100644 index 847c8d77..00000000 --- a/src/StellaOps.Excititor.Storage.Mongo.Tests/StellaOps.Excititor.Storage.Mongo.Tests.csproj +++ /dev/null @@ -1,15 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" /> - <ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" /> - <ProjectReference Include="..\StellaOps.Excititor.Policy\StellaOps.Excititor.Policy.csproj" /> - <ProjectReference Include="..\StellaOps.Concelier.Storage.Mongo\StellaOps.Concelier.Storage.Mongo.csproj" /> - 
</ItemGroup>
-</Project>
diff --git a/src/StellaOps.Excititor.WebService/StellaOps.Excititor.WebService.csproj b/src/StellaOps.Excititor.WebService/StellaOps.Excititor.WebService.csproj
deleted file mode 100644
index 5df0d48b..00000000
--- a/src/StellaOps.Excititor.WebService/StellaOps.Excititor.WebService.csproj
+++ /dev/null
@@ -1,22 +0,0 @@
-<Project Sdk="Microsoft.NET.Sdk.Web">
- <PropertyGroup>
- <TargetFramework>net10.0</TargetFramework>
- <LangVersion>preview</LangVersion>
- <Nullable>enable</Nullable>
- <ImplicitUsings>enable</ImplicitUsings>
- <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
- </PropertyGroup>
- <ItemGroup>
- <ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
- <ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" />
- <ProjectReference Include="..\StellaOps.Excititor.Export\StellaOps.Excititor.Export.csproj" />
- <ProjectReference Include="..\StellaOps.Excititor.Policy\StellaOps.Excititor.Policy.csproj" />
- <ProjectReference Include="..\StellaOps.Excititor.Attestation\StellaOps.Excititor.Attestation.csproj" />
- <ProjectReference Include="..\StellaOps.Excititor.ArtifactStores.S3\StellaOps.Excititor.ArtifactStores.S3.csproj" />
- <ProjectReference Include="..\StellaOps.Excititor.Connectors.RedHat.CSAF\StellaOps.Excititor.Connectors.RedHat.CSAF.csproj" />
- <ProjectReference Include="..\StellaOps.Excititor.Formats.CSAF\StellaOps.Excititor.Formats.CSAF.csproj" />
- <ProjectReference Include="..\StellaOps.Excititor.Formats.CycloneDX\StellaOps.Excititor.Formats.CycloneDX.csproj" />
- <ProjectReference Include="..\StellaOps.Excititor.Formats.OpenVEX\StellaOps.Excititor.Formats.OpenVEX.csproj" />
- <ProjectReference Include="..\StellaOps.Aoc\StellaOps.Aoc.csproj" />
- </ItemGroup>
-</Project>
diff --git a/src/StellaOps.Excititor.Worker/StellaOps.Excititor.Worker.csproj b/src/StellaOps.Excititor.Worker/StellaOps.Excititor.Worker.csproj
deleted file mode 100644
index 2336fb1a..00000000
--- a/src/StellaOps.Excititor.Worker/StellaOps.Excititor.Worker.csproj
+++ /dev/null
@@ -1,24 +0,0 @@
-<Project Sdk="Microsoft.NET.Sdk.Worker">
- <PropertyGroup>
- <TargetFramework>net10.0</TargetFramework>
- <LangVersion>preview</LangVersion>
- <Nullable>enable</Nullable>
- <ImplicitUsings>enable</ImplicitUsings>
- <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
- </PropertyGroup>
- <ItemGroup>
- <PackageReference Include="Microsoft.Extensions.Hosting" Version="10.0.0-rc.2.25502.107" />
- </ItemGroup>
- <ItemGroup>
- <ProjectReference Include="..\StellaOps.Plugin\StellaOps.Plugin.csproj" />
- <ProjectReference Include="..\StellaOps.Excititor.Connectors.Abstractions\StellaOps.Excititor.Connectors.Abstractions.csproj" />
- <ProjectReference Include="..\StellaOps.Excititor.Connectors.RedHat.CSAF\StellaOps.Excititor.Connectors.RedHat.CSAF.csproj" />
- <ProjectReference Include="..\StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj" />
- <ProjectReference Include="..\StellaOps.Excititor.Policy\StellaOps.Excititor.Policy.csproj" />
- <ProjectReference Include="..\StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj" />
- <ProjectReference Include="..\StellaOps.Excititor.Formats.CSAF\StellaOps.Excititor.Formats.CSAF.csproj" />
- <ProjectReference Include="..\StellaOps.Excititor.Formats.CycloneDX\StellaOps.Excititor.Formats.CycloneDX.csproj" />
- <ProjectReference Include="..\StellaOps.Excititor.Formats.OpenVEX\StellaOps.Excititor.Formats.OpenVEX.csproj" />
- <ProjectReference Include="..\StellaOps.Excititor.Attestation\StellaOps.Excititor.Attestation.csproj" />
- </ItemGroup>
-</Project>
diff --git a/src/StellaOps.Notifier/StellaOps.Notifier.Tests/xunit.runner.json b/src/StellaOps.Notifier/StellaOps.Notifier.Tests/xunit.runner.json
deleted file mode 100644
index 86c7ea05..00000000
--- a/src/StellaOps.Notifier/StellaOps.Notifier.Tests/xunit.runner.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
- "$schema": "https://xunit.net/schema/current/xunit.runner.schema.json"
-}
diff --git a/src/StellaOps.Notify.Connectors.Email/TASKS.md b/src/StellaOps.Notify.Connectors.Email/TASKS.md
deleted file mode 100644
index 5d4ab07b..00000000
--- a/src/StellaOps.Notify.Connectors.Email/TASKS.md
+++ /dev/null
@@ -1,2 +0,0 @@
-# Notify Email Connector Task Board (Sprint 15)
-> Archived 2025-10-26 — connector maintained under `src/StellaOps.Notifier` (Sprints 38–40).
diff --git a/src/StellaOps.Notify.Connectors.Slack/TASKS.md b/src/StellaOps.Notify.Connectors.Slack/TASKS.md
deleted file mode 100644
index f2cb69c1..00000000
--- a/src/StellaOps.Notify.Connectors.Slack/TASKS.md
+++ /dev/null
@@ -1,2 +0,0 @@
-# Notify Slack Connector Task Board (Sprint 15)
-> Archived 2025-10-26 — connector scope now in `src/StellaOps.Notifier` (Sprints 38–40).
diff --git a/src/StellaOps.Notify.Connectors.Webhook/TASKS.md b/src/StellaOps.Notify.Connectors.Webhook/TASKS.md
deleted file mode 100644
index ca3f3fb9..00000000
--- a/src/StellaOps.Notify.Connectors.Webhook/TASKS.md
+++ /dev/null
@@ -1,2 +0,0 @@
-# Notify Webhook Connector Task Board (Sprint 15)
-> Archived 2025-10-26 — webhook connector maintained in `src/StellaOps.Notifier` (Sprints 38–40).
diff --git a/src/StellaOps.Notify.Models/TASKS.md b/src/StellaOps.Notify.Models/TASKS.md
deleted file mode 100644
index 9b75dc62..00000000
--- a/src/StellaOps.Notify.Models/TASKS.md
+++ /dev/null
@@ -1,2 +0,0 @@
-# Notify Models Task Board (Sprint 15)
-> Archived 2025-10-26 — scope moved to `src/StellaOps.Notifier` (Sprints 38–40).
diff --git a/src/StellaOps.Notify.WebService/StellaOps.Notify.WebService.csproj b/src/StellaOps.Notify.WebService/StellaOps.Notify.WebService.csproj
deleted file mode 100644
index 0a52aeae..00000000
--- a/src/StellaOps.Notify.WebService/StellaOps.Notify.WebService.csproj
+++ /dev/null
@@ -1,27 +0,0 @@
-<Project Sdk="Microsoft.NET.Sdk.Web">
- <PropertyGroup>
- <TargetFramework>net10.0</TargetFramework>
- <LangVersion>preview</LangVersion>
- <ImplicitUsings>enable</ImplicitUsings>
- <Nullable>enable</Nullable>
- <TreatWarningsAsErrors>true</TreatWarningsAsErrors>
- </PropertyGroup>
-
- <ItemGroup>
- <PackageReference Include="Serilog.AspNetCore" Version="8.0.1" />
- <PackageReference Include="Serilog.Sinks.Console" Version="5.0.1" />
- <PackageReference Include="YamlDotNet" Version="13.7.1" />
- </ItemGroup>
-
- <ItemGroup>
- <ProjectReference Include="..\StellaOps.Configuration\StellaOps.Configuration.csproj" />
- <ProjectReference Include="..\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj" />
- <ProjectReference Include="..\StellaOps.Notify.Models\StellaOps.Notify.Models.csproj" />
- <ProjectReference Include="..\StellaOps.Notify.Storage.Mongo\StellaOps.Notify.Storage.Mongo.csproj" />
- <ProjectReference Include="..\StellaOps.Notify.Engine\StellaOps.Notify.Engine.csproj" />
- <ProjectReference Include="..\StellaOps.Plugin\StellaOps.Plugin.csproj" />
- <ProjectReference Include="..\StellaOps.Authority\StellaOps.Auth.Abstractions\StellaOps.Auth.Abstractions.csproj" />
- <ProjectReference Include="..\StellaOps.Authority\StellaOps.Auth.Client\StellaOps.Auth.Client.csproj" />
- <ProjectReference Include="..\StellaOps.Authority\StellaOps.Auth.ServerIntegration\StellaOps.Auth.ServerIntegration.csproj" />
- </ItemGroup>
-</Project>
diff --git a/src/StellaOps.Notify.WebService/TASKS.md b/src/StellaOps.Notify.WebService/TASKS.md
deleted file mode 100644
index a13e4a53..00000000
--- a/src/StellaOps.Notify.WebService/TASKS.md
+++ /dev/null
@@ -1,2 +0,0 @@
-# Notify WebService Task Board (Sprint 15)
-> Archived 2025-10-26 — control plane now lives in `src/StellaOps.Notifier` (Sprints 38–40).
diff --git a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.Tests/xunit.runner.json b/src/StellaOps.Orchestrator/StellaOps.Orchestrator.Tests/xunit.runner.json deleted file mode 100644 index 86c7ea05..00000000 --- a/src/StellaOps.Orchestrator/StellaOps.Orchestrator.Tests/xunit.runner.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "$schema": "https://xunit.net/schema/current/xunit.runner.schema.json" -} diff --git a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Tests/xunit.runner.json b/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Tests/xunit.runner.json deleted file mode 100644 index 86c7ea05..00000000 --- a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Tests/xunit.runner.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "$schema": "https://xunit.net/schema/current/xunit.runner.schema.json" -} diff --git a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/appsettings.Development.json b/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/appsettings.Development.json deleted file mode 100644 index 0c208ae9..00000000 --- a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/appsettings.Development.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "Logging": { - "LogLevel": { - "Default": "Information", - "Microsoft.AspNetCore": "Warning" - } - } -} diff --git a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/appsettings.json b/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/appsettings.json deleted file mode 100644 index 10f68b8c..00000000 --- a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.WebService/appsettings.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "Logging": { - "LogLevel": { - "Default": "Information", - "Microsoft.AspNetCore": "Warning" - } - }, - "AllowedHosts": "*" -} diff --git a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/appsettings.Development.json b/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/appsettings.Development.json deleted file mode 100644 index b2dcdb67..00000000 --- a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/appsettings.Development.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "Logging": { - "LogLevel": { - "Default": "Information", - "Microsoft.Hosting.Lifetime": "Information" - } - } -} diff --git a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/appsettings.json b/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/appsettings.json deleted file mode 100644 index b2dcdb67..00000000 --- a/src/StellaOps.PacksRegistry/StellaOps.PacksRegistry.Worker/appsettings.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "Logging": { - "LogLevel": { - "Default": "Information", - "Microsoft.Hosting.Lifetime": "Information" - } - } -} diff --git a/src/StellaOps.Policy.Engine/StellaOps.Policy.Engine.csproj b/src/StellaOps.Policy.Engine/StellaOps.Policy.Engine.csproj deleted file mode 100644 index a33dbe6f..00000000 --- a/src/StellaOps.Policy.Engine/StellaOps.Policy.Engine.csproj +++ /dev/null @@ -1,19 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk.Web"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <LangVersion>preview</LangVersion> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - <AspNetCoreHostingModel>InProcess</AspNetCoreHostingModel> - </PropertyGroup> - - <ItemGroup> - <ProjectReference Include="..\StellaOps.Policy\StellaOps.Policy.csproj" /> - <ProjectReference Include="..\StellaOps.Configuration\StellaOps.Configuration.csproj" /> - <ProjectReference 
Include="..\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj" /> - <ProjectReference Include="..\StellaOps.Authority\StellaOps.Auth.Abstractions\StellaOps.Auth.Abstractions.csproj" /> - <ProjectReference Include="..\StellaOps.Authority\StellaOps.Auth.Client\StellaOps.Auth.Client.csproj" /> - <ProjectReference Include="..\StellaOps.Authority\StellaOps.Auth.ServerIntegration\StellaOps.Auth.ServerIntegration.csproj" /> - </ItemGroup> -</Project> diff --git a/src/StellaOps.Policy.Gateway/StellaOps.Policy.Gateway.csproj b/src/StellaOps.Policy.Gateway/StellaOps.Policy.Gateway.csproj deleted file mode 100644 index 9a767fa1..00000000 --- a/src/StellaOps.Policy.Gateway/StellaOps.Policy.Gateway.csproj +++ /dev/null @@ -1,22 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk.Web"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <LangVersion>preview</LangVersion> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - <AspNetCoreHostingModel>InProcess</AspNetCoreHostingModel> - </PropertyGroup> - - <ItemGroup> - <ProjectReference Include="..\StellaOps.Configuration\StellaOps.Configuration.csproj" /> - <ProjectReference Include="..\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj" /> - <ProjectReference Include="..\StellaOps.Authority\StellaOps.Auth.Abstractions\StellaOps.Auth.Abstractions.csproj" /> - <ProjectReference Include="..\StellaOps.Authority\StellaOps.Auth.Client\StellaOps.Auth.Client.csproj" /> - <ProjectReference Include="..\StellaOps.Authority\StellaOps.Auth.ServerIntegration\StellaOps.Auth.ServerIntegration.csproj" /> - </ItemGroup> - <ItemGroup> - <PackageReference Include="Microsoft.Extensions.Http.Polly" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="System.IdentityModel.Tokens.Jwt" Version="8.14.0" /> - </ItemGroup> -</Project> diff --git a/src/StellaOps.Registry.TokenService.Tests/StellaOps.Registry.TokenService.Tests.csproj b/src/StellaOps.Registry.TokenService.Tests/StellaOps.Registry.TokenService.Tests.csproj deleted file mode 100644 index efa1033b..00000000 --- a/src/StellaOps.Registry.TokenService.Tests/StellaOps.Registry.TokenService.Tests.csproj +++ /dev/null @@ -1,28 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <OutputType>Exe</OutputType> - <IsPackable>false</IsPackable> - - <!-- To enable Microsoft.Testing.Platform, uncomment the following line. --> - <!-- <UseMicrosoftTestingPlatformRunner>true</UseMicrosoftTestingPlatformRunner> --> - <!-- Note: to use Microsoft.Testing.Platform correctly with dotnet test: --> - <!-- 1. You must add dotnet.config specifying the test runner to be Microsoft.Testing.Platform --> - <!-- 2. 
You must use .NET 10 SDK or later --> - <!-- For more information, see https://aka.ms/dotnet-test/mtp and https://xunit.net/docs/getting-started/v3/microsoft-testing-platform --> - <!-- To enable code coverage with Microsoft.Testing.Platform, add a package reference to Microsoft.Testing.Extensions.CodeCoverage --> - <!-- https://learn.microsoft.comdotnet/core/testing/microsoft-testing-platform-extensions-code-coverage --> - </PropertyGroup> - - <ItemGroup> - <Content Include="xunit.runner.json" CopyToOutputDirectory="PreserveNewest" /> - </ItemGroup> - - <ItemGroup> - <ProjectReference Include="..\StellaOps.Registry.TokenService\StellaOps.Registry.TokenService.csproj" /> - </ItemGroup> - -</Project> diff --git a/src/StellaOps.Registry.TokenService.Tests/xunit.runner.json b/src/StellaOps.Registry.TokenService.Tests/xunit.runner.json deleted file mode 100644 index 86c7ea05..00000000 --- a/src/StellaOps.Registry.TokenService.Tests/xunit.runner.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "$schema": "https://xunit.net/schema/current/xunit.runner.schema.json" -} diff --git a/src/StellaOps.Registry.TokenService/appsettings.Development.json b/src/StellaOps.Registry.TokenService/appsettings.Development.json deleted file mode 100644 index 0c208ae9..00000000 --- a/src/StellaOps.Registry.TokenService/appsettings.Development.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "Logging": { - "LogLevel": { - "Default": "Information", - "Microsoft.AspNetCore": "Warning" - } - } -} diff --git a/src/StellaOps.Registry.TokenService/appsettings.json b/src/StellaOps.Registry.TokenService/appsettings.json deleted file mode 100644 index 10f68b8c..00000000 --- a/src/StellaOps.Registry.TokenService/appsettings.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "Logging": { - "LogLevel": { - "Default": "Information", - "Microsoft.AspNetCore": "Warning" - } - }, - "AllowedHosts": "*" -} diff --git a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.Tests/xunit.runner.json b/src/StellaOps.RiskEngine/StellaOps.RiskEngine.Tests/xunit.runner.json deleted file mode 100644 index 86c7ea05..00000000 --- a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.Tests/xunit.runner.json +++ /dev/null @@ -1,3 +0,0 @@ -{ - "$schema": "https://xunit.net/schema/current/xunit.runner.schema.json" -} diff --git a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/Program.cs b/src/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/Program.cs deleted file mode 100644 index ee9d65d6..00000000 --- a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/Program.cs +++ /dev/null @@ -1,41 +0,0 @@ -var builder = WebApplication.CreateBuilder(args); - -// Add services to the container. -// Learn more about configuring OpenAPI at https://aka.ms/aspnet/openapi -builder.Services.AddOpenApi(); - -var app = builder.Build(); - -// Configure the HTTP request pipeline. -if (app.Environment.IsDevelopment()) -{ - app.MapOpenApi(); -} - -app.UseHttpsRedirection(); - -var summaries = new[] -{ - "Freezing", "Bracing", "Chilly", "Cool", "Mild", "Warm", "Balmy", "Hot", "Sweltering", "Scorching" -}; - -app.MapGet("/weatherforecast", () => -{ - var forecast = Enumerable.Range(1, 5).Select(index => - new WeatherForecast - ( - DateOnly.FromDateTime(DateTime.Now.AddDays(index)), - Random.Shared.Next(-20, 55), - summaries[Random.Shared.Next(summaries.Length)] - )) - .ToArray(); - return forecast; -}) -.WithName("GetWeatherForecast"); - -app.Run(); - -record WeatherForecast(DateOnly Date, int TemperatureC, string? 
Summary) -{ - public int TemperatureF => 32 + (int)(TemperatureC / 0.5556); -} diff --git a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/appsettings.Development.json b/src/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/appsettings.Development.json deleted file mode 100644 index 0c208ae9..00000000 --- a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/appsettings.Development.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "Logging": { - "LogLevel": { - "Default": "Information", - "Microsoft.AspNetCore": "Warning" - } - } -} diff --git a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/appsettings.json b/src/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/appsettings.json deleted file mode 100644 index 10f68b8c..00000000 --- a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.WebService/appsettings.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "Logging": { - "LogLevel": { - "Default": "Information", - "Microsoft.AspNetCore": "Warning" - } - }, - "AllowedHosts": "*" -} diff --git a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/appsettings.Development.json b/src/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/appsettings.Development.json deleted file mode 100644 index b2dcdb67..00000000 --- a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/appsettings.Development.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "Logging": { - "LogLevel": { - "Default": "Information", - "Microsoft.Hosting.Lifetime": "Information" - } - } -} diff --git a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/appsettings.json b/src/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/appsettings.json deleted file mode 100644 index b2dcdb67..00000000 --- a/src/StellaOps.RiskEngine/StellaOps.RiskEngine.Worker/appsettings.json +++ /dev/null @@ -1,8 +0,0 @@ -{ - "Logging": { - "LogLevel": { - "Default": "Information", - "Microsoft.Hosting.Lifetime": "Information" - } - } -} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/app/node_modules/left-pad/package.json b/src/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/app/node_modules/left-pad/package.json deleted file mode 100644 index 94c0027d..00000000 --- a/src/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/app/node_modules/left-pad/package.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "name": "left-pad", - "version": "1.3.0", - "main": "index.js" -} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/app/node_modules/lib/package.json b/src/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/app/node_modules/lib/package.json deleted file mode 100644 index dbc701c2..00000000 --- a/src/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/app/node_modules/lib/package.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "name": "lib", - "version": "2.0.1", - "main": "index.js" -} diff --git a/src/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/app/node_modules/shared/package.json b/src/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/app/node_modules/shared/package.json deleted file mode 100644 index d60be9d1..00000000 --- a/src/StellaOps.Scanner.Analyzers.Lang.Node.Tests/Fixtures/lang/node/workspaces/packages/app/node_modules/shared/package.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "name": "shared", - "version": "3.1.4", - "main": "index.js" -} diff --git 
a/src/StellaOps.Scanner.Core.Tests/StellaOps.Scanner.Core.Tests.csproj b/src/StellaOps.Scanner.Core.Tests/StellaOps.Scanner.Core.Tests.csproj deleted file mode 100644 index 216382d9..00000000 --- a/src/StellaOps.Scanner.Core.Tests/StellaOps.Scanner.Core.Tests.csproj +++ /dev/null @@ -1,15 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="../StellaOps.Scanner.Core/StellaOps.Scanner.Core.csproj" /> - <ProjectReference Include="../StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOps.Auth.Abstractions.csproj" /> - <ProjectReference Include="../StellaOps.Authority/StellaOps.Auth.Client/StellaOps.Auth.Client.csproj" /> - </ItemGroup> - <ItemGroup> - <None Update="Fixtures\*.json" CopyToOutputDirectory="PreserveNewest" /> - </ItemGroup> -</Project> diff --git a/src/StellaOps.Scanner.WebService/StellaOps.Scanner.WebService.csproj b/src/StellaOps.Scanner.WebService/StellaOps.Scanner.WebService.csproj deleted file mode 100644 index b67ca90a..00000000 --- a/src/StellaOps.Scanner.WebService/StellaOps.Scanner.WebService.csproj +++ /dev/null @@ -1,33 +0,0 @@ -<Project Sdk="Microsoft.NET.Sdk.Web"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - <RootNamespace>StellaOps.Scanner.WebService</RootNamespace> - </PropertyGroup> - <ItemGroup> - <PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Serilog.AspNetCore" Version="8.0.1" /> - <PackageReference Include="Serilog.Sinks.Console" Version="5.0.1" /> - <PackageReference Include="YamlDotNet" Version="13.7.1" /> - <PackageReference Include="StackExchange.Redis" Version="2.8.24" /> - </ItemGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Configuration\StellaOps.Configuration.csproj" /> - <ProjectReference Include="..\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj" /> - <ProjectReference Include="..\StellaOps.Plugin\StellaOps.Plugin.csproj" /> - <ProjectReference Include="..\StellaOps.Authority\StellaOps.Auth.Abstractions\StellaOps.Auth.Abstractions.csproj" /> - <ProjectReference Include="..\StellaOps.Authority\StellaOps.Auth.Client\StellaOps.Auth.Client.csproj" /> - <ProjectReference Include="..\StellaOps.Authority\StellaOps.Auth.ServerIntegration\StellaOps.Auth.ServerIntegration.csproj" /> - <ProjectReference Include="..\StellaOps.Policy\StellaOps.Policy.csproj" /> - <ProjectReference Include="..\StellaOps.Cryptography\StellaOps.Cryptography.csproj" /> - <ProjectReference Include="..\StellaOps.Cryptography.DependencyInjection\StellaOps.Cryptography.DependencyInjection.csproj" /> - <ProjectReference Include="..\StellaOps.Cryptography.Plugin.BouncyCastle\StellaOps.Cryptography.Plugin.BouncyCastle.csproj" /> - <ProjectReference Include="..\StellaOps.Notify.Models\StellaOps.Notify.Models.csproj" /> - <ProjectReference Include="..\StellaOps.Scanner.Cache\StellaOps.Scanner.Cache.csproj" /> - <ProjectReference Include="..\StellaOps.Scanner.Storage\StellaOps.Scanner.Storage.csproj" /> - <ProjectReference Include="..\StellaOps.Zastava.Core\StellaOps.Zastava.Core.csproj" /> - </ItemGroup> -</Project> diff --git a/src/StellaOps.Scheduler.WebService/StellaOps.Scheduler.WebService.csproj 
deleted file mode 100644
index 7859fc7d..00000000
--- a/src/StellaOps.Scheduler.WebService/StellaOps.Scheduler.WebService.csproj
+++ /dev/null
@@ -1,15 +0,0 @@
-<Project Sdk="Microsoft.NET.Sdk.Web">
-  <PropertyGroup>
-    <TargetFramework>net10.0</TargetFramework>
-    <ImplicitUsings>enable</ImplicitUsings>
-    <Nullable>enable</Nullable>
-  </PropertyGroup>
-  <ItemGroup>
-    <ProjectReference Include="../StellaOps.Scheduler.Models/StellaOps.Scheduler.Models.csproj" />
-    <ProjectReference Include="../StellaOps.Scheduler.Storage.Mongo/StellaOps.Scheduler.Storage.Mongo.csproj" />
-    <ProjectReference Include="../StellaOps.Scheduler.ImpactIndex/StellaOps.Scheduler.ImpactIndex.csproj" />
-    <ProjectReference Include="../StellaOps.Plugin/StellaOps.Plugin.csproj" />
-    <ProjectReference Include="../StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOps.Auth.Abstractions.csproj" />
-    <ProjectReference Include="../StellaOps.Authority/StellaOps.Auth.ServerIntegration/StellaOps.Auth.ServerIntegration.csproj" />
-  </ItemGroup>
-</Project>
diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/xunit.runner.json b/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/xunit.runner.json
deleted file mode 100644
index 86c7ea05..00000000
--- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/xunit.runner.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "$schema": "https://xunit.net/schema/current/xunit.runner.schema.json"
-}
diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/Program.cs b/src/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/Program.cs
deleted file mode 100644
index ee9d65d6..00000000
--- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/Program.cs
+++ /dev/null
@@ -1,41 +0,0 @@
-var builder = WebApplication.CreateBuilder(args);
-
-// Add services to the container.
-// Learn more about configuring OpenAPI at https://aka.ms/aspnet/openapi
-builder.Services.AddOpenApi();
-
-var app = builder.Build();
-
-// Configure the HTTP request pipeline.
-if (app.Environment.IsDevelopment())
-{
-    app.MapOpenApi();
-}
-
-app.UseHttpsRedirection();
-
-var summaries = new[]
-{
-    "Freezing", "Bracing", "Chilly", "Cool", "Mild", "Warm", "Balmy", "Hot", "Sweltering", "Scorching"
-};
-
-app.MapGet("/weatherforecast", () =>
-{
-    var forecast = Enumerable.Range(1, 5).Select(index =>
-        new WeatherForecast
-        (
-            DateOnly.FromDateTime(DateTime.Now.AddDays(index)),
-            Random.Shared.Next(-20, 55),
-            summaries[Random.Shared.Next(summaries.Length)]
-        ))
-        .ToArray();
-    return forecast;
-})
-.WithName("GetWeatherForecast");
-
-app.Run();
-
-record WeatherForecast(DateOnly Date, int TemperatureC, string? Summary)
-{
-    public int TemperatureF => 32 + (int)(TemperatureC / 0.5556);
-}
diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/appsettings.Development.json b/src/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/appsettings.Development.json
deleted file mode 100644
index 0c208ae9..00000000
--- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/appsettings.Development.json
+++ /dev/null
@@ -1,8 +0,0 @@
-{
-  "Logging": {
-    "LogLevel": {
-      "Default": "Information",
-      "Microsoft.AspNetCore": "Warning"
-    }
-  }
-}
diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/appsettings.json b/src/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/appsettings.json
deleted file mode 100644
index 10f68b8c..00000000
--- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/appsettings.json
+++ /dev/null
@@ -1,9 +0,0 @@
-{
-  "Logging": {
-    "LogLevel": {
-      "Default": "Information",
-      "Microsoft.AspNetCore": "Warning"
-    }
-  },
-  "AllowedHosts": "*"
-}
diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/appsettings.Development.json b/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/appsettings.Development.json
deleted file mode 100644
index b2dcdb67..00000000
--- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/appsettings.Development.json
+++ /dev/null
@@ -1,8 +0,0 @@
-{
-  "Logging": {
-    "LogLevel": {
-      "Default": "Information",
-      "Microsoft.Hosting.Lifetime": "Information"
-    }
-  }
-}
diff --git a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Tests/xunit.runner.json b/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Tests/xunit.runner.json
deleted file mode 100644
index 86c7ea05..00000000
--- a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Tests/xunit.runner.json
+++ /dev/null
@@ -1,3 +0,0 @@
-{
-  "$schema": "https://xunit.net/schema/current/xunit.runner.schema.json"
-}
diff --git a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/Program.cs b/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/Program.cs
deleted file mode 100644
index ee9d65d6..00000000
--- a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/Program.cs
+++ /dev/null
@@ -1,41 +0,0 @@
-var builder = WebApplication.CreateBuilder(args);
-
-// Add services to the container.
-// Learn more about configuring OpenAPI at https://aka.ms/aspnet/openapi
-builder.Services.AddOpenApi();
-
-var app = builder.Build();
-
-// Configure the HTTP request pipeline.
-if (app.Environment.IsDevelopment())
-{
-    app.MapOpenApi();
-}
-
-app.UseHttpsRedirection();
-
-var summaries = new[]
-{
-    "Freezing", "Bracing", "Chilly", "Cool", "Mild", "Warm", "Balmy", "Hot", "Sweltering", "Scorching"
-};
-
-app.MapGet("/weatherforecast", () =>
-{
-    var forecast = Enumerable.Range(1, 5).Select(index =>
-        new WeatherForecast
-        (
-            DateOnly.FromDateTime(DateTime.Now.AddDays(index)),
-            Random.Shared.Next(-20, 55),
-            summaries[Random.Shared.Next(summaries.Length)]
-        ))
-        .ToArray();
-    return forecast;
-})
-.WithName("GetWeatherForecast");
-
-app.Run();
-
-record WeatherForecast(DateOnly Date, int TemperatureC, string? Summary)
-{
-    public int TemperatureF => 32 + (int)(TemperatureC / 0.5556);
-}
diff --git a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/appsettings.Development.json b/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/appsettings.Development.json
deleted file mode 100644
index 0c208ae9..00000000
--- a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/appsettings.Development.json
+++ /dev/null
@@ -1,8 +0,0 @@
-{
-  "Logging": {
-    "LogLevel": {
-      "Default": "Information",
-      "Microsoft.AspNetCore": "Warning"
-    }
-  }
-}
diff --git a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/appsettings.json b/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/appsettings.json
deleted file mode 100644
index 10f68b8c..00000000
--- a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/appsettings.json
+++ /dev/null
@@ -1,9 +0,0 @@
-{
-  "Logging": {
-    "LogLevel": {
-      "Default": "Information",
-      "Microsoft.AspNetCore": "Warning"
-    }
-  },
-  "AllowedHosts": "*"
-}
diff --git a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/appsettings.Development.json b/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/appsettings.Development.json
deleted file mode 100644
index b2dcdb67..00000000
--- a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/appsettings.Development.json
+++ /dev/null
@@ -1,8 +0,0 @@
-{
-  "Logging": {
-    "LogLevel": {
-      "Default": "Information",
-      "Microsoft.Hosting.Lifetime": "Information"
-    }
-  }
-}
diff --git a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/appsettings.json b/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/appsettings.json
deleted file mode 100644
index b2dcdb67..00000000
--- a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/appsettings.json
+++ /dev/null
@@ -1,8 +0,0 @@
-{
-  "Logging": {
-    "LogLevel": {
-      "Default": "Information",
-      "Microsoft.Hosting.Lifetime": "Information"
-    }
-  }
-}
diff --git a/src/StellaOps.Zastava.Core.Tests/StellaOps.Zastava.Core.Tests.csproj b/src/StellaOps.Zastava.Core.Tests/StellaOps.Zastava.Core.Tests.csproj
deleted file mode 100644
index 6d31fc79..00000000
--- a/src/StellaOps.Zastava.Core.Tests/StellaOps.Zastava.Core.Tests.csproj
+++ /dev/null
@@ -1,14 +0,0 @@
-<Project Sdk="Microsoft.NET.Sdk">
-  <PropertyGroup>
-    <TargetFramework>net10.0</TargetFramework>
-    <LangVersion>preview</LangVersion>
-    <ImplicitUsings>enable</ImplicitUsings>
-    <Nullable>enable</Nullable>
-  </PropertyGroup>
-  <ItemGroup>
-    <ProjectReference Include="../StellaOps.Zastava.Core/StellaOps.Zastava.Core.csproj" />
-    <ProjectReference Include="../StellaOps.Authority/StellaOps.Auth.Client/StellaOps.Auth.Client.csproj" />
-    <ProjectReference Include="../StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOps.Auth.Abstractions.csproj" />
-    <ProjectReference Include="../StellaOps.Auth.Security/StellaOps.Auth.Security.csproj" />
-  </ItemGroup>
-</Project>
diff --git a/src/StellaOps.sln b/src/StellaOps.sln
index c08c2f42..8e6fe7b9 100644
--- a/src/StellaOps.sln
+++ b/src/StellaOps.sln
@@ -1,2646 +1,2646 @@
-
-Microsoft Visual Studio Solution File, Format Version 12.00
-# Visual Studio Version 17
-VisualStudioVersion = 17.0.31903.59
-MinimumVisualStudioVersion = 10.0.40219.1
-Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{827E0CD3-B72D-47B6-A68D-7590B98EB39B}"
-EndProject
-Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "StellaOps.Authority", "StellaOps.Authority",
"{361838C4-72E2-1C48-5D76-CA6D1A861242}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Abstractions", "StellaOps.Authority\StellaOps.Auth.Abstractions\StellaOps.Auth.Abstractions.csproj", "{D9F91EA0-8AF5-452A-86D8-52BACB2E39CB}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Client", "StellaOps.Authority\StellaOps.Auth.Client\StellaOps.Auth.Client.csproj", "{5DBE2E9E-9905-47CE-B8DC-B25409AF1EF2}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Configuration", "StellaOps.Configuration\StellaOps.Configuration.csproj", "{8BCEAAFC-9168-4CC0-AFDB-177E5F7C15C6}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.ServerIntegration", "StellaOps.Authority\StellaOps.Auth.ServerIntegration\StellaOps.Auth.ServerIntegration.csproj", "{46D35B4F-6A04-47FF-958B-5E6A73FCC059}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.DependencyInjection", "StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj", "{44A1241B-8ECF-4AFA-9972-452C39AD43D6}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Authority", "StellaOps.Authority\StellaOps.Authority\StellaOps.Authority.csproj", "{85AB3BB7-C493-4387-B39A-EB299AC37312}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Authority.Plugins.Abstractions", "StellaOps.Authority\StellaOps.Authority.Plugins.Abstractions\StellaOps.Authority.Plugins.Abstractions.csproj", "{5C5E91CA-3F98-4E9A-922B-F6415EABD1A3}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Authority.Plugin.Standard", "StellaOps.Authority\StellaOps.Authority.Plugin.Standard\StellaOps.Authority.Plugin.Standard.csproj", "{93DB06DC-B254-48A9-8F2C-6130A5658F27}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Plugin", "StellaOps.Plugin\StellaOps.Plugin.csproj", "{03CA315C-8AA1-4CEA-A28B-5EB35C586F4A}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Plugin.Tests", "StellaOps.Plugin.Tests\StellaOps.Plugin.Tests.csproj", "{C6DC3C29-C2AD-4015-8872-42E95A0FE63F}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cli", "StellaOps.Cli\StellaOps.Cli.csproj", "{40094279-250C-42AE-992A-856718FEFBAC}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cli.Tests", "StellaOps.Cli.Tests\StellaOps.Cli.Tests.csproj", "{B2967228-F8F7-4931-B257-1C63CB58CE1D}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Testing", "StellaOps.Concelier.Testing\StellaOps.Concelier.Testing.csproj", "{6D52EC2B-0A1A-4693-A8EE-5AB32A4A3ED9}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Common", "StellaOps.Concelier.Connector.Common\StellaOps.Concelier.Connector.Common.csproj", "{37F203A3-624E-4794-9C99-16CAC22C17DF}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Storage.Mongo", "StellaOps.Concelier.Storage.Mongo\StellaOps.Concelier.Storage.Mongo.csproj", "{3FF93987-A30A-4D50-8815-7CF3BB7CAE05}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Core", "StellaOps.Concelier.Core\StellaOps.Concelier.Core.csproj", "{AACE8717-0760-42F2-A225-8FCCE876FB65}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Models", "StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj", 
"{4AAD6965-E879-44AD-A8ED-E1D713A3CD6D}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Normalization", "StellaOps.Concelier.Normalization\StellaOps.Concelier.Normalization.csproj", "{85D82A87-1F4A-4B1B-8422-5B7A7B7704E3}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Core.Tests", "StellaOps.Concelier.Core.Tests\StellaOps.Concelier.Core.Tests.csproj", "{FE227DF2-875D-4BEA-A4E0-14EA7F3EC1D0}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Exporter.Json", "StellaOps.Concelier.Exporter.Json\StellaOps.Concelier.Exporter.Json.csproj", "{D0FB54BA-4D14-4A32-B09F-7EC94F369460}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Exporter.Json.Tests", "StellaOps.Concelier.Exporter.Json.Tests\StellaOps.Concelier.Exporter.Json.Tests.csproj", "{69C9E010-CBDD-4B89-84CF-7AB56D6A078A}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Exporter.TrivyDb", "StellaOps.Concelier.Exporter.TrivyDb\StellaOps.Concelier.Exporter.TrivyDb.csproj", "{E471176A-E1F3-4DE5-8D30-0865903A217A}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Exporter.TrivyDb.Tests", "StellaOps.Concelier.Exporter.TrivyDb.Tests\StellaOps.Concelier.Exporter.TrivyDb.Tests.csproj", "{FA013511-DF20-45F7-8077-EBA2D6224D64}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Merge", "StellaOps.Concelier.Merge\StellaOps.Concelier.Merge.csproj", "{B9F84697-54FE-4648-B173-EE3D904FFA4D}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Merge.Tests", "StellaOps.Concelier.Merge.Tests\StellaOps.Concelier.Merge.Tests.csproj", "{6751A76C-8ED8-40F4-AE2B-069DB31395FE}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Models.Tests", "StellaOps.Concelier.Models.Tests\StellaOps.Concelier.Models.Tests.csproj", "{DDBFA2EF-9CAE-473F-A438-369CAC25C66A}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Normalization.Tests", "StellaOps.Concelier.Normalization.Tests\StellaOps.Concelier.Normalization.Tests.csproj", "{063DE5E1-C8FE-47D0-A12A-22A25CDF2C22}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Acsc", "StellaOps.Concelier.Connector.Acsc\StellaOps.Concelier.Connector.Acsc.csproj", "{35350FAB-FC51-4FE8-81FB-011003134C37}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Cccs", "StellaOps.Concelier.Connector.Cccs\StellaOps.Concelier.Connector.Cccs.csproj", "{1BFC95B4-4C8A-44B2-903A-11FBCAAB9519}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.CertBund", "StellaOps.Concelier.Connector.CertBund\StellaOps.Concelier.Connector.CertBund.csproj", "{C4A65377-22F7-4D15-92A3-4F05847D167E}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.CertCc", "StellaOps.Concelier.Connector.CertCc\StellaOps.Concelier.Connector.CertCc.csproj", "{BDDE59E1-C643-4C87-8608-0F9A7A54DE09}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.CertFr", "StellaOps.Concelier.Connector.CertFr\StellaOps.Concelier.Connector.CertFr.csproj", "{0CC116C8-A7E5-4B94-9688-32920177FF97}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.CertFr.Tests", 
"StellaOps.Concelier.Connector.CertFr.Tests\StellaOps.Concelier.Connector.CertFr.Tests.csproj", "{E8862F6E-85C1-4FDB-AA92-0BB489B7EA1E}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.CertIn", "StellaOps.Concelier.Connector.CertIn\StellaOps.Concelier.Connector.CertIn.csproj", "{84DEDF05-A5BD-4644-86B9-6B7918FE3F31}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.CertIn.Tests", "StellaOps.Concelier.Connector.CertIn.Tests\StellaOps.Concelier.Connector.CertIn.Tests.csproj", "{9DEB1F54-94B5-40C4-AC44-220E680B016D}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Common.Tests", "StellaOps.Concelier.Connector.Common.Tests\StellaOps.Concelier.Connector.Common.Tests.csproj", "{7C3E87F2-93D8-4968-95E3-52C46947D46C}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Cve", "StellaOps.Concelier.Connector.Cve\StellaOps.Concelier.Connector.Cve.csproj", "{C0504D97-9BCD-4AE4-B0DC-B31C17B150F2}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Distro.Debian", "StellaOps.Concelier.Connector.Distro.Debian\StellaOps.Concelier.Connector.Distro.Debian.csproj", "{31B05493-104F-437F-9FA7-CA5286CE697C}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Distro.Debian.Tests", "StellaOps.Concelier.Connector.Distro.Debian.Tests\StellaOps.Concelier.Connector.Distro.Debian.Tests.csproj", "{937AF12E-D770-4534-8FF8-C59042609C2A}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Distro.RedHat", "StellaOps.Concelier.Connector.Distro.RedHat\StellaOps.Concelier.Connector.Distro.RedHat.csproj", "{5A028B04-9D76-470B-B5B3-766CE4CE860C}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Distro.RedHat.Tests", "StellaOps.Concelier.Connector.Distro.RedHat.Tests\StellaOps.Concelier.Connector.Distro.RedHat.Tests.csproj", "{749DE4C8-F733-43F8-B2A8-6649E71C7570}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Distro.Suse", "StellaOps.Concelier.Connector.Distro.Suse\StellaOps.Concelier.Connector.Distro.Suse.csproj", "{56D2C79E-2737-4FF9-9D19-150065F568D5}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Distro.Suse.Tests", "StellaOps.Concelier.Connector.Distro.Suse.Tests\StellaOps.Concelier.Connector.Distro.Suse.Tests.csproj", "{E41F6DC4-68B5-4EE3-97AE-801D725A2C13}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Distro.Ubuntu", "StellaOps.Concelier.Connector.Distro.Ubuntu\StellaOps.Concelier.Connector.Distro.Ubuntu.csproj", "{285F1D0F-501F-4E2E-8FA0-F2CF28AE3798}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Distro.Ubuntu.Tests", "StellaOps.Concelier.Connector.Distro.Ubuntu.Tests\StellaOps.Concelier.Connector.Distro.Ubuntu.Tests.csproj", "{26055403-C7F5-4709-8813-0F7387102791}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Ghsa", "StellaOps.Concelier.Connector.Ghsa\StellaOps.Concelier.Connector.Ghsa.csproj", "{0C00D0DA-C4C3-4B23-941F-A3DB2DBF33AF}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Ics.Cisa", 
"StellaOps.Concelier.Connector.Ics.Cisa\StellaOps.Concelier.Connector.Ics.Cisa.csproj", "{258327E9-431E-475C-933B-50893676E452}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Ics.Kaspersky", "StellaOps.Concelier.Connector.Ics.Kaspersky\StellaOps.Concelier.Connector.Ics.Kaspersky.csproj", "{42AF60C8-A5E1-40E0-86F8-98256364AF6F}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Ics.Kaspersky.Tests", "StellaOps.Concelier.Connector.Ics.Kaspersky.Tests\StellaOps.Concelier.Connector.Ics.Kaspersky.Tests.csproj", "{88C6A9C3-B433-4C36-8767-429C8C2396F8}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Jvn", "StellaOps.Concelier.Connector.Jvn\StellaOps.Concelier.Connector.Jvn.csproj", "{6B7099AB-01BF-4EC4-87D0-5C9C032266DE}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Jvn.Tests", "StellaOps.Concelier.Connector.Jvn.Tests\StellaOps.Concelier.Connector.Jvn.Tests.csproj", "{14C918EA-693E-41FE-ACAE-2E82DF077BEA}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Kev", "StellaOps.Concelier.Connector.Kev\StellaOps.Concelier.Connector.Kev.csproj", "{81111B26-74F6-4912-9084-7115FD119945}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Kisa", "StellaOps.Concelier.Connector.Kisa\StellaOps.Concelier.Connector.Kisa.csproj", "{80E2D661-FF3E-4A10-A2DF-AFD4F3D433FE}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Nvd", "StellaOps.Concelier.Connector.Nvd\StellaOps.Concelier.Connector.Nvd.csproj", "{8D0F501D-01B1-4E24-958B-FAF35B267705}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Nvd.Tests", "StellaOps.Concelier.Connector.Nvd.Tests\StellaOps.Concelier.Connector.Nvd.Tests.csproj", "{5BA91095-7F10-4717-B296-49DFBFC1C9C2}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Osv", "StellaOps.Concelier.Connector.Osv\StellaOps.Concelier.Connector.Osv.csproj", "{99616566-4EF1-4DC7-B655-825FE43D203D}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Osv.Tests", "StellaOps.Concelier.Connector.Osv.Tests\StellaOps.Concelier.Connector.Osv.Tests.csproj", "{EE3C03AD-E604-4C57-9B78-CF7F49FBFCB0}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Ru.Bdu", "StellaOps.Concelier.Connector.Ru.Bdu\StellaOps.Concelier.Connector.Ru.Bdu.csproj", "{A3B19095-2D95-4B09-B07E-2C082C72394B}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Ru.Nkcki", "StellaOps.Concelier.Connector.Ru.Nkcki\StellaOps.Concelier.Connector.Ru.Nkcki.csproj", "{807837AF-B392-4589-ADF1-3FDB34D6C5BF}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Adobe", "StellaOps.Concelier.Connector.Vndr.Adobe\StellaOps.Concelier.Connector.Vndr.Adobe.csproj", "{64EAFDCF-8283-4D5C-AC78-7969D5FE926A}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Adobe.Tests", "StellaOps.Concelier.Connector.Vndr.Adobe.Tests\StellaOps.Concelier.Connector.Vndr.Adobe.Tests.csproj", "{68F4D8A1-E32F-487A-B460-325F36989BE3}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Apple", 
"StellaOps.Concelier.Connector.Vndr.Apple\StellaOps.Concelier.Connector.Vndr.Apple.csproj", "{4A3DA4AE-7B88-4674-A7E2-F5D42B8256F2}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Chromium", "StellaOps.Concelier.Connector.Vndr.Chromium\StellaOps.Concelier.Connector.Vndr.Chromium.csproj", "{606C751B-7CF1-47CF-A25C-9248A55C814F}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Chromium.Tests", "StellaOps.Concelier.Connector.Vndr.Chromium.Tests\StellaOps.Concelier.Connector.Vndr.Chromium.Tests.csproj", "{0BE44D0A-CC4B-4E84-8AF3-D8D99551C431}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Cisco", "StellaOps.Concelier.Connector.Vndr.Cisco\StellaOps.Concelier.Connector.Vndr.Cisco.csproj", "{CC4CCE5F-55BC-4745-A204-4FA92BC1BADC}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Cisco.Tests", "StellaOps.Concelier.Connector.Vndr.Cisco.Tests\StellaOps.Concelier.Connector.Vndr.Cisco.Tests.csproj", "{99BAE717-9A2E-41F5-9ECC-5FB97E4A6066}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Msrc", "StellaOps.Concelier.Connector.Vndr.Msrc\StellaOps.Concelier.Connector.Vndr.Msrc.csproj", "{5CCE0DB7-C115-4B21-A7AE-C8488C22A853}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Oracle", "StellaOps.Concelier.Connector.Vndr.Oracle\StellaOps.Concelier.Connector.Vndr.Oracle.csproj", "{A09C9E66-5496-47EC-8B23-9EEB7CBDC75E}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Oracle.Tests", "StellaOps.Concelier.Connector.Vndr.Oracle.Tests\StellaOps.Concelier.Connector.Vndr.Oracle.Tests.csproj", "{06DC817F-A936-4F83-8929-E00622B32245}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Vmware", "StellaOps.Concelier.Connector.Vndr.Vmware\StellaOps.Concelier.Connector.Vndr.Vmware.csproj", "{2C999476-0291-4161-B3E9-1AA99A3B1139}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Vmware.Tests", "StellaOps.Concelier.Connector.Vndr.Vmware.Tests\StellaOps.Concelier.Connector.Vndr.Vmware.Tests.csproj", "{476EAADA-1B39-4049-ABE4-CCAC21FFE9E2}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Storage.Mongo.Tests", "StellaOps.Concelier.Storage.Mongo.Tests\StellaOps.Concelier.Storage.Mongo.Tests.csproj", "{0EF56124-E6E8-4E89-95DD-5A5D5FF05A98}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.WebService", "StellaOps.Concelier.WebService\StellaOps.Concelier.WebService.csproj", "{0DBB9FC4-2E46-4C3E-BE88-2A8DCB59DB7D}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.WebService.Tests", "StellaOps.Concelier.WebService.Tests\StellaOps.Concelier.WebService.Tests.csproj", "{8A40142F-E8C8-4E86-BE70-7DD4AB1FFDEE}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Configuration.Tests", "StellaOps.Configuration.Tests\StellaOps.Configuration.Tests.csproj", "{C9D20F74-EE5F-4C9E-9AB1-C03E90B34F92}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Authority.Plugins.Abstractions.Tests", "StellaOps.Authority\StellaOps.Authority.Plugins.Abstractions.Tests\StellaOps.Authority.Plugins.Abstractions.Tests.csproj", 
"{50140A32-6D3C-47DB-983A-7166CBA51845}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Authority.Tests", "StellaOps.Authority\StellaOps.Authority.Tests\StellaOps.Authority.Tests.csproj", "{031979F2-6ABA-444F-A6A4-80115DC487CE}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Authority.Plugin.Standard.Tests", "StellaOps.Authority\StellaOps.Authority.Plugin.Standard.Tests\StellaOps.Authority.Plugin.Standard.Tests.csproj", "{D71B0DA5-80A3-419E-898D-40E77A9A7F19}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Authority.Storage.Mongo", "StellaOps.Authority\StellaOps.Authority.Storage.Mongo\StellaOps.Authority.Storage.Mongo.csproj", "{B2C877D9-B521-4901-8817-76B5DAA62FCE}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Abstractions.Tests", "StellaOps.Authority\StellaOps.Auth.Abstractions.Tests\StellaOps.Auth.Abstractions.Tests.csproj", "{08D3B6D0-3CE8-4F24-A6F1-BCAB01AD6278}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.ServerIntegration.Tests", "StellaOps.Authority\StellaOps.Auth.ServerIntegration.Tests\StellaOps.Auth.ServerIntegration.Tests.csproj", "{7116DD6B-2491-49E1-AB27-5210E949F753}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Client.Tests", "StellaOps.Authority\StellaOps.Auth.Client.Tests\StellaOps.Auth.Client.Tests.csproj", "{7DBE31A6-D2FD-499E-B675-4092723175AD}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Kev.Tests", "StellaOps.Concelier.Connector.Kev.Tests\StellaOps.Concelier.Connector.Kev.Tests.csproj", "{D99E6EAE-D278-4480-AA67-85F025383E47}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Cve.Tests", "StellaOps.Concelier.Connector.Cve.Tests\StellaOps.Concelier.Connector.Cve.Tests.csproj", "{D3825714-3DDA-44B7-A99C-5F3E65716691}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Ghsa.Tests", "StellaOps.Concelier.Connector.Ghsa.Tests\StellaOps.Concelier.Connector.Ghsa.Tests.csproj", "{FAB78D21-7372-48FE-B2C3-DE1807F1157D}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cryptography", "StellaOps.Cryptography\StellaOps.Cryptography.csproj", "{EADFA337-B0FA-4712-A24A-7C08235BDF98}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cryptography.Tests", "StellaOps.Cryptography.Tests\StellaOps.Cryptography.Tests.csproj", "{110F7EC2-3149-4D1B-A972-E69E79F1EBF5}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cryptography.DependencyInjection", "StellaOps.Cryptography.DependencyInjection\StellaOps.Cryptography.DependencyInjection.csproj", "{B84FE2DD-A1AD-437C-95CF-89C1DCCFDF6F}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Core", "StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj", "{3288F0F8-FF86-4DB3-A1FD-8EB51893E8C2}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Core.Tests", "StellaOps.Excititor.Core.Tests\StellaOps.Excititor.Core.Tests.csproj", "{680CA103-DCE8-4D02-8979-72DEA5BE8C00}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Policy", "StellaOps.Excititor.Policy\StellaOps.Excititor.Policy.csproj", "{7F4B19D4-569A-4CCF-B481-EBE04860451A}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Policy.Tests", 
"StellaOps.Excititor.Policy.Tests\StellaOps.Excititor.Policy.Tests.csproj", "{DE9863B5-E6D6-4C5F-B52A-ED9E964008A3}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Storage.Mongo", "StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj", "{E380F242-031E-483E-8570-0EF7EA525C4F}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Export", "StellaOps.Excititor.Export\StellaOps.Excititor.Export.csproj", "{42582C16-F5A9-417F-9D33-BC489925324F}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Export.Tests", "StellaOps.Excititor.Export.Tests\StellaOps.Excititor.Export.Tests.csproj", "{06F40DA8-FEFA-4C2B-907B-155BD92BB859}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Connectors.RedHat.CSAF", "StellaOps.Excititor.Connectors.RedHat.CSAF\StellaOps.Excititor.Connectors.RedHat.CSAF.csproj", "{A2E3F03A-0CAD-4E2A-8C71-DDEBB1B7E4F7}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Connectors.RedHat.CSAF.Tests", "StellaOps.Excititor.Connectors.RedHat.CSAF.Tests\StellaOps.Excititor.Connectors.RedHat.CSAF.Tests.csproj", "{3A1AF0AD-4DAE-4D82-9CCF-2DCB83CC3679}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Connectors.Abstractions", "StellaOps.Excititor.Connectors.Abstractions\StellaOps.Excititor.Connectors.Abstractions.csproj", "{F1DF0F07-1BCB-4B55-8353-07BF8A4B2A67}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Worker", "StellaOps.Excititor.Worker\StellaOps.Excititor.Worker.csproj", "{781EC793-1DB0-4E31-95BC-12A2B373045F}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Worker.Tests", "StellaOps.Excititor.Worker.Tests\StellaOps.Excititor.Worker.Tests.csproj", "{BB863E0C-50FF-41AE-9C13-4E8A1BABC62C}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Formats.CSAF", "StellaOps.Excititor.Formats.CSAF\StellaOps.Excititor.Formats.CSAF.csproj", "{14E9D043-F0EF-4F68-AE83-D6F579119D9A}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Formats.CSAF.Tests", "StellaOps.Excititor.Formats.CSAF.Tests\StellaOps.Excititor.Formats.CSAF.Tests.csproj", "{27E94B6E-DEF8-4B89-97CB-424703790ECE}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Formats.CycloneDX", "StellaOps.Excititor.Formats.CycloneDX\StellaOps.Excititor.Formats.CycloneDX.csproj", "{361E3E23-B215-423D-9906-A84171E20AD3}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Formats.CycloneDX.Tests", "StellaOps.Excititor.Formats.CycloneDX.Tests\StellaOps.Excititor.Formats.CycloneDX.Tests.csproj", "{7A7A3480-C6C3-4A9F-AF46-1889424B9AC2}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Formats.OpenVEX", "StellaOps.Excititor.Formats.OpenVEX\StellaOps.Excititor.Formats.OpenVEX.csproj", "{C3EAFCB8-0394-4B74-B9A6-3DBA4509201F}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Formats.OpenVEX.Tests", "StellaOps.Excititor.Formats.OpenVEX.Tests\StellaOps.Excititor.Formats.OpenVEX.Tests.csproj", "{E86CF4A6-2463-4589-A9D8-9DF557C48367}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Connectors.Cisco.CSAF", "StellaOps.Excititor.Connectors.Cisco.CSAF\StellaOps.Excititor.Connectors.Cisco.CSAF.csproj", 
"{B308B94C-E01F-4449-A5A6-CD7A48E52D15}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Connectors.Cisco.CSAF.Tests", "StellaOps.Excititor.Connectors.Cisco.CSAF.Tests\StellaOps.Excititor.Connectors.Cisco.CSAF.Tests.csproj", "{9FBA3EC4-D794-48BD-82FA-0289E5A2A5FF}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Connectors.SUSE.RancherVEXHub", "StellaOps.Excititor.Connectors.SUSE.RancherVEXHub\StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.csproj", "{E076DC9C-B436-44BF-B02E-FA565086F805}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Tests", "StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Tests\StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Tests.csproj", "{55500025-FE82-4F97-A261-9BAEA4B10845}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Connectors.MSRC.CSAF", "StellaOps.Excititor.Connectors.MSRC.CSAF\StellaOps.Excititor.Connectors.MSRC.CSAF.csproj", "{CD12875F-9367-41BD-810C-7FBE76314F17}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Connectors.MSRC.CSAF.Tests", "StellaOps.Excititor.Connectors.MSRC.CSAF.Tests\StellaOps.Excititor.Connectors.MSRC.CSAF.Tests.csproj", "{063D3280-9918-465A-AF2D-3650A2A50D03}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Connectors.Oracle.CSAF", "StellaOps.Excititor.Connectors.Oracle.CSAF\StellaOps.Excititor.Connectors.Oracle.CSAF.csproj", "{A3EEE400-3655-4B34-915A-598E60CD55FB}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Connectors.Oracle.CSAF.Tests", "StellaOps.Excititor.Connectors.Oracle.CSAF.Tests\StellaOps.Excititor.Connectors.Oracle.CSAF.Tests.csproj", "{577025AD-2FDD-42DF-BFA2-3FC095B50539}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Connectors.Ubuntu.CSAF", "StellaOps.Excititor.Connectors.Ubuntu.CSAF\StellaOps.Excititor.Connectors.Ubuntu.CSAF.csproj", "{DD3B2076-E5E0-4533-8D27-7724225D7758}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests", "StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests\StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests.csproj", "{CADA1364-8EB1-479E-AB6F-4105C26335C8}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Core", "StellaOps.Scanner.Core\StellaOps.Scanner.Core.csproj", "{8CC4441E-9D1A-4E00-831B-34828A3F9446}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Core.Tests", "StellaOps.Scanner.Core.Tests\StellaOps.Scanner.Core.Tests.csproj", "{01B8AC3F-1B97-4F79-93C6-BE1CBA26FE17}" -EndProject -Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "StellaOps.Authority", "StellaOps.Authority", "{BDB24B64-FE4E-C4BD-9F80-9428F98EDF6F}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Policy", "StellaOps.Policy\StellaOps.Policy.csproj", "{37BB9502-CCD1-425A-BF45-D56968B0C2F9}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Policy.Tests", "StellaOps.Policy.Tests\StellaOps.Policy.Tests.csproj", "{015A7A95-2C07-4C7F-8048-DB591AAC5FE5}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.WebService", "StellaOps.Scanner.WebService\StellaOps.Scanner.WebService.csproj", "{EF59DAD6-30CE-47CB-862A-DD79F31BFDE4}" -EndProject 
-Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.WebService.Tests", "StellaOps.Scanner.WebService.Tests\StellaOps.Scanner.WebService.Tests.csproj", "{27D951AD-696D-4330-B4F5-F8F81344C191}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Storage", "StellaOps.Scanner.Storage\StellaOps.Scanner.Storage.csproj", "{31277AFF-9BFF-4C17-8593-B562A385058E}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Storage.Tests", "StellaOps.Scanner.Storage.Tests\StellaOps.Scanner.Storage.Tests.csproj", "{3A8F090F-678D-46E2-8899-67402129749C}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Worker", "StellaOps.Scanner.Worker\StellaOps.Scanner.Worker.csproj", "{19FACEC7-D6D4-40F5-84AD-14E2983F18F7}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Worker.Tests", "StellaOps.Scanner.Worker.Tests\StellaOps.Scanner.Worker.Tests.csproj", "{8342286A-BE36-4ACA-87FF-EBEB4E268498}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.EntryTrace", "StellaOps.Scanner.EntryTrace\StellaOps.Scanner.EntryTrace.csproj", "{05D844B6-51C1-4926-919C-D99E24FB3BC9}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.EntryTrace.Tests", "StellaOps.Scanner.EntryTrace.Tests\StellaOps.Scanner.EntryTrace.Tests.csproj", "{03E15545-D6A0-4287-A88C-6EDE77C0DCBE}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang", "StellaOps.Scanner.Analyzers.Lang\StellaOps.Scanner.Analyzers.Lang.csproj", "{A072C46F-BA45-419E-B1B6-416919F78440}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Tests", "StellaOps.Scanner.Analyzers.Lang.Tests\StellaOps.Scanner.Analyzers.Lang.Tests.csproj", "{6DE0F48D-8CEA-44C1-82FF-0DC891B33FE3}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Diff", "StellaOps.Scanner.Diff\StellaOps.Scanner.Diff.csproj", "{10088067-7B8F-4D2E-A8E1-ED546DC17369}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Diff.Tests", "StellaOps.Scanner.Diff.Tests\StellaOps.Scanner.Diff.Tests.csproj", "{E014565C-2456-4BD0-9481-557F939C1E36}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Emit", "StellaOps.Scanner.Emit\StellaOps.Scanner.Emit.csproj", "{44825FDA-68D2-4675-8B1D-6D5303DC38CF}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Emit.Tests", "StellaOps.Scanner.Emit.Tests\StellaOps.Scanner.Emit.Tests.csproj", "{6D46DB08-C8D1-4F67-A6D0-D50FE84F19E0}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Cache", "StellaOps.Scanner.Cache\StellaOps.Scanner.Cache.csproj", "{5E5EB0A7-7A19-4144-81FE-13C31DB678B2}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Cache.Tests", "StellaOps.Scanner.Cache.Tests\StellaOps.Scanner.Cache.Tests.csproj", "{7F3D4F33-341A-44A1-96EA-A1729BC2E5D8}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Java", "StellaOps.Scanner.Analyzers.Lang.Java\StellaOps.Scanner.Analyzers.Lang.Java.csproj", "{B86C287A-734E-4527-A03E-6B970F22E27E}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.OS", "StellaOps.Scanner.Analyzers.OS\StellaOps.Scanner.Analyzers.OS.csproj", "{E23FBF14-EE5B-49D4-8938-E8368CF4A4B5}" 
-EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.OS.Apk", "StellaOps.Scanner.Analyzers.OS.Apk\StellaOps.Scanner.Analyzers.OS.Apk.csproj", "{50D014B5-99A6-46FC-B745-26687595B293}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.OS.Dpkg", "StellaOps.Scanner.Analyzers.OS.Dpkg\StellaOps.Scanner.Analyzers.OS.Dpkg.csproj", "{D99C1F78-67EA-40E7-BD4C-985592F5265A}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.OS.Rpm", "StellaOps.Scanner.Analyzers.OS.Rpm\StellaOps.Scanner.Analyzers.OS.Rpm.csproj", "{1CBC0B9C-A96B-4143-B70F-37C69229FFF2}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.OS.Tests", "StellaOps.Scanner.Analyzers.OS.Tests\StellaOps.Scanner.Analyzers.OS.Tests.csproj", "{760E2855-31B3-4CCB-BACB-34B7196A59B8}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Node", "StellaOps.Scanner.Analyzers.Lang.Node\StellaOps.Scanner.Analyzers.Lang.Node.csproj", "{3F688F21-7E31-4781-8995-9DD34276773F}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Python", "StellaOps.Scanner.Analyzers.Lang.Python\StellaOps.Scanner.Analyzers.Lang.Python.csproj", "{80AD7C4D-E4C6-4700-87AD-77B5698B338F}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Go", "StellaOps.Scanner.Analyzers.Lang.Go\StellaOps.Scanner.Analyzers.Lang.Go.csproj", "{60ABAB54-2EE9-4A16-A109-67F7B6F29184}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.DotNet", "StellaOps.Scanner.Analyzers.Lang.DotNet\StellaOps.Scanner.Analyzers.Lang.DotNet.csproj", "{D32C1D26-C9A1-4F2A-9DBA-DBF0353E3972}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Rust", "StellaOps.Scanner.Analyzers.Lang.Rust\StellaOps.Scanner.Analyzers.Lang.Rust.csproj", "{5CA4E28E-6305-4B21-AD2E-0DF24D47A65B}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Security", "StellaOps.Auth.Security\StellaOps.Auth.Security.csproj", "{05475C0A-C225-4F07-A3C7-9E17E660042E}" -EndProject -Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "StellaOps.Attestor", "StellaOps.Attestor", "{78C966F5-2242-D8EC-ADCA-A1A9C7F723A6}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Attestor.Core", "StellaOps.Attestor\StellaOps.Attestor.Core\StellaOps.Attestor.Core.csproj", "{BA47D456-4657-4C86-A665-21293E3AC47F}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Attestor.Infrastructure", "StellaOps.Attestor\StellaOps.Attestor.Infrastructure\StellaOps.Attestor.Infrastructure.csproj", "{49EF86AC-1CC2-4A24-8637-C5151E23DF9D}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Attestor.WebService", "StellaOps.Attestor\StellaOps.Attestor.WebService\StellaOps.Attestor.WebService.csproj", "{C22333B3-D132-4960-A490-6BEF1EB1C917}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Attestor.Tests", "StellaOps.Attestor\StellaOps.Attestor.Tests\StellaOps.Attestor.Tests.csproj", "{B8B15A8D-F647-41AE-A55F-A283A47E97C4}" -EndProject -Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "StellaOps.Zastava", "StellaOps.Zastava", "{F1F029E6-2E4B-4A42-8D8F-AB325EE3B608}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Zastava.Core", 
"StellaOps.Zastava.Core\StellaOps.Zastava.Core.csproj", "{CBE6E3D8-230C-4513-B98F-99D82B83B9F7}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Zastava.Core.Tests", "StellaOps.Zastava.Core.Tests\StellaOps.Zastava.Core.Tests.csproj", "{821C7F88-B775-4D3C-8D89-850B6C34E818}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Zastava.Webhook", "StellaOps.Zastava.Webhook\StellaOps.Zastava.Webhook.csproj", "{3ABEAD26-B056-45CC-8F72-F40C8B8DBCBC}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Zastava.Webhook.Tests", "StellaOps.Zastava.Webhook.Tests\StellaOps.Zastava.Webhook.Tests.csproj", "{3C500ECB-5422-4FFB-BD3D-48A850763D31}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cli.Plugins.NonCore", "StellaOps.Cli.Plugins.NonCore\StellaOps.Cli.Plugins.NonCore.csproj", "{D851E54A-5A44-4F74-9FDF-A2C32CACF651}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Java.Tests", "StellaOps.Scanner.Analyzers.Lang.Java.Tests\StellaOps.Scanner.Analyzers.Lang.Java.Tests.csproj", "{866807B8-8E68-417C-8148-6450DEA68012}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Node.Tests", "StellaOps.Scanner.Analyzers.Lang.Node.Tests\StellaOps.Scanner.Analyzers.Lang.Node.Tests.csproj", "{20BE41BD-9C32-45B5-882A-C01491979633}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Python.Tests", "StellaOps.Scanner.Analyzers.Lang.Python.Tests\StellaOps.Scanner.Analyzers.Lang.Python.Tests.csproj", "{9E19FDB4-121A-4EF4-8A73-DFCDF04B19ED}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Go.Tests", "StellaOps.Scanner.Analyzers.Lang.Go.Tests\StellaOps.Scanner.Analyzers.Lang.Go.Tests.csproj", "{7C3A6012-6FC8-46A9-9966-1AC373614C41}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Zastava.Observer", "StellaOps.Zastava.Observer\StellaOps.Zastava.Observer.csproj", "{BC38594B-0B84-4657-9F7B-F2A0FC810F04}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Zastava.Observer.Tests", "StellaOps.Zastava.Observer.Tests\StellaOps.Zastava.Observer.Tests.csproj", "{20E0774F-86D5-4CD0-B636-E5212074FDE8}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Policy.Engine", "StellaOps.Policy.Engine\StellaOps.Policy.Engine.csproj", "{FE668D8D-AB46-41F4-A82F-8A3330C4D152}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cartographer", "StellaOps.Cartographer\StellaOps.Cartographer.csproj", "{548C296A-476B-433D-9552-923648BDFA97}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.SbomService", "StellaOps.SbomService\StellaOps.SbomService.csproj", "{3510DF3E-E822-4FB1-8C65-ED6DBAD223D4}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.WebService", "StellaOps.Scheduler.WebService\StellaOps.Scheduler.WebService.csproj", "{C733F161-FCED-4D21-BC83-5CC079E93547}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.WebService.Tests", "StellaOps.Scheduler.WebService.Tests\StellaOps.Scheduler.WebService.Tests.csproj", "{76E1E74F-41C1-4E24-85EA-ED13F28B80B1}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Registry.TokenService", "StellaOps.Registry.TokenService\StellaOps.Registry.TokenService.csproj", 
"{EC73D558-0472-49E2-B46E-D26F9686AA9C}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Registry.TokenService.Tests", "StellaOps.Registry.TokenService.Tests\StellaOps.Registry.TokenService.Tests.csproj", "{1E532EAB-8DB7-42DF-A9BD-BBBA08C8148F}" -EndProject -Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "StellaOps.Bench", "StellaOps.Bench", "{1553F566-661E-A2F5-811B-F74BF45C44CC}" -EndProject -Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "PolicyEngine", "PolicyEngine", "{CBDF819E-923F-A07F-78D9-D599DD28197E}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Bench.PolicyEngine", "StellaOps.Bench\PolicyEngine\StellaOps.Bench.PolicyEngine\StellaOps.Bench.PolicyEngine.csproj", "{D8B22C17-28E9-4059-97C5-4AC4600A2BD5}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Aoc", "StellaOps.Aoc\StellaOps.Aoc.csproj", "{6BE16682-4FB9-49C7-A2B3-ECB4EC5EF8BD}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Aoc.Tests", "StellaOps.Aoc.Tests\StellaOps.Aoc.Tests.csproj", "{4D167781-1AC0-46CF-A32E-1B6E048940B2}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.RawModels", "StellaOps.Concelier.RawModels\StellaOps.Concelier.RawModels.csproj", "{C3AEAEE7-038E-45FF-892B-DB18EE29F790}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.RawModels.Tests", "StellaOps.Concelier.RawModels.Tests\StellaOps.Concelier.RawModels.Tests.csproj", "{7FACF6B4-7E12-4543-AAD4-0072FA1ECE0E}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Signals", "StellaOps.Signals\StellaOps.Signals.csproj", "{1561D597-922F-486E-ACF4-98250DDC5CDA}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Signals.Tests", "StellaOps.Signals.Tests\StellaOps.Signals.Tests.csproj", "{D7B25EC1-CDC8-4D2D-8569-826568E1AAD2}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Policy.Gateway", "StellaOps.Policy.Gateway\StellaOps.Policy.Gateway.csproj", "{9369FA32-E98A-4180-9251-914925188086}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Policy.Gateway.Tests", "StellaOps.Policy.Gateway.Tests\StellaOps.Policy.Gateway.Tests.csproj", "{67650687-2E32-40BB-9849-C4ABBA65A7CF}" -EndProject -Global - GlobalSection(SolutionConfigurationPlatforms) = preSolution - Debug|Any CPU = Debug|Any CPU - Debug|x64 = Debug|x64 - Debug|x86 = Debug|x86 - Release|Any CPU = Release|Any CPU - Release|x64 = Release|x64 - Release|x86 = Release|x86 - EndGlobalSection - GlobalSection(ProjectConfigurationPlatforms) = postSolution - {D9F91EA0-8AF5-452A-86D8-52BACB2E39CB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {D9F91EA0-8AF5-452A-86D8-52BACB2E39CB}.Debug|Any CPU.Build.0 = Debug|Any CPU - {D9F91EA0-8AF5-452A-86D8-52BACB2E39CB}.Debug|x64.ActiveCfg = Debug|Any CPU - {D9F91EA0-8AF5-452A-86D8-52BACB2E39CB}.Debug|x64.Build.0 = Debug|Any CPU - {D9F91EA0-8AF5-452A-86D8-52BACB2E39CB}.Debug|x86.ActiveCfg = Debug|Any CPU - {D9F91EA0-8AF5-452A-86D8-52BACB2E39CB}.Debug|x86.Build.0 = Debug|Any CPU - {D9F91EA0-8AF5-452A-86D8-52BACB2E39CB}.Release|Any CPU.ActiveCfg = Release|Any CPU - {D9F91EA0-8AF5-452A-86D8-52BACB2E39CB}.Release|Any CPU.Build.0 = Release|Any CPU - {D9F91EA0-8AF5-452A-86D8-52BACB2E39CB}.Release|x64.ActiveCfg = Release|Any CPU - {D9F91EA0-8AF5-452A-86D8-52BACB2E39CB}.Release|x64.Build.0 = Release|Any CPU - {D9F91EA0-8AF5-452A-86D8-52BACB2E39CB}.Release|x86.ActiveCfg = Release|Any CPU - 
[… flattened solution-file diff hunk: per-project configuration entries of the form
    -		{PROJECT-GUID}.Debug|Any CPU.ActiveCfg = Debug|Any CPU
  repeated for the Debug and Release configurations across the Any CPU, x64, and x86 platforms (both ActiveCfg and Build.0, all mapping to "Any CPU"), for the solution's project GUIDs from {5DBE2E9E-9905-47CE-B8DC-B25409AF1EF2} through {B2C877D9-B521-4901-8817-76B5DAA62FCE} in this span …]
Release|Any CPU - {B2C877D9-B521-4901-8817-76B5DAA62FCE}.Release|x86.Build.0 = Release|Any CPU - {08D3B6D0-3CE8-4F24-A6F1-BCAB01AD6278}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {08D3B6D0-3CE8-4F24-A6F1-BCAB01AD6278}.Debug|Any CPU.Build.0 = Debug|Any CPU - {08D3B6D0-3CE8-4F24-A6F1-BCAB01AD6278}.Debug|x64.ActiveCfg = Debug|Any CPU - {08D3B6D0-3CE8-4F24-A6F1-BCAB01AD6278}.Debug|x64.Build.0 = Debug|Any CPU - {08D3B6D0-3CE8-4F24-A6F1-BCAB01AD6278}.Debug|x86.ActiveCfg = Debug|Any CPU - {08D3B6D0-3CE8-4F24-A6F1-BCAB01AD6278}.Debug|x86.Build.0 = Debug|Any CPU - {08D3B6D0-3CE8-4F24-A6F1-BCAB01AD6278}.Release|Any CPU.ActiveCfg = Release|Any CPU - {08D3B6D0-3CE8-4F24-A6F1-BCAB01AD6278}.Release|Any CPU.Build.0 = Release|Any CPU - {08D3B6D0-3CE8-4F24-A6F1-BCAB01AD6278}.Release|x64.ActiveCfg = Release|Any CPU - {08D3B6D0-3CE8-4F24-A6F1-BCAB01AD6278}.Release|x64.Build.0 = Release|Any CPU - {08D3B6D0-3CE8-4F24-A6F1-BCAB01AD6278}.Release|x86.ActiveCfg = Release|Any CPU - {08D3B6D0-3CE8-4F24-A6F1-BCAB01AD6278}.Release|x86.Build.0 = Release|Any CPU - {7116DD6B-2491-49E1-AB27-5210E949F753}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {7116DD6B-2491-49E1-AB27-5210E949F753}.Debug|Any CPU.Build.0 = Debug|Any CPU - {7116DD6B-2491-49E1-AB27-5210E949F753}.Debug|x64.ActiveCfg = Debug|Any CPU - {7116DD6B-2491-49E1-AB27-5210E949F753}.Debug|x64.Build.0 = Debug|Any CPU - {7116DD6B-2491-49E1-AB27-5210E949F753}.Debug|x86.ActiveCfg = Debug|Any CPU - {7116DD6B-2491-49E1-AB27-5210E949F753}.Debug|x86.Build.0 = Debug|Any CPU - {7116DD6B-2491-49E1-AB27-5210E949F753}.Release|Any CPU.ActiveCfg = Release|Any CPU - {7116DD6B-2491-49E1-AB27-5210E949F753}.Release|Any CPU.Build.0 = Release|Any CPU - {7116DD6B-2491-49E1-AB27-5210E949F753}.Release|x64.ActiveCfg = Release|Any CPU - {7116DD6B-2491-49E1-AB27-5210E949F753}.Release|x64.Build.0 = Release|Any CPU - {7116DD6B-2491-49E1-AB27-5210E949F753}.Release|x86.ActiveCfg = Release|Any CPU - {7116DD6B-2491-49E1-AB27-5210E949F753}.Release|x86.Build.0 = Release|Any CPU - {7DBE31A6-D2FD-499E-B675-4092723175AD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {7DBE31A6-D2FD-499E-B675-4092723175AD}.Debug|Any CPU.Build.0 = Debug|Any CPU - {7DBE31A6-D2FD-499E-B675-4092723175AD}.Debug|x64.ActiveCfg = Debug|Any CPU - {7DBE31A6-D2FD-499E-B675-4092723175AD}.Debug|x64.Build.0 = Debug|Any CPU - {7DBE31A6-D2FD-499E-B675-4092723175AD}.Debug|x86.ActiveCfg = Debug|Any CPU - {7DBE31A6-D2FD-499E-B675-4092723175AD}.Debug|x86.Build.0 = Debug|Any CPU - {7DBE31A6-D2FD-499E-B675-4092723175AD}.Release|Any CPU.ActiveCfg = Release|Any CPU - {7DBE31A6-D2FD-499E-B675-4092723175AD}.Release|Any CPU.Build.0 = Release|Any CPU - {7DBE31A6-D2FD-499E-B675-4092723175AD}.Release|x64.ActiveCfg = Release|Any CPU - {7DBE31A6-D2FD-499E-B675-4092723175AD}.Release|x64.Build.0 = Release|Any CPU - {7DBE31A6-D2FD-499E-B675-4092723175AD}.Release|x86.ActiveCfg = Release|Any CPU - {7DBE31A6-D2FD-499E-B675-4092723175AD}.Release|x86.Build.0 = Release|Any CPU - {D99E6EAE-D278-4480-AA67-85F025383E47}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {D99E6EAE-D278-4480-AA67-85F025383E47}.Debug|Any CPU.Build.0 = Debug|Any CPU - {D99E6EAE-D278-4480-AA67-85F025383E47}.Debug|x64.ActiveCfg = Debug|Any CPU - {D99E6EAE-D278-4480-AA67-85F025383E47}.Debug|x64.Build.0 = Debug|Any CPU - {D99E6EAE-D278-4480-AA67-85F025383E47}.Debug|x86.ActiveCfg = Debug|Any CPU - {D99E6EAE-D278-4480-AA67-85F025383E47}.Debug|x86.Build.0 = Debug|Any CPU - {D99E6EAE-D278-4480-AA67-85F025383E47}.Release|Any CPU.ActiveCfg = Release|Any CPU - {D99E6EAE-D278-4480-AA67-85F025383E47}.Release|Any 
CPU.Build.0 = Release|Any CPU - {D99E6EAE-D278-4480-AA67-85F025383E47}.Release|x64.ActiveCfg = Release|Any CPU - {D99E6EAE-D278-4480-AA67-85F025383E47}.Release|x64.Build.0 = Release|Any CPU - {D99E6EAE-D278-4480-AA67-85F025383E47}.Release|x86.ActiveCfg = Release|Any CPU - {D99E6EAE-D278-4480-AA67-85F025383E47}.Release|x86.Build.0 = Release|Any CPU - {D3825714-3DDA-44B7-A99C-5F3E65716691}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {D3825714-3DDA-44B7-A99C-5F3E65716691}.Debug|Any CPU.Build.0 = Debug|Any CPU - {D3825714-3DDA-44B7-A99C-5F3E65716691}.Debug|x64.ActiveCfg = Debug|Any CPU - {D3825714-3DDA-44B7-A99C-5F3E65716691}.Debug|x64.Build.0 = Debug|Any CPU - {D3825714-3DDA-44B7-A99C-5F3E65716691}.Debug|x86.ActiveCfg = Debug|Any CPU - {D3825714-3DDA-44B7-A99C-5F3E65716691}.Debug|x86.Build.0 = Debug|Any CPU - {D3825714-3DDA-44B7-A99C-5F3E65716691}.Release|Any CPU.ActiveCfg = Release|Any CPU - {D3825714-3DDA-44B7-A99C-5F3E65716691}.Release|Any CPU.Build.0 = Release|Any CPU - {D3825714-3DDA-44B7-A99C-5F3E65716691}.Release|x64.ActiveCfg = Release|Any CPU - {D3825714-3DDA-44B7-A99C-5F3E65716691}.Release|x64.Build.0 = Release|Any CPU - {D3825714-3DDA-44B7-A99C-5F3E65716691}.Release|x86.ActiveCfg = Release|Any CPU - {D3825714-3DDA-44B7-A99C-5F3E65716691}.Release|x86.Build.0 = Release|Any CPU - {FAB78D21-7372-48FE-B2C3-DE1807F1157D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {FAB78D21-7372-48FE-B2C3-DE1807F1157D}.Debug|Any CPU.Build.0 = Debug|Any CPU - {FAB78D21-7372-48FE-B2C3-DE1807F1157D}.Debug|x64.ActiveCfg = Debug|Any CPU - {FAB78D21-7372-48FE-B2C3-DE1807F1157D}.Debug|x64.Build.0 = Debug|Any CPU - {FAB78D21-7372-48FE-B2C3-DE1807F1157D}.Debug|x86.ActiveCfg = Debug|Any CPU - {FAB78D21-7372-48FE-B2C3-DE1807F1157D}.Debug|x86.Build.0 = Debug|Any CPU - {FAB78D21-7372-48FE-B2C3-DE1807F1157D}.Release|Any CPU.ActiveCfg = Release|Any CPU - {FAB78D21-7372-48FE-B2C3-DE1807F1157D}.Release|Any CPU.Build.0 = Release|Any CPU - {FAB78D21-7372-48FE-B2C3-DE1807F1157D}.Release|x64.ActiveCfg = Release|Any CPU - {FAB78D21-7372-48FE-B2C3-DE1807F1157D}.Release|x64.Build.0 = Release|Any CPU - {FAB78D21-7372-48FE-B2C3-DE1807F1157D}.Release|x86.ActiveCfg = Release|Any CPU - {FAB78D21-7372-48FE-B2C3-DE1807F1157D}.Release|x86.Build.0 = Release|Any CPU - {EADFA337-B0FA-4712-A24A-7C08235BDF98}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {EADFA337-B0FA-4712-A24A-7C08235BDF98}.Debug|Any CPU.Build.0 = Debug|Any CPU - {EADFA337-B0FA-4712-A24A-7C08235BDF98}.Debug|x64.ActiveCfg = Debug|Any CPU - {EADFA337-B0FA-4712-A24A-7C08235BDF98}.Debug|x64.Build.0 = Debug|Any CPU - {EADFA337-B0FA-4712-A24A-7C08235BDF98}.Debug|x86.ActiveCfg = Debug|Any CPU - {EADFA337-B0FA-4712-A24A-7C08235BDF98}.Debug|x86.Build.0 = Debug|Any CPU - {EADFA337-B0FA-4712-A24A-7C08235BDF98}.Release|Any CPU.ActiveCfg = Release|Any CPU - {EADFA337-B0FA-4712-A24A-7C08235BDF98}.Release|Any CPU.Build.0 = Release|Any CPU - {EADFA337-B0FA-4712-A24A-7C08235BDF98}.Release|x64.ActiveCfg = Release|Any CPU - {EADFA337-B0FA-4712-A24A-7C08235BDF98}.Release|x64.Build.0 = Release|Any CPU - {EADFA337-B0FA-4712-A24A-7C08235BDF98}.Release|x86.ActiveCfg = Release|Any CPU - {EADFA337-B0FA-4712-A24A-7C08235BDF98}.Release|x86.Build.0 = Release|Any CPU - {110F7EC2-3149-4D1B-A972-E69E79F1EBF5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {110F7EC2-3149-4D1B-A972-E69E79F1EBF5}.Debug|Any CPU.Build.0 = Debug|Any CPU - {110F7EC2-3149-4D1B-A972-E69E79F1EBF5}.Debug|x64.ActiveCfg = Debug|Any CPU - {110F7EC2-3149-4D1B-A972-E69E79F1EBF5}.Debug|x64.Build.0 = Debug|Any CPU - 
{110F7EC2-3149-4D1B-A972-E69E79F1EBF5}.Debug|x86.ActiveCfg = Debug|Any CPU - {110F7EC2-3149-4D1B-A972-E69E79F1EBF5}.Debug|x86.Build.0 = Debug|Any CPU - {110F7EC2-3149-4D1B-A972-E69E79F1EBF5}.Release|Any CPU.ActiveCfg = Release|Any CPU - {110F7EC2-3149-4D1B-A972-E69E79F1EBF5}.Release|Any CPU.Build.0 = Release|Any CPU - {110F7EC2-3149-4D1B-A972-E69E79F1EBF5}.Release|x64.ActiveCfg = Release|Any CPU - {110F7EC2-3149-4D1B-A972-E69E79F1EBF5}.Release|x64.Build.0 = Release|Any CPU - {110F7EC2-3149-4D1B-A972-E69E79F1EBF5}.Release|x86.ActiveCfg = Release|Any CPU - {110F7EC2-3149-4D1B-A972-E69E79F1EBF5}.Release|x86.Build.0 = Release|Any CPU - {B84FE2DD-A1AD-437C-95CF-89C1DCCFDF6F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {B84FE2DD-A1AD-437C-95CF-89C1DCCFDF6F}.Debug|Any CPU.Build.0 = Debug|Any CPU - {B84FE2DD-A1AD-437C-95CF-89C1DCCFDF6F}.Debug|x64.ActiveCfg = Debug|Any CPU - {B84FE2DD-A1AD-437C-95CF-89C1DCCFDF6F}.Debug|x64.Build.0 = Debug|Any CPU - {B84FE2DD-A1AD-437C-95CF-89C1DCCFDF6F}.Debug|x86.ActiveCfg = Debug|Any CPU - {B84FE2DD-A1AD-437C-95CF-89C1DCCFDF6F}.Debug|x86.Build.0 = Debug|Any CPU - {B84FE2DD-A1AD-437C-95CF-89C1DCCFDF6F}.Release|Any CPU.ActiveCfg = Release|Any CPU - {B84FE2DD-A1AD-437C-95CF-89C1DCCFDF6F}.Release|Any CPU.Build.0 = Release|Any CPU - {B84FE2DD-A1AD-437C-95CF-89C1DCCFDF6F}.Release|x64.ActiveCfg = Release|Any CPU - {B84FE2DD-A1AD-437C-95CF-89C1DCCFDF6F}.Release|x64.Build.0 = Release|Any CPU - {B84FE2DD-A1AD-437C-95CF-89C1DCCFDF6F}.Release|x86.ActiveCfg = Release|Any CPU - {B84FE2DD-A1AD-437C-95CF-89C1DCCFDF6F}.Release|x86.Build.0 = Release|Any CPU - {3288F0F8-FF86-4DB3-A1FD-8EB51893E8C2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {3288F0F8-FF86-4DB3-A1FD-8EB51893E8C2}.Debug|Any CPU.Build.0 = Debug|Any CPU - {3288F0F8-FF86-4DB3-A1FD-8EB51893E8C2}.Debug|x64.ActiveCfg = Debug|Any CPU - {3288F0F8-FF86-4DB3-A1FD-8EB51893E8C2}.Debug|x64.Build.0 = Debug|Any CPU - {3288F0F8-FF86-4DB3-A1FD-8EB51893E8C2}.Debug|x86.ActiveCfg = Debug|Any CPU - {3288F0F8-FF86-4DB3-A1FD-8EB51893E8C2}.Debug|x86.Build.0 = Debug|Any CPU - {3288F0F8-FF86-4DB3-A1FD-8EB51893E8C2}.Release|Any CPU.ActiveCfg = Release|Any CPU - {3288F0F8-FF86-4DB3-A1FD-8EB51893E8C2}.Release|Any CPU.Build.0 = Release|Any CPU - {3288F0F8-FF86-4DB3-A1FD-8EB51893E8C2}.Release|x64.ActiveCfg = Release|Any CPU - {3288F0F8-FF86-4DB3-A1FD-8EB51893E8C2}.Release|x64.Build.0 = Release|Any CPU - {3288F0F8-FF86-4DB3-A1FD-8EB51893E8C2}.Release|x86.ActiveCfg = Release|Any CPU - {3288F0F8-FF86-4DB3-A1FD-8EB51893E8C2}.Release|x86.Build.0 = Release|Any CPU - {680CA103-DCE8-4D02-8979-72DEA5BE8C00}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {680CA103-DCE8-4D02-8979-72DEA5BE8C00}.Debug|Any CPU.Build.0 = Debug|Any CPU - {680CA103-DCE8-4D02-8979-72DEA5BE8C00}.Debug|x64.ActiveCfg = Debug|Any CPU - {680CA103-DCE8-4D02-8979-72DEA5BE8C00}.Debug|x64.Build.0 = Debug|Any CPU - {680CA103-DCE8-4D02-8979-72DEA5BE8C00}.Debug|x86.ActiveCfg = Debug|Any CPU - {680CA103-DCE8-4D02-8979-72DEA5BE8C00}.Debug|x86.Build.0 = Debug|Any CPU - {680CA103-DCE8-4D02-8979-72DEA5BE8C00}.Release|Any CPU.ActiveCfg = Release|Any CPU - {680CA103-DCE8-4D02-8979-72DEA5BE8C00}.Release|Any CPU.Build.0 = Release|Any CPU - {680CA103-DCE8-4D02-8979-72DEA5BE8C00}.Release|x64.ActiveCfg = Release|Any CPU - {680CA103-DCE8-4D02-8979-72DEA5BE8C00}.Release|x64.Build.0 = Release|Any CPU - {680CA103-DCE8-4D02-8979-72DEA5BE8C00}.Release|x86.ActiveCfg = Release|Any CPU - {680CA103-DCE8-4D02-8979-72DEA5BE8C00}.Release|x86.Build.0 = Release|Any CPU - {7F4B19D4-569A-4CCF-B481-EBE04860451A}.Debug|Any 
CPU.ActiveCfg = Debug|Any CPU - {7F4B19D4-569A-4CCF-B481-EBE04860451A}.Debug|Any CPU.Build.0 = Debug|Any CPU - {7F4B19D4-569A-4CCF-B481-EBE04860451A}.Debug|x64.ActiveCfg = Debug|Any CPU - {7F4B19D4-569A-4CCF-B481-EBE04860451A}.Debug|x64.Build.0 = Debug|Any CPU - {7F4B19D4-569A-4CCF-B481-EBE04860451A}.Debug|x86.ActiveCfg = Debug|Any CPU - {7F4B19D4-569A-4CCF-B481-EBE04860451A}.Debug|x86.Build.0 = Debug|Any CPU - {7F4B19D4-569A-4CCF-B481-EBE04860451A}.Release|Any CPU.ActiveCfg = Release|Any CPU - {7F4B19D4-569A-4CCF-B481-EBE04860451A}.Release|Any CPU.Build.0 = Release|Any CPU - {7F4B19D4-569A-4CCF-B481-EBE04860451A}.Release|x64.ActiveCfg = Release|Any CPU - {7F4B19D4-569A-4CCF-B481-EBE04860451A}.Release|x64.Build.0 = Release|Any CPU - {7F4B19D4-569A-4CCF-B481-EBE04860451A}.Release|x86.ActiveCfg = Release|Any CPU - {7F4B19D4-569A-4CCF-B481-EBE04860451A}.Release|x86.Build.0 = Release|Any CPU - {DE9863B5-E6D6-4C5F-B52A-ED9E964008A3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {DE9863B5-E6D6-4C5F-B52A-ED9E964008A3}.Debug|Any CPU.Build.0 = Debug|Any CPU - {DE9863B5-E6D6-4C5F-B52A-ED9E964008A3}.Debug|x64.ActiveCfg = Debug|Any CPU - {DE9863B5-E6D6-4C5F-B52A-ED9E964008A3}.Debug|x64.Build.0 = Debug|Any CPU - {DE9863B5-E6D6-4C5F-B52A-ED9E964008A3}.Debug|x86.ActiveCfg = Debug|Any CPU - {DE9863B5-E6D6-4C5F-B52A-ED9E964008A3}.Debug|x86.Build.0 = Debug|Any CPU - {DE9863B5-E6D6-4C5F-B52A-ED9E964008A3}.Release|Any CPU.ActiveCfg = Release|Any CPU - {DE9863B5-E6D6-4C5F-B52A-ED9E964008A3}.Release|Any CPU.Build.0 = Release|Any CPU - {DE9863B5-E6D6-4C5F-B52A-ED9E964008A3}.Release|x64.ActiveCfg = Release|Any CPU - {DE9863B5-E6D6-4C5F-B52A-ED9E964008A3}.Release|x64.Build.0 = Release|Any CPU - {DE9863B5-E6D6-4C5F-B52A-ED9E964008A3}.Release|x86.ActiveCfg = Release|Any CPU - {DE9863B5-E6D6-4C5F-B52A-ED9E964008A3}.Release|x86.Build.0 = Release|Any CPU - {E380F242-031E-483E-8570-0EF7EA525C4F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {E380F242-031E-483E-8570-0EF7EA525C4F}.Debug|Any CPU.Build.0 = Debug|Any CPU - {E380F242-031E-483E-8570-0EF7EA525C4F}.Debug|x64.ActiveCfg = Debug|Any CPU - {E380F242-031E-483E-8570-0EF7EA525C4F}.Debug|x64.Build.0 = Debug|Any CPU - {E380F242-031E-483E-8570-0EF7EA525C4F}.Debug|x86.ActiveCfg = Debug|Any CPU - {E380F242-031E-483E-8570-0EF7EA525C4F}.Debug|x86.Build.0 = Debug|Any CPU - {E380F242-031E-483E-8570-0EF7EA525C4F}.Release|Any CPU.ActiveCfg = Release|Any CPU - {E380F242-031E-483E-8570-0EF7EA525C4F}.Release|Any CPU.Build.0 = Release|Any CPU - {E380F242-031E-483E-8570-0EF7EA525C4F}.Release|x64.ActiveCfg = Release|Any CPU - {E380F242-031E-483E-8570-0EF7EA525C4F}.Release|x64.Build.0 = Release|Any CPU - {E380F242-031E-483E-8570-0EF7EA525C4F}.Release|x86.ActiveCfg = Release|Any CPU - {E380F242-031E-483E-8570-0EF7EA525C4F}.Release|x86.Build.0 = Release|Any CPU - {42582C16-F5A9-417F-9D33-BC489925324F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {42582C16-F5A9-417F-9D33-BC489925324F}.Debug|Any CPU.Build.0 = Debug|Any CPU - {42582C16-F5A9-417F-9D33-BC489925324F}.Debug|x64.ActiveCfg = Debug|Any CPU - {42582C16-F5A9-417F-9D33-BC489925324F}.Debug|x64.Build.0 = Debug|Any CPU - {42582C16-F5A9-417F-9D33-BC489925324F}.Debug|x86.ActiveCfg = Debug|Any CPU - {42582C16-F5A9-417F-9D33-BC489925324F}.Debug|x86.Build.0 = Debug|Any CPU - {42582C16-F5A9-417F-9D33-BC489925324F}.Release|Any CPU.ActiveCfg = Release|Any CPU - {42582C16-F5A9-417F-9D33-BC489925324F}.Release|Any CPU.Build.0 = Release|Any CPU - {42582C16-F5A9-417F-9D33-BC489925324F}.Release|x64.ActiveCfg = Release|Any CPU - 
{42582C16-F5A9-417F-9D33-BC489925324F}.Release|x64.Build.0 = Release|Any CPU - {42582C16-F5A9-417F-9D33-BC489925324F}.Release|x86.ActiveCfg = Release|Any CPU - {42582C16-F5A9-417F-9D33-BC489925324F}.Release|x86.Build.0 = Release|Any CPU - {06F40DA8-FEFA-4C2B-907B-155BD92BB859}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {06F40DA8-FEFA-4C2B-907B-155BD92BB859}.Debug|Any CPU.Build.0 = Debug|Any CPU - {06F40DA8-FEFA-4C2B-907B-155BD92BB859}.Debug|x64.ActiveCfg = Debug|Any CPU - {06F40DA8-FEFA-4C2B-907B-155BD92BB859}.Debug|x64.Build.0 = Debug|Any CPU - {06F40DA8-FEFA-4C2B-907B-155BD92BB859}.Debug|x86.ActiveCfg = Debug|Any CPU - {06F40DA8-FEFA-4C2B-907B-155BD92BB859}.Debug|x86.Build.0 = Debug|Any CPU - {06F40DA8-FEFA-4C2B-907B-155BD92BB859}.Release|Any CPU.ActiveCfg = Release|Any CPU - {06F40DA8-FEFA-4C2B-907B-155BD92BB859}.Release|Any CPU.Build.0 = Release|Any CPU - {06F40DA8-FEFA-4C2B-907B-155BD92BB859}.Release|x64.ActiveCfg = Release|Any CPU - {06F40DA8-FEFA-4C2B-907B-155BD92BB859}.Release|x64.Build.0 = Release|Any CPU - {06F40DA8-FEFA-4C2B-907B-155BD92BB859}.Release|x86.ActiveCfg = Release|Any CPU - {06F40DA8-FEFA-4C2B-907B-155BD92BB859}.Release|x86.Build.0 = Release|Any CPU - {A2E3F03A-0CAD-4E2A-8C71-DDEBB1B7E4F7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {A2E3F03A-0CAD-4E2A-8C71-DDEBB1B7E4F7}.Debug|Any CPU.Build.0 = Debug|Any CPU - {A2E3F03A-0CAD-4E2A-8C71-DDEBB1B7E4F7}.Debug|x64.ActiveCfg = Debug|Any CPU - {A2E3F03A-0CAD-4E2A-8C71-DDEBB1B7E4F7}.Debug|x64.Build.0 = Debug|Any CPU - {A2E3F03A-0CAD-4E2A-8C71-DDEBB1B7E4F7}.Debug|x86.ActiveCfg = Debug|Any CPU - {A2E3F03A-0CAD-4E2A-8C71-DDEBB1B7E4F7}.Debug|x86.Build.0 = Debug|Any CPU - {A2E3F03A-0CAD-4E2A-8C71-DDEBB1B7E4F7}.Release|Any CPU.ActiveCfg = Release|Any CPU - {A2E3F03A-0CAD-4E2A-8C71-DDEBB1B7E4F7}.Release|Any CPU.Build.0 = Release|Any CPU - {A2E3F03A-0CAD-4E2A-8C71-DDEBB1B7E4F7}.Release|x64.ActiveCfg = Release|Any CPU - {A2E3F03A-0CAD-4E2A-8C71-DDEBB1B7E4F7}.Release|x64.Build.0 = Release|Any CPU - {A2E3F03A-0CAD-4E2A-8C71-DDEBB1B7E4F7}.Release|x86.ActiveCfg = Release|Any CPU - {A2E3F03A-0CAD-4E2A-8C71-DDEBB1B7E4F7}.Release|x86.Build.0 = Release|Any CPU - {3A1AF0AD-4DAE-4D82-9CCF-2DCB83CC3679}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {3A1AF0AD-4DAE-4D82-9CCF-2DCB83CC3679}.Debug|Any CPU.Build.0 = Debug|Any CPU - {3A1AF0AD-4DAE-4D82-9CCF-2DCB83CC3679}.Debug|x64.ActiveCfg = Debug|Any CPU - {3A1AF0AD-4DAE-4D82-9CCF-2DCB83CC3679}.Debug|x64.Build.0 = Debug|Any CPU - {3A1AF0AD-4DAE-4D82-9CCF-2DCB83CC3679}.Debug|x86.ActiveCfg = Debug|Any CPU - {3A1AF0AD-4DAE-4D82-9CCF-2DCB83CC3679}.Debug|x86.Build.0 = Debug|Any CPU - {3A1AF0AD-4DAE-4D82-9CCF-2DCB83CC3679}.Release|Any CPU.ActiveCfg = Release|Any CPU - {3A1AF0AD-4DAE-4D82-9CCF-2DCB83CC3679}.Release|Any CPU.Build.0 = Release|Any CPU - {3A1AF0AD-4DAE-4D82-9CCF-2DCB83CC3679}.Release|x64.ActiveCfg = Release|Any CPU - {3A1AF0AD-4DAE-4D82-9CCF-2DCB83CC3679}.Release|x64.Build.0 = Release|Any CPU - {3A1AF0AD-4DAE-4D82-9CCF-2DCB83CC3679}.Release|x86.ActiveCfg = Release|Any CPU - {3A1AF0AD-4DAE-4D82-9CCF-2DCB83CC3679}.Release|x86.Build.0 = Release|Any CPU - {F1DF0F07-1BCB-4B55-8353-07BF8A4B2A67}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {F1DF0F07-1BCB-4B55-8353-07BF8A4B2A67}.Debug|Any CPU.Build.0 = Debug|Any CPU - {F1DF0F07-1BCB-4B55-8353-07BF8A4B2A67}.Debug|x64.ActiveCfg = Debug|Any CPU - {F1DF0F07-1BCB-4B55-8353-07BF8A4B2A67}.Debug|x64.Build.0 = Debug|Any CPU - {F1DF0F07-1BCB-4B55-8353-07BF8A4B2A67}.Debug|x86.ActiveCfg = Debug|Any CPU - {F1DF0F07-1BCB-4B55-8353-07BF8A4B2A67}.Debug|x86.Build.0 = Debug|Any 
CPU - {F1DF0F07-1BCB-4B55-8353-07BF8A4B2A67}.Release|Any CPU.ActiveCfg = Release|Any CPU - {F1DF0F07-1BCB-4B55-8353-07BF8A4B2A67}.Release|Any CPU.Build.0 = Release|Any CPU - {F1DF0F07-1BCB-4B55-8353-07BF8A4B2A67}.Release|x64.ActiveCfg = Release|Any CPU - {F1DF0F07-1BCB-4B55-8353-07BF8A4B2A67}.Release|x64.Build.0 = Release|Any CPU - {F1DF0F07-1BCB-4B55-8353-07BF8A4B2A67}.Release|x86.ActiveCfg = Release|Any CPU - {F1DF0F07-1BCB-4B55-8353-07BF8A4B2A67}.Release|x86.Build.0 = Release|Any CPU - {781EC793-1DB0-4E31-95BC-12A2B373045F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {781EC793-1DB0-4E31-95BC-12A2B373045F}.Debug|Any CPU.Build.0 = Debug|Any CPU - {781EC793-1DB0-4E31-95BC-12A2B373045F}.Debug|x64.ActiveCfg = Debug|Any CPU - {781EC793-1DB0-4E31-95BC-12A2B373045F}.Debug|x64.Build.0 = Debug|Any CPU - {781EC793-1DB0-4E31-95BC-12A2B373045F}.Debug|x86.ActiveCfg = Debug|Any CPU - {781EC793-1DB0-4E31-95BC-12A2B373045F}.Debug|x86.Build.0 = Debug|Any CPU - {781EC793-1DB0-4E31-95BC-12A2B373045F}.Release|Any CPU.ActiveCfg = Release|Any CPU - {781EC793-1DB0-4E31-95BC-12A2B373045F}.Release|Any CPU.Build.0 = Release|Any CPU - {781EC793-1DB0-4E31-95BC-12A2B373045F}.Release|x64.ActiveCfg = Release|Any CPU - {781EC793-1DB0-4E31-95BC-12A2B373045F}.Release|x64.Build.0 = Release|Any CPU - {781EC793-1DB0-4E31-95BC-12A2B373045F}.Release|x86.ActiveCfg = Release|Any CPU - {781EC793-1DB0-4E31-95BC-12A2B373045F}.Release|x86.Build.0 = Release|Any CPU - {BB863E0C-50FF-41AE-9C13-4E8A1BABC62C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {BB863E0C-50FF-41AE-9C13-4E8A1BABC62C}.Debug|Any CPU.Build.0 = Debug|Any CPU - {BB863E0C-50FF-41AE-9C13-4E8A1BABC62C}.Debug|x64.ActiveCfg = Debug|Any CPU - {BB863E0C-50FF-41AE-9C13-4E8A1BABC62C}.Debug|x64.Build.0 = Debug|Any CPU - {BB863E0C-50FF-41AE-9C13-4E8A1BABC62C}.Debug|x86.ActiveCfg = Debug|Any CPU - {BB863E0C-50FF-41AE-9C13-4E8A1BABC62C}.Debug|x86.Build.0 = Debug|Any CPU - {BB863E0C-50FF-41AE-9C13-4E8A1BABC62C}.Release|Any CPU.ActiveCfg = Release|Any CPU - {BB863E0C-50FF-41AE-9C13-4E8A1BABC62C}.Release|Any CPU.Build.0 = Release|Any CPU - {BB863E0C-50FF-41AE-9C13-4E8A1BABC62C}.Release|x64.ActiveCfg = Release|Any CPU - {BB863E0C-50FF-41AE-9C13-4E8A1BABC62C}.Release|x64.Build.0 = Release|Any CPU - {BB863E0C-50FF-41AE-9C13-4E8A1BABC62C}.Release|x86.ActiveCfg = Release|Any CPU - {BB863E0C-50FF-41AE-9C13-4E8A1BABC62C}.Release|x86.Build.0 = Release|Any CPU - {14E9D043-F0EF-4F68-AE83-D6F579119D9A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {14E9D043-F0EF-4F68-AE83-D6F579119D9A}.Debug|Any CPU.Build.0 = Debug|Any CPU - {14E9D043-F0EF-4F68-AE83-D6F579119D9A}.Debug|x64.ActiveCfg = Debug|Any CPU - {14E9D043-F0EF-4F68-AE83-D6F579119D9A}.Debug|x64.Build.0 = Debug|Any CPU - {14E9D043-F0EF-4F68-AE83-D6F579119D9A}.Debug|x86.ActiveCfg = Debug|Any CPU - {14E9D043-F0EF-4F68-AE83-D6F579119D9A}.Debug|x86.Build.0 = Debug|Any CPU - {14E9D043-F0EF-4F68-AE83-D6F579119D9A}.Release|Any CPU.ActiveCfg = Release|Any CPU - {14E9D043-F0EF-4F68-AE83-D6F579119D9A}.Release|Any CPU.Build.0 = Release|Any CPU - {14E9D043-F0EF-4F68-AE83-D6F579119D9A}.Release|x64.ActiveCfg = Release|Any CPU - {14E9D043-F0EF-4F68-AE83-D6F579119D9A}.Release|x64.Build.0 = Release|Any CPU - {14E9D043-F0EF-4F68-AE83-D6F579119D9A}.Release|x86.ActiveCfg = Release|Any CPU - {14E9D043-F0EF-4F68-AE83-D6F579119D9A}.Release|x86.Build.0 = Release|Any CPU - {27E94B6E-DEF8-4B89-97CB-424703790ECE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {27E94B6E-DEF8-4B89-97CB-424703790ECE}.Debug|Any CPU.Build.0 = Debug|Any CPU - 
{27E94B6E-DEF8-4B89-97CB-424703790ECE}.Debug|x64.ActiveCfg = Debug|Any CPU - {27E94B6E-DEF8-4B89-97CB-424703790ECE}.Debug|x64.Build.0 = Debug|Any CPU - {27E94B6E-DEF8-4B89-97CB-424703790ECE}.Debug|x86.ActiveCfg = Debug|Any CPU - {27E94B6E-DEF8-4B89-97CB-424703790ECE}.Debug|x86.Build.0 = Debug|Any CPU - {27E94B6E-DEF8-4B89-97CB-424703790ECE}.Release|Any CPU.ActiveCfg = Release|Any CPU - {27E94B6E-DEF8-4B89-97CB-424703790ECE}.Release|Any CPU.Build.0 = Release|Any CPU - {27E94B6E-DEF8-4B89-97CB-424703790ECE}.Release|x64.ActiveCfg = Release|Any CPU - {27E94B6E-DEF8-4B89-97CB-424703790ECE}.Release|x64.Build.0 = Release|Any CPU - {27E94B6E-DEF8-4B89-97CB-424703790ECE}.Release|x86.ActiveCfg = Release|Any CPU - {27E94B6E-DEF8-4B89-97CB-424703790ECE}.Release|x86.Build.0 = Release|Any CPU - {361E3E23-B215-423D-9906-A84171E20AD3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {361E3E23-B215-423D-9906-A84171E20AD3}.Debug|Any CPU.Build.0 = Debug|Any CPU - {361E3E23-B215-423D-9906-A84171E20AD3}.Debug|x64.ActiveCfg = Debug|Any CPU - {361E3E23-B215-423D-9906-A84171E20AD3}.Debug|x64.Build.0 = Debug|Any CPU - {361E3E23-B215-423D-9906-A84171E20AD3}.Debug|x86.ActiveCfg = Debug|Any CPU - {361E3E23-B215-423D-9906-A84171E20AD3}.Debug|x86.Build.0 = Debug|Any CPU - {361E3E23-B215-423D-9906-A84171E20AD3}.Release|Any CPU.ActiveCfg = Release|Any CPU - {361E3E23-B215-423D-9906-A84171E20AD3}.Release|Any CPU.Build.0 = Release|Any CPU - {361E3E23-B215-423D-9906-A84171E20AD3}.Release|x64.ActiveCfg = Release|Any CPU - {361E3E23-B215-423D-9906-A84171E20AD3}.Release|x64.Build.0 = Release|Any CPU - {361E3E23-B215-423D-9906-A84171E20AD3}.Release|x86.ActiveCfg = Release|Any CPU - {361E3E23-B215-423D-9906-A84171E20AD3}.Release|x86.Build.0 = Release|Any CPU - {7A7A3480-C6C3-4A9F-AF46-1889424B9AC2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {7A7A3480-C6C3-4A9F-AF46-1889424B9AC2}.Debug|Any CPU.Build.0 = Debug|Any CPU - {7A7A3480-C6C3-4A9F-AF46-1889424B9AC2}.Debug|x64.ActiveCfg = Debug|Any CPU - {7A7A3480-C6C3-4A9F-AF46-1889424B9AC2}.Debug|x64.Build.0 = Debug|Any CPU - {7A7A3480-C6C3-4A9F-AF46-1889424B9AC2}.Debug|x86.ActiveCfg = Debug|Any CPU - {7A7A3480-C6C3-4A9F-AF46-1889424B9AC2}.Debug|x86.Build.0 = Debug|Any CPU - {7A7A3480-C6C3-4A9F-AF46-1889424B9AC2}.Release|Any CPU.ActiveCfg = Release|Any CPU - {7A7A3480-C6C3-4A9F-AF46-1889424B9AC2}.Release|Any CPU.Build.0 = Release|Any CPU - {7A7A3480-C6C3-4A9F-AF46-1889424B9AC2}.Release|x64.ActiveCfg = Release|Any CPU - {7A7A3480-C6C3-4A9F-AF46-1889424B9AC2}.Release|x64.Build.0 = Release|Any CPU - {7A7A3480-C6C3-4A9F-AF46-1889424B9AC2}.Release|x86.ActiveCfg = Release|Any CPU - {7A7A3480-C6C3-4A9F-AF46-1889424B9AC2}.Release|x86.Build.0 = Release|Any CPU - {C3EAFCB8-0394-4B74-B9A6-3DBA4509201F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {C3EAFCB8-0394-4B74-B9A6-3DBA4509201F}.Debug|Any CPU.Build.0 = Debug|Any CPU - {C3EAFCB8-0394-4B74-B9A6-3DBA4509201F}.Debug|x64.ActiveCfg = Debug|Any CPU - {C3EAFCB8-0394-4B74-B9A6-3DBA4509201F}.Debug|x64.Build.0 = Debug|Any CPU - {C3EAFCB8-0394-4B74-B9A6-3DBA4509201F}.Debug|x86.ActiveCfg = Debug|Any CPU - {C3EAFCB8-0394-4B74-B9A6-3DBA4509201F}.Debug|x86.Build.0 = Debug|Any CPU - {C3EAFCB8-0394-4B74-B9A6-3DBA4509201F}.Release|Any CPU.ActiveCfg = Release|Any CPU - {C3EAFCB8-0394-4B74-B9A6-3DBA4509201F}.Release|Any CPU.Build.0 = Release|Any CPU - {C3EAFCB8-0394-4B74-B9A6-3DBA4509201F}.Release|x64.ActiveCfg = Release|Any CPU - {C3EAFCB8-0394-4B74-B9A6-3DBA4509201F}.Release|x64.Build.0 = Release|Any CPU - {C3EAFCB8-0394-4B74-B9A6-3DBA4509201F}.Release|x86.ActiveCfg = 
Release|Any CPU - {C3EAFCB8-0394-4B74-B9A6-3DBA4509201F}.Release|x86.Build.0 = Release|Any CPU - {E86CF4A6-2463-4589-A9D8-9DF557C48367}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {E86CF4A6-2463-4589-A9D8-9DF557C48367}.Debug|Any CPU.Build.0 = Debug|Any CPU - {E86CF4A6-2463-4589-A9D8-9DF557C48367}.Debug|x64.ActiveCfg = Debug|Any CPU - {E86CF4A6-2463-4589-A9D8-9DF557C48367}.Debug|x64.Build.0 = Debug|Any CPU - {E86CF4A6-2463-4589-A9D8-9DF557C48367}.Debug|x86.ActiveCfg = Debug|Any CPU - {E86CF4A6-2463-4589-A9D8-9DF557C48367}.Debug|x86.Build.0 = Debug|Any CPU - {E86CF4A6-2463-4589-A9D8-9DF557C48367}.Release|Any CPU.ActiveCfg = Release|Any CPU - {E86CF4A6-2463-4589-A9D8-9DF557C48367}.Release|Any CPU.Build.0 = Release|Any CPU - {E86CF4A6-2463-4589-A9D8-9DF557C48367}.Release|x64.ActiveCfg = Release|Any CPU - {E86CF4A6-2463-4589-A9D8-9DF557C48367}.Release|x64.Build.0 = Release|Any CPU - {E86CF4A6-2463-4589-A9D8-9DF557C48367}.Release|x86.ActiveCfg = Release|Any CPU - {E86CF4A6-2463-4589-A9D8-9DF557C48367}.Release|x86.Build.0 = Release|Any CPU - {B308B94C-E01F-4449-A5A6-CD7A48E52D15}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {B308B94C-E01F-4449-A5A6-CD7A48E52D15}.Debug|Any CPU.Build.0 = Debug|Any CPU - {B308B94C-E01F-4449-A5A6-CD7A48E52D15}.Debug|x64.ActiveCfg = Debug|Any CPU - {B308B94C-E01F-4449-A5A6-CD7A48E52D15}.Debug|x64.Build.0 = Debug|Any CPU - {B308B94C-E01F-4449-A5A6-CD7A48E52D15}.Debug|x86.ActiveCfg = Debug|Any CPU - {B308B94C-E01F-4449-A5A6-CD7A48E52D15}.Debug|x86.Build.0 = Debug|Any CPU - {B308B94C-E01F-4449-A5A6-CD7A48E52D15}.Release|Any CPU.ActiveCfg = Release|Any CPU - {B308B94C-E01F-4449-A5A6-CD7A48E52D15}.Release|Any CPU.Build.0 = Release|Any CPU - {B308B94C-E01F-4449-A5A6-CD7A48E52D15}.Release|x64.ActiveCfg = Release|Any CPU - {B308B94C-E01F-4449-A5A6-CD7A48E52D15}.Release|x64.Build.0 = Release|Any CPU - {B308B94C-E01F-4449-A5A6-CD7A48E52D15}.Release|x86.ActiveCfg = Release|Any CPU - {B308B94C-E01F-4449-A5A6-CD7A48E52D15}.Release|x86.Build.0 = Release|Any CPU - {9FBA3EC4-D794-48BD-82FA-0289E5A2A5FF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {9FBA3EC4-D794-48BD-82FA-0289E5A2A5FF}.Debug|Any CPU.Build.0 = Debug|Any CPU - {9FBA3EC4-D794-48BD-82FA-0289E5A2A5FF}.Debug|x64.ActiveCfg = Debug|Any CPU - {9FBA3EC4-D794-48BD-82FA-0289E5A2A5FF}.Debug|x64.Build.0 = Debug|Any CPU - {9FBA3EC4-D794-48BD-82FA-0289E5A2A5FF}.Debug|x86.ActiveCfg = Debug|Any CPU - {9FBA3EC4-D794-48BD-82FA-0289E5A2A5FF}.Debug|x86.Build.0 = Debug|Any CPU - {9FBA3EC4-D794-48BD-82FA-0289E5A2A5FF}.Release|Any CPU.ActiveCfg = Release|Any CPU - {9FBA3EC4-D794-48BD-82FA-0289E5A2A5FF}.Release|Any CPU.Build.0 = Release|Any CPU - {9FBA3EC4-D794-48BD-82FA-0289E5A2A5FF}.Release|x64.ActiveCfg = Release|Any CPU - {9FBA3EC4-D794-48BD-82FA-0289E5A2A5FF}.Release|x64.Build.0 = Release|Any CPU - {9FBA3EC4-D794-48BD-82FA-0289E5A2A5FF}.Release|x86.ActiveCfg = Release|Any CPU - {9FBA3EC4-D794-48BD-82FA-0289E5A2A5FF}.Release|x86.Build.0 = Release|Any CPU - {E076DC9C-B436-44BF-B02E-FA565086F805}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {E076DC9C-B436-44BF-B02E-FA565086F805}.Debug|Any CPU.Build.0 = Debug|Any CPU - {E076DC9C-B436-44BF-B02E-FA565086F805}.Debug|x64.ActiveCfg = Debug|Any CPU - {E076DC9C-B436-44BF-B02E-FA565086F805}.Debug|x64.Build.0 = Debug|Any CPU - {E076DC9C-B436-44BF-B02E-FA565086F805}.Debug|x86.ActiveCfg = Debug|Any CPU - {E076DC9C-B436-44BF-B02E-FA565086F805}.Debug|x86.Build.0 = Debug|Any CPU - {E076DC9C-B436-44BF-B02E-FA565086F805}.Release|Any CPU.ActiveCfg = Release|Any CPU - {E076DC9C-B436-44BF-B02E-FA565086F805}.Release|Any 
CPU.Build.0 = Release|Any CPU - {E076DC9C-B436-44BF-B02E-FA565086F805}.Release|x64.ActiveCfg = Release|Any CPU - {E076DC9C-B436-44BF-B02E-FA565086F805}.Release|x64.Build.0 = Release|Any CPU - {E076DC9C-B436-44BF-B02E-FA565086F805}.Release|x86.ActiveCfg = Release|Any CPU - {E076DC9C-B436-44BF-B02E-FA565086F805}.Release|x86.Build.0 = Release|Any CPU - {55500025-FE82-4F97-A261-9BAEA4B10845}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {55500025-FE82-4F97-A261-9BAEA4B10845}.Debug|Any CPU.Build.0 = Debug|Any CPU - {55500025-FE82-4F97-A261-9BAEA4B10845}.Debug|x64.ActiveCfg = Debug|Any CPU - {55500025-FE82-4F97-A261-9BAEA4B10845}.Debug|x64.Build.0 = Debug|Any CPU - {55500025-FE82-4F97-A261-9BAEA4B10845}.Debug|x86.ActiveCfg = Debug|Any CPU - {55500025-FE82-4F97-A261-9BAEA4B10845}.Debug|x86.Build.0 = Debug|Any CPU - {55500025-FE82-4F97-A261-9BAEA4B10845}.Release|Any CPU.ActiveCfg = Release|Any CPU - {55500025-FE82-4F97-A261-9BAEA4B10845}.Release|Any CPU.Build.0 = Release|Any CPU - {55500025-FE82-4F97-A261-9BAEA4B10845}.Release|x64.ActiveCfg = Release|Any CPU - {55500025-FE82-4F97-A261-9BAEA4B10845}.Release|x64.Build.0 = Release|Any CPU - {55500025-FE82-4F97-A261-9BAEA4B10845}.Release|x86.ActiveCfg = Release|Any CPU - {55500025-FE82-4F97-A261-9BAEA4B10845}.Release|x86.Build.0 = Release|Any CPU - {CD12875F-9367-41BD-810C-7FBE76314F17}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {CD12875F-9367-41BD-810C-7FBE76314F17}.Debug|Any CPU.Build.0 = Debug|Any CPU - {CD12875F-9367-41BD-810C-7FBE76314F17}.Debug|x64.ActiveCfg = Debug|Any CPU - {CD12875F-9367-41BD-810C-7FBE76314F17}.Debug|x64.Build.0 = Debug|Any CPU - {CD12875F-9367-41BD-810C-7FBE76314F17}.Debug|x86.ActiveCfg = Debug|Any CPU - {CD12875F-9367-41BD-810C-7FBE76314F17}.Debug|x86.Build.0 = Debug|Any CPU - {CD12875F-9367-41BD-810C-7FBE76314F17}.Release|Any CPU.ActiveCfg = Release|Any CPU - {CD12875F-9367-41BD-810C-7FBE76314F17}.Release|Any CPU.Build.0 = Release|Any CPU - {CD12875F-9367-41BD-810C-7FBE76314F17}.Release|x64.ActiveCfg = Release|Any CPU - {CD12875F-9367-41BD-810C-7FBE76314F17}.Release|x64.Build.0 = Release|Any CPU - {CD12875F-9367-41BD-810C-7FBE76314F17}.Release|x86.ActiveCfg = Release|Any CPU - {CD12875F-9367-41BD-810C-7FBE76314F17}.Release|x86.Build.0 = Release|Any CPU - {063D3280-9918-465A-AF2D-3650A2A50D03}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {063D3280-9918-465A-AF2D-3650A2A50D03}.Debug|Any CPU.Build.0 = Debug|Any CPU - {063D3280-9918-465A-AF2D-3650A2A50D03}.Debug|x64.ActiveCfg = Debug|Any CPU - {063D3280-9918-465A-AF2D-3650A2A50D03}.Debug|x64.Build.0 = Debug|Any CPU - {063D3280-9918-465A-AF2D-3650A2A50D03}.Debug|x86.ActiveCfg = Debug|Any CPU - {063D3280-9918-465A-AF2D-3650A2A50D03}.Debug|x86.Build.0 = Debug|Any CPU - {063D3280-9918-465A-AF2D-3650A2A50D03}.Release|Any CPU.ActiveCfg = Release|Any CPU - {063D3280-9918-465A-AF2D-3650A2A50D03}.Release|Any CPU.Build.0 = Release|Any CPU - {063D3280-9918-465A-AF2D-3650A2A50D03}.Release|x64.ActiveCfg = Release|Any CPU - {063D3280-9918-465A-AF2D-3650A2A50D03}.Release|x64.Build.0 = Release|Any CPU - {063D3280-9918-465A-AF2D-3650A2A50D03}.Release|x86.ActiveCfg = Release|Any CPU - {063D3280-9918-465A-AF2D-3650A2A50D03}.Release|x86.Build.0 = Release|Any CPU - {A3EEE400-3655-4B34-915A-598E60CD55FB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {A3EEE400-3655-4B34-915A-598E60CD55FB}.Debug|Any CPU.Build.0 = Debug|Any CPU - {A3EEE400-3655-4B34-915A-598E60CD55FB}.Debug|x64.ActiveCfg = Debug|Any CPU - {A3EEE400-3655-4B34-915A-598E60CD55FB}.Debug|x64.Build.0 = Debug|Any CPU - 
{A3EEE400-3655-4B34-915A-598E60CD55FB}.Debug|x86.ActiveCfg = Debug|Any CPU - {A3EEE400-3655-4B34-915A-598E60CD55FB}.Debug|x86.Build.0 = Debug|Any CPU - {A3EEE400-3655-4B34-915A-598E60CD55FB}.Release|Any CPU.ActiveCfg = Release|Any CPU - {A3EEE400-3655-4B34-915A-598E60CD55FB}.Release|Any CPU.Build.0 = Release|Any CPU - {A3EEE400-3655-4B34-915A-598E60CD55FB}.Release|x64.ActiveCfg = Release|Any CPU - {A3EEE400-3655-4B34-915A-598E60CD55FB}.Release|x64.Build.0 = Release|Any CPU - {A3EEE400-3655-4B34-915A-598E60CD55FB}.Release|x86.ActiveCfg = Release|Any CPU - {A3EEE400-3655-4B34-915A-598E60CD55FB}.Release|x86.Build.0 = Release|Any CPU - {577025AD-2FDD-42DF-BFA2-3FC095B50539}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {577025AD-2FDD-42DF-BFA2-3FC095B50539}.Debug|Any CPU.Build.0 = Debug|Any CPU - {577025AD-2FDD-42DF-BFA2-3FC095B50539}.Debug|x64.ActiveCfg = Debug|Any CPU - {577025AD-2FDD-42DF-BFA2-3FC095B50539}.Debug|x64.Build.0 = Debug|Any CPU - {577025AD-2FDD-42DF-BFA2-3FC095B50539}.Debug|x86.ActiveCfg = Debug|Any CPU - {577025AD-2FDD-42DF-BFA2-3FC095B50539}.Debug|x86.Build.0 = Debug|Any CPU - {577025AD-2FDD-42DF-BFA2-3FC095B50539}.Release|Any CPU.ActiveCfg = Release|Any CPU - {577025AD-2FDD-42DF-BFA2-3FC095B50539}.Release|Any CPU.Build.0 = Release|Any CPU - {577025AD-2FDD-42DF-BFA2-3FC095B50539}.Release|x64.ActiveCfg = Release|Any CPU - {577025AD-2FDD-42DF-BFA2-3FC095B50539}.Release|x64.Build.0 = Release|Any CPU - {577025AD-2FDD-42DF-BFA2-3FC095B50539}.Release|x86.ActiveCfg = Release|Any CPU - {577025AD-2FDD-42DF-BFA2-3FC095B50539}.Release|x86.Build.0 = Release|Any CPU - {DD3B2076-E5E0-4533-8D27-7724225D7758}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {DD3B2076-E5E0-4533-8D27-7724225D7758}.Debug|Any CPU.Build.0 = Debug|Any CPU - {DD3B2076-E5E0-4533-8D27-7724225D7758}.Debug|x64.ActiveCfg = Debug|Any CPU - {DD3B2076-E5E0-4533-8D27-7724225D7758}.Debug|x64.Build.0 = Debug|Any CPU - {DD3B2076-E5E0-4533-8D27-7724225D7758}.Debug|x86.ActiveCfg = Debug|Any CPU - {DD3B2076-E5E0-4533-8D27-7724225D7758}.Debug|x86.Build.0 = Debug|Any CPU - {DD3B2076-E5E0-4533-8D27-7724225D7758}.Release|Any CPU.ActiveCfg = Release|Any CPU - {DD3B2076-E5E0-4533-8D27-7724225D7758}.Release|Any CPU.Build.0 = Release|Any CPU - {DD3B2076-E5E0-4533-8D27-7724225D7758}.Release|x64.ActiveCfg = Release|Any CPU - {DD3B2076-E5E0-4533-8D27-7724225D7758}.Release|x64.Build.0 = Release|Any CPU - {DD3B2076-E5E0-4533-8D27-7724225D7758}.Release|x86.ActiveCfg = Release|Any CPU - {DD3B2076-E5E0-4533-8D27-7724225D7758}.Release|x86.Build.0 = Release|Any CPU - {CADA1364-8EB1-479E-AB6F-4105C26335C8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {CADA1364-8EB1-479E-AB6F-4105C26335C8}.Debug|Any CPU.Build.0 = Debug|Any CPU - {CADA1364-8EB1-479E-AB6F-4105C26335C8}.Debug|x64.ActiveCfg = Debug|Any CPU - {CADA1364-8EB1-479E-AB6F-4105C26335C8}.Debug|x64.Build.0 = Debug|Any CPU - {CADA1364-8EB1-479E-AB6F-4105C26335C8}.Debug|x86.ActiveCfg = Debug|Any CPU - {CADA1364-8EB1-479E-AB6F-4105C26335C8}.Debug|x86.Build.0 = Debug|Any CPU - {CADA1364-8EB1-479E-AB6F-4105C26335C8}.Release|Any CPU.ActiveCfg = Release|Any CPU - {CADA1364-8EB1-479E-AB6F-4105C26335C8}.Release|Any CPU.Build.0 = Release|Any CPU - {CADA1364-8EB1-479E-AB6F-4105C26335C8}.Release|x64.ActiveCfg = Release|Any CPU - {CADA1364-8EB1-479E-AB6F-4105C26335C8}.Release|x64.Build.0 = Release|Any CPU - {CADA1364-8EB1-479E-AB6F-4105C26335C8}.Release|x86.ActiveCfg = Release|Any CPU - {CADA1364-8EB1-479E-AB6F-4105C26335C8}.Release|x86.Build.0 = Release|Any CPU - {8CC4441E-9D1A-4E00-831B-34828A3F9446}.Debug|Any 
CPU.ActiveCfg = Debug|Any CPU - {8CC4441E-9D1A-4E00-831B-34828A3F9446}.Debug|Any CPU.Build.0 = Debug|Any CPU - {8CC4441E-9D1A-4E00-831B-34828A3F9446}.Debug|x64.ActiveCfg = Debug|Any CPU - {8CC4441E-9D1A-4E00-831B-34828A3F9446}.Debug|x64.Build.0 = Debug|Any CPU - {8CC4441E-9D1A-4E00-831B-34828A3F9446}.Debug|x86.ActiveCfg = Debug|Any CPU - {8CC4441E-9D1A-4E00-831B-34828A3F9446}.Debug|x86.Build.0 = Debug|Any CPU - {8CC4441E-9D1A-4E00-831B-34828A3F9446}.Release|Any CPU.ActiveCfg = Release|Any CPU - {8CC4441E-9D1A-4E00-831B-34828A3F9446}.Release|Any CPU.Build.0 = Release|Any CPU - {8CC4441E-9D1A-4E00-831B-34828A3F9446}.Release|x64.ActiveCfg = Release|Any CPU - {8CC4441E-9D1A-4E00-831B-34828A3F9446}.Release|x64.Build.0 = Release|Any CPU - {8CC4441E-9D1A-4E00-831B-34828A3F9446}.Release|x86.ActiveCfg = Release|Any CPU - {8CC4441E-9D1A-4E00-831B-34828A3F9446}.Release|x86.Build.0 = Release|Any CPU - {01B8AC3F-1B97-4F79-93C6-BE1CBA26FE17}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {01B8AC3F-1B97-4F79-93C6-BE1CBA26FE17}.Debug|Any CPU.Build.0 = Debug|Any CPU - {01B8AC3F-1B97-4F79-93C6-BE1CBA26FE17}.Debug|x64.ActiveCfg = Debug|Any CPU - {01B8AC3F-1B97-4F79-93C6-BE1CBA26FE17}.Debug|x64.Build.0 = Debug|Any CPU - {01B8AC3F-1B97-4F79-93C6-BE1CBA26FE17}.Debug|x86.ActiveCfg = Debug|Any CPU - {01B8AC3F-1B97-4F79-93C6-BE1CBA26FE17}.Debug|x86.Build.0 = Debug|Any CPU - {01B8AC3F-1B97-4F79-93C6-BE1CBA26FE17}.Release|Any CPU.ActiveCfg = Release|Any CPU - {01B8AC3F-1B97-4F79-93C6-BE1CBA26FE17}.Release|Any CPU.Build.0 = Release|Any CPU - {01B8AC3F-1B97-4F79-93C6-BE1CBA26FE17}.Release|x64.ActiveCfg = Release|Any CPU - {01B8AC3F-1B97-4F79-93C6-BE1CBA26FE17}.Release|x64.Build.0 = Release|Any CPU - {01B8AC3F-1B97-4F79-93C6-BE1CBA26FE17}.Release|x86.ActiveCfg = Release|Any CPU - {01B8AC3F-1B97-4F79-93C6-BE1CBA26FE17}.Release|x86.Build.0 = Release|Any CPU - {37BB9502-CCD1-425A-BF45-D56968B0C2F9}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {37BB9502-CCD1-425A-BF45-D56968B0C2F9}.Debug|Any CPU.Build.0 = Debug|Any CPU - {37BB9502-CCD1-425A-BF45-D56968B0C2F9}.Debug|x64.ActiveCfg = Debug|Any CPU - {37BB9502-CCD1-425A-BF45-D56968B0C2F9}.Debug|x64.Build.0 = Debug|Any CPU - {37BB9502-CCD1-425A-BF45-D56968B0C2F9}.Debug|x86.ActiveCfg = Debug|Any CPU - {37BB9502-CCD1-425A-BF45-D56968B0C2F9}.Debug|x86.Build.0 = Debug|Any CPU - {37BB9502-CCD1-425A-BF45-D56968B0C2F9}.Release|Any CPU.ActiveCfg = Release|Any CPU - {37BB9502-CCD1-425A-BF45-D56968B0C2F9}.Release|Any CPU.Build.0 = Release|Any CPU - {37BB9502-CCD1-425A-BF45-D56968B0C2F9}.Release|x64.ActiveCfg = Release|Any CPU - {37BB9502-CCD1-425A-BF45-D56968B0C2F9}.Release|x64.Build.0 = Release|Any CPU - {37BB9502-CCD1-425A-BF45-D56968B0C2F9}.Release|x86.ActiveCfg = Release|Any CPU - {37BB9502-CCD1-425A-BF45-D56968B0C2F9}.Release|x86.Build.0 = Release|Any CPU - {015A7A95-2C07-4C7F-8048-DB591AAC5FE5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {015A7A95-2C07-4C7F-8048-DB591AAC5FE5}.Debug|Any CPU.Build.0 = Debug|Any CPU - {015A7A95-2C07-4C7F-8048-DB591AAC5FE5}.Debug|x64.ActiveCfg = Debug|Any CPU - {015A7A95-2C07-4C7F-8048-DB591AAC5FE5}.Debug|x64.Build.0 = Debug|Any CPU - {015A7A95-2C07-4C7F-8048-DB591AAC5FE5}.Debug|x86.ActiveCfg = Debug|Any CPU - {015A7A95-2C07-4C7F-8048-DB591AAC5FE5}.Debug|x86.Build.0 = Debug|Any CPU - {015A7A95-2C07-4C7F-8048-DB591AAC5FE5}.Release|Any CPU.ActiveCfg = Release|Any CPU - {015A7A95-2C07-4C7F-8048-DB591AAC5FE5}.Release|Any CPU.Build.0 = Release|Any CPU - {015A7A95-2C07-4C7F-8048-DB591AAC5FE5}.Release|x64.ActiveCfg = Release|Any CPU - 
{015A7A95-2C07-4C7F-8048-DB591AAC5FE5}.Release|x64.Build.0 = Release|Any CPU - {015A7A95-2C07-4C7F-8048-DB591AAC5FE5}.Release|x86.ActiveCfg = Release|Any CPU - {015A7A95-2C07-4C7F-8048-DB591AAC5FE5}.Release|x86.Build.0 = Release|Any CPU - {EF59DAD6-30CE-47CB-862A-DD79F31BFDE4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {EF59DAD6-30CE-47CB-862A-DD79F31BFDE4}.Debug|Any CPU.Build.0 = Debug|Any CPU - {EF59DAD6-30CE-47CB-862A-DD79F31BFDE4}.Debug|x64.ActiveCfg = Debug|Any CPU - {EF59DAD6-30CE-47CB-862A-DD79F31BFDE4}.Debug|x64.Build.0 = Debug|Any CPU - {EF59DAD6-30CE-47CB-862A-DD79F31BFDE4}.Debug|x86.ActiveCfg = Debug|Any CPU - {EF59DAD6-30CE-47CB-862A-DD79F31BFDE4}.Debug|x86.Build.0 = Debug|Any CPU - {EF59DAD6-30CE-47CB-862A-DD79F31BFDE4}.Release|Any CPU.ActiveCfg = Release|Any CPU - {EF59DAD6-30CE-47CB-862A-DD79F31BFDE4}.Release|Any CPU.Build.0 = Release|Any CPU - {EF59DAD6-30CE-47CB-862A-DD79F31BFDE4}.Release|x64.ActiveCfg = Release|Any CPU - {EF59DAD6-30CE-47CB-862A-DD79F31BFDE4}.Release|x64.Build.0 = Release|Any CPU - {EF59DAD6-30CE-47CB-862A-DD79F31BFDE4}.Release|x86.ActiveCfg = Release|Any CPU - {EF59DAD6-30CE-47CB-862A-DD79F31BFDE4}.Release|x86.Build.0 = Release|Any CPU - {27D951AD-696D-4330-B4F5-F8F81344C191}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {27D951AD-696D-4330-B4F5-F8F81344C191}.Debug|Any CPU.Build.0 = Debug|Any CPU - {27D951AD-696D-4330-B4F5-F8F81344C191}.Debug|x64.ActiveCfg = Debug|Any CPU - {27D951AD-696D-4330-B4F5-F8F81344C191}.Debug|x64.Build.0 = Debug|Any CPU - {27D951AD-696D-4330-B4F5-F8F81344C191}.Debug|x86.ActiveCfg = Debug|Any CPU - {27D951AD-696D-4330-B4F5-F8F81344C191}.Debug|x86.Build.0 = Debug|Any CPU - {27D951AD-696D-4330-B4F5-F8F81344C191}.Release|Any CPU.ActiveCfg = Release|Any CPU - {27D951AD-696D-4330-B4F5-F8F81344C191}.Release|Any CPU.Build.0 = Release|Any CPU - {27D951AD-696D-4330-B4F5-F8F81344C191}.Release|x64.ActiveCfg = Release|Any CPU - {27D951AD-696D-4330-B4F5-F8F81344C191}.Release|x64.Build.0 = Release|Any CPU - {27D951AD-696D-4330-B4F5-F8F81344C191}.Release|x86.ActiveCfg = Release|Any CPU - {27D951AD-696D-4330-B4F5-F8F81344C191}.Release|x86.Build.0 = Release|Any CPU - {31277AFF-9BFF-4C17-8593-B562A385058E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {31277AFF-9BFF-4C17-8593-B562A385058E}.Debug|Any CPU.Build.0 = Debug|Any CPU - {31277AFF-9BFF-4C17-8593-B562A385058E}.Debug|x64.ActiveCfg = Debug|Any CPU - {31277AFF-9BFF-4C17-8593-B562A385058E}.Debug|x64.Build.0 = Debug|Any CPU - {31277AFF-9BFF-4C17-8593-B562A385058E}.Debug|x86.ActiveCfg = Debug|Any CPU - {31277AFF-9BFF-4C17-8593-B562A385058E}.Debug|x86.Build.0 = Debug|Any CPU - {31277AFF-9BFF-4C17-8593-B562A385058E}.Release|Any CPU.ActiveCfg = Release|Any CPU - {31277AFF-9BFF-4C17-8593-B562A385058E}.Release|Any CPU.Build.0 = Release|Any CPU - {31277AFF-9BFF-4C17-8593-B562A385058E}.Release|x64.ActiveCfg = Release|Any CPU - {31277AFF-9BFF-4C17-8593-B562A385058E}.Release|x64.Build.0 = Release|Any CPU - {31277AFF-9BFF-4C17-8593-B562A385058E}.Release|x86.ActiveCfg = Release|Any CPU - {31277AFF-9BFF-4C17-8593-B562A385058E}.Release|x86.Build.0 = Release|Any CPU - {3A8F090F-678D-46E2-8899-67402129749C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {3A8F090F-678D-46E2-8899-67402129749C}.Debug|Any CPU.Build.0 = Debug|Any CPU - {3A8F090F-678D-46E2-8899-67402129749C}.Debug|x64.ActiveCfg = Debug|Any CPU - {3A8F090F-678D-46E2-8899-67402129749C}.Debug|x64.Build.0 = Debug|Any CPU - {3A8F090F-678D-46E2-8899-67402129749C}.Debug|x86.ActiveCfg = Debug|Any CPU - {3A8F090F-678D-46E2-8899-67402129749C}.Debug|x86.Build.0 = Debug|Any 
CPU - {3A8F090F-678D-46E2-8899-67402129749C}.Release|Any CPU.ActiveCfg = Release|Any CPU - {3A8F090F-678D-46E2-8899-67402129749C}.Release|Any CPU.Build.0 = Release|Any CPU - {3A8F090F-678D-46E2-8899-67402129749C}.Release|x64.ActiveCfg = Release|Any CPU - {3A8F090F-678D-46E2-8899-67402129749C}.Release|x64.Build.0 = Release|Any CPU - {3A8F090F-678D-46E2-8899-67402129749C}.Release|x86.ActiveCfg = Release|Any CPU - {3A8F090F-678D-46E2-8899-67402129749C}.Release|x86.Build.0 = Release|Any CPU - {19FACEC7-D6D4-40F5-84AD-14E2983F18F7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {19FACEC7-D6D4-40F5-84AD-14E2983F18F7}.Debug|Any CPU.Build.0 = Debug|Any CPU - {19FACEC7-D6D4-40F5-84AD-14E2983F18F7}.Debug|x64.ActiveCfg = Debug|Any CPU - {19FACEC7-D6D4-40F5-84AD-14E2983F18F7}.Debug|x64.Build.0 = Debug|Any CPU - {19FACEC7-D6D4-40F5-84AD-14E2983F18F7}.Debug|x86.ActiveCfg = Debug|Any CPU - {19FACEC7-D6D4-40F5-84AD-14E2983F18F7}.Debug|x86.Build.0 = Debug|Any CPU - {19FACEC7-D6D4-40F5-84AD-14E2983F18F7}.Release|Any CPU.ActiveCfg = Release|Any CPU - {19FACEC7-D6D4-40F5-84AD-14E2983F18F7}.Release|Any CPU.Build.0 = Release|Any CPU - {19FACEC7-D6D4-40F5-84AD-14E2983F18F7}.Release|x64.ActiveCfg = Release|Any CPU - {19FACEC7-D6D4-40F5-84AD-14E2983F18F7}.Release|x64.Build.0 = Release|Any CPU - {19FACEC7-D6D4-40F5-84AD-14E2983F18F7}.Release|x86.ActiveCfg = Release|Any CPU - {19FACEC7-D6D4-40F5-84AD-14E2983F18F7}.Release|x86.Build.0 = Release|Any CPU - {8342286A-BE36-4ACA-87FF-EBEB4E268498}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {8342286A-BE36-4ACA-87FF-EBEB4E268498}.Debug|Any CPU.Build.0 = Debug|Any CPU - {8342286A-BE36-4ACA-87FF-EBEB4E268498}.Debug|x64.ActiveCfg = Debug|Any CPU - {8342286A-BE36-4ACA-87FF-EBEB4E268498}.Debug|x64.Build.0 = Debug|Any CPU - {8342286A-BE36-4ACA-87FF-EBEB4E268498}.Debug|x86.ActiveCfg = Debug|Any CPU - {8342286A-BE36-4ACA-87FF-EBEB4E268498}.Debug|x86.Build.0 = Debug|Any CPU - {8342286A-BE36-4ACA-87FF-EBEB4E268498}.Release|Any CPU.ActiveCfg = Release|Any CPU - {8342286A-BE36-4ACA-87FF-EBEB4E268498}.Release|Any CPU.Build.0 = Release|Any CPU - {8342286A-BE36-4ACA-87FF-EBEB4E268498}.Release|x64.ActiveCfg = Release|Any CPU - {8342286A-BE36-4ACA-87FF-EBEB4E268498}.Release|x64.Build.0 = Release|Any CPU - {8342286A-BE36-4ACA-87FF-EBEB4E268498}.Release|x86.ActiveCfg = Release|Any CPU - {8342286A-BE36-4ACA-87FF-EBEB4E268498}.Release|x86.Build.0 = Release|Any CPU - {05D844B6-51C1-4926-919C-D99E24FB3BC9}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {05D844B6-51C1-4926-919C-D99E24FB3BC9}.Debug|Any CPU.Build.0 = Debug|Any CPU - {05D844B6-51C1-4926-919C-D99E24FB3BC9}.Debug|x64.ActiveCfg = Debug|Any CPU - {05D844B6-51C1-4926-919C-D99E24FB3BC9}.Debug|x64.Build.0 = Debug|Any CPU - {05D844B6-51C1-4926-919C-D99E24FB3BC9}.Debug|x86.ActiveCfg = Debug|Any CPU - {05D844B6-51C1-4926-919C-D99E24FB3BC9}.Debug|x86.Build.0 = Debug|Any CPU - {05D844B6-51C1-4926-919C-D99E24FB3BC9}.Release|Any CPU.ActiveCfg = Release|Any CPU - {05D844B6-51C1-4926-919C-D99E24FB3BC9}.Release|Any CPU.Build.0 = Release|Any CPU - {05D844B6-51C1-4926-919C-D99E24FB3BC9}.Release|x64.ActiveCfg = Release|Any CPU - {05D844B6-51C1-4926-919C-D99E24FB3BC9}.Release|x64.Build.0 = Release|Any CPU - {05D844B6-51C1-4926-919C-D99E24FB3BC9}.Release|x86.ActiveCfg = Release|Any CPU - {05D844B6-51C1-4926-919C-D99E24FB3BC9}.Release|x86.Build.0 = Release|Any CPU - {03E15545-D6A0-4287-A88C-6EDE77C0DCBE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {03E15545-D6A0-4287-A88C-6EDE77C0DCBE}.Debug|Any CPU.Build.0 = Debug|Any CPU - 
{03E15545-D6A0-4287-A88C-6EDE77C0DCBE}.Debug|x64.ActiveCfg = Debug|Any CPU - {03E15545-D6A0-4287-A88C-6EDE77C0DCBE}.Debug|x64.Build.0 = Debug|Any CPU - {03E15545-D6A0-4287-A88C-6EDE77C0DCBE}.Debug|x86.ActiveCfg = Debug|Any CPU - {03E15545-D6A0-4287-A88C-6EDE77C0DCBE}.Debug|x86.Build.0 = Debug|Any CPU - {03E15545-D6A0-4287-A88C-6EDE77C0DCBE}.Release|Any CPU.ActiveCfg = Release|Any CPU - {03E15545-D6A0-4287-A88C-6EDE77C0DCBE}.Release|Any CPU.Build.0 = Release|Any CPU - {03E15545-D6A0-4287-A88C-6EDE77C0DCBE}.Release|x64.ActiveCfg = Release|Any CPU - {03E15545-D6A0-4287-A88C-6EDE77C0DCBE}.Release|x64.Build.0 = Release|Any CPU - {03E15545-D6A0-4287-A88C-6EDE77C0DCBE}.Release|x86.ActiveCfg = Release|Any CPU - {03E15545-D6A0-4287-A88C-6EDE77C0DCBE}.Release|x86.Build.0 = Release|Any CPU - {A072C46F-BA45-419E-B1B6-416919F78440}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {A072C46F-BA45-419E-B1B6-416919F78440}.Debug|Any CPU.Build.0 = Debug|Any CPU - {A072C46F-BA45-419E-B1B6-416919F78440}.Debug|x64.ActiveCfg = Debug|Any CPU - {A072C46F-BA45-419E-B1B6-416919F78440}.Debug|x64.Build.0 = Debug|Any CPU - {A072C46F-BA45-419E-B1B6-416919F78440}.Debug|x86.ActiveCfg = Debug|Any CPU - {A072C46F-BA45-419E-B1B6-416919F78440}.Debug|x86.Build.0 = Debug|Any CPU - {A072C46F-BA45-419E-B1B6-416919F78440}.Release|Any CPU.ActiveCfg = Release|Any CPU - {A072C46F-BA45-419E-B1B6-416919F78440}.Release|Any CPU.Build.0 = Release|Any CPU - {A072C46F-BA45-419E-B1B6-416919F78440}.Release|x64.ActiveCfg = Release|Any CPU - {A072C46F-BA45-419E-B1B6-416919F78440}.Release|x64.Build.0 = Release|Any CPU - {A072C46F-BA45-419E-B1B6-416919F78440}.Release|x86.ActiveCfg = Release|Any CPU - {A072C46F-BA45-419E-B1B6-416919F78440}.Release|x86.Build.0 = Release|Any CPU - {6DE0F48D-8CEA-44C1-82FF-0DC891B33FE3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {6DE0F48D-8CEA-44C1-82FF-0DC891B33FE3}.Debug|Any CPU.Build.0 = Debug|Any CPU - {6DE0F48D-8CEA-44C1-82FF-0DC891B33FE3}.Debug|x64.ActiveCfg = Debug|Any CPU - {6DE0F48D-8CEA-44C1-82FF-0DC891B33FE3}.Debug|x64.Build.0 = Debug|Any CPU - {6DE0F48D-8CEA-44C1-82FF-0DC891B33FE3}.Debug|x86.ActiveCfg = Debug|Any CPU - {6DE0F48D-8CEA-44C1-82FF-0DC891B33FE3}.Debug|x86.Build.0 = Debug|Any CPU - {6DE0F48D-8CEA-44C1-82FF-0DC891B33FE3}.Release|Any CPU.ActiveCfg = Release|Any CPU - {6DE0F48D-8CEA-44C1-82FF-0DC891B33FE3}.Release|Any CPU.Build.0 = Release|Any CPU - {6DE0F48D-8CEA-44C1-82FF-0DC891B33FE3}.Release|x64.ActiveCfg = Release|Any CPU - {6DE0F48D-8CEA-44C1-82FF-0DC891B33FE3}.Release|x64.Build.0 = Release|Any CPU - {6DE0F48D-8CEA-44C1-82FF-0DC891B33FE3}.Release|x86.ActiveCfg = Release|Any CPU - {6DE0F48D-8CEA-44C1-82FF-0DC891B33FE3}.Release|x86.Build.0 = Release|Any CPU - {10088067-7B8F-4D2E-A8E1-ED546DC17369}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {10088067-7B8F-4D2E-A8E1-ED546DC17369}.Debug|Any CPU.Build.0 = Debug|Any CPU - {10088067-7B8F-4D2E-A8E1-ED546DC17369}.Debug|x64.ActiveCfg = Debug|Any CPU - {10088067-7B8F-4D2E-A8E1-ED546DC17369}.Debug|x64.Build.0 = Debug|Any CPU - {10088067-7B8F-4D2E-A8E1-ED546DC17369}.Debug|x86.ActiveCfg = Debug|Any CPU - {10088067-7B8F-4D2E-A8E1-ED546DC17369}.Debug|x86.Build.0 = Debug|Any CPU - {10088067-7B8F-4D2E-A8E1-ED546DC17369}.Release|Any CPU.ActiveCfg = Release|Any CPU - {10088067-7B8F-4D2E-A8E1-ED546DC17369}.Release|Any CPU.Build.0 = Release|Any CPU - {10088067-7B8F-4D2E-A8E1-ED546DC17369}.Release|x64.ActiveCfg = Release|Any CPU - {10088067-7B8F-4D2E-A8E1-ED546DC17369}.Release|x64.Build.0 = Release|Any CPU - {10088067-7B8F-4D2E-A8E1-ED546DC17369}.Release|x86.ActiveCfg = 
Release|Any CPU - {10088067-7B8F-4D2E-A8E1-ED546DC17369}.Release|x86.Build.0 = Release|Any CPU - {E014565C-2456-4BD0-9481-557F939C1E36}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {E014565C-2456-4BD0-9481-557F939C1E36}.Debug|Any CPU.Build.0 = Debug|Any CPU - {E014565C-2456-4BD0-9481-557F939C1E36}.Debug|x64.ActiveCfg = Debug|Any CPU - {E014565C-2456-4BD0-9481-557F939C1E36}.Debug|x64.Build.0 = Debug|Any CPU - {E014565C-2456-4BD0-9481-557F939C1E36}.Debug|x86.ActiveCfg = Debug|Any CPU - {E014565C-2456-4BD0-9481-557F939C1E36}.Debug|x86.Build.0 = Debug|Any CPU - {E014565C-2456-4BD0-9481-557F939C1E36}.Release|Any CPU.ActiveCfg = Release|Any CPU - {E014565C-2456-4BD0-9481-557F939C1E36}.Release|Any CPU.Build.0 = Release|Any CPU - {E014565C-2456-4BD0-9481-557F939C1E36}.Release|x64.ActiveCfg = Release|Any CPU - {E014565C-2456-4BD0-9481-557F939C1E36}.Release|x64.Build.0 = Release|Any CPU - {E014565C-2456-4BD0-9481-557F939C1E36}.Release|x86.ActiveCfg = Release|Any CPU - {E014565C-2456-4BD0-9481-557F939C1E36}.Release|x86.Build.0 = Release|Any CPU - {44825FDA-68D2-4675-8B1D-6D5303DC38CF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {44825FDA-68D2-4675-8B1D-6D5303DC38CF}.Debug|Any CPU.Build.0 = Debug|Any CPU - {44825FDA-68D2-4675-8B1D-6D5303DC38CF}.Debug|x64.ActiveCfg = Debug|Any CPU - {44825FDA-68D2-4675-8B1D-6D5303DC38CF}.Debug|x64.Build.0 = Debug|Any CPU - {44825FDA-68D2-4675-8B1D-6D5303DC38CF}.Debug|x86.ActiveCfg = Debug|Any CPU - {44825FDA-68D2-4675-8B1D-6D5303DC38CF}.Debug|x86.Build.0 = Debug|Any CPU - {44825FDA-68D2-4675-8B1D-6D5303DC38CF}.Release|Any CPU.ActiveCfg = Release|Any CPU - {44825FDA-68D2-4675-8B1D-6D5303DC38CF}.Release|Any CPU.Build.0 = Release|Any CPU - {44825FDA-68D2-4675-8B1D-6D5303DC38CF}.Release|x64.ActiveCfg = Release|Any CPU - {44825FDA-68D2-4675-8B1D-6D5303DC38CF}.Release|x64.Build.0 = Release|Any CPU - {44825FDA-68D2-4675-8B1D-6D5303DC38CF}.Release|x86.ActiveCfg = Release|Any CPU - {44825FDA-68D2-4675-8B1D-6D5303DC38CF}.Release|x86.Build.0 = Release|Any CPU - {6D46DB08-C8D1-4F67-A6D0-D50FE84F19E0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {6D46DB08-C8D1-4F67-A6D0-D50FE84F19E0}.Debug|Any CPU.Build.0 = Debug|Any CPU - {6D46DB08-C8D1-4F67-A6D0-D50FE84F19E0}.Debug|x64.ActiveCfg = Debug|Any CPU - {6D46DB08-C8D1-4F67-A6D0-D50FE84F19E0}.Debug|x64.Build.0 = Debug|Any CPU - {6D46DB08-C8D1-4F67-A6D0-D50FE84F19E0}.Debug|x86.ActiveCfg = Debug|Any CPU - {6D46DB08-C8D1-4F67-A6D0-D50FE84F19E0}.Debug|x86.Build.0 = Debug|Any CPU - {6D46DB08-C8D1-4F67-A6D0-D50FE84F19E0}.Release|Any CPU.ActiveCfg = Release|Any CPU - {6D46DB08-C8D1-4F67-A6D0-D50FE84F19E0}.Release|Any CPU.Build.0 = Release|Any CPU - {6D46DB08-C8D1-4F67-A6D0-D50FE84F19E0}.Release|x64.ActiveCfg = Release|Any CPU - {6D46DB08-C8D1-4F67-A6D0-D50FE84F19E0}.Release|x64.Build.0 = Release|Any CPU - {6D46DB08-C8D1-4F67-A6D0-D50FE84F19E0}.Release|x86.ActiveCfg = Release|Any CPU - {6D46DB08-C8D1-4F67-A6D0-D50FE84F19E0}.Release|x86.Build.0 = Release|Any CPU - {5E5EB0A7-7A19-4144-81FE-13C31DB678B2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {5E5EB0A7-7A19-4144-81FE-13C31DB678B2}.Debug|Any CPU.Build.0 = Debug|Any CPU - {5E5EB0A7-7A19-4144-81FE-13C31DB678B2}.Debug|x64.ActiveCfg = Debug|Any CPU - {5E5EB0A7-7A19-4144-81FE-13C31DB678B2}.Debug|x64.Build.0 = Debug|Any CPU - {5E5EB0A7-7A19-4144-81FE-13C31DB678B2}.Debug|x86.ActiveCfg = Debug|Any CPU - {5E5EB0A7-7A19-4144-81FE-13C31DB678B2}.Debug|x86.Build.0 = Debug|Any CPU - {5E5EB0A7-7A19-4144-81FE-13C31DB678B2}.Release|Any CPU.ActiveCfg = Release|Any CPU - {5E5EB0A7-7A19-4144-81FE-13C31DB678B2}.Release|Any 
CPU.Build.0 = Release|Any CPU - {5E5EB0A7-7A19-4144-81FE-13C31DB678B2}.Release|x64.ActiveCfg = Release|Any CPU - {5E5EB0A7-7A19-4144-81FE-13C31DB678B2}.Release|x64.Build.0 = Release|Any CPU - {5E5EB0A7-7A19-4144-81FE-13C31DB678B2}.Release|x86.ActiveCfg = Release|Any CPU - {5E5EB0A7-7A19-4144-81FE-13C31DB678B2}.Release|x86.Build.0 = Release|Any CPU - {7F3D4F33-341A-44A1-96EA-A1729BC2E5D8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {7F3D4F33-341A-44A1-96EA-A1729BC2E5D8}.Debug|Any CPU.Build.0 = Debug|Any CPU - {7F3D4F33-341A-44A1-96EA-A1729BC2E5D8}.Debug|x64.ActiveCfg = Debug|Any CPU - {7F3D4F33-341A-44A1-96EA-A1729BC2E5D8}.Debug|x64.Build.0 = Debug|Any CPU - {7F3D4F33-341A-44A1-96EA-A1729BC2E5D8}.Debug|x86.ActiveCfg = Debug|Any CPU - {7F3D4F33-341A-44A1-96EA-A1729BC2E5D8}.Debug|x86.Build.0 = Debug|Any CPU - {7F3D4F33-341A-44A1-96EA-A1729BC2E5D8}.Release|Any CPU.ActiveCfg = Release|Any CPU - {7F3D4F33-341A-44A1-96EA-A1729BC2E5D8}.Release|Any CPU.Build.0 = Release|Any CPU - {7F3D4F33-341A-44A1-96EA-A1729BC2E5D8}.Release|x64.ActiveCfg = Release|Any CPU - {7F3D4F33-341A-44A1-96EA-A1729BC2E5D8}.Release|x64.Build.0 = Release|Any CPU - {7F3D4F33-341A-44A1-96EA-A1729BC2E5D8}.Release|x86.ActiveCfg = Release|Any CPU - {7F3D4F33-341A-44A1-96EA-A1729BC2E5D8}.Release|x86.Build.0 = Release|Any CPU - {B86C287A-734E-4527-A03E-6B970F22E27E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {B86C287A-734E-4527-A03E-6B970F22E27E}.Debug|Any CPU.Build.0 = Debug|Any CPU - {B86C287A-734E-4527-A03E-6B970F22E27E}.Debug|x64.ActiveCfg = Debug|Any CPU - {B86C287A-734E-4527-A03E-6B970F22E27E}.Debug|x64.Build.0 = Debug|Any CPU - {B86C287A-734E-4527-A03E-6B970F22E27E}.Debug|x86.ActiveCfg = Debug|Any CPU - {B86C287A-734E-4527-A03E-6B970F22E27E}.Debug|x86.Build.0 = Debug|Any CPU - {B86C287A-734E-4527-A03E-6B970F22E27E}.Release|Any CPU.ActiveCfg = Release|Any CPU - {B86C287A-734E-4527-A03E-6B970F22E27E}.Release|Any CPU.Build.0 = Release|Any CPU - {B86C287A-734E-4527-A03E-6B970F22E27E}.Release|x64.ActiveCfg = Release|Any CPU - {B86C287A-734E-4527-A03E-6B970F22E27E}.Release|x64.Build.0 = Release|Any CPU - {B86C287A-734E-4527-A03E-6B970F22E27E}.Release|x86.ActiveCfg = Release|Any CPU - {B86C287A-734E-4527-A03E-6B970F22E27E}.Release|x86.Build.0 = Release|Any CPU - {E23FBF14-EE5B-49D4-8938-E8368CF4A4B5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {E23FBF14-EE5B-49D4-8938-E8368CF4A4B5}.Debug|Any CPU.Build.0 = Debug|Any CPU - {E23FBF14-EE5B-49D4-8938-E8368CF4A4B5}.Debug|x64.ActiveCfg = Debug|Any CPU - {E23FBF14-EE5B-49D4-8938-E8368CF4A4B5}.Debug|x64.Build.0 = Debug|Any CPU - {E23FBF14-EE5B-49D4-8938-E8368CF4A4B5}.Debug|x86.ActiveCfg = Debug|Any CPU - {E23FBF14-EE5B-49D4-8938-E8368CF4A4B5}.Debug|x86.Build.0 = Debug|Any CPU - {E23FBF14-EE5B-49D4-8938-E8368CF4A4B5}.Release|Any CPU.ActiveCfg = Release|Any CPU - {E23FBF14-EE5B-49D4-8938-E8368CF4A4B5}.Release|Any CPU.Build.0 = Release|Any CPU - {E23FBF14-EE5B-49D4-8938-E8368CF4A4B5}.Release|x64.ActiveCfg = Release|Any CPU - {E23FBF14-EE5B-49D4-8938-E8368CF4A4B5}.Release|x64.Build.0 = Release|Any CPU - {E23FBF14-EE5B-49D4-8938-E8368CF4A4B5}.Release|x86.ActiveCfg = Release|Any CPU - {E23FBF14-EE5B-49D4-8938-E8368CF4A4B5}.Release|x86.Build.0 = Release|Any CPU - {50D014B5-99A6-46FC-B745-26687595B293}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {50D014B5-99A6-46FC-B745-26687595B293}.Debug|Any CPU.Build.0 = Debug|Any CPU - {50D014B5-99A6-46FC-B745-26687595B293}.Debug|x64.ActiveCfg = Debug|Any CPU - {50D014B5-99A6-46FC-B745-26687595B293}.Debug|x64.Build.0 = Debug|Any CPU - 
{50D014B5-99A6-46FC-B745-26687595B293}.Debug|x86.ActiveCfg = Debug|Any CPU - {50D014B5-99A6-46FC-B745-26687595B293}.Debug|x86.Build.0 = Debug|Any CPU - {50D014B5-99A6-46FC-B745-26687595B293}.Release|Any CPU.ActiveCfg = Release|Any CPU - {50D014B5-99A6-46FC-B745-26687595B293}.Release|Any CPU.Build.0 = Release|Any CPU - {50D014B5-99A6-46FC-B745-26687595B293}.Release|x64.ActiveCfg = Release|Any CPU - {50D014B5-99A6-46FC-B745-26687595B293}.Release|x64.Build.0 = Release|Any CPU - {50D014B5-99A6-46FC-B745-26687595B293}.Release|x86.ActiveCfg = Release|Any CPU - {50D014B5-99A6-46FC-B745-26687595B293}.Release|x86.Build.0 = Release|Any CPU - {D99C1F78-67EA-40E7-BD4C-985592F5265A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {D99C1F78-67EA-40E7-BD4C-985592F5265A}.Debug|Any CPU.Build.0 = Debug|Any CPU - {D99C1F78-67EA-40E7-BD4C-985592F5265A}.Debug|x64.ActiveCfg = Debug|Any CPU - {D99C1F78-67EA-40E7-BD4C-985592F5265A}.Debug|x64.Build.0 = Debug|Any CPU - {D99C1F78-67EA-40E7-BD4C-985592F5265A}.Debug|x86.ActiveCfg = Debug|Any CPU - {D99C1F78-67EA-40E7-BD4C-985592F5265A}.Debug|x86.Build.0 = Debug|Any CPU - {D99C1F78-67EA-40E7-BD4C-985592F5265A}.Release|Any CPU.ActiveCfg = Release|Any CPU - {D99C1F78-67EA-40E7-BD4C-985592F5265A}.Release|Any CPU.Build.0 = Release|Any CPU - {D99C1F78-67EA-40E7-BD4C-985592F5265A}.Release|x64.ActiveCfg = Release|Any CPU - {D99C1F78-67EA-40E7-BD4C-985592F5265A}.Release|x64.Build.0 = Release|Any CPU - {D99C1F78-67EA-40E7-BD4C-985592F5265A}.Release|x86.ActiveCfg = Release|Any CPU - {D99C1F78-67EA-40E7-BD4C-985592F5265A}.Release|x86.Build.0 = Release|Any CPU - {1CBC0B9C-A96B-4143-B70F-37C69229FFF2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {1CBC0B9C-A96B-4143-B70F-37C69229FFF2}.Debug|Any CPU.Build.0 = Debug|Any CPU - {1CBC0B9C-A96B-4143-B70F-37C69229FFF2}.Debug|x64.ActiveCfg = Debug|Any CPU - {1CBC0B9C-A96B-4143-B70F-37C69229FFF2}.Debug|x64.Build.0 = Debug|Any CPU - {1CBC0B9C-A96B-4143-B70F-37C69229FFF2}.Debug|x86.ActiveCfg = Debug|Any CPU - {1CBC0B9C-A96B-4143-B70F-37C69229FFF2}.Debug|x86.Build.0 = Debug|Any CPU - {1CBC0B9C-A96B-4143-B70F-37C69229FFF2}.Release|Any CPU.ActiveCfg = Release|Any CPU - {1CBC0B9C-A96B-4143-B70F-37C69229FFF2}.Release|Any CPU.Build.0 = Release|Any CPU - {1CBC0B9C-A96B-4143-B70F-37C69229FFF2}.Release|x64.ActiveCfg = Release|Any CPU - {1CBC0B9C-A96B-4143-B70F-37C69229FFF2}.Release|x64.Build.0 = Release|Any CPU - {1CBC0B9C-A96B-4143-B70F-37C69229FFF2}.Release|x86.ActiveCfg = Release|Any CPU - {1CBC0B9C-A96B-4143-B70F-37C69229FFF2}.Release|x86.Build.0 = Release|Any CPU - {760E2855-31B3-4CCB-BACB-34B7196A59B8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {760E2855-31B3-4CCB-BACB-34B7196A59B8}.Debug|Any CPU.Build.0 = Debug|Any CPU - {760E2855-31B3-4CCB-BACB-34B7196A59B8}.Debug|x64.ActiveCfg = Debug|Any CPU - {760E2855-31B3-4CCB-BACB-34B7196A59B8}.Debug|x64.Build.0 = Debug|Any CPU - {760E2855-31B3-4CCB-BACB-34B7196A59B8}.Debug|x86.ActiveCfg = Debug|Any CPU - {760E2855-31B3-4CCB-BACB-34B7196A59B8}.Debug|x86.Build.0 = Debug|Any CPU - {760E2855-31B3-4CCB-BACB-34B7196A59B8}.Release|Any CPU.ActiveCfg = Release|Any CPU - {760E2855-31B3-4CCB-BACB-34B7196A59B8}.Release|Any CPU.Build.0 = Release|Any CPU - {760E2855-31B3-4CCB-BACB-34B7196A59B8}.Release|x64.ActiveCfg = Release|Any CPU - {760E2855-31B3-4CCB-BACB-34B7196A59B8}.Release|x64.Build.0 = Release|Any CPU - {760E2855-31B3-4CCB-BACB-34B7196A59B8}.Release|x86.ActiveCfg = Release|Any CPU - {760E2855-31B3-4CCB-BACB-34B7196A59B8}.Release|x86.Build.0 = Release|Any CPU - {3F688F21-7E31-4781-8995-9DD34276773F}.Debug|Any 
CPU.ActiveCfg = Debug|Any CPU - {3F688F21-7E31-4781-8995-9DD34276773F}.Debug|Any CPU.Build.0 = Debug|Any CPU - {3F688F21-7E31-4781-8995-9DD34276773F}.Debug|x64.ActiveCfg = Debug|Any CPU - {3F688F21-7E31-4781-8995-9DD34276773F}.Debug|x64.Build.0 = Debug|Any CPU - {3F688F21-7E31-4781-8995-9DD34276773F}.Debug|x86.ActiveCfg = Debug|Any CPU - {3F688F21-7E31-4781-8995-9DD34276773F}.Debug|x86.Build.0 = Debug|Any CPU - {3F688F21-7E31-4781-8995-9DD34276773F}.Release|Any CPU.ActiveCfg = Release|Any CPU - {3F688F21-7E31-4781-8995-9DD34276773F}.Release|Any CPU.Build.0 = Release|Any CPU - {3F688F21-7E31-4781-8995-9DD34276773F}.Release|x64.ActiveCfg = Release|Any CPU - {3F688F21-7E31-4781-8995-9DD34276773F}.Release|x64.Build.0 = Release|Any CPU - {3F688F21-7E31-4781-8995-9DD34276773F}.Release|x86.ActiveCfg = Release|Any CPU - {3F688F21-7E31-4781-8995-9DD34276773F}.Release|x86.Build.0 = Release|Any CPU - {80AD7C4D-E4C6-4700-87AD-77B5698B338F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {80AD7C4D-E4C6-4700-87AD-77B5698B338F}.Debug|Any CPU.Build.0 = Debug|Any CPU - {80AD7C4D-E4C6-4700-87AD-77B5698B338F}.Debug|x64.ActiveCfg = Debug|Any CPU - {80AD7C4D-E4C6-4700-87AD-77B5698B338F}.Debug|x64.Build.0 = Debug|Any CPU - {80AD7C4D-E4C6-4700-87AD-77B5698B338F}.Debug|x86.ActiveCfg = Debug|Any CPU - {80AD7C4D-E4C6-4700-87AD-77B5698B338F}.Debug|x86.Build.0 = Debug|Any CPU - {80AD7C4D-E4C6-4700-87AD-77B5698B338F}.Release|Any CPU.ActiveCfg = Release|Any CPU - {80AD7C4D-E4C6-4700-87AD-77B5698B338F}.Release|Any CPU.Build.0 = Release|Any CPU - {80AD7C4D-E4C6-4700-87AD-77B5698B338F}.Release|x64.ActiveCfg = Release|Any CPU - {80AD7C4D-E4C6-4700-87AD-77B5698B338F}.Release|x64.Build.0 = Release|Any CPU - {80AD7C4D-E4C6-4700-87AD-77B5698B338F}.Release|x86.ActiveCfg = Release|Any CPU - {80AD7C4D-E4C6-4700-87AD-77B5698B338F}.Release|x86.Build.0 = Release|Any CPU - {60ABAB54-2EE9-4A16-A109-67F7B6F29184}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {60ABAB54-2EE9-4A16-A109-67F7B6F29184}.Debug|Any CPU.Build.0 = Debug|Any CPU - {60ABAB54-2EE9-4A16-A109-67F7B6F29184}.Debug|x64.ActiveCfg = Debug|Any CPU - {60ABAB54-2EE9-4A16-A109-67F7B6F29184}.Debug|x64.Build.0 = Debug|Any CPU - {60ABAB54-2EE9-4A16-A109-67F7B6F29184}.Debug|x86.ActiveCfg = Debug|Any CPU - {60ABAB54-2EE9-4A16-A109-67F7B6F29184}.Debug|x86.Build.0 = Debug|Any CPU - {60ABAB54-2EE9-4A16-A109-67F7B6F29184}.Release|Any CPU.ActiveCfg = Release|Any CPU - {60ABAB54-2EE9-4A16-A109-67F7B6F29184}.Release|Any CPU.Build.0 = Release|Any CPU - {60ABAB54-2EE9-4A16-A109-67F7B6F29184}.Release|x64.ActiveCfg = Release|Any CPU - {60ABAB54-2EE9-4A16-A109-67F7B6F29184}.Release|x64.Build.0 = Release|Any CPU - {60ABAB54-2EE9-4A16-A109-67F7B6F29184}.Release|x86.ActiveCfg = Release|Any CPU - {60ABAB54-2EE9-4A16-A109-67F7B6F29184}.Release|x86.Build.0 = Release|Any CPU - {D32C1D26-C9A1-4F2A-9DBA-DBF0353E3972}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {D32C1D26-C9A1-4F2A-9DBA-DBF0353E3972}.Debug|Any CPU.Build.0 = Debug|Any CPU - {D32C1D26-C9A1-4F2A-9DBA-DBF0353E3972}.Debug|x64.ActiveCfg = Debug|Any CPU - {D32C1D26-C9A1-4F2A-9DBA-DBF0353E3972}.Debug|x64.Build.0 = Debug|Any CPU - {D32C1D26-C9A1-4F2A-9DBA-DBF0353E3972}.Debug|x86.ActiveCfg = Debug|Any CPU - {D32C1D26-C9A1-4F2A-9DBA-DBF0353E3972}.Debug|x86.Build.0 = Debug|Any CPU - {D32C1D26-C9A1-4F2A-9DBA-DBF0353E3972}.Release|Any CPU.ActiveCfg = Release|Any CPU - {D32C1D26-C9A1-4F2A-9DBA-DBF0353E3972}.Release|Any CPU.Build.0 = Release|Any CPU - {D32C1D26-C9A1-4F2A-9DBA-DBF0353E3972}.Release|x64.ActiveCfg = Release|Any CPU - 
{D32C1D26-C9A1-4F2A-9DBA-DBF0353E3972}.Release|x64.Build.0 = Release|Any CPU - {D32C1D26-C9A1-4F2A-9DBA-DBF0353E3972}.Release|x86.ActiveCfg = Release|Any CPU - {D32C1D26-C9A1-4F2A-9DBA-DBF0353E3972}.Release|x86.Build.0 = Release|Any CPU - {5CA4E28E-6305-4B21-AD2E-0DF24D47A65B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {5CA4E28E-6305-4B21-AD2E-0DF24D47A65B}.Debug|Any CPU.Build.0 = Debug|Any CPU - {5CA4E28E-6305-4B21-AD2E-0DF24D47A65B}.Debug|x64.ActiveCfg = Debug|Any CPU - {5CA4E28E-6305-4B21-AD2E-0DF24D47A65B}.Debug|x64.Build.0 = Debug|Any CPU - {5CA4E28E-6305-4B21-AD2E-0DF24D47A65B}.Debug|x86.ActiveCfg = Debug|Any CPU - {5CA4E28E-6305-4B21-AD2E-0DF24D47A65B}.Debug|x86.Build.0 = Debug|Any CPU - {5CA4E28E-6305-4B21-AD2E-0DF24D47A65B}.Release|Any CPU.ActiveCfg = Release|Any CPU - {5CA4E28E-6305-4B21-AD2E-0DF24D47A65B}.Release|Any CPU.Build.0 = Release|Any CPU - {5CA4E28E-6305-4B21-AD2E-0DF24D47A65B}.Release|x64.ActiveCfg = Release|Any CPU - {5CA4E28E-6305-4B21-AD2E-0DF24D47A65B}.Release|x64.Build.0 = Release|Any CPU - {5CA4E28E-6305-4B21-AD2E-0DF24D47A65B}.Release|x86.ActiveCfg = Release|Any CPU - {5CA4E28E-6305-4B21-AD2E-0DF24D47A65B}.Release|x86.Build.0 = Release|Any CPU - {05475C0A-C225-4F07-A3C7-9E17E660042E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {05475C0A-C225-4F07-A3C7-9E17E660042E}.Debug|Any CPU.Build.0 = Debug|Any CPU - {05475C0A-C225-4F07-A3C7-9E17E660042E}.Debug|x64.ActiveCfg = Debug|Any CPU - {05475C0A-C225-4F07-A3C7-9E17E660042E}.Debug|x64.Build.0 = Debug|Any CPU - {05475C0A-C225-4F07-A3C7-9E17E660042E}.Debug|x86.ActiveCfg = Debug|Any CPU - {05475C0A-C225-4F07-A3C7-9E17E660042E}.Debug|x86.Build.0 = Debug|Any CPU - {05475C0A-C225-4F07-A3C7-9E17E660042E}.Release|Any CPU.ActiveCfg = Release|Any CPU - {05475C0A-C225-4F07-A3C7-9E17E660042E}.Release|Any CPU.Build.0 = Release|Any CPU - {05475C0A-C225-4F07-A3C7-9E17E660042E}.Release|x64.ActiveCfg = Release|Any CPU - {05475C0A-C225-4F07-A3C7-9E17E660042E}.Release|x64.Build.0 = Release|Any CPU - {05475C0A-C225-4F07-A3C7-9E17E660042E}.Release|x86.ActiveCfg = Release|Any CPU - {05475C0A-C225-4F07-A3C7-9E17E660042E}.Release|x86.Build.0 = Release|Any CPU - {BA47D456-4657-4C86-A665-21293E3AC47F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {BA47D456-4657-4C86-A665-21293E3AC47F}.Debug|Any CPU.Build.0 = Debug|Any CPU - {BA47D456-4657-4C86-A665-21293E3AC47F}.Debug|x64.ActiveCfg = Debug|Any CPU - {BA47D456-4657-4C86-A665-21293E3AC47F}.Debug|x64.Build.0 = Debug|Any CPU - {BA47D456-4657-4C86-A665-21293E3AC47F}.Debug|x86.ActiveCfg = Debug|Any CPU - {BA47D456-4657-4C86-A665-21293E3AC47F}.Debug|x86.Build.0 = Debug|Any CPU - {BA47D456-4657-4C86-A665-21293E3AC47F}.Release|Any CPU.ActiveCfg = Release|Any CPU - {BA47D456-4657-4C86-A665-21293E3AC47F}.Release|Any CPU.Build.0 = Release|Any CPU - {BA47D456-4657-4C86-A665-21293E3AC47F}.Release|x64.ActiveCfg = Release|Any CPU - {BA47D456-4657-4C86-A665-21293E3AC47F}.Release|x64.Build.0 = Release|Any CPU - {BA47D456-4657-4C86-A665-21293E3AC47F}.Release|x86.ActiveCfg = Release|Any CPU - {BA47D456-4657-4C86-A665-21293E3AC47F}.Release|x86.Build.0 = Release|Any CPU - {49EF86AC-1CC2-4A24-8637-C5151E23DF9D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {49EF86AC-1CC2-4A24-8637-C5151E23DF9D}.Debug|Any CPU.Build.0 = Debug|Any CPU - {49EF86AC-1CC2-4A24-8637-C5151E23DF9D}.Debug|x64.ActiveCfg = Debug|Any CPU - {49EF86AC-1CC2-4A24-8637-C5151E23DF9D}.Debug|x64.Build.0 = Debug|Any CPU - {49EF86AC-1CC2-4A24-8637-C5151E23DF9D}.Debug|x86.ActiveCfg = Debug|Any CPU - {49EF86AC-1CC2-4A24-8637-C5151E23DF9D}.Debug|x86.Build.0 = Debug|Any 
CPU - {49EF86AC-1CC2-4A24-8637-C5151E23DF9D}.Release|Any CPU.ActiveCfg = Release|Any CPU - {49EF86AC-1CC2-4A24-8637-C5151E23DF9D}.Release|Any CPU.Build.0 = Release|Any CPU - {49EF86AC-1CC2-4A24-8637-C5151E23DF9D}.Release|x64.ActiveCfg = Release|Any CPU - {49EF86AC-1CC2-4A24-8637-C5151E23DF9D}.Release|x64.Build.0 = Release|Any CPU - {49EF86AC-1CC2-4A24-8637-C5151E23DF9D}.Release|x86.ActiveCfg = Release|Any CPU - {49EF86AC-1CC2-4A24-8637-C5151E23DF9D}.Release|x86.Build.0 = Release|Any CPU - {C22333B3-D132-4960-A490-6BEF1EB1C917}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {C22333B3-D132-4960-A490-6BEF1EB1C917}.Debug|Any CPU.Build.0 = Debug|Any CPU - {C22333B3-D132-4960-A490-6BEF1EB1C917}.Debug|x64.ActiveCfg = Debug|Any CPU - {C22333B3-D132-4960-A490-6BEF1EB1C917}.Debug|x64.Build.0 = Debug|Any CPU - {C22333B3-D132-4960-A490-6BEF1EB1C917}.Debug|x86.ActiveCfg = Debug|Any CPU - {C22333B3-D132-4960-A490-6BEF1EB1C917}.Debug|x86.Build.0 = Debug|Any CPU - {C22333B3-D132-4960-A490-6BEF1EB1C917}.Release|Any CPU.ActiveCfg = Release|Any CPU - {C22333B3-D132-4960-A490-6BEF1EB1C917}.Release|Any CPU.Build.0 = Release|Any CPU - {C22333B3-D132-4960-A490-6BEF1EB1C917}.Release|x64.ActiveCfg = Release|Any CPU - {C22333B3-D132-4960-A490-6BEF1EB1C917}.Release|x64.Build.0 = Release|Any CPU - {C22333B3-D132-4960-A490-6BEF1EB1C917}.Release|x86.ActiveCfg = Release|Any CPU - {C22333B3-D132-4960-A490-6BEF1EB1C917}.Release|x86.Build.0 = Release|Any CPU - {B8B15A8D-F647-41AE-A55F-A283A47E97C4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {B8B15A8D-F647-41AE-A55F-A283A47E97C4}.Debug|Any CPU.Build.0 = Debug|Any CPU - {B8B15A8D-F647-41AE-A55F-A283A47E97C4}.Debug|x64.ActiveCfg = Debug|Any CPU - {B8B15A8D-F647-41AE-A55F-A283A47E97C4}.Debug|x64.Build.0 = Debug|Any CPU - {B8B15A8D-F647-41AE-A55F-A283A47E97C4}.Debug|x86.ActiveCfg = Debug|Any CPU - {B8B15A8D-F647-41AE-A55F-A283A47E97C4}.Debug|x86.Build.0 = Debug|Any CPU - {B8B15A8D-F647-41AE-A55F-A283A47E97C4}.Release|Any CPU.ActiveCfg = Release|Any CPU - {B8B15A8D-F647-41AE-A55F-A283A47E97C4}.Release|Any CPU.Build.0 = Release|Any CPU - {B8B15A8D-F647-41AE-A55F-A283A47E97C4}.Release|x64.ActiveCfg = Release|Any CPU - {B8B15A8D-F647-41AE-A55F-A283A47E97C4}.Release|x64.Build.0 = Release|Any CPU - {B8B15A8D-F647-41AE-A55F-A283A47E97C4}.Release|x86.ActiveCfg = Release|Any CPU - {B8B15A8D-F647-41AE-A55F-A283A47E97C4}.Release|x86.Build.0 = Release|Any CPU - {CBE6E3D8-230C-4513-B98F-99D82B83B9F7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {CBE6E3D8-230C-4513-B98F-99D82B83B9F7}.Debug|Any CPU.Build.0 = Debug|Any CPU - {CBE6E3D8-230C-4513-B98F-99D82B83B9F7}.Debug|x64.ActiveCfg = Debug|Any CPU - {CBE6E3D8-230C-4513-B98F-99D82B83B9F7}.Debug|x64.Build.0 = Debug|Any CPU - {CBE6E3D8-230C-4513-B98F-99D82B83B9F7}.Debug|x86.ActiveCfg = Debug|Any CPU - {CBE6E3D8-230C-4513-B98F-99D82B83B9F7}.Debug|x86.Build.0 = Debug|Any CPU - {CBE6E3D8-230C-4513-B98F-99D82B83B9F7}.Release|Any CPU.ActiveCfg = Release|Any CPU - {CBE6E3D8-230C-4513-B98F-99D82B83B9F7}.Release|Any CPU.Build.0 = Release|Any CPU - {CBE6E3D8-230C-4513-B98F-99D82B83B9F7}.Release|x64.ActiveCfg = Release|Any CPU - {CBE6E3D8-230C-4513-B98F-99D82B83B9F7}.Release|x64.Build.0 = Release|Any CPU - {CBE6E3D8-230C-4513-B98F-99D82B83B9F7}.Release|x86.ActiveCfg = Release|Any CPU - {CBE6E3D8-230C-4513-B98F-99D82B83B9F7}.Release|x86.Build.0 = Release|Any CPU - {821C7F88-B775-4D3C-8D89-850B6C34E818}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {821C7F88-B775-4D3C-8D89-850B6C34E818}.Debug|Any CPU.Build.0 = Debug|Any CPU - 
{821C7F88-B775-4D3C-8D89-850B6C34E818}.Debug|x64.ActiveCfg = Debug|Any CPU - {821C7F88-B775-4D3C-8D89-850B6C34E818}.Debug|x64.Build.0 = Debug|Any CPU - {821C7F88-B775-4D3C-8D89-850B6C34E818}.Debug|x86.ActiveCfg = Debug|Any CPU - {821C7F88-B775-4D3C-8D89-850B6C34E818}.Debug|x86.Build.0 = Debug|Any CPU - {821C7F88-B775-4D3C-8D89-850B6C34E818}.Release|Any CPU.ActiveCfg = Release|Any CPU - {821C7F88-B775-4D3C-8D89-850B6C34E818}.Release|Any CPU.Build.0 = Release|Any CPU - {821C7F88-B775-4D3C-8D89-850B6C34E818}.Release|x64.ActiveCfg = Release|Any CPU - {821C7F88-B775-4D3C-8D89-850B6C34E818}.Release|x64.Build.0 = Release|Any CPU - {821C7F88-B775-4D3C-8D89-850B6C34E818}.Release|x86.ActiveCfg = Release|Any CPU - {821C7F88-B775-4D3C-8D89-850B6C34E818}.Release|x86.Build.0 = Release|Any CPU - {3ABEAD26-B056-45CC-8F72-F40C8B8DBCBC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {3ABEAD26-B056-45CC-8F72-F40C8B8DBCBC}.Debug|Any CPU.Build.0 = Debug|Any CPU - {3ABEAD26-B056-45CC-8F72-F40C8B8DBCBC}.Debug|x64.ActiveCfg = Debug|Any CPU - {3ABEAD26-B056-45CC-8F72-F40C8B8DBCBC}.Debug|x64.Build.0 = Debug|Any CPU - {3ABEAD26-B056-45CC-8F72-F40C8B8DBCBC}.Debug|x86.ActiveCfg = Debug|Any CPU - {3ABEAD26-B056-45CC-8F72-F40C8B8DBCBC}.Debug|x86.Build.0 = Debug|Any CPU - {3ABEAD26-B056-45CC-8F72-F40C8B8DBCBC}.Release|Any CPU.ActiveCfg = Release|Any CPU - {3ABEAD26-B056-45CC-8F72-F40C8B8DBCBC}.Release|Any CPU.Build.0 = Release|Any CPU - {3ABEAD26-B056-45CC-8F72-F40C8B8DBCBC}.Release|x64.ActiveCfg = Release|Any CPU - {3ABEAD26-B056-45CC-8F72-F40C8B8DBCBC}.Release|x64.Build.0 = Release|Any CPU - {3ABEAD26-B056-45CC-8F72-F40C8B8DBCBC}.Release|x86.ActiveCfg = Release|Any CPU - {3ABEAD26-B056-45CC-8F72-F40C8B8DBCBC}.Release|x86.Build.0 = Release|Any CPU - {3C500ECB-5422-4FFB-BD3D-48A850763D31}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {3C500ECB-5422-4FFB-BD3D-48A850763D31}.Debug|Any CPU.Build.0 = Debug|Any CPU - {3C500ECB-5422-4FFB-BD3D-48A850763D31}.Debug|x64.ActiveCfg = Debug|Any CPU - {3C500ECB-5422-4FFB-BD3D-48A850763D31}.Debug|x64.Build.0 = Debug|Any CPU - {3C500ECB-5422-4FFB-BD3D-48A850763D31}.Debug|x86.ActiveCfg = Debug|Any CPU - {3C500ECB-5422-4FFB-BD3D-48A850763D31}.Debug|x86.Build.0 = Debug|Any CPU - {3C500ECB-5422-4FFB-BD3D-48A850763D31}.Release|Any CPU.ActiveCfg = Release|Any CPU - {3C500ECB-5422-4FFB-BD3D-48A850763D31}.Release|Any CPU.Build.0 = Release|Any CPU - {3C500ECB-5422-4FFB-BD3D-48A850763D31}.Release|x64.ActiveCfg = Release|Any CPU - {3C500ECB-5422-4FFB-BD3D-48A850763D31}.Release|x64.Build.0 = Release|Any CPU - {3C500ECB-5422-4FFB-BD3D-48A850763D31}.Release|x86.ActiveCfg = Release|Any CPU - {3C500ECB-5422-4FFB-BD3D-48A850763D31}.Release|x86.Build.0 = Release|Any CPU - {D851E54A-5A44-4F74-9FDF-A2C32CACF651}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {D851E54A-5A44-4F74-9FDF-A2C32CACF651}.Debug|Any CPU.Build.0 = Debug|Any CPU - {D851E54A-5A44-4F74-9FDF-A2C32CACF651}.Debug|x64.ActiveCfg = Debug|Any CPU - {D851E54A-5A44-4F74-9FDF-A2C32CACF651}.Debug|x64.Build.0 = Debug|Any CPU - {D851E54A-5A44-4F74-9FDF-A2C32CACF651}.Debug|x86.ActiveCfg = Debug|Any CPU - {D851E54A-5A44-4F74-9FDF-A2C32CACF651}.Debug|x86.Build.0 = Debug|Any CPU - {D851E54A-5A44-4F74-9FDF-A2C32CACF651}.Release|Any CPU.ActiveCfg = Release|Any CPU - {D851E54A-5A44-4F74-9FDF-A2C32CACF651}.Release|Any CPU.Build.0 = Release|Any CPU - {D851E54A-5A44-4F74-9FDF-A2C32CACF651}.Release|x64.ActiveCfg = Release|Any CPU - {D851E54A-5A44-4F74-9FDF-A2C32CACF651}.Release|x64.Build.0 = Release|Any CPU - {D851E54A-5A44-4F74-9FDF-A2C32CACF651}.Release|x86.ActiveCfg = 
Release|Any CPU - {D851E54A-5A44-4F74-9FDF-A2C32CACF651}.Release|x86.Build.0 = Release|Any CPU - {866807B8-8E68-417C-8148-6450DEA68012}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {866807B8-8E68-417C-8148-6450DEA68012}.Debug|Any CPU.Build.0 = Debug|Any CPU - {866807B8-8E68-417C-8148-6450DEA68012}.Debug|x64.ActiveCfg = Debug|Any CPU - {866807B8-8E68-417C-8148-6450DEA68012}.Debug|x64.Build.0 = Debug|Any CPU - {866807B8-8E68-417C-8148-6450DEA68012}.Debug|x86.ActiveCfg = Debug|Any CPU - {866807B8-8E68-417C-8148-6450DEA68012}.Debug|x86.Build.0 = Debug|Any CPU - {866807B8-8E68-417C-8148-6450DEA68012}.Release|Any CPU.ActiveCfg = Release|Any CPU - {866807B8-8E68-417C-8148-6450DEA68012}.Release|Any CPU.Build.0 = Release|Any CPU - {866807B8-8E68-417C-8148-6450DEA68012}.Release|x64.ActiveCfg = Release|Any CPU - {866807B8-8E68-417C-8148-6450DEA68012}.Release|x64.Build.0 = Release|Any CPU - {866807B8-8E68-417C-8148-6450DEA68012}.Release|x86.ActiveCfg = Release|Any CPU - {866807B8-8E68-417C-8148-6450DEA68012}.Release|x86.Build.0 = Release|Any CPU - {20BE41BD-9C32-45B5-882A-C01491979633}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {20BE41BD-9C32-45B5-882A-C01491979633}.Debug|Any CPU.Build.0 = Debug|Any CPU - {20BE41BD-9C32-45B5-882A-C01491979633}.Debug|x64.ActiveCfg = Debug|Any CPU - {20BE41BD-9C32-45B5-882A-C01491979633}.Debug|x64.Build.0 = Debug|Any CPU - {20BE41BD-9C32-45B5-882A-C01491979633}.Debug|x86.ActiveCfg = Debug|Any CPU - {20BE41BD-9C32-45B5-882A-C01491979633}.Debug|x86.Build.0 = Debug|Any CPU - {20BE41BD-9C32-45B5-882A-C01491979633}.Release|Any CPU.ActiveCfg = Release|Any CPU - {20BE41BD-9C32-45B5-882A-C01491979633}.Release|Any CPU.Build.0 = Release|Any CPU - {20BE41BD-9C32-45B5-882A-C01491979633}.Release|x64.ActiveCfg = Release|Any CPU - {20BE41BD-9C32-45B5-882A-C01491979633}.Release|x64.Build.0 = Release|Any CPU - {20BE41BD-9C32-45B5-882A-C01491979633}.Release|x86.ActiveCfg = Release|Any CPU - {20BE41BD-9C32-45B5-882A-C01491979633}.Release|x86.Build.0 = Release|Any CPU - {9E19FDB4-121A-4EF4-8A73-DFCDF04B19ED}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {9E19FDB4-121A-4EF4-8A73-DFCDF04B19ED}.Debug|Any CPU.Build.0 = Debug|Any CPU - {9E19FDB4-121A-4EF4-8A73-DFCDF04B19ED}.Debug|x64.ActiveCfg = Debug|Any CPU - {9E19FDB4-121A-4EF4-8A73-DFCDF04B19ED}.Debug|x64.Build.0 = Debug|Any CPU - {9E19FDB4-121A-4EF4-8A73-DFCDF04B19ED}.Debug|x86.ActiveCfg = Debug|Any CPU - {9E19FDB4-121A-4EF4-8A73-DFCDF04B19ED}.Debug|x86.Build.0 = Debug|Any CPU - {9E19FDB4-121A-4EF4-8A73-DFCDF04B19ED}.Release|Any CPU.ActiveCfg = Release|Any CPU - {9E19FDB4-121A-4EF4-8A73-DFCDF04B19ED}.Release|Any CPU.Build.0 = Release|Any CPU - {9E19FDB4-121A-4EF4-8A73-DFCDF04B19ED}.Release|x64.ActiveCfg = Release|Any CPU - {9E19FDB4-121A-4EF4-8A73-DFCDF04B19ED}.Release|x64.Build.0 = Release|Any CPU - {9E19FDB4-121A-4EF4-8A73-DFCDF04B19ED}.Release|x86.ActiveCfg = Release|Any CPU - {9E19FDB4-121A-4EF4-8A73-DFCDF04B19ED}.Release|x86.Build.0 = Release|Any CPU - {7C3A6012-6FC8-46A9-9966-1AC373614C41}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {7C3A6012-6FC8-46A9-9966-1AC373614C41}.Debug|Any CPU.Build.0 = Debug|Any CPU - {7C3A6012-6FC8-46A9-9966-1AC373614C41}.Debug|x64.ActiveCfg = Debug|Any CPU - {7C3A6012-6FC8-46A9-9966-1AC373614C41}.Debug|x64.Build.0 = Debug|Any CPU - {7C3A6012-6FC8-46A9-9966-1AC373614C41}.Debug|x86.ActiveCfg = Debug|Any CPU - {7C3A6012-6FC8-46A9-9966-1AC373614C41}.Debug|x86.Build.0 = Debug|Any CPU - {7C3A6012-6FC8-46A9-9966-1AC373614C41}.Release|Any CPU.ActiveCfg = Release|Any CPU - {7C3A6012-6FC8-46A9-9966-1AC373614C41}.Release|Any 
CPU.Build.0 = Release|Any CPU - {7C3A6012-6FC8-46A9-9966-1AC373614C41}.Release|x64.ActiveCfg = Release|Any CPU - {7C3A6012-6FC8-46A9-9966-1AC373614C41}.Release|x64.Build.0 = Release|Any CPU - {7C3A6012-6FC8-46A9-9966-1AC373614C41}.Release|x86.ActiveCfg = Release|Any CPU - {7C3A6012-6FC8-46A9-9966-1AC373614C41}.Release|x86.Build.0 = Release|Any CPU - {BC38594B-0B84-4657-9F7B-F2A0FC810F04}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {BC38594B-0B84-4657-9F7B-F2A0FC810F04}.Debug|Any CPU.Build.0 = Debug|Any CPU - {BC38594B-0B84-4657-9F7B-F2A0FC810F04}.Debug|x64.ActiveCfg = Debug|Any CPU - {BC38594B-0B84-4657-9F7B-F2A0FC810F04}.Debug|x64.Build.0 = Debug|Any CPU - {BC38594B-0B84-4657-9F7B-F2A0FC810F04}.Debug|x86.ActiveCfg = Debug|Any CPU - {BC38594B-0B84-4657-9F7B-F2A0FC810F04}.Debug|x86.Build.0 = Debug|Any CPU - {BC38594B-0B84-4657-9F7B-F2A0FC810F04}.Release|Any CPU.ActiveCfg = Release|Any CPU - {BC38594B-0B84-4657-9F7B-F2A0FC810F04}.Release|Any CPU.Build.0 = Release|Any CPU - {BC38594B-0B84-4657-9F7B-F2A0FC810F04}.Release|x64.ActiveCfg = Release|Any CPU - {BC38594B-0B84-4657-9F7B-F2A0FC810F04}.Release|x64.Build.0 = Release|Any CPU - {BC38594B-0B84-4657-9F7B-F2A0FC810F04}.Release|x86.ActiveCfg = Release|Any CPU - {BC38594B-0B84-4657-9F7B-F2A0FC810F04}.Release|x86.Build.0 = Release|Any CPU - {20E0774F-86D5-4CD0-B636-E5212074FDE8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {20E0774F-86D5-4CD0-B636-E5212074FDE8}.Debug|Any CPU.Build.0 = Debug|Any CPU - {20E0774F-86D5-4CD0-B636-E5212074FDE8}.Debug|x64.ActiveCfg = Debug|Any CPU - {20E0774F-86D5-4CD0-B636-E5212074FDE8}.Debug|x64.Build.0 = Debug|Any CPU - {20E0774F-86D5-4CD0-B636-E5212074FDE8}.Debug|x86.ActiveCfg = Debug|Any CPU - {20E0774F-86D5-4CD0-B636-E5212074FDE8}.Debug|x86.Build.0 = Debug|Any CPU - {20E0774F-86D5-4CD0-B636-E5212074FDE8}.Release|Any CPU.ActiveCfg = Release|Any CPU - {20E0774F-86D5-4CD0-B636-E5212074FDE8}.Release|Any CPU.Build.0 = Release|Any CPU - {20E0774F-86D5-4CD0-B636-E5212074FDE8}.Release|x64.ActiveCfg = Release|Any CPU - {20E0774F-86D5-4CD0-B636-E5212074FDE8}.Release|x64.Build.0 = Release|Any CPU - {20E0774F-86D5-4CD0-B636-E5212074FDE8}.Release|x86.ActiveCfg = Release|Any CPU - {20E0774F-86D5-4CD0-B636-E5212074FDE8}.Release|x86.Build.0 = Release|Any CPU - {FE668D8D-AB46-41F4-A82F-8A3330C4D152}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {FE668D8D-AB46-41F4-A82F-8A3330C4D152}.Debug|Any CPU.Build.0 = Debug|Any CPU - {FE668D8D-AB46-41F4-A82F-8A3330C4D152}.Debug|x64.ActiveCfg = Debug|Any CPU - {FE668D8D-AB46-41F4-A82F-8A3330C4D152}.Debug|x64.Build.0 = Debug|Any CPU - {FE668D8D-AB46-41F4-A82F-8A3330C4D152}.Debug|x86.ActiveCfg = Debug|Any CPU - {FE668D8D-AB46-41F4-A82F-8A3330C4D152}.Debug|x86.Build.0 = Debug|Any CPU - {FE668D8D-AB46-41F4-A82F-8A3330C4D152}.Release|Any CPU.ActiveCfg = Release|Any CPU - {FE668D8D-AB46-41F4-A82F-8A3330C4D152}.Release|Any CPU.Build.0 = Release|Any CPU - {FE668D8D-AB46-41F4-A82F-8A3330C4D152}.Release|x64.ActiveCfg = Release|Any CPU - {FE668D8D-AB46-41F4-A82F-8A3330C4D152}.Release|x64.Build.0 = Release|Any CPU - {FE668D8D-AB46-41F4-A82F-8A3330C4D152}.Release|x86.ActiveCfg = Release|Any CPU - {FE668D8D-AB46-41F4-A82F-8A3330C4D152}.Release|x86.Build.0 = Release|Any CPU - {548C296A-476B-433D-9552-923648BDFA97}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {548C296A-476B-433D-9552-923648BDFA97}.Debug|Any CPU.Build.0 = Debug|Any CPU - {548C296A-476B-433D-9552-923648BDFA97}.Debug|x64.ActiveCfg = Debug|Any CPU - {548C296A-476B-433D-9552-923648BDFA97}.Debug|x64.Build.0 = Debug|Any CPU - 
{548C296A-476B-433D-9552-923648BDFA97}.Debug|x86.ActiveCfg = Debug|Any CPU - {548C296A-476B-433D-9552-923648BDFA97}.Debug|x86.Build.0 = Debug|Any CPU - {548C296A-476B-433D-9552-923648BDFA97}.Release|Any CPU.ActiveCfg = Release|Any CPU - {548C296A-476B-433D-9552-923648BDFA97}.Release|Any CPU.Build.0 = Release|Any CPU - {548C296A-476B-433D-9552-923648BDFA97}.Release|x64.ActiveCfg = Release|Any CPU - {548C296A-476B-433D-9552-923648BDFA97}.Release|x64.Build.0 = Release|Any CPU - {548C296A-476B-433D-9552-923648BDFA97}.Release|x86.ActiveCfg = Release|Any CPU - {548C296A-476B-433D-9552-923648BDFA97}.Release|x86.Build.0 = Release|Any CPU - {3510DF3E-E822-4FB1-8C65-ED6DBAD223D4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {3510DF3E-E822-4FB1-8C65-ED6DBAD223D4}.Debug|Any CPU.Build.0 = Debug|Any CPU - {3510DF3E-E822-4FB1-8C65-ED6DBAD223D4}.Debug|x64.ActiveCfg = Debug|Any CPU - {3510DF3E-E822-4FB1-8C65-ED6DBAD223D4}.Debug|x64.Build.0 = Debug|Any CPU - {3510DF3E-E822-4FB1-8C65-ED6DBAD223D4}.Debug|x86.ActiveCfg = Debug|Any CPU - {3510DF3E-E822-4FB1-8C65-ED6DBAD223D4}.Debug|x86.Build.0 = Debug|Any CPU - {3510DF3E-E822-4FB1-8C65-ED6DBAD223D4}.Release|Any CPU.ActiveCfg = Release|Any CPU - {3510DF3E-E822-4FB1-8C65-ED6DBAD223D4}.Release|Any CPU.Build.0 = Release|Any CPU - {3510DF3E-E822-4FB1-8C65-ED6DBAD223D4}.Release|x64.ActiveCfg = Release|Any CPU - {3510DF3E-E822-4FB1-8C65-ED6DBAD223D4}.Release|x64.Build.0 = Release|Any CPU - {3510DF3E-E822-4FB1-8C65-ED6DBAD223D4}.Release|x86.ActiveCfg = Release|Any CPU - {3510DF3E-E822-4FB1-8C65-ED6DBAD223D4}.Release|x86.Build.0 = Release|Any CPU - {C733F161-FCED-4D21-BC83-5CC079E93547}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {C733F161-FCED-4D21-BC83-5CC079E93547}.Debug|Any CPU.Build.0 = Debug|Any CPU - {C733F161-FCED-4D21-BC83-5CC079E93547}.Debug|x64.ActiveCfg = Debug|Any CPU - {C733F161-FCED-4D21-BC83-5CC079E93547}.Debug|x64.Build.0 = Debug|Any CPU - {C733F161-FCED-4D21-BC83-5CC079E93547}.Debug|x86.ActiveCfg = Debug|Any CPU - {C733F161-FCED-4D21-BC83-5CC079E93547}.Debug|x86.Build.0 = Debug|Any CPU - {C733F161-FCED-4D21-BC83-5CC079E93547}.Release|Any CPU.ActiveCfg = Release|Any CPU - {C733F161-FCED-4D21-BC83-5CC079E93547}.Release|Any CPU.Build.0 = Release|Any CPU - {C733F161-FCED-4D21-BC83-5CC079E93547}.Release|x64.ActiveCfg = Release|Any CPU - {C733F161-FCED-4D21-BC83-5CC079E93547}.Release|x64.Build.0 = Release|Any CPU - {C733F161-FCED-4D21-BC83-5CC079E93547}.Release|x86.ActiveCfg = Release|Any CPU - {C733F161-FCED-4D21-BC83-5CC079E93547}.Release|x86.Build.0 = Release|Any CPU - {76E1E74F-41C1-4E24-85EA-ED13F28B80B1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {76E1E74F-41C1-4E24-85EA-ED13F28B80B1}.Debug|Any CPU.Build.0 = Debug|Any CPU - {76E1E74F-41C1-4E24-85EA-ED13F28B80B1}.Debug|x64.ActiveCfg = Debug|Any CPU - {76E1E74F-41C1-4E24-85EA-ED13F28B80B1}.Debug|x64.Build.0 = Debug|Any CPU - {76E1E74F-41C1-4E24-85EA-ED13F28B80B1}.Debug|x86.ActiveCfg = Debug|Any CPU - {76E1E74F-41C1-4E24-85EA-ED13F28B80B1}.Debug|x86.Build.0 = Debug|Any CPU - {76E1E74F-41C1-4E24-85EA-ED13F28B80B1}.Release|Any CPU.ActiveCfg = Release|Any CPU - {76E1E74F-41C1-4E24-85EA-ED13F28B80B1}.Release|Any CPU.Build.0 = Release|Any CPU - {76E1E74F-41C1-4E24-85EA-ED13F28B80B1}.Release|x64.ActiveCfg = Release|Any CPU - {76E1E74F-41C1-4E24-85EA-ED13F28B80B1}.Release|x64.Build.0 = Release|Any CPU - {76E1E74F-41C1-4E24-85EA-ED13F28B80B1}.Release|x86.ActiveCfg = Release|Any CPU - {76E1E74F-41C1-4E24-85EA-ED13F28B80B1}.Release|x86.Build.0 = Release|Any CPU - {EC73D558-0472-49E2-B46E-D26F9686AA9C}.Debug|Any 
CPU.ActiveCfg = Debug|Any CPU - {EC73D558-0472-49E2-B46E-D26F9686AA9C}.Debug|Any CPU.Build.0 = Debug|Any CPU - {EC73D558-0472-49E2-B46E-D26F9686AA9C}.Debug|x64.ActiveCfg = Debug|Any CPU - {EC73D558-0472-49E2-B46E-D26F9686AA9C}.Debug|x64.Build.0 = Debug|Any CPU - {EC73D558-0472-49E2-B46E-D26F9686AA9C}.Debug|x86.ActiveCfg = Debug|Any CPU - {EC73D558-0472-49E2-B46E-D26F9686AA9C}.Debug|x86.Build.0 = Debug|Any CPU - {EC73D558-0472-49E2-B46E-D26F9686AA9C}.Release|Any CPU.ActiveCfg = Release|Any CPU - {EC73D558-0472-49E2-B46E-D26F9686AA9C}.Release|Any CPU.Build.0 = Release|Any CPU - {EC73D558-0472-49E2-B46E-D26F9686AA9C}.Release|x64.ActiveCfg = Release|Any CPU - {EC73D558-0472-49E2-B46E-D26F9686AA9C}.Release|x64.Build.0 = Release|Any CPU - {EC73D558-0472-49E2-B46E-D26F9686AA9C}.Release|x86.ActiveCfg = Release|Any CPU - {EC73D558-0472-49E2-B46E-D26F9686AA9C}.Release|x86.Build.0 = Release|Any CPU - {1E532EAB-8DB7-42DF-A9BD-BBBA08C8148F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {1E532EAB-8DB7-42DF-A9BD-BBBA08C8148F}.Debug|Any CPU.Build.0 = Debug|Any CPU - {1E532EAB-8DB7-42DF-A9BD-BBBA08C8148F}.Debug|x64.ActiveCfg = Debug|Any CPU - {1E532EAB-8DB7-42DF-A9BD-BBBA08C8148F}.Debug|x64.Build.0 = Debug|Any CPU - {1E532EAB-8DB7-42DF-A9BD-BBBA08C8148F}.Debug|x86.ActiveCfg = Debug|Any CPU - {1E532EAB-8DB7-42DF-A9BD-BBBA08C8148F}.Debug|x86.Build.0 = Debug|Any CPU - {1E532EAB-8DB7-42DF-A9BD-BBBA08C8148F}.Release|Any CPU.ActiveCfg = Release|Any CPU - {1E532EAB-8DB7-42DF-A9BD-BBBA08C8148F}.Release|Any CPU.Build.0 = Release|Any CPU - {1E532EAB-8DB7-42DF-A9BD-BBBA08C8148F}.Release|x64.ActiveCfg = Release|Any CPU - {1E532EAB-8DB7-42DF-A9BD-BBBA08C8148F}.Release|x64.Build.0 = Release|Any CPU - {1E532EAB-8DB7-42DF-A9BD-BBBA08C8148F}.Release|x86.ActiveCfg = Release|Any CPU - {1E532EAB-8DB7-42DF-A9BD-BBBA08C8148F}.Release|x86.Build.0 = Release|Any CPU - {D8B22C17-28E9-4059-97C5-4AC4600A2BD5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {D8B22C17-28E9-4059-97C5-4AC4600A2BD5}.Debug|Any CPU.Build.0 = Debug|Any CPU - {D8B22C17-28E9-4059-97C5-4AC4600A2BD5}.Debug|x64.ActiveCfg = Debug|Any CPU - {D8B22C17-28E9-4059-97C5-4AC4600A2BD5}.Debug|x64.Build.0 = Debug|Any CPU - {D8B22C17-28E9-4059-97C5-4AC4600A2BD5}.Debug|x86.ActiveCfg = Debug|Any CPU - {D8B22C17-28E9-4059-97C5-4AC4600A2BD5}.Debug|x86.Build.0 = Debug|Any CPU - {D8B22C17-28E9-4059-97C5-4AC4600A2BD5}.Release|Any CPU.ActiveCfg = Release|Any CPU - {D8B22C17-28E9-4059-97C5-4AC4600A2BD5}.Release|Any CPU.Build.0 = Release|Any CPU - {D8B22C17-28E9-4059-97C5-4AC4600A2BD5}.Release|x64.ActiveCfg = Release|Any CPU - {D8B22C17-28E9-4059-97C5-4AC4600A2BD5}.Release|x64.Build.0 = Release|Any CPU - {D8B22C17-28E9-4059-97C5-4AC4600A2BD5}.Release|x86.ActiveCfg = Release|Any CPU - {D8B22C17-28E9-4059-97C5-4AC4600A2BD5}.Release|x86.Build.0 = Release|Any CPU - {6BE16682-4FB9-49C7-A2B3-ECB4EC5EF8BD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {6BE16682-4FB9-49C7-A2B3-ECB4EC5EF8BD}.Debug|Any CPU.Build.0 = Debug|Any CPU - {6BE16682-4FB9-49C7-A2B3-ECB4EC5EF8BD}.Debug|x64.ActiveCfg = Debug|Any CPU - {6BE16682-4FB9-49C7-A2B3-ECB4EC5EF8BD}.Debug|x64.Build.0 = Debug|Any CPU - {6BE16682-4FB9-49C7-A2B3-ECB4EC5EF8BD}.Debug|x86.ActiveCfg = Debug|Any CPU - {6BE16682-4FB9-49C7-A2B3-ECB4EC5EF8BD}.Debug|x86.Build.0 = Debug|Any CPU - {6BE16682-4FB9-49C7-A2B3-ECB4EC5EF8BD}.Release|Any CPU.ActiveCfg = Release|Any CPU - {6BE16682-4FB9-49C7-A2B3-ECB4EC5EF8BD}.Release|Any CPU.Build.0 = Release|Any CPU - {6BE16682-4FB9-49C7-A2B3-ECB4EC5EF8BD}.Release|x64.ActiveCfg = Release|Any CPU - 
{6BE16682-4FB9-49C7-A2B3-ECB4EC5EF8BD}.Release|x64.Build.0 = Release|Any CPU - {6BE16682-4FB9-49C7-A2B3-ECB4EC5EF8BD}.Release|x86.ActiveCfg = Release|Any CPU - {6BE16682-4FB9-49C7-A2B3-ECB4EC5EF8BD}.Release|x86.Build.0 = Release|Any CPU - {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Debug|Any CPU.Build.0 = Debug|Any CPU - {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Debug|x64.ActiveCfg = Debug|Any CPU - {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Debug|x64.Build.0 = Debug|Any CPU - {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Debug|x86.ActiveCfg = Debug|Any CPU - {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Debug|x86.Build.0 = Debug|Any CPU - {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Release|Any CPU.ActiveCfg = Release|Any CPU - {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Release|Any CPU.Build.0 = Release|Any CPU - {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Release|x64.ActiveCfg = Release|Any CPU - {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Release|x64.Build.0 = Release|Any CPU - {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Release|x86.ActiveCfg = Release|Any CPU - {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Release|x86.Build.0 = Release|Any CPU - {C3AEAEE7-038E-45FF-892B-DB18EE29F790}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {C3AEAEE7-038E-45FF-892B-DB18EE29F790}.Debug|Any CPU.Build.0 = Debug|Any CPU - {C3AEAEE7-038E-45FF-892B-DB18EE29F790}.Debug|x64.ActiveCfg = Debug|Any CPU - {C3AEAEE7-038E-45FF-892B-DB18EE29F790}.Debug|x64.Build.0 = Debug|Any CPU - {C3AEAEE7-038E-45FF-892B-DB18EE29F790}.Debug|x86.ActiveCfg = Debug|Any CPU - {C3AEAEE7-038E-45FF-892B-DB18EE29F790}.Debug|x86.Build.0 = Debug|Any CPU - {C3AEAEE7-038E-45FF-892B-DB18EE29F790}.Release|Any CPU.ActiveCfg = Release|Any CPU - {C3AEAEE7-038E-45FF-892B-DB18EE29F790}.Release|Any CPU.Build.0 = Release|Any CPU - {C3AEAEE7-038E-45FF-892B-DB18EE29F790}.Release|x64.ActiveCfg = Release|Any CPU - {C3AEAEE7-038E-45FF-892B-DB18EE29F790}.Release|x64.Build.0 = Release|Any CPU - {C3AEAEE7-038E-45FF-892B-DB18EE29F790}.Release|x86.ActiveCfg = Release|Any CPU - {C3AEAEE7-038E-45FF-892B-DB18EE29F790}.Release|x86.Build.0 = Release|Any CPU - {7FACF6B4-7E12-4543-AAD4-0072FA1ECE0E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {7FACF6B4-7E12-4543-AAD4-0072FA1ECE0E}.Debug|Any CPU.Build.0 = Debug|Any CPU - {7FACF6B4-7E12-4543-AAD4-0072FA1ECE0E}.Debug|x64.ActiveCfg = Debug|Any CPU - {7FACF6B4-7E12-4543-AAD4-0072FA1ECE0E}.Debug|x64.Build.0 = Debug|Any CPU - {7FACF6B4-7E12-4543-AAD4-0072FA1ECE0E}.Debug|x86.ActiveCfg = Debug|Any CPU - {7FACF6B4-7E12-4543-AAD4-0072FA1ECE0E}.Debug|x86.Build.0 = Debug|Any CPU - {7FACF6B4-7E12-4543-AAD4-0072FA1ECE0E}.Release|Any CPU.ActiveCfg = Release|Any CPU - {7FACF6B4-7E12-4543-AAD4-0072FA1ECE0E}.Release|Any CPU.Build.0 = Release|Any CPU - {7FACF6B4-7E12-4543-AAD4-0072FA1ECE0E}.Release|x64.ActiveCfg = Release|Any CPU - {7FACF6B4-7E12-4543-AAD4-0072FA1ECE0E}.Release|x64.Build.0 = Release|Any CPU - {7FACF6B4-7E12-4543-AAD4-0072FA1ECE0E}.Release|x86.ActiveCfg = Release|Any CPU - {7FACF6B4-7E12-4543-AAD4-0072FA1ECE0E}.Release|x86.Build.0 = Release|Any CPU - {1561D597-922F-486E-ACF4-98250DDC5CDA}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {1561D597-922F-486E-ACF4-98250DDC5CDA}.Debug|Any CPU.Build.0 = Debug|Any CPU - {1561D597-922F-486E-ACF4-98250DDC5CDA}.Debug|x64.ActiveCfg = Debug|Any CPU - {1561D597-922F-486E-ACF4-98250DDC5CDA}.Debug|x64.Build.0 = Debug|Any CPU - {1561D597-922F-486E-ACF4-98250DDC5CDA}.Debug|x86.ActiveCfg = Debug|Any CPU - {1561D597-922F-486E-ACF4-98250DDC5CDA}.Debug|x86.Build.0 = Debug|Any 
CPU - {1561D597-922F-486E-ACF4-98250DDC5CDA}.Release|Any CPU.ActiveCfg = Release|Any CPU - {1561D597-922F-486E-ACF4-98250DDC5CDA}.Release|Any CPU.Build.0 = Release|Any CPU - {1561D597-922F-486E-ACF4-98250DDC5CDA}.Release|x64.ActiveCfg = Release|Any CPU - {1561D597-922F-486E-ACF4-98250DDC5CDA}.Release|x64.Build.0 = Release|Any CPU - {1561D597-922F-486E-ACF4-98250DDC5CDA}.Release|x86.ActiveCfg = Release|Any CPU - {1561D597-922F-486E-ACF4-98250DDC5CDA}.Release|x86.Build.0 = Release|Any CPU - {D7B25EC1-CDC8-4D2D-8569-826568E1AAD2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {D7B25EC1-CDC8-4D2D-8569-826568E1AAD2}.Debug|Any CPU.Build.0 = Debug|Any CPU - {D7B25EC1-CDC8-4D2D-8569-826568E1AAD2}.Debug|x64.ActiveCfg = Debug|Any CPU - {D7B25EC1-CDC8-4D2D-8569-826568E1AAD2}.Debug|x64.Build.0 = Debug|Any CPU - {D7B25EC1-CDC8-4D2D-8569-826568E1AAD2}.Debug|x86.ActiveCfg = Debug|Any CPU - {D7B25EC1-CDC8-4D2D-8569-826568E1AAD2}.Debug|x86.Build.0 = Debug|Any CPU - {D7B25EC1-CDC8-4D2D-8569-826568E1AAD2}.Release|Any CPU.ActiveCfg = Release|Any CPU - {D7B25EC1-CDC8-4D2D-8569-826568E1AAD2}.Release|Any CPU.Build.0 = Release|Any CPU - {D7B25EC1-CDC8-4D2D-8569-826568E1AAD2}.Release|x64.ActiveCfg = Release|Any CPU - {D7B25EC1-CDC8-4D2D-8569-826568E1AAD2}.Release|x64.Build.0 = Release|Any CPU - {D7B25EC1-CDC8-4D2D-8569-826568E1AAD2}.Release|x86.ActiveCfg = Release|Any CPU - {D7B25EC1-CDC8-4D2D-8569-826568E1AAD2}.Release|x86.Build.0 = Release|Any CPU - {9369FA32-E98A-4180-9251-914925188086}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {9369FA32-E98A-4180-9251-914925188086}.Debug|Any CPU.Build.0 = Debug|Any CPU - {9369FA32-E98A-4180-9251-914925188086}.Debug|x64.ActiveCfg = Debug|Any CPU - {9369FA32-E98A-4180-9251-914925188086}.Debug|x64.Build.0 = Debug|Any CPU - {9369FA32-E98A-4180-9251-914925188086}.Debug|x86.ActiveCfg = Debug|Any CPU - {9369FA32-E98A-4180-9251-914925188086}.Debug|x86.Build.0 = Debug|Any CPU - {9369FA32-E98A-4180-9251-914925188086}.Release|Any CPU.ActiveCfg = Release|Any CPU - {9369FA32-E98A-4180-9251-914925188086}.Release|Any CPU.Build.0 = Release|Any CPU - {9369FA32-E98A-4180-9251-914925188086}.Release|x64.ActiveCfg = Release|Any CPU - {9369FA32-E98A-4180-9251-914925188086}.Release|x64.Build.0 = Release|Any CPU - {9369FA32-E98A-4180-9251-914925188086}.Release|x86.ActiveCfg = Release|Any CPU - {9369FA32-E98A-4180-9251-914925188086}.Release|x86.Build.0 = Release|Any CPU - {67650687-2E32-40BB-9849-C4ABBA65A7CF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {67650687-2E32-40BB-9849-C4ABBA65A7CF}.Debug|Any CPU.Build.0 = Debug|Any CPU - {67650687-2E32-40BB-9849-C4ABBA65A7CF}.Debug|x64.ActiveCfg = Debug|Any CPU - {67650687-2E32-40BB-9849-C4ABBA65A7CF}.Debug|x64.Build.0 = Debug|Any CPU - {67650687-2E32-40BB-9849-C4ABBA65A7CF}.Debug|x86.ActiveCfg = Debug|Any CPU - {67650687-2E32-40BB-9849-C4ABBA65A7CF}.Debug|x86.Build.0 = Debug|Any CPU - {67650687-2E32-40BB-9849-C4ABBA65A7CF}.Release|Any CPU.ActiveCfg = Release|Any CPU - {67650687-2E32-40BB-9849-C4ABBA65A7CF}.Release|Any CPU.Build.0 = Release|Any CPU - {67650687-2E32-40BB-9849-C4ABBA65A7CF}.Release|x64.ActiveCfg = Release|Any CPU - {67650687-2E32-40BB-9849-C4ABBA65A7CF}.Release|x64.Build.0 = Release|Any CPU - {67650687-2E32-40BB-9849-C4ABBA65A7CF}.Release|x86.ActiveCfg = Release|Any CPU - {67650687-2E32-40BB-9849-C4ABBA65A7CF}.Release|x86.Build.0 = Release|Any CPU - EndGlobalSection - GlobalSection(SolutionProperties) = preSolution - HideSolutionNode = FALSE - EndGlobalSection - GlobalSection(NestedProjects) = preSolution - {361838C4-72E2-1C48-5D76-CA6D1A861242} = 
{827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {D9F91EA0-8AF5-452A-86D8-52BACB2E39CB} = {361838C4-72E2-1C48-5D76-CA6D1A861242} - {5DBE2E9E-9905-47CE-B8DC-B25409AF1EF2} = {361838C4-72E2-1C48-5D76-CA6D1A861242} - {8BCEAAFC-9168-4CC0-AFDB-177E5F7C15C6} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {46D35B4F-6A04-47FF-958B-5E6A73FCC059} = {361838C4-72E2-1C48-5D76-CA6D1A861242} - {44A1241B-8ECF-4AFA-9972-452C39AD43D6} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {85AB3BB7-C493-4387-B39A-EB299AC37312} = {361838C4-72E2-1C48-5D76-CA6D1A861242} - {5C5E91CA-3F98-4E9A-922B-F6415EABD1A3} = {361838C4-72E2-1C48-5D76-CA6D1A861242} - {93DB06DC-B254-48A9-8F2C-6130A5658F27} = {361838C4-72E2-1C48-5D76-CA6D1A861242} - {03CA315C-8AA1-4CEA-A28B-5EB35C586F4A} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {C6DC3C29-C2AD-4015-8872-42E95A0FE63F} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {40094279-250C-42AE-992A-856718FEFBAC} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {B2967228-F8F7-4931-B257-1C63CB58CE1D} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {6D52EC2B-0A1A-4693-A8EE-5AB32A4A3ED9} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {37F203A3-624E-4794-9C99-16CAC22C17DF} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {3FF93987-A30A-4D50-8815-7CF3BB7CAE05} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {AACE8717-0760-42F2-A225-8FCCE876FB65} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {4AAD6965-E879-44AD-A8ED-E1D713A3CD6D} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {85D82A87-1F4A-4B1B-8422-5B7A7B7704E3} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {FE227DF2-875D-4BEA-A4E0-14EA7F3EC1D0} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {D0FB54BA-4D14-4A32-B09F-7EC94F369460} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {69C9E010-CBDD-4B89-84CF-7AB56D6A078A} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {E471176A-E1F3-4DE5-8D30-0865903A217A} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {FA013511-DF20-45F7-8077-EBA2D6224D64} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {B9F84697-54FE-4648-B173-EE3D904FFA4D} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {6751A76C-8ED8-40F4-AE2B-069DB31395FE} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {DDBFA2EF-9CAE-473F-A438-369CAC25C66A} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {063DE5E1-C8FE-47D0-A12A-22A25CDF2C22} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {35350FAB-FC51-4FE8-81FB-011003134C37} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {1BFC95B4-4C8A-44B2-903A-11FBCAAB9519} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {C4A65377-22F7-4D15-92A3-4F05847D167E} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {BDDE59E1-C643-4C87-8608-0F9A7A54DE09} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {0CC116C8-A7E5-4B94-9688-32920177FF97} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {E8862F6E-85C1-4FDB-AA92-0BB489B7EA1E} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {84DEDF05-A5BD-4644-86B9-6B7918FE3F31} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {9DEB1F54-94B5-40C4-AC44-220E680B016D} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {7C3E87F2-93D8-4968-95E3-52C46947D46C} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {C0504D97-9BCD-4AE4-B0DC-B31C17B150F2} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {31B05493-104F-437F-9FA7-CA5286CE697C} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {937AF12E-D770-4534-8FF8-C59042609C2A} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {5A028B04-9D76-470B-B5B3-766CE4CE860C} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {749DE4C8-F733-43F8-B2A8-6649E71C7570} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {56D2C79E-2737-4FF9-9D19-150065F568D5} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {E41F6DC4-68B5-4EE3-97AE-801D725A2C13} = 
{827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {285F1D0F-501F-4E2E-8FA0-F2CF28AE3798} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {26055403-C7F5-4709-8813-0F7387102791} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {0C00D0DA-C4C3-4B23-941F-A3DB2DBF33AF} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {258327E9-431E-475C-933B-50893676E452} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {42AF60C8-A5E1-40E0-86F8-98256364AF6F} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {88C6A9C3-B433-4C36-8767-429C8C2396F8} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {6B7099AB-01BF-4EC4-87D0-5C9C032266DE} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {14C918EA-693E-41FE-ACAE-2E82DF077BEA} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {81111B26-74F6-4912-9084-7115FD119945} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {80E2D661-FF3E-4A10-A2DF-AFD4F3D433FE} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {8D0F501D-01B1-4E24-958B-FAF35B267705} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {5BA91095-7F10-4717-B296-49DFBFC1C9C2} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {99616566-4EF1-4DC7-B655-825FE43D203D} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {EE3C03AD-E604-4C57-9B78-CF7F49FBFCB0} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {A3B19095-2D95-4B09-B07E-2C082C72394B} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {807837AF-B392-4589-ADF1-3FDB34D6C5BF} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {64EAFDCF-8283-4D5C-AC78-7969D5FE926A} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {68F4D8A1-E32F-487A-B460-325F36989BE3} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {4A3DA4AE-7B88-4674-A7E2-F5D42B8256F2} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {606C751B-7CF1-47CF-A25C-9248A55C814F} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {0BE44D0A-CC4B-4E84-8AF3-D8D99551C431} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {CC4CCE5F-55BC-4745-A204-4FA92BC1BADC} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {99BAE717-9A2E-41F5-9ECC-5FB97E4A6066} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {5CCE0DB7-C115-4B21-A7AE-C8488C22A853} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {A09C9E66-5496-47EC-8B23-9EEB7CBDC75E} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {06DC817F-A936-4F83-8929-E00622B32245} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {2C999476-0291-4161-B3E9-1AA99A3B1139} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {476EAADA-1B39-4049-ABE4-CCAC21FFE9E2} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {0EF56124-E6E8-4E89-95DD-5A5D5FF05A98} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {0DBB9FC4-2E46-4C3E-BE88-2A8DCB59DB7D} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {8A40142F-E8C8-4E86-BE70-7DD4AB1FFDEE} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {C9D20F74-EE5F-4C9E-9AB1-C03E90B34F92} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {50140A32-6D3C-47DB-983A-7166CBA51845} = {361838C4-72E2-1C48-5D76-CA6D1A861242} - {031979F2-6ABA-444F-A6A4-80115DC487CE} = {361838C4-72E2-1C48-5D76-CA6D1A861242} - {D71B0DA5-80A3-419E-898D-40E77A9A7F19} = {361838C4-72E2-1C48-5D76-CA6D1A861242} - {B2C877D9-B521-4901-8817-76B5DAA62FCE} = {361838C4-72E2-1C48-5D76-CA6D1A861242} - {08D3B6D0-3CE8-4F24-A6F1-BCAB01AD6278} = {361838C4-72E2-1C48-5D76-CA6D1A861242} - {7116DD6B-2491-49E1-AB27-5210E949F753} = {361838C4-72E2-1C48-5D76-CA6D1A861242} - {7DBE31A6-D2FD-499E-B675-4092723175AD} = {361838C4-72E2-1C48-5D76-CA6D1A861242} - {D99E6EAE-D278-4480-AA67-85F025383E47} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {D3825714-3DDA-44B7-A99C-5F3E65716691} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {FAB78D21-7372-48FE-B2C3-DE1807F1157D} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} - {EADFA337-B0FA-4712-A24A-7C08235BDF98} = 
{827E0CD3-B72D-47B6-A68D-7590B98EB39B}
- {110F7EC2-3149-4D1B-A972-E69E79F1EBF5} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B}
- {A2E3F03A-0CAD-4E2A-8C71-DDEBB1B7E4F7} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B}
- {3A1AF0AD-4DAE-4D82-9CCF-2DCB83CC3679} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B}
- {F1DF0F07-1BCB-4B55-8353-07BF8A4B2A67} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B}
- {31277AFF-9BFF-4C17-8593-B562A385058E} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B}
- {3A8F090F-678D-46E2-8899-67402129749C} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B}
- {05D844B6-51C1-4926-919C-D99E24FB3BC9} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B}
- {03E15545-D6A0-4287-A88C-6EDE77C0DCBE} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B}
- {BA47D456-4657-4C86-A665-21293E3AC47F} = {78C966F5-2242-D8EC-ADCA-A1A9C7F723A6}
- {49EF86AC-1CC2-4A24-8637-C5151E23DF9D} = {78C966F5-2242-D8EC-ADCA-A1A9C7F723A6}
- {C22333B3-D132-4960-A490-6BEF1EB1C917} = {78C966F5-2242-D8EC-ADCA-A1A9C7F723A6}
- {B8B15A8D-F647-41AE-A55F-A283A47E97C4} = {78C966F5-2242-D8EC-ADCA-A1A9C7F723A6}
- {F1F029E6-2E4B-4A42-8D8F-AB325EE3B608} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B}
- {CBE6E3D8-230C-4513-B98F-99D82B83B9F7} = {F1F029E6-2E4B-4A42-8D8F-AB325EE3B608}
- {821C7F88-B775-4D3C-8D89-850B6C34E818} = {F1F029E6-2E4B-4A42-8D8F-AB325EE3B608}
- {CBDF819E-923F-A07F-78D9-D599DD28197E} = {1553F566-661E-A2F5-811B-F74BF45C44CC}
- {D8B22C17-28E9-4059-97C5-4AC4600A2BD5} = {CBDF819E-923F-A07F-78D9-D599DD28197E}
- EndGlobalSection
-EndGlobal
+
+Microsoft Visual Studio Solution File, Format Version 12.00
+# Visual Studio Version 17
+VisualStudioVersion = 17.0.31903.59
+MinimumVisualStudioVersion = 10.0.40219.1
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "src", "src", "{827E0CD3-B72D-47B6-A68D-7590B98EB39B}"
+EndProject
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "StellaOps.Authority", "StellaOps.Authority", "{361838C4-72E2-1C48-5D76-CA6D1A861242}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Abstractions", "StellaOps.Authority\StellaOps.Auth.Abstractions\StellaOps.Auth.Abstractions.csproj", "{D9F91EA0-8AF5-452A-86D8-52BACB2E39CB}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Client", "StellaOps.Authority\StellaOps.Auth.Client\StellaOps.Auth.Client.csproj", "{5DBE2E9E-9905-47CE-B8DC-B25409AF1EF2}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Configuration", "StellaOps.Configuration\StellaOps.Configuration.csproj", "{8BCEAAFC-9168-4CC0-AFDB-177E5F7C15C6}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.ServerIntegration", "StellaOps.Authority\StellaOps.Auth.ServerIntegration\StellaOps.Auth.ServerIntegration.csproj", "{46D35B4F-6A04-47FF-958B-5E6A73FCC059}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.DependencyInjection", "StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj", "{44A1241B-8ECF-4AFA-9972-452C39AD43D6}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Authority", "StellaOps.Authority\StellaOps.Authority\StellaOps.Authority.csproj", "{85AB3BB7-C493-4387-B39A-EB299AC37312}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Authority.Plugins.Abstractions", "StellaOps.Authority\StellaOps.Authority.Plugins.Abstractions\StellaOps.Authority.Plugins.Abstractions.csproj", "{5C5E91CA-3F98-4E9A-922B-F6415EABD1A3}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Authority.Plugin.Standard", "StellaOps.Authority\StellaOps.Authority.Plugin.Standard\StellaOps.Authority.Plugin.Standard.csproj", "{93DB06DC-B254-48A9-8F2C-6130A5658F27}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Plugin", "StellaOps.Plugin\StellaOps.Plugin.csproj", "{03CA315C-8AA1-4CEA-A28B-5EB35C586F4A}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Plugin.Tests", "StellaOps.Plugin.Tests\StellaOps.Plugin.Tests.csproj", "{C6DC3C29-C2AD-4015-8872-42E95A0FE63F}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cli", "StellaOps.Cli\StellaOps.Cli.csproj", "{40094279-250C-42AE-992A-856718FEFBAC}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cli.Tests", "StellaOps.Cli.Tests\StellaOps.Cli.Tests.csproj", "{B2967228-F8F7-4931-B257-1C63CB58CE1D}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Testing", "StellaOps.Concelier.Testing\StellaOps.Concelier.Testing.csproj", "{6D52EC2B-0A1A-4693-A8EE-5AB32A4A3ED9}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Common", "StellaOps.Concelier.Connector.Common\StellaOps.Concelier.Connector.Common.csproj", "{37F203A3-624E-4794-9C99-16CAC22C17DF}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Storage.Mongo", "StellaOps.Concelier.Storage.Mongo\StellaOps.Concelier.Storage.Mongo.csproj", "{3FF93987-A30A-4D50-8815-7CF3BB7CAE05}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Core", "StellaOps.Concelier.Core\StellaOps.Concelier.Core.csproj", "{AACE8717-0760-42F2-A225-8FCCE876FB65}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Models", "StellaOps.Concelier.Models\StellaOps.Concelier.Models.csproj", "{4AAD6965-E879-44AD-A8ED-E1D713A3CD6D}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Normalization", "StellaOps.Concelier.Normalization\StellaOps.Concelier.Normalization.csproj", "{85D82A87-1F4A-4B1B-8422-5B7A7B7704E3}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Core.Tests", "StellaOps.Concelier.Core.Tests\StellaOps.Concelier.Core.Tests.csproj", "{FE227DF2-875D-4BEA-A4E0-14EA7F3EC1D0}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Exporter.Json", "StellaOps.Concelier.Exporter.Json\StellaOps.Concelier.Exporter.Json.csproj", "{D0FB54BA-4D14-4A32-B09F-7EC94F369460}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Exporter.Json.Tests", "StellaOps.Concelier.Exporter.Json.Tests\StellaOps.Concelier.Exporter.Json.Tests.csproj", "{69C9E010-CBDD-4B89-84CF-7AB56D6A078A}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Exporter.TrivyDb", "StellaOps.Concelier.Exporter.TrivyDb\StellaOps.Concelier.Exporter.TrivyDb.csproj", "{E471176A-E1F3-4DE5-8D30-0865903A217A}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Exporter.TrivyDb.Tests", "StellaOps.Concelier.Exporter.TrivyDb.Tests\StellaOps.Concelier.Exporter.TrivyDb.Tests.csproj", "{FA013511-DF20-45F7-8077-EBA2D6224D64}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Merge", "StellaOps.Concelier.Merge\StellaOps.Concelier.Merge.csproj", "{B9F84697-54FE-4648-B173-EE3D904FFA4D}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Merge.Tests", "StellaOps.Concelier.Merge.Tests\StellaOps.Concelier.Merge.Tests.csproj", "{6751A76C-8ED8-40F4-AE2B-069DB31395FE}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Models.Tests", "StellaOps.Concelier.Models.Tests\StellaOps.Concelier.Models.Tests.csproj", "{DDBFA2EF-9CAE-473F-A438-369CAC25C66A}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Normalization.Tests", "StellaOps.Concelier.Normalization.Tests\StellaOps.Concelier.Normalization.Tests.csproj", "{063DE5E1-C8FE-47D0-A12A-22A25CDF2C22}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Acsc", "StellaOps.Concelier.Connector.Acsc\StellaOps.Concelier.Connector.Acsc.csproj", "{35350FAB-FC51-4FE8-81FB-011003134C37}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Cccs", "StellaOps.Concelier.Connector.Cccs\StellaOps.Concelier.Connector.Cccs.csproj", "{1BFC95B4-4C8A-44B2-903A-11FBCAAB9519}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.CertBund", "StellaOps.Concelier.Connector.CertBund\StellaOps.Concelier.Connector.CertBund.csproj", "{C4A65377-22F7-4D15-92A3-4F05847D167E}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.CertCc", "StellaOps.Concelier.Connector.CertCc\StellaOps.Concelier.Connector.CertCc.csproj", "{BDDE59E1-C643-4C87-8608-0F9A7A54DE09}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.CertFr", "StellaOps.Concelier.Connector.CertFr\StellaOps.Concelier.Connector.CertFr.csproj", "{0CC116C8-A7E5-4B94-9688-32920177FF97}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.CertFr.Tests", "StellaOps.Concelier.Connector.CertFr.Tests\StellaOps.Concelier.Connector.CertFr.Tests.csproj", "{E8862F6E-85C1-4FDB-AA92-0BB489B7EA1E}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.CertIn", "StellaOps.Concelier.Connector.CertIn\StellaOps.Concelier.Connector.CertIn.csproj", "{84DEDF05-A5BD-4644-86B9-6B7918FE3F31}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.CertIn.Tests", "StellaOps.Concelier.Connector.CertIn.Tests\StellaOps.Concelier.Connector.CertIn.Tests.csproj", "{9DEB1F54-94B5-40C4-AC44-220E680B016D}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Common.Tests", "StellaOps.Concelier.Connector.Common.Tests\StellaOps.Concelier.Connector.Common.Tests.csproj", "{7C3E87F2-93D8-4968-95E3-52C46947D46C}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Cve", "StellaOps.Concelier.Connector.Cve\StellaOps.Concelier.Connector.Cve.csproj", "{C0504D97-9BCD-4AE4-B0DC-B31C17B150F2}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Distro.Debian", "StellaOps.Concelier.Connector.Distro.Debian\StellaOps.Concelier.Connector.Distro.Debian.csproj", "{31B05493-104F-437F-9FA7-CA5286CE697C}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Distro.Debian.Tests", "StellaOps.Concelier.Connector.Distro.Debian.Tests\StellaOps.Concelier.Connector.Distro.Debian.Tests.csproj", "{937AF12E-D770-4534-8FF8-C59042609C2A}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Distro.RedHat", "StellaOps.Concelier.Connector.Distro.RedHat\StellaOps.Concelier.Connector.Distro.RedHat.csproj", "{5A028B04-9D76-470B-B5B3-766CE4CE860C}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Distro.RedHat.Tests", "StellaOps.Concelier.Connector.Distro.RedHat.Tests\StellaOps.Concelier.Connector.Distro.RedHat.Tests.csproj", "{749DE4C8-F733-43F8-B2A8-6649E71C7570}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Distro.Suse", "StellaOps.Concelier.Connector.Distro.Suse\StellaOps.Concelier.Connector.Distro.Suse.csproj", "{56D2C79E-2737-4FF9-9D19-150065F568D5}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Distro.Suse.Tests", "StellaOps.Concelier.Connector.Distro.Suse.Tests\StellaOps.Concelier.Connector.Distro.Suse.Tests.csproj", "{E41F6DC4-68B5-4EE3-97AE-801D725A2C13}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Distro.Ubuntu", "StellaOps.Concelier.Connector.Distro.Ubuntu\StellaOps.Concelier.Connector.Distro.Ubuntu.csproj", "{285F1D0F-501F-4E2E-8FA0-F2CF28AE3798}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Distro.Ubuntu.Tests", "StellaOps.Concelier.Connector.Distro.Ubuntu.Tests\StellaOps.Concelier.Connector.Distro.Ubuntu.Tests.csproj", "{26055403-C7F5-4709-8813-0F7387102791}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Ghsa", "StellaOps.Concelier.Connector.Ghsa\StellaOps.Concelier.Connector.Ghsa.csproj", "{0C00D0DA-C4C3-4B23-941F-A3DB2DBF33AF}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Ics.Cisa", "StellaOps.Concelier.Connector.Ics.Cisa\StellaOps.Concelier.Connector.Ics.Cisa.csproj", "{258327E9-431E-475C-933B-50893676E452}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Ics.Kaspersky", "StellaOps.Concelier.Connector.Ics.Kaspersky\StellaOps.Concelier.Connector.Ics.Kaspersky.csproj", "{42AF60C8-A5E1-40E0-86F8-98256364AF6F}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Ics.Kaspersky.Tests", "StellaOps.Concelier.Connector.Ics.Kaspersky.Tests\StellaOps.Concelier.Connector.Ics.Kaspersky.Tests.csproj", "{88C6A9C3-B433-4C36-8767-429C8C2396F8}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Jvn", "StellaOps.Concelier.Connector.Jvn\StellaOps.Concelier.Connector.Jvn.csproj", "{6B7099AB-01BF-4EC4-87D0-5C9C032266DE}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Jvn.Tests", "StellaOps.Concelier.Connector.Jvn.Tests\StellaOps.Concelier.Connector.Jvn.Tests.csproj", "{14C918EA-693E-41FE-ACAE-2E82DF077BEA}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Kev", "StellaOps.Concelier.Connector.Kev\StellaOps.Concelier.Connector.Kev.csproj", "{81111B26-74F6-4912-9084-7115FD119945}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Kisa", "StellaOps.Concelier.Connector.Kisa\StellaOps.Concelier.Connector.Kisa.csproj", "{80E2D661-FF3E-4A10-A2DF-AFD4F3D433FE}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Nvd", "StellaOps.Concelier.Connector.Nvd\StellaOps.Concelier.Connector.Nvd.csproj", "{8D0F501D-01B1-4E24-958B-FAF35B267705}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Nvd.Tests", "StellaOps.Concelier.Connector.Nvd.Tests\StellaOps.Concelier.Connector.Nvd.Tests.csproj", "{5BA91095-7F10-4717-B296-49DFBFC1C9C2}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Osv", "StellaOps.Concelier.Connector.Osv\StellaOps.Concelier.Connector.Osv.csproj", "{99616566-4EF1-4DC7-B655-825FE43D203D}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Osv.Tests", "StellaOps.Concelier.Connector.Osv.Tests\StellaOps.Concelier.Connector.Osv.Tests.csproj", "{EE3C03AD-E604-4C57-9B78-CF7F49FBFCB0}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Ru.Bdu", "StellaOps.Concelier.Connector.Ru.Bdu\StellaOps.Concelier.Connector.Ru.Bdu.csproj", "{A3B19095-2D95-4B09-B07E-2C082C72394B}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Ru.Nkcki", "StellaOps.Concelier.Connector.Ru.Nkcki\StellaOps.Concelier.Connector.Ru.Nkcki.csproj", "{807837AF-B392-4589-ADF1-3FDB34D6C5BF}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Adobe", "StellaOps.Concelier.Connector.Vndr.Adobe\StellaOps.Concelier.Connector.Vndr.Adobe.csproj", "{64EAFDCF-8283-4D5C-AC78-7969D5FE926A}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Adobe.Tests", "StellaOps.Concelier.Connector.Vndr.Adobe.Tests\StellaOps.Concelier.Connector.Vndr.Adobe.Tests.csproj", "{68F4D8A1-E32F-487A-B460-325F36989BE3}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Apple", "StellaOps.Concelier.Connector.Vndr.Apple\StellaOps.Concelier.Connector.Vndr.Apple.csproj", "{4A3DA4AE-7B88-4674-A7E2-F5D42B8256F2}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Chromium", "StellaOps.Concelier.Connector.Vndr.Chromium\StellaOps.Concelier.Connector.Vndr.Chromium.csproj", "{606C751B-7CF1-47CF-A25C-9248A55C814F}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Chromium.Tests", "StellaOps.Concelier.Connector.Vndr.Chromium.Tests\StellaOps.Concelier.Connector.Vndr.Chromium.Tests.csproj", "{0BE44D0A-CC4B-4E84-8AF3-D8D99551C431}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Cisco", "StellaOps.Concelier.Connector.Vndr.Cisco\StellaOps.Concelier.Connector.Vndr.Cisco.csproj", "{CC4CCE5F-55BC-4745-A204-4FA92BC1BADC}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Cisco.Tests", "StellaOps.Concelier.Connector.Vndr.Cisco.Tests\StellaOps.Concelier.Connector.Vndr.Cisco.Tests.csproj", "{99BAE717-9A2E-41F5-9ECC-5FB97E4A6066}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Msrc", "StellaOps.Concelier.Connector.Vndr.Msrc\StellaOps.Concelier.Connector.Vndr.Msrc.csproj", "{5CCE0DB7-C115-4B21-A7AE-C8488C22A853}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Oracle", "StellaOps.Concelier.Connector.Vndr.Oracle\StellaOps.Concelier.Connector.Vndr.Oracle.csproj", "{A09C9E66-5496-47EC-8B23-9EEB7CBDC75E}"
+EndProject
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = 
"StellaOps.Concelier.Connector.Vndr.Oracle.Tests", "StellaOps.Concelier.Connector.Vndr.Oracle.Tests\StellaOps.Concelier.Connector.Vndr.Oracle.Tests.csproj", "{06DC817F-A936-4F83-8929-E00622B32245}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Vmware", "StellaOps.Concelier.Connector.Vndr.Vmware\StellaOps.Concelier.Connector.Vndr.Vmware.csproj", "{2C999476-0291-4161-B3E9-1AA99A3B1139}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Vndr.Vmware.Tests", "StellaOps.Concelier.Connector.Vndr.Vmware.Tests\StellaOps.Concelier.Connector.Vndr.Vmware.Tests.csproj", "{476EAADA-1B39-4049-ABE4-CCAC21FFE9E2}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Storage.Mongo.Tests", "StellaOps.Concelier.Storage.Mongo.Tests\StellaOps.Concelier.Storage.Mongo.Tests.csproj", "{0EF56124-E6E8-4E89-95DD-5A5D5FF05A98}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.WebService", "StellaOps.Concelier.WebService\StellaOps.Concelier.WebService.csproj", "{0DBB9FC4-2E46-4C3E-BE88-2A8DCB59DB7D}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.WebService.Tests", "StellaOps.Concelier.WebService.Tests\StellaOps.Concelier.WebService.Tests.csproj", "{8A40142F-E8C8-4E86-BE70-7DD4AB1FFDEE}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Configuration.Tests", "StellaOps.Configuration.Tests\StellaOps.Configuration.Tests.csproj", "{C9D20F74-EE5F-4C9E-9AB1-C03E90B34F92}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Authority.Plugins.Abstractions.Tests", "StellaOps.Authority\StellaOps.Authority.Plugins.Abstractions.Tests\StellaOps.Authority.Plugins.Abstractions.Tests.csproj", "{50140A32-6D3C-47DB-983A-7166CBA51845}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Authority.Tests", "StellaOps.Authority\StellaOps.Authority.Tests\StellaOps.Authority.Tests.csproj", "{031979F2-6ABA-444F-A6A4-80115DC487CE}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Authority.Plugin.Standard.Tests", "StellaOps.Authority\StellaOps.Authority.Plugin.Standard.Tests\StellaOps.Authority.Plugin.Standard.Tests.csproj", "{D71B0DA5-80A3-419E-898D-40E77A9A7F19}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Authority.Storage.Mongo", "StellaOps.Authority\StellaOps.Authority.Storage.Mongo\StellaOps.Authority.Storage.Mongo.csproj", "{B2C877D9-B521-4901-8817-76B5DAA62FCE}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Abstractions.Tests", "StellaOps.Authority\StellaOps.Auth.Abstractions.Tests\StellaOps.Auth.Abstractions.Tests.csproj", "{08D3B6D0-3CE8-4F24-A6F1-BCAB01AD6278}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.ServerIntegration.Tests", "StellaOps.Authority\StellaOps.Auth.ServerIntegration.Tests\StellaOps.Auth.ServerIntegration.Tests.csproj", "{7116DD6B-2491-49E1-AB27-5210E949F753}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Client.Tests", "StellaOps.Authority\StellaOps.Auth.Client.Tests\StellaOps.Auth.Client.Tests.csproj", "{7DBE31A6-D2FD-499E-B675-4092723175AD}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Kev.Tests", "StellaOps.Concelier.Connector.Kev.Tests\StellaOps.Concelier.Connector.Kev.Tests.csproj", 
"{D99E6EAE-D278-4480-AA67-85F025383E47}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Cve.Tests", "StellaOps.Concelier.Connector.Cve.Tests\StellaOps.Concelier.Connector.Cve.Tests.csproj", "{D3825714-3DDA-44B7-A99C-5F3E65716691}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.Connector.Ghsa.Tests", "StellaOps.Concelier.Connector.Ghsa.Tests\StellaOps.Concelier.Connector.Ghsa.Tests.csproj", "{FAB78D21-7372-48FE-B2C3-DE1807F1157D}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cryptography", "StellaOps.Cryptography\StellaOps.Cryptography.csproj", "{EADFA337-B0FA-4712-A24A-7C08235BDF98}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cryptography.Tests", "StellaOps.Cryptography.Tests\StellaOps.Cryptography.Tests.csproj", "{110F7EC2-3149-4D1B-A972-E69E79F1EBF5}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cryptography.DependencyInjection", "StellaOps.Cryptography.DependencyInjection\StellaOps.Cryptography.DependencyInjection.csproj", "{B84FE2DD-A1AD-437C-95CF-89C1DCCFDF6F}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Core", "StellaOps.Excititor.Core\StellaOps.Excititor.Core.csproj", "{3288F0F8-FF86-4DB3-A1FD-8EB51893E8C2}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Core.Tests", "StellaOps.Excititor.Core.Tests\StellaOps.Excititor.Core.Tests.csproj", "{680CA103-DCE8-4D02-8979-72DEA5BE8C00}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Policy", "StellaOps.Excititor.Policy\StellaOps.Excititor.Policy.csproj", "{7F4B19D4-569A-4CCF-B481-EBE04860451A}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Policy.Tests", "StellaOps.Excititor.Policy.Tests\StellaOps.Excititor.Policy.Tests.csproj", "{DE9863B5-E6D6-4C5F-B52A-ED9E964008A3}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Storage.Mongo", "StellaOps.Excititor.Storage.Mongo\StellaOps.Excititor.Storage.Mongo.csproj", "{E380F242-031E-483E-8570-0EF7EA525C4F}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Export", "StellaOps.Excititor.Export\StellaOps.Excititor.Export.csproj", "{42582C16-F5A9-417F-9D33-BC489925324F}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Export.Tests", "StellaOps.Excititor.Export.Tests\StellaOps.Excititor.Export.Tests.csproj", "{06F40DA8-FEFA-4C2B-907B-155BD92BB859}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Connectors.RedHat.CSAF", "StellaOps.Excititor.Connectors.RedHat.CSAF\StellaOps.Excititor.Connectors.RedHat.CSAF.csproj", "{A2E3F03A-0CAD-4E2A-8C71-DDEBB1B7E4F7}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Connectors.RedHat.CSAF.Tests", "StellaOps.Excititor.Connectors.RedHat.CSAF.Tests\StellaOps.Excititor.Connectors.RedHat.CSAF.Tests.csproj", "{3A1AF0AD-4DAE-4D82-9CCF-2DCB83CC3679}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Connectors.Abstractions", "StellaOps.Excititor.Connectors.Abstractions\StellaOps.Excititor.Connectors.Abstractions.csproj", "{F1DF0F07-1BCB-4B55-8353-07BF8A4B2A67}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Worker", "StellaOps.Excititor.Worker\StellaOps.Excititor.Worker.csproj", 
"{781EC793-1DB0-4E31-95BC-12A2B373045F}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Worker.Tests", "StellaOps.Excititor.Worker.Tests\StellaOps.Excititor.Worker.Tests.csproj", "{BB863E0C-50FF-41AE-9C13-4E8A1BABC62C}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Formats.CSAF", "StellaOps.Excititor.Formats.CSAF\StellaOps.Excititor.Formats.CSAF.csproj", "{14E9D043-F0EF-4F68-AE83-D6F579119D9A}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Formats.CSAF.Tests", "StellaOps.Excititor.Formats.CSAF.Tests\StellaOps.Excititor.Formats.CSAF.Tests.csproj", "{27E94B6E-DEF8-4B89-97CB-424703790ECE}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Formats.CycloneDX", "StellaOps.Excititor.Formats.CycloneDX\StellaOps.Excititor.Formats.CycloneDX.csproj", "{361E3E23-B215-423D-9906-A84171E20AD3}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Formats.CycloneDX.Tests", "StellaOps.Excititor.Formats.CycloneDX.Tests\StellaOps.Excititor.Formats.CycloneDX.Tests.csproj", "{7A7A3480-C6C3-4A9F-AF46-1889424B9AC2}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Formats.OpenVEX", "StellaOps.Excititor.Formats.OpenVEX\StellaOps.Excititor.Formats.OpenVEX.csproj", "{C3EAFCB8-0394-4B74-B9A6-3DBA4509201F}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Formats.OpenVEX.Tests", "StellaOps.Excititor.Formats.OpenVEX.Tests\StellaOps.Excititor.Formats.OpenVEX.Tests.csproj", "{E86CF4A6-2463-4589-A9D8-9DF557C48367}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Connectors.Cisco.CSAF", "StellaOps.Excititor.Connectors.Cisco.CSAF\StellaOps.Excititor.Connectors.Cisco.CSAF.csproj", "{B308B94C-E01F-4449-A5A6-CD7A48E52D15}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Connectors.Cisco.CSAF.Tests", "StellaOps.Excititor.Connectors.Cisco.CSAF.Tests\StellaOps.Excititor.Connectors.Cisco.CSAF.Tests.csproj", "{9FBA3EC4-D794-48BD-82FA-0289E5A2A5FF}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Connectors.SUSE.RancherVEXHub", "StellaOps.Excititor.Connectors.SUSE.RancherVEXHub\StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.csproj", "{E076DC9C-B436-44BF-B02E-FA565086F805}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Tests", "StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Tests\StellaOps.Excititor.Connectors.SUSE.RancherVEXHub.Tests.csproj", "{55500025-FE82-4F97-A261-9BAEA4B10845}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Connectors.MSRC.CSAF", "StellaOps.Excititor.Connectors.MSRC.CSAF\StellaOps.Excititor.Connectors.MSRC.CSAF.csproj", "{CD12875F-9367-41BD-810C-7FBE76314F17}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Connectors.MSRC.CSAF.Tests", "StellaOps.Excititor.Connectors.MSRC.CSAF.Tests\StellaOps.Excititor.Connectors.MSRC.CSAF.Tests.csproj", "{063D3280-9918-465A-AF2D-3650A2A50D03}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Connectors.Oracle.CSAF", "StellaOps.Excititor.Connectors.Oracle.CSAF\StellaOps.Excititor.Connectors.Oracle.CSAF.csproj", "{A3EEE400-3655-4B34-915A-598E60CD55FB}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = 
"StellaOps.Excititor.Connectors.Oracle.CSAF.Tests", "StellaOps.Excititor.Connectors.Oracle.CSAF.Tests\StellaOps.Excititor.Connectors.Oracle.CSAF.Tests.csproj", "{577025AD-2FDD-42DF-BFA2-3FC095B50539}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Connectors.Ubuntu.CSAF", "StellaOps.Excititor.Connectors.Ubuntu.CSAF\StellaOps.Excititor.Connectors.Ubuntu.CSAF.csproj", "{DD3B2076-E5E0-4533-8D27-7724225D7758}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests", "StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests\StellaOps.Excititor.Connectors.Ubuntu.CSAF.Tests.csproj", "{CADA1364-8EB1-479E-AB6F-4105C26335C8}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Core", "StellaOps.Scanner.Core\StellaOps.Scanner.Core.csproj", "{8CC4441E-9D1A-4E00-831B-34828A3F9446}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Core.Tests", "StellaOps.Scanner.Core.Tests\StellaOps.Scanner.Core.Tests.csproj", "{01B8AC3F-1B97-4F79-93C6-BE1CBA26FE17}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "StellaOps.Authority", "StellaOps.Authority", "{BDB24B64-FE4E-C4BD-9F80-9428F98EDF6F}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Policy", "StellaOps.Policy\StellaOps.Policy.csproj", "{37BB9502-CCD1-425A-BF45-D56968B0C2F9}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Policy.Tests", "StellaOps.Policy.Tests\StellaOps.Policy.Tests.csproj", "{015A7A95-2C07-4C7F-8048-DB591AAC5FE5}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.WebService", "StellaOps.Scanner.WebService\StellaOps.Scanner.WebService.csproj", "{EF59DAD6-30CE-47CB-862A-DD79F31BFDE4}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.WebService.Tests", "StellaOps.Scanner.WebService.Tests\StellaOps.Scanner.WebService.Tests.csproj", "{27D951AD-696D-4330-B4F5-F8F81344C191}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Storage", "StellaOps.Scanner.Storage\StellaOps.Scanner.Storage.csproj", "{31277AFF-9BFF-4C17-8593-B562A385058E}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Storage.Tests", "StellaOps.Scanner.Storage.Tests\StellaOps.Scanner.Storage.Tests.csproj", "{3A8F090F-678D-46E2-8899-67402129749C}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Worker", "StellaOps.Scanner.Worker\StellaOps.Scanner.Worker.csproj", "{19FACEC7-D6D4-40F5-84AD-14E2983F18F7}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Worker.Tests", "StellaOps.Scanner.Worker.Tests\StellaOps.Scanner.Worker.Tests.csproj", "{8342286A-BE36-4ACA-87FF-EBEB4E268498}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.EntryTrace", "StellaOps.Scanner.EntryTrace\StellaOps.Scanner.EntryTrace.csproj", "{05D844B6-51C1-4926-919C-D99E24FB3BC9}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.EntryTrace.Tests", "StellaOps.Scanner.EntryTrace.Tests\StellaOps.Scanner.EntryTrace.Tests.csproj", "{03E15545-D6A0-4287-A88C-6EDE77C0DCBE}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang", "StellaOps.Scanner.Analyzers.Lang\StellaOps.Scanner.Analyzers.Lang.csproj", "{A072C46F-BA45-419E-B1B6-416919F78440}" +EndProject 
+Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Tests", "StellaOps.Scanner.Analyzers.Lang.Tests\StellaOps.Scanner.Analyzers.Lang.Tests.csproj", "{6DE0F48D-8CEA-44C1-82FF-0DC891B33FE3}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Diff", "StellaOps.Scanner.Diff\StellaOps.Scanner.Diff.csproj", "{10088067-7B8F-4D2E-A8E1-ED546DC17369}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Diff.Tests", "StellaOps.Scanner.Diff.Tests\StellaOps.Scanner.Diff.Tests.csproj", "{E014565C-2456-4BD0-9481-557F939C1E36}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Emit", "StellaOps.Scanner.Emit\StellaOps.Scanner.Emit.csproj", "{44825FDA-68D2-4675-8B1D-6D5303DC38CF}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Emit.Tests", "StellaOps.Scanner.Emit.Tests\StellaOps.Scanner.Emit.Tests.csproj", "{6D46DB08-C8D1-4F67-A6D0-D50FE84F19E0}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Cache", "StellaOps.Scanner.Cache\StellaOps.Scanner.Cache.csproj", "{5E5EB0A7-7A19-4144-81FE-13C31DB678B2}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Cache.Tests", "StellaOps.Scanner.Cache.Tests\StellaOps.Scanner.Cache.Tests.csproj", "{7F3D4F33-341A-44A1-96EA-A1729BC2E5D8}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Java", "StellaOps.Scanner.Analyzers.Lang.Java\StellaOps.Scanner.Analyzers.Lang.Java.csproj", "{B86C287A-734E-4527-A03E-6B970F22E27E}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.OS", "StellaOps.Scanner.Analyzers.OS\StellaOps.Scanner.Analyzers.OS.csproj", "{E23FBF14-EE5B-49D4-8938-E8368CF4A4B5}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.OS.Apk", "StellaOps.Scanner.Analyzers.OS.Apk\StellaOps.Scanner.Analyzers.OS.Apk.csproj", "{50D014B5-99A6-46FC-B745-26687595B293}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.OS.Dpkg", "StellaOps.Scanner.Analyzers.OS.Dpkg\StellaOps.Scanner.Analyzers.OS.Dpkg.csproj", "{D99C1F78-67EA-40E7-BD4C-985592F5265A}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.OS.Rpm", "StellaOps.Scanner.Analyzers.OS.Rpm\StellaOps.Scanner.Analyzers.OS.Rpm.csproj", "{1CBC0B9C-A96B-4143-B70F-37C69229FFF2}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.OS.Tests", "StellaOps.Scanner.Analyzers.OS.Tests\StellaOps.Scanner.Analyzers.OS.Tests.csproj", "{760E2855-31B3-4CCB-BACB-34B7196A59B8}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Node", "StellaOps.Scanner.Analyzers.Lang.Node\StellaOps.Scanner.Analyzers.Lang.Node.csproj", "{3F688F21-7E31-4781-8995-9DD34276773F}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Python", "StellaOps.Scanner.Analyzers.Lang.Python\StellaOps.Scanner.Analyzers.Lang.Python.csproj", "{80AD7C4D-E4C6-4700-87AD-77B5698B338F}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Go", "StellaOps.Scanner.Analyzers.Lang.Go\StellaOps.Scanner.Analyzers.Lang.Go.csproj", "{60ABAB54-2EE9-4A16-A109-67F7B6F29184}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = 
"StellaOps.Scanner.Analyzers.Lang.DotNet", "StellaOps.Scanner.Analyzers.Lang.DotNet\StellaOps.Scanner.Analyzers.Lang.DotNet.csproj", "{D32C1D26-C9A1-4F2A-9DBA-DBF0353E3972}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Rust", "StellaOps.Scanner.Analyzers.Lang.Rust\StellaOps.Scanner.Analyzers.Lang.Rust.csproj", "{5CA4E28E-6305-4B21-AD2E-0DF24D47A65B}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Security", "StellaOps.Auth.Security\StellaOps.Auth.Security.csproj", "{05475C0A-C225-4F07-A3C7-9E17E660042E}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "StellaOps.Attestor", "StellaOps.Attestor", "{78C966F5-2242-D8EC-ADCA-A1A9C7F723A6}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Attestor.Core", "StellaOps.Attestor\StellaOps.Attestor.Core\StellaOps.Attestor.Core.csproj", "{BA47D456-4657-4C86-A665-21293E3AC47F}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Attestor.Infrastructure", "StellaOps.Attestor\StellaOps.Attestor.Infrastructure\StellaOps.Attestor.Infrastructure.csproj", "{49EF86AC-1CC2-4A24-8637-C5151E23DF9D}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Attestor.WebService", "StellaOps.Attestor\StellaOps.Attestor.WebService\StellaOps.Attestor.WebService.csproj", "{C22333B3-D132-4960-A490-6BEF1EB1C917}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Attestor.Tests", "StellaOps.Attestor\StellaOps.Attestor.Tests\StellaOps.Attestor.Tests.csproj", "{B8B15A8D-F647-41AE-A55F-A283A47E97C4}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "StellaOps.Zastava", "StellaOps.Zastava", "{F1F029E6-2E4B-4A42-8D8F-AB325EE3B608}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Zastava.Core", "StellaOps.Zastava.Core\StellaOps.Zastava.Core.csproj", "{CBE6E3D8-230C-4513-B98F-99D82B83B9F7}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Zastava.Core.Tests", "StellaOps.Zastava.Core.Tests\StellaOps.Zastava.Core.Tests.csproj", "{821C7F88-B775-4D3C-8D89-850B6C34E818}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Zastava.Webhook", "StellaOps.Zastava.Webhook\StellaOps.Zastava.Webhook.csproj", "{3ABEAD26-B056-45CC-8F72-F40C8B8DBCBC}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Zastava.Webhook.Tests", "StellaOps.Zastava.Webhook.Tests\StellaOps.Zastava.Webhook.Tests.csproj", "{3C500ECB-5422-4FFB-BD3D-48A850763D31}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cli.Plugins.NonCore", "StellaOps.Cli.Plugins.NonCore\StellaOps.Cli.Plugins.NonCore.csproj", "{D851E54A-5A44-4F74-9FDF-A2C32CACF651}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Java.Tests", "StellaOps.Scanner.Analyzers.Lang.Java.Tests\StellaOps.Scanner.Analyzers.Lang.Java.Tests.csproj", "{866807B8-8E68-417C-8148-6450DEA68012}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Node.Tests", "StellaOps.Scanner.Analyzers.Lang.Node.Tests\StellaOps.Scanner.Analyzers.Lang.Node.Tests.csproj", "{20BE41BD-9C32-45B5-882A-C01491979633}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Python.Tests", "StellaOps.Scanner.Analyzers.Lang.Python.Tests\StellaOps.Scanner.Analyzers.Lang.Python.Tests.csproj", 
"{9E19FDB4-121A-4EF4-8A73-DFCDF04B19ED}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scanner.Analyzers.Lang.Go.Tests", "StellaOps.Scanner.Analyzers.Lang.Go.Tests\StellaOps.Scanner.Analyzers.Lang.Go.Tests.csproj", "{7C3A6012-6FC8-46A9-9966-1AC373614C41}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Zastava.Observer", "StellaOps.Zastava.Observer\StellaOps.Zastava.Observer.csproj", "{BC38594B-0B84-4657-9F7B-F2A0FC810F04}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Zastava.Observer.Tests", "StellaOps.Zastava.Observer.Tests\StellaOps.Zastava.Observer.Tests.csproj", "{20E0774F-86D5-4CD0-B636-E5212074FDE8}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Policy.Engine", "StellaOps.Policy.Engine\StellaOps.Policy.Engine.csproj", "{FE668D8D-AB46-41F4-A82F-8A3330C4D152}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cartographer", "StellaOps.Cartographer\StellaOps.Cartographer.csproj", "{548C296A-476B-433D-9552-923648BDFA97}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.SbomService", "StellaOps.SbomService\StellaOps.SbomService.csproj", "{3510DF3E-E822-4FB1-8C65-ED6DBAD223D4}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.WebService", "StellaOps.Scheduler.WebService\StellaOps.Scheduler.WebService.csproj", "{C733F161-FCED-4D21-BC83-5CC079E93547}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Scheduler.WebService.Tests", "StellaOps.Scheduler.WebService.Tests\StellaOps.Scheduler.WebService.Tests.csproj", "{76E1E74F-41C1-4E24-85EA-ED13F28B80B1}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Registry.TokenService", "StellaOps.Registry.TokenService\StellaOps.Registry.TokenService.csproj", "{EC73D558-0472-49E2-B46E-D26F9686AA9C}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Registry.TokenService.Tests", "StellaOps.Registry.TokenService.Tests\StellaOps.Registry.TokenService.Tests.csproj", "{1E532EAB-8DB7-42DF-A9BD-BBBA08C8148F}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "StellaOps.Bench", "StellaOps.Bench", "{1553F566-661E-A2F5-811B-F74BF45C44CC}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "PolicyEngine", "PolicyEngine", "{CBDF819E-923F-A07F-78D9-D599DD28197E}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Bench.PolicyEngine", "StellaOps.Bench\PolicyEngine\StellaOps.Bench.PolicyEngine\StellaOps.Bench.PolicyEngine.csproj", "{D8B22C17-28E9-4059-97C5-4AC4600A2BD5}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Aoc", "StellaOps.Aoc\StellaOps.Aoc.csproj", "{6BE16682-4FB9-49C7-A2B3-ECB4EC5EF8BD}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Aoc.Tests", "StellaOps.Aoc.Tests\StellaOps.Aoc.Tests.csproj", "{4D167781-1AC0-46CF-A32E-1B6E048940B2}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.RawModels", "StellaOps.Concelier.RawModels\StellaOps.Concelier.RawModels.csproj", "{C3AEAEE7-038E-45FF-892B-DB18EE29F790}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Concelier.RawModels.Tests", "StellaOps.Concelier.RawModels.Tests\StellaOps.Concelier.RawModels.Tests.csproj", "{7FACF6B4-7E12-4543-AAD4-0072FA1ECE0E}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Signals", 
"StellaOps.Signals\StellaOps.Signals.csproj", "{1561D597-922F-486E-ACF4-98250DDC5CDA}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Signals.Tests", "StellaOps.Signals.Tests\StellaOps.Signals.Tests.csproj", "{D7B25EC1-CDC8-4D2D-8569-826568E1AAD2}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Policy.Gateway", "StellaOps.Policy.Gateway\StellaOps.Policy.Gateway.csproj", "{9369FA32-E98A-4180-9251-914925188086}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Policy.Gateway.Tests", "StellaOps.Policy.Gateway.Tests\StellaOps.Policy.Gateway.Tests.csproj", "{67650687-2E32-40BB-9849-C4ABBA65A7CF}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|Any CPU = Release|Any CPU + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {D9F91EA0-8AF5-452A-86D8-52BACB2E39CB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D9F91EA0-8AF5-452A-86D8-52BACB2E39CB}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D9F91EA0-8AF5-452A-86D8-52BACB2E39CB}.Debug|x64.ActiveCfg = Debug|Any CPU + {D9F91EA0-8AF5-452A-86D8-52BACB2E39CB}.Debug|x64.Build.0 = Debug|Any CPU + {D9F91EA0-8AF5-452A-86D8-52BACB2E39CB}.Debug|x86.ActiveCfg = Debug|Any CPU + {D9F91EA0-8AF5-452A-86D8-52BACB2E39CB}.Debug|x86.Build.0 = Debug|Any CPU + {D9F91EA0-8AF5-452A-86D8-52BACB2E39CB}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D9F91EA0-8AF5-452A-86D8-52BACB2E39CB}.Release|Any CPU.Build.0 = Release|Any CPU + {D9F91EA0-8AF5-452A-86D8-52BACB2E39CB}.Release|x64.ActiveCfg = Release|Any CPU + {D9F91EA0-8AF5-452A-86D8-52BACB2E39CB}.Release|x64.Build.0 = Release|Any CPU + {D9F91EA0-8AF5-452A-86D8-52BACB2E39CB}.Release|x86.ActiveCfg = Release|Any CPU + {D9F91EA0-8AF5-452A-86D8-52BACB2E39CB}.Release|x86.Build.0 = Release|Any CPU + {5DBE2E9E-9905-47CE-B8DC-B25409AF1EF2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {5DBE2E9E-9905-47CE-B8DC-B25409AF1EF2}.Debug|Any CPU.Build.0 = Debug|Any CPU + {5DBE2E9E-9905-47CE-B8DC-B25409AF1EF2}.Debug|x64.ActiveCfg = Debug|Any CPU + {5DBE2E9E-9905-47CE-B8DC-B25409AF1EF2}.Debug|x64.Build.0 = Debug|Any CPU + {5DBE2E9E-9905-47CE-B8DC-B25409AF1EF2}.Debug|x86.ActiveCfg = Debug|Any CPU + {5DBE2E9E-9905-47CE-B8DC-B25409AF1EF2}.Debug|x86.Build.0 = Debug|Any CPU + {5DBE2E9E-9905-47CE-B8DC-B25409AF1EF2}.Release|Any CPU.ActiveCfg = Release|Any CPU + {5DBE2E9E-9905-47CE-B8DC-B25409AF1EF2}.Release|Any CPU.Build.0 = Release|Any CPU + {5DBE2E9E-9905-47CE-B8DC-B25409AF1EF2}.Release|x64.ActiveCfg = Release|Any CPU + {5DBE2E9E-9905-47CE-B8DC-B25409AF1EF2}.Release|x64.Build.0 = Release|Any CPU + {5DBE2E9E-9905-47CE-B8DC-B25409AF1EF2}.Release|x86.ActiveCfg = Release|Any CPU + {5DBE2E9E-9905-47CE-B8DC-B25409AF1EF2}.Release|x86.Build.0 = Release|Any CPU + {8BCEAAFC-9168-4CC0-AFDB-177E5F7C15C6}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {8BCEAAFC-9168-4CC0-AFDB-177E5F7C15C6}.Debug|Any CPU.Build.0 = Debug|Any CPU + {8BCEAAFC-9168-4CC0-AFDB-177E5F7C15C6}.Debug|x64.ActiveCfg = Debug|Any CPU + {8BCEAAFC-9168-4CC0-AFDB-177E5F7C15C6}.Debug|x64.Build.0 = Debug|Any CPU + {8BCEAAFC-9168-4CC0-AFDB-177E5F7C15C6}.Debug|x86.ActiveCfg = Debug|Any CPU + {8BCEAAFC-9168-4CC0-AFDB-177E5F7C15C6}.Debug|x86.Build.0 = Debug|Any CPU + {8BCEAAFC-9168-4CC0-AFDB-177E5F7C15C6}.Release|Any CPU.ActiveCfg = Release|Any CPU + {8BCEAAFC-9168-4CC0-AFDB-177E5F7C15C6}.Release|Any CPU.Build.0 = Release|Any CPU 
+ {8BCEAAFC-9168-4CC0-AFDB-177E5F7C15C6}.Release|x64.ActiveCfg = Release|Any CPU + {8BCEAAFC-9168-4CC0-AFDB-177E5F7C15C6}.Release|x64.Build.0 = Release|Any CPU + {8BCEAAFC-9168-4CC0-AFDB-177E5F7C15C6}.Release|x86.ActiveCfg = Release|Any CPU + {8BCEAAFC-9168-4CC0-AFDB-177E5F7C15C6}.Release|x86.Build.0 = Release|Any CPU + {46D35B4F-6A04-47FF-958B-5E6A73FCC059}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {46D35B4F-6A04-47FF-958B-5E6A73FCC059}.Debug|Any CPU.Build.0 = Debug|Any CPU + {46D35B4F-6A04-47FF-958B-5E6A73FCC059}.Debug|x64.ActiveCfg = Debug|Any CPU + {46D35B4F-6A04-47FF-958B-5E6A73FCC059}.Debug|x64.Build.0 = Debug|Any CPU + {46D35B4F-6A04-47FF-958B-5E6A73FCC059}.Debug|x86.ActiveCfg = Debug|Any CPU + {46D35B4F-6A04-47FF-958B-5E6A73FCC059}.Debug|x86.Build.0 = Debug|Any CPU + {46D35B4F-6A04-47FF-958B-5E6A73FCC059}.Release|Any CPU.ActiveCfg = Release|Any CPU + {46D35B4F-6A04-47FF-958B-5E6A73FCC059}.Release|Any CPU.Build.0 = Release|Any CPU + {46D35B4F-6A04-47FF-958B-5E6A73FCC059}.Release|x64.ActiveCfg = Release|Any CPU + {46D35B4F-6A04-47FF-958B-5E6A73FCC059}.Release|x64.Build.0 = Release|Any CPU + {46D35B4F-6A04-47FF-958B-5E6A73FCC059}.Release|x86.ActiveCfg = Release|Any CPU + {46D35B4F-6A04-47FF-958B-5E6A73FCC059}.Release|x86.Build.0 = Release|Any CPU + {44A1241B-8ECF-4AFA-9972-452C39AD43D6}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {44A1241B-8ECF-4AFA-9972-452C39AD43D6}.Debug|Any CPU.Build.0 = Debug|Any CPU + {44A1241B-8ECF-4AFA-9972-452C39AD43D6}.Debug|x64.ActiveCfg = Debug|Any CPU + {44A1241B-8ECF-4AFA-9972-452C39AD43D6}.Debug|x64.Build.0 = Debug|Any CPU + {44A1241B-8ECF-4AFA-9972-452C39AD43D6}.Debug|x86.ActiveCfg = Debug|Any CPU + {44A1241B-8ECF-4AFA-9972-452C39AD43D6}.Debug|x86.Build.0 = Debug|Any CPU + {44A1241B-8ECF-4AFA-9972-452C39AD43D6}.Release|Any CPU.ActiveCfg = Release|Any CPU + {44A1241B-8ECF-4AFA-9972-452C39AD43D6}.Release|Any CPU.Build.0 = Release|Any CPU + {44A1241B-8ECF-4AFA-9972-452C39AD43D6}.Release|x64.ActiveCfg = Release|Any CPU + {44A1241B-8ECF-4AFA-9972-452C39AD43D6}.Release|x64.Build.0 = Release|Any CPU + {44A1241B-8ECF-4AFA-9972-452C39AD43D6}.Release|x86.ActiveCfg = Release|Any CPU + {44A1241B-8ECF-4AFA-9972-452C39AD43D6}.Release|x86.Build.0 = Release|Any CPU + {85AB3BB7-C493-4387-B39A-EB299AC37312}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {85AB3BB7-C493-4387-B39A-EB299AC37312}.Debug|Any CPU.Build.0 = Debug|Any CPU + {85AB3BB7-C493-4387-B39A-EB299AC37312}.Debug|x64.ActiveCfg = Debug|Any CPU + {85AB3BB7-C493-4387-B39A-EB299AC37312}.Debug|x64.Build.0 = Debug|Any CPU + {85AB3BB7-C493-4387-B39A-EB299AC37312}.Debug|x86.ActiveCfg = Debug|Any CPU + {85AB3BB7-C493-4387-B39A-EB299AC37312}.Debug|x86.Build.0 = Debug|Any CPU + {85AB3BB7-C493-4387-B39A-EB299AC37312}.Release|Any CPU.ActiveCfg = Release|Any CPU + {85AB3BB7-C493-4387-B39A-EB299AC37312}.Release|Any CPU.Build.0 = Release|Any CPU + {85AB3BB7-C493-4387-B39A-EB299AC37312}.Release|x64.ActiveCfg = Release|Any CPU + {85AB3BB7-C493-4387-B39A-EB299AC37312}.Release|x64.Build.0 = Release|Any CPU + {85AB3BB7-C493-4387-B39A-EB299AC37312}.Release|x86.ActiveCfg = Release|Any CPU + {85AB3BB7-C493-4387-B39A-EB299AC37312}.Release|x86.Build.0 = Release|Any CPU + {5C5E91CA-3F98-4E9A-922B-F6415EABD1A3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {5C5E91CA-3F98-4E9A-922B-F6415EABD1A3}.Debug|Any CPU.Build.0 = Debug|Any CPU + {5C5E91CA-3F98-4E9A-922B-F6415EABD1A3}.Debug|x64.ActiveCfg = Debug|Any CPU + {5C5E91CA-3F98-4E9A-922B-F6415EABD1A3}.Debug|x64.Build.0 = Debug|Any CPU + {5C5E91CA-3F98-4E9A-922B-F6415EABD1A3}.Debug|x86.ActiveCfg = 
Debug|Any CPU + {5C5E91CA-3F98-4E9A-922B-F6415EABD1A3}.Debug|x86.Build.0 = Debug|Any CPU + {5C5E91CA-3F98-4E9A-922B-F6415EABD1A3}.Release|Any CPU.ActiveCfg = Release|Any CPU + {5C5E91CA-3F98-4E9A-922B-F6415EABD1A3}.Release|Any CPU.Build.0 = Release|Any CPU + {5C5E91CA-3F98-4E9A-922B-F6415EABD1A3}.Release|x64.ActiveCfg = Release|Any CPU + {5C5E91CA-3F98-4E9A-922B-F6415EABD1A3}.Release|x64.Build.0 = Release|Any CPU + {5C5E91CA-3F98-4E9A-922B-F6415EABD1A3}.Release|x86.ActiveCfg = Release|Any CPU + {5C5E91CA-3F98-4E9A-922B-F6415EABD1A3}.Release|x86.Build.0 = Release|Any CPU + {93DB06DC-B254-48A9-8F2C-6130A5658F27}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {93DB06DC-B254-48A9-8F2C-6130A5658F27}.Debug|Any CPU.Build.0 = Debug|Any CPU + {93DB06DC-B254-48A9-8F2C-6130A5658F27}.Debug|x64.ActiveCfg = Debug|Any CPU + {93DB06DC-B254-48A9-8F2C-6130A5658F27}.Debug|x64.Build.0 = Debug|Any CPU + {93DB06DC-B254-48A9-8F2C-6130A5658F27}.Debug|x86.ActiveCfg = Debug|Any CPU + {93DB06DC-B254-48A9-8F2C-6130A5658F27}.Debug|x86.Build.0 = Debug|Any CPU + {93DB06DC-B254-48A9-8F2C-6130A5658F27}.Release|Any CPU.ActiveCfg = Release|Any CPU + {93DB06DC-B254-48A9-8F2C-6130A5658F27}.Release|Any CPU.Build.0 = Release|Any CPU + {93DB06DC-B254-48A9-8F2C-6130A5658F27}.Release|x64.ActiveCfg = Release|Any CPU + {93DB06DC-B254-48A9-8F2C-6130A5658F27}.Release|x64.Build.0 = Release|Any CPU + {93DB06DC-B254-48A9-8F2C-6130A5658F27}.Release|x86.ActiveCfg = Release|Any CPU + {93DB06DC-B254-48A9-8F2C-6130A5658F27}.Release|x86.Build.0 = Release|Any CPU + {03CA315C-8AA1-4CEA-A28B-5EB35C586F4A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {03CA315C-8AA1-4CEA-A28B-5EB35C586F4A}.Debug|Any CPU.Build.0 = Debug|Any CPU + {03CA315C-8AA1-4CEA-A28B-5EB35C586F4A}.Debug|x64.ActiveCfg = Debug|Any CPU + {03CA315C-8AA1-4CEA-A28B-5EB35C586F4A}.Debug|x64.Build.0 = Debug|Any CPU + {03CA315C-8AA1-4CEA-A28B-5EB35C586F4A}.Debug|x86.ActiveCfg = Debug|Any CPU + {03CA315C-8AA1-4CEA-A28B-5EB35C586F4A}.Debug|x86.Build.0 = Debug|Any CPU + {03CA315C-8AA1-4CEA-A28B-5EB35C586F4A}.Release|Any CPU.ActiveCfg = Release|Any CPU + {03CA315C-8AA1-4CEA-A28B-5EB35C586F4A}.Release|Any CPU.Build.0 = Release|Any CPU + {03CA315C-8AA1-4CEA-A28B-5EB35C586F4A}.Release|x64.ActiveCfg = Release|Any CPU + {03CA315C-8AA1-4CEA-A28B-5EB35C586F4A}.Release|x64.Build.0 = Release|Any CPU + {03CA315C-8AA1-4CEA-A28B-5EB35C586F4A}.Release|x86.ActiveCfg = Release|Any CPU + {03CA315C-8AA1-4CEA-A28B-5EB35C586F4A}.Release|x86.Build.0 = Release|Any CPU + {C6DC3C29-C2AD-4015-8872-42E95A0FE63F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C6DC3C29-C2AD-4015-8872-42E95A0FE63F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C6DC3C29-C2AD-4015-8872-42E95A0FE63F}.Debug|x64.ActiveCfg = Debug|Any CPU + {C6DC3C29-C2AD-4015-8872-42E95A0FE63F}.Debug|x64.Build.0 = Debug|Any CPU + {C6DC3C29-C2AD-4015-8872-42E95A0FE63F}.Debug|x86.ActiveCfg = Debug|Any CPU + {C6DC3C29-C2AD-4015-8872-42E95A0FE63F}.Debug|x86.Build.0 = Debug|Any CPU + {C6DC3C29-C2AD-4015-8872-42E95A0FE63F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C6DC3C29-C2AD-4015-8872-42E95A0FE63F}.Release|Any CPU.Build.0 = Release|Any CPU + {C6DC3C29-C2AD-4015-8872-42E95A0FE63F}.Release|x64.ActiveCfg = Release|Any CPU + {C6DC3C29-C2AD-4015-8872-42E95A0FE63F}.Release|x64.Build.0 = Release|Any CPU + {C6DC3C29-C2AD-4015-8872-42E95A0FE63F}.Release|x86.ActiveCfg = Release|Any CPU + {C6DC3C29-C2AD-4015-8872-42E95A0FE63F}.Release|x86.Build.0 = Release|Any CPU + {40094279-250C-42AE-992A-856718FEFBAC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + 
{40094279-250C-42AE-992A-856718FEFBAC}.Debug|Any CPU.Build.0 = Debug|Any CPU + {40094279-250C-42AE-992A-856718FEFBAC}.Debug|x64.ActiveCfg = Debug|Any CPU + {40094279-250C-42AE-992A-856718FEFBAC}.Debug|x64.Build.0 = Debug|Any CPU + {40094279-250C-42AE-992A-856718FEFBAC}.Debug|x86.ActiveCfg = Debug|Any CPU + {40094279-250C-42AE-992A-856718FEFBAC}.Debug|x86.Build.0 = Debug|Any CPU + {40094279-250C-42AE-992A-856718FEFBAC}.Release|Any CPU.ActiveCfg = Release|Any CPU + {40094279-250C-42AE-992A-856718FEFBAC}.Release|Any CPU.Build.0 = Release|Any CPU + {40094279-250C-42AE-992A-856718FEFBAC}.Release|x64.ActiveCfg = Release|Any CPU + {40094279-250C-42AE-992A-856718FEFBAC}.Release|x64.Build.0 = Release|Any CPU + {40094279-250C-42AE-992A-856718FEFBAC}.Release|x86.ActiveCfg = Release|Any CPU + {40094279-250C-42AE-992A-856718FEFBAC}.Release|x86.Build.0 = Release|Any CPU + {B2967228-F8F7-4931-B257-1C63CB58CE1D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B2967228-F8F7-4931-B257-1C63CB58CE1D}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B2967228-F8F7-4931-B257-1C63CB58CE1D}.Debug|x64.ActiveCfg = Debug|Any CPU + {B2967228-F8F7-4931-B257-1C63CB58CE1D}.Debug|x64.Build.0 = Debug|Any CPU + {B2967228-F8F7-4931-B257-1C63CB58CE1D}.Debug|x86.ActiveCfg = Debug|Any CPU + {B2967228-F8F7-4931-B257-1C63CB58CE1D}.Debug|x86.Build.0 = Debug|Any CPU + {B2967228-F8F7-4931-B257-1C63CB58CE1D}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B2967228-F8F7-4931-B257-1C63CB58CE1D}.Release|Any CPU.Build.0 = Release|Any CPU + {B2967228-F8F7-4931-B257-1C63CB58CE1D}.Release|x64.ActiveCfg = Release|Any CPU + {B2967228-F8F7-4931-B257-1C63CB58CE1D}.Release|x64.Build.0 = Release|Any CPU + {B2967228-F8F7-4931-B257-1C63CB58CE1D}.Release|x86.ActiveCfg = Release|Any CPU + {B2967228-F8F7-4931-B257-1C63CB58CE1D}.Release|x86.Build.0 = Release|Any CPU + {6D52EC2B-0A1A-4693-A8EE-5AB32A4A3ED9}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {6D52EC2B-0A1A-4693-A8EE-5AB32A4A3ED9}.Debug|Any CPU.Build.0 = Debug|Any CPU + {6D52EC2B-0A1A-4693-A8EE-5AB32A4A3ED9}.Debug|x64.ActiveCfg = Debug|Any CPU + {6D52EC2B-0A1A-4693-A8EE-5AB32A4A3ED9}.Debug|x64.Build.0 = Debug|Any CPU + {6D52EC2B-0A1A-4693-A8EE-5AB32A4A3ED9}.Debug|x86.ActiveCfg = Debug|Any CPU + {6D52EC2B-0A1A-4693-A8EE-5AB32A4A3ED9}.Debug|x86.Build.0 = Debug|Any CPU + {6D52EC2B-0A1A-4693-A8EE-5AB32A4A3ED9}.Release|Any CPU.ActiveCfg = Release|Any CPU + {6D52EC2B-0A1A-4693-A8EE-5AB32A4A3ED9}.Release|Any CPU.Build.0 = Release|Any CPU + {6D52EC2B-0A1A-4693-A8EE-5AB32A4A3ED9}.Release|x64.ActiveCfg = Release|Any CPU + {6D52EC2B-0A1A-4693-A8EE-5AB32A4A3ED9}.Release|x64.Build.0 = Release|Any CPU + {6D52EC2B-0A1A-4693-A8EE-5AB32A4A3ED9}.Release|x86.ActiveCfg = Release|Any CPU + {6D52EC2B-0A1A-4693-A8EE-5AB32A4A3ED9}.Release|x86.Build.0 = Release|Any CPU + {37F203A3-624E-4794-9C99-16CAC22C17DF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {37F203A3-624E-4794-9C99-16CAC22C17DF}.Debug|Any CPU.Build.0 = Debug|Any CPU + {37F203A3-624E-4794-9C99-16CAC22C17DF}.Debug|x64.ActiveCfg = Debug|Any CPU + {37F203A3-624E-4794-9C99-16CAC22C17DF}.Debug|x64.Build.0 = Debug|Any CPU + {37F203A3-624E-4794-9C99-16CAC22C17DF}.Debug|x86.ActiveCfg = Debug|Any CPU + {37F203A3-624E-4794-9C99-16CAC22C17DF}.Debug|x86.Build.0 = Debug|Any CPU + {37F203A3-624E-4794-9C99-16CAC22C17DF}.Release|Any CPU.ActiveCfg = Release|Any CPU + {37F203A3-624E-4794-9C99-16CAC22C17DF}.Release|Any CPU.Build.0 = Release|Any CPU + {37F203A3-624E-4794-9C99-16CAC22C17DF}.Release|x64.ActiveCfg = Release|Any CPU + {37F203A3-624E-4794-9C99-16CAC22C17DF}.Release|x64.Build.0 = 
Release|Any CPU + {37F203A3-624E-4794-9C99-16CAC22C17DF}.Release|x86.ActiveCfg = Release|Any CPU + {37F203A3-624E-4794-9C99-16CAC22C17DF}.Release|x86.Build.0 = Release|Any CPU + {3FF93987-A30A-4D50-8815-7CF3BB7CAE05}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {3FF93987-A30A-4D50-8815-7CF3BB7CAE05}.Debug|Any CPU.Build.0 = Debug|Any CPU + {3FF93987-A30A-4D50-8815-7CF3BB7CAE05}.Debug|x64.ActiveCfg = Debug|Any CPU + {3FF93987-A30A-4D50-8815-7CF3BB7CAE05}.Debug|x64.Build.0 = Debug|Any CPU + {3FF93987-A30A-4D50-8815-7CF3BB7CAE05}.Debug|x86.ActiveCfg = Debug|Any CPU + {3FF93987-A30A-4D50-8815-7CF3BB7CAE05}.Debug|x86.Build.0 = Debug|Any CPU + {3FF93987-A30A-4D50-8815-7CF3BB7CAE05}.Release|Any CPU.ActiveCfg = Release|Any CPU + {3FF93987-A30A-4D50-8815-7CF3BB7CAE05}.Release|Any CPU.Build.0 = Release|Any CPU + {3FF93987-A30A-4D50-8815-7CF3BB7CAE05}.Release|x64.ActiveCfg = Release|Any CPU + {3FF93987-A30A-4D50-8815-7CF3BB7CAE05}.Release|x64.Build.0 = Release|Any CPU + {3FF93987-A30A-4D50-8815-7CF3BB7CAE05}.Release|x86.ActiveCfg = Release|Any CPU + {3FF93987-A30A-4D50-8815-7CF3BB7CAE05}.Release|x86.Build.0 = Release|Any CPU + {AACE8717-0760-42F2-A225-8FCCE876FB65}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {AACE8717-0760-42F2-A225-8FCCE876FB65}.Debug|Any CPU.Build.0 = Debug|Any CPU + {AACE8717-0760-42F2-A225-8FCCE876FB65}.Debug|x64.ActiveCfg = Debug|Any CPU + {AACE8717-0760-42F2-A225-8FCCE876FB65}.Debug|x64.Build.0 = Debug|Any CPU + {AACE8717-0760-42F2-A225-8FCCE876FB65}.Debug|x86.ActiveCfg = Debug|Any CPU + {AACE8717-0760-42F2-A225-8FCCE876FB65}.Debug|x86.Build.0 = Debug|Any CPU + {AACE8717-0760-42F2-A225-8FCCE876FB65}.Release|Any CPU.ActiveCfg = Release|Any CPU + {AACE8717-0760-42F2-A225-8FCCE876FB65}.Release|Any CPU.Build.0 = Release|Any CPU + {AACE8717-0760-42F2-A225-8FCCE876FB65}.Release|x64.ActiveCfg = Release|Any CPU + {AACE8717-0760-42F2-A225-8FCCE876FB65}.Release|x64.Build.0 = Release|Any CPU + {AACE8717-0760-42F2-A225-8FCCE876FB65}.Release|x86.ActiveCfg = Release|Any CPU + {AACE8717-0760-42F2-A225-8FCCE876FB65}.Release|x86.Build.0 = Release|Any CPU + {4AAD6965-E879-44AD-A8ED-E1D713A3CD6D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {4AAD6965-E879-44AD-A8ED-E1D713A3CD6D}.Debug|Any CPU.Build.0 = Debug|Any CPU + {4AAD6965-E879-44AD-A8ED-E1D713A3CD6D}.Debug|x64.ActiveCfg = Debug|Any CPU + {4AAD6965-E879-44AD-A8ED-E1D713A3CD6D}.Debug|x64.Build.0 = Debug|Any CPU + {4AAD6965-E879-44AD-A8ED-E1D713A3CD6D}.Debug|x86.ActiveCfg = Debug|Any CPU + {4AAD6965-E879-44AD-A8ED-E1D713A3CD6D}.Debug|x86.Build.0 = Debug|Any CPU + {4AAD6965-E879-44AD-A8ED-E1D713A3CD6D}.Release|Any CPU.ActiveCfg = Release|Any CPU + {4AAD6965-E879-44AD-A8ED-E1D713A3CD6D}.Release|Any CPU.Build.0 = Release|Any CPU + {4AAD6965-E879-44AD-A8ED-E1D713A3CD6D}.Release|x64.ActiveCfg = Release|Any CPU + {4AAD6965-E879-44AD-A8ED-E1D713A3CD6D}.Release|x64.Build.0 = Release|Any CPU + {4AAD6965-E879-44AD-A8ED-E1D713A3CD6D}.Release|x86.ActiveCfg = Release|Any CPU + {4AAD6965-E879-44AD-A8ED-E1D713A3CD6D}.Release|x86.Build.0 = Release|Any CPU + {85D82A87-1F4A-4B1B-8422-5B7A7B7704E3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {85D82A87-1F4A-4B1B-8422-5B7A7B7704E3}.Debug|Any CPU.Build.0 = Debug|Any CPU + {85D82A87-1F4A-4B1B-8422-5B7A7B7704E3}.Debug|x64.ActiveCfg = Debug|Any CPU + {85D82A87-1F4A-4B1B-8422-5B7A7B7704E3}.Debug|x64.Build.0 = Debug|Any CPU + {85D82A87-1F4A-4B1B-8422-5B7A7B7704E3}.Debug|x86.ActiveCfg = Debug|Any CPU + {85D82A87-1F4A-4B1B-8422-5B7A7B7704E3}.Debug|x86.Build.0 = Debug|Any CPU + {85D82A87-1F4A-4B1B-8422-5B7A7B7704E3}.Release|Any 
CPU.ActiveCfg = Release|Any CPU + {85D82A87-1F4A-4B1B-8422-5B7A7B7704E3}.Release|Any CPU.Build.0 = Release|Any CPU + {85D82A87-1F4A-4B1B-8422-5B7A7B7704E3}.Release|x64.ActiveCfg = Release|Any CPU + {85D82A87-1F4A-4B1B-8422-5B7A7B7704E3}.Release|x64.Build.0 = Release|Any CPU + {85D82A87-1F4A-4B1B-8422-5B7A7B7704E3}.Release|x86.ActiveCfg = Release|Any CPU + {85D82A87-1F4A-4B1B-8422-5B7A7B7704E3}.Release|x86.Build.0 = Release|Any CPU + {FE227DF2-875D-4BEA-A4E0-14EA7F3EC1D0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {FE227DF2-875D-4BEA-A4E0-14EA7F3EC1D0}.Debug|Any CPU.Build.0 = Debug|Any CPU + {FE227DF2-875D-4BEA-A4E0-14EA7F3EC1D0}.Debug|x64.ActiveCfg = Debug|Any CPU + {FE227DF2-875D-4BEA-A4E0-14EA7F3EC1D0}.Debug|x64.Build.0 = Debug|Any CPU + {FE227DF2-875D-4BEA-A4E0-14EA7F3EC1D0}.Debug|x86.ActiveCfg = Debug|Any CPU + {FE227DF2-875D-4BEA-A4E0-14EA7F3EC1D0}.Debug|x86.Build.0 = Debug|Any CPU + {FE227DF2-875D-4BEA-A4E0-14EA7F3EC1D0}.Release|Any CPU.ActiveCfg = Release|Any CPU + {FE227DF2-875D-4BEA-A4E0-14EA7F3EC1D0}.Release|Any CPU.Build.0 = Release|Any CPU + {FE227DF2-875D-4BEA-A4E0-14EA7F3EC1D0}.Release|x64.ActiveCfg = Release|Any CPU + {FE227DF2-875D-4BEA-A4E0-14EA7F3EC1D0}.Release|x64.Build.0 = Release|Any CPU + {FE227DF2-875D-4BEA-A4E0-14EA7F3EC1D0}.Release|x86.ActiveCfg = Release|Any CPU + {FE227DF2-875D-4BEA-A4E0-14EA7F3EC1D0}.Release|x86.Build.0 = Release|Any CPU + {D0FB54BA-4D14-4A32-B09F-7EC94F369460}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D0FB54BA-4D14-4A32-B09F-7EC94F369460}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D0FB54BA-4D14-4A32-B09F-7EC94F369460}.Debug|x64.ActiveCfg = Debug|Any CPU + {D0FB54BA-4D14-4A32-B09F-7EC94F369460}.Debug|x64.Build.0 = Debug|Any CPU + {D0FB54BA-4D14-4A32-B09F-7EC94F369460}.Debug|x86.ActiveCfg = Debug|Any CPU + {D0FB54BA-4D14-4A32-B09F-7EC94F369460}.Debug|x86.Build.0 = Debug|Any CPU + {D0FB54BA-4D14-4A32-B09F-7EC94F369460}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D0FB54BA-4D14-4A32-B09F-7EC94F369460}.Release|Any CPU.Build.0 = Release|Any CPU + {D0FB54BA-4D14-4A32-B09F-7EC94F369460}.Release|x64.ActiveCfg = Release|Any CPU + {D0FB54BA-4D14-4A32-B09F-7EC94F369460}.Release|x64.Build.0 = Release|Any CPU + {D0FB54BA-4D14-4A32-B09F-7EC94F369460}.Release|x86.ActiveCfg = Release|Any CPU + {D0FB54BA-4D14-4A32-B09F-7EC94F369460}.Release|x86.Build.0 = Release|Any CPU + {69C9E010-CBDD-4B89-84CF-7AB56D6A078A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {69C9E010-CBDD-4B89-84CF-7AB56D6A078A}.Debug|Any CPU.Build.0 = Debug|Any CPU + {69C9E010-CBDD-4B89-84CF-7AB56D6A078A}.Debug|x64.ActiveCfg = Debug|Any CPU + {69C9E010-CBDD-4B89-84CF-7AB56D6A078A}.Debug|x64.Build.0 = Debug|Any CPU + {69C9E010-CBDD-4B89-84CF-7AB56D6A078A}.Debug|x86.ActiveCfg = Debug|Any CPU + {69C9E010-CBDD-4B89-84CF-7AB56D6A078A}.Debug|x86.Build.0 = Debug|Any CPU + {69C9E010-CBDD-4B89-84CF-7AB56D6A078A}.Release|Any CPU.ActiveCfg = Release|Any CPU + {69C9E010-CBDD-4B89-84CF-7AB56D6A078A}.Release|Any CPU.Build.0 = Release|Any CPU + {69C9E010-CBDD-4B89-84CF-7AB56D6A078A}.Release|x64.ActiveCfg = Release|Any CPU + {69C9E010-CBDD-4B89-84CF-7AB56D6A078A}.Release|x64.Build.0 = Release|Any CPU + {69C9E010-CBDD-4B89-84CF-7AB56D6A078A}.Release|x86.ActiveCfg = Release|Any CPU + {69C9E010-CBDD-4B89-84CF-7AB56D6A078A}.Release|x86.Build.0 = Release|Any CPU + {E471176A-E1F3-4DE5-8D30-0865903A217A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E471176A-E1F3-4DE5-8D30-0865903A217A}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E471176A-E1F3-4DE5-8D30-0865903A217A}.Debug|x64.ActiveCfg = Debug|Any CPU + 
{E471176A-E1F3-4DE5-8D30-0865903A217A}.Debug|x64.Build.0 = Debug|Any CPU + {E471176A-E1F3-4DE5-8D30-0865903A217A}.Debug|x86.ActiveCfg = Debug|Any CPU + {E471176A-E1F3-4DE5-8D30-0865903A217A}.Debug|x86.Build.0 = Debug|Any CPU + {E471176A-E1F3-4DE5-8D30-0865903A217A}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E471176A-E1F3-4DE5-8D30-0865903A217A}.Release|Any CPU.Build.0 = Release|Any CPU + {E471176A-E1F3-4DE5-8D30-0865903A217A}.Release|x64.ActiveCfg = Release|Any CPU + {E471176A-E1F3-4DE5-8D30-0865903A217A}.Release|x64.Build.0 = Release|Any CPU + {E471176A-E1F3-4DE5-8D30-0865903A217A}.Release|x86.ActiveCfg = Release|Any CPU + {E471176A-E1F3-4DE5-8D30-0865903A217A}.Release|x86.Build.0 = Release|Any CPU + {FA013511-DF20-45F7-8077-EBA2D6224D64}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {FA013511-DF20-45F7-8077-EBA2D6224D64}.Debug|Any CPU.Build.0 = Debug|Any CPU + {FA013511-DF20-45F7-8077-EBA2D6224D64}.Debug|x64.ActiveCfg = Debug|Any CPU + {FA013511-DF20-45F7-8077-EBA2D6224D64}.Debug|x64.Build.0 = Debug|Any CPU + {FA013511-DF20-45F7-8077-EBA2D6224D64}.Debug|x86.ActiveCfg = Debug|Any CPU + {FA013511-DF20-45F7-8077-EBA2D6224D64}.Debug|x86.Build.0 = Debug|Any CPU + {FA013511-DF20-45F7-8077-EBA2D6224D64}.Release|Any CPU.ActiveCfg = Release|Any CPU + {FA013511-DF20-45F7-8077-EBA2D6224D64}.Release|Any CPU.Build.0 = Release|Any CPU + {FA013511-DF20-45F7-8077-EBA2D6224D64}.Release|x64.ActiveCfg = Release|Any CPU + {FA013511-DF20-45F7-8077-EBA2D6224D64}.Release|x64.Build.0 = Release|Any CPU + {FA013511-DF20-45F7-8077-EBA2D6224D64}.Release|x86.ActiveCfg = Release|Any CPU + {FA013511-DF20-45F7-8077-EBA2D6224D64}.Release|x86.Build.0 = Release|Any CPU + {B9F84697-54FE-4648-B173-EE3D904FFA4D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B9F84697-54FE-4648-B173-EE3D904FFA4D}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B9F84697-54FE-4648-B173-EE3D904FFA4D}.Debug|x64.ActiveCfg = Debug|Any CPU + {B9F84697-54FE-4648-B173-EE3D904FFA4D}.Debug|x64.Build.0 = Debug|Any CPU + {B9F84697-54FE-4648-B173-EE3D904FFA4D}.Debug|x86.ActiveCfg = Debug|Any CPU + {B9F84697-54FE-4648-B173-EE3D904FFA4D}.Debug|x86.Build.0 = Debug|Any CPU + {B9F84697-54FE-4648-B173-EE3D904FFA4D}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B9F84697-54FE-4648-B173-EE3D904FFA4D}.Release|Any CPU.Build.0 = Release|Any CPU + {B9F84697-54FE-4648-B173-EE3D904FFA4D}.Release|x64.ActiveCfg = Release|Any CPU + {B9F84697-54FE-4648-B173-EE3D904FFA4D}.Release|x64.Build.0 = Release|Any CPU + {B9F84697-54FE-4648-B173-EE3D904FFA4D}.Release|x86.ActiveCfg = Release|Any CPU + {B9F84697-54FE-4648-B173-EE3D904FFA4D}.Release|x86.Build.0 = Release|Any CPU + {6751A76C-8ED8-40F4-AE2B-069DB31395FE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {6751A76C-8ED8-40F4-AE2B-069DB31395FE}.Debug|Any CPU.Build.0 = Debug|Any CPU + {6751A76C-8ED8-40F4-AE2B-069DB31395FE}.Debug|x64.ActiveCfg = Debug|Any CPU + {6751A76C-8ED8-40F4-AE2B-069DB31395FE}.Debug|x64.Build.0 = Debug|Any CPU + {6751A76C-8ED8-40F4-AE2B-069DB31395FE}.Debug|x86.ActiveCfg = Debug|Any CPU + {6751A76C-8ED8-40F4-AE2B-069DB31395FE}.Debug|x86.Build.0 = Debug|Any CPU + {6751A76C-8ED8-40F4-AE2B-069DB31395FE}.Release|Any CPU.ActiveCfg = Release|Any CPU + {6751A76C-8ED8-40F4-AE2B-069DB31395FE}.Release|Any CPU.Build.0 = Release|Any CPU + {6751A76C-8ED8-40F4-AE2B-069DB31395FE}.Release|x64.ActiveCfg = Release|Any CPU + {6751A76C-8ED8-40F4-AE2B-069DB31395FE}.Release|x64.Build.0 = Release|Any CPU + {6751A76C-8ED8-40F4-AE2B-069DB31395FE}.Release|x86.ActiveCfg = Release|Any CPU + {6751A76C-8ED8-40F4-AE2B-069DB31395FE}.Release|x86.Build.0 = 
Release|Any CPU + {DDBFA2EF-9CAE-473F-A438-369CAC25C66A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {DDBFA2EF-9CAE-473F-A438-369CAC25C66A}.Debug|Any CPU.Build.0 = Debug|Any CPU + {DDBFA2EF-9CAE-473F-A438-369CAC25C66A}.Debug|x64.ActiveCfg = Debug|Any CPU + {DDBFA2EF-9CAE-473F-A438-369CAC25C66A}.Debug|x64.Build.0 = Debug|Any CPU + {DDBFA2EF-9CAE-473F-A438-369CAC25C66A}.Debug|x86.ActiveCfg = Debug|Any CPU + {DDBFA2EF-9CAE-473F-A438-369CAC25C66A}.Debug|x86.Build.0 = Debug|Any CPU + {DDBFA2EF-9CAE-473F-A438-369CAC25C66A}.Release|Any CPU.ActiveCfg = Release|Any CPU + {DDBFA2EF-9CAE-473F-A438-369CAC25C66A}.Release|Any CPU.Build.0 = Release|Any CPU + {DDBFA2EF-9CAE-473F-A438-369CAC25C66A}.Release|x64.ActiveCfg = Release|Any CPU + {DDBFA2EF-9CAE-473F-A438-369CAC25C66A}.Release|x64.Build.0 = Release|Any CPU + {DDBFA2EF-9CAE-473F-A438-369CAC25C66A}.Release|x86.ActiveCfg = Release|Any CPU + {DDBFA2EF-9CAE-473F-A438-369CAC25C66A}.Release|x86.Build.0 = Release|Any CPU + {063DE5E1-C8FE-47D0-A12A-22A25CDF2C22}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {063DE5E1-C8FE-47D0-A12A-22A25CDF2C22}.Debug|Any CPU.Build.0 = Debug|Any CPU + {063DE5E1-C8FE-47D0-A12A-22A25CDF2C22}.Debug|x64.ActiveCfg = Debug|Any CPU + {063DE5E1-C8FE-47D0-A12A-22A25CDF2C22}.Debug|x64.Build.0 = Debug|Any CPU + {063DE5E1-C8FE-47D0-A12A-22A25CDF2C22}.Debug|x86.ActiveCfg = Debug|Any CPU + {063DE5E1-C8FE-47D0-A12A-22A25CDF2C22}.Debug|x86.Build.0 = Debug|Any CPU + {063DE5E1-C8FE-47D0-A12A-22A25CDF2C22}.Release|Any CPU.ActiveCfg = Release|Any CPU + {063DE5E1-C8FE-47D0-A12A-22A25CDF2C22}.Release|Any CPU.Build.0 = Release|Any CPU + {063DE5E1-C8FE-47D0-A12A-22A25CDF2C22}.Release|x64.ActiveCfg = Release|Any CPU + {063DE5E1-C8FE-47D0-A12A-22A25CDF2C22}.Release|x64.Build.0 = Release|Any CPU + {063DE5E1-C8FE-47D0-A12A-22A25CDF2C22}.Release|x86.ActiveCfg = Release|Any CPU + {063DE5E1-C8FE-47D0-A12A-22A25CDF2C22}.Release|x86.Build.0 = Release|Any CPU + {35350FAB-FC51-4FE8-81FB-011003134C37}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {35350FAB-FC51-4FE8-81FB-011003134C37}.Debug|Any CPU.Build.0 = Debug|Any CPU + {35350FAB-FC51-4FE8-81FB-011003134C37}.Debug|x64.ActiveCfg = Debug|Any CPU + {35350FAB-FC51-4FE8-81FB-011003134C37}.Debug|x64.Build.0 = Debug|Any CPU + {35350FAB-FC51-4FE8-81FB-011003134C37}.Debug|x86.ActiveCfg = Debug|Any CPU + {35350FAB-FC51-4FE8-81FB-011003134C37}.Debug|x86.Build.0 = Debug|Any CPU + {35350FAB-FC51-4FE8-81FB-011003134C37}.Release|Any CPU.ActiveCfg = Release|Any CPU + {35350FAB-FC51-4FE8-81FB-011003134C37}.Release|Any CPU.Build.0 = Release|Any CPU + {35350FAB-FC51-4FE8-81FB-011003134C37}.Release|x64.ActiveCfg = Release|Any CPU + {35350FAB-FC51-4FE8-81FB-011003134C37}.Release|x64.Build.0 = Release|Any CPU + {35350FAB-FC51-4FE8-81FB-011003134C37}.Release|x86.ActiveCfg = Release|Any CPU + {35350FAB-FC51-4FE8-81FB-011003134C37}.Release|x86.Build.0 = Release|Any CPU + {1BFC95B4-4C8A-44B2-903A-11FBCAAB9519}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {1BFC95B4-4C8A-44B2-903A-11FBCAAB9519}.Debug|Any CPU.Build.0 = Debug|Any CPU + {1BFC95B4-4C8A-44B2-903A-11FBCAAB9519}.Debug|x64.ActiveCfg = Debug|Any CPU + {1BFC95B4-4C8A-44B2-903A-11FBCAAB9519}.Debug|x64.Build.0 = Debug|Any CPU + {1BFC95B4-4C8A-44B2-903A-11FBCAAB9519}.Debug|x86.ActiveCfg = Debug|Any CPU + {1BFC95B4-4C8A-44B2-903A-11FBCAAB9519}.Debug|x86.Build.0 = Debug|Any CPU + {1BFC95B4-4C8A-44B2-903A-11FBCAAB9519}.Release|Any CPU.ActiveCfg = Release|Any CPU + {1BFC95B4-4C8A-44B2-903A-11FBCAAB9519}.Release|Any CPU.Build.0 = Release|Any CPU + 
{1BFC95B4-4C8A-44B2-903A-11FBCAAB9519}.Release|x64.ActiveCfg = Release|Any CPU + {1BFC95B4-4C8A-44B2-903A-11FBCAAB9519}.Release|x64.Build.0 = Release|Any CPU + {1BFC95B4-4C8A-44B2-903A-11FBCAAB9519}.Release|x86.ActiveCfg = Release|Any CPU + {1BFC95B4-4C8A-44B2-903A-11FBCAAB9519}.Release|x86.Build.0 = Release|Any CPU + {C4A65377-22F7-4D15-92A3-4F05847D167E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C4A65377-22F7-4D15-92A3-4F05847D167E}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C4A65377-22F7-4D15-92A3-4F05847D167E}.Debug|x64.ActiveCfg = Debug|Any CPU + {C4A65377-22F7-4D15-92A3-4F05847D167E}.Debug|x64.Build.0 = Debug|Any CPU + {C4A65377-22F7-4D15-92A3-4F05847D167E}.Debug|x86.ActiveCfg = Debug|Any CPU + {C4A65377-22F7-4D15-92A3-4F05847D167E}.Debug|x86.Build.0 = Debug|Any CPU + {C4A65377-22F7-4D15-92A3-4F05847D167E}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C4A65377-22F7-4D15-92A3-4F05847D167E}.Release|Any CPU.Build.0 = Release|Any CPU + {C4A65377-22F7-4D15-92A3-4F05847D167E}.Release|x64.ActiveCfg = Release|Any CPU + {C4A65377-22F7-4D15-92A3-4F05847D167E}.Release|x64.Build.0 = Release|Any CPU + {C4A65377-22F7-4D15-92A3-4F05847D167E}.Release|x86.ActiveCfg = Release|Any CPU + {C4A65377-22F7-4D15-92A3-4F05847D167E}.Release|x86.Build.0 = Release|Any CPU + {BDDE59E1-C643-4C87-8608-0F9A7A54DE09}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {BDDE59E1-C643-4C87-8608-0F9A7A54DE09}.Debug|Any CPU.Build.0 = Debug|Any CPU + {BDDE59E1-C643-4C87-8608-0F9A7A54DE09}.Debug|x64.ActiveCfg = Debug|Any CPU + {BDDE59E1-C643-4C87-8608-0F9A7A54DE09}.Debug|x64.Build.0 = Debug|Any CPU + {BDDE59E1-C643-4C87-8608-0F9A7A54DE09}.Debug|x86.ActiveCfg = Debug|Any CPU + {BDDE59E1-C643-4C87-8608-0F9A7A54DE09}.Debug|x86.Build.0 = Debug|Any CPU + {BDDE59E1-C643-4C87-8608-0F9A7A54DE09}.Release|Any CPU.ActiveCfg = Release|Any CPU + {BDDE59E1-C643-4C87-8608-0F9A7A54DE09}.Release|Any CPU.Build.0 = Release|Any CPU + {BDDE59E1-C643-4C87-8608-0F9A7A54DE09}.Release|x64.ActiveCfg = Release|Any CPU + {BDDE59E1-C643-4C87-8608-0F9A7A54DE09}.Release|x64.Build.0 = Release|Any CPU + {BDDE59E1-C643-4C87-8608-0F9A7A54DE09}.Release|x86.ActiveCfg = Release|Any CPU + {BDDE59E1-C643-4C87-8608-0F9A7A54DE09}.Release|x86.Build.0 = Release|Any CPU + {0CC116C8-A7E5-4B94-9688-32920177FF97}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {0CC116C8-A7E5-4B94-9688-32920177FF97}.Debug|Any CPU.Build.0 = Debug|Any CPU + {0CC116C8-A7E5-4B94-9688-32920177FF97}.Debug|x64.ActiveCfg = Debug|Any CPU + {0CC116C8-A7E5-4B94-9688-32920177FF97}.Debug|x64.Build.0 = Debug|Any CPU + {0CC116C8-A7E5-4B94-9688-32920177FF97}.Debug|x86.ActiveCfg = Debug|Any CPU + {0CC116C8-A7E5-4B94-9688-32920177FF97}.Debug|x86.Build.0 = Debug|Any CPU + {0CC116C8-A7E5-4B94-9688-32920177FF97}.Release|Any CPU.ActiveCfg = Release|Any CPU + {0CC116C8-A7E5-4B94-9688-32920177FF97}.Release|Any CPU.Build.0 = Release|Any CPU + {0CC116C8-A7E5-4B94-9688-32920177FF97}.Release|x64.ActiveCfg = Release|Any CPU + {0CC116C8-A7E5-4B94-9688-32920177FF97}.Release|x64.Build.0 = Release|Any CPU + {0CC116C8-A7E5-4B94-9688-32920177FF97}.Release|x86.ActiveCfg = Release|Any CPU + {0CC116C8-A7E5-4B94-9688-32920177FF97}.Release|x86.Build.0 = Release|Any CPU + {E8862F6E-85C1-4FDB-AA92-0BB489B7EA1E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E8862F6E-85C1-4FDB-AA92-0BB489B7EA1E}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E8862F6E-85C1-4FDB-AA92-0BB489B7EA1E}.Debug|x64.ActiveCfg = Debug|Any CPU + {E8862F6E-85C1-4FDB-AA92-0BB489B7EA1E}.Debug|x64.Build.0 = Debug|Any CPU + {E8862F6E-85C1-4FDB-AA92-0BB489B7EA1E}.Debug|x86.ActiveCfg = 
Debug|Any CPU + {E8862F6E-85C1-4FDB-AA92-0BB489B7EA1E}.Debug|x86.Build.0 = Debug|Any CPU + {E8862F6E-85C1-4FDB-AA92-0BB489B7EA1E}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E8862F6E-85C1-4FDB-AA92-0BB489B7EA1E}.Release|Any CPU.Build.0 = Release|Any CPU + {E8862F6E-85C1-4FDB-AA92-0BB489B7EA1E}.Release|x64.ActiveCfg = Release|Any CPU + {E8862F6E-85C1-4FDB-AA92-0BB489B7EA1E}.Release|x64.Build.0 = Release|Any CPU + {E8862F6E-85C1-4FDB-AA92-0BB489B7EA1E}.Release|x86.ActiveCfg = Release|Any CPU + {E8862F6E-85C1-4FDB-AA92-0BB489B7EA1E}.Release|x86.Build.0 = Release|Any CPU + {84DEDF05-A5BD-4644-86B9-6B7918FE3F31}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {84DEDF05-A5BD-4644-86B9-6B7918FE3F31}.Debug|Any CPU.Build.0 = Debug|Any CPU + {84DEDF05-A5BD-4644-86B9-6B7918FE3F31}.Debug|x64.ActiveCfg = Debug|Any CPU + {84DEDF05-A5BD-4644-86B9-6B7918FE3F31}.Debug|x64.Build.0 = Debug|Any CPU + {84DEDF05-A5BD-4644-86B9-6B7918FE3F31}.Debug|x86.ActiveCfg = Debug|Any CPU + {84DEDF05-A5BD-4644-86B9-6B7918FE3F31}.Debug|x86.Build.0 = Debug|Any CPU + {84DEDF05-A5BD-4644-86B9-6B7918FE3F31}.Release|Any CPU.ActiveCfg = Release|Any CPU + {84DEDF05-A5BD-4644-86B9-6B7918FE3F31}.Release|Any CPU.Build.0 = Release|Any CPU + {84DEDF05-A5BD-4644-86B9-6B7918FE3F31}.Release|x64.ActiveCfg = Release|Any CPU + {84DEDF05-A5BD-4644-86B9-6B7918FE3F31}.Release|x64.Build.0 = Release|Any CPU + {84DEDF05-A5BD-4644-86B9-6B7918FE3F31}.Release|x86.ActiveCfg = Release|Any CPU + {84DEDF05-A5BD-4644-86B9-6B7918FE3F31}.Release|x86.Build.0 = Release|Any CPU + {9DEB1F54-94B5-40C4-AC44-220E680B016D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {9DEB1F54-94B5-40C4-AC44-220E680B016D}.Debug|Any CPU.Build.0 = Debug|Any CPU + {9DEB1F54-94B5-40C4-AC44-220E680B016D}.Debug|x64.ActiveCfg = Debug|Any CPU + {9DEB1F54-94B5-40C4-AC44-220E680B016D}.Debug|x64.Build.0 = Debug|Any CPU + {9DEB1F54-94B5-40C4-AC44-220E680B016D}.Debug|x86.ActiveCfg = Debug|Any CPU + {9DEB1F54-94B5-40C4-AC44-220E680B016D}.Debug|x86.Build.0 = Debug|Any CPU + {9DEB1F54-94B5-40C4-AC44-220E680B016D}.Release|Any CPU.ActiveCfg = Release|Any CPU + {9DEB1F54-94B5-40C4-AC44-220E680B016D}.Release|Any CPU.Build.0 = Release|Any CPU + {9DEB1F54-94B5-40C4-AC44-220E680B016D}.Release|x64.ActiveCfg = Release|Any CPU + {9DEB1F54-94B5-40C4-AC44-220E680B016D}.Release|x64.Build.0 = Release|Any CPU + {9DEB1F54-94B5-40C4-AC44-220E680B016D}.Release|x86.ActiveCfg = Release|Any CPU + {9DEB1F54-94B5-40C4-AC44-220E680B016D}.Release|x86.Build.0 = Release|Any CPU + {7C3E87F2-93D8-4968-95E3-52C46947D46C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7C3E87F2-93D8-4968-95E3-52C46947D46C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7C3E87F2-93D8-4968-95E3-52C46947D46C}.Debug|x64.ActiveCfg = Debug|Any CPU + {7C3E87F2-93D8-4968-95E3-52C46947D46C}.Debug|x64.Build.0 = Debug|Any CPU + {7C3E87F2-93D8-4968-95E3-52C46947D46C}.Debug|x86.ActiveCfg = Debug|Any CPU + {7C3E87F2-93D8-4968-95E3-52C46947D46C}.Debug|x86.Build.0 = Debug|Any CPU + {7C3E87F2-93D8-4968-95E3-52C46947D46C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7C3E87F2-93D8-4968-95E3-52C46947D46C}.Release|Any CPU.Build.0 = Release|Any CPU + {7C3E87F2-93D8-4968-95E3-52C46947D46C}.Release|x64.ActiveCfg = Release|Any CPU + {7C3E87F2-93D8-4968-95E3-52C46947D46C}.Release|x64.Build.0 = Release|Any CPU + {7C3E87F2-93D8-4968-95E3-52C46947D46C}.Release|x86.ActiveCfg = Release|Any CPU + {7C3E87F2-93D8-4968-95E3-52C46947D46C}.Release|x86.Build.0 = Release|Any CPU + {C0504D97-9BCD-4AE4-B0DC-B31C17B150F2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + 
{C0504D97-9BCD-4AE4-B0DC-B31C17B150F2}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C0504D97-9BCD-4AE4-B0DC-B31C17B150F2}.Debug|x64.ActiveCfg = Debug|Any CPU + {C0504D97-9BCD-4AE4-B0DC-B31C17B150F2}.Debug|x64.Build.0 = Debug|Any CPU + {C0504D97-9BCD-4AE4-B0DC-B31C17B150F2}.Debug|x86.ActiveCfg = Debug|Any CPU + {C0504D97-9BCD-4AE4-B0DC-B31C17B150F2}.Debug|x86.Build.0 = Debug|Any CPU + {C0504D97-9BCD-4AE4-B0DC-B31C17B150F2}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C0504D97-9BCD-4AE4-B0DC-B31C17B150F2}.Release|Any CPU.Build.0 = Release|Any CPU + {C0504D97-9BCD-4AE4-B0DC-B31C17B150F2}.Release|x64.ActiveCfg = Release|Any CPU + {C0504D97-9BCD-4AE4-B0DC-B31C17B150F2}.Release|x64.Build.0 = Release|Any CPU + {C0504D97-9BCD-4AE4-B0DC-B31C17B150F2}.Release|x86.ActiveCfg = Release|Any CPU + {C0504D97-9BCD-4AE4-B0DC-B31C17B150F2}.Release|x86.Build.0 = Release|Any CPU + {31B05493-104F-437F-9FA7-CA5286CE697C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {31B05493-104F-437F-9FA7-CA5286CE697C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {31B05493-104F-437F-9FA7-CA5286CE697C}.Debug|x64.ActiveCfg = Debug|Any CPU + {31B05493-104F-437F-9FA7-CA5286CE697C}.Debug|x64.Build.0 = Debug|Any CPU + {31B05493-104F-437F-9FA7-CA5286CE697C}.Debug|x86.ActiveCfg = Debug|Any CPU + {31B05493-104F-437F-9FA7-CA5286CE697C}.Debug|x86.Build.0 = Debug|Any CPU + {31B05493-104F-437F-9FA7-CA5286CE697C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {31B05493-104F-437F-9FA7-CA5286CE697C}.Release|Any CPU.Build.0 = Release|Any CPU + {31B05493-104F-437F-9FA7-CA5286CE697C}.Release|x64.ActiveCfg = Release|Any CPU + {31B05493-104F-437F-9FA7-CA5286CE697C}.Release|x64.Build.0 = Release|Any CPU + {31B05493-104F-437F-9FA7-CA5286CE697C}.Release|x86.ActiveCfg = Release|Any CPU + {31B05493-104F-437F-9FA7-CA5286CE697C}.Release|x86.Build.0 = Release|Any CPU + {937AF12E-D770-4534-8FF8-C59042609C2A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {937AF12E-D770-4534-8FF8-C59042609C2A}.Debug|Any CPU.Build.0 = Debug|Any CPU + {937AF12E-D770-4534-8FF8-C59042609C2A}.Debug|x64.ActiveCfg = Debug|Any CPU + {937AF12E-D770-4534-8FF8-C59042609C2A}.Debug|x64.Build.0 = Debug|Any CPU + {937AF12E-D770-4534-8FF8-C59042609C2A}.Debug|x86.ActiveCfg = Debug|Any CPU + {937AF12E-D770-4534-8FF8-C59042609C2A}.Debug|x86.Build.0 = Debug|Any CPU + {937AF12E-D770-4534-8FF8-C59042609C2A}.Release|Any CPU.ActiveCfg = Release|Any CPU + {937AF12E-D770-4534-8FF8-C59042609C2A}.Release|Any CPU.Build.0 = Release|Any CPU + {937AF12E-D770-4534-8FF8-C59042609C2A}.Release|x64.ActiveCfg = Release|Any CPU + {937AF12E-D770-4534-8FF8-C59042609C2A}.Release|x64.Build.0 = Release|Any CPU + {937AF12E-D770-4534-8FF8-C59042609C2A}.Release|x86.ActiveCfg = Release|Any CPU + {937AF12E-D770-4534-8FF8-C59042609C2A}.Release|x86.Build.0 = Release|Any CPU + {5A028B04-9D76-470B-B5B3-766CE4CE860C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {5A028B04-9D76-470B-B5B3-766CE4CE860C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {5A028B04-9D76-470B-B5B3-766CE4CE860C}.Debug|x64.ActiveCfg = Debug|Any CPU + {5A028B04-9D76-470B-B5B3-766CE4CE860C}.Debug|x64.Build.0 = Debug|Any CPU + {5A028B04-9D76-470B-B5B3-766CE4CE860C}.Debug|x86.ActiveCfg = Debug|Any CPU + {5A028B04-9D76-470B-B5B3-766CE4CE860C}.Debug|x86.Build.0 = Debug|Any CPU + {5A028B04-9D76-470B-B5B3-766CE4CE860C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {5A028B04-9D76-470B-B5B3-766CE4CE860C}.Release|Any CPU.Build.0 = Release|Any CPU + {5A028B04-9D76-470B-B5B3-766CE4CE860C}.Release|x64.ActiveCfg = Release|Any CPU + {5A028B04-9D76-470B-B5B3-766CE4CE860C}.Release|x64.Build.0 = 
Release|Any CPU + {5A028B04-9D76-470B-B5B3-766CE4CE860C}.Release|x86.ActiveCfg = Release|Any CPU + {5A028B04-9D76-470B-B5B3-766CE4CE860C}.Release|x86.Build.0 = Release|Any CPU + {749DE4C8-F733-43F8-B2A8-6649E71C7570}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {749DE4C8-F733-43F8-B2A8-6649E71C7570}.Debug|Any CPU.Build.0 = Debug|Any CPU + {749DE4C8-F733-43F8-B2A8-6649E71C7570}.Debug|x64.ActiveCfg = Debug|Any CPU + {749DE4C8-F733-43F8-B2A8-6649E71C7570}.Debug|x64.Build.0 = Debug|Any CPU + {749DE4C8-F733-43F8-B2A8-6649E71C7570}.Debug|x86.ActiveCfg = Debug|Any CPU + {749DE4C8-F733-43F8-B2A8-6649E71C7570}.Debug|x86.Build.0 = Debug|Any CPU + {749DE4C8-F733-43F8-B2A8-6649E71C7570}.Release|Any CPU.ActiveCfg = Release|Any CPU + {749DE4C8-F733-43F8-B2A8-6649E71C7570}.Release|Any CPU.Build.0 = Release|Any CPU + {749DE4C8-F733-43F8-B2A8-6649E71C7570}.Release|x64.ActiveCfg = Release|Any CPU + {749DE4C8-F733-43F8-B2A8-6649E71C7570}.Release|x64.Build.0 = Release|Any CPU + {749DE4C8-F733-43F8-B2A8-6649E71C7570}.Release|x86.ActiveCfg = Release|Any CPU + {749DE4C8-F733-43F8-B2A8-6649E71C7570}.Release|x86.Build.0 = Release|Any CPU + {56D2C79E-2737-4FF9-9D19-150065F568D5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {56D2C79E-2737-4FF9-9D19-150065F568D5}.Debug|Any CPU.Build.0 = Debug|Any CPU + {56D2C79E-2737-4FF9-9D19-150065F568D5}.Debug|x64.ActiveCfg = Debug|Any CPU + {56D2C79E-2737-4FF9-9D19-150065F568D5}.Debug|x64.Build.0 = Debug|Any CPU + {56D2C79E-2737-4FF9-9D19-150065F568D5}.Debug|x86.ActiveCfg = Debug|Any CPU + {56D2C79E-2737-4FF9-9D19-150065F568D5}.Debug|x86.Build.0 = Debug|Any CPU + {56D2C79E-2737-4FF9-9D19-150065F568D5}.Release|Any CPU.ActiveCfg = Release|Any CPU + {56D2C79E-2737-4FF9-9D19-150065F568D5}.Release|Any CPU.Build.0 = Release|Any CPU + {56D2C79E-2737-4FF9-9D19-150065F568D5}.Release|x64.ActiveCfg = Release|Any CPU + {56D2C79E-2737-4FF9-9D19-150065F568D5}.Release|x64.Build.0 = Release|Any CPU + {56D2C79E-2737-4FF9-9D19-150065F568D5}.Release|x86.ActiveCfg = Release|Any CPU + {56D2C79E-2737-4FF9-9D19-150065F568D5}.Release|x86.Build.0 = Release|Any CPU + {E41F6DC4-68B5-4EE3-97AE-801D725A2C13}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E41F6DC4-68B5-4EE3-97AE-801D725A2C13}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E41F6DC4-68B5-4EE3-97AE-801D725A2C13}.Debug|x64.ActiveCfg = Debug|Any CPU + {E41F6DC4-68B5-4EE3-97AE-801D725A2C13}.Debug|x64.Build.0 = Debug|Any CPU + {E41F6DC4-68B5-4EE3-97AE-801D725A2C13}.Debug|x86.ActiveCfg = Debug|Any CPU + {E41F6DC4-68B5-4EE3-97AE-801D725A2C13}.Debug|x86.Build.0 = Debug|Any CPU + {E41F6DC4-68B5-4EE3-97AE-801D725A2C13}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E41F6DC4-68B5-4EE3-97AE-801D725A2C13}.Release|Any CPU.Build.0 = Release|Any CPU + {E41F6DC4-68B5-4EE3-97AE-801D725A2C13}.Release|x64.ActiveCfg = Release|Any CPU + {E41F6DC4-68B5-4EE3-97AE-801D725A2C13}.Release|x64.Build.0 = Release|Any CPU + {E41F6DC4-68B5-4EE3-97AE-801D725A2C13}.Release|x86.ActiveCfg = Release|Any CPU + {E41F6DC4-68B5-4EE3-97AE-801D725A2C13}.Release|x86.Build.0 = Release|Any CPU + {285F1D0F-501F-4E2E-8FA0-F2CF28AE3798}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {285F1D0F-501F-4E2E-8FA0-F2CF28AE3798}.Debug|Any CPU.Build.0 = Debug|Any CPU + {285F1D0F-501F-4E2E-8FA0-F2CF28AE3798}.Debug|x64.ActiveCfg = Debug|Any CPU + {285F1D0F-501F-4E2E-8FA0-F2CF28AE3798}.Debug|x64.Build.0 = Debug|Any CPU + {285F1D0F-501F-4E2E-8FA0-F2CF28AE3798}.Debug|x86.ActiveCfg = Debug|Any CPU + {285F1D0F-501F-4E2E-8FA0-F2CF28AE3798}.Debug|x86.Build.0 = Debug|Any CPU + {285F1D0F-501F-4E2E-8FA0-F2CF28AE3798}.Release|Any 
CPU.ActiveCfg = Release|Any CPU + {285F1D0F-501F-4E2E-8FA0-F2CF28AE3798}.Release|Any CPU.Build.0 = Release|Any CPU + {285F1D0F-501F-4E2E-8FA0-F2CF28AE3798}.Release|x64.ActiveCfg = Release|Any CPU + {285F1D0F-501F-4E2E-8FA0-F2CF28AE3798}.Release|x64.Build.0 = Release|Any CPU + {285F1D0F-501F-4E2E-8FA0-F2CF28AE3798}.Release|x86.ActiveCfg = Release|Any CPU + {285F1D0F-501F-4E2E-8FA0-F2CF28AE3798}.Release|x86.Build.0 = Release|Any CPU + {26055403-C7F5-4709-8813-0F7387102791}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {26055403-C7F5-4709-8813-0F7387102791}.Debug|Any CPU.Build.0 = Debug|Any CPU + {26055403-C7F5-4709-8813-0F7387102791}.Debug|x64.ActiveCfg = Debug|Any CPU + {26055403-C7F5-4709-8813-0F7387102791}.Debug|x64.Build.0 = Debug|Any CPU + {26055403-C7F5-4709-8813-0F7387102791}.Debug|x86.ActiveCfg = Debug|Any CPU + {26055403-C7F5-4709-8813-0F7387102791}.Debug|x86.Build.0 = Debug|Any CPU + {26055403-C7F5-4709-8813-0F7387102791}.Release|Any CPU.ActiveCfg = Release|Any CPU + {26055403-C7F5-4709-8813-0F7387102791}.Release|Any CPU.Build.0 = Release|Any CPU + {26055403-C7F5-4709-8813-0F7387102791}.Release|x64.ActiveCfg = Release|Any CPU + {26055403-C7F5-4709-8813-0F7387102791}.Release|x64.Build.0 = Release|Any CPU + {26055403-C7F5-4709-8813-0F7387102791}.Release|x86.ActiveCfg = Release|Any CPU + {26055403-C7F5-4709-8813-0F7387102791}.Release|x86.Build.0 = Release|Any CPU + {0C00D0DA-C4C3-4B23-941F-A3DB2DBF33AF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {0C00D0DA-C4C3-4B23-941F-A3DB2DBF33AF}.Debug|Any CPU.Build.0 = Debug|Any CPU + {0C00D0DA-C4C3-4B23-941F-A3DB2DBF33AF}.Debug|x64.ActiveCfg = Debug|Any CPU + {0C00D0DA-C4C3-4B23-941F-A3DB2DBF33AF}.Debug|x64.Build.0 = Debug|Any CPU + {0C00D0DA-C4C3-4B23-941F-A3DB2DBF33AF}.Debug|x86.ActiveCfg = Debug|Any CPU + {0C00D0DA-C4C3-4B23-941F-A3DB2DBF33AF}.Debug|x86.Build.0 = Debug|Any CPU + {0C00D0DA-C4C3-4B23-941F-A3DB2DBF33AF}.Release|Any CPU.ActiveCfg = Release|Any CPU + {0C00D0DA-C4C3-4B23-941F-A3DB2DBF33AF}.Release|Any CPU.Build.0 = Release|Any CPU + {0C00D0DA-C4C3-4B23-941F-A3DB2DBF33AF}.Release|x64.ActiveCfg = Release|Any CPU + {0C00D0DA-C4C3-4B23-941F-A3DB2DBF33AF}.Release|x64.Build.0 = Release|Any CPU + {0C00D0DA-C4C3-4B23-941F-A3DB2DBF33AF}.Release|x86.ActiveCfg = Release|Any CPU + {0C00D0DA-C4C3-4B23-941F-A3DB2DBF33AF}.Release|x86.Build.0 = Release|Any CPU + {258327E9-431E-475C-933B-50893676E452}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {258327E9-431E-475C-933B-50893676E452}.Debug|Any CPU.Build.0 = Debug|Any CPU + {258327E9-431E-475C-933B-50893676E452}.Debug|x64.ActiveCfg = Debug|Any CPU + {258327E9-431E-475C-933B-50893676E452}.Debug|x64.Build.0 = Debug|Any CPU + {258327E9-431E-475C-933B-50893676E452}.Debug|x86.ActiveCfg = Debug|Any CPU + {258327E9-431E-475C-933B-50893676E452}.Debug|x86.Build.0 = Debug|Any CPU + {258327E9-431E-475C-933B-50893676E452}.Release|Any CPU.ActiveCfg = Release|Any CPU + {258327E9-431E-475C-933B-50893676E452}.Release|Any CPU.Build.0 = Release|Any CPU + {258327E9-431E-475C-933B-50893676E452}.Release|x64.ActiveCfg = Release|Any CPU + {258327E9-431E-475C-933B-50893676E452}.Release|x64.Build.0 = Release|Any CPU + {258327E9-431E-475C-933B-50893676E452}.Release|x86.ActiveCfg = Release|Any CPU + {258327E9-431E-475C-933B-50893676E452}.Release|x86.Build.0 = Release|Any CPU + {42AF60C8-A5E1-40E0-86F8-98256364AF6F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {42AF60C8-A5E1-40E0-86F8-98256364AF6F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {42AF60C8-A5E1-40E0-86F8-98256364AF6F}.Debug|x64.ActiveCfg = Debug|Any CPU + 
{42AF60C8-A5E1-40E0-86F8-98256364AF6F}.Debug|x64.Build.0 = Debug|Any CPU + {42AF60C8-A5E1-40E0-86F8-98256364AF6F}.Debug|x86.ActiveCfg = Debug|Any CPU + {42AF60C8-A5E1-40E0-86F8-98256364AF6F}.Debug|x86.Build.0 = Debug|Any CPU + {42AF60C8-A5E1-40E0-86F8-98256364AF6F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {42AF60C8-A5E1-40E0-86F8-98256364AF6F}.Release|Any CPU.Build.0 = Release|Any CPU + {42AF60C8-A5E1-40E0-86F8-98256364AF6F}.Release|x64.ActiveCfg = Release|Any CPU + {42AF60C8-A5E1-40E0-86F8-98256364AF6F}.Release|x64.Build.0 = Release|Any CPU + {42AF60C8-A5E1-40E0-86F8-98256364AF6F}.Release|x86.ActiveCfg = Release|Any CPU + {42AF60C8-A5E1-40E0-86F8-98256364AF6F}.Release|x86.Build.0 = Release|Any CPU + {88C6A9C3-B433-4C36-8767-429C8C2396F8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {88C6A9C3-B433-4C36-8767-429C8C2396F8}.Debug|Any CPU.Build.0 = Debug|Any CPU + {88C6A9C3-B433-4C36-8767-429C8C2396F8}.Debug|x64.ActiveCfg = Debug|Any CPU + {88C6A9C3-B433-4C36-8767-429C8C2396F8}.Debug|x64.Build.0 = Debug|Any CPU + {88C6A9C3-B433-4C36-8767-429C8C2396F8}.Debug|x86.ActiveCfg = Debug|Any CPU + {88C6A9C3-B433-4C36-8767-429C8C2396F8}.Debug|x86.Build.0 = Debug|Any CPU + {88C6A9C3-B433-4C36-8767-429C8C2396F8}.Release|Any CPU.ActiveCfg = Release|Any CPU + {88C6A9C3-B433-4C36-8767-429C8C2396F8}.Release|Any CPU.Build.0 = Release|Any CPU + {88C6A9C3-B433-4C36-8767-429C8C2396F8}.Release|x64.ActiveCfg = Release|Any CPU + {88C6A9C3-B433-4C36-8767-429C8C2396F8}.Release|x64.Build.0 = Release|Any CPU + {88C6A9C3-B433-4C36-8767-429C8C2396F8}.Release|x86.ActiveCfg = Release|Any CPU + {88C6A9C3-B433-4C36-8767-429C8C2396F8}.Release|x86.Build.0 = Release|Any CPU + {6B7099AB-01BF-4EC4-87D0-5C9C032266DE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {6B7099AB-01BF-4EC4-87D0-5C9C032266DE}.Debug|Any CPU.Build.0 = Debug|Any CPU + {6B7099AB-01BF-4EC4-87D0-5C9C032266DE}.Debug|x64.ActiveCfg = Debug|Any CPU + {6B7099AB-01BF-4EC4-87D0-5C9C032266DE}.Debug|x64.Build.0 = Debug|Any CPU + {6B7099AB-01BF-4EC4-87D0-5C9C032266DE}.Debug|x86.ActiveCfg = Debug|Any CPU + {6B7099AB-01BF-4EC4-87D0-5C9C032266DE}.Debug|x86.Build.0 = Debug|Any CPU + {6B7099AB-01BF-4EC4-87D0-5C9C032266DE}.Release|Any CPU.ActiveCfg = Release|Any CPU + {6B7099AB-01BF-4EC4-87D0-5C9C032266DE}.Release|Any CPU.Build.0 = Release|Any CPU + {6B7099AB-01BF-4EC4-87D0-5C9C032266DE}.Release|x64.ActiveCfg = Release|Any CPU + {6B7099AB-01BF-4EC4-87D0-5C9C032266DE}.Release|x64.Build.0 = Release|Any CPU + {6B7099AB-01BF-4EC4-87D0-5C9C032266DE}.Release|x86.ActiveCfg = Release|Any CPU + {6B7099AB-01BF-4EC4-87D0-5C9C032266DE}.Release|x86.Build.0 = Release|Any CPU + {14C918EA-693E-41FE-ACAE-2E82DF077BEA}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {14C918EA-693E-41FE-ACAE-2E82DF077BEA}.Debug|Any CPU.Build.0 = Debug|Any CPU + {14C918EA-693E-41FE-ACAE-2E82DF077BEA}.Debug|x64.ActiveCfg = Debug|Any CPU + {14C918EA-693E-41FE-ACAE-2E82DF077BEA}.Debug|x64.Build.0 = Debug|Any CPU + {14C918EA-693E-41FE-ACAE-2E82DF077BEA}.Debug|x86.ActiveCfg = Debug|Any CPU + {14C918EA-693E-41FE-ACAE-2E82DF077BEA}.Debug|x86.Build.0 = Debug|Any CPU + {14C918EA-693E-41FE-ACAE-2E82DF077BEA}.Release|Any CPU.ActiveCfg = Release|Any CPU + {14C918EA-693E-41FE-ACAE-2E82DF077BEA}.Release|Any CPU.Build.0 = Release|Any CPU + {14C918EA-693E-41FE-ACAE-2E82DF077BEA}.Release|x64.ActiveCfg = Release|Any CPU + {14C918EA-693E-41FE-ACAE-2E82DF077BEA}.Release|x64.Build.0 = Release|Any CPU + {14C918EA-693E-41FE-ACAE-2E82DF077BEA}.Release|x86.ActiveCfg = Release|Any CPU + {14C918EA-693E-41FE-ACAE-2E82DF077BEA}.Release|x86.Build.0 = 
Release|Any CPU + {81111B26-74F6-4912-9084-7115FD119945}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {81111B26-74F6-4912-9084-7115FD119945}.Debug|Any CPU.Build.0 = Debug|Any CPU + {81111B26-74F6-4912-9084-7115FD119945}.Debug|x64.ActiveCfg = Debug|Any CPU + {81111B26-74F6-4912-9084-7115FD119945}.Debug|x64.Build.0 = Debug|Any CPU + {81111B26-74F6-4912-9084-7115FD119945}.Debug|x86.ActiveCfg = Debug|Any CPU + {81111B26-74F6-4912-9084-7115FD119945}.Debug|x86.Build.0 = Debug|Any CPU + {81111B26-74F6-4912-9084-7115FD119945}.Release|Any CPU.ActiveCfg = Release|Any CPU + {81111B26-74F6-4912-9084-7115FD119945}.Release|Any CPU.Build.0 = Release|Any CPU + {81111B26-74F6-4912-9084-7115FD119945}.Release|x64.ActiveCfg = Release|Any CPU + {81111B26-74F6-4912-9084-7115FD119945}.Release|x64.Build.0 = Release|Any CPU + {81111B26-74F6-4912-9084-7115FD119945}.Release|x86.ActiveCfg = Release|Any CPU + {81111B26-74F6-4912-9084-7115FD119945}.Release|x86.Build.0 = Release|Any CPU + {80E2D661-FF3E-4A10-A2DF-AFD4F3D433FE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {80E2D661-FF3E-4A10-A2DF-AFD4F3D433FE}.Debug|Any CPU.Build.0 = Debug|Any CPU + {80E2D661-FF3E-4A10-A2DF-AFD4F3D433FE}.Debug|x64.ActiveCfg = Debug|Any CPU + {80E2D661-FF3E-4A10-A2DF-AFD4F3D433FE}.Debug|x64.Build.0 = Debug|Any CPU + {80E2D661-FF3E-4A10-A2DF-AFD4F3D433FE}.Debug|x86.ActiveCfg = Debug|Any CPU + {80E2D661-FF3E-4A10-A2DF-AFD4F3D433FE}.Debug|x86.Build.0 = Debug|Any CPU + {80E2D661-FF3E-4A10-A2DF-AFD4F3D433FE}.Release|Any CPU.ActiveCfg = Release|Any CPU + {80E2D661-FF3E-4A10-A2DF-AFD4F3D433FE}.Release|Any CPU.Build.0 = Release|Any CPU + {80E2D661-FF3E-4A10-A2DF-AFD4F3D433FE}.Release|x64.ActiveCfg = Release|Any CPU + {80E2D661-FF3E-4A10-A2DF-AFD4F3D433FE}.Release|x64.Build.0 = Release|Any CPU + {80E2D661-FF3E-4A10-A2DF-AFD4F3D433FE}.Release|x86.ActiveCfg = Release|Any CPU + {80E2D661-FF3E-4A10-A2DF-AFD4F3D433FE}.Release|x86.Build.0 = Release|Any CPU + {8D0F501D-01B1-4E24-958B-FAF35B267705}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {8D0F501D-01B1-4E24-958B-FAF35B267705}.Debug|Any CPU.Build.0 = Debug|Any CPU + {8D0F501D-01B1-4E24-958B-FAF35B267705}.Debug|x64.ActiveCfg = Debug|Any CPU + {8D0F501D-01B1-4E24-958B-FAF35B267705}.Debug|x64.Build.0 = Debug|Any CPU + {8D0F501D-01B1-4E24-958B-FAF35B267705}.Debug|x86.ActiveCfg = Debug|Any CPU + {8D0F501D-01B1-4E24-958B-FAF35B267705}.Debug|x86.Build.0 = Debug|Any CPU + {8D0F501D-01B1-4E24-958B-FAF35B267705}.Release|Any CPU.ActiveCfg = Release|Any CPU + {8D0F501D-01B1-4E24-958B-FAF35B267705}.Release|Any CPU.Build.0 = Release|Any CPU + {8D0F501D-01B1-4E24-958B-FAF35B267705}.Release|x64.ActiveCfg = Release|Any CPU + {8D0F501D-01B1-4E24-958B-FAF35B267705}.Release|x64.Build.0 = Release|Any CPU + {8D0F501D-01B1-4E24-958B-FAF35B267705}.Release|x86.ActiveCfg = Release|Any CPU + {8D0F501D-01B1-4E24-958B-FAF35B267705}.Release|x86.Build.0 = Release|Any CPU + {5BA91095-7F10-4717-B296-49DFBFC1C9C2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {5BA91095-7F10-4717-B296-49DFBFC1C9C2}.Debug|Any CPU.Build.0 = Debug|Any CPU + {5BA91095-7F10-4717-B296-49DFBFC1C9C2}.Debug|x64.ActiveCfg = Debug|Any CPU + {5BA91095-7F10-4717-B296-49DFBFC1C9C2}.Debug|x64.Build.0 = Debug|Any CPU + {5BA91095-7F10-4717-B296-49DFBFC1C9C2}.Debug|x86.ActiveCfg = Debug|Any CPU + {5BA91095-7F10-4717-B296-49DFBFC1C9C2}.Debug|x86.Build.0 = Debug|Any CPU + {5BA91095-7F10-4717-B296-49DFBFC1C9C2}.Release|Any CPU.ActiveCfg = Release|Any CPU + {5BA91095-7F10-4717-B296-49DFBFC1C9C2}.Release|Any CPU.Build.0 = Release|Any CPU + 
{5BA91095-7F10-4717-B296-49DFBFC1C9C2}.Release|x64.ActiveCfg = Release|Any CPU + {5BA91095-7F10-4717-B296-49DFBFC1C9C2}.Release|x64.Build.0 = Release|Any CPU + {5BA91095-7F10-4717-B296-49DFBFC1C9C2}.Release|x86.ActiveCfg = Release|Any CPU + {5BA91095-7F10-4717-B296-49DFBFC1C9C2}.Release|x86.Build.0 = Release|Any CPU + {99616566-4EF1-4DC7-B655-825FE43D203D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {99616566-4EF1-4DC7-B655-825FE43D203D}.Debug|Any CPU.Build.0 = Debug|Any CPU + {99616566-4EF1-4DC7-B655-825FE43D203D}.Debug|x64.ActiveCfg = Debug|Any CPU + {99616566-4EF1-4DC7-B655-825FE43D203D}.Debug|x64.Build.0 = Debug|Any CPU + {99616566-4EF1-4DC7-B655-825FE43D203D}.Debug|x86.ActiveCfg = Debug|Any CPU + {99616566-4EF1-4DC7-B655-825FE43D203D}.Debug|x86.Build.0 = Debug|Any CPU + {99616566-4EF1-4DC7-B655-825FE43D203D}.Release|Any CPU.ActiveCfg = Release|Any CPU + {99616566-4EF1-4DC7-B655-825FE43D203D}.Release|Any CPU.Build.0 = Release|Any CPU + {99616566-4EF1-4DC7-B655-825FE43D203D}.Release|x64.ActiveCfg = Release|Any CPU + {99616566-4EF1-4DC7-B655-825FE43D203D}.Release|x64.Build.0 = Release|Any CPU + {99616566-4EF1-4DC7-B655-825FE43D203D}.Release|x86.ActiveCfg = Release|Any CPU + {99616566-4EF1-4DC7-B655-825FE43D203D}.Release|x86.Build.0 = Release|Any CPU + {EE3C03AD-E604-4C57-9B78-CF7F49FBFCB0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {EE3C03AD-E604-4C57-9B78-CF7F49FBFCB0}.Debug|Any CPU.Build.0 = Debug|Any CPU + {EE3C03AD-E604-4C57-9B78-CF7F49FBFCB0}.Debug|x64.ActiveCfg = Debug|Any CPU + {EE3C03AD-E604-4C57-9B78-CF7F49FBFCB0}.Debug|x64.Build.0 = Debug|Any CPU + {EE3C03AD-E604-4C57-9B78-CF7F49FBFCB0}.Debug|x86.ActiveCfg = Debug|Any CPU + {EE3C03AD-E604-4C57-9B78-CF7F49FBFCB0}.Debug|x86.Build.0 = Debug|Any CPU + {EE3C03AD-E604-4C57-9B78-CF7F49FBFCB0}.Release|Any CPU.ActiveCfg = Release|Any CPU + {EE3C03AD-E604-4C57-9B78-CF7F49FBFCB0}.Release|Any CPU.Build.0 = Release|Any CPU + {EE3C03AD-E604-4C57-9B78-CF7F49FBFCB0}.Release|x64.ActiveCfg = Release|Any CPU + {EE3C03AD-E604-4C57-9B78-CF7F49FBFCB0}.Release|x64.Build.0 = Release|Any CPU + {EE3C03AD-E604-4C57-9B78-CF7F49FBFCB0}.Release|x86.ActiveCfg = Release|Any CPU + {EE3C03AD-E604-4C57-9B78-CF7F49FBFCB0}.Release|x86.Build.0 = Release|Any CPU + {A3B19095-2D95-4B09-B07E-2C082C72394B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {A3B19095-2D95-4B09-B07E-2C082C72394B}.Debug|Any CPU.Build.0 = Debug|Any CPU + {A3B19095-2D95-4B09-B07E-2C082C72394B}.Debug|x64.ActiveCfg = Debug|Any CPU + {A3B19095-2D95-4B09-B07E-2C082C72394B}.Debug|x64.Build.0 = Debug|Any CPU + {A3B19095-2D95-4B09-B07E-2C082C72394B}.Debug|x86.ActiveCfg = Debug|Any CPU + {A3B19095-2D95-4B09-B07E-2C082C72394B}.Debug|x86.Build.0 = Debug|Any CPU + {A3B19095-2D95-4B09-B07E-2C082C72394B}.Release|Any CPU.ActiveCfg = Release|Any CPU + {A3B19095-2D95-4B09-B07E-2C082C72394B}.Release|Any CPU.Build.0 = Release|Any CPU + {A3B19095-2D95-4B09-B07E-2C082C72394B}.Release|x64.ActiveCfg = Release|Any CPU + {A3B19095-2D95-4B09-B07E-2C082C72394B}.Release|x64.Build.0 = Release|Any CPU + {A3B19095-2D95-4B09-B07E-2C082C72394B}.Release|x86.ActiveCfg = Release|Any CPU + {A3B19095-2D95-4B09-B07E-2C082C72394B}.Release|x86.Build.0 = Release|Any CPU + {807837AF-B392-4589-ADF1-3FDB34D6C5BF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {807837AF-B392-4589-ADF1-3FDB34D6C5BF}.Debug|Any CPU.Build.0 = Debug|Any CPU + {807837AF-B392-4589-ADF1-3FDB34D6C5BF}.Debug|x64.ActiveCfg = Debug|Any CPU + {807837AF-B392-4589-ADF1-3FDB34D6C5BF}.Debug|x64.Build.0 = Debug|Any CPU + {807837AF-B392-4589-ADF1-3FDB34D6C5BF}.Debug|x86.ActiveCfg = 
Debug|Any CPU + {807837AF-B392-4589-ADF1-3FDB34D6C5BF}.Debug|x86.Build.0 = Debug|Any CPU + {807837AF-B392-4589-ADF1-3FDB34D6C5BF}.Release|Any CPU.ActiveCfg = Release|Any CPU + {807837AF-B392-4589-ADF1-3FDB34D6C5BF}.Release|Any CPU.Build.0 = Release|Any CPU + {807837AF-B392-4589-ADF1-3FDB34D6C5BF}.Release|x64.ActiveCfg = Release|Any CPU + {807837AF-B392-4589-ADF1-3FDB34D6C5BF}.Release|x64.Build.0 = Release|Any CPU + {807837AF-B392-4589-ADF1-3FDB34D6C5BF}.Release|x86.ActiveCfg = Release|Any CPU + {807837AF-B392-4589-ADF1-3FDB34D6C5BF}.Release|x86.Build.0 = Release|Any CPU + {64EAFDCF-8283-4D5C-AC78-7969D5FE926A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {64EAFDCF-8283-4D5C-AC78-7969D5FE926A}.Debug|Any CPU.Build.0 = Debug|Any CPU + {64EAFDCF-8283-4D5C-AC78-7969D5FE926A}.Debug|x64.ActiveCfg = Debug|Any CPU + {64EAFDCF-8283-4D5C-AC78-7969D5FE926A}.Debug|x64.Build.0 = Debug|Any CPU + {64EAFDCF-8283-4D5C-AC78-7969D5FE926A}.Debug|x86.ActiveCfg = Debug|Any CPU + {64EAFDCF-8283-4D5C-AC78-7969D5FE926A}.Debug|x86.Build.0 = Debug|Any CPU + {64EAFDCF-8283-4D5C-AC78-7969D5FE926A}.Release|Any CPU.ActiveCfg = Release|Any CPU + {64EAFDCF-8283-4D5C-AC78-7969D5FE926A}.Release|Any CPU.Build.0 = Release|Any CPU + {64EAFDCF-8283-4D5C-AC78-7969D5FE926A}.Release|x64.ActiveCfg = Release|Any CPU + {64EAFDCF-8283-4D5C-AC78-7969D5FE926A}.Release|x64.Build.0 = Release|Any CPU + {64EAFDCF-8283-4D5C-AC78-7969D5FE926A}.Release|x86.ActiveCfg = Release|Any CPU + {64EAFDCF-8283-4D5C-AC78-7969D5FE926A}.Release|x86.Build.0 = Release|Any CPU + {68F4D8A1-E32F-487A-B460-325F36989BE3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {68F4D8A1-E32F-487A-B460-325F36989BE3}.Debug|Any CPU.Build.0 = Debug|Any CPU + {68F4D8A1-E32F-487A-B460-325F36989BE3}.Debug|x64.ActiveCfg = Debug|Any CPU + {68F4D8A1-E32F-487A-B460-325F36989BE3}.Debug|x64.Build.0 = Debug|Any CPU + {68F4D8A1-E32F-487A-B460-325F36989BE3}.Debug|x86.ActiveCfg = Debug|Any CPU + {68F4D8A1-E32F-487A-B460-325F36989BE3}.Debug|x86.Build.0 = Debug|Any CPU + {68F4D8A1-E32F-487A-B460-325F36989BE3}.Release|Any CPU.ActiveCfg = Release|Any CPU + {68F4D8A1-E32F-487A-B460-325F36989BE3}.Release|Any CPU.Build.0 = Release|Any CPU + {68F4D8A1-E32F-487A-B460-325F36989BE3}.Release|x64.ActiveCfg = Release|Any CPU + {68F4D8A1-E32F-487A-B460-325F36989BE3}.Release|x64.Build.0 = Release|Any CPU + {68F4D8A1-E32F-487A-B460-325F36989BE3}.Release|x86.ActiveCfg = Release|Any CPU + {68F4D8A1-E32F-487A-B460-325F36989BE3}.Release|x86.Build.0 = Release|Any CPU + {4A3DA4AE-7B88-4674-A7E2-F5D42B8256F2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {4A3DA4AE-7B88-4674-A7E2-F5D42B8256F2}.Debug|Any CPU.Build.0 = Debug|Any CPU + {4A3DA4AE-7B88-4674-A7E2-F5D42B8256F2}.Debug|x64.ActiveCfg = Debug|Any CPU + {4A3DA4AE-7B88-4674-A7E2-F5D42B8256F2}.Debug|x64.Build.0 = Debug|Any CPU + {4A3DA4AE-7B88-4674-A7E2-F5D42B8256F2}.Debug|x86.ActiveCfg = Debug|Any CPU + {4A3DA4AE-7B88-4674-A7E2-F5D42B8256F2}.Debug|x86.Build.0 = Debug|Any CPU + {4A3DA4AE-7B88-4674-A7E2-F5D42B8256F2}.Release|Any CPU.ActiveCfg = Release|Any CPU + {4A3DA4AE-7B88-4674-A7E2-F5D42B8256F2}.Release|Any CPU.Build.0 = Release|Any CPU + {4A3DA4AE-7B88-4674-A7E2-F5D42B8256F2}.Release|x64.ActiveCfg = Release|Any CPU + {4A3DA4AE-7B88-4674-A7E2-F5D42B8256F2}.Release|x64.Build.0 = Release|Any CPU + {4A3DA4AE-7B88-4674-A7E2-F5D42B8256F2}.Release|x86.ActiveCfg = Release|Any CPU + {4A3DA4AE-7B88-4674-A7E2-F5D42B8256F2}.Release|x86.Build.0 = Release|Any CPU + {606C751B-7CF1-47CF-A25C-9248A55C814F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + 
{606C751B-7CF1-47CF-A25C-9248A55C814F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {606C751B-7CF1-47CF-A25C-9248A55C814F}.Debug|x64.ActiveCfg = Debug|Any CPU + {606C751B-7CF1-47CF-A25C-9248A55C814F}.Debug|x64.Build.0 = Debug|Any CPU + {606C751B-7CF1-47CF-A25C-9248A55C814F}.Debug|x86.ActiveCfg = Debug|Any CPU + {606C751B-7CF1-47CF-A25C-9248A55C814F}.Debug|x86.Build.0 = Debug|Any CPU + {606C751B-7CF1-47CF-A25C-9248A55C814F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {606C751B-7CF1-47CF-A25C-9248A55C814F}.Release|Any CPU.Build.0 = Release|Any CPU + {606C751B-7CF1-47CF-A25C-9248A55C814F}.Release|x64.ActiveCfg = Release|Any CPU + {606C751B-7CF1-47CF-A25C-9248A55C814F}.Release|x64.Build.0 = Release|Any CPU + {606C751B-7CF1-47CF-A25C-9248A55C814F}.Release|x86.ActiveCfg = Release|Any CPU + {606C751B-7CF1-47CF-A25C-9248A55C814F}.Release|x86.Build.0 = Release|Any CPU + {0BE44D0A-CC4B-4E84-8AF3-D8D99551C431}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {0BE44D0A-CC4B-4E84-8AF3-D8D99551C431}.Debug|Any CPU.Build.0 = Debug|Any CPU + {0BE44D0A-CC4B-4E84-8AF3-D8D99551C431}.Debug|x64.ActiveCfg = Debug|Any CPU + {0BE44D0A-CC4B-4E84-8AF3-D8D99551C431}.Debug|x64.Build.0 = Debug|Any CPU + {0BE44D0A-CC4B-4E84-8AF3-D8D99551C431}.Debug|x86.ActiveCfg = Debug|Any CPU + {0BE44D0A-CC4B-4E84-8AF3-D8D99551C431}.Debug|x86.Build.0 = Debug|Any CPU + {0BE44D0A-CC4B-4E84-8AF3-D8D99551C431}.Release|Any CPU.ActiveCfg = Release|Any CPU + {0BE44D0A-CC4B-4E84-8AF3-D8D99551C431}.Release|Any CPU.Build.0 = Release|Any CPU + {0BE44D0A-CC4B-4E84-8AF3-D8D99551C431}.Release|x64.ActiveCfg = Release|Any CPU + {0BE44D0A-CC4B-4E84-8AF3-D8D99551C431}.Release|x64.Build.0 = Release|Any CPU + {0BE44D0A-CC4B-4E84-8AF3-D8D99551C431}.Release|x86.ActiveCfg = Release|Any CPU + {0BE44D0A-CC4B-4E84-8AF3-D8D99551C431}.Release|x86.Build.0 = Release|Any CPU + {CC4CCE5F-55BC-4745-A204-4FA92BC1BADC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {CC4CCE5F-55BC-4745-A204-4FA92BC1BADC}.Debug|Any CPU.Build.0 = Debug|Any CPU + {CC4CCE5F-55BC-4745-A204-4FA92BC1BADC}.Debug|x64.ActiveCfg = Debug|Any CPU + {CC4CCE5F-55BC-4745-A204-4FA92BC1BADC}.Debug|x64.Build.0 = Debug|Any CPU + {CC4CCE5F-55BC-4745-A204-4FA92BC1BADC}.Debug|x86.ActiveCfg = Debug|Any CPU + {CC4CCE5F-55BC-4745-A204-4FA92BC1BADC}.Debug|x86.Build.0 = Debug|Any CPU + {CC4CCE5F-55BC-4745-A204-4FA92BC1BADC}.Release|Any CPU.ActiveCfg = Release|Any CPU + {CC4CCE5F-55BC-4745-A204-4FA92BC1BADC}.Release|Any CPU.Build.0 = Release|Any CPU + {CC4CCE5F-55BC-4745-A204-4FA92BC1BADC}.Release|x64.ActiveCfg = Release|Any CPU + {CC4CCE5F-55BC-4745-A204-4FA92BC1BADC}.Release|x64.Build.0 = Release|Any CPU + {CC4CCE5F-55BC-4745-A204-4FA92BC1BADC}.Release|x86.ActiveCfg = Release|Any CPU + {CC4CCE5F-55BC-4745-A204-4FA92BC1BADC}.Release|x86.Build.0 = Release|Any CPU + {99BAE717-9A2E-41F5-9ECC-5FB97E4A6066}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {99BAE717-9A2E-41F5-9ECC-5FB97E4A6066}.Debug|Any CPU.Build.0 = Debug|Any CPU + {99BAE717-9A2E-41F5-9ECC-5FB97E4A6066}.Debug|x64.ActiveCfg = Debug|Any CPU + {99BAE717-9A2E-41F5-9ECC-5FB97E4A6066}.Debug|x64.Build.0 = Debug|Any CPU + {99BAE717-9A2E-41F5-9ECC-5FB97E4A6066}.Debug|x86.ActiveCfg = Debug|Any CPU + {99BAE717-9A2E-41F5-9ECC-5FB97E4A6066}.Debug|x86.Build.0 = Debug|Any CPU + {99BAE717-9A2E-41F5-9ECC-5FB97E4A6066}.Release|Any CPU.ActiveCfg = Release|Any CPU + {99BAE717-9A2E-41F5-9ECC-5FB97E4A6066}.Release|Any CPU.Build.0 = Release|Any CPU + {99BAE717-9A2E-41F5-9ECC-5FB97E4A6066}.Release|x64.ActiveCfg = Release|Any CPU + {99BAE717-9A2E-41F5-9ECC-5FB97E4A6066}.Release|x64.Build.0 = 
Release|Any CPU + {99BAE717-9A2E-41F5-9ECC-5FB97E4A6066}.Release|x86.ActiveCfg = Release|Any CPU + {99BAE717-9A2E-41F5-9ECC-5FB97E4A6066}.Release|x86.Build.0 = Release|Any CPU + {5CCE0DB7-C115-4B21-A7AE-C8488C22A853}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {5CCE0DB7-C115-4B21-A7AE-C8488C22A853}.Debug|Any CPU.Build.0 = Debug|Any CPU + {5CCE0DB7-C115-4B21-A7AE-C8488C22A853}.Debug|x64.ActiveCfg = Debug|Any CPU + {5CCE0DB7-C115-4B21-A7AE-C8488C22A853}.Debug|x64.Build.0 = Debug|Any CPU + {5CCE0DB7-C115-4B21-A7AE-C8488C22A853}.Debug|x86.ActiveCfg = Debug|Any CPU + {5CCE0DB7-C115-4B21-A7AE-C8488C22A853}.Debug|x86.Build.0 = Debug|Any CPU + {5CCE0DB7-C115-4B21-A7AE-C8488C22A853}.Release|Any CPU.ActiveCfg = Release|Any CPU + {5CCE0DB7-C115-4B21-A7AE-C8488C22A853}.Release|Any CPU.Build.0 = Release|Any CPU + {5CCE0DB7-C115-4B21-A7AE-C8488C22A853}.Release|x64.ActiveCfg = Release|Any CPU + {5CCE0DB7-C115-4B21-A7AE-C8488C22A853}.Release|x64.Build.0 = Release|Any CPU + {5CCE0DB7-C115-4B21-A7AE-C8488C22A853}.Release|x86.ActiveCfg = Release|Any CPU + {5CCE0DB7-C115-4B21-A7AE-C8488C22A853}.Release|x86.Build.0 = Release|Any CPU + {A09C9E66-5496-47EC-8B23-9EEB7CBDC75E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {A09C9E66-5496-47EC-8B23-9EEB7CBDC75E}.Debug|Any CPU.Build.0 = Debug|Any CPU + {A09C9E66-5496-47EC-8B23-9EEB7CBDC75E}.Debug|x64.ActiveCfg = Debug|Any CPU + {A09C9E66-5496-47EC-8B23-9EEB7CBDC75E}.Debug|x64.Build.0 = Debug|Any CPU + {A09C9E66-5496-47EC-8B23-9EEB7CBDC75E}.Debug|x86.ActiveCfg = Debug|Any CPU + {A09C9E66-5496-47EC-8B23-9EEB7CBDC75E}.Debug|x86.Build.0 = Debug|Any CPU + {A09C9E66-5496-47EC-8B23-9EEB7CBDC75E}.Release|Any CPU.ActiveCfg = Release|Any CPU + {A09C9E66-5496-47EC-8B23-9EEB7CBDC75E}.Release|Any CPU.Build.0 = Release|Any CPU + {A09C9E66-5496-47EC-8B23-9EEB7CBDC75E}.Release|x64.ActiveCfg = Release|Any CPU + {A09C9E66-5496-47EC-8B23-9EEB7CBDC75E}.Release|x64.Build.0 = Release|Any CPU + {A09C9E66-5496-47EC-8B23-9EEB7CBDC75E}.Release|x86.ActiveCfg = Release|Any CPU + {A09C9E66-5496-47EC-8B23-9EEB7CBDC75E}.Release|x86.Build.0 = Release|Any CPU + {06DC817F-A936-4F83-8929-E00622B32245}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {06DC817F-A936-4F83-8929-E00622B32245}.Debug|Any CPU.Build.0 = Debug|Any CPU + {06DC817F-A936-4F83-8929-E00622B32245}.Debug|x64.ActiveCfg = Debug|Any CPU + {06DC817F-A936-4F83-8929-E00622B32245}.Debug|x64.Build.0 = Debug|Any CPU + {06DC817F-A936-4F83-8929-E00622B32245}.Debug|x86.ActiveCfg = Debug|Any CPU + {06DC817F-A936-4F83-8929-E00622B32245}.Debug|x86.Build.0 = Debug|Any CPU + {06DC817F-A936-4F83-8929-E00622B32245}.Release|Any CPU.ActiveCfg = Release|Any CPU + {06DC817F-A936-4F83-8929-E00622B32245}.Release|Any CPU.Build.0 = Release|Any CPU + {06DC817F-A936-4F83-8929-E00622B32245}.Release|x64.ActiveCfg = Release|Any CPU + {06DC817F-A936-4F83-8929-E00622B32245}.Release|x64.Build.0 = Release|Any CPU + {06DC817F-A936-4F83-8929-E00622B32245}.Release|x86.ActiveCfg = Release|Any CPU + {06DC817F-A936-4F83-8929-E00622B32245}.Release|x86.Build.0 = Release|Any CPU + {2C999476-0291-4161-B3E9-1AA99A3B1139}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {2C999476-0291-4161-B3E9-1AA99A3B1139}.Debug|Any CPU.Build.0 = Debug|Any CPU + {2C999476-0291-4161-B3E9-1AA99A3B1139}.Debug|x64.ActiveCfg = Debug|Any CPU + {2C999476-0291-4161-B3E9-1AA99A3B1139}.Debug|x64.Build.0 = Debug|Any CPU + {2C999476-0291-4161-B3E9-1AA99A3B1139}.Debug|x86.ActiveCfg = Debug|Any CPU + {2C999476-0291-4161-B3E9-1AA99A3B1139}.Debug|x86.Build.0 = Debug|Any CPU + {2C999476-0291-4161-B3E9-1AA99A3B1139}.Release|Any 
CPU.ActiveCfg = Release|Any CPU + {2C999476-0291-4161-B3E9-1AA99A3B1139}.Release|Any CPU.Build.0 = Release|Any CPU + {2C999476-0291-4161-B3E9-1AA99A3B1139}.Release|x64.ActiveCfg = Release|Any CPU + {2C999476-0291-4161-B3E9-1AA99A3B1139}.Release|x64.Build.0 = Release|Any CPU + {2C999476-0291-4161-B3E9-1AA99A3B1139}.Release|x86.ActiveCfg = Release|Any CPU + {2C999476-0291-4161-B3E9-1AA99A3B1139}.Release|x86.Build.0 = Release|Any CPU + {476EAADA-1B39-4049-ABE4-CCAC21FFE9E2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {476EAADA-1B39-4049-ABE4-CCAC21FFE9E2}.Debug|Any CPU.Build.0 = Debug|Any CPU + {476EAADA-1B39-4049-ABE4-CCAC21FFE9E2}.Debug|x64.ActiveCfg = Debug|Any CPU + {476EAADA-1B39-4049-ABE4-CCAC21FFE9E2}.Debug|x64.Build.0 = Debug|Any CPU + {476EAADA-1B39-4049-ABE4-CCAC21FFE9E2}.Debug|x86.ActiveCfg = Debug|Any CPU + {476EAADA-1B39-4049-ABE4-CCAC21FFE9E2}.Debug|x86.Build.0 = Debug|Any CPU + {476EAADA-1B39-4049-ABE4-CCAC21FFE9E2}.Release|Any CPU.ActiveCfg = Release|Any CPU + {476EAADA-1B39-4049-ABE4-CCAC21FFE9E2}.Release|Any CPU.Build.0 = Release|Any CPU + {476EAADA-1B39-4049-ABE4-CCAC21FFE9E2}.Release|x64.ActiveCfg = Release|Any CPU + {476EAADA-1B39-4049-ABE4-CCAC21FFE9E2}.Release|x64.Build.0 = Release|Any CPU + {476EAADA-1B39-4049-ABE4-CCAC21FFE9E2}.Release|x86.ActiveCfg = Release|Any CPU + {476EAADA-1B39-4049-ABE4-CCAC21FFE9E2}.Release|x86.Build.0 = Release|Any CPU + {0EF56124-E6E8-4E89-95DD-5A5D5FF05A98}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {0EF56124-E6E8-4E89-95DD-5A5D5FF05A98}.Debug|Any CPU.Build.0 = Debug|Any CPU + {0EF56124-E6E8-4E89-95DD-5A5D5FF05A98}.Debug|x64.ActiveCfg = Debug|Any CPU + {0EF56124-E6E8-4E89-95DD-5A5D5FF05A98}.Debug|x64.Build.0 = Debug|Any CPU + {0EF56124-E6E8-4E89-95DD-5A5D5FF05A98}.Debug|x86.ActiveCfg = Debug|Any CPU + {0EF56124-E6E8-4E89-95DD-5A5D5FF05A98}.Debug|x86.Build.0 = Debug|Any CPU + {0EF56124-E6E8-4E89-95DD-5A5D5FF05A98}.Release|Any CPU.ActiveCfg = Release|Any CPU + {0EF56124-E6E8-4E89-95DD-5A5D5FF05A98}.Release|Any CPU.Build.0 = Release|Any CPU + {0EF56124-E6E8-4E89-95DD-5A5D5FF05A98}.Release|x64.ActiveCfg = Release|Any CPU + {0EF56124-E6E8-4E89-95DD-5A5D5FF05A98}.Release|x64.Build.0 = Release|Any CPU + {0EF56124-E6E8-4E89-95DD-5A5D5FF05A98}.Release|x86.ActiveCfg = Release|Any CPU + {0EF56124-E6E8-4E89-95DD-5A5D5FF05A98}.Release|x86.Build.0 = Release|Any CPU + {0DBB9FC4-2E46-4C3E-BE88-2A8DCB59DB7D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {0DBB9FC4-2E46-4C3E-BE88-2A8DCB59DB7D}.Debug|Any CPU.Build.0 = Debug|Any CPU + {0DBB9FC4-2E46-4C3E-BE88-2A8DCB59DB7D}.Debug|x64.ActiveCfg = Debug|Any CPU + {0DBB9FC4-2E46-4C3E-BE88-2A8DCB59DB7D}.Debug|x64.Build.0 = Debug|Any CPU + {0DBB9FC4-2E46-4C3E-BE88-2A8DCB59DB7D}.Debug|x86.ActiveCfg = Debug|Any CPU + {0DBB9FC4-2E46-4C3E-BE88-2A8DCB59DB7D}.Debug|x86.Build.0 = Debug|Any CPU + {0DBB9FC4-2E46-4C3E-BE88-2A8DCB59DB7D}.Release|Any CPU.ActiveCfg = Release|Any CPU + {0DBB9FC4-2E46-4C3E-BE88-2A8DCB59DB7D}.Release|Any CPU.Build.0 = Release|Any CPU + {0DBB9FC4-2E46-4C3E-BE88-2A8DCB59DB7D}.Release|x64.ActiveCfg = Release|Any CPU + {0DBB9FC4-2E46-4C3E-BE88-2A8DCB59DB7D}.Release|x64.Build.0 = Release|Any CPU + {0DBB9FC4-2E46-4C3E-BE88-2A8DCB59DB7D}.Release|x86.ActiveCfg = Release|Any CPU + {0DBB9FC4-2E46-4C3E-BE88-2A8DCB59DB7D}.Release|x86.Build.0 = Release|Any CPU + {8A40142F-E8C8-4E86-BE70-7DD4AB1FFDEE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {8A40142F-E8C8-4E86-BE70-7DD4AB1FFDEE}.Debug|Any CPU.Build.0 = Debug|Any CPU + {8A40142F-E8C8-4E86-BE70-7DD4AB1FFDEE}.Debug|x64.ActiveCfg = Debug|Any CPU + 
{8A40142F-E8C8-4E86-BE70-7DD4AB1FFDEE}.Debug|x64.Build.0 = Debug|Any CPU + {8A40142F-E8C8-4E86-BE70-7DD4AB1FFDEE}.Debug|x86.ActiveCfg = Debug|Any CPU + {8A40142F-E8C8-4E86-BE70-7DD4AB1FFDEE}.Debug|x86.Build.0 = Debug|Any CPU + {8A40142F-E8C8-4E86-BE70-7DD4AB1FFDEE}.Release|Any CPU.ActiveCfg = Release|Any CPU + {8A40142F-E8C8-4E86-BE70-7DD4AB1FFDEE}.Release|Any CPU.Build.0 = Release|Any CPU + {8A40142F-E8C8-4E86-BE70-7DD4AB1FFDEE}.Release|x64.ActiveCfg = Release|Any CPU + {8A40142F-E8C8-4E86-BE70-7DD4AB1FFDEE}.Release|x64.Build.0 = Release|Any CPU + {8A40142F-E8C8-4E86-BE70-7DD4AB1FFDEE}.Release|x86.ActiveCfg = Release|Any CPU + {8A40142F-E8C8-4E86-BE70-7DD4AB1FFDEE}.Release|x86.Build.0 = Release|Any CPU + {C9D20F74-EE5F-4C9E-9AB1-C03E90B34F92}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C9D20F74-EE5F-4C9E-9AB1-C03E90B34F92}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C9D20F74-EE5F-4C9E-9AB1-C03E90B34F92}.Debug|x64.ActiveCfg = Debug|Any CPU + {C9D20F74-EE5F-4C9E-9AB1-C03E90B34F92}.Debug|x64.Build.0 = Debug|Any CPU + {C9D20F74-EE5F-4C9E-9AB1-C03E90B34F92}.Debug|x86.ActiveCfg = Debug|Any CPU + {C9D20F74-EE5F-4C9E-9AB1-C03E90B34F92}.Debug|x86.Build.0 = Debug|Any CPU + {C9D20F74-EE5F-4C9E-9AB1-C03E90B34F92}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C9D20F74-EE5F-4C9E-9AB1-C03E90B34F92}.Release|Any CPU.Build.0 = Release|Any CPU + {C9D20F74-EE5F-4C9E-9AB1-C03E90B34F92}.Release|x64.ActiveCfg = Release|Any CPU + {C9D20F74-EE5F-4C9E-9AB1-C03E90B34F92}.Release|x64.Build.0 = Release|Any CPU + {C9D20F74-EE5F-4C9E-9AB1-C03E90B34F92}.Release|x86.ActiveCfg = Release|Any CPU + {C9D20F74-EE5F-4C9E-9AB1-C03E90B34F92}.Release|x86.Build.0 = Release|Any CPU + {50140A32-6D3C-47DB-983A-7166CBA51845}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {50140A32-6D3C-47DB-983A-7166CBA51845}.Debug|Any CPU.Build.0 = Debug|Any CPU + {50140A32-6D3C-47DB-983A-7166CBA51845}.Debug|x64.ActiveCfg = Debug|Any CPU + {50140A32-6D3C-47DB-983A-7166CBA51845}.Debug|x64.Build.0 = Debug|Any CPU + {50140A32-6D3C-47DB-983A-7166CBA51845}.Debug|x86.ActiveCfg = Debug|Any CPU + {50140A32-6D3C-47DB-983A-7166CBA51845}.Debug|x86.Build.0 = Debug|Any CPU + {50140A32-6D3C-47DB-983A-7166CBA51845}.Release|Any CPU.ActiveCfg = Release|Any CPU + {50140A32-6D3C-47DB-983A-7166CBA51845}.Release|Any CPU.Build.0 = Release|Any CPU + {50140A32-6D3C-47DB-983A-7166CBA51845}.Release|x64.ActiveCfg = Release|Any CPU + {50140A32-6D3C-47DB-983A-7166CBA51845}.Release|x64.Build.0 = Release|Any CPU + {50140A32-6D3C-47DB-983A-7166CBA51845}.Release|x86.ActiveCfg = Release|Any CPU + {50140A32-6D3C-47DB-983A-7166CBA51845}.Release|x86.Build.0 = Release|Any CPU + {031979F2-6ABA-444F-A6A4-80115DC487CE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {031979F2-6ABA-444F-A6A4-80115DC487CE}.Debug|Any CPU.Build.0 = Debug|Any CPU + {031979F2-6ABA-444F-A6A4-80115DC487CE}.Debug|x64.ActiveCfg = Debug|Any CPU + {031979F2-6ABA-444F-A6A4-80115DC487CE}.Debug|x64.Build.0 = Debug|Any CPU + {031979F2-6ABA-444F-A6A4-80115DC487CE}.Debug|x86.ActiveCfg = Debug|Any CPU + {031979F2-6ABA-444F-A6A4-80115DC487CE}.Debug|x86.Build.0 = Debug|Any CPU + {031979F2-6ABA-444F-A6A4-80115DC487CE}.Release|Any CPU.ActiveCfg = Release|Any CPU + {031979F2-6ABA-444F-A6A4-80115DC487CE}.Release|Any CPU.Build.0 = Release|Any CPU + {031979F2-6ABA-444F-A6A4-80115DC487CE}.Release|x64.ActiveCfg = Release|Any CPU + {031979F2-6ABA-444F-A6A4-80115DC487CE}.Release|x64.Build.0 = Release|Any CPU + {031979F2-6ABA-444F-A6A4-80115DC487CE}.Release|x86.ActiveCfg = Release|Any CPU + {031979F2-6ABA-444F-A6A4-80115DC487CE}.Release|x86.Build.0 = 
Release|Any CPU + {D71B0DA5-80A3-419E-898D-40E77A9A7F19}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D71B0DA5-80A3-419E-898D-40E77A9A7F19}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D71B0DA5-80A3-419E-898D-40E77A9A7F19}.Debug|x64.ActiveCfg = Debug|Any CPU + {D71B0DA5-80A3-419E-898D-40E77A9A7F19}.Debug|x64.Build.0 = Debug|Any CPU + {D71B0DA5-80A3-419E-898D-40E77A9A7F19}.Debug|x86.ActiveCfg = Debug|Any CPU + {D71B0DA5-80A3-419E-898D-40E77A9A7F19}.Debug|x86.Build.0 = Debug|Any CPU + {D71B0DA5-80A3-419E-898D-40E77A9A7F19}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D71B0DA5-80A3-419E-898D-40E77A9A7F19}.Release|Any CPU.Build.0 = Release|Any CPU + {D71B0DA5-80A3-419E-898D-40E77A9A7F19}.Release|x64.ActiveCfg = Release|Any CPU + {D71B0DA5-80A3-419E-898D-40E77A9A7F19}.Release|x64.Build.0 = Release|Any CPU + {D71B0DA5-80A3-419E-898D-40E77A9A7F19}.Release|x86.ActiveCfg = Release|Any CPU + {D71B0DA5-80A3-419E-898D-40E77A9A7F19}.Release|x86.Build.0 = Release|Any CPU + {B2C877D9-B521-4901-8817-76B5DAA62FCE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B2C877D9-B521-4901-8817-76B5DAA62FCE}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B2C877D9-B521-4901-8817-76B5DAA62FCE}.Debug|x64.ActiveCfg = Debug|Any CPU + {B2C877D9-B521-4901-8817-76B5DAA62FCE}.Debug|x64.Build.0 = Debug|Any CPU + {B2C877D9-B521-4901-8817-76B5DAA62FCE}.Debug|x86.ActiveCfg = Debug|Any CPU + {B2C877D9-B521-4901-8817-76B5DAA62FCE}.Debug|x86.Build.0 = Debug|Any CPU + {B2C877D9-B521-4901-8817-76B5DAA62FCE}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B2C877D9-B521-4901-8817-76B5DAA62FCE}.Release|Any CPU.Build.0 = Release|Any CPU + {B2C877D9-B521-4901-8817-76B5DAA62FCE}.Release|x64.ActiveCfg = Release|Any CPU + {B2C877D9-B521-4901-8817-76B5DAA62FCE}.Release|x64.Build.0 = Release|Any CPU + {B2C877D9-B521-4901-8817-76B5DAA62FCE}.Release|x86.ActiveCfg = Release|Any CPU + {B2C877D9-B521-4901-8817-76B5DAA62FCE}.Release|x86.Build.0 = Release|Any CPU + {08D3B6D0-3CE8-4F24-A6F1-BCAB01AD6278}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {08D3B6D0-3CE8-4F24-A6F1-BCAB01AD6278}.Debug|Any CPU.Build.0 = Debug|Any CPU + {08D3B6D0-3CE8-4F24-A6F1-BCAB01AD6278}.Debug|x64.ActiveCfg = Debug|Any CPU + {08D3B6D0-3CE8-4F24-A6F1-BCAB01AD6278}.Debug|x64.Build.0 = Debug|Any CPU + {08D3B6D0-3CE8-4F24-A6F1-BCAB01AD6278}.Debug|x86.ActiveCfg = Debug|Any CPU + {08D3B6D0-3CE8-4F24-A6F1-BCAB01AD6278}.Debug|x86.Build.0 = Debug|Any CPU + {08D3B6D0-3CE8-4F24-A6F1-BCAB01AD6278}.Release|Any CPU.ActiveCfg = Release|Any CPU + {08D3B6D0-3CE8-4F24-A6F1-BCAB01AD6278}.Release|Any CPU.Build.0 = Release|Any CPU + {08D3B6D0-3CE8-4F24-A6F1-BCAB01AD6278}.Release|x64.ActiveCfg = Release|Any CPU + {08D3B6D0-3CE8-4F24-A6F1-BCAB01AD6278}.Release|x64.Build.0 = Release|Any CPU + {08D3B6D0-3CE8-4F24-A6F1-BCAB01AD6278}.Release|x86.ActiveCfg = Release|Any CPU + {08D3B6D0-3CE8-4F24-A6F1-BCAB01AD6278}.Release|x86.Build.0 = Release|Any CPU + {7116DD6B-2491-49E1-AB27-5210E949F753}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7116DD6B-2491-49E1-AB27-5210E949F753}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7116DD6B-2491-49E1-AB27-5210E949F753}.Debug|x64.ActiveCfg = Debug|Any CPU + {7116DD6B-2491-49E1-AB27-5210E949F753}.Debug|x64.Build.0 = Debug|Any CPU + {7116DD6B-2491-49E1-AB27-5210E949F753}.Debug|x86.ActiveCfg = Debug|Any CPU + {7116DD6B-2491-49E1-AB27-5210E949F753}.Debug|x86.Build.0 = Debug|Any CPU + {7116DD6B-2491-49E1-AB27-5210E949F753}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7116DD6B-2491-49E1-AB27-5210E949F753}.Release|Any CPU.Build.0 = Release|Any CPU + 
{7116DD6B-2491-49E1-AB27-5210E949F753}.Release|x64.ActiveCfg = Release|Any CPU + {7116DD6B-2491-49E1-AB27-5210E949F753}.Release|x64.Build.0 = Release|Any CPU + {7116DD6B-2491-49E1-AB27-5210E949F753}.Release|x86.ActiveCfg = Release|Any CPU + {7116DD6B-2491-49E1-AB27-5210E949F753}.Release|x86.Build.0 = Release|Any CPU + {7DBE31A6-D2FD-499E-B675-4092723175AD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7DBE31A6-D2FD-499E-B675-4092723175AD}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7DBE31A6-D2FD-499E-B675-4092723175AD}.Debug|x64.ActiveCfg = Debug|Any CPU + {7DBE31A6-D2FD-499E-B675-4092723175AD}.Debug|x64.Build.0 = Debug|Any CPU + {7DBE31A6-D2FD-499E-B675-4092723175AD}.Debug|x86.ActiveCfg = Debug|Any CPU + {7DBE31A6-D2FD-499E-B675-4092723175AD}.Debug|x86.Build.0 = Debug|Any CPU + {7DBE31A6-D2FD-499E-B675-4092723175AD}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7DBE31A6-D2FD-499E-B675-4092723175AD}.Release|Any CPU.Build.0 = Release|Any CPU + {7DBE31A6-D2FD-499E-B675-4092723175AD}.Release|x64.ActiveCfg = Release|Any CPU + {7DBE31A6-D2FD-499E-B675-4092723175AD}.Release|x64.Build.0 = Release|Any CPU + {7DBE31A6-D2FD-499E-B675-4092723175AD}.Release|x86.ActiveCfg = Release|Any CPU + {7DBE31A6-D2FD-499E-B675-4092723175AD}.Release|x86.Build.0 = Release|Any CPU + {D99E6EAE-D278-4480-AA67-85F025383E47}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D99E6EAE-D278-4480-AA67-85F025383E47}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D99E6EAE-D278-4480-AA67-85F025383E47}.Debug|x64.ActiveCfg = Debug|Any CPU + {D99E6EAE-D278-4480-AA67-85F025383E47}.Debug|x64.Build.0 = Debug|Any CPU + {D99E6EAE-D278-4480-AA67-85F025383E47}.Debug|x86.ActiveCfg = Debug|Any CPU + {D99E6EAE-D278-4480-AA67-85F025383E47}.Debug|x86.Build.0 = Debug|Any CPU + {D99E6EAE-D278-4480-AA67-85F025383E47}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D99E6EAE-D278-4480-AA67-85F025383E47}.Release|Any CPU.Build.0 = Release|Any CPU + {D99E6EAE-D278-4480-AA67-85F025383E47}.Release|x64.ActiveCfg = Release|Any CPU + {D99E6EAE-D278-4480-AA67-85F025383E47}.Release|x64.Build.0 = Release|Any CPU + {D99E6EAE-D278-4480-AA67-85F025383E47}.Release|x86.ActiveCfg = Release|Any CPU + {D99E6EAE-D278-4480-AA67-85F025383E47}.Release|x86.Build.0 = Release|Any CPU + {D3825714-3DDA-44B7-A99C-5F3E65716691}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D3825714-3DDA-44B7-A99C-5F3E65716691}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D3825714-3DDA-44B7-A99C-5F3E65716691}.Debug|x64.ActiveCfg = Debug|Any CPU + {D3825714-3DDA-44B7-A99C-5F3E65716691}.Debug|x64.Build.0 = Debug|Any CPU + {D3825714-3DDA-44B7-A99C-5F3E65716691}.Debug|x86.ActiveCfg = Debug|Any CPU + {D3825714-3DDA-44B7-A99C-5F3E65716691}.Debug|x86.Build.0 = Debug|Any CPU + {D3825714-3DDA-44B7-A99C-5F3E65716691}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D3825714-3DDA-44B7-A99C-5F3E65716691}.Release|Any CPU.Build.0 = Release|Any CPU + {D3825714-3DDA-44B7-A99C-5F3E65716691}.Release|x64.ActiveCfg = Release|Any CPU + {D3825714-3DDA-44B7-A99C-5F3E65716691}.Release|x64.Build.0 = Release|Any CPU + {D3825714-3DDA-44B7-A99C-5F3E65716691}.Release|x86.ActiveCfg = Release|Any CPU + {D3825714-3DDA-44B7-A99C-5F3E65716691}.Release|x86.Build.0 = Release|Any CPU + {FAB78D21-7372-48FE-B2C3-DE1807F1157D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {FAB78D21-7372-48FE-B2C3-DE1807F1157D}.Debug|Any CPU.Build.0 = Debug|Any CPU + {FAB78D21-7372-48FE-B2C3-DE1807F1157D}.Debug|x64.ActiveCfg = Debug|Any CPU + {FAB78D21-7372-48FE-B2C3-DE1807F1157D}.Debug|x64.Build.0 = Debug|Any CPU + {FAB78D21-7372-48FE-B2C3-DE1807F1157D}.Debug|x86.ActiveCfg = 
Debug|Any CPU + {FAB78D21-7372-48FE-B2C3-DE1807F1157D}.Debug|x86.Build.0 = Debug|Any CPU + {FAB78D21-7372-48FE-B2C3-DE1807F1157D}.Release|Any CPU.ActiveCfg = Release|Any CPU + {FAB78D21-7372-48FE-B2C3-DE1807F1157D}.Release|Any CPU.Build.0 = Release|Any CPU + {FAB78D21-7372-48FE-B2C3-DE1807F1157D}.Release|x64.ActiveCfg = Release|Any CPU + {FAB78D21-7372-48FE-B2C3-DE1807F1157D}.Release|x64.Build.0 = Release|Any CPU + {FAB78D21-7372-48FE-B2C3-DE1807F1157D}.Release|x86.ActiveCfg = Release|Any CPU + {FAB78D21-7372-48FE-B2C3-DE1807F1157D}.Release|x86.Build.0 = Release|Any CPU + {EADFA337-B0FA-4712-A24A-7C08235BDF98}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {EADFA337-B0FA-4712-A24A-7C08235BDF98}.Debug|Any CPU.Build.0 = Debug|Any CPU + {EADFA337-B0FA-4712-A24A-7C08235BDF98}.Debug|x64.ActiveCfg = Debug|Any CPU + {EADFA337-B0FA-4712-A24A-7C08235BDF98}.Debug|x64.Build.0 = Debug|Any CPU + {EADFA337-B0FA-4712-A24A-7C08235BDF98}.Debug|x86.ActiveCfg = Debug|Any CPU + {EADFA337-B0FA-4712-A24A-7C08235BDF98}.Debug|x86.Build.0 = Debug|Any CPU + {EADFA337-B0FA-4712-A24A-7C08235BDF98}.Release|Any CPU.ActiveCfg = Release|Any CPU + {EADFA337-B0FA-4712-A24A-7C08235BDF98}.Release|Any CPU.Build.0 = Release|Any CPU + {EADFA337-B0FA-4712-A24A-7C08235BDF98}.Release|x64.ActiveCfg = Release|Any CPU + {EADFA337-B0FA-4712-A24A-7C08235BDF98}.Release|x64.Build.0 = Release|Any CPU + {EADFA337-B0FA-4712-A24A-7C08235BDF98}.Release|x86.ActiveCfg = Release|Any CPU + {EADFA337-B0FA-4712-A24A-7C08235BDF98}.Release|x86.Build.0 = Release|Any CPU + {110F7EC2-3149-4D1B-A972-E69E79F1EBF5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {110F7EC2-3149-4D1B-A972-E69E79F1EBF5}.Debug|Any CPU.Build.0 = Debug|Any CPU + {110F7EC2-3149-4D1B-A972-E69E79F1EBF5}.Debug|x64.ActiveCfg = Debug|Any CPU + {110F7EC2-3149-4D1B-A972-E69E79F1EBF5}.Debug|x64.Build.0 = Debug|Any CPU + {110F7EC2-3149-4D1B-A972-E69E79F1EBF5}.Debug|x86.ActiveCfg = Debug|Any CPU + {110F7EC2-3149-4D1B-A972-E69E79F1EBF5}.Debug|x86.Build.0 = Debug|Any CPU + {110F7EC2-3149-4D1B-A972-E69E79F1EBF5}.Release|Any CPU.ActiveCfg = Release|Any CPU + {110F7EC2-3149-4D1B-A972-E69E79F1EBF5}.Release|Any CPU.Build.0 = Release|Any CPU + {110F7EC2-3149-4D1B-A972-E69E79F1EBF5}.Release|x64.ActiveCfg = Release|Any CPU + {110F7EC2-3149-4D1B-A972-E69E79F1EBF5}.Release|x64.Build.0 = Release|Any CPU + {110F7EC2-3149-4D1B-A972-E69E79F1EBF5}.Release|x86.ActiveCfg = Release|Any CPU + {110F7EC2-3149-4D1B-A972-E69E79F1EBF5}.Release|x86.Build.0 = Release|Any CPU + {B84FE2DD-A1AD-437C-95CF-89C1DCCFDF6F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B84FE2DD-A1AD-437C-95CF-89C1DCCFDF6F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B84FE2DD-A1AD-437C-95CF-89C1DCCFDF6F}.Debug|x64.ActiveCfg = Debug|Any CPU + {B84FE2DD-A1AD-437C-95CF-89C1DCCFDF6F}.Debug|x64.Build.0 = Debug|Any CPU + {B84FE2DD-A1AD-437C-95CF-89C1DCCFDF6F}.Debug|x86.ActiveCfg = Debug|Any CPU + {B84FE2DD-A1AD-437C-95CF-89C1DCCFDF6F}.Debug|x86.Build.0 = Debug|Any CPU + {B84FE2DD-A1AD-437C-95CF-89C1DCCFDF6F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B84FE2DD-A1AD-437C-95CF-89C1DCCFDF6F}.Release|Any CPU.Build.0 = Release|Any CPU + {B84FE2DD-A1AD-437C-95CF-89C1DCCFDF6F}.Release|x64.ActiveCfg = Release|Any CPU + {B84FE2DD-A1AD-437C-95CF-89C1DCCFDF6F}.Release|x64.Build.0 = Release|Any CPU + {B84FE2DD-A1AD-437C-95CF-89C1DCCFDF6F}.Release|x86.ActiveCfg = Release|Any CPU + {B84FE2DD-A1AD-437C-95CF-89C1DCCFDF6F}.Release|x86.Build.0 = Release|Any CPU + {3288F0F8-FF86-4DB3-A1FD-8EB51893E8C2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + 
{3288F0F8-FF86-4DB3-A1FD-8EB51893E8C2}.Debug|Any CPU.Build.0 = Debug|Any CPU + {3288F0F8-FF86-4DB3-A1FD-8EB51893E8C2}.Debug|x64.ActiveCfg = Debug|Any CPU + {3288F0F8-FF86-4DB3-A1FD-8EB51893E8C2}.Debug|x64.Build.0 = Debug|Any CPU + {3288F0F8-FF86-4DB3-A1FD-8EB51893E8C2}.Debug|x86.ActiveCfg = Debug|Any CPU + {3288F0F8-FF86-4DB3-A1FD-8EB51893E8C2}.Debug|x86.Build.0 = Debug|Any CPU + {3288F0F8-FF86-4DB3-A1FD-8EB51893E8C2}.Release|Any CPU.ActiveCfg = Release|Any CPU + {3288F0F8-FF86-4DB3-A1FD-8EB51893E8C2}.Release|Any CPU.Build.0 = Release|Any CPU + {3288F0F8-FF86-4DB3-A1FD-8EB51893E8C2}.Release|x64.ActiveCfg = Release|Any CPU + {3288F0F8-FF86-4DB3-A1FD-8EB51893E8C2}.Release|x64.Build.0 = Release|Any CPU + {3288F0F8-FF86-4DB3-A1FD-8EB51893E8C2}.Release|x86.ActiveCfg = Release|Any CPU + {3288F0F8-FF86-4DB3-A1FD-8EB51893E8C2}.Release|x86.Build.0 = Release|Any CPU + {680CA103-DCE8-4D02-8979-72DEA5BE8C00}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {680CA103-DCE8-4D02-8979-72DEA5BE8C00}.Debug|Any CPU.Build.0 = Debug|Any CPU + {680CA103-DCE8-4D02-8979-72DEA5BE8C00}.Debug|x64.ActiveCfg = Debug|Any CPU + {680CA103-DCE8-4D02-8979-72DEA5BE8C00}.Debug|x64.Build.0 = Debug|Any CPU + {680CA103-DCE8-4D02-8979-72DEA5BE8C00}.Debug|x86.ActiveCfg = Debug|Any CPU + {680CA103-DCE8-4D02-8979-72DEA5BE8C00}.Debug|x86.Build.0 = Debug|Any CPU + {680CA103-DCE8-4D02-8979-72DEA5BE8C00}.Release|Any CPU.ActiveCfg = Release|Any CPU + {680CA103-DCE8-4D02-8979-72DEA5BE8C00}.Release|Any CPU.Build.0 = Release|Any CPU + {680CA103-DCE8-4D02-8979-72DEA5BE8C00}.Release|x64.ActiveCfg = Release|Any CPU + {680CA103-DCE8-4D02-8979-72DEA5BE8C00}.Release|x64.Build.0 = Release|Any CPU + {680CA103-DCE8-4D02-8979-72DEA5BE8C00}.Release|x86.ActiveCfg = Release|Any CPU + {680CA103-DCE8-4D02-8979-72DEA5BE8C00}.Release|x86.Build.0 = Release|Any CPU + {7F4B19D4-569A-4CCF-B481-EBE04860451A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7F4B19D4-569A-4CCF-B481-EBE04860451A}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7F4B19D4-569A-4CCF-B481-EBE04860451A}.Debug|x64.ActiveCfg = Debug|Any CPU + {7F4B19D4-569A-4CCF-B481-EBE04860451A}.Debug|x64.Build.0 = Debug|Any CPU + {7F4B19D4-569A-4CCF-B481-EBE04860451A}.Debug|x86.ActiveCfg = Debug|Any CPU + {7F4B19D4-569A-4CCF-B481-EBE04860451A}.Debug|x86.Build.0 = Debug|Any CPU + {7F4B19D4-569A-4CCF-B481-EBE04860451A}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7F4B19D4-569A-4CCF-B481-EBE04860451A}.Release|Any CPU.Build.0 = Release|Any CPU + {7F4B19D4-569A-4CCF-B481-EBE04860451A}.Release|x64.ActiveCfg = Release|Any CPU + {7F4B19D4-569A-4CCF-B481-EBE04860451A}.Release|x64.Build.0 = Release|Any CPU + {7F4B19D4-569A-4CCF-B481-EBE04860451A}.Release|x86.ActiveCfg = Release|Any CPU + {7F4B19D4-569A-4CCF-B481-EBE04860451A}.Release|x86.Build.0 = Release|Any CPU + {DE9863B5-E6D6-4C5F-B52A-ED9E964008A3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {DE9863B5-E6D6-4C5F-B52A-ED9E964008A3}.Debug|Any CPU.Build.0 = Debug|Any CPU + {DE9863B5-E6D6-4C5F-B52A-ED9E964008A3}.Debug|x64.ActiveCfg = Debug|Any CPU + {DE9863B5-E6D6-4C5F-B52A-ED9E964008A3}.Debug|x64.Build.0 = Debug|Any CPU + {DE9863B5-E6D6-4C5F-B52A-ED9E964008A3}.Debug|x86.ActiveCfg = Debug|Any CPU + {DE9863B5-E6D6-4C5F-B52A-ED9E964008A3}.Debug|x86.Build.0 = Debug|Any CPU + {DE9863B5-E6D6-4C5F-B52A-ED9E964008A3}.Release|Any CPU.ActiveCfg = Release|Any CPU + {DE9863B5-E6D6-4C5F-B52A-ED9E964008A3}.Release|Any CPU.Build.0 = Release|Any CPU + {DE9863B5-E6D6-4C5F-B52A-ED9E964008A3}.Release|x64.ActiveCfg = Release|Any CPU + {DE9863B5-E6D6-4C5F-B52A-ED9E964008A3}.Release|x64.Build.0 = 
Release|Any CPU + {DE9863B5-E6D6-4C5F-B52A-ED9E964008A3}.Release|x86.ActiveCfg = Release|Any CPU + {DE9863B5-E6D6-4C5F-B52A-ED9E964008A3}.Release|x86.Build.0 = Release|Any CPU + {E380F242-031E-483E-8570-0EF7EA525C4F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E380F242-031E-483E-8570-0EF7EA525C4F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E380F242-031E-483E-8570-0EF7EA525C4F}.Debug|x64.ActiveCfg = Debug|Any CPU + {E380F242-031E-483E-8570-0EF7EA525C4F}.Debug|x64.Build.0 = Debug|Any CPU + {E380F242-031E-483E-8570-0EF7EA525C4F}.Debug|x86.ActiveCfg = Debug|Any CPU + {E380F242-031E-483E-8570-0EF7EA525C4F}.Debug|x86.Build.0 = Debug|Any CPU + {E380F242-031E-483E-8570-0EF7EA525C4F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E380F242-031E-483E-8570-0EF7EA525C4F}.Release|Any CPU.Build.0 = Release|Any CPU + {E380F242-031E-483E-8570-0EF7EA525C4F}.Release|x64.ActiveCfg = Release|Any CPU + {E380F242-031E-483E-8570-0EF7EA525C4F}.Release|x64.Build.0 = Release|Any CPU + {E380F242-031E-483E-8570-0EF7EA525C4F}.Release|x86.ActiveCfg = Release|Any CPU + {E380F242-031E-483E-8570-0EF7EA525C4F}.Release|x86.Build.0 = Release|Any CPU + {42582C16-F5A9-417F-9D33-BC489925324F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {42582C16-F5A9-417F-9D33-BC489925324F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {42582C16-F5A9-417F-9D33-BC489925324F}.Debug|x64.ActiveCfg = Debug|Any CPU + {42582C16-F5A9-417F-9D33-BC489925324F}.Debug|x64.Build.0 = Debug|Any CPU + {42582C16-F5A9-417F-9D33-BC489925324F}.Debug|x86.ActiveCfg = Debug|Any CPU + {42582C16-F5A9-417F-9D33-BC489925324F}.Debug|x86.Build.0 = Debug|Any CPU + {42582C16-F5A9-417F-9D33-BC489925324F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {42582C16-F5A9-417F-9D33-BC489925324F}.Release|Any CPU.Build.0 = Release|Any CPU + {42582C16-F5A9-417F-9D33-BC489925324F}.Release|x64.ActiveCfg = Release|Any CPU + {42582C16-F5A9-417F-9D33-BC489925324F}.Release|x64.Build.0 = Release|Any CPU + {42582C16-F5A9-417F-9D33-BC489925324F}.Release|x86.ActiveCfg = Release|Any CPU + {42582C16-F5A9-417F-9D33-BC489925324F}.Release|x86.Build.0 = Release|Any CPU + {06F40DA8-FEFA-4C2B-907B-155BD92BB859}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {06F40DA8-FEFA-4C2B-907B-155BD92BB859}.Debug|Any CPU.Build.0 = Debug|Any CPU + {06F40DA8-FEFA-4C2B-907B-155BD92BB859}.Debug|x64.ActiveCfg = Debug|Any CPU + {06F40DA8-FEFA-4C2B-907B-155BD92BB859}.Debug|x64.Build.0 = Debug|Any CPU + {06F40DA8-FEFA-4C2B-907B-155BD92BB859}.Debug|x86.ActiveCfg = Debug|Any CPU + {06F40DA8-FEFA-4C2B-907B-155BD92BB859}.Debug|x86.Build.0 = Debug|Any CPU + {06F40DA8-FEFA-4C2B-907B-155BD92BB859}.Release|Any CPU.ActiveCfg = Release|Any CPU + {06F40DA8-FEFA-4C2B-907B-155BD92BB859}.Release|Any CPU.Build.0 = Release|Any CPU + {06F40DA8-FEFA-4C2B-907B-155BD92BB859}.Release|x64.ActiveCfg = Release|Any CPU + {06F40DA8-FEFA-4C2B-907B-155BD92BB859}.Release|x64.Build.0 = Release|Any CPU + {06F40DA8-FEFA-4C2B-907B-155BD92BB859}.Release|x86.ActiveCfg = Release|Any CPU + {06F40DA8-FEFA-4C2B-907B-155BD92BB859}.Release|x86.Build.0 = Release|Any CPU + {A2E3F03A-0CAD-4E2A-8C71-DDEBB1B7E4F7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {A2E3F03A-0CAD-4E2A-8C71-DDEBB1B7E4F7}.Debug|Any CPU.Build.0 = Debug|Any CPU + {A2E3F03A-0CAD-4E2A-8C71-DDEBB1B7E4F7}.Debug|x64.ActiveCfg = Debug|Any CPU + {A2E3F03A-0CAD-4E2A-8C71-DDEBB1B7E4F7}.Debug|x64.Build.0 = Debug|Any CPU + {A2E3F03A-0CAD-4E2A-8C71-DDEBB1B7E4F7}.Debug|x86.ActiveCfg = Debug|Any CPU + {A2E3F03A-0CAD-4E2A-8C71-DDEBB1B7E4F7}.Debug|x86.Build.0 = Debug|Any CPU + {A2E3F03A-0CAD-4E2A-8C71-DDEBB1B7E4F7}.Release|Any 
CPU.ActiveCfg = Release|Any CPU + {A2E3F03A-0CAD-4E2A-8C71-DDEBB1B7E4F7}.Release|Any CPU.Build.0 = Release|Any CPU + {A2E3F03A-0CAD-4E2A-8C71-DDEBB1B7E4F7}.Release|x64.ActiveCfg = Release|Any CPU + {A2E3F03A-0CAD-4E2A-8C71-DDEBB1B7E4F7}.Release|x64.Build.0 = Release|Any CPU + {A2E3F03A-0CAD-4E2A-8C71-DDEBB1B7E4F7}.Release|x86.ActiveCfg = Release|Any CPU + {A2E3F03A-0CAD-4E2A-8C71-DDEBB1B7E4F7}.Release|x86.Build.0 = Release|Any CPU + {3A1AF0AD-4DAE-4D82-9CCF-2DCB83CC3679}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {3A1AF0AD-4DAE-4D82-9CCF-2DCB83CC3679}.Debug|Any CPU.Build.0 = Debug|Any CPU + {3A1AF0AD-4DAE-4D82-9CCF-2DCB83CC3679}.Debug|x64.ActiveCfg = Debug|Any CPU + {3A1AF0AD-4DAE-4D82-9CCF-2DCB83CC3679}.Debug|x64.Build.0 = Debug|Any CPU + {3A1AF0AD-4DAE-4D82-9CCF-2DCB83CC3679}.Debug|x86.ActiveCfg = Debug|Any CPU + {3A1AF0AD-4DAE-4D82-9CCF-2DCB83CC3679}.Debug|x86.Build.0 = Debug|Any CPU + {3A1AF0AD-4DAE-4D82-9CCF-2DCB83CC3679}.Release|Any CPU.ActiveCfg = Release|Any CPU + {3A1AF0AD-4DAE-4D82-9CCF-2DCB83CC3679}.Release|Any CPU.Build.0 = Release|Any CPU + {3A1AF0AD-4DAE-4D82-9CCF-2DCB83CC3679}.Release|x64.ActiveCfg = Release|Any CPU + {3A1AF0AD-4DAE-4D82-9CCF-2DCB83CC3679}.Release|x64.Build.0 = Release|Any CPU + {3A1AF0AD-4DAE-4D82-9CCF-2DCB83CC3679}.Release|x86.ActiveCfg = Release|Any CPU + {3A1AF0AD-4DAE-4D82-9CCF-2DCB83CC3679}.Release|x86.Build.0 = Release|Any CPU + {F1DF0F07-1BCB-4B55-8353-07BF8A4B2A67}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {F1DF0F07-1BCB-4B55-8353-07BF8A4B2A67}.Debug|Any CPU.Build.0 = Debug|Any CPU + {F1DF0F07-1BCB-4B55-8353-07BF8A4B2A67}.Debug|x64.ActiveCfg = Debug|Any CPU + {F1DF0F07-1BCB-4B55-8353-07BF8A4B2A67}.Debug|x64.Build.0 = Debug|Any CPU + {F1DF0F07-1BCB-4B55-8353-07BF8A4B2A67}.Debug|x86.ActiveCfg = Debug|Any CPU + {F1DF0F07-1BCB-4B55-8353-07BF8A4B2A67}.Debug|x86.Build.0 = Debug|Any CPU + {F1DF0F07-1BCB-4B55-8353-07BF8A4B2A67}.Release|Any CPU.ActiveCfg = Release|Any CPU + {F1DF0F07-1BCB-4B55-8353-07BF8A4B2A67}.Release|Any CPU.Build.0 = Release|Any CPU + {F1DF0F07-1BCB-4B55-8353-07BF8A4B2A67}.Release|x64.ActiveCfg = Release|Any CPU + {F1DF0F07-1BCB-4B55-8353-07BF8A4B2A67}.Release|x64.Build.0 = Release|Any CPU + {F1DF0F07-1BCB-4B55-8353-07BF8A4B2A67}.Release|x86.ActiveCfg = Release|Any CPU + {F1DF0F07-1BCB-4B55-8353-07BF8A4B2A67}.Release|x86.Build.0 = Release|Any CPU + {781EC793-1DB0-4E31-95BC-12A2B373045F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {781EC793-1DB0-4E31-95BC-12A2B373045F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {781EC793-1DB0-4E31-95BC-12A2B373045F}.Debug|x64.ActiveCfg = Debug|Any CPU + {781EC793-1DB0-4E31-95BC-12A2B373045F}.Debug|x64.Build.0 = Debug|Any CPU + {781EC793-1DB0-4E31-95BC-12A2B373045F}.Debug|x86.ActiveCfg = Debug|Any CPU + {781EC793-1DB0-4E31-95BC-12A2B373045F}.Debug|x86.Build.0 = Debug|Any CPU + {781EC793-1DB0-4E31-95BC-12A2B373045F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {781EC793-1DB0-4E31-95BC-12A2B373045F}.Release|Any CPU.Build.0 = Release|Any CPU + {781EC793-1DB0-4E31-95BC-12A2B373045F}.Release|x64.ActiveCfg = Release|Any CPU + {781EC793-1DB0-4E31-95BC-12A2B373045F}.Release|x64.Build.0 = Release|Any CPU + {781EC793-1DB0-4E31-95BC-12A2B373045F}.Release|x86.ActiveCfg = Release|Any CPU + {781EC793-1DB0-4E31-95BC-12A2B373045F}.Release|x86.Build.0 = Release|Any CPU + {BB863E0C-50FF-41AE-9C13-4E8A1BABC62C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {BB863E0C-50FF-41AE-9C13-4E8A1BABC62C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {BB863E0C-50FF-41AE-9C13-4E8A1BABC62C}.Debug|x64.ActiveCfg = Debug|Any CPU + 
{BB863E0C-50FF-41AE-9C13-4E8A1BABC62C}.Debug|x64.Build.0 = Debug|Any CPU + {BB863E0C-50FF-41AE-9C13-4E8A1BABC62C}.Debug|x86.ActiveCfg = Debug|Any CPU + {BB863E0C-50FF-41AE-9C13-4E8A1BABC62C}.Debug|x86.Build.0 = Debug|Any CPU + {BB863E0C-50FF-41AE-9C13-4E8A1BABC62C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {BB863E0C-50FF-41AE-9C13-4E8A1BABC62C}.Release|Any CPU.Build.0 = Release|Any CPU + {BB863E0C-50FF-41AE-9C13-4E8A1BABC62C}.Release|x64.ActiveCfg = Release|Any CPU + {BB863E0C-50FF-41AE-9C13-4E8A1BABC62C}.Release|x64.Build.0 = Release|Any CPU + {BB863E0C-50FF-41AE-9C13-4E8A1BABC62C}.Release|x86.ActiveCfg = Release|Any CPU + {BB863E0C-50FF-41AE-9C13-4E8A1BABC62C}.Release|x86.Build.0 = Release|Any CPU + {14E9D043-F0EF-4F68-AE83-D6F579119D9A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {14E9D043-F0EF-4F68-AE83-D6F579119D9A}.Debug|Any CPU.Build.0 = Debug|Any CPU + {14E9D043-F0EF-4F68-AE83-D6F579119D9A}.Debug|x64.ActiveCfg = Debug|Any CPU + {14E9D043-F0EF-4F68-AE83-D6F579119D9A}.Debug|x64.Build.0 = Debug|Any CPU + {14E9D043-F0EF-4F68-AE83-D6F579119D9A}.Debug|x86.ActiveCfg = Debug|Any CPU + {14E9D043-F0EF-4F68-AE83-D6F579119D9A}.Debug|x86.Build.0 = Debug|Any CPU + {14E9D043-F0EF-4F68-AE83-D6F579119D9A}.Release|Any CPU.ActiveCfg = Release|Any CPU + {14E9D043-F0EF-4F68-AE83-D6F579119D9A}.Release|Any CPU.Build.0 = Release|Any CPU + {14E9D043-F0EF-4F68-AE83-D6F579119D9A}.Release|x64.ActiveCfg = Release|Any CPU + {14E9D043-F0EF-4F68-AE83-D6F579119D9A}.Release|x64.Build.0 = Release|Any CPU + {14E9D043-F0EF-4F68-AE83-D6F579119D9A}.Release|x86.ActiveCfg = Release|Any CPU + {14E9D043-F0EF-4F68-AE83-D6F579119D9A}.Release|x86.Build.0 = Release|Any CPU + {27E94B6E-DEF8-4B89-97CB-424703790ECE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {27E94B6E-DEF8-4B89-97CB-424703790ECE}.Debug|Any CPU.Build.0 = Debug|Any CPU + {27E94B6E-DEF8-4B89-97CB-424703790ECE}.Debug|x64.ActiveCfg = Debug|Any CPU + {27E94B6E-DEF8-4B89-97CB-424703790ECE}.Debug|x64.Build.0 = Debug|Any CPU + {27E94B6E-DEF8-4B89-97CB-424703790ECE}.Debug|x86.ActiveCfg = Debug|Any CPU + {27E94B6E-DEF8-4B89-97CB-424703790ECE}.Debug|x86.Build.0 = Debug|Any CPU + {27E94B6E-DEF8-4B89-97CB-424703790ECE}.Release|Any CPU.ActiveCfg = Release|Any CPU + {27E94B6E-DEF8-4B89-97CB-424703790ECE}.Release|Any CPU.Build.0 = Release|Any CPU + {27E94B6E-DEF8-4B89-97CB-424703790ECE}.Release|x64.ActiveCfg = Release|Any CPU + {27E94B6E-DEF8-4B89-97CB-424703790ECE}.Release|x64.Build.0 = Release|Any CPU + {27E94B6E-DEF8-4B89-97CB-424703790ECE}.Release|x86.ActiveCfg = Release|Any CPU + {27E94B6E-DEF8-4B89-97CB-424703790ECE}.Release|x86.Build.0 = Release|Any CPU + {361E3E23-B215-423D-9906-A84171E20AD3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {361E3E23-B215-423D-9906-A84171E20AD3}.Debug|Any CPU.Build.0 = Debug|Any CPU + {361E3E23-B215-423D-9906-A84171E20AD3}.Debug|x64.ActiveCfg = Debug|Any CPU + {361E3E23-B215-423D-9906-A84171E20AD3}.Debug|x64.Build.0 = Debug|Any CPU + {361E3E23-B215-423D-9906-A84171E20AD3}.Debug|x86.ActiveCfg = Debug|Any CPU + {361E3E23-B215-423D-9906-A84171E20AD3}.Debug|x86.Build.0 = Debug|Any CPU + {361E3E23-B215-423D-9906-A84171E20AD3}.Release|Any CPU.ActiveCfg = Release|Any CPU + {361E3E23-B215-423D-9906-A84171E20AD3}.Release|Any CPU.Build.0 = Release|Any CPU + {361E3E23-B215-423D-9906-A84171E20AD3}.Release|x64.ActiveCfg = Release|Any CPU + {361E3E23-B215-423D-9906-A84171E20AD3}.Release|x64.Build.0 = Release|Any CPU + {361E3E23-B215-423D-9906-A84171E20AD3}.Release|x86.ActiveCfg = Release|Any CPU + {361E3E23-B215-423D-9906-A84171E20AD3}.Release|x86.Build.0 = 
Release|Any CPU + {7A7A3480-C6C3-4A9F-AF46-1889424B9AC2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7A7A3480-C6C3-4A9F-AF46-1889424B9AC2}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7A7A3480-C6C3-4A9F-AF46-1889424B9AC2}.Debug|x64.ActiveCfg = Debug|Any CPU + {7A7A3480-C6C3-4A9F-AF46-1889424B9AC2}.Debug|x64.Build.0 = Debug|Any CPU + {7A7A3480-C6C3-4A9F-AF46-1889424B9AC2}.Debug|x86.ActiveCfg = Debug|Any CPU + {7A7A3480-C6C3-4A9F-AF46-1889424B9AC2}.Debug|x86.Build.0 = Debug|Any CPU + {7A7A3480-C6C3-4A9F-AF46-1889424B9AC2}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7A7A3480-C6C3-4A9F-AF46-1889424B9AC2}.Release|Any CPU.Build.0 = Release|Any CPU + {7A7A3480-C6C3-4A9F-AF46-1889424B9AC2}.Release|x64.ActiveCfg = Release|Any CPU + {7A7A3480-C6C3-4A9F-AF46-1889424B9AC2}.Release|x64.Build.0 = Release|Any CPU + {7A7A3480-C6C3-4A9F-AF46-1889424B9AC2}.Release|x86.ActiveCfg = Release|Any CPU + {7A7A3480-C6C3-4A9F-AF46-1889424B9AC2}.Release|x86.Build.0 = Release|Any CPU + {C3EAFCB8-0394-4B74-B9A6-3DBA4509201F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C3EAFCB8-0394-4B74-B9A6-3DBA4509201F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C3EAFCB8-0394-4B74-B9A6-3DBA4509201F}.Debug|x64.ActiveCfg = Debug|Any CPU + {C3EAFCB8-0394-4B74-B9A6-3DBA4509201F}.Debug|x64.Build.0 = Debug|Any CPU + {C3EAFCB8-0394-4B74-B9A6-3DBA4509201F}.Debug|x86.ActiveCfg = Debug|Any CPU + {C3EAFCB8-0394-4B74-B9A6-3DBA4509201F}.Debug|x86.Build.0 = Debug|Any CPU + {C3EAFCB8-0394-4B74-B9A6-3DBA4509201F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C3EAFCB8-0394-4B74-B9A6-3DBA4509201F}.Release|Any CPU.Build.0 = Release|Any CPU + {C3EAFCB8-0394-4B74-B9A6-3DBA4509201F}.Release|x64.ActiveCfg = Release|Any CPU + {C3EAFCB8-0394-4B74-B9A6-3DBA4509201F}.Release|x64.Build.0 = Release|Any CPU + {C3EAFCB8-0394-4B74-B9A6-3DBA4509201F}.Release|x86.ActiveCfg = Release|Any CPU + {C3EAFCB8-0394-4B74-B9A6-3DBA4509201F}.Release|x86.Build.0 = Release|Any CPU + {E86CF4A6-2463-4589-A9D8-9DF557C48367}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E86CF4A6-2463-4589-A9D8-9DF557C48367}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E86CF4A6-2463-4589-A9D8-9DF557C48367}.Debug|x64.ActiveCfg = Debug|Any CPU + {E86CF4A6-2463-4589-A9D8-9DF557C48367}.Debug|x64.Build.0 = Debug|Any CPU + {E86CF4A6-2463-4589-A9D8-9DF557C48367}.Debug|x86.ActiveCfg = Debug|Any CPU + {E86CF4A6-2463-4589-A9D8-9DF557C48367}.Debug|x86.Build.0 = Debug|Any CPU + {E86CF4A6-2463-4589-A9D8-9DF557C48367}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E86CF4A6-2463-4589-A9D8-9DF557C48367}.Release|Any CPU.Build.0 = Release|Any CPU + {E86CF4A6-2463-4589-A9D8-9DF557C48367}.Release|x64.ActiveCfg = Release|Any CPU + {E86CF4A6-2463-4589-A9D8-9DF557C48367}.Release|x64.Build.0 = Release|Any CPU + {E86CF4A6-2463-4589-A9D8-9DF557C48367}.Release|x86.ActiveCfg = Release|Any CPU + {E86CF4A6-2463-4589-A9D8-9DF557C48367}.Release|x86.Build.0 = Release|Any CPU + {B308B94C-E01F-4449-A5A6-CD7A48E52D15}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B308B94C-E01F-4449-A5A6-CD7A48E52D15}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B308B94C-E01F-4449-A5A6-CD7A48E52D15}.Debug|x64.ActiveCfg = Debug|Any CPU + {B308B94C-E01F-4449-A5A6-CD7A48E52D15}.Debug|x64.Build.0 = Debug|Any CPU + {B308B94C-E01F-4449-A5A6-CD7A48E52D15}.Debug|x86.ActiveCfg = Debug|Any CPU + {B308B94C-E01F-4449-A5A6-CD7A48E52D15}.Debug|x86.Build.0 = Debug|Any CPU + {B308B94C-E01F-4449-A5A6-CD7A48E52D15}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B308B94C-E01F-4449-A5A6-CD7A48E52D15}.Release|Any CPU.Build.0 = Release|Any CPU + 
{B308B94C-E01F-4449-A5A6-CD7A48E52D15}.Release|x64.ActiveCfg = Release|Any CPU + {B308B94C-E01F-4449-A5A6-CD7A48E52D15}.Release|x64.Build.0 = Release|Any CPU + {B308B94C-E01F-4449-A5A6-CD7A48E52D15}.Release|x86.ActiveCfg = Release|Any CPU + {B308B94C-E01F-4449-A5A6-CD7A48E52D15}.Release|x86.Build.0 = Release|Any CPU + {9FBA3EC4-D794-48BD-82FA-0289E5A2A5FF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {9FBA3EC4-D794-48BD-82FA-0289E5A2A5FF}.Debug|Any CPU.Build.0 = Debug|Any CPU + {9FBA3EC4-D794-48BD-82FA-0289E5A2A5FF}.Debug|x64.ActiveCfg = Debug|Any CPU + {9FBA3EC4-D794-48BD-82FA-0289E5A2A5FF}.Debug|x64.Build.0 = Debug|Any CPU + {9FBA3EC4-D794-48BD-82FA-0289E5A2A5FF}.Debug|x86.ActiveCfg = Debug|Any CPU + {9FBA3EC4-D794-48BD-82FA-0289E5A2A5FF}.Debug|x86.Build.0 = Debug|Any CPU + {9FBA3EC4-D794-48BD-82FA-0289E5A2A5FF}.Release|Any CPU.ActiveCfg = Release|Any CPU + {9FBA3EC4-D794-48BD-82FA-0289E5A2A5FF}.Release|Any CPU.Build.0 = Release|Any CPU + {9FBA3EC4-D794-48BD-82FA-0289E5A2A5FF}.Release|x64.ActiveCfg = Release|Any CPU + {9FBA3EC4-D794-48BD-82FA-0289E5A2A5FF}.Release|x64.Build.0 = Release|Any CPU + {9FBA3EC4-D794-48BD-82FA-0289E5A2A5FF}.Release|x86.ActiveCfg = Release|Any CPU + {9FBA3EC4-D794-48BD-82FA-0289E5A2A5FF}.Release|x86.Build.0 = Release|Any CPU + {E076DC9C-B436-44BF-B02E-FA565086F805}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E076DC9C-B436-44BF-B02E-FA565086F805}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E076DC9C-B436-44BF-B02E-FA565086F805}.Debug|x64.ActiveCfg = Debug|Any CPU + {E076DC9C-B436-44BF-B02E-FA565086F805}.Debug|x64.Build.0 = Debug|Any CPU + {E076DC9C-B436-44BF-B02E-FA565086F805}.Debug|x86.ActiveCfg = Debug|Any CPU + {E076DC9C-B436-44BF-B02E-FA565086F805}.Debug|x86.Build.0 = Debug|Any CPU + {E076DC9C-B436-44BF-B02E-FA565086F805}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E076DC9C-B436-44BF-B02E-FA565086F805}.Release|Any CPU.Build.0 = Release|Any CPU + {E076DC9C-B436-44BF-B02E-FA565086F805}.Release|x64.ActiveCfg = Release|Any CPU + {E076DC9C-B436-44BF-B02E-FA565086F805}.Release|x64.Build.0 = Release|Any CPU + {E076DC9C-B436-44BF-B02E-FA565086F805}.Release|x86.ActiveCfg = Release|Any CPU + {E076DC9C-B436-44BF-B02E-FA565086F805}.Release|x86.Build.0 = Release|Any CPU + {55500025-FE82-4F97-A261-9BAEA4B10845}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {55500025-FE82-4F97-A261-9BAEA4B10845}.Debug|Any CPU.Build.0 = Debug|Any CPU + {55500025-FE82-4F97-A261-9BAEA4B10845}.Debug|x64.ActiveCfg = Debug|Any CPU + {55500025-FE82-4F97-A261-9BAEA4B10845}.Debug|x64.Build.0 = Debug|Any CPU + {55500025-FE82-4F97-A261-9BAEA4B10845}.Debug|x86.ActiveCfg = Debug|Any CPU + {55500025-FE82-4F97-A261-9BAEA4B10845}.Debug|x86.Build.0 = Debug|Any CPU + {55500025-FE82-4F97-A261-9BAEA4B10845}.Release|Any CPU.ActiveCfg = Release|Any CPU + {55500025-FE82-4F97-A261-9BAEA4B10845}.Release|Any CPU.Build.0 = Release|Any CPU + {55500025-FE82-4F97-A261-9BAEA4B10845}.Release|x64.ActiveCfg = Release|Any CPU + {55500025-FE82-4F97-A261-9BAEA4B10845}.Release|x64.Build.0 = Release|Any CPU + {55500025-FE82-4F97-A261-9BAEA4B10845}.Release|x86.ActiveCfg = Release|Any CPU + {55500025-FE82-4F97-A261-9BAEA4B10845}.Release|x86.Build.0 = Release|Any CPU + {CD12875F-9367-41BD-810C-7FBE76314F17}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {CD12875F-9367-41BD-810C-7FBE76314F17}.Debug|Any CPU.Build.0 = Debug|Any CPU + {CD12875F-9367-41BD-810C-7FBE76314F17}.Debug|x64.ActiveCfg = Debug|Any CPU + {CD12875F-9367-41BD-810C-7FBE76314F17}.Debug|x64.Build.0 = Debug|Any CPU + {CD12875F-9367-41BD-810C-7FBE76314F17}.Debug|x86.ActiveCfg = 
Debug|Any CPU + {CD12875F-9367-41BD-810C-7FBE76314F17}.Debug|x86.Build.0 = Debug|Any CPU + {CD12875F-9367-41BD-810C-7FBE76314F17}.Release|Any CPU.ActiveCfg = Release|Any CPU + {CD12875F-9367-41BD-810C-7FBE76314F17}.Release|Any CPU.Build.0 = Release|Any CPU + {CD12875F-9367-41BD-810C-7FBE76314F17}.Release|x64.ActiveCfg = Release|Any CPU + {CD12875F-9367-41BD-810C-7FBE76314F17}.Release|x64.Build.0 = Release|Any CPU + {CD12875F-9367-41BD-810C-7FBE76314F17}.Release|x86.ActiveCfg = Release|Any CPU + {CD12875F-9367-41BD-810C-7FBE76314F17}.Release|x86.Build.0 = Release|Any CPU + {063D3280-9918-465A-AF2D-3650A2A50D03}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {063D3280-9918-465A-AF2D-3650A2A50D03}.Debug|Any CPU.Build.0 = Debug|Any CPU + {063D3280-9918-465A-AF2D-3650A2A50D03}.Debug|x64.ActiveCfg = Debug|Any CPU + {063D3280-9918-465A-AF2D-3650A2A50D03}.Debug|x64.Build.0 = Debug|Any CPU + {063D3280-9918-465A-AF2D-3650A2A50D03}.Debug|x86.ActiveCfg = Debug|Any CPU + {063D3280-9918-465A-AF2D-3650A2A50D03}.Debug|x86.Build.0 = Debug|Any CPU + {063D3280-9918-465A-AF2D-3650A2A50D03}.Release|Any CPU.ActiveCfg = Release|Any CPU + {063D3280-9918-465A-AF2D-3650A2A50D03}.Release|Any CPU.Build.0 = Release|Any CPU + {063D3280-9918-465A-AF2D-3650A2A50D03}.Release|x64.ActiveCfg = Release|Any CPU + {063D3280-9918-465A-AF2D-3650A2A50D03}.Release|x64.Build.0 = Release|Any CPU + {063D3280-9918-465A-AF2D-3650A2A50D03}.Release|x86.ActiveCfg = Release|Any CPU + {063D3280-9918-465A-AF2D-3650A2A50D03}.Release|x86.Build.0 = Release|Any CPU + {A3EEE400-3655-4B34-915A-598E60CD55FB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {A3EEE400-3655-4B34-915A-598E60CD55FB}.Debug|Any CPU.Build.0 = Debug|Any CPU + {A3EEE400-3655-4B34-915A-598E60CD55FB}.Debug|x64.ActiveCfg = Debug|Any CPU + {A3EEE400-3655-4B34-915A-598E60CD55FB}.Debug|x64.Build.0 = Debug|Any CPU + {A3EEE400-3655-4B34-915A-598E60CD55FB}.Debug|x86.ActiveCfg = Debug|Any CPU + {A3EEE400-3655-4B34-915A-598E60CD55FB}.Debug|x86.Build.0 = Debug|Any CPU + {A3EEE400-3655-4B34-915A-598E60CD55FB}.Release|Any CPU.ActiveCfg = Release|Any CPU + {A3EEE400-3655-4B34-915A-598E60CD55FB}.Release|Any CPU.Build.0 = Release|Any CPU + {A3EEE400-3655-4B34-915A-598E60CD55FB}.Release|x64.ActiveCfg = Release|Any CPU + {A3EEE400-3655-4B34-915A-598E60CD55FB}.Release|x64.Build.0 = Release|Any CPU + {A3EEE400-3655-4B34-915A-598E60CD55FB}.Release|x86.ActiveCfg = Release|Any CPU + {A3EEE400-3655-4B34-915A-598E60CD55FB}.Release|x86.Build.0 = Release|Any CPU + {577025AD-2FDD-42DF-BFA2-3FC095B50539}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {577025AD-2FDD-42DF-BFA2-3FC095B50539}.Debug|Any CPU.Build.0 = Debug|Any CPU + {577025AD-2FDD-42DF-BFA2-3FC095B50539}.Debug|x64.ActiveCfg = Debug|Any CPU + {577025AD-2FDD-42DF-BFA2-3FC095B50539}.Debug|x64.Build.0 = Debug|Any CPU + {577025AD-2FDD-42DF-BFA2-3FC095B50539}.Debug|x86.ActiveCfg = Debug|Any CPU + {577025AD-2FDD-42DF-BFA2-3FC095B50539}.Debug|x86.Build.0 = Debug|Any CPU + {577025AD-2FDD-42DF-BFA2-3FC095B50539}.Release|Any CPU.ActiveCfg = Release|Any CPU + {577025AD-2FDD-42DF-BFA2-3FC095B50539}.Release|Any CPU.Build.0 = Release|Any CPU + {577025AD-2FDD-42DF-BFA2-3FC095B50539}.Release|x64.ActiveCfg = Release|Any CPU + {577025AD-2FDD-42DF-BFA2-3FC095B50539}.Release|x64.Build.0 = Release|Any CPU + {577025AD-2FDD-42DF-BFA2-3FC095B50539}.Release|x86.ActiveCfg = Release|Any CPU + {577025AD-2FDD-42DF-BFA2-3FC095B50539}.Release|x86.Build.0 = Release|Any CPU + {DD3B2076-E5E0-4533-8D27-7724225D7758}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + 
{DD3B2076-E5E0-4533-8D27-7724225D7758}.Debug|Any CPU.Build.0 = Debug|Any CPU + {DD3B2076-E5E0-4533-8D27-7724225D7758}.Debug|x64.ActiveCfg = Debug|Any CPU + {DD3B2076-E5E0-4533-8D27-7724225D7758}.Debug|x64.Build.0 = Debug|Any CPU + {DD3B2076-E5E0-4533-8D27-7724225D7758}.Debug|x86.ActiveCfg = Debug|Any CPU + {DD3B2076-E5E0-4533-8D27-7724225D7758}.Debug|x86.Build.0 = Debug|Any CPU + {DD3B2076-E5E0-4533-8D27-7724225D7758}.Release|Any CPU.ActiveCfg = Release|Any CPU + {DD3B2076-E5E0-4533-8D27-7724225D7758}.Release|Any CPU.Build.0 = Release|Any CPU + {DD3B2076-E5E0-4533-8D27-7724225D7758}.Release|x64.ActiveCfg = Release|Any CPU + {DD3B2076-E5E0-4533-8D27-7724225D7758}.Release|x64.Build.0 = Release|Any CPU + {DD3B2076-E5E0-4533-8D27-7724225D7758}.Release|x86.ActiveCfg = Release|Any CPU + {DD3B2076-E5E0-4533-8D27-7724225D7758}.Release|x86.Build.0 = Release|Any CPU + {CADA1364-8EB1-479E-AB6F-4105C26335C8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {CADA1364-8EB1-479E-AB6F-4105C26335C8}.Debug|Any CPU.Build.0 = Debug|Any CPU + {CADA1364-8EB1-479E-AB6F-4105C26335C8}.Debug|x64.ActiveCfg = Debug|Any CPU + {CADA1364-8EB1-479E-AB6F-4105C26335C8}.Debug|x64.Build.0 = Debug|Any CPU + {CADA1364-8EB1-479E-AB6F-4105C26335C8}.Debug|x86.ActiveCfg = Debug|Any CPU + {CADA1364-8EB1-479E-AB6F-4105C26335C8}.Debug|x86.Build.0 = Debug|Any CPU + {CADA1364-8EB1-479E-AB6F-4105C26335C8}.Release|Any CPU.ActiveCfg = Release|Any CPU + {CADA1364-8EB1-479E-AB6F-4105C26335C8}.Release|Any CPU.Build.0 = Release|Any CPU + {CADA1364-8EB1-479E-AB6F-4105C26335C8}.Release|x64.ActiveCfg = Release|Any CPU + {CADA1364-8EB1-479E-AB6F-4105C26335C8}.Release|x64.Build.0 = Release|Any CPU + {CADA1364-8EB1-479E-AB6F-4105C26335C8}.Release|x86.ActiveCfg = Release|Any CPU + {CADA1364-8EB1-479E-AB6F-4105C26335C8}.Release|x86.Build.0 = Release|Any CPU + {8CC4441E-9D1A-4E00-831B-34828A3F9446}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {8CC4441E-9D1A-4E00-831B-34828A3F9446}.Debug|Any CPU.Build.0 = Debug|Any CPU + {8CC4441E-9D1A-4E00-831B-34828A3F9446}.Debug|x64.ActiveCfg = Debug|Any CPU + {8CC4441E-9D1A-4E00-831B-34828A3F9446}.Debug|x64.Build.0 = Debug|Any CPU + {8CC4441E-9D1A-4E00-831B-34828A3F9446}.Debug|x86.ActiveCfg = Debug|Any CPU + {8CC4441E-9D1A-4E00-831B-34828A3F9446}.Debug|x86.Build.0 = Debug|Any CPU + {8CC4441E-9D1A-4E00-831B-34828A3F9446}.Release|Any CPU.ActiveCfg = Release|Any CPU + {8CC4441E-9D1A-4E00-831B-34828A3F9446}.Release|Any CPU.Build.0 = Release|Any CPU + {8CC4441E-9D1A-4E00-831B-34828A3F9446}.Release|x64.ActiveCfg = Release|Any CPU + {8CC4441E-9D1A-4E00-831B-34828A3F9446}.Release|x64.Build.0 = Release|Any CPU + {8CC4441E-9D1A-4E00-831B-34828A3F9446}.Release|x86.ActiveCfg = Release|Any CPU + {8CC4441E-9D1A-4E00-831B-34828A3F9446}.Release|x86.Build.0 = Release|Any CPU + {01B8AC3F-1B97-4F79-93C6-BE1CBA26FE17}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {01B8AC3F-1B97-4F79-93C6-BE1CBA26FE17}.Debug|Any CPU.Build.0 = Debug|Any CPU + {01B8AC3F-1B97-4F79-93C6-BE1CBA26FE17}.Debug|x64.ActiveCfg = Debug|Any CPU + {01B8AC3F-1B97-4F79-93C6-BE1CBA26FE17}.Debug|x64.Build.0 = Debug|Any CPU + {01B8AC3F-1B97-4F79-93C6-BE1CBA26FE17}.Debug|x86.ActiveCfg = Debug|Any CPU + {01B8AC3F-1B97-4F79-93C6-BE1CBA26FE17}.Debug|x86.Build.0 = Debug|Any CPU + {01B8AC3F-1B97-4F79-93C6-BE1CBA26FE17}.Release|Any CPU.ActiveCfg = Release|Any CPU + {01B8AC3F-1B97-4F79-93C6-BE1CBA26FE17}.Release|Any CPU.Build.0 = Release|Any CPU + {01B8AC3F-1B97-4F79-93C6-BE1CBA26FE17}.Release|x64.ActiveCfg = Release|Any CPU + {01B8AC3F-1B97-4F79-93C6-BE1CBA26FE17}.Release|x64.Build.0 = 
Release|Any CPU + {01B8AC3F-1B97-4F79-93C6-BE1CBA26FE17}.Release|x86.ActiveCfg = Release|Any CPU + {01B8AC3F-1B97-4F79-93C6-BE1CBA26FE17}.Release|x86.Build.0 = Release|Any CPU + {37BB9502-CCD1-425A-BF45-D56968B0C2F9}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {37BB9502-CCD1-425A-BF45-D56968B0C2F9}.Debug|Any CPU.Build.0 = Debug|Any CPU + {37BB9502-CCD1-425A-BF45-D56968B0C2F9}.Debug|x64.ActiveCfg = Debug|Any CPU + {37BB9502-CCD1-425A-BF45-D56968B0C2F9}.Debug|x64.Build.0 = Debug|Any CPU + {37BB9502-CCD1-425A-BF45-D56968B0C2F9}.Debug|x86.ActiveCfg = Debug|Any CPU + {37BB9502-CCD1-425A-BF45-D56968B0C2F9}.Debug|x86.Build.0 = Debug|Any CPU + {37BB9502-CCD1-425A-BF45-D56968B0C2F9}.Release|Any CPU.ActiveCfg = Release|Any CPU + {37BB9502-CCD1-425A-BF45-D56968B0C2F9}.Release|Any CPU.Build.0 = Release|Any CPU + {37BB9502-CCD1-425A-BF45-D56968B0C2F9}.Release|x64.ActiveCfg = Release|Any CPU + {37BB9502-CCD1-425A-BF45-D56968B0C2F9}.Release|x64.Build.0 = Release|Any CPU + {37BB9502-CCD1-425A-BF45-D56968B0C2F9}.Release|x86.ActiveCfg = Release|Any CPU + {37BB9502-CCD1-425A-BF45-D56968B0C2F9}.Release|x86.Build.0 = Release|Any CPU + {015A7A95-2C07-4C7F-8048-DB591AAC5FE5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {015A7A95-2C07-4C7F-8048-DB591AAC5FE5}.Debug|Any CPU.Build.0 = Debug|Any CPU + {015A7A95-2C07-4C7F-8048-DB591AAC5FE5}.Debug|x64.ActiveCfg = Debug|Any CPU + {015A7A95-2C07-4C7F-8048-DB591AAC5FE5}.Debug|x64.Build.0 = Debug|Any CPU + {015A7A95-2C07-4C7F-8048-DB591AAC5FE5}.Debug|x86.ActiveCfg = Debug|Any CPU + {015A7A95-2C07-4C7F-8048-DB591AAC5FE5}.Debug|x86.Build.0 = Debug|Any CPU + {015A7A95-2C07-4C7F-8048-DB591AAC5FE5}.Release|Any CPU.ActiveCfg = Release|Any CPU + {015A7A95-2C07-4C7F-8048-DB591AAC5FE5}.Release|Any CPU.Build.0 = Release|Any CPU + {015A7A95-2C07-4C7F-8048-DB591AAC5FE5}.Release|x64.ActiveCfg = Release|Any CPU + {015A7A95-2C07-4C7F-8048-DB591AAC5FE5}.Release|x64.Build.0 = Release|Any CPU + {015A7A95-2C07-4C7F-8048-DB591AAC5FE5}.Release|x86.ActiveCfg = Release|Any CPU + {015A7A95-2C07-4C7F-8048-DB591AAC5FE5}.Release|x86.Build.0 = Release|Any CPU + {EF59DAD6-30CE-47CB-862A-DD79F31BFDE4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {EF59DAD6-30CE-47CB-862A-DD79F31BFDE4}.Debug|Any CPU.Build.0 = Debug|Any CPU + {EF59DAD6-30CE-47CB-862A-DD79F31BFDE4}.Debug|x64.ActiveCfg = Debug|Any CPU + {EF59DAD6-30CE-47CB-862A-DD79F31BFDE4}.Debug|x64.Build.0 = Debug|Any CPU + {EF59DAD6-30CE-47CB-862A-DD79F31BFDE4}.Debug|x86.ActiveCfg = Debug|Any CPU + {EF59DAD6-30CE-47CB-862A-DD79F31BFDE4}.Debug|x86.Build.0 = Debug|Any CPU + {EF59DAD6-30CE-47CB-862A-DD79F31BFDE4}.Release|Any CPU.ActiveCfg = Release|Any CPU + {EF59DAD6-30CE-47CB-862A-DD79F31BFDE4}.Release|Any CPU.Build.0 = Release|Any CPU + {EF59DAD6-30CE-47CB-862A-DD79F31BFDE4}.Release|x64.ActiveCfg = Release|Any CPU + {EF59DAD6-30CE-47CB-862A-DD79F31BFDE4}.Release|x64.Build.0 = Release|Any CPU + {EF59DAD6-30CE-47CB-862A-DD79F31BFDE4}.Release|x86.ActiveCfg = Release|Any CPU + {EF59DAD6-30CE-47CB-862A-DD79F31BFDE4}.Release|x86.Build.0 = Release|Any CPU + {27D951AD-696D-4330-B4F5-F8F81344C191}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {27D951AD-696D-4330-B4F5-F8F81344C191}.Debug|Any CPU.Build.0 = Debug|Any CPU + {27D951AD-696D-4330-B4F5-F8F81344C191}.Debug|x64.ActiveCfg = Debug|Any CPU + {27D951AD-696D-4330-B4F5-F8F81344C191}.Debug|x64.Build.0 = Debug|Any CPU + {27D951AD-696D-4330-B4F5-F8F81344C191}.Debug|x86.ActiveCfg = Debug|Any CPU + {27D951AD-696D-4330-B4F5-F8F81344C191}.Debug|x86.Build.0 = Debug|Any CPU + {27D951AD-696D-4330-B4F5-F8F81344C191}.Release|Any 
CPU.ActiveCfg = Release|Any CPU + {27D951AD-696D-4330-B4F5-F8F81344C191}.Release|Any CPU.Build.0 = Release|Any CPU + {27D951AD-696D-4330-B4F5-F8F81344C191}.Release|x64.ActiveCfg = Release|Any CPU + {27D951AD-696D-4330-B4F5-F8F81344C191}.Release|x64.Build.0 = Release|Any CPU + {27D951AD-696D-4330-B4F5-F8F81344C191}.Release|x86.ActiveCfg = Release|Any CPU + {27D951AD-696D-4330-B4F5-F8F81344C191}.Release|x86.Build.0 = Release|Any CPU + {31277AFF-9BFF-4C17-8593-B562A385058E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {31277AFF-9BFF-4C17-8593-B562A385058E}.Debug|Any CPU.Build.0 = Debug|Any CPU + {31277AFF-9BFF-4C17-8593-B562A385058E}.Debug|x64.ActiveCfg = Debug|Any CPU + {31277AFF-9BFF-4C17-8593-B562A385058E}.Debug|x64.Build.0 = Debug|Any CPU + {31277AFF-9BFF-4C17-8593-B562A385058E}.Debug|x86.ActiveCfg = Debug|Any CPU + {31277AFF-9BFF-4C17-8593-B562A385058E}.Debug|x86.Build.0 = Debug|Any CPU + {31277AFF-9BFF-4C17-8593-B562A385058E}.Release|Any CPU.ActiveCfg = Release|Any CPU + {31277AFF-9BFF-4C17-8593-B562A385058E}.Release|Any CPU.Build.0 = Release|Any CPU + {31277AFF-9BFF-4C17-8593-B562A385058E}.Release|x64.ActiveCfg = Release|Any CPU + {31277AFF-9BFF-4C17-8593-B562A385058E}.Release|x64.Build.0 = Release|Any CPU + {31277AFF-9BFF-4C17-8593-B562A385058E}.Release|x86.ActiveCfg = Release|Any CPU + {31277AFF-9BFF-4C17-8593-B562A385058E}.Release|x86.Build.0 = Release|Any CPU + {3A8F090F-678D-46E2-8899-67402129749C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {3A8F090F-678D-46E2-8899-67402129749C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {3A8F090F-678D-46E2-8899-67402129749C}.Debug|x64.ActiveCfg = Debug|Any CPU + {3A8F090F-678D-46E2-8899-67402129749C}.Debug|x64.Build.0 = Debug|Any CPU + {3A8F090F-678D-46E2-8899-67402129749C}.Debug|x86.ActiveCfg = Debug|Any CPU + {3A8F090F-678D-46E2-8899-67402129749C}.Debug|x86.Build.0 = Debug|Any CPU + {3A8F090F-678D-46E2-8899-67402129749C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {3A8F090F-678D-46E2-8899-67402129749C}.Release|Any CPU.Build.0 = Release|Any CPU + {3A8F090F-678D-46E2-8899-67402129749C}.Release|x64.ActiveCfg = Release|Any CPU + {3A8F090F-678D-46E2-8899-67402129749C}.Release|x64.Build.0 = Release|Any CPU + {3A8F090F-678D-46E2-8899-67402129749C}.Release|x86.ActiveCfg = Release|Any CPU + {3A8F090F-678D-46E2-8899-67402129749C}.Release|x86.Build.0 = Release|Any CPU + {19FACEC7-D6D4-40F5-84AD-14E2983F18F7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {19FACEC7-D6D4-40F5-84AD-14E2983F18F7}.Debug|Any CPU.Build.0 = Debug|Any CPU + {19FACEC7-D6D4-40F5-84AD-14E2983F18F7}.Debug|x64.ActiveCfg = Debug|Any CPU + {19FACEC7-D6D4-40F5-84AD-14E2983F18F7}.Debug|x64.Build.0 = Debug|Any CPU + {19FACEC7-D6D4-40F5-84AD-14E2983F18F7}.Debug|x86.ActiveCfg = Debug|Any CPU + {19FACEC7-D6D4-40F5-84AD-14E2983F18F7}.Debug|x86.Build.0 = Debug|Any CPU + {19FACEC7-D6D4-40F5-84AD-14E2983F18F7}.Release|Any CPU.ActiveCfg = Release|Any CPU + {19FACEC7-D6D4-40F5-84AD-14E2983F18F7}.Release|Any CPU.Build.0 = Release|Any CPU + {19FACEC7-D6D4-40F5-84AD-14E2983F18F7}.Release|x64.ActiveCfg = Release|Any CPU + {19FACEC7-D6D4-40F5-84AD-14E2983F18F7}.Release|x64.Build.0 = Release|Any CPU + {19FACEC7-D6D4-40F5-84AD-14E2983F18F7}.Release|x86.ActiveCfg = Release|Any CPU + {19FACEC7-D6D4-40F5-84AD-14E2983F18F7}.Release|x86.Build.0 = Release|Any CPU + {8342286A-BE36-4ACA-87FF-EBEB4E268498}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {8342286A-BE36-4ACA-87FF-EBEB4E268498}.Debug|Any CPU.Build.0 = Debug|Any CPU + {8342286A-BE36-4ACA-87FF-EBEB4E268498}.Debug|x64.ActiveCfg = Debug|Any CPU + 
{8342286A-BE36-4ACA-87FF-EBEB4E268498}.Debug|x64.Build.0 = Debug|Any CPU + {8342286A-BE36-4ACA-87FF-EBEB4E268498}.Debug|x86.ActiveCfg = Debug|Any CPU + {8342286A-BE36-4ACA-87FF-EBEB4E268498}.Debug|x86.Build.0 = Debug|Any CPU + {8342286A-BE36-4ACA-87FF-EBEB4E268498}.Release|Any CPU.ActiveCfg = Release|Any CPU + {8342286A-BE36-4ACA-87FF-EBEB4E268498}.Release|Any CPU.Build.0 = Release|Any CPU + {8342286A-BE36-4ACA-87FF-EBEB4E268498}.Release|x64.ActiveCfg = Release|Any CPU + {8342286A-BE36-4ACA-87FF-EBEB4E268498}.Release|x64.Build.0 = Release|Any CPU + {8342286A-BE36-4ACA-87FF-EBEB4E268498}.Release|x86.ActiveCfg = Release|Any CPU + {8342286A-BE36-4ACA-87FF-EBEB4E268498}.Release|x86.Build.0 = Release|Any CPU + {05D844B6-51C1-4926-919C-D99E24FB3BC9}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {05D844B6-51C1-4926-919C-D99E24FB3BC9}.Debug|Any CPU.Build.0 = Debug|Any CPU + {05D844B6-51C1-4926-919C-D99E24FB3BC9}.Debug|x64.ActiveCfg = Debug|Any CPU + {05D844B6-51C1-4926-919C-D99E24FB3BC9}.Debug|x64.Build.0 = Debug|Any CPU + {05D844B6-51C1-4926-919C-D99E24FB3BC9}.Debug|x86.ActiveCfg = Debug|Any CPU + {05D844B6-51C1-4926-919C-D99E24FB3BC9}.Debug|x86.Build.0 = Debug|Any CPU + {05D844B6-51C1-4926-919C-D99E24FB3BC9}.Release|Any CPU.ActiveCfg = Release|Any CPU + {05D844B6-51C1-4926-919C-D99E24FB3BC9}.Release|Any CPU.Build.0 = Release|Any CPU + {05D844B6-51C1-4926-919C-D99E24FB3BC9}.Release|x64.ActiveCfg = Release|Any CPU + {05D844B6-51C1-4926-919C-D99E24FB3BC9}.Release|x64.Build.0 = Release|Any CPU + {05D844B6-51C1-4926-919C-D99E24FB3BC9}.Release|x86.ActiveCfg = Release|Any CPU + {05D844B6-51C1-4926-919C-D99E24FB3BC9}.Release|x86.Build.0 = Release|Any CPU + {03E15545-D6A0-4287-A88C-6EDE77C0DCBE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {03E15545-D6A0-4287-A88C-6EDE77C0DCBE}.Debug|Any CPU.Build.0 = Debug|Any CPU + {03E15545-D6A0-4287-A88C-6EDE77C0DCBE}.Debug|x64.ActiveCfg = Debug|Any CPU + {03E15545-D6A0-4287-A88C-6EDE77C0DCBE}.Debug|x64.Build.0 = Debug|Any CPU + {03E15545-D6A0-4287-A88C-6EDE77C0DCBE}.Debug|x86.ActiveCfg = Debug|Any CPU + {03E15545-D6A0-4287-A88C-6EDE77C0DCBE}.Debug|x86.Build.0 = Debug|Any CPU + {03E15545-D6A0-4287-A88C-6EDE77C0DCBE}.Release|Any CPU.ActiveCfg = Release|Any CPU + {03E15545-D6A0-4287-A88C-6EDE77C0DCBE}.Release|Any CPU.Build.0 = Release|Any CPU + {03E15545-D6A0-4287-A88C-6EDE77C0DCBE}.Release|x64.ActiveCfg = Release|Any CPU + {03E15545-D6A0-4287-A88C-6EDE77C0DCBE}.Release|x64.Build.0 = Release|Any CPU + {03E15545-D6A0-4287-A88C-6EDE77C0DCBE}.Release|x86.ActiveCfg = Release|Any CPU + {03E15545-D6A0-4287-A88C-6EDE77C0DCBE}.Release|x86.Build.0 = Release|Any CPU + {A072C46F-BA45-419E-B1B6-416919F78440}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {A072C46F-BA45-419E-B1B6-416919F78440}.Debug|Any CPU.Build.0 = Debug|Any CPU + {A072C46F-BA45-419E-B1B6-416919F78440}.Debug|x64.ActiveCfg = Debug|Any CPU + {A072C46F-BA45-419E-B1B6-416919F78440}.Debug|x64.Build.0 = Debug|Any CPU + {A072C46F-BA45-419E-B1B6-416919F78440}.Debug|x86.ActiveCfg = Debug|Any CPU + {A072C46F-BA45-419E-B1B6-416919F78440}.Debug|x86.Build.0 = Debug|Any CPU + {A072C46F-BA45-419E-B1B6-416919F78440}.Release|Any CPU.ActiveCfg = Release|Any CPU + {A072C46F-BA45-419E-B1B6-416919F78440}.Release|Any CPU.Build.0 = Release|Any CPU + {A072C46F-BA45-419E-B1B6-416919F78440}.Release|x64.ActiveCfg = Release|Any CPU + {A072C46F-BA45-419E-B1B6-416919F78440}.Release|x64.Build.0 = Release|Any CPU + {A072C46F-BA45-419E-B1B6-416919F78440}.Release|x86.ActiveCfg = Release|Any CPU + {A072C46F-BA45-419E-B1B6-416919F78440}.Release|x86.Build.0 = 
Release|Any CPU + {6DE0F48D-8CEA-44C1-82FF-0DC891B33FE3}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {6DE0F48D-8CEA-44C1-82FF-0DC891B33FE3}.Debug|Any CPU.Build.0 = Debug|Any CPU + {6DE0F48D-8CEA-44C1-82FF-0DC891B33FE3}.Debug|x64.ActiveCfg = Debug|Any CPU + {6DE0F48D-8CEA-44C1-82FF-0DC891B33FE3}.Debug|x64.Build.0 = Debug|Any CPU + {6DE0F48D-8CEA-44C1-82FF-0DC891B33FE3}.Debug|x86.ActiveCfg = Debug|Any CPU + {6DE0F48D-8CEA-44C1-82FF-0DC891B33FE3}.Debug|x86.Build.0 = Debug|Any CPU + {6DE0F48D-8CEA-44C1-82FF-0DC891B33FE3}.Release|Any CPU.ActiveCfg = Release|Any CPU + {6DE0F48D-8CEA-44C1-82FF-0DC891B33FE3}.Release|Any CPU.Build.0 = Release|Any CPU + {6DE0F48D-8CEA-44C1-82FF-0DC891B33FE3}.Release|x64.ActiveCfg = Release|Any CPU + {6DE0F48D-8CEA-44C1-82FF-0DC891B33FE3}.Release|x64.Build.0 = Release|Any CPU + {6DE0F48D-8CEA-44C1-82FF-0DC891B33FE3}.Release|x86.ActiveCfg = Release|Any CPU + {6DE0F48D-8CEA-44C1-82FF-0DC891B33FE3}.Release|x86.Build.0 = Release|Any CPU + {10088067-7B8F-4D2E-A8E1-ED546DC17369}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {10088067-7B8F-4D2E-A8E1-ED546DC17369}.Debug|Any CPU.Build.0 = Debug|Any CPU + {10088067-7B8F-4D2E-A8E1-ED546DC17369}.Debug|x64.ActiveCfg = Debug|Any CPU + {10088067-7B8F-4D2E-A8E1-ED546DC17369}.Debug|x64.Build.0 = Debug|Any CPU + {10088067-7B8F-4D2E-A8E1-ED546DC17369}.Debug|x86.ActiveCfg = Debug|Any CPU + {10088067-7B8F-4D2E-A8E1-ED546DC17369}.Debug|x86.Build.0 = Debug|Any CPU + {10088067-7B8F-4D2E-A8E1-ED546DC17369}.Release|Any CPU.ActiveCfg = Release|Any CPU + {10088067-7B8F-4D2E-A8E1-ED546DC17369}.Release|Any CPU.Build.0 = Release|Any CPU + {10088067-7B8F-4D2E-A8E1-ED546DC17369}.Release|x64.ActiveCfg = Release|Any CPU + {10088067-7B8F-4D2E-A8E1-ED546DC17369}.Release|x64.Build.0 = Release|Any CPU + {10088067-7B8F-4D2E-A8E1-ED546DC17369}.Release|x86.ActiveCfg = Release|Any CPU + {10088067-7B8F-4D2E-A8E1-ED546DC17369}.Release|x86.Build.0 = Release|Any CPU + {E014565C-2456-4BD0-9481-557F939C1E36}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E014565C-2456-4BD0-9481-557F939C1E36}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E014565C-2456-4BD0-9481-557F939C1E36}.Debug|x64.ActiveCfg = Debug|Any CPU + {E014565C-2456-4BD0-9481-557F939C1E36}.Debug|x64.Build.0 = Debug|Any CPU + {E014565C-2456-4BD0-9481-557F939C1E36}.Debug|x86.ActiveCfg = Debug|Any CPU + {E014565C-2456-4BD0-9481-557F939C1E36}.Debug|x86.Build.0 = Debug|Any CPU + {E014565C-2456-4BD0-9481-557F939C1E36}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E014565C-2456-4BD0-9481-557F939C1E36}.Release|Any CPU.Build.0 = Release|Any CPU + {E014565C-2456-4BD0-9481-557F939C1E36}.Release|x64.ActiveCfg = Release|Any CPU + {E014565C-2456-4BD0-9481-557F939C1E36}.Release|x64.Build.0 = Release|Any CPU + {E014565C-2456-4BD0-9481-557F939C1E36}.Release|x86.ActiveCfg = Release|Any CPU + {E014565C-2456-4BD0-9481-557F939C1E36}.Release|x86.Build.0 = Release|Any CPU + {44825FDA-68D2-4675-8B1D-6D5303DC38CF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {44825FDA-68D2-4675-8B1D-6D5303DC38CF}.Debug|Any CPU.Build.0 = Debug|Any CPU + {44825FDA-68D2-4675-8B1D-6D5303DC38CF}.Debug|x64.ActiveCfg = Debug|Any CPU + {44825FDA-68D2-4675-8B1D-6D5303DC38CF}.Debug|x64.Build.0 = Debug|Any CPU + {44825FDA-68D2-4675-8B1D-6D5303DC38CF}.Debug|x86.ActiveCfg = Debug|Any CPU + {44825FDA-68D2-4675-8B1D-6D5303DC38CF}.Debug|x86.Build.0 = Debug|Any CPU + {44825FDA-68D2-4675-8B1D-6D5303DC38CF}.Release|Any CPU.ActiveCfg = Release|Any CPU + {44825FDA-68D2-4675-8B1D-6D5303DC38CF}.Release|Any CPU.Build.0 = Release|Any CPU + 
{44825FDA-68D2-4675-8B1D-6D5303DC38CF}.Release|x64.ActiveCfg = Release|Any CPU + {44825FDA-68D2-4675-8B1D-6D5303DC38CF}.Release|x64.Build.0 = Release|Any CPU + {44825FDA-68D2-4675-8B1D-6D5303DC38CF}.Release|x86.ActiveCfg = Release|Any CPU + {44825FDA-68D2-4675-8B1D-6D5303DC38CF}.Release|x86.Build.0 = Release|Any CPU + {6D46DB08-C8D1-4F67-A6D0-D50FE84F19E0}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {6D46DB08-C8D1-4F67-A6D0-D50FE84F19E0}.Debug|Any CPU.Build.0 = Debug|Any CPU + {6D46DB08-C8D1-4F67-A6D0-D50FE84F19E0}.Debug|x64.ActiveCfg = Debug|Any CPU + {6D46DB08-C8D1-4F67-A6D0-D50FE84F19E0}.Debug|x64.Build.0 = Debug|Any CPU + {6D46DB08-C8D1-4F67-A6D0-D50FE84F19E0}.Debug|x86.ActiveCfg = Debug|Any CPU + {6D46DB08-C8D1-4F67-A6D0-D50FE84F19E0}.Debug|x86.Build.0 = Debug|Any CPU + {6D46DB08-C8D1-4F67-A6D0-D50FE84F19E0}.Release|Any CPU.ActiveCfg = Release|Any CPU + {6D46DB08-C8D1-4F67-A6D0-D50FE84F19E0}.Release|Any CPU.Build.0 = Release|Any CPU + {6D46DB08-C8D1-4F67-A6D0-D50FE84F19E0}.Release|x64.ActiveCfg = Release|Any CPU + {6D46DB08-C8D1-4F67-A6D0-D50FE84F19E0}.Release|x64.Build.0 = Release|Any CPU + {6D46DB08-C8D1-4F67-A6D0-D50FE84F19E0}.Release|x86.ActiveCfg = Release|Any CPU + {6D46DB08-C8D1-4F67-A6D0-D50FE84F19E0}.Release|x86.Build.0 = Release|Any CPU + {5E5EB0A7-7A19-4144-81FE-13C31DB678B2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {5E5EB0A7-7A19-4144-81FE-13C31DB678B2}.Debug|Any CPU.Build.0 = Debug|Any CPU + {5E5EB0A7-7A19-4144-81FE-13C31DB678B2}.Debug|x64.ActiveCfg = Debug|Any CPU + {5E5EB0A7-7A19-4144-81FE-13C31DB678B2}.Debug|x64.Build.0 = Debug|Any CPU + {5E5EB0A7-7A19-4144-81FE-13C31DB678B2}.Debug|x86.ActiveCfg = Debug|Any CPU + {5E5EB0A7-7A19-4144-81FE-13C31DB678B2}.Debug|x86.Build.0 = Debug|Any CPU + {5E5EB0A7-7A19-4144-81FE-13C31DB678B2}.Release|Any CPU.ActiveCfg = Release|Any CPU + {5E5EB0A7-7A19-4144-81FE-13C31DB678B2}.Release|Any CPU.Build.0 = Release|Any CPU + {5E5EB0A7-7A19-4144-81FE-13C31DB678B2}.Release|x64.ActiveCfg = Release|Any CPU + {5E5EB0A7-7A19-4144-81FE-13C31DB678B2}.Release|x64.Build.0 = Release|Any CPU + {5E5EB0A7-7A19-4144-81FE-13C31DB678B2}.Release|x86.ActiveCfg = Release|Any CPU + {5E5EB0A7-7A19-4144-81FE-13C31DB678B2}.Release|x86.Build.0 = Release|Any CPU + {7F3D4F33-341A-44A1-96EA-A1729BC2E5D8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7F3D4F33-341A-44A1-96EA-A1729BC2E5D8}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7F3D4F33-341A-44A1-96EA-A1729BC2E5D8}.Debug|x64.ActiveCfg = Debug|Any CPU + {7F3D4F33-341A-44A1-96EA-A1729BC2E5D8}.Debug|x64.Build.0 = Debug|Any CPU + {7F3D4F33-341A-44A1-96EA-A1729BC2E5D8}.Debug|x86.ActiveCfg = Debug|Any CPU + {7F3D4F33-341A-44A1-96EA-A1729BC2E5D8}.Debug|x86.Build.0 = Debug|Any CPU + {7F3D4F33-341A-44A1-96EA-A1729BC2E5D8}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7F3D4F33-341A-44A1-96EA-A1729BC2E5D8}.Release|Any CPU.Build.0 = Release|Any CPU + {7F3D4F33-341A-44A1-96EA-A1729BC2E5D8}.Release|x64.ActiveCfg = Release|Any CPU + {7F3D4F33-341A-44A1-96EA-A1729BC2E5D8}.Release|x64.Build.0 = Release|Any CPU + {7F3D4F33-341A-44A1-96EA-A1729BC2E5D8}.Release|x86.ActiveCfg = Release|Any CPU + {7F3D4F33-341A-44A1-96EA-A1729BC2E5D8}.Release|x86.Build.0 = Release|Any CPU + {B86C287A-734E-4527-A03E-6B970F22E27E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B86C287A-734E-4527-A03E-6B970F22E27E}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B86C287A-734E-4527-A03E-6B970F22E27E}.Debug|x64.ActiveCfg = Debug|Any CPU + {B86C287A-734E-4527-A03E-6B970F22E27E}.Debug|x64.Build.0 = Debug|Any CPU + {B86C287A-734E-4527-A03E-6B970F22E27E}.Debug|x86.ActiveCfg = 
Debug|Any CPU + {B86C287A-734E-4527-A03E-6B970F22E27E}.Debug|x86.Build.0 = Debug|Any CPU + {B86C287A-734E-4527-A03E-6B970F22E27E}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B86C287A-734E-4527-A03E-6B970F22E27E}.Release|Any CPU.Build.0 = Release|Any CPU + {B86C287A-734E-4527-A03E-6B970F22E27E}.Release|x64.ActiveCfg = Release|Any CPU + {B86C287A-734E-4527-A03E-6B970F22E27E}.Release|x64.Build.0 = Release|Any CPU + {B86C287A-734E-4527-A03E-6B970F22E27E}.Release|x86.ActiveCfg = Release|Any CPU + {B86C287A-734E-4527-A03E-6B970F22E27E}.Release|x86.Build.0 = Release|Any CPU + {E23FBF14-EE5B-49D4-8938-E8368CF4A4B5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E23FBF14-EE5B-49D4-8938-E8368CF4A4B5}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E23FBF14-EE5B-49D4-8938-E8368CF4A4B5}.Debug|x64.ActiveCfg = Debug|Any CPU + {E23FBF14-EE5B-49D4-8938-E8368CF4A4B5}.Debug|x64.Build.0 = Debug|Any CPU + {E23FBF14-EE5B-49D4-8938-E8368CF4A4B5}.Debug|x86.ActiveCfg = Debug|Any CPU + {E23FBF14-EE5B-49D4-8938-E8368CF4A4B5}.Debug|x86.Build.0 = Debug|Any CPU + {E23FBF14-EE5B-49D4-8938-E8368CF4A4B5}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E23FBF14-EE5B-49D4-8938-E8368CF4A4B5}.Release|Any CPU.Build.0 = Release|Any CPU + {E23FBF14-EE5B-49D4-8938-E8368CF4A4B5}.Release|x64.ActiveCfg = Release|Any CPU + {E23FBF14-EE5B-49D4-8938-E8368CF4A4B5}.Release|x64.Build.0 = Release|Any CPU + {E23FBF14-EE5B-49D4-8938-E8368CF4A4B5}.Release|x86.ActiveCfg = Release|Any CPU + {E23FBF14-EE5B-49D4-8938-E8368CF4A4B5}.Release|x86.Build.0 = Release|Any CPU + {50D014B5-99A6-46FC-B745-26687595B293}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {50D014B5-99A6-46FC-B745-26687595B293}.Debug|Any CPU.Build.0 = Debug|Any CPU + {50D014B5-99A6-46FC-B745-26687595B293}.Debug|x64.ActiveCfg = Debug|Any CPU + {50D014B5-99A6-46FC-B745-26687595B293}.Debug|x64.Build.0 = Debug|Any CPU + {50D014B5-99A6-46FC-B745-26687595B293}.Debug|x86.ActiveCfg = Debug|Any CPU + {50D014B5-99A6-46FC-B745-26687595B293}.Debug|x86.Build.0 = Debug|Any CPU + {50D014B5-99A6-46FC-B745-26687595B293}.Release|Any CPU.ActiveCfg = Release|Any CPU + {50D014B5-99A6-46FC-B745-26687595B293}.Release|Any CPU.Build.0 = Release|Any CPU + {50D014B5-99A6-46FC-B745-26687595B293}.Release|x64.ActiveCfg = Release|Any CPU + {50D014B5-99A6-46FC-B745-26687595B293}.Release|x64.Build.0 = Release|Any CPU + {50D014B5-99A6-46FC-B745-26687595B293}.Release|x86.ActiveCfg = Release|Any CPU + {50D014B5-99A6-46FC-B745-26687595B293}.Release|x86.Build.0 = Release|Any CPU + {D99C1F78-67EA-40E7-BD4C-985592F5265A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D99C1F78-67EA-40E7-BD4C-985592F5265A}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D99C1F78-67EA-40E7-BD4C-985592F5265A}.Debug|x64.ActiveCfg = Debug|Any CPU + {D99C1F78-67EA-40E7-BD4C-985592F5265A}.Debug|x64.Build.0 = Debug|Any CPU + {D99C1F78-67EA-40E7-BD4C-985592F5265A}.Debug|x86.ActiveCfg = Debug|Any CPU + {D99C1F78-67EA-40E7-BD4C-985592F5265A}.Debug|x86.Build.0 = Debug|Any CPU + {D99C1F78-67EA-40E7-BD4C-985592F5265A}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D99C1F78-67EA-40E7-BD4C-985592F5265A}.Release|Any CPU.Build.0 = Release|Any CPU + {D99C1F78-67EA-40E7-BD4C-985592F5265A}.Release|x64.ActiveCfg = Release|Any CPU + {D99C1F78-67EA-40E7-BD4C-985592F5265A}.Release|x64.Build.0 = Release|Any CPU + {D99C1F78-67EA-40E7-BD4C-985592F5265A}.Release|x86.ActiveCfg = Release|Any CPU + {D99C1F78-67EA-40E7-BD4C-985592F5265A}.Release|x86.Build.0 = Release|Any CPU + {1CBC0B9C-A96B-4143-B70F-37C69229FFF2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + 
{1CBC0B9C-A96B-4143-B70F-37C69229FFF2}.Debug|Any CPU.Build.0 = Debug|Any CPU + {1CBC0B9C-A96B-4143-B70F-37C69229FFF2}.Debug|x64.ActiveCfg = Debug|Any CPU + {1CBC0B9C-A96B-4143-B70F-37C69229FFF2}.Debug|x64.Build.0 = Debug|Any CPU + {1CBC0B9C-A96B-4143-B70F-37C69229FFF2}.Debug|x86.ActiveCfg = Debug|Any CPU + {1CBC0B9C-A96B-4143-B70F-37C69229FFF2}.Debug|x86.Build.0 = Debug|Any CPU + {1CBC0B9C-A96B-4143-B70F-37C69229FFF2}.Release|Any CPU.ActiveCfg = Release|Any CPU + {1CBC0B9C-A96B-4143-B70F-37C69229FFF2}.Release|Any CPU.Build.0 = Release|Any CPU + {1CBC0B9C-A96B-4143-B70F-37C69229FFF2}.Release|x64.ActiveCfg = Release|Any CPU + {1CBC0B9C-A96B-4143-B70F-37C69229FFF2}.Release|x64.Build.0 = Release|Any CPU + {1CBC0B9C-A96B-4143-B70F-37C69229FFF2}.Release|x86.ActiveCfg = Release|Any CPU + {1CBC0B9C-A96B-4143-B70F-37C69229FFF2}.Release|x86.Build.0 = Release|Any CPU + {760E2855-31B3-4CCB-BACB-34B7196A59B8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {760E2855-31B3-4CCB-BACB-34B7196A59B8}.Debug|Any CPU.Build.0 = Debug|Any CPU + {760E2855-31B3-4CCB-BACB-34B7196A59B8}.Debug|x64.ActiveCfg = Debug|Any CPU + {760E2855-31B3-4CCB-BACB-34B7196A59B8}.Debug|x64.Build.0 = Debug|Any CPU + {760E2855-31B3-4CCB-BACB-34B7196A59B8}.Debug|x86.ActiveCfg = Debug|Any CPU + {760E2855-31B3-4CCB-BACB-34B7196A59B8}.Debug|x86.Build.0 = Debug|Any CPU + {760E2855-31B3-4CCB-BACB-34B7196A59B8}.Release|Any CPU.ActiveCfg = Release|Any CPU + {760E2855-31B3-4CCB-BACB-34B7196A59B8}.Release|Any CPU.Build.0 = Release|Any CPU + {760E2855-31B3-4CCB-BACB-34B7196A59B8}.Release|x64.ActiveCfg = Release|Any CPU + {760E2855-31B3-4CCB-BACB-34B7196A59B8}.Release|x64.Build.0 = Release|Any CPU + {760E2855-31B3-4CCB-BACB-34B7196A59B8}.Release|x86.ActiveCfg = Release|Any CPU + {760E2855-31B3-4CCB-BACB-34B7196A59B8}.Release|x86.Build.0 = Release|Any CPU + {3F688F21-7E31-4781-8995-9DD34276773F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {3F688F21-7E31-4781-8995-9DD34276773F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {3F688F21-7E31-4781-8995-9DD34276773F}.Debug|x64.ActiveCfg = Debug|Any CPU + {3F688F21-7E31-4781-8995-9DD34276773F}.Debug|x64.Build.0 = Debug|Any CPU + {3F688F21-7E31-4781-8995-9DD34276773F}.Debug|x86.ActiveCfg = Debug|Any CPU + {3F688F21-7E31-4781-8995-9DD34276773F}.Debug|x86.Build.0 = Debug|Any CPU + {3F688F21-7E31-4781-8995-9DD34276773F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {3F688F21-7E31-4781-8995-9DD34276773F}.Release|Any CPU.Build.0 = Release|Any CPU + {3F688F21-7E31-4781-8995-9DD34276773F}.Release|x64.ActiveCfg = Release|Any CPU + {3F688F21-7E31-4781-8995-9DD34276773F}.Release|x64.Build.0 = Release|Any CPU + {3F688F21-7E31-4781-8995-9DD34276773F}.Release|x86.ActiveCfg = Release|Any CPU + {3F688F21-7E31-4781-8995-9DD34276773F}.Release|x86.Build.0 = Release|Any CPU + {80AD7C4D-E4C6-4700-87AD-77B5698B338F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {80AD7C4D-E4C6-4700-87AD-77B5698B338F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {80AD7C4D-E4C6-4700-87AD-77B5698B338F}.Debug|x64.ActiveCfg = Debug|Any CPU + {80AD7C4D-E4C6-4700-87AD-77B5698B338F}.Debug|x64.Build.0 = Debug|Any CPU + {80AD7C4D-E4C6-4700-87AD-77B5698B338F}.Debug|x86.ActiveCfg = Debug|Any CPU + {80AD7C4D-E4C6-4700-87AD-77B5698B338F}.Debug|x86.Build.0 = Debug|Any CPU + {80AD7C4D-E4C6-4700-87AD-77B5698B338F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {80AD7C4D-E4C6-4700-87AD-77B5698B338F}.Release|Any CPU.Build.0 = Release|Any CPU + {80AD7C4D-E4C6-4700-87AD-77B5698B338F}.Release|x64.ActiveCfg = Release|Any CPU + {80AD7C4D-E4C6-4700-87AD-77B5698B338F}.Release|x64.Build.0 = 
Release|Any CPU + {80AD7C4D-E4C6-4700-87AD-77B5698B338F}.Release|x86.ActiveCfg = Release|Any CPU + {80AD7C4D-E4C6-4700-87AD-77B5698B338F}.Release|x86.Build.0 = Release|Any CPU + {60ABAB54-2EE9-4A16-A109-67F7B6F29184}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {60ABAB54-2EE9-4A16-A109-67F7B6F29184}.Debug|Any CPU.Build.0 = Debug|Any CPU + {60ABAB54-2EE9-4A16-A109-67F7B6F29184}.Debug|x64.ActiveCfg = Debug|Any CPU + {60ABAB54-2EE9-4A16-A109-67F7B6F29184}.Debug|x64.Build.0 = Debug|Any CPU + {60ABAB54-2EE9-4A16-A109-67F7B6F29184}.Debug|x86.ActiveCfg = Debug|Any CPU + {60ABAB54-2EE9-4A16-A109-67F7B6F29184}.Debug|x86.Build.0 = Debug|Any CPU + {60ABAB54-2EE9-4A16-A109-67F7B6F29184}.Release|Any CPU.ActiveCfg = Release|Any CPU + {60ABAB54-2EE9-4A16-A109-67F7B6F29184}.Release|Any CPU.Build.0 = Release|Any CPU + {60ABAB54-2EE9-4A16-A109-67F7B6F29184}.Release|x64.ActiveCfg = Release|Any CPU + {60ABAB54-2EE9-4A16-A109-67F7B6F29184}.Release|x64.Build.0 = Release|Any CPU + {60ABAB54-2EE9-4A16-A109-67F7B6F29184}.Release|x86.ActiveCfg = Release|Any CPU + {60ABAB54-2EE9-4A16-A109-67F7B6F29184}.Release|x86.Build.0 = Release|Any CPU + {D32C1D26-C9A1-4F2A-9DBA-DBF0353E3972}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D32C1D26-C9A1-4F2A-9DBA-DBF0353E3972}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D32C1D26-C9A1-4F2A-9DBA-DBF0353E3972}.Debug|x64.ActiveCfg = Debug|Any CPU + {D32C1D26-C9A1-4F2A-9DBA-DBF0353E3972}.Debug|x64.Build.0 = Debug|Any CPU + {D32C1D26-C9A1-4F2A-9DBA-DBF0353E3972}.Debug|x86.ActiveCfg = Debug|Any CPU + {D32C1D26-C9A1-4F2A-9DBA-DBF0353E3972}.Debug|x86.Build.0 = Debug|Any CPU + {D32C1D26-C9A1-4F2A-9DBA-DBF0353E3972}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D32C1D26-C9A1-4F2A-9DBA-DBF0353E3972}.Release|Any CPU.Build.0 = Release|Any CPU + {D32C1D26-C9A1-4F2A-9DBA-DBF0353E3972}.Release|x64.ActiveCfg = Release|Any CPU + {D32C1D26-C9A1-4F2A-9DBA-DBF0353E3972}.Release|x64.Build.0 = Release|Any CPU + {D32C1D26-C9A1-4F2A-9DBA-DBF0353E3972}.Release|x86.ActiveCfg = Release|Any CPU + {D32C1D26-C9A1-4F2A-9DBA-DBF0353E3972}.Release|x86.Build.0 = Release|Any CPU + {5CA4E28E-6305-4B21-AD2E-0DF24D47A65B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {5CA4E28E-6305-4B21-AD2E-0DF24D47A65B}.Debug|Any CPU.Build.0 = Debug|Any CPU + {5CA4E28E-6305-4B21-AD2E-0DF24D47A65B}.Debug|x64.ActiveCfg = Debug|Any CPU + {5CA4E28E-6305-4B21-AD2E-0DF24D47A65B}.Debug|x64.Build.0 = Debug|Any CPU + {5CA4E28E-6305-4B21-AD2E-0DF24D47A65B}.Debug|x86.ActiveCfg = Debug|Any CPU + {5CA4E28E-6305-4B21-AD2E-0DF24D47A65B}.Debug|x86.Build.0 = Debug|Any CPU + {5CA4E28E-6305-4B21-AD2E-0DF24D47A65B}.Release|Any CPU.ActiveCfg = Release|Any CPU + {5CA4E28E-6305-4B21-AD2E-0DF24D47A65B}.Release|Any CPU.Build.0 = Release|Any CPU + {5CA4E28E-6305-4B21-AD2E-0DF24D47A65B}.Release|x64.ActiveCfg = Release|Any CPU + {5CA4E28E-6305-4B21-AD2E-0DF24D47A65B}.Release|x64.Build.0 = Release|Any CPU + {5CA4E28E-6305-4B21-AD2E-0DF24D47A65B}.Release|x86.ActiveCfg = Release|Any CPU + {5CA4E28E-6305-4B21-AD2E-0DF24D47A65B}.Release|x86.Build.0 = Release|Any CPU + {05475C0A-C225-4F07-A3C7-9E17E660042E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {05475C0A-C225-4F07-A3C7-9E17E660042E}.Debug|Any CPU.Build.0 = Debug|Any CPU + {05475C0A-C225-4F07-A3C7-9E17E660042E}.Debug|x64.ActiveCfg = Debug|Any CPU + {05475C0A-C225-4F07-A3C7-9E17E660042E}.Debug|x64.Build.0 = Debug|Any CPU + {05475C0A-C225-4F07-A3C7-9E17E660042E}.Debug|x86.ActiveCfg = Debug|Any CPU + {05475C0A-C225-4F07-A3C7-9E17E660042E}.Debug|x86.Build.0 = Debug|Any CPU + {05475C0A-C225-4F07-A3C7-9E17E660042E}.Release|Any 
CPU.ActiveCfg = Release|Any CPU + {05475C0A-C225-4F07-A3C7-9E17E660042E}.Release|Any CPU.Build.0 = Release|Any CPU + {05475C0A-C225-4F07-A3C7-9E17E660042E}.Release|x64.ActiveCfg = Release|Any CPU + {05475C0A-C225-4F07-A3C7-9E17E660042E}.Release|x64.Build.0 = Release|Any CPU + {05475C0A-C225-4F07-A3C7-9E17E660042E}.Release|x86.ActiveCfg = Release|Any CPU + {05475C0A-C225-4F07-A3C7-9E17E660042E}.Release|x86.Build.0 = Release|Any CPU + {BA47D456-4657-4C86-A665-21293E3AC47F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {BA47D456-4657-4C86-A665-21293E3AC47F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {BA47D456-4657-4C86-A665-21293E3AC47F}.Debug|x64.ActiveCfg = Debug|Any CPU + {BA47D456-4657-4C86-A665-21293E3AC47F}.Debug|x64.Build.0 = Debug|Any CPU + {BA47D456-4657-4C86-A665-21293E3AC47F}.Debug|x86.ActiveCfg = Debug|Any CPU + {BA47D456-4657-4C86-A665-21293E3AC47F}.Debug|x86.Build.0 = Debug|Any CPU + {BA47D456-4657-4C86-A665-21293E3AC47F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {BA47D456-4657-4C86-A665-21293E3AC47F}.Release|Any CPU.Build.0 = Release|Any CPU + {BA47D456-4657-4C86-A665-21293E3AC47F}.Release|x64.ActiveCfg = Release|Any CPU + {BA47D456-4657-4C86-A665-21293E3AC47F}.Release|x64.Build.0 = Release|Any CPU + {BA47D456-4657-4C86-A665-21293E3AC47F}.Release|x86.ActiveCfg = Release|Any CPU + {BA47D456-4657-4C86-A665-21293E3AC47F}.Release|x86.Build.0 = Release|Any CPU + {49EF86AC-1CC2-4A24-8637-C5151E23DF9D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {49EF86AC-1CC2-4A24-8637-C5151E23DF9D}.Debug|Any CPU.Build.0 = Debug|Any CPU + {49EF86AC-1CC2-4A24-8637-C5151E23DF9D}.Debug|x64.ActiveCfg = Debug|Any CPU + {49EF86AC-1CC2-4A24-8637-C5151E23DF9D}.Debug|x64.Build.0 = Debug|Any CPU + {49EF86AC-1CC2-4A24-8637-C5151E23DF9D}.Debug|x86.ActiveCfg = Debug|Any CPU + {49EF86AC-1CC2-4A24-8637-C5151E23DF9D}.Debug|x86.Build.0 = Debug|Any CPU + {49EF86AC-1CC2-4A24-8637-C5151E23DF9D}.Release|Any CPU.ActiveCfg = Release|Any CPU + {49EF86AC-1CC2-4A24-8637-C5151E23DF9D}.Release|Any CPU.Build.0 = Release|Any CPU + {49EF86AC-1CC2-4A24-8637-C5151E23DF9D}.Release|x64.ActiveCfg = Release|Any CPU + {49EF86AC-1CC2-4A24-8637-C5151E23DF9D}.Release|x64.Build.0 = Release|Any CPU + {49EF86AC-1CC2-4A24-8637-C5151E23DF9D}.Release|x86.ActiveCfg = Release|Any CPU + {49EF86AC-1CC2-4A24-8637-C5151E23DF9D}.Release|x86.Build.0 = Release|Any CPU + {C22333B3-D132-4960-A490-6BEF1EB1C917}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C22333B3-D132-4960-A490-6BEF1EB1C917}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C22333B3-D132-4960-A490-6BEF1EB1C917}.Debug|x64.ActiveCfg = Debug|Any CPU + {C22333B3-D132-4960-A490-6BEF1EB1C917}.Debug|x64.Build.0 = Debug|Any CPU + {C22333B3-D132-4960-A490-6BEF1EB1C917}.Debug|x86.ActiveCfg = Debug|Any CPU + {C22333B3-D132-4960-A490-6BEF1EB1C917}.Debug|x86.Build.0 = Debug|Any CPU + {C22333B3-D132-4960-A490-6BEF1EB1C917}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C22333B3-D132-4960-A490-6BEF1EB1C917}.Release|Any CPU.Build.0 = Release|Any CPU + {C22333B3-D132-4960-A490-6BEF1EB1C917}.Release|x64.ActiveCfg = Release|Any CPU + {C22333B3-D132-4960-A490-6BEF1EB1C917}.Release|x64.Build.0 = Release|Any CPU + {C22333B3-D132-4960-A490-6BEF1EB1C917}.Release|x86.ActiveCfg = Release|Any CPU + {C22333B3-D132-4960-A490-6BEF1EB1C917}.Release|x86.Build.0 = Release|Any CPU + {B8B15A8D-F647-41AE-A55F-A283A47E97C4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B8B15A8D-F647-41AE-A55F-A283A47E97C4}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B8B15A8D-F647-41AE-A55F-A283A47E97C4}.Debug|x64.ActiveCfg = Debug|Any CPU + 
{B8B15A8D-F647-41AE-A55F-A283A47E97C4}.Debug|x64.Build.0 = Debug|Any CPU + {B8B15A8D-F647-41AE-A55F-A283A47E97C4}.Debug|x86.ActiveCfg = Debug|Any CPU + {B8B15A8D-F647-41AE-A55F-A283A47E97C4}.Debug|x86.Build.0 = Debug|Any CPU + {B8B15A8D-F647-41AE-A55F-A283A47E97C4}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B8B15A8D-F647-41AE-A55F-A283A47E97C4}.Release|Any CPU.Build.0 = Release|Any CPU + {B8B15A8D-F647-41AE-A55F-A283A47E97C4}.Release|x64.ActiveCfg = Release|Any CPU + {B8B15A8D-F647-41AE-A55F-A283A47E97C4}.Release|x64.Build.0 = Release|Any CPU + {B8B15A8D-F647-41AE-A55F-A283A47E97C4}.Release|x86.ActiveCfg = Release|Any CPU + {B8B15A8D-F647-41AE-A55F-A283A47E97C4}.Release|x86.Build.0 = Release|Any CPU + {CBE6E3D8-230C-4513-B98F-99D82B83B9F7}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {CBE6E3D8-230C-4513-B98F-99D82B83B9F7}.Debug|Any CPU.Build.0 = Debug|Any CPU + {CBE6E3D8-230C-4513-B98F-99D82B83B9F7}.Debug|x64.ActiveCfg = Debug|Any CPU + {CBE6E3D8-230C-4513-B98F-99D82B83B9F7}.Debug|x64.Build.0 = Debug|Any CPU + {CBE6E3D8-230C-4513-B98F-99D82B83B9F7}.Debug|x86.ActiveCfg = Debug|Any CPU + {CBE6E3D8-230C-4513-B98F-99D82B83B9F7}.Debug|x86.Build.0 = Debug|Any CPU + {CBE6E3D8-230C-4513-B98F-99D82B83B9F7}.Release|Any CPU.ActiveCfg = Release|Any CPU + {CBE6E3D8-230C-4513-B98F-99D82B83B9F7}.Release|Any CPU.Build.0 = Release|Any CPU + {CBE6E3D8-230C-4513-B98F-99D82B83B9F7}.Release|x64.ActiveCfg = Release|Any CPU + {CBE6E3D8-230C-4513-B98F-99D82B83B9F7}.Release|x64.Build.0 = Release|Any CPU + {CBE6E3D8-230C-4513-B98F-99D82B83B9F7}.Release|x86.ActiveCfg = Release|Any CPU + {CBE6E3D8-230C-4513-B98F-99D82B83B9F7}.Release|x86.Build.0 = Release|Any CPU + {821C7F88-B775-4D3C-8D89-850B6C34E818}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {821C7F88-B775-4D3C-8D89-850B6C34E818}.Debug|Any CPU.Build.0 = Debug|Any CPU + {821C7F88-B775-4D3C-8D89-850B6C34E818}.Debug|x64.ActiveCfg = Debug|Any CPU + {821C7F88-B775-4D3C-8D89-850B6C34E818}.Debug|x64.Build.0 = Debug|Any CPU + {821C7F88-B775-4D3C-8D89-850B6C34E818}.Debug|x86.ActiveCfg = Debug|Any CPU + {821C7F88-B775-4D3C-8D89-850B6C34E818}.Debug|x86.Build.0 = Debug|Any CPU + {821C7F88-B775-4D3C-8D89-850B6C34E818}.Release|Any CPU.ActiveCfg = Release|Any CPU + {821C7F88-B775-4D3C-8D89-850B6C34E818}.Release|Any CPU.Build.0 = Release|Any CPU + {821C7F88-B775-4D3C-8D89-850B6C34E818}.Release|x64.ActiveCfg = Release|Any CPU + {821C7F88-B775-4D3C-8D89-850B6C34E818}.Release|x64.Build.0 = Release|Any CPU + {821C7F88-B775-4D3C-8D89-850B6C34E818}.Release|x86.ActiveCfg = Release|Any CPU + {821C7F88-B775-4D3C-8D89-850B6C34E818}.Release|x86.Build.0 = Release|Any CPU + {3ABEAD26-B056-45CC-8F72-F40C8B8DBCBC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {3ABEAD26-B056-45CC-8F72-F40C8B8DBCBC}.Debug|Any CPU.Build.0 = Debug|Any CPU + {3ABEAD26-B056-45CC-8F72-F40C8B8DBCBC}.Debug|x64.ActiveCfg = Debug|Any CPU + {3ABEAD26-B056-45CC-8F72-F40C8B8DBCBC}.Debug|x64.Build.0 = Debug|Any CPU + {3ABEAD26-B056-45CC-8F72-F40C8B8DBCBC}.Debug|x86.ActiveCfg = Debug|Any CPU + {3ABEAD26-B056-45CC-8F72-F40C8B8DBCBC}.Debug|x86.Build.0 = Debug|Any CPU + {3ABEAD26-B056-45CC-8F72-F40C8B8DBCBC}.Release|Any CPU.ActiveCfg = Release|Any CPU + {3ABEAD26-B056-45CC-8F72-F40C8B8DBCBC}.Release|Any CPU.Build.0 = Release|Any CPU + {3ABEAD26-B056-45CC-8F72-F40C8B8DBCBC}.Release|x64.ActiveCfg = Release|Any CPU + {3ABEAD26-B056-45CC-8F72-F40C8B8DBCBC}.Release|x64.Build.0 = Release|Any CPU + {3ABEAD26-B056-45CC-8F72-F40C8B8DBCBC}.Release|x86.ActiveCfg = Release|Any CPU + {3ABEAD26-B056-45CC-8F72-F40C8B8DBCBC}.Release|x86.Build.0 = 
Release|Any CPU + {3C500ECB-5422-4FFB-BD3D-48A850763D31}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {3C500ECB-5422-4FFB-BD3D-48A850763D31}.Debug|Any CPU.Build.0 = Debug|Any CPU + {3C500ECB-5422-4FFB-BD3D-48A850763D31}.Debug|x64.ActiveCfg = Debug|Any CPU + {3C500ECB-5422-4FFB-BD3D-48A850763D31}.Debug|x64.Build.0 = Debug|Any CPU + {3C500ECB-5422-4FFB-BD3D-48A850763D31}.Debug|x86.ActiveCfg = Debug|Any CPU + {3C500ECB-5422-4FFB-BD3D-48A850763D31}.Debug|x86.Build.0 = Debug|Any CPU + {3C500ECB-5422-4FFB-BD3D-48A850763D31}.Release|Any CPU.ActiveCfg = Release|Any CPU + {3C500ECB-5422-4FFB-BD3D-48A850763D31}.Release|Any CPU.Build.0 = Release|Any CPU + {3C500ECB-5422-4FFB-BD3D-48A850763D31}.Release|x64.ActiveCfg = Release|Any CPU + {3C500ECB-5422-4FFB-BD3D-48A850763D31}.Release|x64.Build.0 = Release|Any CPU + {3C500ECB-5422-4FFB-BD3D-48A850763D31}.Release|x86.ActiveCfg = Release|Any CPU + {3C500ECB-5422-4FFB-BD3D-48A850763D31}.Release|x86.Build.0 = Release|Any CPU + {D851E54A-5A44-4F74-9FDF-A2C32CACF651}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D851E54A-5A44-4F74-9FDF-A2C32CACF651}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D851E54A-5A44-4F74-9FDF-A2C32CACF651}.Debug|x64.ActiveCfg = Debug|Any CPU + {D851E54A-5A44-4F74-9FDF-A2C32CACF651}.Debug|x64.Build.0 = Debug|Any CPU + {D851E54A-5A44-4F74-9FDF-A2C32CACF651}.Debug|x86.ActiveCfg = Debug|Any CPU + {D851E54A-5A44-4F74-9FDF-A2C32CACF651}.Debug|x86.Build.0 = Debug|Any CPU + {D851E54A-5A44-4F74-9FDF-A2C32CACF651}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D851E54A-5A44-4F74-9FDF-A2C32CACF651}.Release|Any CPU.Build.0 = Release|Any CPU + {D851E54A-5A44-4F74-9FDF-A2C32CACF651}.Release|x64.ActiveCfg = Release|Any CPU + {D851E54A-5A44-4F74-9FDF-A2C32CACF651}.Release|x64.Build.0 = Release|Any CPU + {D851E54A-5A44-4F74-9FDF-A2C32CACF651}.Release|x86.ActiveCfg = Release|Any CPU + {D851E54A-5A44-4F74-9FDF-A2C32CACF651}.Release|x86.Build.0 = Release|Any CPU + {866807B8-8E68-417C-8148-6450DEA68012}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {866807B8-8E68-417C-8148-6450DEA68012}.Debug|Any CPU.Build.0 = Debug|Any CPU + {866807B8-8E68-417C-8148-6450DEA68012}.Debug|x64.ActiveCfg = Debug|Any CPU + {866807B8-8E68-417C-8148-6450DEA68012}.Debug|x64.Build.0 = Debug|Any CPU + {866807B8-8E68-417C-8148-6450DEA68012}.Debug|x86.ActiveCfg = Debug|Any CPU + {866807B8-8E68-417C-8148-6450DEA68012}.Debug|x86.Build.0 = Debug|Any CPU + {866807B8-8E68-417C-8148-6450DEA68012}.Release|Any CPU.ActiveCfg = Release|Any CPU + {866807B8-8E68-417C-8148-6450DEA68012}.Release|Any CPU.Build.0 = Release|Any CPU + {866807B8-8E68-417C-8148-6450DEA68012}.Release|x64.ActiveCfg = Release|Any CPU + {866807B8-8E68-417C-8148-6450DEA68012}.Release|x64.Build.0 = Release|Any CPU + {866807B8-8E68-417C-8148-6450DEA68012}.Release|x86.ActiveCfg = Release|Any CPU + {866807B8-8E68-417C-8148-6450DEA68012}.Release|x86.Build.0 = Release|Any CPU + {20BE41BD-9C32-45B5-882A-C01491979633}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {20BE41BD-9C32-45B5-882A-C01491979633}.Debug|Any CPU.Build.0 = Debug|Any CPU + {20BE41BD-9C32-45B5-882A-C01491979633}.Debug|x64.ActiveCfg = Debug|Any CPU + {20BE41BD-9C32-45B5-882A-C01491979633}.Debug|x64.Build.0 = Debug|Any CPU + {20BE41BD-9C32-45B5-882A-C01491979633}.Debug|x86.ActiveCfg = Debug|Any CPU + {20BE41BD-9C32-45B5-882A-C01491979633}.Debug|x86.Build.0 = Debug|Any CPU + {20BE41BD-9C32-45B5-882A-C01491979633}.Release|Any CPU.ActiveCfg = Release|Any CPU + {20BE41BD-9C32-45B5-882A-C01491979633}.Release|Any CPU.Build.0 = Release|Any CPU + 
{20BE41BD-9C32-45B5-882A-C01491979633}.Release|x64.ActiveCfg = Release|Any CPU + {20BE41BD-9C32-45B5-882A-C01491979633}.Release|x64.Build.0 = Release|Any CPU + {20BE41BD-9C32-45B5-882A-C01491979633}.Release|x86.ActiveCfg = Release|Any CPU + {20BE41BD-9C32-45B5-882A-C01491979633}.Release|x86.Build.0 = Release|Any CPU + {9E19FDB4-121A-4EF4-8A73-DFCDF04B19ED}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {9E19FDB4-121A-4EF4-8A73-DFCDF04B19ED}.Debug|Any CPU.Build.0 = Debug|Any CPU + {9E19FDB4-121A-4EF4-8A73-DFCDF04B19ED}.Debug|x64.ActiveCfg = Debug|Any CPU + {9E19FDB4-121A-4EF4-8A73-DFCDF04B19ED}.Debug|x64.Build.0 = Debug|Any CPU + {9E19FDB4-121A-4EF4-8A73-DFCDF04B19ED}.Debug|x86.ActiveCfg = Debug|Any CPU + {9E19FDB4-121A-4EF4-8A73-DFCDF04B19ED}.Debug|x86.Build.0 = Debug|Any CPU + {9E19FDB4-121A-4EF4-8A73-DFCDF04B19ED}.Release|Any CPU.ActiveCfg = Release|Any CPU + {9E19FDB4-121A-4EF4-8A73-DFCDF04B19ED}.Release|Any CPU.Build.0 = Release|Any CPU + {9E19FDB4-121A-4EF4-8A73-DFCDF04B19ED}.Release|x64.ActiveCfg = Release|Any CPU + {9E19FDB4-121A-4EF4-8A73-DFCDF04B19ED}.Release|x64.Build.0 = Release|Any CPU + {9E19FDB4-121A-4EF4-8A73-DFCDF04B19ED}.Release|x86.ActiveCfg = Release|Any CPU + {9E19FDB4-121A-4EF4-8A73-DFCDF04B19ED}.Release|x86.Build.0 = Release|Any CPU + {7C3A6012-6FC8-46A9-9966-1AC373614C41}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7C3A6012-6FC8-46A9-9966-1AC373614C41}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7C3A6012-6FC8-46A9-9966-1AC373614C41}.Debug|x64.ActiveCfg = Debug|Any CPU + {7C3A6012-6FC8-46A9-9966-1AC373614C41}.Debug|x64.Build.0 = Debug|Any CPU + {7C3A6012-6FC8-46A9-9966-1AC373614C41}.Debug|x86.ActiveCfg = Debug|Any CPU + {7C3A6012-6FC8-46A9-9966-1AC373614C41}.Debug|x86.Build.0 = Debug|Any CPU + {7C3A6012-6FC8-46A9-9966-1AC373614C41}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7C3A6012-6FC8-46A9-9966-1AC373614C41}.Release|Any CPU.Build.0 = Release|Any CPU + {7C3A6012-6FC8-46A9-9966-1AC373614C41}.Release|x64.ActiveCfg = Release|Any CPU + {7C3A6012-6FC8-46A9-9966-1AC373614C41}.Release|x64.Build.0 = Release|Any CPU + {7C3A6012-6FC8-46A9-9966-1AC373614C41}.Release|x86.ActiveCfg = Release|Any CPU + {7C3A6012-6FC8-46A9-9966-1AC373614C41}.Release|x86.Build.0 = Release|Any CPU + {BC38594B-0B84-4657-9F7B-F2A0FC810F04}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {BC38594B-0B84-4657-9F7B-F2A0FC810F04}.Debug|Any CPU.Build.0 = Debug|Any CPU + {BC38594B-0B84-4657-9F7B-F2A0FC810F04}.Debug|x64.ActiveCfg = Debug|Any CPU + {BC38594B-0B84-4657-9F7B-F2A0FC810F04}.Debug|x64.Build.0 = Debug|Any CPU + {BC38594B-0B84-4657-9F7B-F2A0FC810F04}.Debug|x86.ActiveCfg = Debug|Any CPU + {BC38594B-0B84-4657-9F7B-F2A0FC810F04}.Debug|x86.Build.0 = Debug|Any CPU + {BC38594B-0B84-4657-9F7B-F2A0FC810F04}.Release|Any CPU.ActiveCfg = Release|Any CPU + {BC38594B-0B84-4657-9F7B-F2A0FC810F04}.Release|Any CPU.Build.0 = Release|Any CPU + {BC38594B-0B84-4657-9F7B-F2A0FC810F04}.Release|x64.ActiveCfg = Release|Any CPU + {BC38594B-0B84-4657-9F7B-F2A0FC810F04}.Release|x64.Build.0 = Release|Any CPU + {BC38594B-0B84-4657-9F7B-F2A0FC810F04}.Release|x86.ActiveCfg = Release|Any CPU + {BC38594B-0B84-4657-9F7B-F2A0FC810F04}.Release|x86.Build.0 = Release|Any CPU + {20E0774F-86D5-4CD0-B636-E5212074FDE8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {20E0774F-86D5-4CD0-B636-E5212074FDE8}.Debug|Any CPU.Build.0 = Debug|Any CPU + {20E0774F-86D5-4CD0-B636-E5212074FDE8}.Debug|x64.ActiveCfg = Debug|Any CPU + {20E0774F-86D5-4CD0-B636-E5212074FDE8}.Debug|x64.Build.0 = Debug|Any CPU + {20E0774F-86D5-4CD0-B636-E5212074FDE8}.Debug|x86.ActiveCfg = 
Debug|Any CPU + {20E0774F-86D5-4CD0-B636-E5212074FDE8}.Debug|x86.Build.0 = Debug|Any CPU + {20E0774F-86D5-4CD0-B636-E5212074FDE8}.Release|Any CPU.ActiveCfg = Release|Any CPU + {20E0774F-86D5-4CD0-B636-E5212074FDE8}.Release|Any CPU.Build.0 = Release|Any CPU + {20E0774F-86D5-4CD0-B636-E5212074FDE8}.Release|x64.ActiveCfg = Release|Any CPU + {20E0774F-86D5-4CD0-B636-E5212074FDE8}.Release|x64.Build.0 = Release|Any CPU + {20E0774F-86D5-4CD0-B636-E5212074FDE8}.Release|x86.ActiveCfg = Release|Any CPU + {20E0774F-86D5-4CD0-B636-E5212074FDE8}.Release|x86.Build.0 = Release|Any CPU + {FE668D8D-AB46-41F4-A82F-8A3330C4D152}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {FE668D8D-AB46-41F4-A82F-8A3330C4D152}.Debug|Any CPU.Build.0 = Debug|Any CPU + {FE668D8D-AB46-41F4-A82F-8A3330C4D152}.Debug|x64.ActiveCfg = Debug|Any CPU + {FE668D8D-AB46-41F4-A82F-8A3330C4D152}.Debug|x64.Build.0 = Debug|Any CPU + {FE668D8D-AB46-41F4-A82F-8A3330C4D152}.Debug|x86.ActiveCfg = Debug|Any CPU + {FE668D8D-AB46-41F4-A82F-8A3330C4D152}.Debug|x86.Build.0 = Debug|Any CPU + {FE668D8D-AB46-41F4-A82F-8A3330C4D152}.Release|Any CPU.ActiveCfg = Release|Any CPU + {FE668D8D-AB46-41F4-A82F-8A3330C4D152}.Release|Any CPU.Build.0 = Release|Any CPU + {FE668D8D-AB46-41F4-A82F-8A3330C4D152}.Release|x64.ActiveCfg = Release|Any CPU + {FE668D8D-AB46-41F4-A82F-8A3330C4D152}.Release|x64.Build.0 = Release|Any CPU + {FE668D8D-AB46-41F4-A82F-8A3330C4D152}.Release|x86.ActiveCfg = Release|Any CPU + {FE668D8D-AB46-41F4-A82F-8A3330C4D152}.Release|x86.Build.0 = Release|Any CPU + {548C296A-476B-433D-9552-923648BDFA97}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {548C296A-476B-433D-9552-923648BDFA97}.Debug|Any CPU.Build.0 = Debug|Any CPU + {548C296A-476B-433D-9552-923648BDFA97}.Debug|x64.ActiveCfg = Debug|Any CPU + {548C296A-476B-433D-9552-923648BDFA97}.Debug|x64.Build.0 = Debug|Any CPU + {548C296A-476B-433D-9552-923648BDFA97}.Debug|x86.ActiveCfg = Debug|Any CPU + {548C296A-476B-433D-9552-923648BDFA97}.Debug|x86.Build.0 = Debug|Any CPU + {548C296A-476B-433D-9552-923648BDFA97}.Release|Any CPU.ActiveCfg = Release|Any CPU + {548C296A-476B-433D-9552-923648BDFA97}.Release|Any CPU.Build.0 = Release|Any CPU + {548C296A-476B-433D-9552-923648BDFA97}.Release|x64.ActiveCfg = Release|Any CPU + {548C296A-476B-433D-9552-923648BDFA97}.Release|x64.Build.0 = Release|Any CPU + {548C296A-476B-433D-9552-923648BDFA97}.Release|x86.ActiveCfg = Release|Any CPU + {548C296A-476B-433D-9552-923648BDFA97}.Release|x86.Build.0 = Release|Any CPU + {3510DF3E-E822-4FB1-8C65-ED6DBAD223D4}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {3510DF3E-E822-4FB1-8C65-ED6DBAD223D4}.Debug|Any CPU.Build.0 = Debug|Any CPU + {3510DF3E-E822-4FB1-8C65-ED6DBAD223D4}.Debug|x64.ActiveCfg = Debug|Any CPU + {3510DF3E-E822-4FB1-8C65-ED6DBAD223D4}.Debug|x64.Build.0 = Debug|Any CPU + {3510DF3E-E822-4FB1-8C65-ED6DBAD223D4}.Debug|x86.ActiveCfg = Debug|Any CPU + {3510DF3E-E822-4FB1-8C65-ED6DBAD223D4}.Debug|x86.Build.0 = Debug|Any CPU + {3510DF3E-E822-4FB1-8C65-ED6DBAD223D4}.Release|Any CPU.ActiveCfg = Release|Any CPU + {3510DF3E-E822-4FB1-8C65-ED6DBAD223D4}.Release|Any CPU.Build.0 = Release|Any CPU + {3510DF3E-E822-4FB1-8C65-ED6DBAD223D4}.Release|x64.ActiveCfg = Release|Any CPU + {3510DF3E-E822-4FB1-8C65-ED6DBAD223D4}.Release|x64.Build.0 = Release|Any CPU + {3510DF3E-E822-4FB1-8C65-ED6DBAD223D4}.Release|x86.ActiveCfg = Release|Any CPU + {3510DF3E-E822-4FB1-8C65-ED6DBAD223D4}.Release|x86.Build.0 = Release|Any CPU + {C733F161-FCED-4D21-BC83-5CC079E93547}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + 
{C733F161-FCED-4D21-BC83-5CC079E93547}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C733F161-FCED-4D21-BC83-5CC079E93547}.Debug|x64.ActiveCfg = Debug|Any CPU + {C733F161-FCED-4D21-BC83-5CC079E93547}.Debug|x64.Build.0 = Debug|Any CPU + {C733F161-FCED-4D21-BC83-5CC079E93547}.Debug|x86.ActiveCfg = Debug|Any CPU + {C733F161-FCED-4D21-BC83-5CC079E93547}.Debug|x86.Build.0 = Debug|Any CPU + {C733F161-FCED-4D21-BC83-5CC079E93547}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C733F161-FCED-4D21-BC83-5CC079E93547}.Release|Any CPU.Build.0 = Release|Any CPU + {C733F161-FCED-4D21-BC83-5CC079E93547}.Release|x64.ActiveCfg = Release|Any CPU + {C733F161-FCED-4D21-BC83-5CC079E93547}.Release|x64.Build.0 = Release|Any CPU + {C733F161-FCED-4D21-BC83-5CC079E93547}.Release|x86.ActiveCfg = Release|Any CPU + {C733F161-FCED-4D21-BC83-5CC079E93547}.Release|x86.Build.0 = Release|Any CPU + {76E1E74F-41C1-4E24-85EA-ED13F28B80B1}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {76E1E74F-41C1-4E24-85EA-ED13F28B80B1}.Debug|Any CPU.Build.0 = Debug|Any CPU + {76E1E74F-41C1-4E24-85EA-ED13F28B80B1}.Debug|x64.ActiveCfg = Debug|Any CPU + {76E1E74F-41C1-4E24-85EA-ED13F28B80B1}.Debug|x64.Build.0 = Debug|Any CPU + {76E1E74F-41C1-4E24-85EA-ED13F28B80B1}.Debug|x86.ActiveCfg = Debug|Any CPU + {76E1E74F-41C1-4E24-85EA-ED13F28B80B1}.Debug|x86.Build.0 = Debug|Any CPU + {76E1E74F-41C1-4E24-85EA-ED13F28B80B1}.Release|Any CPU.ActiveCfg = Release|Any CPU + {76E1E74F-41C1-4E24-85EA-ED13F28B80B1}.Release|Any CPU.Build.0 = Release|Any CPU + {76E1E74F-41C1-4E24-85EA-ED13F28B80B1}.Release|x64.ActiveCfg = Release|Any CPU + {76E1E74F-41C1-4E24-85EA-ED13F28B80B1}.Release|x64.Build.0 = Release|Any CPU + {76E1E74F-41C1-4E24-85EA-ED13F28B80B1}.Release|x86.ActiveCfg = Release|Any CPU + {76E1E74F-41C1-4E24-85EA-ED13F28B80B1}.Release|x86.Build.0 = Release|Any CPU + {EC73D558-0472-49E2-B46E-D26F9686AA9C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {EC73D558-0472-49E2-B46E-D26F9686AA9C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {EC73D558-0472-49E2-B46E-D26F9686AA9C}.Debug|x64.ActiveCfg = Debug|Any CPU + {EC73D558-0472-49E2-B46E-D26F9686AA9C}.Debug|x64.Build.0 = Debug|Any CPU + {EC73D558-0472-49E2-B46E-D26F9686AA9C}.Debug|x86.ActiveCfg = Debug|Any CPU + {EC73D558-0472-49E2-B46E-D26F9686AA9C}.Debug|x86.Build.0 = Debug|Any CPU + {EC73D558-0472-49E2-B46E-D26F9686AA9C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {EC73D558-0472-49E2-B46E-D26F9686AA9C}.Release|Any CPU.Build.0 = Release|Any CPU + {EC73D558-0472-49E2-B46E-D26F9686AA9C}.Release|x64.ActiveCfg = Release|Any CPU + {EC73D558-0472-49E2-B46E-D26F9686AA9C}.Release|x64.Build.0 = Release|Any CPU + {EC73D558-0472-49E2-B46E-D26F9686AA9C}.Release|x86.ActiveCfg = Release|Any CPU + {EC73D558-0472-49E2-B46E-D26F9686AA9C}.Release|x86.Build.0 = Release|Any CPU + {1E532EAB-8DB7-42DF-A9BD-BBBA08C8148F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {1E532EAB-8DB7-42DF-A9BD-BBBA08C8148F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {1E532EAB-8DB7-42DF-A9BD-BBBA08C8148F}.Debug|x64.ActiveCfg = Debug|Any CPU + {1E532EAB-8DB7-42DF-A9BD-BBBA08C8148F}.Debug|x64.Build.0 = Debug|Any CPU + {1E532EAB-8DB7-42DF-A9BD-BBBA08C8148F}.Debug|x86.ActiveCfg = Debug|Any CPU + {1E532EAB-8DB7-42DF-A9BD-BBBA08C8148F}.Debug|x86.Build.0 = Debug|Any CPU + {1E532EAB-8DB7-42DF-A9BD-BBBA08C8148F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {1E532EAB-8DB7-42DF-A9BD-BBBA08C8148F}.Release|Any CPU.Build.0 = Release|Any CPU + {1E532EAB-8DB7-42DF-A9BD-BBBA08C8148F}.Release|x64.ActiveCfg = Release|Any CPU + {1E532EAB-8DB7-42DF-A9BD-BBBA08C8148F}.Release|x64.Build.0 = 
Release|Any CPU + {1E532EAB-8DB7-42DF-A9BD-BBBA08C8148F}.Release|x86.ActiveCfg = Release|Any CPU + {1E532EAB-8DB7-42DF-A9BD-BBBA08C8148F}.Release|x86.Build.0 = Release|Any CPU + {D8B22C17-28E9-4059-97C5-4AC4600A2BD5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D8B22C17-28E9-4059-97C5-4AC4600A2BD5}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D8B22C17-28E9-4059-97C5-4AC4600A2BD5}.Debug|x64.ActiveCfg = Debug|Any CPU + {D8B22C17-28E9-4059-97C5-4AC4600A2BD5}.Debug|x64.Build.0 = Debug|Any CPU + {D8B22C17-28E9-4059-97C5-4AC4600A2BD5}.Debug|x86.ActiveCfg = Debug|Any CPU + {D8B22C17-28E9-4059-97C5-4AC4600A2BD5}.Debug|x86.Build.0 = Debug|Any CPU + {D8B22C17-28E9-4059-97C5-4AC4600A2BD5}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D8B22C17-28E9-4059-97C5-4AC4600A2BD5}.Release|Any CPU.Build.0 = Release|Any CPU + {D8B22C17-28E9-4059-97C5-4AC4600A2BD5}.Release|x64.ActiveCfg = Release|Any CPU + {D8B22C17-28E9-4059-97C5-4AC4600A2BD5}.Release|x64.Build.0 = Release|Any CPU + {D8B22C17-28E9-4059-97C5-4AC4600A2BD5}.Release|x86.ActiveCfg = Release|Any CPU + {D8B22C17-28E9-4059-97C5-4AC4600A2BD5}.Release|x86.Build.0 = Release|Any CPU + {6BE16682-4FB9-49C7-A2B3-ECB4EC5EF8BD}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {6BE16682-4FB9-49C7-A2B3-ECB4EC5EF8BD}.Debug|Any CPU.Build.0 = Debug|Any CPU + {6BE16682-4FB9-49C7-A2B3-ECB4EC5EF8BD}.Debug|x64.ActiveCfg = Debug|Any CPU + {6BE16682-4FB9-49C7-A2B3-ECB4EC5EF8BD}.Debug|x64.Build.0 = Debug|Any CPU + {6BE16682-4FB9-49C7-A2B3-ECB4EC5EF8BD}.Debug|x86.ActiveCfg = Debug|Any CPU + {6BE16682-4FB9-49C7-A2B3-ECB4EC5EF8BD}.Debug|x86.Build.0 = Debug|Any CPU + {6BE16682-4FB9-49C7-A2B3-ECB4EC5EF8BD}.Release|Any CPU.ActiveCfg = Release|Any CPU + {6BE16682-4FB9-49C7-A2B3-ECB4EC5EF8BD}.Release|Any CPU.Build.0 = Release|Any CPU + {6BE16682-4FB9-49C7-A2B3-ECB4EC5EF8BD}.Release|x64.ActiveCfg = Release|Any CPU + {6BE16682-4FB9-49C7-A2B3-ECB4EC5EF8BD}.Release|x64.Build.0 = Release|Any CPU + {6BE16682-4FB9-49C7-A2B3-ECB4EC5EF8BD}.Release|x86.ActiveCfg = Release|Any CPU + {6BE16682-4FB9-49C7-A2B3-ECB4EC5EF8BD}.Release|x86.Build.0 = Release|Any CPU + {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Debug|Any CPU.Build.0 = Debug|Any CPU + {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Debug|x64.ActiveCfg = Debug|Any CPU + {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Debug|x64.Build.0 = Debug|Any CPU + {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Debug|x86.ActiveCfg = Debug|Any CPU + {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Debug|x86.Build.0 = Debug|Any CPU + {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Release|Any CPU.ActiveCfg = Release|Any CPU + {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Release|Any CPU.Build.0 = Release|Any CPU + {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Release|x64.ActiveCfg = Release|Any CPU + {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Release|x64.Build.0 = Release|Any CPU + {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Release|x86.ActiveCfg = Release|Any CPU + {4D167781-1AC0-46CF-A32E-1B6E048940B2}.Release|x86.Build.0 = Release|Any CPU + {C3AEAEE7-038E-45FF-892B-DB18EE29F790}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C3AEAEE7-038E-45FF-892B-DB18EE29F790}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C3AEAEE7-038E-45FF-892B-DB18EE29F790}.Debug|x64.ActiveCfg = Debug|Any CPU + {C3AEAEE7-038E-45FF-892B-DB18EE29F790}.Debug|x64.Build.0 = Debug|Any CPU + {C3AEAEE7-038E-45FF-892B-DB18EE29F790}.Debug|x86.ActiveCfg = Debug|Any CPU + {C3AEAEE7-038E-45FF-892B-DB18EE29F790}.Debug|x86.Build.0 = Debug|Any CPU + {C3AEAEE7-038E-45FF-892B-DB18EE29F790}.Release|Any 
CPU.ActiveCfg = Release|Any CPU + {C3AEAEE7-038E-45FF-892B-DB18EE29F790}.Release|Any CPU.Build.0 = Release|Any CPU + {C3AEAEE7-038E-45FF-892B-DB18EE29F790}.Release|x64.ActiveCfg = Release|Any CPU + {C3AEAEE7-038E-45FF-892B-DB18EE29F790}.Release|x64.Build.0 = Release|Any CPU + {C3AEAEE7-038E-45FF-892B-DB18EE29F790}.Release|x86.ActiveCfg = Release|Any CPU + {C3AEAEE7-038E-45FF-892B-DB18EE29F790}.Release|x86.Build.0 = Release|Any CPU + {7FACF6B4-7E12-4543-AAD4-0072FA1ECE0E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7FACF6B4-7E12-4543-AAD4-0072FA1ECE0E}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7FACF6B4-7E12-4543-AAD4-0072FA1ECE0E}.Debug|x64.ActiveCfg = Debug|Any CPU + {7FACF6B4-7E12-4543-AAD4-0072FA1ECE0E}.Debug|x64.Build.0 = Debug|Any CPU + {7FACF6B4-7E12-4543-AAD4-0072FA1ECE0E}.Debug|x86.ActiveCfg = Debug|Any CPU + {7FACF6B4-7E12-4543-AAD4-0072FA1ECE0E}.Debug|x86.Build.0 = Debug|Any CPU + {7FACF6B4-7E12-4543-AAD4-0072FA1ECE0E}.Release|Any CPU.ActiveCfg = Release|Any CPU + {7FACF6B4-7E12-4543-AAD4-0072FA1ECE0E}.Release|Any CPU.Build.0 = Release|Any CPU + {7FACF6B4-7E12-4543-AAD4-0072FA1ECE0E}.Release|x64.ActiveCfg = Release|Any CPU + {7FACF6B4-7E12-4543-AAD4-0072FA1ECE0E}.Release|x64.Build.0 = Release|Any CPU + {7FACF6B4-7E12-4543-AAD4-0072FA1ECE0E}.Release|x86.ActiveCfg = Release|Any CPU + {7FACF6B4-7E12-4543-AAD4-0072FA1ECE0E}.Release|x86.Build.0 = Release|Any CPU + {1561D597-922F-486E-ACF4-98250DDC5CDA}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {1561D597-922F-486E-ACF4-98250DDC5CDA}.Debug|Any CPU.Build.0 = Debug|Any CPU + {1561D597-922F-486E-ACF4-98250DDC5CDA}.Debug|x64.ActiveCfg = Debug|Any CPU + {1561D597-922F-486E-ACF4-98250DDC5CDA}.Debug|x64.Build.0 = Debug|Any CPU + {1561D597-922F-486E-ACF4-98250DDC5CDA}.Debug|x86.ActiveCfg = Debug|Any CPU + {1561D597-922F-486E-ACF4-98250DDC5CDA}.Debug|x86.Build.0 = Debug|Any CPU + {1561D597-922F-486E-ACF4-98250DDC5CDA}.Release|Any CPU.ActiveCfg = Release|Any CPU + {1561D597-922F-486E-ACF4-98250DDC5CDA}.Release|Any CPU.Build.0 = Release|Any CPU + {1561D597-922F-486E-ACF4-98250DDC5CDA}.Release|x64.ActiveCfg = Release|Any CPU + {1561D597-922F-486E-ACF4-98250DDC5CDA}.Release|x64.Build.0 = Release|Any CPU + {1561D597-922F-486E-ACF4-98250DDC5CDA}.Release|x86.ActiveCfg = Release|Any CPU + {1561D597-922F-486E-ACF4-98250DDC5CDA}.Release|x86.Build.0 = Release|Any CPU + {D7B25EC1-CDC8-4D2D-8569-826568E1AAD2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D7B25EC1-CDC8-4D2D-8569-826568E1AAD2}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D7B25EC1-CDC8-4D2D-8569-826568E1AAD2}.Debug|x64.ActiveCfg = Debug|Any CPU + {D7B25EC1-CDC8-4D2D-8569-826568E1AAD2}.Debug|x64.Build.0 = Debug|Any CPU + {D7B25EC1-CDC8-4D2D-8569-826568E1AAD2}.Debug|x86.ActiveCfg = Debug|Any CPU + {D7B25EC1-CDC8-4D2D-8569-826568E1AAD2}.Debug|x86.Build.0 = Debug|Any CPU + {D7B25EC1-CDC8-4D2D-8569-826568E1AAD2}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D7B25EC1-CDC8-4D2D-8569-826568E1AAD2}.Release|Any CPU.Build.0 = Release|Any CPU + {D7B25EC1-CDC8-4D2D-8569-826568E1AAD2}.Release|x64.ActiveCfg = Release|Any CPU + {D7B25EC1-CDC8-4D2D-8569-826568E1AAD2}.Release|x64.Build.0 = Release|Any CPU + {D7B25EC1-CDC8-4D2D-8569-826568E1AAD2}.Release|x86.ActiveCfg = Release|Any CPU + {D7B25EC1-CDC8-4D2D-8569-826568E1AAD2}.Release|x86.Build.0 = Release|Any CPU + {9369FA32-E98A-4180-9251-914925188086}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {9369FA32-E98A-4180-9251-914925188086}.Debug|Any CPU.Build.0 = Debug|Any CPU + {9369FA32-E98A-4180-9251-914925188086}.Debug|x64.ActiveCfg = Debug|Any CPU + 
{9369FA32-E98A-4180-9251-914925188086}.Debug|x64.Build.0 = Debug|Any CPU + {9369FA32-E98A-4180-9251-914925188086}.Debug|x86.ActiveCfg = Debug|Any CPU + {9369FA32-E98A-4180-9251-914925188086}.Debug|x86.Build.0 = Debug|Any CPU + {9369FA32-E98A-4180-9251-914925188086}.Release|Any CPU.ActiveCfg = Release|Any CPU + {9369FA32-E98A-4180-9251-914925188086}.Release|Any CPU.Build.0 = Release|Any CPU + {9369FA32-E98A-4180-9251-914925188086}.Release|x64.ActiveCfg = Release|Any CPU + {9369FA32-E98A-4180-9251-914925188086}.Release|x64.Build.0 = Release|Any CPU + {9369FA32-E98A-4180-9251-914925188086}.Release|x86.ActiveCfg = Release|Any CPU + {9369FA32-E98A-4180-9251-914925188086}.Release|x86.Build.0 = Release|Any CPU + {67650687-2E32-40BB-9849-C4ABBA65A7CF}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {67650687-2E32-40BB-9849-C4ABBA65A7CF}.Debug|Any CPU.Build.0 = Debug|Any CPU + {67650687-2E32-40BB-9849-C4ABBA65A7CF}.Debug|x64.ActiveCfg = Debug|Any CPU + {67650687-2E32-40BB-9849-C4ABBA65A7CF}.Debug|x64.Build.0 = Debug|Any CPU + {67650687-2E32-40BB-9849-C4ABBA65A7CF}.Debug|x86.ActiveCfg = Debug|Any CPU + {67650687-2E32-40BB-9849-C4ABBA65A7CF}.Debug|x86.Build.0 = Debug|Any CPU + {67650687-2E32-40BB-9849-C4ABBA65A7CF}.Release|Any CPU.ActiveCfg = Release|Any CPU + {67650687-2E32-40BB-9849-C4ABBA65A7CF}.Release|Any CPU.Build.0 = Release|Any CPU + {67650687-2E32-40BB-9849-C4ABBA65A7CF}.Release|x64.ActiveCfg = Release|Any CPU + {67650687-2E32-40BB-9849-C4ABBA65A7CF}.Release|x64.Build.0 = Release|Any CPU + {67650687-2E32-40BB-9849-C4ABBA65A7CF}.Release|x86.ActiveCfg = Release|Any CPU + {67650687-2E32-40BB-9849-C4ABBA65A7CF}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(NestedProjects) = preSolution + {361838C4-72E2-1C48-5D76-CA6D1A861242} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {D9F91EA0-8AF5-452A-86D8-52BACB2E39CB} = {361838C4-72E2-1C48-5D76-CA6D1A861242} + {5DBE2E9E-9905-47CE-B8DC-B25409AF1EF2} = {361838C4-72E2-1C48-5D76-CA6D1A861242} + {8BCEAAFC-9168-4CC0-AFDB-177E5F7C15C6} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {46D35B4F-6A04-47FF-958B-5E6A73FCC059} = {361838C4-72E2-1C48-5D76-CA6D1A861242} + {44A1241B-8ECF-4AFA-9972-452C39AD43D6} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {85AB3BB7-C493-4387-B39A-EB299AC37312} = {361838C4-72E2-1C48-5D76-CA6D1A861242} + {5C5E91CA-3F98-4E9A-922B-F6415EABD1A3} = {361838C4-72E2-1C48-5D76-CA6D1A861242} + {93DB06DC-B254-48A9-8F2C-6130A5658F27} = {361838C4-72E2-1C48-5D76-CA6D1A861242} + {03CA315C-8AA1-4CEA-A28B-5EB35C586F4A} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {C6DC3C29-C2AD-4015-8872-42E95A0FE63F} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {40094279-250C-42AE-992A-856718FEFBAC} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {B2967228-F8F7-4931-B257-1C63CB58CE1D} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {6D52EC2B-0A1A-4693-A8EE-5AB32A4A3ED9} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {37F203A3-624E-4794-9C99-16CAC22C17DF} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {3FF93987-A30A-4D50-8815-7CF3BB7CAE05} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {AACE8717-0760-42F2-A225-8FCCE876FB65} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {4AAD6965-E879-44AD-A8ED-E1D713A3CD6D} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {85D82A87-1F4A-4B1B-8422-5B7A7B7704E3} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {FE227DF2-875D-4BEA-A4E0-14EA7F3EC1D0} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {D0FB54BA-4D14-4A32-B09F-7EC94F369460} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + 
{69C9E010-CBDD-4B89-84CF-7AB56D6A078A} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {E471176A-E1F3-4DE5-8D30-0865903A217A} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {FA013511-DF20-45F7-8077-EBA2D6224D64} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {B9F84697-54FE-4648-B173-EE3D904FFA4D} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {6751A76C-8ED8-40F4-AE2B-069DB31395FE} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {DDBFA2EF-9CAE-473F-A438-369CAC25C66A} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {063DE5E1-C8FE-47D0-A12A-22A25CDF2C22} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {35350FAB-FC51-4FE8-81FB-011003134C37} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {1BFC95B4-4C8A-44B2-903A-11FBCAAB9519} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {C4A65377-22F7-4D15-92A3-4F05847D167E} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {BDDE59E1-C643-4C87-8608-0F9A7A54DE09} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {0CC116C8-A7E5-4B94-9688-32920177FF97} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {E8862F6E-85C1-4FDB-AA92-0BB489B7EA1E} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {84DEDF05-A5BD-4644-86B9-6B7918FE3F31} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {9DEB1F54-94B5-40C4-AC44-220E680B016D} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {7C3E87F2-93D8-4968-95E3-52C46947D46C} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {C0504D97-9BCD-4AE4-B0DC-B31C17B150F2} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {31B05493-104F-437F-9FA7-CA5286CE697C} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {937AF12E-D770-4534-8FF8-C59042609C2A} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {5A028B04-9D76-470B-B5B3-766CE4CE860C} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {749DE4C8-F733-43F8-B2A8-6649E71C7570} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {56D2C79E-2737-4FF9-9D19-150065F568D5} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {E41F6DC4-68B5-4EE3-97AE-801D725A2C13} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {285F1D0F-501F-4E2E-8FA0-F2CF28AE3798} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {26055403-C7F5-4709-8813-0F7387102791} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {0C00D0DA-C4C3-4B23-941F-A3DB2DBF33AF} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {258327E9-431E-475C-933B-50893676E452} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {42AF60C8-A5E1-40E0-86F8-98256364AF6F} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {88C6A9C3-B433-4C36-8767-429C8C2396F8} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {6B7099AB-01BF-4EC4-87D0-5C9C032266DE} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {14C918EA-693E-41FE-ACAE-2E82DF077BEA} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {81111B26-74F6-4912-9084-7115FD119945} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {80E2D661-FF3E-4A10-A2DF-AFD4F3D433FE} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {8D0F501D-01B1-4E24-958B-FAF35B267705} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {5BA91095-7F10-4717-B296-49DFBFC1C9C2} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {99616566-4EF1-4DC7-B655-825FE43D203D} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {EE3C03AD-E604-4C57-9B78-CF7F49FBFCB0} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {A3B19095-2D95-4B09-B07E-2C082C72394B} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {807837AF-B392-4589-ADF1-3FDB34D6C5BF} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {64EAFDCF-8283-4D5C-AC78-7969D5FE926A} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {68F4D8A1-E32F-487A-B460-325F36989BE3} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {4A3DA4AE-7B88-4674-A7E2-F5D42B8256F2} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {606C751B-7CF1-47CF-A25C-9248A55C814F} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + 
{0BE44D0A-CC4B-4E84-8AF3-D8D99551C431} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {CC4CCE5F-55BC-4745-A204-4FA92BC1BADC} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {99BAE717-9A2E-41F5-9ECC-5FB97E4A6066} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {5CCE0DB7-C115-4B21-A7AE-C8488C22A853} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {A09C9E66-5496-47EC-8B23-9EEB7CBDC75E} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {06DC817F-A936-4F83-8929-E00622B32245} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {2C999476-0291-4161-B3E9-1AA99A3B1139} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {476EAADA-1B39-4049-ABE4-CCAC21FFE9E2} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {0EF56124-E6E8-4E89-95DD-5A5D5FF05A98} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {0DBB9FC4-2E46-4C3E-BE88-2A8DCB59DB7D} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {8A40142F-E8C8-4E86-BE70-7DD4AB1FFDEE} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {C9D20F74-EE5F-4C9E-9AB1-C03E90B34F92} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {50140A32-6D3C-47DB-983A-7166CBA51845} = {361838C4-72E2-1C48-5D76-CA6D1A861242} + {031979F2-6ABA-444F-A6A4-80115DC487CE} = {361838C4-72E2-1C48-5D76-CA6D1A861242} + {D71B0DA5-80A3-419E-898D-40E77A9A7F19} = {361838C4-72E2-1C48-5D76-CA6D1A861242} + {B2C877D9-B521-4901-8817-76B5DAA62FCE} = {361838C4-72E2-1C48-5D76-CA6D1A861242} + {08D3B6D0-3CE8-4F24-A6F1-BCAB01AD6278} = {361838C4-72E2-1C48-5D76-CA6D1A861242} + {7116DD6B-2491-49E1-AB27-5210E949F753} = {361838C4-72E2-1C48-5D76-CA6D1A861242} + {7DBE31A6-D2FD-499E-B675-4092723175AD} = {361838C4-72E2-1C48-5D76-CA6D1A861242} + {D99E6EAE-D278-4480-AA67-85F025383E47} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {D3825714-3DDA-44B7-A99C-5F3E65716691} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {FAB78D21-7372-48FE-B2C3-DE1807F1157D} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {EADFA337-B0FA-4712-A24A-7C08235BDF98} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {110F7EC2-3149-4D1B-A972-E69E79F1EBF5} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {A2E3F03A-0CAD-4E2A-8C71-DDEBB1B7E4F7} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {3A1AF0AD-4DAE-4D82-9CCF-2DCB83CC3679} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {F1DF0F07-1BCB-4B55-8353-07BF8A4B2A67} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {31277AFF-9BFF-4C17-8593-B562A385058E} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {3A8F090F-678D-46E2-8899-67402129749C} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {05D844B6-51C1-4926-919C-D99E24FB3BC9} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {03E15545-D6A0-4287-A88C-6EDE77C0DCBE} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {BA47D456-4657-4C86-A665-21293E3AC47F} = {78C966F5-2242-D8EC-ADCA-A1A9C7F723A6} + {49EF86AC-1CC2-4A24-8637-C5151E23DF9D} = {78C966F5-2242-D8EC-ADCA-A1A9C7F723A6} + {C22333B3-D132-4960-A490-6BEF1EB1C917} = {78C966F5-2242-D8EC-ADCA-A1A9C7F723A6} + {B8B15A8D-F647-41AE-A55F-A283A47E97C4} = {78C966F5-2242-D8EC-ADCA-A1A9C7F723A6} + {F1F029E6-2E4B-4A42-8D8F-AB325EE3B608} = {827E0CD3-B72D-47B6-A68D-7590B98EB39B} + {CBE6E3D8-230C-4513-B98F-99D82B83B9F7} = {F1F029E6-2E4B-4A42-8D8F-AB325EE3B608} + {821C7F88-B775-4D3C-8D89-850B6C34E818} = {F1F029E6-2E4B-4A42-8D8F-AB325EE3B608} + {CBDF819E-923F-A07F-78D9-D599DD28197E} = {1553F566-661E-A2F5-811B-F74BF45C44CC} + {D8B22C17-28E9-4059-97C5-4AC4600A2BD5} = {CBDF819E-923F-A07F-78D9-D599DD28197E} + EndGlobalSection +EndGlobal diff --git a/src/TaskRunner/StellaOps.TaskRunner.sln b/src/TaskRunner/StellaOps.TaskRunner.sln new file mode 100644 index 00000000..dbaabaee --- /dev/null +++ b/src/TaskRunner/StellaOps.TaskRunner.sln @@ -0,0 +1,99 @@ + +Microsoft Visual Studio 
Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.0.31903.59 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "StellaOps.TaskRunner", "StellaOps.TaskRunner", "{ACACD739-950B-C891-6A12-926A82053571}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.TaskRunner.Core", "StellaOps.TaskRunner\StellaOps.TaskRunner.Core\StellaOps.TaskRunner.Core.csproj", "{C2A829A6-4563-4E00-A4FA-A42AD564D5D5}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.TaskRunner.Infrastructure", "StellaOps.TaskRunner\StellaOps.TaskRunner.Infrastructure\StellaOps.TaskRunner.Infrastructure.csproj", "{4952F6C0-33B4-41A7-8E9D-3235227C8C57}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.TaskRunner.Tests", "StellaOps.TaskRunner\StellaOps.TaskRunner.Tests\StellaOps.TaskRunner.Tests.csproj", "{F12428B3-E106-4021-AE80-BD058C72254B}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.TaskRunner.WebService", "StellaOps.TaskRunner\StellaOps.TaskRunner.WebService\StellaOps.TaskRunner.WebService.csproj", "{4F5327F5-FDDE-41BB-91C8-A3426DF012CC}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.TaskRunner.Worker", "StellaOps.TaskRunner\StellaOps.TaskRunner.Worker\StellaOps.TaskRunner.Worker.csproj", "{2A68B840-7D42-4F0F-839C-96BEB46417D6}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|Any CPU = Release|Any CPU + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {C2A829A6-4563-4E00-A4FA-A42AD564D5D5}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C2A829A6-4563-4E00-A4FA-A42AD564D5D5}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C2A829A6-4563-4E00-A4FA-A42AD564D5D5}.Debug|x64.ActiveCfg = Debug|Any CPU + {C2A829A6-4563-4E00-A4FA-A42AD564D5D5}.Debug|x64.Build.0 = Debug|Any CPU + {C2A829A6-4563-4E00-A4FA-A42AD564D5D5}.Debug|x86.ActiveCfg = Debug|Any CPU + {C2A829A6-4563-4E00-A4FA-A42AD564D5D5}.Debug|x86.Build.0 = Debug|Any CPU + {C2A829A6-4563-4E00-A4FA-A42AD564D5D5}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C2A829A6-4563-4E00-A4FA-A42AD564D5D5}.Release|Any CPU.Build.0 = Release|Any CPU + {C2A829A6-4563-4E00-A4FA-A42AD564D5D5}.Release|x64.ActiveCfg = Release|Any CPU + {C2A829A6-4563-4E00-A4FA-A42AD564D5D5}.Release|x64.Build.0 = Release|Any CPU + {C2A829A6-4563-4E00-A4FA-A42AD564D5D5}.Release|x86.ActiveCfg = Release|Any CPU + {C2A829A6-4563-4E00-A4FA-A42AD564D5D5}.Release|x86.Build.0 = Release|Any CPU + {4952F6C0-33B4-41A7-8E9D-3235227C8C57}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {4952F6C0-33B4-41A7-8E9D-3235227C8C57}.Debug|Any CPU.Build.0 = Debug|Any CPU + {4952F6C0-33B4-41A7-8E9D-3235227C8C57}.Debug|x64.ActiveCfg = Debug|Any CPU + {4952F6C0-33B4-41A7-8E9D-3235227C8C57}.Debug|x64.Build.0 = Debug|Any CPU + {4952F6C0-33B4-41A7-8E9D-3235227C8C57}.Debug|x86.ActiveCfg = Debug|Any CPU + {4952F6C0-33B4-41A7-8E9D-3235227C8C57}.Debug|x86.Build.0 = Debug|Any CPU + {4952F6C0-33B4-41A7-8E9D-3235227C8C57}.Release|Any CPU.ActiveCfg = Release|Any CPU + {4952F6C0-33B4-41A7-8E9D-3235227C8C57}.Release|Any CPU.Build.0 = Release|Any CPU + {4952F6C0-33B4-41A7-8E9D-3235227C8C57}.Release|x64.ActiveCfg = Release|Any CPU + {4952F6C0-33B4-41A7-8E9D-3235227C8C57}.Release|x64.Build.0 = Release|Any CPU + 
{4952F6C0-33B4-41A7-8E9D-3235227C8C57}.Release|x86.ActiveCfg = Release|Any CPU + {4952F6C0-33B4-41A7-8E9D-3235227C8C57}.Release|x86.Build.0 = Release|Any CPU + {F12428B3-E106-4021-AE80-BD058C72254B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {F12428B3-E106-4021-AE80-BD058C72254B}.Debug|Any CPU.Build.0 = Debug|Any CPU + {F12428B3-E106-4021-AE80-BD058C72254B}.Debug|x64.ActiveCfg = Debug|Any CPU + {F12428B3-E106-4021-AE80-BD058C72254B}.Debug|x64.Build.0 = Debug|Any CPU + {F12428B3-E106-4021-AE80-BD058C72254B}.Debug|x86.ActiveCfg = Debug|Any CPU + {F12428B3-E106-4021-AE80-BD058C72254B}.Debug|x86.Build.0 = Debug|Any CPU + {F12428B3-E106-4021-AE80-BD058C72254B}.Release|Any CPU.ActiveCfg = Release|Any CPU + {F12428B3-E106-4021-AE80-BD058C72254B}.Release|Any CPU.Build.0 = Release|Any CPU + {F12428B3-E106-4021-AE80-BD058C72254B}.Release|x64.ActiveCfg = Release|Any CPU + {F12428B3-E106-4021-AE80-BD058C72254B}.Release|x64.Build.0 = Release|Any CPU + {F12428B3-E106-4021-AE80-BD058C72254B}.Release|x86.ActiveCfg = Release|Any CPU + {F12428B3-E106-4021-AE80-BD058C72254B}.Release|x86.Build.0 = Release|Any CPU + {4F5327F5-FDDE-41BB-91C8-A3426DF012CC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {4F5327F5-FDDE-41BB-91C8-A3426DF012CC}.Debug|Any CPU.Build.0 = Debug|Any CPU + {4F5327F5-FDDE-41BB-91C8-A3426DF012CC}.Debug|x64.ActiveCfg = Debug|Any CPU + {4F5327F5-FDDE-41BB-91C8-A3426DF012CC}.Debug|x64.Build.0 = Debug|Any CPU + {4F5327F5-FDDE-41BB-91C8-A3426DF012CC}.Debug|x86.ActiveCfg = Debug|Any CPU + {4F5327F5-FDDE-41BB-91C8-A3426DF012CC}.Debug|x86.Build.0 = Debug|Any CPU + {4F5327F5-FDDE-41BB-91C8-A3426DF012CC}.Release|Any CPU.ActiveCfg = Release|Any CPU + {4F5327F5-FDDE-41BB-91C8-A3426DF012CC}.Release|Any CPU.Build.0 = Release|Any CPU + {4F5327F5-FDDE-41BB-91C8-A3426DF012CC}.Release|x64.ActiveCfg = Release|Any CPU + {4F5327F5-FDDE-41BB-91C8-A3426DF012CC}.Release|x64.Build.0 = Release|Any CPU + {4F5327F5-FDDE-41BB-91C8-A3426DF012CC}.Release|x86.ActiveCfg = Release|Any CPU + {4F5327F5-FDDE-41BB-91C8-A3426DF012CC}.Release|x86.Build.0 = Release|Any CPU + {2A68B840-7D42-4F0F-839C-96BEB46417D6}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {2A68B840-7D42-4F0F-839C-96BEB46417D6}.Debug|Any CPU.Build.0 = Debug|Any CPU + {2A68B840-7D42-4F0F-839C-96BEB46417D6}.Debug|x64.ActiveCfg = Debug|Any CPU + {2A68B840-7D42-4F0F-839C-96BEB46417D6}.Debug|x64.Build.0 = Debug|Any CPU + {2A68B840-7D42-4F0F-839C-96BEB46417D6}.Debug|x86.ActiveCfg = Debug|Any CPU + {2A68B840-7D42-4F0F-839C-96BEB46417D6}.Debug|x86.Build.0 = Debug|Any CPU + {2A68B840-7D42-4F0F-839C-96BEB46417D6}.Release|Any CPU.ActiveCfg = Release|Any CPU + {2A68B840-7D42-4F0F-839C-96BEB46417D6}.Release|Any CPU.Build.0 = Release|Any CPU + {2A68B840-7D42-4F0F-839C-96BEB46417D6}.Release|x64.ActiveCfg = Release|Any CPU + {2A68B840-7D42-4F0F-839C-96BEB46417D6}.Release|x64.Build.0 = Release|Any CPU + {2A68B840-7D42-4F0F-839C-96BEB46417D6}.Release|x86.ActiveCfg = Release|Any CPU + {2A68B840-7D42-4F0F-839C-96BEB46417D6}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(NestedProjects) = preSolution + {C2A829A6-4563-4E00-A4FA-A42AD564D5D5} = {ACACD739-950B-C891-6A12-926A82053571} + {4952F6C0-33B4-41A7-8E9D-3235227C8C57} = {ACACD739-950B-C891-6A12-926A82053571} + {F12428B3-E106-4021-AE80-BD058C72254B} = {ACACD739-950B-C891-6A12-926A82053571} + {4F5327F5-FDDE-41BB-91C8-A3426DF012CC} = {ACACD739-950B-C891-6A12-926A82053571} + {2A68B840-7D42-4F0F-839C-96BEB46417D6} = 
{ACACD739-950B-C891-6A12-926A82053571}
+	EndGlobalSection
+EndGlobal
diff --git a/src/StellaOps.TaskRunner/AGENTS.md b/src/TaskRunner/StellaOps.TaskRunner/AGENTS.md
similarity index 98%
rename from src/StellaOps.TaskRunner/AGENTS.md
rename to src/TaskRunner/StellaOps.TaskRunner/AGENTS.md
index d8f38b18..e4129aea 100644
--- a/src/StellaOps.TaskRunner/AGENTS.md
+++ b/src/TaskRunner/StellaOps.TaskRunner/AGENTS.md
@@ -1,17 +1,17 @@
-# Task Runner Service — Agent Charter
-
-## Mission
-Execute Task Packs safely and deterministically. Provide remote pack execution, approvals, logging, artifact capture, and policy gates in support of Epic 12, honoring the imposed rule to propagate similar work where needed.
-
-## Responsibilities
-- Validate Task Packs, enforce RBAC/approvals, orchestrate steps, manage artifacts/logs, stream status.
-- Integrate with Orchestrator, Authority, Policy Engine, Export Center, Notifications, and CLI.
-- Guarantee reproducible runs, provenance manifests, and secure handling of secrets and networks.
-
-## Module Layout
-- `StellaOps.TaskRunner.Core/` — execution engine, step DSL, policy gates.
-- `StellaOps.TaskRunner.Infrastructure/` — storage adapters, artifact handling, external clients.
-- `StellaOps.TaskRunner.WebService/` — run management APIs and simulation endpoints.
-- `StellaOps.TaskRunner.Worker/` — background executors, approvals, and telemetry loops.
-- `StellaOps.TaskRunner.Tests/` — unit tests for core/infrastructure code paths.
-- `StellaOps.TaskRunner.sln` — module solution.
+# Task Runner Service — Agent Charter
+
+## Mission
+Execute Task Packs safely and deterministically. Provide remote pack execution, approvals, logging, artifact capture, and policy gates in support of Epic 12, honoring the imposed rule to propagate similar work where needed.
+
+## Responsibilities
+- Validate Task Packs, enforce RBAC/approvals, orchestrate steps, manage artifacts/logs, stream status.
+- Integrate with Orchestrator, Authority, Policy Engine, Export Center, Notifications, and CLI.
+- Guarantee reproducible runs, provenance manifests, and secure handling of secrets and networks.
+
+## Module Layout
+- `StellaOps.TaskRunner.Core/` — execution engine, step DSL, policy gates.
+- `StellaOps.TaskRunner.Infrastructure/` — storage adapters, artifact handling, external clients.
+- `StellaOps.TaskRunner.WebService/` — run management APIs and simulation endpoints.
+- `StellaOps.TaskRunner.Worker/` — background executors, approvals, and telemetry loops.
+- `StellaOps.TaskRunner.Tests/` — unit tests for core/infrastructure code paths.
+- `StellaOps.TaskRunner.sln` — module solution.
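To make the approval flow described in the charter above concrete, a minimal illustrative sketch follows; it is not part of the patch itself. It composes the types this change relocates (PackRunApprovalCoordinator, IPackRunApprovalStore) into a Worker-side routine; the plan instance, run id, approval id, and actor shown are hypothetical placeholders.

using StellaOps.TaskRunner.Core.Execution;
using StellaOps.TaskRunner.Core.Planning;

// Hypothetical helper for illustration only; not introduced by this patch.
internal sealed class ApprovalFlowSketch
{
    public async Task RunAsync(
        TaskPackPlan plan,                   // assumed to come from the planner elsewhere in Core
        IPackRunApprovalStore approvalStore, // interface relocated by this patch (see below)
        string runId,
        CancellationToken cancellationToken)
    {
        // Build the coordinator from the plan; every declared approval starts as Pending.
        var coordinator = PackRunApprovalCoordinator.Create(plan, DateTimeOffset.UtcNow);

        // Persist the initial approval states so the WebService can surface them.
        await approvalStore.SaveAsync(runId, coordinator.GetApprovals(), cancellationToken);

        if (!coordinator.HasPendingApprovals)
        {
            return; // nothing gates the run
        }

        // Later, when an operator grants an approval (ids below are placeholders):
        var result = coordinator.Approve("security-signoff", "operator@example", DateTimeOffset.UtcNow);
        await approvalStore.UpdateAsync(runId, result.State, cancellationToken);

        if (result.ShouldResumeRun)
        {
            // All approvals granted; the Worker would resume step execution here.
        }
    }
}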
diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/IPackRunApprovalStore.cs b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/IPackRunApprovalStore.cs
similarity index 97%
rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/IPackRunApprovalStore.cs
rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/IPackRunApprovalStore.cs
index b9b77c58..da704bae 100644
--- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/IPackRunApprovalStore.cs
+++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/IPackRunApprovalStore.cs
@@ -1,10 +1,10 @@
-namespace StellaOps.TaskRunner.Core.Execution;
-
-public interface IPackRunApprovalStore
-{
-    Task SaveAsync(string runId, IReadOnlyList<PackRunApprovalState> approvals, CancellationToken cancellationToken);
-
-    Task<IReadOnlyList<PackRunApprovalState>> GetAsync(string runId, CancellationToken cancellationToken);
-
-    Task UpdateAsync(string runId, PackRunApprovalState approval, CancellationToken cancellationToken);
-}
+namespace StellaOps.TaskRunner.Core.Execution;
+
+public interface IPackRunApprovalStore
+{
+    Task SaveAsync(string runId, IReadOnlyList<PackRunApprovalState> approvals, CancellationToken cancellationToken);
+
+    Task<IReadOnlyList<PackRunApprovalState>> GetAsync(string runId, CancellationToken cancellationToken);
+
+    Task UpdateAsync(string runId, PackRunApprovalState approval, CancellationToken cancellationToken);
+}
diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/IPackRunJobDispatcher.cs b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/IPackRunJobDispatcher.cs
similarity index 96%
rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/IPackRunJobDispatcher.cs
rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/IPackRunJobDispatcher.cs
index 9e6a29f8..acc65267 100644
--- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/IPackRunJobDispatcher.cs
+++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/IPackRunJobDispatcher.cs
@@ -1,6 +1,6 @@
-namespace StellaOps.TaskRunner.Core.Execution;
-
-public interface IPackRunJobDispatcher
-{
-    Task<PackRunExecutionContext?> TryDequeueAsync(CancellationToken cancellationToken);
-}
+namespace StellaOps.TaskRunner.Core.Execution;
+
+public interface IPackRunJobDispatcher
+{
+    Task<PackRunExecutionContext?> TryDequeueAsync(CancellationToken cancellationToken);
+}
diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/IPackRunNotificationPublisher.cs b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/IPackRunNotificationPublisher.cs
similarity index 97%
rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/IPackRunNotificationPublisher.cs
rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/IPackRunNotificationPublisher.cs
index 49fde579..84a464ff 100644
--- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/IPackRunNotificationPublisher.cs
+++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/IPackRunNotificationPublisher.cs
@@ -1,8 +1,8 @@
-namespace StellaOps.TaskRunner.Core.Execution;
-
-public interface IPackRunNotificationPublisher
-{
-    Task PublishApprovalRequestedAsync(string runId, ApprovalNotification notification, CancellationToken cancellationToken);
-
-    Task PublishPolicyGatePendingAsync(string runId,
PolicyGateNotification notification, CancellationToken cancellationToken); -} +namespace StellaOps.TaskRunner.Core.Execution; + +public interface IPackRunNotificationPublisher +{ + Task PublishApprovalRequestedAsync(string runId, ApprovalNotification notification, CancellationToken cancellationToken); + + Task PublishPolicyGatePendingAsync(string runId, PolicyGateNotification notification, CancellationToken cancellationToken); +} diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunApprovalCoordinator.cs b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunApprovalCoordinator.cs similarity index 97% rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunApprovalCoordinator.cs rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunApprovalCoordinator.cs index 57f194c9..cb70fa8d 100644 --- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunApprovalCoordinator.cs +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunApprovalCoordinator.cs @@ -1,177 +1,177 @@ -using System.Collections.Concurrent; -using System.Collections.Immutable; -using StellaOps.TaskRunner.Core.Planning; - -namespace StellaOps.TaskRunner.Core.Execution; - -public sealed class PackRunApprovalCoordinator -{ - private readonly ConcurrentDictionary<string, PackRunApprovalState> approvals; - private readonly IReadOnlyDictionary<string, PackRunApprovalRequirement> requirements; - - private PackRunApprovalCoordinator( - IReadOnlyDictionary<string, PackRunApprovalState> approvals, - IReadOnlyDictionary<string, PackRunApprovalRequirement> requirements) - { - this.approvals = new ConcurrentDictionary<string, PackRunApprovalState>(approvals); - this.requirements = requirements; - } - - public static PackRunApprovalCoordinator Create(TaskPackPlan plan, DateTimeOffset requestTimestamp) - { - ArgumentNullException.ThrowIfNull(plan); - - var requirements = TaskPackPlanInsights - .CollectApprovalRequirements(plan) - .ToDictionary( - requirement => requirement.ApprovalId, - requirement => new PackRunApprovalRequirement( - requirement.ApprovalId, - requirement.Grants.ToImmutableArray(), - requirement.StepIds.ToImmutableArray(), - requirement.Messages.ToImmutableArray(), - requirement.ReasonTemplate), - StringComparer.Ordinal); - - var states = requirements.Values - .ToDictionary( - requirement => requirement.ApprovalId, - requirement => new PackRunApprovalState( - requirement.ApprovalId, - requirement.RequiredGrants, - requirement.StepIds, - requirement.Messages, - requirement.ReasonTemplate, - requestTimestamp, - PackRunApprovalStatus.Pending), - StringComparer.Ordinal); - - return new PackRunApprovalCoordinator(states, requirements); - } - - public static PackRunApprovalCoordinator Restore(TaskPackPlan plan, IReadOnlyList<PackRunApprovalState> existingStates, DateTimeOffset requestedAt) - { - ArgumentNullException.ThrowIfNull(plan); - ArgumentNullException.ThrowIfNull(existingStates); - - var coordinator = Create(plan, requestedAt); - foreach (var state in existingStates) - { - coordinator.approvals[state.ApprovalId] = state; - } - - return coordinator; - } - - public IReadOnlyList<PackRunApprovalState> GetApprovals() - => approvals.Values - .OrderBy(state => state.ApprovalId, StringComparer.Ordinal) - .ToImmutableArray(); - - public bool HasPendingApprovals => approvals.Values.Any(state => state.Status == PackRunApprovalStatus.Pending); - - public ApprovalActionResult 
Approve(string approvalId, string actorId, DateTimeOffset completedAt, string? summary = null) - { - ArgumentException.ThrowIfNullOrWhiteSpace(approvalId); - ArgumentException.ThrowIfNullOrWhiteSpace(actorId); - - var updated = approvals.AddOrUpdate( - approvalId, - static _ => throw new KeyNotFoundException("Unknown approval."), - (_, current) => current.Approve(actorId, completedAt, summary)); - - var shouldResume = approvals.Values.All(state => state.Status == PackRunApprovalStatus.Approved); - return new ApprovalActionResult(updated, shouldResume); - } - - public ApprovalActionResult Reject(string approvalId, string actorId, DateTimeOffset completedAt, string? summary = null) - { - ArgumentException.ThrowIfNullOrWhiteSpace(approvalId); - ArgumentException.ThrowIfNullOrWhiteSpace(actorId); - - var updated = approvals.AddOrUpdate( - approvalId, - static _ => throw new KeyNotFoundException("Unknown approval."), - (_, current) => current.Reject(actorId, completedAt, summary)); - - return new ApprovalActionResult(updated, false); - } - - public ApprovalActionResult Expire(string approvalId, DateTimeOffset expiredAt, string? summary = null) - { - ArgumentException.ThrowIfNullOrWhiteSpace(approvalId); - - var updated = approvals.AddOrUpdate( - approvalId, - static _ => throw new KeyNotFoundException("Unknown approval."), - (_, current) => current.Expire(expiredAt, summary)); - - return new ApprovalActionResult(updated, false); - } - - public IReadOnlyList<ApprovalNotification> BuildNotifications(TaskPackPlan plan) - { - ArgumentNullException.ThrowIfNull(plan); - - var hints = TaskPackPlanInsights.CollectApprovalRequirements(plan); - var notifications = new List<ApprovalNotification>(hints.Count); - - foreach (var hint in hints) - { - if (!requirements.TryGetValue(hint.ApprovalId, out var requirement)) - { - continue; - } - - notifications.Add(new ApprovalNotification( - requirement.ApprovalId, - requirement.RequiredGrants, - requirement.Messages, - requirement.StepIds, - requirement.ReasonTemplate)); - } - - return notifications; - } - - public IReadOnlyList<PolicyGateNotification> BuildPolicyNotifications(TaskPackPlan plan) - { - ArgumentNullException.ThrowIfNull(plan); - - var policyHints = TaskPackPlanInsights.CollectPolicyGateHints(plan); - return policyHints - .Select(hint => new PolicyGateNotification( - hint.StepId, - hint.Message, - hint.Parameters.Select(parameter => new PolicyGateNotificationParameter( - parameter.Name, - parameter.RequiresRuntimeValue, - parameter.Expression, - parameter.Error)).ToImmutableArray())) - .ToImmutableArray(); - } -} - -public sealed record PackRunApprovalRequirement( - string ApprovalId, - IReadOnlyList<string> RequiredGrants, - IReadOnlyList<string> StepIds, - IReadOnlyList<string> Messages, - string? ReasonTemplate); - -public sealed record ApprovalActionResult(PackRunApprovalState State, bool ShouldResumeRun); - -public sealed record ApprovalNotification( - string ApprovalId, - IReadOnlyList<string> RequiredGrants, - IReadOnlyList<string> Messages, - IReadOnlyList<string> StepIds, - string? ReasonTemplate); - -public sealed record PolicyGateNotification(string StepId, string? Message, IReadOnlyList<PolicyGateNotificationParameter> Parameters); - -public sealed record PolicyGateNotificationParameter( - string Name, - bool RequiresRuntimeValue, - string? Expression, - string? 
Error); +using System.Collections.Concurrent; +using System.Collections.Immutable; +using StellaOps.TaskRunner.Core.Planning; + +namespace StellaOps.TaskRunner.Core.Execution; + +public sealed class PackRunApprovalCoordinator +{ + private readonly ConcurrentDictionary<string, PackRunApprovalState> approvals; + private readonly IReadOnlyDictionary<string, PackRunApprovalRequirement> requirements; + + private PackRunApprovalCoordinator( + IReadOnlyDictionary<string, PackRunApprovalState> approvals, + IReadOnlyDictionary<string, PackRunApprovalRequirement> requirements) + { + this.approvals = new ConcurrentDictionary<string, PackRunApprovalState>(approvals); + this.requirements = requirements; + } + + public static PackRunApprovalCoordinator Create(TaskPackPlan plan, DateTimeOffset requestTimestamp) + { + ArgumentNullException.ThrowIfNull(plan); + + var requirements = TaskPackPlanInsights + .CollectApprovalRequirements(plan) + .ToDictionary( + requirement => requirement.ApprovalId, + requirement => new PackRunApprovalRequirement( + requirement.ApprovalId, + requirement.Grants.ToImmutableArray(), + requirement.StepIds.ToImmutableArray(), + requirement.Messages.ToImmutableArray(), + requirement.ReasonTemplate), + StringComparer.Ordinal); + + var states = requirements.Values + .ToDictionary( + requirement => requirement.ApprovalId, + requirement => new PackRunApprovalState( + requirement.ApprovalId, + requirement.RequiredGrants, + requirement.StepIds, + requirement.Messages, + requirement.ReasonTemplate, + requestTimestamp, + PackRunApprovalStatus.Pending), + StringComparer.Ordinal); + + return new PackRunApprovalCoordinator(states, requirements); + } + + public static PackRunApprovalCoordinator Restore(TaskPackPlan plan, IReadOnlyList<PackRunApprovalState> existingStates, DateTimeOffset requestedAt) + { + ArgumentNullException.ThrowIfNull(plan); + ArgumentNullException.ThrowIfNull(existingStates); + + var coordinator = Create(plan, requestedAt); + foreach (var state in existingStates) + { + coordinator.approvals[state.ApprovalId] = state; + } + + return coordinator; + } + + public IReadOnlyList<PackRunApprovalState> GetApprovals() + => approvals.Values + .OrderBy(state => state.ApprovalId, StringComparer.Ordinal) + .ToImmutableArray(); + + public bool HasPendingApprovals => approvals.Values.Any(state => state.Status == PackRunApprovalStatus.Pending); + + public ApprovalActionResult Approve(string approvalId, string actorId, DateTimeOffset completedAt, string? summary = null) + { + ArgumentException.ThrowIfNullOrWhiteSpace(approvalId); + ArgumentException.ThrowIfNullOrWhiteSpace(actorId); + + var updated = approvals.AddOrUpdate( + approvalId, + static _ => throw new KeyNotFoundException("Unknown approval."), + (_, current) => current.Approve(actorId, completedAt, summary)); + + var shouldResume = approvals.Values.All(state => state.Status == PackRunApprovalStatus.Approved); + return new ApprovalActionResult(updated, shouldResume); + } + + public ApprovalActionResult Reject(string approvalId, string actorId, DateTimeOffset completedAt, string? 
summary = null) + { + ArgumentException.ThrowIfNullOrWhiteSpace(approvalId); + ArgumentException.ThrowIfNullOrWhiteSpace(actorId); + + var updated = approvals.AddOrUpdate( + approvalId, + static _ => throw new KeyNotFoundException("Unknown approval."), + (_, current) => current.Reject(actorId, completedAt, summary)); + + return new ApprovalActionResult(updated, false); + } + + public ApprovalActionResult Expire(string approvalId, DateTimeOffset expiredAt, string? summary = null) + { + ArgumentException.ThrowIfNullOrWhiteSpace(approvalId); + + var updated = approvals.AddOrUpdate( + approvalId, + static _ => throw new KeyNotFoundException("Unknown approval."), + (_, current) => current.Expire(expiredAt, summary)); + + return new ApprovalActionResult(updated, false); + } + + public IReadOnlyList<ApprovalNotification> BuildNotifications(TaskPackPlan plan) + { + ArgumentNullException.ThrowIfNull(plan); + + var hints = TaskPackPlanInsights.CollectApprovalRequirements(plan); + var notifications = new List<ApprovalNotification>(hints.Count); + + foreach (var hint in hints) + { + if (!requirements.TryGetValue(hint.ApprovalId, out var requirement)) + { + continue; + } + + notifications.Add(new ApprovalNotification( + requirement.ApprovalId, + requirement.RequiredGrants, + requirement.Messages, + requirement.StepIds, + requirement.ReasonTemplate)); + } + + return notifications; + } + + public IReadOnlyList<PolicyGateNotification> BuildPolicyNotifications(TaskPackPlan plan) + { + ArgumentNullException.ThrowIfNull(plan); + + var policyHints = TaskPackPlanInsights.CollectPolicyGateHints(plan); + return policyHints + .Select(hint => new PolicyGateNotification( + hint.StepId, + hint.Message, + hint.Parameters.Select(parameter => new PolicyGateNotificationParameter( + parameter.Name, + parameter.RequiresRuntimeValue, + parameter.Expression, + parameter.Error)).ToImmutableArray())) + .ToImmutableArray(); + } +} + +public sealed record PackRunApprovalRequirement( + string ApprovalId, + IReadOnlyList<string> RequiredGrants, + IReadOnlyList<string> StepIds, + IReadOnlyList<string> Messages, + string? ReasonTemplate); + +public sealed record ApprovalActionResult(PackRunApprovalState State, bool ShouldResumeRun); + +public sealed record ApprovalNotification( + string ApprovalId, + IReadOnlyList<string> RequiredGrants, + IReadOnlyList<string> Messages, + IReadOnlyList<string> StepIds, + string? ReasonTemplate); + +public sealed record PolicyGateNotification(string StepId, string? Message, IReadOnlyList<PolicyGateNotificationParameter> Parameters); + +public sealed record PolicyGateNotificationParameter( + string Name, + bool RequiresRuntimeValue, + string? Expression, + string? 
Error); diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunApprovalState.cs b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunApprovalState.cs similarity index 97% rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunApprovalState.cs rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunApprovalState.cs index c98ca5ee..ba36e37a 100644 --- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunApprovalState.cs +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunApprovalState.cs @@ -1,84 +1,84 @@ -using System.Collections.Immutable; - -namespace StellaOps.TaskRunner.Core.Execution; - -public sealed class PackRunApprovalState -{ - public PackRunApprovalState( - string approvalId, - IReadOnlyList<string> requiredGrants, - IReadOnlyList<string> stepIds, - IReadOnlyList<string> messages, - string? reasonTemplate, - DateTimeOffset requestedAt, - PackRunApprovalStatus status, - string? actorId = null, - DateTimeOffset? completedAt = null, - string? summary = null) - { - if (string.IsNullOrWhiteSpace(approvalId)) - { - throw new ArgumentException("Approval id must not be empty.", nameof(approvalId)); - } - - ApprovalId = approvalId; - RequiredGrants = requiredGrants.ToImmutableArray(); - StepIds = stepIds.ToImmutableArray(); - Messages = messages.ToImmutableArray(); - ReasonTemplate = reasonTemplate; - RequestedAt = requestedAt; - Status = status; - ActorId = actorId; - CompletedAt = completedAt; - Summary = summary; - } - - public string ApprovalId { get; } - - public IReadOnlyList<string> RequiredGrants { get; } - - public IReadOnlyList<string> StepIds { get; } - - public IReadOnlyList<string> Messages { get; } - - public string? ReasonTemplate { get; } - - public DateTimeOffset RequestedAt { get; } - - public PackRunApprovalStatus Status { get; } - - public string? ActorId { get; } - - public DateTimeOffset? CompletedAt { get; } - - public string? Summary { get; } - - public PackRunApprovalState Approve(string actorId, DateTimeOffset completedAt, string? summary = null) - => Transition(PackRunApprovalStatus.Approved, actorId, completedAt, summary); - - public PackRunApprovalState Reject(string actorId, DateTimeOffset completedAt, string? summary = null) - => Transition(PackRunApprovalStatus.Rejected, actorId, completedAt, summary); - - public PackRunApprovalState Expire(DateTimeOffset expiredAt, string? summary = null) - => Transition(PackRunApprovalStatus.Expired, actorId: null, expiredAt, summary); - - private PackRunApprovalState Transition(PackRunApprovalStatus status, string? actorId, DateTimeOffset completedAt, string? summary) - { - if (Status != PackRunApprovalStatus.Pending) - { - throw new InvalidOperationException($"Approval '{ApprovalId}' is already {Status}."); - } - - return new PackRunApprovalState( - ApprovalId, - RequiredGrants, - StepIds, - Messages, - ReasonTemplate, - RequestedAt, - status, - actorId, - completedAt, - summary); - } -} +using System.Collections.Immutable; + +namespace StellaOps.TaskRunner.Core.Execution; + +public sealed class PackRunApprovalState +{ + public PackRunApprovalState( + string approvalId, + IReadOnlyList<string> requiredGrants, + IReadOnlyList<string> stepIds, + IReadOnlyList<string> messages, + string? reasonTemplate, + DateTimeOffset requestedAt, + PackRunApprovalStatus status, + string? actorId = null, + DateTimeOffset? completedAt = null, + string? 
summary = null) + { + if (string.IsNullOrWhiteSpace(approvalId)) + { + throw new ArgumentException("Approval id must not be empty.", nameof(approvalId)); + } + + ApprovalId = approvalId; + RequiredGrants = requiredGrants.ToImmutableArray(); + StepIds = stepIds.ToImmutableArray(); + Messages = messages.ToImmutableArray(); + ReasonTemplate = reasonTemplate; + RequestedAt = requestedAt; + Status = status; + ActorId = actorId; + CompletedAt = completedAt; + Summary = summary; + } + + public string ApprovalId { get; } + + public IReadOnlyList<string> RequiredGrants { get; } + + public IReadOnlyList<string> StepIds { get; } + + public IReadOnlyList<string> Messages { get; } + + public string? ReasonTemplate { get; } + + public DateTimeOffset RequestedAt { get; } + + public PackRunApprovalStatus Status { get; } + + public string? ActorId { get; } + + public DateTimeOffset? CompletedAt { get; } + + public string? Summary { get; } + + public PackRunApprovalState Approve(string actorId, DateTimeOffset completedAt, string? summary = null) + => Transition(PackRunApprovalStatus.Approved, actorId, completedAt, summary); + + public PackRunApprovalState Reject(string actorId, DateTimeOffset completedAt, string? summary = null) + => Transition(PackRunApprovalStatus.Rejected, actorId, completedAt, summary); + + public PackRunApprovalState Expire(DateTimeOffset expiredAt, string? summary = null) + => Transition(PackRunApprovalStatus.Expired, actorId: null, expiredAt, summary); + + private PackRunApprovalState Transition(PackRunApprovalStatus status, string? actorId, DateTimeOffset completedAt, string? summary) + { + if (Status != PackRunApprovalStatus.Pending) + { + throw new InvalidOperationException($"Approval '{ApprovalId}' is already {Status}."); + } + + return new PackRunApprovalState( + ApprovalId, + RequiredGrants, + StepIds, + Messages, + ReasonTemplate, + RequestedAt, + status, + actorId, + completedAt, + summary); + } +} diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunApprovalStatus.cs b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunApprovalStatus.cs similarity index 94% rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunApprovalStatus.cs rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunApprovalStatus.cs index f644de25..0f8cb43f 100644 --- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunApprovalStatus.cs +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunApprovalStatus.cs @@ -1,9 +1,9 @@ -namespace StellaOps.TaskRunner.Core.Execution; - -public enum PackRunApprovalStatus -{ - Pending = 0, - Approved = 1, - Rejected = 2, - Expired = 3 -} +namespace StellaOps.TaskRunner.Core.Execution; + +public enum PackRunApprovalStatus +{ + Pending = 0, + Approved = 1, + Rejected = 2, + Expired = 3 +} diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunExecutionContext.cs b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunExecutionContext.cs similarity index 96% rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunExecutionContext.cs rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunExecutionContext.cs index 05baff39..c7843ea5 100644 --- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunExecutionContext.cs +++ 
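
For orientation, a minimal sketch of the PackRunApprovalState lifecycle defined above, assuming only the types shown in this patch; the approval id, grants, actor, and timestamps are invented sample values, not taken from the codebase.

using StellaOps.TaskRunner.Core.Execution;

// States start Pending and are immutable: each transition returns a new instance.
var requestedAt = DateTimeOffset.UtcNow;
var state = new PackRunApprovalState(
    approvalId: "security-review",
    requiredGrants: new[] { "packs.approve" },
    stepIds: new[] { "gate-security" },
    messages: new[] { "Security sign-off required." },
    reasonTemplate: null,
    requestedAt: requestedAt,
    status: PackRunApprovalStatus.Pending);

var approved = state.Approve("user:alice", requestedAt.AddMinutes(5), "Looks good.");
// approved.Status == PackRunApprovalStatus.Approved; the original instance stays Pending.

// A second transition on the already-approved instance would throw
// InvalidOperationException, because Transition only accepts Pending states:
// approved.Reject("user:bob", requestedAt.AddMinutes(6));
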
b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunExecutionContext.cs @@ -1,22 +1,22 @@ -using StellaOps.TaskRunner.Core.Planning; - -namespace StellaOps.TaskRunner.Core.Execution; - -public sealed class PackRunExecutionContext -{ - public PackRunExecutionContext(string runId, TaskPackPlan plan, DateTimeOffset requestedAt) - { - ArgumentException.ThrowIfNullOrWhiteSpace(runId); - ArgumentNullException.ThrowIfNull(plan); - - RunId = runId; - Plan = plan; - RequestedAt = requestedAt; - } - - public string RunId { get; } - - public TaskPackPlan Plan { get; } - - public DateTimeOffset RequestedAt { get; } -} +using StellaOps.TaskRunner.Core.Planning; + +namespace StellaOps.TaskRunner.Core.Execution; + +public sealed class PackRunExecutionContext +{ + public PackRunExecutionContext(string runId, TaskPackPlan plan, DateTimeOffset requestedAt) + { + ArgumentException.ThrowIfNullOrWhiteSpace(runId); + ArgumentNullException.ThrowIfNull(plan); + + RunId = runId; + Plan = plan; + RequestedAt = requestedAt; + } + + public string RunId { get; } + + public TaskPackPlan Plan { get; } + + public DateTimeOffset RequestedAt { get; } +} diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunProcessor.cs b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunProcessor.cs similarity index 97% rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunProcessor.cs rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunProcessor.cs index 211be386..b1bc0e09 100644 --- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunProcessor.cs +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunProcessor.cs @@ -1,84 +1,84 @@ -using Microsoft.Extensions.Logging; - -namespace StellaOps.TaskRunner.Core.Execution; - -public sealed class PackRunProcessor -{ - private readonly IPackRunApprovalStore approvalStore; - private readonly IPackRunNotificationPublisher notificationPublisher; - private readonly ILogger<PackRunProcessor> logger; - - public PackRunProcessor( - IPackRunApprovalStore approvalStore, - IPackRunNotificationPublisher notificationPublisher, - ILogger<PackRunProcessor> logger) - { - this.approvalStore = approvalStore ?? throw new ArgumentNullException(nameof(approvalStore)); - this.notificationPublisher = notificationPublisher ?? throw new ArgumentNullException(nameof(notificationPublisher)); - this.logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); - } - - public async Task<PackRunProcessorResult> ProcessNewRunAsync(PackRunExecutionContext context, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(context); - - var existing = await approvalStore.GetAsync(context.RunId, cancellationToken).ConfigureAwait(false); - PackRunApprovalCoordinator coordinator; - bool shouldResume; - - if (existing.Count > 0) - { - coordinator = PackRunApprovalCoordinator.Restore(context.Plan, existing, context.RequestedAt); - shouldResume = !coordinator.HasPendingApprovals; - logger.LogInformation("Run {RunId} approvals restored (pending: {Pending}).", context.RunId, coordinator.HasPendingApprovals); - } - else - { - coordinator = PackRunApprovalCoordinator.Create(context.Plan, context.RequestedAt); - await approvalStore.SaveAsync(context.RunId, coordinator.GetApprovals(), cancellationToken).ConfigureAwait(false); - - var approvalNotifications = coordinator.BuildNotifications(context.Plan); - foreach (var notification in approvalNotifications) - { - await notificationPublisher.PublishApprovalRequestedAsync(context.RunId, notification, cancellationToken).ConfigureAwait(false); - logger.LogInformation( - "Approval requested for run {RunId} gate {ApprovalId} requiring grants {Grants}.", - context.RunId, - notification.ApprovalId, - string.Join(",", notification.RequiredGrants)); - } - - var policyNotifications = coordinator.BuildPolicyNotifications(context.Plan); - foreach (var notification in policyNotifications) - { - await notificationPublisher.PublishPolicyGatePendingAsync(context.RunId, notification, cancellationToken).ConfigureAwait(false); - logger.LogDebug( - "Policy gate pending for run {RunId} step {StepId}.", - context.RunId, - notification.StepId); - } - - shouldResume = !coordinator.HasPendingApprovals; - } - - if (shouldResume) - { - logger.LogInformation("Run {RunId} has no approvals; proceeding immediately.", context.RunId); - } - - return new PackRunProcessorResult(coordinator, shouldResume); - } - - public async Task<PackRunApprovalCoordinator> RestoreAsync(PackRunExecutionContext context, CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(context); - - var states = await approvalStore.GetAsync(context.RunId, cancellationToken).ConfigureAwait(false); - if (states.Count == 0) - { - return PackRunApprovalCoordinator.Create(context.Plan, context.RequestedAt); - } - - return PackRunApprovalCoordinator.Restore(context.Plan, states, context.RequestedAt); - } -} +using Microsoft.Extensions.Logging; + +namespace StellaOps.TaskRunner.Core.Execution; + +public sealed class PackRunProcessor +{ + private readonly IPackRunApprovalStore approvalStore; + private readonly IPackRunNotificationPublisher notificationPublisher; + private readonly ILogger<PackRunProcessor> logger; + + public PackRunProcessor( + IPackRunApprovalStore approvalStore, + IPackRunNotificationPublisher notificationPublisher, + ILogger<PackRunProcessor> logger) + { + this.approvalStore = approvalStore ?? throw new ArgumentNullException(nameof(approvalStore)); + this.notificationPublisher = notificationPublisher ?? throw new ArgumentNullException(nameof(notificationPublisher)); + this.logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + } + + public async Task<PackRunProcessorResult> ProcessNewRunAsync(PackRunExecutionContext context, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(context); + + var existing = await approvalStore.GetAsync(context.RunId, cancellationToken).ConfigureAwait(false); + PackRunApprovalCoordinator coordinator; + bool shouldResume; + + if (existing.Count > 0) + { + coordinator = PackRunApprovalCoordinator.Restore(context.Plan, existing, context.RequestedAt); + shouldResume = !coordinator.HasPendingApprovals; + logger.LogInformation("Run {RunId} approvals restored (pending: {Pending}).", context.RunId, coordinator.HasPendingApprovals); + } + else + { + coordinator = PackRunApprovalCoordinator.Create(context.Plan, context.RequestedAt); + await approvalStore.SaveAsync(context.RunId, coordinator.GetApprovals(), cancellationToken).ConfigureAwait(false); + + var approvalNotifications = coordinator.BuildNotifications(context.Plan); + foreach (var notification in approvalNotifications) + { + await notificationPublisher.PublishApprovalRequestedAsync(context.RunId, notification, cancellationToken).ConfigureAwait(false); + logger.LogInformation( + "Approval requested for run {RunId} gate {ApprovalId} requiring grants {Grants}.", + context.RunId, + notification.ApprovalId, + string.Join(",", notification.RequiredGrants)); + } + + var policyNotifications = coordinator.BuildPolicyNotifications(context.Plan); + foreach (var notification in policyNotifications) + { + await notificationPublisher.PublishPolicyGatePendingAsync(context.RunId, notification, cancellationToken).ConfigureAwait(false); + logger.LogDebug( + "Policy gate pending for run {RunId} step {StepId}.", + context.RunId, + notification.StepId); + } + + shouldResume = !coordinator.HasPendingApprovals; + } + + if (shouldResume) + { + logger.LogInformation("Run {RunId} has no approvals; proceeding immediately.", context.RunId); + } + + return new PackRunProcessorResult(coordinator, shouldResume); + } + + public async Task<PackRunApprovalCoordinator> RestoreAsync(PackRunExecutionContext context, CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(context); + + var states = await approvalStore.GetAsync(context.RunId, cancellationToken).ConfigureAwait(false); + if (states.Count == 0) + { + return PackRunApprovalCoordinator.Create(context.Plan, context.RequestedAt); + } + + return PackRunApprovalCoordinator.Restore(context.Plan, states, context.RequestedAt); + } +} diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunProcessorResult.cs b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunProcessorResult.cs similarity index 97% rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunProcessorResult.cs rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunProcessorResult.cs index a2d25712..c6374d2c 100644 --- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunProcessorResult.cs +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Execution/PackRunProcessorResult.cs @@ -1,5 +1,5 @@ -namespace StellaOps.TaskRunner.Core.Execution; - -public sealed record PackRunProcessorResult( - PackRunApprovalCoordinator ApprovalCoordinator, - bool ShouldResumeImmediately); +namespace StellaOps.TaskRunner.Core.Execution; + +public sealed record PackRunProcessorResult( + PackRunApprovalCoordinator ApprovalCoordinator, + bool 
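
A sketch of how a host might drive PackRunProcessor, under the assumption that IPackRunApprovalStore and IPackRunNotificationPublisher declare exactly the members called above (GetAsync/SaveAsync and the two Publish*Async methods); the real contracts may carry more members, and the class names below (InMemoryApprovalStore, NoOpNotificationPublisher, PackRunProcessorExample) are hypothetical.

using System.Collections.Concurrent;
using Microsoft.Extensions.Logging.Abstractions;
using StellaOps.TaskRunner.Core.Execution;
using StellaOps.TaskRunner.Core.Planning;

// In-memory stand-in for the approval store, keyed by run id.
internal sealed class InMemoryApprovalStore : IPackRunApprovalStore
{
    private readonly ConcurrentDictionary<string, IReadOnlyList<PackRunApprovalState>> store = new();

    public Task<IReadOnlyList<PackRunApprovalState>> GetAsync(string runId, CancellationToken cancellationToken)
        => Task.FromResult<IReadOnlyList<PackRunApprovalState>>(
            store.TryGetValue(runId, out var states) ? states : Array.Empty<PackRunApprovalState>());

    public Task SaveAsync(string runId, IReadOnlyList<PackRunApprovalState> approvals, CancellationToken cancellationToken)
    {
        store[runId] = approvals;
        return Task.CompletedTask;
    }
}

// No-op publisher; a real host would forward these to its notification channel.
internal sealed class NoOpNotificationPublisher : IPackRunNotificationPublisher
{
    public Task PublishApprovalRequestedAsync(string runId, ApprovalNotification notification, CancellationToken cancellationToken)
        => Task.CompletedTask;

    public Task PublishPolicyGatePendingAsync(string runId, PolicyGateNotification notification, CancellationToken cancellationToken)
        => Task.CompletedTask;
}

internal static class PackRunProcessorExample
{
    // "run-001" is a sample run id; the plan would come from the planner.
    public static async Task<bool> RunAsync(TaskPackPlan plan)
    {
        var processor = new PackRunProcessor(
            new InMemoryApprovalStore(),
            new NoOpNotificationPublisher(),
            NullLogger<PackRunProcessor>.Instance);

        var context = new PackRunExecutionContext("run-001", plan, DateTimeOffset.UtcNow);
        var result = await processor.ProcessNewRunAsync(context, CancellationToken.None);
        return result.ShouldResumeImmediately; // true when no approvals remain pending
    }
}
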
ShouldResumeImmediately); diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Expressions/TaskPackExpressions.cs b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Expressions/TaskPackExpressions.cs similarity index 96% rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Expressions/TaskPackExpressions.cs rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Expressions/TaskPackExpressions.cs index aa192e01..738a11cc 100644 --- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Expressions/TaskPackExpressions.cs +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Expressions/TaskPackExpressions.cs @@ -1,596 +1,596 @@ -using System.Collections.Immutable; -using System.Text.Json.Nodes; -using System.Text.RegularExpressions; - -namespace StellaOps.TaskRunner.Core.Expressions; - -internal static class TaskPackExpressions -{ - private static readonly Regex ExpressionPattern = new("^\\s*\\{\\{(.+)\\}\\}\\s*$", RegexOptions.Compiled | RegexOptions.CultureInvariant); - private static readonly Regex ComparisonPattern = new("^(?<left>.+?)\\s*(?<op>==|!=)\\s*(?<right>.+)$", RegexOptions.Compiled | RegexOptions.CultureInvariant); - private static readonly Regex InPattern = new("^(?<left>.+?)\\s+in\\s+(?<right>.+)$", RegexOptions.Compiled | RegexOptions.CultureInvariant); - - public static bool TryEvaluateBoolean(string? candidate, TaskPackExpressionContext context, out bool value, out string? error) - { - value = false; - error = null; - - if (string.IsNullOrWhiteSpace(candidate)) - { - value = true; - return true; - } - - if (!TryExtractExpression(candidate, out var expression)) - { - return TryParseBooleanLiteral(candidate.Trim(), out value, out error); - } - - expression = expression.Trim(); - return TryEvaluateBooleanInternal(expression, context, out value, out error); - } - - public static TaskPackValueResolution EvaluateValue(JsonNode? node, TaskPackExpressionContext context) - { - if (node is null) - { - return TaskPackValueResolution.FromValue(null); - } - - if (node is JsonValue valueNode && valueNode.TryGetValue(out string? stringValue)) - { - if (!TryExtractExpression(stringValue, out var expression)) - { - return TaskPackValueResolution.FromValue(valueNode); - } - - var trimmed = expression.Trim(); - return EvaluateExpression(trimmed, context); - } - - return TaskPackValueResolution.FromValue(node); - } - - public static TaskPackValueResolution EvaluateString(string value, TaskPackExpressionContext context) - { - if (!TryExtractExpression(value, out var expression)) - { - return TaskPackValueResolution.FromValue(JsonValue.Create(value)); - } - - return EvaluateExpression(expression.Trim(), context); - } - - private static bool TryEvaluateBooleanInternal(string expression, TaskPackExpressionContext context, out bool result, out string? 
error) - { - result = false; - error = null; - - if (TrySplitTopLevel(expression, "||", out var left, out var right) || - TrySplitTopLevel(expression, " or ", out left, out right)) - { - if (!TryEvaluateBooleanInternal(left, context, out var leftValue, out error)) - { - return false; - } - - if (leftValue) - { - result = true; - return true; - } - - if (!TryEvaluateBooleanInternal(right, context, out var rightValue, out error)) - { - return false; - } - - result = rightValue; - return true; - } - - if (TrySplitTopLevel(expression, "&&", out left, out right) || - TrySplitTopLevel(expression, " and ", out left, out right)) - { - if (!TryEvaluateBooleanInternal(left, context, out var leftValue, out error)) - { - return false; - } - - if (!leftValue) - { - result = false; - return true; - } - - if (!TryEvaluateBooleanInternal(right, context, out var rightValue, out error)) - { - return false; - } - - result = rightValue; - return true; - } - - if (expression.StartsWith("not ", StringComparison.Ordinal)) - { - var inner = expression["not ".Length..].Trim(); - if (!TryEvaluateBooleanInternal(inner, context, out var innerValue, out error)) - { - return false; - } - - result = !innerValue; - return true; - } - - if (TryEvaluateComparison(expression, context, out result, out error)) - { - return error is null; - } - - var resolution = EvaluateExpression(expression, context); - if (!resolution.Resolved) - { - error = resolution.Error ?? $"Expression '{expression}' requires runtime evaluation."; - return false; - } - - result = ToBoolean(resolution.Value); - return true; - } - - private static bool TryEvaluateComparison(string expression, TaskPackExpressionContext context, out bool value, out string? error) - { - value = false; - error = null; - - var comparisonMatch = ComparisonPattern.Match(expression); - if (comparisonMatch.Success) - { - var left = comparisonMatch.Groups["left"].Value.Trim(); - var op = comparisonMatch.Groups["op"].Value; - var right = comparisonMatch.Groups["right"].Value.Trim(); - - var leftResolution = EvaluateOperand(left, context); - if (!leftResolution.IsValid(out error)) - { - return false; - } - - var rightResolution = EvaluateOperand(right, context); - if (!rightResolution.IsValid(out error)) - { - return false; - } - - if (!leftResolution.TryGetValue(out var leftValue, out error) || - !rightResolution.TryGetValue(out var rightValue, out error)) - { - return false; - } - - value = CompareNodes(leftValue, rightValue, op == "=="); - return true; - } - - var inMatch = InPattern.Match(expression); - if (inMatch.Success) - { - var member = inMatch.Groups["left"].Value.Trim(); - var collection = inMatch.Groups["right"].Value.Trim(); - - var memberResolution = EvaluateOperand(member, context); - if (!memberResolution.IsValid(out error)) - { - return false; - } - - var collectionResolution = EvaluateOperand(collection, context); - if (!collectionResolution.IsValid(out error)) - { - return false; - } - - if (!memberResolution.TryGetValue(out var memberValue, out error) || - !collectionResolution.TryGetValue(out var collectionValue, out error)) - { - return false; - } - - value = EvaluateMembership(memberValue, collectionValue); - return true; - } - - return false; - } - - private static OperandResolution EvaluateOperand(string expression, TaskPackExpressionContext context) - { - if (TryParseStringLiteral(expression, out var literal)) - { - return OperandResolution.FromValue(JsonValue.Create(literal)); - } - - if (bool.TryParse(expression, out var boolLiteral)) - { - return 
OperandResolution.FromValue(JsonValue.Create(boolLiteral)); - } - - if (double.TryParse(expression, System.Globalization.NumberStyles.Float | System.Globalization.NumberStyles.AllowThousands, System.Globalization.CultureInfo.InvariantCulture, out var numberLiteral)) - { - return OperandResolution.FromValue(JsonValue.Create(numberLiteral)); - } - - var resolution = EvaluateExpression(expression, context); - if (!resolution.Resolved) - { - if (resolution.RequiresRuntimeValue && resolution.Error is null) - { - return OperandResolution.FromRuntime(expression); - } - - return OperandResolution.FromError(resolution.Error ?? $"Expression '{expression}' could not be resolved."); - } - - return OperandResolution.FromValue(resolution.Value); - } - - private static TaskPackValueResolution EvaluateExpression(string expression, TaskPackExpressionContext context) - { - if (!TryResolvePath(expression, context, out var resolved, out var requiresRuntime, out var error)) - { - return TaskPackValueResolution.FromError(expression, error ?? $"Failed to resolve expression '{expression}'."); - } - - if (requiresRuntime) - { - return TaskPackValueResolution.FromDeferred(expression); - } - - return TaskPackValueResolution.FromValue(resolved); - } - - private static bool TryResolvePath(string expression, TaskPackExpressionContext context, out JsonNode? value, out bool requiresRuntime, out string? error) - { - value = null; - error = null; - requiresRuntime = false; - - if (string.IsNullOrWhiteSpace(expression)) - { - error = "Expression cannot be empty."; - return false; - } - - var segments = expression.Split('.', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); - if (segments.Length == 0) - { - error = $"Expression '{expression}' is invalid."; - return false; - } - - var root = segments[0]; - - switch (root) - { - case "inputs": - if (segments.Length == 1) - { - error = "Expression must reference a specific input (e.g., inputs.example)."; - return false; - } - - if (!context.Inputs.TryGetValue(segments[1], out var current)) - { - error = $"Input '{segments[1]}' was not supplied."; - return false; - } - - value = Traverse(current, segments, startIndex: 2); - return true; - - case "item": - if (context.CurrentItem is null) - { - error = "Expression references 'item' outside of a map iteration."; - return false; - } - - value = Traverse(context.CurrentItem, segments, startIndex: 1); - return true; - - case "steps": - if (segments.Length < 2) - { - error = "Step expressions must specify a step identifier (e.g., steps.plan.outputs.value)."; - return false; - } - - var stepId = segments[1]; - if (!context.StepExists(stepId)) - { - error = $"Step '{stepId}' referenced before it is defined."; - return false; - } - - requiresRuntime = true; - value = null; - return true; - - case "secrets": - if (segments.Length < 2) - { - error = "Secret expressions must specify a secret name (e.g., secrets.jiraToken)."; - return false; - } - - var secretName = segments[1]; - if (!context.SecretExists(secretName)) - { - error = $"Secret '{secretName}' is not declared in the manifest."; - return false; - } - - requiresRuntime = true; - value = null; - return true; - - default: - error = $"Expression '{expression}' references '{root}', supported roots are inputs, item, steps, and secrets."; - return false; - } - } - - private static JsonNode? Traverse(JsonNode? 
current, IReadOnlyList<string> segments, int startIndex) - { - for (var i = startIndex; i < segments.Count && current is not null; i++) - { - var segment = segments[i]; - if (current is JsonObject obj) - { - if (!obj.TryGetPropertyValue(segment, out current)) - { - current = null; - } - } - else if (current is JsonArray array) - { - current = TryGetArrayElement(array, segment); - } - else - { - current = null; - } - } - - return current; - } - - private static JsonNode? TryGetArrayElement(JsonArray array, string segment) - { - if (int.TryParse(segment, out var index) && index >= 0 && index < array.Count) - { - return array[index]; - } - - return null; - } - - private static bool TryExtractExpression(string candidate, out string expression) - { - var match = ExpressionPattern.Match(candidate); - if (!match.Success) - { - expression = candidate; - return false; - } - - expression = match.Groups[1].Value; - return true; - } - - private static bool TryParseBooleanLiteral(string value, out bool result, out string? error) - { - if (bool.TryParse(value, out result)) - { - error = null; - return true; - } - - error = $"Unable to parse boolean literal '{value}'."; - return false; - } - - private static bool TrySplitTopLevel(string expression, string token, out string left, out string right) - { - var inSingle = false; - var inDouble = false; - for (var i = 0; i <= expression.Length - token.Length; i++) - { - var c = expression[i]; - if (c == '\'' && !inDouble) - { - inSingle = !inSingle; - } - else if (c == '"' && !inSingle) - { - inDouble = !inDouble; - } - - if (inSingle || inDouble) - { - continue; - } - - if (expression.AsSpan(i, token.Length).SequenceEqual(token)) - { - left = expression[..i].Trim(); - right = expression[(i + token.Length)..].Trim(); - return true; - } - } - - left = string.Empty; - right = string.Empty; - return false; - } - - private static bool TryParseStringLiteral(string candidate, out string? literal) - { - literal = null; - if (candidate.Length >= 2) - { - if ((candidate[0] == '"' && candidate[^1] == '"') || - (candidate[0] == '\'' && candidate[^1] == '\'')) - { - literal = candidate[1..^1]; - return true; - } - } - - return false; - } - - private static bool CompareNodes(JsonNode? left, JsonNode? right, bool equality) - { - if (left is null && right is null) - { - return equality; - } - - if (left is null || right is null) - { - return !equality; - } - - var comparison = JsonNode.DeepEquals(left, right); - return equality ? comparison : !comparison; - } - - private static bool EvaluateMembership(JsonNode? member, JsonNode? collection) - { - if (collection is JsonArray array) - { - foreach (var element in array) - { - if (JsonNode.DeepEquals(member, element)) - { - return true; - } - } - - return false; - } - - if (collection is JsonValue value && value.TryGetValue(out string? text) && member is JsonValue memberValue && memberValue.TryGetValue(out string? memberText)) - { - return text?.Contains(memberText, StringComparison.Ordinal) ?? false; - } - - return false; - } - - private static bool ToBoolean(JsonNode? 
node) - { - if (node is null) - { - return false; - } - - if (node is JsonValue value) - { - if (value.TryGetValue<bool>(out var boolValue)) - { - return boolValue; - } - - if (value.TryGetValue<string>(out var stringValue)) - { - return !string.IsNullOrWhiteSpace(stringValue); - } - - if (value.TryGetValue<double>(out var number)) - { - return Math.Abs(number) > double.Epsilon; - } - } - - if (node is JsonArray array) - { - return array.Count > 0; - } - - if (node is JsonObject obj) - { - return obj.Count > 0; - } - - return true; - } - - private readonly record struct OperandResolution(JsonNode? Value, string? Error, bool RequiresRuntime) - { - public bool IsValid(out string? error) - { - error = Error; - return string.IsNullOrEmpty(Error); - } - - public bool TryGetValue(out JsonNode? value, out string? error) - { - if (RequiresRuntime) - { - error = "Expression requires runtime evaluation."; - value = null; - return false; - } - - value = Value; - error = Error; - return error is null; - } - - public static OperandResolution FromValue(JsonNode? value) - => new(value, null, false); - - public static OperandResolution FromRuntime(string expression) - => new(null, $"Expression '{expression}' requires runtime evaluation.", true); - - public static OperandResolution FromError(string error) - => new(null, error, false); - } -} - -internal readonly record struct TaskPackExpressionContext( - IReadOnlyDictionary<string, JsonNode?> Inputs, - ISet<string> KnownSteps, - ISet<string> KnownSecrets, - JsonNode? CurrentItem) -{ - public static TaskPackExpressionContext Create( - IReadOnlyDictionary<string, JsonNode?> inputs, - ISet<string> knownSteps, - ISet<string> knownSecrets) - => new(inputs, knownSteps, knownSecrets, null); - - public bool StepExists(string stepId) => KnownSteps.Contains(stepId); - - public void RegisterStep(string stepId) => KnownSteps.Add(stepId); - - public bool SecretExists(string secretName) => KnownSecrets.Contains(secretName); - - public TaskPackExpressionContext WithItem(JsonNode? item) => new(Inputs, KnownSteps, KnownSecrets, item); -} - -internal readonly record struct TaskPackValueResolution(bool Resolved, JsonNode? Value, string? Expression, string? Error, bool RequiresRuntimeValue) -{ - public static TaskPackValueResolution FromValue(JsonNode? value) - => new(true, value, null, null, false); - - public static TaskPackValueResolution FromDeferred(string expression) - => new(false, null, expression, null, true); - - public static TaskPackValueResolution FromError(string expression, string error) - => new(false, null, expression, error, false); -} +using System.Collections.Immutable; +using System.Text.Json.Nodes; +using System.Text.RegularExpressions; + +namespace StellaOps.TaskRunner.Core.Expressions; + +internal static class TaskPackExpressions +{ + private static readonly Regex ExpressionPattern = new("^\\s*\\{\\{(.+)\\}\\}\\s*$", RegexOptions.Compiled | RegexOptions.CultureInvariant); + private static readonly Regex ComparisonPattern = new("^(?<left>.+?)\\s*(?<op>==|!=)\\s*(?<right>.+)$", RegexOptions.Compiled | RegexOptions.CultureInvariant); + private static readonly Regex InPattern = new("^(?<left>.+?)\\s+in\\s+(?<right>.+)$", RegexOptions.Compiled | RegexOptions.CultureInvariant); + + public static bool TryEvaluateBoolean(string? candidate, TaskPackExpressionContext context, out bool value, out string? 
error) + { + value = false; + error = null; + + if (string.IsNullOrWhiteSpace(candidate)) + { + value = true; + return true; + } + + if (!TryExtractExpression(candidate, out var expression)) + { + return TryParseBooleanLiteral(candidate.Trim(), out value, out error); + } + + expression = expression.Trim(); + return TryEvaluateBooleanInternal(expression, context, out value, out error); + } + + public static TaskPackValueResolution EvaluateValue(JsonNode? node, TaskPackExpressionContext context) + { + if (node is null) + { + return TaskPackValueResolution.FromValue(null); + } + + if (node is JsonValue valueNode && valueNode.TryGetValue(out string? stringValue)) + { + if (!TryExtractExpression(stringValue, out var expression)) + { + return TaskPackValueResolution.FromValue(valueNode); + } + + var trimmed = expression.Trim(); + return EvaluateExpression(trimmed, context); + } + + return TaskPackValueResolution.FromValue(node); + } + + public static TaskPackValueResolution EvaluateString(string value, TaskPackExpressionContext context) + { + if (!TryExtractExpression(value, out var expression)) + { + return TaskPackValueResolution.FromValue(JsonValue.Create(value)); + } + + return EvaluateExpression(expression.Trim(), context); + } + + private static bool TryEvaluateBooleanInternal(string expression, TaskPackExpressionContext context, out bool result, out string? error) + { + result = false; + error = null; + + if (TrySplitTopLevel(expression, "||", out var left, out var right) || + TrySplitTopLevel(expression, " or ", out left, out right)) + { + if (!TryEvaluateBooleanInternal(left, context, out var leftValue, out error)) + { + return false; + } + + if (leftValue) + { + result = true; + return true; + } + + if (!TryEvaluateBooleanInternal(right, context, out var rightValue, out error)) + { + return false; + } + + result = rightValue; + return true; + } + + if (TrySplitTopLevel(expression, "&&", out left, out right) || + TrySplitTopLevel(expression, " and ", out left, out right)) + { + if (!TryEvaluateBooleanInternal(left, context, out var leftValue, out error)) + { + return false; + } + + if (!leftValue) + { + result = false; + return true; + } + + if (!TryEvaluateBooleanInternal(right, context, out var rightValue, out error)) + { + return false; + } + + result = rightValue; + return true; + } + + if (expression.StartsWith("not ", StringComparison.Ordinal)) + { + var inner = expression["not ".Length..].Trim(); + if (!TryEvaluateBooleanInternal(inner, context, out var innerValue, out error)) + { + return false; + } + + result = !innerValue; + return true; + } + + if (TryEvaluateComparison(expression, context, out result, out error)) + { + return error is null; + } + + var resolution = EvaluateExpression(expression, context); + if (!resolution.Resolved) + { + error = resolution.Error ?? $"Expression '{expression}' requires runtime evaluation."; + return false; + } + + result = ToBoolean(resolution.Value); + return true; + } + + private static bool TryEvaluateComparison(string expression, TaskPackExpressionContext context, out bool value, out string? 
error) + { + value = false; + error = null; + + var comparisonMatch = ComparisonPattern.Match(expression); + if (comparisonMatch.Success) + { + var left = comparisonMatch.Groups["left"].Value.Trim(); + var op = comparisonMatch.Groups["op"].Value; + var right = comparisonMatch.Groups["right"].Value.Trim(); + + var leftResolution = EvaluateOperand(left, context); + if (!leftResolution.IsValid(out error)) + { + return false; + } + + var rightResolution = EvaluateOperand(right, context); + if (!rightResolution.IsValid(out error)) + { + return false; + } + + if (!leftResolution.TryGetValue(out var leftValue, out error) || + !rightResolution.TryGetValue(out var rightValue, out error)) + { + return false; + } + + value = CompareNodes(leftValue, rightValue, op == "=="); + return true; + } + + var inMatch = InPattern.Match(expression); + if (inMatch.Success) + { + var member = inMatch.Groups["left"].Value.Trim(); + var collection = inMatch.Groups["right"].Value.Trim(); + + var memberResolution = EvaluateOperand(member, context); + if (!memberResolution.IsValid(out error)) + { + return false; + } + + var collectionResolution = EvaluateOperand(collection, context); + if (!collectionResolution.IsValid(out error)) + { + return false; + } + + if (!memberResolution.TryGetValue(out var memberValue, out error) || + !collectionResolution.TryGetValue(out var collectionValue, out error)) + { + return false; + } + + value = EvaluateMembership(memberValue, collectionValue); + return true; + } + + return false; + } + + private static OperandResolution EvaluateOperand(string expression, TaskPackExpressionContext context) + { + if (TryParseStringLiteral(expression, out var literal)) + { + return OperandResolution.FromValue(JsonValue.Create(literal)); + } + + if (bool.TryParse(expression, out var boolLiteral)) + { + return OperandResolution.FromValue(JsonValue.Create(boolLiteral)); + } + + if (double.TryParse(expression, System.Globalization.NumberStyles.Float | System.Globalization.NumberStyles.AllowThousands, System.Globalization.CultureInfo.InvariantCulture, out var numberLiteral)) + { + return OperandResolution.FromValue(JsonValue.Create(numberLiteral)); + } + + var resolution = EvaluateExpression(expression, context); + if (!resolution.Resolved) + { + if (resolution.RequiresRuntimeValue && resolution.Error is null) + { + return OperandResolution.FromRuntime(expression); + } + + return OperandResolution.FromError(resolution.Error ?? $"Expression '{expression}' could not be resolved."); + } + + return OperandResolution.FromValue(resolution.Value); + } + + private static TaskPackValueResolution EvaluateExpression(string expression, TaskPackExpressionContext context) + { + if (!TryResolvePath(expression, context, out var resolved, out var requiresRuntime, out var error)) + { + return TaskPackValueResolution.FromError(expression, error ?? $"Failed to resolve expression '{expression}'."); + } + + if (requiresRuntime) + { + return TaskPackValueResolution.FromDeferred(expression); + } + + return TaskPackValueResolution.FromValue(resolved); + } + + private static bool TryResolvePath(string expression, TaskPackExpressionContext context, out JsonNode? value, out bool requiresRuntime, out string? 
error) + { + value = null; + error = null; + requiresRuntime = false; + + if (string.IsNullOrWhiteSpace(expression)) + { + error = "Expression cannot be empty."; + return false; + } + + var segments = expression.Split('.', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries); + if (segments.Length == 0) + { + error = $"Expression '{expression}' is invalid."; + return false; + } + + var root = segments[0]; + + switch (root) + { + case "inputs": + if (segments.Length == 1) + { + error = "Expression must reference a specific input (e.g., inputs.example)."; + return false; + } + + if (!context.Inputs.TryGetValue(segments[1], out var current)) + { + error = $"Input '{segments[1]}' was not supplied."; + return false; + } + + value = Traverse(current, segments, startIndex: 2); + return true; + + case "item": + if (context.CurrentItem is null) + { + error = "Expression references 'item' outside of a map iteration."; + return false; + } + + value = Traverse(context.CurrentItem, segments, startIndex: 1); + return true; + + case "steps": + if (segments.Length < 2) + { + error = "Step expressions must specify a step identifier (e.g., steps.plan.outputs.value)."; + return false; + } + + var stepId = segments[1]; + if (!context.StepExists(stepId)) + { + error = $"Step '{stepId}' referenced before it is defined."; + return false; + } + + requiresRuntime = true; + value = null; + return true; + + case "secrets": + if (segments.Length < 2) + { + error = "Secret expressions must specify a secret name (e.g., secrets.jiraToken)."; + return false; + } + + var secretName = segments[1]; + if (!context.SecretExists(secretName)) + { + error = $"Secret '{secretName}' is not declared in the manifest."; + return false; + } + + requiresRuntime = true; + value = null; + return true; + + default: + error = $"Expression '{expression}' references '{root}', supported roots are inputs, item, steps, and secrets."; + return false; + } + } + + private static JsonNode? Traverse(JsonNode? current, IReadOnlyList<string> segments, int startIndex) + { + for (var i = startIndex; i < segments.Count && current is not null; i++) + { + var segment = segments[i]; + if (current is JsonObject obj) + { + if (!obj.TryGetPropertyValue(segment, out current)) + { + current = null; + } + } + else if (current is JsonArray array) + { + current = TryGetArrayElement(array, segment); + } + else + { + current = null; + } + } + + return current; + } + + private static JsonNode? TryGetArrayElement(JsonArray array, string segment) + { + if (int.TryParse(segment, out var index) && index >= 0 && index < array.Count) + { + return array[index]; + } + + return null; + } + + private static bool TryExtractExpression(string candidate, out string expression) + { + var match = ExpressionPattern.Match(candidate); + if (!match.Success) + { + expression = candidate; + return false; + } + + expression = match.Groups[1].Value; + return true; + } + + private static bool TryParseBooleanLiteral(string value, out bool result, out string? 
error) + { + if (bool.TryParse(value, out result)) + { + error = null; + return true; + } + + error = $"Unable to parse boolean literal '{value}'."; + return false; + } + + private static bool TrySplitTopLevel(string expression, string token, out string left, out string right) + { + var inSingle = false; + var inDouble = false; + for (var i = 0; i <= expression.Length - token.Length; i++) + { + var c = expression[i]; + if (c == '\'' && !inDouble) + { + inSingle = !inSingle; + } + else if (c == '"' && !inSingle) + { + inDouble = !inDouble; + } + + if (inSingle || inDouble) + { + continue; + } + + if (expression.AsSpan(i, token.Length).SequenceEqual(token)) + { + left = expression[..i].Trim(); + right = expression[(i + token.Length)..].Trim(); + return true; + } + } + + left = string.Empty; + right = string.Empty; + return false; + } + + private static bool TryParseStringLiteral(string candidate, out string? literal) + { + literal = null; + if (candidate.Length >= 2) + { + if ((candidate[0] == '"' && candidate[^1] == '"') || + (candidate[0] == '\'' && candidate[^1] == '\'')) + { + literal = candidate[1..^1]; + return true; + } + } + + return false; + } + + private static bool CompareNodes(JsonNode? left, JsonNode? right, bool equality) + { + if (left is null && right is null) + { + return equality; + } + + if (left is null || right is null) + { + return !equality; + } + + var comparison = JsonNode.DeepEquals(left, right); + return equality ? comparison : !comparison; + } + + private static bool EvaluateMembership(JsonNode? member, JsonNode? collection) + { + if (collection is JsonArray array) + { + foreach (var element in array) + { + if (JsonNode.DeepEquals(member, element)) + { + return true; + } + } + + return false; + } + + if (collection is JsonValue value && value.TryGetValue(out string? text) && member is JsonValue memberValue && memberValue.TryGetValue(out string? memberText)) + { + return text?.Contains(memberText, StringComparison.Ordinal) ?? false; + } + + return false; + } + + private static bool ToBoolean(JsonNode? node) + { + if (node is null) + { + return false; + } + + if (node is JsonValue value) + { + if (value.TryGetValue<bool>(out var boolValue)) + { + return boolValue; + } + + if (value.TryGetValue<string>(out var stringValue)) + { + return !string.IsNullOrWhiteSpace(stringValue); + } + + if (value.TryGetValue<double>(out var number)) + { + return Math.Abs(number) > double.Epsilon; + } + } + + if (node is JsonArray array) + { + return array.Count > 0; + } + + if (node is JsonObject obj) + { + return obj.Count > 0; + } + + return true; + } + + private readonly record struct OperandResolution(JsonNode? Value, string? Error, bool RequiresRuntime) + { + public bool IsValid(out string? error) + { + error = Error; + return string.IsNullOrEmpty(Error); + } + + public bool TryGetValue(out JsonNode? value, out string? error) + { + if (RequiresRuntime) + { + error = "Expression requires runtime evaluation."; + value = null; + return false; + } + + value = Value; + error = Error; + return error is null; + } + + public static OperandResolution FromValue(JsonNode? 
value) + => new(value, null, false); + + public static OperandResolution FromRuntime(string expression) + => new(null, $"Expression '{expression}' requires runtime evaluation.", true); + + public static OperandResolution FromError(string error) + => new(null, error, false); + } +} + +internal readonly record struct TaskPackExpressionContext( + IReadOnlyDictionary<string, JsonNode?> Inputs, + ISet<string> KnownSteps, + ISet<string> KnownSecrets, + JsonNode? CurrentItem) +{ + public static TaskPackExpressionContext Create( + IReadOnlyDictionary<string, JsonNode?> inputs, + ISet<string> knownSteps, + ISet<string> knownSecrets) + => new(inputs, knownSteps, knownSecrets, null); + + public bool StepExists(string stepId) => KnownSteps.Contains(stepId); + + public void RegisterStep(string stepId) => KnownSteps.Add(stepId); + + public bool SecretExists(string secretName) => KnownSecrets.Contains(secretName); + + public TaskPackExpressionContext WithItem(JsonNode? item) => new(Inputs, KnownSteps, KnownSecrets, item); +} + +internal readonly record struct TaskPackValueResolution(bool Resolved, JsonNode? Value, string? Expression, string? Error, bool RequiresRuntimeValue) +{ + public static TaskPackValueResolution FromValue(JsonNode? value) + => new(true, value, null, null, false); + + public static TaskPackValueResolution FromDeferred(string expression) + => new(false, null, expression, null, true); + + public static TaskPackValueResolution FromError(string expression, string error) + => new(false, null, expression, error, false); +} diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Planning/TaskPackPlan.cs b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Planning/TaskPackPlan.cs similarity index 96% rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Planning/TaskPackPlan.cs rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Planning/TaskPackPlan.cs index daf0981d..49d32316 100644 --- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Planning/TaskPackPlan.cs +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Planning/TaskPackPlan.cs @@ -1,95 +1,95 @@ -using System.Collections.Immutable; -using System.Text.Json.Nodes; -using StellaOps.TaskRunner.Core.Expressions; - -namespace StellaOps.TaskRunner.Core.Planning; - -public sealed class TaskPackPlan -{ - public TaskPackPlan( - TaskPackPlanMetadata metadata, - IReadOnlyDictionary<string, JsonNode?> inputs, - IReadOnlyList<TaskPackPlanStep> steps, - string hash, - IReadOnlyList<TaskPackPlanApproval> approvals, - IReadOnlyList<TaskPackPlanSecret> secrets, - IReadOnlyList<TaskPackPlanOutput> outputs) - { - Metadata = metadata; - Inputs = inputs; - Steps = steps; - Hash = hash; - Approvals = approvals; - Secrets = secrets; - Outputs = outputs; - } - - public TaskPackPlanMetadata Metadata { get; } - - public IReadOnlyDictionary<string, JsonNode?> Inputs { get; } - - public IReadOnlyList<TaskPackPlanStep> Steps { get; } - - public string Hash { get; } - - public IReadOnlyList<TaskPackPlanApproval> Approvals { get; } - - public IReadOnlyList<TaskPackPlanSecret> Secrets { get; } - - public IReadOnlyList<TaskPackPlanOutput> Outputs { get; } -} - -public sealed record TaskPackPlanMetadata(string Name, string Version, string? Description, IReadOnlyList<string> Tags); - -public sealed record TaskPackPlanStep( - string Id, - string TemplateId, - string? Name, - string Type, - bool Enabled, - string? Uses, - IReadOnlyDictionary<string, TaskPackPlanParameterValue>? Parameters, - string? 
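
A minimal sketch of how the expression helpers above evaluate plan-time conditions. TaskPackExpressions and TaskPackExpressionContext are internal to the Core assembly, so this would only compile inside that assembly (or a test project granted InternalsVisibleTo, which is an assumption); the input names and values are sample data.

using System.Text.Json.Nodes;
using StellaOps.TaskRunner.Core.Expressions;

var inputs = new Dictionary<string, JsonNode?>(StringComparer.Ordinal)
{
    ["environment"] = JsonValue.Create("prod"),
    ["regions"] = new JsonArray("eu-west-1", "us-east-1"),
};

var context = TaskPackExpressionContext.Create(
    inputs,
    knownSteps: new HashSet<string>(StringComparer.Ordinal) { "plan" },
    knownSecrets: new HashSet<string>(StringComparer.Ordinal) { "jiraToken" });

// Comparison and membership operators resolve against plan-time inputs.
TaskPackExpressions.TryEvaluateBoolean("{{ inputs.environment == 'prod' }}", context, out var isProd, out _);
// isProd == true

TaskPackExpressions.TryEvaluateBoolean("{{ 'eu-west-1' in inputs.regions }}", context, out var inEurope, out _);
// inEurope == true

// steps.* and secrets.* roots are validated but deferred to runtime, so boolean
// evaluation reports an error instead of guessing a value.
var ok = TaskPackExpressions.TryEvaluateBoolean("{{ steps.plan.outputs.approved }}", context, out _, out var error);
// ok == false; error states the expression requires runtime evaluation.
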
ApprovalId, - string? GateMessage, - IReadOnlyList<TaskPackPlanStep>? Children); - -public sealed record TaskPackPlanParameterValue( - JsonNode? Value, - string? Expression, - string? Error, - bool RequiresRuntimeValue) -{ - internal static TaskPackPlanParameterValue FromResolution(TaskPackValueResolution resolution) - => new(resolution.Value, resolution.Expression, resolution.Error, resolution.RequiresRuntimeValue); -} - -public sealed record TaskPackPlanApproval( - string Id, - IReadOnlyList<string> Grants, - string? ExpiresAfter, - string? ReasonTemplate); - -public sealed record TaskPackPlanSecret(string Name, string Scope, string? Description); - -public sealed record TaskPackPlanOutput( - string Name, - string Type, - TaskPackPlanParameterValue? Path, - TaskPackPlanParameterValue? Expression); - -public sealed class TaskPackPlanResult -{ - public TaskPackPlanResult(TaskPackPlan? plan, ImmutableArray<TaskPackPlanError> errors) - { - Plan = plan; - Errors = errors; - } - - public TaskPackPlan? Plan { get; } - - public ImmutableArray<TaskPackPlanError> Errors { get; } - - public bool Success => Plan is not null && Errors.IsDefaultOrEmpty; -} - -public sealed record TaskPackPlanError(string Path, string Message); +using System.Collections.Immutable; +using System.Text.Json.Nodes; +using StellaOps.TaskRunner.Core.Expressions; + +namespace StellaOps.TaskRunner.Core.Planning; + +public sealed class TaskPackPlan +{ + public TaskPackPlan( + TaskPackPlanMetadata metadata, + IReadOnlyDictionary<string, JsonNode?> inputs, + IReadOnlyList<TaskPackPlanStep> steps, + string hash, + IReadOnlyList<TaskPackPlanApproval> approvals, + IReadOnlyList<TaskPackPlanSecret> secrets, + IReadOnlyList<TaskPackPlanOutput> outputs) + { + Metadata = metadata; + Inputs = inputs; + Steps = steps; + Hash = hash; + Approvals = approvals; + Secrets = secrets; + Outputs = outputs; + } + + public TaskPackPlanMetadata Metadata { get; } + + public IReadOnlyDictionary<string, JsonNode?> Inputs { get; } + + public IReadOnlyList<TaskPackPlanStep> Steps { get; } + + public string Hash { get; } + + public IReadOnlyList<TaskPackPlanApproval> Approvals { get; } + + public IReadOnlyList<TaskPackPlanSecret> Secrets { get; } + + public IReadOnlyList<TaskPackPlanOutput> Outputs { get; } +} + +public sealed record TaskPackPlanMetadata(string Name, string Version, string? Description, IReadOnlyList<string> Tags); + +public sealed record TaskPackPlanStep( + string Id, + string TemplateId, + string? Name, + string Type, + bool Enabled, + string? Uses, + IReadOnlyDictionary<string, TaskPackPlanParameterValue>? Parameters, + string? ApprovalId, + string? GateMessage, + IReadOnlyList<TaskPackPlanStep>? Children); + +public sealed record TaskPackPlanParameterValue( + JsonNode? Value, + string? Expression, + string? Error, + bool RequiresRuntimeValue) +{ + internal static TaskPackPlanParameterValue FromResolution(TaskPackValueResolution resolution) + => new(resolution.Value, resolution.Expression, resolution.Error, resolution.RequiresRuntimeValue); +} + +public sealed record TaskPackPlanApproval( + string Id, + IReadOnlyList<string> Grants, + string? ExpiresAfter, + string? ReasonTemplate); + +public sealed record TaskPackPlanSecret(string Name, string Scope, string? Description); + +public sealed record TaskPackPlanOutput( + string Name, + string Type, + TaskPackPlanParameterValue? Path, + TaskPackPlanParameterValue? Expression); + +public sealed class TaskPackPlanResult +{ + public TaskPackPlanResult(TaskPackPlan? 
plan, ImmutableArray<TaskPackPlanError> errors) + { + Plan = plan; + Errors = errors; + } + + public TaskPackPlan? Plan { get; } + + public ImmutableArray<TaskPackPlanError> Errors { get; } + + public bool Success => Plan is not null && Errors.IsDefaultOrEmpty; +} + +public sealed record TaskPackPlanError(string Path, string Message); diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Planning/TaskPackPlanHasher.cs b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Planning/TaskPackPlanHasher.cs similarity index 97% rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Planning/TaskPackPlanHasher.cs rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Planning/TaskPackPlanHasher.cs index b6705810..c5f5b3e6 100644 --- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Planning/TaskPackPlanHasher.cs +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Planning/TaskPackPlanHasher.cs @@ -1,112 +1,112 @@ -using System.Linq; -using System.Security.Cryptography; -using System.Text; -using System.Text.Json.Nodes; -using StellaOps.TaskRunner.Core.Serialization; - -namespace StellaOps.TaskRunner.Core.Planning; - -internal static class TaskPackPlanHasher -{ - public static string ComputeHash( - TaskPackPlanMetadata metadata, - IReadOnlyDictionary<string, JsonNode?> inputs, - IReadOnlyList<TaskPackPlanStep> steps, - IReadOnlyList<TaskPackPlanApproval> approvals, - IReadOnlyList<TaskPackPlanSecret> secrets, - IReadOnlyList<TaskPackPlanOutput> outputs) - { - var canonical = new CanonicalPlan( - new CanonicalMetadata(metadata.Name, metadata.Version, metadata.Description, metadata.Tags), - inputs.ToDictionary(kvp => kvp.Key, kvp => kvp.Value, StringComparer.Ordinal), - steps.Select(ToCanonicalStep).ToList(), - approvals - .OrderBy(a => a.Id, StringComparer.Ordinal) - .Select(a => new CanonicalApproval(a.Id, a.Grants.OrderBy(g => g, StringComparer.Ordinal).ToList(), a.ExpiresAfter, a.ReasonTemplate)) - .ToList(), - secrets - .OrderBy(s => s.Name, StringComparer.Ordinal) - .Select(s => new CanonicalSecret(s.Name, s.Scope, s.Description)) - .ToList(), - outputs - .OrderBy(o => o.Name, StringComparer.Ordinal) - .Select(ToCanonicalOutput) - .ToList()); - - var json = CanonicalJson.Serialize(canonical); - using var sha256 = SHA256.Create(); - var hashBytes = sha256.ComputeHash(Encoding.UTF8.GetBytes(json)); - return ConvertToHex(hashBytes); - } - - private static string ConvertToHex(byte[] hashBytes) - { - var builder = new StringBuilder(hashBytes.Length * 2); - foreach (var b in hashBytes) - { - builder.Append(b.ToString("x2", System.Globalization.CultureInfo.InvariantCulture)); - } - - return builder.ToString(); - } - - private static CanonicalPlanStep ToCanonicalStep(TaskPackPlanStep step) - => new( - step.Id, - step.TemplateId, - step.Name, - step.Type, - step.Enabled, - step.Uses, - step.Parameters?.ToDictionary( - kvp => kvp.Key, - kvp => new CanonicalParameter(kvp.Value.Value, kvp.Value.Expression, kvp.Value.Error, kvp.Value.RequiresRuntimeValue), - StringComparer.Ordinal), - step.ApprovalId, - step.GateMessage, - step.Children?.Select(ToCanonicalStep).ToList()); - - private sealed record CanonicalPlan( - CanonicalMetadata Metadata, - IDictionary<string, JsonNode?> Inputs, - IReadOnlyList<CanonicalPlanStep> Steps, - IReadOnlyList<CanonicalApproval> Approvals, - IReadOnlyList<CanonicalSecret> Secrets, - IReadOnlyList<CanonicalOutput> Outputs); - - private sealed record CanonicalMetadata(string Name, string Version, string? 
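
To tie the plan model back to the approval flow earlier in this patch, a sketch that hand-builds a one-gate TaskPackPlan and drives PackRunApprovalCoordinator; the pack name, approval id, grant, and hash are sample values, and real plans come from the planner rather than being constructed by hand.

using System.Text.Json.Nodes;
using StellaOps.TaskRunner.Core.Execution;
using StellaOps.TaskRunner.Core.Planning;

var plan = new TaskPackPlan(
    metadata: new TaskPackPlanMetadata("demo-pack", "1.0.0", "Sample pack", Array.Empty<string>()),
    inputs: new Dictionary<string, JsonNode?>(StringComparer.Ordinal),
    steps: new[]
    {
        new TaskPackPlanStep(
            Id: "gate-security",
            TemplateId: "gate-security",
            Name: "Security sign-off",
            Type: "gate.approval",
            Enabled: true,
            Uses: null,
            Parameters: null,
            ApprovalId: "security-review",
            GateMessage: "Security sign-off required.",
            Children: null),
    },
    hash: "sample-hash",
    approvals: new[] { new TaskPackPlanApproval("security-review", new[] { "packs.approve" }, "PT24H", null) },
    secrets: Array.Empty<TaskPackPlanSecret>(),
    outputs: Array.Empty<TaskPackPlanOutput>());

var coordinator = PackRunApprovalCoordinator.Create(plan, DateTimeOffset.UtcNow);
// coordinator.HasPendingApprovals == true, and BuildNotifications(plan) yields one
// ApprovalNotification for "security-review" listing the "packs.approve" grant.

var outcome = coordinator.Approve("security-review", "user:alice", DateTimeOffset.UtcNow, "Reviewed.");
// outcome.ShouldResumeRun flips to true once every approval in the run is approved.
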
Description, IReadOnlyList<string> Tags); - - private sealed record CanonicalPlanStep( - string Id, - string TemplateId, - string? Name, - string Type, - bool Enabled, - string? Uses, - IDictionary<string, CanonicalParameter>? Parameters, - string? ApprovalId, - string? GateMessage, - IReadOnlyList<CanonicalPlanStep>? Children); - - private sealed record CanonicalApproval(string Id, IReadOnlyList<string> Grants, string? ExpiresAfter, string? ReasonTemplate); - - private sealed record CanonicalSecret(string Name, string Scope, string? Description); - - private sealed record CanonicalParameter(JsonNode? Value, string? Expression, string? Error, bool RequiresRuntimeValue); - - private sealed record CanonicalOutput( - string Name, - string Type, - CanonicalParameter? Path, - CanonicalParameter? Expression); - - private static CanonicalOutput ToCanonicalOutput(TaskPackPlanOutput output) - => new( - output.Name, - output.Type, - ToCanonicalParameter(output.Path), - ToCanonicalParameter(output.Expression)); - - private static CanonicalParameter? ToCanonicalParameter(TaskPackPlanParameterValue? value) - => value is null ? null : new CanonicalParameter(value.Value, value.Expression, value.Error, value.RequiresRuntimeValue); -} +using System.Linq; +using System.Security.Cryptography; +using System.Text; +using System.Text.Json.Nodes; +using StellaOps.TaskRunner.Core.Serialization; + +namespace StellaOps.TaskRunner.Core.Planning; + +internal static class TaskPackPlanHasher +{ + public static string ComputeHash( + TaskPackPlanMetadata metadata, + IReadOnlyDictionary<string, JsonNode?> inputs, + IReadOnlyList<TaskPackPlanStep> steps, + IReadOnlyList<TaskPackPlanApproval> approvals, + IReadOnlyList<TaskPackPlanSecret> secrets, + IReadOnlyList<TaskPackPlanOutput> outputs) + { + var canonical = new CanonicalPlan( + new CanonicalMetadata(metadata.Name, metadata.Version, metadata.Description, metadata.Tags), + inputs.ToDictionary(kvp => kvp.Key, kvp => kvp.Value, StringComparer.Ordinal), + steps.Select(ToCanonicalStep).ToList(), + approvals + .OrderBy(a => a.Id, StringComparer.Ordinal) + .Select(a => new CanonicalApproval(a.Id, a.Grants.OrderBy(g => g, StringComparer.Ordinal).ToList(), a.ExpiresAfter, a.ReasonTemplate)) + .ToList(), + secrets + .OrderBy(s => s.Name, StringComparer.Ordinal) + .Select(s => new CanonicalSecret(s.Name, s.Scope, s.Description)) + .ToList(), + outputs + .OrderBy(o => o.Name, StringComparer.Ordinal) + .Select(ToCanonicalOutput) + .ToList()); + + var json = CanonicalJson.Serialize(canonical); + using var sha256 = SHA256.Create(); + var hashBytes = sha256.ComputeHash(Encoding.UTF8.GetBytes(json)); + return ConvertToHex(hashBytes); + } + + private static string ConvertToHex(byte[] hashBytes) + { + var builder = new StringBuilder(hashBytes.Length * 2); + foreach (var b in hashBytes) + { + builder.Append(b.ToString("x2", System.Globalization.CultureInfo.InvariantCulture)); + } + + return builder.ToString(); + } + + private static CanonicalPlanStep ToCanonicalStep(TaskPackPlanStep step) + => new( + step.Id, + step.TemplateId, + step.Name, + step.Type, + step.Enabled, + step.Uses, + step.Parameters?.ToDictionary( + kvp => kvp.Key, + kvp => new CanonicalParameter(kvp.Value.Value, kvp.Value.Expression, kvp.Value.Error, kvp.Value.RequiresRuntimeValue), + StringComparer.Ordinal), + step.ApprovalId, + step.GateMessage, + step.Children?.Select(ToCanonicalStep).ToList()); + + private sealed record CanonicalPlan( + CanonicalMetadata Metadata, + IDictionary<string, JsonNode?> Inputs, + 
IReadOnlyList<CanonicalPlanStep> Steps, + IReadOnlyList<CanonicalApproval> Approvals, + IReadOnlyList<CanonicalSecret> Secrets, + IReadOnlyList<CanonicalOutput> Outputs); + + private sealed record CanonicalMetadata(string Name, string Version, string? Description, IReadOnlyList<string> Tags); + + private sealed record CanonicalPlanStep( + string Id, + string TemplateId, + string? Name, + string Type, + bool Enabled, + string? Uses, + IDictionary<string, CanonicalParameter>? Parameters, + string? ApprovalId, + string? GateMessage, + IReadOnlyList<CanonicalPlanStep>? Children); + + private sealed record CanonicalApproval(string Id, IReadOnlyList<string> Grants, string? ExpiresAfter, string? ReasonTemplate); + + private sealed record CanonicalSecret(string Name, string Scope, string? Description); + + private sealed record CanonicalParameter(JsonNode? Value, string? Expression, string? Error, bool RequiresRuntimeValue); + + private sealed record CanonicalOutput( + string Name, + string Type, + CanonicalParameter? Path, + CanonicalParameter? Expression); + + private static CanonicalOutput ToCanonicalOutput(TaskPackPlanOutput output) + => new( + output.Name, + output.Type, + ToCanonicalParameter(output.Path), + ToCanonicalParameter(output.Expression)); + + private static CanonicalParameter? ToCanonicalParameter(TaskPackPlanParameterValue? value) + => value is null ? null : new CanonicalParameter(value.Value, value.Expression, value.Error, value.RequiresRuntimeValue); +} diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Planning/TaskPackPlanInsights.cs b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Planning/TaskPackPlanInsights.cs similarity index 96% rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Planning/TaskPackPlanInsights.cs rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Planning/TaskPackPlanInsights.cs index c96d1dab..faef92c6 100644 --- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Planning/TaskPackPlanInsights.cs +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Planning/TaskPackPlanInsights.cs @@ -1,185 +1,185 @@ -using System; -using System.Collections.Generic; -using System.Linq; - -namespace StellaOps.TaskRunner.Core.Planning; - -public static class TaskPackPlanInsights -{ - public static IReadOnlyList<TaskPackPlanApprovalRequirement> CollectApprovalRequirements(TaskPackPlan plan) - { - ArgumentNullException.ThrowIfNull(plan); - - var approvals = plan.Approvals.ToDictionary(approval => approval.Id, StringComparer.Ordinal); - var builders = new Dictionary<string, ApprovalRequirementBuilder>(StringComparer.Ordinal); - - void Visit(IReadOnlyList<TaskPackPlanStep>? 
steps) - { - if (steps is null) - { - return; - } - - foreach (var step in steps) - { - if (string.Equals(step.Type, "gate.approval", StringComparison.Ordinal) && !string.IsNullOrEmpty(step.ApprovalId)) - { - if (!builders.TryGetValue(step.ApprovalId, out var builder)) - { - builder = new ApprovalRequirementBuilder(step.ApprovalId); - builders[step.ApprovalId] = builder; - } - - builder.AddStep(step); - } - - Visit(step.Children); - } - } - - Visit(plan.Steps); - - return builders.Values - .Select(builder => builder.Build(approvals)) - .OrderBy(requirement => requirement.ApprovalId, StringComparer.Ordinal) - .ToList(); - } - - public static IReadOnlyList<TaskPackPlanNotificationHint> CollectNotificationHints(TaskPackPlan plan) - { - ArgumentNullException.ThrowIfNull(plan); - - var notifications = new List<TaskPackPlanNotificationHint>(); - - void Visit(IReadOnlyList<TaskPackPlanStep>? steps) - { - if (steps is null) - { - return; - } - - foreach (var step in steps) - { - if (string.Equals(step.Type, "gate.approval", StringComparison.Ordinal)) - { - notifications.Add(new TaskPackPlanNotificationHint(step.Id, "approval-request", step.GateMessage, step.ApprovalId)); - } - else if (string.Equals(step.Type, "gate.policy", StringComparison.Ordinal)) - { - notifications.Add(new TaskPackPlanNotificationHint(step.Id, "policy-gate", step.GateMessage, null)); - } - - Visit(step.Children); - } - } - - Visit(plan.Steps); - return notifications; - } - - public static IReadOnlyList<TaskPackPlanPolicyGateHint> CollectPolicyGateHints(TaskPackPlan plan) - { - ArgumentNullException.ThrowIfNull(plan); - - var hints = new List<TaskPackPlanPolicyGateHint>(); - - void Visit(IReadOnlyList<TaskPackPlanStep>? steps) - { - if (steps is null) - { - return; - } - - foreach (var step in steps) - { - if (string.Equals(step.Type, "gate.policy", StringComparison.Ordinal)) - { - var parameters = step.Parameters? - .OrderBy(kvp => kvp.Key, StringComparer.Ordinal) - .Select(kvp => new TaskPackPlanPolicyParameter( - kvp.Key, - kvp.Value.RequiresRuntimeValue, - kvp.Value.Expression, - kvp.Value.Error)) - .ToList() ?? new List<TaskPackPlanPolicyParameter>(); - - hints.Add(new TaskPackPlanPolicyGateHint(step.Id, step.GateMessage, parameters)); - } - - Visit(step.Children); - } - } - - Visit(plan.Steps); - return hints; - } - - private sealed class ApprovalRequirementBuilder - { - private readonly HashSet<string> stepIds = new(StringComparer.Ordinal); - private readonly List<string> messages = new(); - - public ApprovalRequirementBuilder(string approvalId) - { - ApprovalId = approvalId; - } - - public string ApprovalId { get; } - - public void AddStep(TaskPackPlanStep step) - { - stepIds.Add(step.Id); - if (!string.IsNullOrWhiteSpace(step.GateMessage)) - { - messages.Add(step.GateMessage!); - } - } - - public TaskPackPlanApprovalRequirement Build(IReadOnlyDictionary<string, TaskPackPlanApproval> knownApprovals) - { - knownApprovals.TryGetValue(ApprovalId, out var approval); - - var orderedSteps = stepIds - .OrderBy(id => id, StringComparer.Ordinal) - .ToList(); - - var orderedMessages = messages - .Where(message => !string.IsNullOrWhiteSpace(message)) - .Distinct(StringComparer.Ordinal) - .ToList(); - - return new TaskPackPlanApprovalRequirement( - ApprovalId, - approval?.Grants ?? Array.Empty<string>(), - approval?.ExpiresAfter, - approval?.ReasonTemplate, - orderedSteps, - orderedMessages); - } - } -} - -public sealed record TaskPackPlanApprovalRequirement( - string ApprovalId, - IReadOnlyList<string> Grants, - string? 
ExpiresAfter, - string? ReasonTemplate, - IReadOnlyList<string> StepIds, - IReadOnlyList<string> Messages); - -public sealed record TaskPackPlanNotificationHint( - string StepId, - string Type, - string? Message, - string? ApprovalId); - -public sealed record TaskPackPlanPolicyGateHint( - string StepId, - string? Message, - IReadOnlyList<TaskPackPlanPolicyParameter> Parameters); - -public sealed record TaskPackPlanPolicyParameter( - string Name, - bool RequiresRuntimeValue, - string? Expression, - string? Error); +using System; +using System.Collections.Generic; +using System.Linq; + +namespace StellaOps.TaskRunner.Core.Planning; + +public static class TaskPackPlanInsights +{ + public static IReadOnlyList<TaskPackPlanApprovalRequirement> CollectApprovalRequirements(TaskPackPlan plan) + { + ArgumentNullException.ThrowIfNull(plan); + + var approvals = plan.Approvals.ToDictionary(approval => approval.Id, StringComparer.Ordinal); + var builders = new Dictionary<string, ApprovalRequirementBuilder>(StringComparer.Ordinal); + + void Visit(IReadOnlyList<TaskPackPlanStep>? steps) + { + if (steps is null) + { + return; + } + + foreach (var step in steps) + { + if (string.Equals(step.Type, "gate.approval", StringComparison.Ordinal) && !string.IsNullOrEmpty(step.ApprovalId)) + { + if (!builders.TryGetValue(step.ApprovalId, out var builder)) + { + builder = new ApprovalRequirementBuilder(step.ApprovalId); + builders[step.ApprovalId] = builder; + } + + builder.AddStep(step); + } + + Visit(step.Children); + } + } + + Visit(plan.Steps); + + return builders.Values + .Select(builder => builder.Build(approvals)) + .OrderBy(requirement => requirement.ApprovalId, StringComparer.Ordinal) + .ToList(); + } + + public static IReadOnlyList<TaskPackPlanNotificationHint> CollectNotificationHints(TaskPackPlan plan) + { + ArgumentNullException.ThrowIfNull(plan); + + var notifications = new List<TaskPackPlanNotificationHint>(); + + void Visit(IReadOnlyList<TaskPackPlanStep>? steps) + { + if (steps is null) + { + return; + } + + foreach (var step in steps) + { + if (string.Equals(step.Type, "gate.approval", StringComparison.Ordinal)) + { + notifications.Add(new TaskPackPlanNotificationHint(step.Id, "approval-request", step.GateMessage, step.ApprovalId)); + } + else if (string.Equals(step.Type, "gate.policy", StringComparison.Ordinal)) + { + notifications.Add(new TaskPackPlanNotificationHint(step.Id, "policy-gate", step.GateMessage, null)); + } + + Visit(step.Children); + } + } + + Visit(plan.Steps); + return notifications; + } + + public static IReadOnlyList<TaskPackPlanPolicyGateHint> CollectPolicyGateHints(TaskPackPlan plan) + { + ArgumentNullException.ThrowIfNull(plan); + + var hints = new List<TaskPackPlanPolicyGateHint>(); + + void Visit(IReadOnlyList<TaskPackPlanStep>? steps) + { + if (steps is null) + { + return; + } + + foreach (var step in steps) + { + if (string.Equals(step.Type, "gate.policy", StringComparison.Ordinal)) + { + var parameters = step.Parameters? + .OrderBy(kvp => kvp.Key, StringComparer.Ordinal) + .Select(kvp => new TaskPackPlanPolicyParameter( + kvp.Key, + kvp.Value.RequiresRuntimeValue, + kvp.Value.Expression, + kvp.Value.Error)) + .ToList() ?? 
new List<TaskPackPlanPolicyParameter>(); + + hints.Add(new TaskPackPlanPolicyGateHint(step.Id, step.GateMessage, parameters)); + } + + Visit(step.Children); + } + } + + Visit(plan.Steps); + return hints; + } + + private sealed class ApprovalRequirementBuilder + { + private readonly HashSet<string> stepIds = new(StringComparer.Ordinal); + private readonly List<string> messages = new(); + + public ApprovalRequirementBuilder(string approvalId) + { + ApprovalId = approvalId; + } + + public string ApprovalId { get; } + + public void AddStep(TaskPackPlanStep step) + { + stepIds.Add(step.Id); + if (!string.IsNullOrWhiteSpace(step.GateMessage)) + { + messages.Add(step.GateMessage!); + } + } + + public TaskPackPlanApprovalRequirement Build(IReadOnlyDictionary<string, TaskPackPlanApproval> knownApprovals) + { + knownApprovals.TryGetValue(ApprovalId, out var approval); + + var orderedSteps = stepIds + .OrderBy(id => id, StringComparer.Ordinal) + .ToList(); + + var orderedMessages = messages + .Where(message => !string.IsNullOrWhiteSpace(message)) + .Distinct(StringComparer.Ordinal) + .ToList(); + + return new TaskPackPlanApprovalRequirement( + ApprovalId, + approval?.Grants ?? Array.Empty<string>(), + approval?.ExpiresAfter, + approval?.ReasonTemplate, + orderedSteps, + orderedMessages); + } + } +} + +public sealed record TaskPackPlanApprovalRequirement( + string ApprovalId, + IReadOnlyList<string> Grants, + string? ExpiresAfter, + string? ReasonTemplate, + IReadOnlyList<string> StepIds, + IReadOnlyList<string> Messages); + +public sealed record TaskPackPlanNotificationHint( + string StepId, + string Type, + string? Message, + string? ApprovalId); + +public sealed record TaskPackPlanPolicyGateHint( + string StepId, + string? Message, + IReadOnlyList<TaskPackPlanPolicyParameter> Parameters); + +public sealed record TaskPackPlanPolicyParameter( + string Name, + bool RequiresRuntimeValue, + string? Expression, + string? Error); diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Planning/TaskPackPlanner.cs b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Planning/TaskPackPlanner.cs similarity index 97% rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Planning/TaskPackPlanner.cs rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Planning/TaskPackPlanner.cs index eb732d98..80a8adb1 100644 --- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Planning/TaskPackPlanner.cs +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Planning/TaskPackPlanner.cs @@ -1,431 +1,431 @@ -using System.Collections.Immutable; -using System.Linq; -using System.Text.Json.Nodes; -using StellaOps.TaskRunner.Core.Expressions; -using StellaOps.TaskRunner.Core.TaskPacks; - -namespace StellaOps.TaskRunner.Core.Planning; - -public sealed class TaskPackPlanner -{ - private readonly TaskPackManifestValidator validator; - - public TaskPackPlanner() - { - validator = new TaskPackManifestValidator(); - } - - public TaskPackPlanResult Plan(TaskPackManifest manifest, IDictionary<string, JsonNode?>? 
providedInputs = null) - { - ArgumentNullException.ThrowIfNull(manifest); - - var errors = ImmutableArray.CreateBuilder<TaskPackPlanError>(); - - var validation = validator.Validate(manifest); - if (!validation.IsValid) - { - foreach (var error in validation.Errors) - { - errors.Add(new TaskPackPlanError(error.Path, error.Message)); - } - - return new TaskPackPlanResult(null, errors.ToImmutable()); - } - - var effectiveInputs = MaterializeInputs(manifest.Spec.Inputs, providedInputs, errors); - if (errors.Count > 0) - { - return new TaskPackPlanResult(null, errors.ToImmutable()); - } - - var stepTracker = new HashSet<string>(StringComparer.Ordinal); - var secretTracker = new HashSet<string>(StringComparer.Ordinal); - if (manifest.Spec.Secrets is not null) - { - foreach (var secret in manifest.Spec.Secrets) - { - secretTracker.Add(secret.Name); - } - } - - var context = TaskPackExpressionContext.Create(effectiveInputs, stepTracker, secretTracker); - - var planSteps = new List<TaskPackPlanStep>(); - var steps = manifest.Spec.Steps; - for (var i = 0; i < steps.Count; i++) - { - var step = steps[i]; - var planStep = BuildStep(step, context, $"spec.steps[{i}]", errors); - planSteps.Add(planStep); - } - - if (errors.Count > 0) - { - return new TaskPackPlanResult(null, errors.ToImmutable()); - } - - var metadata = new TaskPackPlanMetadata( - manifest.Metadata.Name, - manifest.Metadata.Version, - manifest.Metadata.Description, - manifest.Metadata.Tags?.ToList() ?? new List<string>()); - - var planApprovals = manifest.Spec.Approvals? - .Select(approval => new TaskPackPlanApproval( - approval.Id, - approval.Grants?.ToList() ?? new List<string>(), - approval.ExpiresAfter, - approval.ReasonTemplate)) - .ToList() ?? new List<TaskPackPlanApproval>(); - - var planSecrets = manifest.Spec.Secrets? - .Select(secret => new TaskPackPlanSecret(secret.Name, secret.Scope, secret.Description)) - .ToList() ?? new List<TaskPackPlanSecret>(); - - var planOutputs = MaterializeOutputs(manifest.Spec.Outputs, context, errors); - if (errors.Count > 0) - { - return new TaskPackPlanResult(null, errors.ToImmutable()); - } - - var hash = TaskPackPlanHasher.ComputeHash(metadata, effectiveInputs, planSteps, planApprovals, planSecrets, planOutputs); - - var plan = new TaskPackPlan(metadata, effectiveInputs, planSteps, hash, planApprovals, planSecrets, planOutputs); - return new TaskPackPlanResult(plan, ImmutableArray<TaskPackPlanError>.Empty); - } - - private Dictionary<string, JsonNode?> MaterializeInputs( - IReadOnlyList<TaskPackInput>? definitions, - IDictionary<string, JsonNode?>? 
providedInputs, - ImmutableArray<TaskPackPlanError>.Builder errors) - { - var effective = new Dictionary<string, JsonNode?>(StringComparer.Ordinal); - - if (definitions is not null) - { - foreach (var input in definitions) - { - if ((providedInputs is not null && providedInputs.TryGetValue(input.Name, out var supplied))) - { - effective[input.Name] = supplied?.DeepClone(); - } - else if (input.Default is not null) - { - effective[input.Name] = input.Default.DeepClone(); - } - else if (input.Required) - { - errors.Add(new TaskPackPlanError($"inputs.{input.Name}", "Input is required but was not supplied.")); - } - } - } - - if (providedInputs is not null) - { - foreach (var kvp in providedInputs) - { - if (!effective.ContainsKey(kvp.Key)) - { - effective[kvp.Key] = kvp.Value?.DeepClone(); - } - } - } - - return effective; - } - - private TaskPackPlanStep BuildStep( - TaskPackStep step, - TaskPackExpressionContext context, - string path, - ImmutableArray<TaskPackPlanError>.Builder errors) - { - if (!TaskPackExpressions.TryEvaluateBoolean(step.When, context, out var enabled, out var whenError)) - { - errors.Add(new TaskPackPlanError($"{path}.when", whenError ?? "Failed to evaluate 'when' expression.")); - enabled = false; - } - - TaskPackPlanStep planStep; - - if (step.Run is not null) - { - planStep = BuildRunStep(step, step.Run, context, path, enabled, errors); - } - else if (step.Gate is not null) - { - planStep = BuildGateStep(step, step.Gate, context, path, enabled, errors); - } - else if (step.Parallel is not null) - { - planStep = BuildParallelStep(step, step.Parallel, context, path, enabled, errors); - } - else if (step.Map is not null) - { - planStep = BuildMapStep(step, step.Map, context, path, enabled, errors); - } - else - { - errors.Add(new TaskPackPlanError(path, "Step did not specify run, gate, parallel, or map.")); - planStep = new TaskPackPlanStep(step.Id, step.Id, step.Name, "invalid", enabled, null, null, ApprovalId: null, GateMessage: null, Children: null); - } - - context.RegisterStep(step.Id); - return planStep; - } - - private TaskPackPlanStep BuildRunStep( - TaskPackStep step, - TaskPackRunStep run, - TaskPackExpressionContext context, - string path, - bool enabled, - ImmutableArray<TaskPackPlanError>.Builder errors) - { - var parameters = ResolveParameters(run.With, context, $"{path}.run", errors); - - return new TaskPackPlanStep( - step.Id, - step.Id, - step.Name, - "run", - enabled, - run.Uses, - parameters, - ApprovalId: null, - GateMessage: null, - Children: null); - } - - private TaskPackPlanStep BuildGateStep( - TaskPackStep step, - TaskPackGateStep gate, - TaskPackExpressionContext context, - string path, - bool enabled, - ImmutableArray<TaskPackPlanError>.Builder errors) - { - string type; - string? approvalId = null; - IReadOnlyDictionary<string, TaskPackPlanParameterValue>? 
parameters = null; - - if (gate.Approval is not null) - { - type = "gate.approval"; - approvalId = gate.Approval.Id; - } - else if (gate.Policy is not null) - { - type = "gate.policy"; - parameters = ResolveParameters(gate.Policy.Parameters, context, $"{path}.gate.policy", errors); - } - else - { - type = "gate"; - errors.Add(new TaskPackPlanError($"{path}.gate", "Gate must specify approval or policy.")); - } - - return new TaskPackPlanStep( - step.Id, - step.Id, - step.Name, - type, - enabled, - Uses: null, - parameters, - ApprovalId: approvalId, - GateMessage: gate.Message, - Children: null); - } - - private TaskPackPlanStep BuildParallelStep( - TaskPackStep step, - TaskPackParallelStep parallel, - TaskPackExpressionContext context, - string path, - bool enabled, - ImmutableArray<TaskPackPlanError>.Builder errors) - { - var children = new List<TaskPackPlanStep>(); - for (var i = 0; i < parallel.Steps.Count; i++) - { - var child = BuildStep(parallel.Steps[i], context, $"{path}.parallel.steps[{i}]", errors); - children.Add(child); - } - - var parameters = new Dictionary<string, TaskPackPlanParameterValue>(StringComparer.Ordinal); - if (parallel.MaxParallel.HasValue) - { - parameters["maxParallel"] = new TaskPackPlanParameterValue(JsonValue.Create(parallel.MaxParallel.Value), null, null, false); - } - - parameters["continueOnError"] = new TaskPackPlanParameterValue(JsonValue.Create(parallel.ContinueOnError), null, null, false); - - return new TaskPackPlanStep( - step.Id, - step.Id, - step.Name, - "parallel", - enabled, - Uses: null, - parameters, - ApprovalId: null, - GateMessage: null, - Children: children); - } - - private TaskPackPlanStep BuildMapStep( - TaskPackStep step, - TaskPackMapStep map, - TaskPackExpressionContext context, - string path, - bool enabled, - ImmutableArray<TaskPackPlanError>.Builder errors) - { - var parameters = new Dictionary<string, TaskPackPlanParameterValue>(StringComparer.Ordinal); - var itemsResolution = TaskPackExpressions.EvaluateString(map.Items, context); - JsonArray? itemsArray = null; - - if (!itemsResolution.Resolved) - { - if (itemsResolution.Error is not null) - { - errors.Add(new TaskPackPlanError($"{path}.map.items", itemsResolution.Error)); - } - else - { - errors.Add(new TaskPackPlanError($"{path}.map.items", "Map items expression requires runtime evaluation. Packs must provide deterministic item lists at plan time.")); - } - } - else if (itemsResolution.Value is JsonArray array) - { - itemsArray = (JsonArray?)array.DeepClone(); - } - else - { - errors.Add(new TaskPackPlanError($"{path}.map.items", "Map items expression must resolve to an array.")); - } - - if (itemsArray is not null) - { - parameters["items"] = new TaskPackPlanParameterValue(itemsArray, null, null, false); - parameters["iterationCount"] = new TaskPackPlanParameterValue(JsonValue.Create(itemsArray.Count), null, null, false); - } - else - { - parameters["items"] = new TaskPackPlanParameterValue(null, map.Items, "Map items expression could not be resolved.", true); - } - - var children = new List<TaskPackPlanStep>(); - if (itemsArray is not null) - { - for (var i = 0; i < itemsArray.Count; i++) - { - var item = itemsArray[i]; - var iterationContext = context.WithItem(item); - var iterationPath = $"{path}.map.step[{i}]"; - var templateStep = BuildStep(map.Step, iterationContext, iterationPath, errors); - - var childId = $"{step.Id}[{i}]::{map.Step.Id}"; - var iterationParameters = templateStep.Parameters is null - ? 
new Dictionary<string, TaskPackPlanParameterValue>(StringComparer.Ordinal) - : new Dictionary<string, TaskPackPlanParameterValue>(templateStep.Parameters); - - iterationParameters["item"] = new TaskPackPlanParameterValue(item?.DeepClone(), null, null, false); - - var iterationStep = templateStep with - { - Id = childId, - TemplateId = map.Step.Id, - Parameters = iterationParameters - }; - - children.Add(iterationStep); - } - } - - return new TaskPackPlanStep( - step.Id, - step.Id, - step.Name, - "map", - enabled, - Uses: null, - parameters, - ApprovalId: null, - GateMessage: null, - Children: children); - } - - private IReadOnlyDictionary<string, TaskPackPlanParameterValue>? ResolveParameters( - IDictionary<string, JsonNode?>? rawParameters, - TaskPackExpressionContext context, - string path, - ImmutableArray<TaskPackPlanError>.Builder errors) - { - if (rawParameters is null || rawParameters.Count == 0) - { - return null; - } - - var resolved = new Dictionary<string, TaskPackPlanParameterValue>(StringComparer.Ordinal); - foreach (var (key, value) in rawParameters) - { - var evaluation = TaskPackExpressions.EvaluateValue(value, context); - if (!evaluation.Resolved && evaluation.Error is not null) - { - errors.Add(new TaskPackPlanError($"{path}.with.{key}", evaluation.Error)); - } - - resolved[key] = TaskPackPlanParameterValue.FromResolution(evaluation); - } - - return resolved; - } - - private IReadOnlyList<TaskPackPlanOutput> MaterializeOutputs( - IReadOnlyList<TaskPackOutput>? outputs, - TaskPackExpressionContext context, - ImmutableArray<TaskPackPlanError>.Builder errors) - { - if (outputs is null || outputs.Count == 0) - { - return Array.Empty<TaskPackPlanOutput>(); - } - - var results = new List<TaskPackPlanOutput>(outputs.Count); - foreach (var (output, index) in outputs.Select((output, index) => (output, index))) - { - var pathValue = ConvertString(output.Path, context, $"spec.outputs[{index}].path", errors); - var expressionValue = ConvertString(output.Expression, context, $"spec.outputs[{index}].expression", errors); - - results.Add(new TaskPackPlanOutput( - output.Name, - output.Type, - pathValue, - expressionValue)); - } - - return results; - } - - private TaskPackPlanParameterValue? ConvertString( - string? value, - TaskPackExpressionContext context, - string path, - ImmutableArray<TaskPackPlanError>.Builder errors) - { - if (string.IsNullOrWhiteSpace(value)) - { - return null; - } - - var resolution = TaskPackExpressions.EvaluateString(value, context); - if (!resolution.Resolved && resolution.Error is not null) - { - errors.Add(new TaskPackPlanError(path, resolution.Error)); - } - - return TaskPackPlanParameterValue.FromResolution(resolution); - } -} +using System.Collections.Immutable; +using System.Linq; +using System.Text.Json.Nodes; +using StellaOps.TaskRunner.Core.Expressions; +using StellaOps.TaskRunner.Core.TaskPacks; + +namespace StellaOps.TaskRunner.Core.Planning; + +public sealed class TaskPackPlanner +{ + private readonly TaskPackManifestValidator validator; + + public TaskPackPlanner() + { + validator = new TaskPackManifestValidator(); + } + + public TaskPackPlanResult Plan(TaskPackManifest manifest, IDictionary<string, JsonNode?>? 
providedInputs = null) + { + ArgumentNullException.ThrowIfNull(manifest); + + var errors = ImmutableArray.CreateBuilder<TaskPackPlanError>(); + + var validation = validator.Validate(manifest); + if (!validation.IsValid) + { + foreach (var error in validation.Errors) + { + errors.Add(new TaskPackPlanError(error.Path, error.Message)); + } + + return new TaskPackPlanResult(null, errors.ToImmutable()); + } + + var effectiveInputs = MaterializeInputs(manifest.Spec.Inputs, providedInputs, errors); + if (errors.Count > 0) + { + return new TaskPackPlanResult(null, errors.ToImmutable()); + } + + var stepTracker = new HashSet<string>(StringComparer.Ordinal); + var secretTracker = new HashSet<string>(StringComparer.Ordinal); + if (manifest.Spec.Secrets is not null) + { + foreach (var secret in manifest.Spec.Secrets) + { + secretTracker.Add(secret.Name); + } + } + + var context = TaskPackExpressionContext.Create(effectiveInputs, stepTracker, secretTracker); + + var planSteps = new List<TaskPackPlanStep>(); + var steps = manifest.Spec.Steps; + for (var i = 0; i < steps.Count; i++) + { + var step = steps[i]; + var planStep = BuildStep(step, context, $"spec.steps[{i}]", errors); + planSteps.Add(planStep); + } + + if (errors.Count > 0) + { + return new TaskPackPlanResult(null, errors.ToImmutable()); + } + + var metadata = new TaskPackPlanMetadata( + manifest.Metadata.Name, + manifest.Metadata.Version, + manifest.Metadata.Description, + manifest.Metadata.Tags?.ToList() ?? new List<string>()); + + var planApprovals = manifest.Spec.Approvals? + .Select(approval => new TaskPackPlanApproval( + approval.Id, + approval.Grants?.ToList() ?? new List<string>(), + approval.ExpiresAfter, + approval.ReasonTemplate)) + .ToList() ?? new List<TaskPackPlanApproval>(); + + var planSecrets = manifest.Spec.Secrets? + .Select(secret => new TaskPackPlanSecret(secret.Name, secret.Scope, secret.Description)) + .ToList() ?? new List<TaskPackPlanSecret>(); + + var planOutputs = MaterializeOutputs(manifest.Spec.Outputs, context, errors); + if (errors.Count > 0) + { + return new TaskPackPlanResult(null, errors.ToImmutable()); + } + + var hash = TaskPackPlanHasher.ComputeHash(metadata, effectiveInputs, planSteps, planApprovals, planSecrets, planOutputs); + + var plan = new TaskPackPlan(metadata, effectiveInputs, planSteps, hash, planApprovals, planSecrets, planOutputs); + return new TaskPackPlanResult(plan, ImmutableArray<TaskPackPlanError>.Empty); + } + + private Dictionary<string, JsonNode?> MaterializeInputs( + IReadOnlyList<TaskPackInput>? definitions, + IDictionary<string, JsonNode?>? 
providedInputs, + ImmutableArray<TaskPackPlanError>.Builder errors) + { + var effective = new Dictionary<string, JsonNode?>(StringComparer.Ordinal); + + if (definitions is not null) + { + foreach (var input in definitions) + { + if ((providedInputs is not null && providedInputs.TryGetValue(input.Name, out var supplied))) + { + effective[input.Name] = supplied?.DeepClone(); + } + else if (input.Default is not null) + { + effective[input.Name] = input.Default.DeepClone(); + } + else if (input.Required) + { + errors.Add(new TaskPackPlanError($"inputs.{input.Name}", "Input is required but was not supplied.")); + } + } + } + + if (providedInputs is not null) + { + foreach (var kvp in providedInputs) + { + if (!effective.ContainsKey(kvp.Key)) + { + effective[kvp.Key] = kvp.Value?.DeepClone(); + } + } + } + + return effective; + } + + private TaskPackPlanStep BuildStep( + TaskPackStep step, + TaskPackExpressionContext context, + string path, + ImmutableArray<TaskPackPlanError>.Builder errors) + { + if (!TaskPackExpressions.TryEvaluateBoolean(step.When, context, out var enabled, out var whenError)) + { + errors.Add(new TaskPackPlanError($"{path}.when", whenError ?? "Failed to evaluate 'when' expression.")); + enabled = false; + } + + TaskPackPlanStep planStep; + + if (step.Run is not null) + { + planStep = BuildRunStep(step, step.Run, context, path, enabled, errors); + } + else if (step.Gate is not null) + { + planStep = BuildGateStep(step, step.Gate, context, path, enabled, errors); + } + else if (step.Parallel is not null) + { + planStep = BuildParallelStep(step, step.Parallel, context, path, enabled, errors); + } + else if (step.Map is not null) + { + planStep = BuildMapStep(step, step.Map, context, path, enabled, errors); + } + else + { + errors.Add(new TaskPackPlanError(path, "Step did not specify run, gate, parallel, or map.")); + planStep = new TaskPackPlanStep(step.Id, step.Id, step.Name, "invalid", enabled, null, null, ApprovalId: null, GateMessage: null, Children: null); + } + + context.RegisterStep(step.Id); + return planStep; + } + + private TaskPackPlanStep BuildRunStep( + TaskPackStep step, + TaskPackRunStep run, + TaskPackExpressionContext context, + string path, + bool enabled, + ImmutableArray<TaskPackPlanError>.Builder errors) + { + var parameters = ResolveParameters(run.With, context, $"{path}.run", errors); + + return new TaskPackPlanStep( + step.Id, + step.Id, + step.Name, + "run", + enabled, + run.Uses, + parameters, + ApprovalId: null, + GateMessage: null, + Children: null); + } + + private TaskPackPlanStep BuildGateStep( + TaskPackStep step, + TaskPackGateStep gate, + TaskPackExpressionContext context, + string path, + bool enabled, + ImmutableArray<TaskPackPlanError>.Builder errors) + { + string type; + string? approvalId = null; + IReadOnlyDictionary<string, TaskPackPlanParameterValue>? 
parameters = null; + + if (gate.Approval is not null) + { + type = "gate.approval"; + approvalId = gate.Approval.Id; + } + else if (gate.Policy is not null) + { + type = "gate.policy"; + parameters = ResolveParameters(gate.Policy.Parameters, context, $"{path}.gate.policy", errors); + } + else + { + type = "gate"; + errors.Add(new TaskPackPlanError($"{path}.gate", "Gate must specify approval or policy.")); + } + + return new TaskPackPlanStep( + step.Id, + step.Id, + step.Name, + type, + enabled, + Uses: null, + parameters, + ApprovalId: approvalId, + GateMessage: gate.Message, + Children: null); + } + + private TaskPackPlanStep BuildParallelStep( + TaskPackStep step, + TaskPackParallelStep parallel, + TaskPackExpressionContext context, + string path, + bool enabled, + ImmutableArray<TaskPackPlanError>.Builder errors) + { + var children = new List<TaskPackPlanStep>(); + for (var i = 0; i < parallel.Steps.Count; i++) + { + var child = BuildStep(parallel.Steps[i], context, $"{path}.parallel.steps[{i}]", errors); + children.Add(child); + } + + var parameters = new Dictionary<string, TaskPackPlanParameterValue>(StringComparer.Ordinal); + if (parallel.MaxParallel.HasValue) + { + parameters["maxParallel"] = new TaskPackPlanParameterValue(JsonValue.Create(parallel.MaxParallel.Value), null, null, false); + } + + parameters["continueOnError"] = new TaskPackPlanParameterValue(JsonValue.Create(parallel.ContinueOnError), null, null, false); + + return new TaskPackPlanStep( + step.Id, + step.Id, + step.Name, + "parallel", + enabled, + Uses: null, + parameters, + ApprovalId: null, + GateMessage: null, + Children: children); + } + + private TaskPackPlanStep BuildMapStep( + TaskPackStep step, + TaskPackMapStep map, + TaskPackExpressionContext context, + string path, + bool enabled, + ImmutableArray<TaskPackPlanError>.Builder errors) + { + var parameters = new Dictionary<string, TaskPackPlanParameterValue>(StringComparer.Ordinal); + var itemsResolution = TaskPackExpressions.EvaluateString(map.Items, context); + JsonArray? itemsArray = null; + + if (!itemsResolution.Resolved) + { + if (itemsResolution.Error is not null) + { + errors.Add(new TaskPackPlanError($"{path}.map.items", itemsResolution.Error)); + } + else + { + errors.Add(new TaskPackPlanError($"{path}.map.items", "Map items expression requires runtime evaluation. Packs must provide deterministic item lists at plan time.")); + } + } + else if (itemsResolution.Value is JsonArray array) + { + itemsArray = (JsonArray?)array.DeepClone(); + } + else + { + errors.Add(new TaskPackPlanError($"{path}.map.items", "Map items expression must resolve to an array.")); + } + + if (itemsArray is not null) + { + parameters["items"] = new TaskPackPlanParameterValue(itemsArray, null, null, false); + parameters["iterationCount"] = new TaskPackPlanParameterValue(JsonValue.Create(itemsArray.Count), null, null, false); + } + else + { + parameters["items"] = new TaskPackPlanParameterValue(null, map.Items, "Map items expression could not be resolved.", true); + } + + var children = new List<TaskPackPlanStep>(); + if (itemsArray is not null) + { + for (var i = 0; i < itemsArray.Count; i++) + { + var item = itemsArray[i]; + var iterationContext = context.WithItem(item); + var iterationPath = $"{path}.map.step[{i}]"; + var templateStep = BuildStep(map.Step, iterationContext, iterationPath, errors); + + var childId = $"{step.Id}[{i}]::{map.Step.Id}"; + var iterationParameters = templateStep.Parameters is null + ? 
new Dictionary<string, TaskPackPlanParameterValue>(StringComparer.Ordinal) + : new Dictionary<string, TaskPackPlanParameterValue>(templateStep.Parameters); + + iterationParameters["item"] = new TaskPackPlanParameterValue(item?.DeepClone(), null, null, false); + + var iterationStep = templateStep with + { + Id = childId, + TemplateId = map.Step.Id, + Parameters = iterationParameters + }; + + children.Add(iterationStep); + } + } + + return new TaskPackPlanStep( + step.Id, + step.Id, + step.Name, + "map", + enabled, + Uses: null, + parameters, + ApprovalId: null, + GateMessage: null, + Children: children); + } + + private IReadOnlyDictionary<string, TaskPackPlanParameterValue>? ResolveParameters( + IDictionary<string, JsonNode?>? rawParameters, + TaskPackExpressionContext context, + string path, + ImmutableArray<TaskPackPlanError>.Builder errors) + { + if (rawParameters is null || rawParameters.Count == 0) + { + return null; + } + + var resolved = new Dictionary<string, TaskPackPlanParameterValue>(StringComparer.Ordinal); + foreach (var (key, value) in rawParameters) + { + var evaluation = TaskPackExpressions.EvaluateValue(value, context); + if (!evaluation.Resolved && evaluation.Error is not null) + { + errors.Add(new TaskPackPlanError($"{path}.with.{key}", evaluation.Error)); + } + + resolved[key] = TaskPackPlanParameterValue.FromResolution(evaluation); + } + + return resolved; + } + + private IReadOnlyList<TaskPackPlanOutput> MaterializeOutputs( + IReadOnlyList<TaskPackOutput>? outputs, + TaskPackExpressionContext context, + ImmutableArray<TaskPackPlanError>.Builder errors) + { + if (outputs is null || outputs.Count == 0) + { + return Array.Empty<TaskPackPlanOutput>(); + } + + var results = new List<TaskPackPlanOutput>(outputs.Count); + foreach (var (output, index) in outputs.Select((output, index) => (output, index))) + { + var pathValue = ConvertString(output.Path, context, $"spec.outputs[{index}].path", errors); + var expressionValue = ConvertString(output.Expression, context, $"spec.outputs[{index}].expression", errors); + + results.Add(new TaskPackPlanOutput( + output.Name, + output.Type, + pathValue, + expressionValue)); + } + + return results; + } + + private TaskPackPlanParameterValue? ConvertString( + string? 
value, + TaskPackExpressionContext context, + string path, + ImmutableArray<TaskPackPlanError>.Builder errors) + { + if (string.IsNullOrWhiteSpace(value)) + { + return null; + } + + var resolution = TaskPackExpressions.EvaluateString(value, context); + if (!resolution.Resolved && resolution.Error is not null) + { + errors.Add(new TaskPackPlanError(path, resolution.Error)); + } + + return TaskPackPlanParameterValue.FromResolution(resolution); + } +} diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Serialization/CanonicalJson.cs b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Serialization/CanonicalJson.cs similarity index 96% rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Serialization/CanonicalJson.cs rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Serialization/CanonicalJson.cs index c4a83129..95743fee 100644 --- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Serialization/CanonicalJson.cs +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/Serialization/CanonicalJson.cs @@ -1,68 +1,68 @@ -using System.Linq; -using System.Text.Encodings.Web; -using System.Text.Json; -using System.Text.Json.Nodes; - -namespace StellaOps.TaskRunner.Core.Serialization; - -internal static class CanonicalJson -{ - private static readonly JsonSerializerOptions SerializerOptions = new() - { - PropertyNamingPolicy = JsonNamingPolicy.CamelCase, - DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull, - Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping, - WriteIndented = false - }; - - public static string Serialize<T>(T value) - { - var node = JsonSerializer.SerializeToNode(value, SerializerOptions); - if (node is null) - { - throw new InvalidOperationException("Unable to serialize value to JSON node."); - } - - var canonical = Canonicalize(node); - return canonical.ToJsonString(SerializerOptions); - } - - public static JsonNode Canonicalize(JsonNode node) - { - return node switch - { - JsonObject obj => CanonicalizeObject(obj), - JsonArray array => CanonicalizeArray(array), - _ => node.DeepClone() - }; - } - - private static JsonObject CanonicalizeObject(JsonObject obj) - { - var canonical = new JsonObject(); - foreach (var property in obj.OrderBy(static p => p.Key, StringComparer.Ordinal)) - { - if (property.Value is null) - { - canonical[property.Key] = null; - } - else - { - canonical[property.Key] = Canonicalize(property.Value); - } - } - - return canonical; - } - - private static JsonArray CanonicalizeArray(JsonArray array) - { - var canonical = new JsonArray(); - foreach (var element in array) - { - canonical.Add(element is null ? 
null : Canonicalize(element)); - } - - return canonical; - } -} +using System.Linq; +using System.Text.Encodings.Web; +using System.Text.Json; +using System.Text.Json.Nodes; + +namespace StellaOps.TaskRunner.Core.Serialization; + +internal static class CanonicalJson +{ + private static readonly JsonSerializerOptions SerializerOptions = new() + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + DefaultIgnoreCondition = System.Text.Json.Serialization.JsonIgnoreCondition.WhenWritingNull, + Encoder = JavaScriptEncoder.UnsafeRelaxedJsonEscaping, + WriteIndented = false + }; + + public static string Serialize<T>(T value) + { + var node = JsonSerializer.SerializeToNode(value, SerializerOptions); + if (node is null) + { + throw new InvalidOperationException("Unable to serialize value to JSON node."); + } + + var canonical = Canonicalize(node); + return canonical.ToJsonString(SerializerOptions); + } + + public static JsonNode Canonicalize(JsonNode node) + { + return node switch + { + JsonObject obj => CanonicalizeObject(obj), + JsonArray array => CanonicalizeArray(array), + _ => node.DeepClone() + }; + } + + private static JsonObject CanonicalizeObject(JsonObject obj) + { + var canonical = new JsonObject(); + foreach (var property in obj.OrderBy(static p => p.Key, StringComparer.Ordinal)) + { + if (property.Value is null) + { + canonical[property.Key] = null; + } + else + { + canonical[property.Key] = Canonicalize(property.Value); + } + } + + return canonical; + } + + private static JsonArray CanonicalizeArray(JsonArray array) + { + var canonical = new JsonArray(); + foreach (var element in array) + { + canonical.Add(element is null ? null : Canonicalize(element)); + } + + return canonical; + } +} diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/StellaOps.TaskRunner.Core.csproj b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/StellaOps.TaskRunner.Core.csproj similarity index 96% rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/StellaOps.TaskRunner.Core.csproj rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/StellaOps.TaskRunner.Core.csproj index 6bfa27d8..80bf73b0 100644 --- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/StellaOps.TaskRunner.Core.csproj +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/StellaOps.TaskRunner.Core.csproj @@ -1,22 +1,22 @@ -<?xml version="1.0" ?> -<Project Sdk="Microsoft.NET.Sdk"> - - - - <PropertyGroup> - - - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <LangVersion>preview</LangVersion> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - - - - <ItemGroup> - <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="9.0.0" /> - <PackageReference Include="YamlDotNet" Version="13.7.1" /> - </ItemGroup> -</Project> +<?xml version="1.0" ?> +<Project Sdk="Microsoft.NET.Sdk"> + + + + <PropertyGroup> + + + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <LangVersion>preview</LangVersion> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + + + + <ItemGroup> + <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="9.0.0" /> + <PackageReference Include="YamlDotNet" Version="13.7.1" /> + </ItemGroup> +</Project> diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/TaskPacks/TaskPackManifest.cs 
b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/TaskPacks/TaskPackManifest.cs similarity index 96% rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/TaskPacks/TaskPackManifest.cs rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/TaskPacks/TaskPackManifest.cs index 78743ac5..a6496749 100644 --- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/TaskPacks/TaskPackManifest.cs +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/TaskPacks/TaskPackManifest.cs @@ -1,250 +1,250 @@ -using System.Text.Json.Nodes; -using System.Text.Json.Serialization; - -namespace StellaOps.TaskRunner.Core.TaskPacks; - -public sealed class TaskPackManifest -{ - [JsonPropertyName("apiVersion")] - public required string ApiVersion { get; init; } - - [JsonPropertyName("kind")] - public required string Kind { get; init; } - - [JsonPropertyName("metadata")] - public required TaskPackMetadata Metadata { get; init; } - - [JsonPropertyName("spec")] - public required TaskPackSpec Spec { get; init; } -} - -public sealed class TaskPackMetadata -{ - [JsonPropertyName("name")] - public required string Name { get; init; } - - [JsonPropertyName("version")] - public required string Version { get; init; } - - [JsonPropertyName("description")] - public string? Description { get; init; } - - [JsonPropertyName("tags")] - public IReadOnlyList<string>? Tags { get; init; } - - [JsonPropertyName("tenantVisibility")] - public IReadOnlyList<string>? TenantVisibility { get; init; } - - [JsonPropertyName("maintainers")] - public IReadOnlyList<TaskPackMaintainer>? Maintainers { get; init; } - - [JsonPropertyName("license")] - public string? License { get; init; } - - [JsonPropertyName("annotations")] - public IReadOnlyDictionary<string, string>? Annotations { get; init; } -} - -public sealed class TaskPackMaintainer -{ - [JsonPropertyName("name")] - public required string Name { get; init; } - - [JsonPropertyName("email")] - public string? Email { get; init; } -} - -public sealed class TaskPackSpec -{ - [JsonPropertyName("inputs")] - public IReadOnlyList<TaskPackInput>? Inputs { get; init; } - - [JsonPropertyName("secrets")] - public IReadOnlyList<TaskPackSecret>? Secrets { get; init; } - - [JsonPropertyName("approvals")] - public IReadOnlyList<TaskPackApproval>? Approvals { get; init; } - - [JsonPropertyName("steps")] - public IReadOnlyList<TaskPackStep> Steps { get; init; } = Array.Empty<TaskPackStep>(); - - [JsonPropertyName("outputs")] - public IReadOnlyList<TaskPackOutput>? Outputs { get; init; } - - [JsonPropertyName("success")] - public TaskPackSuccess? Success { get; init; } - - [JsonPropertyName("failure")] - public TaskPackFailure? Failure { get; init; } -} - -public sealed class TaskPackInput -{ - [JsonPropertyName("name")] - public required string Name { get; init; } - - [JsonPropertyName("type")] - public required string Type { get; init; } - - [JsonPropertyName("schema")] - public string? Schema { get; init; } - - [JsonPropertyName("required")] - public bool Required { get; init; } - - [JsonPropertyName("description")] - public string? Description { get; init; } - - [JsonPropertyName("default")] - public JsonNode? Default { get; init; } -} - -public sealed class TaskPackSecret -{ - [JsonPropertyName("name")] - public required string Name { get; init; } - - [JsonPropertyName("scope")] - public required string Scope { get; init; } - - [JsonPropertyName("description")] - public string? 
Description { get; init; } -} - -public sealed class TaskPackApproval -{ - [JsonPropertyName("id")] - public required string Id { get; init; } - - [JsonPropertyName("grants")] - public IReadOnlyList<string> Grants { get; init; } = Array.Empty<string>(); - - [JsonPropertyName("expiresAfter")] - public string? ExpiresAfter { get; init; } - - [JsonPropertyName("reasonTemplate")] - public string? ReasonTemplate { get; init; } -} - -public sealed class TaskPackStep -{ - [JsonPropertyName("id")] - public required string Id { get; init; } - - [JsonPropertyName("name")] - public string? Name { get; init; } - - [JsonPropertyName("when")] - public string? When { get; init; } - - [JsonPropertyName("run")] - public TaskPackRunStep? Run { get; init; } - - [JsonPropertyName("gate")] - public TaskPackGateStep? Gate { get; init; } - - [JsonPropertyName("parallel")] - public TaskPackParallelStep? Parallel { get; init; } - - [JsonPropertyName("map")] - public TaskPackMapStep? Map { get; init; } -} - -public sealed class TaskPackRunStep -{ - [JsonPropertyName("uses")] - public required string Uses { get; init; } - - [JsonPropertyName("with")] - public IDictionary<string, JsonNode?>? With { get; init; } -} - -public sealed class TaskPackGateStep -{ - [JsonPropertyName("approval")] - public TaskPackApprovalGate? Approval { get; init; } - - [JsonPropertyName("policy")] - public TaskPackPolicyGate? Policy { get; init; } - - [JsonPropertyName("message")] - public string? Message { get; init; } -} - -public sealed class TaskPackApprovalGate -{ - [JsonPropertyName("id")] - public required string Id { get; init; } - - [JsonPropertyName("autoExpireAfter")] - public string? AutoExpireAfter { get; init; } -} - -public sealed class TaskPackPolicyGate -{ - [JsonPropertyName("policy")] - public required string Policy { get; init; } - - [JsonPropertyName("parameters")] - public IDictionary<string, JsonNode?>? Parameters { get; init; } -} - -public sealed class TaskPackParallelStep -{ - [JsonPropertyName("steps")] - public IReadOnlyList<TaskPackStep> Steps { get; init; } = Array.Empty<TaskPackStep>(); - - [JsonPropertyName("maxParallel")] - public int? MaxParallel { get; init; } - - [JsonPropertyName("continueOnError")] - public bool ContinueOnError { get; init; } -} - -public sealed class TaskPackMapStep -{ - [JsonPropertyName("items")] - public required string Items { get; init; } - - [JsonPropertyName("step")] - public required TaskPackStep Step { get; init; } -} - -public sealed class TaskPackOutput -{ - [JsonPropertyName("name")] - public required string Name { get; init; } - - [JsonPropertyName("type")] - public required string Type { get; init; } - - [JsonPropertyName("path")] - public string? Path { get; init; } - - [JsonPropertyName("expression")] - public string? Expression { get; init; } -} - -public sealed class TaskPackSuccess -{ - [JsonPropertyName("message")] - public string? Message { get; init; } -} - -public sealed class TaskPackFailure -{ - [JsonPropertyName("message")] - public string? Message { get; init; } - - [JsonPropertyName("retries")] - public TaskPackRetryPolicy? 
Retries { get; init; } -} - -public sealed class TaskPackRetryPolicy -{ - [JsonPropertyName("maxAttempts")] - public int MaxAttempts { get; init; } - - [JsonPropertyName("backoffSeconds")] - public int BackoffSeconds { get; init; } -} +using System.Text.Json.Nodes; +using System.Text.Json.Serialization; + +namespace StellaOps.TaskRunner.Core.TaskPacks; + +public sealed class TaskPackManifest +{ + [JsonPropertyName("apiVersion")] + public required string ApiVersion { get; init; } + + [JsonPropertyName("kind")] + public required string Kind { get; init; } + + [JsonPropertyName("metadata")] + public required TaskPackMetadata Metadata { get; init; } + + [JsonPropertyName("spec")] + public required TaskPackSpec Spec { get; init; } +} + +public sealed class TaskPackMetadata +{ + [JsonPropertyName("name")] + public required string Name { get; init; } + + [JsonPropertyName("version")] + public required string Version { get; init; } + + [JsonPropertyName("description")] + public string? Description { get; init; } + + [JsonPropertyName("tags")] + public IReadOnlyList<string>? Tags { get; init; } + + [JsonPropertyName("tenantVisibility")] + public IReadOnlyList<string>? TenantVisibility { get; init; } + + [JsonPropertyName("maintainers")] + public IReadOnlyList<TaskPackMaintainer>? Maintainers { get; init; } + + [JsonPropertyName("license")] + public string? License { get; init; } + + [JsonPropertyName("annotations")] + public IReadOnlyDictionary<string, string>? Annotations { get; init; } +} + +public sealed class TaskPackMaintainer +{ + [JsonPropertyName("name")] + public required string Name { get; init; } + + [JsonPropertyName("email")] + public string? Email { get; init; } +} + +public sealed class TaskPackSpec +{ + [JsonPropertyName("inputs")] + public IReadOnlyList<TaskPackInput>? Inputs { get; init; } + + [JsonPropertyName("secrets")] + public IReadOnlyList<TaskPackSecret>? Secrets { get; init; } + + [JsonPropertyName("approvals")] + public IReadOnlyList<TaskPackApproval>? Approvals { get; init; } + + [JsonPropertyName("steps")] + public IReadOnlyList<TaskPackStep> Steps { get; init; } = Array.Empty<TaskPackStep>(); + + [JsonPropertyName("outputs")] + public IReadOnlyList<TaskPackOutput>? Outputs { get; init; } + + [JsonPropertyName("success")] + public TaskPackSuccess? Success { get; init; } + + [JsonPropertyName("failure")] + public TaskPackFailure? Failure { get; init; } +} + +public sealed class TaskPackInput +{ + [JsonPropertyName("name")] + public required string Name { get; init; } + + [JsonPropertyName("type")] + public required string Type { get; init; } + + [JsonPropertyName("schema")] + public string? Schema { get; init; } + + [JsonPropertyName("required")] + public bool Required { get; init; } + + [JsonPropertyName("description")] + public string? Description { get; init; } + + [JsonPropertyName("default")] + public JsonNode? Default { get; init; } +} + +public sealed class TaskPackSecret +{ + [JsonPropertyName("name")] + public required string Name { get; init; } + + [JsonPropertyName("scope")] + public required string Scope { get; init; } + + [JsonPropertyName("description")] + public string? Description { get; init; } +} + +public sealed class TaskPackApproval +{ + [JsonPropertyName("id")] + public required string Id { get; init; } + + [JsonPropertyName("grants")] + public IReadOnlyList<string> Grants { get; init; } = Array.Empty<string>(); + + [JsonPropertyName("expiresAfter")] + public string? 
ExpiresAfter { get; init; } + + [JsonPropertyName("reasonTemplate")] + public string? ReasonTemplate { get; init; } +} + +public sealed class TaskPackStep +{ + [JsonPropertyName("id")] + public required string Id { get; init; } + + [JsonPropertyName("name")] + public string? Name { get; init; } + + [JsonPropertyName("when")] + public string? When { get; init; } + + [JsonPropertyName("run")] + public TaskPackRunStep? Run { get; init; } + + [JsonPropertyName("gate")] + public TaskPackGateStep? Gate { get; init; } + + [JsonPropertyName("parallel")] + public TaskPackParallelStep? Parallel { get; init; } + + [JsonPropertyName("map")] + public TaskPackMapStep? Map { get; init; } +} + +public sealed class TaskPackRunStep +{ + [JsonPropertyName("uses")] + public required string Uses { get; init; } + + [JsonPropertyName("with")] + public IDictionary<string, JsonNode?>? With { get; init; } +} + +public sealed class TaskPackGateStep +{ + [JsonPropertyName("approval")] + public TaskPackApprovalGate? Approval { get; init; } + + [JsonPropertyName("policy")] + public TaskPackPolicyGate? Policy { get; init; } + + [JsonPropertyName("message")] + public string? Message { get; init; } +} + +public sealed class TaskPackApprovalGate +{ + [JsonPropertyName("id")] + public required string Id { get; init; } + + [JsonPropertyName("autoExpireAfter")] + public string? AutoExpireAfter { get; init; } +} + +public sealed class TaskPackPolicyGate +{ + [JsonPropertyName("policy")] + public required string Policy { get; init; } + + [JsonPropertyName("parameters")] + public IDictionary<string, JsonNode?>? Parameters { get; init; } +} + +public sealed class TaskPackParallelStep +{ + [JsonPropertyName("steps")] + public IReadOnlyList<TaskPackStep> Steps { get; init; } = Array.Empty<TaskPackStep>(); + + [JsonPropertyName("maxParallel")] + public int? MaxParallel { get; init; } + + [JsonPropertyName("continueOnError")] + public bool ContinueOnError { get; init; } +} + +public sealed class TaskPackMapStep +{ + [JsonPropertyName("items")] + public required string Items { get; init; } + + [JsonPropertyName("step")] + public required TaskPackStep Step { get; init; } +} + +public sealed class TaskPackOutput +{ + [JsonPropertyName("name")] + public required string Name { get; init; } + + [JsonPropertyName("type")] + public required string Type { get; init; } + + [JsonPropertyName("path")] + public string? Path { get; init; } + + [JsonPropertyName("expression")] + public string? Expression { get; init; } +} + +public sealed class TaskPackSuccess +{ + [JsonPropertyName("message")] + public string? Message { get; init; } +} + +public sealed class TaskPackFailure +{ + [JsonPropertyName("message")] + public string? Message { get; init; } + + [JsonPropertyName("retries")] + public TaskPackRetryPolicy? 
Retries { get; init; } +} + +public sealed class TaskPackRetryPolicy +{ + [JsonPropertyName("maxAttempts")] + public int MaxAttempts { get; init; } + + [JsonPropertyName("backoffSeconds")] + public int BackoffSeconds { get; init; } +} diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/TaskPacks/TaskPackManifestLoader.cs b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/TaskPacks/TaskPackManifestLoader.cs similarity index 96% rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/TaskPacks/TaskPackManifestLoader.cs rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/TaskPacks/TaskPackManifestLoader.cs index d93e3d75..0c3490f4 100644 --- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/TaskPacks/TaskPackManifestLoader.cs +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/TaskPacks/TaskPackManifestLoader.cs @@ -1,168 +1,168 @@ -using System.Collections; -using System.Globalization; -using System.Text; -using System.Text.Json; -using System.Text.Json.Nodes; -using System.Text.Json.Serialization; -using YamlDotNet.Serialization; -using YamlDotNet.Serialization.NamingConventions; - -namespace StellaOps.TaskRunner.Core.TaskPacks; - -public sealed class TaskPackManifestLoader -{ - private static readonly JsonSerializerOptions SerializerOptions = new() - { - PropertyNameCaseInsensitive = true, - PropertyNamingPolicy = JsonNamingPolicy.CamelCase, - ReadCommentHandling = JsonCommentHandling.Skip, - AllowTrailingCommas = true, - DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull - }; - - public async Task<TaskPackManifest> LoadAsync(Stream stream, CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(stream); - - using var reader = new StreamReader(stream, Encoding.UTF8, detectEncodingFromByteOrderMarks: true, bufferSize: 4096, leaveOpen: true); - var yaml = await reader.ReadToEndAsync().ConfigureAwait(false); - cancellationToken.ThrowIfCancellationRequested(); - - return Deserialize(yaml); - } - - public TaskPackManifest Load(string path) - { - if (string.IsNullOrWhiteSpace(path)) - { - throw new ArgumentException("Path must not be empty.", nameof(path)); - } - - using var stream = File.OpenRead(path); - return LoadAsync(stream).GetAwaiter().GetResult(); - } - - public TaskPackManifest Deserialize(string yaml) - { - if (string.IsNullOrWhiteSpace(yaml)) - { - throw new TaskPackManifestLoadException("Manifest is empty."); - } - - try - { - var deserializer = new DeserializerBuilder() - .WithNamingConvention(CamelCaseNamingConvention.Instance) - .IgnoreUnmatchedProperties() - .Build(); - - using var reader = new StringReader(yaml); - var yamlObject = deserializer.Deserialize(reader); - if (yamlObject is null) - { - throw new TaskPackManifestLoadException("Manifest is empty."); - } - - var node = ConvertToJsonNode(yamlObject); - if (node is null) - { - throw new TaskPackManifestLoadException("Manifest is empty."); - } - - var manifest = node.Deserialize<TaskPackManifest>(SerializerOptions); - if (manifest is null) - { - throw new TaskPackManifestLoadException("Unable to deserialize manifest."); - } - - return manifest; - } - catch (TaskPackManifestLoadException) - { - throw; - } - catch (Exception ex) - { - throw new TaskPackManifestLoadException(string.Format(CultureInfo.InvariantCulture, "Failed to parse manifest: {0}", ex.Message), ex); - } - } - - private static JsonNode? ConvertToJsonNode(object? 
value) - { - switch (value) - { - case null: - return null; - case string s: - if (bool.TryParse(s, out var boolValue)) - { - return JsonValue.Create(boolValue); - } - - if (long.TryParse(s, NumberStyles.Integer, CultureInfo.InvariantCulture, out var longValue)) - { - return JsonValue.Create(longValue); - } - - if (double.TryParse(s, NumberStyles.Float, CultureInfo.InvariantCulture, out var doubleValue)) - { - return JsonValue.Create(doubleValue); - } - - return JsonValue.Create(s); - case bool b: - return JsonValue.Create(b); - case int i: - return JsonValue.Create(i); - case long l: - return JsonValue.Create(l); - case double d: - return JsonValue.Create(d); - case float f: - return JsonValue.Create(f); - case decimal dec: - return JsonValue.Create(dec); - case IDictionary<object, object> dictionary: - { - var obj = new JsonObject(); - foreach (var kvp in dictionary) - { - var key = Convert.ToString(kvp.Key, CultureInfo.InvariantCulture); - if (string.IsNullOrEmpty(key)) - { - continue; - } - - obj[key] = ConvertToJsonNode(kvp.Value); - } - - return obj; - } - case IEnumerable enumerable: - { - var array = new JsonArray(); - foreach (var item in enumerable) - { - array.Add(ConvertToJsonNode(item)); - } - - return array; - } - default: - return JsonValue.Create(value.ToString()); - } - } -} - -public sealed class TaskPackManifestLoadException : Exception -{ - public TaskPackManifestLoadException(string message) - : base(message) - { - } - - public TaskPackManifestLoadException(string message, Exception innerException) - : base(message, innerException) - { - } -} +using System.Collections; +using System.Globalization; +using System.Text; +using System.Text.Json; +using System.Text.Json.Nodes; +using System.Text.Json.Serialization; +using YamlDotNet.Serialization; +using YamlDotNet.Serialization.NamingConventions; + +namespace StellaOps.TaskRunner.Core.TaskPacks; + +public sealed class TaskPackManifestLoader +{ + private static readonly JsonSerializerOptions SerializerOptions = new() + { + PropertyNameCaseInsensitive = true, + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + ReadCommentHandling = JsonCommentHandling.Skip, + AllowTrailingCommas = true, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull + }; + + public async Task<TaskPackManifest> LoadAsync(Stream stream, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(stream); + + using var reader = new StreamReader(stream, Encoding.UTF8, detectEncodingFromByteOrderMarks: true, bufferSize: 4096, leaveOpen: true); + var yaml = await reader.ReadToEndAsync().ConfigureAwait(false); + cancellationToken.ThrowIfCancellationRequested(); + + return Deserialize(yaml); + } + + public TaskPackManifest Load(string path) + { + if (string.IsNullOrWhiteSpace(path)) + { + throw new ArgumentException("Path must not be empty.", nameof(path)); + } + + using var stream = File.OpenRead(path); + return LoadAsync(stream).GetAwaiter().GetResult(); + } + + public TaskPackManifest Deserialize(string yaml) + { + if (string.IsNullOrWhiteSpace(yaml)) + { + throw new TaskPackManifestLoadException("Manifest is empty."); + } + + try + { + var deserializer = new DeserializerBuilder() + .WithNamingConvention(CamelCaseNamingConvention.Instance) + .IgnoreUnmatchedProperties() + .Build(); + + using var reader = new StringReader(yaml); + var yamlObject = deserializer.Deserialize(reader); + if (yamlObject is null) + { + throw new TaskPackManifestLoadException("Manifest is empty."); + } + + var node = 
ConvertToJsonNode(yamlObject); + if (node is null) + { + throw new TaskPackManifestLoadException("Manifest is empty."); + } + + var manifest = node.Deserialize<TaskPackManifest>(SerializerOptions); + if (manifest is null) + { + throw new TaskPackManifestLoadException("Unable to deserialize manifest."); + } + + return manifest; + } + catch (TaskPackManifestLoadException) + { + throw; + } + catch (Exception ex) + { + throw new TaskPackManifestLoadException(string.Format(CultureInfo.InvariantCulture, "Failed to parse manifest: {0}", ex.Message), ex); + } + } + + private static JsonNode? ConvertToJsonNode(object? value) + { + switch (value) + { + case null: + return null; + case string s: + if (bool.TryParse(s, out var boolValue)) + { + return JsonValue.Create(boolValue); + } + + if (long.TryParse(s, NumberStyles.Integer, CultureInfo.InvariantCulture, out var longValue)) + { + return JsonValue.Create(longValue); + } + + if (double.TryParse(s, NumberStyles.Float, CultureInfo.InvariantCulture, out var doubleValue)) + { + return JsonValue.Create(doubleValue); + } + + return JsonValue.Create(s); + case bool b: + return JsonValue.Create(b); + case int i: + return JsonValue.Create(i); + case long l: + return JsonValue.Create(l); + case double d: + return JsonValue.Create(d); + case float f: + return JsonValue.Create(f); + case decimal dec: + return JsonValue.Create(dec); + case IDictionary<object, object> dictionary: + { + var obj = new JsonObject(); + foreach (var kvp in dictionary) + { + var key = Convert.ToString(kvp.Key, CultureInfo.InvariantCulture); + if (string.IsNullOrEmpty(key)) + { + continue; + } + + obj[key] = ConvertToJsonNode(kvp.Value); + } + + return obj; + } + case IEnumerable enumerable: + { + var array = new JsonArray(); + foreach (var item in enumerable) + { + array.Add(ConvertToJsonNode(item)); + } + + return array; + } + default: + return JsonValue.Create(value.ToString()); + } + } +} + +public sealed class TaskPackManifestLoadException : Exception +{ + public TaskPackManifestLoadException(string message) + : base(message) + { + } + + public TaskPackManifestLoadException(string message, Exception innerException) + : base(message, innerException) + { + } +} diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/TaskPacks/TaskPackManifestValidator.cs b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/TaskPacks/TaskPackManifestValidator.cs similarity index 97% rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/TaskPacks/TaskPackManifestValidator.cs rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/TaskPacks/TaskPackManifestValidator.cs index 7f906546..413149d3 100644 --- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/TaskPacks/TaskPackManifestValidator.cs +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Core/TaskPacks/TaskPackManifestValidator.cs @@ -1,235 +1,235 @@ -using System.Collections.Immutable; -using System.Text.RegularExpressions; -using System.Linq; - -namespace StellaOps.TaskRunner.Core.TaskPacks; - -public sealed class TaskPackManifestValidator -{ - private static readonly Regex NameRegex = new("^[a-z0-9]([-a-z0-9]*[a-z0-9])?$", RegexOptions.Compiled | RegexOptions.CultureInvariant); - private static readonly Regex VersionRegex = new("^[0-9]+\\.[0-9]+\\.[0-9]+(?:[-+][0-9A-Za-z-.]+)?$", RegexOptions.Compiled | RegexOptions.CultureInvariant); - - public TaskPackManifestValidationResult Validate(TaskPackManifest manifest) - { - ArgumentNullException.ThrowIfNull(manifest); - - var errors = new 
List<TaskPackManifestValidationError>(); - - if (!string.Equals(manifest.ApiVersion, "stellaops.io/pack.v1", StringComparison.Ordinal)) - { - errors.Add(new TaskPackManifestValidationError("apiVersion", "Only apiVersion 'stellaops.io/pack.v1' is supported.")); - } - - if (!string.Equals(manifest.Kind, "TaskPack", StringComparison.Ordinal)) - { - errors.Add(new TaskPackManifestValidationError("kind", "Kind must be 'TaskPack'.")); - } - - ValidateMetadata(manifest.Metadata, errors); - ValidateSpec(manifest.Spec, errors); - - return new TaskPackManifestValidationResult(errors.ToImmutableArray()); - } - - private static void ValidateMetadata(TaskPackMetadata metadata, ICollection<TaskPackManifestValidationError> errors) - { - if (string.IsNullOrWhiteSpace(metadata.Name)) - { - errors.Add(new TaskPackManifestValidationError("metadata.name", "Name is required.")); - } - else if (!NameRegex.IsMatch(metadata.Name)) - { - errors.Add(new TaskPackManifestValidationError("metadata.name", "Name must follow DNS-1123 naming (lowercase alphanumeric plus '-').")); - } - - if (string.IsNullOrWhiteSpace(metadata.Version)) - { - errors.Add(new TaskPackManifestValidationError("metadata.version", "Version is required.")); - } - else if (!VersionRegex.IsMatch(metadata.Version)) - { - errors.Add(new TaskPackManifestValidationError("metadata.version", "Version must follow SemVer (major.minor.patch[+/-metadata]).")); - } - } - - private static void ValidateSpec(TaskPackSpec spec, ICollection<TaskPackManifestValidationError> errors) - { - if (spec.Steps is null || spec.Steps.Count == 0) - { - errors.Add(new TaskPackManifestValidationError("spec.steps", "At least one step is required.")); - return; - } - - var stepIds = new HashSet<string>(StringComparer.Ordinal); - var approvalIds = new HashSet<string>(StringComparer.Ordinal); - - if (spec.Approvals is not null) - { - foreach (var approval in spec.Approvals) - { - if (!approvalIds.Add(approval.Id)) - { - errors.Add(new TaskPackManifestValidationError($"spec.approvals[{approval.Id}]", "Duplicate approval id.")); - } - } - } - - ValidateInputs(spec, errors); - - ValidateSteps(spec.Steps, "spec.steps", stepIds, approvalIds, errors); - } - - private static void ValidateInputs(TaskPackSpec spec, ICollection<TaskPackManifestValidationError> errors) - { - if (spec.Inputs is null) - { - return; - } - - var seen = new HashSet<string>(StringComparer.Ordinal); - - foreach (var (input, index) in spec.Inputs.Select((input, index) => (input, index))) - { - var prefix = $"spec.inputs[{index}]"; - - if (!seen.Add(input.Name)) - { - errors.Add(new TaskPackManifestValidationError($"{prefix}.name", "Duplicate input name.")); - } - - if (string.IsNullOrWhiteSpace(input.Type)) - { - errors.Add(new TaskPackManifestValidationError($"{prefix}.type", "Input type is required.")); - } - } - } - - private static void ValidateSteps( - IReadOnlyList<TaskPackStep> steps, - string pathPrefix, - HashSet<string> stepIds, - HashSet<string> approvalIds, - ICollection<TaskPackManifestValidationError> errors) - { - foreach (var (step, index) in steps.Select((step, index) => (step, index))) - { - var path = $"{pathPrefix}[{index}]"; - - if (!stepIds.Add(step.Id)) - { - errors.Add(new TaskPackManifestValidationError($"{path}.id", "Duplicate step id.")); - } - - var typeCount = (step.Run is not null ? 1 : 0) - + (step.Gate is not null ? 1 : 0) - + (step.Parallel is not null ? 1 : 0) - + (step.Map is not null ? 
1 : 0); - - if (typeCount == 0) - { - errors.Add(new TaskPackManifestValidationError(path, "Step must define one of run, gate, parallel, or map.")); - } - else if (typeCount > 1) - { - errors.Add(new TaskPackManifestValidationError(path, "Step may define only one of run, gate, parallel, or map.")); - } - - if (step.Run is not null) - { - ValidateRunStep(step.Run, $"{path}.run", errors); - } - - if (step.Gate is not null) - { - ValidateGateStep(step.Gate, approvalIds, $"{path}.gate", errors); - } - - if (step.Parallel is not null) - { - ValidateParallelStep(step.Parallel, $"{path}.parallel", stepIds, approvalIds, errors); - } - - if (step.Map is not null) - { - ValidateMapStep(step.Map, $"{path}.map", stepIds, approvalIds, errors); - } - } - } - - private static void ValidateRunStep(TaskPackRunStep run, string path, ICollection<TaskPackManifestValidationError> errors) - { - if (string.IsNullOrWhiteSpace(run.Uses)) - { - errors.Add(new TaskPackManifestValidationError($"{path}.uses", "Run step requires 'uses'.")); - } - } - - private static void ValidateGateStep(TaskPackGateStep gate, HashSet<string> approvalIds, string path, ICollection<TaskPackManifestValidationError> errors) - { - if (gate.Approval is null && gate.Policy is null) - { - errors.Add(new TaskPackManifestValidationError(path, "Gate step requires 'approval' or 'policy'.")); - return; - } - - if (gate.Approval is not null) - { - if (!approvalIds.Contains(gate.Approval.Id)) - { - errors.Add(new TaskPackManifestValidationError($"{path}.approval.id", $"Approval '{gate.Approval.Id}' is not declared under spec.approvals.")); - } - } - } - - private static void ValidateParallelStep( - TaskPackParallelStep parallel, - string path, - HashSet<string> stepIds, - HashSet<string> approvalIds, - ICollection<TaskPackManifestValidationError> errors) - { - if (parallel.Steps.Count == 0) - { - errors.Add(new TaskPackManifestValidationError($"{path}.steps", "Parallel step requires nested steps.")); - return; - } - - ValidateSteps(parallel.Steps, $"{path}.steps", stepIds, approvalIds, errors); - } - - private static void ValidateMapStep( - TaskPackMapStep map, - string path, - HashSet<string> stepIds, - HashSet<string> approvalIds, - ICollection<TaskPackManifestValidationError> errors) - { - if (string.IsNullOrWhiteSpace(map.Items)) - { - errors.Add(new TaskPackManifestValidationError($"{path}.items", "Map step requires 'items' expression.")); - } - - if (map.Step is null) - { - errors.Add(new TaskPackManifestValidationError($"{path}.step", "Map step requires nested step definition.")); - } - else - { - ValidateSteps(new[] { map.Step }, $"{path}.step", stepIds, approvalIds, errors); - } - } -} - -public sealed record TaskPackManifestValidationError(string Path, string Message); - -public sealed class TaskPackManifestValidationResult -{ - public TaskPackManifestValidationResult(ImmutableArray<TaskPackManifestValidationError> errors) - { - Errors = errors; - } - - public ImmutableArray<TaskPackManifestValidationError> Errors { get; } - - public bool IsValid => Errors.IsDefaultOrEmpty; -} +using System.Collections.Immutable; +using System.Text.RegularExpressions; +using System.Linq; + +namespace StellaOps.TaskRunner.Core.TaskPacks; + +public sealed class TaskPackManifestValidator +{ + private static readonly Regex NameRegex = new("^[a-z0-9]([-a-z0-9]*[a-z0-9])?$", RegexOptions.Compiled | RegexOptions.CultureInvariant); + private static readonly Regex VersionRegex = new("^[0-9]+\\.[0-9]+\\.[0-9]+(?:[-+][0-9A-Za-z-.]+)?$", RegexOptions.Compiled | 
RegexOptions.CultureInvariant); + + public TaskPackManifestValidationResult Validate(TaskPackManifest manifest) + { + ArgumentNullException.ThrowIfNull(manifest); + + var errors = new List<TaskPackManifestValidationError>(); + + if (!string.Equals(manifest.ApiVersion, "stellaops.io/pack.v1", StringComparison.Ordinal)) + { + errors.Add(new TaskPackManifestValidationError("apiVersion", "Only apiVersion 'stellaops.io/pack.v1' is supported.")); + } + + if (!string.Equals(manifest.Kind, "TaskPack", StringComparison.Ordinal)) + { + errors.Add(new TaskPackManifestValidationError("kind", "Kind must be 'TaskPack'.")); + } + + ValidateMetadata(manifest.Metadata, errors); + ValidateSpec(manifest.Spec, errors); + + return new TaskPackManifestValidationResult(errors.ToImmutableArray()); + } + + private static void ValidateMetadata(TaskPackMetadata metadata, ICollection<TaskPackManifestValidationError> errors) + { + if (string.IsNullOrWhiteSpace(metadata.Name)) + { + errors.Add(new TaskPackManifestValidationError("metadata.name", "Name is required.")); + } + else if (!NameRegex.IsMatch(metadata.Name)) + { + errors.Add(new TaskPackManifestValidationError("metadata.name", "Name must follow DNS-1123 naming (lowercase alphanumeric plus '-').")); + } + + if (string.IsNullOrWhiteSpace(metadata.Version)) + { + errors.Add(new TaskPackManifestValidationError("metadata.version", "Version is required.")); + } + else if (!VersionRegex.IsMatch(metadata.Version)) + { + errors.Add(new TaskPackManifestValidationError("metadata.version", "Version must follow SemVer (major.minor.patch[+/-metadata]).")); + } + } + + private static void ValidateSpec(TaskPackSpec spec, ICollection<TaskPackManifestValidationError> errors) + { + if (spec.Steps is null || spec.Steps.Count == 0) + { + errors.Add(new TaskPackManifestValidationError("spec.steps", "At least one step is required.")); + return; + } + + var stepIds = new HashSet<string>(StringComparer.Ordinal); + var approvalIds = new HashSet<string>(StringComparer.Ordinal); + + if (spec.Approvals is not null) + { + foreach (var approval in spec.Approvals) + { + if (!approvalIds.Add(approval.Id)) + { + errors.Add(new TaskPackManifestValidationError($"spec.approvals[{approval.Id}]", "Duplicate approval id.")); + } + } + } + + ValidateInputs(spec, errors); + + ValidateSteps(spec.Steps, "spec.steps", stepIds, approvalIds, errors); + } + + private static void ValidateInputs(TaskPackSpec spec, ICollection<TaskPackManifestValidationError> errors) + { + if (spec.Inputs is null) + { + return; + } + + var seen = new HashSet<string>(StringComparer.Ordinal); + + foreach (var (input, index) in spec.Inputs.Select((input, index) => (input, index))) + { + var prefix = $"spec.inputs[{index}]"; + + if (!seen.Add(input.Name)) + { + errors.Add(new TaskPackManifestValidationError($"{prefix}.name", "Duplicate input name.")); + } + + if (string.IsNullOrWhiteSpace(input.Type)) + { + errors.Add(new TaskPackManifestValidationError($"{prefix}.type", "Input type is required.")); + } + } + } + + private static void ValidateSteps( + IReadOnlyList<TaskPackStep> steps, + string pathPrefix, + HashSet<string> stepIds, + HashSet<string> approvalIds, + ICollection<TaskPackManifestValidationError> errors) + { + foreach (var (step, index) in steps.Select((step, index) => (step, index))) + { + var path = $"{pathPrefix}[{index}]"; + + if (!stepIds.Add(step.Id)) + { + errors.Add(new TaskPackManifestValidationError($"{path}.id", "Duplicate step id.")); + } + + var typeCount = (step.Run is not null ? 
1 : 0) + + (step.Gate is not null ? 1 : 0) + + (step.Parallel is not null ? 1 : 0) + + (step.Map is not null ? 1 : 0); + + if (typeCount == 0) + { + errors.Add(new TaskPackManifestValidationError(path, "Step must define one of run, gate, parallel, or map.")); + } + else if (typeCount > 1) + { + errors.Add(new TaskPackManifestValidationError(path, "Step may define only one of run, gate, parallel, or map.")); + } + + if (step.Run is not null) + { + ValidateRunStep(step.Run, $"{path}.run", errors); + } + + if (step.Gate is not null) + { + ValidateGateStep(step.Gate, approvalIds, $"{path}.gate", errors); + } + + if (step.Parallel is not null) + { + ValidateParallelStep(step.Parallel, $"{path}.parallel", stepIds, approvalIds, errors); + } + + if (step.Map is not null) + { + ValidateMapStep(step.Map, $"{path}.map", stepIds, approvalIds, errors); + } + } + } + + private static void ValidateRunStep(TaskPackRunStep run, string path, ICollection<TaskPackManifestValidationError> errors) + { + if (string.IsNullOrWhiteSpace(run.Uses)) + { + errors.Add(new TaskPackManifestValidationError($"{path}.uses", "Run step requires 'uses'.")); + } + } + + private static void ValidateGateStep(TaskPackGateStep gate, HashSet<string> approvalIds, string path, ICollection<TaskPackManifestValidationError> errors) + { + if (gate.Approval is null && gate.Policy is null) + { + errors.Add(new TaskPackManifestValidationError(path, "Gate step requires 'approval' or 'policy'.")); + return; + } + + if (gate.Approval is not null) + { + if (!approvalIds.Contains(gate.Approval.Id)) + { + errors.Add(new TaskPackManifestValidationError($"{path}.approval.id", $"Approval '{gate.Approval.Id}' is not declared under spec.approvals.")); + } + } + } + + private static void ValidateParallelStep( + TaskPackParallelStep parallel, + string path, + HashSet<string> stepIds, + HashSet<string> approvalIds, + ICollection<TaskPackManifestValidationError> errors) + { + if (parallel.Steps.Count == 0) + { + errors.Add(new TaskPackManifestValidationError($"{path}.steps", "Parallel step requires nested steps.")); + return; + } + + ValidateSteps(parallel.Steps, $"{path}.steps", stepIds, approvalIds, errors); + } + + private static void ValidateMapStep( + TaskPackMapStep map, + string path, + HashSet<string> stepIds, + HashSet<string> approvalIds, + ICollection<TaskPackManifestValidationError> errors) + { + if (string.IsNullOrWhiteSpace(map.Items)) + { + errors.Add(new TaskPackManifestValidationError($"{path}.items", "Map step requires 'items' expression.")); + } + + if (map.Step is null) + { + errors.Add(new TaskPackManifestValidationError($"{path}.step", "Map step requires nested step definition.")); + } + else + { + ValidateSteps(new[] { map.Step }, $"{path}.step", stepIds, approvalIds, errors); + } + } +} + +public sealed record TaskPackManifestValidationError(string Path, string Message); + +public sealed class TaskPackManifestValidationResult +{ + public TaskPackManifestValidationResult(ImmutableArray<TaskPackManifestValidationError> errors) + { + Errors = errors; + } + + public ImmutableArray<TaskPackManifestValidationError> Errors { get; } + + public bool IsValid => Errors.IsDefaultOrEmpty; +} diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/FilePackRunApprovalStore.cs b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/FilePackRunApprovalStore.cs similarity index 97% rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/FilePackRunApprovalStore.cs 
rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/FilePackRunApprovalStore.cs index 85a7a16a..39a14094 100644 --- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/FilePackRunApprovalStore.cs +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/FilePackRunApprovalStore.cs @@ -1,118 +1,118 @@ -using System.Text.Json; -using System.Text.Json.Nodes; -using StellaOps.TaskRunner.Core.Execution; - -namespace StellaOps.TaskRunner.Infrastructure.Execution; - -public sealed class FilePackRunApprovalStore : IPackRunApprovalStore -{ - private readonly string rootPath; - private readonly JsonSerializerOptions serializerOptions = new(JsonSerializerDefaults.Web) - { - WriteIndented = true - }; - - public FilePackRunApprovalStore(string rootPath) - { - ArgumentException.ThrowIfNullOrWhiteSpace(rootPath); - this.rootPath = rootPath; - Directory.CreateDirectory(rootPath); - } - - public Task SaveAsync(string runId, IReadOnlyList<PackRunApprovalState> approvals, CancellationToken cancellationToken) - { - var path = GetFilePath(runId); - var json = SerializeApprovals(approvals); - File.WriteAllText(path, json); - return Task.CompletedTask; - } - - public Task<IReadOnlyList<PackRunApprovalState>> GetAsync(string runId, CancellationToken cancellationToken) - { - var path = GetFilePath(runId); - if (!File.Exists(path)) - { - return Task.FromResult((IReadOnlyList<PackRunApprovalState>)Array.Empty<PackRunApprovalState>()); - } - - var json = File.ReadAllText(path); - var approvals = DeserializeApprovals(json); - return Task.FromResult((IReadOnlyList<PackRunApprovalState>)approvals); - } - - public async Task UpdateAsync(string runId, PackRunApprovalState approval, CancellationToken cancellationToken) - { - var approvals = (await GetAsync(runId, cancellationToken).ConfigureAwait(false)).ToList(); - var index = approvals.FindIndex(existing => string.Equals(existing.ApprovalId, approval.ApprovalId, StringComparison.Ordinal)); - if (index < 0) - { - throw new InvalidOperationException($"Approval '{approval.ApprovalId}' not found for run '{runId}'."); - } - - approvals[index] = approval; - await SaveAsync(runId, approvals, cancellationToken).ConfigureAwait(false); - } - - private string GetFilePath(string runId) - { - var safeFile = $"{runId}.json"; - return Path.Combine(rootPath, safeFile); - } - - private string SerializeApprovals(IReadOnlyList<PackRunApprovalState> approvals) - { - var array = new JsonArray(); - foreach (var approval in approvals) - { - var node = new JsonObject - { - ["approvalId"] = approval.ApprovalId, - ["status"] = approval.Status.ToString(), - ["requestedAt"] = approval.RequestedAt, - ["actorId"] = approval.ActorId, - ["completedAt"] = approval.CompletedAt, - ["summary"] = approval.Summary, - ["requiredGrants"] = new JsonArray(approval.RequiredGrants.Select(grant => (JsonNode)grant).ToArray()), - ["stepIds"] = new JsonArray(approval.StepIds.Select(step => (JsonNode)step).ToArray()), - ["messages"] = new JsonArray(approval.Messages.Select(message => (JsonNode)message).ToArray()), - ["reasonTemplate"] = approval.ReasonTemplate - }; - - array.Add(node); - } - - return array.ToJsonString(serializerOptions); - } - - private static IReadOnlyList<PackRunApprovalState> DeserializeApprovals(string json) - { - var array = JsonNode.Parse(json)?.AsArray() ?? 
new JsonArray(); - var list = new List<PackRunApprovalState>(array.Count); - foreach (var entry in array) - { - if (entry is not JsonObject obj) - { - continue; - } - - var requiredGrants = obj["requiredGrants"]?.AsArray()?.Select(node => node!.GetValue<string>()).ToList() ?? new List<string>(); - var stepIds = obj["stepIds"]?.AsArray()?.Select(node => node!.GetValue<string>()).ToList() ?? new List<string>(); - var messages = obj["messages"]?.AsArray()?.Select(node => node!.GetValue<string>()).ToList() ?? new List<string>(); - Enum.TryParse(obj["status"]?.GetValue<string>(), ignoreCase: true, out PackRunApprovalStatus status); - - list.Add(new PackRunApprovalState( - obj["approvalId"]?.GetValue<string>() ?? string.Empty, - requiredGrants, - stepIds, - messages, - obj["reasonTemplate"]?.GetValue<string>(), - obj["requestedAt"]?.GetValue<DateTimeOffset>() ?? DateTimeOffset.UtcNow, - status, - obj["actorId"]?.GetValue<string>(), - obj["completedAt"]?.GetValue<DateTimeOffset?>(), - obj["summary"]?.GetValue<string>())); - } - - return list; - } -} +using System.Text.Json; +using System.Text.Json.Nodes; +using StellaOps.TaskRunner.Core.Execution; + +namespace StellaOps.TaskRunner.Infrastructure.Execution; + +public sealed class FilePackRunApprovalStore : IPackRunApprovalStore +{ + private readonly string rootPath; + private readonly JsonSerializerOptions serializerOptions = new(JsonSerializerDefaults.Web) + { + WriteIndented = true + }; + + public FilePackRunApprovalStore(string rootPath) + { + ArgumentException.ThrowIfNullOrWhiteSpace(rootPath); + this.rootPath = rootPath; + Directory.CreateDirectory(rootPath); + } + + public Task SaveAsync(string runId, IReadOnlyList<PackRunApprovalState> approvals, CancellationToken cancellationToken) + { + var path = GetFilePath(runId); + var json = SerializeApprovals(approvals); + File.WriteAllText(path, json); + return Task.CompletedTask; + } + + public Task<IReadOnlyList<PackRunApprovalState>> GetAsync(string runId, CancellationToken cancellationToken) + { + var path = GetFilePath(runId); + if (!File.Exists(path)) + { + return Task.FromResult((IReadOnlyList<PackRunApprovalState>)Array.Empty<PackRunApprovalState>()); + } + + var json = File.ReadAllText(path); + var approvals = DeserializeApprovals(json); + return Task.FromResult((IReadOnlyList<PackRunApprovalState>)approvals); + } + + public async Task UpdateAsync(string runId, PackRunApprovalState approval, CancellationToken cancellationToken) + { + var approvals = (await GetAsync(runId, cancellationToken).ConfigureAwait(false)).ToList(); + var index = approvals.FindIndex(existing => string.Equals(existing.ApprovalId, approval.ApprovalId, StringComparison.Ordinal)); + if (index < 0) + { + throw new InvalidOperationException($"Approval '{approval.ApprovalId}' not found for run '{runId}'."); + } + + approvals[index] = approval; + await SaveAsync(runId, approvals, cancellationToken).ConfigureAwait(false); + } + + private string GetFilePath(string runId) + { + var safeFile = $"{runId}.json"; + return Path.Combine(rootPath, safeFile); + } + + private string SerializeApprovals(IReadOnlyList<PackRunApprovalState> approvals) + { + var array = new JsonArray(); + foreach (var approval in approvals) + { + var node = new JsonObject + { + ["approvalId"] = approval.ApprovalId, + ["status"] = approval.Status.ToString(), + ["requestedAt"] = approval.RequestedAt, + ["actorId"] = approval.ActorId, + ["completedAt"] = approval.CompletedAt, + ["summary"] = approval.Summary, + ["requiredGrants"] = new 
JsonArray(approval.RequiredGrants.Select(grant => (JsonNode)grant).ToArray()), + ["stepIds"] = new JsonArray(approval.StepIds.Select(step => (JsonNode)step).ToArray()), + ["messages"] = new JsonArray(approval.Messages.Select(message => (JsonNode)message).ToArray()), + ["reasonTemplate"] = approval.ReasonTemplate + }; + + array.Add(node); + } + + return array.ToJsonString(serializerOptions); + } + + private static IReadOnlyList<PackRunApprovalState> DeserializeApprovals(string json) + { + var array = JsonNode.Parse(json)?.AsArray() ?? new JsonArray(); + var list = new List<PackRunApprovalState>(array.Count); + foreach (var entry in array) + { + if (entry is not JsonObject obj) + { + continue; + } + + var requiredGrants = obj["requiredGrants"]?.AsArray()?.Select(node => node!.GetValue<string>()).ToList() ?? new List<string>(); + var stepIds = obj["stepIds"]?.AsArray()?.Select(node => node!.GetValue<string>()).ToList() ?? new List<string>(); + var messages = obj["messages"]?.AsArray()?.Select(node => node!.GetValue<string>()).ToList() ?? new List<string>(); + Enum.TryParse(obj["status"]?.GetValue<string>(), ignoreCase: true, out PackRunApprovalStatus status); + + list.Add(new PackRunApprovalState( + obj["approvalId"]?.GetValue<string>() ?? string.Empty, + requiredGrants, + stepIds, + messages, + obj["reasonTemplate"]?.GetValue<string>(), + obj["requestedAt"]?.GetValue<DateTimeOffset>() ?? DateTimeOffset.UtcNow, + status, + obj["actorId"]?.GetValue<string>(), + obj["completedAt"]?.GetValue<DateTimeOffset?>(), + obj["summary"]?.GetValue<string>())); + } + + return list; + } +} diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/FilesystemPackRunDispatcher.cs b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/FilesystemPackRunDispatcher.cs similarity index 97% rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/FilesystemPackRunDispatcher.cs rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/FilesystemPackRunDispatcher.cs index 624d4fad..ec242226 100644 --- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/FilesystemPackRunDispatcher.cs +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/FilesystemPackRunDispatcher.cs @@ -1,92 +1,92 @@ -using System.Text.Json; -using System.Text.Json.Nodes; -using StellaOps.TaskRunner.Core.Execution; -using StellaOps.TaskRunner.Core.Planning; -using StellaOps.TaskRunner.Core.TaskPacks; - -namespace StellaOps.TaskRunner.Infrastructure.Execution; - -public sealed class FilesystemPackRunDispatcher : IPackRunJobDispatcher -{ - private readonly string queuePath; - private readonly string archivePath; - private readonly TaskPackManifestLoader manifestLoader = new(); - private readonly TaskPackPlanner planner = new(); - private readonly JsonSerializerOptions serializerOptions = new(JsonSerializerDefaults.Web); - - public FilesystemPackRunDispatcher(string queuePath, string archivePath) - { - this.queuePath = queuePath ?? throw new ArgumentNullException(nameof(queuePath)); - this.archivePath = archivePath ?? 
throw new ArgumentNullException(nameof(archivePath)); - Directory.CreateDirectory(queuePath); - Directory.CreateDirectory(archivePath); - } - - public async Task<PackRunExecutionContext?> TryDequeueAsync(CancellationToken cancellationToken) - { - var files = Directory.GetFiles(queuePath, "*.json", SearchOption.TopDirectoryOnly) - .OrderBy(path => path, StringComparer.Ordinal) - .ToArray(); - - foreach (var file in files) - { - cancellationToken.ThrowIfCancellationRequested(); - - try - { - var jobJson = await File.ReadAllTextAsync(file, cancellationToken).ConfigureAwait(false); - var job = JsonSerializer.Deserialize<JobEnvelope>(jobJson, serializerOptions); - if (job is null) - { - continue; - } - - var manifestPath = ResolvePath(queuePath, job.ManifestPath); - var inputsPath = job.InputsPath is null ? null : ResolvePath(queuePath, job.InputsPath); - - var manifest = manifestLoader.Load(manifestPath); - var inputs = await LoadInputsAsync(inputsPath, cancellationToken).ConfigureAwait(false); - var planResult = planner.Plan(manifest, inputs); - if (!planResult.Success || planResult.Plan is null) - { - throw new InvalidOperationException($"Failed to plan pack for run {job.RunId}: {string.Join(';', planResult.Errors.Select(e => e.Message))}"); - } - - var archiveFile = Path.Combine(archivePath, Path.GetFileName(file)); - File.Move(file, archiveFile, overwrite: true); - - var requestedAt = job.RequestedAt ?? DateTimeOffset.UtcNow; - return new PackRunExecutionContext(job.RunId ?? Guid.NewGuid().ToString("n"), planResult.Plan, requestedAt); - } - catch (Exception ex) - { - var failedPath = file + ".failed"; - File.Move(file, failedPath, overwrite: true); - Console.Error.WriteLine($"Failed to dequeue job '{file}': {ex.Message}"); - } - } - - return null; - } - - private static string ResolvePath(string root, string relative) - => Path.IsPathRooted(relative) ? relative : Path.Combine(root, relative); - - private static async Task<IDictionary<string, JsonNode?>> LoadInputsAsync(string? path, CancellationToken cancellationToken) - { - if (string.IsNullOrWhiteSpace(path) || !File.Exists(path)) - { - return new Dictionary<string, JsonNode?>(StringComparer.Ordinal); - } - - var json = await File.ReadAllTextAsync(path, cancellationToken).ConfigureAwait(false); - var node = JsonNode.Parse(json) as JsonObject; - if (node is null) - { - return new Dictionary<string, JsonNode?>(StringComparer.Ordinal); - } - - return node.ToDictionary( - pair => pair.Key, - pair => pair.Value, - StringComparer.Ordinal); - } - - private sealed record JobEnvelope(string? RunId, string ManifestPath, string? InputsPath, DateTimeOffset? RequestedAt); -} +using System.Text.Json; +using System.Text.Json.Nodes; +using StellaOps.TaskRunner.Core.Execution; +using StellaOps.TaskRunner.Core.Planning; +using StellaOps.TaskRunner.Core.TaskPacks; + +namespace StellaOps.TaskRunner.Infrastructure.Execution; + +public sealed class FilesystemPackRunDispatcher : IPackRunJobDispatcher +{ + private readonly string queuePath; + private readonly string archivePath; + private readonly TaskPackManifestLoader manifestLoader = new(); + private readonly TaskPackPlanner planner = new(); + private readonly JsonSerializerOptions serializerOptions = new(JsonSerializerDefaults.Web); + + public FilesystemPackRunDispatcher(string queuePath, string archivePath) + { + this.queuePath = queuePath ?? throw new ArgumentNullException(nameof(queuePath)); + this.archivePath = archivePath ??
throw new ArgumentNullException(nameof(archivePath)); + Directory.CreateDirectory(queuePath); + Directory.CreateDirectory(archivePath); + } + + public async Task<PackRunExecutionContext?> TryDequeueAsync(CancellationToken cancellationToken) + { + var files = Directory.GetFiles(queuePath, "*.json", SearchOption.TopDirectoryOnly) + .OrderBy(path => path, StringComparer.Ordinal) + .ToArray(); + + foreach (var file in files) + { + cancellationToken.ThrowIfCancellationRequested(); + + try + { + var jobJson = await File.ReadAllTextAsync(file, cancellationToken).ConfigureAwait(false); + var job = JsonSerializer.Deserialize<JobEnvelope>(jobJson, serializerOptions); + if (job is null) + { + continue; + } + + var manifestPath = ResolvePath(queuePath, job.ManifestPath); + var inputsPath = job.InputsPath is null ? null : ResolvePath(queuePath, job.InputsPath); + + var manifest = manifestLoader.Load(manifestPath); + var inputs = await LoadInputsAsync(inputsPath, cancellationToken).ConfigureAwait(false); + var planResult = planner.Plan(manifest, inputs); + if (!planResult.Success || planResult.Plan is null) + { + throw new InvalidOperationException($"Failed to plan pack for run {job.RunId}: {string.Join(';', planResult.Errors.Select(e => e.Message))}"); + } + + var archiveFile = Path.Combine(archivePath, Path.GetFileName(file)); + File.Move(file, archiveFile, overwrite: true); + + var requestedAt = job.RequestedAt ?? DateTimeOffset.UtcNow; + return new PackRunExecutionContext(job.RunId ?? Guid.NewGuid().ToString("n"), planResult.Plan, requestedAt); + } + catch (Exception ex) + { + var failedPath = file + ".failed"; + File.Move(file, failedPath, overwrite: true); + Console.Error.WriteLine($"Failed to dequeue job '{file}': {ex.Message}"); + } + } + + return null; + } + + private static string ResolvePath(string root, string relative) + => Path.IsPathRooted(relative) ? relative : Path.Combine(root, relative); + + private static async Task<IDictionary<string, JsonNode?>> LoadInputsAsync(string? path, CancellationToken cancellationToken) + { + if (string.IsNullOrWhiteSpace(path) || !File.Exists(path)) + { + return new Dictionary<string, JsonNode?>(StringComparer.Ordinal); + } + + var json = await File.ReadAllTextAsync(path, cancellationToken).ConfigureAwait(false); + var node = JsonNode.Parse(json) as JsonObject; + if (node is null) + { + return new Dictionary<string, JsonNode?>(StringComparer.Ordinal); + } + + return node.ToDictionary( + pair => pair.Key, + pair => pair.Value, + StringComparer.Ordinal); + } + + private sealed record JobEnvelope(string? RunId, string ManifestPath, string? InputsPath, DateTimeOffset?
RequestedAt); +} diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/HttpPackRunNotificationPublisher.cs b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/HttpPackRunNotificationPublisher.cs similarity index 97% rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/HttpPackRunNotificationPublisher.cs rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/HttpPackRunNotificationPublisher.cs index 428a9c2e..27e980a8 100644 --- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/HttpPackRunNotificationPublisher.cs +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/HttpPackRunNotificationPublisher.cs @@ -1,73 +1,73 @@ -using System.Net.Http.Json; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.TaskRunner.Core.Execution; - -namespace StellaOps.TaskRunner.Infrastructure.Execution; - -public sealed class HttpPackRunNotificationPublisher : IPackRunNotificationPublisher -{ - private readonly IHttpClientFactory httpClientFactory; - private readonly NotificationOptions options; - private readonly ILogger<HttpPackRunNotificationPublisher> logger; - - public HttpPackRunNotificationPublisher( - IHttpClientFactory httpClientFactory, - IOptions<NotificationOptions> options, - ILogger<HttpPackRunNotificationPublisher> logger) - { - this.httpClientFactory = httpClientFactory ?? throw new ArgumentNullException(nameof(httpClientFactory)); - this.options = options?.Value ?? throw new ArgumentNullException(nameof(options)); - this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); - } - - public async Task PublishApprovalRequestedAsync(string runId, ApprovalNotification notification, CancellationToken cancellationToken) - { - if (options.ApprovalEndpoint is null) - { - logger.LogWarning("Approval endpoint not configured; skipping approval notification for run {RunId}.", runId); - return; - } - - var client = httpClientFactory.CreateClient("taskrunner-notifications"); - var payload = new - { - runId, - notification.ApprovalId, - notification.RequiredGrants, - notification.Messages, - notification.StepIds, - notification.ReasonTemplate - }; - - var response = await client.PostAsJsonAsync(options.ApprovalEndpoint, payload, cancellationToken).ConfigureAwait(false); - response.EnsureSuccessStatusCode(); - } - - public async Task PublishPolicyGatePendingAsync(string runId, PolicyGateNotification notification, CancellationToken cancellationToken) - { - if (options.PolicyEndpoint is null) - { - logger.LogDebug("Policy endpoint not configured; skipping policy notification for run {RunId} step {StepId}.", runId, notification.StepId); - return; - } - - var client = httpClientFactory.CreateClient("taskrunner-notifications"); - var payload = new - { - runId, - notification.StepId, - notification.Message, - Parameters = notification.Parameters.Select(parameter => new - { - parameter.Name, - parameter.RequiresRuntimeValue, - parameter.Expression, - parameter.Error - }) - }; - - var response = await client.PostAsJsonAsync(options.PolicyEndpoint, payload, cancellationToken).ConfigureAwait(false); - response.EnsureSuccessStatusCode(); - } -} +using System.Net.Http.Json; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.TaskRunner.Core.Execution; + +namespace StellaOps.TaskRunner.Infrastructure.Execution; + +public sealed class 
HttpPackRunNotificationPublisher : IPackRunNotificationPublisher +{ + private readonly IHttpClientFactory httpClientFactory; + private readonly NotificationOptions options; + private readonly ILogger<HttpPackRunNotificationPublisher> logger; + + public HttpPackRunNotificationPublisher( + IHttpClientFactory httpClientFactory, + IOptions<NotificationOptions> options, + ILogger<HttpPackRunNotificationPublisher> logger) + { + this.httpClientFactory = httpClientFactory ?? throw new ArgumentNullException(nameof(httpClientFactory)); + this.options = options?.Value ?? throw new ArgumentNullException(nameof(options)); + this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public async Task PublishApprovalRequestedAsync(string runId, ApprovalNotification notification, CancellationToken cancellationToken) + { + if (options.ApprovalEndpoint is null) + { + logger.LogWarning("Approval endpoint not configured; skipping approval notification for run {RunId}.", runId); + return; + } + + var client = httpClientFactory.CreateClient("taskrunner-notifications"); + var payload = new + { + runId, + notification.ApprovalId, + notification.RequiredGrants, + notification.Messages, + notification.StepIds, + notification.ReasonTemplate + }; + + var response = await client.PostAsJsonAsync(options.ApprovalEndpoint, payload, cancellationToken).ConfigureAwait(false); + response.EnsureSuccessStatusCode(); + } + + public async Task PublishPolicyGatePendingAsync(string runId, PolicyGateNotification notification, CancellationToken cancellationToken) + { + if (options.PolicyEndpoint is null) + { + logger.LogDebug("Policy endpoint not configured; skipping policy notification for run {RunId} step {StepId}.", runId, notification.StepId); + return; + } + + var client = httpClientFactory.CreateClient("taskrunner-notifications"); + var payload = new + { + runId, + notification.StepId, + notification.Message, + Parameters = notification.Parameters.Select(parameter => new + { + parameter.Name, + parameter.RequiresRuntimeValue, + parameter.Expression, + parameter.Error + }) + }; + + var response = await client.PostAsJsonAsync(options.PolicyEndpoint, payload, cancellationToken).ConfigureAwait(false); + response.EnsureSuccessStatusCode(); + } +} diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/LoggingPackRunNotificationPublisher.cs b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/LoggingPackRunNotificationPublisher.cs similarity index 97% rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/LoggingPackRunNotificationPublisher.cs rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/LoggingPackRunNotificationPublisher.cs index 9026aca3..83c7adef 100644 --- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/LoggingPackRunNotificationPublisher.cs +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/LoggingPackRunNotificationPublisher.cs @@ -1,34 +1,34 @@ -using Microsoft.Extensions.Logging; -using StellaOps.TaskRunner.Core.Execution; - -namespace StellaOps.TaskRunner.Infrastructure.Execution; - -public sealed class LoggingPackRunNotificationPublisher : IPackRunNotificationPublisher -{ - private readonly ILogger<LoggingPackRunNotificationPublisher> logger; - - public LoggingPackRunNotificationPublisher(ILogger<LoggingPackRunNotificationPublisher> logger) - { - this.logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); - } - - public Task PublishApprovalRequestedAsync(string runId, ApprovalNotification notification, CancellationToken cancellationToken) - { - logger.LogInformation( - "Run {RunId}: approval {ApprovalId} requires grants {Grants}.", - runId, - notification.ApprovalId, - string.Join(",", notification.RequiredGrants)); - return Task.CompletedTask; - } - - public Task PublishPolicyGatePendingAsync(string runId, PolicyGateNotification notification, CancellationToken cancellationToken) - { - logger.LogDebug( - "Run {RunId}: policy gate {StepId} pending (parameters: {Parameters}).", - runId, - notification.StepId, - string.Join(",", notification.Parameters.Select(p => p.Name))); - return Task.CompletedTask; - } -} +using Microsoft.Extensions.Logging; +using StellaOps.TaskRunner.Core.Execution; + +namespace StellaOps.TaskRunner.Infrastructure.Execution; + +public sealed class LoggingPackRunNotificationPublisher : IPackRunNotificationPublisher +{ + private readonly ILogger<LoggingPackRunNotificationPublisher> logger; + + public LoggingPackRunNotificationPublisher(ILogger<LoggingPackRunNotificationPublisher> logger) + { + this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public Task PublishApprovalRequestedAsync(string runId, ApprovalNotification notification, CancellationToken cancellationToken) + { + logger.LogInformation( + "Run {RunId}: approval {ApprovalId} requires grants {Grants}.", + runId, + notification.ApprovalId, + string.Join(",", notification.RequiredGrants)); + return Task.CompletedTask; + } + + public Task PublishPolicyGatePendingAsync(string runId, PolicyGateNotification notification, CancellationToken cancellationToken) + { + logger.LogDebug( + "Run {RunId}: policy gate {StepId} pending (parameters: {Parameters}).", + runId, + notification.StepId, + string.Join(",", notification.Parameters.Select(p => p.Name))); + return Task.CompletedTask; + } +} diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/NoopPackRunJobDispatcher.cs b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/NoopPackRunJobDispatcher.cs similarity index 97% rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/NoopPackRunJobDispatcher.cs rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/NoopPackRunJobDispatcher.cs index 54ecdcc8..d523f273 100644 --- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/NoopPackRunJobDispatcher.cs +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/NoopPackRunJobDispatcher.cs @@ -1,9 +1,9 @@ -using StellaOps.TaskRunner.Core.Execution; - -namespace StellaOps.TaskRunner.Infrastructure.Execution; - -public sealed class NoopPackRunJobDispatcher : IPackRunJobDispatcher -{ - public Task<PackRunExecutionContext?> TryDequeueAsync(CancellationToken cancellationToken) - => Task.FromResult<PackRunExecutionContext?>(null); -} +using StellaOps.TaskRunner.Core.Execution; + +namespace StellaOps.TaskRunner.Infrastructure.Execution; + +public sealed class NoopPackRunJobDispatcher : IPackRunJobDispatcher +{ + public Task<PackRunExecutionContext?> TryDequeueAsync(CancellationToken cancellationToken) + => Task.FromResult<PackRunExecutionContext?>(null); +} diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/NotificationOptions.cs 
b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/NotificationOptions.cs similarity index 96% rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/NotificationOptions.cs rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/NotificationOptions.cs index 4bd2c927..73e00676 100644 --- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/NotificationOptions.cs +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/Execution/NotificationOptions.cs @@ -1,8 +1,8 @@ -namespace StellaOps.TaskRunner.Infrastructure.Execution; - -public sealed class NotificationOptions -{ - public Uri? ApprovalEndpoint { get; set; } - - public Uri? PolicyEndpoint { get; set; } -} +namespace StellaOps.TaskRunner.Infrastructure.Execution; + +public sealed class NotificationOptions +{ + public Uri? ApprovalEndpoint { get; set; } + + public Uri? PolicyEndpoint { get; set; } +} diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/StellaOps.TaskRunner.Infrastructure.csproj b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/StellaOps.TaskRunner.Infrastructure.csproj similarity index 95% rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/StellaOps.TaskRunner.Infrastructure.csproj rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/StellaOps.TaskRunner.Infrastructure.csproj index b5d90867..756620dd 100644 --- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/StellaOps.TaskRunner.Infrastructure.csproj +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Infrastructure/StellaOps.TaskRunner.Infrastructure.csproj @@ -1,25 +1,25 @@ -<?xml version="1.0" ?> -<Project Sdk="Microsoft.NET.Sdk"> - - - - <ItemGroup> - <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="9.0.0" /> - <ProjectReference Include="..\StellaOps.TaskRunner.Core\StellaOps.TaskRunner.Core.csproj" /> - </ItemGroup> - - - - <PropertyGroup> - - - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <LangVersion>preview</LangVersion> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - - - -</Project> +<?xml version="1.0" ?> +<Project Sdk="Microsoft.NET.Sdk"> + + + + <ItemGroup> + <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="9.0.0" /> + <ProjectReference Include="..\StellaOps.TaskRunner.Core\StellaOps.TaskRunner.Core.csproj" /> + </ItemGroup> + + + + <PropertyGroup> + + + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <LangVersion>preview</LangVersion> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + + + +</Project> diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/PackRunApprovalCoordinatorTests.cs b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/PackRunApprovalCoordinatorTests.cs similarity index 97% rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/PackRunApprovalCoordinatorTests.cs rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/PackRunApprovalCoordinatorTests.cs index d0f38d8d..dcb95ff8 100644 --- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/PackRunApprovalCoordinatorTests.cs +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/PackRunApprovalCoordinatorTests.cs @@ -1,95 
+1,95 @@ -using System.Text.Json.Nodes; -using StellaOps.TaskRunner.Core.Execution; -using StellaOps.TaskRunner.Core.Planning; - -namespace StellaOps.TaskRunner.Tests; - -public sealed class PackRunApprovalCoordinatorTests -{ - [Fact] - public void Create_FromPlan_PopulatesApprovals() - { - var plan = BuildPlan(); - var coordinator = PackRunApprovalCoordinator.Create(plan, DateTimeOffset.UtcNow); - - var approvals = coordinator.GetApprovals(); - Assert.Single(approvals); - Assert.Equal("security-review", approvals[0].ApprovalId); - Assert.Equal(PackRunApprovalStatus.Pending, approvals[0].Status); - } - - [Fact] - public void Approve_AllowsResumeWhenLastApprovalCompletes() - { - var plan = BuildPlan(); - var coordinator = PackRunApprovalCoordinator.Create(plan, DateTimeOffset.UtcNow); - - var result = coordinator.Approve("security-review", "approver-1", DateTimeOffset.UtcNow); - - Assert.True(result.ShouldResumeRun); - Assert.Equal(PackRunApprovalStatus.Approved, result.State.Status); - Assert.Equal("approver-1", result.State.ActorId); - } - - [Fact] - public void Reject_DoesNotResumeAndMarksState() - { - var plan = BuildPlan(); - var coordinator = PackRunApprovalCoordinator.Create(plan, DateTimeOffset.UtcNow); - - var result = coordinator.Reject("security-review", "approver-1", DateTimeOffset.UtcNow, "Not safe"); - - Assert.False(result.ShouldResumeRun); - Assert.Equal(PackRunApprovalStatus.Rejected, result.State.Status); - Assert.Equal("Not safe", result.State.Summary); - } - - [Fact] - public void BuildNotifications_UsesRequirements() - { - var plan = BuildPlan(); - var coordinator = PackRunApprovalCoordinator.Create(plan, DateTimeOffset.UtcNow); - - var notifications = coordinator.BuildNotifications(plan); - Assert.Single(notifications); - var notification = notifications[0]; - Assert.Equal("security-review", notification.ApprovalId); - Assert.Contains("Packs.Approve", notification.RequiredGrants); - } - - [Fact] - public void BuildPolicyNotifications_ProducesGateMetadata() - { - var plan = BuildPolicyPlan(); - var coordinator = PackRunApprovalCoordinator.Create(plan, DateTimeOffset.UtcNow); - - var notifications = coordinator.BuildPolicyNotifications(plan); - Assert.Single(notifications); - var hint = notifications[0]; - Assert.Equal("policy-check", hint.StepId); - var parameter = hint.Parameters.Single(p => p.Name == "threshold"); - Assert.False(parameter.RequiresRuntimeValue); - var runtimeParam = hint.Parameters.Single(p => p.Name == "evidenceRef"); - Assert.True(runtimeParam.RequiresRuntimeValue); - Assert.Equal("steps.prepare.outputs.evidence", runtimeParam.Expression); - } - - private static TaskPackPlan BuildPlan() - { - var manifest = TestManifests.Load(TestManifests.Sample); - var planner = new TaskPackPlanner(); - var inputs = new Dictionary<string, JsonNode?> - { - ["dryRun"] = JsonValue.Create(false) - }; - - return planner.Plan(manifest, inputs).Plan!; - } - - private static TaskPackPlan BuildPolicyPlan() - { - var manifest = TestManifests.Load(TestManifests.PolicyGate); - var planner = new TaskPackPlanner(); - return planner.Plan(manifest).Plan!; - } -} +using System.Text.Json.Nodes; +using StellaOps.TaskRunner.Core.Execution; +using StellaOps.TaskRunner.Core.Planning; + +namespace StellaOps.TaskRunner.Tests; + +public sealed class PackRunApprovalCoordinatorTests +{ + [Fact] + public void Create_FromPlan_PopulatesApprovals() + { + var plan = BuildPlan(); + var coordinator = PackRunApprovalCoordinator.Create(plan, DateTimeOffset.UtcNow); + + var approvals = 
coordinator.GetApprovals(); + Assert.Single(approvals); + Assert.Equal("security-review", approvals[0].ApprovalId); + Assert.Equal(PackRunApprovalStatus.Pending, approvals[0].Status); + } + + [Fact] + public void Approve_AllowsResumeWhenLastApprovalCompletes() + { + var plan = BuildPlan(); + var coordinator = PackRunApprovalCoordinator.Create(plan, DateTimeOffset.UtcNow); + + var result = coordinator.Approve("security-review", "approver-1", DateTimeOffset.UtcNow); + + Assert.True(result.ShouldResumeRun); + Assert.Equal(PackRunApprovalStatus.Approved, result.State.Status); + Assert.Equal("approver-1", result.State.ActorId); + } + + [Fact] + public void Reject_DoesNotResumeAndMarksState() + { + var plan = BuildPlan(); + var coordinator = PackRunApprovalCoordinator.Create(plan, DateTimeOffset.UtcNow); + + var result = coordinator.Reject("security-review", "approver-1", DateTimeOffset.UtcNow, "Not safe"); + + Assert.False(result.ShouldResumeRun); + Assert.Equal(PackRunApprovalStatus.Rejected, result.State.Status); + Assert.Equal("Not safe", result.State.Summary); + } + + [Fact] + public void BuildNotifications_UsesRequirements() + { + var plan = BuildPlan(); + var coordinator = PackRunApprovalCoordinator.Create(plan, DateTimeOffset.UtcNow); + + var notifications = coordinator.BuildNotifications(plan); + Assert.Single(notifications); + var notification = notifications[0]; + Assert.Equal("security-review", notification.ApprovalId); + Assert.Contains("Packs.Approve", notification.RequiredGrants); + } + + [Fact] + public void BuildPolicyNotifications_ProducesGateMetadata() + { + var plan = BuildPolicyPlan(); + var coordinator = PackRunApprovalCoordinator.Create(plan, DateTimeOffset.UtcNow); + + var notifications = coordinator.BuildPolicyNotifications(plan); + Assert.Single(notifications); + var hint = notifications[0]; + Assert.Equal("policy-check", hint.StepId); + var parameter = hint.Parameters.Single(p => p.Name == "threshold"); + Assert.False(parameter.RequiresRuntimeValue); + var runtimeParam = hint.Parameters.Single(p => p.Name == "evidenceRef"); + Assert.True(runtimeParam.RequiresRuntimeValue); + Assert.Equal("steps.prepare.outputs.evidence", runtimeParam.Expression); + } + + private static TaskPackPlan BuildPlan() + { + var manifest = TestManifests.Load(TestManifests.Sample); + var planner = new TaskPackPlanner(); + var inputs = new Dictionary<string, JsonNode?> + { + ["dryRun"] = JsonValue.Create(false) + }; + + return planner.Plan(manifest, inputs).Plan!; + } + + private static TaskPackPlan BuildPolicyPlan() + { + var manifest = TestManifests.Load(TestManifests.PolicyGate); + var planner = new TaskPackPlanner(); + return planner.Plan(manifest).Plan!; + } +} diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/PackRunProcessorTests.cs b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/PackRunProcessorTests.cs similarity index 97% rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/PackRunProcessorTests.cs rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/PackRunProcessorTests.cs index b4d2dd24..30a50cc5 100644 --- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/PackRunProcessorTests.cs +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/PackRunProcessorTests.cs @@ -1,85 +1,85 @@ -using Microsoft.Extensions.Logging.Abstractions; -using StellaOps.TaskRunner.Core.Execution; -using StellaOps.TaskRunner.Core.Planning; -using System.Text.Json.Nodes; - -namespace StellaOps.TaskRunner.Tests; - -public sealed class 
PackRunProcessorTests -{ - [Fact] - public async Task ProcessNewRunAsync_PersistsApprovalsAndPublishesNotifications() - { - var manifest = TestManifests.Load(TestManifests.Sample); - var planner = new TaskPackPlanner(); - var plan = planner.Plan(manifest, new Dictionary<string, JsonNode?> { ["dryRun"] = JsonValue.Create(false) }).Plan!; - var context = new PackRunExecutionContext("run-123", plan, DateTimeOffset.UtcNow); - - var store = new TestApprovalStore(); - var publisher = new TestNotificationPublisher(); - var processor = new PackRunProcessor(store, publisher, NullLogger<PackRunProcessor>.Instance); - - var result = await processor.ProcessNewRunAsync(context, CancellationToken.None); - - Assert.False(result.ShouldResumeImmediately); - var saved = Assert.Single(store.Saved); - Assert.Equal("security-review", saved.ApprovalId); - Assert.Single(publisher.Approvals); - Assert.Empty(publisher.Policies); - } - - [Fact] - public async Task ProcessNewRunAsync_NoApprovals_ResumesImmediately() - { - var manifest = TestManifests.Load(TestManifests.Output); - var planner = new TaskPackPlanner(); - var plan = planner.Plan(manifest).Plan!; - var context = new PackRunExecutionContext("run-456", plan, DateTimeOffset.UtcNow); - - var store = new TestApprovalStore(); - var publisher = new TestNotificationPublisher(); - var processor = new PackRunProcessor(store, publisher, NullLogger<PackRunProcessor>.Instance); - - var result = await processor.ProcessNewRunAsync(context, CancellationToken.None); - - Assert.True(result.ShouldResumeImmediately); - Assert.Empty(store.Saved); - Assert.Empty(publisher.Approvals); - } - - private sealed class TestApprovalStore : IPackRunApprovalStore - { - public List<PackRunApprovalState> Saved { get; } = new(); - - public Task<IReadOnlyList<PackRunApprovalState>> GetAsync(string runId, CancellationToken cancellationToken) - => Task.FromResult((IReadOnlyList<PackRunApprovalState>)Saved); - - public Task SaveAsync(string runId, IReadOnlyList<PackRunApprovalState> approvals, CancellationToken cancellationToken) - { - Saved.Clear(); - Saved.AddRange(approvals); - return Task.CompletedTask; - } - - public Task UpdateAsync(string runId, PackRunApprovalState approval, CancellationToken cancellationToken) - => Task.CompletedTask; - } - - private sealed class TestNotificationPublisher : IPackRunNotificationPublisher - { - public List<ApprovalNotification> Approvals { get; } = new(); - public List<PolicyGateNotification> Policies { get; } = new(); - - public Task PublishApprovalRequestedAsync(string runId, ApprovalNotification notification, CancellationToken cancellationToken) - { - Approvals.Add(notification); - return Task.CompletedTask; - } - - public Task PublishPolicyGatePendingAsync(string runId, PolicyGateNotification notification, CancellationToken cancellationToken) - { - Policies.Add(notification); - return Task.CompletedTask; - } - } -} +using Microsoft.Extensions.Logging.Abstractions; +using StellaOps.TaskRunner.Core.Execution; +using StellaOps.TaskRunner.Core.Planning; +using System.Text.Json.Nodes; + +namespace StellaOps.TaskRunner.Tests; + +public sealed class PackRunProcessorTests +{ + [Fact] + public async Task ProcessNewRunAsync_PersistsApprovalsAndPublishesNotifications() + { + var manifest = TestManifests.Load(TestManifests.Sample); + var planner = new TaskPackPlanner(); + var plan = planner.Plan(manifest, new Dictionary<string, JsonNode?> { ["dryRun"] = JsonValue.Create(false) }).Plan!; + var context = new PackRunExecutionContext("run-123", plan, 
DateTimeOffset.UtcNow); + + var store = new TestApprovalStore(); + var publisher = new TestNotificationPublisher(); + var processor = new PackRunProcessor(store, publisher, NullLogger<PackRunProcessor>.Instance); + + var result = await processor.ProcessNewRunAsync(context, CancellationToken.None); + + Assert.False(result.ShouldResumeImmediately); + var saved = Assert.Single(store.Saved); + Assert.Equal("security-review", saved.ApprovalId); + Assert.Single(publisher.Approvals); + Assert.Empty(publisher.Policies); + } + + [Fact] + public async Task ProcessNewRunAsync_NoApprovals_ResumesImmediately() + { + var manifest = TestManifests.Load(TestManifests.Output); + var planner = new TaskPackPlanner(); + var plan = planner.Plan(manifest).Plan!; + var context = new PackRunExecutionContext("run-456", plan, DateTimeOffset.UtcNow); + + var store = new TestApprovalStore(); + var publisher = new TestNotificationPublisher(); + var processor = new PackRunProcessor(store, publisher, NullLogger<PackRunProcessor>.Instance); + + var result = await processor.ProcessNewRunAsync(context, CancellationToken.None); + + Assert.True(result.ShouldResumeImmediately); + Assert.Empty(store.Saved); + Assert.Empty(publisher.Approvals); + } + + private sealed class TestApprovalStore : IPackRunApprovalStore + { + public List<PackRunApprovalState> Saved { get; } = new(); + + public Task<IReadOnlyList<PackRunApprovalState>> GetAsync(string runId, CancellationToken cancellationToken) + => Task.FromResult((IReadOnlyList<PackRunApprovalState>)Saved); + + public Task SaveAsync(string runId, IReadOnlyList<PackRunApprovalState> approvals, CancellationToken cancellationToken) + { + Saved.Clear(); + Saved.AddRange(approvals); + return Task.CompletedTask; + } + + public Task UpdateAsync(string runId, PackRunApprovalState approval, CancellationToken cancellationToken) + => Task.CompletedTask; + } + + private sealed class TestNotificationPublisher : IPackRunNotificationPublisher + { + public List<ApprovalNotification> Approvals { get; } = new(); + public List<PolicyGateNotification> Policies { get; } = new(); + + public Task PublishApprovalRequestedAsync(string runId, ApprovalNotification notification, CancellationToken cancellationToken) + { + Approvals.Add(notification); + return Task.CompletedTask; + } + + public Task PublishPolicyGatePendingAsync(string runId, PolicyGateNotification notification, CancellationToken cancellationToken) + { + Policies.Add(notification); + return Task.CompletedTask; + } + } +} diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/StellaOps.TaskRunner.Tests.csproj b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/StellaOps.TaskRunner.Tests.csproj similarity index 91% rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/StellaOps.TaskRunner.Tests.csproj rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/StellaOps.TaskRunner.Tests.csproj index 775c67e7..895a3209 100644 --- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/StellaOps.TaskRunner.Tests.csproj +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/StellaOps.TaskRunner.Tests.csproj @@ -1,135 +1,135 @@ -<?xml version="1.0" ?> -<Project Sdk="Microsoft.NET.Sdk"> - - - - - - <PropertyGroup> - - - - - <OutputType>Exe</OutputType> - - - - - <IsPackable>false</IsPackable> - - - - - - - - - - - - - - <TargetFramework>net10.0</TargetFramework> - - - <ImplicitUsings>enable</ImplicitUsings> - - - <Nullable>enable</Nullable> - - - 
<UseConcelierTestInfra>false</UseConcelierTestInfra> - - - <LangVersion>preview</LangVersion> - - - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - - - </PropertyGroup> - - - - - - <ItemGroup> - - - - - <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1"/> - - - - - <PackageReference Include="xunit.v3" Version="3.0.0"/> - - - - - <PackageReference Include="xunit.runner.visualstudio" Version="3.1.3"/> - - - - - </ItemGroup> - - - - - - <ItemGroup> - - - - - <Content Include="xunit.runner.json" CopyToOutputDirectory="PreserveNewest"/> - - - - - </ItemGroup> - - - - - - <ItemGroup> - - - - - <Using Include="Xunit"/> - - - - - </ItemGroup> - - - - - - <ItemGroup> - - - - - <ProjectReference Include="..\StellaOps.TaskRunner.Core\StellaOps.TaskRunner.Core.csproj"/> - - - - - <ProjectReference Include="..\StellaOps.TaskRunner.Infrastructure\StellaOps.TaskRunner.Infrastructure.csproj"/> - - - - - </ItemGroup> - - - - - -</Project> +<?xml version="1.0" ?> +<Project Sdk="Microsoft.NET.Sdk"> + + + + + + <PropertyGroup> + + + + + <OutputType>Exe</OutputType> + + + + + <IsPackable>false</IsPackable> + + + + + + + + + + + + + + <TargetFramework>net10.0</TargetFramework> + + + <ImplicitUsings>enable</ImplicitUsings> + + + <Nullable>enable</Nullable> + + + <UseConcelierTestInfra>false</UseConcelierTestInfra> + + + <LangVersion>preview</LangVersion> + + + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + + + </PropertyGroup> + + + + + + <ItemGroup> + + + + + <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1"/> + + + + + <PackageReference Include="xunit.v3" Version="3.0.0"/> + + + + + <PackageReference Include="xunit.runner.visualstudio" Version="3.1.3"/> + + + + + </ItemGroup> + + + + + + <ItemGroup> + + + + + <Content Include="xunit.runner.json" CopyToOutputDirectory="PreserveNewest"/> + + + + + </ItemGroup> + + + + + + <ItemGroup> + + + + + <Using Include="Xunit"/> + + + + + </ItemGroup> + + + + + + <ItemGroup> + + + + + <ProjectReference Include="..\StellaOps.TaskRunner.Core\StellaOps.TaskRunner.Core.csproj"/> + + + + + <ProjectReference Include="..\StellaOps.TaskRunner.Infrastructure\StellaOps.TaskRunner.Infrastructure.csproj"/> + + + + + </ItemGroup> + + + + + +</Project> diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/TaskPackPlannerTests.cs b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/TaskPackPlannerTests.cs similarity index 97% rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/TaskPackPlannerTests.cs rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/TaskPackPlannerTests.cs index 3b6dfbb0..d2438e44 100644 --- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/TaskPackPlannerTests.cs +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/TaskPackPlannerTests.cs @@ -1,177 +1,177 @@ -using System.Linq; -using System.Text.Json.Nodes; -using StellaOps.TaskRunner.Core.Planning; - -namespace StellaOps.TaskRunner.Tests; - -public sealed class TaskPackPlannerTests -{ - [Fact] - public void Plan_WithSequentialSteps_ComputesDeterministicHash() - { - var manifest = TestManifests.Load(TestManifests.Sample); - var planner = new TaskPackPlanner(); - - var inputs = new Dictionary<string, JsonNode?> - { - ["dryRun"] = JsonValue.Create(false) - }; - - var resultA = planner.Plan(manifest, inputs); - Assert.True(resultA.Success); - var plan = resultA.Plan!; - Assert.Equal(3, plan.Steps.Count); - Assert.Equal("plan-step", plan.Steps[0].Id); - Assert.Equal("plan-step", 
plan.Steps[0].TemplateId); - Assert.Equal("run", plan.Steps[0].Type); - Assert.Equal("gate.approval", plan.Steps[1].Type); - Assert.Equal("security-review", plan.Steps[1].ApprovalId); - Assert.Equal("run", plan.Steps[2].Type); - Assert.True(plan.Steps[2].Enabled); - Assert.Single(plan.Approvals); - Assert.Equal("security-review", plan.Approvals[0].Id); - Assert.False(string.IsNullOrWhiteSpace(plan.Hash)); - - var resultB = planner.Plan(manifest, inputs); - Assert.True(resultB.Success); - Assert.Equal(plan.Hash, resultB.Plan!.Hash); - } - - [Fact] - public void Plan_WhenConditionEvaluatesFalse_DisablesStep() - { - var manifest = TestManifests.Load(TestManifests.Sample); - var planner = new TaskPackPlanner(); - - var inputs = new Dictionary<string, JsonNode?> - { - ["dryRun"] = JsonValue.Create(true) - }; - - var result = planner.Plan(manifest, inputs); - Assert.True(result.Success); - Assert.False(result.Plan!.Steps[2].Enabled); - } - - [Fact] - public void Plan_WithStepReferences_MarksParametersAsRuntime() - { - var manifest = TestManifests.Load(TestManifests.StepReference); - var planner = new TaskPackPlanner(); - - var result = planner.Plan(manifest); - Assert.True(result.Success); - var plan = result.Plan!; - Assert.Equal(2, plan.Steps.Count); - var referenceParameters = plan.Steps[1].Parameters!; - Assert.True(referenceParameters["sourceSummary"].RequiresRuntimeValue); - Assert.Equal("steps.prepare.outputs.summary", referenceParameters["sourceSummary"].Expression); - } - - [Fact] - public void Plan_WithMapStep_ExpandsIterations() - { - var manifest = TestManifests.Load(TestManifests.Map); - var planner = new TaskPackPlanner(); - - var inputs = new Dictionary<string, JsonNode?> - { - ["targets"] = new JsonArray("alpha", "beta", "gamma") - }; - - var result = planner.Plan(manifest, inputs); - Assert.True(result.Success); - var plan = result.Plan!; - var mapStep = plan.Steps.Single(s => s.Type == "map"); - Assert.Equal(3, mapStep.Children!.Count); - Assert.All(mapStep.Children!, child => Assert.Equal("echo-step", child.TemplateId)); - Assert.Equal(3, mapStep.Parameters!["iterationCount"].Value!.GetValue<int>()); - Assert.Equal("alpha", mapStep.Children![0].Parameters!["item"].Value!.GetValue<string>()); - } - - [Fact] - public void CollectApprovalRequirements_GroupsGates() - { - var manifest = TestManifests.Load(TestManifests.Sample); - var planner = new TaskPackPlanner(); - - var plan = planner.Plan(manifest).Plan!; - var requirements = TaskPackPlanInsights.CollectApprovalRequirements(plan); - Assert.Single(requirements); - var requirement = requirements[0]; - Assert.Equal("security-review", requirement.ApprovalId); - Assert.Contains("Packs.Approve", requirement.Grants); - Assert.Equal(plan.Steps[1].Id, requirement.StepIds.Single()); - - var notifications = TaskPackPlanInsights.CollectNotificationHints(plan); - Assert.Contains(notifications, hint => hint.Type == "approval-request" && hint.StepId == plan.Steps[1].Id); - } - - [Fact] - public void Plan_WithSecretReference_RecordsSecretMetadata() - { - var manifest = TestManifests.Load(TestManifests.Secret); - var planner = new TaskPackPlanner(); - - var result = planner.Plan(manifest); - Assert.True(result.Success); - var plan = result.Plan!; - Assert.Single(plan.Secrets); - Assert.Equal("apiKey", plan.Secrets[0].Name); - var param = plan.Steps[0].Parameters!["token"]; - Assert.True(param.RequiresRuntimeValue); - Assert.Equal("secrets.apiKey", param.Expression); - } - - [Fact] - public void Plan_WithOutputs_ProjectsResolvedValues() - { - 
var manifest = TestManifests.Load(TestManifests.Output); - var planner = new TaskPackPlanner(); - - var result = planner.Plan(manifest); - Assert.True(result.Success); - var plan = result.Plan!; - Assert.Equal(2, plan.Outputs.Count); - - var bundle = plan.Outputs.First(o => o.Name == "bundlePath"); - Assert.NotNull(bundle.Path); - Assert.False(bundle.Path!.RequiresRuntimeValue); - Assert.Equal("artifacts/report.txt", bundle.Path.Value!.GetValue<string>()); - - var evidence = plan.Outputs.First(o => o.Name == "evidenceModel"); - Assert.NotNull(evidence.Expression); - Assert.True(evidence.Expression!.RequiresRuntimeValue); - Assert.Equal("steps.generate.outputs.evidence", evidence.Expression.Expression); - } - - [Fact] - public void PolicyGateHints_IncludeRuntimeMetadata() - { - var manifest = TestManifests.Load(TestManifests.PolicyGate); - var planner = new TaskPackPlanner(); - - var plan = planner.Plan(manifest).Plan!; - var hints = TaskPackPlanInsights.CollectPolicyGateHints(plan); - Assert.Single(hints); - var hint = hints[0]; - Assert.Equal("policy-check", hint.StepId); - var threshold = hint.Parameters.Single(p => p.Name == "threshold"); - Assert.False(threshold.RequiresRuntimeValue); - Assert.Null(threshold.Expression); - var evidence = hint.Parameters.Single(p => p.Name == "evidenceRef"); - Assert.True(evidence.RequiresRuntimeValue); - Assert.Equal("steps.prepare.outputs.evidence", evidence.Expression); - } - - [Fact] - public void Plan_WhenRequiredInputMissing_ReturnsError() - { - var manifest = TestManifests.Load(TestManifests.RequiredInput); - var planner = new TaskPackPlanner(); - - var result = planner.Plan(manifest); - Assert.False(result.Success); - Assert.Contains(result.Errors, error => error.Path == "inputs.sbomBundle"); - } -} +using System.Linq; +using System.Text.Json.Nodes; +using StellaOps.TaskRunner.Core.Planning; + +namespace StellaOps.TaskRunner.Tests; + +public sealed class TaskPackPlannerTests +{ + [Fact] + public void Plan_WithSequentialSteps_ComputesDeterministicHash() + { + var manifest = TestManifests.Load(TestManifests.Sample); + var planner = new TaskPackPlanner(); + + var inputs = new Dictionary<string, JsonNode?> + { + ["dryRun"] = JsonValue.Create(false) + }; + + var resultA = planner.Plan(manifest, inputs); + Assert.True(resultA.Success); + var plan = resultA.Plan!; + Assert.Equal(3, plan.Steps.Count); + Assert.Equal("plan-step", plan.Steps[0].Id); + Assert.Equal("plan-step", plan.Steps[0].TemplateId); + Assert.Equal("run", plan.Steps[0].Type); + Assert.Equal("gate.approval", plan.Steps[1].Type); + Assert.Equal("security-review", plan.Steps[1].ApprovalId); + Assert.Equal("run", plan.Steps[2].Type); + Assert.True(plan.Steps[2].Enabled); + Assert.Single(plan.Approvals); + Assert.Equal("security-review", plan.Approvals[0].Id); + Assert.False(string.IsNullOrWhiteSpace(plan.Hash)); + + var resultB = planner.Plan(manifest, inputs); + Assert.True(resultB.Success); + Assert.Equal(plan.Hash, resultB.Plan!.Hash); + } + + [Fact] + public void Plan_WhenConditionEvaluatesFalse_DisablesStep() + { + var manifest = TestManifests.Load(TestManifests.Sample); + var planner = new TaskPackPlanner(); + + var inputs = new Dictionary<string, JsonNode?> + { + ["dryRun"] = JsonValue.Create(true) + }; + + var result = planner.Plan(manifest, inputs); + Assert.True(result.Success); + Assert.False(result.Plan!.Steps[2].Enabled); + } + + [Fact] + public void Plan_WithStepReferences_MarksParametersAsRuntime() + { + var manifest = TestManifests.Load(TestManifests.StepReference); + var 
planner = new TaskPackPlanner(); + + var result = planner.Plan(manifest); + Assert.True(result.Success); + var plan = result.Plan!; + Assert.Equal(2, plan.Steps.Count); + var referenceParameters = plan.Steps[1].Parameters!; + Assert.True(referenceParameters["sourceSummary"].RequiresRuntimeValue); + Assert.Equal("steps.prepare.outputs.summary", referenceParameters["sourceSummary"].Expression); + } + + [Fact] + public void Plan_WithMapStep_ExpandsIterations() + { + var manifest = TestManifests.Load(TestManifests.Map); + var planner = new TaskPackPlanner(); + + var inputs = new Dictionary<string, JsonNode?> + { + ["targets"] = new JsonArray("alpha", "beta", "gamma") + }; + + var result = planner.Plan(manifest, inputs); + Assert.True(result.Success); + var plan = result.Plan!; + var mapStep = plan.Steps.Single(s => s.Type == "map"); + Assert.Equal(3, mapStep.Children!.Count); + Assert.All(mapStep.Children!, child => Assert.Equal("echo-step", child.TemplateId)); + Assert.Equal(3, mapStep.Parameters!["iterationCount"].Value!.GetValue<int>()); + Assert.Equal("alpha", mapStep.Children![0].Parameters!["item"].Value!.GetValue<string>()); + } + + [Fact] + public void CollectApprovalRequirements_GroupsGates() + { + var manifest = TestManifests.Load(TestManifests.Sample); + var planner = new TaskPackPlanner(); + + var plan = planner.Plan(manifest).Plan!; + var requirements = TaskPackPlanInsights.CollectApprovalRequirements(plan); + Assert.Single(requirements); + var requirement = requirements[0]; + Assert.Equal("security-review", requirement.ApprovalId); + Assert.Contains("Packs.Approve", requirement.Grants); + Assert.Equal(plan.Steps[1].Id, requirement.StepIds.Single()); + + var notifications = TaskPackPlanInsights.CollectNotificationHints(plan); + Assert.Contains(notifications, hint => hint.Type == "approval-request" && hint.StepId == plan.Steps[1].Id); + } + + [Fact] + public void Plan_WithSecretReference_RecordsSecretMetadata() + { + var manifest = TestManifests.Load(TestManifests.Secret); + var planner = new TaskPackPlanner(); + + var result = planner.Plan(manifest); + Assert.True(result.Success); + var plan = result.Plan!; + Assert.Single(plan.Secrets); + Assert.Equal("apiKey", plan.Secrets[0].Name); + var param = plan.Steps[0].Parameters!["token"]; + Assert.True(param.RequiresRuntimeValue); + Assert.Equal("secrets.apiKey", param.Expression); + } + + [Fact] + public void Plan_WithOutputs_ProjectsResolvedValues() + { + var manifest = TestManifests.Load(TestManifests.Output); + var planner = new TaskPackPlanner(); + + var result = planner.Plan(manifest); + Assert.True(result.Success); + var plan = result.Plan!; + Assert.Equal(2, plan.Outputs.Count); + + var bundle = plan.Outputs.First(o => o.Name == "bundlePath"); + Assert.NotNull(bundle.Path); + Assert.False(bundle.Path!.RequiresRuntimeValue); + Assert.Equal("artifacts/report.txt", bundle.Path.Value!.GetValue<string>()); + + var evidence = plan.Outputs.First(o => o.Name == "evidenceModel"); + Assert.NotNull(evidence.Expression); + Assert.True(evidence.Expression!.RequiresRuntimeValue); + Assert.Equal("steps.generate.outputs.evidence", evidence.Expression.Expression); + } + + [Fact] + public void PolicyGateHints_IncludeRuntimeMetadata() + { + var manifest = TestManifests.Load(TestManifests.PolicyGate); + var planner = new TaskPackPlanner(); + + var plan = planner.Plan(manifest).Plan!; + var hints = TaskPackPlanInsights.CollectPolicyGateHints(plan); + Assert.Single(hints); + var hint = hints[0]; + Assert.Equal("policy-check", hint.StepId); + var 
threshold = hint.Parameters.Single(p => p.Name == "threshold"); + Assert.False(threshold.RequiresRuntimeValue); + Assert.Null(threshold.Expression); + var evidence = hint.Parameters.Single(p => p.Name == "evidenceRef"); + Assert.True(evidence.RequiresRuntimeValue); + Assert.Equal("steps.prepare.outputs.evidence", evidence.Expression); + } + + [Fact] + public void Plan_WhenRequiredInputMissing_ReturnsError() + { + var manifest = TestManifests.Load(TestManifests.RequiredInput); + var planner = new TaskPackPlanner(); + + var result = planner.Plan(manifest); + Assert.False(result.Success); + Assert.Contains(result.Errors, error => error.Path == "inputs.sbomBundle"); + } +} diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/TestManifests.cs b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/TestManifests.cs similarity index 95% rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/TestManifests.cs rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/TestManifests.cs index 7598cb79..1cbf4cba 100644 --- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/TestManifests.cs +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/TestManifests.cs @@ -1,165 +1,165 @@ -using System.Text.Json.Nodes; -using StellaOps.TaskRunner.Core.TaskPacks; - -namespace StellaOps.TaskRunner.Tests; - -internal static class TestManifests -{ - public static TaskPackManifest Load(string yaml) - { - var loader = new TaskPackManifestLoader(); - return loader.Deserialize(yaml); - } - - public const string Sample = """ -apiVersion: stellaops.io/pack.v1 -kind: TaskPack -metadata: - name: sample-pack - version: 1.0.0 - description: Sample pack for planner tests - tags: [tests] -spec: - inputs: - - name: dryRun - type: boolean - required: false - default: false - approvals: - - id: security-review - grants: ["Packs.Approve"] - steps: - - id: plan-step - name: Plan - run: - uses: builtin:plan - with: - dryRun: "{{ inputs.dryRun }}" - - id: approval - gate: - approval: - id: security-review - message: "Security approval required." 
- - id: apply-step - when: "{{ not inputs.dryRun }}" - run: - uses: builtin:apply -"""; - - public const string RequiredInput = """ -apiVersion: stellaops.io/pack.v1 -kind: TaskPack -metadata: - name: required-input-pack - version: 1.2.3 -spec: - inputs: - - name: sbomBundle - type: object - required: true - steps: - - id: noop - run: - uses: builtin:noop -"""; - - public const string StepReference = """ -apiVersion: stellaops.io/pack.v1 -kind: TaskPack -metadata: - name: step-ref-pack - version: 1.0.0 -spec: - steps: - - id: prepare - run: - uses: builtin:prepare - - id: consume - run: - uses: builtin:consume - with: - sourceSummary: "{{ steps.prepare.outputs.summary }}" -"""; - - public const string Map = """ -apiVersion: stellaops.io/pack.v1 -kind: TaskPack -metadata: - name: map-pack - version: 1.0.0 -spec: - inputs: - - name: targets - type: array - required: true - steps: - - id: maintenance-loop - map: - items: "{{ inputs.targets }}" - step: - id: echo-step - run: - uses: builtin:echo - with: - target: "{{ item }}" -"""; - - public const string Secret = """ -apiVersion: stellaops.io/pack.v1 -kind: TaskPack -metadata: - name: secret-pack - version: 1.0.0 -spec: - secrets: - - name: apiKey - scope: Packs.Run - description: API authentication token - steps: - - id: use-secret - run: - uses: builtin:http - with: - token: "{{ secrets.apiKey }}" -"""; - - public const string Output = """ -apiVersion: stellaops.io/pack.v1 -kind: TaskPack -metadata: - name: output-pack - version: 1.0.0 -spec: - steps: - - id: generate - run: - uses: builtin:generate - outputs: - - name: bundlePath - type: file - path: artifacts/report.txt - - name: evidenceModel - type: object - expression: "{{ steps.generate.outputs.evidence }}" -"""; - - public const string PolicyGate = """ -apiVersion: stellaops.io/pack.v1 -kind: TaskPack -metadata: - name: policy-gate-pack - version: 1.0.0 -spec: - steps: - - id: prepare - run: - uses: builtin:prepare - - id: policy-check - gate: - policy: - policy: security-hold - parameters: - threshold: high - evidenceRef: "{{ steps.prepare.outputs.evidence }}" -"""; -} +using System.Text.Json.Nodes; +using StellaOps.TaskRunner.Core.TaskPacks; + +namespace StellaOps.TaskRunner.Tests; + +internal static class TestManifests +{ + public static TaskPackManifest Load(string yaml) + { + var loader = new TaskPackManifestLoader(); + return loader.Deserialize(yaml); + } + + public const string Sample = """ +apiVersion: stellaops.io/pack.v1 +kind: TaskPack +metadata: + name: sample-pack + version: 1.0.0 + description: Sample pack for planner tests + tags: [tests] +spec: + inputs: + - name: dryRun + type: boolean + required: false + default: false + approvals: + - id: security-review + grants: ["Packs.Approve"] + steps: + - id: plan-step + name: Plan + run: + uses: builtin:plan + with: + dryRun: "{{ inputs.dryRun }}" + - id: approval + gate: + approval: + id: security-review + message: "Security approval required." 
+ - id: apply-step + when: "{{ not inputs.dryRun }}" + run: + uses: builtin:apply +"""; + + public const string RequiredInput = """ +apiVersion: stellaops.io/pack.v1 +kind: TaskPack +metadata: + name: required-input-pack + version: 1.2.3 +spec: + inputs: + - name: sbomBundle + type: object + required: true + steps: + - id: noop + run: + uses: builtin:noop +"""; + + public const string StepReference = """ +apiVersion: stellaops.io/pack.v1 +kind: TaskPack +metadata: + name: step-ref-pack + version: 1.0.0 +spec: + steps: + - id: prepare + run: + uses: builtin:prepare + - id: consume + run: + uses: builtin:consume + with: + sourceSummary: "{{ steps.prepare.outputs.summary }}" +"""; + + public const string Map = """ +apiVersion: stellaops.io/pack.v1 +kind: TaskPack +metadata: + name: map-pack + version: 1.0.0 +spec: + inputs: + - name: targets + type: array + required: true + steps: + - id: maintenance-loop + map: + items: "{{ inputs.targets }}" + step: + id: echo-step + run: + uses: builtin:echo + with: + target: "{{ item }}" +"""; + + public const string Secret = """ +apiVersion: stellaops.io/pack.v1 +kind: TaskPack +metadata: + name: secret-pack + version: 1.0.0 +spec: + secrets: + - name: apiKey + scope: Packs.Run + description: API authentication token + steps: + - id: use-secret + run: + uses: builtin:http + with: + token: "{{ secrets.apiKey }}" +"""; + + public const string Output = """ +apiVersion: stellaops.io/pack.v1 +kind: TaskPack +metadata: + name: output-pack + version: 1.0.0 +spec: + steps: + - id: generate + run: + uses: builtin:generate + outputs: + - name: bundlePath + type: file + path: artifacts/report.txt + - name: evidenceModel + type: object + expression: "{{ steps.generate.outputs.evidence }}" +"""; + + public const string PolicyGate = """ +apiVersion: stellaops.io/pack.v1 +kind: TaskPack +metadata: + name: policy-gate-pack + version: 1.0.0 +spec: + steps: + - id: prepare + run: + uses: builtin:prepare + - id: policy-check + gate: + policy: + policy: security-hold + parameters: + threshold: high + evidenceRef: "{{ steps.prepare.outputs.evidence }}" +"""; +} diff --git a/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/xunit.runner.json b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/xunit.runner.json new file mode 100644 index 00000000..249d815c --- /dev/null +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Tests/xunit.runner.json @@ -0,0 +1,3 @@ +{ + "$schema": "https://xunit.net/schema/current/xunit.runner.schema.json" +} diff --git a/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/Program.cs b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/Program.cs new file mode 100644 index 00000000..3917ef1b --- /dev/null +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/Program.cs @@ -0,0 +1,41 @@ +var builder = WebApplication.CreateBuilder(args); + +// Add services to the container. +// Learn more about configuring OpenAPI at https://aka.ms/aspnet/openapi +builder.Services.AddOpenApi(); + +var app = builder.Build(); + +// Configure the HTTP request pipeline. 
+if (app.Environment.IsDevelopment()) +{ + app.MapOpenApi(); +} + +app.UseHttpsRedirection(); + +var summaries = new[] +{ + "Freezing", "Bracing", "Chilly", "Cool", "Mild", "Warm", "Balmy", "Hot", "Sweltering", "Scorching" +}; + +app.MapGet("/weatherforecast", () => +{ + var forecast = Enumerable.Range(1, 5).Select(index => + new WeatherForecast + ( + DateOnly.FromDateTime(DateTime.Now.AddDays(index)), + Random.Shared.Next(-20, 55), + summaries[Random.Shared.Next(summaries.Length)] + )) + .ToArray(); + return forecast; +}) +.WithName("GetWeatherForecast"); + +app.Run(); + +record WeatherForecast(DateOnly Date, int TemperatureC, string? Summary) +{ + public int TemperatureF => 32 + (int)(TemperatureC / 0.5556); +} diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/Properties/launchSettings.json b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/Properties/launchSettings.json similarity index 96% rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/Properties/launchSettings.json rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/Properties/launchSettings.json index affe11d3..8d925e0c 100644 --- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/Properties/launchSettings.json +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/Properties/launchSettings.json @@ -1,23 +1,23 @@ -{ - "$schema": "https://json.schemastore.org/launchsettings.json", - "profiles": { - "http": { - "commandName": "Project", - "dotnetRunMessages": true, - "launchBrowser": false, - "applicationUrl": "http://localhost:5157", - "environmentVariables": { - "ASPNETCORE_ENVIRONMENT": "Development" - } - }, - "https": { - "commandName": "Project", - "dotnetRunMessages": true, - "launchBrowser": false, - "applicationUrl": "https://localhost:7035;http://localhost:5157", - "environmentVariables": { - "ASPNETCORE_ENVIRONMENT": "Development" - } - } - } -} +{ + "$schema": "https://json.schemastore.org/launchsettings.json", + "profiles": { + "http": { + "commandName": "Project", + "dotnetRunMessages": true, + "launchBrowser": false, + "applicationUrl": "http://localhost:5157", + "environmentVariables": { + "ASPNETCORE_ENVIRONMENT": "Development" + } + }, + "https": { + "commandName": "Project", + "dotnetRunMessages": true, + "launchBrowser": false, + "applicationUrl": "https://localhost:7035;http://localhost:5157", + "environmentVariables": { + "ASPNETCORE_ENVIRONMENT": "Development" + } + } + } +} diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/StellaOps.TaskRunner.WebService.csproj b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/StellaOps.TaskRunner.WebService.csproj similarity index 94% rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/StellaOps.TaskRunner.WebService.csproj rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/StellaOps.TaskRunner.WebService.csproj index f1ea48fa..55a5211c 100644 --- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/StellaOps.TaskRunner.WebService.csproj +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/StellaOps.TaskRunner.WebService.csproj @@ -1,41 +1,41 @@ -<?xml version="1.0" ?> -<Project Sdk="Microsoft.NET.Sdk.Web"> - - - - <PropertyGroup> - - - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <LangVersion>preview</LangVersion> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> 
- - - - <ItemGroup> - - - <PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="10.0.0-rc.2.25502.107"/> - - - </ItemGroup> - - - - <ItemGroup> - - - <ProjectReference Include="..\StellaOps.TaskRunner.Core\StellaOps.TaskRunner.Core.csproj"/> - - - <ProjectReference Include="..\StellaOps.TaskRunner.Infrastructure\StellaOps.TaskRunner.Infrastructure.csproj"/> - - - </ItemGroup> - - - -</Project> +<?xml version="1.0" ?> +<Project Sdk="Microsoft.NET.Sdk.Web"> + + + + <PropertyGroup> + + + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <LangVersion>preview</LangVersion> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + + + + <ItemGroup> + + + <PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="10.0.0-rc.2.25502.107"/> + + + </ItemGroup> + + + + <ItemGroup> + + + <ProjectReference Include="..\StellaOps.TaskRunner.Core\StellaOps.TaskRunner.Core.csproj"/> + + + <ProjectReference Include="..\StellaOps.TaskRunner.Infrastructure\StellaOps.TaskRunner.Infrastructure.csproj"/> + + + </ItemGroup> + + + +</Project> diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/StellaOps.TaskRunner.WebService.http b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/StellaOps.TaskRunner.WebService.http similarity index 96% rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/StellaOps.TaskRunner.WebService.http rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/StellaOps.TaskRunner.WebService.http index c2efa93a..12216a59 100644 --- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/StellaOps.TaskRunner.WebService.http +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/StellaOps.TaskRunner.WebService.http @@ -1,6 +1,6 @@ -@StellaOps.TaskRunner.WebService_HostAddress = http://localhost:5157 - -GET {{StellaOps.TaskRunner.WebService_HostAddress}}/weatherforecast/ -Accept: application/json - -### +@StellaOps.TaskRunner.WebService_HostAddress = http://localhost:5157 + +GET {{StellaOps.TaskRunner.WebService_HostAddress}}/weatherforecast/ +Accept: application/json + +### diff --git a/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/appsettings.Development.json b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/appsettings.Development.json new file mode 100644 index 00000000..ff66ba6b --- /dev/null +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/appsettings.Development.json @@ -0,0 +1,8 @@ +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.AspNetCore": "Warning" + } + } +} diff --git a/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/appsettings.json b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/appsettings.json new file mode 100644 index 00000000..4d566948 --- /dev/null +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.WebService/appsettings.json @@ -0,0 +1,9 @@ +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.AspNetCore": "Warning" + } + }, + "AllowedHosts": "*" +} diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/Program.cs b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/Program.cs similarity index 97% rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/Program.cs rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/Program.cs index aa3be0ec..9f987df4 100644 --- 
a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/Program.cs +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/Program.cs @@ -1,42 +1,42 @@ -using Microsoft.Extensions.Options; -using StellaOps.TaskRunner.Core.Execution; -using StellaOps.TaskRunner.Infrastructure.Execution; -using StellaOps.TaskRunner.Worker.Services; - -var builder = Host.CreateApplicationBuilder(args); - -builder.Services.Configure<PackRunWorkerOptions>(builder.Configuration.GetSection("Worker")); -builder.Services.Configure<NotificationOptions>(builder.Configuration.GetSection("Notifications")); -builder.Services.AddHttpClient("taskrunner-notifications"); - -builder.Services.AddSingleton<IPackRunApprovalStore>(sp => -{ - var options = sp.GetRequiredService<IOptions<PackRunWorkerOptions>>(); - return new FilePackRunApprovalStore(options.Value.ApprovalStorePath); -}); - -builder.Services.AddSingleton<IPackRunJobDispatcher>(sp => -{ - var options = sp.GetRequiredService<IOptions<PackRunWorkerOptions>>(); - return new FilesystemPackRunDispatcher(options.Value.QueuePath, options.Value.ArchivePath); -}); - -builder.Services.AddSingleton<IPackRunNotificationPublisher>(sp => -{ - var options = sp.GetRequiredService<IOptions<NotificationOptions>>().Value; - if (options.ApprovalEndpoint is not null || options.PolicyEndpoint is not null) - { - return new HttpPackRunNotificationPublisher( - sp.GetRequiredService<IHttpClientFactory>(), - sp.GetRequiredService<IOptions<NotificationOptions>>(), - sp.GetRequiredService<ILogger<HttpPackRunNotificationPublisher>>()); - } - - return new LoggingPackRunNotificationPublisher(sp.GetRequiredService<ILogger<LoggingPackRunNotificationPublisher>>()); -}); - -builder.Services.AddSingleton<PackRunProcessor>(); -builder.Services.AddHostedService<PackRunWorkerService>(); - -var host = builder.Build(); -host.Run(); +using Microsoft.Extensions.Options; +using StellaOps.TaskRunner.Core.Execution; +using StellaOps.TaskRunner.Infrastructure.Execution; +using StellaOps.TaskRunner.Worker.Services; + +var builder = Host.CreateApplicationBuilder(args); + +builder.Services.Configure<PackRunWorkerOptions>(builder.Configuration.GetSection("Worker")); +builder.Services.Configure<NotificationOptions>(builder.Configuration.GetSection("Notifications")); +builder.Services.AddHttpClient("taskrunner-notifications"); + +builder.Services.AddSingleton<IPackRunApprovalStore>(sp => +{ + var options = sp.GetRequiredService<IOptions<PackRunWorkerOptions>>(); + return new FilePackRunApprovalStore(options.Value.ApprovalStorePath); +}); + +builder.Services.AddSingleton<IPackRunJobDispatcher>(sp => +{ + var options = sp.GetRequiredService<IOptions<PackRunWorkerOptions>>(); + return new FilesystemPackRunDispatcher(options.Value.QueuePath, options.Value.ArchivePath); +}); + +builder.Services.AddSingleton<IPackRunNotificationPublisher>(sp => +{ + var options = sp.GetRequiredService<IOptions<NotificationOptions>>().Value; + if (options.ApprovalEndpoint is not null || options.PolicyEndpoint is not null) + { + return new HttpPackRunNotificationPublisher( + sp.GetRequiredService<IHttpClientFactory>(), + sp.GetRequiredService<IOptions<NotificationOptions>>(), + sp.GetRequiredService<ILogger<HttpPackRunNotificationPublisher>>()); + } + + return new LoggingPackRunNotificationPublisher(sp.GetRequiredService<ILogger<LoggingPackRunNotificationPublisher>>()); +}); + +builder.Services.AddSingleton<PackRunProcessor>(); +builder.Services.AddHostedService<PackRunWorkerService>(); + +var host = builder.Build(); +host.Run(); 
diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/Properties/launchSettings.json b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/Properties/launchSettings.json similarity index 95% rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/Properties/launchSettings.json rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/Properties/launchSettings.json index 2722e495..7551e205 100644 --- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/Properties/launchSettings.json +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/Properties/launchSettings.json @@ -1,12 +1,12 @@ -{ - "$schema": "https://json.schemastore.org/launchsettings.json", - "profiles": { - "StellaOps.TaskRunner.Worker": { - "commandName": "Project", - "dotnetRunMessages": true, - "environmentVariables": { - "DOTNET_ENVIRONMENT": "Development" - } - } - } -} +{ + "$schema": "https://json.schemastore.org/launchsettings.json", + "profiles": { + "StellaOps.TaskRunner.Worker": { + "commandName": "Project", + "dotnetRunMessages": true, + "environmentVariables": { + "DOTNET_ENVIRONMENT": "Development" + } + } + } +} diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/Services/PackRunWorkerOptions.cs b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/Services/PackRunWorkerOptions.cs similarity index 97% rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/Services/PackRunWorkerOptions.cs rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/Services/PackRunWorkerOptions.cs index f954d153..fab3daf9 100644 --- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/Services/PackRunWorkerOptions.cs +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/Services/PackRunWorkerOptions.cs @@ -1,12 +1,12 @@ -namespace StellaOps.TaskRunner.Worker.Services; - -public sealed class PackRunWorkerOptions -{ - public TimeSpan IdleDelay { get; set; } = TimeSpan.FromSeconds(1); - - public string QueuePath { get; set; } = Path.Combine(AppContext.BaseDirectory, "queue"); - - public string ArchivePath { get; set; } = Path.Combine(AppContext.BaseDirectory, "queue", "archive"); - - public string ApprovalStorePath { get; set; } = Path.Combine(AppContext.BaseDirectory, "approvals"); -} +namespace StellaOps.TaskRunner.Worker.Services; + +public sealed class PackRunWorkerOptions +{ + public TimeSpan IdleDelay { get; set; } = TimeSpan.FromSeconds(1); + + public string QueuePath { get; set; } = Path.Combine(AppContext.BaseDirectory, "queue"); + + public string ArchivePath { get; set; } = Path.Combine(AppContext.BaseDirectory, "queue", "archive"); + + public string ApprovalStorePath { get; set; } = Path.Combine(AppContext.BaseDirectory, "approvals"); +} diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/Services/PackRunWorkerService.cs b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/Services/PackRunWorkerService.cs similarity index 97% rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/Services/PackRunWorkerService.cs rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/Services/PackRunWorkerService.cs index d889a84b..d240d585 100644 --- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/Services/PackRunWorkerService.cs +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/Services/PackRunWorkerService.cs @@ -1,49 +1,49 @@ -using StellaOps.TaskRunner.Core.Execution; -using Microsoft.Extensions.Options; - -namespace 
StellaOps.TaskRunner.Worker.Services; - -public sealed class PackRunWorkerService : BackgroundService -{ - private readonly IPackRunJobDispatcher dispatcher; - private readonly PackRunProcessor processor; - private readonly PackRunWorkerOptions options; - private readonly ILogger<PackRunWorkerService> logger; - - public PackRunWorkerService( - IPackRunJobDispatcher dispatcher, - PackRunProcessor processor, - IOptions<PackRunWorkerOptions> options, - ILogger<PackRunWorkerService> logger) - { - this.dispatcher = dispatcher ?? throw new ArgumentNullException(nameof(dispatcher)); - this.processor = processor ?? throw new ArgumentNullException(nameof(processor)); - this.options = options?.Value ?? throw new ArgumentNullException(nameof(options)); - this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); - } - - protected override async Task ExecuteAsync(CancellationToken stoppingToken) - { - while (!stoppingToken.IsCancellationRequested) - { - var context = await dispatcher.TryDequeueAsync(stoppingToken).ConfigureAwait(false); - if (context is null) - { - await Task.Delay(options.IdleDelay, stoppingToken).ConfigureAwait(false); - continue; - } - - logger.LogInformation("Processing pack run {RunId}.", context.RunId); - var result = await processor.ProcessNewRunAsync(context, stoppingToken).ConfigureAwait(false); - - if (result.ShouldResumeImmediately) - { - logger.LogInformation("Run {RunId} is ready to resume immediately.", context.RunId); - } - else - { - logger.LogInformation("Run {RunId} is awaiting approvals.", context.RunId); - } - } - } -} +using StellaOps.TaskRunner.Core.Execution; +using Microsoft.Extensions.Options; + +namespace StellaOps.TaskRunner.Worker.Services; + +public sealed class PackRunWorkerService : BackgroundService +{ + private readonly IPackRunJobDispatcher dispatcher; + private readonly PackRunProcessor processor; + private readonly PackRunWorkerOptions options; + private readonly ILogger<PackRunWorkerService> logger; + + public PackRunWorkerService( + IPackRunJobDispatcher dispatcher, + PackRunProcessor processor, + IOptions<PackRunWorkerOptions> options, + ILogger<PackRunWorkerService> logger) + { + this.dispatcher = dispatcher ?? throw new ArgumentNullException(nameof(dispatcher)); + this.processor = processor ?? throw new ArgumentNullException(nameof(processor)); + this.options = options?.Value ?? throw new ArgumentNullException(nameof(options)); + this.logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); + } + + protected override async Task ExecuteAsync(CancellationToken stoppingToken) + { + while (!stoppingToken.IsCancellationRequested) + { + var context = await dispatcher.TryDequeueAsync(stoppingToken).ConfigureAwait(false); + if (context is null) + { + await Task.Delay(options.IdleDelay, stoppingToken).ConfigureAwait(false); + continue; + } + + logger.LogInformation("Processing pack run {RunId}.", context.RunId); + var result = await processor.ProcessNewRunAsync(context, stoppingToken).ConfigureAwait(false); + + if (result.ShouldResumeImmediately) + { + logger.LogInformation("Run {RunId} is ready to resume immediately.", context.RunId); + } + else + { + logger.LogInformation("Run {RunId} is awaiting approvals.", context.RunId); + } + } + } +} diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/StellaOps.TaskRunner.Worker.csproj b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/StellaOps.TaskRunner.Worker.csproj similarity index 95% rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/StellaOps.TaskRunner.Worker.csproj rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/StellaOps.TaskRunner.Worker.csproj index db4d11b0..153f9dcd 100644 --- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/StellaOps.TaskRunner.Worker.csproj +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/StellaOps.TaskRunner.Worker.csproj @@ -1,43 +1,43 @@ -<?xml version="1.0" ?> -<Project Sdk="Microsoft.NET.Sdk.Worker"> - - - - <PropertyGroup> - - - <UserSecretsId>dotnet-StellaOps.TaskRunner.Worker-ce7b902e-94f1-41c2-861b-daa533850dc5</UserSecretsId> - - - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <LangVersion>preview</LangVersion> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - - - - <ItemGroup> - - - <PackageReference Include="Microsoft.Extensions.Hosting" Version="10.0.0-rc.2.25502.107"/> - - - </ItemGroup> - - - - <ItemGroup> - - - <ProjectReference Include="..\StellaOps.TaskRunner.Core\StellaOps.TaskRunner.Core.csproj"/> - - - <ProjectReference Include="..\StellaOps.TaskRunner.Infrastructure\StellaOps.TaskRunner.Infrastructure.csproj"/> - - - </ItemGroup> - - -</Project> +<?xml version="1.0" ?> +<Project Sdk="Microsoft.NET.Sdk.Worker"> + + + + <PropertyGroup> + + + <UserSecretsId>dotnet-StellaOps.TaskRunner.Worker-ce7b902e-94f1-41c2-861b-daa533850dc5</UserSecretsId> + + + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <LangVersion>preview</LangVersion> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + + + + <ItemGroup> + + + <PackageReference Include="Microsoft.Extensions.Hosting" Version="10.0.0-rc.2.25502.107"/> + + + </ItemGroup> + + + + <ItemGroup> + + + <ProjectReference Include="..\StellaOps.TaskRunner.Core\StellaOps.TaskRunner.Core.csproj"/> + + + <ProjectReference Include="..\StellaOps.TaskRunner.Infrastructure\StellaOps.TaskRunner.Infrastructure.csproj"/> + + + </ItemGroup> + + +</Project> diff --git a/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/appsettings.Development.json b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/appsettings.Development.json new file mode 100644 index 00000000..69017646 --- /dev/null +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/appsettings.Development.json @@ -0,0 +1,8 @@ +{ + "Logging": { 
+ "LogLevel": { + "Default": "Information", + "Microsoft.Hosting.Lifetime": "Information" + } + } +} diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/appsettings.json b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/appsettings.json similarity index 95% rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/appsettings.json rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/appsettings.json index 2c0cf671..a30ccd2e 100644 --- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/appsettings.json +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.Worker/appsettings.json @@ -1,18 +1,18 @@ -{ - "Logging": { - "LogLevel": { - "Default": "Information", - "Microsoft.Hosting.Lifetime": "Information" - } - }, - "Worker": { - "IdleDelay": "00:00:01", - "QueuePath": "queue", - "ArchivePath": "queue/archive", - "ApprovalStorePath": "state/approvals" - }, - "Notifications": { - "ApprovalEndpoint": null, - "PolicyEndpoint": null - } -} +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.Hosting.Lifetime": "Information" + } + }, + "Worker": { + "IdleDelay": "00:00:01", + "QueuePath": "queue", + "ArchivePath": "queue/archive", + "ApprovalStorePath": "state/approvals" + }, + "Notifications": { + "ApprovalEndpoint": null, + "PolicyEndpoint": null + } +} diff --git a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.sln b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.sln similarity index 98% rename from src/StellaOps.TaskRunner/StellaOps.TaskRunner.sln rename to src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.sln index ca994938..10849f8b 100644 --- a/src/StellaOps.TaskRunner/StellaOps.TaskRunner.sln +++ b/src/TaskRunner/StellaOps.TaskRunner/StellaOps.TaskRunner.sln @@ -1,90 +1,90 @@ - -Microsoft Visual Studio Solution File, Format Version 12.00 -# Visual Studio Version 17 -VisualStudioVersion = 17.0.31903.59 -MinimumVisualStudioVersion = 10.0.40219.1 -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.TaskRunner.Core", "StellaOps.TaskRunner.Core\StellaOps.TaskRunner.Core.csproj", "{105A0C4D-1ECD-4581-8EBF-8DB29D6EE857}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.TaskRunner.Infrastructure", "StellaOps.TaskRunner.Infrastructure\StellaOps.TaskRunner.Infrastructure.csproj", "{1B4F4A2B-9C38-4E7A-BFBE-158BF7C1F61B}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.TaskRunner.WebService", "StellaOps.TaskRunner.WebService\StellaOps.TaskRunner.WebService.csproj", "{D8A63A97-9C56-448B-A4BB-056130224750}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.TaskRunner.Worker", "StellaOps.TaskRunner.Worker\StellaOps.TaskRunner.Worker.csproj", "{C0AC4DD1-6DD7-4FCF-A6DD-5DE9B86D6753}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.TaskRunner.Tests", "StellaOps.TaskRunner.Tests\StellaOps.TaskRunner.Tests.csproj", "{552E7C8A-74F6-4E33-B956-46DF96E2BE11}" -EndProject -Global - GlobalSection(SolutionConfigurationPlatforms) = preSolution - Debug|Any CPU = Debug|Any CPU - Debug|x64 = Debug|x64 - Debug|x86 = Debug|x86 - Release|Any CPU = Release|Any CPU - Release|x64 = Release|x64 - Release|x86 = Release|x86 - EndGlobalSection - GlobalSection(ProjectConfigurationPlatforms) = postSolution - {105A0C4D-1ECD-4581-8EBF-8DB29D6EE857}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {105A0C4D-1ECD-4581-8EBF-8DB29D6EE857}.Debug|Any CPU.Build.0 = Debug|Any CPU - 
{105A0C4D-1ECD-4581-8EBF-8DB29D6EE857}.Debug|x64.ActiveCfg = Debug|Any CPU - {105A0C4D-1ECD-4581-8EBF-8DB29D6EE857}.Debug|x64.Build.0 = Debug|Any CPU - {105A0C4D-1ECD-4581-8EBF-8DB29D6EE857}.Debug|x86.ActiveCfg = Debug|Any CPU - {105A0C4D-1ECD-4581-8EBF-8DB29D6EE857}.Debug|x86.Build.0 = Debug|Any CPU - {105A0C4D-1ECD-4581-8EBF-8DB29D6EE857}.Release|Any CPU.ActiveCfg = Release|Any CPU - {105A0C4D-1ECD-4581-8EBF-8DB29D6EE857}.Release|Any CPU.Build.0 = Release|Any CPU - {105A0C4D-1ECD-4581-8EBF-8DB29D6EE857}.Release|x64.ActiveCfg = Release|Any CPU - {105A0C4D-1ECD-4581-8EBF-8DB29D6EE857}.Release|x64.Build.0 = Release|Any CPU - {105A0C4D-1ECD-4581-8EBF-8DB29D6EE857}.Release|x86.ActiveCfg = Release|Any CPU - {105A0C4D-1ECD-4581-8EBF-8DB29D6EE857}.Release|x86.Build.0 = Release|Any CPU - {1B4F4A2B-9C38-4E7A-BFBE-158BF7C1F61B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {1B4F4A2B-9C38-4E7A-BFBE-158BF7C1F61B}.Debug|Any CPU.Build.0 = Debug|Any CPU - {1B4F4A2B-9C38-4E7A-BFBE-158BF7C1F61B}.Debug|x64.ActiveCfg = Debug|Any CPU - {1B4F4A2B-9C38-4E7A-BFBE-158BF7C1F61B}.Debug|x64.Build.0 = Debug|Any CPU - {1B4F4A2B-9C38-4E7A-BFBE-158BF7C1F61B}.Debug|x86.ActiveCfg = Debug|Any CPU - {1B4F4A2B-9C38-4E7A-BFBE-158BF7C1F61B}.Debug|x86.Build.0 = Debug|Any CPU - {1B4F4A2B-9C38-4E7A-BFBE-158BF7C1F61B}.Release|Any CPU.ActiveCfg = Release|Any CPU - {1B4F4A2B-9C38-4E7A-BFBE-158BF7C1F61B}.Release|Any CPU.Build.0 = Release|Any CPU - {1B4F4A2B-9C38-4E7A-BFBE-158BF7C1F61B}.Release|x64.ActiveCfg = Release|Any CPU - {1B4F4A2B-9C38-4E7A-BFBE-158BF7C1F61B}.Release|x64.Build.0 = Release|Any CPU - {1B4F4A2B-9C38-4E7A-BFBE-158BF7C1F61B}.Release|x86.ActiveCfg = Release|Any CPU - {1B4F4A2B-9C38-4E7A-BFBE-158BF7C1F61B}.Release|x86.Build.0 = Release|Any CPU - {D8A63A97-9C56-448B-A4BB-056130224750}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {D8A63A97-9C56-448B-A4BB-056130224750}.Debug|Any CPU.Build.0 = Debug|Any CPU - {D8A63A97-9C56-448B-A4BB-056130224750}.Debug|x64.ActiveCfg = Debug|Any CPU - {D8A63A97-9C56-448B-A4BB-056130224750}.Debug|x64.Build.0 = Debug|Any CPU - {D8A63A97-9C56-448B-A4BB-056130224750}.Debug|x86.ActiveCfg = Debug|Any CPU - {D8A63A97-9C56-448B-A4BB-056130224750}.Debug|x86.Build.0 = Debug|Any CPU - {D8A63A97-9C56-448B-A4BB-056130224750}.Release|Any CPU.ActiveCfg = Release|Any CPU - {D8A63A97-9C56-448B-A4BB-056130224750}.Release|Any CPU.Build.0 = Release|Any CPU - {D8A63A97-9C56-448B-A4BB-056130224750}.Release|x64.ActiveCfg = Release|Any CPU - {D8A63A97-9C56-448B-A4BB-056130224750}.Release|x64.Build.0 = Release|Any CPU - {D8A63A97-9C56-448B-A4BB-056130224750}.Release|x86.ActiveCfg = Release|Any CPU - {D8A63A97-9C56-448B-A4BB-056130224750}.Release|x86.Build.0 = Release|Any CPU - {C0AC4DD1-6DD7-4FCF-A6DD-5DE9B86D6753}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {C0AC4DD1-6DD7-4FCF-A6DD-5DE9B86D6753}.Debug|Any CPU.Build.0 = Debug|Any CPU - {C0AC4DD1-6DD7-4FCF-A6DD-5DE9B86D6753}.Debug|x64.ActiveCfg = Debug|Any CPU - {C0AC4DD1-6DD7-4FCF-A6DD-5DE9B86D6753}.Debug|x64.Build.0 = Debug|Any CPU - {C0AC4DD1-6DD7-4FCF-A6DD-5DE9B86D6753}.Debug|x86.ActiveCfg = Debug|Any CPU - {C0AC4DD1-6DD7-4FCF-A6DD-5DE9B86D6753}.Debug|x86.Build.0 = Debug|Any CPU - {C0AC4DD1-6DD7-4FCF-A6DD-5DE9B86D6753}.Release|Any CPU.ActiveCfg = Release|Any CPU - {C0AC4DD1-6DD7-4FCF-A6DD-5DE9B86D6753}.Release|Any CPU.Build.0 = Release|Any CPU - {C0AC4DD1-6DD7-4FCF-A6DD-5DE9B86D6753}.Release|x64.ActiveCfg = Release|Any CPU - {C0AC4DD1-6DD7-4FCF-A6DD-5DE9B86D6753}.Release|x64.Build.0 = Release|Any CPU - {C0AC4DD1-6DD7-4FCF-A6DD-5DE9B86D6753}.Release|x86.ActiveCfg = 
Release|Any CPU - {C0AC4DD1-6DD7-4FCF-A6DD-5DE9B86D6753}.Release|x86.Build.0 = Release|Any CPU - {552E7C8A-74F6-4E33-B956-46DF96E2BE11}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {552E7C8A-74F6-4E33-B956-46DF96E2BE11}.Debug|Any CPU.Build.0 = Debug|Any CPU - {552E7C8A-74F6-4E33-B956-46DF96E2BE11}.Debug|x64.ActiveCfg = Debug|Any CPU - {552E7C8A-74F6-4E33-B956-46DF96E2BE11}.Debug|x64.Build.0 = Debug|Any CPU - {552E7C8A-74F6-4E33-B956-46DF96E2BE11}.Debug|x86.ActiveCfg = Debug|Any CPU - {552E7C8A-74F6-4E33-B956-46DF96E2BE11}.Debug|x86.Build.0 = Debug|Any CPU - {552E7C8A-74F6-4E33-B956-46DF96E2BE11}.Release|Any CPU.ActiveCfg = Release|Any CPU - {552E7C8A-74F6-4E33-B956-46DF96E2BE11}.Release|Any CPU.Build.0 = Release|Any CPU - {552E7C8A-74F6-4E33-B956-46DF96E2BE11}.Release|x64.ActiveCfg = Release|Any CPU - {552E7C8A-74F6-4E33-B956-46DF96E2BE11}.Release|x64.Build.0 = Release|Any CPU - {552E7C8A-74F6-4E33-B956-46DF96E2BE11}.Release|x86.ActiveCfg = Release|Any CPU - {552E7C8A-74F6-4E33-B956-46DF96E2BE11}.Release|x86.Build.0 = Release|Any CPU - EndGlobalSection - GlobalSection(SolutionProperties) = preSolution - HideSolutionNode = FALSE - EndGlobalSection -EndGlobal + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.0.31903.59 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.TaskRunner.Core", "StellaOps.TaskRunner.Core\StellaOps.TaskRunner.Core.csproj", "{105A0C4D-1ECD-4581-8EBF-8DB29D6EE857}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.TaskRunner.Infrastructure", "StellaOps.TaskRunner.Infrastructure\StellaOps.TaskRunner.Infrastructure.csproj", "{1B4F4A2B-9C38-4E7A-BFBE-158BF7C1F61B}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.TaskRunner.WebService", "StellaOps.TaskRunner.WebService\StellaOps.TaskRunner.WebService.csproj", "{D8A63A97-9C56-448B-A4BB-056130224750}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.TaskRunner.Worker", "StellaOps.TaskRunner.Worker\StellaOps.TaskRunner.Worker.csproj", "{C0AC4DD1-6DD7-4FCF-A6DD-5DE9B86D6753}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.TaskRunner.Tests", "StellaOps.TaskRunner.Tests\StellaOps.TaskRunner.Tests.csproj", "{552E7C8A-74F6-4E33-B956-46DF96E2BE11}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|Any CPU = Release|Any CPU + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {105A0C4D-1ECD-4581-8EBF-8DB29D6EE857}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {105A0C4D-1ECD-4581-8EBF-8DB29D6EE857}.Debug|Any CPU.Build.0 = Debug|Any CPU + {105A0C4D-1ECD-4581-8EBF-8DB29D6EE857}.Debug|x64.ActiveCfg = Debug|Any CPU + {105A0C4D-1ECD-4581-8EBF-8DB29D6EE857}.Debug|x64.Build.0 = Debug|Any CPU + {105A0C4D-1ECD-4581-8EBF-8DB29D6EE857}.Debug|x86.ActiveCfg = Debug|Any CPU + {105A0C4D-1ECD-4581-8EBF-8DB29D6EE857}.Debug|x86.Build.0 = Debug|Any CPU + {105A0C4D-1ECD-4581-8EBF-8DB29D6EE857}.Release|Any CPU.ActiveCfg = Release|Any CPU + {105A0C4D-1ECD-4581-8EBF-8DB29D6EE857}.Release|Any CPU.Build.0 = Release|Any CPU + {105A0C4D-1ECD-4581-8EBF-8DB29D6EE857}.Release|x64.ActiveCfg = Release|Any CPU + {105A0C4D-1ECD-4581-8EBF-8DB29D6EE857}.Release|x64.Build.0 = Release|Any CPU + 
{105A0C4D-1ECD-4581-8EBF-8DB29D6EE857}.Release|x86.ActiveCfg = Release|Any CPU + {105A0C4D-1ECD-4581-8EBF-8DB29D6EE857}.Release|x86.Build.0 = Release|Any CPU + {1B4F4A2B-9C38-4E7A-BFBE-158BF7C1F61B}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {1B4F4A2B-9C38-4E7A-BFBE-158BF7C1F61B}.Debug|Any CPU.Build.0 = Debug|Any CPU + {1B4F4A2B-9C38-4E7A-BFBE-158BF7C1F61B}.Debug|x64.ActiveCfg = Debug|Any CPU + {1B4F4A2B-9C38-4E7A-BFBE-158BF7C1F61B}.Debug|x64.Build.0 = Debug|Any CPU + {1B4F4A2B-9C38-4E7A-BFBE-158BF7C1F61B}.Debug|x86.ActiveCfg = Debug|Any CPU + {1B4F4A2B-9C38-4E7A-BFBE-158BF7C1F61B}.Debug|x86.Build.0 = Debug|Any CPU + {1B4F4A2B-9C38-4E7A-BFBE-158BF7C1F61B}.Release|Any CPU.ActiveCfg = Release|Any CPU + {1B4F4A2B-9C38-4E7A-BFBE-158BF7C1F61B}.Release|Any CPU.Build.0 = Release|Any CPU + {1B4F4A2B-9C38-4E7A-BFBE-158BF7C1F61B}.Release|x64.ActiveCfg = Release|Any CPU + {1B4F4A2B-9C38-4E7A-BFBE-158BF7C1F61B}.Release|x64.Build.0 = Release|Any CPU + {1B4F4A2B-9C38-4E7A-BFBE-158BF7C1F61B}.Release|x86.ActiveCfg = Release|Any CPU + {1B4F4A2B-9C38-4E7A-BFBE-158BF7C1F61B}.Release|x86.Build.0 = Release|Any CPU + {D8A63A97-9C56-448B-A4BB-056130224750}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D8A63A97-9C56-448B-A4BB-056130224750}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D8A63A97-9C56-448B-A4BB-056130224750}.Debug|x64.ActiveCfg = Debug|Any CPU + {D8A63A97-9C56-448B-A4BB-056130224750}.Debug|x64.Build.0 = Debug|Any CPU + {D8A63A97-9C56-448B-A4BB-056130224750}.Debug|x86.ActiveCfg = Debug|Any CPU + {D8A63A97-9C56-448B-A4BB-056130224750}.Debug|x86.Build.0 = Debug|Any CPU + {D8A63A97-9C56-448B-A4BB-056130224750}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D8A63A97-9C56-448B-A4BB-056130224750}.Release|Any CPU.Build.0 = Release|Any CPU + {D8A63A97-9C56-448B-A4BB-056130224750}.Release|x64.ActiveCfg = Release|Any CPU + {D8A63A97-9C56-448B-A4BB-056130224750}.Release|x64.Build.0 = Release|Any CPU + {D8A63A97-9C56-448B-A4BB-056130224750}.Release|x86.ActiveCfg = Release|Any CPU + {D8A63A97-9C56-448B-A4BB-056130224750}.Release|x86.Build.0 = Release|Any CPU + {C0AC4DD1-6DD7-4FCF-A6DD-5DE9B86D6753}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C0AC4DD1-6DD7-4FCF-A6DD-5DE9B86D6753}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C0AC4DD1-6DD7-4FCF-A6DD-5DE9B86D6753}.Debug|x64.ActiveCfg = Debug|Any CPU + {C0AC4DD1-6DD7-4FCF-A6DD-5DE9B86D6753}.Debug|x64.Build.0 = Debug|Any CPU + {C0AC4DD1-6DD7-4FCF-A6DD-5DE9B86D6753}.Debug|x86.ActiveCfg = Debug|Any CPU + {C0AC4DD1-6DD7-4FCF-A6DD-5DE9B86D6753}.Debug|x86.Build.0 = Debug|Any CPU + {C0AC4DD1-6DD7-4FCF-A6DD-5DE9B86D6753}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C0AC4DD1-6DD7-4FCF-A6DD-5DE9B86D6753}.Release|Any CPU.Build.0 = Release|Any CPU + {C0AC4DD1-6DD7-4FCF-A6DD-5DE9B86D6753}.Release|x64.ActiveCfg = Release|Any CPU + {C0AC4DD1-6DD7-4FCF-A6DD-5DE9B86D6753}.Release|x64.Build.0 = Release|Any CPU + {C0AC4DD1-6DD7-4FCF-A6DD-5DE9B86D6753}.Release|x86.ActiveCfg = Release|Any CPU + {C0AC4DD1-6DD7-4FCF-A6DD-5DE9B86D6753}.Release|x86.Build.0 = Release|Any CPU + {552E7C8A-74F6-4E33-B956-46DF96E2BE11}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {552E7C8A-74F6-4E33-B956-46DF96E2BE11}.Debug|Any CPU.Build.0 = Debug|Any CPU + {552E7C8A-74F6-4E33-B956-46DF96E2BE11}.Debug|x64.ActiveCfg = Debug|Any CPU + {552E7C8A-74F6-4E33-B956-46DF96E2BE11}.Debug|x64.Build.0 = Debug|Any CPU + {552E7C8A-74F6-4E33-B956-46DF96E2BE11}.Debug|x86.ActiveCfg = Debug|Any CPU + {552E7C8A-74F6-4E33-B956-46DF96E2BE11}.Debug|x86.Build.0 = Debug|Any CPU + {552E7C8A-74F6-4E33-B956-46DF96E2BE11}.Release|Any CPU.ActiveCfg = 
Release|Any CPU + {552E7C8A-74F6-4E33-B956-46DF96E2BE11}.Release|Any CPU.Build.0 = Release|Any CPU + {552E7C8A-74F6-4E33-B956-46DF96E2BE11}.Release|x64.ActiveCfg = Release|Any CPU + {552E7C8A-74F6-4E33-B956-46DF96E2BE11}.Release|x64.Build.0 = Release|Any CPU + {552E7C8A-74F6-4E33-B956-46DF96E2BE11}.Release|x86.ActiveCfg = Release|Any CPU + {552E7C8A-74F6-4E33-B956-46DF96E2BE11}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection +EndGlobal diff --git a/src/StellaOps.TaskRunner/TASKS.md b/src/TaskRunner/StellaOps.TaskRunner/TASKS.md similarity index 99% rename from src/StellaOps.TaskRunner/TASKS.md rename to src/TaskRunner/StellaOps.TaskRunner/TASKS.md index 22d4ba98..f6822ba3 100644 --- a/src/StellaOps.TaskRunner/TASKS.md +++ b/src/TaskRunner/StellaOps.TaskRunner/TASKS.md @@ -1,51 +1,51 @@ -# Task Runner Service Task Board — Epic 12: CLI Parity & Task Packs - -## Sprint 41 – Foundations -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| TASKRUN-41-001 | TODO | Task Runner Guild | ORCH-SVC-41-101, AUTH-PACKS-41-001 | Bootstrap service, define migrations for `pack_runs`, `pack_run_logs`, `pack_artifacts`, implement run API (create/get/log stream), local executor, approvals pause, artifact capture, and provenance manifest generation. | Service builds/tests; migrations scripted; run API functional with sample pack; logs/artefacts stored; manifest signed; compliance checklist recorded. | - -## Sprint 42 – Advanced Execution -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| TASKRUN-42-001 | DOING (2025-10-29) | Task Runner Guild | TASKRUN-41-001 | Add loops, conditionals, `maxParallel`, outputs, simulation mode, policy gate integration, and failure recovery (retry/abort) with deterministic state. | Executor handles control flow; simulation returns plan; policy gates pause for approvals; tests cover restart/resume. | -> 2025-10-29: Initiated manifest parsing + deterministic planning core to unblock approvals pipeline; building expression engine + plan hashing to support CLI parity. -> 2025-10-29: Landed manifest loader, planner, deterministic hash, outputs + approval/policy insights with unit tests; awaiting upstream APIs for execution-side wiring. - -## Sprint 43 – Approvals, Notifications, Hardening -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| TASKRUN-43-001 | DOING (2025-10-29) | Task Runner Guild | TASKRUN-42-001, NOTIFY-SVC-40-001 | Implement approvals workflow (resume after approval), notifications integration, remote artifact uploads, chaos resilience, secret injection, and audit logs. | Approvals/resume flow validated; notifications emitted; chaos tests documented; secrets redacted in logs; audit logs complete. | -> 2025-10-29: Starting approvals orchestration — defining persistence/workflow scaffolding, integrating plan insights for notifications, and staging resume hooks. -> 2025-10-29: Added approval coordinator + policy notification bridge with unit tests; ready to wire into worker execution/resume path. 
- -## Authority-Backed Scopes & Tenancy (Epic 14) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| TASKRUN-TEN-48-001 | TODO | Task Runner Guild | ORCH-TEN-48-001 | Require tenant/project context for every pack run, set DB/object-store prefixes, block egress when tenant restricted, and propagate context to steps/logs. | Pack runs fail without tenant context; artifacts stored under tenant prefix; tests verify enforcement. | - -## Observability & Forensics (Epic 15) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| TASKRUN-OBS-50-001 | TODO | Task Runner Guild | TELEMETRY-OBS-50-001, TELEMETRY-OBS-50-002 | Adopt telemetry core in Task Runner host + worker executors, ensuring step execution spans/logs include `trace_id`, `tenant_id`, `run_id`, and scrubbed command transcripts. | Telemetry emitted for sample runs; integration test verifies context propagation across async steps; log schema validated. | -| TASKRUN-OBS-51-001 | TODO | Task Runner Guild, DevOps Guild | TASKRUN-OBS-50-001, TELEMETRY-OBS-51-001 | Emit metrics for step latency, retries, queue depth, sandbox resource usage; define SLOs for pack run completion and failure rate; surface burn-rate alerts to collector/Notifier. | Metrics appear in dashboards; burn-rate alert tested; docs capture thresholds and response playbook. | -| TASKRUN-OBS-52-001 | TODO | Task Runner Guild | TASKRUN-OBS-50-001, TIMELINE-OBS-52-002 | Produce timeline events for pack runs (`pack.started`, `pack.step.completed`, `pack.failed`) containing evidence pointers and policy gate context. Provide dedupe + retry logic. | Timeline events recorded for sample runs; duplicates suppressed; tests cover error/retry; docs updated. | -| TASKRUN-OBS-53-001 | TODO | Task Runner Guild, Evidence Locker Guild | TASKRUN-OBS-52-001, EVID-OBS-53-002 | Capture step transcripts, artifact manifests, environment digests, and policy approvals into evidence locker snapshots; ensure redaction + hash chain coverage. | Evidence bundle created for sample pack; redaction tests pass; manifest linked in timeline. | -| TASKRUN-OBS-54-001 | TODO | Task Runner Guild, Provenance Guild | TASKRUN-OBS-53-001, PROV-OBS-53-002 | Generate DSSE attestations for pack runs (subjects = produced artifacts) and expose verification API/CLI integration. Store references in timeline events. | Attestation generated + verified; timeline includes attestation ref; docs updated. | -| TASKRUN-OBS-55-001 | TODO | Task Runner Guild, DevOps Guild | TASKRUN-OBS-51-001, TELEMETRY-OBS-55-001, DEVOPS-OBS-55-001 | Implement incident mode escalations (extra telemetry, debug artifact capture, retention bump) and align on automatic activation via SLO breach webhooks. | Incident mode toggles validated; extra artefacts captured; notifier integration tested. | - -## Air-Gapped Mode (Epic 16) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| TASKRUN-AIRGAP-56-001 | TODO | Task Runner Guild, AirGap Policy Guild | AIRGAP-POL-56-001, TASKRUN-OBS-50-001 | Enforce plan-time validation rejecting steps with non-allowlisted network calls in sealed mode and surface remediation errors. | Planner blocks disallowed steps; error contains remediation; tests cover sealed/unsealed behavior. 
| -| TASKRUN-AIRGAP-56-002 | TODO | Task Runner Guild, AirGap Importer Guild | TASKRUN-AIRGAP-56-001, AIRGAP-IMP-57-002 | Add helper steps for bundle ingestion (checksum verification, staging to object store) with deterministic outputs. | Helper steps succeed deterministically; integration tests import sample bundle. | -| TASKRUN-AIRGAP-57-001 | TODO | Task Runner Guild, AirGap Controller Guild | TASKRUN-AIRGAP-56-001, AIRGAP-CTL-56-002 | Refuse to execute plans when environment sealed=false but declared sealed install; emit advisory timeline events. | Mismatch detection works; timeline + telemetry record violation; docs updated. | -| TASKRUN-AIRGAP-58-001 | TODO | Task Runner Guild, Evidence Locker Guild | TASKRUN-OBS-53-001, EVID-OBS-55-001 | Capture bundle import job transcripts, hashed inputs, and outputs into portable evidence bundles. | Evidence recorded; manifests deterministic; timeline references created. | - -## SDKs & OpenAPI (Epic 17) -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| TASKRUN-OAS-61-001 | TODO | Task Runner Guild, API Contracts Guild | OAS-61-001 | Document Task Runner APIs (pack runs, logs, approvals) in service OAS, including streaming response schemas and examples. | OAS covers all Task Runner endpoints with examples; lint passes. | -| TASKRUN-OAS-61-002 | TODO | Task Runner Guild | TASKRUN-OAS-61-001 | Expose `GET /.well-known/openapi` returning signed spec metadata, build version, and ETag. | Discovery endpoint deployed; contract tests call endpoint; telemetry includes `x-stella-service`. | -| TASKRUN-OAS-62-001 | TODO | Task Runner Guild, SDK Generator Guild | TASKRUN-OAS-61-001, SDKGEN-63-001 | Provide SDK examples for pack run lifecycle; ensure SDKs offer streaming log helpers and paginator wrappers. | SDK smoke tests cover pack run flows; docs auto-embed snippets. | -| TASKRUN-OAS-63-001 | TODO | Task Runner Guild, API Governance Guild | APIGOV-63-001 | Implement deprecation header support and Sunset handling for legacy pack APIs; emit notifications metadata. | Deprecated endpoints emit headers; notifications pipeline validated; documentation updated. | +# Task Runner Service Task Board — Epic 12: CLI Parity & Task Packs + +## Sprint 41 – Foundations +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| TASKRUN-41-001 | TODO | Task Runner Guild | ORCH-SVC-41-101, AUTH-PACKS-41-001 | Bootstrap service, define migrations for `pack_runs`, `pack_run_logs`, `pack_artifacts`, implement run API (create/get/log stream), local executor, approvals pause, artifact capture, and provenance manifest generation. | Service builds/tests; migrations scripted; run API functional with sample pack; logs/artefacts stored; manifest signed; compliance checklist recorded. | + +## Sprint 42 – Advanced Execution +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| TASKRUN-42-001 | DOING (2025-10-29) | Task Runner Guild | TASKRUN-41-001 | Add loops, conditionals, `maxParallel`, outputs, simulation mode, policy gate integration, and failure recovery (retry/abort) with deterministic state. | Executor handles control flow; simulation returns plan; policy gates pause for approvals; tests cover restart/resume. 
| +> 2025-10-29: Initiated manifest parsing + deterministic planning core to unblock approvals pipeline; building expression engine + plan hashing to support CLI parity. +> 2025-10-29: Landed manifest loader, planner, deterministic hash, outputs + approval/policy insights with unit tests; awaiting upstream APIs for execution-side wiring. + +## Sprint 43 – Approvals, Notifications, Hardening +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| TASKRUN-43-001 | DOING (2025-10-29) | Task Runner Guild | TASKRUN-42-001, NOTIFY-SVC-40-001 | Implement approvals workflow (resume after approval), notifications integration, remote artifact uploads, chaos resilience, secret injection, and audit logs. | Approvals/resume flow validated; notifications emitted; chaos tests documented; secrets redacted in logs; audit logs complete. | +> 2025-10-29: Starting approvals orchestration — defining persistence/workflow scaffolding, integrating plan insights for notifications, and staging resume hooks. +> 2025-10-29: Added approval coordinator + policy notification bridge with unit tests; ready to wire into worker execution/resume path. + +## Authority-Backed Scopes & Tenancy (Epic 14) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| TASKRUN-TEN-48-001 | TODO | Task Runner Guild | ORCH-TEN-48-001 | Require tenant/project context for every pack run, set DB/object-store prefixes, block egress when tenant restricted, and propagate context to steps/logs. | Pack runs fail without tenant context; artifacts stored under tenant prefix; tests verify enforcement. | + +## Observability & Forensics (Epic 15) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| TASKRUN-OBS-50-001 | TODO | Task Runner Guild | TELEMETRY-OBS-50-001, TELEMETRY-OBS-50-002 | Adopt telemetry core in Task Runner host + worker executors, ensuring step execution spans/logs include `trace_id`, `tenant_id`, `run_id`, and scrubbed command transcripts. | Telemetry emitted for sample runs; integration test verifies context propagation across async steps; log schema validated. | +| TASKRUN-OBS-51-001 | TODO | Task Runner Guild, DevOps Guild | TASKRUN-OBS-50-001, TELEMETRY-OBS-51-001 | Emit metrics for step latency, retries, queue depth, sandbox resource usage; define SLOs for pack run completion and failure rate; surface burn-rate alerts to collector/Notifier. | Metrics appear in dashboards; burn-rate alert tested; docs capture thresholds and response playbook. | +| TASKRUN-OBS-52-001 | TODO | Task Runner Guild | TASKRUN-OBS-50-001, TIMELINE-OBS-52-002 | Produce timeline events for pack runs (`pack.started`, `pack.step.completed`, `pack.failed`) containing evidence pointers and policy gate context. Provide dedupe + retry logic. | Timeline events recorded for sample runs; duplicates suppressed; tests cover error/retry; docs updated. | +| TASKRUN-OBS-53-001 | TODO | Task Runner Guild, Evidence Locker Guild | TASKRUN-OBS-52-001, EVID-OBS-53-002 | Capture step transcripts, artifact manifests, environment digests, and policy approvals into evidence locker snapshots; ensure redaction + hash chain coverage. | Evidence bundle created for sample pack; redaction tests pass; manifest linked in timeline. 
| +| TASKRUN-OBS-54-001 | TODO | Task Runner Guild, Provenance Guild | TASKRUN-OBS-53-001, PROV-OBS-53-002 | Generate DSSE attestations for pack runs (subjects = produced artifacts) and expose verification API/CLI integration. Store references in timeline events. | Attestation generated + verified; timeline includes attestation ref; docs updated. | +| TASKRUN-OBS-55-001 | TODO | Task Runner Guild, DevOps Guild | TASKRUN-OBS-51-001, TELEMETRY-OBS-55-001, DEVOPS-OBS-55-001 | Implement incident mode escalations (extra telemetry, debug artifact capture, retention bump) and align on automatic activation via SLO breach webhooks. | Incident mode toggles validated; extra artefacts captured; notifier integration tested. | + +## Air-Gapped Mode (Epic 16) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| TASKRUN-AIRGAP-56-001 | TODO | Task Runner Guild, AirGap Policy Guild | AIRGAP-POL-56-001, TASKRUN-OBS-50-001 | Enforce plan-time validation rejecting steps with non-allowlisted network calls in sealed mode and surface remediation errors. | Planner blocks disallowed steps; error contains remediation; tests cover sealed/unsealed behavior. | +| TASKRUN-AIRGAP-56-002 | TODO | Task Runner Guild, AirGap Importer Guild | TASKRUN-AIRGAP-56-001, AIRGAP-IMP-57-002 | Add helper steps for bundle ingestion (checksum verification, staging to object store) with deterministic outputs. | Helper steps succeed deterministically; integration tests import sample bundle. | +| TASKRUN-AIRGAP-57-001 | TODO | Task Runner Guild, AirGap Controller Guild | TASKRUN-AIRGAP-56-001, AIRGAP-CTL-56-002 | Refuse to execute plans when environment sealed=false but declared sealed install; emit advisory timeline events. | Mismatch detection works; timeline + telemetry record violation; docs updated. | +| TASKRUN-AIRGAP-58-001 | TODO | Task Runner Guild, Evidence Locker Guild | TASKRUN-OBS-53-001, EVID-OBS-55-001 | Capture bundle import job transcripts, hashed inputs, and outputs into portable evidence bundles. | Evidence recorded; manifests deterministic; timeline references created. | + +## SDKs & OpenAPI (Epic 17) +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| TASKRUN-OAS-61-001 | TODO | Task Runner Guild, API Contracts Guild | OAS-61-001 | Document Task Runner APIs (pack runs, logs, approvals) in service OAS, including streaming response schemas and examples. | OAS covers all Task Runner endpoints with examples; lint passes. | +| TASKRUN-OAS-61-002 | TODO | Task Runner Guild | TASKRUN-OAS-61-001 | Expose `GET /.well-known/openapi` returning signed spec metadata, build version, and ETag. | Discovery endpoint deployed; contract tests call endpoint; telemetry includes `x-stella-service`. | +| TASKRUN-OAS-62-001 | TODO | Task Runner Guild, SDK Generator Guild | TASKRUN-OAS-61-001, SDKGEN-63-001 | Provide SDK examples for pack run lifecycle; ensure SDKs offer streaming log helpers and paginator wrappers. | SDK smoke tests cover pack run flows; docs auto-embed snippets. | +| TASKRUN-OAS-63-001 | TODO | Task Runner Guild, API Governance Guild | APIGOV-63-001 | Implement deprecation header support and Sunset handling for legacy pack APIs; emit notifications metadata. | Deprecated endpoints emit headers; notifications pipeline validated; documentation updated. 
|
diff --git a/src/StellaOps.Telemetry.Core/AGENTS.md b/src/Telemetry/StellaOps.Telemetry.Core/AGENTS.md
similarity index 98%
rename from src/StellaOps.Telemetry.Core/AGENTS.md
rename to src/Telemetry/StellaOps.Telemetry.Core/AGENTS.md
index 8e923a92..1f7f0f28 100644
--- a/src/StellaOps.Telemetry.Core/AGENTS.md
+++ b/src/Telemetry/StellaOps.Telemetry.Core/AGENTS.md
@@ -1,21 +1,21 @@
-# StellaOps Telemetry Core Guild Charter
-
-## Mission
-Deliver shared observability primitives for every StellaOps service. Provide deterministic logging, metrics, and tracing utilities that enforce the imposed rule: instrumentation patterns adopted here must be propagated wherever applicable.
-
-## Scope
-- Structured logging facade with fixed field schema and privacy guards.
-- OpenTelemetry SDK bootstrapping helpers for services and workers.
-- Sampling, exemplar, and redaction policies enforced in code.
-- Context propagation middleware for HTTP, gRPC, message, and job pipelines.
-- Validation test harnesses ensuring deterministic output across builds.
-
-## Coordination
-- Partner with DevOps Guild on collector/exporter defaults.
-- Align with Authority on trace/log scope annotations.
-- Collaborate with service guilds to roll out new instrumentation packages per sprint objectives.
-
-## Definition of Done
-- All library changes ship unit + integration tests.
-- Determinism mode runs (`dotnet test -c Deterministic`) pass locally and in CI.
-- Updated changelog fragments stored under `/docs/observability/` as referenced by tasks.
+# StellaOps Telemetry Core Guild Charter
+
+## Mission
+Deliver shared observability primitives for every StellaOps service. Provide deterministic logging, metrics, and tracing utilities that enforce the imposed rule: instrumentation patterns adopted here must be propagated wherever applicable.
+
+## Scope
+- Structured logging facade with fixed field schema and privacy guards.
+- OpenTelemetry SDK bootstrapping helpers for services and workers.
+- Sampling, exemplar, and redaction policies enforced in code.
+- Context propagation middleware for HTTP, gRPC, message, and job pipelines.
+- Validation test harnesses ensuring deterministic output across builds.
+
+## Coordination
+- Partner with DevOps Guild on collector/exporter defaults.
+- Align with Authority on trace/log scope annotations.
+- Collaborate with service guilds to roll out new instrumentation packages per sprint objectives.
+
+## Definition of Done
+- All library changes ship unit + integration tests.
+- Determinism mode runs (`dotnet test -c Deterministic`) pass locally and in CI.
+- Updated changelog fragments stored under `/docs/observability/` as referenced by tasks.
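The Telemetry Core charter above calls for a structured logging facade and context propagation middleware that carry `trace_id` and `tenant_id` across HTTP, gRPC, and job pipelines. As a rough sketch of that pattern (not the actual `StellaOps.Telemetry.Core` code; the middleware name, the `X-StellaOps-Tenant` header, and the scope field names are assumptions), an ASP.NET Core host could push those identifiers into the logging scope like this:

```csharp
using System.Collections.Generic;
using System.Diagnostics;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;
using Microsoft.Extensions.Logging;

// Illustrative middleware: pushes trace and tenant identifiers into the logging scope
// so every log line emitted while the request is handled carries the same fields.
public sealed class ContextPropagationMiddleware
{
    // Assumed header name for this sketch; real services may resolve the tenant differently.
    private const string TenantHeader = "X-StellaOps-Tenant";

    private readonly RequestDelegate _next;
    private readonly ILogger<ContextPropagationMiddleware> _logger;

    public ContextPropagationMiddleware(RequestDelegate next, ILogger<ContextPropagationMiddleware> logger)
    {
        _next = next;
        _logger = logger;
    }

    public async Task InvokeAsync(HttpContext context)
    {
        var tenantId = context.Request.Headers[TenantHeader].ToString();
        var traceId = Activity.Current?.TraceId.ToString() ?? context.TraceIdentifier;

        using (_logger.BeginScope(new Dictionary<string, object>
        {
            ["trace_id"] = traceId,
            ["tenant_id"] = string.IsNullOrEmpty(tenantId) ? "unknown" : tenantId,
        }))
        {
            await _next(context);
        }
    }
}

// Host registration (e.g. in Program.cs):
// app.UseMiddleware<ContextPropagationMiddleware>();
```

In a full implementation the same context would presumably also flow through OpenTelemetry baggage so queue consumers and background jobs can restore it across async boundaries, which is what the charter's propagation middleware is meant to guarantee.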
diff --git a/src/StellaOps.Telemetry.Core/TASKS.md b/src/Telemetry/StellaOps.Telemetry.Core/TASKS.md similarity index 99% rename from src/StellaOps.Telemetry.Core/TASKS.md rename to src/Telemetry/StellaOps.Telemetry.Core/TASKS.md index f982da79..6399418c 100644 --- a/src/StellaOps.Telemetry.Core/TASKS.md +++ b/src/Telemetry/StellaOps.Telemetry.Core/TASKS.md @@ -1,23 +1,23 @@ -# Telemetry Core Task Board — Epic 15: Observability & Forensics - -## Sprint 50 – Baseline Instrumentation -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| TELEMETRY-OBS-50-001 | TODO | Telemetry Core Guild | — | Create `StellaOps.Telemetry.Core` library with structured logging facade, OpenTelemetry configuration helpers, and deterministic bootstrap (service name/version detection, resource attributes). Publish sample usage for web/worker hosts. | Library builds/tests; NuGet local package published; sample host integration passes smoke tests; compliance checklist recorded. | -| TELEMETRY-OBS-50-002 | TODO | Telemetry Core Guild | TELEMETRY-OBS-50-001 | Implement context propagation middleware/adapters for HTTP, gRPC, background jobs, and CLI invocations, carrying `trace_id`, `tenant_id`, `actor`, and imposed-rule metadata. Provide test harness covering async resume scenarios. | Middleware packages pass integration tests; context restored across async boundaries; CLI harness emits trace headers; docs drafted under `/docs/observability/telemetry-standards.md` stub. | - -## Sprint 51 – Metrics & Log Contracts -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| TELEMETRY-OBS-51-001 | TODO | Telemetry Core Guild, Observability Guild | TELEMETRY-OBS-50-001 | Ship metrics helpers for golden signals (histograms, counters, gauges) with exemplar support and cardinality guards. Provide Roslyn analyzer preventing unsanitised labels. | Helpers integrated in sample service; analyzer blocks forbidden label usage; unit/property tests cover bounds; documentation PR prepared. | -| TELEMETRY-OBS-51-002 | TODO | Telemetry Core Guild, Security Guild | TELEMETRY-OBS-50-001 | Implement redaction/scrubbing filters for secrets/PII enforced at logger sink, configurable per-tenant with TTL, including audit of overrides. Add determinism tests verifying stable field order and timestamp normalization. | Scrubber defaults enforced; override API audited; determinism tests pass twice with identical output; security review sign-off recorded. | - -## Sprint 55 – Incident Mode Support -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| TELEMETRY-OBS-55-001 | TODO | Telemetry Core Guild | TELEMETRY-OBS-50-002, TELEMETRY-OBS-51-002 | Provide incident mode toggle API that adjusts sampling, enables extended retention tags, and records activation trail for services. Ensure toggle honored by all hosting templates and integrates with Config/FeatureFlag providers. | Toggle API launched; integration tests confirm sampling increase; activation events logged with tenant context; runbook updated. 
| - -## Sprint 56 – Sealed Mode Hooks -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| TELEMETRY-OBS-56-001 | TODO | Telemetry Core Guild | TELEMETRY-OBS-50-001, TELEMETRY-OBS-55-001 | Add sealed-mode telemetry helpers (drift metrics, seal/unseal spans, offline exporters) and ensure hosts can disable external exporters when sealed. | Helpers published; sealed-mode tests verify no external egress; docs updated with sealed guidance. | +# Telemetry Core Task Board — Epic 15: Observability & Forensics + +## Sprint 50 – Baseline Instrumentation +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| TELEMETRY-OBS-50-001 | TODO | Telemetry Core Guild | — | Create `StellaOps.Telemetry.Core` library with structured logging facade, OpenTelemetry configuration helpers, and deterministic bootstrap (service name/version detection, resource attributes). Publish sample usage for web/worker hosts. | Library builds/tests; NuGet local package published; sample host integration passes smoke tests; compliance checklist recorded. | +| TELEMETRY-OBS-50-002 | TODO | Telemetry Core Guild | TELEMETRY-OBS-50-001 | Implement context propagation middleware/adapters for HTTP, gRPC, background jobs, and CLI invocations, carrying `trace_id`, `tenant_id`, `actor`, and imposed-rule metadata. Provide test harness covering async resume scenarios. | Middleware packages pass integration tests; context restored across async boundaries; CLI harness emits trace headers; docs drafted under `/docs/observability/telemetry-standards.md` stub. | + +## Sprint 51 – Metrics & Log Contracts +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| TELEMETRY-OBS-51-001 | TODO | Telemetry Core Guild, Observability Guild | TELEMETRY-OBS-50-001 | Ship metrics helpers for golden signals (histograms, counters, gauges) with exemplar support and cardinality guards. Provide Roslyn analyzer preventing unsanitised labels. | Helpers integrated in sample service; analyzer blocks forbidden label usage; unit/property tests cover bounds; documentation PR prepared. | +| TELEMETRY-OBS-51-002 | TODO | Telemetry Core Guild, Security Guild | TELEMETRY-OBS-50-001 | Implement redaction/scrubbing filters for secrets/PII enforced at logger sink, configurable per-tenant with TTL, including audit of overrides. Add determinism tests verifying stable field order and timestamp normalization. | Scrubber defaults enforced; override API audited; determinism tests pass twice with identical output; security review sign-off recorded. | + +## Sprint 55 – Incident Mode Support +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| TELEMETRY-OBS-55-001 | TODO | Telemetry Core Guild | TELEMETRY-OBS-50-002, TELEMETRY-OBS-51-002 | Provide incident mode toggle API that adjusts sampling, enables extended retention tags, and records activation trail for services. Ensure toggle honored by all hosting templates and integrates with Config/FeatureFlag providers. | Toggle API launched; integration tests confirm sampling increase; activation events logged with tenant context; runbook updated. 
| + +## Sprint 56 – Sealed Mode Hooks +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| TELEMETRY-OBS-56-001 | TODO | Telemetry Core Guild | TELEMETRY-OBS-50-001, TELEMETRY-OBS-55-001 | Add sealed-mode telemetry helpers (drift metrics, seal/unseal spans, offline exporters) and ensure hosts can disable external exporters when sealed. | Helpers published; sealed-mode tests verify no external egress; docs updated with sealed guidance. | diff --git a/src/TimelineIndexer/StellaOps.TimelineIndexer.sln b/src/TimelineIndexer/StellaOps.TimelineIndexer.sln new file mode 100644 index 00000000..80abe2da --- /dev/null +++ b/src/TimelineIndexer/StellaOps.TimelineIndexer.sln @@ -0,0 +1,99 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.0.31903.59 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "StellaOps.TimelineIndexer", "StellaOps.TimelineIndexer", "{9BEC1B06-C3A6-BC60-1909-5103688B5A4A}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.TimelineIndexer.Core", "StellaOps.TimelineIndexer\StellaOps.TimelineIndexer.Core\StellaOps.TimelineIndexer.Core.csproj", "{1B9BEA29-7A0C-437D-A82A-78CB0046C0CB}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.TimelineIndexer.Infrastructure", "StellaOps.TimelineIndexer\StellaOps.TimelineIndexer.Infrastructure\StellaOps.TimelineIndexer.Infrastructure.csproj", "{E32BB3F0-E40E-493A-AC32-D7CF54E948C8}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.TimelineIndexer.Tests", "StellaOps.TimelineIndexer\StellaOps.TimelineIndexer.Tests\StellaOps.TimelineIndexer.Tests.csproj", "{21CA5727-7929-4B5D-AD7F-46EBE42A9F8E}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.TimelineIndexer.WebService", "StellaOps.TimelineIndexer\StellaOps.TimelineIndexer.WebService\StellaOps.TimelineIndexer.WebService.csproj", "{A9FE403A-A221-41B0-B73F-8AA488A36A7C}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.TimelineIndexer.Worker", "StellaOps.TimelineIndexer\StellaOps.TimelineIndexer.Worker\StellaOps.TimelineIndexer.Worker.csproj", "{36D5F842-A488-4461-A4FF-83343B2B2B4F}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|Any CPU = Release|Any CPU + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {1B9BEA29-7A0C-437D-A82A-78CB0046C0CB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {1B9BEA29-7A0C-437D-A82A-78CB0046C0CB}.Debug|Any CPU.Build.0 = Debug|Any CPU + {1B9BEA29-7A0C-437D-A82A-78CB0046C0CB}.Debug|x64.ActiveCfg = Debug|Any CPU + {1B9BEA29-7A0C-437D-A82A-78CB0046C0CB}.Debug|x64.Build.0 = Debug|Any CPU + {1B9BEA29-7A0C-437D-A82A-78CB0046C0CB}.Debug|x86.ActiveCfg = Debug|Any CPU + {1B9BEA29-7A0C-437D-A82A-78CB0046C0CB}.Debug|x86.Build.0 = Debug|Any CPU + {1B9BEA29-7A0C-437D-A82A-78CB0046C0CB}.Release|Any CPU.ActiveCfg = Release|Any CPU + {1B9BEA29-7A0C-437D-A82A-78CB0046C0CB}.Release|Any CPU.Build.0 = Release|Any CPU + {1B9BEA29-7A0C-437D-A82A-78CB0046C0CB}.Release|x64.ActiveCfg = Release|Any CPU + {1B9BEA29-7A0C-437D-A82A-78CB0046C0CB}.Release|x64.Build.0 = Release|Any CPU + {1B9BEA29-7A0C-437D-A82A-78CB0046C0CB}.Release|x86.ActiveCfg = Release|Any 
CPU + {1B9BEA29-7A0C-437D-A82A-78CB0046C0CB}.Release|x86.Build.0 = Release|Any CPU + {E32BB3F0-E40E-493A-AC32-D7CF54E948C8}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E32BB3F0-E40E-493A-AC32-D7CF54E948C8}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E32BB3F0-E40E-493A-AC32-D7CF54E948C8}.Debug|x64.ActiveCfg = Debug|Any CPU + {E32BB3F0-E40E-493A-AC32-D7CF54E948C8}.Debug|x64.Build.0 = Debug|Any CPU + {E32BB3F0-E40E-493A-AC32-D7CF54E948C8}.Debug|x86.ActiveCfg = Debug|Any CPU + {E32BB3F0-E40E-493A-AC32-D7CF54E948C8}.Debug|x86.Build.0 = Debug|Any CPU + {E32BB3F0-E40E-493A-AC32-D7CF54E948C8}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E32BB3F0-E40E-493A-AC32-D7CF54E948C8}.Release|Any CPU.Build.0 = Release|Any CPU + {E32BB3F0-E40E-493A-AC32-D7CF54E948C8}.Release|x64.ActiveCfg = Release|Any CPU + {E32BB3F0-E40E-493A-AC32-D7CF54E948C8}.Release|x64.Build.0 = Release|Any CPU + {E32BB3F0-E40E-493A-AC32-D7CF54E948C8}.Release|x86.ActiveCfg = Release|Any CPU + {E32BB3F0-E40E-493A-AC32-D7CF54E948C8}.Release|x86.Build.0 = Release|Any CPU + {21CA5727-7929-4B5D-AD7F-46EBE42A9F8E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {21CA5727-7929-4B5D-AD7F-46EBE42A9F8E}.Debug|Any CPU.Build.0 = Debug|Any CPU + {21CA5727-7929-4B5D-AD7F-46EBE42A9F8E}.Debug|x64.ActiveCfg = Debug|Any CPU + {21CA5727-7929-4B5D-AD7F-46EBE42A9F8E}.Debug|x64.Build.0 = Debug|Any CPU + {21CA5727-7929-4B5D-AD7F-46EBE42A9F8E}.Debug|x86.ActiveCfg = Debug|Any CPU + {21CA5727-7929-4B5D-AD7F-46EBE42A9F8E}.Debug|x86.Build.0 = Debug|Any CPU + {21CA5727-7929-4B5D-AD7F-46EBE42A9F8E}.Release|Any CPU.ActiveCfg = Release|Any CPU + {21CA5727-7929-4B5D-AD7F-46EBE42A9F8E}.Release|Any CPU.Build.0 = Release|Any CPU + {21CA5727-7929-4B5D-AD7F-46EBE42A9F8E}.Release|x64.ActiveCfg = Release|Any CPU + {21CA5727-7929-4B5D-AD7F-46EBE42A9F8E}.Release|x64.Build.0 = Release|Any CPU + {21CA5727-7929-4B5D-AD7F-46EBE42A9F8E}.Release|x86.ActiveCfg = Release|Any CPU + {21CA5727-7929-4B5D-AD7F-46EBE42A9F8E}.Release|x86.Build.0 = Release|Any CPU + {A9FE403A-A221-41B0-B73F-8AA488A36A7C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {A9FE403A-A221-41B0-B73F-8AA488A36A7C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {A9FE403A-A221-41B0-B73F-8AA488A36A7C}.Debug|x64.ActiveCfg = Debug|Any CPU + {A9FE403A-A221-41B0-B73F-8AA488A36A7C}.Debug|x64.Build.0 = Debug|Any CPU + {A9FE403A-A221-41B0-B73F-8AA488A36A7C}.Debug|x86.ActiveCfg = Debug|Any CPU + {A9FE403A-A221-41B0-B73F-8AA488A36A7C}.Debug|x86.Build.0 = Debug|Any CPU + {A9FE403A-A221-41B0-B73F-8AA488A36A7C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {A9FE403A-A221-41B0-B73F-8AA488A36A7C}.Release|Any CPU.Build.0 = Release|Any CPU + {A9FE403A-A221-41B0-B73F-8AA488A36A7C}.Release|x64.ActiveCfg = Release|Any CPU + {A9FE403A-A221-41B0-B73F-8AA488A36A7C}.Release|x64.Build.0 = Release|Any CPU + {A9FE403A-A221-41B0-B73F-8AA488A36A7C}.Release|x86.ActiveCfg = Release|Any CPU + {A9FE403A-A221-41B0-B73F-8AA488A36A7C}.Release|x86.Build.0 = Release|Any CPU + {36D5F842-A488-4461-A4FF-83343B2B2B4F}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {36D5F842-A488-4461-A4FF-83343B2B2B4F}.Debug|Any CPU.Build.0 = Debug|Any CPU + {36D5F842-A488-4461-A4FF-83343B2B2B4F}.Debug|x64.ActiveCfg = Debug|Any CPU + {36D5F842-A488-4461-A4FF-83343B2B2B4F}.Debug|x64.Build.0 = Debug|Any CPU + {36D5F842-A488-4461-A4FF-83343B2B2B4F}.Debug|x86.ActiveCfg = Debug|Any CPU + {36D5F842-A488-4461-A4FF-83343B2B2B4F}.Debug|x86.Build.0 = Debug|Any CPU + {36D5F842-A488-4461-A4FF-83343B2B2B4F}.Release|Any CPU.ActiveCfg = Release|Any CPU + {36D5F842-A488-4461-A4FF-83343B2B2B4F}.Release|Any CPU.Build.0 = 
Release|Any CPU + {36D5F842-A488-4461-A4FF-83343B2B2B4F}.Release|x64.ActiveCfg = Release|Any CPU + {36D5F842-A488-4461-A4FF-83343B2B2B4F}.Release|x64.Build.0 = Release|Any CPU + {36D5F842-A488-4461-A4FF-83343B2B2B4F}.Release|x86.ActiveCfg = Release|Any CPU + {36D5F842-A488-4461-A4FF-83343B2B2B4F}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(NestedProjects) = preSolution + {1B9BEA29-7A0C-437D-A82A-78CB0046C0CB} = {9BEC1B06-C3A6-BC60-1909-5103688B5A4A} + {E32BB3F0-E40E-493A-AC32-D7CF54E948C8} = {9BEC1B06-C3A6-BC60-1909-5103688B5A4A} + {21CA5727-7929-4B5D-AD7F-46EBE42A9F8E} = {9BEC1B06-C3A6-BC60-1909-5103688B5A4A} + {A9FE403A-A221-41B0-B73F-8AA488A36A7C} = {9BEC1B06-C3A6-BC60-1909-5103688B5A4A} + {36D5F842-A488-4461-A4FF-83343B2B2B4F} = {9BEC1B06-C3A6-BC60-1909-5103688B5A4A} + EndGlobalSection +EndGlobal diff --git a/src/StellaOps.TimelineIndexer/AGENTS.md b/src/TimelineIndexer/StellaOps.TimelineIndexer/AGENTS.md similarity index 98% rename from src/StellaOps.TimelineIndexer/AGENTS.md rename to src/TimelineIndexer/StellaOps.TimelineIndexer/AGENTS.md index 40d08da1..4622d909 100644 --- a/src/StellaOps.TimelineIndexer/AGENTS.md +++ b/src/TimelineIndexer/StellaOps.TimelineIndexer/AGENTS.md @@ -1,28 +1,28 @@ -# Tenant Timeline Indexer — Agent Charter - -## Mission -Build the tenant-scoped timeline ingestion and query service described in Epic 15. Consume structured timeline events from all services, maintain queryable indices, and expose APIs to Console and CLI without violating imposed rule guarantees. - -## Responsibilities -- Define Postgres schema, RLS policies, and ingestion pipelines for `timeline_events`. -- Provide event consumers for NATS/Redis queues with dedupe + ordering logic. -- Serve REST/gRPC APIs powering Console Forensics Explorer and CLI `stella obs trace`/`timeline` flows. -- Emit metrics/traces/logs for ingestion health and query performance. - -## Collaboration -- Coordinate with Telemetry Core for event schema definitions. -- Work with Evidence Locker to link events to evidence bundle digests. -- Align with Authority on new `timeline:read` scopes and tenant enforcement. - -## Definition of Done -- Service ships with deterministic migrations + repeatable seeds. -- Integration tests replay recorded event fixtures to stable results. -- Docs updated under `/docs/forensics/timeline.md` per release. - -## Module Layout -- `StellaOps.TimelineIndexer.Core/` — event models, ordering/dedupe logic, query contracts. -- `StellaOps.TimelineIndexer.Infrastructure/` — Postgres/NATS clients, persistence abstractions. -- `StellaOps.TimelineIndexer.WebService/` — query/lookup APIs and authentication glue. -- `StellaOps.TimelineIndexer.Worker/` — ingestion consumers and background compaction jobs. -- `StellaOps.TimelineIndexer.Tests/` — unit tests focused on ordering/dedupe/query correctness. -- `StellaOps.TimelineIndexer.sln` — solution aggregating module projects. +# Tenant Timeline Indexer — Agent Charter + +## Mission +Build the tenant-scoped timeline ingestion and query service described in Epic 15. Consume structured timeline events from all services, maintain queryable indices, and expose APIs to Console and CLI without violating imposed rule guarantees. + +## Responsibilities +- Define Postgres schema, RLS policies, and ingestion pipelines for `timeline_events`. +- Provide event consumers for NATS/Redis queues with dedupe + ordering logic. 
+- Serve REST/gRPC APIs powering Console Forensics Explorer and CLI `stella obs trace`/`timeline` flows. +- Emit metrics/traces/logs for ingestion health and query performance. + +## Collaboration +- Coordinate with Telemetry Core for event schema definitions. +- Work with Evidence Locker to link events to evidence bundle digests. +- Align with Authority on new `timeline:read` scopes and tenant enforcement. + +## Definition of Done +- Service ships with deterministic migrations + repeatable seeds. +- Integration tests replay recorded event fixtures to stable results. +- Docs updated under `/docs/forensics/timeline.md` per release. + +## Module Layout +- `StellaOps.TimelineIndexer.Core/` — event models, ordering/dedupe logic, query contracts. +- `StellaOps.TimelineIndexer.Infrastructure/` — Postgres/NATS clients, persistence abstractions. +- `StellaOps.TimelineIndexer.WebService/` — query/lookup APIs and authentication glue. +- `StellaOps.TimelineIndexer.Worker/` — ingestion consumers and background compaction jobs. +- `StellaOps.TimelineIndexer.Tests/` — unit tests focused on ordering/dedupe/query correctness. +- `StellaOps.TimelineIndexer.sln` — solution aggregating module projects. diff --git a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Core/Class1.cs b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Core/Class1.cs similarity index 92% rename from src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Core/Class1.cs rename to src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Core/Class1.cs index 6ec0050c..3d154bf0 100644 --- a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Core/Class1.cs +++ b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Core/Class1.cs @@ -1,6 +1,6 @@ -namespace StellaOps.TimelineIndexer.Core; - -public class Class1 -{ - -} +namespace StellaOps.TimelineIndexer.Core; + +public class Class1 +{ + +} diff --git a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Core/StellaOps.TimelineIndexer.Core.csproj b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Core/StellaOps.TimelineIndexer.Core.csproj similarity index 95% rename from src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Core/StellaOps.TimelineIndexer.Core.csproj rename to src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Core/StellaOps.TimelineIndexer.Core.csproj index fe0eef44..e4808f0d 100644 --- a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Core/StellaOps.TimelineIndexer.Core.csproj +++ b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Core/StellaOps.TimelineIndexer.Core.csproj @@ -1,18 +1,18 @@ -<?xml version="1.0" ?> -<Project Sdk="Microsoft.NET.Sdk"> - - - - <PropertyGroup> - - - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <LangVersion>preview</LangVersion> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - - - -</Project> +<?xml version="1.0" ?> +<Project Sdk="Microsoft.NET.Sdk"> + + + + <PropertyGroup> + + + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <LangVersion>preview</LangVersion> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + + + +</Project> diff --git a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Infrastructure/Class1.cs 
b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Infrastructure/Class1.cs similarity index 93% rename from src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Infrastructure/Class1.cs rename to src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Infrastructure/Class1.cs index dd0e73ad..c5228dc5 100644 --- a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Infrastructure/Class1.cs +++ b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Infrastructure/Class1.cs @@ -1,6 +1,6 @@ -namespace StellaOps.TimelineIndexer.Infrastructure; - -public class Class1 -{ - -} +namespace StellaOps.TimelineIndexer.Infrastructure; + +public class Class1 +{ + +} diff --git a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Infrastructure/StellaOps.TimelineIndexer.Infrastructure.csproj b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Infrastructure/StellaOps.TimelineIndexer.Infrastructure.csproj similarity index 94% rename from src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Infrastructure/StellaOps.TimelineIndexer.Infrastructure.csproj rename to src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Infrastructure/StellaOps.TimelineIndexer.Infrastructure.csproj index 99490b67..a6f6ae3e 100644 --- a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Infrastructure/StellaOps.TimelineIndexer.Infrastructure.csproj +++ b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Infrastructure/StellaOps.TimelineIndexer.Infrastructure.csproj @@ -1,28 +1,28 @@ -<?xml version="1.0" ?> -<Project Sdk="Microsoft.NET.Sdk"> - - - - <ItemGroup> - - - <ProjectReference Include="..\StellaOps.TimelineIndexer.Core\StellaOps.TimelineIndexer.Core.csproj"/> - - - </ItemGroup> - - - - <PropertyGroup> - - - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <LangVersion>preview</LangVersion> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - - - -</Project> +<?xml version="1.0" ?> +<Project Sdk="Microsoft.NET.Sdk"> + + + + <ItemGroup> + + + <ProjectReference Include="..\StellaOps.TimelineIndexer.Core\StellaOps.TimelineIndexer.Core.csproj"/> + + + </ItemGroup> + + + + <PropertyGroup> + + + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <LangVersion>preview</LangVersion> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + + + +</Project> diff --git a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Tests/StellaOps.TimelineIndexer.Tests.csproj b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Tests/StellaOps.TimelineIndexer.Tests.csproj similarity index 91% rename from src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Tests/StellaOps.TimelineIndexer.Tests.csproj rename to src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Tests/StellaOps.TimelineIndexer.Tests.csproj index 0dc7d02f..bbd3dc83 100644 --- a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Tests/StellaOps.TimelineIndexer.Tests.csproj +++ b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Tests/StellaOps.TimelineIndexer.Tests.csproj @@ -1,135 +1,135 @@ -<?xml version="1.0" ?> -<Project Sdk="Microsoft.NET.Sdk"> - - - - - - <PropertyGroup> - - - - - <OutputType>Exe</OutputType> - - - - - <IsPackable>false</IsPackable> - - - - - - - - - - - - - - 
<TargetFramework>net10.0</TargetFramework> - - - <ImplicitUsings>enable</ImplicitUsings> - - - <Nullable>enable</Nullable> - - - <UseConcelierTestInfra>false</UseConcelierTestInfra> - - - <LangVersion>preview</LangVersion> - - - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - - - </PropertyGroup> - - - - - - <ItemGroup> - - - - - <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1"/> - - - - - <PackageReference Include="xunit.v3" Version="3.0.0"/> - - - - - <PackageReference Include="xunit.runner.visualstudio" Version="3.1.3"/> - - - - - </ItemGroup> - - - - - - <ItemGroup> - - - - - <Content Include="xunit.runner.json" CopyToOutputDirectory="PreserveNewest"/> - - - - - </ItemGroup> - - - - - - <ItemGroup> - - - - - <Using Include="Xunit"/> - - - - - </ItemGroup> - - - - - - <ItemGroup> - - - - - <ProjectReference Include="..\StellaOps.TimelineIndexer.Core\StellaOps.TimelineIndexer.Core.csproj"/> - - - - - <ProjectReference Include="..\StellaOps.TimelineIndexer.Infrastructure\StellaOps.TimelineIndexer.Infrastructure.csproj"/> - - - - - </ItemGroup> - - - - - -</Project> +<?xml version="1.0" ?> +<Project Sdk="Microsoft.NET.Sdk"> + + + + + + <PropertyGroup> + + + + + <OutputType>Exe</OutputType> + + + + + <IsPackable>false</IsPackable> + + + + + + + + + + + + + + <TargetFramework>net10.0</TargetFramework> + + + <ImplicitUsings>enable</ImplicitUsings> + + + <Nullable>enable</Nullable> + + + <UseConcelierTestInfra>false</UseConcelierTestInfra> + + + <LangVersion>preview</LangVersion> + + + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + + + </PropertyGroup> + + + + + + <ItemGroup> + + + + + <PackageReference Include="Microsoft.NET.Test.Sdk" Version="17.14.1"/> + + + + + <PackageReference Include="xunit.v3" Version="3.0.0"/> + + + + + <PackageReference Include="xunit.runner.visualstudio" Version="3.1.3"/> + + + + + </ItemGroup> + + + + + + <ItemGroup> + + + + + <Content Include="xunit.runner.json" CopyToOutputDirectory="PreserveNewest"/> + + + + + </ItemGroup> + + + + + + <ItemGroup> + + + + + <Using Include="Xunit"/> + + + + + </ItemGroup> + + + + + + <ItemGroup> + + + + + <ProjectReference Include="..\StellaOps.TimelineIndexer.Core\StellaOps.TimelineIndexer.Core.csproj"/> + + + + + <ProjectReference Include="..\StellaOps.TimelineIndexer.Infrastructure\StellaOps.TimelineIndexer.Infrastructure.csproj"/> + + + + + </ItemGroup> + + + + + +</Project> diff --git a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Tests/UnitTest1.cs b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Tests/UnitTest1.cs similarity index 92% rename from src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Tests/UnitTest1.cs rename to src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Tests/UnitTest1.cs index d624cb6d..ff525900 100644 --- a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Tests/UnitTest1.cs +++ b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Tests/UnitTest1.cs @@ -1,10 +1,10 @@ -namespace StellaOps.TimelineIndexer.Tests; - -public class UnitTest1 -{ - [Fact] - public void Test1() - { - - } -} +namespace StellaOps.TimelineIndexer.Tests; + +public class UnitTest1 +{ + [Fact] + public void Test1() + { + + } +} diff --git a/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Tests/xunit.runner.json b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Tests/xunit.runner.json new file mode 100644 index 00000000..249d815c --- /dev/null +++ 
b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Tests/xunit.runner.json @@ -0,0 +1,3 @@ +{ + "$schema": "https://xunit.net/schema/current/xunit.runner.schema.json" +} diff --git a/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/Program.cs b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/Program.cs new file mode 100644 index 00000000..3917ef1b --- /dev/null +++ b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/Program.cs @@ -0,0 +1,41 @@ +var builder = WebApplication.CreateBuilder(args); + +// Add services to the container. +// Learn more about configuring OpenAPI at https://aka.ms/aspnet/openapi +builder.Services.AddOpenApi(); + +var app = builder.Build(); + +// Configure the HTTP request pipeline. +if (app.Environment.IsDevelopment()) +{ + app.MapOpenApi(); +} + +app.UseHttpsRedirection(); + +var summaries = new[] +{ + "Freezing", "Bracing", "Chilly", "Cool", "Mild", "Warm", "Balmy", "Hot", "Sweltering", "Scorching" +}; + +app.MapGet("/weatherforecast", () => +{ + var forecast = Enumerable.Range(1, 5).Select(index => + new WeatherForecast + ( + DateOnly.FromDateTime(DateTime.Now.AddDays(index)), + Random.Shared.Next(-20, 55), + summaries[Random.Shared.Next(summaries.Length)] + )) + .ToArray(); + return forecast; +}) +.WithName("GetWeatherForecast"); + +app.Run(); + +record WeatherForecast(DateOnly Date, int TemperatureC, string? Summary) +{ + public int TemperatureF => 32 + (int)(TemperatureC / 0.5556); +} diff --git a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/Properties/launchSettings.json b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/Properties/launchSettings.json similarity index 96% rename from src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/Properties/launchSettings.json rename to src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/Properties/launchSettings.json index 70e1a4ae..6134cd06 100644 --- a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/Properties/launchSettings.json +++ b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/Properties/launchSettings.json @@ -1,23 +1,23 @@ -{ - "$schema": "https://json.schemastore.org/launchsettings.json", - "profiles": { - "http": { - "commandName": "Project", - "dotnetRunMessages": true, - "launchBrowser": false, - "applicationUrl": "http://localhost:5194", - "environmentVariables": { - "ASPNETCORE_ENVIRONMENT": "Development" - } - }, - "https": { - "commandName": "Project", - "dotnetRunMessages": true, - "launchBrowser": false, - "applicationUrl": "https://localhost:7272;http://localhost:5194", - "environmentVariables": { - "ASPNETCORE_ENVIRONMENT": "Development" - } - } - } -} +{ + "$schema": "https://json.schemastore.org/launchsettings.json", + "profiles": { + "http": { + "commandName": "Project", + "dotnetRunMessages": true, + "launchBrowser": false, + "applicationUrl": "http://localhost:5194", + "environmentVariables": { + "ASPNETCORE_ENVIRONMENT": "Development" + } + }, + "https": { + "commandName": "Project", + "dotnetRunMessages": true, + "launchBrowser": false, + "applicationUrl": "https://localhost:7272;http://localhost:5194", + "environmentVariables": { + "ASPNETCORE_ENVIRONMENT": "Development" + } + } + } +} diff --git a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/StellaOps.TimelineIndexer.WebService.csproj 
b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/StellaOps.TimelineIndexer.WebService.csproj similarity index 95% rename from src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/StellaOps.TimelineIndexer.WebService.csproj rename to src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/StellaOps.TimelineIndexer.WebService.csproj index c250b20e..dc2cc62a 100644 --- a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/StellaOps.TimelineIndexer.WebService.csproj +++ b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/StellaOps.TimelineIndexer.WebService.csproj @@ -1,41 +1,41 @@ -<?xml version="1.0" ?> -<Project Sdk="Microsoft.NET.Sdk.Web"> - - - - <PropertyGroup> - - - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <LangVersion>preview</LangVersion> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - - - - <ItemGroup> - - - <PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="10.0.0-rc.2.25502.107"/> - - - </ItemGroup> - - - - <ItemGroup> - - - <ProjectReference Include="..\StellaOps.TimelineIndexer.Core\StellaOps.TimelineIndexer.Core.csproj"/> - - - <ProjectReference Include="..\StellaOps.TimelineIndexer.Infrastructure\StellaOps.TimelineIndexer.Infrastructure.csproj"/> - - - </ItemGroup> - - - -</Project> +<?xml version="1.0" ?> +<Project Sdk="Microsoft.NET.Sdk.Web"> + + + + <PropertyGroup> + + + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <LangVersion>preview</LangVersion> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + + + + <ItemGroup> + + + <PackageReference Include="Microsoft.AspNetCore.OpenApi" Version="10.0.0-rc.2.25502.107"/> + + + </ItemGroup> + + + + <ItemGroup> + + + <ProjectReference Include="..\StellaOps.TimelineIndexer.Core\StellaOps.TimelineIndexer.Core.csproj"/> + + + <ProjectReference Include="..\StellaOps.TimelineIndexer.Infrastructure\StellaOps.TimelineIndexer.Infrastructure.csproj"/> + + + </ItemGroup> + + + +</Project> diff --git a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/StellaOps.TimelineIndexer.WebService.http b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/StellaOps.TimelineIndexer.WebService.http similarity index 96% rename from src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/StellaOps.TimelineIndexer.WebService.http rename to src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/StellaOps.TimelineIndexer.WebService.http index 9aad74ef..7f7950a4 100644 --- a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/StellaOps.TimelineIndexer.WebService.http +++ b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/StellaOps.TimelineIndexer.WebService.http @@ -1,6 +1,6 @@ -@StellaOps.TimelineIndexer.WebService_HostAddress = http://localhost:5194 - -GET {{StellaOps.TimelineIndexer.WebService_HostAddress}}/weatherforecast/ -Accept: application/json - -### +@StellaOps.TimelineIndexer.WebService_HostAddress = http://localhost:5194 + +GET {{StellaOps.TimelineIndexer.WebService_HostAddress}}/weatherforecast/ +Accept: application/json + +### diff --git a/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/appsettings.Development.json 
b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/appsettings.Development.json new file mode 100644 index 00000000..ff66ba6b --- /dev/null +++ b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/appsettings.Development.json @@ -0,0 +1,8 @@ +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.AspNetCore": "Warning" + } + } +} diff --git a/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/appsettings.json b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/appsettings.json new file mode 100644 index 00000000..4d566948 --- /dev/null +++ b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.WebService/appsettings.json @@ -0,0 +1,9 @@ +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.AspNetCore": "Warning" + } + }, + "AllowedHosts": "*" +} diff --git a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/Program.cs b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/Program.cs similarity index 96% rename from src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/Program.cs rename to src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/Program.cs index 13c4cedf..f7b067d1 100644 --- a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/Program.cs +++ b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/Program.cs @@ -1,7 +1,7 @@ -using StellaOps.TimelineIndexer.Worker; - -var builder = Host.CreateApplicationBuilder(args); -builder.Services.AddHostedService<Worker>(); - -var host = builder.Build(); -host.Run(); +using StellaOps.TimelineIndexer.Worker; + +var builder = Host.CreateApplicationBuilder(args); +builder.Services.AddHostedService<Worker>(); + +var host = builder.Build(); +host.Run(); diff --git a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/Properties/launchSettings.json b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/Properties/launchSettings.json similarity index 96% rename from src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/Properties/launchSettings.json rename to src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/Properties/launchSettings.json index 69dd56fd..96c55ecb 100644 --- a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/Properties/launchSettings.json +++ b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/Properties/launchSettings.json @@ -1,12 +1,12 @@ -{ - "$schema": "https://json.schemastore.org/launchsettings.json", - "profiles": { - "StellaOps.TimelineIndexer.Worker": { - "commandName": "Project", - "dotnetRunMessages": true, - "environmentVariables": { - "DOTNET_ENVIRONMENT": "Development" - } - } - } -} +{ + "$schema": "https://json.schemastore.org/launchsettings.json", + "profiles": { + "StellaOps.TimelineIndexer.Worker": { + "commandName": "Project", + "dotnetRunMessages": true, + "environmentVariables": { + "DOTNET_ENVIRONMENT": "Development" + } + } + } +} diff --git a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/StellaOps.TimelineIndexer.Worker.csproj b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/StellaOps.TimelineIndexer.Worker.csproj similarity index 95% rename from src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/StellaOps.TimelineIndexer.Worker.csproj 
rename to src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/StellaOps.TimelineIndexer.Worker.csproj index 14ea452e..2c6d6a69 100644 --- a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/StellaOps.TimelineIndexer.Worker.csproj +++ b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/StellaOps.TimelineIndexer.Worker.csproj @@ -1,43 +1,43 @@ -<?xml version="1.0" ?> -<Project Sdk="Microsoft.NET.Sdk.Worker"> - - - - <PropertyGroup> - - - <UserSecretsId>dotnet-StellaOps.TimelineIndexer.Worker-f6dbdeac-9eb5-4250-9384-ef93fc70f770</UserSecretsId> - - - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <LangVersion>preview</LangVersion> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - - - - <ItemGroup> - - - <PackageReference Include="Microsoft.Extensions.Hosting" Version="10.0.0-rc.2.25502.107"/> - - - </ItemGroup> - - - - <ItemGroup> - - - <ProjectReference Include="..\StellaOps.TimelineIndexer.Core\StellaOps.TimelineIndexer.Core.csproj"/> - - - <ProjectReference Include="..\StellaOps.TimelineIndexer.Infrastructure\StellaOps.TimelineIndexer.Infrastructure.csproj"/> - - - </ItemGroup> - - -</Project> +<?xml version="1.0" ?> +<Project Sdk="Microsoft.NET.Sdk.Worker"> + + + + <PropertyGroup> + + + <UserSecretsId>dotnet-StellaOps.TimelineIndexer.Worker-f6dbdeac-9eb5-4250-9384-ef93fc70f770</UserSecretsId> + + + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <LangVersion>preview</LangVersion> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + + + + <ItemGroup> + + + <PackageReference Include="Microsoft.Extensions.Hosting" Version="10.0.0-rc.2.25502.107"/> + + + </ItemGroup> + + + + <ItemGroup> + + + <ProjectReference Include="..\StellaOps.TimelineIndexer.Core\StellaOps.TimelineIndexer.Core.csproj"/> + + + <ProjectReference Include="..\StellaOps.TimelineIndexer.Infrastructure\StellaOps.TimelineIndexer.Infrastructure.csproj"/> + + + </ItemGroup> + + +</Project> diff --git a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/Worker.cs b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/Worker.cs similarity index 96% rename from src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/Worker.cs rename to src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/Worker.cs index 146eb37d..e21f75bc 100644 --- a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/Worker.cs +++ b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/Worker.cs @@ -1,16 +1,16 @@ -namespace StellaOps.TimelineIndexer.Worker; - -public class Worker(ILogger<Worker> logger) : BackgroundService -{ - protected override async Task ExecuteAsync(CancellationToken stoppingToken) - { - while (!stoppingToken.IsCancellationRequested) - { - if (logger.IsEnabled(LogLevel.Information)) - { - logger.LogInformation("Worker running at: {time}", DateTimeOffset.Now); - } - await Task.Delay(1000, stoppingToken); - } - } -} +namespace StellaOps.TimelineIndexer.Worker; + +public class Worker(ILogger<Worker> logger) : BackgroundService +{ + protected override async Task ExecuteAsync(CancellationToken stoppingToken) + { + while (!stoppingToken.IsCancellationRequested) + { + if (logger.IsEnabled(LogLevel.Information)) + { + logger.LogInformation("Worker running at: {time}", DateTimeOffset.Now); + } + 
await Task.Delay(1000, stoppingToken); + } + } +} diff --git a/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/appsettings.Development.json b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/appsettings.Development.json new file mode 100644 index 00000000..69017646 --- /dev/null +++ b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/appsettings.Development.json @@ -0,0 +1,8 @@ +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.Hosting.Lifetime": "Information" + } + } +} diff --git a/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/appsettings.json b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/appsettings.json new file mode 100644 index 00000000..69017646 --- /dev/null +++ b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.Worker/appsettings.json @@ -0,0 +1,8 @@ +{ + "Logging": { + "LogLevel": { + "Default": "Information", + "Microsoft.Hosting.Lifetime": "Information" + } + } +} diff --git a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.sln b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.sln similarity index 98% rename from src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.sln rename to src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.sln index e46f8c80..a17660fe 100644 --- a/src/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.sln +++ b/src/TimelineIndexer/StellaOps.TimelineIndexer/StellaOps.TimelineIndexer.sln @@ -1,90 +1,90 @@ - -Microsoft Visual Studio Solution File, Format Version 12.00 -# Visual Studio Version 17 -VisualStudioVersion = 17.0.31903.59 -MinimumVisualStudioVersion = 10.0.40219.1 -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.TimelineIndexer.Core", "StellaOps.TimelineIndexer.Core\StellaOps.TimelineIndexer.Core.csproj", "{C8959267-ACDD-49E9-B1FD-9694C8663437}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.TimelineIndexer.Infrastructure", "StellaOps.TimelineIndexer.Infrastructure\StellaOps.TimelineIndexer.Infrastructure.csproj", "{185CEED8-197F-4236-8716-73B37C5F355A}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.TimelineIndexer.WebService", "StellaOps.TimelineIndexer.WebService\StellaOps.TimelineIndexer.WebService.csproj", "{991C4CD2-F5D2-4AB7-83A5-EF4E60B61A86}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.TimelineIndexer.Worker", "StellaOps.TimelineIndexer.Worker\StellaOps.TimelineIndexer.Worker.csproj", "{B8F1FE1E-7730-431D-B058-9C7A50463F91}" -EndProject -Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.TimelineIndexer.Tests", "StellaOps.TimelineIndexer.Tests\StellaOps.TimelineIndexer.Tests.csproj", "{AA20938D-A0AC-4E37-B7D9-002C6DD90FEC}" -EndProject -Global - GlobalSection(SolutionConfigurationPlatforms) = preSolution - Debug|Any CPU = Debug|Any CPU - Debug|x64 = Debug|x64 - Debug|x86 = Debug|x86 - Release|Any CPU = Release|Any CPU - Release|x64 = Release|x64 - Release|x86 = Release|x86 - EndGlobalSection - GlobalSection(ProjectConfigurationPlatforms) = postSolution - {C8959267-ACDD-49E9-B1FD-9694C8663437}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {C8959267-ACDD-49E9-B1FD-9694C8663437}.Debug|Any CPU.Build.0 = Debug|Any CPU - {C8959267-ACDD-49E9-B1FD-9694C8663437}.Debug|x64.ActiveCfg = Debug|Any CPU - {C8959267-ACDD-49E9-B1FD-9694C8663437}.Debug|x64.Build.0 = Debug|Any CPU - 
{C8959267-ACDD-49E9-B1FD-9694C8663437}.Debug|x86.ActiveCfg = Debug|Any CPU - {C8959267-ACDD-49E9-B1FD-9694C8663437}.Debug|x86.Build.0 = Debug|Any CPU - {C8959267-ACDD-49E9-B1FD-9694C8663437}.Release|Any CPU.ActiveCfg = Release|Any CPU - {C8959267-ACDD-49E9-B1FD-9694C8663437}.Release|Any CPU.Build.0 = Release|Any CPU - {C8959267-ACDD-49E9-B1FD-9694C8663437}.Release|x64.ActiveCfg = Release|Any CPU - {C8959267-ACDD-49E9-B1FD-9694C8663437}.Release|x64.Build.0 = Release|Any CPU - {C8959267-ACDD-49E9-B1FD-9694C8663437}.Release|x86.ActiveCfg = Release|Any CPU - {C8959267-ACDD-49E9-B1FD-9694C8663437}.Release|x86.Build.0 = Release|Any CPU - {185CEED8-197F-4236-8716-73B37C5F355A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {185CEED8-197F-4236-8716-73B37C5F355A}.Debug|Any CPU.Build.0 = Debug|Any CPU - {185CEED8-197F-4236-8716-73B37C5F355A}.Debug|x64.ActiveCfg = Debug|Any CPU - {185CEED8-197F-4236-8716-73B37C5F355A}.Debug|x64.Build.0 = Debug|Any CPU - {185CEED8-197F-4236-8716-73B37C5F355A}.Debug|x86.ActiveCfg = Debug|Any CPU - {185CEED8-197F-4236-8716-73B37C5F355A}.Debug|x86.Build.0 = Debug|Any CPU - {185CEED8-197F-4236-8716-73B37C5F355A}.Release|Any CPU.ActiveCfg = Release|Any CPU - {185CEED8-197F-4236-8716-73B37C5F355A}.Release|Any CPU.Build.0 = Release|Any CPU - {185CEED8-197F-4236-8716-73B37C5F355A}.Release|x64.ActiveCfg = Release|Any CPU - {185CEED8-197F-4236-8716-73B37C5F355A}.Release|x64.Build.0 = Release|Any CPU - {185CEED8-197F-4236-8716-73B37C5F355A}.Release|x86.ActiveCfg = Release|Any CPU - {185CEED8-197F-4236-8716-73B37C5F355A}.Release|x86.Build.0 = Release|Any CPU - {991C4CD2-F5D2-4AB7-83A5-EF4E60B61A86}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {991C4CD2-F5D2-4AB7-83A5-EF4E60B61A86}.Debug|Any CPU.Build.0 = Debug|Any CPU - {991C4CD2-F5D2-4AB7-83A5-EF4E60B61A86}.Debug|x64.ActiveCfg = Debug|Any CPU - {991C4CD2-F5D2-4AB7-83A5-EF4E60B61A86}.Debug|x64.Build.0 = Debug|Any CPU - {991C4CD2-F5D2-4AB7-83A5-EF4E60B61A86}.Debug|x86.ActiveCfg = Debug|Any CPU - {991C4CD2-F5D2-4AB7-83A5-EF4E60B61A86}.Debug|x86.Build.0 = Debug|Any CPU - {991C4CD2-F5D2-4AB7-83A5-EF4E60B61A86}.Release|Any CPU.ActiveCfg = Release|Any CPU - {991C4CD2-F5D2-4AB7-83A5-EF4E60B61A86}.Release|Any CPU.Build.0 = Release|Any CPU - {991C4CD2-F5D2-4AB7-83A5-EF4E60B61A86}.Release|x64.ActiveCfg = Release|Any CPU - {991C4CD2-F5D2-4AB7-83A5-EF4E60B61A86}.Release|x64.Build.0 = Release|Any CPU - {991C4CD2-F5D2-4AB7-83A5-EF4E60B61A86}.Release|x86.ActiveCfg = Release|Any CPU - {991C4CD2-F5D2-4AB7-83A5-EF4E60B61A86}.Release|x86.Build.0 = Release|Any CPU - {B8F1FE1E-7730-431D-B058-9C7A50463F91}.Debug|Any CPU.ActiveCfg = Debug|Any CPU - {B8F1FE1E-7730-431D-B058-9C7A50463F91}.Debug|Any CPU.Build.0 = Debug|Any CPU - {B8F1FE1E-7730-431D-B058-9C7A50463F91}.Debug|x64.ActiveCfg = Debug|Any CPU - {B8F1FE1E-7730-431D-B058-9C7A50463F91}.Debug|x64.Build.0 = Debug|Any CPU - {B8F1FE1E-7730-431D-B058-9C7A50463F91}.Debug|x86.ActiveCfg = Debug|Any CPU - {B8F1FE1E-7730-431D-B058-9C7A50463F91}.Debug|x86.Build.0 = Debug|Any CPU - {B8F1FE1E-7730-431D-B058-9C7A50463F91}.Release|Any CPU.ActiveCfg = Release|Any CPU - {B8F1FE1E-7730-431D-B058-9C7A50463F91}.Release|Any CPU.Build.0 = Release|Any CPU - {B8F1FE1E-7730-431D-B058-9C7A50463F91}.Release|x64.ActiveCfg = Release|Any CPU - {B8F1FE1E-7730-431D-B058-9C7A50463F91}.Release|x64.Build.0 = Release|Any CPU - {B8F1FE1E-7730-431D-B058-9C7A50463F91}.Release|x86.ActiveCfg = Release|Any CPU - {B8F1FE1E-7730-431D-B058-9C7A50463F91}.Release|x86.Build.0 = Release|Any CPU - {AA20938D-A0AC-4E37-B7D9-002C6DD90FEC}.Debug|Any 
CPU.ActiveCfg = Debug|Any CPU - {AA20938D-A0AC-4E37-B7D9-002C6DD90FEC}.Debug|Any CPU.Build.0 = Debug|Any CPU - {AA20938D-A0AC-4E37-B7D9-002C6DD90FEC}.Debug|x64.ActiveCfg = Debug|Any CPU - {AA20938D-A0AC-4E37-B7D9-002C6DD90FEC}.Debug|x64.Build.0 = Debug|Any CPU - {AA20938D-A0AC-4E37-B7D9-002C6DD90FEC}.Debug|x86.ActiveCfg = Debug|Any CPU - {AA20938D-A0AC-4E37-B7D9-002C6DD90FEC}.Debug|x86.Build.0 = Debug|Any CPU - {AA20938D-A0AC-4E37-B7D9-002C6DD90FEC}.Release|Any CPU.ActiveCfg = Release|Any CPU - {AA20938D-A0AC-4E37-B7D9-002C6DD90FEC}.Release|Any CPU.Build.0 = Release|Any CPU - {AA20938D-A0AC-4E37-B7D9-002C6DD90FEC}.Release|x64.ActiveCfg = Release|Any CPU - {AA20938D-A0AC-4E37-B7D9-002C6DD90FEC}.Release|x64.Build.0 = Release|Any CPU - {AA20938D-A0AC-4E37-B7D9-002C6DD90FEC}.Release|x86.ActiveCfg = Release|Any CPU - {AA20938D-A0AC-4E37-B7D9-002C6DD90FEC}.Release|x86.Build.0 = Release|Any CPU - EndGlobalSection - GlobalSection(SolutionProperties) = preSolution - HideSolutionNode = FALSE - EndGlobalSection -EndGlobal + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.0.31903.59 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.TimelineIndexer.Core", "StellaOps.TimelineIndexer.Core\StellaOps.TimelineIndexer.Core.csproj", "{C8959267-ACDD-49E9-B1FD-9694C8663437}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.TimelineIndexer.Infrastructure", "StellaOps.TimelineIndexer.Infrastructure\StellaOps.TimelineIndexer.Infrastructure.csproj", "{185CEED8-197F-4236-8716-73B37C5F355A}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.TimelineIndexer.WebService", "StellaOps.TimelineIndexer.WebService\StellaOps.TimelineIndexer.WebService.csproj", "{991C4CD2-F5D2-4AB7-83A5-EF4E60B61A86}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.TimelineIndexer.Worker", "StellaOps.TimelineIndexer.Worker\StellaOps.TimelineIndexer.Worker.csproj", "{B8F1FE1E-7730-431D-B058-9C7A50463F91}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.TimelineIndexer.Tests", "StellaOps.TimelineIndexer.Tests\StellaOps.TimelineIndexer.Tests.csproj", "{AA20938D-A0AC-4E37-B7D9-002C6DD90FEC}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|Any CPU = Release|Any CPU + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {C8959267-ACDD-49E9-B1FD-9694C8663437}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C8959267-ACDD-49E9-B1FD-9694C8663437}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C8959267-ACDD-49E9-B1FD-9694C8663437}.Debug|x64.ActiveCfg = Debug|Any CPU + {C8959267-ACDD-49E9-B1FD-9694C8663437}.Debug|x64.Build.0 = Debug|Any CPU + {C8959267-ACDD-49E9-B1FD-9694C8663437}.Debug|x86.ActiveCfg = Debug|Any CPU + {C8959267-ACDD-49E9-B1FD-9694C8663437}.Debug|x86.Build.0 = Debug|Any CPU + {C8959267-ACDD-49E9-B1FD-9694C8663437}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C8959267-ACDD-49E9-B1FD-9694C8663437}.Release|Any CPU.Build.0 = Release|Any CPU + {C8959267-ACDD-49E9-B1FD-9694C8663437}.Release|x64.ActiveCfg = Release|Any CPU + {C8959267-ACDD-49E9-B1FD-9694C8663437}.Release|x64.Build.0 = Release|Any CPU + {C8959267-ACDD-49E9-B1FD-9694C8663437}.Release|x86.ActiveCfg = Release|Any CPU + 
{C8959267-ACDD-49E9-B1FD-9694C8663437}.Release|x86.Build.0 = Release|Any CPU + {185CEED8-197F-4236-8716-73B37C5F355A}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {185CEED8-197F-4236-8716-73B37C5F355A}.Debug|Any CPU.Build.0 = Debug|Any CPU + {185CEED8-197F-4236-8716-73B37C5F355A}.Debug|x64.ActiveCfg = Debug|Any CPU + {185CEED8-197F-4236-8716-73B37C5F355A}.Debug|x64.Build.0 = Debug|Any CPU + {185CEED8-197F-4236-8716-73B37C5F355A}.Debug|x86.ActiveCfg = Debug|Any CPU + {185CEED8-197F-4236-8716-73B37C5F355A}.Debug|x86.Build.0 = Debug|Any CPU + {185CEED8-197F-4236-8716-73B37C5F355A}.Release|Any CPU.ActiveCfg = Release|Any CPU + {185CEED8-197F-4236-8716-73B37C5F355A}.Release|Any CPU.Build.0 = Release|Any CPU + {185CEED8-197F-4236-8716-73B37C5F355A}.Release|x64.ActiveCfg = Release|Any CPU + {185CEED8-197F-4236-8716-73B37C5F355A}.Release|x64.Build.0 = Release|Any CPU + {185CEED8-197F-4236-8716-73B37C5F355A}.Release|x86.ActiveCfg = Release|Any CPU + {185CEED8-197F-4236-8716-73B37C5F355A}.Release|x86.Build.0 = Release|Any CPU + {991C4CD2-F5D2-4AB7-83A5-EF4E60B61A86}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {991C4CD2-F5D2-4AB7-83A5-EF4E60B61A86}.Debug|Any CPU.Build.0 = Debug|Any CPU + {991C4CD2-F5D2-4AB7-83A5-EF4E60B61A86}.Debug|x64.ActiveCfg = Debug|Any CPU + {991C4CD2-F5D2-4AB7-83A5-EF4E60B61A86}.Debug|x64.Build.0 = Debug|Any CPU + {991C4CD2-F5D2-4AB7-83A5-EF4E60B61A86}.Debug|x86.ActiveCfg = Debug|Any CPU + {991C4CD2-F5D2-4AB7-83A5-EF4E60B61A86}.Debug|x86.Build.0 = Debug|Any CPU + {991C4CD2-F5D2-4AB7-83A5-EF4E60B61A86}.Release|Any CPU.ActiveCfg = Release|Any CPU + {991C4CD2-F5D2-4AB7-83A5-EF4E60B61A86}.Release|Any CPU.Build.0 = Release|Any CPU + {991C4CD2-F5D2-4AB7-83A5-EF4E60B61A86}.Release|x64.ActiveCfg = Release|Any CPU + {991C4CD2-F5D2-4AB7-83A5-EF4E60B61A86}.Release|x64.Build.0 = Release|Any CPU + {991C4CD2-F5D2-4AB7-83A5-EF4E60B61A86}.Release|x86.ActiveCfg = Release|Any CPU + {991C4CD2-F5D2-4AB7-83A5-EF4E60B61A86}.Release|x86.Build.0 = Release|Any CPU + {B8F1FE1E-7730-431D-B058-9C7A50463F91}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {B8F1FE1E-7730-431D-B058-9C7A50463F91}.Debug|Any CPU.Build.0 = Debug|Any CPU + {B8F1FE1E-7730-431D-B058-9C7A50463F91}.Debug|x64.ActiveCfg = Debug|Any CPU + {B8F1FE1E-7730-431D-B058-9C7A50463F91}.Debug|x64.Build.0 = Debug|Any CPU + {B8F1FE1E-7730-431D-B058-9C7A50463F91}.Debug|x86.ActiveCfg = Debug|Any CPU + {B8F1FE1E-7730-431D-B058-9C7A50463F91}.Debug|x86.Build.0 = Debug|Any CPU + {B8F1FE1E-7730-431D-B058-9C7A50463F91}.Release|Any CPU.ActiveCfg = Release|Any CPU + {B8F1FE1E-7730-431D-B058-9C7A50463F91}.Release|Any CPU.Build.0 = Release|Any CPU + {B8F1FE1E-7730-431D-B058-9C7A50463F91}.Release|x64.ActiveCfg = Release|Any CPU + {B8F1FE1E-7730-431D-B058-9C7A50463F91}.Release|x64.Build.0 = Release|Any CPU + {B8F1FE1E-7730-431D-B058-9C7A50463F91}.Release|x86.ActiveCfg = Release|Any CPU + {B8F1FE1E-7730-431D-B058-9C7A50463F91}.Release|x86.Build.0 = Release|Any CPU + {AA20938D-A0AC-4E37-B7D9-002C6DD90FEC}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {AA20938D-A0AC-4E37-B7D9-002C6DD90FEC}.Debug|Any CPU.Build.0 = Debug|Any CPU + {AA20938D-A0AC-4E37-B7D9-002C6DD90FEC}.Debug|x64.ActiveCfg = Debug|Any CPU + {AA20938D-A0AC-4E37-B7D9-002C6DD90FEC}.Debug|x64.Build.0 = Debug|Any CPU + {AA20938D-A0AC-4E37-B7D9-002C6DD90FEC}.Debug|x86.ActiveCfg = Debug|Any CPU + {AA20938D-A0AC-4E37-B7D9-002C6DD90FEC}.Debug|x86.Build.0 = Debug|Any CPU + {AA20938D-A0AC-4E37-B7D9-002C6DD90FEC}.Release|Any CPU.ActiveCfg = Release|Any CPU + {AA20938D-A0AC-4E37-B7D9-002C6DD90FEC}.Release|Any CPU.Build.0 = 
Release|Any CPU + {AA20938D-A0AC-4E37-B7D9-002C6DD90FEC}.Release|x64.ActiveCfg = Release|Any CPU + {AA20938D-A0AC-4E37-B7D9-002C6DD90FEC}.Release|x64.Build.0 = Release|Any CPU + {AA20938D-A0AC-4E37-B7D9-002C6DD90FEC}.Release|x86.ActiveCfg = Release|Any CPU + {AA20938D-A0AC-4E37-B7D9-002C6DD90FEC}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection +EndGlobal diff --git a/src/StellaOps.TimelineIndexer/TASKS.md b/src/TimelineIndexer/StellaOps.TimelineIndexer/TASKS.md similarity index 99% rename from src/StellaOps.TimelineIndexer/TASKS.md rename to src/TimelineIndexer/StellaOps.TimelineIndexer/TASKS.md index a6abccba..172fc1fd 100644 --- a/src/StellaOps.TimelineIndexer/TASKS.md +++ b/src/TimelineIndexer/StellaOps.TimelineIndexer/TASKS.md @@ -1,14 +1,14 @@ -# Timeline Indexer Task Board — Epic 15: Observability & Forensics - -## Sprint 52 – Timeline Foundations -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| TIMELINE-OBS-52-001 | TODO | Timeline Indexer Guild | TELEMETRY-OBS-50-001, AUTH-OBS-50-001 | Bootstrap `StellaOps.Timeline.Indexer` service with Postgres migrations for `timeline_events`, `timeline_event_details`, `timeline_event_digests`; enable RLS scaffolding and deterministic migration scripts. | Service builds/tests; migrations replay cleanly; baseline seed fixtures committed; compliance checklist recorded. | -| TIMELINE-OBS-52-002 | TODO | Timeline Indexer Guild | TIMELINE-OBS-52-001, DEVOPS-OBS-50-002 | Implement event ingestion pipeline (NATS/Redis consumers) with ordering guarantees, dedupe on `(event_id, tenant_id)`, correlation to trace IDs, and backpressure metrics. | Ingestion integration tests replay fixture stream; dedupe proven; metrics exposed; failure retries documented. | -| TIMELINE-OBS-52-003 | TODO | Timeline Indexer Guild | TIMELINE-OBS-52-002 | Expose REST/gRPC APIs for timeline queries (`GET /timeline`, `/timeline/{id}`) with filters, pagination, and tenant enforcement. Provide OpenAPI + contract tests. | APIs documented via OpenAPI; tests cover filters/pagination; latency budget <200 ms P95 on seeded data; audit logs recorded. | -| TIMELINE-OBS-52-004 | TODO | Timeline Indexer Guild, Security Guild | TIMELINE-OBS-52-001 | Finalize RLS policies, scope checks (`timeline:read`), and audit logging for query access. Include integration tests for cross-tenant isolation and legal hold markers. | RLS proven with failing cross-tenant queries; audit logs include actor/tenant; legal hold flag prevents deletion; docs referenced. | - -## Sprint 53 – Evidence & Provenance Integration -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| TIMELINE-OBS-53-001 | TODO | Timeline Indexer Guild, Evidence Locker Guild | TIMELINE-OBS-52-003, EVID-OBS-53-002 | Link timeline events to evidence bundle digests + attestation subjects; expose `/timeline/{id}/evidence` endpoint returning signed manifest references. | Endpoint returns evidence references with DSSE metadata; integration test verifies digest match; docs updated. 
| +# Timeline Indexer Task Board — Epic 15: Observability & Forensics + +## Sprint 52 – Timeline Foundations +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| TIMELINE-OBS-52-001 | TODO | Timeline Indexer Guild | TELEMETRY-OBS-50-001, AUTH-OBS-50-001 | Bootstrap `StellaOps.Timeline.Indexer` service with Postgres migrations for `timeline_events`, `timeline_event_details`, `timeline_event_digests`; enable RLS scaffolding and deterministic migration scripts. | Service builds/tests; migrations replay cleanly; baseline seed fixtures committed; compliance checklist recorded. | +| TIMELINE-OBS-52-002 | TODO | Timeline Indexer Guild | TIMELINE-OBS-52-001, DEVOPS-OBS-50-002 | Implement event ingestion pipeline (NATS/Redis consumers) with ordering guarantees, dedupe on `(event_id, tenant_id)`, correlation to trace IDs, and backpressure metrics. | Ingestion integration tests replay fixture stream; dedupe proven; metrics exposed; failure retries documented. | +| TIMELINE-OBS-52-003 | TODO | Timeline Indexer Guild | TIMELINE-OBS-52-002 | Expose REST/gRPC APIs for timeline queries (`GET /timeline`, `/timeline/{id}`) with filters, pagination, and tenant enforcement. Provide OpenAPI + contract tests. | APIs documented via OpenAPI; tests cover filters/pagination; latency budget <200 ms P95 on seeded data; audit logs recorded. | +| TIMELINE-OBS-52-004 | TODO | Timeline Indexer Guild, Security Guild | TIMELINE-OBS-52-001 | Finalize RLS policies, scope checks (`timeline:read`), and audit logging for query access. Include integration tests for cross-tenant isolation and legal hold markers. | RLS proven with failing cross-tenant queries; audit logs include actor/tenant; legal hold flag prevents deletion; docs referenced. | + +## Sprint 53 – Evidence & Provenance Integration +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| TIMELINE-OBS-53-001 | TODO | Timeline Indexer Guild, Evidence Locker Guild | TIMELINE-OBS-52-003, EVID-OBS-53-002 | Link timeline events to evidence bundle digests + attestation subjects; expose `/timeline/{id}/evidence` endpoint returning signed manifest references. | Endpoint returns evidence references with DSSE metadata; integration test verifies digest match; docs updated. | diff --git a/src/StellaOps.UI/TASKS.md b/src/UI/StellaOps.UI/TASKS.md similarity index 99% rename from src/StellaOps.UI/TASKS.md rename to src/UI/StellaOps.UI/TASKS.md index eaa29214..d23477ee 100644 --- a/src/StellaOps.UI/TASKS.md +++ b/src/UI/StellaOps.UI/TASKS.md @@ -1,95 +1,95 @@ -# UI Task Board (Sprints 13 & 19) - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| UI-POLICY-13-007 | TODO | UI Guild | POLICY-CORE-09-006, SCANNER-WEB-09-103 | Surface policy confidence metadata (band, age, quiet provenance) on preview and report views. | UI renders new columns/tooltips, accessibility and responsive checks pass, Cypress regression updated. | -| UI-AOC-19-001 | TODO | UI Guild | CONCELIER-WEB-AOC-19-001, EXCITITOR-WEB-AOC-19-001 | Add Sources dashboard tiles showing AOC pass/fail, recent violation codes, and ingest throughput per tenant. | Dashboard displays metrics from new endpoints, charts verified in e2e tests, accessibility checks pass. 
| -| UI-AOC-19-002 | TODO | UI Guild | UI-AOC-19-001 | Implement violation drill-down view highlighting offending document fields and provenance metadata. | Drill-down renders formatted JSON with highlights; copy-to-clipboard works; tests cover forbidden key cases. | -| UI-AOC-19-003 | TODO | UI Guild | UI-AOC-19-001, CLI-AOC-19-002 | Add "Verify last 24h" action triggering AOC verifier endpoint and surfacing CLI parity guidance. | Action wired to API, results rendered in toast/log panel, docs link to CLI usage, e2e test verifies flow. | - -## Policy Engine v2 (Sprint 20) - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| UI-POLICY-20-001 | TODO | UI Guild | WEB-POLICY-20-001 | Ship Monaco-based policy editor with DSL syntax highlighting, inline diagnostics, and compliance checklist sidebar. | Editor renders DSL with token colors + lint; accessibility review passes; diagnostics surfaced from API compile endpoint in tests. | -| UI-POLICY-20-002 | TODO | UI Guild | UI-POLICY-20-001, WEB-POLICY-20-001, WEB-POLICY-20-002 | Build simulation panel showing before/after counts, severity deltas, and rule hit summaries with deterministic diff rendering. | Simulation view consumes API diff JSON, handles large datasets with virtualization, Cypress regression verifies charts/tables. | -| UI-POLICY-20-003 | TODO | UI Guild, Product Ops | UI-POLICY-20-001, AUTH-POLICY-27-001 | Implement submit/review/approve workflow with comments, approvals log, and RBAC checks aligned to new Policy Studio roles (`policy:author`/`policy:review`/`policy:approve`/`policy:operate`). | Workflow passes e2e tests, audit trail rendered, unauthorized roles blocked, docs linked from UI help. | -| UI-POLICY-20-004 | TODO | UI Guild, Observability Guild | WEB-POLICY-20-001, POLICY-ENGINE-20-006, POLICY-ENGINE-20-007 | Add run viewer dashboards (rule heatmap, VEX wins, suppressions) with filter/search and export. | Dashboards render aggregated metrics, export downloads CSV/JSON, accessibility/perf budgets met, telemetry charts validated. | - -## Policy Studio RBAC Alignment (Sprint 27) - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| UI-POLICY-27-001 | TODO | UI Guild, Product Ops | AUTH-POLICY-27-001, UI-POLICY-20-003 | Update Console policy workspace RBAC guards, scope requests, and user messaging to reflect the new Policy Studio roles/scopes (`policy:author/review/approve/operate/audit/simulate`), including Cypress auth stubs and help text. | UI requests tokens with new scopes, unauthorized messaging references updated roles, Cypress/e2e tests cover scope failures, and help tooltips/docs links refreshed. | -> Heads-up: Authority & Gateway configs now reject the old `policy:write`/`policy:submit` scopes—Console policy flows will error until they request the new bundles. - -## Graph Explorer v1 (Sprint 21) - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| UI-GRAPH-21-001 | TODO | UI Guild | WEB-GRAPH-21-001, AUTH-GRAPH-21-001 | Align Graph Explorer auth configuration with new `graph:*` scopes; consume scope identifiers from shared `StellaOpsScopes` exports (via generated SDK/config) instead of hard-coded strings. | UI requests graph tokens using shared scope constants; configuration docs updated; Cypress auth stub updated accordingly. 
| - -## Link-Not-Merge v1 (Sprint 22) - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| UI-LNM-22-001 | TODO | UI Guild, Policy Guild | SCANNER-LNM-21-002, WEB-LNM-21-001 | Build Evidence panel showing policy decision with advisory observations/linksets side-by-side, conflict badges, AOC chain, and raw doc download links. Docs `DOCS-LNM-22-005` waiting on delivered UI for screenshots + flows. | Panel renders multiple sources; conflict badges accessible; e2e tests cover high-volume linksets. | -| UI-LNM-22-002 | TODO | UI Guild | UI-LNM-22-001 | Implement filters (source, severity bucket, conflict-only, CVSS vector presence) and pagination/lazy loading for large linksets. Docs depend on finalized filtering UX. | Filters respond within 500 ms; virtualization validated; unit/e2e tests added. | -| UI-LNM-22-003 | TODO | UI Guild, Excititor Guild | UI-LNM-22-001, WEB-LNM-21-002 | Add VEX tab with status/justification summaries, conflict indicators, and export actions. Required for `DOCS-LNM-22-005` coverage of VEX evidence tab. | VEX tab displays multiple observations; exports produce zipped OSV/CycloneDX; tests updated. | -| UI-LNM-22-004 | TODO | UI Guild | UI-LNM-22-001 | Provide permalink + copy-to-clipboard for selected component/linkset/policy combination; ensure high-contrast theme support. | Permalink reproduces state; accessibility audit passes; telemetry events logged. | - -## StellaOps Console (Sprint 23) - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| UI-CONSOLE-23-001 | DONE (2025-10-31) | UI Guild & Security Guild | AUTH-CONSOLE-23-002 | Integrate Authority console endpoints (`/console/tenants`, `/console/profile`, `/console/token/introspect`) into UI session state, decode tenant/scopes claims, and expose signals for components. | Console session store fetches context on login, tenant header enforcement confirmed, unit tests cover store/service, and errors surface through state flags. | -> 2025-10-31: Added authority console API client, session store/service, and access token metadata parsing in `AuthorityAuthService`. Signals expose tenant/scopes, and unit tests cover happy/error paths. -| UI-CONSOLE-23-002 | DONE (2025-10-31) | UI Guild | UI-CONSOLE-23-001 | Build console profile view showing user identity, fresh-auth status, token metadata, and tenant catalog with refresh + tenant switch actions. | Component renders data from store, refresh action wired to API, accessibility checks pass, and component tests cover loading/error states. | -> 2025-10-31: Delivered `ConsoleProfileComponent`, hooked into navigation/header indicators, and styled cards for profile/token/tenant catalog with refresh + tenant switching. - -## Policy Engine + Editor v1 (Sprint 23) - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| UI-POLICY-23-001 | TODO | UI Guild, Policy Guild | WEB-POLICY-23-001 | Deliver Policy Editor workspace with pack list, revision history, and scoped metadata cards. | Editor lists packs/revisions; navigation accessible; tests cover RBAC states. | -| UI-POLICY-23-002 | TODO | UI Guild | UI-POLICY-23-001 | Implement YAML editor with schema validation, lint diagnostics, and live canonicalization preview. 
| YAML editor surfaces inline errors sourced from compiler; keyboard shortcuts and accessibility verified. | -| UI-POLICY-23-003 | TODO | UI Guild | UI-POLICY-23-001, WEB-POLICY-23-003 | Build guided rule builder (source preferences, severity mapping, VEX precedence, exceptions) with preview JSON output. | Guided builder generates valid SPL, diff view matches YAML; tests cover rule permutations. | -| UI-POLICY-23-004 | TODO | UI Guild | UI-POLICY-23-001, WEB-POLICY-23-002, POLICY-GATEWAY-18-002..003 | Add review/approval workflow UI: checklists, comments, two-person approval indicator, scope scheduling. | Workflow screens complete; approval restrictions enforced; e2e tests cover approval -> activation. | -| UI-POLICY-23-005 | TODO | UI Guild | UI-POLICY-23-001, WEB-POLICY-23-003 | Integrate simulator panel (SBOM/component/advisory selection), run diff vs active policy, show explain tree and overlays. | Simulation results render diff/projection; explain tree interactive; performance <1s for sample data. | -| UI-POLICY-23-006 | TODO | UI Guild | UI-POLICY-23-005 | Implement explain view linking to evidence overlays and exceptions; provide export to JSON/PDF. | Explain view accessible; exports generated; analytics instrumented. | - -## Graph & Vuln Explorer v1 (Sprint 24) - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| UI-GRAPH-24-001 | TODO | UI Guild, SBOM Service Guild | WEB-GRAPH-24-001 | Build Graph Explorer canvas with layered/radial layouts, virtualization, zoom/pan, and scope toggles; initial render <1.5s for sample asset. | Canvas meets perf budget; automated tests cover navigation; accessibility validation done. | -| UI-GRAPH-24-002 | TODO | UI Guild, Policy Guild | UI-GRAPH-24-001, WEB-GRAPH-24-001, WEB-VEX-30-007 | Implement overlays (Policy, Evidence, License, Exposure), simulation toggle, path view, and SBOM diff/time-travel with accessible tooltips/AOC indicators. | Overlays + simulation toggle respond <250 ms; path view/diff export validated; accessibility tests cover keyboard + contrast; e2e covers overlay combos. | -| UI-GRAPH-24-003 | TODO | UI Guild | UI-GRAPH-24-001 | Deliver filters/search panel with facets, saved views, permalinks, and share modal. | Filters update view <250ms; saved view persisted; permalinks reproduce state. | -| UI-GRAPH-24-004 | TODO | UI Guild | UI-GRAPH-24-001 | Add side panels (Details, What-if, History) with upgrade simulation integration and SBOM diff viewer. | Simulation results display diff + policy impact; history shows added/removed nodes; tests cover flows. | -| UI-GRAPH-24-006 | TODO | UI Guild, Accessibility Guild | UI-GRAPH-24-001..005 | Ensure accessibility (keyboard nav, screen reader labels, contrast), add hotkeys (`f`,`e`,`.`), and analytics instrumentation. | Accessibility audit passes; hotkeys documented; telemetry events captured. | - -## Exceptions v1 (Sprint 25) - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| UI-EXC-25-001 | TODO | UI Guild, Governance Guild | WEB-EXC-25-001 | Build Exception Center (list + kanban) with filters, sorting, workflow transitions, and audit views. | Exception Center functional; state transitions via UI; accessibility validated. | -| UI-EXC-25-002 | TODO | UI Guild | UI-EXC-25-001 | Implement exception creation wizard with scope preview, justification templates, timebox guardrails. 
| Wizard enforces scope/timebox; previews impacted items; tests cover validation. | -| UI-EXC-25-003 | TODO | UI Guild | UI-EXC-25-001, WEB-EXC-25-002 | Add inline exception drafting/proposing from Vulnerability Explorer and Graph detail panels with live simulation. | Inline flows produce drafts; preview shows policy delta; telemetry instrumented. | -| UI-EXC-25-004 | TODO | UI Guild | UI-EXC-25-001 | Surface exception badges, countdown timers, and explain integration across Graph/Vuln Explorer and policy views. | Badges visible with SR labels; countdown updates; explain drawer shows exception info. | -| UI-EXC-25-005 | TODO | UI Guild, Accessibility Guild | UI-EXC-25-001..004 | Add keyboard shortcuts (`x`,`a`,`r`) and ensure screen-reader messaging for approvals/revocations. | Shortcuts functional; accessibility audit passes. | - -## Reachability v1 (Sprint 26) - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| UI-SIG-26-001 | TODO | UI Guild, Signals Guild | WEB-SIG-26-001 | Add reachability columns/badges to Vulnerability Explorer with filters and tooltips. | Columns render with virtualization; filters update under 250 ms; badges accessible. | -| UI-SIG-26-002 | TODO | UI Guild | UI-SIG-26-001, WEB-SIG-26-002 | Enhance “Why” drawer with call path visualization, reachability timeline, and evidence list. | Drawer displays call path breadcrumb; copyable details; tests cover states. | -| UI-SIG-26-003 | TODO | UI Guild | UI-GRAPH-24-001, WEB-SIG-26-002 | Add reachability overlay halos/time slider to SBOM Graph along with state legend. | Overlay toggles; time slider compares snapshots; performance budget met. | -| UI-SIG-26-004 | TODO | UI Guild | WEB-SIG-26-003 | Build Reachability Center view showing asset coverage, missing sensors, and stale facts. | Center lists assets with metrics; missing sensors highlighted; accessibility validated. | - -## Orchestrator Dashboard (Sprint 32) - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| UI-ORCH-32-001 | TODO | UI Guild, Console Guild | AUTH-ORCH-32-001, ORCH-SVC-32-003 | Update Console RBAC mappings to surface `Orch.Viewer`, request `orch:read` scope in token flows, and gate dashboard access/messaging accordingly. | Console role catalogue includes `Orch.Viewer`; auth helpers use shared scope constant; dashboard routes enforce scope and show actionable guidance; e2e tests cover authorized/unauthorized flows. | -> 2025-10-31: Authority minted `orch:read` scope; ensure Console UX aligns before orchestrator dashboards ship. +# UI Task Board (Sprints 13 & 19) + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| UI-POLICY-13-007 | TODO | UI Guild | POLICY-CORE-09-006, SCANNER-WEB-09-103 | Surface policy confidence metadata (band, age, quiet provenance) on preview and report views. | UI renders new columns/tooltips, accessibility and responsive checks pass, Cypress regression updated. | +| UI-AOC-19-001 | TODO | UI Guild | CONCELIER-WEB-AOC-19-001, EXCITITOR-WEB-AOC-19-001 | Add Sources dashboard tiles showing AOC pass/fail, recent violation codes, and ingest throughput per tenant. | Dashboard displays metrics from new endpoints, charts verified in e2e tests, accessibility checks pass. 
| +| UI-AOC-19-002 | TODO | UI Guild | UI-AOC-19-001 | Implement violation drill-down view highlighting offending document fields and provenance metadata. | Drill-down renders formatted JSON with highlights; copy-to-clipboard works; tests cover forbidden key cases. | +| UI-AOC-19-003 | TODO | UI Guild | UI-AOC-19-001, CLI-AOC-19-002 | Add "Verify last 24h" action triggering AOC verifier endpoint and surfacing CLI parity guidance. | Action wired to API, results rendered in toast/log panel, docs link to CLI usage, e2e test verifies flow. | + +## Policy Engine v2 (Sprint 20) + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| UI-POLICY-20-001 | TODO | UI Guild | WEB-POLICY-20-001 | Ship Monaco-based policy editor with DSL syntax highlighting, inline diagnostics, and compliance checklist sidebar. | Editor renders DSL with token colors + lint; accessibility review passes; diagnostics surfaced from API compile endpoint in tests. | +| UI-POLICY-20-002 | TODO | UI Guild | UI-POLICY-20-001, WEB-POLICY-20-001, WEB-POLICY-20-002 | Build simulation panel showing before/after counts, severity deltas, and rule hit summaries with deterministic diff rendering. | Simulation view consumes API diff JSON, handles large datasets with virtualization, Cypress regression verifies charts/tables. | +| UI-POLICY-20-003 | TODO | UI Guild, Product Ops | UI-POLICY-20-001, AUTH-POLICY-27-001 | Implement submit/review/approve workflow with comments, approvals log, and RBAC checks aligned to new Policy Studio roles (`policy:author`/`policy:review`/`policy:approve`/`policy:operate`). | Workflow passes e2e tests, audit trail rendered, unauthorized roles blocked, docs linked from UI help. | +| UI-POLICY-20-004 | TODO | UI Guild, Observability Guild | WEB-POLICY-20-001, POLICY-ENGINE-20-006, POLICY-ENGINE-20-007 | Add run viewer dashboards (rule heatmap, VEX wins, suppressions) with filter/search and export. | Dashboards render aggregated metrics, export downloads CSV/JSON, accessibility/perf budgets met, telemetry charts validated. | + +## Policy Studio RBAC Alignment (Sprint 27) + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| UI-POLICY-27-001 | TODO | UI Guild, Product Ops | AUTH-POLICY-27-001, UI-POLICY-20-003 | Update Console policy workspace RBAC guards, scope requests, and user messaging to reflect the new Policy Studio roles/scopes (`policy:author/review/approve/operate/audit/simulate`), including Cypress auth stubs and help text. | UI requests tokens with new scopes, unauthorized messaging references updated roles, Cypress/e2e tests cover scope failures, and help tooltips/docs links refreshed. | +> Heads-up: Authority & Gateway configs now reject the old `policy:write`/`policy:submit` scopes—Console policy flows will error until they request the new bundles. + +## Graph Explorer v1 (Sprint 21) + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| UI-GRAPH-21-001 | TODO | UI Guild | WEB-GRAPH-21-001, AUTH-GRAPH-21-001 | Align Graph Explorer auth configuration with new `graph:*` scopes; consume scope identifiers from shared `StellaOpsScopes` exports (via generated SDK/config) instead of hard-coded strings. | UI requests graph tokens using shared scope constants; configuration docs updated; Cypress auth stub updated accordingly. 
| + +## Link-Not-Merge v1 (Sprint 22) + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| UI-LNM-22-001 | TODO | UI Guild, Policy Guild | SCANNER-LNM-21-002, WEB-LNM-21-001 | Build Evidence panel showing policy decision with advisory observations/linksets side-by-side, conflict badges, AOC chain, and raw doc download links. Docs `DOCS-LNM-22-005` waiting on delivered UI for screenshots + flows. | Panel renders multiple sources; conflict badges accessible; e2e tests cover high-volume linksets. | +| UI-LNM-22-002 | TODO | UI Guild | UI-LNM-22-001 | Implement filters (source, severity bucket, conflict-only, CVSS vector presence) and pagination/lazy loading for large linksets. Docs depend on finalized filtering UX. | Filters respond within 500 ms; virtualization validated; unit/e2e tests added. | +| UI-LNM-22-003 | TODO | UI Guild, Excititor Guild | UI-LNM-22-001, WEB-LNM-21-002 | Add VEX tab with status/justification summaries, conflict indicators, and export actions. Required for `DOCS-LNM-22-005` coverage of VEX evidence tab. | VEX tab displays multiple observations; exports produce zipped OSV/CycloneDX; tests updated. | +| UI-LNM-22-004 | TODO | UI Guild | UI-LNM-22-001 | Provide permalink + copy-to-clipboard for selected component/linkset/policy combination; ensure high-contrast theme support. | Permalink reproduces state; accessibility audit passes; telemetry events logged. | + +## StellaOps Console (Sprint 23) + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| UI-CONSOLE-23-001 | DONE (2025-10-31) | UI Guild & Security Guild | AUTH-CONSOLE-23-002 | Integrate Authority console endpoints (`/console/tenants`, `/console/profile`, `/console/token/introspect`) into UI session state, decode tenant/scopes claims, and expose signals for components. | Console session store fetches context on login, tenant header enforcement confirmed, unit tests cover store/service, and errors surface through state flags. | +> 2025-10-31: Added authority console API client, session store/service, and access token metadata parsing in `AuthorityAuthService`. Signals expose tenant/scopes, and unit tests cover happy/error paths. +| UI-CONSOLE-23-002 | DONE (2025-10-31) | UI Guild | UI-CONSOLE-23-001 | Build console profile view showing user identity, fresh-auth status, token metadata, and tenant catalog with refresh + tenant switch actions. | Component renders data from store, refresh action wired to API, accessibility checks pass, and component tests cover loading/error states. | +> 2025-10-31: Delivered `ConsoleProfileComponent`, hooked into navigation/header indicators, and styled cards for profile/token/tenant catalog with refresh + tenant switching. + +## Policy Engine + Editor v1 (Sprint 23) + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| UI-POLICY-23-001 | TODO | UI Guild, Policy Guild | WEB-POLICY-23-001 | Deliver Policy Editor workspace with pack list, revision history, and scoped metadata cards. | Editor lists packs/revisions; navigation accessible; tests cover RBAC states. | +| UI-POLICY-23-002 | TODO | UI Guild | UI-POLICY-23-001 | Implement YAML editor with schema validation, lint diagnostics, and live canonicalization preview. 
| YAML editor surfaces inline errors sourced from compiler; keyboard shortcuts and accessibility verified. | +| UI-POLICY-23-003 | TODO | UI Guild | UI-POLICY-23-001, WEB-POLICY-23-003 | Build guided rule builder (source preferences, severity mapping, VEX precedence, exceptions) with preview JSON output. | Guided builder generates valid SPL, diff view matches YAML; tests cover rule permutations. | +| UI-POLICY-23-004 | TODO | UI Guild | UI-POLICY-23-001, WEB-POLICY-23-002, POLICY-GATEWAY-18-002..003 | Add review/approval workflow UI: checklists, comments, two-person approval indicator, scope scheduling. | Workflow screens complete; approval restrictions enforced; e2e tests cover approval -> activation. | +| UI-POLICY-23-005 | TODO | UI Guild | UI-POLICY-23-001, WEB-POLICY-23-003 | Integrate simulator panel (SBOM/component/advisory selection), run diff vs active policy, show explain tree and overlays. | Simulation results render diff/projection; explain tree interactive; performance <1s for sample data. | +| UI-POLICY-23-006 | TODO | UI Guild | UI-POLICY-23-005 | Implement explain view linking to evidence overlays and exceptions; provide export to JSON/PDF. | Explain view accessible; exports generated; analytics instrumented. | + +## Graph & Vuln Explorer v1 (Sprint 24) + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| UI-GRAPH-24-001 | TODO | UI Guild, SBOM Service Guild | WEB-GRAPH-24-001 | Build Graph Explorer canvas with layered/radial layouts, virtualization, zoom/pan, and scope toggles; initial render <1.5s for sample asset. | Canvas meets perf budget; automated tests cover navigation; accessibility validation done. | +| UI-GRAPH-24-002 | TODO | UI Guild, Policy Guild | UI-GRAPH-24-001, WEB-GRAPH-24-001, WEB-VEX-30-007 | Implement overlays (Policy, Evidence, License, Exposure), simulation toggle, path view, and SBOM diff/time-travel with accessible tooltips/AOC indicators. | Overlays + simulation toggle respond <250 ms; path view/diff export validated; accessibility tests cover keyboard + contrast; e2e covers overlay combos. | +| UI-GRAPH-24-003 | TODO | UI Guild | UI-GRAPH-24-001 | Deliver filters/search panel with facets, saved views, permalinks, and share modal. | Filters update view <250ms; saved view persisted; permalinks reproduce state. | +| UI-GRAPH-24-004 | TODO | UI Guild | UI-GRAPH-24-001 | Add side panels (Details, What-if, History) with upgrade simulation integration and SBOM diff viewer. | Simulation results display diff + policy impact; history shows added/removed nodes; tests cover flows. | +| UI-GRAPH-24-006 | TODO | UI Guild, Accessibility Guild | UI-GRAPH-24-001..005 | Ensure accessibility (keyboard nav, screen reader labels, contrast), add hotkeys (`f`,`e`,`.`), and analytics instrumentation. | Accessibility audit passes; hotkeys documented; telemetry events captured. | + +## Exceptions v1 (Sprint 25) + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| UI-EXC-25-001 | TODO | UI Guild, Governance Guild | WEB-EXC-25-001 | Build Exception Center (list + kanban) with filters, sorting, workflow transitions, and audit views. | Exception Center functional; state transitions via UI; accessibility validated. | +| UI-EXC-25-002 | TODO | UI Guild | UI-EXC-25-001 | Implement exception creation wizard with scope preview, justification templates, timebox guardrails. 
| Wizard enforces scope/timebox; previews impacted items; tests cover validation. | +| UI-EXC-25-003 | TODO | UI Guild | UI-EXC-25-001, WEB-EXC-25-002 | Add inline exception drafting/proposing from Vulnerability Explorer and Graph detail panels with live simulation. | Inline flows produce drafts; preview shows policy delta; telemetry instrumented. | +| UI-EXC-25-004 | TODO | UI Guild | UI-EXC-25-001 | Surface exception badges, countdown timers, and explain integration across Graph/Vuln Explorer and policy views. | Badges visible with SR labels; countdown updates; explain drawer shows exception info. | +| UI-EXC-25-005 | TODO | UI Guild, Accessibility Guild | UI-EXC-25-001..004 | Add keyboard shortcuts (`x`,`a`,`r`) and ensure screen-reader messaging for approvals/revocations. | Shortcuts functional; accessibility audit passes. | + +## Reachability v1 (Sprint 26) + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| UI-SIG-26-001 | TODO | UI Guild, Signals Guild | WEB-SIG-26-001 | Add reachability columns/badges to Vulnerability Explorer with filters and tooltips. | Columns render with virtualization; filters update under 250 ms; badges accessible. | +| UI-SIG-26-002 | TODO | UI Guild | UI-SIG-26-001, WEB-SIG-26-002 | Enhance “Why” drawer with call path visualization, reachability timeline, and evidence list. | Drawer displays call path breadcrumb; copyable details; tests cover states. | +| UI-SIG-26-003 | TODO | UI Guild | UI-GRAPH-24-001, WEB-SIG-26-002 | Add reachability overlay halos/time slider to SBOM Graph along with state legend. | Overlay toggles; time slider compares snapshots; performance budget met. | +| UI-SIG-26-004 | TODO | UI Guild | WEB-SIG-26-003 | Build Reachability Center view showing asset coverage, missing sensors, and stale facts. | Center lists assets with metrics; missing sensors highlighted; accessibility validated. | + +## Orchestrator Dashboard (Sprint 32) + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| UI-ORCH-32-001 | TODO | UI Guild, Console Guild | AUTH-ORCH-32-001, ORCH-SVC-32-003 | Update Console RBAC mappings to surface `Orch.Viewer`, request `orch:read` scope in token flows, and gate dashboard access/messaging accordingly. | Console role catalogue includes `Orch.Viewer`; auth helpers use shared scope constant; dashboard routes enforce scope and show actionable guidance; e2e tests cover authorized/unauthorized flows. | +> 2025-10-31: Authority minted `orch:read` scope; ensure Console UX aligns before orchestrator dashboards ship. diff --git a/src/StellaOps.VexLens/AGENTS.md b/src/VexLens/StellaOps.VexLens/AGENTS.md similarity index 88% rename from src/StellaOps.VexLens/AGENTS.md rename to src/VexLens/StellaOps.VexLens/AGENTS.md index 3f722793..02097b37 100644 --- a/src/StellaOps.VexLens/AGENTS.md +++ b/src/VexLens/StellaOps.VexLens/AGENTS.md @@ -4,7 +4,7 @@ Deliver the VEX Consensus Lens service that normalizes VEX evidence, computes deterministic consensus states, exposes APIs, and feeds Policy Engine and downstream explorers without mutating raw documents. ## Scope -- Service code under `src/StellaOps.VexLens` (normalizer, mapping, trust weighting, consensus projection, APIs, simulation hooks). +- Service code under `src/VexLens/StellaOps.VexLens` (normalizer, mapping, trust weighting, consensus projection, APIs, simulation hooks). 
- Batch workers consuming Excitator, Conseiller, SBOM, and policy events; projection storage and caching; telemetry. - Coordination with Policy Engine, Vuln Explorer, Findings Ledger, Console, CLI, and Docs. @@ -16,7 +16,7 @@ Deliver the VEX Consensus Lens service that normalizes VEX evidence, computes de 5. **Secure & auditable** – signature verification, issuer metadata, logging of conflicts, support for compliance queries. ## Collaboration -- Keep `src/StellaOps.VexLens/TASKS.md`, `SPRINTS.md` synchronized. +- Keep `src/VexLens/StellaOps.VexLens/TASKS.md`, `../../docs/implplan/SPRINTS.md` synchronized. - Share schemas/OpenAPI with Console & CLI; publish mapping docs and test fixtures. - Coordinate with Policy Engine on trust knobs and Vuln Explorer on UI integration. diff --git a/src/StellaOps.VexLens/TASKS.md b/src/VexLens/StellaOps.VexLens/TASKS.md similarity index 99% rename from src/StellaOps.VexLens/TASKS.md rename to src/VexLens/StellaOps.VexLens/TASKS.md index 423ffb77..16f2897f 100644 --- a/src/StellaOps.VexLens/TASKS.md +++ b/src/VexLens/StellaOps.VexLens/TASKS.md @@ -1,34 +1,34 @@ -# VEX Lens Task Board — Epic 7: VEX Consensus Lens -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| VEXLENS-30-001 | TODO | VEX Lens Guild | EXCITITOR-LNM-21-001, CONCELIER-LNM-21-001 | Implement normalization pipeline for CSAF VEX, OpenVEX, CycloneDX VEX (status mapping, justification mapping, product tree parsing). | Normalization outputs deterministic canonical JSON; fixtures cover formats; unit tests pass. | -| VEXLENS-30-002 | TODO | VEX Lens Guild | VEXLENS-30-001, SBOM-VULN-29-001 | Build product mapping library (CPE/CPE2.3/vendor tokens → purl/version) with scope quality scoring and path metadata. | Mapping library handles target ecosystems with property tests; scope scores recorded; docs updated. | -| VEXLENS-30-003 | TODO | VEX Lens Guild, Issuer Directory Guild | ISSUER-30-001 | Integrate signature verification (Ed25519, DSSE, PKIX) using issuer keys, annotate evidence with verification state and failure reasons. | Signatures verified; failures logged; tests cover signed/unsigned/expired cases. | -| VEXLENS-30-004 | TODO | VEX Lens Guild, Policy Guild | POLICY-ENGINE-30-101 | Implement trust weighting engine (issuer base weights, signature modifiers, recency decay, justification modifiers, scope score adjustments) controlled by policy config. | Weighting functions configurable; policy overrides applied; unit tests validate formulas. | -| VEXLENS-30-005 | TODO | VEX Lens Guild | VEXLENS-30-001..004 | Implement consensus algorithm producing `consensus_state`, `confidence`, `weights`, `quorum`, `rationale`; support states: NOT_AFFECTED, AFFECTED, FIXED, UNDER_INVESTIGATION, DISPUTED, INCONCLUSIVE. | Algorithm deterministic; unit/property tests cover conflict scenarios; rationale includes top evidences; docs drafted. | -| VEXLENS-30-006 | TODO | VEX Lens Guild, Findings Ledger Guild | VEXLENS-30-005, LEDGER-29-003 | Materialize consensus projection storage with idempotent workers triggered by VEX/Policy changes; expose change events for downstream consumers. | Projection generated for fixtures; backpressure metrics recorded; replay harness passes. | -| VEXLENS-30-007 | TODO | VEX Lens Guild | VEXLENS-30-006 | Expose APIs (`/vex/consensus`, `/vex/consensus/query`, `/vex/consensus/{id}`, `/vex/consensus/simulate`, `/vex/consensus/export`) with pagination, cost budgets, and OpenAPI docs. 
| APIs deployed with schema validation; integration tests cover filters/simulation/export; rate limits enforced. | -| VEXLENS-30-008 | TODO | VEX Lens Guild, Policy Guild | VEXLENS-30-006, POLICY-ENGINE-30-101 | Integrate consensus signals with Policy Engine (thresholds, suppression, simulation inputs) and Vuln Explorer detail view. | Policy consumes consensus via documented contract; Vuln Explorer shows consensus chip; e2e tests confirm suppression behavior. | -| VEXLENS-30-009 | TODO | VEX Lens Guild, Observability Guild | VEXLENS-30-006..008 | Instrument metrics (`vex_consensus_compute_latency`, `vex_consensus_disputed_total`, `vex_signature_verification_rate`), structured logs, and traces; publish dashboards/alerts. | Metrics/traces live; dashboards approved; alert thresholds configured. | -| VEXLENS-30-010 | TODO | VEX Lens Guild, QA Guild | VEXLENS-30-001..008 | Develop unit/property/integration/load tests (10M records), determinism harness, fuzz testing for malformed product trees. | Test suites green; load tests documented; determinism harness validated across two runs. | -| VEXLENS-30-011 | TODO | VEX Lens Guild, DevOps Guild | VEXLENS-30-006..009 | Provide deployment manifests, caching configuration, scaling guides, offline kit seeds, and runbooks. | Deployment docs merged; smoke deploy validated; offline kit updated; runbooks published. | - -## Advisory AI (Sprint 31) - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| VEXLENS-AIAI-31-001 | TODO | VEX Lens Guild | VEXLENS-30-005 | Expose consensus rationale API enhancements (policy factors, issuer details, mapping issues) for Advisory AI conflict explanations. | API returns structured factors; docs updated; integration tests cover tuples. | -| VEXLENS-AIAI-31-002 | TODO | VEX Lens Guild | VEXLENS-30-006 | Provide caching hooks for consensus lookups used by Advisory AI (batch endpoints, TTL hints). | Batch API published; caches instrumented; telemetry recorded. | - -## Orchestrator Dashboard - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| VEXLENS-ORCH-33-001 | TODO | VEX Lens Guild | ORCH-SVC-32-001, ORCH-SVC-32-003, ORCH-SVC-33-001 | Register `consensus_compute` job type with orchestrator, integrate worker SDK, and expose job planning hooks for consensus batches. | Job type documented; worker consumes orchestrator jobs; tests cover pause/retry; metrics exported. | -| VEXLENS-ORCH-34-001 | TODO | VEX Lens Guild | VEXLENS-ORCH-33-001, ORCH-SVC-34-002, ORCH-SVC-34-001 | Emit consensus completion events into orchestrator run ledger and provenance chain, including confidence metadata. | Ledger export includes consensus entries; events contain provenance; integration tests validate chain; docs cross-link to run-ledger. | - -## Export Center (Epic 10) - -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| VEXLENS-EXPORT-35-001 | TODO | VEX Lens Guild | VEXLENS-30-006, LEDGER-EXPORT-35-001 | Provide consensus snapshot API delivering deterministic JSONL (state, confidence, provenance) for exporter mirror bundles. | Snapshot endpoint deployed; determinism tests pass; schema documented; metrics/logs instrumented. 
| +# VEX Lens Task Board — Epic 7: VEX Consensus Lens +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| VEXLENS-30-001 | TODO | VEX Lens Guild | EXCITITOR-LNM-21-001, CONCELIER-LNM-21-001 | Implement normalization pipeline for CSAF VEX, OpenVEX, CycloneDX VEX (status mapping, justification mapping, product tree parsing). | Normalization outputs deterministic canonical JSON; fixtures cover formats; unit tests pass. | +| VEXLENS-30-002 | TODO | VEX Lens Guild | VEXLENS-30-001, SBOM-VULN-29-001 | Build product mapping library (CPE/CPE2.3/vendor tokens → purl/version) with scope quality scoring and path metadata. | Mapping library handles target ecosystems with property tests; scope scores recorded; docs updated. | +| VEXLENS-30-003 | TODO | VEX Lens Guild, Issuer Directory Guild | ISSUER-30-001 | Integrate signature verification (Ed25519, DSSE, PKIX) using issuer keys, annotate evidence with verification state and failure reasons. | Signatures verified; failures logged; tests cover signed/unsigned/expired cases. | +| VEXLENS-30-004 | TODO | VEX Lens Guild, Policy Guild | POLICY-ENGINE-30-101 | Implement trust weighting engine (issuer base weights, signature modifiers, recency decay, justification modifiers, scope score adjustments) controlled by policy config. | Weighting functions configurable; policy overrides applied; unit tests validate formulas. | +| VEXLENS-30-005 | TODO | VEX Lens Guild | VEXLENS-30-001..004 | Implement consensus algorithm producing `consensus_state`, `confidence`, `weights`, `quorum`, `rationale`; support states: NOT_AFFECTED, AFFECTED, FIXED, UNDER_INVESTIGATION, DISPUTED, INCONCLUSIVE. | Algorithm deterministic; unit/property tests cover conflict scenarios; rationale includes top evidences; docs drafted. | +| VEXLENS-30-006 | TODO | VEX Lens Guild, Findings Ledger Guild | VEXLENS-30-005, LEDGER-29-003 | Materialize consensus projection storage with idempotent workers triggered by VEX/Policy changes; expose change events for downstream consumers. | Projection generated for fixtures; backpressure metrics recorded; replay harness passes. | +| VEXLENS-30-007 | TODO | VEX Lens Guild | VEXLENS-30-006 | Expose APIs (`/vex/consensus`, `/vex/consensus/query`, `/vex/consensus/{id}`, `/vex/consensus/simulate`, `/vex/consensus/export`) with pagination, cost budgets, and OpenAPI docs. | APIs deployed with schema validation; integration tests cover filters/simulation/export; rate limits enforced. | +| VEXLENS-30-008 | TODO | VEX Lens Guild, Policy Guild | VEXLENS-30-006, POLICY-ENGINE-30-101 | Integrate consensus signals with Policy Engine (thresholds, suppression, simulation inputs) and Vuln Explorer detail view. | Policy consumes consensus via documented contract; Vuln Explorer shows consensus chip; e2e tests confirm suppression behavior. | +| VEXLENS-30-009 | TODO | VEX Lens Guild, Observability Guild | VEXLENS-30-006..008 | Instrument metrics (`vex_consensus_compute_latency`, `vex_consensus_disputed_total`, `vex_signature_verification_rate`), structured logs, and traces; publish dashboards/alerts. | Metrics/traces live; dashboards approved; alert thresholds configured. | +| VEXLENS-30-010 | TODO | VEX Lens Guild, QA Guild | VEXLENS-30-001..008 | Develop unit/property/integration/load tests (10M records), determinism harness, fuzz testing for malformed product trees. | Test suites green; load tests documented; determinism harness validated across two runs. 
| +| VEXLENS-30-011 | TODO | VEX Lens Guild, DevOps Guild | VEXLENS-30-006..009 | Provide deployment manifests, caching configuration, scaling guides, offline kit seeds, and runbooks. | Deployment docs merged; smoke deploy validated; offline kit updated; runbooks published. | + +## Advisory AI (Sprint 31) + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| VEXLENS-AIAI-31-001 | TODO | VEX Lens Guild | VEXLENS-30-005 | Expose consensus rationale API enhancements (policy factors, issuer details, mapping issues) for Advisory AI conflict explanations. | API returns structured factors; docs updated; integration tests cover tuples. | +| VEXLENS-AIAI-31-002 | TODO | VEX Lens Guild | VEXLENS-30-006 | Provide caching hooks for consensus lookups used by Advisory AI (batch endpoints, TTL hints). | Batch API published; caches instrumented; telemetry recorded. | + +## Orchestrator Dashboard + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| VEXLENS-ORCH-33-001 | TODO | VEX Lens Guild | ORCH-SVC-32-001, ORCH-SVC-32-003, ORCH-SVC-33-001 | Register `consensus_compute` job type with orchestrator, integrate worker SDK, and expose job planning hooks for consensus batches. | Job type documented; worker consumes orchestrator jobs; tests cover pause/retry; metrics exported. | +| VEXLENS-ORCH-34-001 | TODO | VEX Lens Guild | VEXLENS-ORCH-33-001, ORCH-SVC-34-002, ORCH-SVC-34-001 | Emit consensus completion events into orchestrator run ledger and provenance chain, including confidence metadata. | Ledger export includes consensus entries; events contain provenance; integration tests validate chain; docs cross-link to run-ledger. | + +## Export Center (Epic 10) + +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| VEXLENS-EXPORT-35-001 | TODO | VEX Lens Guild | VEXLENS-30-006, LEDGER-EXPORT-35-001 | Provide consensus snapshot API delivering deterministic JSONL (state, confidence, provenance) for exporter mirror bundles. | Snapshot endpoint deployed; determinism tests pass; schema documented; metrics/logs instrumented. | diff --git a/src/StellaOps.VulnExplorer.Api/AGENTS.md b/src/VulnExplorer/StellaOps.VulnExplorer.Api/AGENTS.md similarity index 87% rename from src/StellaOps.VulnExplorer.Api/AGENTS.md rename to src/VulnExplorer/StellaOps.VulnExplorer.Api/AGENTS.md index 5cf4d6e4..eefb8759 100644 --- a/src/StellaOps.VulnExplorer.Api/AGENTS.md +++ b/src/VulnExplorer/StellaOps.VulnExplorer.Api/AGENTS.md @@ -4,7 +4,7 @@ Expose policy-aware vulnerability listing, detail, simulation, workflow, and export APIs backed by the Findings Ledger and evidence services. Provide deterministic, RBAC-enforced endpoints that power Console, CLI, and automation workflows. ## Scope -- Service under `src/StellaOps.VulnExplorer.Api` (query engine, workflow endpoints, simulation bridge, export orchestrator). +- Service under `src/VulnExplorer/StellaOps.VulnExplorer.Api` (query engine, workflow endpoints, simulation bridge, export orchestrator). - Integration with Findings Ledger, Policy Engine, Concelier, Excititor, SBOM Service, Scheduler, and Authority. - Evidence bundle assembly and signing hand-off. @@ -16,7 +16,7 @@ Expose policy-aware vulnerability listing, detail, simulation, workflow, and exp 5.
**Secure** – RBAC/ABAC enforced server-side; exports signed; attachments served via scoped URLs. ## Collaboration -- Keep `src/StellaOps.VulnExplorer.Api/TASKS.md`, `SPRINTS.md` synchronized. +- Keep `src/VulnExplorer/StellaOps.VulnExplorer.Api/TASKS.md`, `../../docs/implplan/SPRINTS.md` synchronized. - Coordinate schemas with Findings Ledger, Console, CLI, and Docs; publish OpenAPI + JSON schemas. - Work with DevOps/Observability for performance dashboards and SLOs. diff --git a/src/StellaOps.VulnExplorer.Api/TASKS.md b/src/VulnExplorer/StellaOps.VulnExplorer.Api/TASKS.md similarity index 99% rename from src/StellaOps.VulnExplorer.Api/TASKS.md rename to src/VulnExplorer/StellaOps.VulnExplorer.Api/TASKS.md index 10c064b0..c743f26b 100644 --- a/src/StellaOps.VulnExplorer.Api/TASKS.md +++ b/src/VulnExplorer/StellaOps.VulnExplorer.Api/TASKS.md @@ -1,14 +1,14 @@ -# Vulnerability Explorer API Task Board — Epic 6 -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| VULN-API-29-001 | TODO | Vuln Explorer API Guild | LEDGER-29-001, GRAPH-INDEX-28-001 | Define OpenAPI spec (list/detail/query/simulation/workflow/export), query JSON schema, pagination/grouping contracts, and error codes. | OpenAPI + schemas committed; spectral lint passes; clients regenerated for Console/CLI; docs drafted. | -| VULN-API-29-002 | TODO | Vuln Explorer API Guild | VULN-API-29-001, LEDGER-29-003 | Implement list/query endpoints with policy parameter, grouping, server paging, caching, and cost budgets. | Endpoints return deterministic results; budgets enforced; integration tests cover filters/groupings; metrics logged. | -| VULN-API-29-003 | TODO | Vuln Explorer API Guild | VULN-API-29-002 | Implement detail endpoint aggregating evidence, policy rationale, paths (Graph Explorer deep link), and workflow summary. | Detail payload matches contract; evidence references raw doc ids; tests cover missing evidence. | -| VULN-API-29-004 | TODO | Vuln Explorer API Guild, Findings Ledger Guild | LEDGER-29-005 | Expose workflow endpoints (assign, comment, accept-risk, verify-fix, target-fix, reopen) that write ledger events with idempotency + validation. | Workflow APIs create ledger events, return updated projection; error handling documented; tests cover business rules. | -| VULN-API-29-005 | TODO | Vuln Explorer API Guild, Policy Guild | POLICY-ENGINE-27-001, VULN-API-29-002 | Implement simulation endpoint comparing `policy_from` vs `policy_to`, returning diffs without side effects; hook into Policy Engine batch eval. | Simulation returns delta sets; runtime under SLA; tests cover large queries; no ledger writes. | -| VULN-API-29-006 | TODO | Vuln Explorer API Guild | SBOM-CONSOLE-23-001, GRAPH-API-28-003 | Integrate resolver results with Graph Explorer: include shortest path metadata, line up deep-link parameters, expose `paths` array in details. | API returns path metadata; Graph Explorer links validated via e2e tests; docs updated. | -| VULN-API-29-007 | TODO | Vuln Explorer API Guild, Security Guild | AUTH-POLICY-27-001, AUTH-VULN-29-001 | Enforce RBAC/ABAC scopes; implement CSRF/anti-forgery checks for Console; secure attachment URLs; audit logging. | Unauthorized requests rejected; audit logs contain actor + change; security tests cover ABAC filters. | -| VULN-API-29-008 | TODO | Vuln Explorer API Guild | VULN-API-29-001..007 | Build export orchestrator producing signed bundles (manifest, NDJSON, checksums, signature). 
Integrate with Findings Ledger for evidence and Policy Engine metadata. | Export endpoint streams bundles, attaches signature; tests validate manifest + checksum; docs updated. | -| VULN-API-29-009 | TODO | Vuln Explorer API Guild, Observability Guild | VULN-API-29-002..008 | Instrument metrics (`vuln_list_latency`, `vuln_simulation_latency`, `vuln_export_duration`, `vuln_workflow_events_total`), structured logs, and traces; publish dashboards/alerts. | Metrics registered; dashboards live; alert thresholds documented; telemetry tests in CI. | -| VULN-API-29-010 | TODO | Vuln Explorer API Guild, QA Guild | VULN-API-29-002..008 | Provide unit/integration/perf tests (5M findings), fuzz query validation, determinism harness comparing repeated queries. | CI suite green; perf tests documented; determinism harness passes; bug budget set. | -| VULN-API-29-011 | TODO | Vuln Explorer API Guild, DevOps Guild | VULN-API-29-002..009 | Package deployment (Helm/Compose), health checks, CI smoke, offline kit steps, and scaling guidance. | Deployment artifacts merged; smoke deploy validated; scaling/backup docs produced. | +# Vulnerability Explorer API Task Board — Epic 6 +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| VULN-API-29-001 | TODO | Vuln Explorer API Guild | LEDGER-29-001, GRAPH-INDEX-28-001 | Define OpenAPI spec (list/detail/query/simulation/workflow/export), query JSON schema, pagination/grouping contracts, and error codes. | OpenAPI + schemas committed; spectral lint passes; clients regenerated for Console/CLI; docs drafted. | +| VULN-API-29-002 | TODO | Vuln Explorer API Guild | VULN-API-29-001, LEDGER-29-003 | Implement list/query endpoints with policy parameter, grouping, server paging, caching, and cost budgets. | Endpoints return deterministic results; budgets enforced; integration tests cover filters/groupings; metrics logged. | +| VULN-API-29-003 | TODO | Vuln Explorer API Guild | VULN-API-29-002 | Implement detail endpoint aggregating evidence, policy rationale, paths (Graph Explorer deep link), and workflow summary. | Detail payload matches contract; evidence references raw doc ids; tests cover missing evidence. | +| VULN-API-29-004 | TODO | Vuln Explorer API Guild, Findings Ledger Guild | LEDGER-29-005 | Expose workflow endpoints (assign, comment, accept-risk, verify-fix, target-fix, reopen) that write ledger events with idempotency + validation. | Workflow APIs create ledger events, return updated projection; error handling documented; tests cover business rules. | +| VULN-API-29-005 | TODO | Vuln Explorer API Guild, Policy Guild | POLICY-ENGINE-27-001, VULN-API-29-002 | Implement simulation endpoint comparing `policy_from` vs `policy_to`, returning diffs without side effects; hook into Policy Engine batch eval. | Simulation returns delta sets; runtime under SLA; tests cover large queries; no ledger writes. | +| VULN-API-29-006 | TODO | Vuln Explorer API Guild | SBOM-CONSOLE-23-001, GRAPH-API-28-003 | Integrate resolver results with Graph Explorer: include shortest path metadata, line up deep-link parameters, expose `paths` array in details. | API returns path metadata; Graph Explorer links validated via e2e tests; docs updated. | +| VULN-API-29-007 | TODO | Vuln Explorer API Guild, Security Guild | AUTH-POLICY-27-001, AUTH-VULN-29-001 | Enforce RBAC/ABAC scopes; implement CSRF/anti-forgery checks for Console; secure attachment URLs; audit logging. 
| Unauthorized requests rejected; audit logs contain actor + change; security tests cover ABAC filters. | +| VULN-API-29-008 | TODO | Vuln Explorer API Guild | VULN-API-29-001..007 | Build export orchestrator producing signed bundles (manifest, NDJSON, checksums, signature). Integrate with Findings Ledger for evidence and Policy Engine metadata. | Export endpoint streams bundles, attaches signature; tests validate manifest + checksum; docs updated. | +| VULN-API-29-009 | TODO | Vuln Explorer API Guild, Observability Guild | VULN-API-29-002..008 | Instrument metrics (`vuln_list_latency`, `vuln_simulation_latency`, `vuln_export_duration`, `vuln_workflow_events_total`), structured logs, and traces; publish dashboards/alerts. | Metrics registered; dashboards live; alert thresholds documented; telemetry tests in CI. | +| VULN-API-29-010 | TODO | Vuln Explorer API Guild, QA Guild | VULN-API-29-002..008 | Provide unit/integration/perf tests (5M findings), fuzz query validation, determinism harness comparing repeated queries. | CI suite green; perf tests documented; determinism harness passes; bug budget set. | +| VULN-API-29-011 | TODO | Vuln Explorer API Guild, DevOps Guild | VULN-API-29-002..009 | Package deployment (Helm/Compose), health checks, CI smoke, offline kit steps, and scaling guidance. | Deployment artifacts merged; smoke deploy validated; scaling/backup docs produced. | diff --git a/src/StellaOps.Web/.editorconfig b/src/Web/StellaOps.Web/.editorconfig similarity index 100% rename from src/StellaOps.Web/.editorconfig rename to src/Web/StellaOps.Web/.editorconfig diff --git a/src/StellaOps.Web/.gitignore b/src/Web/StellaOps.Web/.gitignore similarity index 88% rename from src/StellaOps.Web/.gitignore rename to src/Web/StellaOps.Web/.gitignore index 220bf23c..48326272 100644 --- a/src/StellaOps.Web/.gitignore +++ b/src/Web/StellaOps.Web/.gitignore @@ -1,3 +1,3 @@ -.cache/ -coverage/ -dist/ +.cache/ +coverage/ +dist/ diff --git a/src/StellaOps.Web/AGENTS.md b/src/Web/StellaOps.Web/AGENTS.md similarity index 93% rename from src/StellaOps.Web/AGENTS.md rename to src/Web/StellaOps.Web/AGENTS.md index 679d18e0..7d14038b 100644 --- a/src/StellaOps.Web/AGENTS.md +++ b/src/Web/StellaOps.Web/AGENTS.md @@ -1,24 +1,24 @@ -# StellaOps Web Frontend - -## Mission -Design and build the StellaOps web user experience that surfaces backend capabilities (Authority, Concelier, Exporters) through an offline-friendly Angular application. - -## Team Composition -- **UX Specialist** – defines user journeys, interaction patterns, accessibility guidelines, and visual design language. -- **Angular Engineers** – implement the SPA, integrate with backend APIs, and ensure deterministic builds suitable for air-gapped deployments. - -## Operating Principles -- Favor modular Angular architecture (feature modules, shared UI kit) with strong typing via latest TypeScript/Angular releases. -- Align UI flows with backend contracts; coordinate with Authority and Concelier teams for API changes. -- Keep assets and build outputs deterministic and cacheable for Offline Kit packaging. -- Track work using the local `TASKS.md` board; keep statuses (TODO/DOING/REVIEW/BLOCKED/DONE) up to date. - -## Key Paths -- `src/StellaOps.Web` — Angular workspace (to be scaffolded). -- `docs/` — UX specs and mockups (to be added). -- `ops/` — Web deployment manifests for air-gapped environments (future). - -## Coordination -- Sync with DevEx for project scaffolding and build pipelines. 
-- Partner with Docs Guild to translate UX decisions into operator guides. -- Collaborate with Security Guild to validate authentication flows and session handling. +# StellaOps Web Frontend + +## Mission +Design and build the StellaOps web user experience that surfaces backend capabilities (Authority, Concelier, Exporters) through an offline-friendly Angular application. + +## Team Composition +- **UX Specialist** – defines user journeys, interaction patterns, accessibility guidelines, and visual design language. +- **Angular Engineers** – implement the SPA, integrate with backend APIs, and ensure deterministic builds suitable for air-gapped deployments. + +## Operating Principles +- Favor modular Angular architecture (feature modules, shared UI kit) with strong typing via latest TypeScript/Angular releases. +- Align UI flows with backend contracts; coordinate with Authority and Concelier teams for API changes. +- Keep assets and build outputs deterministic and cacheable for Offline Kit packaging. +- Track work using the local `TASKS.md` board; keep statuses (TODO/DOING/REVIEW/BLOCKED/DONE) up to date. + +## Key Paths +- `src/Web/StellaOps.Web` — Angular workspace (to be scaffolded). +- `docs/` — UX specs and mockups (to be added). +- `ops/` — Web deployment manifests for air-gapped environments (future). + +## Coordination +- Sync with DevEx for project scaffolding and build pipelines. +- Partner with Docs Guild to translate UX decisions into operator guides. +- Collaborate with Security Guild to validate authentication flows and session handling. diff --git a/src/StellaOps.Web/README.md b/src/Web/StellaOps.Web/README.md similarity index 100% rename from src/StellaOps.Web/README.md rename to src/Web/StellaOps.Web/README.md diff --git a/src/StellaOps.Web/TASKS.md b/src/Web/StellaOps.Web/TASKS.md similarity index 99% rename from src/StellaOps.Web/TASKS.md rename to src/Web/StellaOps.Web/TASKS.md index 5f05185c..2a5e419e 100644 --- a/src/StellaOps.Web/TASKS.md +++ b/src/Web/StellaOps.Web/TASKS.md @@ -1,179 +1,179 @@ -# TASKS — Epic 1: Aggregation-Only Contract -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| WEB-AOC-19-001 `Shared AOC guard primitives` | DOING (2025-10-26) | BE-Base Platform Guild | — | Provide `AOCForbiddenKeys`, guard middleware/interceptor hooks, and error types (`AOCError`, `AOCViolationCode`) for ingestion services. Publish sample usage + analyzer to ensure guard registered. | -> 2025-10-26: Introduced `StellaOps.Aoc` library with forbidden key list, guard result/options, and baseline write guard + tests. Middleware/analyzer wiring still pending. -| WEB-AOC-19-002 `Provenance & signature helpers` | TODO | BE-Base Platform Guild | WEB-AOC-19-001 | Ship `ProvenanceBuilder`, checksum utilities, and signature verification helper integrated with guard logging. Cover DSSE/CMS formats with unit tests. | -| WEB-AOC-19-003 `Analyzer + test fixtures` | TODO | QA Guild, BE-Base Platform Guild | WEB-AOC-19-001 | Author Roslyn analyzer preventing ingestion modules from writing forbidden keys without guard, and provide shared test fixtures for guard validation used by Concelier/Excititor service tests. | -> Docs alignment (2025-10-26): Analyzer expectations detailed in `docs/ingestion/aggregation-only-contract.md` §3/5; CI integration tracked via DEVOPS-AOC-19-001. 
- -## Policy Engine v2 - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| WEB-POLICY-20-001 `Policy endpoints` | TODO | BE-Base Platform Guild, Policy Guild | POLICY-ENGINE-20-001, POLICY-ENGINE-20-004 | Implement Policy CRUD/compile/run/simulate/findings/explain endpoints with OpenAPI, tenant scoping, and service identity enforcement. | -| WEB-POLICY-20-002 `Pagination & filters` | TODO | BE-Base Platform Guild | WEB-POLICY-20-001 | Add pagination, filtering, sorting, and tenant guards to listings for policies, runs, and findings; include deterministic ordering and query diagnostics. | -| WEB-POLICY-20-003 `Error mapping` | TODO | BE-Base Platform Guild, QA Guild | WEB-POLICY-20-001 | Map engine errors to `ERR_POL_*` responses with consistent payloads and contract tests; expose correlation IDs in headers. | -| WEB-POLICY-20-004 `Simulate rate limits` | TODO | Platform Reliability Guild | WEB-POLICY-20-001, WEB-POLICY-20-002 | Introduce adaptive rate limiting + quotas for simulation endpoints, expose metrics, and document retry headers. | - -## Graph Explorer v1 - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| WEB-GRAPH-21-001 `Graph endpoints` | BLOCKED (2025-10-27) | BE-Base Platform Guild, Graph Platform Guild | GRAPH-API-28-003, AUTH-VULN-24-001 | Add gateway routes for graph versions/viewport/node/path/diff/export endpoints with tenant enforcement, scope checks, and streaming responses; proxy Policy Engine diff toggles without inline logic. Adopt `StellaOpsScopes` constants for RBAC enforcement. | -> 2025-10-27: Graph API gateway can’t proxy until upstream Graph service (`GRAPH-API-28-003`) and Authority scope update (`AUTH-VULN-24-001`) publish stable contracts. -| WEB-GRAPH-21-002 `Request validation` | BLOCKED (2025-10-27) | BE-Base Platform Guild | WEB-GRAPH-21-001 | Implement bbox/zoom/path parameter validation, pagination tokens, and deterministic ordering; add contract tests for boundary conditions. | -> 2025-10-27: Blocked on `WEB-GRAPH-21-001`; request envelope still undefined. -| WEB-GRAPH-21-003 `Error mapping & exports` | BLOCKED (2025-10-27) | BE-Base Platform Guild, QA Guild | WEB-GRAPH-21-001 | Map graph service errors to `ERR_Graph_*`, support GraphML/JSONL export streaming, and document rate limits. | -> 2025-10-27: Depends on core Graph proxy route definitions. -| WEB-GRAPH-21-004 `Overlay pass-through` | BLOCKED (2025-10-27) | BE-Base Platform Guild, Policy Guild | WEB-GRAPH-21-001, POLICY-ENGINE-30-002 | Proxy Policy Engine overlay responses for graph endpoints while keeping gateway stateless; maintain streaming budgets and latency SLOs. | -> 2025-10-27: Requires base Graph routing plus Policy overlay schema (`POLICY-ENGINE-30-002`). - -## Graph Explorer (Sprint 28) - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| WEB-GRAPH-24-001 `Gateway proxy refresh` | TODO | BE-Base Platform Guild | GRAPH-API-28-001, AUTH-GRAPH-21-001 | Gateway proxy for Graph API and Policy overlays with RBAC, caching, pagination, ETags, and streaming; zero business logic. | -| WEB-GRAPH-24-004 `Telemetry aggregation` | TODO | BE-Base Platform Guild, Observability Guild | WEB-GRAPH-24-001, DEVOPS-GRAPH-28-003 | Collect gateway metrics/logs (tile latency, proxy errors, overlay cache stats) and forward to dashboards; document sampling strategy. 
| - -## Link-Not-Merge v1 - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| WEB-LNM-21-001 `Advisory observation endpoints` | TODO | BE-Base Platform Guild, Concelier WebService Guild | CONCELIER-LNM-21-201 | Surface new `/advisories/*` APIs through gateway with caching, pagination, and RBAC enforcement (`advisory:read`). | -| WEB-LNM-21-002 `VEX observation endpoints` | TODO | BE-Base Platform Guild, Excititor WebService Guild | EXCITITOR-LNM-21-201 | Expose `/vex/*` read APIs with evidence routes and export handlers; map `ERR_AGG_*` codes. | -| WEB-LNM-21-003 `Policy evidence aggregation` | TODO | BE-Base Platform Guild, Policy Guild | POLICY-ENGINE-40-001 | Provide combined endpoint for Console to fetch policy result + source evidence (advisory + VEX linksets) for a component. | - -## Policy Engine + Editor v1 (Epic 5) - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| WEB-POLICY-23-001 `Policy pack CRUD` | BLOCKED (2025-10-29) | BE-Base Platform Guild, Policy Guild | POLICY-GATEWAY-18-001..002 | Implement API endpoints for creating/listing/fetching policy packs and revisions (`/policy/packs`, `/policy/packs/{id}/revisions`) with pagination, RBAC, and AOC metadata exposure. (Tracked via Sprint 18.5 gateway tasks.) | -| WEB-POLICY-23-002 `Activation & scope` | BLOCKED (2025-10-29) | BE-Base Platform Guild | POLICY-GATEWAY-18-003 | Add activation endpoint with scope windows, conflict checks, and optional 2-person approval integration; emit events on success. (Tracked via Sprint 18.5 gateway tasks.) | -| WEB-POLICY-23-003 `Simulation & evaluation` | TODO | BE-Base Platform Guild | POLICY-ENGINE-50-002 | Provide `/policy/simulate` and `/policy/evaluate` endpoints with streaming responses, rate limiting, and error mapping. | -| WEB-POLICY-23-004 `Explain retrieval` | TODO | BE-Base Platform Guild | POLICY-ENGINE-50-006 | Expose explain history endpoints (`/policy/runs`, `/policy/runs/{id}`) including decision tree, sources consulted, and AOC chain. | - -## Graph & Vuln Explorer v1 - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| WEB-GRAPH-24-001 `Graph endpoints` | TODO | BE-Base Platform Guild, SBOM Service Guild | SBOM-GRAPH-24-002 | Implement `/graph/assets/*` endpoints (snapshots, adjacency, search) with pagination, ETags, and tenant scoping while acting as a pure proxy. | -| WEB-GRAPH-24-004 `AOC enrichers` | TODO | BE-Base Platform Guild | WEB-GRAPH-24-001 | Embed AOC summaries sourced from overlay services; ensure gateway does not compute derived severity or hints. | - -## StellaOps Console (Sprint 23) - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| WEB-CONSOLE-23-001 `Global posture endpoints` | TODO | BE-Base Platform Guild, Product Analytics Guild | CONCELIER-CONSOLE-23-001, EXCITITOR-CONSOLE-23-001, POLICY-CONSOLE-23-001, SBOM-CONSOLE-23-001, SCHED-CONSOLE-23-001 | Provide consolidated `/console/dashboard` and `/console/filters` APIs returning tenant-scoped aggregates (findings by severity, VEX override counts, advisory deltas, run health, policy change log). Enforce AOC labelling, deterministic ordering, and cursor-based pagination for drill-down hints. 
| -| WEB-CONSOLE-23-002 `Live status & SSE proxy` | TODO | BE-Base Platform Guild, Scheduler Guild | SCHED-CONSOLE-23-001, DEVOPS-CONSOLE-23-001 | Expose `/console/status` polling endpoint and `/console/runs/{id}/stream` SSE/WebSocket proxy with heartbeat/backoff, queue lag metrics, and auth scope enforcement. Surface request IDs + retry headers. | -| WEB-CONSOLE-23-003 `Evidence export orchestrator` | TODO | BE-Base Platform Guild, Policy Guild | EXPORT-CONSOLE-23-001, POLICY-CONSOLE-23-001 | Add `/console/exports` POST/GET routes coordinating evidence bundle creation, streaming CSV/JSON exports, checksum manifest retrieval, and signed attestation references. Ensure requests honor tenant + policy scopes and expose job tracking metadata. | -| WEB-CONSOLE-23-004 `Global search router` | TODO | BE-Base Platform Guild | CONCELIER-CONSOLE-23-001, EXCITITOR-CONSOLE-23-001, SBOM-CONSOLE-23-001 | Implement `/console/search` endpoint accepting CVE/GHSA/PURL/SBOM identifiers, performing fan-out queries with caching, ranking, and deterministic tie-breaking. Return typed results for Console navigation; respect result caps and latency SLOs. | -| WEB-CONSOLE-23-005 `Downloads manifest API` | TODO | BE-Base Platform Guild, DevOps Guild | DOWNLOADS-CONSOLE-23-001, DEVOPS-CONSOLE-23-002 | Serve `/console/downloads` JSON manifest (images, charts, offline bundles) sourced from signed registry metadata; include integrity hashes, release notes links, and offline instructions. Provide caching headers and documentation. | - -## Policy Studio (Sprint 27) - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| WEB-POLICY-27-001 `Policy registry proxy` | TODO | BE-Base Platform Guild, Policy Registry Guild | REGISTRY-API-27-001, AUTH-POLICY-27-001 | Surface Policy Registry APIs (`/policy/workspaces`, `/policy/versions`, `/policy/reviews`, `/policy/registry`) through gateway with tenant scoping, RBAC, and request validation; ensure streaming downloads for evidence bundles. | -| WEB-POLICY-27-002 `Review & approval routes` | TODO | BE-Base Platform Guild | WEB-POLICY-27-001, REGISTRY-API-27-006 | Implement review lifecycle endpoints (open, comment, approve/reject) with audit headers, comment pagination, and webhook fan-out. | -| WEB-POLICY-27-003 `Simulation orchestration endpoints` | TODO | BE-Base Platform Guild, Scheduler Guild | REGISTRY-API-27-005, SCHED-CONSOLE-27-001 | Expose quick/batch simulation endpoints with SSE progress (`/policy/simulations/{runId}/stream`), cursor-based result pagination, and manifest download routes. | -| WEB-POLICY-27-004 `Publish & promote controls` | TODO | BE-Base Platform Guild, Security Guild | REGISTRY-API-27-007, REGISTRY-API-27-008, AUTH-POLICY-27-002 | Add publish/sign/promote/rollback endpoints with idempotent request IDs, canary parameters, and environment bindings; enforce scope checks and emit structured events. | -| WEB-POLICY-27-005 `Policy Studio telemetry` | TODO | BE-Base Platform Guild, Observability Guild | WEB-POLICY-27-001..004, TELEMETRY-CONSOLE-27-001 | Instrument metrics/logs for compile latency, simulation queue depth, approval latency, promotion actions; expose aggregated dashboards and correlation IDs for Console. 
| - -## Exceptions v1 (Epic 7) - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| WEB-EXC-25-001 `Exceptions CRUD & workflow` | TODO | BE-Base Platform Guild | POLICY-ENGINE-70-002, AUTH-EXC-25-001 | Implement `/exceptions` API (create, propose, approve, revoke, list, history) with validation, pagination, and audit logging. | -| WEB-EXC-25-002 `Policy integration surfaces` | TODO | BE-Base Platform Guild | POLICY-ENGINE-70-001 | Extend `/policy/effective` and `/policy/simulate` responses to include exception metadata and accept overrides for simulations. | -| WEB-EXC-25-003 `Notifications & events` | TODO | BE-Base Platform Guild, Platform Events Guild | WEB-EXC-25-001 | Publish `exception.*` events, integrate with notification hooks, enforce rate limits. | - -## Reachability v1 - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| WEB-SIG-26-001 `Signals proxy endpoints` | TODO | BE-Base Platform Guild, Signals Guild | SIGNALS-24-001 | Surface `/signals/callgraphs`, `/signals/facts` read/write endpoints with pagination, ETags, and RBAC. | -| WEB-SIG-26-002 `Reachability joins` | TODO | BE-Base Platform Guild | WEB-SIG-26-001, POLICY-ENGINE-80-001 | Extend `/policy/effective` and `/vuln/explorer` responses to include reachability scores/states and allow filtering. | -| WEB-SIG-26-003 `Simulation hooks` | TODO | BE-Base Platform Guild | WEB-SIG-26-002, POLICY-ENGINE-80-001 | Add reachability override parameters to `/policy/simulate` and related APIs for what-if analysis. | - -## Vulnerability Explorer (Sprint 29) - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| WEB-VULN-29-001 `Vuln API routing` | TODO | BE-Base Platform Guild | VULN-API-29-001, AUTH-VULN-29-001 | Expose `/vuln/*` endpoints via gateway with tenant scoping, RBAC/ABAC enforcement, anti-forgery headers, and request logging. | -| WEB-VULN-29-002 `Ledger proxy headers` | TODO | BE-Base Platform Guild, Findings Ledger Guild | WEB-VULN-29-001, LEDGER-29-002 | Forward workflow actions to Findings Ledger with idempotency headers and correlation IDs; handle retries/backoff. | -| WEB-VULN-29-003 `Simulation + export routing` | TODO | BE-Base Platform Guild | VULN-API-29-005, VULN-API-29-008 | Provide simulation and export orchestration routes with SSE/progress headers, signed download links, and request budgeting. | -| WEB-VULN-29-004 `Telemetry aggregation` | TODO | BE-Base Platform Guild, Observability Guild | WEB-VULN-29-001..003, DEVOPS-VULN-29-003 | Emit gateway metrics/logs (latency, error rates, export duration), propagate query hashes for analytics dashboards. | -| WEB-VEX-30-007 `VEX consensus routing` | TODO | BE-Base Platform Guild, VEX Lens Guild | VEXLENS-30-007, AUTH-VULN-24-001 | Route `/vex/consensus` APIs with tenant RBAC/ABAC, caching, and streaming; surface telemetry and trace IDs without gateway-side overlay logic. | - -## Advisory AI (Sprint 31) - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| WEB-AIAI-31-001 `API routing` | TODO | BE-Base Platform Guild | AIAI-31-006, AUTH-VULN-29-001 | Route `/advisory/ai/*` endpoints through gateway with RBAC/ABAC, rate limits, and telemetry headers. | -| WEB-AIAI-31-002 `Batch orchestration` | TODO | BE-Base Platform Guild | AIAI-31-006 | Provide batching job handlers and streaming responses for CLI automation with retry/backoff. 
| -| WEB-AIAI-31-003 `Telemetry & audit` | TODO | BE-Base Platform Guild, Observability Guild | WEB-AIAI-31-001, DEVOPS-AIAI-31-001 | Emit metrics/logs (latency, guardrail blocks, validation failures) and forward anonymized prompt hashes to analytics. | - -## Orchestrator Dashboard - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| WEB-ORCH-32-001 `Read-only routing` | TODO | BE-Base Platform Guild | ORCH-SVC-32-003, AUTH-ORCH-32-001 | Expose `/orchestrator/sources|runs|jobs|dag` read endpoints via gateway with tenant scoping, caching, and viewer scope enforcement. | -| WEB-ORCH-33-001 `Control + backfill actions` | TODO | BE-Base Platform Guild | WEB-ORCH-32-001, ORCH-SVC-33-001, AUTH-ORCH-33-001 | Add POST action routes (`pause|resume|test`, `retry|cancel`, `jobs/tail`, `backfill preview`) with proper error mapping and SSE bridging. | -| WEB-ORCH-34-001 `Quotas & telemetry` | TODO | BE-Base Platform Guild | WEB-ORCH-33-001, ORCH-SVC-33-003, ORCH-SVC-34-001 | Surface quotas/backfill APIs, queue/backpressure metrics, and error clustering routes with admin scope enforcement and audit logging. | - -## Export Center -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| WEB-EXPORT-35-001 `Export routing` | TODO | BE-Base Platform Guild | EXPORT-SVC-35-006, AUTH-EXPORT-35-001 | Surface Export Center APIs (profiles/runs/download) through gateway with tenant scoping, streaming support, and viewer/operator scope checks. | -| WEB-EXPORT-36-001 `Distribution endpoints` | TODO | BE-Base Platform Guild | WEB-EXPORT-35-001, EXPORT-SVC-36-004 | Add distribution routes (OCI/object storage), manifest/provenance proxies, and signed URL generation. | -| WEB-EXPORT-37-001 `Scheduling & verification` | TODO | BE-Base Platform Guild | WEB-EXPORT-36-001, EXPORT-SVC-37-003 | Expose scheduling, retention, encryption parameters, and verification endpoints with admin scope enforcement and audit logs. | - -## Notifications Studio (Epic 11) - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| WEB-NOTIFY-38-001 `Gateway routing` | TODO | BE-Base Platform Guild | NOTIFY-SVC-38-004, AUTH-NOTIFY-38-001 | Route notifier APIs (`/notifications/*`) and WS feed through gateway with tenant scoping, viewer/operator scope enforcement, and SSE/WebSocket bridging. | -| WEB-NOTIFY-39-001 `Digest & simulation endpoints` | TODO | BE-Base Platform Guild | WEB-NOTIFY-38-001, NOTIFY-SVC-39-001..003 | Surface digest scheduling, quiet-hour/throttle management, and simulation APIs; ensure rate limits and audit logging. | -| WEB-NOTIFY-40-001 `Escalations & localization` | TODO | BE-Base Platform Guild | WEB-NOTIFY-39-001, NOTIFY-SVC-40-001..003 | Expose escalation, localization, channel health, and ack verification endpoints with admin scope enforcement and signed token validation. | - -## Containerized Distribution (Epic 13) - -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| WEB-CONTAINERS-44-001 `Config discovery & quickstart flag` | TODO | BE-Base Platform Guild | COMPOSE-44-001 | Expose `/welcome` state, config discovery endpoint (safe values), and `QUICKSTART_MODE` handling for Console banner; add `/health/liveness`, `/health/readiness`, `/version` if missing. 
| -| WEB-CONTAINERS-45-001 `Helm readiness support` | TODO | BE-Base Platform Guild | HELM-45-001 | Ensure readiness endpoints reflect DB/queue readiness, add feature flag toggles via config map, and document NetworkPolicy ports. | -| WEB-CONTAINERS-46-001 `Air-gap hardening` | TODO | BE-Base Platform Guild | DEPLOY-AIRGAP-46-001 | Provide offline-friendly asset serving (no CDN), allow overriding object store endpoints via env, and document fallback behavior. | - -## Authority-Backed Scopes & Tenancy (Epic 14) -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| WEB-TEN-47-001 `Auth middleware` | TODO | BE-Base Platform Guild | AUTH-TEN-47-001 | Implement JWT verification, tenant activation from headers, scope matching, and decision audit emission for all API endpoints. | -| WEB-TEN-48-001 `Tenant context propagation` | TODO | BE-Base Platform Guild | WEB-TEN-47-001 | Set DB session `stella.tenant_id`, enforce tenant/project checks on persistence, prefix object storage paths, and stamp audit metadata. | -| WEB-TEN-49-001 `ABAC & audit API` | TODO | BE-Base Platform Guild, Policy Guild | POLICY-TEN-48-001 | Integrate optional ABAC overlay with Policy Engine, expose `/audit/decisions` API, and support service token minting endpoints. | - -## Observability & Forensics (Epic 15) -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| WEB-OBS-50-001 `Telemetry core adoption` | TODO | BE-Base Platform Guild, Observability Guild | TELEMETRY-OBS-50-001, TELEMETRY-OBS-50-002 | Integrate `StellaOps.Telemetry.Core` into gateway host, replace ad-hoc logging, ensure all routes emit trace/span IDs, tenant context, and scrubbed payload previews. | -| WEB-OBS-51-001 `Observability health endpoints` | TODO | BE-Base Platform Guild | WEB-OBS-50-001, TELEMETRY-OBS-51-001 | Implement `/obs/health` and `/obs/slo` aggregations, pulling metrics from Prometheus/collector APIs, including burn-rate signals and exemplar links for Console widgets. | -| WEB-OBS-52-001 `Trace & log proxies` | TODO | BE-Base Platform Guild | WEB-OBS-50-001, TIMELINE-OBS-52-003 | Deliver `/obs/trace/:id` and `/obs/logs` proxy endpoints with guardrails (time window limits, tenant scoping) forwarding to timeline indexer + log store with signed URLs. | -| WEB-OBS-54-001 `Evidence & attestation bridges` | TODO | BE-Base Platform Guild | EVID-OBS-54-001, PROV-OBS-54-001 | Provide `/evidence/*` and `/attestations/*` pass-through endpoints, enforce `timeline:read`, `evidence:read`, `attest:read` scopes, append provenance headers, and surface verification summaries. | -| WEB-OBS-55-001 `Incident mode controls` | TODO | BE-Base Platform Guild, Ops Guild | WEB-OBS-50-001, TELEMETRY-OBS-55-001, DEVOPS-OBS-55-001 | Add `/obs/incident-mode` API (enable/disable/status) with audit trail, sampling override, retention bump preview, and CLI/Console hooks. | -| WEB-OBS-56-001 `Sealed status surfaces` | TODO | BE-Base Platform Guild, AirGap Guild | WEB-OBS-50-001, AIRGAP-CTL-56-002 | Extend telemetry core integration to expose sealed/unsealed status APIs, drift metrics, and Console widgets without leaking sealed-mode secrets. | - -## SDKs & OpenAPI (Epic 17) -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| WEB-OAS-61-001 `Discovery endpoint` | TODO | BE-Base Platform Guild | OAS-61-002 | Implement `GET /.well-known/openapi` returning gateway spec with version metadata, cache headers, and signed ETag. 
| -| WEB-OAS-61-002 `Standard error envelope` | TODO | BE-Base Platform Guild | APIGOV-61-001 | Migrate gateway errors to standard envelope and update examples; ensure telemetry logs include `error.code`. | -| WEB-OAS-62-001 `Pagination & idempotency alignment` | TODO | BE-Base Platform Guild | WEB-OAS-61-002 | Normalize all endpoints to cursor pagination, expose `Idempotency-Key` support, and document rate-limit headers. | -| WEB-OAS-63-001 `Deprecation support` | TODO | BE-Base Platform Guild, API Governance Guild | APIGOV-63-001 | Add deprecation header middleware, Sunset link emission, and observability metrics for deprecated routes. | - -## Risk Profiles (Epic 18) -| ID | Status | Owner(s) | Depends on | Notes | -|----|--------|----------|------------|-------| -| WEB-RISK-66-001 `Risk API routing` | TODO | BE-Base Platform Guild, Policy Guild | POLICY-RISK-67-002 | Expose risk profile/results endpoints through gateway with tenant scoping, pagination, and rate limiting. | -| WEB-RISK-66-002 `Explainability downloads` | TODO | BE-Base Platform Guild, Risk Engine Guild | RISK-ENGINE-68-002 | Add signed URL handling for explanation blobs and enforce scope checks. | -| WEB-RISK-67-001 `Risk status endpoint` | TODO | BE-Base Platform Guild | WEB-RISK-66-001 | Provide aggregated risk stats (`/risk/status`) for Console dashboards (counts per severity, last computation). | -| WEB-RISK-68-001 `Notification hooks` | TODO | BE-Base Platform Guild, Notifications Guild | NOTIFY-RISK-66-001 | Emit events on severity transitions via gateway to notifier bus with trace metadata. | +# TASKS — Epic 1: Aggregation-Only Contract +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| WEB-AOC-19-001 `Shared AOC guard primitives` | DOING (2025-10-26) | BE-Base Platform Guild | — | Provide `AOCForbiddenKeys`, guard middleware/interceptor hooks, and error types (`AOCError`, `AOCViolationCode`) for ingestion services. Publish sample usage + analyzer to ensure guard registered. | +> 2025-10-26: Introduced `StellaOps.Aoc` library with forbidden key list, guard result/options, and baseline write guard + tests. Middleware/analyzer wiring still pending. +| WEB-AOC-19-002 `Provenance & signature helpers` | TODO | BE-Base Platform Guild | WEB-AOC-19-001 | Ship `ProvenanceBuilder`, checksum utilities, and signature verification helper integrated with guard logging. Cover DSSE/CMS formats with unit tests. | +| WEB-AOC-19-003 `Analyzer + test fixtures` | TODO | QA Guild, BE-Base Platform Guild | WEB-AOC-19-001 | Author Roslyn analyzer preventing ingestion modules from writing forbidden keys without guard, and provide shared test fixtures for guard validation used by Concelier/Excititor service tests. | +> Docs alignment (2025-10-26): Analyzer expectations detailed in `docs/ingestion/aggregation-only-contract.md` §3/5; CI integration tracked via DEVOPS-AOC-19-001. + +## Policy Engine v2 + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| WEB-POLICY-20-001 `Policy endpoints` | TODO | BE-Base Platform Guild, Policy Guild | POLICY-ENGINE-20-001, POLICY-ENGINE-20-004 | Implement Policy CRUD/compile/run/simulate/findings/explain endpoints with OpenAPI, tenant scoping, and service identity enforcement. 
| +| WEB-POLICY-20-002 `Pagination & filters` | TODO | BE-Base Platform Guild | WEB-POLICY-20-001 | Add pagination, filtering, sorting, and tenant guards to listings for policies, runs, and findings; include deterministic ordering and query diagnostics. | +| WEB-POLICY-20-003 `Error mapping` | TODO | BE-Base Platform Guild, QA Guild | WEB-POLICY-20-001 | Map engine errors to `ERR_POL_*` responses with consistent payloads and contract tests; expose correlation IDs in headers. | +| WEB-POLICY-20-004 `Simulate rate limits` | TODO | Platform Reliability Guild | WEB-POLICY-20-001, WEB-POLICY-20-002 | Introduce adaptive rate limiting + quotas for simulation endpoints, expose metrics, and document retry headers. | + +## Graph Explorer v1 + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| WEB-GRAPH-21-001 `Graph endpoints` | BLOCKED (2025-10-27) | BE-Base Platform Guild, Graph Platform Guild | GRAPH-API-28-003, AUTH-VULN-24-001 | Add gateway routes for graph versions/viewport/node/path/diff/export endpoints with tenant enforcement, scope checks, and streaming responses; proxy Policy Engine diff toggles without inline logic. Adopt `StellaOpsScopes` constants for RBAC enforcement. | +> 2025-10-27: Graph API gateway can’t proxy until upstream Graph service (`GRAPH-API-28-003`) and Authority scope update (`AUTH-VULN-24-001`) publish stable contracts. +| WEB-GRAPH-21-002 `Request validation` | BLOCKED (2025-10-27) | BE-Base Platform Guild | WEB-GRAPH-21-001 | Implement bbox/zoom/path parameter validation, pagination tokens, and deterministic ordering; add contract tests for boundary conditions. | +> 2025-10-27: Blocked on `WEB-GRAPH-21-001`; request envelope still undefined. +| WEB-GRAPH-21-003 `Error mapping & exports` | BLOCKED (2025-10-27) | BE-Base Platform Guild, QA Guild | WEB-GRAPH-21-001 | Map graph service errors to `ERR_Graph_*`, support GraphML/JSONL export streaming, and document rate limits. | +> 2025-10-27: Depends on core Graph proxy route definitions. +| WEB-GRAPH-21-004 `Overlay pass-through` | BLOCKED (2025-10-27) | BE-Base Platform Guild, Policy Guild | WEB-GRAPH-21-001, POLICY-ENGINE-30-002 | Proxy Policy Engine overlay responses for graph endpoints while keeping gateway stateless; maintain streaming budgets and latency SLOs. | +> 2025-10-27: Requires base Graph routing plus Policy overlay schema (`POLICY-ENGINE-30-002`). + +## Graph Explorer (Sprint 28) + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| WEB-GRAPH-24-001 `Gateway proxy refresh` | TODO | BE-Base Platform Guild | GRAPH-API-28-001, AUTH-GRAPH-21-001 | Gateway proxy for Graph API and Policy overlays with RBAC, caching, pagination, ETags, and streaming; zero business logic. | +| WEB-GRAPH-24-004 `Telemetry aggregation` | TODO | BE-Base Platform Guild, Observability Guild | WEB-GRAPH-24-001, DEVOPS-GRAPH-28-003 | Collect gateway metrics/logs (tile latency, proxy errors, overlay cache stats) and forward to dashboards; document sampling strategy. | + +## Link-Not-Merge v1 + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| WEB-LNM-21-001 `Advisory observation endpoints` | TODO | BE-Base Platform Guild, Concelier WebService Guild | CONCELIER-LNM-21-201 | Surface new `/advisories/*` APIs through gateway with caching, pagination, and RBAC enforcement (`advisory:read`). 
| +| WEB-LNM-21-002 `VEX observation endpoints` | TODO | BE-Base Platform Guild, Excititor WebService Guild | EXCITITOR-LNM-21-201 | Expose `/vex/*` read APIs with evidence routes and export handlers; map `ERR_AGG_*` codes. | +| WEB-LNM-21-003 `Policy evidence aggregation` | TODO | BE-Base Platform Guild, Policy Guild | POLICY-ENGINE-40-001 | Provide combined endpoint for Console to fetch policy result + source evidence (advisory + VEX linksets) for a component. | + +## Policy Engine + Editor v1 (Epic 5) + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| WEB-POLICY-23-001 `Policy pack CRUD` | BLOCKED (2025-10-29) | BE-Base Platform Guild, Policy Guild | POLICY-GATEWAY-18-001..002 | Implement API endpoints for creating/listing/fetching policy packs and revisions (`/policy/packs`, `/policy/packs/{id}/revisions`) with pagination, RBAC, and AOC metadata exposure. (Tracked via Sprint 18.5 gateway tasks.) | +| WEB-POLICY-23-002 `Activation & scope` | BLOCKED (2025-10-29) | BE-Base Platform Guild | POLICY-GATEWAY-18-003 | Add activation endpoint with scope windows, conflict checks, and optional 2-person approval integration; emit events on success. (Tracked via Sprint 18.5 gateway tasks.) | +| WEB-POLICY-23-003 `Simulation & evaluation` | TODO | BE-Base Platform Guild | POLICY-ENGINE-50-002 | Provide `/policy/simulate` and `/policy/evaluate` endpoints with streaming responses, rate limiting, and error mapping. | +| WEB-POLICY-23-004 `Explain retrieval` | TODO | BE-Base Platform Guild | POLICY-ENGINE-50-006 | Expose explain history endpoints (`/policy/runs`, `/policy/runs/{id}`) including decision tree, sources consulted, and AOC chain. | + +## Graph & Vuln Explorer v1 + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| WEB-GRAPH-24-001 `Graph endpoints` | TODO | BE-Base Platform Guild, SBOM Service Guild | SBOM-GRAPH-24-002 | Implement `/graph/assets/*` endpoints (snapshots, adjacency, search) with pagination, ETags, and tenant scoping while acting as a pure proxy. | +| WEB-GRAPH-24-004 `AOC enrichers` | TODO | BE-Base Platform Guild | WEB-GRAPH-24-001 | Embed AOC summaries sourced from overlay services; ensure gateway does not compute derived severity or hints. | + +## StellaOps Console (Sprint 23) + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| WEB-CONSOLE-23-001 `Global posture endpoints` | TODO | BE-Base Platform Guild, Product Analytics Guild | CONCELIER-CONSOLE-23-001, EXCITITOR-CONSOLE-23-001, POLICY-CONSOLE-23-001, SBOM-CONSOLE-23-001, SCHED-CONSOLE-23-001 | Provide consolidated `/console/dashboard` and `/console/filters` APIs returning tenant-scoped aggregates (findings by severity, VEX override counts, advisory deltas, run health, policy change log). Enforce AOC labelling, deterministic ordering, and cursor-based pagination for drill-down hints. | +| WEB-CONSOLE-23-002 `Live status & SSE proxy` | TODO | BE-Base Platform Guild, Scheduler Guild | SCHED-CONSOLE-23-001, DEVOPS-CONSOLE-23-001 | Expose `/console/status` polling endpoint and `/console/runs/{id}/stream` SSE/WebSocket proxy with heartbeat/backoff, queue lag metrics, and auth scope enforcement. Surface request IDs + retry headers. 
| +| WEB-CONSOLE-23-003 `Evidence export orchestrator` | TODO | BE-Base Platform Guild, Policy Guild | EXPORT-CONSOLE-23-001, POLICY-CONSOLE-23-001 | Add `/console/exports` POST/GET routes coordinating evidence bundle creation, streaming CSV/JSON exports, checksum manifest retrieval, and signed attestation references. Ensure requests honor tenant + policy scopes and expose job tracking metadata. | +| WEB-CONSOLE-23-004 `Global search router` | TODO | BE-Base Platform Guild | CONCELIER-CONSOLE-23-001, EXCITITOR-CONSOLE-23-001, SBOM-CONSOLE-23-001 | Implement `/console/search` endpoint accepting CVE/GHSA/PURL/SBOM identifiers, performing fan-out queries with caching, ranking, and deterministic tie-breaking. Return typed results for Console navigation; respect result caps and latency SLOs. | +| WEB-CONSOLE-23-005 `Downloads manifest API` | TODO | BE-Base Platform Guild, DevOps Guild | DOWNLOADS-CONSOLE-23-001, DEVOPS-CONSOLE-23-002 | Serve `/console/downloads` JSON manifest (images, charts, offline bundles) sourced from signed registry metadata; include integrity hashes, release notes links, and offline instructions. Provide caching headers and documentation. | + +## Policy Studio (Sprint 27) + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| WEB-POLICY-27-001 `Policy registry proxy` | TODO | BE-Base Platform Guild, Policy Registry Guild | REGISTRY-API-27-001, AUTH-POLICY-27-001 | Surface Policy Registry APIs (`/policy/workspaces`, `/policy/versions`, `/policy/reviews`, `/policy/registry`) through gateway with tenant scoping, RBAC, and request validation; ensure streaming downloads for evidence bundles. | +| WEB-POLICY-27-002 `Review & approval routes` | TODO | BE-Base Platform Guild | WEB-POLICY-27-001, REGISTRY-API-27-006 | Implement review lifecycle endpoints (open, comment, approve/reject) with audit headers, comment pagination, and webhook fan-out. | +| WEB-POLICY-27-003 `Simulation orchestration endpoints` | TODO | BE-Base Platform Guild, Scheduler Guild | REGISTRY-API-27-005, SCHED-CONSOLE-27-001 | Expose quick/batch simulation endpoints with SSE progress (`/policy/simulations/{runId}/stream`), cursor-based result pagination, and manifest download routes. | +| WEB-POLICY-27-004 `Publish & promote controls` | TODO | BE-Base Platform Guild, Security Guild | REGISTRY-API-27-007, REGISTRY-API-27-008, AUTH-POLICY-27-002 | Add publish/sign/promote/rollback endpoints with idempotent request IDs, canary parameters, and environment bindings; enforce scope checks and emit structured events. | +| WEB-POLICY-27-005 `Policy Studio telemetry` | TODO | BE-Base Platform Guild, Observability Guild | WEB-POLICY-27-001..004, TELEMETRY-CONSOLE-27-001 | Instrument metrics/logs for compile latency, simulation queue depth, approval latency, promotion actions; expose aggregated dashboards and correlation IDs for Console. | + +## Exceptions v1 (Epic 7) + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| WEB-EXC-25-001 `Exceptions CRUD & workflow` | TODO | BE-Base Platform Guild | POLICY-ENGINE-70-002, AUTH-EXC-25-001 | Implement `/exceptions` API (create, propose, approve, revoke, list, history) with validation, pagination, and audit logging. | +| WEB-EXC-25-002 `Policy integration surfaces` | TODO | BE-Base Platform Guild | POLICY-ENGINE-70-001 | Extend `/policy/effective` and `/policy/simulate` responses to include exception metadata and accept overrides for simulations. 
| +| WEB-EXC-25-003 `Notifications & events` | TODO | BE-Base Platform Guild, Platform Events Guild | WEB-EXC-25-001 | Publish `exception.*` events, integrate with notification hooks, enforce rate limits. | + +## Reachability v1 + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| WEB-SIG-26-001 `Signals proxy endpoints` | TODO | BE-Base Platform Guild, Signals Guild | SIGNALS-24-001 | Surface `/signals/callgraphs`, `/signals/facts` read/write endpoints with pagination, ETags, and RBAC. | +| WEB-SIG-26-002 `Reachability joins` | TODO | BE-Base Platform Guild | WEB-SIG-26-001, POLICY-ENGINE-80-001 | Extend `/policy/effective` and `/vuln/explorer` responses to include reachability scores/states and allow filtering. | +| WEB-SIG-26-003 `Simulation hooks` | TODO | BE-Base Platform Guild | WEB-SIG-26-002, POLICY-ENGINE-80-001 | Add reachability override parameters to `/policy/simulate` and related APIs for what-if analysis. | + +## Vulnerability Explorer (Sprint 29) + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| WEB-VULN-29-001 `Vuln API routing` | TODO | BE-Base Platform Guild | VULN-API-29-001, AUTH-VULN-29-001 | Expose `/vuln/*` endpoints via gateway with tenant scoping, RBAC/ABAC enforcement, anti-forgery headers, and request logging. | +| WEB-VULN-29-002 `Ledger proxy headers` | TODO | BE-Base Platform Guild, Findings Ledger Guild | WEB-VULN-29-001, LEDGER-29-002 | Forward workflow actions to Findings Ledger with idempotency headers and correlation IDs; handle retries/backoff. | +| WEB-VULN-29-003 `Simulation + export routing` | TODO | BE-Base Platform Guild | VULN-API-29-005, VULN-API-29-008 | Provide simulation and export orchestration routes with SSE/progress headers, signed download links, and request budgeting. | +| WEB-VULN-29-004 `Telemetry aggregation` | TODO | BE-Base Platform Guild, Observability Guild | WEB-VULN-29-001..003, DEVOPS-VULN-29-003 | Emit gateway metrics/logs (latency, error rates, export duration), propagate query hashes for analytics dashboards. | +| WEB-VEX-30-007 `VEX consensus routing` | TODO | BE-Base Platform Guild, VEX Lens Guild | VEXLENS-30-007, AUTH-VULN-24-001 | Route `/vex/consensus` APIs with tenant RBAC/ABAC, caching, and streaming; surface telemetry and trace IDs without gateway-side overlay logic. | + +## Advisory AI (Sprint 31) + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| WEB-AIAI-31-001 `API routing` | TODO | BE-Base Platform Guild | AIAI-31-006, AUTH-VULN-29-001 | Route `/advisory/ai/*` endpoints through gateway with RBAC/ABAC, rate limits, and telemetry headers. | +| WEB-AIAI-31-002 `Batch orchestration` | TODO | BE-Base Platform Guild | AIAI-31-006 | Provide batching job handlers and streaming responses for CLI automation with retry/backoff. | +| WEB-AIAI-31-003 `Telemetry & audit` | TODO | BE-Base Platform Guild, Observability Guild | WEB-AIAI-31-001, DEVOPS-AIAI-31-001 | Emit metrics/logs (latency, guardrail blocks, validation failures) and forward anonymized prompt hashes to analytics. | + +## Orchestrator Dashboard + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| WEB-ORCH-32-001 `Read-only routing` | TODO | BE-Base Platform Guild | ORCH-SVC-32-003, AUTH-ORCH-32-001 | Expose `/orchestrator/sources|runs|jobs|dag` read endpoints via gateway with tenant scoping, caching, and viewer scope enforcement. 
| +| WEB-ORCH-33-001 `Control + backfill actions` | TODO | BE-Base Platform Guild | WEB-ORCH-32-001, ORCH-SVC-33-001, AUTH-ORCH-33-001 | Add POST action routes (`pause|resume|test`, `retry|cancel`, `jobs/tail`, `backfill preview`) with proper error mapping and SSE bridging. | +| WEB-ORCH-34-001 `Quotas & telemetry` | TODO | BE-Base Platform Guild | WEB-ORCH-33-001, ORCH-SVC-33-003, ORCH-SVC-34-001 | Surface quotas/backfill APIs, queue/backpressure metrics, and error clustering routes with admin scope enforcement and audit logging. | + +## Export Center +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| WEB-EXPORT-35-001 `Export routing` | TODO | BE-Base Platform Guild | EXPORT-SVC-35-006, AUTH-EXPORT-35-001 | Surface Export Center APIs (profiles/runs/download) through gateway with tenant scoping, streaming support, and viewer/operator scope checks. | +| WEB-EXPORT-36-001 `Distribution endpoints` | TODO | BE-Base Platform Guild | WEB-EXPORT-35-001, EXPORT-SVC-36-004 | Add distribution routes (OCI/object storage), manifest/provenance proxies, and signed URL generation. | +| WEB-EXPORT-37-001 `Scheduling & verification` | TODO | BE-Base Platform Guild | WEB-EXPORT-36-001, EXPORT-SVC-37-003 | Expose scheduling, retention, encryption parameters, and verification endpoints with admin scope enforcement and audit logs. | + +## Notifications Studio (Epic 11) + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| WEB-NOTIFY-38-001 `Gateway routing` | TODO | BE-Base Platform Guild | NOTIFY-SVC-38-004, AUTH-NOTIFY-38-001 | Route notifier APIs (`/notifications/*`) and WS feed through gateway with tenant scoping, viewer/operator scope enforcement, and SSE/WebSocket bridging. | +| WEB-NOTIFY-39-001 `Digest & simulation endpoints` | TODO | BE-Base Platform Guild | WEB-NOTIFY-38-001, NOTIFY-SVC-39-001..003 | Surface digest scheduling, quiet-hour/throttle management, and simulation APIs; ensure rate limits and audit logging. | +| WEB-NOTIFY-40-001 `Escalations & localization` | TODO | BE-Base Platform Guild | WEB-NOTIFY-39-001, NOTIFY-SVC-40-001..003 | Expose escalation, localization, channel health, and ack verification endpoints with admin scope enforcement and signed token validation. | + +## Containerized Distribution (Epic 13) + +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| WEB-CONTAINERS-44-001 `Config discovery & quickstart flag` | TODO | BE-Base Platform Guild | COMPOSE-44-001 | Expose `/welcome` state, config discovery endpoint (safe values), and `QUICKSTART_MODE` handling for Console banner; add `/health/liveness`, `/health/readiness`, `/version` if missing. | +| WEB-CONTAINERS-45-001 `Helm readiness support` | TODO | BE-Base Platform Guild | HELM-45-001 | Ensure readiness endpoints reflect DB/queue readiness, add feature flag toggles via config map, and document NetworkPolicy ports. | +| WEB-CONTAINERS-46-001 `Air-gap hardening` | TODO | BE-Base Platform Guild | DEPLOY-AIRGAP-46-001 | Provide offline-friendly asset serving (no CDN), allow overriding object store endpoints via env, and document fallback behavior. 
| + +## Authority-Backed Scopes & Tenancy (Epic 14) +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| WEB-TEN-47-001 `Auth middleware` | TODO | BE-Base Platform Guild | AUTH-TEN-47-001 | Implement JWT verification, tenant activation from headers, scope matching, and decision audit emission for all API endpoints. | +| WEB-TEN-48-001 `Tenant context propagation` | TODO | BE-Base Platform Guild | WEB-TEN-47-001 | Set DB session `stella.tenant_id`, enforce tenant/project checks on persistence, prefix object storage paths, and stamp audit metadata. | +| WEB-TEN-49-001 `ABAC & audit API` | TODO | BE-Base Platform Guild, Policy Guild | POLICY-TEN-48-001 | Integrate optional ABAC overlay with Policy Engine, expose `/audit/decisions` API, and support service token minting endpoints. | + +## Observability & Forensics (Epic 15) +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| WEB-OBS-50-001 `Telemetry core adoption` | TODO | BE-Base Platform Guild, Observability Guild | TELEMETRY-OBS-50-001, TELEMETRY-OBS-50-002 | Integrate `StellaOps.Telemetry.Core` into gateway host, replace ad-hoc logging, ensure all routes emit trace/span IDs, tenant context, and scrubbed payload previews. | +| WEB-OBS-51-001 `Observability health endpoints` | TODO | BE-Base Platform Guild | WEB-OBS-50-001, TELEMETRY-OBS-51-001 | Implement `/obs/health` and `/obs/slo` aggregations, pulling metrics from Prometheus/collector APIs, including burn-rate signals and exemplar links for Console widgets. | +| WEB-OBS-52-001 `Trace & log proxies` | TODO | BE-Base Platform Guild | WEB-OBS-50-001, TIMELINE-OBS-52-003 | Deliver `/obs/trace/:id` and `/obs/logs` proxy endpoints with guardrails (time window limits, tenant scoping) forwarding to timeline indexer + log store with signed URLs. | +| WEB-OBS-54-001 `Evidence & attestation bridges` | TODO | BE-Base Platform Guild | EVID-OBS-54-001, PROV-OBS-54-001 | Provide `/evidence/*` and `/attestations/*` pass-through endpoints, enforce `timeline:read`, `evidence:read`, `attest:read` scopes, append provenance headers, and surface verification summaries. | +| WEB-OBS-55-001 `Incident mode controls` | TODO | BE-Base Platform Guild, Ops Guild | WEB-OBS-50-001, TELEMETRY-OBS-55-001, DEVOPS-OBS-55-001 | Add `/obs/incident-mode` API (enable/disable/status) with audit trail, sampling override, retention bump preview, and CLI/Console hooks. | +| WEB-OBS-56-001 `Sealed status surfaces` | TODO | BE-Base Platform Guild, AirGap Guild | WEB-OBS-50-001, AIRGAP-CTL-56-002 | Extend telemetry core integration to expose sealed/unsealed status APIs, drift metrics, and Console widgets without leaking sealed-mode secrets. | + +## SDKs & OpenAPI (Epic 17) +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| WEB-OAS-61-001 `Discovery endpoint` | TODO | BE-Base Platform Guild | OAS-61-002 | Implement `GET /.well-known/openapi` returning gateway spec with version metadata, cache headers, and signed ETag. | +| WEB-OAS-61-002 `Standard error envelope` | TODO | BE-Base Platform Guild | APIGOV-61-001 | Migrate gateway errors to standard envelope and update examples; ensure telemetry logs include `error.code`. | +| WEB-OAS-62-001 `Pagination & idempotency alignment` | TODO | BE-Base Platform Guild | WEB-OAS-61-002 | Normalize all endpoints to cursor pagination, expose `Idempotency-Key` support, and document rate-limit headers. 
| +| WEB-OAS-63-001 `Deprecation support` | TODO | BE-Base Platform Guild, API Governance Guild | APIGOV-63-001 | Add deprecation header middleware, Sunset link emission, and observability metrics for deprecated routes. | + +## Risk Profiles (Epic 18) +| ID | Status | Owner(s) | Depends on | Notes | +|----|--------|----------|------------|-------| +| WEB-RISK-66-001 `Risk API routing` | TODO | BE-Base Platform Guild, Policy Guild | POLICY-RISK-67-002 | Expose risk profile/results endpoints through gateway with tenant scoping, pagination, and rate limiting. | +| WEB-RISK-66-002 `Explainability downloads` | TODO | BE-Base Platform Guild, Risk Engine Guild | RISK-ENGINE-68-002 | Add signed URL handling for explanation blobs and enforce scope checks. | +| WEB-RISK-67-001 `Risk status endpoint` | TODO | BE-Base Platform Guild | WEB-RISK-66-001 | Provide aggregated risk stats (`/risk/status`) for Console dashboards (counts per severity, last computation). | +| WEB-RISK-68-001 `Notification hooks` | TODO | BE-Base Platform Guild, Notifications Guild | NOTIFY-RISK-66-001 | Emit events on severity transitions via gateway to notifier bus with trace metadata. | diff --git a/src/StellaOps.Web/angular.json b/src/Web/StellaOps.Web/angular.json similarity index 100% rename from src/StellaOps.Web/angular.json rename to src/Web/StellaOps.Web/angular.json diff --git a/src/StellaOps.Web/docs/DeterministicInstall.md b/src/Web/StellaOps.Web/docs/DeterministicInstall.md similarity index 96% rename from src/StellaOps.Web/docs/DeterministicInstall.md rename to src/Web/StellaOps.Web/docs/DeterministicInstall.md index 9e34e34d..64fa325c 100644 --- a/src/StellaOps.Web/docs/DeterministicInstall.md +++ b/src/Web/StellaOps.Web/docs/DeterministicInstall.md @@ -39,4 +39,4 @@ Archive both the npm cache and `.cache/chromium/` directory; include them in you - **Environment override** – Set `CHROME_BIN` or `STELLAOPS_CHROMIUM_BIN` to the executable path if you host Chromium in a custom location. - **Offline cache drop** – Place the extracted archive under `.cache/chromium/` (`chrome-linux64/chrome`, `chrome-win64/chrome.exe`, or `chrome-mac/Chromium.app/...`). The Karma harness resolves these automatically. -Consult `src/StellaOps.Web/README.md` for a shortened operator flow overview. +Consult `src/Web/StellaOps.Web/README.md` for a shortened operator flow overview. diff --git a/src/StellaOps.Web/docs/TrivyDbSettings.md b/src/Web/StellaOps.Web/docs/TrivyDbSettings.md similarity index 87% rename from src/StellaOps.Web/docs/TrivyDbSettings.md rename to src/Web/StellaOps.Web/docs/TrivyDbSettings.md index 19c07bbb..93eee701 100644 --- a/src/StellaOps.Web/docs/TrivyDbSettings.md +++ b/src/Web/StellaOps.Web/docs/TrivyDbSettings.md @@ -1,37 +1,37 @@ -# WEB1.TRIVY-SETTINGS – Backend Contract & UI Wiring Notes - -## 1. Known backend surfaces - -- `POST /jobs/export:trivy-db` - Payload is wrapped as `{ "trigger": "<source>", "parameters": { ... } }` and accepts the overrides shown in `TrivyDbExportJob` (`publishFull`, `publishDelta`, `includeFull`, `includeDelta`). - Evidence: `src/StellaOps.Cli/Commands/CommandHandlers.cs:263`, `src/StellaOps.Cli/Services/Models/Transport/JobTriggerRequest.cs:5`, `src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExportJob.cs:27`. -- Export configuration defaults sit under `TrivyDbExportOptions.Oras` and `.OfflineBundle`. Both booleans default to `true`, so overriding to `false` must be explicit. - Evidence: `src/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExportOptions.cs:8`. - -## 2. 
Clarifications needed from Concelier backend - -| Topic | Questions to resolve | Suggested owner | -| --- | --- | --- | -| Settings endpoint surface | `Program.cs` only exposes `/jobs/*` and health endpoints—there is currently **no** `/exporters/trivy-db/settings` route. Confirm the intended path (`/api/v1/concelier/exporters/trivy-db/settings`?), verbs (`GET`/`PUT` or `PATCH`), and DTO schema (flat booleans vs nested `oras`/`offlineBundle`). | Concelier WebService | -| Auth scopes | Verify required roles (likely `concelier.export` or `concelier.admin`) and whether UI needs to request additional scopes beyond existing dashboard access. | Authority & Concelier teams | -| Concurrency control | Determine if settings payload includes an ETag or timestamp we must echo (`If-Match`) to avoid stomping concurrent edits. | Concelier WebService | -| Validation & defaults | Clarify server-side validation rules (e.g., must `publishDelta` be `false` when `publishFull` is `false`?) and shape of Problem+JSON responses. | Concelier WebService | -| Manual run trigger | Confirm whether settings update should immediately kick an export or if UI should call `POST /jobs/export:trivy-db` separately (current CLI behaviour suggests a separate call). | Concelier WebService | - -## 3. Proposed Angular implementation (pending contract lock) - -- **Feature module**: `app/concelier/trivy-db-settings/` with a standalone routed page (`TrivyDbSettingsPage`) and a reusable form component (`TrivyDbSettingsForm`). -- **State & transport**: - - Client wrapper under `core/api/concelier-exporter.client.ts` exposing `getTrivyDbSettings`, `updateTrivyDbSettings`, and `runTrivyDbExport`. - - Store built with `@ngrx/signals` keeping `settings`, `isDirty`, `lastFetchedAt`, and error state; optimistic updates gated on ETag confirmation once the backend specifies the shape. - - Shared DTOs generated from the confirmed schema to keep Concelier/CLI alignment. -- **UX flow**: - - Load settings on navigation; show inline info about current publish/bundle defaults. - - “Run export now” button opens confirmation modal summarising overrides, then calls `runTrivyDbExport` (separate API call) while reusing local state. - - Surface Problem+JSON errors via existing toast/notification pattern and capture correlation IDs for ops visibility. -- **Offline posture**: cache latest successful settings payload in IndexedDB (read-only when offline) and disable the run button when token/scopes are missing. - -## 4. Next steps - -1. Share section 2 with Concelier WebService owners to confirm the REST contract (blocking before scaffolding DTOs). -2. Once confirmed, scaffold the Angular workspace and feature shell, keeping deterministic build outputs per `src/StellaOps.Web/AGENTS.md`. +# WEB1.TRIVY-SETTINGS – Backend Contract & UI Wiring Notes + +## 1. Known backend surfaces + +- `POST /jobs/export:trivy-db` + Payload is wrapped as `{ "trigger": "<source>", "parameters": { ... } }` and accepts the overrides shown in `TrivyDbExportJob` (`publishFull`, `publishDelta`, `includeFull`, `includeDelta`). + Evidence: `src/Cli/StellaOps.Cli/Commands/CommandHandlers.cs:263`, `src/Cli/StellaOps.Cli/Services/Models/Transport/JobTriggerRequest.cs:5`, `src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExportJob.cs:27`. +- Export configuration defaults sit under `TrivyDbExportOptions.Oras` and `.OfflineBundle`. Both booleans default to `true`, so overriding to `false` must be explicit. 
+ Evidence: `src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Exporter.TrivyDb/TrivyDbExportOptions.cs:8`. + +## 2. Clarifications needed from Concelier backend + +| Topic | Questions to resolve | Suggested owner | +| --- | --- | --- | +| Settings endpoint surface | `Program.cs` only exposes `/jobs/*` and health endpoints—there is currently **no** `/exporters/trivy-db/settings` route. Confirm the intended path (`/api/v1/concelier/exporters/trivy-db/settings`?), verbs (`GET`/`PUT` or `PATCH`), and DTO schema (flat booleans vs nested `oras`/`offlineBundle`). | Concelier WebService | +| Auth scopes | Verify required roles (likely `concelier.export` or `concelier.admin`) and whether UI needs to request additional scopes beyond existing dashboard access. | Authority & Concelier teams | +| Concurrency control | Determine if settings payload includes an ETag or timestamp we must echo (`If-Match`) to avoid stomping concurrent edits. | Concelier WebService | +| Validation & defaults | Clarify server-side validation rules (e.g., must `publishDelta` be `false` when `publishFull` is `false`?) and shape of Problem+JSON responses. | Concelier WebService | +| Manual run trigger | Confirm whether settings update should immediately kick an export or if UI should call `POST /jobs/export:trivy-db` separately (current CLI behaviour suggests a separate call). | Concelier WebService | + +## 3. Proposed Angular implementation (pending contract lock) + +- **Feature module**: `app/concelier/trivy-db-settings/` with a standalone routed page (`TrivyDbSettingsPage`) and a reusable form component (`TrivyDbSettingsForm`). +- **State & transport**: + - Client wrapper under `core/api/concelier-exporter.client.ts` exposing `getTrivyDbSettings`, `updateTrivyDbSettings`, and `runTrivyDbExport`. + - Store built with `@ngrx/signals` keeping `settings`, `isDirty`, `lastFetchedAt`, and error state; optimistic updates gated on ETag confirmation once the backend specifies the shape. + - Shared DTOs generated from the confirmed schema to keep Concelier/CLI alignment. +- **UX flow**: + - Load settings on navigation; show inline info about current publish/bundle defaults. + - “Run export now” button opens confirmation modal summarising overrides, then calls `runTrivyDbExport` (separate API call) while reusing local state. + - Surface Problem+JSON errors via existing toast/notification pattern and capture correlation IDs for ops visibility. +- **Offline posture**: cache latest successful settings payload in IndexedDB (read-only when offline) and disable the run button when token/scopes are missing. + +## 4. Next steps + +1. Share section 2 with Concelier WebService owners to confirm the REST contract (blocking before scaffolding DTOs). +2. Once confirmed, scaffold the Angular workspace and feature shell, keeping deterministic build outputs per `src/Web/StellaOps.Web/AGENTS.md`. 
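For orientation while the section 2 contract questions are open, here is a minimal sketch of the `core/api/concelier-exporter.client.ts` wrapper proposed in section 3, assuming Angular's `HttpClient`. The settings route and the flat `TrivyDbSettingsDto` shape are placeholders pending the Concelier answers; only the `POST /jobs/export:trivy-db` envelope (`{ "trigger": ..., "parameters": ... }`) is confirmed by the CLI evidence cited in section 1.

```ts
// Sketch only: the settings path, DTO shape, and trigger value are assumptions
// pending the backend contract clarifications tracked in section 2.
import { Injectable } from '@angular/core';
import { HttpClient } from '@angular/common/http';
import { Observable } from 'rxjs';

/** Assumed flat DTO; the backend may instead nest these under `oras`/`offlineBundle`. */
export interface TrivyDbSettingsDto {
  publishFull: boolean;
  publishDelta: boolean;
  includeFull: boolean;
  includeDelta: boolean;
}

@Injectable({ providedIn: 'root' })
export class ConcelierExporterClient {
  // Hypothetical route; Program.cs currently exposes only /jobs/* and health endpoints.
  private readonly settingsUrl = '/api/v1/concelier/exporters/trivy-db/settings';

  constructor(private readonly http: HttpClient) {}

  getTrivyDbSettings(): Observable<TrivyDbSettingsDto> {
    return this.http.get<TrivyDbSettingsDto>(this.settingsUrl);
  }

  updateTrivyDbSettings(settings: TrivyDbSettingsDto): Observable<TrivyDbSettingsDto> {
    // If the backend requires optimistic concurrency, an `If-Match` header would be echoed here.
    return this.http.put<TrivyDbSettingsDto>(this.settingsUrl, settings);
  }

  /** Confirmed surface: the job trigger wraps overrides in a `{ trigger, parameters }` envelope. */
  runTrivyDbExport(overrides: Partial<TrivyDbSettingsDto>): Observable<void> {
    return this.http.post<void>('/jobs/export:trivy-db', {
      trigger: 'ui',
      parameters: overrides,
    });
  }
}
```

The `'ui'` trigger value is illustrative; the final value should follow whatever convention the Concelier WebService expects for identifying the caller, and the DTOs should be regenerated once the confirmed schema is available.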
diff --git a/src/StellaOps.Web/karma.conf.cjs b/src/Web/StellaOps.Web/karma.conf.cjs similarity index 96% rename from src/StellaOps.Web/karma.conf.cjs rename to src/Web/StellaOps.Web/karma.conf.cjs index f504ec26..292a4b6d 100644 --- a/src/StellaOps.Web/karma.conf.cjs +++ b/src/Web/StellaOps.Web/karma.conf.cjs @@ -1,63 +1,63 @@ -const { join } = require('path'); -const { resolveChromeBinary } = require('./scripts/chrome-path'); - -const { env } = process; - -const chromeBin = resolveChromeBinary(__dirname); - -if (chromeBin) { - env.CHROME_BIN = chromeBin; -} else if (!env.CHROME_BIN) { - console.warn( - '[karma] Unable to locate a Chromium binary automatically. ' + - 'Set CHROME_BIN or STELLAOPS_CHROMIUM_BIN, or place an offline build under .cache/chromium/. ' + - 'See docs/DeterministicInstall.md for bootstrap instructions.' - ); -} - -const isCI = env.CI === 'true' || env.CI === '1'; - -module.exports = function (config) { - config.set({ - basePath: '', - frameworks: ['jasmine', '@angular-devkit/build-angular'], - plugins: [ - require('karma-jasmine'), - require('karma-chrome-launcher'), - require('karma-jasmine-html-reporter'), - require('karma-coverage'), - require('@angular-devkit/build-angular/plugins/karma') - ], - client: { - clearContext: false - }, - jasmineHtmlReporter: { - suppressAll: true - }, - coverageReporter: { - dir: join(__dirname, './coverage/stellaops-web'), - subdir: '.', - reporters: [ - { type: 'html' }, - { type: 'text-summary' } - ] - }, - reporters: ['progress', 'kjhtml'], - port: 9876, - colors: true, - logLevel: config.LOG_INFO, - browsers: ['ChromeHeadlessOffline'], - customLaunchers: { - ChromeHeadlessOffline: { - base: 'ChromeHeadless', - flags: [ - '--no-sandbox', - '--disable-gpu', - '--disable-dev-shm-usage', - '--disable-setuid-sandbox' - ] - } - }, - restartOnFileChange: false - }); -}; +const { join } = require('path'); +const { resolveChromeBinary } = require('./scripts/chrome-path'); + +const { env } = process; + +const chromeBin = resolveChromeBinary(__dirname); + +if (chromeBin) { + env.CHROME_BIN = chromeBin; +} else if (!env.CHROME_BIN) { + console.warn( + '[karma] Unable to locate a Chromium binary automatically. ' + + 'Set CHROME_BIN or STELLAOPS_CHROMIUM_BIN, or place an offline build under .cache/chromium/. ' + + 'See docs/DeterministicInstall.md for bootstrap instructions.' 
+ ); +} + +const isCI = env.CI === 'true' || env.CI === '1'; + +module.exports = function (config) { + config.set({ + basePath: '', + frameworks: ['jasmine', '@angular-devkit/build-angular'], + plugins: [ + require('karma-jasmine'), + require('karma-chrome-launcher'), + require('karma-jasmine-html-reporter'), + require('karma-coverage'), + require('@angular-devkit/build-angular/plugins/karma') + ], + client: { + clearContext: false + }, + jasmineHtmlReporter: { + suppressAll: true + }, + coverageReporter: { + dir: join(__dirname, './coverage/stellaops-web'), + subdir: '.', + reporters: [ + { type: 'html' }, + { type: 'text-summary' } + ] + }, + reporters: ['progress', 'kjhtml'], + port: 9876, + colors: true, + logLevel: config.LOG_INFO, + browsers: ['ChromeHeadlessOffline'], + customLaunchers: { + ChromeHeadlessOffline: { + base: 'ChromeHeadless', + flags: [ + '--no-sandbox', + '--disable-gpu', + '--disable-dev-shm-usage', + '--disable-setuid-sandbox' + ] + } + }, + restartOnFileChange: false + }); +}; diff --git a/src/StellaOps.Web/package-lock.json b/src/Web/StellaOps.Web/package-lock.json similarity index 96% rename from src/StellaOps.Web/package-lock.json rename to src/Web/StellaOps.Web/package-lock.json index 5b1caba7..8862d092 100644 --- a/src/StellaOps.Web/package-lock.json +++ b/src/Web/StellaOps.Web/package-lock.json @@ -1,13696 +1,13696 @@ -{ - "name": "stellaops-web", - "version": "0.0.0", - "lockfileVersion": 3, - "requires": true, - "packages": { - "": { - "name": "stellaops-web", - "version": "0.0.0", - "dependencies": { - "@angular/animations": "^17.3.0", - "@angular/common": "^17.3.0", - "@angular/compiler": "^17.3.0", - "@angular/core": "^17.3.0", - "@angular/forms": "^17.3.0", - "@angular/platform-browser": "^17.3.0", - "@angular/platform-browser-dynamic": "^17.3.0", - "@angular/router": "^17.3.0", - "rxjs": "~7.8.0", - "tslib": "^2.3.0", - "zone.js": "~0.14.3" - }, - "devDependencies": { - "@angular-devkit/build-angular": "^17.3.17", - "@angular/cli": "^17.3.17", - "@angular/compiler-cli": "^17.3.0", - "@playwright/test": "^1.47.2", - "@types/jasmine": "~5.1.0", - "jasmine-core": "~5.1.0", - "karma": "~6.4.0", - "karma-chrome-launcher": "~3.2.0", - "karma-coverage": "~2.2.0", - "karma-jasmine": "~5.1.0", - "karma-jasmine-html-reporter": "~2.1.0", - "typescript": "~5.4.2" - }, - "engines": { - "node": ">=20.11.0", - "npm": ">=10.2.0" - } - }, - "node_modules/@ampproject/remapping": { - "version": "2.3.0", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "@jridgewell/gen-mapping": "^0.3.5", - "@jridgewell/trace-mapping": "^0.3.24" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@angular-devkit/architect": { - "version": "0.1703.17", - "resolved": "https://registry.npmjs.org/@angular-devkit/architect/-/architect-0.1703.17.tgz", - "integrity": "sha512-LD6po8lGP2FI7WbnsSxtvpiIi+FYL0aNfteunkT+7po9jUNflBEYHA64UWNO56u7ryKNdbuiN8/TEh7FEUnmCw==", - "dev": true, - "dependencies": { - "@angular-devkit/core": "17.3.17", - "rxjs": "7.8.1" - }, - "engines": { - "node": "^18.13.0 || >=20.9.0", - "npm": "^6.11.0 || ^7.5.6 || >=8.0.0", - "yarn": ">= 1.13.0" - } - }, - "node_modules/@angular-devkit/architect/node_modules/rxjs": { - "version": "7.8.1", - "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.1.tgz", - "integrity": "sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg==", - "dev": true, - "dependencies": { - "tslib": "^2.1.0" - } - }, - "node_modules/@angular-devkit/build-angular": { - "version": 
"17.3.17", - "resolved": "https://registry.npmjs.org/@angular-devkit/build-angular/-/build-angular-17.3.17.tgz", - "integrity": "sha512-0kLVwjLZ5v4uIaG0K6sHJxxppS0bvjNmxHkbybU8FBW3r5MOBQh/ApsiCQKQQ8GBrQz9qSJvLJH8lsb/uR8aPQ==", - "dev": true, - "dependencies": { - "@ampproject/remapping": "2.3.0", - "@angular-devkit/architect": "0.1703.17", - "@angular-devkit/build-webpack": "0.1703.17", - "@angular-devkit/core": "17.3.17", - "@babel/core": "7.26.10", - "@babel/generator": "7.26.10", - "@babel/helper-annotate-as-pure": "7.25.9", - "@babel/helper-split-export-declaration": "7.24.7", - "@babel/plugin-transform-async-generator-functions": "7.26.8", - "@babel/plugin-transform-async-to-generator": "7.25.9", - "@babel/plugin-transform-runtime": "7.26.10", - "@babel/preset-env": "7.26.9", - "@babel/runtime": "7.26.10", - "@discoveryjs/json-ext": "0.5.7", - "@ngtools/webpack": "17.3.17", - "@vitejs/plugin-basic-ssl": "1.1.0", - "ansi-colors": "4.1.3", - "autoprefixer": "10.4.18", - "babel-loader": "9.1.3", - "babel-plugin-istanbul": "6.1.1", - "browserslist": "^4.21.5", - "copy-webpack-plugin": "11.0.0", - "critters": "0.0.22", - "css-loader": "6.10.0", - "esbuild-wasm": "0.20.1", - "fast-glob": "3.3.2", - "http-proxy-middleware": "2.0.8", - "https-proxy-agent": "7.0.4", - "inquirer": "9.2.15", - "jsonc-parser": "3.2.1", - "karma-source-map-support": "1.4.0", - "less": "4.2.0", - "less-loader": "11.1.0", - "license-webpack-plugin": "4.0.2", - "loader-utils": "3.2.1", - "magic-string": "0.30.8", - "mini-css-extract-plugin": "2.8.1", - "mrmime": "2.0.0", - "open": "8.4.2", - "ora": "5.4.1", - "parse5-html-rewriting-stream": "7.0.0", - "picomatch": "4.0.1", - "piscina": "4.4.0", - "postcss": "8.4.35", - "postcss-loader": "8.1.1", - "resolve-url-loader": "5.0.0", - "rxjs": "7.8.1", - "sass": "1.71.1", - "sass-loader": "14.1.1", - "semver": "7.6.0", - "source-map-loader": "5.0.0", - "source-map-support": "0.5.21", - "terser": "5.29.1", - "tree-kill": "1.2.2", - "tslib": "2.6.2", - "vite": "~5.4.17", - "watchpack": "2.4.0", - "webpack": "5.94.0", - "webpack-dev-middleware": "6.1.2", - "webpack-dev-server": "4.15.1", - "webpack-merge": "5.10.0", - "webpack-subresource-integrity": "5.1.0" - }, - "engines": { - "node": "^18.13.0 || >=20.9.0", - "npm": "^6.11.0 || ^7.5.6 || >=8.0.0", - "yarn": ">= 1.13.0" - }, - "optionalDependencies": { - "esbuild": "0.20.1" - }, - "peerDependencies": { - "@angular/compiler-cli": "^17.0.0", - "@angular/localize": "^17.0.0", - "@angular/platform-server": "^17.0.0", - "@angular/service-worker": "^17.0.0", - "@web/test-runner": "^0.18.0", - "browser-sync": "^3.0.2", - "jest": "^29.5.0", - "jest-environment-jsdom": "^29.5.0", - "karma": "^6.3.0", - "ng-packagr": "^17.0.0", - "protractor": "^7.0.0", - "tailwindcss": "^2.0.0 || ^3.0.0", - "typescript": ">=5.2 <5.5" - }, - "peerDependenciesMeta": { - "@angular/localize": { - "optional": true - }, - "@angular/platform-server": { - "optional": true - }, - "@angular/service-worker": { - "optional": true - }, - "@web/test-runner": { - "optional": true - }, - "browser-sync": { - "optional": true - }, - "jest": { - "optional": true - }, - "jest-environment-jsdom": { - "optional": true - }, - "karma": { - "optional": true - }, - "ng-packagr": { - "optional": true - }, - "protractor": { - "optional": true - }, - "tailwindcss": { - "optional": true - } - } - }, - "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/aix-ppc64": { - "version": "0.21.5", - "resolved": 
"https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz", - "integrity": "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==", - "cpu": [ - "ppc64" - ], - "dev": true, - "optional": true, - "os": [ - "aix" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/android-arm": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.5.tgz", - "integrity": "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==", - "cpu": [ - "arm" - ], - "dev": true, - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/android-arm64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz", - "integrity": "sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/android-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.5.tgz", - "integrity": "sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/darwin-arm64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz", - "integrity": "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/darwin-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz", - "integrity": "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/freebsd-arm64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz", - "integrity": "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/freebsd-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz", - "integrity": "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/linux-arm": { - "version": "0.21.5", - "resolved": 
"https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz", - "integrity": "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==", - "cpu": [ - "arm" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/linux-arm64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz", - "integrity": "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/linux-ia32": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz", - "integrity": "sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==", - "cpu": [ - "ia32" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/linux-loong64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz", - "integrity": "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==", - "cpu": [ - "loong64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/linux-mips64el": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz", - "integrity": "sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==", - "cpu": [ - "mips64el" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/linux-ppc64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz", - "integrity": "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==", - "cpu": [ - "ppc64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/linux-riscv64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz", - "integrity": "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==", - "cpu": [ - "riscv64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/linux-s390x": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz", - "integrity": "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==", - "cpu": [ - "s390x" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/linux-x64": { - "version": "0.21.5", - "resolved": 
"https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz", - "integrity": "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/netbsd-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz", - "integrity": "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "netbsd" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/openbsd-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz", - "integrity": "sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "openbsd" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/sunos-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz", - "integrity": "sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "sunos" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/win32-arm64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz", - "integrity": "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/win32-ia32": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz", - "integrity": "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==", - "cpu": [ - "ia32" - ], - "dev": true, - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/win32-x64": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz", - "integrity": "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-android-arm-eabi": { - "version": "4.52.5", - "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.52.5.tgz", - "integrity": "sha512-8c1vW4ocv3UOMp9K+gToY5zL2XiiVw3k7f1ksf4yO1FlDFQ1C2u72iACFnSOceJFsWskc2WZNqeRhFRPzv+wtQ==", - "cpu": [ - "arm" - ], - "dev": true, - "optional": true, - "os": [ - "android" - ] - }, - "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-android-arm64": { - "version": "4.52.5", - "resolved": 
"https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.52.5.tgz", - "integrity": "sha512-mQGfsIEFcu21mvqkEKKu2dYmtuSZOBMmAl5CFlPGLY94Vlcm+zWApK7F/eocsNzp8tKmbeBP8yXyAbx0XHsFNA==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "android" - ] - }, - "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-darwin-arm64": { - "version": "4.52.5", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.52.5.tgz", - "integrity": "sha512-takF3CR71mCAGA+v794QUZ0b6ZSrgJkArC+gUiG6LB6TQty9T0Mqh3m2ImRBOxS2IeYBo4lKWIieSvnEk2OQWA==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "darwin" - ] - }, - "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-darwin-x64": { - "version": "4.52.5", - "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.52.5.tgz", - "integrity": "sha512-W901Pla8Ya95WpxDn//VF9K9u2JbocwV/v75TE0YIHNTbhqUTv9w4VuQ9MaWlNOkkEfFwkdNhXgcLqPSmHy0fA==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "darwin" - ] - }, - "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-freebsd-arm64": { - "version": "4.52.5", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.52.5.tgz", - "integrity": "sha512-QofO7i7JycsYOWxe0GFqhLmF6l1TqBswJMvICnRUjqCx8b47MTo46W8AoeQwiokAx3zVryVnxtBMcGcnX12LvA==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "freebsd" - ] - }, - "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-freebsd-x64": { - "version": "4.52.5", - "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.52.5.tgz", - "integrity": "sha512-jr21b/99ew8ujZubPo9skbrItHEIE50WdV86cdSoRkKtmWa+DDr6fu2c/xyRT0F/WazZpam6kk7IHBerSL7LDQ==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "freebsd" - ] - }, - "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-linux-arm-gnueabihf": { - "version": "4.52.5", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.52.5.tgz", - "integrity": "sha512-PsNAbcyv9CcecAUagQefwX8fQn9LQ4nZkpDboBOttmyffnInRy8R8dSg6hxxl2Re5QhHBf6FYIDhIj5v982ATQ==", - "cpu": [ - "arm" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-linux-arm-musleabihf": { - "version": "4.52.5", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.52.5.tgz", - "integrity": "sha512-Fw4tysRutyQc/wwkmcyoqFtJhh0u31K+Q6jYjeicsGJJ7bbEq8LwPWV/w0cnzOqR2m694/Af6hpFayLJZkG2VQ==", - "cpu": [ - "arm" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-linux-arm64-gnu": { - "version": "4.52.5", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.52.5.tgz", - "integrity": "sha512-a+3wVnAYdQClOTlyapKmyI6BLPAFYs0JM8HRpgYZQO02rMR09ZcV9LbQB+NL6sljzG38869YqThrRnfPMCDtZg==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-linux-arm64-musl": { - "version": "4.52.5", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.52.5.tgz", - "integrity": 
"sha512-AvttBOMwO9Pcuuf7m9PkC1PUIKsfaAJ4AYhy944qeTJgQOqJYJ9oVl2nYgY7Rk0mkbsuOpCAYSs6wLYB2Xiw0Q==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-linux-loong64-gnu": { - "version": "4.52.5", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.52.5.tgz", - "integrity": "sha512-DkDk8pmXQV2wVrF6oq5tONK6UHLz/XcEVow4JTTerdeV1uqPeHxwcg7aFsfnSm9L+OO8WJsWotKM2JJPMWrQtA==", - "cpu": [ - "loong64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-linux-ppc64-gnu": { - "version": "4.52.5", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.52.5.tgz", - "integrity": "sha512-W/b9ZN/U9+hPQVvlGwjzi+Wy4xdoH2I8EjaCkMvzpI7wJUs8sWJ03Rq96jRnHkSrcHTpQe8h5Tg3ZzUPGauvAw==", - "cpu": [ - "ppc64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-linux-riscv64-gnu": { - "version": "4.52.5", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.52.5.tgz", - "integrity": "sha512-sjQLr9BW7R/ZiXnQiWPkErNfLMkkWIoCz7YMn27HldKsADEKa5WYdobaa1hmN6slu9oWQbB6/jFpJ+P2IkVrmw==", - "cpu": [ - "riscv64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-linux-riscv64-musl": { - "version": "4.52.5", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.52.5.tgz", - "integrity": "sha512-hq3jU/kGyjXWTvAh2awn8oHroCbrPm8JqM7RUpKjalIRWWXE01CQOf/tUNWNHjmbMHg/hmNCwc/Pz3k1T/j/Lg==", - "cpu": [ - "riscv64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-linux-s390x-gnu": { - "version": "4.52.5", - "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.52.5.tgz", - "integrity": "sha512-gn8kHOrku8D4NGHMK1Y7NA7INQTRdVOntt1OCYypZPRt6skGbddska44K8iocdpxHTMMNui5oH4elPH4QOLrFQ==", - "cpu": [ - "s390x" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-openharmony-arm64": { - "version": "4.52.5", - "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.52.5.tgz", - "integrity": "sha512-QoFqB6+/9Rly/RiPjaomPLmR/13cgkIGfA40LHly9zcH1S0bN2HVFYk3a1eAyHQyjs3ZJYlXvIGtcCs5tko9Cw==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "openharmony" - ] - }, - "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-win32-arm64-msvc": { - "version": "4.52.5", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.52.5.tgz", - "integrity": "sha512-w0cDWVR6MlTstla1cIfOGyl8+qb93FlAVutcor14Gf5Md5ap5ySfQ7R9S/NjNaMLSFdUnKGEasmVnu3lCMqB7w==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "win32" - ] - }, - "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-win32-ia32-msvc": { - "version": "4.52.5", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.52.5.tgz", - "integrity": 
"sha512-Aufdpzp7DpOTULJCuvzqcItSGDH73pF3ko/f+ckJhxQyHtp67rHw3HMNxoIdDMUITJESNE6a8uh4Lo4SLouOUg==", - "cpu": [ - "ia32" - ], - "dev": true, - "optional": true, - "os": [ - "win32" - ] - }, - "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-win32-x64-gnu": { - "version": "4.52.5", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.52.5.tgz", - "integrity": "sha512-UGBUGPFp1vkj6p8wCRraqNhqwX/4kNQPS57BCFc8wYh0g94iVIW33wJtQAx3G7vrjjNtRaxiMUylM0ktp/TRSQ==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "win32" - ] - }, - "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-win32-x64-msvc": { - "version": "4.52.5", - "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.52.5.tgz", - "integrity": "sha512-TAcgQh2sSkykPRWLrdyy2AiceMckNf5loITqXxFI5VuQjS5tSuw3WlwdN8qv8vzjLAUTvYaH/mVjSFpbkFbpTg==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "win32" - ] - }, - "node_modules/@angular-devkit/build-angular/node_modules/@vitejs/plugin-basic-ssl": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@vitejs/plugin-basic-ssl/-/plugin-basic-ssl-1.1.0.tgz", - "integrity": "sha512-wO4Dk/rm8u7RNhOf95ZzcEmC9rYOncYgvq4z3duaJrCgjN8BxAnDVyndanfcJZ0O6XZzHz6Q0hTimxTg8Y9g/A==", - "dev": true, - "engines": { - "node": ">=14.6.0" - }, - "peerDependencies": { - "vite": "^3.0.0 || ^4.0.0 || ^5.0.0" - } - }, - "node_modules/@angular-devkit/build-angular/node_modules/fsevents": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", - "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", - "dev": true, - "hasInstallScript": true, - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": "^8.16.0 || ^10.6.0 || >=11.0.0" - } - }, - "node_modules/@angular-devkit/build-angular/node_modules/nanoid": { - "version": "3.3.11", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", - "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "bin": { - "nanoid": "bin/nanoid.cjs" - }, - "engines": { - "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" - } - }, - "node_modules/@angular-devkit/build-angular/node_modules/rollup": { - "version": "4.52.5", - "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.52.5.tgz", - "integrity": "sha512-3GuObel8h7Kqdjt0gxkEzaifHTqLVW56Y/bjN7PSQtkKr0w3V/QYSdt6QWYtd7A1xUtYQigtdUfgj1RvWVtorw==", - "dev": true, - "dependencies": { - "@types/estree": "1.0.8" - }, - "bin": { - "rollup": "dist/bin/rollup" - }, - "engines": { - "node": ">=18.0.0", - "npm": ">=8.0.0" - }, - "optionalDependencies": { - "@rollup/rollup-android-arm-eabi": "4.52.5", - "@rollup/rollup-android-arm64": "4.52.5", - "@rollup/rollup-darwin-arm64": "4.52.5", - "@rollup/rollup-darwin-x64": "4.52.5", - "@rollup/rollup-freebsd-arm64": "4.52.5", - "@rollup/rollup-freebsd-x64": "4.52.5", - "@rollup/rollup-linux-arm-gnueabihf": "4.52.5", - "@rollup/rollup-linux-arm-musleabihf": "4.52.5", - "@rollup/rollup-linux-arm64-gnu": "4.52.5", - "@rollup/rollup-linux-arm64-musl": "4.52.5", - "@rollup/rollup-linux-loong64-gnu": "4.52.5", - "@rollup/rollup-linux-ppc64-gnu": "4.52.5", - "@rollup/rollup-linux-riscv64-gnu": "4.52.5", - "@rollup/rollup-linux-riscv64-musl": 
"4.52.5", - "@rollup/rollup-linux-s390x-gnu": "4.52.5", - "@rollup/rollup-linux-x64-gnu": "4.52.5", - "@rollup/rollup-linux-x64-musl": "4.52.5", - "@rollup/rollup-openharmony-arm64": "4.52.5", - "@rollup/rollup-win32-arm64-msvc": "4.52.5", - "@rollup/rollup-win32-ia32-msvc": "4.52.5", - "@rollup/rollup-win32-x64-gnu": "4.52.5", - "@rollup/rollup-win32-x64-msvc": "4.52.5", - "fsevents": "~2.3.2" - } - }, - "node_modules/@angular-devkit/build-angular/node_modules/rxjs": { - "version": "7.8.1", - "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.1.tgz", - "integrity": "sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg==", - "dev": true, - "dependencies": { - "tslib": "^2.1.0" - } - }, - "node_modules/@angular-devkit/build-angular/node_modules/tslib": { - "version": "2.6.2", - "dev": true, - "license": "0BSD" - }, - "node_modules/@angular-devkit/build-angular/node_modules/vite": { - "version": "5.4.21", - "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.21.tgz", - "integrity": "sha512-o5a9xKjbtuhY6Bi5S3+HvbRERmouabWbyUcpXXUA1u+GNUKoROi9byOJ8M0nHbHYHkYICiMlqxkg1KkYmm25Sw==", - "dev": true, - "peer": true, - "dependencies": { - "esbuild": "^0.21.3", - "postcss": "^8.4.43", - "rollup": "^4.20.0" - }, - "bin": { - "vite": "bin/vite.js" - }, - "engines": { - "node": "^18.0.0 || >=20.0.0" - }, - "funding": { - "url": "https://github.com/vitejs/vite?sponsor=1" - }, - "optionalDependencies": { - "fsevents": "~2.3.3" - }, - "peerDependencies": { - "@types/node": "^18.0.0 || >=20.0.0", - "less": "*", - "lightningcss": "^1.21.0", - "sass": "*", - "sass-embedded": "*", - "stylus": "*", - "sugarss": "*", - "terser": "^5.4.0" - }, - "peerDependenciesMeta": { - "@types/node": { - "optional": true - }, - "less": { - "optional": true - }, - "lightningcss": { - "optional": true - }, - "sass": { - "optional": true - }, - "sass-embedded": { - "optional": true - }, - "stylus": { - "optional": true - }, - "sugarss": { - "optional": true - }, - "terser": { - "optional": true - } - } - }, - "node_modules/@angular-devkit/build-angular/node_modules/vite/node_modules/esbuild": { - "version": "0.21.5", - "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz", - "integrity": "sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==", - "dev": true, - "hasInstallScript": true, - "bin": { - "esbuild": "bin/esbuild" - }, - "engines": { - "node": ">=12" - }, - "optionalDependencies": { - "@esbuild/aix-ppc64": "0.21.5", - "@esbuild/android-arm": "0.21.5", - "@esbuild/android-arm64": "0.21.5", - "@esbuild/android-x64": "0.21.5", - "@esbuild/darwin-arm64": "0.21.5", - "@esbuild/darwin-x64": "0.21.5", - "@esbuild/freebsd-arm64": "0.21.5", - "@esbuild/freebsd-x64": "0.21.5", - "@esbuild/linux-arm": "0.21.5", - "@esbuild/linux-arm64": "0.21.5", - "@esbuild/linux-ia32": "0.21.5", - "@esbuild/linux-loong64": "0.21.5", - "@esbuild/linux-mips64el": "0.21.5", - "@esbuild/linux-ppc64": "0.21.5", - "@esbuild/linux-riscv64": "0.21.5", - "@esbuild/linux-s390x": "0.21.5", - "@esbuild/linux-x64": "0.21.5", - "@esbuild/netbsd-x64": "0.21.5", - "@esbuild/openbsd-x64": "0.21.5", - "@esbuild/sunos-x64": "0.21.5", - "@esbuild/win32-arm64": "0.21.5", - "@esbuild/win32-ia32": "0.21.5", - "@esbuild/win32-x64": "0.21.5" - } - }, - "node_modules/@angular-devkit/build-angular/node_modules/vite/node_modules/postcss": { - "version": "8.5.6", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", - "integrity": 
"sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/postcss" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "dependencies": { - "nanoid": "^3.3.11", - "picocolors": "^1.1.1", - "source-map-js": "^1.2.1" - }, - "engines": { - "node": "^10 || ^12 || >=14" - } - }, - "node_modules/@angular-devkit/build-webpack": { - "version": "0.1703.17", - "dev": true, - "license": "MIT", - "dependencies": { - "@angular-devkit/architect": "0.1703.17", - "rxjs": "7.8.1" - }, - "engines": { - "node": "^18.13.0 || >=20.9.0", - "npm": "^6.11.0 || ^7.5.6 || >=8.0.0", - "yarn": ">= 1.13.0" - }, - "peerDependencies": { - "webpack": "^5.30.0", - "webpack-dev-server": "^4.0.0" - } - }, - "node_modules/@angular-devkit/build-webpack/node_modules/rxjs": { - "version": "7.8.1", - "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.1.tgz", - "integrity": "sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg==", - "dev": true, - "dependencies": { - "tslib": "^2.1.0" - } - }, - "node_modules/@angular-devkit/core": { - "version": "17.3.17", - "resolved": "https://registry.npmjs.org/@angular-devkit/core/-/core-17.3.17.tgz", - "integrity": "sha512-7aNVqS3rOGsSZYAOO44xl2KURwaoOP+EJhJs+LqOGOFpok2kd8YLf4CAMUossMF4H7HsJpgKwYqGrV5eXunrpw==", - "dev": true, - "dependencies": { - "ajv": "8.12.0", - "ajv-formats": "2.1.1", - "jsonc-parser": "3.2.1", - "picomatch": "4.0.1", - "rxjs": "7.8.1", - "source-map": "0.7.4" - }, - "engines": { - "node": "^18.13.0 || >=20.9.0", - "npm": "^6.11.0 || ^7.5.6 || >=8.0.0", - "yarn": ">= 1.13.0" - }, - "peerDependencies": { - "chokidar": "^3.5.2" - }, - "peerDependenciesMeta": { - "chokidar": { - "optional": true - } - } - }, - "node_modules/@angular-devkit/core/node_modules/ajv": { - "version": "8.12.0", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz", - "integrity": "sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==", - "dev": true, - "dependencies": { - "fast-deep-equal": "^3.1.1", - "json-schema-traverse": "^1.0.0", - "require-from-string": "^2.0.2", - "uri-js": "^4.2.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, - "node_modules/@angular-devkit/core/node_modules/punycode": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", - "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/@angular-devkit/core/node_modules/rxjs": { - "version": "7.8.1", - "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.1.tgz", - "integrity": "sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg==", - "dev": true, - "dependencies": { - "tslib": "^2.1.0" - } - }, - "node_modules/@angular-devkit/core/node_modules/uri-js": { - "version": "4.4.1", - "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", - "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", - "dev": true, - "dependencies": { - "punycode": "^2.1.0" - } - }, - "node_modules/@angular-devkit/schematics": { - "version": "17.3.17", - "resolved": 
"https://registry.npmjs.org/@angular-devkit/schematics/-/schematics-17.3.17.tgz", - "integrity": "sha512-ZXsIJXZm0I0dNu1BqmjfEtQhnzqoupUHHZb4GHm5NeQHBFZctQlkkNxLUU27GVeBUwFgEmP7kFgSLlMPTGSL5g==", - "dev": true, - "dependencies": { - "@angular-devkit/core": "17.3.17", - "jsonc-parser": "3.2.1", - "magic-string": "0.30.8", - "ora": "5.4.1", - "rxjs": "7.8.1" - }, - "engines": { - "node": "^18.13.0 || >=20.9.0", - "npm": "^6.11.0 || ^7.5.6 || >=8.0.0", - "yarn": ">= 1.13.0" - } - }, - "node_modules/@angular-devkit/schematics/node_modules/rxjs": { - "version": "7.8.1", - "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.1.tgz", - "integrity": "sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg==", - "dev": true, - "dependencies": { - "tslib": "^2.1.0" - } - }, - "node_modules/@angular/animations": { - "version": "17.3.12", - "license": "MIT", - "peer": true, - "dependencies": { - "tslib": "^2.3.0" - }, - "engines": { - "node": "^18.13.0 || >=20.9.0" - }, - "peerDependencies": { - "@angular/core": "17.3.12" - } - }, - "node_modules/@angular/cli": { - "version": "17.3.17", - "resolved": "https://registry.npmjs.org/@angular/cli/-/cli-17.3.17.tgz", - "integrity": "sha512-FgOvf9q5d23Cpa7cjP1FYti/v8S1FTm8DEkW3TY8lkkoxh3isu28GFKcLD1p/XF3yqfPkPVHToOFla5QwsEgBQ==", - "dev": true, - "dependencies": { - "@angular-devkit/architect": "0.1703.17", - "@angular-devkit/core": "17.3.17", - "@angular-devkit/schematics": "17.3.17", - "@schematics/angular": "17.3.17", - "@yarnpkg/lockfile": "1.1.0", - "ansi-colors": "4.1.3", - "ini": "4.1.2", - "inquirer": "9.2.15", - "jsonc-parser": "3.2.1", - "npm-package-arg": "11.0.1", - "npm-pick-manifest": "9.0.0", - "open": "8.4.2", - "ora": "5.4.1", - "pacote": "17.0.6", - "resolve": "1.22.8", - "semver": "7.6.0", - "symbol-observable": "4.0.0", - "yargs": "17.7.2" - }, - "bin": { - "ng": "bin/ng.js" - }, - "engines": { - "node": "^18.13.0 || >=20.9.0", - "npm": "^6.11.0 || ^7.5.6 || >=8.0.0", - "yarn": ">= 1.13.0" - } - }, - "node_modules/@angular/common": { - "version": "17.3.12", - "resolved": "https://registry.npmjs.org/@angular/common/-/common-17.3.12.tgz", - "integrity": "sha512-vabJzvrx76XXFrm1RJZ6o/CyG32piTB/1sfFfKHdlH1QrmArb8It4gyk9oEjZ1IkAD0HvBWlfWmn+T6Vx3pdUw==", - "peer": true, - "dependencies": { - "tslib": "^2.3.0" - }, - "engines": { - "node": "^18.13.0 || >=20.9.0" - }, - "peerDependencies": { - "@angular/core": "17.3.12", - "rxjs": "^6.5.3 || ^7.4.0" - } - }, - "node_modules/@angular/compiler": { - "version": "17.3.12", - "license": "MIT", - "peer": true, - "dependencies": { - "tslib": "^2.3.0" - }, - "engines": { - "node": "^18.13.0 || >=20.9.0" - }, - "peerDependencies": { - "@angular/core": "17.3.12" - }, - "peerDependenciesMeta": { - "@angular/core": { - "optional": true - } - } - }, - "node_modules/@angular/compiler-cli": { - "version": "17.3.12", - "resolved": "https://registry.npmjs.org/@angular/compiler-cli/-/compiler-cli-17.3.12.tgz", - "integrity": "sha512-1F8M7nWfChzurb7obbvuE7mJXlHtY1UG58pcwcomVtpPb+kPavgAO8OEvJHYBMV+bzSxkXt5UIwL9lt9jHUxZA==", - "dev": true, - "peer": true, - "dependencies": { - "@babel/core": "7.23.9", - "@jridgewell/sourcemap-codec": "^1.4.14", - "chokidar": "^3.0.0", - "convert-source-map": "^1.5.1", - "reflect-metadata": "^0.2.0", - "semver": "^7.0.0", - "tslib": "^2.3.0", - "yargs": "^17.2.1" - }, - "bin": { - "ng-xi18n": "bundles/src/bin/ng_xi18n.js", - "ngc": "bundles/src/bin/ngc.js", - "ngcc": "bundles/ngcc/index.js" - }, - "engines": { - "node": "^18.13.0 || >=20.9.0" - 
}, - "peerDependencies": { - "@angular/compiler": "17.3.12", - "typescript": ">=5.2 <5.5" - } - }, - "node_modules/@angular/compiler-cli/node_modules/@babel/core": { - "version": "7.23.9", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.23.9.tgz", - "integrity": "sha512-5q0175NOjddqpvvzU+kDiSOAk4PfdO6FvwCWoQ6RO7rTzEe8vlo+4HVfcnAREhD4npMs0e9uZypjTwzZPCf/cw==", - "dev": true, - "dependencies": { - "@ampproject/remapping": "^2.2.0", - "@babel/code-frame": "^7.23.5", - "@babel/generator": "^7.23.6", - "@babel/helper-compilation-targets": "^7.23.6", - "@babel/helper-module-transforms": "^7.23.3", - "@babel/helpers": "^7.23.9", - "@babel/parser": "^7.23.9", - "@babel/template": "^7.23.9", - "@babel/traverse": "^7.23.9", - "@babel/types": "^7.23.9", - "convert-source-map": "^2.0.0", - "debug": "^4.1.0", - "gensync": "^1.0.0-beta.2", - "json5": "^2.2.3", - "semver": "^6.3.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/babel" - } - }, - "node_modules/@angular/compiler-cli/node_modules/@babel/core/node_modules/convert-source-map": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", - "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", - "dev": true - }, - "node_modules/@angular/compiler-cli/node_modules/@babel/core/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "dev": true, - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/@angular/compiler-cli/node_modules/@babel/generator": { - "version": "7.28.3", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", - "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", - "dev": true, - "dependencies": { - "@babel/parser": "^7.28.3", - "@babel/types": "^7.28.2", - "@jridgewell/gen-mapping": "^0.3.12", - "@jridgewell/trace-mapping": "^0.3.28", - "jsesc": "^3.0.2" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@angular/compiler-cli/node_modules/@babel/helpers": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.4.tgz", - "integrity": "sha512-HFN59MmQXGHVyYadKLVumYsA9dBFun/ldYxipEjzA4196jpLZd8UjEEBLkbEkvfYreDqJhZxYAWFPtrfhNpj4w==", - "dev": true, - "dependencies": { - "@babel/template": "^7.27.2", - "@babel/types": "^7.28.4" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@angular/compiler-cli/node_modules/@babel/parser": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", - "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", - "dev": true, - "dependencies": { - "@babel/types": "^7.28.4" - }, - "bin": { - "parser": "bin/babel-parser.js" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@angular/compiler-cli/node_modules/@babel/traverse": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", - "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.28.3", - 
"@babel/helper-globals": "^7.28.0", - "@babel/parser": "^7.28.4", - "@babel/template": "^7.27.2", - "@babel/types": "^7.28.4", - "debug": "^4.3.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@angular/compiler-cli/node_modules/@babel/types": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", - "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", - "dev": true, - "dependencies": { - "@babel/helper-string-parser": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@angular/core": { - "version": "17.3.12", - "resolved": "https://registry.npmjs.org/@angular/core/-/core-17.3.12.tgz", - "integrity": "sha512-MuFt5yKi161JmauUta4Dh0m8ofwoq6Ino+KoOtkYMBGsSx+A7dSm+DUxxNwdj7+DNyg3LjVGCFgBFnq4g8z06A==", - "peer": true, - "dependencies": { - "tslib": "^2.3.0" - }, - "engines": { - "node": "^18.13.0 || >=20.9.0" - }, - "peerDependencies": { - "rxjs": "^6.5.3 || ^7.4.0", - "zone.js": "~0.14.0" - } - }, - "node_modules/@angular/forms": { - "version": "17.3.12", - "license": "MIT", - "dependencies": { - "tslib": "^2.3.0" - }, - "engines": { - "node": "^18.13.0 || >=20.9.0" - }, - "peerDependencies": { - "@angular/common": "17.3.12", - "@angular/core": "17.3.12", - "@angular/platform-browser": "17.3.12", - "rxjs": "^6.5.3 || ^7.4.0" - } - }, - "node_modules/@angular/platform-browser": { - "version": "17.3.12", - "license": "MIT", - "peer": true, - "dependencies": { - "tslib": "^2.3.0" - }, - "engines": { - "node": "^18.13.0 || >=20.9.0" - }, - "peerDependencies": { - "@angular/animations": "17.3.12", - "@angular/common": "17.3.12", - "@angular/core": "17.3.12" - }, - "peerDependenciesMeta": { - "@angular/animations": { - "optional": true - } - } - }, - "node_modules/@angular/platform-browser-dynamic": { - "version": "17.3.12", - "license": "MIT", - "dependencies": { - "tslib": "^2.3.0" - }, - "engines": { - "node": "^18.13.0 || >=20.9.0" - }, - "peerDependencies": { - "@angular/common": "17.3.12", - "@angular/compiler": "17.3.12", - "@angular/core": "17.3.12", - "@angular/platform-browser": "17.3.12" - } - }, - "node_modules/@angular/router": { - "version": "17.3.12", - "license": "MIT", - "dependencies": { - "tslib": "^2.3.0" - }, - "engines": { - "node": "^18.13.0 || >=20.9.0" - }, - "peerDependencies": { - "@angular/common": "17.3.12", - "@angular/core": "17.3.12", - "@angular/platform-browser": "17.3.12", - "rxjs": "^6.5.3 || ^7.4.0" - } - }, - "node_modules/@babel/code-frame": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-validator-identifier": "^7.27.1", - "js-tokens": "^4.0.0", - "picocolors": "^1.1.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/compat-data": { - "version": "7.28.4", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/core": { - "version": "7.26.10", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.26.10.tgz", - "integrity": "sha512-vMqyb7XCDMPvJFFOaT9kxtiRh42GwlZEg1/uIgtZshS5a/8OaduUfCi7kynKgc3Tw/6Uo2D+db9qBttghhmxwQ==", - "dev": true, - "peer": true, - "dependencies": { - "@ampproject/remapping": "^2.2.0", - "@babel/code-frame": "^7.26.2", - "@babel/generator": "^7.26.10", - "@babel/helper-compilation-targets": "^7.26.5", - "@babel/helper-module-transforms": "^7.26.0", - "@babel/helpers": "^7.26.10", - "@babel/parser": "^7.26.10", - 
"@babel/template": "^7.26.9", - "@babel/traverse": "^7.26.10", - "@babel/types": "^7.26.10", - "convert-source-map": "^2.0.0", - "debug": "^4.1.0", - "gensync": "^1.0.0-beta.2", - "json5": "^2.2.3", - "semver": "^6.3.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/babel" - } - }, - "node_modules/@babel/core/node_modules/@babel/generator": { - "version": "7.28.3", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", - "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", - "dev": true, - "dependencies": { - "@babel/parser": "^7.28.3", - "@babel/types": "^7.28.2", - "@jridgewell/gen-mapping": "^0.3.12", - "@jridgewell/trace-mapping": "^0.3.28", - "jsesc": "^3.0.2" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/core/node_modules/@babel/helpers": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.4.tgz", - "integrity": "sha512-HFN59MmQXGHVyYadKLVumYsA9dBFun/ldYxipEjzA4196jpLZd8UjEEBLkbEkvfYreDqJhZxYAWFPtrfhNpj4w==", - "dev": true, - "dependencies": { - "@babel/template": "^7.27.2", - "@babel/types": "^7.28.4" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/core/node_modules/@babel/parser": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", - "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", - "dev": true, - "dependencies": { - "@babel/types": "^7.28.4" - }, - "bin": { - "parser": "bin/babel-parser.js" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@babel/core/node_modules/@babel/traverse": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", - "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.28.3", - "@babel/helper-globals": "^7.28.0", - "@babel/parser": "^7.28.4", - "@babel/template": "^7.27.2", - "@babel/types": "^7.28.4", - "debug": "^4.3.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/core/node_modules/@babel/types": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", - "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", - "dev": true, - "dependencies": { - "@babel/helper-string-parser": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/core/node_modules/convert-source-map": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", - "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", - "dev": true - }, - "node_modules/@babel/core/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "dev": true, - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/@babel/generator": { - "version": "7.26.10", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.26.10.tgz", 
- "integrity": "sha512-rRHT8siFIXQrAYOYqZQVsAr8vJ+cBNqcVAY6m5V8/4QqzaPl+zDBe6cLEPRDuNOUf3ww8RfJVlOyQMoSI+5Ang==", - "dev": true, - "dependencies": { - "@babel/parser": "^7.26.10", - "@babel/types": "^7.26.10", - "@jridgewell/gen-mapping": "^0.3.5", - "@jridgewell/trace-mapping": "^0.3.25", - "jsesc": "^3.0.2" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/generator/node_modules/@babel/parser": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", - "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", - "dev": true, - "dependencies": { - "@babel/types": "^7.28.4" - }, - "bin": { - "parser": "bin/babel-parser.js" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@babel/generator/node_modules/@babel/types": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", - "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", - "dev": true, - "dependencies": { - "@babel/helper-string-parser": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-annotate-as-pure": { - "version": "7.25.9", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/types": "^7.25.9" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-annotate-as-pure/node_modules/@babel/types": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", - "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", - "dev": true, - "dependencies": { - "@babel/helper-string-parser": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-compilation-targets": { - "version": "7.27.2", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/compat-data": "^7.27.2", - "@babel/helper-validator-option": "^7.27.1", - "browserslist": "^4.24.0", - "lru-cache": "^5.1.1", - "semver": "^6.3.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-compilation-targets/node_modules/semver": { - "version": "6.3.1", - "dev": true, - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/@babel/helper-create-regexp-features-plugin": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.27.1", - "regexpu-core": "^6.2.0", - "semver": "^6.3.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/helper-create-regexp-features-plugin/node_modules/@babel/helper-annotate-as-pure": { - "version": "7.27.3", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/types": "^7.27.3" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-create-regexp-features-plugin/node_modules/@babel/types": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", - "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", - "dev": true, - "dependencies": { - "@babel/helper-string-parser": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - 
"node_modules/@babel/helper-create-regexp-features-plugin/node_modules/semver": { - "version": "6.3.1", - "dev": true, - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/@babel/helper-globals": { - "version": "7.28.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-member-expression-to-functions": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/traverse": "^7.27.1", - "@babel/types": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-member-expression-to-functions/node_modules/@babel/generator": { - "version": "7.28.3", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", - "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", - "dev": true, - "dependencies": { - "@babel/parser": "^7.28.3", - "@babel/types": "^7.28.2", - "@jridgewell/gen-mapping": "^0.3.12", - "@jridgewell/trace-mapping": "^0.3.28", - "jsesc": "^3.0.2" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-member-expression-to-functions/node_modules/@babel/parser": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", - "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", - "dev": true, - "dependencies": { - "@babel/types": "^7.28.4" - }, - "bin": { - "parser": "bin/babel-parser.js" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@babel/helper-member-expression-to-functions/node_modules/@babel/traverse": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", - "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.28.3", - "@babel/helper-globals": "^7.28.0", - "@babel/parser": "^7.28.4", - "@babel/template": "^7.27.2", - "@babel/types": "^7.28.4", - "debug": "^4.3.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-member-expression-to-functions/node_modules/@babel/types": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", - "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", - "dev": true, - "dependencies": { - "@babel/helper-string-parser": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-module-imports": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/traverse": "^7.27.1", - "@babel/types": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-module-imports/node_modules/@babel/generator": { - "version": "7.28.3", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", - "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", - "dev": true, - "dependencies": { - "@babel/parser": "^7.28.3", - "@babel/types": "^7.28.2", - "@jridgewell/gen-mapping": "^0.3.12", - "@jridgewell/trace-mapping": "^0.3.28", - "jsesc": "^3.0.2" - }, - "engines": { - "node": ">=6.9.0" - } - }, - 
"node_modules/@babel/helper-module-imports/node_modules/@babel/parser": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", - "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", - "dev": true, - "dependencies": { - "@babel/types": "^7.28.4" - }, - "bin": { - "parser": "bin/babel-parser.js" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@babel/helper-module-imports/node_modules/@babel/traverse": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", - "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.28.3", - "@babel/helper-globals": "^7.28.0", - "@babel/parser": "^7.28.4", - "@babel/template": "^7.27.2", - "@babel/types": "^7.28.4", - "debug": "^4.3.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-module-imports/node_modules/@babel/types": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", - "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", - "dev": true, - "dependencies": { - "@babel/helper-string-parser": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-module-transforms": { - "version": "7.28.3", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-module-imports": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1", - "@babel/traverse": "^7.28.3" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/helper-module-transforms/node_modules/@babel/generator": { - "version": "7.28.3", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", - "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", - "dev": true, - "dependencies": { - "@babel/parser": "^7.28.3", - "@babel/types": "^7.28.2", - "@jridgewell/gen-mapping": "^0.3.12", - "@jridgewell/trace-mapping": "^0.3.28", - "jsesc": "^3.0.2" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-module-transforms/node_modules/@babel/parser": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", - "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", - "dev": true, - "dependencies": { - "@babel/types": "^7.28.4" - }, - "bin": { - "parser": "bin/babel-parser.js" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@babel/helper-module-transforms/node_modules/@babel/traverse": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", - "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.28.3", - "@babel/helper-globals": "^7.28.0", - "@babel/parser": "^7.28.4", - "@babel/template": "^7.27.2", - "@babel/types": "^7.28.4", - "debug": "^4.3.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-module-transforms/node_modules/@babel/types": 
{ - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", - "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", - "dev": true, - "dependencies": { - "@babel/helper-string-parser": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-optimise-call-expression": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/types": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-optimise-call-expression/node_modules/@babel/types": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", - "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", - "dev": true, - "dependencies": { - "@babel/helper-string-parser": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-plugin-utils": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-remap-async-to-generator": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.27.1", - "@babel/helper-wrap-function": "^7.27.1", - "@babel/traverse": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/helper-remap-async-to-generator/node_modules/@babel/generator": { - "version": "7.28.3", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", - "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", - "dev": true, - "dependencies": { - "@babel/parser": "^7.28.3", - "@babel/types": "^7.28.2", - "@jridgewell/gen-mapping": "^0.3.12", - "@jridgewell/trace-mapping": "^0.3.28", - "jsesc": "^3.0.2" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-remap-async-to-generator/node_modules/@babel/helper-annotate-as-pure": { - "version": "7.27.3", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/types": "^7.27.3" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-remap-async-to-generator/node_modules/@babel/parser": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", - "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", - "dev": true, - "dependencies": { - "@babel/types": "^7.28.4" - }, - "bin": { - "parser": "bin/babel-parser.js" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@babel/helper-remap-async-to-generator/node_modules/@babel/traverse": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", - "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.28.3", - "@babel/helper-globals": "^7.28.0", - "@babel/parser": "^7.28.4", - "@babel/template": "^7.27.2", - "@babel/types": "^7.28.4", - "debug": "^4.3.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - 
"node_modules/@babel/helper-remap-async-to-generator/node_modules/@babel/types": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", - "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", - "dev": true, - "dependencies": { - "@babel/helper-string-parser": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-replace-supers": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-member-expression-to-functions": "^7.27.1", - "@babel/helper-optimise-call-expression": "^7.27.1", - "@babel/traverse": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/helper-replace-supers/node_modules/@babel/generator": { - "version": "7.28.3", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", - "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", - "dev": true, - "dependencies": { - "@babel/parser": "^7.28.3", - "@babel/types": "^7.28.2", - "@jridgewell/gen-mapping": "^0.3.12", - "@jridgewell/trace-mapping": "^0.3.28", - "jsesc": "^3.0.2" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-replace-supers/node_modules/@babel/parser": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", - "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", - "dev": true, - "dependencies": { - "@babel/types": "^7.28.4" - }, - "bin": { - "parser": "bin/babel-parser.js" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@babel/helper-replace-supers/node_modules/@babel/traverse": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", - "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.28.3", - "@babel/helper-globals": "^7.28.0", - "@babel/parser": "^7.28.4", - "@babel/template": "^7.27.2", - "@babel/types": "^7.28.4", - "debug": "^4.3.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-replace-supers/node_modules/@babel/types": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", - "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", - "dev": true, - "dependencies": { - "@babel/helper-string-parser": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-skip-transparent-expression-wrappers": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/traverse": "^7.27.1", - "@babel/types": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-skip-transparent-expression-wrappers/node_modules/@babel/generator": { - "version": "7.28.3", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", - "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", - "dev": true, - "dependencies": { - "@babel/parser": 
"^7.28.3", - "@babel/types": "^7.28.2", - "@jridgewell/gen-mapping": "^0.3.12", - "@jridgewell/trace-mapping": "^0.3.28", - "jsesc": "^3.0.2" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-skip-transparent-expression-wrappers/node_modules/@babel/parser": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", - "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", - "dev": true, - "dependencies": { - "@babel/types": "^7.28.4" - }, - "bin": { - "parser": "bin/babel-parser.js" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@babel/helper-skip-transparent-expression-wrappers/node_modules/@babel/traverse": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", - "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.28.3", - "@babel/helper-globals": "^7.28.0", - "@babel/parser": "^7.28.4", - "@babel/template": "^7.27.2", - "@babel/types": "^7.28.4", - "debug": "^4.3.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-skip-transparent-expression-wrappers/node_modules/@babel/types": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", - "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", - "dev": true, - "dependencies": { - "@babel/helper-string-parser": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-split-export-declaration": { - "version": "7.24.7", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/types": "^7.24.7" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-split-export-declaration/node_modules/@babel/types": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", - "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", - "dev": true, - "dependencies": { - "@babel/helper-string-parser": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-string-parser": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-validator-identifier": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-validator-option": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-wrap-function": { - "version": "7.28.3", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/template": "^7.27.2", - "@babel/traverse": "^7.28.3", - "@babel/types": "^7.28.2" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-wrap-function/node_modules/@babel/generator": { - "version": "7.28.3", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", - "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", - "dev": true, - "dependencies": { - "@babel/parser": "^7.28.3", 
- "@babel/types": "^7.28.2", - "@jridgewell/gen-mapping": "^0.3.12", - "@jridgewell/trace-mapping": "^0.3.28", - "jsesc": "^3.0.2" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-wrap-function/node_modules/@babel/parser": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", - "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", - "dev": true, - "dependencies": { - "@babel/types": "^7.28.4" - }, - "bin": { - "parser": "bin/babel-parser.js" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@babel/helper-wrap-function/node_modules/@babel/traverse": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", - "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.28.3", - "@babel/helper-globals": "^7.28.0", - "@babel/parser": "^7.28.4", - "@babel/template": "^7.27.2", - "@babel/types": "^7.28.4", - "debug": "^4.3.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/helper-wrap-function/node_modules/@babel/types": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", - "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", - "dev": true, - "dependencies": { - "@babel/helper-string-parser": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-bugfix-firefox-class-in-computed-class-key": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/traverse": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/plugin-bugfix-firefox-class-in-computed-class-key/node_modules/@babel/generator": { - "version": "7.28.3", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", - "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", - "dev": true, - "dependencies": { - "@babel/parser": "^7.28.3", - "@babel/types": "^7.28.2", - "@jridgewell/gen-mapping": "^0.3.12", - "@jridgewell/trace-mapping": "^0.3.28", - "jsesc": "^3.0.2" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-bugfix-firefox-class-in-computed-class-key/node_modules/@babel/parser": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", - "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", - "dev": true, - "dependencies": { - "@babel/types": "^7.28.4" - }, - "bin": { - "parser": "bin/babel-parser.js" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@babel/plugin-bugfix-firefox-class-in-computed-class-key/node_modules/@babel/traverse": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", - "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.28.3", - "@babel/helper-globals": "^7.28.0", - 
"@babel/parser": "^7.28.4", - "@babel/template": "^7.27.2", - "@babel/types": "^7.28.4", - "debug": "^4.3.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-bugfix-firefox-class-in-computed-class-key/node_modules/@babel/types": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", - "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", - "dev": true, - "dependencies": { - "@babel/helper-string-parser": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-bugfix-safari-class-field-initializer-scope": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1", - "@babel/plugin-transform-optional-chaining": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.13.0" - } - }, - "node_modules/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly": { - "version": "7.28.3", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/traverse": "^7.28.3" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly/node_modules/@babel/generator": { - "version": "7.28.3", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", - "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", - "dev": true, - "dependencies": { - "@babel/parser": "^7.28.3", - "@babel/types": "^7.28.2", - "@jridgewell/gen-mapping": "^0.3.12", - "@jridgewell/trace-mapping": "^0.3.28", - "jsesc": "^3.0.2" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly/node_modules/@babel/parser": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", - "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", - "dev": true, - "dependencies": { - "@babel/types": "^7.28.4" - }, - "bin": { - "parser": "bin/babel-parser.js" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly/node_modules/@babel/traverse": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", - "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.28.3", - 
"@babel/helper-globals": "^7.28.0", - "@babel/parser": "^7.28.4", - "@babel/template": "^7.27.2", - "@babel/types": "^7.28.4", - "debug": "^4.3.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly/node_modules/@babel/types": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", - "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", - "dev": true, - "dependencies": { - "@babel/helper-string-parser": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-proposal-private-property-in-object": { - "version": "7.21.0-placeholder-for-preset-env.2", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-import-assertions": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-import-attributes": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-syntax-unicode-sets-regex": { - "version": "7.18.6", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.18.6", - "@babel/helper-plugin-utils": "^7.18.6" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/plugin-transform-arrow-functions": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-async-generator-functions": { - "version": "7.26.8", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.26.5", - "@babel/helper-remap-async-to-generator": "^7.25.9", - "@babel/traverse": "^7.26.8" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-async-generator-functions/node_modules/@babel/generator": { - "version": "7.28.3", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", - "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", - "dev": true, - "dependencies": { - "@babel/parser": "^7.28.3", - "@babel/types": "^7.28.2", - "@jridgewell/gen-mapping": "^0.3.12", - "@jridgewell/trace-mapping": "^0.3.28", - "jsesc": "^3.0.2" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-transform-async-generator-functions/node_modules/@babel/parser": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", - "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", - "dev": true, - "dependencies": { - "@babel/types": "^7.28.4" - }, - "bin": { - "parser": "bin/babel-parser.js" - }, - "engines": { - 
"node": ">=6.0.0" - } - }, - "node_modules/@babel/plugin-transform-async-generator-functions/node_modules/@babel/traverse": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", - "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.28.3", - "@babel/helper-globals": "^7.28.0", - "@babel/parser": "^7.28.4", - "@babel/template": "^7.27.2", - "@babel/types": "^7.28.4", - "debug": "^4.3.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-transform-async-generator-functions/node_modules/@babel/types": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", - "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", - "dev": true, - "dependencies": { - "@babel/helper-string-parser": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-transform-async-to-generator": { - "version": "7.25.9", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-module-imports": "^7.25.9", - "@babel/helper-plugin-utils": "^7.25.9", - "@babel/helper-remap-async-to-generator": "^7.25.9" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-block-scoped-functions": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-block-scoping": { - "version": "7.28.4", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-class-properties": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-create-class-features-plugin": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-class-properties/node_modules/@babel/generator": { - "version": "7.28.3", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", - "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", - "dev": true, - "dependencies": { - "@babel/parser": "^7.28.3", - "@babel/types": "^7.28.2", - "@jridgewell/gen-mapping": "^0.3.12", - "@jridgewell/trace-mapping": "^0.3.28", - "jsesc": "^3.0.2" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-transform-class-properties/node_modules/@babel/helper-annotate-as-pure": { - "version": "7.27.3", - "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.27.3.tgz", - "integrity": "sha512-fXSwMQqitTGeHLBC08Eq5yXz2m37E4pJX1qAU1+2cNedz/ifv/bVXft90VeSav5nFO61EcNgwr0aJxbyPaWBPg==", - "dev": true, - "dependencies": { - "@babel/types": "^7.27.3" - }, - "engines": { - "node": ">=6.9.0" - } - }, - 
"node_modules/@babel/plugin-transform-class-properties/node_modules/@babel/helper-create-class-features-plugin": { - "version": "7.28.3", - "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.28.3.tgz", - "integrity": "sha512-V9f6ZFIYSLNEbuGA/92uOvYsGCJNsuA8ESZ4ldc09bWk/j8H8TKiPw8Mk1eG6olpnO0ALHJmYfZvF4MEE4gajg==", - "dev": true, - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.27.3", - "@babel/helper-member-expression-to-functions": "^7.27.1", - "@babel/helper-optimise-call-expression": "^7.27.1", - "@babel/helper-replace-supers": "^7.27.1", - "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1", - "@babel/traverse": "^7.28.3", - "semver": "^6.3.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/plugin-transform-class-properties/node_modules/@babel/parser": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", - "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", - "dev": true, - "dependencies": { - "@babel/types": "^7.28.4" - }, - "bin": { - "parser": "bin/babel-parser.js" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@babel/plugin-transform-class-properties/node_modules/@babel/traverse": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", - "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.28.3", - "@babel/helper-globals": "^7.28.0", - "@babel/parser": "^7.28.4", - "@babel/template": "^7.27.2", - "@babel/types": "^7.28.4", - "debug": "^4.3.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-transform-class-properties/node_modules/@babel/types": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", - "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", - "dev": true, - "dependencies": { - "@babel/helper-string-parser": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-transform-class-properties/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "dev": true, - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/@babel/plugin-transform-class-static-block": { - "version": "7.28.3", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-create-class-features-plugin": "^7.28.3", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.12.0" - } - }, - "node_modules/@babel/plugin-transform-class-static-block/node_modules/@babel/generator": { - "version": "7.28.3", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", - "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", - "dev": true, - "dependencies": { - "@babel/parser": "^7.28.3", - "@babel/types": "^7.28.2", - "@jridgewell/gen-mapping": "^0.3.12", - 
"@jridgewell/trace-mapping": "^0.3.28", - "jsesc": "^3.0.2" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-transform-class-static-block/node_modules/@babel/helper-annotate-as-pure": { - "version": "7.27.3", - "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.27.3.tgz", - "integrity": "sha512-fXSwMQqitTGeHLBC08Eq5yXz2m37E4pJX1qAU1+2cNedz/ifv/bVXft90VeSav5nFO61EcNgwr0aJxbyPaWBPg==", - "dev": true, - "dependencies": { - "@babel/types": "^7.27.3" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-transform-class-static-block/node_modules/@babel/helper-create-class-features-plugin": { - "version": "7.28.3", - "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.28.3.tgz", - "integrity": "sha512-V9f6ZFIYSLNEbuGA/92uOvYsGCJNsuA8ESZ4ldc09bWk/j8H8TKiPw8Mk1eG6olpnO0ALHJmYfZvF4MEE4gajg==", - "dev": true, - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.27.3", - "@babel/helper-member-expression-to-functions": "^7.27.1", - "@babel/helper-optimise-call-expression": "^7.27.1", - "@babel/helper-replace-supers": "^7.27.1", - "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1", - "@babel/traverse": "^7.28.3", - "semver": "^6.3.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/plugin-transform-class-static-block/node_modules/@babel/parser": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", - "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", - "dev": true, - "dependencies": { - "@babel/types": "^7.28.4" - }, - "bin": { - "parser": "bin/babel-parser.js" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@babel/plugin-transform-class-static-block/node_modules/@babel/traverse": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", - "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.28.3", - "@babel/helper-globals": "^7.28.0", - "@babel/parser": "^7.28.4", - "@babel/template": "^7.27.2", - "@babel/types": "^7.28.4", - "debug": "^4.3.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-transform-class-static-block/node_modules/@babel/types": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", - "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", - "dev": true, - "dependencies": { - "@babel/helper-string-parser": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-transform-class-static-block/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "dev": true, - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/@babel/plugin-transform-classes": { - "version": "7.28.4", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.27.3", - 
"@babel/helper-compilation-targets": "^7.27.2", - "@babel/helper-globals": "^7.28.0", - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/helper-replace-supers": "^7.27.1", - "@babel/traverse": "^7.28.4" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-classes/node_modules/@babel/generator": { - "version": "7.28.3", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", - "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", - "dev": true, - "dependencies": { - "@babel/parser": "^7.28.3", - "@babel/types": "^7.28.2", - "@jridgewell/gen-mapping": "^0.3.12", - "@jridgewell/trace-mapping": "^0.3.28", - "jsesc": "^3.0.2" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-transform-classes/node_modules/@babel/helper-annotate-as-pure": { - "version": "7.27.3", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/types": "^7.27.3" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-transform-classes/node_modules/@babel/parser": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", - "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", - "dev": true, - "dependencies": { - "@babel/types": "^7.28.4" - }, - "bin": { - "parser": "bin/babel-parser.js" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@babel/plugin-transform-classes/node_modules/@babel/traverse": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", - "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.28.3", - "@babel/helper-globals": "^7.28.0", - "@babel/parser": "^7.28.4", - "@babel/template": "^7.27.2", - "@babel/types": "^7.28.4", - "debug": "^4.3.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-transform-classes/node_modules/@babel/types": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", - "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", - "dev": true, - "dependencies": { - "@babel/helper-string-parser": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-transform-computed-properties": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/template": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-destructuring": { - "version": "7.28.0", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/traverse": "^7.28.0" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-destructuring/node_modules/@babel/generator": { - "version": "7.28.3", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", - "integrity": 
"sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", - "dev": true, - "dependencies": { - "@babel/parser": "^7.28.3", - "@babel/types": "^7.28.2", - "@jridgewell/gen-mapping": "^0.3.12", - "@jridgewell/trace-mapping": "^0.3.28", - "jsesc": "^3.0.2" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-transform-destructuring/node_modules/@babel/parser": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", - "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", - "dev": true, - "dependencies": { - "@babel/types": "^7.28.4" - }, - "bin": { - "parser": "bin/babel-parser.js" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@babel/plugin-transform-destructuring/node_modules/@babel/traverse": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", - "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.28.3", - "@babel/helper-globals": "^7.28.0", - "@babel/parser": "^7.28.4", - "@babel/template": "^7.27.2", - "@babel/types": "^7.28.4", - "debug": "^4.3.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-transform-destructuring/node_modules/@babel/types": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", - "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", - "dev": true, - "dependencies": { - "@babel/helper-string-parser": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-transform-dotall-regex": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-duplicate-keys": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-duplicate-named-capturing-groups-regex": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/plugin-transform-dynamic-import": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-exponentiation-operator": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-export-namespace-from": { - "version": "7.27.1", - 
"dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-for-of": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-function-name": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-compilation-targets": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/traverse": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-function-name/node_modules/@babel/generator": { - "version": "7.28.3", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", - "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", - "dev": true, - "dependencies": { - "@babel/parser": "^7.28.3", - "@babel/types": "^7.28.2", - "@jridgewell/gen-mapping": "^0.3.12", - "@jridgewell/trace-mapping": "^0.3.28", - "jsesc": "^3.0.2" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-transform-function-name/node_modules/@babel/parser": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", - "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", - "dev": true, - "dependencies": { - "@babel/types": "^7.28.4" - }, - "bin": { - "parser": "bin/babel-parser.js" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@babel/plugin-transform-function-name/node_modules/@babel/traverse": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", - "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.28.3", - "@babel/helper-globals": "^7.28.0", - "@babel/parser": "^7.28.4", - "@babel/template": "^7.27.2", - "@babel/types": "^7.28.4", - "debug": "^4.3.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-transform-function-name/node_modules/@babel/types": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", - "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", - "dev": true, - "dependencies": { - "@babel/helper-string-parser": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-transform-json-strings": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-literals": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": 
"^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-logical-assignment-operators": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-member-expression-literals": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-modules-amd": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-module-transforms": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-modules-commonjs": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-module-transforms": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-modules-systemjs": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-module-transforms": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1", - "@babel/traverse": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-modules-systemjs/node_modules/@babel/generator": { - "version": "7.28.3", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", - "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", - "dev": true, - "dependencies": { - "@babel/parser": "^7.28.3", - "@babel/types": "^7.28.2", - "@jridgewell/gen-mapping": "^0.3.12", - "@jridgewell/trace-mapping": "^0.3.28", - "jsesc": "^3.0.2" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-transform-modules-systemjs/node_modules/@babel/parser": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", - "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", - "dev": true, - "dependencies": { - "@babel/types": "^7.28.4" - }, - "bin": { - "parser": "bin/babel-parser.js" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@babel/plugin-transform-modules-systemjs/node_modules/@babel/traverse": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", - "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.28.3", - "@babel/helper-globals": "^7.28.0", - "@babel/parser": "^7.28.4", - "@babel/template": "^7.27.2", - "@babel/types": "^7.28.4", - "debug": "^4.3.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-transform-modules-systemjs/node_modules/@babel/types": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", - "integrity": 
"sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", - "dev": true, - "dependencies": { - "@babel/helper-string-parser": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-transform-modules-umd": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-module-transforms": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-named-capturing-groups-regex": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/plugin-transform-new-target": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-nullish-coalescing-operator": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-numeric-separator": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-object-rest-spread": { - "version": "7.28.4", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-compilation-targets": "^7.27.2", - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/plugin-transform-destructuring": "^7.28.0", - "@babel/plugin-transform-parameters": "^7.27.7", - "@babel/traverse": "^7.28.4" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-object-rest-spread/node_modules/@babel/generator": { - "version": "7.28.3", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", - "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", - "dev": true, - "dependencies": { - "@babel/parser": "^7.28.3", - "@babel/types": "^7.28.2", - "@jridgewell/gen-mapping": "^0.3.12", - "@jridgewell/trace-mapping": "^0.3.28", - "jsesc": "^3.0.2" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-transform-object-rest-spread/node_modules/@babel/parser": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", - "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", - "dev": true, - "dependencies": { - "@babel/types": "^7.28.4" - }, - "bin": { - "parser": "bin/babel-parser.js" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@babel/plugin-transform-object-rest-spread/node_modules/@babel/traverse": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", - "integrity": 
"sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.28.3", - "@babel/helper-globals": "^7.28.0", - "@babel/parser": "^7.28.4", - "@babel/template": "^7.27.2", - "@babel/types": "^7.28.4", - "debug": "^4.3.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-transform-object-rest-spread/node_modules/@babel/types": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", - "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", - "dev": true, - "dependencies": { - "@babel/helper-string-parser": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-transform-object-super": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/helper-replace-supers": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-optional-catch-binding": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-optional-chaining": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-parameters": { - "version": "7.27.7", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-private-methods": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-create-class-features-plugin": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-private-methods/node_modules/@babel/generator": { - "version": "7.28.3", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", - "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", - "dev": true, - "dependencies": { - "@babel/parser": "^7.28.3", - "@babel/types": "^7.28.2", - "@jridgewell/gen-mapping": "^0.3.12", - "@jridgewell/trace-mapping": "^0.3.28", - "jsesc": "^3.0.2" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-transform-private-methods/node_modules/@babel/helper-annotate-as-pure": { - "version": "7.27.3", - "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.27.3.tgz", - "integrity": "sha512-fXSwMQqitTGeHLBC08Eq5yXz2m37E4pJX1qAU1+2cNedz/ifv/bVXft90VeSav5nFO61EcNgwr0aJxbyPaWBPg==", - "dev": true, - "dependencies": { - "@babel/types": "^7.27.3" - }, - "engines": { - "node": ">=6.9.0" - } - }, - 
"node_modules/@babel/plugin-transform-private-methods/node_modules/@babel/helper-create-class-features-plugin": { - "version": "7.28.3", - "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.28.3.tgz", - "integrity": "sha512-V9f6ZFIYSLNEbuGA/92uOvYsGCJNsuA8ESZ4ldc09bWk/j8H8TKiPw8Mk1eG6olpnO0ALHJmYfZvF4MEE4gajg==", - "dev": true, - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.27.3", - "@babel/helper-member-expression-to-functions": "^7.27.1", - "@babel/helper-optimise-call-expression": "^7.27.1", - "@babel/helper-replace-supers": "^7.27.1", - "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1", - "@babel/traverse": "^7.28.3", - "semver": "^6.3.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/plugin-transform-private-methods/node_modules/@babel/parser": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", - "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", - "dev": true, - "dependencies": { - "@babel/types": "^7.28.4" - }, - "bin": { - "parser": "bin/babel-parser.js" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@babel/plugin-transform-private-methods/node_modules/@babel/traverse": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", - "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.28.3", - "@babel/helper-globals": "^7.28.0", - "@babel/parser": "^7.28.4", - "@babel/template": "^7.27.2", - "@babel/types": "^7.28.4", - "debug": "^4.3.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-transform-private-methods/node_modules/@babel/types": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", - "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", - "dev": true, - "dependencies": { - "@babel/helper-string-parser": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-transform-private-methods/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "dev": true, - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/@babel/plugin-transform-private-property-in-object": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.27.1", - "@babel/helper-create-class-features-plugin": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-private-property-in-object/node_modules/@babel/generator": { - "version": "7.28.3", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", - "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", - "dev": true, - "dependencies": { - "@babel/parser": "^7.28.3", - 
"@babel/types": "^7.28.2", - "@jridgewell/gen-mapping": "^0.3.12", - "@jridgewell/trace-mapping": "^0.3.28", - "jsesc": "^3.0.2" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-transform-private-property-in-object/node_modules/@babel/helper-annotate-as-pure": { - "version": "7.27.3", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/types": "^7.27.3" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-transform-private-property-in-object/node_modules/@babel/helper-create-class-features-plugin": { - "version": "7.28.3", - "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.28.3.tgz", - "integrity": "sha512-V9f6ZFIYSLNEbuGA/92uOvYsGCJNsuA8ESZ4ldc09bWk/j8H8TKiPw8Mk1eG6olpnO0ALHJmYfZvF4MEE4gajg==", - "dev": true, - "dependencies": { - "@babel/helper-annotate-as-pure": "^7.27.3", - "@babel/helper-member-expression-to-functions": "^7.27.1", - "@babel/helper-optimise-call-expression": "^7.27.1", - "@babel/helper-replace-supers": "^7.27.1", - "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1", - "@babel/traverse": "^7.28.3", - "semver": "^6.3.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/plugin-transform-private-property-in-object/node_modules/@babel/parser": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", - "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", - "dev": true, - "dependencies": { - "@babel/types": "^7.28.4" - }, - "bin": { - "parser": "bin/babel-parser.js" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@babel/plugin-transform-private-property-in-object/node_modules/@babel/traverse": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", - "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", - "dev": true, - "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/generator": "^7.28.3", - "@babel/helper-globals": "^7.28.0", - "@babel/parser": "^7.28.4", - "@babel/template": "^7.27.2", - "@babel/types": "^7.28.4", - "debug": "^4.3.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-transform-private-property-in-object/node_modules/@babel/types": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", - "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", - "dev": true, - "dependencies": { - "@babel/helper-string-parser": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/plugin-transform-private-property-in-object/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "dev": true, - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/@babel/plugin-transform-property-literals": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - 
"node_modules/@babel/plugin-transform-regexp-modifiers": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/plugin-transform-reserved-words": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-runtime": { - "version": "7.26.10", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-module-imports": "^7.25.9", - "@babel/helper-plugin-utils": "^7.26.5", - "babel-plugin-polyfill-corejs2": "^0.4.10", - "babel-plugin-polyfill-corejs3": "^0.11.0", - "babel-plugin-polyfill-regenerator": "^0.6.1", - "semver": "^6.3.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-runtime/node_modules/semver": { - "version": "6.3.1", - "dev": true, - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/@babel/plugin-transform-shorthand-properties": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-spread": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1", - "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-sticky-regex": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-template-literals": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-typeof-symbol": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-unicode-escapes": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-unicode-property-regex": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-unicode-regex": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": 
{ - "@babel/helper-create-regexp-features-plugin": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/plugin-transform-unicode-sets-regex": { - "version": "7.27.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-create-regexp-features-plugin": "^7.27.1", - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0" - } - }, - "node_modules/@babel/preset-env": { - "version": "7.26.9", - "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.26.9.tgz", - "integrity": "sha512-vX3qPGE8sEKEAZCWk05k3cpTAE3/nOYca++JA+Rd0z2NCNzabmYvEiSShKzm10zdquOIAVXsy2Ei/DTW34KlKQ==", - "dev": true, - "dependencies": { - "@babel/compat-data": "^7.26.8", - "@babel/helper-compilation-targets": "^7.26.5", - "@babel/helper-plugin-utils": "^7.26.5", - "@babel/helper-validator-option": "^7.25.9", - "@babel/plugin-bugfix-firefox-class-in-computed-class-key": "^7.25.9", - "@babel/plugin-bugfix-safari-class-field-initializer-scope": "^7.25.9", - "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": "^7.25.9", - "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": "^7.25.9", - "@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly": "^7.25.9", - "@babel/plugin-proposal-private-property-in-object": "7.21.0-placeholder-for-preset-env.2", - "@babel/plugin-syntax-import-assertions": "^7.26.0", - "@babel/plugin-syntax-import-attributes": "^7.26.0", - "@babel/plugin-syntax-unicode-sets-regex": "^7.18.6", - "@babel/plugin-transform-arrow-functions": "^7.25.9", - "@babel/plugin-transform-async-generator-functions": "^7.26.8", - "@babel/plugin-transform-async-to-generator": "^7.25.9", - "@babel/plugin-transform-block-scoped-functions": "^7.26.5", - "@babel/plugin-transform-block-scoping": "^7.25.9", - "@babel/plugin-transform-class-properties": "^7.25.9", - "@babel/plugin-transform-class-static-block": "^7.26.0", - "@babel/plugin-transform-classes": "^7.25.9", - "@babel/plugin-transform-computed-properties": "^7.25.9", - "@babel/plugin-transform-destructuring": "^7.25.9", - "@babel/plugin-transform-dotall-regex": "^7.25.9", - "@babel/plugin-transform-duplicate-keys": "^7.25.9", - "@babel/plugin-transform-duplicate-named-capturing-groups-regex": "^7.25.9", - "@babel/plugin-transform-dynamic-import": "^7.25.9", - "@babel/plugin-transform-exponentiation-operator": "^7.26.3", - "@babel/plugin-transform-export-namespace-from": "^7.25.9", - "@babel/plugin-transform-for-of": "^7.26.9", - "@babel/plugin-transform-function-name": "^7.25.9", - "@babel/plugin-transform-json-strings": "^7.25.9", - "@babel/plugin-transform-literals": "^7.25.9", - "@babel/plugin-transform-logical-assignment-operators": "^7.25.9", - "@babel/plugin-transform-member-expression-literals": "^7.25.9", - "@babel/plugin-transform-modules-amd": "^7.25.9", - "@babel/plugin-transform-modules-commonjs": "^7.26.3", - "@babel/plugin-transform-modules-systemjs": "^7.25.9", - "@babel/plugin-transform-modules-umd": "^7.25.9", - "@babel/plugin-transform-named-capturing-groups-regex": "^7.25.9", - "@babel/plugin-transform-new-target": "^7.25.9", - "@babel/plugin-transform-nullish-coalescing-operator": "^7.26.6", - "@babel/plugin-transform-numeric-separator": "^7.25.9", - "@babel/plugin-transform-object-rest-spread": "^7.25.9", - "@babel/plugin-transform-object-super": "^7.25.9", - 
"@babel/plugin-transform-optional-catch-binding": "^7.25.9", - "@babel/plugin-transform-optional-chaining": "^7.25.9", - "@babel/plugin-transform-parameters": "^7.25.9", - "@babel/plugin-transform-private-methods": "^7.25.9", - "@babel/plugin-transform-private-property-in-object": "^7.25.9", - "@babel/plugin-transform-property-literals": "^7.25.9", - "@babel/plugin-transform-regenerator": "^7.25.9", - "@babel/plugin-transform-regexp-modifiers": "^7.26.0", - "@babel/plugin-transform-reserved-words": "^7.25.9", - "@babel/plugin-transform-shorthand-properties": "^7.25.9", - "@babel/plugin-transform-spread": "^7.25.9", - "@babel/plugin-transform-sticky-regex": "^7.25.9", - "@babel/plugin-transform-template-literals": "^7.26.8", - "@babel/plugin-transform-typeof-symbol": "^7.26.7", - "@babel/plugin-transform-unicode-escapes": "^7.25.9", - "@babel/plugin-transform-unicode-property-regex": "^7.25.9", - "@babel/plugin-transform-unicode-regex": "^7.25.9", - "@babel/plugin-transform-unicode-sets-regex": "^7.25.9", - "@babel/preset-modules": "0.1.6-no-external-plugins", - "babel-plugin-polyfill-corejs2": "^0.4.10", - "babel-plugin-polyfill-corejs3": "^0.11.0", - "babel-plugin-polyfill-regenerator": "^0.6.1", - "core-js-compat": "^3.40.0", - "semver": "^6.3.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/preset-env/node_modules/@babel/plugin-transform-regenerator": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.28.4.tgz", - "integrity": "sha512-+ZEdQlBoRg9m2NnzvEeLgtvBMO4tkFBw5SQIUgLICgTrumLoU7lr+Oghi6km2PFj+dbUt2u1oby2w3BDO9YQnA==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0" - } - }, - "node_modules/@babel/preset-env/node_modules/@babel/preset-modules": { - "version": "0.1.6-no-external-plugins", - "resolved": "https://registry.npmjs.org/@babel/preset-modules/-/preset-modules-0.1.6-no-external-plugins.tgz", - "integrity": "sha512-HrcgcIESLm9aIR842yhJ5RWan/gebQUJ6E/E5+rf0y9o6oj7w0Br+sWuL6kEQ/o/AdfvR1Je9jG18/gnpwjEyA==", - "dev": true, - "dependencies": { - "@babel/helper-plugin-utils": "^7.0.0", - "@babel/types": "^7.4.4", - "esutils": "^2.0.2" - }, - "peerDependencies": { - "@babel/core": "^7.0.0-0 || ^8.0.0-0 <8.0.0" - } - }, - "node_modules/@babel/preset-env/node_modules/@babel/types": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", - "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", - "dev": true, - "dependencies": { - "@babel/helper-string-parser": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/preset-env/node_modules/core-js-compat": { - "version": "3.46.0", - "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.46.0.tgz", - "integrity": "sha512-p9hObIIEENxSV8xIu+V68JjSeARg6UVMG5mR+JEUguG3sI6MsiS1njz2jHmyJDvA+8jX/sytkBHup6kxhM9law==", - "dev": true, - "dependencies": { - "browserslist": "^4.26.3" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/core-js" - } - }, - "node_modules/@babel/preset-env/node_modules/semver": { - "version": "6.3.1", - "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", - "integrity": 
"sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", - "dev": true, - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/@babel/runtime": { - "version": "7.26.10", - "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.26.10.tgz", - "integrity": "sha512-2WJMeRQPHKSPemqk/awGrAiuFfzBmOIPXKizAsVhWH9YJqLZ0H+HS4c8loHGgW6utJ3E/ejXQUsiGaQy2NZ9Fw==", - "dev": true, - "dependencies": { - "regenerator-runtime": "^0.14.0" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/template": { - "version": "7.27.2", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/code-frame": "^7.27.1", - "@babel/parser": "^7.27.2", - "@babel/types": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@babel/template/node_modules/@babel/parser": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", - "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", - "dev": true, - "dependencies": { - "@babel/types": "^7.28.4" - }, - "bin": { - "parser": "bin/babel-parser.js" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@babel/template/node_modules/@babel/types": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", - "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", - "dev": true, - "dependencies": { - "@babel/helper-string-parser": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/@colors/colors": { - "version": "1.5.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.1.90" - } - }, - "node_modules/@discoveryjs/json-ext": { - "version": "0.5.7", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10.0.0" - } - }, - "node_modules/@isaacs/cliui": { - "version": "8.0.2", - "dev": true, - "license": "ISC", - "dependencies": { - "string-width": "^5.1.2", - "string-width-cjs": "npm:string-width@^4.2.0", - "strip-ansi": "^7.0.1", - "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", - "wrap-ansi": "^8.1.0", - "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/@isaacs/cliui/node_modules/ansi-regex": { - "version": "6.2.2", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-regex?sponsor=1" - } - }, - "node_modules/@isaacs/cliui/node_modules/ansi-styles": { - "version": "6.2.3", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/@isaacs/cliui/node_modules/emoji-regex": { - "version": "9.2.2", - "dev": true, - "license": "MIT" - }, - "node_modules/@isaacs/cliui/node_modules/string-width": { - "version": "5.1.2", - "dev": true, - "license": "MIT", - "dependencies": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^9.2.2", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/@isaacs/cliui/node_modules/strip-ansi": { - "version": "7.1.2", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^6.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/strip-ansi?sponsor=1" - } - }, - 
"node_modules/@isaacs/cliui/node_modules/wrap-ansi": { - "version": "8.1.0", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^6.1.0", - "string-width": "^5.0.1", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/@istanbuljs/load-nyc-config": { - "version": "1.1.0", - "dev": true, - "license": "ISC", - "dependencies": { - "camelcase": "^5.3.1", - "find-up": "^4.1.0", - "get-package-type": "^0.1.0", - "js-yaml": "^3.13.1", - "resolve-from": "^5.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/@istanbuljs/load-nyc-config/node_modules/js-yaml": { - "version": "3.14.1", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", - "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", - "dev": true, - "dependencies": { - "argparse": "^1.0.7", - "esprima": "^4.0.0" - }, - "bin": { - "js-yaml": "bin/js-yaml.js" - } - }, - "node_modules/@istanbuljs/schema": { - "version": "0.1.3", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/@jridgewell/gen-mapping": { - "version": "0.3.13", - "dev": true, - "license": "MIT", - "dependencies": { - "@jridgewell/sourcemap-codec": "^1.5.0", - "@jridgewell/trace-mapping": "^0.3.24" - } - }, - "node_modules/@jridgewell/resolve-uri": { - "version": "3.1.2", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/@jridgewell/source-map": { - "version": "0.3.11", - "dev": true, - "license": "MIT", - "dependencies": { - "@jridgewell/gen-mapping": "^0.3.5", - "@jridgewell/trace-mapping": "^0.3.25" - } - }, - "node_modules/@jridgewell/sourcemap-codec": { - "version": "1.5.5", - "dev": true, - "license": "MIT" - }, - "node_modules/@jridgewell/trace-mapping": { - "version": "0.3.31", - "dev": true, - "license": "MIT", - "dependencies": { - "@jridgewell/resolve-uri": "^3.1.0", - "@jridgewell/sourcemap-codec": "^1.4.14" - } - }, - "node_modules/@leichtgewicht/ip-codec": { - "version": "2.0.5", - "dev": true, - "license": "MIT" - }, - "node_modules/@ljharb/through": { - "version": "2.3.14", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bind": "^1.0.8" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/@ngtools/webpack": { - "version": "17.3.17", - "resolved": "https://registry.npmjs.org/@ngtools/webpack/-/webpack-17.3.17.tgz", - "integrity": "sha512-LaO++U8DoqV36M0YLKhubc1+NqM8fyp5DN03k1uP9GvtRchP9+7bfG+IEEZiDFkCUh9lfzi1CiGvUHrN4MYcsA==", - "dev": true, - "engines": { - "node": "^18.13.0 || >=20.9.0", - "npm": "^6.11.0 || ^7.5.6 || >=8.0.0", - "yarn": ">= 1.13.0" - }, - "peerDependencies": { - "@angular/compiler-cli": "^17.0.0", - "typescript": ">=5.2 <5.5", - "webpack": "^5.54.0" - } - }, - "node_modules/@nodelib/fs.scandir": { - "version": "2.1.5", - "dev": true, - "license": "MIT", - "dependencies": { - "@nodelib/fs.stat": "2.0.5", - "run-parallel": "^1.1.9" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.stat": { - "version": "2.0.5", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 8" - } - }, - "node_modules/@nodelib/fs.walk": { - "version": "1.2.8", - "dev": true, - "license": "MIT", - "dependencies": { - "@nodelib/fs.scandir": "2.1.5", - "fastq": "^1.6.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@npmcli/agent": { - "version": "2.2.2", - "dev": true, - "license": "ISC", - 
"dependencies": { - "agent-base": "^7.1.0", - "http-proxy-agent": "^7.0.0", - "https-proxy-agent": "^7.0.1", - "lru-cache": "^10.0.1", - "socks-proxy-agent": "^8.0.3" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/@npmcli/agent/node_modules/lru-cache": { - "version": "10.4.3", - "dev": true, - "license": "ISC" - }, - "node_modules/@npmcli/fs": { - "version": "3.1.1", - "dev": true, - "license": "ISC", - "dependencies": { - "semver": "^7.3.5" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/@npmcli/git": { - "version": "5.0.8", - "dev": true, - "license": "ISC", - "dependencies": { - "@npmcli/promise-spawn": "^7.0.0", - "ini": "^4.1.3", - "lru-cache": "^10.0.1", - "npm-pick-manifest": "^9.0.0", - "proc-log": "^4.0.0", - "promise-inflight": "^1.0.1", - "promise-retry": "^2.0.1", - "semver": "^7.3.5", - "which": "^4.0.0" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/@npmcli/git/node_modules/ini": { - "version": "4.1.3", - "dev": true, - "license": "ISC", - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/@npmcli/git/node_modules/isexe": { - "version": "3.1.1", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=16" - } - }, - "node_modules/@npmcli/git/node_modules/lru-cache": { - "version": "10.4.3", - "dev": true, - "license": "ISC" - }, - "node_modules/@npmcli/git/node_modules/proc-log": { - "version": "4.2.0", - "dev": true, - "license": "ISC", - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/@npmcli/git/node_modules/which": { - "version": "4.0.0", - "dev": true, - "license": "ISC", - "dependencies": { - "isexe": "^3.1.1" - }, - "bin": { - "node-which": "bin/which.js" - }, - "engines": { - "node": "^16.13.0 || >=18.0.0" - } - }, - "node_modules/@npmcli/installed-package-contents": { - "version": "2.1.0", - "dev": true, - "license": "ISC", - "dependencies": { - "npm-bundled": "^3.0.0", - "npm-normalize-package-bin": "^3.0.0" - }, - "bin": { - "installed-package-contents": "bin/index.js" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/@npmcli/node-gyp": { - "version": "3.0.0", - "dev": true, - "license": "ISC", - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/@npmcli/package-json": { - "version": "5.2.1", - "dev": true, - "license": "ISC", - "dependencies": { - "@npmcli/git": "^5.0.0", - "glob": "^10.2.2", - "hosted-git-info": "^7.0.0", - "json-parse-even-better-errors": "^3.0.0", - "normalize-package-data": "^6.0.0", - "proc-log": "^4.0.0", - "semver": "^7.5.3" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/@npmcli/package-json/node_modules/brace-expansion": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", - "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", - "dev": true, - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - "node_modules/@npmcli/package-json/node_modules/glob": { - "version": "10.4.5", - "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz", - "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==", - "dev": true, - "dependencies": { - "foreground-child": "^3.1.0", - "jackspeak": "^3.1.2", - "minimatch": "^9.0.4", - "minipass": "^7.1.2", - "package-json-from-dist": "^1.0.0", - "path-scurry": 
"^1.11.1" - }, - "bin": { - "glob": "dist/esm/bin.mjs" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/@npmcli/package-json/node_modules/minimatch": { - "version": "9.0.5", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", - "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", - "dev": true, - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/@npmcli/package-json/node_modules/proc-log": { - "version": "4.2.0", - "dev": true, - "license": "ISC", - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/@npmcli/promise-spawn": { - "version": "7.0.2", - "dev": true, - "license": "ISC", - "dependencies": { - "which": "^4.0.0" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/@npmcli/promise-spawn/node_modules/isexe": { - "version": "3.1.1", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=16" - } - }, - "node_modules/@npmcli/promise-spawn/node_modules/which": { - "version": "4.0.0", - "dev": true, - "license": "ISC", - "dependencies": { - "isexe": "^3.1.1" - }, - "bin": { - "node-which": "bin/which.js" - }, - "engines": { - "node": "^16.13.0 || >=18.0.0" - } - }, - "node_modules/@npmcli/redact": { - "version": "1.1.0", - "dev": true, - "license": "ISC", - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/@npmcli/run-script": { - "version": "7.0.4", - "dev": true, - "license": "ISC", - "dependencies": { - "@npmcli/node-gyp": "^3.0.0", - "@npmcli/package-json": "^5.0.0", - "@npmcli/promise-spawn": "^7.0.0", - "node-gyp": "^10.0.0", - "which": "^4.0.0" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/@npmcli/run-script/node_modules/brace-expansion": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", - "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", - "dev": true, - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - "node_modules/@npmcli/run-script/node_modules/fs-minipass": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", - "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", - "dev": true, - "dependencies": { - "minipass": "^3.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/@npmcli/run-script/node_modules/fs-minipass/node_modules/minipass": { - "version": "3.3.6", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", - "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", - "dev": true, - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/@npmcli/run-script/node_modules/glob": { - "version": "10.4.5", - "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz", - "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==", - "dev": true, - "dependencies": { - "foreground-child": "^3.1.0", - "jackspeak": "^3.1.2", - "minimatch": "^9.0.4", - "minipass": "^7.1.2", - "package-json-from-dist": "^1.0.0", - "path-scurry": "^1.11.1" - }, - "bin": { - "glob": "dist/esm/bin.mjs" - }, 
- "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/@npmcli/run-script/node_modules/isexe": { - "version": "3.1.1", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=16" - } - }, - "node_modules/@npmcli/run-script/node_modules/minimatch": { - "version": "9.0.5", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", - "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", - "dev": true, - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/@npmcli/run-script/node_modules/mkdirp": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", - "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", - "dev": true, - "bin": { - "mkdirp": "bin/cmd.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/@npmcli/run-script/node_modules/node-gyp": { - "version": "10.3.1", - "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-10.3.1.tgz", - "integrity": "sha512-Pp3nFHBThHzVtNY7U6JfPjvT/DTE8+o/4xKsLQtBoU+j2HLsGlhcfzflAoUreaJbNmYnX+LlLi0qjV8kpyO6xQ==", - "dev": true, - "dependencies": { - "env-paths": "^2.2.0", - "exponential-backoff": "^3.1.1", - "glob": "^10.3.10", - "graceful-fs": "^4.2.6", - "make-fetch-happen": "^13.0.0", - "nopt": "^7.0.0", - "proc-log": "^4.1.0", - "semver": "^7.3.5", - "tar": "^6.2.1", - "which": "^4.0.0" - }, - "bin": { - "node-gyp": "bin/node-gyp.js" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/@npmcli/run-script/node_modules/proc-log": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-4.2.0.tgz", - "integrity": "sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==", - "dev": true, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/@npmcli/run-script/node_modules/tar": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", - "integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==", - "dev": true, - "dependencies": { - "chownr": "^2.0.0", - "fs-minipass": "^2.0.0", - "minipass": "^5.0.0", - "minizlib": "^2.1.1", - "mkdirp": "^1.0.3", - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/@npmcli/run-script/node_modules/tar/node_modules/minipass": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", - "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/@npmcli/run-script/node_modules/which": { - "version": "4.0.0", - "dev": true, - "license": "ISC", - "dependencies": { - "isexe": "^3.1.1" - }, - "bin": { - "node-which": "bin/which.js" - }, - "engines": { - "node": "^16.13.0 || >=18.0.0" - } - }, - "node_modules/@npmcli/run-script/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - }, - "node_modules/@pkgjs/parseargs": { - "version": "0.11.0", - "dev": true, - "license": "MIT", - "optional": 
true, - "engines": { - "node": ">=14" - } - }, - "node_modules/@playwright/test": { - "version": "1.56.1", - "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.56.1.tgz", - "integrity": "sha512-vSMYtL/zOcFpvJCW71Q/OEGQb7KYBPAdKh35WNSkaZA75JlAO8ED8UN6GUNTm3drWomcbcqRPFqQbLae8yBTdg==", - "dev": true, - "dependencies": { - "playwright": "1.56.1" - }, - "bin": { - "playwright": "cli.js" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/@rollup/rollup-linux-x64-gnu": { - "version": "4.52.5", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@rollup/rollup-linux-x64-musl": { - "version": "4.52.5", - "cpu": [ - "x64" - ], - "dev": true, - "license": "MIT", - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@schematics/angular": { - "version": "17.3.17", - "resolved": "https://registry.npmjs.org/@schematics/angular/-/angular-17.3.17.tgz", - "integrity": "sha512-S5HwYem5Yjeceb5OLvforNcjfTMh2qsHnTP1BAYL81XPpqeg2udjAkJjKBxCwxMZSqdCMw3ne0eKppEYTaEZ+A==", - "dev": true, - "dependencies": { - "@angular-devkit/core": "17.3.17", - "@angular-devkit/schematics": "17.3.17", - "jsonc-parser": "3.2.1" - }, - "engines": { - "node": "^18.13.0 || >=20.9.0", - "npm": "^6.11.0 || ^7.5.6 || >=8.0.0", - "yarn": ">= 1.13.0" - } - }, - "node_modules/@sigstore/bundle": { - "version": "2.3.2", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "@sigstore/protobuf-specs": "^0.3.2" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.3.3.tgz", - "integrity": "sha512-RpacQhBlwpBWd7KEJsRKcBQalbV28fvkxwTOJIqhIuDysMMaJW47V4OqW30iJB9uRpqOSxxEAQFdr8tTattReQ==", - "dev": true, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/@sigstore/tuf": { - "version": "2.3.4", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "@sigstore/protobuf-specs": "^0.3.2", - "tuf-js": "^2.2.1" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.3.3.tgz", - "integrity": "sha512-RpacQhBlwpBWd7KEJsRKcBQalbV28fvkxwTOJIqhIuDysMMaJW47V4OqW30iJB9uRpqOSxxEAQFdr8tTattReQ==", - "dev": true, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/@socket.io/component-emitter": { - "version": "3.1.2", - "dev": true, - "license": "MIT" - }, - "node_modules/@tufjs/canonical-json": { - "version": "2.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/@tufjs/models": { - "version": "2.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@tufjs/canonical-json": "2.0.0", - "minimatch": "^9.0.4" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/@tufjs/models/node_modules/brace-expansion": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", - "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", - "dev": true, - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - "node_modules/@tufjs/models/node_modules/minimatch": { - "version": "9.0.5", - "resolved": 
"https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", - "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", - "dev": true, - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/@types/body-parser": { - "version": "1.19.6", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/connect": "*", - "@types/node": "*" - } - }, - "node_modules/@types/bonjour": { - "version": "3.5.13", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/node": "*" - } - }, - "node_modules/@types/connect": { - "version": "3.4.38", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/node": "*" - } - }, - "node_modules/@types/connect-history-api-fallback": { - "version": "1.5.4", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/express-serve-static-core": "*", - "@types/node": "*" - } - }, - "node_modules/@types/cors": { - "version": "2.8.19", - "resolved": "https://registry.npmjs.org/@types/cors/-/cors-2.8.19.tgz", - "integrity": "sha512-mFNylyeyqN93lfe/9CSxOGREz8cpzAhH+E93xJ4xWQf62V8sQ/24reV2nyzUWM6H6Xji+GGHpkbLe7pVoUEskg==", - "dev": true, - "dependencies": { - "@types/node": "*" - } - }, - "node_modules/@types/estree": { - "version": "1.0.8", - "dev": true, - "license": "MIT" - }, - "node_modules/@types/express": { - "version": "4.17.23", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/body-parser": "*", - "@types/express-serve-static-core": "^4.17.33", - "@types/qs": "*", - "@types/serve-static": "*" - } - }, - "node_modules/@types/express-serve-static-core": { - "version": "5.1.0", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/node": "*", - "@types/qs": "*", - "@types/range-parser": "*", - "@types/send": "*" - } - }, - "node_modules/@types/express/node_modules/@types/express-serve-static-core": { - "version": "4.19.7", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/node": "*", - "@types/qs": "*", - "@types/range-parser": "*", - "@types/send": "*" - } - }, - "node_modules/@types/http-errors": { - "version": "2.0.5", - "dev": true, - "license": "MIT" - }, - "node_modules/@types/http-proxy": { - "version": "1.17.16", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/node": "*" - } - }, - "node_modules/@types/jasmine": { - "version": "5.1.12", - "dev": true, - "license": "MIT" - }, - "node_modules/@types/json-schema": { - "version": "7.0.15", - "dev": true, - "license": "MIT" - }, - "node_modules/@types/mime": { - "version": "1.3.5", - "dev": true, - "license": "MIT" - }, - "node_modules/@types/node": { - "version": "24.9.1", - "resolved": "https://registry.npmjs.org/@types/node/-/node-24.9.1.tgz", - "integrity": "sha512-QoiaXANRkSXK6p0Duvt56W208du4P9Uye9hWLWgGMDTEoKPhuenzNcC4vGUmrNkiOKTlIrBoyNQYNpSwfEZXSg==", - "dev": true, - "license": "MIT", - "peer": true, - "dependencies": { - "undici-types": "~7.16.0" - } - }, - "node_modules/@types/node-forge": { - "version": "1.3.14", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/node": "*" - } - }, - "node_modules/@types/qs": { - "version": "6.14.0", - "dev": true, - "license": "MIT" - }, - "node_modules/@types/range-parser": { - "version": "1.2.7", - "dev": true, - "license": "MIT" - }, - "node_modules/@types/retry": { - "version": "0.12.0", - "dev": true, - "license": "MIT" - }, - "node_modules/@types/send": { - "version": "1.2.0", 
- "dev": true, - "license": "MIT", - "dependencies": { - "@types/node": "*" - } - }, - "node_modules/@types/serve-index": { - "version": "1.9.4", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/express": "*" - } - }, - "node_modules/@types/serve-static": { - "version": "1.15.9", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/http-errors": "*", - "@types/node": "*", - "@types/send": "<1" - } - }, - "node_modules/@types/serve-static/node_modules/@types/send": { - "version": "0.17.5", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/mime": "^1", - "@types/node": "*" - } - }, - "node_modules/@types/sockjs": { - "version": "0.3.36", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/node": "*" - } - }, - "node_modules/@types/ws": { - "version": "8.18.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/node": "*" - } - }, - "node_modules/@webassemblyjs/floating-point-hex-parser": { - "version": "1.13.2", - "dev": true, - "license": "MIT" - }, - "node_modules/@webassemblyjs/helper-api-error": { - "version": "1.13.2", - "dev": true, - "license": "MIT" - }, - "node_modules/@webassemblyjs/helper-buffer": { - "version": "1.14.1", - "dev": true, - "license": "MIT" - }, - "node_modules/@webassemblyjs/helper-numbers": { - "version": "1.13.2", - "dev": true, - "license": "MIT", - "dependencies": { - "@webassemblyjs/floating-point-hex-parser": "1.13.2", - "@webassemblyjs/helper-api-error": "1.13.2", - "@xtuc/long": "4.2.2" - } - }, - "node_modules/@webassemblyjs/helper-wasm-bytecode": { - "version": "1.13.2", - "dev": true, - "license": "MIT" - }, - "node_modules/@webassemblyjs/helper-wasm-section": { - "version": "1.14.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@webassemblyjs/ast": "1.14.1", - "@webassemblyjs/helper-buffer": "1.14.1", - "@webassemblyjs/helper-wasm-bytecode": "1.13.2", - "@webassemblyjs/wasm-gen": "1.14.1" - } - }, - "node_modules/@webassemblyjs/helper-wasm-section/node_modules/@webassemblyjs/ast": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.14.1.tgz", - "integrity": "sha512-nuBEDgQfm1ccRp/8bCQrx1frohyufl4JlbMMZ4P1wpeOfDhF6FQkxZJ1b/e+PLwr6X1Nhw6OLme5usuBWYBvuQ==", - "dev": true, - "dependencies": { - "@webassemblyjs/helper-numbers": "1.13.2", - "@webassemblyjs/helper-wasm-bytecode": "1.13.2" - } - }, - "node_modules/@webassemblyjs/ieee754": { - "version": "1.13.2", - "dev": true, - "license": "MIT", - "dependencies": { - "@xtuc/ieee754": "^1.2.0" - } - }, - "node_modules/@webassemblyjs/leb128": { - "version": "1.13.2", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "@xtuc/long": "4.2.2" - } - }, - "node_modules/@webassemblyjs/utf8": { - "version": "1.13.2", - "dev": true, - "license": "MIT" - }, - "node_modules/@webassemblyjs/wasm-edit": { - "version": "1.14.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@webassemblyjs/ast": "1.14.1", - "@webassemblyjs/helper-buffer": "1.14.1", - "@webassemblyjs/helper-wasm-bytecode": "1.13.2", - "@webassemblyjs/helper-wasm-section": "1.14.1", - "@webassemblyjs/wasm-gen": "1.14.1", - "@webassemblyjs/wasm-opt": "1.14.1", - "@webassemblyjs/wasm-parser": "1.14.1", - "@webassemblyjs/wast-printer": "1.14.1" - } - }, - "node_modules/@webassemblyjs/wasm-edit/node_modules/@webassemblyjs/ast": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.14.1.tgz", - "integrity": 
"sha512-nuBEDgQfm1ccRp/8bCQrx1frohyufl4JlbMMZ4P1wpeOfDhF6FQkxZJ1b/e+PLwr6X1Nhw6OLme5usuBWYBvuQ==", - "dev": true, - "dependencies": { - "@webassemblyjs/helper-numbers": "1.13.2", - "@webassemblyjs/helper-wasm-bytecode": "1.13.2" - } - }, - "node_modules/@webassemblyjs/wasm-gen": { - "version": "1.14.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@webassemblyjs/ast": "1.14.1", - "@webassemblyjs/helper-wasm-bytecode": "1.13.2", - "@webassemblyjs/ieee754": "1.13.2", - "@webassemblyjs/leb128": "1.13.2", - "@webassemblyjs/utf8": "1.13.2" - } - }, - "node_modules/@webassemblyjs/wasm-gen/node_modules/@webassemblyjs/ast": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.14.1.tgz", - "integrity": "sha512-nuBEDgQfm1ccRp/8bCQrx1frohyufl4JlbMMZ4P1wpeOfDhF6FQkxZJ1b/e+PLwr6X1Nhw6OLme5usuBWYBvuQ==", - "dev": true, - "dependencies": { - "@webassemblyjs/helper-numbers": "1.13.2", - "@webassemblyjs/helper-wasm-bytecode": "1.13.2" - } - }, - "node_modules/@webassemblyjs/wasm-opt": { - "version": "1.14.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@webassemblyjs/ast": "1.14.1", - "@webassemblyjs/helper-buffer": "1.14.1", - "@webassemblyjs/wasm-gen": "1.14.1", - "@webassemblyjs/wasm-parser": "1.14.1" - } - }, - "node_modules/@webassemblyjs/wasm-opt/node_modules/@webassemblyjs/ast": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.14.1.tgz", - "integrity": "sha512-nuBEDgQfm1ccRp/8bCQrx1frohyufl4JlbMMZ4P1wpeOfDhF6FQkxZJ1b/e+PLwr6X1Nhw6OLme5usuBWYBvuQ==", - "dev": true, - "dependencies": { - "@webassemblyjs/helper-numbers": "1.13.2", - "@webassemblyjs/helper-wasm-bytecode": "1.13.2" - } - }, - "node_modules/@webassemblyjs/wasm-parser": { - "version": "1.14.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@webassemblyjs/ast": "1.14.1", - "@webassemblyjs/helper-api-error": "1.13.2", - "@webassemblyjs/helper-wasm-bytecode": "1.13.2", - "@webassemblyjs/ieee754": "1.13.2", - "@webassemblyjs/leb128": "1.13.2", - "@webassemblyjs/utf8": "1.13.2" - } - }, - "node_modules/@webassemblyjs/wasm-parser/node_modules/@webassemblyjs/ast": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.14.1.tgz", - "integrity": "sha512-nuBEDgQfm1ccRp/8bCQrx1frohyufl4JlbMMZ4P1wpeOfDhF6FQkxZJ1b/e+PLwr6X1Nhw6OLme5usuBWYBvuQ==", - "dev": true, - "dependencies": { - "@webassemblyjs/helper-numbers": "1.13.2", - "@webassemblyjs/helper-wasm-bytecode": "1.13.2" - } - }, - "node_modules/@webassemblyjs/wast-printer": { - "version": "1.14.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@webassemblyjs/ast": "1.14.1", - "@xtuc/long": "4.2.2" - } - }, - "node_modules/@webassemblyjs/wast-printer/node_modules/@webassemblyjs/ast": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.14.1.tgz", - "integrity": "sha512-nuBEDgQfm1ccRp/8bCQrx1frohyufl4JlbMMZ4P1wpeOfDhF6FQkxZJ1b/e+PLwr6X1Nhw6OLme5usuBWYBvuQ==", - "dev": true, - "dependencies": { - "@webassemblyjs/helper-numbers": "1.13.2", - "@webassemblyjs/helper-wasm-bytecode": "1.13.2" - } - }, - "node_modules/@xtuc/ieee754": { - "version": "1.2.0", - "dev": true, - "license": "BSD-3-Clause" - }, - "node_modules/@xtuc/long": { - "version": "4.2.2", - "dev": true, - "license": "Apache-2.0" - }, - "node_modules/@yarnpkg/lockfile": { - "version": "1.1.0", - "dev": true, - "license": "BSD-2-Clause" - }, - "node_modules/abbrev": { - "version": "2.0.0", - "resolved": 
"https://registry.npmjs.org/abbrev/-/abbrev-2.0.0.tgz", - "integrity": "sha512-6/mh1E2u2YgEsCHdY0Yx5oW+61gZU+1vXaoiHHrpKeuRNNgFvS+/jrwHiQhB5apAf5oB7UB7E19ol2R2LKH8hQ==", - "dev": true, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/accepts": { - "version": "1.3.8", - "dev": true, - "license": "MIT", - "dependencies": { - "mime-types": "~2.1.34", - "negotiator": "0.6.3" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/accepts/node_modules/negotiator": { - "version": "0.6.3", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/acorn": { - "version": "8.15.0", - "dev": true, - "license": "MIT", - "peer": true, - "bin": { - "acorn": "bin/acorn" - }, - "engines": { - "node": ">=0.4.0" - } - }, - "node_modules/acorn-import-attributes": { - "version": "1.9.5", - "dev": true, - "license": "MIT", - "peerDependencies": { - "acorn": "^8" - } - }, - "node_modules/adjust-sourcemap-loader": { - "version": "4.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "loader-utils": "^2.0.0", - "regex-parser": "^2.2.11" - }, - "engines": { - "node": ">=8.9" - } - }, - "node_modules/adjust-sourcemap-loader/node_modules/loader-utils": { - "version": "2.0.4", - "dev": true, - "license": "MIT", - "dependencies": { - "big.js": "^5.2.2", - "emojis-list": "^3.0.0", - "json5": "^2.1.2" - }, - "engines": { - "node": ">=8.9.0" - } - }, - "node_modules/agent-base": { - "version": "7.1.4", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 14" - } - }, - "node_modules/aggregate-error": { - "version": "3.1.0", - "dev": true, - "license": "MIT", - "dependencies": { - "clean-stack": "^2.0.0", - "indent-string": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/ajv-formats": { - "version": "2.1.1", - "dev": true, - "license": "MIT", - "dependencies": { - "ajv": "^8.0.0" - }, - "peerDependencies": { - "ajv": "^8.0.0" - }, - "peerDependenciesMeta": { - "ajv": { - "optional": true - } - } - }, - "node_modules/ajv-formats/node_modules/ajv": { - "version": "8.17.1", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", - "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", - "dev": true, - "dependencies": { - "fast-deep-equal": "^3.1.3", - "fast-uri": "^3.0.1", - "json-schema-traverse": "^1.0.0", - "require-from-string": "^2.0.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, - "node_modules/ansi-colors": { - "version": "4.1.3", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/ansi-escapes": { - "version": "4.3.2", - "dev": true, - "license": "MIT", - "dependencies": { - "type-fest": "^0.21.3" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/ansi-html-community": { - "version": "0.0.8", - "dev": true, - "engines": [ - "node >= 0.8.0" - ], - "license": "Apache-2.0", - "bin": { - "ansi-html": "bin/ansi-html" - } - }, - "node_modules/ansi-regex": { - "version": "5.0.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/ansi-styles": { - "version": "4.3.0", - "dev": true, - "license": "MIT", - "dependencies": { - "color-convert": "^2.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/chalk/ansi-styles?sponsor=1" - } - }, - "node_modules/anymatch": { - "version": "3.1.3", - 
"dev": true, - "license": "ISC", - "dependencies": { - "normalize-path": "^3.0.0", - "picomatch": "^2.0.4" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/anymatch/node_modules/picomatch": { - "version": "2.3.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8.6" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - "node_modules/argparse": { - "version": "1.0.10", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", - "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", - "dev": true, - "dependencies": { - "sprintf-js": "~1.0.2" - } - }, - "node_modules/array-flatten": { - "version": "1.1.1", - "dev": true, - "license": "MIT" - }, - "node_modules/autoprefixer": { - "version": "10.4.18", - "resolved": "https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.18.tgz", - "integrity": "sha512-1DKbDfsr6KUElM6wg+0zRNkB/Q7WcKYAaK+pzXn+Xqmszm/5Xa9coeNdtP88Vi+dPzZnMjhge8GIV49ZQkDa+g==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/autoprefixer" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "dependencies": { - "browserslist": "^4.23.0", - "caniuse-lite": "^1.0.30001591", - "fraction.js": "^4.3.7", - "normalize-range": "^0.1.2", - "picocolors": "^1.0.0", - "postcss-value-parser": "^4.2.0" - }, - "bin": { - "autoprefixer": "bin/autoprefixer" - }, - "engines": { - "node": "^10 || ^12 || >=14" - }, - "peerDependencies": { - "postcss": "^8.1.0" - } - }, - "node_modules/autoprefixer/node_modules/caniuse-lite": { - "version": "1.0.30001751", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001751.tgz", - "integrity": "sha512-A0QJhug0Ly64Ii3eIqHu5X51ebln3k4yTUkY1j8drqpWHVreg/VLijN48cZ1bYPiqOQuqpkIKnzr/Ul8V+p6Cw==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/caniuse-lite" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ] - }, - "node_modules/babel-loader": { - "version": "9.1.3", - "dev": true, - "license": "MIT", - "dependencies": { - "find-cache-dir": "^4.0.0", - "schema-utils": "^4.0.0" - }, - "engines": { - "node": ">= 14.15.0" - }, - "peerDependencies": { - "@babel/core": "^7.12.0", - "webpack": ">=5" - } - }, - "node_modules/babel-plugin-istanbul": { - "version": "6.1.1", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "@babel/helper-plugin-utils": "^7.0.0", - "@istanbuljs/load-nyc-config": "^1.0.0", - "@istanbuljs/schema": "^0.1.2", - "istanbul-lib-instrument": "^5.0.4", - "test-exclude": "^6.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/babel-plugin-polyfill-corejs2": { - "version": "0.4.14", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/compat-data": "^7.27.7", - "@babel/helper-define-polyfill-provider": "^0.6.5", - "semver": "^6.3.1" - }, - "peerDependencies": { - "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" - } - }, - "node_modules/babel-plugin-polyfill-corejs2/node_modules/@babel/helper-define-polyfill-provider": { - "version": "0.6.5", - "resolved": "https://registry.npmjs.org/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.6.5.tgz", - "integrity": 
"sha512-uJnGFcPsWQK8fvjgGP5LZUZZsYGIoPeRjSF5PGwrelYgq7Q15/Ft9NGFp1zglwgIv//W0uG4BevRuSJRyylZPg==", - "dev": true, - "dependencies": { - "@babel/helper-compilation-targets": "^7.27.2", - "@babel/helper-plugin-utils": "^7.27.1", - "debug": "^4.4.1", - "lodash.debounce": "^4.0.8", - "resolve": "^1.22.10" - }, - "peerDependencies": { - "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" - } - }, - "node_modules/babel-plugin-polyfill-corejs2/node_modules/resolve": { - "version": "1.22.11", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.11.tgz", - "integrity": "sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==", - "dev": true, - "dependencies": { - "is-core-module": "^2.16.1", - "path-parse": "^1.0.7", - "supports-preserve-symlinks-flag": "^1.0.0" - }, - "bin": { - "resolve": "bin/resolve" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/babel-plugin-polyfill-corejs2/node_modules/semver": { - "version": "6.3.1", - "dev": true, - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/babel-plugin-polyfill-corejs3": { - "version": "0.11.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-define-polyfill-provider": "^0.6.3", - "core-js-compat": "^3.40.0" - }, - "peerDependencies": { - "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" - } - }, - "node_modules/babel-plugin-polyfill-corejs3/node_modules/@babel/helper-define-polyfill-provider": { - "version": "0.6.5", - "resolved": "https://registry.npmjs.org/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.6.5.tgz", - "integrity": "sha512-uJnGFcPsWQK8fvjgGP5LZUZZsYGIoPeRjSF5PGwrelYgq7Q15/Ft9NGFp1zglwgIv//W0uG4BevRuSJRyylZPg==", - "dev": true, - "dependencies": { - "@babel/helper-compilation-targets": "^7.27.2", - "@babel/helper-plugin-utils": "^7.27.1", - "debug": "^4.4.1", - "lodash.debounce": "^4.0.8", - "resolve": "^1.22.10" - }, - "peerDependencies": { - "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" - } - }, - "node_modules/babel-plugin-polyfill-corejs3/node_modules/core-js-compat": { - "version": "3.46.0", - "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.46.0.tgz", - "integrity": "sha512-p9hObIIEENxSV8xIu+V68JjSeARg6UVMG5mR+JEUguG3sI6MsiS1njz2jHmyJDvA+8jX/sytkBHup6kxhM9law==", - "dev": true, - "dependencies": { - "browserslist": "^4.26.3" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/core-js" - } - }, - "node_modules/babel-plugin-polyfill-corejs3/node_modules/resolve": { - "version": "1.22.11", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.11.tgz", - "integrity": "sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==", - "dev": true, - "dependencies": { - "is-core-module": "^2.16.1", - "path-parse": "^1.0.7", - "supports-preserve-symlinks-flag": "^1.0.0" - }, - "bin": { - "resolve": "bin/resolve" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/babel-plugin-polyfill-regenerator": { - "version": "0.6.5", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/helper-define-polyfill-provider": "^0.6.5" - }, - "peerDependencies": { - "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" - } - }, - "node_modules/babel-plugin-polyfill-regenerator/node_modules/@babel/helper-define-polyfill-provider": { - "version": "0.6.5", - "resolved": 
"https://registry.npmjs.org/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.6.5.tgz", - "integrity": "sha512-uJnGFcPsWQK8fvjgGP5LZUZZsYGIoPeRjSF5PGwrelYgq7Q15/Ft9NGFp1zglwgIv//W0uG4BevRuSJRyylZPg==", - "dev": true, - "dependencies": { - "@babel/helper-compilation-targets": "^7.27.2", - "@babel/helper-plugin-utils": "^7.27.1", - "debug": "^4.4.1", - "lodash.debounce": "^4.0.8", - "resolve": "^1.22.10" - }, - "peerDependencies": { - "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" - } - }, - "node_modules/babel-plugin-polyfill-regenerator/node_modules/resolve": { - "version": "1.22.11", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.11.tgz", - "integrity": "sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==", - "dev": true, - "dependencies": { - "is-core-module": "^2.16.1", - "path-parse": "^1.0.7", - "supports-preserve-symlinks-flag": "^1.0.0" - }, - "bin": { - "resolve": "bin/resolve" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/balanced-match": { - "version": "1.0.2", - "dev": true, - "license": "MIT" - }, - "node_modules/base64-js": { - "version": "1.5.1", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT" - }, - "node_modules/base64id": { - "version": "2.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": "^4.5.0 || >= 5.9" - } - }, - "node_modules/baseline-browser-mapping": { - "version": "2.8.18", - "dev": true, - "license": "Apache-2.0", - "bin": { - "baseline-browser-mapping": "dist/cli.js" - } - }, - "node_modules/batch": { - "version": "0.6.1", - "dev": true, - "license": "MIT" - }, - "node_modules/big.js": { - "version": "5.2.2", - "dev": true, - "license": "MIT", - "engines": { - "node": "*" - } - }, - "node_modules/binary-extensions": { - "version": "2.3.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/bl": { - "version": "4.1.0", - "dev": true, - "license": "MIT", - "dependencies": { - "buffer": "^5.5.0", - "inherits": "^2.0.4", - "readable-stream": "^3.4.0" - } - }, - "node_modules/bl/node_modules/readable-stream": { - "version": "3.6.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", - "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", - "dev": true, - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/body-parser": { - "version": "1.20.3", - "dev": true, - "license": "MIT", - "dependencies": { - "bytes": "3.1.2", - "content-type": "~1.0.5", - "debug": "2.6.9", - "depd": "2.0.0", - "destroy": "1.2.0", - "http-errors": "2.0.0", - "iconv-lite": "0.4.24", - "on-finished": "2.4.1", - "qs": "6.13.0", - "raw-body": "2.5.2", - "type-is": "~1.6.18", - "unpipe": "1.0.0" - }, - "engines": { - "node": ">= 0.8", - "npm": "1.2.8000 || >= 1.4.16" - } - }, - "node_modules/body-parser/node_modules/debug": { - "version": "2.6.9", - "dev": true, - "license": "MIT", - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/body-parser/node_modules/ms": { - "version": 
"2.0.0", - "dev": true, - "license": "MIT" - }, - "node_modules/bonjour-service": { - "version": "1.3.0", - "dev": true, - "license": "MIT", - "dependencies": { - "fast-deep-equal": "^3.1.3", - "multicast-dns": "^7.2.5" - } - }, - "node_modules/boolbase": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", - "integrity": "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==", - "dev": true - }, - "node_modules/brace-expansion": { - "version": "1.1.12", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" - } - }, - "node_modules/braces": { - "version": "3.0.3", - "dev": true, - "license": "MIT", - "dependencies": { - "fill-range": "^7.1.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/browserslist": { - "version": "4.26.3", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/browserslist" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "dependencies": { - "baseline-browser-mapping": "^2.8.9", - "caniuse-lite": "^1.0.30001746", - "electron-to-chromium": "^1.5.227", - "node-releases": "^2.0.21", - "update-browserslist-db": "^1.1.3" - }, - "bin": { - "browserslist": "cli.js" - }, - "engines": { - "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" - } - }, - "node_modules/browserslist/node_modules/caniuse-lite": { - "version": "1.0.30001751", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001751.tgz", - "integrity": "sha512-A0QJhug0Ly64Ii3eIqHu5X51ebln3k4yTUkY1j8drqpWHVreg/VLijN48cZ1bYPiqOQuqpkIKnzr/Ul8V+p6Cw==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/caniuse-lite" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ] - }, - "node_modules/buffer": { - "version": "5.7.1", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT", - "dependencies": { - "base64-js": "^1.3.1", - "ieee754": "^1.1.13" - } - }, - "node_modules/buffer-from": { - "version": "1.1.2", - "dev": true, - "license": "MIT" - }, - "node_modules/bytes": { - "version": "3.1.2", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/cacache": { - "version": "18.0.4", - "dev": true, - "license": "ISC", - "dependencies": { - "@npmcli/fs": "^3.1.0", - "fs-minipass": "^3.0.0", - "glob": "^10.2.2", - "lru-cache": "^10.0.1", - "minipass": "^7.0.3", - "minipass-collect": "^2.0.1", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "p-map": "^4.0.0", - "ssri": "^10.0.0", - "tar": "^6.1.11", - "unique-filename": "^3.0.0" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/cacache/node_modules/brace-expansion": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", - "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", - "dev": true, - "dependencies": { - "balanced-match": 
"^1.0.0" - } - }, - "node_modules/cacache/node_modules/glob": { - "version": "10.4.5", - "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz", - "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==", - "dev": true, - "dependencies": { - "foreground-child": "^3.1.0", - "jackspeak": "^3.1.2", - "minimatch": "^9.0.4", - "minipass": "^7.1.2", - "package-json-from-dist": "^1.0.0", - "path-scurry": "^1.11.1" - }, - "bin": { - "glob": "dist/esm/bin.mjs" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/cacache/node_modules/lru-cache": { - "version": "10.4.3", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", - "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", - "dev": true - }, - "node_modules/cacache/node_modules/minimatch": { - "version": "9.0.5", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", - "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", - "dev": true, - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/cacache/node_modules/mkdirp": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", - "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", - "dev": true, - "bin": { - "mkdirp": "bin/cmd.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/cacache/node_modules/tar": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", - "integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==", - "dev": true, - "dependencies": { - "chownr": "^2.0.0", - "fs-minipass": "^2.0.0", - "minipass": "^5.0.0", - "minizlib": "^2.1.1", - "mkdirp": "^1.0.3", - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/cacache/node_modules/tar/node_modules/fs-minipass": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", - "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", - "dev": true, - "dependencies": { - "minipass": "^3.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/cacache/node_modules/tar/node_modules/fs-minipass/node_modules/minipass": { - "version": "3.3.6", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", - "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", - "dev": true, - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/cacache/node_modules/tar/node_modules/minipass": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", - "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/cacache/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - }, - 
"node_modules/call-bind": { - "version": "1.0.8", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bind-apply-helpers": "^1.0.0", - "es-define-property": "^1.0.0", - "get-intrinsic": "^1.2.4", - "set-function-length": "^1.2.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/call-bind-apply-helpers": { - "version": "1.0.2", - "dev": true, - "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0", - "function-bind": "^1.1.2" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/call-bound": { - "version": "1.0.4", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bind-apply-helpers": "^1.0.2", - "get-intrinsic": "^1.3.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/callsites": { - "version": "3.1.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/camelcase": { - "version": "5.3.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/chalk": { - "version": "4.1.2", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^4.1.0", - "supports-color": "^7.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" - } - }, - "node_modules/chardet": { - "version": "0.7.0", - "dev": true, - "license": "MIT" - }, - "node_modules/chokidar": { - "version": "3.6.0", - "dev": true, - "license": "MIT", - "dependencies": { - "anymatch": "~3.1.2", - "braces": "~3.0.2", - "glob-parent": "~5.1.2", - "is-binary-path": "~2.1.0", - "is-glob": "~4.0.1", - "normalize-path": "~3.0.0", - "readdirp": "~3.6.0" - }, - "engines": { - "node": ">= 8.10.0" - }, - "funding": { - "url": "https://paulmillr.com/funding/" - }, - "optionalDependencies": { - "fsevents": "~2.3.2" - } - }, - "node_modules/chokidar/node_modules/fsevents": { - "version": "2.3.3", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", - "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", - "dev": true, - "hasInstallScript": true, - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": "^8.16.0 || ^10.6.0 || >=11.0.0" - } - }, - "node_modules/chownr": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", - "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", - "dev": true, - "engines": { - "node": ">=10" - } - }, - "node_modules/chrome-trace-event": { - "version": "1.0.4", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.0" - } - }, - "node_modules/clean-stack": { - "version": "2.2.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/cli-cursor": { - "version": "3.1.0", - "dev": true, - "license": "MIT", - "dependencies": { - "restore-cursor": "^3.1.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/cli-spinners": { - "version": "2.9.2", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/cli-width": { - "version": "4.1.0", - "dev": true, - "license": "ISC", - "engines": { - "node": ">= 12" - } - }, - "node_modules/cliui": { - "version": "8.0.1", - "dev": true, - "license": "ISC", - "dependencies": { - "string-width": 
"^4.2.0", - "strip-ansi": "^6.0.1", - "wrap-ansi": "^7.0.0" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/cliui/node_modules/wrap-ansi": { - "version": "7.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/clone": { - "version": "1.0.4", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.8" - } - }, - "node_modules/clone-deep": { - "version": "4.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "is-plain-object": "^2.0.4", - "kind-of": "^6.0.2", - "shallow-clone": "^3.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/color-convert": { - "version": "2.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "color-name": "~1.1.4" - }, - "engines": { - "node": ">=7.0.0" - } - }, - "node_modules/color-name": { - "version": "1.1.4", - "dev": true, - "license": "MIT" - }, - "node_modules/colorette": { - "version": "2.0.20", - "dev": true, - "license": "MIT" - }, - "node_modules/commander": { - "version": "2.20.3", - "dev": true, - "license": "MIT" - }, - "node_modules/common-path-prefix": { - "version": "3.0.0", - "dev": true, - "license": "ISC" - }, - "node_modules/compressible": { - "version": "2.0.18", - "dev": true, - "license": "MIT", - "dependencies": { - "mime-db": ">= 1.43.0 < 2" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/compression": { - "version": "1.8.1", - "dev": true, - "license": "MIT", - "dependencies": { - "bytes": "3.1.2", - "compressible": "~2.0.18", - "debug": "2.6.9", - "negotiator": "~0.6.4", - "on-headers": "~1.1.0", - "safe-buffer": "5.2.1", - "vary": "~1.1.2" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/compression/node_modules/debug": { - "version": "2.6.9", - "dev": true, - "license": "MIT", - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/compression/node_modules/ms": { - "version": "2.0.0", - "dev": true, - "license": "MIT" - }, - "node_modules/concat-map": { - "version": "0.0.1", - "dev": true, - "license": "MIT" - }, - "node_modules/connect": { - "version": "3.7.0", - "dev": true, - "license": "MIT", - "dependencies": { - "debug": "2.6.9", - "finalhandler": "1.1.2", - "parseurl": "~1.3.3", - "utils-merge": "1.0.1" - }, - "engines": { - "node": ">= 0.10.0" - } - }, - "node_modules/connect-history-api-fallback": { - "version": "2.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.8" - } - }, - "node_modules/connect/node_modules/debug": { - "version": "2.6.9", - "dev": true, - "license": "MIT", - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/connect/node_modules/ms": { - "version": "2.0.0", - "dev": true, - "license": "MIT" - }, - "node_modules/content-disposition": { - "version": "0.5.4", - "dev": true, - "license": "MIT", - "dependencies": { - "safe-buffer": "5.2.1" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/content-type": { - "version": "1.0.5", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/convert-source-map": { - "version": "1.9.0", - "dev": true, - "license": "MIT" - }, - "node_modules/cookie": { - "version": "0.7.2", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz", - "integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==", - "dev": true, - 
"engines": { - "node": ">= 0.6" - } - }, - "node_modules/cookie-signature": { - "version": "1.0.6", - "dev": true, - "license": "MIT" - }, - "node_modules/copy-anything": { - "version": "2.0.6", - "dev": true, - "license": "MIT", - "dependencies": { - "is-what": "^3.14.1" - }, - "funding": { - "url": "https://github.com/sponsors/mesqueeb" - } - }, - "node_modules/copy-webpack-plugin": { - "version": "11.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "fast-glob": "^3.2.11", - "glob-parent": "^6.0.1", - "globby": "^13.1.1", - "normalize-path": "^3.0.0", - "schema-utils": "^4.0.0", - "serialize-javascript": "^6.0.0" - }, - "engines": { - "node": ">= 14.15.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependencies": { - "webpack": "^5.1.0" - } - }, - "node_modules/copy-webpack-plugin/node_modules/glob-parent": { - "version": "6.0.2", - "dev": true, - "license": "ISC", - "dependencies": { - "is-glob": "^4.0.3" - }, - "engines": { - "node": ">=10.13.0" - } - }, - "node_modules/core-util-is": { - "version": "1.0.3", - "dev": true, - "license": "MIT" - }, - "node_modules/cors": { - "version": "2.8.5", - "dev": true, - "license": "MIT", - "dependencies": { - "object-assign": "^4", - "vary": "^1" - }, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/cosmiconfig": { - "version": "9.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "env-paths": "^2.2.1", - "import-fresh": "^3.3.0", - "js-yaml": "^4.1.0", - "parse-json": "^5.2.0" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/d-fischer" - }, - "peerDependencies": { - "typescript": ">=4.9.5" - }, - "peerDependenciesMeta": { - "typescript": { - "optional": true - } - } - }, - "node_modules/cosmiconfig/node_modules/argparse": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", - "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", - "dev": true - }, - "node_modules/cosmiconfig/node_modules/js-yaml": { - "version": "4.1.0", - "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", - "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", - "dev": true, - "dependencies": { - "argparse": "^2.0.1" - }, - "bin": { - "js-yaml": "bin/js-yaml.js" - } - }, - "node_modules/critters": { - "version": "0.0.22", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "chalk": "^4.1.0", - "css-select": "^5.1.0", - "dom-serializer": "^2.0.0", - "domhandler": "^5.0.2", - "htmlparser2": "^8.0.2", - "postcss": "^8.4.23", - "postcss-media-query-parser": "^0.2.3" - } - }, - "node_modules/critters/node_modules/css-select": { - "version": "5.2.2", - "resolved": "https://registry.npmjs.org/css-select/-/css-select-5.2.2.tgz", - "integrity": "sha512-TizTzUddG/xYLA3NXodFM0fSbNizXjOKhqiQQwvhlspadZokn1KDy0NZFS0wuEubIYAV5/c1/lAr0TaaFXEXzw==", - "dev": true, - "dependencies": { - "boolbase": "^1.0.0", - "css-what": "^6.1.0", - "domhandler": "^5.0.2", - "domutils": "^3.0.1", - "nth-check": "^2.0.1" - }, - "funding": { - "url": "https://github.com/sponsors/fb55" - } - }, - "node_modules/critters/node_modules/domutils": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.2.2.tgz", - "integrity": "sha512-6kZKyUajlDuqlHKVX1w7gyslj9MPIXzIFiz/rGu35uC1wMi+kMhQwGhl4lt9unC9Vb9INnY9Z3/ZA3+FhASLaw==", - "dev": true, - "dependencies": 
{ - "dom-serializer": "^2.0.0", - "domelementtype": "^2.3.0", - "domhandler": "^5.0.3" - }, - "funding": { - "url": "https://github.com/fb55/domutils?sponsor=1" - } - }, - "node_modules/cross-spawn": { - "version": "7.0.6", - "dev": true, - "license": "MIT", - "dependencies": { - "path-key": "^3.1.0", - "shebang-command": "^2.0.0", - "which": "^2.0.1" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/cross-spawn/node_modules/which": { - "version": "2.0.2", - "dev": true, - "license": "ISC", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "node-which": "bin/node-which" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/css-loader": { - "version": "6.10.0", - "dev": true, - "license": "MIT", - "dependencies": { - "icss-utils": "^5.1.0", - "postcss": "^8.4.33", - "postcss-modules-extract-imports": "^3.0.0", - "postcss-modules-local-by-default": "^4.0.4", - "postcss-modules-scope": "^3.1.1", - "postcss-modules-values": "^4.0.0", - "postcss-value-parser": "^4.2.0", - "semver": "^7.5.4" - }, - "engines": { - "node": ">= 12.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependencies": { - "@rspack/core": "0.x || 1.x", - "webpack": "^5.0.0" - }, - "peerDependenciesMeta": { - "@rspack/core": { - "optional": true - }, - "webpack": { - "optional": true - } - } - }, - "node_modules/css-what": { - "version": "6.2.2", - "resolved": "https://registry.npmjs.org/css-what/-/css-what-6.2.2.tgz", - "integrity": "sha512-u/O3vwbptzhMs3L1fQE82ZSLHQQfto5gyZzwteVIEyeaY5Fc7R4dapF/BvRoSYFeqfBk4m0V1Vafq5Pjv25wvA==", - "dev": true, - "engines": { - "node": ">= 6" - }, - "funding": { - "url": "https://github.com/sponsors/fb55" - } - }, - "node_modules/cssesc": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", - "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==", - "dev": true, - "bin": { - "cssesc": "bin/cssesc" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/custom-event": { - "version": "1.0.1", - "dev": true, - "license": "MIT" - }, - "node_modules/date-format": { - "version": "4.0.14", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=4.0" - } - }, - "node_modules/debug": { - "version": "4.4.3", - "dev": true, - "license": "MIT", - "dependencies": { - "ms": "^2.1.3" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/default-gateway": { - "version": "6.0.3", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "execa": "^5.0.0" - }, - "engines": { - "node": ">= 10" - } - }, - "node_modules/defaults": { - "version": "1.0.4", - "dev": true, - "license": "MIT", - "dependencies": { - "clone": "^1.0.2" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/define-data-property": { - "version": "1.1.4", - "dev": true, - "license": "MIT", - "dependencies": { - "es-define-property": "^1.0.0", - "es-errors": "^1.3.0", - "gopd": "^1.0.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/define-lazy-prop": { - "version": "2.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/depd": { - "version": "2.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/destroy": { - "version": "1.2.0", - 
"dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.8", - "npm": "1.2.8000 || >= 1.4.16" - } - }, - "node_modules/detect-node": { - "version": "2.1.0", - "dev": true, - "license": "MIT" - }, - "node_modules/di": { - "version": "0.0.1", - "dev": true, - "license": "MIT" - }, - "node_modules/dir-glob": { - "version": "3.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "path-type": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/dns-packet": { - "version": "5.6.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@leichtgewicht/ip-codec": "^2.0.1" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/dom-serialize": { - "version": "2.2.1", - "dev": true, - "license": "MIT", - "dependencies": { - "custom-event": "~1.0.0", - "ent": "~2.2.0", - "extend": "^3.0.0", - "void-elements": "^2.0.0" - } - }, - "node_modules/dom-serializer": { - "version": "2.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "domelementtype": "^2.3.0", - "domhandler": "^5.0.2", - "entities": "^4.2.0" - }, - "funding": { - "url": "https://github.com/cheeriojs/dom-serializer?sponsor=1" - } - }, - "node_modules/dom-serializer/node_modules/entities": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", - "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", - "dev": true, - "engines": { - "node": ">=0.12" - }, - "funding": { - "url": "https://github.com/fb55/entities?sponsor=1" - } - }, - "node_modules/domelementtype": { - "version": "2.3.0", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/fb55" - } - ], - "license": "BSD-2-Clause" - }, - "node_modules/domhandler": { - "version": "5.0.3", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "domelementtype": "^2.3.0" - }, - "engines": { - "node": ">= 4" - }, - "funding": { - "url": "https://github.com/fb55/domhandler?sponsor=1" - } - }, - "node_modules/dunder-proto": { - "version": "1.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bind-apply-helpers": "^1.0.1", - "es-errors": "^1.3.0", - "gopd": "^1.2.0" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/eastasianwidth": { - "version": "0.2.0", - "dev": true, - "license": "MIT" - }, - "node_modules/ee-first": { - "version": "1.1.1", - "dev": true, - "license": "MIT" - }, - "node_modules/electron-to-chromium": { - "version": "1.5.237", - "dev": true, - "license": "ISC" - }, - "node_modules/emoji-regex": { - "version": "8.0.0", - "dev": true, - "license": "MIT" - }, - "node_modules/emojis-list": { - "version": "3.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 4" - } - }, - "node_modules/encodeurl": { - "version": "1.0.2", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/encoding": { - "version": "0.1.13", - "dev": true, - "license": "MIT", - "optional": true, - "dependencies": { - "iconv-lite": "^0.6.2" - } - }, - "node_modules/encoding/node_modules/iconv-lite": { - "version": "0.6.3", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", - "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", - "dev": true, - "optional": true, - "dependencies": { - "safer-buffer": ">= 2.1.2 < 3.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/engine.io-parser": { - "version": "5.2.3", - "resolved": 
"https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.2.3.tgz", - "integrity": "sha512-HqD3yTBfnBxIrbnM1DoD6Pcq8NECnh8d4As1Qgh0z5Gg3jRRIqijury0CL3ghu/edArpUYiYqQiDUQBIs4np3Q==", - "dev": true, - "engines": { - "node": ">=10.0.0" - } - }, - "node_modules/ent": { - "version": "2.2.2", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.3", - "es-errors": "^1.3.0", - "punycode": "^1.4.1", - "safe-regex-test": "^1.1.0" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/env-paths": { - "version": "2.2.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/err-code": { - "version": "2.0.3", - "dev": true, - "license": "MIT" - }, - "node_modules/errno": { - "version": "0.1.8", - "dev": true, - "license": "MIT", - "optional": true, - "dependencies": { - "prr": "~1.0.1" - }, - "bin": { - "errno": "cli.js" - } - }, - "node_modules/error-ex": { - "version": "1.3.4", - "dev": true, - "license": "MIT", - "dependencies": { - "is-arrayish": "^0.2.1" - } - }, - "node_modules/es-define-property": { - "version": "1.0.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/es-errors": { - "version": "1.3.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/es-module-lexer": { - "version": "1.7.0", - "dev": true, - "license": "MIT" - }, - "node_modules/es-object-atoms": { - "version": "1.1.1", - "dev": true, - "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/esbuild": { - "version": "0.20.1", - "dev": true, - "hasInstallScript": true, - "license": "MIT", - "optional": true, - "bin": { - "esbuild": "bin/esbuild" - }, - "engines": { - "node": ">=12" - }, - "optionalDependencies": { - "@esbuild/aix-ppc64": "0.20.1", - "@esbuild/android-arm": "0.20.1", - "@esbuild/android-arm64": "0.20.1", - "@esbuild/android-x64": "0.20.1", - "@esbuild/darwin-arm64": "0.20.1", - "@esbuild/darwin-x64": "0.20.1", - "@esbuild/freebsd-arm64": "0.20.1", - "@esbuild/freebsd-x64": "0.20.1", - "@esbuild/linux-arm": "0.20.1", - "@esbuild/linux-arm64": "0.20.1", - "@esbuild/linux-ia32": "0.20.1", - "@esbuild/linux-loong64": "0.20.1", - "@esbuild/linux-mips64el": "0.20.1", - "@esbuild/linux-ppc64": "0.20.1", - "@esbuild/linux-riscv64": "0.20.1", - "@esbuild/linux-s390x": "0.20.1", - "@esbuild/linux-x64": "0.20.1", - "@esbuild/netbsd-x64": "0.20.1", - "@esbuild/openbsd-x64": "0.20.1", - "@esbuild/sunos-x64": "0.20.1", - "@esbuild/win32-arm64": "0.20.1", - "@esbuild/win32-ia32": "0.20.1", - "@esbuild/win32-x64": "0.20.1" - } - }, - "node_modules/esbuild-wasm": { - "version": "0.20.1", - "dev": true, - "license": "MIT", - "bin": { - "esbuild": "bin/esbuild" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild/node_modules/@esbuild/aix-ppc64": { - "version": "0.20.1", - "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.20.1.tgz", - "integrity": "sha512-m55cpeupQ2DbuRGQMMZDzbv9J9PgVelPjlcmM5kxHnrBdBx6REaEd7LamYV7Dm8N7rCyR/XwU6rVP8ploKtIkA==", - "cpu": [ - "ppc64" - ], - "dev": true, - "optional": true, - "os": [ - "aix" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild/node_modules/@esbuild/android-arm": { - "version": "0.20.1", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.20.1.tgz", - "integrity": "sha512-4j0+G27/2ZXGWR5okcJi7pQYhmkVgb4D7UKwxcqrjhvp5TKWx3cUjgB1CGj1mfdmJBQ9VnUGgUhign+FPF2Zgw==", - 
"cpu": [ - "arm" - ], - "dev": true, - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild/node_modules/@esbuild/android-arm64": { - "version": "0.20.1", - "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.20.1.tgz", - "integrity": "sha512-hCnXNF0HM6AjowP+Zou0ZJMWWa1VkD77BXe959zERgGJBBxB+sV+J9f/rcjeg2c5bsukD/n17RKWXGFCO5dD5A==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild/node_modules/@esbuild/android-x64": { - "version": "0.20.1", - "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.20.1.tgz", - "integrity": "sha512-MSfZMBoAsnhpS+2yMFYIQUPs8Z19ajwfuaSZx+tSl09xrHZCjbeXXMsUF/0oq7ojxYEpsSo4c0SfjxOYXRbpaA==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "android" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild/node_modules/@esbuild/darwin-arm64": { - "version": "0.20.1", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.20.1.tgz", - "integrity": "sha512-Ylk6rzgMD8klUklGPzS414UQLa5NPXZD5tf8JmQU8GQrj6BrFA/Ic9tb2zRe1kOZyCbGl+e8VMbDRazCEBqPvA==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild/node_modules/@esbuild/darwin-x64": { - "version": "0.20.1", - "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.20.1.tgz", - "integrity": "sha512-pFIfj7U2w5sMp52wTY1XVOdoxw+GDwy9FsK3OFz4BpMAjvZVs0dT1VXs8aQm22nhwoIWUmIRaE+4xow8xfIDZA==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild/node_modules/@esbuild/freebsd-arm64": { - "version": "0.20.1", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.20.1.tgz", - "integrity": "sha512-UyW1WZvHDuM4xDz0jWun4qtQFauNdXjXOtIy7SYdf7pbxSWWVlqhnR/T2TpX6LX5NI62spt0a3ldIIEkPM6RHw==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild/node_modules/@esbuild/freebsd-x64": { - "version": "0.20.1", - "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.20.1.tgz", - "integrity": "sha512-itPwCw5C+Jh/c624vcDd9kRCCZVpzpQn8dtwoYIt2TJF3S9xJLiRohnnNrKwREvcZYx0n8sCSbvGH349XkcQeg==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "freebsd" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild/node_modules/@esbuild/linux-arm": { - "version": "0.20.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.20.1.tgz", - "integrity": "sha512-LojC28v3+IhIbfQ+Vu4Ut5n3wKcgTu6POKIHN9Wpt0HnfgUGlBuyDDQR4jWZUZFyYLiz4RBBBmfU6sNfn6RhLw==", - "cpu": [ - "arm" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild/node_modules/@esbuild/linux-arm64": { - "version": "0.20.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.20.1.tgz", - "integrity": "sha512-cX8WdlF6Cnvw/DO9/X7XLH2J6CkBnz7Twjpk56cshk9sjYVcuh4sXQBy5bmTwzBjNVZze2yaV1vtcJS04LbN8w==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild/node_modules/@esbuild/linux-ia32": { - "version": "0.20.1", - 
"resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.20.1.tgz", - "integrity": "sha512-4H/sQCy1mnnGkUt/xszaLlYJVTz3W9ep52xEefGtd6yXDQbz/5fZE5dFLUgsPdbUOQANcVUa5iO6g3nyy5BJiw==", - "cpu": [ - "ia32" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild/node_modules/@esbuild/linux-loong64": { - "version": "0.20.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.20.1.tgz", - "integrity": "sha512-c0jgtB+sRHCciVXlyjDcWb2FUuzlGVRwGXgI+3WqKOIuoo8AmZAddzeOHeYLtD+dmtHw3B4Xo9wAUdjlfW5yYA==", - "cpu": [ - "loong64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild/node_modules/@esbuild/linux-mips64el": { - "version": "0.20.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.20.1.tgz", - "integrity": "sha512-TgFyCfIxSujyuqdZKDZ3yTwWiGv+KnlOeXXitCQ+trDODJ+ZtGOzLkSWngynP0HZnTsDyBbPy7GWVXWaEl6lhA==", - "cpu": [ - "mips64el" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild/node_modules/@esbuild/linux-ppc64": { - "version": "0.20.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.20.1.tgz", - "integrity": "sha512-b+yuD1IUeL+Y93PmFZDZFIElwbmFfIKLKlYI8M6tRyzE6u7oEP7onGk0vZRh8wfVGC2dZoy0EqX1V8qok4qHaw==", - "cpu": [ - "ppc64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild/node_modules/@esbuild/linux-riscv64": { - "version": "0.20.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.20.1.tgz", - "integrity": "sha512-wpDlpE0oRKZwX+GfomcALcouqjjV8MIX8DyTrxfyCfXxoKQSDm45CZr9fanJ4F6ckD4yDEPT98SrjvLwIqUCgg==", - "cpu": [ - "riscv64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild/node_modules/@esbuild/linux-s390x": { - "version": "0.20.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.20.1.tgz", - "integrity": "sha512-5BepC2Au80EohQ2dBpyTquqGCES7++p7G+7lXe1bAIvMdXm4YYcEfZtQrP4gaoZ96Wv1Ute61CEHFU7h4FMueQ==", - "cpu": [ - "s390x" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild/node_modules/@esbuild/linux-x64": { - "version": "0.20.1", - "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.20.1.tgz", - "integrity": "sha512-5gRPk7pKuaIB+tmH+yKd2aQTRpqlf1E4f/mC+tawIm/CGJemZcHZpp2ic8oD83nKgUPMEd0fNanrnFljiruuyA==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "linux" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild/node_modules/@esbuild/netbsd-x64": { - "version": "0.20.1", - "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.20.1.tgz", - "integrity": "sha512-4fL68JdrLV2nVW2AaWZBv3XEm3Ae3NZn/7qy2KGAt3dexAgSVT+Hc97JKSZnqezgMlv9x6KV0ZkZY7UO5cNLCg==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "netbsd" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild/node_modules/@esbuild/openbsd-x64": { - "version": "0.20.1", - "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.20.1.tgz", - "integrity": "sha512-GhRuXlvRE+twf2ES+8REbeCb/zeikNqwD3+6S5y5/x+DYbAQUNl0HNBs4RQJqrechS4v4MruEr8ZtAin/hK5iw==", - "cpu": [ 
- "x64" - ], - "dev": true, - "optional": true, - "os": [ - "openbsd" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild/node_modules/@esbuild/sunos-x64": { - "version": "0.20.1", - "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.20.1.tgz", - "integrity": "sha512-ZnWEyCM0G1Ex6JtsygvC3KUUrlDXqOihw8RicRuQAzw+c4f1D66YlPNNV3rkjVW90zXVsHwZYWbJh3v+oQFM9Q==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "sunos" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild/node_modules/@esbuild/win32-arm64": { - "version": "0.20.1", - "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.20.1.tgz", - "integrity": "sha512-QZ6gXue0vVQY2Oon9WyLFCdSuYbXSoxaZrPuJ4c20j6ICedfsDilNPYfHLlMH7vGfU5DQR0czHLmJvH4Nzis/A==", - "cpu": [ - "arm64" - ], - "dev": true, - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild/node_modules/@esbuild/win32-ia32": { - "version": "0.20.1", - "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.20.1.tgz", - "integrity": "sha512-HzcJa1NcSWTAU0MJIxOho8JftNp9YALui3o+Ny7hCh0v5f90nprly1U3Sj1Ldj/CvKKdvvFsCRvDkpsEMp4DNw==", - "cpu": [ - "ia32" - ], - "dev": true, - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/esbuild/node_modules/@esbuild/win32-x64": { - "version": "0.20.1", - "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.20.1.tgz", - "integrity": "sha512-0MBh53o6XtI6ctDnRMeQ+xoCN8kD2qI1rY1KgF/xdWQwoFeKou7puvDfV8/Wv4Ctx2rRpET/gGdz3YlNtNACSA==", - "cpu": [ - "x64" - ], - "dev": true, - "optional": true, - "os": [ - "win32" - ], - "engines": { - "node": ">=12" - } - }, - "node_modules/escalade": { - "version": "3.2.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/escape-html": { - "version": "1.0.3", - "dev": true, - "license": "MIT" - }, - "node_modules/escape-string-regexp": { - "version": "1.0.5", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/eslint-scope": { - "version": "5.1.1", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "esrecurse": "^4.3.0", - "estraverse": "^4.1.1" - }, - "engines": { - "node": ">=8.0.0" - } - }, - "node_modules/esprima": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", - "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", - "dev": true, - "bin": { - "esparse": "bin/esparse.js", - "esvalidate": "bin/esvalidate.js" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/esrecurse": { - "version": "4.3.0", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "estraverse": "^5.2.0" - }, - "engines": { - "node": ">=4.0" - } - }, - "node_modules/esrecurse/node_modules/estraverse": { - "version": "5.3.0", - "dev": true, - "license": "BSD-2-Clause", - "engines": { - "node": ">=4.0" - } - }, - "node_modules/estraverse": { - "version": "4.3.0", - "dev": true, - "license": "BSD-2-Clause", - "engines": { - "node": ">=4.0" - } - }, - "node_modules/esutils": { - "version": "2.0.3", - "dev": true, - "license": "BSD-2-Clause", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/etag": { - "version": "1.8.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/eventemitter3": { - "version": "4.0.7", - "dev": true, - "license": 
"MIT" - }, - "node_modules/events": { - "version": "3.3.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.8.x" - } - }, - "node_modules/execa": { - "version": "5.1.1", - "dev": true, - "license": "MIT", - "dependencies": { - "cross-spawn": "^7.0.3", - "get-stream": "^6.0.0", - "human-signals": "^2.1.0", - "is-stream": "^2.0.0", - "merge-stream": "^2.0.0", - "npm-run-path": "^4.0.1", - "onetime": "^5.1.2", - "signal-exit": "^3.0.3", - "strip-final-newline": "^2.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sindresorhus/execa?sponsor=1" - } - }, - "node_modules/exponential-backoff": { - "version": "3.1.3", - "resolved": "https://registry.npmjs.org/exponential-backoff/-/exponential-backoff-3.1.3.tgz", - "integrity": "sha512-ZgEeZXj30q+I0EN+CbSSpIyPaJ5HVQD18Z1m+u1FXbAeT94mr1zw50q4q6jiiC447Nl/YTcIYSAftiGqetwXCA==", - "dev": true - }, - "node_modules/extend": { - "version": "3.0.2", - "dev": true, - "license": "MIT" - }, - "node_modules/external-editor": { - "version": "3.1.0", - "dev": true, - "license": "MIT", - "dependencies": { - "chardet": "^0.7.0", - "iconv-lite": "^0.4.24", - "tmp": "^0.0.33" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/fast-deep-equal": { - "version": "3.1.3", - "dev": true, - "license": "MIT" - }, - "node_modules/fast-glob": { - "version": "3.3.2", - "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz", - "integrity": "sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==", - "dev": true, - "dependencies": { - "@nodelib/fs.stat": "^2.0.2", - "@nodelib/fs.walk": "^1.2.3", - "glob-parent": "^5.1.2", - "merge2": "^1.3.0", - "micromatch": "^4.0.4" - }, - "engines": { - "node": ">=8.6.0" - } - }, - "node_modules/fast-json-stable-stringify": { - "version": "2.1.0", - "dev": true, - "license": "MIT" - }, - "node_modules/fast-uri": { - "version": "3.1.0", - "resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.1.0.tgz", - "integrity": "sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/fastify" - }, - { - "type": "opencollective", - "url": "https://opencollective.com/fastify" - } - ] - }, - "node_modules/fastq": { - "version": "1.19.1", - "dev": true, - "license": "ISC", - "dependencies": { - "reusify": "^1.0.4" - } - }, - "node_modules/faye-websocket": { - "version": "0.11.4", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "websocket-driver": ">=0.5.1" - }, - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/figures": { - "version": "3.2.0", - "dev": true, - "license": "MIT", - "dependencies": { - "escape-string-regexp": "^1.0.5" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/fill-range": { - "version": "7.1.1", - "dev": true, - "license": "MIT", - "dependencies": { - "to-regex-range": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/finalhandler": { - "version": "1.1.2", - "dev": true, - "license": "MIT", - "dependencies": { - "debug": "2.6.9", - "encodeurl": "~1.0.2", - "escape-html": "~1.0.3", - "on-finished": "~2.3.0", - "parseurl": "~1.3.3", - "statuses": "~1.5.0", - "unpipe": "~1.0.0" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/finalhandler/node_modules/debug": { - "version": "2.6.9", - "dev": true, - "license": "MIT", - "dependencies": { 
- "ms": "2.0.0" - } - }, - "node_modules/finalhandler/node_modules/ms": { - "version": "2.0.0", - "dev": true, - "license": "MIT" - }, - "node_modules/finalhandler/node_modules/on-finished": { - "version": "2.3.0", - "dev": true, - "license": "MIT", - "dependencies": { - "ee-first": "1.1.1" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/find-cache-dir": { - "version": "4.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "common-path-prefix": "^3.0.0", - "pkg-dir": "^7.0.0" - }, - "engines": { - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/find-up": { - "version": "4.1.0", - "dev": true, - "license": "MIT", - "dependencies": { - "locate-path": "^5.0.0", - "path-exists": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/flat": { - "version": "5.0.2", - "dev": true, - "license": "BSD-3-Clause", - "bin": { - "flat": "cli.js" - } - }, - "node_modules/flatted": { - "version": "3.3.3", - "dev": true, - "license": "ISC" - }, - "node_modules/follow-redirects": { - "version": "1.15.11", - "dev": true, - "funding": [ - { - "type": "individual", - "url": "https://github.com/sponsors/RubenVerborgh" - } - ], - "license": "MIT", - "engines": { - "node": ">=4.0" - }, - "peerDependenciesMeta": { - "debug": { - "optional": true - } - } - }, - "node_modules/foreground-child": { - "version": "3.3.1", - "dev": true, - "license": "ISC", - "dependencies": { - "cross-spawn": "^7.0.6", - "signal-exit": "^4.0.1" - }, - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/foreground-child/node_modules/signal-exit": { - "version": "4.1.0", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=14" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/forwarded": { - "version": "0.2.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/fraction.js": { - "version": "4.3.7", - "dev": true, - "license": "MIT", - "engines": { - "node": "*" - }, - "funding": { - "type": "patreon", - "url": "https://github.com/sponsors/rawify" - } - }, - "node_modules/fresh": { - "version": "0.5.2", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/fs-extra": { - "version": "8.1.0", - "dev": true, - "license": "MIT", - "dependencies": { - "graceful-fs": "^4.2.0", - "jsonfile": "^4.0.0", - "universalify": "^0.1.0" - }, - "engines": { - "node": ">=6 <7 || >=8" - } - }, - "node_modules/fs-minipass": { - "version": "3.0.3", - "dev": true, - "license": "ISC", - "dependencies": { - "minipass": "^7.0.3" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/fs-monkey": { - "version": "1.1.0", - "dev": true, - "license": "Unlicense" - }, - "node_modules/fs.realpath": { - "version": "1.0.0", - "dev": true, - "license": "ISC" - }, - "node_modules/fsevents": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", - "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", - "dev": true, - "hasInstallScript": true, - "optional": true, - "os": [ - "darwin" - ], - "engines": { - "node": "^8.16.0 || ^10.6.0 || >=11.0.0" - } - }, - "node_modules/function-bind": { - "version": "1.1.2", - "dev": true, - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/gensync": 
{ - "version": "1.0.0-beta.2", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.9.0" - } - }, - "node_modules/get-caller-file": { - "version": "2.0.5", - "dev": true, - "license": "ISC", - "engines": { - "node": "6.* || 8.* || >= 10.*" - } - }, - "node_modules/get-intrinsic": { - "version": "1.3.0", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bind-apply-helpers": "^1.0.2", - "es-define-property": "^1.0.1", - "es-errors": "^1.3.0", - "es-object-atoms": "^1.1.1", - "function-bind": "^1.1.2", - "get-proto": "^1.0.1", - "gopd": "^1.2.0", - "has-symbols": "^1.1.0", - "hasown": "^2.0.2", - "math-intrinsics": "^1.1.0" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/get-package-type": { - "version": "0.1.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8.0.0" - } - }, - "node_modules/get-proto": { - "version": "1.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "dunder-proto": "^1.0.1", - "es-object-atoms": "^1.0.0" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/get-stream": { - "version": "6.0.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/glob": { - "version": "7.2.3", - "dev": true, - "license": "ISC", - "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.1.1", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" - }, - "engines": { - "node": "*" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/glob-parent": { - "version": "5.1.2", - "dev": true, - "license": "ISC", - "dependencies": { - "is-glob": "^4.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/glob-to-regexp": { - "version": "0.4.1", - "dev": true, - "license": "BSD-2-Clause" - }, - "node_modules/globby": { - "version": "13.2.2", - "dev": true, - "license": "MIT", - "dependencies": { - "dir-glob": "^3.0.1", - "fast-glob": "^3.3.0", - "ignore": "^5.2.4", - "merge2": "^1.4.1", - "slash": "^4.0.0" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/gopd": { - "version": "1.2.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/graceful-fs": { - "version": "4.2.11", - "dev": true, - "license": "ISC" - }, - "node_modules/handle-thing": { - "version": "2.0.1", - "dev": true, - "license": "MIT" - }, - "node_modules/has-flag": { - "version": "4.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/has-property-descriptors": { - "version": "1.0.2", - "dev": true, - "license": "MIT", - "dependencies": { - "es-define-property": "^1.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/has-symbols": { - "version": "1.1.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/has-tostringtag": { - "version": "1.0.2", - "dev": true, - "license": "MIT", - "dependencies": { - "has-symbols": "^1.0.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/hasown": { - "version": 
"2.0.2", - "dev": true, - "license": "MIT", - "dependencies": { - "function-bind": "^1.1.2" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/hosted-git-info": { - "version": "7.0.2", - "dev": true, - "license": "ISC", - "dependencies": { - "lru-cache": "^10.0.1" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/hosted-git-info/node_modules/lru-cache": { - "version": "10.4.3", - "dev": true, - "license": "ISC" - }, - "node_modules/hpack.js": { - "version": "2.1.6", - "dev": true, - "license": "MIT", - "dependencies": { - "inherits": "^2.0.1", - "obuf": "^1.0.0", - "readable-stream": "^2.0.1", - "wbuf": "^1.1.0" - } - }, - "node_modules/hpack.js/node_modules/readable-stream": { - "version": "2.3.8", - "dev": true, - "license": "MIT", - "dependencies": { - "core-util-is": "~1.0.0", - "inherits": "~2.0.3", - "isarray": "~1.0.0", - "process-nextick-args": "~2.0.0", - "safe-buffer": "~5.1.1", - "string_decoder": "~1.1.1", - "util-deprecate": "~1.0.1" - } - }, - "node_modules/hpack.js/node_modules/safe-buffer": { - "version": "5.1.2", - "dev": true, - "license": "MIT" - }, - "node_modules/hpack.js/node_modules/string_decoder": { - "version": "1.1.1", - "dev": true, - "license": "MIT", - "dependencies": { - "safe-buffer": "~5.1.0" - } - }, - "node_modules/html-entities": { - "version": "2.6.0", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/mdevils" - }, - { - "type": "patreon", - "url": "https://patreon.com/mdevils" - } - ], - "license": "MIT" - }, - "node_modules/html-escaper": { - "version": "2.0.2", - "dev": true, - "license": "MIT" - }, - "node_modules/htmlparser2": { - "version": "8.0.2", - "dev": true, - "funding": [ - "https://github.com/fb55/htmlparser2?sponsor=1", - { - "type": "github", - "url": "https://github.com/sponsors/fb55" - } - ], - "license": "MIT", - "dependencies": { - "domelementtype": "^2.3.0", - "domhandler": "^5.0.3", - "domutils": "^3.0.1", - "entities": "^4.4.0" - } - }, - "node_modules/htmlparser2/node_modules/domutils": { - "version": "3.2.2", - "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.2.2.tgz", - "integrity": "sha512-6kZKyUajlDuqlHKVX1w7gyslj9MPIXzIFiz/rGu35uC1wMi+kMhQwGhl4lt9unC9Vb9INnY9Z3/ZA3+FhASLaw==", - "dev": true, - "dependencies": { - "dom-serializer": "^2.0.0", - "domelementtype": "^2.3.0", - "domhandler": "^5.0.3" - }, - "funding": { - "url": "https://github.com/fb55/domutils?sponsor=1" - } - }, - "node_modules/htmlparser2/node_modules/entities": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", - "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", - "dev": true, - "engines": { - "node": ">=0.12" - }, - "funding": { - "url": "https://github.com/fb55/entities?sponsor=1" - } - }, - "node_modules/http-cache-semantics": { - "version": "4.2.0", - "dev": true, - "license": "BSD-2-Clause" - }, - "node_modules/http-deceiver": { - "version": "1.2.7", - "dev": true, - "license": "MIT" - }, - "node_modules/http-errors": { - "version": "2.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "depd": "2.0.0", - "inherits": "2.0.4", - "setprototypeof": "1.2.0", - "statuses": "2.0.1", - "toidentifier": "1.0.1" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/http-errors/node_modules/statuses": { - "version": "2.0.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/http-parser-js": { - 
"version": "0.5.10", - "dev": true, - "license": "MIT" - }, - "node_modules/http-proxy": { - "version": "1.18.1", - "dev": true, - "license": "MIT", - "dependencies": { - "eventemitter3": "^4.0.0", - "follow-redirects": "^1.0.0", - "requires-port": "^1.0.0" - }, - "engines": { - "node": ">=8.0.0" - } - }, - "node_modules/http-proxy-agent": { - "version": "7.0.2", - "dev": true, - "license": "MIT", - "dependencies": { - "agent-base": "^7.1.0", - "debug": "^4.3.4" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/http-proxy-middleware": { - "version": "2.0.8", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/http-proxy": "^1.17.8", - "http-proxy": "^1.18.1", - "is-glob": "^4.0.1", - "is-plain-obj": "^3.0.0", - "micromatch": "^4.0.2" - }, - "engines": { - "node": ">=12.0.0" - }, - "peerDependencies": { - "@types/express": "^4.17.13" - }, - "peerDependenciesMeta": { - "@types/express": { - "optional": true - } - } - }, - "node_modules/https-proxy-agent": { - "version": "7.0.4", - "dev": true, - "license": "MIT", - "dependencies": { - "agent-base": "^7.0.2", - "debug": "4" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/human-signals": { - "version": "2.1.0", - "dev": true, - "license": "Apache-2.0", - "engines": { - "node": ">=10.17.0" - } - }, - "node_modules/iconv-lite": { - "version": "0.4.24", - "dev": true, - "license": "MIT", - "dependencies": { - "safer-buffer": ">= 2.1.2 < 3" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/icss-utils": { - "version": "5.1.0", - "dev": true, - "license": "ISC", - "engines": { - "node": "^10 || ^12 || >= 14" - }, - "peerDependencies": { - "postcss": "^8.1.0" - } - }, - "node_modules/ieee754": { - "version": "1.2.1", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "BSD-3-Clause" - }, - "node_modules/ignore": { - "version": "5.3.2", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 4" - } - }, - "node_modules/ignore-walk": { - "version": "6.0.5", - "dev": true, - "license": "ISC", - "dependencies": { - "minimatch": "^9.0.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/ignore-walk/node_modules/brace-expansion": { - "version": "2.0.2", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - "node_modules/ignore-walk/node_modules/minimatch": { - "version": "9.0.5", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/image-size": { - "version": "0.5.5", - "dev": true, - "license": "MIT", - "optional": true, - "bin": { - "image-size": "bin/image-size.js" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/import-fresh": { - "version": "3.3.1", - "dev": true, - "license": "MIT", - "dependencies": { - "parent-module": "^1.0.0", - "resolve-from": "^4.0.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/import-fresh/node_modules/resolve-from": { - "version": "4.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=4" - } - }, - "node_modules/imurmurhash": { - "version": "0.1.4", - "dev": true, - "license": 
"MIT", - "engines": { - "node": ">=0.8.19" - } - }, - "node_modules/indent-string": { - "version": "4.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/inflight": { - "version": "1.0.6", - "dev": true, - "license": "ISC", - "dependencies": { - "once": "^1.3.0", - "wrappy": "1" - } - }, - "node_modules/inherits": { - "version": "2.0.4", - "dev": true, - "license": "ISC" - }, - "node_modules/ini": { - "version": "4.1.2", - "dev": true, - "license": "ISC", - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/inquirer": { - "version": "9.2.15", - "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-9.2.15.tgz", - "integrity": "sha512-vI2w4zl/mDluHt9YEQ/543VTCwPKWiHzKtm9dM2V0NdFcqEexDAjUHzO1oA60HRNaVifGXXM1tRRNluLVHa0Kg==", - "dev": true, - "dependencies": { - "@ljharb/through": "^2.3.12", - "ansi-escapes": "^4.3.2", - "chalk": "^5.3.0", - "cli-cursor": "^3.1.0", - "cli-width": "^4.1.0", - "external-editor": "^3.1.0", - "figures": "^3.2.0", - "lodash": "^4.17.21", - "mute-stream": "1.0.0", - "ora": "^5.4.1", - "run-async": "^3.0.0", - "rxjs": "^7.8.1", - "string-width": "^4.2.3", - "strip-ansi": "^6.0.1", - "wrap-ansi": "^6.2.0" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/inquirer/node_modules/chalk": { - "version": "5.6.2", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.6.2.tgz", - "integrity": "sha512-7NzBL0rN6fMUW+f7A6Io4h40qQlG+xGmtMxfbnH/K7TAtt8JQWVQK+6g0UXKMeVJoyV5EkkNsErQ8pVD3bLHbA==", - "dev": true, - "engines": { - "node": "^12.17.0 || ^14.13 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" - } - }, - "node_modules/inquirer/node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - }, - "node_modules/ip-address": { - "version": "10.0.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 12" - } - }, - "node_modules/ipaddr.js": { - "version": "2.2.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 10" - } - }, - "node_modules/is-arrayish": { - "version": "0.2.1", - "dev": true, - "license": "MIT" - }, - "node_modules/is-binary-path": { - "version": "2.1.0", - "dev": true, - "license": "MIT", - "dependencies": { - "binary-extensions": "^2.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/is-core-module": { - "version": "2.16.1", - "dev": true, - "license": "MIT", - "dependencies": { - "hasown": "^2.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-docker": { - "version": "2.2.1", - "dev": true, - "license": "MIT", - "bin": { - "is-docker": "cli.js" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/is-extglob": { - "version": "2.1.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-fullwidth-code-point": { - "version": "3.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/is-glob": { - "version": "4.0.3", - "dev": true, - "license": "MIT", - "dependencies": { - "is-extglob": "^2.1.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-interactive": { - "version": "1.0.0", - "dev": true, - "license": "MIT", - "engines": { - 
"node": ">=8" - } - }, - "node_modules/is-lambda": { - "version": "1.0.1", - "dev": true, - "license": "MIT" - }, - "node_modules/is-number": { - "version": "7.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.12.0" - } - }, - "node_modules/is-plain-obj": { - "version": "3.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/is-plain-object": { - "version": "2.0.4", - "dev": true, - "license": "MIT", - "dependencies": { - "isobject": "^3.0.1" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/is-regex": { - "version": "1.2.1", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.2", - "gopd": "^1.2.0", - "has-tostringtag": "^1.0.2", - "hasown": "^2.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/is-stream": { - "version": "2.0.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/is-unicode-supported": { - "version": "0.1.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/is-what": { - "version": "3.14.1", - "dev": true, - "license": "MIT" - }, - "node_modules/is-wsl": { - "version": "2.2.0", - "dev": true, - "license": "MIT", - "dependencies": { - "is-docker": "^2.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/isarray": { - "version": "1.0.0", - "dev": true, - "license": "MIT" - }, - "node_modules/isbinaryfile": { - "version": "4.0.10", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 8.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/gjtorikian/" - } - }, - "node_modules/isexe": { - "version": "2.0.0", - "dev": true, - "license": "ISC" - }, - "node_modules/isobject": { - "version": "3.0.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/istanbul-lib-coverage": { - "version": "3.2.2", - "dev": true, - "license": "BSD-3-Clause", - "engines": { - "node": ">=8" - } - }, - "node_modules/istanbul-lib-instrument": { - "version": "5.2.1", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "@babel/core": "^7.12.3", - "@babel/parser": "^7.14.7", - "@istanbuljs/schema": "^0.1.2", - "istanbul-lib-coverage": "^3.2.0", - "semver": "^6.3.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/istanbul-lib-instrument/node_modules/@babel/parser": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", - "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", - "dev": true, - "dependencies": { - "@babel/types": "^7.28.4" - }, - "bin": { - "parser": "bin/babel-parser.js" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/istanbul-lib-instrument/node_modules/@babel/types": { - "version": "7.28.4", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", - "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", - "dev": true, - "dependencies": { - "@babel/helper-string-parser": "^7.27.1", - "@babel/helper-validator-identifier": "^7.27.1" - }, - "engines": { - "node": ">=6.9.0" - } - }, - 
"node_modules/istanbul-lib-instrument/node_modules/semver": { - "version": "6.3.1", - "dev": true, - "license": "ISC", - "bin": { - "semver": "bin/semver.js" - } - }, - "node_modules/istanbul-lib-report": { - "version": "3.0.1", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "istanbul-lib-coverage": "^3.0.0", - "make-dir": "^4.0.0", - "supports-color": "^7.1.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/istanbul-lib-source-maps": { - "version": "4.0.1", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "debug": "^4.1.1", - "istanbul-lib-coverage": "^3.0.0", - "source-map": "^0.6.1" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/istanbul-lib-source-maps/node_modules/source-map": { - "version": "0.6.1", - "dev": true, - "license": "BSD-3-Clause", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/jackspeak": { - "version": "3.4.3", - "dev": true, - "license": "BlueOak-1.0.0", - "dependencies": { - "@isaacs/cliui": "^8.0.2" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - }, - "optionalDependencies": { - "@pkgjs/parseargs": "^0.11.0" - } - }, - "node_modules/jasmine-core": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/jasmine-core/-/jasmine-core-5.1.2.tgz", - "integrity": "sha512-2oIUMGn00FdUiqz6epiiJr7xcFyNYj3rDcfmnzfkBnHyBQ3cBQUs4mmyGsOb7TTLb9kxk7dBcmEmqhDKkBoDyA==", - "dev": true, - "peer": true - }, - "node_modules/jest-worker": { - "version": "27.5.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/node": "*", - "merge-stream": "^2.0.0", - "supports-color": "^8.0.0" - }, - "engines": { - "node": ">= 10.13.0" - } - }, - "node_modules/jest-worker/node_modules/supports-color": { - "version": "8.1.1", - "dev": true, - "license": "MIT", - "dependencies": { - "has-flag": "^4.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/supports-color?sponsor=1" - } - }, - "node_modules/js-tokens": { - "version": "4.0.0", - "dev": true, - "license": "MIT" - }, - "node_modules/jsesc": { - "version": "3.1.0", - "dev": true, - "license": "MIT", - "bin": { - "jsesc": "bin/jsesc" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/json-parse-even-better-errors": { - "version": "3.0.2", - "dev": true, - "license": "MIT", - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/json-schema-traverse": { - "version": "1.0.0", - "dev": true, - "license": "MIT" - }, - "node_modules/json5": { - "version": "2.2.3", - "dev": true, - "license": "MIT", - "bin": { - "json5": "lib/cli.js" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/jsonc-parser": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/jsonc-parser/-/jsonc-parser-3.2.1.tgz", - "integrity": "sha512-AilxAyFOAcK5wA1+LeaySVBrHsGQvUFCDWXKpZjzaL0PqW+xfBOttn8GNtWKFWqneyMZj41MWF9Kl6iPWLwgOA==", - "dev": true - }, - "node_modules/jsonfile": { - "version": "4.0.0", - "dev": true, - "license": "MIT", - "optionalDependencies": { - "graceful-fs": "^4.1.6" - } - }, - "node_modules/jsonparse": { - "version": "1.3.1", - "dev": true, - "engines": [ - "node >= 0.2.0" - ], - "license": "MIT" - }, - "node_modules/karma": { - "version": "6.4.4", - "resolved": "https://registry.npmjs.org/karma/-/karma-6.4.4.tgz", - "integrity": "sha512-LrtUxbdvt1gOpo3gxG+VAJlJAEMhbWlM4YrFQgql98FwF7+K8K12LYO4hnDdUkNjeztYrOXEMqgTajSWgmtI/w==", - "dev": true, - "peer": true, - "dependencies": { - "@colors/colors": "1.5.0", - "body-parser": "^1.19.0", - 
"braces": "^3.0.2", - "chokidar": "^3.5.1", - "connect": "^3.7.0", - "di": "^0.0.1", - "dom-serialize": "^2.2.1", - "glob": "^7.1.7", - "graceful-fs": "^4.2.6", - "http-proxy": "^1.18.1", - "isbinaryfile": "^4.0.8", - "lodash": "^4.17.21", - "log4js": "^6.4.1", - "mime": "^2.5.2", - "minimatch": "^3.0.4", - "mkdirp": "^0.5.5", - "qjobs": "^1.2.0", - "range-parser": "^1.2.1", - "rimraf": "^3.0.2", - "socket.io": "^4.7.2", - "source-map": "^0.6.1", - "tmp": "^0.2.1", - "ua-parser-js": "^0.7.30", - "yargs": "^16.1.1" - }, - "bin": { - "karma": "bin/karma" - }, - "engines": { - "node": ">= 10" - } - }, - "node_modules/karma-chrome-launcher": { - "version": "3.2.0", - "dev": true, - "license": "MIT", - "dependencies": { - "which": "^1.2.1" - } - }, - "node_modules/karma-coverage": { - "version": "2.2.1", - "dev": true, - "license": "MIT", - "dependencies": { - "istanbul-lib-coverage": "^3.2.0", - "istanbul-lib-instrument": "^5.1.0", - "istanbul-lib-report": "^3.0.0", - "istanbul-lib-source-maps": "^4.0.1", - "istanbul-reports": "^3.0.5", - "minimatch": "^3.0.4" - }, - "engines": { - "node": ">=10.0.0" - } - }, - "node_modules/karma-coverage/node_modules/istanbul-reports": { - "version": "3.2.0", - "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.2.0.tgz", - "integrity": "sha512-HGYWWS/ehqTV3xN10i23tkPkpH46MLCIMFNCaaKNavAXTF1RkqxawEPtnjnGZ6XKSInBKkiOA5BKS+aZiY3AvA==", - "dev": true, - "dependencies": { - "html-escaper": "^2.0.0", - "istanbul-lib-report": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/karma-jasmine": { - "version": "5.1.0", - "dev": true, - "license": "MIT", - "dependencies": { - "jasmine-core": "^4.1.0" - }, - "engines": { - "node": ">=12" - }, - "peerDependencies": { - "karma": "^6.0.0" - } - }, - "node_modules/karma-jasmine-html-reporter": { - "version": "2.1.0", - "dev": true, - "license": "MIT", - "peerDependencies": { - "jasmine-core": "^4.0.0 || ^5.0.0", - "karma": "^6.0.0", - "karma-jasmine": "^5.0.0" - } - }, - "node_modules/karma-jasmine/node_modules/jasmine-core": { - "version": "4.6.1", - "resolved": "https://registry.npmjs.org/jasmine-core/-/jasmine-core-4.6.1.tgz", - "integrity": "sha512-VYz/BjjmC3klLJlLwA4Kw8ytk0zDSmbbDLNs794VnWmkcCB7I9aAL/D48VNQtmITyPvea2C3jdUMfc3kAoy0PQ==", - "dev": true - }, - "node_modules/karma-source-map-support": { - "version": "1.4.0", - "dev": true, - "license": "MIT", - "dependencies": { - "source-map-support": "^0.5.5" - } - }, - "node_modules/karma/node_modules/cliui": { - "version": "7.0.4", - "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", - "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", - "dev": true, - "dependencies": { - "string-width": "^4.2.0", - "strip-ansi": "^6.0.0", - "wrap-ansi": "^7.0.0" - } - }, - "node_modules/karma/node_modules/lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", - "dev": true - }, - "node_modules/karma/node_modules/log4js": { - "version": "6.9.1", - "resolved": "https://registry.npmjs.org/log4js/-/log4js-6.9.1.tgz", - "integrity": "sha512-1somDdy9sChrr9/f4UlzhdaGfDR2c/SaD2a4T7qEkG4jTS57/B3qmnjLYePwQ8cqWnUHZI0iAKxMBpCZICiZ2g==", - "dev": true, - "dependencies": { - "date-format": "^4.0.14", - "debug": "^4.3.4", - "flatted": "^3.2.7", - "rfdc": "^1.3.0", - "streamroller": "^3.1.5" - }, - "engines": { - 
"node": ">=8.0" - } - }, - "node_modules/karma/node_modules/source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", - "dev": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/karma/node_modules/tmp": { - "version": "0.2.5", - "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.5.tgz", - "integrity": "sha512-voyz6MApa1rQGUxT3E+BK7/ROe8itEx7vD8/HEvt4xwXucvQ5G5oeEiHkmHZJuBO21RpOf+YYm9MOivj709jow==", - "dev": true, - "engines": { - "node": ">=14.14" - } - }, - "node_modules/karma/node_modules/wrap-ansi": { - "version": "7.0.0", - "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", - "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", - "dev": true, - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/karma/node_modules/yargs": { - "version": "16.2.0", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", - "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", - "dev": true, - "dependencies": { - "cliui": "^7.0.2", - "escalade": "^3.1.1", - "get-caller-file": "^2.0.5", - "require-directory": "^2.1.1", - "string-width": "^4.2.0", - "y18n": "^5.0.5", - "yargs-parser": "^20.2.2" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/karma/node_modules/yargs-parser": { - "version": "20.2.9", - "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", - "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", - "dev": true, - "engines": { - "node": ">=10" - } - }, - "node_modules/kind-of": { - "version": "6.0.3", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/klona": { - "version": "2.0.6", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 8" - } - }, - "node_modules/launch-editor": { - "version": "2.11.1", - "dev": true, - "license": "MIT", - "dependencies": { - "picocolors": "^1.1.1", - "shell-quote": "^1.8.3" - } - }, - "node_modules/less": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/less/-/less-4.2.0.tgz", - "integrity": "sha512-P3b3HJDBtSzsXUl0im2L7gTO5Ubg8mEN6G8qoTS77iXxXX4Hvu4Qj540PZDvQ8V6DmX6iXo98k7Md0Cm1PrLaA==", - "dev": true, - "peer": true, - "dependencies": { - "copy-anything": "^2.0.1", - "parse-node-version": "^1.0.1", - "tslib": "^2.3.0" - }, - "bin": { - "lessc": "bin/lessc" - }, - "engines": { - "node": ">=6" - }, - "optionalDependencies": { - "errno": "^0.1.1", - "graceful-fs": "^4.1.2", - "image-size": "~0.5.0", - "make-dir": "^2.1.0", - "mime": "^1.4.1", - "needle": "^3.1.0", - "source-map": "~0.6.0" - } - }, - "node_modules/less-loader": { - "version": "11.1.0", - "dev": true, - "license": "MIT", - "dependencies": { - "klona": "^2.0.4" - }, - "engines": { - "node": ">= 14.15.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependencies": { - "less": "^3.5.0 || ^4.0.0", - "webpack": "^5.0.0" - } - }, - "node_modules/less/node_modules/iconv-lite": { - "version": "0.6.3", - "resolved": 
"https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", - "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", - "dev": true, - "optional": true, - "dependencies": { - "safer-buffer": ">= 2.1.2 < 3.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/less/node_modules/make-dir": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", - "integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", - "dev": true, - "optional": true, - "dependencies": { - "pify": "^4.0.1", - "semver": "^5.6.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/less/node_modules/mime": { - "version": "1.6.0", - "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", - "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", - "dev": true, - "optional": true, - "bin": { - "mime": "cli.js" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/less/node_modules/needle": { - "version": "3.3.1", - "resolved": "https://registry.npmjs.org/needle/-/needle-3.3.1.tgz", - "integrity": "sha512-6k0YULvhpw+RoLNiQCRKOl09Rv1dPLr8hHnVjHqdolKwDrdNyk+Hmrthi4lIGPPz3r39dLx0hsF5s40sZ3Us4Q==", - "dev": true, - "optional": true, - "dependencies": { - "iconv-lite": "^0.6.3", - "sax": "^1.2.4" - }, - "bin": { - "needle": "bin/needle" - }, - "engines": { - "node": ">= 4.4.x" - } - }, - "node_modules/less/node_modules/semver": { - "version": "5.7.2", - "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", - "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", - "dev": true, - "optional": true, - "bin": { - "semver": "bin/semver" - } - }, - "node_modules/less/node_modules/source-map": { - "version": "0.6.1", - "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", - "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", - "dev": true, - "optional": true, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/license-webpack-plugin": { - "version": "4.0.2", - "dev": true, - "license": "ISC", - "dependencies": { - "webpack-sources": "^3.0.0" - }, - "peerDependenciesMeta": { - "webpack": { - "optional": true - }, - "webpack-sources": { - "optional": true - } - } - }, - "node_modules/license-webpack-plugin/node_modules/webpack-sources": { - "version": "3.3.3", - "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.3.3.tgz", - "integrity": "sha512-yd1RBzSGanHkitROoPFd6qsrxt+oFhg/129YzheDGqeustzX0vTZJZsSsQjVQC4yzBQ56K55XU8gaNCtIzOnTg==", - "dev": true, - "engines": { - "node": ">=10.13.0" - } - }, - "node_modules/lines-and-columns": { - "version": "1.2.4", - "dev": true, - "license": "MIT" - }, - "node_modules/loader-runner": { - "version": "4.3.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6.11.5" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - } - }, - "node_modules/loader-utils": { - "version": "3.2.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 12.13.0" - } - }, - "node_modules/locate-path": { - "version": "5.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "p-locate": "^4.1.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/lodash.debounce": { - "version": "4.0.8", - "resolved": 
"https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz", - "integrity": "sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==", - "dev": true - }, - "node_modules/log-symbols": { - "version": "4.1.0", - "dev": true, - "license": "MIT", - "dependencies": { - "chalk": "^4.1.0", - "is-unicode-supported": "^0.1.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/lru-cache": { - "version": "5.1.1", - "dev": true, - "license": "ISC", - "dependencies": { - "yallist": "^3.0.2" - } - }, - "node_modules/magic-string": { - "version": "0.30.8", - "dev": true, - "license": "MIT", - "dependencies": { - "@jridgewell/sourcemap-codec": "^1.4.15" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/make-dir": { - "version": "4.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "semver": "^7.5.3" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/make-fetch-happen": { - "version": "13.0.1", - "dev": true, - "license": "ISC", - "dependencies": { - "@npmcli/agent": "^2.0.0", - "cacache": "^18.0.0", - "http-cache-semantics": "^4.1.1", - "is-lambda": "^1.0.1", - "minipass": "^7.0.2", - "minipass-fetch": "^3.0.0", - "minipass-flush": "^1.0.5", - "minipass-pipeline": "^1.2.4", - "negotiator": "^0.6.3", - "proc-log": "^4.2.0", - "promise-retry": "^2.0.1", - "ssri": "^10.0.0" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/make-fetch-happen/node_modules/proc-log": { - "version": "4.2.0", - "dev": true, - "license": "ISC", - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/math-intrinsics": { - "version": "1.1.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/media-typer": { - "version": "0.3.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/merge-descriptors": { - "version": "1.0.3", - "dev": true, - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/merge-stream": { - "version": "2.0.0", - "dev": true, - "license": "MIT" - }, - "node_modules/merge2": { - "version": "1.4.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 8" - } - }, - "node_modules/methods": { - "version": "1.1.2", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/micromatch": { - "version": "4.0.8", - "dev": true, - "license": "MIT", - "dependencies": { - "braces": "^3.0.3", - "picomatch": "^2.3.1" - }, - "engines": { - "node": ">=8.6" - } - }, - "node_modules/micromatch/node_modules/picomatch": { - "version": "2.3.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8.6" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - "node_modules/mime": { - "version": "2.6.0", - "dev": true, - "license": "MIT", - "bin": { - "mime": "cli.js" - }, - "engines": { - "node": ">=4.0.0" - } - }, - "node_modules/mime-db": { - "version": "1.52.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/mime-types": { - "version": "2.1.35", - "dev": true, - "license": "MIT", - "dependencies": { - "mime-db": "1.52.0" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/mimic-fn": { - "version": "2.1.0", - "dev": true, - "license": "MIT", - "engines": { - 
"node": ">=6" - } - }, - "node_modules/mini-css-extract-plugin": { - "version": "2.8.1", - "dev": true, - "license": "MIT", - "dependencies": { - "schema-utils": "^4.0.0", - "tapable": "^2.2.1" - }, - "engines": { - "node": ">= 12.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependencies": { - "webpack": "^5.0.0" - } - }, - "node_modules/minimalistic-assert": { - "version": "1.0.1", - "dev": true, - "license": "ISC" - }, - "node_modules/minimatch": { - "version": "3.1.2", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^1.1.7" - }, - "engines": { - "node": "*" - } - }, - "node_modules/minimist": { - "version": "1.2.8", - "dev": true, - "license": "MIT", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/minipass": { - "version": "7.1.2", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, - "node_modules/minipass-collect": { - "version": "2.0.1", - "dev": true, - "license": "ISC", - "dependencies": { - "minipass": "^7.0.3" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - } - }, - "node_modules/minipass-fetch": { - "version": "3.0.5", - "dev": true, - "license": "MIT", - "dependencies": { - "minipass": "^7.0.3", - "minipass-sized": "^1.0.3", - "minizlib": "^2.1.2" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - }, - "optionalDependencies": { - "encoding": "^0.1.13" - } - }, - "node_modules/minipass-flush": { - "version": "1.0.5", - "dev": true, - "license": "ISC", - "dependencies": { - "minipass": "^3.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/minipass-flush/node_modules/minipass": { - "version": "3.3.6", - "dev": true, - "license": "ISC", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/minipass-flush/node_modules/yallist": { - "version": "4.0.0", - "dev": true, - "license": "ISC" - }, - "node_modules/minipass-json-stream": { - "version": "1.0.2", - "dev": true, - "license": "MIT", - "dependencies": { - "jsonparse": "^1.3.1", - "minipass": "^3.0.0" - } - }, - "node_modules/minipass-json-stream/node_modules/minipass": { - "version": "3.3.6", - "dev": true, - "license": "ISC", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/minipass-json-stream/node_modules/yallist": { - "version": "4.0.0", - "dev": true, - "license": "ISC" - }, - "node_modules/minipass-pipeline": { - "version": "1.2.4", - "dev": true, - "license": "ISC", - "dependencies": { - "minipass": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/minipass-pipeline/node_modules/minipass": { - "version": "3.3.6", - "dev": true, - "license": "ISC", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/minipass-pipeline/node_modules/yallist": { - "version": "4.0.0", - "dev": true, - "license": "ISC" - }, - "node_modules/minipass-sized": { - "version": "1.0.3", - "dev": true, - "license": "ISC", - "dependencies": { - "minipass": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/minipass-sized/node_modules/minipass": { - "version": "3.3.6", - "dev": true, - "license": "ISC", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/minipass-sized/node_modules/yallist": { - "version": "4.0.0", - "dev": true, - "license": "ISC" - }, - "node_modules/minizlib": { - "version": "2.1.2", - "dev": 
true, - "license": "MIT", - "dependencies": { - "minipass": "^3.0.0", - "yallist": "^4.0.0" - }, - "engines": { - "node": ">= 8" - } - }, - "node_modules/minizlib/node_modules/minipass": { - "version": "3.3.6", - "dev": true, - "license": "ISC", - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/minizlib/node_modules/yallist": { - "version": "4.0.0", - "dev": true, - "license": "ISC" - }, - "node_modules/mkdirp": { - "version": "0.5.6", - "dev": true, - "license": "MIT", - "dependencies": { - "minimist": "^1.2.6" - }, - "bin": { - "mkdirp": "bin/cmd.js" - } - }, - "node_modules/mrmime": { - "version": "2.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10" - } - }, - "node_modules/ms": { - "version": "2.1.3", - "dev": true, - "license": "MIT" - }, - "node_modules/multicast-dns": { - "version": "7.2.5", - "dev": true, - "license": "MIT", - "dependencies": { - "dns-packet": "^5.2.2", - "thunky": "^1.0.2" - }, - "bin": { - "multicast-dns": "cli.js" - } - }, - "node_modules/mute-stream": { - "version": "1.0.0", - "dev": true, - "license": "ISC", - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/negotiator": { - "version": "0.6.4", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/node-addon-api": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-3.2.1.tgz", - "integrity": "sha512-mmcei9JghVNDYydghQmeDX8KoAm0FAiYyIcUt/N4nhyAipB17pllZQDOJD2fotxABnt4Mdz+dKTO7eftLg4d0A==", - "dev": true, - "optional": true - }, - "node_modules/node-gyp-build": { - "version": "4.8.4", - "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.8.4.tgz", - "integrity": "sha512-LA4ZjwlnUblHVgq0oBF3Jl/6h/Nvs5fzBLwdEF4nuxnFdsfajde4WfxtJr3CaiH+F6ewcIB/q4jQ4UzPyid+CQ==", - "dev": true, - "optional": true, - "bin": { - "node-gyp-build": "bin.js", - "node-gyp-build-optional": "optional.js", - "node-gyp-build-test": "build-test.js" - } - }, - "node_modules/node-releases": { - "version": "2.0.25", - "dev": true, - "license": "MIT" - }, - "node_modules/nopt": { - "version": "7.2.1", - "resolved": "https://registry.npmjs.org/nopt/-/nopt-7.2.1.tgz", - "integrity": "sha512-taM24ViiimT/XntxbPyJQzCG+p4EKOpgD3mxFwW38mGjVUrfERQOeY4EDHjdnptttfHuHQXFx+lTP08Q+mLa/w==", - "dev": true, - "dependencies": { - "abbrev": "^2.0.0" - }, - "bin": { - "nopt": "bin/nopt.js" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/normalize-package-data": { - "version": "6.0.2", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "hosted-git-info": "^7.0.0", - "semver": "^7.3.5", - "validate-npm-package-license": "^3.0.4" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/normalize-path": { - "version": "3.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/normalize-range": { - "version": "0.1.2", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/npm-bundled": { - "version": "3.0.1", - "dev": true, - "license": "ISC", - "dependencies": { - "npm-normalize-package-bin": "^3.0.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/npm-install-checks": { - "version": "6.3.0", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "semver": "^7.1.1" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - 
"node_modules/npm-normalize-package-bin": { - "version": "3.0.1", - "dev": true, - "license": "ISC", - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/npm-package-arg": { - "version": "11.0.1", - "dev": true, - "license": "ISC", - "dependencies": { - "hosted-git-info": "^7.0.0", - "proc-log": "^3.0.0", - "semver": "^7.3.5", - "validate-npm-package-name": "^5.0.0" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/npm-packlist": { - "version": "8.0.2", - "dev": true, - "license": "ISC", - "dependencies": { - "ignore-walk": "^6.0.4" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/npm-pick-manifest": { - "version": "9.0.0", - "dev": true, - "license": "ISC", - "dependencies": { - "npm-install-checks": "^6.0.0", - "npm-normalize-package-bin": "^3.0.0", - "npm-package-arg": "^11.0.0", - "semver": "^7.3.5" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/npm-registry-fetch": { - "version": "16.2.1", - "dev": true, - "license": "ISC", - "dependencies": { - "@npmcli/redact": "^1.1.0", - "make-fetch-happen": "^13.0.0", - "minipass": "^7.0.2", - "minipass-fetch": "^3.0.0", - "minipass-json-stream": "^1.0.1", - "minizlib": "^2.1.2", - "npm-package-arg": "^11.0.0", - "proc-log": "^4.0.0" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/npm-registry-fetch/node_modules/proc-log": { - "version": "4.2.0", - "dev": true, - "license": "ISC", - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/npm-run-path": { - "version": "4.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "path-key": "^3.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/nth-check": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz", - "integrity": "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==", - "dev": true, - "dependencies": { - "boolbase": "^1.0.0" - }, - "funding": { - "url": "https://github.com/fb55/nth-check?sponsor=1" - } - }, - "node_modules/object-assign": { - "version": "4.1.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/obuf": { - "version": "1.1.2", - "dev": true, - "license": "MIT" - }, - "node_modules/on-finished": { - "version": "2.4.1", - "dev": true, - "license": "MIT", - "dependencies": { - "ee-first": "1.1.1" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/on-headers": { - "version": "1.1.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/once": { - "version": "1.4.0", - "dev": true, - "license": "ISC", - "dependencies": { - "wrappy": "1" - } - }, - "node_modules/onetime": { - "version": "5.1.2", - "dev": true, - "license": "MIT", - "dependencies": { - "mimic-fn": "^2.1.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/open": { - "version": "8.4.2", - "dev": true, - "license": "MIT", - "dependencies": { - "define-lazy-prop": "^2.0.0", - "is-docker": "^2.1.1", - "is-wsl": "^2.2.0" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/ora": { - "version": "5.4.1", - "dev": true, - "license": "MIT", - "dependencies": { - "bl": "^4.1.0", - "chalk": "^4.1.0", - "cli-cursor": "^3.1.0", - "cli-spinners": "^2.5.0", - "is-interactive": 
"^1.0.0", - "is-unicode-supported": "^0.1.0", - "log-symbols": "^4.1.0", - "strip-ansi": "^6.0.0", - "wcwidth": "^1.0.1" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/os-tmpdir": { - "version": "1.0.2", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/p-limit": { - "version": "2.3.0", - "dev": true, - "license": "MIT", - "dependencies": { - "p-try": "^2.0.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/p-locate": { - "version": "4.1.0", - "dev": true, - "license": "MIT", - "dependencies": { - "p-limit": "^2.2.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/p-map": { - "version": "4.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "aggregate-error": "^3.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/p-retry": { - "version": "4.6.2", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/retry": "0.12.0", - "retry": "^0.13.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/p-retry/node_modules/retry": { - "version": "0.13.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 4" - } - }, - "node_modules/p-try": { - "version": "2.2.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/package-json-from-dist": { - "version": "1.0.1", - "dev": true, - "license": "BlueOak-1.0.0" - }, - "node_modules/pacote": { - "version": "17.0.6", - "dev": true, - "license": "ISC", - "dependencies": { - "@npmcli/git": "^5.0.0", - "@npmcli/installed-package-contents": "^2.0.1", - "@npmcli/promise-spawn": "^7.0.0", - "@npmcli/run-script": "^7.0.0", - "cacache": "^18.0.0", - "fs-minipass": "^3.0.0", - "minipass": "^7.0.2", - "npm-package-arg": "^11.0.0", - "npm-packlist": "^8.0.0", - "npm-pick-manifest": "^9.0.0", - "npm-registry-fetch": "^16.0.0", - "proc-log": "^3.0.0", - "promise-retry": "^2.0.1", - "read-package-json": "^7.0.0", - "read-package-json-fast": "^3.0.0", - "sigstore": "^2.2.0", - "ssri": "^10.0.0", - "tar": "^6.1.11" - }, - "bin": { - "pacote": "lib/bin.js" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/pacote/node_modules/mkdirp": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", - "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", - "dev": true, - "bin": { - "mkdirp": "bin/cmd.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/pacote/node_modules/tar": { - "version": "6.2.1", - "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", - "integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==", - "dev": true, - "dependencies": { - "chownr": "^2.0.0", - "fs-minipass": "^2.0.0", - "minipass": "^5.0.0", - "minizlib": "^2.1.1", - "mkdirp": "^1.0.3", - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/pacote/node_modules/tar/node_modules/fs-minipass": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", - "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", - "dev": true, - "dependencies": { - "minipass": "^3.0.0" - }, - "engines": { - "node": 
">= 8" - } - }, - "node_modules/pacote/node_modules/tar/node_modules/fs-minipass/node_modules/minipass": { - "version": "3.3.6", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", - "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", - "dev": true, - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/pacote/node_modules/tar/node_modules/minipass": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", - "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", - "dev": true, - "engines": { - "node": ">=8" - } - }, - "node_modules/pacote/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - }, - "node_modules/parent-module": { - "version": "1.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "callsites": "^3.0.0" - }, - "engines": { - "node": ">=6" - } - }, - "node_modules/parse-json": { - "version": "5.2.0", - "dev": true, - "license": "MIT", - "dependencies": { - "@babel/code-frame": "^7.0.0", - "error-ex": "^1.3.1", - "json-parse-even-better-errors": "^2.3.0", - "lines-and-columns": "^1.1.6" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/parse-json/node_modules/json-parse-even-better-errors": { - "version": "2.3.1", - "dev": true, - "license": "MIT" - }, - "node_modules/parse-node-version": { - "version": "1.0.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/parse5-html-rewriting-stream": { - "version": "7.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "entities": "^4.3.0", - "parse5": "^7.0.0", - "parse5-sax-parser": "^7.0.0" - }, - "funding": { - "url": "https://github.com/inikulin/parse5?sponsor=1" - } - }, - "node_modules/parse5-html-rewriting-stream/node_modules/entities": { - "version": "4.5.0", - "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", - "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", - "dev": true, - "engines": { - "node": ">=0.12" - }, - "funding": { - "url": "https://github.com/fb55/entities?sponsor=1" - } - }, - "node_modules/parse5-html-rewriting-stream/node_modules/parse5": { - "version": "7.3.0", - "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.3.0.tgz", - "integrity": "sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw==", - "dev": true, - "dependencies": { - "entities": "^6.0.0" - }, - "funding": { - "url": "https://github.com/inikulin/parse5?sponsor=1" - } - }, - "node_modules/parse5-html-rewriting-stream/node_modules/parse5/node_modules/entities": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/entities/-/entities-6.0.1.tgz", - "integrity": "sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g==", - "dev": true, - "engines": { - "node": ">=0.12" - }, - "funding": { - "url": "https://github.com/fb55/entities?sponsor=1" - } - }, - "node_modules/parse5-sax-parser": { - "version": "7.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "parse5": "^7.0.0" - }, - "funding": { - 
"url": "https://github.com/inikulin/parse5?sponsor=1" - } - }, - "node_modules/parse5-sax-parser/node_modules/entities": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/entities/-/entities-6.0.1.tgz", - "integrity": "sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g==", - "dev": true, - "engines": { - "node": ">=0.12" - }, - "funding": { - "url": "https://github.com/fb55/entities?sponsor=1" - } - }, - "node_modules/parse5-sax-parser/node_modules/parse5": { - "version": "7.3.0", - "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.3.0.tgz", - "integrity": "sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw==", - "dev": true, - "dependencies": { - "entities": "^6.0.0" - }, - "funding": { - "url": "https://github.com/inikulin/parse5?sponsor=1" - } - }, - "node_modules/parseurl": { - "version": "1.3.3", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/path-exists": { - "version": "4.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/path-is-absolute": { - "version": "1.0.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/path-key": { - "version": "3.1.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/path-parse": { - "version": "1.0.7", - "dev": true, - "license": "MIT" - }, - "node_modules/path-scurry": { - "version": "1.11.1", - "dev": true, - "license": "BlueOak-1.0.0", - "dependencies": { - "lru-cache": "^10.2.0", - "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" - }, - "engines": { - "node": ">=16 || 14 >=14.18" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/path-scurry/node_modules/lru-cache": { - "version": "10.4.3", - "dev": true, - "license": "ISC" - }, - "node_modules/path-to-regexp": { - "version": "0.1.12", - "dev": true, - "license": "MIT" - }, - "node_modules/path-type": { - "version": "4.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/picocolors": { - "version": "1.1.1", - "dev": true, - "license": "ISC" - }, - "node_modules/picomatch": { - "version": "4.0.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - "node_modules/pify": { - "version": "4.0.1", - "dev": true, - "license": "MIT", - "optional": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/piscina": { - "version": "4.4.0", - "dev": true, - "license": "MIT", - "optionalDependencies": { - "nice-napi": "^1.0.2" - } - }, - "node_modules/piscina/node_modules/nice-napi": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/nice-napi/-/nice-napi-1.0.2.tgz", - "integrity": "sha512-px/KnJAJZf5RuBGcfD+Sp2pAKq0ytz8j+1NehvgIGFkvtvFrDM3T8E4x/JJODXK9WZow8RRGrbA9QQ3hs+pDhA==", - "dev": true, - "hasInstallScript": true, - "optional": true, - "os": [ - "!win32" - ], - "dependencies": { - "node-addon-api": "^3.0.0", - "node-gyp-build": "^4.2.2" - } - }, - "node_modules/pkg-dir": { - "version": "7.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "find-up": "^6.3.0" - }, - "engines": { - "node": ">=14.16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/pkg-dir/node_modules/find-up": { - "version": "6.3.0", - "dev": true, - "license": "MIT", - "dependencies": { - "locate-path": 
"^7.1.0", - "path-exists": "^5.0.0" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/pkg-dir/node_modules/locate-path": { - "version": "7.2.0", - "dev": true, - "license": "MIT", - "dependencies": { - "p-locate": "^6.0.0" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/pkg-dir/node_modules/p-limit": { - "version": "4.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "yocto-queue": "^1.0.0" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/pkg-dir/node_modules/p-locate": { - "version": "6.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "p-limit": "^4.0.0" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/pkg-dir/node_modules/path-exists": { - "version": "5.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - } - }, - "node_modules/playwright": { - "version": "1.56.1", - "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.56.1.tgz", - "integrity": "sha512-aFi5B0WovBHTEvpM3DzXTUaeN6eN0qWnTkKx4NQaH4Wvcmc153PdaY2UBdSYKaGYw+UyWXSVyxDUg5DoPEttjw==", - "dev": true, - "dependencies": { - "playwright-core": "1.56.1" - }, - "bin": { - "playwright": "cli.js" - }, - "engines": { - "node": ">=18" - }, - "optionalDependencies": { - "fsevents": "2.3.2" - } - }, - "node_modules/playwright-core": { - "version": "1.56.1", - "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.56.1.tgz", - "integrity": "sha512-hutraynyn31F+Bifme+Ps9Vq59hKuUCz7H1kDOcBs+2oGguKkWTU50bBWrtz34OUWmIwpBTWDxaRPXrIXkgvmQ==", - "dev": true, - "bin": { - "playwright-core": "cli.js" - }, - "engines": { - "node": ">=18" - } - }, - "node_modules/postcss": { - "version": "8.4.35", - "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.35.tgz", - "integrity": "sha512-u5U8qYpBCpN13BsiEB0CbR1Hhh4Gc0zLFuedrHJKMctHCHAGrMdG0PRM/KErzAL3CU6/eckEtmHNB3x6e3c0vA==", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/postcss/" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/postcss" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "peer": true, - "dependencies": { - "nanoid": "^3.3.7", - "picocolors": "^1.0.0", - "source-map-js": "^1.0.2" - }, - "engines": { - "node": "^10 || ^12 || >=14" - } - }, - "node_modules/postcss-loader": { - "version": "8.1.1", - "dev": true, - "license": "MIT", - "dependencies": { - "cosmiconfig": "^9.0.0", - "jiti": "^1.20.0", - "semver": "^7.5.4" - }, - "engines": { - "node": ">= 18.12.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependencies": { - "@rspack/core": "0.x || 1.x", - "postcss": "^7.0.0 || ^8.0.1", - "webpack": "^5.0.0" - }, - "peerDependenciesMeta": { - "@rspack/core": { - "optional": true - }, - "webpack": { - "optional": true - } - } - }, - "node_modules/postcss-loader/node_modules/jiti": { - "version": "1.21.7", - "resolved": "https://registry.npmjs.org/jiti/-/jiti-1.21.7.tgz", - "integrity": 
"sha512-/imKNG4EbWNrVjoNC/1H5/9GFy+tqjGBHCaSsN+P2RnPqjsLmv6UD3Ej+Kj8nBWaRAwyk7kK5ZUc+OEatnTR3A==", - "dev": true, - "bin": { - "jiti": "bin/jiti.js" - } - }, - "node_modules/postcss-media-query-parser": { - "version": "0.2.3", - "dev": true, - "license": "MIT" - }, - "node_modules/postcss-modules-extract-imports": { - "version": "3.1.0", - "dev": true, - "license": "ISC", - "engines": { - "node": "^10 || ^12 || >= 14" - }, - "peerDependencies": { - "postcss": "^8.1.0" - } - }, - "node_modules/postcss-modules-local-by-default": { - "version": "4.2.0", - "dev": true, - "license": "MIT", - "dependencies": { - "icss-utils": "^5.0.0", - "postcss-selector-parser": "^7.0.0", - "postcss-value-parser": "^4.1.0" - }, - "engines": { - "node": "^10 || ^12 || >= 14" - }, - "peerDependencies": { - "postcss": "^8.1.0" - } - }, - "node_modules/postcss-modules-local-by-default/node_modules/postcss-selector-parser": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.0.tgz", - "integrity": "sha512-8sLjZwK0R+JlxlYcTuVnyT2v+htpdrjDOKuMcOVdYjt52Lh8hWRYpxBPoKx/Zg+bcjc3wx6fmQevMmUztS/ccA==", - "dev": true, - "dependencies": { - "cssesc": "^3.0.0", - "util-deprecate": "^1.0.2" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/postcss-modules-scope": { - "version": "3.2.1", - "dev": true, - "license": "ISC", - "dependencies": { - "postcss-selector-parser": "^7.0.0" - }, - "engines": { - "node": "^10 || ^12 || >= 14" - }, - "peerDependencies": { - "postcss": "^8.1.0" - } - }, - "node_modules/postcss-modules-scope/node_modules/postcss-selector-parser": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.0.tgz", - "integrity": "sha512-8sLjZwK0R+JlxlYcTuVnyT2v+htpdrjDOKuMcOVdYjt52Lh8hWRYpxBPoKx/Zg+bcjc3wx6fmQevMmUztS/ccA==", - "dev": true, - "dependencies": { - "cssesc": "^3.0.0", - "util-deprecate": "^1.0.2" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/postcss-modules-values": { - "version": "4.0.0", - "dev": true, - "license": "ISC", - "dependencies": { - "icss-utils": "^5.0.0" - }, - "engines": { - "node": "^10 || ^12 || >= 14" - }, - "peerDependencies": { - "postcss": "^8.1.0" - } - }, - "node_modules/postcss-value-parser": { - "version": "4.2.0", - "dev": true, - "license": "MIT" - }, - "node_modules/postcss/node_modules/nanoid": { - "version": "3.3.11", - "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", - "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "bin": { - "nanoid": "bin/nanoid.cjs" - }, - "engines": { - "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" - } - }, - "node_modules/proc-log": { - "version": "3.0.0", - "dev": true, - "license": "ISC", - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/process-nextick-args": { - "version": "2.0.1", - "dev": true, - "license": "MIT" - }, - "node_modules/promise-inflight": { - "version": "1.0.1", - "dev": true, - "license": "ISC" - }, - "node_modules/promise-retry": { - "version": "2.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "err-code": "^2.0.2", - "retry": "^0.12.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/proxy-addr": { - "version": "2.0.7", - "dev": true, - "license": "MIT", - "dependencies": { - "forwarded": "0.2.0", - "ipaddr.js": 
"1.9.1" - }, - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/proxy-addr/node_modules/ipaddr.js": { - "version": "1.9.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.10" - } - }, - "node_modules/prr": { - "version": "1.0.1", - "dev": true, - "license": "MIT", - "optional": true - }, - "node_modules/punycode": { - "version": "1.4.1", - "dev": true, - "license": "MIT" - }, - "node_modules/qjobs": { - "version": "1.2.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.9" - } - }, - "node_modules/qs": { - "version": "6.13.0", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "side-channel": "^1.0.6" - }, - "engines": { - "node": ">=0.6" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/queue-microtask": { - "version": "1.2.3", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT" - }, - "node_modules/randombytes": { - "version": "2.1.0", - "dev": true, - "license": "MIT", - "dependencies": { - "safe-buffer": "^5.1.0" - } - }, - "node_modules/range-parser": { - "version": "1.2.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/raw-body": { - "version": "2.5.2", - "dev": true, - "license": "MIT", - "dependencies": { - "bytes": "3.1.2", - "http-errors": "2.0.0", - "iconv-lite": "0.4.24", - "unpipe": "1.0.0" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/read-package-json": { - "version": "7.0.1", - "dev": true, - "license": "ISC", - "dependencies": { - "glob": "^10.2.2", - "json-parse-even-better-errors": "^3.0.0", - "normalize-package-data": "^6.0.0", - "npm-normalize-package-bin": "^3.0.0" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/read-package-json-fast": { - "version": "3.0.2", - "dev": true, - "license": "ISC", - "dependencies": { - "json-parse-even-better-errors": "^3.0.0", - "npm-normalize-package-bin": "^3.0.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/read-package-json/node_modules/brace-expansion": { - "version": "2.0.2", - "dev": true, - "license": "MIT", - "dependencies": { - "balanced-match": "^1.0.0" - } - }, - "node_modules/read-package-json/node_modules/glob": { - "version": "10.4.5", - "dev": true, - "license": "ISC", - "dependencies": { - "foreground-child": "^3.1.0", - "jackspeak": "^3.1.2", - "minimatch": "^9.0.4", - "minipass": "^7.1.2", - "package-json-from-dist": "^1.0.0", - "path-scurry": "^1.11.1" - }, - "bin": { - "glob": "dist/esm/bin.mjs" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/read-package-json/node_modules/minimatch": { - "version": "9.0.5", - "dev": true, - "license": "ISC", - "dependencies": { - "brace-expansion": "^2.0.1" - }, - "engines": { - "node": ">=16 || 14 >=14.17" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/readdirp": { - "version": "3.6.0", - "dev": true, - "license": "MIT", - "dependencies": { - "picomatch": "^2.2.1" - }, - "engines": { - "node": ">=8.10.0" - } - }, - "node_modules/readdirp/node_modules/picomatch": { - "version": "2.3.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8.6" - }, - "funding": { - "url": "https://github.com/sponsors/jonschlinkert" - } - }, - 
"node_modules/reflect-metadata": { - "version": "0.2.2", - "dev": true, - "license": "Apache-2.0" - }, - "node_modules/regenerate": { - "version": "1.4.2", - "dev": true, - "license": "MIT" - }, - "node_modules/regenerator-runtime": { - "version": "0.14.1", - "dev": true, - "license": "MIT" - }, - "node_modules/regex-parser": { - "version": "2.3.1", - "dev": true, - "license": "MIT" - }, - "node_modules/regexpu-core": { - "version": "6.4.0", - "dev": true, - "license": "MIT", - "dependencies": { - "regenerate": "^1.4.2", - "regenerate-unicode-properties": "^10.2.2", - "regjsgen": "^0.8.0", - "regjsparser": "^0.13.0", - "unicode-match-property-ecmascript": "^2.0.0", - "unicode-match-property-value-ecmascript": "^2.2.1" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/regexpu-core/node_modules/regenerate-unicode-properties": { - "version": "10.2.2", - "resolved": "https://registry.npmjs.org/regenerate-unicode-properties/-/regenerate-unicode-properties-10.2.2.tgz", - "integrity": "sha512-m03P+zhBeQd1RGnYxrGyDAPpWX/epKirLrp8e3qevZdVkKtnCrjjWczIbYc8+xd6vcTStVlqfycTx1KR4LOr0g==", - "dev": true, - "dependencies": { - "regenerate": "^1.4.2" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/regjsgen": { - "version": "0.8.0", - "dev": true, - "license": "MIT" - }, - "node_modules/regjsparser": { - "version": "0.13.0", - "dev": true, - "license": "BSD-2-Clause", - "dependencies": { - "jsesc": "~3.1.0" - }, - "bin": { - "regjsparser": "bin/parser" - } - }, - "node_modules/require-directory": { - "version": "2.1.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/require-from-string": { - "version": "2.0.2", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/requires-port": { - "version": "1.0.0", - "dev": true, - "license": "MIT" - }, - "node_modules/resolve": { - "version": "1.22.8", - "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.8.tgz", - "integrity": "sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==", - "dev": true, - "dependencies": { - "is-core-module": "^2.13.0", - "path-parse": "^1.0.7", - "supports-preserve-symlinks-flag": "^1.0.0" - }, - "bin": { - "resolve": "bin/resolve" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/resolve-from": { - "version": "5.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/resolve-url-loader": { - "version": "5.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "adjust-sourcemap-loader": "^4.0.0", - "convert-source-map": "^1.7.0", - "loader-utils": "^2.0.0", - "postcss": "^8.2.14", - "source-map": "0.6.1" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/resolve-url-loader/node_modules/loader-utils": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-2.0.4.tgz", - "integrity": "sha512-xXqpXoINfFhgua9xiqD8fPFHgkoq1mmmpE92WlDbm9rNRd/EbRb+Gqf908T2DMfuHjjJlksiK2RbHVOdD/MqSw==", - "dev": true, - "dependencies": { - "big.js": "^5.2.2", - "emojis-list": "^3.0.0", - "json5": "^2.1.2" - }, - "engines": { - "node": ">=8.9.0" - } - }, - "node_modules/resolve-url-loader/node_modules/source-map": { - "version": "0.6.1", - "dev": true, - "license": "BSD-3-Clause", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/restore-cursor": { - "version": "3.1.0", - "dev": true, - "license": "MIT", - "dependencies": { - "onetime": "^5.1.0", - 
"signal-exit": "^3.0.2" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/retry": { - "version": "0.12.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 4" - } - }, - "node_modules/reusify": { - "version": "1.1.0", - "dev": true, - "license": "MIT", - "engines": { - "iojs": ">=1.0.0", - "node": ">=0.10.0" - } - }, - "node_modules/rfdc": { - "version": "1.4.1", - "dev": true, - "license": "MIT" - }, - "node_modules/rimraf": { - "version": "3.0.2", - "dev": true, - "license": "ISC", - "dependencies": { - "glob": "^7.1.3" - }, - "bin": { - "rimraf": "bin.js" - }, - "funding": { - "url": "https://github.com/sponsors/isaacs" - } - }, - "node_modules/run-async": { - "version": "3.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.12.0" - } - }, - "node_modules/run-parallel": { - "version": "1.2.0", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT", - "dependencies": { - "queue-microtask": "^1.2.2" - } - }, - "node_modules/rxjs": { - "version": "7.8.2", - "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.2.tgz", - "integrity": "sha512-dhKf903U/PQZY6boNNtAGdWbG85WAbjT/1xYoZIC7FAY0yWapOBQVsVrDl58W86//e1VpMNBtRV4MaXfdMySFA==", - "peer": true, - "dependencies": { - "tslib": "^2.1.0" - } - }, - "node_modules/safe-buffer": { - "version": "5.2.1", - "dev": true, - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ], - "license": "MIT" - }, - "node_modules/safe-regex-test": { - "version": "1.1.0", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.2", - "es-errors": "^1.3.0", - "is-regex": "^1.2.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/safer-buffer": { - "version": "2.1.2", - "dev": true, - "license": "MIT" - }, - "node_modules/sass": { - "version": "1.71.1", - "resolved": "https://registry.npmjs.org/sass/-/sass-1.71.1.tgz", - "integrity": "sha512-wovtnV2PxzteLlfNzbgm1tFXPLoZILYAMJtvoXXkD7/+1uP41eKkIt1ypWq5/q2uT94qHjXehEYfmjKOvjL9sg==", - "dev": true, - "peer": true, - "dependencies": { - "chokidar": ">=3.0.0 <4.0.0", - "immutable": "^4.0.0", - "source-map-js": ">=0.6.2 <2.0.0" - }, - "bin": { - "sass": "sass.js" - }, - "engines": { - "node": ">=14.0.0" - } - }, - "node_modules/sass-loader": { - "version": "14.1.1", - "dev": true, - "license": "MIT", - "dependencies": { - "neo-async": "^2.6.2" - }, - "engines": { - "node": ">= 18.12.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependencies": { - "@rspack/core": "0.x || 1.x", - "node-sass": "^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0 || ^9.0.0", - "sass": "^1.3.0", - "sass-embedded": "*", - "webpack": "^5.0.0" - }, - "peerDependenciesMeta": { - "@rspack/core": { - "optional": true - }, - "node-sass": { - "optional": true - }, - "sass": { - "optional": true - }, - "sass-embedded": { - "optional": true - }, - "webpack": { - "optional": true - } - } - }, - "node_modules/sass-loader/node_modules/neo-async": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", - "integrity": 
"sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==", - "dev": true - }, - "node_modules/sass/node_modules/immutable": { - "version": "4.3.7", - "resolved": "https://registry.npmjs.org/immutable/-/immutable-4.3.7.tgz", - "integrity": "sha512-1hqclzwYwjRDFLjcFxOM5AYkkG0rpFPpr1RLPMEuGczoS7YA8gLhy8SWXYRAA/XwfEHpfo3cw5JGioS32fnMRw==", - "dev": true - }, - "node_modules/sax": { - "version": "1.4.1", - "dev": true, - "license": "ISC", - "optional": true - }, - "node_modules/schema-utils": { - "version": "4.3.3", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/json-schema": "^7.0.9", - "ajv": "^8.9.0", - "ajv-formats": "^2.1.1", - "ajv-keywords": "^5.1.0" - }, - "engines": { - "node": ">= 10.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - } - }, - "node_modules/schema-utils/node_modules/ajv": { - "version": "8.17.1", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", - "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", - "dev": true, - "peer": true, - "dependencies": { - "fast-deep-equal": "^3.1.3", - "fast-uri": "^3.0.1", - "json-schema-traverse": "^1.0.0", - "require-from-string": "^2.0.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, - "node_modules/schema-utils/node_modules/ajv-keywords": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", - "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", - "dev": true, - "dependencies": { - "fast-deep-equal": "^3.1.3" - }, - "peerDependencies": { - "ajv": "^8.8.2" - } - }, - "node_modules/select-hose": { - "version": "2.0.0", - "dev": true, - "license": "MIT" - }, - "node_modules/selfsigned": { - "version": "2.4.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@types/node-forge": "^1.3.0", - "node-forge": "^1" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/selfsigned/node_modules/node-forge": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-1.3.1.tgz", - "integrity": "sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA==", - "dev": true, - "engines": { - "node": ">= 6.13.0" - } - }, - "node_modules/semver": { - "version": "7.6.0", - "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", - "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", - "dev": true, - "dependencies": { - "lru-cache": "^6.0.0" - }, - "bin": { - "semver": "bin/semver.js" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/semver/node_modules/lru-cache": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", - "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", - "dev": true, - "dependencies": { - "yallist": "^4.0.0" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/semver/node_modules/yallist": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", - "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", - "dev": true - }, - "node_modules/send": { - "version": "0.19.0", - "dev": true, - "license": "MIT", - "dependencies": { - 
"debug": "2.6.9", - "depd": "2.0.0", - "destroy": "1.2.0", - "encodeurl": "~1.0.2", - "escape-html": "~1.0.3", - "etag": "~1.8.1", - "fresh": "0.5.2", - "http-errors": "2.0.0", - "mime": "1.6.0", - "ms": "2.1.3", - "on-finished": "2.4.1", - "range-parser": "~1.2.1", - "statuses": "2.0.1" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/send/node_modules/debug": { - "version": "2.6.9", - "dev": true, - "license": "MIT", - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/send/node_modules/debug/node_modules/ms": { - "version": "2.0.0", - "dev": true, - "license": "MIT" - }, - "node_modules/send/node_modules/mime": { - "version": "1.6.0", - "dev": true, - "license": "MIT", - "bin": { - "mime": "cli.js" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/send/node_modules/statuses": { - "version": "2.0.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/serialize-javascript": { - "version": "6.0.2", - "dev": true, - "license": "BSD-3-Clause", - "dependencies": { - "randombytes": "^2.1.0" - } - }, - "node_modules/serve-index": { - "version": "1.9.1", - "dev": true, - "license": "MIT", - "dependencies": { - "accepts": "~1.3.4", - "batch": "0.6.1", - "debug": "2.6.9", - "escape-html": "~1.0.3", - "http-errors": "~1.6.2", - "mime-types": "~2.1.17", - "parseurl": "~1.3.2" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/serve-index/node_modules/debug": { - "version": "2.6.9", - "dev": true, - "license": "MIT", - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/serve-index/node_modules/depd": { - "version": "1.1.2", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/serve-index/node_modules/http-errors": { - "version": "1.6.3", - "dev": true, - "license": "MIT", - "dependencies": { - "depd": "~1.1.2", - "inherits": "2.0.3", - "setprototypeof": "1.1.0", - "statuses": ">= 1.4.0 < 2" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/serve-index/node_modules/inherits": { - "version": "2.0.3", - "dev": true, - "license": "ISC" - }, - "node_modules/serve-index/node_modules/ms": { - "version": "2.0.0", - "dev": true, - "license": "MIT" - }, - "node_modules/serve-index/node_modules/setprototypeof": { - "version": "1.1.0", - "dev": true, - "license": "ISC" - }, - "node_modules/serve-static": { - "version": "1.16.2", - "dev": true, - "license": "MIT", - "dependencies": { - "encodeurl": "~2.0.0", - "escape-html": "~1.0.3", - "parseurl": "~1.3.3", - "send": "0.19.0" - }, - "engines": { - "node": ">= 0.8.0" - } - }, - "node_modules/serve-static/node_modules/encodeurl": { - "version": "2.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/set-function-length": { - "version": "1.2.2", - "dev": true, - "license": "MIT", - "dependencies": { - "define-data-property": "^1.1.4", - "es-errors": "^1.3.0", - "function-bind": "^1.1.2", - "get-intrinsic": "^1.2.4", - "gopd": "^1.0.1", - "has-property-descriptors": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - } - }, - "node_modules/setprototypeof": { - "version": "1.2.0", - "dev": true, - "license": "ISC" - }, - "node_modules/shallow-clone": { - "version": "3.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "kind-of": "^6.0.2" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/shebang-command": { - "version": "2.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "shebang-regex": "^3.0.0" - }, - "engines": { - "node": ">=8" - 
} - }, - "node_modules/shebang-regex": { - "version": "3.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=8" - } - }, - "node_modules/shell-quote": { - "version": "1.8.3", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/side-channel": { - "version": "1.1.0", - "dev": true, - "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0", - "object-inspect": "^1.13.3", - "side-channel-list": "^1.0.0", - "side-channel-map": "^1.0.1", - "side-channel-weakmap": "^1.0.2" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/side-channel-list": { - "version": "1.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "es-errors": "^1.3.0", - "object-inspect": "^1.13.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/side-channel-list/node_modules/object-inspect": { - "version": "1.13.4", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", - "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", - "dev": true, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/side-channel-map": { - "version": "1.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.2", - "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.5", - "object-inspect": "^1.13.3" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/side-channel-map/node_modules/object-inspect": { - "version": "1.13.4", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", - "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", - "dev": true, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/side-channel-weakmap": { - "version": "1.0.2", - "dev": true, - "license": "MIT", - "dependencies": { - "call-bound": "^1.0.2", - "es-errors": "^1.3.0", - "get-intrinsic": "^1.2.5", - "object-inspect": "^1.13.3", - "side-channel-map": "^1.0.1" - }, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/side-channel-weakmap/node_modules/object-inspect": { - "version": "1.13.4", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", - "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", - "dev": true, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/side-channel/node_modules/object-inspect": { - "version": "1.13.4", - "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", - "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", - "dev": true, - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/signal-exit": { - "version": "3.0.7", - "dev": true, - "license": "ISC" - }, - "node_modules/sigstore": { - "version": "2.3.1", - "dev": true, - "license": "Apache-2.0", - 
"dependencies": { - "@sigstore/bundle": "^2.3.2", - "@sigstore/core": "^1.0.0", - "@sigstore/protobuf-specs": "^0.3.2", - "@sigstore/sign": "^2.3.2", - "@sigstore/tuf": "^2.3.4", - "@sigstore/verify": "^1.2.1" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/sigstore/node_modules/@sigstore/core": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@sigstore/core/-/core-1.1.0.tgz", - "integrity": "sha512-JzBqdVIyqm2FRQCulY6nbQzMpJJpSiJ8XXWMhtOX9eKgaXXpfNOF53lzQEjIydlStnd/eFtuC1dW4VYdD93oRg==", - "dev": true, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/sigstore/node_modules/@sigstore/protobuf-specs": { - "version": "0.3.3", - "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.3.3.tgz", - "integrity": "sha512-RpacQhBlwpBWd7KEJsRKcBQalbV28fvkxwTOJIqhIuDysMMaJW47V4OqW30iJB9uRpqOSxxEAQFdr8tTattReQ==", - "dev": true, - "engines": { - "node": "^18.17.0 || >=20.5.0" - } - }, - "node_modules/sigstore/node_modules/@sigstore/sign": { - "version": "2.3.2", - "resolved": "https://registry.npmjs.org/@sigstore/sign/-/sign-2.3.2.tgz", - "integrity": "sha512-5Vz5dPVuunIIvC5vBb0APwo7qKA4G9yM48kPWJT+OEERs40md5GoUR1yedwpekWZ4m0Hhw44m6zU+ObsON+iDA==", - "dev": true, - "dependencies": { - "@sigstore/bundle": "^2.3.2", - "@sigstore/core": "^1.0.0", - "@sigstore/protobuf-specs": "^0.3.2", - "make-fetch-happen": "^13.0.1", - "proc-log": "^4.2.0", - "promise-retry": "^2.0.1" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/sigstore/node_modules/@sigstore/verify": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/@sigstore/verify/-/verify-1.2.1.tgz", - "integrity": "sha512-8iKx79/F73DKbGfRf7+t4dqrc0bRr0thdPrxAtCKWRm/F0tG71i6O1rvlnScncJLLBZHn3h8M3c1BSUAb9yu8g==", - "dev": true, - "dependencies": { - "@sigstore/bundle": "^2.3.2", - "@sigstore/core": "^1.1.0", - "@sigstore/protobuf-specs": "^0.3.2" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/sigstore/node_modules/proc-log": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-4.2.0.tgz", - "integrity": "sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==", - "dev": true, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/slash": { - "version": "4.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/smart-buffer": { - "version": "4.2.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 6.0.0", - "npm": ">= 3.0.0" - } - }, - "node_modules/socket.io": { - "version": "4.8.1", - "dev": true, - "license": "MIT", - "dependencies": { - "accepts": "~1.3.4", - "base64id": "~2.0.0", - "cors": "~2.8.5", - "debug": "~4.3.2", - "engine.io": "~6.6.0", - "socket.io-adapter": "~2.5.2", - "socket.io-parser": "~4.2.4" - }, - "engines": { - "node": ">=10.2.0" - } - }, - "node_modules/socket.io-adapter": { - "version": "2.5.5", - "dev": true, - "license": "MIT", - "dependencies": { - "debug": "~4.3.4", - "ws": "~8.17.1" - } - }, - "node_modules/socket.io-adapter/node_modules/debug": { - "version": "4.3.7", - "dev": true, - "license": "MIT", - "dependencies": { - "ms": "^2.1.3" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/socket.io-parser": { - "version": "4.2.4", - "dev": true, 
- "license": "MIT", - "dependencies": { - "@socket.io/component-emitter": "~3.1.0", - "debug": "~4.3.1" - }, - "engines": { - "node": ">=10.0.0" - } - }, - "node_modules/socket.io-parser/node_modules/debug": { - "version": "4.3.7", - "dev": true, - "license": "MIT", - "dependencies": { - "ms": "^2.1.3" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/socket.io/node_modules/debug": { - "version": "4.3.7", - "dev": true, - "license": "MIT", - "dependencies": { - "ms": "^2.1.3" - }, - "engines": { - "node": ">=6.0" - }, - "peerDependenciesMeta": { - "supports-color": { - "optional": true - } - } - }, - "node_modules/socket.io/node_modules/engine.io": { - "version": "6.6.4", - "resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.6.4.tgz", - "integrity": "sha512-ZCkIjSYNDyGn0R6ewHDtXgns/Zre/NT6Agvq1/WobF7JXgFff4SeDroKiCO3fNJreU9YG429Sc81o4w5ok/W5g==", - "dev": true, - "dependencies": { - "@types/cors": "^2.8.12", - "@types/node": ">=10.0.0", - "accepts": "~1.3.4", - "base64id": "2.0.0", - "cookie": "~0.7.2", - "cors": "~2.8.5", - "debug": "~4.3.1", - "engine.io-parser": "~5.2.1", - "ws": "~8.17.1" - }, - "engines": { - "node": ">=10.2.0" - } - }, - "node_modules/sockjs": { - "version": "0.3.24", - "dev": true, - "license": "MIT", - "dependencies": { - "faye-websocket": "^0.11.3", - "uuid": "^8.3.2", - "websocket-driver": "^0.7.4" - } - }, - "node_modules/sockjs/node_modules/uuid": { - "version": "8.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", - "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", - "dev": true, - "bin": { - "uuid": "dist/bin/uuid" - } - }, - "node_modules/socks": { - "version": "2.8.7", - "dev": true, - "license": "MIT", - "dependencies": { - "ip-address": "^10.0.1", - "smart-buffer": "^4.2.0" - }, - "engines": { - "node": ">= 10.0.0", - "npm": ">= 3.0.0" - } - }, - "node_modules/socks-proxy-agent": { - "version": "8.0.5", - "dev": true, - "license": "MIT", - "dependencies": { - "agent-base": "^7.1.2", - "debug": "^4.3.4", - "socks": "^2.8.3" - }, - "engines": { - "node": ">= 14" - } - }, - "node_modules/source-map": { - "version": "0.7.4", - "dev": true, - "license": "BSD-3-Clause", - "engines": { - "node": ">= 8" - } - }, - "node_modules/source-map-js": { - "version": "1.2.1", - "dev": true, - "license": "BSD-3-Clause", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/source-map-loader": { - "version": "5.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "iconv-lite": "^0.6.3", - "source-map-js": "^1.0.2" - }, - "engines": { - "node": ">= 18.12.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependencies": { - "webpack": "^5.72.1" - } - }, - "node_modules/source-map-loader/node_modules/iconv-lite": { - "version": "0.6.3", - "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", - "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", - "dev": true, - "dependencies": { - "safer-buffer": ">= 2.1.2 < 3.0.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/source-map-support": { - "version": "0.5.21", - "dev": true, - "license": "MIT", - "dependencies": { - "buffer-from": "^1.0.0", - "source-map": "^0.6.0" - } - }, - "node_modules/source-map-support/node_modules/source-map": { - "version": "0.6.1", - "dev": true, - 
"license": "BSD-3-Clause", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/spdx-correct": { - "version": "3.2.0", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "spdx-expression-parse": "^3.0.0", - "spdx-license-ids": "^3.0.0" - } - }, - "node_modules/spdx-exceptions": { - "version": "2.5.0", - "dev": true, - "license": "CC-BY-3.0" - }, - "node_modules/spdx-expression-parse": { - "version": "3.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "spdx-exceptions": "^2.1.0", - "spdx-license-ids": "^3.0.0" - } - }, - "node_modules/spdx-license-ids": { - "version": "3.0.22", - "dev": true, - "license": "CC0-1.0" - }, - "node_modules/spdy": { - "version": "4.0.2", - "dev": true, - "license": "MIT", - "dependencies": { - "debug": "^4.1.0", - "handle-thing": "^2.0.0", - "http-deceiver": "^1.2.7", - "select-hose": "^2.0.0", - "spdy-transport": "^3.0.0" - }, - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/spdy-transport": { - "version": "3.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "debug": "^4.1.0", - "detect-node": "^2.0.4", - "hpack.js": "^2.1.6", - "obuf": "^1.1.2", - "readable-stream": "^3.0.6", - "wbuf": "^1.7.3" - } - }, - "node_modules/spdy-transport/node_modules/readable-stream": { - "version": "3.6.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", - "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", - "dev": true, - "dependencies": { - "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/sprintf-js": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", - "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", - "dev": true - }, - "node_modules/ssri": { - "version": "10.0.6", - "dev": true, - "license": "ISC", - "dependencies": { - "minipass": "^7.0.3" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/statuses": { - "version": "1.5.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/streamroller": { - "version": "3.1.5", - "dev": true, - "license": "MIT", - "dependencies": { - "date-format": "^4.0.14", - "debug": "^4.3.4", - "fs-extra": "^8.1.0" - }, - "engines": { - "node": ">=8.0" - } - }, - "node_modules/string_decoder": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", - "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", - "dev": true, - "dependencies": { - "safe-buffer": "~5.2.0" - } - }, - "node_modules/string-width": { - "version": "4.2.3", - "dev": true, - "license": "MIT", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/string-width-cjs": { - "name": "string-width", - "version": "4.2.3", - "dev": true, - "license": "MIT", - "dependencies": { - "emoji-regex": "^8.0.0", - "is-fullwidth-code-point": "^3.0.0", - "strip-ansi": "^6.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-ansi": { - "version": "6.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-ansi-cjs": { - 
"name": "strip-ansi", - "version": "6.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-regex": "^5.0.1" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/strip-final-newline": { - "version": "2.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - } - }, - "node_modules/supports-color": { - "version": "7.2.0", - "dev": true, - "license": "MIT", - "dependencies": { - "has-flag": "^4.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/supports-preserve-symlinks-flag": { - "version": "1.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4" - }, - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/symbol-observable": { - "version": "4.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10" - } - }, - "node_modules/tapable": { - "version": "2.3.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=6" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - } - }, - "node_modules/terser": { - "version": "5.29.1", - "resolved": "https://registry.npmjs.org/terser/-/terser-5.29.1.tgz", - "integrity": "sha512-lZQ/fyaIGxsbGxApKmoPTODIzELy3++mXhS5hOqaAWZjQtpq/hFHAc+rm29NND1rYRxRWKcjuARNwULNXa5RtQ==", - "dev": true, - "peer": true, - "dependencies": { - "@jridgewell/source-map": "^0.3.3", - "acorn": "^8.8.2", - "commander": "^2.20.0", - "source-map-support": "~0.5.20" - }, - "bin": { - "terser": "bin/terser" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/terser-webpack-plugin": { - "version": "5.3.14", - "dev": true, - "license": "MIT", - "dependencies": { - "@jridgewell/trace-mapping": "^0.3.25", - "jest-worker": "^27.4.5", - "schema-utils": "^4.3.0", - "serialize-javascript": "^6.0.2", - "terser": "^5.31.1" - }, - "engines": { - "node": ">= 10.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependencies": { - "webpack": "^5.1.0" - }, - "peerDependenciesMeta": { - "@swc/core": { - "optional": true - }, - "esbuild": { - "optional": true - }, - "uglify-js": { - "optional": true - } - } - }, - "node_modules/terser-webpack-plugin/node_modules/terser": { - "version": "5.44.0", - "resolved": "https://registry.npmjs.org/terser/-/terser-5.44.0.tgz", - "integrity": "sha512-nIVck8DK+GM/0Frwd+nIhZ84pR/BX7rmXMfYwyg+Sri5oGVE99/E3KvXqpC2xHFxyqXyGHTKBSioxxplrO4I4w==", - "dev": true, - "dependencies": { - "@jridgewell/source-map": "^0.3.3", - "acorn": "^8.15.0", - "commander": "^2.20.0", - "source-map-support": "~0.5.20" - }, - "bin": { - "terser": "bin/terser" - }, - "engines": { - "node": ">=10" - } - }, - "node_modules/test-exclude": { - "version": "6.0.0", - "dev": true, - "license": "ISC", - "dependencies": { - "@istanbuljs/schema": "^0.1.2", - "glob": "^7.1.4", - "minimatch": "^3.0.4" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/thunky": { - "version": "1.1.0", - "dev": true, - "license": "MIT" - }, - "node_modules/tmp": { - "version": "0.0.33", - "dev": true, - "license": "MIT", - "dependencies": { - "os-tmpdir": "~1.0.2" - }, - "engines": { - "node": ">=0.6.0" - } - }, - "node_modules/to-regex-range": { - "version": "5.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "is-number": "^7.0.0" - }, - "engines": { - "node": ">=8.0" - } - }, - "node_modules/toidentifier": { - "version": "1.0.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.6" - } - }, - "node_modules/tree-kill": { - "version": "1.2.2", 
- "dev": true, - "license": "MIT", - "bin": { - "tree-kill": "cli.js" - } - }, - "node_modules/tslib": { - "version": "2.8.1", - "license": "0BSD" - }, - "node_modules/tuf-js": { - "version": "2.2.1", - "dev": true, - "license": "MIT", - "dependencies": { - "@tufjs/models": "2.0.1", - "debug": "^4.3.4", - "make-fetch-happen": "^13.0.1" - }, - "engines": { - "node": "^16.14.0 || >=18.0.0" - } - }, - "node_modules/type-fest": { - "version": "0.21.3", - "dev": true, - "license": "(MIT OR CC0-1.0)", - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/type-is": { - "version": "1.6.18", - "dev": true, - "license": "MIT", - "dependencies": { - "media-typer": "0.3.0", - "mime-types": "~2.1.24" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/typed-assert": { - "version": "1.0.9", - "dev": true, - "license": "MIT" - }, - "node_modules/typescript": { - "version": "5.4.5", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.5.tgz", - "integrity": "sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==", - "dev": true, - "peer": true, - "bin": { - "tsc": "bin/tsc", - "tsserver": "bin/tsserver" - }, - "engines": { - "node": ">=14.17" - } - }, - "node_modules/ua-parser-js": { - "version": "0.7.41", - "dev": true, - "funding": [ - { - "type": "opencollective", - "url": "https://opencollective.com/ua-parser-js" - }, - { - "type": "paypal", - "url": "https://paypal.me/faisalman" - }, - { - "type": "github", - "url": "https://github.com/sponsors/faisalman" - } - ], - "license": "MIT", - "bin": { - "ua-parser-js": "script/cli.js" - }, - "engines": { - "node": "*" - } - }, - "node_modules/undici-types": { - "version": "7.16.0", - "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz", - "integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==", - "dev": true - }, - "node_modules/unicode-canonical-property-names-ecmascript": { - "version": "2.0.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=4" - } - }, - "node_modules/unicode-match-property-ecmascript": { - "version": "2.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "unicode-canonical-property-names-ecmascript": "^2.0.0", - "unicode-property-aliases-ecmascript": "^2.0.0" - }, - "engines": { - "node": ">=4" - } - }, - "node_modules/unicode-match-property-value-ecmascript": { - "version": "2.2.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=4" - } - }, - "node_modules/unicode-property-aliases-ecmascript": { - "version": "2.2.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=4" - } - }, - "node_modules/unique-filename": { - "version": "3.0.0", - "dev": true, - "license": "ISC", - "dependencies": { - "unique-slug": "^4.0.0" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/unique-slug": { - "version": "4.0.0", - "dev": true, - "license": "ISC", - "dependencies": { - "imurmurhash": "^0.1.4" - }, - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/universalify": { - "version": "0.1.2", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 4.0.0" - } - }, - "node_modules/unpipe": { - "version": "1.0.0", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/update-browserslist-db": { - "version": "1.1.3", - "dev": true, - "funding": [ - { - "type": 
"opencollective", - "url": "https://opencollective.com/browserslist" - }, - { - "type": "tidelift", - "url": "https://tidelift.com/funding/github/npm/browserslist" - }, - { - "type": "github", - "url": "https://github.com/sponsors/ai" - } - ], - "license": "MIT", - "dependencies": { - "escalade": "^3.2.0", - "picocolors": "^1.1.1" - }, - "bin": { - "update-browserslist-db": "cli.js" - }, - "peerDependencies": { - "browserslist": ">= 4.21.0" - } - }, - "node_modules/util-deprecate": { - "version": "1.0.2", - "dev": true, - "license": "MIT" - }, - "node_modules/utils-merge": { - "version": "1.0.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.4.0" - } - }, - "node_modules/validate-npm-package-license": { - "version": "3.0.4", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "spdx-correct": "^3.0.0", - "spdx-expression-parse": "^3.0.0" - } - }, - "node_modules/validate-npm-package-name": { - "version": "5.0.1", - "dev": true, - "license": "ISC", - "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" - } - }, - "node_modules/vary": { - "version": "1.1.2", - "dev": true, - "license": "MIT", - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/void-elements": { - "version": "2.0.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/watchpack": { - "version": "2.4.0", - "dev": true, - "license": "MIT", - "dependencies": { - "glob-to-regexp": "^0.4.1", - "graceful-fs": "^4.1.2" - }, - "engines": { - "node": ">=10.13.0" - } - }, - "node_modules/wbuf": { - "version": "1.7.3", - "dev": true, - "license": "MIT", - "dependencies": { - "minimalistic-assert": "^1.0.0" - } - }, - "node_modules/wcwidth": { - "version": "1.0.1", - "dev": true, - "license": "MIT", - "dependencies": { - "defaults": "^1.0.3" - } - }, - "node_modules/webpack": { - "version": "5.94.0", - "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.94.0.tgz", - "integrity": "sha512-KcsGn50VT+06JH/iunZJedYGUJS5FGjow8wb9c0v5n1Om8O1g4L6LjtfxwlXIATopoQu+vOXXa7gYisWxCoPyg==", - "dev": true, - "peer": true, - "dependencies": { - "@types/estree": "^1.0.5", - "@webassemblyjs/ast": "^1.12.1", - "@webassemblyjs/wasm-edit": "^1.12.1", - "@webassemblyjs/wasm-parser": "^1.12.1", - "acorn": "^8.7.1", - "acorn-import-attributes": "^1.9.5", - "browserslist": "^4.21.10", - "chrome-trace-event": "^1.0.2", - "enhanced-resolve": "^5.17.1", - "es-module-lexer": "^1.2.1", - "eslint-scope": "5.1.1", - "events": "^3.2.0", - "glob-to-regexp": "^0.4.1", - "graceful-fs": "^4.2.11", - "json-parse-even-better-errors": "^2.3.1", - "loader-runner": "^4.2.0", - "mime-types": "^2.1.27", - "neo-async": "^2.6.2", - "schema-utils": "^3.2.0", - "tapable": "^2.1.1", - "terser-webpack-plugin": "^5.3.10", - "watchpack": "^2.4.1", - "webpack-sources": "^3.2.3" - }, - "bin": { - "webpack": "bin/webpack.js" - }, - "engines": { - "node": ">=10.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependenciesMeta": { - "webpack-cli": { - "optional": true - } - } - }, - "node_modules/webpack-dev-middleware": { - "version": "6.1.2", - "dev": true, - "license": "MIT", - "dependencies": { - "colorette": "^2.0.10", - "memfs": "^3.4.12", - "mime-types": "^2.1.31", - "range-parser": "^1.2.1", - "schema-utils": "^4.0.0" - }, - "engines": { - "node": ">= 14.15.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependencies": { - "webpack": "^5.0.0" - }, - 
"peerDependenciesMeta": { - "webpack": { - "optional": true - } - } - }, - "node_modules/webpack-dev-middleware/node_modules/memfs": { - "version": "3.5.3", - "resolved": "https://registry.npmjs.org/memfs/-/memfs-3.5.3.tgz", - "integrity": "sha512-UERzLsxzllchadvbPs5aolHh65ISpKpM+ccLbOJ8/vvpBKmAWf+la7dXFy7Mr0ySHbdHrFv5kGFCUHHe6GFEmw==", - "dev": true, - "dependencies": { - "fs-monkey": "^1.0.4" - }, - "engines": { - "node": ">= 4.0.0" - } - }, - "node_modules/webpack-dev-server": { - "version": "4.15.1", - "resolved": "https://registry.npmjs.org/webpack-dev-server/-/webpack-dev-server-4.15.1.tgz", - "integrity": "sha512-5hbAst3h3C3L8w6W4P96L5vaV0PxSmJhxZvWKYIdgxOQm8pNZ5dEOmmSLBVpP85ReeyRt6AS1QJNyo/oFFPeVA==", - "dev": true, - "peer": true, - "dependencies": { - "@types/bonjour": "^3.5.9", - "@types/connect-history-api-fallback": "^1.3.5", - "@types/express": "^4.17.13", - "@types/serve-index": "^1.9.1", - "@types/serve-static": "^1.13.10", - "@types/sockjs": "^0.3.33", - "@types/ws": "^8.5.5", - "ansi-html-community": "^0.0.8", - "bonjour-service": "^1.0.11", - "chokidar": "^3.5.3", - "colorette": "^2.0.10", - "compression": "^1.7.4", - "connect-history-api-fallback": "^2.0.0", - "default-gateway": "^6.0.3", - "express": "^4.17.3", - "graceful-fs": "^4.2.6", - "html-entities": "^2.3.2", - "http-proxy-middleware": "^2.0.3", - "ipaddr.js": "^2.0.1", - "launch-editor": "^2.6.0", - "open": "^8.0.9", - "p-retry": "^4.5.0", - "rimraf": "^3.0.2", - "schema-utils": "^4.0.0", - "selfsigned": "^2.1.1", - "serve-index": "^1.9.1", - "sockjs": "^0.3.24", - "spdy": "^4.0.2", - "webpack-dev-middleware": "^5.3.1", - "ws": "^8.13.0" - }, - "bin": { - "webpack-dev-server": "bin/webpack-dev-server.js" - }, - "engines": { - "node": ">= 12.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependencies": { - "webpack": "^4.37.0 || ^5.0.0" - }, - "peerDependenciesMeta": { - "webpack": { - "optional": true - }, - "webpack-cli": { - "optional": true - } - } - }, - "node_modules/webpack-dev-server/node_modules/cookie": { - "version": "0.7.1", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.1.tgz", - "integrity": "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w==", - "dev": true, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/webpack-dev-server/node_modules/debug": { - "version": "2.6.9", - "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", - "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", - "dev": true, - "dependencies": { - "ms": "2.0.0" - } - }, - "node_modules/webpack-dev-server/node_modules/encodeurl": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", - "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", - "dev": true, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/webpack-dev-server/node_modules/express": { - "version": "4.21.2", - "resolved": "https://registry.npmjs.org/express/-/express-4.21.2.tgz", - "integrity": "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA==", - "dev": true, - "dependencies": { - "accepts": "~1.3.8", - "array-flatten": "1.1.1", - "body-parser": "1.20.3", - "content-disposition": "0.5.4", - "content-type": "~1.0.4", - "cookie": "0.7.1", - "cookie-signature": "1.0.6", - "debug": "2.6.9", - "depd": "2.0.0", - 
"encodeurl": "~2.0.0", - "escape-html": "~1.0.3", - "etag": "~1.8.1", - "finalhandler": "1.3.1", - "fresh": "0.5.2", - "http-errors": "2.0.0", - "merge-descriptors": "1.0.3", - "methods": "~1.1.2", - "on-finished": "2.4.1", - "parseurl": "~1.3.3", - "path-to-regexp": "0.1.12", - "proxy-addr": "~2.0.7", - "qs": "6.13.0", - "range-parser": "~1.2.1", - "safe-buffer": "5.2.1", - "send": "0.19.0", - "serve-static": "1.16.2", - "setprototypeof": "1.2.0", - "statuses": "2.0.1", - "type-is": "~1.6.18", - "utils-merge": "1.0.1", - "vary": "~1.1.2" - }, - "engines": { - "node": ">= 0.10.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/express" - } - }, - "node_modules/webpack-dev-server/node_modules/finalhandler": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.1.tgz", - "integrity": "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ==", - "dev": true, - "dependencies": { - "debug": "2.6.9", - "encodeurl": "~2.0.0", - "escape-html": "~1.0.3", - "on-finished": "2.4.1", - "parseurl": "~1.3.3", - "statuses": "2.0.1", - "unpipe": "~1.0.0" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/webpack-dev-server/node_modules/memfs": { - "version": "3.5.3", - "resolved": "https://registry.npmjs.org/memfs/-/memfs-3.5.3.tgz", - "integrity": "sha512-UERzLsxzllchadvbPs5aolHh65ISpKpM+ccLbOJ8/vvpBKmAWf+la7dXFy7Mr0ySHbdHrFv5kGFCUHHe6GFEmw==", - "dev": true, - "dependencies": { - "fs-monkey": "^1.0.4" - }, - "engines": { - "node": ">= 4.0.0" - } - }, - "node_modules/webpack-dev-server/node_modules/ms": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", - "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", - "dev": true - }, - "node_modules/webpack-dev-server/node_modules/statuses": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", - "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", - "dev": true, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/webpack-dev-server/node_modules/webpack-dev-middleware": { - "version": "5.3.4", - "resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-5.3.4.tgz", - "integrity": "sha512-BVdTqhhs+0IfoeAf7EoH5WE+exCmqGerHfDM0IL096Px60Tq2Mn9MAbnaGUe6HiMa41KMCYF19gyzZmBcq/o4Q==", - "dev": true, - "dependencies": { - "colorette": "^2.0.10", - "memfs": "^3.4.3", - "mime-types": "^2.1.31", - "range-parser": "^1.2.1", - "schema-utils": "^4.0.0" - }, - "engines": { - "node": ">= 12.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - }, - "peerDependencies": { - "webpack": "^4.0.0 || ^5.0.0" - } - }, - "node_modules/webpack-merge": { - "version": "5.10.0", - "dev": true, - "license": "MIT", - "dependencies": { - "clone-deep": "^4.0.1", - "flat": "^5.0.2", - "wildcard": "^2.0.0" - }, - "engines": { - "node": ">=10.0.0" - } - }, - "node_modules/webpack-subresource-integrity": { - "version": "5.1.0", - "dev": true, - "license": "MIT", - "dependencies": { - "typed-assert": "^1.0.8" - }, - "engines": { - "node": ">= 12" - }, - "peerDependencies": { - "html-webpack-plugin": ">= 5.0.0-beta.1 < 6", - "webpack": "^5.12.0" - }, - "peerDependenciesMeta": { - "html-webpack-plugin": { - "optional": true - } - } - }, - "node_modules/webpack/node_modules/@webassemblyjs/ast": { - 
"version": "1.14.1", - "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.14.1.tgz", - "integrity": "sha512-nuBEDgQfm1ccRp/8bCQrx1frohyufl4JlbMMZ4P1wpeOfDhF6FQkxZJ1b/e+PLwr6X1Nhw6OLme5usuBWYBvuQ==", - "dev": true, - "dependencies": { - "@webassemblyjs/helper-numbers": "1.13.2", - "@webassemblyjs/helper-wasm-bytecode": "1.13.2" - } - }, - "node_modules/webpack/node_modules/ajv": { - "version": "6.12.6", - "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", - "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", - "dev": true, - "peer": true, - "dependencies": { - "fast-deep-equal": "^3.1.1", - "fast-json-stable-stringify": "^2.0.0", - "json-schema-traverse": "^0.4.1", - "uri-js": "^4.2.2" - }, - "funding": { - "type": "github", - "url": "https://github.com/sponsors/epoberezkin" - } - }, - "node_modules/webpack/node_modules/ajv-keywords": { - "version": "3.5.2", - "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz", - "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", - "dev": true, - "peerDependencies": { - "ajv": "^6.9.1" - } - }, - "node_modules/webpack/node_modules/enhanced-resolve": { - "version": "5.18.3", - "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.18.3.tgz", - "integrity": "sha512-d4lC8xfavMeBjzGr2vECC3fsGXziXZQyJxD868h2M/mBI3PwAuODxAkLkq5HYuvrPYcUtiLzsTo8U3PgX3Ocww==", - "dev": true, - "dependencies": { - "graceful-fs": "^4.2.4", - "tapable": "^2.2.0" - }, - "engines": { - "node": ">=10.13.0" - } - }, - "node_modules/webpack/node_modules/json-parse-even-better-errors": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", - "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", - "dev": true - }, - "node_modules/webpack/node_modules/json-schema-traverse": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", - "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", - "dev": true - }, - "node_modules/webpack/node_modules/neo-async": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", - "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==", - "dev": true - }, - "node_modules/webpack/node_modules/punycode": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", - "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", - "dev": true, - "engines": { - "node": ">=6" - } - }, - "node_modules/webpack/node_modules/schema-utils": { - "version": "3.3.0", - "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.3.0.tgz", - "integrity": "sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg==", - "dev": true, - "dependencies": { - "@types/json-schema": "^7.0.8", - "ajv": "^6.12.5", - "ajv-keywords": "^3.5.2" - }, - "engines": { - "node": ">= 10.13.0" - }, - "funding": { - "type": "opencollective", - "url": "https://opencollective.com/webpack" - } - }, - "node_modules/webpack/node_modules/uri-js": { - "version": "4.4.1", - "resolved": 
"https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", - "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", - "dev": true, - "dependencies": { - "punycode": "^2.1.0" - } - }, - "node_modules/webpack/node_modules/watchpack": { - "version": "2.4.4", - "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.4.4.tgz", - "integrity": "sha512-c5EGNOiyxxV5qmTtAB7rbiXxi1ooX1pQKMLX/MIabJjRA0SJBQOjKF+KSVfHkr9U1cADPon0mRiVe/riyaiDUA==", - "dev": true, - "dependencies": { - "glob-to-regexp": "^0.4.1", - "graceful-fs": "^4.1.2" - }, - "engines": { - "node": ">=10.13.0" - } - }, - "node_modules/webpack/node_modules/webpack-sources": { - "version": "3.3.3", - "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.3.3.tgz", - "integrity": "sha512-yd1RBzSGanHkitROoPFd6qsrxt+oFhg/129YzheDGqeustzX0vTZJZsSsQjVQC4yzBQ56K55XU8gaNCtIzOnTg==", - "dev": true, - "engines": { - "node": ">=10.13.0" - } - }, - "node_modules/websocket-driver": { - "version": "0.7.4", - "dev": true, - "license": "Apache-2.0", - "dependencies": { - "http-parser-js": ">=0.5.1", - "safe-buffer": ">=5.1.0", - "websocket-extensions": ">=0.1.1" - }, - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/websocket-extensions": { - "version": "0.1.4", - "dev": true, - "license": "Apache-2.0", - "engines": { - "node": ">=0.8.0" - } - }, - "node_modules/which": { - "version": "1.3.1", - "dev": true, - "license": "ISC", - "dependencies": { - "isexe": "^2.0.0" - }, - "bin": { - "which": "bin/which" - } - }, - "node_modules/wildcard": { - "version": "2.0.1", - "dev": true, - "license": "MIT" - }, - "node_modules/wrap-ansi": { - "version": "6.2.0", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/wrap-ansi-cjs": { - "name": "wrap-ansi", - "version": "7.0.0", - "dev": true, - "license": "MIT", - "dependencies": { - "ansi-styles": "^4.0.0", - "string-width": "^4.1.0", - "strip-ansi": "^6.0.0" - }, - "engines": { - "node": ">=10" - }, - "funding": { - "url": "https://github.com/chalk/wrap-ansi?sponsor=1" - } - }, - "node_modules/wrappy": { - "version": "1.0.2", - "dev": true, - "license": "ISC" - }, - "node_modules/ws": { - "version": "8.17.1", - "dev": true, - "license": "MIT", - "engines": { - "node": ">=10.0.0" - }, - "peerDependencies": { - "bufferutil": "^4.0.1", - "utf-8-validate": ">=5.0.2" - }, - "peerDependenciesMeta": { - "bufferutil": { - "optional": true - }, - "utf-8-validate": { - "optional": true - } - } - }, - "node_modules/y18n": { - "version": "5.0.8", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=10" - } - }, - "node_modules/yallist": { - "version": "3.1.1", - "dev": true, - "license": "ISC" - }, - "node_modules/yargs": { - "version": "17.7.2", - "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", - "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", - "dev": true, - "dependencies": { - "cliui": "^8.0.1", - "escalade": "^3.1.1", - "get-caller-file": "^2.0.5", - "require-directory": "^2.1.1", - "string-width": "^4.2.3", - "y18n": "^5.0.5", - "yargs-parser": "^21.1.1" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/yargs-parser": { - "version": "21.1.1", - "dev": true, - "license": "ISC", - "engines": { - "node": ">=12" - } - }, - "node_modules/yocto-queue": { - "version": "1.2.1", - "dev": 
true, - "license": "MIT", - "engines": { - "node": ">=12.20" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/zone.js": { - "version": "0.14.10", - "resolved": "https://registry.npmjs.org/zone.js/-/zone.js-0.14.10.tgz", - "integrity": "sha512-YGAhaO7J5ywOXW6InXNlLmfU194F8lVgu7bRntUF3TiG8Y3nBK0x1UJJuHUP/e8IyihkjCYqhCScpSwnlaSRkQ==", - "peer": true - } - } -} +{ + "name": "stellaops-web", + "version": "0.0.0", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "name": "stellaops-web", + "version": "0.0.0", + "dependencies": { + "@angular/animations": "^17.3.0", + "@angular/common": "^17.3.0", + "@angular/compiler": "^17.3.0", + "@angular/core": "^17.3.0", + "@angular/forms": "^17.3.0", + "@angular/platform-browser": "^17.3.0", + "@angular/platform-browser-dynamic": "^17.3.0", + "@angular/router": "^17.3.0", + "rxjs": "~7.8.0", + "tslib": "^2.3.0", + "zone.js": "~0.14.3" + }, + "devDependencies": { + "@angular-devkit/build-angular": "^17.3.17", + "@angular/cli": "^17.3.17", + "@angular/compiler-cli": "^17.3.0", + "@playwright/test": "^1.47.2", + "@types/jasmine": "~5.1.0", + "jasmine-core": "~5.1.0", + "karma": "~6.4.0", + "karma-chrome-launcher": "~3.2.0", + "karma-coverage": "~2.2.0", + "karma-jasmine": "~5.1.0", + "karma-jasmine-html-reporter": "~2.1.0", + "typescript": "~5.4.2" + }, + "engines": { + "node": ">=20.11.0", + "npm": ">=10.2.0" + } + }, + "node_modules/@ampproject/remapping": { + "version": "2.3.0", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@angular-devkit/architect": { + "version": "0.1703.17", + "resolved": "https://registry.npmjs.org/@angular-devkit/architect/-/architect-0.1703.17.tgz", + "integrity": "sha512-LD6po8lGP2FI7WbnsSxtvpiIi+FYL0aNfteunkT+7po9jUNflBEYHA64UWNO56u7ryKNdbuiN8/TEh7FEUnmCw==", + "dev": true, + "dependencies": { + "@angular-devkit/core": "17.3.17", + "rxjs": "7.8.1" + }, + "engines": { + "node": "^18.13.0 || >=20.9.0", + "npm": "^6.11.0 || ^7.5.6 || >=8.0.0", + "yarn": ">= 1.13.0" + } + }, + "node_modules/@angular-devkit/architect/node_modules/rxjs": { + "version": "7.8.1", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.1.tgz", + "integrity": "sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg==", + "dev": true, + "dependencies": { + "tslib": "^2.1.0" + } + }, + "node_modules/@angular-devkit/build-angular": { + "version": "17.3.17", + "resolved": "https://registry.npmjs.org/@angular-devkit/build-angular/-/build-angular-17.3.17.tgz", + "integrity": "sha512-0kLVwjLZ5v4uIaG0K6sHJxxppS0bvjNmxHkbybU8FBW3r5MOBQh/ApsiCQKQQ8GBrQz9qSJvLJH8lsb/uR8aPQ==", + "dev": true, + "dependencies": { + "@ampproject/remapping": "2.3.0", + "@angular-devkit/architect": "0.1703.17", + "@angular-devkit/build-webpack": "0.1703.17", + "@angular-devkit/core": "17.3.17", + "@babel/core": "7.26.10", + "@babel/generator": "7.26.10", + "@babel/helper-annotate-as-pure": "7.25.9", + "@babel/helper-split-export-declaration": "7.24.7", + "@babel/plugin-transform-async-generator-functions": "7.26.8", + "@babel/plugin-transform-async-to-generator": "7.25.9", + "@babel/plugin-transform-runtime": "7.26.10", + "@babel/preset-env": "7.26.9", + "@babel/runtime": "7.26.10", + "@discoveryjs/json-ext": "0.5.7", + "@ngtools/webpack": "17.3.17", + "@vitejs/plugin-basic-ssl": "1.1.0", + "ansi-colors": "4.1.3", + 
"autoprefixer": "10.4.18", + "babel-loader": "9.1.3", + "babel-plugin-istanbul": "6.1.1", + "browserslist": "^4.21.5", + "copy-webpack-plugin": "11.0.0", + "critters": "0.0.22", + "css-loader": "6.10.0", + "esbuild-wasm": "0.20.1", + "fast-glob": "3.3.2", + "http-proxy-middleware": "2.0.8", + "https-proxy-agent": "7.0.4", + "inquirer": "9.2.15", + "jsonc-parser": "3.2.1", + "karma-source-map-support": "1.4.0", + "less": "4.2.0", + "less-loader": "11.1.0", + "license-webpack-plugin": "4.0.2", + "loader-utils": "3.2.1", + "magic-string": "0.30.8", + "mini-css-extract-plugin": "2.8.1", + "mrmime": "2.0.0", + "open": "8.4.2", + "ora": "5.4.1", + "parse5-html-rewriting-stream": "7.0.0", + "picomatch": "4.0.1", + "piscina": "4.4.0", + "postcss": "8.4.35", + "postcss-loader": "8.1.1", + "resolve-url-loader": "5.0.0", + "rxjs": "7.8.1", + "sass": "1.71.1", + "sass-loader": "14.1.1", + "semver": "7.6.0", + "source-map-loader": "5.0.0", + "source-map-support": "0.5.21", + "terser": "5.29.1", + "tree-kill": "1.2.2", + "tslib": "2.6.2", + "vite": "~5.4.17", + "watchpack": "2.4.0", + "webpack": "5.94.0", + "webpack-dev-middleware": "6.1.2", + "webpack-dev-server": "4.15.1", + "webpack-merge": "5.10.0", + "webpack-subresource-integrity": "5.1.0" + }, + "engines": { + "node": "^18.13.0 || >=20.9.0", + "npm": "^6.11.0 || ^7.5.6 || >=8.0.0", + "yarn": ">= 1.13.0" + }, + "optionalDependencies": { + "esbuild": "0.20.1" + }, + "peerDependencies": { + "@angular/compiler-cli": "^17.0.0", + "@angular/localize": "^17.0.0", + "@angular/platform-server": "^17.0.0", + "@angular/service-worker": "^17.0.0", + "@web/test-runner": "^0.18.0", + "browser-sync": "^3.0.2", + "jest": "^29.5.0", + "jest-environment-jsdom": "^29.5.0", + "karma": "^6.3.0", + "ng-packagr": "^17.0.0", + "protractor": "^7.0.0", + "tailwindcss": "^2.0.0 || ^3.0.0", + "typescript": ">=5.2 <5.5" + }, + "peerDependenciesMeta": { + "@angular/localize": { + "optional": true + }, + "@angular/platform-server": { + "optional": true + }, + "@angular/service-worker": { + "optional": true + }, + "@web/test-runner": { + "optional": true + }, + "browser-sync": { + "optional": true + }, + "jest": { + "optional": true + }, + "jest-environment-jsdom": { + "optional": true + }, + "karma": { + "optional": true + }, + "ng-packagr": { + "optional": true + }, + "protractor": { + "optional": true + }, + "tailwindcss": { + "optional": true + } + } + }, + "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/aix-ppc64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz", + "integrity": "sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/android-arm": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.21.5.tgz", + "integrity": "sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/android-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz", + "integrity": 
"sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/android-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.21.5.tgz", + "integrity": "sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/darwin-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz", + "integrity": "sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/darwin-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz", + "integrity": "sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/freebsd-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz", + "integrity": "sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/freebsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz", + "integrity": "sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/linux-arm": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz", + "integrity": "sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/linux-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz", + "integrity": "sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/linux-ia32": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz", + "integrity": 
"sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/linux-loong64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz", + "integrity": "sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==", + "cpu": [ + "loong64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/linux-mips64el": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz", + "integrity": "sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==", + "cpu": [ + "mips64el" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/linux-ppc64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz", + "integrity": "sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/linux-riscv64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz", + "integrity": "sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==", + "cpu": [ + "riscv64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/linux-s390x": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz", + "integrity": "sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==", + "cpu": [ + "s390x" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/linux-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz", + "integrity": "sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/netbsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz", + "integrity": "sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/openbsd-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz", + "integrity": 
"sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/sunos-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz", + "integrity": "sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/win32-arm64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz", + "integrity": "sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/win32-ia32": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz", + "integrity": "sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@angular-devkit/build-angular/node_modules/@esbuild/win32-x64": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz", + "integrity": "sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.52.5.tgz", + "integrity": "sha512-8c1vW4ocv3UOMp9K+gToY5zL2XiiVw3k7f1ksf4yO1FlDFQ1C2u72iACFnSOceJFsWskc2WZNqeRhFRPzv+wtQ==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-android-arm64": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.52.5.tgz", + "integrity": "sha512-mQGfsIEFcu21mvqkEKKu2dYmtuSZOBMmAl5CFlPGLY94Vlcm+zWApK7F/eocsNzp8tKmbeBP8yXyAbx0XHsFNA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.52.5.tgz", + "integrity": "sha512-takF3CR71mCAGA+v794QUZ0b6ZSrgJkArC+gUiG6LB6TQty9T0Mqh3m2ImRBOxS2IeYBo4lKWIieSvnEk2OQWA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-darwin-x64": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.52.5.tgz", + "integrity": 
"sha512-W901Pla8Ya95WpxDn//VF9K9u2JbocwV/v75TE0YIHNTbhqUTv9w4VuQ9MaWlNOkkEfFwkdNhXgcLqPSmHy0fA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-freebsd-arm64": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.52.5.tgz", + "integrity": "sha512-QofO7i7JycsYOWxe0GFqhLmF6l1TqBswJMvICnRUjqCx8b47MTo46W8AoeQwiokAx3zVryVnxtBMcGcnX12LvA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-freebsd-x64": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.52.5.tgz", + "integrity": "sha512-jr21b/99ew8ujZubPo9skbrItHEIE50WdV86cdSoRkKtmWa+DDr6fu2c/xyRT0F/WazZpam6kk7IHBerSL7LDQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ] + }, + "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.52.5.tgz", + "integrity": "sha512-PsNAbcyv9CcecAUagQefwX8fQn9LQ4nZkpDboBOttmyffnInRy8R8dSg6hxxl2Re5QhHBf6FYIDhIj5v982ATQ==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.52.5.tgz", + "integrity": "sha512-Fw4tysRutyQc/wwkmcyoqFtJhh0u31K+Q6jYjeicsGJJ7bbEq8LwPWV/w0cnzOqR2m694/Af6hpFayLJZkG2VQ==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.52.5.tgz", + "integrity": "sha512-a+3wVnAYdQClOTlyapKmyI6BLPAFYs0JM8HRpgYZQO02rMR09ZcV9LbQB+NL6sljzG38869YqThrRnfPMCDtZg==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.52.5.tgz", + "integrity": "sha512-AvttBOMwO9Pcuuf7m9PkC1PUIKsfaAJ4AYhy944qeTJgQOqJYJ9oVl2nYgY7Rk0mkbsuOpCAYSs6wLYB2Xiw0Q==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-linux-loong64-gnu": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-loong64-gnu/-/rollup-linux-loong64-gnu-4.52.5.tgz", + "integrity": "sha512-DkDk8pmXQV2wVrF6oq5tONK6UHLz/XcEVow4JTTerdeV1uqPeHxwcg7aFsfnSm9L+OO8WJsWotKM2JJPMWrQtA==", + "cpu": [ + "loong64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-linux-ppc64-gnu": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-ppc64-gnu/-/rollup-linux-ppc64-gnu-4.52.5.tgz", + "integrity": 
"sha512-W/b9ZN/U9+hPQVvlGwjzi+Wy4xdoH2I8EjaCkMvzpI7wJUs8sWJ03Rq96jRnHkSrcHTpQe8h5Tg3ZzUPGauvAw==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.52.5.tgz", + "integrity": "sha512-sjQLr9BW7R/ZiXnQiWPkErNfLMkkWIoCz7YMn27HldKsADEKa5WYdobaa1hmN6slu9oWQbB6/jFpJ+P2IkVrmw==", + "cpu": [ + "riscv64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-linux-riscv64-musl": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-musl/-/rollup-linux-riscv64-musl-4.52.5.tgz", + "integrity": "sha512-hq3jU/kGyjXWTvAh2awn8oHroCbrPm8JqM7RUpKjalIRWWXE01CQOf/tUNWNHjmbMHg/hmNCwc/Pz3k1T/j/Lg==", + "cpu": [ + "riscv64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.52.5.tgz", + "integrity": "sha512-gn8kHOrku8D4NGHMK1Y7NA7INQTRdVOntt1OCYypZPRt6skGbddska44K8iocdpxHTMMNui5oH4elPH4QOLrFQ==", + "cpu": [ + "s390x" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-openharmony-arm64": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-openharmony-arm64/-/rollup-openharmony-arm64-4.52.5.tgz", + "integrity": "sha512-QoFqB6+/9Rly/RiPjaomPLmR/13cgkIGfA40LHly9zcH1S0bN2HVFYk3a1eAyHQyjs3ZJYlXvIGtcCs5tko9Cw==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "openharmony" + ] + }, + "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.52.5.tgz", + "integrity": "sha512-w0cDWVR6MlTstla1cIfOGyl8+qb93FlAVutcor14Gf5Md5ap5ySfQ7R9S/NjNaMLSFdUnKGEasmVnu3lCMqB7w==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.52.5.tgz", + "integrity": "sha512-Aufdpzp7DpOTULJCuvzqcItSGDH73pF3ko/f+ckJhxQyHtp67rHw3HMNxoIdDMUITJESNE6a8uh4Lo4SLouOUg==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-win32-x64-gnu": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-gnu/-/rollup-win32-x64-gnu-4.52.5.tgz", + "integrity": "sha512-UGBUGPFp1vkj6p8wCRraqNhqwX/4kNQPS57BCFc8wYh0g94iVIW33wJtQAx3G7vrjjNtRaxiMUylM0ktp/TRSQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@angular-devkit/build-angular/node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.52.5.tgz", + "integrity": 
"sha512-TAcgQh2sSkykPRWLrdyy2AiceMckNf5loITqXxFI5VuQjS5tSuw3WlwdN8qv8vzjLAUTvYaH/mVjSFpbkFbpTg==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@angular-devkit/build-angular/node_modules/@vitejs/plugin-basic-ssl": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@vitejs/plugin-basic-ssl/-/plugin-basic-ssl-1.1.0.tgz", + "integrity": "sha512-wO4Dk/rm8u7RNhOf95ZzcEmC9rYOncYgvq4z3duaJrCgjN8BxAnDVyndanfcJZ0O6XZzHz6Q0hTimxTg8Y9g/A==", + "dev": true, + "engines": { + "node": ">=14.6.0" + }, + "peerDependencies": { + "vite": "^3.0.0 || ^4.0.0 || ^5.0.0" + } + }, + "node_modules/@angular-devkit/build-angular/node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/@angular-devkit/build-angular/node_modules/nanoid": { + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/@angular-devkit/build-angular/node_modules/rollup": { + "version": "4.52.5", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.52.5.tgz", + "integrity": "sha512-3GuObel8h7Kqdjt0gxkEzaifHTqLVW56Y/bjN7PSQtkKr0w3V/QYSdt6QWYtd7A1xUtYQigtdUfgj1RvWVtorw==", + "dev": true, + "dependencies": { + "@types/estree": "1.0.8" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.52.5", + "@rollup/rollup-android-arm64": "4.52.5", + "@rollup/rollup-darwin-arm64": "4.52.5", + "@rollup/rollup-darwin-x64": "4.52.5", + "@rollup/rollup-freebsd-arm64": "4.52.5", + "@rollup/rollup-freebsd-x64": "4.52.5", + "@rollup/rollup-linux-arm-gnueabihf": "4.52.5", + "@rollup/rollup-linux-arm-musleabihf": "4.52.5", + "@rollup/rollup-linux-arm64-gnu": "4.52.5", + "@rollup/rollup-linux-arm64-musl": "4.52.5", + "@rollup/rollup-linux-loong64-gnu": "4.52.5", + "@rollup/rollup-linux-ppc64-gnu": "4.52.5", + "@rollup/rollup-linux-riscv64-gnu": "4.52.5", + "@rollup/rollup-linux-riscv64-musl": "4.52.5", + "@rollup/rollup-linux-s390x-gnu": "4.52.5", + "@rollup/rollup-linux-x64-gnu": "4.52.5", + "@rollup/rollup-linux-x64-musl": "4.52.5", + "@rollup/rollup-openharmony-arm64": "4.52.5", + "@rollup/rollup-win32-arm64-msvc": "4.52.5", + "@rollup/rollup-win32-ia32-msvc": "4.52.5", + "@rollup/rollup-win32-x64-gnu": "4.52.5", + "@rollup/rollup-win32-x64-msvc": "4.52.5", + "fsevents": "~2.3.2" + } + }, + "node_modules/@angular-devkit/build-angular/node_modules/rxjs": { + "version": "7.8.1", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.1.tgz", + "integrity": "sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg==", + "dev": true, + "dependencies": { + "tslib": "^2.1.0" + } + }, + "node_modules/@angular-devkit/build-angular/node_modules/tslib": { + "version": "2.6.2", + "dev": true, + "license": 
"0BSD" + }, + "node_modules/@angular-devkit/build-angular/node_modules/vite": { + "version": "5.4.21", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.4.21.tgz", + "integrity": "sha512-o5a9xKjbtuhY6Bi5S3+HvbRERmouabWbyUcpXXUA1u+GNUKoROi9byOJ8M0nHbHYHkYICiMlqxkg1KkYmm25Sw==", + "dev": true, + "peer": true, + "dependencies": { + "esbuild": "^0.21.3", + "postcss": "^8.4.43", + "rollup": "^4.20.0" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^18.0.0 || >=20.0.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^18.0.0 || >=20.0.0", + "less": "*", + "lightningcss": "^1.21.0", + "sass": "*", + "sass-embedded": "*", + "stylus": "*", + "sugarss": "*", + "terser": "^5.4.0" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + } + } + }, + "node_modules/@angular-devkit/build-angular/node_modules/vite/node_modules/esbuild": { + "version": "0.21.5", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.21.5.tgz", + "integrity": "sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==", + "dev": true, + "hasInstallScript": true, + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=12" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.21.5", + "@esbuild/android-arm": "0.21.5", + "@esbuild/android-arm64": "0.21.5", + "@esbuild/android-x64": "0.21.5", + "@esbuild/darwin-arm64": "0.21.5", + "@esbuild/darwin-x64": "0.21.5", + "@esbuild/freebsd-arm64": "0.21.5", + "@esbuild/freebsd-x64": "0.21.5", + "@esbuild/linux-arm": "0.21.5", + "@esbuild/linux-arm64": "0.21.5", + "@esbuild/linux-ia32": "0.21.5", + "@esbuild/linux-loong64": "0.21.5", + "@esbuild/linux-mips64el": "0.21.5", + "@esbuild/linux-ppc64": "0.21.5", + "@esbuild/linux-riscv64": "0.21.5", + "@esbuild/linux-s390x": "0.21.5", + "@esbuild/linux-x64": "0.21.5", + "@esbuild/netbsd-x64": "0.21.5", + "@esbuild/openbsd-x64": "0.21.5", + "@esbuild/sunos-x64": "0.21.5", + "@esbuild/win32-arm64": "0.21.5", + "@esbuild/win32-ia32": "0.21.5", + "@esbuild/win32-x64": "0.21.5" + } + }, + "node_modules/@angular-devkit/build-angular/node_modules/vite/node_modules/postcss": { + "version": "8.5.6", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.5.6.tgz", + "integrity": "sha512-3Ybi1tAuwAP9s0r1UQ2J4n5Y0G05bJkpUIO0/bI9MhwmD70S5aTWbXGBwxHrelT+XM1k6dM0pk+SwNkpTRN7Pg==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "nanoid": "^3.3.11", + "picocolors": "^1.1.1", + "source-map-js": "^1.2.1" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/@angular-devkit/build-webpack": { + "version": "0.1703.17", + "dev": true, + "license": "MIT", + "dependencies": { + "@angular-devkit/architect": "0.1703.17", + "rxjs": "7.8.1" + }, + "engines": { + "node": "^18.13.0 || >=20.9.0", + "npm": "^6.11.0 || ^7.5.6 || >=8.0.0", + "yarn": ">= 1.13.0" + }, + "peerDependencies": { + 
"webpack": "^5.30.0", + "webpack-dev-server": "^4.0.0" + } + }, + "node_modules/@angular-devkit/build-webpack/node_modules/rxjs": { + "version": "7.8.1", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.1.tgz", + "integrity": "sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg==", + "dev": true, + "dependencies": { + "tslib": "^2.1.0" + } + }, + "node_modules/@angular-devkit/core": { + "version": "17.3.17", + "resolved": "https://registry.npmjs.org/@angular-devkit/core/-/core-17.3.17.tgz", + "integrity": "sha512-7aNVqS3rOGsSZYAOO44xl2KURwaoOP+EJhJs+LqOGOFpok2kd8YLf4CAMUossMF4H7HsJpgKwYqGrV5eXunrpw==", + "dev": true, + "dependencies": { + "ajv": "8.12.0", + "ajv-formats": "2.1.1", + "jsonc-parser": "3.2.1", + "picomatch": "4.0.1", + "rxjs": "7.8.1", + "source-map": "0.7.4" + }, + "engines": { + "node": "^18.13.0 || >=20.9.0", + "npm": "^6.11.0 || ^7.5.6 || >=8.0.0", + "yarn": ">= 1.13.0" + }, + "peerDependencies": { + "chokidar": "^3.5.2" + }, + "peerDependenciesMeta": { + "chokidar": { + "optional": true + } + } + }, + "node_modules/@angular-devkit/core/node_modules/ajv": { + "version": "8.12.0", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.12.0.tgz", + "integrity": "sha512-sRu1kpcO9yLtYxBKvqfTeh9KzZEwO3STyX1HT+4CaDzC6HpTGYhIhPIzj9XuKU7KYDwnaeh5hcOwjy1QuJzBPA==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/@angular-devkit/core/node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/@angular-devkit/core/node_modules/rxjs": { + "version": "7.8.1", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.1.tgz", + "integrity": "sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg==", + "dev": true, + "dependencies": { + "tslib": "^2.1.0" + } + }, + "node_modules/@angular-devkit/core/node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/@angular-devkit/schematics": { + "version": "17.3.17", + "resolved": "https://registry.npmjs.org/@angular-devkit/schematics/-/schematics-17.3.17.tgz", + "integrity": "sha512-ZXsIJXZm0I0dNu1BqmjfEtQhnzqoupUHHZb4GHm5NeQHBFZctQlkkNxLUU27GVeBUwFgEmP7kFgSLlMPTGSL5g==", + "dev": true, + "dependencies": { + "@angular-devkit/core": "17.3.17", + "jsonc-parser": "3.2.1", + "magic-string": "0.30.8", + "ora": "5.4.1", + "rxjs": "7.8.1" + }, + "engines": { + "node": "^18.13.0 || >=20.9.0", + "npm": "^6.11.0 || ^7.5.6 || >=8.0.0", + "yarn": ">= 1.13.0" + } + }, + "node_modules/@angular-devkit/schematics/node_modules/rxjs": { + "version": "7.8.1", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.1.tgz", + "integrity": "sha512-AA3TVj+0A2iuIoQkWEK/tqFjBq2j+6PO6Y0zJcvzLAFhEFIO3HL0vls9hWLncZbAAbK0mar7oZ4V079I/qPMxg==", + "dev": true, + "dependencies": { + "tslib": "^2.1.0" + } + }, + "node_modules/@angular/animations": { + "version": "17.3.12", 
+ "license": "MIT", + "peer": true, + "dependencies": { + "tslib": "^2.3.0" + }, + "engines": { + "node": "^18.13.0 || >=20.9.0" + }, + "peerDependencies": { + "@angular/core": "17.3.12" + } + }, + "node_modules/@angular/cli": { + "version": "17.3.17", + "resolved": "https://registry.npmjs.org/@angular/cli/-/cli-17.3.17.tgz", + "integrity": "sha512-FgOvf9q5d23Cpa7cjP1FYti/v8S1FTm8DEkW3TY8lkkoxh3isu28GFKcLD1p/XF3yqfPkPVHToOFla5QwsEgBQ==", + "dev": true, + "dependencies": { + "@angular-devkit/architect": "0.1703.17", + "@angular-devkit/core": "17.3.17", + "@angular-devkit/schematics": "17.3.17", + "@schematics/angular": "17.3.17", + "@yarnpkg/lockfile": "1.1.0", + "ansi-colors": "4.1.3", + "ini": "4.1.2", + "inquirer": "9.2.15", + "jsonc-parser": "3.2.1", + "npm-package-arg": "11.0.1", + "npm-pick-manifest": "9.0.0", + "open": "8.4.2", + "ora": "5.4.1", + "pacote": "17.0.6", + "resolve": "1.22.8", + "semver": "7.6.0", + "symbol-observable": "4.0.0", + "yargs": "17.7.2" + }, + "bin": { + "ng": "bin/ng.js" + }, + "engines": { + "node": "^18.13.0 || >=20.9.0", + "npm": "^6.11.0 || ^7.5.6 || >=8.0.0", + "yarn": ">= 1.13.0" + } + }, + "node_modules/@angular/common": { + "version": "17.3.12", + "resolved": "https://registry.npmjs.org/@angular/common/-/common-17.3.12.tgz", + "integrity": "sha512-vabJzvrx76XXFrm1RJZ6o/CyG32piTB/1sfFfKHdlH1QrmArb8It4gyk9oEjZ1IkAD0HvBWlfWmn+T6Vx3pdUw==", + "peer": true, + "dependencies": { + "tslib": "^2.3.0" + }, + "engines": { + "node": "^18.13.0 || >=20.9.0" + }, + "peerDependencies": { + "@angular/core": "17.3.12", + "rxjs": "^6.5.3 || ^7.4.0" + } + }, + "node_modules/@angular/compiler": { + "version": "17.3.12", + "license": "MIT", + "peer": true, + "dependencies": { + "tslib": "^2.3.0" + }, + "engines": { + "node": "^18.13.0 || >=20.9.0" + }, + "peerDependencies": { + "@angular/core": "17.3.12" + }, + "peerDependenciesMeta": { + "@angular/core": { + "optional": true + } + } + }, + "node_modules/@angular/compiler-cli": { + "version": "17.3.12", + "resolved": "https://registry.npmjs.org/@angular/compiler-cli/-/compiler-cli-17.3.12.tgz", + "integrity": "sha512-1F8M7nWfChzurb7obbvuE7mJXlHtY1UG58pcwcomVtpPb+kPavgAO8OEvJHYBMV+bzSxkXt5UIwL9lt9jHUxZA==", + "dev": true, + "peer": true, + "dependencies": { + "@babel/core": "7.23.9", + "@jridgewell/sourcemap-codec": "^1.4.14", + "chokidar": "^3.0.0", + "convert-source-map": "^1.5.1", + "reflect-metadata": "^0.2.0", + "semver": "^7.0.0", + "tslib": "^2.3.0", + "yargs": "^17.2.1" + }, + "bin": { + "ng-xi18n": "bundles/src/bin/ng_xi18n.js", + "ngc": "bundles/src/bin/ngc.js", + "ngcc": "bundles/ngcc/index.js" + }, + "engines": { + "node": "^18.13.0 || >=20.9.0" + }, + "peerDependencies": { + "@angular/compiler": "17.3.12", + "typescript": ">=5.2 <5.5" + } + }, + "node_modules/@angular/compiler-cli/node_modules/@babel/core": { + "version": "7.23.9", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.23.9.tgz", + "integrity": "sha512-5q0175NOjddqpvvzU+kDiSOAk4PfdO6FvwCWoQ6RO7rTzEe8vlo+4HVfcnAREhD4npMs0e9uZypjTwzZPCf/cw==", + "dev": true, + "dependencies": { + "@ampproject/remapping": "^2.2.0", + "@babel/code-frame": "^7.23.5", + "@babel/generator": "^7.23.6", + "@babel/helper-compilation-targets": "^7.23.6", + "@babel/helper-module-transforms": "^7.23.3", + "@babel/helpers": "^7.23.9", + "@babel/parser": "^7.23.9", + "@babel/template": "^7.23.9", + "@babel/traverse": "^7.23.9", + "@babel/types": "^7.23.9", + "convert-source-map": "^2.0.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.3", + 
"semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/babel" + } + }, + "node_modules/@angular/compiler-cli/node_modules/@babel/core/node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true + }, + "node_modules/@angular/compiler-cli/node_modules/@babel/core/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/@angular/compiler-cli/node_modules/@babel/generator": { + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", + "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.28.3", + "@babel/types": "^7.28.2", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@angular/compiler-cli/node_modules/@babel/helpers": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.4.tgz", + "integrity": "sha512-HFN59MmQXGHVyYadKLVumYsA9dBFun/ldYxipEjzA4196jpLZd8UjEEBLkbEkvfYreDqJhZxYAWFPtrfhNpj4w==", + "dev": true, + "dependencies": { + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.4" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@angular/compiler-cli/node_modules/@babel/parser": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", + "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.28.4" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@angular/compiler-cli/node_modules/@babel/traverse": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", + "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.3", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.28.4", + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.4", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@angular/compiler-cli/node_modules/@babel/types": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", + "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", + "dev": true, + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@angular/core": { + "version": "17.3.12", + "resolved": "https://registry.npmjs.org/@angular/core/-/core-17.3.12.tgz", + "integrity": 
"sha512-MuFt5yKi161JmauUta4Dh0m8ofwoq6Ino+KoOtkYMBGsSx+A7dSm+DUxxNwdj7+DNyg3LjVGCFgBFnq4g8z06A==", + "peer": true, + "dependencies": { + "tslib": "^2.3.0" + }, + "engines": { + "node": "^18.13.0 || >=20.9.0" + }, + "peerDependencies": { + "rxjs": "^6.5.3 || ^7.4.0", + "zone.js": "~0.14.0" + } + }, + "node_modules/@angular/forms": { + "version": "17.3.12", + "license": "MIT", + "dependencies": { + "tslib": "^2.3.0" + }, + "engines": { + "node": "^18.13.0 || >=20.9.0" + }, + "peerDependencies": { + "@angular/common": "17.3.12", + "@angular/core": "17.3.12", + "@angular/platform-browser": "17.3.12", + "rxjs": "^6.5.3 || ^7.4.0" + } + }, + "node_modules/@angular/platform-browser": { + "version": "17.3.12", + "license": "MIT", + "peer": true, + "dependencies": { + "tslib": "^2.3.0" + }, + "engines": { + "node": "^18.13.0 || >=20.9.0" + }, + "peerDependencies": { + "@angular/animations": "17.3.12", + "@angular/common": "17.3.12", + "@angular/core": "17.3.12" + }, + "peerDependenciesMeta": { + "@angular/animations": { + "optional": true + } + } + }, + "node_modules/@angular/platform-browser-dynamic": { + "version": "17.3.12", + "license": "MIT", + "dependencies": { + "tslib": "^2.3.0" + }, + "engines": { + "node": "^18.13.0 || >=20.9.0" + }, + "peerDependencies": { + "@angular/common": "17.3.12", + "@angular/compiler": "17.3.12", + "@angular/core": "17.3.12", + "@angular/platform-browser": "17.3.12" + } + }, + "node_modules/@angular/router": { + "version": "17.3.12", + "license": "MIT", + "dependencies": { + "tslib": "^2.3.0" + }, + "engines": { + "node": "^18.13.0 || >=20.9.0" + }, + "peerDependencies": { + "@angular/common": "17.3.12", + "@angular/core": "17.3.12", + "@angular/platform-browser": "17.3.12", + "rxjs": "^6.5.3 || ^7.4.0" + } + }, + "node_modules/@babel/code-frame": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-validator-identifier": "^7.27.1", + "js-tokens": "^4.0.0", + "picocolors": "^1.1.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/compat-data": { + "version": "7.28.4", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core": { + "version": "7.26.10", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.26.10.tgz", + "integrity": "sha512-vMqyb7XCDMPvJFFOaT9kxtiRh42GwlZEg1/uIgtZshS5a/8OaduUfCi7kynKgc3Tw/6Uo2D+db9qBttghhmxwQ==", + "dev": true, + "peer": true, + "dependencies": { + "@ampproject/remapping": "^2.2.0", + "@babel/code-frame": "^7.26.2", + "@babel/generator": "^7.26.10", + "@babel/helper-compilation-targets": "^7.26.5", + "@babel/helper-module-transforms": "^7.26.0", + "@babel/helpers": "^7.26.10", + "@babel/parser": "^7.26.10", + "@babel/template": "^7.26.9", + "@babel/traverse": "^7.26.10", + "@babel/types": "^7.26.10", + "convert-source-map": "^2.0.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.3", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/babel" + } + }, + "node_modules/@babel/core/node_modules/@babel/generator": { + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", + "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.28.3", + "@babel/types": "^7.28.2", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": 
"^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core/node_modules/@babel/helpers": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.28.4.tgz", + "integrity": "sha512-HFN59MmQXGHVyYadKLVumYsA9dBFun/ldYxipEjzA4196jpLZd8UjEEBLkbEkvfYreDqJhZxYAWFPtrfhNpj4w==", + "dev": true, + "dependencies": { + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.4" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core/node_modules/@babel/parser": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", + "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.28.4" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/core/node_modules/@babel/traverse": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", + "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.3", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.28.4", + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.4", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core/node_modules/@babel/types": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", + "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", + "dev": true, + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core/node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": "sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==", + "dev": true + }, + "node_modules/@babel/core/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/@babel/generator": { + "version": "7.26.10", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.26.10.tgz", + "integrity": "sha512-rRHT8siFIXQrAYOYqZQVsAr8vJ+cBNqcVAY6m5V8/4QqzaPl+zDBe6cLEPRDuNOUf3ww8RfJVlOyQMoSI+5Ang==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.26.10", + "@babel/types": "^7.26.10", + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.25", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/generator/node_modules/@babel/parser": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", + "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.28.4" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + 
"node_modules/@babel/generator/node_modules/@babel/types": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", + "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", + "dev": true, + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-annotate-as-pure": { + "version": "7.25.9", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-annotate-as-pure/node_modules/@babel/types": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", + "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", + "dev": true, + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets": { + "version": "7.27.2", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/compat-data": "^7.27.2", + "@babel/helper-validator-option": "^7.27.1", + "browserslist": "^4.24.0", + "lru-cache": "^5.1.1", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-compilation-targets/node_modules/semver": { + "version": "6.3.1", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/@babel/helper-create-regexp-features-plugin": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.27.1", + "regexpu-core": "^6.2.0", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-create-regexp-features-plugin/node_modules/@babel/helper-annotate-as-pure": { + "version": "7.27.3", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.27.3" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-create-regexp-features-plugin/node_modules/@babel/types": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", + "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", + "dev": true, + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-create-regexp-features-plugin/node_modules/semver": { + "version": "6.3.1", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/@babel/helper-globals": { + "version": "7.28.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-member-expression-to-functions": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/traverse": "^7.27.1", + "@babel/types": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-member-expression-to-functions/node_modules/@babel/generator": { + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", + "integrity": 
"sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.28.3", + "@babel/types": "^7.28.2", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-member-expression-to-functions/node_modules/@babel/parser": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", + "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.28.4" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/helper-member-expression-to-functions/node_modules/@babel/traverse": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", + "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.3", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.28.4", + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.4", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-member-expression-to-functions/node_modules/@babel/types": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", + "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", + "dev": true, + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-imports": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/traverse": "^7.27.1", + "@babel/types": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-imports/node_modules/@babel/generator": { + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", + "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.28.3", + "@babel/types": "^7.28.2", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-imports/node_modules/@babel/parser": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", + "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.28.4" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/helper-module-imports/node_modules/@babel/traverse": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", + "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.3", + 
"@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.28.4", + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.4", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-imports/node_modules/@babel/types": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", + "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", + "dev": true, + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-transforms": { + "version": "7.28.3", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-module-imports": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1", + "@babel/traverse": "^7.28.3" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-module-transforms/node_modules/@babel/generator": { + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", + "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.28.3", + "@babel/types": "^7.28.2", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-transforms/node_modules/@babel/parser": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", + "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.28.4" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/helper-module-transforms/node_modules/@babel/traverse": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", + "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.3", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.28.4", + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.4", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-module-transforms/node_modules/@babel/types": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", + "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", + "dev": true, + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-optimise-call-expression": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-optimise-call-expression/node_modules/@babel/types": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", + "integrity": 
"sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", + "dev": true, + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-plugin-utils": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-remap-async-to-generator": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.27.1", + "@babel/helper-wrap-function": "^7.27.1", + "@babel/traverse": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/helper-remap-async-to-generator/node_modules/@babel/generator": { + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", + "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.28.3", + "@babel/types": "^7.28.2", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-remap-async-to-generator/node_modules/@babel/helper-annotate-as-pure": { + "version": "7.27.3", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.27.3" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-remap-async-to-generator/node_modules/@babel/parser": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", + "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.28.4" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/helper-remap-async-to-generator/node_modules/@babel/traverse": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", + "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.3", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.28.4", + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.4", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-remap-async-to-generator/node_modules/@babel/types": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", + "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", + "dev": true, + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-replace-supers": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-member-expression-to-functions": "^7.27.1", + "@babel/helper-optimise-call-expression": "^7.27.1", + "@babel/traverse": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + 
"node_modules/@babel/helper-replace-supers/node_modules/@babel/generator": { + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", + "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.28.3", + "@babel/types": "^7.28.2", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-replace-supers/node_modules/@babel/parser": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", + "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.28.4" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/helper-replace-supers/node_modules/@babel/traverse": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", + "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.3", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.28.4", + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.4", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-replace-supers/node_modules/@babel/types": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", + "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", + "dev": true, + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-skip-transparent-expression-wrappers": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/traverse": "^7.27.1", + "@babel/types": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-skip-transparent-expression-wrappers/node_modules/@babel/generator": { + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", + "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.28.3", + "@babel/types": "^7.28.2", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-skip-transparent-expression-wrappers/node_modules/@babel/parser": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", + "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.28.4" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/helper-skip-transparent-expression-wrappers/node_modules/@babel/traverse": { + "version": "7.28.4", + "resolved": 
"https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", + "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.3", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.28.4", + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.4", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-skip-transparent-expression-wrappers/node_modules/@babel/types": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", + "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", + "dev": true, + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-split-export-declaration": { + "version": "7.24.7", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-split-export-declaration/node_modules/@babel/types": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", + "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", + "dev": true, + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-string-parser": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-identifier": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-validator-option": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-wrap-function": { + "version": "7.28.3", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/template": "^7.27.2", + "@babel/traverse": "^7.28.3", + "@babel/types": "^7.28.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-wrap-function/node_modules/@babel/generator": { + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", + "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.28.3", + "@babel/types": "^7.28.2", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-wrap-function/node_modules/@babel/parser": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", + "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.28.4" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/helper-wrap-function/node_modules/@babel/traverse": { + "version": "7.28.4", + "resolved": 
"https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", + "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.3", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.28.4", + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.4", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-wrap-function/node_modules/@babel/types": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", + "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", + "dev": true, + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-bugfix-firefox-class-in-computed-class-key": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/traverse": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/plugin-bugfix-firefox-class-in-computed-class-key/node_modules/@babel/generator": { + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", + "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.28.3", + "@babel/types": "^7.28.2", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-bugfix-firefox-class-in-computed-class-key/node_modules/@babel/parser": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", + "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.28.4" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-bugfix-firefox-class-in-computed-class-key/node_modules/@babel/traverse": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", + "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.3", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.28.4", + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.4", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-bugfix-firefox-class-in-computed-class-key/node_modules/@babel/types": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", + "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", + "dev": true, + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-bugfix-safari-class-field-initializer-scope": { + "version": "7.27.1", 
+ "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1", + "@babel/plugin-transform-optional-chaining": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.13.0" + } + }, + "node_modules/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly": { + "version": "7.28.3", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/traverse": "^7.28.3" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly/node_modules/@babel/generator": { + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", + "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.28.3", + "@babel/types": "^7.28.2", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly/node_modules/@babel/parser": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", + "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.28.4" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly/node_modules/@babel/traverse": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", + "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.3", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.28.4", + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.4", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly/node_modules/@babel/types": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", + "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", + "dev": true, + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-proposal-private-property-in-object": 
{ + "version": "7.21.0-placeholder-for-preset-env.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-import-assertions": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-import-attributes": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-unicode-sets-regex": { + "version": "7.18.6", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.18.6", + "@babel/helper-plugin-utils": "^7.18.6" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/plugin-transform-arrow-functions": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-async-generator-functions": { + "version": "7.26.8", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.26.5", + "@babel/helper-remap-async-to-generator": "^7.25.9", + "@babel/traverse": "^7.26.8" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-async-generator-functions/node_modules/@babel/generator": { + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", + "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.28.3", + "@babel/types": "^7.28.2", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-transform-async-generator-functions/node_modules/@babel/parser": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", + "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.28.4" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-transform-async-generator-functions/node_modules/@babel/traverse": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", + "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.3", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.28.4", + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.4", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-transform-async-generator-functions/node_modules/@babel/types": { + "version": "7.28.4", + 
"resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", + "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", + "dev": true, + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-transform-async-to-generator": { + "version": "7.25.9", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-module-imports": "^7.25.9", + "@babel/helper-plugin-utils": "^7.25.9", + "@babel/helper-remap-async-to-generator": "^7.25.9" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-block-scoped-functions": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-block-scoping": { + "version": "7.28.4", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-class-properties": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-create-class-features-plugin": "^7.27.1", + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-class-properties/node_modules/@babel/generator": { + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", + "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.28.3", + "@babel/types": "^7.28.2", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-transform-class-properties/node_modules/@babel/helper-annotate-as-pure": { + "version": "7.27.3", + "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.27.3.tgz", + "integrity": "sha512-fXSwMQqitTGeHLBC08Eq5yXz2m37E4pJX1qAU1+2cNedz/ifv/bVXft90VeSav5nFO61EcNgwr0aJxbyPaWBPg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.27.3" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-transform-class-properties/node_modules/@babel/helper-create-class-features-plugin": { + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.28.3.tgz", + "integrity": "sha512-V9f6ZFIYSLNEbuGA/92uOvYsGCJNsuA8ESZ4ldc09bWk/j8H8TKiPw8Mk1eG6olpnO0ALHJmYfZvF4MEE4gajg==", + "dev": true, + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.27.3", + "@babel/helper-member-expression-to-functions": "^7.27.1", + "@babel/helper-optimise-call-expression": "^7.27.1", + "@babel/helper-replace-supers": "^7.27.1", + "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1", + "@babel/traverse": "^7.28.3", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + 
}, + "node_modules/@babel/plugin-transform-class-properties/node_modules/@babel/parser": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", + "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.28.4" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-transform-class-properties/node_modules/@babel/traverse": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", + "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.3", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.28.4", + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.4", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-transform-class-properties/node_modules/@babel/types": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", + "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", + "dev": true, + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-transform-class-properties/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/@babel/plugin-transform-class-static-block": { + "version": "7.28.3", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-create-class-features-plugin": "^7.28.3", + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.12.0" + } + }, + "node_modules/@babel/plugin-transform-class-static-block/node_modules/@babel/generator": { + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", + "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.28.3", + "@babel/types": "^7.28.2", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-transform-class-static-block/node_modules/@babel/helper-annotate-as-pure": { + "version": "7.27.3", + "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.27.3.tgz", + "integrity": "sha512-fXSwMQqitTGeHLBC08Eq5yXz2m37E4pJX1qAU1+2cNedz/ifv/bVXft90VeSav5nFO61EcNgwr0aJxbyPaWBPg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.27.3" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-transform-class-static-block/node_modules/@babel/helper-create-class-features-plugin": { + "version": "7.28.3", + "resolved": 
"https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.28.3.tgz", + "integrity": "sha512-V9f6ZFIYSLNEbuGA/92uOvYsGCJNsuA8ESZ4ldc09bWk/j8H8TKiPw8Mk1eG6olpnO0ALHJmYfZvF4MEE4gajg==", + "dev": true, + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.27.3", + "@babel/helper-member-expression-to-functions": "^7.27.1", + "@babel/helper-optimise-call-expression": "^7.27.1", + "@babel/helper-replace-supers": "^7.27.1", + "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1", + "@babel/traverse": "^7.28.3", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/plugin-transform-class-static-block/node_modules/@babel/parser": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", + "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.28.4" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-transform-class-static-block/node_modules/@babel/traverse": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", + "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.3", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.28.4", + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.4", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-transform-class-static-block/node_modules/@babel/types": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", + "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", + "dev": true, + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-transform-class-static-block/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/@babel/plugin-transform-classes": { + "version": "7.28.4", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.27.3", + "@babel/helper-compilation-targets": "^7.27.2", + "@babel/helper-globals": "^7.28.0", + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/helper-replace-supers": "^7.27.1", + "@babel/traverse": "^7.28.4" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-classes/node_modules/@babel/generator": { + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", + "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.28.3", + "@babel/types": "^7.28.2", + "@jridgewell/gen-mapping": "^0.3.12", + 
"@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-transform-classes/node_modules/@babel/helper-annotate-as-pure": { + "version": "7.27.3", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.27.3" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-transform-classes/node_modules/@babel/parser": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", + "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.28.4" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-transform-classes/node_modules/@babel/traverse": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", + "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.3", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.28.4", + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.4", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-transform-classes/node_modules/@babel/types": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", + "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", + "dev": true, + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-transform-computed-properties": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/template": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-destructuring": { + "version": "7.28.0", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/traverse": "^7.28.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-destructuring/node_modules/@babel/generator": { + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", + "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.28.3", + "@babel/types": "^7.28.2", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-transform-destructuring/node_modules/@babel/parser": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", + "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.28.4" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + 
"node_modules/@babel/plugin-transform-destructuring/node_modules/@babel/traverse": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", + "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.3", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.28.4", + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.4", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-transform-destructuring/node_modules/@babel/types": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", + "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", + "dev": true, + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-transform-dotall-regex": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.27.1", + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-duplicate-keys": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-duplicate-named-capturing-groups-regex": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.27.1", + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/plugin-transform-dynamic-import": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-exponentiation-operator": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-export-namespace-from": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-for-of": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-function-name": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-compilation-targets": "^7.27.1", + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/traverse": "^7.27.1" + }, 
+ "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-function-name/node_modules/@babel/generator": { + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", + "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.28.3", + "@babel/types": "^7.28.2", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-transform-function-name/node_modules/@babel/parser": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", + "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.28.4" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-transform-function-name/node_modules/@babel/traverse": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", + "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.3", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.28.4", + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.4", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-transform-function-name/node_modules/@babel/types": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", + "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", + "dev": true, + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-transform-json-strings": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-literals": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-logical-assignment-operators": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-member-expression-literals": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-modules-amd": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-module-transforms": "^7.27.1", + 
"@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-modules-commonjs": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-module-transforms": "^7.27.1", + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-modules-systemjs": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-module-transforms": "^7.27.1", + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1", + "@babel/traverse": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-modules-systemjs/node_modules/@babel/generator": { + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", + "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.28.3", + "@babel/types": "^7.28.2", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-transform-modules-systemjs/node_modules/@babel/parser": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", + "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.28.4" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-transform-modules-systemjs/node_modules/@babel/traverse": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", + "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.3", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.28.4", + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.4", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-transform-modules-systemjs/node_modules/@babel/types": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", + "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", + "dev": true, + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-transform-modules-umd": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-module-transforms": "^7.27.1", + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-named-capturing-groups-regex": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.27.1", + 
"@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/plugin-transform-new-target": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-nullish-coalescing-operator": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-numeric-separator": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-object-rest-spread": { + "version": "7.28.4", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-compilation-targets": "^7.27.2", + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/plugin-transform-destructuring": "^7.28.0", + "@babel/plugin-transform-parameters": "^7.27.7", + "@babel/traverse": "^7.28.4" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-object-rest-spread/node_modules/@babel/generator": { + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", + "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.28.3", + "@babel/types": "^7.28.2", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-transform-object-rest-spread/node_modules/@babel/parser": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", + "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.28.4" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-transform-object-rest-spread/node_modules/@babel/traverse": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", + "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.3", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.28.4", + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.4", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-transform-object-rest-spread/node_modules/@babel/types": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", + "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", + "dev": true, + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" 
+ }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-transform-object-super": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/helper-replace-supers": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-optional-catch-binding": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-optional-chaining": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-parameters": { + "version": "7.27.7", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-private-methods": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-create-class-features-plugin": "^7.27.1", + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-private-methods/node_modules/@babel/generator": { + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", + "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.28.3", + "@babel/types": "^7.28.2", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-transform-private-methods/node_modules/@babel/helper-annotate-as-pure": { + "version": "7.27.3", + "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.27.3.tgz", + "integrity": "sha512-fXSwMQqitTGeHLBC08Eq5yXz2m37E4pJX1qAU1+2cNedz/ifv/bVXft90VeSav5nFO61EcNgwr0aJxbyPaWBPg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.27.3" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-transform-private-methods/node_modules/@babel/helper-create-class-features-plugin": { + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.28.3.tgz", + "integrity": "sha512-V9f6ZFIYSLNEbuGA/92uOvYsGCJNsuA8ESZ4ldc09bWk/j8H8TKiPw8Mk1eG6olpnO0ALHJmYfZvF4MEE4gajg==", + "dev": true, + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.27.3", + "@babel/helper-member-expression-to-functions": "^7.27.1", + "@babel/helper-optimise-call-expression": "^7.27.1", + "@babel/helper-replace-supers": "^7.27.1", + "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1", + "@babel/traverse": "^7.28.3", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + 
"node_modules/@babel/plugin-transform-private-methods/node_modules/@babel/parser": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", + "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.28.4" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-transform-private-methods/node_modules/@babel/traverse": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", + "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.3", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.28.4", + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.4", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-transform-private-methods/node_modules/@babel/types": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", + "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", + "dev": true, + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-transform-private-methods/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/@babel/plugin-transform-private-property-in-object": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.27.1", + "@babel/helper-create-class-features-plugin": "^7.27.1", + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-private-property-in-object/node_modules/@babel/generator": { + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.28.3.tgz", + "integrity": "sha512-3lSpxGgvnmZznmBkCRnVREPUFJv2wrv9iAoFDvADJc0ypmdOxdUtcLeBgBJ6zE0PMeTKnxeQzyk0xTBq4Ep7zw==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.28.3", + "@babel/types": "^7.28.2", + "@jridgewell/gen-mapping": "^0.3.12", + "@jridgewell/trace-mapping": "^0.3.28", + "jsesc": "^3.0.2" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-transform-private-property-in-object/node_modules/@babel/helper-annotate-as-pure": { + "version": "7.27.3", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/types": "^7.27.3" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-transform-private-property-in-object/node_modules/@babel/helper-create-class-features-plugin": { + "version": "7.28.3", + "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.28.3.tgz", + "integrity": "sha512-V9f6ZFIYSLNEbuGA/92uOvYsGCJNsuA8ESZ4ldc09bWk/j8H8TKiPw8Mk1eG6olpnO0ALHJmYfZvF4MEE4gajg==", + "dev": true, + 
"dependencies": { + "@babel/helper-annotate-as-pure": "^7.27.3", + "@babel/helper-member-expression-to-functions": "^7.27.1", + "@babel/helper-optimise-call-expression": "^7.27.1", + "@babel/helper-replace-supers": "^7.27.1", + "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1", + "@babel/traverse": "^7.28.3", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/plugin-transform-private-property-in-object/node_modules/@babel/parser": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", + "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.28.4" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/plugin-transform-private-property-in-object/node_modules/@babel/traverse": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.28.4.tgz", + "integrity": "sha512-YEzuboP2qvQavAcjgQNVgsvHIDv6ZpwXvcvjmyySP2DIMuByS/6ioU5G9pYrWHM6T2YDfc7xga9iNzYOs12CFQ==", + "dev": true, + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/generator": "^7.28.3", + "@babel/helper-globals": "^7.28.0", + "@babel/parser": "^7.28.4", + "@babel/template": "^7.27.2", + "@babel/types": "^7.28.4", + "debug": "^4.3.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-transform-private-property-in-object/node_modules/@babel/types": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", + "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", + "dev": true, + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/plugin-transform-private-property-in-object/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/@babel/plugin-transform-property-literals": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-regexp-modifiers": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.27.1", + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/plugin-transform-reserved-words": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-runtime": { + "version": "7.26.10", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-module-imports": "^7.25.9", + "@babel/helper-plugin-utils": "^7.26.5", + 
"babel-plugin-polyfill-corejs2": "^0.4.10", + "babel-plugin-polyfill-corejs3": "^0.11.0", + "babel-plugin-polyfill-regenerator": "^0.6.1", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-runtime/node_modules/semver": { + "version": "6.3.1", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/@babel/plugin-transform-shorthand-properties": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-spread": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1", + "@babel/helper-skip-transparent-expression-wrappers": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-sticky-regex": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-template-literals": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-typeof-symbol": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-unicode-escapes": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-unicode-property-regex": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.27.1", + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-unicode-regex": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.27.1", + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-unicode-sets-regex": { + "version": "7.27.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.27.1", + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/preset-env": { + "version": "7.26.9", + "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.26.9.tgz", + "integrity": "sha512-vX3qPGE8sEKEAZCWk05k3cpTAE3/nOYca++JA+Rd0z2NCNzabmYvEiSShKzm10zdquOIAVXsy2Ei/DTW34KlKQ==", + "dev": true, + 
"dependencies": { + "@babel/compat-data": "^7.26.8", + "@babel/helper-compilation-targets": "^7.26.5", + "@babel/helper-plugin-utils": "^7.26.5", + "@babel/helper-validator-option": "^7.25.9", + "@babel/plugin-bugfix-firefox-class-in-computed-class-key": "^7.25.9", + "@babel/plugin-bugfix-safari-class-field-initializer-scope": "^7.25.9", + "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": "^7.25.9", + "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": "^7.25.9", + "@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly": "^7.25.9", + "@babel/plugin-proposal-private-property-in-object": "7.21.0-placeholder-for-preset-env.2", + "@babel/plugin-syntax-import-assertions": "^7.26.0", + "@babel/plugin-syntax-import-attributes": "^7.26.0", + "@babel/plugin-syntax-unicode-sets-regex": "^7.18.6", + "@babel/plugin-transform-arrow-functions": "^7.25.9", + "@babel/plugin-transform-async-generator-functions": "^7.26.8", + "@babel/plugin-transform-async-to-generator": "^7.25.9", + "@babel/plugin-transform-block-scoped-functions": "^7.26.5", + "@babel/plugin-transform-block-scoping": "^7.25.9", + "@babel/plugin-transform-class-properties": "^7.25.9", + "@babel/plugin-transform-class-static-block": "^7.26.0", + "@babel/plugin-transform-classes": "^7.25.9", + "@babel/plugin-transform-computed-properties": "^7.25.9", + "@babel/plugin-transform-destructuring": "^7.25.9", + "@babel/plugin-transform-dotall-regex": "^7.25.9", + "@babel/plugin-transform-duplicate-keys": "^7.25.9", + "@babel/plugin-transform-duplicate-named-capturing-groups-regex": "^7.25.9", + "@babel/plugin-transform-dynamic-import": "^7.25.9", + "@babel/plugin-transform-exponentiation-operator": "^7.26.3", + "@babel/plugin-transform-export-namespace-from": "^7.25.9", + "@babel/plugin-transform-for-of": "^7.26.9", + "@babel/plugin-transform-function-name": "^7.25.9", + "@babel/plugin-transform-json-strings": "^7.25.9", + "@babel/plugin-transform-literals": "^7.25.9", + "@babel/plugin-transform-logical-assignment-operators": "^7.25.9", + "@babel/plugin-transform-member-expression-literals": "^7.25.9", + "@babel/plugin-transform-modules-amd": "^7.25.9", + "@babel/plugin-transform-modules-commonjs": "^7.26.3", + "@babel/plugin-transform-modules-systemjs": "^7.25.9", + "@babel/plugin-transform-modules-umd": "^7.25.9", + "@babel/plugin-transform-named-capturing-groups-regex": "^7.25.9", + "@babel/plugin-transform-new-target": "^7.25.9", + "@babel/plugin-transform-nullish-coalescing-operator": "^7.26.6", + "@babel/plugin-transform-numeric-separator": "^7.25.9", + "@babel/plugin-transform-object-rest-spread": "^7.25.9", + "@babel/plugin-transform-object-super": "^7.25.9", + "@babel/plugin-transform-optional-catch-binding": "^7.25.9", + "@babel/plugin-transform-optional-chaining": "^7.25.9", + "@babel/plugin-transform-parameters": "^7.25.9", + "@babel/plugin-transform-private-methods": "^7.25.9", + "@babel/plugin-transform-private-property-in-object": "^7.25.9", + "@babel/plugin-transform-property-literals": "^7.25.9", + "@babel/plugin-transform-regenerator": "^7.25.9", + "@babel/plugin-transform-regexp-modifiers": "^7.26.0", + "@babel/plugin-transform-reserved-words": "^7.25.9", + "@babel/plugin-transform-shorthand-properties": "^7.25.9", + "@babel/plugin-transform-spread": "^7.25.9", + "@babel/plugin-transform-sticky-regex": "^7.25.9", + "@babel/plugin-transform-template-literals": "^7.26.8", + "@babel/plugin-transform-typeof-symbol": "^7.26.7", + "@babel/plugin-transform-unicode-escapes": 
"^7.25.9", + "@babel/plugin-transform-unicode-property-regex": "^7.25.9", + "@babel/plugin-transform-unicode-regex": "^7.25.9", + "@babel/plugin-transform-unicode-sets-regex": "^7.25.9", + "@babel/preset-modules": "0.1.6-no-external-plugins", + "babel-plugin-polyfill-corejs2": "^0.4.10", + "babel-plugin-polyfill-corejs3": "^0.11.0", + "babel-plugin-polyfill-regenerator": "^0.6.1", + "core-js-compat": "^3.40.0", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/preset-env/node_modules/@babel/plugin-transform-regenerator": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.28.4.tgz", + "integrity": "sha512-+ZEdQlBoRg9m2NnzvEeLgtvBMO4tkFBw5SQIUgLICgTrumLoU7lr+Oghi6km2PFj+dbUt2u1oby2w3BDO9YQnA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/preset-env/node_modules/@babel/preset-modules": { + "version": "0.1.6-no-external-plugins", + "resolved": "https://registry.npmjs.org/@babel/preset-modules/-/preset-modules-0.1.6-no-external-plugins.tgz", + "integrity": "sha512-HrcgcIESLm9aIR842yhJ5RWan/gebQUJ6E/E5+rf0y9o6oj7w0Br+sWuL6kEQ/o/AdfvR1Je9jG18/gnpwjEyA==", + "dev": true, + "dependencies": { + "@babel/helper-plugin-utils": "^7.0.0", + "@babel/types": "^7.4.4", + "esutils": "^2.0.2" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0 || ^8.0.0-0 <8.0.0" + } + }, + "node_modules/@babel/preset-env/node_modules/@babel/types": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", + "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", + "dev": true, + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/preset-env/node_modules/core-js-compat": { + "version": "3.46.0", + "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.46.0.tgz", + "integrity": "sha512-p9hObIIEENxSV8xIu+V68JjSeARg6UVMG5mR+JEUguG3sI6MsiS1njz2jHmyJDvA+8jX/sytkBHup6kxhM9law==", + "dev": true, + "dependencies": { + "browserslist": "^4.26.3" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/core-js" + } + }, + "node_modules/@babel/preset-env/node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "dev": true, + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/@babel/runtime": { + "version": "7.26.10", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.26.10.tgz", + "integrity": "sha512-2WJMeRQPHKSPemqk/awGrAiuFfzBmOIPXKizAsVhWH9YJqLZ0H+HS4c8loHGgW6utJ3E/ejXQUsiGaQy2NZ9Fw==", + "dev": true, + "dependencies": { + "regenerator-runtime": "^0.14.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/template": { + "version": "7.27.2", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.27.1", + "@babel/parser": "^7.27.2", + "@babel/types": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/template/node_modules/@babel/parser": { 
+ "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", + "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.28.4" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@babel/template/node_modules/@babel/types": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", + "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", + "dev": true, + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@colors/colors": { + "version": "1.5.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.1.90" + } + }, + "node_modules/@discoveryjs/json-ext": { + "version": "0.5.7", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/@isaacs/cliui": { + "version": "8.0.2", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^5.1.2", + "string-width-cjs": "npm:string-width@^4.2.0", + "strip-ansi": "^7.0.1", + "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", + "wrap-ansi": "^8.1.0", + "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@isaacs/cliui/node_modules/ansi-regex": { + "version": "6.2.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/@isaacs/cliui/node_modules/ansi-styles": { + "version": "6.2.3", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@isaacs/cliui/node_modules/emoji-regex": { + "version": "9.2.2", + "dev": true, + "license": "MIT" + }, + "node_modules/@isaacs/cliui/node_modules/string-width": { + "version": "5.1.2", + "dev": true, + "license": "MIT", + "dependencies": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/@isaacs/cliui/node_modules/strip-ansi": { + "version": "7.1.2", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/@isaacs/cliui/node_modules/wrap-ansi": { + "version": "8.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^6.1.0", + "string-width": "^5.0.1", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/@istanbuljs/load-nyc-config": { + "version": "1.1.0", + "dev": true, + "license": "ISC", + "dependencies": { + "camelcase": "^5.3.1", + "find-up": "^4.1.0", + "get-package-type": "^0.1.0", + "js-yaml": "^3.13.1", + "resolve-from": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/load-nyc-config/node_modules/js-yaml": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": 
"sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "dev": true, + "dependencies": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/@istanbuljs/schema": { + "version": "0.1.3", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.13", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.5.0", + "@jridgewell/trace-mapping": "^0.3.24" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/source-map": { + "version": "0.3.11", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.25" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.5", + "dev": true, + "license": "MIT" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.31", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@leichtgewicht/ip-codec": { + "version": "2.0.5", + "dev": true, + "license": "MIT" + }, + "node_modules/@ljharb/through": { + "version": "2.3.14", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind": "^1.0.8" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/@ngtools/webpack": { + "version": "17.3.17", + "resolved": "https://registry.npmjs.org/@ngtools/webpack/-/webpack-17.3.17.tgz", + "integrity": "sha512-LaO++U8DoqV36M0YLKhubc1+NqM8fyp5DN03k1uP9GvtRchP9+7bfG+IEEZiDFkCUh9lfzi1CiGvUHrN4MYcsA==", + "dev": true, + "engines": { + "node": "^18.13.0 || >=20.9.0", + "npm": "^6.11.0 || ^7.5.6 || >=8.0.0", + "yarn": ">= 1.13.0" + }, + "peerDependencies": { + "@angular/compiler-cli": "^17.0.0", + "typescript": ">=5.2 <5.5", + "webpack": "^5.54.0" + } + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "dev": true, + "license": "MIT", + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@npmcli/agent": { + "version": "2.2.2", + "dev": true, + "license": "ISC", + "dependencies": { + "agent-base": "^7.1.0", + "http-proxy-agent": "^7.0.0", + "https-proxy-agent": "^7.0.1", + "lru-cache": "^10.0.1", + "socks-proxy-agent": "^8.0.3" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/agent/node_modules/lru-cache": { + "version": "10.4.3", + "dev": true, + "license": "ISC" + }, + "node_modules/@npmcli/fs": { + "version": "3.1.1", + "dev": true, + "license": "ISC", + "dependencies": { + "semver": "^7.3.5" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/git": { + "version": "5.0.8", + "dev": true, + "license": "ISC", + "dependencies": { + "@npmcli/promise-spawn": "^7.0.0", + "ini": "^4.1.3", + "lru-cache": "^10.0.1", + "npm-pick-manifest": "^9.0.0", + "proc-log": "^4.0.0", + "promise-inflight": 
"^1.0.1", + "promise-retry": "^2.0.1", + "semver": "^7.3.5", + "which": "^4.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/git/node_modules/ini": { + "version": "4.1.3", + "dev": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/git/node_modules/isexe": { + "version": "3.1.1", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=16" + } + }, + "node_modules/@npmcli/git/node_modules/lru-cache": { + "version": "10.4.3", + "dev": true, + "license": "ISC" + }, + "node_modules/@npmcli/git/node_modules/proc-log": { + "version": "4.2.0", + "dev": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/git/node_modules/which": { + "version": "4.0.0", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^3.1.1" + }, + "bin": { + "node-which": "bin/which.js" + }, + "engines": { + "node": "^16.13.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/installed-package-contents": { + "version": "2.1.0", + "dev": true, + "license": "ISC", + "dependencies": { + "npm-bundled": "^3.0.0", + "npm-normalize-package-bin": "^3.0.0" + }, + "bin": { + "installed-package-contents": "bin/index.js" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/node-gyp": { + "version": "3.0.0", + "dev": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/package-json": { + "version": "5.2.1", + "dev": true, + "license": "ISC", + "dependencies": { + "@npmcli/git": "^5.0.0", + "glob": "^10.2.2", + "hosted-git-info": "^7.0.0", + "json-parse-even-better-errors": "^3.0.0", + "normalize-package-data": "^6.0.0", + "proc-log": "^4.0.0", + "semver": "^7.5.3" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/package-json/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/@npmcli/package-json/node_modules/glob": { + "version": "10.4.5", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz", + "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==", + "dev": true, + "dependencies": { + "foreground-child": "^3.1.0", + "jackspeak": "^3.1.2", + "minimatch": "^9.0.4", + "minipass": "^7.1.2", + "package-json-from-dist": "^1.0.0", + "path-scurry": "^1.11.1" + }, + "bin": { + "glob": "dist/esm/bin.mjs" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@npmcli/package-json/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@npmcli/package-json/node_modules/proc-log": { + "version": "4.2.0", + "dev": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + 
"node_modules/@npmcli/promise-spawn": { + "version": "7.0.2", + "dev": true, + "license": "ISC", + "dependencies": { + "which": "^4.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/promise-spawn/node_modules/isexe": { + "version": "3.1.1", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=16" + } + }, + "node_modules/@npmcli/promise-spawn/node_modules/which": { + "version": "4.0.0", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^3.1.1" + }, + "bin": { + "node-which": "bin/which.js" + }, + "engines": { + "node": "^16.13.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/redact": { + "version": "1.1.0", + "dev": true, + "license": "ISC", + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/run-script": { + "version": "7.0.4", + "dev": true, + "license": "ISC", + "dependencies": { + "@npmcli/node-gyp": "^3.0.0", + "@npmcli/package-json": "^5.0.0", + "@npmcli/promise-spawn": "^7.0.0", + "node-gyp": "^10.0.0", + "which": "^4.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/run-script/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/@npmcli/run-script/node_modules/fs-minipass": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", + "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", + "dev": true, + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@npmcli/run-script/node_modules/fs-minipass/node_modules/minipass": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", + "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@npmcli/run-script/node_modules/glob": { + "version": "10.4.5", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz", + "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==", + "dev": true, + "dependencies": { + "foreground-child": "^3.1.0", + "jackspeak": "^3.1.2", + "minimatch": "^9.0.4", + "minipass": "^7.1.2", + "package-json-from-dist": "^1.0.0", + "path-scurry": "^1.11.1" + }, + "bin": { + "glob": "dist/esm/bin.mjs" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@npmcli/run-script/node_modules/isexe": { + "version": "3.1.1", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=16" + } + }, + "node_modules/@npmcli/run-script/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@npmcli/run-script/node_modules/mkdirp": { + "version": "1.0.4", + "resolved": 
"https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", + "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", + "dev": true, + "bin": { + "mkdirp": "bin/cmd.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@npmcli/run-script/node_modules/node-gyp": { + "version": "10.3.1", + "resolved": "https://registry.npmjs.org/node-gyp/-/node-gyp-10.3.1.tgz", + "integrity": "sha512-Pp3nFHBThHzVtNY7U6JfPjvT/DTE8+o/4xKsLQtBoU+j2HLsGlhcfzflAoUreaJbNmYnX+LlLi0qjV8kpyO6xQ==", + "dev": true, + "dependencies": { + "env-paths": "^2.2.0", + "exponential-backoff": "^3.1.1", + "glob": "^10.3.10", + "graceful-fs": "^4.2.6", + "make-fetch-happen": "^13.0.0", + "nopt": "^7.0.0", + "proc-log": "^4.1.0", + "semver": "^7.3.5", + "tar": "^6.2.1", + "which": "^4.0.0" + }, + "bin": { + "node-gyp": "bin/node-gyp.js" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/run-script/node_modules/proc-log": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-4.2.0.tgz", + "integrity": "sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==", + "dev": true, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/run-script/node_modules/tar": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", + "integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==", + "dev": true, + "dependencies": { + "chownr": "^2.0.0", + "fs-minipass": "^2.0.0", + "minipass": "^5.0.0", + "minizlib": "^2.1.1", + "mkdirp": "^1.0.3", + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/@npmcli/run-script/node_modules/tar/node_modules/minipass": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", + "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/@npmcli/run-script/node_modules/which": { + "version": "4.0.0", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^3.1.1" + }, + "bin": { + "node-which": "bin/which.js" + }, + "engines": { + "node": "^16.13.0 || >=18.0.0" + } + }, + "node_modules/@npmcli/run-script/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/@pkgjs/parseargs": { + "version": "0.11.0", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=14" + } + }, + "node_modules/@playwright/test": { + "version": "1.56.1", + "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.56.1.tgz", + "integrity": "sha512-vSMYtL/zOcFpvJCW71Q/OEGQb7KYBPAdKh35WNSkaZA75JlAO8ED8UN6GUNTm3drWomcbcqRPFqQbLae8yBTdg==", + "dev": true, + "dependencies": { + "playwright": "1.56.1" + }, + "bin": { + "playwright": "cli.js" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.52.5", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.52.5", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + 
"os": [ + "linux" + ] + }, + "node_modules/@schematics/angular": { + "version": "17.3.17", + "resolved": "https://registry.npmjs.org/@schematics/angular/-/angular-17.3.17.tgz", + "integrity": "sha512-S5HwYem5Yjeceb5OLvforNcjfTMh2qsHnTP1BAYL81XPpqeg2udjAkJjKBxCwxMZSqdCMw3ne0eKppEYTaEZ+A==", + "dev": true, + "dependencies": { + "@angular-devkit/core": "17.3.17", + "@angular-devkit/schematics": "17.3.17", + "jsonc-parser": "3.2.1" + }, + "engines": { + "node": "^18.13.0 || >=20.9.0", + "npm": "^6.11.0 || ^7.5.6 || >=8.0.0", + "yarn": ">= 1.13.0" + } + }, + "node_modules/@sigstore/bundle": { + "version": "2.3.2", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@sigstore/protobuf-specs": "^0.3.2" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@sigstore/bundle/node_modules/@sigstore/protobuf-specs": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.3.3.tgz", + "integrity": "sha512-RpacQhBlwpBWd7KEJsRKcBQalbV28fvkxwTOJIqhIuDysMMaJW47V4OqW30iJB9uRpqOSxxEAQFdr8tTattReQ==", + "dev": true, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/@sigstore/tuf": { + "version": "2.3.4", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@sigstore/protobuf-specs": "^0.3.2", + "tuf-js": "^2.2.1" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@sigstore/tuf/node_modules/@sigstore/protobuf-specs": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.3.3.tgz", + "integrity": "sha512-RpacQhBlwpBWd7KEJsRKcBQalbV28fvkxwTOJIqhIuDysMMaJW47V4OqW30iJB9uRpqOSxxEAQFdr8tTattReQ==", + "dev": true, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/@socket.io/component-emitter": { + "version": "3.1.2", + "dev": true, + "license": "MIT" + }, + "node_modules/@tufjs/canonical-json": { + "version": "2.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@tufjs/models": { + "version": "2.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@tufjs/canonical-json": "2.0.0", + "minimatch": "^9.0.4" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/@tufjs/models/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/@tufjs/models/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/@types/body-parser": { + "version": "1.19.6", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/connect": "*", + "@types/node": "*" + } + }, + "node_modules/@types/bonjour": { + "version": "3.5.13", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/connect": { + "version": "3.4.38", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + 
"node_modules/@types/connect-history-api-fallback": { + "version": "1.5.4", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/express-serve-static-core": "*", + "@types/node": "*" + } + }, + "node_modules/@types/cors": { + "version": "2.8.19", + "resolved": "https://registry.npmjs.org/@types/cors/-/cors-2.8.19.tgz", + "integrity": "sha512-mFNylyeyqN93lfe/9CSxOGREz8cpzAhH+E93xJ4xWQf62V8sQ/24reV2nyzUWM6H6Xji+GGHpkbLe7pVoUEskg==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/estree": { + "version": "1.0.8", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/express": { + "version": "4.17.23", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/body-parser": "*", + "@types/express-serve-static-core": "^4.17.33", + "@types/qs": "*", + "@types/serve-static": "*" + } + }, + "node_modules/@types/express-serve-static-core": { + "version": "5.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "@types/qs": "*", + "@types/range-parser": "*", + "@types/send": "*" + } + }, + "node_modules/@types/express/node_modules/@types/express-serve-static-core": { + "version": "4.19.7", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "@types/qs": "*", + "@types/range-parser": "*", + "@types/send": "*" + } + }, + "node_modules/@types/http-errors": { + "version": "2.0.5", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/http-proxy": { + "version": "1.17.16", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/jasmine": { + "version": "5.1.12", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/json-schema": { + "version": "7.0.15", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/mime": { + "version": "1.3.5", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/node": { + "version": "24.9.1", + "resolved": "https://registry.npmjs.org/@types/node/-/node-24.9.1.tgz", + "integrity": "sha512-QoiaXANRkSXK6p0Duvt56W208du4P9Uye9hWLWgGMDTEoKPhuenzNcC4vGUmrNkiOKTlIrBoyNQYNpSwfEZXSg==", + "dev": true, + "license": "MIT", + "peer": true, + "dependencies": { + "undici-types": "~7.16.0" + } + }, + "node_modules/@types/node-forge": { + "version": "1.3.14", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/qs": { + "version": "6.14.0", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/range-parser": { + "version": "1.2.7", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/retry": { + "version": "0.12.0", + "dev": true, + "license": "MIT" + }, + "node_modules/@types/send": { + "version": "1.2.0", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/serve-index": { + "version": "1.9.4", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/express": "*" + } + }, + "node_modules/@types/serve-static": { + "version": "1.15.9", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/http-errors": "*", + "@types/node": "*", + "@types/send": "<1" + } + }, + "node_modules/@types/serve-static/node_modules/@types/send": { + "version": "0.17.5", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/mime": "^1", + "@types/node": "*" + } + }, + "node_modules/@types/sockjs": { + "version": "0.3.36", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/ws": { + "version": "8.18.1", + 
"dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@webassemblyjs/floating-point-hex-parser": { + "version": "1.13.2", + "dev": true, + "license": "MIT" + }, + "node_modules/@webassemblyjs/helper-api-error": { + "version": "1.13.2", + "dev": true, + "license": "MIT" + }, + "node_modules/@webassemblyjs/helper-buffer": { + "version": "1.14.1", + "dev": true, + "license": "MIT" + }, + "node_modules/@webassemblyjs/helper-numbers": { + "version": "1.13.2", + "dev": true, + "license": "MIT", + "dependencies": { + "@webassemblyjs/floating-point-hex-parser": "1.13.2", + "@webassemblyjs/helper-api-error": "1.13.2", + "@xtuc/long": "4.2.2" + } + }, + "node_modules/@webassemblyjs/helper-wasm-bytecode": { + "version": "1.13.2", + "dev": true, + "license": "MIT" + }, + "node_modules/@webassemblyjs/helper-wasm-section": { + "version": "1.14.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@webassemblyjs/ast": "1.14.1", + "@webassemblyjs/helper-buffer": "1.14.1", + "@webassemblyjs/helper-wasm-bytecode": "1.13.2", + "@webassemblyjs/wasm-gen": "1.14.1" + } + }, + "node_modules/@webassemblyjs/helper-wasm-section/node_modules/@webassemblyjs/ast": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.14.1.tgz", + "integrity": "sha512-nuBEDgQfm1ccRp/8bCQrx1frohyufl4JlbMMZ4P1wpeOfDhF6FQkxZJ1b/e+PLwr6X1Nhw6OLme5usuBWYBvuQ==", + "dev": true, + "dependencies": { + "@webassemblyjs/helper-numbers": "1.13.2", + "@webassemblyjs/helper-wasm-bytecode": "1.13.2" + } + }, + "node_modules/@webassemblyjs/ieee754": { + "version": "1.13.2", + "dev": true, + "license": "MIT", + "dependencies": { + "@xtuc/ieee754": "^1.2.0" + } + }, + "node_modules/@webassemblyjs/leb128": { + "version": "1.13.2", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@xtuc/long": "4.2.2" + } + }, + "node_modules/@webassemblyjs/utf8": { + "version": "1.13.2", + "dev": true, + "license": "MIT" + }, + "node_modules/@webassemblyjs/wasm-edit": { + "version": "1.14.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@webassemblyjs/ast": "1.14.1", + "@webassemblyjs/helper-buffer": "1.14.1", + "@webassemblyjs/helper-wasm-bytecode": "1.13.2", + "@webassemblyjs/helper-wasm-section": "1.14.1", + "@webassemblyjs/wasm-gen": "1.14.1", + "@webassemblyjs/wasm-opt": "1.14.1", + "@webassemblyjs/wasm-parser": "1.14.1", + "@webassemblyjs/wast-printer": "1.14.1" + } + }, + "node_modules/@webassemblyjs/wasm-edit/node_modules/@webassemblyjs/ast": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.14.1.tgz", + "integrity": "sha512-nuBEDgQfm1ccRp/8bCQrx1frohyufl4JlbMMZ4P1wpeOfDhF6FQkxZJ1b/e+PLwr6X1Nhw6OLme5usuBWYBvuQ==", + "dev": true, + "dependencies": { + "@webassemblyjs/helper-numbers": "1.13.2", + "@webassemblyjs/helper-wasm-bytecode": "1.13.2" + } + }, + "node_modules/@webassemblyjs/wasm-gen": { + "version": "1.14.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@webassemblyjs/ast": "1.14.1", + "@webassemblyjs/helper-wasm-bytecode": "1.13.2", + "@webassemblyjs/ieee754": "1.13.2", + "@webassemblyjs/leb128": "1.13.2", + "@webassemblyjs/utf8": "1.13.2" + } + }, + "node_modules/@webassemblyjs/wasm-gen/node_modules/@webassemblyjs/ast": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.14.1.tgz", + "integrity": "sha512-nuBEDgQfm1ccRp/8bCQrx1frohyufl4JlbMMZ4P1wpeOfDhF6FQkxZJ1b/e+PLwr6X1Nhw6OLme5usuBWYBvuQ==", + "dev": true, + "dependencies": { + 
"@webassemblyjs/helper-numbers": "1.13.2", + "@webassemblyjs/helper-wasm-bytecode": "1.13.2" + } + }, + "node_modules/@webassemblyjs/wasm-opt": { + "version": "1.14.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@webassemblyjs/ast": "1.14.1", + "@webassemblyjs/helper-buffer": "1.14.1", + "@webassemblyjs/wasm-gen": "1.14.1", + "@webassemblyjs/wasm-parser": "1.14.1" + } + }, + "node_modules/@webassemblyjs/wasm-opt/node_modules/@webassemblyjs/ast": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.14.1.tgz", + "integrity": "sha512-nuBEDgQfm1ccRp/8bCQrx1frohyufl4JlbMMZ4P1wpeOfDhF6FQkxZJ1b/e+PLwr6X1Nhw6OLme5usuBWYBvuQ==", + "dev": true, + "dependencies": { + "@webassemblyjs/helper-numbers": "1.13.2", + "@webassemblyjs/helper-wasm-bytecode": "1.13.2" + } + }, + "node_modules/@webassemblyjs/wasm-parser": { + "version": "1.14.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@webassemblyjs/ast": "1.14.1", + "@webassemblyjs/helper-api-error": "1.13.2", + "@webassemblyjs/helper-wasm-bytecode": "1.13.2", + "@webassemblyjs/ieee754": "1.13.2", + "@webassemblyjs/leb128": "1.13.2", + "@webassemblyjs/utf8": "1.13.2" + } + }, + "node_modules/@webassemblyjs/wasm-parser/node_modules/@webassemblyjs/ast": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.14.1.tgz", + "integrity": "sha512-nuBEDgQfm1ccRp/8bCQrx1frohyufl4JlbMMZ4P1wpeOfDhF6FQkxZJ1b/e+PLwr6X1Nhw6OLme5usuBWYBvuQ==", + "dev": true, + "dependencies": { + "@webassemblyjs/helper-numbers": "1.13.2", + "@webassemblyjs/helper-wasm-bytecode": "1.13.2" + } + }, + "node_modules/@webassemblyjs/wast-printer": { + "version": "1.14.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@webassemblyjs/ast": "1.14.1", + "@xtuc/long": "4.2.2" + } + }, + "node_modules/@webassemblyjs/wast-printer/node_modules/@webassemblyjs/ast": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.14.1.tgz", + "integrity": "sha512-nuBEDgQfm1ccRp/8bCQrx1frohyufl4JlbMMZ4P1wpeOfDhF6FQkxZJ1b/e+PLwr6X1Nhw6OLme5usuBWYBvuQ==", + "dev": true, + "dependencies": { + "@webassemblyjs/helper-numbers": "1.13.2", + "@webassemblyjs/helper-wasm-bytecode": "1.13.2" + } + }, + "node_modules/@xtuc/ieee754": { + "version": "1.2.0", + "dev": true, + "license": "BSD-3-Clause" + }, + "node_modules/@xtuc/long": { + "version": "4.2.2", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/@yarnpkg/lockfile": { + "version": "1.1.0", + "dev": true, + "license": "BSD-2-Clause" + }, + "node_modules/abbrev": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/abbrev/-/abbrev-2.0.0.tgz", + "integrity": "sha512-6/mh1E2u2YgEsCHdY0Yx5oW+61gZU+1vXaoiHHrpKeuRNNgFvS+/jrwHiQhB5apAf5oB7UB7E19ol2R2LKH8hQ==", + "dev": true, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/accepts": { + "version": "1.3.8", + "dev": true, + "license": "MIT", + "dependencies": { + "mime-types": "~2.1.34", + "negotiator": "0.6.3" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/accepts/node_modules/negotiator": { + "version": "0.6.3", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/acorn": { + "version": "8.15.0", + "dev": true, + "license": "MIT", + "peer": true, + "bin": { + "acorn": "bin/acorn" + }, + "engines": { + "node": ">=0.4.0" + } + }, + "node_modules/acorn-import-attributes": { + "version": "1.9.5", + "dev": true, + "license": "MIT", + "peerDependencies": { 
+ "acorn": "^8" + } + }, + "node_modules/adjust-sourcemap-loader": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "loader-utils": "^2.0.0", + "regex-parser": "^2.2.11" + }, + "engines": { + "node": ">=8.9" + } + }, + "node_modules/adjust-sourcemap-loader/node_modules/loader-utils": { + "version": "2.0.4", + "dev": true, + "license": "MIT", + "dependencies": { + "big.js": "^5.2.2", + "emojis-list": "^3.0.0", + "json5": "^2.1.2" + }, + "engines": { + "node": ">=8.9.0" + } + }, + "node_modules/agent-base": { + "version": "7.1.4", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 14" + } + }, + "node_modules/aggregate-error": { + "version": "3.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "clean-stack": "^2.0.0", + "indent-string": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/ajv-formats": { + "version": "2.1.1", + "dev": true, + "license": "MIT", + "dependencies": { + "ajv": "^8.0.0" + }, + "peerDependencies": { + "ajv": "^8.0.0" + }, + "peerDependenciesMeta": { + "ajv": { + "optional": true + } + } + }, + "node_modules/ajv-formats/node_modules/ajv": { + "version": "8.17.1", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", + "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.3", + "fast-uri": "^3.0.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/ansi-colors": { + "version": "4.1.3", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/ansi-escapes": { + "version": "4.3.2", + "dev": true, + "license": "MIT", + "dependencies": { + "type-fest": "^0.21.3" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ansi-html-community": { + "version": "0.0.8", + "dev": true, + "engines": [ + "node >= 0.8.0" + ], + "license": "Apache-2.0", + "bin": { + "ansi-html": "bin/ansi-html" + } + }, + "node_modules/ansi-regex": { + "version": "5.0.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/ansi-styles": { + "version": "4.3.0", + "dev": true, + "license": "MIT", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/anymatch": { + "version": "3.1.3", + "dev": true, + "license": "ISC", + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/anymatch/node_modules/picomatch": { + "version": "2.3.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, + "node_modules/array-flatten": { + "version": "1.1.1", + "dev": true, + "license": "MIT" + }, + "node_modules/autoprefixer": { + "version": "10.4.18", + "resolved": 
"https://registry.npmjs.org/autoprefixer/-/autoprefixer-10.4.18.tgz", + "integrity": "sha512-1DKbDfsr6KUElM6wg+0zRNkB/Q7WcKYAaK+pzXn+Xqmszm/5Xa9coeNdtP88Vi+dPzZnMjhge8GIV49ZQkDa+g==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/autoprefixer" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "browserslist": "^4.23.0", + "caniuse-lite": "^1.0.30001591", + "fraction.js": "^4.3.7", + "normalize-range": "^0.1.2", + "picocolors": "^1.0.0", + "postcss-value-parser": "^4.2.0" + }, + "bin": { + "autoprefixer": "bin/autoprefixer" + }, + "engines": { + "node": "^10 || ^12 || >=14" + }, + "peerDependencies": { + "postcss": "^8.1.0" + } + }, + "node_modules/autoprefixer/node_modules/caniuse-lite": { + "version": "1.0.30001751", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001751.tgz", + "integrity": "sha512-A0QJhug0Ly64Ii3eIqHu5X51ebln3k4yTUkY1j8drqpWHVreg/VLijN48cZ1bYPiqOQuqpkIKnzr/Ul8V+p6Cw==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ] + }, + "node_modules/babel-loader": { + "version": "9.1.3", + "dev": true, + "license": "MIT", + "dependencies": { + "find-cache-dir": "^4.0.0", + "schema-utils": "^4.0.0" + }, + "engines": { + "node": ">= 14.15.0" + }, + "peerDependencies": { + "@babel/core": "^7.12.0", + "webpack": ">=5" + } + }, + "node_modules/babel-plugin-istanbul": { + "version": "6.1.1", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@babel/helper-plugin-utils": "^7.0.0", + "@istanbuljs/load-nyc-config": "^1.0.0", + "@istanbuljs/schema": "^0.1.2", + "istanbul-lib-instrument": "^5.0.4", + "test-exclude": "^6.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/babel-plugin-polyfill-corejs2": { + "version": "0.4.14", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/compat-data": "^7.27.7", + "@babel/helper-define-polyfill-provider": "^0.6.5", + "semver": "^6.3.1" + }, + "peerDependencies": { + "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" + } + }, + "node_modules/babel-plugin-polyfill-corejs2/node_modules/@babel/helper-define-polyfill-provider": { + "version": "0.6.5", + "resolved": "https://registry.npmjs.org/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.6.5.tgz", + "integrity": "sha512-uJnGFcPsWQK8fvjgGP5LZUZZsYGIoPeRjSF5PGwrelYgq7Q15/Ft9NGFp1zglwgIv//W0uG4BevRuSJRyylZPg==", + "dev": true, + "dependencies": { + "@babel/helper-compilation-targets": "^7.27.2", + "@babel/helper-plugin-utils": "^7.27.1", + "debug": "^4.4.1", + "lodash.debounce": "^4.0.8", + "resolve": "^1.22.10" + }, + "peerDependencies": { + "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" + } + }, + "node_modules/babel-plugin-polyfill-corejs2/node_modules/resolve": { + "version": "1.22.11", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.11.tgz", + "integrity": "sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==", + "dev": true, + "dependencies": { + "is-core-module": "^2.16.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "engines": { + "node": ">= 0.4" + }, + 
"funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/babel-plugin-polyfill-corejs2/node_modules/semver": { + "version": "6.3.1", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/babel-plugin-polyfill-corejs3": { + "version": "0.11.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-define-polyfill-provider": "^0.6.3", + "core-js-compat": "^3.40.0" + }, + "peerDependencies": { + "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" + } + }, + "node_modules/babel-plugin-polyfill-corejs3/node_modules/@babel/helper-define-polyfill-provider": { + "version": "0.6.5", + "resolved": "https://registry.npmjs.org/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.6.5.tgz", + "integrity": "sha512-uJnGFcPsWQK8fvjgGP5LZUZZsYGIoPeRjSF5PGwrelYgq7Q15/Ft9NGFp1zglwgIv//W0uG4BevRuSJRyylZPg==", + "dev": true, + "dependencies": { + "@babel/helper-compilation-targets": "^7.27.2", + "@babel/helper-plugin-utils": "^7.27.1", + "debug": "^4.4.1", + "lodash.debounce": "^4.0.8", + "resolve": "^1.22.10" + }, + "peerDependencies": { + "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" + } + }, + "node_modules/babel-plugin-polyfill-corejs3/node_modules/core-js-compat": { + "version": "3.46.0", + "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.46.0.tgz", + "integrity": "sha512-p9hObIIEENxSV8xIu+V68JjSeARg6UVMG5mR+JEUguG3sI6MsiS1njz2jHmyJDvA+8jX/sytkBHup6kxhM9law==", + "dev": true, + "dependencies": { + "browserslist": "^4.26.3" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/core-js" + } + }, + "node_modules/babel-plugin-polyfill-corejs3/node_modules/resolve": { + "version": "1.22.11", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.11.tgz", + "integrity": "sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==", + "dev": true, + "dependencies": { + "is-core-module": "^2.16.1", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/babel-plugin-polyfill-regenerator": { + "version": "0.6.5", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/helper-define-polyfill-provider": "^0.6.5" + }, + "peerDependencies": { + "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" + } + }, + "node_modules/babel-plugin-polyfill-regenerator/node_modules/@babel/helper-define-polyfill-provider": { + "version": "0.6.5", + "resolved": "https://registry.npmjs.org/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.6.5.tgz", + "integrity": "sha512-uJnGFcPsWQK8fvjgGP5LZUZZsYGIoPeRjSF5PGwrelYgq7Q15/Ft9NGFp1zglwgIv//W0uG4BevRuSJRyylZPg==", + "dev": true, + "dependencies": { + "@babel/helper-compilation-targets": "^7.27.2", + "@babel/helper-plugin-utils": "^7.27.1", + "debug": "^4.4.1", + "lodash.debounce": "^4.0.8", + "resolve": "^1.22.10" + }, + "peerDependencies": { + "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" + } + }, + "node_modules/babel-plugin-polyfill-regenerator/node_modules/resolve": { + "version": "1.22.11", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.11.tgz", + "integrity": "sha512-RfqAvLnMl313r7c9oclB1HhUEAezcpLjz95wFH4LVuhk9JF/r22qmVP9AMmOU4vMX7Q8pN8jwNg/CSpdFnMjTQ==", + "dev": true, + "dependencies": { + "is-core-module": "^2.16.1", + "path-parse": "^1.0.7", + 
"supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "dev": true, + "license": "MIT" + }, + "node_modules/base64-js": { + "version": "1.5.1", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/base64id": { + "version": "2.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": "^4.5.0 || >= 5.9" + } + }, + "node_modules/baseline-browser-mapping": { + "version": "2.8.18", + "dev": true, + "license": "Apache-2.0", + "bin": { + "baseline-browser-mapping": "dist/cli.js" + } + }, + "node_modules/batch": { + "version": "0.6.1", + "dev": true, + "license": "MIT" + }, + "node_modules/big.js": { + "version": "5.2.2", + "dev": true, + "license": "MIT", + "engines": { + "node": "*" + } + }, + "node_modules/binary-extensions": { + "version": "2.3.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/bl": { + "version": "4.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "buffer": "^5.5.0", + "inherits": "^2.0.4", + "readable-stream": "^3.4.0" + } + }, + "node_modules/bl/node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "dev": true, + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/body-parser": { + "version": "1.20.3", + "dev": true, + "license": "MIT", + "dependencies": { + "bytes": "3.1.2", + "content-type": "~1.0.5", + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "on-finished": "2.4.1", + "qs": "6.13.0", + "raw-body": "2.5.2", + "type-is": "~1.6.18", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + } + }, + "node_modules/body-parser/node_modules/debug": { + "version": "2.6.9", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/body-parser/node_modules/ms": { + "version": "2.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/bonjour-service": { + "version": "1.3.0", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-deep-equal": "^3.1.3", + "multicast-dns": "^7.2.5" + } + }, + "node_modules/boolbase": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/boolbase/-/boolbase-1.0.0.tgz", + "integrity": "sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==", + "dev": true + }, + "node_modules/brace-expansion": { + "version": "1.1.12", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "dev": true, + "license": "MIT", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/browserslist": { + "version": "4.26.3", + 
"dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "baseline-browser-mapping": "^2.8.9", + "caniuse-lite": "^1.0.30001746", + "electron-to-chromium": "^1.5.227", + "node-releases": "^2.0.21", + "update-browserslist-db": "^1.1.3" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/browserslist/node_modules/caniuse-lite": { + "version": "1.0.30001751", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001751.tgz", + "integrity": "sha512-A0QJhug0Ly64Ii3eIqHu5X51ebln3k4yTUkY1j8drqpWHVreg/VLijN48cZ1bYPiqOQuqpkIKnzr/Ul8V+p6Cw==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ] + }, + "node_modules/buffer": { + "version": "5.7.1", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "base64-js": "^1.3.1", + "ieee754": "^1.1.13" + } + }, + "node_modules/buffer-from": { + "version": "1.1.2", + "dev": true, + "license": "MIT" + }, + "node_modules/bytes": { + "version": "3.1.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/cacache": { + "version": "18.0.4", + "dev": true, + "license": "ISC", + "dependencies": { + "@npmcli/fs": "^3.1.0", + "fs-minipass": "^3.0.0", + "glob": "^10.2.2", + "lru-cache": "^10.0.1", + "minipass": "^7.0.3", + "minipass-collect": "^2.0.1", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "p-map": "^4.0.0", + "ssri": "^10.0.0", + "tar": "^6.1.11", + "unique-filename": "^3.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/cacache/node_modules/brace-expansion": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz", + "integrity": "sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/cacache/node_modules/glob": { + "version": "10.4.5", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.4.5.tgz", + "integrity": "sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg==", + "dev": true, + "dependencies": { + "foreground-child": "^3.1.0", + "jackspeak": "^3.1.2", + "minimatch": "^9.0.4", + "minipass": "^7.1.2", + "package-json-from-dist": "^1.0.0", + "path-scurry": "^1.11.1" + }, + "bin": { + "glob": "dist/esm/bin.mjs" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/cacache/node_modules/lru-cache": { + "version": "10.4.3", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz", + "integrity": "sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ==", + "dev": true + }, + 
"node_modules/cacache/node_modules/minimatch": { + "version": "9.0.5", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz", + "integrity": "sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/cacache/node_modules/mkdirp": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", + "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", + "dev": true, + "bin": { + "mkdirp": "bin/cmd.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/cacache/node_modules/tar": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", + "integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==", + "dev": true, + "dependencies": { + "chownr": "^2.0.0", + "fs-minipass": "^2.0.0", + "minipass": "^5.0.0", + "minizlib": "^2.1.1", + "mkdirp": "^1.0.3", + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/cacache/node_modules/tar/node_modules/fs-minipass": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", + "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", + "dev": true, + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/cacache/node_modules/tar/node_modules/fs-minipass/node_modules/minipass": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", + "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cacache/node_modules/tar/node_modules/minipass": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", + "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/cacache/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/call-bind": { + "version": "1.0.8", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.0", + "es-define-property": "^1.0.0", + "get-intrinsic": "^1.2.4", + "set-function-length": "^1.2.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/call-bind-apply-helpers": { + "version": "1.0.2", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/call-bound": { + "version": "1.0.4", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "get-intrinsic": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + 
"node_modules/callsites": { + "version": "3.1.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/camelcase": { + "version": "5.3.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/chalk": { + "version": "4.1.2", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/chardet": { + "version": "0.7.0", + "dev": true, + "license": "MIT" + }, + "node_modules/chokidar": { + "version": "3.6.0", + "dev": true, + "license": "MIT", + "dependencies": { + "anymatch": "~3.1.2", + "braces": "~3.0.2", + "glob-parent": "~5.1.2", + "is-binary-path": "~2.1.0", + "is-glob": "~4.0.1", + "normalize-path": "~3.0.0", + "readdirp": "~3.6.0" + }, + "engines": { + "node": ">= 8.10.0" + }, + "funding": { + "url": "https://paulmillr.com/funding/" + }, + "optionalDependencies": { + "fsevents": "~2.3.2" + } + }, + "node_modules/chokidar/node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/chownr": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/chownr/-/chownr-2.0.0.tgz", + "integrity": "sha512-bIomtDF5KGpdogkLd9VspvFzk9KfpyyGlS8YFVZl7TGPBHL5snIOnxeshwVgPteQ9b4Eydl+pVbIyE1DcvCWgQ==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/chrome-trace-event": { + "version": "1.0.4", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.0" + } + }, + "node_modules/clean-stack": { + "version": "2.2.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/cli-cursor": { + "version": "3.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "restore-cursor": "^3.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/cli-spinners": { + "version": "2.9.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/cli-width": { + "version": "4.1.0", + "dev": true, + "license": "ISC", + "engines": { + "node": ">= 12" + } + }, + "node_modules/cliui": { + "version": "8.0.1", + "dev": true, + "license": "ISC", + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/cliui/node_modules/wrap-ansi": { + "version": "7.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/clone": { + "version": "1.0.4", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8" + } + }, + "node_modules/clone-deep": { + "version": "4.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "is-plain-object": "^2.0.4", + "kind-of": "^6.0.2", + "shallow-clone": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "dev": true, + 
"license": "MIT", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "dev": true, + "license": "MIT" + }, + "node_modules/colorette": { + "version": "2.0.20", + "dev": true, + "license": "MIT" + }, + "node_modules/commander": { + "version": "2.20.3", + "dev": true, + "license": "MIT" + }, + "node_modules/common-path-prefix": { + "version": "3.0.0", + "dev": true, + "license": "ISC" + }, + "node_modules/compressible": { + "version": "2.0.18", + "dev": true, + "license": "MIT", + "dependencies": { + "mime-db": ">= 1.43.0 < 2" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/compression": { + "version": "1.8.1", + "dev": true, + "license": "MIT", + "dependencies": { + "bytes": "3.1.2", + "compressible": "~2.0.18", + "debug": "2.6.9", + "negotiator": "~0.6.4", + "on-headers": "~1.1.0", + "safe-buffer": "5.2.1", + "vary": "~1.1.2" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/compression/node_modules/debug": { + "version": "2.6.9", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/compression/node_modules/ms": { + "version": "2.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/concat-map": { + "version": "0.0.1", + "dev": true, + "license": "MIT" + }, + "node_modules/connect": { + "version": "3.7.0", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "2.6.9", + "finalhandler": "1.1.2", + "parseurl": "~1.3.3", + "utils-merge": "1.0.1" + }, + "engines": { + "node": ">= 0.10.0" + } + }, + "node_modules/connect-history-api-fallback": { + "version": "2.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8" + } + }, + "node_modules/connect/node_modules/debug": { + "version": "2.6.9", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/connect/node_modules/ms": { + "version": "2.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/content-disposition": { + "version": "0.5.4", + "dev": true, + "license": "MIT", + "dependencies": { + "safe-buffer": "5.2.1" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/content-type": { + "version": "1.0.5", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/convert-source-map": { + "version": "1.9.0", + "dev": true, + "license": "MIT" + }, + "node_modules/cookie": { + "version": "0.7.2", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz", + "integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==", + "dev": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/cookie-signature": { + "version": "1.0.6", + "dev": true, + "license": "MIT" + }, + "node_modules/copy-anything": { + "version": "2.0.6", + "dev": true, + "license": "MIT", + "dependencies": { + "is-what": "^3.14.1" + }, + "funding": { + "url": "https://github.com/sponsors/mesqueeb" + } + }, + "node_modules/copy-webpack-plugin": { + "version": "11.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "fast-glob": "^3.2.11", + "glob-parent": "^6.0.1", + "globby": "^13.1.1", + "normalize-path": "^3.0.0", + "schema-utils": "^4.0.0", + "serialize-javascript": "^6.0.0" + }, + "engines": { + "node": ">= 14.15.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependencies": { + "webpack": "^5.1.0" + } + }, + 
"node_modules/copy-webpack-plugin/node_modules/glob-parent": { + "version": "6.0.2", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.3" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/core-util-is": { + "version": "1.0.3", + "dev": true, + "license": "MIT" + }, + "node_modules/cors": { + "version": "2.8.5", + "dev": true, + "license": "MIT", + "dependencies": { + "object-assign": "^4", + "vary": "^1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/cosmiconfig": { + "version": "9.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "env-paths": "^2.2.1", + "import-fresh": "^3.3.0", + "js-yaml": "^4.1.0", + "parse-json": "^5.2.0" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/d-fischer" + }, + "peerDependencies": { + "typescript": ">=4.9.5" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/cosmiconfig/node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "node_modules/cosmiconfig/node_modules/js-yaml": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/critters": { + "version": "0.0.22", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "chalk": "^4.1.0", + "css-select": "^5.1.0", + "dom-serializer": "^2.0.0", + "domhandler": "^5.0.2", + "htmlparser2": "^8.0.2", + "postcss": "^8.4.23", + "postcss-media-query-parser": "^0.2.3" + } + }, + "node_modules/critters/node_modules/css-select": { + "version": "5.2.2", + "resolved": "https://registry.npmjs.org/css-select/-/css-select-5.2.2.tgz", + "integrity": "sha512-TizTzUddG/xYLA3NXodFM0fSbNizXjOKhqiQQwvhlspadZokn1KDy0NZFS0wuEubIYAV5/c1/lAr0TaaFXEXzw==", + "dev": true, + "dependencies": { + "boolbase": "^1.0.0", + "css-what": "^6.1.0", + "domhandler": "^5.0.2", + "domutils": "^3.0.1", + "nth-check": "^2.0.1" + }, + "funding": { + "url": "https://github.com/sponsors/fb55" + } + }, + "node_modules/critters/node_modules/domutils": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.2.2.tgz", + "integrity": "sha512-6kZKyUajlDuqlHKVX1w7gyslj9MPIXzIFiz/rGu35uC1wMi+kMhQwGhl4lt9unC9Vb9INnY9Z3/ZA3+FhASLaw==", + "dev": true, + "dependencies": { + "dom-serializer": "^2.0.0", + "domelementtype": "^2.3.0", + "domhandler": "^5.0.3" + }, + "funding": { + "url": "https://github.com/fb55/domutils?sponsor=1" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.6", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/cross-spawn/node_modules/which": { + "version": "2.0.2", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/css-loader": { + "version": "6.10.0", + "dev": true, + "license": "MIT", + "dependencies": { + "icss-utils": "^5.1.0", + "postcss": "^8.4.33", + 
"postcss-modules-extract-imports": "^3.0.0", + "postcss-modules-local-by-default": "^4.0.4", + "postcss-modules-scope": "^3.1.1", + "postcss-modules-values": "^4.0.0", + "postcss-value-parser": "^4.2.0", + "semver": "^7.5.4" + }, + "engines": { + "node": ">= 12.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependencies": { + "@rspack/core": "0.x || 1.x", + "webpack": "^5.0.0" + }, + "peerDependenciesMeta": { + "@rspack/core": { + "optional": true + }, + "webpack": { + "optional": true + } + } + }, + "node_modules/css-what": { + "version": "6.2.2", + "resolved": "https://registry.npmjs.org/css-what/-/css-what-6.2.2.tgz", + "integrity": "sha512-u/O3vwbptzhMs3L1fQE82ZSLHQQfto5gyZzwteVIEyeaY5Fc7R4dapF/BvRoSYFeqfBk4m0V1Vafq5Pjv25wvA==", + "dev": true, + "engines": { + "node": ">= 6" + }, + "funding": { + "url": "https://github.com/sponsors/fb55" + } + }, + "node_modules/cssesc": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/cssesc/-/cssesc-3.0.0.tgz", + "integrity": "sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg==", + "dev": true, + "bin": { + "cssesc": "bin/cssesc" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/custom-event": { + "version": "1.0.1", + "dev": true, + "license": "MIT" + }, + "node_modules/date-format": { + "version": "4.0.14", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/debug": { + "version": "4.4.3", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/default-gateway": { + "version": "6.0.3", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "execa": "^5.0.0" + }, + "engines": { + "node": ">= 10" + } + }, + "node_modules/defaults": { + "version": "1.0.4", + "dev": true, + "license": "MIT", + "dependencies": { + "clone": "^1.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/define-data-property": { + "version": "1.1.4", + "dev": true, + "license": "MIT", + "dependencies": { + "es-define-property": "^1.0.0", + "es-errors": "^1.3.0", + "gopd": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/define-lazy-prop": { + "version": "2.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/depd": { + "version": "2.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/destroy": { + "version": "1.2.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8", + "npm": "1.2.8000 || >= 1.4.16" + } + }, + "node_modules/detect-node": { + "version": "2.1.0", + "dev": true, + "license": "MIT" + }, + "node_modules/di": { + "version": "0.0.1", + "dev": true, + "license": "MIT" + }, + "node_modules/dir-glob": { + "version": "3.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "path-type": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/dns-packet": { + "version": "5.6.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@leichtgewicht/ip-codec": "^2.0.1" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/dom-serialize": { + "version": "2.2.1", + "dev": true, + "license": "MIT", + "dependencies": { + "custom-event": "~1.0.0", + "ent": 
"~2.2.0", + "extend": "^3.0.0", + "void-elements": "^2.0.0" + } + }, + "node_modules/dom-serializer": { + "version": "2.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "domelementtype": "^2.3.0", + "domhandler": "^5.0.2", + "entities": "^4.2.0" + }, + "funding": { + "url": "https://github.com/cheeriojs/dom-serializer?sponsor=1" + } + }, + "node_modules/dom-serializer/node_modules/entities": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "dev": true, + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/domelementtype": { + "version": "2.3.0", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fb55" + } + ], + "license": "BSD-2-Clause" + }, + "node_modules/domhandler": { + "version": "5.0.3", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "domelementtype": "^2.3.0" + }, + "engines": { + "node": ">= 4" + }, + "funding": { + "url": "https://github.com/fb55/domhandler?sponsor=1" + } + }, + "node_modules/dunder-proto": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.1", + "es-errors": "^1.3.0", + "gopd": "^1.2.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/eastasianwidth": { + "version": "0.2.0", + "dev": true, + "license": "MIT" + }, + "node_modules/ee-first": { + "version": "1.1.1", + "dev": true, + "license": "MIT" + }, + "node_modules/electron-to-chromium": { + "version": "1.5.237", + "dev": true, + "license": "ISC" + }, + "node_modules/emoji-regex": { + "version": "8.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/emojis-list": { + "version": "3.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/encodeurl": { + "version": "1.0.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/encoding": { + "version": "0.1.13", + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "iconv-lite": "^0.6.2" + } + }, + "node_modules/encoding/node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "dev": true, + "optional": true, + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/engine.io-parser": { + "version": "5.2.3", + "resolved": "https://registry.npmjs.org/engine.io-parser/-/engine.io-parser-5.2.3.tgz", + "integrity": "sha512-HqD3yTBfnBxIrbnM1DoD6Pcq8NECnh8d4As1Qgh0z5Gg3jRRIqijury0CL3ghu/edArpUYiYqQiDUQBIs4np3Q==", + "dev": true, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/ent": { + "version": "2.2.2", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.3", + "es-errors": "^1.3.0", + "punycode": "^1.4.1", + "safe-regex-test": "^1.1.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/env-paths": { + "version": "2.2.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/err-code": { + "version": "2.0.3", + "dev": true, + "license": "MIT" + }, + "node_modules/errno": { + "version": "0.1.8", + "dev": true, + "license": "MIT", + 
"optional": true, + "dependencies": { + "prr": "~1.0.1" + }, + "bin": { + "errno": "cli.js" + } + }, + "node_modules/error-ex": { + "version": "1.3.4", + "dev": true, + "license": "MIT", + "dependencies": { + "is-arrayish": "^0.2.1" + } + }, + "node_modules/es-define-property": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-errors": { + "version": "1.3.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/es-module-lexer": { + "version": "1.7.0", + "dev": true, + "license": "MIT" + }, + "node_modules/es-object-atoms": { + "version": "1.1.1", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/esbuild": { + "version": "0.20.1", + "dev": true, + "hasInstallScript": true, + "license": "MIT", + "optional": true, + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=12" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.20.1", + "@esbuild/android-arm": "0.20.1", + "@esbuild/android-arm64": "0.20.1", + "@esbuild/android-x64": "0.20.1", + "@esbuild/darwin-arm64": "0.20.1", + "@esbuild/darwin-x64": "0.20.1", + "@esbuild/freebsd-arm64": "0.20.1", + "@esbuild/freebsd-x64": "0.20.1", + "@esbuild/linux-arm": "0.20.1", + "@esbuild/linux-arm64": "0.20.1", + "@esbuild/linux-ia32": "0.20.1", + "@esbuild/linux-loong64": "0.20.1", + "@esbuild/linux-mips64el": "0.20.1", + "@esbuild/linux-ppc64": "0.20.1", + "@esbuild/linux-riscv64": "0.20.1", + "@esbuild/linux-s390x": "0.20.1", + "@esbuild/linux-x64": "0.20.1", + "@esbuild/netbsd-x64": "0.20.1", + "@esbuild/openbsd-x64": "0.20.1", + "@esbuild/sunos-x64": "0.20.1", + "@esbuild/win32-arm64": "0.20.1", + "@esbuild/win32-ia32": "0.20.1", + "@esbuild/win32-x64": "0.20.1" + } + }, + "node_modules/esbuild-wasm": { + "version": "0.20.1", + "dev": true, + "license": "MIT", + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild/node_modules/@esbuild/aix-ppc64": { + "version": "0.20.1", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.20.1.tgz", + "integrity": "sha512-m55cpeupQ2DbuRGQMMZDzbv9J9PgVelPjlcmM5kxHnrBdBx6REaEd7LamYV7Dm8N7rCyR/XwU6rVP8ploKtIkA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild/node_modules/@esbuild/android-arm": { + "version": "0.20.1", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.20.1.tgz", + "integrity": "sha512-4j0+G27/2ZXGWR5okcJi7pQYhmkVgb4D7UKwxcqrjhvp5TKWx3cUjgB1CGj1mfdmJBQ9VnUGgUhign+FPF2Zgw==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild/node_modules/@esbuild/android-arm64": { + "version": "0.20.1", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.20.1.tgz", + "integrity": "sha512-hCnXNF0HM6AjowP+Zou0ZJMWWa1VkD77BXe959zERgGJBBxB+sV+J9f/rcjeg2c5bsukD/n17RKWXGFCO5dD5A==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild/node_modules/@esbuild/android-x64": { + "version": "0.20.1", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.20.1.tgz", + "integrity": 
"sha512-MSfZMBoAsnhpS+2yMFYIQUPs8Z19ajwfuaSZx+tSl09xrHZCjbeXXMsUF/0oq7ojxYEpsSo4c0SfjxOYXRbpaA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild/node_modules/@esbuild/darwin-arm64": { + "version": "0.20.1", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.20.1.tgz", + "integrity": "sha512-Ylk6rzgMD8klUklGPzS414UQLa5NPXZD5tf8JmQU8GQrj6BrFA/Ic9tb2zRe1kOZyCbGl+e8VMbDRazCEBqPvA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild/node_modules/@esbuild/darwin-x64": { + "version": "0.20.1", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.20.1.tgz", + "integrity": "sha512-pFIfj7U2w5sMp52wTY1XVOdoxw+GDwy9FsK3OFz4BpMAjvZVs0dT1VXs8aQm22nhwoIWUmIRaE+4xow8xfIDZA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild/node_modules/@esbuild/freebsd-arm64": { + "version": "0.20.1", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.20.1.tgz", + "integrity": "sha512-UyW1WZvHDuM4xDz0jWun4qtQFauNdXjXOtIy7SYdf7pbxSWWVlqhnR/T2TpX6LX5NI62spt0a3ldIIEkPM6RHw==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild/node_modules/@esbuild/freebsd-x64": { + "version": "0.20.1", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.20.1.tgz", + "integrity": "sha512-itPwCw5C+Jh/c624vcDd9kRCCZVpzpQn8dtwoYIt2TJF3S9xJLiRohnnNrKwREvcZYx0n8sCSbvGH349XkcQeg==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild/node_modules/@esbuild/linux-arm": { + "version": "0.20.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.20.1.tgz", + "integrity": "sha512-LojC28v3+IhIbfQ+Vu4Ut5n3wKcgTu6POKIHN9Wpt0HnfgUGlBuyDDQR4jWZUZFyYLiz4RBBBmfU6sNfn6RhLw==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild/node_modules/@esbuild/linux-arm64": { + "version": "0.20.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.20.1.tgz", + "integrity": "sha512-cX8WdlF6Cnvw/DO9/X7XLH2J6CkBnz7Twjpk56cshk9sjYVcuh4sXQBy5bmTwzBjNVZze2yaV1vtcJS04LbN8w==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild/node_modules/@esbuild/linux-ia32": { + "version": "0.20.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.20.1.tgz", + "integrity": "sha512-4H/sQCy1mnnGkUt/xszaLlYJVTz3W9ep52xEefGtd6yXDQbz/5fZE5dFLUgsPdbUOQANcVUa5iO6g3nyy5BJiw==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild/node_modules/@esbuild/linux-loong64": { + "version": "0.20.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.20.1.tgz", + "integrity": "sha512-c0jgtB+sRHCciVXlyjDcWb2FUuzlGVRwGXgI+3WqKOIuoo8AmZAddzeOHeYLtD+dmtHw3B4Xo9wAUdjlfW5yYA==", + "cpu": [ + "loong64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + 
}, + "node_modules/esbuild/node_modules/@esbuild/linux-mips64el": { + "version": "0.20.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.20.1.tgz", + "integrity": "sha512-TgFyCfIxSujyuqdZKDZ3yTwWiGv+KnlOeXXitCQ+trDODJ+ZtGOzLkSWngynP0HZnTsDyBbPy7GWVXWaEl6lhA==", + "cpu": [ + "mips64el" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild/node_modules/@esbuild/linux-ppc64": { + "version": "0.20.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.20.1.tgz", + "integrity": "sha512-b+yuD1IUeL+Y93PmFZDZFIElwbmFfIKLKlYI8M6tRyzE6u7oEP7onGk0vZRh8wfVGC2dZoy0EqX1V8qok4qHaw==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild/node_modules/@esbuild/linux-riscv64": { + "version": "0.20.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.20.1.tgz", + "integrity": "sha512-wpDlpE0oRKZwX+GfomcALcouqjjV8MIX8DyTrxfyCfXxoKQSDm45CZr9fanJ4F6ckD4yDEPT98SrjvLwIqUCgg==", + "cpu": [ + "riscv64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild/node_modules/@esbuild/linux-s390x": { + "version": "0.20.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.20.1.tgz", + "integrity": "sha512-5BepC2Au80EohQ2dBpyTquqGCES7++p7G+7lXe1bAIvMdXm4YYcEfZtQrP4gaoZ96Wv1Ute61CEHFU7h4FMueQ==", + "cpu": [ + "s390x" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild/node_modules/@esbuild/linux-x64": { + "version": "0.20.1", + "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.20.1.tgz", + "integrity": "sha512-5gRPk7pKuaIB+tmH+yKd2aQTRpqlf1E4f/mC+tawIm/CGJemZcHZpp2ic8oD83nKgUPMEd0fNanrnFljiruuyA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild/node_modules/@esbuild/netbsd-x64": { + "version": "0.20.1", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.20.1.tgz", + "integrity": "sha512-4fL68JdrLV2nVW2AaWZBv3XEm3Ae3NZn/7qy2KGAt3dexAgSVT+Hc97JKSZnqezgMlv9x6KV0ZkZY7UO5cNLCg==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild/node_modules/@esbuild/openbsd-x64": { + "version": "0.20.1", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.20.1.tgz", + "integrity": "sha512-GhRuXlvRE+twf2ES+8REbeCb/zeikNqwD3+6S5y5/x+DYbAQUNl0HNBs4RQJqrechS4v4MruEr8ZtAin/hK5iw==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild/node_modules/@esbuild/sunos-x64": { + "version": "0.20.1", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.20.1.tgz", + "integrity": "sha512-ZnWEyCM0G1Ex6JtsygvC3KUUrlDXqOihw8RicRuQAzw+c4f1D66YlPNNV3rkjVW90zXVsHwZYWbJh3v+oQFM9Q==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild/node_modules/@esbuild/win32-arm64": { + "version": "0.20.1", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.20.1.tgz", + "integrity": 
"sha512-QZ6gXue0vVQY2Oon9WyLFCdSuYbXSoxaZrPuJ4c20j6ICedfsDilNPYfHLlMH7vGfU5DQR0czHLmJvH4Nzis/A==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild/node_modules/@esbuild/win32-ia32": { + "version": "0.20.1", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.20.1.tgz", + "integrity": "sha512-HzcJa1NcSWTAU0MJIxOho8JftNp9YALui3o+Ny7hCh0v5f90nprly1U3Sj1Ldj/CvKKdvvFsCRvDkpsEMp4DNw==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/esbuild/node_modules/@esbuild/win32-x64": { + "version": "0.20.1", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.20.1.tgz", + "integrity": "sha512-0MBh53o6XtI6ctDnRMeQ+xoCN8kD2qI1rY1KgF/xdWQwoFeKou7puvDfV8/Wv4Ctx2rRpET/gGdz3YlNtNACSA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/escalade": { + "version": "3.2.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-html": { + "version": "1.0.3", + "dev": true, + "license": "MIT" + }, + "node_modules/escape-string-regexp": { + "version": "1.0.5", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/eslint-scope": { + "version": "5.1.1", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "esrecurse": "^4.3.0", + "estraverse": "^4.1.1" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true, + "bin": { + "esparse": "bin/esparse.js", + "esvalidate": "bin/esvalidate.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/esrecurse": { + "version": "4.3.0", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "estraverse": "^5.2.0" + }, + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esrecurse/node_modules/estraverse": { + "version": "5.3.0", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/estraverse": { + "version": "4.3.0", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=4.0" + } + }, + "node_modules/esutils": { + "version": "2.0.3", + "dev": true, + "license": "BSD-2-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/etag": { + "version": "1.8.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/eventemitter3": { + "version": "4.0.7", + "dev": true, + "license": "MIT" + }, + "node_modules/events": { + "version": "3.3.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.x" + } + }, + "node_modules/execa": { + "version": "5.1.1", + "dev": true, + "license": "MIT", + "dependencies": { + "cross-spawn": "^7.0.3", + "get-stream": "^6.0.0", + "human-signals": "^2.1.0", + "is-stream": "^2.0.0", + "merge-stream": "^2.0.0", + "npm-run-path": "^4.0.1", + "onetime": "^5.1.2", + "signal-exit": "^3.0.3", + "strip-final-newline": "^2.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sindresorhus/execa?sponsor=1" + } + }, + "node_modules/exponential-backoff": { + "version": "3.1.3", + "resolved": 
"https://registry.npmjs.org/exponential-backoff/-/exponential-backoff-3.1.3.tgz", + "integrity": "sha512-ZgEeZXj30q+I0EN+CbSSpIyPaJ5HVQD18Z1m+u1FXbAeT94mr1zw50q4q6jiiC447Nl/YTcIYSAftiGqetwXCA==", + "dev": true + }, + "node_modules/extend": { + "version": "3.0.2", + "dev": true, + "license": "MIT" + }, + "node_modules/external-editor": { + "version": "3.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "chardet": "^0.7.0", + "iconv-lite": "^0.4.24", + "tmp": "^0.0.33" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/fast-deep-equal": { + "version": "3.1.3", + "dev": true, + "license": "MIT" + }, + "node_modules/fast-glob": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz", + "integrity": "sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.4" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "dev": true, + "license": "MIT" + }, + "node_modules/fast-uri": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/fast-uri/-/fast-uri-3.1.0.tgz", + "integrity": "sha512-iPeeDKJSWf4IEOasVVrknXpaBV0IApz/gp7S2bb7Z4Lljbl2MGJRqInZiUrQwV16cpzw/D3S5j5Julj/gT52AA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/fastify" + }, + { + "type": "opencollective", + "url": "https://opencollective.com/fastify" + } + ] + }, + "node_modules/fastq": { + "version": "1.19.1", + "dev": true, + "license": "ISC", + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/faye-websocket": { + "version": "0.11.4", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "websocket-driver": ">=0.5.1" + }, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/figures": { + "version": "3.2.0", + "dev": true, + "license": "MIT", + "dependencies": { + "escape-string-regexp": "^1.0.5" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "dev": true, + "license": "MIT", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/finalhandler": { + "version": "1.1.2", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "2.6.9", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "on-finished": "~2.3.0", + "parseurl": "~1.3.3", + "statuses": "~1.5.0", + "unpipe": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/finalhandler/node_modules/debug": { + "version": "2.6.9", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/finalhandler/node_modules/ms": { + "version": "2.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/finalhandler/node_modules/on-finished": { + "version": "2.3.0", + "dev": true, + "license": "MIT", + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/find-cache-dir": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "common-path-prefix": "^3.0.0", + "pkg-dir": "^7.0.0" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/find-up": { + "version": "4.1.0", + "dev": 
true, + "license": "MIT", + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/flat": { + "version": "5.0.2", + "dev": true, + "license": "BSD-3-Clause", + "bin": { + "flat": "cli.js" + } + }, + "node_modules/flatted": { + "version": "3.3.3", + "dev": true, + "license": "ISC" + }, + "node_modules/follow-redirects": { + "version": "1.15.11", + "dev": true, + "funding": [ + { + "type": "individual", + "url": "https://github.com/sponsors/RubenVerborgh" + } + ], + "license": "MIT", + "engines": { + "node": ">=4.0" + }, + "peerDependenciesMeta": { + "debug": { + "optional": true + } + } + }, + "node_modules/foreground-child": { + "version": "3.3.1", + "dev": true, + "license": "ISC", + "dependencies": { + "cross-spawn": "^7.0.6", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/foreground-child/node_modules/signal-exit": { + "version": "4.1.0", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/forwarded": { + "version": "0.2.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/fraction.js": { + "version": "4.3.7", + "dev": true, + "license": "MIT", + "engines": { + "node": "*" + }, + "funding": { + "type": "patreon", + "url": "https://github.com/sponsors/rawify" + } + }, + "node_modules/fresh": { + "version": "0.5.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/fs-extra": { + "version": "8.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "graceful-fs": "^4.2.0", + "jsonfile": "^4.0.0", + "universalify": "^0.1.0" + }, + "engines": { + "node": ">=6 <7 || >=8" + } + }, + "node_modules/fs-minipass": { + "version": "3.0.3", + "dev": true, + "license": "ISC", + "dependencies": { + "minipass": "^7.0.3" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/fs-monkey": { + "version": "1.1.0", + "dev": true, + "license": "Unlicense" + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "dev": true, + "license": "ISC" + }, + "node_modules/fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/get-caller-file": { + "version": "2.0.5", + "dev": true, + "license": "ISC", + "engines": { + "node": "6.* || 8.* || >= 10.*" + } + }, + "node_modules/get-intrinsic": { + "version": "1.3.0", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bind-apply-helpers": "^1.0.2", + "es-define-property": "^1.0.1", + "es-errors": "^1.3.0", + "es-object-atoms": "^1.1.1", + "function-bind": "^1.1.2", + "get-proto": "^1.0.1", + "gopd": "^1.2.0", + "has-symbols": "^1.1.0", + "hasown": "^2.0.2", + "math-intrinsics": "^1.1.0" + }, + "engines": { + "node": ">= 
0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/get-package-type": { + "version": "0.1.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/get-proto": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "dunder-proto": "^1.0.1", + "es-object-atoms": "^1.0.0" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/get-stream": { + "version": "6.0.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/glob": { + "version": "7.2.3", + "dev": true, + "license": "ISC", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "5.1.2", + "dev": true, + "license": "ISC", + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/glob-to-regexp": { + "version": "0.4.1", + "dev": true, + "license": "BSD-2-Clause" + }, + "node_modules/globby": { + "version": "13.2.2", + "dev": true, + "license": "MIT", + "dependencies": { + "dir-glob": "^3.0.1", + "fast-glob": "^3.3.0", + "ignore": "^5.2.4", + "merge2": "^1.4.1", + "slash": "^4.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/gopd": { + "version": "1.2.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "dev": true, + "license": "ISC" + }, + "node_modules/handle-thing": { + "version": "2.0.1", + "dev": true, + "license": "MIT" + }, + "node_modules/has-flag": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/has-property-descriptors": { + "version": "1.0.2", + "dev": true, + "license": "MIT", + "dependencies": { + "es-define-property": "^1.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-symbols": { + "version": "1.1.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/has-tostringtag": { + "version": "1.0.2", + "dev": true, + "license": "MIT", + "dependencies": { + "has-symbols": "^1.0.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "dev": true, + "license": "MIT", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/hosted-git-info": { + "version": "7.0.2", + "dev": true, + "license": "ISC", + "dependencies": { + "lru-cache": "^10.0.1" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/hosted-git-info/node_modules/lru-cache": { + "version": "10.4.3", + "dev": true, + "license": "ISC" + }, + "node_modules/hpack.js": { + "version": "2.1.6", + "dev": true, + "license": "MIT", + "dependencies": { + "inherits": "^2.0.1", + "obuf": "^1.0.0", + "readable-stream": "^2.0.1", + "wbuf": "^1.1.0" + } + }, + 
"node_modules/hpack.js/node_modules/readable-stream": { + "version": "2.3.8", + "dev": true, + "license": "MIT", + "dependencies": { + "core-util-is": "~1.0.0", + "inherits": "~2.0.3", + "isarray": "~1.0.0", + "process-nextick-args": "~2.0.0", + "safe-buffer": "~5.1.1", + "string_decoder": "~1.1.1", + "util-deprecate": "~1.0.1" + } + }, + "node_modules/hpack.js/node_modules/safe-buffer": { + "version": "5.1.2", + "dev": true, + "license": "MIT" + }, + "node_modules/hpack.js/node_modules/string_decoder": { + "version": "1.1.1", + "dev": true, + "license": "MIT", + "dependencies": { + "safe-buffer": "~5.1.0" + } + }, + "node_modules/html-entities": { + "version": "2.6.0", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/mdevils" + }, + { + "type": "patreon", + "url": "https://patreon.com/mdevils" + } + ], + "license": "MIT" + }, + "node_modules/html-escaper": { + "version": "2.0.2", + "dev": true, + "license": "MIT" + }, + "node_modules/htmlparser2": { + "version": "8.0.2", + "dev": true, + "funding": [ + "https://github.com/fb55/htmlparser2?sponsor=1", + { + "type": "github", + "url": "https://github.com/sponsors/fb55" + } + ], + "license": "MIT", + "dependencies": { + "domelementtype": "^2.3.0", + "domhandler": "^5.0.3", + "domutils": "^3.0.1", + "entities": "^4.4.0" + } + }, + "node_modules/htmlparser2/node_modules/domutils": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/domutils/-/domutils-3.2.2.tgz", + "integrity": "sha512-6kZKyUajlDuqlHKVX1w7gyslj9MPIXzIFiz/rGu35uC1wMi+kMhQwGhl4lt9unC9Vb9INnY9Z3/ZA3+FhASLaw==", + "dev": true, + "dependencies": { + "dom-serializer": "^2.0.0", + "domelementtype": "^2.3.0", + "domhandler": "^5.0.3" + }, + "funding": { + "url": "https://github.com/fb55/domutils?sponsor=1" + } + }, + "node_modules/htmlparser2/node_modules/entities": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "dev": true, + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/http-cache-semantics": { + "version": "4.2.0", + "dev": true, + "license": "BSD-2-Clause" + }, + "node_modules/http-deceiver": { + "version": "1.2.7", + "dev": true, + "license": "MIT" + }, + "node_modules/http-errors": { + "version": "2.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "depd": "2.0.0", + "inherits": "2.0.4", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "toidentifier": "1.0.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/http-errors/node_modules/statuses": { + "version": "2.0.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/http-parser-js": { + "version": "0.5.10", + "dev": true, + "license": "MIT" + }, + "node_modules/http-proxy": { + "version": "1.18.1", + "dev": true, + "license": "MIT", + "dependencies": { + "eventemitter3": "^4.0.0", + "follow-redirects": "^1.0.0", + "requires-port": "^1.0.0" + }, + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/http-proxy-agent": { + "version": "7.0.2", + "dev": true, + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.0", + "debug": "^4.3.4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/http-proxy-middleware": { + "version": "2.0.8", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/http-proxy": "^1.17.8", + 
"http-proxy": "^1.18.1", + "is-glob": "^4.0.1", + "is-plain-obj": "^3.0.0", + "micromatch": "^4.0.2" + }, + "engines": { + "node": ">=12.0.0" + }, + "peerDependencies": { + "@types/express": "^4.17.13" + }, + "peerDependenciesMeta": { + "@types/express": { + "optional": true + } + } + }, + "node_modules/https-proxy-agent": { + "version": "7.0.4", + "dev": true, + "license": "MIT", + "dependencies": { + "agent-base": "^7.0.2", + "debug": "4" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/human-signals": { + "version": "2.1.0", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=10.17.0" + } + }, + "node_modules/iconv-lite": { + "version": "0.4.24", + "dev": true, + "license": "MIT", + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/icss-utils": { + "version": "5.1.0", + "dev": true, + "license": "ISC", + "engines": { + "node": "^10 || ^12 || >= 14" + }, + "peerDependencies": { + "postcss": "^8.1.0" + } + }, + "node_modules/ieee754": { + "version": "1.2.1", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "BSD-3-Clause" + }, + "node_modules/ignore": { + "version": "5.3.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/ignore-walk": { + "version": "6.0.5", + "dev": true, + "license": "ISC", + "dependencies": { + "minimatch": "^9.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/ignore-walk/node_modules/brace-expansion": { + "version": "2.0.2", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/ignore-walk/node_modules/minimatch": { + "version": "9.0.5", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/image-size": { + "version": "0.5.5", + "dev": true, + "license": "MIT", + "optional": true, + "bin": { + "image-size": "bin/image-size.js" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/import-fresh": { + "version": "3.3.1", + "dev": true, + "license": "MIT", + "dependencies": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/import-fresh/node_modules/resolve-from": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/indent-string": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "dev": true, + "license": "ISC", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "dev": true, + "license": "ISC" + }, + "node_modules/ini": { + "version": "4.1.2", + "dev": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/inquirer": { + "version": "9.2.15", + "resolved": 
"https://registry.npmjs.org/inquirer/-/inquirer-9.2.15.tgz", + "integrity": "sha512-vI2w4zl/mDluHt9YEQ/543VTCwPKWiHzKtm9dM2V0NdFcqEexDAjUHzO1oA60HRNaVifGXXM1tRRNluLVHa0Kg==", + "dev": true, + "dependencies": { + "@ljharb/through": "^2.3.12", + "ansi-escapes": "^4.3.2", + "chalk": "^5.3.0", + "cli-cursor": "^3.1.0", + "cli-width": "^4.1.0", + "external-editor": "^3.1.0", + "figures": "^3.2.0", + "lodash": "^4.17.21", + "mute-stream": "1.0.0", + "ora": "^5.4.1", + "run-async": "^3.0.0", + "rxjs": "^7.8.1", + "string-width": "^4.2.3", + "strip-ansi": "^6.0.1", + "wrap-ansi": "^6.2.0" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/inquirer/node_modules/chalk": { + "version": "5.6.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.6.2.tgz", + "integrity": "sha512-7NzBL0rN6fMUW+f7A6Io4h40qQlG+xGmtMxfbnH/K7TAtt8JQWVQK+6g0UXKMeVJoyV5EkkNsErQ8pVD3bLHbA==", + "dev": true, + "engines": { + "node": "^12.17.0 || ^14.13 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/inquirer/node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "dev": true + }, + "node_modules/ip-address": { + "version": "10.0.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 12" + } + }, + "node_modules/ipaddr.js": { + "version": "2.2.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 10" + } + }, + "node_modules/is-arrayish": { + "version": "0.2.1", + "dev": true, + "license": "MIT" + }, + "node_modules/is-binary-path": { + "version": "2.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "binary-extensions": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-core-module": { + "version": "2.16.1", + "dev": true, + "license": "MIT", + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-docker": { + "version": "2.2.1", + "dev": true, + "license": "MIT", + "bin": { + "is-docker": "cli.js" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "dev": true, + "license": "MIT", + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-interactive": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/is-lambda": { + "version": "1.0.1", + "dev": true, + "license": "MIT" + }, + "node_modules/is-number": { + "version": "7.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/is-plain-obj": { + "version": "3.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-plain-object": { + "version": "2.0.4", + "dev": true, + "license": "MIT", + "dependencies": { + "isobject": "^3.0.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + 
"node_modules/is-regex": { + "version": "1.2.1", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "gopd": "^1.2.0", + "has-tostringtag": "^1.0.2", + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-stream": { + "version": "2.0.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-unicode-supported": { + "version": "0.1.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/is-what": { + "version": "3.14.1", + "dev": true, + "license": "MIT" + }, + "node_modules/is-wsl": { + "version": "2.2.0", + "dev": true, + "license": "MIT", + "dependencies": { + "is-docker": "^2.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/isarray": { + "version": "1.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/isbinaryfile": { + "version": "4.0.10", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/gjtorikian/" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "dev": true, + "license": "ISC" + }, + "node_modules/isobject": { + "version": "3.0.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/istanbul-lib-coverage": { + "version": "3.2.2", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-instrument": { + "version": "5.2.1", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "@babel/core": "^7.12.3", + "@babel/parser": "^7.14.7", + "@istanbuljs/schema": "^0.1.2", + "istanbul-lib-coverage": "^3.2.0", + "semver": "^6.3.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-instrument/node_modules/@babel/parser": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.28.4.tgz", + "integrity": "sha512-yZbBqeM6TkpP9du/I2pUZnJsRMGGvOuIrhjzC1AwHwW+6he4mni6Bp/m8ijn0iOuZuPI2BfkCoSRunpyjnrQKg==", + "dev": true, + "dependencies": { + "@babel/types": "^7.28.4" + }, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/istanbul-lib-instrument/node_modules/@babel/types": { + "version": "7.28.4", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.28.4.tgz", + "integrity": "sha512-bkFqkLhh3pMBUQQkpVgWDWq/lqzc2678eUyDlTBhRqhCHFguYYGM0Efga7tYk4TogG/3x0EEl66/OQ+WGbWB/Q==", + "dev": true, + "dependencies": { + "@babel/helper-string-parser": "^7.27.1", + "@babel/helper-validator-identifier": "^7.27.1" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/istanbul-lib-instrument/node_modules/semver": { + "version": "6.3.1", + "dev": true, + "license": "ISC", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/istanbul-lib-report": { + "version": "3.0.1", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "istanbul-lib-coverage": "^3.0.0", + "make-dir": "^4.0.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/istanbul-lib-source-maps": { + "version": "4.0.1", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "debug": "^4.1.1", + "istanbul-lib-coverage": "^3.0.0", + "source-map": "^0.6.1" + }, + "engines": { + 
"node": ">=10" + } + }, + "node_modules/istanbul-lib-source-maps/node_modules/source-map": { + "version": "0.6.1", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/jackspeak": { + "version": "3.4.3", + "dev": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "@isaacs/cliui": "^8.0.2" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "optionalDependencies": { + "@pkgjs/parseargs": "^0.11.0" + } + }, + "node_modules/jasmine-core": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/jasmine-core/-/jasmine-core-5.1.2.tgz", + "integrity": "sha512-2oIUMGn00FdUiqz6epiiJr7xcFyNYj3rDcfmnzfkBnHyBQ3cBQUs4mmyGsOb7TTLb9kxk7dBcmEmqhDKkBoDyA==", + "dev": true, + "peer": true + }, + "node_modules/jest-worker": { + "version": "27.5.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node": "*", + "merge-stream": "^2.0.0", + "supports-color": "^8.0.0" + }, + "engines": { + "node": ">= 10.13.0" + } + }, + "node_modules/jest-worker/node_modules/supports-color": { + "version": "8.1.1", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" + } + }, + "node_modules/js-tokens": { + "version": "4.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/jsesc": { + "version": "3.1.0", + "dev": true, + "license": "MIT", + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/json-parse-even-better-errors": { + "version": "3.0.2", + "dev": true, + "license": "MIT", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/json-schema-traverse": { + "version": "1.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/json5": { + "version": "2.2.3", + "dev": true, + "license": "MIT", + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/jsonc-parser": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/jsonc-parser/-/jsonc-parser-3.2.1.tgz", + "integrity": "sha512-AilxAyFOAcK5wA1+LeaySVBrHsGQvUFCDWXKpZjzaL0PqW+xfBOttn8GNtWKFWqneyMZj41MWF9Kl6iPWLwgOA==", + "dev": true + }, + "node_modules/jsonfile": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "optionalDependencies": { + "graceful-fs": "^4.1.6" + } + }, + "node_modules/jsonparse": { + "version": "1.3.1", + "dev": true, + "engines": [ + "node >= 0.2.0" + ], + "license": "MIT" + }, + "node_modules/karma": { + "version": "6.4.4", + "resolved": "https://registry.npmjs.org/karma/-/karma-6.4.4.tgz", + "integrity": "sha512-LrtUxbdvt1gOpo3gxG+VAJlJAEMhbWlM4YrFQgql98FwF7+K8K12LYO4hnDdUkNjeztYrOXEMqgTajSWgmtI/w==", + "dev": true, + "peer": true, + "dependencies": { + "@colors/colors": "1.5.0", + "body-parser": "^1.19.0", + "braces": "^3.0.2", + "chokidar": "^3.5.1", + "connect": "^3.7.0", + "di": "^0.0.1", + "dom-serialize": "^2.2.1", + "glob": "^7.1.7", + "graceful-fs": "^4.2.6", + "http-proxy": "^1.18.1", + "isbinaryfile": "^4.0.8", + "lodash": "^4.17.21", + "log4js": "^6.4.1", + "mime": "^2.5.2", + "minimatch": "^3.0.4", + "mkdirp": "^0.5.5", + "qjobs": "^1.2.0", + "range-parser": "^1.2.1", + "rimraf": "^3.0.2", + "socket.io": "^4.7.2", + "source-map": "^0.6.1", + "tmp": "^0.2.1", + "ua-parser-js": "^0.7.30", + "yargs": "^16.1.1" + }, + "bin": { + "karma": "bin/karma" + }, + "engines": { + "node": ">= 10" + } + }, + "node_modules/karma-chrome-launcher": { + "version": "3.2.0", 
+ "dev": true, + "license": "MIT", + "dependencies": { + "which": "^1.2.1" + } + }, + "node_modules/karma-coverage": { + "version": "2.2.1", + "dev": true, + "license": "MIT", + "dependencies": { + "istanbul-lib-coverage": "^3.2.0", + "istanbul-lib-instrument": "^5.1.0", + "istanbul-lib-report": "^3.0.0", + "istanbul-lib-source-maps": "^4.0.1", + "istanbul-reports": "^3.0.5", + "minimatch": "^3.0.4" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/karma-coverage/node_modules/istanbul-reports": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.2.0.tgz", + "integrity": "sha512-HGYWWS/ehqTV3xN10i23tkPkpH46MLCIMFNCaaKNavAXTF1RkqxawEPtnjnGZ6XKSInBKkiOA5BKS+aZiY3AvA==", + "dev": true, + "dependencies": { + "html-escaper": "^2.0.0", + "istanbul-lib-report": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/karma-jasmine": { + "version": "5.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "jasmine-core": "^4.1.0" + }, + "engines": { + "node": ">=12" + }, + "peerDependencies": { + "karma": "^6.0.0" + } + }, + "node_modules/karma-jasmine-html-reporter": { + "version": "2.1.0", + "dev": true, + "license": "MIT", + "peerDependencies": { + "jasmine-core": "^4.0.0 || ^5.0.0", + "karma": "^6.0.0", + "karma-jasmine": "^5.0.0" + } + }, + "node_modules/karma-jasmine/node_modules/jasmine-core": { + "version": "4.6.1", + "resolved": "https://registry.npmjs.org/jasmine-core/-/jasmine-core-4.6.1.tgz", + "integrity": "sha512-VYz/BjjmC3klLJlLwA4Kw8ytk0zDSmbbDLNs794VnWmkcCB7I9aAL/D48VNQtmITyPvea2C3jdUMfc3kAoy0PQ==", + "dev": true + }, + "node_modules/karma-source-map-support": { + "version": "1.4.0", + "dev": true, + "license": "MIT", + "dependencies": { + "source-map-support": "^0.5.5" + } + }, + "node_modules/karma/node_modules/cliui": { + "version": "7.0.4", + "resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz", + "integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==", + "dev": true, + "dependencies": { + "string-width": "^4.2.0", + "strip-ansi": "^6.0.0", + "wrap-ansi": "^7.0.0" + } + }, + "node_modules/karma/node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==", + "dev": true + }, + "node_modules/karma/node_modules/log4js": { + "version": "6.9.1", + "resolved": "https://registry.npmjs.org/log4js/-/log4js-6.9.1.tgz", + "integrity": "sha512-1somDdy9sChrr9/f4UlzhdaGfDR2c/SaD2a4T7qEkG4jTS57/B3qmnjLYePwQ8cqWnUHZI0iAKxMBpCZICiZ2g==", + "dev": true, + "dependencies": { + "date-format": "^4.0.14", + "debug": "^4.3.4", + "flatted": "^3.2.7", + "rfdc": "^1.3.0", + "streamroller": "^3.1.5" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/karma/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/karma/node_modules/tmp": { + "version": "0.2.5", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.5.tgz", + "integrity": "sha512-voyz6MApa1rQGUxT3E+BK7/ROe8itEx7vD8/HEvt4xwXucvQ5G5oeEiHkmHZJuBO21RpOf+YYm9MOivj709jow==", + "dev": true, + "engines": { + "node": ">=14.14" + } + }, + 
"node_modules/karma/node_modules/wrap-ansi": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + "dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/karma/node_modules/yargs": { + "version": "16.2.0", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz", + "integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==", + "dev": true, + "dependencies": { + "cliui": "^7.0.2", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.0", + "y18n": "^5.0.5", + "yargs-parser": "^20.2.2" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/karma/node_modules/yargs-parser": { + "version": "20.2.9", + "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz", + "integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==", + "dev": true, + "engines": { + "node": ">=10" + } + }, + "node_modules/kind-of": { + "version": "6.0.3", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/klona": { + "version": "2.0.6", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/launch-editor": { + "version": "2.11.1", + "dev": true, + "license": "MIT", + "dependencies": { + "picocolors": "^1.1.1", + "shell-quote": "^1.8.3" + } + }, + "node_modules/less": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/less/-/less-4.2.0.tgz", + "integrity": "sha512-P3b3HJDBtSzsXUl0im2L7gTO5Ubg8mEN6G8qoTS77iXxXX4Hvu4Qj540PZDvQ8V6DmX6iXo98k7Md0Cm1PrLaA==", + "dev": true, + "peer": true, + "dependencies": { + "copy-anything": "^2.0.1", + "parse-node-version": "^1.0.1", + "tslib": "^2.3.0" + }, + "bin": { + "lessc": "bin/lessc" + }, + "engines": { + "node": ">=6" + }, + "optionalDependencies": { + "errno": "^0.1.1", + "graceful-fs": "^4.1.2", + "image-size": "~0.5.0", + "make-dir": "^2.1.0", + "mime": "^1.4.1", + "needle": "^3.1.0", + "source-map": "~0.6.0" + } + }, + "node_modules/less-loader": { + "version": "11.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "klona": "^2.0.4" + }, + "engines": { + "node": ">= 14.15.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependencies": { + "less": "^3.5.0 || ^4.0.0", + "webpack": "^5.0.0" + } + }, + "node_modules/less/node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "dev": true, + "optional": true, + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/less/node_modules/make-dir": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/make-dir/-/make-dir-2.1.0.tgz", + "integrity": "sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA==", + "dev": true, + "optional": true, + "dependencies": { + "pify": "^4.0.1", + "semver": "^5.6.0" + }, + "engines": { + 
"node": ">=6" + } + }, + "node_modules/less/node_modules/mime": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz", + "integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==", + "dev": true, + "optional": true, + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/less/node_modules/needle": { + "version": "3.3.1", + "resolved": "https://registry.npmjs.org/needle/-/needle-3.3.1.tgz", + "integrity": "sha512-6k0YULvhpw+RoLNiQCRKOl09Rv1dPLr8hHnVjHqdolKwDrdNyk+Hmrthi4lIGPPz3r39dLx0hsF5s40sZ3Us4Q==", + "dev": true, + "optional": true, + "dependencies": { + "iconv-lite": "^0.6.3", + "sax": "^1.2.4" + }, + "bin": { + "needle": "bin/needle" + }, + "engines": { + "node": ">= 4.4.x" + } + }, + "node_modules/less/node_modules/semver": { + "version": "5.7.2", + "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", + "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==", + "dev": true, + "optional": true, + "bin": { + "semver": "bin/semver" + } + }, + "node_modules/less/node_modules/source-map": { + "version": "0.6.1", + "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz", + "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g==", + "dev": true, + "optional": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/license-webpack-plugin": { + "version": "4.0.2", + "dev": true, + "license": "ISC", + "dependencies": { + "webpack-sources": "^3.0.0" + }, + "peerDependenciesMeta": { + "webpack": { + "optional": true + }, + "webpack-sources": { + "optional": true + } + } + }, + "node_modules/license-webpack-plugin/node_modules/webpack-sources": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.3.3.tgz", + "integrity": "sha512-yd1RBzSGanHkitROoPFd6qsrxt+oFhg/129YzheDGqeustzX0vTZJZsSsQjVQC4yzBQ56K55XU8gaNCtIzOnTg==", + "dev": true, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/lines-and-columns": { + "version": "1.2.4", + "dev": true, + "license": "MIT" + }, + "node_modules/loader-runner": { + "version": "4.3.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6.11.5" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + } + }, + "node_modules/loader-utils": { + "version": "3.2.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 12.13.0" + } + }, + "node_modules/locate-path": { + "version": "5.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/lodash.debounce": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz", + "integrity": "sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==", + "dev": true + }, + "node_modules/log-symbols": { + "version": "4.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "chalk": "^4.1.0", + "is-unicode-supported": "^0.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/lru-cache": { + "version": "5.1.1", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^3.0.2" + } + }, + "node_modules/magic-string": { + "version": "0.30.8", + "dev": true, + "license": 
"MIT", + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.4.15" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/make-dir": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "semver": "^7.5.3" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/make-fetch-happen": { + "version": "13.0.1", + "dev": true, + "license": "ISC", + "dependencies": { + "@npmcli/agent": "^2.0.0", + "cacache": "^18.0.0", + "http-cache-semantics": "^4.1.1", + "is-lambda": "^1.0.1", + "minipass": "^7.0.2", + "minipass-fetch": "^3.0.0", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.3", + "proc-log": "^4.2.0", + "promise-retry": "^2.0.1", + "ssri": "^10.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/make-fetch-happen/node_modules/proc-log": { + "version": "4.2.0", + "dev": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/math-intrinsics": { + "version": "1.1.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/media-typer": { + "version": "0.3.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/merge-descriptors": { + "version": "1.0.3", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/merge-stream": { + "version": "2.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/merge2": { + "version": "1.4.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 8" + } + }, + "node_modules/methods": { + "version": "1.1.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/micromatch": { + "version": "4.0.8", + "dev": true, + "license": "MIT", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/micromatch/node_modules/picomatch": { + "version": "2.3.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/mime": { + "version": "2.6.0", + "dev": true, + "license": "MIT", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/mime-db": { + "version": "1.52.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mime-types": { + "version": "2.1.35", + "dev": true, + "license": "MIT", + "dependencies": { + "mime-db": "1.52.0" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/mimic-fn": { + "version": "2.1.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/mini-css-extract-plugin": { + "version": "2.8.1", + "dev": true, + "license": "MIT", + "dependencies": { + "schema-utils": "^4.0.0", + "tapable": "^2.2.1" + }, + "engines": { + "node": ">= 12.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependencies": { + "webpack": "^5.0.0" + } + }, + "node_modules/minimalistic-assert": { + "version": "1.0.1", + "dev": true, + "license": "ISC" + }, + "node_modules/minimatch": { + "version": "3.1.2", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/minimist": { + "version": 
"1.2.8", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/minipass": { + "version": "7.1.2", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/minipass-collect": { + "version": "2.0.1", + "dev": true, + "license": "ISC", + "dependencies": { + "minipass": "^7.0.3" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/minipass-fetch": { + "version": "3.0.5", + "dev": true, + "license": "MIT", + "dependencies": { + "minipass": "^7.0.3", + "minipass-sized": "^1.0.3", + "minizlib": "^2.1.2" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "optionalDependencies": { + "encoding": "^0.1.13" + } + }, + "node_modules/minipass-flush": { + "version": "1.0.5", + "dev": true, + "license": "ISC", + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/minipass-flush/node_modules/minipass": { + "version": "3.3.6", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/minipass-flush/node_modules/yallist": { + "version": "4.0.0", + "dev": true, + "license": "ISC" + }, + "node_modules/minipass-json-stream": { + "version": "1.0.2", + "dev": true, + "license": "MIT", + "dependencies": { + "jsonparse": "^1.3.1", + "minipass": "^3.0.0" + } + }, + "node_modules/minipass-json-stream/node_modules/minipass": { + "version": "3.3.6", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/minipass-json-stream/node_modules/yallist": { + "version": "4.0.0", + "dev": true, + "license": "ISC" + }, + "node_modules/minipass-pipeline": { + "version": "1.2.4", + "dev": true, + "license": "ISC", + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/minipass-pipeline/node_modules/minipass": { + "version": "3.3.6", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/minipass-pipeline/node_modules/yallist": { + "version": "4.0.0", + "dev": true, + "license": "ISC" + }, + "node_modules/minipass-sized": { + "version": "1.0.3", + "dev": true, + "license": "ISC", + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/minipass-sized/node_modules/minipass": { + "version": "3.3.6", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/minipass-sized/node_modules/yallist": { + "version": "4.0.0", + "dev": true, + "license": "ISC" + }, + "node_modules/minizlib": { + "version": "2.1.2", + "dev": true, + "license": "MIT", + "dependencies": { + "minipass": "^3.0.0", + "yallist": "^4.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/minizlib/node_modules/minipass": { + "version": "3.3.6", + "dev": true, + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/minizlib/node_modules/yallist": { + "version": "4.0.0", + "dev": true, + "license": "ISC" + }, + "node_modules/mkdirp": { + "version": "0.5.6", + "dev": true, + "license": "MIT", + "dependencies": { + "minimist": "^1.2.6" + }, + "bin": { + "mkdirp": "bin/cmd.js" + } + }, + "node_modules/mrmime": { + "version": "2.0.0", + "dev": true, + "license": "MIT", + 
"engines": { + "node": ">=10" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "dev": true, + "license": "MIT" + }, + "node_modules/multicast-dns": { + "version": "7.2.5", + "dev": true, + "license": "MIT", + "dependencies": { + "dns-packet": "^5.2.2", + "thunky": "^1.0.2" + }, + "bin": { + "multicast-dns": "cli.js" + } + }, + "node_modules/mute-stream": { + "version": "1.0.0", + "dev": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/negotiator": { + "version": "0.6.4", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/node-addon-api": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/node-addon-api/-/node-addon-api-3.2.1.tgz", + "integrity": "sha512-mmcei9JghVNDYydghQmeDX8KoAm0FAiYyIcUt/N4nhyAipB17pllZQDOJD2fotxABnt4Mdz+dKTO7eftLg4d0A==", + "dev": true, + "optional": true + }, + "node_modules/node-gyp-build": { + "version": "4.8.4", + "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.8.4.tgz", + "integrity": "sha512-LA4ZjwlnUblHVgq0oBF3Jl/6h/Nvs5fzBLwdEF4nuxnFdsfajde4WfxtJr3CaiH+F6ewcIB/q4jQ4UzPyid+CQ==", + "dev": true, + "optional": true, + "bin": { + "node-gyp-build": "bin.js", + "node-gyp-build-optional": "optional.js", + "node-gyp-build-test": "build-test.js" + } + }, + "node_modules/node-releases": { + "version": "2.0.25", + "dev": true, + "license": "MIT" + }, + "node_modules/nopt": { + "version": "7.2.1", + "resolved": "https://registry.npmjs.org/nopt/-/nopt-7.2.1.tgz", + "integrity": "sha512-taM24ViiimT/XntxbPyJQzCG+p4EKOpgD3mxFwW38mGjVUrfERQOeY4EDHjdnptttfHuHQXFx+lTP08Q+mLa/w==", + "dev": true, + "dependencies": { + "abbrev": "^2.0.0" + }, + "bin": { + "nopt": "bin/nopt.js" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/normalize-package-data": { + "version": "6.0.2", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "hosted-git-info": "^7.0.0", + "semver": "^7.3.5", + "validate-npm-package-license": "^3.0.4" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/normalize-range": { + "version": "0.1.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/npm-bundled": { + "version": "3.0.1", + "dev": true, + "license": "ISC", + "dependencies": { + "npm-normalize-package-bin": "^3.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm-install-checks": { + "version": "6.3.0", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "semver": "^7.1.1" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm-normalize-package-bin": { + "version": "3.0.1", + "dev": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm-package-arg": { + "version": "11.0.1", + "dev": true, + "license": "ISC", + "dependencies": { + "hosted-git-info": "^7.0.0", + "proc-log": "^3.0.0", + "semver": "^7.3.5", + "validate-npm-package-name": "^5.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/npm-packlist": { + "version": "8.0.2", + "dev": true, + "license": "ISC", + "dependencies": { + "ignore-walk": "^6.0.4" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm-pick-manifest": 
{ + "version": "9.0.0", + "dev": true, + "license": "ISC", + "dependencies": { + "npm-install-checks": "^6.0.0", + "npm-normalize-package-bin": "^3.0.0", + "npm-package-arg": "^11.0.0", + "semver": "^7.3.5" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/npm-registry-fetch": { + "version": "16.2.1", + "dev": true, + "license": "ISC", + "dependencies": { + "@npmcli/redact": "^1.1.0", + "make-fetch-happen": "^13.0.0", + "minipass": "^7.0.2", + "minipass-fetch": "^3.0.0", + "minipass-json-stream": "^1.0.1", + "minizlib": "^2.1.2", + "npm-package-arg": "^11.0.0", + "proc-log": "^4.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/npm-registry-fetch/node_modules/proc-log": { + "version": "4.2.0", + "dev": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/npm-run-path": { + "version": "4.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "path-key": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/nth-check": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz", + "integrity": "sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==", + "dev": true, + "dependencies": { + "boolbase": "^1.0.0" + }, + "funding": { + "url": "https://github.com/fb55/nth-check?sponsor=1" + } + }, + "node_modules/object-assign": { + "version": "4.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/obuf": { + "version": "1.1.2", + "dev": true, + "license": "MIT" + }, + "node_modules/on-finished": { + "version": "2.4.1", + "dev": true, + "license": "MIT", + "dependencies": { + "ee-first": "1.1.1" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/on-headers": { + "version": "1.1.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/once": { + "version": "1.4.0", + "dev": true, + "license": "ISC", + "dependencies": { + "wrappy": "1" + } + }, + "node_modules/onetime": { + "version": "5.1.2", + "dev": true, + "license": "MIT", + "dependencies": { + "mimic-fn": "^2.1.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/open": { + "version": "8.4.2", + "dev": true, + "license": "MIT", + "dependencies": { + "define-lazy-prop": "^2.0.0", + "is-docker": "^2.1.1", + "is-wsl": "^2.2.0" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/ora": { + "version": "5.4.1", + "dev": true, + "license": "MIT", + "dependencies": { + "bl": "^4.1.0", + "chalk": "^4.1.0", + "cli-cursor": "^3.1.0", + "cli-spinners": "^2.5.0", + "is-interactive": "^1.0.0", + "is-unicode-supported": "^0.1.0", + "log-symbols": "^4.1.0", + "strip-ansi": "^6.0.0", + "wcwidth": "^1.0.1" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/os-tmpdir": { + "version": "1.0.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/p-limit": { + "version": "2.3.0", + "dev": true, + "license": "MIT", + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "4.1.0", + "dev": true, + "license": "MIT", + 
"dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/p-map": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "aggregate-error": "^3.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-retry": { + "version": "4.6.2", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/retry": "0.12.0", + "retry": "^0.13.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/p-retry/node_modules/retry": { + "version": "0.13.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/p-try": { + "version": "2.2.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/package-json-from-dist": { + "version": "1.0.1", + "dev": true, + "license": "BlueOak-1.0.0" + }, + "node_modules/pacote": { + "version": "17.0.6", + "dev": true, + "license": "ISC", + "dependencies": { + "@npmcli/git": "^5.0.0", + "@npmcli/installed-package-contents": "^2.0.1", + "@npmcli/promise-spawn": "^7.0.0", + "@npmcli/run-script": "^7.0.0", + "cacache": "^18.0.0", + "fs-minipass": "^3.0.0", + "minipass": "^7.0.2", + "npm-package-arg": "^11.0.0", + "npm-packlist": "^8.0.0", + "npm-pick-manifest": "^9.0.0", + "npm-registry-fetch": "^16.0.0", + "proc-log": "^3.0.0", + "promise-retry": "^2.0.1", + "read-package-json": "^7.0.0", + "read-package-json-fast": "^3.0.0", + "sigstore": "^2.2.0", + "ssri": "^10.0.0", + "tar": "^6.1.11" + }, + "bin": { + "pacote": "lib/bin.js" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/pacote/node_modules/mkdirp": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-1.0.4.tgz", + "integrity": "sha512-vVqVZQyf3WLx2Shd0qJ9xuvqgAyKPLAiqITEtqW0oIUjzo3PePDd6fW9iFz30ef7Ysp/oiWqbhszeGWW2T6Gzw==", + "dev": true, + "bin": { + "mkdirp": "bin/cmd.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/pacote/node_modules/tar": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/tar/-/tar-6.2.1.tgz", + "integrity": "sha512-DZ4yORTwrbTj/7MZYq2w+/ZFdI6OZ/f9SFHR+71gIVUZhOQPHzVCLpvRnPgyaMpfWxxk/4ONva3GQSyNIKRv6A==", + "dev": true, + "dependencies": { + "chownr": "^2.0.0", + "fs-minipass": "^2.0.0", + "minipass": "^5.0.0", + "minizlib": "^2.1.1", + "mkdirp": "^1.0.3", + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/pacote/node_modules/tar/node_modules/fs-minipass": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-2.1.0.tgz", + "integrity": "sha512-V/JgOLFCS+R6Vcq0slCuaeWEdNC3ouDlJMNIsacH2VtALiu9mV4LPrHc5cDl8k5aw6J8jwgWWpiTo5RYhmIzvg==", + "dev": true, + "dependencies": { + "minipass": "^3.0.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/pacote/node_modules/tar/node_modules/fs-minipass/node_modules/minipass": { + "version": "3.3.6", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-3.3.6.tgz", + "integrity": "sha512-DxiNidxSEK+tHG6zOIklvNOwm3hvCrbUrdtzY74U6HKTJxvIDfOUL5W5P2Ghd3DTkhhKPYGqeNUIh5qcM4YBfw==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/pacote/node_modules/tar/node_modules/minipass": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-5.0.0.tgz", + "integrity": "sha512-3FnjYuehv9k6ovOEbyOswadCDPX1piCfhV8ncmYtHOjuPwylVWsghTLo7rabjC3Rx5xD4HDx8Wm1xnMF7S5qFQ==", + 
"dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/pacote/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/parent-module": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "callsites": "^3.0.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/parse-json": { + "version": "5.2.0", + "dev": true, + "license": "MIT", + "dependencies": { + "@babel/code-frame": "^7.0.0", + "error-ex": "^1.3.1", + "json-parse-even-better-errors": "^2.3.0", + "lines-and-columns": "^1.1.6" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/parse-json/node_modules/json-parse-even-better-errors": { + "version": "2.3.1", + "dev": true, + "license": "MIT" + }, + "node_modules/parse-node-version": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/parse5-html-rewriting-stream": { + "version": "7.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "entities": "^4.3.0", + "parse5": "^7.0.0", + "parse5-sax-parser": "^7.0.0" + }, + "funding": { + "url": "https://github.com/inikulin/parse5?sponsor=1" + } + }, + "node_modules/parse5-html-rewriting-stream/node_modules/entities": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "dev": true, + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/parse5-html-rewriting-stream/node_modules/parse5": { + "version": "7.3.0", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.3.0.tgz", + "integrity": "sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw==", + "dev": true, + "dependencies": { + "entities": "^6.0.0" + }, + "funding": { + "url": "https://github.com/inikulin/parse5?sponsor=1" + } + }, + "node_modules/parse5-html-rewriting-stream/node_modules/parse5/node_modules/entities": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/entities/-/entities-6.0.1.tgz", + "integrity": "sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g==", + "dev": true, + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/parse5-sax-parser": { + "version": "7.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "parse5": "^7.0.0" + }, + "funding": { + "url": "https://github.com/inikulin/parse5?sponsor=1" + } + }, + "node_modules/parse5-sax-parser/node_modules/entities": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/entities/-/entities-6.0.1.tgz", + "integrity": "sha512-aN97NXWF6AWBTahfVOIrB/NShkzi5H7F9r1s9mD3cDj4Ko5f2qhhVoYMibXF7GlLveb/D2ioWay8lxI97Ven3g==", + "dev": true, + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/parse5-sax-parser/node_modules/parse5": { + "version": "7.3.0", + "resolved": "https://registry.npmjs.org/parse5/-/parse5-7.3.0.tgz", + "integrity": 
"sha512-IInvU7fabl34qmi9gY8XOVxhYyMyuH2xUNpb2q8/Y+7552KlejkRvqvD19nMoUW/uQGGbqNpA6Tufu5FL5BZgw==", + "dev": true, + "dependencies": { + "entities": "^6.0.0" + }, + "funding": { + "url": "https://github.com/inikulin/parse5?sponsor=1" + } + }, + "node_modules/parseurl": { + "version": "1.3.3", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/path-key": { + "version": "3.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-parse": { + "version": "1.0.7", + "dev": true, + "license": "MIT" + }, + "node_modules/path-scurry": { + "version": "1.11.1", + "dev": true, + "license": "BlueOak-1.0.0", + "dependencies": { + "lru-cache": "^10.2.0", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" + }, + "engines": { + "node": ">=16 || 14 >=14.18" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/path-scurry/node_modules/lru-cache": { + "version": "10.4.3", + "dev": true, + "license": "ISC" + }, + "node_modules/path-to-regexp": { + "version": "0.1.12", + "dev": true, + "license": "MIT" + }, + "node_modules/path-type": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/picocolors": { + "version": "1.1.1", + "dev": true, + "license": "ISC" + }, + "node_modules/picomatch": { + "version": "4.0.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pify": { + "version": "4.0.1", + "dev": true, + "license": "MIT", + "optional": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/piscina": { + "version": "4.4.0", + "dev": true, + "license": "MIT", + "optionalDependencies": { + "nice-napi": "^1.0.2" + } + }, + "node_modules/piscina/node_modules/nice-napi": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/nice-napi/-/nice-napi-1.0.2.tgz", + "integrity": "sha512-px/KnJAJZf5RuBGcfD+Sp2pAKq0ytz8j+1NehvgIGFkvtvFrDM3T8E4x/JJODXK9WZow8RRGrbA9QQ3hs+pDhA==", + "dev": true, + "hasInstallScript": true, + "optional": true, + "os": [ + "!win32" + ], + "dependencies": { + "node-addon-api": "^3.0.0", + "node-gyp-build": "^4.2.2" + } + }, + "node_modules/pkg-dir": { + "version": "7.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "find-up": "^6.3.0" + }, + "engines": { + "node": ">=14.16" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/pkg-dir/node_modules/find-up": { + "version": "6.3.0", + "dev": true, + "license": "MIT", + "dependencies": { + "locate-path": "^7.1.0", + "path-exists": "^5.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/pkg-dir/node_modules/locate-path": { + "version": "7.2.0", + "dev": true, + "license": "MIT", + "dependencies": { + "p-locate": "^6.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/pkg-dir/node_modules/p-limit": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "yocto-queue": 
"^1.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/pkg-dir/node_modules/p-locate": { + "version": "6.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "p-limit": "^4.0.0" + }, + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/pkg-dir/node_modules/path-exists": { + "version": "5.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": "^12.20.0 || ^14.13.1 || >=16.0.0" + } + }, + "node_modules/playwright": { + "version": "1.56.1", + "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.56.1.tgz", + "integrity": "sha512-aFi5B0WovBHTEvpM3DzXTUaeN6eN0qWnTkKx4NQaH4Wvcmc153PdaY2UBdSYKaGYw+UyWXSVyxDUg5DoPEttjw==", + "dev": true, + "dependencies": { + "playwright-core": "1.56.1" + }, + "bin": { + "playwright": "cli.js" + }, + "engines": { + "node": ">=18" + }, + "optionalDependencies": { + "fsevents": "2.3.2" + } + }, + "node_modules/playwright-core": { + "version": "1.56.1", + "resolved": "https://registry.npmjs.org/playwright-core/-/playwright-core-1.56.1.tgz", + "integrity": "sha512-hutraynyn31F+Bifme+Ps9Vq59hKuUCz7H1kDOcBs+2oGguKkWTU50bBWrtz34OUWmIwpBTWDxaRPXrIXkgvmQ==", + "dev": true, + "bin": { + "playwright-core": "cli.js" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/postcss": { + "version": "8.4.35", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.35.tgz", + "integrity": "sha512-u5U8qYpBCpN13BsiEB0CbR1Hhh4Gc0zLFuedrHJKMctHCHAGrMdG0PRM/KErzAL3CU6/eckEtmHNB3x6e3c0vA==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "peer": true, + "dependencies": { + "nanoid": "^3.3.7", + "picocolors": "^1.0.0", + "source-map-js": "^1.0.2" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/postcss-loader": { + "version": "8.1.1", + "dev": true, + "license": "MIT", + "dependencies": { + "cosmiconfig": "^9.0.0", + "jiti": "^1.20.0", + "semver": "^7.5.4" + }, + "engines": { + "node": ">= 18.12.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependencies": { + "@rspack/core": "0.x || 1.x", + "postcss": "^7.0.0 || ^8.0.1", + "webpack": "^5.0.0" + }, + "peerDependenciesMeta": { + "@rspack/core": { + "optional": true + }, + "webpack": { + "optional": true + } + } + }, + "node_modules/postcss-loader/node_modules/jiti": { + "version": "1.21.7", + "resolved": "https://registry.npmjs.org/jiti/-/jiti-1.21.7.tgz", + "integrity": "sha512-/imKNG4EbWNrVjoNC/1H5/9GFy+tqjGBHCaSsN+P2RnPqjsLmv6UD3Ej+Kj8nBWaRAwyk7kK5ZUc+OEatnTR3A==", + "dev": true, + "bin": { + "jiti": "bin/jiti.js" + } + }, + "node_modules/postcss-media-query-parser": { + "version": "0.2.3", + "dev": true, + "license": "MIT" + }, + "node_modules/postcss-modules-extract-imports": { + "version": "3.1.0", + "dev": true, + "license": "ISC", + "engines": { + "node": "^10 || ^12 || >= 14" + }, + "peerDependencies": { + "postcss": "^8.1.0" + } + }, + "node_modules/postcss-modules-local-by-default": { + "version": "4.2.0", + "dev": true, + "license": "MIT", + "dependencies": { + "icss-utils": "^5.0.0", + "postcss-selector-parser": "^7.0.0", + "postcss-value-parser": 
"^4.1.0" + }, + "engines": { + "node": "^10 || ^12 || >= 14" + }, + "peerDependencies": { + "postcss": "^8.1.0" + } + }, + "node_modules/postcss-modules-local-by-default/node_modules/postcss-selector-parser": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.0.tgz", + "integrity": "sha512-8sLjZwK0R+JlxlYcTuVnyT2v+htpdrjDOKuMcOVdYjt52Lh8hWRYpxBPoKx/Zg+bcjc3wx6fmQevMmUztS/ccA==", + "dev": true, + "dependencies": { + "cssesc": "^3.0.0", + "util-deprecate": "^1.0.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/postcss-modules-scope": { + "version": "3.2.1", + "dev": true, + "license": "ISC", + "dependencies": { + "postcss-selector-parser": "^7.0.0" + }, + "engines": { + "node": "^10 || ^12 || >= 14" + }, + "peerDependencies": { + "postcss": "^8.1.0" + } + }, + "node_modules/postcss-modules-scope/node_modules/postcss-selector-parser": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/postcss-selector-parser/-/postcss-selector-parser-7.1.0.tgz", + "integrity": "sha512-8sLjZwK0R+JlxlYcTuVnyT2v+htpdrjDOKuMcOVdYjt52Lh8hWRYpxBPoKx/Zg+bcjc3wx6fmQevMmUztS/ccA==", + "dev": true, + "dependencies": { + "cssesc": "^3.0.0", + "util-deprecate": "^1.0.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/postcss-modules-values": { + "version": "4.0.0", + "dev": true, + "license": "ISC", + "dependencies": { + "icss-utils": "^5.0.0" + }, + "engines": { + "node": "^10 || ^12 || >= 14" + }, + "peerDependencies": { + "postcss": "^8.1.0" + } + }, + "node_modules/postcss-value-parser": { + "version": "4.2.0", + "dev": true, + "license": "MIT" + }, + "node_modules/postcss/node_modules/nanoid": { + "version": "3.3.11", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.11.tgz", + "integrity": "sha512-N8SpfPUnUp1bK+PMYW8qSWdl9U+wwNWI4QKxOYDy9JAro3WMX7p2OeVRF9v+347pnakNevPmiHhNmZ2HbFA76w==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/proc-log": { + "version": "3.0.0", + "dev": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/process-nextick-args": { + "version": "2.0.1", + "dev": true, + "license": "MIT" + }, + "node_modules/promise-inflight": { + "version": "1.0.1", + "dev": true, + "license": "ISC" + }, + "node_modules/promise-retry": { + "version": "2.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "err-code": "^2.0.2", + "retry": "^0.12.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/proxy-addr": { + "version": "2.0.7", + "dev": true, + "license": "MIT", + "dependencies": { + "forwarded": "0.2.0", + "ipaddr.js": "1.9.1" + }, + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/proxy-addr/node_modules/ipaddr.js": { + "version": "1.9.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.10" + } + }, + "node_modules/prr": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "optional": true + }, + "node_modules/punycode": { + "version": "1.4.1", + "dev": true, + "license": "MIT" + }, + "node_modules/qjobs": { + "version": "1.2.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.9" + } + }, + "node_modules/qs": { + "version": "6.13.0", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "side-channel": "^1.0.6" + }, + "engines": { + "node": ">=0.6" + 
}, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/randombytes": { + "version": "2.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "safe-buffer": "^5.1.0" + } + }, + "node_modules/range-parser": { + "version": "1.2.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/raw-body": { + "version": "2.5.2", + "dev": true, + "license": "MIT", + "dependencies": { + "bytes": "3.1.2", + "http-errors": "2.0.0", + "iconv-lite": "0.4.24", + "unpipe": "1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/read-package-json": { + "version": "7.0.1", + "dev": true, + "license": "ISC", + "dependencies": { + "glob": "^10.2.2", + "json-parse-even-better-errors": "^3.0.0", + "normalize-package-data": "^6.0.0", + "npm-normalize-package-bin": "^3.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/read-package-json-fast": { + "version": "3.0.2", + "dev": true, + "license": "ISC", + "dependencies": { + "json-parse-even-better-errors": "^3.0.0", + "npm-normalize-package-bin": "^3.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/read-package-json/node_modules/brace-expansion": { + "version": "2.0.2", + "dev": true, + "license": "MIT", + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/read-package-json/node_modules/glob": { + "version": "10.4.5", + "dev": true, + "license": "ISC", + "dependencies": { + "foreground-child": "^3.1.0", + "jackspeak": "^3.1.2", + "minimatch": "^9.0.4", + "minipass": "^7.1.2", + "package-json-from-dist": "^1.0.0", + "path-scurry": "^1.11.1" + }, + "bin": { + "glob": "dist/esm/bin.mjs" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/read-package-json/node_modules/minimatch": { + "version": "9.0.5", + "dev": true, + "license": "ISC", + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/readdirp": { + "version": "3.6.0", + "dev": true, + "license": "MIT", + "dependencies": { + "picomatch": "^2.2.1" + }, + "engines": { + "node": ">=8.10.0" + } + }, + "node_modules/readdirp/node_modules/picomatch": { + "version": "2.3.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/reflect-metadata": { + "version": "0.2.2", + "dev": true, + "license": "Apache-2.0" + }, + "node_modules/regenerate": { + "version": "1.4.2", + "dev": true, + "license": "MIT" + }, + "node_modules/regenerator-runtime": { + "version": "0.14.1", + "dev": true, + "license": "MIT" + }, + "node_modules/regex-parser": { + "version": "2.3.1", + "dev": true, + "license": "MIT" + }, + "node_modules/regexpu-core": { + "version": "6.4.0", + "dev": true, + "license": "MIT", + "dependencies": { + "regenerate": "^1.4.2", + "regenerate-unicode-properties": "^10.2.2", + "regjsgen": "^0.8.0", + "regjsparser": "^0.13.0", + "unicode-match-property-ecmascript": "^2.0.0", + 
"unicode-match-property-value-ecmascript": "^2.2.1" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/regexpu-core/node_modules/regenerate-unicode-properties": { + "version": "10.2.2", + "resolved": "https://registry.npmjs.org/regenerate-unicode-properties/-/regenerate-unicode-properties-10.2.2.tgz", + "integrity": "sha512-m03P+zhBeQd1RGnYxrGyDAPpWX/epKirLrp8e3qevZdVkKtnCrjjWczIbYc8+xd6vcTStVlqfycTx1KR4LOr0g==", + "dev": true, + "dependencies": { + "regenerate": "^1.4.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/regjsgen": { + "version": "0.8.0", + "dev": true, + "license": "MIT" + }, + "node_modules/regjsparser": { + "version": "0.13.0", + "dev": true, + "license": "BSD-2-Clause", + "dependencies": { + "jsesc": "~3.1.0" + }, + "bin": { + "regjsparser": "bin/parser" + } + }, + "node_modules/require-directory": { + "version": "2.1.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/require-from-string": { + "version": "2.0.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/requires-port": { + "version": "1.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/resolve": { + "version": "1.22.8", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.8.tgz", + "integrity": "sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==", + "dev": true, + "dependencies": { + "is-core-module": "^2.13.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve-from": { + "version": "5.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/resolve-url-loader": { + "version": "5.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "adjust-sourcemap-loader": "^4.0.0", + "convert-source-map": "^1.7.0", + "loader-utils": "^2.0.0", + "postcss": "^8.2.14", + "source-map": "0.6.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/resolve-url-loader/node_modules/loader-utils": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/loader-utils/-/loader-utils-2.0.4.tgz", + "integrity": "sha512-xXqpXoINfFhgua9xiqD8fPFHgkoq1mmmpE92WlDbm9rNRd/EbRb+Gqf908T2DMfuHjjJlksiK2RbHVOdD/MqSw==", + "dev": true, + "dependencies": { + "big.js": "^5.2.2", + "emojis-list": "^3.0.0", + "json5": "^2.1.2" + }, + "engines": { + "node": ">=8.9.0" + } + }, + "node_modules/resolve-url-loader/node_modules/source-map": { + "version": "0.6.1", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/restore-cursor": { + "version": "3.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "onetime": "^5.1.0", + "signal-exit": "^3.0.2" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/retry": { + "version": "0.12.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, + "node_modules/reusify": { + "version": "1.1.0", + "dev": true, + "license": "MIT", + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/rfdc": { + "version": "1.4.1", + "dev": true, + "license": "MIT" + }, + "node_modules/rimraf": { + "version": "3.0.2", + "dev": true, + "license": "ISC", + "dependencies": { + "glob": "^7.1.3" + }, + "bin": { + "rimraf": "bin.js" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + 
"node_modules/run-async": { + "version": "3.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT", + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/rxjs": { + "version": "7.8.2", + "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-7.8.2.tgz", + "integrity": "sha512-dhKf903U/PQZY6boNNtAGdWbG85WAbjT/1xYoZIC7FAY0yWapOBQVsVrDl58W86//e1VpMNBtRV4MaXfdMySFA==", + "peer": true, + "dependencies": { + "tslib": "^2.1.0" + } + }, + "node_modules/safe-buffer": { + "version": "5.2.1", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "license": "MIT" + }, + "node_modules/safe-regex-test": { + "version": "1.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "is-regex": "^1.2.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "dev": true, + "license": "MIT" + }, + "node_modules/sass": { + "version": "1.71.1", + "resolved": "https://registry.npmjs.org/sass/-/sass-1.71.1.tgz", + "integrity": "sha512-wovtnV2PxzteLlfNzbgm1tFXPLoZILYAMJtvoXXkD7/+1uP41eKkIt1ypWq5/q2uT94qHjXehEYfmjKOvjL9sg==", + "dev": true, + "peer": true, + "dependencies": { + "chokidar": ">=3.0.0 <4.0.0", + "immutable": "^4.0.0", + "source-map-js": ">=0.6.2 <2.0.0" + }, + "bin": { + "sass": "sass.js" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "node_modules/sass-loader": { + "version": "14.1.1", + "dev": true, + "license": "MIT", + "dependencies": { + "neo-async": "^2.6.2" + }, + "engines": { + "node": ">= 18.12.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependencies": { + "@rspack/core": "0.x || 1.x", + "node-sass": "^4.0.0 || ^5.0.0 || ^6.0.0 || ^7.0.0 || ^8.0.0 || ^9.0.0", + "sass": "^1.3.0", + "sass-embedded": "*", + "webpack": "^5.0.0" + }, + "peerDependenciesMeta": { + "@rspack/core": { + "optional": true + }, + "node-sass": { + "optional": true + }, + "sass": { + "optional": true + }, + "sass-embedded": { + "optional": true + }, + "webpack": { + "optional": true + } + } + }, + "node_modules/sass-loader/node_modules/neo-async": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", + "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==", + "dev": true + }, + "node_modules/sass/node_modules/immutable": { + "version": "4.3.7", + "resolved": "https://registry.npmjs.org/immutable/-/immutable-4.3.7.tgz", + "integrity": "sha512-1hqclzwYwjRDFLjcFxOM5AYkkG0rpFPpr1RLPMEuGczoS7YA8gLhy8SWXYRAA/XwfEHpfo3cw5JGioS32fnMRw==", + "dev": true + }, + "node_modules/sax": { + "version": "1.4.1", + "dev": true, + "license": "ISC", + "optional": true + }, + "node_modules/schema-utils": { + "version": "4.3.3", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/json-schema": "^7.0.9", + "ajv": "^8.9.0", + 
"ajv-formats": "^2.1.1", + "ajv-keywords": "^5.1.0" + }, + "engines": { + "node": ">= 10.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + } + }, + "node_modules/schema-utils/node_modules/ajv": { + "version": "8.17.1", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-8.17.1.tgz", + "integrity": "sha512-B/gBuNg5SiMTrPkC+A2+cW0RszwxYmn6VYxB/inlBStS5nx6xHIt/ehKRhIMhqusl7a8LjQoZnjCs5vhwxOQ1g==", + "dev": true, + "peer": true, + "dependencies": { + "fast-deep-equal": "^3.1.3", + "fast-uri": "^3.0.1", + "json-schema-traverse": "^1.0.0", + "require-from-string": "^2.0.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/schema-utils/node_modules/ajv-keywords": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-5.1.0.tgz", + "integrity": "sha512-YCS/JNFAUyr5vAuhk1DWm1CBxRHW9LbJ2ozWeemrIqpbsqKjHVxYPyi5GC0rjZIT5JxJ3virVTS8wk4i/Z+krw==", + "dev": true, + "dependencies": { + "fast-deep-equal": "^3.1.3" + }, + "peerDependencies": { + "ajv": "^8.8.2" + } + }, + "node_modules/select-hose": { + "version": "2.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/selfsigned": { + "version": "2.4.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@types/node-forge": "^1.3.0", + "node-forge": "^1" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/selfsigned/node_modules/node-forge": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/node-forge/-/node-forge-1.3.1.tgz", + "integrity": "sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA==", + "dev": true, + "engines": { + "node": ">= 6.13.0" + } + }, + "node_modules/semver": { + "version": "7.6.0", + "resolved": "https://registry.npmjs.org/semver/-/semver-7.6.0.tgz", + "integrity": "sha512-EnwXhrlwXMk9gKu5/flx5sv/an57AkRplG3hTK68W7FRDN+k+OWBj65M7719OkA82XLBxrcX0KSHj+X5COhOVg==", + "dev": true, + "dependencies": { + "lru-cache": "^6.0.0" + }, + "bin": { + "semver": "bin/semver.js" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/semver/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "dev": true, + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/semver/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "dev": true + }, + "node_modules/send": { + "version": "0.19.0", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "2.6.9", + "depd": "2.0.0", + "destroy": "1.2.0", + "encodeurl": "~1.0.2", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "fresh": "0.5.2", + "http-errors": "2.0.0", + "mime": "1.6.0", + "ms": "2.1.3", + "on-finished": "2.4.1", + "range-parser": "~1.2.1", + "statuses": "2.0.1" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/send/node_modules/debug": { + "version": "2.6.9", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/send/node_modules/debug/node_modules/ms": { + "version": "2.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/send/node_modules/mime": { + "version": "1.6.0", + 
"dev": true, + "license": "MIT", + "bin": { + "mime": "cli.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/send/node_modules/statuses": { + "version": "2.0.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/serialize-javascript": { + "version": "6.0.2", + "dev": true, + "license": "BSD-3-Clause", + "dependencies": { + "randombytes": "^2.1.0" + } + }, + "node_modules/serve-index": { + "version": "1.9.1", + "dev": true, + "license": "MIT", + "dependencies": { + "accepts": "~1.3.4", + "batch": "0.6.1", + "debug": "2.6.9", + "escape-html": "~1.0.3", + "http-errors": "~1.6.2", + "mime-types": "~2.1.17", + "parseurl": "~1.3.2" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/serve-index/node_modules/debug": { + "version": "2.6.9", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/serve-index/node_modules/depd": { + "version": "1.1.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/serve-index/node_modules/http-errors": { + "version": "1.6.3", + "dev": true, + "license": "MIT", + "dependencies": { + "depd": "~1.1.2", + "inherits": "2.0.3", + "setprototypeof": "1.1.0", + "statuses": ">= 1.4.0 < 2" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/serve-index/node_modules/inherits": { + "version": "2.0.3", + "dev": true, + "license": "ISC" + }, + "node_modules/serve-index/node_modules/ms": { + "version": "2.0.0", + "dev": true, + "license": "MIT" + }, + "node_modules/serve-index/node_modules/setprototypeof": { + "version": "1.1.0", + "dev": true, + "license": "ISC" + }, + "node_modules/serve-static": { + "version": "1.16.2", + "dev": true, + "license": "MIT", + "dependencies": { + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "parseurl": "~1.3.3", + "send": "0.19.0" + }, + "engines": { + "node": ">= 0.8.0" + } + }, + "node_modules/serve-static/node_modules/encodeurl": { + "version": "2.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/set-function-length": { + "version": "1.2.2", + "dev": true, + "license": "MIT", + "dependencies": { + "define-data-property": "^1.1.4", + "es-errors": "^1.3.0", + "function-bind": "^1.1.2", + "get-intrinsic": "^1.2.4", + "gopd": "^1.0.1", + "has-property-descriptors": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/setprototypeof": { + "version": "1.2.0", + "dev": true, + "license": "ISC" + }, + "node_modules/shallow-clone": { + "version": "3.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "kind-of": "^6.0.2" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=8" + } + }, + "node_modules/shell-quote": { + "version": "1.8.3", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel": { + "version": "1.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3", + "side-channel-list": "^1.0.0", + "side-channel-map": "^1.0.1", + "side-channel-weakmap": "^1.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": 
"https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-list": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "es-errors": "^1.3.0", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-list/node_modules/object-inspect": { + "version": "1.13.4", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", + "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-map": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-map/node_modules/object-inspect": { + "version": "1.13.4", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", + "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-weakmap": { + "version": "1.0.2", + "dev": true, + "license": "MIT", + "dependencies": { + "call-bound": "^1.0.2", + "es-errors": "^1.3.0", + "get-intrinsic": "^1.2.5", + "object-inspect": "^1.13.3", + "side-channel-map": "^1.0.1" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel-weakmap/node_modules/object-inspect": { + "version": "1.13.4", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", + "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/side-channel/node_modules/object-inspect": { + "version": "1.13.4", + "resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz", + "integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==", + "dev": true, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "dev": true, + "license": "ISC" + }, + "node_modules/sigstore": { + "version": "2.3.1", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "@sigstore/bundle": "^2.3.2", + "@sigstore/core": "^1.0.0", + "@sigstore/protobuf-specs": "^0.3.2", + "@sigstore/sign": "^2.3.2", + "@sigstore/tuf": "^2.3.4", + "@sigstore/verify": "^1.2.1" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/sigstore/node_modules/@sigstore/core": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@sigstore/core/-/core-1.1.0.tgz", + "integrity": "sha512-JzBqdVIyqm2FRQCulY6nbQzMpJJpSiJ8XXWMhtOX9eKgaXXpfNOF53lzQEjIydlStnd/eFtuC1dW4VYdD93oRg==", + "dev": true, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + 
"node_modules/sigstore/node_modules/@sigstore/protobuf-specs": { + "version": "0.3.3", + "resolved": "https://registry.npmjs.org/@sigstore/protobuf-specs/-/protobuf-specs-0.3.3.tgz", + "integrity": "sha512-RpacQhBlwpBWd7KEJsRKcBQalbV28fvkxwTOJIqhIuDysMMaJW47V4OqW30iJB9uRpqOSxxEAQFdr8tTattReQ==", + "dev": true, + "engines": { + "node": "^18.17.0 || >=20.5.0" + } + }, + "node_modules/sigstore/node_modules/@sigstore/sign": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/@sigstore/sign/-/sign-2.3.2.tgz", + "integrity": "sha512-5Vz5dPVuunIIvC5vBb0APwo7qKA4G9yM48kPWJT+OEERs40md5GoUR1yedwpekWZ4m0Hhw44m6zU+ObsON+iDA==", + "dev": true, + "dependencies": { + "@sigstore/bundle": "^2.3.2", + "@sigstore/core": "^1.0.0", + "@sigstore/protobuf-specs": "^0.3.2", + "make-fetch-happen": "^13.0.1", + "proc-log": "^4.2.0", + "promise-retry": "^2.0.1" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/sigstore/node_modules/@sigstore/verify": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@sigstore/verify/-/verify-1.2.1.tgz", + "integrity": "sha512-8iKx79/F73DKbGfRf7+t4dqrc0bRr0thdPrxAtCKWRm/F0tG71i6O1rvlnScncJLLBZHn3h8M3c1BSUAb9yu8g==", + "dev": true, + "dependencies": { + "@sigstore/bundle": "^2.3.2", + "@sigstore/core": "^1.1.0", + "@sigstore/protobuf-specs": "^0.3.2" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/sigstore/node_modules/proc-log": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/proc-log/-/proc-log-4.2.0.tgz", + "integrity": "sha512-g8+OnU/L2v+wyiVK+D5fA34J7EH8jZ8DDlvwhRCMxmMj7UCBvxiO1mGeN+36JXIKF4zevU4kRBd8lVgG9vLelA==", + "dev": true, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/slash": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/smart-buffer": { + "version": "4.2.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 6.0.0", + "npm": ">= 3.0.0" + } + }, + "node_modules/socket.io": { + "version": "4.8.1", + "dev": true, + "license": "MIT", + "dependencies": { + "accepts": "~1.3.4", + "base64id": "~2.0.0", + "cors": "~2.8.5", + "debug": "~4.3.2", + "engine.io": "~6.6.0", + "socket.io-adapter": "~2.5.2", + "socket.io-parser": "~4.2.4" + }, + "engines": { + "node": ">=10.2.0" + } + }, + "node_modules/socket.io-adapter": { + "version": "2.5.5", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "~4.3.4", + "ws": "~8.17.1" + } + }, + "node_modules/socket.io-adapter/node_modules/debug": { + "version": "4.3.7", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/socket.io-parser": { + "version": "4.2.4", + "dev": true, + "license": "MIT", + "dependencies": { + "@socket.io/component-emitter": "~3.1.0", + "debug": "~4.3.1" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/socket.io-parser/node_modules/debug": { + "version": "4.3.7", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/socket.io/node_modules/debug": { + "version": "4.3.7", + "dev": true, + "license": "MIT", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, 
+ "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/socket.io/node_modules/engine.io": { + "version": "6.6.4", + "resolved": "https://registry.npmjs.org/engine.io/-/engine.io-6.6.4.tgz", + "integrity": "sha512-ZCkIjSYNDyGn0R6ewHDtXgns/Zre/NT6Agvq1/WobF7JXgFff4SeDroKiCO3fNJreU9YG429Sc81o4w5ok/W5g==", + "dev": true, + "dependencies": { + "@types/cors": "^2.8.12", + "@types/node": ">=10.0.0", + "accepts": "~1.3.4", + "base64id": "2.0.0", + "cookie": "~0.7.2", + "cors": "~2.8.5", + "debug": "~4.3.1", + "engine.io-parser": "~5.2.1", + "ws": "~8.17.1" + }, + "engines": { + "node": ">=10.2.0" + } + }, + "node_modules/sockjs": { + "version": "0.3.24", + "dev": true, + "license": "MIT", + "dependencies": { + "faye-websocket": "^0.11.3", + "uuid": "^8.3.2", + "websocket-driver": "^0.7.4" + } + }, + "node_modules/sockjs/node_modules/uuid": { + "version": "8.3.2", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "dev": true, + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/socks": { + "version": "2.8.7", + "dev": true, + "license": "MIT", + "dependencies": { + "ip-address": "^10.0.1", + "smart-buffer": "^4.2.0" + }, + "engines": { + "node": ">= 10.0.0", + "npm": ">= 3.0.0" + } + }, + "node_modules/socks-proxy-agent": { + "version": "8.0.5", + "dev": true, + "license": "MIT", + "dependencies": { + "agent-base": "^7.1.2", + "debug": "^4.3.4", + "socks": "^2.8.3" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/source-map": { + "version": "0.7.4", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">= 8" + } + }, + "node_modules/source-map-js": { + "version": "1.2.1", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/source-map-loader": { + "version": "5.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "iconv-lite": "^0.6.3", + "source-map-js": "^1.0.2" + }, + "engines": { + "node": ">= 18.12.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependencies": { + "webpack": "^5.72.1" + } + }, + "node_modules/source-map-loader/node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "dev": true, + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/source-map-support": { + "version": "0.5.21", + "dev": true, + "license": "MIT", + "dependencies": { + "buffer-from": "^1.0.0", + "source-map": "^0.6.0" + } + }, + "node_modules/source-map-support/node_modules/source-map": { + "version": "0.6.1", + "dev": true, + "license": "BSD-3-Clause", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/spdx-correct": { + "version": "3.2.0", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "spdx-expression-parse": "^3.0.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/spdx-exceptions": { + "version": "2.5.0", + "dev": true, + "license": "CC-BY-3.0" + }, + "node_modules/spdx-expression-parse": { + "version": "3.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "spdx-exceptions": "^2.1.0", + "spdx-license-ids": "^3.0.0" + } + }, + "node_modules/spdx-license-ids": { + "version": 
"3.0.22", + "dev": true, + "license": "CC0-1.0" + }, + "node_modules/spdy": { + "version": "4.0.2", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "^4.1.0", + "handle-thing": "^2.0.0", + "http-deceiver": "^1.2.7", + "select-hose": "^2.0.0", + "spdy-transport": "^3.0.0" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/spdy-transport": { + "version": "3.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "debug": "^4.1.0", + "detect-node": "^2.0.4", + "hpack.js": "^2.1.6", + "obuf": "^1.1.2", + "readable-stream": "^3.0.6", + "wbuf": "^1.7.3" + } + }, + "node_modules/spdy-transport/node_modules/readable-stream": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", + "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", + "dev": true, + "dependencies": { + "inherits": "^2.0.3", + "string_decoder": "^1.1.1", + "util-deprecate": "^1.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==", + "dev": true + }, + "node_modules/ssri": { + "version": "10.0.6", + "dev": true, + "license": "ISC", + "dependencies": { + "minipass": "^7.0.3" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/statuses": { + "version": "1.5.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/streamroller": { + "version": "3.1.5", + "dev": true, + "license": "MIT", + "dependencies": { + "date-format": "^4.0.14", + "debug": "^4.3.4", + "fs-extra": "^8.1.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/string_decoder": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", + "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", + "dev": true, + "dependencies": { + "safe-buffer": "~5.2.0" + } + }, + "node_modules/string-width": { + "version": "4.2.3", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/string-width-cjs": { + "name": "string-width", + "version": "4.2.3", + "dev": true, + "license": "MIT", + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "6.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi-cjs": { + "name": "strip-ansi", + "version": "6.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-final-newline": { + "version": "2.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + } + }, + "node_modules/supports-color": { + "version": "7.2.0", + "dev": true, + "license": "MIT", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "engines": { + 
"node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/symbol-observable": { + "version": "4.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10" + } + }, + "node_modules/tapable": { + "version": "2.3.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=6" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + } + }, + "node_modules/terser": { + "version": "5.29.1", + "resolved": "https://registry.npmjs.org/terser/-/terser-5.29.1.tgz", + "integrity": "sha512-lZQ/fyaIGxsbGxApKmoPTODIzELy3++mXhS5hOqaAWZjQtpq/hFHAc+rm29NND1rYRxRWKcjuARNwULNXa5RtQ==", + "dev": true, + "peer": true, + "dependencies": { + "@jridgewell/source-map": "^0.3.3", + "acorn": "^8.8.2", + "commander": "^2.20.0", + "source-map-support": "~0.5.20" + }, + "bin": { + "terser": "bin/terser" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/terser-webpack-plugin": { + "version": "5.3.14", + "dev": true, + "license": "MIT", + "dependencies": { + "@jridgewell/trace-mapping": "^0.3.25", + "jest-worker": "^27.4.5", + "schema-utils": "^4.3.0", + "serialize-javascript": "^6.0.2", + "terser": "^5.31.1" + }, + "engines": { + "node": ">= 10.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependencies": { + "webpack": "^5.1.0" + }, + "peerDependenciesMeta": { + "@swc/core": { + "optional": true + }, + "esbuild": { + "optional": true + }, + "uglify-js": { + "optional": true + } + } + }, + "node_modules/terser-webpack-plugin/node_modules/terser": { + "version": "5.44.0", + "resolved": "https://registry.npmjs.org/terser/-/terser-5.44.0.tgz", + "integrity": "sha512-nIVck8DK+GM/0Frwd+nIhZ84pR/BX7rmXMfYwyg+Sri5oGVE99/E3KvXqpC2xHFxyqXyGHTKBSioxxplrO4I4w==", + "dev": true, + "dependencies": { + "@jridgewell/source-map": "^0.3.3", + "acorn": "^8.15.0", + "commander": "^2.20.0", + "source-map-support": "~0.5.20" + }, + "bin": { + "terser": "bin/terser" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/test-exclude": { + "version": "6.0.0", + "dev": true, + "license": "ISC", + "dependencies": { + "@istanbuljs/schema": "^0.1.2", + "glob": "^7.1.4", + "minimatch": "^3.0.4" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/thunky": { + "version": "1.1.0", + "dev": true, + "license": "MIT" + }, + "node_modules/tmp": { + "version": "0.0.33", + "dev": true, + "license": "MIT", + "dependencies": { + "os-tmpdir": "~1.0.2" + }, + "engines": { + "node": ">=0.6.0" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/toidentifier": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.6" + } + }, + "node_modules/tree-kill": { + "version": "1.2.2", + "dev": true, + "license": "MIT", + "bin": { + "tree-kill": "cli.js" + } + }, + "node_modules/tslib": { + "version": "2.8.1", + "license": "0BSD" + }, + "node_modules/tuf-js": { + "version": "2.2.1", + "dev": true, + "license": "MIT", + "dependencies": { + "@tufjs/models": "2.0.1", + "debug": "^4.3.4", + "make-fetch-happen": "^13.0.1" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + } + }, + "node_modules/type-fest": { + "version": "0.21.3", + "dev": true, + "license": "(MIT OR CC0-1.0)", + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + 
}, + "node_modules/type-is": { + "version": "1.6.18", + "dev": true, + "license": "MIT", + "dependencies": { + "media-typer": "0.3.0", + "mime-types": "~2.1.24" + }, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/typed-assert": { + "version": "1.0.9", + "dev": true, + "license": "MIT" + }, + "node_modules/typescript": { + "version": "5.4.5", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.4.5.tgz", + "integrity": "sha512-vcI4UpRgg81oIRUFwR0WSIHKt11nJ7SAVlYNIu+QpqeyXP+gpQJy/Z4+F0aGxSE4MqwjyXvW/TzgkLAx2AGHwQ==", + "dev": true, + "peer": true, + "bin": { + "tsc": "bin/tsc", + "tsserver": "bin/tsserver" + }, + "engines": { + "node": ">=14.17" + } + }, + "node_modules/ua-parser-js": { + "version": "0.7.41", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/ua-parser-js" + }, + { + "type": "paypal", + "url": "https://paypal.me/faisalman" + }, + { + "type": "github", + "url": "https://github.com/sponsors/faisalman" + } + ], + "license": "MIT", + "bin": { + "ua-parser-js": "script/cli.js" + }, + "engines": { + "node": "*" + } + }, + "node_modules/undici-types": { + "version": "7.16.0", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-7.16.0.tgz", + "integrity": "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw==", + "dev": true + }, + "node_modules/unicode-canonical-property-names-ecmascript": { + "version": "2.0.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/unicode-match-property-ecmascript": { + "version": "2.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "unicode-canonical-property-names-ecmascript": "^2.0.0", + "unicode-property-aliases-ecmascript": "^2.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/unicode-match-property-value-ecmascript": { + "version": "2.2.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/unicode-property-aliases-ecmascript": { + "version": "2.2.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=4" + } + }, + "node_modules/unique-filename": { + "version": "3.0.0", + "dev": true, + "license": "ISC", + "dependencies": { + "unique-slug": "^4.0.0" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/unique-slug": { + "version": "4.0.0", + "dev": true, + "license": "ISC", + "dependencies": { + "imurmurhash": "^0.1.4" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/universalify": { + "version": "0.1.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 4.0.0" + } + }, + "node_modules/unpipe": { + "version": "1.0.0", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/update-browserslist-db": { + "version": "1.1.3", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "license": "MIT", + "dependencies": { + "escalade": "^3.2.0", + "picocolors": "^1.1.1" + }, + "bin": { + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } + }, + "node_modules/util-deprecate": { + "version": "1.0.2", + "dev": true, + "license": "MIT" + }, + "node_modules/utils-merge": { + "version": "1.0.1", + "dev": 
true, + "license": "MIT", + "engines": { + "node": ">= 0.4.0" + } + }, + "node_modules/validate-npm-package-license": { + "version": "3.0.4", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "spdx-correct": "^3.0.0", + "spdx-expression-parse": "^3.0.0" + } + }, + "node_modules/validate-npm-package-name": { + "version": "5.0.1", + "dev": true, + "license": "ISC", + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/vary": { + "version": "1.1.2", + "dev": true, + "license": "MIT", + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/void-elements": { + "version": "2.0.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/watchpack": { + "version": "2.4.0", + "dev": true, + "license": "MIT", + "dependencies": { + "glob-to-regexp": "^0.4.1", + "graceful-fs": "^4.1.2" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/wbuf": { + "version": "1.7.3", + "dev": true, + "license": "MIT", + "dependencies": { + "minimalistic-assert": "^1.0.0" + } + }, + "node_modules/wcwidth": { + "version": "1.0.1", + "dev": true, + "license": "MIT", + "dependencies": { + "defaults": "^1.0.3" + } + }, + "node_modules/webpack": { + "version": "5.94.0", + "resolved": "https://registry.npmjs.org/webpack/-/webpack-5.94.0.tgz", + "integrity": "sha512-KcsGn50VT+06JH/iunZJedYGUJS5FGjow8wb9c0v5n1Om8O1g4L6LjtfxwlXIATopoQu+vOXXa7gYisWxCoPyg==", + "dev": true, + "peer": true, + "dependencies": { + "@types/estree": "^1.0.5", + "@webassemblyjs/ast": "^1.12.1", + "@webassemblyjs/wasm-edit": "^1.12.1", + "@webassemblyjs/wasm-parser": "^1.12.1", + "acorn": "^8.7.1", + "acorn-import-attributes": "^1.9.5", + "browserslist": "^4.21.10", + "chrome-trace-event": "^1.0.2", + "enhanced-resolve": "^5.17.1", + "es-module-lexer": "^1.2.1", + "eslint-scope": "5.1.1", + "events": "^3.2.0", + "glob-to-regexp": "^0.4.1", + "graceful-fs": "^4.2.11", + "json-parse-even-better-errors": "^2.3.1", + "loader-runner": "^4.2.0", + "mime-types": "^2.1.27", + "neo-async": "^2.6.2", + "schema-utils": "^3.2.0", + "tapable": "^2.1.1", + "terser-webpack-plugin": "^5.3.10", + "watchpack": "^2.4.1", + "webpack-sources": "^3.2.3" + }, + "bin": { + "webpack": "bin/webpack.js" + }, + "engines": { + "node": ">=10.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependenciesMeta": { + "webpack-cli": { + "optional": true + } + } + }, + "node_modules/webpack-dev-middleware": { + "version": "6.1.2", + "dev": true, + "license": "MIT", + "dependencies": { + "colorette": "^2.0.10", + "memfs": "^3.4.12", + "mime-types": "^2.1.31", + "range-parser": "^1.2.1", + "schema-utils": "^4.0.0" + }, + "engines": { + "node": ">= 14.15.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependencies": { + "webpack": "^5.0.0" + }, + "peerDependenciesMeta": { + "webpack": { + "optional": true + } + } + }, + "node_modules/webpack-dev-middleware/node_modules/memfs": { + "version": "3.5.3", + "resolved": "https://registry.npmjs.org/memfs/-/memfs-3.5.3.tgz", + "integrity": "sha512-UERzLsxzllchadvbPs5aolHh65ISpKpM+ccLbOJ8/vvpBKmAWf+la7dXFy7Mr0ySHbdHrFv5kGFCUHHe6GFEmw==", + "dev": true, + "dependencies": { + "fs-monkey": "^1.0.4" + }, + "engines": { + "node": ">= 4.0.0" + } + }, + "node_modules/webpack-dev-server": { + "version": "4.15.1", + "resolved": "https://registry.npmjs.org/webpack-dev-server/-/webpack-dev-server-4.15.1.tgz", + "integrity": 
"sha512-5hbAst3h3C3L8w6W4P96L5vaV0PxSmJhxZvWKYIdgxOQm8pNZ5dEOmmSLBVpP85ReeyRt6AS1QJNyo/oFFPeVA==", + "dev": true, + "peer": true, + "dependencies": { + "@types/bonjour": "^3.5.9", + "@types/connect-history-api-fallback": "^1.3.5", + "@types/express": "^4.17.13", + "@types/serve-index": "^1.9.1", + "@types/serve-static": "^1.13.10", + "@types/sockjs": "^0.3.33", + "@types/ws": "^8.5.5", + "ansi-html-community": "^0.0.8", + "bonjour-service": "^1.0.11", + "chokidar": "^3.5.3", + "colorette": "^2.0.10", + "compression": "^1.7.4", + "connect-history-api-fallback": "^2.0.0", + "default-gateway": "^6.0.3", + "express": "^4.17.3", + "graceful-fs": "^4.2.6", + "html-entities": "^2.3.2", + "http-proxy-middleware": "^2.0.3", + "ipaddr.js": "^2.0.1", + "launch-editor": "^2.6.0", + "open": "^8.0.9", + "p-retry": "^4.5.0", + "rimraf": "^3.0.2", + "schema-utils": "^4.0.0", + "selfsigned": "^2.1.1", + "serve-index": "^1.9.1", + "sockjs": "^0.3.24", + "spdy": "^4.0.2", + "webpack-dev-middleware": "^5.3.1", + "ws": "^8.13.0" + }, + "bin": { + "webpack-dev-server": "bin/webpack-dev-server.js" + }, + "engines": { + "node": ">= 12.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependencies": { + "webpack": "^4.37.0 || ^5.0.0" + }, + "peerDependenciesMeta": { + "webpack": { + "optional": true + }, + "webpack-cli": { + "optional": true + } + } + }, + "node_modules/webpack-dev-server/node_modules/cookie": { + "version": "0.7.1", + "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.1.tgz", + "integrity": "sha512-6DnInpx7SJ2AK3+CTUE/ZM0vWTUboZCegxhC2xiIydHR9jNuTAASBrfEpHhiGOZw/nX51bHt6YQl8jsGo4y/0w==", + "dev": true, + "engines": { + "node": ">= 0.6" + } + }, + "node_modules/webpack-dev-server/node_modules/debug": { + "version": "2.6.9", + "resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz", + "integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==", + "dev": true, + "dependencies": { + "ms": "2.0.0" + } + }, + "node_modules/webpack-dev-server/node_modules/encodeurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz", + "integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==", + "dev": true, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/webpack-dev-server/node_modules/express": { + "version": "4.21.2", + "resolved": "https://registry.npmjs.org/express/-/express-4.21.2.tgz", + "integrity": "sha512-28HqgMZAmih1Czt9ny7qr6ek2qddF4FclbMzwhCREB6OFfH+rXAnuNCwo1/wFvrtbgsQDb4kSbX9de9lFbrXnA==", + "dev": true, + "dependencies": { + "accepts": "~1.3.8", + "array-flatten": "1.1.1", + "body-parser": "1.20.3", + "content-disposition": "0.5.4", + "content-type": "~1.0.4", + "cookie": "0.7.1", + "cookie-signature": "1.0.6", + "debug": "2.6.9", + "depd": "2.0.0", + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "etag": "~1.8.1", + "finalhandler": "1.3.1", + "fresh": "0.5.2", + "http-errors": "2.0.0", + "merge-descriptors": "1.0.3", + "methods": "~1.1.2", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "path-to-regexp": "0.1.12", + "proxy-addr": "~2.0.7", + "qs": "6.13.0", + "range-parser": "~1.2.1", + "safe-buffer": "5.2.1", + "send": "0.19.0", + "serve-static": "1.16.2", + "setprototypeof": "1.2.0", + "statuses": "2.0.1", + "type-is": "~1.6.18", + "utils-merge": "1.0.1", + "vary": "~1.1.2" + }, + "engines": { + "node": ">= 0.10.0" + }, + "funding": { + "type": 
"opencollective", + "url": "https://opencollective.com/express" + } + }, + "node_modules/webpack-dev-server/node_modules/finalhandler": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.1.tgz", + "integrity": "sha512-6BN9trH7bp3qvnrRyzsBz+g3lZxTNZTbVO2EV1CS0WIcDbawYVdYvGflME/9QP0h0pYlCDBCTjYa9nZzMDpyxQ==", + "dev": true, + "dependencies": { + "debug": "2.6.9", + "encodeurl": "~2.0.0", + "escape-html": "~1.0.3", + "on-finished": "2.4.1", + "parseurl": "~1.3.3", + "statuses": "2.0.1", + "unpipe": "~1.0.0" + }, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/webpack-dev-server/node_modules/memfs": { + "version": "3.5.3", + "resolved": "https://registry.npmjs.org/memfs/-/memfs-3.5.3.tgz", + "integrity": "sha512-UERzLsxzllchadvbPs5aolHh65ISpKpM+ccLbOJ8/vvpBKmAWf+la7dXFy7Mr0ySHbdHrFv5kGFCUHHe6GFEmw==", + "dev": true, + "dependencies": { + "fs-monkey": "^1.0.4" + }, + "engines": { + "node": ">= 4.0.0" + } + }, + "node_modules/webpack-dev-server/node_modules/ms": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz", + "integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==", + "dev": true + }, + "node_modules/webpack-dev-server/node_modules/statuses": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz", + "integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==", + "dev": true, + "engines": { + "node": ">= 0.8" + } + }, + "node_modules/webpack-dev-server/node_modules/webpack-dev-middleware": { + "version": "5.3.4", + "resolved": "https://registry.npmjs.org/webpack-dev-middleware/-/webpack-dev-middleware-5.3.4.tgz", + "integrity": "sha512-BVdTqhhs+0IfoeAf7EoH5WE+exCmqGerHfDM0IL096Px60Tq2Mn9MAbnaGUe6HiMa41KMCYF19gyzZmBcq/o4Q==", + "dev": true, + "dependencies": { + "colorette": "^2.0.10", + "memfs": "^3.4.3", + "mime-types": "^2.1.31", + "range-parser": "^1.2.1", + "schema-utils": "^4.0.0" + }, + "engines": { + "node": ">= 12.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + }, + "peerDependencies": { + "webpack": "^4.0.0 || ^5.0.0" + } + }, + "node_modules/webpack-merge": { + "version": "5.10.0", + "dev": true, + "license": "MIT", + "dependencies": { + "clone-deep": "^4.0.1", + "flat": "^5.0.2", + "wildcard": "^2.0.0" + }, + "engines": { + "node": ">=10.0.0" + } + }, + "node_modules/webpack-subresource-integrity": { + "version": "5.1.0", + "dev": true, + "license": "MIT", + "dependencies": { + "typed-assert": "^1.0.8" + }, + "engines": { + "node": ">= 12" + }, + "peerDependencies": { + "html-webpack-plugin": ">= 5.0.0-beta.1 < 6", + "webpack": "^5.12.0" + }, + "peerDependenciesMeta": { + "html-webpack-plugin": { + "optional": true + } + } + }, + "node_modules/webpack/node_modules/@webassemblyjs/ast": { + "version": "1.14.1", + "resolved": "https://registry.npmjs.org/@webassemblyjs/ast/-/ast-1.14.1.tgz", + "integrity": "sha512-nuBEDgQfm1ccRp/8bCQrx1frohyufl4JlbMMZ4P1wpeOfDhF6FQkxZJ1b/e+PLwr6X1Nhw6OLme5usuBWYBvuQ==", + "dev": true, + "dependencies": { + "@webassemblyjs/helper-numbers": "1.13.2", + "@webassemblyjs/helper-wasm-bytecode": "1.13.2" + } + }, + "node_modules/webpack/node_modules/ajv": { + "version": "6.12.6", + "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz", + "integrity": "sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g==", + "dev": true, 
+ "peer": true, + "dependencies": { + "fast-deep-equal": "^3.1.1", + "fast-json-stable-stringify": "^2.0.0", + "json-schema-traverse": "^0.4.1", + "uri-js": "^4.2.2" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/epoberezkin" + } + }, + "node_modules/webpack/node_modules/ajv-keywords": { + "version": "3.5.2", + "resolved": "https://registry.npmjs.org/ajv-keywords/-/ajv-keywords-3.5.2.tgz", + "integrity": "sha512-5p6WTN0DdTGVQk6VjcEju19IgaHudalcfabD7yhDGeA6bcQnmL+CpveLJq/3hvfwd1aof6L386Ougkx6RfyMIQ==", + "dev": true, + "peerDependencies": { + "ajv": "^6.9.1" + } + }, + "node_modules/webpack/node_modules/enhanced-resolve": { + "version": "5.18.3", + "resolved": "https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.18.3.tgz", + "integrity": "sha512-d4lC8xfavMeBjzGr2vECC3fsGXziXZQyJxD868h2M/mBI3PwAuODxAkLkq5HYuvrPYcUtiLzsTo8U3PgX3Ocww==", + "dev": true, + "dependencies": { + "graceful-fs": "^4.2.4", + "tapable": "^2.2.0" + }, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/webpack/node_modules/json-parse-even-better-errors": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz", + "integrity": "sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w==", + "dev": true + }, + "node_modules/webpack/node_modules/json-schema-traverse": { + "version": "0.4.1", + "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", + "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", + "dev": true + }, + "node_modules/webpack/node_modules/neo-async": { + "version": "2.6.2", + "resolved": "https://registry.npmjs.org/neo-async/-/neo-async-2.6.2.tgz", + "integrity": "sha512-Yd3UES5mWCSqR+qNT93S3UoYUkqAZ9lLg8a7g9rimsWmYGK8cVToA4/sF3RrshdyV3sAGMXVUmpMYOw+dLpOuw==", + "dev": true + }, + "node_modules/webpack/node_modules/punycode": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz", + "integrity": "sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/webpack/node_modules/schema-utils": { + "version": "3.3.0", + "resolved": "https://registry.npmjs.org/schema-utils/-/schema-utils-3.3.0.tgz", + "integrity": "sha512-pN/yOAvcC+5rQ5nERGuwrjLlYvLTbCibnZ1I7B1LaiAz9BRBlE9GMgE/eqV30P7aJQUf7Ddimy/RsbYO/GrVGg==", + "dev": true, + "dependencies": { + "@types/json-schema": "^7.0.8", + "ajv": "^6.12.5", + "ajv-keywords": "^3.5.2" + }, + "engines": { + "node": ">= 10.13.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/webpack" + } + }, + "node_modules/webpack/node_modules/uri-js": { + "version": "4.4.1", + "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz", + "integrity": "sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==", + "dev": true, + "dependencies": { + "punycode": "^2.1.0" + } + }, + "node_modules/webpack/node_modules/watchpack": { + "version": "2.4.4", + "resolved": "https://registry.npmjs.org/watchpack/-/watchpack-2.4.4.tgz", + "integrity": "sha512-c5EGNOiyxxV5qmTtAB7rbiXxi1ooX1pQKMLX/MIabJjRA0SJBQOjKF+KSVfHkr9U1cADPon0mRiVe/riyaiDUA==", + "dev": true, + "dependencies": { + "glob-to-regexp": "^0.4.1", + "graceful-fs": "^4.1.2" + }, + "engines": { + "node": ">=10.13.0" + } + }, + 
"node_modules/webpack/node_modules/webpack-sources": { + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/webpack-sources/-/webpack-sources-3.3.3.tgz", + "integrity": "sha512-yd1RBzSGanHkitROoPFd6qsrxt+oFhg/129YzheDGqeustzX0vTZJZsSsQjVQC4yzBQ56K55XU8gaNCtIzOnTg==", + "dev": true, + "engines": { + "node": ">=10.13.0" + } + }, + "node_modules/websocket-driver": { + "version": "0.7.4", + "dev": true, + "license": "Apache-2.0", + "dependencies": { + "http-parser-js": ">=0.5.1", + "safe-buffer": ">=5.1.0", + "websocket-extensions": ">=0.1.1" + }, + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/websocket-extensions": { + "version": "0.1.4", + "dev": true, + "license": "Apache-2.0", + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/which": { + "version": "1.3.1", + "dev": true, + "license": "ISC", + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "which": "bin/which" + } + }, + "node_modules/wildcard": { + "version": "2.0.1", + "dev": true, + "license": "MIT" + }, + "node_modules/wrap-ansi": { + "version": "6.2.0", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi-cjs": { + "name": "wrap-ansi", + "version": "7.0.0", + "dev": true, + "license": "MIT", + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrappy": { + "version": "1.0.2", + "dev": true, + "license": "ISC" + }, + "node_modules/ws": { + "version": "8.17.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=10.0.0" + }, + "peerDependencies": { + "bufferutil": "^4.0.1", + "utf-8-validate": ">=5.0.2" + }, + "peerDependenciesMeta": { + "bufferutil": { + "optional": true + }, + "utf-8-validate": { + "optional": true + } + } + }, + "node_modules/y18n": { + "version": "5.0.8", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=10" + } + }, + "node_modules/yallist": { + "version": "3.1.1", + "dev": true, + "license": "ISC" + }, + "node_modules/yargs": { + "version": "17.7.2", + "resolved": "https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz", + "integrity": "sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w==", + "dev": true, + "dependencies": { + "cliui": "^8.0.1", + "escalade": "^3.1.1", + "get-caller-file": "^2.0.5", + "require-directory": "^2.1.1", + "string-width": "^4.2.3", + "y18n": "^5.0.5", + "yargs-parser": "^21.1.1" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/yargs-parser": { + "version": "21.1.1", + "dev": true, + "license": "ISC", + "engines": { + "node": ">=12" + } + }, + "node_modules/yocto-queue": { + "version": "1.2.1", + "dev": true, + "license": "MIT", + "engines": { + "node": ">=12.20" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/zone.js": { + "version": "0.14.10", + "resolved": "https://registry.npmjs.org/zone.js/-/zone.js-0.14.10.tgz", + "integrity": "sha512-YGAhaO7J5ywOXW6InXNlLmfU194F8lVgu7bRntUF3TiG8Y3nBK0x1UJJuHUP/e8IyihkjCYqhCScpSwnlaSRkQ==", + "peer": true + } + } +} diff --git a/src/StellaOps.Web/package.json b/src/Web/StellaOps.Web/package.json similarity index 100% rename from src/StellaOps.Web/package.json rename to src/Web/StellaOps.Web/package.json diff --git 
a/src/StellaOps.Web/playwright.config.ts b/src/Web/StellaOps.Web/playwright.config.ts similarity index 96% rename from src/StellaOps.Web/playwright.config.ts rename to src/Web/StellaOps.Web/playwright.config.ts index 0169231f..4ce8bc33 100644 --- a/src/StellaOps.Web/playwright.config.ts +++ b/src/Web/StellaOps.Web/playwright.config.ts @@ -1,22 +1,22 @@ -import { defineConfig } from '@playwright/test'; - -const port = process.env.PLAYWRIGHT_PORT - ? Number.parseInt(process.env.PLAYWRIGHT_PORT, 10) - : 4400; - -export default defineConfig({ - testDir: 'tests/e2e', - timeout: 30_000, - retries: process.env.CI ? 1 : 0, - use: { - baseURL: process.env.PLAYWRIGHT_BASE_URL ?? `http://127.0.0.1:${port}`, - trace: 'retain-on-failure', - }, - webServer: { - command: 'npm run serve:test', - reuseExistingServer: !process.env.CI, - url: `http://127.0.0.1:${port}`, - stdout: 'ignore', - stderr: 'ignore', - }, -}); +import { defineConfig } from '@playwright/test'; + +const port = process.env.PLAYWRIGHT_PORT + ? Number.parseInt(process.env.PLAYWRIGHT_PORT, 10) + : 4400; + +export default defineConfig({ + testDir: 'tests/e2e', + timeout: 30_000, + retries: process.env.CI ? 1 : 0, + use: { + baseURL: process.env.PLAYWRIGHT_BASE_URL ?? `http://127.0.0.1:${port}`, + trace: 'retain-on-failure', + }, + webServer: { + command: 'npm run serve:test', + reuseExistingServer: !process.env.CI, + url: `http://127.0.0.1:${port}`, + stdout: 'ignore', + stderr: 'ignore', + }, +}); diff --git a/src/StellaOps.Web/scripts/chrome-path.js b/src/Web/StellaOps.Web/scripts/chrome-path.js similarity index 95% rename from src/StellaOps.Web/scripts/chrome-path.js rename to src/Web/StellaOps.Web/scripts/chrome-path.js index 02e56642..d24d1fed 100644 --- a/src/StellaOps.Web/scripts/chrome-path.js +++ b/src/Web/StellaOps.Web/scripts/chrome-path.js @@ -1,133 +1,133 @@ -const { existsSync, readdirSync, statSync } = require('fs'); -const { join } = require('path'); - -const linuxArchivePath = ['.cache', 'chromium', 'chrome-linux64', 'chrome']; -const windowsArchivePath = ['.cache', 'chromium', 'chrome-win64', 'chrome.exe']; -const macArchivePath = [ - '.cache', - 'chromium', - 'chrome-mac', - 'Chromium.app', - 'Contents', - 'MacOS', - 'Chromium' -]; - -function expandVersionedArchives(rootDir = join(__dirname, '..')) { - const base = join(rootDir, '.cache', 'chromium'); - if (!existsSync(base)) { - return []; - } - - const nestedCandidates = []; - for (const entry of readdirSync(base)) { - const nestedRoot = join(base, entry); - try { - if (!statSync(nestedRoot).isDirectory()) { - continue; - } - } catch { - continue; - } - - nestedCandidates.push( - join(nestedRoot, 'chrome-linux64', 'chrome'), - join(nestedRoot, 'chrome-win64', 'chrome.exe'), - join(nestedRoot, 'chrome-mac', 'Chromium.app', 'Contents', 'MacOS', 'Chromium') - ); - } - - return nestedCandidates; -} - -function expandNestedArchives(rootDir = join(__dirname, '..')) { - const base = join(rootDir, '.cache', 'chromium'); - if (!existsSync(base)) { - return []; - } - - const maxDepth = 4; - const queue = [{ dir: base, depth: 0 }]; - const candidates = []; - - while (queue.length) { - const { dir, depth } = queue.shift(); - let entries; - try { - entries = readdirSync(dir); - } catch { - continue; - } - - for (const entry of entries) { - const nested = join(dir, entry); - let stats; - try { - stats = statSync(nested); - } catch { - continue; - } - - if (!stats.isDirectory()) { - continue; - } - - candidates.push( - join(nested, 'chrome-linux64', 'chrome'), - join(nested, 
'chrome-win64', 'chrome.exe'), - join(nested, 'chrome-mac', 'Chromium.app', 'Contents', 'MacOS', 'Chromium') - ); - - if (depth + 1 <= maxDepth) { - queue.push({ dir: nested, depth: depth + 1 }); - } - } - } - - return candidates; -} - -function candidatePaths(rootDir = join(__dirname, '..')) { - const { env } = process; - const baseCandidates = [ - env.STELLAOPS_CHROMIUM_BIN, - env.CHROME_BIN, - env.PUPPETEER_EXECUTABLE_PATH, - '/usr/bin/chromium-browser', - '/usr/bin/chromium', - '/usr/bin/google-chrome', - '/usr/bin/google-chrome-stable', - join(rootDir, ...linuxArchivePath), - join(rootDir, ...windowsArchivePath), - join(rootDir, ...macArchivePath), - ...expandVersionedArchives(rootDir), - ...expandNestedArchives(rootDir) - ]; - - const seen = new Set(); - return baseCandidates - .filter(Boolean) - .filter((candidate) => { - if (seen.has(candidate)) { - return false; - } - - seen.add(candidate); - return true; - }); -} - -function resolveChromeBinary(rootDir = join(__dirname, '..')) { - for (const candidate of candidatePaths(rootDir)) { - if (existsSync(candidate)) { - return candidate; - } - } - - return null; -} - -module.exports = { - candidatePaths, - resolveChromeBinary -}; +const { existsSync, readdirSync, statSync } = require('fs'); +const { join } = require('path'); + +const linuxArchivePath = ['.cache', 'chromium', 'chrome-linux64', 'chrome']; +const windowsArchivePath = ['.cache', 'chromium', 'chrome-win64', 'chrome.exe']; +const macArchivePath = [ + '.cache', + 'chromium', + 'chrome-mac', + 'Chromium.app', + 'Contents', + 'MacOS', + 'Chromium' +]; + +function expandVersionedArchives(rootDir = join(__dirname, '..')) { + const base = join(rootDir, '.cache', 'chromium'); + if (!existsSync(base)) { + return []; + } + + const nestedCandidates = []; + for (const entry of readdirSync(base)) { + const nestedRoot = join(base, entry); + try { + if (!statSync(nestedRoot).isDirectory()) { + continue; + } + } catch { + continue; + } + + nestedCandidates.push( + join(nestedRoot, 'chrome-linux64', 'chrome'), + join(nestedRoot, 'chrome-win64', 'chrome.exe'), + join(nestedRoot, 'chrome-mac', 'Chromium.app', 'Contents', 'MacOS', 'Chromium') + ); + } + + return nestedCandidates; +} + +function expandNestedArchives(rootDir = join(__dirname, '..')) { + const base = join(rootDir, '.cache', 'chromium'); + if (!existsSync(base)) { + return []; + } + + const maxDepth = 4; + const queue = [{ dir: base, depth: 0 }]; + const candidates = []; + + while (queue.length) { + const { dir, depth } = queue.shift(); + let entries; + try { + entries = readdirSync(dir); + } catch { + continue; + } + + for (const entry of entries) { + const nested = join(dir, entry); + let stats; + try { + stats = statSync(nested); + } catch { + continue; + } + + if (!stats.isDirectory()) { + continue; + } + + candidates.push( + join(nested, 'chrome-linux64', 'chrome'), + join(nested, 'chrome-win64', 'chrome.exe'), + join(nested, 'chrome-mac', 'Chromium.app', 'Contents', 'MacOS', 'Chromium') + ); + + if (depth + 1 <= maxDepth) { + queue.push({ dir: nested, depth: depth + 1 }); + } + } + } + + return candidates; +} + +function candidatePaths(rootDir = join(__dirname, '..')) { + const { env } = process; + const baseCandidates = [ + env.STELLAOPS_CHROMIUM_BIN, + env.CHROME_BIN, + env.PUPPETEER_EXECUTABLE_PATH, + '/usr/bin/chromium-browser', + '/usr/bin/chromium', + '/usr/bin/google-chrome', + '/usr/bin/google-chrome-stable', + join(rootDir, ...linuxArchivePath), + join(rootDir, ...windowsArchivePath), + join(rootDir, 
...macArchivePath), + ...expandVersionedArchives(rootDir), + ...expandNestedArchives(rootDir) + ]; + + const seen = new Set(); + return baseCandidates + .filter(Boolean) + .filter((candidate) => { + if (seen.has(candidate)) { + return false; + } + + seen.add(candidate); + return true; + }); +} + +function resolveChromeBinary(rootDir = join(__dirname, '..')) { + for (const candidate of candidatePaths(rootDir)) { + if (existsSync(candidate)) { + return candidate; + } + } + + return null; +} + +module.exports = { + candidatePaths, + resolveChromeBinary +}; diff --git a/src/StellaOps.Web/scripts/verify-chromium.js b/src/Web/StellaOps.Web/scripts/verify-chromium.js similarity index 96% rename from src/StellaOps.Web/scripts/verify-chromium.js rename to src/Web/StellaOps.Web/scripts/verify-chromium.js index 2e0611b4..6c51f423 100644 --- a/src/StellaOps.Web/scripts/verify-chromium.js +++ b/src/Web/StellaOps.Web/scripts/verify-chromium.js @@ -1,24 +1,24 @@ -#!/usr/bin/env node - -const { resolveChromeBinary, candidatePaths } = require('./chrome-path'); -const { join } = require('path'); - -const projectRoot = join(__dirname, '..'); -const resolved = resolveChromeBinary(projectRoot); - -if (resolved) { - console.log(`Chromium binary detected: ${resolved}`); - process.exit(0); -} - -console.error('Chromium binary not found.'); -console.error('Checked locations:'); -for (const candidate of candidatePaths(projectRoot)) { - console.error(` - ${candidate}`); -} -console.error(''); -console.error( - 'Ensure Google Chrome/Chromium is available on PATH, set CHROME_BIN/STELLAOPS_CHROMIUM_BIN, or drop an offline Chromium build under .cache/chromium/.' -); -console.error('See docs/DeterministicInstall.md for detailed guidance.'); -process.exit(1); +#!/usr/bin/env node + +const { resolveChromeBinary, candidatePaths } = require('./chrome-path'); +const { join } = require('path'); + +const projectRoot = join(__dirname, '..'); +const resolved = resolveChromeBinary(projectRoot); + +if (resolved) { + console.log(`Chromium binary detected: ${resolved}`); + process.exit(0); +} + +console.error('Chromium binary not found.'); +console.error('Checked locations:'); +for (const candidate of candidatePaths(projectRoot)) { + console.error(` - ${candidate}`); +} +console.error(''); +console.error( + 'Ensure Google Chrome/Chromium is available on PATH, set CHROME_BIN/STELLAOPS_CHROMIUM_BIN, or drop an offline Chromium build under .cache/chromium/.' 
+); +console.error('See docs/DeterministicInstall.md for detailed guidance.'); +process.exit(1); diff --git a/src/StellaOps.Web/src/app/app.component.html b/src/Web/StellaOps.Web/src/app/app.component.html similarity index 97% rename from src/StellaOps.Web/src/app/app.component.html rename to src/Web/StellaOps.Web/src/app/app.component.html index 86c65d70..3a9b0b45 100644 --- a/src/StellaOps.Web/src/app/app.component.html +++ b/src/Web/StellaOps.Web/src/app/app.component.html @@ -1,46 +1,46 @@ -<div class="app-shell"> - <header class="app-header"> - <div class="app-brand">StellaOps Dashboard</div> - <nav class="app-nav"> - <a routerLink="/console/profile" routerLinkActive="active"> - Console Profile - </a> - <a routerLink="/concelier/trivy-db-settings" routerLinkActive="active"> - Trivy DB Export - </a> - <a routerLink="/scans/scan-verified-001" routerLinkActive="active"> - Scan Detail - </a> - <a routerLink="/notify" routerLinkActive="active"> - Notify - </a> - </nav> - <div class="app-auth"> - <ng-container *ngIf="isAuthenticated(); else signIn"> - <span class="app-user" aria-live="polite">{{ displayName() }}</span> - <span class="app-tenant" *ngIf="activeTenant() as tenant"> - Tenant: <strong>{{ tenant }}</strong> - </span> - <span - class="app-fresh" - *ngIf="freshAuthSummary() as fresh" - [class.app-fresh--active]="fresh.active" - [class.app-fresh--stale]="!fresh.active" - > - Fresh auth: {{ fresh.active ? 'Active' : 'Stale' }} - <ng-container *ngIf="fresh.expiresAt"> - (expires {{ fresh.expiresAt | date: 'shortTime' }}) - </ng-container> - </span> - <button type="button" (click)="onSignOut()">Sign out</button> - </ng-container> - <ng-template #signIn> - <button type="button" (click)="onSignIn()">Sign in</button> - </ng-template> - </div> - </header> - - <main class="app-content"> - <router-outlet /> - </main> -</div> +<div class="app-shell"> + <header class="app-header"> + <div class="app-brand">StellaOps Dashboard</div> + <nav class="app-nav"> + <a routerLink="/console/profile" routerLinkActive="active"> + Console Profile + </a> + <a routerLink="/concelier/trivy-db-settings" routerLinkActive="active"> + Trivy DB Export + </a> + <a routerLink="/scans/scan-verified-001" routerLinkActive="active"> + Scan Detail + </a> + <a routerLink="/notify" routerLinkActive="active"> + Notify + </a> + </nav> + <div class="app-auth"> + <ng-container *ngIf="isAuthenticated(); else signIn"> + <span class="app-user" aria-live="polite">{{ displayName() }}</span> + <span class="app-tenant" *ngIf="activeTenant() as tenant"> + Tenant: <strong>{{ tenant }}</strong> + </span> + <span + class="app-fresh" + *ngIf="freshAuthSummary() as fresh" + [class.app-fresh--active]="fresh.active" + [class.app-fresh--stale]="!fresh.active" + > + Fresh auth: {{ fresh.active ? 
'Active' : 'Stale' }} + <ng-container *ngIf="fresh.expiresAt"> + (expires {{ fresh.expiresAt | date: 'shortTime' }}) + </ng-container> + </span> + <button type="button" (click)="onSignOut()">Sign out</button> + </ng-container> + <ng-template #signIn> + <button type="button" (click)="onSignIn()">Sign in</button> + </ng-template> + </div> + </header> + + <main class="app-content"> + <router-outlet /> + </main> +</div> diff --git a/src/StellaOps.Web/src/app/app.component.scss b/src/Web/StellaOps.Web/src/app/app.component.scss similarity index 95% rename from src/StellaOps.Web/src/app/app.component.scss rename to src/Web/StellaOps.Web/src/app/app.component.scss index deff0f32..d80a9529 100644 --- a/src/StellaOps.Web/src/app/app.component.scss +++ b/src/Web/StellaOps.Web/src/app/app.component.scss @@ -1,112 +1,112 @@ -:host { - display: block; - min-height: 100vh; - font-family: 'Inter', system-ui, -apple-system, BlinkMacSystemFont, 'Segoe UI', - sans-serif; - color: #0f172a; - background-color: #f8fafc; -} - -.app-shell { - display: flex; - flex-direction: column; - min-height: 100vh; -} - -.app-header { - display: flex; - align-items: center; - justify-content: space-between; - padding: 1rem 1.5rem; - background: linear-gradient(90deg, #0f172a 0%, #1e293b 45%, #4328b7 100%); - color: #f8fafc; - box-shadow: 0 2px 8px rgba(15, 23, 42, 0.2); -} - -.app-brand { - font-size: 1.125rem; - font-weight: 600; - letter-spacing: 0.02em; -} - -.app-nav { - display: flex; - gap: 1rem; - - a { - color: rgba(248, 250, 252, 0.8); - text-decoration: none; - font-size: 0.95rem; - padding: 0.35rem 0.75rem; - border-radius: 9999px; - transition: background-color 0.2s ease, color 0.2s ease; - - &.active, - &:hover, - &:focus-visible { - color: #0f172a; - background-color: rgba(248, 250, 252, 0.9); - } - } -} - -.app-auth { - display: flex; - align-items: center; - gap: 0.75rem; - - .app-user { - font-size: 0.9rem; - font-weight: 500; - } - - button { - appearance: none; - border: none; - border-radius: 9999px; - padding: 0.35rem 0.9rem; - font-size: 0.85rem; - font-weight: 500; - cursor: pointer; - color: #0f172a; - background-color: rgba(248, 250, 252, 0.9); - transition: transform 0.2s ease, background-color 0.2s ease; - - &:hover, - &:focus-visible { - background-color: #facc15; - transform: translateY(-1px); - } - } - - .app-tenant { - font-size: 0.8rem; - color: rgba(248, 250, 252, 0.8); - } - - .app-fresh { - display: inline-flex; - align-items: center; - gap: 0.35rem; - padding: 0.2rem 0.6rem; - border-radius: 9999px; - font-size: 0.75rem; - font-weight: 600; - letter-spacing: 0.03em; - background-color: rgba(20, 184, 166, 0.16); - color: #0f766e; - - &.app-fresh--stale { - background-color: rgba(249, 115, 22, 0.16); - color: #c2410c; - } - } -} - -.app-content { - flex: 1; - padding: 2rem 1.5rem; - max-width: 960px; - width: 100%; - margin: 0 auto; -} +:host { + display: block; + min-height: 100vh; + font-family: 'Inter', system-ui, -apple-system, BlinkMacSystemFont, 'Segoe UI', + sans-serif; + color: #0f172a; + background-color: #f8fafc; +} + +.app-shell { + display: flex; + flex-direction: column; + min-height: 100vh; +} + +.app-header { + display: flex; + align-items: center; + justify-content: space-between; + padding: 1rem 1.5rem; + background: linear-gradient(90deg, #0f172a 0%, #1e293b 45%, #4328b7 100%); + color: #f8fafc; + box-shadow: 0 2px 8px rgba(15, 23, 42, 0.2); +} + +.app-brand { + font-size: 1.125rem; + font-weight: 600; + letter-spacing: 0.02em; +} + +.app-nav { + display: flex; + gap: 
1rem; + + a { + color: rgba(248, 250, 252, 0.8); + text-decoration: none; + font-size: 0.95rem; + padding: 0.35rem 0.75rem; + border-radius: 9999px; + transition: background-color 0.2s ease, color 0.2s ease; + + &.active, + &:hover, + &:focus-visible { + color: #0f172a; + background-color: rgba(248, 250, 252, 0.9); + } + } +} + +.app-auth { + display: flex; + align-items: center; + gap: 0.75rem; + + .app-user { + font-size: 0.9rem; + font-weight: 500; + } + + button { + appearance: none; + border: none; + border-radius: 9999px; + padding: 0.35rem 0.9rem; + font-size: 0.85rem; + font-weight: 500; + cursor: pointer; + color: #0f172a; + background-color: rgba(248, 250, 252, 0.9); + transition: transform 0.2s ease, background-color 0.2s ease; + + &:hover, + &:focus-visible { + background-color: #facc15; + transform: translateY(-1px); + } + } + + .app-tenant { + font-size: 0.8rem; + color: rgba(248, 250, 252, 0.8); + } + + .app-fresh { + display: inline-flex; + align-items: center; + gap: 0.35rem; + padding: 0.2rem 0.6rem; + border-radius: 9999px; + font-size: 0.75rem; + font-weight: 600; + letter-spacing: 0.03em; + background-color: rgba(20, 184, 166, 0.16); + color: #0f766e; + + &.app-fresh--stale { + background-color: rgba(249, 115, 22, 0.16); + color: #c2410c; + } + } +} + +.app-content { + flex: 1; + padding: 2rem 1.5rem; + max-width: 960px; + width: 100%; + margin: 0 auto; +} diff --git a/src/StellaOps.Web/src/app/app.component.spec.ts b/src/Web/StellaOps.Web/src/app/app.component.spec.ts similarity index 97% rename from src/StellaOps.Web/src/app/app.component.spec.ts rename to src/Web/StellaOps.Web/src/app/app.component.spec.ts index 0f063363..436dda04 100644 --- a/src/StellaOps.Web/src/app/app.component.spec.ts +++ b/src/Web/StellaOps.Web/src/app/app.component.spec.ts @@ -1,35 +1,35 @@ -import { TestBed } from '@angular/core/testing'; -import { RouterTestingModule } from '@angular/router/testing'; -import { AppComponent } from './app.component'; -import { AuthorityAuthService } from './core/auth/authority-auth.service'; -import { AuthSessionStore } from './core/auth/auth-session.store'; - -class AuthorityAuthServiceStub { - beginLogin = jasmine.createSpy('beginLogin'); - logout = jasmine.createSpy('logout'); -} - -describe('AppComponent', () => { - beforeEach(async () => { - await TestBed.configureTestingModule({ - imports: [AppComponent, RouterTestingModule], - providers: [ - AuthSessionStore, - { provide: AuthorityAuthService, useClass: AuthorityAuthServiceStub }, - ], - }).compileComponents(); - }); - - it('creates the root component', () => { - const fixture = TestBed.createComponent(AppComponent); - const app = fixture.componentInstance; - expect(app).toBeTruthy(); - }); - - it('renders a router outlet for child routes', () => { - const fixture = TestBed.createComponent(AppComponent); - fixture.detectChanges(); - const compiled = fixture.nativeElement as HTMLElement; - expect(compiled.querySelector('router-outlet')).not.toBeNull(); - }); -}); +import { TestBed } from '@angular/core/testing'; +import { RouterTestingModule } from '@angular/router/testing'; +import { AppComponent } from './app.component'; +import { AuthorityAuthService } from './core/auth/authority-auth.service'; +import { AuthSessionStore } from './core/auth/auth-session.store'; + +class AuthorityAuthServiceStub { + beginLogin = jasmine.createSpy('beginLogin'); + logout = jasmine.createSpy('logout'); +} + +describe('AppComponent', () => { + beforeEach(async () => { + await TestBed.configureTestingModule({ + imports: 
[AppComponent, RouterTestingModule], + providers: [ + AuthSessionStore, + { provide: AuthorityAuthService, useClass: AuthorityAuthServiceStub }, + ], + }).compileComponents(); + }); + + it('creates the root component', () => { + const fixture = TestBed.createComponent(AppComponent); + const app = fixture.componentInstance; + expect(app).toBeTruthy(); + }); + + it('renders a router outlet for child routes', () => { + const fixture = TestBed.createComponent(AppComponent); + fixture.detectChanges(); + const compiled = fixture.nativeElement as HTMLElement; + expect(compiled.querySelector('router-outlet')).not.toBeNull(); + }); +}); diff --git a/src/StellaOps.Web/src/app/app.component.ts b/src/Web/StellaOps.Web/src/app/app.component.ts similarity index 96% rename from src/StellaOps.Web/src/app/app.component.ts rename to src/Web/StellaOps.Web/src/app/app.component.ts index 1593b281..78e8c7c0 100644 --- a/src/StellaOps.Web/src/app/app.component.ts +++ b/src/Web/StellaOps.Web/src/app/app.component.ts @@ -1,64 +1,64 @@ -import { CommonModule } from '@angular/common'; -import { - ChangeDetectionStrategy, - Component, - computed, - inject, -} from '@angular/core'; -import { Router, RouterLink, RouterLinkActive, RouterOutlet } from '@angular/router'; - -import { AuthorityAuthService } from './core/auth/authority-auth.service'; -import { AuthSessionStore } from './core/auth/auth-session.store'; -import { ConsoleSessionStore } from './core/console/console-session.store'; - -@Component({ - selector: 'app-root', - standalone: true, - imports: [CommonModule, RouterOutlet, RouterLink, RouterLinkActive], - templateUrl: './app.component.html', - styleUrl: './app.component.scss', - changeDetection: ChangeDetectionStrategy.OnPush, -}) -export class AppComponent { - private readonly router = inject(Router); - private readonly auth = inject(AuthorityAuthService); - private readonly sessionStore = inject(AuthSessionStore); - private readonly consoleStore = inject(ConsoleSessionStore); - - readonly status = this.sessionStore.status; - readonly identity = this.sessionStore.identity; - readonly subjectHint = this.sessionStore.subjectHint; - readonly isAuthenticated = this.sessionStore.isAuthenticated; - readonly activeTenant = this.consoleStore.selectedTenantId; - readonly freshAuthSummary = computed(() => { - const token = this.consoleStore.tokenInfo(); - if (!token) { - return null; - } - return { - active: token.freshAuthActive, - expiresAt: token.freshAuthExpiresAt, - }; - }); - - readonly displayName = computed(() => { - const identity = this.identity(); - if (identity?.name) { - return identity.name; - } - if (identity?.email) { - return identity.email; - } - const hint = this.subjectHint(); - return hint ?? 'anonymous'; - }); - - onSignIn(): void { - const returnUrl = this.router.url === '/' ? 
undefined : this.router.url; - void this.auth.beginLogin(returnUrl); - } - - onSignOut(): void { - void this.auth.logout(); - } -} +import { CommonModule } from '@angular/common'; +import { + ChangeDetectionStrategy, + Component, + computed, + inject, +} from '@angular/core'; +import { Router, RouterLink, RouterLinkActive, RouterOutlet } from '@angular/router'; + +import { AuthorityAuthService } from './core/auth/authority-auth.service'; +import { AuthSessionStore } from './core/auth/auth-session.store'; +import { ConsoleSessionStore } from './core/console/console-session.store'; + +@Component({ + selector: 'app-root', + standalone: true, + imports: [CommonModule, RouterOutlet, RouterLink, RouterLinkActive], + templateUrl: './app.component.html', + styleUrl: './app.component.scss', + changeDetection: ChangeDetectionStrategy.OnPush, +}) +export class AppComponent { + private readonly router = inject(Router); + private readonly auth = inject(AuthorityAuthService); + private readonly sessionStore = inject(AuthSessionStore); + private readonly consoleStore = inject(ConsoleSessionStore); + + readonly status = this.sessionStore.status; + readonly identity = this.sessionStore.identity; + readonly subjectHint = this.sessionStore.subjectHint; + readonly isAuthenticated = this.sessionStore.isAuthenticated; + readonly activeTenant = this.consoleStore.selectedTenantId; + readonly freshAuthSummary = computed(() => { + const token = this.consoleStore.tokenInfo(); + if (!token) { + return null; + } + return { + active: token.freshAuthActive, + expiresAt: token.freshAuthExpiresAt, + }; + }); + + readonly displayName = computed(() => { + const identity = this.identity(); + if (identity?.name) { + return identity.name; + } + if (identity?.email) { + return identity.email; + } + const hint = this.subjectHint(); + return hint ?? 'anonymous'; + }); + + onSignIn(): void { + const returnUrl = this.router.url === '/' ? 
undefined : this.router.url; + void this.auth.beginLogin(returnUrl); + } + + onSignOut(): void { + void this.auth.logout(); + } +} diff --git a/src/StellaOps.Web/src/app/app.config.ts b/src/Web/StellaOps.Web/src/app/app.config.ts similarity index 96% rename from src/StellaOps.Web/src/app/app.config.ts rename to src/Web/StellaOps.Web/src/app/app.config.ts index 5c62b1cf..269d8bd0 100644 --- a/src/StellaOps.Web/src/app/app.config.ts +++ b/src/Web/StellaOps.Web/src/app/app.config.ts @@ -1,81 +1,81 @@ -import { HTTP_INTERCEPTORS, provideHttpClient, withInterceptorsFromDi } from '@angular/common/http'; -import { APP_INITIALIZER, ApplicationConfig } from '@angular/core'; -import { provideRouter } from '@angular/router'; - -import { routes } from './app.routes'; -import { CONCELIER_EXPORTER_API_BASE_URL } from './core/api/concelier-exporter.client'; -import { - AUTHORITY_CONSOLE_API, - AUTHORITY_CONSOLE_API_BASE_URL, - AuthorityConsoleApiHttpClient, -} from './core/api/authority-console.client'; -import { - NOTIFY_API, - NOTIFY_API_BASE_URL, - NOTIFY_TENANT_ID, -} from './core/api/notify.client'; -import { AppConfigService } from './core/config/app-config.service'; -import { AuthHttpInterceptor } from './core/auth/auth-http.interceptor'; -import { OperatorMetadataInterceptor } from './core/orchestrator/operator-metadata.interceptor'; -import { MockNotifyApiService } from './testing/mock-notify-api.service'; - -export const appConfig: ApplicationConfig = { - providers: [ - provideRouter(routes), - provideHttpClient(withInterceptorsFromDi()), - { - provide: APP_INITIALIZER, - multi: true, - useFactory: (configService: AppConfigService) => () => - configService.load(), - deps: [AppConfigService], - }, - { - provide: HTTP_INTERCEPTORS, - useClass: AuthHttpInterceptor, - multi: true, - }, - { - provide: HTTP_INTERCEPTORS, - useClass: OperatorMetadataInterceptor, - multi: true, - }, - { - provide: CONCELIER_EXPORTER_API_BASE_URL, - useValue: '/api/v1/concelier/exporters/trivy-db', - }, - { - provide: AUTHORITY_CONSOLE_API_BASE_URL, - deps: [AppConfigService], - useFactory: (config: AppConfigService) => { - const authorityBase = config.config.apiBaseUrls.authority; - try { - return new URL('/console', authorityBase).toString(); - } catch { - const normalized = authorityBase.endsWith('/') - ? 
authorityBase.slice(0, -1) - : authorityBase; - return `${normalized}/console`; - } - }, - }, - AuthorityConsoleApiHttpClient, - { - provide: AUTHORITY_CONSOLE_API, - useExisting: AuthorityConsoleApiHttpClient, - }, - { - provide: NOTIFY_API_BASE_URL, - useValue: '/api/v1/notify', - }, - { - provide: NOTIFY_TENANT_ID, - useValue: 'tenant-dev', - }, - MockNotifyApiService, - { - provide: NOTIFY_API, - useExisting: MockNotifyApiService, - }, - ], -}; +import { HTTP_INTERCEPTORS, provideHttpClient, withInterceptorsFromDi } from '@angular/common/http'; +import { APP_INITIALIZER, ApplicationConfig } from '@angular/core'; +import { provideRouter } from '@angular/router'; + +import { routes } from './app.routes'; +import { CONCELIER_EXPORTER_API_BASE_URL } from './core/api/concelier-exporter.client'; +import { + AUTHORITY_CONSOLE_API, + AUTHORITY_CONSOLE_API_BASE_URL, + AuthorityConsoleApiHttpClient, +} from './core/api/authority-console.client'; +import { + NOTIFY_API, + NOTIFY_API_BASE_URL, + NOTIFY_TENANT_ID, +} from './core/api/notify.client'; +import { AppConfigService } from './core/config/app-config.service'; +import { AuthHttpInterceptor } from './core/auth/auth-http.interceptor'; +import { OperatorMetadataInterceptor } from './core/orchestrator/operator-metadata.interceptor'; +import { MockNotifyApiService } from './testing/mock-notify-api.service'; + +export const appConfig: ApplicationConfig = { + providers: [ + provideRouter(routes), + provideHttpClient(withInterceptorsFromDi()), + { + provide: APP_INITIALIZER, + multi: true, + useFactory: (configService: AppConfigService) => () => + configService.load(), + deps: [AppConfigService], + }, + { + provide: HTTP_INTERCEPTORS, + useClass: AuthHttpInterceptor, + multi: true, + }, + { + provide: HTTP_INTERCEPTORS, + useClass: OperatorMetadataInterceptor, + multi: true, + }, + { + provide: CONCELIER_EXPORTER_API_BASE_URL, + useValue: '/api/v1/concelier/exporters/trivy-db', + }, + { + provide: AUTHORITY_CONSOLE_API_BASE_URL, + deps: [AppConfigService], + useFactory: (config: AppConfigService) => { + const authorityBase = config.config.apiBaseUrls.authority; + try { + return new URL('/console', authorityBase).toString(); + } catch { + const normalized = authorityBase.endsWith('/') + ? 
authorityBase.slice(0, -1) + : authorityBase; + return `${normalized}/console`; + } + }, + }, + AuthorityConsoleApiHttpClient, + { + provide: AUTHORITY_CONSOLE_API, + useExisting: AuthorityConsoleApiHttpClient, + }, + { + provide: NOTIFY_API_BASE_URL, + useValue: '/api/v1/notify', + }, + { + provide: NOTIFY_TENANT_ID, + useValue: 'tenant-dev', + }, + MockNotifyApiService, + { + provide: NOTIFY_API, + useExisting: MockNotifyApiService, + }, + ], +}; diff --git a/src/StellaOps.Web/src/app/app.routes.ts b/src/Web/StellaOps.Web/src/app/app.routes.ts similarity index 95% rename from src/StellaOps.Web/src/app/app.routes.ts rename to src/Web/StellaOps.Web/src/app/app.routes.ts index 1d3a6f7a..815c9712 100644 --- a/src/StellaOps.Web/src/app/app.routes.ts +++ b/src/Web/StellaOps.Web/src/app/app.routes.ts @@ -1,48 +1,48 @@ -import { Routes } from '@angular/router'; - -export const routes: Routes = [ - { - path: 'console/profile', - loadComponent: () => - import('./features/console/console-profile.component').then( - (m) => m.ConsoleProfileComponent - ), - }, - { - path: 'concelier/trivy-db-settings', - loadComponent: () => - import('./features/trivy-db-settings/trivy-db-settings-page.component').then( - (m) => m.TrivyDbSettingsPageComponent - ), - }, - { - path: 'scans/:scanId', - loadComponent: () => - import('./features/scans/scan-detail-page.component').then( - (m) => m.ScanDetailPageComponent - ), - }, - { - path: 'notify', - loadComponent: () => - import('./features/notify/notify-panel.component').then( - (m) => m.NotifyPanelComponent - ), - }, - { - path: 'auth/callback', - loadComponent: () => - import('./features/auth/auth-callback.component').then( - (m) => m.AuthCallbackComponent - ), - }, - { - path: '', - pathMatch: 'full', - redirectTo: 'console/profile', - }, - { - path: '**', - redirectTo: 'console/profile', - }, -]; +import { Routes } from '@angular/router'; + +export const routes: Routes = [ + { + path: 'console/profile', + loadComponent: () => + import('./features/console/console-profile.component').then( + (m) => m.ConsoleProfileComponent + ), + }, + { + path: 'concelier/trivy-db-settings', + loadComponent: () => + import('./features/trivy-db-settings/trivy-db-settings-page.component').then( + (m) => m.TrivyDbSettingsPageComponent + ), + }, + { + path: 'scans/:scanId', + loadComponent: () => + import('./features/scans/scan-detail-page.component').then( + (m) => m.ScanDetailPageComponent + ), + }, + { + path: 'notify', + loadComponent: () => + import('./features/notify/notify-panel.component').then( + (m) => m.NotifyPanelComponent + ), + }, + { + path: 'auth/callback', + loadComponent: () => + import('./features/auth/auth-callback.component').then( + (m) => m.AuthCallbackComponent + ), + }, + { + path: '', + pathMatch: 'full', + redirectTo: 'console/profile', + }, + { + path: '**', + redirectTo: 'console/profile', + }, +]; diff --git a/src/StellaOps.Web/src/app/core/api/authority-console.client.ts b/src/Web/StellaOps.Web/src/app/core/api/authority-console.client.ts similarity index 96% rename from src/StellaOps.Web/src/app/core/api/authority-console.client.ts rename to src/Web/StellaOps.Web/src/app/core/api/authority-console.client.ts index 4135f99a..1befc953 100644 --- a/src/StellaOps.Web/src/app/core/api/authority-console.client.ts +++ b/src/Web/StellaOps.Web/src/app/core/api/authority-console.client.ts @@ -1,113 +1,113 @@ -import { HttpClient, HttpHeaders } from '@angular/common/http'; -import { Inject, Injectable, InjectionToken } from '@angular/core'; -import { Observable } 
from 'rxjs'; - -import { AuthSessionStore } from '../auth/auth-session.store'; - -export interface AuthorityTenantViewDto { - readonly id: string; - readonly displayName: string; - readonly status: string; - readonly isolationMode: string; - readonly defaultRoles: readonly string[]; -} - -export interface TenantCatalogResponseDto { - readonly tenants: readonly AuthorityTenantViewDto[]; -} - -export interface ConsoleProfileDto { - readonly subjectId: string | null; - readonly username: string | null; - readonly displayName: string | null; - readonly tenant: string; - readonly sessionId: string | null; - readonly roles: readonly string[]; - readonly scopes: readonly string[]; - readonly audiences: readonly string[]; - readonly authenticationMethods: readonly string[]; - readonly issuedAt: string | null; - readonly authenticationTime: string | null; - readonly expiresAt: string | null; - readonly freshAuth: boolean; -} - -export interface ConsoleTokenIntrospectionDto { - readonly active: boolean; - readonly tenant: string; - readonly subject: string | null; - readonly clientId: string | null; - readonly tokenId: string | null; - readonly scopes: readonly string[]; - readonly audiences: readonly string[]; - readonly issuedAt: string | null; - readonly authenticationTime: string | null; - readonly expiresAt: string | null; - readonly freshAuth: boolean; -} - -export interface AuthorityConsoleApi { - listTenants(tenantId?: string): Observable<TenantCatalogResponseDto>; - getProfile(tenantId?: string): Observable<ConsoleProfileDto>; - introspectToken( - tenantId?: string - ): Observable<ConsoleTokenIntrospectionDto>; -} - -export const AUTHORITY_CONSOLE_API = new InjectionToken<AuthorityConsoleApi>( - 'AUTHORITY_CONSOLE_API' -); - -export const AUTHORITY_CONSOLE_API_BASE_URL = new InjectionToken<string>( - 'AUTHORITY_CONSOLE_API_BASE_URL' -); - -@Injectable({ - providedIn: 'root', -}) -export class AuthorityConsoleApiHttpClient implements AuthorityConsoleApi { - constructor( - private readonly http: HttpClient, - @Inject(AUTHORITY_CONSOLE_API_BASE_URL) private readonly baseUrl: string, - private readonly authSession: AuthSessionStore - ) {} - - listTenants(tenantId?: string): Observable<TenantCatalogResponseDto> { - return this.http.get<TenantCatalogResponseDto>(`${this.baseUrl}/tenants`, { - headers: this.buildHeaders(tenantId), - }); - } - - getProfile(tenantId?: string): Observable<ConsoleProfileDto> { - return this.http.get<ConsoleProfileDto>(`${this.baseUrl}/profile`, { - headers: this.buildHeaders(tenantId), - }); - } - - introspectToken( - tenantId?: string - ): Observable<ConsoleTokenIntrospectionDto> { - return this.http.post<ConsoleTokenIntrospectionDto>( - `${this.baseUrl}/token/introspect`, - {}, - { - headers: this.buildHeaders(tenantId), - } - ); - } - - private buildHeaders(tenantOverride?: string): HttpHeaders { - const tenantId = - (tenantOverride && tenantOverride.trim()) || - this.authSession.getActiveTenantId(); - if (!tenantId) { - throw new Error( - 'AuthorityConsoleApiHttpClient requires an active tenant identifier.' 
- ); - } - - return new HttpHeaders({ - 'X-StellaOps-Tenant': tenantId, - }); - } -} +import { HttpClient, HttpHeaders } from '@angular/common/http'; +import { Inject, Injectable, InjectionToken } from '@angular/core'; +import { Observable } from 'rxjs'; + +import { AuthSessionStore } from '../auth/auth-session.store'; + +export interface AuthorityTenantViewDto { + readonly id: string; + readonly displayName: string; + readonly status: string; + readonly isolationMode: string; + readonly defaultRoles: readonly string[]; +} + +export interface TenantCatalogResponseDto { + readonly tenants: readonly AuthorityTenantViewDto[]; +} + +export interface ConsoleProfileDto { + readonly subjectId: string | null; + readonly username: string | null; + readonly displayName: string | null; + readonly tenant: string; + readonly sessionId: string | null; + readonly roles: readonly string[]; + readonly scopes: readonly string[]; + readonly audiences: readonly string[]; + readonly authenticationMethods: readonly string[]; + readonly issuedAt: string | null; + readonly authenticationTime: string | null; + readonly expiresAt: string | null; + readonly freshAuth: boolean; +} + +export interface ConsoleTokenIntrospectionDto { + readonly active: boolean; + readonly tenant: string; + readonly subject: string | null; + readonly clientId: string | null; + readonly tokenId: string | null; + readonly scopes: readonly string[]; + readonly audiences: readonly string[]; + readonly issuedAt: string | null; + readonly authenticationTime: string | null; + readonly expiresAt: string | null; + readonly freshAuth: boolean; +} + +export interface AuthorityConsoleApi { + listTenants(tenantId?: string): Observable<TenantCatalogResponseDto>; + getProfile(tenantId?: string): Observable<ConsoleProfileDto>; + introspectToken( + tenantId?: string + ): Observable<ConsoleTokenIntrospectionDto>; +} + +export const AUTHORITY_CONSOLE_API = new InjectionToken<AuthorityConsoleApi>( + 'AUTHORITY_CONSOLE_API' +); + +export const AUTHORITY_CONSOLE_API_BASE_URL = new InjectionToken<string>( + 'AUTHORITY_CONSOLE_API_BASE_URL' +); + +@Injectable({ + providedIn: 'root', +}) +export class AuthorityConsoleApiHttpClient implements AuthorityConsoleApi { + constructor( + private readonly http: HttpClient, + @Inject(AUTHORITY_CONSOLE_API_BASE_URL) private readonly baseUrl: string, + private readonly authSession: AuthSessionStore + ) {} + + listTenants(tenantId?: string): Observable<TenantCatalogResponseDto> { + return this.http.get<TenantCatalogResponseDto>(`${this.baseUrl}/tenants`, { + headers: this.buildHeaders(tenantId), + }); + } + + getProfile(tenantId?: string): Observable<ConsoleProfileDto> { + return this.http.get<ConsoleProfileDto>(`${this.baseUrl}/profile`, { + headers: this.buildHeaders(tenantId), + }); + } + + introspectToken( + tenantId?: string + ): Observable<ConsoleTokenIntrospectionDto> { + return this.http.post<ConsoleTokenIntrospectionDto>( + `${this.baseUrl}/token/introspect`, + {}, + { + headers: this.buildHeaders(tenantId), + } + ); + } + + private buildHeaders(tenantOverride?: string): HttpHeaders { + const tenantId = + (tenantOverride && tenantOverride.trim()) || + this.authSession.getActiveTenantId(); + if (!tenantId) { + throw new Error( + 'AuthorityConsoleApiHttpClient requires an active tenant identifier.' 
+ ); + } + + return new HttpHeaders({ + 'X-StellaOps-Tenant': tenantId, + }); + } +} diff --git a/src/StellaOps.Web/src/app/core/api/concelier-exporter.client.ts b/src/Web/StellaOps.Web/src/app/core/api/concelier-exporter.client.ts similarity index 96% rename from src/StellaOps.Web/src/app/core/api/concelier-exporter.client.ts rename to src/Web/StellaOps.Web/src/app/core/api/concelier-exporter.client.ts index 1e55414d..07fa0db7 100644 --- a/src/StellaOps.Web/src/app/core/api/concelier-exporter.client.ts +++ b/src/Web/StellaOps.Web/src/app/core/api/concelier-exporter.client.ts @@ -1,51 +1,51 @@ -import { HttpClient } from '@angular/common/http'; -import { - Injectable, - InjectionToken, - inject, -} from '@angular/core'; -import { Observable } from 'rxjs'; - -export interface TrivyDbSettingsDto { - publishFull: boolean; - publishDelta: boolean; - includeFull: boolean; - includeDelta: boolean; -} - -export interface TrivyDbRunResponseDto { - exportId: string; - triggeredAt: string; - status?: string; -} - -export const CONCELIER_EXPORTER_API_BASE_URL = new InjectionToken<string>( - 'CONCELIER_EXPORTER_API_BASE_URL' -); - -@Injectable({ - providedIn: 'root', -}) -export class ConcelierExporterClient { - private readonly http = inject(HttpClient); - private readonly baseUrl = inject(CONCELIER_EXPORTER_API_BASE_URL); - - getTrivyDbSettings(): Observable<TrivyDbSettingsDto> { - return this.http.get<TrivyDbSettingsDto>(`${this.baseUrl}/settings`); - } - - updateTrivyDbSettings( - settings: TrivyDbSettingsDto - ): Observable<TrivyDbSettingsDto> { - return this.http.put<TrivyDbSettingsDto>(`${this.baseUrl}/settings`, settings); - } - - runTrivyDbExport( - settings: TrivyDbSettingsDto - ): Observable<TrivyDbRunResponseDto> { - return this.http.post<TrivyDbRunResponseDto>(`${this.baseUrl}/run`, { - trigger: 'ui', - parameters: settings, - }); - } -} +import { HttpClient } from '@angular/common/http'; +import { + Injectable, + InjectionToken, + inject, +} from '@angular/core'; +import { Observable } from 'rxjs'; + +export interface TrivyDbSettingsDto { + publishFull: boolean; + publishDelta: boolean; + includeFull: boolean; + includeDelta: boolean; +} + +export interface TrivyDbRunResponseDto { + exportId: string; + triggeredAt: string; + status?: string; +} + +export const CONCELIER_EXPORTER_API_BASE_URL = new InjectionToken<string>( + 'CONCELIER_EXPORTER_API_BASE_URL' +); + +@Injectable({ + providedIn: 'root', +}) +export class ConcelierExporterClient { + private readonly http = inject(HttpClient); + private readonly baseUrl = inject(CONCELIER_EXPORTER_API_BASE_URL); + + getTrivyDbSettings(): Observable<TrivyDbSettingsDto> { + return this.http.get<TrivyDbSettingsDto>(`${this.baseUrl}/settings`); + } + + updateTrivyDbSettings( + settings: TrivyDbSettingsDto + ): Observable<TrivyDbSettingsDto> { + return this.http.put<TrivyDbSettingsDto>(`${this.baseUrl}/settings`, settings); + } + + runTrivyDbExport( + settings: TrivyDbSettingsDto + ): Observable<TrivyDbRunResponseDto> { + return this.http.post<TrivyDbRunResponseDto>(`${this.baseUrl}/run`, { + trigger: 'ui', + parameters: settings, + }); + } +} diff --git a/src/StellaOps.Web/src/app/core/api/notify.client.ts b/src/Web/StellaOps.Web/src/app/core/api/notify.client.ts similarity index 96% rename from src/StellaOps.Web/src/app/core/api/notify.client.ts rename to src/Web/StellaOps.Web/src/app/core/api/notify.client.ts index d3820017..03b21fec 100644 --- a/src/StellaOps.Web/src/app/core/api/notify.client.ts +++ 
b/src/Web/StellaOps.Web/src/app/core/api/notify.client.ts @@ -1,142 +1,142 @@ -import { HttpClient, HttpHeaders, HttpParams } from '@angular/common/http'; -import { - Inject, - Injectable, - InjectionToken, - Optional, -} from '@angular/core'; -import { Observable } from 'rxjs'; - -import { - ChannelHealthResponse, - ChannelTestSendRequest, - ChannelTestSendResponse, - NotifyChannel, - NotifyDeliveriesQueryOptions, - NotifyDeliveriesResponse, - NotifyRule, -} from './notify.models'; - -export interface NotifyApi { - listChannels(): Observable<NotifyChannel[]>; - saveChannel(channel: NotifyChannel): Observable<NotifyChannel>; - deleteChannel(channelId: string): Observable<void>; - getChannelHealth(channelId: string): Observable<ChannelHealthResponse>; - testChannel( - channelId: string, - payload: ChannelTestSendRequest - ): Observable<ChannelTestSendResponse>; - listRules(): Observable<NotifyRule[]>; - saveRule(rule: NotifyRule): Observable<NotifyRule>; - deleteRule(ruleId: string): Observable<void>; - listDeliveries( - options?: NotifyDeliveriesQueryOptions - ): Observable<NotifyDeliveriesResponse>; -} - -export const NOTIFY_API = new InjectionToken<NotifyApi>('NOTIFY_API'); - -export const NOTIFY_API_BASE_URL = new InjectionToken<string>( - 'NOTIFY_API_BASE_URL' -); - -export const NOTIFY_TENANT_ID = new InjectionToken<string>('NOTIFY_TENANT_ID'); - -@Injectable({ providedIn: 'root' }) -export class NotifyApiHttpClient implements NotifyApi { - constructor( - private readonly http: HttpClient, - @Inject(NOTIFY_API_BASE_URL) private readonly baseUrl: string, - @Optional() @Inject(NOTIFY_TENANT_ID) private readonly tenantId: string | null - ) {} - - listChannels(): Observable<NotifyChannel[]> { - return this.http.get<NotifyChannel[]>(`${this.baseUrl}/channels`, { - headers: this.buildHeaders(), - }); - } - - saveChannel(channel: NotifyChannel): Observable<NotifyChannel> { - return this.http.post<NotifyChannel>(`${this.baseUrl}/channels`, channel, { - headers: this.buildHeaders(), - }); - } - - deleteChannel(channelId: string): Observable<void> { - return this.http.delete<void>(`${this.baseUrl}/channels/${channelId}`, { - headers: this.buildHeaders(), - }); - } - - getChannelHealth(channelId: string): Observable<ChannelHealthResponse> { - return this.http.get<ChannelHealthResponse>( - `${this.baseUrl}/channels/${channelId}/health`, - { - headers: this.buildHeaders(), - } - ); - } - - testChannel( - channelId: string, - payload: ChannelTestSendRequest - ): Observable<ChannelTestSendResponse> { - return this.http.post<ChannelTestSendResponse>( - `${this.baseUrl}/channels/${channelId}/test`, - payload, - { - headers: this.buildHeaders(), - } - ); - } - - listRules(): Observable<NotifyRule[]> { - return this.http.get<NotifyRule[]>(`${this.baseUrl}/rules`, { - headers: this.buildHeaders(), - }); - } - - saveRule(rule: NotifyRule): Observable<NotifyRule> { - return this.http.post<NotifyRule>(`${this.baseUrl}/rules`, rule, { - headers: this.buildHeaders(), - }); - } - - deleteRule(ruleId: string): Observable<void> { - return this.http.delete<void>(`${this.baseUrl}/rules/${ruleId}`, { - headers: this.buildHeaders(), - }); - } - - listDeliveries( - options?: NotifyDeliveriesQueryOptions - ): Observable<NotifyDeliveriesResponse> { - let params = new HttpParams(); - if (options?.status) { - params = params.set('status', options.status); - } - if (options?.since) { - params = params.set('since', options.since); - } - if (options?.limit) { - params = params.set('limit', options.limit); - } - if 
(options?.continuationToken) { - params = params.set('continuationToken', options.continuationToken); - } - - return this.http.get<NotifyDeliveriesResponse>(`${this.baseUrl}/deliveries`, { - headers: this.buildHeaders(), - params, - }); - } - - private buildHeaders(): HttpHeaders { - if (!this.tenantId) { - return new HttpHeaders(); - } - - return new HttpHeaders({ 'X-StellaOps-Tenant': this.tenantId }); - } -} - +import { HttpClient, HttpHeaders, HttpParams } from '@angular/common/http'; +import { + Inject, + Injectable, + InjectionToken, + Optional, +} from '@angular/core'; +import { Observable } from 'rxjs'; + +import { + ChannelHealthResponse, + ChannelTestSendRequest, + ChannelTestSendResponse, + NotifyChannel, + NotifyDeliveriesQueryOptions, + NotifyDeliveriesResponse, + NotifyRule, +} from './notify.models'; + +export interface NotifyApi { + listChannels(): Observable<NotifyChannel[]>; + saveChannel(channel: NotifyChannel): Observable<NotifyChannel>; + deleteChannel(channelId: string): Observable<void>; + getChannelHealth(channelId: string): Observable<ChannelHealthResponse>; + testChannel( + channelId: string, + payload: ChannelTestSendRequest + ): Observable<ChannelTestSendResponse>; + listRules(): Observable<NotifyRule[]>; + saveRule(rule: NotifyRule): Observable<NotifyRule>; + deleteRule(ruleId: string): Observable<void>; + listDeliveries( + options?: NotifyDeliveriesQueryOptions + ): Observable<NotifyDeliveriesResponse>; +} + +export const NOTIFY_API = new InjectionToken<NotifyApi>('NOTIFY_API'); + +export const NOTIFY_API_BASE_URL = new InjectionToken<string>( + 'NOTIFY_API_BASE_URL' +); + +export const NOTIFY_TENANT_ID = new InjectionToken<string>('NOTIFY_TENANT_ID'); + +@Injectable({ providedIn: 'root' }) +export class NotifyApiHttpClient implements NotifyApi { + constructor( + private readonly http: HttpClient, + @Inject(NOTIFY_API_BASE_URL) private readonly baseUrl: string, + @Optional() @Inject(NOTIFY_TENANT_ID) private readonly tenantId: string | null + ) {} + + listChannels(): Observable<NotifyChannel[]> { + return this.http.get<NotifyChannel[]>(`${this.baseUrl}/channels`, { + headers: this.buildHeaders(), + }); + } + + saveChannel(channel: NotifyChannel): Observable<NotifyChannel> { + return this.http.post<NotifyChannel>(`${this.baseUrl}/channels`, channel, { + headers: this.buildHeaders(), + }); + } + + deleteChannel(channelId: string): Observable<void> { + return this.http.delete<void>(`${this.baseUrl}/channels/${channelId}`, { + headers: this.buildHeaders(), + }); + } + + getChannelHealth(channelId: string): Observable<ChannelHealthResponse> { + return this.http.get<ChannelHealthResponse>( + `${this.baseUrl}/channels/${channelId}/health`, + { + headers: this.buildHeaders(), + } + ); + } + + testChannel( + channelId: string, + payload: ChannelTestSendRequest + ): Observable<ChannelTestSendResponse> { + return this.http.post<ChannelTestSendResponse>( + `${this.baseUrl}/channels/${channelId}/test`, + payload, + { + headers: this.buildHeaders(), + } + ); + } + + listRules(): Observable<NotifyRule[]> { + return this.http.get<NotifyRule[]>(`${this.baseUrl}/rules`, { + headers: this.buildHeaders(), + }); + } + + saveRule(rule: NotifyRule): Observable<NotifyRule> { + return this.http.post<NotifyRule>(`${this.baseUrl}/rules`, rule, { + headers: this.buildHeaders(), + }); + } + + deleteRule(ruleId: string): Observable<void> { + return this.http.delete<void>(`${this.baseUrl}/rules/${ruleId}`, { + headers: this.buildHeaders(), + }); + } + + listDeliveries( + options?: 
NotifyDeliveriesQueryOptions + ): Observable<NotifyDeliveriesResponse> { + let params = new HttpParams(); + if (options?.status) { + params = params.set('status', options.status); + } + if (options?.since) { + params = params.set('since', options.since); + } + if (options?.limit) { + params = params.set('limit', options.limit); + } + if (options?.continuationToken) { + params = params.set('continuationToken', options.continuationToken); + } + + return this.http.get<NotifyDeliveriesResponse>(`${this.baseUrl}/deliveries`, { + headers: this.buildHeaders(), + params, + }); + } + + private buildHeaders(): HttpHeaders { + if (!this.tenantId) { + return new HttpHeaders(); + } + + return new HttpHeaders({ 'X-StellaOps-Tenant': this.tenantId }); + } +} + diff --git a/src/StellaOps.Web/src/app/core/api/notify.models.ts b/src/Web/StellaOps.Web/src/app/core/api/notify.models.ts similarity index 96% rename from src/StellaOps.Web/src/app/core/api/notify.models.ts rename to src/Web/StellaOps.Web/src/app/core/api/notify.models.ts index 711dcc82..451af50e 100644 --- a/src/StellaOps.Web/src/app/core/api/notify.models.ts +++ b/src/Web/StellaOps.Web/src/app/core/api/notify.models.ts @@ -1,194 +1,194 @@ -export type NotifyChannelType = - | 'Slack' - | 'Teams' - | 'Email' - | 'Webhook' - | 'Custom'; - -export type ChannelHealthStatus = 'Healthy' | 'Degraded' | 'Unhealthy'; - -export type NotifyDeliveryStatus = - | 'Pending' - | 'Sent' - | 'Failed' - | 'Throttled' - | 'Digested' - | 'Dropped'; - -export type NotifyDeliveryAttemptStatus = - | 'Enqueued' - | 'Sending' - | 'Succeeded' - | 'Failed' - | 'Throttled' - | 'Skipped'; - -export type NotifyDeliveryFormat = - | 'Slack' - | 'Teams' - | 'Email' - | 'Webhook' - | 'Json'; - -export interface NotifyChannelLimits { - readonly concurrency?: number | null; - readonly requestsPerMinute?: number | null; - readonly timeout?: string | null; - readonly maxBatchSize?: number | null; -} - -export interface NotifyChannelConfig { - readonly secretRef: string; - readonly target?: string; - readonly endpoint?: string; - readonly properties?: Record<string, string>; - readonly limits?: NotifyChannelLimits | null; -} - -export interface NotifyChannel { - readonly schemaVersion?: string; - readonly channelId: string; - readonly tenantId: string; - readonly name: string; - readonly displayName?: string; - readonly description?: string; - readonly type: NotifyChannelType; - readonly enabled: boolean; - readonly config: NotifyChannelConfig; - readonly labels?: Record<string, string>; - readonly metadata?: Record<string, string>; - readonly createdBy?: string; - readonly createdAt?: string; - readonly updatedBy?: string; - readonly updatedAt?: string; -} - -export interface NotifyRuleMatchVex { - readonly includeAcceptedJustifications?: boolean; - readonly includeRejectedJustifications?: boolean; - readonly includeUnknownJustifications?: boolean; - readonly justificationKinds?: readonly string[]; -} - -export interface NotifyRuleMatch { - readonly eventKinds?: readonly string[]; - readonly namespaces?: readonly string[]; - readonly repositories?: readonly string[]; - readonly digests?: readonly string[]; - readonly labels?: readonly string[]; - readonly componentPurls?: readonly string[]; - readonly minSeverity?: string | null; - readonly verdicts?: readonly string[]; - readonly kevOnly?: boolean | null; - readonly vex?: NotifyRuleMatchVex | null; -} - -export interface NotifyRuleAction { - readonly actionId: string; - readonly channel: string; - readonly template?: string; - 
readonly digest?: string; - readonly throttle?: string | null; - readonly locale?: string; - readonly enabled: boolean; - readonly metadata?: Record<string, string>; -} - -export interface NotifyRule { - readonly schemaVersion?: string; - readonly ruleId: string; - readonly tenantId: string; - readonly name: string; - readonly description?: string; - readonly enabled: boolean; - readonly match: NotifyRuleMatch; - readonly actions: readonly NotifyRuleAction[]; - readonly labels?: Record<string, string>; - readonly metadata?: Record<string, string>; - readonly createdBy?: string; - readonly createdAt?: string; - readonly updatedBy?: string; - readonly updatedAt?: string; -} - -export interface NotifyDeliveryAttempt { - readonly timestamp: string; - readonly status: NotifyDeliveryAttemptStatus; - readonly statusCode?: number; - readonly reason?: string; -} - -export interface NotifyDeliveryRendered { - readonly channelType: NotifyChannelType; - readonly format: NotifyDeliveryFormat; - readonly target: string; - readonly title: string; - readonly body: string; - readonly summary?: string; - readonly textBody?: string; - readonly locale?: string; - readonly bodyHash?: string; - readonly attachments?: readonly string[]; -} - -export interface NotifyDelivery { - readonly deliveryId: string; - readonly tenantId: string; - readonly ruleId: string; - readonly actionId: string; - readonly eventId: string; - readonly kind: string; - readonly status: NotifyDeliveryStatus; - readonly statusReason?: string; - readonly rendered?: NotifyDeliveryRendered; - readonly attempts?: readonly NotifyDeliveryAttempt[]; - readonly metadata?: Record<string, string>; - readonly createdAt: string; - readonly sentAt?: string; - readonly completedAt?: string; -} - -export interface NotifyDeliveriesQueryOptions { - readonly status?: NotifyDeliveryStatus; - readonly since?: string; - readonly limit?: number; - readonly continuationToken?: string; -} - -export interface NotifyDeliveriesResponse { - readonly items: readonly NotifyDelivery[]; - readonly continuationToken?: string | null; - readonly count: number; -} - -export interface ChannelHealthResponse { - readonly tenantId: string; - readonly channelId: string; - readonly status: ChannelHealthStatus; - readonly message?: string | null; - readonly checkedAt: string; - readonly traceId: string; - readonly metadata?: Record<string, string>; -} - -export interface ChannelTestSendRequest { - readonly target?: string; - readonly templateId?: string; - readonly title?: string; - readonly summary?: string; - readonly body?: string; - readonly textBody?: string; - readonly locale?: string; - readonly metadata?: Record<string, string>; - readonly attachments?: readonly string[]; -} - -export interface ChannelTestSendResponse { - readonly tenantId: string; - readonly channelId: string; - readonly preview: NotifyDeliveryRendered; - readonly queuedAt: string; - readonly traceId: string; - readonly metadata?: Record<string, string>; -} - +export type NotifyChannelType = + | 'Slack' + | 'Teams' + | 'Email' + | 'Webhook' + | 'Custom'; + +export type ChannelHealthStatus = 'Healthy' | 'Degraded' | 'Unhealthy'; + +export type NotifyDeliveryStatus = + | 'Pending' + | 'Sent' + | 'Failed' + | 'Throttled' + | 'Digested' + | 'Dropped'; + +export type NotifyDeliveryAttemptStatus = + | 'Enqueued' + | 'Sending' + | 'Succeeded' + | 'Failed' + | 'Throttled' + | 'Skipped'; + +export type NotifyDeliveryFormat = + | 'Slack' + | 'Teams' + | 'Email' + | 'Webhook' + | 'Json'; + +export interface 
NotifyChannelLimits { + readonly concurrency?: number | null; + readonly requestsPerMinute?: number | null; + readonly timeout?: string | null; + readonly maxBatchSize?: number | null; +} + +export interface NotifyChannelConfig { + readonly secretRef: string; + readonly target?: string; + readonly endpoint?: string; + readonly properties?: Record<string, string>; + readonly limits?: NotifyChannelLimits | null; +} + +export interface NotifyChannel { + readonly schemaVersion?: string; + readonly channelId: string; + readonly tenantId: string; + readonly name: string; + readonly displayName?: string; + readonly description?: string; + readonly type: NotifyChannelType; + readonly enabled: boolean; + readonly config: NotifyChannelConfig; + readonly labels?: Record<string, string>; + readonly metadata?: Record<string, string>; + readonly createdBy?: string; + readonly createdAt?: string; + readonly updatedBy?: string; + readonly updatedAt?: string; +} + +export interface NotifyRuleMatchVex { + readonly includeAcceptedJustifications?: boolean; + readonly includeRejectedJustifications?: boolean; + readonly includeUnknownJustifications?: boolean; + readonly justificationKinds?: readonly string[]; +} + +export interface NotifyRuleMatch { + readonly eventKinds?: readonly string[]; + readonly namespaces?: readonly string[]; + readonly repositories?: readonly string[]; + readonly digests?: readonly string[]; + readonly labels?: readonly string[]; + readonly componentPurls?: readonly string[]; + readonly minSeverity?: string | null; + readonly verdicts?: readonly string[]; + readonly kevOnly?: boolean | null; + readonly vex?: NotifyRuleMatchVex | null; +} + +export interface NotifyRuleAction { + readonly actionId: string; + readonly channel: string; + readonly template?: string; + readonly digest?: string; + readonly throttle?: string | null; + readonly locale?: string; + readonly enabled: boolean; + readonly metadata?: Record<string, string>; +} + +export interface NotifyRule { + readonly schemaVersion?: string; + readonly ruleId: string; + readonly tenantId: string; + readonly name: string; + readonly description?: string; + readonly enabled: boolean; + readonly match: NotifyRuleMatch; + readonly actions: readonly NotifyRuleAction[]; + readonly labels?: Record<string, string>; + readonly metadata?: Record<string, string>; + readonly createdBy?: string; + readonly createdAt?: string; + readonly updatedBy?: string; + readonly updatedAt?: string; +} + +export interface NotifyDeliveryAttempt { + readonly timestamp: string; + readonly status: NotifyDeliveryAttemptStatus; + readonly statusCode?: number; + readonly reason?: string; +} + +export interface NotifyDeliveryRendered { + readonly channelType: NotifyChannelType; + readonly format: NotifyDeliveryFormat; + readonly target: string; + readonly title: string; + readonly body: string; + readonly summary?: string; + readonly textBody?: string; + readonly locale?: string; + readonly bodyHash?: string; + readonly attachments?: readonly string[]; +} + +export interface NotifyDelivery { + readonly deliveryId: string; + readonly tenantId: string; + readonly ruleId: string; + readonly actionId: string; + readonly eventId: string; + readonly kind: string; + readonly status: NotifyDeliveryStatus; + readonly statusReason?: string; + readonly rendered?: NotifyDeliveryRendered; + readonly attempts?: readonly NotifyDeliveryAttempt[]; + readonly metadata?: Record<string, string>; + readonly createdAt: string; + readonly sentAt?: string; + readonly completedAt?: string; 
+} + +export interface NotifyDeliveriesQueryOptions { + readonly status?: NotifyDeliveryStatus; + readonly since?: string; + readonly limit?: number; + readonly continuationToken?: string; +} + +export interface NotifyDeliveriesResponse { + readonly items: readonly NotifyDelivery[]; + readonly continuationToken?: string | null; + readonly count: number; +} + +export interface ChannelHealthResponse { + readonly tenantId: string; + readonly channelId: string; + readonly status: ChannelHealthStatus; + readonly message?: string | null; + readonly checkedAt: string; + readonly traceId: string; + readonly metadata?: Record<string, string>; +} + +export interface ChannelTestSendRequest { + readonly target?: string; + readonly templateId?: string; + readonly title?: string; + readonly summary?: string; + readonly body?: string; + readonly textBody?: string; + readonly locale?: string; + readonly metadata?: Record<string, string>; + readonly attachments?: readonly string[]; +} + +export interface ChannelTestSendResponse { + readonly tenantId: string; + readonly channelId: string; + readonly preview: NotifyDeliveryRendered; + readonly queuedAt: string; + readonly traceId: string; + readonly metadata?: Record<string, string>; +} + diff --git a/src/StellaOps.Web/src/app/core/api/policy-preview.models.ts b/src/Web/StellaOps.Web/src/app/core/api/policy-preview.models.ts similarity index 100% rename from src/StellaOps.Web/src/app/core/api/policy-preview.models.ts rename to src/Web/StellaOps.Web/src/app/core/api/policy-preview.models.ts diff --git a/src/StellaOps.Web/src/app/core/api/scanner.models.ts b/src/Web/StellaOps.Web/src/app/core/api/scanner.models.ts similarity index 96% rename from src/StellaOps.Web/src/app/core/api/scanner.models.ts rename to src/Web/StellaOps.Web/src/app/core/api/scanner.models.ts index b905a4ff..ae93ee3a 100644 --- a/src/StellaOps.Web/src/app/core/api/scanner.models.ts +++ b/src/Web/StellaOps.Web/src/app/core/api/scanner.models.ts @@ -1,17 +1,17 @@ -export type ScanAttestationStatusKind = 'verified' | 'pending' | 'failed'; - -export interface ScanAttestationStatus { - readonly uuid: string; - readonly status: ScanAttestationStatusKind; - readonly index?: number; - readonly logUrl?: string; - readonly checkedAt?: string; - readonly statusMessage?: string; -} - -export interface ScanDetail { - readonly scanId: string; - readonly imageDigest: string; - readonly completedAt: string; - readonly attestation?: ScanAttestationStatus; -} +export type ScanAttestationStatusKind = 'verified' | 'pending' | 'failed'; + +export interface ScanAttestationStatus { + readonly uuid: string; + readonly status: ScanAttestationStatusKind; + readonly index?: number; + readonly logUrl?: string; + readonly checkedAt?: string; + readonly statusMessage?: string; +} + +export interface ScanDetail { + readonly scanId: string; + readonly imageDigest: string; + readonly completedAt: string; + readonly attestation?: ScanAttestationStatus; +} diff --git a/src/StellaOps.Web/src/app/core/auth/auth-http.interceptor.ts b/src/Web/StellaOps.Web/src/app/core/auth/auth-http.interceptor.ts similarity index 96% rename from src/StellaOps.Web/src/app/core/auth/auth-http.interceptor.ts rename to src/Web/StellaOps.Web/src/app/core/auth/auth-http.interceptor.ts index 66ceecff..8468805a 100644 --- a/src/StellaOps.Web/src/app/core/auth/auth-http.interceptor.ts +++ b/src/Web/StellaOps.Web/src/app/core/auth/auth-http.interceptor.ts @@ -1,171 +1,171 @@ -import { - HttpErrorResponse, - HttpEvent, - HttpHandler, - HttpInterceptor, 
- HttpRequest, -} from '@angular/common/http'; -import { Injectable } from '@angular/core'; -import { Observable, firstValueFrom, from, throwError } from 'rxjs'; -import { catchError, switchMap } from 'rxjs/operators'; - -import { AppConfigService } from '../config/app-config.service'; -import { DpopService } from './dpop/dpop.service'; -import { AuthorityAuthService } from './authority-auth.service'; - -const RETRY_HEADER = 'X-StellaOps-DPoP-Retry'; - -@Injectable() -export class AuthHttpInterceptor implements HttpInterceptor { - private excludedOrigins: Set<string> | null = null; - private tokenEndpoint: string | null = null; - private authorityResolved = false; - - constructor( - private readonly auth: AuthorityAuthService, - private readonly config: AppConfigService, - private readonly dpop: DpopService - ) { - // lazy resolve authority configuration in intercept to allow APP_INITIALIZER to run first - } - - intercept( - request: HttpRequest<unknown>, - next: HttpHandler - ): Observable<HttpEvent<unknown>> { - this.ensureAuthorityInfo(); - - if (request.headers.has('Authorization') || this.shouldSkip(request.url)) { - return next.handle(request); - } - - return from( - this.auth.getAuthHeadersForRequest( - this.resolveAbsoluteUrl(request.url), - request.method - ) - ).pipe( - switchMap((headers) => { - if (!headers) { - return next.handle(request); - } - const authorizedRequest = request.clone({ - setHeaders: { - Authorization: headers.authorization, - DPoP: headers.dpop, - }, - headers: request.headers.set(RETRY_HEADER, '0'), - }); - return next.handle(authorizedRequest); - }), - catchError((error: HttpErrorResponse) => - this.handleError(request, error, next) - ) - ); - } - - private handleError( - request: HttpRequest<unknown>, - error: HttpErrorResponse, - next: HttpHandler - ): Observable<HttpEvent<unknown>> { - if (error.status !== 401) { - return throwError(() => error); - } - - const nonce = error.headers?.get('DPoP-Nonce'); - if (!nonce) { - return throwError(() => error); - } - - if (request.headers.get(RETRY_HEADER) === '1') { - return throwError(() => error); - } - - return from(this.retryWithNonce(request, nonce, next)).pipe( - catchError(() => throwError(() => error)) - ); - } - - private async retryWithNonce( - request: HttpRequest<unknown>, - nonce: string, - next: HttpHandler - ): Promise<HttpEvent<unknown>> { - await this.dpop.setNonce(nonce); - const headers = await this.auth.getAuthHeadersForRequest( - this.resolveAbsoluteUrl(request.url), - request.method - ); - if (!headers) { - throw new Error('Unable to refresh authorization headers after nonce.'); - } - - const retried = request.clone({ - setHeaders: { - Authorization: headers.authorization, - DPoP: headers.dpop, - }, - headers: request.headers.set(RETRY_HEADER, '1'), - }); - - return firstValueFrom(next.handle(retried)); - } - - private shouldSkip(url: string): boolean { - this.ensureAuthorityInfo(); - const absolute = this.resolveAbsoluteUrl(url); - if (!absolute) { - return false; - } - - try { - const resolved = new URL(absolute); - if (resolved.pathname.endsWith('/config.json')) { - return true; - } - if (this.tokenEndpoint && absolute.startsWith(this.tokenEndpoint)) { - return true; - } - const origin = resolved.origin; - return this.excludedOrigins?.has(origin) ?? 
false; - } catch { - return false; - } - } - - private resolveAbsoluteUrl(url: string): string { - try { - if (url.startsWith('http://') || url.startsWith('https://')) { - return url; - } - const base = - typeof window !== 'undefined' && window.location - ? window.location.origin - : undefined; - return base ? new URL(url, base).toString() : url; - } catch { - return url; - } - } - - private ensureAuthorityInfo(): void { - if (this.authorityResolved) { - return; - } - try { - const authority = this.config.authority; - this.tokenEndpoint = new URL( - authority.tokenEndpoint, - authority.issuer - ).toString(); - this.excludedOrigins = new Set<string>([ - this.tokenEndpoint, - new URL(authority.authorizeEndpoint, authority.issuer).origin, - ]); - this.authorityResolved = true; - } catch { - // Configuration not yet loaded; interceptor will retry on the next request. - } - } -} +import { + HttpErrorResponse, + HttpEvent, + HttpHandler, + HttpInterceptor, + HttpRequest, +} from '@angular/common/http'; +import { Injectable } from '@angular/core'; +import { Observable, firstValueFrom, from, throwError } from 'rxjs'; +import { catchError, switchMap } from 'rxjs/operators'; + +import { AppConfigService } from '../config/app-config.service'; +import { DpopService } from './dpop/dpop.service'; +import { AuthorityAuthService } from './authority-auth.service'; + +const RETRY_HEADER = 'X-StellaOps-DPoP-Retry'; + +@Injectable() +export class AuthHttpInterceptor implements HttpInterceptor { + private excludedOrigins: Set<string> | null = null; + private tokenEndpoint: string | null = null; + private authorityResolved = false; + + constructor( + private readonly auth: AuthorityAuthService, + private readonly config: AppConfigService, + private readonly dpop: DpopService + ) { + // lazy resolve authority configuration in intercept to allow APP_INITIALIZER to run first + } + + intercept( + request: HttpRequest<unknown>, + next: HttpHandler + ): Observable<HttpEvent<unknown>> { + this.ensureAuthorityInfo(); + + if (request.headers.has('Authorization') || this.shouldSkip(request.url)) { + return next.handle(request); + } + + return from( + this.auth.getAuthHeadersForRequest( + this.resolveAbsoluteUrl(request.url), + request.method + ) + ).pipe( + switchMap((headers) => { + if (!headers) { + return next.handle(request); + } + const authorizedRequest = request.clone({ + setHeaders: { + Authorization: headers.authorization, + DPoP: headers.dpop, + }, + headers: request.headers.set(RETRY_HEADER, '0'), + }); + return next.handle(authorizedRequest); + }), + catchError((error: HttpErrorResponse) => + this.handleError(request, error, next) + ) + ); + } + + private handleError( + request: HttpRequest<unknown>, + error: HttpErrorResponse, + next: HttpHandler + ): Observable<HttpEvent<unknown>> { + if (error.status !== 401) { + return throwError(() => error); + } + + const nonce = error.headers?.get('DPoP-Nonce'); + if (!nonce) { + return throwError(() => error); + } + + if (request.headers.get(RETRY_HEADER) === '1') { + return throwError(() => error); + } + + return from(this.retryWithNonce(request, nonce, next)).pipe( + catchError(() => throwError(() => error)) + ); + } + + private async retryWithNonce( + request: HttpRequest<unknown>, + nonce: string, + next: HttpHandler + ): Promise<HttpEvent<unknown>> { + await this.dpop.setNonce(nonce); + const headers = await this.auth.getAuthHeadersForRequest( + this.resolveAbsoluteUrl(request.url), + request.method + ); + if (!headers) { + throw new Error('Unable to refresh 
authorization headers after nonce.'); + } + + const retried = request.clone({ + setHeaders: { + Authorization: headers.authorization, + DPoP: headers.dpop, + }, + headers: request.headers.set(RETRY_HEADER, '1'), + }); + + return firstValueFrom(next.handle(retried)); + } + + private shouldSkip(url: string): boolean { + this.ensureAuthorityInfo(); + const absolute = this.resolveAbsoluteUrl(url); + if (!absolute) { + return false; + } + + try { + const resolved = new URL(absolute); + if (resolved.pathname.endsWith('/config.json')) { + return true; + } + if (this.tokenEndpoint && absolute.startsWith(this.tokenEndpoint)) { + return true; + } + const origin = resolved.origin; + return this.excludedOrigins?.has(origin) ?? false; + } catch { + return false; + } + } + + private resolveAbsoluteUrl(url: string): string { + try { + if (url.startsWith('http://') || url.startsWith('https://')) { + return url; + } + const base = + typeof window !== 'undefined' && window.location + ? window.location.origin + : undefined; + return base ? new URL(url, base).toString() : url; + } catch { + return url; + } + } + + private ensureAuthorityInfo(): void { + if (this.authorityResolved) { + return; + } + try { + const authority = this.config.authority; + this.tokenEndpoint = new URL( + authority.tokenEndpoint, + authority.issuer + ).toString(); + this.excludedOrigins = new Set<string>([ + this.tokenEndpoint, + new URL(authority.authorizeEndpoint, authority.issuer).origin, + ]); + this.authorityResolved = true; + } catch { + // Configuration not yet loaded; interceptor will retry on the next request. + } + } +} diff --git a/src/StellaOps.Web/src/app/core/auth/auth-session.model.ts b/src/Web/StellaOps.Web/src/app/core/auth/auth-session.model.ts similarity index 96% rename from src/StellaOps.Web/src/app/core/auth/auth-session.model.ts rename to src/Web/StellaOps.Web/src/app/core/auth/auth-session.model.ts index 9cfcefd6..73b3437f 100644 --- a/src/StellaOps.Web/src/app/core/auth/auth-session.model.ts +++ b/src/Web/StellaOps.Web/src/app/core/auth/auth-session.model.ts @@ -1,56 +1,56 @@ -export interface AuthTokens { - readonly accessToken: string; - readonly expiresAtEpochMs: number; - readonly refreshToken?: string; - readonly tokenType: 'Bearer'; - readonly scope: string; -} - -export interface AuthIdentity { - readonly subject: string; - readonly name?: string; - readonly email?: string; - readonly roles: readonly string[]; - readonly idToken?: string; -} - -export interface AuthSession { - readonly tokens: AuthTokens; - readonly identity: AuthIdentity; - /** - * SHA-256 JWK thumbprint of the active DPoP key pair. 
- */ - readonly dpopKeyThumbprint: string; - readonly issuedAtEpochMs: number; - readonly tenantId: string | null; - readonly scopes: readonly string[]; - readonly audiences: readonly string[]; - readonly authenticationTimeEpochMs: number | null; - readonly freshAuthActive: boolean; - readonly freshAuthExpiresAtEpochMs: number | null; -} - -export interface PersistedSessionMetadata { - readonly subject: string; - readonly expiresAtEpochMs: number; - readonly issuedAtEpochMs: number; - readonly dpopKeyThumbprint: string; - readonly tenantId?: string | null; -} - -export type AuthStatus = - | 'unauthenticated' - | 'authenticated' - | 'refreshing' - | 'loading'; - -export const ACCESS_TOKEN_REFRESH_THRESHOLD_MS = 60_000; - -export const SESSION_STORAGE_KEY = 'stellaops.auth.session.info'; - -export type AuthErrorReason = - | 'invalid_state' - | 'token_exchange_failed' - | 'refresh_failed' - | 'dpop_generation_failed' - | 'configuration_missing'; +export interface AuthTokens { + readonly accessToken: string; + readonly expiresAtEpochMs: number; + readonly refreshToken?: string; + readonly tokenType: 'Bearer'; + readonly scope: string; +} + +export interface AuthIdentity { + readonly subject: string; + readonly name?: string; + readonly email?: string; + readonly roles: readonly string[]; + readonly idToken?: string; +} + +export interface AuthSession { + readonly tokens: AuthTokens; + readonly identity: AuthIdentity; + /** + * SHA-256 JWK thumbprint of the active DPoP key pair. + */ + readonly dpopKeyThumbprint: string; + readonly issuedAtEpochMs: number; + readonly tenantId: string | null; + readonly scopes: readonly string[]; + readonly audiences: readonly string[]; + readonly authenticationTimeEpochMs: number | null; + readonly freshAuthActive: boolean; + readonly freshAuthExpiresAtEpochMs: number | null; +} + +export interface PersistedSessionMetadata { + readonly subject: string; + readonly expiresAtEpochMs: number; + readonly issuedAtEpochMs: number; + readonly dpopKeyThumbprint: string; + readonly tenantId?: string | null; +} + +export type AuthStatus = + | 'unauthenticated' + | 'authenticated' + | 'refreshing' + | 'loading'; + +export const ACCESS_TOKEN_REFRESH_THRESHOLD_MS = 60_000; + +export const SESSION_STORAGE_KEY = 'stellaops.auth.session.info'; + +export type AuthErrorReason = + | 'invalid_state' + | 'token_exchange_failed' + | 'refresh_failed' + | 'dpop_generation_failed' + | 'configuration_missing'; diff --git a/src/StellaOps.Web/src/app/core/auth/auth-session.store.spec.ts b/src/Web/StellaOps.Web/src/app/core/auth/auth-session.store.spec.ts similarity index 96% rename from src/StellaOps.Web/src/app/core/auth/auth-session.store.spec.ts rename to src/Web/StellaOps.Web/src/app/core/auth/auth-session.store.spec.ts index 696c1cac..ab8853e3 100644 --- a/src/StellaOps.Web/src/app/core/auth/auth-session.store.spec.ts +++ b/src/Web/StellaOps.Web/src/app/core/auth/auth-session.store.spec.ts @@ -1,55 +1,55 @@ -import { TestBed } from '@angular/core/testing'; - -import { AuthSession, AuthTokens, SESSION_STORAGE_KEY } from './auth-session.model'; -import { AuthSessionStore } from './auth-session.store'; - -describe('AuthSessionStore', () => { - let store: AuthSessionStore; - - beforeEach(() => { - sessionStorage.clear(); - TestBed.configureTestingModule({ - providers: [AuthSessionStore], - }); - store = TestBed.inject(AuthSessionStore); - }); - - it('persists minimal metadata when session is set', () => { - const tokens: AuthTokens = { - accessToken: 'token-abc', - expiresAtEpochMs: 
Date.now() + 120_000, - refreshToken: 'refresh-xyz', - scope: 'openid ui.read', - tokenType: 'Bearer', - }; - - const session: AuthSession = { - tokens, - identity: { - subject: 'user-123', - name: 'Alex Operator', - roles: ['ui.read'], - }, - dpopKeyThumbprint: 'thumbprint-1', - issuedAtEpochMs: Date.now(), - tenantId: 'tenant-default', - scopes: ['ui.read'], - audiences: ['console'], - authenticationTimeEpochMs: Date.now(), - freshAuthActive: true, - freshAuthExpiresAtEpochMs: Date.now() + 300_000, - }; - - store.setSession(session); - - const persisted = sessionStorage.getItem(SESSION_STORAGE_KEY); - expect(persisted).toBeTruthy(); - const parsed = JSON.parse(persisted ?? '{}'); - expect(parsed.subject).toBe('user-123'); - expect(parsed.dpopKeyThumbprint).toBe('thumbprint-1'); - expect(parsed.tenantId).toBe('tenant-default'); - - store.clear(); - expect(sessionStorage.getItem(SESSION_STORAGE_KEY)).toBeNull(); - }); -}); +import { TestBed } from '@angular/core/testing'; + +import { AuthSession, AuthTokens, SESSION_STORAGE_KEY } from './auth-session.model'; +import { AuthSessionStore } from './auth-session.store'; + +describe('AuthSessionStore', () => { + let store: AuthSessionStore; + + beforeEach(() => { + sessionStorage.clear(); + TestBed.configureTestingModule({ + providers: [AuthSessionStore], + }); + store = TestBed.inject(AuthSessionStore); + }); + + it('persists minimal metadata when session is set', () => { + const tokens: AuthTokens = { + accessToken: 'token-abc', + expiresAtEpochMs: Date.now() + 120_000, + refreshToken: 'refresh-xyz', + scope: 'openid ui.read', + tokenType: 'Bearer', + }; + + const session: AuthSession = { + tokens, + identity: { + subject: 'user-123', + name: 'Alex Operator', + roles: ['ui.read'], + }, + dpopKeyThumbprint: 'thumbprint-1', + issuedAtEpochMs: Date.now(), + tenantId: 'tenant-default', + scopes: ['ui.read'], + audiences: ['console'], + authenticationTimeEpochMs: Date.now(), + freshAuthActive: true, + freshAuthExpiresAtEpochMs: Date.now() + 300_000, + }; + + store.setSession(session); + + const persisted = sessionStorage.getItem(SESSION_STORAGE_KEY); + expect(persisted).toBeTruthy(); + const parsed = JSON.parse(persisted ?? 
'{}'); + expect(parsed.subject).toBe('user-123'); + expect(parsed.dpopKeyThumbprint).toBe('thumbprint-1'); + expect(parsed.tenantId).toBe('tenant-default'); + + store.clear(); + expect(sessionStorage.getItem(SESSION_STORAGE_KEY)).toBeNull(); + }); +}); diff --git a/src/StellaOps.Web/src/app/core/auth/auth-session.store.ts b/src/Web/StellaOps.Web/src/app/core/auth/auth-session.store.ts similarity index 96% rename from src/StellaOps.Web/src/app/core/auth/auth-session.store.ts rename to src/Web/StellaOps.Web/src/app/core/auth/auth-session.store.ts index 89d7b538..51cd977f 100644 --- a/src/StellaOps.Web/src/app/core/auth/auth-session.store.ts +++ b/src/Web/StellaOps.Web/src/app/core/auth/auth-session.store.ts @@ -1,129 +1,129 @@ -import { Injectable, computed, signal } from '@angular/core'; - -import { - AuthSession, - AuthStatus, - PersistedSessionMetadata, - SESSION_STORAGE_KEY, -} from './auth-session.model'; - -@Injectable({ - providedIn: 'root', -}) -export class AuthSessionStore { - private readonly sessionSignal = signal<AuthSession | null>(null); - private readonly statusSignal = signal<AuthStatus>('unauthenticated'); - private readonly persistedSignal = - signal<PersistedSessionMetadata | null>(this.readPersistedMetadata()); - - readonly session = computed(() => this.sessionSignal()); - readonly status = computed(() => this.statusSignal()); - - readonly identity = computed(() => this.sessionSignal()?.identity ?? null); - readonly subjectHint = computed( - () => - this.sessionSignal()?.identity.subject ?? - this.persistedSignal()?.subject ?? - null - ); - - readonly expiresAtEpochMs = computed( - () => this.sessionSignal()?.tokens.expiresAtEpochMs ?? null - ); - - readonly isAuthenticated = computed( - () => this.sessionSignal() !== null && this.statusSignal() !== 'loading' - ); - - readonly tenantId = computed( - () => - this.sessionSignal()?.tenantId ?? - this.persistedSignal()?.tenantId ?? - null - ); - - setStatus(status: AuthStatus): void { - this.statusSignal.set(status); - } - - setSession(session: AuthSession | null): void { - this.sessionSignal.set(session); - if (!session) { - this.statusSignal.set('unauthenticated'); - this.persistedSignal.set(null); - this.clearPersistedMetadata(); - return; - } - - this.statusSignal.set('authenticated'); - const metadata: PersistedSessionMetadata = { - subject: session.identity.subject, - expiresAtEpochMs: session.tokens.expiresAtEpochMs, - issuedAtEpochMs: session.issuedAtEpochMs, - dpopKeyThumbprint: session.dpopKeyThumbprint, - tenantId: session.tenantId, - }; - this.persistedSignal.set(metadata); - this.persistMetadata(metadata); - } - - clear(): void { - this.sessionSignal.set(null); - this.statusSignal.set('unauthenticated'); - this.persistedSignal.set(null); - this.clearPersistedMetadata(); - } - - private readPersistedMetadata(): PersistedSessionMetadata | null { - if (typeof sessionStorage === 'undefined') { - return null; - } - - try { - const raw = sessionStorage.getItem(SESSION_STORAGE_KEY); - if (!raw) { - return null; - } - const parsed = JSON.parse(raw) as PersistedSessionMetadata; - if ( - typeof parsed.subject !== 'string' || - typeof parsed.expiresAtEpochMs !== 'number' || - typeof parsed.issuedAtEpochMs !== 'number' || - typeof parsed.dpopKeyThumbprint !== 'string' - ) { - return null; - } - const tenantId = - typeof parsed.tenantId === 'string' - ? 
parsed.tenantId.trim() || null - : null; - return { - subject: parsed.subject, - expiresAtEpochMs: parsed.expiresAtEpochMs, - issuedAtEpochMs: parsed.issuedAtEpochMs, - dpopKeyThumbprint: parsed.dpopKeyThumbprint, - tenantId, - }; - } catch { - return null; - } - } - - private persistMetadata(metadata: PersistedSessionMetadata): void { - if (typeof sessionStorage === 'undefined') { - return; - } - sessionStorage.setItem(SESSION_STORAGE_KEY, JSON.stringify(metadata)); - } - - private clearPersistedMetadata(): void { - if (typeof sessionStorage === 'undefined') { - return; - } - sessionStorage.removeItem(SESSION_STORAGE_KEY); - } - - getActiveTenantId(): string | null { - return this.tenantId(); - } -} +import { Injectable, computed, signal } from '@angular/core'; + +import { + AuthSession, + AuthStatus, + PersistedSessionMetadata, + SESSION_STORAGE_KEY, +} from './auth-session.model'; + +@Injectable({ + providedIn: 'root', +}) +export class AuthSessionStore { + private readonly sessionSignal = signal<AuthSession | null>(null); + private readonly statusSignal = signal<AuthStatus>('unauthenticated'); + private readonly persistedSignal = + signal<PersistedSessionMetadata | null>(this.readPersistedMetadata()); + + readonly session = computed(() => this.sessionSignal()); + readonly status = computed(() => this.statusSignal()); + + readonly identity = computed(() => this.sessionSignal()?.identity ?? null); + readonly subjectHint = computed( + () => + this.sessionSignal()?.identity.subject ?? + this.persistedSignal()?.subject ?? + null + ); + + readonly expiresAtEpochMs = computed( + () => this.sessionSignal()?.tokens.expiresAtEpochMs ?? null + ); + + readonly isAuthenticated = computed( + () => this.sessionSignal() !== null && this.statusSignal() !== 'loading' + ); + + readonly tenantId = computed( + () => + this.sessionSignal()?.tenantId ?? + this.persistedSignal()?.tenantId ?? + null + ); + + setStatus(status: AuthStatus): void { + this.statusSignal.set(status); + } + + setSession(session: AuthSession | null): void { + this.sessionSignal.set(session); + if (!session) { + this.statusSignal.set('unauthenticated'); + this.persistedSignal.set(null); + this.clearPersistedMetadata(); + return; + } + + this.statusSignal.set('authenticated'); + const metadata: PersistedSessionMetadata = { + subject: session.identity.subject, + expiresAtEpochMs: session.tokens.expiresAtEpochMs, + issuedAtEpochMs: session.issuedAtEpochMs, + dpopKeyThumbprint: session.dpopKeyThumbprint, + tenantId: session.tenantId, + }; + this.persistedSignal.set(metadata); + this.persistMetadata(metadata); + } + + clear(): void { + this.sessionSignal.set(null); + this.statusSignal.set('unauthenticated'); + this.persistedSignal.set(null); + this.clearPersistedMetadata(); + } + + private readPersistedMetadata(): PersistedSessionMetadata | null { + if (typeof sessionStorage === 'undefined') { + return null; + } + + try { + const raw = sessionStorage.getItem(SESSION_STORAGE_KEY); + if (!raw) { + return null; + } + const parsed = JSON.parse(raw) as PersistedSessionMetadata; + if ( + typeof parsed.subject !== 'string' || + typeof parsed.expiresAtEpochMs !== 'number' || + typeof parsed.issuedAtEpochMs !== 'number' || + typeof parsed.dpopKeyThumbprint !== 'string' + ) { + return null; + } + const tenantId = + typeof parsed.tenantId === 'string' + ? 
parsed.tenantId.trim() || null + : null; + return { + subject: parsed.subject, + expiresAtEpochMs: parsed.expiresAtEpochMs, + issuedAtEpochMs: parsed.issuedAtEpochMs, + dpopKeyThumbprint: parsed.dpopKeyThumbprint, + tenantId, + }; + } catch { + return null; + } + } + + private persistMetadata(metadata: PersistedSessionMetadata): void { + if (typeof sessionStorage === 'undefined') { + return; + } + sessionStorage.setItem(SESSION_STORAGE_KEY, JSON.stringify(metadata)); + } + + private clearPersistedMetadata(): void { + if (typeof sessionStorage === 'undefined') { + return; + } + sessionStorage.removeItem(SESSION_STORAGE_KEY); + } + + getActiveTenantId(): string | null { + return this.tenantId(); + } +} diff --git a/src/StellaOps.Web/src/app/core/auth/auth-storage.service.ts b/src/Web/StellaOps.Web/src/app/core/auth/auth-storage.service.ts similarity index 96% rename from src/StellaOps.Web/src/app/core/auth/auth-storage.service.ts rename to src/Web/StellaOps.Web/src/app/core/auth/auth-storage.service.ts index 7017dc16..70ba6fff 100644 --- a/src/StellaOps.Web/src/app/core/auth/auth-storage.service.ts +++ b/src/Web/StellaOps.Web/src/app/core/auth/auth-storage.service.ts @@ -1,45 +1,45 @@ -import { Injectable } from '@angular/core'; - -const LOGIN_REQUEST_KEY = 'stellaops.auth.login.request'; - -export interface PendingLoginRequest { - readonly state: string; - readonly codeVerifier: string; - readonly createdAtEpochMs: number; - readonly returnUrl?: string; - readonly nonce?: string; -} - -@Injectable({ - providedIn: 'root', -}) -export class AuthStorageService { - savePendingLogin(request: PendingLoginRequest): void { - if (typeof sessionStorage === 'undefined') { - return; - } - sessionStorage.setItem(LOGIN_REQUEST_KEY, JSON.stringify(request)); - } - - consumePendingLogin(expectedState: string): PendingLoginRequest | null { - if (typeof sessionStorage === 'undefined') { - return null; - } - - const raw = sessionStorage.getItem(LOGIN_REQUEST_KEY); - if (!raw) { - return null; - } - - sessionStorage.removeItem(LOGIN_REQUEST_KEY); - try { - const request = JSON.parse(raw) as PendingLoginRequest; - if (request.state !== expectedState) { - return null; - } - return request; - } catch { - return null; - } - } -} +import { Injectable } from '@angular/core'; + +const LOGIN_REQUEST_KEY = 'stellaops.auth.login.request'; + +export interface PendingLoginRequest { + readonly state: string; + readonly codeVerifier: string; + readonly createdAtEpochMs: number; + readonly returnUrl?: string; + readonly nonce?: string; +} + +@Injectable({ + providedIn: 'root', +}) +export class AuthStorageService { + savePendingLogin(request: PendingLoginRequest): void { + if (typeof sessionStorage === 'undefined') { + return; + } + sessionStorage.setItem(LOGIN_REQUEST_KEY, JSON.stringify(request)); + } + + consumePendingLogin(expectedState: string): PendingLoginRequest | null { + if (typeof sessionStorage === 'undefined') { + return null; + } + + const raw = sessionStorage.getItem(LOGIN_REQUEST_KEY); + if (!raw) { + return null; + } + + sessionStorage.removeItem(LOGIN_REQUEST_KEY); + try { + const request = JSON.parse(raw) as PendingLoginRequest; + if (request.state !== expectedState) { + return null; + } + return request; + } catch { + return null; + } + } +} diff --git a/src/StellaOps.Web/src/app/core/auth/authority-auth.service.ts b/src/Web/StellaOps.Web/src/app/core/auth/authority-auth.service.ts similarity index 96% rename from src/StellaOps.Web/src/app/core/auth/authority-auth.service.ts rename to 
src/Web/StellaOps.Web/src/app/core/auth/authority-auth.service.ts index 56a50bc4..cb7e1d82 100644 --- a/src/StellaOps.Web/src/app/core/auth/authority-auth.service.ts +++ b/src/Web/StellaOps.Web/src/app/core/auth/authority-auth.service.ts @@ -1,622 +1,622 @@ -import { HttpClient, HttpHeaders, HttpResponse } from '@angular/common/http'; -import { Injectable } from '@angular/core'; -import { firstValueFrom } from 'rxjs'; - -import { AppConfigService } from '../config/app-config.service'; -import { AuthorityConfig } from '../config/app-config.model'; -import { ConsoleSessionService } from '../console/console-session.service'; -import { - ACCESS_TOKEN_REFRESH_THRESHOLD_MS, - AuthErrorReason, - AuthSession, - AuthTokens, -} from './auth-session.model'; -import { AuthSessionStore } from './auth-session.store'; -import { - AuthStorageService, - PendingLoginRequest, -} from './auth-storage.service'; -import { DpopService } from './dpop/dpop.service'; -import { base64UrlDecode } from './dpop/jose-utilities'; -import { createPkcePair } from './pkce.util'; - -interface TokenResponse { - readonly access_token: string; - readonly token_type: string; - readonly expires_in: number; - readonly scope?: string; - readonly refresh_token?: string; - readonly id_token?: string; -} - -interface RefreshTokenResponse extends TokenResponse {} - -export interface AuthorizationHeaders { - readonly authorization: string; - readonly dpop: string; -} - -export interface CompleteLoginResult { - readonly returnUrl?: string; -} - -const TOKEN_CONTENT_TYPE = 'application/x-www-form-urlencoded'; - -interface AccessTokenMetadata { - tenantId: string | null; - scopes: string[]; - audiences: string[]; - authenticationTimeEpochMs: number | null; - freshAuthActive: boolean; - freshAuthExpiresAtEpochMs: number | null; -} - -@Injectable({ - providedIn: 'root', -}) -export class AuthorityAuthService { - private refreshTimer: ReturnType<typeof setTimeout> | null = null; - private refreshInFlight: Promise<void> | null = null; - private lastError: AuthErrorReason | null = null; - - constructor( - private readonly http: HttpClient, - private readonly config: AppConfigService, - private readonly sessionStore: AuthSessionStore, - private readonly storage: AuthStorageService, - private readonly dpop: DpopService, - private readonly consoleSession: ConsoleSessionService - ) {} - - get error(): AuthErrorReason | null { - return this.lastError; - } - - async beginLogin(returnUrl?: string): Promise<void> { - const authority = this.config.authority; - const pkce = await createPkcePair(); - const state = crypto.randomUUID ? crypto.randomUUID() : createRandomId(); - const nonce = crypto.randomUUID ? crypto.randomUUID() : createRandomId(); - - // Generate the DPoP key pair up-front so the same key is bound to the token. - await this.dpop.getThumbprint(); - - const authorizeUrl = this.buildAuthorizeUrl(authority, { - state, - nonce, - codeChallenge: pkce.challenge, - codeChallengeMethod: pkce.method, - returnUrl, - }); - - const now = Date.now(); - this.storage.savePendingLogin({ - state, - codeVerifier: pkce.verifier, - createdAtEpochMs: now, - returnUrl, - nonce, - }); - - window.location.assign(authorizeUrl); - } - - /** - * Completes the authorization code flow after the Authority redirects back with ?code & ?state. 
- */ - async completeLoginFromRedirect( - queryParams: URLSearchParams - ): Promise<CompleteLoginResult> { - const code = queryParams.get('code'); - const state = queryParams.get('state'); - if (!code || !state) { - throw new Error('Missing authorization code or state.'); - } - - const pending = this.storage.consumePendingLogin(state); - if (!pending) { - this.lastError = 'invalid_state'; - throw new Error('State parameter did not match pending login request.'); - } - - try { - const tokenResponse = await this.exchangeCodeForTokens( - code, - pending.codeVerifier - ); - await this.onTokenResponse(tokenResponse, pending.nonce ?? null); - this.lastError = null; - return { returnUrl: pending.returnUrl }; - } catch (error) { - this.lastError = 'token_exchange_failed'; - this.sessionStore.clear(); - this.consoleSession.clear(); - throw error; - } - } - - async ensureValidAccessToken(): Promise<string | null> { - const session = this.sessionStore.session(); - if (!session) { - return null; - } - - const now = Date.now(); - if (now < session.tokens.expiresAtEpochMs - ACCESS_TOKEN_REFRESH_THRESHOLD_MS) { - return session.tokens.accessToken; - } - - await this.refreshAccessToken(); - const refreshed = this.sessionStore.session(); - return refreshed?.tokens.accessToken ?? null; - } - - async getAuthHeadersForRequest( - url: string, - method: string - ): Promise<AuthorizationHeaders | null> { - const accessToken = await this.ensureValidAccessToken(); - if (!accessToken) { - return null; - } - const dpopProof = await this.dpop.createProof({ - htm: method, - htu: url, - accessToken, - }); - return { - authorization: `DPoP ${accessToken}`, - dpop: dpopProof, - }; - } - - async refreshAccessToken(): Promise<void> { - const session = this.sessionStore.session(); - const refreshToken = session?.tokens.refreshToken; - if (!refreshToken) { - return; - } - - if (this.refreshInFlight) { - await this.refreshInFlight; - return; - } - - this.refreshInFlight = this.executeRefresh(refreshToken) - .catch((error) => { - this.lastError = 'refresh_failed'; - this.sessionStore.clear(); - this.consoleSession.clear(); - throw error; - }) - .finally(() => { - this.refreshInFlight = null; - }); - - await this.refreshInFlight; - } - - async logout(): Promise<void> { - const session = this.sessionStore.session(); - this.cancelRefreshTimer(); - this.sessionStore.clear(); - this.consoleSession.clear(); - await this.dpop.setNonce(null); - - const authority = this.config.authority; - if (!authority.logoutEndpoint) { - return; - } - - if (session?.identity.idToken) { - const url = new URL(authority.logoutEndpoint, authority.issuer); - url.searchParams.set('post_logout_redirect_uri', authority.postLogoutRedirectUri ?? authority.redirectUri); - url.searchParams.set('id_token_hint', session.identity.idToken); - window.location.assign(url.toString()); - } else { - window.location.assign(authority.postLogoutRedirectUri ?? 
authority.redirectUri); - } - } - - private async exchangeCodeForTokens( - code: string, - codeVerifier: string - ): Promise<HttpResponse<TokenResponse>> { - const authority = this.config.authority; - const tokenUrl = new URL(authority.tokenEndpoint, authority.issuer).toString(); - - const body = new URLSearchParams(); - body.set('grant_type', 'authorization_code'); - body.set('code', code); - body.set('redirect_uri', authority.redirectUri); - body.set('client_id', authority.clientId); - body.set('code_verifier', codeVerifier); - if (authority.audience) { - body.set('audience', authority.audience); - } - - const dpopProof = await this.dpop.createProof({ - htm: 'POST', - htu: tokenUrl, - }); - - const headers = new HttpHeaders({ - 'Content-Type': TOKEN_CONTENT_TYPE, - DPoP: dpopProof, - }); - - return firstValueFrom( - this.http.post<TokenResponse>(tokenUrl, body.toString(), { - headers, - withCredentials: true, - observe: 'response', - }) - ); - } - - private async executeRefresh(refreshToken: string): Promise<void> { - const authority = this.config.authority; - const tokenUrl = new URL(authority.tokenEndpoint, authority.issuer).toString(); - const body = new URLSearchParams(); - body.set('grant_type', 'refresh_token'); - body.set('refresh_token', refreshToken); - body.set('client_id', authority.clientId); - if (authority.audience) { - body.set('audience', authority.audience); - } - - const proof = await this.dpop.createProof({ - htm: 'POST', - htu: tokenUrl, - }); - - const headers = new HttpHeaders({ - 'Content-Type': TOKEN_CONTENT_TYPE, - DPoP: proof, - }); - - const response = await firstValueFrom( - this.http.post<RefreshTokenResponse>(tokenUrl, body.toString(), { - headers, - withCredentials: true, - observe: 'response', - }) - ); - - await this.onTokenResponse(response, null); - } - - private async onTokenResponse( - response: HttpResponse<TokenResponse>, - expectedNonce: string | null - ): Promise<void> { - const nonce = response.headers.get('DPoP-Nonce'); - if (nonce) { - await this.dpop.setNonce(nonce); - } - - const payload = response.body; - if (!payload) { - throw new Error('Token response did not include a body.'); - } - - const tokens = this.toAuthTokens(payload); - const accessMetadata = this.parseAccessTokenMetadata(payload.access_token); - const identity = this.parseIdentity(payload.id_token ?? '', expectedNonce); - const thumbprint = await this.dpop.getThumbprint(); - if (!thumbprint) { - throw new Error('DPoP thumbprint unavailable.'); - } - - const session: AuthSession = { - tokens, - identity, - dpopKeyThumbprint: thumbprint, - issuedAtEpochMs: Date.now(), - tenantId: accessMetadata.tenantId, - scopes: accessMetadata.scopes, - audiences: accessMetadata.audiences, - authenticationTimeEpochMs: accessMetadata.authenticationTimeEpochMs, - freshAuthActive: accessMetadata.freshAuthActive, - freshAuthExpiresAtEpochMs: accessMetadata.freshAuthExpiresAtEpochMs, - }; - this.sessionStore.setSession(session); - void this.consoleSession.loadConsoleContext(); - this.scheduleRefresh(tokens, this.config.authority); - } - - private toAuthTokens(payload: TokenResponse): AuthTokens { - const expiresAtEpochMs = Date.now() + payload.expires_in * 1000; - return { - accessToken: payload.access_token, - tokenType: (payload.token_type ?? 'Bearer') as 'Bearer', - refreshToken: payload.refresh_token, - scope: payload.scope ?? 
'', - expiresAtEpochMs, - }; - } - - private parseIdentity( - idToken: string, - expectedNonce: string | null - ): AuthSession['identity'] { - if (!idToken) { - return { - subject: 'unknown', - roles: [], - }; - } - - const claims = decodeJwt(idToken); - const nonceClaim = claims['nonce']; - if ( - expectedNonce && - typeof nonceClaim === 'string' && - nonceClaim !== expectedNonce - ) { - throw new Error('OIDC nonce mismatch.'); - } - - const subjectClaim = claims['sub']; - const nameClaim = claims['name']; - const emailClaim = claims['email']; - const rolesClaim = claims['role']; - - return { - subject: typeof subjectClaim === 'string' ? subjectClaim : 'unknown', - name: typeof nameClaim === 'string' ? nameClaim : undefined, - email: typeof emailClaim === 'string' ? emailClaim : undefined, - roles: Array.isArray(rolesClaim) - ? rolesClaim.filter((entry: unknown): entry is string => - typeof entry === 'string' - ) - : [], - idToken, - }; - } - - private scheduleRefresh(tokens: AuthTokens, authority: AuthorityConfig): void { - this.cancelRefreshTimer(); - const leeway = - (authority.refreshLeewaySeconds ?? 60) * 1000 + - ACCESS_TOKEN_REFRESH_THRESHOLD_MS; - const now = Date.now(); - const ttl = Math.max(tokens.expiresAtEpochMs - now - leeway, 5_000); - this.refreshTimer = setTimeout(() => { - void this.refreshAccessToken(); - }, ttl); - } - - private cancelRefreshTimer(): void { - if (this.refreshTimer) { - clearTimeout(this.refreshTimer); - this.refreshTimer = null; - } - } - - private parseAccessTokenMetadata(accessToken: string | undefined): AccessTokenMetadata { - if (!accessToken) { - return { - tenantId: null, - scopes: [], - audiences: [], - authenticationTimeEpochMs: null, - freshAuthActive: false, - freshAuthExpiresAtEpochMs: null, - }; - } - - const claims = decodeJwt(accessToken); - const tenantClaim = claims['stellaops:tenant']; - const tenantId = - typeof tenantClaim === 'string' && tenantClaim.trim().length > 0 - ? tenantClaim.trim() - : null; - - const scopeSet = new Set<string>(); - const scpClaim = claims['scp']; - if (Array.isArray(scpClaim)) { - for (const entry of scpClaim) { - if (typeof entry === 'string' && entry.trim().length > 0) { - scopeSet.add(entry.trim()); - } - } - } - - const scopeClaim = claims['scope']; - if (typeof scopeClaim === 'string') { - scopeClaim - .split(/\s+/) - .map((entry) => entry.trim()) - .filter((entry) => entry.length > 0) - .forEach((entry) => scopeSet.add(entry)); - } - - const audiences: string[] = []; - const audClaim = claims['aud']; - if (Array.isArray(audClaim)) { - for (const entry of audClaim) { - if (typeof entry === 'string' && entry.trim().length > 0) { - audiences.push(entry.trim()); - } - } - } else if (typeof audClaim === 'string' && audClaim.trim().length > 0) { - audiences.push(audClaim.trim()); - } - - const authenticationTimeEpochMs = this.parseEpochSeconds( - claims['auth_time'] ?? claims['authentication_time'] - ); - - const freshAuthActive = this.parseFreshAuthFlag( - claims['stellaops:fresh_auth'] ?? 
claims['fresh_auth'] - ); - - const ttlMs = this.parseDurationToMilliseconds( - claims['stellaops:fresh_auth_ttl'] - ); - - let freshAuthExpiresAtEpochMs: number | null = null; - if (authenticationTimeEpochMs !== null) { - if (ttlMs !== null) { - freshAuthExpiresAtEpochMs = authenticationTimeEpochMs + ttlMs; - } else if (freshAuthActive) { - freshAuthExpiresAtEpochMs = authenticationTimeEpochMs + 300_000; - } - } - - return { - tenantId, - scopes: Array.from(scopeSet).sort(), - audiences: audiences.sort(), - authenticationTimeEpochMs, - freshAuthActive, - freshAuthExpiresAtEpochMs, - }; - } - - private parseFreshAuthFlag(value: unknown): boolean { - if (typeof value === 'boolean') { - return value; - } - if (typeof value === 'string') { - const normalized = value.trim().toLowerCase(); - if (normalized === 'true' || normalized === '1') { - return true; - } - if (normalized === 'false' || normalized === '0') { - return false; - } - } - return false; - } - - private parseDurationToMilliseconds(value: unknown): number | null { - if (typeof value === 'number' && Number.isFinite(value)) { - return Math.max(0, value * 1000); - } - if (typeof value !== 'string') { - return null; - } - - const trimmed = value.trim(); - if (!trimmed) { - return null; - } - - if (/^-?\d+(\.\d+)?$/.test(trimmed)) { - const seconds = Number(trimmed); - if (!Number.isFinite(seconds)) { - return null; - } - return Math.max(0, seconds * 1000); - } - - const isoMatch = - /^P(T(?:(\d+)H)?(?:(\d+)M)?(?:(\d+(?:\.\d+)?)S)?)$/i.exec(trimmed); - if (isoMatch) { - const hours = isoMatch[2] ? Number(isoMatch[2]) : 0; - const minutes = isoMatch[3] ? Number(isoMatch[3]) : 0; - const seconds = isoMatch[4] ? Number(isoMatch[4]) : 0; - const totalSeconds = hours * 3600 + minutes * 60 + seconds; - return Math.max(0, totalSeconds * 1000); - } - - const spanMatch = - /^(-)?(?:(\d+)\.)?(\d{1,2}):([0-5]?\d):([0-5]?\d)(\.\d+)?$/.exec(trimmed); - if (spanMatch) { - const isNegative = !!spanMatch[1]; - const days = spanMatch[2] ? Number(spanMatch[2]) : 0; - const hours = Number(spanMatch[3]); - const minutes = Number(spanMatch[4]); - const seconds = - Number(spanMatch[5]) + (spanMatch[6] ? Number(spanMatch[6]) : 0); - const totalSeconds = - days * 86400 + hours * 3600 + minutes * 60 + seconds; - if (!Number.isFinite(totalSeconds)) { - return null; - } - const ms = totalSeconds * 1000; - return isNegative ? 
0 : Math.max(0, ms); - } - - return null; - } - - private parseEpochSeconds(value: unknown): number | null { - if (typeof value === 'number' && Number.isFinite(value)) { - return value * 1000; - } - if (typeof value === 'string') { - const trimmed = value.trim(); - if (!trimmed) { - return null; - } - const numeric = Number(trimmed); - if (!Number.isNaN(numeric) && Number.isFinite(numeric)) { - return numeric * 1000; - } - const parsed = Date.parse(trimmed); - if (!Number.isNaN(parsed)) { - return parsed; - } - } - return null; - } - - private buildAuthorizeUrl( - authority: AuthorityConfig, - options: { - state: string; - nonce: string; - codeChallenge: string; - codeChallengeMethod: 'S256'; - returnUrl?: string; - } - ): string { - const authorizeUrl = new URL( - authority.authorizeEndpoint, - authority.issuer - ); - authorizeUrl.searchParams.set('response_type', 'code'); - authorizeUrl.searchParams.set('client_id', authority.clientId); - authorizeUrl.searchParams.set('redirect_uri', authority.redirectUri); - authorizeUrl.searchParams.set('scope', authority.scope); - authorizeUrl.searchParams.set('state', options.state); - authorizeUrl.searchParams.set('nonce', options.nonce); - authorizeUrl.searchParams.set('code_challenge', options.codeChallenge); - authorizeUrl.searchParams.set( - 'code_challenge_method', - options.codeChallengeMethod - ); - if (authority.audience) { - authorizeUrl.searchParams.set('audience', authority.audience); - } - if (options.returnUrl) { - authorizeUrl.searchParams.set('ui_return', options.returnUrl); - } - return authorizeUrl.toString(); - } -} - -function decodeJwt(token: string): Record<string, unknown> { - const parts = token.split('.'); - if (parts.length < 2) { - return {}; - } - const payload = base64UrlDecode(parts[1]); - const json = new TextDecoder().decode(payload); - try { - return JSON.parse(json) as Record<string, unknown>; - } catch { - return {}; - } -} - -function createRandomId(): string { - const array = new Uint8Array(16); - crypto.getRandomValues(array); - return Array.from(array, (value) => - value.toString(16).padStart(2, '0') - ).join(''); -} +import { HttpClient, HttpHeaders, HttpResponse } from '@angular/common/http'; +import { Injectable } from '@angular/core'; +import { firstValueFrom } from 'rxjs'; + +import { AppConfigService } from '../config/app-config.service'; +import { AuthorityConfig } from '../config/app-config.model'; +import { ConsoleSessionService } from '../console/console-session.service'; +import { + ACCESS_TOKEN_REFRESH_THRESHOLD_MS, + AuthErrorReason, + AuthSession, + AuthTokens, +} from './auth-session.model'; +import { AuthSessionStore } from './auth-session.store'; +import { + AuthStorageService, + PendingLoginRequest, +} from './auth-storage.service'; +import { DpopService } from './dpop/dpop.service'; +import { base64UrlDecode } from './dpop/jose-utilities'; +import { createPkcePair } from './pkce.util'; + +interface TokenResponse { + readonly access_token: string; + readonly token_type: string; + readonly expires_in: number; + readonly scope?: string; + readonly refresh_token?: string; + readonly id_token?: string; +} + +interface RefreshTokenResponse extends TokenResponse {} + +export interface AuthorizationHeaders { + readonly authorization: string; + readonly dpop: string; +} + +export interface CompleteLoginResult { + readonly returnUrl?: string; +} + +const TOKEN_CONTENT_TYPE = 'application/x-www-form-urlencoded'; + +interface AccessTokenMetadata { + tenantId: string | null; + scopes: string[]; + 
audiences: string[]; + authenticationTimeEpochMs: number | null; + freshAuthActive: boolean; + freshAuthExpiresAtEpochMs: number | null; +} + +@Injectable({ + providedIn: 'root', +}) +export class AuthorityAuthService { + private refreshTimer: ReturnType<typeof setTimeout> | null = null; + private refreshInFlight: Promise<void> | null = null; + private lastError: AuthErrorReason | null = null; + + constructor( + private readonly http: HttpClient, + private readonly config: AppConfigService, + private readonly sessionStore: AuthSessionStore, + private readonly storage: AuthStorageService, + private readonly dpop: DpopService, + private readonly consoleSession: ConsoleSessionService + ) {} + + get error(): AuthErrorReason | null { + return this.lastError; + } + + async beginLogin(returnUrl?: string): Promise<void> { + const authority = this.config.authority; + const pkce = await createPkcePair(); + const state = crypto.randomUUID ? crypto.randomUUID() : createRandomId(); + const nonce = crypto.randomUUID ? crypto.randomUUID() : createRandomId(); + + // Generate the DPoP key pair up-front so the same key is bound to the token. + await this.dpop.getThumbprint(); + + const authorizeUrl = this.buildAuthorizeUrl(authority, { + state, + nonce, + codeChallenge: pkce.challenge, + codeChallengeMethod: pkce.method, + returnUrl, + }); + + const now = Date.now(); + this.storage.savePendingLogin({ + state, + codeVerifier: pkce.verifier, + createdAtEpochMs: now, + returnUrl, + nonce, + }); + + window.location.assign(authorizeUrl); + } + + /** + * Completes the authorization code flow after the Authority redirects back with ?code & ?state. + */ + async completeLoginFromRedirect( + queryParams: URLSearchParams + ): Promise<CompleteLoginResult> { + const code = queryParams.get('code'); + const state = queryParams.get('state'); + if (!code || !state) { + throw new Error('Missing authorization code or state.'); + } + + const pending = this.storage.consumePendingLogin(state); + if (!pending) { + this.lastError = 'invalid_state'; + throw new Error('State parameter did not match pending login request.'); + } + + try { + const tokenResponse = await this.exchangeCodeForTokens( + code, + pending.codeVerifier + ); + await this.onTokenResponse(tokenResponse, pending.nonce ?? null); + this.lastError = null; + return { returnUrl: pending.returnUrl }; + } catch (error) { + this.lastError = 'token_exchange_failed'; + this.sessionStore.clear(); + this.consoleSession.clear(); + throw error; + } + } + + async ensureValidAccessToken(): Promise<string | null> { + const session = this.sessionStore.session(); + if (!session) { + return null; + } + + const now = Date.now(); + if (now < session.tokens.expiresAtEpochMs - ACCESS_TOKEN_REFRESH_THRESHOLD_MS) { + return session.tokens.accessToken; + } + + await this.refreshAccessToken(); + const refreshed = this.sessionStore.session(); + return refreshed?.tokens.accessToken ?? 
null; + } + + async getAuthHeadersForRequest( + url: string, + method: string + ): Promise<AuthorizationHeaders | null> { + const accessToken = await this.ensureValidAccessToken(); + if (!accessToken) { + return null; + } + const dpopProof = await this.dpop.createProof({ + htm: method, + htu: url, + accessToken, + }); + return { + authorization: `DPoP ${accessToken}`, + dpop: dpopProof, + }; + } + + async refreshAccessToken(): Promise<void> { + const session = this.sessionStore.session(); + const refreshToken = session?.tokens.refreshToken; + if (!refreshToken) { + return; + } + + if (this.refreshInFlight) { + await this.refreshInFlight; + return; + } + + this.refreshInFlight = this.executeRefresh(refreshToken) + .catch((error) => { + this.lastError = 'refresh_failed'; + this.sessionStore.clear(); + this.consoleSession.clear(); + throw error; + }) + .finally(() => { + this.refreshInFlight = null; + }); + + await this.refreshInFlight; + } + + async logout(): Promise<void> { + const session = this.sessionStore.session(); + this.cancelRefreshTimer(); + this.sessionStore.clear(); + this.consoleSession.clear(); + await this.dpop.setNonce(null); + + const authority = this.config.authority; + if (!authority.logoutEndpoint) { + return; + } + + if (session?.identity.idToken) { + const url = new URL(authority.logoutEndpoint, authority.issuer); + url.searchParams.set('post_logout_redirect_uri', authority.postLogoutRedirectUri ?? authority.redirectUri); + url.searchParams.set('id_token_hint', session.identity.idToken); + window.location.assign(url.toString()); + } else { + window.location.assign(authority.postLogoutRedirectUri ?? authority.redirectUri); + } + } + + private async exchangeCodeForTokens( + code: string, + codeVerifier: string + ): Promise<HttpResponse<TokenResponse>> { + const authority = this.config.authority; + const tokenUrl = new URL(authority.tokenEndpoint, authority.issuer).toString(); + + const body = new URLSearchParams(); + body.set('grant_type', 'authorization_code'); + body.set('code', code); + body.set('redirect_uri', authority.redirectUri); + body.set('client_id', authority.clientId); + body.set('code_verifier', codeVerifier); + if (authority.audience) { + body.set('audience', authority.audience); + } + + const dpopProof = await this.dpop.createProof({ + htm: 'POST', + htu: tokenUrl, + }); + + const headers = new HttpHeaders({ + 'Content-Type': TOKEN_CONTENT_TYPE, + DPoP: dpopProof, + }); + + return firstValueFrom( + this.http.post<TokenResponse>(tokenUrl, body.toString(), { + headers, + withCredentials: true, + observe: 'response', + }) + ); + } + + private async executeRefresh(refreshToken: string): Promise<void> { + const authority = this.config.authority; + const tokenUrl = new URL(authority.tokenEndpoint, authority.issuer).toString(); + const body = new URLSearchParams(); + body.set('grant_type', 'refresh_token'); + body.set('refresh_token', refreshToken); + body.set('client_id', authority.clientId); + if (authority.audience) { + body.set('audience', authority.audience); + } + + const proof = await this.dpop.createProof({ + htm: 'POST', + htu: tokenUrl, + }); + + const headers = new HttpHeaders({ + 'Content-Type': TOKEN_CONTENT_TYPE, + DPoP: proof, + }); + + const response = await firstValueFrom( + this.http.post<RefreshTokenResponse>(tokenUrl, body.toString(), { + headers, + withCredentials: true, + observe: 'response', + }) + ); + + await this.onTokenResponse(response, null); + } + + private async onTokenResponse( + response: HttpResponse<TokenResponse>, + 
expectedNonce: string | null + ): Promise<void> { + const nonce = response.headers.get('DPoP-Nonce'); + if (nonce) { + await this.dpop.setNonce(nonce); + } + + const payload = response.body; + if (!payload) { + throw new Error('Token response did not include a body.'); + } + + const tokens = this.toAuthTokens(payload); + const accessMetadata = this.parseAccessTokenMetadata(payload.access_token); + const identity = this.parseIdentity(payload.id_token ?? '', expectedNonce); + const thumbprint = await this.dpop.getThumbprint(); + if (!thumbprint) { + throw new Error('DPoP thumbprint unavailable.'); + } + + const session: AuthSession = { + tokens, + identity, + dpopKeyThumbprint: thumbprint, + issuedAtEpochMs: Date.now(), + tenantId: accessMetadata.tenantId, + scopes: accessMetadata.scopes, + audiences: accessMetadata.audiences, + authenticationTimeEpochMs: accessMetadata.authenticationTimeEpochMs, + freshAuthActive: accessMetadata.freshAuthActive, + freshAuthExpiresAtEpochMs: accessMetadata.freshAuthExpiresAtEpochMs, + }; + this.sessionStore.setSession(session); + void this.consoleSession.loadConsoleContext(); + this.scheduleRefresh(tokens, this.config.authority); + } + + private toAuthTokens(payload: TokenResponse): AuthTokens { + const expiresAtEpochMs = Date.now() + payload.expires_in * 1000; + return { + accessToken: payload.access_token, + tokenType: (payload.token_type ?? 'Bearer') as 'Bearer', + refreshToken: payload.refresh_token, + scope: payload.scope ?? '', + expiresAtEpochMs, + }; + } + + private parseIdentity( + idToken: string, + expectedNonce: string | null + ): AuthSession['identity'] { + if (!idToken) { + return { + subject: 'unknown', + roles: [], + }; + } + + const claims = decodeJwt(idToken); + const nonceClaim = claims['nonce']; + if ( + expectedNonce && + typeof nonceClaim === 'string' && + nonceClaim !== expectedNonce + ) { + throw new Error('OIDC nonce mismatch.'); + } + + const subjectClaim = claims['sub']; + const nameClaim = claims['name']; + const emailClaim = claims['email']; + const rolesClaim = claims['role']; + + return { + subject: typeof subjectClaim === 'string' ? subjectClaim : 'unknown', + name: typeof nameClaim === 'string' ? nameClaim : undefined, + email: typeof emailClaim === 'string' ? emailClaim : undefined, + roles: Array.isArray(rolesClaim) + ? rolesClaim.filter((entry: unknown): entry is string => + typeof entry === 'string' + ) + : [], + idToken, + }; + } + + private scheduleRefresh(tokens: AuthTokens, authority: AuthorityConfig): void { + this.cancelRefreshTimer(); + const leeway = + (authority.refreshLeewaySeconds ?? 60) * 1000 + + ACCESS_TOKEN_REFRESH_THRESHOLD_MS; + const now = Date.now(); + const ttl = Math.max(tokens.expiresAtEpochMs - now - leeway, 5_000); + this.refreshTimer = setTimeout(() => { + void this.refreshAccessToken(); + }, ttl); + } + + private cancelRefreshTimer(): void { + if (this.refreshTimer) { + clearTimeout(this.refreshTimer); + this.refreshTimer = null; + } + } + + private parseAccessTokenMetadata(accessToken: string | undefined): AccessTokenMetadata { + if (!accessToken) { + return { + tenantId: null, + scopes: [], + audiences: [], + authenticationTimeEpochMs: null, + freshAuthActive: false, + freshAuthExpiresAtEpochMs: null, + }; + } + + const claims = decodeJwt(accessToken); + const tenantClaim = claims['stellaops:tenant']; + const tenantId = + typeof tenantClaim === 'string' && tenantClaim.trim().length > 0 + ? 
tenantClaim.trim() + : null; + + const scopeSet = new Set<string>(); + const scpClaim = claims['scp']; + if (Array.isArray(scpClaim)) { + for (const entry of scpClaim) { + if (typeof entry === 'string' && entry.trim().length > 0) { + scopeSet.add(entry.trim()); + } + } + } + + const scopeClaim = claims['scope']; + if (typeof scopeClaim === 'string') { + scopeClaim + .split(/\s+/) + .map((entry) => entry.trim()) + .filter((entry) => entry.length > 0) + .forEach((entry) => scopeSet.add(entry)); + } + + const audiences: string[] = []; + const audClaim = claims['aud']; + if (Array.isArray(audClaim)) { + for (const entry of audClaim) { + if (typeof entry === 'string' && entry.trim().length > 0) { + audiences.push(entry.trim()); + } + } + } else if (typeof audClaim === 'string' && audClaim.trim().length > 0) { + audiences.push(audClaim.trim()); + } + + const authenticationTimeEpochMs = this.parseEpochSeconds( + claims['auth_time'] ?? claims['authentication_time'] + ); + + const freshAuthActive = this.parseFreshAuthFlag( + claims['stellaops:fresh_auth'] ?? claims['fresh_auth'] + ); + + const ttlMs = this.parseDurationToMilliseconds( + claims['stellaops:fresh_auth_ttl'] + ); + + let freshAuthExpiresAtEpochMs: number | null = null; + if (authenticationTimeEpochMs !== null) { + if (ttlMs !== null) { + freshAuthExpiresAtEpochMs = authenticationTimeEpochMs + ttlMs; + } else if (freshAuthActive) { + freshAuthExpiresAtEpochMs = authenticationTimeEpochMs + 300_000; + } + } + + return { + tenantId, + scopes: Array.from(scopeSet).sort(), + audiences: audiences.sort(), + authenticationTimeEpochMs, + freshAuthActive, + freshAuthExpiresAtEpochMs, + }; + } + + private parseFreshAuthFlag(value: unknown): boolean { + if (typeof value === 'boolean') { + return value; + } + if (typeof value === 'string') { + const normalized = value.trim().toLowerCase(); + if (normalized === 'true' || normalized === '1') { + return true; + } + if (normalized === 'false' || normalized === '0') { + return false; + } + } + return false; + } + + private parseDurationToMilliseconds(value: unknown): number | null { + if (typeof value === 'number' && Number.isFinite(value)) { + return Math.max(0, value * 1000); + } + if (typeof value !== 'string') { + return null; + } + + const trimmed = value.trim(); + if (!trimmed) { + return null; + } + + if (/^-?\d+(\.\d+)?$/.test(trimmed)) { + const seconds = Number(trimmed); + if (!Number.isFinite(seconds)) { + return null; + } + return Math.max(0, seconds * 1000); + } + + const isoMatch = + /^P(T(?:(\d+)H)?(?:(\d+)M)?(?:(\d+(?:\.\d+)?)S)?)$/i.exec(trimmed); + if (isoMatch) { + const hours = isoMatch[2] ? Number(isoMatch[2]) : 0; + const minutes = isoMatch[3] ? Number(isoMatch[3]) : 0; + const seconds = isoMatch[4] ? Number(isoMatch[4]) : 0; + const totalSeconds = hours * 3600 + minutes * 60 + seconds; + return Math.max(0, totalSeconds * 1000); + } + + const spanMatch = + /^(-)?(?:(\d+)\.)?(\d{1,2}):([0-5]?\d):([0-5]?\d)(\.\d+)?$/.exec(trimmed); + if (spanMatch) { + const isNegative = !!spanMatch[1]; + const days = spanMatch[2] ? Number(spanMatch[2]) : 0; + const hours = Number(spanMatch[3]); + const minutes = Number(spanMatch[4]); + const seconds = + Number(spanMatch[5]) + (spanMatch[6] ? Number(spanMatch[6]) : 0); + const totalSeconds = + days * 86400 + hours * 3600 + minutes * 60 + seconds; + if (!Number.isFinite(totalSeconds)) { + return null; + } + const ms = totalSeconds * 1000; + return isNegative ? 
0 : Math.max(0, ms); + } + + return null; + } + + private parseEpochSeconds(value: unknown): number | null { + if (typeof value === 'number' && Number.isFinite(value)) { + return value * 1000; + } + if (typeof value === 'string') { + const trimmed = value.trim(); + if (!trimmed) { + return null; + } + const numeric = Number(trimmed); + if (!Number.isNaN(numeric) && Number.isFinite(numeric)) { + return numeric * 1000; + } + const parsed = Date.parse(trimmed); + if (!Number.isNaN(parsed)) { + return parsed; + } + } + return null; + } + + private buildAuthorizeUrl( + authority: AuthorityConfig, + options: { + state: string; + nonce: string; + codeChallenge: string; + codeChallengeMethod: 'S256'; + returnUrl?: string; + } + ): string { + const authorizeUrl = new URL( + authority.authorizeEndpoint, + authority.issuer + ); + authorizeUrl.searchParams.set('response_type', 'code'); + authorizeUrl.searchParams.set('client_id', authority.clientId); + authorizeUrl.searchParams.set('redirect_uri', authority.redirectUri); + authorizeUrl.searchParams.set('scope', authority.scope); + authorizeUrl.searchParams.set('state', options.state); + authorizeUrl.searchParams.set('nonce', options.nonce); + authorizeUrl.searchParams.set('code_challenge', options.codeChallenge); + authorizeUrl.searchParams.set( + 'code_challenge_method', + options.codeChallengeMethod + ); + if (authority.audience) { + authorizeUrl.searchParams.set('audience', authority.audience); + } + if (options.returnUrl) { + authorizeUrl.searchParams.set('ui_return', options.returnUrl); + } + return authorizeUrl.toString(); + } +} + +function decodeJwt(token: string): Record<string, unknown> { + const parts = token.split('.'); + if (parts.length < 2) { + return {}; + } + const payload = base64UrlDecode(parts[1]); + const json = new TextDecoder().decode(payload); + try { + return JSON.parse(json) as Record<string, unknown>; + } catch { + return {}; + } +} + +function createRandomId(): string { + const array = new Uint8Array(16); + crypto.getRandomValues(array); + return Array.from(array, (value) => + value.toString(16).padStart(2, '0') + ).join(''); +} diff --git a/src/StellaOps.Web/src/app/core/auth/dpop/dpop-key-store.ts b/src/Web/StellaOps.Web/src/app/core/auth/dpop/dpop-key-store.ts similarity index 96% rename from src/StellaOps.Web/src/app/core/auth/dpop/dpop-key-store.ts rename to src/Web/StellaOps.Web/src/app/core/auth/dpop/dpop-key-store.ts index f9231aa3..fd2cb352 100644 --- a/src/StellaOps.Web/src/app/core/auth/dpop/dpop-key-store.ts +++ b/src/Web/StellaOps.Web/src/app/core/auth/dpop/dpop-key-store.ts @@ -1,181 +1,181 @@ -import { Injectable } from '@angular/core'; - -import { DPoPAlgorithm } from '../../config/app-config.model'; -import { computeJwkThumbprint } from './jose-utilities'; - -const DB_NAME = 'stellaops-auth'; -const STORE_NAME = 'dpopKeys'; -const PRIMARY_KEY = 'primary'; -const DB_VERSION = 1; - -interface PersistedKeyPair { - readonly id: string; - readonly algorithm: DPoPAlgorithm; - readonly publicJwk: JsonWebKey; - readonly privateJwk: JsonWebKey; - readonly thumbprint: string; - readonly createdAtIso: string; -} - -export interface LoadedDpopKeyPair { - readonly algorithm: DPoPAlgorithm; - readonly privateKey: CryptoKey; - readonly publicKey: CryptoKey; - readonly publicJwk: JsonWebKey; - readonly thumbprint: string; -} - -@Injectable({ - providedIn: 'root', -}) -export class DpopKeyStore { - private dbPromise: Promise<IDBDatabase> | null = null; - - async load(): Promise<LoadedDpopKeyPair | null> { - const record = 
await this.read(); - if (!record) { - return null; - } - - const [privateKey, publicKey] = await Promise.all([ - crypto.subtle.importKey( - 'jwk', - record.privateJwk, - this.toKeyAlgorithm(record.algorithm), - true, - ['sign'] - ), - crypto.subtle.importKey( - 'jwk', - record.publicJwk, - this.toKeyAlgorithm(record.algorithm), - true, - ['verify'] - ), - ]); - - return { - algorithm: record.algorithm, - privateKey, - publicKey, - publicJwk: record.publicJwk, - thumbprint: record.thumbprint, - }; - } - - async save( - keyPair: CryptoKeyPair, - algorithm: DPoPAlgorithm - ): Promise<LoadedDpopKeyPair> { - const [publicJwk, privateJwk] = await Promise.all([ - crypto.subtle.exportKey('jwk', keyPair.publicKey), - crypto.subtle.exportKey('jwk', keyPair.privateKey), - ]); - - if (!publicJwk) { - throw new Error('Failed to export public JWK for DPoP key pair.'); - } - - const thumbprint = await computeJwkThumbprint(publicJwk); - const record: PersistedKeyPair = { - id: PRIMARY_KEY, - algorithm, - publicJwk, - privateJwk, - thumbprint, - createdAtIso: new Date().toISOString(), - }; - - await this.write(record); - - return { - algorithm, - privateKey: keyPair.privateKey, - publicKey: keyPair.publicKey, - publicJwk, - thumbprint, - }; - } - - async clear(): Promise<void> { - const db = await this.openDb(); - await transactionPromise(db, STORE_NAME, 'readwrite', (store) => - store.delete(PRIMARY_KEY) - ); - } - - async generate(algorithm: DPoPAlgorithm): Promise<LoadedDpopKeyPair> { - const algo = this.toKeyAlgorithm(algorithm); - const keyPair = await crypto.subtle.generateKey(algo, true, [ - 'sign', - 'verify', - ]); - - const stored = await this.save(keyPair, algorithm); - return stored; - } - - private async read(): Promise<PersistedKeyPair | null> { - const db = await this.openDb(); - return transactionPromise(db, STORE_NAME, 'readonly', (store) => - store.get(PRIMARY_KEY) - ); - } - - private async write(record: PersistedKeyPair): Promise<void> { - const db = await this.openDb(); - await transactionPromise(db, STORE_NAME, 'readwrite', (store) => - store.put(record) - ); - } - - private toKeyAlgorithm(algorithm: DPoPAlgorithm): EcKeyImportParams { - switch (algorithm) { - case 'ES384': - return { name: 'ECDSA', namedCurve: 'P-384' }; - case 'EdDSA': - throw new Error('EdDSA DPoP keys are not yet supported.'); - case 'ES256': - default: - return { name: 'ECDSA', namedCurve: 'P-256' }; - } - } - - private async openDb(): Promise<IDBDatabase> { - if (typeof indexedDB === 'undefined') { - throw new Error('IndexedDB is not available for DPoP key persistence.'); - } - - if (!this.dbPromise) { - this.dbPromise = new Promise<IDBDatabase>((resolve, reject) => { - const request = indexedDB.open(DB_NAME, DB_VERSION); - request.onupgradeneeded = () => { - const db = request.result; - if (!db.objectStoreNames.contains(STORE_NAME)) { - db.createObjectStore(STORE_NAME, { keyPath: 'id' }); - } - }; - request.onsuccess = () => resolve(request.result); - request.onerror = () => reject(request.error); - }); - } - - return this.dbPromise; - } -} - -function transactionPromise<T>( - db: IDBDatabase, - storeName: string, - mode: IDBTransactionMode, - executor: (store: IDBObjectStore) => IDBRequest<T> -): Promise<T> { - return new Promise<T>((resolve, reject) => { - const transaction = db.transaction(storeName, mode); - const store = transaction.objectStore(storeName); - const request = executor(store); - request.onsuccess = () => resolve(request.result); - request.onerror = () => reject(request.error); - 
transaction.onabort = () => reject(transaction.error); - }); -} +import { Injectable } from '@angular/core'; + +import { DPoPAlgorithm } from '../../config/app-config.model'; +import { computeJwkThumbprint } from './jose-utilities'; + +const DB_NAME = 'stellaops-auth'; +const STORE_NAME = 'dpopKeys'; +const PRIMARY_KEY = 'primary'; +const DB_VERSION = 1; + +interface PersistedKeyPair { + readonly id: string; + readonly algorithm: DPoPAlgorithm; + readonly publicJwk: JsonWebKey; + readonly privateJwk: JsonWebKey; + readonly thumbprint: string; + readonly createdAtIso: string; +} + +export interface LoadedDpopKeyPair { + readonly algorithm: DPoPAlgorithm; + readonly privateKey: CryptoKey; + readonly publicKey: CryptoKey; + readonly publicJwk: JsonWebKey; + readonly thumbprint: string; +} + +@Injectable({ + providedIn: 'root', +}) +export class DpopKeyStore { + private dbPromise: Promise<IDBDatabase> | null = null; + + async load(): Promise<LoadedDpopKeyPair | null> { + const record = await this.read(); + if (!record) { + return null; + } + + const [privateKey, publicKey] = await Promise.all([ + crypto.subtle.importKey( + 'jwk', + record.privateJwk, + this.toKeyAlgorithm(record.algorithm), + true, + ['sign'] + ), + crypto.subtle.importKey( + 'jwk', + record.publicJwk, + this.toKeyAlgorithm(record.algorithm), + true, + ['verify'] + ), + ]); + + return { + algorithm: record.algorithm, + privateKey, + publicKey, + publicJwk: record.publicJwk, + thumbprint: record.thumbprint, + }; + } + + async save( + keyPair: CryptoKeyPair, + algorithm: DPoPAlgorithm + ): Promise<LoadedDpopKeyPair> { + const [publicJwk, privateJwk] = await Promise.all([ + crypto.subtle.exportKey('jwk', keyPair.publicKey), + crypto.subtle.exportKey('jwk', keyPair.privateKey), + ]); + + if (!publicJwk) { + throw new Error('Failed to export public JWK for DPoP key pair.'); + } + + const thumbprint = await computeJwkThumbprint(publicJwk); + const record: PersistedKeyPair = { + id: PRIMARY_KEY, + algorithm, + publicJwk, + privateJwk, + thumbprint, + createdAtIso: new Date().toISOString(), + }; + + await this.write(record); + + return { + algorithm, + privateKey: keyPair.privateKey, + publicKey: keyPair.publicKey, + publicJwk, + thumbprint, + }; + } + + async clear(): Promise<void> { + const db = await this.openDb(); + await transactionPromise(db, STORE_NAME, 'readwrite', (store) => + store.delete(PRIMARY_KEY) + ); + } + + async generate(algorithm: DPoPAlgorithm): Promise<LoadedDpopKeyPair> { + const algo = this.toKeyAlgorithm(algorithm); + const keyPair = await crypto.subtle.generateKey(algo, true, [ + 'sign', + 'verify', + ]); + + const stored = await this.save(keyPair, algorithm); + return stored; + } + + private async read(): Promise<PersistedKeyPair | null> { + const db = await this.openDb(); + return transactionPromise(db, STORE_NAME, 'readonly', (store) => + store.get(PRIMARY_KEY) + ); + } + + private async write(record: PersistedKeyPair): Promise<void> { + const db = await this.openDb(); + await transactionPromise(db, STORE_NAME, 'readwrite', (store) => + store.put(record) + ); + } + + private toKeyAlgorithm(algorithm: DPoPAlgorithm): EcKeyImportParams { + switch (algorithm) { + case 'ES384': + return { name: 'ECDSA', namedCurve: 'P-384' }; + case 'EdDSA': + throw new Error('EdDSA DPoP keys are not yet supported.'); + case 'ES256': + default: + return { name: 'ECDSA', namedCurve: 'P-256' }; + } + } + + private async openDb(): Promise<IDBDatabase> { + if (typeof indexedDB === 'undefined') { + throw new Error('IndexedDB is not 
available for DPoP key persistence.'); + } + + if (!this.dbPromise) { + this.dbPromise = new Promise<IDBDatabase>((resolve, reject) => { + const request = indexedDB.open(DB_NAME, DB_VERSION); + request.onupgradeneeded = () => { + const db = request.result; + if (!db.objectStoreNames.contains(STORE_NAME)) { + db.createObjectStore(STORE_NAME, { keyPath: 'id' }); + } + }; + request.onsuccess = () => resolve(request.result); + request.onerror = () => reject(request.error); + }); + } + + return this.dbPromise; + } +} + +function transactionPromise<T>( + db: IDBDatabase, + storeName: string, + mode: IDBTransactionMode, + executor: (store: IDBObjectStore) => IDBRequest<T> +): Promise<T> { + return new Promise<T>((resolve, reject) => { + const transaction = db.transaction(storeName, mode); + const store = transaction.objectStore(storeName); + const request = executor(store); + request.onsuccess = () => resolve(request.result); + request.onerror = () => reject(request.error); + transaction.onabort = () => reject(transaction.error); + }); +} diff --git a/src/StellaOps.Web/src/app/core/auth/dpop/dpop.service.spec.ts b/src/Web/StellaOps.Web/src/app/core/auth/dpop/dpop.service.spec.ts similarity index 97% rename from src/StellaOps.Web/src/app/core/auth/dpop/dpop.service.spec.ts rename to src/Web/StellaOps.Web/src/app/core/auth/dpop/dpop.service.spec.ts index c4b60a63..9a79350d 100644 --- a/src/StellaOps.Web/src/app/core/auth/dpop/dpop.service.spec.ts +++ b/src/Web/StellaOps.Web/src/app/core/auth/dpop/dpop.service.spec.ts @@ -1,103 +1,103 @@ -import { HttpClientTestingModule } from '@angular/common/http/testing'; -import { TestBed } from '@angular/core/testing'; - -import { APP_CONFIG, AppConfig } from '../../config/app-config.model'; -import { AppConfigService } from '../../config/app-config.service'; -import { base64UrlDecode } from './jose-utilities'; -import { DpopKeyStore } from './dpop-key-store'; -import { DpopService } from './dpop.service'; - -describe('DpopService', () => { - const originalTimeout = jasmine.DEFAULT_TIMEOUT_INTERVAL; - const config: AppConfig = { - authority: { - issuer: 'https://auth.stellaops.test/', - clientId: 'ui-client', - authorizeEndpoint: 'https://auth.stellaops.test/connect/authorize', - tokenEndpoint: 'https://auth.stellaops.test/connect/token', - redirectUri: 'https://ui.stellaops.test/auth/callback', - scope: 'openid profile email ui.read authority:tenants.read advisory:read vex:read exceptions:read exceptions:approve aoc:verify findings:read orch:read vuln:read', - audience: 'https://scanner.stellaops.test', - }, - apiBaseUrls: { - authority: 'https://auth.stellaops.test', - scanner: 'https://scanner.stellaops.test', - policy: 'https://policy.stellaops.test', - concelier: 'https://concelier.stellaops.test', - attestor: 'https://attestor.stellaops.test', - }, - }; - - beforeEach(async () => { - jasmine.DEFAULT_TIMEOUT_INTERVAL = 20000; - TestBed.configureTestingModule({ - imports: [HttpClientTestingModule], - providers: [ - AppConfigService, - DpopKeyStore, - DpopService, - { - provide: APP_CONFIG, - useValue: config, - }, - ], - }); - }); - - afterEach(async () => { - jasmine.DEFAULT_TIMEOUT_INTERVAL = originalTimeout; - const store = TestBed.inject(DpopKeyStore); - try { - await store.clear(); - } catch { - // ignore cleanup issues in test environment - } - }); - - it('creates a DPoP proof with expected header values', async () => { - const appConfig = TestBed.inject(AppConfigService); - appConfig.setConfigForTesting(config); - const service = 
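
The key store above can also be driven directly, outside DpopService. Below is a minimal sketch of the load-or-generate pattern, assuming a browser context where IndexedDB and WebCrypto are available; per `toKeyAlgorithm`, requesting an 'EdDSA' pair currently throws.

import { DpopKeyStore, LoadedDpopKeyPair } from './dpop-key-store';

// Illustrative only: reuse a persisted ES256 key pair if one exists, otherwise mint and persist one.
async function ensureEs256Key(store: DpopKeyStore): Promise<LoadedDpopKeyPair> {
  const existing = await store.load(); // null on first run or after clear()
  if (existing && existing.algorithm === 'ES256') {
    return existing;
  }
  return store.generate('ES256'); // generates, exports JWKs, and writes them to IndexedDB
}

// The JWK thumbprint is what DPoP-bound access tokens are typically tied to (cnf.jkt per RFC 9449);
// calling clear() forces a new key and therefore a re-binding on the next token request.
// const { thumbprint } = await ensureEs256Key(new DpopKeyStore());
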
TestBed.inject(DpopService); - - const proof = await service.createProof({ - htm: 'get', - htu: 'https://scanner.stellaops.test/api/v1/scans', - }); - - const [rawHeader, rawPayload] = proof.split('.'); - const header = JSON.parse( - new TextDecoder().decode(base64UrlDecode(rawHeader)) - ); - const payload = JSON.parse( - new TextDecoder().decode(base64UrlDecode(rawPayload)) - ); - - expect(header.typ).toBe('dpop+jwt'); - expect(header.alg).toBe('ES256'); - expect(header.jwk.kty).toBe('EC'); - expect(payload.htm).toBe('GET'); - expect(payload.htu).toBe('https://scanner.stellaops.test/api/v1/scans'); - expect(typeof payload.iat).toBe('number'); - expect(typeof payload.jti).toBe('string'); - }); - - it('binds access token hash when provided', async () => { - const appConfig = TestBed.inject(AppConfigService); - appConfig.setConfigForTesting(config); - const service = TestBed.inject(DpopService); - - const accessToken = 'sample-access-token'; - const proof = await service.createProof({ - htm: 'post', - htu: 'https://scanner.stellaops.test/api/v1/scans', - accessToken, - }); - - const payload = JSON.parse( - new TextDecoder().decode(base64UrlDecode(proof.split('.')[1])) - ); - - expect(payload.ath).toBeDefined(); - expect(typeof payload.ath).toBe('string'); - }); -}); +import { HttpClientTestingModule } from '@angular/common/http/testing'; +import { TestBed } from '@angular/core/testing'; + +import { APP_CONFIG, AppConfig } from '../../config/app-config.model'; +import { AppConfigService } from '../../config/app-config.service'; +import { base64UrlDecode } from './jose-utilities'; +import { DpopKeyStore } from './dpop-key-store'; +import { DpopService } from './dpop.service'; + +describe('DpopService', () => { + const originalTimeout = jasmine.DEFAULT_TIMEOUT_INTERVAL; + const config: AppConfig = { + authority: { + issuer: 'https://auth.stellaops.test/', + clientId: 'ui-client', + authorizeEndpoint: 'https://auth.stellaops.test/connect/authorize', + tokenEndpoint: 'https://auth.stellaops.test/connect/token', + redirectUri: 'https://ui.stellaops.test/auth/callback', + scope: 'openid profile email ui.read authority:tenants.read advisory:read vex:read exceptions:read exceptions:approve aoc:verify findings:read orch:read vuln:read', + audience: 'https://scanner.stellaops.test', + }, + apiBaseUrls: { + authority: 'https://auth.stellaops.test', + scanner: 'https://scanner.stellaops.test', + policy: 'https://policy.stellaops.test', + concelier: 'https://concelier.stellaops.test', + attestor: 'https://attestor.stellaops.test', + }, + }; + + beforeEach(async () => { + jasmine.DEFAULT_TIMEOUT_INTERVAL = 20000; + TestBed.configureTestingModule({ + imports: [HttpClientTestingModule], + providers: [ + AppConfigService, + DpopKeyStore, + DpopService, + { + provide: APP_CONFIG, + useValue: config, + }, + ], + }); + }); + + afterEach(async () => { + jasmine.DEFAULT_TIMEOUT_INTERVAL = originalTimeout; + const store = TestBed.inject(DpopKeyStore); + try { + await store.clear(); + } catch { + // ignore cleanup issues in test environment + } + }); + + it('creates a DPoP proof with expected header values', async () => { + const appConfig = TestBed.inject(AppConfigService); + appConfig.setConfigForTesting(config); + const service = TestBed.inject(DpopService); + + const proof = await service.createProof({ + htm: 'get', + htu: 'https://scanner.stellaops.test/api/v1/scans', + }); + + const [rawHeader, rawPayload] = proof.split('.'); + const header = JSON.parse( + new TextDecoder().decode(base64UrlDecode(rawHeader)) + 
); + const payload = JSON.parse( + new TextDecoder().decode(base64UrlDecode(rawPayload)) + ); + + expect(header.typ).toBe('dpop+jwt'); + expect(header.alg).toBe('ES256'); + expect(header.jwk.kty).toBe('EC'); + expect(payload.htm).toBe('GET'); + expect(payload.htu).toBe('https://scanner.stellaops.test/api/v1/scans'); + expect(typeof payload.iat).toBe('number'); + expect(typeof payload.jti).toBe('string'); + }); + + it('binds access token hash when provided', async () => { + const appConfig = TestBed.inject(AppConfigService); + appConfig.setConfigForTesting(config); + const service = TestBed.inject(DpopService); + + const accessToken = 'sample-access-token'; + const proof = await service.createProof({ + htm: 'post', + htu: 'https://scanner.stellaops.test/api/v1/scans', + accessToken, + }); + + const payload = JSON.parse( + new TextDecoder().decode(base64UrlDecode(proof.split('.')[1])) + ); + + expect(payload.ath).toBeDefined(); + expect(typeof payload.ath).toBe('string'); + }); +}); diff --git a/src/StellaOps.Web/src/app/core/auth/dpop/dpop.service.ts b/src/Web/StellaOps.Web/src/app/core/auth/dpop/dpop.service.ts similarity index 96% rename from src/StellaOps.Web/src/app/core/auth/dpop/dpop.service.ts rename to src/Web/StellaOps.Web/src/app/core/auth/dpop/dpop.service.ts index d6d4432c..b0f38d64 100644 --- a/src/StellaOps.Web/src/app/core/auth/dpop/dpop.service.ts +++ b/src/Web/StellaOps.Web/src/app/core/auth/dpop/dpop.service.ts @@ -1,148 +1,148 @@ -import { Injectable, computed, signal } from '@angular/core'; - -import { AppConfigService } from '../../config/app-config.service'; -import { DPoPAlgorithm } from '../../config/app-config.model'; -import { sha256, base64UrlEncode, derToJoseSignature } from './jose-utilities'; -import { DpopKeyStore, LoadedDpopKeyPair } from './dpop-key-store'; - -export interface DpopProofOptions { - readonly htm: string; - readonly htu: string; - readonly accessToken?: string; - readonly nonce?: string | null; -} - -@Injectable({ - providedIn: 'root', -}) -export class DpopService { - private keyPairPromise: Promise<LoadedDpopKeyPair> | null = null; - private readonly nonceSignal = signal<string | null>(null); - readonly nonce = computed(() => this.nonceSignal()); - - constructor( - private readonly config: AppConfigService, - private readonly store: DpopKeyStore - ) {} - - async setNonce(nonce: string | null): Promise<void> { - this.nonceSignal.set(nonce); - } - - async getThumbprint(): Promise<string | null> { - const key = await this.getOrCreateKeyPair(); - return key.thumbprint ?? null; - } - - async rotateKey(): Promise<void> { - const algorithm = this.resolveAlgorithm(); - this.keyPairPromise = this.store.generate(algorithm); - } - - async createProof(options: DpopProofOptions): Promise<string> { - const keyPair = await this.getOrCreateKeyPair(); - - const header = { - typ: 'dpop+jwt', - alg: keyPair.algorithm, - jwk: keyPair.publicJwk, - }; - - const nowSeconds = Math.floor(Date.now() / 1000); - const payload: Record<string, unknown> = { - htm: options.htm.toUpperCase(), - htu: normalizeHtu(options.htu), - iat: nowSeconds, - jti: crypto.randomUUID ? crypto.randomUUID() : createRandomId(), - }; - - const nonce = options.nonce ?? 
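
The spec in the previous hunk pins down the proof shape. The same checks can be expressed as a small standalone helper for inspecting proofs by hand; this is a sketch, not part of the test suite, and `inspectProof` is a hypothetical name.

import { base64UrlDecode } from './jose-utilities';

// Decode a compact JWS without verifying the signature; enough to eyeball a DPoP proof.
function inspectProof(proof: string): { header: Record<string, unknown>; payload: Record<string, unknown> } {
  const [rawHeader, rawPayload] = proof.split('.');
  const decode = (part: string) =>
    JSON.parse(new TextDecoder().decode(base64UrlDecode(part))) as Record<string, unknown>;
  return { header: decode(rawHeader), payload: decode(rawPayload) };
}

// Expected shape, per the assertions in the spec above:
//   header.typ === 'dpop+jwt', header.alg === 'ES256', header.jwk.kty === 'EC'
//   payload.htm is upper-cased, payload.iat is a number, payload.jti is a string
//   payload.ath is present only when an access token was supplied
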
this.nonceSignal(); - if (nonce) { - payload['nonce'] = nonce; - } - - if (options.accessToken) { - const accessTokenHash = await sha256( - new TextEncoder().encode(options.accessToken) - ); - payload['ath'] = base64UrlEncode(accessTokenHash); - } - - const encodedHeader = base64UrlEncode(JSON.stringify(header)); - const encodedPayload = base64UrlEncode(JSON.stringify(payload)); - const signingInput = `${encodedHeader}.${encodedPayload}`; - const signature = await crypto.subtle.sign( - { - name: 'ECDSA', - hash: this.resolveHashAlgorithm(keyPair.algorithm), - }, - keyPair.privateKey, - new TextEncoder().encode(signingInput) - ); - - const joseSignature = base64UrlEncode(derToJoseSignature(signature)); - return `${signingInput}.${joseSignature}`; - } - - private async getOrCreateKeyPair(): Promise<LoadedDpopKeyPair> { - if (!this.keyPairPromise) { - this.keyPairPromise = this.loadKeyPair(); - } - try { - return await this.keyPairPromise; - } catch (error) { - // Reset the memoized promise so a subsequent call can retry. - this.keyPairPromise = null; - throw error; - } - } - - private async loadKeyPair(): Promise<LoadedDpopKeyPair> { - const algorithm = this.resolveAlgorithm(); - try { - const existing = await this.store.load(); - if (existing && existing.algorithm === algorithm) { - return existing; - } - } catch { - // fall through to regeneration - } - - return this.store.generate(algorithm); - } - - private resolveAlgorithm(): DPoPAlgorithm { - const authority = this.config.authority; - return authority.dpopAlgorithms?.[0] ?? 'ES256'; - } - - private resolveHashAlgorithm(algorithm: DPoPAlgorithm): string { - switch (algorithm) { - case 'ES384': - return 'SHA-384'; - case 'ES256': - default: - return 'SHA-256'; - } - } -} - -function normalizeHtu(value: string): string { - try { - const base = - typeof window !== 'undefined' && window.location - ? window.location.origin - : undefined; - const url = base ? new URL(value, base) : new URL(value); - url.hash = ''; - return url.toString(); - } catch { - return value; - } -} - -function createRandomId(): string { - const array = new Uint8Array(16); - crypto.getRandomValues(array); - return base64UrlEncode(array); -} +import { Injectable, computed, signal } from '@angular/core'; + +import { AppConfigService } from '../../config/app-config.service'; +import { DPoPAlgorithm } from '../../config/app-config.model'; +import { sha256, base64UrlEncode, derToJoseSignature } from './jose-utilities'; +import { DpopKeyStore, LoadedDpopKeyPair } from './dpop-key-store'; + +export interface DpopProofOptions { + readonly htm: string; + readonly htu: string; + readonly accessToken?: string; + readonly nonce?: string | null; +} + +@Injectable({ + providedIn: 'root', +}) +export class DpopService { + private keyPairPromise: Promise<LoadedDpopKeyPair> | null = null; + private readonly nonceSignal = signal<string | null>(null); + readonly nonce = computed(() => this.nonceSignal()); + + constructor( + private readonly config: AppConfigService, + private readonly store: DpopKeyStore + ) {} + + async setNonce(nonce: string | null): Promise<void> { + this.nonceSignal.set(nonce); + } + + async getThumbprint(): Promise<string | null> { + const key = await this.getOrCreateKeyPair(); + return key.thumbprint ?? 
null; + } + + async rotateKey(): Promise<void> { + const algorithm = this.resolveAlgorithm(); + this.keyPairPromise = this.store.generate(algorithm); + } + + async createProof(options: DpopProofOptions): Promise<string> { + const keyPair = await this.getOrCreateKeyPair(); + + const header = { + typ: 'dpop+jwt', + alg: keyPair.algorithm, + jwk: keyPair.publicJwk, + }; + + const nowSeconds = Math.floor(Date.now() / 1000); + const payload: Record<string, unknown> = { + htm: options.htm.toUpperCase(), + htu: normalizeHtu(options.htu), + iat: nowSeconds, + jti: crypto.randomUUID ? crypto.randomUUID() : createRandomId(), + }; + + const nonce = options.nonce ?? this.nonceSignal(); + if (nonce) { + payload['nonce'] = nonce; + } + + if (options.accessToken) { + const accessTokenHash = await sha256( + new TextEncoder().encode(options.accessToken) + ); + payload['ath'] = base64UrlEncode(accessTokenHash); + } + + const encodedHeader = base64UrlEncode(JSON.stringify(header)); + const encodedPayload = base64UrlEncode(JSON.stringify(payload)); + const signingInput = `${encodedHeader}.${encodedPayload}`; + const signature = await crypto.subtle.sign( + { + name: 'ECDSA', + hash: this.resolveHashAlgorithm(keyPair.algorithm), + }, + keyPair.privateKey, + new TextEncoder().encode(signingInput) + ); + + const joseSignature = base64UrlEncode(derToJoseSignature(signature)); + return `${signingInput}.${joseSignature}`; + } + + private async getOrCreateKeyPair(): Promise<LoadedDpopKeyPair> { + if (!this.keyPairPromise) { + this.keyPairPromise = this.loadKeyPair(); + } + try { + return await this.keyPairPromise; + } catch (error) { + // Reset the memoized promise so a subsequent call can retry. + this.keyPairPromise = null; + throw error; + } + } + + private async loadKeyPair(): Promise<LoadedDpopKeyPair> { + const algorithm = this.resolveAlgorithm(); + try { + const existing = await this.store.load(); + if (existing && existing.algorithm === algorithm) { + return existing; + } + } catch { + // fall through to regeneration + } + + return this.store.generate(algorithm); + } + + private resolveAlgorithm(): DPoPAlgorithm { + const authority = this.config.authority; + return authority.dpopAlgorithms?.[0] ?? 'ES256'; + } + + private resolveHashAlgorithm(algorithm: DPoPAlgorithm): string { + switch (algorithm) { + case 'ES384': + return 'SHA-384'; + case 'ES256': + default: + return 'SHA-256'; + } + } +} + +function normalizeHtu(value: string): string { + try { + const base = + typeof window !== 'undefined' && window.location + ? window.location.origin + : undefined; + const url = base ? 
new URL(value, base) : new URL(value); + url.hash = ''; + return url.toString(); + } catch { + return value; + } +} + +function createRandomId(): string { + const array = new Uint8Array(16); + crypto.getRandomValues(array); + return base64UrlEncode(array); +} diff --git a/src/StellaOps.Web/src/app/core/auth/dpop/jose-utilities.ts b/src/Web/StellaOps.Web/src/app/core/auth/dpop/jose-utilities.ts similarity index 96% rename from src/StellaOps.Web/src/app/core/auth/dpop/jose-utilities.ts rename to src/Web/StellaOps.Web/src/app/core/auth/dpop/jose-utilities.ts index d195cea2..694f5114 100644 --- a/src/StellaOps.Web/src/app/core/auth/dpop/jose-utilities.ts +++ b/src/Web/StellaOps.Web/src/app/core/auth/dpop/jose-utilities.ts @@ -1,123 +1,123 @@ -export async function sha256(data: Uint8Array): Promise<Uint8Array> { - const digest = await crypto.subtle.digest('SHA-256', data); - return new Uint8Array(digest); -} - -export function base64UrlEncode( - input: ArrayBuffer | Uint8Array | string -): string { - let bytes: Uint8Array; - if (typeof input === 'string') { - bytes = new TextEncoder().encode(input); - } else if (input instanceof Uint8Array) { - bytes = input; - } else { - bytes = new Uint8Array(input); - } - - let binary = ''; - for (let i = 0; i < bytes.byteLength; i += 1) { - binary += String.fromCharCode(bytes[i]); - } - - return btoa(binary).replace(/\+/g, '-').replace(/\//g, '_').replace(/=+$/, ''); -} - -export function base64UrlDecode(value: string): Uint8Array { - const normalized = value.replace(/-/g, '+').replace(/_/g, '/'); - const padding = normalized.length % 4; - const padded = - padding === 0 ? normalized : normalized + '='.repeat(4 - padding); - const binary = atob(padded); - const bytes = new Uint8Array(binary.length); - for (let i = 0; i < binary.length; i += 1) { - bytes[i] = binary.charCodeAt(i); - } - return bytes; -} - -export async function computeJwkThumbprint(jwk: JsonWebKey): Promise<string> { - const canonical = canonicalizeJwk(jwk); - const digest = await sha256(new TextEncoder().encode(canonical)); - return base64UrlEncode(digest); -} - -function canonicalizeJwk(jwk: JsonWebKey): string { - if (!jwk.kty) { - throw new Error('JWK must include "kty"'); - } - - if (jwk.kty === 'EC') { - const { crv, kty, x, y } = jwk; - if (!crv || !x || !y) { - throw new Error('EC JWK must include "crv", "x", and "y".'); - } - return JSON.stringify({ crv, kty, x, y }); - } - - if (jwk.kty === 'OKP') { - const { crv, kty, x } = jwk; - if (!crv || !x) { - throw new Error('OKP JWK must include "crv" and "x".'); - } - return JSON.stringify({ crv, kty, x }); - } - - throw new Error(`Unsupported JWK key type: ${jwk.kty}`); -} - -export function derToJoseSignature(der: ArrayBuffer): Uint8Array { - const bytes = new Uint8Array(der); - if (bytes[0] !== 0x30) { - // Some implementations already return raw (r || s) signature bytes. 
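
Tying DpopService to an actual request: a sketch of how a caller might attach the proof and react to a nonce challenge. The `DPoP` request header, the `DPoP <token>` authorization scheme, and the `DPoP-Nonce` response header follow RFC 9449; the fetch wiring itself is an assumption here, since the production path presumably lives in an HTTP interceptor.

import { DpopService } from './dpop.service';

// Illustrative wiring only; uses nothing beyond the DpopService API shown above.
async function fetchWithDpop(dpop: DpopService, url: string, accessToken: string): Promise<Response> {
  const proof = await dpop.createProof({ htm: 'GET', htu: url, accessToken });
  const response = await fetch(url, {
    headers: {
      Authorization: `DPoP ${accessToken}`, // DPoP-bound token scheme (assumed per RFC 9449)
      DPoP: proof,
    },
  });

  // Servers may challenge with a nonce; remember it so the next proof embeds it.
  const nonce = response.headers.get('DPoP-Nonce');
  if (nonce) {
    await dpop.setNonce(nonce);
  }
  return response;
}
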
- if (bytes.length === 64) { - return bytes; - } - throw new Error('Invalid DER signature: expected sequence.'); - } - - let offset = 2; // skip SEQUENCE header and length (assume short form) - if (bytes[1] & 0x80) { - const lengthBytes = bytes[1] & 0x7f; - offset = 2 + lengthBytes; - } - - if (bytes[offset] !== 0x02) { - throw new Error('Invalid DER signature: expected INTEGER for r.'); - } - const rLength = bytes[offset + 1]; - let r = bytes.slice(offset + 2, offset + 2 + rLength); - offset = offset + 2 + rLength; - - if (bytes[offset] !== 0x02) { - throw new Error('Invalid DER signature: expected INTEGER for s.'); - } - const sLength = bytes[offset + 1]; - let s = bytes.slice(offset + 2, offset + 2 + sLength); - - r = trimLeadingZeros(r); - s = trimLeadingZeros(s); - - const targetLength = 32; - const signature = new Uint8Array(targetLength * 2); - signature.set(padStart(r, targetLength), 0); - signature.set(padStart(s, targetLength), targetLength); - return signature; -} - -function trimLeadingZeros(bytes: Uint8Array): Uint8Array { - let start = 0; - while (start < bytes.length - 1 && bytes[start] === 0x00) { - start += 1; - } - return bytes.subarray(start); -} - -function padStart(bytes: Uint8Array, length: number): Uint8Array { - if (bytes.length >= length) { - return bytes; - } - const padded = new Uint8Array(length); - padded.set(bytes, length - bytes.length); - return padded; -} +export async function sha256(data: Uint8Array): Promise<Uint8Array> { + const digest = await crypto.subtle.digest('SHA-256', data); + return new Uint8Array(digest); +} + +export function base64UrlEncode( + input: ArrayBuffer | Uint8Array | string +): string { + let bytes: Uint8Array; + if (typeof input === 'string') { + bytes = new TextEncoder().encode(input); + } else if (input instanceof Uint8Array) { + bytes = input; + } else { + bytes = new Uint8Array(input); + } + + let binary = ''; + for (let i = 0; i < bytes.byteLength; i += 1) { + binary += String.fromCharCode(bytes[i]); + } + + return btoa(binary).replace(/\+/g, '-').replace(/\//g, '_').replace(/=+$/, ''); +} + +export function base64UrlDecode(value: string): Uint8Array { + const normalized = value.replace(/-/g, '+').replace(/_/g, '/'); + const padding = normalized.length % 4; + const padded = + padding === 0 ? normalized : normalized + '='.repeat(4 - padding); + const binary = atob(padded); + const bytes = new Uint8Array(binary.length); + for (let i = 0; i < binary.length; i += 1) { + bytes[i] = binary.charCodeAt(i); + } + return bytes; +} + +export async function computeJwkThumbprint(jwk: JsonWebKey): Promise<string> { + const canonical = canonicalizeJwk(jwk); + const digest = await sha256(new TextEncoder().encode(canonical)); + return base64UrlEncode(digest); +} + +function canonicalizeJwk(jwk: JsonWebKey): string { + if (!jwk.kty) { + throw new Error('JWK must include "kty"'); + } + + if (jwk.kty === 'EC') { + const { crv, kty, x, y } = jwk; + if (!crv || !x || !y) { + throw new Error('EC JWK must include "crv", "x", and "y".'); + } + return JSON.stringify({ crv, kty, x, y }); + } + + if (jwk.kty === 'OKP') { + const { crv, kty, x } = jwk; + if (!crv || !x) { + throw new Error('OKP JWK must include "crv" and "x".'); + } + return JSON.stringify({ crv, kty, x }); + } + + throw new Error(`Unsupported JWK key type: ${jwk.kty}`); +} + +export function derToJoseSignature(der: ArrayBuffer): Uint8Array { + const bytes = new Uint8Array(der); + if (bytes[0] !== 0x30) { + // Some implementations already return raw (r || s) signature bytes. 
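
A note on the `length === 64` branch above: in browsers, `crypto.subtle.sign` with ECDSA returns the raw IEEE P1363 `r || s` concatenation, so for P-256 the 64-byte fast path is the one normally taken and the DER branch is a defensive fallback for runtimes or polyfills that hand back ASN.1. The hard-coded 32-byte component length also assumes ES256; ES384 signatures would presumably need 48-byte components. A small, assumption-laden helper for telling the two encodings apart:

// Sketch: a raw P-256 signature is exactly 64 bytes of r || s; DER always starts with a SEQUENCE tag (0x30).
function classifyEcdsaSignature(sig: ArrayBuffer): 'raw-p256' | 'der' | 'unknown' {
  const bytes = new Uint8Array(sig);
  if (bytes.length === 64 && bytes[0] !== 0x30) {
    return 'raw-p256';
  }
  return bytes[0] === 0x30 ? 'der' : 'unknown';
}
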
+ if (bytes.length === 64) { + return bytes; + } + throw new Error('Invalid DER signature: expected sequence.'); + } + + let offset = 2; // skip SEQUENCE header and length (assume short form) + if (bytes[1] & 0x80) { + const lengthBytes = bytes[1] & 0x7f; + offset = 2 + lengthBytes; + } + + if (bytes[offset] !== 0x02) { + throw new Error('Invalid DER signature: expected INTEGER for r.'); + } + const rLength = bytes[offset + 1]; + let r = bytes.slice(offset + 2, offset + 2 + rLength); + offset = offset + 2 + rLength; + + if (bytes[offset] !== 0x02) { + throw new Error('Invalid DER signature: expected INTEGER for s.'); + } + const sLength = bytes[offset + 1]; + let s = bytes.slice(offset + 2, offset + 2 + sLength); + + r = trimLeadingZeros(r); + s = trimLeadingZeros(s); + + const targetLength = 32; + const signature = new Uint8Array(targetLength * 2); + signature.set(padStart(r, targetLength), 0); + signature.set(padStart(s, targetLength), targetLength); + return signature; +} + +function trimLeadingZeros(bytes: Uint8Array): Uint8Array { + let start = 0; + while (start < bytes.length - 1 && bytes[start] === 0x00) { + start += 1; + } + return bytes.subarray(start); +} + +function padStart(bytes: Uint8Array, length: number): Uint8Array { + if (bytes.length >= length) { + return bytes; + } + const padded = new Uint8Array(length); + padded.set(bytes, length - bytes.length); + return padded; +} diff --git a/src/StellaOps.Web/src/app/core/auth/pkce.util.ts b/src/Web/StellaOps.Web/src/app/core/auth/pkce.util.ts similarity index 96% rename from src/StellaOps.Web/src/app/core/auth/pkce.util.ts rename to src/Web/StellaOps.Web/src/app/core/auth/pkce.util.ts index 5de90c7c..448ed0f9 100644 --- a/src/StellaOps.Web/src/app/core/auth/pkce.util.ts +++ b/src/Web/StellaOps.Web/src/app/core/auth/pkce.util.ts @@ -1,24 +1,24 @@ -import { base64UrlEncode, sha256 } from './dpop/jose-utilities'; - -export interface PkcePair { - readonly verifier: string; - readonly challenge: string; - readonly method: 'S256'; -} - -const VERIFIER_BYTE_LENGTH = 32; - -export async function createPkcePair(): Promise<PkcePair> { - const verifierBytes = new Uint8Array(VERIFIER_BYTE_LENGTH); - crypto.getRandomValues(verifierBytes); - - const verifier = base64UrlEncode(verifierBytes); - const challengeBytes = await sha256(new TextEncoder().encode(verifier)); - const challenge = base64UrlEncode(challengeBytes); - - return { - verifier, - challenge, - method: 'S256', - }; -} +import { base64UrlEncode, sha256 } from './dpop/jose-utilities'; + +export interface PkcePair { + readonly verifier: string; + readonly challenge: string; + readonly method: 'S256'; +} + +const VERIFIER_BYTE_LENGTH = 32; + +export async function createPkcePair(): Promise<PkcePair> { + const verifierBytes = new Uint8Array(VERIFIER_BYTE_LENGTH); + crypto.getRandomValues(verifierBytes); + + const verifier = base64UrlEncode(verifierBytes); + const challengeBytes = await sha256(new TextEncoder().encode(verifier)); + const challenge = base64UrlEncode(challengeBytes); + + return { + verifier, + challenge, + method: 'S256', + }; +} diff --git a/src/StellaOps.Web/src/app/core/config/app-config.model.ts b/src/Web/StellaOps.Web/src/app/core/config/app-config.model.ts similarity index 96% rename from src/StellaOps.Web/src/app/core/config/app-config.model.ts rename to src/Web/StellaOps.Web/src/app/core/config/app-config.model.ts index 95b6bd80..63cf65f4 100644 --- a/src/StellaOps.Web/src/app/core/config/app-config.model.ts +++ 
b/src/Web/StellaOps.Web/src/app/core/config/app-config.model.ts @@ -1,49 +1,49 @@ -import { InjectionToken } from '@angular/core'; - -export type DPoPAlgorithm = 'ES256' | 'ES384' | 'EdDSA'; - -export interface AuthorityConfig { - readonly issuer: string; - readonly clientId: string; - readonly authorizeEndpoint: string; - readonly tokenEndpoint: string; - readonly logoutEndpoint?: string; - readonly redirectUri: string; - readonly postLogoutRedirectUri?: string; - readonly scope: string; - readonly audience: string; - /** - * Preferred algorithms for DPoP proofs, in order of preference. - * Defaults to ES256 if omitted. - */ - readonly dpopAlgorithms?: readonly DPoPAlgorithm[]; - /** - * Seconds of leeway before access token expiry that should trigger a proactive refresh. - * Defaults to 60. - */ - readonly refreshLeewaySeconds?: number; -} - -export interface ApiBaseUrlConfig { - readonly scanner: string; - readonly policy: string; - readonly concelier: string; - readonly excitor?: string; - readonly attestor: string; - readonly authority: string; - readonly notify?: string; - readonly scheduler?: string; -} - -export interface TelemetryConfig { - readonly otlpEndpoint?: string; - readonly sampleRate?: number; -} - -export interface AppConfig { - readonly authority: AuthorityConfig; - readonly apiBaseUrls: ApiBaseUrlConfig; - readonly telemetry?: TelemetryConfig; -} - -export const APP_CONFIG = new InjectionToken<AppConfig>('STELLAOPS_APP_CONFIG'); +import { InjectionToken } from '@angular/core'; + +export type DPoPAlgorithm = 'ES256' | 'ES384' | 'EdDSA'; + +export interface AuthorityConfig { + readonly issuer: string; + readonly clientId: string; + readonly authorizeEndpoint: string; + readonly tokenEndpoint: string; + readonly logoutEndpoint?: string; + readonly redirectUri: string; + readonly postLogoutRedirectUri?: string; + readonly scope: string; + readonly audience: string; + /** + * Preferred algorithms for DPoP proofs, in order of preference. + * Defaults to ES256 if omitted. + */ + readonly dpopAlgorithms?: readonly DPoPAlgorithm[]; + /** + * Seconds of leeway before access token expiry that should trigger a proactive refresh. + * Defaults to 60. 
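
Stepping back to the PKCE helper from the previous hunk: a sketch of how its output and the `AuthorityConfig` shape above combine into the same authorize URL that `buildAuthorizeUrl` assembles earlier in this patch. Persisting the verifier in `sessionStorage` is an assumption about the surrounding app, as is the exported function name.

import { AuthorityConfig } from '../config/app-config.model';
import { createPkcePair } from './pkce.util';

// Illustrative only: mirrors the query parameters buildAuthorizeUrl sets, fed by createPkcePair().
export async function sketchAuthorizeUrl(
  authority: AuthorityConfig,
  state: string,
  nonce: string
): Promise<string> {
  const pkce = await createPkcePair(); // { verifier, challenge, method: 'S256' }
  sessionStorage.setItem('pkce_verifier', pkce.verifier); // must survive the redirect for the token exchange

  const url = new URL(authority.authorizeEndpoint, authority.issuer);
  url.searchParams.set('response_type', 'code');
  url.searchParams.set('client_id', authority.clientId);
  url.searchParams.set('redirect_uri', authority.redirectUri);
  url.searchParams.set('scope', authority.scope);
  url.searchParams.set('state', state);
  url.searchParams.set('nonce', nonce);
  url.searchParams.set('code_challenge', pkce.challenge);
  url.searchParams.set('code_challenge_method', pkce.method);
  return url.toString();
}
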
+ */ + readonly refreshLeewaySeconds?: number; +} + +export interface ApiBaseUrlConfig { + readonly scanner: string; + readonly policy: string; + readonly concelier: string; + readonly excitor?: string; + readonly attestor: string; + readonly authority: string; + readonly notify?: string; + readonly scheduler?: string; +} + +export interface TelemetryConfig { + readonly otlpEndpoint?: string; + readonly sampleRate?: number; +} + +export interface AppConfig { + readonly authority: AuthorityConfig; + readonly apiBaseUrls: ApiBaseUrlConfig; + readonly telemetry?: TelemetryConfig; +} + +export const APP_CONFIG = new InjectionToken<AppConfig>('STELLAOPS_APP_CONFIG'); diff --git a/src/StellaOps.Web/src/app/core/config/app-config.service.ts b/src/Web/StellaOps.Web/src/app/core/config/app-config.service.ts similarity index 96% rename from src/StellaOps.Web/src/app/core/config/app-config.service.ts rename to src/Web/StellaOps.Web/src/app/core/config/app-config.service.ts index 03698caa..2b941b6e 100644 --- a/src/StellaOps.Web/src/app/core/config/app-config.service.ts +++ b/src/Web/StellaOps.Web/src/app/core/config/app-config.service.ts @@ -1,99 +1,99 @@ -import { HttpClient } from '@angular/common/http'; -import { - Inject, - Injectable, - Optional, - computed, - signal, -} from '@angular/core'; -import { firstValueFrom } from 'rxjs'; - -import { - APP_CONFIG, - AppConfig, - AuthorityConfig, - DPoPAlgorithm, -} from './app-config.model'; - -const DEFAULT_CONFIG_URL = '/config.json'; -const DEFAULT_DPOP_ALG: DPoPAlgorithm = 'ES256'; -const DEFAULT_REFRESH_LEEWAY_SECONDS = 60; - -@Injectable({ - providedIn: 'root', -}) -export class AppConfigService { - private readonly configSignal = signal<AppConfig | null>(null); - private readonly authoritySignal = computed<AuthorityConfig | null>(() => { - const config = this.configSignal(); - return config?.authority ?? null; - }); - - constructor( - private readonly http: HttpClient, - @Optional() @Inject(APP_CONFIG) private readonly staticConfig: AppConfig | null - ) {} - - /** - * Loads application configuration either from the injected static value or via HTTP fetch. - * Must be called during application bootstrap (see APP_INITIALIZER wiring). - */ - async load(configUrl: string = DEFAULT_CONFIG_URL): Promise<void> { - if (this.configSignal()) { - return; - } - - const config = this.staticConfig ?? (await this.fetchConfig(configUrl)); - this.configSignal.set(this.normalizeConfig(config)); - } - - /** - * Allows tests to short-circuit configuration loading. - */ - setConfigForTesting(config: AppConfig): void { - this.configSignal.set(this.normalizeConfig(config)); - } - - get config(): AppConfig { - const current = this.configSignal(); - if (!current) { - throw new Error('App configuration has not been loaded yet.'); - } - return current; - } - - get authority(): AuthorityConfig { - const authority = this.authoritySignal(); - if (!authority) { - throw new Error('Authority configuration has not been loaded yet.'); - } - return authority; - } - - private async fetchConfig(configUrl: string): Promise<AppConfig> { - const response = await firstValueFrom( - this.http.get<AppConfig>(configUrl, { - headers: { 'Cache-Control': 'no-cache' }, - withCredentials: false, - }) - ); - return response; - } - - private normalizeConfig(config: AppConfig): AppConfig { - const authority = { - ...config.authority, - dpopAlgorithms: - config.authority.dpopAlgorithms?.length ?? 0 - ? 
config.authority.dpopAlgorithms - : [DEFAULT_DPOP_ALG], - refreshLeewaySeconds: - config.authority.refreshLeewaySeconds ?? DEFAULT_REFRESH_LEEWAY_SECONDS, - }; - - return { - ...config, - authority, - }; - } -} +import { HttpClient } from '@angular/common/http'; +import { + Inject, + Injectable, + Optional, + computed, + signal, +} from '@angular/core'; +import { firstValueFrom } from 'rxjs'; + +import { + APP_CONFIG, + AppConfig, + AuthorityConfig, + DPoPAlgorithm, +} from './app-config.model'; + +const DEFAULT_CONFIG_URL = '/config.json'; +const DEFAULT_DPOP_ALG: DPoPAlgorithm = 'ES256'; +const DEFAULT_REFRESH_LEEWAY_SECONDS = 60; + +@Injectable({ + providedIn: 'root', +}) +export class AppConfigService { + private readonly configSignal = signal<AppConfig | null>(null); + private readonly authoritySignal = computed<AuthorityConfig | null>(() => { + const config = this.configSignal(); + return config?.authority ?? null; + }); + + constructor( + private readonly http: HttpClient, + @Optional() @Inject(APP_CONFIG) private readonly staticConfig: AppConfig | null + ) {} + + /** + * Loads application configuration either from the injected static value or via HTTP fetch. + * Must be called during application bootstrap (see APP_INITIALIZER wiring). + */ + async load(configUrl: string = DEFAULT_CONFIG_URL): Promise<void> { + if (this.configSignal()) { + return; + } + + const config = this.staticConfig ?? (await this.fetchConfig(configUrl)); + this.configSignal.set(this.normalizeConfig(config)); + } + + /** + * Allows tests to short-circuit configuration loading. + */ + setConfigForTesting(config: AppConfig): void { + this.configSignal.set(this.normalizeConfig(config)); + } + + get config(): AppConfig { + const current = this.configSignal(); + if (!current) { + throw new Error('App configuration has not been loaded yet.'); + } + return current; + } + + get authority(): AuthorityConfig { + const authority = this.authoritySignal(); + if (!authority) { + throw new Error('Authority configuration has not been loaded yet.'); + } + return authority; + } + + private async fetchConfig(configUrl: string): Promise<AppConfig> { + const response = await firstValueFrom( + this.http.get<AppConfig>(configUrl, { + headers: { 'Cache-Control': 'no-cache' }, + withCredentials: false, + }) + ); + return response; + } + + private normalizeConfig(config: AppConfig): AppConfig { + const authority = { + ...config.authority, + dpopAlgorithms: + config.authority.dpopAlgorithms?.length ?? 0 + ? config.authority.dpopAlgorithms + : [DEFAULT_DPOP_ALG], + refreshLeewaySeconds: + config.authority.refreshLeewaySeconds ?? 
DEFAULT_REFRESH_LEEWAY_SECONDS, + }; + + return { + ...config, + authority, + }; + } +} diff --git a/src/StellaOps.Web/src/app/core/console/console-session.service.spec.ts b/src/Web/StellaOps.Web/src/app/core/console/console-session.service.spec.ts similarity index 96% rename from src/StellaOps.Web/src/app/core/console/console-session.service.spec.ts rename to src/Web/StellaOps.Web/src/app/core/console/console-session.service.spec.ts index 1bc9f18a..3062357f 100644 --- a/src/StellaOps.Web/src/app/core/console/console-session.service.spec.ts +++ b/src/Web/StellaOps.Web/src/app/core/console/console-session.service.spec.ts @@ -1,139 +1,139 @@ -import { TestBed } from '@angular/core/testing'; -import { of } from 'rxjs'; - -import { - AUTHORITY_CONSOLE_API, - AuthorityConsoleApi, - TenantCatalogResponseDto, -} from '../api/authority-console.client'; -import { AuthSessionStore } from '../auth/auth-session.store'; -import { ConsoleSessionService } from './console-session.service'; -import { ConsoleSessionStore } from './console-session.store'; - -class MockConsoleApi implements AuthorityConsoleApi { - private createTenantResponse(): TenantCatalogResponseDto { - return { - tenants: [ - { - id: 'tenant-default', - displayName: 'Tenant Default', - status: 'active', - isolationMode: 'shared', - defaultRoles: ['role.console'], - }, - ], - }; - } - - listTenants() { - return of(this.createTenantResponse()); - } - - getProfile() { - return of({ - subjectId: 'user-1', - username: 'user@example.com', - displayName: 'Console User', - tenant: 'tenant-default', - sessionId: 'session-1', - roles: ['role.console'], - scopes: ['ui.read'], - audiences: ['console'], - authenticationMethods: ['pwd'], - issuedAt: '2025-10-31T12:00:00Z', - authenticationTime: '2025-10-31T12:00:00Z', - expiresAt: '2025-10-31T12:10:00Z', - freshAuth: true, - }); - } - - introspectToken() { - return of({ - active: true, - tenant: 'tenant-default', - subject: 'user-1', - clientId: 'console-web', - tokenId: 'token-1', - scopes: ['ui.read'], - audiences: ['console'], - issuedAt: '2025-10-31T12:00:00Z', - authenticationTime: '2025-10-31T12:00:00Z', - expiresAt: '2025-10-31T12:10:00Z', - freshAuth: true, - }); - } -} - -class MockAuthSessionStore { - private tenantIdValue: string | null = 'tenant-default'; - private readonly sessionValue = { - tenantId: 'tenant-default', - scopes: ['ui.read'], - audiences: ['console'], - authenticationTimeEpochMs: Date.parse('2025-10-31T12:00:00Z'), - freshAuthActive: true, - freshAuthExpiresAtEpochMs: Date.parse('2025-10-31T12:05:00Z'), - }; - - session = () => this.sessionValue as any; - - getActiveTenantId(): string | null { - return this.tenantIdValue; - } - - setTenantId(tenantId: string | null): void { - this.tenantIdValue = tenantId; - this.sessionValue.tenantId = tenantId ?? 
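
The configuration service above expects to be primed before anything reads `config` or `authority`. Below is a sketch of the `APP_INITIALIZER` wiring its JSDoc refers to, plus a minimal `config.json` consistent with the model; the endpoints are placeholders and the provider constant name is illustrative.

import { APP_INITIALIZER, Provider } from '@angular/core';
import { AppConfigService } from './app-config.service';

// Sketch of the bootstrap wiring referenced in the JSDoc above.
export const appConfigInitializer: Provider = {
  provide: APP_INITIALIZER,
  multi: true,
  deps: [AppConfigService],
  useFactory: (config: AppConfigService) => () => config.load(), // defaults to fetching /config.json
};

// Minimal /config.json consistent with AppConfig (placeholder endpoints):
// {
//   "authority": {
//     "issuer": "https://auth.example.test/",
//     "clientId": "ui-client",
//     "authorizeEndpoint": "https://auth.example.test/connect/authorize",
//     "tokenEndpoint": "https://auth.example.test/connect/token",
//     "redirectUri": "https://ui.example.test/auth/callback",
//     "scope": "openid ui.read",
//     "audience": "https://scanner.example.test"
//   },
//   "apiBaseUrls": {
//     "authority": "https://auth.example.test",
//     "scanner": "https://scanner.example.test",
//     "policy": "https://policy.example.test",
//     "concelier": "https://concelier.example.test",
//     "attestor": "https://attestor.example.test"
//   }
// }
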
'tenant-default'; - } -} - -describe('ConsoleSessionService', () => { - let service: ConsoleSessionService; - let store: ConsoleSessionStore; - let authStore: MockAuthSessionStore; - - beforeEach(() => { - TestBed.configureTestingModule({ - providers: [ - ConsoleSessionStore, - ConsoleSessionService, - { provide: AUTHORITY_CONSOLE_API, useClass: MockConsoleApi }, - { provide: AuthSessionStore, useClass: MockAuthSessionStore }, - ], - }); - - service = TestBed.inject(ConsoleSessionService); - store = TestBed.inject(ConsoleSessionStore); - authStore = TestBed.inject(AuthSessionStore) as unknown as MockAuthSessionStore; - }); - - it('loads console context for active tenant', async () => { - await service.loadConsoleContext(); - - expect(store.tenants().length).toBe(1); - expect(store.selectedTenantId()).toBe('tenant-default'); - expect(store.profile()?.displayName).toBe('Console User'); - expect(store.tokenInfo()?.freshAuthActive).toBeTrue(); - }); - - it('clears store when no tenant available', async () => { - authStore.setTenantId(null); - store.setTenants( - [ - { - id: 'existing', - displayName: 'Existing', - status: 'active', - isolationMode: 'shared', - defaultRoles: [], - }, - ], - 'existing' - ); - - await service.loadConsoleContext(); - - expect(store.tenants().length).toBe(0); - expect(store.selectedTenantId()).toBeNull(); - }); -}); +import { TestBed } from '@angular/core/testing'; +import { of } from 'rxjs'; + +import { + AUTHORITY_CONSOLE_API, + AuthorityConsoleApi, + TenantCatalogResponseDto, +} from '../api/authority-console.client'; +import { AuthSessionStore } from '../auth/auth-session.store'; +import { ConsoleSessionService } from './console-session.service'; +import { ConsoleSessionStore } from './console-session.store'; + +class MockConsoleApi implements AuthorityConsoleApi { + private createTenantResponse(): TenantCatalogResponseDto { + return { + tenants: [ + { + id: 'tenant-default', + displayName: 'Tenant Default', + status: 'active', + isolationMode: 'shared', + defaultRoles: ['role.console'], + }, + ], + }; + } + + listTenants() { + return of(this.createTenantResponse()); + } + + getProfile() { + return of({ + subjectId: 'user-1', + username: 'user@example.com', + displayName: 'Console User', + tenant: 'tenant-default', + sessionId: 'session-1', + roles: ['role.console'], + scopes: ['ui.read'], + audiences: ['console'], + authenticationMethods: ['pwd'], + issuedAt: '2025-10-31T12:00:00Z', + authenticationTime: '2025-10-31T12:00:00Z', + expiresAt: '2025-10-31T12:10:00Z', + freshAuth: true, + }); + } + + introspectToken() { + return of({ + active: true, + tenant: 'tenant-default', + subject: 'user-1', + clientId: 'console-web', + tokenId: 'token-1', + scopes: ['ui.read'], + audiences: ['console'], + issuedAt: '2025-10-31T12:00:00Z', + authenticationTime: '2025-10-31T12:00:00Z', + expiresAt: '2025-10-31T12:10:00Z', + freshAuth: true, + }); + } +} + +class MockAuthSessionStore { + private tenantIdValue: string | null = 'tenant-default'; + private readonly sessionValue = { + tenantId: 'tenant-default', + scopes: ['ui.read'], + audiences: ['console'], + authenticationTimeEpochMs: Date.parse('2025-10-31T12:00:00Z'), + freshAuthActive: true, + freshAuthExpiresAtEpochMs: Date.parse('2025-10-31T12:05:00Z'), + }; + + session = () => this.sessionValue as any; + + getActiveTenantId(): string | null { + return this.tenantIdValue; + } + + setTenantId(tenantId: string | null): void { + this.tenantIdValue = tenantId; + this.sessionValue.tenantId = tenantId ?? 
'tenant-default'; + } +} + +describe('ConsoleSessionService', () => { + let service: ConsoleSessionService; + let store: ConsoleSessionStore; + let authStore: MockAuthSessionStore; + + beforeEach(() => { + TestBed.configureTestingModule({ + providers: [ + ConsoleSessionStore, + ConsoleSessionService, + { provide: AUTHORITY_CONSOLE_API, useClass: MockConsoleApi }, + { provide: AuthSessionStore, useClass: MockAuthSessionStore }, + ], + }); + + service = TestBed.inject(ConsoleSessionService); + store = TestBed.inject(ConsoleSessionStore); + authStore = TestBed.inject(AuthSessionStore) as unknown as MockAuthSessionStore; + }); + + it('loads console context for active tenant', async () => { + await service.loadConsoleContext(); + + expect(store.tenants().length).toBe(1); + expect(store.selectedTenantId()).toBe('tenant-default'); + expect(store.profile()?.displayName).toBe('Console User'); + expect(store.tokenInfo()?.freshAuthActive).toBeTrue(); + }); + + it('clears store when no tenant available', async () => { + authStore.setTenantId(null); + store.setTenants( + [ + { + id: 'existing', + displayName: 'Existing', + status: 'active', + isolationMode: 'shared', + defaultRoles: [], + }, + ], + 'existing' + ); + + await service.loadConsoleContext(); + + expect(store.tenants().length).toBe(0); + expect(store.selectedTenantId()).toBeNull(); + }); +}); diff --git a/src/StellaOps.Web/src/app/core/console/console-session.service.ts b/src/Web/StellaOps.Web/src/app/core/console/console-session.service.ts similarity index 96% rename from src/StellaOps.Web/src/app/core/console/console-session.service.ts rename to src/Web/StellaOps.Web/src/app/core/console/console-session.service.ts index 54914c72..b7cfd03b 100644 --- a/src/StellaOps.Web/src/app/core/console/console-session.service.ts +++ b/src/Web/StellaOps.Web/src/app/core/console/console-session.service.ts @@ -1,161 +1,161 @@ -import { Injectable, inject } from '@angular/core'; -import { firstValueFrom } from 'rxjs'; - -import { - AUTHORITY_CONSOLE_API, - AuthorityConsoleApi, - AuthorityTenantViewDto, - ConsoleProfileDto, - ConsoleTokenIntrospectionDto, -} from '../api/authority-console.client'; -import { AuthSessionStore } from '../auth/auth-session.store'; -import { - ConsoleProfile, - ConsoleSessionStore, - ConsoleTenant, - ConsoleTokenInfo, -} from './console-session.store'; - -@Injectable({ - providedIn: 'root', -}) -export class ConsoleSessionService { - private readonly api = inject<AuthorityConsoleApi>(AUTHORITY_CONSOLE_API); - private readonly store = inject(ConsoleSessionStore); - private readonly authSession = inject(AuthSessionStore); - - async loadConsoleContext(tenantId?: string | null): Promise<void> { - const activeTenant = - (tenantId && tenantId.trim()) || this.authSession.getActiveTenantId(); - - if (!activeTenant) { - this.store.clear(); - return; - } - - this.store.setSelectedTenant(activeTenant); - this.store.setLoading(true); - this.store.setError(null); - - try { - const tenantResponse = await firstValueFrom( - this.api.listTenants(activeTenant) - ); - const tenants = (tenantResponse.tenants ?? 
[]).map((tenant) => - this.mapTenant(tenant) - ); - - const [profileDto, tokenDto] = await Promise.all([ - firstValueFrom(this.api.getProfile(activeTenant)), - firstValueFrom(this.api.introspectToken(activeTenant)), - ]); - - const profile = this.mapProfile(profileDto); - const tokenInfo = this.mapTokenInfo(tokenDto); - - this.store.setContext({ - tenants, - profile, - token: tokenInfo, - selectedTenantId: activeTenant, - }); - } catch (error) { - console.error('Failed to load console context', error); - this.store.setError('Unable to load console context.'); - } finally { - this.store.setLoading(false); - } - } - - async switchTenant(tenantId: string): Promise<void> { - if (!tenantId || tenantId === this.store.selectedTenantId()) { - return this.loadConsoleContext(tenantId); - } - - this.store.setSelectedTenant(tenantId); - await this.loadConsoleContext(tenantId); - } - - async refresh(): Promise<void> { - await this.loadConsoleContext(this.store.selectedTenantId()); - } - - clear(): void { - this.store.clear(); - } - - private mapTenant(dto: AuthorityTenantViewDto): ConsoleTenant { - const roles = Array.isArray(dto.defaultRoles) - ? dto.defaultRoles - .map((role) => role.trim()) - .filter((role) => role.length > 0) - .sort((a, b) => a.localeCompare(b)) - : []; - - return { - id: dto.id, - displayName: dto.displayName || dto.id, - status: dto.status ?? 'active', - isolationMode: dto.isolationMode ?? 'shared', - defaultRoles: roles, - }; - } - - private mapProfile(dto: ConsoleProfileDto): ConsoleProfile { - return { - subjectId: dto.subjectId ?? null, - username: dto.username ?? null, - displayName: dto.displayName ?? dto.username ?? dto.subjectId ?? null, - tenant: dto.tenant, - sessionId: dto.sessionId ?? null, - roles: [...(dto.roles ?? [])].sort((a, b) => a.localeCompare(b)), - scopes: [...(dto.scopes ?? [])].sort((a, b) => a.localeCompare(b)), - audiences: [...(dto.audiences ?? [])].sort((a, b) => - a.localeCompare(b) - ), - authenticationMethods: [...(dto.authenticationMethods ?? [])], - issuedAt: this.parseInstant(dto.issuedAt), - authenticationTime: this.parseInstant(dto.authenticationTime), - expiresAt: this.parseInstant(dto.expiresAt), - freshAuth: !!dto.freshAuth, - }; - } - - private mapTokenInfo(dto: ConsoleTokenIntrospectionDto): ConsoleTokenInfo { - const session = this.authSession.session(); - const freshAuthExpiresAt = - session?.freshAuthExpiresAtEpochMs != null - ? new Date(session.freshAuthExpiresAtEpochMs) - : null; - - const authenticationTime = - session?.authenticationTimeEpochMs != null - ? new Date(session.authenticationTimeEpochMs) - : this.parseInstant(dto.authenticationTime); - - return { - active: !!dto.active, - tenant: dto.tenant, - subject: dto.subject ?? null, - clientId: dto.clientId ?? null, - tokenId: dto.tokenId ?? null, - scopes: [...(dto.scopes ?? [])].sort((a, b) => a.localeCompare(b)), - audiences: [...(dto.audiences ?? [])].sort((a, b) => - a.localeCompare(b) - ), - issuedAt: this.parseInstant(dto.issuedAt), - authenticationTime, - expiresAt: this.parseInstant(dto.expiresAt), - freshAuthActive: session?.freshAuthActive ?? !!dto.freshAuth, - freshAuthExpiresAt, - }; - } - - private parseInstant(value: string | null | undefined): Date | null { - if (!value) { - return null; - } - const date = new Date(value); - return Number.isNaN(date.getTime()) ? 
null : date; - } -} +import { Injectable, inject } from '@angular/core'; +import { firstValueFrom } from 'rxjs'; + +import { + AUTHORITY_CONSOLE_API, + AuthorityConsoleApi, + AuthorityTenantViewDto, + ConsoleProfileDto, + ConsoleTokenIntrospectionDto, +} from '../api/authority-console.client'; +import { AuthSessionStore } from '../auth/auth-session.store'; +import { + ConsoleProfile, + ConsoleSessionStore, + ConsoleTenant, + ConsoleTokenInfo, +} from './console-session.store'; + +@Injectable({ + providedIn: 'root', +}) +export class ConsoleSessionService { + private readonly api = inject<AuthorityConsoleApi>(AUTHORITY_CONSOLE_API); + private readonly store = inject(ConsoleSessionStore); + private readonly authSession = inject(AuthSessionStore); + + async loadConsoleContext(tenantId?: string | null): Promise<void> { + const activeTenant = + (tenantId && tenantId.trim()) || this.authSession.getActiveTenantId(); + + if (!activeTenant) { + this.store.clear(); + return; + } + + this.store.setSelectedTenant(activeTenant); + this.store.setLoading(true); + this.store.setError(null); + + try { + const tenantResponse = await firstValueFrom( + this.api.listTenants(activeTenant) + ); + const tenants = (tenantResponse.tenants ?? []).map((tenant) => + this.mapTenant(tenant) + ); + + const [profileDto, tokenDto] = await Promise.all([ + firstValueFrom(this.api.getProfile(activeTenant)), + firstValueFrom(this.api.introspectToken(activeTenant)), + ]); + + const profile = this.mapProfile(profileDto); + const tokenInfo = this.mapTokenInfo(tokenDto); + + this.store.setContext({ + tenants, + profile, + token: tokenInfo, + selectedTenantId: activeTenant, + }); + } catch (error) { + console.error('Failed to load console context', error); + this.store.setError('Unable to load console context.'); + } finally { + this.store.setLoading(false); + } + } + + async switchTenant(tenantId: string): Promise<void> { + if (!tenantId || tenantId === this.store.selectedTenantId()) { + return this.loadConsoleContext(tenantId); + } + + this.store.setSelectedTenant(tenantId); + await this.loadConsoleContext(tenantId); + } + + async refresh(): Promise<void> { + await this.loadConsoleContext(this.store.selectedTenantId()); + } + + clear(): void { + this.store.clear(); + } + + private mapTenant(dto: AuthorityTenantViewDto): ConsoleTenant { + const roles = Array.isArray(dto.defaultRoles) + ? dto.defaultRoles + .map((role) => role.trim()) + .filter((role) => role.length > 0) + .sort((a, b) => a.localeCompare(b)) + : []; + + return { + id: dto.id, + displayName: dto.displayName || dto.id, + status: dto.status ?? 'active', + isolationMode: dto.isolationMode ?? 'shared', + defaultRoles: roles, + }; + } + + private mapProfile(dto: ConsoleProfileDto): ConsoleProfile { + return { + subjectId: dto.subjectId ?? null, + username: dto.username ?? null, + displayName: dto.displayName ?? dto.username ?? dto.subjectId ?? null, + tenant: dto.tenant, + sessionId: dto.sessionId ?? null, + roles: [...(dto.roles ?? [])].sort((a, b) => a.localeCompare(b)), + scopes: [...(dto.scopes ?? [])].sort((a, b) => a.localeCompare(b)), + audiences: [...(dto.audiences ?? [])].sort((a, b) => + a.localeCompare(b) + ), + authenticationMethods: [...(dto.authenticationMethods ?? 
[])], + issuedAt: this.parseInstant(dto.issuedAt), + authenticationTime: this.parseInstant(dto.authenticationTime), + expiresAt: this.parseInstant(dto.expiresAt), + freshAuth: !!dto.freshAuth, + }; + } + + private mapTokenInfo(dto: ConsoleTokenIntrospectionDto): ConsoleTokenInfo { + const session = this.authSession.session(); + const freshAuthExpiresAt = + session?.freshAuthExpiresAtEpochMs != null + ? new Date(session.freshAuthExpiresAtEpochMs) + : null; + + const authenticationTime = + session?.authenticationTimeEpochMs != null + ? new Date(session.authenticationTimeEpochMs) + : this.parseInstant(dto.authenticationTime); + + return { + active: !!dto.active, + tenant: dto.tenant, + subject: dto.subject ?? null, + clientId: dto.clientId ?? null, + tokenId: dto.tokenId ?? null, + scopes: [...(dto.scopes ?? [])].sort((a, b) => a.localeCompare(b)), + audiences: [...(dto.audiences ?? [])].sort((a, b) => + a.localeCompare(b) + ), + issuedAt: this.parseInstant(dto.issuedAt), + authenticationTime, + expiresAt: this.parseInstant(dto.expiresAt), + freshAuthActive: session?.freshAuthActive ?? !!dto.freshAuth, + freshAuthExpiresAt, + }; + } + + private parseInstant(value: string | null | undefined): Date | null { + if (!value) { + return null; + } + const date = new Date(value); + return Number.isNaN(date.getTime()) ? null : date; + } +} diff --git a/src/StellaOps.Web/src/app/core/console/console-session.store.spec.ts b/src/Web/StellaOps.Web/src/app/core/console/console-session.store.spec.ts similarity index 96% rename from src/StellaOps.Web/src/app/core/console/console-session.store.spec.ts rename to src/Web/StellaOps.Web/src/app/core/console/console-session.store.spec.ts index 84bfa7fe..d2474153 100644 --- a/src/StellaOps.Web/src/app/core/console/console-session.store.spec.ts +++ b/src/Web/StellaOps.Web/src/app/core/console/console-session.store.spec.ts @@ -1,123 +1,123 @@ -import { ConsoleSessionStore } from './console-session.store'; - -describe('ConsoleSessionStore', () => { - let store: ConsoleSessionStore; - - beforeEach(() => { - store = new ConsoleSessionStore(); - }); - - it('tracks tenants and selection', () => { - const tenants = [ - { - id: 'tenant-a', - displayName: 'Tenant A', - status: 'active', - isolationMode: 'shared', - defaultRoles: ['role.a'], - }, - { - id: 'tenant-b', - displayName: 'Tenant B', - status: 'active', - isolationMode: 'shared', - defaultRoles: ['role.b'], - }, - ]; - - const selected = store.setTenants(tenants, 'tenant-b'); - expect(selected).toBe('tenant-b'); - expect(store.selectedTenantId()).toBe('tenant-b'); - expect(store.tenants().length).toBe(2); - }); - - it('sets context with profile and token info', () => { - const tenants = [ - { - id: 'tenant-a', - displayName: 'Tenant A', - status: 'active', - isolationMode: 'shared', - defaultRoles: ['role.a'], - }, - ]; - - store.setContext({ - tenants, - selectedTenantId: 'tenant-a', - profile: { - subjectId: 'user-1', - username: 'user@example.com', - displayName: 'User Example', - tenant: 'tenant-a', - sessionId: 'session-123', - roles: ['role.a'], - scopes: ['scope.a'], - audiences: ['aud'], - authenticationMethods: ['pwd'], - issuedAt: new Date('2025-10-31T12:00:00Z'), - authenticationTime: new Date('2025-10-31T12:00:00Z'), - expiresAt: new Date('2025-10-31T12:10:00Z'), - freshAuth: true, - }, - token: { - active: true, - tenant: 'tenant-a', - subject: 'user-1', - clientId: 'client', - tokenId: 'token-1', - scopes: ['scope.a'], - audiences: ['aud'], - issuedAt: new Date('2025-10-31T12:00:00Z'), - 
authenticationTime: new Date('2025-10-31T12:00:00Z'), - expiresAt: new Date('2025-10-31T12:10:00Z'), - freshAuthActive: true, - freshAuthExpiresAt: new Date('2025-10-31T12:05:00Z'), - }, - }); - - expect(store.selectedTenantId()).toBe('tenant-a'); - expect(store.profile()?.displayName).toBe('User Example'); - expect(store.tokenInfo()?.freshAuthActive).toBeTrue(); - expect(store.hasContext()).toBeTrue(); - }); - - it('clears state', () => { - store.setTenants( - [ - { - id: 'tenant-a', - displayName: 'Tenant A', - status: 'active', - isolationMode: 'shared', - defaultRoles: [], - }, - ], - 'tenant-a' - ); - store.setProfile({ - subjectId: null, - username: null, - displayName: null, - tenant: 'tenant-a', - sessionId: null, - roles: [], - scopes: [], - audiences: [], - authenticationMethods: [], - issuedAt: null, - authenticationTime: null, - expiresAt: null, - freshAuth: false, - }); - - store.clear(); - - expect(store.tenants().length).toBe(0); - expect(store.selectedTenantId()).toBeNull(); - expect(store.profile()).toBeNull(); - expect(store.tokenInfo()).toBeNull(); - expect(store.loading()).toBeFalse(); - expect(store.error()).toBeNull(); - }); -}); +import { ConsoleSessionStore } from './console-session.store'; + +describe('ConsoleSessionStore', () => { + let store: ConsoleSessionStore; + + beforeEach(() => { + store = new ConsoleSessionStore(); + }); + + it('tracks tenants and selection', () => { + const tenants = [ + { + id: 'tenant-a', + displayName: 'Tenant A', + status: 'active', + isolationMode: 'shared', + defaultRoles: ['role.a'], + }, + { + id: 'tenant-b', + displayName: 'Tenant B', + status: 'active', + isolationMode: 'shared', + defaultRoles: ['role.b'], + }, + ]; + + const selected = store.setTenants(tenants, 'tenant-b'); + expect(selected).toBe('tenant-b'); + expect(store.selectedTenantId()).toBe('tenant-b'); + expect(store.tenants().length).toBe(2); + }); + + it('sets context with profile and token info', () => { + const tenants = [ + { + id: 'tenant-a', + displayName: 'Tenant A', + status: 'active', + isolationMode: 'shared', + defaultRoles: ['role.a'], + }, + ]; + + store.setContext({ + tenants, + selectedTenantId: 'tenant-a', + profile: { + subjectId: 'user-1', + username: 'user@example.com', + displayName: 'User Example', + tenant: 'tenant-a', + sessionId: 'session-123', + roles: ['role.a'], + scopes: ['scope.a'], + audiences: ['aud'], + authenticationMethods: ['pwd'], + issuedAt: new Date('2025-10-31T12:00:00Z'), + authenticationTime: new Date('2025-10-31T12:00:00Z'), + expiresAt: new Date('2025-10-31T12:10:00Z'), + freshAuth: true, + }, + token: { + active: true, + tenant: 'tenant-a', + subject: 'user-1', + clientId: 'client', + tokenId: 'token-1', + scopes: ['scope.a'], + audiences: ['aud'], + issuedAt: new Date('2025-10-31T12:00:00Z'), + authenticationTime: new Date('2025-10-31T12:00:00Z'), + expiresAt: new Date('2025-10-31T12:10:00Z'), + freshAuthActive: true, + freshAuthExpiresAt: new Date('2025-10-31T12:05:00Z'), + }, + }); + + expect(store.selectedTenantId()).toBe('tenant-a'); + expect(store.profile()?.displayName).toBe('User Example'); + expect(store.tokenInfo()?.freshAuthActive).toBeTrue(); + expect(store.hasContext()).toBeTrue(); + }); + + it('clears state', () => { + store.setTenants( + [ + { + id: 'tenant-a', + displayName: 'Tenant A', + status: 'active', + isolationMode: 'shared', + defaultRoles: [], + }, + ], + 'tenant-a' + ); + store.setProfile({ + subjectId: null, + username: null, + displayName: null, + tenant: 'tenant-a', + sessionId: null, + roles: 
[], + scopes: [], + audiences: [], + authenticationMethods: [], + issuedAt: null, + authenticationTime: null, + expiresAt: null, + freshAuth: false, + }); + + store.clear(); + + expect(store.tenants().length).toBe(0); + expect(store.selectedTenantId()).toBeNull(); + expect(store.profile()).toBeNull(); + expect(store.tokenInfo()).toBeNull(); + expect(store.loading()).toBeFalse(); + expect(store.error()).toBeNull(); + }); +}); diff --git a/src/StellaOps.Web/src/app/core/console/console-session.store.ts b/src/Web/StellaOps.Web/src/app/core/console/console-session.store.ts similarity index 96% rename from src/StellaOps.Web/src/app/core/console/console-session.store.ts rename to src/Web/StellaOps.Web/src/app/core/console/console-session.store.ts index 15bf874c..50e3ff55 100644 --- a/src/StellaOps.Web/src/app/core/console/console-session.store.ts +++ b/src/Web/StellaOps.Web/src/app/core/console/console-session.store.ts @@ -1,128 +1,128 @@ -import { Injectable, computed, signal } from '@angular/core'; - -export interface ConsoleTenant { - readonly id: string; - readonly displayName: string; - readonly status: string; - readonly isolationMode: string; - readonly defaultRoles: readonly string[]; -} - -export interface ConsoleProfile { - readonly subjectId: string | null; - readonly username: string | null; - readonly displayName: string | null; - readonly tenant: string; - readonly sessionId: string | null; - readonly roles: readonly string[]; - readonly scopes: readonly string[]; - readonly audiences: readonly string[]; - readonly authenticationMethods: readonly string[]; - readonly issuedAt: Date | null; - readonly authenticationTime: Date | null; - readonly expiresAt: Date | null; - readonly freshAuth: boolean; -} - -export interface ConsoleTokenInfo { - readonly active: boolean; - readonly tenant: string; - readonly subject: string | null; - readonly clientId: string | null; - readonly tokenId: string | null; - readonly scopes: readonly string[]; - readonly audiences: readonly string[]; - readonly issuedAt: Date | null; - readonly authenticationTime: Date | null; - readonly expiresAt: Date | null; - readonly freshAuthActive: boolean; - readonly freshAuthExpiresAt: Date | null; -} - -@Injectable({ - providedIn: 'root', -}) -export class ConsoleSessionStore { - private readonly tenantsSignal = signal<ConsoleTenant[]>([]); - private readonly selectedTenantIdSignal = signal<string | null>(null); - private readonly profileSignal = signal<ConsoleProfile | null>(null); - private readonly tokenSignal = signal<ConsoleTokenInfo | null>(null); - private readonly loadingSignal = signal(false); - private readonly errorSignal = signal<string | null>(null); - - readonly tenants = computed(() => this.tenantsSignal()); - readonly selectedTenantId = computed(() => this.selectedTenantIdSignal()); - readonly profile = computed(() => this.profileSignal()); - readonly tokenInfo = computed(() => this.tokenSignal()); - readonly loading = computed(() => this.loadingSignal()); - readonly error = computed(() => this.errorSignal()); - readonly hasContext = computed( - () => - this.tenantsSignal().length > 0 || - this.profileSignal() !== null || - this.tokenSignal() !== null - ); - - setLoading(loading: boolean): void { - this.loadingSignal.set(loading); - } - - setError(message: string | null): void { - this.errorSignal.set(message); - } - - setContext(context: { - tenants: ConsoleTenant[]; - profile: ConsoleProfile | null; - token: ConsoleTokenInfo | null; - selectedTenantId?: string | null; - }): void { - const selected 
= this.setTenants(context.tenants, context.selectedTenantId); - this.profileSignal.set(context.profile); - this.tokenSignal.set(context.token); - this.selectedTenantIdSignal.set(selected); - } - - setProfile(profile: ConsoleProfile | null): void { - this.profileSignal.set(profile); - } - - setTokenInfo(token: ConsoleTokenInfo | null): void { - this.tokenSignal.set(token); - } - - setTenants( - tenants: ConsoleTenant[], - preferredTenantId?: string | null - ): string | null { - this.tenantsSignal.set(tenants); - const currentSelection = this.selectedTenantIdSignal(); - const fallbackSelection = - tenants.length > 0 ? tenants[0].id : null; - - const nextSelection = - (preferredTenantId && - tenants.some((tenant) => tenant.id === preferredTenantId) && - preferredTenantId) || - (currentSelection && - tenants.some((tenant) => tenant.id === currentSelection) && - currentSelection) || - fallbackSelection; - - this.selectedTenantIdSignal.set(nextSelection); - return nextSelection; - } - - setSelectedTenant(tenantId: string | null): void { - this.selectedTenantIdSignal.set(tenantId); - } - - clear(): void { - this.tenantsSignal.set([]); - this.selectedTenantIdSignal.set(null); - this.profileSignal.set(null); - this.tokenSignal.set(null); - this.loadingSignal.set(false); - this.errorSignal.set(null); - } -} +import { Injectable, computed, signal } from '@angular/core'; + +export interface ConsoleTenant { + readonly id: string; + readonly displayName: string; + readonly status: string; + readonly isolationMode: string; + readonly defaultRoles: readonly string[]; +} + +export interface ConsoleProfile { + readonly subjectId: string | null; + readonly username: string | null; + readonly displayName: string | null; + readonly tenant: string; + readonly sessionId: string | null; + readonly roles: readonly string[]; + readonly scopes: readonly string[]; + readonly audiences: readonly string[]; + readonly authenticationMethods: readonly string[]; + readonly issuedAt: Date | null; + readonly authenticationTime: Date | null; + readonly expiresAt: Date | null; + readonly freshAuth: boolean; +} + +export interface ConsoleTokenInfo { + readonly active: boolean; + readonly tenant: string; + readonly subject: string | null; + readonly clientId: string | null; + readonly tokenId: string | null; + readonly scopes: readonly string[]; + readonly audiences: readonly string[]; + readonly issuedAt: Date | null; + readonly authenticationTime: Date | null; + readonly expiresAt: Date | null; + readonly freshAuthActive: boolean; + readonly freshAuthExpiresAt: Date | null; +} + +@Injectable({ + providedIn: 'root', +}) +export class ConsoleSessionStore { + private readonly tenantsSignal = signal<ConsoleTenant[]>([]); + private readonly selectedTenantIdSignal = signal<string | null>(null); + private readonly profileSignal = signal<ConsoleProfile | null>(null); + private readonly tokenSignal = signal<ConsoleTokenInfo | null>(null); + private readonly loadingSignal = signal(false); + private readonly errorSignal = signal<string | null>(null); + + readonly tenants = computed(() => this.tenantsSignal()); + readonly selectedTenantId = computed(() => this.selectedTenantIdSignal()); + readonly profile = computed(() => this.profileSignal()); + readonly tokenInfo = computed(() => this.tokenSignal()); + readonly loading = computed(() => this.loadingSignal()); + readonly error = computed(() => this.errorSignal()); + readonly hasContext = computed( + () => + this.tenantsSignal().length > 0 || + this.profileSignal() !== null || + 
this.tokenSignal() !== null + ); + + setLoading(loading: boolean): void { + this.loadingSignal.set(loading); + } + + setError(message: string | null): void { + this.errorSignal.set(message); + } + + setContext(context: { + tenants: ConsoleTenant[]; + profile: ConsoleProfile | null; + token: ConsoleTokenInfo | null; + selectedTenantId?: string | null; + }): void { + const selected = this.setTenants(context.tenants, context.selectedTenantId); + this.profileSignal.set(context.profile); + this.tokenSignal.set(context.token); + this.selectedTenantIdSignal.set(selected); + } + + setProfile(profile: ConsoleProfile | null): void { + this.profileSignal.set(profile); + } + + setTokenInfo(token: ConsoleTokenInfo | null): void { + this.tokenSignal.set(token); + } + + setTenants( + tenants: ConsoleTenant[], + preferredTenantId?: string | null + ): string | null { + this.tenantsSignal.set(tenants); + const currentSelection = this.selectedTenantIdSignal(); + const fallbackSelection = + tenants.length > 0 ? tenants[0].id : null; + + const nextSelection = + (preferredTenantId && + tenants.some((tenant) => tenant.id === preferredTenantId) && + preferredTenantId) || + (currentSelection && + tenants.some((tenant) => tenant.id === currentSelection) && + currentSelection) || + fallbackSelection; + + this.selectedTenantIdSignal.set(nextSelection); + return nextSelection; + } + + setSelectedTenant(tenantId: string | null): void { + this.selectedTenantIdSignal.set(tenantId); + } + + clear(): void { + this.tenantsSignal.set([]); + this.selectedTenantIdSignal.set(null); + this.profileSignal.set(null); + this.tokenSignal.set(null); + this.loadingSignal.set(false); + this.errorSignal.set(null); + } +} diff --git a/src/StellaOps.Web/src/app/core/orchestrator/operator-context.service.ts b/src/Web/StellaOps.Web/src/app/core/orchestrator/operator-context.service.ts similarity index 96% rename from src/StellaOps.Web/src/app/core/orchestrator/operator-context.service.ts rename to src/Web/StellaOps.Web/src/app/core/orchestrator/operator-context.service.ts index 6af33c9d..ad0e1ae0 100644 --- a/src/StellaOps.Web/src/app/core/orchestrator/operator-context.service.ts +++ b/src/Web/StellaOps.Web/src/app/core/orchestrator/operator-context.service.ts @@ -1,35 +1,35 @@ -import { Injectable, signal } from '@angular/core'; - -export interface OperatorContext { - readonly reason: string; - readonly ticket: string; -} - -@Injectable({ - providedIn: 'root', -}) -export class OperatorContextService { - private readonly contextSignal = signal<OperatorContext | null>(null); - - readonly context = this.contextSignal.asReadonly(); - - setContext(reason: string, ticket: string): void { - const normalizedReason = reason.trim(); - const normalizedTicket = ticket.trim(); - if (!normalizedReason || !normalizedTicket) { - throw new Error( - 'operator_reason and operator_ticket must be provided for orchestrator control actions.' 
- ); - } - - this.contextSignal.set({ reason: normalizedReason, ticket: normalizedTicket }); - } - - clear(): void { - this.contextSignal.set(null); - } - - snapshot(): OperatorContext | null { - return this.contextSignal(); - } -} +import { Injectable, signal } from '@angular/core'; + +export interface OperatorContext { + readonly reason: string; + readonly ticket: string; +} + +@Injectable({ + providedIn: 'root', +}) +export class OperatorContextService { + private readonly contextSignal = signal<OperatorContext | null>(null); + + readonly context = this.contextSignal.asReadonly(); + + setContext(reason: string, ticket: string): void { + const normalizedReason = reason.trim(); + const normalizedTicket = ticket.trim(); + if (!normalizedReason || !normalizedTicket) { + throw new Error( + 'operator_reason and operator_ticket must be provided for orchestrator control actions.' + ); + } + + this.contextSignal.set({ reason: normalizedReason, ticket: normalizedTicket }); + } + + clear(): void { + this.contextSignal.set(null); + } + + snapshot(): OperatorContext | null { + return this.contextSignal(); + } +} diff --git a/src/StellaOps.Web/src/app/core/orchestrator/operator-metadata.interceptor.ts b/src/Web/StellaOps.Web/src/app/core/orchestrator/operator-metadata.interceptor.ts similarity index 96% rename from src/StellaOps.Web/src/app/core/orchestrator/operator-metadata.interceptor.ts rename to src/Web/StellaOps.Web/src/app/core/orchestrator/operator-metadata.interceptor.ts index 288e4d0d..5c805bb1 100644 --- a/src/StellaOps.Web/src/app/core/orchestrator/operator-metadata.interceptor.ts +++ b/src/Web/StellaOps.Web/src/app/core/orchestrator/operator-metadata.interceptor.ts @@ -1,41 +1,41 @@ -import { - HttpEvent, - HttpHandler, - HttpInterceptor, - HttpRequest, -} from '@angular/common/http'; -import { Injectable } from '@angular/core'; -import { Observable } from 'rxjs'; - -import { OperatorContextService } from './operator-context.service'; - -export const OPERATOR_METADATA_SENTINEL_HEADER = 'X-Stella-Require-Operator'; -export const OPERATOR_REASON_HEADER = 'X-Stella-Operator-Reason'; -export const OPERATOR_TICKET_HEADER = 'X-Stella-Operator-Ticket'; - -@Injectable() -export class OperatorMetadataInterceptor implements HttpInterceptor { - constructor(private readonly context: OperatorContextService) {} - - intercept( - request: HttpRequest<unknown>, - next: HttpHandler - ): Observable<HttpEvent<unknown>> { - if (!request.headers.has(OPERATOR_METADATA_SENTINEL_HEADER)) { - return next.handle(request); - } - - const current = this.context.snapshot(); - const headers = request.headers.delete(OPERATOR_METADATA_SENTINEL_HEADER); - - if (!current) { - return next.handle(request.clone({ headers })); - } - - const enriched = headers - .set(OPERATOR_REASON_HEADER, current.reason) - .set(OPERATOR_TICKET_HEADER, current.ticket); - - return next.handle(request.clone({ headers: enriched })); - } -} +import { + HttpEvent, + HttpHandler, + HttpInterceptor, + HttpRequest, +} from '@angular/common/http'; +import { Injectable } from '@angular/core'; +import { Observable } from 'rxjs'; + +import { OperatorContextService } from './operator-context.service'; + +export const OPERATOR_METADATA_SENTINEL_HEADER = 'X-Stella-Require-Operator'; +export const OPERATOR_REASON_HEADER = 'X-Stella-Operator-Reason'; +export const OPERATOR_TICKET_HEADER = 'X-Stella-Operator-Ticket'; + +@Injectable() +export class OperatorMetadataInterceptor implements HttpInterceptor { + constructor(private readonly context: 
OperatorContextService) {} + + intercept( + request: HttpRequest<unknown>, + next: HttpHandler + ): Observable<HttpEvent<unknown>> { + if (!request.headers.has(OPERATOR_METADATA_SENTINEL_HEADER)) { + return next.handle(request); + } + + const current = this.context.snapshot(); + const headers = request.headers.delete(OPERATOR_METADATA_SENTINEL_HEADER); + + if (!current) { + return next.handle(request.clone({ headers })); + } + + const enriched = headers + .set(OPERATOR_REASON_HEADER, current.reason) + .set(OPERATOR_TICKET_HEADER, current.ticket); + + return next.handle(request.clone({ headers: enriched })); + } +} diff --git a/src/StellaOps.Web/src/app/features/auth/auth-callback.component.ts b/src/Web/StellaOps.Web/src/app/features/auth/auth-callback.component.ts similarity index 96% rename from src/StellaOps.Web/src/app/features/auth/auth-callback.component.ts rename to src/Web/StellaOps.Web/src/app/features/auth/auth-callback.component.ts index 65d920a4..993ccf5c 100644 --- a/src/StellaOps.Web/src/app/features/auth/auth-callback.component.ts +++ b/src/Web/StellaOps.Web/src/app/features/auth/auth-callback.component.ts @@ -1,61 +1,61 @@ -import { CommonModule } from '@angular/common'; -import { Component, OnInit, inject, signal } from '@angular/core'; -import { ActivatedRoute, Router } from '@angular/router'; - -import { AuthorityAuthService } from '../../core/auth/authority-auth.service'; - -@Component({ - selector: 'app-auth-callback', - standalone: true, - imports: [CommonModule], - template: ` - <section class="auth-callback"> - <p *ngIf="state() === 'processing'">Completing sign-in…</p> - <p *ngIf="state() === 'error'" class="error"> - We were unable to complete the sign-in flow. Please try again. - </p> - </section> - `, - styles: [ - ` - .auth-callback { - margin: 4rem auto; - max-width: 420px; - text-align: center; - font-size: 1rem; - color: #0f172a; - } - - .error { - color: #dc2626; - font-weight: 500; - } - `, - ], -}) -export class AuthCallbackComponent implements OnInit { - private readonly route = inject(ActivatedRoute); - private readonly router = inject(Router); - private readonly auth = inject(AuthorityAuthService); - - readonly state = signal<'processing' | 'error'>('processing'); - - async ngOnInit(): Promise<void> { - const params = this.route.snapshot.queryParamMap; - const searchParams = new URLSearchParams(); - params.keys.forEach((key) => { - const value = params.get(key); - if (value != null) { - searchParams.set(key, value); - } - }); - - try { - const result = await this.auth.completeLoginFromRedirect(searchParams); - const returnUrl = result.returnUrl ?? '/'; - await this.router.navigateByUrl(returnUrl, { replaceUrl: true }); - } catch { - this.state.set('error'); - } - } -} +import { CommonModule } from '@angular/common'; +import { Component, OnInit, inject, signal } from '@angular/core'; +import { ActivatedRoute, Router } from '@angular/router'; + +import { AuthorityAuthService } from '../../core/auth/authority-auth.service'; + +@Component({ + selector: 'app-auth-callback', + standalone: true, + imports: [CommonModule], + template: ` + <section class="auth-callback"> + <p *ngIf="state() === 'processing'">Completing sign-in…</p> + <p *ngIf="state() === 'error'" class="error"> + We were unable to complete the sign-in flow. Please try again. 
+ </p> + </section> + `, + styles: [ + ` + .auth-callback { + margin: 4rem auto; + max-width: 420px; + text-align: center; + font-size: 1rem; + color: #0f172a; + } + + .error { + color: #dc2626; + font-weight: 500; + } + `, + ], +}) +export class AuthCallbackComponent implements OnInit { + private readonly route = inject(ActivatedRoute); + private readonly router = inject(Router); + private readonly auth = inject(AuthorityAuthService); + + readonly state = signal<'processing' | 'error'>('processing'); + + async ngOnInit(): Promise<void> { + const params = this.route.snapshot.queryParamMap; + const searchParams = new URLSearchParams(); + params.keys.forEach((key) => { + const value = params.get(key); + if (value != null) { + searchParams.set(key, value); + } + }); + + try { + const result = await this.auth.completeLoginFromRedirect(searchParams); + const returnUrl = result.returnUrl ?? '/'; + await this.router.navigateByUrl(returnUrl, { replaceUrl: true }); + } catch { + this.state.set('error'); + } + } +} diff --git a/src/StellaOps.Web/src/app/features/console/console-profile.component.html b/src/Web/StellaOps.Web/src/app/features/console/console-profile.component.html similarity index 96% rename from src/StellaOps.Web/src/app/features/console/console-profile.component.html rename to src/Web/StellaOps.Web/src/app/features/console/console-profile.component.html index 94ca2dde..595c57c9 100644 --- a/src/StellaOps.Web/src/app/features/console/console-profile.component.html +++ b/src/Web/StellaOps.Web/src/app/features/console/console-profile.component.html @@ -1,208 +1,208 @@ -<section class="console-profile"> - <header class="console-profile__header"> - <div> - <h1>Console Session</h1> - <p class="console-profile__subtitle"> - Session details sourced from Authority console endpoints. 
- </p> - </div> - <button - type="button" - (click)="refresh()" - [disabled]="loading()" - aria-busy="{{ loading() }}" - > - Refresh - </button> - </header> - - <div class="console-profile__error" *ngIf="error() as message"> - {{ message }} - </div> - - <div class="console-profile__loading" *ngIf="loading()"> - Loading console context… - </div> - - <ng-container *ngIf="!loading()"> - <section class="console-profile__card" *ngIf="profile() as profile"> - <header> - <h2>User Profile</h2> - <span class="tenant-chip"> - Tenant - <strong>{{ profile.tenant }}</strong> - </span> - </header> - - <dl> - <div> - <dt>Display name</dt> - <dd>{{ profile.displayName || 'n/a' }}</dd> - </div> - <div> - <dt>Username</dt> - <dd>{{ profile.username || 'n/a' }}</dd> - </div> - <div> - <dt>Subject</dt> - <dd>{{ profile.subjectId || 'n/a' }}</dd> - </div> - <div> - <dt>Session ID</dt> - <dd>{{ profile.sessionId || 'n/a' }}</dd> - </div> - <div> - <dt>Roles</dt> - <dd> - <span *ngIf="profile.roles.length; else noRoles"> - {{ profile.roles.join(', ') }} - </span> - <ng-template #noRoles>n/a</ng-template> - </dd> - </div> - <div> - <dt>Scopes</dt> - <dd> - <span *ngIf="profile.scopes.length; else noScopes"> - {{ profile.scopes.join(', ') }} - </span> - <ng-template #noScopes>n/a</ng-template> - </dd> - </div> - <div> - <dt>Audiences</dt> - <dd> - <span *ngIf="profile.audiences.length; else noAudiences"> - {{ profile.audiences.join(', ') }} - </span> - <ng-template #noAudiences>n/a</ng-template> - </dd> - </div> - <div> - <dt>Authentication methods</dt> - <dd> - <span - *ngIf="profile.authenticationMethods.length; else noAuthMethods" - > - {{ profile.authenticationMethods.join(', ') }} - </span> - <ng-template #noAuthMethods>n/a</ng-template> - </dd> - </div> - <div> - <dt>Issued at</dt> - <dd> - {{ profile.issuedAt ? (profile.issuedAt | date : 'medium') : 'n/a' }} - </dd> - </div> - <div> - <dt>Authentication time</dt> - <dd> - {{ - profile.authenticationTime - ? (profile.authenticationTime | date : 'medium') - : 'n/a' - }} - </dd> - </div> - <div> - <dt>Expires at</dt> - <dd> - {{ profile.expiresAt ? (profile.expiresAt | date : 'medium') : 'n/a' }} - </dd> - </div> - </dl> - </section> - - <section class="console-profile__card" *ngIf="tokenInfo() as token"> - <header> - <h2>Access Token</h2> - <span - class="chip" - [class.chip--active]="token.active" - [class.chip--inactive]="!token.active" - > - {{ token.active ? 'Active' : 'Inactive' }} - </span> - </header> - - <dl> - <div> - <dt>Token ID</dt> - <dd>{{ token.tokenId || 'n/a' }}</dd> - </div> - <div> - <dt>Client ID</dt> - <dd>{{ token.clientId || 'n/a' }}</dd> - </div> - <div> - <dt>Issued at</dt> - <dd> - {{ token.issuedAt ? (token.issuedAt | date : 'medium') : 'n/a' }} - </dd> - </div> - <div> - <dt>Authentication time</dt> - <dd> - {{ - token.authenticationTime - ? (token.authenticationTime | date : 'medium') - : 'n/a' - }} - </dd> - </div> - <div> - <dt>Expires at</dt> - <dd> - {{ token.expiresAt ? (token.expiresAt | date : 'medium') : 'n/a' }} - </dd> - </div> - </dl> - - <div - class="fresh-auth" - *ngIf="freshAuthState() as fresh" - [class.fresh-auth--active]="fresh.active" - [class.fresh-auth--stale]="!fresh.active" - > - Fresh auth: - <strong>{{ fresh.active ? 
'Active' : 'Stale' }}</strong> - <ng-container *ngIf="fresh.expiresAt"> - (expires {{ fresh.expiresAt | date : 'mediumTime' }}) - </ng-container> - </div> - </section> - - <section class="console-profile__card" *ngIf="tenantCount() > 0"> - <header> - <h2>Accessible Tenants</h2> - <span class="tenant-count">{{ tenantCount() }} total</span> - </header> - - <ul class="tenant-list"> - <li - *ngFor="let tenant of tenants()" - [class.tenant-list__item--active]="tenant.id === selectedTenantId()" - > - <button type="button" (click)="selectTenant(tenant.id)"> - <div class="tenant-list__heading"> - <span class="tenant-name">{{ tenant.displayName }}</span> - <span class="tenant-status">{{ tenant.status }}</span> - </div> - <div class="tenant-meta"> - Isolation: {{ tenant.isolationMode }} · Default roles: - <span *ngIf="tenant.defaultRoles.length; else noTenantRoles"> - {{ tenant.defaultRoles.join(', ') }} - </span> - <ng-template #noTenantRoles>n/a</ng-template> - </div> - </button> - </li> - </ul> - </section> - - <p class="console-profile__empty" *ngIf="!hasProfile() && tenantCount() === 0"> - No console session data available for the current identity. - </p> - </ng-container> -</section> +<section class="console-profile"> + <header class="console-profile__header"> + <div> + <h1>Console Session</h1> + <p class="console-profile__subtitle"> + Session details sourced from Authority console endpoints. + </p> + </div> + <button + type="button" + (click)="refresh()" + [disabled]="loading()" + aria-busy="{{ loading() }}" + > + Refresh + </button> + </header> + + <div class="console-profile__error" *ngIf="error() as message"> + {{ message }} + </div> + + <div class="console-profile__loading" *ngIf="loading()"> + Loading console context… + </div> + + <ng-container *ngIf="!loading()"> + <section class="console-profile__card" *ngIf="profile() as profile"> + <header> + <h2>User Profile</h2> + <span class="tenant-chip"> + Tenant + <strong>{{ profile.tenant }}</strong> + </span> + </header> + + <dl> + <div> + <dt>Display name</dt> + <dd>{{ profile.displayName || 'n/a' }}</dd> + </div> + <div> + <dt>Username</dt> + <dd>{{ profile.username || 'n/a' }}</dd> + </div> + <div> + <dt>Subject</dt> + <dd>{{ profile.subjectId || 'n/a' }}</dd> + </div> + <div> + <dt>Session ID</dt> + <dd>{{ profile.sessionId || 'n/a' }}</dd> + </div> + <div> + <dt>Roles</dt> + <dd> + <span *ngIf="profile.roles.length; else noRoles"> + {{ profile.roles.join(', ') }} + </span> + <ng-template #noRoles>n/a</ng-template> + </dd> + </div> + <div> + <dt>Scopes</dt> + <dd> + <span *ngIf="profile.scopes.length; else noScopes"> + {{ profile.scopes.join(', ') }} + </span> + <ng-template #noScopes>n/a</ng-template> + </dd> + </div> + <div> + <dt>Audiences</dt> + <dd> + <span *ngIf="profile.audiences.length; else noAudiences"> + {{ profile.audiences.join(', ') }} + </span> + <ng-template #noAudiences>n/a</ng-template> + </dd> + </div> + <div> + <dt>Authentication methods</dt> + <dd> + <span + *ngIf="profile.authenticationMethods.length; else noAuthMethods" + > + {{ profile.authenticationMethods.join(', ') }} + </span> + <ng-template #noAuthMethods>n/a</ng-template> + </dd> + </div> + <div> + <dt>Issued at</dt> + <dd> + {{ profile.issuedAt ? (profile.issuedAt | date : 'medium') : 'n/a' }} + </dd> + </div> + <div> + <dt>Authentication time</dt> + <dd> + {{ + profile.authenticationTime + ? (profile.authenticationTime | date : 'medium') + : 'n/a' + }} + </dd> + </div> + <div> + <dt>Expires at</dt> + <dd> + {{ profile.expiresAt ? 
(profile.expiresAt | date : 'medium') : 'n/a' }} + </dd> + </div> + </dl> + </section> + + <section class="console-profile__card" *ngIf="tokenInfo() as token"> + <header> + <h2>Access Token</h2> + <span + class="chip" + [class.chip--active]="token.active" + [class.chip--inactive]="!token.active" + > + {{ token.active ? 'Active' : 'Inactive' }} + </span> + </header> + + <dl> + <div> + <dt>Token ID</dt> + <dd>{{ token.tokenId || 'n/a' }}</dd> + </div> + <div> + <dt>Client ID</dt> + <dd>{{ token.clientId || 'n/a' }}</dd> + </div> + <div> + <dt>Issued at</dt> + <dd> + {{ token.issuedAt ? (token.issuedAt | date : 'medium') : 'n/a' }} + </dd> + </div> + <div> + <dt>Authentication time</dt> + <dd> + {{ + token.authenticationTime + ? (token.authenticationTime | date : 'medium') + : 'n/a' + }} + </dd> + </div> + <div> + <dt>Expires at</dt> + <dd> + {{ token.expiresAt ? (token.expiresAt | date : 'medium') : 'n/a' }} + </dd> + </div> + </dl> + + <div + class="fresh-auth" + *ngIf="freshAuthState() as fresh" + [class.fresh-auth--active]="fresh.active" + [class.fresh-auth--stale]="!fresh.active" + > + Fresh auth: + <strong>{{ fresh.active ? 'Active' : 'Stale' }}</strong> + <ng-container *ngIf="fresh.expiresAt"> + (expires {{ fresh.expiresAt | date : 'mediumTime' }}) + </ng-container> + </div> + </section> + + <section class="console-profile__card" *ngIf="tenantCount() > 0"> + <header> + <h2>Accessible Tenants</h2> + <span class="tenant-count">{{ tenantCount() }} total</span> + </header> + + <ul class="tenant-list"> + <li + *ngFor="let tenant of tenants()" + [class.tenant-list__item--active]="tenant.id === selectedTenantId()" + > + <button type="button" (click)="selectTenant(tenant.id)"> + <div class="tenant-list__heading"> + <span class="tenant-name">{{ tenant.displayName }}</span> + <span class="tenant-status">{{ tenant.status }}</span> + </div> + <div class="tenant-meta"> + Isolation: {{ tenant.isolationMode }} · Default roles: + <span *ngIf="tenant.defaultRoles.length; else noTenantRoles"> + {{ tenant.defaultRoles.join(', ') }} + </span> + <ng-template #noTenantRoles>n/a</ng-template> + </div> + </button> + </li> + </ul> + </section> + + <p class="console-profile__empty" *ngIf="!hasProfile() && tenantCount() === 0"> + No console session data available for the current identity. 
+ </p> + </ng-container> +</section> diff --git a/src/StellaOps.Web/src/app/features/console/console-profile.component.scss b/src/Web/StellaOps.Web/src/app/features/console/console-profile.component.scss similarity index 94% rename from src/StellaOps.Web/src/app/features/console/console-profile.component.scss rename to src/Web/StellaOps.Web/src/app/features/console/console-profile.component.scss index 97d1efda..a18a9b89 100644 --- a/src/StellaOps.Web/src/app/features/console/console-profile.component.scss +++ b/src/Web/StellaOps.Web/src/app/features/console/console-profile.component.scss @@ -1,220 +1,220 @@ -.console-profile { - display: flex; - flex-direction: column; - gap: 1.5rem; -} - -.console-profile__header { - display: flex; - justify-content: space-between; - align-items: flex-start; - gap: 1rem; - - h1 { - margin: 0; - font-size: 1.75rem; - font-weight: 600; - color: #0f172a; - } - - .console-profile__subtitle { - margin: 0.25rem 0 0; - color: #475569; - font-size: 0.95rem; - } - - button { - appearance: none; - border: none; - border-radius: 0.75rem; - padding: 0.5rem 1.2rem; - font-weight: 600; - cursor: pointer; - background: linear-gradient(90deg, #4f46e5 0%, #9333ea 100%); - color: #f8fafc; - transition: transform 0.2s ease, box-shadow 0.2s ease; - - &:hover, - &:focus-visible { - transform: translateY(-1px); - box-shadow: 0 6px 18px rgba(79, 70, 229, 0.28); - } - - &:disabled { - cursor: progress; - opacity: 0.75; - transform: none; - box-shadow: none; - } - } -} - -.console-profile__loading { - padding: 1rem 1.25rem; - border-radius: 0.75rem; - background: rgba(79, 70, 229, 0.08); - color: #4338ca; - font-weight: 500; -} - -.console-profile__error { - padding: 1rem 1.25rem; - border-radius: 0.75rem; - background: rgba(220, 38, 38, 0.08); - color: #b91c1c; - border: 1px solid rgba(220, 38, 38, 0.24); -} - -.console-profile__card { - background: linear-gradient(150deg, #ffffff 0%, #f8f9ff 100%); - border-radius: 1rem; - padding: 1.5rem; - box-shadow: 0 12px 32px rgba(15, 23, 42, 0.08); - border: 1px solid rgba(79, 70, 229, 0.08); - - header { - display: flex; - justify-content: space-between; - align-items: center; - margin-bottom: 1.25rem; - - h2 { - margin: 0; - font-size: 1.2rem; - font-weight: 600; - color: #1e293b; - } - } - - dl { - display: grid; - grid-template-columns: repeat(auto-fit, minmax(180px, 1fr)); - gap: 1rem 1.5rem; - margin: 0; - - dt { - margin: 0 0 0.25rem; - font-size: 0.8rem; - letter-spacing: 0.04em; - text-transform: uppercase; - color: #64748b; - } - - dd { - margin: 0; - font-size: 0.95rem; - color: #0f172a; - word-break: break-word; - } - } -} - -.chip, -.tenant-chip { - display: inline-flex; - align-items: center; - gap: 0.35rem; - padding: 0.25rem 0.75rem; - border-radius: 9999px; - font-size: 0.8rem; - font-weight: 600; - text-transform: uppercase; - letter-spacing: 0.04em; - background-color: rgba(15, 23, 42, 0.08); - color: #0f172a; -} - -.chip--active { - background-color: rgba(22, 163, 74, 0.12); - color: #15803d; -} - -.chip--inactive { - background-color: rgba(220, 38, 38, 0.12); - color: #b91c1c; -} - -.tenant-chip { - background-color: rgba(79, 70, 229, 0.12); - color: #4338ca; -} - -.tenant-count { - font-size: 0.85rem; - font-weight: 500; - color: #475569; -} - -.tenant-list { - list-style: none; - padding: 0; - margin: 0; - display: grid; - gap: 0.75rem; -} - -.tenant-list__item--active button { - border-color: rgba(79, 70, 229, 0.45); - background-color: rgba(79, 70, 229, 0.08); -} - -.tenant-list button { - width: 100%; - display: 
flex; - flex-direction: column; - align-items: flex-start; - gap: 0.35rem; - padding: 0.75rem 1rem; - border-radius: 0.75rem; - border: 1px solid rgba(100, 116, 139, 0.18); - background: #ffffff; - text-align: left; - cursor: pointer; - transition: border-color 0.2s ease, box-shadow 0.2s ease, transform 0.2s ease; - - &:hover, - &:focus-visible { - border-color: rgba(79, 70, 229, 0.45); - box-shadow: 0 8px 20px rgba(79, 70, 229, 0.16); - transform: translateY(-1px); - } -} - -.tenant-list__heading { - display: flex; - justify-content: space-between; - width: 100%; - font-weight: 600; - color: #1e293b; -} - -.tenant-meta { - font-size: 0.85rem; - color: #475569; -} - -.fresh-auth { - margin-top: 1.25rem; - padding: 0.6rem 0.9rem; - border-radius: 0.75rem; - font-size: 0.9rem; - display: inline-flex; - align-items: center; - gap: 0.5rem; -} - -.fresh-auth--active { - background-color: rgba(16, 185, 129, 0.1); - color: #047857; -} - -.fresh-auth--stale { - background-color: rgba(249, 115, 22, 0.12); - color: #c2410c; -} - -.console-profile__empty { - margin: 0; - font-size: 0.95rem; - color: #475569; -} +.console-profile { + display: flex; + flex-direction: column; + gap: 1.5rem; +} + +.console-profile__header { + display: flex; + justify-content: space-between; + align-items: flex-start; + gap: 1rem; + + h1 { + margin: 0; + font-size: 1.75rem; + font-weight: 600; + color: #0f172a; + } + + .console-profile__subtitle { + margin: 0.25rem 0 0; + color: #475569; + font-size: 0.95rem; + } + + button { + appearance: none; + border: none; + border-radius: 0.75rem; + padding: 0.5rem 1.2rem; + font-weight: 600; + cursor: pointer; + background: linear-gradient(90deg, #4f46e5 0%, #9333ea 100%); + color: #f8fafc; + transition: transform 0.2s ease, box-shadow 0.2s ease; + + &:hover, + &:focus-visible { + transform: translateY(-1px); + box-shadow: 0 6px 18px rgba(79, 70, 229, 0.28); + } + + &:disabled { + cursor: progress; + opacity: 0.75; + transform: none; + box-shadow: none; + } + } +} + +.console-profile__loading { + padding: 1rem 1.25rem; + border-radius: 0.75rem; + background: rgba(79, 70, 229, 0.08); + color: #4338ca; + font-weight: 500; +} + +.console-profile__error { + padding: 1rem 1.25rem; + border-radius: 0.75rem; + background: rgba(220, 38, 38, 0.08); + color: #b91c1c; + border: 1px solid rgba(220, 38, 38, 0.24); +} + +.console-profile__card { + background: linear-gradient(150deg, #ffffff 0%, #f8f9ff 100%); + border-radius: 1rem; + padding: 1.5rem; + box-shadow: 0 12px 32px rgba(15, 23, 42, 0.08); + border: 1px solid rgba(79, 70, 229, 0.08); + + header { + display: flex; + justify-content: space-between; + align-items: center; + margin-bottom: 1.25rem; + + h2 { + margin: 0; + font-size: 1.2rem; + font-weight: 600; + color: #1e293b; + } + } + + dl { + display: grid; + grid-template-columns: repeat(auto-fit, minmax(180px, 1fr)); + gap: 1rem 1.5rem; + margin: 0; + + dt { + margin: 0 0 0.25rem; + font-size: 0.8rem; + letter-spacing: 0.04em; + text-transform: uppercase; + color: #64748b; + } + + dd { + margin: 0; + font-size: 0.95rem; + color: #0f172a; + word-break: break-word; + } + } +} + +.chip, +.tenant-chip { + display: inline-flex; + align-items: center; + gap: 0.35rem; + padding: 0.25rem 0.75rem; + border-radius: 9999px; + font-size: 0.8rem; + font-weight: 600; + text-transform: uppercase; + letter-spacing: 0.04em; + background-color: rgba(15, 23, 42, 0.08); + color: #0f172a; +} + +.chip--active { + background-color: rgba(22, 163, 74, 0.12); + color: #15803d; +} + +.chip--inactive { + 
background-color: rgba(220, 38, 38, 0.12); + color: #b91c1c; +} + +.tenant-chip { + background-color: rgba(79, 70, 229, 0.12); + color: #4338ca; +} + +.tenant-count { + font-size: 0.85rem; + font-weight: 500; + color: #475569; +} + +.tenant-list { + list-style: none; + padding: 0; + margin: 0; + display: grid; + gap: 0.75rem; +} + +.tenant-list__item--active button { + border-color: rgba(79, 70, 229, 0.45); + background-color: rgba(79, 70, 229, 0.08); +} + +.tenant-list button { + width: 100%; + display: flex; + flex-direction: column; + align-items: flex-start; + gap: 0.35rem; + padding: 0.75rem 1rem; + border-radius: 0.75rem; + border: 1px solid rgba(100, 116, 139, 0.18); + background: #ffffff; + text-align: left; + cursor: pointer; + transition: border-color 0.2s ease, box-shadow 0.2s ease, transform 0.2s ease; + + &:hover, + &:focus-visible { + border-color: rgba(79, 70, 229, 0.45); + box-shadow: 0 8px 20px rgba(79, 70, 229, 0.16); + transform: translateY(-1px); + } +} + +.tenant-list__heading { + display: flex; + justify-content: space-between; + width: 100%; + font-weight: 600; + color: #1e293b; +} + +.tenant-meta { + font-size: 0.85rem; + color: #475569; +} + +.fresh-auth { + margin-top: 1.25rem; + padding: 0.6rem 0.9rem; + border-radius: 0.75rem; + font-size: 0.9rem; + display: inline-flex; + align-items: center; + gap: 0.5rem; +} + +.fresh-auth--active { + background-color: rgba(16, 185, 129, 0.1); + color: #047857; +} + +.fresh-auth--stale { + background-color: rgba(249, 115, 22, 0.12); + color: #c2410c; +} + +.console-profile__empty { + margin: 0; + font-size: 0.95rem; + color: #475569; +} diff --git a/src/StellaOps.Web/src/app/features/console/console-profile.component.spec.ts b/src/Web/StellaOps.Web/src/app/features/console/console-profile.component.spec.ts similarity index 96% rename from src/StellaOps.Web/src/app/features/console/console-profile.component.spec.ts rename to src/Web/StellaOps.Web/src/app/features/console/console-profile.component.spec.ts index d7c6a608..ce9b8f9d 100644 --- a/src/StellaOps.Web/src/app/features/console/console-profile.component.spec.ts +++ b/src/Web/StellaOps.Web/src/app/features/console/console-profile.component.spec.ts @@ -1,110 +1,110 @@ -import { ComponentFixture, TestBed } from '@angular/core/testing'; - -import { ConsoleSessionService } from '../../core/console/console-session.service'; -import { ConsoleSessionStore } from '../../core/console/console-session.store'; -import { ConsoleProfileComponent } from './console-profile.component'; - -class MockConsoleSessionService { - loadConsoleContext = jasmine - .createSpy('loadConsoleContext') - .and.returnValue(Promise.resolve()); - refresh = jasmine - .createSpy('refresh') - .and.returnValue(Promise.resolve()); - switchTenant = jasmine - .createSpy('switchTenant') - .and.returnValue(Promise.resolve()); -} - -describe('ConsoleProfileComponent', () => { - let fixture: ComponentFixture<ConsoleProfileComponent>; - let service: MockConsoleSessionService; - let store: ConsoleSessionStore; - - beforeEach(async () => { - await TestBed.configureTestingModule({ - imports: [ConsoleProfileComponent], - providers: [ - ConsoleSessionStore, - { provide: ConsoleSessionService, useClass: MockConsoleSessionService }, - ], - }).compileComponents(); - - service = TestBed.inject( - ConsoleSessionService - ) as unknown as MockConsoleSessionService; - store = TestBed.inject(ConsoleSessionStore); - fixture = TestBed.createComponent(ConsoleProfileComponent); - }); - - it('renders profile and tenant information', async 
() => { - store.setContext({ - tenants: [ - { - id: 'tenant-default', - displayName: 'Tenant Default', - status: 'active', - isolationMode: 'shared', - defaultRoles: ['role.console'], - }, - ], - selectedTenantId: 'tenant-default', - profile: { - subjectId: 'user-1', - username: 'user@example.com', - displayName: 'Console User', - tenant: 'tenant-default', - sessionId: 'session-1', - roles: ['role.console'], - scopes: ['ui.read'], - audiences: ['console'], - authenticationMethods: ['pwd'], - issuedAt: new Date('2025-10-31T12:00:00Z'), - authenticationTime: new Date('2025-10-31T12:00:00Z'), - expiresAt: new Date('2025-10-31T12:10:00Z'), - freshAuth: true, - }, - token: { - active: true, - tenant: 'tenant-default', - subject: 'user-1', - clientId: 'console-web', - tokenId: 'token-1', - scopes: ['ui.read'], - audiences: ['console'], - issuedAt: new Date('2025-10-31T12:00:00Z'), - authenticationTime: new Date('2025-10-31T12:00:00Z'), - expiresAt: new Date('2025-10-31T12:10:00Z'), - freshAuthActive: true, - freshAuthExpiresAt: new Date('2025-10-31T12:05:00Z'), - }, - }); - - fixture.detectChanges(); - await fixture.whenStable(); - - const compiled = fixture.nativeElement as HTMLElement; - expect(compiled.querySelector('h1')?.textContent).toContain( - 'Console Session' - ); - expect(compiled.querySelector('.tenant-name')?.textContent).toContain( - 'Tenant Default' - ); - expect(compiled.querySelector('dd')?.textContent).toContain('Console User'); - expect(service.loadConsoleContext).not.toHaveBeenCalled(); - }); - - it('invokes refresh on demand', async () => { - store.clear(); - fixture.detectChanges(); - await fixture.whenStable(); - - const button = fixture.nativeElement.querySelector( - 'button[type="button"]' - ) as HTMLButtonElement; - button.click(); - await fixture.whenStable(); - - expect(service.refresh).toHaveBeenCalled(); - }); -}); +import { ComponentFixture, TestBed } from '@angular/core/testing'; + +import { ConsoleSessionService } from '../../core/console/console-session.service'; +import { ConsoleSessionStore } from '../../core/console/console-session.store'; +import { ConsoleProfileComponent } from './console-profile.component'; + +class MockConsoleSessionService { + loadConsoleContext = jasmine + .createSpy('loadConsoleContext') + .and.returnValue(Promise.resolve()); + refresh = jasmine + .createSpy('refresh') + .and.returnValue(Promise.resolve()); + switchTenant = jasmine + .createSpy('switchTenant') + .and.returnValue(Promise.resolve()); +} + +describe('ConsoleProfileComponent', () => { + let fixture: ComponentFixture<ConsoleProfileComponent>; + let service: MockConsoleSessionService; + let store: ConsoleSessionStore; + + beforeEach(async () => { + await TestBed.configureTestingModule({ + imports: [ConsoleProfileComponent], + providers: [ + ConsoleSessionStore, + { provide: ConsoleSessionService, useClass: MockConsoleSessionService }, + ], + }).compileComponents(); + + service = TestBed.inject( + ConsoleSessionService + ) as unknown as MockConsoleSessionService; + store = TestBed.inject(ConsoleSessionStore); + fixture = TestBed.createComponent(ConsoleProfileComponent); + }); + + it('renders profile and tenant information', async () => { + store.setContext({ + tenants: [ + { + id: 'tenant-default', + displayName: 'Tenant Default', + status: 'active', + isolationMode: 'shared', + defaultRoles: ['role.console'], + }, + ], + selectedTenantId: 'tenant-default', + profile: { + subjectId: 'user-1', + username: 'user@example.com', + displayName: 'Console User', + tenant: 
'tenant-default', + sessionId: 'session-1', + roles: ['role.console'], + scopes: ['ui.read'], + audiences: ['console'], + authenticationMethods: ['pwd'], + issuedAt: new Date('2025-10-31T12:00:00Z'), + authenticationTime: new Date('2025-10-31T12:00:00Z'), + expiresAt: new Date('2025-10-31T12:10:00Z'), + freshAuth: true, + }, + token: { + active: true, + tenant: 'tenant-default', + subject: 'user-1', + clientId: 'console-web', + tokenId: 'token-1', + scopes: ['ui.read'], + audiences: ['console'], + issuedAt: new Date('2025-10-31T12:00:00Z'), + authenticationTime: new Date('2025-10-31T12:00:00Z'), + expiresAt: new Date('2025-10-31T12:10:00Z'), + freshAuthActive: true, + freshAuthExpiresAt: new Date('2025-10-31T12:05:00Z'), + }, + }); + + fixture.detectChanges(); + await fixture.whenStable(); + + const compiled = fixture.nativeElement as HTMLElement; + expect(compiled.querySelector('h1')?.textContent).toContain( + 'Console Session' + ); + expect(compiled.querySelector('.tenant-name')?.textContent).toContain( + 'Tenant Default' + ); + expect(compiled.querySelector('dd')?.textContent).toContain('Console User'); + expect(service.loadConsoleContext).not.toHaveBeenCalled(); + }); + + it('invokes refresh on demand', async () => { + store.clear(); + fixture.detectChanges(); + await fixture.whenStable(); + + const button = fixture.nativeElement.querySelector( + 'button[type="button"]' + ) as HTMLButtonElement; + button.click(); + await fixture.whenStable(); + + expect(service.refresh).toHaveBeenCalled(); + }); +}); diff --git a/src/StellaOps.Web/src/app/features/console/console-profile.component.ts b/src/Web/StellaOps.Web/src/app/features/console/console-profile.component.ts similarity index 96% rename from src/StellaOps.Web/src/app/features/console/console-profile.component.ts rename to src/Web/StellaOps.Web/src/app/features/console/console-profile.component.ts index b065e34d..68fcffb8 100644 --- a/src/StellaOps.Web/src/app/features/console/console-profile.component.ts +++ b/src/Web/StellaOps.Web/src/app/features/console/console-profile.component.ts @@ -1,70 +1,70 @@ -import { CommonModule } from '@angular/common'; -import { - ChangeDetectionStrategy, - Component, - OnInit, - computed, - inject, -} from '@angular/core'; - -import { ConsoleSessionService } from '../../core/console/console-session.service'; -import { ConsoleSessionStore } from '../../core/console/console-session.store'; - -@Component({ - selector: 'app-console-profile', - standalone: true, - imports: [CommonModule], - templateUrl: './console-profile.component.html', - styleUrls: ['./console-profile.component.scss'], - changeDetection: ChangeDetectionStrategy.OnPush, -}) -export class ConsoleProfileComponent implements OnInit { - private readonly store = inject(ConsoleSessionStore); - private readonly service = inject(ConsoleSessionService); - - readonly loading = this.store.loading; - readonly error = this.store.error; - readonly profile = this.store.profile; - readonly tokenInfo = this.store.tokenInfo; - readonly tenants = this.store.tenants; - readonly selectedTenantId = this.store.selectedTenantId; - - readonly hasProfile = computed(() => this.profile() !== null); - readonly tenantCount = computed(() => this.tenants().length); - readonly freshAuthState = computed(() => { - const token = this.tokenInfo(); - if (!token) { - return null; - } - return { - active: token.freshAuthActive, - expiresAt: token.freshAuthExpiresAt, - }; - }); - - async ngOnInit(): Promise<void> { - if (!this.store.hasContext()) { - try { - await 
this.service.loadConsoleContext(); - } catch { - // error surfaced via store - } - } - } - - async refresh(): Promise<void> { - try { - await this.service.refresh(); - } catch { - // error surfaced via store - } - } - - async selectTenant(tenantId: string): Promise<void> { - try { - await this.service.switchTenant(tenantId); - } catch { - // error surfaced via store - } - } -} +import { CommonModule } from '@angular/common'; +import { + ChangeDetectionStrategy, + Component, + OnInit, + computed, + inject, +} from '@angular/core'; + +import { ConsoleSessionService } from '../../core/console/console-session.service'; +import { ConsoleSessionStore } from '../../core/console/console-session.store'; + +@Component({ + selector: 'app-console-profile', + standalone: true, + imports: [CommonModule], + templateUrl: './console-profile.component.html', + styleUrls: ['./console-profile.component.scss'], + changeDetection: ChangeDetectionStrategy.OnPush, +}) +export class ConsoleProfileComponent implements OnInit { + private readonly store = inject(ConsoleSessionStore); + private readonly service = inject(ConsoleSessionService); + + readonly loading = this.store.loading; + readonly error = this.store.error; + readonly profile = this.store.profile; + readonly tokenInfo = this.store.tokenInfo; + readonly tenants = this.store.tenants; + readonly selectedTenantId = this.store.selectedTenantId; + + readonly hasProfile = computed(() => this.profile() !== null); + readonly tenantCount = computed(() => this.tenants().length); + readonly freshAuthState = computed(() => { + const token = this.tokenInfo(); + if (!token) { + return null; + } + return { + active: token.freshAuthActive, + expiresAt: token.freshAuthExpiresAt, + }; + }); + + async ngOnInit(): Promise<void> { + if (!this.store.hasContext()) { + try { + await this.service.loadConsoleContext(); + } catch { + // error surfaced via store + } + } + } + + async refresh(): Promise<void> { + try { + await this.service.refresh(); + } catch { + // error surfaced via store + } + } + + async selectTenant(tenantId: string): Promise<void> { + try { + await this.service.switchTenant(tenantId); + } catch { + // error surfaced via store + } + } +} diff --git a/src/StellaOps.Web/src/app/features/notify/notify-panel.component.html b/src/Web/StellaOps.Web/src/app/features/notify/notify-panel.component.html similarity index 97% rename from src/StellaOps.Web/src/app/features/notify/notify-panel.component.html rename to src/Web/StellaOps.Web/src/app/features/notify/notify-panel.component.html index fd0259e1..6700f62a 100644 --- a/src/StellaOps.Web/src/app/features/notify/notify-panel.component.html +++ b/src/Web/StellaOps.Web/src/app/features/notify/notify-panel.component.html @@ -1,344 +1,344 @@ -<section class="notify-panel" aria-live="polite"> - <header class="notify-panel__header"> - <div> - <p class="eyebrow">Notifications</p> - <h1>Notify control plane</h1> - <p>Manage channels, routing rules, deliveries, and preview payloads offline.</p> - </div> - <button - type="button" - class="ghost-button" - (click)="refreshAll()" - [disabled]="channelLoading() || ruleLoading() || deliveriesLoading()" - > - Refresh data - </button> - </header> - - <div class="notify-grid"> - <article class="notify-card"> - <header class="notify-card__header"> - <div> - <h2>Channels</h2> - <p>Destinations for Slack, Teams, Email, or Webhook notifications.</p> - </div> - <button type="button" class="ghost-button" (click)="createChannelDraft()">New channel</button> - </header> - - <p 
*ngIf="channelMessage()" class="notify-message" role="status"> - {{ channelMessage() }} - </p> - - <ul class="channel-list" role="list"> - <li *ngFor="let channel of channels(); trackBy: trackByChannel"> - <button - type="button" - class="channel-item" - data-testid="channel-item" - [class.active]="selectedChannelId() === channel.channelId" - (click)="selectChannel(channel.channelId)" - > - <span class="channel-name">{{ channel.displayName || channel.name }}</span> - <span class="channel-meta">{{ channel.type }}</span> - <span class="channel-status" [class.channel-status--enabled]="channel.enabled"> - {{ channel.enabled ? 'Enabled' : 'Disabled' }} - </span> - </button> - </li> - </ul> - - <form - class="channel-form" - [formGroup]="channelForm" - (ngSubmit)="saveChannel()" - novalidate - > - <div class="form-grid"> - <label> - <span>Name</span> - <input formControlName="name" type="text" required /> - </label> - <label> - <span>Display name</span> - <input formControlName="displayName" type="text" /> - </label> - <label> - <span>Type</span> - <select formControlName="type"> - <option *ngFor="let type of channelTypes" [value]="type"> - {{ type }} - </option> - </select> - </label> - <label> - <span>Secret reference</span> - <input formControlName="secretRef" type="text" required /> - </label> - <label> - <span>Target</span> - <input formControlName="target" type="text" placeholder="#alerts or email" /> - </label> - <label> - <span>Endpoint</span> - <input formControlName="endpoint" type="text" placeholder="https://example" /> - </label> - <label class="full-width"> - <span>Description</span> - <textarea formControlName="description" rows="2"></textarea> - </label> - <label class="checkbox"> - <input type="checkbox" formControlName="enabled" /> - <span>Channel enabled</span> - </label> - </div> - - <div class="form-grid"> - <label> - <span>Labels (key=value)</span> - <textarea formControlName="labelsText" rows="2" placeholder="tier=critical"></textarea> - </label> - <label> - <span>Metadata (key=value)</span> - <textarea formControlName="metadataText" rows="2" placeholder="workspace=stellaops"></textarea> - </label> - </div> - - <div class="notify-actions"> - <button type="button" class="ghost-button" (click)="createChannelDraft()"> - Reset - </button> - <button - type="button" - class="ghost-button" - (click)="deleteChannel()" - [disabled]="channelLoading() || !selectedChannelId()" - > - Delete - </button> - <button type="submit" [disabled]="channelLoading()"> - Save channel - </button> - </div> - </form> - - <section *ngIf="channelHealth() as health" class="channel-health" aria-live="polite"> - <div class="status-pill" [class.status-pill--healthy]="health.status === 'Healthy'" [class.status-pill--warning]="health.status === 'Degraded'" [class.status-pill--error]="health.status === 'Unhealthy'"> - {{ health.status }} - </div> - <div class="channel-health__details"> - <p>{{ health.message }}</p> - <small>Last checked {{ health.checkedAt | date: 'medium' }} • Trace {{ health.traceId }}</small> - </div> - </section> - - <form class="test-form" [formGroup]="testForm" (ngSubmit)="sendTestPreview()" novalidate> - <h3>Test send</h3> - <div class="form-grid"> - <label> - <span>Preview title</span> - <input formControlName="title" type="text" /> - </label> - <label> - <span>Summary</span> - <input formControlName="summary" type="text" /> - </label> - <label> - <span>Override target</span> - <input formControlName="target" type="text" placeholder="#alerts or user@org" /> - </label> - </div> - <label> 
- <span>Body</span> - <textarea formControlName="body" rows="3"></textarea> - </label> - <div class="notify-actions"> - <button type="submit" [disabled]="testSending()"> - {{ testSending() ? 'Sending…' : 'Send test' }} - </button> - </div> - </form> - - <section *ngIf="testPreview() as preview" class="test-preview" data-testid="test-preview"> - <header> - <strong>Preview queued</strong> - <span>{{ preview.queuedAt | date: 'short' }}</span> - </header> - <p><span>Target:</span> {{ preview.preview.target }}</p> - <p><span>Title:</span> {{ preview.preview.title }}</p> - <p><span>Summary:</span> {{ preview.preview.summary || 'n/a' }}</p> - <p class="preview-body">{{ preview.preview.body }}</p> - </section> - </article> - - <article class="notify-card"> - <header class="notify-card__header"> - <div> - <h2>Rules</h2> - <p>Define routing logic and throttles per channel.</p> - </div> - <button type="button" class="ghost-button" (click)="createRuleDraft()">New rule</button> - </header> - - <p *ngIf="ruleMessage()" class="notify-message" role="status"> - {{ ruleMessage() }} - </p> - - <ul class="rule-list" role="list"> - <li *ngFor="let rule of rules(); trackBy: trackByRule"> - <button - type="button" - class="rule-item" - data-testid="rule-item" - [class.active]="selectedRuleId() === rule.ruleId" - (click)="selectRule(rule.ruleId)" - > - <span class="rule-name">{{ rule.name }}</span> - <span class="rule-meta">{{ rule.match?.minSeverity || 'any' }}</span> - </button> - </li> - </ul> - - <form class="rule-form" [formGroup]="ruleForm" (ngSubmit)="saveRule()" novalidate> - <div class="form-grid"> - <label> - <span>Name</span> - <input formControlName="name" type="text" required /> - </label> - <label> - <span>Minimum severity</span> - <select formControlName="minSeverity"> - <option value="">Any</option> - <option *ngFor="let sev of severityOptions" [value]="sev"> - {{ sev }} - </option> - </select> - </label> - <label> - <span>Channel</span> - <select formControlName="channel" required> - <option *ngFor="let channel of channels()" [value]="channel.channelId"> - {{ channel.displayName || channel.name }} - </option> - </select> - </label> - <label> - <span>Digest</span> - <input formControlName="digest" type="text" placeholder="instant or 1h" /> - </label> - <label> - <span>Template</span> - <input formControlName="template" type="text" placeholder="tmpl-critical" /> - </label> - <label> - <span>Locale</span> - <input formControlName="locale" type="text" placeholder="en-US" /> - </label> - <label> - <span>Throttle (seconds)</span> - <input formControlName="throttleSeconds" type="number" min="0" /> - </label> - <label class="checkbox"> - <input type="checkbox" formControlName="enabled" /> - <span>Rule enabled</span> - </label> - </div> - - <label> - <span>Event kinds (comma or newline)</span> - <textarea formControlName="eventKindsText" rows="2"></textarea> - </label> - <label> - <span>Labels filter</span> - <textarea formControlName="labelsText" rows="2" placeholder="kev,critical"></textarea> - </label> - <label> - <span>Description</span> - <textarea formControlName="description" rows="2"></textarea> - </label> - - <div class="notify-actions"> - <button type="button" class="ghost-button" (click)="createRuleDraft()"> - Reset - </button> - <button - type="button" - class="ghost-button" - (click)="deleteRule()" - [disabled]="ruleLoading() || !selectedRuleId()" - > - Delete - </button> - <button type="submit" [disabled]="ruleLoading()"> - Save rule - </button> - </div> - </form> - </article> - - <article 
class="notify-card notify-card--deliveries"> - <header class="notify-card__header"> - <div> - <h2>Deliveries</h2> - <p>Recent delivery attempts, statuses, and preview traces.</p> - </div> - <button type="button" class="ghost-button" (click)="refreshDeliveries()" [disabled]="deliveriesLoading()">Refresh</button> - </header> - - <div class="deliveries-controls"> - <label> - <span>Status filter</span> - <select [value]="deliveryFilter()" (change)="onDeliveryFilterChange($any($event.target).value)"> - <option value="all">All</option> - <option value="sent">Sent</option> - <option value="failed">Failed</option> - <option value="pending">Pending</option> - <option value="throttled">Throttled</option> - <option value="digested">Digested</option> - <option value="dropped">Dropped</option> - </select> - </label> - </div> - - <p *ngIf="deliveriesMessage()" class="notify-message" role="status"> - {{ deliveriesMessage() }} - </p> - - <div class="deliveries-table"> - <table> - <thead> - <tr> - <th scope="col">Status</th> - <th scope="col">Target</th> - <th scope="col">Kind</th> - <th scope="col">Created</th> - </tr> - </thead> - <tbody> - <tr - *ngFor="let delivery of filteredDeliveries(); trackBy: trackByDelivery" - data-testid="delivery-row" - > - <td> - <span class="status-badge" [class.status-badge--sent]="delivery.status === 'Sent'" [class.status-badge--failed]="delivery.status === 'Failed'" [class.status-badge--throttled]="delivery.status === 'Throttled'"> - {{ delivery.status }} - </span> - </td> - <td> - {{ delivery.rendered?.target || 'n/a' }} - </td> - <td> - {{ delivery.kind }} - </td> - <td> - {{ delivery.createdAt | date: 'short' }} - </td> - </tr> - <tr *ngIf="!deliveriesLoading() && !filteredDeliveries().length"> - <td colspan="4" class="empty-row">No deliveries match this filter.</td> - </tr> - </tbody> - </table> - </div> - </article> - </div> -</section> +<section class="notify-panel" aria-live="polite"> + <header class="notify-panel__header"> + <div> + <p class="eyebrow">Notifications</p> + <h1>Notify control plane</h1> + <p>Manage channels, routing rules, deliveries, and preview payloads offline.</p> + </div> + <button + type="button" + class="ghost-button" + (click)="refreshAll()" + [disabled]="channelLoading() || ruleLoading() || deliveriesLoading()" + > + Refresh data + </button> + </header> + + <div class="notify-grid"> + <article class="notify-card"> + <header class="notify-card__header"> + <div> + <h2>Channels</h2> + <p>Destinations for Slack, Teams, Email, or Webhook notifications.</p> + </div> + <button type="button" class="ghost-button" (click)="createChannelDraft()">New channel</button> + </header> + + <p *ngIf="channelMessage()" class="notify-message" role="status"> + {{ channelMessage() }} + </p> + + <ul class="channel-list" role="list"> + <li *ngFor="let channel of channels(); trackBy: trackByChannel"> + <button + type="button" + class="channel-item" + data-testid="channel-item" + [class.active]="selectedChannelId() === channel.channelId" + (click)="selectChannel(channel.channelId)" + > + <span class="channel-name">{{ channel.displayName || channel.name }}</span> + <span class="channel-meta">{{ channel.type }}</span> + <span class="channel-status" [class.channel-status--enabled]="channel.enabled"> + {{ channel.enabled ? 
'Enabled' : 'Disabled' }} + </span> + </button> + </li> + </ul> + + <form + class="channel-form" + [formGroup]="channelForm" + (ngSubmit)="saveChannel()" + novalidate + > + <div class="form-grid"> + <label> + <span>Name</span> + <input formControlName="name" type="text" required /> + </label> + <label> + <span>Display name</span> + <input formControlName="displayName" type="text" /> + </label> + <label> + <span>Type</span> + <select formControlName="type"> + <option *ngFor="let type of channelTypes" [value]="type"> + {{ type }} + </option> + </select> + </label> + <label> + <span>Secret reference</span> + <input formControlName="secretRef" type="text" required /> + </label> + <label> + <span>Target</span> + <input formControlName="target" type="text" placeholder="#alerts or email" /> + </label> + <label> + <span>Endpoint</span> + <input formControlName="endpoint" type="text" placeholder="https://example" /> + </label> + <label class="full-width"> + <span>Description</span> + <textarea formControlName="description" rows="2"></textarea> + </label> + <label class="checkbox"> + <input type="checkbox" formControlName="enabled" /> + <span>Channel enabled</span> + </label> + </div> + + <div class="form-grid"> + <label> + <span>Labels (key=value)</span> + <textarea formControlName="labelsText" rows="2" placeholder="tier=critical"></textarea> + </label> + <label> + <span>Metadata (key=value)</span> + <textarea formControlName="metadataText" rows="2" placeholder="workspace=stellaops"></textarea> + </label> + </div> + + <div class="notify-actions"> + <button type="button" class="ghost-button" (click)="createChannelDraft()"> + Reset + </button> + <button + type="button" + class="ghost-button" + (click)="deleteChannel()" + [disabled]="channelLoading() || !selectedChannelId()" + > + Delete + </button> + <button type="submit" [disabled]="channelLoading()"> + Save channel + </button> + </div> + </form> + + <section *ngIf="channelHealth() as health" class="channel-health" aria-live="polite"> + <div class="status-pill" [class.status-pill--healthy]="health.status === 'Healthy'" [class.status-pill--warning]="health.status === 'Degraded'" [class.status-pill--error]="health.status === 'Unhealthy'"> + {{ health.status }} + </div> + <div class="channel-health__details"> + <p>{{ health.message }}</p> + <small>Last checked {{ health.checkedAt | date: 'medium' }} • Trace {{ health.traceId }}</small> + </div> + </section> + + <form class="test-form" [formGroup]="testForm" (ngSubmit)="sendTestPreview()" novalidate> + <h3>Test send</h3> + <div class="form-grid"> + <label> + <span>Preview title</span> + <input formControlName="title" type="text" /> + </label> + <label> + <span>Summary</span> + <input formControlName="summary" type="text" /> + </label> + <label> + <span>Override target</span> + <input formControlName="target" type="text" placeholder="#alerts or user@org" /> + </label> + </div> + <label> + <span>Body</span> + <textarea formControlName="body" rows="3"></textarea> + </label> + <div class="notify-actions"> + <button type="submit" [disabled]="testSending()"> + {{ testSending() ? 
'Sending…' : 'Send test' }} + </button> + </div> + </form> + + <section *ngIf="testPreview() as preview" class="test-preview" data-testid="test-preview"> + <header> + <strong>Preview queued</strong> + <span>{{ preview.queuedAt | date: 'short' }}</span> + </header> + <p><span>Target:</span> {{ preview.preview.target }}</p> + <p><span>Title:</span> {{ preview.preview.title }}</p> + <p><span>Summary:</span> {{ preview.preview.summary || 'n/a' }}</p> + <p class="preview-body">{{ preview.preview.body }}</p> + </section> + </article> + + <article class="notify-card"> + <header class="notify-card__header"> + <div> + <h2>Rules</h2> + <p>Define routing logic and throttles per channel.</p> + </div> + <button type="button" class="ghost-button" (click)="createRuleDraft()">New rule</button> + </header> + + <p *ngIf="ruleMessage()" class="notify-message" role="status"> + {{ ruleMessage() }} + </p> + + <ul class="rule-list" role="list"> + <li *ngFor="let rule of rules(); trackBy: trackByRule"> + <button + type="button" + class="rule-item" + data-testid="rule-item" + [class.active]="selectedRuleId() === rule.ruleId" + (click)="selectRule(rule.ruleId)" + > + <span class="rule-name">{{ rule.name }}</span> + <span class="rule-meta">{{ rule.match?.minSeverity || 'any' }}</span> + </button> + </li> + </ul> + + <form class="rule-form" [formGroup]="ruleForm" (ngSubmit)="saveRule()" novalidate> + <div class="form-grid"> + <label> + <span>Name</span> + <input formControlName="name" type="text" required /> + </label> + <label> + <span>Minimum severity</span> + <select formControlName="minSeverity"> + <option value="">Any</option> + <option *ngFor="let sev of severityOptions" [value]="sev"> + {{ sev }} + </option> + </select> + </label> + <label> + <span>Channel</span> + <select formControlName="channel" required> + <option *ngFor="let channel of channels()" [value]="channel.channelId"> + {{ channel.displayName || channel.name }} + </option> + </select> + </label> + <label> + <span>Digest</span> + <input formControlName="digest" type="text" placeholder="instant or 1h" /> + </label> + <label> + <span>Template</span> + <input formControlName="template" type="text" placeholder="tmpl-critical" /> + </label> + <label> + <span>Locale</span> + <input formControlName="locale" type="text" placeholder="en-US" /> + </label> + <label> + <span>Throttle (seconds)</span> + <input formControlName="throttleSeconds" type="number" min="0" /> + </label> + <label class="checkbox"> + <input type="checkbox" formControlName="enabled" /> + <span>Rule enabled</span> + </label> + </div> + + <label> + <span>Event kinds (comma or newline)</span> + <textarea formControlName="eventKindsText" rows="2"></textarea> + </label> + <label> + <span>Labels filter</span> + <textarea formControlName="labelsText" rows="2" placeholder="kev,critical"></textarea> + </label> + <label> + <span>Description</span> + <textarea formControlName="description" rows="2"></textarea> + </label> + + <div class="notify-actions"> + <button type="button" class="ghost-button" (click)="createRuleDraft()"> + Reset + </button> + <button + type="button" + class="ghost-button" + (click)="deleteRule()" + [disabled]="ruleLoading() || !selectedRuleId()" + > + Delete + </button> + <button type="submit" [disabled]="ruleLoading()"> + Save rule + </button> + </div> + </form> + </article> + + <article class="notify-card notify-card--deliveries"> + <header class="notify-card__header"> + <div> + <h2>Deliveries</h2> + <p>Recent delivery attempts, statuses, and preview traces.</p> + </div> + 
<button type="button" class="ghost-button" (click)="refreshDeliveries()" [disabled]="deliveriesLoading()">Refresh</button> + </header> + + <div class="deliveries-controls"> + <label> + <span>Status filter</span> + <select [value]="deliveryFilter()" (change)="onDeliveryFilterChange($any($event.target).value)"> + <option value="all">All</option> + <option value="sent">Sent</option> + <option value="failed">Failed</option> + <option value="pending">Pending</option> + <option value="throttled">Throttled</option> + <option value="digested">Digested</option> + <option value="dropped">Dropped</option> + </select> + </label> + </div> + + <p *ngIf="deliveriesMessage()" class="notify-message" role="status"> + {{ deliveriesMessage() }} + </p> + + <div class="deliveries-table"> + <table> + <thead> + <tr> + <th scope="col">Status</th> + <th scope="col">Target</th> + <th scope="col">Kind</th> + <th scope="col">Created</th> + </tr> + </thead> + <tbody> + <tr + *ngFor="let delivery of filteredDeliveries(); trackBy: trackByDelivery" + data-testid="delivery-row" + > + <td> + <span class="status-badge" [class.status-badge--sent]="delivery.status === 'Sent'" [class.status-badge--failed]="delivery.status === 'Failed'" [class.status-badge--throttled]="delivery.status === 'Throttled'"> + {{ delivery.status }} + </span> + </td> + <td> + {{ delivery.rendered?.target || 'n/a' }} + </td> + <td> + {{ delivery.kind }} + </td> + <td> + {{ delivery.createdAt | date: 'short' }} + </td> + </tr> + <tr *ngIf="!deliveriesLoading() && !filteredDeliveries().length"> + <td colspan="4" class="empty-row">No deliveries match this filter.</td> + </tr> + </tbody> + </table> + </div> + </article> + </div> +</section> diff --git a/src/StellaOps.Web/src/app/features/notify/notify-panel.component.scss b/src/Web/StellaOps.Web/src/app/features/notify/notify-panel.component.scss similarity index 94% rename from src/StellaOps.Web/src/app/features/notify/notify-panel.component.scss rename to src/Web/StellaOps.Web/src/app/features/notify/notify-panel.component.scss index a99d65a9..489936d4 100644 --- a/src/StellaOps.Web/src/app/features/notify/notify-panel.component.scss +++ b/src/Web/StellaOps.Web/src/app/features/notify/notify-panel.component.scss @@ -1,386 +1,386 @@ -:host { - display: block; - color: #e2e8f0; -} - -.notify-panel { - background: #0f172a; - border-radius: 16px; - padding: 2rem; - box-shadow: 0 20px 45px rgba(15, 23, 42, 0.45); -} - -.notify-panel__header { - display: flex; - flex-wrap: wrap; - align-items: center; - justify-content: space-between; - gap: 1rem; - margin-bottom: 2rem; - - h1 { - margin: 0.25rem 0; - font-size: 1.75rem; - } - - p { - margin: 0; - color: #cbd5f5; - } -} - -.eyebrow { - text-transform: uppercase; - font-size: 0.75rem; - letter-spacing: 0.1em; - color: #94a3b8; - margin: 0; -} - -.notify-grid { - display: grid; - gap: 1.5rem; - grid-template-columns: repeat(auto-fit, minmax(280px, 1fr)); -} - -.notify-card { - background: #111827; - border: 1px solid #1f2937; - border-radius: 16px; - padding: 1.5rem; - display: flex; - flex-direction: column; - gap: 1rem; - min-height: 100%; -} - -.notify-card__header { - display: flex; - align-items: center; - justify-content: space-between; - gap: 1rem; - - h2 { - margin: 0; - font-size: 1.25rem; - } - - p { - margin: 0; - color: #94a3b8; - font-size: 0.9rem; - } -} - -.ghost-button { - border: 1px solid rgba(148, 163, 184, 0.4); - background: transparent; - color: #e2e8f0; - border-radius: 999px; - padding: 0.35rem 1rem; - font-size: 0.85rem; - cursor: 
pointer; - transition: background-color 0.2s ease, border-color 0.2s ease; - - &:hover, - &:focus-visible { - border-color: #38bdf8; - background: rgba(56, 189, 248, 0.15); - } - - &:disabled { - opacity: 0.4; - cursor: not-allowed; - } -} - -.notify-message { - margin: 0; - padding: 0.5rem 0.75rem; - border-radius: 8px; - background: rgba(59, 130, 246, 0.15); - color: #e0f2fe; - font-size: 0.9rem; -} - -.channel-list, -.rule-list { - list-style: none; - margin: 0; - padding: 0; - display: flex; - flex-direction: column; - gap: 0.5rem; -} - -.channel-item, -.rule-item { - width: 100%; - border: 1px solid #1f2937; - background: #0f172a; - color: inherit; - border-radius: 12px; - padding: 0.75rem 1rem; - display: flex; - align-items: center; - justify-content: space-between; - gap: 0.5rem; - cursor: pointer; - transition: border-color 0.2s ease, transform 0.2s ease; - - &.active { - border-color: #38bdf8; - background: rgba(56, 189, 248, 0.1); - transform: translateY(-1px); - } -} - -.channel-meta, -.rule-meta { - font-size: 0.8rem; - color: #94a3b8; -} - -.channel-status { - font-size: 0.75rem; - text-transform: uppercase; - padding: 0.15rem 0.5rem; - border-radius: 999px; - border: 1px solid rgba(248, 250, 252, 0.2); -} - -.channel-status--enabled { - border-color: #34d399; - color: #34d399; -} - -.form-grid { - display: grid; - grid-template-columns: repeat(auto-fit, minmax(200px, 1fr)); - gap: 1rem; - width: 100%; -} - -label { - display: flex; - flex-direction: column; - gap: 0.35rem; - font-size: 0.85rem; -} - -label span { - color: #cbd5f5; - font-weight: 500; -} - -input, -textarea, -select { - background: #0f172a; - border: 1px solid #1f2937; - border-radius: 10px; - color: inherit; - padding: 0.6rem; - font-size: 0.95rem; - font-family: inherit; - - &:focus-visible { - outline: 2px solid #38bdf8; - outline-offset: 2px; - } -} - -.checkbox { - flex-direction: row; - align-items: center; - gap: 0.5rem; - font-weight: 500; - - input { - width: auto; - } -} - -.full-width { - grid-column: 1 / -1; -} - -.notify-actions { - display: flex; - justify-content: flex-end; - gap: 0.75rem; -} - -.notify-actions button { - border: none; - border-radius: 999px; - padding: 0.45rem 1.25rem; - font-weight: 600; - cursor: pointer; - background: linear-gradient(120deg, #38bdf8, #8b5cf6); - color: #0f172a; - transition: opacity 0.2s ease; - - &:disabled { - opacity: 0.5; - cursor: not-allowed; - } -} - -.notify-actions .ghost-button { - background: transparent; - color: #e2e8f0; -} - -.channel-health { - display: flex; - gap: 0.75rem; - align-items: center; - padding: 0.75rem 1rem; - border-radius: 12px; - background: #0b1220; - border: 1px solid #1d2a44; -} - -.status-pill { - padding: 0.25rem 0.75rem; - border-radius: 999px; - font-size: 0.8rem; - text-transform: uppercase; - letter-spacing: 0.08em; - border: 1px solid rgba(248, 250, 252, 0.3); -} - -.status-pill--healthy { - border-color: #34d399; - color: #34d399; -} - -.status-pill--warning { - border-color: #facc15; - color: #facc15; -} - -.status-pill--error { - border-color: #f87171; - color: #f87171; -} - -.channel-health__details p { - margin: 0; - font-size: 0.9rem; -} - -.channel-health__details small { - color: #94a3b8; -} - -.test-form h3 { - margin: 0; - font-size: 1rem; - color: #cbd5f5; -} - -.test-preview { - border: 1px solid #1f2937; - border-radius: 12px; - padding: 1rem; - background: #0b1220; - - header { - display: flex; - justify-content: space-between; - font-size: 0.9rem; - } - - p { - margin: 0.25rem 0; - font-size: 0.9rem; 
- } - - span { - font-weight: 600; - color: #cbd5f5; - } - - .preview-body { - font-family: 'JetBrains Mono', 'Fira Code', monospace; - background: #0f172a; - border-radius: 8px; - padding: 0.75rem; - } -} - -.deliveries-controls { - display: flex; - justify-content: flex-start; - gap: 1rem; -} - -.deliveries-controls label { - min-width: 140px; -} - -.deliveries-table { - overflow-x: auto; -} - -table { - width: 100%; - border-collapse: collapse; - font-size: 0.9rem; -} - -thead th { - text-align: left; - font-weight: 600; - padding-bottom: 0.5rem; - color: #cbd5f5; -} - -tbody td { - padding: 0.6rem 0.25rem; - border-top: 1px solid #1f2937; -} - -.empty-row { - text-align: center; - color: #94a3b8; - padding: 1rem 0; -} - -.status-badge { - display: inline-block; - padding: 0.2rem 0.6rem; - border-radius: 8px; - font-size: 0.75rem; - text-transform: uppercase; - border: 1px solid rgba(148, 163, 184, 0.5); -} - -.status-badge--sent { - border-color: #34d399; - color: #34d399; -} - -.status-badge--failed { - border-color: #f87171; - color: #f87171; -} - -.status-badge--throttled { - border-color: #facc15; - color: #facc15; -} - -@media (max-width: 720px) { - .notify-panel { - padding: 1.25rem; - } - - .notify-panel__header { - flex-direction: column; - align-items: flex-start; - } -} - +:host { + display: block; + color: #e2e8f0; +} + +.notify-panel { + background: #0f172a; + border-radius: 16px; + padding: 2rem; + box-shadow: 0 20px 45px rgba(15, 23, 42, 0.45); +} + +.notify-panel__header { + display: flex; + flex-wrap: wrap; + align-items: center; + justify-content: space-between; + gap: 1rem; + margin-bottom: 2rem; + + h1 { + margin: 0.25rem 0; + font-size: 1.75rem; + } + + p { + margin: 0; + color: #cbd5f5; + } +} + +.eyebrow { + text-transform: uppercase; + font-size: 0.75rem; + letter-spacing: 0.1em; + color: #94a3b8; + margin: 0; +} + +.notify-grid { + display: grid; + gap: 1.5rem; + grid-template-columns: repeat(auto-fit, minmax(280px, 1fr)); +} + +.notify-card { + background: #111827; + border: 1px solid #1f2937; + border-radius: 16px; + padding: 1.5rem; + display: flex; + flex-direction: column; + gap: 1rem; + min-height: 100%; +} + +.notify-card__header { + display: flex; + align-items: center; + justify-content: space-between; + gap: 1rem; + + h2 { + margin: 0; + font-size: 1.25rem; + } + + p { + margin: 0; + color: #94a3b8; + font-size: 0.9rem; + } +} + +.ghost-button { + border: 1px solid rgba(148, 163, 184, 0.4); + background: transparent; + color: #e2e8f0; + border-radius: 999px; + padding: 0.35rem 1rem; + font-size: 0.85rem; + cursor: pointer; + transition: background-color 0.2s ease, border-color 0.2s ease; + + &:hover, + &:focus-visible { + border-color: #38bdf8; + background: rgba(56, 189, 248, 0.15); + } + + &:disabled { + opacity: 0.4; + cursor: not-allowed; + } +} + +.notify-message { + margin: 0; + padding: 0.5rem 0.75rem; + border-radius: 8px; + background: rgba(59, 130, 246, 0.15); + color: #e0f2fe; + font-size: 0.9rem; +} + +.channel-list, +.rule-list { + list-style: none; + margin: 0; + padding: 0; + display: flex; + flex-direction: column; + gap: 0.5rem; +} + +.channel-item, +.rule-item { + width: 100%; + border: 1px solid #1f2937; + background: #0f172a; + color: inherit; + border-radius: 12px; + padding: 0.75rem 1rem; + display: flex; + align-items: center; + justify-content: space-between; + gap: 0.5rem; + cursor: pointer; + transition: border-color 0.2s ease, transform 0.2s ease; + + &.active { + border-color: #38bdf8; + background: rgba(56, 189, 248, 0.1); 
+ transform: translateY(-1px); + } +} + +.channel-meta, +.rule-meta { + font-size: 0.8rem; + color: #94a3b8; +} + +.channel-status { + font-size: 0.75rem; + text-transform: uppercase; + padding: 0.15rem 0.5rem; + border-radius: 999px; + border: 1px solid rgba(248, 250, 252, 0.2); +} + +.channel-status--enabled { + border-color: #34d399; + color: #34d399; +} + +.form-grid { + display: grid; + grid-template-columns: repeat(auto-fit, minmax(200px, 1fr)); + gap: 1rem; + width: 100%; +} + +label { + display: flex; + flex-direction: column; + gap: 0.35rem; + font-size: 0.85rem; +} + +label span { + color: #cbd5f5; + font-weight: 500; +} + +input, +textarea, +select { + background: #0f172a; + border: 1px solid #1f2937; + border-radius: 10px; + color: inherit; + padding: 0.6rem; + font-size: 0.95rem; + font-family: inherit; + + &:focus-visible { + outline: 2px solid #38bdf8; + outline-offset: 2px; + } +} + +.checkbox { + flex-direction: row; + align-items: center; + gap: 0.5rem; + font-weight: 500; + + input { + width: auto; + } +} + +.full-width { + grid-column: 1 / -1; +} + +.notify-actions { + display: flex; + justify-content: flex-end; + gap: 0.75rem; +} + +.notify-actions button { + border: none; + border-radius: 999px; + padding: 0.45rem 1.25rem; + font-weight: 600; + cursor: pointer; + background: linear-gradient(120deg, #38bdf8, #8b5cf6); + color: #0f172a; + transition: opacity 0.2s ease; + + &:disabled { + opacity: 0.5; + cursor: not-allowed; + } +} + +.notify-actions .ghost-button { + background: transparent; + color: #e2e8f0; +} + +.channel-health { + display: flex; + gap: 0.75rem; + align-items: center; + padding: 0.75rem 1rem; + border-radius: 12px; + background: #0b1220; + border: 1px solid #1d2a44; +} + +.status-pill { + padding: 0.25rem 0.75rem; + border-radius: 999px; + font-size: 0.8rem; + text-transform: uppercase; + letter-spacing: 0.08em; + border: 1px solid rgba(248, 250, 252, 0.3); +} + +.status-pill--healthy { + border-color: #34d399; + color: #34d399; +} + +.status-pill--warning { + border-color: #facc15; + color: #facc15; +} + +.status-pill--error { + border-color: #f87171; + color: #f87171; +} + +.channel-health__details p { + margin: 0; + font-size: 0.9rem; +} + +.channel-health__details small { + color: #94a3b8; +} + +.test-form h3 { + margin: 0; + font-size: 1rem; + color: #cbd5f5; +} + +.test-preview { + border: 1px solid #1f2937; + border-radius: 12px; + padding: 1rem; + background: #0b1220; + + header { + display: flex; + justify-content: space-between; + font-size: 0.9rem; + } + + p { + margin: 0.25rem 0; + font-size: 0.9rem; + } + + span { + font-weight: 600; + color: #cbd5f5; + } + + .preview-body { + font-family: 'JetBrains Mono', 'Fira Code', monospace; + background: #0f172a; + border-radius: 8px; + padding: 0.75rem; + } +} + +.deliveries-controls { + display: flex; + justify-content: flex-start; + gap: 1rem; +} + +.deliveries-controls label { + min-width: 140px; +} + +.deliveries-table { + overflow-x: auto; +} + +table { + width: 100%; + border-collapse: collapse; + font-size: 0.9rem; +} + +thead th { + text-align: left; + font-weight: 600; + padding-bottom: 0.5rem; + color: #cbd5f5; +} + +tbody td { + padding: 0.6rem 0.25rem; + border-top: 1px solid #1f2937; +} + +.empty-row { + text-align: center; + color: #94a3b8; + padding: 1rem 0; +} + +.status-badge { + display: inline-block; + padding: 0.2rem 0.6rem; + border-radius: 8px; + font-size: 0.75rem; + text-transform: uppercase; + border: 1px solid rgba(148, 163, 184, 0.5); +} + +.status-badge--sent { + 
border-color: #34d399; + color: #34d399; +} + +.status-badge--failed { + border-color: #f87171; + color: #f87171; +} + +.status-badge--throttled { + border-color: #facc15; + color: #facc15; +} + +@media (max-width: 720px) { + .notify-panel { + padding: 1.25rem; + } + + .notify-panel__header { + flex-direction: column; + align-items: flex-start; + } +} + diff --git a/src/StellaOps.Web/src/app/features/notify/notify-panel.component.spec.ts b/src/Web/StellaOps.Web/src/app/features/notify/notify-panel.component.spec.ts similarity index 96% rename from src/StellaOps.Web/src/app/features/notify/notify-panel.component.spec.ts rename to src/Web/StellaOps.Web/src/app/features/notify/notify-panel.component.spec.ts index 9e5eb0f7..528bef0d 100644 --- a/src/StellaOps.Web/src/app/features/notify/notify-panel.component.spec.ts +++ b/src/Web/StellaOps.Web/src/app/features/notify/notify-panel.component.spec.ts @@ -1,66 +1,66 @@ -import { ComponentFixture, TestBed } from '@angular/core/testing'; - -import { NOTIFY_API } from '../../core/api/notify.client'; -import { MockNotifyApiService } from '../../testing/mock-notify-api.service'; -import { NotifyPanelComponent } from './notify-panel.component'; - -describe('NotifyPanelComponent', () => { - let fixture: ComponentFixture<NotifyPanelComponent>; - let component: NotifyPanelComponent; - - beforeEach(async () => { - await TestBed.configureTestingModule({ - imports: [NotifyPanelComponent], - providers: [ - MockNotifyApiService, - { provide: NOTIFY_API, useExisting: MockNotifyApiService }, - ], - }).compileComponents(); - - fixture = TestBed.createComponent(NotifyPanelComponent); - component = fixture.componentInstance; - fixture.detectChanges(); - }); - - it('renders channels from the mocked API', async () => { - await component.refreshAll(); - fixture.detectChanges(); - const items: NodeListOf<HTMLButtonElement> = - fixture.nativeElement.querySelectorAll('[data-testid="channel-item"]'); - expect(items.length).toBeGreaterThan(0); - }); - - it('persists a new rule via the mocked API', async () => { - await component.refreshAll(); - fixture.detectChanges(); - - component.createRuleDraft(); - component.ruleForm.patchValue({ - name: 'Notify preview rule', - channel: component.channels()[0]?.channelId ?? 
'', - eventKindsText: 'scanner.report.ready', - labelsText: 'kev', - }); - - await component.saveRule(); - fixture.detectChanges(); - - const ruleButtons: HTMLElement[] = Array.from( - fixture.nativeElement.querySelectorAll('[data-testid="rule-item"]') - ); - expect( - ruleButtons.some((el) => el.textContent?.includes('Notify preview rule')) - ).toBeTrue(); - }); - - it('shows a test preview after sending', async () => { - await component.refreshAll(); - fixture.detectChanges(); - - await component.sendTestPreview(); - fixture.detectChanges(); - - const preview = fixture.nativeElement.querySelector('[data-testid="test-preview"]'); - expect(preview).toBeTruthy(); - }); -}); +import { ComponentFixture, TestBed } from '@angular/core/testing'; + +import { NOTIFY_API } from '../../core/api/notify.client'; +import { MockNotifyApiService } from '../../testing/mock-notify-api.service'; +import { NotifyPanelComponent } from './notify-panel.component'; + +describe('NotifyPanelComponent', () => { + let fixture: ComponentFixture<NotifyPanelComponent>; + let component: NotifyPanelComponent; + + beforeEach(async () => { + await TestBed.configureTestingModule({ + imports: [NotifyPanelComponent], + providers: [ + MockNotifyApiService, + { provide: NOTIFY_API, useExisting: MockNotifyApiService }, + ], + }).compileComponents(); + + fixture = TestBed.createComponent(NotifyPanelComponent); + component = fixture.componentInstance; + fixture.detectChanges(); + }); + + it('renders channels from the mocked API', async () => { + await component.refreshAll(); + fixture.detectChanges(); + const items: NodeListOf<HTMLButtonElement> = + fixture.nativeElement.querySelectorAll('[data-testid="channel-item"]'); + expect(items.length).toBeGreaterThan(0); + }); + + it('persists a new rule via the mocked API', async () => { + await component.refreshAll(); + fixture.detectChanges(); + + component.createRuleDraft(); + component.ruleForm.patchValue({ + name: 'Notify preview rule', + channel: component.channels()[0]?.channelId ?? 
'', + eventKindsText: 'scanner.report.ready', + labelsText: 'kev', + }); + + await component.saveRule(); + fixture.detectChanges(); + + const ruleButtons: HTMLElement[] = Array.from( + fixture.nativeElement.querySelectorAll('[data-testid="rule-item"]') + ); + expect( + ruleButtons.some((el) => el.textContent?.includes('Notify preview rule')) + ).toBeTrue(); + }); + + it('shows a test preview after sending', async () => { + await component.refreshAll(); + fixture.detectChanges(); + + await component.sendTestPreview(); + fixture.detectChanges(); + + const preview = fixture.nativeElement.querySelector('[data-testid="test-preview"]'); + expect(preview).toBeTruthy(); + }); +}); diff --git a/src/StellaOps.Web/src/app/features/notify/notify-panel.component.ts b/src/Web/StellaOps.Web/src/app/features/notify/notify-panel.component.ts similarity index 96% rename from src/StellaOps.Web/src/app/features/notify/notify-panel.component.ts rename to src/Web/StellaOps.Web/src/app/features/notify/notify-panel.component.ts index ef5329cb..dce20d9e 100644 --- a/src/StellaOps.Web/src/app/features/notify/notify-panel.component.ts +++ b/src/Web/StellaOps.Web/src/app/features/notify/notify-panel.component.ts @@ -1,642 +1,642 @@ -import { CommonModule } from '@angular/common'; -import { - ChangeDetectionStrategy, - Component, - OnInit, - computed, - inject, - signal, -} from '@angular/core'; -import { - NonNullableFormBuilder, - ReactiveFormsModule, - Validators, -} from '@angular/forms'; -import { firstValueFrom } from 'rxjs'; - -import { - NOTIFY_API, - NotifyApi, -} from '../../core/api/notify.client'; -import { - ChannelHealthResponse, - ChannelTestSendResponse, - NotifyChannel, - NotifyDelivery, - NotifyDeliveriesQueryOptions, - NotifyDeliveryStatus, - NotifyRule, - NotifyRuleAction, -} from '../../core/api/notify.models'; - -type DeliveryFilter = - | 'all' - | 'pending' - | 'sent' - | 'failed' - | 'throttled' - | 'digested' - | 'dropped'; - -@Component({ - selector: 'app-notify-panel', - standalone: true, - imports: [CommonModule, ReactiveFormsModule], - templateUrl: './notify-panel.component.html', - styleUrls: ['./notify-panel.component.scss'], - changeDetection: ChangeDetectionStrategy.OnPush, -}) -export class NotifyPanelComponent implements OnInit { - private readonly api = inject<NotifyApi>(NOTIFY_API); - private readonly formBuilder = inject(NonNullableFormBuilder); - - private readonly tenantId = signal<string>('tenant-dev'); - - readonly channelTypes: readonly NotifyChannel['type'][] = [ - 'Slack', - 'Teams', - 'Email', - 'Webhook', - 'Custom', - ]; - - readonly severityOptions = ['critical', 'high', 'medium', 'low']; - - readonly channels = signal<NotifyChannel[]>([]); - readonly selectedChannelId = signal<string | null>(null); - readonly channelLoading = signal(false); - readonly channelMessage = signal<string | null>(null); - readonly channelHealth = signal<ChannelHealthResponse | null>(null); - readonly testPreview = signal<ChannelTestSendResponse | null>(null); - readonly testSending = signal(false); - - readonly rules = signal<NotifyRule[]>([]); - readonly selectedRuleId = signal<string | null>(null); - readonly ruleLoading = signal(false); - readonly ruleMessage = signal<string | null>(null); - - readonly deliveries = signal<NotifyDelivery[]>([]); - readonly deliveriesLoading = signal(false); - readonly deliveriesMessage = signal<string | null>(null); - readonly deliveryFilter = signal<DeliveryFilter>('all'); - - readonly filteredDeliveries = computed(() => { - const filter = 
this.deliveryFilter(); - const items = this.deliveries(); - if (filter === 'all') { - return items; - } - return items.filter((item) => - item.status.toLowerCase() === filter - ); - }); - - readonly channelForm = this.formBuilder.group({ - channelId: this.formBuilder.control(''), - name: this.formBuilder.control('', { - validators: [Validators.required], - }), - displayName: this.formBuilder.control(''), - description: this.formBuilder.control(''), - type: this.formBuilder.control<NotifyChannel['type']>('Slack'), - target: this.formBuilder.control(''), - endpoint: this.formBuilder.control(''), - secretRef: this.formBuilder.control('', { - validators: [Validators.required], - }), - enabled: this.formBuilder.control(true), - labelsText: this.formBuilder.control(''), - metadataText: this.formBuilder.control(''), - }); - - readonly ruleForm = this.formBuilder.group({ - ruleId: this.formBuilder.control(''), - name: this.formBuilder.control('', { - validators: [Validators.required], - }), - description: this.formBuilder.control(''), - enabled: this.formBuilder.control(true), - minSeverity: this.formBuilder.control('critical'), - eventKindsText: this.formBuilder.control('scanner.report.ready'), - labelsText: this.formBuilder.control('kev,critical'), - channel: this.formBuilder.control('', { - validators: [Validators.required], - }), - digest: this.formBuilder.control('instant'), - template: this.formBuilder.control('tmpl-critical'), - locale: this.formBuilder.control('en-US'), - throttleSeconds: this.formBuilder.control(300), - }); - - readonly testForm = this.formBuilder.group({ - title: this.formBuilder.control('Policy verdict update'), - summary: this.formBuilder.control('Mock preview of Notify payload.'), - body: this.formBuilder.control( - 'Sample preview body rendered by the mocked Notify API service.' 
- ), - textBody: this.formBuilder.control(''), - target: this.formBuilder.control(''), - }); - - async ngOnInit(): Promise<void> { - await this.refreshAll(); - } - - async refreshAll(): Promise<void> { - await Promise.all([ - this.loadChannels(), - this.loadRules(), - this.loadDeliveries(), - ]); - } - - async loadChannels(): Promise<void> { - this.channelLoading.set(true); - this.channelMessage.set(null); - try { - const channels = await firstValueFrom(this.api.listChannels()); - this.channels.set(channels); - if (channels.length) { - this.tenantId.set(channels[0].tenantId); - } - if (!this.selectedChannelId() && channels.length) { - this.selectChannel(channels[0].channelId); - } - } catch (error) { - this.channelMessage.set(this.toErrorMessage(error)); - } finally { - this.channelLoading.set(false); - } - } - - async loadRules(): Promise<void> { - this.ruleLoading.set(true); - this.ruleMessage.set(null); - try { - const rules = await firstValueFrom(this.api.listRules()); - this.rules.set(rules); - if (!this.selectedRuleId() && rules.length) { - this.selectRule(rules[0].ruleId); - } - if (!this.ruleForm.controls.channel.value && this.channels().length) { - this.ruleForm.patchValue({ channel: this.channels()[0].channelId }); - } - } catch (error) { - this.ruleMessage.set(this.toErrorMessage(error)); - } finally { - this.ruleLoading.set(false); - } - } - - async loadDeliveries(): Promise<void> { - this.deliveriesLoading.set(true); - this.deliveriesMessage.set(null); - try { - const options: NotifyDeliveriesQueryOptions = { - status: this.mapFilterToStatus(this.deliveryFilter()), - limit: 15, - }; - const response = await firstValueFrom( - this.api.listDeliveries(options) - ); - this.deliveries.set([...(response.items ?? [])]); - } catch (error) { - this.deliveriesMessage.set(this.toErrorMessage(error)); - } finally { - this.deliveriesLoading.set(false); - } - } - - selectChannel(channelId: string): void { - const channel = this.channels().find((c) => c.channelId === channelId); - if (!channel) { - return; - } - this.selectedChannelId.set(channelId); - this.channelForm.patchValue({ - channelId: channel.channelId, - name: channel.name, - displayName: channel.displayName ?? '', - description: channel.description ?? '', - type: channel.type, - target: channel.config.target ?? '', - endpoint: channel.config.endpoint ?? '', - secretRef: channel.config.secretRef, - enabled: channel.enabled, - labelsText: this.formatKeyValueMap(channel.labels), - metadataText: this.formatKeyValueMap(channel.metadata), - }); - this.testPreview.set(null); - void this.loadChannelHealth(channelId); - } - - selectRule(ruleId: string): void { - const rule = this.rules().find((r) => r.ruleId === ruleId); - if (!rule) { - return; - } - this.selectedRuleId.set(ruleId); - const action = rule.actions?.[0]; - this.ruleForm.patchValue({ - ruleId: rule.ruleId, - name: rule.name, - description: rule.description ?? '', - enabled: rule.enabled, - minSeverity: rule.match?.minSeverity ?? '', - eventKindsText: this.formatList(rule.match?.eventKinds ?? []), - labelsText: this.formatList(rule.match?.labels ?? []), - channel: action?.channel ?? this.channels()[0]?.channelId ?? '', - digest: action?.digest ?? '', - template: action?.template ?? '', - locale: action?.locale ?? 
'', - throttleSeconds: this.parseDuration(action?.throttle), - }); - } - - createChannelDraft(): void { - this.selectedChannelId.set(null); - this.channelForm.reset({ - channelId: '', - name: '', - displayName: '', - description: '', - type: 'Slack', - target: '', - endpoint: '', - secretRef: '', - enabled: true, - labelsText: '', - metadataText: '', - }); - this.channelHealth.set(null); - this.testPreview.set(null); - } - - createRuleDraft(): void { - this.selectedRuleId.set(null); - this.ruleForm.reset({ - ruleId: '', - name: '', - description: '', - enabled: true, - minSeverity: 'high', - eventKindsText: 'scanner.report.ready', - labelsText: '', - channel: this.channels()[0]?.channelId ?? '', - digest: 'instant', - template: '', - locale: 'en-US', - throttleSeconds: 0, - }); - } - - async saveChannel(): Promise<void> { - if (this.channelForm.invalid) { - this.channelForm.markAllAsTouched(); - return; - } - - this.channelLoading.set(true); - this.channelMessage.set(null); - - try { - const payload = this.buildChannelPayload(); - const saved = await firstValueFrom(this.api.saveChannel(payload)); - await this.loadChannels(); - this.selectChannel(saved.channelId); - this.channelMessage.set('Channel saved successfully.'); - } catch (error) { - this.channelMessage.set(this.toErrorMessage(error)); - } finally { - this.channelLoading.set(false); - } - } - - async deleteChannel(): Promise<void> { - const channelId = this.selectedChannelId(); - if (!channelId) { - return; - } - this.channelLoading.set(true); - this.channelMessage.set(null); - try { - await firstValueFrom(this.api.deleteChannel(channelId)); - await this.loadChannels(); - if (this.channels().length) { - this.selectChannel(this.channels()[0].channelId); - } else { - this.createChannelDraft(); - } - this.channelMessage.set('Channel deleted.'); - } catch (error) { - this.channelMessage.set(this.toErrorMessage(error)); - } finally { - this.channelLoading.set(false); - } - } - - async saveRule(): Promise<void> { - if (this.ruleForm.invalid) { - this.ruleForm.markAllAsTouched(); - return; - } - this.ruleLoading.set(true); - this.ruleMessage.set(null); - try { - const payload = this.buildRulePayload(); - const saved = await firstValueFrom(this.api.saveRule(payload)); - await this.loadRules(); - this.selectRule(saved.ruleId); - this.ruleMessage.set('Rule saved successfully.'); - } catch (error) { - this.ruleMessage.set(this.toErrorMessage(error)); - } finally { - this.ruleLoading.set(false); - } - } - - async deleteRule(): Promise<void> { - const ruleId = this.selectedRuleId(); - if (!ruleId) { - return; - } - this.ruleLoading.set(true); - this.ruleMessage.set(null); - try { - await firstValueFrom(this.api.deleteRule(ruleId)); - await this.loadRules(); - if (this.rules().length) { - this.selectRule(this.rules()[0].ruleId); - } else { - this.createRuleDraft(); - } - this.ruleMessage.set('Rule deleted.'); - } catch (error) { - this.ruleMessage.set(this.toErrorMessage(error)); - } finally { - this.ruleLoading.set(false); - } - } - - async sendTestPreview(): Promise<void> { - const channelId = this.selectedChannelId(); - if (!channelId) { - this.channelMessage.set('Select a channel before running a test send.'); - return; - } - this.testSending.set(true); - this.channelMessage.set(null); - try { - const payload = this.testForm.getRawValue(); - const response = await firstValueFrom( - this.api.testChannel(channelId, { - target: payload.target || undefined, - title: payload.title || undefined, - summary: payload.summary || undefined, - body: 
payload.body || undefined, - textBody: payload.textBody || undefined, - }) - ); - this.testPreview.set(response); - this.channelMessage.set('Test send queued successfully.'); - await this.loadDeliveries(); - } catch (error) { - this.channelMessage.set(this.toErrorMessage(error)); - } finally { - this.testSending.set(false); - } - } - - async refreshDeliveries(): Promise<void> { - await this.loadDeliveries(); - } - - onDeliveryFilterChange(rawValue: string): void { - const filter = this.isDeliveryFilter(rawValue) ? rawValue : 'all'; - this.deliveryFilter.set(filter); - void this.loadDeliveries(); - } - - trackByChannel = (_: number, item: NotifyChannel) => item.channelId; - trackByRule = (_: number, item: NotifyRule) => item.ruleId; - trackByDelivery = (_: number, item: NotifyDelivery) => item.deliveryId; - - private async loadChannelHealth(channelId: string): Promise<void> { - try { - const response = await firstValueFrom( - this.api.getChannelHealth(channelId) - ); - this.channelHealth.set(response); - } catch { - this.channelHealth.set(null); - } - } - - private buildChannelPayload(): NotifyChannel { - const raw = this.channelForm.getRawValue(); - const existing = this.channels().find((c) => c.channelId === raw.channelId); - const now = new Date().toISOString(); - const channelId = raw.channelId?.trim() || this.generateId('chn'); - const tenantId = existing?.tenantId ?? this.tenantId(); - - return { - schemaVersion: existing?.schemaVersion ?? '1.0', - channelId, - tenantId, - name: raw.name.trim(), - displayName: raw.displayName?.trim() || undefined, - description: raw.description?.trim() || undefined, - type: raw.type, - enabled: raw.enabled, - config: { - secretRef: raw.secretRef.trim(), - target: raw.target?.trim() || undefined, - endpoint: raw.endpoint?.trim() || undefined, - properties: existing?.config.properties ?? {}, - limits: existing?.config.limits, - }, - labels: this.parseKeyValueText(raw.labelsText), - metadata: this.parseKeyValueText(raw.metadataText), - createdBy: existing?.createdBy ?? 'ui@stella-ops.local', - createdAt: existing?.createdAt ?? now, - updatedBy: 'ui@stella-ops.local', - updatedAt: now, - }; - } - - private buildRulePayload(): NotifyRule { - const raw = this.ruleForm.getRawValue(); - const existing = this.rules().find((r) => r.ruleId === raw.ruleId); - const now = new Date().toISOString(); - const ruleId = raw.ruleId?.trim() || this.generateId('rule'); - - const action: NotifyRuleAction = { - actionId: existing?.actions?.[0]?.actionId ?? this.generateId('act'), - channel: raw.channel ?? this.channels()[0]?.channelId ?? '', - template: raw.template?.trim() || undefined, - digest: raw.digest?.trim() || undefined, - locale: raw.locale?.trim() || undefined, - throttle: - raw.throttleSeconds && raw.throttleSeconds > 0 - ? this.formatDuration(raw.throttleSeconds) - : null, - enabled: true, - metadata: existing?.actions?.[0]?.metadata ?? {}, - }; - - return { - schemaVersion: existing?.schemaVersion ?? '1.0', - ruleId, - tenantId: existing?.tenantId ?? this.tenantId(), - name: raw.name.trim(), - description: raw.description?.trim() || undefined, - enabled: raw.enabled, - match: { - eventKinds: this.parseList(raw.eventKindsText), - labels: this.parseList(raw.labelsText), - minSeverity: raw.minSeverity?.trim() || null, - }, - actions: [action], - labels: existing?.labels ?? {}, - metadata: existing?.metadata ?? {}, - createdBy: existing?.createdBy ?? 'ui@stella-ops.local', - createdAt: existing?.createdAt ?? 
now, - updatedBy: 'ui@stella-ops.local', - updatedAt: now, - }; - } - - private parseKeyValueText(value?: string | null): Record<string, string> { - const result: Record<string, string> = {}; - if (!value) { - return result; - } - value - .split(/\r?\n|,/) - .map((entry) => entry.trim()) - .filter(Boolean) - .forEach((entry) => { - const [key, ...rest] = entry.split('='); - if (!key) { - return; - } - result[key.trim()] = rest.join('=').trim(); - }); - return result; - } - - private formatKeyValueMap( - map?: Record<string, string> | null - ): string { - if (!map) { - return ''; - } - return Object.entries(map) - .map(([key, value]) => `${key}=${value}`) - .join('\n'); - } - - private parseList(value?: string | null): string[] { - if (!value) { - return []; - } - return value - .split(/\r?\n|,/) - .map((item) => item.trim()) - .filter(Boolean); - } - - private formatList(items: readonly string[]): string { - if (!items?.length) { - return ''; - } - return items.join('\n'); - } - - private parseDuration(duration?: string | null): number { - if (!duration) { - return 0; - } - if (duration.startsWith('PT')) { - const hours = extractNumber(duration, /([0-9]+)H/); - const minutes = extractNumber(duration, /([0-9]+)M/); - const seconds = extractNumber(duration, /([0-9]+)S/); - return hours * 3600 + minutes * 60 + seconds; - } - const parts = duration.split(':').map((p) => Number.parseInt(p, 10)); - if (parts.length === 3) { - return parts[0] * 3600 + parts[1] * 60 + parts[2]; - } - return Number.parseInt(duration, 10) || 0; - } - - private formatDuration(seconds: number): string { - const clamped = Math.max(0, Math.floor(seconds)); - const hrs = Math.floor(clamped / 3600); - const mins = Math.floor((clamped % 3600) / 60); - const secs = clamped % 60; - let result = 'PT'; - if (hrs) { - result += `${hrs}H`; - } - if (mins) { - result += `${mins}M`; - } - if (secs || result === 'PT') { - result += `${secs}S`; - } - return result; - } - - private mapFilterToStatus( - filter: DeliveryFilter - ): NotifyDeliveryStatus | undefined { - switch (filter) { - case 'pending': - return 'Pending'; - case 'sent': - return 'Sent'; - case 'failed': - return 'Failed'; - case 'throttled': - return 'Throttled'; - case 'digested': - return 'Digested'; - case 'dropped': - return 'Dropped'; - default: - return undefined; - } - } - - private isDeliveryFilter(value: string): value is DeliveryFilter { - return ( - value === 'all' || - value === 'pending' || - value === 'sent' || - value === 'failed' || - value === 'throttled' || - value === 'digested' || - value === 'dropped' - ); - } - - private toErrorMessage(error: unknown): string { - if (error instanceof Error) { - return error.message; - } - if (typeof error === 'string') { - return error; - } - return 'Operation failed. Please retry.'; - } - - private generateId(prefix: string): string { - return `${prefix}-${Math.random().toString(36).slice(2, 10)}`; - } -} - -function extractNumber(source: string, pattern: RegExp): number { - const match = source.match(pattern); - return match ? 
Number.parseInt(match[1], 10) : 0; -} +import { CommonModule } from '@angular/common'; +import { + ChangeDetectionStrategy, + Component, + OnInit, + computed, + inject, + signal, +} from '@angular/core'; +import { + NonNullableFormBuilder, + ReactiveFormsModule, + Validators, +} from '@angular/forms'; +import { firstValueFrom } from 'rxjs'; + +import { + NOTIFY_API, + NotifyApi, +} from '../../core/api/notify.client'; +import { + ChannelHealthResponse, + ChannelTestSendResponse, + NotifyChannel, + NotifyDelivery, + NotifyDeliveriesQueryOptions, + NotifyDeliveryStatus, + NotifyRule, + NotifyRuleAction, +} from '../../core/api/notify.models'; + +type DeliveryFilter = + | 'all' + | 'pending' + | 'sent' + | 'failed' + | 'throttled' + | 'digested' + | 'dropped'; + +@Component({ + selector: 'app-notify-panel', + standalone: true, + imports: [CommonModule, ReactiveFormsModule], + templateUrl: './notify-panel.component.html', + styleUrls: ['./notify-panel.component.scss'], + changeDetection: ChangeDetectionStrategy.OnPush, +}) +export class NotifyPanelComponent implements OnInit { + private readonly api = inject<NotifyApi>(NOTIFY_API); + private readonly formBuilder = inject(NonNullableFormBuilder); + + private readonly tenantId = signal<string>('tenant-dev'); + + readonly channelTypes: readonly NotifyChannel['type'][] = [ + 'Slack', + 'Teams', + 'Email', + 'Webhook', + 'Custom', + ]; + + readonly severityOptions = ['critical', 'high', 'medium', 'low']; + + readonly channels = signal<NotifyChannel[]>([]); + readonly selectedChannelId = signal<string | null>(null); + readonly channelLoading = signal(false); + readonly channelMessage = signal<string | null>(null); + readonly channelHealth = signal<ChannelHealthResponse | null>(null); + readonly testPreview = signal<ChannelTestSendResponse | null>(null); + readonly testSending = signal(false); + + readonly rules = signal<NotifyRule[]>([]); + readonly selectedRuleId = signal<string | null>(null); + readonly ruleLoading = signal(false); + readonly ruleMessage = signal<string | null>(null); + + readonly deliveries = signal<NotifyDelivery[]>([]); + readonly deliveriesLoading = signal(false); + readonly deliveriesMessage = signal<string | null>(null); + readonly deliveryFilter = signal<DeliveryFilter>('all'); + + readonly filteredDeliveries = computed(() => { + const filter = this.deliveryFilter(); + const items = this.deliveries(); + if (filter === 'all') { + return items; + } + return items.filter((item) => + item.status.toLowerCase() === filter + ); + }); + + readonly channelForm = this.formBuilder.group({ + channelId: this.formBuilder.control(''), + name: this.formBuilder.control('', { + validators: [Validators.required], + }), + displayName: this.formBuilder.control(''), + description: this.formBuilder.control(''), + type: this.formBuilder.control<NotifyChannel['type']>('Slack'), + target: this.formBuilder.control(''), + endpoint: this.formBuilder.control(''), + secretRef: this.formBuilder.control('', { + validators: [Validators.required], + }), + enabled: this.formBuilder.control(true), + labelsText: this.formBuilder.control(''), + metadataText: this.formBuilder.control(''), + }); + + readonly ruleForm = this.formBuilder.group({ + ruleId: this.formBuilder.control(''), + name: this.formBuilder.control('', { + validators: [Validators.required], + }), + description: this.formBuilder.control(''), + enabled: this.formBuilder.control(true), + minSeverity: this.formBuilder.control('critical'), + eventKindsText: 
this.formBuilder.control('scanner.report.ready'), + labelsText: this.formBuilder.control('kev,critical'), + channel: this.formBuilder.control('', { + validators: [Validators.required], + }), + digest: this.formBuilder.control('instant'), + template: this.formBuilder.control('tmpl-critical'), + locale: this.formBuilder.control('en-US'), + throttleSeconds: this.formBuilder.control(300), + }); + + readonly testForm = this.formBuilder.group({ + title: this.formBuilder.control('Policy verdict update'), + summary: this.formBuilder.control('Mock preview of Notify payload.'), + body: this.formBuilder.control( + 'Sample preview body rendered by the mocked Notify API service.' + ), + textBody: this.formBuilder.control(''), + target: this.formBuilder.control(''), + }); + + async ngOnInit(): Promise<void> { + await this.refreshAll(); + } + + async refreshAll(): Promise<void> { + await Promise.all([ + this.loadChannels(), + this.loadRules(), + this.loadDeliveries(), + ]); + } + + async loadChannels(): Promise<void> { + this.channelLoading.set(true); + this.channelMessage.set(null); + try { + const channels = await firstValueFrom(this.api.listChannels()); + this.channels.set(channels); + if (channels.length) { + this.tenantId.set(channels[0].tenantId); + } + if (!this.selectedChannelId() && channels.length) { + this.selectChannel(channels[0].channelId); + } + } catch (error) { + this.channelMessage.set(this.toErrorMessage(error)); + } finally { + this.channelLoading.set(false); + } + } + + async loadRules(): Promise<void> { + this.ruleLoading.set(true); + this.ruleMessage.set(null); + try { + const rules = await firstValueFrom(this.api.listRules()); + this.rules.set(rules); + if (!this.selectedRuleId() && rules.length) { + this.selectRule(rules[0].ruleId); + } + if (!this.ruleForm.controls.channel.value && this.channels().length) { + this.ruleForm.patchValue({ channel: this.channels()[0].channelId }); + } + } catch (error) { + this.ruleMessage.set(this.toErrorMessage(error)); + } finally { + this.ruleLoading.set(false); + } + } + + async loadDeliveries(): Promise<void> { + this.deliveriesLoading.set(true); + this.deliveriesMessage.set(null); + try { + const options: NotifyDeliveriesQueryOptions = { + status: this.mapFilterToStatus(this.deliveryFilter()), + limit: 15, + }; + const response = await firstValueFrom( + this.api.listDeliveries(options) + ); + this.deliveries.set([...(response.items ?? [])]); + } catch (error) { + this.deliveriesMessage.set(this.toErrorMessage(error)); + } finally { + this.deliveriesLoading.set(false); + } + } + + selectChannel(channelId: string): void { + const channel = this.channels().find((c) => c.channelId === channelId); + if (!channel) { + return; + } + this.selectedChannelId.set(channelId); + this.channelForm.patchValue({ + channelId: channel.channelId, + name: channel.name, + displayName: channel.displayName ?? '', + description: channel.description ?? '', + type: channel.type, + target: channel.config.target ?? '', + endpoint: channel.config.endpoint ?? 
'', + secretRef: channel.config.secretRef, + enabled: channel.enabled, + labelsText: this.formatKeyValueMap(channel.labels), + metadataText: this.formatKeyValueMap(channel.metadata), + }); + this.testPreview.set(null); + void this.loadChannelHealth(channelId); + } + + selectRule(ruleId: string): void { + const rule = this.rules().find((r) => r.ruleId === ruleId); + if (!rule) { + return; + } + this.selectedRuleId.set(ruleId); + const action = rule.actions?.[0]; + this.ruleForm.patchValue({ + ruleId: rule.ruleId, + name: rule.name, + description: rule.description ?? '', + enabled: rule.enabled, + minSeverity: rule.match?.minSeverity ?? '', + eventKindsText: this.formatList(rule.match?.eventKinds ?? []), + labelsText: this.formatList(rule.match?.labels ?? []), + channel: action?.channel ?? this.channels()[0]?.channelId ?? '', + digest: action?.digest ?? '', + template: action?.template ?? '', + locale: action?.locale ?? '', + throttleSeconds: this.parseDuration(action?.throttle), + }); + } + + createChannelDraft(): void { + this.selectedChannelId.set(null); + this.channelForm.reset({ + channelId: '', + name: '', + displayName: '', + description: '', + type: 'Slack', + target: '', + endpoint: '', + secretRef: '', + enabled: true, + labelsText: '', + metadataText: '', + }); + this.channelHealth.set(null); + this.testPreview.set(null); + } + + createRuleDraft(): void { + this.selectedRuleId.set(null); + this.ruleForm.reset({ + ruleId: '', + name: '', + description: '', + enabled: true, + minSeverity: 'high', + eventKindsText: 'scanner.report.ready', + labelsText: '', + channel: this.channels()[0]?.channelId ?? '', + digest: 'instant', + template: '', + locale: 'en-US', + throttleSeconds: 0, + }); + } + + async saveChannel(): Promise<void> { + if (this.channelForm.invalid) { + this.channelForm.markAllAsTouched(); + return; + } + + this.channelLoading.set(true); + this.channelMessage.set(null); + + try { + const payload = this.buildChannelPayload(); + const saved = await firstValueFrom(this.api.saveChannel(payload)); + await this.loadChannels(); + this.selectChannel(saved.channelId); + this.channelMessage.set('Channel saved successfully.'); + } catch (error) { + this.channelMessage.set(this.toErrorMessage(error)); + } finally { + this.channelLoading.set(false); + } + } + + async deleteChannel(): Promise<void> { + const channelId = this.selectedChannelId(); + if (!channelId) { + return; + } + this.channelLoading.set(true); + this.channelMessage.set(null); + try { + await firstValueFrom(this.api.deleteChannel(channelId)); + await this.loadChannels(); + if (this.channels().length) { + this.selectChannel(this.channels()[0].channelId); + } else { + this.createChannelDraft(); + } + this.channelMessage.set('Channel deleted.'); + } catch (error) { + this.channelMessage.set(this.toErrorMessage(error)); + } finally { + this.channelLoading.set(false); + } + } + + async saveRule(): Promise<void> { + if (this.ruleForm.invalid) { + this.ruleForm.markAllAsTouched(); + return; + } + this.ruleLoading.set(true); + this.ruleMessage.set(null); + try { + const payload = this.buildRulePayload(); + const saved = await firstValueFrom(this.api.saveRule(payload)); + await this.loadRules(); + this.selectRule(saved.ruleId); + this.ruleMessage.set('Rule saved successfully.'); + } catch (error) { + this.ruleMessage.set(this.toErrorMessage(error)); + } finally { + this.ruleLoading.set(false); + } + } + + async deleteRule(): Promise<void> { + const ruleId = this.selectedRuleId(); + if (!ruleId) { + return; + } + 
this.ruleLoading.set(true); + this.ruleMessage.set(null); + try { + await firstValueFrom(this.api.deleteRule(ruleId)); + await this.loadRules(); + if (this.rules().length) { + this.selectRule(this.rules()[0].ruleId); + } else { + this.createRuleDraft(); + } + this.ruleMessage.set('Rule deleted.'); + } catch (error) { + this.ruleMessage.set(this.toErrorMessage(error)); + } finally { + this.ruleLoading.set(false); + } + } + + async sendTestPreview(): Promise<void> { + const channelId = this.selectedChannelId(); + if (!channelId) { + this.channelMessage.set('Select a channel before running a test send.'); + return; + } + this.testSending.set(true); + this.channelMessage.set(null); + try { + const payload = this.testForm.getRawValue(); + const response = await firstValueFrom( + this.api.testChannel(channelId, { + target: payload.target || undefined, + title: payload.title || undefined, + summary: payload.summary || undefined, + body: payload.body || undefined, + textBody: payload.textBody || undefined, + }) + ); + this.testPreview.set(response); + this.channelMessage.set('Test send queued successfully.'); + await this.loadDeliveries(); + } catch (error) { + this.channelMessage.set(this.toErrorMessage(error)); + } finally { + this.testSending.set(false); + } + } + + async refreshDeliveries(): Promise<void> { + await this.loadDeliveries(); + } + + onDeliveryFilterChange(rawValue: string): void { + const filter = this.isDeliveryFilter(rawValue) ? rawValue : 'all'; + this.deliveryFilter.set(filter); + void this.loadDeliveries(); + } + + trackByChannel = (_: number, item: NotifyChannel) => item.channelId; + trackByRule = (_: number, item: NotifyRule) => item.ruleId; + trackByDelivery = (_: number, item: NotifyDelivery) => item.deliveryId; + + private async loadChannelHealth(channelId: string): Promise<void> { + try { + const response = await firstValueFrom( + this.api.getChannelHealth(channelId) + ); + this.channelHealth.set(response); + } catch { + this.channelHealth.set(null); + } + } + + private buildChannelPayload(): NotifyChannel { + const raw = this.channelForm.getRawValue(); + const existing = this.channels().find((c) => c.channelId === raw.channelId); + const now = new Date().toISOString(); + const channelId = raw.channelId?.trim() || this.generateId('chn'); + const tenantId = existing?.tenantId ?? this.tenantId(); + + return { + schemaVersion: existing?.schemaVersion ?? '1.0', + channelId, + tenantId, + name: raw.name.trim(), + displayName: raw.displayName?.trim() || undefined, + description: raw.description?.trim() || undefined, + type: raw.type, + enabled: raw.enabled, + config: { + secretRef: raw.secretRef.trim(), + target: raw.target?.trim() || undefined, + endpoint: raw.endpoint?.trim() || undefined, + properties: existing?.config.properties ?? {}, + limits: existing?.config.limits, + }, + labels: this.parseKeyValueText(raw.labelsText), + metadata: this.parseKeyValueText(raw.metadataText), + createdBy: existing?.createdBy ?? 'ui@stella-ops.local', + createdAt: existing?.createdAt ?? now, + updatedBy: 'ui@stella-ops.local', + updatedAt: now, + }; + } + + private buildRulePayload(): NotifyRule { + const raw = this.ruleForm.getRawValue(); + const existing = this.rules().find((r) => r.ruleId === raw.ruleId); + const now = new Date().toISOString(); + const ruleId = raw.ruleId?.trim() || this.generateId('rule'); + + const action: NotifyRuleAction = { + actionId: existing?.actions?.[0]?.actionId ?? this.generateId('act'), + channel: raw.channel ?? this.channels()[0]?.channelId ?? 
'', + template: raw.template?.trim() || undefined, + digest: raw.digest?.trim() || undefined, + locale: raw.locale?.trim() || undefined, + throttle: + raw.throttleSeconds && raw.throttleSeconds > 0 + ? this.formatDuration(raw.throttleSeconds) + : null, + enabled: true, + metadata: existing?.actions?.[0]?.metadata ?? {}, + }; + + return { + schemaVersion: existing?.schemaVersion ?? '1.0', + ruleId, + tenantId: existing?.tenantId ?? this.tenantId(), + name: raw.name.trim(), + description: raw.description?.trim() || undefined, + enabled: raw.enabled, + match: { + eventKinds: this.parseList(raw.eventKindsText), + labels: this.parseList(raw.labelsText), + minSeverity: raw.minSeverity?.trim() || null, + }, + actions: [action], + labels: existing?.labels ?? {}, + metadata: existing?.metadata ?? {}, + createdBy: existing?.createdBy ?? 'ui@stella-ops.local', + createdAt: existing?.createdAt ?? now, + updatedBy: 'ui@stella-ops.local', + updatedAt: now, + }; + } + + private parseKeyValueText(value?: string | null): Record<string, string> { + const result: Record<string, string> = {}; + if (!value) { + return result; + } + value + .split(/\r?\n|,/) + .map((entry) => entry.trim()) + .filter(Boolean) + .forEach((entry) => { + const [key, ...rest] = entry.split('='); + if (!key) { + return; + } + result[key.trim()] = rest.join('=').trim(); + }); + return result; + } + + private formatKeyValueMap( + map?: Record<string, string> | null + ): string { + if (!map) { + return ''; + } + return Object.entries(map) + .map(([key, value]) => `${key}=${value}`) + .join('\n'); + } + + private parseList(value?: string | null): string[] { + if (!value) { + return []; + } + return value + .split(/\r?\n|,/) + .map((item) => item.trim()) + .filter(Boolean); + } + + private formatList(items: readonly string[]): string { + if (!items?.length) { + return ''; + } + return items.join('\n'); + } + + private parseDuration(duration?: string | null): number { + if (!duration) { + return 0; + } + if (duration.startsWith('PT')) { + const hours = extractNumber(duration, /([0-9]+)H/); + const minutes = extractNumber(duration, /([0-9]+)M/); + const seconds = extractNumber(duration, /([0-9]+)S/); + return hours * 3600 + minutes * 60 + seconds; + } + const parts = duration.split(':').map((p) => Number.parseInt(p, 10)); + if (parts.length === 3) { + return parts[0] * 3600 + parts[1] * 60 + parts[2]; + } + return Number.parseInt(duration, 10) || 0; + } + + private formatDuration(seconds: number): string { + const clamped = Math.max(0, Math.floor(seconds)); + const hrs = Math.floor(clamped / 3600); + const mins = Math.floor((clamped % 3600) / 60); + const secs = clamped % 60; + let result = 'PT'; + if (hrs) { + result += `${hrs}H`; + } + if (mins) { + result += `${mins}M`; + } + if (secs || result === 'PT') { + result += `${secs}S`; + } + return result; + } + + private mapFilterToStatus( + filter: DeliveryFilter + ): NotifyDeliveryStatus | undefined { + switch (filter) { + case 'pending': + return 'Pending'; + case 'sent': + return 'Sent'; + case 'failed': + return 'Failed'; + case 'throttled': + return 'Throttled'; + case 'digested': + return 'Digested'; + case 'dropped': + return 'Dropped'; + default: + return undefined; + } + } + + private isDeliveryFilter(value: string): value is DeliveryFilter { + return ( + value === 'all' || + value === 'pending' || + value === 'sent' || + value === 'failed' || + value === 'throttled' || + value === 'digested' || + value === 'dropped' + ); + } + + private toErrorMessage(error: unknown): string { + if 
(error instanceof Error) { + return error.message; + } + if (typeof error === 'string') { + return error; + } + return 'Operation failed. Please retry.'; + } + + private generateId(prefix: string): string { + return `${prefix}-${Math.random().toString(36).slice(2, 10)}`; + } +} + +function extractNumber(source: string, pattern: RegExp): number { + const match = source.match(pattern); + return match ? Number.parseInt(match[1], 10) : 0; +} diff --git a/src/StellaOps.Web/src/app/features/scans/scan-attestation-panel.component.html b/src/Web/StellaOps.Web/src/app/features/scans/scan-attestation-panel.component.html similarity index 96% rename from src/StellaOps.Web/src/app/features/scans/scan-attestation-panel.component.html rename to src/Web/StellaOps.Web/src/app/features/scans/scan-attestation-panel.component.html index 436a4b27..a99b2e81 100644 --- a/src/StellaOps.Web/src/app/features/scans/scan-attestation-panel.component.html +++ b/src/Web/StellaOps.Web/src/app/features/scans/scan-attestation-panel.component.html @@ -1,39 +1,39 @@ -<section class="attestation-panel" [attr.data-status]="statusClass"> - <header class="attestation-header"> - <h2>Attestation</h2> - <span class="status-badge" [ngClass]="statusClass"> - {{ statusLabel }} - </span> - </header> - - <dl class="attestation-meta"> - <div> - <dt>Rekor UUID</dt> - <dd><code>{{ attestation.uuid }}</code></dd> - </div> - <div *ngIf="attestation.index !== undefined"> - <dt>Log index</dt> - <dd>{{ attestation.index }}</dd> - </div> - <div *ngIf="attestation.logUrl"> - <dt>Log URL</dt> - <dd> - <a - [href]="attestation.logUrl" - rel="noopener noreferrer" - target="_blank" - > - {{ attestation.logUrl }} - </a> - </dd> - </div> - <div *ngIf="attestation.checkedAt"> - <dt>Last checked</dt> - <dd>{{ attestation.checkedAt }}</dd> - </div> - <div *ngIf="attestation.statusMessage"> - <dt>Details</dt> - <dd>{{ attestation.statusMessage }}</dd> - </div> - </dl> -</section> +<section class="attestation-panel" [attr.data-status]="statusClass"> + <header class="attestation-header"> + <h2>Attestation</h2> + <span class="status-badge" [ngClass]="statusClass"> + {{ statusLabel }} + </span> + </header> + + <dl class="attestation-meta"> + <div> + <dt>Rekor UUID</dt> + <dd><code>{{ attestation.uuid }}</code></dd> + </div> + <div *ngIf="attestation.index !== undefined"> + <dt>Log index</dt> + <dd>{{ attestation.index }}</dd> + </div> + <div *ngIf="attestation.logUrl"> + <dt>Log URL</dt> + <dd> + <a + [href]="attestation.logUrl" + rel="noopener noreferrer" + target="_blank" + > + {{ attestation.logUrl }} + </a> + </dd> + </div> + <div *ngIf="attestation.checkedAt"> + <dt>Last checked</dt> + <dd>{{ attestation.checkedAt }}</dd> + </div> + <div *ngIf="attestation.statusMessage"> + <dt>Details</dt> + <dd>{{ attestation.statusMessage }}</dd> + </div> + </dl> +</section> diff --git a/src/StellaOps.Web/src/app/features/scans/scan-attestation-panel.component.scss b/src/Web/StellaOps.Web/src/app/features/scans/scan-attestation-panel.component.scss similarity index 94% rename from src/StellaOps.Web/src/app/features/scans/scan-attestation-panel.component.scss rename to src/Web/StellaOps.Web/src/app/features/scans/scan-attestation-panel.component.scss index 2ac2b218..fd135851 100644 --- a/src/StellaOps.Web/src/app/features/scans/scan-attestation-panel.component.scss +++ b/src/Web/StellaOps.Web/src/app/features/scans/scan-attestation-panel.component.scss @@ -1,75 +1,75 @@ -.attestation-panel { - border: 1px solid #1f2933; - border-radius: 8px; - padding: 1.25rem; - 
background: #111827; - color: #f8fafc; - display: grid; - gap: 1rem; -} - -.attestation-header { - display: flex; - align-items: center; - justify-content: space-between; -} - -.attestation-header h2 { - margin: 0; - font-size: 1.125rem; -} - -.status-badge { - display: inline-flex; - align-items: center; - padding: 0.35rem 0.75rem; - border-radius: 999px; - font-size: 0.875rem; - font-weight: 600; - text-transform: uppercase; - letter-spacing: 0.05em; -} - -.status-badge.verified { - background-color: rgba(34, 197, 94, 0.2); - color: #34d399; -} - -.status-badge.pending { - background-color: rgba(234, 179, 8, 0.2); - color: #eab308; -} - -.status-badge.failed { - background-color: rgba(248, 113, 113, 0.2); - color: #f87171; -} - -.attestation-meta { - margin: 0; - display: grid; - gap: 0.75rem; -} - -.attestation-meta div { - display: grid; - gap: 0.25rem; -} - -.attestation-meta dt { - font-size: 0.75rem; - text-transform: uppercase; - letter-spacing: 0.05em; - color: #9ca3af; -} - -.attestation-meta dd { - margin: 0; - font-family: 'JetBrains Mono', 'Fira Code', 'SFMono-Regular', monospace; - word-break: break-word; -} - -.attestation-meta a { - color: #60a5fa; - text-decoration: underline; -} +.attestation-panel { + border: 1px solid #1f2933; + border-radius: 8px; + padding: 1.25rem; + background: #111827; + color: #f8fafc; + display: grid; + gap: 1rem; +} + +.attestation-header { + display: flex; + align-items: center; + justify-content: space-between; +} + +.attestation-header h2 { + margin: 0; + font-size: 1.125rem; +} + +.status-badge { + display: inline-flex; + align-items: center; + padding: 0.35rem 0.75rem; + border-radius: 999px; + font-size: 0.875rem; + font-weight: 600; + text-transform: uppercase; + letter-spacing: 0.05em; +} + +.status-badge.verified { + background-color: rgba(34, 197, 94, 0.2); + color: #34d399; +} + +.status-badge.pending { + background-color: rgba(234, 179, 8, 0.2); + color: #eab308; +} + +.status-badge.failed { + background-color: rgba(248, 113, 113, 0.2); + color: #f87171; +} + +.attestation-meta { + margin: 0; + display: grid; + gap: 0.75rem; +} + +.attestation-meta div { + display: grid; + gap: 0.25rem; +} + +.attestation-meta dt { + font-size: 0.75rem; + text-transform: uppercase; + letter-spacing: 0.05em; + color: #9ca3af; +} + +.attestation-meta dd { + margin: 0; + font-family: 'JetBrains Mono', 'Fira Code', 'SFMono-Regular', monospace; + word-break: break-word; +} + +.attestation-meta a { + color: #60a5fa; + text-decoration: underline; +} diff --git a/src/StellaOps.Web/src/app/features/scans/scan-attestation-panel.component.spec.ts b/src/Web/StellaOps.Web/src/app/features/scans/scan-attestation-panel.component.spec.ts similarity index 97% rename from src/StellaOps.Web/src/app/features/scans/scan-attestation-panel.component.spec.ts rename to src/Web/StellaOps.Web/src/app/features/scans/scan-attestation-panel.component.spec.ts index 9c44876a..9db39a37 100644 --- a/src/StellaOps.Web/src/app/features/scans/scan-attestation-panel.component.spec.ts +++ b/src/Web/StellaOps.Web/src/app/features/scans/scan-attestation-panel.component.spec.ts @@ -1,55 +1,55 @@ -import { ComponentFixture, TestBed } from '@angular/core/testing'; -import { ScanAttestationPanelComponent } from './scan-attestation-panel.component'; - -describe('ScanAttestationPanelComponent', () => { - let component: ScanAttestationPanelComponent; - let fixture: ComponentFixture<ScanAttestationPanelComponent>; - - beforeEach(async () => { - await TestBed.configureTestingModule({ - imports: 
[ScanAttestationPanelComponent], - }).compileComponents(); - - fixture = TestBed.createComponent(ScanAttestationPanelComponent); - component = fixture.componentInstance; - }); - - it('renders verified attestation details', () => { - component.attestation = { - uuid: '1234', - status: 'verified', - index: 42, - logUrl: 'https://rekor.example', - checkedAt: '2025-10-23T10:05:00Z', - statusMessage: 'Rekor transparency log inclusion proof verified.', - }; - - fixture.detectChanges(); - - const element: HTMLElement = fixture.nativeElement; - expect(element.querySelector('.status-badge')?.textContent?.trim()).toBe( - 'Verified' - ); - expect(element.textContent).toContain('1234'); - expect(element.textContent).toContain('42'); - expect(element.textContent).toContain('https://rekor.example'); - }); - - it('renders failure message when attestation verification fails', () => { - component.attestation = { - uuid: 'abcd', - status: 'failed', - statusMessage: 'Verification failed: inclusion proof mismatch.', - }; - - fixture.detectChanges(); - - const element: HTMLElement = fixture.nativeElement; - expect(element.querySelector('.status-badge')?.textContent?.trim()).toBe( - 'Verification failed' - ); - expect(element.textContent).toContain( - 'Verification failed: inclusion proof mismatch.' - ); - }); -}); +import { ComponentFixture, TestBed } from '@angular/core/testing'; +import { ScanAttestationPanelComponent } from './scan-attestation-panel.component'; + +describe('ScanAttestationPanelComponent', () => { + let component: ScanAttestationPanelComponent; + let fixture: ComponentFixture<ScanAttestationPanelComponent>; + + beforeEach(async () => { + await TestBed.configureTestingModule({ + imports: [ScanAttestationPanelComponent], + }).compileComponents(); + + fixture = TestBed.createComponent(ScanAttestationPanelComponent); + component = fixture.componentInstance; + }); + + it('renders verified attestation details', () => { + component.attestation = { + uuid: '1234', + status: 'verified', + index: 42, + logUrl: 'https://rekor.example', + checkedAt: '2025-10-23T10:05:00Z', + statusMessage: 'Rekor transparency log inclusion proof verified.', + }; + + fixture.detectChanges(); + + const element: HTMLElement = fixture.nativeElement; + expect(element.querySelector('.status-badge')?.textContent?.trim()).toBe( + 'Verified' + ); + expect(element.textContent).toContain('1234'); + expect(element.textContent).toContain('42'); + expect(element.textContent).toContain('https://rekor.example'); + }); + + it('renders failure message when attestation verification fails', () => { + component.attestation = { + uuid: 'abcd', + status: 'failed', + statusMessage: 'Verification failed: inclusion proof mismatch.', + }; + + fixture.detectChanges(); + + const element: HTMLElement = fixture.nativeElement; + expect(element.querySelector('.status-badge')?.textContent?.trim()).toBe( + 'Verification failed' + ); + expect(element.textContent).toContain( + 'Verification failed: inclusion proof mismatch.' 
+ ); + }); +}); diff --git a/src/StellaOps.Web/src/app/features/scans/scan-attestation-panel.component.ts b/src/Web/StellaOps.Web/src/app/features/scans/scan-attestation-panel.component.ts similarity index 96% rename from src/StellaOps.Web/src/app/features/scans/scan-attestation-panel.component.ts rename to src/Web/StellaOps.Web/src/app/features/scans/scan-attestation-panel.component.ts index 55be5db5..7aaa54cc 100644 --- a/src/StellaOps.Web/src/app/features/scans/scan-attestation-panel.component.ts +++ b/src/Web/StellaOps.Web/src/app/features/scans/scan-attestation-panel.component.ts @@ -1,42 +1,42 @@ -import { CommonModule } from '@angular/common'; -import { - ChangeDetectionStrategy, - Component, - Input, -} from '@angular/core'; -import { - ScanAttestationStatus, - ScanAttestationStatusKind, -} from '../../core/api/scanner.models'; - -@Component({ - selector: 'app-scan-attestation-panel', - standalone: true, - imports: [CommonModule], - templateUrl: './scan-attestation-panel.component.html', - styleUrls: ['./scan-attestation-panel.component.scss'], - changeDetection: ChangeDetectionStrategy.OnPush, -}) -export class ScanAttestationPanelComponent { - @Input({ required: true }) attestation!: ScanAttestationStatus; - - get statusLabel(): string { - return this.toStatusLabel(this.attestation?.status); - } - - get statusClass(): string { - return this.attestation?.status ?? 'pending'; - } - - private toStatusLabel(status: ScanAttestationStatusKind | undefined): string { - switch (status) { - case 'verified': - return 'Verified'; - case 'failed': - return 'Verification failed'; - case 'pending': - default: - return 'Pending verification'; - } - } -} +import { CommonModule } from '@angular/common'; +import { + ChangeDetectionStrategy, + Component, + Input, +} from '@angular/core'; +import { + ScanAttestationStatus, + ScanAttestationStatusKind, +} from '../../core/api/scanner.models'; + +@Component({ + selector: 'app-scan-attestation-panel', + standalone: true, + imports: [CommonModule], + templateUrl: './scan-attestation-panel.component.html', + styleUrls: ['./scan-attestation-panel.component.scss'], + changeDetection: ChangeDetectionStrategy.OnPush, +}) +export class ScanAttestationPanelComponent { + @Input({ required: true }) attestation!: ScanAttestationStatus; + + get statusLabel(): string { + return this.toStatusLabel(this.attestation?.status); + } + + get statusClass(): string { + return this.attestation?.status ?? 
'pending'; + } + + private toStatusLabel(status: ScanAttestationStatusKind | undefined): string { + switch (status) { + case 'verified': + return 'Verified'; + case 'failed': + return 'Verification failed'; + case 'pending': + default: + return 'Pending verification'; + } + } +} diff --git a/src/StellaOps.Web/src/app/features/scans/scan-detail-page.component.html b/src/Web/StellaOps.Web/src/app/features/scans/scan-detail-page.component.html similarity index 96% rename from src/StellaOps.Web/src/app/features/scans/scan-detail-page.component.html rename to src/Web/StellaOps.Web/src/app/features/scans/scan-detail-page.component.html index 4c18bd3c..197baf5b 100644 --- a/src/StellaOps.Web/src/app/features/scans/scan-detail-page.component.html +++ b/src/Web/StellaOps.Web/src/app/features/scans/scan-detail-page.component.html @@ -1,52 +1,52 @@ -<section class="scan-detail"> - <header class="scan-detail__header"> - <h1>Scan Detail</h1> - <div class="scenario-toggle" role="group" aria-label="Scenario selector"> - <button - type="button" - class="scenario-button" - [class.active]="scenario() === 'verified'" - (click)="onSelectScenario('verified')" - data-scenario="verified" - > - Verified - </button> - <button - type="button" - class="scenario-button" - [class.active]="scenario() === 'failed'" - (click)="onSelectScenario('failed')" - data-scenario="failed" - > - Failure - </button> - </div> - </header> - - <section class="scan-summary"> - <h2>Image</h2> - <dl> - <div> - <dt>Scan ID</dt> - <dd>{{ scan().scanId }}</dd> - </div> - <div> - <dt>Image digest</dt> - <dd><code>{{ scan().imageDigest }}</code></dd> - </div> - <div> - <dt>Completed at</dt> - <dd>{{ scan().completedAt }}</dd> - </div> - </dl> - </section> - - <app-scan-attestation-panel - *ngIf="scan().attestation as attestation" - [attestation]="attestation" - /> - - <p *ngIf="!scan().attestation" class="attestation-empty"> - No attestation has been recorded for this scan. - </p> -</section> +<section class="scan-detail"> + <header class="scan-detail__header"> + <h1>Scan Detail</h1> + <div class="scenario-toggle" role="group" aria-label="Scenario selector"> + <button + type="button" + class="scenario-button" + [class.active]="scenario() === 'verified'" + (click)="onSelectScenario('verified')" + data-scenario="verified" + > + Verified + </button> + <button + type="button" + class="scenario-button" + [class.active]="scenario() === 'failed'" + (click)="onSelectScenario('failed')" + data-scenario="failed" + > + Failure + </button> + </div> + </header> + + <section class="scan-summary"> + <h2>Image</h2> + <dl> + <div> + <dt>Scan ID</dt> + <dd>{{ scan().scanId }}</dd> + </div> + <div> + <dt>Image digest</dt> + <dd><code>{{ scan().imageDigest }}</code></dd> + </div> + <div> + <dt>Completed at</dt> + <dd>{{ scan().completedAt }}</dd> + </div> + </dl> + </section> + + <app-scan-attestation-panel + *ngIf="scan().attestation as attestation" + [attestation]="attestation" + /> + + <p *ngIf="!scan().attestation" class="attestation-empty"> + No attestation has been recorded for this scan. 
+ </p> +</section> diff --git a/src/StellaOps.Web/src/app/features/scans/scan-detail-page.component.scss b/src/Web/StellaOps.Web/src/app/features/scans/scan-detail-page.component.scss similarity index 94% rename from src/StellaOps.Web/src/app/features/scans/scan-detail-page.component.scss rename to src/Web/StellaOps.Web/src/app/features/scans/scan-detail-page.component.scss index 0e6bf035..a508e5e2 100644 --- a/src/StellaOps.Web/src/app/features/scans/scan-detail-page.component.scss +++ b/src/Web/StellaOps.Web/src/app/features/scans/scan-detail-page.component.scss @@ -1,79 +1,79 @@ -.scan-detail { - display: grid; - gap: 1.5rem; - padding: 1.5rem; - color: #e2e8f0; - background: #0f172a; - min-height: calc(100vh - 120px); -} - -.scan-detail__header { - display: flex; - flex-wrap: wrap; - align-items: center; - justify-content: space-between; - gap: 1rem; -} - -.scan-detail__header h1 { - margin: 0; - font-size: 1.5rem; -} - -.scenario-toggle { - display: inline-flex; - border: 1px solid #1f2933; - border-radius: 999px; - overflow: hidden; -} - -.scenario-button { - background: transparent; - color: inherit; - border: none; - padding: 0.5rem 1.25rem; - cursor: pointer; - font-size: 0.9rem; - letter-spacing: 0.03em; - text-transform: uppercase; -} - -.scenario-button.active { - background: #1d4ed8; - color: #f8fafc; -} - -.scan-summary { - border: 1px solid #1f2933; - border-radius: 8px; - padding: 1.25rem; - background: #111827; -} - -.scan-summary h2 { - margin: 0 0 0.75rem 0; - font-size: 1.125rem; -} - -.scan-summary dl { - margin: 0; - display: grid; - gap: 0.75rem; -} - -.scan-summary dt { - font-size: 0.75rem; - text-transform: uppercase; - color: #94a3b8; -} - -.scan-summary dd { - margin: 0; - font-family: 'JetBrains Mono', 'Fira Code', 'SFMono-Regular', monospace; - word-break: break-word; -} - -.attestation-empty { - font-style: italic; - color: #94a3b8; -} +.scan-detail { + display: grid; + gap: 1.5rem; + padding: 1.5rem; + color: #e2e8f0; + background: #0f172a; + min-height: calc(100vh - 120px); +} + +.scan-detail__header { + display: flex; + flex-wrap: wrap; + align-items: center; + justify-content: space-between; + gap: 1rem; +} + +.scan-detail__header h1 { + margin: 0; + font-size: 1.5rem; +} + +.scenario-toggle { + display: inline-flex; + border: 1px solid #1f2933; + border-radius: 999px; + overflow: hidden; +} + +.scenario-button { + background: transparent; + color: inherit; + border: none; + padding: 0.5rem 1.25rem; + cursor: pointer; + font-size: 0.9rem; + letter-spacing: 0.03em; + text-transform: uppercase; +} + +.scenario-button.active { + background: #1d4ed8; + color: #f8fafc; +} + +.scan-summary { + border: 1px solid #1f2933; + border-radius: 8px; + padding: 1.25rem; + background: #111827; +} + +.scan-summary h2 { + margin: 0 0 0.75rem 0; + font-size: 1.125rem; +} + +.scan-summary dl { + margin: 0; + display: grid; + gap: 0.75rem; +} + +.scan-summary dt { + font-size: 0.75rem; + text-transform: uppercase; + color: #94a3b8; +} + +.scan-summary dd { + margin: 0; + font-family: 'JetBrains Mono', 'Fira Code', 'SFMono-Regular', monospace; + word-break: break-word; +} + +.attestation-empty { + font-style: italic; + color: #94a3b8; +} diff --git a/src/StellaOps.Web/src/app/features/scans/scan-detail-page.component.spec.ts b/src/Web/StellaOps.Web/src/app/features/scans/scan-detail-page.component.spec.ts similarity index 97% rename from src/StellaOps.Web/src/app/features/scans/scan-detail-page.component.spec.ts rename to 
src/Web/StellaOps.Web/src/app/features/scans/scan-detail-page.component.spec.ts index 1e5f6958..a9ffadd0 100644 --- a/src/StellaOps.Web/src/app/features/scans/scan-detail-page.component.spec.ts +++ b/src/Web/StellaOps.Web/src/app/features/scans/scan-detail-page.component.spec.ts @@ -1,50 +1,50 @@ -import { ComponentFixture, TestBed } from '@angular/core/testing'; -import { RouterTestingModule } from '@angular/router/testing'; -import { ScanDetailPageComponent } from './scan-detail-page.component'; -import { - scanDetailWithFailedAttestation, - scanDetailWithVerifiedAttestation, -} from '../../testing/scan-fixtures'; - -describe('ScanDetailPageComponent', () => { - let fixture: ComponentFixture<ScanDetailPageComponent>; - let component: ScanDetailPageComponent; - - beforeEach(async () => { - await TestBed.configureTestingModule({ - imports: [RouterTestingModule, ScanDetailPageComponent], - }).compileComponents(); - - fixture = TestBed.createComponent(ScanDetailPageComponent); - component = fixture.componentInstance; - }); - - it('shows the verified attestation scenario by default', () => { - fixture.detectChanges(); - - const element: HTMLElement = fixture.nativeElement; - expect(element.textContent).toContain( - scanDetailWithVerifiedAttestation.attestation?.uuid ?? '' - ); - expect(element.querySelector('.status-badge')?.textContent?.trim()).toBe( - 'Verified' - ); - }); - - it('switches to failure scenario when toggle is clicked', () => { - fixture.detectChanges(); - - const failureButton: HTMLButtonElement | null = - fixture.nativeElement.querySelector('[data-scenario="failed"]'); - failureButton?.click(); - fixture.detectChanges(); - - const element: HTMLElement = fixture.nativeElement; - expect(element.textContent).toContain( - scanDetailWithFailedAttestation.attestation?.uuid ?? '' - ); - expect(element.querySelector('.status-badge')?.textContent?.trim()).toBe( - 'Verification failed' - ); - }); -}); +import { ComponentFixture, TestBed } from '@angular/core/testing'; +import { RouterTestingModule } from '@angular/router/testing'; +import { ScanDetailPageComponent } from './scan-detail-page.component'; +import { + scanDetailWithFailedAttestation, + scanDetailWithVerifiedAttestation, +} from '../../testing/scan-fixtures'; + +describe('ScanDetailPageComponent', () => { + let fixture: ComponentFixture<ScanDetailPageComponent>; + let component: ScanDetailPageComponent; + + beforeEach(async () => { + await TestBed.configureTestingModule({ + imports: [RouterTestingModule, ScanDetailPageComponent], + }).compileComponents(); + + fixture = TestBed.createComponent(ScanDetailPageComponent); + component = fixture.componentInstance; + }); + + it('shows the verified attestation scenario by default', () => { + fixture.detectChanges(); + + const element: HTMLElement = fixture.nativeElement; + expect(element.textContent).toContain( + scanDetailWithVerifiedAttestation.attestation?.uuid ?? '' + ); + expect(element.querySelector('.status-badge')?.textContent?.trim()).toBe( + 'Verified' + ); + }); + + it('switches to failure scenario when toggle is clicked', () => { + fixture.detectChanges(); + + const failureButton: HTMLButtonElement | null = + fixture.nativeElement.querySelector('[data-scenario="failed"]'); + failureButton?.click(); + fixture.detectChanges(); + + const element: HTMLElement = fixture.nativeElement; + expect(element.textContent).toContain( + scanDetailWithFailedAttestation.attestation?.uuid ?? 
'' + ); + expect(element.querySelector('.status-badge')?.textContent?.trim()).toBe( + 'Verification failed' + ); + }); +}); diff --git a/src/StellaOps.Web/src/app/features/scans/scan-detail-page.component.ts b/src/Web/StellaOps.Web/src/app/features/scans/scan-detail-page.component.ts similarity index 96% rename from src/StellaOps.Web/src/app/features/scans/scan-detail-page.component.ts rename to src/Web/StellaOps.Web/src/app/features/scans/scan-detail-page.component.ts index 7bd933ec..6b98c46a 100644 --- a/src/StellaOps.Web/src/app/features/scans/scan-detail-page.component.ts +++ b/src/Web/StellaOps.Web/src/app/features/scans/scan-detail-page.component.ts @@ -1,62 +1,62 @@ -import { CommonModule } from '@angular/common'; -import { - ChangeDetectionStrategy, - Component, - computed, - inject, - signal, -} from '@angular/core'; -import { ActivatedRoute } from '@angular/router'; -import { ScanAttestationPanelComponent } from './scan-attestation-panel.component'; -import { ScanDetail } from '../../core/api/scanner.models'; -import { - scanDetailWithFailedAttestation, - scanDetailWithVerifiedAttestation, -} from '../../testing/scan-fixtures'; - -type Scenario = 'verified' | 'failed'; - -const SCENARIO_MAP: Record<Scenario, ScanDetail> = { - verified: scanDetailWithVerifiedAttestation, - failed: scanDetailWithFailedAttestation, -}; - -@Component({ - selector: 'app-scan-detail-page', - standalone: true, - imports: [CommonModule, ScanAttestationPanelComponent], - templateUrl: './scan-detail-page.component.html', - styleUrls: ['./scan-detail-page.component.scss'], - changeDetection: ChangeDetectionStrategy.OnPush, -}) -export class ScanDetailPageComponent { - private readonly route = inject(ActivatedRoute); - - readonly scenario = signal<Scenario>('verified'); - - readonly scan = computed<ScanDetail>(() => { - const current = this.scenario(); - return SCENARIO_MAP[current]; - }); - - constructor() { - const routeScenario = - (this.route.snapshot.queryParamMap.get('scenario') as Scenario | null) ?? 
- null; - if (routeScenario && routeScenario in SCENARIO_MAP) { - this.scenario.set(routeScenario); - return; - } - - const scanId = this.route.snapshot.paramMap.get('scanId'); - if (scanId === scanDetailWithFailedAttestation.scanId) { - this.scenario.set('failed'); - } else { - this.scenario.set('verified'); - } - } - - onSelectScenario(next: Scenario): void { - this.scenario.set(next); - } -} +import { CommonModule } from '@angular/common'; +import { + ChangeDetectionStrategy, + Component, + computed, + inject, + signal, +} from '@angular/core'; +import { ActivatedRoute } from '@angular/router'; +import { ScanAttestationPanelComponent } from './scan-attestation-panel.component'; +import { ScanDetail } from '../../core/api/scanner.models'; +import { + scanDetailWithFailedAttestation, + scanDetailWithVerifiedAttestation, +} from '../../testing/scan-fixtures'; + +type Scenario = 'verified' | 'failed'; + +const SCENARIO_MAP: Record<Scenario, ScanDetail> = { + verified: scanDetailWithVerifiedAttestation, + failed: scanDetailWithFailedAttestation, +}; + +@Component({ + selector: 'app-scan-detail-page', + standalone: true, + imports: [CommonModule, ScanAttestationPanelComponent], + templateUrl: './scan-detail-page.component.html', + styleUrls: ['./scan-detail-page.component.scss'], + changeDetection: ChangeDetectionStrategy.OnPush, +}) +export class ScanDetailPageComponent { + private readonly route = inject(ActivatedRoute); + + readonly scenario = signal<Scenario>('verified'); + + readonly scan = computed<ScanDetail>(() => { + const current = this.scenario(); + return SCENARIO_MAP[current]; + }); + + constructor() { + const routeScenario = + (this.route.snapshot.queryParamMap.get('scenario') as Scenario | null) ?? + null; + if (routeScenario && routeScenario in SCENARIO_MAP) { + this.scenario.set(routeScenario); + return; + } + + const scanId = this.route.snapshot.paramMap.get('scanId'); + if (scanId === scanDetailWithFailedAttestation.scanId) { + this.scenario.set('failed'); + } else { + this.scenario.set('verified'); + } + } + + onSelectScenario(next: Scenario): void { + this.scenario.set(next); + } +} diff --git a/src/StellaOps.Web/src/app/features/trivy-db-settings/trivy-db-settings-page.component.html b/src/Web/StellaOps.Web/src/app/features/trivy-db-settings/trivy-db-settings-page.component.html similarity index 96% rename from src/StellaOps.Web/src/app/features/trivy-db-settings/trivy-db-settings-page.component.html rename to src/Web/StellaOps.Web/src/app/features/trivy-db-settings/trivy-db-settings-page.component.html index 46a7596a..a312f689 100644 --- a/src/StellaOps.Web/src/app/features/trivy-db-settings/trivy-db-settings-page.component.html +++ b/src/Web/StellaOps.Web/src/app/features/trivy-db-settings/trivy-db-settings-page.component.html @@ -1,108 +1,108 @@ -<section class="settings-card" aria-labelledby="trivy-db-settings-heading"> - <header class="card-header"> - <div> - <h1 id="trivy-db-settings-heading">Trivy DB export settings</h1> - <p class="card-subtitle"> - Configure export behaviour for downstream mirrors. Changes apply on the next run. 
- </p> - </div> - <div class="header-actions"> - <button - type="button" - class="secondary" - (click)="loadSettings()" - [disabled]="isBusy" - > - Refresh - </button> - </div> - </header> - - <form [formGroup]="form" (ngSubmit)="onSave()" class="settings-form"> - <fieldset [disabled]="isBusy"> - <legend class="sr-only">Export toggles</legend> - - <label class="toggle"> - <input type="checkbox" formControlName="publishFull" /> - <span class="toggle-label"> - Publish full database exports - <span class="toggle-hint"> - Required for first-time consumers or when they fall behind. - </span> - </span> - </label> - - <label class="toggle"> - <input type="checkbox" formControlName="publishDelta" /> - <span class="toggle-label"> - Publish delta updates - <span class="toggle-hint"> - Incremental exports reduce bandwidth between full releases. - </span> - </span> - </label> - - <label class="toggle"> - <input type="checkbox" formControlName="includeFull" /> - <span class="toggle-label"> - Include full archive in offline bundle - <span class="toggle-hint"> - Bundles deliver everything required for air-gapped sites. - </span> - </span> - </label> - - <label class="toggle"> - <input type="checkbox" formControlName="includeDelta" /> - <span class="toggle-label"> - Include delta archive in offline bundle - <span class="toggle-hint"> - Provides faster incremental sync within offline kits. - </span> - </span> - </label> - </fieldset> - - <div class="form-actions"> - <button type="submit" class="primary" [disabled]="isBusy"> - Save changes - </button> - <button - type="button" - class="primary outline" - (click)="onRunExport()" - [disabled]="isBusy" - > - Run export now - </button> - </div> - </form> - - <div - *ngIf="message() as note" - class="status" - [class.status-success]="status() === 'success'" - [class.status-error]="status() === 'error'" - role="status" - > - {{ note }} - </div> - - <section *ngIf="lastRun() as run" class="last-run" aria-live="polite"> - <h2>Last triggered run</h2> - <dl> - <div> - <dt>Export ID</dt> - <dd>{{ run.exportId }}</dd> - </div> - <div> - <dt>Triggered</dt> - <dd>{{ run.triggeredAt | date : 'yyyy-MM-dd HH:mm:ss \'UTC\'' }}</dd> - </div> - <div> - <dt>Status</dt> - <dd>{{ run.status ?? 'pending' }}</dd> - </div> - </dl> - </section> -</section> +<section class="settings-card" aria-labelledby="trivy-db-settings-heading"> + <header class="card-header"> + <div> + <h1 id="trivy-db-settings-heading">Trivy DB export settings</h1> + <p class="card-subtitle"> + Configure export behaviour for downstream mirrors. Changes apply on the next run. + </p> + </div> + <div class="header-actions"> + <button + type="button" + class="secondary" + (click)="loadSettings()" + [disabled]="isBusy" + > + Refresh + </button> + </div> + </header> + + <form [formGroup]="form" (ngSubmit)="onSave()" class="settings-form"> + <fieldset [disabled]="isBusy"> + <legend class="sr-only">Export toggles</legend> + + <label class="toggle"> + <input type="checkbox" formControlName="publishFull" /> + <span class="toggle-label"> + Publish full database exports + <span class="toggle-hint"> + Required for first-time consumers or when they fall behind. + </span> + </span> + </label> + + <label class="toggle"> + <input type="checkbox" formControlName="publishDelta" /> + <span class="toggle-label"> + Publish delta updates + <span class="toggle-hint"> + Incremental exports reduce bandwidth between full releases. 
+ </span> + </span> + </label> + + <label class="toggle"> + <input type="checkbox" formControlName="includeFull" /> + <span class="toggle-label"> + Include full archive in offline bundle + <span class="toggle-hint"> + Bundles deliver everything required for air-gapped sites. + </span> + </span> + </label> + + <label class="toggle"> + <input type="checkbox" formControlName="includeDelta" /> + <span class="toggle-label"> + Include delta archive in offline bundle + <span class="toggle-hint"> + Provides faster incremental sync within offline kits. + </span> + </span> + </label> + </fieldset> + + <div class="form-actions"> + <button type="submit" class="primary" [disabled]="isBusy"> + Save changes + </button> + <button + type="button" + class="primary outline" + (click)="onRunExport()" + [disabled]="isBusy" + > + Run export now + </button> + </div> + </form> + + <div + *ngIf="message() as note" + class="status" + [class.status-success]="status() === 'success'" + [class.status-error]="status() === 'error'" + role="status" + > + {{ note }} + </div> + + <section *ngIf="lastRun() as run" class="last-run" aria-live="polite"> + <h2>Last triggered run</h2> + <dl> + <div> + <dt>Export ID</dt> + <dd>{{ run.exportId }}</dd> + </div> + <div> + <dt>Triggered</dt> + <dd>{{ run.triggeredAt | date : 'yyyy-MM-dd HH:mm:ss \'UTC\'' }}</dd> + </div> + <div> + <dt>Status</dt> + <dd>{{ run.status ?? 'pending' }}</dd> + </div> + </dl> + </section> +</section> diff --git a/src/StellaOps.Web/src/app/features/trivy-db-settings/trivy-db-settings-page.component.scss b/src/Web/StellaOps.Web/src/app/features/trivy-db-settings/trivy-db-settings-page.component.scss similarity index 94% rename from src/StellaOps.Web/src/app/features/trivy-db-settings/trivy-db-settings-page.component.scss rename to src/Web/StellaOps.Web/src/app/features/trivy-db-settings/trivy-db-settings-page.component.scss index ed018558..5359672a 100644 --- a/src/StellaOps.Web/src/app/features/trivy-db-settings/trivy-db-settings-page.component.scss +++ b/src/Web/StellaOps.Web/src/app/features/trivy-db-settings/trivy-db-settings-page.component.scss @@ -1,230 +1,230 @@ -:host { - display: block; -} - -.settings-card { - background-color: #ffffff; - border-radius: 16px; - padding: 1.75rem; - box-shadow: 0 10px 30px rgba(15, 23, 42, 0.08); - display: flex; - flex-direction: column; - gap: 1.5rem; -} - -.card-header { - display: flex; - justify-content: space-between; - gap: 1rem; - align-items: flex-start; -} - -.card-subtitle { - margin: 0.25rem 0 0; - color: #475569; - font-size: 0.95rem; -} - -.header-actions { - display: flex; - gap: 0.75rem; -} - -.settings-form { - display: flex; - flex-direction: column; - gap: 1rem; -} - -fieldset { - border: 0; - padding: 0; - margin: 0; - display: grid; - gap: 1rem; -} - -.toggle { - display: flex; - gap: 0.75rem; - align-items: flex-start; - background-color: #f8fafc; - border-radius: 12px; - padding: 0.9rem 1rem; - border: 1px solid rgba(148, 163, 184, 0.4); - transition: border-color 0.2s ease, background-color 0.2s ease; - - &:focus-within, - &:hover { - border-color: rgba(67, 40, 183, 0.5); - background-color: #eef2ff; - } - - input[type='checkbox'] { - margin-top: 0.2rem; - width: 1.1rem; - height: 1.1rem; - } -} - -.toggle-label { - font-weight: 600; - color: #0f172a; - display: flex; - flex-direction: column; - gap: 0.3rem; -} - -.toggle-hint { - font-weight: 400; - font-size: 0.9rem; - color: #475569; -} - -.form-actions { - display: flex; - flex-wrap: wrap; - gap: 0.75rem; -} - -button { - appearance: none; 
- border: none; - border-radius: 9999px; - padding: 0.55rem 1.35rem; - font-size: 0.95rem; - font-weight: 600; - cursor: pointer; - transition: transform 0.15s ease, box-shadow 0.15s ease; - - &:disabled { - cursor: not-allowed; - opacity: 0.6; - } -} - -button.primary { - color: #ffffff; - background: linear-gradient(135deg, #4338ca 0%, #7c3aed 100%); - box-shadow: 0 10px 20px rgba(79, 70, 229, 0.25); - - &:hover:not(:disabled), - &:focus-visible:not(:disabled) { - transform: translateY(-1px); - box-shadow: 0 12px 24px rgba(79, 70, 229, 0.35); - } -} - -button.primary.outline { - background: transparent; - color: #4338ca; - border: 1px solid rgba(79, 70, 229, 0.4); - box-shadow: none; - - &:hover:not(:disabled), - &:focus-visible:not(:disabled) { - background: rgba(79, 70, 229, 0.12); - } -} - -button.secondary { - background: rgba(148, 163, 184, 0.2); - color: #0f172a; - border: 1px solid rgba(148, 163, 184, 0.4); - - &:hover:not(:disabled), - &:focus-visible:not(:disabled) { - background: rgba(148, 163, 184, 0.35); - } -} - -.status { - padding: 0.85rem 1rem; - border-radius: 12px; - font-size: 0.95rem; - background-color: rgba(15, 23, 42, 0.05); - color: #0f172a; -} - -.status-success { - background-color: rgba(16, 185, 129, 0.15); - color: #065f46; -} - -.status-error { - background-color: rgba(239, 68, 68, 0.15); - color: #991b1b; -} - -.last-run { - border-top: 1px solid rgba(148, 163, 184, 0.4); - padding-top: 1rem; - - h2 { - font-size: 1.05rem; - font-weight: 600; - margin-bottom: 0.75rem; - color: #0f172a; - } - - dl { - display: grid; - grid-template-columns: repeat(auto-fit, minmax(160px, 1fr)); - gap: 0.75rem 1.5rem; - - div { - background: #f8fafc; - border-radius: 12px; - padding: 0.85rem 1rem; - border: 1px solid rgba(148, 163, 184, 0.35); - } - - dt { - font-weight: 600; - color: #334155; - margin-bottom: 0.3rem; - font-size: 0.85rem; - text-transform: uppercase; - letter-spacing: 0.06em; - } - - dd { - margin: 0; - color: #0f172a; - font-size: 0.95rem; - word-break: break-word; - } - } -} - -.sr-only { - position: absolute; - width: 1px; - height: 1px; - padding: 0; - margin: -1px; - overflow: hidden; - clip: rect(0, 0, 0, 0); - border: 0; -} - -@media (max-width: 640px) { - .card-header { - flex-direction: column; - align-items: stretch; - } - - .header-actions { - justify-content: flex-end; - } - - .form-actions { - flex-direction: column; - align-items: stretch; - } - - button { - width: 100%; - text-align: center; - } -} +:host { + display: block; +} + +.settings-card { + background-color: #ffffff; + border-radius: 16px; + padding: 1.75rem; + box-shadow: 0 10px 30px rgba(15, 23, 42, 0.08); + display: flex; + flex-direction: column; + gap: 1.5rem; +} + +.card-header { + display: flex; + justify-content: space-between; + gap: 1rem; + align-items: flex-start; +} + +.card-subtitle { + margin: 0.25rem 0 0; + color: #475569; + font-size: 0.95rem; +} + +.header-actions { + display: flex; + gap: 0.75rem; +} + +.settings-form { + display: flex; + flex-direction: column; + gap: 1rem; +} + +fieldset { + border: 0; + padding: 0; + margin: 0; + display: grid; + gap: 1rem; +} + +.toggle { + display: flex; + gap: 0.75rem; + align-items: flex-start; + background-color: #f8fafc; + border-radius: 12px; + padding: 0.9rem 1rem; + border: 1px solid rgba(148, 163, 184, 0.4); + transition: border-color 0.2s ease, background-color 0.2s ease; + + &:focus-within, + &:hover { + border-color: rgba(67, 40, 183, 0.5); + background-color: #eef2ff; + } + + input[type='checkbox'] { + margin-top: 
0.2rem; + width: 1.1rem; + height: 1.1rem; + } +} + +.toggle-label { + font-weight: 600; + color: #0f172a; + display: flex; + flex-direction: column; + gap: 0.3rem; +} + +.toggle-hint { + font-weight: 400; + font-size: 0.9rem; + color: #475569; +} + +.form-actions { + display: flex; + flex-wrap: wrap; + gap: 0.75rem; +} + +button { + appearance: none; + border: none; + border-radius: 9999px; + padding: 0.55rem 1.35rem; + font-size: 0.95rem; + font-weight: 600; + cursor: pointer; + transition: transform 0.15s ease, box-shadow 0.15s ease; + + &:disabled { + cursor: not-allowed; + opacity: 0.6; + } +} + +button.primary { + color: #ffffff; + background: linear-gradient(135deg, #4338ca 0%, #7c3aed 100%); + box-shadow: 0 10px 20px rgba(79, 70, 229, 0.25); + + &:hover:not(:disabled), + &:focus-visible:not(:disabled) { + transform: translateY(-1px); + box-shadow: 0 12px 24px rgba(79, 70, 229, 0.35); + } +} + +button.primary.outline { + background: transparent; + color: #4338ca; + border: 1px solid rgba(79, 70, 229, 0.4); + box-shadow: none; + + &:hover:not(:disabled), + &:focus-visible:not(:disabled) { + background: rgba(79, 70, 229, 0.12); + } +} + +button.secondary { + background: rgba(148, 163, 184, 0.2); + color: #0f172a; + border: 1px solid rgba(148, 163, 184, 0.4); + + &:hover:not(:disabled), + &:focus-visible:not(:disabled) { + background: rgba(148, 163, 184, 0.35); + } +} + +.status { + padding: 0.85rem 1rem; + border-radius: 12px; + font-size: 0.95rem; + background-color: rgba(15, 23, 42, 0.05); + color: #0f172a; +} + +.status-success { + background-color: rgba(16, 185, 129, 0.15); + color: #065f46; +} + +.status-error { + background-color: rgba(239, 68, 68, 0.15); + color: #991b1b; +} + +.last-run { + border-top: 1px solid rgba(148, 163, 184, 0.4); + padding-top: 1rem; + + h2 { + font-size: 1.05rem; + font-weight: 600; + margin-bottom: 0.75rem; + color: #0f172a; + } + + dl { + display: grid; + grid-template-columns: repeat(auto-fit, minmax(160px, 1fr)); + gap: 0.75rem 1.5rem; + + div { + background: #f8fafc; + border-radius: 12px; + padding: 0.85rem 1rem; + border: 1px solid rgba(148, 163, 184, 0.35); + } + + dt { + font-weight: 600; + color: #334155; + margin-bottom: 0.3rem; + font-size: 0.85rem; + text-transform: uppercase; + letter-spacing: 0.06em; + } + + dd { + margin: 0; + color: #0f172a; + font-size: 0.95rem; + word-break: break-word; + } + } +} + +.sr-only { + position: absolute; + width: 1px; + height: 1px; + padding: 0; + margin: -1px; + overflow: hidden; + clip: rect(0, 0, 0, 0); + border: 0; +} + +@media (max-width: 640px) { + .card-header { + flex-direction: column; + align-items: stretch; + } + + .header-actions { + justify-content: flex-end; + } + + .form-actions { + flex-direction: column; + align-items: stretch; + } + + button { + width: 100%; + text-align: center; + } +} diff --git a/src/StellaOps.Web/src/app/features/trivy-db-settings/trivy-db-settings-page.component.spec.ts b/src/Web/StellaOps.Web/src/app/features/trivy-db-settings/trivy-db-settings-page.component.spec.ts similarity index 96% rename from src/StellaOps.Web/src/app/features/trivy-db-settings/trivy-db-settings-page.component.spec.ts rename to src/Web/StellaOps.Web/src/app/features/trivy-db-settings/trivy-db-settings-page.component.spec.ts index 6634a025..20fe3e9b 100644 --- a/src/StellaOps.Web/src/app/features/trivy-db-settings/trivy-db-settings-page.component.spec.ts +++ b/src/Web/StellaOps.Web/src/app/features/trivy-db-settings/trivy-db-settings-page.component.spec.ts @@ -1,94 +1,94 @@ -import { 
ComponentFixture, TestBed, fakeAsync, tick } from '@angular/core/testing'; -import { of, throwError } from 'rxjs'; -import { - ConcelierExporterClient, - TrivyDbRunResponseDto, - TrivyDbSettingsDto, -} from '../../core/api/concelier-exporter.client'; -import { TrivyDbSettingsPageComponent } from './trivy-db-settings-page.component'; - -describe('TrivyDbSettingsPageComponent', () => { - let fixture: ComponentFixture<TrivyDbSettingsPageComponent>; - let component: TrivyDbSettingsPageComponent; - let client: jasmine.SpyObj<ConcelierExporterClient>; - - const settings: TrivyDbSettingsDto = { - publishFull: true, - publishDelta: false, - includeFull: true, - includeDelta: false, - }; - - beforeEach(async () => { - client = jasmine.createSpyObj<ConcelierExporterClient>( - 'ConcelierExporterClient', - ['getTrivyDbSettings', 'updateTrivyDbSettings', 'runTrivyDbExport'] - ); - - client.getTrivyDbSettings.and.returnValue(of(settings)); - client.updateTrivyDbSettings.and.returnValue(of(settings)); - client.runTrivyDbExport.and.returnValue( - of<TrivyDbRunResponseDto>({ - exportId: 'exp-1', - triggeredAt: '2025-10-21T12:00:00Z', - status: 'queued', - }) - ); - - await TestBed.configureTestingModule({ - imports: [TrivyDbSettingsPageComponent], - providers: [{ provide: ConcelierExporterClient, useValue: client }], - }).compileComponents(); - - fixture = TestBed.createComponent(TrivyDbSettingsPageComponent); - component = fixture.componentInstance; - }); - - it('loads existing settings on init', fakeAsync(() => { - fixture.detectChanges(); - tick(); - - expect(client.getTrivyDbSettings).toHaveBeenCalled(); - expect(component.form.value).toEqual(settings); - })); - - it('saves settings when submit is triggered', fakeAsync(async () => { - fixture.detectChanges(); - tick(); - - await component.onSave(); - - expect(client.updateTrivyDbSettings).toHaveBeenCalledWith(settings); - expect(component.status()).toBe('success'); - })); - - it('records error state when load fails', fakeAsync(() => { - client.getTrivyDbSettings.and.returnValue( - throwError(() => new Error('load failed')) - ); - - fixture = TestBed.createComponent(TrivyDbSettingsPageComponent); - component = fixture.componentInstance; - - fixture.detectChanges(); - tick(); - - expect(component.status()).toBe('error'); - expect(component.message()).toContain('load failed'); - })); - - it('triggers export run after saving overrides', fakeAsync(async () => { - fixture.detectChanges(); - tick(); - - await component.onRunExport(); - - expect(client.updateTrivyDbSettings).toHaveBeenCalled(); - expect(client.runTrivyDbExport).toHaveBeenCalled(); - expect(component.lastRun()).toEqual({ - exportId: 'exp-1', - triggeredAt: '2025-10-21T12:00:00Z', - status: 'queued', - }); - })); -}); +import { ComponentFixture, TestBed, fakeAsync, tick } from '@angular/core/testing'; +import { of, throwError } from 'rxjs'; +import { + ConcelierExporterClient, + TrivyDbRunResponseDto, + TrivyDbSettingsDto, +} from '../../core/api/concelier-exporter.client'; +import { TrivyDbSettingsPageComponent } from './trivy-db-settings-page.component'; + +describe('TrivyDbSettingsPageComponent', () => { + let fixture: ComponentFixture<TrivyDbSettingsPageComponent>; + let component: TrivyDbSettingsPageComponent; + let client: jasmine.SpyObj<ConcelierExporterClient>; + + const settings: TrivyDbSettingsDto = { + publishFull: true, + publishDelta: false, + includeFull: true, + includeDelta: false, + }; + + beforeEach(async () => { + client = jasmine.createSpyObj<ConcelierExporterClient>( + 
'ConcelierExporterClient', + ['getTrivyDbSettings', 'updateTrivyDbSettings', 'runTrivyDbExport'] + ); + + client.getTrivyDbSettings.and.returnValue(of(settings)); + client.updateTrivyDbSettings.and.returnValue(of(settings)); + client.runTrivyDbExport.and.returnValue( + of<TrivyDbRunResponseDto>({ + exportId: 'exp-1', + triggeredAt: '2025-10-21T12:00:00Z', + status: 'queued', + }) + ); + + await TestBed.configureTestingModule({ + imports: [TrivyDbSettingsPageComponent], + providers: [{ provide: ConcelierExporterClient, useValue: client }], + }).compileComponents(); + + fixture = TestBed.createComponent(TrivyDbSettingsPageComponent); + component = fixture.componentInstance; + }); + + it('loads existing settings on init', fakeAsync(() => { + fixture.detectChanges(); + tick(); + + expect(client.getTrivyDbSettings).toHaveBeenCalled(); + expect(component.form.value).toEqual(settings); + })); + + it('saves settings when submit is triggered', fakeAsync(async () => { + fixture.detectChanges(); + tick(); + + await component.onSave(); + + expect(client.updateTrivyDbSettings).toHaveBeenCalledWith(settings); + expect(component.status()).toBe('success'); + })); + + it('records error state when load fails', fakeAsync(() => { + client.getTrivyDbSettings.and.returnValue( + throwError(() => new Error('load failed')) + ); + + fixture = TestBed.createComponent(TrivyDbSettingsPageComponent); + component = fixture.componentInstance; + + fixture.detectChanges(); + tick(); + + expect(component.status()).toBe('error'); + expect(component.message()).toContain('load failed'); + })); + + it('triggers export run after saving overrides', fakeAsync(async () => { + fixture.detectChanges(); + tick(); + + await component.onRunExport(); + + expect(client.updateTrivyDbSettings).toHaveBeenCalled(); + expect(client.runTrivyDbExport).toHaveBeenCalled(); + expect(component.lastRun()).toEqual({ + exportId: 'exp-1', + triggeredAt: '2025-10-21T12:00:00Z', + status: 'queued', + }); + })); +}); diff --git a/src/StellaOps.Web/src/app/features/trivy-db-settings/trivy-db-settings-page.component.ts b/src/Web/StellaOps.Web/src/app/features/trivy-db-settings/trivy-db-settings-page.component.ts similarity index 96% rename from src/StellaOps.Web/src/app/features/trivy-db-settings/trivy-db-settings-page.component.ts rename to src/Web/StellaOps.Web/src/app/features/trivy-db-settings/trivy-db-settings-page.component.ts index 81f336fa..459c9c60 100644 --- a/src/StellaOps.Web/src/app/features/trivy-db-settings/trivy-db-settings-page.component.ts +++ b/src/Web/StellaOps.Web/src/app/features/trivy-db-settings/trivy-db-settings-page.component.ts @@ -1,135 +1,135 @@ -import { CommonModule } from '@angular/common'; -import { - ChangeDetectionStrategy, - Component, - inject, - OnInit, - signal, -} from '@angular/core'; -import { - NonNullableFormBuilder, - ReactiveFormsModule, -} from '@angular/forms'; -import { firstValueFrom } from 'rxjs'; -import { - ConcelierExporterClient, - TrivyDbRunResponseDto, - TrivyDbSettingsDto, -} from '../../core/api/concelier-exporter.client'; - -type StatusKind = 'idle' | 'loading' | 'saving' | 'running' | 'success' | 'error'; -type TrivyDbSettingsFormValue = TrivyDbSettingsDto; - -@Component({ - selector: 'app-trivy-db-settings-page', - standalone: true, - imports: [CommonModule, ReactiveFormsModule], - templateUrl: './trivy-db-settings-page.component.html', - styleUrls: ['./trivy-db-settings-page.component.scss'], - changeDetection: ChangeDetectionStrategy.OnPush, -}) -export class TrivyDbSettingsPageComponent 
implements OnInit { - private readonly client = inject(ConcelierExporterClient); - private readonly formBuilder = inject(NonNullableFormBuilder); - - readonly status = signal<StatusKind>('idle'); - readonly message = signal<string | null>(null); - readonly lastRun = signal<TrivyDbRunResponseDto | null>(null); - - readonly form = this.formBuilder.group<TrivyDbSettingsFormValue>({ - publishFull: true, - publishDelta: true, - includeFull: true, - includeDelta: true, - }); - - ngOnInit(): void { - void this.loadSettings(); - } - - async loadSettings(): Promise<void> { - this.status.set('loading'); - this.message.set(null); - - try { - const settings: TrivyDbSettingsDto = await firstValueFrom( - this.client.getTrivyDbSettings() - ); - this.form.patchValue(settings); - this.status.set('idle'); - } catch (error) { - this.status.set('error'); - this.message.set( - error instanceof Error - ? error.message - : 'Failed to load Trivy DB settings.' - ); - } - } - - async onSave(): Promise<void> { - this.status.set('saving'); - this.message.set(null); - - try { - const payload = this.buildPayload(); - const updated: TrivyDbSettingsDto = await firstValueFrom( - this.client.updateTrivyDbSettings(payload) - ); - this.form.patchValue(updated); - this.status.set('success'); - this.message.set('Settings saved successfully.'); - } catch (error) { - this.status.set('error'); - this.message.set( - error instanceof Error - ? error.message - : 'Unable to save settings. Please retry.' - ); - } - } - - async onRunExport(): Promise<void> { - this.status.set('running'); - this.message.set(null); - - try { - const payload = this.buildPayload(); - - // Persist overrides before triggering a run, ensuring parity. - await firstValueFrom(this.client.updateTrivyDbSettings(payload)); - const response: TrivyDbRunResponseDto = await firstValueFrom( - this.client.runTrivyDbExport(payload) - ); - - this.lastRun.set(response); - this.status.set('success'); - const formatted = new Date(response.triggeredAt).toISOString(); - this.message.set( - `Export run ${response.exportId} triggered at ${formatted}.` - ); - } catch (error) { - this.status.set('error'); - this.message.set( - error instanceof Error - ? error.message - : 'Failed to trigger export run. Please retry.' 
- ); - } - } - - get isBusy(): boolean { - const state = this.status(); - return state === 'loading' || state === 'saving' || state === 'running'; - } - - private buildPayload(): TrivyDbSettingsDto { - const raw = this.form.getRawValue(); - return { - publishFull: !!raw.publishFull, - publishDelta: !!raw.publishDelta, - includeFull: !!raw.includeFull, - includeDelta: !!raw.includeDelta, - }; - } -} +import { CommonModule } from '@angular/common'; +import { + ChangeDetectionStrategy, + Component, + inject, + OnInit, + signal, +} from '@angular/core'; +import { + NonNullableFormBuilder, + ReactiveFormsModule, +} from '@angular/forms'; +import { firstValueFrom } from 'rxjs'; +import { + ConcelierExporterClient, + TrivyDbRunResponseDto, + TrivyDbSettingsDto, +} from '../../core/api/concelier-exporter.client'; + +type StatusKind = 'idle' | 'loading' | 'saving' | 'running' | 'success' | 'error'; +type TrivyDbSettingsFormValue = TrivyDbSettingsDto; + +@Component({ + selector: 'app-trivy-db-settings-page', + standalone: true, + imports: [CommonModule, ReactiveFormsModule], + templateUrl: './trivy-db-settings-page.component.html', + styleUrls: ['./trivy-db-settings-page.component.scss'], + changeDetection: ChangeDetectionStrategy.OnPush, +}) +export class TrivyDbSettingsPageComponent implements OnInit { + private readonly client = inject(ConcelierExporterClient); + private readonly formBuilder = inject(NonNullableFormBuilder); + + readonly status = signal<StatusKind>('idle'); + readonly message = signal<string | null>(null); + readonly lastRun = signal<TrivyDbRunResponseDto | null>(null); + + readonly form = this.formBuilder.group<TrivyDbSettingsFormValue>({ + publishFull: true, + publishDelta: true, + includeFull: true, + includeDelta: true, + }); + + ngOnInit(): void { + void this.loadSettings(); + } + + async loadSettings(): Promise<void> { + this.status.set('loading'); + this.message.set(null); + + try { + const settings: TrivyDbSettingsDto = await firstValueFrom( + this.client.getTrivyDbSettings() + ); + this.form.patchValue(settings); + this.status.set('idle'); + } catch (error) { + this.status.set('error'); + this.message.set( + error instanceof Error + ? error.message + : 'Failed to load Trivy DB settings.' + ); + } + } + + async onSave(): Promise<void> { + this.status.set('saving'); + this.message.set(null); + + try { + const payload = this.buildPayload(); + const updated: TrivyDbSettingsDto = await firstValueFrom( + this.client.updateTrivyDbSettings(payload) + ); + this.form.patchValue(updated); + this.status.set('success'); + this.message.set('Settings saved successfully.'); + } catch (error) { + this.status.set('error'); + this.message.set( + error instanceof Error + ? error.message + : 'Unable to save settings. Please retry.' + ); + } + } + + async onRunExport(): Promise<void> { + this.status.set('running'); + this.message.set(null); + + try { + const payload = this.buildPayload(); + + // Persist overrides before triggering a run, ensuring parity. + await firstValueFrom(this.client.updateTrivyDbSettings(payload)); + const response: TrivyDbRunResponseDto = await firstValueFrom( + this.client.runTrivyDbExport(payload) + ); + + this.lastRun.set(response); + this.status.set('success'); + const formatted = new Date(response.triggeredAt).toISOString(); + this.message.set( + `Export run ${response.exportId} triggered at ${formatted}.` + ); + } catch (error) { + this.status.set('error'); + this.message.set( + error instanceof Error + ? error.message + : 'Failed to trigger export run. 
Please retry.' + ); + } + } + + get isBusy(): boolean { + const state = this.status(); + return state === 'loading' || state === 'saving' || state === 'running'; + } + + private buildPayload(): TrivyDbSettingsDto { + const raw = this.form.getRawValue(); + return { + publishFull: !!raw.publishFull, + publishDelta: !!raw.publishDelta, + includeFull: !!raw.includeFull, + includeDelta: !!raw.includeDelta, + }; + } +} diff --git a/src/StellaOps.Web/src/app/testing/mock-notify-api.service.ts b/src/Web/StellaOps.Web/src/app/testing/mock-notify-api.service.ts similarity index 96% rename from src/StellaOps.Web/src/app/testing/mock-notify-api.service.ts rename to src/Web/StellaOps.Web/src/app/testing/mock-notify-api.service.ts index d99ff837..27dff2ba 100644 --- a/src/StellaOps.Web/src/app/testing/mock-notify-api.service.ts +++ b/src/Web/StellaOps.Web/src/app/testing/mock-notify-api.service.ts @@ -1,290 +1,290 @@ -import { Injectable, signal } from '@angular/core'; -import { defer, Observable, of } from 'rxjs'; -import { delay } from 'rxjs/operators'; - -import { NotifyApi } from '../core/api/notify.client'; -import { - ChannelHealthResponse, - ChannelTestSendRequest, - ChannelTestSendResponse, - ChannelHealthStatus, - NotifyChannel, - NotifyDeliveriesQueryOptions, - NotifyDeliveriesResponse, - NotifyDelivery, - NotifyDeliveryRendered, - NotifyRule, -} from '../core/api/notify.models'; -import { - inferHealthStatus, - mockNotifyChannels, - mockNotifyDeliveries, - mockNotifyRules, - mockNotifyTenant, -} from './notify-fixtures'; - -const LATENCY_MS = 140; - -@Injectable({ providedIn: 'root' }) -export class MockNotifyApiService implements NotifyApi { - private readonly channels = signal<NotifyChannel[]>( - clone(mockNotifyChannels) - ); - private readonly rules = signal<NotifyRule[]>(clone(mockNotifyRules)); - private readonly deliveries = signal<NotifyDelivery[]>( - clone(mockNotifyDeliveries) - ); - - listChannels(): Observable<NotifyChannel[]> { - return this.simulate(() => this.channels()); - } - - saveChannel(channel: NotifyChannel): Observable<NotifyChannel> { - const next = this.enrichChannel(channel); - this.channels.update((items) => upsertById(items, next, (c) => c.channelId)); - return this.simulate(() => next); - } - - deleteChannel(channelId: string): Observable<void> { - this.channels.update((items) => items.filter((c) => c.channelId !== channelId)); - return this.simulate(() => undefined); - } - - getChannelHealth(channelId: string): Observable<ChannelHealthResponse> { - const channel = this.channels().find((c) => c.channelId === channelId); - const now = new Date().toISOString(); - const status: ChannelHealthStatus = channel - ? inferHealthStatus(channel.enabled, !!channel.config.target) - : 'Unhealthy'; - - const response: ChannelHealthResponse = { - tenantId: mockNotifyTenant, - channelId, - status, - message: - status === 'Healthy' - ? 'Channel configuration validated.' - : status === 'Degraded' - ? 'Channel disabled. Enable to resume deliveries.' - : 'Channel is missing a destination target or endpoint.', - checkedAt: now, - traceId: this.traceId(), - metadata: channel?.metadata ?? {}, - }; - - return this.simulate(() => response, 90); - } - - testChannel( - channelId: string, - payload: ChannelTestSendRequest - ): Observable<ChannelTestSendResponse> { - const channel = this.channels().find((c) => c.channelId === channelId); - const preview: NotifyDeliveryRendered = { - channelType: channel?.type ?? 'Slack', - format: channel?.type === 'Email' ? 
'Email' : 'Slack', - target: - payload.target ?? channel?.config.target ?? channel?.config.endpoint ?? 'demo@stella-ops.org', - title: payload.title ?? 'Notify preview — policy verdict change', - body: - payload.body ?? - 'Sample preview payload emitted by the mocked Notify API integration.', - summary: payload.summary ?? 'Mock delivery queued.', - textBody: payload.textBody, - locale: payload.locale ?? 'en-US', - attachments: payload.attachments ?? [], - }; - - const response: ChannelTestSendResponse = { - tenantId: mockNotifyTenant, - channelId, - preview, - queuedAt: new Date().toISOString(), - traceId: this.traceId(), - metadata: { - source: 'mock-service', - }, - }; - - this.appendDeliveryFromPreview(channelId, preview); - - return this.simulate(() => response, 180); - } - - listRules(): Observable<NotifyRule[]> { - return this.simulate(() => this.rules()); - } - - saveRule(rule: NotifyRule): Observable<NotifyRule> { - const next = this.enrichRule(rule); - this.rules.update((items) => upsertById(items, next, (r) => r.ruleId)); - return this.simulate(() => next); - } - - deleteRule(ruleId: string): Observable<void> { - this.rules.update((items) => items.filter((rule) => rule.ruleId !== ruleId)); - return this.simulate(() => undefined); - } - - listDeliveries( - options?: NotifyDeliveriesQueryOptions - ): Observable<NotifyDeliveriesResponse> { - const filtered = this.filterDeliveries(options); - const payload: NotifyDeliveriesResponse = { - items: filtered, - continuationToken: null, - count: filtered.length, - }; - return this.simulate(() => payload); - } - - private enrichChannel(channel: NotifyChannel): NotifyChannel { - const now = new Date().toISOString(); - const current = this.channels().find((c) => c.channelId === channel.channelId); - return { - schemaVersion: channel.schemaVersion ?? current?.schemaVersion ?? '1.0', - channelId: channel.channelId || this.randomId('chn'), - tenantId: channel.tenantId || mockNotifyTenant, - name: channel.name, - displayName: channel.displayName, - description: channel.description, - type: channel.type, - enabled: channel.enabled, - config: { - ...channel.config, - properties: channel.config.properties ?? current?.config.properties ?? {}, - }, - labels: channel.labels ?? current?.labels ?? {}, - metadata: channel.metadata ?? current?.metadata ?? {}, - createdBy: current?.createdBy ?? 'ui@stella-ops.org', - createdAt: current?.createdAt ?? now, - updatedBy: 'ui@stella-ops.org', - updatedAt: now, - }; - } - - private enrichRule(rule: NotifyRule): NotifyRule { - const now = new Date().toISOString(); - const current = this.rules().find((r) => r.ruleId === rule.ruleId); - return { - schemaVersion: rule.schemaVersion ?? current?.schemaVersion ?? '1.0', - ruleId: rule.ruleId || this.randomId('rule'), - tenantId: rule.tenantId || mockNotifyTenant, - name: rule.name, - description: rule.description, - enabled: rule.enabled, - match: rule.match, - actions: rule.actions?.length - ? rule.actions - : current?.actions ?? [], - labels: rule.labels ?? current?.labels ?? {}, - metadata: rule.metadata ?? current?.metadata ?? {}, - createdBy: current?.createdBy ?? 'ui@stella-ops.org', - createdAt: current?.createdAt ?? 
now, - updatedBy: 'ui@stella-ops.org', - updatedAt: now, - }; - } - - private appendDeliveryFromPreview( - channelId: string, - preview: NotifyDeliveryRendered - ): void { - const now = new Date().toISOString(); - const delivery: NotifyDelivery = { - deliveryId: this.randomId('dlv'), - tenantId: mockNotifyTenant, - ruleId: 'rule-critical-soc', - actionId: 'act-slack-critical', - eventId: cryptoRandomUuid(), - kind: 'notify.preview', - status: 'Sent', - statusReason: 'Preview enqueued (mock)', - rendered: preview, - attempts: [ - { - timestamp: now, - status: 'Enqueued', - statusCode: 202, - }, - { - timestamp: now, - status: 'Succeeded', - statusCode: 200, - }, - ], - metadata: { - previewChannel: channelId, - }, - createdAt: now, - sentAt: now, - completedAt: now, - }; - - this.deliveries.update((items) => [delivery, ...items].slice(0, 20)); - } - - private filterDeliveries( - options?: NotifyDeliveriesQueryOptions - ): NotifyDelivery[] { - const source = this.deliveries(); - const since = options?.since ? Date.parse(options.since) : null; - const status = options?.status; - - return source - .filter((item) => { - const matchStatus = status ? item.status === status : true; - const matchSince = since ? Date.parse(item.createdAt) >= since : true; - return matchStatus && matchSince; - }) - .slice(0, options?.limit ?? 15); - } - - private simulate<T>(factory: () => T, ms: number = LATENCY_MS): Observable<T> { - return defer(() => of(clone(factory()))).pipe(delay(ms)); - } - - private randomId(prefix: string): string { - const raw = cryptoRandomUuid().replace(/-/g, '').slice(0, 12); - return `${prefix}-${raw}`; - } - - private traceId(): string { - return `trace-${cryptoRandomUuid()}`; - } -} - -function upsertById<T>( - collection: readonly T[], - entity: T, - selector: (item: T) => string -): T[] { - const id = selector(entity); - const next = [...collection]; - const index = next.findIndex((item) => selector(item) === id); - if (index >= 0) { - next[index] = entity; - } else { - next.unshift(entity); - } - return next; -} - -function clone<T>(value: T): T { - if (typeof structuredClone === 'function') { - return structuredClone(value); - } - return JSON.parse(JSON.stringify(value)) as T; -} - -function cryptoRandomUuid(): string { - if (typeof crypto !== 'undefined' && crypto.randomUUID) { - return crypto.randomUUID(); - } - const template = 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'; - return template.replace(/[xy]/g, (c) => { - const r = (Math.random() * 16) | 0; - const v = c === 'x' ? 
r : (r & 0x3) | 0x8; - return v.toString(16); - }); -} +import { Injectable, signal } from '@angular/core'; +import { defer, Observable, of } from 'rxjs'; +import { delay } from 'rxjs/operators'; + +import { NotifyApi } from '../core/api/notify.client'; +import { + ChannelHealthResponse, + ChannelTestSendRequest, + ChannelTestSendResponse, + ChannelHealthStatus, + NotifyChannel, + NotifyDeliveriesQueryOptions, + NotifyDeliveriesResponse, + NotifyDelivery, + NotifyDeliveryRendered, + NotifyRule, +} from '../core/api/notify.models'; +import { + inferHealthStatus, + mockNotifyChannels, + mockNotifyDeliveries, + mockNotifyRules, + mockNotifyTenant, +} from './notify-fixtures'; + +const LATENCY_MS = 140; + +@Injectable({ providedIn: 'root' }) +export class MockNotifyApiService implements NotifyApi { + private readonly channels = signal<NotifyChannel[]>( + clone(mockNotifyChannels) + ); + private readonly rules = signal<NotifyRule[]>(clone(mockNotifyRules)); + private readonly deliveries = signal<NotifyDelivery[]>( + clone(mockNotifyDeliveries) + ); + + listChannels(): Observable<NotifyChannel[]> { + return this.simulate(() => this.channels()); + } + + saveChannel(channel: NotifyChannel): Observable<NotifyChannel> { + const next = this.enrichChannel(channel); + this.channels.update((items) => upsertById(items, next, (c) => c.channelId)); + return this.simulate(() => next); + } + + deleteChannel(channelId: string): Observable<void> { + this.channels.update((items) => items.filter((c) => c.channelId !== channelId)); + return this.simulate(() => undefined); + } + + getChannelHealth(channelId: string): Observable<ChannelHealthResponse> { + const channel = this.channels().find((c) => c.channelId === channelId); + const now = new Date().toISOString(); + const status: ChannelHealthStatus = channel + ? inferHealthStatus(channel.enabled, !!channel.config.target) + : 'Unhealthy'; + + const response: ChannelHealthResponse = { + tenantId: mockNotifyTenant, + channelId, + status, + message: + status === 'Healthy' + ? 'Channel configuration validated.' + : status === 'Degraded' + ? 'Channel disabled. Enable to resume deliveries.' + : 'Channel is missing a destination target or endpoint.', + checkedAt: now, + traceId: this.traceId(), + metadata: channel?.metadata ?? {}, + }; + + return this.simulate(() => response, 90); + } + + testChannel( + channelId: string, + payload: ChannelTestSendRequest + ): Observable<ChannelTestSendResponse> { + const channel = this.channels().find((c) => c.channelId === channelId); + const preview: NotifyDeliveryRendered = { + channelType: channel?.type ?? 'Slack', + format: channel?.type === 'Email' ? 'Email' : 'Slack', + target: + payload.target ?? channel?.config.target ?? channel?.config.endpoint ?? 'demo@stella-ops.org', + title: payload.title ?? 'Notify preview — policy verdict change', + body: + payload.body ?? + 'Sample preview payload emitted by the mocked Notify API integration.', + summary: payload.summary ?? 'Mock delivery queued.', + textBody: payload.textBody, + locale: payload.locale ?? 'en-US', + attachments: payload.attachments ?? 
[], + }; + + const response: ChannelTestSendResponse = { + tenantId: mockNotifyTenant, + channelId, + preview, + queuedAt: new Date().toISOString(), + traceId: this.traceId(), + metadata: { + source: 'mock-service', + }, + }; + + this.appendDeliveryFromPreview(channelId, preview); + + return this.simulate(() => response, 180); + } + + listRules(): Observable<NotifyRule[]> { + return this.simulate(() => this.rules()); + } + + saveRule(rule: NotifyRule): Observable<NotifyRule> { + const next = this.enrichRule(rule); + this.rules.update((items) => upsertById(items, next, (r) => r.ruleId)); + return this.simulate(() => next); + } + + deleteRule(ruleId: string): Observable<void> { + this.rules.update((items) => items.filter((rule) => rule.ruleId !== ruleId)); + return this.simulate(() => undefined); + } + + listDeliveries( + options?: NotifyDeliveriesQueryOptions + ): Observable<NotifyDeliveriesResponse> { + const filtered = this.filterDeliveries(options); + const payload: NotifyDeliveriesResponse = { + items: filtered, + continuationToken: null, + count: filtered.length, + }; + return this.simulate(() => payload); + } + + private enrichChannel(channel: NotifyChannel): NotifyChannel { + const now = new Date().toISOString(); + const current = this.channels().find((c) => c.channelId === channel.channelId); + return { + schemaVersion: channel.schemaVersion ?? current?.schemaVersion ?? '1.0', + channelId: channel.channelId || this.randomId('chn'), + tenantId: channel.tenantId || mockNotifyTenant, + name: channel.name, + displayName: channel.displayName, + description: channel.description, + type: channel.type, + enabled: channel.enabled, + config: { + ...channel.config, + properties: channel.config.properties ?? current?.config.properties ?? {}, + }, + labels: channel.labels ?? current?.labels ?? {}, + metadata: channel.metadata ?? current?.metadata ?? {}, + createdBy: current?.createdBy ?? 'ui@stella-ops.org', + createdAt: current?.createdAt ?? now, + updatedBy: 'ui@stella-ops.org', + updatedAt: now, + }; + } + + private enrichRule(rule: NotifyRule): NotifyRule { + const now = new Date().toISOString(); + const current = this.rules().find((r) => r.ruleId === rule.ruleId); + return { + schemaVersion: rule.schemaVersion ?? current?.schemaVersion ?? '1.0', + ruleId: rule.ruleId || this.randomId('rule'), + tenantId: rule.tenantId || mockNotifyTenant, + name: rule.name, + description: rule.description, + enabled: rule.enabled, + match: rule.match, + actions: rule.actions?.length + ? rule.actions + : current?.actions ?? [], + labels: rule.labels ?? current?.labels ?? {}, + metadata: rule.metadata ?? current?.metadata ?? {}, + createdBy: current?.createdBy ?? 'ui@stella-ops.org', + createdAt: current?.createdAt ?? 
now, + updatedBy: 'ui@stella-ops.org', + updatedAt: now, + }; + } + + private appendDeliveryFromPreview( + channelId: string, + preview: NotifyDeliveryRendered + ): void { + const now = new Date().toISOString(); + const delivery: NotifyDelivery = { + deliveryId: this.randomId('dlv'), + tenantId: mockNotifyTenant, + ruleId: 'rule-critical-soc', + actionId: 'act-slack-critical', + eventId: cryptoRandomUuid(), + kind: 'notify.preview', + status: 'Sent', + statusReason: 'Preview enqueued (mock)', + rendered: preview, + attempts: [ + { + timestamp: now, + status: 'Enqueued', + statusCode: 202, + }, + { + timestamp: now, + status: 'Succeeded', + statusCode: 200, + }, + ], + metadata: { + previewChannel: channelId, + }, + createdAt: now, + sentAt: now, + completedAt: now, + }; + + this.deliveries.update((items) => [delivery, ...items].slice(0, 20)); + } + + private filterDeliveries( + options?: NotifyDeliveriesQueryOptions + ): NotifyDelivery[] { + const source = this.deliveries(); + const since = options?.since ? Date.parse(options.since) : null; + const status = options?.status; + + return source + .filter((item) => { + const matchStatus = status ? item.status === status : true; + const matchSince = since ? Date.parse(item.createdAt) >= since : true; + return matchStatus && matchSince; + }) + .slice(0, options?.limit ?? 15); + } + + private simulate<T>(factory: () => T, ms: number = LATENCY_MS): Observable<T> { + return defer(() => of(clone(factory()))).pipe(delay(ms)); + } + + private randomId(prefix: string): string { + const raw = cryptoRandomUuid().replace(/-/g, '').slice(0, 12); + return `${prefix}-${raw}`; + } + + private traceId(): string { + return `trace-${cryptoRandomUuid()}`; + } +} + +function upsertById<T>( + collection: readonly T[], + entity: T, + selector: (item: T) => string +): T[] { + const id = selector(entity); + const next = [...collection]; + const index = next.findIndex((item) => selector(item) === id); + if (index >= 0) { + next[index] = entity; + } else { + next.unshift(entity); + } + return next; +} + +function clone<T>(value: T): T { + if (typeof structuredClone === 'function') { + return structuredClone(value); + } + return JSON.parse(JSON.stringify(value)) as T; +} + +function cryptoRandomUuid(): string { + if (typeof crypto !== 'undefined' && crypto.randomUUID) { + return crypto.randomUUID(); + } + const template = 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'; + return template.replace(/[xy]/g, (c) => { + const r = (Math.random() * 16) | 0; + const v = c === 'x' ? 
r : (r & 0x3) | 0x8; + return v.toString(16); + }); +} diff --git a/src/StellaOps.Web/src/app/testing/notify-fixtures.ts b/src/Web/StellaOps.Web/src/app/testing/notify-fixtures.ts similarity index 96% rename from src/StellaOps.Web/src/app/testing/notify-fixtures.ts rename to src/Web/StellaOps.Web/src/app/testing/notify-fixtures.ts index 6383ffdf..8e5f3386 100644 --- a/src/StellaOps.Web/src/app/testing/notify-fixtures.ts +++ b/src/Web/StellaOps.Web/src/app/testing/notify-fixtures.ts @@ -1,257 +1,257 @@ -import { - ChannelHealthStatus, - NotifyChannel, - NotifyDelivery, - NotifyDeliveryAttemptStatus, - NotifyDeliveryStatus, - NotifyRule, -} from '../core/api/notify.models'; - -export const mockNotifyTenant = 'tenant-dev'; - -export const mockNotifyChannels: NotifyChannel[] = [ - { - channelId: 'chn-slack-soc', - tenantId: mockNotifyTenant, - name: 'slack-soc', - displayName: 'Slack · SOC', - description: 'Critical scanner verdicts routed to the SOC war room.', - type: 'Slack', - enabled: true, - config: { - secretRef: 'ref://notify/slack/soc-token', - target: '#stellaops-soc', - properties: { - emoji: ':rotating_light:', - unfurl: 'false', - }, - }, - labels: { - tier: 'critical', - region: 'global', - }, - metadata: { - workspace: 'stellaops', - }, - createdBy: 'ops@stella-ops.org', - createdAt: '2025-10-10T08:12:00Z', - updatedBy: 'ops@stella-ops.org', - updatedAt: '2025-10-23T11:05:00Z', - }, - { - channelId: 'chn-email-comms', - tenantId: mockNotifyTenant, - name: 'email-compliance', - displayName: 'Email · Compliance Digest', - description: 'Hourly compliance digest for licensing/secrets alerts.', - type: 'Email', - enabled: true, - config: { - secretRef: 'ref://notify/smtp/compliance', - target: 'compliance@stella-ops.org', - }, - labels: { - cadence: 'hourly', - }, - metadata: { - smtpProfile: 'smtp.internal', - }, - createdBy: 'legal@stella-ops.org', - createdAt: '2025-10-08T14:31:00Z', - updatedBy: 'legal@stella-ops.org', - updatedAt: '2025-10-20T09:44:00Z', - }, - { - channelId: 'chn-webhook-intake', - tenantId: mockNotifyTenant, - name: 'webhook-opsbridge', - displayName: 'Webhook · OpsBridge', - description: 'Bridges Notify events into OpsBridge for automation.', - type: 'Webhook', - enabled: false, - config: { - secretRef: 'ref://notify/webhook/signing', - endpoint: 'https://opsbridge.internal/hooks/notify', - }, - labels: { - env: 'staging', - }, - metadata: { - signature: 'ed25519', - }, - createdBy: 'platform@stella-ops.org', - createdAt: '2025-10-05T12:01:00Z', - updatedBy: 'platform@stella-ops.org', - updatedAt: '2025-10-18T17:22:00Z', - }, -]; - -export const mockNotifyRules: NotifyRule[] = [ - { - ruleId: 'rule-critical-soc', - tenantId: mockNotifyTenant, - name: 'Critical scanner verdicts', - description: - 'Route KEV-tagged critical findings to SOC Slack with zero delay.', - enabled: true, - match: { - eventKinds: ['scanner.report.ready'], - labels: ['kev', 'critical'], - minSeverity: 'critical', - verdicts: ['block', 'escalate'], - kevOnly: true, - }, - actions: [ - { - actionId: 'act-slack-critical', - channel: 'chn-slack-soc', - template: 'tmpl-critical', - digest: 'instant', - throttle: 'PT300S', - locale: 'en-US', - enabled: true, - metadata: { - priority: 'p1', - }, - }, - ], - labels: { - owner: 'soc', - }, - metadata: { - revision: '12', - }, - createdBy: 'soc@stella-ops.org', - createdAt: '2025-10-12T10:02:00Z', - updatedBy: 'soc@stella-ops.org', - updatedAt: '2025-10-23T15:44:00Z', - }, - { - ruleId: 'rule-digest-compliance', - tenantId: mockNotifyTenant, - 
name: 'Compliance hourly digest', - description: 'Summarise licensing + secret alerts once per hour.', - enabled: true, - match: { - eventKinds: ['scanner.scan.completed', 'scanner.report.ready'], - labels: ['compliance'], - minSeverity: 'medium', - kevOnly: false, - vex: { - includeAcceptedJustifications: true, - includeRejectedJustifications: false, - includeUnknownJustifications: true, - justificationKinds: ['exploitable', 'component_not_present'], - }, - }, - actions: [ - { - actionId: 'act-email-compliance', - channel: 'chn-email-comms', - digest: '1h', - throttle: 'PT1H', - enabled: true, - metadata: { - layout: 'digest', - }, - }, - ], - labels: { - owner: 'compliance', - }, - metadata: { - frequency: 'hourly', - }, - createdBy: 'compliance@stella-ops.org', - createdAt: '2025-10-09T06:15:00Z', - updatedBy: 'compliance@stella-ops.org', - updatedAt: '2025-10-21T19:45:00Z', - }, -]; - -const deliveryStatuses: NotifyDeliveryStatus[] = [ - 'Sent', - 'Failed', - 'Throttled', -]; - -export const mockNotifyDeliveries: NotifyDelivery[] = deliveryStatuses.map( - (status, index) => { - const now = new Date('2025-10-24T12:00:00Z').getTime(); - const created = new Date(now - index * 20 * 60 * 1000).toISOString(); - const attemptsStatus: NotifyDeliveryAttemptStatus = - status === 'Sent' ? 'Succeeded' : status === 'Failed' ? 'Failed' : 'Throttled'; - - return { - deliveryId: `dlv-${index + 1}`, - tenantId: mockNotifyTenant, - ruleId: index === 0 ? 'rule-critical-soc' : 'rule-digest-compliance', - actionId: index === 0 ? 'act-slack-critical' : 'act-email-compliance', - eventId: `00000000-0000-0000-0000-${(index + 1) - .toString() - .padStart(12, '0')}`, - kind: index === 0 ? 'scanner.report.ready' : 'scanner.scan.completed', - status, - statusReason: - status === 'Sent' - ? 'Delivered' - : status === 'Failed' - ? 'Channel timeout (Slack API)' - : 'Rule throttled (digest window).', - rendered: { - channelType: index === 0 ? 'Slack' : 'Email', - format: index === 0 ? 'Slack' : 'Email', - target: index === 0 ? '#stellaops-soc' : 'compliance@stella-ops.org', - title: - index === 0 - ? 'Critical CVE flagged for registry.git.stella-ops.org' - : 'Hourly compliance digest (#23)', - body: - index === 0 - ? 'KEV CVE-2025-1234 detected in ubuntu:24.04. Rescan triggered.' - : '3 findings require compliance review. See attached report.', - summary: index === 0 ? 'Immediate attention required.' : 'Digest only.', - locale: 'en-US', - attachments: index === 0 ? [] : ['https://scanner.local/reports/digest-23'], - }, - attempts: [ - { - timestamp: created, - status: 'Sending', - statusCode: 202, - }, - { - timestamp: created, - status: attemptsStatus, - statusCode: status === 'Sent' ? 200 : 429, - reason: - status === 'Failed' - ? 'Slack API returned 504' - : status === 'Throttled' - ? 
'Digest window open' - : undefined, - }, - ], - metadata: { - batch: `window-${index + 1}`, - }, - createdAt: created, - sentAt: created, - completedAt: created, - } satisfies NotifyDelivery; - } -); - -export function inferHealthStatus( - enabled: boolean, - hasTarget: boolean -): ChannelHealthStatus { - if (!hasTarget) { - return 'Unhealthy'; - } - if (!enabled) { - return 'Degraded'; - } - return 'Healthy'; -} - +import { + ChannelHealthStatus, + NotifyChannel, + NotifyDelivery, + NotifyDeliveryAttemptStatus, + NotifyDeliveryStatus, + NotifyRule, +} from '../core/api/notify.models'; + +export const mockNotifyTenant = 'tenant-dev'; + +export const mockNotifyChannels: NotifyChannel[] = [ + { + channelId: 'chn-slack-soc', + tenantId: mockNotifyTenant, + name: 'slack-soc', + displayName: 'Slack · SOC', + description: 'Critical scanner verdicts routed to the SOC war room.', + type: 'Slack', + enabled: true, + config: { + secretRef: 'ref://notify/slack/soc-token', + target: '#stellaops-soc', + properties: { + emoji: ':rotating_light:', + unfurl: 'false', + }, + }, + labels: { + tier: 'critical', + region: 'global', + }, + metadata: { + workspace: 'stellaops', + }, + createdBy: 'ops@stella-ops.org', + createdAt: '2025-10-10T08:12:00Z', + updatedBy: 'ops@stella-ops.org', + updatedAt: '2025-10-23T11:05:00Z', + }, + { + channelId: 'chn-email-comms', + tenantId: mockNotifyTenant, + name: 'email-compliance', + displayName: 'Email · Compliance Digest', + description: 'Hourly compliance digest for licensing/secrets alerts.', + type: 'Email', + enabled: true, + config: { + secretRef: 'ref://notify/smtp/compliance', + target: 'compliance@stella-ops.org', + }, + labels: { + cadence: 'hourly', + }, + metadata: { + smtpProfile: 'smtp.internal', + }, + createdBy: 'legal@stella-ops.org', + createdAt: '2025-10-08T14:31:00Z', + updatedBy: 'legal@stella-ops.org', + updatedAt: '2025-10-20T09:44:00Z', + }, + { + channelId: 'chn-webhook-intake', + tenantId: mockNotifyTenant, + name: 'webhook-opsbridge', + displayName: 'Webhook · OpsBridge', + description: 'Bridges Notify events into OpsBridge for automation.', + type: 'Webhook', + enabled: false, + config: { + secretRef: 'ref://notify/webhook/signing', + endpoint: 'https://opsbridge.internal/hooks/notify', + }, + labels: { + env: 'staging', + }, + metadata: { + signature: 'ed25519', + }, + createdBy: 'platform@stella-ops.org', + createdAt: '2025-10-05T12:01:00Z', + updatedBy: 'platform@stella-ops.org', + updatedAt: '2025-10-18T17:22:00Z', + }, +]; + +export const mockNotifyRules: NotifyRule[] = [ + { + ruleId: 'rule-critical-soc', + tenantId: mockNotifyTenant, + name: 'Critical scanner verdicts', + description: + 'Route KEV-tagged critical findings to SOC Slack with zero delay.', + enabled: true, + match: { + eventKinds: ['scanner.report.ready'], + labels: ['kev', 'critical'], + minSeverity: 'critical', + verdicts: ['block', 'escalate'], + kevOnly: true, + }, + actions: [ + { + actionId: 'act-slack-critical', + channel: 'chn-slack-soc', + template: 'tmpl-critical', + digest: 'instant', + throttle: 'PT300S', + locale: 'en-US', + enabled: true, + metadata: { + priority: 'p1', + }, + }, + ], + labels: { + owner: 'soc', + }, + metadata: { + revision: '12', + }, + createdBy: 'soc@stella-ops.org', + createdAt: '2025-10-12T10:02:00Z', + updatedBy: 'soc@stella-ops.org', + updatedAt: '2025-10-23T15:44:00Z', + }, + { + ruleId: 'rule-digest-compliance', + tenantId: mockNotifyTenant, + name: 'Compliance hourly digest', + description: 'Summarise licensing + secret alerts once 
per hour.', + enabled: true, + match: { + eventKinds: ['scanner.scan.completed', 'scanner.report.ready'], + labels: ['compliance'], + minSeverity: 'medium', + kevOnly: false, + vex: { + includeAcceptedJustifications: true, + includeRejectedJustifications: false, + includeUnknownJustifications: true, + justificationKinds: ['exploitable', 'component_not_present'], + }, + }, + actions: [ + { + actionId: 'act-email-compliance', + channel: 'chn-email-comms', + digest: '1h', + throttle: 'PT1H', + enabled: true, + metadata: { + layout: 'digest', + }, + }, + ], + labels: { + owner: 'compliance', + }, + metadata: { + frequency: 'hourly', + }, + createdBy: 'compliance@stella-ops.org', + createdAt: '2025-10-09T06:15:00Z', + updatedBy: 'compliance@stella-ops.org', + updatedAt: '2025-10-21T19:45:00Z', + }, +]; + +const deliveryStatuses: NotifyDeliveryStatus[] = [ + 'Sent', + 'Failed', + 'Throttled', +]; + +export const mockNotifyDeliveries: NotifyDelivery[] = deliveryStatuses.map( + (status, index) => { + const now = new Date('2025-10-24T12:00:00Z').getTime(); + const created = new Date(now - index * 20 * 60 * 1000).toISOString(); + const attemptsStatus: NotifyDeliveryAttemptStatus = + status === 'Sent' ? 'Succeeded' : status === 'Failed' ? 'Failed' : 'Throttled'; + + return { + deliveryId: `dlv-${index + 1}`, + tenantId: mockNotifyTenant, + ruleId: index === 0 ? 'rule-critical-soc' : 'rule-digest-compliance', + actionId: index === 0 ? 'act-slack-critical' : 'act-email-compliance', + eventId: `00000000-0000-0000-0000-${(index + 1) + .toString() + .padStart(12, '0')}`, + kind: index === 0 ? 'scanner.report.ready' : 'scanner.scan.completed', + status, + statusReason: + status === 'Sent' + ? 'Delivered' + : status === 'Failed' + ? 'Channel timeout (Slack API)' + : 'Rule throttled (digest window).', + rendered: { + channelType: index === 0 ? 'Slack' : 'Email', + format: index === 0 ? 'Slack' : 'Email', + target: index === 0 ? '#stellaops-soc' : 'compliance@stella-ops.org', + title: + index === 0 + ? 'Critical CVE flagged for registry.git.stella-ops.org' + : 'Hourly compliance digest (#23)', + body: + index === 0 + ? 'KEV CVE-2025-1234 detected in ubuntu:24.04. Rescan triggered.' + : '3 findings require compliance review. See attached report.', + summary: index === 0 ? 'Immediate attention required.' : 'Digest only.', + locale: 'en-US', + attachments: index === 0 ? [] : ['https://scanner.local/reports/digest-23'], + }, + attempts: [ + { + timestamp: created, + status: 'Sending', + statusCode: 202, + }, + { + timestamp: created, + status: attemptsStatus, + statusCode: status === 'Sent' ? 200 : 429, + reason: + status === 'Failed' + ? 'Slack API returned 504' + : status === 'Throttled' + ? 
'Digest window open' + : undefined, + }, + ], + metadata: { + batch: `window-${index + 1}`, + }, + createdAt: created, + sentAt: created, + completedAt: created, + } satisfies NotifyDelivery; + } +); + +export function inferHealthStatus( + enabled: boolean, + hasTarget: boolean +): ChannelHealthStatus { + if (!hasTarget) { + return 'Unhealthy'; + } + if (!enabled) { + return 'Degraded'; + } + return 'Healthy'; +} + diff --git a/src/StellaOps.Web/src/app/testing/policy-fixtures.spec.ts b/src/Web/StellaOps.Web/src/app/testing/policy-fixtures.spec.ts similarity index 97% rename from src/StellaOps.Web/src/app/testing/policy-fixtures.spec.ts rename to src/Web/StellaOps.Web/src/app/testing/policy-fixtures.spec.ts index f3fd1c3d..b05b1243 100644 --- a/src/StellaOps.Web/src/app/testing/policy-fixtures.spec.ts +++ b/src/Web/StellaOps.Web/src/app/testing/policy-fixtures.spec.ts @@ -1,54 +1,54 @@ -import { getPolicyPreviewFixture, getPolicyReportFixture } from './policy-fixtures'; - -describe('policy fixtures', () => { - it('returns fresh clones for preview data', () => { - const first = getPolicyPreviewFixture(); - const second = getPolicyPreviewFixture(); - - expect(first).not.toBe(second); - expect(first.previewRequest).not.toBe(second.previewRequest); - expect(first.previewResponse.diffs).not.toBe(second.previewResponse.diffs); - }); - - it('exposes required policy preview fields', () => { - const { previewRequest, previewResponse } = getPolicyPreviewFixture(); - - expect(previewRequest.imageDigest).toMatch(/^sha256:[0-9a-f]{64}$/); - expect(Array.isArray(previewRequest.findings)).toBeTrue(); - expect(previewRequest.findings.length).toBeGreaterThan(0); - expect(previewResponse.success).toBeTrue(); - expect(previewResponse.policyDigest).toMatch(/^[0-9a-f]{64}$/); - expect(previewResponse.diffs.length).toBeGreaterThan(0); - - const diff = previewResponse.diffs[0]; - expect(diff.projected.confidenceBand).toBeDefined(); - expect(diff.projected.unknownConfidence).toBeGreaterThan(0); - expect(diff.projected.reachability).toBeDefined(); - }); - - it('aligns preview and report fixtures', () => { - const preview = getPolicyPreviewFixture(); - const { reportResponse } = getPolicyReportFixture(); - - expect(reportResponse.report.policy.digest).toEqual( - preview.previewResponse.policyDigest - ); - expect(reportResponse.report.verdicts.length).toEqual( - reportResponse.report.summary.total - ); - expect(reportResponse.report.verdicts.length).toBeGreaterThan(0); - expect( - reportResponse.report.verdicts.some( - (verdict) => verdict.confidenceBand != null - ) - ).toBeTrue(); - }); - - it('provides DSSE metadata for report fixture', () => { - const { reportResponse } = getPolicyReportFixture(); - - expect(reportResponse.dsse).toBeDefined(); - expect(reportResponse.dsse?.payloadType).toBe('application/vnd.stellaops.report+json'); - expect(reportResponse.dsse?.signatures?.length).toBeGreaterThan(0); - }); -}); +import { getPolicyPreviewFixture, getPolicyReportFixture } from './policy-fixtures'; + +describe('policy fixtures', () => { + it('returns fresh clones for preview data', () => { + const first = getPolicyPreviewFixture(); + const second = getPolicyPreviewFixture(); + + expect(first).not.toBe(second); + expect(first.previewRequest).not.toBe(second.previewRequest); + expect(first.previewResponse.diffs).not.toBe(second.previewResponse.diffs); + }); + + it('exposes required policy preview fields', () => { + const { previewRequest, previewResponse } = getPolicyPreviewFixture(); + + 
expect(previewRequest.imageDigest).toMatch(/^sha256:[0-9a-f]{64}$/); + expect(Array.isArray(previewRequest.findings)).toBeTrue(); + expect(previewRequest.findings.length).toBeGreaterThan(0); + expect(previewResponse.success).toBeTrue(); + expect(previewResponse.policyDigest).toMatch(/^[0-9a-f]{64}$/); + expect(previewResponse.diffs.length).toBeGreaterThan(0); + + const diff = previewResponse.diffs[0]; + expect(diff.projected.confidenceBand).toBeDefined(); + expect(diff.projected.unknownConfidence).toBeGreaterThan(0); + expect(diff.projected.reachability).toBeDefined(); + }); + + it('aligns preview and report fixtures', () => { + const preview = getPolicyPreviewFixture(); + const { reportResponse } = getPolicyReportFixture(); + + expect(reportResponse.report.policy.digest).toEqual( + preview.previewResponse.policyDigest + ); + expect(reportResponse.report.verdicts.length).toEqual( + reportResponse.report.summary.total + ); + expect(reportResponse.report.verdicts.length).toBeGreaterThan(0); + expect( + reportResponse.report.verdicts.some( + (verdict) => verdict.confidenceBand != null + ) + ).toBeTrue(); + }); + + it('provides DSSE metadata for report fixture', () => { + const { reportResponse } = getPolicyReportFixture(); + + expect(reportResponse.dsse).toBeDefined(); + expect(reportResponse.dsse?.payloadType).toBe('application/vnd.stellaops.report+json'); + expect(reportResponse.dsse?.signatures?.length).toBeGreaterThan(0); + }); +}); diff --git a/src/StellaOps.Web/src/app/testing/policy-fixtures.ts b/src/Web/StellaOps.Web/src/app/testing/policy-fixtures.ts similarity index 96% rename from src/StellaOps.Web/src/app/testing/policy-fixtures.ts rename to src/Web/StellaOps.Web/src/app/testing/policy-fixtures.ts index a0acea3c..98d6b723 100644 --- a/src/StellaOps.Web/src/app/testing/policy-fixtures.ts +++ b/src/Web/StellaOps.Web/src/app/testing/policy-fixtures.ts @@ -1,23 +1,23 @@ -import previewSample from '../../../../../samples/policy/policy-preview-unknown.json'; -import reportSample from '../../../../../samples/policy/policy-report-unknown.json'; -import { - PolicyPreviewSample, - PolicyReportSample, -} from '../core/api/policy-preview.models'; - -const previewFixture: PolicyPreviewSample = - previewSample as unknown as PolicyPreviewSample; -const reportFixture: PolicyReportSample = - reportSample as unknown as PolicyReportSample; - -export function getPolicyPreviewFixture(): PolicyPreviewSample { - return clone(previewFixture); -} - -export function getPolicyReportFixture(): PolicyReportSample { - return clone(reportFixture); -} - -function clone<T>(value: T): T { - return JSON.parse(JSON.stringify(value)); -} +import previewSample from '../../../../../samples/policy/policy-preview-unknown.json'; +import reportSample from '../../../../../samples/policy/policy-report-unknown.json'; +import { + PolicyPreviewSample, + PolicyReportSample, +} from '../core/api/policy-preview.models'; + +const previewFixture: PolicyPreviewSample = + previewSample as unknown as PolicyPreviewSample; +const reportFixture: PolicyReportSample = + reportSample as unknown as PolicyReportSample; + +export function getPolicyPreviewFixture(): PolicyPreviewSample { + return clone(previewFixture); +} + +export function getPolicyReportFixture(): PolicyReportSample { + return clone(reportFixture); +} + +function clone<T>(value: T): T { + return JSON.parse(JSON.stringify(value)); +} diff --git a/src/StellaOps.Web/src/app/testing/scan-fixtures.ts b/src/Web/StellaOps.Web/src/app/testing/scan-fixtures.ts similarity index 97% 
rename from src/StellaOps.Web/src/app/testing/scan-fixtures.ts rename to src/Web/StellaOps.Web/src/app/testing/scan-fixtures.ts index 840442b5..05f27411 100644 --- a/src/StellaOps.Web/src/app/testing/scan-fixtures.ts +++ b/src/Web/StellaOps.Web/src/app/testing/scan-fixtures.ts @@ -1,30 +1,30 @@ -import { ScanDetail } from '../core/api/scanner.models'; - -export const scanDetailWithVerifiedAttestation: ScanDetail = { - scanId: 'scan-verified-001', - imageDigest: - 'sha256:9f92a8c39f8d4f7bb1a60f2be650b3019b9a1bb50d2da839efa9bf2a278a0071', - completedAt: '2025-10-20T18:22:04Z', - attestation: { - uuid: '018ed91c-9b64-7edc-b9ac-0bada2f8d501', - index: 412398, - logUrl: 'https://rekor.sigstore.dev', - status: 'verified', - checkedAt: '2025-10-23T12:04:52Z', - statusMessage: 'Rekor transparency log inclusion proof verified.', - }, -}; - -export const scanDetailWithFailedAttestation: ScanDetail = { - scanId: 'scan-failed-002', - imageDigest: - 'sha256:b0d6865de537e45bdd9dd72cdac02bc6f459f0e546ed9134e2afc2fccd6298e0', - completedAt: '2025-10-19T07:14:33Z', - attestation: { - uuid: '018ed91c-ffff-4882-9955-0027c0bbb090', - status: 'failed', - checkedAt: '2025-10-23T09:18:11Z', - statusMessage: - 'Verification failed: inclusion proof leaf hash mismatch at depth 4.', - }, -}; +import { ScanDetail } from '../core/api/scanner.models'; + +export const scanDetailWithVerifiedAttestation: ScanDetail = { + scanId: 'scan-verified-001', + imageDigest: + 'sha256:9f92a8c39f8d4f7bb1a60f2be650b3019b9a1bb50d2da839efa9bf2a278a0071', + completedAt: '2025-10-20T18:22:04Z', + attestation: { + uuid: '018ed91c-9b64-7edc-b9ac-0bada2f8d501', + index: 412398, + logUrl: 'https://rekor.sigstore.dev', + status: 'verified', + checkedAt: '2025-10-23T12:04:52Z', + statusMessage: 'Rekor transparency log inclusion proof verified.', + }, +}; + +export const scanDetailWithFailedAttestation: ScanDetail = { + scanId: 'scan-failed-002', + imageDigest: + 'sha256:b0d6865de537e45bdd9dd72cdac02bc6f459f0e546ed9134e2afc2fccd6298e0', + completedAt: '2025-10-19T07:14:33Z', + attestation: { + uuid: '018ed91c-ffff-4882-9955-0027c0bbb090', + status: 'failed', + checkedAt: '2025-10-23T09:18:11Z', + statusMessage: + 'Verification failed: inclusion proof leaf hash mismatch at depth 4.', + }, +}; diff --git a/src/StellaOps.Web/src/assets/.gitkeep b/src/Web/StellaOps.Web/src/assets/.gitkeep similarity index 100% rename from src/StellaOps.Web/src/assets/.gitkeep rename to src/Web/StellaOps.Web/src/assets/.gitkeep diff --git a/src/StellaOps.Web/src/config/config.json b/src/Web/StellaOps.Web/src/config/config.json similarity index 97% rename from src/StellaOps.Web/src/config/config.json rename to src/Web/StellaOps.Web/src/config/config.json index 2e64ab9c..ee5c9dbd 100644 --- a/src/StellaOps.Web/src/config/config.json +++ b/src/Web/StellaOps.Web/src/config/config.json @@ -1,26 +1,26 @@ -{ - "authority": { - "issuer": "https://authority.local", - "clientId": "stellaops-ui", - "authorizeEndpoint": "https://authority.local/connect/authorize", - "tokenEndpoint": "https://authority.local/connect/token", - "logoutEndpoint": "https://authority.local/connect/logout", - "redirectUri": "http://localhost:4400/auth/callback", - "postLogoutRedirectUri": "http://localhost:4400/", - "scope": "openid profile email ui.read authority:tenants.read advisory:read vex:read exceptions:read exceptions:approve aoc:verify findings:read orch:read vuln:read", - "audience": "https://scanner.local", - "dpopAlgorithms": ["ES256"], - "refreshLeewaySeconds": 60 - }, - "apiBaseUrls": { - 
"authority": "https://authority.local", - "scanner": "https://scanner.local", - "policy": "https://scanner.local", - "concelier": "https://concelier.local", - "attestor": "https://attestor.local" - }, - "telemetry": { - "otlpEndpoint": "http://localhost:4318/v1/traces", - "sampleRate": 0.1 - } -} +{ + "authority": { + "issuer": "https://authority.local", + "clientId": "stellaops-ui", + "authorizeEndpoint": "https://authority.local/connect/authorize", + "tokenEndpoint": "https://authority.local/connect/token", + "logoutEndpoint": "https://authority.local/connect/logout", + "redirectUri": "http://localhost:4400/auth/callback", + "postLogoutRedirectUri": "http://localhost:4400/", + "scope": "openid profile email ui.read authority:tenants.read advisory:read vex:read exceptions:read exceptions:approve aoc:verify findings:read orch:read vuln:read", + "audience": "https://scanner.local", + "dpopAlgorithms": ["ES256"], + "refreshLeewaySeconds": 60 + }, + "apiBaseUrls": { + "authority": "https://authority.local", + "scanner": "https://scanner.local", + "policy": "https://scanner.local", + "concelier": "https://concelier.local", + "attestor": "https://attestor.local" + }, + "telemetry": { + "otlpEndpoint": "http://localhost:4318/v1/traces", + "sampleRate": 0.1 + } +} diff --git a/src/StellaOps.Web/src/config/config.sample.json b/src/Web/StellaOps.Web/src/config/config.sample.json similarity index 97% rename from src/StellaOps.Web/src/config/config.sample.json rename to src/Web/StellaOps.Web/src/config/config.sample.json index b4dd0e7d..4ba9744d 100644 --- a/src/StellaOps.Web/src/config/config.sample.json +++ b/src/Web/StellaOps.Web/src/config/config.sample.json @@ -1,26 +1,26 @@ -{ - "authority": { - "issuer": "https://authority.example.dev", - "clientId": "stellaops-ui", - "authorizeEndpoint": "https://authority.example.dev/connect/authorize", - "tokenEndpoint": "https://authority.example.dev/connect/token", - "logoutEndpoint": "https://authority.example.dev/connect/logout", - "redirectUri": "http://localhost:4400/auth/callback", - "postLogoutRedirectUri": "http://localhost:4400/", - "scope": "openid profile email ui.read authority:tenants.read advisory:read vex:read exceptions:read exceptions:approve aoc:verify findings:read orch:read vuln:read", - "audience": "https://scanner.example.dev", - "dpopAlgorithms": ["ES256"], - "refreshLeewaySeconds": 60 - }, - "apiBaseUrls": { - "authority": "https://authority.example.dev", - "scanner": "https://scanner.example.dev", - "policy": "https://scanner.example.dev", - "concelier": "https://concelier.example.dev", - "attestor": "https://attestor.example.dev" - }, - "telemetry": { - "otlpEndpoint": "", - "sampleRate": 0 - } -} +{ + "authority": { + "issuer": "https://authority.example.dev", + "clientId": "stellaops-ui", + "authorizeEndpoint": "https://authority.example.dev/connect/authorize", + "tokenEndpoint": "https://authority.example.dev/connect/token", + "logoutEndpoint": "https://authority.example.dev/connect/logout", + "redirectUri": "http://localhost:4400/auth/callback", + "postLogoutRedirectUri": "http://localhost:4400/", + "scope": "openid profile email ui.read authority:tenants.read advisory:read vex:read exceptions:read exceptions:approve aoc:verify findings:read orch:read vuln:read", + "audience": "https://scanner.example.dev", + "dpopAlgorithms": ["ES256"], + "refreshLeewaySeconds": 60 + }, + "apiBaseUrls": { + "authority": "https://authority.example.dev", + "scanner": "https://scanner.example.dev", + "policy": "https://scanner.example.dev", + 
"concelier": "https://concelier.example.dev", + "attestor": "https://attestor.example.dev" + }, + "telemetry": { + "otlpEndpoint": "", + "sampleRate": 0 + } +} diff --git a/src/StellaOps.Web/src/favicon.ico b/src/Web/StellaOps.Web/src/favicon.ico similarity index 100% rename from src/StellaOps.Web/src/favicon.ico rename to src/Web/StellaOps.Web/src/favicon.ico diff --git a/src/StellaOps.Web/src/index.html b/src/Web/StellaOps.Web/src/index.html similarity index 100% rename from src/StellaOps.Web/src/index.html rename to src/Web/StellaOps.Web/src/index.html diff --git a/src/StellaOps.Web/src/main.ts b/src/Web/StellaOps.Web/src/main.ts similarity index 100% rename from src/StellaOps.Web/src/main.ts rename to src/Web/StellaOps.Web/src/main.ts diff --git a/src/StellaOps.Web/src/styles.scss b/src/Web/StellaOps.Web/src/styles.scss similarity index 100% rename from src/StellaOps.Web/src/styles.scss rename to src/Web/StellaOps.Web/src/styles.scss diff --git a/src/StellaOps.Web/test-results/.last-run.json b/src/Web/StellaOps.Web/test-results/.last-run.json similarity index 93% rename from src/StellaOps.Web/test-results/.last-run.json rename to src/Web/StellaOps.Web/test-results/.last-run.json index cbcc1fba..3ba117bf 100644 --- a/src/StellaOps.Web/test-results/.last-run.json +++ b/src/Web/StellaOps.Web/test-results/.last-run.json @@ -1,4 +1,4 @@ -{ - "status": "passed", - "failedTests": [] +{ + "status": "passed", + "failedTests": [] } \ No newline at end of file diff --git a/src/StellaOps.Web/tests/e2e/auth.spec.ts b/src/Web/StellaOps.Web/tests/e2e/auth.spec.ts similarity index 97% rename from src/StellaOps.Web/tests/e2e/auth.spec.ts rename to src/Web/StellaOps.Web/tests/e2e/auth.spec.ts index 0ffec803..b1f9f786 100644 --- a/src/StellaOps.Web/tests/e2e/auth.spec.ts +++ b/src/Web/StellaOps.Web/tests/e2e/auth.spec.ts @@ -1,79 +1,79 @@ -import { expect, test } from '@playwright/test'; - -const mockConfig = { - authority: { - issuer: 'https://authority.local', - clientId: 'stellaops-ui', - authorizeEndpoint: 'https://authority.local/connect/authorize', - tokenEndpoint: 'https://authority.local/connect/token', - logoutEndpoint: 'https://authority.local/connect/logout', - redirectUri: 'http://127.0.0.1:4400/auth/callback', - postLogoutRedirectUri: 'http://127.0.0.1:4400/', - scope: - 'openid profile email ui.read authority:tenants.read advisory:read vex:read exceptions:read exceptions:approve aoc:verify findings:read orch:read vuln:read', - audience: 'https://scanner.local', - dpopAlgorithms: ['ES256'], - refreshLeewaySeconds: 60, - }, - apiBaseUrls: { - authority: 'https://authority.local', - scanner: 'https://scanner.local', - policy: 'https://scanner.local', - concelier: 'https://concelier.local', - attestor: 'https://attestor.local', - }, -}; - -test.beforeEach(async ({ page }) => { - page.on('console', (message) => { - // bubble up browser logs for debugging - console.log('[browser]', message.type(), message.text()); - }); - page.on('pageerror', (error) => { - console.log('[pageerror]', error.message); - }); - await page.addInitScript(() => { - // Capture attempted redirects so the test can assert against them. 
- (window as any).__stellaopsAssignedUrls = []; - const originalAssign = window.location.assign.bind(window.location); - window.location.assign = (url: string | URL) => { - (window as any).__stellaopsAssignedUrls.push(url.toString()); - }; - - window.sessionStorage.clear(); - }); - await page.route('**/config.json', (route) => - route.fulfill({ - status: 200, - contentType: 'application/json', - body: JSON.stringify(mockConfig), - }) - ); - await page.route('https://authority.local/**', (route) => route.abort()); -}); - -test('sign-in flow builds Authority authorization URL', async ({ page }) => { - await page.goto('/'); - const signInButton = page.getByRole('button', { name: /sign in/i }); - await expect(signInButton).toBeVisible(); - const [request] = await Promise.all([ - page.waitForRequest('https://authority.local/connect/authorize*'), - signInButton.click(), - ]); - - const authorizeUrl = new URL(request.url()); - expect(authorizeUrl.origin).toBe('https://authority.local'); - expect(authorizeUrl.pathname).toBe('/connect/authorize'); - expect(authorizeUrl.searchParams.get('client_id')).toBe('stellaops-ui'); - -}); - -test('callback without pending state surfaces error message', async ({ page }) => { - await page.route('https://authority.local/**', (route) => - route.fulfill({ status: 400, body: 'blocked' }) - ); - await page.goto('/auth/callback?code=test-code&state=missing'); - await expect( - page.getByText('We were unable to complete the sign-in flow. Please try again.') - ).toBeVisible({ timeout: 10000 }); -}); +import { expect, test } from '@playwright/test'; + +const mockConfig = { + authority: { + issuer: 'https://authority.local', + clientId: 'stellaops-ui', + authorizeEndpoint: 'https://authority.local/connect/authorize', + tokenEndpoint: 'https://authority.local/connect/token', + logoutEndpoint: 'https://authority.local/connect/logout', + redirectUri: 'http://127.0.0.1:4400/auth/callback', + postLogoutRedirectUri: 'http://127.0.0.1:4400/', + scope: + 'openid profile email ui.read authority:tenants.read advisory:read vex:read exceptions:read exceptions:approve aoc:verify findings:read orch:read vuln:read', + audience: 'https://scanner.local', + dpopAlgorithms: ['ES256'], + refreshLeewaySeconds: 60, + }, + apiBaseUrls: { + authority: 'https://authority.local', + scanner: 'https://scanner.local', + policy: 'https://scanner.local', + concelier: 'https://concelier.local', + attestor: 'https://attestor.local', + }, +}; + +test.beforeEach(async ({ page }) => { + page.on('console', (message) => { + // bubble up browser logs for debugging + console.log('[browser]', message.type(), message.text()); + }); + page.on('pageerror', (error) => { + console.log('[pageerror]', error.message); + }); + await page.addInitScript(() => { + // Capture attempted redirects so the test can assert against them. 
+ (window as any).__stellaopsAssignedUrls = []; + const originalAssign = window.location.assign.bind(window.location); + window.location.assign = (url: string | URL) => { + (window as any).__stellaopsAssignedUrls.push(url.toString()); + }; + + window.sessionStorage.clear(); + }); + await page.route('**/config.json', (route) => + route.fulfill({ + status: 200, + contentType: 'application/json', + body: JSON.stringify(mockConfig), + }) + ); + await page.route('https://authority.local/**', (route) => route.abort()); +}); + +test('sign-in flow builds Authority authorization URL', async ({ page }) => { + await page.goto('/'); + const signInButton = page.getByRole('button', { name: /sign in/i }); + await expect(signInButton).toBeVisible(); + const [request] = await Promise.all([ + page.waitForRequest('https://authority.local/connect/authorize*'), + signInButton.click(), + ]); + + const authorizeUrl = new URL(request.url()); + expect(authorizeUrl.origin).toBe('https://authority.local'); + expect(authorizeUrl.pathname).toBe('/connect/authorize'); + expect(authorizeUrl.searchParams.get('client_id')).toBe('stellaops-ui'); + +}); + +test('callback without pending state surfaces error message', async ({ page }) => { + await page.route('https://authority.local/**', (route) => + route.fulfill({ status: 400, body: 'blocked' }) + ); + await page.goto('/auth/callback?code=test-code&state=missing'); + await expect( + page.getByText('We were unable to complete the sign-in flow. Please try again.') + ).toBeVisible({ timeout: 10000 }); +}); diff --git a/src/StellaOps.Web/tsconfig.app.json b/src/Web/StellaOps.Web/tsconfig.app.json similarity index 100% rename from src/StellaOps.Web/tsconfig.app.json rename to src/Web/StellaOps.Web/tsconfig.app.json diff --git a/src/StellaOps.Web/tsconfig.json b/src/Web/StellaOps.Web/tsconfig.json similarity index 100% rename from src/StellaOps.Web/tsconfig.json rename to src/Web/StellaOps.Web/tsconfig.json diff --git a/src/StellaOps.Web/tsconfig.spec.json b/src/Web/StellaOps.Web/tsconfig.spec.json similarity index 100% rename from src/StellaOps.Web/tsconfig.spec.json rename to src/Web/StellaOps.Web/tsconfig.spec.json diff --git a/src/StellaOps.Zastava.Observer/Backend/IRuntimePolicyClient.cs b/src/Zastava/StellaOps.Zastava.Observer/Backend/IRuntimePolicyClient.cs similarity index 100% rename from src/StellaOps.Zastava.Observer/Backend/IRuntimePolicyClient.cs rename to src/Zastava/StellaOps.Zastava.Observer/Backend/IRuntimePolicyClient.cs diff --git a/src/StellaOps.Zastava.Observer/Backend/RuntimeEventsClient.cs b/src/Zastava/StellaOps.Zastava.Observer/Backend/RuntimeEventsClient.cs similarity index 100% rename from src/StellaOps.Zastava.Observer/Backend/RuntimeEventsClient.cs rename to src/Zastava/StellaOps.Zastava.Observer/Backend/RuntimeEventsClient.cs diff --git a/src/StellaOps.Zastava.Observer/Backend/RuntimePolicyClient.cs b/src/Zastava/StellaOps.Zastava.Observer/Backend/RuntimePolicyClient.cs similarity index 100% rename from src/StellaOps.Zastava.Observer/Backend/RuntimePolicyClient.cs rename to src/Zastava/StellaOps.Zastava.Observer/Backend/RuntimePolicyClient.cs diff --git a/src/StellaOps.Zastava.Observer/Backend/RuntimePolicyContracts.cs b/src/Zastava/StellaOps.Zastava.Observer/Backend/RuntimePolicyContracts.cs similarity index 100% rename from src/StellaOps.Zastava.Observer/Backend/RuntimePolicyContracts.cs rename to src/Zastava/StellaOps.Zastava.Observer/Backend/RuntimePolicyContracts.cs diff --git a/src/StellaOps.Zastava.Observer/Backend/RuntimePolicyException.cs 
b/src/Zastava/StellaOps.Zastava.Observer/Backend/RuntimePolicyException.cs similarity index 100% rename from src/StellaOps.Zastava.Observer/Backend/RuntimePolicyException.cs rename to src/Zastava/StellaOps.Zastava.Observer/Backend/RuntimePolicyException.cs diff --git a/src/StellaOps.Zastava.Observer/Configuration/ZastavaObserverOptions.cs b/src/Zastava/StellaOps.Zastava.Observer/Configuration/ZastavaObserverOptions.cs similarity index 98% rename from src/StellaOps.Zastava.Observer/Configuration/ZastavaObserverOptions.cs rename to src/Zastava/StellaOps.Zastava.Observer/Configuration/ZastavaObserverOptions.cs index 75c343da..6f8393d5 100644 --- a/src/StellaOps.Zastava.Observer/Configuration/ZastavaObserverOptions.cs +++ b/src/Zastava/StellaOps.Zastava.Observer/Configuration/ZastavaObserverOptions.cs @@ -5,34 +5,34 @@ namespace StellaOps.Zastava.Observer.Configuration; /// <summary> /// Observer-specific configuration applied on top of the shared runtime options. -/// </summary> -public sealed class ZastavaObserverOptions -{ - public const string SectionName = "zastava:observer"; - - private const string DefaultContainerdSocket = "unix:///run/containerd/containerd.sock"; - - /// <summary> - /// Logical node identifier emitted with runtime events (defaults to environment hostname). - /// </summary> - [Required(AllowEmptyStrings = false)] - public string NodeName { get; set; } = - Environment.GetEnvironmentVariable("ZASTAVA_NODE_NAME") - ?? Environment.GetEnvironmentVariable("KUBERNETES_NODE_NAME") - ?? Environment.MachineName; - - /// <summary> - /// Baseline polling interval when watching CRI runtimes. - /// </summary> - [Range(typeof(TimeSpan), "00:00:01", "00:10:00")] - public TimeSpan PollInterval { get; set; } = TimeSpan.FromSeconds(2); - - /// <summary> - /// Maximum number of runtime events held in the in-memory buffer. - /// </summary> - [Range(16, 65536)] - public int MaxInMemoryBuffer { get; set; } = 2048; - +/// </summary> +public sealed class ZastavaObserverOptions +{ + public const string SectionName = "zastava:observer"; + + private const string DefaultContainerdSocket = "unix:///run/containerd/containerd.sock"; + + /// <summary> + /// Logical node identifier emitted with runtime events (defaults to environment hostname). + /// </summary> + [Required(AllowEmptyStrings = false)] + public string NodeName { get; set; } = + Environment.GetEnvironmentVariable("ZASTAVA_NODE_NAME") + ?? Environment.GetEnvironmentVariable("KUBERNETES_NODE_NAME") + ?? Environment.MachineName; + + /// <summary> + /// Baseline polling interval when watching CRI runtimes. + /// </summary> + [Range(typeof(TimeSpan), "00:00:01", "00:10:00")] + public TimeSpan PollInterval { get; set; } = TimeSpan.FromSeconds(2); + + /// <summary> + /// Maximum number of runtime events held in the in-memory buffer. + /// </summary> + [Range(16, 65536)] + public int MaxInMemoryBuffer { get; set; } = 2048; + /// <summary> /// Number of runtime events drained in one batch by downstream publishers. /// </summary> @@ -59,8 +59,8 @@ public sealed class ZastavaObserverOptions /// <summary> /// Connectivity/backoff settings applied when CRI endpoints fail temporarily. 
- /// </summary> - [Required] + /// </summary> + [Required] public ObserverBackoffOptions Backoff { get; set; } = new(); /// <summary> @@ -68,8 +68,8 @@ public sealed class ZastavaObserverOptions /// </summary> [Required] public IList<ContainerRuntimeEndpointOptions> Runtimes { get; set; } = new List<ContainerRuntimeEndpointOptions> - { - new() + { + new() { Name = "containerd", Engine = ContainerRuntimeEngine.Containerd, @@ -177,66 +177,66 @@ public sealed class ZastavaObserverPostureOptions public sealed class ObserverBackoffOptions { /// <summary> - /// Initial backoff delay applied after the first failure. - /// </summary> - [Range(typeof(TimeSpan), "00:00:01", "00:05:00")] - public TimeSpan Initial { get; set; } = TimeSpan.FromSeconds(1); - - /// <summary> - /// Maximum backoff delay after repeated failures. - /// </summary> - [Range(typeof(TimeSpan), "00:00:01", "00:10:00")] - public TimeSpan Max { get; set; } = TimeSpan.FromSeconds(30); - - /// <summary> - /// Jitter ratio applied to the computed delay (0 disables jitter). - /// </summary> - [Range(0.0, 0.5)] - public double JitterRatio { get; set; } = 0.2; -} - -public sealed class ContainerRuntimeEndpointOptions -{ - /// <summary> - /// Friendly name used for logging/metrics (defaults to engine identifier). - /// </summary> - public string? Name { get; set; } - - /// <summary> - /// Runtime engine backing the endpoint. - /// </summary> - public ContainerRuntimeEngine Engine { get; set; } = ContainerRuntimeEngine.Containerd; - - /// <summary> - /// Endpoint URI (unix:///run/containerd/containerd.sock, npipe://./pipe/dockershim, https://127.0.0.1:1234, ...). - /// </summary> - [Required(AllowEmptyStrings = false)] - public string Endpoint { get; set; } = "unix:///run/containerd/containerd.sock"; - - /// <summary> - /// Optional explicit polling interval for this endpoint (falls back to global PollInterval). - /// </summary> - [Range(typeof(TimeSpan), "00:00:01", "00:10:00")] - public TimeSpan? PollInterval { get; set; } - - /// <summary> - /// Optional connection timeout override. - /// </summary> - [Range(typeof(TimeSpan), "00:00:01", "00:01:00")] - public TimeSpan? ConnectTimeout { get; set; } - - /// <summary> - /// Flag to allow disabling endpoints without removing configuration entries. - /// </summary> - public bool Enabled { get; set; } = true; - - public string ResolveName() - => string.IsNullOrWhiteSpace(Name) ? Engine.ToString().ToLowerInvariant() : Name!; -} - -public enum ContainerRuntimeEngine -{ - Containerd, - CriO, - Docker -} + /// Initial backoff delay applied after the first failure. + /// </summary> + [Range(typeof(TimeSpan), "00:00:01", "00:05:00")] + public TimeSpan Initial { get; set; } = TimeSpan.FromSeconds(1); + + /// <summary> + /// Maximum backoff delay after repeated failures. + /// </summary> + [Range(typeof(TimeSpan), "00:00:01", "00:10:00")] + public TimeSpan Max { get; set; } = TimeSpan.FromSeconds(30); + + /// <summary> + /// Jitter ratio applied to the computed delay (0 disables jitter). + /// </summary> + [Range(0.0, 0.5)] + public double JitterRatio { get; set; } = 0.2; +} + +public sealed class ContainerRuntimeEndpointOptions +{ + /// <summary> + /// Friendly name used for logging/metrics (defaults to engine identifier). + /// </summary> + public string? Name { get; set; } + + /// <summary> + /// Runtime engine backing the endpoint. 
+ /// </summary> + public ContainerRuntimeEngine Engine { get; set; } = ContainerRuntimeEngine.Containerd; + + /// <summary> + /// Endpoint URI (unix:///run/containerd/containerd.sock, npipe://./pipe/dockershim, https://127.0.0.1:1234, ...). + /// </summary> + [Required(AllowEmptyStrings = false)] + public string Endpoint { get; set; } = "unix:///run/containerd/containerd.sock"; + + /// <summary> + /// Optional explicit polling interval for this endpoint (falls back to global PollInterval). + /// </summary> + [Range(typeof(TimeSpan), "00:00:01", "00:10:00")] + public TimeSpan? PollInterval { get; set; } + + /// <summary> + /// Optional connection timeout override. + /// </summary> + [Range(typeof(TimeSpan), "00:00:01", "00:01:00")] + public TimeSpan? ConnectTimeout { get; set; } + + /// <summary> + /// Flag to allow disabling endpoints without removing configuration entries. + /// </summary> + public bool Enabled { get; set; } = true; + + public string ResolveName() + => string.IsNullOrWhiteSpace(Name) ? Engine.ToString().ToLowerInvariant() : Name!; +} + +public enum ContainerRuntimeEngine +{ + Containerd, + CriO, + Docker +} diff --git a/src/StellaOps.Zastava.Observer/ContainerRuntime/ContainerStateTracker.cs b/src/Zastava/StellaOps.Zastava.Observer/ContainerRuntime/ContainerStateTracker.cs similarity index 97% rename from src/StellaOps.Zastava.Observer/ContainerRuntime/ContainerStateTracker.cs rename to src/Zastava/StellaOps.Zastava.Observer/ContainerRuntime/ContainerStateTracker.cs index 0df28f38..6f665438 100644 --- a/src/StellaOps.Zastava.Observer/ContainerRuntime/ContainerStateTracker.cs +++ b/src/Zastava/StellaOps.Zastava.Observer/ContainerRuntime/ContainerStateTracker.cs @@ -1,134 +1,134 @@ -using StellaOps.Zastava.Observer.ContainerRuntime.Cri; - -namespace StellaOps.Zastava.Observer.ContainerRuntime; - -internal sealed class ContainerStateTracker -{ - private readonly Dictionary<string, ContainerStateEntry> entries = new(StringComparer.Ordinal); - - public void BeginCycle() - { - foreach (var entry in entries.Values) - { - entry.SeenInCycle = false; - } - } - - public ContainerLifecycleEvent? MarkRunning(CriContainerInfo snapshot, DateTimeOffset fallbackTimestamp) - { - ArgumentNullException.ThrowIfNull(snapshot); - var timestamp = snapshot.StartedAt ?? 
snapshot.CreatedAt; - if (timestamp <= DateTimeOffset.MinValue) - { - timestamp = fallbackTimestamp; - } - - if (!entries.TryGetValue(snapshot.Id, out var entry)) - { - entry = new ContainerStateEntry(snapshot); - entries[snapshot.Id] = entry; - entry.SeenInCycle = true; - entry.State = ContainerLifecycleState.Running; - entry.LastStart = timestamp; - entry.LastSnapshot = snapshot; - return new ContainerLifecycleEvent(ContainerLifecycleEventKind.Start, timestamp, snapshot); - } - - entry.SeenInCycle = true; - - if (timestamp > entry.LastStart) - { - entry.LastStart = timestamp; - entry.State = ContainerLifecycleState.Running; - entry.LastSnapshot = snapshot; - return new ContainerLifecycleEvent(ContainerLifecycleEventKind.Start, timestamp, snapshot); - } - - entry.State = ContainerLifecycleState.Running; - entry.LastSnapshot = snapshot; - return null; - } - - public async Task<IReadOnlyList<ContainerLifecycleEvent>> CompleteCycleAsync( - Func<string, Task<CriContainerInfo?>> statusProvider, - DateTimeOffset fallbackTimestamp, - CancellationToken cancellationToken) - { - ArgumentNullException.ThrowIfNull(statusProvider); - - var events = new List<ContainerLifecycleEvent>(); - foreach (var (containerId, entry) in entries.ToArray()) - { - if (entry.SeenInCycle) - { - continue; - } - - CriContainerInfo? status = null; - if (entry.LastSnapshot is not null && entry.LastSnapshot.FinishedAt is not null) - { - status = entry.LastSnapshot; - } - else - { - status = await statusProvider(containerId).ConfigureAwait(false) ?? entry.LastSnapshot; - } - - var stopTimestamp = status?.FinishedAt ?? fallbackTimestamp; - if (stopTimestamp <= DateTimeOffset.MinValue) - { - stopTimestamp = fallbackTimestamp; - } - - if (entry.LastStop is not null && stopTimestamp <= entry.LastStop) - { - entries.Remove(containerId); - continue; - } - - var snapshot = status ?? entry.LastSnapshot ?? entry.MetadataFallback; - var stopEvent = new ContainerLifecycleEvent(ContainerLifecycleEventKind.Stop, stopTimestamp, snapshot); - events.Add(stopEvent); - - entry.LastStop = stopTimestamp; - entry.State = ContainerLifecycleState.Stopped; - entries.Remove(containerId); - } - - return events - .OrderBy(static e => e.Timestamp) - .ThenBy(static e => e.Snapshot.Id, StringComparer.Ordinal) - .ToArray(); - } - - private sealed class ContainerStateEntry - { - public ContainerStateEntry(CriContainerInfo seed) - { - MetadataFallback = seed; - LastSnapshot = seed; - } - - public ContainerLifecycleState State { get; set; } = ContainerLifecycleState.Unknown; - public bool SeenInCycle { get; set; } - public DateTimeOffset LastStart { get; set; } = DateTimeOffset.MinValue; - public DateTimeOffset? LastStop { get; set; } - public CriContainerInfo MetadataFallback { get; } - public CriContainerInfo? LastSnapshot { get; set; } - } -} - -internal enum ContainerLifecycleState -{ - Unknown, - Running, - Stopped -} - -internal sealed record ContainerLifecycleEvent(ContainerLifecycleEventKind Kind, DateTimeOffset Timestamp, CriContainerInfo Snapshot); - -internal enum ContainerLifecycleEventKind -{ - Start, - Stop -} +using StellaOps.Zastava.Observer.ContainerRuntime.Cri; + +namespace StellaOps.Zastava.Observer.ContainerRuntime; + +internal sealed class ContainerStateTracker +{ + private readonly Dictionary<string, ContainerStateEntry> entries = new(StringComparer.Ordinal); + + public void BeginCycle() + { + foreach (var entry in entries.Values) + { + entry.SeenInCycle = false; + } + } + + public ContainerLifecycleEvent? 
MarkRunning(CriContainerInfo snapshot, DateTimeOffset fallbackTimestamp) + { + ArgumentNullException.ThrowIfNull(snapshot); + var timestamp = snapshot.StartedAt ?? snapshot.CreatedAt; + if (timestamp <= DateTimeOffset.MinValue) + { + timestamp = fallbackTimestamp; + } + + if (!entries.TryGetValue(snapshot.Id, out var entry)) + { + entry = new ContainerStateEntry(snapshot); + entries[snapshot.Id] = entry; + entry.SeenInCycle = true; + entry.State = ContainerLifecycleState.Running; + entry.LastStart = timestamp; + entry.LastSnapshot = snapshot; + return new ContainerLifecycleEvent(ContainerLifecycleEventKind.Start, timestamp, snapshot); + } + + entry.SeenInCycle = true; + + if (timestamp > entry.LastStart) + { + entry.LastStart = timestamp; + entry.State = ContainerLifecycleState.Running; + entry.LastSnapshot = snapshot; + return new ContainerLifecycleEvent(ContainerLifecycleEventKind.Start, timestamp, snapshot); + } + + entry.State = ContainerLifecycleState.Running; + entry.LastSnapshot = snapshot; + return null; + } + + public async Task<IReadOnlyList<ContainerLifecycleEvent>> CompleteCycleAsync( + Func<string, Task<CriContainerInfo?>> statusProvider, + DateTimeOffset fallbackTimestamp, + CancellationToken cancellationToken) + { + ArgumentNullException.ThrowIfNull(statusProvider); + + var events = new List<ContainerLifecycleEvent>(); + foreach (var (containerId, entry) in entries.ToArray()) + { + if (entry.SeenInCycle) + { + continue; + } + + CriContainerInfo? status = null; + if (entry.LastSnapshot is not null && entry.LastSnapshot.FinishedAt is not null) + { + status = entry.LastSnapshot; + } + else + { + status = await statusProvider(containerId).ConfigureAwait(false) ?? entry.LastSnapshot; + } + + var stopTimestamp = status?.FinishedAt ?? fallbackTimestamp; + if (stopTimestamp <= DateTimeOffset.MinValue) + { + stopTimestamp = fallbackTimestamp; + } + + if (entry.LastStop is not null && stopTimestamp <= entry.LastStop) + { + entries.Remove(containerId); + continue; + } + + var snapshot = status ?? entry.LastSnapshot ?? entry.MetadataFallback; + var stopEvent = new ContainerLifecycleEvent(ContainerLifecycleEventKind.Stop, stopTimestamp, snapshot); + events.Add(stopEvent); + + entry.LastStop = stopTimestamp; + entry.State = ContainerLifecycleState.Stopped; + entries.Remove(containerId); + } + + return events + .OrderBy(static e => e.Timestamp) + .ThenBy(static e => e.Snapshot.Id, StringComparer.Ordinal) + .ToArray(); + } + + private sealed class ContainerStateEntry + { + public ContainerStateEntry(CriContainerInfo seed) + { + MetadataFallback = seed; + LastSnapshot = seed; + } + + public ContainerLifecycleState State { get; set; } = ContainerLifecycleState.Unknown; + public bool SeenInCycle { get; set; } + public DateTimeOffset LastStart { get; set; } = DateTimeOffset.MinValue; + public DateTimeOffset? LastStop { get; set; } + public CriContainerInfo MetadataFallback { get; } + public CriContainerInfo? 
LastSnapshot { get; set; } + } +} + +internal enum ContainerLifecycleState +{ + Unknown, + Running, + Stopped +} + +internal sealed record ContainerLifecycleEvent(ContainerLifecycleEventKind Kind, DateTimeOffset Timestamp, CriContainerInfo Snapshot); + +internal enum ContainerLifecycleEventKind +{ + Start, + Stop +} diff --git a/src/StellaOps.Zastava.Observer/ContainerRuntime/ContainerStateTrackerFactory.cs b/src/Zastava/StellaOps.Zastava.Observer/ContainerRuntime/ContainerStateTrackerFactory.cs similarity index 100% rename from src/StellaOps.Zastava.Observer/ContainerRuntime/ContainerStateTrackerFactory.cs rename to src/Zastava/StellaOps.Zastava.Observer/ContainerRuntime/ContainerStateTrackerFactory.cs diff --git a/src/StellaOps.Zastava.Observer/ContainerRuntime/Cri/CriConversions.cs b/src/Zastava/StellaOps.Zastava.Observer/ContainerRuntime/Cri/CriConversions.cs similarity index 98% rename from src/StellaOps.Zastava.Observer/ContainerRuntime/Cri/CriConversions.cs rename to src/Zastava/StellaOps.Zastava.Observer/ContainerRuntime/Cri/CriConversions.cs index a7ba6f21..e86e116d 100644 --- a/src/StellaOps.Zastava.Observer/ContainerRuntime/Cri/CriConversions.cs +++ b/src/Zastava/StellaOps.Zastava.Observer/ContainerRuntime/Cri/CriConversions.cs @@ -1,52 +1,52 @@ -using StellaOps.Zastava.Observer.Cri; - -namespace StellaOps.Zastava.Observer.ContainerRuntime.Cri; - -internal static class CriConversions -{ - private const long NanosecondsPerTick = 100; - - public static CriContainerInfo ToContainerInfo(Container container) - { - ArgumentNullException.ThrowIfNull(container); - - return new CriContainerInfo( - Id: container.Id ?? string.Empty, - PodSandboxId: container.PodSandboxId ?? string.Empty, - Name: container.Metadata?.Name ?? string.Empty, - Attempt: container.Metadata?.Attempt ?? 0, - Image: container.Image?.Image, - ImageRef: container.ImageRef, - Labels: container.Labels?.ToDictionary(static pair => pair.Key, static pair => pair.Value, StringComparer.Ordinal) ?? new Dictionary<string, string>(StringComparer.Ordinal), - Annotations: container.Annotations?.ToDictionary(static pair => pair.Key, static pair => pair.Value, StringComparer.Ordinal) ?? new Dictionary<string, string>(StringComparer.Ordinal), - CreatedAt: FromUnixNanoseconds(container.CreatedAt), - StartedAt: null, - FinishedAt: null, +using StellaOps.Zastava.Observer.Cri; + +namespace StellaOps.Zastava.Observer.ContainerRuntime.Cri; + +internal static class CriConversions +{ + private const long NanosecondsPerTick = 100; + + public static CriContainerInfo ToContainerInfo(Container container) + { + ArgumentNullException.ThrowIfNull(container); + + return new CriContainerInfo( + Id: container.Id ?? string.Empty, + PodSandboxId: container.PodSandboxId ?? string.Empty, + Name: container.Metadata?.Name ?? string.Empty, + Attempt: container.Metadata?.Attempt ?? 0, + Image: container.Image?.Image, + ImageRef: container.ImageRef, + Labels: container.Labels?.ToDictionary(static pair => pair.Key, static pair => pair.Value, StringComparer.Ordinal) ?? new Dictionary<string, string>(StringComparer.Ordinal), + Annotations: container.Annotations?.ToDictionary(static pair => pair.Key, static pair => pair.Value, StringComparer.Ordinal) ?? new Dictionary<string, string>(StringComparer.Ordinal), + CreatedAt: FromUnixNanoseconds(container.CreatedAt), + StartedAt: null, + FinishedAt: null, ExitCode: null, Reason: null, Message: null, Pid: null); - } - - public static CriContainerInfo MergeStatus(CriContainerInfo baseline, ContainerStatus? 
status) - { - if (status is null) - { - return baseline; - } - - var labels = status.Labels?.ToDictionary(static pair => pair.Key, static pair => pair.Value, StringComparer.Ordinal) - ?? baseline.Labels; - var annotations = status.Annotations?.ToDictionary(static pair => pair.Key, static pair => pair.Value, StringComparer.Ordinal) - ?? baseline.Annotations; - - return baseline with - { - CreatedAt = status.CreatedAt > 0 ? FromUnixNanoseconds(status.CreatedAt) : baseline.CreatedAt, - StartedAt = status.StartedAt > 0 ? FromUnixNanoseconds(status.StartedAt) : baseline.StartedAt, - FinishedAt = status.FinishedAt > 0 ? FromUnixNanoseconds(status.FinishedAt) : baseline.FinishedAt, - ExitCode = status.ExitCode != 0 ? status.ExitCode : baseline.ExitCode, - Reason = string.IsNullOrWhiteSpace(status.Reason) ? baseline.Reason : status.Reason, + } + + public static CriContainerInfo MergeStatus(CriContainerInfo baseline, ContainerStatus? status) + { + if (status is null) + { + return baseline; + } + + var labels = status.Labels?.ToDictionary(static pair => pair.Key, static pair => pair.Value, StringComparer.Ordinal) + ?? baseline.Labels; + var annotations = status.Annotations?.ToDictionary(static pair => pair.Key, static pair => pair.Value, StringComparer.Ordinal) + ?? baseline.Annotations; + + return baseline with + { + CreatedAt = status.CreatedAt > 0 ? FromUnixNanoseconds(status.CreatedAt) : baseline.CreatedAt, + StartedAt = status.StartedAt > 0 ? FromUnixNanoseconds(status.StartedAt) : baseline.StartedAt, + FinishedAt = status.FinishedAt > 0 ? FromUnixNanoseconds(status.FinishedAt) : baseline.FinishedAt, + ExitCode = status.ExitCode != 0 ? status.ExitCode : baseline.ExitCode, + Reason = string.IsNullOrWhiteSpace(status.Reason) ? baseline.Reason : status.Reason, Message = string.IsNullOrWhiteSpace(status.Message) ? baseline.Message : status.Message, Pid = baseline.Pid, Image = status.Image?.Image ?? 
baseline.Image, @@ -54,25 +54,25 @@ internal static class CriConversions Labels = labels, Annotations = annotations }; - } - - public static DateTimeOffset FromUnixNanoseconds(long nanoseconds) - { - if (nanoseconds <= 0) - { - return DateTimeOffset.MinValue; - } - - var seconds = Math.DivRem(nanoseconds, 1_000_000_000, out var remainder); - var ticks = remainder / NanosecondsPerTick; - try - { - var baseTime = DateTimeOffset.FromUnixTimeSeconds(seconds); - return baseTime.AddTicks(ticks); - } - catch (ArgumentOutOfRangeException) - { - return DateTimeOffset.UnixEpoch; - } - } -} + } + + public static DateTimeOffset FromUnixNanoseconds(long nanoseconds) + { + if (nanoseconds <= 0) + { + return DateTimeOffset.MinValue; + } + + var seconds = Math.DivRem(nanoseconds, 1_000_000_000, out var remainder); + var ticks = remainder / NanosecondsPerTick; + try + { + var baseTime = DateTimeOffset.FromUnixTimeSeconds(seconds); + return baseTime.AddTicks(ticks); + } + catch (ArgumentOutOfRangeException) + { + return DateTimeOffset.UnixEpoch; + } + } +} diff --git a/src/StellaOps.Zastava.Observer/ContainerRuntime/Cri/CriModels.cs b/src/Zastava/StellaOps.Zastava.Observer/ContainerRuntime/Cri/CriModels.cs similarity index 97% rename from src/StellaOps.Zastava.Observer/ContainerRuntime/Cri/CriModels.cs rename to src/Zastava/StellaOps.Zastava.Observer/ContainerRuntime/Cri/CriModels.cs index c08d314e..0383f066 100644 --- a/src/StellaOps.Zastava.Observer/ContainerRuntime/Cri/CriModels.cs +++ b/src/Zastava/StellaOps.Zastava.Observer/ContainerRuntime/Cri/CriModels.cs @@ -1,12 +1,12 @@ -using StellaOps.Zastava.Observer.Configuration; - -namespace StellaOps.Zastava.Observer.ContainerRuntime.Cri; - -internal sealed record CriRuntimeIdentity( - string RuntimeName, - string RuntimeVersion, - string RuntimeApiVersion); - +using StellaOps.Zastava.Observer.Configuration; + +namespace StellaOps.Zastava.Observer.ContainerRuntime.Cri; + +internal sealed record CriRuntimeIdentity( + string RuntimeName, + string RuntimeVersion, + string RuntimeApiVersion); + internal sealed record CriContainerInfo( string Id, string PodSandboxId, @@ -23,23 +23,23 @@ internal sealed record CriContainerInfo( string? Reason, string? Message, int? 
Pid); - -internal static class CriLabelKeys -{ - public const string PodName = "io.kubernetes.pod.name"; - public const string PodNamespace = "io.kubernetes.pod.namespace"; - public const string PodUid = "io.kubernetes.pod.uid"; - public const string ContainerName = "io.kubernetes.container.name"; -} - -internal static class ContainerRuntimeEngineExtensions -{ - public static string ToEngineString(this ContainerRuntimeEngine engine) - => engine switch - { - ContainerRuntimeEngine.Containerd => "containerd", - ContainerRuntimeEngine.CriO => "cri-o", - ContainerRuntimeEngine.Docker => "docker", - _ => "unknown" - }; -} + +internal static class CriLabelKeys +{ + public const string PodName = "io.kubernetes.pod.name"; + public const string PodNamespace = "io.kubernetes.pod.namespace"; + public const string PodUid = "io.kubernetes.pod.uid"; + public const string ContainerName = "io.kubernetes.container.name"; +} + +internal static class ContainerRuntimeEngineExtensions +{ + public static string ToEngineString(this ContainerRuntimeEngine engine) + => engine switch + { + ContainerRuntimeEngine.Containerd => "containerd", + ContainerRuntimeEngine.CriO => "cri-o", + ContainerRuntimeEngine.Docker => "docker", + _ => "unknown" + }; +} diff --git a/src/StellaOps.Zastava.Observer/ContainerRuntime/Cri/CriRuntimeClient.cs b/src/Zastava/StellaOps.Zastava.Observer/ContainerRuntime/Cri/CriRuntimeClient.cs similarity index 97% rename from src/StellaOps.Zastava.Observer/ContainerRuntime/Cri/CriRuntimeClient.cs rename to src/Zastava/StellaOps.Zastava.Observer/ContainerRuntime/Cri/CriRuntimeClient.cs index 669ba856..849741c2 100644 --- a/src/StellaOps.Zastava.Observer/ContainerRuntime/Cri/CriRuntimeClient.cs +++ b/src/Zastava/StellaOps.Zastava.Observer/ContainerRuntime/Cri/CriRuntimeClient.cs @@ -7,89 +7,89 @@ using Grpc.Net.Client; using Microsoft.Extensions.Logging; using StellaOps.Zastava.Observer.Configuration; using StellaOps.Zastava.Observer.Cri; - -namespace StellaOps.Zastava.Observer.ContainerRuntime.Cri; - -internal interface ICriRuntimeClient : IAsyncDisposable -{ - ContainerRuntimeEndpointOptions Endpoint { get; } - Task<CriRuntimeIdentity> GetIdentityAsync(CancellationToken cancellationToken); - Task<IReadOnlyList<CriContainerInfo>> ListContainersAsync(ContainerState state, CancellationToken cancellationToken); - Task<CriContainerInfo?> GetContainerStatusAsync(string containerId, CancellationToken cancellationToken); -} - -internal sealed class CriRuntimeClient : ICriRuntimeClient -{ - private static readonly object SwitchLock = new(); - private static bool http2SwitchApplied; - - private readonly GrpcChannel channel; - private readonly RuntimeService.RuntimeServiceClient client; - private readonly ILogger<CriRuntimeClient> logger; - - public CriRuntimeClient(ContainerRuntimeEndpointOptions endpoint, ILogger<CriRuntimeClient> logger) - { - ArgumentNullException.ThrowIfNull(endpoint); - this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); - Endpoint = endpoint; - - EnsureHttp2Switch(); - channel = CreateChannel(endpoint); - client = new RuntimeService.RuntimeServiceClient(channel); - } - - public ContainerRuntimeEndpointOptions Endpoint { get; } - - public async Task<CriRuntimeIdentity> GetIdentityAsync(CancellationToken cancellationToken) - { - var response = await client.VersionAsync(new VersionRequest(), cancellationToken: cancellationToken).ConfigureAwait(false); - return new CriRuntimeIdentity( - RuntimeName: response.RuntimeName ?? 
Endpoint.Engine.ToEngineString(), - RuntimeVersion: response.RuntimeVersion ?? "unknown", - RuntimeApiVersion: response.RuntimeApiVersion ?? response.Version ?? "unknown"); - } - - public async Task<IReadOnlyList<CriContainerInfo>> ListContainersAsync(ContainerState state, CancellationToken cancellationToken) - { - var request = new ListContainersRequest - { - Filter = new ContainerFilter - { - State = new ContainerStateValue - { - State = state - } - } - }; - - try - { - var response = await client.ListContainersAsync(request, cancellationToken: cancellationToken).ConfigureAwait(false); - if (response.Containers is null || response.Containers.Count == 0) - { - return Array.Empty<CriContainerInfo>(); - } - - return response.Containers - .Select(CriConversions.ToContainerInfo) - .ToArray(); - } - catch (RpcException ex) when (ex.StatusCode == StatusCode.Unimplemented) - { - logger.LogWarning(ex, "Runtime endpoint {Endpoint} does not support ListContainers for state {State}.", Endpoint.Endpoint, state); - throw; - } - } - - public async Task<CriContainerInfo?> GetContainerStatusAsync(string containerId, CancellationToken cancellationToken) - { - if (string.IsNullOrWhiteSpace(containerId)) - { - return null; - } - - try - { + +namespace StellaOps.Zastava.Observer.ContainerRuntime.Cri; + +internal interface ICriRuntimeClient : IAsyncDisposable +{ + ContainerRuntimeEndpointOptions Endpoint { get; } + Task<CriRuntimeIdentity> GetIdentityAsync(CancellationToken cancellationToken); + Task<IReadOnlyList<CriContainerInfo>> ListContainersAsync(ContainerState state, CancellationToken cancellationToken); + Task<CriContainerInfo?> GetContainerStatusAsync(string containerId, CancellationToken cancellationToken); +} + +internal sealed class CriRuntimeClient : ICriRuntimeClient +{ + private static readonly object SwitchLock = new(); + private static bool http2SwitchApplied; + + private readonly GrpcChannel channel; + private readonly RuntimeService.RuntimeServiceClient client; + private readonly ILogger<CriRuntimeClient> logger; + + public CriRuntimeClient(ContainerRuntimeEndpointOptions endpoint, ILogger<CriRuntimeClient> logger) + { + ArgumentNullException.ThrowIfNull(endpoint); + this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); + Endpoint = endpoint; + + EnsureHttp2Switch(); + channel = CreateChannel(endpoint); + client = new RuntimeService.RuntimeServiceClient(channel); + } + + public ContainerRuntimeEndpointOptions Endpoint { get; } + + public async Task<CriRuntimeIdentity> GetIdentityAsync(CancellationToken cancellationToken) + { + var response = await client.VersionAsync(new VersionRequest(), cancellationToken: cancellationToken).ConfigureAwait(false); + return new CriRuntimeIdentity( + RuntimeName: response.RuntimeName ?? Endpoint.Engine.ToEngineString(), + RuntimeVersion: response.RuntimeVersion ?? "unknown", + RuntimeApiVersion: response.RuntimeApiVersion ?? response.Version ?? 
"unknown"); + } + + public async Task<IReadOnlyList<CriContainerInfo>> ListContainersAsync(ContainerState state, CancellationToken cancellationToken) + { + var request = new ListContainersRequest + { + Filter = new ContainerFilter + { + State = new ContainerStateValue + { + State = state + } + } + }; + + try + { + var response = await client.ListContainersAsync(request, cancellationToken: cancellationToken).ConfigureAwait(false); + if (response.Containers is null || response.Containers.Count == 0) + { + return Array.Empty<CriContainerInfo>(); + } + + return response.Containers + .Select(CriConversions.ToContainerInfo) + .ToArray(); + } + catch (RpcException ex) when (ex.StatusCode == StatusCode.Unimplemented) + { + logger.LogWarning(ex, "Runtime endpoint {Endpoint} does not support ListContainers for state {State}.", Endpoint.Endpoint, state); + throw; + } + } + + public async Task<CriContainerInfo?> GetContainerStatusAsync(string containerId, CancellationToken cancellationToken) + { + if (string.IsNullOrWhiteSpace(containerId)) + { + return null; + } + + try + { var response = await client.ContainerStatusAsync(new ContainerStatusRequest { ContainerId = containerId, @@ -99,20 +99,20 @@ internal sealed class CriRuntimeClient : ICriRuntimeClient if (response.Status is null) { return null; - } - - var baseline = CriConversions.ToContainerInfo(new Container - { - Id = response.Status.Id, - PodSandboxId = response.Status.Metadata?.Name ?? string.Empty, - Metadata = response.Status.Metadata, - Image = response.Status.Image, - ImageRef = response.Status.ImageRef, - Labels = { response.Status.Labels }, - Annotations = { response.Status.Annotations }, - CreatedAt = response.Status.CreatedAt - }); - + } + + var baseline = CriConversions.ToContainerInfo(new Container + { + Id = response.Status.Id, + PodSandboxId = response.Status.Metadata?.Name ?? 
string.Empty, + Metadata = response.Status.Metadata, + Image = response.Status.Image, + ImageRef = response.Status.ImageRef, + Labels = { response.Status.Labels }, + Annotations = { response.Status.Annotations }, + CreatedAt = response.Status.CreatedAt + }); + var merged = CriConversions.MergeStatus(baseline, response.Status); if (response.Info is { Count: > 0 } && TryExtractPid(response.Info, out var pid)) @@ -122,11 +122,11 @@ internal sealed class CriRuntimeClient : ICriRuntimeClient return merged; } - catch (RpcException ex) when (ex.StatusCode is StatusCode.NotFound or StatusCode.DeadlineExceeded) - { - logger.LogDebug(ex, "Container {ContainerId} no longer available when querying status.", containerId); - return null; - } + catch (RpcException ex) when (ex.StatusCode is StatusCode.NotFound or StatusCode.DeadlineExceeded) + { + logger.LogDebug(ex, "Container {ContainerId} no longer available when querying status.", containerId); + return null; + } } private static bool TryExtractPid(IDictionary<string, string> info, out int pid) @@ -159,7 +159,7 @@ internal sealed class CriRuntimeClient : ICriRuntimeClient pid = default; return false; } - + public ValueTask DisposeAsync() { try @@ -173,82 +173,82 @@ internal sealed class CriRuntimeClient : ICriRuntimeClient return ValueTask.CompletedTask; } - - private static void EnsureHttp2Switch() - { - if (http2SwitchApplied) - { - return; - } - - lock (SwitchLock) - { - if (!http2SwitchApplied) - { - AppContext.SetSwitch("System.Net.Http.SocketsHttpHandler.Http2UnencryptedSupport", true); - http2SwitchApplied = true; - } - } - } - - private GrpcChannel CreateChannel(ContainerRuntimeEndpointOptions endpoint) - { - if (IsUnixEndpoint(endpoint.Endpoint, out var unixPath)) - { - var resolvedPath = unixPath; - var handler = new SocketsHttpHandler - { - ConnectCallback = (context, cancellationToken) => ConnectUnixDomainSocketAsync(resolvedPath, cancellationToken), - EnableMultipleHttp2Connections = true - }; - + + private static void EnsureHttp2Switch() + { + if (http2SwitchApplied) + { + return; + } + + lock (SwitchLock) + { + if (!http2SwitchApplied) + { + AppContext.SetSwitch("System.Net.Http.SocketsHttpHandler.Http2UnencryptedSupport", true); + http2SwitchApplied = true; + } + } + } + + private GrpcChannel CreateChannel(ContainerRuntimeEndpointOptions endpoint) + { + if (IsUnixEndpoint(endpoint.Endpoint, out var unixPath)) + { + var resolvedPath = unixPath; + var handler = new SocketsHttpHandler + { + ConnectCallback = (context, cancellationToken) => ConnectUnixDomainSocketAsync(resolvedPath, cancellationToken), + EnableMultipleHttp2Connections = true + }; + if (endpoint.ConnectTimeout is { } timeout && timeout > TimeSpan.Zero) { handler.ConnectTimeout = timeout; } - - return GrpcChannel.ForAddress("http://unix.local", new GrpcChannelOptions - { - HttpHandler = handler, - DisposeHttpClient = true - }); - } - - return GrpcChannel.ForAddress(endpoint.Endpoint, new GrpcChannelOptions - { - DisposeHttpClient = true - }); - } - - private static bool IsUnixEndpoint(string endpoint, out string path) - { - if (endpoint.StartsWith("unix://", StringComparison.OrdinalIgnoreCase)) - { - path = endpoint["unix://".Length..]; - return true; - } - - path = string.Empty; - return false; - } - - private static async ValueTask<Stream> ConnectUnixDomainSocketAsync(string unixPath, CancellationToken cancellationToken) - { - var socket = new Socket(AddressFamily.Unix, SocketType.Stream, ProtocolType.Unspecified) - { - NoDelay = true - }; - - try - { - var endpoint = new 
UnixDomainSocketEndPoint(unixPath); - await socket.ConnectAsync(endpoint, cancellationToken).ConfigureAwait(false); - return new NetworkStream(socket, ownsSocket: true); - } - catch - { - socket.Dispose(); - throw; - } - } -} + + return GrpcChannel.ForAddress("http://unix.local", new GrpcChannelOptions + { + HttpHandler = handler, + DisposeHttpClient = true + }); + } + + return GrpcChannel.ForAddress(endpoint.Endpoint, new GrpcChannelOptions + { + DisposeHttpClient = true + }); + } + + private static bool IsUnixEndpoint(string endpoint, out string path) + { + if (endpoint.StartsWith("unix://", StringComparison.OrdinalIgnoreCase)) + { + path = endpoint["unix://".Length..]; + return true; + } + + path = string.Empty; + return false; + } + + private static async ValueTask<Stream> ConnectUnixDomainSocketAsync(string unixPath, CancellationToken cancellationToken) + { + var socket = new Socket(AddressFamily.Unix, SocketType.Stream, ProtocolType.Unspecified) + { + NoDelay = true + }; + + try + { + var endpoint = new UnixDomainSocketEndPoint(unixPath); + await socket.ConnectAsync(endpoint, cancellationToken).ConfigureAwait(false); + return new NetworkStream(socket, ownsSocket: true); + } + catch + { + socket.Dispose(); + throw; + } + } +} diff --git a/src/StellaOps.Zastava.Observer/ContainerRuntime/Cri/CriRuntimeClientFactory.cs b/src/Zastava/StellaOps.Zastava.Observer/ContainerRuntime/Cri/CriRuntimeClientFactory.cs similarity index 97% rename from src/StellaOps.Zastava.Observer/ContainerRuntime/Cri/CriRuntimeClientFactory.cs rename to src/Zastava/StellaOps.Zastava.Observer/ContainerRuntime/Cri/CriRuntimeClientFactory.cs index 0947b9f3..c7b5e8d0 100644 --- a/src/StellaOps.Zastava.Observer/ContainerRuntime/Cri/CriRuntimeClientFactory.cs +++ b/src/Zastava/StellaOps.Zastava.Observer/ContainerRuntime/Cri/CriRuntimeClientFactory.cs @@ -1,26 +1,26 @@ -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging; -using StellaOps.Zastava.Observer.Configuration; - -namespace StellaOps.Zastava.Observer.ContainerRuntime.Cri; - -internal interface ICriRuntimeClientFactory -{ - ICriRuntimeClient Create(ContainerRuntimeEndpointOptions endpoint); -} - -internal sealed class CriRuntimeClientFactory : ICriRuntimeClientFactory -{ - private readonly IServiceProvider serviceProvider; - - public CriRuntimeClientFactory(IServiceProvider serviceProvider) - { - this.serviceProvider = serviceProvider ?? throw new ArgumentNullException(nameof(serviceProvider)); - } - - public ICriRuntimeClient Create(ContainerRuntimeEndpointOptions endpoint) - { - var logger = serviceProvider.GetRequiredService<ILogger<CriRuntimeClient>>(); - return new CriRuntimeClient(endpoint, logger); - } -} +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using StellaOps.Zastava.Observer.Configuration; + +namespace StellaOps.Zastava.Observer.ContainerRuntime.Cri; + +internal interface ICriRuntimeClientFactory +{ + ICriRuntimeClient Create(ContainerRuntimeEndpointOptions endpoint); +} + +internal sealed class CriRuntimeClientFactory : ICriRuntimeClientFactory +{ + private readonly IServiceProvider serviceProvider; + + public CriRuntimeClientFactory(IServiceProvider serviceProvider) + { + this.serviceProvider = serviceProvider ?? 
throw new ArgumentNullException(nameof(serviceProvider)); + } + + public ICriRuntimeClient Create(ContainerRuntimeEndpointOptions endpoint) + { + var logger = serviceProvider.GetRequiredService<ILogger<CriRuntimeClient>>(); + return new CriRuntimeClient(endpoint, logger); + } +} diff --git a/src/StellaOps.Zastava.Observer/DependencyInjection/ObserverServiceCollectionExtensions.cs b/src/Zastava/StellaOps.Zastava.Observer/DependencyInjection/ObserverServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Zastava.Observer/DependencyInjection/ObserverServiceCollectionExtensions.cs rename to src/Zastava/StellaOps.Zastava.Observer/DependencyInjection/ObserverServiceCollectionExtensions.cs diff --git a/src/StellaOps.Zastava.Observer/Posture/IRuntimePostureCache.cs b/src/Zastava/StellaOps.Zastava.Observer/Posture/IRuntimePostureCache.cs similarity index 100% rename from src/StellaOps.Zastava.Observer/Posture/IRuntimePostureCache.cs rename to src/Zastava/StellaOps.Zastava.Observer/Posture/IRuntimePostureCache.cs diff --git a/src/StellaOps.Zastava.Observer/Posture/IRuntimePostureEvaluator.cs b/src/Zastava/StellaOps.Zastava.Observer/Posture/IRuntimePostureEvaluator.cs similarity index 100% rename from src/StellaOps.Zastava.Observer/Posture/IRuntimePostureEvaluator.cs rename to src/Zastava/StellaOps.Zastava.Observer/Posture/IRuntimePostureEvaluator.cs diff --git a/src/StellaOps.Zastava.Observer/Posture/RuntimePostureCache.cs b/src/Zastava/StellaOps.Zastava.Observer/Posture/RuntimePostureCache.cs similarity index 100% rename from src/StellaOps.Zastava.Observer/Posture/RuntimePostureCache.cs rename to src/Zastava/StellaOps.Zastava.Observer/Posture/RuntimePostureCache.cs diff --git a/src/StellaOps.Zastava.Observer/Posture/RuntimePostureCacheEntry.cs b/src/Zastava/StellaOps.Zastava.Observer/Posture/RuntimePostureCacheEntry.cs similarity index 100% rename from src/StellaOps.Zastava.Observer/Posture/RuntimePostureCacheEntry.cs rename to src/Zastava/StellaOps.Zastava.Observer/Posture/RuntimePostureCacheEntry.cs diff --git a/src/StellaOps.Zastava.Observer/Posture/RuntimePostureEvaluationResult.cs b/src/Zastava/StellaOps.Zastava.Observer/Posture/RuntimePostureEvaluationResult.cs similarity index 100% rename from src/StellaOps.Zastava.Observer/Posture/RuntimePostureEvaluationResult.cs rename to src/Zastava/StellaOps.Zastava.Observer/Posture/RuntimePostureEvaluationResult.cs diff --git a/src/StellaOps.Zastava.Observer/Posture/RuntimePostureEvaluator.cs b/src/Zastava/StellaOps.Zastava.Observer/Posture/RuntimePostureEvaluator.cs similarity index 100% rename from src/StellaOps.Zastava.Observer/Posture/RuntimePostureEvaluator.cs rename to src/Zastava/StellaOps.Zastava.Observer/Posture/RuntimePostureEvaluator.cs diff --git a/src/StellaOps.Zastava.Observer/Program.cs b/src/Zastava/StellaOps.Zastava.Observer/Program.cs similarity index 98% rename from src/StellaOps.Zastava.Observer/Program.cs rename to src/Zastava/StellaOps.Zastava.Observer/Program.cs index 13193771..f576c848 100644 --- a/src/StellaOps.Zastava.Observer/Program.cs +++ b/src/Zastava/StellaOps.Zastava.Observer/Program.cs @@ -1,7 +1,7 @@ -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Hosting; -using StellaOps.Zastava.Observer.Worker; - +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; +using StellaOps.Zastava.Observer.Worker; + var builder = Host.CreateApplicationBuilder(args); builder.Services.AddZastavaObserver(builder.Configuration); diff --git 
a/src/StellaOps.Zastava.Observer/Properties/AssemblyInfo.cs b/src/Zastava/StellaOps.Zastava.Observer/Properties/AssemblyInfo.cs similarity index 100% rename from src/StellaOps.Zastava.Observer/Properties/AssemblyInfo.cs rename to src/Zastava/StellaOps.Zastava.Observer/Properties/AssemblyInfo.cs diff --git a/src/StellaOps.Zastava.Observer/Protos/runtime/v1/runtime.proto b/src/Zastava/StellaOps.Zastava.Observer/Protos/runtime/v1/runtime.proto similarity index 97% rename from src/StellaOps.Zastava.Observer/Protos/runtime/v1/runtime.proto rename to src/Zastava/StellaOps.Zastava.Observer/Protos/runtime/v1/runtime.proto index 93061146..37db8c43 100644 --- a/src/StellaOps.Zastava.Observer/Protos/runtime/v1/runtime.proto +++ b/src/Zastava/StellaOps.Zastava.Observer/Protos/runtime/v1/runtime.proto @@ -1,1855 +1,1855 @@ -/* -Copyright 2020 The Kubernetes Authors. - -Licensed under the Apache License, Version 2.0 (the "License"); -you may not use this file except in compliance with the License. -You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - -Unless required by applicable law or agreed to in writing, software -distributed under the License is distributed on an "AS IS" BASIS, -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -See the License for the specific language governing permissions and -limitations under the License. -*/ - -// To regenerate api.pb.go run `hack/update-codegen.sh protobindings` -syntax = "proto3"; - -package runtime.v1; -option go_package = "k8s.io/cri-api/pkg/apis/runtime/v1"; -option csharp_namespace = "StellaOps.Zastava.Observer.Cri"; - - - -// Runtime service defines the public APIs for remote container runtimes -service RuntimeService { - // Version returns the runtime name, runtime version, and runtime API version. - rpc Version(VersionRequest) returns (VersionResponse) {} - - // RunPodSandbox creates and starts a pod-level sandbox. Runtimes must ensure - // the sandbox is in the ready state on success. - rpc RunPodSandbox(RunPodSandboxRequest) returns (RunPodSandboxResponse) {} - // StopPodSandbox stops any running process that is part of the sandbox and - // reclaims network resources (e.g., IP addresses) allocated to the sandbox. - // If there are any running containers in the sandbox, they must be forcibly - // terminated. - // This call is idempotent, and must not return an error if all relevant - // resources have already been reclaimed. kubelet will call StopPodSandbox - // at least once before calling RemovePodSandbox. It will also attempt to - // reclaim resources eagerly, as soon as a sandbox is not needed. Hence, - // multiple StopPodSandbox calls are expected. - rpc StopPodSandbox(StopPodSandboxRequest) returns (StopPodSandboxResponse) {} - // RemovePodSandbox removes the sandbox. If there are any running containers - // in the sandbox, they must be forcibly terminated and removed. - // This call is idempotent, and must not return an error if the sandbox has - // already been removed. - rpc RemovePodSandbox(RemovePodSandboxRequest) returns (RemovePodSandboxResponse) {} - // PodSandboxStatus returns the status of the PodSandbox. If the PodSandbox is not - // present, returns an error. - rpc PodSandboxStatus(PodSandboxStatusRequest) returns (PodSandboxStatusResponse) {} - // ListPodSandbox returns a list of PodSandboxes. 
- rpc ListPodSandbox(ListPodSandboxRequest) returns (ListPodSandboxResponse) {} - - // CreateContainer creates a new container in specified PodSandbox - rpc CreateContainer(CreateContainerRequest) returns (CreateContainerResponse) {} - // StartContainer starts the container. - rpc StartContainer(StartContainerRequest) returns (StartContainerResponse) {} - // StopContainer stops a running container with a grace period (i.e., timeout). - // This call is idempotent, and must not return an error if the container has - // already been stopped. - // The runtime must forcibly kill the container after the grace period is - // reached. - rpc StopContainer(StopContainerRequest) returns (StopContainerResponse) {} - // RemoveContainer removes the container. If the container is running, the - // container must be forcibly removed. - // This call is idempotent, and must not return an error if the container has - // already been removed. - rpc RemoveContainer(RemoveContainerRequest) returns (RemoveContainerResponse) {} - // ListContainers lists all containers by filters. - rpc ListContainers(ListContainersRequest) returns (ListContainersResponse) {} - // ContainerStatus returns status of the container. If the container is not - // present, returns an error. - rpc ContainerStatus(ContainerStatusRequest) returns (ContainerStatusResponse) {} - // UpdateContainerResources updates ContainerConfig of the container synchronously. - // If runtime fails to transactionally update the requested resources, an error is returned. - rpc UpdateContainerResources(UpdateContainerResourcesRequest) returns (UpdateContainerResourcesResponse) {} - // ReopenContainerLog asks runtime to reopen the stdout/stderr log file - // for the container. This is often called after the log file has been - // rotated. If the container is not running, container runtime can choose - // to either create a new log file and return nil, or return an error. - // Once it returns error, new container log file MUST NOT be created. - rpc ReopenContainerLog(ReopenContainerLogRequest) returns (ReopenContainerLogResponse) {} - - // ExecSync runs a command in a container synchronously. - rpc ExecSync(ExecSyncRequest) returns (ExecSyncResponse) {} - // Exec prepares a streaming endpoint to execute a command in the container. - rpc Exec(ExecRequest) returns (ExecResponse) {} - // Attach prepares a streaming endpoint to attach to a running container. - rpc Attach(AttachRequest) returns (AttachResponse) {} - // PortForward prepares a streaming endpoint to forward ports from a PodSandbox. - rpc PortForward(PortForwardRequest) returns (PortForwardResponse) {} - - // ContainerStats returns stats of the container. If the container does not - // exist, the call returns an error. - rpc ContainerStats(ContainerStatsRequest) returns (ContainerStatsResponse) {} - // ListContainerStats returns stats of all running containers. - rpc ListContainerStats(ListContainerStatsRequest) returns (ListContainerStatsResponse) {} - - // PodSandboxStats returns stats of the pod sandbox. If the pod sandbox does not - // exist, the call returns an error. - rpc PodSandboxStats(PodSandboxStatsRequest) returns (PodSandboxStatsResponse) {} - // ListPodSandboxStats returns stats of the pod sandboxes matching a filter. - rpc ListPodSandboxStats(ListPodSandboxStatsRequest) returns (ListPodSandboxStatsResponse) {} - - // UpdateRuntimeConfig updates the runtime configuration based on the given request. 
- rpc UpdateRuntimeConfig(UpdateRuntimeConfigRequest) returns (UpdateRuntimeConfigResponse) {} - - // Status returns the status of the runtime. - rpc Status(StatusRequest) returns (StatusResponse) {} - - // CheckpointContainer checkpoints a container - rpc CheckpointContainer(CheckpointContainerRequest) returns (CheckpointContainerResponse) {} - - // GetContainerEvents gets container events from the CRI runtime - rpc GetContainerEvents(GetEventsRequest) returns (stream ContainerEventResponse) {} - - // ListMetricDescriptors gets the descriptors for the metrics that will be returned in ListPodSandboxMetrics. - // This list should be static at startup: either the client and server restart together when - // adding or removing metrics descriptors, or they should not change. - // Put differently, if ListPodSandboxMetrics references a name that is not described in the initial - // ListMetricDescriptors call, then the metric will not be broadcasted. - rpc ListMetricDescriptors(ListMetricDescriptorsRequest) returns (ListMetricDescriptorsResponse) {} - - // ListPodSandboxMetrics gets pod sandbox metrics from CRI Runtime - rpc ListPodSandboxMetrics(ListPodSandboxMetricsRequest) returns (ListPodSandboxMetricsResponse) {} - - // RuntimeConfig returns configuration information of the runtime. - // A couple of notes: - // - The RuntimeConfigRequest object is not to be confused with the contents of UpdateRuntimeConfigRequest. - // The former is for having runtime tell Kubelet what to do, the latter vice versa. - // - It is the expectation of the Kubelet that these fields are static for the lifecycle of the Kubelet. - // The Kubelet will not re-request the RuntimeConfiguration after startup, and CRI implementations should - // avoid updating them without a full node reboot. - rpc RuntimeConfig(RuntimeConfigRequest) returns (RuntimeConfigResponse) {} -} - -// ImageService defines the public APIs for managing images. -service ImageService { - // ListImages lists existing images. - rpc ListImages(ListImagesRequest) returns (ListImagesResponse) {} - // ImageStatus returns the status of the image. If the image is not - // present, returns a response with ImageStatusResponse.Image set to - // nil. - rpc ImageStatus(ImageStatusRequest) returns (ImageStatusResponse) {} - // PullImage pulls an image with authentication config. - rpc PullImage(PullImageRequest) returns (PullImageResponse) {} - // RemoveImage removes the image. - // This call is idempotent, and must not return an error if the image has - // already been removed. - rpc RemoveImage(RemoveImageRequest) returns (RemoveImageResponse) {} - // ImageFSInfo returns information of the filesystem that is used to store images. - rpc ImageFsInfo(ImageFsInfoRequest) returns (ImageFsInfoResponse) {} -} - -message VersionRequest { - // Version of the kubelet runtime API. - string version = 1; -} - -message VersionResponse { - // Version of the kubelet runtime API. - string version = 1; - // Name of the container runtime. - string runtime_name = 2; - // Version of the container runtime. The string must be - // semver-compatible. - string runtime_version = 3; - // API version of the container runtime. The string must be - // semver-compatible. - string runtime_api_version = 4; -} - -// DNSConfig specifies the DNS servers and search domains of a sandbox. -message DNSConfig { - // List of DNS servers of the cluster. - repeated string servers = 1; - // List of DNS search domains of the cluster. - repeated string searches = 2; - // List of DNS options. 
See https://linux.die.net/man/5/resolv.conf - // for all available options. - repeated string options = 3; -} - -enum Protocol { - TCP = 0; - UDP = 1; - SCTP = 2; -} - -// PortMapping specifies the port mapping configurations of a sandbox. -message PortMapping { - // Protocol of the port mapping. - Protocol protocol = 1; - // Port number within the container. Default: 0 (not specified). - int32 container_port = 2; - // Port number on the host. Default: 0 (not specified). - int32 host_port = 3; - // Host IP. - string host_ip = 4; -} - -enum MountPropagation { - // No mount propagation ("rprivate" in Linux terminology). - PROPAGATION_PRIVATE = 0; - // Mounts get propagated from the host to the container ("rslave" in Linux). - PROPAGATION_HOST_TO_CONTAINER = 1; - // Mounts get propagated from the host to the container and from the - // container to the host ("rshared" in Linux). - PROPAGATION_BIDIRECTIONAL = 2; -} - -// Mount specifies a host volume to mount into a container. -message Mount { - // Path of the mount within the container. - string container_path = 1; - // Path of the mount on the host. If the hostPath doesn't exist, then runtimes - // should report error. If the hostpath is a symbolic link, runtimes should - // follow the symlink and mount the real destination to container. - string host_path = 2; - // If set, the mount is read-only. - bool readonly = 3; - // If set, the mount needs SELinux relabeling. - bool selinux_relabel = 4; - // Requested propagation mode. - MountPropagation propagation = 5; - // UidMappings specifies the runtime UID mappings for the mount. - repeated IDMapping uidMappings = 6; - // GidMappings specifies the runtime GID mappings for the mount. - repeated IDMapping gidMappings = 7; -} - -// IDMapping describes host to container ID mappings for a pod sandbox. -message IDMapping { - // HostId is the id on the host. - uint32 host_id = 1; - // ContainerId is the id in the container. - uint32 container_id = 2; - // Length is the size of the range to map. - uint32 length = 3; -} - -// A NamespaceMode describes the intended namespace configuration for each -// of the namespaces (Network, PID, IPC) in NamespaceOption. Runtimes should -// map these modes as appropriate for the technology underlying the runtime. -enum NamespaceMode { - // A POD namespace is common to all containers in a pod. - // For example, a container with a PID namespace of POD expects to view - // all of the processes in all of the containers in the pod. - POD = 0; - // A CONTAINER namespace is restricted to a single container. - // For example, a container with a PID namespace of CONTAINER expects to - // view only the processes in that container. - CONTAINER = 1; - // A NODE namespace is the namespace of the Kubernetes node. - // For example, a container with a PID namespace of NODE expects to view - // all of the processes on the host running the kubelet. - NODE = 2; - // TARGET targets the namespace of another container. When this is specified, - // a target_id must be specified in NamespaceOption and refer to a container - // previously created with NamespaceMode CONTAINER. This containers namespace - // will be made to match that of container target_id. - // For example, a container with a PID namespace of TARGET expects to view - // all of the processes that container target_id can view. - TARGET = 3; -} - -// UserNamespace describes the intended user namespace configuration for a pod sandbox. -message UserNamespace { - // Mode is the NamespaceMode for this UserNamespace. 
- // Note: NamespaceMode for UserNamespace currently supports only POD and NODE, not CONTAINER OR TARGET. - NamespaceMode mode = 1; - - // Uids specifies the UID mappings for the user namespace. - repeated IDMapping uids = 2; - - // Gids specifies the GID mappings for the user namespace. - repeated IDMapping gids = 3; -} - -// NamespaceOption provides options for Linux namespaces. -message NamespaceOption { - // Network namespace for this container/sandbox. - // Note: There is currently no way to set CONTAINER scoped network in the Kubernetes API. - // Namespaces currently set by the kubelet: POD, NODE - NamespaceMode network = 1; - // PID namespace for this container/sandbox. - // Note: The CRI default is POD, but the v1.PodSpec default is CONTAINER. - // The kubelet's runtime manager will set this to CONTAINER explicitly for v1 pods. - // Namespaces currently set by the kubelet: POD, CONTAINER, NODE, TARGET - NamespaceMode pid = 2; - // IPC namespace for this container/sandbox. - // Note: There is currently no way to set CONTAINER scoped IPC in the Kubernetes API. - // Namespaces currently set by the kubelet: POD, NODE - NamespaceMode ipc = 3; - // Target Container ID for NamespaceMode of TARGET. This container must have been - // previously created in the same pod. It is not possible to specify different targets - // for each namespace. - string target_id = 4; - // UsernsOptions for this pod sandbox. - // The Kubelet picks the user namespace configuration to use for the pod sandbox. The mappings - // are specified as part of the UserNamespace struct. If the struct is nil, then the POD mode - // must be assumed. This is done for backward compatibility with older Kubelet versions that - // do not set a user namespace. - UserNamespace userns_options = 5; -} - -// Int64Value is the wrapper of int64. -message Int64Value { - // The value. - int64 value = 1; -} - -// LinuxSandboxSecurityContext holds linux security configuration that will be -// applied to a sandbox. Note that: -// 1) It does not apply to containers in the pods. -// 2) It may not be applicable to a PodSandbox which does not contain any running -// process. -message LinuxSandboxSecurityContext { - // Configurations for the sandbox's namespaces. - // This will be used only if the PodSandbox uses namespace for isolation. - NamespaceOption namespace_options = 1; - // Optional SELinux context to be applied. - SELinuxOption selinux_options = 2; - // UID to run sandbox processes as, when applicable. - Int64Value run_as_user = 3; - // GID to run sandbox processes as, when applicable. run_as_group should only - // be specified when run_as_user is specified; otherwise, the runtime MUST error. - Int64Value run_as_group = 8; - // If set, the root filesystem of the sandbox is read-only. - bool readonly_rootfs = 4; - // List of groups applied to the first process run in the sandbox, in - // addition to the sandbox's primary GID, and group memberships defined - // in the container image for the sandbox's primary UID of the container process. - // If the list is empty, no additional groups are added to any container. - // Note that group memberships defined in the container image for the sandbox's primary UID - // of the container process are still effective, even if they are not included in this list. - repeated int64 supplemental_groups = 5; - // Indicates whether the sandbox will be asked to run a privileged - // container. If a privileged container is to be executed within it, this - // MUST be true. 
- // This allows a sandbox to take additional security precautions if no - // privileged containers are expected to be run. - bool privileged = 6; - // Seccomp profile for the sandbox. - SecurityProfile seccomp = 9; - // AppArmor profile for the sandbox. - SecurityProfile apparmor = 10; - // Seccomp profile for the sandbox, candidate values are: - // * runtime/default: the default profile for the container runtime - // * unconfined: unconfined profile, ie, no seccomp sandboxing - // * localhost/<full-path-to-profile>: the profile installed on the node. - // <full-path-to-profile> is the full path of the profile. - // Default: "", which is identical with unconfined. - string seccomp_profile_path = 7 [deprecated=true]; -} - -// A security profile which can be used for sandboxes and containers. -message SecurityProfile { - // Available profile types. - enum ProfileType { - // The container runtime default profile should be used. - RuntimeDefault = 0; - // Disable the feature for the sandbox or the container. - Unconfined = 1; - // A pre-defined profile on the node should be used. - Localhost = 2; - } - // Indicator which `ProfileType` should be applied. - ProfileType profile_type = 1; - // Indicates that a pre-defined profile on the node should be used. - // Must only be set if `ProfileType` is `Localhost`. - // For seccomp, it must be an absolute path to the seccomp profile. - // For AppArmor, this field is the AppArmor `<profile name>/` - string localhost_ref = 2; -} - -// LinuxPodSandboxConfig holds platform-specific configurations for Linux -// host platforms and Linux-based containers. -message LinuxPodSandboxConfig { - // Parent cgroup of the PodSandbox. - // The cgroupfs style syntax will be used, but the container runtime can - // convert it to systemd semantics if needed. - string cgroup_parent = 1; - // LinuxSandboxSecurityContext holds sandbox security attributes. - LinuxSandboxSecurityContext security_context = 2; - // Sysctls holds linux sysctls config for the sandbox. - map<string, string> sysctls = 3; - // Optional overhead represents the overheads associated with this sandbox - LinuxContainerResources overhead = 4; - // Optional resources represents the sum of container resources for this sandbox - LinuxContainerResources resources = 5; -} - -// PodSandboxMetadata holds all necessary information for building the sandbox name. -// The container runtime is encouraged to expose the metadata associated with the -// PodSandbox in its user interface for better user experience. For example, -// the runtime can construct a unique PodSandboxName based on the metadata. -message PodSandboxMetadata { - // Pod name of the sandbox. Same as the pod name in the Pod ObjectMeta. - string name = 1; - // Pod UID of the sandbox. Same as the pod UID in the Pod ObjectMeta. - string uid = 2; - // Pod namespace of the sandbox. Same as the pod namespace in the Pod ObjectMeta. - string namespace = 3; - // Attempt number of creating the sandbox. Default: 0. - uint32 attempt = 4; -} - -// PodSandboxConfig holds all the required and optional fields for creating a -// sandbox. -message PodSandboxConfig { - // Metadata of the sandbox. This information will uniquely identify the - // sandbox, and the runtime should leverage this to ensure correct - // operation. The runtime may also use this information to improve UX, such - // as by constructing a readable name. - PodSandboxMetadata metadata = 1; - // Hostname of the sandbox. Hostname could only be empty when the pod - // network namespace is NODE. 
- string hostname = 2; - // Path to the directory on the host in which container log files are - // stored. - // By default the log of a container going into the LogDirectory will be - // hooked up to STDOUT and STDERR. However, the LogDirectory may contain - // binary log files with structured logging data from the individual - // containers. For example, the files might be newline separated JSON - // structured logs, systemd-journald journal files, gRPC trace files, etc. - // E.g., - // PodSandboxConfig.LogDirectory = `/var/log/pods/<NAMESPACE>_<NAME>_<UID>/` - // ContainerConfig.LogPath = `containerName/Instance#.log` - string log_directory = 3; - // DNS config for the sandbox. - DNSConfig dns_config = 4; - // Port mappings for the sandbox. - repeated PortMapping port_mappings = 5; - // Key-value pairs that may be used to scope and select individual resources. - map<string, string> labels = 6; - // Unstructured key-value map that may be set by the kubelet to store and - // retrieve arbitrary metadata. This will include any annotations set on a - // pod through the Kubernetes API. - // - // Annotations MUST NOT be altered by the runtime; the annotations stored - // here MUST be returned in the PodSandboxStatus associated with the pod - // this PodSandboxConfig creates. - // - // In general, in order to preserve a well-defined interface between the - // kubelet and the container runtime, annotations SHOULD NOT influence - // runtime behaviour. - // - // Annotations can also be useful for runtime authors to experiment with - // new features that are opaque to the Kubernetes APIs (both user-facing - // and the CRI). Whenever possible, however, runtime authors SHOULD - // consider proposing new typed fields for any new features instead. - map<string, string> annotations = 7; - // Optional configurations specific to Linux hosts. - LinuxPodSandboxConfig linux = 8; - // Optional configurations specific to Windows hosts. - WindowsPodSandboxConfig windows = 9; -} - -message RunPodSandboxRequest { - // Configuration for creating a PodSandbox. - PodSandboxConfig config = 1; - // Named runtime configuration to use for this PodSandbox. - // If the runtime handler is unknown, this request should be rejected. An - // empty string should select the default handler, equivalent to the - // behavior before this feature was added. - // See https://git.k8s.io/enhancements/keps/sig-node/585-runtime-class - string runtime_handler = 2; -} - -message RunPodSandboxResponse { - // ID of the PodSandbox to run. - string pod_sandbox_id = 1; -} - -message StopPodSandboxRequest { - // ID of the PodSandbox to stop. - string pod_sandbox_id = 1; -} - -message StopPodSandboxResponse {} - -message RemovePodSandboxRequest { - // ID of the PodSandbox to remove. - string pod_sandbox_id = 1; -} - -message RemovePodSandboxResponse {} - -message PodSandboxStatusRequest { - // ID of the PodSandbox for which to retrieve status. - string pod_sandbox_id = 1; - // Verbose indicates whether to return extra information about the pod sandbox. - bool verbose = 2; -} - -// PodIP represents an ip of a Pod -message PodIP{ - // an ip is a string representation of an IPv4 or an IPv6 - string ip = 1; -} -// PodSandboxNetworkStatus is the status of the network for a PodSandbox. -// Currently ignored for pods sharing the host networking namespace. -message PodSandboxNetworkStatus { - // IP address of the PodSandbox. 
- string ip = 1; - // list of additional ips (not inclusive of PodSandboxNetworkStatus.Ip) of the PodSandBoxNetworkStatus - repeated PodIP additional_ips = 2; -} - -// Namespace contains paths to the namespaces. -message Namespace { - // Namespace options for Linux namespaces. - NamespaceOption options = 2; -} - -// LinuxSandboxStatus contains status specific to Linux sandboxes. -message LinuxPodSandboxStatus { - // Paths to the sandbox's namespaces. - Namespace namespaces = 1; -} - -enum PodSandboxState { - SANDBOX_READY = 0; - SANDBOX_NOTREADY = 1; -} - -// PodSandboxStatus contains the status of the PodSandbox. -message PodSandboxStatus { - // ID of the sandbox. - string id = 1; - // Metadata of the sandbox. - PodSandboxMetadata metadata = 2; - // State of the sandbox. - PodSandboxState state = 3; - // Creation timestamp of the sandbox in nanoseconds. Must be > 0. - int64 created_at = 4; - // Network contains network status if network is handled by the runtime. - PodSandboxNetworkStatus network = 5; - // Linux-specific status to a pod sandbox. - LinuxPodSandboxStatus linux = 6; - // Labels are key-value pairs that may be used to scope and select individual resources. - map<string, string> labels = 7; - // Unstructured key-value map holding arbitrary metadata. - // Annotations MUST NOT be altered by the runtime; the value of this field - // MUST be identical to that of the corresponding PodSandboxConfig used to - // instantiate the pod sandbox this status represents. - map<string, string> annotations = 8; - // runtime configuration used for this PodSandbox. - string runtime_handler = 9; -} - -message PodSandboxStatusResponse { - // Status of the PodSandbox. - PodSandboxStatus status = 1; - // Info is extra information of the PodSandbox. The key could be arbitrary string, and - // value should be in json format. The information could include anything useful for - // debug, e.g. network namespace for linux container based container runtime. - // It should only be returned non-empty when Verbose is true. - map<string, string> info = 2; - // Container statuses - repeated ContainerStatus containers_statuses = 3; - // Timestamp at which container and pod statuses were recorded - int64 timestamp = 4; -} - -// PodSandboxStateValue is the wrapper of PodSandboxState. -message PodSandboxStateValue { - // State of the sandbox. - PodSandboxState state = 1; -} - -// PodSandboxFilter is used to filter a list of PodSandboxes. -// All those fields are combined with 'AND' -message PodSandboxFilter { - // ID of the sandbox. - string id = 1; - // State of the sandbox. - PodSandboxStateValue state = 2; - // LabelSelector to select matches. - // Only api.MatchLabels is supported for now and the requirements - // are ANDed. MatchExpressions is not supported yet. - map<string, string> label_selector = 3; -} - -message ListPodSandboxRequest { - // PodSandboxFilter to filter a list of PodSandboxes. - PodSandboxFilter filter = 1; -} - - -// PodSandbox contains minimal information about a sandbox. -message PodSandbox { - // ID of the PodSandbox. - string id = 1; - // Metadata of the PodSandbox. - PodSandboxMetadata metadata = 2; - // State of the PodSandbox. - PodSandboxState state = 3; - // Creation timestamps of the PodSandbox in nanoseconds. Must be > 0. - int64 created_at = 4; - // Labels of the PodSandbox. - map<string, string> labels = 5; - // Unstructured key-value map holding arbitrary metadata. 
- // Annotations MUST NOT be altered by the runtime; the value of this field - // MUST be identical to that of the corresponding PodSandboxConfig used to - // instantiate this PodSandbox. - map<string, string> annotations = 6; - // runtime configuration used for this PodSandbox. - string runtime_handler = 7; -} - -message ListPodSandboxResponse { - // List of PodSandboxes. - repeated PodSandbox items = 1; -} - -message PodSandboxStatsRequest { - // ID of the pod sandbox for which to retrieve stats. - string pod_sandbox_id = 1; -} - -message PodSandboxStatsResponse { - PodSandboxStats stats = 1; -} - -// PodSandboxStatsFilter is used to filter the list of pod sandboxes to retrieve stats for. -// All those fields are combined with 'AND'. -message PodSandboxStatsFilter { - // ID of the pod sandbox. - string id = 1; - // LabelSelector to select matches. - // Only api.MatchLabels is supported for now and the requirements - // are ANDed. MatchExpressions is not supported yet. - map<string, string> label_selector = 2; -} - -message ListPodSandboxStatsRequest { - // Filter for the list request. - PodSandboxStatsFilter filter = 1; -} - -message ListPodSandboxStatsResponse { - // Stats of the pod sandbox. - repeated PodSandboxStats stats = 1; -} - -// PodSandboxAttributes provides basic information of the pod sandbox. -message PodSandboxAttributes { - // ID of the pod sandbox. - string id = 1; - // Metadata of the pod sandbox. - PodSandboxMetadata metadata = 2; - // Key-value pairs that may be used to scope and select individual resources. - map<string,string> labels = 3; - // Unstructured key-value map holding arbitrary metadata. - // Annotations MUST NOT be altered by the runtime; the value of this field - // MUST be identical to that of the corresponding PodSandboxStatus used to - // instantiate the PodSandbox this status represents. - map<string,string> annotations = 4; -} - -// PodSandboxStats provides the resource usage statistics for a pod. -// The linux or windows field will be populated depending on the platform. -message PodSandboxStats { - // Information of the pod. - PodSandboxAttributes attributes = 1; - // Stats from linux. - LinuxPodSandboxStats linux = 2; - // Stats from windows. - WindowsPodSandboxStats windows = 3; -} - -// LinuxPodSandboxStats provides the resource usage statistics for a pod sandbox on linux. -message LinuxPodSandboxStats { - // CPU usage gathered for the pod sandbox. - CpuUsage cpu = 1; - // Memory usage gathered for the pod sandbox. - MemoryUsage memory = 2; - // Network usage gathered for the pod sandbox - NetworkUsage network = 3; - // Stats pertaining to processes in the pod sandbox. - ProcessUsage process = 4; - // Stats of containers in the measured pod sandbox. - repeated ContainerStats containers = 5; -} - -// WindowsPodSandboxStats provides the resource usage statistics for a pod sandbox on windows -message WindowsPodSandboxStats { - // CPU usage gathered for the pod sandbox. - WindowsCpuUsage cpu = 1; - // Memory usage gathered for the pod sandbox. - WindowsMemoryUsage memory = 2; - // Network usage gathered for the pod sandbox - WindowsNetworkUsage network = 3; - // Stats pertaining to processes in the pod sandbox. - WindowsProcessUsage process = 4; - // Stats of containers in the measured pod sandbox. - repeated WindowsContainerStats containers = 5; -} - -// NetworkUsage contains data about network resources. -message NetworkUsage { - // Timestamp in nanoseconds at which the information were collected. Must be > 0. 
- int64 timestamp = 1; - // Stats for the default network interface. - NetworkInterfaceUsage default_interface = 2; - // Stats for all found network interfaces, excluding the default. - repeated NetworkInterfaceUsage interfaces = 3; -} - -// WindowsNetworkUsage contains data about network resources specific to Windows. -message WindowsNetworkUsage { - // Timestamp in nanoseconds at which the information were collected. Must be > 0. - int64 timestamp = 1; - // Stats for the default network interface. - WindowsNetworkInterfaceUsage default_interface = 2; - // Stats for all found network interfaces, excluding the default. - repeated WindowsNetworkInterfaceUsage interfaces = 3; -} - -// NetworkInterfaceUsage contains resource value data about a network interface. -message NetworkInterfaceUsage { - // The name of the network interface. - string name = 1; - // Cumulative count of bytes received. - UInt64Value rx_bytes = 2; - // Cumulative count of receive errors encountered. - UInt64Value rx_errors = 3; - // Cumulative count of bytes transmitted. - UInt64Value tx_bytes = 4; - // Cumulative count of transmit errors encountered. - UInt64Value tx_errors = 5; -} - -// WindowsNetworkInterfaceUsage contains resource value data about a network interface specific for Windows. -message WindowsNetworkInterfaceUsage { - // The name of the network interface. - string name = 1; - // Cumulative count of bytes received. - UInt64Value rx_bytes = 2; - // Cumulative count of receive errors encountered. - UInt64Value rx_packets_dropped = 3; - // Cumulative count of bytes transmitted. - UInt64Value tx_bytes = 4; - // Cumulative count of transmit errors encountered. - UInt64Value tx_packets_dropped = 5; -} - -// ProcessUsage are stats pertaining to processes. -message ProcessUsage { - // Timestamp in nanoseconds at which the information were collected. Must be > 0. - int64 timestamp = 1; - // Number of processes. - UInt64Value process_count = 2; -} - -// WindowsProcessUsage are stats pertaining to processes specific to Windows. -message WindowsProcessUsage { - // Timestamp in nanoseconds at which the information were collected. Must be > 0. - int64 timestamp = 1; - // Number of processes. - UInt64Value process_count = 2; -} - -// ImageSpec is an internal representation of an image. -message ImageSpec { - // Container's Image field (e.g. imageID or imageDigest). - string image = 1; - // Unstructured key-value map holding arbitrary metadata. - // ImageSpec Annotations can be used to help the runtime target specific - // images in multi-arch images. - map<string, string> annotations = 2; - // The container image reference specified by the user (e.g. image[:tag] or digest). - // Only set if available within the RPC context. - string user_specified_image = 18; - // Runtime handler to use for pulling the image. - // If the runtime handler is unknown, the request should be rejected. - // An empty string would select the default runtime handler. - string runtime_handler = 19; -} - -message KeyValue { - string key = 1; - string value = 2; -} - -// LinuxContainerResources specifies Linux specific configuration for -// resources. -message LinuxContainerResources { - // CPU CFS (Completely Fair Scheduler) period. Default: 0 (not specified). - int64 cpu_period = 1; - // CPU CFS (Completely Fair Scheduler) quota. Default: 0 (not specified). - int64 cpu_quota = 2; - // CPU shares (relative weight vs. other containers). Default: 0 (not specified). - int64 cpu_shares = 3; - // Memory limit in bytes. Default: 0 (not specified). 
- int64 memory_limit_in_bytes = 4; - // OOMScoreAdj adjusts the oom-killer score. Default: 0 (not specified). - int64 oom_score_adj = 5; - // CpusetCpus constrains the allowed set of logical CPUs. Default: "" (not specified). - string cpuset_cpus = 6; - // CpusetMems constrains the allowed set of memory nodes. Default: "" (not specified). - string cpuset_mems = 7; - // List of HugepageLimits to limit the HugeTLB usage of container per page size. Default: nil (not specified). - repeated HugepageLimit hugepage_limits = 8; - // Unified resources for cgroup v2. Default: nil (not specified). - // Each key/value in the map refers to the cgroup v2. - // e.g. "memory.max": "6937202688" or "io.weight": "default 100". - map<string, string> unified = 9; - // Memory swap limit in bytes. Default 0 (not specified). - int64 memory_swap_limit_in_bytes = 10; -} - -// HugepageLimit corresponds to the file`hugetlb.<hugepagesize>.limit_in_byte` in container level cgroup. -// For example, `PageSize=1GB`, `Limit=1073741824` means setting `1073741824` bytes to hugetlb.1GB.limit_in_bytes. -message HugepageLimit { - // The value of PageSize has the format <size><unit-prefix>B (2MB, 1GB), - // and must match the <hugepagesize> of the corresponding control file found in `hugetlb.<hugepagesize>.limit_in_bytes`. - // The values of <unit-prefix> are intended to be parsed using base 1024("1KB" = 1024, "1MB" = 1048576, etc). - string page_size = 1; - // limit in bytes of hugepagesize HugeTLB usage. - uint64 limit = 2; -} - -// SELinuxOption are the labels to be applied to the container. -message SELinuxOption { - string user = 1; - string role = 2; - string type = 3; - string level = 4; -} - -// Capability contains the container capabilities to add or drop -// Dropping a capability will drop it from all sets. -// If a capability is added to only the add_capabilities list then it gets added to permitted, -// inheritable, effective and bounding sets, i.e. all sets except the ambient set. -// If a capability is added to only the add_ambient_capabilities list then it gets added to all sets, i.e permitted -// inheritable, effective, bounding and ambient sets. -// If a capability is added to add_capabilities and add_ambient_capabilities lists then it gets added to all sets, i.e. -// permitted, inheritable, effective, bounding and ambient sets. -message Capability { - // List of capabilities to add. - repeated string add_capabilities = 1; - // List of capabilities to drop. - repeated string drop_capabilities = 2; - // List of ambient capabilities to add. - repeated string add_ambient_capabilities = 3; -} - -// LinuxContainerSecurityContext holds linux security configuration that will be applied to a container. -message LinuxContainerSecurityContext { - // Capabilities to add or drop. - Capability capabilities = 1; - // If set, run container in privileged mode. - // Privileged mode is incompatible with the following options. If - // privileged is set, the following features MAY have no effect: - // 1. capabilities - // 2. selinux_options - // 4. seccomp - // 5. apparmor - // - // Privileged mode implies the following specific options are applied: - // 1. All capabilities are added. - // 2. Sensitive paths, such as kernel module paths within sysfs, are not masked. - // 3. Any sysfs and procfs mounts are mounted RW. - // 4. AppArmor confinement is not applied. - // 5. Seccomp restrictions are not applied. - // 6. The device cgroup does not restrict access to any devices. - // 7. 
All devices from the host's /dev are available within the container. - // 8. SELinux restrictions are not applied (e.g. label=disabled). - bool privileged = 2; - // Configurations for the container's namespaces. - // Only used if the container uses namespace for isolation. - NamespaceOption namespace_options = 3; - // SELinux context to be optionally applied. - SELinuxOption selinux_options = 4; - // UID to run the container process as. Only one of run_as_user and - // run_as_username can be specified at a time. - Int64Value run_as_user = 5; - // GID to run the container process as. run_as_group should only be specified - // when run_as_user or run_as_username is specified; otherwise, the runtime - // MUST error. - Int64Value run_as_group = 12; - // User name to run the container process as. If specified, the user MUST - // exist in the container image (i.e. in the /etc/passwd inside the image), - // and be resolved there by the runtime; otherwise, the runtime MUST error. - string run_as_username = 6; - // If set, the root filesystem of the container is read-only. - bool readonly_rootfs = 7; - // List of groups applied to the first process run in the container, in - // addition to the container's primary GID, and group memberships defined - // in the container image for the container's primary UID of the container process. - // If the list is empty, no additional groups are added to any container. - // Note that group memberships defined in the container image for the container's primary UID - // of the container process are still effective, even if they are not included in this list. - repeated int64 supplemental_groups = 8; - // no_new_privs defines if the flag for no_new_privs should be set on the - // container. - bool no_new_privs = 11; - // masked_paths is a slice of paths that should be masked by the container - // runtime, this can be passed directly to the OCI spec. - repeated string masked_paths = 13; - // readonly_paths is a slice of paths that should be set as readonly by the - // container runtime, this can be passed directly to the OCI spec. - repeated string readonly_paths = 14; - // Seccomp profile for the container. - SecurityProfile seccomp = 15; - // AppArmor profile for the container. - SecurityProfile apparmor = 16; - // AppArmor profile for the container, candidate values are: - // * runtime/default: equivalent to not specifying a profile. - // * unconfined: no profiles are loaded - // * localhost/<profile_name>: profile loaded on the node - // (localhost) by name. The possible profile names are detailed at - // https://gitlab.com/apparmor/apparmor/-/wikis/AppArmor_Core_Policy_Reference - string apparmor_profile = 9 [deprecated=true]; - // Seccomp profile for the container, candidate values are: - // * runtime/default: the default profile for the container runtime - // * unconfined: unconfined profile, ie, no seccomp sandboxing - // * localhost/<full-path-to-profile>: the profile installed on the node. - // <full-path-to-profile> is the full path of the profile. - // Default: "", which is identical with unconfined. - string seccomp_profile_path = 10 [deprecated=true]; -} - -// LinuxContainerConfig contains platform-specific configuration for -// Linux-based containers. -message LinuxContainerConfig { - // Resources specification for the container. - LinuxContainerResources resources = 1; - // LinuxContainerSecurityContext configuration for the container. 
- LinuxContainerSecurityContext security_context = 2; -} - -// WindowsNamespaceOption provides options for Windows namespaces. -message WindowsNamespaceOption { - // Network namespace for this container/sandbox. - // Namespaces currently set by the kubelet: POD, NODE - NamespaceMode network = 1; -} - -// WindowsSandboxSecurityContext holds platform-specific configurations that will be -// applied to a sandbox. -// These settings will only apply to the sandbox container. -message WindowsSandboxSecurityContext { - // User name to run the container process as. If specified, the user MUST - // exist in the container image and be resolved there by the runtime; - // otherwise, the runtime MUST return error. - string run_as_username = 1; - - // The contents of the GMSA credential spec to use to run this container. - string credential_spec = 2; - - // Indicates whether the container requested to run as a HostProcess container. - bool host_process = 3; - - // Configuration for the sandbox's namespaces - WindowsNamespaceOption namespace_options = 4; -} - -// WindowsPodSandboxConfig holds platform-specific configurations for Windows -// host platforms and Windows-based containers. -message WindowsPodSandboxConfig { - // WindowsSandboxSecurityContext holds sandbox security attributes. - WindowsSandboxSecurityContext security_context = 1; -} - -// WindowsContainerSecurityContext holds windows security configuration that will be applied to a container. -message WindowsContainerSecurityContext { - // User name to run the container process as. If specified, the user MUST - // exist in the container image and be resolved there by the runtime; - // otherwise, the runtime MUST return error. - string run_as_username = 1; - - // The contents of the GMSA credential spec to use to run this container. - string credential_spec = 2; - - // Indicates whether a container is to be run as a HostProcess container. - bool host_process = 3; -} - -// WindowsContainerConfig contains platform-specific configuration for -// Windows-based containers. -message WindowsContainerConfig { - // Resources specification for the container. - WindowsContainerResources resources = 1; - // WindowsContainerSecurityContext configuration for the container. - WindowsContainerSecurityContext security_context = 2; -} - -// WindowsContainerResources specifies Windows specific configuration for -// resources. -message WindowsContainerResources { - // CPU shares (relative weight vs. other containers). Default: 0 (not specified). - int64 cpu_shares = 1; - // Number of CPUs available to the container. Default: 0 (not specified). - int64 cpu_count = 2; - // Specifies the portion of processor cycles that this container can use as a percentage times 100. - int64 cpu_maximum = 3; - // Memory limit in bytes. Default: 0 (not specified). - int64 memory_limit_in_bytes = 4; - // Specifies the size of the rootfs / scratch space in bytes to be configured for this container. Default: 0 (not specified). - int64 rootfs_size_in_bytes = 5; -} - -// ContainerMetadata holds all necessary information for building the container -// name. The container runtime is encouraged to expose the metadata in its user -// interface for better user experience. E.g., runtime can construct a unique -// container name based on the metadata. Note that (name, attempt) is unique -// within a sandbox for the entire lifetime of the sandbox. -message ContainerMetadata { - // Name of the container. Same as the container name in the PodSpec. 
- string name = 1; - // Attempt number of creating the container. Default: 0. - uint32 attempt = 2; -} - -// Device specifies a host device to mount into a container. -message Device { - // Path of the device within the container. - string container_path = 1; - // Path of the device on the host. - string host_path = 2; - // Cgroups permissions of the device, candidates are one or more of - // * r - allows container to read from the specified device. - // * w - allows container to write to the specified device. - // * m - allows container to create device files that do not yet exist. - string permissions = 3; -} - -// CDIDevice specifies a CDI device information. -message CDIDevice { - // Fully qualified CDI device name - // for example: vendor.com/gpu=gpudevice1 - // see more details in the CDI specification: - // https://github.com/container-orchestrated-devices/container-device-interface/blob/main/SPEC.md - string name = 1; -} - -// ContainerConfig holds all the required and optional fields for creating a -// container. -message ContainerConfig { - // Metadata of the container. This information will uniquely identify the - // container, and the runtime should leverage this to ensure correct - // operation. The runtime may also use this information to improve UX, such - // as by constructing a readable name. - ContainerMetadata metadata = 1 ; - // Image to use. - ImageSpec image = 2; - // Command to execute (i.e., entrypoint for docker) - repeated string command = 3; - // Args for the Command (i.e., command for docker) - repeated string args = 4; - // Current working directory of the command. - string working_dir = 5; - // List of environment variable to set in the container. - repeated KeyValue envs = 6; - // Mounts for the container. - repeated Mount mounts = 7; - // Devices for the container. - repeated Device devices = 8; - // Key-value pairs that may be used to scope and select individual resources. - // Label keys are of the form: - // label-key ::= prefixed-name | name - // prefixed-name ::= prefix '/' name - // prefix ::= DNS_SUBDOMAIN - // name ::= DNS_LABEL - map<string, string> labels = 9; - // Unstructured key-value map that may be used by the kubelet to store and - // retrieve arbitrary metadata. - // - // Annotations MUST NOT be altered by the runtime; the annotations stored - // here MUST be returned in the ContainerStatus associated with the container - // this ContainerConfig creates. - // - // In general, in order to preserve a well-defined interface between the - // kubelet and the container runtime, annotations SHOULD NOT influence - // runtime behaviour. - map<string, string> annotations = 10; - // Path relative to PodSandboxConfig.LogDirectory for container to store - // the log (STDOUT and STDERR) on the host. - // E.g., - // PodSandboxConfig.LogDirectory = `/var/log/pods/<NAMESPACE>_<NAME>_<UID>/` - // ContainerConfig.LogPath = `containerName/Instance#.log` - string log_path = 11; - - // Variables for interactive containers, these have very specialized - // use-cases (e.g. debugging). - bool stdin = 12; - bool stdin_once = 13; - bool tty = 14; - - // Configuration specific to Linux containers. - LinuxContainerConfig linux = 15; - // Configuration specific to Windows containers. - WindowsContainerConfig windows = 16; - - // CDI devices for the container. - repeated CDIDevice CDI_devices = 17; -} - -message CreateContainerRequest { - // ID of the PodSandbox in which the container should be created. - string pod_sandbox_id = 1; - // Config of the container. 
- ContainerConfig config = 2; - // Config of the PodSandbox. This is the same config that was passed - // to RunPodSandboxRequest to create the PodSandbox. It is passed again - // here just for easy reference. The PodSandboxConfig is immutable and - // remains the same throughout the lifetime of the pod. - PodSandboxConfig sandbox_config = 3; -} - -message CreateContainerResponse { - // ID of the created container. - string container_id = 1; -} - -message StartContainerRequest { - // ID of the container to start. - string container_id = 1; -} - -message StartContainerResponse {} - -message StopContainerRequest { - // ID of the container to stop. - string container_id = 1; - // Timeout in seconds to wait for the container to stop before forcibly - // terminating it. Default: 0 (forcibly terminate the container immediately) - int64 timeout = 2; -} - -message StopContainerResponse {} - -message RemoveContainerRequest { - // ID of the container to remove. - string container_id = 1; -} - -message RemoveContainerResponse {} - -enum ContainerState { - CONTAINER_CREATED = 0; - CONTAINER_RUNNING = 1; - CONTAINER_EXITED = 2; - CONTAINER_UNKNOWN = 3; -} - -// ContainerStateValue is the wrapper of ContainerState. -message ContainerStateValue { - // State of the container. - ContainerState state = 1; -} - -// ContainerFilter is used to filter containers. -// All those fields are combined with 'AND' -message ContainerFilter { - // ID of the container. - string id = 1; - // State of the container. - ContainerStateValue state = 2; - // ID of the PodSandbox. - string pod_sandbox_id = 3; - // LabelSelector to select matches. - // Only api.MatchLabels is supported for now and the requirements - // are ANDed. MatchExpressions is not supported yet. - map<string, string> label_selector = 4; -} - -message ListContainersRequest { - ContainerFilter filter = 1; -} - -// Container provides the runtime information for a container, such as ID, hash, -// state of the container. -message Container { - // ID of the container, used by the container runtime to identify - // a container. - string id = 1; - // ID of the sandbox to which this container belongs. - string pod_sandbox_id = 2; - // Metadata of the container. - ContainerMetadata metadata = 3; - // Spec of the image. - ImageSpec image = 4; - // Reference to the image in use. For most runtimes, this should be an - // image ID. - string image_ref = 5; - // State of the container. - ContainerState state = 6; - // Creation time of the container in nanoseconds. - int64 created_at = 7; - // Key-value pairs that may be used to scope and select individual resources. - map<string, string> labels = 8; - // Unstructured key-value map holding arbitrary metadata. - // Annotations MUST NOT be altered by the runtime; the value of this field - // MUST be identical to that of the corresponding ContainerConfig used to - // instantiate this Container. - map<string, string> annotations = 9; -} - -message ListContainersResponse { - // List of containers. - repeated Container containers = 1; -} - -message ContainerStatusRequest { - // ID of the container for which to retrieve status. - string container_id = 1; - // Verbose indicates whether to return extra information about the container. - bool verbose = 2; -} - -// ContainerStatus represents the status of a container. -message ContainerStatus { - // ID of the container. - string id = 1; - // Metadata of the container. - ContainerMetadata metadata = 2; - // Status of the container. 
- ContainerState state = 3; - // Creation time of the container in nanoseconds. - int64 created_at = 4; - // Start time of the container in nanoseconds. Default: 0 (not specified). - int64 started_at = 5; - // Finish time of the container in nanoseconds. Default: 0 (not specified). - int64 finished_at = 6; - // Exit code of the container. Only required when finished_at != 0. Default: 0. - int32 exit_code = 7; - // Spec of the image. - ImageSpec image = 8; - // Reference to the image in use. For most runtimes, this should be an - // image ID - string image_ref = 9; - // Brief CamelCase string explaining why container is in its current state. - // Must be set to "OOMKilled" for containers terminated by cgroup-based Out-of-Memory killer. - string reason = 10; - // Human-readable message indicating details about why container is in its - // current state. - string message = 11; - // Key-value pairs that may be used to scope and select individual resources. - map<string,string> labels = 12; - // Unstructured key-value map holding arbitrary metadata. - // Annotations MUST NOT be altered by the runtime; the value of this field - // MUST be identical to that of the corresponding ContainerConfig used to - // instantiate the Container this status represents. - map<string,string> annotations = 13; - // Mounts for the container. - repeated Mount mounts = 14; - // Log path of container. - string log_path = 15; - // Resource limits configuration of the container. - ContainerResources resources = 16; -} - -message ContainerStatusResponse { - // Status of the container. - ContainerStatus status = 1; - // Info is extra information of the Container. The key could be arbitrary string, and - // value should be in json format. The information could include anything useful for - // debug, e.g. pid for linux container based container runtime. - // It should only be returned non-empty when Verbose is true. - map<string, string> info = 2; -} - -// ContainerResources holds resource limits configuration for a container. -message ContainerResources { - // Resource limits configuration specific to Linux container. - LinuxContainerResources linux = 1; - // Resource limits configuration specific to Windows container. - WindowsContainerResources windows = 2; -} - -message UpdateContainerResourcesRequest { - // ID of the container to update. - string container_id = 1; - // Resource configuration specific to Linux containers. - LinuxContainerResources linux = 2; - // Resource configuration specific to Windows containers. - WindowsContainerResources windows = 3; - // Unstructured key-value map holding arbitrary additional information for - // container resources updating. This can be used for specifying experimental - // resources to update or other options to use when updating the container. - map<string, string> annotations = 4; -} - -message UpdateContainerResourcesResponse {} - -message ExecSyncRequest { - // ID of the container. - string container_id = 1; - // Command to execute. - repeated string cmd = 2; - // Timeout in seconds to stop the command. Default: 0 (run forever). - int64 timeout = 3; -} - -message ExecSyncResponse { - // Captured command stdout output. - // The runtime should cap the output of this response to 16MB. - // If the stdout of the command produces more than 16MB, the remaining output - // should be discarded, and the command should proceed with no error. - // See CVE-2022-1708 and CVE-2022-31030 for more information. - bytes stdout = 1; - // Captured command stderr output. 
- // The runtime should cap the output of this response to 16MB. - // If the stderr of the command produces more than 16MB, the remaining output - // should be discarded, and the command should proceed with no error. - // See CVE-2022-1708 and CVE-2022-31030 for more information. - bytes stderr = 2; - // Exit code the command finished with. Default: 0 (success). - int32 exit_code = 3; -} - -message ExecRequest { - // ID of the container in which to execute the command. - string container_id = 1; - // Command to execute. - repeated string cmd = 2; - // Whether to exec the command in a TTY. - bool tty = 3; - // Whether to stream stdin. - // One of `stdin`, `stdout`, and `stderr` MUST be true. - bool stdin = 4; - // Whether to stream stdout. - // One of `stdin`, `stdout`, and `stderr` MUST be true. - bool stdout = 5; - // Whether to stream stderr. - // One of `stdin`, `stdout`, and `stderr` MUST be true. - // If `tty` is true, `stderr` MUST be false. Multiplexing is not supported - // in this case. The output of stdout and stderr will be combined to a - // single stream. - bool stderr = 6; -} - -message ExecResponse { - // Fully qualified URL of the exec streaming server. - string url = 1; -} - -message AttachRequest { - // ID of the container to which to attach. - string container_id = 1; - // Whether to stream stdin. - // One of `stdin`, `stdout`, and `stderr` MUST be true. - bool stdin = 2; - // Whether the process being attached is running in a TTY. - // This must match the TTY setting in the ContainerConfig. - bool tty = 3; - // Whether to stream stdout. - // One of `stdin`, `stdout`, and `stderr` MUST be true. - bool stdout = 4; - // Whether to stream stderr. - // One of `stdin`, `stdout`, and `stderr` MUST be true. - // If `tty` is true, `stderr` MUST be false. Multiplexing is not supported - // in this case. The output of stdout and stderr will be combined to a - // single stream. - bool stderr = 5; -} - -message AttachResponse { - // Fully qualified URL of the attach streaming server. - string url = 1; -} - -message PortForwardRequest { - // ID of the container to which to forward the port. - string pod_sandbox_id = 1; - // Port to forward. - repeated int32 port = 2; -} - -message PortForwardResponse { - // Fully qualified URL of the port-forward streaming server. - string url = 1; -} - -message ImageFilter { - // Spec of the image. - ImageSpec image = 1; -} - -message ListImagesRequest { - // Filter to list images. - ImageFilter filter = 1; -} - -// Basic information about a container image. -message Image { - // ID of the image. - string id = 1; - // Other names by which this image is known. - repeated string repo_tags = 2; - // Digests by which this image is known. - repeated string repo_digests = 3; - // Size of the image in bytes. Must be > 0. - uint64 size = 4; - // UID that will run the command(s). This is used as a default if no user is - // specified when creating the container. UID and the following user name - // are mutually exclusive. - Int64Value uid = 5; - // User name that will run the command(s). This is used if UID is not set - // and no user is specified when creating container. - string username = 6; - // ImageSpec for image which includes annotations - ImageSpec spec = 7; - // Recommendation on whether this image should be exempt from garbage collection. - // It must only be treated as a recommendation -- the client can still request that the image be deleted, - // and the runtime must oblige. 
- bool pinned = 8; -} - -message ListImagesResponse { - // List of images. - repeated Image images = 1; -} - -message ImageStatusRequest { - // Spec of the image. - ImageSpec image = 1; - // Verbose indicates whether to return extra information about the image. - bool verbose = 2; -} - -message ImageStatusResponse { - // Status of the image. - Image image = 1; - // Info is extra information of the Image. The key could be arbitrary string, and - // value should be in json format. The information could include anything useful - // for debug, e.g. image config for oci image based container runtime. - // It should only be returned non-empty when Verbose is true. - map<string, string> info = 2; -} - -// AuthConfig contains authorization information for connecting to a registry. -message AuthConfig { - string username = 1; - string password = 2; - string auth = 3; - string server_address = 4; - // IdentityToken is used to authenticate the user and get - // an access token for the registry. - string identity_token = 5; - // RegistryToken is a bearer token to be sent to a registry - string registry_token = 6; -} - -message PullImageRequest { - // Spec of the image. - ImageSpec image = 1; - // Authentication configuration for pulling the image. - AuthConfig auth = 2; - // Config of the PodSandbox, which is used to pull image in PodSandbox context. - PodSandboxConfig sandbox_config = 3; -} - -message PullImageResponse { - // Reference to the image in use. For most runtimes, this should be an - // image ID or digest. - string image_ref = 1; -} - -message RemoveImageRequest { - // Spec of the image to remove. - ImageSpec image = 1; -} - -message RemoveImageResponse {} - -message NetworkConfig { - // CIDR to use for pod IP addresses. If the CIDR is empty, runtimes - // should omit it. - string pod_cidr = 1; -} - -message RuntimeConfig { - NetworkConfig network_config = 1; -} - -message UpdateRuntimeConfigRequest { - RuntimeConfig runtime_config = 1; -} - -message UpdateRuntimeConfigResponse {} - -// RuntimeCondition contains condition information for the runtime. -// There are 2 kinds of runtime conditions: -// 1. Required conditions: Conditions are required for kubelet to work -// properly. If any required condition is unmet, the node will be not ready. -// The required conditions include: -// * RuntimeReady: RuntimeReady means the runtime is up and ready to accept -// basic containers e.g. container only needs host network. -// * NetworkReady: NetworkReady means the runtime network is up and ready to -// accept containers which require container network. -// 2. Optional conditions: Conditions are informative to the user, but kubelet -// will not rely on. Since condition type is an arbitrary string, all conditions -// not required are optional. These conditions will be exposed to users to help -// them understand the status of the system. -message RuntimeCondition { - // Type of runtime condition. - string type = 1; - // Status of the condition, one of true/false. Default: false. - bool status = 2; - // Brief CamelCase string containing reason for the condition's last transition. - string reason = 3; - // Human-readable message indicating details about last transition. - string message = 4; -} - -// RuntimeStatus is information about the current status of the runtime. -message RuntimeStatus { - // List of current observed runtime conditions. - repeated RuntimeCondition conditions = 1; -} - -message StatusRequest { - // Verbose indicates whether to return extra information about the runtime. 
- bool verbose = 1; -} - -message StatusResponse { - // Status of the Runtime. - RuntimeStatus status = 1; - // Info is extra information of the Runtime. The key could be arbitrary string, and - // value should be in json format. The information could include anything useful for - // debug, e.g. plugins used by the container runtime. - // It should only be returned non-empty when Verbose is true. - map<string, string> info = 2; -} - -message ImageFsInfoRequest {} - -// UInt64Value is the wrapper of uint64. -message UInt64Value { - // The value. - uint64 value = 1; -} - -// FilesystemIdentifier uniquely identify the filesystem. -message FilesystemIdentifier{ - // Mountpoint of a filesystem. - string mountpoint = 1; -} - -// FilesystemUsage provides the filesystem usage information. -message FilesystemUsage { - // Timestamp in nanoseconds at which the information were collected. Must be > 0. - int64 timestamp = 1; - // The unique identifier of the filesystem. - FilesystemIdentifier fs_id = 2; - // UsedBytes represents the bytes used for images on the filesystem. - // This may differ from the total bytes used on the filesystem and may not - // equal CapacityBytes - AvailableBytes. - UInt64Value used_bytes = 3; - // InodesUsed represents the inodes used by the images. - // This may not equal InodesCapacity - InodesAvailable because the underlying - // filesystem may also be used for purposes other than storing images. - UInt64Value inodes_used = 4; -} - -// WindowsFilesystemUsage provides the filesystem usage information specific to Windows. -message WindowsFilesystemUsage { - // Timestamp in nanoseconds at which the information were collected. Must be > 0. - int64 timestamp = 1; - // The unique identifier of the filesystem. - FilesystemIdentifier fs_id = 2; - // UsedBytes represents the bytes used for images on the filesystem. - // This may differ from the total bytes used on the filesystem and may not - // equal CapacityBytes - AvailableBytes. - UInt64Value used_bytes = 3; -} - -message ImageFsInfoResponse { - // Information of image filesystem(s). - repeated FilesystemUsage image_filesystems = 1; - // Information of container filesystem(s). - // This is an optional field, may be used for example if container and image - // storage are separated. - // Default will be to return this as empty. - repeated FilesystemUsage container_filesystems = 2; -} - -message ContainerStatsRequest{ - // ID of the container for which to retrieve stats. - string container_id = 1; -} - -message ContainerStatsResponse { - // Stats of the container. - ContainerStats stats = 1; -} - -message ListContainerStatsRequest{ - // Filter for the list request. - ContainerStatsFilter filter = 1; -} - -// ContainerStatsFilter is used to filter containers. -// All those fields are combined with 'AND' -message ContainerStatsFilter { - // ID of the container. - string id = 1; - // ID of the PodSandbox. - string pod_sandbox_id = 2; - // LabelSelector to select matches. - // Only api.MatchLabels is supported for now and the requirements - // are ANDed. MatchExpressions is not supported yet. - map<string, string> label_selector = 3; -} - -message ListContainerStatsResponse { - // Stats of the container. - repeated ContainerStats stats = 1; -} - -// ContainerAttributes provides basic information of the container. -message ContainerAttributes { - // ID of the container. - string id = 1; - // Metadata of the container. - ContainerMetadata metadata = 2; - // Key-value pairs that may be used to scope and select individual resources. 
- map<string,string> labels = 3; - // Unstructured key-value map holding arbitrary metadata. - // Annotations MUST NOT be altered by the runtime; the value of this field - // MUST be identical to that of the corresponding ContainerConfig used to - // instantiate the Container this status represents. - map<string,string> annotations = 4; -} - -// ContainerStats provides the resource usage statistics for a container. -message ContainerStats { - // Information of the container. - ContainerAttributes attributes = 1; - // CPU usage gathered from the container. - CpuUsage cpu = 2; - // Memory usage gathered from the container. - MemoryUsage memory = 3; - // Usage of the writable layer. - FilesystemUsage writable_layer = 4; - // Swap usage gathered from the container. - SwapUsage swap = 5; -} - -// WindowsContainerStats provides the resource usage statistics for a container specific for Windows -message WindowsContainerStats { - // Information of the container. - ContainerAttributes attributes = 1; - // CPU usage gathered from the container. - WindowsCpuUsage cpu = 2; - // Memory usage gathered from the container. - WindowsMemoryUsage memory = 3; - // Usage of the writable layer. - WindowsFilesystemUsage writable_layer = 4; -} - -// CpuUsage provides the CPU usage information. -message CpuUsage { - // Timestamp in nanoseconds at which the information were collected. Must be > 0. - int64 timestamp = 1; - // Cumulative CPU usage (sum across all cores) since object creation. - UInt64Value usage_core_nano_seconds = 2; - // Total CPU usage (sum of all cores) averaged over the sample window. - // The "core" unit can be interpreted as CPU core-nanoseconds per second. - UInt64Value usage_nano_cores = 3; -} - -// WindowsCpuUsage provides the CPU usage information specific to Windows -message WindowsCpuUsage { - // Timestamp in nanoseconds at which the information were collected. Must be > 0. - int64 timestamp = 1; - // Cumulative CPU usage (sum across all cores) since object creation. - UInt64Value usage_core_nano_seconds = 2; - // Total CPU usage (sum of all cores) averaged over the sample window. - // The "core" unit can be interpreted as CPU core-nanoseconds per second. - UInt64Value usage_nano_cores = 3; -} - -// MemoryUsage provides the memory usage information. -message MemoryUsage { - // Timestamp in nanoseconds at which the information were collected. Must be > 0. - int64 timestamp = 1; - // The amount of working set memory in bytes. - UInt64Value working_set_bytes = 2; - // Available memory for use. This is defined as the memory limit - workingSetBytes. - UInt64Value available_bytes = 3; - // Total memory in use. This includes all memory regardless of when it was accessed. - UInt64Value usage_bytes = 4; - // The amount of anonymous and swap cache memory (includes transparent hugepages). - UInt64Value rss_bytes = 5; - // Cumulative number of minor page faults. - UInt64Value page_faults = 6; - // Cumulative number of major page faults. - UInt64Value major_page_faults = 7; -} - -message SwapUsage { - // Timestamp in nanoseconds at which the information were collected. Must be > 0. - int64 timestamp = 1; - // Available swap for use. This is defined as the swap limit - swapUsageBytes. - UInt64Value swap_available_bytes = 2; - // Total memory in use. This includes all memory regardless of when it was accessed. 
- UInt64Value swap_usage_bytes = 3; -} - -// WindowsMemoryUsage provides the memory usage information specific to Windows -message WindowsMemoryUsage { - // Timestamp in nanoseconds at which the information were collected. Must be > 0. - int64 timestamp = 1; - // The amount of working set memory in bytes. - UInt64Value working_set_bytes = 2; - // Available memory for use. This is defined as the memory limit - commit_memory_bytes. - UInt64Value available_bytes = 3; - // Cumulative number of page faults. - UInt64Value page_faults = 4; - // Total commit memory in use. Commit memory is total of physical and virtual memory in use. - UInt64Value commit_memory_bytes = 5; -} - -message ReopenContainerLogRequest { - // ID of the container for which to reopen the log. - string container_id = 1; -} - -message ReopenContainerLogResponse{ -} - -message CheckpointContainerRequest { - // ID of the container to be checkpointed. - string container_id = 1; - // Location of the checkpoint archive used for export - string location = 2; - // Timeout in seconds for the checkpoint to complete. - // Timeout of zero means to use the CRI default. - // Timeout > 0 means to use the user specified timeout. - int64 timeout = 3; -} - -message CheckpointContainerResponse {} - -message GetEventsRequest {} - -message ContainerEventResponse { - // ID of the container - string container_id = 1; - - // Type of the container event - ContainerEventType container_event_type = 2; - - // Creation timestamp of this event - int64 created_at = 3; - - // Sandbox status - PodSandboxStatus pod_sandbox_status = 4; - - // Container statuses - repeated ContainerStatus containers_statuses = 5; -} - -enum ContainerEventType { - // Container created - CONTAINER_CREATED_EVENT = 0; - - // Container started - CONTAINER_STARTED_EVENT = 1; - - // Container stopped - CONTAINER_STOPPED_EVENT = 2; - - // Container deleted - CONTAINER_DELETED_EVENT = 3; -} - -message ListMetricDescriptorsRequest {} - -message ListMetricDescriptorsResponse { - repeated MetricDescriptor descriptors = 1; -} - -message MetricDescriptor { - // The name field will be used as a unique identifier of this MetricDescriptor, - // and be used in conjunction with the Metric structure to populate the full Metric. - string name = 1; - string help = 2; - // When a metric uses this metric descriptor, it should only define - // labels that have previously been declared in label_keys. - // It is the responsibility of the runtime to correctly keep sorted the keys and values. - // If the two slices have different length, the behavior is undefined. - repeated string label_keys = 3; -} - -message ListPodSandboxMetricsRequest {} - -message ListPodSandboxMetricsResponse { - repeated PodSandboxMetrics pod_metrics = 1; -} - -message PodSandboxMetrics { - string pod_sandbox_id = 1; - repeated Metric metrics = 2; - repeated ContainerMetrics container_metrics = 3; -} - -message ContainerMetrics { - string container_id = 1; - repeated Metric metrics = 2; -} - -message Metric { - // Name must match a name previously returned in a MetricDescriptors call, - // otherwise, it will be ignored. - string name = 1; - // Timestamp should be 0 if the metric was gathered live. - // If it was cached, the Timestamp should reflect the time it was collected. - int64 timestamp = 2; - MetricType metric_type = 3; - // The corresponding LabelValues to the LabelKeys defined in the MetricDescriptor. - // It is the responsibility of the runtime to correctly keep sorted the keys and values. 
- // If the two slices have different length, the behavior is undefined. - repeated string label_values = 4; - UInt64Value value = 5; -} - -enum MetricType { - COUNTER = 0; - GAUGE = 1; -} - -message RuntimeConfigRequest {} - -message RuntimeConfigResponse { - // Configuration information for Linux-based runtimes. This field contains - // global runtime configuration options that are not specific to runtime - // handlers. - LinuxRuntimeConfiguration linux = 1; -} - -message LinuxRuntimeConfiguration { - // Cgroup driver to use - // Note: this field should not change for the lifecycle of the Kubelet, - // or while there are running containers. - // The Kubelet will not re-request this after startup, and will construct the cgroup - // hierarchy assuming it is static. - // If the runtime wishes to change this value, it must be accompanied by removal of - // all pods, and a restart of the Kubelet. The easiest way to do this is with a full node reboot. - CgroupDriver cgroup_driver = 1; -} - -enum CgroupDriver { - SYSTEMD = 0; - CGROUPFS = 1; -} +/* +Copyright 2020 The Kubernetes Authors. + +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. +*/ + +// To regenerate api.pb.go run `hack/update-codegen.sh protobindings` +syntax = "proto3"; + +package runtime.v1; +option go_package = "k8s.io/cri-api/pkg/apis/runtime/v1"; +option csharp_namespace = "StellaOps.Zastava.Observer.Cri"; + + + +// Runtime service defines the public APIs for remote container runtimes +service RuntimeService { + // Version returns the runtime name, runtime version, and runtime API version. + rpc Version(VersionRequest) returns (VersionResponse) {} + + // RunPodSandbox creates and starts a pod-level sandbox. Runtimes must ensure + // the sandbox is in the ready state on success. + rpc RunPodSandbox(RunPodSandboxRequest) returns (RunPodSandboxResponse) {} + // StopPodSandbox stops any running process that is part of the sandbox and + // reclaims network resources (e.g., IP addresses) allocated to the sandbox. + // If there are any running containers in the sandbox, they must be forcibly + // terminated. + // This call is idempotent, and must not return an error if all relevant + // resources have already been reclaimed. kubelet will call StopPodSandbox + // at least once before calling RemovePodSandbox. It will also attempt to + // reclaim resources eagerly, as soon as a sandbox is not needed. Hence, + // multiple StopPodSandbox calls are expected. + rpc StopPodSandbox(StopPodSandboxRequest) returns (StopPodSandboxResponse) {} + // RemovePodSandbox removes the sandbox. If there are any running containers + // in the sandbox, they must be forcibly terminated and removed. + // This call is idempotent, and must not return an error if the sandbox has + // already been removed. + rpc RemovePodSandbox(RemovePodSandboxRequest) returns (RemovePodSandboxResponse) {} + // PodSandboxStatus returns the status of the PodSandbox. If the PodSandbox is not + // present, returns an error. 
+ rpc PodSandboxStatus(PodSandboxStatusRequest) returns (PodSandboxStatusResponse) {} + // ListPodSandbox returns a list of PodSandboxes. + rpc ListPodSandbox(ListPodSandboxRequest) returns (ListPodSandboxResponse) {} + + // CreateContainer creates a new container in specified PodSandbox + rpc CreateContainer(CreateContainerRequest) returns (CreateContainerResponse) {} + // StartContainer starts the container. + rpc StartContainer(StartContainerRequest) returns (StartContainerResponse) {} + // StopContainer stops a running container with a grace period (i.e., timeout). + // This call is idempotent, and must not return an error if the container has + // already been stopped. + // The runtime must forcibly kill the container after the grace period is + // reached. + rpc StopContainer(StopContainerRequest) returns (StopContainerResponse) {} + // RemoveContainer removes the container. If the container is running, the + // container must be forcibly removed. + // This call is idempotent, and must not return an error if the container has + // already been removed. + rpc RemoveContainer(RemoveContainerRequest) returns (RemoveContainerResponse) {} + // ListContainers lists all containers by filters. + rpc ListContainers(ListContainersRequest) returns (ListContainersResponse) {} + // ContainerStatus returns status of the container. If the container is not + // present, returns an error. + rpc ContainerStatus(ContainerStatusRequest) returns (ContainerStatusResponse) {} + // UpdateContainerResources updates ContainerConfig of the container synchronously. + // If runtime fails to transactionally update the requested resources, an error is returned. + rpc UpdateContainerResources(UpdateContainerResourcesRequest) returns (UpdateContainerResourcesResponse) {} + // ReopenContainerLog asks runtime to reopen the stdout/stderr log file + // for the container. This is often called after the log file has been + // rotated. If the container is not running, container runtime can choose + // to either create a new log file and return nil, or return an error. + // Once it returns error, new container log file MUST NOT be created. + rpc ReopenContainerLog(ReopenContainerLogRequest) returns (ReopenContainerLogResponse) {} + + // ExecSync runs a command in a container synchronously. + rpc ExecSync(ExecSyncRequest) returns (ExecSyncResponse) {} + // Exec prepares a streaming endpoint to execute a command in the container. + rpc Exec(ExecRequest) returns (ExecResponse) {} + // Attach prepares a streaming endpoint to attach to a running container. + rpc Attach(AttachRequest) returns (AttachResponse) {} + // PortForward prepares a streaming endpoint to forward ports from a PodSandbox. + rpc PortForward(PortForwardRequest) returns (PortForwardResponse) {} + + // ContainerStats returns stats of the container. If the container does not + // exist, the call returns an error. + rpc ContainerStats(ContainerStatsRequest) returns (ContainerStatsResponse) {} + // ListContainerStats returns stats of all running containers. + rpc ListContainerStats(ListContainerStatsRequest) returns (ListContainerStatsResponse) {} + + // PodSandboxStats returns stats of the pod sandbox. If the pod sandbox does not + // exist, the call returns an error. + rpc PodSandboxStats(PodSandboxStatsRequest) returns (PodSandboxStatsResponse) {} + // ListPodSandboxStats returns stats of the pod sandboxes matching a filter. 
+ rpc ListPodSandboxStats(ListPodSandboxStatsRequest) returns (ListPodSandboxStatsResponse) {} + + // UpdateRuntimeConfig updates the runtime configuration based on the given request. + rpc UpdateRuntimeConfig(UpdateRuntimeConfigRequest) returns (UpdateRuntimeConfigResponse) {} + + // Status returns the status of the runtime. + rpc Status(StatusRequest) returns (StatusResponse) {} + + // CheckpointContainer checkpoints a container + rpc CheckpointContainer(CheckpointContainerRequest) returns (CheckpointContainerResponse) {} + + // GetContainerEvents gets container events from the CRI runtime + rpc GetContainerEvents(GetEventsRequest) returns (stream ContainerEventResponse) {} + + // ListMetricDescriptors gets the descriptors for the metrics that will be returned in ListPodSandboxMetrics. + // This list should be static at startup: either the client and server restart together when + // adding or removing metrics descriptors, or they should not change. + // Put differently, if ListPodSandboxMetrics references a name that is not described in the initial + // ListMetricDescriptors call, then the metric will not be broadcasted. + rpc ListMetricDescriptors(ListMetricDescriptorsRequest) returns (ListMetricDescriptorsResponse) {} + + // ListPodSandboxMetrics gets pod sandbox metrics from CRI Runtime + rpc ListPodSandboxMetrics(ListPodSandboxMetricsRequest) returns (ListPodSandboxMetricsResponse) {} + + // RuntimeConfig returns configuration information of the runtime. + // A couple of notes: + // - The RuntimeConfigRequest object is not to be confused with the contents of UpdateRuntimeConfigRequest. + // The former is for having runtime tell Kubelet what to do, the latter vice versa. + // - It is the expectation of the Kubelet that these fields are static for the lifecycle of the Kubelet. + // The Kubelet will not re-request the RuntimeConfiguration after startup, and CRI implementations should + // avoid updating them without a full node reboot. + rpc RuntimeConfig(RuntimeConfigRequest) returns (RuntimeConfigResponse) {} +} + +// ImageService defines the public APIs for managing images. +service ImageService { + // ListImages lists existing images. + rpc ListImages(ListImagesRequest) returns (ListImagesResponse) {} + // ImageStatus returns the status of the image. If the image is not + // present, returns a response with ImageStatusResponse.Image set to + // nil. + rpc ImageStatus(ImageStatusRequest) returns (ImageStatusResponse) {} + // PullImage pulls an image with authentication config. + rpc PullImage(PullImageRequest) returns (PullImageResponse) {} + // RemoveImage removes the image. + // This call is idempotent, and must not return an error if the image has + // already been removed. + rpc RemoveImage(RemoveImageRequest) returns (RemoveImageResponse) {} + // ImageFSInfo returns information of the filesystem that is used to store images. + rpc ImageFsInfo(ImageFsInfoRequest) returns (ImageFsInfoResponse) {} +} + +message VersionRequest { + // Version of the kubelet runtime API. + string version = 1; +} + +message VersionResponse { + // Version of the kubelet runtime API. + string version = 1; + // Name of the container runtime. + string runtime_name = 2; + // Version of the container runtime. The string must be + // semver-compatible. + string runtime_version = 3; + // API version of the container runtime. The string must be + // semver-compatible. + string runtime_api_version = 4; +} + +// DNSConfig specifies the DNS servers and search domains of a sandbox. 
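The Version round trip defined just above is the natural first call once C# bindings are generated from this file. Below is a minimal sketch of that handshake; the Grpc.Tools code generation into the StellaOps.Zastava.Observer.Cri namespace, the Grpc.Net.Client transport, and the containerd socket path are all assumptions for illustration, not something this proto prescribes.

    // Sketch only: connect to an assumed CRI endpoint over a Unix domain socket
    // and ask the runtime for its name/version via the generated client.
    using System;
    using System.Net.Http;
    using System.Net.Sockets;
    using Grpc.Net.Client;
    using StellaOps.Zastava.Observer.Cri;

    const string socketPath = "/run/containerd/containerd.sock"; // assumed CRI socket

    var handler = new SocketsHttpHandler
    {
        // Route the HTTP/2 connection over the Unix domain socket instead of TCP.
        ConnectCallback = async (_, ct) =>
        {
            var socket = new Socket(AddressFamily.Unix, SocketType.Stream, ProtocolType.Unspecified);
            await socket.ConnectAsync(new UnixDomainSocketEndPoint(socketPath), ct);
            return new NetworkStream(socket, ownsSocket: true);
        }
    };

    // The address is a placeholder; ConnectCallback above decides where traffic actually goes.
    using var channel = GrpcChannel.ForAddress("http://localhost", new GrpcChannelOptions { HttpHandler = handler });
    var runtime = new RuntimeService.RuntimeServiceClient(channel);

    // Version is the cheapest round trip to confirm the endpoint speaks CRI v1.
    var version = await runtime.VersionAsync(new VersionRequest());
    Console.WriteLine($"{version.RuntimeName} {version.RuntimeVersion} (CRI {version.RuntimeApiVersion})");

CRI endpoints are local sockets, so the channel runs in cleartext HTTP/2; the http://localhost URI is never resolved because the ConnectCallback owns the connection.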
+message DNSConfig { + // List of DNS servers of the cluster. + repeated string servers = 1; + // List of DNS search domains of the cluster. + repeated string searches = 2; + // List of DNS options. See https://linux.die.net/man/5/resolv.conf + // for all available options. + repeated string options = 3; +} + +enum Protocol { + TCP = 0; + UDP = 1; + SCTP = 2; +} + +// PortMapping specifies the port mapping configurations of a sandbox. +message PortMapping { + // Protocol of the port mapping. + Protocol protocol = 1; + // Port number within the container. Default: 0 (not specified). + int32 container_port = 2; + // Port number on the host. Default: 0 (not specified). + int32 host_port = 3; + // Host IP. + string host_ip = 4; +} + +enum MountPropagation { + // No mount propagation ("rprivate" in Linux terminology). + PROPAGATION_PRIVATE = 0; + // Mounts get propagated from the host to the container ("rslave" in Linux). + PROPAGATION_HOST_TO_CONTAINER = 1; + // Mounts get propagated from the host to the container and from the + // container to the host ("rshared" in Linux). + PROPAGATION_BIDIRECTIONAL = 2; +} + +// Mount specifies a host volume to mount into a container. +message Mount { + // Path of the mount within the container. + string container_path = 1; + // Path of the mount on the host. If the hostPath doesn't exist, then runtimes + // should report error. If the hostpath is a symbolic link, runtimes should + // follow the symlink and mount the real destination to container. + string host_path = 2; + // If set, the mount is read-only. + bool readonly = 3; + // If set, the mount needs SELinux relabeling. + bool selinux_relabel = 4; + // Requested propagation mode. + MountPropagation propagation = 5; + // UidMappings specifies the runtime UID mappings for the mount. + repeated IDMapping uidMappings = 6; + // GidMappings specifies the runtime GID mappings for the mount. + repeated IDMapping gidMappings = 7; +} + +// IDMapping describes host to container ID mappings for a pod sandbox. +message IDMapping { + // HostId is the id on the host. + uint32 host_id = 1; + // ContainerId is the id in the container. + uint32 container_id = 2; + // Length is the size of the range to map. + uint32 length = 3; +} + +// A NamespaceMode describes the intended namespace configuration for each +// of the namespaces (Network, PID, IPC) in NamespaceOption. Runtimes should +// map these modes as appropriate for the technology underlying the runtime. +enum NamespaceMode { + // A POD namespace is common to all containers in a pod. + // For example, a container with a PID namespace of POD expects to view + // all of the processes in all of the containers in the pod. + POD = 0; + // A CONTAINER namespace is restricted to a single container. + // For example, a container with a PID namespace of CONTAINER expects to + // view only the processes in that container. + CONTAINER = 1; + // A NODE namespace is the namespace of the Kubernetes node. + // For example, a container with a PID namespace of NODE expects to view + // all of the processes on the host running the kubelet. + NODE = 2; + // TARGET targets the namespace of another container. When this is specified, + // a target_id must be specified in NamespaceOption and refer to a container + // previously created with NamespaceMode CONTAINER. This containers namespace + // will be made to match that of container target_id. + // For example, a container with a PID namespace of TARGET expects to view + // all of the processes that container target_id can view. 
+ TARGET = 3; +} + +// UserNamespace describes the intended user namespace configuration for a pod sandbox. +message UserNamespace { + // Mode is the NamespaceMode for this UserNamespace. + // Note: NamespaceMode for UserNamespace currently supports only POD and NODE, not CONTAINER OR TARGET. + NamespaceMode mode = 1; + + // Uids specifies the UID mappings for the user namespace. + repeated IDMapping uids = 2; + + // Gids specifies the GID mappings for the user namespace. + repeated IDMapping gids = 3; +} + +// NamespaceOption provides options for Linux namespaces. +message NamespaceOption { + // Network namespace for this container/sandbox. + // Note: There is currently no way to set CONTAINER scoped network in the Kubernetes API. + // Namespaces currently set by the kubelet: POD, NODE + NamespaceMode network = 1; + // PID namespace for this container/sandbox. + // Note: The CRI default is POD, but the v1.PodSpec default is CONTAINER. + // The kubelet's runtime manager will set this to CONTAINER explicitly for v1 pods. + // Namespaces currently set by the kubelet: POD, CONTAINER, NODE, TARGET + NamespaceMode pid = 2; + // IPC namespace for this container/sandbox. + // Note: There is currently no way to set CONTAINER scoped IPC in the Kubernetes API. + // Namespaces currently set by the kubelet: POD, NODE + NamespaceMode ipc = 3; + // Target Container ID for NamespaceMode of TARGET. This container must have been + // previously created in the same pod. It is not possible to specify different targets + // for each namespace. + string target_id = 4; + // UsernsOptions for this pod sandbox. + // The Kubelet picks the user namespace configuration to use for the pod sandbox. The mappings + // are specified as part of the UserNamespace struct. If the struct is nil, then the POD mode + // must be assumed. This is done for backward compatibility with older Kubelet versions that + // do not set a user namespace. + UserNamespace userns_options = 5; +} + +// Int64Value is the wrapper of int64. +message Int64Value { + // The value. + int64 value = 1; +} + +// LinuxSandboxSecurityContext holds linux security configuration that will be +// applied to a sandbox. Note that: +// 1) It does not apply to containers in the pods. +// 2) It may not be applicable to a PodSandbox which does not contain any running +// process. +message LinuxSandboxSecurityContext { + // Configurations for the sandbox's namespaces. + // This will be used only if the PodSandbox uses namespace for isolation. + NamespaceOption namespace_options = 1; + // Optional SELinux context to be applied. + SELinuxOption selinux_options = 2; + // UID to run sandbox processes as, when applicable. + Int64Value run_as_user = 3; + // GID to run sandbox processes as, when applicable. run_as_group should only + // be specified when run_as_user is specified; otherwise, the runtime MUST error. + Int64Value run_as_group = 8; + // If set, the root filesystem of the sandbox is read-only. + bool readonly_rootfs = 4; + // List of groups applied to the first process run in the sandbox, in + // addition to the sandbox's primary GID, and group memberships defined + // in the container image for the sandbox's primary UID of the container process. + // If the list is empty, no additional groups are added to any container. + // Note that group memberships defined in the container image for the sandbox's primary UID + // of the container process are still effective, even if they are not included in this list. 
+ repeated int64 supplemental_groups = 5; + // Indicates whether the sandbox will be asked to run a privileged + // container. If a privileged container is to be executed within it, this + // MUST be true. + // This allows a sandbox to take additional security precautions if no + // privileged containers are expected to be run. + bool privileged = 6; + // Seccomp profile for the sandbox. + SecurityProfile seccomp = 9; + // AppArmor profile for the sandbox. + SecurityProfile apparmor = 10; + // Seccomp profile for the sandbox, candidate values are: + // * runtime/default: the default profile for the container runtime + // * unconfined: unconfined profile, ie, no seccomp sandboxing + // * localhost/<full-path-to-profile>: the profile installed on the node. + // <full-path-to-profile> is the full path of the profile. + // Default: "", which is identical with unconfined. + string seccomp_profile_path = 7 [deprecated=true]; +} + +// A security profile which can be used for sandboxes and containers. +message SecurityProfile { + // Available profile types. + enum ProfileType { + // The container runtime default profile should be used. + RuntimeDefault = 0; + // Disable the feature for the sandbox or the container. + Unconfined = 1; + // A pre-defined profile on the node should be used. + Localhost = 2; + } + // Indicator which `ProfileType` should be applied. + ProfileType profile_type = 1; + // Indicates that a pre-defined profile on the node should be used. + // Must only be set if `ProfileType` is `Localhost`. + // For seccomp, it must be an absolute path to the seccomp profile. + // For AppArmor, this field is the AppArmor `<profile name>/` + string localhost_ref = 2; +} + +// LinuxPodSandboxConfig holds platform-specific configurations for Linux +// host platforms and Linux-based containers. +message LinuxPodSandboxConfig { + // Parent cgroup of the PodSandbox. + // The cgroupfs style syntax will be used, but the container runtime can + // convert it to systemd semantics if needed. + string cgroup_parent = 1; + // LinuxSandboxSecurityContext holds sandbox security attributes. + LinuxSandboxSecurityContext security_context = 2; + // Sysctls holds linux sysctls config for the sandbox. + map<string, string> sysctls = 3; + // Optional overhead represents the overheads associated with this sandbox + LinuxContainerResources overhead = 4; + // Optional resources represents the sum of container resources for this sandbox + LinuxContainerResources resources = 5; +} + +// PodSandboxMetadata holds all necessary information for building the sandbox name. +// The container runtime is encouraged to expose the metadata associated with the +// PodSandbox in its user interface for better user experience. For example, +// the runtime can construct a unique PodSandboxName based on the metadata. +message PodSandboxMetadata { + // Pod name of the sandbox. Same as the pod name in the Pod ObjectMeta. + string name = 1; + // Pod UID of the sandbox. Same as the pod UID in the Pod ObjectMeta. + string uid = 2; + // Pod namespace of the sandbox. Same as the pod namespace in the Pod ObjectMeta. + string namespace = 3; + // Attempt number of creating the sandbox. Default: 0. + uint32 attempt = 4; +} + +// PodSandboxConfig holds all the required and optional fields for creating a +// sandbox. +message PodSandboxConfig { + // Metadata of the sandbox. This information will uniquely identify the + // sandbox, and the runtime should leverage this to ensure correct + // operation. 
The runtime may also use this information to improve UX, such + // as by constructing a readable name. + PodSandboxMetadata metadata = 1; + // Hostname of the sandbox. Hostname could only be empty when the pod + // network namespace is NODE. + string hostname = 2; + // Path to the directory on the host in which container log files are + // stored. + // By default the log of a container going into the LogDirectory will be + // hooked up to STDOUT and STDERR. However, the LogDirectory may contain + // binary log files with structured logging data from the individual + // containers. For example, the files might be newline separated JSON + // structured logs, systemd-journald journal files, gRPC trace files, etc. + // E.g., + // PodSandboxConfig.LogDirectory = `/var/log/pods/<NAMESPACE>_<NAME>_<UID>/` + // ContainerConfig.LogPath = `containerName/Instance#.log` + string log_directory = 3; + // DNS config for the sandbox. + DNSConfig dns_config = 4; + // Port mappings for the sandbox. + repeated PortMapping port_mappings = 5; + // Key-value pairs that may be used to scope and select individual resources. + map<string, string> labels = 6; + // Unstructured key-value map that may be set by the kubelet to store and + // retrieve arbitrary metadata. This will include any annotations set on a + // pod through the Kubernetes API. + // + // Annotations MUST NOT be altered by the runtime; the annotations stored + // here MUST be returned in the PodSandboxStatus associated with the pod + // this PodSandboxConfig creates. + // + // In general, in order to preserve a well-defined interface between the + // kubelet and the container runtime, annotations SHOULD NOT influence + // runtime behaviour. + // + // Annotations can also be useful for runtime authors to experiment with + // new features that are opaque to the Kubernetes APIs (both user-facing + // and the CRI). Whenever possible, however, runtime authors SHOULD + // consider proposing new typed fields for any new features instead. + map<string, string> annotations = 7; + // Optional configurations specific to Linux hosts. + LinuxPodSandboxConfig linux = 8; + // Optional configurations specific to Windows hosts. + WindowsPodSandboxConfig windows = 9; +} + +message RunPodSandboxRequest { + // Configuration for creating a PodSandbox. + PodSandboxConfig config = 1; + // Named runtime configuration to use for this PodSandbox. + // If the runtime handler is unknown, this request should be rejected. An + // empty string should select the default handler, equivalent to the + // behavior before this feature was added. + // See https://git.k8s.io/enhancements/keps/sig-node/585-runtime-class + string runtime_handler = 2; +} + +message RunPodSandboxResponse { + // ID of the PodSandbox to run. + string pod_sandbox_id = 1; +} + +message StopPodSandboxRequest { + // ID of the PodSandbox to stop. + string pod_sandbox_id = 1; +} + +message StopPodSandboxResponse {} + +message RemovePodSandboxRequest { + // ID of the PodSandbox to remove. + string pod_sandbox_id = 1; +} + +message RemovePodSandboxResponse {} + +message PodSandboxStatusRequest { + // ID of the PodSandbox for which to retrieve status. + string pod_sandbox_id = 1; + // Verbose indicates whether to return extra information about the pod sandbox. + bool verbose = 2; +} + +// PodIP represents an ip of a Pod +message PodIP{ + // an ip is a string representation of an IPv4 or an IPv6 + string ip = 1; +} +// PodSandboxNetworkStatus is the status of the network for a PodSandbox. 
+// Currently ignored for pods sharing the host networking namespace. +message PodSandboxNetworkStatus { + // IP address of the PodSandbox. + string ip = 1; + // list of additional ips (not inclusive of PodSandboxNetworkStatus.Ip) of the PodSandBoxNetworkStatus + repeated PodIP additional_ips = 2; +} + +// Namespace contains paths to the namespaces. +message Namespace { + // Namespace options for Linux namespaces. + NamespaceOption options = 2; +} + +// LinuxSandboxStatus contains status specific to Linux sandboxes. +message LinuxPodSandboxStatus { + // Paths to the sandbox's namespaces. + Namespace namespaces = 1; +} + +enum PodSandboxState { + SANDBOX_READY = 0; + SANDBOX_NOTREADY = 1; +} + +// PodSandboxStatus contains the status of the PodSandbox. +message PodSandboxStatus { + // ID of the sandbox. + string id = 1; + // Metadata of the sandbox. + PodSandboxMetadata metadata = 2; + // State of the sandbox. + PodSandboxState state = 3; + // Creation timestamp of the sandbox in nanoseconds. Must be > 0. + int64 created_at = 4; + // Network contains network status if network is handled by the runtime. + PodSandboxNetworkStatus network = 5; + // Linux-specific status to a pod sandbox. + LinuxPodSandboxStatus linux = 6; + // Labels are key-value pairs that may be used to scope and select individual resources. + map<string, string> labels = 7; + // Unstructured key-value map holding arbitrary metadata. + // Annotations MUST NOT be altered by the runtime; the value of this field + // MUST be identical to that of the corresponding PodSandboxConfig used to + // instantiate the pod sandbox this status represents. + map<string, string> annotations = 8; + // runtime configuration used for this PodSandbox. + string runtime_handler = 9; +} + +message PodSandboxStatusResponse { + // Status of the PodSandbox. + PodSandboxStatus status = 1; + // Info is extra information of the PodSandbox. The key could be arbitrary string, and + // value should be in json format. The information could include anything useful for + // debug, e.g. network namespace for linux container based container runtime. + // It should only be returned non-empty when Verbose is true. + map<string, string> info = 2; + // Container statuses + repeated ContainerStatus containers_statuses = 3; + // Timestamp at which container and pod statuses were recorded + int64 timestamp = 4; +} + +// PodSandboxStateValue is the wrapper of PodSandboxState. +message PodSandboxStateValue { + // State of the sandbox. + PodSandboxState state = 1; +} + +// PodSandboxFilter is used to filter a list of PodSandboxes. +// All those fields are combined with 'AND' +message PodSandboxFilter { + // ID of the sandbox. + string id = 1; + // State of the sandbox. + PodSandboxStateValue state = 2; + // LabelSelector to select matches. + // Only api.MatchLabels is supported for now and the requirements + // are ANDed. MatchExpressions is not supported yet. + map<string, string> label_selector = 3; +} + +message ListPodSandboxRequest { + // PodSandboxFilter to filter a list of PodSandboxes. + PodSandboxFilter filter = 1; +} + + +// PodSandbox contains minimal information about a sandbox. +message PodSandbox { + // ID of the PodSandbox. + string id = 1; + // Metadata of the PodSandbox. + PodSandboxMetadata metadata = 2; + // State of the PodSandbox. + PodSandboxState state = 3; + // Creation timestamps of the PodSandbox in nanoseconds. Must be > 0. + int64 created_at = 4; + // Labels of the PodSandbox. 
+ map<string, string> labels = 5; + // Unstructured key-value map holding arbitrary metadata. + // Annotations MUST NOT be altered by the runtime; the value of this field + // MUST be identical to that of the corresponding PodSandboxConfig used to + // instantiate this PodSandbox. + map<string, string> annotations = 6; + // runtime configuration used for this PodSandbox. + string runtime_handler = 7; +} + +message ListPodSandboxResponse { + // List of PodSandboxes. + repeated PodSandbox items = 1; +} + +message PodSandboxStatsRequest { + // ID of the pod sandbox for which to retrieve stats. + string pod_sandbox_id = 1; +} + +message PodSandboxStatsResponse { + PodSandboxStats stats = 1; +} + +// PodSandboxStatsFilter is used to filter the list of pod sandboxes to retrieve stats for. +// All those fields are combined with 'AND'. +message PodSandboxStatsFilter { + // ID of the pod sandbox. + string id = 1; + // LabelSelector to select matches. + // Only api.MatchLabels is supported for now and the requirements + // are ANDed. MatchExpressions is not supported yet. + map<string, string> label_selector = 2; +} + +message ListPodSandboxStatsRequest { + // Filter for the list request. + PodSandboxStatsFilter filter = 1; +} + +message ListPodSandboxStatsResponse { + // Stats of the pod sandbox. + repeated PodSandboxStats stats = 1; +} + +// PodSandboxAttributes provides basic information of the pod sandbox. +message PodSandboxAttributes { + // ID of the pod sandbox. + string id = 1; + // Metadata of the pod sandbox. + PodSandboxMetadata metadata = 2; + // Key-value pairs that may be used to scope and select individual resources. + map<string,string> labels = 3; + // Unstructured key-value map holding arbitrary metadata. + // Annotations MUST NOT be altered by the runtime; the value of this field + // MUST be identical to that of the corresponding PodSandboxStatus used to + // instantiate the PodSandbox this status represents. + map<string,string> annotations = 4; +} + +// PodSandboxStats provides the resource usage statistics for a pod. +// The linux or windows field will be populated depending on the platform. +message PodSandboxStats { + // Information of the pod. + PodSandboxAttributes attributes = 1; + // Stats from linux. + LinuxPodSandboxStats linux = 2; + // Stats from windows. + WindowsPodSandboxStats windows = 3; +} + +// LinuxPodSandboxStats provides the resource usage statistics for a pod sandbox on linux. +message LinuxPodSandboxStats { + // CPU usage gathered for the pod sandbox. + CpuUsage cpu = 1; + // Memory usage gathered for the pod sandbox. + MemoryUsage memory = 2; + // Network usage gathered for the pod sandbox + NetworkUsage network = 3; + // Stats pertaining to processes in the pod sandbox. + ProcessUsage process = 4; + // Stats of containers in the measured pod sandbox. + repeated ContainerStats containers = 5; +} + +// WindowsPodSandboxStats provides the resource usage statistics for a pod sandbox on windows +message WindowsPodSandboxStats { + // CPU usage gathered for the pod sandbox. + WindowsCpuUsage cpu = 1; + // Memory usage gathered for the pod sandbox. + WindowsMemoryUsage memory = 2; + // Network usage gathered for the pod sandbox + WindowsNetworkUsage network = 3; + // Stats pertaining to processes in the pod sandbox. + WindowsProcessUsage process = 4; + // Stats of containers in the measured pod sandbox. + repeated WindowsContainerStats containers = 5; +} + +// NetworkUsage contains data about network resources. 
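To illustrate how the pod-level stats messages compose, here is a hedged sketch that walks ListPodSandboxStats and reads the Linux CPU and memory branches. The client is assumed to be built as in the earlier sketch, the helper class name is illustrative, and only the Linux side of the platform split is shown.

    // Sketch: dump per-sandbox Linux CPU and memory figures from ListPodSandboxStats.
    using System;
    using System.Threading.Tasks;
    using StellaOps.Zastava.Observer.Cri;

    static class SandboxStatsProbe
    {
        public static async Task DumpAsync(RuntimeService.RuntimeServiceClient runtime)
        {
            // Empty request = no PodSandboxStatsFilter, i.e. stats for every sandbox.
            var response = await runtime.ListPodSandboxStatsAsync(new ListPodSandboxStatsRequest());
            foreach (var stats in response.Stats)
            {
                // Windows sandboxes populate the Windows branch instead; not handled here.
                var cpu = stats.Linux?.Cpu?.UsageNanoCores?.Value;
                var workingSet = stats.Linux?.Memory?.WorkingSetBytes?.Value;
                Console.WriteLine(
                    $"{stats.Attributes.Metadata?.Namespace}/{stats.Attributes.Metadata?.Name}: " +
                    $"cpu={cpu?.ToString() ?? "n/a"} nanocores, workingSet={workingSet?.ToString() ?? "n/a"} bytes");
            }
        }
    }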
+message NetworkUsage { + // Timestamp in nanoseconds at which the information were collected. Must be > 0. + int64 timestamp = 1; + // Stats for the default network interface. + NetworkInterfaceUsage default_interface = 2; + // Stats for all found network interfaces, excluding the default. + repeated NetworkInterfaceUsage interfaces = 3; +} + +// WindowsNetworkUsage contains data about network resources specific to Windows. +message WindowsNetworkUsage { + // Timestamp in nanoseconds at which the information were collected. Must be > 0. + int64 timestamp = 1; + // Stats for the default network interface. + WindowsNetworkInterfaceUsage default_interface = 2; + // Stats for all found network interfaces, excluding the default. + repeated WindowsNetworkInterfaceUsage interfaces = 3; +} + +// NetworkInterfaceUsage contains resource value data about a network interface. +message NetworkInterfaceUsage { + // The name of the network interface. + string name = 1; + // Cumulative count of bytes received. + UInt64Value rx_bytes = 2; + // Cumulative count of receive errors encountered. + UInt64Value rx_errors = 3; + // Cumulative count of bytes transmitted. + UInt64Value tx_bytes = 4; + // Cumulative count of transmit errors encountered. + UInt64Value tx_errors = 5; +} + +// WindowsNetworkInterfaceUsage contains resource value data about a network interface specific for Windows. +message WindowsNetworkInterfaceUsage { + // The name of the network interface. + string name = 1; + // Cumulative count of bytes received. + UInt64Value rx_bytes = 2; + // Cumulative count of receive errors encountered. + UInt64Value rx_packets_dropped = 3; + // Cumulative count of bytes transmitted. + UInt64Value tx_bytes = 4; + // Cumulative count of transmit errors encountered. + UInt64Value tx_packets_dropped = 5; +} + +// ProcessUsage are stats pertaining to processes. +message ProcessUsage { + // Timestamp in nanoseconds at which the information were collected. Must be > 0. + int64 timestamp = 1; + // Number of processes. + UInt64Value process_count = 2; +} + +// WindowsProcessUsage are stats pertaining to processes specific to Windows. +message WindowsProcessUsage { + // Timestamp in nanoseconds at which the information were collected. Must be > 0. + int64 timestamp = 1; + // Number of processes. + UInt64Value process_count = 2; +} + +// ImageSpec is an internal representation of an image. +message ImageSpec { + // Container's Image field (e.g. imageID or imageDigest). + string image = 1; + // Unstructured key-value map holding arbitrary metadata. + // ImageSpec Annotations can be used to help the runtime target specific + // images in multi-arch images. + map<string, string> annotations = 2; + // The container image reference specified by the user (e.g. image[:tag] or digest). + // Only set if available within the RPC context. + string user_specified_image = 18; + // Runtime handler to use for pulling the image. + // If the runtime handler is unknown, the request should be rejected. + // An empty string would select the default runtime handler. + string runtime_handler = 19; +} + +message KeyValue { + string key = 1; + string value = 2; +} + +// LinuxContainerResources specifies Linux specific configuration for +// resources. +message LinuxContainerResources { + // CPU CFS (Completely Fair Scheduler) period. Default: 0 (not specified). + int64 cpu_period = 1; + // CPU CFS (Completely Fair Scheduler) quota. Default: 0 (not specified). + int64 cpu_quota = 2; + // CPU shares (relative weight vs. other containers). 
Default: 0 (not specified). + int64 cpu_shares = 3; + // Memory limit in bytes. Default: 0 (not specified). + int64 memory_limit_in_bytes = 4; + // OOMScoreAdj adjusts the oom-killer score. Default: 0 (not specified). + int64 oom_score_adj = 5; + // CpusetCpus constrains the allowed set of logical CPUs. Default: "" (not specified). + string cpuset_cpus = 6; + // CpusetMems constrains the allowed set of memory nodes. Default: "" (not specified). + string cpuset_mems = 7; + // List of HugepageLimits to limit the HugeTLB usage of container per page size. Default: nil (not specified). + repeated HugepageLimit hugepage_limits = 8; + // Unified resources for cgroup v2. Default: nil (not specified). + // Each key/value in the map refers to the cgroup v2. + // e.g. "memory.max": "6937202688" or "io.weight": "default 100". + map<string, string> unified = 9; + // Memory swap limit in bytes. Default 0 (not specified). + int64 memory_swap_limit_in_bytes = 10; +} + +// HugepageLimit corresponds to the file`hugetlb.<hugepagesize>.limit_in_byte` in container level cgroup. +// For example, `PageSize=1GB`, `Limit=1073741824` means setting `1073741824` bytes to hugetlb.1GB.limit_in_bytes. +message HugepageLimit { + // The value of PageSize has the format <size><unit-prefix>B (2MB, 1GB), + // and must match the <hugepagesize> of the corresponding control file found in `hugetlb.<hugepagesize>.limit_in_bytes`. + // The values of <unit-prefix> are intended to be parsed using base 1024("1KB" = 1024, "1MB" = 1048576, etc). + string page_size = 1; + // limit in bytes of hugepagesize HugeTLB usage. + uint64 limit = 2; +} + +// SELinuxOption are the labels to be applied to the container. +message SELinuxOption { + string user = 1; + string role = 2; + string type = 3; + string level = 4; +} + +// Capability contains the container capabilities to add or drop +// Dropping a capability will drop it from all sets. +// If a capability is added to only the add_capabilities list then it gets added to permitted, +// inheritable, effective and bounding sets, i.e. all sets except the ambient set. +// If a capability is added to only the add_ambient_capabilities list then it gets added to all sets, i.e permitted +// inheritable, effective, bounding and ambient sets. +// If a capability is added to add_capabilities and add_ambient_capabilities lists then it gets added to all sets, i.e. +// permitted, inheritable, effective, bounding and ambient sets. +message Capability { + // List of capabilities to add. + repeated string add_capabilities = 1; + // List of capabilities to drop. + repeated string drop_capabilities = 2; + // List of ambient capabilities to add. + repeated string add_ambient_capabilities = 3; +} + +// LinuxContainerSecurityContext holds linux security configuration that will be applied to a container. +message LinuxContainerSecurityContext { + // Capabilities to add or drop. + Capability capabilities = 1; + // If set, run container in privileged mode. + // Privileged mode is incompatible with the following options. If + // privileged is set, the following features MAY have no effect: + // 1. capabilities + // 2. selinux_options + // 4. seccomp + // 5. apparmor + // + // Privileged mode implies the following specific options are applied: + // 1. All capabilities are added. + // 2. Sensitive paths, such as kernel module paths within sysfs, are not masked. + // 3. Any sysfs and procfs mounts are mounted RW. + // 4. AppArmor confinement is not applied. + // 5. Seccomp restrictions are not applied. + // 6. 
The device cgroup does not restrict access to any devices. + // 7. All devices from the host's /dev are available within the container. + // 8. SELinux restrictions are not applied (e.g. label=disabled). + bool privileged = 2; + // Configurations for the container's namespaces. + // Only used if the container uses namespace for isolation. + NamespaceOption namespace_options = 3; + // SELinux context to be optionally applied. + SELinuxOption selinux_options = 4; + // UID to run the container process as. Only one of run_as_user and + // run_as_username can be specified at a time. + Int64Value run_as_user = 5; + // GID to run the container process as. run_as_group should only be specified + // when run_as_user or run_as_username is specified; otherwise, the runtime + // MUST error. + Int64Value run_as_group = 12; + // User name to run the container process as. If specified, the user MUST + // exist in the container image (i.e. in the /etc/passwd inside the image), + // and be resolved there by the runtime; otherwise, the runtime MUST error. + string run_as_username = 6; + // If set, the root filesystem of the container is read-only. + bool readonly_rootfs = 7; + // List of groups applied to the first process run in the container, in + // addition to the container's primary GID, and group memberships defined + // in the container image for the container's primary UID of the container process. + // If the list is empty, no additional groups are added to any container. + // Note that group memberships defined in the container image for the container's primary UID + // of the container process are still effective, even if they are not included in this list. + repeated int64 supplemental_groups = 8; + // no_new_privs defines if the flag for no_new_privs should be set on the + // container. + bool no_new_privs = 11; + // masked_paths is a slice of paths that should be masked by the container + // runtime, this can be passed directly to the OCI spec. + repeated string masked_paths = 13; + // readonly_paths is a slice of paths that should be set as readonly by the + // container runtime, this can be passed directly to the OCI spec. + repeated string readonly_paths = 14; + // Seccomp profile for the container. + SecurityProfile seccomp = 15; + // AppArmor profile for the container. + SecurityProfile apparmor = 16; + // AppArmor profile for the container, candidate values are: + // * runtime/default: equivalent to not specifying a profile. + // * unconfined: no profiles are loaded + // * localhost/<profile_name>: profile loaded on the node + // (localhost) by name. The possible profile names are detailed at + // https://gitlab.com/apparmor/apparmor/-/wikis/AppArmor_Core_Policy_Reference + string apparmor_profile = 9 [deprecated=true]; + // Seccomp profile for the container, candidate values are: + // * runtime/default: the default profile for the container runtime + // * unconfined: unconfined profile, ie, no seccomp sandboxing + // * localhost/<full-path-to-profile>: the profile installed on the node. + // <full-path-to-profile> is the full path of the profile. + // Default: "", which is identical with unconfined. + string seccomp_profile_path = 10 [deprecated=true]; +} + +// LinuxContainerConfig contains platform-specific configuration for +// Linux-based containers. +message LinuxContainerConfig { + // Resources specification for the container. + LinuxContainerResources resources = 1; + // LinuxContainerSecurityContext configuration for the container. 
+ LinuxContainerSecurityContext security_context = 2; +} + +// WindowsNamespaceOption provides options for Windows namespaces. +message WindowsNamespaceOption { + // Network namespace for this container/sandbox. + // Namespaces currently set by the kubelet: POD, NODE + NamespaceMode network = 1; +} + +// WindowsSandboxSecurityContext holds platform-specific configurations that will be +// applied to a sandbox. +// These settings will only apply to the sandbox container. +message WindowsSandboxSecurityContext { + // User name to run the container process as. If specified, the user MUST + // exist in the container image and be resolved there by the runtime; + // otherwise, the runtime MUST return error. + string run_as_username = 1; + + // The contents of the GMSA credential spec to use to run this container. + string credential_spec = 2; + + // Indicates whether the container requested to run as a HostProcess container. + bool host_process = 3; + + // Configuration for the sandbox's namespaces + WindowsNamespaceOption namespace_options = 4; +} + +// WindowsPodSandboxConfig holds platform-specific configurations for Windows +// host platforms and Windows-based containers. +message WindowsPodSandboxConfig { + // WindowsSandboxSecurityContext holds sandbox security attributes. + WindowsSandboxSecurityContext security_context = 1; +} + +// WindowsContainerSecurityContext holds windows security configuration that will be applied to a container. +message WindowsContainerSecurityContext { + // User name to run the container process as. If specified, the user MUST + // exist in the container image and be resolved there by the runtime; + // otherwise, the runtime MUST return error. + string run_as_username = 1; + + // The contents of the GMSA credential spec to use to run this container. + string credential_spec = 2; + + // Indicates whether a container is to be run as a HostProcess container. + bool host_process = 3; +} + +// WindowsContainerConfig contains platform-specific configuration for +// Windows-based containers. +message WindowsContainerConfig { + // Resources specification for the container. + WindowsContainerResources resources = 1; + // WindowsContainerSecurityContext configuration for the container. + WindowsContainerSecurityContext security_context = 2; +} + +// WindowsContainerResources specifies Windows specific configuration for +// resources. +message WindowsContainerResources { + // CPU shares (relative weight vs. other containers). Default: 0 (not specified). + int64 cpu_shares = 1; + // Number of CPUs available to the container. Default: 0 (not specified). + int64 cpu_count = 2; + // Specifies the portion of processor cycles that this container can use as a percentage times 100. + int64 cpu_maximum = 3; + // Memory limit in bytes. Default: 0 (not specified). + int64 memory_limit_in_bytes = 4; + // Specifies the size of the rootfs / scratch space in bytes to be configured for this container. Default: 0 (not specified). + int64 rootfs_size_in_bytes = 5; +} + +// ContainerMetadata holds all necessary information for building the container +// name. The container runtime is encouraged to expose the metadata in its user +// interface for better user experience. E.g., runtime can construct a unique +// container name based on the metadata. Note that (name, attempt) is unique +// within a sandbox for the entire lifetime of the sandbox. +message ContainerMetadata { + // Name of the container. Same as the container name in the PodSpec. 
+ string name = 1; + // Attempt number of creating the container. Default: 0. + uint32 attempt = 2; +} + +// Device specifies a host device to mount into a container. +message Device { + // Path of the device within the container. + string container_path = 1; + // Path of the device on the host. + string host_path = 2; + // Cgroups permissions of the device, candidates are one or more of + // * r - allows container to read from the specified device. + // * w - allows container to write to the specified device. + // * m - allows container to create device files that do not yet exist. + string permissions = 3; +} + +// CDIDevice specifies a CDI device information. +message CDIDevice { + // Fully qualified CDI device name + // for example: vendor.com/gpu=gpudevice1 + // see more details in the CDI specification: + // https://github.com/container-orchestrated-devices/container-device-interface/blob/main/SPEC.md + string name = 1; +} + +// ContainerConfig holds all the required and optional fields for creating a +// container. +message ContainerConfig { + // Metadata of the container. This information will uniquely identify the + // container, and the runtime should leverage this to ensure correct + // operation. The runtime may also use this information to improve UX, such + // as by constructing a readable name. + ContainerMetadata metadata = 1 ; + // Image to use. + ImageSpec image = 2; + // Command to execute (i.e., entrypoint for docker) + repeated string command = 3; + // Args for the Command (i.e., command for docker) + repeated string args = 4; + // Current working directory of the command. + string working_dir = 5; + // List of environment variable to set in the container. + repeated KeyValue envs = 6; + // Mounts for the container. + repeated Mount mounts = 7; + // Devices for the container. + repeated Device devices = 8; + // Key-value pairs that may be used to scope and select individual resources. + // Label keys are of the form: + // label-key ::= prefixed-name | name + // prefixed-name ::= prefix '/' name + // prefix ::= DNS_SUBDOMAIN + // name ::= DNS_LABEL + map<string, string> labels = 9; + // Unstructured key-value map that may be used by the kubelet to store and + // retrieve arbitrary metadata. + // + // Annotations MUST NOT be altered by the runtime; the annotations stored + // here MUST be returned in the ContainerStatus associated with the container + // this ContainerConfig creates. + // + // In general, in order to preserve a well-defined interface between the + // kubelet and the container runtime, annotations SHOULD NOT influence + // runtime behaviour. + map<string, string> annotations = 10; + // Path relative to PodSandboxConfig.LogDirectory for container to store + // the log (STDOUT and STDERR) on the host. + // E.g., + // PodSandboxConfig.LogDirectory = `/var/log/pods/<NAMESPACE>_<NAME>_<UID>/` + // ContainerConfig.LogPath = `containerName/Instance#.log` + string log_path = 11; + + // Variables for interactive containers, these have very specialized + // use-cases (e.g. debugging). + bool stdin = 12; + bool stdin_once = 13; + bool tty = 14; + + // Configuration specific to Linux containers. + LinuxContainerConfig linux = 15; + // Configuration specific to Windows containers. + WindowsContainerConfig windows = 16; + + // CDI devices for the container. + repeated CDIDevice CDI_devices = 17; +} + +message CreateContainerRequest { + // ID of the PodSandbox in which the container should be created. + string pod_sandbox_id = 1; + // Config of the container. 
+ ContainerConfig config = 2; + // Config of the PodSandbox. This is the same config that was passed + // to RunPodSandboxRequest to create the PodSandbox. It is passed again + // here just for easy reference. The PodSandboxConfig is immutable and + // remains the same throughout the lifetime of the pod. + PodSandboxConfig sandbox_config = 3; +} + +message CreateContainerResponse { + // ID of the created container. + string container_id = 1; +} + +message StartContainerRequest { + // ID of the container to start. + string container_id = 1; +} + +message StartContainerResponse {} + +message StopContainerRequest { + // ID of the container to stop. + string container_id = 1; + // Timeout in seconds to wait for the container to stop before forcibly + // terminating it. Default: 0 (forcibly terminate the container immediately) + int64 timeout = 2; +} + +message StopContainerResponse {} + +message RemoveContainerRequest { + // ID of the container to remove. + string container_id = 1; +} + +message RemoveContainerResponse {} + +enum ContainerState { + CONTAINER_CREATED = 0; + CONTAINER_RUNNING = 1; + CONTAINER_EXITED = 2; + CONTAINER_UNKNOWN = 3; +} + +// ContainerStateValue is the wrapper of ContainerState. +message ContainerStateValue { + // State of the container. + ContainerState state = 1; +} + +// ContainerFilter is used to filter containers. +// All those fields are combined with 'AND' +message ContainerFilter { + // ID of the container. + string id = 1; + // State of the container. + ContainerStateValue state = 2; + // ID of the PodSandbox. + string pod_sandbox_id = 3; + // LabelSelector to select matches. + // Only api.MatchLabels is supported for now and the requirements + // are ANDed. MatchExpressions is not supported yet. + map<string, string> label_selector = 4; +} + +message ListContainersRequest { + ContainerFilter filter = 1; +} + +// Container provides the runtime information for a container, such as ID, hash, +// state of the container. +message Container { + // ID of the container, used by the container runtime to identify + // a container. + string id = 1; + // ID of the sandbox to which this container belongs. + string pod_sandbox_id = 2; + // Metadata of the container. + ContainerMetadata metadata = 3; + // Spec of the image. + ImageSpec image = 4; + // Reference to the image in use. For most runtimes, this should be an + // image ID. + string image_ref = 5; + // State of the container. + ContainerState state = 6; + // Creation time of the container in nanoseconds. + int64 created_at = 7; + // Key-value pairs that may be used to scope and select individual resources. + map<string, string> labels = 8; + // Unstructured key-value map holding arbitrary metadata. + // Annotations MUST NOT be altered by the runtime; the value of this field + // MUST be identical to that of the corresponding ContainerConfig used to + // instantiate this Container. + map<string, string> annotations = 9; +} + +message ListContainersResponse { + // List of containers. + repeated Container containers = 1; +} + +message ContainerStatusRequest { + // ID of the container for which to retrieve status. + string container_id = 1; + // Verbose indicates whether to return extra information about the container. + bool verbose = 2; +} + +// ContainerStatus represents the status of a container. +message ContainerStatus { + // ID of the container. + string id = 1; + // Metadata of the container. + ContainerMetadata metadata = 2; + // Status of the container. 
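The filter, state-wrapper, and status messages above combine naturally into a "list what is running, then inspect it" loop. The sketch below assumes the same generated client as before; the ContainerInventory class and its method are hypothetical names used only for illustration.

    // Sketch: enumerate running containers, then fetch each one's ContainerStatus.
    using System;
    using System.Threading.Tasks;
    using StellaOps.Zastava.Observer.Cri;

    static class ContainerInventory
    {
        public static async Task ListRunningAsync(RuntimeService.RuntimeServiceClient runtime)
        {
            var running = await runtime.ListContainersAsync(new ListContainersRequest
            {
                Filter = new ContainerFilter
                {
                    State = new ContainerStateValue { State = ContainerState.ContainerRunning }
                }
            });

            foreach (var container in running.Containers)
            {
                // Verbose=false keeps the response to the structured ContainerStatus fields.
                var status = await runtime.ContainerStatusAsync(new ContainerStatusRequest
                {
                    ContainerId = container.Id,
                    Verbose = false
                });
                Console.WriteLine(
                    $"{container.Id} image={status.Status.Image?.Image} " +
                    $"startedAt={status.Status.StartedAt} state={status.Status.State}");
            }
        }
    }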
+ ContainerState state = 3; + // Creation time of the container in nanoseconds. + int64 created_at = 4; + // Start time of the container in nanoseconds. Default: 0 (not specified). + int64 started_at = 5; + // Finish time of the container in nanoseconds. Default: 0 (not specified). + int64 finished_at = 6; + // Exit code of the container. Only required when finished_at != 0. Default: 0. + int32 exit_code = 7; + // Spec of the image. + ImageSpec image = 8; + // Reference to the image in use. For most runtimes, this should be an + // image ID + string image_ref = 9; + // Brief CamelCase string explaining why container is in its current state. + // Must be set to "OOMKilled" for containers terminated by cgroup-based Out-of-Memory killer. + string reason = 10; + // Human-readable message indicating details about why container is in its + // current state. + string message = 11; + // Key-value pairs that may be used to scope and select individual resources. + map<string,string> labels = 12; + // Unstructured key-value map holding arbitrary metadata. + // Annotations MUST NOT be altered by the runtime; the value of this field + // MUST be identical to that of the corresponding ContainerConfig used to + // instantiate the Container this status represents. + map<string,string> annotations = 13; + // Mounts for the container. + repeated Mount mounts = 14; + // Log path of container. + string log_path = 15; + // Resource limits configuration of the container. + ContainerResources resources = 16; +} + +message ContainerStatusResponse { + // Status of the container. + ContainerStatus status = 1; + // Info is extra information of the Container. The key could be arbitrary string, and + // value should be in json format. The information could include anything useful for + // debug, e.g. pid for linux container based container runtime. + // It should only be returned non-empty when Verbose is true. + map<string, string> info = 2; +} + +// ContainerResources holds resource limits configuration for a container. +message ContainerResources { + // Resource limits configuration specific to Linux container. + LinuxContainerResources linux = 1; + // Resource limits configuration specific to Windows container. + WindowsContainerResources windows = 2; +} + +message UpdateContainerResourcesRequest { + // ID of the container to update. + string container_id = 1; + // Resource configuration specific to Linux containers. + LinuxContainerResources linux = 2; + // Resource configuration specific to Windows containers. + WindowsContainerResources windows = 3; + // Unstructured key-value map holding arbitrary additional information for + // container resources updating. This can be used for specifying experimental + // resources to update or other options to use when updating the container. + map<string, string> annotations = 4; +} + +message UpdateContainerResourcesResponse {} + +message ExecSyncRequest { + // ID of the container. + string container_id = 1; + // Command to execute. + repeated string cmd = 2; + // Timeout in seconds to stop the command. Default: 0 (run forever). + int64 timeout = 3; +} + +message ExecSyncResponse { + // Captured command stdout output. + // The runtime should cap the output of this response to 16MB. + // If the stdout of the command produces more than 16MB, the remaining output + // should be discarded, and the command should proceed with no error. + // See CVE-2022-1708 and CVE-2022-31030 for more information. + bytes stdout = 1; + // Captured command stderr output. 
+ // The runtime should cap the output of this response to 16MB. + // If the stderr of the command produces more than 16MB, the remaining output + // should be discarded, and the command should proceed with no error. + // See CVE-2022-1708 and CVE-2022-31030 for more information. + bytes stderr = 2; + // Exit code the command finished with. Default: 0 (success). + int32 exit_code = 3; +} + +message ExecRequest { + // ID of the container in which to execute the command. + string container_id = 1; + // Command to execute. + repeated string cmd = 2; + // Whether to exec the command in a TTY. + bool tty = 3; + // Whether to stream stdin. + // One of `stdin`, `stdout`, and `stderr` MUST be true. + bool stdin = 4; + // Whether to stream stdout. + // One of `stdin`, `stdout`, and `stderr` MUST be true. + bool stdout = 5; + // Whether to stream stderr. + // One of `stdin`, `stdout`, and `stderr` MUST be true. + // If `tty` is true, `stderr` MUST be false. Multiplexing is not supported + // in this case. The output of stdout and stderr will be combined to a + // single stream. + bool stderr = 6; +} + +message ExecResponse { + // Fully qualified URL of the exec streaming server. + string url = 1; +} + +message AttachRequest { + // ID of the container to which to attach. + string container_id = 1; + // Whether to stream stdin. + // One of `stdin`, `stdout`, and `stderr` MUST be true. + bool stdin = 2; + // Whether the process being attached is running in a TTY. + // This must match the TTY setting in the ContainerConfig. + bool tty = 3; + // Whether to stream stdout. + // One of `stdin`, `stdout`, and `stderr` MUST be true. + bool stdout = 4; + // Whether to stream stderr. + // One of `stdin`, `stdout`, and `stderr` MUST be true. + // If `tty` is true, `stderr` MUST be false. Multiplexing is not supported + // in this case. The output of stdout and stderr will be combined to a + // single stream. + bool stderr = 5; +} + +message AttachResponse { + // Fully qualified URL of the attach streaming server. + string url = 1; +} + +message PortForwardRequest { + // ID of the container to which to forward the port. + string pod_sandbox_id = 1; + // Port to forward. + repeated int32 port = 2; +} + +message PortForwardResponse { + // Fully qualified URL of the port-forward streaming server. + string url = 1; +} + +message ImageFilter { + // Spec of the image. + ImageSpec image = 1; +} + +message ListImagesRequest { + // Filter to list images. + ImageFilter filter = 1; +} + +// Basic information about a container image. +message Image { + // ID of the image. + string id = 1; + // Other names by which this image is known. + repeated string repo_tags = 2; + // Digests by which this image is known. + repeated string repo_digests = 3; + // Size of the image in bytes. Must be > 0. + uint64 size = 4; + // UID that will run the command(s). This is used as a default if no user is + // specified when creating the container. UID and the following user name + // are mutually exclusive. + Int64Value uid = 5; + // User name that will run the command(s). This is used if UID is not set + // and no user is specified when creating container. + string username = 6; + // ImageSpec for image which includes annotations + ImageSpec spec = 7; + // Recommendation on whether this image should be exempt from garbage collection. + // It must only be treated as a recommendation -- the client can still request that the image be deleted, + // and the runtime must oblige. 
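ExecSync and the image RPCs round out the read-mostly surface a runtime observer is likely to touch. The sketch below issues a one-shot command and then inventories images; the channel is assumed from the first sketch, and the container ID, probe command, and helper class are hypothetical.

    // Sketch: one-shot ExecSync plus an image inventory pass over the same channel.
    using System;
    using System.Threading.Tasks;
    using Grpc.Net.Client;
    using StellaOps.Zastava.Observer.Cri;

    static class RuntimeProbes
    {
        public static async Task ExecAndListImagesAsync(GrpcChannel channel, string containerId)
        {
            var runtime = new RuntimeService.RuntimeServiceClient(channel);
            var images = new ImageService.ImageServiceClient(channel);

            // ExecSync blocks until the command exits or the timeout elapses;
            // stdout/stderr are capped by the runtime as described above.
            var exec = await runtime.ExecSyncAsync(new ExecSyncRequest
            {
                ContainerId = containerId,
                Cmd = { "cat", "/etc/os-release" }, // hypothetical probe command
                Timeout = 5
            });
            Console.WriteLine($"exit={exec.ExitCode}");
            Console.WriteLine(exec.Stdout.ToStringUtf8());

            // An empty ImageFilter returns every image the runtime knows about.
            var list = await images.ListImagesAsync(new ListImagesRequest());
            foreach (var image in list.Images)
            {
                Console.WriteLine($"{image.Id} tags=[{string.Join(", ", image.RepoTags)}] size={image.Size}");
            }
        }
    }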
+ bool pinned = 8; +} + +message ListImagesResponse { + // List of images. + repeated Image images = 1; +} + +message ImageStatusRequest { + // Spec of the image. + ImageSpec image = 1; + // Verbose indicates whether to return extra information about the image. + bool verbose = 2; +} + +message ImageStatusResponse { + // Status of the image. + Image image = 1; + // Info is extra information of the Image. The key could be arbitrary string, and + // value should be in json format. The information could include anything useful + // for debug, e.g. image config for oci image based container runtime. + // It should only be returned non-empty when Verbose is true. + map<string, string> info = 2; +} + +// AuthConfig contains authorization information for connecting to a registry. +message AuthConfig { + string username = 1; + string password = 2; + string auth = 3; + string server_address = 4; + // IdentityToken is used to authenticate the user and get + // an access token for the registry. + string identity_token = 5; + // RegistryToken is a bearer token to be sent to a registry + string registry_token = 6; +} + +message PullImageRequest { + // Spec of the image. + ImageSpec image = 1; + // Authentication configuration for pulling the image. + AuthConfig auth = 2; + // Config of the PodSandbox, which is used to pull image in PodSandbox context. + PodSandboxConfig sandbox_config = 3; +} + +message PullImageResponse { + // Reference to the image in use. For most runtimes, this should be an + // image ID or digest. + string image_ref = 1; +} + +message RemoveImageRequest { + // Spec of the image to remove. + ImageSpec image = 1; +} + +message RemoveImageResponse {} + +message NetworkConfig { + // CIDR to use for pod IP addresses. If the CIDR is empty, runtimes + // should omit it. + string pod_cidr = 1; +} + +message RuntimeConfig { + NetworkConfig network_config = 1; +} + +message UpdateRuntimeConfigRequest { + RuntimeConfig runtime_config = 1; +} + +message UpdateRuntimeConfigResponse {} + +// RuntimeCondition contains condition information for the runtime. +// There are 2 kinds of runtime conditions: +// 1. Required conditions: Conditions are required for kubelet to work +// properly. If any required condition is unmet, the node will be not ready. +// The required conditions include: +// * RuntimeReady: RuntimeReady means the runtime is up and ready to accept +// basic containers e.g. container only needs host network. +// * NetworkReady: NetworkReady means the runtime network is up and ready to +// accept containers which require container network. +// 2. Optional conditions: Conditions are informative to the user, but kubelet +// will not rely on. Since condition type is an arbitrary string, all conditions +// not required are optional. These conditions will be exposed to users to help +// them understand the status of the system. +message RuntimeCondition { + // Type of runtime condition. + string type = 1; + // Status of the condition, one of true/false. Default: false. + bool status = 2; + // Brief CamelCase string containing reason for the condition's last transition. + string reason = 3; + // Human-readable message indicating details about last transition. + string message = 4; +} + +// RuntimeStatus is information about the current status of the runtime. +message RuntimeStatus { + // List of current observed runtime conditions. + repeated RuntimeCondition conditions = 1; +} + +message StatusRequest { + // Verbose indicates whether to return extra information about the runtime. 
+ bool verbose = 1; +} + +message StatusResponse { + // Status of the Runtime. + RuntimeStatus status = 1; + // Info is extra information of the Runtime. The key could be arbitrary string, and + // value should be in json format. The information could include anything useful for + // debug, e.g. plugins used by the container runtime. + // It should only be returned non-empty when Verbose is true. + map<string, string> info = 2; +} + +message ImageFsInfoRequest {} + +// UInt64Value is the wrapper of uint64. +message UInt64Value { + // The value. + uint64 value = 1; +} + +// FilesystemIdentifier uniquely identify the filesystem. +message FilesystemIdentifier{ + // Mountpoint of a filesystem. + string mountpoint = 1; +} + +// FilesystemUsage provides the filesystem usage information. +message FilesystemUsage { + // Timestamp in nanoseconds at which the information were collected. Must be > 0. + int64 timestamp = 1; + // The unique identifier of the filesystem. + FilesystemIdentifier fs_id = 2; + // UsedBytes represents the bytes used for images on the filesystem. + // This may differ from the total bytes used on the filesystem and may not + // equal CapacityBytes - AvailableBytes. + UInt64Value used_bytes = 3; + // InodesUsed represents the inodes used by the images. + // This may not equal InodesCapacity - InodesAvailable because the underlying + // filesystem may also be used for purposes other than storing images. + UInt64Value inodes_used = 4; +} + +// WindowsFilesystemUsage provides the filesystem usage information specific to Windows. +message WindowsFilesystemUsage { + // Timestamp in nanoseconds at which the information were collected. Must be > 0. + int64 timestamp = 1; + // The unique identifier of the filesystem. + FilesystemIdentifier fs_id = 2; + // UsedBytes represents the bytes used for images on the filesystem. + // This may differ from the total bytes used on the filesystem and may not + // equal CapacityBytes - AvailableBytes. + UInt64Value used_bytes = 3; +} + +message ImageFsInfoResponse { + // Information of image filesystem(s). + repeated FilesystemUsage image_filesystems = 1; + // Information of container filesystem(s). + // This is an optional field, may be used for example if container and image + // storage are separated. + // Default will be to return this as empty. + repeated FilesystemUsage container_filesystems = 2; +} + +message ContainerStatsRequest{ + // ID of the container for which to retrieve stats. + string container_id = 1; +} + +message ContainerStatsResponse { + // Stats of the container. + ContainerStats stats = 1; +} + +message ListContainerStatsRequest{ + // Filter for the list request. + ContainerStatsFilter filter = 1; +} + +// ContainerStatsFilter is used to filter containers. +// All those fields are combined with 'AND' +message ContainerStatsFilter { + // ID of the container. + string id = 1; + // ID of the PodSandbox. + string pod_sandbox_id = 2; + // LabelSelector to select matches. + // Only api.MatchLabels is supported for now and the requirements + // are ANDed. MatchExpressions is not supported yet. + map<string, string> label_selector = 3; +} + +message ListContainerStatsResponse { + // Stats of the container. + repeated ContainerStats stats = 1; +} + +// ContainerAttributes provides basic information of the container. +message ContainerAttributes { + // ID of the container. + string id = 1; + // Metadata of the container. + ContainerMetadata metadata = 2; + // Key-value pairs that may be used to scope and select individual resources. 
+ map<string,string> labels = 3; + // Unstructured key-value map holding arbitrary metadata. + // Annotations MUST NOT be altered by the runtime; the value of this field + // MUST be identical to that of the corresponding ContainerConfig used to + // instantiate the Container this status represents. + map<string,string> annotations = 4; +} + +// ContainerStats provides the resource usage statistics for a container. +message ContainerStats { + // Information of the container. + ContainerAttributes attributes = 1; + // CPU usage gathered from the container. + CpuUsage cpu = 2; + // Memory usage gathered from the container. + MemoryUsage memory = 3; + // Usage of the writable layer. + FilesystemUsage writable_layer = 4; + // Swap usage gathered from the container. + SwapUsage swap = 5; +} + +// WindowsContainerStats provides the resource usage statistics for a container specific for Windows +message WindowsContainerStats { + // Information of the container. + ContainerAttributes attributes = 1; + // CPU usage gathered from the container. + WindowsCpuUsage cpu = 2; + // Memory usage gathered from the container. + WindowsMemoryUsage memory = 3; + // Usage of the writable layer. + WindowsFilesystemUsage writable_layer = 4; +} + +// CpuUsage provides the CPU usage information. +message CpuUsage { + // Timestamp in nanoseconds at which the information were collected. Must be > 0. + int64 timestamp = 1; + // Cumulative CPU usage (sum across all cores) since object creation. + UInt64Value usage_core_nano_seconds = 2; + // Total CPU usage (sum of all cores) averaged over the sample window. + // The "core" unit can be interpreted as CPU core-nanoseconds per second. + UInt64Value usage_nano_cores = 3; +} + +// WindowsCpuUsage provides the CPU usage information specific to Windows +message WindowsCpuUsage { + // Timestamp in nanoseconds at which the information were collected. Must be > 0. + int64 timestamp = 1; + // Cumulative CPU usage (sum across all cores) since object creation. + UInt64Value usage_core_nano_seconds = 2; + // Total CPU usage (sum of all cores) averaged over the sample window. + // The "core" unit can be interpreted as CPU core-nanoseconds per second. + UInt64Value usage_nano_cores = 3; +} + +// MemoryUsage provides the memory usage information. +message MemoryUsage { + // Timestamp in nanoseconds at which the information were collected. Must be > 0. + int64 timestamp = 1; + // The amount of working set memory in bytes. + UInt64Value working_set_bytes = 2; + // Available memory for use. This is defined as the memory limit - workingSetBytes. + UInt64Value available_bytes = 3; + // Total memory in use. This includes all memory regardless of when it was accessed. + UInt64Value usage_bytes = 4; + // The amount of anonymous and swap cache memory (includes transparent hugepages). + UInt64Value rss_bytes = 5; + // Cumulative number of minor page faults. + UInt64Value page_faults = 6; + // Cumulative number of major page faults. + UInt64Value major_page_faults = 7; +} + +message SwapUsage { + // Timestamp in nanoseconds at which the information were collected. Must be > 0. + int64 timestamp = 1; + // Available swap for use. This is defined as the swap limit - swapUsageBytes. + UInt64Value swap_available_bytes = 2; + // Total memory in use. This includes all memory regardless of when it was accessed. 
+ UInt64Value swap_usage_bytes = 3; +} + +// WindowsMemoryUsage provides the memory usage information specific to Windows +message WindowsMemoryUsage { + // Timestamp in nanoseconds at which the information were collected. Must be > 0. + int64 timestamp = 1; + // The amount of working set memory in bytes. + UInt64Value working_set_bytes = 2; + // Available memory for use. This is defined as the memory limit - commit_memory_bytes. + UInt64Value available_bytes = 3; + // Cumulative number of page faults. + UInt64Value page_faults = 4; + // Total commit memory in use. Commit memory is total of physical and virtual memory in use. + UInt64Value commit_memory_bytes = 5; +} + +message ReopenContainerLogRequest { + // ID of the container for which to reopen the log. + string container_id = 1; +} + +message ReopenContainerLogResponse{ +} + +message CheckpointContainerRequest { + // ID of the container to be checkpointed. + string container_id = 1; + // Location of the checkpoint archive used for export + string location = 2; + // Timeout in seconds for the checkpoint to complete. + // Timeout of zero means to use the CRI default. + // Timeout > 0 means to use the user specified timeout. + int64 timeout = 3; +} + +message CheckpointContainerResponse {} + +message GetEventsRequest {} + +message ContainerEventResponse { + // ID of the container + string container_id = 1; + + // Type of the container event + ContainerEventType container_event_type = 2; + + // Creation timestamp of this event + int64 created_at = 3; + + // Sandbox status + PodSandboxStatus pod_sandbox_status = 4; + + // Container statuses + repeated ContainerStatus containers_statuses = 5; +} + +enum ContainerEventType { + // Container created + CONTAINER_CREATED_EVENT = 0; + + // Container started + CONTAINER_STARTED_EVENT = 1; + + // Container stopped + CONTAINER_STOPPED_EVENT = 2; + + // Container deleted + CONTAINER_DELETED_EVENT = 3; +} + +message ListMetricDescriptorsRequest {} + +message ListMetricDescriptorsResponse { + repeated MetricDescriptor descriptors = 1; +} + +message MetricDescriptor { + // The name field will be used as a unique identifier of this MetricDescriptor, + // and be used in conjunction with the Metric structure to populate the full Metric. + string name = 1; + string help = 2; + // When a metric uses this metric descriptor, it should only define + // labels that have previously been declared in label_keys. + // It is the responsibility of the runtime to correctly keep sorted the keys and values. + // If the two slices have different length, the behavior is undefined. + repeated string label_keys = 3; +} + +message ListPodSandboxMetricsRequest {} + +message ListPodSandboxMetricsResponse { + repeated PodSandboxMetrics pod_metrics = 1; +} + +message PodSandboxMetrics { + string pod_sandbox_id = 1; + repeated Metric metrics = 2; + repeated ContainerMetrics container_metrics = 3; +} + +message ContainerMetrics { + string container_id = 1; + repeated Metric metrics = 2; +} + +message Metric { + // Name must match a name previously returned in a MetricDescriptors call, + // otherwise, it will be ignored. + string name = 1; + // Timestamp should be 0 if the metric was gathered live. + // If it was cached, the Timestamp should reflect the time it was collected. + int64 timestamp = 2; + MetricType metric_type = 3; + // The corresponding LabelValues to the LabelKeys defined in the MetricDescriptor. + // It is the responsibility of the runtime to correctly keep sorted the keys and values. 
+ // If the two slices have different length, the behavior is undefined. + repeated string label_values = 4; + UInt64Value value = 5; +} + +enum MetricType { + COUNTER = 0; + GAUGE = 1; +} + +message RuntimeConfigRequest {} + +message RuntimeConfigResponse { + // Configuration information for Linux-based runtimes. This field contains + // global runtime configuration options that are not specific to runtime + // handlers. + LinuxRuntimeConfiguration linux = 1; +} + +message LinuxRuntimeConfiguration { + // Cgroup driver to use + // Note: this field should not change for the lifecycle of the Kubelet, + // or while there are running containers. + // The Kubelet will not re-request this after startup, and will construct the cgroup + // hierarchy assuming it is static. + // If the runtime wishes to change this value, it must be accompanied by removal of + // all pods, and a restart of the Kubelet. The easiest way to do this is with a full node reboot. + CgroupDriver cgroup_driver = 1; +} + +enum CgroupDriver { + SYSTEMD = 0; + CGROUPFS = 1; +} diff --git a/src/StellaOps.Zastava.Observer/Runtime/ElfBuildIdReader.cs b/src/Zastava/StellaOps.Zastava.Observer/Runtime/ElfBuildIdReader.cs similarity index 100% rename from src/StellaOps.Zastava.Observer/Runtime/ElfBuildIdReader.cs rename to src/Zastava/StellaOps.Zastava.Observer/Runtime/ElfBuildIdReader.cs diff --git a/src/StellaOps.Zastava.Observer/Runtime/RuntimeEventBuffer.cs b/src/Zastava/StellaOps.Zastava.Observer/Runtime/RuntimeEventBuffer.cs similarity index 100% rename from src/StellaOps.Zastava.Observer/Runtime/RuntimeEventBuffer.cs rename to src/Zastava/StellaOps.Zastava.Observer/Runtime/RuntimeEventBuffer.cs diff --git a/src/StellaOps.Zastava.Observer/Runtime/RuntimeProcessCollector.cs b/src/Zastava/StellaOps.Zastava.Observer/Runtime/RuntimeProcessCollector.cs similarity index 100% rename from src/StellaOps.Zastava.Observer/Runtime/RuntimeProcessCollector.cs rename to src/Zastava/StellaOps.Zastava.Observer/Runtime/RuntimeProcessCollector.cs diff --git a/src/StellaOps.Zastava.Observer/StellaOps.Zastava.Observer.csproj b/src/Zastava/StellaOps.Zastava.Observer/StellaOps.Zastava.Observer.csproj similarity index 97% rename from src/StellaOps.Zastava.Observer/StellaOps.Zastava.Observer.csproj rename to src/Zastava/StellaOps.Zastava.Observer/StellaOps.Zastava.Observer.csproj index a2a5d348..fa7ba471 100644 --- a/src/StellaOps.Zastava.Observer/StellaOps.Zastava.Observer.csproj +++ b/src/Zastava/StellaOps.Zastava.Observer/StellaOps.Zastava.Observer.csproj @@ -1,24 +1,24 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <OutputType>Exe</OutputType> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <ItemGroup> - <PackageReference Include="Google.Protobuf" Version="3.27.2" /> - <PackageReference Include="Grpc.Net.Client" Version="2.65.0" /> - <PackageReference Include="Grpc.Tools" Version="2.65.0"> - <PrivateAssets>All</PrivateAssets> - </PackageReference> - <PackageReference Include="Serilog.Extensions.Hosting" Version="8.0.0" /> - </ItemGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Zastava.Core\StellaOps.Zastava.Core.csproj" /> - </ItemGroup> - <ItemGroup> - <Protobuf Include="Protos/runtime/v1/runtime.proto" GrpcServices="Client" /> - </ItemGroup> -</Project> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <OutputType>Exe</OutputType> + 
<TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <ItemGroup> + <PackageReference Include="Google.Protobuf" Version="3.27.2" /> + <PackageReference Include="Grpc.Net.Client" Version="2.65.0" /> + <PackageReference Include="Grpc.Tools" Version="2.65.0"> + <PrivateAssets>All</PrivateAssets> + </PackageReference> + <PackageReference Include="Serilog.Extensions.Hosting" Version="8.0.0" /> + </ItemGroup> + <ItemGroup> + <ProjectReference Include="..\StellaOps.Zastava.Core\StellaOps.Zastava.Core.csproj" /> + </ItemGroup> + <ItemGroup> + <Protobuf Include="Protos/runtime/v1/runtime.proto" GrpcServices="Client" /> + </ItemGroup> +</Project> diff --git a/src/StellaOps.Zastava.Observer/TASKS.md b/src/Zastava/StellaOps.Zastava.Observer/TASKS.md similarity index 100% rename from src/StellaOps.Zastava.Observer/TASKS.md rename to src/Zastava/StellaOps.Zastava.Observer/TASKS.md diff --git a/src/StellaOps.Zastava.Observer/Worker/BackoffCalculator.cs b/src/Zastava/StellaOps.Zastava.Observer/Worker/BackoffCalculator.cs similarity index 100% rename from src/StellaOps.Zastava.Observer/Worker/BackoffCalculator.cs rename to src/Zastava/StellaOps.Zastava.Observer/Worker/BackoffCalculator.cs diff --git a/src/StellaOps.Zastava.Observer/Worker/ContainerLifecycleHostedService.cs b/src/Zastava/StellaOps.Zastava.Observer/Worker/ContainerLifecycleHostedService.cs similarity index 100% rename from src/StellaOps.Zastava.Observer/Worker/ContainerLifecycleHostedService.cs rename to src/Zastava/StellaOps.Zastava.Observer/Worker/ContainerLifecycleHostedService.cs diff --git a/src/StellaOps.Zastava.Observer/Worker/ContainerRuntimePoller.cs b/src/Zastava/StellaOps.Zastava.Observer/Worker/ContainerRuntimePoller.cs similarity index 100% rename from src/StellaOps.Zastava.Observer/Worker/ContainerRuntimePoller.cs rename to src/Zastava/StellaOps.Zastava.Observer/Worker/ContainerRuntimePoller.cs diff --git a/src/StellaOps.Zastava.Observer/Worker/ObserverBootstrapService.cs b/src/Zastava/StellaOps.Zastava.Observer/Worker/ObserverBootstrapService.cs similarity index 97% rename from src/StellaOps.Zastava.Observer/Worker/ObserverBootstrapService.cs rename to src/Zastava/StellaOps.Zastava.Observer/Worker/ObserverBootstrapService.cs index 86d81b71..74a2d45d 100644 --- a/src/StellaOps.Zastava.Observer/Worker/ObserverBootstrapService.cs +++ b/src/Zastava/StellaOps.Zastava.Observer/Worker/ObserverBootstrapService.cs @@ -1,51 +1,51 @@ -using Microsoft.Extensions.Hosting; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Zastava.Core.Configuration; -using StellaOps.Zastava.Core.Diagnostics; -using StellaOps.Zastava.Core.Security; - -namespace StellaOps.Zastava.Observer.Worker; - -/// <summary> -/// Minimal bootstrap worker ensuring runtime core wiring is exercised. 
-/// </summary> -internal sealed class ObserverBootstrapService : BackgroundService -{ - private readonly IZastavaLogScopeBuilder logScopeBuilder; - private readonly IZastavaRuntimeMetrics runtimeMetrics; - private readonly IZastavaAuthorityTokenProvider authorityTokenProvider; - private readonly IHostApplicationLifetime applicationLifetime; - private readonly ILogger<ObserverBootstrapService> logger; - private readonly ZastavaRuntimeOptions runtimeOptions; - - public ObserverBootstrapService( - IZastavaLogScopeBuilder logScopeBuilder, - IZastavaRuntimeMetrics runtimeMetrics, - IZastavaAuthorityTokenProvider authorityTokenProvider, - IOptions<ZastavaRuntimeOptions> runtimeOptions, - IHostApplicationLifetime applicationLifetime, - ILogger<ObserverBootstrapService> logger) - { - this.logScopeBuilder = logScopeBuilder; - this.runtimeMetrics = runtimeMetrics; - this.authorityTokenProvider = authorityTokenProvider; - this.applicationLifetime = applicationLifetime; - this.logger = logger; - this.runtimeOptions = runtimeOptions.Value; - } - - protected override Task ExecuteAsync(CancellationToken stoppingToken) - { - var scope = logScopeBuilder.BuildScope(eventId: "observer.bootstrap"); - using (logger.BeginScope(scope)) - { - logger.LogInformation("Zastava observer runtime core initialised for tenant {Tenant}, component {Component}.", runtimeOptions.Tenant, runtimeOptions.Component); - logger.LogDebug("Observer metrics meter {MeterName} registered with {TagCount} default tags.", runtimeMetrics.Meter.Name, runtimeMetrics.DefaultTags.Count); - } - - // Observer implementation will hook into the authority token provider when connectors arrive. - applicationLifetime.ApplicationStarted.Register(() => logger.LogInformation("Observer bootstrap complete.")); - return Task.CompletedTask; - } -} +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Zastava.Core.Configuration; +using StellaOps.Zastava.Core.Diagnostics; +using StellaOps.Zastava.Core.Security; + +namespace StellaOps.Zastava.Observer.Worker; + +/// <summary> +/// Minimal bootstrap worker ensuring runtime core wiring is exercised. 
+/// </summary> +internal sealed class ObserverBootstrapService : BackgroundService +{ + private readonly IZastavaLogScopeBuilder logScopeBuilder; + private readonly IZastavaRuntimeMetrics runtimeMetrics; + private readonly IZastavaAuthorityTokenProvider authorityTokenProvider; + private readonly IHostApplicationLifetime applicationLifetime; + private readonly ILogger<ObserverBootstrapService> logger; + private readonly ZastavaRuntimeOptions runtimeOptions; + + public ObserverBootstrapService( + IZastavaLogScopeBuilder logScopeBuilder, + IZastavaRuntimeMetrics runtimeMetrics, + IZastavaAuthorityTokenProvider authorityTokenProvider, + IOptions<ZastavaRuntimeOptions> runtimeOptions, + IHostApplicationLifetime applicationLifetime, + ILogger<ObserverBootstrapService> logger) + { + this.logScopeBuilder = logScopeBuilder; + this.runtimeMetrics = runtimeMetrics; + this.authorityTokenProvider = authorityTokenProvider; + this.applicationLifetime = applicationLifetime; + this.logger = logger; + this.runtimeOptions = runtimeOptions.Value; + } + + protected override Task ExecuteAsync(CancellationToken stoppingToken) + { + var scope = logScopeBuilder.BuildScope(eventId: "observer.bootstrap"); + using (logger.BeginScope(scope)) + { + logger.LogInformation("Zastava observer runtime core initialised for tenant {Tenant}, component {Component}.", runtimeOptions.Tenant, runtimeOptions.Component); + logger.LogDebug("Observer metrics meter {MeterName} registered with {TagCount} default tags.", runtimeMetrics.Meter.Name, runtimeMetrics.DefaultTags.Count); + } + + // Observer implementation will hook into the authority token provider when connectors arrive. + applicationLifetime.ApplicationStarted.Register(() => logger.LogInformation("Observer bootstrap complete.")); + return Task.CompletedTask; + } +} diff --git a/src/StellaOps.Zastava.Observer/Worker/RuntimeEventDispatchService.cs b/src/Zastava/StellaOps.Zastava.Observer/Worker/RuntimeEventDispatchService.cs similarity index 100% rename from src/StellaOps.Zastava.Observer/Worker/RuntimeEventDispatchService.cs rename to src/Zastava/StellaOps.Zastava.Observer/Worker/RuntimeEventDispatchService.cs diff --git a/src/StellaOps.Zastava.Observer/Worker/RuntimeEventFactory.cs b/src/Zastava/StellaOps.Zastava.Observer/Worker/RuntimeEventFactory.cs similarity index 100% rename from src/StellaOps.Zastava.Observer/Worker/RuntimeEventFactory.cs rename to src/Zastava/StellaOps.Zastava.Observer/Worker/RuntimeEventFactory.cs diff --git a/src/StellaOps.Zastava.Webhook/Admission/AdmissionEndpoint.cs b/src/Zastava/StellaOps.Zastava.Webhook/Admission/AdmissionEndpoint.cs similarity index 100% rename from src/StellaOps.Zastava.Webhook/Admission/AdmissionEndpoint.cs rename to src/Zastava/StellaOps.Zastava.Webhook/Admission/AdmissionEndpoint.cs diff --git a/src/StellaOps.Zastava.Webhook/Admission/AdmissionRequestContext.cs b/src/Zastava/StellaOps.Zastava.Webhook/Admission/AdmissionRequestContext.cs similarity index 100% rename from src/StellaOps.Zastava.Webhook/Admission/AdmissionRequestContext.cs rename to src/Zastava/StellaOps.Zastava.Webhook/Admission/AdmissionRequestContext.cs diff --git a/src/StellaOps.Zastava.Webhook/Admission/AdmissionResponseBuilder.cs b/src/Zastava/StellaOps.Zastava.Webhook/Admission/AdmissionResponseBuilder.cs similarity index 100% rename from src/StellaOps.Zastava.Webhook/Admission/AdmissionResponseBuilder.cs rename to src/Zastava/StellaOps.Zastava.Webhook/Admission/AdmissionResponseBuilder.cs diff --git 
a/src/StellaOps.Zastava.Webhook/Admission/AdmissionReviewModels.cs b/src/Zastava/StellaOps.Zastava.Webhook/Admission/AdmissionReviewModels.cs similarity index 100% rename from src/StellaOps.Zastava.Webhook/Admission/AdmissionReviewModels.cs rename to src/Zastava/StellaOps.Zastava.Webhook/Admission/AdmissionReviewModels.cs diff --git a/src/StellaOps.Zastava.Webhook/Admission/AdmissionReviewParser.cs b/src/Zastava/StellaOps.Zastava.Webhook/Admission/AdmissionReviewParser.cs similarity index 100% rename from src/StellaOps.Zastava.Webhook/Admission/AdmissionReviewParser.cs rename to src/Zastava/StellaOps.Zastava.Webhook/Admission/AdmissionReviewParser.cs diff --git a/src/StellaOps.Zastava.Webhook/Admission/ImageDigestResolver.cs b/src/Zastava/StellaOps.Zastava.Webhook/Admission/ImageDigestResolver.cs similarity index 100% rename from src/StellaOps.Zastava.Webhook/Admission/ImageDigestResolver.cs rename to src/Zastava/StellaOps.Zastava.Webhook/Admission/ImageDigestResolver.cs diff --git a/src/StellaOps.Zastava.Webhook/Admission/RuntimeAdmissionPolicyService.cs b/src/Zastava/StellaOps.Zastava.Webhook/Admission/RuntimeAdmissionPolicyService.cs similarity index 100% rename from src/StellaOps.Zastava.Webhook/Admission/RuntimeAdmissionPolicyService.cs rename to src/Zastava/StellaOps.Zastava.Webhook/Admission/RuntimeAdmissionPolicyService.cs diff --git a/src/StellaOps.Zastava.Webhook/Admission/RuntimePolicyCache.cs b/src/Zastava/StellaOps.Zastava.Webhook/Admission/RuntimePolicyCache.cs similarity index 100% rename from src/StellaOps.Zastava.Webhook/Admission/RuntimePolicyCache.cs rename to src/Zastava/StellaOps.Zastava.Webhook/Admission/RuntimePolicyCache.cs diff --git a/src/StellaOps.Zastava.Webhook/Authority/AuthorityTokenProvider.cs b/src/Zastava/StellaOps.Zastava.Webhook/Authority/AuthorityTokenProvider.cs similarity index 97% rename from src/StellaOps.Zastava.Webhook/Authority/AuthorityTokenProvider.cs rename to src/Zastava/StellaOps.Zastava.Webhook/Authority/AuthorityTokenProvider.cs index 134ec0df..42358405 100644 --- a/src/StellaOps.Zastava.Webhook/Authority/AuthorityTokenProvider.cs +++ b/src/Zastava/StellaOps.Zastava.Webhook/Authority/AuthorityTokenProvider.cs @@ -1,51 +1,51 @@ -using System; -using System.Collections.Generic; -using System.Linq; -using Microsoft.Extensions.Diagnostics.HealthChecks; -using Microsoft.Extensions.Options; -using Microsoft.Extensions.Logging; -using StellaOps.Zastava.Core.Configuration; -using StellaOps.Zastava.Core.Security; - -namespace StellaOps.Zastava.Webhook.Authority; - -public sealed class AuthorityTokenHealthCheck : IHealthCheck -{ - private readonly IZastavaAuthorityTokenProvider authorityTokenProvider; - private readonly IOptionsMonitor<ZastavaRuntimeOptions> runtimeOptions; - private readonly ILogger<AuthorityTokenHealthCheck> logger; - - public AuthorityTokenHealthCheck( - IZastavaAuthorityTokenProvider authorityTokenProvider, - IOptionsMonitor<ZastavaRuntimeOptions> runtimeOptions, - ILogger<AuthorityTokenHealthCheck> logger) - { - this.authorityTokenProvider = authorityTokenProvider ?? throw new ArgumentNullException(nameof(authorityTokenProvider)); - this.runtimeOptions = runtimeOptions ?? throw new ArgumentNullException(nameof(runtimeOptions)); - this.logger = logger ?? 
throw new ArgumentNullException(nameof(logger)); - } - - public async Task<HealthCheckResult> CheckHealthAsync(HealthCheckContext context, CancellationToken cancellationToken = default) - { - try - { - var runtime = runtimeOptions.CurrentValue; - var authority = runtime.Authority; - var audience = authority.Audience.FirstOrDefault() ?? "scanner"; - var token = await authorityTokenProvider.GetAsync(audience, authority.Scopes ?? Array.Empty<string>(), cancellationToken); - - return HealthCheckResult.Healthy( - "Authority token acquired.", - data: new Dictionary<string, object> - { - ["expiresAtUtc"] = token.ExpiresAtUtc?.ToString("O") ?? "static", - ["tokenType"] = token.TokenType - }); - } - catch (Exception ex) - { - logger.LogError(ex, "Failed to obtain Authority token via runtime core."); - return HealthCheckResult.Unhealthy("Failed to obtain Authority token via runtime core.", ex); - } - } -} +using System; +using System.Collections.Generic; +using System.Linq; +using Microsoft.Extensions.Diagnostics.HealthChecks; +using Microsoft.Extensions.Options; +using Microsoft.Extensions.Logging; +using StellaOps.Zastava.Core.Configuration; +using StellaOps.Zastava.Core.Security; + +namespace StellaOps.Zastava.Webhook.Authority; + +public sealed class AuthorityTokenHealthCheck : IHealthCheck +{ + private readonly IZastavaAuthorityTokenProvider authorityTokenProvider; + private readonly IOptionsMonitor<ZastavaRuntimeOptions> runtimeOptions; + private readonly ILogger<AuthorityTokenHealthCheck> logger; + + public AuthorityTokenHealthCheck( + IZastavaAuthorityTokenProvider authorityTokenProvider, + IOptionsMonitor<ZastavaRuntimeOptions> runtimeOptions, + ILogger<AuthorityTokenHealthCheck> logger) + { + this.authorityTokenProvider = authorityTokenProvider ?? throw new ArgumentNullException(nameof(authorityTokenProvider)); + this.runtimeOptions = runtimeOptions ?? throw new ArgumentNullException(nameof(runtimeOptions)); + this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public async Task<HealthCheckResult> CheckHealthAsync(HealthCheckContext context, CancellationToken cancellationToken = default) + { + try + { + var runtime = runtimeOptions.CurrentValue; + var authority = runtime.Authority; + var audience = authority.Audience.FirstOrDefault() ?? "scanner"; + var token = await authorityTokenProvider.GetAsync(audience, authority.Scopes ?? Array.Empty<string>(), cancellationToken); + + return HealthCheckResult.Healthy( + "Authority token acquired.", + data: new Dictionary<string, object> + { + ["expiresAtUtc"] = token.ExpiresAtUtc?.ToString("O") ?? 
"static", + ["tokenType"] = token.TokenType + }); + } + catch (Exception ex) + { + logger.LogError(ex, "Failed to obtain Authority token via runtime core."); + return HealthCheckResult.Unhealthy("Failed to obtain Authority token via runtime core.", ex); + } + } +} diff --git a/src/StellaOps.Zastava.Webhook/Backend/IRuntimePolicyClient.cs b/src/Zastava/StellaOps.Zastava.Webhook/Backend/IRuntimePolicyClient.cs similarity index 96% rename from src/StellaOps.Zastava.Webhook/Backend/IRuntimePolicyClient.cs rename to src/Zastava/StellaOps.Zastava.Webhook/Backend/IRuntimePolicyClient.cs index 6388fd8b..c442b27d 100644 --- a/src/StellaOps.Zastava.Webhook/Backend/IRuntimePolicyClient.cs +++ b/src/Zastava/StellaOps.Zastava.Webhook/Backend/IRuntimePolicyClient.cs @@ -1,9 +1,9 @@ -using System.Threading; -using System.Threading.Tasks; - -namespace StellaOps.Zastava.Webhook.Backend; - -public interface IRuntimePolicyClient -{ - Task<RuntimePolicyResponse> EvaluateAsync(RuntimePolicyRequest request, CancellationToken cancellationToken = default); -} +using System.Threading; +using System.Threading.Tasks; + +namespace StellaOps.Zastava.Webhook.Backend; + +public interface IRuntimePolicyClient +{ + Task<RuntimePolicyResponse> EvaluateAsync(RuntimePolicyRequest request, CancellationToken cancellationToken = default); +} diff --git a/src/StellaOps.Zastava.Webhook/Backend/RuntimePolicyClient.cs b/src/Zastava/StellaOps.Zastava.Webhook/Backend/RuntimePolicyClient.cs similarity index 97% rename from src/StellaOps.Zastava.Webhook/Backend/RuntimePolicyClient.cs rename to src/Zastava/StellaOps.Zastava.Webhook/Backend/RuntimePolicyClient.cs index 14c2d810..017092aa 100644 --- a/src/StellaOps.Zastava.Webhook/Backend/RuntimePolicyClient.cs +++ b/src/Zastava/StellaOps.Zastava.Webhook/Backend/RuntimePolicyClient.cs @@ -1,115 +1,115 @@ -using System; -using System.Collections.Generic; -using System.Diagnostics; -using System.Linq; -using System.Net.Http; -using System.Net.Http.Headers; -using System.Text; -using System.Text.Json; -using System.Text.Json.Serialization; -using System.Threading; -using System.Threading.Tasks; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Zastava.Core.Configuration; -using StellaOps.Zastava.Core.Diagnostics; -using StellaOps.Zastava.Core.Security; -using StellaOps.Zastava.Webhook.Configuration; - -namespace StellaOps.Zastava.Webhook.Backend; - -internal sealed class RuntimePolicyClient : IRuntimePolicyClient -{ - private static readonly JsonSerializerOptions SerializerOptions = new() - { - PropertyNamingPolicy = JsonNamingPolicy.CamelCase, - DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull - }; - - static RuntimePolicyClient() - { - SerializerOptions.Converters.Add(new JsonStringEnumConverter(JsonNamingPolicy.CamelCase, allowIntegerValues: false)); - } - - private readonly HttpClient httpClient; - private readonly IZastavaAuthorityTokenProvider authorityTokenProvider; - private readonly IOptionsMonitor<ZastavaRuntimeOptions> runtimeOptions; - private readonly IOptionsMonitor<ZastavaWebhookOptions> webhookOptions; - private readonly IZastavaRuntimeMetrics runtimeMetrics; - private readonly ILogger<RuntimePolicyClient> logger; - - public RuntimePolicyClient( - HttpClient httpClient, - IZastavaAuthorityTokenProvider authorityTokenProvider, - IOptionsMonitor<ZastavaRuntimeOptions> runtimeOptions, - IOptionsMonitor<ZastavaWebhookOptions> webhookOptions, - IZastavaRuntimeMetrics runtimeMetrics, - ILogger<RuntimePolicyClient> logger) - { - 
this.httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient)); - this.authorityTokenProvider = authorityTokenProvider ?? throw new ArgumentNullException(nameof(authorityTokenProvider)); - this.runtimeOptions = runtimeOptions ?? throw new ArgumentNullException(nameof(runtimeOptions)); - this.webhookOptions = webhookOptions ?? throw new ArgumentNullException(nameof(webhookOptions)); - this.runtimeMetrics = runtimeMetrics ?? throw new ArgumentNullException(nameof(runtimeMetrics)); - this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); - } - - public async Task<RuntimePolicyResponse> EvaluateAsync(RuntimePolicyRequest request, CancellationToken cancellationToken = default) - { - ArgumentNullException.ThrowIfNull(request); - - var runtime = runtimeOptions.CurrentValue; - var authority = runtime.Authority; - var audience = authority.Audience.FirstOrDefault() ?? "scanner"; - var token = await authorityTokenProvider.GetAsync(audience, authority.Scopes ?? Array.Empty<string>(), cancellationToken).ConfigureAwait(false); - - var backend = webhookOptions.CurrentValue.Backend; - using var httpRequest = new HttpRequestMessage(HttpMethod.Post, backend.PolicyPath) - { - Content = new StringContent(JsonSerializer.Serialize(request, SerializerOptions), Encoding.UTF8, "application/json") - }; - - httpRequest.Headers.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json")); - httpRequest.Headers.Authorization = CreateAuthorizationHeader(token); - - var stopwatch = Stopwatch.StartNew(); - try - { - using var response = await httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false); - var payload = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); - - if (!response.IsSuccessStatusCode) - { - logger.LogWarning("Runtime policy call returned {StatusCode}: {Payload}", (int)response.StatusCode, payload); - throw new RuntimePolicyException($"Runtime policy call failed with status {(int)response.StatusCode}", response.StatusCode); - } - - var result = JsonSerializer.Deserialize<RuntimePolicyResponse>(payload, SerializerOptions); - if (result is null) - { - throw new RuntimePolicyException("Runtime policy response payload was empty or invalid.", response.StatusCode); - } - - return result; - } - finally - { - stopwatch.Stop(); - RecordLatency(stopwatch.Elapsed.TotalMilliseconds); - } - } - - private AuthenticationHeaderValue CreateAuthorizationHeader(ZastavaOperationalToken token) - { - var scheme = string.Equals(token.TokenType, "dpop", StringComparison.OrdinalIgnoreCase) ? 
"DPoP" : token.TokenType; - return new AuthenticationHeaderValue(scheme, token.AccessToken); - } - - private void RecordLatency(double elapsedMs) - { - var tags = runtimeMetrics.DefaultTags - .Concat(new[] { new KeyValuePair<string, object?>("endpoint", "policy") }) - .ToArray(); - runtimeMetrics.BackendLatencyMs.Record(elapsedMs, tags); - } -} +using System; +using System.Collections.Generic; +using System.Diagnostics; +using System.Linq; +using System.Net.Http; +using System.Net.Http.Headers; +using System.Text; +using System.Text.Json; +using System.Text.Json.Serialization; +using System.Threading; +using System.Threading.Tasks; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Zastava.Core.Configuration; +using StellaOps.Zastava.Core.Diagnostics; +using StellaOps.Zastava.Core.Security; +using StellaOps.Zastava.Webhook.Configuration; + +namespace StellaOps.Zastava.Webhook.Backend; + +internal sealed class RuntimePolicyClient : IRuntimePolicyClient +{ + private static readonly JsonSerializerOptions SerializerOptions = new() + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull + }; + + static RuntimePolicyClient() + { + SerializerOptions.Converters.Add(new JsonStringEnumConverter(JsonNamingPolicy.CamelCase, allowIntegerValues: false)); + } + + private readonly HttpClient httpClient; + private readonly IZastavaAuthorityTokenProvider authorityTokenProvider; + private readonly IOptionsMonitor<ZastavaRuntimeOptions> runtimeOptions; + private readonly IOptionsMonitor<ZastavaWebhookOptions> webhookOptions; + private readonly IZastavaRuntimeMetrics runtimeMetrics; + private readonly ILogger<RuntimePolicyClient> logger; + + public RuntimePolicyClient( + HttpClient httpClient, + IZastavaAuthorityTokenProvider authorityTokenProvider, + IOptionsMonitor<ZastavaRuntimeOptions> runtimeOptions, + IOptionsMonitor<ZastavaWebhookOptions> webhookOptions, + IZastavaRuntimeMetrics runtimeMetrics, + ILogger<RuntimePolicyClient> logger) + { + this.httpClient = httpClient ?? throw new ArgumentNullException(nameof(httpClient)); + this.authorityTokenProvider = authorityTokenProvider ?? throw new ArgumentNullException(nameof(authorityTokenProvider)); + this.runtimeOptions = runtimeOptions ?? throw new ArgumentNullException(nameof(runtimeOptions)); + this.webhookOptions = webhookOptions ?? throw new ArgumentNullException(nameof(webhookOptions)); + this.runtimeMetrics = runtimeMetrics ?? throw new ArgumentNullException(nameof(runtimeMetrics)); + this.logger = logger ?? throw new ArgumentNullException(nameof(logger)); + } + + public async Task<RuntimePolicyResponse> EvaluateAsync(RuntimePolicyRequest request, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(request); + + var runtime = runtimeOptions.CurrentValue; + var authority = runtime.Authority; + var audience = authority.Audience.FirstOrDefault() ?? "scanner"; + var token = await authorityTokenProvider.GetAsync(audience, authority.Scopes ?? 
Array.Empty<string>(), cancellationToken).ConfigureAwait(false); + + var backend = webhookOptions.CurrentValue.Backend; + using var httpRequest = new HttpRequestMessage(HttpMethod.Post, backend.PolicyPath) + { + Content = new StringContent(JsonSerializer.Serialize(request, SerializerOptions), Encoding.UTF8, "application/json") + }; + + httpRequest.Headers.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json")); + httpRequest.Headers.Authorization = CreateAuthorizationHeader(token); + + var stopwatch = Stopwatch.StartNew(); + try + { + using var response = await httpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false); + var payload = await response.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false); + + if (!response.IsSuccessStatusCode) + { + logger.LogWarning("Runtime policy call returned {StatusCode}: {Payload}", (int)response.StatusCode, payload); + throw new RuntimePolicyException($"Runtime policy call failed with status {(int)response.StatusCode}", response.StatusCode); + } + + var result = JsonSerializer.Deserialize<RuntimePolicyResponse>(payload, SerializerOptions); + if (result is null) + { + throw new RuntimePolicyException("Runtime policy response payload was empty or invalid.", response.StatusCode); + } + + return result; + } + finally + { + stopwatch.Stop(); + RecordLatency(stopwatch.Elapsed.TotalMilliseconds); + } + } + + private AuthenticationHeaderValue CreateAuthorizationHeader(ZastavaOperationalToken token) + { + var scheme = string.Equals(token.TokenType, "dpop", StringComparison.OrdinalIgnoreCase) ? "DPoP" : token.TokenType; + return new AuthenticationHeaderValue(scheme, token.AccessToken); + } + + private void RecordLatency(double elapsedMs) + { + var tags = runtimeMetrics.DefaultTags + .Concat(new[] { new KeyValuePair<string, object?>("endpoint", "policy") }) + .ToArray(); + runtimeMetrics.BackendLatencyMs.Record(elapsedMs, tags); + } +} diff --git a/src/StellaOps.Zastava.Webhook/Backend/RuntimePolicyException.cs b/src/Zastava/StellaOps.Zastava.Webhook/Backend/RuntimePolicyException.cs similarity index 96% rename from src/StellaOps.Zastava.Webhook/Backend/RuntimePolicyException.cs rename to src/Zastava/StellaOps.Zastava.Webhook/Backend/RuntimePolicyException.cs index d756a454..e0b0f5d1 100644 --- a/src/StellaOps.Zastava.Webhook/Backend/RuntimePolicyException.cs +++ b/src/Zastava/StellaOps.Zastava.Webhook/Backend/RuntimePolicyException.cs @@ -1,21 +1,21 @@ -using System; -using System.Net; - -namespace StellaOps.Zastava.Webhook.Backend; - -public sealed class RuntimePolicyException : Exception -{ - public RuntimePolicyException(string message, HttpStatusCode statusCode) - : base(message) - { - StatusCode = statusCode; - } - - public RuntimePolicyException(string message, HttpStatusCode statusCode, Exception innerException) - : base(message, innerException) - { - StatusCode = statusCode; - } - - public HttpStatusCode StatusCode { get; } -} +using System; +using System.Net; + +namespace StellaOps.Zastava.Webhook.Backend; + +public sealed class RuntimePolicyException : Exception +{ + public RuntimePolicyException(string message, HttpStatusCode statusCode) + : base(message) + { + StatusCode = statusCode; + } + + public RuntimePolicyException(string message, HttpStatusCode statusCode, Exception innerException) + : base(message, innerException) + { + StatusCode = statusCode; + } + + public HttpStatusCode StatusCode { get; } +} diff --git a/src/StellaOps.Zastava.Webhook/Backend/RuntimePolicyRequest.cs 
b/src/Zastava/StellaOps.Zastava.Webhook/Backend/RuntimePolicyRequest.cs similarity index 96% rename from src/StellaOps.Zastava.Webhook/Backend/RuntimePolicyRequest.cs rename to src/Zastava/StellaOps.Zastava.Webhook/Backend/RuntimePolicyRequest.cs index f043625a..6d3dab77 100644 --- a/src/StellaOps.Zastava.Webhook/Backend/RuntimePolicyRequest.cs +++ b/src/Zastava/StellaOps.Zastava.Webhook/Backend/RuntimePolicyRequest.cs @@ -1,16 +1,16 @@ -using System.Collections.Generic; -using System.Text.Json.Serialization; - -namespace StellaOps.Zastava.Webhook.Backend; - -public sealed record RuntimePolicyRequest -{ - [JsonPropertyName("namespace")] - public required string Namespace { get; init; } - - [JsonPropertyName("labels")] - public IReadOnlyDictionary<string, string>? Labels { get; init; } - - [JsonPropertyName("images")] - public required IReadOnlyList<string> Images { get; init; } -} +using System.Collections.Generic; +using System.Text.Json.Serialization; + +namespace StellaOps.Zastava.Webhook.Backend; + +public sealed record RuntimePolicyRequest +{ + [JsonPropertyName("namespace")] + public required string Namespace { get; init; } + + [JsonPropertyName("labels")] + public IReadOnlyDictionary<string, string>? Labels { get; init; } + + [JsonPropertyName("images")] + public required IReadOnlyList<string> Images { get; init; } +} diff --git a/src/StellaOps.Zastava.Webhook/Backend/RuntimePolicyResponse.cs b/src/Zastava/StellaOps.Zastava.Webhook/Backend/RuntimePolicyResponse.cs similarity index 97% rename from src/StellaOps.Zastava.Webhook/Backend/RuntimePolicyResponse.cs rename to src/Zastava/StellaOps.Zastava.Webhook/Backend/RuntimePolicyResponse.cs index c5cfe5b2..b9515518 100644 --- a/src/StellaOps.Zastava.Webhook/Backend/RuntimePolicyResponse.cs +++ b/src/Zastava/StellaOps.Zastava.Webhook/Backend/RuntimePolicyResponse.cs @@ -1,12 +1,12 @@ -using System; -using System.Collections.Generic; -using System.Text.Json.Serialization; -using StellaOps.Zastava.Core.Contracts; - -namespace StellaOps.Zastava.Webhook.Backend; - -public sealed record RuntimePolicyResponse -{ +using System; +using System.Collections.Generic; +using System.Text.Json.Serialization; +using StellaOps.Zastava.Core.Contracts; + +namespace StellaOps.Zastava.Webhook.Backend; + +public sealed record RuntimePolicyResponse +{ [JsonPropertyName("ttlSeconds")] public int TtlSeconds { get; init; } @@ -15,25 +15,25 @@ public sealed record RuntimePolicyResponse [JsonPropertyName("policyRevision")] public string? PolicyRevision { get; init; } - - [JsonPropertyName("results")] - public IReadOnlyDictionary<string, RuntimePolicyImageResult> Results { get; init; } = new Dictionary<string, RuntimePolicyImageResult>(); -} - -public sealed record RuntimePolicyImageResult -{ - [JsonPropertyName("signed")] - public bool Signed { get; init; } - - [JsonPropertyName("hasSbom")] - public bool HasSbom { get; init; } - - [JsonPropertyName("policyVerdict")] - public PolicyVerdict PolicyVerdict { get; init; } - - [JsonPropertyName("reasons")] - public IReadOnlyList<string> Reasons { get; init; } = Array.Empty<string>(); - - [JsonPropertyName("rekor")] - public AdmissionRekorEvidence? 
Rekor { get; init; } -} + + [JsonPropertyName("results")] + public IReadOnlyDictionary<string, RuntimePolicyImageResult> Results { get; init; } = new Dictionary<string, RuntimePolicyImageResult>(); +} + +public sealed record RuntimePolicyImageResult +{ + [JsonPropertyName("signed")] + public bool Signed { get; init; } + + [JsonPropertyName("hasSbom")] + public bool HasSbom { get; init; } + + [JsonPropertyName("policyVerdict")] + public PolicyVerdict PolicyVerdict { get; init; } + + [JsonPropertyName("reasons")] + public IReadOnlyList<string> Reasons { get; init; } = Array.Empty<string>(); + + [JsonPropertyName("rekor")] + public AdmissionRekorEvidence? Rekor { get; init; } +} diff --git a/src/StellaOps.Zastava.Webhook/Certificates/CsrCertificateSource.cs b/src/Zastava/StellaOps.Zastava.Webhook/Certificates/CsrCertificateSource.cs similarity index 100% rename from src/StellaOps.Zastava.Webhook/Certificates/CsrCertificateSource.cs rename to src/Zastava/StellaOps.Zastava.Webhook/Certificates/CsrCertificateSource.cs diff --git a/src/StellaOps.Zastava.Webhook/Certificates/IWebhookCertificateProvider.cs b/src/Zastava/StellaOps.Zastava.Webhook/Certificates/IWebhookCertificateProvider.cs similarity index 100% rename from src/StellaOps.Zastava.Webhook/Certificates/IWebhookCertificateProvider.cs rename to src/Zastava/StellaOps.Zastava.Webhook/Certificates/IWebhookCertificateProvider.cs diff --git a/src/StellaOps.Zastava.Webhook/Certificates/SecretFileCertificateSource.cs b/src/Zastava/StellaOps.Zastava.Webhook/Certificates/SecretFileCertificateSource.cs similarity index 100% rename from src/StellaOps.Zastava.Webhook/Certificates/SecretFileCertificateSource.cs rename to src/Zastava/StellaOps.Zastava.Webhook/Certificates/SecretFileCertificateSource.cs diff --git a/src/StellaOps.Zastava.Webhook/Certificates/WebhookCertificateHealthCheck.cs b/src/Zastava/StellaOps.Zastava.Webhook/Certificates/WebhookCertificateHealthCheck.cs similarity index 100% rename from src/StellaOps.Zastava.Webhook/Certificates/WebhookCertificateHealthCheck.cs rename to src/Zastava/StellaOps.Zastava.Webhook/Certificates/WebhookCertificateHealthCheck.cs diff --git a/src/StellaOps.Zastava.Webhook/Configuration/ZastavaWebhookOptions.cs b/src/Zastava/StellaOps.Zastava.Webhook/Configuration/ZastavaWebhookOptions.cs similarity index 100% rename from src/StellaOps.Zastava.Webhook/Configuration/ZastavaWebhookOptions.cs rename to src/Zastava/StellaOps.Zastava.Webhook/Configuration/ZastavaWebhookOptions.cs diff --git a/src/StellaOps.Zastava.Webhook/DependencyInjection/ServiceCollectionExtensions.cs b/src/Zastava/StellaOps.Zastava.Webhook/DependencyInjection/ServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Zastava.Webhook/DependencyInjection/ServiceCollectionExtensions.cs rename to src/Zastava/StellaOps.Zastava.Webhook/DependencyInjection/ServiceCollectionExtensions.cs diff --git a/src/StellaOps.Zastava.Webhook/DependencyInjection/WebhookRuntimeOptionsPostConfigure.cs b/src/Zastava/StellaOps.Zastava.Webhook/DependencyInjection/WebhookRuntimeOptionsPostConfigure.cs similarity index 97% rename from src/StellaOps.Zastava.Webhook/DependencyInjection/WebhookRuntimeOptionsPostConfigure.cs rename to src/Zastava/StellaOps.Zastava.Webhook/DependencyInjection/WebhookRuntimeOptionsPostConfigure.cs index 42626115..de47fa84 100644 --- a/src/StellaOps.Zastava.Webhook/DependencyInjection/WebhookRuntimeOptionsPostConfigure.cs +++ b/src/Zastava/StellaOps.Zastava.Webhook/DependencyInjection/WebhookRuntimeOptionsPostConfigure.cs 
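For reference, the runtime policy contract relocated above (IRuntimePolicyClient, RuntimePolicyRequest, RuntimePolicyResponse, RuntimePolicyImageResult) is consumed roughly as sketched below. This is a minimal illustrative sketch only: the class name, method name, namespace value, labels, and image reference are assumptions, and the acceptance rule (signed, SBOM present, no reasons) is an assumption as well; the real admission decision is made by RuntimeAdmissionPolicyService, which this patch only renames.

using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using StellaOps.Zastava.Webhook.Backend;

// Illustrative only: shows the shape of a policy evaluation round-trip using the
// contract types above. Values and the acceptance rule are hypothetical.
internal static class RuntimePolicyUsageSketch
{
    public static async Task<bool> AllImagesAdmittedAsync(IRuntimePolicyClient client, CancellationToken ct)
    {
        var request = new RuntimePolicyRequest
        {
            Namespace = "payments",                                            // hypothetical namespace
            Labels = new Dictionary<string, string> { ["app"] = "api" },       // optional selector labels
            Images = new[] { "registry.example.internal/payments/api:1.2.3" }  // hypothetical image reference
        };

        RuntimePolicyResponse response = await client.EvaluateAsync(request, ct);

        foreach (var (_, result) in response.Results)
        {
            // Signed, HasSbom and Reasons come from RuntimePolicyImageResult; treating any
            // unsigned or SBOM-less image, or any reported reason, as a rejection is an assumption.
            if (!result.Signed || !result.HasSbom || result.Reasons.Count > 0)
            {
                return false;
            }
        }

        return true;
    }
}

In the webhook itself the admission path goes through RuntimePolicyCache and RuntimeAdmissionPolicyService (both renamed in this patch), and the TtlSeconds field on the response is presumably what bounds how long a cached verdict may be reused.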
@@ -1,52 +1,52 @@ -using System; -using Microsoft.Extensions.Options; -using StellaOps.Zastava.Core.Configuration; -using StellaOps.Zastava.Webhook.Configuration; - -namespace StellaOps.Zastava.Webhook.DependencyInjection; - -/// <summary> -/// Ensures legacy webhook authority options propagate to runtime options when not explicitly configured. -/// </summary> -internal sealed class WebhookRuntimeOptionsPostConfigure : IPostConfigureOptions<ZastavaRuntimeOptions> -{ - private readonly IOptionsMonitor<ZastavaWebhookOptions> webhookOptions; - - public WebhookRuntimeOptionsPostConfigure(IOptionsMonitor<ZastavaWebhookOptions> webhookOptions) - { - this.webhookOptions = webhookOptions ?? throw new ArgumentNullException(nameof(webhookOptions)); - } - - public void PostConfigure(string? name, ZastavaRuntimeOptions runtimeOptions) - { - ArgumentNullException.ThrowIfNull(runtimeOptions); - - var snapshot = webhookOptions.Get(name ?? Options.DefaultName); - var source = snapshot.Authority; - if (source is null) - { - return; - } - - runtimeOptions.Authority ??= new ZastavaAuthorityOptions(); - var authority = runtimeOptions.Authority; - - if (ShouldCopyStaticTokenValue(authority.StaticTokenValue, source.StaticTokenValue)) - { - authority.StaticTokenValue = source.StaticTokenValue; - } - - if (ShouldCopyStaticTokenValue(authority.StaticTokenPath, source.StaticTokenPath)) - { - authority.StaticTokenPath = source.StaticTokenPath; - } - - if (!string.IsNullOrWhiteSpace(source.StaticTokenValue) || !string.IsNullOrWhiteSpace(source.StaticTokenPath)) - { - authority.AllowStaticTokenFallback = true; - } - } - - private static bool ShouldCopyStaticTokenValue(string? current, string? source) - => string.IsNullOrWhiteSpace(current) && !string.IsNullOrWhiteSpace(source); -} +using System; +using Microsoft.Extensions.Options; +using StellaOps.Zastava.Core.Configuration; +using StellaOps.Zastava.Webhook.Configuration; + +namespace StellaOps.Zastava.Webhook.DependencyInjection; + +/// <summary> +/// Ensures legacy webhook authority options propagate to runtime options when not explicitly configured. +/// </summary> +internal sealed class WebhookRuntimeOptionsPostConfigure : IPostConfigureOptions<ZastavaRuntimeOptions> +{ + private readonly IOptionsMonitor<ZastavaWebhookOptions> webhookOptions; + + public WebhookRuntimeOptionsPostConfigure(IOptionsMonitor<ZastavaWebhookOptions> webhookOptions) + { + this.webhookOptions = webhookOptions ?? throw new ArgumentNullException(nameof(webhookOptions)); + } + + public void PostConfigure(string? name, ZastavaRuntimeOptions runtimeOptions) + { + ArgumentNullException.ThrowIfNull(runtimeOptions); + + var snapshot = webhookOptions.Get(name ?? Options.DefaultName); + var source = snapshot.Authority; + if (source is null) + { + return; + } + + runtimeOptions.Authority ??= new ZastavaAuthorityOptions(); + var authority = runtimeOptions.Authority; + + if (ShouldCopyStaticTokenValue(authority.StaticTokenValue, source.StaticTokenValue)) + { + authority.StaticTokenValue = source.StaticTokenValue; + } + + if (ShouldCopyStaticTokenValue(authority.StaticTokenPath, source.StaticTokenPath)) + { + authority.StaticTokenPath = source.StaticTokenPath; + } + + if (!string.IsNullOrWhiteSpace(source.StaticTokenValue) || !string.IsNullOrWhiteSpace(source.StaticTokenPath)) + { + authority.AllowStaticTokenFallback = true; + } + } + + private static bool ShouldCopyStaticTokenValue(string? current, string? 
source) + => string.IsNullOrWhiteSpace(current) && !string.IsNullOrWhiteSpace(source); +} diff --git a/src/StellaOps.Zastava.Webhook/Hosting/StartupValidationHostedService.cs b/src/Zastava/StellaOps.Zastava.Webhook/Hosting/StartupValidationHostedService.cs similarity index 100% rename from src/StellaOps.Zastava.Webhook/Hosting/StartupValidationHostedService.cs rename to src/Zastava/StellaOps.Zastava.Webhook/Hosting/StartupValidationHostedService.cs diff --git a/src/StellaOps.Zastava.Webhook/IMPLEMENTATION_PLAN.md b/src/Zastava/StellaOps.Zastava.Webhook/IMPLEMENTATION_PLAN.md similarity index 100% rename from src/StellaOps.Zastava.Webhook/IMPLEMENTATION_PLAN.md rename to src/Zastava/StellaOps.Zastava.Webhook/IMPLEMENTATION_PLAN.md diff --git a/src/StellaOps.Zastava.Webhook/Program.cs b/src/Zastava/StellaOps.Zastava.Webhook/Program.cs similarity index 100% rename from src/StellaOps.Zastava.Webhook/Program.cs rename to src/Zastava/StellaOps.Zastava.Webhook/Program.cs diff --git a/src/StellaOps.Zastava.Webhook/Properties/AssemblyInfo.cs b/src/Zastava/StellaOps.Zastava.Webhook/Properties/AssemblyInfo.cs similarity index 97% rename from src/StellaOps.Zastava.Webhook/Properties/AssemblyInfo.cs rename to src/Zastava/StellaOps.Zastava.Webhook/Properties/AssemblyInfo.cs index f84f802f..02599753 100644 --- a/src/StellaOps.Zastava.Webhook/Properties/AssemblyInfo.cs +++ b/src/Zastava/StellaOps.Zastava.Webhook/Properties/AssemblyInfo.cs @@ -1,3 +1,3 @@ -using System.Runtime.CompilerServices; - -[assembly: InternalsVisibleTo("StellaOps.Zastava.Webhook.Tests")] +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("StellaOps.Zastava.Webhook.Tests")] diff --git a/src/StellaOps.Zastava.Webhook/StellaOps.Zastava.Webhook.csproj b/src/Zastava/StellaOps.Zastava.Webhook/StellaOps.Zastava.Webhook.csproj similarity index 81% rename from src/StellaOps.Zastava.Webhook/StellaOps.Zastava.Webhook.csproj rename to src/Zastava/StellaOps.Zastava.Webhook/StellaOps.Zastava.Webhook.csproj index 1c7c4131..9336bfaf 100644 --- a/src/StellaOps.Zastava.Webhook/StellaOps.Zastava.Webhook.csproj +++ b/src/Zastava/StellaOps.Zastava.Webhook/StellaOps.Zastava.Webhook.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk.Web"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -14,6 +15,6 @@ <PackageReference Include="Serilog.Sinks.Console" Version="5.0.1" /> </ItemGroup> <ItemGroup> - <ProjectReference Include="..\StellaOps.Zastava.Core\StellaOps.Zastava.Core.csproj" /> + <ProjectReference Include="../__Libraries/StellaOps.Zastava.Core/StellaOps.Zastava.Core.csproj" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Zastava.Webhook/TASKS.md b/src/Zastava/StellaOps.Zastava.Webhook/TASKS.md similarity index 100% rename from src/StellaOps.Zastava.Webhook/TASKS.md rename to src/Zastava/StellaOps.Zastava.Webhook/TASKS.md diff --git a/src/Zastava/StellaOps.Zastava.sln b/src/Zastava/StellaOps.Zastava.sln new file mode 100644 index 00000000..a259b13b --- /dev/null +++ b/src/Zastava/StellaOps.Zastava.sln @@ -0,0 +1,199 @@ + +Microsoft Visual Studio Solution File, Format Version 12.00 +# Visual Studio Version 17 +VisualStudioVersion = 17.0.31903.59 +MinimumVisualStudioVersion = 10.0.40219.1 +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Zastava.Webhook", "StellaOps.Zastava.Webhook\StellaOps.Zastava.Webhook.csproj", "{7DFF8D67-7FCD-4324-B75E-2FBF8DFAF78E}" +EndProject 
+Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "__Libraries", "__Libraries", "{41F15E67-7190-CF23-3BC4-77E87134CADD}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Zastava.Core", "__Libraries\StellaOps.Zastava.Core\StellaOps.Zastava.Core.csproj", "{F21DE368-1D4F-4D10-823F-5BDB6B812745}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Client", "..\Authority\StellaOps.Authority\StellaOps.Auth.Client\StellaOps.Auth.Client.csproj", "{D7545C96-B68C-4D74-A76A-CE5E496D5486}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Abstractions", "..\Authority\StellaOps.Authority\StellaOps.Auth.Abstractions\StellaOps.Auth.Abstractions.csproj", "{6C65EC84-B36C-466B-9B9E-0CD47665F765}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Configuration", "..\__Libraries\StellaOps.Configuration\StellaOps.Configuration.csproj", "{3266C0B0-063D-4FB5-A28A-F7C062FB6BEB}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Authority.Plugins.Abstractions", "..\Authority\StellaOps.Authority\StellaOps.Authority.Plugins.Abstractions\StellaOps.Authority.Plugins.Abstractions.csproj", "{E7713D51-9773-4545-8FFA-F76B1834A5CA}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Cryptography", "..\__Libraries\StellaOps.Cryptography\StellaOps.Cryptography.csproj", "{007BF6CD-AE9D-4F03-B84B-6EF43CA29BC6}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Auth.Security", "..\__Libraries\StellaOps.Auth.Security\StellaOps.Auth.Security.csproj", "{DE2E6231-9BD7-4D39-8302-F03A399BA128}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Zastava.Observer", "StellaOps.Zastava.Observer\StellaOps.Zastava.Observer.csproj", "{42426AB9-790F-4432-9943-837145B1BD2C}" +EndProject +Project("{2150E333-8FDC-42A3-9474-1A3956D46DE8}") = "__Tests", "__Tests", "{56BCE1BF-7CBA-7CE8-203D-A88051F1D642}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Zastava.Core.Tests", "__Tests\StellaOps.Zastava.Core.Tests\StellaOps.Zastava.Core.Tests.csproj", "{51F41A02-1A15-4D95-B8A1-888073349A2D}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Zastava.Observer.Tests", "__Tests\StellaOps.Zastava.Observer.Tests\StellaOps.Zastava.Observer.Tests.csproj", "{22443733-DA1C-47E3-ABC5-3030709110DE}" +EndProject +Project("{FAE04EC0-301F-11D3-BF4B-00C04F79EFBC}") = "StellaOps.Zastava.Webhook.Tests", "__Tests\StellaOps.Zastava.Webhook.Tests\StellaOps.Zastava.Webhook.Tests.csproj", "{C7098645-40A2-4990-ACBB-1051009C53D2}" +EndProject +Global + GlobalSection(SolutionConfigurationPlatforms) = preSolution + Debug|Any CPU = Debug|Any CPU + Debug|x64 = Debug|x64 + Debug|x86 = Debug|x86 + Release|Any CPU = Release|Any CPU + Release|x64 = Release|x64 + Release|x86 = Release|x86 + EndGlobalSection + GlobalSection(ProjectConfigurationPlatforms) = postSolution + {7DFF8D67-7FCD-4324-B75E-2FBF8DFAF78E}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {7DFF8D67-7FCD-4324-B75E-2FBF8DFAF78E}.Debug|Any CPU.Build.0 = Debug|Any CPU + {7DFF8D67-7FCD-4324-B75E-2FBF8DFAF78E}.Debug|x64.ActiveCfg = Debug|Any CPU + {7DFF8D67-7FCD-4324-B75E-2FBF8DFAF78E}.Debug|x64.Build.0 = Debug|Any CPU + {7DFF8D67-7FCD-4324-B75E-2FBF8DFAF78E}.Debug|x86.ActiveCfg = Debug|Any CPU + {7DFF8D67-7FCD-4324-B75E-2FBF8DFAF78E}.Debug|x86.Build.0 = Debug|Any CPU + {7DFF8D67-7FCD-4324-B75E-2FBF8DFAF78E}.Release|Any CPU.ActiveCfg = Release|Any CPU + 
{7DFF8D67-7FCD-4324-B75E-2FBF8DFAF78E}.Release|Any CPU.Build.0 = Release|Any CPU + {7DFF8D67-7FCD-4324-B75E-2FBF8DFAF78E}.Release|x64.ActiveCfg = Release|Any CPU + {7DFF8D67-7FCD-4324-B75E-2FBF8DFAF78E}.Release|x64.Build.0 = Release|Any CPU + {7DFF8D67-7FCD-4324-B75E-2FBF8DFAF78E}.Release|x86.ActiveCfg = Release|Any CPU + {7DFF8D67-7FCD-4324-B75E-2FBF8DFAF78E}.Release|x86.Build.0 = Release|Any CPU + {F21DE368-1D4F-4D10-823F-5BDB6B812745}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {F21DE368-1D4F-4D10-823F-5BDB6B812745}.Debug|Any CPU.Build.0 = Debug|Any CPU + {F21DE368-1D4F-4D10-823F-5BDB6B812745}.Debug|x64.ActiveCfg = Debug|Any CPU + {F21DE368-1D4F-4D10-823F-5BDB6B812745}.Debug|x64.Build.0 = Debug|Any CPU + {F21DE368-1D4F-4D10-823F-5BDB6B812745}.Debug|x86.ActiveCfg = Debug|Any CPU + {F21DE368-1D4F-4D10-823F-5BDB6B812745}.Debug|x86.Build.0 = Debug|Any CPU + {F21DE368-1D4F-4D10-823F-5BDB6B812745}.Release|Any CPU.ActiveCfg = Release|Any CPU + {F21DE368-1D4F-4D10-823F-5BDB6B812745}.Release|Any CPU.Build.0 = Release|Any CPU + {F21DE368-1D4F-4D10-823F-5BDB6B812745}.Release|x64.ActiveCfg = Release|Any CPU + {F21DE368-1D4F-4D10-823F-5BDB6B812745}.Release|x64.Build.0 = Release|Any CPU + {F21DE368-1D4F-4D10-823F-5BDB6B812745}.Release|x86.ActiveCfg = Release|Any CPU + {F21DE368-1D4F-4D10-823F-5BDB6B812745}.Release|x86.Build.0 = Release|Any CPU + {D7545C96-B68C-4D74-A76A-CE5E496D5486}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {D7545C96-B68C-4D74-A76A-CE5E496D5486}.Debug|Any CPU.Build.0 = Debug|Any CPU + {D7545C96-B68C-4D74-A76A-CE5E496D5486}.Debug|x64.ActiveCfg = Debug|Any CPU + {D7545C96-B68C-4D74-A76A-CE5E496D5486}.Debug|x64.Build.0 = Debug|Any CPU + {D7545C96-B68C-4D74-A76A-CE5E496D5486}.Debug|x86.ActiveCfg = Debug|Any CPU + {D7545C96-B68C-4D74-A76A-CE5E496D5486}.Debug|x86.Build.0 = Debug|Any CPU + {D7545C96-B68C-4D74-A76A-CE5E496D5486}.Release|Any CPU.ActiveCfg = Release|Any CPU + {D7545C96-B68C-4D74-A76A-CE5E496D5486}.Release|Any CPU.Build.0 = Release|Any CPU + {D7545C96-B68C-4D74-A76A-CE5E496D5486}.Release|x64.ActiveCfg = Release|Any CPU + {D7545C96-B68C-4D74-A76A-CE5E496D5486}.Release|x64.Build.0 = Release|Any CPU + {D7545C96-B68C-4D74-A76A-CE5E496D5486}.Release|x86.ActiveCfg = Release|Any CPU + {D7545C96-B68C-4D74-A76A-CE5E496D5486}.Release|x86.Build.0 = Release|Any CPU + {6C65EC84-B36C-466B-9B9E-0CD47665F765}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {6C65EC84-B36C-466B-9B9E-0CD47665F765}.Debug|Any CPU.Build.0 = Debug|Any CPU + {6C65EC84-B36C-466B-9B9E-0CD47665F765}.Debug|x64.ActiveCfg = Debug|Any CPU + {6C65EC84-B36C-466B-9B9E-0CD47665F765}.Debug|x64.Build.0 = Debug|Any CPU + {6C65EC84-B36C-466B-9B9E-0CD47665F765}.Debug|x86.ActiveCfg = Debug|Any CPU + {6C65EC84-B36C-466B-9B9E-0CD47665F765}.Debug|x86.Build.0 = Debug|Any CPU + {6C65EC84-B36C-466B-9B9E-0CD47665F765}.Release|Any CPU.ActiveCfg = Release|Any CPU + {6C65EC84-B36C-466B-9B9E-0CD47665F765}.Release|Any CPU.Build.0 = Release|Any CPU + {6C65EC84-B36C-466B-9B9E-0CD47665F765}.Release|x64.ActiveCfg = Release|Any CPU + {6C65EC84-B36C-466B-9B9E-0CD47665F765}.Release|x64.Build.0 = Release|Any CPU + {6C65EC84-B36C-466B-9B9E-0CD47665F765}.Release|x86.ActiveCfg = Release|Any CPU + {6C65EC84-B36C-466B-9B9E-0CD47665F765}.Release|x86.Build.0 = Release|Any CPU + {3266C0B0-063D-4FB5-A28A-F7C062FB6BEB}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {3266C0B0-063D-4FB5-A28A-F7C062FB6BEB}.Debug|Any CPU.Build.0 = Debug|Any CPU + {3266C0B0-063D-4FB5-A28A-F7C062FB6BEB}.Debug|x64.ActiveCfg = Debug|Any CPU + {3266C0B0-063D-4FB5-A28A-F7C062FB6BEB}.Debug|x64.Build.0 = 
Debug|Any CPU + {3266C0B0-063D-4FB5-A28A-F7C062FB6BEB}.Debug|x86.ActiveCfg = Debug|Any CPU + {3266C0B0-063D-4FB5-A28A-F7C062FB6BEB}.Debug|x86.Build.0 = Debug|Any CPU + {3266C0B0-063D-4FB5-A28A-F7C062FB6BEB}.Release|Any CPU.ActiveCfg = Release|Any CPU + {3266C0B0-063D-4FB5-A28A-F7C062FB6BEB}.Release|Any CPU.Build.0 = Release|Any CPU + {3266C0B0-063D-4FB5-A28A-F7C062FB6BEB}.Release|x64.ActiveCfg = Release|Any CPU + {3266C0B0-063D-4FB5-A28A-F7C062FB6BEB}.Release|x64.Build.0 = Release|Any CPU + {3266C0B0-063D-4FB5-A28A-F7C062FB6BEB}.Release|x86.ActiveCfg = Release|Any CPU + {3266C0B0-063D-4FB5-A28A-F7C062FB6BEB}.Release|x86.Build.0 = Release|Any CPU + {E7713D51-9773-4545-8FFA-F76B1834A5CA}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {E7713D51-9773-4545-8FFA-F76B1834A5CA}.Debug|Any CPU.Build.0 = Debug|Any CPU + {E7713D51-9773-4545-8FFA-F76B1834A5CA}.Debug|x64.ActiveCfg = Debug|Any CPU + {E7713D51-9773-4545-8FFA-F76B1834A5CA}.Debug|x64.Build.0 = Debug|Any CPU + {E7713D51-9773-4545-8FFA-F76B1834A5CA}.Debug|x86.ActiveCfg = Debug|Any CPU + {E7713D51-9773-4545-8FFA-F76B1834A5CA}.Debug|x86.Build.0 = Debug|Any CPU + {E7713D51-9773-4545-8FFA-F76B1834A5CA}.Release|Any CPU.ActiveCfg = Release|Any CPU + {E7713D51-9773-4545-8FFA-F76B1834A5CA}.Release|Any CPU.Build.0 = Release|Any CPU + {E7713D51-9773-4545-8FFA-F76B1834A5CA}.Release|x64.ActiveCfg = Release|Any CPU + {E7713D51-9773-4545-8FFA-F76B1834A5CA}.Release|x64.Build.0 = Release|Any CPU + {E7713D51-9773-4545-8FFA-F76B1834A5CA}.Release|x86.ActiveCfg = Release|Any CPU + {E7713D51-9773-4545-8FFA-F76B1834A5CA}.Release|x86.Build.0 = Release|Any CPU + {007BF6CD-AE9D-4F03-B84B-6EF43CA29BC6}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {007BF6CD-AE9D-4F03-B84B-6EF43CA29BC6}.Debug|Any CPU.Build.0 = Debug|Any CPU + {007BF6CD-AE9D-4F03-B84B-6EF43CA29BC6}.Debug|x64.ActiveCfg = Debug|Any CPU + {007BF6CD-AE9D-4F03-B84B-6EF43CA29BC6}.Debug|x64.Build.0 = Debug|Any CPU + {007BF6CD-AE9D-4F03-B84B-6EF43CA29BC6}.Debug|x86.ActiveCfg = Debug|Any CPU + {007BF6CD-AE9D-4F03-B84B-6EF43CA29BC6}.Debug|x86.Build.0 = Debug|Any CPU + {007BF6CD-AE9D-4F03-B84B-6EF43CA29BC6}.Release|Any CPU.ActiveCfg = Release|Any CPU + {007BF6CD-AE9D-4F03-B84B-6EF43CA29BC6}.Release|Any CPU.Build.0 = Release|Any CPU + {007BF6CD-AE9D-4F03-B84B-6EF43CA29BC6}.Release|x64.ActiveCfg = Release|Any CPU + {007BF6CD-AE9D-4F03-B84B-6EF43CA29BC6}.Release|x64.Build.0 = Release|Any CPU + {007BF6CD-AE9D-4F03-B84B-6EF43CA29BC6}.Release|x86.ActiveCfg = Release|Any CPU + {007BF6CD-AE9D-4F03-B84B-6EF43CA29BC6}.Release|x86.Build.0 = Release|Any CPU + {DE2E6231-9BD7-4D39-8302-F03A399BA128}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {DE2E6231-9BD7-4D39-8302-F03A399BA128}.Debug|Any CPU.Build.0 = Debug|Any CPU + {DE2E6231-9BD7-4D39-8302-F03A399BA128}.Debug|x64.ActiveCfg = Debug|Any CPU + {DE2E6231-9BD7-4D39-8302-F03A399BA128}.Debug|x64.Build.0 = Debug|Any CPU + {DE2E6231-9BD7-4D39-8302-F03A399BA128}.Debug|x86.ActiveCfg = Debug|Any CPU + {DE2E6231-9BD7-4D39-8302-F03A399BA128}.Debug|x86.Build.0 = Debug|Any CPU + {DE2E6231-9BD7-4D39-8302-F03A399BA128}.Release|Any CPU.ActiveCfg = Release|Any CPU + {DE2E6231-9BD7-4D39-8302-F03A399BA128}.Release|Any CPU.Build.0 = Release|Any CPU + {DE2E6231-9BD7-4D39-8302-F03A399BA128}.Release|x64.ActiveCfg = Release|Any CPU + {DE2E6231-9BD7-4D39-8302-F03A399BA128}.Release|x64.Build.0 = Release|Any CPU + {DE2E6231-9BD7-4D39-8302-F03A399BA128}.Release|x86.ActiveCfg = Release|Any CPU + {DE2E6231-9BD7-4D39-8302-F03A399BA128}.Release|x86.Build.0 = Release|Any CPU + 
{42426AB9-790F-4432-9943-837145B1BD2C}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {42426AB9-790F-4432-9943-837145B1BD2C}.Debug|Any CPU.Build.0 = Debug|Any CPU + {42426AB9-790F-4432-9943-837145B1BD2C}.Debug|x64.ActiveCfg = Debug|Any CPU + {42426AB9-790F-4432-9943-837145B1BD2C}.Debug|x64.Build.0 = Debug|Any CPU + {42426AB9-790F-4432-9943-837145B1BD2C}.Debug|x86.ActiveCfg = Debug|Any CPU + {42426AB9-790F-4432-9943-837145B1BD2C}.Debug|x86.Build.0 = Debug|Any CPU + {42426AB9-790F-4432-9943-837145B1BD2C}.Release|Any CPU.ActiveCfg = Release|Any CPU + {42426AB9-790F-4432-9943-837145B1BD2C}.Release|Any CPU.Build.0 = Release|Any CPU + {42426AB9-790F-4432-9943-837145B1BD2C}.Release|x64.ActiveCfg = Release|Any CPU + {42426AB9-790F-4432-9943-837145B1BD2C}.Release|x64.Build.0 = Release|Any CPU + {42426AB9-790F-4432-9943-837145B1BD2C}.Release|x86.ActiveCfg = Release|Any CPU + {42426AB9-790F-4432-9943-837145B1BD2C}.Release|x86.Build.0 = Release|Any CPU + {51F41A02-1A15-4D95-B8A1-888073349A2D}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {51F41A02-1A15-4D95-B8A1-888073349A2D}.Debug|Any CPU.Build.0 = Debug|Any CPU + {51F41A02-1A15-4D95-B8A1-888073349A2D}.Debug|x64.ActiveCfg = Debug|Any CPU + {51F41A02-1A15-4D95-B8A1-888073349A2D}.Debug|x64.Build.0 = Debug|Any CPU + {51F41A02-1A15-4D95-B8A1-888073349A2D}.Debug|x86.ActiveCfg = Debug|Any CPU + {51F41A02-1A15-4D95-B8A1-888073349A2D}.Debug|x86.Build.0 = Debug|Any CPU + {51F41A02-1A15-4D95-B8A1-888073349A2D}.Release|Any CPU.ActiveCfg = Release|Any CPU + {51F41A02-1A15-4D95-B8A1-888073349A2D}.Release|Any CPU.Build.0 = Release|Any CPU + {51F41A02-1A15-4D95-B8A1-888073349A2D}.Release|x64.ActiveCfg = Release|Any CPU + {51F41A02-1A15-4D95-B8A1-888073349A2D}.Release|x64.Build.0 = Release|Any CPU + {51F41A02-1A15-4D95-B8A1-888073349A2D}.Release|x86.ActiveCfg = Release|Any CPU + {51F41A02-1A15-4D95-B8A1-888073349A2D}.Release|x86.Build.0 = Release|Any CPU + {22443733-DA1C-47E3-ABC5-3030709110DE}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {22443733-DA1C-47E3-ABC5-3030709110DE}.Debug|Any CPU.Build.0 = Debug|Any CPU + {22443733-DA1C-47E3-ABC5-3030709110DE}.Debug|x64.ActiveCfg = Debug|Any CPU + {22443733-DA1C-47E3-ABC5-3030709110DE}.Debug|x64.Build.0 = Debug|Any CPU + {22443733-DA1C-47E3-ABC5-3030709110DE}.Debug|x86.ActiveCfg = Debug|Any CPU + {22443733-DA1C-47E3-ABC5-3030709110DE}.Debug|x86.Build.0 = Debug|Any CPU + {22443733-DA1C-47E3-ABC5-3030709110DE}.Release|Any CPU.ActiveCfg = Release|Any CPU + {22443733-DA1C-47E3-ABC5-3030709110DE}.Release|Any CPU.Build.0 = Release|Any CPU + {22443733-DA1C-47E3-ABC5-3030709110DE}.Release|x64.ActiveCfg = Release|Any CPU + {22443733-DA1C-47E3-ABC5-3030709110DE}.Release|x64.Build.0 = Release|Any CPU + {22443733-DA1C-47E3-ABC5-3030709110DE}.Release|x86.ActiveCfg = Release|Any CPU + {22443733-DA1C-47E3-ABC5-3030709110DE}.Release|x86.Build.0 = Release|Any CPU + {C7098645-40A2-4990-ACBB-1051009C53D2}.Debug|Any CPU.ActiveCfg = Debug|Any CPU + {C7098645-40A2-4990-ACBB-1051009C53D2}.Debug|Any CPU.Build.0 = Debug|Any CPU + {C7098645-40A2-4990-ACBB-1051009C53D2}.Debug|x64.ActiveCfg = Debug|Any CPU + {C7098645-40A2-4990-ACBB-1051009C53D2}.Debug|x64.Build.0 = Debug|Any CPU + {C7098645-40A2-4990-ACBB-1051009C53D2}.Debug|x86.ActiveCfg = Debug|Any CPU + {C7098645-40A2-4990-ACBB-1051009C53D2}.Debug|x86.Build.0 = Debug|Any CPU + {C7098645-40A2-4990-ACBB-1051009C53D2}.Release|Any CPU.ActiveCfg = Release|Any CPU + {C7098645-40A2-4990-ACBB-1051009C53D2}.Release|Any CPU.Build.0 = Release|Any CPU + {C7098645-40A2-4990-ACBB-1051009C53D2}.Release|x64.ActiveCfg = 
Release|Any CPU + {C7098645-40A2-4990-ACBB-1051009C53D2}.Release|x64.Build.0 = Release|Any CPU + {C7098645-40A2-4990-ACBB-1051009C53D2}.Release|x86.ActiveCfg = Release|Any CPU + {C7098645-40A2-4990-ACBB-1051009C53D2}.Release|x86.Build.0 = Release|Any CPU + EndGlobalSection + GlobalSection(SolutionProperties) = preSolution + HideSolutionNode = FALSE + EndGlobalSection + GlobalSection(NestedProjects) = preSolution + {F21DE368-1D4F-4D10-823F-5BDB6B812745} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {42426AB9-790F-4432-9943-837145B1BD2C} = {41F15E67-7190-CF23-3BC4-77E87134CADD} + {51F41A02-1A15-4D95-B8A1-888073349A2D} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {22443733-DA1C-47E3-ABC5-3030709110DE} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + {C7098645-40A2-4990-ACBB-1051009C53D2} = {56BCE1BF-7CBA-7CE8-203D-A88051F1D642} + EndGlobalSection +EndGlobal diff --git a/src/StellaOps.Zastava.Core/Configuration/ZastavaAuthorityOptions.cs b/src/Zastava/__Libraries/StellaOps.Zastava.Core/Configuration/ZastavaAuthorityOptions.cs similarity index 96% rename from src/StellaOps.Zastava.Core/Configuration/ZastavaAuthorityOptions.cs rename to src/Zastava/__Libraries/StellaOps.Zastava.Core/Configuration/ZastavaAuthorityOptions.cs index ab08a7fc..1cfa19aa 100644 --- a/src/StellaOps.Zastava.Core/Configuration/ZastavaAuthorityOptions.cs +++ b/src/Zastava/__Libraries/StellaOps.Zastava.Core/Configuration/ZastavaAuthorityOptions.cs @@ -1,68 +1,68 @@ -using System.ComponentModel.DataAnnotations; - -namespace StellaOps.Zastava.Core.Configuration; - -/// <summary> -/// Authority client configuration shared by Zastava runtime components. -/// </summary> -public sealed class ZastavaAuthorityOptions -{ - /// <summary> - /// Authority issuer URL. - /// </summary> - [Required] - public Uri Issuer { get; set; } = new("https://authority.internal"); - - /// <summary> - /// OAuth client identifier used by runtime services. - /// </summary> - [Required(AllowEmptyStrings = false)] - public string ClientId { get; set; } = "zastava-runtime"; - - /// <summary> - /// Optional client secret when using confidential clients. - /// </summary> - public string? ClientSecret { get; set; } - - /// <summary> - /// Audience claims required on issued tokens. - /// </summary> - [MinLength(1)] - public string[] Audience { get; set; } = new[] { "scanner" }; - - /// <summary> - /// Additional scopes requested for the runtime plane. - /// </summary> - public string[] Scopes { get; set; } = Array.Empty<string>(); - - /// <summary> - /// Seconds before expiry when a cached token should be refreshed. - /// </summary> - [Range(typeof(double), "0", "3600")] - public double RefreshSkewSeconds { get; set; } = 120; - - /// <summary> - /// Require the Authority to issue DPoP (proof-of-possession) tokens. - /// </summary> - public bool RequireDpop { get; set; } = true; - - /// <summary> - /// Require the Authority client to present mTLS during token acquisition. - /// </summary> - public bool RequireMutualTls { get; set; } = true; - - /// <summary> - /// Allow falling back to static tokens when Authority is unavailable. - /// </summary> - public bool AllowStaticTokenFallback { get; set; } - - /// <summary> - /// Optional path to a static fallback token (PEM/plain text). - /// </summary> - public string? StaticTokenPath { get; set; } - - /// <summary> - /// Optional literal static token (test/bootstrap only). Takes precedence over <see cref="StaticTokenPath"/>. - /// </summary> - public string? 
StaticTokenValue { get; set; } -} +using System.ComponentModel.DataAnnotations; + +namespace StellaOps.Zastava.Core.Configuration; + +/// <summary> +/// Authority client configuration shared by Zastava runtime components. +/// </summary> +public sealed class ZastavaAuthorityOptions +{ + /// <summary> + /// Authority issuer URL. + /// </summary> + [Required] + public Uri Issuer { get; set; } = new("https://authority.internal"); + + /// <summary> + /// OAuth client identifier used by runtime services. + /// </summary> + [Required(AllowEmptyStrings = false)] + public string ClientId { get; set; } = "zastava-runtime"; + + /// <summary> + /// Optional client secret when using confidential clients. + /// </summary> + public string? ClientSecret { get; set; } + + /// <summary> + /// Audience claims required on issued tokens. + /// </summary> + [MinLength(1)] + public string[] Audience { get; set; } = new[] { "scanner" }; + + /// <summary> + /// Additional scopes requested for the runtime plane. + /// </summary> + public string[] Scopes { get; set; } = Array.Empty<string>(); + + /// <summary> + /// Seconds before expiry when a cached token should be refreshed. + /// </summary> + [Range(typeof(double), "0", "3600")] + public double RefreshSkewSeconds { get; set; } = 120; + + /// <summary> + /// Require the Authority to issue DPoP (proof-of-possession) tokens. + /// </summary> + public bool RequireDpop { get; set; } = true; + + /// <summary> + /// Require the Authority client to present mTLS during token acquisition. + /// </summary> + public bool RequireMutualTls { get; set; } = true; + + /// <summary> + /// Allow falling back to static tokens when Authority is unavailable. + /// </summary> + public bool AllowStaticTokenFallback { get; set; } + + /// <summary> + /// Optional path to a static fallback token (PEM/plain text). + /// </summary> + public string? StaticTokenPath { get; set; } + + /// <summary> + /// Optional literal static token (test/bootstrap only). Takes precedence over <see cref="StaticTokenPath"/>. + /// </summary> + public string? StaticTokenValue { get; set; } +} diff --git a/src/StellaOps.Zastava.Core/Configuration/ZastavaRuntimeOptions.cs b/src/Zastava/__Libraries/StellaOps.Zastava.Core/Configuration/ZastavaRuntimeOptions.cs similarity index 96% rename from src/StellaOps.Zastava.Core/Configuration/ZastavaRuntimeOptions.cs rename to src/Zastava/__Libraries/StellaOps.Zastava.Core/Configuration/ZastavaRuntimeOptions.cs index 5105d42b..83c61b44 100644 --- a/src/StellaOps.Zastava.Core/Configuration/ZastavaRuntimeOptions.cs +++ b/src/Zastava/__Libraries/StellaOps.Zastava.Core/Configuration/ZastavaRuntimeOptions.cs @@ -1,84 +1,84 @@ -using System.ComponentModel.DataAnnotations; - -namespace StellaOps.Zastava.Core.Configuration; - -/// <summary> -/// Common runtime configuration shared by Zastava components (observer, webhook, agent). -/// </summary> -public sealed class ZastavaRuntimeOptions -{ - public const string SectionName = "zastava:runtime"; - - /// <summary> - /// Tenant identifier used for scoping logs and metrics. - /// </summary> - [Required(AllowEmptyStrings = false)] - public string Tenant { get; set; } = "default"; - - /// <summary> - /// Deployment environment (prod, staging, etc.) used in telemetry dimensions. - /// </summary> - [Required(AllowEmptyStrings = false)] - public string Environment { get; set; } = "local"; - - /// <summary> - /// Component name (observer/webhook/agent) injected into scopes and metrics. - /// </summary> - public string? 
Component { get; set; } - - /// <summary> - /// Optional deployment identifier (cluster, region, etc.). - /// </summary> - public string? Deployment { get; set; } - - [Required] - public ZastavaRuntimeLoggingOptions Logging { get; set; } = new(); - - [Required] - public ZastavaRuntimeMetricsOptions Metrics { get; set; } = new(); - - [Required] - public ZastavaAuthorityOptions Authority { get; set; } = new(); -} - -public sealed class ZastavaRuntimeLoggingOptions -{ - /// <summary> - /// Whether scopes should be enabled on the logger factory. - /// </summary> - public bool IncludeScopes { get; init; } = true; - - /// <summary> - /// Whether activity tracking metadata (TraceId/SpanId) should be captured. - /// </summary> - public bool IncludeActivityTracking { get; init; } = true; - - /// <summary> - /// Optional static key/value pairs appended to every log scope. - /// </summary> - public IDictionary<string, string> StaticScope { get; init; } = new Dictionary<string, string>(StringComparer.Ordinal); -} - -public sealed class ZastavaRuntimeMetricsOptions -{ - /// <summary> - /// Enables metrics emission. - /// </summary> - public bool Enabled { get; init; } = true; - - /// <summary> - /// Meter name used for all runtime instrumentation. - /// </summary> - [Required(AllowEmptyStrings = false)] - public string MeterName { get; init; } = "StellaOps.Zastava"; - - /// <summary> - /// Optional meter semantic version. - /// </summary> - public string? MeterVersion { get; init; } = "1.0.0"; - - /// <summary> - /// Common dimensions attached to every metric emitted by the runtime plane. - /// </summary> - public IDictionary<string, string> CommonTags { get; init; } = new Dictionary<string, string>(StringComparer.Ordinal); -} +using System.ComponentModel.DataAnnotations; + +namespace StellaOps.Zastava.Core.Configuration; + +/// <summary> +/// Common runtime configuration shared by Zastava components (observer, webhook, agent). +/// </summary> +public sealed class ZastavaRuntimeOptions +{ + public const string SectionName = "zastava:runtime"; + + /// <summary> + /// Tenant identifier used for scoping logs and metrics. + /// </summary> + [Required(AllowEmptyStrings = false)] + public string Tenant { get; set; } = "default"; + + /// <summary> + /// Deployment environment (prod, staging, etc.) used in telemetry dimensions. + /// </summary> + [Required(AllowEmptyStrings = false)] + public string Environment { get; set; } = "local"; + + /// <summary> + /// Component name (observer/webhook/agent) injected into scopes and metrics. + /// </summary> + public string? Component { get; set; } + + /// <summary> + /// Optional deployment identifier (cluster, region, etc.). + /// </summary> + public string? Deployment { get; set; } + + [Required] + public ZastavaRuntimeLoggingOptions Logging { get; set; } = new(); + + [Required] + public ZastavaRuntimeMetricsOptions Metrics { get; set; } = new(); + + [Required] + public ZastavaAuthorityOptions Authority { get; set; } = new(); +} + +public sealed class ZastavaRuntimeLoggingOptions +{ + /// <summary> + /// Whether scopes should be enabled on the logger factory. + /// </summary> + public bool IncludeScopes { get; init; } = true; + + /// <summary> + /// Whether activity tracking metadata (TraceId/SpanId) should be captured. + /// </summary> + public bool IncludeActivityTracking { get; init; } = true; + + /// <summary> + /// Optional static key/value pairs appended to every log scope. 
+ /// </summary> + public IDictionary<string, string> StaticScope { get; init; } = new Dictionary<string, string>(StringComparer.Ordinal); +} + +public sealed class ZastavaRuntimeMetricsOptions +{ + /// <summary> + /// Enables metrics emission. + /// </summary> + public bool Enabled { get; init; } = true; + + /// <summary> + /// Meter name used for all runtime instrumentation. + /// </summary> + [Required(AllowEmptyStrings = false)] + public string MeterName { get; init; } = "StellaOps.Zastava"; + + /// <summary> + /// Optional meter semantic version. + /// </summary> + public string? MeterVersion { get; init; } = "1.0.0"; + + /// <summary> + /// Common dimensions attached to every metric emitted by the runtime plane. + /// </summary> + public IDictionary<string, string> CommonTags { get; init; } = new Dictionary<string, string>(StringComparer.Ordinal); +} diff --git a/src/StellaOps.Zastava.Core/Contracts/AdmissionDecision.cs b/src/Zastava/__Libraries/StellaOps.Zastava.Core/Contracts/AdmissionDecision.cs similarity index 100% rename from src/StellaOps.Zastava.Core/Contracts/AdmissionDecision.cs rename to src/Zastava/__Libraries/StellaOps.Zastava.Core/Contracts/AdmissionDecision.cs diff --git a/src/StellaOps.Zastava.Core/Contracts/RuntimeEvent.cs b/src/Zastava/__Libraries/StellaOps.Zastava.Core/Contracts/RuntimeEvent.cs similarity index 100% rename from src/StellaOps.Zastava.Core/Contracts/RuntimeEvent.cs rename to src/Zastava/__Libraries/StellaOps.Zastava.Core/Contracts/RuntimeEvent.cs diff --git a/src/StellaOps.Zastava.Core/Contracts/ZastavaContractVersions.cs b/src/Zastava/__Libraries/StellaOps.Zastava.Core/Contracts/ZastavaContractVersions.cs similarity index 100% rename from src/StellaOps.Zastava.Core/Contracts/ZastavaContractVersions.cs rename to src/Zastava/__Libraries/StellaOps.Zastava.Core/Contracts/ZastavaContractVersions.cs diff --git a/src/StellaOps.Zastava.Core/DependencyInjection/ZastavaServiceCollectionExtensions.cs b/src/Zastava/__Libraries/StellaOps.Zastava.Core/DependencyInjection/ZastavaServiceCollectionExtensions.cs similarity index 97% rename from src/StellaOps.Zastava.Core/DependencyInjection/ZastavaServiceCollectionExtensions.cs rename to src/Zastava/__Libraries/StellaOps.Zastava.Core/DependencyInjection/ZastavaServiceCollectionExtensions.cs index 3733c78e..99b0320e 100644 --- a/src/StellaOps.Zastava.Core/DependencyInjection/ZastavaServiceCollectionExtensions.cs +++ b/src/Zastava/__Libraries/StellaOps.Zastava.Core/DependencyInjection/ZastavaServiceCollectionExtensions.cs @@ -1,98 +1,98 @@ -using System.Collections.Generic; -using System.Linq; -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.DependencyInjection.Extensions; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Auth.Client; -using StellaOps.Zastava.Core.Configuration; -using StellaOps.Zastava.Core.Diagnostics; -using StellaOps.Zastava.Core.Security; - -namespace Microsoft.Extensions.DependencyInjection; - -public static class ZastavaServiceCollectionExtensions -{ - public static IServiceCollection AddZastavaRuntimeCore( - this IServiceCollection services, - IConfiguration configuration, - string componentName) - { - ArgumentNullException.ThrowIfNull(services); - ArgumentNullException.ThrowIfNull(configuration); - if (string.IsNullOrWhiteSpace(componentName)) - { - throw new ArgumentException("Component name is required.", nameof(componentName)); - } - - services.AddOptions<ZastavaRuntimeOptions>() - 
.Bind(configuration.GetSection(ZastavaRuntimeOptions.SectionName)) - .ValidateDataAnnotations() - .Validate(static options => !string.IsNullOrWhiteSpace(options.Tenant), "Tenant is required.") - .Validate(static options => !string.IsNullOrWhiteSpace(options.Environment), "Environment is required.") - .PostConfigure(options => - { - if (string.IsNullOrWhiteSpace(options.Component)) - { - options.Component = componentName; - } - }) - .ValidateOnStart(); - - services.TryAddEnumerable(ServiceDescriptor.Singleton<IConfigureOptions<LoggerFactoryOptions>, ZastavaLoggerFactoryOptionsConfigurator>()); - services.TryAddSingleton<IZastavaLogScopeBuilder, ZastavaLogScopeBuilder>(); - services.TryAddSingleton<IZastavaRuntimeMetrics, ZastavaRuntimeMetrics>(); - ConfigureAuthorityServices(services, configuration); - services.TryAddSingleton<IZastavaAuthorityTokenProvider, ZastavaAuthorityTokenProvider>(); - - return services; - } - - private static void ConfigureAuthorityServices(IServiceCollection services, IConfiguration configuration) - { - var authoritySection = configuration.GetSection($"{ZastavaRuntimeOptions.SectionName}:authority"); - var authorityOptions = new ZastavaAuthorityOptions(); - authoritySection.Bind(authorityOptions); - - services.AddStellaOpsAuthClient(options => - { - options.Authority = authorityOptions.Issuer.ToString(); - options.ClientId = authorityOptions.ClientId; - options.ClientSecret = authorityOptions.ClientSecret; - options.AllowOfflineCacheFallback = authorityOptions.AllowStaticTokenFallback; - options.ExpirationSkew = TimeSpan.FromSeconds(Math.Clamp(authorityOptions.RefreshSkewSeconds, 0, 300)); - - options.DefaultScopes.Clear(); - var normalized = new SortedSet<string>(StringComparer.Ordinal); - - if (authorityOptions.Audience is not null) - { - foreach (var audience in authorityOptions.Audience) - { - if (string.IsNullOrWhiteSpace(audience)) - { - continue; - } - - normalized.Add($"aud:{audience.Trim().ToLowerInvariant()}"); - } - } - - if (authorityOptions.Scopes is not null) - { - foreach (var scope in authorityOptions.Scopes) - { - if (!string.IsNullOrWhiteSpace(scope)) - { - normalized.Add(scope.Trim()); - } - } - } - - foreach (var scope in normalized) - { - options.DefaultScopes.Add(scope); - } - }); - } -} +using System.Collections.Generic; +using System.Linq; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Auth.Client; +using StellaOps.Zastava.Core.Configuration; +using StellaOps.Zastava.Core.Diagnostics; +using StellaOps.Zastava.Core.Security; + +namespace Microsoft.Extensions.DependencyInjection; + +public static class ZastavaServiceCollectionExtensions +{ + public static IServiceCollection AddZastavaRuntimeCore( + this IServiceCollection services, + IConfiguration configuration, + string componentName) + { + ArgumentNullException.ThrowIfNull(services); + ArgumentNullException.ThrowIfNull(configuration); + if (string.IsNullOrWhiteSpace(componentName)) + { + throw new ArgumentException("Component name is required.", nameof(componentName)); + } + + services.AddOptions<ZastavaRuntimeOptions>() + .Bind(configuration.GetSection(ZastavaRuntimeOptions.SectionName)) + .ValidateDataAnnotations() + .Validate(static options => !string.IsNullOrWhiteSpace(options.Tenant), "Tenant is required.") + .Validate(static options => !string.IsNullOrWhiteSpace(options.Environment), "Environment is required.") + .PostConfigure(options => 
+ { + if (string.IsNullOrWhiteSpace(options.Component)) + { + options.Component = componentName; + } + }) + .ValidateOnStart(); + + services.TryAddEnumerable(ServiceDescriptor.Singleton<IConfigureOptions<LoggerFactoryOptions>, ZastavaLoggerFactoryOptionsConfigurator>()); + services.TryAddSingleton<IZastavaLogScopeBuilder, ZastavaLogScopeBuilder>(); + services.TryAddSingleton<IZastavaRuntimeMetrics, ZastavaRuntimeMetrics>(); + ConfigureAuthorityServices(services, configuration); + services.TryAddSingleton<IZastavaAuthorityTokenProvider, ZastavaAuthorityTokenProvider>(); + + return services; + } + + private static void ConfigureAuthorityServices(IServiceCollection services, IConfiguration configuration) + { + var authoritySection = configuration.GetSection($"{ZastavaRuntimeOptions.SectionName}:authority"); + var authorityOptions = new ZastavaAuthorityOptions(); + authoritySection.Bind(authorityOptions); + + services.AddStellaOpsAuthClient(options => + { + options.Authority = authorityOptions.Issuer.ToString(); + options.ClientId = authorityOptions.ClientId; + options.ClientSecret = authorityOptions.ClientSecret; + options.AllowOfflineCacheFallback = authorityOptions.AllowStaticTokenFallback; + options.ExpirationSkew = TimeSpan.FromSeconds(Math.Clamp(authorityOptions.RefreshSkewSeconds, 0, 300)); + + options.DefaultScopes.Clear(); + var normalized = new SortedSet<string>(StringComparer.Ordinal); + + if (authorityOptions.Audience is not null) + { + foreach (var audience in authorityOptions.Audience) + { + if (string.IsNullOrWhiteSpace(audience)) + { + continue; + } + + normalized.Add($"aud:{audience.Trim().ToLowerInvariant()}"); + } + } + + if (authorityOptions.Scopes is not null) + { + foreach (var scope in authorityOptions.Scopes) + { + if (!string.IsNullOrWhiteSpace(scope)) + { + normalized.Add(scope.Trim()); + } + } + } + + foreach (var scope in normalized) + { + options.DefaultScopes.Add(scope); + } + }); + } +} diff --git a/src/StellaOps.Zastava.Core/Diagnostics/ZastavaLogScopeBuilder.cs b/src/Zastava/__Libraries/StellaOps.Zastava.Core/Diagnostics/ZastavaLogScopeBuilder.cs similarity index 96% rename from src/StellaOps.Zastava.Core/Diagnostics/ZastavaLogScopeBuilder.cs rename to src/Zastava/__Libraries/StellaOps.Zastava.Core/Diagnostics/ZastavaLogScopeBuilder.cs index f2ca20b8..d0038c88 100644 --- a/src/StellaOps.Zastava.Core/Diagnostics/ZastavaLogScopeBuilder.cs +++ b/src/Zastava/__Libraries/StellaOps.Zastava.Core/Diagnostics/ZastavaLogScopeBuilder.cs @@ -1,90 +1,90 @@ -using System.Linq; -using Microsoft.Extensions.Options; -using StellaOps.Zastava.Core.Configuration; - -namespace StellaOps.Zastava.Core.Diagnostics; - -public interface IZastavaLogScopeBuilder -{ - /// <summary> - /// Builds a deterministic logging scope containing tenant/component metadata. - /// </summary> - IReadOnlyDictionary<string, object?> BuildScope( - string? correlationId = null, - string? node = null, - string? workload = null, - string? eventId = null, - IReadOnlyDictionary<string, string>? additional = null); -} - -internal sealed class ZastavaLogScopeBuilder : IZastavaLogScopeBuilder -{ - private readonly ZastavaRuntimeOptions options; - private readonly IReadOnlyDictionary<string, string> staticScope; - - public ZastavaLogScopeBuilder(IOptions<ZastavaRuntimeOptions> options) - { - ArgumentNullException.ThrowIfNull(options); - this.options = options.Value; - staticScope = (this.options.Logging.StaticScope ?? 
new Dictionary<string, string>(StringComparer.Ordinal)) - .ToImmutableDictionary(pair => pair.Key, pair => pair.Value, StringComparer.Ordinal); - } - - public IReadOnlyDictionary<string, object?> BuildScope( - string? correlationId = null, - string? node = null, - string? workload = null, - string? eventId = null, - IReadOnlyDictionary<string, string>? additional = null) - { - var scope = new Dictionary<string, object?>(StringComparer.Ordinal) - { - ["tenant"] = options.Tenant, - ["component"] = options.Component, - ["environment"] = options.Environment - }; - - if (!string.IsNullOrWhiteSpace(options.Deployment)) - { - scope["deployment"] = options.Deployment; - } - - foreach (var pair in staticScope) - { - scope[pair.Key] = pair.Value; - } - - if (!string.IsNullOrWhiteSpace(correlationId)) - { - scope["correlationId"] = correlationId; - } - - if (!string.IsNullOrWhiteSpace(node)) - { - scope["node"] = node; - } - - if (!string.IsNullOrWhiteSpace(workload)) - { - scope["workload"] = workload; - } - - if (!string.IsNullOrWhiteSpace(eventId)) - { - scope["eventId"] = eventId; - } - - if (additional is not null) - { - foreach (var pair in additional) - { - if (!string.IsNullOrWhiteSpace(pair.Key)) - { - scope[pair.Key] = pair.Value; - } - } - } - - return scope.ToImmutableDictionary(StringComparer.Ordinal); - } -} +using System.Linq; +using Microsoft.Extensions.Options; +using StellaOps.Zastava.Core.Configuration; + +namespace StellaOps.Zastava.Core.Diagnostics; + +public interface IZastavaLogScopeBuilder +{ + /// <summary> + /// Builds a deterministic logging scope containing tenant/component metadata. + /// </summary> + IReadOnlyDictionary<string, object?> BuildScope( + string? correlationId = null, + string? node = null, + string? workload = null, + string? eventId = null, + IReadOnlyDictionary<string, string>? additional = null); +} + +internal sealed class ZastavaLogScopeBuilder : IZastavaLogScopeBuilder +{ + private readonly ZastavaRuntimeOptions options; + private readonly IReadOnlyDictionary<string, string> staticScope; + + public ZastavaLogScopeBuilder(IOptions<ZastavaRuntimeOptions> options) + { + ArgumentNullException.ThrowIfNull(options); + this.options = options.Value; + staticScope = (this.options.Logging.StaticScope ?? new Dictionary<string, string>(StringComparer.Ordinal)) + .ToImmutableDictionary(pair => pair.Key, pair => pair.Value, StringComparer.Ordinal); + } + + public IReadOnlyDictionary<string, object?> BuildScope( + string? correlationId = null, + string? node = null, + string? workload = null, + string? eventId = null, + IReadOnlyDictionary<string, string>? 
additional = null) + { + var scope = new Dictionary<string, object?>(StringComparer.Ordinal) + { + ["tenant"] = options.Tenant, + ["component"] = options.Component, + ["environment"] = options.Environment + }; + + if (!string.IsNullOrWhiteSpace(options.Deployment)) + { + scope["deployment"] = options.Deployment; + } + + foreach (var pair in staticScope) + { + scope[pair.Key] = pair.Value; + } + + if (!string.IsNullOrWhiteSpace(correlationId)) + { + scope["correlationId"] = correlationId; + } + + if (!string.IsNullOrWhiteSpace(node)) + { + scope["node"] = node; + } + + if (!string.IsNullOrWhiteSpace(workload)) + { + scope["workload"] = workload; + } + + if (!string.IsNullOrWhiteSpace(eventId)) + { + scope["eventId"] = eventId; + } + + if (additional is not null) + { + foreach (var pair in additional) + { + if (!string.IsNullOrWhiteSpace(pair.Key)) + { + scope[pair.Key] = pair.Value; + } + } + } + + return scope.ToImmutableDictionary(StringComparer.Ordinal); + } +} diff --git a/src/StellaOps.Zastava.Core/Diagnostics/ZastavaLoggerFactoryOptionsConfigurator.cs b/src/Zastava/__Libraries/StellaOps.Zastava.Core/Diagnostics/ZastavaLoggerFactoryOptionsConfigurator.cs similarity index 97% rename from src/StellaOps.Zastava.Core/Diagnostics/ZastavaLoggerFactoryOptionsConfigurator.cs rename to src/Zastava/__Libraries/StellaOps.Zastava.Core/Diagnostics/ZastavaLoggerFactoryOptionsConfigurator.cs index 5255d968..e068e4b9 100644 --- a/src/StellaOps.Zastava.Core/Diagnostics/ZastavaLoggerFactoryOptionsConfigurator.cs +++ b/src/Zastava/__Libraries/StellaOps.Zastava.Core/Diagnostics/ZastavaLoggerFactoryOptionsConfigurator.cs @@ -1,30 +1,30 @@ -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Zastava.Core.Configuration; - -namespace StellaOps.Zastava.Core.Diagnostics; - -internal sealed class ZastavaLoggerFactoryOptionsConfigurator : IConfigureOptions<LoggerFactoryOptions> -{ - private readonly IOptions<ZastavaRuntimeOptions> options; - - public ZastavaLoggerFactoryOptionsConfigurator(IOptions<ZastavaRuntimeOptions> options) - { - ArgumentNullException.ThrowIfNull(options); - this.options = options; - } - - public void Configure(LoggerFactoryOptions options) - { - ArgumentNullException.ThrowIfNull(options); - var runtimeOptions = this.options.Value; - if (runtimeOptions.Logging.IncludeActivityTracking) - { - options.ActivityTrackingOptions |= ActivityTrackingOptions.TraceId | ActivityTrackingOptions.SpanId | ActivityTrackingOptions.ParentId; - } - else if (runtimeOptions.Logging.IncludeScopes) - { - options.ActivityTrackingOptions |= ActivityTrackingOptions.TraceId | ActivityTrackingOptions.SpanId; - } - } -} +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Zastava.Core.Configuration; + +namespace StellaOps.Zastava.Core.Diagnostics; + +internal sealed class ZastavaLoggerFactoryOptionsConfigurator : IConfigureOptions<LoggerFactoryOptions> +{ + private readonly IOptions<ZastavaRuntimeOptions> options; + + public ZastavaLoggerFactoryOptionsConfigurator(IOptions<ZastavaRuntimeOptions> options) + { + ArgumentNullException.ThrowIfNull(options); + this.options = options; + } + + public void Configure(LoggerFactoryOptions options) + { + ArgumentNullException.ThrowIfNull(options); + var runtimeOptions = this.options.Value; + if (runtimeOptions.Logging.IncludeActivityTracking) + { + options.ActivityTrackingOptions |= ActivityTrackingOptions.TraceId | ActivityTrackingOptions.SpanId | ActivityTrackingOptions.ParentId; + } + else if 
(runtimeOptions.Logging.IncludeScopes) + { + options.ActivityTrackingOptions |= ActivityTrackingOptions.TraceId | ActivityTrackingOptions.SpanId; + } + } +} diff --git a/src/StellaOps.Zastava.Core/Diagnostics/ZastavaRuntimeMetrics.cs b/src/Zastava/__Libraries/StellaOps.Zastava.Core/Diagnostics/ZastavaRuntimeMetrics.cs similarity index 97% rename from src/StellaOps.Zastava.Core/Diagnostics/ZastavaRuntimeMetrics.cs rename to src/Zastava/__Libraries/StellaOps.Zastava.Core/Diagnostics/ZastavaRuntimeMetrics.cs index a485e049..c1a9272b 100644 --- a/src/StellaOps.Zastava.Core/Diagnostics/ZastavaRuntimeMetrics.cs +++ b/src/Zastava/__Libraries/StellaOps.Zastava.Core/Diagnostics/ZastavaRuntimeMetrics.cs @@ -1,78 +1,78 @@ -using System.Linq; -using Microsoft.Extensions.Options; -using StellaOps.Zastava.Core.Configuration; - -namespace StellaOps.Zastava.Core.Diagnostics; - -public interface IZastavaRuntimeMetrics : IDisposable -{ - Meter Meter { get; } - Counter<long> RuntimeEvents { get; } - Counter<long> AdmissionDecisions { get; } - Histogram<double> BackendLatencyMs { get; } - IReadOnlyList<KeyValuePair<string, object?>> DefaultTags { get; } -} - -internal sealed class ZastavaRuntimeMetrics : IZastavaRuntimeMetrics -{ - private readonly Meter meter; - private readonly IReadOnlyList<KeyValuePair<string, object?>> defaultTags; - private readonly bool enabled; - - public ZastavaRuntimeMetrics(IOptions<ZastavaRuntimeOptions> options) - { - ArgumentNullException.ThrowIfNull(options); - var runtimeOptions = options.Value; - var metrics = runtimeOptions.Metrics ?? new ZastavaRuntimeMetricsOptions(); - enabled = metrics.Enabled; - - meter = new Meter(metrics.MeterName, metrics.MeterVersion); - - RuntimeEvents = meter.CreateCounter<long>("zastava.runtime.events.total", unit: "1", description: "Total runtime events emitted by observers."); - AdmissionDecisions = meter.CreateCounter<long>("zastava.admission.decisions.total", unit: "1", description: "Total admission decisions returned by the webhook."); - BackendLatencyMs = meter.CreateHistogram<double>("zastava.runtime.backend.latency.ms", unit: "ms", description: "Round-trip latency to Scanner backend APIs."); - - var baseline = new List<KeyValuePair<string, object?>> - { - new("tenant", runtimeOptions.Tenant), - new("component", runtimeOptions.Component), - new("environment", runtimeOptions.Environment) - }; - - if (!string.IsNullOrWhiteSpace(runtimeOptions.Deployment)) - { - baseline.Add(new("deployment", runtimeOptions.Deployment)); - } - - if (metrics.CommonTags is not null) - { - foreach (var pair in metrics.CommonTags) - { - if (!string.IsNullOrWhiteSpace(pair.Key)) - { - baseline.Add(new(pair.Key, pair.Value)); - } - } - } - - defaultTags = baseline.ToImmutableArray(); - } - - public Meter Meter => meter; - - public Counter<long> RuntimeEvents { get; } - - public Counter<long> AdmissionDecisions { get; } - - public Histogram<double> BackendLatencyMs { get; } - - public IReadOnlyList<KeyValuePair<string, object?>> DefaultTags => defaultTags; - - public void Dispose() - { - if (enabled) - { - meter.Dispose(); - } - } -} +using System.Linq; +using Microsoft.Extensions.Options; +using StellaOps.Zastava.Core.Configuration; + +namespace StellaOps.Zastava.Core.Diagnostics; + +public interface IZastavaRuntimeMetrics : IDisposable +{ + Meter Meter { get; } + Counter<long> RuntimeEvents { get; } + Counter<long> AdmissionDecisions { get; } + Histogram<double> BackendLatencyMs { get; } + IReadOnlyList<KeyValuePair<string, object?>> DefaultTags { get; } +} + 
+internal sealed class ZastavaRuntimeMetrics : IZastavaRuntimeMetrics +{ + private readonly Meter meter; + private readonly IReadOnlyList<KeyValuePair<string, object?>> defaultTags; + private readonly bool enabled; + + public ZastavaRuntimeMetrics(IOptions<ZastavaRuntimeOptions> options) + { + ArgumentNullException.ThrowIfNull(options); + var runtimeOptions = options.Value; + var metrics = runtimeOptions.Metrics ?? new ZastavaRuntimeMetricsOptions(); + enabled = metrics.Enabled; + + meter = new Meter(metrics.MeterName, metrics.MeterVersion); + + RuntimeEvents = meter.CreateCounter<long>("zastava.runtime.events.total", unit: "1", description: "Total runtime events emitted by observers."); + AdmissionDecisions = meter.CreateCounter<long>("zastava.admission.decisions.total", unit: "1", description: "Total admission decisions returned by the webhook."); + BackendLatencyMs = meter.CreateHistogram<double>("zastava.runtime.backend.latency.ms", unit: "ms", description: "Round-trip latency to Scanner backend APIs."); + + var baseline = new List<KeyValuePair<string, object?>> + { + new("tenant", runtimeOptions.Tenant), + new("component", runtimeOptions.Component), + new("environment", runtimeOptions.Environment) + }; + + if (!string.IsNullOrWhiteSpace(runtimeOptions.Deployment)) + { + baseline.Add(new("deployment", runtimeOptions.Deployment)); + } + + if (metrics.CommonTags is not null) + { + foreach (var pair in metrics.CommonTags) + { + if (!string.IsNullOrWhiteSpace(pair.Key)) + { + baseline.Add(new(pair.Key, pair.Value)); + } + } + } + + defaultTags = baseline.ToImmutableArray(); + } + + public Meter Meter => meter; + + public Counter<long> RuntimeEvents { get; } + + public Counter<long> AdmissionDecisions { get; } + + public Histogram<double> BackendLatencyMs { get; } + + public IReadOnlyList<KeyValuePair<string, object?>> DefaultTags => defaultTags; + + public void Dispose() + { + if (enabled) + { + meter.Dispose(); + } + } +} diff --git a/src/StellaOps.Zastava.Core/GlobalUsings.cs b/src/Zastava/__Libraries/StellaOps.Zastava.Core/GlobalUsings.cs similarity index 100% rename from src/StellaOps.Zastava.Core/GlobalUsings.cs rename to src/Zastava/__Libraries/StellaOps.Zastava.Core/GlobalUsings.cs diff --git a/src/StellaOps.Zastava.Core/Hashing/ZastavaHashing.cs b/src/Zastava/__Libraries/StellaOps.Zastava.Core/Hashing/ZastavaHashing.cs similarity index 100% rename from src/StellaOps.Zastava.Core/Hashing/ZastavaHashing.cs rename to src/Zastava/__Libraries/StellaOps.Zastava.Core/Hashing/ZastavaHashing.cs diff --git a/src/StellaOps.Zastava.Core/Properties/AssemblyInfo.cs b/src/Zastava/__Libraries/StellaOps.Zastava.Core/Properties/AssemblyInfo.cs similarity index 97% rename from src/StellaOps.Zastava.Core/Properties/AssemblyInfo.cs rename to src/Zastava/__Libraries/StellaOps.Zastava.Core/Properties/AssemblyInfo.cs index c12e3581..5c6689e2 100644 --- a/src/StellaOps.Zastava.Core/Properties/AssemblyInfo.cs +++ b/src/Zastava/__Libraries/StellaOps.Zastava.Core/Properties/AssemblyInfo.cs @@ -1,3 +1,3 @@ -using System.Runtime.CompilerServices; - -[assembly: InternalsVisibleTo("StellaOps.Zastava.Core.Tests")] +using System.Runtime.CompilerServices; + +[assembly: InternalsVisibleTo("StellaOps.Zastava.Core.Tests")] diff --git a/src/StellaOps.Zastava.Core/Security/IZastavaAuthorityTokenProvider.cs b/src/Zastava/__Libraries/StellaOps.Zastava.Core/Security/IZastavaAuthorityTokenProvider.cs similarity index 96% rename from src/StellaOps.Zastava.Core/Security/IZastavaAuthorityTokenProvider.cs rename to 
src/Zastava/__Libraries/StellaOps.Zastava.Core/Security/IZastavaAuthorityTokenProvider.cs index c7504daa..56c26fdd 100644 --- a/src/StellaOps.Zastava.Core/Security/IZastavaAuthorityTokenProvider.cs +++ b/src/Zastava/__Libraries/StellaOps.Zastava.Core/Security/IZastavaAuthorityTokenProvider.cs @@ -1,14 +1,14 @@ -namespace StellaOps.Zastava.Core.Security; - -public interface IZastavaAuthorityTokenProvider -{ - ValueTask<ZastavaOperationalToken> GetAsync( - string audience, - IEnumerable<string>? additionalScopes = null, - CancellationToken cancellationToken = default); - - ValueTask InvalidateAsync( - string audience, - IEnumerable<string>? additionalScopes = null, - CancellationToken cancellationToken = default); -} +namespace StellaOps.Zastava.Core.Security; + +public interface IZastavaAuthorityTokenProvider +{ + ValueTask<ZastavaOperationalToken> GetAsync( + string audience, + IEnumerable<string>? additionalScopes = null, + CancellationToken cancellationToken = default); + + ValueTask InvalidateAsync( + string audience, + IEnumerable<string>? additionalScopes = null, + CancellationToken cancellationToken = default); +} diff --git a/src/StellaOps.Zastava.Core/Security/ZastavaAuthorityTokenProvider.cs b/src/Zastava/__Libraries/StellaOps.Zastava.Core/Security/ZastavaAuthorityTokenProvider.cs similarity index 97% rename from src/StellaOps.Zastava.Core/Security/ZastavaAuthorityTokenProvider.cs rename to src/Zastava/__Libraries/StellaOps.Zastava.Core/Security/ZastavaAuthorityTokenProvider.cs index 593493b8..e9d919f0 100644 --- a/src/StellaOps.Zastava.Core/Security/ZastavaAuthorityTokenProvider.cs +++ b/src/Zastava/__Libraries/StellaOps.Zastava.Core/Security/ZastavaAuthorityTokenProvider.cs @@ -1,314 +1,314 @@ -using System.Collections.Concurrent; -using System.Globalization; -using System.IO; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.Extensions.Options; -using StellaOps.Auth.Client; -using StellaOps.Zastava.Core.Configuration; -using StellaOps.Zastava.Core.Diagnostics; - -namespace StellaOps.Zastava.Core.Security; - -internal sealed class ZastavaAuthorityTokenProvider : IZastavaAuthorityTokenProvider -{ - private readonly IStellaOpsTokenClient tokenClient; - private readonly IOptionsMonitor<ZastavaRuntimeOptions> optionsMonitor; - private readonly IZastavaLogScopeBuilder scopeBuilder; - private readonly TimeProvider timeProvider; - private readonly ILogger<ZastavaAuthorityTokenProvider> logger; - - private readonly ConcurrentDictionary<string, CacheEntry> cache = new(StringComparer.Ordinal); - private readonly ConcurrentDictionary<string, SemaphoreSlim> locks = new(StringComparer.Ordinal); - private readonly object guardrailLock = new(); - private bool guardrailsLogged; - private ZastavaOperationalToken? staticFallbackToken; - - public ZastavaAuthorityTokenProvider( - IStellaOpsTokenClient tokenClient, - IOptionsMonitor<ZastavaRuntimeOptions> optionsMonitor, - IZastavaLogScopeBuilder scopeBuilder, - TimeProvider? timeProvider = null, - ILogger<ZastavaAuthorityTokenProvider>? logger = null) - { - this.tokenClient = tokenClient ?? throw new ArgumentNullException(nameof(tokenClient)); - this.optionsMonitor = optionsMonitor ?? throw new ArgumentNullException(nameof(optionsMonitor)); - this.scopeBuilder = scopeBuilder ?? throw new ArgumentNullException(nameof(scopeBuilder)); - this.timeProvider = timeProvider ?? TimeProvider.System; - this.logger = logger ?? 
NullLogger<ZastavaAuthorityTokenProvider>.Instance; - } - - public async ValueTask<ZastavaOperationalToken> GetAsync( - string audience, - IEnumerable<string>? additionalScopes = null, - CancellationToken cancellationToken = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(audience); - - var options = optionsMonitor.CurrentValue.Authority; - EnsureGuardrails(options); - - if (options.AllowStaticTokenFallback && TryGetStaticToken(options) is { } staticToken) - { - return staticToken; - } - - var normalizedAudience = NormalizeAudience(audience); - var normalizedScopes = BuildScopes(options, normalizedAudience, additionalScopes); - var cacheKey = BuildCacheKey(normalizedAudience, normalizedScopes); - var refreshSkew = GetRefreshSkew(options); - - if (cache.TryGetValue(cacheKey, out var cached) && !cached.Token.IsExpired(timeProvider, refreshSkew)) - { - return cached.Token; - } - - var mutex = locks.GetOrAdd(cacheKey, static _ => new SemaphoreSlim(1, 1)); - await mutex.WaitAsync(cancellationToken).ConfigureAwait(false); - - try - { - if (cache.TryGetValue(cacheKey, out cached) && !cached.Token.IsExpired(timeProvider, refreshSkew)) - { - return cached.Token; - } - - var scopeString = string.Join(' ', normalizedScopes); - var tokenResult = await tokenClient.RequestClientCredentialsTokenAsync(scopeString, null, cancellationToken).ConfigureAwait(false); - ValidateToken(tokenResult, options, normalizedAudience); - - var token = ZastavaOperationalToken.FromResult( - tokenResult.AccessToken, - tokenResult.TokenType, - tokenResult.ExpiresAtUtc, - tokenResult.Scopes); - - cache[cacheKey] = new CacheEntry(token); - - var scope = scopeBuilder.BuildScope( - correlationId: null, - node: null, - workload: null, - eventId: "authority.token.issue", - additional: new Dictionary<string, string> - { - ["audience"] = normalizedAudience, - ["expiresAt"] = token.ExpiresAtUtc?.ToString("O", CultureInfo.InvariantCulture) ?? "static", - ["scopes"] = scopeString - }); - - using (logger.BeginScope(scope)) - { - logger.LogInformation("Issued runtime OpTok for {Audience} (scopes: {Scopes}).", normalizedAudience, scopeString); - } - - return token; - } - catch (Exception ex) when (options.AllowStaticTokenFallback && TryGetStaticToken(options) is { } fallback) - { - var scope = scopeBuilder.BuildScope( - eventId: "authority.token.fallback", - additional: new Dictionary<string, string> - { - ["audience"] = audience - }); - - using (logger.BeginScope(scope)) - { - logger.LogWarning(ex, "Authority token acquisition failed; using static fallback token."); - } - - return fallback; - } - finally - { - mutex.Release(); - } - } - - public ValueTask InvalidateAsync( - string audience, - IEnumerable<string>? 
additionalScopes = null, - CancellationToken cancellationToken = default) - { - ArgumentException.ThrowIfNullOrWhiteSpace(audience); - - var normalizedAudience = NormalizeAudience(audience); - var normalizedScopes = BuildScopes(optionsMonitor.CurrentValue.Authority, normalizedAudience, additionalScopes); - var cacheKey = BuildCacheKey(normalizedAudience, normalizedScopes); - - cache.TryRemove(cacheKey, out _); - if (locks.TryRemove(cacheKey, out var mutex)) - { - mutex.Dispose(); - } - - var scope = scopeBuilder.BuildScope( - eventId: "authority.token.invalidate", - additional: new Dictionary<string, string> - { - ["audience"] = normalizedAudience, - ["cacheKey"] = cacheKey - }); - - using (logger.BeginScope(scope)) - { - logger.LogInformation("Invalidated runtime OpTok cache entry."); - } - - return ValueTask.CompletedTask; - } - - private void EnsureGuardrails(ZastavaAuthorityOptions options) - { - if (guardrailsLogged) - { - return; - } - - lock (guardrailLock) - { - if (guardrailsLogged) - { - return; - } - - var scope = scopeBuilder.BuildScope(eventId: "authority.guardrails"); - using (logger.BeginScope(scope)) - { - if (!options.RequireMutualTls) - { - logger.LogWarning("Mutual TLS requirement disabled for Authority token acquisition. This should only be used in controlled test environments."); - } - - if (!options.RequireDpop) - { - logger.LogWarning("DPoP requirement disabled for runtime plane. Tokens will be issued without proof-of-possession."); - } - - if (options.AllowStaticTokenFallback) - { - logger.LogWarning("Static Authority token fallback enabled. Ensure bootstrap tokens are rotated frequently."); - } - } - - guardrailsLogged = true; - } - } - - private ZastavaOperationalToken? TryGetStaticToken(ZastavaAuthorityOptions options) - { - if (!options.AllowStaticTokenFallback) - { - return null; - } - - if (options.StaticTokenValue is null && options.StaticTokenPath is null) - { - return null; - } - - if (staticFallbackToken is { } cached) - { - return cached; - } - - lock (guardrailLock) - { - if (staticFallbackToken is { } existing) - { - return existing; - } - - var tokenValue = options.StaticTokenValue; - if (string.IsNullOrWhiteSpace(tokenValue) && !string.IsNullOrWhiteSpace(options.StaticTokenPath)) - { - if (!File.Exists(options.StaticTokenPath)) - { - throw new FileNotFoundException("Static Authority token file not found.", options.StaticTokenPath); - } - - tokenValue = File.ReadAllText(options.StaticTokenPath); - } - - if (string.IsNullOrWhiteSpace(tokenValue)) - { - throw new InvalidOperationException("Static Authority token fallback is enabled but no token value/path is configured."); - } - - staticFallbackToken = ZastavaOperationalToken.FromResult( - tokenValue.Trim(), - tokenType: "Bearer", - expiresAtUtc: null, - scopes: Array.Empty<string>()); - - return staticFallbackToken; - } - } - - private void ValidateToken(StellaOpsTokenResult tokenResult, ZastavaAuthorityOptions options, string normalizedAudience) - { - if (options.RequireDpop && !string.Equals(tokenResult.TokenType, "DPoP", StringComparison.OrdinalIgnoreCase)) - { - throw new InvalidOperationException("Authority returned a token without DPoP token type while RequireDpop is enabled."); - } - - if (tokenResult.Scopes is not null) - { - var audienceScope = $"aud:{normalizedAudience}"; - if (!tokenResult.Scopes.Contains(audienceScope, StringComparer.OrdinalIgnoreCase)) - { - throw new InvalidOperationException($"Authority token missing required audience scope '{audienceScope}'."); - } - } - } - - private 
static string NormalizeAudience(string audience) - => audience.Trim().ToLowerInvariant(); - - private static IReadOnlyList<string> BuildScopes( - ZastavaAuthorityOptions options, - string normalizedAudience, - IEnumerable<string>? additionalScopes) - { - var scopeSet = new SortedSet<string>(StringComparer.Ordinal) - { - $"aud:{normalizedAudience}" - }; - - if (options.Scopes is not null) - { - foreach (var scope in options.Scopes) - { - if (!string.IsNullOrWhiteSpace(scope)) - { - scopeSet.Add(scope.Trim()); - } - } - } - - if (additionalScopes is not null) - { - foreach (var scope in additionalScopes) - { - if (!string.IsNullOrWhiteSpace(scope)) - { - scopeSet.Add(scope.Trim()); - } - } - } - - return scopeSet.ToArray(); - } - - private static string BuildCacheKey(string audience, IReadOnlyList<string> scopes) - => Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes($"{audience}|{string.Join(' ', scopes)}"))); - - private static TimeSpan GetRefreshSkew(ZastavaAuthorityOptions options) - { - var seconds = Math.Clamp(options.RefreshSkewSeconds, 0, 3600); - return TimeSpan.FromSeconds(seconds); - } - - private readonly record struct CacheEntry(ZastavaOperationalToken Token); -} +using System.Collections.Concurrent; +using System.Globalization; +using System.IO; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using StellaOps.Auth.Client; +using StellaOps.Zastava.Core.Configuration; +using StellaOps.Zastava.Core.Diagnostics; + +namespace StellaOps.Zastava.Core.Security; + +internal sealed class ZastavaAuthorityTokenProvider : IZastavaAuthorityTokenProvider +{ + private readonly IStellaOpsTokenClient tokenClient; + private readonly IOptionsMonitor<ZastavaRuntimeOptions> optionsMonitor; + private readonly IZastavaLogScopeBuilder scopeBuilder; + private readonly TimeProvider timeProvider; + private readonly ILogger<ZastavaAuthorityTokenProvider> logger; + + private readonly ConcurrentDictionary<string, CacheEntry> cache = new(StringComparer.Ordinal); + private readonly ConcurrentDictionary<string, SemaphoreSlim> locks = new(StringComparer.Ordinal); + private readonly object guardrailLock = new(); + private bool guardrailsLogged; + private ZastavaOperationalToken? staticFallbackToken; + + public ZastavaAuthorityTokenProvider( + IStellaOpsTokenClient tokenClient, + IOptionsMonitor<ZastavaRuntimeOptions> optionsMonitor, + IZastavaLogScopeBuilder scopeBuilder, + TimeProvider? timeProvider = null, + ILogger<ZastavaAuthorityTokenProvider>? logger = null) + { + this.tokenClient = tokenClient ?? throw new ArgumentNullException(nameof(tokenClient)); + this.optionsMonitor = optionsMonitor ?? throw new ArgumentNullException(nameof(optionsMonitor)); + this.scopeBuilder = scopeBuilder ?? throw new ArgumentNullException(nameof(scopeBuilder)); + this.timeProvider = timeProvider ?? TimeProvider.System; + this.logger = logger ?? NullLogger<ZastavaAuthorityTokenProvider>.Instance; + } + + public async ValueTask<ZastavaOperationalToken> GetAsync( + string audience, + IEnumerable<string>? 
additionalScopes = null, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(audience); + + var options = optionsMonitor.CurrentValue.Authority; + EnsureGuardrails(options); + + if (options.AllowStaticTokenFallback && TryGetStaticToken(options) is { } staticToken) + { + return staticToken; + } + + var normalizedAudience = NormalizeAudience(audience); + var normalizedScopes = BuildScopes(options, normalizedAudience, additionalScopes); + var cacheKey = BuildCacheKey(normalizedAudience, normalizedScopes); + var refreshSkew = GetRefreshSkew(options); + + if (cache.TryGetValue(cacheKey, out var cached) && !cached.Token.IsExpired(timeProvider, refreshSkew)) + { + return cached.Token; + } + + var mutex = locks.GetOrAdd(cacheKey, static _ => new SemaphoreSlim(1, 1)); + await mutex.WaitAsync(cancellationToken).ConfigureAwait(false); + + try + { + if (cache.TryGetValue(cacheKey, out cached) && !cached.Token.IsExpired(timeProvider, refreshSkew)) + { + return cached.Token; + } + + var scopeString = string.Join(' ', normalizedScopes); + var tokenResult = await tokenClient.RequestClientCredentialsTokenAsync(scopeString, null, cancellationToken).ConfigureAwait(false); + ValidateToken(tokenResult, options, normalizedAudience); + + var token = ZastavaOperationalToken.FromResult( + tokenResult.AccessToken, + tokenResult.TokenType, + tokenResult.ExpiresAtUtc, + tokenResult.Scopes); + + cache[cacheKey] = new CacheEntry(token); + + var scope = scopeBuilder.BuildScope( + correlationId: null, + node: null, + workload: null, + eventId: "authority.token.issue", + additional: new Dictionary<string, string> + { + ["audience"] = normalizedAudience, + ["expiresAt"] = token.ExpiresAtUtc?.ToString("O", CultureInfo.InvariantCulture) ?? "static", + ["scopes"] = scopeString + }); + + using (logger.BeginScope(scope)) + { + logger.LogInformation("Issued runtime OpTok for {Audience} (scopes: {Scopes}).", normalizedAudience, scopeString); + } + + return token; + } + catch (Exception ex) when (options.AllowStaticTokenFallback && TryGetStaticToken(options) is { } fallback) + { + var scope = scopeBuilder.BuildScope( + eventId: "authority.token.fallback", + additional: new Dictionary<string, string> + { + ["audience"] = audience + }); + + using (logger.BeginScope(scope)) + { + logger.LogWarning(ex, "Authority token acquisition failed; using static fallback token."); + } + + return fallback; + } + finally + { + mutex.Release(); + } + } + + public ValueTask InvalidateAsync( + string audience, + IEnumerable<string>? 
additionalScopes = null, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrWhiteSpace(audience); + + var normalizedAudience = NormalizeAudience(audience); + var normalizedScopes = BuildScopes(optionsMonitor.CurrentValue.Authority, normalizedAudience, additionalScopes); + var cacheKey = BuildCacheKey(normalizedAudience, normalizedScopes); + + cache.TryRemove(cacheKey, out _); + if (locks.TryRemove(cacheKey, out var mutex)) + { + mutex.Dispose(); + } + + var scope = scopeBuilder.BuildScope( + eventId: "authority.token.invalidate", + additional: new Dictionary<string, string> + { + ["audience"] = normalizedAudience, + ["cacheKey"] = cacheKey + }); + + using (logger.BeginScope(scope)) + { + logger.LogInformation("Invalidated runtime OpTok cache entry."); + } + + return ValueTask.CompletedTask; + } + + private void EnsureGuardrails(ZastavaAuthorityOptions options) + { + if (guardrailsLogged) + { + return; + } + + lock (guardrailLock) + { + if (guardrailsLogged) + { + return; + } + + var scope = scopeBuilder.BuildScope(eventId: "authority.guardrails"); + using (logger.BeginScope(scope)) + { + if (!options.RequireMutualTls) + { + logger.LogWarning("Mutual TLS requirement disabled for Authority token acquisition. This should only be used in controlled test environments."); + } + + if (!options.RequireDpop) + { + logger.LogWarning("DPoP requirement disabled for runtime plane. Tokens will be issued without proof-of-possession."); + } + + if (options.AllowStaticTokenFallback) + { + logger.LogWarning("Static Authority token fallback enabled. Ensure bootstrap tokens are rotated frequently."); + } + } + + guardrailsLogged = true; + } + } + + private ZastavaOperationalToken? TryGetStaticToken(ZastavaAuthorityOptions options) + { + if (!options.AllowStaticTokenFallback) + { + return null; + } + + if (options.StaticTokenValue is null && options.StaticTokenPath is null) + { + return null; + } + + if (staticFallbackToken is { } cached) + { + return cached; + } + + lock (guardrailLock) + { + if (staticFallbackToken is { } existing) + { + return existing; + } + + var tokenValue = options.StaticTokenValue; + if (string.IsNullOrWhiteSpace(tokenValue) && !string.IsNullOrWhiteSpace(options.StaticTokenPath)) + { + if (!File.Exists(options.StaticTokenPath)) + { + throw new FileNotFoundException("Static Authority token file not found.", options.StaticTokenPath); + } + + tokenValue = File.ReadAllText(options.StaticTokenPath); + } + + if (string.IsNullOrWhiteSpace(tokenValue)) + { + throw new InvalidOperationException("Static Authority token fallback is enabled but no token value/path is configured."); + } + + staticFallbackToken = ZastavaOperationalToken.FromResult( + tokenValue.Trim(), + tokenType: "Bearer", + expiresAtUtc: null, + scopes: Array.Empty<string>()); + + return staticFallbackToken; + } + } + + private void ValidateToken(StellaOpsTokenResult tokenResult, ZastavaAuthorityOptions options, string normalizedAudience) + { + if (options.RequireDpop && !string.Equals(tokenResult.TokenType, "DPoP", StringComparison.OrdinalIgnoreCase)) + { + throw new InvalidOperationException("Authority returned a token without DPoP token type while RequireDpop is enabled."); + } + + if (tokenResult.Scopes is not null) + { + var audienceScope = $"aud:{normalizedAudience}"; + if (!tokenResult.Scopes.Contains(audienceScope, StringComparer.OrdinalIgnoreCase)) + { + throw new InvalidOperationException($"Authority token missing required audience scope '{audienceScope}'."); + } + } + } + + private 
static string NormalizeAudience(string audience) + => audience.Trim().ToLowerInvariant(); + + private static IReadOnlyList<string> BuildScopes( + ZastavaAuthorityOptions options, + string normalizedAudience, + IEnumerable<string>? additionalScopes) + { + var scopeSet = new SortedSet<string>(StringComparer.Ordinal) + { + $"aud:{normalizedAudience}" + }; + + if (options.Scopes is not null) + { + foreach (var scope in options.Scopes) + { + if (!string.IsNullOrWhiteSpace(scope)) + { + scopeSet.Add(scope.Trim()); + } + } + } + + if (additionalScopes is not null) + { + foreach (var scope in additionalScopes) + { + if (!string.IsNullOrWhiteSpace(scope)) + { + scopeSet.Add(scope.Trim()); + } + } + } + + return scopeSet.ToArray(); + } + + private static string BuildCacheKey(string audience, IReadOnlyList<string> scopes) + => Convert.ToHexString(SHA256.HashData(Encoding.UTF8.GetBytes($"{audience}|{string.Join(' ', scopes)}"))); + + private static TimeSpan GetRefreshSkew(ZastavaAuthorityOptions options) + { + var seconds = Math.Clamp(options.RefreshSkewSeconds, 0, 3600); + return TimeSpan.FromSeconds(seconds); + } + + private readonly record struct CacheEntry(ZastavaOperationalToken Token); +} diff --git a/src/StellaOps.Zastava.Core/Security/ZastavaOperationalToken.cs b/src/Zastava/__Libraries/StellaOps.Zastava.Core/Security/ZastavaOperationalToken.cs similarity index 96% rename from src/StellaOps.Zastava.Core/Security/ZastavaOperationalToken.cs rename to src/Zastava/__Libraries/StellaOps.Zastava.Core/Security/ZastavaOperationalToken.cs index efd90767..cda91e65 100644 --- a/src/StellaOps.Zastava.Core/Security/ZastavaOperationalToken.cs +++ b/src/Zastava/__Libraries/StellaOps.Zastava.Core/Security/ZastavaOperationalToken.cs @@ -1,70 +1,70 @@ -using System.Collections.ObjectModel; -using System.Linq; - -namespace StellaOps.Zastava.Core.Security; - -public readonly record struct ZastavaOperationalToken( - string AccessToken, - string TokenType, - DateTimeOffset? ExpiresAtUtc, - IReadOnlyList<string> Scopes) -{ - public bool IsExpired(TimeProvider timeProvider, TimeSpan refreshSkew) - { - ArgumentNullException.ThrowIfNull(timeProvider); - - if (ExpiresAtUtc is null) - { - return false; - } - - return timeProvider.GetUtcNow() >= ExpiresAtUtc.Value - refreshSkew; - } - - public static ZastavaOperationalToken FromResult( - string accessToken, - string tokenType, - DateTimeOffset? expiresAtUtc, - IEnumerable<string> scopes) - { - ArgumentException.ThrowIfNullOrWhiteSpace(accessToken); - ArgumentException.ThrowIfNullOrWhiteSpace(tokenType); - - IReadOnlyList<string> normalized = scopes switch - { - null => Array.Empty<string>(), - IReadOnlyList<string> readOnly => readOnly.Count == 0 ? Array.Empty<string>() : readOnly, - ICollection<string> collection => NormalizeCollection(collection), - _ => NormalizeEnumerable(scopes) - }; - - return new ZastavaOperationalToken( - accessToken, - tokenType, - expiresAtUtc, - normalized); - } - - private static IReadOnlyList<string> NormalizeCollection(ICollection<string> collection) - { - if (collection.Count == 0) - { - return Array.Empty<string>(); - } - - if (collection is IReadOnlyList<string> readOnly) - { - return readOnly; - } - - var buffer = new string[collection.Count]; - collection.CopyTo(buffer, 0); - return new ReadOnlyCollection<string>(buffer); - } - - private static IReadOnlyList<string> NormalizeEnumerable(IEnumerable<string> scopes) - { - var buffer = scopes.ToArray(); - return buffer.Length == 0 ? 
Array.Empty<string>() : new ReadOnlyCollection<string>(buffer); - } -} +using System.Collections.ObjectModel; +using System.Linq; + +namespace StellaOps.Zastava.Core.Security; + +public readonly record struct ZastavaOperationalToken( + string AccessToken, + string TokenType, + DateTimeOffset? ExpiresAtUtc, + IReadOnlyList<string> Scopes) +{ + public bool IsExpired(TimeProvider timeProvider, TimeSpan refreshSkew) + { + ArgumentNullException.ThrowIfNull(timeProvider); + + if (ExpiresAtUtc is null) + { + return false; + } + + return timeProvider.GetUtcNow() >= ExpiresAtUtc.Value - refreshSkew; + } + + public static ZastavaOperationalToken FromResult( + string accessToken, + string tokenType, + DateTimeOffset? expiresAtUtc, + IEnumerable<string> scopes) + { + ArgumentException.ThrowIfNullOrWhiteSpace(accessToken); + ArgumentException.ThrowIfNullOrWhiteSpace(tokenType); + + IReadOnlyList<string> normalized = scopes switch + { + null => Array.Empty<string>(), + IReadOnlyList<string> readOnly => readOnly.Count == 0 ? Array.Empty<string>() : readOnly, + ICollection<string> collection => NormalizeCollection(collection), + _ => NormalizeEnumerable(scopes) + }; + + return new ZastavaOperationalToken( + accessToken, + tokenType, + expiresAtUtc, + normalized); + } + + private static IReadOnlyList<string> NormalizeCollection(ICollection<string> collection) + { + if (collection.Count == 0) + { + return Array.Empty<string>(); + } + + if (collection is IReadOnlyList<string> readOnly) + { + return readOnly; + } + + var buffer = new string[collection.Count]; + collection.CopyTo(buffer, 0); + return new ReadOnlyCollection<string>(buffer); + } + + private static IReadOnlyList<string> NormalizeEnumerable(IEnumerable<string> scopes) + { + var buffer = scopes.ToArray(); + return buffer.Length == 0 ? 
Array.Empty<string>() : new ReadOnlyCollection<string>(buffer); + } +} diff --git a/src/StellaOps.Zastava.Core/Serialization/ZastavaCanonicalJsonSerializer.cs b/src/Zastava/__Libraries/StellaOps.Zastava.Core/Serialization/ZastavaCanonicalJsonSerializer.cs similarity index 100% rename from src/StellaOps.Zastava.Core/Serialization/ZastavaCanonicalJsonSerializer.cs rename to src/Zastava/__Libraries/StellaOps.Zastava.Core/Serialization/ZastavaCanonicalJsonSerializer.cs diff --git a/src/StellaOps.Zastava.Core/StellaOps.Zastava.Core.csproj b/src/Zastava/__Libraries/StellaOps.Zastava.Core/StellaOps.Zastava.Core.csproj similarity index 75% rename from src/StellaOps.Zastava.Core/StellaOps.Zastava.Core.csproj rename to src/Zastava/__Libraries/StellaOps.Zastava.Core/StellaOps.Zastava.Core.csproj index 427ec8c2..750c7f31 100644 --- a/src/StellaOps.Zastava.Core/StellaOps.Zastava.Core.csproj +++ b/src/Zastava/__Libraries/StellaOps.Zastava.Core/StellaOps.Zastava.Core.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -14,7 +15,7 @@ <PackageReference Include="Microsoft.Extensions.Diagnostics.Abstractions" Version="10.0.0-rc.2.25502.107" /> </ItemGroup> <ItemGroup> - <ProjectReference Include="..\StellaOps.Authority\StellaOps.Auth.Client\StellaOps.Auth.Client.csproj" /> - <ProjectReference Include="..\StellaOps.Auth.Security\StellaOps.Auth.Security.csproj" /> + <ProjectReference Include="../../../Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOps.Auth.Client.csproj" /> + <ProjectReference Include="../../../__Libraries/StellaOps.Auth.Security/StellaOps.Auth.Security.csproj" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Zastava.Core/TASKS.md b/src/Zastava/__Libraries/StellaOps.Zastava.Core/TASKS.md similarity index 100% rename from src/StellaOps.Zastava.Core/TASKS.md rename to src/Zastava/__Libraries/StellaOps.Zastava.Core/TASKS.md diff --git a/src/StellaOps.Zastava.Core.Tests/Contracts/ZastavaContractVersionsTests.cs b/src/Zastava/__Tests/StellaOps.Zastava.Core.Tests/Contracts/ZastavaContractVersionsTests.cs similarity index 100% rename from src/StellaOps.Zastava.Core.Tests/Contracts/ZastavaContractVersionsTests.cs rename to src/Zastava/__Tests/StellaOps.Zastava.Core.Tests/Contracts/ZastavaContractVersionsTests.cs diff --git a/src/StellaOps.Zastava.Core.Tests/DependencyInjection/ZastavaServiceCollectionExtensionsTests.cs b/src/Zastava/__Tests/StellaOps.Zastava.Core.Tests/DependencyInjection/ZastavaServiceCollectionExtensionsTests.cs similarity index 97% rename from src/StellaOps.Zastava.Core.Tests/DependencyInjection/ZastavaServiceCollectionExtensionsTests.cs rename to src/Zastava/__Tests/StellaOps.Zastava.Core.Tests/DependencyInjection/ZastavaServiceCollectionExtensionsTests.cs index 9906ca40..cdf76b5a 100644 --- a/src/StellaOps.Zastava.Core.Tests/DependencyInjection/ZastavaServiceCollectionExtensionsTests.cs +++ b/src/Zastava/__Tests/StellaOps.Zastava.Core.Tests/DependencyInjection/ZastavaServiceCollectionExtensionsTests.cs @@ -1,122 +1,122 @@ -using System.Linq; -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.DependencyInjection; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Options; -using StellaOps.Zastava.Core.Configuration; -using StellaOps.Zastava.Core.Diagnostics; -using StellaOps.Zastava.Core.Security; - -namespace StellaOps.Zastava.Core.Tests.DependencyInjection; - -public sealed class 
ZastavaServiceCollectionExtensionsTests -{ - [Fact] - public void AddZastavaRuntimeCore_BindsOptionsAndProvidesDiagnostics() - { - var configuration = new ConfigurationBuilder() - .AddInMemoryCollection(new Dictionary<string, string?> - { - ["zastava:runtime:tenant"] = "tenant-42", - ["zastava:runtime:environment"] = "prod", - ["zastava:runtime:deployment"] = "cluster-a", - ["zastava:runtime:metrics:meterName"] = "stellaops.zastava.runtime", - ["zastava:runtime:metrics:meterVersion"] = "2.0.0", - ["zastava:runtime:metrics:commonTags:cluster"] = "prod-cluster", - ["zastava:runtime:logging:staticScope:plane"] = "runtime", - ["zastava:runtime:authority:clientId"] = "zastava-observer", - ["zastava:runtime:authority:audience:0"] = "scanner", - ["zastava:runtime:authority:audience:1"] = "zastava", - ["zastava:runtime:authority:scopes:0"] = "aud:scanner", - ["zastava:runtime:authority:scopes:1"] = "api:scanner.runtime.write", - ["zastava:runtime:authority:allowStaticTokenFallback"] = "false" - }) - .Build(); - - var services = new ServiceCollection(); - services.AddLogging(); - services.AddZastavaRuntimeCore(configuration, componentName: "observer"); - - using var provider = services.BuildServiceProvider(); - - var runtimeOptions = provider.GetRequiredService<IOptions<ZastavaRuntimeOptions>>().Value; - Assert.Equal("tenant-42", runtimeOptions.Tenant); - Assert.Equal("prod", runtimeOptions.Environment); - Assert.Equal("observer", runtimeOptions.Component); - Assert.Equal("cluster-a", runtimeOptions.Deployment); - Assert.Equal("stellaops.zastava.runtime", runtimeOptions.Metrics.MeterName); - Assert.Equal("2.0.0", runtimeOptions.Metrics.MeterVersion); - Assert.Equal("runtime", runtimeOptions.Logging.StaticScope["plane"]); - Assert.Equal("zastava-observer", runtimeOptions.Authority.ClientId); - Assert.Contains("scanner", runtimeOptions.Authority.Audience); - Assert.Contains("zastava", runtimeOptions.Authority.Audience); - Assert.Equal(new[] { "aud:scanner", "api:scanner.runtime.write" }, runtimeOptions.Authority.Scopes); - Assert.False(runtimeOptions.Authority.AllowStaticTokenFallback); - - var scopeBuilder = provider.GetRequiredService<IZastavaLogScopeBuilder>(); - var scope = scopeBuilder.BuildScope( - correlationId: "corr-1", - node: "node-1", - workload: "payments/api", - eventId: "evt-123", - additional: new Dictionary<string, string> - { - ["pod"] = "api-12345" - }); - - Assert.Equal("tenant-42", scope["tenant"]); - Assert.Equal("observer", scope["component"]); - Assert.Equal("prod", scope["environment"]); - Assert.Equal("cluster-a", scope["deployment"]); - Assert.Equal("runtime", scope["plane"]); - Assert.Equal("corr-1", scope["correlationId"]); - Assert.Equal("node-1", scope["node"]); - Assert.Equal("payments/api", scope["workload"]); - Assert.Equal("evt-123", scope["eventId"]); - Assert.Equal("api-12345", scope["pod"]); - - var metrics = provider.GetRequiredService<IZastavaRuntimeMetrics>(); - Assert.Equal("stellaops.zastava.runtime", metrics.Meter.Name); - Assert.Equal("2.0.0", metrics.Meter.Version); - - var authorityProvider = provider.GetRequiredService<IZastavaAuthorityTokenProvider>(); - Assert.NotNull(authorityProvider); - - var defaultTags = metrics.DefaultTags.ToArray(); - Assert.Contains(defaultTags, kvp => kvp.Key == "tenant" && (string?)kvp.Value == "tenant-42"); - Assert.Contains(defaultTags, kvp => kvp.Key == "component" && (string?)kvp.Value == "observer"); - Assert.Contains(defaultTags, kvp => kvp.Key == "environment" && (string?)kvp.Value == "prod"); - 
Assert.Contains(defaultTags, kvp => kvp.Key == "deployment" && (string?)kvp.Value == "cluster-a"); - Assert.Contains(defaultTags, kvp => kvp.Key == "cluster" && (string?)kvp.Value == "prod-cluster"); - - metrics.RuntimeEvents.Add(1, defaultTags); - metrics.AdmissionDecisions.Add(1, defaultTags); - metrics.BackendLatencyMs.Record(12.5, defaultTags); - - var loggerFactoryOptions = provider.GetRequiredService<IOptionsMonitor<LoggerFactoryOptions>>().CurrentValue; - Assert.True(loggerFactoryOptions.ActivityTrackingOptions.HasFlag(ActivityTrackingOptions.TraceId)); - Assert.True(loggerFactoryOptions.ActivityTrackingOptions.HasFlag(ActivityTrackingOptions.SpanId)); - } - - [Fact] - public void AddZastavaRuntimeCore_ThrowsForInvalidTenant() - { - var configuration = new ConfigurationBuilder() - .AddInMemoryCollection(new Dictionary<string, string?> - { - ["zastava:runtime:tenant"] = "", - ["zastava:runtime:environment"] = "prod" - }) - .Build(); - - var services = new ServiceCollection(); - services.AddLogging(); - services.AddZastavaRuntimeCore(configuration, "observer"); - - Assert.Throws<OptionsValidationException>(() => - { - using var provider = services.BuildServiceProvider(); - _ = provider.GetRequiredService<IOptions<ZastavaRuntimeOptions>>().Value; - }); - } -} +using System.Linq; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; +using Microsoft.Extensions.Options; +using StellaOps.Zastava.Core.Configuration; +using StellaOps.Zastava.Core.Diagnostics; +using StellaOps.Zastava.Core.Security; + +namespace StellaOps.Zastava.Core.Tests.DependencyInjection; + +public sealed class ZastavaServiceCollectionExtensionsTests +{ + [Fact] + public void AddZastavaRuntimeCore_BindsOptionsAndProvidesDiagnostics() + { + var configuration = new ConfigurationBuilder() + .AddInMemoryCollection(new Dictionary<string, string?> + { + ["zastava:runtime:tenant"] = "tenant-42", + ["zastava:runtime:environment"] = "prod", + ["zastava:runtime:deployment"] = "cluster-a", + ["zastava:runtime:metrics:meterName"] = "stellaops.zastava.runtime", + ["zastava:runtime:metrics:meterVersion"] = "2.0.0", + ["zastava:runtime:metrics:commonTags:cluster"] = "prod-cluster", + ["zastava:runtime:logging:staticScope:plane"] = "runtime", + ["zastava:runtime:authority:clientId"] = "zastava-observer", + ["zastava:runtime:authority:audience:0"] = "scanner", + ["zastava:runtime:authority:audience:1"] = "zastava", + ["zastava:runtime:authority:scopes:0"] = "aud:scanner", + ["zastava:runtime:authority:scopes:1"] = "api:scanner.runtime.write", + ["zastava:runtime:authority:allowStaticTokenFallback"] = "false" + }) + .Build(); + + var services = new ServiceCollection(); + services.AddLogging(); + services.AddZastavaRuntimeCore(configuration, componentName: "observer"); + + using var provider = services.BuildServiceProvider(); + + var runtimeOptions = provider.GetRequiredService<IOptions<ZastavaRuntimeOptions>>().Value; + Assert.Equal("tenant-42", runtimeOptions.Tenant); + Assert.Equal("prod", runtimeOptions.Environment); + Assert.Equal("observer", runtimeOptions.Component); + Assert.Equal("cluster-a", runtimeOptions.Deployment); + Assert.Equal("stellaops.zastava.runtime", runtimeOptions.Metrics.MeterName); + Assert.Equal("2.0.0", runtimeOptions.Metrics.MeterVersion); + Assert.Equal("runtime", runtimeOptions.Logging.StaticScope["plane"]); + Assert.Equal("zastava-observer", runtimeOptions.Authority.ClientId); + Assert.Contains("scanner", 
runtimeOptions.Authority.Audience); + Assert.Contains("zastava", runtimeOptions.Authority.Audience); + Assert.Equal(new[] { "aud:scanner", "api:scanner.runtime.write" }, runtimeOptions.Authority.Scopes); + Assert.False(runtimeOptions.Authority.AllowStaticTokenFallback); + + var scopeBuilder = provider.GetRequiredService<IZastavaLogScopeBuilder>(); + var scope = scopeBuilder.BuildScope( + correlationId: "corr-1", + node: "node-1", + workload: "payments/api", + eventId: "evt-123", + additional: new Dictionary<string, string> + { + ["pod"] = "api-12345" + }); + + Assert.Equal("tenant-42", scope["tenant"]); + Assert.Equal("observer", scope["component"]); + Assert.Equal("prod", scope["environment"]); + Assert.Equal("cluster-a", scope["deployment"]); + Assert.Equal("runtime", scope["plane"]); + Assert.Equal("corr-1", scope["correlationId"]); + Assert.Equal("node-1", scope["node"]); + Assert.Equal("payments/api", scope["workload"]); + Assert.Equal("evt-123", scope["eventId"]); + Assert.Equal("api-12345", scope["pod"]); + + var metrics = provider.GetRequiredService<IZastavaRuntimeMetrics>(); + Assert.Equal("stellaops.zastava.runtime", metrics.Meter.Name); + Assert.Equal("2.0.0", metrics.Meter.Version); + + var authorityProvider = provider.GetRequiredService<IZastavaAuthorityTokenProvider>(); + Assert.NotNull(authorityProvider); + + var defaultTags = metrics.DefaultTags.ToArray(); + Assert.Contains(defaultTags, kvp => kvp.Key == "tenant" && (string?)kvp.Value == "tenant-42"); + Assert.Contains(defaultTags, kvp => kvp.Key == "component" && (string?)kvp.Value == "observer"); + Assert.Contains(defaultTags, kvp => kvp.Key == "environment" && (string?)kvp.Value == "prod"); + Assert.Contains(defaultTags, kvp => kvp.Key == "deployment" && (string?)kvp.Value == "cluster-a"); + Assert.Contains(defaultTags, kvp => kvp.Key == "cluster" && (string?)kvp.Value == "prod-cluster"); + + metrics.RuntimeEvents.Add(1, defaultTags); + metrics.AdmissionDecisions.Add(1, defaultTags); + metrics.BackendLatencyMs.Record(12.5, defaultTags); + + var loggerFactoryOptions = provider.GetRequiredService<IOptionsMonitor<LoggerFactoryOptions>>().CurrentValue; + Assert.True(loggerFactoryOptions.ActivityTrackingOptions.HasFlag(ActivityTrackingOptions.TraceId)); + Assert.True(loggerFactoryOptions.ActivityTrackingOptions.HasFlag(ActivityTrackingOptions.SpanId)); + } + + [Fact] + public void AddZastavaRuntimeCore_ThrowsForInvalidTenant() + { + var configuration = new ConfigurationBuilder() + .AddInMemoryCollection(new Dictionary<string, string?> + { + ["zastava:runtime:tenant"] = "", + ["zastava:runtime:environment"] = "prod" + }) + .Build(); + + var services = new ServiceCollection(); + services.AddLogging(); + services.AddZastavaRuntimeCore(configuration, "observer"); + + Assert.Throws<OptionsValidationException>(() => + { + using var provider = services.BuildServiceProvider(); + _ = provider.GetRequiredService<IOptions<ZastavaRuntimeOptions>>().Value; + }); + } +} diff --git a/src/StellaOps.Zastava.Core.Tests/Security/ZastavaAuthorityTokenProviderTests.cs b/src/Zastava/__Tests/StellaOps.Zastava.Core.Tests/Security/ZastavaAuthorityTokenProviderTests.cs similarity index 97% rename from src/StellaOps.Zastava.Core.Tests/Security/ZastavaAuthorityTokenProviderTests.cs rename to src/Zastava/__Tests/StellaOps.Zastava.Core.Tests/Security/ZastavaAuthorityTokenProviderTests.cs index ca3628a5..0a5c9c89 100644 --- a/src/StellaOps.Zastava.Core.Tests/Security/ZastavaAuthorityTokenProviderTests.cs +++ 
b/src/Zastava/__Tests/StellaOps.Zastava.Core.Tests/Security/ZastavaAuthorityTokenProviderTests.cs @@ -1,228 +1,228 @@ -using System.Collections.Generic; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.Extensions.Options; -using Microsoft.IdentityModel.Tokens; -using StellaOps.Auth.Client; -using StellaOps.Zastava.Core.Configuration; -using StellaOps.Zastava.Core.Diagnostics; -using StellaOps.Zastava.Core.Security; - -namespace StellaOps.Zastava.Core.Tests.Security; - -public sealed class ZastavaAuthorityTokenProviderTests -{ - [Fact] - public async Task GetAsync_UsesCacheUntilRefreshWindow() - { - var timeProvider = new TestTimeProvider(DateTimeOffset.Parse("2025-10-23T12:00:00Z")); - var runtimeOptions = CreateRuntimeOptions(refreshSkewSeconds: 120); - - var tokenClient = new StubTokenClient(); - tokenClient.EnqueueToken(new StellaOpsTokenResult( - "token-1", - "DPoP", - timeProvider.GetUtcNow() + TimeSpan.FromMinutes(10), - new[] { "aud:scanner", "api:scanner.runtime.write" })); - - tokenClient.EnqueueToken(new StellaOpsTokenResult( - "token-2", - "DPoP", - timeProvider.GetUtcNow() + TimeSpan.FromMinutes(10), - new[] { "aud:scanner", "api:scanner.runtime.write" })); - - var provider = CreateProvider(runtimeOptions, tokenClient, timeProvider); - - var tokenA = await provider.GetAsync("scanner"); - Assert.Equal("token-1", tokenA.AccessToken); - Assert.Equal(1, tokenClient.RequestCount); - - // Move time forward but still before refresh window (refresh skew = 2 minutes) - timeProvider.Advance(TimeSpan.FromMinutes(5)); - var tokenB = await provider.GetAsync("scanner"); - Assert.Equal("token-1", tokenB.AccessToken); - Assert.Equal(1, tokenClient.RequestCount); - - // Cross refresh window to trigger renewal - timeProvider.Advance(TimeSpan.FromMinutes(5)); - var tokenC = await provider.GetAsync("scanner"); - Assert.Equal("token-2", tokenC.AccessToken); - Assert.Equal(2, tokenClient.RequestCount); - } - - [Fact] - public async Task GetAsync_ThrowsWhenMissingAudienceScope() - { - var runtimeOptions = CreateRuntimeOptions(); - var tokenClient = new StubTokenClient(); - tokenClient.EnqueueToken(new StellaOpsTokenResult( - "token", - "DPoP", - DateTimeOffset.UtcNow + TimeSpan.FromMinutes(5), - new[] { "api:scanner.runtime.write" })); - - var provider = CreateProvider(runtimeOptions, tokenClient, new TestTimeProvider(DateTimeOffset.UtcNow)); - - var ex = await Assert.ThrowsAsync<InvalidOperationException>(() => provider.GetAsync("scanner").AsTask()); - Assert.Contains("audience scope", ex.Message, StringComparison.OrdinalIgnoreCase); - } - - [Fact] - public async Task GetAsync_StaticFallbackUsedWhenEnabled() - { - var runtimeOptions = CreateRuntimeOptions(allowFallback: true, staticToken: "static-token", requireDpop: false); - - var tokenClient = new StubTokenClient(); - tokenClient.FailWith(new InvalidOperationException("offline")); - - var provider = CreateProvider(runtimeOptions, tokenClient, new TestTimeProvider(DateTimeOffset.UtcNow)); - - var token = await provider.GetAsync("scanner"); - Assert.Equal("static-token", token.AccessToken); - Assert.Null(token.ExpiresAtUtc); - Assert.Equal(0, tokenClient.RequestCount); - } - - [Fact] - public async Task GetAsync_ThrowsWhenDpopRequiredButTokenTypeIsBearer() - { - var runtimeOptions = CreateRuntimeOptions(requireDpop: true); - - var tokenClient = new StubTokenClient(); - tokenClient.EnqueueToken(new StellaOpsTokenResult( - "token", - "Bearer", - DateTimeOffset.UtcNow + TimeSpan.FromMinutes(5), - new[] { "aud:scanner" })); - - var 
provider = CreateProvider(runtimeOptions, tokenClient, new TestTimeProvider(DateTimeOffset.UtcNow)); - - await Assert.ThrowsAsync<InvalidOperationException>(() => provider.GetAsync("scanner").AsTask()); - } - - private static ZastavaRuntimeOptions CreateRuntimeOptions( - double refreshSkewSeconds = 60, - bool allowFallback = false, - string? staticToken = null, - bool requireDpop = true) - => new() - { - Tenant = "tenant-x", - Environment = "test", - Component = "observer", - Authority = new ZastavaAuthorityOptions - { - Issuer = new Uri("https://authority.internal"), - ClientId = "zastava-runtime", - Audience = new[] { "scanner" }, - Scopes = new[] { "api:scanner.runtime.write" }, - RefreshSkewSeconds = refreshSkewSeconds, - RequireDpop = requireDpop, - RequireMutualTls = true, - AllowStaticTokenFallback = allowFallback, - StaticTokenValue = staticToken - } - }; - - private static ZastavaAuthorityTokenProvider CreateProvider( - ZastavaRuntimeOptions runtimeOptions, - IStellaOpsTokenClient tokenClient, - TimeProvider timeProvider) - { - var optionsMonitor = new StaticOptionsMonitor<ZastavaRuntimeOptions>(runtimeOptions); - var scopeBuilder = new ZastavaLogScopeBuilder(Options.Create(runtimeOptions)); - return new ZastavaAuthorityTokenProvider( - tokenClient, - optionsMonitor, - scopeBuilder, - timeProvider, - NullLogger<ZastavaAuthorityTokenProvider>.Instance); - } - - private sealed class StubTokenClient : IStellaOpsTokenClient - { - private readonly Queue<Func<CancellationToken, Task<StellaOpsTokenResult>>> responses = new(); - private Exception? failure; - - public int RequestCount { get; private set; } - - public IReadOnlyDictionary<string, string>? LastAdditionalParameters { get; private set; } - - public void EnqueueToken(StellaOpsTokenResult result) - => responses.Enqueue(_ => Task.FromResult(result)); - - public void FailWith(Exception exception) - => failure = exception; - - public Task<StellaOpsTokenResult> RequestClientCredentialsTokenAsync(string? scope = null, IReadOnlyDictionary<string, string>? additionalParameters = null, CancellationToken cancellationToken = default) - { - RequestCount++; - LastAdditionalParameters = additionalParameters; - - if (failure is not null) - { - throw failure; - } - - if (responses.TryDequeue(out var factory)) - { - return factory(cancellationToken); - } - - throw new InvalidOperationException("No token responses queued."); - } - - public Task<StellaOpsTokenResult> RequestPasswordTokenAsync(string username, string password, string? scope = null, IReadOnlyDictionary<string, string>? additionalParameters = null, CancellationToken cancellationToken = default) - => throw new NotImplementedException(); - - public Task<JsonWebKeySet> GetJsonWebKeySetAsync(CancellationToken cancellationToken = default) - => throw new NotImplementedException(); - - public ValueTask<StellaOpsTokenCacheEntry?> GetCachedTokenAsync(string key, CancellationToken cancellationToken = default) - => ValueTask.FromResult<StellaOpsTokenCacheEntry?>(null); - - public ValueTask CacheTokenAsync(string key, StellaOpsTokenCacheEntry entry, CancellationToken cancellationToken = default) - => ValueTask.CompletedTask; - - public ValueTask ClearCachedTokenAsync(string key, CancellationToken cancellationToken = default) - => ValueTask.CompletedTask; - } - - private sealed class StaticOptionsMonitor<T> : IOptionsMonitor<T> - { - public StaticOptionsMonitor(T value) - { - CurrentValue = value; - } - - public T CurrentValue { get; } - - public T Get(string? 
name) => CurrentValue; - - public IDisposable OnChange(Action<T, string> listener) => NullDisposable.Instance; - - private sealed class NullDisposable : IDisposable - { - public static readonly NullDisposable Instance = new(); - public void Dispose() - { - } - } - } - - private sealed class TestTimeProvider : TimeProvider - { - private DateTimeOffset current; - - public TestTimeProvider(DateTimeOffset initial) - { - current = initial; - } - - public override DateTimeOffset GetUtcNow() => current; - - public void Advance(TimeSpan delta) - { - current = current.Add(delta); - } - } -} +using System.Collections.Generic; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using Microsoft.IdentityModel.Tokens; +using StellaOps.Auth.Client; +using StellaOps.Zastava.Core.Configuration; +using StellaOps.Zastava.Core.Diagnostics; +using StellaOps.Zastava.Core.Security; + +namespace StellaOps.Zastava.Core.Tests.Security; + +public sealed class ZastavaAuthorityTokenProviderTests +{ + [Fact] + public async Task GetAsync_UsesCacheUntilRefreshWindow() + { + var timeProvider = new TestTimeProvider(DateTimeOffset.Parse("2025-10-23T12:00:00Z")); + var runtimeOptions = CreateRuntimeOptions(refreshSkewSeconds: 120); + + var tokenClient = new StubTokenClient(); + tokenClient.EnqueueToken(new StellaOpsTokenResult( + "token-1", + "DPoP", + timeProvider.GetUtcNow() + TimeSpan.FromMinutes(10), + new[] { "aud:scanner", "api:scanner.runtime.write" })); + + tokenClient.EnqueueToken(new StellaOpsTokenResult( + "token-2", + "DPoP", + timeProvider.GetUtcNow() + TimeSpan.FromMinutes(10), + new[] { "aud:scanner", "api:scanner.runtime.write" })); + + var provider = CreateProvider(runtimeOptions, tokenClient, timeProvider); + + var tokenA = await provider.GetAsync("scanner"); + Assert.Equal("token-1", tokenA.AccessToken); + Assert.Equal(1, tokenClient.RequestCount); + + // Move time forward but still before refresh window (refresh skew = 2 minutes) + timeProvider.Advance(TimeSpan.FromMinutes(5)); + var tokenB = await provider.GetAsync("scanner"); + Assert.Equal("token-1", tokenB.AccessToken); + Assert.Equal(1, tokenClient.RequestCount); + + // Cross refresh window to trigger renewal + timeProvider.Advance(TimeSpan.FromMinutes(5)); + var tokenC = await provider.GetAsync("scanner"); + Assert.Equal("token-2", tokenC.AccessToken); + Assert.Equal(2, tokenClient.RequestCount); + } + + [Fact] + public async Task GetAsync_ThrowsWhenMissingAudienceScope() + { + var runtimeOptions = CreateRuntimeOptions(); + var tokenClient = new StubTokenClient(); + tokenClient.EnqueueToken(new StellaOpsTokenResult( + "token", + "DPoP", + DateTimeOffset.UtcNow + TimeSpan.FromMinutes(5), + new[] { "api:scanner.runtime.write" })); + + var provider = CreateProvider(runtimeOptions, tokenClient, new TestTimeProvider(DateTimeOffset.UtcNow)); + + var ex = await Assert.ThrowsAsync<InvalidOperationException>(() => provider.GetAsync("scanner").AsTask()); + Assert.Contains("audience scope", ex.Message, StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public async Task GetAsync_StaticFallbackUsedWhenEnabled() + { + var runtimeOptions = CreateRuntimeOptions(allowFallback: true, staticToken: "static-token", requireDpop: false); + + var tokenClient = new StubTokenClient(); + tokenClient.FailWith(new InvalidOperationException("offline")); + + var provider = CreateProvider(runtimeOptions, tokenClient, new TestTimeProvider(DateTimeOffset.UtcNow)); + + var token = await provider.GetAsync("scanner"); + 
Assert.Equal("static-token", token.AccessToken); + Assert.Null(token.ExpiresAtUtc); + Assert.Equal(0, tokenClient.RequestCount); + } + + [Fact] + public async Task GetAsync_ThrowsWhenDpopRequiredButTokenTypeIsBearer() + { + var runtimeOptions = CreateRuntimeOptions(requireDpop: true); + + var tokenClient = new StubTokenClient(); + tokenClient.EnqueueToken(new StellaOpsTokenResult( + "token", + "Bearer", + DateTimeOffset.UtcNow + TimeSpan.FromMinutes(5), + new[] { "aud:scanner" })); + + var provider = CreateProvider(runtimeOptions, tokenClient, new TestTimeProvider(DateTimeOffset.UtcNow)); + + await Assert.ThrowsAsync<InvalidOperationException>(() => provider.GetAsync("scanner").AsTask()); + } + + private static ZastavaRuntimeOptions CreateRuntimeOptions( + double refreshSkewSeconds = 60, + bool allowFallback = false, + string? staticToken = null, + bool requireDpop = true) + => new() + { + Tenant = "tenant-x", + Environment = "test", + Component = "observer", + Authority = new ZastavaAuthorityOptions + { + Issuer = new Uri("https://authority.internal"), + ClientId = "zastava-runtime", + Audience = new[] { "scanner" }, + Scopes = new[] { "api:scanner.runtime.write" }, + RefreshSkewSeconds = refreshSkewSeconds, + RequireDpop = requireDpop, + RequireMutualTls = true, + AllowStaticTokenFallback = allowFallback, + StaticTokenValue = staticToken + } + }; + + private static ZastavaAuthorityTokenProvider CreateProvider( + ZastavaRuntimeOptions runtimeOptions, + IStellaOpsTokenClient tokenClient, + TimeProvider timeProvider) + { + var optionsMonitor = new StaticOptionsMonitor<ZastavaRuntimeOptions>(runtimeOptions); + var scopeBuilder = new ZastavaLogScopeBuilder(Options.Create(runtimeOptions)); + return new ZastavaAuthorityTokenProvider( + tokenClient, + optionsMonitor, + scopeBuilder, + timeProvider, + NullLogger<ZastavaAuthorityTokenProvider>.Instance); + } + + private sealed class StubTokenClient : IStellaOpsTokenClient + { + private readonly Queue<Func<CancellationToken, Task<StellaOpsTokenResult>>> responses = new(); + private Exception? failure; + + public int RequestCount { get; private set; } + + public IReadOnlyDictionary<string, string>? LastAdditionalParameters { get; private set; } + + public void EnqueueToken(StellaOpsTokenResult result) + => responses.Enqueue(_ => Task.FromResult(result)); + + public void FailWith(Exception exception) + => failure = exception; + + public Task<StellaOpsTokenResult> RequestClientCredentialsTokenAsync(string? scope = null, IReadOnlyDictionary<string, string>? additionalParameters = null, CancellationToken cancellationToken = default) + { + RequestCount++; + LastAdditionalParameters = additionalParameters; + + if (failure is not null) + { + throw failure; + } + + if (responses.TryDequeue(out var factory)) + { + return factory(cancellationToken); + } + + throw new InvalidOperationException("No token responses queued."); + } + + public Task<StellaOpsTokenResult> RequestPasswordTokenAsync(string username, string password, string? scope = null, IReadOnlyDictionary<string, string>? 
additionalParameters = null, CancellationToken cancellationToken = default) + => throw new NotImplementedException(); + + public Task<JsonWebKeySet> GetJsonWebKeySetAsync(CancellationToken cancellationToken = default) + => throw new NotImplementedException(); + + public ValueTask<StellaOpsTokenCacheEntry?> GetCachedTokenAsync(string key, CancellationToken cancellationToken = default) + => ValueTask.FromResult<StellaOpsTokenCacheEntry?>(null); + + public ValueTask CacheTokenAsync(string key, StellaOpsTokenCacheEntry entry, CancellationToken cancellationToken = default) + => ValueTask.CompletedTask; + + public ValueTask ClearCachedTokenAsync(string key, CancellationToken cancellationToken = default) + => ValueTask.CompletedTask; + } + + private sealed class StaticOptionsMonitor<T> : IOptionsMonitor<T> + { + public StaticOptionsMonitor(T value) + { + CurrentValue = value; + } + + public T CurrentValue { get; } + + public T Get(string? name) => CurrentValue; + + public IDisposable OnChange(Action<T, string> listener) => NullDisposable.Instance; + + private sealed class NullDisposable : IDisposable + { + public static readonly NullDisposable Instance = new(); + public void Dispose() + { + } + } + } + + private sealed class TestTimeProvider : TimeProvider + { + private DateTimeOffset current; + + public TestTimeProvider(DateTimeOffset initial) + { + current = initial; + } + + public override DateTimeOffset GetUtcNow() => current; + + public void Advance(TimeSpan delta) + { + current = current.Add(delta); + } + } +} diff --git a/src/StellaOps.Zastava.Core.Tests/Serialization/ZastavaCanonicalJsonSerializerTests.cs b/src/Zastava/__Tests/StellaOps.Zastava.Core.Tests/Serialization/ZastavaCanonicalJsonSerializerTests.cs similarity index 97% rename from src/StellaOps.Zastava.Core.Tests/Serialization/ZastavaCanonicalJsonSerializerTests.cs rename to src/Zastava/__Tests/StellaOps.Zastava.Core.Tests/Serialization/ZastavaCanonicalJsonSerializerTests.cs index 0336457c..78c7632b 100644 --- a/src/StellaOps.Zastava.Core.Tests/Serialization/ZastavaCanonicalJsonSerializerTests.cs +++ b/src/Zastava/__Tests/StellaOps.Zastava.Core.Tests/Serialization/ZastavaCanonicalJsonSerializerTests.cs @@ -1,195 +1,195 @@ -using System; -using System.Text; -using System.Security.Cryptography; -using StellaOps.Zastava.Core.Contracts; -using StellaOps.Zastava.Core.Hashing; -using StellaOps.Zastava.Core.Serialization; - -namespace StellaOps.Zastava.Core.Tests.Serialization; - -public sealed class ZastavaCanonicalJsonSerializerTests -{ - [Fact] - public void Serialize_RuntimeEventEnvelope_ProducesDeterministicOrdering() - { - var runtimeEvent = new RuntimeEvent - { - EventId = "evt-123", - When = DateTimeOffset.Parse("2025-10-19T12:34:56Z"), - Kind = RuntimeEventKind.ContainerStart, - Tenant = "tenant-01", - Node = "node-a", - Runtime = new RuntimeEngine - { - Engine = "containerd", - Version = "1.7.19" - }, - Workload = new RuntimeWorkload - { - Platform = "kubernetes", - Namespace = "payments", - Pod = "api-7c9fbbd8b7-ktd84", - Container = "api", - ContainerId = "containerd://abc", - ImageRef = "ghcr.io/acme/api@sha256:abcd", - Owner = new RuntimeWorkloadOwner - { - Kind = "Deployment", - Name = "api" - } - }, - Process = new RuntimeProcess - { - Pid = 12345, - Entrypoint = new[] { "/entrypoint.sh", "--serve" }, - EntryTrace = new[] - { - new RuntimeEntryTrace - { - File = "/entrypoint.sh", - Line = 3, - Op = "exec", - Target = "/usr/bin/python3" - } - } - }, - LoadedLibraries = new[] - { - new RuntimeLoadedLibrary - { - Path = 
"/lib/x86_64-linux-gnu/libssl.so.3", - Inode = 123456, - Sha256 = "abc123" - } - }, - Posture = new RuntimePosture - { - ImageSigned = true, - SbomReferrer = "present", - Attestation = new RuntimeAttestation - { - Uuid = "rekor-uuid", - Verified = true - } - }, - Delta = new RuntimeDelta - { - BaselineImageDigest = "sha256:abcd", - ChangedFiles = new[] { "/opt/app/server.py" }, - NewBinaries = new[] - { - new RuntimeNewBinary - { - Path = "/usr/local/bin/helper", - Sha256 = "def456" - } - } - }, - Evidence = new[] - { - new RuntimeEvidence - { - Signal = "procfs.maps", - Value = "/lib/.../libssl.so.3@0x7f..." - } - }, - Annotations = new Dictionary<string, string> - { - ["source"] = "unit-test" - } - }; - - var envelope = RuntimeEventEnvelope.Create(runtimeEvent, ZastavaContractVersions.RuntimeEvent); - var json = ZastavaCanonicalJsonSerializer.Serialize(envelope); - - var expectedOrder = new[] - { - "\"schemaVersion\"", - "\"event\"", - "\"eventId\"", - "\"when\"", - "\"kind\"", - "\"tenant\"", - "\"node\"", - "\"runtime\"", - "\"engine\"", - "\"version\"", - "\"workload\"", - "\"platform\"", - "\"namespace\"", - "\"pod\"", - "\"container\"", - "\"containerId\"", - "\"imageRef\"", - "\"owner\"", - "\"kind\"", - "\"name\"", - "\"process\"", - "\"pid\"", - "\"entrypoint\"", - "\"entryTrace\"", - "\"loadedLibs\"", - "\"posture\"", - "\"imageSigned\"", - "\"sbomReferrer\"", - "\"attestation\"", - "\"uuid\"", - "\"verified\"", - "\"delta\"", - "\"baselineImageDigest\"", - "\"changedFiles\"", - "\"newBinaries\"", - "\"path\"", - "\"sha256\"", - "\"evidence\"", - "\"signal\"", - "\"value\"", - "\"annotations\"", - "\"source\"" - }; - - var cursor = -1; - foreach (var token in expectedOrder) - { - var position = json.IndexOf(token, cursor + 1, StringComparison.Ordinal); - Assert.True(position > cursor, $"Property token {token} not found in the expected order."); - cursor = position; - } - - Assert.DoesNotContain(" ", json, StringComparison.Ordinal); - Assert.StartsWith("{\"schemaVersion\"", json, StringComparison.Ordinal); - Assert.EndsWith("}}", json, StringComparison.Ordinal); - } - - [Fact] - public void ComputeMultihash_ProducesStableBase64UrlDigest() - { - var payloadBytes = Encoding.UTF8.GetBytes("{\"value\":42}"); - var expectedDigestBytes = SHA256.HashData(payloadBytes); - var expected = $"sha256-{Convert.ToBase64String(expectedDigestBytes).TrimEnd('=').Replace('+', '-').Replace('/', '_')}"; - - var hash = ZastavaHashing.ComputeMultihash(new ReadOnlySpan<byte>(payloadBytes)); - - Assert.Equal(expected, hash); - - var sha512 = ZastavaHashing.ComputeMultihash(new ReadOnlySpan<byte>(payloadBytes), "sha512"); - Assert.StartsWith("sha512-", sha512, StringComparison.Ordinal); - } - - [Fact] - public void ComputeMultihash_NormalizesAlgorithmAliases() - { - var bytes = Encoding.UTF8.GetBytes("sample"); - var digestDefault = ZastavaHashing.ComputeMultihash(new ReadOnlySpan<byte>(bytes)); - var digestAlias = ZastavaHashing.ComputeMultihash(new ReadOnlySpan<byte>(bytes), "sha-256"); - - Assert.Equal(digestDefault, digestAlias); - } - - [Fact] - public void ComputeMultihash_UnknownAlgorithm_Throws() - { - var ex = Assert.Throws<NotSupportedException>(() => ZastavaHashing.ComputeMultihash(new ReadOnlySpan<byte>(Array.Empty<byte>()), "unsupported")); - Assert.Contains("unsupported", ex.Message, StringComparison.OrdinalIgnoreCase); - } -} +using System; +using System.Text; +using System.Security.Cryptography; +using StellaOps.Zastava.Core.Contracts; +using StellaOps.Zastava.Core.Hashing; +using 
StellaOps.Zastava.Core.Serialization; + +namespace StellaOps.Zastava.Core.Tests.Serialization; + +public sealed class ZastavaCanonicalJsonSerializerTests +{ + [Fact] + public void Serialize_RuntimeEventEnvelope_ProducesDeterministicOrdering() + { + var runtimeEvent = new RuntimeEvent + { + EventId = "evt-123", + When = DateTimeOffset.Parse("2025-10-19T12:34:56Z"), + Kind = RuntimeEventKind.ContainerStart, + Tenant = "tenant-01", + Node = "node-a", + Runtime = new RuntimeEngine + { + Engine = "containerd", + Version = "1.7.19" + }, + Workload = new RuntimeWorkload + { + Platform = "kubernetes", + Namespace = "payments", + Pod = "api-7c9fbbd8b7-ktd84", + Container = "api", + ContainerId = "containerd://abc", + ImageRef = "ghcr.io/acme/api@sha256:abcd", + Owner = new RuntimeWorkloadOwner + { + Kind = "Deployment", + Name = "api" + } + }, + Process = new RuntimeProcess + { + Pid = 12345, + Entrypoint = new[] { "/entrypoint.sh", "--serve" }, + EntryTrace = new[] + { + new RuntimeEntryTrace + { + File = "/entrypoint.sh", + Line = 3, + Op = "exec", + Target = "/usr/bin/python3" + } + } + }, + LoadedLibraries = new[] + { + new RuntimeLoadedLibrary + { + Path = "/lib/x86_64-linux-gnu/libssl.so.3", + Inode = 123456, + Sha256 = "abc123" + } + }, + Posture = new RuntimePosture + { + ImageSigned = true, + SbomReferrer = "present", + Attestation = new RuntimeAttestation + { + Uuid = "rekor-uuid", + Verified = true + } + }, + Delta = new RuntimeDelta + { + BaselineImageDigest = "sha256:abcd", + ChangedFiles = new[] { "/opt/app/server.py" }, + NewBinaries = new[] + { + new RuntimeNewBinary + { + Path = "/usr/local/bin/helper", + Sha256 = "def456" + } + } + }, + Evidence = new[] + { + new RuntimeEvidence + { + Signal = "procfs.maps", + Value = "/lib/.../libssl.so.3@0x7f..." 
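+                        // Illustrative sketch (an assumption drawn from the ComputeMultihash assertions further
+                        // down in this file, not something this change adds): the multihash is "sha256-" plus the
+                        // unpadded, URL-safe base64 of the SHA-256 digest of the payload bytes, i.e. roughly:
+                        //   var digest = SHA256.HashData(payloadBytes);
+                        //   var multihash = $"sha256-{Convert.ToBase64String(digest).TrimEnd('=').Replace('+', '-').Replace('/', '_')}";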
+ } + }, + Annotations = new Dictionary<string, string> + { + ["source"] = "unit-test" + } + }; + + var envelope = RuntimeEventEnvelope.Create(runtimeEvent, ZastavaContractVersions.RuntimeEvent); + var json = ZastavaCanonicalJsonSerializer.Serialize(envelope); + + var expectedOrder = new[] + { + "\"schemaVersion\"", + "\"event\"", + "\"eventId\"", + "\"when\"", + "\"kind\"", + "\"tenant\"", + "\"node\"", + "\"runtime\"", + "\"engine\"", + "\"version\"", + "\"workload\"", + "\"platform\"", + "\"namespace\"", + "\"pod\"", + "\"container\"", + "\"containerId\"", + "\"imageRef\"", + "\"owner\"", + "\"kind\"", + "\"name\"", + "\"process\"", + "\"pid\"", + "\"entrypoint\"", + "\"entryTrace\"", + "\"loadedLibs\"", + "\"posture\"", + "\"imageSigned\"", + "\"sbomReferrer\"", + "\"attestation\"", + "\"uuid\"", + "\"verified\"", + "\"delta\"", + "\"baselineImageDigest\"", + "\"changedFiles\"", + "\"newBinaries\"", + "\"path\"", + "\"sha256\"", + "\"evidence\"", + "\"signal\"", + "\"value\"", + "\"annotations\"", + "\"source\"" + }; + + var cursor = -1; + foreach (var token in expectedOrder) + { + var position = json.IndexOf(token, cursor + 1, StringComparison.Ordinal); + Assert.True(position > cursor, $"Property token {token} not found in the expected order."); + cursor = position; + } + + Assert.DoesNotContain(" ", json, StringComparison.Ordinal); + Assert.StartsWith("{\"schemaVersion\"", json, StringComparison.Ordinal); + Assert.EndsWith("}}", json, StringComparison.Ordinal); + } + + [Fact] + public void ComputeMultihash_ProducesStableBase64UrlDigest() + { + var payloadBytes = Encoding.UTF8.GetBytes("{\"value\":42}"); + var expectedDigestBytes = SHA256.HashData(payloadBytes); + var expected = $"sha256-{Convert.ToBase64String(expectedDigestBytes).TrimEnd('=').Replace('+', '-').Replace('/', '_')}"; + + var hash = ZastavaHashing.ComputeMultihash(new ReadOnlySpan<byte>(payloadBytes)); + + Assert.Equal(expected, hash); + + var sha512 = ZastavaHashing.ComputeMultihash(new ReadOnlySpan<byte>(payloadBytes), "sha512"); + Assert.StartsWith("sha512-", sha512, StringComparison.Ordinal); + } + + [Fact] + public void ComputeMultihash_NormalizesAlgorithmAliases() + { + var bytes = Encoding.UTF8.GetBytes("sample"); + var digestDefault = ZastavaHashing.ComputeMultihash(new ReadOnlySpan<byte>(bytes)); + var digestAlias = ZastavaHashing.ComputeMultihash(new ReadOnlySpan<byte>(bytes), "sha-256"); + + Assert.Equal(digestDefault, digestAlias); + } + + [Fact] + public void ComputeMultihash_UnknownAlgorithm_Throws() + { + var ex = Assert.Throws<NotSupportedException>(() => ZastavaHashing.ComputeMultihash(new ReadOnlySpan<byte>(Array.Empty<byte>()), "unsupported")); + Assert.Contains("unsupported", ex.Message, StringComparison.OrdinalIgnoreCase); + } +} diff --git a/src/Zastava/__Tests/StellaOps.Zastava.Core.Tests/StellaOps.Zastava.Core.Tests.csproj b/src/Zastava/__Tests/StellaOps.Zastava.Core.Tests/StellaOps.Zastava.Core.Tests.csproj new file mode 100644 index 00000000..d68151e5 --- /dev/null +++ b/src/Zastava/__Tests/StellaOps.Zastava.Core.Tests/StellaOps.Zastava.Core.Tests.csproj @@ -0,0 +1,15 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../__Libraries/StellaOps.Zastava.Core/StellaOps.Zastava.Core.csproj" /> + <ProjectReference 
Include="../../../Authority/StellaOps.Authority/StellaOps.Auth.Client/StellaOps.Auth.Client.csproj" /> + <ProjectReference Include="../../../Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOps.Auth.Abstractions.csproj" /> + <ProjectReference Include="../../../__Libraries/StellaOps.Auth.Security/StellaOps.Auth.Security.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Zastava.Observer.Tests/ContainerRuntimePollerTests.cs b/src/Zastava/__Tests/StellaOps.Zastava.Observer.Tests/ContainerRuntimePollerTests.cs similarity index 100% rename from src/StellaOps.Zastava.Observer.Tests/ContainerRuntimePollerTests.cs rename to src/Zastava/__Tests/StellaOps.Zastava.Observer.Tests/ContainerRuntimePollerTests.cs diff --git a/src/StellaOps.Zastava.Observer.Tests/Posture/RuntimePostureEvaluatorTests.cs b/src/Zastava/__Tests/StellaOps.Zastava.Observer.Tests/Posture/RuntimePostureEvaluatorTests.cs similarity index 100% rename from src/StellaOps.Zastava.Observer.Tests/Posture/RuntimePostureEvaluatorTests.cs rename to src/Zastava/__Tests/StellaOps.Zastava.Observer.Tests/Posture/RuntimePostureEvaluatorTests.cs diff --git a/src/StellaOps.Zastava.Observer.Tests/Runtime/ElfBuildIdReaderTests.cs b/src/Zastava/__Tests/StellaOps.Zastava.Observer.Tests/Runtime/ElfBuildIdReaderTests.cs similarity index 100% rename from src/StellaOps.Zastava.Observer.Tests/Runtime/ElfBuildIdReaderTests.cs rename to src/Zastava/__Tests/StellaOps.Zastava.Observer.Tests/Runtime/ElfBuildIdReaderTests.cs diff --git a/src/StellaOps.Zastava.Observer.Tests/Runtime/RuntimeEventBufferTests.cs b/src/Zastava/__Tests/StellaOps.Zastava.Observer.Tests/Runtime/RuntimeEventBufferTests.cs similarity index 100% rename from src/StellaOps.Zastava.Observer.Tests/Runtime/RuntimeEventBufferTests.cs rename to src/Zastava/__Tests/StellaOps.Zastava.Observer.Tests/Runtime/RuntimeEventBufferTests.cs diff --git a/src/StellaOps.Zastava.Observer.Tests/Runtime/RuntimeProcessCollectorTests.cs b/src/Zastava/__Tests/StellaOps.Zastava.Observer.Tests/Runtime/RuntimeProcessCollectorTests.cs similarity index 100% rename from src/StellaOps.Zastava.Observer.Tests/Runtime/RuntimeProcessCollectorTests.cs rename to src/Zastava/__Tests/StellaOps.Zastava.Observer.Tests/Runtime/RuntimeProcessCollectorTests.cs diff --git a/src/StellaOps.Zastava.Observer.Tests/StellaOps.Zastava.Observer.Tests.csproj b/src/Zastava/__Tests/StellaOps.Zastava.Observer.Tests/StellaOps.Zastava.Observer.Tests.csproj similarity index 66% rename from src/StellaOps.Zastava.Observer.Tests/StellaOps.Zastava.Observer.Tests.csproj rename to src/Zastava/__Tests/StellaOps.Zastava.Observer.Tests/StellaOps.Zastava.Observer.Tests.csproj index 59a74730..355b0fd1 100644 --- a/src/StellaOps.Zastava.Observer.Tests/StellaOps.Zastava.Observer.Tests.csproj +++ b/src/Zastava/__Tests/StellaOps.Zastava.Observer.Tests/StellaOps.Zastava.Observer.Tests.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -7,6 +8,6 @@ <IsPackable>false</IsPackable> </PropertyGroup> <ItemGroup> - <ProjectReference Include="..\StellaOps.Zastava.Observer\StellaOps.Zastava.Observer.csproj" /> + <ProjectReference Include="../../StellaOps.Zastava.Observer/StellaOps.Zastava.Observer.csproj" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Zastava.Observer.Tests/TestSupport/ElfTestFileBuilder.cs 
b/src/Zastava/__Tests/StellaOps.Zastava.Observer.Tests/TestSupport/ElfTestFileBuilder.cs similarity index 100% rename from src/StellaOps.Zastava.Observer.Tests/TestSupport/ElfTestFileBuilder.cs rename to src/Zastava/__Tests/StellaOps.Zastava.Observer.Tests/TestSupport/ElfTestFileBuilder.cs diff --git a/src/StellaOps.Zastava.Observer.Tests/Worker/RuntimeEventFactoryTests.cs b/src/Zastava/__Tests/StellaOps.Zastava.Observer.Tests/Worker/RuntimeEventFactoryTests.cs similarity index 97% rename from src/StellaOps.Zastava.Observer.Tests/Worker/RuntimeEventFactoryTests.cs rename to src/Zastava/__Tests/StellaOps.Zastava.Observer.Tests/Worker/RuntimeEventFactoryTests.cs index 4fde6065..a242a641 100644 --- a/src/StellaOps.Zastava.Observer.Tests/Worker/RuntimeEventFactoryTests.cs +++ b/src/Zastava/__Tests/StellaOps.Zastava.Observer.Tests/Worker/RuntimeEventFactoryTests.cs @@ -1,74 +1,74 @@ -using System; -using System.Collections.Generic; -using StellaOps.Zastava.Core.Contracts; -using StellaOps.Zastava.Observer.Configuration; -using StellaOps.Zastava.Observer.ContainerRuntime; -using StellaOps.Zastava.Observer.ContainerRuntime.Cri; -using StellaOps.Zastava.Observer.Runtime; -using StellaOps.Zastava.Observer.Worker; -using Xunit; - -namespace StellaOps.Zastava.Observer.Tests.Worker; - -public sealed class RuntimeEventFactoryTests -{ - [Fact] - public void Create_AttachesBuildIdFromProcessCapture() - { - var timestamp = DateTimeOffset.UtcNow; - var snapshot = new CriContainerInfo( - Id: "container-a", - PodSandboxId: "sandbox-a", - Name: "api", - Attempt: 1, - Image: "ghcr.io/example/api:1.0", - ImageRef: "ghcr.io/example/api@sha256:deadbeef", - Labels: new Dictionary<string, string> - { - [CriLabelKeys.PodName] = "api-abc", - [CriLabelKeys.PodNamespace] = "payments", - [CriLabelKeys.ContainerName] = "api" - }, - Annotations: new Dictionary<string, string>(), - CreatedAt: timestamp, - StartedAt: timestamp, - FinishedAt: null, - ExitCode: null, - Reason: null, - Message: null, - Pid: 4321); - - var lifecycleEvent = new ContainerLifecycleEvent(ContainerLifecycleEventKind.Start, timestamp, snapshot); - var endpoint = new ContainerRuntimeEndpointOptions - { - Engine = ContainerRuntimeEngine.Containerd, - Endpoint = "unix:///run/containerd/containerd.sock", - Name = "containerd" - }; - var identity = new CriRuntimeIdentity("containerd", "1.7.19", "v1"); - var process = new RuntimeProcess - { - Pid = 4321, - Entrypoint = new[] { "/entrypoint.sh" }, - EntryTrace = Array.Empty<RuntimeEntryTrace>(), - BuildId = "5f0c7c3cb4d9f8a4" - }; - var capture = new RuntimeProcessCapture( - process, - Array.Empty<RuntimeLoadedLibrary>(), - new List<RuntimeEvidence>()); - - var envelope = RuntimeEventFactory.Create( - lifecycleEvent, - endpoint, - identity, - tenant: "tenant-alpha", - nodeName: "node-1", - capture: capture, - posture: null, - additionalEvidence: null); - - Assert.NotNull(envelope.Event.Process); - Assert.Equal("5f0c7c3cb4d9f8a4", envelope.Event.Process!.BuildId); - } -} +using System; +using System.Collections.Generic; +using StellaOps.Zastava.Core.Contracts; +using StellaOps.Zastava.Observer.Configuration; +using StellaOps.Zastava.Observer.ContainerRuntime; +using StellaOps.Zastava.Observer.ContainerRuntime.Cri; +using StellaOps.Zastava.Observer.Runtime; +using StellaOps.Zastava.Observer.Worker; +using Xunit; + +namespace StellaOps.Zastava.Observer.Tests.Worker; + +public sealed class RuntimeEventFactoryTests +{ + [Fact] + public void Create_AttachesBuildIdFromProcessCapture() + { + var timestamp = 
DateTimeOffset.UtcNow; + var snapshot = new CriContainerInfo( + Id: "container-a", + PodSandboxId: "sandbox-a", + Name: "api", + Attempt: 1, + Image: "ghcr.io/example/api:1.0", + ImageRef: "ghcr.io/example/api@sha256:deadbeef", + Labels: new Dictionary<string, string> + { + [CriLabelKeys.PodName] = "api-abc", + [CriLabelKeys.PodNamespace] = "payments", + [CriLabelKeys.ContainerName] = "api" + }, + Annotations: new Dictionary<string, string>(), + CreatedAt: timestamp, + StartedAt: timestamp, + FinishedAt: null, + ExitCode: null, + Reason: null, + Message: null, + Pid: 4321); + + var lifecycleEvent = new ContainerLifecycleEvent(ContainerLifecycleEventKind.Start, timestamp, snapshot); + var endpoint = new ContainerRuntimeEndpointOptions + { + Engine = ContainerRuntimeEngine.Containerd, + Endpoint = "unix:///run/containerd/containerd.sock", + Name = "containerd" + }; + var identity = new CriRuntimeIdentity("containerd", "1.7.19", "v1"); + var process = new RuntimeProcess + { + Pid = 4321, + Entrypoint = new[] { "/entrypoint.sh" }, + EntryTrace = Array.Empty<RuntimeEntryTrace>(), + BuildId = "5f0c7c3cb4d9f8a4" + }; + var capture = new RuntimeProcessCapture( + process, + Array.Empty<RuntimeLoadedLibrary>(), + new List<RuntimeEvidence>()); + + var envelope = RuntimeEventFactory.Create( + lifecycleEvent, + endpoint, + identity, + tenant: "tenant-alpha", + nodeName: "node-1", + capture: capture, + posture: null, + additionalEvidence: null); + + Assert.NotNull(envelope.Event.Process); + Assert.Equal("5f0c7c3cb4d9f8a4", envelope.Event.Process!.BuildId); + } +} diff --git a/src/StellaOps.Zastava.Webhook.Tests/Admission/AdmissionResponseBuilderTests.cs b/src/Zastava/__Tests/StellaOps.Zastava.Webhook.Tests/Admission/AdmissionResponseBuilderTests.cs similarity index 100% rename from src/StellaOps.Zastava.Webhook.Tests/Admission/AdmissionResponseBuilderTests.cs rename to src/Zastava/__Tests/StellaOps.Zastava.Webhook.Tests/Admission/AdmissionResponseBuilderTests.cs diff --git a/src/StellaOps.Zastava.Webhook.Tests/Admission/AdmissionReviewParserTests.cs b/src/Zastava/__Tests/StellaOps.Zastava.Webhook.Tests/Admission/AdmissionReviewParserTests.cs similarity index 100% rename from src/StellaOps.Zastava.Webhook.Tests/Admission/AdmissionReviewParserTests.cs rename to src/Zastava/__Tests/StellaOps.Zastava.Webhook.Tests/Admission/AdmissionReviewParserTests.cs diff --git a/src/StellaOps.Zastava.Webhook.Tests/Admission/RuntimeAdmissionPolicyServiceTests.cs b/src/Zastava/__Tests/StellaOps.Zastava.Webhook.Tests/Admission/RuntimeAdmissionPolicyServiceTests.cs similarity index 100% rename from src/StellaOps.Zastava.Webhook.Tests/Admission/RuntimeAdmissionPolicyServiceTests.cs rename to src/Zastava/__Tests/StellaOps.Zastava.Webhook.Tests/Admission/RuntimeAdmissionPolicyServiceTests.cs diff --git a/src/StellaOps.Zastava.Webhook.Tests/Backend/RuntimePolicyClientTests.cs b/src/Zastava/__Tests/StellaOps.Zastava.Webhook.Tests/Backend/RuntimePolicyClientTests.cs similarity index 97% rename from src/StellaOps.Zastava.Webhook.Tests/Backend/RuntimePolicyClientTests.cs rename to src/Zastava/__Tests/StellaOps.Zastava.Webhook.Tests/Backend/RuntimePolicyClientTests.cs index 7f63fec9..c71367dd 100644 --- a/src/StellaOps.Zastava.Webhook.Tests/Backend/RuntimePolicyClientTests.cs +++ b/src/Zastava/__Tests/StellaOps.Zastava.Webhook.Tests/Backend/RuntimePolicyClientTests.cs @@ -1,198 +1,198 @@ -using System; -using System.Collections.Generic; -using System.Diagnostics.Metrics; -using System.Net; -using System.Net.Http; -using System.Text; 
-using System.Text.Json; -using Xunit; -using Microsoft.Extensions.Logging.Abstractions; -using Microsoft.Extensions.Options; -using StellaOps.Zastava.Core.Configuration; -using StellaOps.Zastava.Core.Diagnostics; -using StellaOps.Zastava.Core.Security; -using StellaOps.Zastava.Webhook.Backend; -using StellaOps.Zastava.Webhook.Configuration; - -namespace StellaOps.Zastava.Webhook.Tests.Backend; - -public sealed class RuntimePolicyClientTests -{ - [Fact] - public async Task EvaluateAsync_SendsDpOpHeaderAndParsesResponse() - { - var requestCapture = new List<HttpRequestMessage>(); - var handler = new StubHttpMessageHandler(message => - { - requestCapture.Add(message); - var response = new HttpResponseMessage(HttpStatusCode.OK) - { - Content = new StringContent(JsonSerializer.Serialize(new - { - ttlSeconds = 120, - results = new - { - image = new - { - signed = true, - hasSbom = true, - policyVerdict = "pass", - reasons = Array.Empty<string>() - } - } - }), Encoding.UTF8, "application/json") - }; - return response; - }); - - var httpClient = new HttpClient(handler) - { - BaseAddress = new Uri("https://scanner.internal") - }; - - var runtimeOptions = Options.Create(new ZastavaRuntimeOptions - { - Tenant = "tenant-1", - Environment = "test", - Component = "webhook", - Authority = new ZastavaAuthorityOptions - { - Audience = new[] { "scanner" }, - Scopes = new[] { "aud:scanner" } - }, - Logging = new ZastavaRuntimeLoggingOptions(), - Metrics = new ZastavaRuntimeMetricsOptions() - }); - - var webhookOptions = Options.Create(new ZastavaWebhookOptions - { - Backend = new ZastavaWebhookBackendOptions - { - BaseAddress = new Uri("https://scanner.internal"), - PolicyPath = "/api/v1/scanner/policy/runtime" - } - }); - - using var metrics = new StubRuntimeMetrics(); - var client = new RuntimePolicyClient( - httpClient, - new StubAuthorityTokenProvider(), - new StaticOptionsMonitor<ZastavaRuntimeOptions>(runtimeOptions.Value), - new StaticOptionsMonitor<ZastavaWebhookOptions>(webhookOptions.Value), - metrics, - NullLogger<RuntimePolicyClient>.Instance); - - var response = await client.EvaluateAsync(new RuntimePolicyRequest - { - Namespace = "payments", - Labels = new Dictionary<string, string> { ["app"] = "api" }, - Images = new[] { "image" } - }); - - Assert.Equal(120, response.TtlSeconds); - Assert.True(response.Results.ContainsKey("image")); - var request = Assert.Single(requestCapture); - Assert.Equal("DPoP", request.Headers.Authorization?.Scheme); - Assert.Equal("runtime-token", request.Headers.Authorization?.Parameter); - Assert.Equal("/api/v1/scanner/policy/runtime", request.RequestUri?.PathAndQuery); - } - - [Fact] - public async Task EvaluateAsync_NonSuccess_ThrowsRuntimePolicyException() - { - var handler = new StubHttpMessageHandler(_ => new HttpResponseMessage(HttpStatusCode.BadGateway) - { - Content = new StringContent("upstream error") - }); - var client = new RuntimePolicyClient( - new HttpClient(handler) { BaseAddress = new Uri("https://scanner.internal") }, - new StubAuthorityTokenProvider(), - new StaticOptionsMonitor<ZastavaRuntimeOptions>(new ZastavaRuntimeOptions - { - Tenant = "tenant", - Environment = "test", - Component = "webhook", - Authority = new ZastavaAuthorityOptions { Audience = new[] { "scanner" } }, - Logging = new ZastavaRuntimeLoggingOptions(), - Metrics = new ZastavaRuntimeMetricsOptions() - }), - new StaticOptionsMonitor<ZastavaWebhookOptions>(new ZastavaWebhookOptions()), - new StubRuntimeMetrics(), - NullLogger<RuntimePolicyClient>.Instance); - - await 
Assert.ThrowsAsync<RuntimePolicyException>(() => client.EvaluateAsync(new RuntimePolicyRequest - { - Namespace = "payments", - Labels = null, - Images = new[] { "image" } - })); - } - - private sealed class StubAuthorityTokenProvider : IZastavaAuthorityTokenProvider - { - public ValueTask InvalidateAsync(string audience, IEnumerable<string>? additionalScopes = null, CancellationToken cancellationToken = default) - => ValueTask.CompletedTask; - - public ValueTask<ZastavaOperationalToken> GetAsync(string audience, IEnumerable<string>? additionalScopes = null, CancellationToken cancellationToken = default) - => ValueTask.FromResult(new ZastavaOperationalToken("runtime-token", "DPoP", DateTimeOffset.UtcNow.AddMinutes(5), Array.Empty<string>())); - } - - private sealed class StubRuntimeMetrics : IZastavaRuntimeMetrics - { - public StubRuntimeMetrics() - { - Meter = new Meter("Test.Zastava.Webhook"); - RuntimeEvents = Meter.CreateCounter<long>("test.events"); - AdmissionDecisions = Meter.CreateCounter<long>("test.decisions"); - BackendLatencyMs = Meter.CreateHistogram<double>("test.backend.latency"); - DefaultTags = Array.Empty<KeyValuePair<string, object?>>(); - } - - public Meter Meter { get; } - - public Counter<long> RuntimeEvents { get; } - - public Counter<long> AdmissionDecisions { get; } - - public Histogram<double> BackendLatencyMs { get; } - - public IReadOnlyList<KeyValuePair<string, object?>> DefaultTags { get; } - - public void Dispose() => Meter.Dispose(); - } - - private sealed class StaticOptionsMonitor<T> : IOptionsMonitor<T> - { - public StaticOptionsMonitor(T value) - { - CurrentValue = value; - } - - public T CurrentValue { get; } - - public T Get(string? name) => CurrentValue; - - public IDisposable OnChange(Action<T, string?> listener) => NullDisposable.Instance; - - private sealed class NullDisposable : IDisposable - { - public static readonly NullDisposable Instance = new(); - public void Dispose() - { - } - } - } - - private sealed class StubHttpMessageHandler : HttpMessageHandler - { - private readonly Func<HttpRequestMessage, HttpResponseMessage> responder; - - public StubHttpMessageHandler(Func<HttpRequestMessage, HttpResponseMessage> responder) - { - this.responder = responder; - } - - protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) - => Task.FromResult(responder(request)); - } -} +using System; +using System.Collections.Generic; +using System.Diagnostics.Metrics; +using System.Net; +using System.Net.Http; +using System.Text; +using System.Text.Json; +using Xunit; +using Microsoft.Extensions.Logging.Abstractions; +using Microsoft.Extensions.Options; +using StellaOps.Zastava.Core.Configuration; +using StellaOps.Zastava.Core.Diagnostics; +using StellaOps.Zastava.Core.Security; +using StellaOps.Zastava.Webhook.Backend; +using StellaOps.Zastava.Webhook.Configuration; + +namespace StellaOps.Zastava.Webhook.Tests.Backend; + +public sealed class RuntimePolicyClientTests +{ + [Fact] + public async Task EvaluateAsync_SendsDpOpHeaderAndParsesResponse() + { + var requestCapture = new List<HttpRequestMessage>(); + var handler = new StubHttpMessageHandler(message => + { + requestCapture.Add(message); + var response = new HttpResponseMessage(HttpStatusCode.OK) + { + Content = new StringContent(JsonSerializer.Serialize(new + { + ttlSeconds = 120, + results = new + { + image = new + { + signed = true, + hasSbom = true, + policyVerdict = "pass", + reasons = Array.Empty<string>() + } + } + }), Encoding.UTF8, 
"application/json") + }; + return response; + }); + + var httpClient = new HttpClient(handler) + { + BaseAddress = new Uri("https://scanner.internal") + }; + + var runtimeOptions = Options.Create(new ZastavaRuntimeOptions + { + Tenant = "tenant-1", + Environment = "test", + Component = "webhook", + Authority = new ZastavaAuthorityOptions + { + Audience = new[] { "scanner" }, + Scopes = new[] { "aud:scanner" } + }, + Logging = new ZastavaRuntimeLoggingOptions(), + Metrics = new ZastavaRuntimeMetricsOptions() + }); + + var webhookOptions = Options.Create(new ZastavaWebhookOptions + { + Backend = new ZastavaWebhookBackendOptions + { + BaseAddress = new Uri("https://scanner.internal"), + PolicyPath = "/api/v1/scanner/policy/runtime" + } + }); + + using var metrics = new StubRuntimeMetrics(); + var client = new RuntimePolicyClient( + httpClient, + new StubAuthorityTokenProvider(), + new StaticOptionsMonitor<ZastavaRuntimeOptions>(runtimeOptions.Value), + new StaticOptionsMonitor<ZastavaWebhookOptions>(webhookOptions.Value), + metrics, + NullLogger<RuntimePolicyClient>.Instance); + + var response = await client.EvaluateAsync(new RuntimePolicyRequest + { + Namespace = "payments", + Labels = new Dictionary<string, string> { ["app"] = "api" }, + Images = new[] { "image" } + }); + + Assert.Equal(120, response.TtlSeconds); + Assert.True(response.Results.ContainsKey("image")); + var request = Assert.Single(requestCapture); + Assert.Equal("DPoP", request.Headers.Authorization?.Scheme); + Assert.Equal("runtime-token", request.Headers.Authorization?.Parameter); + Assert.Equal("/api/v1/scanner/policy/runtime", request.RequestUri?.PathAndQuery); + } + + [Fact] + public async Task EvaluateAsync_NonSuccess_ThrowsRuntimePolicyException() + { + var handler = new StubHttpMessageHandler(_ => new HttpResponseMessage(HttpStatusCode.BadGateway) + { + Content = new StringContent("upstream error") + }); + var client = new RuntimePolicyClient( + new HttpClient(handler) { BaseAddress = new Uri("https://scanner.internal") }, + new StubAuthorityTokenProvider(), + new StaticOptionsMonitor<ZastavaRuntimeOptions>(new ZastavaRuntimeOptions + { + Tenant = "tenant", + Environment = "test", + Component = "webhook", + Authority = new ZastavaAuthorityOptions { Audience = new[] { "scanner" } }, + Logging = new ZastavaRuntimeLoggingOptions(), + Metrics = new ZastavaRuntimeMetricsOptions() + }), + new StaticOptionsMonitor<ZastavaWebhookOptions>(new ZastavaWebhookOptions()), + new StubRuntimeMetrics(), + NullLogger<RuntimePolicyClient>.Instance); + + await Assert.ThrowsAsync<RuntimePolicyException>(() => client.EvaluateAsync(new RuntimePolicyRequest + { + Namespace = "payments", + Labels = null, + Images = new[] { "image" } + })); + } + + private sealed class StubAuthorityTokenProvider : IZastavaAuthorityTokenProvider + { + public ValueTask InvalidateAsync(string audience, IEnumerable<string>? additionalScopes = null, CancellationToken cancellationToken = default) + => ValueTask.CompletedTask; + + public ValueTask<ZastavaOperationalToken> GetAsync(string audience, IEnumerable<string>? 
additionalScopes = null, CancellationToken cancellationToken = default) + => ValueTask.FromResult(new ZastavaOperationalToken("runtime-token", "DPoP", DateTimeOffset.UtcNow.AddMinutes(5), Array.Empty<string>())); + } + + private sealed class StubRuntimeMetrics : IZastavaRuntimeMetrics + { + public StubRuntimeMetrics() + { + Meter = new Meter("Test.Zastava.Webhook"); + RuntimeEvents = Meter.CreateCounter<long>("test.events"); + AdmissionDecisions = Meter.CreateCounter<long>("test.decisions"); + BackendLatencyMs = Meter.CreateHistogram<double>("test.backend.latency"); + DefaultTags = Array.Empty<KeyValuePair<string, object?>>(); + } + + public Meter Meter { get; } + + public Counter<long> RuntimeEvents { get; } + + public Counter<long> AdmissionDecisions { get; } + + public Histogram<double> BackendLatencyMs { get; } + + public IReadOnlyList<KeyValuePair<string, object?>> DefaultTags { get; } + + public void Dispose() => Meter.Dispose(); + } + + private sealed class StaticOptionsMonitor<T> : IOptionsMonitor<T> + { + public StaticOptionsMonitor(T value) + { + CurrentValue = value; + } + + public T CurrentValue { get; } + + public T Get(string? name) => CurrentValue; + + public IDisposable OnChange(Action<T, string?> listener) => NullDisposable.Instance; + + private sealed class NullDisposable : IDisposable + { + public static readonly NullDisposable Instance = new(); + public void Dispose() + { + } + } + } + + private sealed class StubHttpMessageHandler : HttpMessageHandler + { + private readonly Func<HttpRequestMessage, HttpResponseMessage> responder; + + public StubHttpMessageHandler(Func<HttpRequestMessage, HttpResponseMessage> responder) + { + this.responder = responder; + } + + protected override Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken) + => Task.FromResult(responder(request)); + } +} diff --git a/src/StellaOps.Zastava.Webhook.Tests/Certificates/SecretFileCertificateSourceTests.cs b/src/Zastava/__Tests/StellaOps.Zastava.Webhook.Tests/Certificates/SecretFileCertificateSourceTests.cs similarity index 100% rename from src/StellaOps.Zastava.Webhook.Tests/Certificates/SecretFileCertificateSourceTests.cs rename to src/Zastava/__Tests/StellaOps.Zastava.Webhook.Tests/Certificates/SecretFileCertificateSourceTests.cs diff --git a/src/StellaOps.Zastava.Webhook.Tests/Certificates/WebhookCertificateProviderTests.cs b/src/Zastava/__Tests/StellaOps.Zastava.Webhook.Tests/Certificates/WebhookCertificateProviderTests.cs similarity index 100% rename from src/StellaOps.Zastava.Webhook.Tests/Certificates/WebhookCertificateProviderTests.cs rename to src/Zastava/__Tests/StellaOps.Zastava.Webhook.Tests/Certificates/WebhookCertificateProviderTests.cs diff --git a/src/StellaOps.Zastava.Webhook.Tests/StellaOps.Zastava.Webhook.Tests.csproj b/src/Zastava/__Tests/StellaOps.Zastava.Webhook.Tests/StellaOps.Zastava.Webhook.Tests.csproj similarity index 81% rename from src/StellaOps.Zastava.Webhook.Tests/StellaOps.Zastava.Webhook.Tests.csproj rename to src/Zastava/__Tests/StellaOps.Zastava.Webhook.Tests/StellaOps.Zastava.Webhook.Tests.csproj index cbd64140..4bb0d72f 100644 --- a/src/StellaOps.Zastava.Webhook.Tests/StellaOps.Zastava.Webhook.Tests.csproj +++ b/src/Zastava/__Tests/StellaOps.Zastava.Webhook.Tests/StellaOps.Zastava.Webhook.Tests.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -14,6 +15,6 @@ <PackageReference 
Include="xunit.runner.visualstudio" Version="2.8.2" /> </ItemGroup> <ItemGroup> - <ProjectReference Include="..\StellaOps.Zastava.Webhook\StellaOps.Zastava.Webhook.csproj" /> + <ProjectReference Include="../../StellaOps.Zastava.Webhook/StellaOps.Zastava.Webhook.csproj" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Auth.Security/Dpop/DpopNonceConsumeResult.cs b/src/__Libraries/StellaOps.Auth.Security/Dpop/DpopNonceConsumeResult.cs similarity index 100% rename from src/StellaOps.Auth.Security/Dpop/DpopNonceConsumeResult.cs rename to src/__Libraries/StellaOps.Auth.Security/Dpop/DpopNonceConsumeResult.cs diff --git a/src/StellaOps.Auth.Security/Dpop/DpopNonceIssueResult.cs b/src/__Libraries/StellaOps.Auth.Security/Dpop/DpopNonceIssueResult.cs similarity index 100% rename from src/StellaOps.Auth.Security/Dpop/DpopNonceIssueResult.cs rename to src/__Libraries/StellaOps.Auth.Security/Dpop/DpopNonceIssueResult.cs diff --git a/src/StellaOps.Auth.Security/Dpop/DpopNonceUtilities.cs b/src/__Libraries/StellaOps.Auth.Security/Dpop/DpopNonceUtilities.cs similarity index 100% rename from src/StellaOps.Auth.Security/Dpop/DpopNonceUtilities.cs rename to src/__Libraries/StellaOps.Auth.Security/Dpop/DpopNonceUtilities.cs diff --git a/src/StellaOps.Auth.Security/Dpop/DpopProofValidator.cs b/src/__Libraries/StellaOps.Auth.Security/Dpop/DpopProofValidator.cs similarity index 100% rename from src/StellaOps.Auth.Security/Dpop/DpopProofValidator.cs rename to src/__Libraries/StellaOps.Auth.Security/Dpop/DpopProofValidator.cs diff --git a/src/StellaOps.Auth.Security/Dpop/DpopValidationOptions.cs b/src/__Libraries/StellaOps.Auth.Security/Dpop/DpopValidationOptions.cs similarity index 100% rename from src/StellaOps.Auth.Security/Dpop/DpopValidationOptions.cs rename to src/__Libraries/StellaOps.Auth.Security/Dpop/DpopValidationOptions.cs diff --git a/src/StellaOps.Auth.Security/Dpop/DpopValidationResult.cs b/src/__Libraries/StellaOps.Auth.Security/Dpop/DpopValidationResult.cs similarity index 100% rename from src/StellaOps.Auth.Security/Dpop/DpopValidationResult.cs rename to src/__Libraries/StellaOps.Auth.Security/Dpop/DpopValidationResult.cs diff --git a/src/StellaOps.Auth.Security/Dpop/IDpopNonceStore.cs b/src/__Libraries/StellaOps.Auth.Security/Dpop/IDpopNonceStore.cs similarity index 100% rename from src/StellaOps.Auth.Security/Dpop/IDpopNonceStore.cs rename to src/__Libraries/StellaOps.Auth.Security/Dpop/IDpopNonceStore.cs diff --git a/src/StellaOps.Auth.Security/Dpop/IDpopProofValidator.cs b/src/__Libraries/StellaOps.Auth.Security/Dpop/IDpopProofValidator.cs similarity index 100% rename from src/StellaOps.Auth.Security/Dpop/IDpopProofValidator.cs rename to src/__Libraries/StellaOps.Auth.Security/Dpop/IDpopProofValidator.cs diff --git a/src/StellaOps.Auth.Security/Dpop/IDpopReplayCache.cs b/src/__Libraries/StellaOps.Auth.Security/Dpop/IDpopReplayCache.cs similarity index 100% rename from src/StellaOps.Auth.Security/Dpop/IDpopReplayCache.cs rename to src/__Libraries/StellaOps.Auth.Security/Dpop/IDpopReplayCache.cs diff --git a/src/StellaOps.Auth.Security/Dpop/InMemoryDpopNonceStore.cs b/src/__Libraries/StellaOps.Auth.Security/Dpop/InMemoryDpopNonceStore.cs similarity index 100% rename from src/StellaOps.Auth.Security/Dpop/InMemoryDpopNonceStore.cs rename to src/__Libraries/StellaOps.Auth.Security/Dpop/InMemoryDpopNonceStore.cs diff --git a/src/StellaOps.Auth.Security/Dpop/InMemoryDpopReplayCache.cs 
b/src/__Libraries/StellaOps.Auth.Security/Dpop/InMemoryDpopReplayCache.cs similarity index 100% rename from src/StellaOps.Auth.Security/Dpop/InMemoryDpopReplayCache.cs rename to src/__Libraries/StellaOps.Auth.Security/Dpop/InMemoryDpopReplayCache.cs diff --git a/src/StellaOps.Auth.Security/Dpop/RedisDpopNonceStore.cs b/src/__Libraries/StellaOps.Auth.Security/Dpop/RedisDpopNonceStore.cs similarity index 100% rename from src/StellaOps.Auth.Security/Dpop/RedisDpopNonceStore.cs rename to src/__Libraries/StellaOps.Auth.Security/Dpop/RedisDpopNonceStore.cs diff --git a/src/StellaOps.Auth.Security/README.md b/src/__Libraries/StellaOps.Auth.Security/README.md similarity index 100% rename from src/StellaOps.Auth.Security/README.md rename to src/__Libraries/StellaOps.Auth.Security/README.md diff --git a/src/StellaOps.Auth.Security/StellaOps.Auth.Security.csproj b/src/__Libraries/StellaOps.Auth.Security/StellaOps.Auth.Security.csproj similarity index 97% rename from src/StellaOps.Auth.Security/StellaOps.Auth.Security.csproj rename to src/__Libraries/StellaOps.Auth.Security/StellaOps.Auth.Security.csproj index c585f6c7..cc2c96a9 100644 --- a/src/StellaOps.Auth.Security/StellaOps.Auth.Security.csproj +++ b/src/__Libraries/StellaOps.Auth.Security/StellaOps.Auth.Security.csproj @@ -1,38 +1,38 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <PropertyGroup> - <Description>Sender-constrained authentication primitives (DPoP, mTLS) shared across StellaOps services.</Description> - <PackageId>StellaOps.Auth.Security</PackageId> - <Authors>StellaOps</Authors> - <Company>StellaOps</Company> - <PackageTags>stellaops;dpop;mtls;oauth2;security</PackageTags> - <PackageLicenseExpression>AGPL-3.0-or-later</PackageLicenseExpression> - <PackageProjectUrl>https://stella-ops.org</PackageProjectUrl> - <RepositoryUrl>https://git.stella-ops.org/stella-ops.org/git.stella-ops.org</RepositoryUrl> - <RepositoryType>git</RepositoryType> - <PublishRepositoryUrl>true</PublishRepositoryUrl> - <EmbedUntrackedSources>true</EmbedUntrackedSources> - <IncludeSymbols>true</IncludeSymbols> - <SymbolPackageFormat>snupkg</SymbolPackageFormat> - <PackageReadmeFile>README.md</PackageReadmeFile> - <VersionPrefix>1.0.0-preview.1</VersionPrefix> - </PropertyGroup> - <ItemGroup> - <FrameworkReference Include="Microsoft.AspNetCore.App" /> - </ItemGroup> - <ItemGroup> - <PackageReference Include="Microsoft.IdentityModel.Tokens" Version="8.14.0" /> - <PackageReference Include="System.IdentityModel.Tokens.Jwt" Version="7.2.0" /> - <PackageReference Include="StackExchange.Redis" Version="2.8.24" /> - <PackageReference Include="Microsoft.SourceLink.GitLab" Version="8.0.0" PrivateAssets="All" /> - </ItemGroup> - <ItemGroup> - <None Include="README.md" Pack="true" PackagePath="" /> - </ItemGroup> -</Project> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <PropertyGroup> + <Description>Sender-constrained authentication primitives (DPoP, mTLS) shared across StellaOps services.</Description> + <PackageId>StellaOps.Auth.Security</PackageId> + <Authors>StellaOps</Authors> + 
<Company>StellaOps</Company> + <PackageTags>stellaops;dpop;mtls;oauth2;security</PackageTags> + <PackageLicenseExpression>AGPL-3.0-or-later</PackageLicenseExpression> + <PackageProjectUrl>https://stella-ops.org</PackageProjectUrl> + <RepositoryUrl>https://git.stella-ops.org/stella-ops.org/git.stella-ops.org</RepositoryUrl> + <RepositoryType>git</RepositoryType> + <PublishRepositoryUrl>true</PublishRepositoryUrl> + <EmbedUntrackedSources>true</EmbedUntrackedSources> + <IncludeSymbols>true</IncludeSymbols> + <SymbolPackageFormat>snupkg</SymbolPackageFormat> + <PackageReadmeFile>README.md</PackageReadmeFile> + <VersionPrefix>1.0.0-preview.1</VersionPrefix> + </PropertyGroup> + <ItemGroup> + <FrameworkReference Include="Microsoft.AspNetCore.App" /> + </ItemGroup> + <ItemGroup> + <PackageReference Include="Microsoft.IdentityModel.Tokens" Version="8.14.0" /> + <PackageReference Include="System.IdentityModel.Tokens.Jwt" Version="7.2.0" /> + <PackageReference Include="StackExchange.Redis" Version="2.8.24" /> + <PackageReference Include="Microsoft.SourceLink.GitLab" Version="8.0.0" PrivateAssets="All" /> + </ItemGroup> + <ItemGroup> + <None Include="README.md" Pack="true" PackagePath="" /> + </ItemGroup> +</Project> diff --git a/src/StellaOps.Configuration/AuthorityConfigurationDiagnostic.cs b/src/__Libraries/StellaOps.Configuration/AuthorityConfigurationDiagnostic.cs similarity index 100% rename from src/StellaOps.Configuration/AuthorityConfigurationDiagnostic.cs rename to src/__Libraries/StellaOps.Configuration/AuthorityConfigurationDiagnostic.cs diff --git a/src/StellaOps.Configuration/AuthorityPluginConfigurationAnalyzer.cs b/src/__Libraries/StellaOps.Configuration/AuthorityPluginConfigurationAnalyzer.cs similarity index 100% rename from src/StellaOps.Configuration/AuthorityPluginConfigurationAnalyzer.cs rename to src/__Libraries/StellaOps.Configuration/AuthorityPluginConfigurationAnalyzer.cs diff --git a/src/StellaOps.Configuration/AuthorityPluginConfigurationLoader.cs b/src/__Libraries/StellaOps.Configuration/AuthorityPluginConfigurationLoader.cs similarity index 100% rename from src/StellaOps.Configuration/AuthorityPluginConfigurationLoader.cs rename to src/__Libraries/StellaOps.Configuration/AuthorityPluginConfigurationLoader.cs diff --git a/src/StellaOps.Configuration/AuthoritySigningAdditionalKeyOptions.cs b/src/__Libraries/StellaOps.Configuration/AuthoritySigningAdditionalKeyOptions.cs similarity index 100% rename from src/StellaOps.Configuration/AuthoritySigningAdditionalKeyOptions.cs rename to src/__Libraries/StellaOps.Configuration/AuthoritySigningAdditionalKeyOptions.cs diff --git a/src/StellaOps.Configuration/AuthoritySigningOptions.cs b/src/__Libraries/StellaOps.Configuration/AuthoritySigningOptions.cs similarity index 100% rename from src/StellaOps.Configuration/AuthoritySigningOptions.cs rename to src/__Libraries/StellaOps.Configuration/AuthoritySigningOptions.cs diff --git a/src/StellaOps.Configuration/StellaOps.Configuration.csproj b/src/__Libraries/StellaOps.Configuration/StellaOps.Configuration.csproj similarity index 80% rename from src/StellaOps.Configuration/StellaOps.Configuration.csproj rename to src/__Libraries/StellaOps.Configuration/StellaOps.Configuration.csproj index 65eb799b..d54fbbc9 100644 --- a/src/StellaOps.Configuration/StellaOps.Configuration.csproj +++ b/src/__Libraries/StellaOps.Configuration/StellaOps.Configuration.csproj @@ -1,4 +1,5 @@ -<Project Sdk="Microsoft.NET.Sdk"> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> 
<PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -17,8 +18,8 @@ </ItemGroup> <ItemGroup> - <ProjectReference Include="..\StellaOps.Authority\StellaOps.Authority.Plugins.Abstractions\StellaOps.Authority.Plugins.Abstractions.csproj" /> + <ProjectReference Include="../../Authority/StellaOps.Authority/StellaOps.Authority.Plugins.Abstractions/StellaOps.Authority.Plugins.Abstractions.csproj" /> <ProjectReference Include="..\StellaOps.Cryptography\StellaOps.Cryptography.csproj" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Configuration/StellaOpsAuthorityConfiguration.cs b/src/__Libraries/StellaOps.Configuration/StellaOpsAuthorityConfiguration.cs similarity index 100% rename from src/StellaOps.Configuration/StellaOpsAuthorityConfiguration.cs rename to src/__Libraries/StellaOps.Configuration/StellaOpsAuthorityConfiguration.cs diff --git a/src/StellaOps.Configuration/StellaOpsAuthorityOptions.cs b/src/__Libraries/StellaOps.Configuration/StellaOpsAuthorityOptions.cs similarity index 100% rename from src/StellaOps.Configuration/StellaOpsAuthorityOptions.cs rename to src/__Libraries/StellaOps.Configuration/StellaOpsAuthorityOptions.cs diff --git a/src/StellaOps.Configuration/StellaOpsBootstrapOptions.cs b/src/__Libraries/StellaOps.Configuration/StellaOpsBootstrapOptions.cs similarity index 100% rename from src/StellaOps.Configuration/StellaOpsBootstrapOptions.cs rename to src/__Libraries/StellaOps.Configuration/StellaOpsBootstrapOptions.cs diff --git a/src/StellaOps.Configuration/StellaOpsConfigurationBootstrapper.cs b/src/__Libraries/StellaOps.Configuration/StellaOpsConfigurationBootstrapper.cs similarity index 100% rename from src/StellaOps.Configuration/StellaOpsConfigurationBootstrapper.cs rename to src/__Libraries/StellaOps.Configuration/StellaOpsConfigurationBootstrapper.cs diff --git a/src/StellaOps.Configuration/StellaOpsConfigurationContext.cs b/src/__Libraries/StellaOps.Configuration/StellaOpsConfigurationContext.cs similarity index 100% rename from src/StellaOps.Configuration/StellaOpsConfigurationContext.cs rename to src/__Libraries/StellaOps.Configuration/StellaOpsConfigurationContext.cs diff --git a/src/StellaOps.Configuration/StellaOpsConfigurationOptions.cs b/src/__Libraries/StellaOps.Configuration/StellaOpsConfigurationOptions.cs similarity index 100% rename from src/StellaOps.Configuration/StellaOpsConfigurationOptions.cs rename to src/__Libraries/StellaOps.Configuration/StellaOpsConfigurationOptions.cs diff --git a/src/StellaOps.Configuration/StellaOpsOptionsBinder.cs b/src/__Libraries/StellaOps.Configuration/StellaOpsOptionsBinder.cs similarity index 100% rename from src/StellaOps.Configuration/StellaOpsOptionsBinder.cs rename to src/__Libraries/StellaOps.Configuration/StellaOpsOptionsBinder.cs diff --git a/src/StellaOps.Cryptography.DependencyInjection/CryptoProviderRegistryOptions.cs b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoProviderRegistryOptions.cs similarity index 100% rename from src/StellaOps.Cryptography.DependencyInjection/CryptoProviderRegistryOptions.cs rename to src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoProviderRegistryOptions.cs diff --git a/src/StellaOps.Cryptography.DependencyInjection/CryptoServiceCollectionExtensions.cs b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Cryptography.DependencyInjection/CryptoServiceCollectionExtensions.cs rename to 
src/__Libraries/StellaOps.Cryptography.DependencyInjection/CryptoServiceCollectionExtensions.cs diff --git a/src/StellaOps.Cryptography.DependencyInjection/StellaOps.Cryptography.DependencyInjection.csproj b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/StellaOps.Cryptography.DependencyInjection.csproj similarity index 97% rename from src/StellaOps.Cryptography.DependencyInjection/StellaOps.Cryptography.DependencyInjection.csproj rename to src/__Libraries/StellaOps.Cryptography.DependencyInjection/StellaOps.Cryptography.DependencyInjection.csproj index 10a52416..b6e724db 100644 --- a/src/StellaOps.Cryptography.DependencyInjection/StellaOps.Cryptography.DependencyInjection.csproj +++ b/src/__Libraries/StellaOps.Cryptography.DependencyInjection/StellaOps.Cryptography.DependencyInjection.csproj @@ -1,14 +1,14 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Cryptography\StellaOps.Cryptography.csproj" /> - <PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" /> - </ItemGroup> -</Project> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="..\StellaOps.Cryptography\StellaOps.Cryptography.csproj" /> + <PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Options" Version="10.0.0-rc.2.25502.107" /> + </ItemGroup> +</Project> diff --git a/src/StellaOps.Cryptography.Kms/AGENTS.md b/src/__Libraries/StellaOps.Cryptography.Kms/AGENTS.md similarity index 97% rename from src/StellaOps.Cryptography.Kms/AGENTS.md rename to src/__Libraries/StellaOps.Cryptography.Kms/AGENTS.md index 8e7ebbe6..daa8fa83 100644 --- a/src/StellaOps.Cryptography.Kms/AGENTS.md +++ b/src/__Libraries/StellaOps.Cryptography.Kms/AGENTS.md @@ -1,14 +1,14 @@ -# KMS & Key Management Guild Charter - -## Mission -Provide key management abstractions and drivers (file, cloud KMS, HSM, FIDO2) for signing and verification workflows. - -## Scope -- Key store interfaces, secure configuration loading, and audit logging. -- Drivers for file-based development keys, cloud KMS providers, PKCS#11 HSMs, and FIDO2 devices. -- Key rotation, revocation, and attestation for keys used in signing. - -## Definition of Done -- KMS API supports signing, verification, key metadata, rotation, and revocation. -- Drivers pass integration tests and security review. -- CLI/Console can manage keys using these abstractions. +# KMS & Key Management Guild Charter + +## Mission +Provide key management abstractions and drivers (file, cloud KMS, HSM, FIDO2) for signing and verification workflows. + +## Scope +- Key store interfaces, secure configuration loading, and audit logging. +- Drivers for file-based development keys, cloud KMS providers, PKCS#11 HSMs, and FIDO2 devices. +- Key rotation, revocation, and attestation for keys used in signing. 
+ +## Definition of Done +- KMS API supports signing, verification, key metadata, rotation, and revocation. +- Drivers pass integration tests and security review. +- CLI/Console can manage keys using these abstractions. diff --git a/src/StellaOps.Cryptography.Kms/TASKS.md b/src/__Libraries/StellaOps.Cryptography.Kms/TASKS.md similarity index 99% rename from src/StellaOps.Cryptography.Kms/TASKS.md rename to src/__Libraries/StellaOps.Cryptography.Kms/TASKS.md index 2c19b666..204e3bf2 100644 --- a/src/StellaOps.Cryptography.Kms/TASKS.md +++ b/src/__Libraries/StellaOps.Cryptography.Kms/TASKS.md @@ -1,13 +1,13 @@ -# KMS Task Board — Epic 19: Attestor Console - -## Sprint 72 – Abstractions & File Driver -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| KMS-72-001 | TODO | KMS Guild | — | Implement KMS interface (sign, verify, metadata, rotate, revoke) and file-based key driver with encrypted at-rest storage. | Interface + file driver operational; unit tests cover sign/verify/rotation; lint passes. | -| KMS-72-002 | TODO | KMS Guild | KMS-72-001 | Add CLI support for importing/exporting file-based keys with password protection. | CLI commands functional; docs updated; integration tests pass. | - -## Sprint 73 – Cloud & HSM Integration -| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | -|----|--------|----------|------------|-------------|---------------| -| KMS-73-001 | TODO | KMS Guild | KMS-72-001 | Add cloud KMS driver (e.g., AWS KMS, GCP KMS) with signing and key metadata retrieval. | Cloud driver tested with mock; configuration documented; security review sign-off. | -| KMS-73-002 | TODO | KMS Guild | KMS-72-001 | Implement PKCS#11/HSM driver plus FIDO2 signing support for high assurance workflows. | HSM/FIDO2 drivers tested with hardware stubs; error handling documented. | +# KMS Task Board — Epic 19: Attestor Console + +## Sprint 72 – Abstractions & File Driver +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| KMS-72-001 | TODO | KMS Guild | — | Implement KMS interface (sign, verify, metadata, rotate, revoke) and file-based key driver with encrypted at-rest storage. | Interface + file driver operational; unit tests cover sign/verify/rotation; lint passes. | +| KMS-72-002 | TODO | KMS Guild | KMS-72-001 | Add CLI support for importing/exporting file-based keys with password protection. | CLI commands functional; docs updated; integration tests pass. | + +## Sprint 73 – Cloud & HSM Integration +| ID | Status | Owner(s) | Depends on | Description | Exit Criteria | +|----|--------|----------|------------|-------------|---------------| +| KMS-73-001 | TODO | KMS Guild | KMS-72-001 | Add cloud KMS driver (e.g., AWS KMS, GCP KMS) with signing and key metadata retrieval. | Cloud driver tested with mock; configuration documented; security review sign-off. | +| KMS-73-002 | TODO | KMS Guild | KMS-72-001 | Implement PKCS#11/HSM driver plus FIDO2 signing support for high assurance workflows. | HSM/FIDO2 drivers tested with hardware stubs; error handling documented. 
| diff --git a/src/StellaOps.Cryptography.Plugin.BouncyCastle/BouncyCastleCryptoServiceCollectionExtensions.cs b/src/__Libraries/StellaOps.Cryptography.Plugin.BouncyCastle/BouncyCastleCryptoServiceCollectionExtensions.cs similarity index 100% rename from src/StellaOps.Cryptography.Plugin.BouncyCastle/BouncyCastleCryptoServiceCollectionExtensions.cs rename to src/__Libraries/StellaOps.Cryptography.Plugin.BouncyCastle/BouncyCastleCryptoServiceCollectionExtensions.cs diff --git a/src/StellaOps.Cryptography.Plugin.BouncyCastle/BouncyCastleEd25519CryptoProvider.cs b/src/__Libraries/StellaOps.Cryptography.Plugin.BouncyCastle/BouncyCastleEd25519CryptoProvider.cs similarity index 100% rename from src/StellaOps.Cryptography.Plugin.BouncyCastle/BouncyCastleEd25519CryptoProvider.cs rename to src/__Libraries/StellaOps.Cryptography.Plugin.BouncyCastle/BouncyCastleEd25519CryptoProvider.cs diff --git a/src/StellaOps.Cryptography.Plugin.BouncyCastle/StellaOps.Cryptography.Plugin.BouncyCastle.csproj b/src/__Libraries/StellaOps.Cryptography.Plugin.BouncyCastle/StellaOps.Cryptography.Plugin.BouncyCastle.csproj similarity index 97% rename from src/StellaOps.Cryptography.Plugin.BouncyCastle/StellaOps.Cryptography.Plugin.BouncyCastle.csproj rename to src/__Libraries/StellaOps.Cryptography.Plugin.BouncyCastle/StellaOps.Cryptography.Plugin.BouncyCastle.csproj index 64aabe1f..f2b041cf 100644 --- a/src/StellaOps.Cryptography.Plugin.BouncyCastle/StellaOps.Cryptography.Plugin.BouncyCastle.csproj +++ b/src/__Libraries/StellaOps.Cryptography.Plugin.BouncyCastle/StellaOps.Cryptography.Plugin.BouncyCastle.csproj @@ -1,16 +1,16 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <ItemGroup> - <PackageReference Include="BouncyCastle.Cryptography" Version="2.5.1" /> - <PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0-rc.2.25502.107" /> - </ItemGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Cryptography\StellaOps.Cryptography.csproj" /> - </ItemGroup> -</Project> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <ItemGroup> + <PackageReference Include="BouncyCastle.Cryptography" Version="2.5.1" /> + <PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0-rc.2.25502.107" /> + </ItemGroup> + <ItemGroup> + <ProjectReference Include="..\StellaOps.Cryptography\StellaOps.Cryptography.csproj" /> + </ItemGroup> +</Project> diff --git a/src/StellaOps.Cryptography/AGENTS.md b/src/__Libraries/StellaOps.Cryptography/AGENTS.md similarity index 83% rename from src/StellaOps.Cryptography/AGENTS.md rename to src/__Libraries/StellaOps.Cryptography/AGENTS.md index d1ba83d9..ae2fe4e8 100644 --- a/src/StellaOps.Cryptography/AGENTS.md +++ b/src/__Libraries/StellaOps.Cryptography/AGENTS.md @@ -1,22 +1,22 @@ -# Team 8 — Security Guild (Authority & Shared Crypto) - -## Role - -Team 8 owns the end-to-end security posture for StellaOps Authority and its consumers. 
That includes password hashing policy, audit/event hygiene, rate-limit & lockout rules, revocation distribution, and sovereign cryptography abstractions that allow alternative algorithm suites (e.g., GOST) without touching feature code. - -## Operational Boundaries - -- Primary workspace: `src/StellaOps.Cryptography`, `src/StellaOps.Authority.Plugin.Standard`, `src/StellaOps.Authority.Storage.Mongo`, and Authority host (`src/StellaOps.Authority/StellaOps.Authority`). -- Coordinate cross-module changes via TASKS.md updates and PR descriptions. -- Never bypass deterministic behaviour (sorted keys, stable timestamps). -- Tests live alongside owning projects (`*.Tests`). Extend goldens instead of rewriting. - -## Expectations - -- Default to Argon2id (Konscious) for password hashing; PBKDF2 only for legacy verification with transparent rehash on success. -- Emit structured security events with minimal PII and clear correlation IDs. -- Rate-limit `/token` and bootstrap endpoints once CORE8 hooks are available. -- Deliver offline revocation bundles signed with detached JWS and provide a verification script. -- Maintain `docs/security/authority-threat-model.md` and ensure mitigations are tracked. -- All crypto consumption flows through `StellaOps.Cryptography` abstractions to enable sovereign crypto providers. -- Every new cryptographic algorithm, dependency, or acceleration path ships as an `ICryptoProvider` plug-in under `StellaOps.Cryptography.*`; feature code must never bind directly to third-party crypto libraries. +# Team 8 — Security Guild (Authority & Shared Crypto) + +## Role + +Team 8 owns the end-to-end security posture for StellaOps Authority and its consumers. That includes password hashing policy, audit/event hygiene, rate-limit & lockout rules, revocation distribution, and sovereign cryptography abstractions that allow alternative algorithm suites (e.g., GOST) without touching feature code. + +## Operational Boundaries + +- Primary workspace: `src/__Libraries/StellaOps.Cryptography`, `src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard`, `src/Authority/StellaOps.Authority/StellaOps.Authority.Storage.Mongo`, and Authority host (`src/Authority/StellaOps.Authority/StellaOps.Authority`). +- Coordinate cross-module changes via TASKS.md updates and PR descriptions. +- Never bypass deterministic behaviour (sorted keys, stable timestamps). +- Tests live alongside owning projects (`*.Tests`). Extend goldens instead of rewriting. + +## Expectations + +- Default to Argon2id (Konscious) for password hashing; PBKDF2 only for legacy verification with transparent rehash on success. +- Emit structured security events with minimal PII and clear correlation IDs. +- Rate-limit `/token` and bootstrap endpoints once CORE8 hooks are available. +- Deliver offline revocation bundles signed with detached JWS and provide a verification script. +- Maintain `docs/security/authority-threat-model.md` and ensure mitigations are tracked. +- All crypto consumption flows through `StellaOps.Cryptography` abstractions to enable sovereign crypto providers. +- Every new cryptographic algorithm, dependency, or acceleration path ships as an `ICryptoProvider` plug-in under `StellaOps.Cryptography.*`; feature code must never bind directly to third-party crypto libraries. 
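
For reference, a minimal sketch of the verify-then-rehash policy the charter above describes (Argon2id as the default hasher, PBKDF2 kept only for legacy verification with transparent rehash on success). The `IPasswordHasher` shape and the `$argon2id$` prefix check are illustrative assumptions; the real abstractions live in `src/__Libraries/StellaOps.Cryptography` (`PasswordHashing.cs`, `Argon2idPasswordHasher.cs`, `Pbkdf2PasswordHasher.cs`) and may differ.

```csharp
using System;

// Hypothetical hasher shape for illustration only; not the library's actual API.
public interface IPasswordHasher
{
    string Algorithm { get; }                 // e.g. "argon2id" or "pbkdf2"
    string Hash(string password);
    bool Verify(string password, string encodedHash);
}

public sealed class PasswordVerificationFlow
{
    private readonly IPasswordHasher _argon2id; // preferred hasher
    private readonly IPasswordHasher _pbkdf2;   // legacy verification only

    public PasswordVerificationFlow(IPasswordHasher argon2id, IPasswordHasher pbkdf2)
        => (_argon2id, _pbkdf2) = (argon2id, pbkdf2);

    // Verifies a credential and, when the stored hash is still in the legacy
    // PBKDF2 format, returns a fresh Argon2id hash so the caller can persist it.
    public (bool Ok, string? RehashedValue) VerifyAndMaybeRehash(string password, string storedHash)
    {
        if (storedHash.StartsWith("$argon2id$", StringComparison.Ordinal))
            return (_argon2id.Verify(password, storedHash), null);

        // Legacy record: verify with the old hasher, then rehash on success.
        if (!_pbkdf2.Verify(password, storedHash))
            return (false, null);

        return (true, _argon2id.Hash(password));
    }
}
```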
diff --git a/src/StellaOps.Cryptography/Argon2idPasswordHasher.Konscious.cs b/src/__Libraries/StellaOps.Cryptography/Argon2idPasswordHasher.Konscious.cs similarity index 100% rename from src/StellaOps.Cryptography/Argon2idPasswordHasher.Konscious.cs rename to src/__Libraries/StellaOps.Cryptography/Argon2idPasswordHasher.Konscious.cs diff --git a/src/StellaOps.Cryptography/Argon2idPasswordHasher.Sodium.cs b/src/__Libraries/StellaOps.Cryptography/Argon2idPasswordHasher.Sodium.cs similarity index 100% rename from src/StellaOps.Cryptography/Argon2idPasswordHasher.Sodium.cs rename to src/__Libraries/StellaOps.Cryptography/Argon2idPasswordHasher.Sodium.cs diff --git a/src/StellaOps.Cryptography/Argon2idPasswordHasher.cs b/src/__Libraries/StellaOps.Cryptography/Argon2idPasswordHasher.cs similarity index 100% rename from src/StellaOps.Cryptography/Argon2idPasswordHasher.cs rename to src/__Libraries/StellaOps.Cryptography/Argon2idPasswordHasher.cs diff --git a/src/StellaOps.Cryptography/Audit/AuthEventRecord.cs b/src/__Libraries/StellaOps.Cryptography/Audit/AuthEventRecord.cs similarity index 96% rename from src/StellaOps.Cryptography/Audit/AuthEventRecord.cs rename to src/__Libraries/StellaOps.Cryptography/Audit/AuthEventRecord.cs index 10047db1..f547d146 100644 --- a/src/StellaOps.Cryptography/Audit/AuthEventRecord.cs +++ b/src/__Libraries/StellaOps.Cryptography/Audit/AuthEventRecord.cs @@ -1,268 +1,268 @@ -using System; -using System.Collections.Generic; - -namespace StellaOps.Cryptography.Audit; - -/// <summary> -/// Represents a structured security event emitted by the Authority host and plugins. -/// </summary> -public sealed record AuthEventRecord -{ - /// <summary> - /// Canonical event identifier (e.g. <c>authority.password.grant</c>). - /// </summary> - public required string EventType { get; init; } - - /// <summary> - /// UTC timestamp captured when the event occurred. - /// </summary> - public DateTimeOffset OccurredAt { get; init; } = DateTimeOffset.UtcNow; - - /// <summary> - /// Stable correlation identifier that links the event across logs, traces, and persistence. - /// </summary> - public string? CorrelationId { get; init; } - - /// <summary> - /// Outcome classification for the audited operation. - /// </summary> - public AuthEventOutcome Outcome { get; init; } = AuthEventOutcome.Unknown; - - /// <summary> - /// Optional human-readable reason or failure descriptor. - /// </summary> - public string? Reason { get; init; } - - /// <summary> - /// Identity of the end-user (subject) involved in the event, when applicable. - /// </summary> - public AuthEventSubject? Subject { get; init; } - - /// <summary> - /// OAuth/OIDC client metadata associated with the event, when applicable. - /// </summary> - public AuthEventClient? Client { get; init; } - - /// <summary> - /// Tenant identifier associated with the authenticated principal or client. - /// </summary> - public ClassifiedString Tenant { get; init; } = ClassifiedString.Empty; - - /// <summary> - /// Project identifier associated with the authenticated principal or client (optional). - /// </summary> - public ClassifiedString Project { get; init; } = ClassifiedString.Empty; - - /// <summary> - /// Granted or requested scopes tied to the event. - /// </summary> - public IReadOnlyList<string> Scopes { get; init; } = Array.Empty<string>(); - - /// <summary> - /// Network attributes (remote IP, forwarded headers, user agent) captured for the request. - /// </summary> - public AuthEventNetwork? 
Network { get; init; } - - /// <summary> - /// Additional classified properties carried with the event. - /// </summary> - public IReadOnlyList<AuthEventProperty> Properties { get; init; } = Array.Empty<AuthEventProperty>(); -} - -/// <summary> -/// Describes the outcome of an audited flow. -/// </summary> -public enum AuthEventOutcome -{ - /// <summary> - /// Outcome has not been set. - /// </summary> - Unknown = 0, - - /// <summary> - /// Operation succeeded. - /// </summary> - Success, - - /// <summary> - /// Operation failed (invalid credentials, configuration issues, etc.). - /// </summary> - Failure, - - /// <summary> - /// Operation failed due to a lockout policy. - /// </summary> - LockedOut, - - /// <summary> - /// Operation was rejected due to rate limiting or throttling. - /// </summary> - RateLimited, - - /// <summary> - /// Operation encountered an unexpected error. - /// </summary> - Error -} - -/// <summary> -/// Represents a string value enriched with a data classification tag. -/// </summary> -public readonly record struct ClassifiedString(string? Value, AuthEventDataClassification Classification) -{ - /// <summary> - /// An empty classified string. - /// </summary> - public static ClassifiedString Empty => new(null, AuthEventDataClassification.None); - - /// <summary> - /// Indicates whether the classified string carries a non-empty value. - /// </summary> - public bool HasValue => !string.IsNullOrWhiteSpace(Value); - - /// <summary> - /// Creates a classified string for public/non-sensitive data. - /// </summary> - public static ClassifiedString Public(string? value) => Create(value, AuthEventDataClassification.None); - - /// <summary> - /// Creates a classified string tagged as personally identifiable information (PII). - /// </summary> - public static ClassifiedString Personal(string? value) => Create(value, AuthEventDataClassification.Personal); - - /// <summary> - /// Creates a classified string tagged as sensitive (e.g. credentials, secrets). - /// </summary> - public static ClassifiedString Sensitive(string? value) => Create(value, AuthEventDataClassification.Sensitive); - - private static ClassifiedString Create(string? value, AuthEventDataClassification classification) - { - return new ClassifiedString(Normalize(value), classification); - } - - private static string? Normalize(string? value) - { - return string.IsNullOrWhiteSpace(value) ? null : value; - } -} - -/// <summary> -/// Supported classifications for audit data values. -/// </summary> -public enum AuthEventDataClassification -{ - /// <summary> - /// Data is not considered sensitive. - /// </summary> - None = 0, - - /// <summary> - /// Personally identifiable information (PII) that warrants redaction in certain sinks. - /// </summary> - Personal, - - /// <summary> - /// Highly sensitive information (credentials, secrets, tokens). - /// </summary> - Sensitive -} - -/// <summary> -/// Captures subject metadata for an audit event. -/// </summary> -public sealed record AuthEventSubject -{ - /// <summary> - /// Stable subject identifier (PII). - /// </summary> - public ClassifiedString SubjectId { get; init; } = ClassifiedString.Empty; - - /// <summary> - /// Username or login name (PII). - /// </summary> - public ClassifiedString Username { get; init; } = ClassifiedString.Empty; - - /// <summary> - /// Optional display name (PII). 
- /// </summary> - public ClassifiedString DisplayName { get; init; } = ClassifiedString.Empty; - - /// <summary> - /// Optional plugin or tenant realm controlling the subject namespace. - /// </summary> - public ClassifiedString Realm { get; init; } = ClassifiedString.Empty; - - /// <summary> - /// Additional classified attributes (e.g. email, phone). - /// </summary> - public IReadOnlyList<AuthEventProperty> Attributes { get; init; } = Array.Empty<AuthEventProperty>(); -} - -/// <summary> -/// Captures OAuth/OIDC client metadata for an audit event. -/// </summary> -public sealed record AuthEventClient -{ - /// <summary> - /// Client identifier (PII for confidential clients). - /// </summary> - public ClassifiedString ClientId { get; init; } = ClassifiedString.Empty; - - /// <summary> - /// Friendly client name (may be public). - /// </summary> - public ClassifiedString Name { get; init; } = ClassifiedString.Empty; - - /// <summary> - /// Identity provider/plugin originating the client. - /// </summary> - public ClassifiedString Provider { get; init; } = ClassifiedString.Empty; -} - -/// <summary> -/// Captures network metadata for an audit event. -/// </summary> -public sealed record AuthEventNetwork -{ - /// <summary> - /// Remote address observed for the request (PII). - /// </summary> - public ClassifiedString RemoteAddress { get; init; } = ClassifiedString.Empty; - - /// <summary> - /// Forwarded address supplied by proxies (PII). - /// </summary> - public ClassifiedString ForwardedFor { get; init; } = ClassifiedString.Empty; - - /// <summary> - /// User agent string associated with the request. - /// </summary> - public ClassifiedString UserAgent { get; init; } = ClassifiedString.Empty; -} - -/// <summary> -/// Represents an additional classified property associated with the audit event. -/// </summary> -public sealed record AuthEventProperty -{ - /// <summary> - /// Property name (canonical snake-case identifier). - /// </summary> - public required string Name { get; init; } - - /// <summary> - /// Classified value assigned to the property. - /// </summary> - public ClassifiedString Value { get; init; } = ClassifiedString.Empty; -} - -/// <summary> -/// Sink that receives completed audit event records. -/// </summary> -public interface IAuthEventSink -{ - /// <summary> - /// Persists the supplied audit event. - /// </summary> - ValueTask WriteAsync(AuthEventRecord record, CancellationToken cancellationToken); -} +using System; +using System.Collections.Generic; + +namespace StellaOps.Cryptography.Audit; + +/// <summary> +/// Represents a structured security event emitted by the Authority host and plugins. +/// </summary> +public sealed record AuthEventRecord +{ + /// <summary> + /// Canonical event identifier (e.g. <c>authority.password.grant</c>). + /// </summary> + public required string EventType { get; init; } + + /// <summary> + /// UTC timestamp captured when the event occurred. + /// </summary> + public DateTimeOffset OccurredAt { get; init; } = DateTimeOffset.UtcNow; + + /// <summary> + /// Stable correlation identifier that links the event across logs, traces, and persistence. + /// </summary> + public string? CorrelationId { get; init; } + + /// <summary> + /// Outcome classification for the audited operation. + /// </summary> + public AuthEventOutcome Outcome { get; init; } = AuthEventOutcome.Unknown; + + /// <summary> + /// Optional human-readable reason or failure descriptor. + /// </summary> + public string? 
Reason { get; init; } + + /// <summary> + /// Identity of the end-user (subject) involved in the event, when applicable. + /// </summary> + public AuthEventSubject? Subject { get; init; } + + /// <summary> + /// OAuth/OIDC client metadata associated with the event, when applicable. + /// </summary> + public AuthEventClient? Client { get; init; } + + /// <summary> + /// Tenant identifier associated with the authenticated principal or client. + /// </summary> + public ClassifiedString Tenant { get; init; } = ClassifiedString.Empty; + + /// <summary> + /// Project identifier associated with the authenticated principal or client (optional). + /// </summary> + public ClassifiedString Project { get; init; } = ClassifiedString.Empty; + + /// <summary> + /// Granted or requested scopes tied to the event. + /// </summary> + public IReadOnlyList<string> Scopes { get; init; } = Array.Empty<string>(); + + /// <summary> + /// Network attributes (remote IP, forwarded headers, user agent) captured for the request. + /// </summary> + public AuthEventNetwork? Network { get; init; } + + /// <summary> + /// Additional classified properties carried with the event. + /// </summary> + public IReadOnlyList<AuthEventProperty> Properties { get; init; } = Array.Empty<AuthEventProperty>(); +} + +/// <summary> +/// Describes the outcome of an audited flow. +/// </summary> +public enum AuthEventOutcome +{ + /// <summary> + /// Outcome has not been set. + /// </summary> + Unknown = 0, + + /// <summary> + /// Operation succeeded. + /// </summary> + Success, + + /// <summary> + /// Operation failed (invalid credentials, configuration issues, etc.). + /// </summary> + Failure, + + /// <summary> + /// Operation failed due to a lockout policy. + /// </summary> + LockedOut, + + /// <summary> + /// Operation was rejected due to rate limiting or throttling. + /// </summary> + RateLimited, + + /// <summary> + /// Operation encountered an unexpected error. + /// </summary> + Error +} + +/// <summary> +/// Represents a string value enriched with a data classification tag. +/// </summary> +public readonly record struct ClassifiedString(string? Value, AuthEventDataClassification Classification) +{ + /// <summary> + /// An empty classified string. + /// </summary> + public static ClassifiedString Empty => new(null, AuthEventDataClassification.None); + + /// <summary> + /// Indicates whether the classified string carries a non-empty value. + /// </summary> + public bool HasValue => !string.IsNullOrWhiteSpace(Value); + + /// <summary> + /// Creates a classified string for public/non-sensitive data. + /// </summary> + public static ClassifiedString Public(string? value) => Create(value, AuthEventDataClassification.None); + + /// <summary> + /// Creates a classified string tagged as personally identifiable information (PII). + /// </summary> + public static ClassifiedString Personal(string? value) => Create(value, AuthEventDataClassification.Personal); + + /// <summary> + /// Creates a classified string tagged as sensitive (e.g. credentials, secrets). + /// </summary> + public static ClassifiedString Sensitive(string? value) => Create(value, AuthEventDataClassification.Sensitive); + + private static ClassifiedString Create(string? value, AuthEventDataClassification classification) + { + return new ClassifiedString(Normalize(value), classification); + } + + private static string? Normalize(string? value) + { + return string.IsNullOrWhiteSpace(value) ? 
null : value; + } +} + +/// <summary> +/// Supported classifications for audit data values. +/// </summary> +public enum AuthEventDataClassification +{ + /// <summary> + /// Data is not considered sensitive. + /// </summary> + None = 0, + + /// <summary> + /// Personally identifiable information (PII) that warrants redaction in certain sinks. + /// </summary> + Personal, + + /// <summary> + /// Highly sensitive information (credentials, secrets, tokens). + /// </summary> + Sensitive +} + +/// <summary> +/// Captures subject metadata for an audit event. +/// </summary> +public sealed record AuthEventSubject +{ + /// <summary> + /// Stable subject identifier (PII). + /// </summary> + public ClassifiedString SubjectId { get; init; } = ClassifiedString.Empty; + + /// <summary> + /// Username or login name (PII). + /// </summary> + public ClassifiedString Username { get; init; } = ClassifiedString.Empty; + + /// <summary> + /// Optional display name (PII). + /// </summary> + public ClassifiedString DisplayName { get; init; } = ClassifiedString.Empty; + + /// <summary> + /// Optional plugin or tenant realm controlling the subject namespace. + /// </summary> + public ClassifiedString Realm { get; init; } = ClassifiedString.Empty; + + /// <summary> + /// Additional classified attributes (e.g. email, phone). + /// </summary> + public IReadOnlyList<AuthEventProperty> Attributes { get; init; } = Array.Empty<AuthEventProperty>(); +} + +/// <summary> +/// Captures OAuth/OIDC client metadata for an audit event. +/// </summary> +public sealed record AuthEventClient +{ + /// <summary> + /// Client identifier (PII for confidential clients). + /// </summary> + public ClassifiedString ClientId { get; init; } = ClassifiedString.Empty; + + /// <summary> + /// Friendly client name (may be public). + /// </summary> + public ClassifiedString Name { get; init; } = ClassifiedString.Empty; + + /// <summary> + /// Identity provider/plugin originating the client. + /// </summary> + public ClassifiedString Provider { get; init; } = ClassifiedString.Empty; +} + +/// <summary> +/// Captures network metadata for an audit event. +/// </summary> +public sealed record AuthEventNetwork +{ + /// <summary> + /// Remote address observed for the request (PII). + /// </summary> + public ClassifiedString RemoteAddress { get; init; } = ClassifiedString.Empty; + + /// <summary> + /// Forwarded address supplied by proxies (PII). + /// </summary> + public ClassifiedString ForwardedFor { get; init; } = ClassifiedString.Empty; + + /// <summary> + /// User agent string associated with the request. + /// </summary> + public ClassifiedString UserAgent { get; init; } = ClassifiedString.Empty; +} + +/// <summary> +/// Represents an additional classified property associated with the audit event. +/// </summary> +public sealed record AuthEventProperty +{ + /// <summary> + /// Property name (canonical snake-case identifier). + /// </summary> + public required string Name { get; init; } + + /// <summary> + /// Classified value assigned to the property. + /// </summary> + public ClassifiedString Value { get; init; } = ClassifiedString.Empty; +} + +/// <summary> +/// Sink that receives completed audit event records. +/// </summary> +public interface IAuthEventSink +{ + /// <summary> + /// Persists the supplied audit event. 
+ /// </summary> + ValueTask WriteAsync(AuthEventRecord record, CancellationToken cancellationToken); +} diff --git a/src/StellaOps.Cryptography/CryptoProvider.cs b/src/__Libraries/StellaOps.Cryptography/CryptoProvider.cs similarity index 100% rename from src/StellaOps.Cryptography/CryptoProvider.cs rename to src/__Libraries/StellaOps.Cryptography/CryptoProvider.cs diff --git a/src/StellaOps.Cryptography/CryptoProviderRegistry.cs b/src/__Libraries/StellaOps.Cryptography/CryptoProviderRegistry.cs similarity index 100% rename from src/StellaOps.Cryptography/CryptoProviderRegistry.cs rename to src/__Libraries/StellaOps.Cryptography/CryptoProviderRegistry.cs diff --git a/src/StellaOps.Cryptography/CryptoSigningKey.cs b/src/__Libraries/StellaOps.Cryptography/CryptoSigningKey.cs similarity index 100% rename from src/StellaOps.Cryptography/CryptoSigningKey.cs rename to src/__Libraries/StellaOps.Cryptography/CryptoSigningKey.cs diff --git a/src/StellaOps.Cryptography/DefaultCryptoProvider.cs b/src/__Libraries/StellaOps.Cryptography/DefaultCryptoProvider.cs similarity index 100% rename from src/StellaOps.Cryptography/DefaultCryptoProvider.cs rename to src/__Libraries/StellaOps.Cryptography/DefaultCryptoProvider.cs diff --git a/src/StellaOps.Cryptography/EcdsaSigner.cs b/src/__Libraries/StellaOps.Cryptography/EcdsaSigner.cs similarity index 100% rename from src/StellaOps.Cryptography/EcdsaSigner.cs rename to src/__Libraries/StellaOps.Cryptography/EcdsaSigner.cs diff --git a/src/StellaOps.Cryptography/ICryptoSigner.cs b/src/__Libraries/StellaOps.Cryptography/ICryptoSigner.cs similarity index 100% rename from src/StellaOps.Cryptography/ICryptoSigner.cs rename to src/__Libraries/StellaOps.Cryptography/ICryptoSigner.cs diff --git a/src/StellaOps.Cryptography/LibsodiumCryptoProvider.cs b/src/__Libraries/StellaOps.Cryptography/LibsodiumCryptoProvider.cs similarity index 100% rename from src/StellaOps.Cryptography/LibsodiumCryptoProvider.cs rename to src/__Libraries/StellaOps.Cryptography/LibsodiumCryptoProvider.cs diff --git a/src/StellaOps.Cryptography/PasswordHashAlgorithms.cs b/src/__Libraries/StellaOps.Cryptography/PasswordHashAlgorithms.cs similarity index 100% rename from src/StellaOps.Cryptography/PasswordHashAlgorithms.cs rename to src/__Libraries/StellaOps.Cryptography/PasswordHashAlgorithms.cs diff --git a/src/StellaOps.Cryptography/PasswordHashing.cs b/src/__Libraries/StellaOps.Cryptography/PasswordHashing.cs similarity index 100% rename from src/StellaOps.Cryptography/PasswordHashing.cs rename to src/__Libraries/StellaOps.Cryptography/PasswordHashing.cs diff --git a/src/StellaOps.Cryptography/Pbkdf2PasswordHasher.cs b/src/__Libraries/StellaOps.Cryptography/Pbkdf2PasswordHasher.cs similarity index 100% rename from src/StellaOps.Cryptography/Pbkdf2PasswordHasher.cs rename to src/__Libraries/StellaOps.Cryptography/Pbkdf2PasswordHasher.cs diff --git a/src/StellaOps.Cryptography/SignatureAlgorithms.cs b/src/__Libraries/StellaOps.Cryptography/SignatureAlgorithms.cs similarity index 100% rename from src/StellaOps.Cryptography/SignatureAlgorithms.cs rename to src/__Libraries/StellaOps.Cryptography/SignatureAlgorithms.cs diff --git a/src/StellaOps.Cryptography/StellaOps.Cryptography.csproj b/src/__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj similarity index 97% rename from src/StellaOps.Cryptography/StellaOps.Cryptography.csproj rename to src/__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj index a83f9a6a..c884dca0 100644 --- 
a/src/StellaOps.Cryptography/StellaOps.Cryptography.csproj +++ b/src/__Libraries/StellaOps.Cryptography/StellaOps.Cryptography.csproj @@ -1,16 +1,16 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <LangVersion>preview</LangVersion> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <PropertyGroup Condition="'$(StellaOpsCryptoSodium)' == 'true'"> - <DefineConstants>$(DefineConstants);STELLAOPS_CRYPTO_SODIUM</DefineConstants> - </PropertyGroup> - <ItemGroup> - <PackageReference Include="Konscious.Security.Cryptography.Argon2" Version="1.3.1" /> - <PackageReference Include="Microsoft.IdentityModel.Tokens" Version="8.14.0" /> - </ItemGroup> -</Project> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <LangVersion>preview</LangVersion> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <PropertyGroup Condition="'$(StellaOpsCryptoSodium)' == 'true'"> + <DefineConstants>$(DefineConstants);STELLAOPS_CRYPTO_SODIUM</DefineConstants> + </PropertyGroup> + <ItemGroup> + <PackageReference Include="Konscious.Security.Cryptography.Argon2" Version="1.3.1" /> + <PackageReference Include="Microsoft.IdentityModel.Tokens" Version="8.14.0" /> + </ItemGroup> +</Project> diff --git a/src/StellaOps.Cryptography/TASKS.md b/src/__Libraries/StellaOps.Cryptography/TASKS.md similarity index 100% rename from src/StellaOps.Cryptography/TASKS.md rename to src/__Libraries/StellaOps.Cryptography/TASKS.md diff --git a/src/StellaOps.DependencyInjection/IDependencyInjectionRoutine.cs b/src/__Libraries/StellaOps.DependencyInjection/IDependencyInjectionRoutine.cs similarity index 100% rename from src/StellaOps.DependencyInjection/IDependencyInjectionRoutine.cs rename to src/__Libraries/StellaOps.DependencyInjection/IDependencyInjectionRoutine.cs diff --git a/src/StellaOps.DependencyInjection/ServiceBindingAttribute.cs b/src/__Libraries/StellaOps.DependencyInjection/ServiceBindingAttribute.cs similarity index 100% rename from src/StellaOps.DependencyInjection/ServiceBindingAttribute.cs rename to src/__Libraries/StellaOps.DependencyInjection/ServiceBindingAttribute.cs diff --git a/src/StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj b/src/__Libraries/StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj similarity index 97% rename from src/StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj rename to src/__Libraries/StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj index 679434be..96977cf3 100644 --- a/src/StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj +++ b/src/__Libraries/StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj @@ -1,14 +1,14 @@ -<Project Sdk="Microsoft.NET.Sdk"> - - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - - <ItemGroup> - <PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0-rc.2.25502.107" /> - </ItemGroup> - +<Project Sdk="Microsoft.NET.Sdk"> + + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + 
<Nullable>enable</Nullable> + </PropertyGroup> + + <ItemGroup> + <PackageReference Include="Microsoft.Extensions.Configuration.Abstractions" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.DependencyInjection.Abstractions" Version="10.0.0-rc.2.25502.107" /> + </ItemGroup> + </Project> \ No newline at end of file diff --git a/src/StellaOps.Plugin/DependencyInjection/PluginDependencyInjectionExtensions.cs b/src/__Libraries/StellaOps.Plugin/DependencyInjection/PluginDependencyInjectionExtensions.cs similarity index 100% rename from src/StellaOps.Plugin/DependencyInjection/PluginDependencyInjectionExtensions.cs rename to src/__Libraries/StellaOps.Plugin/DependencyInjection/PluginDependencyInjectionExtensions.cs diff --git a/src/StellaOps.Plugin/DependencyInjection/PluginServiceRegistration.cs b/src/__Libraries/StellaOps.Plugin/DependencyInjection/PluginServiceRegistration.cs similarity index 100% rename from src/StellaOps.Plugin/DependencyInjection/PluginServiceRegistration.cs rename to src/__Libraries/StellaOps.Plugin/DependencyInjection/PluginServiceRegistration.cs diff --git a/src/StellaOps.Plugin/DependencyInjection/StellaOpsPluginRegistration.cs b/src/__Libraries/StellaOps.Plugin/DependencyInjection/StellaOpsPluginRegistration.cs similarity index 100% rename from src/StellaOps.Plugin/DependencyInjection/StellaOpsPluginRegistration.cs rename to src/__Libraries/StellaOps.Plugin/DependencyInjection/StellaOpsPluginRegistration.cs diff --git a/src/StellaOps.Plugin/Hosting/PluginAssembly.cs b/src/__Libraries/StellaOps.Plugin/Hosting/PluginAssembly.cs similarity index 100% rename from src/StellaOps.Plugin/Hosting/PluginAssembly.cs rename to src/__Libraries/StellaOps.Plugin/Hosting/PluginAssembly.cs diff --git a/src/StellaOps.Plugin/Hosting/PluginHost.cs b/src/__Libraries/StellaOps.Plugin/Hosting/PluginHost.cs similarity index 100% rename from src/StellaOps.Plugin/Hosting/PluginHost.cs rename to src/__Libraries/StellaOps.Plugin/Hosting/PluginHost.cs diff --git a/src/StellaOps.Plugin/Hosting/PluginHostOptions.cs b/src/__Libraries/StellaOps.Plugin/Hosting/PluginHostOptions.cs similarity index 100% rename from src/StellaOps.Plugin/Hosting/PluginHostOptions.cs rename to src/__Libraries/StellaOps.Plugin/Hosting/PluginHostOptions.cs diff --git a/src/StellaOps.Plugin/Hosting/PluginHostResult.cs b/src/__Libraries/StellaOps.Plugin/Hosting/PluginHostResult.cs similarity index 100% rename from src/StellaOps.Plugin/Hosting/PluginHostResult.cs rename to src/__Libraries/StellaOps.Plugin/Hosting/PluginHostResult.cs diff --git a/src/StellaOps.Plugin/Hosting/PluginLoadContext.cs b/src/__Libraries/StellaOps.Plugin/Hosting/PluginLoadContext.cs similarity index 100% rename from src/StellaOps.Plugin/Hosting/PluginLoadContext.cs rename to src/__Libraries/StellaOps.Plugin/Hosting/PluginLoadContext.cs diff --git a/src/StellaOps.Plugin/Internal/ReflectionExtensions.cs b/src/__Libraries/StellaOps.Plugin/Internal/ReflectionExtensions.cs similarity index 100% rename from src/StellaOps.Plugin/Internal/ReflectionExtensions.cs rename to src/__Libraries/StellaOps.Plugin/Internal/ReflectionExtensions.cs diff --git a/src/StellaOps.Plugin/PluginContracts.cs b/src/__Libraries/StellaOps.Plugin/PluginContracts.cs similarity index 100% rename from src/StellaOps.Plugin/PluginContracts.cs rename to src/__Libraries/StellaOps.Plugin/PluginContracts.cs diff --git a/src/StellaOps.Plugin/Properties/AssemblyInfo.cs b/src/__Libraries/StellaOps.Plugin/Properties/AssemblyInfo.cs similarity index 100% rename 
from src/StellaOps.Plugin/Properties/AssemblyInfo.cs rename to src/__Libraries/StellaOps.Plugin/Properties/AssemblyInfo.cs diff --git a/src/StellaOps.Plugin/StellaOps.Plugin.csproj b/src/__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj similarity index 80% rename from src/StellaOps.Plugin/StellaOps.Plugin.csproj rename to src/__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj index 9471a911..124ad9da 100644 --- a/src/StellaOps.Plugin/StellaOps.Plugin.csproj +++ b/src/__Libraries/StellaOps.Plugin/StellaOps.Plugin.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> @@ -13,7 +14,7 @@ </ItemGroup> <ItemGroup> - <ProjectReference Include="..\\StellaOps.DependencyInjection\\StellaOps.DependencyInjection.csproj" /> + <ProjectReference Include="../StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj" /> </ItemGroup> </Project> \ No newline at end of file diff --git a/src/StellaOps.Plugin/TASKS.md b/src/__Libraries/StellaOps.Plugin/TASKS.md similarity index 87% rename from src/StellaOps.Plugin/TASKS.md rename to src/__Libraries/StellaOps.Plugin/TASKS.md index ea4687eb..603d4ec9 100644 --- a/src/StellaOps.Plugin/TASKS.md +++ b/src/__Libraries/StellaOps.Plugin/TASKS.md @@ -1,9 +1,9 @@ -# TASKS -| Task | Owner(s) | Depends on | Notes | -|---|---|---|---| +# TASKS +| Task | Owner(s) | Depends on | Notes | +|---|---|---|---| |PLUGIN-DI-08-001 Scoped service support in plugin bootstrap|Plugin Platform Guild (DONE 2025-10-21)|StellaOps.DependencyInjection|Scoped DI metadata primitives landed; dynamic plugin integration tests now verify `RegisterPluginRoutines` honours `[ServiceBinding]` lifetimes and remains idempotent.| |PLUGIN-DI-08-002.COORD Authority scoped-service handshake|Plugin Platform Guild, Authority Core (DONE 2025-10-20)|PLUGIN-DI-08-001|Workshop held 2025-10-20 15:00–16:05 UTC; outcomes/notes captured in `docs/dev/authority-plugin-di-coordination.md`, follow-up action items assigned for PLUGIN-DI-08-002 implementation plan.| -|PLUGIN-DI-08-002 Authority plugin integration updates|Plugin Platform Guild, Authority Core (DONE 2025-10-20)|PLUGIN-DI-08-001, PLUGIN-DI-08-002.COORD|Standard registrar now registers scoped credential/provisioning stores + identity-provider plugins, registry Acquire scopes instances, and regression suites (`dotnet test src/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StellaOps.Authority.Plugin.Standard.Tests.csproj`, `dotnet test src/StellaOps.Authority/StellaOps.Authority.Tests/StellaOps.Authority.Tests.csproj`) cover scoped lifetimes + handles.| +|PLUGIN-DI-08-002 Authority plugin integration updates|Plugin Platform Guild, Authority Core (DONE 2025-10-20)|PLUGIN-DI-08-001, PLUGIN-DI-08-002.COORD|Standard registrar now registers scoped credential/provisioning stores + identity-provider plugins, registry Acquire scopes instances, and regression suites (`dotnet test src/Authority/StellaOps.Authority/StellaOps.Authority.Plugin.Standard.Tests/StellaOps.Authority.Plugin.Standard.Tests.csproj`, `dotnet test src/Authority/StellaOps.Authority/StellaOps.Authority.Tests/StellaOps.Authority.Tests.csproj`) cover scoped lifetimes + handles.| |PLUGIN-DI-08-003 Authority registry scoped resolution|Plugin Platform Guild, Authority Core (DONE 2025-10-20)|PLUGIN-DI-08-002.COORD|Reworked `IAuthorityIdentityProviderRegistry` to expose metadata + scoped handles, updated OpenIddict flows/Program health endpoints, and added coverage via `AuthorityIdentityProviderRegistryTests`.| 
|PLUGIN-DI-08-004 Authority plugin loader DI bridge|Plugin Platform Guild, Authority Core (DONE 2025-10-20)|PLUGIN-DI-08-002.COORD|Authority plugin loader now activates registrars via scoped DI leases, registers `[ServiceBinding]` metadata, and includes regression coverage in `AuthorityPluginLoaderTests`.| |PLUGIN-DI-08-005 Authority plugin bootstrap scope pattern|Plugin Platform Guild, Authority Core (DONE 2025-10-20)|PLUGIN-DI-08-002.COORD|Standard bootstrapper uses `IServiceScopeFactory` per run; tests updated to validate scoped execution and documentation annotated in `authority-plugin-di-coordination.md`.| diff --git a/src/StellaOps.Configuration.Tests/AuthorityPluginConfigurationLoaderTests.cs b/src/__Libraries/__Tests/StellaOps.Configuration.Tests/AuthorityPluginConfigurationLoaderTests.cs similarity index 100% rename from src/StellaOps.Configuration.Tests/AuthorityPluginConfigurationLoaderTests.cs rename to src/__Libraries/__Tests/StellaOps.Configuration.Tests/AuthorityPluginConfigurationLoaderTests.cs diff --git a/src/StellaOps.Configuration.Tests/AuthorityTelemetryTests.cs b/src/__Libraries/__Tests/StellaOps.Configuration.Tests/AuthorityTelemetryTests.cs similarity index 100% rename from src/StellaOps.Configuration.Tests/AuthorityTelemetryTests.cs rename to src/__Libraries/__Tests/StellaOps.Configuration.Tests/AuthorityTelemetryTests.cs diff --git a/src/__Libraries/__Tests/StellaOps.Configuration.Tests/StellaOps.Configuration.Tests.csproj b/src/__Libraries/__Tests/StellaOps.Configuration.Tests/StellaOps.Configuration.Tests.csproj new file mode 100644 index 00000000..5584a171 --- /dev/null +++ b/src/__Libraries/__Tests/StellaOps.Configuration.Tests/StellaOps.Configuration.Tests.csproj @@ -0,0 +1,12 @@ +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../StellaOps.Configuration/StellaOps.Configuration.csproj" /> + <ProjectReference Include="../../../Authority/StellaOps.Authority/StellaOps.Auth.Abstractions/StellaOps.Auth.Abstractions.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Configuration.Tests/StellaOpsAuthorityOptionsTests.cs b/src/__Libraries/__Tests/StellaOps.Configuration.Tests/StellaOpsAuthorityOptionsTests.cs similarity index 97% rename from src/StellaOps.Configuration.Tests/StellaOpsAuthorityOptionsTests.cs rename to src/__Libraries/__Tests/StellaOps.Configuration.Tests/StellaOpsAuthorityOptionsTests.cs index 1e324bdb..622a4d47 100644 --- a/src/StellaOps.Configuration.Tests/StellaOpsAuthorityOptionsTests.cs +++ b/src/__Libraries/__Tests/StellaOps.Configuration.Tests/StellaOpsAuthorityOptionsTests.cs @@ -1,222 +1,222 @@ -using System; -using System.Collections.Generic; -using Microsoft.Extensions.Configuration; -using StellaOps.Configuration; -using Xunit; - -namespace StellaOps.Configuration.Tests; - -public class StellaOpsAuthorityOptionsTests -{ - [Fact] - public void Validate_Throws_When_IssuerMissing() - { - var options = new StellaOpsAuthorityOptions(); - - var exception = Assert.Throws<InvalidOperationException>(() => options.Validate()); - - Assert.Contains("issuer", exception.Message, StringComparison.OrdinalIgnoreCase); - } - - [Fact] - public void Validate_Normalises_Collections() - { - var options = new StellaOpsAuthorityOptions - { - Issuer = new Uri("https://authority.stella-ops.test"), - 
SchemaVersion = 1 - }; - options.Storage.ConnectionString = "mongodb://localhost:27017/authority"; - options.Signing.ActiveKeyId = "test-key"; - options.Signing.KeyPath = "/tmp/test-key.pem"; - - options.PluginDirectories.Add(" ./plugins "); - options.PluginDirectories.Add("./plugins"); - options.PluginDirectories.Add("./other"); - - options.BypassNetworks.Add(" 10.0.0.0/24 "); - options.BypassNetworks.Add("10.0.0.0/24"); - options.BypassNetworks.Add("192.168.0.0/16"); - - options.Validate(); - - Assert.Equal(new[] { "./plugins", "./other" }, options.PluginDirectories); - Assert.Equal(new[] { "10.0.0.0/24", "192.168.0.0/16" }, options.BypassNetworks); - } - - [Fact] - public void Validate_Normalises_PluginDescriptors() - { - var options = new StellaOpsAuthorityOptions - { - Issuer = new Uri("https://authority.stella-ops.test"), - SchemaVersion = 1 - }; - options.Storage.ConnectionString = "mongodb://localhost:27017/authority"; - options.Signing.ActiveKeyId = "test-key"; - options.Signing.KeyPath = "/tmp/test-key.pem"; - - var descriptor = new AuthorityPluginDescriptorOptions - { - AssemblyName = "StellaOps.Authority.Plugin.Standard", - ConfigFile = " standard.yaml ", - Enabled = true - }; - - descriptor.Capabilities.Add("password"); - descriptor.Capabilities.Add("PASSWORD"); - options.Plugins.Descriptors["standard"] = descriptor; - - options.Validate(); - - var normalized = options.Plugins.Descriptors["standard"]; - Assert.Equal("standard.yaml", normalized.ConfigFile); - Assert.Single(normalized.Capabilities); - Assert.Equal("password", normalized.Capabilities[0]); - } - - [Fact] - public void Validate_Throws_When_StorageConnectionStringMissing() - { - var options = new StellaOpsAuthorityOptions - { - Issuer = new Uri("https://authority.stella-ops.test"), - SchemaVersion = 1 - }; - options.Signing.ActiveKeyId = "test-key"; - options.Signing.KeyPath = "/tmp/test-key.pem"; - - var exception = Assert.Throws<InvalidOperationException>(() => options.Validate()); - - Assert.Contains("Mongo connection string", exception.Message, StringComparison.OrdinalIgnoreCase); - } - - [Fact] - public void Build_Binds_From_Configuration() - { - var context = StellaOpsAuthorityConfiguration.Build(options => - { - options.ConfigureBuilder = builder => - { - builder.AddInMemoryCollection(new Dictionary<string, string?> - { - ["Authority:SchemaVersion"] = "2", - ["Authority:Issuer"] = "https://authority.internal", - ["Authority:AccessTokenLifetime"] = "00:30:00", - ["Authority:RefreshTokenLifetime"] = "30.00:00:00", - ["Authority:Storage:ConnectionString"] = "mongodb://example/stellaops", - ["Authority:Storage:DatabaseName"] = "overrideDb", - ["Authority:Storage:CommandTimeout"] = "00:01:30", - ["Authority:PluginDirectories:0"] = "/var/lib/stellaops/plugins", - ["Authority:BypassNetworks:0"] = "127.0.0.1/32", - ["Authority:Security:RateLimiting:Token:PermitLimit"] = "25", - ["Authority:Security:RateLimiting:Token:Window"] = "00:00:30", - ["Authority:Security:RateLimiting:Authorize:Enabled"] = "true", - ["Authority:Security:RateLimiting:Internal:Enabled"] = "true", - ["Authority:Security:RateLimiting:Internal:PermitLimit"] = "3", - ["Authority:Signing:Enabled"] = "true", - ["Authority:Signing:ActiveKeyId"] = "authority-signing-dev", - ["Authority:Signing:KeyPath"] = "../certificates/authority-signing-dev.pem", - ["Authority:Signing:KeySource"] = "file" - }); - }; - }); - - var options = context.Options; - - Assert.Equal(2, options.SchemaVersion); - Assert.Equal(new Uri("https://authority.internal"), 
options.Issuer); - Assert.Equal(TimeSpan.FromMinutes(30), options.AccessTokenLifetime); - Assert.Equal(TimeSpan.FromDays(30), options.RefreshTokenLifetime); - Assert.Equal(new[] { "/var/lib/stellaops/plugins" }, options.PluginDirectories); - Assert.Equal(new[] { "127.0.0.1/32" }, options.BypassNetworks); - Assert.Equal("mongodb://example/stellaops", options.Storage.ConnectionString); - Assert.Equal("overrideDb", options.Storage.DatabaseName); - Assert.Equal(TimeSpan.FromMinutes(1.5), options.Storage.CommandTimeout); - Assert.Equal(25, options.Security.RateLimiting.Token.PermitLimit); - Assert.Equal(TimeSpan.FromSeconds(30), options.Security.RateLimiting.Token.Window); - Assert.True(options.Security.RateLimiting.Authorize.Enabled); - Assert.True(options.Security.RateLimiting.Internal.Enabled); - Assert.Equal(3, options.Security.RateLimiting.Internal.PermitLimit); - Assert.True(options.Signing.Enabled); - Assert.Equal("authority-signing-dev", options.Signing.ActiveKeyId); - Assert.Equal("../certificates/authority-signing-dev.pem", options.Signing.KeyPath); - Assert.Equal("file", options.Signing.KeySource); - } - - [Fact] - public void Validate_Normalises_ExceptionRoutingTemplates() - { - var options = new StellaOpsAuthorityOptions - { - Issuer = new Uri("https://authority.stella-ops.test"), - SchemaVersion = 1 - }; - options.Storage.ConnectionString = "mongodb://localhost:27017/authority"; - options.Signing.ActiveKeyId = "test-key"; - options.Signing.KeyPath = "/tmp/test-key.pem"; - - options.Exceptions.RoutingTemplates.Add(new AuthorityExceptionRoutingTemplateOptions - { - Id = " SecOps ", - AuthorityRouteId = " approvals/secops ", - RequireMfa = true, - Description = " Security approvals " - }); - - options.Validate(); - - Assert.True(options.Exceptions.RequiresMfaForApprovals); - var template = Assert.Single(options.Exceptions.NormalizedRoutingTemplates); - Assert.Equal("SecOps", template.Key); - Assert.Equal("SecOps", template.Value.Id); - Assert.Equal("approvals/secops", template.Value.AuthorityRouteId); - Assert.Equal("Security approvals", template.Value.Description); - Assert.True(template.Value.RequireMfa); - } - - [Fact] - public void Validate_Throws_When_ExceptionRoutingTemplatesDuplicate() - { - var options = new StellaOpsAuthorityOptions - { - Issuer = new Uri("https://authority.stella-ops.test"), - SchemaVersion = 1 - }; - options.Storage.ConnectionString = "mongodb://localhost:27017/authority"; - options.Signing.ActiveKeyId = "test-key"; - options.Signing.KeyPath = "/tmp/test-key.pem"; - - options.Exceptions.RoutingTemplates.Add(new AuthorityExceptionRoutingTemplateOptions - { - Id = "secops", - AuthorityRouteId = "route/a" - }); - options.Exceptions.RoutingTemplates.Add(new AuthorityExceptionRoutingTemplateOptions - { - Id = "SecOps", - AuthorityRouteId = "route/b" - }); - - var exception = Assert.Throws<InvalidOperationException>(() => options.Validate()); - Assert.Contains("secops", exception.Message, StringComparison.OrdinalIgnoreCase); - } - - [Fact] - public void Validate_Throws_When_RateLimitingInvalid() - { - var options = new StellaOpsAuthorityOptions - { - Issuer = new Uri("https://authority.stella-ops.test"), - SchemaVersion = 1 - }; - options.Storage.ConnectionString = "mongodb://localhost:27017/authority"; - options.Security.RateLimiting.Token.PermitLimit = 0; - options.Signing.ActiveKeyId = "test-key"; - options.Signing.KeyPath = "/tmp/test-key.pem"; - - var exception = Assert.Throws<InvalidOperationException>(() => options.Validate()); - - 
Assert.Contains("permitLimit", exception.Message, StringComparison.OrdinalIgnoreCase); - } -} +using System; +using System.Collections.Generic; +using Microsoft.Extensions.Configuration; +using StellaOps.Configuration; +using Xunit; + +namespace StellaOps.Configuration.Tests; + +public class StellaOpsAuthorityOptionsTests +{ + [Fact] + public void Validate_Throws_When_IssuerMissing() + { + var options = new StellaOpsAuthorityOptions(); + + var exception = Assert.Throws<InvalidOperationException>(() => options.Validate()); + + Assert.Contains("issuer", exception.Message, StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public void Validate_Normalises_Collections() + { + var options = new StellaOpsAuthorityOptions + { + Issuer = new Uri("https://authority.stella-ops.test"), + SchemaVersion = 1 + }; + options.Storage.ConnectionString = "mongodb://localhost:27017/authority"; + options.Signing.ActiveKeyId = "test-key"; + options.Signing.KeyPath = "/tmp/test-key.pem"; + + options.PluginDirectories.Add(" ./plugins "); + options.PluginDirectories.Add("./plugins"); + options.PluginDirectories.Add("./other"); + + options.BypassNetworks.Add(" 10.0.0.0/24 "); + options.BypassNetworks.Add("10.0.0.0/24"); + options.BypassNetworks.Add("192.168.0.0/16"); + + options.Validate(); + + Assert.Equal(new[] { "./plugins", "./other" }, options.PluginDirectories); + Assert.Equal(new[] { "10.0.0.0/24", "192.168.0.0/16" }, options.BypassNetworks); + } + + [Fact] + public void Validate_Normalises_PluginDescriptors() + { + var options = new StellaOpsAuthorityOptions + { + Issuer = new Uri("https://authority.stella-ops.test"), + SchemaVersion = 1 + }; + options.Storage.ConnectionString = "mongodb://localhost:27017/authority"; + options.Signing.ActiveKeyId = "test-key"; + options.Signing.KeyPath = "/tmp/test-key.pem"; + + var descriptor = new AuthorityPluginDescriptorOptions + { + AssemblyName = "StellaOps.Authority.Plugin.Standard", + ConfigFile = " standard.yaml ", + Enabled = true + }; + + descriptor.Capabilities.Add("password"); + descriptor.Capabilities.Add("PASSWORD"); + options.Plugins.Descriptors["standard"] = descriptor; + + options.Validate(); + + var normalized = options.Plugins.Descriptors["standard"]; + Assert.Equal("standard.yaml", normalized.ConfigFile); + Assert.Single(normalized.Capabilities); + Assert.Equal("password", normalized.Capabilities[0]); + } + + [Fact] + public void Validate_Throws_When_StorageConnectionStringMissing() + { + var options = new StellaOpsAuthorityOptions + { + Issuer = new Uri("https://authority.stella-ops.test"), + SchemaVersion = 1 + }; + options.Signing.ActiveKeyId = "test-key"; + options.Signing.KeyPath = "/tmp/test-key.pem"; + + var exception = Assert.Throws<InvalidOperationException>(() => options.Validate()); + + Assert.Contains("Mongo connection string", exception.Message, StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public void Build_Binds_From_Configuration() + { + var context = StellaOpsAuthorityConfiguration.Build(options => + { + options.ConfigureBuilder = builder => + { + builder.AddInMemoryCollection(new Dictionary<string, string?> + { + ["Authority:SchemaVersion"] = "2", + ["Authority:Issuer"] = "https://authority.internal", + ["Authority:AccessTokenLifetime"] = "00:30:00", + ["Authority:RefreshTokenLifetime"] = "30.00:00:00", + ["Authority:Storage:ConnectionString"] = "mongodb://example/stellaops", + ["Authority:Storage:DatabaseName"] = "overrideDb", + ["Authority:Storage:CommandTimeout"] = "00:01:30", + ["Authority:PluginDirectories:0"] = 
"/var/lib/stellaops/plugins", + ["Authority:BypassNetworks:0"] = "127.0.0.1/32", + ["Authority:Security:RateLimiting:Token:PermitLimit"] = "25", + ["Authority:Security:RateLimiting:Token:Window"] = "00:00:30", + ["Authority:Security:RateLimiting:Authorize:Enabled"] = "true", + ["Authority:Security:RateLimiting:Internal:Enabled"] = "true", + ["Authority:Security:RateLimiting:Internal:PermitLimit"] = "3", + ["Authority:Signing:Enabled"] = "true", + ["Authority:Signing:ActiveKeyId"] = "authority-signing-dev", + ["Authority:Signing:KeyPath"] = "../certificates/authority-signing-dev.pem", + ["Authority:Signing:KeySource"] = "file" + }); + }; + }); + + var options = context.Options; + + Assert.Equal(2, options.SchemaVersion); + Assert.Equal(new Uri("https://authority.internal"), options.Issuer); + Assert.Equal(TimeSpan.FromMinutes(30), options.AccessTokenLifetime); + Assert.Equal(TimeSpan.FromDays(30), options.RefreshTokenLifetime); + Assert.Equal(new[] { "/var/lib/stellaops/plugins" }, options.PluginDirectories); + Assert.Equal(new[] { "127.0.0.1/32" }, options.BypassNetworks); + Assert.Equal("mongodb://example/stellaops", options.Storage.ConnectionString); + Assert.Equal("overrideDb", options.Storage.DatabaseName); + Assert.Equal(TimeSpan.FromMinutes(1.5), options.Storage.CommandTimeout); + Assert.Equal(25, options.Security.RateLimiting.Token.PermitLimit); + Assert.Equal(TimeSpan.FromSeconds(30), options.Security.RateLimiting.Token.Window); + Assert.True(options.Security.RateLimiting.Authorize.Enabled); + Assert.True(options.Security.RateLimiting.Internal.Enabled); + Assert.Equal(3, options.Security.RateLimiting.Internal.PermitLimit); + Assert.True(options.Signing.Enabled); + Assert.Equal("authority-signing-dev", options.Signing.ActiveKeyId); + Assert.Equal("../certificates/authority-signing-dev.pem", options.Signing.KeyPath); + Assert.Equal("file", options.Signing.KeySource); + } + + [Fact] + public void Validate_Normalises_ExceptionRoutingTemplates() + { + var options = new StellaOpsAuthorityOptions + { + Issuer = new Uri("https://authority.stella-ops.test"), + SchemaVersion = 1 + }; + options.Storage.ConnectionString = "mongodb://localhost:27017/authority"; + options.Signing.ActiveKeyId = "test-key"; + options.Signing.KeyPath = "/tmp/test-key.pem"; + + options.Exceptions.RoutingTemplates.Add(new AuthorityExceptionRoutingTemplateOptions + { + Id = " SecOps ", + AuthorityRouteId = " approvals/secops ", + RequireMfa = true, + Description = " Security approvals " + }); + + options.Validate(); + + Assert.True(options.Exceptions.RequiresMfaForApprovals); + var template = Assert.Single(options.Exceptions.NormalizedRoutingTemplates); + Assert.Equal("SecOps", template.Key); + Assert.Equal("SecOps", template.Value.Id); + Assert.Equal("approvals/secops", template.Value.AuthorityRouteId); + Assert.Equal("Security approvals", template.Value.Description); + Assert.True(template.Value.RequireMfa); + } + + [Fact] + public void Validate_Throws_When_ExceptionRoutingTemplatesDuplicate() + { + var options = new StellaOpsAuthorityOptions + { + Issuer = new Uri("https://authority.stella-ops.test"), + SchemaVersion = 1 + }; + options.Storage.ConnectionString = "mongodb://localhost:27017/authority"; + options.Signing.ActiveKeyId = "test-key"; + options.Signing.KeyPath = "/tmp/test-key.pem"; + + options.Exceptions.RoutingTemplates.Add(new AuthorityExceptionRoutingTemplateOptions + { + Id = "secops", + AuthorityRouteId = "route/a" + }); + options.Exceptions.RoutingTemplates.Add(new 
AuthorityExceptionRoutingTemplateOptions + { + Id = "SecOps", + AuthorityRouteId = "route/b" + }); + + var exception = Assert.Throws<InvalidOperationException>(() => options.Validate()); + Assert.Contains("secops", exception.Message, StringComparison.OrdinalIgnoreCase); + } + + [Fact] + public void Validate_Throws_When_RateLimitingInvalid() + { + var options = new StellaOpsAuthorityOptions + { + Issuer = new Uri("https://authority.stella-ops.test"), + SchemaVersion = 1 + }; + options.Storage.ConnectionString = "mongodb://localhost:27017/authority"; + options.Security.RateLimiting.Token.PermitLimit = 0; + options.Signing.ActiveKeyId = "test-key"; + options.Signing.KeyPath = "/tmp/test-key.pem"; + + var exception = Assert.Throws<InvalidOperationException>(() => options.Validate()); + + Assert.Contains("permitLimit", exception.Message, StringComparison.OrdinalIgnoreCase); + } +} diff --git a/src/StellaOps.Cryptography.Tests/Argon2idPasswordHasherTests.cs b/src/__Libraries/__Tests/StellaOps.Cryptography.Tests/Argon2idPasswordHasherTests.cs similarity index 100% rename from src/StellaOps.Cryptography.Tests/Argon2idPasswordHasherTests.cs rename to src/__Libraries/__Tests/StellaOps.Cryptography.Tests/Argon2idPasswordHasherTests.cs diff --git a/src/StellaOps.Cryptography.Tests/Audit/AuthEventRecordTests.cs b/src/__Libraries/__Tests/StellaOps.Cryptography.Tests/Audit/AuthEventRecordTests.cs similarity index 96% rename from src/StellaOps.Cryptography.Tests/Audit/AuthEventRecordTests.cs rename to src/__Libraries/__Tests/StellaOps.Cryptography.Tests/Audit/AuthEventRecordTests.cs index c4e166d6..c665749f 100644 --- a/src/StellaOps.Cryptography.Tests/Audit/AuthEventRecordTests.cs +++ b/src/__Libraries/__Tests/StellaOps.Cryptography.Tests/Audit/AuthEventRecordTests.cs @@ -1,57 +1,57 @@ -using System; -using StellaOps.Cryptography.Audit; - -namespace StellaOps.Cryptography.Tests.Audit; - -public class AuthEventRecordTests -{ - [Fact] - public void AuthEventRecord_InitializesCollections() - { - var record = new AuthEventRecord - { - EventType = "authority.test", - Outcome = AuthEventOutcome.Success - }; - - Assert.NotNull(record.Scopes); - Assert.Empty(record.Scopes); - Assert.NotNull(record.Properties); - Assert.Empty(record.Properties); - Assert.False(record.Tenant.HasValue); - Assert.False(record.Project.HasValue); - } - - [Fact] - public void ClassifiedString_NormalizesWhitespace() - { - var value = ClassifiedString.Personal(" "); - Assert.Null(value.Value); - Assert.False(value.HasValue); - Assert.Equal(AuthEventDataClassification.Personal, value.Classification); - } - - [Fact] - public void Subject_DefaultsToEmptyCollections() - { - var subject = new AuthEventSubject(); - Assert.NotNull(subject.Attributes); - Assert.Empty(subject.Attributes); - } - - [Fact] - public void Record_AssignsTimestamp_WhenNotProvided() - { - var record = new AuthEventRecord - { - EventType = "authority.test", - Outcome = AuthEventOutcome.Success - }; - - Assert.NotEqual(default, record.OccurredAt); - Assert.InRange( - record.OccurredAt, - DateTimeOffset.UtcNow.AddSeconds(-5), - DateTimeOffset.UtcNow.AddSeconds(5)); - } -} +using System; +using StellaOps.Cryptography.Audit; + +namespace StellaOps.Cryptography.Tests.Audit; + +public class AuthEventRecordTests +{ + [Fact] + public void AuthEventRecord_InitializesCollections() + { + var record = new AuthEventRecord + { + EventType = "authority.test", + Outcome = AuthEventOutcome.Success + }; + + Assert.NotNull(record.Scopes); + Assert.Empty(record.Scopes); + 
Assert.NotNull(record.Properties); + Assert.Empty(record.Properties); + Assert.False(record.Tenant.HasValue); + Assert.False(record.Project.HasValue); + } + + [Fact] + public void ClassifiedString_NormalizesWhitespace() + { + var value = ClassifiedString.Personal(" "); + Assert.Null(value.Value); + Assert.False(value.HasValue); + Assert.Equal(AuthEventDataClassification.Personal, value.Classification); + } + + [Fact] + public void Subject_DefaultsToEmptyCollections() + { + var subject = new AuthEventSubject(); + Assert.NotNull(subject.Attributes); + Assert.Empty(subject.Attributes); + } + + [Fact] + public void Record_AssignsTimestamp_WhenNotProvided() + { + var record = new AuthEventRecord + { + EventType = "authority.test", + Outcome = AuthEventOutcome.Success + }; + + Assert.NotEqual(default, record.OccurredAt); + Assert.InRange( + record.OccurredAt, + DateTimeOffset.UtcNow.AddSeconds(-5), + DateTimeOffset.UtcNow.AddSeconds(5)); + } +} diff --git a/src/StellaOps.Cryptography.Tests/BouncyCastleEd25519CryptoProviderTests.cs b/src/__Libraries/__Tests/StellaOps.Cryptography.Tests/BouncyCastleEd25519CryptoProviderTests.cs similarity index 100% rename from src/StellaOps.Cryptography.Tests/BouncyCastleEd25519CryptoProviderTests.cs rename to src/__Libraries/__Tests/StellaOps.Cryptography.Tests/BouncyCastleEd25519CryptoProviderTests.cs diff --git a/src/StellaOps.Cryptography.Tests/CryptoProviderRegistryTests.cs b/src/__Libraries/__Tests/StellaOps.Cryptography.Tests/CryptoProviderRegistryTests.cs similarity index 100% rename from src/StellaOps.Cryptography.Tests/CryptoProviderRegistryTests.cs rename to src/__Libraries/__Tests/StellaOps.Cryptography.Tests/CryptoProviderRegistryTests.cs diff --git a/src/StellaOps.Cryptography.Tests/DefaultCryptoProviderSigningTests.cs b/src/__Libraries/__Tests/StellaOps.Cryptography.Tests/DefaultCryptoProviderSigningTests.cs similarity index 100% rename from src/StellaOps.Cryptography.Tests/DefaultCryptoProviderSigningTests.cs rename to src/__Libraries/__Tests/StellaOps.Cryptography.Tests/DefaultCryptoProviderSigningTests.cs diff --git a/src/StellaOps.Cryptography.Tests/LibsodiumCryptoProviderTests.cs b/src/__Libraries/__Tests/StellaOps.Cryptography.Tests/LibsodiumCryptoProviderTests.cs similarity index 100% rename from src/StellaOps.Cryptography.Tests/LibsodiumCryptoProviderTests.cs rename to src/__Libraries/__Tests/StellaOps.Cryptography.Tests/LibsodiumCryptoProviderTests.cs diff --git a/src/StellaOps.Cryptography.Tests/PasswordHashOptionsTests.cs b/src/__Libraries/__Tests/StellaOps.Cryptography.Tests/PasswordHashOptionsTests.cs similarity index 100% rename from src/StellaOps.Cryptography.Tests/PasswordHashOptionsTests.cs rename to src/__Libraries/__Tests/StellaOps.Cryptography.Tests/PasswordHashOptionsTests.cs diff --git a/src/StellaOps.Cryptography.Tests/Pbkdf2PasswordHasherTests.cs b/src/__Libraries/__Tests/StellaOps.Cryptography.Tests/Pbkdf2PasswordHasherTests.cs similarity index 100% rename from src/StellaOps.Cryptography.Tests/Pbkdf2PasswordHasherTests.cs rename to src/__Libraries/__Tests/StellaOps.Cryptography.Tests/Pbkdf2PasswordHasherTests.cs diff --git a/src/StellaOps.Cryptography.Tests/StellaOps.Cryptography.Tests.csproj b/src/__Libraries/__Tests/StellaOps.Cryptography.Tests/StellaOps.Cryptography.Tests.csproj similarity index 50% rename from src/StellaOps.Cryptography.Tests/StellaOps.Cryptography.Tests.csproj rename to src/__Libraries/__Tests/StellaOps.Cryptography.Tests/StellaOps.Cryptography.Tests.csproj index 3a92e15c..04629635 100644 --- 
a/src/StellaOps.Cryptography.Tests/StellaOps.Cryptography.Tests.csproj +++ b/src/__Libraries/__Tests/StellaOps.Cryptography.Tests/StellaOps.Cryptography.Tests.csproj @@ -1,16 +1,17 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <TargetFramework>net10.0</TargetFramework> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - <IsPackable>false</IsPackable> - </PropertyGroup> - <PropertyGroup Condition="'$(StellaOpsCryptoSodium)' == 'true'"> - <DefineConstants>$(DefineConstants);STELLAOPS_CRYPTO_SODIUM</DefineConstants> - </PropertyGroup> - <ItemGroup> - <ProjectReference Include="..\StellaOps.Cryptography\StellaOps.Cryptography.csproj" /> - <ProjectReference Include="..\StellaOps.Cryptography.DependencyInjection\StellaOps.Cryptography.DependencyInjection.csproj" /> - <ProjectReference Include="..\StellaOps.Cryptography.Plugin.BouncyCastle\StellaOps.Cryptography.Plugin.BouncyCastle.csproj" /> - </ItemGroup> -</Project> +<?xml version='1.0' encoding='utf-8'?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <TargetFramework>net10.0</TargetFramework> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + <IsPackable>false</IsPackable> + </PropertyGroup> + <PropertyGroup Condition="'$(StellaOpsCryptoSodium)' == 'true'"> + <DefineConstants>$(DefineConstants);STELLAOPS_CRYPTO_SODIUM</DefineConstants> + </PropertyGroup> + <ItemGroup> + <ProjectReference Include="../../StellaOps.Cryptography/StellaOps.Cryptography.csproj" /> + <ProjectReference Include="../../StellaOps.Cryptography.DependencyInjection/StellaOps.Cryptography.DependencyInjection.csproj" /> + <ProjectReference Include="../../StellaOps.Cryptography.Plugin.BouncyCastle/StellaOps.Cryptography.Plugin.BouncyCastle.csproj" /> + </ItemGroup> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Plugin.Tests/DependencyInjection/PluginDependencyInjectionExtensionsTests.cs b/src/__Libraries/__Tests/StellaOps.Plugin.Tests/DependencyInjection/PluginDependencyInjectionExtensionsTests.cs similarity index 100% rename from src/StellaOps.Plugin.Tests/DependencyInjection/PluginDependencyInjectionExtensionsTests.cs rename to src/__Libraries/__Tests/StellaOps.Plugin.Tests/DependencyInjection/PluginDependencyInjectionExtensionsTests.cs diff --git a/src/StellaOps.Plugin.Tests/DependencyInjection/PluginServiceRegistrationTests.cs b/src/__Libraries/__Tests/StellaOps.Plugin.Tests/DependencyInjection/PluginServiceRegistrationTests.cs similarity index 100% rename from src/StellaOps.Plugin.Tests/DependencyInjection/PluginServiceRegistrationTests.cs rename to src/__Libraries/__Tests/StellaOps.Plugin.Tests/DependencyInjection/PluginServiceRegistrationTests.cs diff --git a/src/StellaOps.Plugin.Tests/StellaOps.Plugin.Tests.csproj b/src/__Libraries/__Tests/StellaOps.Plugin.Tests/StellaOps.Plugin.Tests.csproj similarity index 78% rename from src/StellaOps.Plugin.Tests/StellaOps.Plugin.Tests.csproj rename to src/__Libraries/__Tests/StellaOps.Plugin.Tests/StellaOps.Plugin.Tests.csproj index 0b43c86f..09b25aa3 100644 --- a/src/StellaOps.Plugin.Tests/StellaOps.Plugin.Tests.csproj +++ b/src/__Libraries/__Tests/StellaOps.Plugin.Tests/StellaOps.Plugin.Tests.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -6,8 +7,8 @@ <UseConcelierTestInfra>false</UseConcelierTestInfra> </PropertyGroup> <ItemGroup> - <ProjectReference Include="..\StellaOps.DependencyInjection\StellaOps.DependencyInjection.csproj" /> - 
<ProjectReference Include="..\StellaOps.Plugin\StellaOps.Plugin.csproj" /> + <ProjectReference Include="../../StellaOps.DependencyInjection/StellaOps.DependencyInjection.csproj" /> + <ProjectReference Include="../../StellaOps.Plugin/StellaOps.Plugin.csproj" /> </ItemGroup> <ItemGroup> <PackageReference Include="Microsoft.CodeAnalysis.CSharp" Version="4.14.0" /> @@ -18,4 +19,4 @@ <PackageReference Include="xunit" Version="2.9.2" /> <PackageReference Include="xunit.runner.visualstudio" Version="2.8.2" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Signals.Tests/CallgraphIngestionTests.cs b/src/__Libraries/__Tests/StellaOps.Signals.Tests/CallgraphIngestionTests.cs similarity index 97% rename from src/StellaOps.Signals.Tests/CallgraphIngestionTests.cs rename to src/__Libraries/__Tests/StellaOps.Signals.Tests/CallgraphIngestionTests.cs index 8857497b..b935e4bd 100644 --- a/src/StellaOps.Signals.Tests/CallgraphIngestionTests.cs +++ b/src/__Libraries/__Tests/StellaOps.Signals.Tests/CallgraphIngestionTests.cs @@ -1,138 +1,138 @@ -using System; -using System.Collections.Generic; -using System.IO; -using System.Net; -using System.Net.Http.Json; -using System.Text; -using System.Threading.Tasks; -using MongoDB.Driver; -using StellaOps.Signals.Models; -using StellaOps.Signals.Tests.TestInfrastructure; -using Xunit; - -namespace StellaOps.Signals.Tests; - -public class CallgraphIngestionTests : IClassFixture<SignalsTestFactory> -{ - private readonly SignalsTestFactory factory; - - public CallgraphIngestionTests(SignalsTestFactory factory) - { - this.factory = factory; - } - - [Theory] - [InlineData("java")] - [InlineData("nodejs")] - [InlineData("python")] - [InlineData("go")] - public async Task Ingest_Callgraph_PersistsDocumentAndArtifact(string language) - { - using var client = factory.CreateClient(); - client.DefaultRequestHeaders.Add("X-Scopes", "signals:write"); - - var component = $"demo-{language}"; - var request = CreateRequest(language, component: component); - var response = await client.PostAsJsonAsync("/signals/callgraphs", request); - - Assert.Equal(HttpStatusCode.Accepted, response.StatusCode); - var body = await response.Content.ReadFromJsonAsync<CallgraphIngestResponse>(); - Assert.NotNull(body); - - var database = new MongoClient(factory.MongoRunner.ConnectionString).GetDatabase("signals-tests"); - var collection = database.GetCollection<CallgraphDocument>("callgraphs"); - var doc = await collection.Find(d => d.Id == body!.CallgraphId).FirstOrDefaultAsync(); - - Assert.NotNull(doc); - Assert.Equal(language, doc!.Language); - Assert.Equal(component, doc.Component); - Assert.Equal("1.0.0", doc.Version); - Assert.Equal(2, doc.Nodes.Count); - Assert.Equal(1, doc.Edges.Count); - - var artifactPath = Path.Combine(factory.StoragePath, body.ArtifactPath); - Assert.True(File.Exists(artifactPath)); - Assert.False(string.IsNullOrWhiteSpace(body.ArtifactHash)); - } - - [Fact] - public async Task Ingest_UnsupportedLanguage_ReturnsBadRequest() - { - using var client = factory.CreateClient(); - client.DefaultRequestHeaders.Add("X-Scopes", "signals:write"); - - var request = CreateRequest("ruby"); - var response = await client.PostAsJsonAsync("/signals/callgraphs", request); - - Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode); - } - - [Fact] - public async Task Ingest_InvalidArtifactContent_ReturnsBadRequest() - { - using var client = factory.CreateClient(); - client.DefaultRequestHeaders.Add("X-Scopes", "signals:write"); - - var request = 
CreateRequest("java") with { ArtifactContentBase64 = "not-base64" }; - var response = await client.PostAsJsonAsync("/signals/callgraphs", request); - - Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode); - } - - [Fact] - public async Task Ingest_InvalidGraphStructure_ReturnsUnprocessableEntity() - { - using var client = factory.CreateClient(); - client.DefaultRequestHeaders.Add("X-Scopes", "signals:write"); - - var json = "{\"formatVersion\":\"1.0\",\"graph\":{}}"; - var request = CreateRequest("java", json); - var response = await client.PostAsJsonAsync("/signals/callgraphs", request); - - Assert.Equal(HttpStatusCode.UnprocessableEntity, response.StatusCode); - } - - [Fact] - public async Task Ingest_SameComponentUpsertsDocument() - { - using var client = factory.CreateClient(); - client.DefaultRequestHeaders.Add("X-Scopes", "signals:write"); - - var firstRequest = CreateRequest("python"); - var secondJson = "{\"graph\":{\"nodes\":[{\"id\":\"module.entry\",\"name\":\"module.entry\"}],\"edges\":[]}}"; - var secondRequest = CreateRequest("python", secondJson); - - var firstResponse = await client.PostAsJsonAsync("/signals/callgraphs", firstRequest); - var secondResponse = await client.PostAsJsonAsync("/signals/callgraphs", secondRequest); - - Assert.Equal(HttpStatusCode.Accepted, firstResponse.StatusCode); - Assert.Equal(HttpStatusCode.Accepted, secondResponse.StatusCode); - - var database = new MongoClient(factory.MongoRunner.ConnectionString).GetDatabase("signals-tests"); - var collection = database.GetCollection<CallgraphDocument>("callgraphs"); - var count = await collection.CountDocumentsAsync(FilterDefinition<CallgraphDocument>.Empty); - - Assert.Equal(1, count); - var doc = await collection.Find(_ => true).FirstAsync(); - Assert.Single(doc.Nodes); - Assert.Equal("python", doc.Language); - } - - private static CallgraphIngestRequest CreateRequest(string language, string? customJson = null, string component = "demo") - { - var json = customJson ?? 
"{\"formatVersion\":\"1.0\",\"graph\":{\"nodes\":[{\"id\":\"main.entry\",\"name\":\"main.entry\",\"kind\":\"function\",\"file\":\"main\",\"line\":1},{\"id\":\"helper.run\",\"name\":\"helper.run\",\"kind\":\"function\",\"file\":\"helper\",\"line\":2}],\"edges\":[{\"source\":\"main.entry\",\"target\":\"helper.run\",\"type\":\"call\"}]}}"; - var base64 = Convert.ToBase64String(Encoding.UTF8.GetBytes(json)); - - return new CallgraphIngestRequest( - Language: language, - Component: component, - Version: "1.0.0", - ArtifactContentType: "application/json", - ArtifactFileName: $"{language}-callgraph.json", - ArtifactContentBase64: base64, - Metadata: new Dictionary<string, string?> - { - ["source"] = "unit-test" - }); - } -} +using System; +using System.Collections.Generic; +using System.IO; +using System.Net; +using System.Net.Http.Json; +using System.Text; +using System.Threading.Tasks; +using MongoDB.Driver; +using StellaOps.Signals.Models; +using StellaOps.Signals.Tests.TestInfrastructure; +using Xunit; + +namespace StellaOps.Signals.Tests; + +public class CallgraphIngestionTests : IClassFixture<SignalsTestFactory> +{ + private readonly SignalsTestFactory factory; + + public CallgraphIngestionTests(SignalsTestFactory factory) + { + this.factory = factory; + } + + [Theory] + [InlineData("java")] + [InlineData("nodejs")] + [InlineData("python")] + [InlineData("go")] + public async Task Ingest_Callgraph_PersistsDocumentAndArtifact(string language) + { + using var client = factory.CreateClient(); + client.DefaultRequestHeaders.Add("X-Scopes", "signals:write"); + + var component = $"demo-{language}"; + var request = CreateRequest(language, component: component); + var response = await client.PostAsJsonAsync("/signals/callgraphs", request); + + Assert.Equal(HttpStatusCode.Accepted, response.StatusCode); + var body = await response.Content.ReadFromJsonAsync<CallgraphIngestResponse>(); + Assert.NotNull(body); + + var database = new MongoClient(factory.MongoRunner.ConnectionString).GetDatabase("signals-tests"); + var collection = database.GetCollection<CallgraphDocument>("callgraphs"); + var doc = await collection.Find(d => d.Id == body!.CallgraphId).FirstOrDefaultAsync(); + + Assert.NotNull(doc); + Assert.Equal(language, doc!.Language); + Assert.Equal(component, doc.Component); + Assert.Equal("1.0.0", doc.Version); + Assert.Equal(2, doc.Nodes.Count); + Assert.Equal(1, doc.Edges.Count); + + var artifactPath = Path.Combine(factory.StoragePath, body.ArtifactPath); + Assert.True(File.Exists(artifactPath)); + Assert.False(string.IsNullOrWhiteSpace(body.ArtifactHash)); + } + + [Fact] + public async Task Ingest_UnsupportedLanguage_ReturnsBadRequest() + { + using var client = factory.CreateClient(); + client.DefaultRequestHeaders.Add("X-Scopes", "signals:write"); + + var request = CreateRequest("ruby"); + var response = await client.PostAsJsonAsync("/signals/callgraphs", request); + + Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode); + } + + [Fact] + public async Task Ingest_InvalidArtifactContent_ReturnsBadRequest() + { + using var client = factory.CreateClient(); + client.DefaultRequestHeaders.Add("X-Scopes", "signals:write"); + + var request = CreateRequest("java") with { ArtifactContentBase64 = "not-base64" }; + var response = await client.PostAsJsonAsync("/signals/callgraphs", request); + + Assert.Equal(HttpStatusCode.BadRequest, response.StatusCode); + } + + [Fact] + public async Task Ingest_InvalidGraphStructure_ReturnsUnprocessableEntity() + { + using var client = factory.CreateClient(); + 
client.DefaultRequestHeaders.Add("X-Scopes", "signals:write"); + + var json = "{\"formatVersion\":\"1.0\",\"graph\":{}}"; + var request = CreateRequest("java", json); + var response = await client.PostAsJsonAsync("/signals/callgraphs", request); + + Assert.Equal(HttpStatusCode.UnprocessableEntity, response.StatusCode); + } + + [Fact] + public async Task Ingest_SameComponentUpsertsDocument() + { + using var client = factory.CreateClient(); + client.DefaultRequestHeaders.Add("X-Scopes", "signals:write"); + + var firstRequest = CreateRequest("python"); + var secondJson = "{\"graph\":{\"nodes\":[{\"id\":\"module.entry\",\"name\":\"module.entry\"}],\"edges\":[]}}"; + var secondRequest = CreateRequest("python", secondJson); + + var firstResponse = await client.PostAsJsonAsync("/signals/callgraphs", firstRequest); + var secondResponse = await client.PostAsJsonAsync("/signals/callgraphs", secondRequest); + + Assert.Equal(HttpStatusCode.Accepted, firstResponse.StatusCode); + Assert.Equal(HttpStatusCode.Accepted, secondResponse.StatusCode); + + var database = new MongoClient(factory.MongoRunner.ConnectionString).GetDatabase("signals-tests"); + var collection = database.GetCollection<CallgraphDocument>("callgraphs"); + var count = await collection.CountDocumentsAsync(FilterDefinition<CallgraphDocument>.Empty); + + Assert.Equal(1, count); + var doc = await collection.Find(_ => true).FirstAsync(); + Assert.Single(doc.Nodes); + Assert.Equal("python", doc.Language); + } + + private static CallgraphIngestRequest CreateRequest(string language, string? customJson = null, string component = "demo") + { + var json = customJson ?? "{\"formatVersion\":\"1.0\",\"graph\":{\"nodes\":[{\"id\":\"main.entry\",\"name\":\"main.entry\",\"kind\":\"function\",\"file\":\"main\",\"line\":1},{\"id\":\"helper.run\",\"name\":\"helper.run\",\"kind\":\"function\",\"file\":\"helper\",\"line\":2}],\"edges\":[{\"source\":\"main.entry\",\"target\":\"helper.run\",\"type\":\"call\"}]}}"; + var base64 = Convert.ToBase64String(Encoding.UTF8.GetBytes(json)); + + return new CallgraphIngestRequest( + Language: language, + Component: component, + Version: "1.0.0", + ArtifactContentType: "application/json", + ArtifactFileName: $"{language}-callgraph.json", + ArtifactContentBase64: base64, + Metadata: new Dictionary<string, string?> + { + ["source"] = "unit-test" + }); + } +} diff --git a/src/StellaOps.Signals.Tests/SignalsApiTests.cs b/src/__Libraries/__Tests/StellaOps.Signals.Tests/SignalsApiTests.cs similarity index 96% rename from src/StellaOps.Signals.Tests/SignalsApiTests.cs rename to src/__Libraries/__Tests/StellaOps.Signals.Tests/SignalsApiTests.cs index be2a9778..d39412bc 100644 --- a/src/StellaOps.Signals.Tests/SignalsApiTests.cs +++ b/src/__Libraries/__Tests/StellaOps.Signals.Tests/SignalsApiTests.cs @@ -1,112 +1,112 @@ -using System.Collections.Generic; -using System.Net; -using System.Net.Http.Json; -using System.Threading.Tasks; -using Microsoft.Extensions.Configuration; -using StellaOps.Signals.Tests.TestInfrastructure; -using Xunit; - -namespace StellaOps.Signals.Tests; - -public class SignalsApiTests : IClassFixture<SignalsTestFactory> -{ - private readonly SignalsTestFactory factory; - - public SignalsApiTests(SignalsTestFactory factory) - { - this.factory = factory; - } - - [Fact] - public async Task Healthz_ReturnsOk() - { - using var client = factory.CreateClient(); - var response = await client.GetAsync("/healthz"); - - Assert.Equal(HttpStatusCode.OK, response.StatusCode); - } - - [Fact] - public async Task 
Readyz_ReturnsOk() - { - using var client = factory.CreateClient(); - var response = await client.GetAsync("/readyz"); - - Assert.Equal(HttpStatusCode.OK, response.StatusCode); - var payload = await response.Content.ReadFromJsonAsync<Dictionary<string, string>>(); - Assert.NotNull(payload); - Assert.Equal("ready", payload!["status"]); - } - - [Fact] - public async Task Ping_WithoutScopeHeader_ReturnsUnauthorized() - { - using var client = factory.CreateClient(); - var response = await client.GetAsync("/signals/ping"); - - Assert.Equal(HttpStatusCode.Unauthorized, response.StatusCode); - } - - [Fact] - public async Task Ping_WithMissingScope_ReturnsForbidden() - { - using var client = factory.CreateClient(); - client.DefaultRequestHeaders.Add("X-Scopes", "signals:write"); - var response = await client.GetAsync("/signals/ping"); - - Assert.Equal(HttpStatusCode.Forbidden, response.StatusCode); - } - - [Fact] - public async Task Ping_WithReadScope_ReturnsNoContent() - { - using var client = factory.CreateClient(); - client.DefaultRequestHeaders.Add("X-Scopes", "signals:read"); - var response = await client.GetAsync("/signals/ping"); - - Assert.Equal(HttpStatusCode.NoContent, response.StatusCode); - } - - [Fact] - public async Task Ping_WithFallbackDisabled_ReturnsUnauthorized() - { - using var app = factory.WithWebHostBuilder(builder => - { - builder.ConfigureAppConfiguration((_, configuration) => - { - configuration.AddInMemoryCollection(new Dictionary<string, string?> - { - ["Signals:Authority:AllowAnonymousFallback"] = "false" - }); - }); - }); - - using var client = app.CreateClient(); - var response = await client.GetAsync("/signals/ping"); - - Assert.Equal(HttpStatusCode.Unauthorized, response.StatusCode); - } - - [Fact] - public async Task Status_WithReadScope_ReturnsOk() - { - using var client = factory.CreateClient(); - client.DefaultRequestHeaders.Add("X-Scopes", "signals:read"); - var response = await client.GetAsync("/signals/status"); - - Assert.Equal(HttpStatusCode.OK, response.StatusCode); - var payload = await response.Content.ReadFromJsonAsync<Dictionary<string, string>>(); - Assert.NotNull(payload); - Assert.Equal("signals", payload!["service"]); - } - - [Fact] - public async Task Status_WithMissingScope_ReturnsForbidden() - { - using var client = factory.CreateClient(); - client.DefaultRequestHeaders.Add("X-Scopes", "signals:write"); - var response = await client.GetAsync("/signals/status"); - - Assert.Equal(HttpStatusCode.Forbidden, response.StatusCode); - } -} +using System.Collections.Generic; +using System.Net; +using System.Net.Http.Json; +using System.Threading.Tasks; +using Microsoft.Extensions.Configuration; +using StellaOps.Signals.Tests.TestInfrastructure; +using Xunit; + +namespace StellaOps.Signals.Tests; + +public class SignalsApiTests : IClassFixture<SignalsTestFactory> +{ + private readonly SignalsTestFactory factory; + + public SignalsApiTests(SignalsTestFactory factory) + { + this.factory = factory; + } + + [Fact] + public async Task Healthz_ReturnsOk() + { + using var client = factory.CreateClient(); + var response = await client.GetAsync("/healthz"); + + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + } + + [Fact] + public async Task Readyz_ReturnsOk() + { + using var client = factory.CreateClient(); + var response = await client.GetAsync("/readyz"); + + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + var payload = await response.Content.ReadFromJsonAsync<Dictionary<string, string>>(); + Assert.NotNull(payload); + Assert.Equal("ready", 
payload!["status"]); + } + + [Fact] + public async Task Ping_WithoutScopeHeader_ReturnsUnauthorized() + { + using var client = factory.CreateClient(); + var response = await client.GetAsync("/signals/ping"); + + Assert.Equal(HttpStatusCode.Unauthorized, response.StatusCode); + } + + [Fact] + public async Task Ping_WithMissingScope_ReturnsForbidden() + { + using var client = factory.CreateClient(); + client.DefaultRequestHeaders.Add("X-Scopes", "signals:write"); + var response = await client.GetAsync("/signals/ping"); + + Assert.Equal(HttpStatusCode.Forbidden, response.StatusCode); + } + + [Fact] + public async Task Ping_WithReadScope_ReturnsNoContent() + { + using var client = factory.CreateClient(); + client.DefaultRequestHeaders.Add("X-Scopes", "signals:read"); + var response = await client.GetAsync("/signals/ping"); + + Assert.Equal(HttpStatusCode.NoContent, response.StatusCode); + } + + [Fact] + public async Task Ping_WithFallbackDisabled_ReturnsUnauthorized() + { + using var app = factory.WithWebHostBuilder(builder => + { + builder.ConfigureAppConfiguration((_, configuration) => + { + configuration.AddInMemoryCollection(new Dictionary<string, string?> + { + ["Signals:Authority:AllowAnonymousFallback"] = "false" + }); + }); + }); + + using var client = app.CreateClient(); + var response = await client.GetAsync("/signals/ping"); + + Assert.Equal(HttpStatusCode.Unauthorized, response.StatusCode); + } + + [Fact] + public async Task Status_WithReadScope_ReturnsOk() + { + using var client = factory.CreateClient(); + client.DefaultRequestHeaders.Add("X-Scopes", "signals:read"); + var response = await client.GetAsync("/signals/status"); + + Assert.Equal(HttpStatusCode.OK, response.StatusCode); + var payload = await response.Content.ReadFromJsonAsync<Dictionary<string, string>>(); + Assert.NotNull(payload); + Assert.Equal("signals", payload!["service"]); + } + + [Fact] + public async Task Status_WithMissingScope_ReturnsForbidden() + { + using var client = factory.CreateClient(); + client.DefaultRequestHeaders.Add("X-Scopes", "signals:write"); + var response = await client.GetAsync("/signals/status"); + + Assert.Equal(HttpStatusCode.Forbidden, response.StatusCode); + } +} diff --git a/src/StellaOps.Signals.Tests/StellaOps.Signals.Tests.csproj b/src/__Libraries/__Tests/StellaOps.Signals.Tests/StellaOps.Signals.Tests.csproj similarity index 83% rename from src/StellaOps.Signals.Tests/StellaOps.Signals.Tests.csproj rename to src/__Libraries/__Tests/StellaOps.Signals.Tests/StellaOps.Signals.Tests.csproj index af8b0d7c..9b3852da 100644 --- a/src/StellaOps.Signals.Tests/StellaOps.Signals.Tests.csproj +++ b/src/__Libraries/__Tests/StellaOps.Signals.Tests/StellaOps.Signals.Tests.csproj @@ -1,3 +1,4 @@ +<?xml version='1.0' encoding='utf-8'?> <Project Sdk="Microsoft.NET.Sdk"> <PropertyGroup> <TargetFramework>net10.0</TargetFramework> @@ -16,6 +17,6 @@ </ItemGroup> <ItemGroup> - <ProjectReference Include="..\StellaOps.Signals\StellaOps.Signals.csproj" /> + <ProjectReference Include="../../../Signals/StellaOps.Signals/StellaOps.Signals.csproj" /> </ItemGroup> -</Project> +</Project> \ No newline at end of file diff --git a/src/StellaOps.Signals.Tests/TestInfrastructure/SignalsTestFactory.cs b/src/__Libraries/__Tests/StellaOps.Signals.Tests/TestInfrastructure/SignalsTestFactory.cs similarity index 96% rename from src/StellaOps.Signals.Tests/TestInfrastructure/SignalsTestFactory.cs rename to src/__Libraries/__Tests/StellaOps.Signals.Tests/TestInfrastructure/SignalsTestFactory.cs index e92c3381..d9ef98f3 
100644 --- a/src/StellaOps.Signals.Tests/TestInfrastructure/SignalsTestFactory.cs +++ b/src/__Libraries/__Tests/StellaOps.Signals.Tests/TestInfrastructure/SignalsTestFactory.cs @@ -1,64 +1,64 @@ -using System; -using System.Collections.Generic; -using System.IO; -using System.Threading.Tasks; -using Microsoft.AspNetCore.Hosting; -using Microsoft.AspNetCore.Mvc.Testing; -using Microsoft.Extensions.Configuration; -using Mongo2Go; - -namespace StellaOps.Signals.Tests.TestInfrastructure; - -internal sealed class SignalsTestFactory : WebApplicationFactory<Program>, IAsyncLifetime -{ - private readonly MongoDbRunner mongoRunner; - private readonly string storagePath; - - public SignalsTestFactory() - { - mongoRunner = MongoDbRunner.Start(singleNodeReplSet: true); - storagePath = Path.Combine(Path.GetTempPath(), "signals-tests", Guid.NewGuid().ToString()); - Directory.CreateDirectory(storagePath); - } - - public string StoragePath => storagePath; - - public MongoDbRunner MongoRunner => mongoRunner; - - protected override void ConfigureWebHost(IWebHostBuilder builder) - { - builder.ConfigureAppConfiguration((context, configuration) => - { - var settings = new Dictionary<string, string?> - { - ["Signals:Authority:Enabled"] = "false", - ["Signals:Authority:AllowAnonymousFallback"] = "true", - ["Signals:Mongo:ConnectionString"] = mongoRunner.ConnectionString, - ["Signals:Mongo:Database"] = "signals-tests", - ["Signals:Mongo:CallgraphsCollection"] = "callgraphs", - ["Signals:Storage:RootPath"] = storagePath - }; - - configuration.AddInMemoryCollection(settings); - }); - } - - public Task InitializeAsync() => Task.CompletedTask; - - public async Task DisposeAsync() - { - await Task.Run(() => mongoRunner.Dispose()); - - try - { - if (Directory.Exists(storagePath)) - { - Directory.Delete(storagePath, recursive: true); - } - } - catch - { - // best effort cleanup. 
- } - } -} +using System; +using System.Collections.Generic; +using System.IO; +using System.Threading.Tasks; +using Microsoft.AspNetCore.Hosting; +using Microsoft.AspNetCore.Mvc.Testing; +using Microsoft.Extensions.Configuration; +using Mongo2Go; + +namespace StellaOps.Signals.Tests.TestInfrastructure; + +internal sealed class SignalsTestFactory : WebApplicationFactory<Program>, IAsyncLifetime +{ + private readonly MongoDbRunner mongoRunner; + private readonly string storagePath; + + public SignalsTestFactory() + { + mongoRunner = MongoDbRunner.Start(singleNodeReplSet: true); + storagePath = Path.Combine(Path.GetTempPath(), "signals-tests", Guid.NewGuid().ToString()); + Directory.CreateDirectory(storagePath); + } + + public string StoragePath => storagePath; + + public MongoDbRunner MongoRunner => mongoRunner; + + protected override void ConfigureWebHost(IWebHostBuilder builder) + { + builder.ConfigureAppConfiguration((context, configuration) => + { + var settings = new Dictionary<string, string?> + { + ["Signals:Authority:Enabled"] = "false", + ["Signals:Authority:AllowAnonymousFallback"] = "true", + ["Signals:Mongo:ConnectionString"] = mongoRunner.ConnectionString, + ["Signals:Mongo:Database"] = "signals-tests", + ["Signals:Mongo:CallgraphsCollection"] = "callgraphs", + ["Signals:Storage:RootPath"] = storagePath + }; + + configuration.AddInMemoryCollection(settings); + }); + } + + public Task InitializeAsync() => Task.CompletedTask; + + public async Task DisposeAsync() + { + await Task.Run(() => mongoRunner.Dispose()); + + try + { + if (Directory.Exists(storagePath)) + { + Directory.Delete(storagePath, recursive: true); + } + } + catch + { + // best effort cleanup. + } + } +} diff --git a/tmp/docenv/pyvenv.cfg b/tmp/docenv/pyvenv.cfg index b0fbdaeb..2aff6b87 100644 --- a/tmp/docenv/pyvenv.cfg +++ b/tmp/docenv/pyvenv.cfg @@ -1,5 +1,5 @@ -home = /usr/bin -include-system-site-packages = false -version = 3.12.3 -executable = /usr/bin/python3.12 -command = /usr/bin/python3 -m venv /mnt/e/dev/git.stella-ops.org/tmp/docenv +home = /usr/bin +include-system-site-packages = false +version = 3.12.3 +executable = /usr/bin/python3.12 +command = /usr/bin/python3 -m venv /mnt/e/dev/git.stella-ops.org/tmp/docenv diff --git a/tmp/reflect/Program.cs b/tmp/reflect/Program.cs index ed089f4c..9a08ba17 100644 --- a/tmp/reflect/Program.cs +++ b/tmp/reflect/Program.cs @@ -1,13 +1,13 @@ -using System; -using System.Linq; -using System.Reflection; - -var assembly = Assembly.Load("RoaringBitmap"); -foreach (var type in assembly.GetTypes().OrderBy(t => t.FullName)) -{ - Console.WriteLine(type.FullName); - foreach (var method in type.GetMethods(BindingFlags.Public | BindingFlags.Static | BindingFlags.Instance | BindingFlags.DeclaredOnly)) - { - Console.WriteLine($" {method.ReturnType.Name} {method.Name}({string.Join(", ", method.GetParameters().Select(p => p.ParameterType.Name + " " + p.Name))})"); - } -} +using System; +using System.Linq; +using System.Reflection; + +var assembly = Assembly.Load("RoaringBitmap"); +foreach (var type in assembly.GetTypes().OrderBy(t => t.FullName)) +{ + Console.WriteLine(type.FullName); + foreach (var method in type.GetMethods(BindingFlags.Public | BindingFlags.Static | BindingFlags.Instance | BindingFlags.DeclaredOnly)) + { + Console.WriteLine($" {method.ReturnType.Name} {method.Name}({string.Join(", ", method.GetParameters().Select(p => p.ParameterType.Name + " " + p.Name))})"); + } +} diff --git a/tmp/reflect/reflect.csproj b/tmp/reflect/reflect.csproj index 9d15235d..55260214 
100644 --- a/tmp/reflect/reflect.csproj +++ b/tmp/reflect/reflect.csproj @@ -1,14 +1,14 @@ -<Project Sdk="Microsoft.NET.Sdk"> - - <PropertyGroup> - <OutputType>Exe</OutputType> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - - <ItemGroup> - <PackageReference Include="RoaringBitmap" Version="0.0.9" /> - </ItemGroup> - -</Project> +<Project Sdk="Microsoft.NET.Sdk"> + + <PropertyGroup> + <OutputType>Exe</OutputType> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + + <ItemGroup> + <PackageReference Include="RoaringBitmap" Version="0.0.9" /> + </ItemGroup> + +</Project> diff --git a/tools/FixtureUpdater/FixtureUpdater.csproj b/tools/FixtureUpdater/FixtureUpdater.csproj index d0436e1b..3a8ee640 100644 --- a/tools/FixtureUpdater/FixtureUpdater.csproj +++ b/tools/FixtureUpdater/FixtureUpdater.csproj @@ -1,20 +1,20 @@ -<Project Sdk="Microsoft.NET.Sdk"> - - <PropertyGroup> - <OutputType>Exe</OutputType> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - - <ItemGroup> - <ProjectReference Include="../../src/StellaOps.Concelier.Connector.Osv/StellaOps.Concelier.Connector.Osv.csproj" /> - <ProjectReference Include="../../src/StellaOps.Concelier.Connector.Ghsa/StellaOps.Concelier.Connector.Ghsa.csproj" /> - <ProjectReference Include="../../src/StellaOps.Concelier.Connector.Nvd/StellaOps.Concelier.Connector.Nvd.csproj" /> - <ProjectReference Include="../../src/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> - <ProjectReference Include="../../src/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> - <ProjectReference Include="../../src/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> - <ProjectReference Include="../../src/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" /> - </ItemGroup> - -</Project> +<Project Sdk="Microsoft.NET.Sdk"> + + <PropertyGroup> + <OutputType>Exe</OutputType> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + + <ItemGroup> + <ProjectReference Include="../../src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Osv/StellaOps.Concelier.Connector.Osv.csproj" /> + <ProjectReference Include="../../src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Ghsa/StellaOps.Concelier.Connector.Ghsa.csproj" /> + <ProjectReference Include="../../src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Nvd/StellaOps.Concelier.Connector.Nvd.csproj" /> + <ProjectReference Include="../../src/Concelier/StellaOps.Concelier.PluginBinaries/StellaOps.Concelier.Connector.Common/StellaOps.Concelier.Connector.Common.csproj" /> + <ProjectReference Include="../../src/Concelier/__Libraries/StellaOps.Concelier.Storage.Mongo/StellaOps.Concelier.Storage.Mongo.csproj" /> + <ProjectReference Include="../../src/Concelier/__Libraries/StellaOps.Concelier.Models/StellaOps.Concelier.Models.csproj" /> + <ProjectReference Include="../../src/Concelier/__Libraries/StellaOps.Concelier.Testing/StellaOps.Concelier.Testing.csproj" /> + </ItemGroup> + +</Project> diff --git a/tools/LanguageAnalyzerSmoke/LanguageAnalyzerSmoke.csproj b/tools/LanguageAnalyzerSmoke/LanguageAnalyzerSmoke.csproj index 5828535c..a7490b33 
100644 --- a/tools/LanguageAnalyzerSmoke/LanguageAnalyzerSmoke.csproj +++ b/tools/LanguageAnalyzerSmoke/LanguageAnalyzerSmoke.csproj @@ -1,18 +1,18 @@ -<?xml version="1.0" encoding="utf-8"?> -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <OutputType>Exe</OutputType> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <ItemGroup> - <PackageReference Include="Microsoft.Extensions.DependencyInjection" Version="10.0.0-rc.2.25502.107" /> - <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> - </ItemGroup> - <ItemGroup> - <ProjectReference Include="..\..\src\StellaOps.Scanner.Analyzers.Lang\StellaOps.Scanner.Analyzers.Lang.csproj" /> - <ProjectReference Include="..\..\src\StellaOps.Scanner.Core\StellaOps.Scanner.Core.csproj" /> - </ItemGroup> -</Project> +<?xml version="1.0" encoding="utf-8"?> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <OutputType>Exe</OutputType> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <ItemGroup> + <PackageReference Include="Microsoft.Extensions.DependencyInjection" Version="10.0.0-rc.2.25502.107" /> + <PackageReference Include="Microsoft.Extensions.Logging.Abstractions" Version="10.0.0-rc.2.25502.107" /> + </ItemGroup> + <ItemGroup> + <ProjectReference Include="..\..\src\StellaOps.Scanner.Analyzers.Lang\StellaOps.Scanner.Analyzers.Lang.csproj" /> + <ProjectReference Include="..\..\src\StellaOps.Scanner.Core\StellaOps.Scanner.Core.csproj" /> + </ItemGroup> +</Project> diff --git a/tools/LanguageAnalyzerSmoke/Program.cs b/tools/LanguageAnalyzerSmoke/Program.cs index 1679f839..907eb1d9 100644 --- a/tools/LanguageAnalyzerSmoke/Program.cs +++ b/tools/LanguageAnalyzerSmoke/Program.cs @@ -83,7 +83,7 @@ internal sealed class SmokeOptions Console.WriteLine("Options:"); Console.WriteLine(" -r, --repo-root <path> Repository root (defaults to current working directory)"); Console.WriteLine(" -p, --plugin-directory <name> Analyzer plug-in directory under plugins/scanner/analyzers/lang (defaults to StellaOps.Scanner.Analyzers.Lang.Python)"); - Console.WriteLine(" -f, --fixture-path <path> Relative path to fixtures root (defaults to src/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python)"); + Console.WriteLine(" -f, --fixture-path <path> Relative path to fixtures root (defaults to src/Scanner/__Tests/StellaOps.Scanner.Analyzers.Lang.Python.Tests/Fixtures/lang/python)"); Console.WriteLine(" -h, --help Show usage information"); } } diff --git a/tools/NotifySmokeCheck/NotifySmokeCheck.csproj b/tools/NotifySmokeCheck/NotifySmokeCheck.csproj index 40dbf13e..b1e271b7 100644 --- a/tools/NotifySmokeCheck/NotifySmokeCheck.csproj +++ b/tools/NotifySmokeCheck/NotifySmokeCheck.csproj @@ -1,12 +1,12 @@ -<Project Sdk="Microsoft.NET.Sdk"> - <PropertyGroup> - <OutputType>Exe</OutputType> - <TargetFramework>net10.0</TargetFramework> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - <ItemGroup> - <PackageReference Include="StackExchange.Redis" Version="2.8.24" /> - </ItemGroup> -</Project> +<Project Sdk="Microsoft.NET.Sdk"> + <PropertyGroup> + <OutputType>Exe</OutputType> + <TargetFramework>net10.0</TargetFramework> + 
<Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + <ItemGroup> + <PackageReference Include="StackExchange.Redis" Version="2.8.24" /> + </ItemGroup> +</Project> diff --git a/tools/NotifySmokeCheck/Program.cs b/tools/NotifySmokeCheck/Program.cs index 6e04b797..1ed47417 100644 --- a/tools/NotifySmokeCheck/Program.cs +++ b/tools/NotifySmokeCheck/Program.cs @@ -1,198 +1,198 @@ -using System.Globalization; -using System.Net.Http.Headers; -using System.Linq; -using System.Text.Json; -using StackExchange.Redis; - -static string RequireEnv(string name) -{ - var value = Environment.GetEnvironmentVariable(name); - if (string.IsNullOrWhiteSpace(value)) - { - throw new InvalidOperationException($"Environment variable '{name}' is required for Notify smoke validation."); - } - - return value; -} - -static string? GetField(StreamEntry entry, string fieldName) -{ - foreach (var pair in entry.Values) - { - if (string.Equals(pair.Name, fieldName, StringComparison.OrdinalIgnoreCase)) - { - return pair.Value.ToString(); - } - } - - return null; -} - -static void Ensure(bool condition, string message) -{ - if (!condition) - { - throw new InvalidOperationException(message); - } -} - -var redisDsn = RequireEnv("NOTIFY_SMOKE_REDIS_DSN"); -var redisStream = Environment.GetEnvironmentVariable("NOTIFY_SMOKE_STREAM"); -if (string.IsNullOrWhiteSpace(redisStream)) -{ - redisStream = "stella.events"; -} - -var expectedKindsEnv = RequireEnv("NOTIFY_SMOKE_EXPECT_KINDS"); - -var expectedKinds = expectedKindsEnv - .Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries) - .Select(kind => kind.ToLowerInvariant()) - .Distinct() - .ToArray(); -Ensure(expectedKinds.Length > 0, "Expected at least one event kind in NOTIFY_SMOKE_EXPECT_KINDS."); - -var lookbackMinutesEnv = RequireEnv("NOTIFY_SMOKE_LOOKBACK_MINUTES"); -if (!double.TryParse(lookbackMinutesEnv, NumberStyles.Any, CultureInfo.InvariantCulture, out var lookbackMinutes)) -{ - throw new InvalidOperationException("NOTIFY_SMOKE_LOOKBACK_MINUTES must be numeric."); -} -Ensure(lookbackMinutes > 0, "NOTIFY_SMOKE_LOOKBACK_MINUTES must be greater than zero."); - -var now = DateTimeOffset.UtcNow; -var sinceThreshold = now - TimeSpan.FromMinutes(Math.Max(1, lookbackMinutes)); - -Console.WriteLine($"ℹ️ Checking Redis stream '{redisStream}' for kinds [{string.Join(", ", expectedKinds)}] within the last {lookbackMinutes:F1} minutes."); - -var redisConfig = ConfigurationOptions.Parse(redisDsn); -redisConfig.AbortOnConnectFail = false; - -await using var redisConnection = await ConnectionMultiplexer.ConnectAsync(redisConfig); -var database = redisConnection.GetDatabase(); - -var streamEntries = await database.StreamRangeAsync(redisStream, "-", "+", count: 200); -if (streamEntries.Length > 1) -{ - Array.Reverse(streamEntries); -} -Ensure(streamEntries.Length > 0, $"Redis stream '{redisStream}' is empty."); - -var recentEntries = new List<StreamEntry>(); -foreach (var entry in streamEntries) -{ - var timestampText = GetField(entry, "ts"); - if (timestampText is null) - { - continue; - } - - if (!DateTimeOffset.TryParse(timestampText, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, out var entryTimestamp)) - { - continue; - } - - if (entryTimestamp >= sinceThreshold) - { - recentEntries.Add(entry); - } -} - -Ensure(recentEntries.Count > 0, $"No Redis events newer than {sinceThreshold:u} located in stream 
'{redisStream}'."); - -var missingKinds = new List<string>(); -foreach (var kind in expectedKinds) -{ - var match = recentEntries.FirstOrDefault(entry => - { - var entryKind = GetField(entry, "kind")?.ToLowerInvariant(); - return entryKind == kind; - }); - - if (match.Equals(default(StreamEntry))) - { - missingKinds.Add(kind); - } -} - -Ensure(missingKinds.Count == 0, $"Missing expected Redis events for kinds: {string.Join(", ", missingKinds)}"); - -Console.WriteLine("✅ Redis event stream contains the expected scanner events."); - -var notifyBaseUrl = RequireEnv("NOTIFY_SMOKE_NOTIFY_BASEURL").TrimEnd('/'); -var notifyToken = RequireEnv("NOTIFY_SMOKE_NOTIFY_TOKEN"); -var notifyTenant = RequireEnv("NOTIFY_SMOKE_NOTIFY_TENANT"); -var notifyTenantHeader = Environment.GetEnvironmentVariable("NOTIFY_SMOKE_NOTIFY_TENANT_HEADER"); -if (string.IsNullOrWhiteSpace(notifyTenantHeader)) -{ - notifyTenantHeader = "X-StellaOps-Tenant"; -} - -var notifyTimeoutSeconds = 30; -var notifyTimeoutEnv = Environment.GetEnvironmentVariable("NOTIFY_SMOKE_NOTIFY_TIMEOUT_SECONDS"); -if (!string.IsNullOrWhiteSpace(notifyTimeoutEnv) && int.TryParse(notifyTimeoutEnv, NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsedTimeout)) -{ - notifyTimeoutSeconds = Math.Max(5, parsedTimeout); -} - -using var httpClient = new HttpClient -{ - Timeout = TimeSpan.FromSeconds(notifyTimeoutSeconds), -}; - -httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", notifyToken); -httpClient.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json")); -httpClient.DefaultRequestHeaders.Add(notifyTenantHeader, notifyTenant); - -var sinceQuery = Uri.EscapeDataString(sinceThreshold.ToString("O", CultureInfo.InvariantCulture)); -var deliveriesUrl = $"{notifyBaseUrl}/api/v1/deliveries?since={sinceQuery}&limit=200"; - -Console.WriteLine($"ℹ️ Querying Notify deliveries via {deliveriesUrl}."); - -using var response = await httpClient.GetAsync(deliveriesUrl); -if (!response.IsSuccessStatusCode) -{ - var body = await response.Content.ReadAsStringAsync(); - throw new InvalidOperationException($"Notify deliveries request failed with {(int)response.StatusCode} {response.ReasonPhrase}: {body}"); -} - -var json = await response.Content.ReadAsStringAsync(); -if (string.IsNullOrWhiteSpace(json)) -{ - throw new InvalidOperationException("Notify deliveries response body was empty."); -} - -using var document = JsonDocument.Parse(json); -var root = document.RootElement; - -IEnumerable<JsonElement> EnumerateDeliveries(JsonElement element) -{ - return element.ValueKind switch - { - JsonValueKind.Array => element.EnumerateArray(), - JsonValueKind.Object when element.TryGetProperty("items", out var items) && items.ValueKind == JsonValueKind.Array => items.EnumerateArray(), - _ => throw new InvalidOperationException("Notify deliveries response was not an array or did not contain an 'items' collection.") - }; -} - -var deliveries = EnumerateDeliveries(root).ToArray(); -Ensure(deliveries.Length > 0, "Notify deliveries response did not return any records."); - -var missingDeliveryKinds = new List<string>(); -foreach (var kind in expectedKinds) -{ - var found = deliveries.Any(delivery => - delivery.TryGetProperty("kind", out var kindProperty) && - kindProperty.GetString()?.Equals(kind, StringComparison.OrdinalIgnoreCase) == true && - delivery.TryGetProperty("status", out var statusProperty) && - !string.Equals(statusProperty.GetString(), "failed", StringComparison.OrdinalIgnoreCase)); - - if 
(!found) - { - missingDeliveryKinds.Add(kind); - } -} - -Ensure(missingDeliveryKinds.Count == 0, $"Notify deliveries missing successful records for kinds: {string.Join(", ", missingDeliveryKinds)}"); - -Console.WriteLine("✅ Notify deliveries include the expected scanner events."); -Console.WriteLine("🎉 Notify smoke validation completed successfully."); +using System.Globalization; +using System.Net.Http.Headers; +using System.Linq; +using System.Text.Json; +using StackExchange.Redis; + +static string RequireEnv(string name) +{ + var value = Environment.GetEnvironmentVariable(name); + if (string.IsNullOrWhiteSpace(value)) + { + throw new InvalidOperationException($"Environment variable '{name}' is required for Notify smoke validation."); + } + + return value; +} + +static string? GetField(StreamEntry entry, string fieldName) +{ + foreach (var pair in entry.Values) + { + if (string.Equals(pair.Name, fieldName, StringComparison.OrdinalIgnoreCase)) + { + return pair.Value.ToString(); + } + } + + return null; +} + +static void Ensure(bool condition, string message) +{ + if (!condition) + { + throw new InvalidOperationException(message); + } +} + +var redisDsn = RequireEnv("NOTIFY_SMOKE_REDIS_DSN"); +var redisStream = Environment.GetEnvironmentVariable("NOTIFY_SMOKE_STREAM"); +if (string.IsNullOrWhiteSpace(redisStream)) +{ + redisStream = "stella.events"; +} + +var expectedKindsEnv = RequireEnv("NOTIFY_SMOKE_EXPECT_KINDS"); + +var expectedKinds = expectedKindsEnv + .Split(',', StringSplitOptions.RemoveEmptyEntries | StringSplitOptions.TrimEntries) + .Select(kind => kind.ToLowerInvariant()) + .Distinct() + .ToArray(); +Ensure(expectedKinds.Length > 0, "Expected at least one event kind in NOTIFY_SMOKE_EXPECT_KINDS."); + +var lookbackMinutesEnv = RequireEnv("NOTIFY_SMOKE_LOOKBACK_MINUTES"); +if (!double.TryParse(lookbackMinutesEnv, NumberStyles.Any, CultureInfo.InvariantCulture, out var lookbackMinutes)) +{ + throw new InvalidOperationException("NOTIFY_SMOKE_LOOKBACK_MINUTES must be numeric."); +} +Ensure(lookbackMinutes > 0, "NOTIFY_SMOKE_LOOKBACK_MINUTES must be greater than zero."); + +var now = DateTimeOffset.UtcNow; +var sinceThreshold = now - TimeSpan.FromMinutes(Math.Max(1, lookbackMinutes)); + +Console.WriteLine($"ℹ️ Checking Redis stream '{redisStream}' for kinds [{string.Join(", ", expectedKinds)}] within the last {lookbackMinutes:F1} minutes."); + +var redisConfig = ConfigurationOptions.Parse(redisDsn); +redisConfig.AbortOnConnectFail = false; + +await using var redisConnection = await ConnectionMultiplexer.ConnectAsync(redisConfig); +var database = redisConnection.GetDatabase(); + +var streamEntries = await database.StreamRangeAsync(redisStream, "-", "+", count: 200); +if (streamEntries.Length > 1) +{ + Array.Reverse(streamEntries); +} +Ensure(streamEntries.Length > 0, $"Redis stream '{redisStream}' is empty."); + +var recentEntries = new List<StreamEntry>(); +foreach (var entry in streamEntries) +{ + var timestampText = GetField(entry, "ts"); + if (timestampText is null) + { + continue; + } + + if (!DateTimeOffset.TryParse(timestampText, CultureInfo.InvariantCulture, DateTimeStyles.AssumeUniversal | DateTimeStyles.AdjustToUniversal, out var entryTimestamp)) + { + continue; + } + + if (entryTimestamp >= sinceThreshold) + { + recentEntries.Add(entry); + } +} + +Ensure(recentEntries.Count > 0, $"No Redis events newer than {sinceThreshold:u} located in stream '{redisStream}'."); + +var missingKinds = new List<string>(); +foreach (var kind in expectedKinds) +{ + var match = 
recentEntries.FirstOrDefault(entry => + { + var entryKind = GetField(entry, "kind")?.ToLowerInvariant(); + return entryKind == kind; + }); + + if (match.Equals(default(StreamEntry))) + { + missingKinds.Add(kind); + } +} + +Ensure(missingKinds.Count == 0, $"Missing expected Redis events for kinds: {string.Join(", ", missingKinds)}"); + +Console.WriteLine("✅ Redis event stream contains the expected scanner events."); + +var notifyBaseUrl = RequireEnv("NOTIFY_SMOKE_NOTIFY_BASEURL").TrimEnd('/'); +var notifyToken = RequireEnv("NOTIFY_SMOKE_NOTIFY_TOKEN"); +var notifyTenant = RequireEnv("NOTIFY_SMOKE_NOTIFY_TENANT"); +var notifyTenantHeader = Environment.GetEnvironmentVariable("NOTIFY_SMOKE_NOTIFY_TENANT_HEADER"); +if (string.IsNullOrWhiteSpace(notifyTenantHeader)) +{ + notifyTenantHeader = "X-StellaOps-Tenant"; +} + +var notifyTimeoutSeconds = 30; +var notifyTimeoutEnv = Environment.GetEnvironmentVariable("NOTIFY_SMOKE_NOTIFY_TIMEOUT_SECONDS"); +if (!string.IsNullOrWhiteSpace(notifyTimeoutEnv) && int.TryParse(notifyTimeoutEnv, NumberStyles.Integer, CultureInfo.InvariantCulture, out var parsedTimeout)) +{ + notifyTimeoutSeconds = Math.Max(5, parsedTimeout); +} + +using var httpClient = new HttpClient +{ + Timeout = TimeSpan.FromSeconds(notifyTimeoutSeconds), +}; + +httpClient.DefaultRequestHeaders.Authorization = new AuthenticationHeaderValue("Bearer", notifyToken); +httpClient.DefaultRequestHeaders.Accept.Add(new MediaTypeWithQualityHeaderValue("application/json")); +httpClient.DefaultRequestHeaders.Add(notifyTenantHeader, notifyTenant); + +var sinceQuery = Uri.EscapeDataString(sinceThreshold.ToString("O", CultureInfo.InvariantCulture)); +var deliveriesUrl = $"{notifyBaseUrl}/api/v1/deliveries?since={sinceQuery}&limit=200"; + +Console.WriteLine($"ℹ️ Querying Notify deliveries via {deliveriesUrl}."); + +using var response = await httpClient.GetAsync(deliveriesUrl); +if (!response.IsSuccessStatusCode) +{ + var body = await response.Content.ReadAsStringAsync(); + throw new InvalidOperationException($"Notify deliveries request failed with {(int)response.StatusCode} {response.ReasonPhrase}: {body}"); +} + +var json = await response.Content.ReadAsStringAsync(); +if (string.IsNullOrWhiteSpace(json)) +{ + throw new InvalidOperationException("Notify deliveries response body was empty."); +} + +using var document = JsonDocument.Parse(json); +var root = document.RootElement; + +IEnumerable<JsonElement> EnumerateDeliveries(JsonElement element) +{ + return element.ValueKind switch + { + JsonValueKind.Array => element.EnumerateArray(), + JsonValueKind.Object when element.TryGetProperty("items", out var items) && items.ValueKind == JsonValueKind.Array => items.EnumerateArray(), + _ => throw new InvalidOperationException("Notify deliveries response was not an array or did not contain an 'items' collection.") + }; +} + +var deliveries = EnumerateDeliveries(root).ToArray(); +Ensure(deliveries.Length > 0, "Notify deliveries response did not return any records."); + +var missingDeliveryKinds = new List<string>(); +foreach (var kind in expectedKinds) +{ + var found = deliveries.Any(delivery => + delivery.TryGetProperty("kind", out var kindProperty) && + kindProperty.GetString()?.Equals(kind, StringComparison.OrdinalIgnoreCase) == true && + delivery.TryGetProperty("status", out var statusProperty) && + !string.Equals(statusProperty.GetString(), "failed", StringComparison.OrdinalIgnoreCase)); + + if (!found) + { + missingDeliveryKinds.Add(kind); + } +} + +Ensure(missingDeliveryKinds.Count == 0, $"Notify deliveries 
missing successful records for kinds: {string.Join(", ", missingDeliveryKinds)}"); + +Console.WriteLine("✅ Notify deliveries include the expected scanner events."); +Console.WriteLine("🎉 Notify smoke validation completed successfully."); diff --git a/tools/PolicyDslValidator/PolicyDslValidator.csproj b/tools/PolicyDslValidator/PolicyDslValidator.csproj index edd23828..75115aee 100644 --- a/tools/PolicyDslValidator/PolicyDslValidator.csproj +++ b/tools/PolicyDslValidator/PolicyDslValidator.csproj @@ -1,14 +1,14 @@ -<Project Sdk="Microsoft.NET.Sdk"> - - <PropertyGroup> - <OutputType>Exe</OutputType> - <TargetFramework>net10.0</TargetFramework> - <Nullable>enable</Nullable> - <ImplicitUsings>enable</ImplicitUsings> - </PropertyGroup> - - <ItemGroup> - <ProjectReference Include="..\..\src\StellaOps.Policy\StellaOps.Policy.csproj" /> - </ItemGroup> - -</Project> +<Project Sdk="Microsoft.NET.Sdk"> + + <PropertyGroup> + <OutputType>Exe</OutputType> + <TargetFramework>net10.0</TargetFramework> + <Nullable>enable</Nullable> + <ImplicitUsings>enable</ImplicitUsings> + </PropertyGroup> + + <ItemGroup> + <ProjectReference Include="..\..\src\StellaOps.Policy\StellaOps.Policy.csproj" /> + </ItemGroup> + +</Project> diff --git a/tools/PolicyDslValidator/Program.cs b/tools/PolicyDslValidator/Program.cs index a1420e6e..931f38ad 100644 --- a/tools/PolicyDslValidator/Program.cs +++ b/tools/PolicyDslValidator/Program.cs @@ -1,56 +1,56 @@ -using StellaOps.Policy; - -if (args.Length == 0) -{ - Console.Error.WriteLine("Usage: policy-dsl-validator [--strict] [--json] <path-or-glob> [<path-or-glob> ...]"); - Console.Error.WriteLine("Example: policy-dsl-validator --strict docs/examples/policies"); - return 64; // EX_USAGE -} - -var inputs = new List<string>(); -var strict = false; -var outputJson = false; - -foreach (var arg in args) -{ - switch (arg) - { - case "--strict": - case "-s": - strict = true; - break; - - case "--json": - case "-j": - outputJson = true; - break; - - case "--help": - case "-h": - case "-?": - Console.WriteLine("Usage: policy-dsl-validator [--strict] [--json] <path-or-glob> [<path-or-glob> ...]"); - Console.WriteLine("Example: policy-dsl-validator --strict docs/examples/policies"); - return 0; - - default: - inputs.Add(arg); - break; - } -} - -if (inputs.Count == 0) -{ - Console.Error.WriteLine("No input files or directories provided."); - return 64; // EX_USAGE -} - -var options = new PolicyValidationCliOptions -{ - Inputs = inputs, - Strict = strict, - OutputJson = outputJson, -}; - -var cli = new PolicyValidationCli(); -var exitCode = await cli.RunAsync(options, CancellationToken.None); -return exitCode; +using StellaOps.Policy; + +if (args.Length == 0) +{ + Console.Error.WriteLine("Usage: policy-dsl-validator [--strict] [--json] <path-or-glob> [<path-or-glob> ...]"); + Console.Error.WriteLine("Example: policy-dsl-validator --strict docs/examples/policies"); + return 64; // EX_USAGE +} + +var inputs = new List<string>(); +var strict = false; +var outputJson = false; + +foreach (var arg in args) +{ + switch (arg) + { + case "--strict": + case "-s": + strict = true; + break; + + case "--json": + case "-j": + outputJson = true; + break; + + case "--help": + case "-h": + case "-?": + Console.WriteLine("Usage: policy-dsl-validator [--strict] [--json] <path-or-glob> [<path-or-glob> ...]"); + Console.WriteLine("Example: policy-dsl-validator --strict docs/examples/policies"); + return 0; + + default: + inputs.Add(arg); + break; + } +} + +if (inputs.Count == 0) +{ + Console.Error.WriteLine("No 
input files or directories provided."); + return 64; // EX_USAGE +} + +var options = new PolicyValidationCliOptions +{ + Inputs = inputs, + Strict = strict, + OutputJson = outputJson, +}; + +var cli = new PolicyValidationCli(); +var exitCode = await cli.RunAsync(options, CancellationToken.None); +return exitCode; diff --git a/tools/PolicySchemaExporter/PolicySchemaExporter.csproj b/tools/PolicySchemaExporter/PolicySchemaExporter.csproj index 040cb465..719c79b0 100644 --- a/tools/PolicySchemaExporter/PolicySchemaExporter.csproj +++ b/tools/PolicySchemaExporter/PolicySchemaExporter.csproj @@ -1,21 +1,21 @@ -<Project Sdk="Microsoft.NET.Sdk"> - - <PropertyGroup> - <OutputType>Exe</OutputType> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - <TreatWarningsAsErrors>true</TreatWarningsAsErrors> - </PropertyGroup> - - <ItemGroup> - <PackageReference Include="NJsonSchema" Version="11.5.1" /> - <PackageReference Include="NJsonSchema.SystemTextJson" Version="11.5.1" /> - <PackageReference Include="Newtonsoft.Json" Version="13.0.3" /> - </ItemGroup> - - <ItemGroup> - <ProjectReference Include="..\..\src\StellaOps.Scheduler.Models\StellaOps.Scheduler.Models.csproj" /> - </ItemGroup> - -</Project> +<Project Sdk="Microsoft.NET.Sdk"> + + <PropertyGroup> + <OutputType>Exe</OutputType> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + <TreatWarningsAsErrors>true</TreatWarningsAsErrors> + </PropertyGroup> + + <ItemGroup> + <PackageReference Include="NJsonSchema" Version="11.5.1" /> + <PackageReference Include="NJsonSchema.SystemTextJson" Version="11.5.1" /> + <PackageReference Include="Newtonsoft.Json" Version="13.0.3" /> + </ItemGroup> + + <ItemGroup> + <ProjectReference Include="..\..\src\StellaOps.Scheduler.Models\StellaOps.Scheduler.Models.csproj" /> + </ItemGroup> + +</Project> diff --git a/tools/PolicySchemaExporter/Program.cs b/tools/PolicySchemaExporter/Program.cs index f37a263f..bfd3467a 100644 --- a/tools/PolicySchemaExporter/Program.cs +++ b/tools/PolicySchemaExporter/Program.cs @@ -1,48 +1,48 @@ -using System.Collections.Immutable; -using System.Text.Json; -using System.Text.Json.Serialization; -using NJsonSchema; -using NJsonSchema.Generation; -using NJsonSchema.Generation.SystemTextJson; -using Newtonsoft.Json; -using StellaOps.Scheduler.Models; - -var output = args.Length switch -{ - 0 => Path.GetFullPath(Path.Combine(AppContext.BaseDirectory, "..", "..", "..", "docs", "schemas")), - 1 => Path.GetFullPath(args[0]), - _ => throw new ArgumentException("Usage: dotnet run --project tools/PolicySchemaExporter -- [outputDirectory]") -}; - -Directory.CreateDirectory(output); - -var generatorSettings = new SystemTextJsonSchemaGeneratorSettings -{ - SchemaType = SchemaType.JsonSchema, - DefaultReferenceTypeNullHandling = ReferenceTypeNullHandling.NotNull, - SerializerOptions = new JsonSerializerOptions - { - PropertyNamingPolicy = JsonNamingPolicy.CamelCase, - DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, - }, -}; - -var generator = new JsonSchemaGenerator(generatorSettings); - -var exports = ImmutableArray.Create( - (FileName: "policy-run-request.schema.json", Type: typeof(PolicyRunRequest)), - (FileName: "policy-run-status.schema.json", Type: typeof(PolicyRunStatus)), - (FileName: "policy-diff-summary.schema.json", Type: typeof(PolicyDiffSummary)), - (FileName: "policy-explain-trace.schema.json", Type: typeof(PolicyExplainTrace)) -); - -foreach 
(var export in exports) -{ - var schema = generator.Generate(export.Type); - schema.Title = export.Type.Name; - schema.AllowAdditionalProperties = false; - - var outputPath = Path.Combine(output, export.FileName); - await File.WriteAllTextAsync(outputPath, schema.ToJson(Formatting.Indented) + Environment.NewLine); - Console.WriteLine($"Wrote {outputPath}"); -} +using System.Collections.Immutable; +using System.Text.Json; +using System.Text.Json.Serialization; +using NJsonSchema; +using NJsonSchema.Generation; +using NJsonSchema.Generation.SystemTextJson; +using Newtonsoft.Json; +using StellaOps.Scheduler.Models; + +var output = args.Length switch +{ + 0 => Path.GetFullPath(Path.Combine(AppContext.BaseDirectory, "..", "..", "..", "docs", "schemas")), + 1 => Path.GetFullPath(args[0]), + _ => throw new ArgumentException("Usage: dotnet run --project tools/PolicySchemaExporter -- [outputDirectory]") +}; + +Directory.CreateDirectory(output); + +var generatorSettings = new SystemTextJsonSchemaGeneratorSettings +{ + SchemaType = SchemaType.JsonSchema, + DefaultReferenceTypeNullHandling = ReferenceTypeNullHandling.NotNull, + SerializerOptions = new JsonSerializerOptions + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + }, +}; + +var generator = new JsonSchemaGenerator(generatorSettings); + +var exports = ImmutableArray.Create( + (FileName: "policy-run-request.schema.json", Type: typeof(PolicyRunRequest)), + (FileName: "policy-run-status.schema.json", Type: typeof(PolicyRunStatus)), + (FileName: "policy-diff-summary.schema.json", Type: typeof(PolicyDiffSummary)), + (FileName: "policy-explain-trace.schema.json", Type: typeof(PolicyExplainTrace)) +); + +foreach (var export in exports) +{ + var schema = generator.Generate(export.Type); + schema.Title = export.Type.Name; + schema.AllowAdditionalProperties = false; + + var outputPath = Path.Combine(output, export.FileName); + await File.WriteAllTextAsync(outputPath, schema.ToJson(Formatting.Indented) + Environment.NewLine); + Console.WriteLine($"Wrote {outputPath}"); +} diff --git a/tools/PolicySimulationSmoke/PolicySimulationSmoke.csproj b/tools/PolicySimulationSmoke/PolicySimulationSmoke.csproj index 95b4d40b..034e2669 100644 --- a/tools/PolicySimulationSmoke/PolicySimulationSmoke.csproj +++ b/tools/PolicySimulationSmoke/PolicySimulationSmoke.csproj @@ -1,14 +1,14 @@ -<Project Sdk="Microsoft.NET.Sdk"> - - <PropertyGroup> - <OutputType>Exe</OutputType> - <TargetFramework>net10.0</TargetFramework> - <ImplicitUsings>enable</ImplicitUsings> - <Nullable>enable</Nullable> - </PropertyGroup> - - <ItemGroup> - <ProjectReference Include="..\..\src\StellaOps.Policy\StellaOps.Policy.csproj" /> - </ItemGroup> - -</Project> +<Project Sdk="Microsoft.NET.Sdk"> + + <PropertyGroup> + <OutputType>Exe</OutputType> + <TargetFramework>net10.0</TargetFramework> + <ImplicitUsings>enable</ImplicitUsings> + <Nullable>enable</Nullable> + </PropertyGroup> + + <ItemGroup> + <ProjectReference Include="..\..\src\StellaOps.Policy\StellaOps.Policy.csproj" /> + </ItemGroup> + +</Project> diff --git a/tools/PolicySimulationSmoke/Program.cs b/tools/PolicySimulationSmoke/Program.cs index 8c537e63..a6cb7440 100644 --- a/tools/PolicySimulationSmoke/Program.cs +++ b/tools/PolicySimulationSmoke/Program.cs @@ -1,291 +1,291 @@ -using System.Collections.Immutable; -using System.Text.Json; -using Microsoft.Extensions.Logging; -using Microsoft.Extensions.Logging.Abstractions; -using StellaOps.Policy; - -var scenarioRoot = 
"samples/policy/simulations"; -string? outputDir = null; - -for (var i = 0; i < args.Length; i++) -{ - var arg = args[i]; - switch (arg) - { - case "--scenario-root": - case "-r": - if (i + 1 >= args.Length) - { - Console.Error.WriteLine("Missing value for --scenario-root."); - return 64; - } - scenarioRoot = args[++i]; - break; - case "--output": - case "-o": - if (i + 1 >= args.Length) - { - Console.Error.WriteLine("Missing value for --output."); - return 64; - } - outputDir = args[++i]; - break; - case "--help": - case "-h": - case "-?": - PrintUsage(); - return 0; - default: - Console.Error.WriteLine($"Unknown argument '{arg}'."); - PrintUsage(); - return 64; - } -} - -if (!Directory.Exists(scenarioRoot)) -{ - Console.Error.WriteLine($"Scenario root '{scenarioRoot}' does not exist."); - return 66; -} - -var scenarioFiles = Directory.GetFiles(scenarioRoot, "scenario.json", SearchOption.AllDirectories); -if (scenarioFiles.Length == 0) -{ - Console.Error.WriteLine($"No scenario.json files found under '{scenarioRoot}'."); - return 0; -} - -var loggerFactory = NullLoggerFactory.Instance; -var snapshotStore = new PolicySnapshotStore( - new NullPolicySnapshotRepository(), - new NullPolicyAuditRepository(), - TimeProvider.System, - loggerFactory.CreateLogger<PolicySnapshotStore>()); -var previewService = new PolicyPreviewService(snapshotStore, loggerFactory.CreateLogger<PolicyPreviewService>()); - -var serializerOptions = new JsonSerializerOptions(JsonSerializerDefaults.Web) -{ - PropertyNameCaseInsensitive = true, - ReadCommentHandling = JsonCommentHandling.Skip, -}; - -var summary = new List<ScenarioResult>(); -var success = true; - -foreach (var scenarioFile in scenarioFiles.OrderBy(static f => f, StringComparer.OrdinalIgnoreCase)) -{ - var scenarioText = await File.ReadAllTextAsync(scenarioFile); - var scenario = JsonSerializer.Deserialize<PolicySimulationScenario>(scenarioText, serializerOptions); - if (scenario is null) - { - Console.Error.WriteLine($"Failed to deserialize scenario '{scenarioFile}'."); - success = false; - continue; - } - - var repoRoot = Directory.GetCurrentDirectory(); - var policyPath = Path.Combine(repoRoot, scenario.PolicyPath); - if (!File.Exists(policyPath)) - { - Console.Error.WriteLine($"Policy file '{scenario.PolicyPath}' referenced by scenario '{scenario.Name}' does not exist."); - success = false; - continue; - } - - var policyContent = await File.ReadAllTextAsync(policyPath); - var policyFormat = PolicySchema.DetectFormat(policyPath); - var findings = scenario.Findings.Select(ToPolicyFinding).ToImmutableArray(); - var baseline = scenario.Baseline?.Select(ToPolicyVerdict).ToImmutableArray() ?? 
ImmutableArray<PolicyVerdict>.Empty; - - var request = new PolicyPreviewRequest( - ImageDigest: $"sha256:simulation-{scenario.Name}", - Findings: findings, - BaselineVerdicts: baseline, - SnapshotOverride: null, - ProposedPolicy: new PolicySnapshotContent( - Content: policyContent, - Format: policyFormat, - Actor: "ci", - Source: "ci/simulation-smoke", - Description: $"CI simulation for scenario '{scenario.Name}'")); - - var response = await previewService.PreviewAsync(request, CancellationToken.None); - var scenarioResult = EvaluateScenario(scenario, response); - summary.Add(scenarioResult); - - if (!scenarioResult.Success) - { - success = false; - } -} - -if (outputDir is not null) -{ - Directory.CreateDirectory(outputDir); - var summaryPath = Path.Combine(outputDir, "policy-simulation-summary.json"); - await File.WriteAllTextAsync(summaryPath, JsonSerializer.Serialize(summary, new JsonSerializerOptions { WriteIndented = true })); -} - -return success ? 0 : 1; - -static void PrintUsage() -{ - Console.WriteLine("Usage: policy-simulation-smoke [--scenario-root <path>] [--output <dir>]"); - Console.WriteLine("Example: policy-simulation-smoke --scenario-root samples/policy/simulations --output artifacts/policy-simulations"); -} - -static PolicyFinding ToPolicyFinding(ScenarioFinding finding) -{ - var tags = finding.Tags is null ? ImmutableArray<string>.Empty : ImmutableArray.CreateRange(finding.Tags); - var severity = Enum.Parse<PolicySeverity>(finding.Severity, ignoreCase: true); - return new PolicyFinding( - finding.FindingId, - severity, - finding.Environment, - finding.Source, - finding.Vendor, - finding.License, - finding.Image, - finding.Repository, - finding.Package, - finding.Purl, - finding.Cve, - finding.Path, - finding.LayerDigest, - tags); -} - -static PolicyVerdict ToPolicyVerdict(ScenarioBaseline baseline) -{ - var status = Enum.Parse<PolicyVerdictStatus>(baseline.Status, ignoreCase: true); - var inputs = baseline.Inputs?.ToImmutableDictionary(StringComparer.OrdinalIgnoreCase) ?? ImmutableDictionary<string, double>.Empty; - return new PolicyVerdict( - baseline.FindingId, - status, - RuleName: baseline.RuleName, - RuleAction: baseline.RuleAction, - Notes: baseline.Notes, - Score: baseline.Score, - ConfigVersion: baseline.ConfigVersion ?? 
PolicyScoringConfig.Default.Version, - Inputs: inputs, - QuietedBy: null, - Quiet: false, - UnknownConfidence: null, - ConfidenceBand: null, - UnknownAgeDays: null, - SourceTrust: null, - Reachability: null); -} - -static ScenarioResult EvaluateScenario(PolicySimulationScenario scenario, PolicyPreviewResponse response) -{ - var result = new ScenarioResult(scenario.Name); - if (!response.Success) - { - result.Failures.Add("Preview failed."); - return result with { Success = false, ChangedCount = response.ChangedCount }; - } - - var diffs = response.Diffs.ToDictionary(diff => diff.Projected.FindingId, StringComparer.OrdinalIgnoreCase); - foreach (var expected in scenario.ExpectedDiffs) - { - if (!diffs.TryGetValue(expected.FindingId, out var diff)) - { - result.Failures.Add($"Expected finding '{expected.FindingId}' missing from diff."); - continue; - } - - var projectedStatus = diff.Projected.Status.ToString(); - result.ActualStatuses[expected.FindingId] = projectedStatus; - if (!string.Equals(projectedStatus, expected.Status, StringComparison.OrdinalIgnoreCase)) - { - result.Failures.Add($"Finding '{expected.FindingId}' expected status '{expected.Status}' but was '{projectedStatus}'."); - } - } - - foreach (var diff in diffs.Values) - { - if (!result.ActualStatuses.ContainsKey(diff.Projected.FindingId)) - { - result.ActualStatuses[diff.Projected.FindingId] = diff.Projected.Status.ToString(); - } - } - - var success = result.Failures.Count == 0; - return result with - { - Success = success, - ChangedCount = response.ChangedCount - }; -} - -internal sealed record PolicySimulationScenario -{ - public string Name { get; init; } = "scenario"; - public string PolicyPath { get; init; } = string.Empty; - public List<ScenarioFinding> Findings { get; init; } = new(); - public List<ScenarioExpectedDiff> ExpectedDiffs { get; init; } = new(); - public List<ScenarioBaseline>? Baseline { get; init; } -} - -internal sealed record ScenarioFinding -{ - public string FindingId { get; init; } = string.Empty; - public string Severity { get; init; } = "Low"; - public string? Environment { get; init; } - public string? Source { get; init; } - public string? Vendor { get; init; } - public string? License { get; init; } - public string? Image { get; init; } - public string? Repository { get; init; } - public string? Package { get; init; } - public string? Purl { get; init; } - public string? Cve { get; init; } - public string? Path { get; init; } - public string? LayerDigest { get; init; } - public string[]? Tags { get; init; } -} - -internal sealed record ScenarioExpectedDiff -{ - public string FindingId { get; init; } = string.Empty; - public string Status { get; init; } = "Pass"; -} - -internal sealed record ScenarioBaseline -{ - public string FindingId { get; init; } = string.Empty; - public string Status { get; init; } = "Pass"; - public string? RuleName { get; init; } - public string? RuleAction { get; init; } - public string? Notes { get; init; } - public double Score { get; init; } - public string? ConfigVersion { get; init; } - public Dictionary<string, double>? 
Inputs { get; init; }
-}
-
-internal sealed record ScenarioResult(string ScenarioName)
-{
-    public bool Success { get; init; } = true;
-    public int ChangedCount { get; init; }
-    public List<string> Failures { get; } = new();
-    public Dictionary<string, string> ActualStatuses { get; } = new(StringComparer.OrdinalIgnoreCase);
-}
-
-internal sealed class NullPolicySnapshotRepository : IPolicySnapshotRepository
-{
-    public Task AddAsync(PolicySnapshot snapshot, CancellationToken cancellationToken = default) => Task.CompletedTask;
-
-    public Task<PolicySnapshot?> GetLatestAsync(CancellationToken cancellationToken = default) => Task.FromResult<PolicySnapshot?>(null);
-
-    public Task<IReadOnlyList<PolicySnapshot>> ListAsync(int limit, CancellationToken cancellationToken = default)
-        => Task.FromResult<IReadOnlyList<PolicySnapshot>>(Array.Empty<PolicySnapshot>());
-}
-
-internal sealed class NullPolicyAuditRepository : IPolicyAuditRepository
-{
-    public Task AddAsync(PolicyAuditEntry entry, CancellationToken cancellationToken = default) => Task.CompletedTask;
-
-    public Task<IReadOnlyList<PolicyAuditEntry>> ListAsync(int limit, CancellationToken cancellationToken = default)
-        => Task.FromResult<IReadOnlyList<PolicyAuditEntry>>(Array.Empty<PolicyAuditEntry>());
-}
+using System.Collections.Immutable;
+using System.Text.Json;
+using Microsoft.Extensions.Logging;
+using Microsoft.Extensions.Logging.Abstractions;
+using StellaOps.Policy;
+
+var scenarioRoot = "samples/policy/simulations";
+string? outputDir = null;
+
+for (var i = 0; i < args.Length; i++)
+{
+    var arg = args[i];
+    switch (arg)
+    {
+        case "--scenario-root":
+        case "-r":
+            if (i + 1 >= args.Length)
+            {
+                Console.Error.WriteLine("Missing value for --scenario-root.");
+                return 64;
+            }
+            scenarioRoot = args[++i];
+            break;
+        case "--output":
+        case "-o":
+            if (i + 1 >= args.Length)
+            {
+                Console.Error.WriteLine("Missing value for --output.");
+                return 64;
+            }
+            outputDir = args[++i];
+            break;
+        case "--help":
+        case "-h":
+        case "-?":
+            PrintUsage();
+            return 0;
+        default:
+            Console.Error.WriteLine($"Unknown argument '{arg}'.");
+            PrintUsage();
+            return 64;
+    }
+}
+
+if (!Directory.Exists(scenarioRoot))
+{
+    Console.Error.WriteLine($"Scenario root '{scenarioRoot}' does not exist.");
+    return 66;
+}
+
+var scenarioFiles = Directory.GetFiles(scenarioRoot, "scenario.json", SearchOption.AllDirectories);
+if (scenarioFiles.Length == 0)
+{
+    Console.Error.WriteLine($"No scenario.json files found under '{scenarioRoot}'.");
+    return 0;
+}
+
+var loggerFactory = NullLoggerFactory.Instance;
+var snapshotStore = new PolicySnapshotStore(
+    new NullPolicySnapshotRepository(),
+    new NullPolicyAuditRepository(),
+    TimeProvider.System,
+    loggerFactory.CreateLogger<PolicySnapshotStore>());
+var previewService = new PolicyPreviewService(snapshotStore, loggerFactory.CreateLogger<PolicyPreviewService>());
+
+var serializerOptions = new JsonSerializerOptions(JsonSerializerDefaults.Web)
+{
+    PropertyNameCaseInsensitive = true,
+    ReadCommentHandling = JsonCommentHandling.Skip,
+};
+
+var summary = new List<ScenarioResult>();
+var success = true;
+
+foreach (var scenarioFile in scenarioFiles.OrderBy(static f => f, StringComparer.OrdinalIgnoreCase))
+{
+    var scenarioText = await File.ReadAllTextAsync(scenarioFile);
+    var scenario = JsonSerializer.Deserialize<PolicySimulationScenario>(scenarioText, serializerOptions);
+    if (scenario is null)
+    {
+        Console.Error.WriteLine($"Failed to deserialize scenario '{scenarioFile}'.");
+        success = false;
+        continue;
+    }
+
+    var repoRoot = Directory.GetCurrentDirectory();
+    var policyPath = Path.Combine(repoRoot, scenario.PolicyPath);
+    if (!File.Exists(policyPath))
+    {
+        Console.Error.WriteLine($"Policy file '{scenario.PolicyPath}' referenced by scenario '{scenario.Name}' does not exist.");
+        success = false;
+        continue;
+    }
+
+    var policyContent = await File.ReadAllTextAsync(policyPath);
+    var policyFormat = PolicySchema.DetectFormat(policyPath);
+    var findings = scenario.Findings.Select(ToPolicyFinding).ToImmutableArray();
+    var baseline = scenario.Baseline?.Select(ToPolicyVerdict).ToImmutableArray() ?? ImmutableArray<PolicyVerdict>.Empty;
+
+    var request = new PolicyPreviewRequest(
+        ImageDigest: $"sha256:simulation-{scenario.Name}",
+        Findings: findings,
+        BaselineVerdicts: baseline,
+        SnapshotOverride: null,
+        ProposedPolicy: new PolicySnapshotContent(
+            Content: policyContent,
+            Format: policyFormat,
+            Actor: "ci",
+            Source: "ci/simulation-smoke",
+            Description: $"CI simulation for scenario '{scenario.Name}'"));
+
+    var response = await previewService.PreviewAsync(request, CancellationToken.None);
+    var scenarioResult = EvaluateScenario(scenario, response);
+    summary.Add(scenarioResult);
+
+    if (!scenarioResult.Success)
+    {
+        success = false;
+    }
+}
+
+if (outputDir is not null)
+{
+    Directory.CreateDirectory(outputDir);
+    var summaryPath = Path.Combine(outputDir, "policy-simulation-summary.json");
+    await File.WriteAllTextAsync(summaryPath, JsonSerializer.Serialize(summary, new JsonSerializerOptions { WriteIndented = true }));
+}
+
+return success ? 0 : 1;
+
+static void PrintUsage()
+{
+    Console.WriteLine("Usage: policy-simulation-smoke [--scenario-root <path>] [--output <dir>]");
+    Console.WriteLine("Example: policy-simulation-smoke --scenario-root samples/policy/simulations --output artifacts/policy-simulations");
+}
+
+static PolicyFinding ToPolicyFinding(ScenarioFinding finding)
+{
+    var tags = finding.Tags is null ? ImmutableArray<string>.Empty : ImmutableArray.CreateRange(finding.Tags);
+    var severity = Enum.Parse<PolicySeverity>(finding.Severity, ignoreCase: true);
+    return new PolicyFinding(
+        finding.FindingId,
+        severity,
+        finding.Environment,
+        finding.Source,
+        finding.Vendor,
+        finding.License,
+        finding.Image,
+        finding.Repository,
+        finding.Package,
+        finding.Purl,
+        finding.Cve,
+        finding.Path,
+        finding.LayerDigest,
+        tags);
+}
+
+static PolicyVerdict ToPolicyVerdict(ScenarioBaseline baseline)
+{
+    var status = Enum.Parse<PolicyVerdictStatus>(baseline.Status, ignoreCase: true);
+    var inputs = baseline.Inputs?.ToImmutableDictionary(StringComparer.OrdinalIgnoreCase) ?? ImmutableDictionary<string, double>.Empty;
+    return new PolicyVerdict(
+        baseline.FindingId,
+        status,
+        RuleName: baseline.RuleName,
+        RuleAction: baseline.RuleAction,
+        Notes: baseline.Notes,
+        Score: baseline.Score,
+        ConfigVersion: baseline.ConfigVersion ?? PolicyScoringConfig.Default.Version,
+        Inputs: inputs,
+        QuietedBy: null,
+        Quiet: false,
+        UnknownConfidence: null,
+        ConfidenceBand: null,
+        UnknownAgeDays: null,
+        SourceTrust: null,
+        Reachability: null);
+}
+
+static ScenarioResult EvaluateScenario(PolicySimulationScenario scenario, PolicyPreviewResponse response)
+{
+    var result = new ScenarioResult(scenario.Name);
+    if (!response.Success)
+    {
+        result.Failures.Add("Preview failed.");
+        return result with { Success = false, ChangedCount = response.ChangedCount };
+    }
+
+    var diffs = response.Diffs.ToDictionary(diff => diff.Projected.FindingId, StringComparer.OrdinalIgnoreCase);
+    foreach (var expected in scenario.ExpectedDiffs)
+    {
+        if (!diffs.TryGetValue(expected.FindingId, out var diff))
+        {
+            result.Failures.Add($"Expected finding '{expected.FindingId}' missing from diff.");
+            continue;
+        }
+
+        var projectedStatus = diff.Projected.Status.ToString();
+        result.ActualStatuses[expected.FindingId] = projectedStatus;
+        if (!string.Equals(projectedStatus, expected.Status, StringComparison.OrdinalIgnoreCase))
+        {
+            result.Failures.Add($"Finding '{expected.FindingId}' expected status '{expected.Status}' but was '{projectedStatus}'.");
+        }
+    }
+
+    foreach (var diff in diffs.Values)
+    {
+        if (!result.ActualStatuses.ContainsKey(diff.Projected.FindingId))
+        {
+            result.ActualStatuses[diff.Projected.FindingId] = diff.Projected.Status.ToString();
+        }
+    }
+
+    var success = result.Failures.Count == 0;
+    return result with
+    {
+        Success = success,
+        ChangedCount = response.ChangedCount
+    };
+}
+
+internal sealed record PolicySimulationScenario
+{
+    public string Name { get; init; } = "scenario";
+    public string PolicyPath { get; init; } = string.Empty;
+    public List<ScenarioFinding> Findings { get; init; } = new();
+    public List<ScenarioExpectedDiff> ExpectedDiffs { get; init; } = new();
+    public List<ScenarioBaseline>? Baseline { get; init; }
+}
+
+internal sealed record ScenarioFinding
+{
+    public string FindingId { get; init; } = string.Empty;
+    public string Severity { get; init; } = "Low";
+    public string? Environment { get; init; }
+    public string? Source { get; init; }
+    public string? Vendor { get; init; }
+    public string? License { get; init; }
+    public string? Image { get; init; }
+    public string? Repository { get; init; }
+    public string? Package { get; init; }
+    public string? Purl { get; init; }
+    public string? Cve { get; init; }
+    public string? Path { get; init; }
+    public string? LayerDigest { get; init; }
+    public string[]? Tags { get; init; }
+}
+
+internal sealed record ScenarioExpectedDiff
+{
+    public string FindingId { get; init; } = string.Empty;
+    public string Status { get; init; } = "Pass";
+}
+
+internal sealed record ScenarioBaseline
+{
+    public string FindingId { get; init; } = string.Empty;
+    public string Status { get; init; } = "Pass";
+    public string? RuleName { get; init; }
+    public string? RuleAction { get; init; }
+    public string? Notes { get; init; }
+    public double Score { get; init; }
+    public string? ConfigVersion { get; init; }
+    public Dictionary<string, double>? Inputs { get; init; }
+}
+
+internal sealed record ScenarioResult(string ScenarioName)
+{
+    public bool Success { get; init; } = true;
+    public int ChangedCount { get; init; }
+    public List<string> Failures { get; } = new();
+    public Dictionary<string, string> ActualStatuses { get; } = new(StringComparer.OrdinalIgnoreCase);
+}
+
+internal sealed class NullPolicySnapshotRepository : IPolicySnapshotRepository
+{
+    public Task AddAsync(PolicySnapshot snapshot, CancellationToken cancellationToken = default) => Task.CompletedTask;
+
+    public Task<PolicySnapshot?> GetLatestAsync(CancellationToken cancellationToken = default) => Task.FromResult<PolicySnapshot?>(null);
+
+    public Task<IReadOnlyList<PolicySnapshot>> ListAsync(int limit, CancellationToken cancellationToken = default)
+        => Task.FromResult<IReadOnlyList<PolicySnapshot>>(Array.Empty<PolicySnapshot>());
+}
+
+internal sealed class NullPolicyAuditRepository : IPolicyAuditRepository
+{
+    public Task AddAsync(PolicyAuditEntry entry, CancellationToken cancellationToken = default) => Task.CompletedTask;
+
+    public Task<IReadOnlyList<PolicyAuditEntry>> ListAsync(int limit, CancellationToken cancellationToken = default)
+        => Task.FromResult<IReadOnlyList<PolicyAuditEntry>>(Array.Empty<PolicyAuditEntry>());
+}
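+
+// Illustrative sketch only (hypothetical values): a minimal scenario.json that this runner
+// can deserialize into the PolicySimulationScenario record above. Property names follow the
+// web (camelCase) serializer defaults configured earlier; severity and status strings must
+// match PolicySeverity / PolicyVerdictStatus member names, and policyPath is resolved
+// relative to the repository root. Any checked-in fixtures under samples/policy/simulations
+// (the default --scenario-root) remain authoritative over this sketch.
+//
+// {
+//   "name": "example-scenario",
+//   "policyPath": "samples/policy/simulations/example-scenario/policy.yaml",
+//   "findings": [
+//     { "findingId": "finding-1", "severity": "Low", "environment": "prod" }
+//   ],
+//   "baseline": [
+//     { "findingId": "finding-1", "status": "Pass", "score": 0 }
+//   ],
+//   "expectedDiffs": [
+//     { "findingId": "finding-1", "status": "Pass" }
+//   ]
+// }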